[scala] 01/01: Imported Upstream version 2.11.0

Emmanuel Bourg ebourg-guest at moszumanska.debian.org
Fri Jun 5 22:51:30 UTC 2015


This is an automated email from the git hooks/post-receive script.

ebourg-guest pushed a commit to annotated tag upstream/2.11.0
in repository scala.

commit 32325e14b966a58ac9331bdd6262041e9866b73b
Author: Emmanuel Bourg <ebourg at apache.org>
Date:   Tue Jan 6 12:47:25 2015 +0100

    Imported Upstream version 2.11.0
---
 .gitattributes                                     |    6 +
 .gitignore                                         |    3 +-
 .mailmap                                           |   68 +-
 .travis.yml                                        |   20 +
 CONTRIBUTING.md                                    |    2 +-
 Gemfile                                            |    7 +
 META-INF/MANIFEST.MF                               |    7 -
 README.md                                          |    8 +
 README.rst                                         |  207 -
 bincompat-backward.whitelist.conf                  |  287 +-
 bincompat-forward.whitelist.conf                   |  405 +-
 build-ant-macros.xml                               |  781 ++++
 build.detach.xml                                   |  186 -
 build.number                                       |    4 +-
 build.number.maven                                 |    3 -
 build.xml                                          | 1980 ++++----
 dbuild-meta.json                                   |  100 +
 doc/LICENSE.md                                     |   68 +
 doc/License.rtf                                    |   65 +
 doc/README                                         |   36 +
 {docs => doc}/licenses/apache_jansi.txt            |    0
 {docs => doc}/licenses/bsd_asm.txt                 |    0
 {docs => doc}/licenses/bsd_jline.txt               |    0
 {docs => doc}/licenses/mit_jquery-layout.txt       |    0
 {docs => doc}/licenses/mit_jquery-ui.txt           |    0
 {docs => doc}/licenses/mit_jquery.txt              |    0
 {docs => doc}/licenses/mit_sizzle.txt              |    0
 {docs => doc}/licenses/mit_tools.tooltip.txt       |    0
 docs/LICENSE                                       |   63 -
 docs/README                                        |   36 -
 docs/examples/swing/ColorChooserDemo.scala         |   61 +
 docs/examples/swing/PopupDemo.scala                |   33 +
 lib/ant/ant-contrib.jar.desired.sha1               |    1 +
 lib/ant/ant-dotnet-1.0.jar.desired.sha1            |    1 +
 lib/ant/ant.jar.desired.sha1                       |    1 +
 lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1     |    1 +
 lib/ant/vizant.jar.desired.sha1                    |    1 +
 lib/fjbg.jar.desired.sha1                          |    1 -
 lib/jline.jar.desired.sha1                         |    1 -
 lib/msil.jar.desired.sha1                          |    1 -
 lib/scala-compiler-src.jar.desired.sha1            |    1 -
 lib/scala-compiler.jar.desired.sha1                |    1 -
 lib/scala-library-src.jar.desired.sha1             |    1 -
 lib/scala-library.jar.desired.sha1                 |    1 -
 lib/scala-reflect-src.jar.desired.sha1             |    1 -
 lib/scala-reflect.jar.desired.sha1                 |    1 -
 project/Build.scala                                |  336 --
 project/Layers.scala                               |  120 -
 project/Packaging.scala                            |  129 -
 project/Partest.scala                              |  141 -
 project/Release.scala                              |   30 -
 project/RemoteDependencies.scala                   |   53 -
 project/Sametest.scala                             |   63 -
 project/ScalaBuildKeys.scala                       |   23 -
 project/ScalaToolRunner.scala                      |   21 -
 project/ShaResolve.scala                           |  147 -
 project/Testing.scala                              |   41 -
 project/VerifyClassLoad.scala                      |   46 -
 project/Versions.scala                             |  142 -
 project/plugins.sbt                                |    9 -
 project/project/Build.scala                        |    7 -
 spec/01-lexical-syntax.md                          |  615 +++
 spec/02-identifiers-names-and-scopes.md            |  114 +
 spec/03-types.md                                   | 1056 +++++
 spec/04-basic-declarations-and-definitions.md      |  945 ++++
 spec/05-classes-and-objects.md                     | 1173 +++++
 spec/06-expressions.md                             | 1814 ++++++++
 spec/07-implicit-parameters-and-views.md           |  441 ++
 spec/08-pattern-matching.md                        |  722 +++
 spec/09-top-level-definitions.md                   |  201 +
 spec/10-xml-expressions-and-patterns.md            |  147 +
 spec/11-user-defined-annotations.md                |  166 +
 spec/12-the-scala-standard-library.md              |  849 ++++
 spec/13-syntax-summary.md                          |  311 ++
 spec/14-references.md                              |  213 +
 spec/README.md                                     |   43 +
 spec/_config.yml                                   |   10 +
 spec/_includes/numbering.css                       |   56 +
 spec/_layouts/default.yml                          |   36 +
 spec/_layouts/toc.yml                              |   15 +
 spec/id_dsa_travis.enc                             |   15 +
 spec/index.md                                      |   71 +
 spec/public/images/classhierarchy.pdf              |  Bin 0 -> 84078 bytes
 spec/public/stylesheets/screen.css                 |  332 ++
 src/actors/scala/actors/AbstractActor.scala        |    2 +-
 src/actors/scala/actors/Actor.scala                |    2 +
 src/actors/scala/actors/ActorRef.scala             |    3 +-
 src/actors/scala/actors/CanReply.scala             |    1 +
 src/actors/scala/actors/Channel.scala              |    2 +
 src/actors/scala/actors/DaemonActor.scala          |    1 +
 src/actors/scala/actors/Debug.scala                |    1 +
 src/actors/scala/actors/Future.scala               |   18 +-
 src/actors/scala/actors/IScheduler.scala           |    1 +
 src/actors/scala/actors/InputChannel.scala         |    1 +
 src/actors/scala/actors/InternalActor.scala        |    2 +
 src/actors/scala/actors/InternalReplyReactor.scala |    1 +
 src/actors/scala/actors/KillActorControl.scala     |    2 -
 src/actors/scala/actors/OutputChannel.scala        |    1 +
 src/actors/scala/actors/Reactor.scala              |    1 +
 src/actors/scala/actors/ReplyReactor.scala         |    2 +-
 src/actors/scala/actors/Scheduler.scala            |    2 +-
 src/actors/scala/actors/SchedulerAdapter.scala     |    1 +
 src/actors/scala/actors/UncaughtException.scala    |    1 +
 src/actors/scala/actors/package.scala              |    1 +
 .../scala/actors/remote/JavaSerializer.scala       |    1 +
 src/actors/scala/actors/remote/NetKernel.scala     |    8 +-
 src/actors/scala/actors/remote/Proxy.scala         |    6 +-
 src/actors/scala/actors/remote/RemoteActor.scala   |    6 +-
 src/actors/scala/actors/remote/Serializer.scala    |    1 +
 src/actors/scala/actors/remote/Service.scala       |    1 +
 src/actors/scala/actors/remote/TcpService.scala    |   12 +-
 src/actors/scala/actors/scheduler/ActorGC.scala    |    1 +
 .../scala/actors/scheduler/DaemonScheduler.scala   |    1 +
 .../actors/scheduler/DrainableForkJoinPool.scala   |    1 -
 .../scala/actors/scheduler/ExecutorScheduler.scala |    2 +
 .../scala/actors/scheduler/ForkJoinScheduler.scala |    3 +-
 .../scheduler/ResizableThreadPoolScheduler.scala   |    3 +-
 .../actors/scheduler/SingleThreadedScheduler.scala |    1 +
 .../actors/scheduler/TerminationService.scala      |    2 +-
 .../scala/reflect/ScalaBeanInfo.scala              |    1 -
 src/asm/scala/tools/asm/AnnotationVisitor.java     |   66 +-
 src/asm/scala/tools/asm/AnnotationWriter.java      |   58 +-
 src/asm/scala/tools/asm/Attribute.java             |  193 +-
 src/asm/scala/tools/asm/ByteVector.java            |   49 +-
 src/asm/scala/tools/asm/ClassReader.java           | 2980 ++++++------
 src/asm/scala/tools/asm/ClassVisitor.java          |  233 +-
 src/asm/scala/tools/asm/ClassWriter.java           |  499 +-
 src/asm/scala/tools/asm/Context.java               |  110 +
 src/asm/scala/tools/asm/FieldVisitor.java          |   34 +-
 src/asm/scala/tools/asm/FieldWriter.java           |   72 +-
 src/asm/scala/tools/asm/Frame.java                 | 1024 ++---
 src/asm/scala/tools/asm/Handle.java                |   48 +-
 src/asm/scala/tools/asm/Handler.java               |    9 +-
 src/asm/scala/tools/asm/Item.java                  |  162 +-
 src/asm/scala/tools/asm/Label.java                 |  135 +-
 src/asm/scala/tools/asm/MethodVisitor.java         |  516 ++-
 src/asm/scala/tools/asm/MethodWriter.java          | 1165 ++---
 src/asm/scala/tools/asm/Type.java                  |  254 +-
 .../scala/tools/asm/signature/SignatureReader.java |  181 +-
 .../tools/asm/signature/SignatureVisitor.java      |   51 +-
 .../scala/tools/asm/signature/SignatureWriter.java |    2 +-
 src/asm/scala/tools/asm/tree/AbstractInsnNode.java |   30 +-
 src/asm/scala/tools/asm/tree/AnnotationNode.java   |   55 +-
 src/asm/scala/tools/asm/tree/ClassNode.java        |  102 +-
 src/asm/scala/tools/asm/tree/FieldInsnNode.java    |   34 +-
 src/asm/scala/tools/asm/tree/FieldNode.java        |  104 +-
 src/asm/scala/tools/asm/tree/FrameNode.java        |  121 +-
 src/asm/scala/tools/asm/tree/IincInsnNode.java     |    8 +-
 src/asm/scala/tools/asm/tree/InnerClassNode.java   |   42 +-
 src/asm/scala/tools/asm/tree/InsnList.java         |  128 +-
 src/asm/scala/tools/asm/tree/InsnNode.java         |   33 +-
 src/asm/scala/tools/asm/tree/IntInsnNode.java      |   13 +-
 .../tools/asm/tree/InvokeDynamicInsnNode.java      |   22 +-
 src/asm/scala/tools/asm/tree/JumpInsnNode.java     |   25 +-
 src/asm/scala/tools/asm/tree/LabelNode.java        |    2 +-
 src/asm/scala/tools/asm/tree/LdcInsnNode.java      |   11 +-
 src/asm/scala/tools/asm/tree/LineNumberNode.java   |    8 +-
 .../scala/tools/asm/tree/LocalVariableNode.java    |   45 +-
 .../scala/tools/asm/tree/LookupSwitchInsnNode.java |   21 +-
 src/asm/scala/tools/asm/tree/MethodInsnNode.java   |   38 +-
 src/asm/scala/tools/asm/tree/MethodNode.java       |  234 +-
 .../tools/asm/tree/MultiANewArrayInsnNode.java     |   10 +-
 .../scala/tools/asm/tree/TableSwitchInsnNode.java  |   30 +-
 .../scala/tools/asm/tree/TryCatchBlockNode.java    |   32 +-
 src/asm/scala/tools/asm/tree/TypeInsnNode.java     |   19 +-
 src/asm/scala/tools/asm/tree/VarInsnNode.java      |   21 +-
 .../scala/tools/asm/tree/analysis/Analyzer.java    |  160 +-
 .../tools/asm/tree/analysis/AnalyzerException.java |   11 +-
 .../tools/asm/tree/analysis/BasicInterpreter.java  |  483 +-
 .../scala/tools/asm/tree/analysis/BasicValue.java  |    9 +-
 .../tools/asm/tree/analysis/BasicVerifier.java     |  598 ++-
 src/asm/scala/tools/asm/tree/analysis/Frame.java   |  854 ++--
 .../scala/tools/asm/tree/analysis/Interpreter.java |  110 +-
 .../tools/asm/tree/analysis/SimpleVerifier.java    |  119 +-
 .../tools/asm/tree/analysis/SourceInterpreter.java |  148 +-
 .../scala/tools/asm/tree/analysis/SourceValue.java |    8 +-
 .../scala/tools/asm/tree/analysis/Subroutine.java  |    9 +-
 src/asm/scala/tools/asm/util/ASMifiable.java       |   13 +-
 src/asm/scala/tools/asm/util/ASMifier.java         |  485 +-
 .../tools/asm/util/CheckAnnotationAdapter.java     |   28 +-
 .../scala/tools/asm/util/CheckClassAdapter.java    |  587 ++-
 .../scala/tools/asm/util/CheckFieldAdapter.java    |   23 +-
 .../scala/tools/asm/util/CheckMethodAdapter.java   |  937 ++--
 .../tools/asm/util/CheckSignatureAdapter.java      |   57 +-
 src/asm/scala/tools/asm/util/Printer.java          |  316 +-
 src/asm/scala/tools/asm/util/SignatureChecker.java |   47 -
 src/asm/scala/tools/asm/util/Textifiable.java      |    8 +-
 src/asm/scala/tools/asm/util/Textifier.java        |  446 +-
 .../tools/asm/util/TraceAnnotationVisitor.java     |   23 +-
 .../scala/tools/asm/util/TraceClassVisitor.java    |  159 +-
 .../scala/tools/asm/util/TraceFieldVisitor.java    |   10 +-
 .../scala/tools/asm/util/TraceMethodVisitor.java   |   89 +-
 .../tools/asm/util/TraceSignatureVisitor.java      |   59 +-
 src/build/bnd/continuations.bnd                    |    5 -
 src/build/bnd/scala-compiler-doc.bnd               |    6 +
 src/build/bnd/scala-compiler-interactive.bnd       |    6 +
 src/build/bnd/scala-compiler.bnd                   |    2 +-
 src/build/bnd/scala-continuations-library.bnd      |    5 +
 src/build/bnd/scala-continuations-plugin.bnd       |    5 +
 src/build/bnd/scala-parser-combinators.bnd         |    5 +
 src/build/bnd/scala-swing.bnd                      |    2 +-
 src/build/bnd/scala-xml.bnd                        |    5 +
 src/build/dbuild-meta-json-gen.scala               |   47 +
 src/build/genprod.scala                            |    9 +-
 src/build/maven/continuations-plugin-pom.xml       |   62 -
 src/build/maven/jline-pom.xml                      |   68 -
 src/build/maven/maven-deploy.xml                   |  295 --
 src/build/maven/scala-actors-pom.xml               |   85 +-
 src/build/maven/scala-compiler-doc-pom.xml         |   58 +
 src/build/maven/scala-compiler-interactive-pom.xml |   48 +
 src/build/maven/scala-compiler-pom.xml             |  107 +-
 src/build/maven/scala-dist-pom.xml                 |   70 +
 src/build/maven/scala-dotnet-library-pom.xml       |   45 -
 src/build/maven/scala-library-all-pom.xml          |   88 +
 src/build/maven/scala-library-pom.xml              |   82 +-
 src/build/maven/scala-partest-pom.xml              |   62 -
 src/build/maven/scala-reflect-pom.xml              |   85 +-
 src/build/maven/scala-swing-pom.xml                |   64 -
 src/build/maven/scalap-pom.xml                     |   84 +-
 src/build/pack.xml                                 |  274 --
 .../macros/compiler/DefaultMacroCompiler.scala     |   95 +
 .../scala/reflect/macros/compiler/Errors.scala     |  154 +
 .../scala/reflect/macros/compiler/Resolvers.scala  |   35 +
 .../scala/reflect/macros/compiler/Validators.scala |  201 +
 .../scala/reflect/macros/contexts/Aliases.scala    |   35 +
 .../scala/reflect/macros/contexts/Context.scala    |   30 +
 .../scala/reflect/macros/contexts/Enclosures.scala |   32 +
 .../scala/reflect/macros/contexts/Evals.scala      |   23 +
 .../scala/reflect/macros/contexts/ExprUtils.scala  |   34 +
 .../scala/reflect/macros/contexts/FrontEnds.scala  |   22 +
 .../reflect/macros/contexts/Infrastructure.scala   |   16 +
 .../scala/reflect/macros/contexts/Internals.scala  |   47 +
 .../scala/reflect/macros/contexts/Names.scala      |   43 +
 .../scala/reflect/macros/contexts/Parsers.scala    |   20 +
 .../scala/reflect/macros/contexts/Reifiers.scala   |   77 +
 .../scala/reflect/macros/contexts/Traces.scala     |    8 +
 .../scala/reflect/macros/contexts/Typers.scala     |   53 +
 .../scala/reflect/macros/runtime/Aliases.scala     |   36 -
 .../scala/reflect/macros/runtime/Context.scala     |   28 -
 .../scala/reflect/macros/runtime/Enclosures.scala  |   24 -
 .../scala/reflect/macros/runtime/Evals.scala       |   18 -
 .../scala/reflect/macros/runtime/ExprUtils.scala   |   35 -
 .../scala/reflect/macros/runtime/FrontEnds.scala   |   20 -
 .../reflect/macros/runtime/Infrastructure.scala    |   16 -
 .../macros/runtime/JavaReflectionRuntimes.scala    |   38 +
 .../reflect/macros/runtime/MacroRuntimes.scala     |   75 +
 .../scala/reflect/macros/runtime/Names.scala       |   17 -
 .../scala/reflect/macros/runtime/Parsers.scala     |   24 -
 .../scala/reflect/macros/runtime/Reifiers.scala    |   77 -
 .../scala/reflect/macros/runtime/Traces.scala      |    8 -
 .../scala/reflect/macros/runtime/Typers.scala      |   50 -
 .../scala/reflect/macros/runtime/package.scala     |    5 +
 .../scala/reflect/macros/util/Helpers.scala        |   96 +
 src/compiler/scala/reflect/reify/Errors.scala      |    6 -
 src/compiler/scala/reflect/reify/Phases.scala      |    5 +-
 src/compiler/scala/reflect/reify/Reifier.scala     |   39 +-
 src/compiler/scala/reflect/reify/States.scala      |    3 +-
 src/compiler/scala/reflect/reify/Taggers.scala     |   11 +-
 .../reflect/reify/codegen/GenAnnotationInfos.scala |   18 +-
 .../scala/reflect/reify/codegen/GenNames.scala     |    5 +-
 .../scala/reflect/reify/codegen/GenPositions.scala |    3 +-
 .../scala/reflect/reify/codegen/GenSymbols.scala   |   25 +-
 .../scala/reflect/reify/codegen/GenTrees.scala     |   86 +-
 .../scala/reflect/reify/codegen/GenTypes.scala     |   50 +-
 .../scala/reflect/reify/codegen/GenUtils.scala     |   66 +-
 src/compiler/scala/reflect/reify/package.scala     |   20 +-
 .../scala/reflect/reify/phases/Calculate.scala     |    5 +-
 .../scala/reflect/reify/phases/Metalevels.scala    |   23 +-
 .../scala/reflect/reify/phases/Reify.scala         |    6 +-
 .../scala/reflect/reify/phases/Reshape.scala       |   77 +-
 .../scala/reflect/reify/utils/Extractors.scala     |   38 +-
 .../scala/reflect/reify/utils/NodePrinters.scala   |   25 +-
 .../scala/reflect/reify/utils/SymbolTables.scala   |   18 +-
 src/compiler/scala/tools/ant/Pack200Task.scala     |    6 +-
 src/compiler/scala/tools/ant/Same.scala            |    7 +-
 src/compiler/scala/tools/ant/ScalaTool.scala       |   18 +-
 src/compiler/scala/tools/ant/Scalac.scala          |   23 +-
 src/compiler/scala/tools/ant/Scaladoc.scala        |  695 ---
 src/compiler/scala/tools/ant/antlib.xml            |    2 -
 src/compiler/scala/tools/ant/sabbus/Break.scala    |    3 +-
 .../scala/tools/ant/sabbus/Compilers.scala         |    2 +-
 src/compiler/scala/tools/ant/sabbus/Make.scala     |    3 +-
 .../scala/tools/ant/sabbus/ScalacFork.scala        |   12 +-
 src/compiler/scala/tools/ant/sabbus/Settings.scala |   16 +-
 src/compiler/scala/tools/ant/sabbus/TaskArgs.scala |    2 -
 src/compiler/scala/tools/ant/sabbus/Use.scala      |    7 +-
 .../scala/tools/ant/templates/tool-unix.tmpl       |   18 +-
 .../scala/tools/ant/templates/tool-windows.tmpl    |   10 +-
 src/compiler/scala/tools/cmd/CommandLine.scala     |   14 +-
 .../scala/tools/cmd/CommandLineParser.scala        |   72 +
 src/compiler/scala/tools/cmd/Demo.scala            |   84 -
 src/compiler/scala/tools/cmd/FromString.scala      |   26 +-
 src/compiler/scala/tools/cmd/Interpolation.scala   |    3 +-
 src/compiler/scala/tools/cmd/Opt.scala             |    8 +-
 src/compiler/scala/tools/cmd/Parser.scala          |   52 -
 src/compiler/scala/tools/cmd/Reference.scala       |   25 +-
 src/compiler/scala/tools/cmd/Spec.scala            |    2 +-
 src/compiler/scala/tools/cmd/gen/AnyVals.scala     |  134 +-
 src/compiler/scala/tools/cmd/gen/Codegen.scala     |    8 +-
 src/compiler/scala/tools/cmd/gen/CodegenSpec.scala |    6 -
 src/compiler/scala/tools/cmd/package.scala         |   15 +-
 .../scala/tools/nsc/CompilationUnits.scala         |   83 +-
 src/compiler/scala/tools/nsc/CompileClient.scala   |   10 +-
 src/compiler/scala/tools/nsc/CompileServer.scala   |   21 +-
 src/compiler/scala/tools/nsc/CompileSocket.scala   |   18 +-
 src/compiler/scala/tools/nsc/CompilerCommand.scala |   44 +-
 src/compiler/scala/tools/nsc/CompilerRun.scala     |   21 -
 src/compiler/scala/tools/nsc/ConsoleWriter.scala   |    4 +-
 src/compiler/scala/tools/nsc/Driver.scala          |   13 +-
 src/compiler/scala/tools/nsc/EvalLoop.scala        |    2 +-
 .../scala/tools/nsc/GenericRunnerCommand.scala     |    2 +-
 .../scala/tools/nsc/GenericRunnerSettings.scala    |    3 -
 src/compiler/scala/tools/nsc/Global.scala          |  962 ++--
 .../scala/tools/nsc/GlobalSymbolLoaders.scala      |   30 +
 src/compiler/scala/tools/nsc/Main.scala            |   80 +-
 src/compiler/scala/tools/nsc/MainBench.scala       |   16 +-
 .../scala/tools/nsc/MainGenericRunner.scala        |  108 -
 src/compiler/scala/tools/nsc/MainTokenMetric.scala |   13 +-
 src/compiler/scala/tools/nsc/ObjectRunner.scala    |    6 -
 .../scala/tools/nsc/OfflineCompilerCommand.scala   |    6 +-
 src/compiler/scala/tools/nsc/PhaseAssembly.scala   |  108 +-
 src/compiler/scala/tools/nsc/Phases.scala          |   46 -
 src/compiler/scala/tools/nsc/Properties.scala      |    5 -
 src/compiler/scala/tools/nsc/ScalaDoc.scala        |   77 -
 src/compiler/scala/tools/nsc/ScriptRunner.scala    |   37 +-
 src/compiler/scala/tools/nsc/SubComponent.scala    |   25 +-
 src/compiler/scala/tools/nsc/ast/DocComments.scala |   51 +-
 .../scala/tools/nsc/ast/NodePrinters.scala         |   30 +-
 src/compiler/scala/tools/nsc/ast/Positions.scala   |   11 +-
 src/compiler/scala/tools/nsc/ast/Printers.scala    |  123 +-
 .../scala/tools/nsc/ast/TreeBrowsers.scala         |   47 +-
 src/compiler/scala/tools/nsc/ast/TreeDSL.scala     |  216 +-
 src/compiler/scala/tools/nsc/ast/TreeGen.scala     |  212 +-
 src/compiler/scala/tools/nsc/ast/TreeInfo.scala    |   74 +-
 src/compiler/scala/tools/nsc/ast/Trees.scala       |  184 +-
 .../scala/tools/nsc/ast/parser/CommonTokens.scala  |  112 +
 .../scala/tools/nsc/ast/parser/MarkupParsers.scala |   90 +-
 .../scala/tools/nsc/ast/parser/Parsers.scala       | 1546 +++----
 .../scala/tools/nsc/ast/parser/Scanners.scala      |  423 +-
 .../tools/nsc/ast/parser/SymbolicXMLBuilder.scala  |   27 +-
 .../tools/nsc/ast/parser/SyntaxAnalyzer.scala      |   92 +-
 .../scala/tools/nsc/ast/parser/Tokens.scala        |  186 +-
 .../scala/tools/nsc/ast/parser/TreeBuilder.scala   |  504 +--
 .../nsc/ast/parser/xml/MarkupParserCommon.scala    |  211 +
 .../scala/tools/nsc/ast/parser/xml/Utility.scala   |  163 +
 .../scala/tools/nsc/backend/JavaPlatform.scala     |   37 +-
 .../scala/tools/nsc/backend/MSILPlatform.scala     |   69 -
 .../scala/tools/nsc/backend/Platform.scala         |   21 +-
 .../scala/tools/nsc/backend/ScalaPrimitives.scala  |   32 +-
 .../tools/nsc/backend/WorklistAlgorithm.scala      |    5 +-
 .../tools/nsc/backend/icode/BasicBlocks.scala      |  145 +-
 .../tools/nsc/backend/icode/CheckerException.scala |    2 -
 .../nsc/backend/icode/ExceptionHandlers.scala      |   23 +-
 .../scala/tools/nsc/backend/icode/GenICode.scala   |  843 ++--
 .../tools/nsc/backend/icode/ICodeCheckers.scala    |   94 +-
 .../scala/tools/nsc/backend/icode/ICodes.scala     |   16 +-
 .../tools/nsc/backend/icode/Linearizers.scala      |  206 +-
 .../scala/tools/nsc/backend/icode/Members.scala    |  115 +-
 .../scala/tools/nsc/backend/icode/Opcodes.scala    |  134 +-
 .../scala/tools/nsc/backend/icode/Primitives.scala |   29 +-
 .../scala/tools/nsc/backend/icode/Printers.scala   |   66 +-
 .../scala/tools/nsc/backend/icode/Repository.scala |   13 +-
 .../scala/tools/nsc/backend/icode/TypeKinds.scala  |   75 +-
 .../scala/tools/nsc/backend/icode/TypeStacks.scala |   12 -
 .../backend/icode/analysis/CopyPropagation.scala   |   94 +-
 .../backend/icode/analysis/DataFlowAnalysis.scala  |   22 +-
 .../nsc/backend/icode/analysis/Liveness.scala      |    6 +-
 .../icode/analysis/ReachingDefinitions.scala       |   15 +-
 .../backend/icode/analysis/TypeFlowAnalysis.scala  |  104 +-
 .../tools/nsc/backend/jvm/BCodeBodyBuilder.scala   | 1234 +++++
 .../scala/tools/nsc/backend/jvm/BCodeGlue.scala    |  716 +++
 .../scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 1204 +++++
 .../tools/nsc/backend/jvm/BCodeIdiomatic.scala     |  725 +++
 .../tools/nsc/backend/jvm/BCodeSkelBuilder.scala   |  724 +++
 .../tools/nsc/backend/jvm/BCodeSyncAndTry.scala    |  395 ++
 .../scala/tools/nsc/backend/jvm/BCodeTypes.scala   |  880 ++++
 .../tools/nsc/backend/jvm/BytecodeWriters.scala    |  105 +-
 .../scala/tools/nsc/backend/jvm/GenASM.scala       | 1101 +++--
 .../scala/tools/nsc/backend/jvm/GenAndroid.scala   |   62 -
 .../scala/tools/nsc/backend/jvm/GenBCode.scala     |  381 ++
 .../scala/tools/nsc/backend/jvm/GenJVM.scala       | 1921 --------
 .../scala/tools/nsc/backend/jvm/GenJVMASM.scala    |   32 +-
 .../scala/tools/nsc/backend/jvm/GenJVMUtil.scala   |  142 -
 .../scala/tools/nsc/backend/msil/GenMSIL.scala     | 2358 ----------
 .../tools/nsc/backend/opt/ClosureElimination.scala |   29 +-
 .../nsc/backend/opt/ConstantOptimization.scala     |  625 +++
 .../nsc/backend/opt/DeadCodeElimination.scala      |   54 +-
 .../nsc/backend/opt/InlineExceptionHandlers.scala  |   21 +-
 .../scala/tools/nsc/backend/opt/Inliners.scala     |  133 +-
 .../scala/tools/nsc/dependencies/Changes.scala     |  227 -
 .../nsc/dependencies/DependencyAnalysis.scala      |  254 --
 .../scala/tools/nsc/dependencies/Files.scala       |  177 -
 src/compiler/scala/tools/nsc/doc/DocFactory.scala  |  142 -
 src/compiler/scala/tools/nsc/doc/DocParser.scala   |   74 -
 src/compiler/scala/tools/nsc/doc/Index.scala       |   17 -
 src/compiler/scala/tools/nsc/doc/Settings.scala    |  365 --
 .../scala/tools/nsc/doc/Uncompilable.scala         |   51 -
 .../tools/nsc/doc/base/CommentFactoryBase.scala    |  955 ----
 src/compiler/scala/tools/nsc/doc/base/LinkTo.scala |   15 -
 .../tools/nsc/doc/base/MemberLookupBase.scala      |  206 -
 .../scala/tools/nsc/doc/base/comment/Body.scala    |   95 -
 .../scala/tools/nsc/doc/base/comment/Comment.scala |  134 -
 .../scala/tools/nsc/doc/doclet/Generator.scala     |   30 -
 src/compiler/scala/tools/nsc/doc/html/Doclet.scala |   19 -
 .../scala/tools/nsc/doc/html/HtmlFactory.scala     |  152 -
 .../scala/tools/nsc/doc/html/HtmlPage.scala        |  224 -
 src/compiler/scala/tools/nsc/doc/html/Page.scala   |  108 -
 .../scala/tools/nsc/doc/html/SyntaxHigh.scala      |  286 --
 .../scala/tools/nsc/doc/html/page/Index.scala      |  142 -
 .../tools/nsc/doc/html/page/IndexScript.scala      |   70 -
 .../tools/nsc/doc/html/page/ReferenceIndex.scala   |   58 -
 .../scala/tools/nsc/doc/html/page/Source.scala     |  128 -
 .../scala/tools/nsc/doc/html/page/Template.scala   |  977 ----
 .../nsc/doc/html/page/diagram/DiagramStats.scala   |   66 -
 .../html/page/diagram/DotDiagramGenerator.scala    |  511 ---
 .../nsc/doc/html/page/diagram/DotRunner.scala      |  228 -
 .../scala/tools/nsc/doc/html/resource/lib/index.js |  536 ---
 .../scala/tools/nsc/doc/model/CommentFactory.scala |  114 -
 .../scala/tools/nsc/doc/model/Entity.scala         |  631 ---
 .../tools/nsc/doc/model/IndexModelFactory.scala    |   60 -
 .../scala/tools/nsc/doc/model/MemberLookup.scala   |   63 -
 .../scala/tools/nsc/doc/model/ModelFactory.scala   | 1103 -----
 .../doc/model/ModelFactoryImplicitSupport.scala    |  609 ---
 .../nsc/doc/model/ModelFactoryTypeSupport.scala    |  326 --
 .../scala/tools/nsc/doc/model/TreeFactory.scala    |   95 -
 .../tools/nsc/doc/model/diagram/Diagram.scala      |  146 -
 .../doc/model/diagram/DiagramDirectiveParser.scala |  261 --
 .../nsc/doc/model/diagram/DiagramFactory.scala     |  271 --
 .../scala/tools/nsc/interactive/BuildManager.scala |   93 -
 .../tools/nsc/interactive/CompilerControl.scala    |  481 --
 .../scala/tools/nsc/interactive/ContextTrees.scala |  165 -
 .../scala/tools/nsc/interactive/Global.scala       | 1214 -----
 .../scala/tools/nsc/interactive/Picklers.scala     |  191 -
 .../scala/tools/nsc/interactive/REPL.scala         |  222 -
 .../tools/nsc/interactive/RangePositions.scala     |  285 --
 .../nsc/interactive/RefinedBuildManager.scala      |  355 --
 .../scala/tools/nsc/interactive/Response.scala     |  105 -
 .../tools/nsc/interactive/ScratchPadMaker.scala    |  200 -
 .../tools/nsc/interactive/SimpleBuildManager.scala |  103 -
 .../nsc/interactive/tests/InteractiveTest.scala    |  129 -
 .../tests/InteractiveTestSettings.scala            |   70 -
 .../scala/tools/nsc/interactive/tests/Tester.scala |  208 -
 .../nsc/interactive/tests/core/CoreTestDefs.scala  |  133 -
 .../tests/core/PresentationCompilerInstance.scala  |   35 -
 .../tests/core/PresentationCompilerTestDef.scala   |   19 -
 .../interactive/tests/core/SourcesCollector.scala  |   22 -
 .../nsc/interactive/tests/core/TestMarker.scala    |   29 -
 .../nsc/interpreter/AbstractFileClassLoader.scala  |  107 -
 .../scala/tools/nsc/interpreter/ByteCode.scala     |   63 -
 .../scala/tools/nsc/interpreter/CodeHandlers.scala |   50 -
 .../scala/tools/nsc/interpreter/CommandLine.scala  |   14 -
 .../scala/tools/nsc/interpreter/Completion.scala   |   51 -
 .../tools/nsc/interpreter/CompletionAware.scala    |   83 -
 .../tools/nsc/interpreter/CompletionOutput.scala   |   86 -
 .../nsc/interpreter/ConsoleReaderHelper.scala      |   70 -
 .../scala/tools/nsc/interpreter/Delimited.scala    |   44 -
 .../scala/tools/nsc/interpreter/ExprTyper.scala    |  107 -
 .../scala/tools/nsc/interpreter/ILoop.scala        |  966 ----
 .../scala/tools/nsc/interpreter/ILoopInit.scala    |  125 -
 .../scala/tools/nsc/interpreter/IMain.scala        | 1235 -----
 .../scala/tools/nsc/interpreter/ISettings.scala    |   61 -
 .../scala/tools/nsc/interpreter/Imports.scala      |  195 -
 .../tools/nsc/interpreter/InteractiveReader.scala  |   57 -
 .../tools/nsc/interpreter/JLineCompletion.scala    |  372 --
 .../scala/tools/nsc/interpreter/JLineReader.scala  |   76 -
 .../scala/tools/nsc/interpreter/Logger.scala       |   18 -
 .../scala/tools/nsc/interpreter/LoopCommands.scala |  107 -
 .../tools/nsc/interpreter/MemberHandlers.scala     |  228 -
 .../scala/tools/nsc/interpreter/NamedParam.scala   |   49 -
 .../scala/tools/nsc/interpreter/Naming.scala       |   98 -
 .../scala/tools/nsc/interpreter/Parsed.scala       |   69 -
 .../scala/tools/nsc/interpreter/Phased.scala       |  162 -
 .../scala/tools/nsc/interpreter/Power.scala        |  430 --
 .../scala/tools/nsc/interpreter/ReplConfig.scala   |   61 -
 .../scala/tools/nsc/interpreter/ReplGlobal.scala   |   57 -
 .../scala/tools/nsc/interpreter/ReplProps.scala    |   31 -
 .../scala/tools/nsc/interpreter/ReplStrings.scala  |   35 -
 .../scala/tools/nsc/interpreter/ReplVals.scala     |   83 -
 .../scala/tools/nsc/interpreter/Results.scala      |   22 -
 .../scala/tools/nsc/interpreter/RichClass.scala    |   39 -
 .../scala/tools/nsc/interpreter/SimpleReader.scala |   43 -
 .../scala/tools/nsc/interpreter/TypeStrings.scala  |  263 --
 .../scala/tools/nsc/interpreter/package.scala      |   49 -
 .../tools/nsc/interpreter/session/History.scala    |   28 -
 .../nsc/interpreter/session/SimpleHistory.scala    |   62 -
 .../tools/nsc/interpreter/session/package.scala    |   23 -
 .../scala/tools/nsc/io/DaemonThreadFactory.scala   |   21 -
 src/compiler/scala/tools/nsc/io/Fileish.scala      |   33 -
 src/compiler/scala/tools/nsc/io/Jar.scala          |   38 +-
 src/compiler/scala/tools/nsc/io/Lexer.scala        |  301 --
 src/compiler/scala/tools/nsc/io/MsilFile.scala     |   18 -
 src/compiler/scala/tools/nsc/io/Pickler.scala      |  454 --
 src/compiler/scala/tools/nsc/io/PrettyWriter.scala |   41 -
 src/compiler/scala/tools/nsc/io/Replayer.scala     |   74 -
 src/compiler/scala/tools/nsc/io/Socket.scala       |   12 +-
 src/compiler/scala/tools/nsc/io/SourceReader.scala |    7 +-
 src/compiler/scala/tools/nsc/io/package.scala      |   31 -
 .../scala/tools/nsc/javac/JavaParsers.scala        |  226 +-
 .../scala/tools/nsc/javac/JavaScanners.scala       |  294 +-
 .../scala/tools/nsc/javac/JavaTokens.scala         |  180 +-
 .../scala/tools/nsc/matching/MatchSupport.scala    |  138 -
 src/compiler/scala/tools/nsc/matching/Matrix.scala |  259 --
 .../scala/tools/nsc/matching/MatrixAdditions.scala |  193 -
 .../tools/nsc/matching/ParallelMatching.scala      |  870 ----
 .../scala/tools/nsc/matching/PatternBindings.scala |  137 -
 .../scala/tools/nsc/matching/Patterns.scala        |  499 --
 src/compiler/scala/tools/nsc/package.scala         |   11 +-
 src/compiler/scala/tools/nsc/plugins/Plugin.scala  |  190 +-
 .../scala/tools/nsc/plugins/PluginComponent.scala  |    8 +-
 .../tools/nsc/plugins/PluginDescription.scala      |   80 +-
 .../tools/nsc/plugins/PluginLoadException.scala    |   15 -
 src/compiler/scala/tools/nsc/plugins/Plugins.scala |   58 +-
 .../tools/nsc/reporters/AbstractReporter.scala     |   37 +-
 .../tools/nsc/reporters/ConsoleReporter.scala      |   23 +-
 .../scala/tools/nsc/reporters/Reporter.scala       |   15 +-
 .../scala/tools/nsc/reporters/StoreReporter.scala  |    2 +-
 .../scala/tools/nsc/scratchpad/Mixer.scala         |  102 -
 .../tools/nsc/scratchpad/SourceInserter.scala      |   23 -
 .../tools/nsc/settings/AbsScalaSettings.scala      |   10 +-
 .../scala/tools/nsc/settings/AbsSettings.scala     |   13 +-
 .../tools/nsc/settings/AdvancedScalaSettings.scala |   77 -
 .../tools/nsc/settings/AestheticSettings.scala     |   39 -
 .../scala/tools/nsc/settings/FscSettings.scala     |    6 +-
 .../scala/tools/nsc/settings/MutableSettings.scala |  167 +-
 .../scala/tools/nsc/settings/ScalaSettings.scala   |  169 +-
 .../scala/tools/nsc/settings/ScalaVersion.scala    |   53 +-
 .../tools/nsc/settings/StandardScalaSettings.scala |   11 +-
 .../scala/tools/nsc/settings/Warnings.scala        |   35 +-
 .../scala/tools/nsc/symtab/BrowsingLoaders.scala   |    8 +-
 .../scala/tools/nsc/symtab/SymbolLoaders.scala     |  123 +-
 .../scala/tools/nsc/symtab/SymbolTrackers.scala    |   10 +-
 .../nsc/symtab/classfile/AbstractFileReader.scala  |    7 +-
 .../nsc/symtab/classfile/ClassfileParser.scala     |  819 ++--
 .../tools/nsc/symtab/classfile/ICodeReader.scala   |  415 +-
 .../scala/tools/nsc/symtab/classfile/Pickler.scala | 1062 +----
 .../scala/tools/nsc/symtab/clr/CLRTypes.scala      |  137 -
 .../scala/tools/nsc/symtab/clr/TypeParser.scala    |  850 ----
 .../scala/tools/nsc/transform/AddInterfaces.scala  |   58 +-
 .../scala/tools/nsc/transform/CleanUp.scala        |  521 +--
 .../scala/tools/nsc/transform/Constructors.scala   | 1137 +++--
 .../scala/tools/nsc/transform/Delambdafy.scala     |  464 ++
 .../scala/tools/nsc/transform/Erasure.scala        |  612 +--
 .../scala/tools/nsc/transform/ExplicitOuter.scala  |  184 +-
 .../tools/nsc/transform/ExtensionMethods.scala     |   71 +-
 .../scala/tools/nsc/transform/Flatten.scala        |   73 +-
 .../scala/tools/nsc/transform/InfoTransform.scala  |    4 +-
 .../scala/tools/nsc/transform/InlineErasure.scala  |   10 +-
 .../scala/tools/nsc/transform/LambdaLift.scala     |  158 +-
 .../scala/tools/nsc/transform/LazyVals.scala       |   29 +-
 src/compiler/scala/tools/nsc/transform/Mixin.scala |  336 +-
 .../tools/nsc/transform/OverridingPairs.scala      |  230 +-
 .../scala/tools/nsc/transform/PostErasure.scala    |   66 +-
 .../tools/nsc/transform/SampleTransform.scala      |    5 +-
 .../tools/nsc/transform/SpecializeTypes.scala      |  590 ++-
 .../scala/tools/nsc/transform/Statics.scala        |   52 +
 .../scala/tools/nsc/transform/TailCalls.scala      |  271 +-
 .../nsc/transform/TypeAdaptingTransformer.scala    |  187 +
 .../tools/nsc/transform/TypingTransformers.scala   |    6 +-
 .../scala/tools/nsc/transform/UnCurry.scala        |  385 +-
 .../scala/tools/nsc/transform/patmat/Logic.scala   |   66 +-
 .../tools/nsc/transform/patmat/MatchAnalysis.scala |  163 +-
 .../tools/nsc/transform/patmat/MatchCodeGen.scala  |  106 +-
 .../tools/nsc/transform/patmat/MatchCps.scala      |   37 +
 .../nsc/transform/patmat/MatchOptimization.scala   |   64 +-
 .../nsc/transform/patmat/MatchTranslation.scala    |  733 ++-
 .../nsc/transform/patmat/MatchTreeMaking.scala     |  156 +-
 .../tools/nsc/transform/patmat/MatchWarnings.scala |   86 +
 .../nsc/transform/patmat/PatternExpander.scala     |  155 +
 .../nsc/transform/patmat/PatternMatching.scala     |   49 +-
 .../transform/patmat/ScalacPatternExpanders.scala  |  154 +
 .../scala/tools/nsc/transform/patmat/Solving.scala |   32 +-
 .../scala/tools/nsc/typechecker/Adaptations.scala  |   33 +-
 .../scala/tools/nsc/typechecker/Analyzer.scala     |   33 +-
 .../tools/nsc/typechecker/AnalyzerPlugins.scala    |  295 +-
 .../scala/tools/nsc/typechecker/Checkable.scala    |   72 +-
 .../tools/nsc/typechecker/ConstantFolder.scala     |    8 +-
 .../tools/nsc/typechecker/ContextErrors.scala      |  437 +-
 .../scala/tools/nsc/typechecker/Contexts.scala     | 1394 ++++--
 .../tools/nsc/typechecker/DestructureTypes.scala   |   17 +-
 .../scala/tools/nsc/typechecker/Duplicators.scala  |  124 +-
 .../scala/tools/nsc/typechecker/EtaExpansion.scala |   20 +-
 .../scala/tools/nsc/typechecker/Implicits.scala    |  765 ++--
 .../scala/tools/nsc/typechecker/Infer.scala        | 1705 +++----
 .../scala/tools/nsc/typechecker/Macros.scala       | 1138 +++--
 .../tools/nsc/typechecker/MethodSynthesis.scala    |  219 +-
 .../scala/tools/nsc/typechecker/Modes.scala        |  140 -
 .../scala/tools/nsc/typechecker/Namers.scala       |  524 ++-
 .../tools/nsc/typechecker/NamesDefaults.scala      |   75 +-
 .../tools/nsc/typechecker/PatternTypers.scala      |  376 ++
 .../scala/tools/nsc/typechecker/RefChecks.scala    | 1012 ++---
 .../tools/nsc/typechecker/StdAttachments.scala     |  162 +-
 .../tools/nsc/typechecker/SuperAccessors.scala     |   86 +-
 .../tools/nsc/typechecker/SyntheticMethods.scala   |  170 +-
 .../scala/tools/nsc/typechecker/Tags.scala         |   21 +-
 .../scala/tools/nsc/typechecker/TreeCheckers.scala |  376 +-
 .../tools/nsc/typechecker/TypeDiagnostics.scala    |  187 +-
 .../scala/tools/nsc/typechecker/TypeStrings.scala  |  239 +
 .../scala/tools/nsc/typechecker/Typers.scala       | 4470 +++++++++---------
 .../tools/nsc/typechecker/TypersTracking.scala     |  168 +
 .../scala/tools/nsc/typechecker/Unapplies.scala    |  121 +-
 .../scala/tools/nsc/typechecker/Variances.scala    |   94 -
 .../scala/tools/nsc/util/CharArrayReader.scala     |   57 +-
 src/compiler/scala/tools/nsc/util/ClassPath.scala  |  111 +-
 .../scala/tools/nsc/util/CommandLineParser.scala   |  144 -
 src/compiler/scala/tools/nsc/util/DocStrings.scala |    2 +-
 .../scala/tools/nsc/util/Exceptional.scala         |    2 -
 .../scala/tools/nsc/util/FreshNameCreator.scala    |   45 -
 .../scala/tools/nsc/util/JavaCharArrayReader.scala |   66 +-
 .../scala/tools/nsc/util/MsilClassPath.scala       |  170 -
 .../scala/tools/nsc/util/MultiHashMap.scala        |    9 -
 .../scala/tools/nsc/util/ScalaClassLoader.scala    |  168 -
 .../scala/tools/nsc/util/ShowPickled.scala         |   24 +-
 .../scala/tools/nsc/util/SimpleTracer.scala        |    3 +-
 .../scala/tools/nsc/util/StackTracing.scala        |   76 +
 src/compiler/scala/tools/nsc/util/TreeSet.scala    |   64 -
 .../scala/tools/nsc/util/WorkScheduler.scala       |    6 +-
 src/compiler/scala/tools/nsc/util/package.scala    |  105 +-
 src/compiler/scala/tools/reflect/FastTrack.scala   |   58 +-
 .../scala/tools/reflect/FormatInterpolator.scala   |  379 ++
 src/compiler/scala/tools/reflect/FrontEnd.scala    |    2 +-
 .../scala/tools/reflect/MacroImplementations.scala |  171 -
 .../scala/tools/reflect/ReflectGlobal.scala        |   15 +-
 src/compiler/scala/tools/reflect/ReflectMain.scala |    3 +-
 src/compiler/scala/tools/reflect/StdTags.scala     |   10 +-
 src/compiler/scala/tools/reflect/ToolBox.scala     |   68 +-
 .../scala/tools/reflect/ToolBoxFactory.scala       |  390 +-
 .../scala/tools/reflect/WrappedProperties.scala    |    1 +
 src/compiler/scala/tools/reflect/package.scala     |    5 +-
 .../scala/tools/reflect/quasiquotes/Holes.scala    |  245 +
 .../scala/tools/reflect/quasiquotes/Parsers.scala  |  221 +
 .../tools/reflect/quasiquotes/Placeholders.scala   |  201 +
 .../tools/reflect/quasiquotes/Quasiquotes.scala    |   60 +
 .../scala/tools/reflect/quasiquotes/Reifiers.scala |  487 ++
 src/compiler/scala/tools/util/Javap.scala          |  157 +-
 src/compiler/scala/tools/util/PathResolver.scala   |  191 +-
 src/compiler/scala/tools/util/SocketServer.scala   |    7 +-
 src/compiler/scala/tools/util/VerifyClass.scala    |    2 +-
 .../scala/util/continuations/ControlContext.scala  |  249 -
 .../library/scala/util/continuations/package.scala |  187 -
 .../tools/selectivecps/CPSAnnotationChecker.scala  |  524 ---
 .../plugin/scala/tools/selectivecps/CPSUtils.scala |  138 -
 .../tools/selectivecps/SelectiveANFTransform.scala |  549 ---
 .../tools/selectivecps/SelectiveCPSPlugin.scala    |   62 -
 .../tools/selectivecps/SelectiveCPSTransform.scala |  384 --
 src/continuations/plugin/scalac-plugin.xml         |    5 -
 src/detach/library/scala/remoting/Channel.scala    |  190 -
 src/detach/library/scala/remoting/Debug.scala      |   27 -
 .../library/scala/remoting/ServerChannel.scala     |   68 -
 src/detach/library/scala/remoting/detach.scala     |   49 -
 src/detach/library/scala/runtime/RemoteRef.scala   |  182 -
 .../library/scala/runtime/remoting/Debug.scala     |   85 -
 .../scala/runtime/remoting/RegistryDelegate.scala  |  192 -
 .../scala/runtime/remoting/RemoteBooleanRef.scala  |   51 -
 .../scala/runtime/remoting/RemoteByteRef.scala     |   51 -
 .../scala/runtime/remoting/RemoteCharRef.scala     |   51 -
 .../scala/runtime/remoting/RemoteDoubleRef.scala   |   50 -
 .../scala/runtime/remoting/RemoteFloatRef.scala    |   50 -
 .../library/scala/runtime/remoting/RemoteGC.scala  |   66 -
 .../scala/runtime/remoting/RemoteIntRef.scala      |   51 -
 .../scala/runtime/remoting/RemoteLongRef.scala     |   51 -
 .../scala/runtime/remoting/RemoteObjectRef.scala   |   51 -
 .../scala/runtime/remoting/RemoteShortRef.scala    |   50 -
 src/detach/plugin/scala/tools/detach/Detach.scala  | 1190 -----
 .../plugin/scala/tools/detach/DetachPlugin.scala   |   41 -
 src/detach/plugin/scalac-plugin.xml                |    4 -
 src/eclipse/README.md                              |   89 +-
 src/eclipse/continuations-library/.classpath       |    8 -
 src/eclipse/continuations-library/.project         |   30 -
 .../.settings/org.scala-ide.sdt.core.prefs         |    2 -
 src/eclipse/fjbg/.classpath                        |    7 -
 src/eclipse/fjbg/.project                          |   30 -
 src/eclipse/interactive/.classpath                 |    9 +
 src/eclipse/interactive/.project                   |   35 +
 src/eclipse/partest/.classpath                     |   21 +-
 src/eclipse/partest/.project                       |   10 +-
 src/eclipse/reflect/.classpath                     |    3 +-
 src/eclipse/repl/.classpath                        |   11 +
 src/eclipse/repl/.project                          |   35 +
 src/eclipse/scala-compiler/.classpath              |   12 +-
 src/eclipse/scala-library/.classpath               |    2 +-
 src/eclipse/scaladoc/.classpath                    |   13 +
 src/eclipse/scaladoc/.project                      |   35 +
 src/eclipse/scalap/.classpath                      |   10 +-
 src/eclipse/test-junit/.classpath                  |    6 +-
 src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java        |  195 -
 src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java       |   35 -
 src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java         |   62 -
 src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java         |   84 -
 src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java  |  101 -
 src/fjbg/ch/epfl/lamp/fjbg/JClass.java             |  420 --
 src/fjbg/ch/epfl/lamp/fjbg/JCode.java              | 1308 ------
 src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java     |  125 -
 src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java      |  377 --
 src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java      |  771 ----
 .../ch/epfl/lamp/fjbg/JConstantValueAttribute.java |   69 -
 .../epfl/lamp/fjbg/JEnclosingMethodAttribute.java  |   83 -
 .../ch/epfl/lamp/fjbg/JExceptionsAttribute.java    |   90 -
 src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java      |  667 ---
 src/fjbg/ch/epfl/lamp/fjbg/JField.java             |   62 -
 src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java     |  138 -
 .../ch/epfl/lamp/fjbg/JInnerClassesAttribute.java  |  201 -
 src/fjbg/ch/epfl/lamp/fjbg/JLabel.java             |   30 -
 .../epfl/lamp/fjbg/JLineNumberTableAttribute.java  |  121 -
 src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java     |   42 -
 .../lamp/fjbg/JLocalVariableTableAttribute.java    |  167 -
 src/fjbg/ch/epfl/lamp/fjbg/JMember.java            |  109 -
 src/fjbg/ch/epfl/lamp/fjbg/JMethod.java            |  199 -
 src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java        |   87 -
 src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java        |   65 -
 src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java            | 1267 ------
 src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java    |   77 -
 src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java     |   19 -
 .../ch/epfl/lamp/fjbg/JSourceFileAttribute.java    |   69 -
 .../ch/epfl/lamp/fjbg/JStackMapTableAttribute.java |  282 --
 src/fjbg/ch/epfl/lamp/fjbg/JType.java              |  316 --
 src/fjbg/ch/epfl/lamp/fjbg/Main.java               |  131 -
 src/fjbg/ch/epfl/lamp/util/ByteArray.java          |  145 -
 src/intellij/README                                |   15 +-
 src/intellij/compiler.iml.SAMPLE                   |    7 +-
 src/intellij/diff.sh                               |    8 +
 src/intellij/fjbg.iml.SAMPLE                       |   12 -
 src/intellij/interactive.iml.SAMPLE                |   25 +
 src/intellij/library.iml.SAMPLE                    |    2 +-
 src/intellij/manual.iml.SAMPLE                     |    3 +-
 src/intellij/msil.iml.SAMPLE                       |   24 -
 src/intellij/partest.iml.SAMPLE                    |   28 -
 src/intellij/reflect.iml.SAMPLE                    |    2 +-
 src/intellij/repl.iml.SAMPLE                       |   25 +
 src/intellij/scala-lang.ipr.SAMPLE                 |   48 +-
 src/intellij/scala.iml.SAMPLE                      |    4 +-
 src/intellij/scaladoc.iml.SAMPLE                   |   27 +
 src/intellij/setup.sh                              |   23 +
 src/intellij/swing.iml.SAMPLE                      |   24 -
 src/intellij/test.iml.SAMPLE                       |    5 +-
 .../tools/nsc/interactive/CompilerControl.scala    |  444 ++
 .../scala/tools/nsc/interactive/ContextTrees.scala |  177 +
 .../scala/tools/nsc/interactive/Global.scala       | 1272 ++++++
 .../nsc/interactive/InteractiveReporter.scala      |    0
 .../scala/tools/nsc/interactive/Lexer.scala        |  299 ++
 .../scala/tools/nsc/interactive/Main.scala         |   34 +
 .../scala/tools/nsc/interactive/Pickler.scala      |  377 ++
 .../scala/tools/nsc/interactive/Picklers.scala     |  189 +
 .../interactive/PresentationCompilerThread.scala   |    0
 .../scala/tools/nsc/interactive/PrettyWriter.scala |   41 +
 .../scala/tools/nsc/interactive/REPL.scala         |  164 +
 .../tools/nsc/interactive/RangePositions.scala     |   14 +
 .../scala/tools/nsc/interactive/Replayer.scala     |   74 +
 .../scala/tools/nsc/interactive/Response.scala     |  107 +
 .../nsc/interactive/RichCompilationUnits.scala     |    0
 .../nsc/interactive/tests/InteractiveTest.scala    |  113 +
 .../tests/InteractiveTestSettings.scala            |   69 +
 .../scala/tools/nsc/interactive/tests/Tester.scala |  209 +
 .../nsc/interactive/tests/core/AskCommand.scala    |    0
 .../nsc/interactive/tests/core/CoreTestDefs.scala  |  128 +
 .../tests/core/PresentationCompilerInstance.scala  |   33 +
 .../PresentationCompilerRequestsWorkingMode.scala  |    0
 .../tests/core/PresentationCompilerTestDef.scala   |   18 +
 .../nsc/interactive/tests/core/Reporter.scala      |    0
 .../interactive/tests/core/SourcesCollector.scala  |   20 +
 .../nsc/interactive/tests/core/TestMarker.scala    |   29 +
 .../nsc/interactive/tests/core/TestResources.scala |    0
 .../nsc/interactive/tests/core/TestSettings.scala  |    0
 src/jline/build.sbt                                |   10 +-
 src/jline/manual-test.sh                           |    5 +-
 src/jline/project/build.properties                 |    1 +
 src/jline/project/plugins.sbt                      |    3 +
 src/jline/project/plugins/build.sbt                |    5 -
 .../scala/tools/jline/console/ConsoleReader.java   |    2 +-
 src/library-aux/scala/Any.scala                    |    2 +-
 src/library-aux/scala/AnyRef.scala                 |    6 +-
 src/library-aux/scala/Nothing.scala                |    2 +-
 src/library-aux/scala/Null.scala                   |    2 +-
 src/library/rootdoc.txt                            |   63 +-
 src/library/scala/AnyVal.scala                     |    4 +-
 src/library/scala/App.scala                        |   17 +-
 src/library/scala/Application.scala                |   79 -
 src/library/scala/Array.scala                      |   14 +-
 src/library/scala/Boolean.scala                    |   48 +-
 src/library/scala/Byte.scala                       |  342 +-
 src/library/scala/Char.scala                       |  342 +-
 src/library/scala/Console.scala                    |  344 +-
 src/library/scala/DelayedInit.scala                |    5 +-
 src/library/scala/Double.scala                     |  331 +-
 src/library/scala/Enumeration.scala                |   23 +-
 src/library/scala/Float.scala                      |  336 +-
 src/library/scala/Function0.scala                  |    4 +-
 src/library/scala/Function1.scala                  |    4 +-
 src/library/scala/Function10.scala                 |    2 +-
 src/library/scala/Function11.scala                 |    2 +-
 src/library/scala/Function12.scala                 |    2 +-
 src/library/scala/Function13.scala                 |    2 +-
 src/library/scala/Function14.scala                 |    2 +-
 src/library/scala/Function15.scala                 |    2 +-
 src/library/scala/Function16.scala                 |    2 +-
 src/library/scala/Function17.scala                 |    2 +-
 src/library/scala/Function18.scala                 |    2 +-
 src/library/scala/Function19.scala                 |    2 +-
 src/library/scala/Function2.scala                  |    2 +-
 src/library/scala/Function20.scala                 |    2 +-
 src/library/scala/Function21.scala                 |    2 +-
 src/library/scala/Function22.scala                 |    2 +-
 src/library/scala/Function3.scala                  |    2 +-
 src/library/scala/Function4.scala                  |    2 +-
 src/library/scala/Function5.scala                  |    2 +-
 src/library/scala/Function6.scala                  |    2 +-
 src/library/scala/Function7.scala                  |    2 +-
 src/library/scala/Function8.scala                  |    2 +-
 src/library/scala/Function9.scala                  |    2 +-
 src/library/scala/Int.scala                        |  342 +-
 src/library/scala/Long.scala                       |  342 +-
 src/library/scala/LowPriorityImplicits.scala       |   95 -
 src/library/scala/NotNull.scala                    |    2 +
 src/library/scala/Option.scala                     |   13 +-
 src/library/scala/PartialFunction.scala            |    2 +-
 src/library/scala/Predef.scala                     |  298 +-
 src/library/scala/Product.scala                    |    2 +-
 src/library/scala/Product1.scala                   |    2 +-
 src/library/scala/Product10.scala                  |    2 +-
 src/library/scala/Product11.scala                  |    2 +-
 src/library/scala/Product12.scala                  |    2 +-
 src/library/scala/Product13.scala                  |    2 +-
 src/library/scala/Product14.scala                  |    2 +-
 src/library/scala/Product15.scala                  |    2 +-
 src/library/scala/Product16.scala                  |    2 +-
 src/library/scala/Product17.scala                  |    2 +-
 src/library/scala/Product18.scala                  |    2 +-
 src/library/scala/Product19.scala                  |    2 +-
 src/library/scala/Product2.scala                   |    2 +-
 src/library/scala/Product20.scala                  |    2 +-
 src/library/scala/Product21.scala                  |    2 +-
 src/library/scala/Product22.scala                  |    2 +-
 src/library/scala/Product3.scala                   |    2 +-
 src/library/scala/Product4.scala                   |    2 +-
 src/library/scala/Product5.scala                   |    2 +-
 src/library/scala/Product6.scala                   |    2 +-
 src/library/scala/Product7.scala                   |    2 +-
 src/library/scala/Product8.scala                   |    2 +-
 src/library/scala/Product9.scala                   |    2 +-
 src/library/scala/Proxy.scala                      |    2 +-
 src/library/scala/Responder.scala                  |    2 +
 src/library/scala/ScalaObject.scala                |   16 -
 src/library/scala/SerialVersionUID.scala           |    2 +-
 src/library/scala/Short.scala                      |  342 +-
 src/library/scala/Specializable.scala              |    2 +-
 src/library/scala/SpecializableCompanion.scala     |   14 -
 src/library/scala/StringContext.scala              |   21 +-
 src/library/scala/Tuple1.scala                     |    3 +-
 src/library/scala/Tuple10.scala                    |    3 +-
 src/library/scala/Tuple11.scala                    |    3 +-
 src/library/scala/Tuple12.scala                    |    3 +-
 src/library/scala/Tuple13.scala                    |    3 +-
 src/library/scala/Tuple14.scala                    |    3 +-
 src/library/scala/Tuple15.scala                    |    3 +-
 src/library/scala/Tuple16.scala                    |    3 +-
 src/library/scala/Tuple17.scala                    |    3 +-
 src/library/scala/Tuple18.scala                    |    3 +-
 src/library/scala/Tuple19.scala                    |    3 +-
 src/library/scala/Tuple2.scala                     |    3 +-
 src/library/scala/Tuple20.scala                    |    3 +-
 src/library/scala/Tuple21.scala                    |    3 +-
 src/library/scala/Tuple22.scala                    |    3 +-
 src/library/scala/Tuple3.scala                     |    3 +-
 src/library/scala/Tuple4.scala                     |    3 +-
 src/library/scala/Tuple5.scala                     |    3 +-
 src/library/scala/Tuple6.scala                     |    3 +-
 src/library/scala/Tuple7.scala                     |    3 +-
 src/library/scala/Tuple8.scala                     |    3 +-
 src/library/scala/Tuple9.scala                     |    3 +-
 src/library/scala/UninitializedFieldError.scala    |    6 +-
 src/library/scala/Unit.scala                       |   11 +-
 src/library/scala/annotation/cloneable.scala       |   15 -
 src/library/scala/annotation/compileTimeOnly.scala |   22 +
 .../scala/annotation/implicitNotFound.scala        |    7 +-
 src/library/scala/annotation/migration.scala       |    7 +-
 src/library/scala/annotation/serializable.scala    |   15 -
 src/library/scala/annotation/target/package.scala  |   29 -
 src/library/scala/beans/ScalaBeanInfo.scala        |    6 +-
 src/library/scala/collection/BitSet.scala          |    3 +-
 src/library/scala/collection/BitSetLike.scala      |   47 +-
 .../scala/collection/BufferedIterator.scala        |    3 +-
 .../scala/collection/CustomParallelizable.scala    |    3 +-
 src/library/scala/collection/DefaultMap.scala      |   13 +-
 src/library/scala/collection/GenIterable.scala     |    3 +-
 src/library/scala/collection/GenIterableLike.scala |    5 +-
 src/library/scala/collection/GenIterableView.scala |   18 -
 .../scala/collection/GenIterableViewLike.scala     |   84 -
 src/library/scala/collection/GenMap.scala          |    7 +-
 src/library/scala/collection/GenMapLike.scala      |    3 +-
 src/library/scala/collection/GenSeq.scala          |    3 +-
 src/library/scala/collection/GenSeqLike.scala      |   32 +-
 src/library/scala/collection/GenSeqView.scala      |   18 -
 src/library/scala/collection/GenSeqViewLike.scala  |  164 -
 src/library/scala/collection/GenSet.scala          |    3 +-
 src/library/scala/collection/GenSetLike.scala      |    3 +-
 src/library/scala/collection/GenTraversable.scala  |   10 +-
 .../scala/collection/GenTraversableLike.scala      |   31 +-
 .../scala/collection/GenTraversableOnce.scala      |   62 +-
 .../scala/collection/GenTraversableView.scala      |   18 -
 .../scala/collection/GenTraversableViewLike.scala  |  139 -
 src/library/scala/collection/IndexedSeq.scala      |   10 +-
 src/library/scala/collection/IndexedSeqLike.scala  |    9 +-
 .../scala/collection/IndexedSeqOptimized.scala     |   20 +-
 src/library/scala/collection/Iterable.scala        |    6 +-
 src/library/scala/collection/IterableLike.scala    |   74 +-
 src/library/scala/collection/IterableProxy.scala   |    5 +-
 .../scala/collection/IterableProxyLike.scala       |    4 +-
 src/library/scala/collection/IterableView.scala    |    5 +-
 .../scala/collection/IterableViewLike.scala        |   74 +-
 src/library/scala/collection/Iterator.scala        |   79 +-
 src/library/scala/collection/JavaConversions.scala |   86 +-
 src/library/scala/collection/JavaConverters.scala  |   53 +-
 src/library/scala/collection/LinearSeq.scala       |    3 +-
 src/library/scala/collection/LinearSeqLike.scala   |   11 +-
 .../scala/collection/LinearSeqOptimized.scala      |   23 +-
 src/library/scala/collection/Map.scala             |    7 +-
 src/library/scala/collection/MapLike.scala         |    8 +-
 src/library/scala/collection/MapProxy.scala        |    3 +-
 src/library/scala/collection/MapProxyLike.scala    |    6 +-
 src/library/scala/collection/Parallel.scala        |    3 +-
 src/library/scala/collection/Parallelizable.scala  |    5 +-
 src/library/scala/collection/Searching.scala       |  118 +
 src/library/scala/collection/Seq.scala             |    5 +-
 src/library/scala/collection/SeqExtractors.scala   |    3 +-
 src/library/scala/collection/SeqLike.scala         |   77 +-
 src/library/scala/collection/SeqProxy.scala        |    4 +-
 src/library/scala/collection/SeqProxyLike.scala    |    6 +-
 src/library/scala/collection/SeqView.scala         |    5 +-
 src/library/scala/collection/SeqViewLike.scala     |  163 +-
 .../collection/Sequentializable.scala.disabled     |   10 -
 src/library/scala/collection/Set.scala             |    6 +-
 src/library/scala/collection/SetLike.scala         |   12 +-
 src/library/scala/collection/SetProxy.scala        |    3 +-
 src/library/scala/collection/SetProxyLike.scala    |    7 +-
 src/library/scala/collection/SortedMap.scala       |   15 +-
 src/library/scala/collection/SortedMapLike.scala   |   48 +-
 src/library/scala/collection/SortedSet.scala       |    3 +-
 src/library/scala/collection/SortedSetLike.scala   |   13 +-
 src/library/scala/collection/Traversable.scala     |    9 +-
 src/library/scala/collection/TraversableLike.scala |   32 +-
 src/library/scala/collection/TraversableOnce.scala |   92 +-
 .../scala/collection/TraversableProxy.scala        |    3 +-
 .../scala/collection/TraversableProxyLike.scala    |    4 +-
 src/library/scala/collection/TraversableView.scala |    6 +-
 .../scala/collection/TraversableViewLike.scala     |  139 +-
 .../scala/collection/concurrent/BasicNode.java     |   11 +-
 .../scala/collection/concurrent/CNodeBase.java     |   18 +-
 src/library/scala/collection/concurrent/Gen.java   |    9 +-
 .../scala/collection/concurrent/INodeBase.java     |   20 +-
 .../scala/collection/concurrent/MainNode.java      |   21 +-
 src/library/scala/collection/concurrent/Map.scala  |    3 +-
 .../scala/collection/concurrent/TrieMap.scala      |   29 +-
 .../scala/collection/convert/DecorateAsJava.scala  |   27 +-
 .../scala/collection/convert/DecorateAsScala.scala |   22 +-
 .../scala/collection/convert/Decorators.scala      |    5 +-
 .../scala/collection/convert/WrapAsJava.scala      |   34 +-
 .../scala/collection/convert/WrapAsScala.scala     |   47 +-
 .../scala/collection/convert/Wrappers.scala        |   79 +-
 src/library/scala/collection/convert/package.scala |    3 +-
 .../scala/collection/generic/BitOperations.scala   |    3 +-
 .../scala/collection/generic/BitSetFactory.scala   |    3 +-
 .../scala/collection/generic/CanBuildFrom.scala    |    3 +-
 .../scala/collection/generic/CanCombineFrom.scala  |    3 +-
 .../generic/ClassTagTraversableFactory.scala       |    3 +-
 .../scala/collection/generic/Clearable.scala       |    3 +-
 .../scala/collection/generic/FilterMonadic.scala   |    5 +-
 .../scala/collection/generic/GenMapFactory.scala   |    5 +-
 .../scala/collection/generic/GenSeqFactory.scala   |    3 +-
 .../scala/collection/generic/GenSetFactory.scala   |    3 +-
 .../collection/generic/GenTraversableFactory.scala |   22 +-
 .../generic/GenericClassTagCompanion.scala         |    7 +-
 .../GenericClassTagTraversableTemplate.scala       |    3 +-
 .../collection/generic/GenericCompanion.scala      |    7 +-
 .../generic/GenericOrderedCompanion.scala          |    7 +-
 .../GenericOrderedTraversableTemplate.scala        |    3 +-
 .../collection/generic/GenericParCompanion.scala   |    4 +-
 .../collection/generic/GenericParTemplate.scala    |    4 +-
 .../collection/generic/GenericSeqCompanion.scala   |    4 +-
 .../collection/generic/GenericSetTemplate.scala    |    3 +-
 .../generic/GenericTraversableTemplate.scala       |   67 +-
 .../scala/collection/generic/Growable.scala        |   20 +-
 .../scala/collection/generic/HasNewBuilder.scala   |    3 +-
 .../scala/collection/generic/HasNewCombiner.scala  |    4 +-
 .../collection/generic/ImmutableMapFactory.scala   |    3 +-
 .../collection/generic/ImmutableSetFactory.scala   |    6 +-
 .../generic/ImmutableSortedMapFactory.scala        |    3 +-
 .../generic/ImmutableSortedSetFactory.scala        |    3 +-
 .../collection/generic/IndexedSeqFactory.scala     |   22 +
 .../scala/collection/generic/IsSeqLike.scala       |   58 +
 .../collection/generic/IsTraversableLike.scala     |    3 +-
 .../collection/generic/IsTraversableOnce.scala     |    3 +-
 .../collection/generic/IterableForwarder.scala     |    8 +-
 .../scala/collection/generic/MapFactory.scala      |    3 +-
 .../collection/generic/MutableMapFactory.scala     |    3 +-
 .../collection/generic/MutableSetFactory.scala     |    3 +-
 .../generic/MutableSortedSetFactory.scala          |    3 +-
 .../generic/OrderedTraversableFactory.scala        |    3 +-
 .../scala/collection/generic/ParFactory.scala      |    9 +-
 .../scala/collection/generic/ParMapFactory.scala   |    4 +-
 .../scala/collection/generic/ParSetFactory.scala   |    4 +-
 .../scala/collection/generic/SeqFactory.scala      |    3 +-
 .../scala/collection/generic/SeqForwarder.scala    |    7 +-
 .../scala/collection/generic/SetFactory.scala      |    3 +-
 .../scala/collection/generic/Shrinkable.scala      |   10 +-
 .../scala/collection/generic/Signalling.scala      |   33 +-
 src/library/scala/collection/generic/Sizing.scala  |    4 +-
 .../scala/collection/generic/SliceInterval.scala   |    3 +-
 src/library/scala/collection/generic/Sorted.scala  |   38 +-
 .../collection/generic/SortedMapFactory.scala      |    5 +-
 .../collection/generic/SortedSetFactory.scala      |    7 +-
 .../scala/collection/generic/Subtractable.scala    |    3 +-
 .../collection/generic/TraversableFactory.scala    |    3 +-
 .../collection/generic/TraversableForwarder.scala  |    5 +-
 src/library/scala/collection/generic/package.scala |    3 +-
 .../scala/collection/immutable/BitSet.scala        |   12 +-
 .../scala/collection/immutable/DefaultMap.scala    |   23 +-
 .../immutable/GenIterable.scala.disabled           |   37 -
 .../collection/immutable/GenMap.scala.disabled     |   36 -
 .../collection/immutable/GenSeq.scala.disabled     |   49 -
 .../collection/immutable/GenSet.scala.disabled     |   43 -
 .../immutable/GenTraversable.scala.disabled        |   41 -
 .../scala/collection/immutable/HashMap.scala       |  208 +-
 .../scala/collection/immutable/HashSet.scala       |  793 +++-
 .../scala/collection/immutable/IndexedSeq.scala    |   14 +-
 .../scala/collection/immutable/IntMap.scala        |   11 +-
 .../scala/collection/immutable/Iterable.scala      |    3 +-
 .../scala/collection/immutable/LinearSeq.scala     |    3 +-
 src/library/scala/collection/immutable/List.scala  |  432 +-
 .../scala/collection/immutable/ListMap.scala       |   63 +-
 .../scala/collection/immutable/ListSet.scala       |   17 +-
 .../scala/collection/immutable/LongMap.scala       |   24 +-
 src/library/scala/collection/immutable/Map.scala   |   13 +-
 .../scala/collection/immutable/MapLike.scala       |    9 +-
 .../scala/collection/immutable/MapProxy.scala      |    4 +-
 .../scala/collection/immutable/NumericRange.scala  |  159 +-
 .../scala/collection/immutable/PagedSeq.scala      |   13 +-
 src/library/scala/collection/immutable/Queue.scala |   21 +-
 src/library/scala/collection/immutable/Range.scala |  218 +-
 .../scala/collection/immutable/RedBlack.scala      |  293 --
 .../scala/collection/immutable/RedBlackTree.scala  |  215 +-
 src/library/scala/collection/immutable/Seq.scala   |    3 +-
 src/library/scala/collection/immutable/Set.scala   |   61 +-
 .../scala/collection/immutable/SetProxy.scala      |    4 +-
 .../scala/collection/immutable/SortedMap.scala     |   13 +-
 .../scala/collection/immutable/SortedSet.scala     |    3 +-
 src/library/scala/collection/immutable/Stack.scala |    4 +-
 .../scala/collection/immutable/Stream.scala        |   86 +-
 .../scala/collection/immutable/StreamView.scala    |    3 +-
 .../collection/immutable/StreamViewLike.scala      |    9 +-
 .../scala/collection/immutable/StringLike.scala    |   43 +-
 .../scala/collection/immutable/StringOps.scala     |    5 +-
 .../scala/collection/immutable/Traversable.scala   |    3 +-
 .../scala/collection/immutable/TreeMap.scala       |   23 +-
 .../scala/collection/immutable/TreeSet.scala       |   17 +-
 .../scala/collection/immutable/TrieIterator.scala  |    8 +-
 .../scala/collection/immutable/Vector.scala        |   89 +-
 .../scala/collection/immutable/WrappedString.scala |    4 +-
 .../scala/collection/immutable/package.scala       |   93 -
 src/library/scala/collection/mutable/AVLTree.scala |   20 +-
 .../scala/collection/mutable/AnyRefMap.scala       |  459 ++
 .../scala/collection/mutable/ArrayBuffer.scala     |    3 +-
 .../scala/collection/mutable/ArrayBuilder.scala    |   15 +-
 .../scala/collection/mutable/ArrayLike.scala       |   11 +-
 .../scala/collection/mutable/ArrayOps.scala        |   80 +-
 .../scala/collection/mutable/ArraySeq.scala        |    5 +-
 .../scala/collection/mutable/ArrayStack.scala      |    6 +-
 src/library/scala/collection/mutable/BitSet.scala  |   69 +-
 src/library/scala/collection/mutable/Buffer.scala  |    5 +-
 .../scala/collection/mutable/BufferLike.scala      |   14 +-
 .../scala/collection/mutable/BufferProxy.scala     |   10 +-
 src/library/scala/collection/mutable/Builder.scala |    2 +-
 .../scala/collection/mutable/Cloneable.scala       |    3 +-
 .../scala/collection/mutable/ConcurrentMap.scala   |   90 -
 .../scala/collection/mutable/DefaultEntry.scala    |    3 +-
 .../scala/collection/mutable/DefaultMapModel.scala |    4 +-
 .../collection/mutable/DoubleLinkedList.scala      |    7 +-
 .../collection/mutable/DoubleLinkedListLike.scala  |    4 +-
 .../scala/collection/mutable/FlatHashTable.scala   |  136 +-
 .../collection/mutable/GenIterable.scala.disabled  |   37 -
 .../scala/collection/mutable/GenMap.scala.disabled |   40 -
 .../scala/collection/mutable/GenSeq.scala.disabled |   44 -
 .../scala/collection/mutable/GenSet.scala.disabled |   46 -
 .../mutable/GenTraversable.scala.disabled          |   38 -
 .../scala/collection/mutable/GrowingBuilder.scala  |    3 +-
 .../scala/collection/mutable/HashEntry.scala       |    3 +-
 src/library/scala/collection/mutable/HashMap.scala |   15 +-
 src/library/scala/collection/mutable/HashSet.scala |   23 +-
 .../scala/collection/mutable/HashTable.scala       |   33 +-
 src/library/scala/collection/mutable/History.scala |   11 +-
 .../collection/mutable/ImmutableMapAdaptor.scala   |    4 +-
 .../collection/mutable/ImmutableSetAdaptor.scala   |    9 +-
 .../scala/collection/mutable/IndexedSeq.scala      |    3 +-
 .../scala/collection/mutable/IndexedSeqLike.scala  |    6 +-
 .../collection/mutable/IndexedSeqOptimized.scala   |    6 +-
 .../scala/collection/mutable/IndexedSeqView.scala  |    6 +-
 .../scala/collection/mutable/Iterable.scala        |    5 +-
 .../scala/collection/mutable/LazyBuilder.scala     |    3 +-
 .../scala/collection/mutable/LinearSeq.scala       |    3 +-
 .../scala/collection/mutable/LinkedEntry.scala     |    3 +-
 .../scala/collection/mutable/LinkedHashMap.scala   |   24 +-
 .../scala/collection/mutable/LinkedHashSet.scala   |   10 +-
 .../scala/collection/mutable/LinkedList.scala      |    5 +-
 .../scala/collection/mutable/LinkedListLike.scala  |    9 +-
 .../scala/collection/mutable/ListBuffer.scala      |   87 +-
 src/library/scala/collection/mutable/ListMap.scala |    9 +-
 src/library/scala/collection/mutable/LongMap.scala |  569 +++
 src/library/scala/collection/mutable/Map.scala     |   19 +-
 .../scala/collection/mutable/MapBuilder.scala      |    3 +-
 src/library/scala/collection/mutable/MapLike.scala |    9 +-
 .../scala/collection/mutable/MapProxy.scala        |    4 +-
 .../scala/collection/mutable/MultiMap.scala        |    3 +-
 .../scala/collection/mutable/MutableList.scala     |   12 +-
 .../collection/mutable/ObservableBuffer.scala      |    6 +-
 .../scala/collection/mutable/ObservableMap.scala   |    6 +-
 .../scala/collection/mutable/ObservableSet.scala   |    6 +-
 .../scala/collection/mutable/OpenHashMap.scala     |   46 +-
 .../scala/collection/mutable/PriorityQueue.scala   |   20 +-
 .../collection/mutable/PriorityQueueProxy.scala    |   16 +-
 .../scala/collection/mutable/Publisher.scala       |    5 +-
 src/library/scala/collection/mutable/Queue.scala   |   13 +-
 .../scala/collection/mutable/QueueProxy.scala      |    8 +-
 .../scala/collection/mutable/ResizableArray.scala  |   22 +-
 .../collection/mutable/RevertibleHistory.scala     |    7 +-
 src/library/scala/collection/mutable/Seq.scala     |    5 +-
 src/library/scala/collection/mutable/SeqLike.scala |    4 +-
 src/library/scala/collection/mutable/Set.scala     |    5 +-
 .../scala/collection/mutable/SetBuilder.scala      |    6 +-
 src/library/scala/collection/mutable/SetLike.scala |   20 +-
 .../scala/collection/mutable/SetProxy.scala        |    4 +-
 .../scala/collection/mutable/SortedSet.scala       |    3 +-
 src/library/scala/collection/mutable/Stack.scala   |    5 +-
 .../scala/collection/mutable/StackProxy.scala      |   10 +-
 .../scala/collection/mutable/StringBuilder.scala   |   11 +-
 .../scala/collection/mutable/Subscriber.scala      |    3 +-
 .../collection/mutable/SynchronizedBuffer.scala    |    7 +-
 .../scala/collection/mutable/SynchronizedMap.scala |    6 +-
 .../mutable/SynchronizedPriorityQueue.scala        |   16 +-
 .../collection/mutable/SynchronizedQueue.scala     |   10 +-
 .../scala/collection/mutable/SynchronizedSet.scala |    9 +-
 .../collection/mutable/SynchronizedStack.scala     |    8 +-
 .../scala/collection/mutable/Traversable.scala     |    3 +-
 src/library/scala/collection/mutable/TreeSet.scala |  113 +-
 .../scala/collection/mutable/Undoable.scala        |    3 +-
 .../scala/collection/mutable/UnrolledBuffer.scala  |   23 +-
 .../scala/collection/mutable/WeakHashMap.scala     |    3 +-
 .../scala/collection/mutable/WrappedArray.scala    |    3 +-
 .../collection/mutable/WrappedArrayBuilder.scala   |    4 +-
 .../scala/collection/parallel/Combiner.scala       |   30 +-
 .../scala/collection/parallel/ParIterable.scala    |    4 +-
 .../collection/parallel/ParIterableLike.scala      |  142 +-
 .../collection/parallel/ParIterableView.scala      |   46 -
 .../collection/parallel/ParIterableViewLike.scala  |  203 -
 src/library/scala/collection/parallel/ParMap.scala |    3 +-
 .../scala/collection/parallel/ParMapLike.scala     |   27 +-
 src/library/scala/collection/parallel/ParSeq.scala |   35 +-
 .../scala/collection/parallel/ParSeqLike.scala     |   29 +-
 .../scala/collection/parallel/ParSeqView.scala     |   46 -
 .../scala/collection/parallel/ParSeqViewLike.scala |  188 -
 src/library/scala/collection/parallel/ParSet.scala |   56 +-
 .../scala/collection/parallel/ParSetLike.scala     |   36 +-
 .../collection/parallel/PreciseSplitter.scala      |   11 +-
 .../collection/parallel/RemainsIterator.scala      |  135 +-
 .../scala/collection/parallel/Splitter.scala       |    5 +-
 .../scala/collection/parallel/TaskSupport.scala    |   53 +-
 src/library/scala/collection/parallel/Tasks.scala  |  220 +-
 .../collection/parallel/immutable/ParHashMap.scala |   31 +-
 .../collection/parallel/immutable/ParHashSet.scala |    5 +-
 .../parallel/immutable/ParIterable.scala           |   30 +-
 .../collection/parallel/immutable/ParMap.scala     |    5 +-
 .../immutable/ParNumericRange.scala.disabled       |  128 -
 .../collection/parallel/immutable/ParRange.scala   |   10 +-
 .../collection/parallel/immutable/ParSeq.scala     |   13 +-
 .../collection/parallel/immutable/ParSet.scala     |    4 +-
 .../collection/parallel/immutable/ParVector.scala  |   21 +-
 .../collection/parallel/immutable/package.scala    |    3 +-
 .../collection/parallel/mutable/LazyCombiner.scala |    4 +-
 .../collection/parallel/mutable/ParArray.scala     |   61 +-
 .../parallel/mutable/ParFlatHashTable.scala        |   23 +-
 .../collection/parallel/mutable/ParHashMap.scala   |   55 +-
 .../collection/parallel/mutable/ParHashSet.scala   |   80 +-
 .../collection/parallel/mutable/ParHashTable.scala |   18 +-
 .../collection/parallel/mutable/ParIterable.scala  |   30 +-
 .../scala/collection/parallel/mutable/ParMap.scala |   42 +-
 .../collection/parallel/mutable/ParMapLike.scala   |   10 +-
 .../scala/collection/parallel/mutable/ParSeq.scala |   26 +-
 .../scala/collection/parallel/mutable/ParSet.scala |   12 +-
 .../collection/parallel/mutable/ParSetLike.scala   |   50 +-
 .../collection/parallel/mutable/ParTrieMap.scala   |   25 +-
 .../mutable/ResizableParArrayCombiner.scala        |    5 +-
 .../mutable/UnrolledParArrayCombiner.scala         |    9 +-
 .../collection/parallel/mutable/package.scala      |    3 +-
 .../scala/collection/parallel/package.scala        |   83 +-
 src/library/scala/collection/script/Location.scala |   12 +-
 src/library/scala/collection/script/Message.scala  |   11 +-
 .../scala/collection/script/Scriptable.scala       |    4 +-
 src/library/scala/compat/Platform.scala            |    5 +-
 src/library/scala/concurrent/Awaitable.scala       |   12 +-
 .../scala/concurrent/BatchingExecutor.scala        |  117 +
 .../scala/concurrent/ExecutionContext.scala        |   61 +-
 src/library/scala/concurrent/Future.scala          |  195 +-
 .../scala/concurrent/FutureTaskRunner.scala        |    6 +-
 src/library/scala/concurrent/JavaConversions.scala |   32 -
 src/library/scala/concurrent/Lock.scala            |    1 +
 src/library/scala/concurrent/ManagedBlocker.scala  |    2 +-
 src/library/scala/concurrent/Promise.scala         |   42 +-
 src/library/scala/concurrent/SyncVar.scala         |   24 +-
 src/library/scala/concurrent/TaskRunner.scala      |    3 +-
 src/library/scala/concurrent/TaskRunners.scala     |   36 -
 .../scala/concurrent/ThreadPoolRunner.scala        |    2 +-
 src/library/scala/concurrent/ThreadRunner.scala    |   60 -
 .../scala/concurrent/duration/Duration.scala       |   45 +-
 .../scala/concurrent/duration/package.scala        |   26 +-
 .../concurrent/impl/ExecutionContextImpl.scala     |   57 +-
 src/library/scala/concurrent/impl/Future.scala     |    2 +-
 src/library/scala/concurrent/impl/Promise.scala    |   12 +-
 src/library/scala/concurrent/ops.scala             |   73 -
 src/library/scala/concurrent/package.scala         |   30 +-
 src/library/scala/deprecatedInheritance.scala      |    3 +-
 src/library/scala/io/AnsiColor.scala               |   53 +
 src/library/scala/io/BufferedSource.scala          |   46 +-
 src/library/scala/io/BytePickle.scala              |  318 --
 src/library/scala/io/Codec.scala                   |   59 +-
 src/library/scala/io/Position.scala                |   15 +-
 src/library/scala/io/Source.scala                  |   15 +-
 src/library/scala/io/StdIn.scala                   |  229 +
 src/library/scala/io/UTF8Codec.scala               |   32 -
 src/library/scala/math/BigDecimal.scala            |  501 +-
 src/library/scala/math/BigInt.scala                |   25 +-
 src/library/scala/math/Equiv.scala                 |    3 +-
 src/library/scala/math/Fractional.scala            |    3 +-
 src/library/scala/math/Integral.scala              |    3 +-
 src/library/scala/math/Numeric.scala               |   25 +-
 src/library/scala/math/Ordered.scala               |    3 +-
 src/library/scala/math/Ordering.scala              |    8 +-
 src/library/scala/math/PartialOrdering.scala       |    3 +-
 src/library/scala/math/PartiallyOrdered.scala      |    3 +-
 src/library/scala/math/ScalaNumber.java            |    2 -
 .../scala/math/ScalaNumericConversions.scala       |   20 +-
 src/library/scala/math/package.scala               |   52 +-
 src/library/scala/package.scala                    |   13 +-
 src/library/scala/parallel/Future.scala            |   39 -
 src/library/scala/parallel/package.scala.disabled  |  178 -
 src/library/scala/ref/SoftReference.scala          |    3 +-
 src/library/scala/ref/WeakReference.scala          |    4 +-
 .../reflect/ClassManifestDeprecatedApis.scala      |   26 +-
 src/library/scala/reflect/ClassTag.scala           |    2 +-
 src/library/scala/reflect/Manifest.scala           |   15 +-
 src/library/scala/reflect/NameTransformer.scala    |   21 +-
 src/library/scala/reflect/NoManifest.scala         |    3 +-
 src/library/scala/reflect/OptManifest.scala        |    3 +-
 src/library/scala/reflect/package.scala            |   31 +-
 src/library/scala/runtime/AbstractFunction0.scala  |    2 +-
 src/library/scala/runtime/AbstractFunction1.scala  |    4 +-
 src/library/scala/runtime/AbstractFunction10.scala |    2 +-
 src/library/scala/runtime/AbstractFunction11.scala |    2 +-
 src/library/scala/runtime/AbstractFunction12.scala |    2 +-
 src/library/scala/runtime/AbstractFunction13.scala |    2 +-
 src/library/scala/runtime/AbstractFunction14.scala |    2 +-
 src/library/scala/runtime/AbstractFunction15.scala |    2 +-
 src/library/scala/runtime/AbstractFunction16.scala |    2 +-
 src/library/scala/runtime/AbstractFunction17.scala |    2 +-
 src/library/scala/runtime/AbstractFunction18.scala |    2 +-
 src/library/scala/runtime/AbstractFunction19.scala |    2 +-
 src/library/scala/runtime/AbstractFunction2.scala  |    2 +-
 src/library/scala/runtime/AbstractFunction20.scala |    2 +-
 src/library/scala/runtime/AbstractFunction21.scala |    2 +-
 src/library/scala/runtime/AbstractFunction22.scala |    2 +-
 src/library/scala/runtime/AbstractFunction3.scala  |    2 +-
 src/library/scala/runtime/AbstractFunction4.scala  |    2 +-
 src/library/scala/runtime/AbstractFunction5.scala  |    2 +-
 src/library/scala/runtime/AbstractFunction6.scala  |    2 +-
 src/library/scala/runtime/AbstractFunction7.scala  |    2 +-
 src/library/scala/runtime/AbstractFunction8.scala  |    2 +-
 src/library/scala/runtime/AbstractFunction9.scala  |    2 +-
 .../scala/runtime/AbstractPartialFunction.scala    |    7 +-
 src/library/scala/runtime/BooleanRef.java          |    3 +
 src/library/scala/runtime/Boxed.scala              |   11 +-
 src/library/scala/runtime/BoxesRunTime.java        |    1 -
 src/library/scala/runtime/ByteRef.java             |    3 +
 src/library/scala/runtime/CharRef.java             |    3 +
 src/library/scala/runtime/DoubleRef.java           |    3 +
 src/library/scala/runtime/FloatRef.java            |    3 +
 src/library/scala/runtime/IntRef.java              |    3 +
 src/library/scala/runtime/LongRef.java             |    3 +
 src/library/scala/runtime/MethodCache.scala        |   12 +-
 .../scala/runtime/NonLocalReturnControl.scala      |    3 +-
 src/library/scala/runtime/Nothing$.scala           |    4 +-
 src/library/scala/runtime/Null$.scala              |    8 +-
 src/library/scala/runtime/ObjectRef.java           |    4 +
 src/library/scala/runtime/RichBoolean.scala        |    4 +-
 src/library/scala/runtime/RichByte.scala           |   18 +-
 src/library/scala/runtime/RichChar.scala           |   18 +-
 src/library/scala/runtime/RichDouble.scala         |   47 +-
 src/library/scala/runtime/RichException.scala      |    4 +-
 src/library/scala/runtime/RichFloat.scala          |   55 +-
 src/library/scala/runtime/RichInt.scala            |   53 +-
 src/library/scala/runtime/RichLong.scala           |   33 +-
 src/library/scala/runtime/RichShort.scala          |   18 +-
 src/library/scala/runtime/ScalaNumberProxy.scala   |   11 +-
 src/library/scala/runtime/ScalaRunTime.scala       |   65 +-
 src/library/scala/runtime/SeqCharSequence.scala    |    3 +
 src/library/scala/runtime/ShortRef.java            |    3 +
 src/library/scala/runtime/StringAdd.scala          |    5 +-
 src/library/scala/runtime/StringFormat.scala       |    5 +-
 src/library/scala/runtime/Tuple2Zipped.scala       |   30 +-
 src/library/scala/runtime/Tuple3Zipped.scala       |   30 +-
 src/library/scala/runtime/VolatileBooleanRef.java  |    3 +
 src/library/scala/runtime/VolatileByteRef.java     |    3 +
 src/library/scala/runtime/VolatileCharRef.java     |    3 +
 src/library/scala/runtime/VolatileDoubleRef.java   |    3 +
 src/library/scala/runtime/VolatileFloatRef.java    |    3 +
 src/library/scala/runtime/VolatileIntRef.java      |    3 +
 src/library/scala/runtime/VolatileLongRef.java     |    3 +
 src/library/scala/runtime/VolatileObjectRef.java   |    4 +
 src/library/scala/runtime/VolatileShortRef.java    |    3 +
 src/library/scala/runtime/WorksheetSupport.scala   |   94 -
 src/library/scala/sys/BooleanProp.scala            |    3 +-
 src/library/scala/sys/PropImpl.scala               |    3 +-
 src/library/scala/sys/ShutdownHookThread.scala     |    3 +-
 src/library/scala/sys/SystemProperties.scala       |    4 +-
 src/library/scala/sys/process/BasicIO.scala        |   33 +-
 src/library/scala/sys/process/Process.scala        |   20 +-
 src/library/scala/sys/process/ProcessBuilder.scala |   93 +-
 .../scala/sys/process/ProcessBuilderImpl.scala     |   35 +-
 src/library/scala/sys/process/ProcessIO.scala      |    3 +-
 src/library/scala/sys/process/ProcessImpl.scala    |   19 +-
 src/library/scala/sys/process/ProcessLogger.scala  |    3 +-
 src/library/scala/sys/process/package.scala        |    9 +-
 src/library/scala/testing/Benchmark.scala          |  114 -
 src/library/scala/testing/Show.scala               |   75 -
 src/library/scala/text/Document.scala              |   10 +-
 src/library/scala/throws.scala                     |    2 +-
 src/library/scala/transient.scala                  |    2 -
 src/library/scala/util/DynamicVariable.scala       |    3 +-
 src/library/scala/util/Either.scala                |   11 +-
 src/library/scala/util/Marshal.scala               |   50 -
 src/library/scala/util/MurmurHash.scala            |    8 +-
 src/library/scala/util/Properties.scala            |   19 +-
 src/library/scala/util/Random.scala                |    7 +-
 src/library/scala/util/Sorting.scala               |    4 +-
 src/library/scala/util/Try.scala                   |   34 +-
 .../scala/util/automata/BaseBerrySethi.scala       |   98 -
 src/library/scala/util/automata/DetWordAutom.scala |   49 -
 src/library/scala/util/automata/Inclusion.scala    |   69 -
 .../scala/util/automata/NondetWordAutom.scala      |   59 -
 .../scala/util/automata/SubsetConstruction.scala   |  107 -
 .../scala/util/automata/WordBerrySethi.scala       |  164 -
 src/library/scala/util/control/Breaks.scala        |    3 +-
 .../scala/util/control/ControlThrowable.scala      |    3 +-
 src/library/scala/util/control/Exception.scala     |    3 +-
 src/library/scala/util/control/NonFatal.scala      |   11 +-
 src/library/scala/util/control/TailCalls.scala     |   66 +-
 src/library/scala/util/grammar/HedgeRHS.scala      |   26 -
 src/library/scala/util/grammar/TreeRHS.scala       |   22 -
 .../scala/util/hashing/ByteswapHashing.scala       |   13 +-
 src/library/scala/util/hashing/Hashing.scala       |    3 +-
 src/library/scala/util/hashing/MurmurHash3.scala   |   13 +-
 src/library/scala/util/hashing/package.scala       |    9 +-
 src/library/scala/util/logging/ConsoleLogger.scala |   26 -
 src/library/scala/util/logging/Logged.scala        |   33 -
 src/library/scala/util/matching/Regex.scala        |  201 +-
 .../scala/util/parsing/ast/AbstractSyntax.scala    |   32 -
 src/library/scala/util/parsing/ast/Binders.scala   |  347 --
 .../parsing/combinator/ImplicitConversions.scala   |   42 -
 .../util/parsing/combinator/JavaTokenParsers.scala |   61 -
 .../util/parsing/combinator/PackratParsers.scala   |  312 --
 .../scala/util/parsing/combinator/Parsers.scala    |  928 ----
 .../util/parsing/combinator/RegexParsers.scala     |  165 -
 .../util/parsing/combinator/lexical/Lexical.scala  |   39 -
 .../util/parsing/combinator/lexical/Scanners.scala |   65 -
 .../parsing/combinator/lexical/StdLexical.scala    |   86 -
 .../syntactical/StandardTokenParsers.scala         |   31 -
 .../combinator/syntactical/StdTokenParsers.scala   |   51 -
 .../combinator/syntactical/TokenParsers.scala      |   34 -
 .../parsing/combinator/testing/RegexTest.scala     |   27 -
 .../util/parsing/combinator/testing/Tester.scala   |   45 -
 .../util/parsing/combinator/token/StdTokens.scala  |   38 -
 .../util/parsing/combinator/token/Tokens.scala     |   42 -
 .../scala/util/parsing/input/CharArrayReader.scala |   34 -
 .../util/parsing/input/CharSequenceReader.scala    |   65 -
 .../scala/util/parsing/input/NoPosition.scala      |   24 -
 .../scala/util/parsing/input/OffsetPosition.scala  |   72 -
 .../scala/util/parsing/input/PagedSeqReader.scala  |   70 -
 .../scala/util/parsing/input/Position.scala        |   61 -
 .../scala/util/parsing/input/Positional.scala      |   29 -
 src/library/scala/util/parsing/input/Reader.scala  |   61 -
 .../scala/util/parsing/input/StreamReader.scala    |   75 -
 src/library/scala/util/parsing/json/JSON.scala     |   98 -
 src/library/scala/util/parsing/json/Lexer.scala    |   89 -
 src/library/scala/util/parsing/json/Parser.scala   |  142 -
 src/library/scala/util/regexp/Base.scala           |   66 -
 .../scala/util/regexp/PointedHedgeExp.scala        |   36 -
 src/library/scala/util/regexp/SyntaxError.scala    |   20 -
 src/library/scala/util/regexp/WordExp.scala        |   58 -
 src/library/scala/volatile.scala                   |    2 -
 src/library/scala/xml/Atom.scala                   |   46 -
 src/library/scala/xml/Attribute.scala              |  100 -
 src/library/scala/xml/Comment.scala                |   30 -
 src/library/scala/xml/Document.scala               |   91 -
 src/library/scala/xml/Elem.scala                   |  111 -
 src/library/scala/xml/EntityRef.scala              |   39 -
 src/library/scala/xml/Equality.scala               |  106 -
 src/library/scala/xml/Group.scala                  |   41 -
 .../scala/xml/MalformedAttributeException.scala    |   14 -
 src/library/scala/xml/MetaData.scala               |  216 -
 src/library/scala/xml/NamespaceBinding.scala       |   64 -
 src/library/scala/xml/Node.scala                   |  197 -
 src/library/scala/xml/NodeBuffer.scala             |   46 -
 src/library/scala/xml/NodeSeq.scala                |  151 -
 src/library/scala/xml/Null.scala                   |   61 -
 src/library/scala/xml/PCData.scala                 |   43 -
 src/library/scala/xml/PrefixedAttribute.scala      |   60 -
 src/library/scala/xml/PrettyPrinter.scala          |  263 --
 src/library/scala/xml/ProcInstr.scala              |   38 -
 src/library/scala/xml/QNode.scala                  |   19 -
 src/library/scala/xml/SpecialNode.scala            |   32 -
 src/library/scala/xml/Text.scala                   |   38 -
 src/library/scala/xml/TextBuffer.scala             |   45 -
 src/library/scala/xml/TopScope.scala               |   30 -
 src/library/scala/xml/TypeSymbol.scala             |   14 -
 src/library/scala/xml/Unparsed.scala               |   35 -
 src/library/scala/xml/UnprefixedAttribute.scala    |   60 -
 src/library/scala/xml/Utility.scala                |  409 --
 src/library/scala/xml/XML.scala                    |  110 -
 src/library/scala/xml/Xhtml.scala                  |   96 -
 src/library/scala/xml/dtd/ContentModel.scala       |  120 -
 src/library/scala/xml/dtd/ContentModelParser.scala |  128 -
 src/library/scala/xml/dtd/DTD.scala                |   34 -
 src/library/scala/xml/dtd/Decl.scala               |  156 -
 src/library/scala/xml/dtd/DocType.scala            |   34 -
 src/library/scala/xml/dtd/ElementValidator.scala   |  129 -
 src/library/scala/xml/dtd/ExternalID.scala         |   75 -
 src/library/scala/xml/dtd/Scanner.scala            |   78 -
 src/library/scala/xml/dtd/Tokens.scala             |   44 -
 .../scala/xml/dtd/ValidationException.scala        |   43 -
 src/library/scala/xml/factory/Binder.scala         |   60 -
 .../scala/xml/factory/LoggedNodeFactory.scala      |   85 -
 src/library/scala/xml/factory/NodeFactory.scala    |   60 -
 src/library/scala/xml/factory/XMLLoader.scala      |   60 -
 .../xml/include/CircularIncludeException.scala     |   24 -
 .../xml/include/UnavailableResourceException.scala |   19 -
 .../scala/xml/include/XIncludeException.scala      |   57 -
 .../scala/xml/include/sax/EncodingHeuristics.scala |   99 -
 src/library/scala/xml/include/sax/Main.scala       |   82 -
 .../scala/xml/include/sax/XIncludeFilter.scala     |  372 --
 src/library/scala/xml/include/sax/XIncluder.scala  |  188 -
 src/library/scala/xml/package.scala                |   19 -
 .../scala/xml/parsing/ConstructingHandler.scala    |   33 -
 .../scala/xml/parsing/ConstructingParser.scala     |   54 -
 .../scala/xml/parsing/DefaultMarkupHandler.scala   |   29 -
 .../scala/xml/parsing/ExternalSources.scala        |   37 -
 src/library/scala/xml/parsing/FactoryAdapter.scala |  186 -
 src/library/scala/xml/parsing/FatalError.scala     |   16 -
 src/library/scala/xml/parsing/MarkupHandler.scala  |  124 -
 src/library/scala/xml/parsing/MarkupParser.scala   |  941 ----
 .../scala/xml/parsing/MarkupParserCommon.scala     |  260 --
 .../xml/parsing/NoBindingFactoryAdapter.scala      |   36 -
 src/library/scala/xml/parsing/TokenTests.scala     |  100 -
 .../xml/parsing/ValidatingMarkupHandler.scala      |  119 -
 src/library/scala/xml/parsing/XhtmlEntities.scala  |   53 -
 src/library/scala/xml/parsing/XhtmlParser.scala    |   30 -
 .../scala/xml/persistent/CachedFileStorage.scala   |  125 -
 src/library/scala/xml/persistent/Index.scala       |   16 -
 src/library/scala/xml/persistent/SetStorage.scala  |   43 -
 src/library/scala/xml/pull/XMLEvent.scala          |   59 -
 src/library/scala/xml/pull/XMLEventReader.scala    |  156 -
 src/library/scala/xml/pull/package.scala           |   41 -
 .../scala/xml/transform/BasicTransformer.scala     |   59 -
 src/library/scala/xml/transform/RewriteRule.scala  |   27 -
 .../scala/xml/transform/RuleTransformer.scala      |   17 -
 src/manual/scala/man1/scala.scala                  |   14 +-
 src/manual/scala/man1/scalac.scala                 |  210 +-
 src/manual/scala/tools/docutil/EmitManPage.scala   |    2 +-
 src/manual/scala/tools/docutil/ManPage.scala       |    2 +
 .../scala/tools/docutil/resources/index.html       |    4 +-
 src/msil/ch/epfl/lamp/compiler/msil/Assembly.java  |  253 --
 .../ch/epfl/lamp/compiler/msil/AssemblyName.java   |   96 -
 src/msil/ch/epfl/lamp/compiler/msil/Attribute.java |  654 ---
 .../ch/epfl/lamp/compiler/msil/BindingFlags.java   |  169 -
 .../lamp/compiler/msil/CallingConventions.java     |   75 -
 .../epfl/lamp/compiler/msil/ConstructedType.java   |   48 -
 .../epfl/lamp/compiler/msil/ConstructorInfo.java   |   54 -
 .../compiler/msil/CustomAttributeProvider.java     |   82 -
 .../ch/epfl/lamp/compiler/msil/CustomModifier.java |   45 -
 .../epfl/lamp/compiler/msil/EventAttributes.java   |   32 -
 src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java |   58 -
 .../epfl/lamp/compiler/msil/FieldAttributes.java   |  119 -
 src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java |  141 -
 .../compiler/msil/GenericParamAndConstraints.java  |   40 -
 .../lamp/compiler/msil/HasCustomModifiers.java     |    9 -
 .../compiler/msil/ICustomAttributeProvider.java    |   57 -
 .../ch/epfl/lamp/compiler/msil/MemberInfo.java     |   47 -
 .../ch/epfl/lamp/compiler/msil/MemberTypes.java    |   81 -
 .../epfl/lamp/compiler/msil/MethodAttributes.java  |  158 -
 .../ch/epfl/lamp/compiler/msil/MethodBase.java     |  198 -
 .../lamp/compiler/msil/MethodImplAttributes.java   |  116 -
 .../ch/epfl/lamp/compiler/msil/MethodInfo.java     |   69 -
 src/msil/ch/epfl/lamp/compiler/msil/Module.java    |  155 -
 .../ch/epfl/lamp/compiler/msil/PEAssembly.java     |   69 -
 src/msil/ch/epfl/lamp/compiler/msil/PEFile.java    |  941 ----
 src/msil/ch/epfl/lamp/compiler/msil/PEModule.java  |  456 --
 src/msil/ch/epfl/lamp/compiler/msil/PEType.java    |  419 --
 .../lamp/compiler/msil/ParameterAttributes.java    |   72 -
 .../ch/epfl/lamp/compiler/msil/ParameterInfo.java  |   76 -
 .../ch/epfl/lamp/compiler/msil/PrimitiveType.java  |   62 -
 .../lamp/compiler/msil/PropertyAttributes.java     |   45 -
 .../ch/epfl/lamp/compiler/msil/PropertyInfo.java   |  104 -
 src/msil/ch/epfl/lamp/compiler/msil/Type.java      | 1142 -----
 .../ch/epfl/lamp/compiler/msil/TypeAttributes.java |  190 -
 src/msil/ch/epfl/lamp/compiler/msil/Version.java   |   71 -
 .../lamp/compiler/msil/emit/AssemblyBuilder.scala  |  125 -
 .../compiler/msil/emit/ConstructorBuilder.scala    |   64 -
 .../lamp/compiler/msil/emit/FieldBuilder.scala     |   60 -
 .../msil/emit/ICustomAttributeSetter.scala         |   18 -
 .../epfl/lamp/compiler/msil/emit/ILGenerator.scala |  539 ---
 .../lamp/compiler/msil/emit/ILPrinterVisitor.scala |  861 ----
 .../ch/epfl/lamp/compiler/msil/emit/Label.scala    |  148 -
 .../lamp/compiler/msil/emit/LocalBuilder.scala     |   44 -
 .../lamp/compiler/msil/emit/MethodBuilder.scala    |   70 -
 .../lamp/compiler/msil/emit/ModuleBuilder.scala    |  136 -
 .../msil/emit/MultipleFilesILPrinterVisitor.scala  |  137 -
 .../ch/epfl/lamp/compiler/msil/emit/OpCode.scala   | 1948 --------
 .../ch/epfl/lamp/compiler/msil/emit/OpCodes.scala  | 1205 -----
 .../lamp/compiler/msil/emit/ParameterBuilder.scala |   44 -
 .../msil/emit/SingleFileILPrinterVisitor.scala     |   93 -
 .../epfl/lamp/compiler/msil/emit/TypeBuilder.scala |  261 --
 .../epfl/lamp/compiler/msil/emit/Visitable.scala   |   24 -
 .../ch/epfl/lamp/compiler/msil/emit/Visitor.scala  |   58 -
 .../compiler/msil/tests/CustomAttributesTest.java  |   31 -
 .../lamp/compiler/msil/tests/JavaTypeTest.java     |   18 -
 .../epfl/lamp/compiler/msil/tests/MembersTest.java |  100 -
 .../epfl/lamp/compiler/msil/tests/TableDump.java   |  311 --
 .../ch/epfl/lamp/compiler/msil/tests/Test.java     |   92 -
 .../epfl/lamp/compiler/msil/util/PECustomMod.java  |   23 -
 .../ch/epfl/lamp/compiler/msil/util/PESection.java |   57 -
 .../ch/epfl/lamp/compiler/msil/util/PEStream.java  |  199 -
 .../ch/epfl/lamp/compiler/msil/util/Signature.java |  129 -
 .../ch/epfl/lamp/compiler/msil/util/Table.java     | 1859 --------
 .../scala/tools/partest/ASMConverters.scala        |    0
 .../scala/tools/partest/AsmNode.scala              |   61 +
 .../scala/tools/partest/BytecodeTest.scala         |  167 +
 .../scala/tools/partest/IcodeComparison.scala      |   73 +
 .../scala/tools/partest/JavapTest.scala            |   26 +
 .../scala/tools/partest/ReplTest.scala             |   77 +
 .../scala/tools/partest/ScriptTest.scala           |   22 +
 .../scala/tools/partest/SigTest.scala              |    0
 src/partest-extras/scala/tools/partest/Util.scala  |   52 +
 .../partest/instrumented/Instrumentation.scala     |   93 +
 .../scala/tools/partest/instrumented/Profiler.java |   82 +
 .../tools/partest/javaagent/ASMTransformer.java    |   49 +
 .../scala/tools/partest/javaagent/MANIFEST.MF      |    0
 .../tools/partest/javaagent/ProfilerVisitor.java   |   59 +
 .../tools/partest/javaagent/ProfilingAgent.java    |   25 +
 src/partest/README                                 |   32 -
 src/partest/scala/tools/partest/AsmNode.scala      |   60 -
 src/partest/scala/tools/partest/BytecodeTest.scala |  129 -
 src/partest/scala/tools/partest/CompilerTest.scala |   61 -
 src/partest/scala/tools/partest/DirectTest.scala   |  131 -
 src/partest/scala/tools/partest/IcodeTest.scala    |   45 -
 src/partest/scala/tools/partest/MemoryTest.scala   |   38 -
 .../scala/tools/partest/PartestDefaults.scala      |   31 -
 src/partest/scala/tools/partest/PartestTask.scala  |  438 --
 src/partest/scala/tools/partest/ReplTest.scala     |   31 -
 .../scala/tools/partest/ScaladocModelTest.scala    |  205 -
 src/partest/scala/tools/partest/SecurityTest.scala |   32 -
 .../tools/partest/StoreReporterDirectTest.scala    |   15 -
 src/partest/scala/tools/partest/TestUtil.scala     |   46 -
 src/partest/scala/tools/partest/antlib.xml         |    4 -
 .../partest/instrumented/Instrumentation.scala     |   92 -
 .../scala/tools/partest/instrumented/Profiler.java |   82 -
 .../tools/partest/javaagent/ASMTransformer.java    |   49 -
 .../tools/partest/javaagent/ProfilerVisitor.java   |   59 -
 .../tools/partest/javaagent/ProfilingAgent.java    |   25 -
 .../scala/tools/partest/nest/AntRunner.scala       |   33 -
 .../scala/tools/partest/nest/CompileManager.scala  |  164 -
 .../tools/partest/nest/ConsoleFileManager.scala    |  213 -
 .../scala/tools/partest/nest/ConsoleRunner.scala   |  239 -
 .../scala/tools/partest/nest/DirectRunner.scala    |   75 -
 .../scala/tools/partest/nest/FileManager.scala     |  124 -
 .../scala/tools/partest/nest/NestRunner.scala      |   15 -
 src/partest/scala/tools/partest/nest/NestUI.scala  |  121 -
 .../scala/tools/partest/nest/PathSettings.scala    |   82 -
 .../tools/partest/nest/ReflectiveRunner.scala      |  102 -
 .../scala/tools/partest/nest/RunnerManager.scala   |  862 ----
 .../scala/tools/partest/nest/RunnerUtils.scala     |   29 -
 .../scala/tools/partest/nest/SBTRunner.scala       |   90 -
 .../scala/tools/partest/nest/TestFile.scala        |   81 -
 src/partest/scala/tools/partest/package.scala      |  126 -
 .../scala/tools/partest/utils/PrintMgr.scala       |   52 -
 .../scala/tools/partest/utils/Properties.scala     |   17 -
 src/reflect/scala/reflect/api/Annotations.scala    |  108 +-
 src/reflect/scala/reflect/api/BuildUtils.scala     |   78 -
 src/reflect/scala/reflect/api/Constants.scala      |   13 +-
 src/reflect/scala/reflect/api/Exprs.scala          |   10 +-
 src/reflect/scala/reflect/api/FlagSets.scala       |   97 +-
 src/reflect/scala/reflect/api/ImplicitTags.scala   |  119 +
 src/reflect/scala/reflect/api/Importers.scala      |  103 -
 src/reflect/scala/reflect/api/Internals.scala      | 1238 +++++
 src/reflect/scala/reflect/api/JavaMirrors.scala    |   56 -
 src/reflect/scala/reflect/api/JavaUniverse.scala   |   73 +-
 src/reflect/scala/reflect/api/Liftables.scala      |   75 +
 src/reflect/scala/reflect/api/Mirror.scala         |   39 +-
 src/reflect/scala/reflect/api/Mirrors.scala        |   58 +-
 src/reflect/scala/reflect/api/Names.scala          |   71 +-
 src/reflect/scala/reflect/api/Position.scala       |  167 +-
 src/reflect/scala/reflect/api/Positions.scala      |   11 +-
 src/reflect/scala/reflect/api/Printers.scala       |   79 +-
 src/reflect/scala/reflect/api/Quasiquotes.scala    |   25 +
 src/reflect/scala/reflect/api/Scopes.scala         |   28 +-
 .../scala/reflect/api/StandardDefinitions.scala    |   45 +-
 .../scala/reflect/api/StandardLiftables.scala      |  235 +
 src/reflect/scala/reflect/api/StandardNames.scala  |   20 +-
 src/reflect/scala/reflect/api/Symbols.scala        |  302 +-
 src/reflect/scala/reflect/api/TagInterop.scala     |   43 -
 src/reflect/scala/reflect/api/TreeCreator.scala    |    3 +-
 src/reflect/scala/reflect/api/Trees.scala          |  798 +---
 src/reflect/scala/reflect/api/TypeCreator.scala    |    3 +-
 src/reflect/scala/reflect/api/TypeTags.scala       |    8 +-
 src/reflect/scala/reflect/api/Types.scala          |  466 +-
 src/reflect/scala/reflect/api/Universe.scala       |   16 +-
 src/reflect/scala/reflect/api/package.scala        |    7 +-
 .../reflect/internal/AnnotationCheckers.scala      |   11 +-
 .../scala/reflect/internal/AnnotationInfos.scala   |  116 +-
 .../scala/reflect/internal/BaseTypeSeqs.scala      |   47 +-
 .../scala/reflect/internal/BuildUtils.scala        |   68 -
 .../scala/reflect/internal/CapturedVariables.scala |    5 +-
 src/reflect/scala/reflect/internal/Chars.scala     |    3 +-
 .../reflect/internal/ClassfileConstants.scala      |   17 +-
 src/reflect/scala/reflect/internal/Constants.scala |   39 +-
 .../scala/reflect/internal/Definitions.scala       | 1060 +++--
 src/reflect/scala/reflect/internal/Depth.scala     |   28 +
 .../reflect/internal/ExistentialsAndSkolems.scala  |   10 +-
 .../scala/reflect/internal/FatalError.scala        |    3 +-
 src/reflect/scala/reflect/internal/FlagSets.scala  |    9 +-
 src/reflect/scala/reflect/internal/Flags.scala     |   47 +-
 .../scala/reflect/internal/FreshNames.scala        |   39 +
 src/reflect/scala/reflect/internal/HasFlags.scala  |   20 +-
 src/reflect/scala/reflect/internal/Importers.scala |  692 +--
 .../scala/reflect/internal/InfoTransformers.scala  |    5 +-
 src/reflect/scala/reflect/internal/Internals.scala |  174 +
 .../reflect/internal/JMethodOrConstructor.scala    |   47 +
 .../scala/reflect/internal/JavaAccFlags.scala      |   84 +
 src/reflect/scala/reflect/internal/Kinds.scala     |  193 +-
 src/reflect/scala/reflect/internal/Mirrors.scala   |  131 +-
 .../reflect/internal/MissingRequirementError.scala |    3 +-
 src/reflect/scala/reflect/internal/Mode.scala      |  141 +
 src/reflect/scala/reflect/internal/Names.scala     |  338 +-
 src/reflect/scala/reflect/internal/Phase.scala     |   12 +-
 src/reflect/scala/reflect/internal/Positions.scala |  278 +-
 .../scala/reflect/internal/Precedence.scala        |   38 +
 src/reflect/scala/reflect/internal/Printers.scala  | 1020 ++++-
 .../scala/reflect/internal/PrivateWithin.scala     |   27 +
 .../reflect/internal/ReificationSupport.scala      | 1146 +++++
 src/reflect/scala/reflect/internal/Required.scala  |   11 +-
 src/reflect/scala/reflect/internal/Scopes.scala    |  186 +-
 .../scala/reflect/internal/StdAttachments.scala    |   61 +-
 .../scala/reflect/internal/StdCreators.scala       |    3 +-
 src/reflect/scala/reflect/internal/StdNames.scala  |  613 ++-
 .../scala/reflect/internal/SymbolPairs.scala       |  302 ++
 .../scala/reflect/internal/SymbolTable.scala       |  157 +-
 src/reflect/scala/reflect/internal/Symbols.scala   | 1313 +++---
 src/reflect/scala/reflect/internal/TreeGen.scala   |  709 ++-
 src/reflect/scala/reflect/internal/TreeInfo.scala  |  395 +-
 src/reflect/scala/reflect/internal/Trees.scala     |  701 +--
 .../scala/reflect/internal/TypeDebugging.scala     |  126 +-
 src/reflect/scala/reflect/internal/Types.scala     | 4776 +++++---------------
 src/reflect/scala/reflect/internal/Variance.scala  |   90 +
 src/reflect/scala/reflect/internal/Variances.scala |  218 +
 .../internal/annotations/compileTimeOnly.scala     |   31 -
 .../reflect/internal/annotations/package.scala     |    6 +
 .../reflect/internal/pickling/ByteCodecs.scala     |   19 +-
 .../reflect/internal/pickling/PickleBuffer.scala   |   31 +-
 .../reflect/internal/pickling/PickleFormat.scala   |    8 +-
 .../reflect/internal/pickling/Translations.scala   |  128 +
 .../reflect/internal/pickling/UnPickler.scala      |  594 +--
 .../reflect/internal/settings/AbsSettings.scala    |    3 +-
 .../internal/settings/MutableSettings.scala        |   36 +-
 .../scala/reflect/internal/tpe/CommonOwners.scala  |   51 +
 .../scala/reflect/internal/tpe/FindMembers.scala   |  288 ++
 .../scala/reflect/internal/tpe/GlbLubs.scala       |  611 +++
 .../scala/reflect/internal/tpe/TypeComparers.scala |  592 +++
 .../reflect/internal/tpe/TypeConstraints.scala     |  268 ++
 .../scala/reflect/internal/tpe/TypeMaps.scala      | 1173 +++++
 .../scala/reflect/internal/tpe/TypeToStrings.scala |   48 +
 .../scala/reflect/internal/transform/Erasure.scala |  100 +-
 .../reflect/internal/transform/PostErasure.scala   |   19 +
 .../reflect/internal/transform/RefChecks.scala     |    5 +-
 .../reflect/internal/transform/Transforms.scala    |   14 +-
 .../scala/reflect/internal/transform/UnCurry.scala |   13 +-
 .../internal/util/AbstractFileClassLoader.scala    |  122 +
 .../scala/reflect/internal/util/Collections.scala  |  106 +-
 .../reflect/internal/util/FreshNameCreator.scala   |   28 +
 .../scala/reflect/internal/util/HashSet.scala      |    6 +-
 .../scala/reflect/internal/util/Origins.scala      |    5 +-
 .../scala/reflect/internal/util/Position.scala     |  476 +-
 .../reflect/internal/util/ScalaClassLoader.scala   |  124 +
 src/reflect/scala/reflect/internal/util/Set.scala  |    6 +-
 .../scala/reflect/internal/util/SourceFile.scala   |   77 +-
 .../scala/reflect/internal/util/Statistics.scala   |   22 +-
 .../scala/reflect/internal/util/StringOps.scala    |   59 +-
 .../internal/util/StripMarginInterpolator.scala    |    5 +-
 .../scala/reflect/internal/util/TableDef.scala     |   37 +-
 .../scala/reflect/internal/util/ThreeValues.scala  |    3 +-
 .../internal/util/TraceSymbolActivity.scala        |   52 +-
 .../scala/reflect/internal/util/TriState.scala     |   28 +
 .../scala/reflect/internal/util/WeakHashSet.scala  |   56 +-
 .../scala/reflect/internal/util/package.scala      |   37 +-
 src/reflect/scala/reflect/io/AbstractFile.scala    |   93 +-
 src/reflect/scala/reflect/io/Directory.scala       |   18 +-
 src/reflect/scala/reflect/io/File.scala            |   95 +-
 .../scala/reflect/io/FileOperationException.scala  |    3 +-
 src/reflect/scala/reflect/io/IOStats.scala         |   32 +
 src/reflect/scala/reflect/io/NoAbstractFile.scala  |   10 +-
 src/reflect/scala/reflect/io/Path.scala            |   88 +-
 src/reflect/scala/reflect/io/PlainFile.scala       |   33 +-
 src/reflect/scala/reflect/io/Streamable.scala      |   22 +-
 .../scala/reflect/io/VirtualDirectory.scala        |   20 +-
 src/reflect/scala/reflect/io/VirtualFile.scala     |   43 +-
 src/reflect/scala/reflect/io/ZipArchive.scala      |  100 +-
 src/reflect/scala/reflect/macros/Aliases.scala     |   22 +-
 src/reflect/scala/reflect/macros/Attachments.scala |   12 +-
 src/reflect/scala/reflect/macros/Context.scala     |   90 -
 src/reflect/scala/reflect/macros/Enclosures.scala  |  104 +-
 src/reflect/scala/reflect/macros/Evals.scala       |   15 +-
 src/reflect/scala/reflect/macros/ExprUtils.scala   |   20 +-
 src/reflect/scala/reflect/macros/FrontEnds.scala   |    9 +-
 .../scala/reflect/macros/Infrastructure.scala      |    9 +-
 src/reflect/scala/reflect/macros/Internals.scala   |   79 +
 src/reflect/scala/reflect/macros/Names.scala       |   51 +-
 src/reflect/scala/reflect/macros/Parsers.scala     |    9 +-
 src/reflect/scala/reflect/macros/Reifiers.scala    |   33 +-
 src/reflect/scala/reflect/macros/TreeBuilder.scala |   72 -
 src/reflect/scala/reflect/macros/Typers.scala      |   93 +-
 src/reflect/scala/reflect/macros/Universe.scala    |  500 +-
 .../scala/reflect/macros/blackbox/Context.scala    |   97 +
 src/reflect/scala/reflect/macros/package.scala     |   17 +-
 .../scala/reflect/macros/whitebox/Context.scala    |   77 +
 src/reflect/scala/reflect/runtime/Gil.scala        |   25 +
 .../scala/reflect/runtime/JavaMirrors.scala        |  655 +--
 .../scala/reflect/runtime/JavaUniverse.scala       |  125 +-
 .../scala/reflect/runtime/JavaUniverseForce.scala  |  448 ++
 .../scala/reflect/runtime/ReflectSetup.scala       |    5 +-
 .../scala/reflect/runtime/ReflectionUtils.scala    |   95 +-
 src/reflect/scala/reflect/runtime/Settings.scala   |   13 +-
 .../scala/reflect/runtime/SymbolLoaders.scala      |  107 +-
 .../scala/reflect/runtime/SymbolTable.scala        |   24 +-
 .../scala/reflect/runtime/SynchronizedOps.scala    |   61 +-
 .../reflect/runtime/SynchronizedSymbols.scala      |  222 +-
 .../scala/reflect/runtime/SynchronizedTypes.scala  |   87 +-
 .../scala/reflect/runtime/ThreadLocalStorage.scala |   28 +
 .../scala/reflect/runtime/TwoWayCache.scala        |   15 +-
 .../scala/reflect/runtime/TwoWayCaches.scala       |   68 +
 src/reflect/scala/reflect/runtime/package.scala    |    9 +-
 .../scala/tools/nsc/Interpreter.scala              |    0
 .../scala/tools/nsc/InterpreterLoop.scala          |    0
 src/repl/scala/tools/nsc/MainGenericRunner.scala   |  106 +
 .../nsc/interpreter/AbstractFileClassLoader.scala  |    7 +
 .../nsc/interpreter/AbstractOrMissingHandler.scala |    0
 .../scala/tools/nsc/interpreter/CommandLine.scala  |   13 +
 .../scala/tools/nsc/interpreter/Completion.scala   |   47 +
 .../tools/nsc/interpreter/CompletionAware.scala    |   53 +
 .../tools/nsc/interpreter/CompletionOutput.scala   |   85 +
 .../nsc/interpreter/ConsoleReaderHelper.scala      |  160 +
 .../scala/tools/nsc/interpreter/Delimited.scala    |   41 +
 .../scala/tools/nsc/interpreter/ExprTyper.scala    |   86 +
 .../scala/tools/nsc/interpreter/Formatting.scala   |    0
 .../scala/tools/nsc/interpreter/IBindings.java     |   45 +
 src/repl/scala/tools/nsc/interpreter/ILoop.scala   |  929 ++++
 src/repl/scala/tools/nsc/interpreter/IMain.scala   | 1302 ++++++
 .../scala/tools/nsc/interpreter/ISettings.scala    |   54 +
 src/repl/scala/tools/nsc/interpreter/Imports.scala |  183 +
 .../tools/nsc/interpreter/InteractiveReader.scala  |   49 +
 .../tools/nsc/interpreter/JLineCompletion.scala    |  350 ++
 .../scala/tools/nsc/interpreter/JLineReader.scala  |   75 +
 .../scala/tools/nsc/interpreter/JavapClass.scala   |  742 +++
 src/repl/scala/tools/nsc/interpreter/Logger.scala  |   14 +
 .../scala/tools/nsc/interpreter/LoopCommands.scala |   88 +
 .../tools/nsc/interpreter/MemberHandlers.scala     |  220 +
 .../scala/tools/nsc/interpreter/NamedParam.scala   |   46 +
 src/repl/scala/tools/nsc/interpreter/Naming.scala  |  105 +
 src/repl/scala/tools/nsc/interpreter/Parsed.scala  |   60 +
 .../scala/tools/nsc/interpreter/Pasted.scala       |    0
 src/repl/scala/tools/nsc/interpreter/Phased.scala  |  144 +
 src/repl/scala/tools/nsc/interpreter/Power.scala   |  326 ++
 .../scala/tools/nsc/interpreter/ReplConfig.scala   |   53 +
 src/repl/scala/tools/nsc/interpreter/ReplDir.scala |   48 +
 .../scala/tools/nsc/interpreter/ReplGlobal.scala   |   64 +
 .../scala/tools/nsc/interpreter/ReplProps.scala    |   34 +
 .../scala/tools/nsc/interpreter/ReplReporter.scala |    0
 .../scala/tools/nsc/interpreter/ReplStrings.scala  |   32 +
 .../scala/tools/nsc/interpreter/ReplVals.scala     |   82 +
 src/repl/scala/tools/nsc/interpreter/Results.scala |   22 +
 .../scala/tools/nsc/interpreter/RichClass.scala    |   36 +
 .../scala/tools/nsc/interpreter/SimpleReader.scala |   41 +
 .../scala/tools/nsc/interpreter/StdReplTags.scala  |   15 +
 src/repl/scala/tools/nsc/interpreter/package.scala |  199 +
 .../interpreter/session/FileBackedHistory.scala    |    0
 .../tools/nsc/interpreter/session/History.scala    |   22 +
 .../nsc/interpreter/session/JLineHistory.scala     |    0
 .../nsc/interpreter/session/SimpleHistory.scala    |   63 +
 .../tools/nsc/interpreter/session/package.scala    |   23 +
 src/scalacheck/org/scalacheck/Arbitrary.scala      |  426 --
 src/scalacheck/org/scalacheck/Arg.scala            |   20 -
 src/scalacheck/org/scalacheck/Commands.scala       |  148 -
 .../org/scalacheck/ConsoleReporter.scala           |   52 -
 src/scalacheck/org/scalacheck/Gen.scala            |  531 ---
 src/scalacheck/org/scalacheck/Pretty.scala         |  127 -
 src/scalacheck/org/scalacheck/Prop.scala           |  763 ----
 src/scalacheck/org/scalacheck/Properties.scala     |   82 -
 src/scalacheck/org/scalacheck/Shrink.scala         |  208 -
 src/scalacheck/org/scalacheck/Test.scala           |  269 --
 src/scalacheck/org/scalacheck/util/Buildable.scala |   64 -
 .../org/scalacheck/util/CmdLineParser.scala        |  101 -
 src/scalacheck/org/scalacheck/util/FreqMap.scala   |   65 -
 src/scalacheck/org/scalacheck/util/StdRand.scala   |   12 -
 src/scaladoc/scala/tools/ant/Scaladoc.scala        |  695 +++
 src/scaladoc/scala/tools/nsc/ScalaDoc.scala        |   72 +
 src/scaladoc/scala/tools/nsc/doc/DocFactory.scala  |  132 +
 src/scaladoc/scala/tools/nsc/doc/DocParser.scala   |   69 +
 src/scaladoc/scala/tools/nsc/doc/Index.scala       |   14 +
 .../scala/tools/nsc/doc/ScaladocAnalyzer.scala     |  261 ++
 .../scala/tools/nsc/doc/ScaladocGlobal.scala       |   49 +
 src/scaladoc/scala/tools/nsc/doc/Settings.scala    |  350 ++
 .../scala/tools/nsc/doc/Uncompilable.scala         |   51 +
 .../scala/tools/nsc/doc/Universe.scala             |    0
 .../tools/nsc/doc/base/CommentFactoryBase.scala    |  932 ++++
 src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala |   13 +
 .../tools/nsc/doc/base/MemberLookupBase.scala      |  202 +
 .../scala/tools/nsc/doc/base/comment/Body.scala    |   93 +
 .../scala/tools/nsc/doc/base/comment/Comment.scala |  131 +
 .../scala/tools/nsc/doc/doclet/Generator.scala     |   30 +
 .../scala/tools/nsc/doc/doclet/Indexer.scala       |    0
 .../scala/tools/nsc/doc/doclet/Universer.scala     |    0
 src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala |   19 +
 .../scala/tools/nsc/doc/html/HtmlFactory.scala     |  152 +
 .../scala/tools/nsc/doc/html/HtmlPage.scala        |  222 +
 src/scaladoc/scala/tools/nsc/doc/html/Page.scala   |  103 +
 .../scala/tools/nsc/doc/html/SyntaxHigh.scala      |  287 ++
 .../scala/tools/nsc/doc/html/page/Index.scala      |  133 +
 .../tools/nsc/doc/html/page/IndexScript.scala      |   69 +
 .../tools/nsc/doc/html/page/ReferenceIndex.scala   |   61 +
 .../scala/tools/nsc/doc/html/page/Source.scala     |  127 +
 .../scala/tools/nsc/doc/html/page/Template.scala   |  988 ++++
 .../doc/html/page/diagram/DiagramGenerator.scala   |    0
 .../nsc/doc/html/page/diagram/DiagramStats.scala   |   66 +
 .../html/page/diagram/DotDiagramGenerator.scala    |  508 +++
 .../nsc/doc/html/page/diagram/DotRunner.scala      |  225 +
 .../tools/nsc/doc/html/resource/lib/arrow-down.png |  Bin
 .../nsc/doc/html/resource/lib/arrow-right.png      |  Bin
 .../tools/nsc/doc/html/resource/lib/class.png      |  Bin
 .../tools/nsc/doc/html/resource/lib/class_big.png  |  Bin
 .../nsc/doc/html/resource/lib/class_diagram.png    |  Bin
 .../doc/html/resource/lib/class_to_object_big.png  |  Bin
 .../nsc/doc/html/resource/lib/constructorsbg.gif   |  Bin
 .../nsc/doc/html/resource/lib/conversionbg.gif     |  Bin
 .../tools/nsc/doc/html/resource/lib/defbg-blue.gif |  Bin
 .../nsc/doc/html/resource/lib/defbg-green.gif      |  Bin
 .../tools/nsc/doc/html/resource/lib/diagrams.css   |    0
 .../tools/nsc/doc/html/resource/lib/diagrams.js    |    0
 .../nsc/doc/html/resource/lib/filter_box_left.png  |  Bin
 .../nsc/doc/html/resource/lib/filter_box_left.psd  |  Bin
 .../nsc/doc/html/resource/lib/filter_box_left2.gif |  Bin
 .../nsc/doc/html/resource/lib/filter_box_right.png |  Bin
 .../nsc/doc/html/resource/lib/filter_box_right.psd |  Bin
 .../tools/nsc/doc/html/resource/lib/filterbg.gif   |  Bin
 .../nsc/doc/html/resource/lib/filterboxbarbg.gif   |  Bin
 .../nsc/doc/html/resource/lib/filterboxbarbg.png   |  Bin
 .../nsc/doc/html/resource/lib/filterboxbg.gif      |  Bin
 .../nsc/doc/html/resource/lib/fullcommenttopbg.gif |  Bin
 .../tools/nsc/doc/html/resource/lib/index.css      |    0
 .../scala/tools/nsc/doc/html/resource/lib/index.js |  541 +++
 .../tools/nsc/doc/html/resource/lib/jquery-ui.js   |    0
 .../tools/nsc/doc/html/resource/lib/jquery.js      |    0
 .../nsc/doc/html/resource/lib/jquery.layout.js     |    0
 .../nsc/doc/html/resource/lib/modernizr.custom.js  |    0
 .../nsc/doc/html/resource/lib/navigation-li-a.png  |  Bin
 .../nsc/doc/html/resource/lib/navigation-li.png    |  Bin
 .../tools/nsc/doc/html/resource/lib/object.png     |  Bin
 .../tools/nsc/doc/html/resource/lib/object_big.png |  Bin
 .../nsc/doc/html/resource/lib/object_diagram.png   |  Bin
 .../doc/html/resource/lib/object_to_class_big.png  |  Bin
 .../doc/html/resource/lib/object_to_trait_big.png  |  Bin
 .../doc/html/resource/lib/object_to_type_big.png   |  Bin
 .../tools/nsc/doc/html/resource/lib/ownderbg2.gif  |  Bin
 .../tools/nsc/doc/html/resource/lib/ownerbg.gif    |  Bin
 .../tools/nsc/doc/html/resource/lib/ownerbg2.gif   |  Bin
 .../tools/nsc/doc/html/resource/lib/package.png    |  Bin
 .../nsc/doc/html/resource/lib/package_big.png      |  Bin
 .../tools/nsc/doc/html/resource/lib/packagesbg.gif |  Bin
 .../tools/nsc/doc/html/resource/lib/raphael-min.js |    0
 .../tools/nsc/doc/html/resource/lib/ref-index.css  |    0
 .../tools/nsc/doc/html/resource/lib/remove.png     |  Bin
 .../tools/nsc/doc/html/resource/lib/remove.psd     |  Bin
 .../tools/nsc/doc/html/resource/lib/scheduler.js   |    0
 .../doc/html/resource/lib/selected-implicits.png   |  Bin
 .../html/resource/lib/selected-right-implicits.png |  Bin
 .../nsc/doc/html/resource/lib/selected-right.png   |  Bin
 .../tools/nsc/doc/html/resource/lib/selected.png   |  Bin
 .../nsc/doc/html/resource/lib/selected2-right.png  |  Bin
 .../tools/nsc/doc/html/resource/lib/selected2.png  |  Bin
 .../nsc/doc/html/resource/lib/signaturebg.gif      |  Bin
 .../nsc/doc/html/resource/lib/signaturebg2.gif     |  Bin
 .../tools/nsc/doc/html/resource/lib/template.css   |    0
 .../tools/nsc/doc/html/resource/lib/template.js    |    0
 .../nsc/doc/html/resource/lib/tools.tooltip.js     |    0
 .../tools/nsc/doc/html/resource/lib/trait.png      |  Bin
 .../tools/nsc/doc/html/resource/lib/trait_big.png  |  Bin
 .../nsc/doc/html/resource/lib/trait_diagram.png    |  Bin
 .../doc/html/resource/lib/trait_to_object_big.png  |  Bin
 .../scala/tools/nsc/doc/html/resource/lib/type.png |  Bin
 .../tools/nsc/doc/html/resource/lib/type_big.png   |  Bin
 .../nsc/doc/html/resource/lib/type_diagram.png     |  Bin
 .../tools/nsc/doc/html/resource/lib/type_tags.ai   |    0
 .../doc/html/resource/lib/type_to_object_big.png   |  Bin
 .../tools/nsc/doc/html/resource/lib/typebg.gif     |  Bin
 .../tools/nsc/doc/html/resource/lib/unselected.png |  Bin
 .../nsc/doc/html/resource/lib/valuemembersbg.gif   |  Bin
 .../tools/nsc/doc/html/resource/lib/versions.txt   |    0
 .../scala/tools/nsc/doc/model/CommentFactory.scala |   98 +
 .../scala/tools/nsc/doc/model/Entity.scala         |  597 +++
 .../tools/nsc/doc/model/IndexModelFactory.scala    |   56 +
 .../scala/tools/nsc/doc/model/MemberLookup.scala   |   56 +
 .../scala/tools/nsc/doc/model/ModelFactory.scala   | 1024 +++++
 .../doc/model/ModelFactoryImplicitSupport.scala    |  575 +++
 .../nsc/doc/model/ModelFactoryTypeSupport.scala    |  315 ++
 .../scala/tools/nsc/doc/model/TreeEntity.scala     |    0
 .../scala/tools/nsc/doc/model/TreeFactory.scala    |   95 +
 .../scala/tools/nsc/doc/model/TypeEntity.scala     |    0
 .../scala/tools/nsc/doc/model/ValueArgument.scala  |    0
 .../scala/tools/nsc/doc/model/Visibility.scala     |    0
 .../tools/nsc/doc/model/diagram/Diagram.scala      |  137 +
 .../doc/model/diagram/DiagramDirectiveParser.scala |  257 ++
 .../nsc/doc/model/diagram/DiagramFactory.scala     |  270 ++
 .../scala/tools/partest/ScaladocModelTest.scala    |  203 +
 src/scalap/decoder.properties                      |    2 +-
 src/scalap/scala/tools/scalap/Arguments.scala      |    9 +-
 .../scala/tools/scalap/ByteArrayReader.scala       |    4 +-
 src/scalap/scala/tools/scalap/Classfile.scala      |   10 +-
 src/scalap/scala/tools/scalap/CodeWriter.scala     |    7 +-
 src/scalap/scala/tools/scalap/JavaWriter.scala     |    9 +-
 src/scalap/scala/tools/scalap/Main.scala           |    5 +-
 src/scalap/scala/tools/scalap/MetaParser.scala     |    4 +-
 .../scala/tools/scalap/scalax/rules/Arrows.scala   |   37 -
 .../scala/tools/scalap/scalax/rules/Functors.scala |   81 -
 .../scala/tools/scalap/scalax/rules/Input.scala    |   68 -
 .../tools/scalap/scalax/rules/Memoisable.scala     |   16 +-
 .../scala/tools/scalap/scalax/rules/Monad.scala    |   46 -
 .../scala/tools/scalap/scalax/rules/Result.scala   |   45 +-
 .../scala/tools/scalap/scalax/rules/Rule.scala     |  100 +-
 .../scala/tools/scalap/scalax/rules/Rules.scala    |   63 +-
 .../scala/tools/scalap/scalax/rules/SeqRule.scala  |   52 +-
 .../scala/tools/scalap/scalax/rules/package.scala  |    9 -
 .../scalax/rules/scalasig/ClassFileParser.scala    |   72 +-
 .../tools/scalap/scalax/rules/scalasig/Flags.scala |    2 +-
 .../scalap/scalax/rules/scalasig/ScalaSig.scala    |  119 +-
 .../scalax/rules/scalasig/ScalaSigPrinter.scala    |   31 +-
 .../scalap/scalax/rules/scalasig/Symbol.scala      |   44 +-
 .../tools/scalap/scalax/rules/scalasig/Type.scala  |   31 +-
 src/swing/doc/README                               |   39 -
 src/swing/doc/build.xml                            |   83 -
 src/swing/scala/swing/AbstractButton.scala         |   87 -
 src/swing/scala/swing/Action.scala                 |  157 -
 src/swing/scala/swing/Adjustable.scala             |   53 -
 src/swing/scala/swing/Alignment.scala              |   32 -
 src/swing/scala/swing/Applet.scala                 |   46 -
 src/swing/scala/swing/BorderPanel.scala            |   60 -
 src/swing/scala/swing/BoxPanel.scala               |   26 -
 src/swing/scala/swing/BufferWrapper.scala          |   34 -
 src/swing/scala/swing/Button.scala                 |   37 -
 src/swing/scala/swing/ButtonGroup.scala            |   40 -
 src/swing/scala/swing/CheckBox.scala               |   26 -
 src/swing/scala/swing/ComboBox.scala               |  208 -
 src/swing/scala/swing/Component.scala              |  295 --
 src/swing/scala/swing/Container.scala              |   64 -
 src/swing/scala/swing/EditorPane.scala             |   31 -
 src/swing/scala/swing/FileChooser.scala            |  111 -
 src/swing/scala/swing/FlowPanel.scala              |   46 -
 src/swing/scala/swing/Font.scala.disabled          |   70 -
 src/swing/scala/swing/FormattedTextField.scala     |   44 -
 src/swing/scala/swing/GridBagPanel.scala           |  113 -
 src/swing/scala/swing/GridPanel.scala              |   51 -
 src/swing/scala/swing/Label.scala                  |   62 -
 src/swing/scala/swing/LayoutContainer.scala        |   73 -
 src/swing/scala/swing/ListView.scala               |  248 -
 src/swing/scala/swing/MainFrame.scala              |   21 -
 src/swing/scala/swing/Menu.scala                   |   70 -
 src/swing/scala/swing/Orientable.scala             |   24 -
 src/swing/scala/swing/Orientation.scala            |   19 -
 src/swing/scala/swing/Oriented.scala               |   34 -
 src/swing/scala/swing/Panel.scala                  |   20 -
 src/swing/scala/swing/PasswordField.scala          |   38 -
 src/swing/scala/swing/ProgressBar.scala            |   44 -
 src/swing/scala/swing/Publisher.scala              |  174 -
 src/swing/scala/swing/RadioButton.scala            |   25 -
 src/swing/scala/swing/Reactions.scala              |   56 -
 src/swing/scala/swing/Reactor.scala                |   30 -
 src/swing/scala/swing/RichWindow.scala             |  195 -
 src/swing/scala/swing/RootPanel.scala              |   38 -
 src/swing/scala/swing/ScrollBar.scala              |   39 -
 src/swing/scala/swing/ScrollPane.scala             |   88 -
 src/swing/scala/swing/Scrollable.scala             |   43 -
 src/swing/scala/swing/Separator.scala              |   23 -
 src/swing/scala/swing/SequentialContainer.scala    |   36 -
 src/swing/scala/swing/SimpleSwingApplication.scala |   36 -
 src/swing/scala/swing/Slider.scala                 |   71 -
 src/swing/scala/swing/SplitPane.scala              |   66 -
 src/swing/scala/swing/Swing.scala                  |  140 -
 src/swing/scala/swing/SwingActor.scala             |   15 -
 src/swing/scala/swing/SwingApplication.scala       |   17 -
 src/swing/scala/swing/SwingWorker.scala            |   23 -
 src/swing/scala/swing/TabbedPane.scala             |  134 -
 src/swing/scala/swing/Table.scala                  |  320 --
 src/swing/scala/swing/TextArea.scala               |   47 -
 src/swing/scala/swing/TextComponent.scala          |   78 -
 src/swing/scala/swing/TextField.scala              |   79 -
 src/swing/scala/swing/ToggleButton.scala           |   25 -
 src/swing/scala/swing/UIElement.scala              |  133 -
 src/swing/scala/swing/Window.scala                 |   81 -
 src/swing/scala/swing/event/ActionEvent.scala      |   18 -
 src/swing/scala/swing/event/AdjustingEvent.scala   |   24 -
 .../scala/swing/event/BackgroundChanged.scala      |   14 -
 src/swing/scala/swing/event/ButtonClicked.scala    |   15 -
 src/swing/scala/swing/event/CaretUpdate.scala      |   14 -
 src/swing/scala/swing/event/ComponentEvent.scala   |   16 -
 src/swing/scala/swing/event/ContainerEvent.scala   |   17 -
 src/swing/scala/swing/event/EditDone.scala         |   14 -
 src/swing/scala/swing/event/Event.scala            |   14 -
 src/swing/scala/swing/event/FocusEvent.scala       |   23 -
 src/swing/scala/swing/event/FontChanged.scala      |   14 -
 .../scala/swing/event/ForegroundChanged.scala      |   14 -
 src/swing/scala/swing/event/InputEvent.scala       |   20 -
 src/swing/scala/swing/event/Key.scala              |  232 -
 src/swing/scala/swing/event/KeyEvent.scala         |   43 -
 src/swing/scala/swing/event/ListEvent.scala        |   43 -
 src/swing/scala/swing/event/MouseEvent.scala       |   79 -
 src/swing/scala/swing/event/SelectionEvent.scala   |   34 -
 src/swing/scala/swing/event/TableEvent.scala       |   46 -
 src/swing/scala/swing/event/UIEvent.scala          |   21 -
 src/swing/scala/swing/event/ValueChanged.scala     |   18 -
 src/swing/scala/swing/event/WindowActivated.scala  |   14 -
 src/swing/scala/swing/event/WindowClosed.scala     |    4 -
 src/swing/scala/swing/event/WindowClosing.scala    |   14 -
 .../scala/swing/event/WindowDeactivated.scala      |   14 -
 .../scala/swing/event/WindowDeiconified.scala      |   14 -
 src/swing/scala/swing/event/WindowEvent.scala      |   14 -
 src/swing/scala/swing/event/WindowIconified.scala  |   14 -
 src/swing/scala/swing/event/WindowOpened.scala     |   14 -
 src/swing/scala/swing/model/Matrix.scala           |  121 -
 src/swing/scala/swing/package.scala                |   23 -
 src/swing/swing.version.properties                 |    2 -
 starr.number                                       |    2 -
 test/attic/files/cli/test1/Main.check.j9vm5        |    4 -
 test/attic/files/cli/test1/Main.check.java         |    6 -
 test/attic/files/cli/test1/Main.check.java5        |    6 -
 test/attic/files/cli/test1/Main.check.java5_api    |   19 -
 test/attic/files/cli/test1/Main.check.java5_j9     |    4 -
 test/attic/files/cli/test1/Main.check.javac        |   19 -
 test/attic/files/cli/test1/Main.check.javac5       |   24 -
 test/attic/files/cli/test1/Main.check.javac6       |   29 -
 test/attic/files/cli/test1/Main.check.jikes        |    3 -
 test/attic/files/cli/test1/Main.check.jikes5       |    3 -
 test/attic/files/cli/test1/Main.check.scala        |   24 -
 test/attic/files/cli/test1/Main.check.scala_api    |   33 -
 test/attic/files/cli/test1/Main.check.scala_j9     |   15 -
 test/attic/files/cli/test1/Main.check.scalac       |   63 -
 test/attic/files/cli/test1/Main.check.scalaint     |   45 -
 test/attic/files/cli/test1/Main.java               |    8 -
 test/attic/files/cli/test1/Main.scala              |    8 -
 test/attic/files/cli/test2/Main.check.j9vm5        |    4 -
 test/attic/files/cli/test2/Main.check.java         |    6 -
 test/attic/files/cli/test2/Main.check.java5        |    6 -
 test/attic/files/cli/test2/Main.check.java5_api    |   24 -
 test/attic/files/cli/test2/Main.check.java5_j9     |   36 -
 test/attic/files/cli/test2/Main.check.javac        |   27 -
 test/attic/files/cli/test2/Main.check.javac5       |   28 -
 test/attic/files/cli/test2/Main.check.javac6       |   33 -
 test/attic/files/cli/test2/Main.check.jikes        |    9 -
 test/attic/files/cli/test2/Main.check.jikes5       |    9 -
 test/attic/files/cli/test2/Main.check.scala        |   24 -
 test/attic/files/cli/test2/Main.check.scala_api    |   37 -
 test/attic/files/cli/test2/Main.check.scala_j9     |   15 -
 test/attic/files/cli/test2/Main.check.scalac       |   63 -
 test/attic/files/cli/test2/Main.check.scalaint     |   45 -
 test/attic/files/cli/test2/Main.java               |    8 -
 test/attic/files/cli/test2/Main.scala              |    8 -
 test/attic/files/cli/test3/Main.check.j9vm5        |    5 -
 test/attic/files/cli/test3/Main.check.java         |   10 -
 test/attic/files/cli/test3/Main.check.java5        |   10 -
 test/attic/files/cli/test3/Main.check.java5_api    |   29 -
 test/attic/files/cli/test3/Main.check.java5_j9     |   36 -
 test/attic/files/cli/test3/Main.check.javac        |   33 -
 test/attic/files/cli/test3/Main.check.javac5       |   31 -
 test/attic/files/cli/test3/Main.check.javac6       |   36 -
 test/attic/files/cli/test3/Main.check.jikes        |   14 -
 test/attic/files/cli/test3/Main.check.jikes5       |   14 -
 test/attic/files/cli/test3/Main.check.scala        |   28 -
 test/attic/files/cli/test3/Main.check.scala_api    |   41 -
 test/attic/files/cli/test3/Main.check.scala_j9     |   19 -
 test/attic/files/cli/test3/Main.check.scalac       |   63 -
 test/attic/files/cli/test3/Main.check.scalaint     |   48 -
 test/attic/files/cli/test3/Main.java               |   10 -
 test/attic/files/cli/test3/Main.scala              |   10 -
 .../buildmanager/overloaded_1}/A.scala             |    0
 .../buildmanager/overloaded_1}/overloaded_1.check  |    0
 .../buildmanager/overloaded_1}/overloaded_1.test   |    0
 .../buildmanager}/t4245/A.scala                    |    0
 .../buildmanager}/t4245/t4245.check                |    0
 .../buildmanager}/t4245/t4245.test                 |    0
 test/disabled/continuations-neg/infer0.check       |    4 -
 test/disabled/continuations-neg/infer0.scala       |   12 -
 test/disabled/pos/spec-List.scala                  |    2 +-
 test/disabled/presentation/akka.flags              |    4 +-
 .../akka/src/akka/dispatch/Dispatchers.scala       |   16 +-
 test/disabled/presentation/doc/doc.scala           |   38 +-
 test/disabled/presentation/doc/src/p/Base.scala    |    2 +-
 test/disabled/presentation/simple-tests.check      |    2 -
 test/disabled/presentation/simple-tests.opts       |    4 +-
 test/disabled/run/lisp.scala                       |   16 +-
 test/disabled/run/t4146.scala                      |    7 +
 test/disabled/run/t4602.scala                      |   57 +
 test/disabled/run/t6026.check                      |    9 -
 test/disabled/run/t6026.scala                      |    9 -
 test/files/ant/README                              |   42 -
 test/files/ant/fsc001-build.check                  |   14 -
 test/files/ant/fsc001-build.xml                    |   26 -
 test/files/ant/fsc001.scala                        |    7 -
 test/files/ant/fsc002-build.check                  |   14 -
 test/files/ant/fsc002-build.xml                    |   28 -
 test/files/ant/fsc002.scala                        |    6 -
 test/files/ant/fsc003-build.check                  |   14 -
 test/files/ant/fsc003-build.xml                    |   25 -
 test/files/ant/fsc003.scala                        |    7 -
 test/files/ant/imported.xml                        |  155 -
 test/files/ant/scalac001-build.check               |   14 -
 test/files/ant/scalac001-build.xml                 |   26 -
 test/files/ant/scalac001.scala                     |    6 -
 test/files/ant/scalac002-build.check               |   14 -
 test/files/ant/scalac002-build.xml                 |   28 -
 test/files/ant/scalac002.scala                     |    7 -
 test/files/ant/scalac003-build.check               |   14 -
 test/files/ant/scalac003-build.xml                 |   25 -
 test/files/ant/scalac003.scala                     |    7 -
 test/files/ant/scalac004-build.check               |   24 -
 test/files/ant/scalac004-build.xml                 |   26 -
 test/files/ant/scalac004.scala                     |   11 -
 test/files/ant/scaladoc-build.check                |   15 -
 test/files/ant/scaladoc-build.xml                  |   26 -
 test/files/ant/scaladoc.scala                      |    7 -
 test/files/bench/equality/eqeq.eqlog               |   84 +-
 test/files/buildmanager/annotated/A.scala          |    1 -
 test/files/buildmanager/annotated/annotated.check  |    6 -
 test/files/buildmanager/annotated/annotated.test   |    2 -
 test/files/buildmanager/freshnames/A.scala         |   16 -
 test/files/buildmanager/freshnames/B.scala         |    4 -
 .../files/buildmanager/freshnames/freshnames.check |    6 -
 test/files/buildmanager/freshnames/freshnames.test |    2 -
 test/files/buildmanager/infer/A.scala              |   16 -
 test/files/buildmanager/infer/infer.check          |    6 -
 test/files/buildmanager/infer/infer.test           |    2 -
 .../buildmanager/namesdefaults/defparam-use.scala  |    5 -
 .../buildmanager/namesdefaults/defparam.scala      |    7 -
 .../buildmanager/namesdefaults/namesdefaults.check |    9 -
 .../buildmanager/namesdefaults/namesdefaults.test  |    3 -
 test/files/buildmanager/simpletest/A.scala         |    3 -
 test/files/buildmanager/simpletest/B.scala         |    3 -
 .../simpletest/simpletest.changes/A1.scala         |    1 -
 .../files/buildmanager/simpletest/simpletest.check |   11 -
 test/files/buildmanager/simpletest/simpletest.test |    3 -
 test/files/buildmanager/t2280/A.scala              |    1 -
 test/files/buildmanager/t2280/B.java               |    2 -
 test/files/buildmanager/t2280/t2280.check          |    6 -
 test/files/buildmanager/t2280/t2280.test           |    2 -
 test/files/buildmanager/t2556_1/A.scala            |    3 -
 test/files/buildmanager/t2556_1/B.scala            |    3 -
 .../buildmanager/t2556_1/t2556_1.changes/A2.scala  |    4 -
 test/files/buildmanager/t2556_1/t2556_1.check      |   12 -
 test/files/buildmanager/t2556_1/t2556_1.test       |    3 -
 test/files/buildmanager/t2556_2/A.scala            |    4 -
 test/files/buildmanager/t2556_2/B.scala            |    2 -
 test/files/buildmanager/t2556_2/C.scala            |    4 -
 .../buildmanager/t2556_2/t2556_2.changes/A2.scala  |    4 -
 test/files/buildmanager/t2556_2/t2556_2.check      |   13 -
 test/files/buildmanager/t2556_2/t2556_2.test       |    3 -
 test/files/buildmanager/t2556_3/A.scala            |    5 -
 test/files/buildmanager/t2556_3/B.scala            |    5 -
 test/files/buildmanager/t2556_3/C.scala            |    2 -
 .../buildmanager/t2556_3/t2556_3.changes/A2.scala  |    5 -
 test/files/buildmanager/t2556_3/t2556_3.check      |   18 -
 test/files/buildmanager/t2556_3/t2556_3.test       |    3 -
 test/files/buildmanager/t2557/A.scala              |    4 -
 test/files/buildmanager/t2557/B.scala              |    4 -
 test/files/buildmanager/t2557/C.scala              |    3 -
 test/files/buildmanager/t2557/D.scala              |    1 -
 test/files/buildmanager/t2557/E.scala              |    1 -
 test/files/buildmanager/t2557/F.scala              |    4 -
 .../buildmanager/t2557/t2557.changes/D2.scala      |    2 -
 test/files/buildmanager/t2557/t2557.check          |   10 -
 test/files/buildmanager/t2557/t2557.test           |    3 -
 test/files/buildmanager/t2559/A.scala              |    5 -
 test/files/buildmanager/t2559/D.scala              |    4 -
 .../buildmanager/t2559/t2559.changes/A2.scala      |    5 -
 test/files/buildmanager/t2559/t2559.check          |    9 -
 test/files/buildmanager/t2559/t2559.test           |    3 -
 test/files/buildmanager/t2562/A.scala              |    7 -
 test/files/buildmanager/t2562/B.scala              |    8 -
 .../buildmanager/t2562/t2562.changes/A2.scala      |    8 -
 test/files/buildmanager/t2562/t2562.check          |   12 -
 test/files/buildmanager/t2562/t2562.test           |    3 -
 test/files/buildmanager/t2649/A.scala              |    3 -
 test/files/buildmanager/t2649/B.scala              |    4 -
 .../buildmanager/t2649/t2649.changes/A2.scala      |    4 -
 test/files/buildmanager/t2649/t2649.check          |    9 -
 test/files/buildmanager/t2649/t2649.test           |    3 -
 test/files/buildmanager/t2650_1/A.scala            |    4 -
 test/files/buildmanager/t2650_1/B.scala            |    3 -
 .../buildmanager/t2650_1/t2650_1.changes/A2.scala  |    3 -
 test/files/buildmanager/t2650_1/t2650_1.check      |   12 -
 test/files/buildmanager/t2650_1/t2650_1.test       |    3 -
 test/files/buildmanager/t2650_2/A.scala            |    3 -
 test/files/buildmanager/t2650_2/B.scala            |    4 -
 .../buildmanager/t2650_2/t2650_2.changes/A2.scala  |    4 -
 test/files/buildmanager/t2650_2/t2650_2.check      |   14 -
 test/files/buildmanager/t2650_2/t2650_2.test       |    3 -
 test/files/buildmanager/t2650_3/A.scala            |    4 -
 test/files/buildmanager/t2650_3/B.scala            |    3 -
 .../buildmanager/t2650_3/t2650_3.changes/A2.scala  |    4 -
 test/files/buildmanager/t2650_3/t2650_3.check      |   14 -
 test/files/buildmanager/t2650_3/t2650_3.test       |    3 -
 test/files/buildmanager/t2650_4/A.scala            |    5 -
 test/files/buildmanager/t2650_4/B.scala            |    3 -
 .../buildmanager/t2650_4/t2650_4.changes/A2.scala  |    5 -
 test/files/buildmanager/t2650_4/t2650_4.check      |   14 -
 test/files/buildmanager/t2650_4/t2650_4.test       |    3 -
 test/files/buildmanager/t2651_2/A.scala            |    1 -
 .../buildmanager/t2651_2/t2651_2.changes/A2.scala  |    1 -
 test/files/buildmanager/t2651_2/t2651_2.check      |    6 -
 test/files/buildmanager/t2651_2/t2651_2.test       |    3 -
 test/files/buildmanager/t2651_3/A.scala            |    3 -
 .../buildmanager/t2651_3/t2651_3.changes/A2.scala  |    3 -
 test/files/buildmanager/t2651_3/t2651_3.check      |    6 -
 test/files/buildmanager/t2651_3/t2651_3.test       |    3 -
 test/files/buildmanager/t2651_4/A.scala            |    5 -
 test/files/buildmanager/t2651_4/B.scala            |    3 -
 .../buildmanager/t2651_4/t2651_4.changes/A2.scala  |    5 -
 test/files/buildmanager/t2651_4/t2651_4.check      |   13 -
 test/files/buildmanager/t2651_4/t2651_4.test       |    3 -
 test/files/buildmanager/t2653/A.scala              |    2 -
 test/files/buildmanager/t2653/B.scala              |    3 -
 .../buildmanager/t2653/t2653.changes/A2.scala      |    2 -
 test/files/buildmanager/t2653/t2653.check          |   15 -
 test/files/buildmanager/t2653/t2653.test           |    3 -
 test/files/buildmanager/t2654/A.scala              |    2 -
 test/files/buildmanager/t2654/B.scala              |    1 -
 .../buildmanager/t2654/t2654.changes/A2.scala      |    4 -
 test/files/buildmanager/t2654/t2654.check          |    6 -
 test/files/buildmanager/t2654/t2654.test           |    3 -
 test/files/buildmanager/t2655/A.scala              |    4 -
 test/files/buildmanager/t2655/B.scala              |    3 -
 .../buildmanager/t2655/t2655.changes/A2.scala      |    4 -
 test/files/buildmanager/t2655/t2655.check          |   13 -
 test/files/buildmanager/t2655/t2655.test           |    3 -
 test/files/buildmanager/t2657/A.scala              |    3 -
 test/files/buildmanager/t2657/B.scala              |    4 -
 .../buildmanager/t2657/t2657.changes/A2.scala      |    3 -
 test/files/buildmanager/t2657/t2657.check          |   14 -
 test/files/buildmanager/t2657/t2657.test           |    3 -
 test/files/buildmanager/t2789/A.scala              |    5 -
 test/files/buildmanager/t2789/B.scala              |    3 -
 .../buildmanager/t2789/t2789.changes/A2.scala      |    5 -
 test/files/buildmanager/t2789/t2789.check          |   11 -
 test/files/buildmanager/t2789/t2789.test           |    3 -
 test/files/buildmanager/t2790/A.scala              |    5 -
 test/files/buildmanager/t2790/B.scala              |    4 -
 .../buildmanager/t2790/t2790.changes/A2.scala      |    4 -
 test/files/buildmanager/t2790/t2790.check          |   13 -
 test/files/buildmanager/t2790/t2790.test           |    3 -
 test/files/buildmanager/t2792/A1.scala             |    3 -
 test/files/buildmanager/t2792/A2.scala             |    4 -
 test/files/buildmanager/t2792/A3.scala             |    3 -
 .../buildmanager/t2792/t2792.changes/A1_1.scala    |    3 -
 test/files/buildmanager/t2792/t2792.check          |   14 -
 test/files/buildmanager/t2792/t2792.test           |    3 -
 test/files/buildmanager/t3045/A.java               |    7 -
 test/files/buildmanager/t3045/t3045.check          |    3 -
 test/files/buildmanager/t3045/t3045.test           |    1 -
 test/files/buildmanager/t3054/bar/Bar.java         |    7 -
 test/files/buildmanager/t3054/foo/Foo.scala        |    5 -
 test/files/buildmanager/t3054/t3054.check          |    3 -
 test/files/buildmanager/t3054/t3054.test           |    1 -
 test/files/buildmanager/t3059/A.scala              |    4 -
 test/files/buildmanager/t3059/B.scala              |    4 -
 test/files/buildmanager/t3059/t3059.check          |    6 -
 test/files/buildmanager/t3059/t3059.test           |    2 -
 test/files/buildmanager/t3133/A.java               |    7 -
 test/files/buildmanager/t3133/t3133.check          |    3 -
 test/files/buildmanager/t3133/t3133.test           |    1 -
 test/files/buildmanager/t3140/A.scala              |    8 -
 test/files/buildmanager/t3140/t3140.check          |    6 -
 test/files/buildmanager/t3140/t3140.test           |    2 -
 test/files/buildmanager/t4215/A.scala              |    5 -
 test/files/buildmanager/t4215/t4215.check          |    6 -
 test/files/buildmanager/t4215/t4215.test           |    2 -
 test/files/continuations-neg/function0.check       |    6 -
 test/files/continuations-neg/function0.scala       |   16 -
 test/files/continuations-neg/function2.check       |    6 -
 test/files/continuations-neg/function2.scala       |   16 -
 test/files/continuations-neg/function3.check       |    6 -
 test/files/continuations-neg/function3.scala       |   15 -
 test/files/continuations-neg/infer2.check          |    4 -
 test/files/continuations-neg/infer2.scala          |   19 -
 test/files/continuations-neg/lazy.check            |    4 -
 test/files/continuations-neg/lazy.scala            |   16 -
 test/files/continuations-neg/t1929.check           |    6 -
 test/files/continuations-neg/t1929.scala           |   17 -
 test/files/continuations-neg/t2285.check           |    6 -
 test/files/continuations-neg/t2285.scala           |   11 -
 test/files/continuations-neg/t2949.check           |    6 -
 test/files/continuations-neg/t2949.scala           |   15 -
 test/files/continuations-neg/t3628.check           |    4 -
 test/files/continuations-neg/t3628.scala           |   11 -
 test/files/continuations-neg/t3718.check           |    4 -
 test/files/continuations-neg/t3718.scala           |    3 -
 .../t5314-missing-result-type.check                |    4 -
 .../t5314-missing-result-type.scala                |   13 -
 test/files/continuations-neg/t5314-npe.check       |    4 -
 test/files/continuations-neg/t5314-npe.scala       |    3 -
 .../continuations-neg/t5314-return-reset.check     |    4 -
 .../continuations-neg/t5314-return-reset.scala     |   21 -
 .../files/continuations-neg/t5314-type-error.check |    6 -
 .../files/continuations-neg/t5314-type-error.scala |   17 -
 test/files/continuations-neg/t5445.check           |    4 -
 test/files/continuations-neg/t5445.scala           |    5 -
 test/files/continuations-neg/trycatch2.check       |    7 -
 test/files/continuations-neg/trycatch2.scala       |   33 -
 test/files/continuations-run/basics.check          |    2 -
 test/files/continuations-run/basics.scala          |   23 -
 test/files/continuations-run/function1.scala       |   16 -
 test/files/continuations-run/function4.check       |    1 -
 test/files/continuations-run/function4.scala       |   15 -
 test/files/continuations-run/function5.check       |    1 -
 test/files/continuations-run/function5.scala       |   15 -
 test/files/continuations-run/function6.check       |    1 -
 test/files/continuations-run/function6.scala       |   16 -
 test/files/continuations-run/ifelse0.check         |    2 -
 test/files/continuations-run/ifelse0.scala         |   18 -
 test/files/continuations-run/ifelse1.check         |    4 -
 test/files/continuations-run/ifelse1.scala         |   25 -
 test/files/continuations-run/ifelse2.check         |    4 -
 test/files/continuations-run/ifelse2.scala         |   16 -
 test/files/continuations-run/ifelse3.check         |    2 -
 test/files/continuations-run/ifelse3.scala         |   21 -
 test/files/continuations-run/ifelse4.check         |    4 -
 test/files/continuations-run/ifelse4.scala         |   31 -
 test/files/continuations-run/infer1.scala          |   33 -
 test/files/continuations-run/match0.check          |    2 -
 test/files/continuations-run/match0.scala          |   18 -
 test/files/continuations-run/match1.check          |    2 -
 test/files/continuations-run/match1.scala          |   18 -
 test/files/continuations-run/match2.check          |    2 -
 test/files/continuations-run/match2.scala          |   26 -
 test/files/continuations-run/patvirt.check         |    2 -
 test/files/continuations-run/patvirt.scala         |   32 -
 test/files/continuations-run/shift-pct.check       |   25 -
 test/files/continuations-run/shift-pct.scala       |   30 -
 test/files/continuations-run/t1807.check           |    1 -
 test/files/continuations-run/t1807.scala           |   14 -
 test/files/continuations-run/t1808.scala           |   10 -
 test/files/continuations-run/t1820.scala           |   14 -
 test/files/continuations-run/t1821.check           |    4 -
 test/files/continuations-run/t1821.scala           |   20 -
 test/files/continuations-run/t2864.check           |    1 -
 test/files/continuations-run/t2864.scala           |   30 -
 test/files/continuations-run/t2934.check           |    1 -
 test/files/continuations-run/t2934.scala           |   10 -
 test/files/continuations-run/t3199.check           |    1 -
 test/files/continuations-run/t3199.scala           |   20 -
 test/files/continuations-run/t3199b.scala          |   11 -
 test/files/continuations-run/t3223.check           |    1 -
 test/files/continuations-run/t3223.scala           |   19 -
 test/files/continuations-run/t3225.check           |   12 -
 test/files/continuations-run/t3225.scala           |   56 -
 test/files/continuations-run/t3501.check           |    5 -
 test/files/continuations-run/t3501.scala           |   15 -
 test/files/continuations-run/t5314-2.check         |    5 -
 test/files/continuations-run/t5314-2.scala         |   44 -
 test/files/continuations-run/t5314-3.check         |    4 -
 test/files/continuations-run/t5314-3.scala         |   27 -
 test/files/continuations-run/t5314-with-if.check   |    1 -
 test/files/continuations-run/t5314-with-if.scala   |   17 -
 test/files/continuations-run/t5314.check           |    8 -
 test/files/continuations-run/t5314.scala           |   52 -
 test/files/continuations-run/t5472.check           |    1 -
 test/files/continuations-run/t5472.scala           |   90 -
 test/files/continuations-run/t5506.check           |    7 -
 test/files/continuations-run/t5506.scala           |   58 -
 test/files/continuations-run/t5538.check           |    1 -
 test/files/continuations-run/t5538.scala           |   50 -
 test/files/continuations-run/trycatch0.check       |    2 -
 test/files/continuations-run/trycatch0.scala       |   25 -
 test/files/continuations-run/trycatch1.check       |    4 -
 test/files/continuations-run/trycatch1.scala       |   48 -
 test/files/continuations-run/while0.check          |    1 -
 test/files/continuations-run/while0.scala          |   22 -
 test/files/continuations-run/while1.check          |   11 -
 test/files/continuations-run/while1.scala          |   22 -
 test/files/continuations-run/while2.check          |   19 -
 test/files/continuations-run/while2.scala          |   23 -
 test/files/continuations-run/z1673.scala           |   31 -
 test/files/detach-neg/det_bar.check                |    4 -
 test/files/detach-neg/det_bar.scala                |   13 -
 test/files/detach-run/actor-run.check              |    5 -
 test/files/detach-run/actor/Client.scala           |   54 -
 test/files/detach-run/actor/Server.scala           |   27 -
 test/files/detach-run/actor/ServerConsole.scala    |   75 -
 test/files/detach-run/actor/actor.flags            |    1 -
 test/files/detach-run/actor/actor.scala            |  157 -
 test/files/detach-run/actor/java.policy            |   25 -
 test/files/detach-run/basic-run.check              |    5 -
 test/files/detach-run/basic/Client.scala           |   48 -
 test/files/detach-run/basic/Server.scala           |   22 -
 test/files/detach-run/basic/ServerConsole.scala    |   83 -
 test/files/detach-run/basic/basic.flags            |    1 -
 test/files/detach-run/basic/basic.scala            |  169 -
 test/files/detach-run/basic/java.policy            |   26 -
 test/files/disabled/run/t4602.scala                |   57 -
 test/files/disabled/t7020.check                    |   17 -
 test/files/filters                                 |    8 +
 test/files/instrumented/InstrumentationTest.check  |    6 +
 test/files/instrumented/InstrumentationTest.scala  |   27 +-
 .../instrumented/inline-in-constructors.check      |    4 +
 .../instrumented/inline-in-constructors.flags      |    2 +-
 .../inline-in-constructors/test_3.scala            |   21 +-
 test/files/jvm/actor-exceptions.scala              |    4 +-
 test/files/jvm/actor-executor.scala                |    5 +-
 test/files/jvm/actor-executor2.scala               |    6 +-
 test/files/jvm/actor-executor3.scala               |    6 +-
 test/files/jvm/actor-getstate.scala                |    6 +-
 test/files/jvm/actor-link-getstate.scala           |    9 +-
 test/files/jvm/actor-looping.scala                 |    3 +-
 test/files/jvm/actor-normal-exit.scala             |    4 +-
 test/files/jvm/actor-receivewithin.scala           |    5 +-
 test/files/jvm/actor-sync-send-timeout.scala       |    7 +-
 test/files/jvm/actor-termination.scala             |    3 +-
 test/files/jvm/actor-uncaught-exception.scala      |   55 +-
 test/files/jvm/actor-uncaught-exception2.check     |    4 +-
 test/files/jvm/actor-uncaught-exception2.scala     |   56 +-
 test/files/jvm/annotations.scala                   |    3 +
 test/files/jvm/backendBugUnapply.check             |    2 -
 test/files/jvm/backendBugUnapply.scala             |   17 -
 test/files/jvm/bigints.scala                       |    1 -
 test/files/jvm/bytecode-test-example/Foo_1.flags   |    1 +
 test/files/jvm/console.scala                       |    2 +-
 test/files/jvm/constant-optimization/Foo_1.flags   |    1 +
 test/files/jvm/constant-optimization/Foo_1.scala   |    9 +
 test/files/jvm/constant-optimization/Test.scala    |   27 +
 test/files/jvm/daemon-actor-termination.scala      |    6 +-
 test/files/jvm/deprecation.check                   |    3 +
 test/files/jvm/deprecation/Test_1.scala            |    2 +-
 test/files/jvm/duration-tck.scala                  |    2 +
 test/files/jvm/future-alarm.scala                  |    4 +-
 test/files/jvm/future-awaitall-zero.scala          |    6 +-
 test/files/jvm/future-spec.check                   |    1 +
 test/files/jvm/future-spec/FutureTests.scala       |  235 +-
 test/files/jvm/future-spec/PromiseTests.scala      |   80 +-
 test/files/jvm/future-spec/TryTests.scala          |    2 +-
 test/files/jvm/future-spec/main.scala              |   52 +-
 test/files/jvm/future-termination.scala            |   28 +-
 test/files/jvm/inner.scala                         |    2 +-
 test/files/jvm/interpreter.check                   |   16 +-
 test/files/jvm/interpreter.scala                   |    4 +-
 test/files/jvm/manifests-new.check                 |    4 +-
 test/files/jvm/manifests-new.scala                 |   34 +-
 test/files/jvm/manifests-old.scala                 |   34 +-
 test/files/jvm/methvsfield.java                    |    6 +-
 test/files/jvm/named-args-in-order.check           |    3 +
 .../jvm/named-args-in-order/SameBytecode.scala     |    9 +
 test/files/jvm/named-args-in-order/Test.scala      |   10 +
 test/files/jvm/natives.scala                       |    6 +-
 test/files/jvm/non-fatal-tests.scala               |   12 +-
 test/files/jvm/nooptimise/Foo_1.flags              |    1 +
 test/files/jvm/nooptimise/Foo_1.scala              |    8 +
 test/files/jvm/nooptimise/Test.scala               |   23 +
 test/files/jvm/opt_value_class.check               |    2 +
 test/files/jvm/opt_value_class/Value_1.scala       |   28 +
 test/files/jvm/opt_value_class/test.scala          |   16 +
 .../patmat_opt_ignore_underscore/Analyzed_1.scala  |    2 +-
 .../jvm/patmat_opt_no_nullcheck/Analyzed_1.scala   |    2 +-
 .../patmat_opt_primitive_typetest/Analyzed_1.scala |    2 +-
 test/files/jvm/protectedacc.scala                  |   16 +-
 test/files/jvm/reactor-exceptionOnSend.scala       |    5 +-
 test/files/jvm/reactor-producer-consumer.scala     |    4 +-
 test/files/jvm/reactor.scala                       |   16 +-
 test/files/jvm/replyablereactor.scala              |    5 +-
 test/files/jvm/replyablereactor2.scala             |    5 +-
 test/files/jvm/replyablereactor3.scala             |    5 +-
 test/files/jvm/replyablereactor4.scala             |    5 +-
 test/files/jvm/replyreactor-react-sender.scala     |    6 +-
 test/files/jvm/replyreactor.scala                  |    4 +-
 test/files/jvm/scala-concurrent-tck.scala          |  178 +-
 test/files/jvm/scheduler-adapter.scala             |    5 +-
 test/files/jvm/serialization-new.check             |   55 +-
 test/files/jvm/serialization-new.scala             |   67 +-
 test/files/jvm/serialization.check                 |   55 +-
 test/files/jvm/serialization.scala                 |  116 +-
 test/files/jvm/si5471.scala                        |   17 -
 test/files/jvm/stringbuilder.scala                 |    1 +
 test/files/jvm/t0632.check                         |   12 -
 test/files/jvm/t0632.scala                         |   22 -
 test/files/jvm/t1118.check                         |   11 -
 test/files/jvm/t1118.scala                         |   21 -
 test/files/jvm/t1143-2/t1143-2.scala               |   26 +-
 test/files/jvm/t1143.scala                         |   12 +-
 test/files/jvm/t1342/SI.scala                      |    2 +-
 test/files/jvm/t1449.scala                         |    9 +-
 test/files/jvm/t1461.scala                         |    2 +-
 test/files/jvm/t1464/MyTrait.scala                 |    2 +-
 test/files/jvm/t1600.scala                         |   27 +-
 test/files/jvm/t1948.scala                         |    6 +-
 test/files/jvm/t2163/t2163.java                    |    9 +
 test/files/jvm/t2163/t2163.scala                   |   10 +
 test/files/jvm/t2359.scala                         |    4 +-
 test/files/jvm/t2470.cmds                          |    3 -
 test/files/jvm/t2530.scala                         |    6 +-
 test/files/jvm/t2570/Test.scala                    |    2 +-
 test/files/jvm/t3003.cmds                          |    2 -
 test/files/jvm/t3003/Test_1.scala                  |    2 +-
 test/files/jvm/t3102.scala                         |    7 +-
 test/files/jvm/t3356.scala                         |    5 +-
 test/files/jvm/t3365.scala                         |    5 +-
 test/files/jvm/t3407.scala                         |    4 +-
 test/files/jvm/t3412-channel.scala                 |    4 +-
 test/files/jvm/t3412.scala                         |    4 +-
 test/files/jvm/t3415/HelloWorld.scala              |    2 +-
 test/files/jvm/t3470.scala                         |    4 +-
 test/files/jvm/t3838.scala                         |    4 +-
 .../jvm/{ticket4283 => t4283}/AbstractFoo.java     |    0
 .../jvm/{ticket4283 => t4283}/ScalaBipp.scala      |    0
 test/files/jvm/{ticket4283 => t4283}/Test.scala    |    0
 test/files/jvm/{si5471.check => t5471.check}       |    0
 test/files/jvm/t5471.scala                         |   17 +
 test/files/jvm/t560bis.check                       |    2 -
 test/files/jvm/t560bis.scala                       |   21 -
 test/files/jvm/t6941/Analyzed_1.flags              |    1 +
 test/files/jvm/t6941/Analyzed_1.scala              |    2 +-
 test/files/jvm/t7006.check                         |   29 +
 test/files/jvm/t7006/Foo_1.flags                   |    1 +
 test/files/jvm/t7006/Foo_1.scala                   |   10 +
 test/files/jvm/t7006/Test.scala                    |   19 +
 test/files/jvm/t7146.scala                         |    2 +
 test/files/jvm/t7181/Foo_1.scala                   |   26 +
 test/files/jvm/t7181/Test.scala                    |   24 +
 test/files/jvm/throws-annot.scala                  |   10 +-
 test/files/jvm/ticket2163/ticket2163.java          |    9 -
 test/files/jvm/ticket2163/ticket2163.scala         |    5 -
 test/files/jvm/try-type-tests.scala                |  267 +-
 test/files/jvm/typerep.scala                       |   30 +-
 test/files/jvm/unittest_io_Jvm.check               |    2 +-
 test/files/jvm/unittest_io_Jvm.scala               |    2 +-
 test/files/jvm/unittest_xml.scala                  |  101 -
 test/files/jvm/unreachable/Foo_1.flags             |    1 +
 test/files/jvm/unreachable/Foo_1.scala             |  112 +
 test/files/jvm/unreachable/Test.scala              |   23 +
 test/files/jvm/value-class-boxing.check            |    7 +
 test/files/jvm/value-class-boxing/Analyzed_1.scala |   17 +
 test/files/jvm/value-class-boxing/test.scala       |   15 +
 test/files/jvm/varargs/JavaClass.java              |    2 +-
 test/files/jvm/varargs/VaClass.scala               |    4 +-
 test/files/jvm/xml01.check                         |    8 -
 test/files/jvm/xml01.scala                         |  181 -
 test/files/jvm/xml02.scala                         |   78 -
 test/files/jvm/xml03syntax.check                   |   26 -
 test/files/jvm/xml03syntax.scala                   |   97 -
 test/files/jvm/xml04embed.check                    |    3 -
 test/files/jvm/xml04embed.scala                    |   10 -
 test/files/jvm/xml05.check                         |    4 -
 test/files/jvm/xmlattr.check                       |   18 -
 test/files/jvm/xmlattr.scala                       |   63 -
 test/files/jvm/xmlmore.check                       |   10 -
 test/files/jvm/xmlmore.scala                       |   29 -
 test/files/jvm/xmlpull.scala                       |   31 -
 test/files/jvm/xmlstuff.check                      |   22 -
 test/files/jvm/xmlstuff.scala                      |  181 -
 test/files/lib/jsoup-1.3.1.jar.desired.sha1        |    1 +
 test/files/lib/macro210.jar.desired.sha1           |    1 +
 test/files/lib/scalacheck.jar.desired.sha1         |    1 -
 test/files/neg/abstract-class-2.scala              |    4 +-
 test/files/neg/abstract-explaintypes.check         |   15 +
 test/files/neg/abstract-explaintypes.flags         |    1 +
 test/files/neg/abstract-explaintypes.scala         |   11 +
 test/files/neg/abstract-inaccessible.check         |   10 +-
 test/files/neg/abstract-inaccessible.scala         |    2 +-
 test/files/neg/abstract-report.check               |    2 +-
 test/files/neg/abstract-report2.check              |   12 +-
 test/files/neg/abstract-report2.scala              |    6 +-
 test/files/neg/accesses.scala                      |    2 +-
 test/files/neg/accesses2.check                     |   12 +
 test/files/neg/accesses2.scala                     |   11 +
 test/files/neg/ambiguous-float-dots.check          |   16 -
 test/files/neg/ambiguous-float-dots.flags          |    1 -
 test/files/neg/ambiguous-float-dots.scala          |   14 -
 test/files/neg/ambiguous-float-dots2.check         |    7 +-
 test/files/neg/ambiguous-float-dots2.scala         |    1 -
 test/files/neg/annot-nonconst.check                |    2 +-
 test/files/neg/any-vs-anyref.check                 |   18 +-
 test/files/neg/anytrait.scala                      |    2 +-
 test/files/neg/anyval-anyref-parent.check          |    6 +-
 test/files/neg/applydynamic_sip.check              |   17 +-
 test/files/neg/array-not-seq.check                 |   13 -
 test/files/neg/array-not-seq.flags                 |    1 -
 test/files/neg/array-not-seq.scala                 |   26 -
 test/files/neg/bad-advice.check                    |    6 +
 .../{disabled/t7020.flags => neg/bad-advice.flags} |    0
 test/files/neg/bad-advice.scala                    |    6 +
 test/files/neg/case-collision.check                |   10 +-
 test/files/neg/case-collision.flags                |    2 +-
 test/files/neg/case-collision2.check               |   12 +
 test/files/neg/case-collision2.flags               |    1 +
 test/files/neg/case-collision2.scala               |   12 +
 test/files/neg/catch-all.check                     |   10 +-
 test/files/neg/check-dead.check                    |   12 +-
 test/files/neg/check-dead.scala                    |   12 +-
 test/files/neg/checksensible.check                 |   70 +-
 test/files/neg/checksensible.scala                 |   24 +-
 test/files/neg/choices.check                       |    2 +-
 test/files/neg/choices.flags                       |    2 +-
 test/files/neg/choices.scala                       |    2 +-
 test/files/neg/class-of-double-targs.check         |    4 +
 test/files/neg/class-of-double-targs.scala         |    3 +
 .../neg/classmanifests_new_deprecations.check      |   20 +-
 test/files/neg/compile-time-only-a.check           |   79 +
 test/files/neg/compile-time-only-a.scala           |   76 +
 test/files/neg/compile-time-only-b.check           |   13 +
 test/files/neg/compile-time-only-b.scala           |   15 +
 test/files/neg/constructor-init-order.check        |    9 +
 ...ollision.flags => constructor-init-order.flags} |    0
 test/files/neg/constructor-init-order.scala        |   23 +
 test/files/neg/cycle-bounds.check                  |    4 +
 test/files/neg/cycle-bounds.flags                  |    1 +
 test/files/neg/cycle-bounds.scala                  |    5 +
 test/files/neg/cyclics-import.check                |   11 +-
 test/files/neg/dbldef.check                        |    4 +-
 test/files/neg/delayed-init-ref.check              |   14 +-
 test/files/neg/delayed-init-ref.flags              |    2 +-
 test/files/neg/divergent-implicit.check            |   12 +-
 test/files/neg/dotless-targs.check                 |    4 +
 test/files/neg/dotless-targs.scala                 |    5 +
 test/files/neg/eta-expand-star-deprecation.check   |    4 +
 test/files/neg/eta-expand-star-deprecation.flags   |    1 +
 test/files/neg/eta-expand-star-deprecation.scala   |    8 +
 test/files/neg/exhausting.check                    |   18 +-
 test/files/neg/exhausting.scala                    |    4 +-
 test/files/neg/forgot-interpolator.check           |   27 +
 ...ed-init-ref.flags => forgot-interpolator.flags} |    0
 test/files/neg/forgot-interpolator.scala           |   93 +
 test/files/neg/forward.scala                       |    8 +-
 test/files/neg/found-req-variance.scala            |   14 +-
 test/files/neg/gadts1.check                        |    7 +-
 test/files/neg/gadts1.scala                        |    4 +-
 test/files/neg/gadts2-strict.check                 |    6 +
 test/files/neg/gadts2-strict.flags                 |    1 +
 test/files/neg/gadts2-strict.scala                 |   26 +
 test/files/neg/gadts2.check                        |    6 +
 test/files/neg/gadts2.flags                        |    1 +
 test/files/neg/gadts2.scala                        |   12 +
 test/files/neg/implicits.scala                     |    8 +-
 test/files/neg/import-precedence.check             |   19 +
 test/files/neg/import-precedence.scala             |   68 +
 test/files/neg/java-access-neg/J.java              |    4 +-
 test/files/neg/java-access-neg/S2.scala            |    8 +-
 .../files/neg/javaConversions-2.10-ambiguity.check |    6 -
 .../files/neg/javaConversions-2.10-ambiguity.scala |   10 -
 test/files/neg/lazy-override.scala                 |    2 +-
 test/files/neg/lazyvals.scala                      |    4 +-
 test/files/neg/literate_existentials.check         |    4 +
 test/files/neg/literate_existentials.scala         |  224 +
 test/files/neg/logImplicits.check                  |    4 +-
 test/files/neg/logImplicits.scala                  |    4 +-
 test/files/neg/lubs.check                          |    9 +-
 test/files/neg/lubs.scala                          |    2 +-
 test/files/neg/macro-abort/Macros_1.scala          |    2 +-
 test/files/neg/macro-basic-mamdmi.check            |    5 +-
 .../macro-basic-mamdmi/Impls_Macros_Test_1.scala   |   17 +-
 .../macro-blackbox-dynamic-materialization.check   |    4 +
 .../Macros_1.scala                                 |   25 +
 .../Test_2.scala                                   |    4 +
 test/files/neg/macro-blackbox-extractor.check      |    4 +
 .../neg/macro-blackbox-extractor/Macros_1.scala    |   21 +
 .../neg/macro-blackbox-extractor/Test_2.scala      |    5 +
 .../macro-blackbox-fundep-materialization.check    |    8 +
 .../macro-blackbox-fundep-materialization.flags    |    1 +
 .../Macros_1.scala                                 |   39 +
 .../Test_2.scala                                   |   12 +
 test/files/neg/macro-blackbox-structural.check     |    4 +
 .../macro-blackbox-structural/Impls_Macros_1.scala |   15 +
 .../neg/macro-blackbox-structural/Test_2.scala     |    5 +
 test/files/neg/macro-bundle-abstract.check         |    4 +
 test/files/neg/macro-bundle-abstract.scala         |   11 +
 test/files/neg/macro-bundle-ambiguous.check        |    5 +
 test/files/neg/macro-bundle-ambiguous.scala        |   14 +
 test/files/neg/macro-bundle-need-qualifier.check   |    4 +
 test/files/neg/macro-bundle-need-qualifier.scala   |   11 +
 test/files/neg/macro-bundle-noncontext.check       |    4 +
 test/files/neg/macro-bundle-noncontext.scala       |    9 +
 test/files/neg/macro-bundle-nonpublic-c.check      |    4 +
 test/files/neg/macro-bundle-nonpublic-c.scala      |   11 +
 test/files/neg/macro-bundle-nonpublic-impl.check   |    4 +
 test/files/neg/macro-bundle-nonpublic-impl.scala   |   11 +
 test/files/neg/macro-bundle-nonstatic.check        |   13 +
 test/files/neg/macro-bundle-nonstatic.scala        |   36 +
 test/files/neg/macro-bundle-object.check           |    8 +
 test/files/neg/macro-bundle-object.scala           |   11 +
 test/files/neg/macro-bundle-overloaded.check       |    4 +
 test/files/neg/macro-bundle-overloaded.scala       |   12 +
 test/files/neg/macro-bundle-polymorphic.check      |   19 +
 test/files/neg/macro-bundle-polymorphic.scala      |   43 +
 test/files/neg/macro-bundle-priority-bundle.check  |    8 +
 test/files/neg/macro-bundle-priority-bundle.scala  |   14 +
 .../neg/macro-bundle-priority-nonbundle.check      |    8 +
 .../neg/macro-bundle-priority-nonbundle.scala      |   14 +
 test/files/neg/macro-bundle-trait.check            |    4 +
 test/files/neg/macro-bundle-trait.scala            |   11 +
 test/files/neg/macro-bundle-whitebox-use-raw.check |   17 +
 .../macro-bundle-whitebox-use-raw/Macros_1.scala   |  108 +
 .../neg/macro-bundle-whitebox-use-raw/Test_2.scala |   19 +
 .../neg/macro-bundle-whitebox-use-refined.check    |   17 +
 .../Macros_1.scala                                 |  108 +
 .../macro-bundle-whitebox-use-refined/Test_2.scala |   19 +
 test/files/neg/macro-bundle-wrongcontext-a.check   |    4 +
 test/files/neg/macro-bundle-wrongcontext-a.scala   |   13 +
 test/files/neg/macro-bundle-wrongcontext-b.check   |    4 +
 test/files/neg/macro-bundle-wrongcontext-b.scala   |   11 +
 test/files/neg/macro-cyclic/Impls_Macros_1.scala   |    2 +-
 test/files/neg/macro-deprecate-idents.check        |   51 +-
 .../Impls_Macros_1.scala                           |   10 +-
 test/files/neg/macro-exception.check               |    2 +-
 test/files/neg/macro-exception/Macros_1.scala      |    2 +-
 .../Impls_Macros_1.scala                           |    2 +-
 .../neg/macro-incompatible-macro-engine-a.check    |    7 +
 .../Macros_2.flags                                 |    1 +
 .../Macros_2.scala                                 |    7 +
 .../Plugin_1.scala                                 |   35 +
 .../macro-incompatible-macro-engine-a/Test_3.scala |    4 +
 .../scalac-plugin.xml                              |    4 +
 .../neg/macro-incompatible-macro-engine-b.check    |    7 +
 .../neg/macro-incompatible-macro-engine-b.flags    |    1 +
 .../Macros_2.flags                                 |    1 +
 .../Macros_2.scala                                 |    7 +
 .../Plugin_1.scala                                 |   35 +
 .../macro-incompatible-macro-engine-b/Test_3.scala |    4 +
 .../scalac-plugin.xml                              |    4 +
 .../neg/macro-incompatible-macro-engine-c.check    |    4 +
 .../neg/macro-incompatible-macro-engine-c.scala    |    3 +
 test/files/neg/macro-invalidimpl-a.check           |    4 -
 test/files/neg/macro-invalidimpl-a/Impls_1.scala   |    5 -
 .../neg/macro-invalidimpl-a/Macros_Test_2.scala    |    9 -
 test/files/neg/macro-invalidimpl-b.check           |    4 -
 test/files/neg/macro-invalidimpl-b/Impls_1.scala   |    5 -
 .../neg/macro-invalidimpl-b/Macros_Test_2.scala    |    9 -
 test/files/neg/macro-invalidimpl-c.check           |    4 -
 .../neg/macro-invalidimpl-c/Impls_Macros_1.scala   |    9 -
 test/files/neg/macro-invalidimpl-c/Test_2.scala    |    3 -
 test/files/neg/macro-invalidimpl-d.check           |    4 -
 test/files/neg/macro-invalidimpl-d/Impls_1.scala   |    7 -
 .../neg/macro-invalidimpl-d/Macros_Test_2.scala    |    7 -
 test/files/neg/macro-invalidimpl-e.check           |   13 -
 test/files/neg/macro-invalidimpl-e/Impls_1.scala   |    6 -
 .../neg/macro-invalidimpl-e/Macros_Test_2.scala    |    9 -
 test/files/neg/macro-invalidimpl-f.check           |    7 -
 test/files/neg/macro-invalidimpl-f/Impls_1.scala   |   11 -
 .../neg/macro-invalidimpl-f/Macros_Test_2.scala    |    9 -
 test/files/neg/macro-invalidimpl-g.check           |    7 -
 test/files/neg/macro-invalidimpl-g/Impls_1.scala   |   11 -
 .../neg/macro-invalidimpl-g/Macros_Test_2.scala    |    8 -
 test/files/neg/macro-invalidimpl-h.check           |    4 -
 test/files/neg/macro-invalidimpl-h/Impls_1.scala   |    5 -
 .../neg/macro-invalidimpl-h/Macros_Test_2.scala    |    8 -
 test/files/neg/macro-invalidimpl-i.check           |    4 -
 test/files/neg/macro-invalidimpl-i/Impls_1.scala   |    7 -
 .../neg/macro-invalidimpl-i/Macros_Test_2.scala    |    5 -
 test/files/neg/macro-invalidimpl.check             |   53 +
 ...invalidimpl-a.flags => macro-invalidimpl.flags} |    0
 test/files/neg/macro-invalidimpl/Impls_1.scala     |   39 +
 .../neg/macro-invalidimpl/Macros_Test_2.scala      |   55 +
 test/files/neg/macro-invalidret-nontree.check      |    7 -
 .../neg/macro-invalidret-nontree/Impls_1.scala     |    5 -
 .../macro-invalidret-nontree/Macros_Test_2.scala   |    8 -
 .../neg/macro-invalidret-nonuniversetree.check     |    7 -
 .../macro-invalidret-nonuniversetree/Impls_1.scala |    6 -
 .../Macros_Test_2.scala                            |    8 -
 test/files/neg/macro-invalidret.check              |   35 +
 test/files/neg/macro-invalidret.flags              |    3 +
 test/files/neg/macro-invalidret/Impls_1.scala      |   10 +
 .../files/neg/macro-invalidret/Macros_Test_2.scala |   18 +
 test/files/neg/macro-invalidshape-a.check          |    5 -
 test/files/neg/macro-invalidshape-a/Impls_1.scala  |    5 -
 .../neg/macro-invalidshape-a/Macros_Test_2.scala   |    8 -
 test/files/neg/macro-invalidshape-b.check          |    5 -
 test/files/neg/macro-invalidshape-b/Impls_1.scala  |    5 -
 .../neg/macro-invalidshape-b/Macros_Test_2.scala   |    8 -
 test/files/neg/macro-invalidshape-c.check          |    9 -
 test/files/neg/macro-invalidshape-c/Impls_1.scala  |    5 -
 .../neg/macro-invalidshape-c/Macros_Test_2.scala   |    8 -
 test/files/neg/macro-invalidshape-d.check          |    8 -
 test/files/neg/macro-invalidshape-d.flags          |    1 -
 test/files/neg/macro-invalidshape-d/Impls_1.scala  |    5 -
 .../neg/macro-invalidshape-d/Macros_Test_2.scala   |    8 -
 test/files/neg/macro-invalidshape.check            |   20 +
 ...nvalidimpl-b.flags => macro-invalidshape.flags} |    0
 test/files/neg/macro-invalidshape/Impls_1.scala    |    5 +
 .../neg/macro-invalidshape/Macros_Test_2.scala     |   17 +
 .../neg/macro-invalidsig-context-bounds.check      |    4 -
 .../macro-invalidsig-context-bounds/Impls_1.scala  |    9 -
 .../Macros_Test_1.scala                            |    8 -
 test/files/neg/macro-invalidsig-ctx-badargc.check  |    7 -
 .../neg/macro-invalidsig-ctx-badargc/Impls_1.scala |    3 -
 .../Macros_Test_2.scala                            |    8 -
 test/files/neg/macro-invalidsig-ctx-badtype.check  |    7 -
 .../neg/macro-invalidsig-ctx-badtype/Impls_1.scala |    5 -
 .../Macros_Test_2.scala                            |    8 -
 .../neg/macro-invalidsig-ctx-badvarargs.check      |    7 -
 .../macro-invalidsig-ctx-badvarargs/Impls_1.scala  |    5 -
 .../Macros_Test_2.scala                            |    8 -
 test/files/neg/macro-invalidsig-ctx-noctx.check    |    7 -
 .../neg/macro-invalidsig-ctx-noctx/Impls_1.scala   |    5 -
 .../macro-invalidsig-ctx-noctx/Macros_Test_2.scala |    8 -
 .../neg/macro-invalidsig-implicit-params.check     |    4 -
 .../Impls_Macros_1.scala                           |   19 -
 .../macro-invalidsig-implicit-params/Test_2.scala  |    4 -
 .../neg/macro-invalidsig-params-badargc.check      |    7 -
 .../Impls_Macros_1.scala                           |    9 -
 .../macro-invalidsig-params-badargc/Test_2.scala   |    4 -
 .../neg/macro-invalidsig-params-badtype.check      |    7 +-
 .../Impls_Macros_1.scala                           |    4 +-
 .../macro-invalidsig-params-badtype/Test_2.scala   |    4 -
 .../neg/macro-invalidsig-params-badvarargs.check   |    7 -
 .../Impls_Macros_1.scala                           |    9 -
 .../Test_2.scala                                   |    4 -
 .../neg/macro-invalidsig-params-namemismatch.check |    7 -
 .../Impls_Macros_1.scala                           |    9 -
 .../Test_2.scala                                   |    4 -
 .../neg/macro-invalidsig-tparams-badtype.check     |    7 -
 .../macro-invalidsig-tparams-badtype/Impls_1.scala |    5 -
 .../Macros_Test_2.scala                            |    8 -
 .../neg/macro-invalidsig-tparams-bounds-a.check    |    4 -
 .../Impls_1.scala                                  |    5 -
 .../Macros_Test_2.scala                            |    8 -
 .../neg/macro-invalidsig-tparams-bounds-b.check    |    4 -
 .../Impls_1.scala                                  |    5 -
 .../Macros_Test_2.scala                            |    8 -
 .../neg/macro-invalidsig-tparams-notparams-a.check |    4 -
 .../Impls_1.scala                                  |    6 -
 .../Macros_Test_2.scala                            |    8 -
 .../neg/macro-invalidsig-tparams-notparams-b.check |    4 -
 .../Impls_1.scala                                  |   11 -
 .../Macros_Test_2.scala                            |   11 -
 .../neg/macro-invalidsig-tparams-notparams-c.check |    4 -
 .../Impls_1.scala                                  |   12 -
 .../Macros_Test_2.scala                            |   11 -
 test/files/neg/macro-invalidsig.check              |   85 +
 ...-invalidimpl-c.flags => macro-invalidsig.flags} |    0
 test/files/neg/macro-invalidsig/Impls_1.scala      |   86 +
 .../files/neg/macro-invalidsig/Macros_Test_2.scala |   83 +
 test/files/neg/macro-invalidusage-badargs.check    |   20 +-
 .../neg/macro-invalidusage-badargs/Impls_1.scala   |    4 +-
 .../macro-invalidusage-badargs/Macros_Test_2.scala |   12 +-
 .../macro-invalidusage-badbounds-a/Impls_1.scala   |    5 -
 .../Macros_Test_2.scala                            |    8 -
 ...-a.check => macro-invalidusage-badbounds.check} |    0
 ...-d.flags => macro-invalidusage-badbounds.flags} |    0
 .../neg/macro-invalidusage-badbounds/Impls_1.scala |    5 +
 .../Macros_Test_2.scala                            |    8 +
 test/files/neg/macro-invalidusage-badtargs.check   |   22 +-
 .../neg/macro-invalidusage-badtargs/Impls_1.scala  |    4 +-
 .../Macros_Test_2.scala                            |   14 +-
 .../Impls_1.scala                                  |    7 +-
 .../Macros_Test_2.scala                            |    2 +-
 .../files/neg/macro-invalidusage-nontypeable.check |    4 +
 ....flags => macro-invalidusage-nontypeable.flags} |    0
 .../Impls_Macros_1.scala                           |   13 +
 .../macro-invalidusage-nontypeable/Test_2.scala    |    3 +
 test/files/neg/macro-invalidusage-presuper.check   |    4 +
 ...l-f.flags => macro-invalidusage-presuper.flags} |    0
 .../neg/macro-invalidusage-presuper/Impls_1.scala  |    5 +
 .../Macros_Test_2.scala                            |    3 +
 test/files/neg/macro-noexpand/Impls_1.scala        |    4 +-
 test/files/neg/macro-nontypeablebody/Impls_1.scala |    4 +-
 ...verride-macro-overrides-abstract-method-a.check |    4 +-
 .../Impls_Macros_1.scala                           |    6 +-
 ...verride-macro-overrides-abstract-method-b.check |    6 +-
 .../Impls_Macros_1.scala                           |    2 +-
 .../macro-override-method-overrides-macro.check    |    4 +-
 .../Impls_1.scala                                  |   15 +-
 .../Macros_Test_2.scala                            |   14 +-
 test/files/neg/macro-qmarkqmarkqmark.check         |    2 +-
 test/files/neg/macro-quasiquotes.check             |    8 +
 test/files/neg/macro-quasiquotes/Macros_1.scala    |   15 +
 test/files/neg/macro-quasiquotes/Test_2.scala      |    5 +
 test/files/neg/macro-reify-splice-splice.check     |    7 +
 .../{run => neg}/macro-reify-splice-splice.flags   |    0
 .../neg/macro-reify-splice-splice/Macros_1.scala   |   11 +
 .../macro-reify-splice-splice/Test_2.scala         |    0
 .../neg/macro-without-xmacros-a/Impls_1.scala      |   14 +-
 .../neg/macro-without-xmacros-b/Impls_1.scala      |   14 +-
 test/files/neg/main1.check                         |   14 +-
 test/files/neg/migration28.check                   |    4 +-
 test/files/neg/migration28.scala                   |    6 +-
 test/files/neg/missing-param-type-tuple.check      |   31 +
 test/files/neg/missing-param-type-tuple.scala      |    8 +
 test/files/neg/name-lookup-stable.check            |   11 +
 test/files/neg/name-lookup-stable.scala            |   20 +
 test/files/neg/names-defaults-neg-ref.check        |    2 +-
 test/files/neg/names-defaults-neg-warn.check       |    8 +-
 test/files/neg/names-defaults-neg.check            |   27 +-
 test/files/neg/nested-annotation.check             |   10 +
 test/files/neg/nested-annotation.scala             |    9 +
 test/files/neg/nested-fn-print.scala               |    2 +-
 test/files/neg/newpat_unreachable.check            |   24 +-
 test/files/neg/no-implicit-to-anyref-any-val.check |   34 +
 test/files/neg/no-implicit-to-anyref-any-val.scala |   33 +
 test/files/neg/no-implicit-to-anyref.check         |   28 -
 test/files/neg/no-implicit-to-anyref.scala         |   29 -
 test/files/neg/nonlocal-warning.check              |    9 +
 .../neg/nonlocal-warning.flags}                    |    0
 test/files/neg/nonlocal-warning.scala              |   18 +
 .../neg/not-a-legal-formal-parameter-tuple.check   |   19 +
 .../neg/not-a-legal-formal-parameter-tuple.scala   |    5 +
 test/files/neg/null-unsoundness.scala              |    1 -
 test/files/neg/nullary-override.check              |    4 +-
 test/files/neg/object-not-a-value.scala            |    2 +-
 test/files/neg/overload-msg.scala                  |    2 +-
 test/files/neg/overloaded-implicit.check           |    8 +-
 test/files/neg/overloaded-implicit.flags           |    2 +-
 test/files/neg/overloaded-implicit.scala           |    8 +-
 test/files/neg/override-object-no.scala            |    2 +-
 test/files/neg/package-ob-case.check               |    5 -
 test/files/neg/package-ob-case.flags               |    1 -
 test/files/neg/pat_unreachable.check               |   22 +-
 test/files/neg/pat_unreachable.flags               |    2 +-
 test/files/neg/patmat-classtag-compound.check      |    6 +
 test/files/neg/patmat-classtag-compound.flags      |    1 +
 test/files/neg/patmat-classtag-compound.scala      |   17 +
 test/files/neg/patmat-type-check.check             |   17 +-
 test/files/neg/patmat-type-check.scala             |   12 +-
 test/files/neg/patmatexhaust.check                 |   26 +-
 test/files/neg/patmatexhaust.scala                 |   22 +-
 test/files/neg/patternalts.scala                   |    2 +-
 test/files/neg/permanent-blindness.check           |   10 +-
 test/files/neg/protected-constructors.check        |   11 +-
 test/files/neg/protected-constructors.scala        |    2 +-
 test/files/neg/protected-static-fail.check         |    4 +-
 .../neg/quasiquotes-syntax-error-position.check    |   47 +
 .../neg/quasiquotes-syntax-error-position.scala    |   21 +
 .../neg/quasiquotes-unliftable-not-found.check     |    4 +
 .../neg/quasiquotes-unliftable-not-found.scala     |    5 +
 test/files/neg/raw-types-stubs.check               |   11 +
 test/files/neg/raw-types-stubs/M_1.java            |    3 +
 test/files/neg/raw-types-stubs/Raw_2.java          |    4 +
 test/files/neg/raw-types-stubs/S_3.scala           |    1 +
 test/files/neg/reflection-names-neg.check          |    5 +-
 test/files/neg/run-gadts-strict.check              |   21 +
 test/files/neg/run-gadts-strict.flags              |    1 +
 test/files/neg/run-gadts-strict.scala              |   18 +
 test/files/neg/saito.scala                         |    4 +-
 test/files/neg/sammy_restrictions.check            |   49 +
 .../sammy_restrictions.flags}                      |    0
 test/files/neg/sammy_restrictions.scala            |   45 +
 test/files/neg/sammy_wrong_arity.check             |   52 +
 .../sammy_wrong_arity.flags}                       |    0
 test/files/neg/sammy_wrong_arity.scala             |   22 +
 test/files/neg/sealed-final-neg.check              |    4 +
 test/files/neg/sealed-final-neg.flags              |    1 +
 test/files/neg/sealed-final-neg.scala              |   41 +
 test/files/neg/sealed-java-enums.check             |    4 +-
 test/files/neg/sensitive.scala                     |    5 +-
 test/files/neg/serialversionuid-not-const.check    |   10 +
 test/files/neg/serialversionuid-not-const.scala    |   16 +
 test/files/neg/spec-overrides.scala                |    2 +-
 test/files/neg/specification-scopes.check          |    2 +-
 test/files/neg/stmt-expr-discard.check             |    8 +-
 test/files/neg/stringinterpolation_macro-neg.check |  134 +-
 test/files/neg/stringinterpolation_macro-neg.scala |   45 +
 test/files/neg/structural.scala                    |   28 +-
 test/files/neg/switch.check                        |    8 +-
 test/files/neg/switch.scala                        |   16 +-
 test/files/neg/t0117.scala                         |    1 -
 test/files/neg/t0152.scala                         |    2 +-
 test/files/neg/t0218.check                         |    2 +-
 test/files/neg/t0218.scala                         |    4 +-
 test/files/neg/t0259.check                         |    4 +-
 test/files/neg/t0418.check                         |    5 +-
 test/files/neg/t0503.scala                         |    2 +-
 test/files/neg/t0764.check                         |    3 +-
 test/files/neg/t0764.scala                         |   45 +-
 test/files/neg/t0764b.check                        |   47 +
 test/files/neg/t0764b.scala                        |   63 +
 test/files/neg/t0816.scala                         |    2 +-
 test/files/neg/t1010.scala                         |    4 +-
 test/files/neg/t1011.check                         |    4 -
 test/files/neg/t1011.scala                         |  127 -
 test/files/neg/t1017.check                         |    4 -
 test/files/neg/t1017.scala                         |    4 -
 test/files/neg/t1112.check                         |    2 +-
 test/files/neg/t1112.scala                         |    6 +-
 test/files/neg/t112706A.check                      |    2 +-
 test/files/neg/t112706A.scala                      |    2 +-
 test/files/neg/t1181.check                         |    6 +-
 test/files/neg/t1183.scala                         |    4 +-
 test/files/neg/t1224.check                         |    2 +-
 test/files/neg/t1224.flags                         |    1 +
 test/files/neg/t1432.check                         |    8 +-
 test/files/neg/t1432.scala                         |    4 +-
 test/files/neg/t1477.scala                         |    2 +-
 test/files/neg/t1503.check                         |    8 +
 test/files/neg/{t6675.flags => t1503.flags}        |    0
 test/files/neg/t1503.scala                         |    8 +
 test/files/neg/t1523.scala                         |    4 +-
 test/files/neg/t1623.scala                         |    4 +-
 test/files/neg/t1705.scala                         |    2 +-
 test/files/neg/t1845.scala                         |   12 +-
 test/files/neg/t1878-typer.check                   |    4 -
 test/files/neg/t1878-typer.scala                   |    6 -
 test/files/neg/t1878.check                         |    4 +-
 test/files/neg/t1878.scala                         |    8 +-
 test/files/neg/t1909-object.check                  |    4 +
 test/files/neg/t1909-object.flags                  |    1 +
 test/files/neg/t1909-object.scala                  |   12 +
 test/files/neg/t1980.check                         |   12 +
 .../neg/{delayed-init-ref.flags => t1980.flags}    |    0
 test/files/neg/t1980.scala                         |    9 +
 test/files/neg/t2066.check                         |   21 +
 test/files/neg/t2066.scala                         |   70 +
 test/files/neg/t2066b.check                        |    5 +
 .../neg/t2066.scala => files/neg/t2066b.scala}     |    0
 test/files/neg/t2148.check                         |    2 +-
 test/files/neg/t2148.scala                         |    4 +-
 test/files/neg/t2421b.scala                        |    2 +-
 test/files/neg/t2441.scala                         |    4 +-
 test/files/neg/t2442.check                         |    8 +-
 test/files/neg/t2462b.check                        |    5 +-
 .../neg/{case-collision.flags => t2462b.flags}     |    0
 test/files/neg/t2462b.scala                        |    3 -
 test/files/neg/t2462c.check                        |    7 +
 .../neg/{case-collision.flags => t2462c.flags}     |    0
 test/files/neg/t2462c.scala                        |   25 +
 test/files/neg/t2641.check                         |   12 +-
 test/files/neg/t2641.scala                         |    6 +-
 test/files/neg/t2796.check                         |    7 +-
 test/files/neg/t2796.flags                         |    2 +-
 test/files/neg/t2796.scala                         |    3 +-
 test/files/neg/t284.check                          |    8 +-
 .../files/{disabled/t7020.flags => neg/t284.flags} |    0
 test/files/neg/t284.scala                          |    7 +-
 test/files/neg/t2870.scala                         |    6 +-
 test/files/neg/t2910.scala                         |    4 +-
 test/files/neg/t3015.check                         |    2 +-
 test/files/neg/t3015.scala                         |    2 +-
 test/files/neg/t3098.check                         |    4 +-
 test/files/neg/t3118.scala                         |    4 +-
 test/files/neg/t3160ambiguous.check                |    7 +
 test/files/neg/t3160ambiguous.scala                |   15 +
 test/files/neg/t3189.check                         |    2 +-
 test/files/neg/t3224.check                         |   28 +-
 test/files/neg/t3224.scala                         |   48 +-
 test/files/neg/t3234.check                         |    6 +-
 test/files/neg/t3234.flags                         |    2 +-
 test/files/neg/t3234.scala                         |    2 +-
 test/files/neg/t3346b.check                        |    4 +
 test/files/neg/t3346b.scala                        |   15 +
 test/files/neg/t3346c.check                        |    4 +
 test/files/neg/t3346c.scala                        |   61 +
 test/files/neg/t3346i.check                        |    7 +
 test/files/neg/t3346i.scala                        |   30 +
 test/files/neg/t3399.scala                         |    4 +-
 test/files/neg/t3403.scala                         |    2 +-
 test/files/neg/t3453.scala                         |    6 +-
 test/files/neg/t3507-old.scala                     |    2 +-
 test/files/neg/t3631.check                         |    4 -
 test/files/neg/t3653.check                         |    6 +-
 test/files/neg/t3683a.check                        |    4 +-
 test/files/neg/t3683b.scala                        |    2 +-
 test/files/neg/t3692-new.check                     |   17 +-
 test/files/neg/t3692-new.flags                     |    2 +-
 test/files/neg/t3692-old.check                     |   14 -
 test/files/neg/t3692-old.flags                     |    1 -
 test/files/neg/t3692-old.scala                     |   19 -
 test/files/neg/t3714-neg.check                     |    4 +-
 test/files/neg/t3714-neg.scala                     |    2 +-
 test/files/neg/t3736.scala                         |    4 +-
 test/files/neg/t3757/B.scala                       |    2 +-
 test/files/neg/t3776.check                         |    2 +-
 test/files/neg/t3776.scala                         |   10 +-
 test/files/neg/t3816.scala                         |    6 +-
 test/files/neg/t3871.check                         |    7 +
 test/files/neg/t3871.scala                         |   11 +
 test/files/neg/t3871b.check                        |   97 +
 test/files/neg/t3871b.scala                        |  127 +
 test/files/neg/t3873.check                         |    4 +-
 test/files/neg/t3873.scala                         |    2 +-
 test/files/neg/t3934.check                         |    2 +-
 test/files/neg/t3971.check                         |   21 +
 test/files/neg/t3971.scala                         |   12 +
 test/files/neg/t3977.check                         |    2 +-
 test/files/neg/t3977.scala                         |    6 +-
 test/files/neg/t3987.scala                         |    2 +-
 test/files/neg/t4079/t4079_1.scala                 |    6 +-
 test/files/neg/t409.check                          |    4 +-
 test/files/neg/t4098.check                         |    2 +-
 test/files/neg/t4098.scala                         |    6 +-
 test/files/neg/t4134.scala                         |    6 +-
 test/files/neg/t414.scala                          |    4 +-
 test/files/neg/t4158.check                         |   16 +-
 test/files/neg/t4174.scala                         |    2 +-
 test/files/neg/t418.check                          |    5 +-
 test/files/neg/t4196.scala                         |    4 +-
 test/files/neg/t421.check                          |    2 +-
 test/files/neg/t421.scala                          |    2 +-
 test/files/neg/t4217.check                         |    2 +-
 test/files/neg/t4217.scala                         |    2 +-
 test/files/neg/t4221.scala                         |    2 +-
 test/files/neg/t4271.scala                         |    4 +-
 test/files/neg/t4302.check                         |    4 +-
 test/files/neg/t4417.check                         |    2 +-
 test/files/neg/t4417.scala                         |    4 +-
 test/files/neg/t4425.check                         |   13 +-
 test/files/neg/t4425.scala                         |   10 +
 test/files/neg/t4425b.check                        |   49 +
 test/files/neg/t4425b.scala                        |   38 +
 test/files/neg/t4431.scala                         |    2 +-
 test/files/neg/t4440.check                         |   12 +-
 test/files/neg/t4457_1.scala                       |    2 +-
 test/files/neg/t4457_2.scala                       |    2 +-
 test/files/neg/t4460a.check                        |    4 +
 test/files/neg/t4460a.scala                        |    7 +
 test/files/neg/t4460b.check                        |    4 +
 test/files/neg/t4460b.scala                        |    9 +
 test/files/neg/t4460c.check                        |    7 +
 test/files/neg/t4460c.scala                        |    7 +
 test/files/neg/t4515.check                         |   16 +-
 test/files/neg/t4515.scala                         |    2 +-
 test/files/neg/t4537.check                         |    4 -
 test/files/neg/t4537/a.scala                       |    5 -
 test/files/neg/t4537/b.scala                       |    5 -
 test/files/neg/t4537/c.scala                       |    8 -
 test/files/neg/t4541b.scala                        |    2 +-
 test/files/neg/t4584.check                         |    6 +-
 test/files/neg/t4691_exhaust_extractor.check       |   10 +-
 test/files/neg/t4727.check                         |    8 +-
 test/files/neg/t4728.check                         |    7 +
 test/{pending/run => files/neg}/t4728.scala        |    0
 test/files/neg/t4749.check                         |   22 +-
 test/files/neg/t4749.scala                         |    2 +-
 test/files/neg/t4762.check                         |    8 +-
 test/files/neg/t4851.check                         |   20 +-
 test/files/neg/t4851.flags                         |    2 +-
 test/files/neg/t4851/J.java                        |    6 +-
 test/files/neg/t4851/J2.java                       |    4 +-
 test/files/neg/t4851/S.scala                       |    4 +-
 test/files/neg/t4877.scala                         |    2 +-
 test/files/neg/t4928.check                         |    2 +-
 test/files/neg/t512.check                          |    5 +-
 test/files/neg/t5120.scala                         |    2 +-
 test/files/neg/t5148.check                         |   14 +-
 test/files/neg/t5152.scala                         |   10 +-
 test/files/neg/t5182.check                         |    7 +
 .../neg/{case-collision.flags => t5182.flags}      |    0
 test/files/neg/t5182.scala                         |    5 +
 test/files/neg/t5189.check                         |    2 +-
 test/files/neg/t520.scala                          |    2 +-
 test/files/neg/t5352.check                         |    2 +-
 test/files/neg/t5352.scala                         |    2 +-
 test/files/neg/t5357.scala                         |    2 +-
 test/files/neg/t5426.check                         |   12 +-
 test/files/neg/t5426.scala                         |    4 +-
 test/files/neg/t5440.check                         |    4 +-
 test/files/neg/t545.check                          |    5 +-
 test/files/neg/t5455.scala                         |    4 +-
 test/files/neg/t5497.check                         |    2 +-
 test/files/neg/t5497.scala                         |    2 +-
 test/files/neg/t5529.check                         |    5 +-
 test/files/neg/t556.check                          |    9 +-
 test/files/neg/t5572.check                         |    7 +-
 test/files/neg/t5572.scala                         |    2 +-
 test/files/neg/t5578.check                         |    5 +-
 test/files/neg/t558.scala                          |    2 +-
 test/files/neg/t5580b.check                        |    6 +
 test/files/neg/t5580b.scala                        |   13 +
 test/files/neg/t563.scala                          |    2 +-
 test/files/neg/t5663-badwarneq.check               |   30 +-
 test/files/neg/t5689.check                         |    5 +-
 test/files/neg/t5689.scala                         |    2 +-
 test/files/neg/t5696.check                         |    2 +-
 test/files/neg/t5702-neg-bad-and-wild.check        |   16 +-
 test/files/neg/t5702-neg-bad-and-wild.scala        |    2 +-
 test/files/neg/t5702-neg-bad-xbrace.check          |    4 +-
 test/files/neg/t5702-neg-ugly-xbrace.check         |    2 +-
 test/files/neg/t5753.check                         |    3 +-
 test/files/neg/t5753/Impls_Macros_1.scala          |    4 +-
 test/files/neg/t5753/Test_2.scala                  |    4 +-
 test/files/neg/t576.scala                          |    8 +-
 test/files/neg/t5760-pkgobj-warn.check             |    4 -
 test/files/neg/t5761.check                         |    5 +-
 test/files/neg/t5762.check                         |   12 +-
 test/files/neg/t5830.check                         |    8 +-
 test/files/neg/t5845.check                         |    7 -
 test/files/neg/t588.check                          |   10 +-
 test/files/neg/t588.scala                          |    8 +-
 test/files/neg/t5903a.check                        |    4 +
 test/files/neg/t5903a/Macros_1.scala               |   28 +
 test/files/neg/t5903a/Test_2.scala                 |    6 +
 test/files/neg/t5903b.check                        |    6 +
 test/files/neg/t5903b/Macros_1.scala               |   23 +
 test/files/neg/t5903b/Test_2.scala                 |    6 +
 test/files/neg/t5903c.check                        |    4 +
 test/files/neg/t5903c/Macros_1.scala               |   26 +
 test/files/neg/t5903c/Test_2.scala                 |    6 +
 test/files/neg/t5903d.check                        |    4 +
 test/files/neg/t5903d/Macros_1.scala               |   23 +
 test/files/neg/t5903d/Test_2.scala                 |    6 +
 test/files/neg/t5903e.check                        |    4 +
 test/files/neg/t5903e/Macros_1.scala               |   25 +
 test/files/neg/t5903e/Test_2.scala                 |    6 +
 test/files/neg/t591.scala                          |   22 +-
 test/files/neg/t5956.check                         |   21 +-
 test/files/neg/t5956.scala                         |    4 +-
 test/files/neg/t5969.scala                         |    2 +-
 test/files/neg/t6011.check                         |   10 +-
 test/files/neg/t6048.check                         |   15 +-
 test/files/neg/t608.scala                          |    2 +-
 test/files/neg/t6083.check                         |   10 +
 test/files/neg/t6083.scala                         |    7 +
 test/files/neg/t6120.check                         |   20 +
 test/files/neg/t6120.flags                         |    1 +
 test/files/neg/t6120.scala                         |    7 +
 test/files/neg/t6123-explaintypes-macros.check     |   10 +
 .../neg/t6123-explaintypes-macros/BadMac_2.flags   |    1 +
 .../neg/t6123-explaintypes-macros/BadMac_2.scala   |    8 +
 .../neg/t6123-explaintypes-macros/Macros.flags     |    1 +
 .../neg/t6123-explaintypes-macros/Macros.scala     |   10 +
 test/files/neg/t6162-inheritance.check             |   16 +-
 test/files/neg/t6162-inheritance.scala             |   19 -
 test/files/neg/t6162-inheritance/defn.scala        |   10 +
 test/files/neg/t6162-inheritance/usage.scala       |   10 +
 test/files/neg/t6162-overriding.check              |    8 +-
 test/files/neg/t6231.check                         |    6 -
 test/files/neg/t6260-named.check                   |   13 +
 test/files/neg/t6260-named.scala                   |   15 +
 test/files/neg/t6260.check                         |   13 -
 test/files/neg/t6260c.check                        |    7 +
 test/files/neg/t6260c.scala                        |    4 +
 test/files/neg/t6264.check                         |    4 +-
 test/files/neg/t6276.check                         |   16 +-
 test/files/neg/t6289.check                         |   10 +
 .../neg/{case-collision.flags => t6289.flags}      |    0
 test/files/neg/t6289/J.java                        |    5 +
 test/files/neg/t6289/SUT_5.scala                   |    5 +
 test/files/neg/t6323a.check                        |    4 +-
 test/files/neg/t6355a.check                        |    7 +
 test/files/neg/t6355a.scala                        |   19 +
 test/files/neg/t6355b.check                        |   11 +
 test/files/neg/t6355b.scala                        |   17 +
 test/files/neg/t6375.check                         |   27 +
 .../neg/{case-collision.flags => t6375.flags}      |    0
 test/files/neg/t6375.scala                         |   67 +
 test/files/neg/t6385.check                         |    7 -
 test/files/neg/t6385.scala                         |   13 -
 test/files/neg/t6406-regextract.check              |    6 +
 test/files/neg/t6406-regextract.flags              |    1 +
 test/files/neg/t6406-regextract.scala              |    5 +
 test/files/neg/t6443c.check                        |    4 +-
 test/files/neg/t6446-additional.check              |   39 +
 test/files/neg/t6446-additional/ploogin_1.scala    |   31 +
 test/files/neg/t6446-additional/sample_2.flags     |    1 +
 test/files/neg/t6446-additional/sample_2.scala     |    6 +
 test/files/neg/t6446-additional/scalac-plugin.xml  |    4 +
 test/files/neg/t6446-list.check                    |    1 +
 test/files/neg/t6446-list/ploogin_1.scala          |   31 +
 test/files/neg/t6446-list/sample_2.flags           |    1 +
 test/files/neg/t6446-list/sample_2.scala           |    6 +
 test/files/neg/t6446-list/scalac-plugin.xml        |    4 +
 test/files/neg/t6446-missing.check                 |   38 +
 test/files/neg/t6446-missing/sample_2.flags        |    1 +
 test/files/neg/t6446-missing/sample_2.scala        |    6 +
 test/files/neg/t6446-missing/scalac-plugin.xml     |    4 +
 test/files/neg/t6446-show-phases.check             |   37 +
 test/files/neg/t6446-show-phases.flags             |    1 +
 test/files/neg/t6446-show-phases.scala             |    3 +
 test/files/neg/t6455.check                         |    4 +
 .../{ambiguous-float-dots2.flags => t6455.flags}   |    0
 test/files/neg/t6455.scala                         |    6 +
 test/files/neg/t6534.check                         |    7 -
 test/files/neg/t6539.check                         |    8 +-
 test/files/neg/t6539/Macro_1.scala                 |    4 +-
 test/files/neg/t6566a.check                        |    4 +
 test/files/neg/t6566a.scala                        |   17 +
 test/files/neg/t6566b.check                        |    4 +
 test/files/neg/t6566b.scala                        |   19 +
 test/files/neg/t6567.check                         |    8 +-
 test/files/neg/t6574.check                         |    7 +
 test/files/neg/t6574.scala                         |   10 +
 test/files/neg/t6601.check                         |    4 +
 .../neg/t6601/AccessPrivateConstructor_2.scala     |    3 +
 test/files/neg/t6601/PrivateConstructor_1.scala    |    1 +
 test/files/neg/t663.check                          |    6 +-
 test/files/neg/t6666.check                         |   16 +-
 test/files/neg/t6666.flags                         |    1 +
 test/files/neg/t6666.scala                         |    2 +-
 test/files/neg/t6666c.check                        |    2 +-
 test/files/neg/t6666c.flags                        |    1 +
 test/files/neg/t6666e.check                        |    2 +-
 test/files/neg/t6667.check                         |    3 +-
 test/files/neg/t6667.flags                         |    1 -
 test/files/neg/t6667b.check                        |    3 +-
 test/files/neg/t6667b.flags                        |    1 -
 test/files/neg/t667.check                          |    4 +-
 test/files/neg/t6675-old-patmat.check              |    4 -
 test/files/neg/t6675-old-patmat.flags              |    1 -
 test/files/neg/t6675-old-patmat.scala              |   13 -
 test/files/neg/t6675.check                         |    4 +-
 test/files/neg/t6675.flags                         |    2 +-
 test/files/neg/t6675b.check                        |   37 +
 test/files/neg/t6675b.flags                        |    1 +
 test/files/neg/t6675b.scala                        |   40 +
 test/files/neg/t6680a.check                        |   11 +
 test/files/neg/t6680a.flags                        |    1 +
 test/files/neg/t6680a.scala                        |   18 +
 test/files/neg/t6815.check                         |    5 +
 test/files/neg/t6815.scala                         |   17 +
 test/files/neg/t6829.check                         |   22 +-
 test/files/neg/t6844.check                         |    6 +
 test/files/neg/t6844.scala                         |    5 +
 test/files/neg/t6889.check                         |    7 +
 test/files/neg/t6889.scala                         |   18 +
 test/files/neg/t6902.check                         |   10 +-
 test/files/neg/t692.check                          |    2 +-
 test/files/neg/t692.scala                          |    6 +-
 test/files/neg/t6920.check                         |    6 +
 test/files/neg/t6920.scala                         |   10 +
 test/files/neg/t693.check                          |    2 +-
 test/files/neg/t693.scala                          |    2 +-
 test/files/neg/t6931.check                         |   10 +
 test/files/neg/t6931/Macros_1.scala                |   15 +
 test/files/neg/t6931/Test_2.scala                  |    4 +
 test/files/neg/t696.check                          |    9 +
 test/files/neg/{t696b.scala => t696.scala}         |    0
 test/files/neg/t6963a.check                        |    4 +-
 test/files/neg/t6963a.scala                        |    2 +-
 test/files/neg/t6963b.check                        |   13 -
 test/files/neg/t6963b.flags                        |    1 -
 test/files/neg/t6963b.scala                        |   20 -
 test/files/neg/t696a.check                         |    5 -
 test/files/neg/t696a.scala                         |    6 -
 test/files/neg/t696b.check                         |    9 -
 test/files/neg/t696b.flags                         |    1 -
 test/files/neg/t7007.check                         |    7 +
 test/files/neg/t7007.scala                         |   14 +
 test/files/neg/t7020.check                         |   19 +
 test/files/{disabled => neg}/t7020.flags           |    0
 test/files/{disabled => neg}/t7020.scala           |    0
 test/files/neg/t7110.check                         |    6 +
 .../{disabled/t7020.flags => neg/t7110.flags}      |    0
 test/files/neg/t7110.scala                         |    6 +
 test/files/neg/t712.check                          |    1 +
 test/files/neg/t715.scala                          |    2 +-
 test/files/neg/t7157.check                         |   73 +
 test/files/neg/t7157/Impls_Macros_1.scala          |   32 +
 test/files/neg/t7157/Test_2.scala                  |   63 +
 test/files/neg/t7166.check                         |    4 -
 test/files/neg/t7166/Impls_Macros_1.scala          |   26 -
 test/files/neg/t7166/Test_2.scala                  |    3 -
 test/files/neg/t7171.check                         |    9 +-
 test/files/neg/t7171b.check                        |   12 +-
 test/files/neg/t7185.check                         |    7 -
 test/files/neg/t7185.scala                         |    3 -
 test/files/neg/t7214neg.check                      |    4 +
 test/files/{run/t7214.scala => neg/t7214neg.scala} |    0
 test/files/neg/t7239.check                         |    4 +
 test/files/neg/t7239.scala                         |   12 +
 test/files/neg/t7285.check                         |   10 +-
 test/files/neg/t7290.check                         |   10 +-
 test/files/neg/t7292-deprecation.check             |   12 +
 test/files/neg/t7292-deprecation.flags             |    1 +
 test/files/neg/t7292-deprecation.scala             |    5 +
 test/files/neg/t7292-removal.check                 |   10 +
 test/files/neg/t7292-removal.flags                 |    1 +
 test/files/neg/t7292-removal.scala                 |    5 +
 test/files/neg/t7294.check                         |    6 +
 test/files/neg/t7294.flags                         |    1 +
 test/files/neg/t7294.scala                         |    5 +
 test/files/neg/t7294b.check                        |    6 +
 test/files/neg/t7294b.flags                        |    1 +
 test/files/neg/t7294b.scala                        |    1 +
 test/files/neg/t7324.check                         |    4 +
 test/files/neg/t7324.scala                         |   57 +
 test/files/neg/t7325.check                         |   12 +-
 test/files/neg/t7369.check                         |   12 +-
 test/files/neg/t7475c.check                        |    7 +
 test/files/neg/t7475c.scala                        |    9 +
 test/files/neg/t7475d.check                        |    7 +
 test/files/neg/t7475e.check                        |    4 +
 test/files/neg/t7475e.scala                        |   12 +
 test/files/neg/t7475f.check                        |   10 +
 test/files/neg/t7475f.scala                        |   28 +
 .../neg/t7494-after-terminal.check}                |    0
 .../files/neg/t7494-after-terminal/ThePlugin.scala |   31 +
 test/files/neg/t7494-after-terminal/sample_2.flags |    1 +
 test/files/neg/t7494-after-terminal/sample_2.scala |    6 +
 .../neg/t7494-after-terminal/scalac-plugin.xml     |    5 +
 .../neg/t7494-before-parser.check}                 |    0
 test/files/neg/t7494-before-parser/ThePlugin.scala |   32 +
 test/files/neg/t7494-before-parser/sample_2.flags  |    1 +
 test/files/neg/t7494-before-parser/sample_2.scala  |    6 +
 .../neg/t7494-before-parser}/scalac-plugin.xml     |    0
 test/files/neg/t7494-multi-right-after.check       |    1 +
 .../neg/t7494-multi-right-after/ThePlugin.scala    |   31 +
 .../neg/t7494-multi-right-after/sample_2.flags     |    1 +
 .../neg/t7494-multi-right-after/sample_2.scala     |    6 +
 .../neg/t7494-multi-right-after/scalac-plugin.xml  |    5 +
 test/files/neg/t7494-no-options.check              |   40 +
 test/files/neg/t7494-no-options/ploogin_1.scala    |   31 +
 test/files/neg/t7494-no-options/sample_2.flags     |    1 +
 test/files/neg/t7494-no-options/sample_2.scala     |    6 +
 test/files/neg/t7494-no-options/scalac-plugin.xml  |    4 +
 test/files/neg/t7494-right-after-before.check      |    1 +
 .../neg/t7494-right-after-before/ThePlugin.scala   |   31 +
 .../neg/t7494-right-after-before/sample_2.flags    |    1 +
 .../neg/t7494-right-after-before/sample_2.scala    |    6 +
 .../neg/t7494-right-after-before/scalac-plugin.xml |    5 +
 .../neg/t7494-right-after-terminal.check}          |    0
 .../neg/t7494-right-after-terminal/ThePlugin.scala |   32 +
 .../neg/t7494-right-after-terminal/sample_2.flags  |    1 +
 .../neg/t7494-right-after-terminal/sample_2.scala  |    6 +
 .../t7494-right-after-terminal/scalac-plugin.xml   |    5 +
 test/files/neg/t7501.check                         |    7 +
 test/files/neg/t7501/t7501_1.scala                 |   12 +
 test/files/neg/t7501/t7501_2.scala                 |    5 +
 test/files/neg/t7507.check                         |    2 +-
 test/files/neg/t7519-b.check                       |    4 +-
 test/files/neg/t7519-b/Use_2.scala                 |    2 +
 test/files/neg/t7519.check                         |    8 +-
 test/files/neg/t7605-deprecation.check             |   15 +
 test/files/neg/t7605-deprecation.flags             |    1 +
 test/files/neg/t7605-deprecation.scala             |    8 +
 test/files/neg/t7622-cyclic-dependency.check       |    1 +
 .../neg/t7622-cyclic-dependency/ThePlugin.scala    |   40 +
 .../neg/t7622-cyclic-dependency/sample_2.flags     |    1 +
 .../neg/t7622-cyclic-dependency/sample_2.scala     |    6 +
 .../neg/t7622-cyclic-dependency/scalac-plugin.xml  |    5 +
 test/files/neg/t7622-missing-dependency.check      |    2 +
 .../neg/t7622-missing-dependency/ThePlugin.scala   |   33 +
 .../neg/t7622-missing-dependency/sample_2.flags    |    1 +
 .../neg/t7622-missing-dependency/sample_2.scala    |    6 +
 .../neg/t7622-missing-dependency/scalac-plugin.xml |    5 +
 test/files/neg/t7622-missing-required.check        |    2 +
 test/files/neg/t7622-missing-required.flags        |    1 +
 test/files/neg/t7622-missing-required.scala        |    4 +
 test/files/neg/t7622-multi-followers.check         |    1 +
 .../neg/t7622-multi-followers/ThePlugin.scala      |   44 +
 .../files/neg/t7622-multi-followers/sample_2.flags |    1 +
 .../files/neg/t7622-multi-followers/sample_2.scala |    6 +
 .../neg/t7622-multi-followers/scalac-plugin.xml    |    5 +
 test/files/neg/t7629-view-bounds-deprecation.check |   11 +
 test/files/neg/t7629-view-bounds-deprecation.flags |    1 +
 test/files/neg/t7629-view-bounds-deprecation.scala |    4 +
 test/files/neg/t7669.check                         |    7 +
 .../neg/{case-collision.flags => t7669.flags}      |    0
 test/files/neg/t7669.scala                         |   13 +
 test/files/neg/t7694b.check                        |    7 -
 test/files/neg/t7715.check                         |   13 +
 test/files/neg/t7715.scala                         |   18 +
 test/files/neg/t7721.check                         |   27 +
 .../{disabled/t7020.flags => neg/t7721.flags}      |    0
 test/files/neg/t7721.scala                         |  140 +
 test/files/neg/t7756a.check                        |    7 +
 test/files/neg/t7756a.scala                        |   11 +
 test/files/neg/t7756b.check                        |    6 +
 .../neg/{case-collision.flags => t7756b.flags}     |    0
 test/files/neg/t7756b.scala                        |    5 +
 test/files/neg/t7757a.check                        |    4 +
 test/files/neg/t7757a.scala                        |    1 +
 test/files/neg/t7757b.check                        |    4 +
 test/files/neg/t7757b.scala                        |    2 +
 test/files/neg/t7783.check                         |   14 +-
 test/files/neg/t783.scala                          |    2 +-
 test/files/neg/t7834neg.check                      |   41 +
 test/files/neg/t7834neg.scala                      |   76 +
 test/files/neg/t7848-interp-warn.check             |   12 +
 ...ayed-init-ref.flags => t7848-interp-warn.flags} |    0
 test/files/neg/t7848-interp-warn.scala             |   18 +
 test/files/neg/t7850.check                         |    7 +
 test/files/neg/t7850.scala                         |   16 +
 test/files/neg/t7859.check                         |   19 +
 test/files/neg/t7859/A_1.scala                     |    5 +
 test/files/neg/t7859/B_2.scala                     |    9 +
 test/files/neg/t7870.check                         |    4 +
 test/files/neg/t7870.scala                         |    3 +
 test/files/neg/t7872.check                         |   10 +
 test/files/neg/t7872.scala                         |    9 +
 test/files/neg/t7872b.check                        |    7 +
 test/files/neg/t7872b.scala                        |   23 +
 test/files/neg/t7872c.check                        |   11 +
 test/files/neg/t7872c.scala                        |    8 +
 test/files/neg/t7877.check                         |    7 +
 test/files/neg/t7877.scala                         |   13 +
 test/files/neg/t7895.check                         |    4 +
 test/files/neg/t7895.scala                         |    6 +
 test/files/neg/t7895b.check                        |    7 +
 test/files/neg/t7895b.scala                        |    5 +
 test/files/neg/t7895c.check                        |   13 +
 test/files/neg/t7895c.scala                        |    3 +
 test/files/neg/t7897.check                         |    4 +
 test/files/neg/t7897.scala                         |   23 +
 test/files/neg/t7899.check                         |    6 +
 test/files/neg/t7899.scala                         |    7 +
 test/files/neg/t7967.check                         |    9 +
 test/files/neg/t7967.scala                         |    9 +
 test/files/neg/t798.scala                          |    2 +-
 test/files/neg/t7980.check                         |    4 +
 test/files/neg/t7980.scala                         |    8 +
 test/files/neg/t7984.check                         |    6 +
 .../{disabled/t7020.flags => neg/t7984.flags}      |    0
 test/files/neg/t7984.scala                         |    7 +
 test/files/neg/t8006.check                         |    6 +
 test/files/neg/t8006.scala                         |    8 +
 test/files/neg/t8015-ffa.check                     |    6 +
 test/files/neg/t8015-ffa.scala                     |    8 +
 test/files/neg/t8015-ffb.check                     |    6 +
 .../{delayed-init-ref.flags => t8015-ffb.flags}    |    0
 test/files/neg/t8015-ffb.scala                     |   11 +
 test/files/neg/t8024.check                         |    6 +
 test/files/neg/t8024.scala                         |   14 +
 test/files/neg/t8024b.check                        |    6 +
 test/files/neg/t8024b.scala                        |   17 +
 test/files/neg/t8035-deprecated.check              |   21 +
 test/files/neg/t8035-deprecated.flags              |    1 +
 test/files/neg/t8035-deprecated.scala              |   10 +
 test/files/neg/t8035-removed.check                 |   16 +
 test/files/neg/t8035-removed.flags                 |    1 +
 test/files/neg/t8035-removed.scala                 |   10 +
 test/files/neg/t8072.check                         |    4 +
 test/files/neg/t8072.scala                         |    6 +
 test/files/neg/t8104.check                         |    4 +
 test/files/neg/t8104/Macros_1.scala                |   11 +
 test/files/neg/t8104/Test_2.scala                  |   21 +
 test/files/neg/t8104a.check                        |    4 -
 test/files/neg/t8104a/Macros_1.scala               |   23 -
 test/files/neg/t8104a/Test_2.scala                 |   20 -
 test/files/neg/t8104b.check                        |    4 -
 test/files/neg/t8104b/Macros_1.scala               |   23 -
 test/files/neg/t8104b/Test_2.scala                 |   24 -
 test/files/neg/t8143a.check                        |    5 +
 test/files/neg/t8143a.scala                        |   15 +
 test/files/neg/t8157.check                         |    4 +
 test/files/neg/t8157.scala                         |    4 +
 test/files/neg/t8158.check                         |    4 +
 test/files/neg/t8158/Macros_1.scala                |   34 +
 test/files/neg/t8158/Test_2.scala                  |   14 +
 test/files/neg/t8177a.check                        |    6 +
 test/files/neg/t8177a.scala                        |    6 +
 test/files/neg/t8182.check                         |   22 +
 test/files/neg/t8182.scala                         |   18 +
 test/files/neg/t8207.check                         |    7 +
 test/files/neg/t8207.scala                         |    3 +
 test/files/neg/t8219-any-any-ref-equals.check      |   10 +
 test/files/neg/t8219-any-any-ref-equals.scala      |    8 +
 test/files/neg/t8228.check                         |    4 +
 test/files/neg/t8228.scala                         |    7 +
 test/files/neg/t8229.check                         |    4 +
 test/files/neg/t8229.scala                         |    6 +
 test/files/neg/t8237-default.check                 |   13 +
 test/files/neg/t8237-default.scala                 |   29 +
 test/files/neg/t8244.check                         |    4 +
 test/files/neg/t8244/Raw_1.java                    |    4 +
 test/files/neg/t8244/Test_2.scala                  |   12 +
 test/files/neg/t8244b.check                        |    4 +
 test/files/neg/t8244b.scala                        |   18 +
 test/files/neg/t8244c.check                        |    4 +
 test/files/neg/t8244c.scala                        |   18 +
 test/files/neg/t8244e.check                        |    4 +
 test/files/neg/t8244e/Raw.java                     |    4 +
 test/files/neg/t8244e/Test.scala                   |   12 +
 test/files/neg/t8265.check                         |    6 +
 test/files/neg/t8265.flags                         |    1 +
 test/files/neg/t8265.scala                         |    1 +
 test/files/neg/t8266-invalid-interp.check          |   10 +
 test/files/neg/t8266-invalid-interp.scala          |    9 +
 test/files/neg/t8300-overloading.check             |    7 +
 test/files/neg/t8300-overloading.scala             |   16 +
 test/files/neg/t836.scala                          |    2 +-
 test/files/neg/t8372.check                         |    7 +
 test/files/neg/t8372.scala                         |   10 +
 test/files/neg/t8376.check                         |    7 +
 test/files/neg/t8376/J.java                        |    4 +
 test/files/neg/t8376/S.scala                       |    4 +
 test/files/neg/t8431.check                         |   27 +
 test/files/neg/t8431.scala                         |   63 +
 test/files/neg/t856.check                          |    2 +-
 test/files/neg/t856.scala                          |    2 +-
 test/files/neg/t876.scala                          |    6 +-
 test/files/neg/t877.check                          |    4 +-
 test/files/neg/t877.scala                          |    2 +-
 test/files/neg/t935.check                          |    5 +-
 test/files/neg/t944.scala                          |    6 +-
 test/files/neg/t997.check                          |    9 +-
 test/files/neg/t997.scala                          |    2 +-
 test/files/neg/tailrec-2.check                     |    2 +-
 test/files/neg/tailrec.scala                       |   16 +-
 test/files/neg/tcpoly_infer_ticket1162.scala       |    4 +-
 test/files/neg/tcpoly_ticket2101.scala             |    8 +-
 test/files/neg/tcpoly_typealias.scala              |    6 +-
 test/files/neg/tcpoly_variance_enforce.scala       |   16 +-
 test/files/neg/type-diagnostics.scala              |    4 +-
 test/files/neg/typeerror.check                     |    7 +-
 test/files/neg/unchecked-abstract.check            |   20 +-
 test/files/neg/unchecked-impossible.check          |    8 +-
 test/files/neg/unchecked-knowable.check            |    8 +-
 test/files/neg/unchecked-refinement.check          |   12 +-
 test/files/neg/unchecked-suppress.check            |   10 +-
 test/files/neg/unchecked.check                     |   16 +-
 test/files/neg/unchecked2.check                    |   32 +-
 test/files/neg/unchecked3.check                    |   31 +-
 test/files/neg/unicode-unterminated-quote.check    |    4 +-
 test/files/neg/unit-returns-value.check            |   14 +-
 test/files/neg/unit-returns-value.scala            |   23 +-
 test/files/neg/unreachablechar.check               |   10 +-
 test/files/neg/unreachablechar.flags               |    2 +-
 test/files/neg/valueclasses-doubledefs.check       |    4 +-
 test/files/neg/valueclasses-pavlov.check           |    4 +-
 test/files/neg/valueclasses.check                  |   43 +-
 test/files/neg/valueclasses.scala                  |   13 +-
 test/files/neg/varargs.scala                       |   12 +-
 test/files/neg/variances-refinement.check          |   22 +
 test/files/neg/variances-refinement.scala          |   40 +
 test/files/neg/variances.check                     |    8 +-
 test/files/neg/variances2.check                    |  229 +
 test/files/neg/variances2.scala                    |  303 ++
 test/files/neg/viewtest.scala                      |    6 +-
 test/files/neg/virtpatmat_reach_null.check         |    4 +-
 .../neg/virtpatmat_reach_sealed_unsealed.check     |   12 +-
 test/files/neg/virtpatmat_unreach_select.check     |    4 +-
 test/files/neg/volatile_no_override.check          |    5 +
 test/files/neg/volatile_no_override.scala          |   14 +
 test/files/neg/warn-inferred-any.check             |   12 +
 test/files/neg/warn-inferred-any.flags             |    1 +
 test/files/neg/warn-inferred-any.scala             |   19 +
 test/files/neg/warn-unused-imports.check           |   33 +
 test/files/neg/warn-unused-imports.flags           |    1 +
 test/files/neg/warn-unused-imports.scala           |  125 +
 test/files/neg/warn-unused-privates.check          |   66 +
 test/files/neg/warn-unused-privates.flags          |    1 +
 test/files/neg/warn-unused-privates.scala          |  105 +
 test/files/neg/wellkinded_wrongarity.check         |    4 +-
 test/files/neg/wellkinded_wrongarity.scala         |    2 +-
 test/files/neg/xmltruncated6.check                 |    2 +-
 test/files/pos/CustomGlobal.scala                  |    2 +-
 test/files/pos/List1.scala                         |    6 +-
 test/files/pos/MailBox.scala                       |    4 +-
 test/files/pos/SI-7638.scala                       |   10 +-
 test/files/pos/Transactions.scala                  |   17 +-
 test/files/pos/annotated-original/M_1.scala        |    4 +-
 .../pos/annotated-treecopy/Impls_Macros_1.scala    |   17 +-
 test/files/pos/annotations.scala                   |    2 +-
 test/files/pos/annotations2.scala                  |   31 +
 test/files/pos/array-interfaces.scala              |    2 +-
 test/files/pos/arrays2.scala                       |    4 +-
 .../attachments-typed-another-ident.flags}         |    0
 .../attachments-typed-another-ident/Impls_1.scala  |   18 +
 .../Macros_Test_2.scala                            |    5 +
 .../pos/attachments-typed-ident/Impls_1.scala      |   11 +-
 test/files/pos/attributes.scala                    |    2 +
 test/files/pos/bcode_throw_null/TN.scala           |    7 +
 test/files/pos/bounds.scala                        |    6 +-
 test/files/pos/builders.scala                      |   10 +-
 test/files/pos/chang/Test.scala                    |    2 +-
 test/files/pos/channels.scala                      |    5 +-
 test/files/pos/clsrefine.scala                     |    4 +-
 test/files/pos/collectGenericCC.scala              |    6 +-
 test/files/pos/context.scala                       |    6 +-
 test/files/pos/cycle-jsoup.flags                   |    1 +
 test/files/pos/cycle-jsoup.scala                   |    5 +
 test/files/pos/cycle.flags                         |    1 +
 test/files/pos/cycle/J_1.java                      |   16 +
 test/files/pos/cycle/X_2.scala                     |    3 +
 test/files/pos/cyclics-pos.scala                   |   14 +-
 test/files/pos/debug-reset-local-attrs.flags       |    1 +
 test/files/pos/debug-reset-local-attrs.scala       |    1 +
 test/files/pos/delambdafy-lambdalift.scala         |    8 +
 test/files/pos/delambdafy-patterns.scala           |   15 +
 test/files/pos/delambdafy_t6260_method.check       |   13 +
 test/files/pos/delambdafy_t6260_method.flags       |    1 +
 .../delambdafy_t6260_method.scala}                 |    0
 test/files/pos/depmet_implicit_chaining_zw.scala   |    6 +-
 test/files/pos/depmet_implicit_norm_ret.scala      |   20 +-
 .../files/pos/depmet_implicit_oopsla_session.scala |   12 +-
 .../pos/depmet_implicit_oopsla_session_2.scala     |    8 +-
 .../depmet_implicit_oopsla_session_simpler.scala   |    6 +-
 .../files/pos/depmet_implicit_oopsla_zipwith.scala |    6 +-
 test/files/pos/depmet_implicit_tpbetareduce.scala  |    4 +-
 test/files/pos/dotless-targs.scala                 |    9 +
 test/files/pos/elidable-tparams.scala              |    2 +-
 test/files/pos/erasure-nsquared.scala              |   35 +
 test/{pending => files}/pos/exhaust_2.scala        |    0
 .../pos/existential-java-case-class/Client.scala   |    3 +
 test/files/pos/existential-java-case-class/J.java  |    1 +
 test/files/pos/existentials.scala                  |    2 +-
 test/files/pos/exponential-spec.scala              |    4 +-
 test/files/pos/extractor-types.scala               |   30 +
 test/files/pos/gadt-gilles.scala                   |    2 +-
 test/files/pos/gadts2.scala                        |    6 +-
 test/files/pos/gen-traversable-methods.scala       |    4 +-
 test/files/pos/generic-sigs.scala                  |    4 +-
 test/files/pos/gosh.scala                          |   16 +-
 test/files/pos/gui.scala                           |    8 +-
 test/files/pos/hk-infer.scala                      |    2 +-
 test/files/pos/hkarray.scala                       |    2 +-
 test/files/pos/hkrange.scala                       |    2 +-
 test/files/pos/imp2-pos.scala                      |    2 +-
 test/files/pos/implicit-anyval-2.10.flags          |    1 +
 test/files/pos/implicit-anyval-2.10.scala          |    3 +
 test/files/pos/implicit-infix-ops.scala            |    6 +-
 test/files/pos/implicits-new.scala                 |    8 +-
 test/files/pos/implicits-old.scala                 |   40 +-
 test/files/pos/imports-pos.scala                   |    4 +-
 test/files/pos/infer2-pos.scala                    |    3 +-
 test/files/pos/inferbroadtype.scala                |    2 +-
 test/files/pos/infersingle.scala                   |   51 +-
 test/files/pos/inliner2.scala                      |    8 +-
 test/files/pos/java-access-pos/J.java              |    4 +-
 test/files/pos/java-access-pos/S1.scala            |   10 +-
 .../files/pos/javaConversions-2.10-ambiguity.scala |   10 +
 .../pos/javaConversions-2.10-regression.scala      |    6 +-
 test/files/pos/javaReadsSigs/fromjava.java         |    8 +-
 test/files/pos/kinds.scala                         |   13 +
 test/files/pos/lambdalift.scala                    |    2 +-
 test/files/pos/liftcode_polymorphic.scala          |    2 +-
 test/files/pos/list-optim-check.flags              |    1 +
 test/files/pos/list-optim-check.scala              |   21 +
 test/files/pos/listpattern.scala                   |    2 +-
 test/files/pos/lookupswitch.scala                  |    1 -
 test/files/pos/looping-jsig.scala                  |    8 +-
 test/files/pos/lub-dealias-widen.scala             |    2 +-
 .../pos/macro-bundle-disambiguate-bundle.check}    |    0
 .../pos/macro-bundle-disambiguate-bundle.scala     |   14 +
 .../pos/macro-bundle-disambiguate-nonbundle.check} |    0
 .../pos/macro-bundle-disambiguate-nonbundle.scala  |   14 +
 .../macro-implicit-invalidate-on-error.check}      |    0
 .../pos/macro-implicit-invalidate-on-error.scala   |   25 +
 test/files/pos/matchStarlift.scala                 |    7 -
 test/files/pos/michel6.scala                       |    2 +-
 test/files/pos/needstypeearly.scala                |    4 +-
 test/files/pos/nothing_manifest_disambig-old.scala |    6 +-
 test/files/pos/nullary.scala                       |    4 +-
 test/files/pos/nullary_poly.scala                  |    4 +-
 test/files/pos/optmatch.scala                      |   33 +
 test/files/pos/overloaded-unapply.scala            |    8 +
 test/files/pos/override-object-yes.scala           |    4 +-
 test/files/pos/overzealous-assert-genbcode.scala   |   10 +
 .../package-ob-case.flags}                         |    0
 .../package-ob-case/A_1.scala}                     |    0
 .../package-ob-case/B_2.scala}                     |    0
 test/files/pos/partialfun.scala                    |    2 +-
 test/files/pos/pat_gilles.scala                    |    2 +-
 test/files/pos/patmat-extract-tparam.scala         |   13 +
 test/files/pos/patmat.scala                        |    4 +-
 test/files/pos/private-types-after-typer.scala     |    9 +
 test/files/pos/propagate.scala                     |    1 -
 .../reflection-compat-api-universe.check}          |    0
 .../files/pos/reflection-compat-api-universe.scala |  136 +
 .../t1143.check => pos/reflection-compat-c.check}  |    0
 test/files/pos/reflection-compat-c.scala           |  139 +
 .../reflection-compat-macro-universe.check}        |    0
 .../pos/reflection-compat-macro-universe.scala     |  177 +
 .../t2104.check => pos/reflection-compat-ru.check} |    0
 test/files/pos/reflection-compat-ru.scala          |  135 +
 test/files/pos/relax_implicit_divergence.scala     |    6 +-
 test/files/pos/return_thistype.scala               |    6 +-
 .../interpolation.flags => pos/sammy_poly.flags}   |    0
 test/files/pos/sammy_poly.scala                    |    7 +
 .../interpolation.flags => pos/sammy_scope.flags}  |    0
 test/files/pos/sammy_scope.scala                   |    8 +
 .../interpolation.flags => pos/sammy_single.flags} |    0
 test/files/pos/sammy_single.scala                  |    9 +
 .../interpolation.flags => pos/sammy_twice.flags}  |    0
 test/files/pos/sammy_twice.scala                   |    9 +
 test/files/pos/scala-singleton.scala               |   14 +-
 test/files/pos/scoping1.scala                      |    2 +-
 test/files/pos/sealed-final.flags                  |    1 +
 test/files/pos/sealed-final.scala                  |   14 +
 test/files/pos/selftails.scala                     |    6 +-
 test/files/pos/seq-ordering.scala                  |    2 +-
 test/files/pos/signatures/Test.java                |    2 +-
 test/files/pos/signatures/sig.scala                |    2 +-
 test/files/pos/simple-exceptions.scala             |    2 +-
 test/files/pos/spec-Function1.scala                |    6 +-
 test/files/pos/spec-annotations.scala              |    2 +-
 test/files/pos/spec-arrays.scala                   |   63 +-
 test/files/pos/spec-asseenfrom.scala               |    6 +-
 test/files/pos/spec-cyclic.scala                   |   10 +-
 test/files/pos/spec-doubledef-new.scala            |    6 +-
 test/files/pos/spec-doubledef-old.scala            |    6 +-
 test/files/pos/spec-funs.scala                     |   11 +-
 test/files/pos/spec-params-old.scala               |    6 +-
 test/files/pos/spec-sealed.scala                   |    8 +-
 test/files/pos/spec-short.scala                    |    6 +-
 test/files/pos/spec-sparsearray-new.scala          |   16 +-
 test/files/pos/spec-sparsearray-old.scala          |   14 +-
 test/files/pos/spec-t3497.scala                    |    2 +-
 test/files/pos/spec-tailcall.scala                 |    4 +-
 test/files/pos/spec-traits.scala                   |   12 +-
 test/files/pos/spec-vector.scala                   |    2 +-
 test/files/pos/spec.scala                          |   10 +-
 test/files/pos/specializes-sym-crash.scala         |    6 +-
 test/files/pos/strings.scala                       |    2 +-
 test/files/pos/sudoku.scala                        |   10 +-
 test/files/pos/super.cmds                          |    2 -
 test/files/pos/super/Super_1.java                  |    2 +-
 .../switch-small.flags}                            |    0
 test/files/pos/t0031.scala                         |    6 +-
 test/files/pos/t0064.scala                         |    2 +-
 test/files/pos/t0066.scala                         |    2 +-
 test/files/pos/t0069.scala                         |    1 -
 test/files/pos/t0227.scala                         |    4 +-
 test/files/pos/t0288/Foo.scala                     |    2 +-
 test/files/pos/t0288/Outer.java                    |    2 +-
 test/files/pos/t0301.scala                         |    2 +-
 test/files/pos/t0305.scala                         |    2 +-
 test/files/pos/t0422.scala                         |   17 -
 test/files/pos/t0438.scala                         |    4 +-
 test/files/pos/t0453.scala                         |    2 +-
 test/files/pos/t0599.scala                         |    2 +-
 test/files/pos/t0625.scala                         |    2 +-
 test/files/pos/t0646.scala                         |   21 -
 test/files/pos/t0770.scala                         |    2 +-
 test/files/pos/t0774/unrelated.scala               |    4 +-
 test/files/pos/t0786.scala                         |   12 +-
 test/files/pos/t0851.scala                         |   14 +
 test/files/pos/t0872.scala                         |    8 +
 test/files/pos/t1000.scala                         |    2 +-
 test/files/pos/t1014.scala                         |    5 +-
 test/files/pos/t1029.cmds                          |    2 -
 test/files/pos/t1035.scala                         |   12 +-
 test/files/pos/t1048.scala                         |    3 +-
 test/files/pos/t1059.scala                         |   28 -
 test/files/pos/t1071.scala                         |    2 +-
 test/files/pos/t1090.scala                         |    2 +-
 test/files/pos/{t1107.scala => t1107a.scala}       |    0
 test/files/pos/t1107b/O.scala                      |    4 +-
 test/files/pos/t1107b/T.scala                      |    2 +-
 test/files/pos/t1123.scala                         |    2 +-
 test/files/pos/t1133.scala                         |    6 +-
 test/files/pos/t1164.scala                         |   28 +-
 test/files/pos/t1168.scala                         |    2 +-
 test/files/pos/t1203.scala                         |    7 -
 test/files/pos/t1203a.scala                        |   13 +
 test/files/pos/t1210a.scala                        |    4 +-
 test/files/pos/t122.scala                          |    2 +-
 test/files/pos/t1230/S.scala                       |    2 +-
 test/files/pos/t1231/S.scala                       |    2 +-
 test/files/pos/t1236.scala                         |    2 +-
 test/files/pos/t1237.scala                         |    6 +-
 test/files/pos/t1254/t1254.java                    |    2 +-
 test/files/pos/t1263/test.scala                    |    2 +-
 test/files/pos/t1272.scala                         |    4 +-
 test/files/pos/t1292.scala                         |    2 +-
 test/files/pos/t1318.scala                         |    2 +-
 test/files/pos/t1357.scala                         |    2 +-
 test/files/pos/t1385.scala                         |    4 +-
 test/files/pos/t1439.flags                         |    2 +-
 test/files/pos/t1480.scala                         |    4 +-
 test/files/pos/t1560.scala                         |    8 +-
 test/files/pos/t1565.scala                         |    2 +-
 test/files/pos/t1591b.scala                        |    6 +-
 test/files/pos/t1626.scala                         |    4 -
 test/files/pos/t1648.scala                         |    4 -
 test/files/pos/t1711/Seq.scala                     |    2 +-
 test/files/pos/t1722-A.scala                       |    4 +-
 test/files/pos/t1722/Test.scala                    |    2 +-
 test/files/pos/t1722/Top.scala                     |    4 +-
 test/files/pos/t1745/J.java                        |    6 +-
 test/{pending => files}/pos/t1751/A1_2.scala       |    0
 test/{pending => files}/pos/t1751/A2_1.scala       |    0
 .../{pending => files}/pos/t1751/SuiteClasses.java |    0
 test/files/pos/t1756.scala                         |   12 +-
 test/files/pos/t1761.scala                         |   10 -
 test/{pending => files}/pos/t1782/Ann.java         |    0
 test/{pending => files}/pos/t1782/Days.java        |    0
 .../pos/t1782/ImplementedBy.java                   |    0
 test/{pending => files}/pos/t1782/Test_1.scala     |    0
 test/files/pos/t1786-counter.scala                 |   38 +
 test/files/pos/t1786-cycle.scala                   |   57 +
 test/files/pos/t1798.scala                         |    2 +-
 test/files/pos/t1832.scala                         |    2 +-
 test/files/pos/t1840/J.java                        |    4 +-
 test/files/pos/t1909.scala                         |    8 -
 test/files/pos/t1909b-pos.scala                    |    6 -
 test/files/pos/t1942.cmds                          |    2 -
 test/files/pos/t1957.scala                         |    2 +-
 test/files/pos/t1974.scala                         |    6 +-
 test/files/pos/t2023.scala                         |    4 +-
 test/files/pos/t2060.scala                         |    2 +-
 test/files/pos/t2066-2.10-compat.flags             |    1 +
 test/files/pos/t2066-2.10-compat.scala             |   71 +
 test/files/pos/t2066.scala                         |   25 +
 test/files/pos/t2081.scala                         |    2 +-
 test/files/pos/t2082.scala                         |   16 +-
 test/files/pos/t2130-2.scala                       |    2 +-
 test/files/pos/t2133.scala                         |    2 +-
 test/files/pos/t2168.scala                         |    2 -
 test/files/pos/t2171.scala                         |    2 +-
 test/files/pos/t2261.scala                         |    2 +-
 test/files/pos/t2281.scala                         |   41 -
 test/files/pos/t2305.scala                         |    4 +-
 test/files/pos/t2310.scala                         |   12 +-
 test/files/pos/t2331.scala                         |    4 +-
 test/files/pos/t2399.scala                         |    6 +-
 test/files/pos/t2413/TestScalac.scala              |    2 +-
 test/files/pos/t2421.scala                         |   14 +-
 test/files/pos/t2421_delitedsl.scala               |   10 +-
 test/files/pos/t2421b_pos.scala                    |    2 +-
 test/files/pos/t2429.scala                         |   10 +-
 test/files/pos/t2435.scala                         |    2 +-
 test/files/pos/t2444.scala                         |    6 +-
 test/files/pos/t2464.cmds                          |    3 -
 test/files/pos/t2464/ScalaOne_1.scala              |    2 +-
 test/files/pos/t247.scala                          |    6 +-
 test/files/pos/t2484.scala                         |    4 +-
 test/files/pos/t2504.scala                         |    2 +-
 test/files/pos/t2545.scala                         |    4 +-
 test/files/pos/t2569/Child.scala                   |    6 +-
 test/files/pos/t2569/Parent.java                   |    6 +-
 test/files/pos/t261-ab.scala                       |    9 -
 test/files/pos/t261-ba.scala                       |    9 -
 test/files/pos/t2613.scala                         |   11 +
 test/files/pos/t262.scala                          |    4 +-
 test/files/pos/t2665.scala                         |    2 +-
 test/files/pos/t2669.scala                         |    2 +-
 test/files/pos/t2691.scala                         |    2 +-
 test/files/pos/t2698.scala                         |    6 +-
 test/files/pos/t2726.cmds                          |    2 -
 test/files/pos/t2726/SQLBuilder_1.scala            |    4 +-
 test/files/pos/t2797.scala                         |    4 +-
 test/files/pos/t2910.scala                         |    6 +-
 test/files/pos/t2913.scala                         |    6 +-
 test/files/pos/t2939.scala                         |    4 +-
 test/{pending => files}/pos/t294/Ann.java          |    0
 test/{pending => files}/pos/t294/Ann2.java         |    0
 test/{pending => files}/pos/t294/Test_1.scala      |    0
 test/{pending => files}/pos/t294/Test_2.scala      |    0
 test/files/pos/t2940/Error.scala                   |    4 +-
 test/files/pos/t2994a.scala                        |    4 +-
 test/files/pos/t3020.scala                         |    2 +-
 test/files/pos/t3079.scala                         |    4 +-
 test/files/pos/t3106.scala                         |    2 +-
 test/files/pos/t3108.scala                         |    5 -
 test/files/pos/t3136.scala                         |    2 +-
 test/files/pos/t3152.scala                         |   10 +-
 test/files/pos/t3160.scala                         |    6 +
 test/files/pos/t3174b.scala                        |    6 +-
 test/files/pos/t3175-pos.scala                     |    6 +-
 test/files/pos/t3177.scala                         |   14 +-
 test/files/pos/t3252.scala                         |    6 +-
 test/files/pos/t3274.scala                         |    6 +-
 test/files/pos/t3312.scala                         |    2 +-
 test/files/pos/t3349/AbstractTupleSet.java         |    2 +-
 test/files/pos/t3349/Test.scala                    |    4 +-
 test/files/pos/t3363-new.scala                     |    4 +-
 test/files/pos/t3363-old.scala                     |    2 +-
 test/files/pos/t3411.scala                         |    2 +-
 test/files/pos/t3417.scala                         |   11 -
 test/files/pos/t342.scala                          |    8 -
 test/files/pos/t3429/A.scala                       |    4 +-
 test/files/pos/t3430.scala                         |    2 +-
 test/files/pos/t344.scala                          |    4 +-
 test/files/pos/t3440.scala                         |   10 +-
 test/files/pos/t3452f.scala                        |   10 +
 test/files/pos/t3477.scala                         |    4 +-
 test/files/pos/t3521/DoubleValue.java              |    2 +-
 test/files/pos/t3528.scala                         |    2 +-
 test/files/pos/t3568.scala                         |    2 +-
 test/files/pos/t3578.scala                         |    2 +-
 test/files/pos/t3582.scala                         |    2 +-
 test/files/{neg => pos}/t3631.scala                |    0
 test/files/pos/t3636.scala                         |    4 +-
 test/files/pos/t3670.scala                         |    2 +-
 test/files/pos/t3671.scala                         |    2 +-
 test/files/pos/t3688-redux.scala                   |    8 -
 test/files/pos/t3731.scala                         |    4 +-
 test/files/pos/t3837.scala                         |    4 +-
 test/files/pos/t3856.scala                         |    2 +-
 test/files/pos/t3864/tuples_1.scala                |   36 +-
 test/files/pos/t3866.scala                         |    2 +-
 test/files/pos/t3880.scala                         |    2 +-
 test/files/pos/t3883.scala                         |    8 +-
 test/files/pos/t3898.scala                         |    2 +-
 test/files/pos/t3927.scala                         |    4 +-
 test/files/pos/t3936/BlockingQueue.java            |    3 +
 test/files/pos/t3936/Queue.java                    |    2 +
 test/files/pos/t3936/Test.scala                    |    4 +
 test/files/pos/t3938/Parent.java                   |    2 +-
 test/files/pos/t3938/UseParent.scala               |    2 +-
 test/files/pos/t3943/Client_2.scala                |    7 +
 test/files/pos/t3943/Outer_1.java                  |   14 +
 test/files/pos/t3972.scala                         |    2 +-
 test/files/pos/t4020.scala                         |    6 +-
 test/files/pos/t4070.scala                         |    4 +-
 test/files/pos/t4202.scala                         |    2 +-
 test/files/pos/t422.scala                          |   17 -
 test/files/pos/t4220.scala                         |    2 +-
 test/files/pos/t4243.scala                         |   10 +-
 test/files/pos/t4266.scala                         |   12 +-
 test/files/pos/t4269.scala                         |    6 +-
 test/files/pos/t4273.scala                         |    4 +-
 test/files/pos/t4275.scala                         |    2 +-
 test/files/pos/t430-feb09.scala                    |    4 +-
 test/files/pos/t4351.scala                         |   20 -
 test/files/pos/t4365/a_1.scala                     |   18 +
 test/files/pos/t4365/b_1.scala                     |   24 +
 test/files/pos/t443.scala                          |    8 +-
 test/files/pos/t4432.scala                         |    4 +-
 test/files/pos/t4457_1.scala                       |    2 +-
 test/files/pos/t4501.scala                         |    2 +-
 test/files/pos/t4579.scala                         |   12 +-
 test/files/pos/t460.scala                          |    6 +-
 test/files/pos/t4603/S.scala                       |    2 +-
 test/{pending => files}/pos/t4649.flags            |    0
 test/{pending => files}/pos/t4649.scala            |    0
 test/files/pos/t4716.scala                         |    2 +-
 test/files/pos/t4717.scala                         |    4 +-
 test/files/pos/t4744.flags                         |    1 +
 test/files/pos/t4744/Bar.scala                     |    1 +
 test/files/pos/t4744/Foo.java                      |    1 +
 test/files/pos/t4760.scala                         |    2 +-
 test/{pending => files}/pos/t4786.scala            |    0
 test/files/pos/t4840.scala                         |    2 +-
 test/files/pos/t4853.scala                         |    2 +-
 test/files/pos/t4859.scala                         |   17 +
 test/files/pos/t4970b.scala                        |   32 +
 test/files/pos/t5022.scala                         |   22 +
 test/files/pos/t5031_2.scala                       |    2 +-
 test/files/pos/t5120.scala                         |   10 +-
 test/files/pos/t5127.scala                         |    2 +-
 test/files/pos/t5156.scala                         |    4 +-
 test/files/pos/t516.scala                          |    3 +-
 test/files/pos/t5165b/TestAnnotation_1.java        |   11 +
 test/files/pos/t5165b/TestObject_3.scala           |    3 +
 test/files/pos/t5165b/TestTrait_2.scala            |    3 +
 test/files/pos/t5178.scala                         |    2 +-
 test/files/pos/t5223.scala                         |    2 +-
 test/files/pos/t5240.scala                         |    4 +-
 test/files/pos/t5317.scala                         |    4 +-
 test/{pending => files}/pos/t5399a.scala           |    0
 test/files/pos/t5508-min-okay.scala                |    6 +
 test/files/pos/t5508-min-okay2.scala               |    4 +
 test/files/pos/t5508-min.scala                     |    6 +
 test/files/pos/t5508.scala                         |   83 +
 test/files/pos/t5541.scala                         |    2 +-
 test/files/pos/t5580b.scala                        |   19 -
 test/{pending => files}/pos/t5606.scala            |    0
 test/{pending => files}/pos/t5639/Bar.scala        |    0
 test/files/pos/t5639/Foo.scala                     |    7 +
 test/files/pos/t5644/BoxesRunTime.java             |    2 +-
 test/files/pos/t5692a/Macros_1.scala               |    4 +-
 test/files/pos/t5692b/Macros_1.scala               |    4 +-
 test/files/pos/t5706.scala                         |   13 +-
 test/files/pos/t573.scala                          |   10 +-
 test/files/pos/t5744/Macros_1.scala                |    2 +-
 .../t5760-pkgobj-warn/stalepkg_1.scala             |    0
 .../t5760-pkgobj-warn/stalepkg_2.scala             |    0
 test/files/pos/t577.scala                          |   10 +-
 test/files/pos/t5809.scala                         |    5 +-
 test/files/{neg => pos}/t5845.scala                |    0
 test/files/pos/t5846.scala                         |    4 +-
 test/files/pos/t5853.scala                         |    4 +-
 test/files/pos/t5877.scala                         |    4 +-
 test/files/pos/t5877b.scala                        |    2 +-
 test/files/pos/t5900a.scala                        |    9 +
 test/files/pos/t5954a/A_1.scala                    |    6 +
 test/files/pos/t5954a/B_2.scala                    |    6 +
 test/files/pos/t5954b/A_1.scala                    |    6 +
 test/files/pos/t5954b/B_2.scala                    |    5 +
 .../{neg/case-collision.flags => pos/t5954c.flags} |    0
 test/files/pos/t5954c/A_1.scala                    |   18 +
 test/files/pos/t5954c/B_2.scala                    |   18 +
 test/files/pos/t5954d.flags                        |    1 +
 test/files/pos/t5954d/A_1.scala                    |    6 +
 test/files/pos/t5954d/B_2.scala                    |    7 +
 test/files/pos/t599.scala                          |    2 +-
 test/files/pos/t602.scala                          |    2 +-
 test/files/pos/t6047.scala                         |    2 +-
 test/files/pos/t6123-explaintypes-implicits.flags  |    1 +
 test/files/pos/t6123-explaintypes-implicits.scala  |   13 +
 test/files/pos/t613.scala                          |    4 +-
 test/files/pos/t616.scala                          |    2 +-
 test/files/pos/t6162-inheritance.flags             |    1 +
 test/files/pos/t6162-inheritance.scala             |   22 +
 test/files/pos/t6169/Exist.java                    |    4 +
 test/files/pos/t6169/ExistF.java                   |    4 +
 test/files/pos/t6169/ExistIndir.java               |    4 +
 test/files/pos/t6169/OP.java                       |    1 +
 test/files/pos/t6169/Skin.java                     |    1 +
 test/files/pos/t6169/Skinnable.java                |    3 +
 test/files/pos/t6169/skinnable.scala               |   14 +
 test/files/pos/t6169/t6169.scala                   |    7 +
 test/files/pos/t6201.scala                         |   14 +-
 test/files/pos/t6210.scala                         |    4 +-
 test/files/pos/t6221.scala                         |   33 +
 test/files/{neg => pos}/t6231.scala                |    0
 test/files/pos/t6231b.scala                        |    8 +
 test/files/pos/t6260.flags                         |    1 +
 test/files/{neg => pos}/t6260.scala                |    0
 test/files/pos/t6260a.scala                        |   15 +
 test/files/pos/t6260b.scala                        |    3 +
 test/files/pos/t6301.scala                         |    9 +
 test/files/pos/t6355pos.scala                      |   16 +
 test/files/pos/t640.scala                          |    4 +-
 test/files/pos/t6447.scala                         |   18 +
 test/files/pos/t6485a/Macros_1.scala               |    2 +-
 test/files/pos/t6485b/Test.scala                   |    2 +-
 test/files/pos/t651.scala                          |    4 +-
 test/files/pos/t6516.scala                         |    4 +-
 test/files/pos/t6574.scala                         |   19 +
 test/files/pos/t6624.scala                         |    2 +-
 test/files/pos/t6664.scala                         |    4 +
 test/files/pos/t6664b.scala                        |    5 +
 test/files/pos/t6675.flags                         |    2 +-
 test/files/pos/t6745.scala                         |    4 +
 test/files/pos/t675.scala                          |    4 +-
 test/files/pos/t6780.scala                         |   20 +
 test/files/pos/t6797.scala                         |    4 +
 test/files/pos/t6815.scala                         |   17 +
 test/files/pos/t6815_import.scala                  |   16 +
 test/files/pos/t6897.scala                         |    6 -
 test/files/pos/t690.scala                          |    2 +-
 test/files/pos/t6948.scala                         |   10 +
 test/files/pos/t6963c.scala                        |    4 +-
 test/files/pos/t6966.scala                         |   17 +
 test/files/pos/t6976/ImplicitBug_1.scala           |    8 +-
 test/files/pos/t7014/t7014.scala                   |    1 -
 test/files/pos/t711.scala                          |    2 +-
 test/files/pos/t715.cmds                           |    2 -
 test/files/pos/t715/meredith_1.scala               |   98 -
 test/files/pos/t715/runner_2.scala                 |    3 -
 test/files/pos/t7228.scala                         |   75 +
 test/files/pos/t7264/A_1.scala                     |   11 +
 test/files/pos/t7264/B_2.scala                     |    7 +
 test/files/pos/t7294.scala                         |    6 +
 test/files/pos/t7296.scala                         |    6 +
 test/files/pos/t7315.flags                         |    1 +
 test/files/pos/t7315.scala                         |    4 +
 test/files/pos/t7322.scala                         |   11 +
 test/files/pos/t7364/BadList.java                  |    3 +
 test/files/pos/t7364/UseIt.scala                   |    4 +
 test/files/pos/t7364b/BadList_1.java               |    3 +
 test/files/pos/t7364b/UseIt_2.scala                |    5 +
 test/files/pos/t7377/Macro_1.scala                 |    4 +-
 test/files/pos/t7377b.flags                        |    1 -
 test/files/pos/t7427.flags                         |    1 +
 test/files/pos/t7427.scala                         |    4 +
 .../{disabled/t7020.flags => pos/t7433.flags}      |    0
 test/files/pos/t7433.scala                         |   10 +
 test/files/pos/t7461/Macros_1.scala                |    6 +-
 test/files/pos/t7475a.scala                        |   11 +
 test/files/pos/t7475b.scala                        |    8 +
 test/files/pos/t7475d.scala                        |   11 +
 test/files/pos/t7475e.scala                        |   13 +
 test/files/pos/t7516/A_1.scala                     |    2 +-
 test/files/pos/t7520.scala                         |   10 +
 test/files/pos/t757.scala                          |    4 +-
 test/files/pos/t758.scala                          |   10 +-
 test/files/pos/t7591/Demo.scala                    |   83 +
 test/files/pos/t7649.scala                         |    4 +-
 test/files/pos/t7668.scala                         |   12 +
 test/files/pos/t767.scala                          |    2 +-
 test/files/pos/t7688.scala                         |    7 +
 test/files/pos/t7689.scala                         |    7 +
 test/files/pos/t7690.scala                         |   17 +
 test/files/pos/t7753.scala                         |   36 +
 test/files/pos/t7776.scala                         |   14 +-
 test/files/pos/t7785.scala                         |   34 +
 test/files/pos/t7788.scala                         |    8 +
 test/files/pos/t7834.scala                         |    6 +
 test/files/pos/t7847/A.scala                       |    5 +
 test/files/pos/t7847/B.java                        |   10 +
 test/files/pos/t7853-partial-function.scala        |    7 +
 test/files/pos/t7853.scala                         |   11 +
 test/files/pos/t7864.flags                         |    1 +
 test/files/pos/t7864.scala                         |    5 +
 test/files/pos/t788.scala                          |    2 +-
 test/files/pos/t7919.scala                         |    6 +
 test/files/pos/t7928.scala                         |   16 +
 test/files/pos/t7944.scala                         |   24 +
 test/files/pos/t7983.scala                         |   31 +
 test/files/pos/t7987/Macro_1.scala                 |    6 +
 test/files/pos/t7987/Test_2.scala                  |   12 +
 test/files/{jvm/t2570.check => pos/t8001.check}    |    0
 .../{disabled/t7020.flags => pos/t8001.flags}      |    0
 test/files/pos/t8001/Macros_1.scala                |   10 +
 test/files/pos/t8001/Test_2.scala                  |    4 +
 test/files/pos/t8002-nested-scope.scala            |   20 +
 test/files/pos/t8011.scala                         |    8 +
 test/files/pos/t8013.flags                         |    1 +
 test/files/pos/t8013/inpervolated_2.scala          |   11 +
 test/files/pos/t8013/inpervolator_1.scala          |   33 +
 test/files/pos/t802.scala                          |    8 +-
 test/files/pos/t8023.scala                         |   22 +
 test/files/pos/t8023b.scala                        |    2 +
 test/files/pos/t8045.scala                         |   17 +
 test/files/pos/t8046.scala                         |   20 +
 test/files/pos/t8046b.scala                        |   16 +
 test/files/pos/t8046c.scala                        |   19 +
 test/files/pos/t8054.scala                         |   31 +
 test/files/pos/t8064.flags                         |    1 +
 test/files/pos/t8064/Client_2.scala                |    8 +
 test/files/pos/t8064/Macro_1.scala                 |   10 +
 test/files/pos/t8064b.flags                        |    1 +
 test/files/pos/t8064b/Client_2.scala               |    6 +
 test/files/pos/t8064b/Macro_1.scala                |   11 +
 test/files/pos/t807.scala                          |    2 +-
 test/files/pos/t8120.scala                         |    9 +
 test/files/pos/t8128.scala                         |   15 +
 test/files/pos/t8132.scala                         |    5 +
 test/files/pos/t8134/A_1.scala                     |    4 +
 test/files/pos/t8134/B_2.scala                     |    4 +
 test/files/pos/t8152-performance.scala             |   13 -
 test/files/pos/t8170.scala                         |   27 +
 test/files/pos/t8170b.scala                        |   25 +
 test/files/pos/t8177.scala                         |   12 +
 test/files/pos/t8177a.scala                        |    9 +
 test/files/pos/t8177b.scala                        |   13 +
 test/files/pos/t8177d.scala                        |   12 +
 test/files/pos/t8177e.scala                        |    3 +
 test/files/pos/t8177g.scala                        |   11 +
 test/files/pos/t8177h.scala                        |    5 +
 test/files/{jvm/t2585.check => pos/t8187.check}    |    0
 test/files/pos/t8187.scala                         |    6 +
 test/files/pos/t8207.scala                         |    6 +
 test/files/{jvm/t680.check => pos/t8209a.check}    |    0
 test/files/pos/t8209a/Macros_1.scala               |   17 +
 test/files/pos/t8209a/Test_2.scala                 |    4 +
 test/files/{jvm/xml02.check => pos/t8209b.check}   |    0
 test/files/pos/t8209b/Macros_1.scala               |   17 +
 test/files/pos/t8209b/Test_2.scala                 |    4 +
 test/files/pos/t8219.scala                         |   15 +
 test/files/pos/t8219b.scala                        |   49 +
 test/files/pos/t8223.scala                         |   29 +
 test/files/pos/t8224.scala                         |   12 +
 test/files/pos/t8237.scala                         |   29 +
 test/files/pos/t8237b.scala                        |   10 +
 test/files/pos/t8244d/InodeBase_1.java             |    6 +
 test/files/pos/t8244d/Test_2.scala                 |    3 +
 test/files/pos/t8300-conversions-a.scala           |   23 +
 test/files/pos/t8300-conversions-b.scala           |   23 +
 test/files/pos/t8300-overloading.scala             |   16 +
 test/files/pos/t8300-patmat-a.scala                |   20 +
 test/files/pos/t8300-patmat-b.scala                |   20 +
 test/files/pos/t8301.scala                         |   19 +
 test/files/pos/t8301b.scala                        |   36 +
 test/files/pos/t8306.flags                         |    1 +
 test/files/pos/t8306.scala                         |    8 +
 test/files/pos/t8315.flags                         |    1 +
 test/files/pos/t8315.scala                         |   12 +
 test/files/pos/t8315b.flags                        |    1 +
 test/files/pos/t8315b.scala                        |   11 +
 test/files/pos/t8324.scala                         |   16 +
 test/files/{neg/t696a.flags => pos/t8352.check}    |    0
 test/files/pos/t8352/Macros_1.scala                |    7 +
 test/files/pos/t8352/Test_2.scala                  |    5 +
 test/files/pos/t8363.flags                         |    1 +
 test/files/pos/t8363.scala                         |    7 +
 .../pos/{annotated-treecopy.check => t8364.check}  |    0
 test/files/pos/t8364.scala                         |   12 +
 test/files/pos/t8367.scala                         |   11 +
 ...{attachments-typed-ident.check => t8369a.check} |    0
 test/files/pos/t8369a.scala                        |    5 +
 .../{macro-qmarkqmarkqmark.check => t8369b.check}  |    0
 test/files/pos/t8369b.scala                        |   18 +
 test/files/pos/t8376/BindingsX.java                |   13 +
 test/files/pos/t8376/Test.scala                    |   10 +
 test/files/pos/t8403.scala                         |    9 +
 test/files/pos/t8411/Macros_1.scala                |   10 +
 test/files/pos/t8411/Test_2.scala                  |    4 +
 test/files/pos/t8460.scala                         |   25 +
 test/files/pos/t880.scala                          |    6 -
 test/files/pos/t911.scala                          |    8 +-
 test/files/pos/t927.scala                          |    2 +-
 test/files/pos/t946.scala                          |    2 +-
 test/files/pos/tcpoly_boundedmonad.scala           |   18 +-
 test/files/pos/tcpoly_bounds1.scala                |    6 +-
 test/files/pos/tcpoly_checkkinds_mix.scala         |    6 +-
 test/files/pos/tcpoly_gm.scala                     |    5 +-
 .../pos/tcpoly_higherorder_bound_method.scala      |    2 +-
 .../pos/tcpoly_infer_explicit_tuple_wrapper.scala  |    8 +-
 .../pos/tcpoly_infer_implicit_tuple_wrapper.scala  |    4 +-
 test/files/pos/tcpoly_late_method_params.scala     |    2 +-
 test/files/pos/tcpoly_method.scala                 |    2 +-
 test/files/pos/tcpoly_overloaded.scala             |   18 +-
 test/files/pos/tcpoly_poly.scala                   |    2 +-
 test/files/pos/tcpoly_return_overriding.scala      |    2 +-
 test/files/pos/tcpoly_seq.scala                    |   44 +-
 test/files/pos/tcpoly_seq_typealias.scala          |   40 +-
 test/files/pos/tcpoly_subst.scala                  |    2 +-
 test/files/pos/tcpoly_variance_pos.scala           |    4 +-
 test/files/pos/tcpoly_wildcards.scala              |    2 +-
 test/files/pos/ted.scala                           |    2 +-
 test/files/pos/test5.scala                         |    4 +-
 test/files/pos/test5refine.scala                   |    4 +-
 test/files/pos/testCoercionThis.scala              |    6 +-
 test/files/pos/thistypes.scala                     |    2 +-
 test/files/pos/ticket0137.scala                    |    6 +-
 test/files/pos/ticket2251.scala                    |   14 +
 test/files/pos/trait-force-info.scala              |    2 +-
 test/files/pos/trait-parents.scala                 |    6 +-
 test/files/pos/traits.scala                        |    4 +-
 test/files/pos/typealias_dubious.scala             |   14 +-
 test/files/pos/typealiases.scala                   |   16 +-
 test/files/pos/unapplyNeedsMemberType.scala        |    4 +-
 test/files/pos/unapplySeq.scala                    |    2 +-
 test/files/pos/unapplyVal.scala                    |    4 +-
 test/files/pos/valdefs.scala                       |    2 +-
 test/files/pos/variances-flip.scala                |    7 +
 test/files/pos/variances-local.scala               |    7 +
 test/files/pos/virtpatmat_anonfun_for.flags        |    0
 test/files/pos/virtpatmat_binding_opt.scala        |    4 +-
 test/files/pos/virtpatmat_castbinder.scala         |    2 +-
 test/files/pos/virtpatmat_exist1.scala             |    2 +-
 test/files/pos/virtpatmat_exist2.scala             |   10 +-
 test/files/pos/virtpatmat_exist3.scala             |    4 +-
 test/files/pos/virtpatmat_gadt_array.scala         |   10 +-
 test/files/positions/Anon.scala                    |    2 +-
 test/files/positions/Enclosing1.scala              |    2 +-
 test/files/positions/ExcludedPrefix1.scala         |   18 +-
 test/files/positions/Overlap3.scala                |    2 +-
 test/files/positions/Overlap4.scala                |    2 +-
 test/files/positions/Scaladoc2.scala               |    4 +-
 test/files/positions/Scaladoc3.scala               |    2 +-
 test/files/positions/Scaladoc4.scala               |    2 +-
 test/files/positions/Scaladoc6.scala               |    2 +-
 test/files/positions/Scaladoc7.scala               |    2 +-
 test/files/presentation/callcc-interpreter.check   |  136 +-
 .../callcc-interpreter/src/CallccInterpreter.scala |   36 +-
 .../presentation/completion-implicit-chained.check |   44 +-
 test/files/presentation/hyperlinks-macro.check     |   11 +
 .../presentation/hyperlinks-macro/Runner.scala     |    8 +
 .../hyperlinks-macro/src/MacroCall.scala           |   11 +
 test/files/presentation/hyperlinks/Runner.scala    |    2 +-
 .../hyperlinks/src/NameDefaultTests.scala          |    6 +-
 .../presentation/hyperlinks/src/PatMatTests.scala  |    8 +-
 test/files/presentation/ide-bug-1000349.check      |   62 +-
 test/files/presentation/ide-bug-1000475.check      |  180 +-
 .../presentation/ide-bug-1000475/src/Foo.scala     |    2 +-
 test/files/presentation/ide-bug-1000531.check      |  239 +-
 .../ide-bug-1000531/src/CrashOnLoad.scala          |    4 +-
 test/files/presentation/ide-t1001326.check         |    4 -
 test/files/presentation/ide-t1001326/Test.scala    |   91 -
 test/files/presentation/ide-t1001326/src/a/A.scala |    5 -
 test/files/presentation/implicit-member.check      |   66 +-
 .../implicit-member/src/ImplicitMember.scala       |    4 +-
 .../memory-leaks/MemoryLeaksTest.scala             |   19 +-
 test/files/presentation/parse-invariants.check     |    5 +
 .../files/presentation/parse-invariants/Test.scala |  107 +
 .../presentation/parse-invariants/src/a/A.scala    |  138 +
 test/files/presentation/partial-fun.check          |    2 +
 test/files/presentation/partial-fun/Runner.scala   |   10 +
 .../presentation/partial-fun/src/PartialFun.scala  |    5 +
 test/files/presentation/ping-pong.check            |  140 +-
 .../presentation/ping-pong/src/PingPong.scala      |   14 +-
 test/files/presentation/random.check               |    6 +-
 test/files/presentation/random/src/Random.scala    |   16 +-
 test/files/presentation/scope-completion-1.check   |   12 +-
 test/files/presentation/scope-completion-2.check   |   40 +-
 test/files/presentation/scope-completion-3.check   |  176 +-
 test/files/presentation/scope-completion-4.check   |  320 +-
 .../presentation/scope-completion-import.check     |  276 +-
 test/files/presentation/t1207.check                |   53 +
 test/files/presentation/t1207/Test.scala           |    3 +
 .../files/presentation/t1207/src/Completions.scala |   20 +
 test/files/presentation/t4287.check                |   11 +
 test/files/presentation/t4287/Test.scala           |    3 +
 test/files/presentation/t4287/src/Foo.scala        |    5 +
 test/files/presentation/t4287b.check               |    6 +
 test/files/presentation/t4287b/Test.scala          |    3 +
 test/files/presentation/t4287b/src/Foo.scala       |   15 +
 test/files/presentation/t4287c.check               |   11 +
 test/files/presentation/t4287c.flags               |    1 +
 test/files/presentation/t4287c/Test.scala          |    3 +
 test/files/presentation/t4287c/src/Foo.scala       |    9 +
 test/files/presentation/t5708.check                |   76 +-
 test/files/presentation/t7678.check                |    1 +
 test/files/presentation/t7678/Runner.scala         |   62 +
 test/files/presentation/t7678/src/TypeTag.scala    |    9 +
 test/files/presentation/t8085.check                |    2 +-
 test/files/presentation/t8085b.check               |    2 +-
 test/files/presentation/visibility.check           |  359 +-
 .../presentation/visibility/src/Completions.scala  |    2 +-
 test/files/res/t597/Test.scala                     |    2 +-
 test/files/res/t687.check                          |    6 +-
 test/files/res/t722/Parser.scala                   |    2 +-
 test/files/res/t735/ScalaExpressions.scala         |    2 +-
 test/files/res/t743/BracesXXX.scala                |    2 +-
 test/files/res/t743/ParserXXX.scala                |    6 +-
 test/files/res/t785/ScalaNewTyper.scala            |    2 +-
 test/files/res/t831/NewScalaParserXXX.scala        |   18 +-
 test/files/run/Course-2002-01.check                |    3 +
 test/files/run/Course-2002-02.scala                |    8 +-
 test/files/run/Course-2002-05.scala                |   18 +-
 test/files/run/Course-2002-06.scala                |    2 +-
 test/files/run/Course-2002-07.scala                |  162 +-
 test/files/run/Course-2002-08.scala                |   32 +-
 test/files/run/Course-2002-09.scala                |   50 +-
 test/files/run/Course-2002-13.scala                |   20 +-
 test/files/run/Meter.check                         |    3 +
 test/files/run/MeterCaseClass.check                |    3 +
 test/files/run/OrderingTest.scala                  |    8 +-
 test/files/run/Predef.readLine.scala               |    3 +-
 test/files/run/ReplacementMatching.scala           |   10 +-
 test/files/run/ReverseSeqView.scala                |    4 +-
 test/files/run/SymbolsTest.scala                   |    3 +-
 test/files/run/UnrolledBuffer.scala                |   44 +-
 test/files/run/WeakHashSetTest.scala               |   14 +-
 test/files/run/absoverride.scala                   |   10 +-
 test/files/run/abstypetags_serialize.scala         |    3 +-
 test/files/run/all-overridden.check                |    1 +
 test/files/run/all-overridden.scala                |   11 +
 test/files/run/analyzerPlugins.check               |   54 +-
 test/files/run/analyzerPlugins.scala               |   12 +-
 test/files/run/annotatedRetyping.scala             |    4 +-
 test/files/run/array-addition.check                |    4 +
 test/files/run/array-addition.scala                |   11 +
 test/files/run/array-charSeq.scala                 |    1 +
 test/files/run/array-existential-bound.scala       |    6 +-
 test/files/run/arrayclone-old.scala                |   24 +-
 test/files/run/arraycopy.scala                     |    2 +-
 test/files/run/arrays.check                        |    6 +
 test/files/run/arrays.scala                        |    2 +-
 test/files/run/arrayview.scala                     |    2 +-
 test/files/run/bigDecimalCache.scala               |    4 +-
 test/files/run/bigDecimalTest.check                |    2 +-
 test/files/run/bigDecimalTest.scala                |    2 +-
 test/files/run/bitsets.check                       |    5 +
 test/files/run/bitsets.scala                       |   60 +-
 test/files/run/blame_eye_triple_eee-double.check   |    9 +
 .../blame_eye_triple_eee-double.flags}             |    0
 test/files/run/blame_eye_triple_eee-double.scala   |   61 +
 test/files/run/blame_eye_triple_eee-float.check    |    9 +
 .../blame_eye_triple_eee-float.flags}              |    0
 test/files/run/blame_eye_triple_eee-float.scala    |   61 +
 test/files/run/boolexprs.scala                     |    2 +-
 test/files/run/bridges.scala                       |    2 +-
 test/files/run/bugs.scala                          |    6 +-
 test/files/run/case-class-23.check                 |    2 +
 test/files/run/case-class-23.scala                 |   33 +
 test/files/run/caseClassEquality.scala             |   12 +-
 test/files/run/caseclasses.scala                   |    2 +-
 test/files/run/castsingleton.scala                 |    2 +-
 test/files/run/checked.scala                       |    6 +-
 test/files/run/classfile-format-51.scala           |    8 +-
 test/files/run/classfile-format-52.scala           |   12 +-
 test/files/run/classmanifests_new_alias.scala      |    4 +-
 test/files/run/classmanifests_new_core.scala       |    3 +-
 test/files/run/classof.check                       |    2 +-
 test/files/run/classof.scala                       |    6 +-
 test/files/run/collection-conversions.scala        |    8 +-
 test/files/run/collection-stacks.check             |   15 +
 test/files/run/collection-stacks.scala             |   38 +
 test/files/run/collections-toSelf.scala            |    2 +-
 test/files/run/collections.scala                   |    7 +-
 test/files/run/colltest.check                      |    1 +
 test/files/run/colltest1.scala                     |   14 +-
 test/files/run/comparable-comparator.scala         |   11 +-
 test/files/run/compiler-asSeenFrom.check           |   94 +
 test/files/run/compiler-asSeenFrom.scala           |   55 +-
 test/files/run/concat-two-strings.scala            |    2 +-
 test/files/run/concurrent-map-conversions.scala    |   14 +-
 test/files/run/concurrent-stream.scala             |   45 +-
 test/files/run/constant-optimization.check         |    5 +
 .../constant-optimization.flags}                   |    0
 test/files/run/constant-optimization.scala         |   61 +
 test/files/run/constant-type.check                 |   12 +-
 test/files/run/constant-type.scala                 |    8 +-
 test/files/run/constrained-types.check             |   33 +-
 test/files/run/constrained-types.scala             |   10 +-
 test/files/run/contrib674.check                    |    3 +
 test/files/run/contrib674.scala                    |    4 +-
 test/files/run/ctor-order.scala                    |    2 +-
 test/files/run/ctries-new/concmap.scala            |   62 +-
 test/files/run/ctries-new/iterator.scala           |  114 +-
 test/files/run/ctries-new/lnode.scala              |   18 +-
 test/files/run/ctries-new/main.scala               |    7 +-
 test/files/run/ctries-new/snapshot.scala           |   88 +-
 test/files/run/ctries-old/concmap.scala            |   63 +-
 test/files/run/ctries-old/iterator.scala           |  101 +-
 test/files/run/ctries-old/lnode.scala              |   19 +-
 test/files/run/ctries-old/main.scala               |    8 +-
 test/files/run/ctries-old/snapshot.scala           |   89 +-
 test/files/run/dead-code-elimination.check         |    0
 test/files/run/dead-code-elimination.scala         |   10 +-
 test/files/run/deeps.check                         |   87 +
 test/files/run/deeps.scala                         |  114 +
 .../delambdafy-dependent-on-param-subst-2.scala    |   20 +
 .../run/delambdafy-dependent-on-param-subst.flags  |    1 +
 .../run/delambdafy-dependent-on-param-subst.scala  |   20 +
 test/files/run/delambdafy-nested-by-name.check     |    2 +
 test/files/run/delambdafy-nested-by-name.scala     |   11 +
 test/files/run/delambdafy-two-lambdas.check        |    2 +
 test/files/run/delambdafy-two-lambdas.scala        |   12 +
 test/files/run/delambdafy_t6028.check              |   57 +
 test/files/run/delambdafy_t6028.scala              |   21 +
 test/files/run/delambdafy_t6555.check              |   15 +
 test/files/run/delambdafy_t6555.scala              |   15 +
 .../run/delambdafy_uncurry_byname_inline.check     |   21 +
 .../run/delambdafy_uncurry_byname_inline.scala     |   20 +
 .../run/delambdafy_uncurry_byname_method.check     |   15 +
 .../run/delambdafy_uncurry_byname_method.scala     |   20 +
 test/files/run/delambdafy_uncurry_inline.check     |   23 +
 test/files/run/delambdafy_uncurry_inline.scala     |   20 +
 test/files/run/delambdafy_uncurry_method.check     |   17 +
 test/files/run/delambdafy_uncurry_method.scala     |   20 +
 test/files/run/delay-bad.check                     |    7 +
 test/files/run/delay-good.check                    |    6 +
 test/files/run/deprecate-early-type-defs.check     |    3 +
 test/files/run/deprecate-early-type-defs.flags     |    1 +
 test/files/run/deprecate-early-type-defs.scala     |    1 +
 test/files/run/distinct.scala                      |    4 +-
 test/files/run/duration-coarsest.scala             |   28 +
 test/files/run/dynamic-applyDynamic.check          |    4 +-
 test/files/run/dynamic-applyDynamicNamed.check     |    8 +-
 test/files/run/dynamic-selectDynamic.check         |    4 +-
 test/files/run/dynamic-updateDynamic.check         |    4 +-
 test/files/run/elidable-noflags.scala              |    2 +-
 test/files/run/emptypf.scala                       |    4 +-
 test/files/run/enrich-gentraversable.scala         |   21 +-
 test/files/run/enums.scala                         |   14 +-
 test/files/run/equality.scala                      |   10 +-
 test/files/run/eta-expand-star2.check              |    1 +
 test/files/run/exceptions-2.check                  |    3 +
 test/files/run/exceptions-2.scala                  |   60 +-
 test/files/run/exceptions-nest.scala               |   30 +-
 test/files/run/exceptions.scala                    |    6 +-
 test/files/run/existential-rangepos.check          |   13 +
 test/files/run/existential-rangepos.scala          |   13 +
 test/files/run/existentials-in-compiler.check      |   44 +-
 test/files/run/existentials-in-compiler.scala      |   10 +-
 test/files/run/existentials.scala                  |    7 +-
 test/files/run/existentials3-new.check             |   12 +-
 test/files/run/existentials3-new.scala             |    6 +-
 test/files/run/existentials3-old.scala             |    2 +
 test/files/run/exoticnames.check                   |    0
 test/files/run/exoticnames.scala                   |    8 +-
 test/files/run/fail-non-value-types.scala          |   10 +-
 test/files/run/finally.scala                       |   30 +-
 test/files/run/flat-flat-flat.scala                |    2 +-
 test/files/run/fors.check                          |   18 -
 test/files/run/fors.scala                          |   13 -
 test/files/run/forvaleq.scala                      |   30 +-
 test/files/run/freetypes_false_alarm2.scala        |    3 +-
 test/files/run/gadts.scala                         |    4 +-
 test/files/run/genericValueClass.scala             |   13 +-
 test/files/run/getClassTest-old.scala              |    3 +-
 test/files/run/global-showdef.scala                |   15 +-
 test/files/run/groupby.scala                       |    6 +-
 test/files/run/hashCodeBoxesRunTime.scala          |   12 +-
 test/files/run/hashhash.scala                      |    2 +-
 test/files/run/hashset.check                       |   26 +
 test/files/run/hashset.scala                       |   48 +
 test/files/run/hashsetremove.check                 |    6 +
 test/files/run/hashsetremove.scala                 |   13 +
 test/files/run/idempotency-case-classes.check      |    2 +-
 test/files/run/idempotency-case-classes.scala      |    4 +-
 test/files/run/idempotency-extractors.scala        |    4 +-
 test/files/run/idempotency-labels.scala            |    4 +-
 test/files/run/idempotency-lazy-vals.scala         |    4 +-
 test/files/run/idempotency-partial-functions.check |    2 -
 test/files/run/idempotency-this.check              |    2 +-
 test/files/run/idempotency-this.scala              |    4 +-
 test/files/run/impconvtimes.scala                  |    2 +
 test/files/run/implicits.scala                     |    2 +
 test/files/run/indexedSeq.scala                    |   11 +-
 test/files/run/inferred-type-constructors.check    |   56 +
 test/files/run/inferred-type-constructors.scala    |  125 +
 test/files/run/infix.scala                         |    1 -
 test/files/run/inline-ex-handlers.check            |  168 +-
 test/files/run/inline-ex-handlers.scala            |    6 +-
 test/files/run/inliner-infer.scala                 |    5 +-
 test/files/run/inner-obj-auto.scala                |  570 +--
 .../run/interop_classtags_are_classmanifests.scala |    5 +-
 .../run/interop_manifests_are_classtags.scala      |    3 +-
 .../files/run/interop_typetags_are_manifests.scala |    1 +
 test/files/run/interpolationArgs.check             |    4 +-
 test/files/run/interpolationArgs.flags             |    1 -
 test/files/run/interpolationArgs.scala             |    4 +-
 test/files/run/interpolationMultiline2.flags       |    1 -
 test/files/run/interpolationMultiline2.scala       |   17 +-
 test/files/run/intmap.check                        |    0
 test/files/run/io-position.check                   |  Bin 126 -> 0 bytes
 test/files/run/io-position.scala                   |   13 -
 test/files/run/iq.check                            |    4 +
 test/files/run/iq.scala                            |   39 +-
 test/files/run/is-valid-num.scala                  |   33 +-
 test/files/run/issue192.scala                      |   34 +-
 test/files/run/iterator-concat.check               |    4 +
 test/files/run/iterator-concat.scala               |   15 +
 test/files/run/iterator-from.scala                 |   71 +
 test/files/run/iterator-iterate-lazy.scala         |    2 +-
 test/files/run/iterator3444.scala                  |   12 +-
 test/files/run/iterators.scala                     |    8 +-
 test/files/run/java-erasure.scala                  |    2 +-
 test/files/run/json.check                          |   21 -
 test/files/run/json.scala                          |  283 --
 test/files/run/jtptest.check                       |    7 -
 test/files/run/jtptest.scala                       |   17 -
 test/files/run/kind-repl-command.check             |   28 +
 test/files/run/kind-repl-command.scala             |   12 +
 test/files/run/kmpSliceSearch.scala                |    8 +-
 test/files/run/lazy-exprs.check                    |    8 +
 test/files/run/lazy-exprs.scala                    |   16 +-
 test/files/run/lazy-locals.check                   |    6 +
 test/files/run/lazy-locals.scala                   |   14 +-
 test/files/run/lazy-override-run.scala             |    4 +-
 test/files/run/lazy-traits.scala                   |   74 +-
 test/files/run/lift-and-unlift.scala               |   10 +-
 test/files/run/list_map.scala                      |   26 +
 test/files/run/lists-run.scala                     |   12 +-
 test/files/run/literals.check                      |   19 +-
 test/files/run/literals.scala                      |   23 +-
 test/files/run/longmap.scala                       |    8 +
 test/files/run/lub-visibility.check                |    5 +-
 test/files/run/macro-abort-fresh.check             |    8 +-
 test/files/run/macro-abort-fresh/Macros_1.scala    |    4 +-
 test/files/run/macro-abort-fresh/Test_2.scala      |    2 +-
 test/files/run/macro-auto-duplicate/Macros_1.scala |    2 +-
 test/files/run/macro-basic-ma-md-mi/Impls_1.scala  |   14 +-
 .../run/macro-basic-ma-mdmi/Impls_Macros_1.scala   |   14 +-
 test/files/run/macro-basic-mamd-mi/Impls_1.scala   |   14 +-
 .../files/run/macro-blackbox-materialization.check |    3 +
 .../macro-blackbox-materialization/Macros_1.scala  |   16 +
 .../macro-blackbox-materialization/Test_2.scala    |    5 +
 .../run/macro-bodyexpandstoimpl/Impls_1.scala      |   14 +-
 .../macro-bodyexpandstoimpl/Macros_Test_2.scala    |    4 +-
 test/files/run/macro-bundle-context-alias.check    |    4 +
 .../run/macro-bundle-context-alias/Macros_1.scala  |   38 +
 .../run/macro-bundle-context-alias/Test_2.scala    |    6 +
 .../run/macro-bundle-context-refinement.check      |    2 +
 .../macro-bundle-context-refinement/Macros_1.scala |   19 +
 .../macro-bundle-context-refinement/Test_2.scala   |    4 +
 test/files/run/macro-bundle-repl.check             |   24 +
 test/files/run/macro-bundle-repl.scala             |   13 +
 test/files/run/macro-bundle-static.check           |    6 +
 .../run/macro-bundle-static/Impls_Macros_1.scala   |   30 +
 test/files/run/macro-bundle-static/Test_2.scala    |    8 +
 test/files/run/macro-bundle-toplevel.check         |    6 +
 .../macro-bundle-toplevel.flags}                   |    0
 .../run/macro-bundle-toplevel/Impls_Macros_1.scala |   25 +
 test/files/run/macro-bundle-toplevel/Test_2.scala  |    8 +
 test/files/run/macro-bundle-whitebox-decl.check    |    6 +
 .../Impls_Macros_1.scala                           |   26 +
 .../run/macro-bundle-whitebox-decl/Test_2.scala    |    8 +
 test/files/run/macro-bundle-whitebox-use-raw.check |    5 +
 .../macro-bundle-whitebox-use-raw/Macros_1.scala   |  108 +
 .../run/macro-bundle-whitebox-use-raw/Test_2.scala |   19 +
 .../run/macro-bundle-whitebox-use-refined.check    |    5 +
 .../Macros_1.scala                                 |  108 +
 .../macro-bundle-whitebox-use-refined/Test_2.scala |   19 +
 test/files/run/macro-declared-in-annotation.flags  |    1 -
 .../run/macro-declared-in-annotation/Impls_1.scala |   11 -
 test/files/run/macro-declared-in-anonymous.flags   |    1 -
 .../run/macro-declared-in-anonymous/Impls_1.scala  |   11 -
 .../Macros_Test_2.scala                            |    4 -
 test/files/run/macro-declared-in-block.flags       |    1 -
 .../run/macro-declared-in-block/Impls_1.scala      |   11 -
 .../macro-declared-in-block/Macros_Test_2.scala    |    6 -
 test/files/run/macro-declared-in-class-class.flags |    1 -
 .../macro-declared-in-class-class/Impls_1.scala    |   11 -
 .../Macros_Test_2.scala                            |   10 -
 .../files/run/macro-declared-in-class-object.flags |    1 -
 .../macro-declared-in-class-object/Impls_1.scala   |   11 -
 .../Macros_Test_2.scala                            |   10 -
 test/files/run/macro-declared-in-class.flags       |    1 -
 .../run/macro-declared-in-class/Impls_1.scala      |   11 -
 .../macro-declared-in-class/Macros_Test_2.scala    |    7 -
 .../run/macro-declared-in-default-param.flags      |    1 -
 .../macro-declared-in-default-param/Impls_1.scala  |   11 -
 .../Macros_Test_2.scala                            |    7 -
 .../run/macro-declared-in-implicit-class.flags     |    1 -
 .../Impls_Macros_1.scala                           |   19 -
 test/files/run/macro-declared-in-method.flags      |    1 -
 .../run/macro-declared-in-method/Impls_1.scala     |   11 -
 .../macro-declared-in-method/Macros_Test_2.scala   |    8 -
 .../files/run/macro-declared-in-object-class.flags |    1 -
 .../macro-declared-in-object-class/Impls_1.scala   |   11 -
 .../Macros_Test_2.scala                            |   10 -
 .../run/macro-declared-in-object-object.flags      |    1 -
 .../macro-declared-in-object-object/Impls_1.scala  |   11 -
 .../Macros_Test_2.scala                            |   10 -
 test/files/run/macro-declared-in-object.flags      |    1 -
 .../run/macro-declared-in-object/Impls_1.scala     |   11 -
 .../macro-declared-in-object/Macros_Test_2.scala   |    7 -
 .../run/macro-declared-in-package-object.flags     |    1 -
 .../macro-declared-in-package-object/Impls_1.scala |   11 -
 .../Macros_Test_2.scala                            |    8 -
 test/files/run/macro-declared-in-refinement.flags  |    1 -
 .../run/macro-declared-in-refinement/Impls_1.scala |   11 -
 .../Macros_Test_2.scala                            |    6 -
 test/files/run/macro-declared-in-trait.check       |   15 -
 test/files/run/macro-declared-in-trait.flags       |    1 -
 .../run/macro-declared-in-trait/Impls_1.scala      |   11 -
 .../macro-declared-in-trait/Macros_Test_2.scala    |   13 -
 test/files/run/macro-def-infer-return-type-a.check |    1 -
 test/files/run/macro-def-infer-return-type-a.flags |    1 -
 .../macro-def-infer-return-type-a/Impls_1.scala    |    5 -
 .../Macros_Test_2.scala                            |    4 -
 test/files/run/macro-def-infer-return-type-b.check |    6 -
 test/files/run/macro-def-infer-return-type-b.flags |    1 -
 .../Impls_Macros_1.scala                           |   10 -
 .../run/macro-def-infer-return-type-b/Test_2.scala |    8 -
 test/files/run/macro-def-infer-return-type-c.check |    1 -
 test/files/run/macro-def-infer-return-type-c.flags |    1 -
 .../macro-def-infer-return-type-c/Impls_1.scala    |    5 -
 .../Macros_Test_2.scala                            |    4 -
 test/files/run/macro-def-path-dependent-a.flags    |    1 -
 .../Impls_Macros_1.scala                           |   21 -
 test/files/run/macro-def-path-dependent-b.check    |    1 -
 test/files/run/macro-def-path-dependent-b.flags    |    1 -
 .../Impls_Macros_1.scala                           |   20 -
 .../run/macro-def-path-dependent-b/Test_2.scala    |    3 -
 test/files/run/macro-def-path-dependent-c.check    |    1 -
 test/files/run/macro-def-path-dependent-c.flags    |    1 -
 .../Impls_Macros_1.scala                           |   20 -
 .../run/macro-def-path-dependent-c/Test_2.scala    |    3 -
 test/files/run/macro-def-path-dependent-d1.check   |    1 -
 test/files/run/macro-def-path-dependent-d1.flags   |    1 -
 .../Impls_Macros_1.scala                           |    9 -
 .../run/macro-def-path-dependent-d1/Test_2.scala   |    3 -
 test/files/run/macro-def-path-dependent-d2.check   |    1 -
 test/files/run/macro-def-path-dependent-d2.flags   |    1 -
 .../run/macro-def-path-dependent-d2/Impls_1.scala  |    7 -
 .../run/macro-def-path-dependent-d2/Macros_2.scala |    7 -
 .../run/macro-def-path-dependent-d2/Test_3.scala   |    3 -
 ...tation.check => macro-def-path-dependent.check} |    0
 .../macro-def-path-dependent.flags}                |    0
 .../Dummy.scala}                                   |    0
 .../run/macro-def-path-dependent/Test_1.scala      |   25 +
 .../run/macro-def-path-dependent/Test_2.scala      |   22 +
 .../run/macro-def-path-dependent/Test_3.scala      |   22 +
 .../run/macro-def-path-dependent/Test_4.scala      |   11 +
 .../run/macro-def-path-dependent/Test_5.scala      |    9 +
 .../run/macro-def-path-dependent/Test_6.scala      |    9 +
 test/files/run/macro-default-params.check          |    1 +
 test/files/run/macro-default-params/Macros_1.scala |   27 +
 test/files/run/macro-default-params/Test_2.scala   |    3 +
 .../macro-divergence-spurious/Impls_Macros_1.scala |    8 +-
 test/files/run/macro-duplicate.check               |    3 +
 .../files/run/macro-duplicate/Impls_Macros_1.scala |   10 +-
 .../files/run/macro-enclosingowner-detectvar.check |   16 +
 .../macro-enclosingowner-detectvar/Macros_1.scala  |   14 +
 .../macro-enclosingowner-detectvar/Test_2.scala    |   23 +
 test/files/run/macro-enclosingowner-sbt.check      |   16 +
 .../run/macro-enclosingowner-sbt/Macros_1.scala    |   14 +
 .../run/macro-enclosingowner-sbt/Test_2.scala      |   23 +
 test/files/run/macro-enclosures.check              |   34 +
 .../macro-enclosures.flags}                        |    0
 .../run/macro-enclosures/Impls_Macros_1.scala      |   23 +
 test/files/run/macro-enclosures/Test_2.scala       |   11 +
 .../macro-expand-implicit-argument/Macros_1.scala  |    8 +-
 .../Impls_1.scala                                  |    6 +-
 .../Macros_Test_2.scala                            |    2 +-
 .../Impls_1.scala                                  |    6 +-
 .../Macros_Test_2.scala                            |    3 +-
 .../Impls_1.scala                                  |    4 +-
 .../Macros_Test_2.scala                            |    2 +-
 .../run/macro-expand-implicit-macro-is-view.flags  |    1 -
 .../Impls_1.scala                                  |    6 +-
 .../Macros_Test_2.scala                            |    5 +-
 .../macro-expand-multiple-arglists/Impls_1.scala   |    8 +-
 .../Macros_Test_2.scala                            |    2 +-
 test/files/run/macro-expand-nullary-generic.check  |   10 +-
 .../run/macro-expand-nullary-generic/Impls_1.scala |   14 +-
 .../Macros_Test_2.scala                            |    8 +-
 .../run/macro-expand-nullary-nongeneric.check      |   10 +-
 .../macro-expand-nullary-nongeneric/Impls_1.scala  |   15 +-
 .../Macros_Test_2.scala                            |    8 +-
 test/files/run/macro-expand-overload/Impls_1.scala |   14 +-
 .../run/macro-expand-overload/Macros_Test_2.scala  |   12 +-
 test/files/run/macro-expand-override/Impls_1.scala |   14 +-
 .../run/macro-expand-override/Macros_Test_2.scala  |   14 +-
 .../files/run/macro-expand-recursive/Impls_1.scala |   10 +-
 .../run/macro-expand-recursive/Macros_Test_2.scala |    4 +-
 test/files/run/macro-expand-tparams-bounds-a.check |    0
 test/files/run/macro-expand-tparams-bounds-a.flags |    1 -
 .../macro-expand-tparams-bounds-a/Impls_1.scala    |    5 -
 .../Macros_Test_2.scala                            |    8 -
 test/files/run/macro-expand-tparams-bounds-b.check |    0
 test/files/run/macro-expand-tparams-bounds-b.flags |    1 -
 .../macro-expand-tparams-bounds-b/Impls_1.scala    |    7 -
 .../Macros_Test_2.scala                            |   10 -
 test/files/run/macro-expand-tparams-bounds.check   |    2 +
 .../macro-expand-tparams-bounds.flags}             |    0
 .../run/macro-expand-tparams-bounds/Impls_1.scala  |   12 +
 .../Macros_Test_2.scala                            |   12 +
 .../macro-expand-tparams-explicit/Impls_1.scala    |    6 +-
 .../Macros_Test_2.scala                            |    2 +-
 .../macro-expand-tparams-implicit/Impls_1.scala    |    6 +-
 .../Macros_Test_2.scala                            |    2 +-
 .../run/macro-expand-tparams-only-in-impl.flags    |    1 -
 .../Impls_1.scala                                  |    5 -
 .../Macros_Test_2.scala                            |    8 -
 test/files/run/macro-expand-tparams-optional.flags |    1 -
 .../macro-expand-tparams-optional/Impls_1.scala    |    9 -
 .../Macros_Test_2.scala                            |    4 -
 test/files/run/macro-expand-tparams-prefix-a.check |    4 -
 test/files/run/macro-expand-tparams-prefix-a.flags |    1 -
 .../macro-expand-tparams-prefix-a/Impls_1.scala    |   11 -
 .../Macros_Test_2.scala                            |   10 -
 test/files/run/macro-expand-tparams-prefix-b.check |    2 -
 test/files/run/macro-expand-tparams-prefix-b.flags |    1 -
 .../macro-expand-tparams-prefix-b/Impls_1.scala    |   12 -
 .../Macros_Test_2.scala                            |   10 -
 .../files/run/macro-expand-tparams-prefix-c1.check |    3 -
 .../files/run/macro-expand-tparams-prefix-c1.flags |    1 -
 .../macro-expand-tparams-prefix-c1/Impls_1.scala   |   13 -
 .../Macros_Test_2.scala                            |   11 -
 .../files/run/macro-expand-tparams-prefix-c2.check |    3 -
 .../files/run/macro-expand-tparams-prefix-c2.flags |    1 -
 .../Impls_Macros_1.scala                           |   19 -
 .../macro-expand-tparams-prefix-c2/Test_2.scala    |    5 -
 .../files/run/macro-expand-tparams-prefix-d1.check |    3 -
 .../files/run/macro-expand-tparams-prefix-d1.flags |    1 -
 .../macro-expand-tparams-prefix-d1/Impls_1.scala   |   13 -
 .../Macros_Test_2.scala                            |   11 -
 test/files/run/macro-expand-tparams-prefix.check   |   20 +
 .../macro-expand-tparams-prefix.flags}             |    0
 .../run/macro-expand-tparams-prefix/Impls_1.scala  |   39 +
 .../Macros_Test_2.scala                            |   57 +
 test/files/run/macro-expand-unapply-a.check        |    2 +
 .../macro-expand-unapply-a.flags}                  |    0
 .../macro-expand-unapply-a/Impls_Macros_1.scala    |   15 +
 test/files/run/macro-expand-unapply-a/Test_2.scala |    6 +
 ...pand-varargs-explicit-over-nonvarargs-bad.check |    2 +-
 .../Impls_1.scala                                  |    6 +-
 .../Macros_Test_2.scala                            |    4 +-
 .../Impls_1.scala                                  |    8 +-
 .../Macros_Test_2.scala                            |    2 +-
 .../Impls_1.scala                                  |    6 +-
 .../Macros_Test_2.scala                            |    2 +-
 .../Impls_1.scala                                  |    6 +-
 .../Macros_Test_2.scala                            |    2 +-
 .../Impls_1.scala                                  |    6 +-
 .../Macros_Test_2.scala                            |    2 +-
 .../macro-impl-default-params/Impls_Macros_1.scala |   17 +-
 test/files/run/macro-impl-relaxed.check            |    4 +
 test/files/run/macro-impl-relaxed/Macros_1.scala   |   14 +
 test/files/run/macro-impl-relaxed/Test_2.scala     |    6 +
 .../macro-impl-rename-context/Impls_Macros_1.scala |    9 +-
 ....check => macro-impl-tparam-only-in-impl.check} |    0
 .../macro-impl-tparam-only-in-impl.flags}          |    0
 .../macro-impl-tparam-only-in-impl/Impls_1.scala   |    5 +
 .../Macros_Test_2.scala                            |    8 +
 ...=> macro-impl-tparam-typetag-is-optional.check} |    0
 .../macro-impl-tparam-typetag-is-optional.flags}   |    0
 .../Impls_1.scala                                  |    9 +
 .../Macros_Test_2.scala                            |    4 +
 ...-invalidret-doesnt-conform-to-def-rettype.check |    2 +-
 .../Impls_Macros_1.scala                           |    4 +-
 .../Test_2.scala                                   |    2 +-
 test/files/run/macro-invalidret-nontypeable.check  |    2 +-
 .../Impls_Macros_1.scala                           |    6 +-
 .../run/macro-invalidret-nontypeable/Test_2.scala  |    2 +-
 test/files/run/macro-invalidusage-badret.check     |    4 +-
 .../macro-invalidusage-badret/Impls_Macros_1.scala |    4 +-
 .../run/macro-invalidusage-badret/Test_2.scala     |    2 +-
 ...alidusage-partialapplication-with-tparams.check |    4 +-
 .../Impls_Macros_1.scala                           |    6 +-
 .../Test_2.scala                                   |    2 +-
 .../macro-invalidusage-partialapplication.check    |    4 +-
 .../Impls_Macros_1.scala                           |    8 +-
 .../Test_2.scala                                   |    2 +-
 .../run/macro-openmacros/Impls_Macros_1.scala      |    6 +-
 .../files/run/macro-parse-position-malformed.check |    1 +
 .../Impls_Macros_1.scala                           |   18 +
 .../macro-parse-position-malformed/Test_2.scala    |    3 +
 test/files/run/macro-parse-position.check          |    5 +
 .../run/macro-parse-position/Impls_Macros_1.scala  |   12 +
 test/files/run/macro-parse-position/Test_2.scala   |    3 +
 .../macro-quasiinvalidbody-c/Impls_Macros_1.scala  |    4 +-
 test/files/run/macro-quasiquotes.check             |    4 +
 test/files/run/macro-quasiquotes/Macros_1.scala    |   15 +
 test/files/run/macro-quasiquotes/Test_2.scala      |    5 +
 test/files/run/macro-range/Common_1.scala          |    7 +-
 .../run/macro-range/Expansion_Impossible_2.scala   |   14 +-
 .../Impls_Macros_1.scala                           |    6 +-
 .../macro-reflective-ma-normal-mdmi/Test_2.scala   |    2 +-
 .../run/macro-reflective-mamd-normal-mi.check      |    2 +-
 .../macro-reflective-mamd-normal-mi/Impls_1.scala  |    6 +-
 .../Macros_Test_2.scala                            |   12 +-
 test/files/run/macro-reify-basic/Macros_1.scala    |    4 +-
 .../run/macro-reify-chained1/Impls_Macros_1.scala  |   47 +
 test/files/run/macro-reify-chained1/Test_2.scala   |    9 +
 .../run/macro-reify-chained2/Impls_Macros_1.scala  |   47 +
 test/files/run/macro-reify-chained2/Test_2.scala   |    9 +
 test/files/run/macro-reify-freevars.check          |    2 +-
 test/files/run/macro-reify-freevars/Macros_1.scala |    2 +-
 test/files/run/macro-reify-freevars/Test_2.scala   |    8 +-
 test/files/run/macro-reify-nested-a.check          |    0
 test/files/run/macro-reify-nested-a.flags          |    1 -
 .../run/macro-reify-nested-a/Impls_Macros_1.scala  |   46 -
 test/files/run/macro-reify-nested-a/Test_2.scala   |    4 -
 .../run/macro-reify-nested-a1/Impls_Macros_1.scala |   47 +
 test/files/run/macro-reify-nested-a1/Test_2.scala  |    9 +
 .../run/macro-reify-nested-a2/Impls_Macros_1.scala |   47 +
 test/files/run/macro-reify-nested-a2/Test_2.scala  |    9 +
 test/files/run/macro-reify-nested-b.check          |    0
 test/files/run/macro-reify-nested-b.flags          |    1 -
 .../run/macro-reify-nested-b/Impls_Macros_1.scala  |   46 -
 test/files/run/macro-reify-nested-b/Test_2.scala   |    4 -
 .../run/macro-reify-nested-b1/Impls_Macros_1.scala |   47 +
 test/files/run/macro-reify-nested-b1/Test_2.scala  |    9 +
 .../run/macro-reify-nested-b2/Impls_Macros_1.scala |   47 +
 test/files/run/macro-reify-nested-b2/Test_2.scala  |    9 +
 .../macro-reify-ref-to-packageless/Impls_1.scala   |    4 +-
 .../macro-reify-ref-to-packageless/Test_2.scala    |    2 +-
 .../Impls_Macros_1.scala                           |   10 +-
 .../macro-reify-splice-outside-reify/Test_2.scala  |    2 +-
 .../run/macro-reify-splice-splice/Macros_1.scala   |   11 -
 .../files/run/macro-reify-staticXXX/Macros_1.scala |    2 +-
 test/files/run/macro-reify-tagful-a/Macros_1.scala |    4 +-
 test/files/run/macro-reify-tagless-a.check         |    2 +-
 .../run/macro-reify-tagless-a/Impls_Macros_1.scala |    4 +-
 test/files/run/macro-reify-tagless-a/Test_2.scala  |    6 +-
 test/files/run/macro-reify-type/Macros_1.scala     |   11 +-
 test/files/run/macro-reify-type/Test_2.scala       |   18 +-
 test/files/run/macro-reify-unreify/Macros_1.scala  |   10 +-
 test/files/run/macro-repl-basic.check              |   22 +-
 test/files/run/macro-repl-basic.scala              |   14 +-
 test/files/run/macro-repl-dontexpand.check         |   14 +-
 test/files/run/macro-repl-dontexpand.scala         |    6 +-
 test/files/run/macro-settings/Impls_Macros_1.scala |    9 +-
 .../run/macro-sip19-revised/Impls_Macros_1.scala   |    7 +-
 test/files/run/macro-sip19/Impls_Macros_1.scala    |    7 +-
 test/files/run/macro-subpatterns.check             |    3 +
 test/files/run/macro-subpatterns/Macro_1.scala     |   17 +
 test/files/run/macro-subpatterns/Test_2.scala      |    5 +
 test/files/run/macro-system-properties.check       |   20 +-
 test/files/run/macro-system-properties.scala       |    6 +-
 ...eck => macro-term-declared-in-annotation.check} |    0
 .../macro-term-declared-in-annotation.flags}       |    0
 .../Impls_1.scala                                  |   11 +
 .../Macros_2.scala                                 |    0
 .../Test_3.scala                                   |    0
 ...heck => macro-term-declared-in-anonymous.check} |    0
 .../macro-term-declared-in-anonymous.flags}        |    0
 .../macro-term-declared-in-anonymous/Impls_1.scala |   11 +
 .../Macros_Test_2.scala                            |    6 +
 ...ck.check => macro-term-declared-in-block.check} |    0
 .../macro-term-declared-in-block.flags}            |    0
 .../run/macro-term-declared-in-block/Impls_1.scala |   11 +
 .../Macros_Test_2.scala                            |    6 +
 ...ck => macro-term-declared-in-class-class.check} |    0
 .../macro-term-declared-in-class-class.flags}      |    0
 .../Impls_1.scala                                  |   11 +
 .../Macros_Test_2.scala                            |   10 +
 ...k => macro-term-declared-in-class-object.check} |    0
 .../macro-term-declared-in-class-object.flags}     |    0
 .../Impls_1.scala                                  |   11 +
 .../Macros_Test_2.scala                            |   10 +
 ...ss.check => macro-term-declared-in-class.check} |    0
 .../macro-term-declared-in-class.flags}            |    0
 .../run/macro-term-declared-in-class/Impls_1.scala |   11 +
 .../Macros_Test_2.scala                            |    7 +
 ... => macro-term-declared-in-default-param.check} |    0
 .../macro-term-declared-in-default-param.flags}    |    0
 .../Impls_1.scala                                  |   11 +
 .../Macros_Test_2.scala                            |    7 +
 ...=> macro-term-declared-in-implicit-class.check} |    0
 .../macro-term-declared-in-implicit-class.flags}   |    0
 .../Impls_Macros_1.scala                           |   19 +
 .../Test_2.scala                                   |    0
 ...d.check => macro-term-declared-in-method.check} |    0
 .../macro-term-declared-in-method.flags}           |    0
 .../macro-term-declared-in-method/Impls_1.scala    |   11 +
 .../Macros_Test_2.scala                            |    8 +
 ...k => macro-term-declared-in-object-class.check} |    0
 .../macro-term-declared-in-object-class.flags}     |    0
 .../Impls_1.scala                                  |   11 +
 .../Macros_Test_2.scala                            |   10 +
 ... => macro-term-declared-in-object-object.check} |    0
 .../macro-term-declared-in-object-object.flags}    |    0
 .../Impls_1.scala                                  |   11 +
 .../Macros_Test_2.scala                            |   10 +
 ...t.check => macro-term-declared-in-object.check} |    0
 .../macro-term-declared-in-object.flags}           |    0
 .../macro-term-declared-in-object/Impls_1.scala    |   11 +
 .../Macros_Test_2.scala                            |    7 +
 ...=> macro-term-declared-in-package-object.check} |    0
 .../macro-term-declared-in-package-object.flags}   |    0
 .../Impls_1.scala                                  |   11 +
 .../Macros_Test_2.scala                            |    8 +
 ...eck => macro-term-declared-in-refinement.check} |    0
 .../macro-term-declared-in-refinement.flags}       |    0
 .../Impls_1.scala                                  |   11 +
 .../Macros_Test_2.scala                            |    8 +
 test/files/run/macro-term-declared-in-trait.check  |   15 +
 .../macro-term-declared-in-trait.flags}            |    0
 .../run/macro-term-declared-in-trait/Impls_1.scala |   11 +
 .../Macros_Test_2.scala                            |   13 +
 .../run/macro-typecheck-implicitsdisabled.check    |    2 +-
 .../Impls_Macros_1.scala                           |   16 +-
 .../files/run/macro-typecheck-macrosdisabled.check |   10 +-
 .../Impls_Macros_1.scala                           |   25 +-
 .../run/macro-typecheck-macrosdisabled2.check      |   12 +-
 .../Impls_Macros_1.scala                           |   25 +-
 .../Impls_Macros_1.scala                           |   20 +-
 .../Impls_Macros_1.scala                           |    7 +-
 test/files/run/macro-vampire-false-warning.check   |    2 +
 .../macro-vampire-false-warning.flags}             |    0
 .../run/macro-vampire-false-warning/Macros_1.scala |   52 +
 .../run/macro-vampire-false-warning/Test_2.scala   |    6 +
 .../macro-whitebox-dynamic-materialization.check   |    2 +
 .../Macros_1.scala                                 |   25 +
 .../Test_2.scala                                   |    4 +
 test/files/run/macro-whitebox-extractor.check      |    1 +
 .../run/macro-whitebox-extractor/Macros_1.scala    |   21 +
 .../run/macro-whitebox-extractor/Test_2.scala      |    5 +
 .../macro-whitebox-fundep-materialization.check    |    1 +
 .../Macros_1.scala                                 |   39 +
 .../Test_2.scala                                   |   12 +
 ...t6955.check => macro-whitebox-structural.check} |    0
 .../macro-whitebox-structural/Impls_Macros_1.scala |   16 +
 .../run/macro-whitebox-structural/Test_2.scala     |    5 +
 test/files/run/macroPlugins-macroArgs.check        |    2 +
 .../run/macroPlugins-macroArgs/Macros_2.scala      |   11 +
 .../run/macroPlugins-macroArgs/Plugin_1.scala      |   21 +
 test/files/run/macroPlugins-macroArgs/Test_3.flags |    1 +
 test/files/run/macroPlugins-macroArgs/Test_3.scala |    4 +
 .../run/macroPlugins-macroArgs/scalac-plugin.xml   |    4 +
 test/files/run/macroPlugins-macroExpand.check      |    2 +
 .../run/macroPlugins-macroExpand/Macros_2.scala    |   18 +
 .../run/macroPlugins-macroExpand/Plugin_1.scala    |   27 +
 .../run/macroPlugins-macroExpand/Test_3.flags      |    1 +
 .../run/macroPlugins-macroExpand/Test_3.scala      |    4 +
 .../run/macroPlugins-macroExpand/scalac-plugin.xml |    4 +
 test/files/run/macroPlugins-macroRuntime.check     |    2 +
 .../run/macroPlugins-macroRuntime/Macros_2.scala   |   11 +
 .../run/macroPlugins-macroRuntime/Plugin_1.scala   |   20 +
 .../run/macroPlugins-macroRuntime/Test_3.flags     |    1 +
 .../run/macroPlugins-macroRuntime/Test_3.scala     |    4 +
 .../macroPlugins-macroRuntime/scalac-plugin.xml    |    4 +
 test/files/run/macroPlugins-namerHooks.check       |   45 +
 test/files/run/macroPlugins-namerHooks.scala       |   39 +
 test/files/run/macroPlugins-typedMacroBody.check   |    2 +
 .../run/macroPlugins-typedMacroBody/Macros_2.flags |    1 +
 .../run/macroPlugins-typedMacroBody/Macros_2.scala |   18 +
 .../run/macroPlugins-typedMacroBody/Plugin_1.scala |   21 +
 .../run/macroPlugins-typedMacroBody/Test_3.scala   |    4 +
 .../macroPlugins-typedMacroBody/scalac-plugin.xml  |    4 +
 test/files/run/manifests-new.scala                 |    5 +-
 test/files/run/manifests-old.scala                 |    5 +-
 test/files/run/mapConserve.scala                   |   11 +-
 test/files/run/mapValues.scala                     |    2 +-
 test/files/run/map_java_conversions.scala          |   26 +-
 test/files/run/map_test.scala                      |    2 +-
 test/files/run/matchbytes.scala                    |    2 +-
 test/files/run/matchintasany.scala                 |    2 +-
 test/files/run/matchnull.scala                     |    2 +-
 test/files/run/matchonseq.scala                    |   10 +-
 test/files/run/memberpos.check                     |   11 +
 test/files/run/memberpos.scala                     |   39 +
 test/files/run/mirror_symbolof_x.check             |   13 +
 test/files/run/mirror_symbolof_x.scala             |   43 +
 test/files/run/misc.check                          |   24 +
 test/files/run/misc.scala                          |    2 +-
 test/files/run/missingparams.scala                 |    4 +-
 test/files/run/mixin-signatures.check              |   59 +
 test/files/run/mixin-signatures.scala              |  105 +
 test/files/run/mutable-treeset.scala               |  145 +
 test/files/run/name-based-patmat.check             |   12 +
 test/files/run/name-based-patmat.scala             |  105 +
 test/files/run/names-defaults.check                |    4 +
 test/files/run/names-defaults.scala                |    3 +
 test/files/run/no-pickle-skolems.check             |    1 +
 test/files/run/no-pickle-skolems/Source_1.scala    |    5 +
 test/files/run/no-pickle-skolems/Test_2.scala      |   39 +
 test/files/run/nodebuffer-array.check              |    3 -
 test/files/run/nodebuffer-array.scala              |   15 -
 test/files/run/null-and-intersect.scala            |    6 +-
 test/files/run/null-hash.scala                     |    2 +-
 test/files/run/number-parsing.scala                |    4 +-
 test/files/run/numbereq.scala                      |   45 +-
 test/files/run/option-fold.scala                   |   13 +-
 test/files/run/origins.flags                       |    2 +-
 test/files/run/origins.scala                       |    2 +-
 test/files/run/packrat1.check                      |    7 -
 test/files/run/packrat1.scala                      |   47 -
 test/files/run/packrat2.check                      |    7 -
 test/files/run/packrat2.scala                      |   57 -
 test/files/run/packrat3.check                      |    7 -
 test/files/run/packrat3.scala                      |   51 -
 test/files/run/parmap-ops.scala                    |   12 +-
 test/files/run/parserFilter.check                  |    9 -
 test/files/run/parserFilter.scala                  |   15 -
 test/files/run/parserForFilter.check               |    1 -
 test/files/run/parserForFilter.scala               |   12 -
 test/files/run/parserJavaIdent.check               |   26 -
 test/files/run/parserJavaIdent.scala               |   26 -
 test/files/run/parserNoSuccessMessage.check        |   20 -
 test/files/run/parserNoSuccessMessage.scala        |   19 -
 test/files/run/partialfun.scala                    |    2 +-
 test/files/run/patch-boundary.scala                |    4 +-
 test/files/run/patmat-behavior-2.check             |   24 +
 test/files/run/patmat-behavior-2.scala             |   50 +
 test/files/run/patmat-behavior.check               |   90 +
 test/files/run/patmat-behavior.scala               |   95 +
 test/files/run/patmat-bind-typed.check             |    1 +
 test/files/run/patmat-bind-typed.scala             |    8 +
 test/files/run/patmat-exprs.scala                  |   10 +-
 test/files/run/patmat-finally.scala                |    2 +-
 test/files/run/patmat-mix-case-extractor.check     |    8 +
 test/files/run/patmat-mix-case-extractor.scala     |  110 +
 test/files/run/patmat-seqs.scala                   |   10 +-
 test/files/run/patmat_unapp_abstype-new.check      |    6 +
 test/files/run/patmat_unapp_abstype-new.scala      |    4 +-
 test/files/run/patmat_unapp_abstype-old.check      |    4 -
 test/files/run/patmat_unapp_abstype-old.flags      |    1 -
 test/files/run/patmat_unapp_abstype-old.scala      |   83 -
 test/files/run/patmatnew.check                     |   15 +
 test/files/run/patmatnew.scala                     |   37 +-
 test/files/run/pc-conversions.scala                |   38 +-
 test/files/run/pf-catch.scala                      |   10 +-
 test/files/run/position-val-def.check              |   30 +
 test/files/run/position-val-def.scala              |   26 +
 test/files/run/preinits.check                      |    6 +
 test/files/run/primitive-sigs-2-new.flags          |    1 +
 test/files/run/primitive-sigs-2-new.scala          |    4 +-
 test/files/run/primitive-sigs-2-old.flags          |    1 +
 test/files/run/primitive-sigs-2-old.scala          |    4 +-
 test/files/run/priorityQueue.scala                 |   66 +-
 test/files/run/private-inline.check                |   12 +
 test/files/run/private-inline.flags                |    2 +-
 test/files/run/private-inline.scala                |   18 +-
 .../run/{t5284c.check => private-override.check}   |    0
 test/files/run/programmatic-main.check             |   58 +-
 test/files/run/programmatic-main.scala             |    4 +-
 test/files/run/proxy.scala                         |    6 +-
 test/files/run/range-unit.scala                    |    8 +-
 test/files/run/range.check                         |    0
 test/files/run/range.scala                         |   35 +-
 test/files/run/records.scala                       |    9 +-
 test/files/run/reflect-priv-ctor.check             |    1 +
 test/files/run/reflect-priv-ctor.scala             |   22 +
 .../files/run/reflection-allmirrors-tostring.check |   18 +-
 .../files/run/reflection-allmirrors-tostring.scala |   27 +-
 test/files/run/reflection-companion.check          |    6 +
 test/files/run/reflection-companion.scala          |   16 +
 test/files/run/reflection-companiontype.check      |   12 +
 test/files/run/reflection-companiontype.scala      |   22 +
 ...eflection-constructormirror-inner-badpath.scala |    4 +-
 .../reflection-constructormirror-inner-good.scala  |    4 +-
 ...flection-constructormirror-nested-badpath.scala |    4 +-
 .../reflection-constructormirror-nested-good.scala |    4 +-
 ...ection-constructormirror-toplevel-badpath.scala |    4 +-
 ...eflection-constructormirror-toplevel-good.scala |    4 +-
 test/files/run/reflection-enclosed-basic.scala     |    4 +-
 .../run/reflection-enclosed-inner-basic.scala      |   10 +-
 .../reflection-enclosed-inner-inner-basic.scala    |   10 +-
 .../reflection-enclosed-inner-nested-basic.scala   |   10 +-
 .../run/reflection-enclosed-nested-basic.scala     |   10 +-
 .../reflection-enclosed-nested-inner-basic.scala   |   10 +-
 .../reflection-enclosed-nested-nested-basic.scala  |   10 +-
 test/files/run/reflection-equality.check           |   10 +-
 test/files/run/reflection-equality.scala           |    6 +-
 test/files/run/reflection-fancy-java-classes.check |   12 +
 .../run/reflection-fancy-java-classes/Foo_1.java   |    5 +
 .../run/reflection-fancy-java-classes/Test_2.scala |   20 +
 .../reflection-fieldmirror-accessorsareokay.scala  |    4 +-
 .../run/reflection-fieldmirror-ctorparam.scala     |    2 +-
 .../run/reflection-fieldmirror-getsetval.scala     |    2 +-
 .../run/reflection-fieldmirror-getsetvar.scala     |    2 +-
 ...flection-fieldmirror-nmelocalsuffixstring.scala |    2 +-
 .../run/reflection-fieldmirror-privatethis.scala   |    2 +-
 .../run/reflection-fieldsymbol-navigation.scala    |    2 +-
 test/files/run/reflection-idtc.check               |    6 +
 test/files/run/reflection-idtc.scala               |   16 +
 test/files/run/reflection-implClass.scala          |   16 +-
 test/files/run/reflection-implicit.scala           |   10 +-
 test/files/run/reflection-java-annotations.check   |    3 +
 .../run/reflection-java-annotations/Test_2.scala   |    4 +-
 test/files/run/reflection-java-crtp/Main_2.scala   |    2 +-
 .../files/run/reflection-magicsymbols-invoke.check |   17 +-
 .../files/run/reflection-magicsymbols-invoke.scala |   26 +-
 test/files/run/reflection-magicsymbols-repl.check  |    6 +-
 test/files/run/reflection-magicsymbols-repl.scala  |    4 +-
 .../run/reflection-magicsymbols-vanilla.scala      |    6 +-
 test/files/run/reflection-mem-typecheck.scala      |    2 +-
 .../files/run/reflection-methodsymbol-params.scala |   16 +-
 .../run/reflection-methodsymbol-returntype.scala   |   16 +-
 .../run/reflection-methodsymbol-typeparams.scala   |   16 +-
 test/files/run/reflection-names.scala              |    8 +-
 test/files/run/reflection-repl-classes.check       |    6 +-
 test/files/run/reflection-repl-classes.scala       |    2 +-
 test/files/run/reflection-repl-elementary.check    |    2 -
 test/files/run/reflection-sanitychecks.scala       |   16 +-
 test/files/run/reflection-scala-annotations.check  |    7 +
 test/files/run/reflection-scala-annotations.scala  |   16 +
 test/files/run/reflection-sorted-decls.scala       |    2 +-
 test/files/run/reflection-sorted-members.check     |    1 -
 test/files/run/reflection-sorted-members.scala     |    2 +-
 test/files/run/reflection-sync-potpourri.scala     |   32 +
 .../reflection-sync-subtypes.check}                |    0
 test/files/run/reflection-sync-subtypes.scala      |   20 +
 test/files/run/reflection-tags.check               |    1 +
 test/files/run/reflection-tags.scala               |   21 +
 .../run/reflection-valueclasses-derived.scala      |    6 +-
 test/files/run/reflection-valueclasses-magic.scala |   10 +-
 .../run/reflection-valueclasses-standard.scala     |    4 +-
 test/files/run/reify-aliases.check                 |    2 +-
 test/files/run/reify-each-node-type.check          |   35 +
 test/files/run/reify-each-node-type.scala          |  110 +
 test/files/run/reify-repl-fail-gracefully.check    |    6 +-
 test/files/run/reify_ann1a.check                   |    2 +-
 test/files/run/reify_ann1a.scala                   |    2 +-
 test/files/run/reify_ann1b.check                   |    7 +-
 test/files/run/reify_ann1b.scala                   |    2 +-
 test/files/run/reify_ann2a.check                   |    2 +-
 test/files/run/reify_ann2a.scala                   |    2 +-
 test/files/run/reify_ann3.check                    |    4 +-
 test/files/run/reify_ann3.scala                    |    2 +-
 test/files/run/reify_ann4.scala                    |    2 +-
 test/files/run/reify_ann5.scala                    |    2 +-
 test/files/run/reify_classfileann_a.check          |    5 +
 test/files/run/reify_classfileann_a.scala          |    2 +-
 test/files/run/reify_classfileann_b.check          |    5 +
 test/files/run/reify_classfileann_b.scala          |    2 +-
 test/files/run/reify_closure8b.check               |    2 +-
 test/files/run/reify_copypaste1.scala              |    6 +-
 test/files/run/reify_extendbuiltins.scala          |    4 +-
 test/files/run/reify_for1.check                    |    0
 test/files/run/reify_fors_oldpatmat.flags          |    0
 .../run/reify_implicits-new.check                  |    0
 test/files/run/reify_implicits-new.scala           |   18 +
 .../run/reify_implicits-old.check                  |    0
 test/files/run/reify_implicits-old.scala           |   17 +
 test/files/run/reify_lazyevaluation.scala          |    2 +
 test/files/run/reify_lazyunit.check                |    3 +
 test/files/run/reify_maps_oldpatmat.flags          |    0
 test/files/run/reify_newimpl_11.check              |    6 +-
 test/files/run/reify_newimpl_13.check              |    6 +-
 test/files/run/reify_newimpl_19.check              |    6 +-
 test/files/run/reify_newimpl_22.check              |    6 +-
 test/files/run/reify_newimpl_23.check              |    6 +-
 test/files/run/reify_newimpl_25.check              |    6 +-
 test/files/run/reify_newimpl_26.check              |    6 +-
 test/files/run/reify_newimpl_30.check              |    6 +-
 test/files/run/reify_newimpl_35.check              |    4 -
 test/files/run/reify_newimpl_45.scala              |    6 +-
 test/files/run/reify_printf.check                  |    1 +
 test/files/run/reify_printf.scala                  |   16 +-
 ...si5841.check => reify_renamed_term_t5841.check} |    0
 ...si5841.scala => reify_renamed_term_t5841.scala} |    0
 test/files/run/reify_this.scala                    |    6 +-
 test/files/run/repl-assign.check                   |    4 -
 test/files/run/repl-backticks.scala                |    6 +-
 test/files/run/repl-bare-expr.check                |    8 +-
 test/files/run/repl-colon-type.check               |   28 +-
 test/files/run/repl-empty-package.check            |    7 +
 test/files/run/repl-empty-package/s_1.scala        |    3 +
 test/files/run/repl-empty-package/s_2.scala        |    5 +
 test/files/run/repl-javap-app.check                |   38 +
 test/files/run/repl-javap-app.scala                |   10 +
 test/files/run/repl-javap-def.scala                |   17 +
 test/files/run/repl-javap-fun.scala                |   16 +
 test/files/run/repl-javap-mem.scala                |   19 +
 test/files/run/repl-javap-memfun.scala             |   18 +
 test/files/run/repl-javap-more-fun.scala           |   17 +
 test/files/run/repl-javap-outdir-funs/foo_1.scala  |    6 +
 .../run/repl-javap-outdir-funs/run-repl_7.scala    |   17 +
 test/files/run/repl-javap-outdir/foo_1.scala       |    6 +
 test/files/run/repl-javap-outdir/run-repl_7.scala  |   12 +
 test/files/run/repl-javap.scala                    |   13 +
 test/files/run/repl-out-dir.check                  |   49 +
 test/files/run/repl-out-dir.scala                  |   13 +
 test/files/run/repl-paste-2.check                  |    3 -
 test/files/run/repl-paste-3.check                  |    4 -
 test/files/run/repl-paste-4.pastie                 |    4 +
 test/files/run/repl-paste-4.scala                  |   20 +
 test/files/run/repl-paste-raw.pastie               |    8 +
 test/files/run/repl-paste-raw.scala                |   20 +
 test/files/run/repl-paste.check                    |    3 +-
 test/files/run/repl-power.check                    |   14 +-
 test/files/run/repl-power.scala                    |    2 +-
 test/files/run/repl-reset.check                    |   10 +-
 test/files/run/repl-save.check                     |    3 +
 test/files/run/repl-save.scala                     |   25 +
 test/files/run/repl-term-macros.check              |   40 +
 test/files/run/repl-term-macros.scala              |   20 +
 test/files/run/repl-transcript.check               |    3 -
 test/files/run/repl-trim-stack-trace.scala         |   45 +
 test/files/run/repl-type-verbose.check             |    6 +-
 test/files/run/resetattrs-this.scala               |    8 +-
 test/files/run/richs.check                         |    1 +
 test/files/run/richs.scala                         |    4 +-
 test/files/run/run-bug4840.scala                   |    8 +-
 test/files/run/runtime-richChar.scala              |   10 +-
 test/files/run/runtime.check                       |    6 +
 test/files/run/runtime.scala                       |    4 +-
 .../run/{interpolation.flags => sammy_java8.flags} |    0
 test/files/run/sammy_java8.scala                   |   34 +
 test/files/run/scan.scala                          |    8 +-
 test/files/run/search.check                        |    6 +
 test/files/run/search.scala                        |   14 +
 test/files/run/seqlike-kmp.scala                   |    6 +-
 test/files/run/sequenceComparisons.scala           |   42 +-
 test/files/run/serialize-stream.scala              |    4 +-
 test/files/run/settings-parse.check                |  566 +++
 test/files/run/settings-parse.scala                |   29 +
 test/files/run/shortClass.check                    |   10 +
 test/files/run/shortClass.scala                    |   24 +
 test/files/run/showdecl.check                      |   34 +
 test/files/run/showdecl/Macros_1.scala             |   30 +
 test/files/run/showdecl/Test_2.scala               |   32 +
 test/files/run/showraw_aliases.check               |    4 +-
 test/files/run/showraw_aliases.scala               |    2 +-
 test/files/run/showraw_mods.check                  |    2 +-
 test/files/run/showraw_tree.check                  |    4 +-
 test/files/run/showraw_tree_ids.check              |    4 +-
 test/files/run/showraw_tree_kinds.check            |    4 +-
 test/files/run/showraw_tree_types_ids.check        |   16 +-
 test/files/run/showraw_tree_types_ids.scala        |    4 +-
 test/files/run/showraw_tree_types_typed.check      |   16 +-
 test/files/run/showraw_tree_types_typed.scala      |    4 +-
 test/files/run/showraw_tree_types_untyped.check    |    4 +-
 test/files/run/showraw_tree_ultimate.check         |   24 +-
 test/files/run/showraw_tree_ultimate.scala         |    4 +-
 test/files/run/si5045.scala                        |   46 -
 test/files/run/slice-strings.scala                 |    6 +-
 test/files/run/slices.scala                        |    3 +
 test/files/run/sm-interpolator.scala               |    6 +-
 test/files/run/spec-nlreturn.scala                 |    5 +-
 ...pand-star2.check => static-module-method.check} |    0
 test/files/run/static-module-method.scala          |   14 +
 test/files/run/streamWithFilter.scala              |    2 +-
 test/files/run/stream_flatmap_odds.scala           |    2 +-
 test/files/run/stream_length.check                 |    4 +
 test/files/run/stream_length.scala                 |    6 +-
 test/files/run/streams.scala                       |    2 +-
 test/files/run/string-extractor.check              |    9 +
 test/files/run/string-extractor.scala              |   60 +
 test/files/run/stringbuilder-drop.scala            |    4 +-
 test/files/run/stringbuilder.scala                 |   21 +-
 test/files/run/stringinterpolation_macro-run.check |    5 +
 test/files/run/stringinterpolation_macro-run.scala |   18 +
 test/files/run/structural.scala                    |   59 +-
 test/files/run/synchronized.check                  |    1 +
 test/files/run/synchronized.flags                  |    2 +-
 test/files/run/sysprops.scala                      |   10 +-
 test/files/run/t0017.scala                         |    2 +-
 test/files/run/t0091.check                         |    1 +
 test/files/run/t0091.scala                         |   15 +-
 test/files/run/t0325.scala                         |   10 +-
 test/files/run/t0421-old.scala                     |   12 +-
 test/files/run/t0432.scala                         |    3 +
 test/files/run/t0486.check                         |    8 -
 test/files/run/t0486.scala                         |   24 -
 test/files/run/t0508.scala                         |    2 +-
 test/files/run/t0528.scala                         |    2 +
 test/files/run/t0631.scala                         |    2 +-
 test/files/run/t0663.check                         |    1 -
 test/files/run/t0663.scala                         |    6 -
 test/files/run/t0668.check                         |    0
 test/files/run/t0677-old.scala                     |    5 +-
 test/files/run/t0700.check                         |    2 -
 test/files/run/t0700.scala                         |   24 -
 test/files/run/t0807.scala                         |    2 +-
 test/files/run/t0883.scala                         |   12 +-
 test/files/run/t1005.scala                         |   11 +-
 test/files/run/t1042.scala                         |    2 +-
 .../{virtpatmat_opt_sharing.check => t107.check}   |    0
 test/files/run/t107.scala                          |    8 +
 test/files/run/t1079.check                         |    1 -
 test/files/run/t1079.scala                         |    3 -
 test/files/run/t1100.check                         |    4 -
 test/files/run/t1100.scala                         |   17 -
 test/files/run/t1110.scala                         |    6 +-
 test/files/run/t1141.scala                         |    6 +-
 test/files/run/t1167.flags                         |    1 +
 test/files/run/t1167.scala                         |    6 +-
 test/files/run/t1195-new.scala                     |    6 +-
 test/files/run/t1195-old.scala                     |    3 +
 test/files/run/t1220.scala                         |    2 +-
 test/files/run/t1300.scala                         |    4 +-
 test/files/run/t1309.scala                         |    2 +-
 test/files/run/t1323.scala                         |    4 +-
 test/files/run/t1333.scala                         |    4 +-
 test/files/run/t1368.check                         |    3 +
 test/files/run/t1423.scala                         |    2 +-
 test/files/run/t1427.check                         |    3 +
 test/files/run/t1427.scala                         |    5 +-
 test/files/run/t1430/Bar_1.java                    |    4 +-
 test/files/run/t1430/Test_2.scala                  |    2 +-
 test/files/run/t1500.scala                         |    8 +-
 test/files/run/t1501.scala                         |    8 +-
 test/files/run/t1503.check                         |    1 +
 test/files/run/t1503.scala                         |   20 +
 .../t1503_future.flags}                            |    0
 test/files/run/t1503_future.scala                  |   17 +
 test/files/run/t1505.scala                         |   13 +-
 test/files/run/t153.scala                          |    2 +-
 test/files/run/t1537.scala                         |    6 +-
 test/files/run/t1591.scala                         |    6 +-
 test/files/run/t1620.check                         |    6 -
 test/files/run/t1620.scala                         |   16 -
 test/files/run/t1718.scala                         |    4 +-
 test/files/run/t1766.scala                         |   15 +-
 test/files/run/t1773.scala                         |   12 -
 test/files/run/t1829.check                         |    0
 test/files/run/t1829.scala                         |    2 +-
 test/files/run/t1909.check                         |    3 +
 test/files/run/t1909.scala                         |   12 +
 test/files/run/t1909b.scala                        |    9 +
 test/files/run/t1909c.scala                        |    9 +
 test/files/run/t1987.scala                         |    8 +-
 test/files/run/t2029.scala                         |    4 +-
 test/files/run/t2074_2.scala                       |    2 +-
 test/files/run/t2087-and-2400.scala                |    6 +-
 test/files/run/t2106.check                         |    6 +
 test/files/run/t2106.flags                         |    2 +-
 test/files/run/t2106.scala                         |    2 +-
 test/files/run/t2124.check                         |    1 -
 test/files/run/t2124.scala                         |   25 -
 test/files/run/t2125.check                         |    1 -
 test/files/run/t2125.scala                         |   25 -
 test/files/run/t2212.check                         |    1 +
 test/files/run/t2251.check                         |    1 +
 test/files/run/t2251.flags                         |    1 +
 test/files/run/t2251.scala                         |   19 +
 test/files/run/t2251b.check                        |   11 +
 test/files/run/t2251b.flags                        |    1 +
 test/files/run/t2251b.scala                        |   48 +
 test/files/run/t2276.check                         |    8 -
 test/files/run/t2276.scala                         |   24 -
 test/files/run/t2296c/Action.java                  |    2 +-
 test/files/run/t2308a.scala                        |    6 +-
 test/{pending => files}/run/t2318.check            |    0
 test/files/run/t2318.scala                         |   41 +
 test/files/run/t2333.scala                         |    4 +-
 test/files/run/t2337.scala                         |    2 +-
 test/files/run/t2354.scala                         |   17 -
 test/files/run/t2417.scala                         |   20 +-
 test/files/run/t2464/Annotated.java                |    5 +
 test/files/run/t2464/Connect.java                  |   20 +
 test/files/run/t2464/Test.scala                    |   35 +
 test/files/run/t2512.scala                         |    4 +-
 test/files/run/t2514.scala                         |   10 +-
 test/files/run/t2526.scala                         |   16 +-
 test/files/run/t2552.scala                         |   14 +-
 test/files/run/t2577.check                         |    1 +
 test/files/run/t2577.scala                         |   17 +
 test/files/run/t2594_tcpoly.check                  |    0
 test/files/run/t2594_tcpoly.scala                  |    5 +-
 test/files/run/t261.check                          |    2 +
 test/files/run/t261.scala                          |   11 +
 test/files/run/t2636.scala                         |   17 +-
 test/files/run/t266.scala                          |    4 +-
 test/files/run/t2721.check                         |    2 -
 test/files/run/t2721.scala                         |   12 -
 test/files/run/t2755.scala                         |    6 +-
 test/files/run/t2800.scala                         |    8 +-
 test/files/run/t2818.scala                         |    4 +-
 test/files/run/t2849.scala                         |    6 +-
 test/files/run/t2867.scala                         |    6 +-
 test/files/run/t2873.check                         |    2 +-
 test/files/run/t2873.scala                         |    7 +-
 test/files/run/t2886.check                         |    4 +-
 test/files/run/t2958.scala                         |    4 +-
 test/files/run/t3026.scala                         |    2 +-
 test/files/run/t3038.scala                         |   14 +-
 test/files/run/t3038c/A_1.scala                    |    4 +-
 test/files/run/t3038d.scala                        |   16 +-
 test/files/run/t3050.scala                         |    6 +-
 test/files/run/t3112.scala                         |    2 +-
 test/files/run/t3150.scala                         |    2 +-
 test/files/run/t3158.scala                         |    2 +-
 test/files/run/t3175.scala                         |   29 +-
 test/files/{continuations-run => run}/t3199b.check |    0
 test/files/run/t3199b.scala                        |   11 +
 test/files/run/t3232.scala                         |    8 +-
 test/files/run/t3235-minimal.check                 |   12 +
 .../t5589neg.flags => run/t3235-minimal.flags}     |    0
 test/files/run/t3235-minimal.scala                 |    8 +
 test/files/run/t3242.scala                         |    3 +
 test/files/run/t3269.scala                         |    2 +-
 test/files/run/t3273.scala                         |    2 +-
 test/files/run/t3326.scala                         |   28 +-
 .../{virtpatmat_opt_sharing.check => t3346a.check} |    0
 test/files/run/t3346a.scala                        |   11 +
 test/files/run/t3346d.scala                        |   21 +
 test/files/run/t3346e.check                        |   12 +
 test/files/run/t3346e.scala                        |   81 +
 test/files/run/t3346f.check                        |    2 +
 test/files/run/t3346f.scala                        |   15 +
 test/files/run/t3346g.check                        |    1 +
 test/files/run/t3346g.scala                        |    9 +
 test/files/run/t3346h.check                        |    1 +
 test/files/run/t3346h.scala                        |    9 +
 test/files/run/t3346j.check                        |    1 +
 test/files/run/t3346j.scala                        |   11 +
 test/files/run/t3361.check                         |    1 +
 test/files/run/t3361.scala                         |    6 +-
 test/files/run/t3376.check                         |    2 -
 test/files/run/t3395.scala                         |    6 +-
 test/files/run/t3397.scala                         |    2 +-
 test/files/run/t3425.check                         |    4 +
 test/files/run/t3425.scala                         |   41 +
 test/files/run/t3425b.check                        |  152 +
 test/files/run/t3425b/Base_1.scala                 |   89 +
 test/files/run/t3425b/Generated_2.scala            |  886 ++++
 test/files/run/t3452.check                         |    1 +
 test/files/run/t3452.scala                         |   21 +
 test/files/run/t3452a.check                        |    1 +
 test/files/run/t3452a/J_2.java                     |    5 +
 test/files/run/t3452a/S_1.scala                    |   24 +
 test/files/run/t3452a/S_3.scala                    |    5 +
 test/files/run/t3452b-bcode.check                  |    2 +
 test/files/run/t3452b-bcode.flags                  |    1 +
 test/files/run/t3452b-bcode/J_2.java               |    6 +
 test/files/run/t3452b-bcode/S_1.scala              |   17 +
 test/files/run/t3452b-bcode/S_3.scala              |    5 +
 test/files/run/t3452b.check                        |    2 +
 test/files/run/t3452b/J_2.java                     |    6 +
 test/files/run/t3452b/S_1.scala                    |   17 +
 test/files/run/t3452b/S_3.scala                    |    5 +
 test/files/run/t3452c.check                        |    8 +
 test/files/run/t3452c.scala                        |  113 +
 test/files/run/t3452d/A.scala                      |    7 +
 test/files/run/t3452d/Test.java                    |   12 +
 test/files/run/t3452e/A.scala                      |    4 +
 test/files/run/t3452e/B.java                       |    2 +
 test/files/run/t3452e/Test.scala                   |    3 +
 test/files/run/t3452f.scala                        |   19 +
 test/files/run/t3452g/A.scala                      |    9 +
 test/files/run/t3452g/Test.java                    |   14 +
 test/files/run/t3452h.scala                        |    8 +
 test/files/run/t3488.check                         |    6 +
 test/files/run/t3488.scala                         |    2 +-
 test/files/run/t3493.scala                         |    4 +-
 test/files/run/t3496.scala                         |    4 +-
 test/files/run/t3502.scala                         |    6 +-
 test/files/run/t3507-new.scala                     |    4 +-
 test/files/run/t3509.scala                         |    4 +-
 test/files/run/t3511.scala                         |   18 +-
 test/files/run/t3516.scala                         |    2 +-
 test/files/run/t3518.scala                         |    2 +-
 test/files/run/t3529.scala                         |    1 +
 test/files/run/t3530.scala                         |    6 +-
 test/files/run/t3540.scala                         |    2 +-
 test/files/run/t3563.scala                         |    6 +-
 test/files/run/t3569.scala                         |   10 +-
 test/files/run/t3575.scala                         |    2 +-
 test/files/run/t3580.scala                         |    6 +-
 test/files/run/t3603.scala                         |    8 +-
 test/files/run/t3613.scala                         |    6 +-
 test/files/run/t3619.scala                         |    2 +-
 test/files/run/t363.scala                          |    2 +-
 test/files/run/t3647.scala                         |    4 +-
 test/files/run/t3651.scala                         |    2 +-
 test/files/run/t3667.check                         |    3 -
 test/files/run/t3667.scala                         |   35 -
 test/files/run/t3702.scala                         |    2 +-
 test/files/run/t3705.scala                         |   17 -
 test/files/run/t3714.scala                         |    2 +-
 test/files/run/t3719.scala                         |    4 +-
 test/files/run/t3758-old.scala                     |    4 +-
 test/files/run/t3822.scala                         |    8 +-
 test/files/run/t3829.scala                         |   10 +-
 test/files/run/t3832.scala                         |   17 +
 test/files/run/t3835.scala                         |    2 +-
 test/files/run/t3855.scala                         |    6 +-
 test/files/run/t3877.scala                         |   16 +-
 test/files/run/t3886.scala                         |   11 -
 test/files/run/t3887.scala                         |    2 +-
 test/files/run/t3888.check                         |    1 +
 test/files/run/t3888.scala                         |   11 +-
 test/{pending => files}/run/t3897.check            |    0
 test/files/run/t3897.flags                         |    1 +
 test/files/run/t3897/J_2.java                      |   27 +
 test/{pending => files}/run/t3897/a_1.scala        |    0
 test/files/run/t3897/a_2.scala                     |   23 +
 test/files/run/t3932.scala                         |    4 +-
 test/files/run/t3935.scala                         |    2 +-
 test/files/run/t3964.scala                         |    9 +-
 test/files/run/t3970.check                         |    1 +
 test/files/run/t3970.scala                         |    4 +-
 test/files/run/t3980.scala                         |    2 +-
 test/files/run/t3984.scala                         |    2 +-
 test/files/run/t3996.check                         |    1 +
 test/files/run/t4013.scala                         |    4 +-
 test/files/run/t4013b.scala                        |    2 +-
 test/files/run/t4023.check                         |   21 +
 test/files/run/t4023.scala                         |   34 +
 test/files/run/t4024.scala                         |    4 +-
 test/files/run/t4025.check                         |    2 -
 test/files/run/t4027.scala                         |    6 +-
 test/files/run/t4047.check                         |   12 +
 test/files/run/t4047.scala                         |    2 +-
 test/files/run/t4054.scala                         |    2 +-
 test/files/run/t4062.scala                         |    2 +-
 test/files/run/t4072.scala                         |    6 +-
 test/files/run/t408.scala                          |    2 +-
 test/files/run/t4080.check                         |    1 +
 test/files/run/t4080.scala                         |    5 +-
 test/files/run/t4119/J.java                        |    2 +-
 test/files/run/t4119/S.scala                       |    2 +-
 test/files/run/t4122.scala                         |    2 +-
 test/files/run/t4138.check                         |    2 -
 test/files/run/t4138.scala                         |    6 -
 test/files/run/t4146.scala                         |    7 -
 test/files/run/t4148.scala                         |    8 +-
 test/files/run/t4171.scala                         |    3 +
 test/files/run/t4172.check                         |    6 +-
 test/files/run/t4238/J_1.java                      |    4 +-
 test/files/run/t4285.check                         |    4 -
 test/files/run/t4287inferredMethodTypes.check      |   30 +
 test/files/run/t4287inferredMethodTypes.scala      |   25 +
 test/files/run/t4288.scala                         |    4 +-
 test/files/run/t4294.scala                         |    7 +-
 test/files/run/t4317/S_3.scala                     |    2 +-
 test/files/run/t4332.check                         |   25 +
 test/files/run/t4332.scala                         |   44 +
 test/files/run/t4332b.scala                        |   35 +
 test/files/{pos => run}/t4351.check                |    0
 test/files/run/t4351.scala                         |   21 +
 test/files/run/t4387.scala                         |   12 -
 test/files/run/t4396.check                         |    1 +
 test/files/run/t4396.scala                         |    2 +-
 test/files/run/t4398.scala                         |    2 +
 test/files/run/t4415.scala                         |    2 +-
 test/files/run/t4426.scala                         |    6 +-
 test/files/run/t4461.check                         |    3 +-
 test/files/run/t4461.scala                         |    2 +-
 test/files/run/t4535.scala                         |   10 +-
 test/files/run/t4536.scala                         |    4 +-
 test/files/run/t4537.check                         |    1 +
 test/files/run/t4537/a.scala                       |    5 +
 test/files/run/t4537/b.scala                       |    5 +
 test/files/run/t4537/c.scala                       |    8 +
 test/files/run/t4537/d.scala                       |    6 +
 test/files/run/t4542.check                         |    4 -
 test/files/run/t4560.scala                         |    3 +
 test/files/run/t4570.scala                         |    4 +-
 test/files/run/t4574.check                         |    2 -
 test/files/run/t4577.scala                         |   38 +
 test/files/run/t4594-repl-settings.scala           |   26 +
 test/files/run/t4608.scala                         |    4 +-
 test/files/run/t4617.scala                         |    2 +-
 test/files/run/t4656.scala                         |    2 +-
 test/files/run/t4660.scala                         |    2 +-
 test/files/run/t4671.check                         |    6 +-
 test/files/run/t4671.scala                         |    2 +-
 test/files/run/t4680.check                         |    7 +
 test/files/run/t4680.scala                         |    4 +-
 test/files/run/t4697.scala                         |    2 +-
 test/files/run/t4729/S_2.scala                     |    3 +-
 test/files/run/t4742.flags                         |    1 +
 test/files/run/t4742.scala                         |    7 +
 test/files/run/{si4750.check => t4750.check}       |    0
 test/files/run/{si4750.scala => t4750.scala}       |    0
 test/files/run/t4752.scala                         |    4 +-
 test/files/run/t4753.scala                         |    2 +-
 test/files/run/t4761.scala                         |    2 +-
 test/files/run/t4766.scala                         |    4 +
 test/files/run/t4777.scala                         |    4 +-
 test/files/run/t4794.scala                         |    3 +-
 test/files/run/t4809.scala                         |    8 +-
 test/files/run/t4813.check                         |    1 +
 test/files/run/t4835.scala                         |    2 +-
 test/files/run/t4841-isolate-plugins.check         |    2 +
 test/files/run/t4841-isolate-plugins/ploogin.scala |   30 +
 .../t4841-isolate-plugin.scala                     |   39 +
 test/files/run/t4841-no-plugin.check               |    1 +
 test/files/run/t4841-no-plugin.scala               |   17 +
 test/files/run/t4859.check                         |    8 +
 test/files/run/t4859.scala                         |   29 +
 test/files/run/t4871.scala                         |    2 +-
 test/files/run/t4894.scala                         |    8 +-
 test/files/run/t4895.scala                         |    6 +-
 test/files/run/t4929.check                         |    1 -
 test/files/run/t4929.scala                         |   42 -
 test/files/run/t4935.flags                         |    2 +-
 test/files/run/t4954.scala                         |   10 +-
 test/files/run/t498.scala                          |    3 +
 test/files/run/t4996.check                         |    4 +
 test/files/run/t4996.scala                         |   47 +
 test/files/run/t5018.scala                         |   14 +-
 test/files/run/{si5045.check => t5045.check}       |    0
 test/files/run/t5045.scala                         |   49 +
 test/files/run/t5052.scala                         |    6 -
 test/files/run/t5053.scala                         |    9 +-
 test/files/run/t5064.check                         |    6 +-
 test/files/run/t5072.check                         |    4 -
 test/files/run/t5080.scala                         |    4 +
 test/files/run/t5115.scala                         |   14 -
 test/files/run/t5125b.scala                        |    2 +-
 test/files/run/t5134.scala                         |    8 +
 test/files/run/t5224.check                         |    5 +
 test/files/run/t5225_2.check                       |    2 +-
 test/files/run/t5225_2.scala                       |    2 +-
 test/files/run/t5229_1.check                       |    0
 test/files/run/t5256a.scala                        |    2 +-
 test/files/run/t5256b.scala                        |    2 +-
 test/files/run/t5256c.scala                        |    2 +-
 test/files/run/t5256d.check                        |    6 +-
 test/files/run/t5256d.scala                        |    2 +-
 test/files/run/t5256e.scala                        |    2 +-
 test/files/run/t5256f.scala                        |    4 +-
 test/files/run/t5256g.check                        |    6 +-
 test/files/run/t5256g.scala                        |    2 +-
 test/files/run/t5256h.check                        |    4 +-
 test/files/run/t5256h.scala                        |    2 +-
 test/files/run/t5262.scala                         |    8 +-
 test/files/run/t5271_4.check                       |    0
 test/files/run/t5272_1_oldpatmat.flags             |    0
 test/files/run/t5272_2_oldpatmat.flags             |    0
 test/files/run/t5273_1_oldpatmat.flags             |    0
 test/files/run/t5273_2a_oldpatmat.flags            |    0
 test/files/run/t5273_2b_oldpatmat.flags            |    0
 test/files/run/t5277_1.scala                       |    3 +-
 test/files/run/t5284b.check                        |    3 +
 test/files/run/t5284b.scala                        |    2 +-
 test/files/run/t5284c.check                        |    3 +
 test/files/run/t5284c.scala                        |    2 +-
 test/files/run/t5293-map.scala                     |   88 +
 test/files/run/t5293.scala                         |   83 +
 test/files/run/t5300.scala                         |    2 +-
 test/files/run/t5313.scala                         |   14 +-
 test/files/run/t5356.scala                         |    2 +
 test/files/run/t5375.check                         |    2 +-
 test/files/run/t5375.scala                         |   23 +-
 test/files/run/t5380.check                         |    9 +
 test/files/run/t5380.scala                         |    2 +-
 test/files/run/t5415.check                         |    0
 test/files/run/t5415.scala                         |    2 +-
 test/{pending => files}/run/t5418.scala            |    0
 test/files/run/t5418b.check                        |    2 +-
 test/files/run/t5418b.scala                        |    2 +-
 test/files/run/t5428.check                         |    3 +-
 test/files/run/t5428.scala                         |   14 +-
 test/files/run/t5488-fn.scala                      |    2 +-
 test/files/run/t5488.scala                         |    6 +-
 test/files/run/t5500b.scala                        |    4 +-
 test/files/run/t5514.check                         |   19 -
 test/files/run/t5514.scala                         |   35 -
 test/files/run/t5527.check                         |   99 -
 test/files/run/t5527.scala                         |  107 -
 test/files/run/t5535.check                         |    4 -
 test/files/run/t5537.check                         |    4 -
 test/files/run/t5545.check                         |    0
 test/files/run/t5565.scala                         |   12 +
 test/files/run/t5568.flags                         |    1 +
 test/files/run/t5577.scala                         |    8 +-
 test/files/run/t5583.check                         |    4 -
 test/files/run/t5590.scala                         |   10 +-
 test/files/run/t5603.check                         |    8 +-
 test/files/run/t5603.scala                         |    5 +-
 test/files/run/t5610.scala                         |    2 +-
 test/{pending => files}/run/t5610a.check           |    0
 test/{pending => files}/run/t5610a.scala           |    0
 test/files/run/t5629.scala                         |    4 +-
 test/files/run/t5629b.check                        |    2 +-
 test/files/run/t5629b.scala                        |   23 +-
 test/files/run/t5655.check                         |    6 +-
 test/files/run/t5656.scala                         |    4 +-
 test/files/run/t5676.scala                         |    4 +-
 test/files/run/t5699.scala                         |    2 +-
 test/files/run/t5704.scala                         |    2 +-
 test/files/run/t5713/Impls_Macros_1.scala          |    2 +-
 test/files/run/t5717.scala                         |   21 +
 test/files/run/t5753_1/Impls_Macros_1.scala        |    2 +-
 test/files/run/t5753_1/Test_2.scala                |    2 +-
 test/files/run/t5753_2/Impls_Macros_1.scala        |    4 +-
 test/files/run/t5753_2/Test_2.scala                |    2 +-
 test/files/run/t576.check                          |    1 +
 test/files/run/t576.scala                          |    2 +
 test/files/run/t5789.check                         |    6 +-
 test/files/run/t5804.scala                         |   14 +-
 test/files/run/t5816.scala                         |    2 +-
 test/files/run/t5843.check                         |    9 -
 test/files/run/t5843.scala                         |   15 -
 test/files/run/t5857.scala                         |   20 +-
 test/files/run/t5867.scala                         |    6 +-
 test/files/run/t5879.check                         |    8 -
 test/files/run/t5879.scala                         |   35 +-
 test/files/run/t5880.scala                         |    8 +-
 test/files/run/t5881.scala                         |    3 +-
 test/files/run/t5894.scala                         |    7 +-
 test/files/run/t5903a.check                        |    1 +
 test/files/run/t5903a.flags                        |    1 +
 test/files/run/t5903a/Macros_1.scala               |   28 +
 test/files/run/t5903a/Test_2.scala                 |    6 +
 test/files/run/t5903b.check                        |    1 +
 test/files/run/t5903b.flags                        |    1 +
 test/files/run/t5903b/Macros_1.scala               |   25 +
 test/files/run/t5903b/Test_2.scala                 |    6 +
 test/files/run/{t6956.check => t5903c.check}       |    0
 test/files/run/t5903c.flags                        |    1 +
 test/files/run/t5903c/Macros_1.scala               |   23 +
 test/files/run/t5903c/Test_2.scala                 |    6 +
 test/files/run/t5903d.check                        |    1 +
 test/files/run/t5903d.flags                        |    1 +
 test/files/run/t5903d/Macros_1.scala               |   25 +
 test/files/run/t5903d/Test_2.scala                 |    6 +
 test/files/run/t5912.scala                         |    5 +-
 test/files/run/t5923a/Macros_1.scala               |   45 +-
 test/files/run/t5923c.scala                        |    4 +
 test/files/run/t5923d/Macros_1.scala               |    9 +
 test/files/run/t5923d/Test_2.scala                 |    7 +
 test/files/run/t5937.scala                         |    4 +-
 test/files/run/t5940.scala                         |    8 +-
 test/files/run/t5942.check                         |    0
 test/files/run/t5942.scala                         |    2 +-
 test/files/run/t5943a1.scala                       |    2 +-
 test/files/run/t5971.scala                         |    4 +-
 test/files/run/t5986.scala                         |   10 +-
 test/files/run/t6011c.check                        |    3 +
 test/files/run/t6023.scala                         |    2 +-
 test/files/run/t6028.check                         |   40 +-
 test/files/run/t6028.scala                         |    2 +-
 test/files/run/t603.scala                          |    4 +-
 test/files/run/t6052.scala                         |    4 +-
 test/files/run/t6064.scala                         |    9 +
 test/files/run/t6070.scala                         |    4 +-
 test/files/run/t6086-repl.check                    |    2 -
 test/files/run/t6102.check                         |   33 +-
 test/files/run/t6102.flags                         |    2 +-
 test/files/run/t6111.check                         |    1 +
 test/files/run/t6111.scala                         |    2 +
 test/files/run/t6113.scala                         |    4 +-
 test/files/run/t6146b.check                        |    6 +-
 test/files/run/t6150.scala                         |    8 -
 test/files/run/t6168/Context.java                  |   34 +
 test/files/run/t6168/JavaTest.java                 |    8 +
 test/files/run/t6168/SomeClass.java                |   14 +
 test/files/run/t6168/SomeClass2.java               |   12 +
 test/files/run/t6168/main.scala                    |   15 +
 test/files/run/t6168b/Context.java                 |   34 +
 test/files/run/t6168b/JavaTest.java                |    6 +
 test/files/run/t6168b/SomeClass.java               |   11 +
 test/files/run/t6168b/main.scala                   |    8 +
 test/files/run/t6178.scala                         |    2 +-
 test/files/run/t6181.scala                         |    2 +-
 test/files/run/t6187.check                         |   12 +-
 test/files/run/t6187.scala                         |    4 +-
 test/files/run/t6196.scala                         |   68 +
 test/files/run/t6197.check                         |    0
 test/files/run/t6198.check                         |    0
 test/files/run/t6199-mirror.scala                  |    2 +-
 test/files/run/t6199-toolbox.scala                 |    2 +-
 test/files/run/t6200.scala                         |   68 +
 test/files/run/t6221.check                         |    1 +
 test/files/run/t6221/Macros_1.scala                |   23 +
 test/files/run/t6221/Test_2.scala                  |   10 +
 test/files/run/t6223.check                         |    2 +-
 test/files/run/t6240-universe-code-gen.scala       |   82 +
 test/files/run/t6240a.check                        |    1 +
 test/files/run/t6240a/StepOne.java                 |   41 +
 test/files/run/t6240a/StepTwo.scala                |    7 +
 test/files/run/t6240a/Test.scala                   |   15 +
 test/files/run/t6240b.check                        |    1 +
 test/files/run/t6240b/StepOne.java                 |   41 +
 test/files/run/t6240b/StepThree.scala              |    4 +
 test/files/run/t6240b/StepTwo.scala                |   10 +
 test/files/run/t6240b/Test.scala                   |   15 +
 test/files/run/t6246.scala                         |    3 +-
 test/files/run/t6253a.scala                        |   64 +
 test/files/run/t6253b.scala                        |   62 +
 test/files/run/t6253c.scala                        |   63 +
 test/files/run/t6259.scala                         |    6 +-
 test/files/run/t6260-delambdafy.check              |    4 +
 test/files/run/t6260-delambdafy.flags              |    1 +
 test/files/run/t6260-delambdafy.scala              |   12 +
 test/files/run/t6260b.scala                        |   13 +
 test/files/run/t6260c.check                        |    5 +
 test/files/run/t6260c.scala                        |   17 +
 test/files/run/t6261.scala                         |    7 -
 test/files/run/t6273.check                         |    6 +-
 test/files/run/t6288.check                         |   14 +-
 test/files/run/t6288b-jump-position.check          |    6 +-
 test/files/run/t6288b-jump-position.scala          |    9 +-
 test/files/run/t6292.check                         |    1 +
 test/files/run/t6308.check                         |   16 +
 test/files/run/t6308.scala                         |   45 +
 .../function1.check => run/t6309.check}            |    0
 test/files/run/t6309.scala                         |   16 +
 test/files/run/t6320.check                         |    4 -
 test/files/run/t6323b.scala                        |    2 +-
 test/files/run/t6329_repl.check                    |   30 +-
 test/files/run/t6329_repl.scala                    |   13 +-
 test/files/run/t6329_repl_bug.check                |   10 +-
 ...t6329_repl_bug.pending => t6329_repl_bug.scala} |    0
 test/files/run/t6329_vanilla.check                 |    8 +-
 test/files/run/t6329_vanilla.scala                 |   15 +-
 test/files/run/t6329_vanilla_bug.check             |    3 +-
 ...vanilla_bug.pending => t6329_vanilla_bug.scala} |    0
 test/files/run/t6331.scala                         |    7 +-
 test/files/run/t6331b.scala                        |   11 +-
 test/files/run/t6355.check                         |    2 +
 test/files/run/t6355.scala                         |   17 +
 test/files/run/t6379.check                         |   14 +
 test/files/run/t6379/Macros_1.scala                |   26 +
 test/files/run/t6379/Test_2.scala                  |   22 +
 test/files/run/t6381.check                         |   19 +
 test/files/run/t6381.scala                         |   15 +
 test/files/run/t6385.scala                         |   13 +
 test/files/run/t6392b.check                        |    2 +-
 test/files/run/t6392b.scala                        |    2 +-
 test/files/run/t6394a/Macros_1.scala               |    4 +-
 test/files/run/t6394b/Macros_1.scala               |    4 +-
 test/files/run/t6406-regextract.check              |    4 +
 test/files/run/t6406-regextract.scala              |   30 +
 test/files/run/t6411a.check                        |   96 +
 test/files/run/t6411a.scala                        |   81 +
 test/files/run/t6411b.check                        |    1 +
 test/files/run/t6411b.scala                        |   12 +
 test/files/run/t6439.check                         |   29 +-
 test/files/run/t6439.scala                         |   12 +-
 test/files/run/t6440.check                         |    8 +-
 test/files/run/t6440b.check                        |    8 +-
 test/files/run/t6443.scala                         |    2 +
 test/files/run/t6448.check                         |   32 +
 test/files/run/t6448.scala                         |   61 +
 test/files/run/t6467.scala                         |   20 +
 test/files/run/t6481.check                         |    1 +
 test/files/run/t6488.check                         |    1 -
 test/files/run/t6488.scala                         |   57 +-
 test/files/run/t6507.check                         |   24 +
 test/files/run/t6507.scala                         |   14 +
 test/files/run/t6548.check                         |    2 +
 .../run/t6548/JavaAnnotationWithNestedEnum_1.java  |   17 +
 test/files/run/t6548/Test_2.scala                  |   12 +
 test/files/run/t6549.check                         |    4 -
 test/files/run/t6554.check                         |    1 +
 test/files/run/t6554.scala                         |   11 +
 test/files/run/t6555.check                         |    4 +-
 test/files/run/t6555.scala                         |    2 +-
 test/files/run/t657.scala                          |    2 +
 test/files/run/t6572/bar_1.scala                   |   19 -
 test/files/run/t6572/foo_2.scala                   |   17 -
 test/files/run/t6574b.check                        |    1 +
 test/files/run/t6574b.scala                        |    7 +
 test/files/run/t6591_1.check                       |    2 +-
 test/files/run/t6591_2.check                       |    2 +-
 test/files/run/t6591_3.check                       |    2 +-
 test/files/run/t6591_5.check                       |    2 +-
 test/files/run/t6591_6.check                       |    2 +-
 test/files/run/t6591_7.scala                       |    7 +-
 test/files/run/t6608.check                         |    1 +
 test/files/run/t6608.scala                         |   16 +
 test/files/run/t6611.scala                         |   18 +-
 test/files/run/t6632.check                         |    2 +
 test/files/run/t6632.scala                         |   31 +-
 test/files/run/t6646.check                         |    2 +-
 test/files/run/t6646.scala                         |    6 +-
 test/files/run/t6662/Macro_1.scala                 |    2 +-
 test/files/run/t6662/Test_2.scala                  |    2 +-
 test/files/run/t6669.scala                         |    2 +-
 test/files/run/t6690.check                         |    1 +
 test/files/run/t6690.scala                         |    2 +
 test/files/run/t6719.check                         |    1 +
 test/files/run/t6719.scala                         |    8 +
 test/files/run/t6731.scala                         |    2 +-
 test/files/run/t6732.check                         |    4 +
 test/files/run/t6732.scala                         |   12 +
 test/files/run/t6733.check                         |   27 +
 test/files/run/t6733.scala                         |   35 +
 test/files/run/t6745-2.scala                       |   22 +
 test/files/run/t6814.check                         |    7 +
 test/files/run/t6814/Macros_1.scala                |   24 +
 test/files/run/t6814/Test_2.scala                  |    3 +
 test/files/run/t6860.check                         |    4 +
 test/files/run/t6860.scala                         |   20 +
 test/files/run/t6863.check                         |   13 +
 test/files/run/t6863.scala                         |   24 +-
 test/files/run/t6908.scala                         |    6 +
 test/files/run/t6935.check                         |    1 +
 test/files/run/t6935.scala                         |   18 +-
 test/files/run/t6937.check                         |    6 +-
 test/files/run/t6955.scala                         |   16 +-
 test/files/run/t6956.scala                         |   13 +-
 test/files/run/t6969.scala                         |    4 +
 test/files/run/t6989/Test_2.scala                  |    8 +-
 test/files/run/t6992.check                         |    4 +
 test/files/run/t6992/Macros_1.scala                |   75 +
 test/files/run/t6992/Test_2.scala                  |   14 +
 test/files/run/t7008-scala-defined.flags           |    1 +
 .../run/t7008-scala-defined/Impls_Macros_2.scala   |    7 +-
 test/files/run/t7008-scala-defined/Test_3.scala    |    2 +-
 test/files/run/t7008/Impls_Macros_2.scala          |    7 +-
 test/files/run/t7008/Test_3.scala                  |    2 +-
 test/files/run/t7015.check                         |   11 +
 test/files/run/t7015.scala                         |   49 +
 test/files/run/t7044.check                         |   14 +
 test/files/run/t7044/Macros_1.scala                |   26 +
 test/files/run/t7044/Test_2.scala                  |   19 +
 test/files/run/t7045.check                         |    2 +
 test/files/run/t7045.scala                         |   12 +
 test/files/run/t7046.scala                         |    2 +-
 test/files/run/t7047.check                         |    3 +
 test/files/run/t7047/Impls_Macros_1.scala          |    4 +-
 test/files/run/t7064-old-style-supercalls.scala    |   48 -
 test/files/run/t7074.check                         |    9 -
 test/files/run/t7074.scala                         |   15 -
 test/files/run/t7088.check                         |    2 +
 test/files/run/t7088.scala                         |   13 +
 test/files/run/t7096.scala                         |   52 +-
 test/files/run/t7120.check                         |    1 +
 test/files/run/t7120/Base_1.scala                  |   10 +
 test/files/run/t7120/Derived_2.scala               |    9 +
 test/files/run/t7120/Run_3.scala                   |    3 +
 test/files/run/t7120b.check                        |    2 +
 test/files/run/t7120b.scala                        |   30 +
 test/files/run/t7151.check                         |    6 +
 test/files/run/t7151.scala                         |   24 +
 .../{virtpatmat_opt_sharing.check => t7157.check}  |    0
 test/files/run/t7157/Impls_Macros_1.scala          |   15 +
 test/files/run/t7157/Test_2.scala                  |    5 +
 test/files/run/t7171.check                         |    3 +
 test/files/run/t7171.flags                         |    1 +
 test/files/run/t7171.scala                         |    2 +-
 test/files/run/t7181.check                         |   23 +
 test/files/run/t7181.scala                         |   78 +
 test/files/run/t7185.check                         |   10 +-
 test/files/run/t7185.scala                         |    2 +-
 test/files/run/t7198.check                         |    2 +
 test/files/run/t7198.scala                         |    9 +
 test/files/run/t7214.scala                         |    2 +-
 test/files/run/t7223.check                         |    1 +
 test/files/run/t7223.scala                         |   11 +
 test/files/run/t7231.check                         |    2 +
 test/files/run/t7231.scala                         |   11 +
 test/files/run/t7240.check                         |    0
 test/files/run/t7240/Macros_1.scala                |   10 +-
 test/files/run/t7240/Test_2.scala                  |    4 +-
 test/files/run/t7265.scala                         |   27 -
 test/files/run/t7271.check                         |   10 +-
 test/files/run/t7271.scala                         |    6 +-
 test/files/run/t7290.check                         |    6 +
 test/files/run/{t7291b.check => t7291.check}       |    0
 test/files/run/t7291.scala                         |   22 +
 test/files/run/t7291a.check                        |    1 -
 test/files/run/t7291a.flags                        |    0
 test/files/run/t7291a.scala                        |   19 -
 test/files/run/t7291b.flags                        |    1 -
 test/files/run/t7291b.scala                        |   19 -
 test/files/run/t7300.check                         |    2 +
 test/files/run/t7300.scala                         |   11 +
 test/files/run/t7319.check                         |   13 +-
 test/files/run/t7319.scala                         |    1 +
 test/files/run/t7326.scala                         |   64 +
 test/files/run/t7328.check                         |    4 +
 test/files/run/t7328.scala                         |   18 +
 test/files/run/t7331b.check                        |    2 +-
 test/files/run/t7331c.check                        |    2 +-
 test/files/run/t7336.scala                         |    2 +-
 test/files/run/t7337.check                         |    1 +
 test/files/run/t7337.scala                         |   19 +
 test/files/run/t7341.check                         |    0
 test/files/run/t7341.scala                         |    2 +-
 test/files/run/t7374.check                         |    3 +
 test/files/run/t7374/Some.scala                    |    3 +
 test/files/run/t7374/Test.java                     |    7 +
 test/files/run/t7375b/Macros_1.scala               |    4 +-
 test/files/run/t7398.scala                         |    2 +-
 test/files/run/t7406.check                         |    1 +
 test/files/run/t7406.scala                         |   14 +
 test/files/run/t7407.check                         |    1 +
 test/files/run/t7407.flags                         |    1 +
 test/files/run/t7407.scala                         |   11 +
 test/files/run/t7407b.check                        |    2 +
 test/files/run/t7407b.flags                        |    1 +
 test/files/run/t7407b.scala                        |   20 +
 test/files/run/t7436.scala                         |    9 +
 test/files/run/t7439.check                         |    1 +
 test/files/run/t7439/Test_2.scala                  |    2 +
 test/files/run/t744.scala                          |    2 +-
 test/files/run/t7445.scala                         |    6 +
 test/files/run/t7455/Test.scala                    |    2 +-
 test/files/run/t7475b.check                        |    2 +
 test/files/run/t7475b.scala                        |   11 +
 test/files/run/t7482a.check                        |   10 +
 test/files/run/t7482a.scala                        |    8 +
 test/files/run/t7507.scala                         |    4 +
 test/files/run/t7510.check                         |    0
 test/files/run/t7533.check                         |   30 +
 test/files/run/t7533.scala                         |   38 +
 test/files/run/t7556/Test_2.scala                  |    2 +-
 test/files/run/t7569.check                         |    6 +-
 test/files/run/t7570a.check                        |    1 +
 test/files/run/t7570a.scala                        |   11 +
 test/files/run/t7570b.check                        |    1 +
 test/files/run/t7570b.scala                        |   18 +
 test/files/run/t7570c.check                        |    2 +
 test/files/run/t7570c.scala                        |   13 +
 test/files/run/t7582-private-within.check          |   12 +
 .../t7582-private-within/JavaPackagePrivate.java   |    8 +
 test/files/run/t7582-private-within/Test.scala     |   22 +
 test/files/run/t7582.check                         |    2 +
 test/files/run/{synchronized.flags => t7582.flags} |    0
 test/files/run/t7582/InlineHolder.scala            |   16 +
 test/files/run/t7582/PackageProtectedJava.java     |    6 +
 test/files/run/t7582b.check                        |    2 +
 .../files/run/{synchronized.flags => t7582b.flags} |    0
 test/files/run/t7582b/InlineHolder.scala           |   16 +
 test/files/run/t7582b/PackageProtectedJava.java    |    6 +
 test/files/run/t7584.check                         |    6 +
 .../{disabled/t7020.flags => run/t7584.flags}      |    0
 test/files/run/t7584.scala                         |   14 +
 test/files/run/t7584b.scala                        |   14 +
 test/files/run/t7617a/Macros_1.scala               |    2 +-
 test/files/run/t7617b/Macros_1.scala               |    2 +-
 test/files/run/t7617b/Test_2.scala                 |    2 +-
 test/files/run/t7634.check                         |    8 +
 test/files/run/t7634.scala                         |   22 +
 test/files/run/t7657/Macros_1.scala                |    2 +-
 test/files/run/t7700.check                         |    2 +
 test/files/run/t7700.scala                         |   17 +
 test/files/run/t7711-script-args.check             |    2 +
 test/files/run/t7711-script-args.scala             |    7 +
 test/files/run/t7711-script-args.script            |   12 +
 test/files/run/t7715.check                         |    3 +
 test/files/run/t7715.scala                         |   24 +
 test/files/run/t7747-repl.check                    |  286 ++
 test/files/run/t7747-repl.scala                    |   69 +
 test/files/run/t7763.scala                         |   20 +
 test/files/run/t7775.scala                         |    4 +-
 test/files/run/t7777.check                         |    7 +
 test/files/run/t7777/Macros_1.scala                |   17 +
 test/files/run/t7777/Test_2.scala                  |    6 +
 test/files/run/t7791-script-linenums.check         |    1 +
 test/files/run/t7791-script-linenums.scala         |   16 +
 test/files/run/t7791-script-linenums.script        |    8 +
 test/files/run/t7801.check                         |   11 +
 test/files/run/t7801.scala                         |   12 +
 test/files/run/t7805-repl-i.check                  |   11 +
 test/files/run/t7805-repl-i.scala                  |   42 +
 test/files/run/t7805-repl-i.script                 |    1 +
 test/files/run/t7817-tree-gen.check                |  104 +
 test/files/run/t7817-tree-gen.flags                |    1 +
 test/files/run/t7817-tree-gen.scala                |   65 +
 test/files/run/t7817.scala                         |   31 +
 test/files/run/t7843-jsr223-service.check          |    2 +
 test/files/run/t7843-jsr223-service.scala          |    8 +
 test/files/run/t7852.flags                         |    1 +
 test/files/run/t7852.scala                         |   39 +
 test/files/run/t7859/A_1.scala                     |   11 +
 test/files/run/t7859/B_2.scala                     |   47 +
 test/files/run/t7868.scala                         |   13 +
 test/files/run/t7868b.check                        |    6 +
 test/files/run/t7868b.scala                        |   11 +
 test/files/run/t7871.check                         |    1 +
 test/files/run/t7871/Macros_1.scala                |   27 +
 test/files/run/t7871/Test_2.scala                  |    6 +
 test/files/run/t7876.scala                         |   26 +
 test/files/run/t7880.scala                         |    7 +
 test/files/run/t7899-regression.check              |    1 +
 test/files/run/t7899-regression.flags              |    1 +
 test/files/run/t7899-regression.scala              |   24 +
 test/files/run/t7899.scala                         |    5 +
 test/files/run/t7932.check                         |    3 +
 test/files/run/t7932.scala                         |   11 +
 test/files/run/t7933.check                         |    2 +
 test/files/run/t7933.scala                         |   11 +
 test/files/run/t7974.check                         |  104 +
 test/files/run/t7974/Symbols.scala                 |    6 +
 test/files/run/t7974/Test.scala                    |   20 +
 test/files/run/t7985.scala                         |    3 +
 test/files/run/t7985b.scala                        |    5 +
 test/files/run/t8002.scala                         |   19 +
 test/files/run/t8015-ffc.scala                     |    7 +
 test/files/run/t8017.flags                         |    1 +
 test/files/run/t8017/value-class-lambda.scala      |   40 +
 test/files/run/t8017/value-class.scala             |    3 +
 test/files/run/t8046.check                         |    2 +
 test/files/run/t8046/Test.scala                    |   18 +
 test/files/run/t8046/t8046c.scala                  |   13 +
 test/files/run/t8047.check                         |    7 +
 test/files/run/t8047.scala                         |   31 +
 test/files/run/t8048a.check                        |    1 +
 test/files/run/t8048a/Macros_1.scala               |   11 +
 test/files/run/t8048a/Test_2.scala                 |    4 +
 test/files/run/t8048b.check                        |    3 +
 test/files/run/t8048b/Macros_1.scala               |   37 +
 test/files/run/t8048b/Test_2.scala                 |    5 +
 test/files/run/t8091.check                         |    1 +
 test/files/run/t8091.scala                         |    4 +
 test/files/run/t8100.check                         |    1 +
 test/files/run/t8100.scala                         |    8 +
 test/files/run/t8104.check                         |    2 +
 test/files/run/t8104/Macros_1.scala                |   11 +
 test/files/run/t8104/Test_2.scala                  |   19 +
 test/files/run/t8114.scala                         |   15 -
 test/files/run/t8133/A_1.scala                     |    5 +
 test/files/run/t8133/B_2.scala                     |   15 +
 test/files/run/t8133b/A_1.scala                    |    4 +
 test/files/run/t8133b/B_2.scala                    |    9 +
 ...7064-old-style-supercalls.check => t8153.check} |    0
 test/files/run/t8153.scala                         |   14 +
 test/files/run/t8177f.scala                        |   20 +
 test/files/run/t8188.scala                         |   25 +
 test/files/run/t8190.check                         |   91 +
 test/files/run/t8190.scala                         |  210 +
 test/files/run/t8192.check                         |   32 +
 test/files/run/t8192/Macros_1.scala                |   45 +
 test/files/run/t8192/Test_2.scala                  |   40 +
 test/files/run/t8197.scala                         |   16 +
 test/files/run/t8197b.scala                        |    8 +
 test/files/run/t8199.scala                         |  105 +
 test/files/run/t8233-bcode.flags                   |    1 +
 test/files/run/t8233-bcode.scala                   |   31 +
 test/files/run/t8233.scala                         |   31 +
 test/files/run/t8245.scala                         |   14 +
 test/files/run/t8266-octal-interp.check            |   30 +
 .../t8266-octal-interp.flags}                      |    0
 test/files/run/t8266-octal-interp.scala            |   16 +
 test/files/run/t8280.check                         |    9 +
 test/files/run/t8280.scala                         |   82 +
 test/files/run/{t6955.check => t8321.check}        |    0
 test/files/run/t8321/Macros_1.scala                |   11 +
 test/files/run/t8321/Test_2.scala                  |    3 +
 test/files/run/t8395.scala                         |    9 +
 test/files/run/t8425.check                         |    1 +
 test/files/run/t8425/Macros_1.scala                |   12 +
 test/files/run/t8425/Test_2.scala                  |    3 +
 test/files/run/t8428.scala                         |   12 +
 test/files/run/t8437.check                         |    2 +
 test/files/run/t8437/Macros_1.scala                |   18 +
 test/files/run/t8437/Test_2.scala                  |    4 +
 test/files/run/t874.scala                          |    4 +-
 test/files/run/t920.scala                          |    2 +-
 test/files/run/tailcalls.check                     |   55 +
 test/files/run/tailcalls.scala                     |   35 +-
 test/files/run/takeAndDrop.scala                   |    4 +-
 test/files/run/tcpoly_monads.scala                 |   11 +-
 test/files/run/tcpoly_overriding.scala             |    3 +
 test/files/run/tcpoly_parseridioms.check           |   20 +
 test/files/run/tcpoly_parseridioms.scala           |   53 +-
 test/files/run/test-cpp.scala                      |   24 +-
 test/files/run/toolbox_console_reporter.scala      |   13 +-
 test/files/run/toolbox_current_run_compiles.scala  |    4 +-
 test/files/run/toolbox_parse_package.check         |    8 +
 test/files/run/toolbox_parse_package.scala         |    9 +
 test/files/run/toolbox_rangepos.check              |    1 -
 test/files/run/toolbox_rangepos.scala              |    8 -
 .../run/toolbox_typecheck_implicitsdisabled.check  |    2 +-
 .../run/toolbox_typecheck_implicitsdisabled.scala  |   12 +-
 .../run/toolbox_typecheck_macrosdisabled.check     |    8 +-
 .../run/toolbox_typecheck_macrosdisabled.scala     |   17 +-
 .../run/toolbox_typecheck_macrosdisabled2.check    |   10 +-
 .../run/toolbox_typecheck_macrosdisabled2.scala    |   17 +-
 test/files/run/tpeCache-tyconCache.check           |   19 +
 test/files/run/tpeCache-tyconCache.scala           |   10 +
 test/files/run/trait-renaming/A_1.scala            |    2 +-
 test/files/run/transform.scala                     |    4 +-
 test/files/run/transpose.scala                     |    4 +-
 test/files/run/triemap-hash.scala                  |   20 +-
 test/files/run/triple-quoted-expr.scala            |    8 +-
 test/files/run/try-2.check                         |    3 +
 test/files/run/try-2.scala                         |   24 +-
 test/files/run/try-catch-unify.scala               |    2 +-
 test/files/run/try.check                           |    3 +
 test/files/run/try.scala                           |   18 +-
 test/files/run/tuple-match.scala                   |    4 +-
 test/files/run/tuple-zipped.scala                  |   18 +-
 test/files/run/tuples.scala                        |    1 +
 test/files/run/type-currying.scala                 |    7 +-
 test/files/run/typealias_overriding.scala          |    8 +-
 .../files/{pos/t7461.check => run/typecheck.check} |    0
 test/files/run/typecheck/Macros_1.scala            |   12 +
 test/files/run/typecheck/Test_2.scala              |   10 +
 test/files/run/typed-annotated/Macros_1.scala      |    6 +-
 test/files/run/typetags_symbolof_x.check           |    6 +
 test/files/run/typetags_symbolof_x.scala           |   15 +
 ...ags_without_scala_reflect_manifest_lookup.check |    0
 test/files/run/unapply.check                       |    3 +
 test/files/run/unapply.scala                       |    6 +-
 test/files/run/unittest_collection.check           |    1 +
 test/files/run/unittest_collection.scala           |    6 +-
 test/files/run/unittest_io.scala                   |   40 -
 test/files/run/unittest_iterator.scala             |   12 +-
 test/files/run/unreachable.scala                   |  128 +
 test/files/run/value-class-extractor-2.check       |    8 +
 test/files/run/value-class-extractor-2.scala       |  108 +
 test/files/run/value-class-extractor-seq.check     |    3 +
 test/files/run/value-class-extractor-seq.scala     |   59 +
 test/files/run/value-class-extractor.check         |    9 +
 test/files/run/value-class-extractor.scala         |   91 +
 .../run/value-class-partial-func-depmet.scala      |   24 +
 .../run/valueclasses-classmanifest-basic.scala     |    3 +-
 .../valueclasses-classmanifest-existential.scala   |    3 +-
 .../run/valueclasses-classmanifest-generic.scala   |    3 +-
 test/files/run/var-arity-class-symbol.scala        |   19 +
 test/files/run/vector1.scala                       |   26 +-
 test/files/run/verify-ctor.scala                   |    2 +-
 test/files/run/view-headoption.scala               |    4 +-
 test/files/run/view-iterator-stream.scala          |   11 +-
 test/files/run/viewtest.scala                      |    4 +-
 test/files/run/virtpatmat_alts.check               |    6 +
 test/files/run/virtpatmat_alts.flags               |    1 -
 test/files/run/virtpatmat_alts.scala               |    5 +-
 test/files/run/virtpatmat_apply.scala              |    2 +-
 test/files/run/virtpatmat_casting.scala            |    7 +-
 test/files/run/virtpatmat_literal.scala            |   24 +-
 test/files/run/virtpatmat_nested_lists.check       |    5 +-
 test/files/run/virtpatmat_nested_lists.flags       |    1 -
 test/files/run/virtpatmat_nested_lists.scala       |    3 +
 test/files/run/virtpatmat_opt_sharing.check        |    3 +
 test/files/run/virtpatmat_opt_sharing.flags        |    1 -
 test/files/run/virtpatmat_opt_sharing.scala        |    5 +-
 test/files/run/virtpatmat_staging.scala            |    7 +-
 test/files/run/virtpatmat_stringinterp.scala       |    5 +-
 test/files/run/virtpatmat_switch.scala             |   10 +-
 test/files/run/virtpatmat_try.scala                |    8 +-
 test/files/run/virtpatmat_typed.check              |    3 +
 test/files/run/virtpatmat_typed.scala              |    8 +-
 test/files/run/virtpatmat_unapply.scala            |   14 +-
 test/files/run/withIndex.scala                     |    2 +-
 test/files/run/xml-attribute.check                 |   12 -
 test/files/run/xml-attribute.scala                 |   37 -
 test/files/run/xml-loop-bug.scala                  |    8 -
 test/files/scalacheck/CheckCollections.scala       |   52 +
 test/files/scalacheck/CheckEither.scala            |   36 +-
 test/files/scalacheck/Ctrie.scala                  |   66 +-
 test/files/scalacheck/HashTrieSplit.scala          |   47 -
 test/files/scalacheck/ReflectionExtractors.scala   |   52 +
 test/files/scalacheck/Unrolled.scala               |    6 +-
 test/files/scalacheck/array-new.scala              |    2 +-
 test/files/scalacheck/array-old.scala              |    6 +-
 test/files/scalacheck/avl.scala                    |   10 +-
 test/files/scalacheck/list.scala                   |    2 +-
 .../parallel-collections/IntOperators.scala        |    2 +-
 .../parallel-collections/PairOperators.scala       |   42 +-
 .../parallel-collections/PairValues.scala          |    2 +-
 .../parallel-collections/ParallelArrayCheck.scala  |   19 +-
 .../parallel-collections/ParallelArrayTest.scala   |    2 +-
 .../ParallelArrayViewCheck.scala                   |   22 +-
 .../parallel-collections/ParallelCtrieCheck.scala  |   31 +-
 .../ParallelHashMapCheck.scala                     |   31 +-
 .../ParallelHashSetCheck.scala                     |   33 +-
 .../ParallelHashTrieCheck.scala                    |   48 +-
 .../ParallelIterableCheck.scala                    |  131 +-
 .../parallel-collections/ParallelMapCheck1.scala   |    6 +-
 .../parallel-collections/ParallelRangeCheck.scala  |   26 +-
 .../parallel-collections/ParallelSeqCheck.scala    |   60 +-
 .../parallel-collections/ParallelSetCheck.scala    |    6 +-
 .../parallel-collections/ParallelVectorCheck.scala |   24 +-
 .../files/scalacheck/parallel-collections/pc.scala |   71 +-
 test/files/scalacheck/primitive-eqeq.scala         |   10 +-
 .../quasiquotes/ArbitraryTreesAndNames.scala       |  295 ++
 .../quasiquotes/DefinitionConstructionProps.scala  |  453 ++
 .../DefinitionDeconstructionProps.scala            |  290 ++
 .../scalacheck/quasiquotes/DeprecationProps.scala  |   52 +
 test/files/scalacheck/quasiquotes/ErrorProps.scala |  213 +
 test/files/scalacheck/quasiquotes/ForProps.scala   |   70 +
 .../scalacheck/quasiquotes/LiftableProps.scala     |  174 +
 .../quasiquotes/PatternConstructionProps.scala     |   36 +
 .../quasiquotes/PatternDeconstructionProps.scala   |   44 +
 .../quasiquotes/QuasiquoteProperties.scala         |  120 +
 .../scalacheck/quasiquotes/RuntimeErrorProps.scala |   75 +
 .../quasiquotes/TermConstructionProps.scala        |  313 ++
 .../quasiquotes/TermDeconstructionProps.scala      |  249 +
 test/files/scalacheck/quasiquotes/Test.scala       |   19 +
 .../quasiquotes/TypeConstructionProps.scala        |   42 +
 .../quasiquotes/TypeDeconstructionProps.scala      |   78 +
 .../scalacheck/quasiquotes/TypecheckedProps.scala  |  216 +
 .../scalacheck/quasiquotes/UnliftableProps.scala   |  166 +
 test/files/scalacheck/range.scala                  |   48 +-
 test/files/scalacheck/redblack.scala               |  213 -
 test/files/scalacheck/redblacktree.scala           |   14 +-
 test/files/scalacheck/si4147.scala                 |   67 -
 test/files/scalacheck/substringTests.scala         |    4 +-
 test/files/scalacheck/t2460.scala                  |    5 -
 test/files/scalacheck/t4147.scala                  |   68 +
 test/files/scalacheck/treeset.scala                |    3 +
 test/files/scalap/abstractClass.check              |    4 +
 .../{abstractClass/A.scala => abstractClass.scala} |    0
 test/files/scalap/abstractClass/result.test        |    4 -
 test/files/scalap/abstractMethod.check             |    5 +
 .../A.scala => abstractMethod.scala}               |    0
 test/files/scalap/abstractMethod/result.test       |    5 -
 test/files/scalap/caseClass.check                  |   20 +
 test/files/scalap/caseClass.scala                  |    3 +
 test/files/scalap/caseClass/A.scala                |    3 -
 test/files/scalap/caseClass/result.test            |   20 -
 test/files/scalap/caseObject.check                 |   10 +
 test/files/scalap/caseObject.scala                 |    3 +
 test/files/scalap/caseObject/A.scala               |    3 -
 test/files/scalap/caseObject/result.test           |   10 -
 test/files/scalap/cbnParam.check                   |    3 +
 test/files/scalap/cbnParam.scala                   |    1 +
 test/files/scalap/cbnParam/A.scala                 |    1 -
 test/files/scalap/cbnParam/result.test             |    3 -
 test/files/scalap/classPrivate.check               |   10 +
 .../{classPrivate/A.scala => classPrivate.scala}   |    0
 test/files/scalap/classPrivate/result.test         |   10 -
 test/files/scalap/classWithExistential.check       |    4 +
 .../A.scala => classWithExistential.scala}         |    0
 test/files/scalap/classWithExistential/result.test |    4 -
 test/files/scalap/classWithSelfAnnotation.check    |    5 +
 .../A.scala => classWithSelfAnnotation.scala}      |    0
 .../scalap/classWithSelfAnnotation/result.test     |    5 -
 test/files/scalap/covariantParam.check             |    4 +
 .../A.scala => covariantParam.scala}               |    0
 test/files/scalap/covariantParam/result.test       |    4 -
 test/files/scalap/defaultParameter.check           |    3 +
 .../A.scala => defaultParameter.scala}             |    0
 test/files/scalap/defaultParameter/result.test     |    3 -
 test/files/scalap/implicitParam.check              |    4 +
 .../{implicitParam/A.scala => implicitParam.scala} |    0
 test/files/scalap/implicitParam/result.test        |    4 -
 test/files/scalap/packageObject.check              |    5 +
 .../{packageObject/A.scala => packageObject.scala} |    0
 test/files/scalap/packageObject/result.test        |    5 -
 test/files/scalap/paramClauses.check               |    4 +
 .../{paramClauses/A.scala => paramClauses.scala}   |    0
 test/files/scalap/paramClauses/result.test         |    4 -
 test/files/scalap/paramNames.check                 |    4 +
 .../{paramNames/A.scala => paramNames.scala}       |    0
 test/files/scalap/paramNames/result.test           |    4 -
 test/files/scalap/sequenceParam.check              |    3 +
 .../{sequenceParam/A.scala => sequenceParam.scala} |    0
 test/files/scalap/sequenceParam/result.test        |    3 -
 test/files/scalap/simpleClass.check                |    4 +
 .../{simpleClass/A.scala => simpleClass.scala}     |    0
 test/files/scalap/simpleClass/result.test          |    4 -
 test/files/scalap/traitObject.check                |    8 +
 .../{traitObject/A.scala => traitObject.scala}     |    0
 test/files/scalap/traitObject/result.test          |    8 -
 test/files/scalap/typeAnnotations.check            |    8 +
 .../A.scala => typeAnnotations.scala}              |    0
 test/files/scalap/typeAnnotations/result.test      |    8 -
 test/files/scalap/valAndVar.check                  |    5 +
 .../scalap/{valAndVar/A.scala => valAndVar.scala}  |    0
 test/files/scalap/valAndVar/result.test            |    5 -
 test/files/scalap/wildcardType.check               |    3 +
 .../{wildcardType/A.scala => wildcardType.scala}   |    0
 test/files/scalap/wildcardType/result.test         |    3 -
 test/files/specialized/SI-7343.scala               |    2 +-
 test/files/specialized/SI-7344.scala               |    8 +-
 test/files/specialized/arrays-traits.scala         |    2 +-
 test/files/specialized/arrays.scala                |   16 +-
 test/files/specialized/constant_lambda.check       |    2 +
 test/files/specialized/constant_lambda.scala       |   16 +
 test/files/specialized/fft.scala                   |   26 +-
 test/files/specialized/spec-ame.scala              |   10 +-
 test/files/specialized/spec-hlists.scala           |    4 +-
 test/files/specialized/spec-init.scala             |    2 +-
 test/files/specialized/spec-matrix-old.scala       |   14 +-
 test/files/specialized/spec-overrides.scala        |    2 +-
 test/files/specialized/spec-patmatch.scala         |    2 +-
 test/files/specialized/spec-super.check            |    5 +-
 test/files/specialized/spec-super.scala            |    6 +-
 test/files/specialized/spec-t3896.scala            |    6 +-
 test/files/specialized/tb3651.check                |    5 +-
 test/files/specialized/tc3651.check                |    5 +-
 test/files/specialized/td3651.check                |    8 +-
 test/files/specialized/td3651.scala                |    2 +-
 test/flaky/pos/t2868.cmds                          |    3 -
 test/junit/scala/collection/ArraySortingTest.scala |   29 +
 test/junit/scala/collection/NumericRangeTest.scala |  140 +
 test/junit/scala/collection/PagedSeq.scala         |   16 +
 .../junit/scala/collection/PriorityQueueTest.scala |   32 +
 test/junit/scala/collection/QueueTest.scala        |   28 +
 .../scala/collection/SetMapConsistencyTest.scala   |  517 +++
 .../scala/collection/TraversableOnceTest.scala     |   70 +
 test/junit/scala/collection/VectorTest.scala       |   51 +
 .../scala/collection/convert/MapWrapperTest.scala  |   49 +
 test/junit/scala/math/BigDecimalTest.scala         |  225 +
 test/junit/scala/math/NumericTest.scala            |   18 +
 .../junit/scala/reflect/internal/MirrorsTest.scala |   18 +
 .../scala/reflect/internal/PrintersTest.scala      | 1164 +++++
 .../reflect/internal/util/SourceFileTest.scala     |    5 +-
 .../reflect/internal/util/StringOpsTest.scala      |   52 +
 .../reflect/internal/util/WeakHashSetTest.scala    |  171 +
 test/junit/scala/reflect/io/ZipArchiveTest.scala   |   37 +
 .../tools/nsc/interpreter/TabulatorTest.scala      |   85 +
 .../scala/tools/nsc/settings/SettingsTest.scala    |   52 +
 .../tools/nsc/symtab/CannotHaveAttrsTest.scala     |   67 +
 .../tools/nsc/symtab/FreshNameExtractorTest.scala  |   47 +
 .../scala/tools/nsc/symtab/StdNamesTest.scala      |   46 +
 .../nsc/symtab/SymbolTableForUnitTesting.scala     |   98 +
 .../scala/tools/nsc/symtab/SymbolTableTest.scala   |   47 +
 .../scala/tools/nsc/util/StackTraceTest.scala      |  159 +
 .../scala/tools/testing/AssertThrowsTest.scala     |   34 +
 test/junit/scala/tools/testing/AssertUtil.scala    |   19 +
 test/junit/scala/util/TryTest.scala                |   35 +
 test/junit/scala/util/matching/RegexTest.scala     |   30 +
 .../scala/util/matching/regextract-char.scala      |   58 +
 test/junit/scala/util/t7265.scala                  |   59 +
 test/osgi/src/BasicReflection.scala                |   20 +-
 test/osgi/src/ScalaOsgiHelper.scala                |    8 +-
 test/partest                                       |  147 +-
 test/partest.bat                                   |    2 +-
 test/pending/continuations-pos/t3620.scala         |   73 -
 test/pending/continuations-run/example0.scala      |    9 -
 test/pending/continuations-run/example1.scala      |    9 -
 test/pending/continuations-run/example16.scala     |    9 -
 test/pending/continuations-run/example2.scala      |    9 -
 test/pending/continuations-run/example3.scala      |    9 -
 test/pending/continuations-run/example4.scala      |    9 -
 test/pending/continuations-run/example5.scala      |    9 -
 test/pending/continuations-run/example6.scala      |    9 -
 test/pending/continuations-run/example7.scala      |    9 -
 test/pending/continuations-run/example8.scala      |    9 -
 test/pending/continuations-run/example9.scala      |    9 -
 test/pending/continuations-run/foreach.check       |    4 -
 test/pending/continuations-run/foreach.scala       |   33 -
 test/pending/junit/scala/util/t7265.scala          |   46 -
 test/pending/jvm/cf-attributes.scala               |   26 +-
 .../macro-invalidusage-badbounds-b/Impls_1.scala   |    4 +-
 test/pending/neg/plugin-after-terminal.flags       |    2 -
 .../lib/plugins.jar.desired.sha1                   |    1 -
 .../neg/plugin-after-terminal/misc/build.sh        |   14 -
 .../neg/plugin-after-terminal/src/ThePlugin.scala  |   31 -
 .../neg/plugin-after-terminal/testsource.scala     |    4 -
 test/pending/neg/plugin-before-parser.flags        |    2 -
 .../lib/plugins.jar.desired.sha1                   |    1 -
 .../pending/neg/plugin-before-parser/misc/build.sh |   14 -
 .../plugin-before-parser/misc/scalac-plugin.xml    |    5 -
 .../neg/plugin-before-parser/src/ThePlugin.scala   |   32 -
 .../neg/plugin-before-parser/testsource.scala      |    4 -
 test/pending/neg/plugin-cyclic-dependency.check    |    2 -
 test/pending/neg/plugin-cyclic-dependency.flags    |    2 -
 .../lib/plugins.jar.desired.sha1                   |    1 -
 .../neg/plugin-cyclic-dependency/misc/build.sh     |   14 -
 .../misc/scalac-plugin.xml                         |    5 -
 .../plugin-cyclic-dependency/src/ThePlugin.scala   |   41 -
 .../neg/plugin-cyclic-dependency/testsource.scala  |    4 -
 test/pending/neg/plugin-multiple-rafter.check      |    4 -
 test/pending/neg/plugin-multiple-rafter.flags      |    2 -
 .../lib/plugins.jar.desired.sha1                   |    1 -
 .../neg/plugin-multiple-rafter/misc/build.sh       |   14 -
 .../plugin-multiple-rafter/misc/scalac-plugin.xml  |    5 -
 .../neg/plugin-multiple-rafter/src/ThePlugin.scala |   31 -
 .../neg/plugin-multiple-rafter/testsource.scala    |    4 -
 test/pending/neg/plugin-rafter-before-1.check      |    2 -
 test/pending/neg/plugin-rafter-before-1.flags      |    2 -
 .../lib/plugins.jar.desired.sha1                   |    1 -
 .../neg/plugin-rafter-before-1/misc/build.sh       |   14 -
 .../plugin-rafter-before-1/misc/scalac-plugin.xml  |    5 -
 .../neg/plugin-rafter-before-1/src/ThePlugin.scala |   31 -
 .../neg/plugin-rafter-before-1/testsource.scala    |    4 -
 test/pending/neg/plugin-rightafter-terminal.flags  |    2 -
 .../lib/plugins.jar.desired.sha1                   |    1 -
 .../neg/plugin-rightafter-terminal/misc/build.sh   |   14 -
 .../misc/scalac-plugin.xml                         |    5 -
 .../plugin-rightafter-terminal/src/ThePlugin.scala |   32 -
 .../plugin-rightafter-terminal/testsource.scala    |    4 -
 test/pending/neg/t5353.check                       |    4 -
 test/pending/neg/t5353.scala                       |    3 -
 test/{files => pending}/neg/t5589neg.flags         |    0
 test/{files => pending}/neg/t5589neg2.check        |    0
 test/pending/neg/t7886.scala                       |   22 +
 test/pending/neg/t7886b.scala                      |   23 +
 .../pos/no-widen-locals.flags}                     |    0
 test/pending/pos/overloading-boundaries.scala      |   37 +
 test/pending/pos/pattern-typing.scala              |   29 +
 test/pending/pos/t1751.cmds                        |    3 -
 test/pending/pos/t1782.cmds                        |    2 -
 test/pending/pos/t1786.scala                       |   37 +-
 test/pending/pos/t1832.scala                       |   10 -
 test/pending/pos/t294.cmds                         |    3 -
 test/pending/pos/t4612.scala                       |   15 +
 test/pending/pos/t4695/T_1.scala                   |    4 +
 test/pending/pos/t4695/T_2.scala                   |    4 +
 test/pending/pos/t4717.scala                       |    7 -
 test/pending/pos/t4859.scala                       |   15 -
 test/pending/pos/t5082.scala                       |    8 +
 test/pending/pos/t5259.scala                       |   14 -
 test/pending/pos/t5399.scala                       |    8 -
 test/pending/pos/t5459.scala                       |   48 +
 test/pending/pos/t5626.scala                       |   12 -
 test/pending/pos/t5639/Foo.scala                   |    7 -
 test/pending/pos/t5654.scala                       |    4 -
 test/pending/pos/t5877.scala                       |    5 +
 test/pending/pos/t5954/T_1.scala                   |    8 +
 test/pending/pos/t5954/T_2.scala                   |    8 +
 test/pending/pos/t5954/T_3.scala                   |    8 +
 test/pending/pos/t6161.scala                       |   22 +
 test/pending/pos/t6225.scala                       |   11 +
 test/pending/pos/t8128b.scala                      |   18 +
 test/pending/pos/t8363b.scala                      |    7 +
 test/pending/pos/those-kinds-are-high.scala        |   53 +-
 test/pending/pos/treecheckers.flags                |    1 +
 test/pending/pos/treecheckers/c1.scala             |   12 +
 test/pending/pos/treecheckers/c2.scala             |    1 +
 test/pending/pos/treecheckers/c3.scala             |    8 +
 test/pending/pos/treecheckers/c4.scala             |    9 +
 test/pending/pos/treecheckers/c5.scala             |    3 +
 test/pending/pos/treecheckers/c6.scala             |    4 +
 test/pending/pos/z1720.scala                       |   16 -
 test/pending/presentation/context-bounds1.check    |   51 +
 .../presentation/context-bounds1/Test.scala        |    3 +
 .../context-bounds1/src/ContextBounds.scala        |   13 +
 test/pending/reify_typeof.check                    |   10 +
 test/pending/reify_typeof.scala                    |   14 +
 test/pending/run/hk-lub-fail.scala                 |   10 +-
 .../run/idempotency-partial-functions.scala        |    4 +-
 .../pending/run/macro-expand-default/Impls_1.scala |    8 +-
 .../Impls_1.scala                                  |    2 +-
 .../Impls_1.scala                                  |    8 +-
 test/pending/run/macro-expand-named/Impls_1.scala  |    8 +-
 .../macro-expand-tparams-prefix-e1/Impls_1.scala   |   10 +-
 .../macro-expand-tparams-prefix-f1/Impls_1.scala   |   10 +-
 .../run/macro-quasiinvalidbody-a/Impls_1.scala     |    4 +-
 .../macro-quasiinvalidbody-a/Macros_Test_2.scala   |    2 +-
 .../run/macro-quasiinvalidbody-b/Impls_1.scala     |    4 +-
 .../macro-quasiinvalidbody-b/Macros_Test_2.scala   |    2 +-
 test/pending/run/macro-reify-array/Macros_1.scala  |    4 +-
 .../run/macro-reify-tagful-b/Macros_1.scala        |    4 +-
 .../run/macro-reify-tagless-b/Impls_Macros_1.scala |    4 +-
 .../pending/run/macro-reify-tagless-b/Test_2.scala |    6 +-
 .../Impls_1.scala                                  |   11 +
 .../Macros_Test_2.scala                            |    6 +
 test/pending/run/reify_callccinterpreter.scala     |   12 +-
 test/pending/run/reify_implicits-new.scala         |   16 -
 test/pending/run/reify_implicits-old.scala         |   15 -
 test/pending/run/reify_simpleinterpreter.scala     |   10 +-
 test/pending/run/t2318.scala                       |   38 -
 test/pending/run/t3897/J_2.java                    |   27 -
 test/pending/run/t3897/a_2.scala                   |   23 -
 test/pending/run/t4728.check                       |    2 -
 test/pending/run/t5293-map.scala                   |   88 -
 test/pending/run/t5293.scala                       |   83 -
 test/pending/run/t5418.check                       |    0
 test/pending/run/t5427a.scala                      |    2 +-
 test/pending/run/t5427b.scala                      |    2 +-
 test/pending/run/t5427c.scala                      |    2 +-
 test/pending/run/t5427d.scala                      |    2 +-
 test/pending/run/t5866b.scala                      |   17 +
 test/pending/run/t5943b1.scala                     |    2 +-
 test/{files => pending}/run/t7733.check            |    0
 test/{files => pending}/run/t7733/Separate_1.scala |    0
 test/{files => pending}/run/t7733/Test_2.scala     |    0
 .../run/virtpatmat_anonfun_underscore.check        |    0
 test/pending/shootout/fasta.scala                  |   64 +-
 test/pending/shootout/revcomp.scala-2.scala        |   18 +-
 test/pending/shootout/revcomp.scala-3.scala        |   40 +-
 test/pending/t7629-view-bounds-removal.check       |    9 +
 test/pending/t7629-view-bounds-removal.flags       |    1 +
 test/pending/t7629-view-bounds-removal.scala       |    4 +
 test/pending/typetags_typeof_x.check               |    8 +
 test/pending/typetags_typeof_x.scala               |   14 +
 test/postreview.py                                 | 2540 -----------
 test/review                                        |   44 -
 test/scaladoc/resources/SI-4014_0.scala            |    4 +
 test/scaladoc/resources/SI-4014_1.scala            |    5 +
 test/scaladoc/resources/SI-4014_2.scala            |    6 +
 test/scaladoc/resources/SI_4715.scala              |    4 +-
 test/scaladoc/resources/Trac4325.scala             |    4 +-
 test/scaladoc/resources/doc-root/Any.scala         |    2 +-
 test/scaladoc/resources/doc-root/AnyRef.scala      |    2 +-
 test/scaladoc/resources/doc-root/Nothing.scala     |    2 +-
 test/scaladoc/resources/doc-root/Null.scala        |    2 +-
 test/scaladoc/resources/implicits-base-res.scala   |   80 +-
 test/scaladoc/resources/links.scala                |    2 +
 test/scaladoc/run/SI-191-deprecated.check          |    1 -
 test/scaladoc/run/SI-191-deprecated.scala          |   72 -
 test/scaladoc/run/SI-4676.scala                    |    3 +-
 test/scaladoc/run/SI-6812.check                    |    1 +
 test/scaladoc/run/SI-6812.scala                    |    6 +-
 .../scaladoc/run/{SI-6812.check => SI-6812b.check} |    0
 test/scaladoc/run/SI-6812b.scala                   |   24 +
 test/scaladoc/run/diagrams-base.scala              |    2 +-
 test/scaladoc/run/diagrams-filtering.scala         |    2 +-
 test/scaladoc/run/implicits-base.scala             |  148 +-
 test/scaladoc/run/links.scala                      |    2 +-
 test/scaladoc/run/t5527.check                      |  132 +
 test/scaladoc/run/t5527.scala                      |  155 +
 test/scaladoc/run/t7124.check                      |    3 +
 test/scaladoc/run/t7124.scala                      |   22 +
 test/scaladoc/run/{SI-6812.check => t7876.check}   |    0
 test/scaladoc/run/t7876.scala                      |   19 +
 test/scaladoc/run/t7876b.check                     |    3 +
 test/scaladoc/run/t7876b.scala                     |   24 +
 test/scaladoc/run/t8407.check                      |    4 +
 test/scaladoc/run/t8407.scala                      |   20 +
 test/scaladoc/scalacheck/CommentFactoryTest.scala  |    6 +-
 test/scaladoc/scalacheck/HtmlFactoryTest.scala     |   65 +-
 test/scaladoc/scalacheck/IndexScriptTest.scala     |   16 +-
 test/scaladoc/scalacheck/IndexTest.scala           |   23 +-
 test/script-tests/jar-manifest/run-test.check      |    2 +-
 test/support/java-tests.txt                        |   97 +
 tools/binary-repo-lib.sh                           |    2 +-
 tools/buildcp                                      |   11 -
 tools/compare-java-sigs                            |   56 +
 tools/diffPickled                                  |   51 -
 tools/epfl-build                                   |   28 -
 tools/epfl-publish                                 |   32 -
 tools/locker_scala                                 |    6 -
 tools/locker_scalac                                |    6 -
 tools/lockercp                                     |    4 -
 tools/make-release-notes                           |   49 -
 tools/packcp                                       |    5 -
 tools/partest-ack                                  |  131 +
 tools/partest-paths                                |   27 +
 tools/quick_scala                                  |    6 -
 tools/quick_scalac                                 |    6 -
 tools/quickcp                                      |    4 -
 tools/remotetest                                   |  230 -
 tools/showPickled                                  |   32 -
 tools/starr_scala                                  |    6 -
 tools/starr_scalac                                 |    6 -
 tools/starrcp                                      |    5 -
 tools/strapcp                                      |   12 -
 tools/test-renamer                                 |   82 -
 tools/updatescalacheck                             |  130 -
 versions.properties                                |   34 +
 6710 files changed, 142779 insertions(+), 157566 deletions(-)

diff --git a/.gitattributes b/.gitattributes
index 958b0b9..ac98781 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,10 +1,16 @@
+# fallback on built-in heuristics
+# this must be first so later entries will override it
+* text=auto
+
 # These files are text and should be normalized (convert crlf => lf)
 *.c       text
 *.check   text
 *.css     text
+*.flags   text
 *.html    text
 *.java    text
 *.js      text
+*.policy  text
 *.sbt     text
 *.scala   text
 *.sh      text
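
    (Editorial note on the hunk above: the new `* text=auto` fallback sits first because, as the added comment says, later entries override it, so the explicit `*.scala text` style rules below still win for those extensions. A hedged way to check which value a given path ends up with, using the standard `git check-attr` command; the example paths are illustrative only and the second result assumes no later pattern in the file matches an extensionless file:

        $ git check-attr text -- src/library/scala/Predef.scala
        src/library/scala/Predef.scala: text: set
        $ git check-attr text -- build.number
        build.number: text: auto

    Running this before and after the change shows the effect of the new fallback.)
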
diff --git a/.gitignore b/.gitignore
index 4329fce..32a1665 100644
--- a/.gitignore
+++ b/.gitignore
@@ -34,6 +34,7 @@
 /out/
 /bin/
 /sandbox/
+/.ant-targets-build.xml
 
 # eclipse, intellij
 /.classpath
@@ -41,7 +42,7 @@
 /src/intellij/*.iml
 /src/intellij/*.ipr
 /src/intellij/*.iws
-/.cache
+**/.cache
 /.idea
 /.settings
 
diff --git a/.mailmap b/.mailmap
index 49d5dc6..7cab5ed 100644
--- a/.mailmap
+++ b/.mailmap
@@ -1,25 +1,77 @@
-Aleksandar Prokopec <aleksandar at aleksandar-Latitude-E6500.(none)>
-Aleksandar Prokopec <aleksandar at htpc.(none)>
-Aleksandar Prokopec <aleksandar at htpc-axel22.(none)>
-Aleksandar Prokopec <aleksandar at lampmac14.epfl.ch>
+Adriaan Moors <adriaan.moors at typesafe.com>
+Adriaan Moors <adriaan.moors at typesafe.com> <adriaan.moors at epfl.ch>
+Adriaan Moors <adriaan.moors at typesafe.com> <adriaanm at gmail.com>
 Aleksandar Prokopec <aleksandar.prokopec at epfl.ch>
+Aleksandar Prokopec <aleksandar.prokopec at epfl.ch> <aleksandar.prokopec at gmail.com>
+Aleksandar Prokopec <aleksandar.prokopec at epfl.ch> <aleksandar at aleksandar-Latitude-E6500.(none)>
+Aleksandar Prokopec <aleksandar.prokopec at epfl.ch> <aleksandar at htpc-axel22.(none)>
+Aleksandar Prokopec <aleksandar.prokopec at epfl.ch> <aleksandar at htpc.(none)>
+Aleksandar Prokopec <aleksandar.prokopec at epfl.ch> <aleksandar at lampmac14.epfl.ch>
+Aleksandar Prokopec <aleksandar.prokopec at epfl.ch> <axel22 at gmail.com>
+Alex Cruise <alex at cluonflux.com>
+Alex Cruise <alex at cluonflux.com> <alex at metaforsoftware.com>
+A. P. Marki <som.snytt at gmail.com>
 Antonio Cunei <antonio.cunei at typesafe.com>
+Antonio Cunei <antonio.cunei at typesafe.com> <antonio.cunei at epfl.ch>
+Buraq Emir <buraq at epfl.ch>
 Caoyuan Deng <dcaoyuan at epfl.ch>
 Chris Hodapp <clhodapp1 at gmail.com>
 Chris James <chrisJames at epfl.ch>
 Christopher Vogt <vogt at epfl.ch>
+Christopher Vogt <vogt at epfl.ch> <christopher.vogt at epfl.ch>
+Christopher Vogt <vogt at epfl.ch> <github.com.nsp at cvogt.org>
 Damien Obristi <damien.obrist at gmail.com>
-Daniel C. Sobral <dcs at dcs-132-CK-NF79.(none)>
+Daniel C. Sobral <dcsobral at gmail.com>
+Daniel C. Sobral <dcsobral at gmail.com> <dcs at dcs-132-CK-NF79.(none)>
+Daniel Lorch <lorch at epfl.ch>
+Erik Stenman <stenman at epfl.ch>
+Eugene Burmako <xeno.by at gmail.com>
+Eugene Burmako <xeno.by at gmail.com> <burmako at epfl.ch>
+Eugene Vigdorchik <eugenevigdorchik at epfl.ch> <eugene.vigdorchik at gmail.com>
+François Garillot <francois at garillot.net>
+Geoff Reedy <geoff at programmer-monk.net> <gereedy at sandia.gov>
 Ilya Sergei <ilyas at epfl.ch>
-Ingo Maier <ingoem at gmail.com>
+Ingo Maier <ingo.maier at epfl.ch>
+Ingo Maier <ingo.maier at epfl.ch> <ingoem at gmail.com>
+Josh Suereth <joshua.suereth at gmail.com>
+Josh Suereth <joshua.suereth at gmail.com> <Joshua.Suereth at gmail.com>
+Julien Eberle <jeberle at epfl.ch>
 Kenji Yoshida <6b656e6a69 at gmail.com>
+Luc Bourlier <luc.bourlier at typesafe.com>
+Luc Bourlier <luc.bourlier at typesafe.com> <skyluc at epfl.ch>
 Luc Bourlier <skyluc at epfl.ch>
-Martin Odersky <odersky at gamil.com>
+Martin Odersky <odersky at gmail.com>
+Martin Odersky <odersky at gmail.com> <odersky at gamil.com>
+Michael Pradel <pradel at epfl.ch>
+Michel Schinz <schinz at epfl.ch>
+Miguel Garcia <magarcia at epfl.ch>
+Miguel Garcia <magarcia at epfl.ch> <miguelalfredo.garcia at epfl.ch>
+Mirco Dotta <mirco.dotta at typesafe.com>
+Mirco Dotta <mirco.dotta at typesafe.com> <mirco.dotta at gmail.com>
+Moez A. Abdel-Gawad <moez at epfl.ch>
+Mohsen Lesani <lesani at epfl.ch>
 Nada Amin <amin at epfl.ch>
-Nada Amin <nada.amin at epfl.ch>
+Nada Amin <amin at epfl.ch> <nada.amin at epfl.ch>
+Nada Amin <amin at epfl.ch> <namin at alum.mit.edu>
 Natallie Baikevich <lu-a-jalla at ya.ru>
+Nikolay Mihaylov <mihaylov at epfl.ch>
+Paolo Giarrusso <p.giarrusso at gmail.com>
 Pavel Pavlov <pavel.e.pavlov at gmail.com>
 Philipp Haller <philipp.haller at typesafe.com>
+Philipp Haller <philipp.haller at typesafe.com> <hallerp at gmail.com>
+Philippe Altherr <paltherr at epfl.ch>
+Raphaël Noir <noir at epfl.ch>
 Roland Kuhn <rk at rkuhn.info>
 Rüdiger Klaehn <rklaehn at gmail.com>
+Sebastian Hack <shack at epfl.ch>
+Simon Ochsenreither <simon at ochsenreither.de>
+Stepan Koltsov <stepancheg at epfl.ch>
 Stéphane Micheloud <michelou at epfl.ch>
+Unknown Committer <lost.soul at typesafe.com>
+Unknown Committer <lost.soul at typesafe.com> <USER at epfl.ch>
+Unknown Committer <lost.soul at typesafe.com> <noreply at epfl.ch>
+Viktor Klang <viktor.klang at gmail.com>
+Vincent Cremet <cremet at epfl.ch>
+Vladimir Nikolaev <vladimir.nikolaev9 at gmail.com>
+Vojin Jovanovic <vojin.jovanovic at epfl.ch>
+Vojin Jovanovic <vojin.jovanovic at epfl.ch> <gvojin at gmail.com>
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..e90fc35
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,20 @@
+# this builds the spec using jekyll
+# based on http://www.paperplanes.de/2013/8/13/deploying-your-jekyll-blog-to-s3-with-travis-ci.html
+language: ruby
+rvm:
+  - 1.9.3
+script: bundle exec jekyll build -s spec/ -d build/spec
+install: bundle install
+
+# https://gist.github.com/kzap/5819745, http://docs.travis-ci.com/user/travis-pro/
+env:
+  - secure: "WWU490z7DWAI8MidMyTE+i+Ppgjg46mdr7PviF6P6ulrPlRRKOtKXpLvzgJoQmluwzEK6/+iH7D5ybCUYMLdKkQM9kSqaXJ0jeqjOelaaa1LmuOQ8IbuT8O9DwHzjjp/n4Lj/KRvvN4nGxCMI7HLla4gunvPA7M6WK7FA+YKCOU=" # set PRIV_KEY_SECRET to password used to encrypt spec/id_dsa_travis.enc
+
+# using S3 would be simpler, but we want to upload to scala-lang.org
+# after_success: bundle exec s3_website push --headless
+# the key is restricted using forced commands so that it can only upload to the directory we need here
+after_success:
+  - openssl aes-256-cbc -pass "pass:$PRIV_KEY_SECRET" -in spec/id_dsa_travis.enc -out spec/id_dsa_travis -d -a
+  - chmod 600 spec/id_dsa_travis
+  - eval "$(ssh-agent)"
+  - '[ "${TRAVIS_PULL_REQUEST}" = "false" ] && ssh-add -D && ssh-add spec/id_dsa_travis && rsync -e "ssh -o StrictHostKeyChecking=no" -rzv build/spec/ scalatest at chara.epfl.ch:/home/linuxsoft/archives/scala/spec/2.11/'
\ No newline at end of file
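
    (Editorial note on the .travis.yml above: the after_success steps decrypt spec/id_dsa_travis.enc with the password held in the PRIV_KEY_SECRET secure variable, load the key into ssh-agent, and rsync the built spec to chara.epfl.ch. For reference, a hedged sketch of how such an encrypted key and secure variable could have been prepared; these commands are not part of the patch, and the use of ssh-keygen and the `travis` CLI gem is an assumption:

        # generate a deploy-only DSA key pair (no passphrase on the key file itself)
        ssh-keygen -t dsa -f spec/id_dsa_travis -N ""
        # encrypt the private key with a throw-away password; -e -a mirrors the -d -a decrypt step above
        openssl aes-256-cbc -pass "pass:$PRIV_KEY_SECRET" -in spec/id_dsa_travis -out spec/id_dsa_travis.enc -e -a
        rm spec/id_dsa_travis
        # store the password as an encrypted Travis variable
        travis encrypt PRIV_KEY_SECRET=<the password used above>

    On the server side, the matching public key would be restricted in authorized_keys, e.g. with a forced command, so it can only upload to the spec directory, as the comment in the file notes.)
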
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 2451a52..1c05b4f 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -63,4 +63,4 @@ Example:
 
 ## The Scala Improvement Process
 A new language feature requires a SIP (Scala Improvement Process) proposal. Note that significant additions to the standard library are also considered candidates for a SIP proposal.
-For more details on submitting SIPs, see (how to submit a SIP)[http://docs.scala-lang.org/sips/sip-submission.html].
+For more details on submitting SIPs, see [how to submit a SIP](http://docs.scala-lang.org/sips/sip-submission.html).
diff --git a/Gemfile b/Gemfile
new file mode 100644
index 0000000..53924a4
--- /dev/null
+++ b/Gemfile
@@ -0,0 +1,7 @@
+# To build the spec on Travis CI
+source "https://rubygems.org"
+
+gem "jekyll", "2.0.0.alpha.2"
+gem "rouge"
+# gem 's3_website'
+# gem 'redcarpet'
diff --git a/META-INF/MANIFEST.MF b/META-INF/MANIFEST.MF
index 4d5573d..4ee2d08 100644
--- a/META-INF/MANIFEST.MF
+++ b/META-INF/MANIFEST.MF
@@ -7,9 +7,7 @@ Eclipse-LazyStart: true
 Bundle-ClassPath: 
  .,
  bin,
- lib/fjbg.jar,
  lib/jline.jar,
- lib/msil.jar
 Export-Package: 
  scala.tools.nsc,
  scala.tools.nsc.ast,
@@ -47,11 +45,6 @@ Export-Package:
  scala.reflect.runtime,
  scala.reflect.internal.transform,
  scala.reflect.api,
- ch.epfl.lamp.compiler.msil,
- ch.epfl.lamp.compiler.msil.emit,
- ch.epfl.lamp.compiler.msil.util,
- ch.epfl.lamp.fjbg,
- ch.epfl.lamp.util
 Require-Bundle: 
  org.apache.ant,
  org.scala-ide.scala.library
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..fdc9892
--- /dev/null
+++ b/README.md
@@ -0,0 +1,8 @@
+This is the repository for the [Scala Programming Language](http://www.scala-lang.org).
+
+  - [Report an issue](https://issues.scala-lang.org);
+  - [Read about the development of the compiler and the standard library](http://docs.scala-lang.org/scala/);
+  - [Check our Jenkins status](https://scala-webapps.epfl.ch/jenkins/);
+  - [Download the latest nightly](https://scala-webapps.epfl.ch/jenkins/job/scala-nightly-main-master/ws/dists/latest/*zip*/latest.zip);
+  - ... and contribute right here! Please, first read our [policy](http://docs.scala-lang.org/scala/pull-request-policy.html), our [development guidelines](CONTRIBUTING.md),
+and [sign the contributor's license agreement](http://typesafe.com/contribute/cla/scala).
diff --git a/README.rst b/README.rst
deleted file mode 100644
index 4ed283d..0000000
--- a/README.rst
+++ /dev/null
@@ -1,207 +0,0 @@
-################################################################################
-                              THE SCALA REPOSITORY
-################################################################################
-
-This document describes the Scala core (core library and compiler) repository
-and how to build it. For information about Scala as a language, you can visit
-the web site http://www.scala-lang.org/
-
-Part I. The repository layout
---------------------------------------------------------------------------------                            
-
-Follows the file layout of the Scala repository. Files marked with a † are not
-part of the repository but are either automatically generated by the
-build script or user-created if needed.  This is not a complete listing. :: 
-  scala/
-   +--build/                    Build products output directory for ant.
-   +--build.xml                 The main Ant build script.
-   +--dist/                     The destination folder for Scala distributions.
-   +--docs/                     Documentation and sample code.
-   +--lib/                      Pre-compiled libraries for the build.
-   |   +--fjbg.jar              The Java byte-code generation library.
-   |   +--scala-compiler.jar    The stable reference ('starr') compiler jar
-   |   +--scala-library.jar     The stable reference ('starr') library jar
-   |   +--scala-library-src.jar A snapshot of the source used to build starr.
-   |   ---ant/                  Support libraries for ant.
-   +--pull-binary-libs.sh       Pulls binary artifacts from remote repository.
-   +--push-binary-libs.sh       Pushes new binary artifacts and creates sha.
-   +--README.rst                The file you are currently reading.
-   +--src/                      All the source files of Scala.
-   |   +--actors/               The sources of the Actor library.
-   |   +--compiler/             The sources of the Scala compiler.
-   |   +--library/              The sources of the core Scala library.
-   |   ---swing/                The sources of the Swing library.
-   +--target/       †           Build products output directory for sbt.
-   +--test/                     The Scala test suite.
-   ---tools/                    Developer utilities.
-
-
-
-Part II. Building Scala with SABBUS
---------------------------------------------------------------------------------
-
-SABBUS is the name of the Ant build script used to compile Scala. It is mostly
-automated and takes care of managing the dependencies.
-
-^^^^^^^^^^^^^^^^^^^^^^^^
-        LAYERS:
-^^^^^^^^^^^^^^^^^^^^^^^^
-In order to guarantee the bootstrapping of the Scala compiler, SABBUS builds
-Scala in layers. Each layer is a complete compiled Scala compiler and library.
-A superior layer is always compiled by the layer just below it. Here is a short
-description of the four layers that SABBUS uses, from bottom to top:
-
-- ``starr``: the stable reference Scala release which is shared by all the
-  developers. It is found in the repository as 'lib/scala-compiler.jar' and
-  'lib/scala-library.jar'. Any committable source code must be compiled directly
-  by starr to guarantee the bootstrapping of the compiler.
-
-- ``locker``: the local reference which is compiled by starr and is the work
-  compiler in a typical development cycle. When it has been built once, it is
-  “frozen” in this state. Updating it to fit the current source code must be
-  explicitly requested (see below).
-
-- ``quick``: the layer which is incrementally built when testing changes in the
-  compiler or library. This is considered an actual new version when locker is
-  up-to-date in relation to the source code.
-
-- ``strap``: a test layer used to check stability of the build.
-
-^^^^^^^^^^^^^^^^^^^^^^^^
-  DEPENDANT CHANGES:
-^^^^^^^^^^^^^^^^^^^^^^^^
-SABBUS compiles, for each layer, the Scala library first and the compiler next.
-That means that any changes in the library can immediately be used in the
-compiler without an intermediate build. On the other hand, if building the
-library requires changes in the compiler, a new locker must be built if
-bootstrapping is still possible, or a new starr if it is not.
-
-
-^^^^^^^^^^^^^^^^^^^^^^^^
-REQUIREMENTS FOR SABBUS:
-^^^^^^^^^^^^^^^^^^^^^^^^
-The Scala build system is based on Apache Ant. Most required pre-compiled
-libraries are part of the repository (in 'lib/'). The following however is
-assumed to be installed on the build machine:
-
-- A Java runtime environment (JRE) or SDK 1.6 or above.
-- Apache Ant version 1.7.0 or above.
-- bash (via cygwin for windows)
-- curl
-
-
-Part III. Common use-cases
---------------------------------------------------------------------------------
-- ``./pull-binary-libs.sh``
-
-  Downloads all binary artifacts associated with this commit.  This requires
-  internet access to http://typesafe.artifactoryonline.com/typesafe.
-
-- ``ant -p``
-
-  Prints out information about the commonly used ant targets. The interested
-  developer can find the rest in the XML files.
-
-- ``ant`` or ``ant build``
-
-  A quick compilation (to quick) of your changes using the locker compiler.
-
-  - This will rebuild all quick if locker changed.
-  - This will also rebuild locker if starr changed.
-
-- ``ln -s build/quick/bin qbin`` (once):
-- ``ant && qbin/scalac -d sandbox sandbox/test.scala && qbin/scala -cp sandbox Test``
-  
-  Incrementally builds quick, and then uses it to compile and run the file
-  ``sandbox/test.scala``. This is a typical debug cycle.
-
-- ``ant replacelocker``
-  
-  "unfreezes" locker by updating it to match the current source code.
-
-  - This will delete quick so as not to mix classes compiled with different
-    versions of locker.
-
-- ``ant test``
-
-  Tests that your code is working and fit to be committed.
-
-  - Runs the test suite and bootstrapping test on quick.
-  - You can run the suite only (skipping strap) with 'ant test.suite'.
-
-- ``ant docs``
-  Generates the HTML documentation for the library from the sources using the
-  scaladoc tool in quick.  Note: on most machines this requires more heap than
-  is allocate by default.  You can adjust the parameters with ANT_OPTS.
-  Example command line::
-    ANT_OPTS="-Xms512M -Xmx2048M -Xss1M -XX:MaxPermSize=128M" ant docs
-
-- ``ant dist``
-  
-  Builds a distribution.
-
-  - Rebuilds locker from scratch (to make sure it bootstraps).
-  - Builds everything twice more and compares bit-to-bit the two builds (to
-    make sure it is stable).
-  - Runs the test suite (and refuses to build a distribution if it fails).
-  - Creates a local distribution in 'dists/latest'.
-
-- ``ant clean``
-
-  Removes all temporary build files (locker is preserved).
-
-- ``ant locker.clean``
-
-  Removes all build files.
-
-- ``ant all.clean``
-
-  Removes all build files (including locker) and all distributions.
-
-Many of these targets offer a variant which runs with -optimise enabled.
-Optimized targets include build-opt, test-opt, dist-opt, fastdist-opt,
-replacestarr-opt, replacelocker-opt, and distpack-opt.
-
-Part IV. Contributing to Scala
---------------------------------------------------------------------------------
-
-If you wish to contribute, you can find all of the necessary information on
-the official Scala website: www.scala-lang.org.
-
-Specifically, you can subscribe to the Scala mailing lists, read all of the
-available documentation, and browse the live github repository.  You can contact
-the Scala team by sending us a message on one of the mailing lists, or by using
-the available contact form.
-
-In detail:
-
-- Scala website (links to everything else):
-  http://www.scala-lang.org
-
-- Scala documentation:
-  http://docs.scala-lang.org
-
-- Scala mailing lists:
-  http://www.scala-lang.org/node/199
-
-- Scala bug and issue tracker:
-  https://issues.scala-lang.org
-
-- Scala live git source tree:
-  http://github.com/scala/scala
-
-If you are interested in contributing code, we ask you to sign the
-[Scala Contributor License Agreement](http://typesafe.com/contribute/cla/scala),
-which allows us to ensure that all code submitted to the project is
-unencumbered by copyrights or patents.
-
-Before submitting a pull-request, please make sure you have followed the guidelines
-outlined in our `Pull Request Policy <https://github.com/scala/scala/wiki/Pull-Request-Policy>`_.
-
-------------------
-
-
-
-Thank you!
-
-The Scala Team
diff --git a/bincompat-backward.whitelist.conf b/bincompat-backward.whitelist.conf
index 2d3c203..703c5ad 100644
--- a/bincompat-backward.whitelist.conf
+++ b/bincompat-backward.whitelist.conf
@@ -1,105 +1,186 @@
 filter {
-    packages = [
-     "scala.reflect.internal"
-    ]
-    problems=[
-        # Scala library
-        {
-            # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
-            matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$4"
-            problemName=MissingMethodProblem
-        },
-        {
-            # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
-            matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$5"
-            problemName=MissingMethodProblem
-        },
-        # {
-        #     # private[scala]
-        #     matchName="scala.collection.immutable.ListSerializeStart$"
-        #     problemName=MissingClassProblem
-        # },
-        # {
-        #     # private[scala]
-        #     matchName="scala.collection.immutable.ListSerializeStart"
-        #     problemName=MissingClassProblem
-        # },
-        {
-            # private nested class became private top-level class to fix SI-7018
-            matchName="scala.reflect.macros.Attachments$NonemptyAttachments"
-            problemName=MissingClassProblem
-        },
-
-        # scala.reflect.runtime
-        # {
-        #     matchName="scala.reflect.runtime.JavaUniverse.createClassModule"
-        #     problemName=MissingMethodProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.JavaUniverse.initClassModule"
-        #     problemName=MissingMethodProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.SymbolLoaders.createClassModule"
-        #     problemName=MissingMethodProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.SymbolLoaders.initClassModule"
-        #     problemName=MissingMethodProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.SymbolLoaders.initClassAndModule"
-        #     problemName=MissingMethodProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.SymbolLoaders.initAndEnterClassAndModule"
-        #     problemName=MissingMethodProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$jclassAsScala"
-        #     problemName=MissingMethodProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$jclassAsScala"
-        #     problemName=IncompatibleResultTypeProblem
-        # },
-
-        # scala.concurrent.forkjoin (SI-7442)
-        {
-            matchName="scala.concurrent.forkjoin.ForkJoinTask.internalGetCompleter"
-            problemName=MissingMethodProblem
-        },
-        {
-            matchName="scala.concurrent.forkjoin.ForkJoinPool.registerWorker"
-            problemName=IncompatibleMethTypeProblem
-        },
-        {
-            matchName="scala.concurrent.forkjoin.ForkJoinPool.nextWorkerName"
-            problemName=MissingMethodProblem
-        },
-        {
-            matchName="scala.concurrent.forkjoin.ForkJoinPool.signalWork"
-            problemName=MissingMethodProblem
-        },
-        {
-            matchName="scala.concurrent.forkjoin.ForkJoinPool.idlePerActive"
-            problemName=MissingMethodProblem
-        },
-        {
-            matchName="scala.concurrent.forkjoin.ForkJoinPool.tryCompensate"
-            problemName=MissingMethodProblem
-        },
-        {
-            matchName="scala.concurrent.forkjoin.ForkJoinPool.helpJoinOnce"
-            problemName=IncompatibleResultTypeProblem
-        },
-        {
-            matchName="scala.reflect.runtime.JavaUniverse.isInvalidClassName"
-            problemName=MissingMethodProblem
-        },
-        {
-            matchName="scala.reflect.runtime.SymbolLoaders.isInvalidClassName"
-            problemName=MissingMethodProblem
-        }
-    ]
+  packages = [
+    "scala.reflect.internal"
+    # "scala.concurrent.impl"
+    # "scala.reflect.runtime"
+  ]
+  problems=[
+    // see SI-8372
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofChar.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofChar.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofByte.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofByte.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofShort.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofShort.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofLong.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofLong.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofInt.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofInt.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps.unzip"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps.unzip3"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps.unzip"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps.unzip3"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofFloat.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofFloat.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofBoolean.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofBoolean.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofRef.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofRef.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofUnit.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofUnit.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofDouble.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofDouble.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    // see SI-8200
+    {
+        matchName="scala.reflect.api.StandardLiftables#StandardLiftableInstances.liftTree"
+        problemName=MissingMethodProblem
+    },
+    // see SI-8331
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi#SyntacticTypeAppliedExtractor.unapply"
+        problemName=IncompatibleResultTypeProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi#SyntacticTypeAppliedExtractor.unapply"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSelectType"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticAppliedType"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSelectTerm"
+        problemName=MissingMethodProblem
+    },
+    // see SI-8366
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticPartialFunction"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Mirror.symbolOf"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Mirror.typeOf"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Mirror.weakTypeOf"
+        problemName=MissingMethodProblem
+    },
+    // see SI-8388
+    {
+        matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticIdentExtractor"
+        problemName=MissingClassProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticIdent"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSingletonType"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTermIdent"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTypeIdent"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticCompoundType"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticAnnotatedType"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTypeProjection"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticExistentialType"
+        problemName=MissingMethodProblem
+    }
+  ]
 }
diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf
index 087fa07..5e869e3 100644
--- a/bincompat-forward.whitelist.conf
+++ b/bincompat-forward.whitelist.conf
@@ -1,178 +1,231 @@
 filter {
-    packages = [
-     "scala.reflect.internal"
-    ]
-    problems=[
-        # rework d526f8bd74 to duplicate tailImpl as a private method
-        # {
-        #     matchName="scala.collection.mutable.MutableList.tailImpl"
-        #     problemName=MissingMethodProblem
-        # },
-        {
-            # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
-            matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$6"
-            problemName=MissingMethodProblem
-        },
-        {
-            # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
-            matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$4"
-            problemName=MissingMethodProblem
-        },
-        {
-            # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
-            matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$5"
-            problemName=MissingMethodProblem
-        },
-        # TODO: revert a557a97360: bridge method appeared because result is now Int but the super-method's result type erases to Object
-        # {
-        #     matchName="scala.collection.immutable.Range.head"
-        #     problemName=IncompatibleResultTypeProblem
-        # },
-        # revert 0b92073a38 2aa66bec86: SI-4664 [Make scala.util.Random Serializable] Add test case
-        # {
-        #     matchName="scala.util.Random"
-        #     problemName=MissingTypesProblem
-        # },
-        # {
-        #     matchName="scala.util.Random$"
-        #     problemName=MissingTypesProblem
-        # },
-        # {
-        #     # private[concurrent]
-        #     matchName="scala.concurrent.BatchingExecutor$Batch"
-        #     problemName=MissingClassProblem
-        # },
-        # {
-        #     # private[concurrent]
-        #     matchName="scala.concurrent.BatchingExecutor"
-        #     problemName=MissingClassProblem
-        # },
-        # {
-        #     # private[concurrent]
-        #     matchName="scala.concurrent.impl.ExecutionContextImpl$AdaptedForkJoinTask"
-        #     problemName=MissingClassProblem
-        # },
-        # {
-        #     # private[concurrent]
-        #     matchName="scala.concurrent.impl.ExecutionContextImpl.scala$concurrent$impl$ExecutionContextImpl$$uncaughtExceptionHandler"
-        #     problemName=MissingMethodProblem
-        # },
-        {
-            # private nested class became private top-level class to fix SI-7018
-            matchName="scala.reflect.macros.NonemptyAttachments"
-            problemName=MissingClassProblem
-        },
-
-        # scala.reflect.runtime
-        # {
-        #     matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$jclassAsScala"
-        #     problemName=IncompatibleResultTypeProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$jclassAsScala1"
-        #     problemName=MissingMethodProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.SymbolLoaders.initClassAndModule"
-        #     problemName=MissingMethodProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.SymbolLoaders.initAndEnterClassAndModule"
-        #     problemName=MissingMethodProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.SymbolLoaders.createClassModule"
-        #     problemName=MissingMethodProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.SymbolLoaders.initClassModule"
-        #     problemName=MissingMethodProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.JavaUniverse"
-        #     problemName=MissingTypesProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.JavaUniverse.initClassAndModule"
-        #     problemName=MissingMethodProblem
-        # },
-        # {
-        #     matchName="scala.reflect.runtime.JavaUniverse.initAndEnterClassAndModule"
-        #     problemName=MissingMethodProblem
-        # },
-
-        # scala.concurrent.forkjoin (SI-7442)
-        {
-             matchName="scala.concurrent.forkjoin.ForkJoinPool.registerWorker"
-             problemName=IncompatibleMethTypeProblem
-        },
-        {
-             matchName="scala.concurrent.forkjoin.ForkJoinPool.externalPush"
-             problemName=MissingMethodProblem
-        },
-        {
-             matchName="scala.concurrent.forkjoin.ForkJoinPool.this"
-             problemName=IncompatibleMethTypeProblem
-        },
-        {
-             matchName="scala.concurrent.forkjoin.ForkJoinPool.signalWork"
-             problemName=MissingMethodProblem
-        },
-        {
-             matchName="scala.concurrent.forkjoin.ForkJoinPool.awaitQuiescence"
-             problemName=MissingMethodProblem
-        },
-        {
-             matchName="scala.concurrent.forkjoin.ForkJoinPool.tryCompensate"
-             problemName=MissingMethodProblem
-        },
-        {
-             matchName="scala.concurrent.forkjoin.ForkJoinTask.recordExceptionalCompletion"
-             problemName=MissingMethodProblem
-        },
-        {
-             matchName="scala.concurrent.forkjoin.ForkJoinTask.internalPropagateException"
-             problemName=MissingMethodProblem
-        },
-        {
-             matchName="scala.concurrent.forkjoin.ForkJoinPool.helpJoinOnce"
-             problemName=IncompatibleResultTypeProblem
-        },
-        {
-             matchName="scala.concurrent.impl.Promise$CompletionLatch"
-             problemName=MissingClassProblem
-        },
-        {
-            matchName="scala.concurrent.impl.Promise#DefaultPromise.linkRootOf"
-            problemName=MissingMethodProblem
-        },
-        {
-            matchName="scala.concurrent.impl.Promise#DefaultPromise.scala$concurrent$impl$Promise$DefaultPromise$$dispatchOrAddCallback"
-            problemName=MissingMethodProblem
-        },
-        {
-            matchName="scala.reflect.runtime.JavaMirrors#JavaMirror#FromJavaClassCompleter.scala$reflect$runtime$JavaMirrors$JavaMirror$FromJavaClassCompleter$$enterEmptyCtorIfNecessary$1"
-            problemName=MissingMethodProblem
-        },
-        {
-            matchName="scala.reflect.runtime.ReflectionUtils.scalacShouldntLoadClass"
-            problemName=MissingMethodProblem
-        },
-        {
-            matchName="scala.reflect.runtime.ReflectionUtils.scalacShouldntLoadClassfile"
-            problemName=MissingMethodProblem
-        },
-        {
-            matchName="scala.reflect.runtime.ReflectionUtils.isTraitImplementation"
-            problemName=MissingMethodProblem
-        },
-        {
-            matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$PackageAndClassPattern"
-            problemName=MissingMethodProblem
-        },
-        {
-            matchName="scala.reflect.runtime.SymbolLoaders.isInvalidClassName"
-            problemName=MissingMethodProblem
-        }
-    ]
+  packages = [
+    "scala.reflect.internal"
+    # "scala.concurrent.impl"
+    # "scala.reflect.runtime"
+  ]
+  problems=[
+    // see SI-8372
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofChar.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofChar.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofByte.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofByte.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofShort.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofShort.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofLong.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofLong.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofInt.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofInt.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps.unzip"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps.unzip3"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps.unzip"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps.unzip3"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofFloat.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofFloat.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofBoolean.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofBoolean.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofRef.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofRef.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofUnit.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofUnit.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofDouble.unzip"
+        problemName=IncompatibleMethTypeProblem
+    },
+    {
+        matchName="scala.collection.mutable.ArrayOps#ofDouble.unzip3"
+        problemName=IncompatibleMethTypeProblem
+    },
+    // see SI-8200
+    {
+        matchName="scala.reflect.api.Liftables#Liftable.liftTree"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.StandardLiftables#StandardLiftableInstances.liftTree"
+        problemName=MissingMethodProblem
+    },
+    // see SI-8331
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSelectType"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticAppliedType"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSelectTerm"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticSelectTermExtractor"
+        problemName=MissingClassProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi#SyntacticTypeAppliedExtractor.unapply"
+        problemName=IncompatibleResultTypeProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi#SyntacticTypeAppliedExtractor.unapply"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticSelectTypeExtractor"
+        problemName=MissingClassProblem
+    },
+    // see SI-8366
+    {
+        matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticPartialFunctionExtractor"
+        problemName=MissingClassProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticPartialFunction"
+        problemName=MissingMethodProblem
+    },
+    // see SI-8428
+    {
+        matchName="scala.collection.Iterator#ConcatIterator.this"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Mirror.symbolOf"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Mirror.typeOf"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Mirror.weakTypeOf"
+        problemName=MissingMethodProblem
+    },
+    // see SI-8388
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSingletonType"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTermIdent"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTypeIdent"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticCompoundType"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticAnnotatedType"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTypeProjection"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticExistentialType"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticIdent"
+        problemName=MissingMethodProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticAnnotatedTypeExtractor"
+        problemName=MissingClassProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticTermIdentExtractor"
+        problemName=MissingClassProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacitcSingletonTypeExtractor"
+        problemName=MissingClassProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticTypeIdentExtractor"
+        problemName=MissingClassProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticCompoundTypeExtractor"
+        problemName=MissingClassProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticExistentialTypeExtractor"
+        problemName=MissingClassProblem
+    },
+    {
+        matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticTypeProjectionExtractor"
+        problemName=MissingClassProblem
+    }
+  ]
 }
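
For orientation: each entry above is a MiMa (Migration Manager) filter, where matchName identifies the class or member whose binary-compatibility change is being whitelisted and problemName identifies the kind of MiMa problem being suppressed. A minimal sketch of the presumed enclosing HOCON structure (the top of the whitelist file lies outside this hunk, so the wrapper shown here is an assumption), reusing one of the entries above:

    filter {
        problems = [
            {
                matchName="scala.reflect.api.Mirror.symbolOf"
                problemName=MissingMethodProblem
            }
        ]
    }

These whitelist files are consumed by the bc.run-mima macro defined in build-ant-macros.xml below, which hands bincompat-<direction>.whitelist.conf to the MiMa CLI via its --filters option.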
diff --git a/build-ant-macros.xml b/build-ant-macros.xml
new file mode 100644
index 0000000..816a18b
--- /dev/null
+++ b/build-ant-macros.xml
@@ -0,0 +1,781 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="build-support" xmlns:artifact="urn:maven-artifact-ant">
+  <description> Macros for Scala's ant build </description>
+
+  <macrodef name="optimized">
+    <attribute name="name"/>
+    <sequential>
+      <antcall target="@{name}">
+        <param name="scalac.args.optimise" value="-optimise"/>
+      </antcall>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="copy-deps" description="Copy a file set based on maven dependency resolution to a directory. Currently used by the IntelliJ config files.">
+    <attribute name="project"/>
+    <attribute name="refid" default="@{project}.fileset"/>
+    <sequential>
+      <delete dir="${build-deps.dir}/@{project}" includes="*.jar"/>
+      <copy todir="${build-deps.dir}/@{project}">
+        <resources refid="@{refid}"/>
+        <mapper type="flatten"/>
+      </copy>
+    </sequential>
+  </macrodef>
+
+  <!-- Set a property @{name}.cross to the actual cross suffix that should be
+	used when resolving the module "@{name}". If the (user-supplied)
+	@{name}.cross.suffix property exists then use that value, otherwise use
+	"_${scala.binary.version}". -->
+  <macrodef name="prepareCross">
+    <attribute name="name" />
+    <sequential>
+      <if>
+        <isset property="@{name}.cross.suffix" />
+        <then>
+          <property name="@{name}.cross" value="${@{name}.cross.suffix}" />
+        </then>
+        <else>
+          <property name="@{name}.cross" value="_${scala.binary.version}" />
+        </else>
+      </if>
+    </sequential>
+  </macrodef>
+
+  <!-- Set property named @{name} to the jar resolved as @{jar}_${scala.binary.version}:jar.
+           @{jar}_${scala.binary.version} must be a maven dependency. -->
+  <macrodef name="propertyForCrossedArtifact">
+    <attribute name="name"/>
+    <attribute name="jar"/>
+    <attribute name="suffix" default="${@{name}.cross}"/>
+    <sequential>
+      <readProperty name="@{name}" property="@{jar}@{suffix}:jar"/>
+      <readProperty name="@{name}-sources" property="@{jar}@{suffix}:java-source:sources"/>
+      <readProperty name="@{name}-javadoc" property="@{jar}@{suffix}:java-source:javadoc"/>
+    </sequential>
+  </macrodef>
+
+  <!-- Set property named @{name} to the value of the property named @{property}.
+           Helper for performing nested property expansion without using the ant props lib -->
+  <macrodef name="readProperty">
+    <attribute name="name"/>
+    <attribute name="property"/>
+    <sequential>
+      <property name="@{name}" value="${@{property}}"/>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="init-project-prop">
+    <attribute name="project"/>
+    <attribute name="name"/>
+    <attribute name="default"/>
+    <sequential>
+      <local name="@{name}"/>
+      <if>
+        <not>
+          <isset property="@{project}.@{name}"/>
+        </not>
+        <then>
+          <property name="@{project}.@{name}" value="@{default}"/>
+        </then>
+      </if>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="clean">
+    <attribute name="build"/>
+    <sequential>
+      <delete dir="${build-@{build}.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="simple-javac">
+    <attribute name="project"/>
+    <!-- project: asm/forkjoin -->
+    <attribute name="args" default=""/>
+    <attribute name="jar" default="yes"/>
+    <sequential>
+      <uptodate property="@{project}.available" targetfile="${build-libs.dir}/@{project}.complete">
+        <srcfiles dir="${src.dir}/@{project}"/>
+      </uptodate>
+      <if>
+        <not>
+          <isset property="@{project}.available"/>
+        </not>
+        <then>
+          <stopwatch name="@{project}.timer"/>
+          <mkdir dir="${@{project}-classes}"/>
+          <javac debug="true" srcdir="${src.dir}/@{project}" destdir="${@{project}-classes}" classpath="${@{project}-classes}" includes="**/*.java" target="1.6" source="1.5" compiler="javac1.6">
+            <compilerarg line="${javac.args} @{args}"/>
+          </javac>
+          <if>
+            <equals arg1="@{jar}" arg2="yes"/>
+            <then>
+              <jar whenmanifestonly="fail" destfile="${build-libs.dir}/@{project}.jar" basedir="${@{project}-classes}"/>
+            </then>
+          </if>
+          <stopwatch name="@{project}.timer" action="total"/>
+          <mkdir dir="${build-libs.dir}"/>
+          <touch file="${build-libs.dir}/@{project}.complete" verbose="no"/>
+        </then>
+      </if>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="staged-javac">
+    <attribute name="stage"/>
+    <!-- current stage (locker, quick, strap) -->
+    <attribute name="project"/>
+    <!-- project: library/reflect/compiler/actors -->
+    <attribute name="destproject" default="@{project}"/>
+    <!-- overrides the output directory; used when building multiple projects into the same directory-->
+    <attribute name="args" default=""/>
+    <attribute name="excludes" default=""/>
+    <sequential>
+      <javac debug="true" srcdir="${src.dir}/@{project}" destdir="${build-@{stage}.dir}/classes/@{destproject}" includes="**/*.java" excludes="@{excludes}" target="1.6" source="1.5">
+        <compilerarg line="${javac.args} @{args}"/>
+        <classpath refid="@{stage}.@{destproject}.build.path"/>
+      </javac>
+    </sequential>
+  </macrodef>
+
+  <!-- Zinc assumes a one-to-one correspondence of output folder to set of source files.
+   When compiling different sets of source files in multiple compilations to the same output directory,
+   Zinc thinks source files that appeared in an earlier compilation but are absent in the current one,
+   were deleted and thus deletes the corresponding output files.
+
+   Note that zinc also requires each arg to scalac to be prefixed by -S.
+  -->
+  <macrodef name="zinc">
+    <attribute name="compilerpathref"/>
+    <attribute name="destdir"/>
+    <attribute name="srcdir"/>
+    <attribute name="srcpath" default="NOT SET"/>
+    <!-- needed to compile the library; "NOT SET" is just a convention to denote an optional attribute -->
+    <attribute name="buildpathref"/>
+    <attribute name="params" default=""/>
+    <attribute name="java-excludes" default=""/>
+    <sequential>
+      <local name="sources"/>
+      <pathconvert pathsep=" " property="sources">
+        <fileset dir="@{srcdir}">
+          <include name="**/*.java"/>
+          <include name="**/*.scala"/>
+          <exclude name="@{java-excludes}"/>
+        </fileset>
+      </pathconvert>
+      <local name="args"/>
+      <local name="sargs"/>
+      <if>
+        <not>
+          <equals arg1="@{srcpath}" arg2="NOT SET"/>
+        </not>
+        <then>
+          <property name="args" value="@{params} -sourcepath @{srcpath}"/>
+        </then>
+      </if>
+      <property name="args" value="@{params}"/>
+      <!-- default -->
+      <!-- HACK: prefix scalac args by -S -->
+      <script language="javascript">
+        project.setProperty("sargs", project.getProperty("args").trim().replaceAll(" ", " -S"));
+      </script>
+      <exec osfamily="unix" executable="tools/zinc" failifexecutionfails="true" failonerror="true">
+        <arg line="-nailed -compile-order JavaThenScala -scala-path ${ant.refid:@{compilerpathref}} -d @{destdir} -classpath ${toString:@{buildpathref}} ${sargs} ${sources}"/>
+      </exec>
+    </sequential>
+  </macrodef>
+
+  <!-- STAGED COMPILATION MACROS -->
+  <macrodef name="staged-scalac">
+    <attribute name="with"/>
+    <!-- will use path `@{with}.compiler.path` to locate scalac -->
+    <attribute name="stage"/>
+    <!-- current stage (locker, quick, strap) -->
+    <attribute name="project"/>
+    <!-- project: library/reflect/compiler/actors -->
+    <attribute name="srcpath" default="NOT SET"/>
+    <!-- needed to compile the library -->
+    <attribute name="args" default=""/>
+    <!-- additional args -->
+    <attribute name="destproject" default="@{project}"/>
+    <!-- overrides the output directory; used when building multiple projects into the same directory-->
+    <attribute name="srcdir" default="@{project}"/>
+    <attribute name="java-excludes" default=""/>
+    <sequential>
+      <!-- TODO: detect zinc anywhere on PATH.
+           Use zinc for the quick stage if it's available;
+           we would use it for locker too, but something is iffy in sbt: we get a class cast error on the global phase. -->
+      <if>
+        <and>
+          <available file="tools/zinc"/>
+          <equals arg1="@{stage}" arg2="quick"/>
+        </and>
+        <then>
+          <zinc taskname="Z.@{stage}.@{project}" compilerpathref="@{with}.compiler.path" destdir="${build-@{stage}.dir}/classes/@{destproject}" srcdir="${src.dir}/@{srcdir}" srcpath="@{srcpath}" buildpathref="@{stage}.@{project}.build.path" params="${scalac.args.@{stage}} @{args}" java-excludes="@{java-excludes}"/>
+        </then>
+        <else>
+          <if>
+            <equals arg1="@{srcpath}" arg2="NOT SET"/>
+            <then>
+              <scalacfork taskname="@{stage}.@{project}" jvmargs="${scalacfork.jvmargs}" compilerpathref="@{with}.compiler.path" destdir="${build-@{stage}.dir}/classes/@{destproject}" srcdir="${src.dir}/@{srcdir}" params="${scalac.args.@{stage}} @{args}">
+                <include name="**/*.scala"/>
+                <compilationpath refid="@{stage}.@{project}.build.path"/>
+              </scalacfork>
+            </then>
+            <else>
+              <scalacfork taskname="@{stage}.@{project}" jvmargs="${scalacfork.jvmargs}" compilerpathref="@{with}.compiler.path" destdir="${build-@{stage}.dir}/classes/@{destproject}" srcdir="${src.dir}/@{srcdir}" srcpath="@{srcpath}" params="${scalac.args.@{stage}} @{args}">
+                <include name="**/*.scala"/>
+                <compilationpath refid="@{stage}.@{project}.build.path"/>
+              </scalacfork>
+            </else>
+          </if>
+        </else>
+      </if>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="staged-uptodate">
+    <attribute name="stage"/>
+    <attribute name="project"/>
+    <element name="check"/>
+    <element name="do"/>
+    <sequential>
+      <uptodate property="@{stage}.@{project}.available" targetfile="${build-@{stage}.dir}/@{project}.complete">
+        <check/>
+      </uptodate>
+      <if>
+        <not>
+          <isset property="@{stage}.@{project}.available"/>
+        </not>
+        <then>
+          <do/>
+          <touch file="${build-@{stage}.dir}/@{project}.complete" verbose="no"/>
+        </then>
+      </if>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="staged-build">
+    <attribute name="with"/>
+    <!-- will use path `@{with}.compiler.path` to locate scalac -->
+    <attribute name="stage"/>
+    <!-- current stage (locker, quick, strap) -->
+    <attribute name="project"/>
+    <!-- project: library/reflect/compiler/actors -->
+    <attribute name="srcpath" default="NOT SET"/>
+    <!-- needed to compile the library -->
+    <attribute name="args" default=""/>
+    <!-- additional args -->
+    <attribute name="includes" default="comp.includes"/>
+    <attribute name="java-excludes" default=""/>
+    <attribute name="version" default=""/>
+    <!-- non-empty for scaladoc: use @{version}.version.number in property file-->
+    <sequential>
+      <staged-uptodate stage="@{stage}" project="@{project}">
+        <check>
+          <srcfiles dir="${src.dir}/@{project}"/>
+        </check>
+        <do>
+          <stopwatch name="@{stage}.@{project}.timer"/>
+          <mkdir dir="${build-@{stage}.dir}/classes/@{project}"/>
+          <staged-javac stage="@{stage}" project="@{project}" excludes="@{java-excludes}"/>
+          <!-- always compile with javac for simplicity and regularity; it's cheap -->
+          <staged-scalac with="@{with}" stage="@{stage}" project="@{project}" srcpath="@{srcpath}" args="@{args}" java-excludes="@{java-excludes}"/>
+          <if>
+            <equals arg1="@{version}" arg2=""/>
+            <then>
+              <propertyfile file="${build-@{stage}.dir}/classes/@{project}/@{project}.properties">
+                <entry key="version.number" value="${version.number}"/>
+                <entry key="maven.version.number" value="${maven.version.number}"/>
+                <entry key="osgi.version.number" value="${osgi.version.number}"/>
+                <entry key="copyright.string" value="${copyright.string}"/>
+              </propertyfile>
+            </then>
+            <else>
+              <propertyfile file="${build-@{stage}.dir}/classes/@{project}/@{project}.properties">
+                <entry key="version.number" value="${@{version}.version.number}"/>
+                <entry key="copyright.string" value="${copyright.string}"/>
+              </propertyfile>
+            </else>
+          </if>
+          <copy todir="${build-@{stage}.dir}/classes/@{project}">
+            <fileset dir="${src.dir}/@{project}">
+              <patternset refid="@{includes}"/>
+            </fileset>
+          </copy>
+          <stopwatch name="@{stage}.@{project}.timer" action="total"/>
+        </do>
+      </staged-uptodate>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="staged-bin">
+    <attribute name="stage"/>
+    <attribute name="classpathref" default="NOT SET"/>
+    <sequential>
+      <staged-uptodate stage="@{stage}" project="bin">
+        <check>
+          <srcfiles dir="${src.dir}">
+            <include name="compiler/scala/tools/ant/templates/**"/>
+          </srcfiles>
+        </check>
+        <do>
+          <taskdef name="mk-bin" classname="scala.tools.ant.ScalaTool" classpathref="@{stage}.bin.tool.path"/>
+          <mkdir dir="${build-@{stage}.dir}/bin"/>
+          <if>
+            <equals arg1="@{classpathref}" arg2="NOT SET"/>
+            <then>
+              <mk-bin file="${build-@{stage}.dir}/bin/scala" class="scala.tools.nsc.MainGenericRunner" javaFlags="${java.flags}"/>
+              <mk-bin file="${build-@{stage}.dir}/bin/scalac" class="scala.tools.nsc.Main" javaFlags="${java.flags}"/>
+              <mk-bin file="${build-@{stage}.dir}/bin/scaladoc" class="scala.tools.nsc.ScalaDoc" javaFlags="${java.flags}"/>
+              <mk-bin file="${build-@{stage}.dir}/bin/fsc" class="scala.tools.nsc.CompileClient" javaFlags="${java.flags}"/>
+              <mk-bin file="${build-@{stage}.dir}/bin/scalap" class="scala.tools.scalap.Main" javaFlags="${java.flags}"/>
+            </then>
+            <else>
+              <mk-bin file="${build-@{stage}.dir}/bin/scala" class="scala.tools.nsc.MainGenericRunner" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+              <mk-bin file="${build-@{stage}.dir}/bin/scalac" class="scala.tools.nsc.Main" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+              <mk-bin file="${build-@{stage}.dir}/bin/scaladoc" class="scala.tools.nsc.ScalaDoc" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+              <mk-bin file="${build-@{stage}.dir}/bin/fsc" class="scala.tools.nsc.CompileClient" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+              <mk-bin file="${build-@{stage}.dir}/bin/scalap" class="scala.tools.scalap.Main" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+            </else>
+          </if>
+          <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scala"/>
+          <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scalac"/>
+          <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scaladoc"/>
+          <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/fsc"/>
+          <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scalap"/>
+        </do>
+      </staged-uptodate>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="staged-pack">
+    <attribute name="project"/>
+    <attribute name="manifest" default=""/>
+    <element name="pre" optional="true"/>
+    <element name="jar-opts" optional="true"/>
+    <sequential>
+      <local name="destfile"/>
+      <property name="destfile" value="${build-pack.dir}/${@{project}.targetdir}/${@{project}.targetjar}"/>
+      <uptodate property="pack.@{project}.available" targetfile="${destfile}">
+        <srcresources>
+          <resources refid="pack.@{project}.files"/>
+          <!-- <path><pathelement location="${build-quick.dir}/@{project}.complete"/></path> -->
+        </srcresources>
+      </uptodate>
+      <if>
+        <not>
+          <isset property="pack.@{project}.available"/>
+        </not>
+        <then>
+          <mkdir dir="${build-pack.dir}/${@{project}.targetdir}"/>
+          <pre/>
+          <if>
+            <not>
+              <equals arg1="@{manifest}" arg2=""/>
+            </not>
+            <then>
+              <jar whenmanifestonly="fail" destfile="${destfile}" manifest="@{manifest}">
+                <!-- update="true" makes no difference on my machine, so starting from scratch-->
+                <jar-opts/>
+                <path refid="pack.@{project}.files"/>
+              </jar>
+            </then>
+            <else>
+              <jar whenmanifestonly="fail" destfile="${destfile}">
+                <jar-opts/>
+                <path refid="pack.@{project}.files"/>
+              </jar>
+            </else>
+          </if>
+        </then>
+      </if>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="staged-docs">
+    <attribute name="project"/>
+    <element name="includes" implicit="true"/>
+    <sequential>
+      <staged-uptodate stage="docs" project="@{project}">
+        <check>
+          <srcfiles dir="${src.dir}/${@{project}.srcdir}"/>
+        </check>
+        <do>
+          <stopwatch name="docs.@{project}.timer"/>
+          <mkdir dir="${build-docs.dir}/@{project}"/>
+          <if>
+            <equals arg1="${@{project}.docroot}" arg2="NOT SET"/>
+            <then>
+              <scaladoc destdir="${build-docs.dir}/@{project}" doctitle="${@{project}.description}" docfooter="epfl" docversion="${version.number}" sourcepath="${src.dir}" classpathref="docs.@{project}.build.path" srcdir="${src.dir}/${@{project}.srcdir}" addparams="${scalac.args.all}" implicits="on" diagrams="on" groups="on" rawOutput="${scaladoc.raw.output}" noPrefixes="${scaladoc.no.prefixes}" docUncompilable="${src.dir}/library-aux" skipPackages="${@{project}.skipPackages}">
+                <includes/>
+              </scaladoc>
+            </then>
+            <else>
+              <scaladoc destdir="${build-docs.dir}/@{project}" doctitle="${@{project}.description}" docfooter="epfl" docversion="${version.number}" sourcepath="${src.dir}" classpathref="docs.@{project}.build.path" srcdir="${src.dir}/${@{project}.srcdir}" docRootContent="${src.dir}/@{project}/${@{project}.docroot}" addparams="${scalac.args.all}" implicits="on" diagrams="on" groups="on" rawOutput="${scaladoc.raw.output}" noPrefixes="${scaladoc.no.prefixes}" docUncompilable="${src.dir}/libr [...]
+                <includes/>
+              </scaladoc>
+            </else>
+          </if>
+          <stopwatch name="docs.@{project}.timer" action="total"/>
+        </do>
+      </staged-uptodate>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="make-bundle">
+    <attribute name="project"/>
+    <element name="srcs" description="Sources for this bundle" optional="true" implicit="true"/>
+    <sequential>
+      <copy file="${src.dir}/build/bnd/${@{project}.name}.bnd" tofile="${build-osgi.dir}/${@{project}.name}.bnd" overwrite="true">
+        <filterset>
+          <filter token="VERSION" value="${osgi.version.number}"/>
+          <filter token="SCALA_BINARY_VERSION" value="${scala.binary.version}"/>
+          <filter token="SCALA_FULL_VERSION" value="${scala.full.version}"/>
+          <filter token="SCALA_COMPILER_DOC_VERSION" value="${scala-compiler-doc.version.number}"/>
+          <filter token="SCALA_COMPILER_INTERACTIVE_VERSION" value="${scala-compiler-interactive.version.number}"/>
+        </filterset>
+      </copy>
+      <bnd classpath="${@{project}.jar}" eclipse="false" failok="false" exceptions="true" files="${build-osgi.dir}/${@{project}.name}.bnd" output="${build-osgi.dir}"/>
+      <if>
+        <equals arg1="${@{project}.src}" arg2="true"/>
+        <then>
+          <!--
+             A jar-like task that creates an OSGi source bundle. It adds the required MANIFEST.MF headers that allow
+             Eclipse to match sources with the corresponding binaries.
+          -->
+          <jar whenmanifestonly="fail" destfile="${build-osgi.dir}/${@{project}.name}-src.jar">
+            <srcs/>
+            <manifest>
+              <attribute name="Manifest-Version" value="1.0"/>
+              <attribute name="Bundle-Name" value="${@{project}.description} Sources"/>
+              <attribute name="Bundle-SymbolicName" value="org.scala-lang.${@{project}.package}${@{project}.name}${@{project}.namesuffix}.source"/>
+              <attribute name="Bundle-Version" value="${@{project}.version}"/>
+              <attribute name="Eclipse-SourceBundle" value="org.scala-lang.${@{project}.package}${@{project}.name}${@{project}.namesuffix};version="${@{project}.version}";roots:=".""/>
+            </manifest>
+          </jar>
+        </then>
+      </if>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="copy-bundle">
+    <attribute name="project"/>
+    <sequential>
+      <copy tofile="${dist.dir}/${@{project}.targetdir}/${@{project}.name}.jar" file="${build-osgi.dir}/org.scala-lang.${@{project}.package}${@{project}.name}.jar" overwrite="true"/>
+      <copy tofile="${dist.dir}/src/${@{project}.name}-src.jar" file="${@{project}.srcjar}" overwrite="true"/>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="mvn-package">
+    <attribute name="project"/>
+    <sequential>
+      <local name="artifact-base"/>
+      <property name="artifact-base" value="${dist.maven}/${@{project}.dir}${@{project}.name}/${@{project}.name}"/>
+      <mkdir dir="${dist.maven}/${@{project}.dir}${@{project}.name}"/>
+      <copy tofile="${artifact-base}.jar" file="${build-osgi.dir}/org.scala-lang.${@{project}.package}${@{project}.name}${@{project}.namesuffix}.jar" overwrite="true"/>
+      <copy tofile="${artifact-base}-src.jar" file="${build-osgi.dir}/${@{project}.name}-src.jar" overwrite="true"/>
+      <copy tofile="${artifact-base}-pom.xml" file="${src.dir}/build/maven/${@{project}.dir}/${@{project}.name}-pom.xml" overwrite="true"/>
+      <if>
+        <not>
+          <isset property="docs.skip"/>
+        </not>
+        <then>
+          <jar destfile="${artifact-base}-docs.jar" basedir="${build-docs.dir}/@{project}" whenmanifestonly="fail">
+            <include name="**/*"/>
+          </jar>
+        </then>
+      </if>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="deploy-remote">
+    <attribute name="jar" default=""/>
+    <attribute name="pom"/>
+    <element name="artifacts" implicit="true" optional="true"/>
+    <sequential>
+      <if><equals arg1="@{jar}" arg2="true"/><then>
+        <artifact:deploy settingsFile="${settings.file}">
+          <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" />
+          <artifact:pom refid="@{pom}" />
+          <artifacts/>
+        </artifact:deploy>
+      </then><else>
+        <artifact:deploy file="@{jar}" settingsFile="${settings.file}">
+          <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" />
+          <artifact:pom refid="@{pom}" />
+          <artifacts/>
+        </artifact:deploy>
+      </else></if>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="deploy-local">
+    <attribute name="jar" default=""/>
+    <attribute name="pom"/>
+    <element name="artifacts" implicit="true" optional="true"/>
+    <sequential>
+      <if><equals arg1="@{jar}" arg2="true"/><then>
+        <artifact:install>
+          <artifact:localRepository path="${local.repository}"  id="${repository.credentials.id}" />
+          <artifact:pom refid="@{pom}" />
+          <artifacts/>
+        </artifact:install>
+      </then><else>
+        <artifact:install file="@{jar}">
+          <artifact:localRepository path="${local.repository}"  id="${repository.credentials.id}" />
+          <artifact:pom refid="@{pom}" />
+          <artifacts/>
+        </artifact:install>
+      </else></if>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="deploy-to">
+    <attribute name="jar" default=""/>
+    <attribute name="pom"/>
+    <attribute name="local"/>
+    <element name="artifacts" implicit="true" optional="true"/>
+    <sequential>
+      <if><equals arg1="@{local}" arg2="true"/><then>
+        <deploy-local jar="@{jar}" pom="@{pom}"> <artifacts/> </deploy-local>
+      </then><else>
+        <deploy-remote jar="@{jar}" pom="@{pom}"> <artifacts/> </deploy-remote>
+      </else></if>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="filter-pom">
+    <attribute name="path" />
+    <attribute name="name" />
+
+    <sequential>
+      <copy file="${path}-pom.xml" tofile="${path}-pom-filtered.xml" overwrite="true">
+        <filterset>
+          <filter token="VERSION"                    value="${maven.version.number}" />
+          <filter token="SCALA_BINARY_VERSION"       value="${scala.binary.version}" />
+          <filter token="SCALA_FULL_VERSION"         value="${scala.full.version}" />
+          <filter token="XML_VERSION"                value="${scala-xml.version.number}" />
+          <filter token="PARSER_COMBINATORS_VERSION" value="${scala-parser-combinators.version.number}" />
+          <filter token="CONTINUATIONS_PLUGIN_VERSION"  value="${scala-continuations-plugin.version.number}" />
+          <filter token="CONTINUATIONS_LIBRARY_VERSION" value="${scala-continuations-library.version.number}" />
+          <filter token="SCALA_SWING_VERSION"           value="${scala-swing.version.number}" />
+          <filter token="RELEASE_REPOSITORY"         value="${remote.release.repository}" />
+          <filter token="SNAPSHOT_REPOSITORY"        value="${remote.snapshot.repository}" />
+          <filter token="JLINE_VERSION"              value="${jline.version}" />
+          <filter token="AKKA_ACTOR_VERSION"         value="${akka-actor.version.number}" />
+          <filter token="ACTORS_MIGRATION_VERSION"   value="${actors-migration.version.number}" />
+
+          <!-- TODO modularize compiler.
+          <filter token="SCALA_COMPILER_DOC_VERSION" value="${scala-compiler-doc.version.number}" />
+          <filter token="SCALA_COMPILER_INTERACTIVE_VERSION" value="${scala-compiler-interactive.version.number}" />
+          -->
+        </filterset>
+      </copy>
+      <artifact:pom id="@{name}.pom" file="${path}-pom-filtered.xml" />
+    </sequential>
+  </macrodef>
+
+  <macrodef name="deploy-one">
+    <attribute name="name" />
+    <attribute name="local"  default="false"/>
+    <attribute name="signed" default="false"/>
+
+    <sequential>
+      <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/>
+
+      <echo>Deploying ${path}-[pom.xml|src.jar|docs.jar].</echo>
+
+      <filter-pom name="@{name}" path="@{path}"/>
+
+      <if><equals arg1="@{signed}" arg2="false"/><then>
+        <if><isset property="docs.skip"/><then>
+          <deploy-to local="@{local}" jar="${path}.jar" pom="@{name}.pom">
+            <artifact:attach type="jar" file="${path}-src.jar"  classifier="sources" />
+          </deploy-to>
+        </then><else>
+          <deploy-to local="@{local}" jar="${path}.jar" pom="@{name}.pom">
+            <artifact:attach type="jar" file="${path}-src.jar"  classifier="sources" />
+            <artifact:attach type="jar" file="${path}-docs.jar" classifier="javadoc" />
+          </deploy-to>
+        </else></if>
+      </then><else>
+        <local name="repo"/>
+        <if><equals arg1="@{local}" arg2="false"/><then>
+          <property name="repo" value="${remote.repository}"/>
+        </then><else>
+          <property name="repo" value="${local.repository}"/>
+        </else></if>
+        <artifact:mvn failonerror="true">
+          <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
+          <arg value="-Durl=${repo}" />
+          <arg value="-DrepositoryId=${repository.credentials.id}" />
+          <arg value="-DpomFile=${path}-pom-filtered.xml" />
+          <arg value=   "-Dfile=${path}.jar" />
+          <arg value="-Dsources=${path}-src.jar" />
+          <arg value="-Djavadoc=${path}-docs.jar" />
+          <arg value="-Pgpg" />
+          <arg value="-Dgpg.useagent=true" />
+        </artifact:mvn>
+      </else></if>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="deploy-jar">
+    <attribute name="name" />
+    <attribute name="local"  default="false"/>
+    <attribute name="signed" default="false"/>
+
+    <sequential>
+      <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/>
+
+      <echo>Deploying ${path}.jar with ${path}-pom.xml.</echo>
+
+      <filter-pom name="@{name}" path="@{path}"/>
+
+      <if><equals arg1="@{signed}" arg2="false"/><then>
+        <deploy-to local="@{local}" jar="${path}.jar" pom="@{name}.pom"/>
+      </then><else>
+        <local name="repo"/>
+        <if><equals arg1="@{local}" arg2="false"/><then>
+          <property name="repo" value="${remote.repository}"/>
+        </then><else>
+          <property name="repo" value="${local.repository}"/>
+        </else></if>
+        <artifact:mvn failonerror="true">
+          <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
+          <arg value="-Durl=${repo}" />
+          <arg value="-DrepositoryId=${repository.credentials.id}" />
+          <arg value="-DpomFile=${path}-pom-filtered.xml" />
+          <arg value=   "-Dfile=${path}.jar" />
+          <arg value="-Pgpg" />
+          <arg value="-Dgpg.useagent=true" />
+        </artifact:mvn>
+      </else></if>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="deploy-pom">
+    <attribute name="name" />
+    <attribute name="local"  default="false"/>
+    <attribute name="signed" default="false"/>
+
+    <sequential>
+      <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/>
+
+      <echo>Deploying ${path}-pom.xml.</echo>
+
+      <filter-pom name="@{name}" path="@{path}"/>
+
+      <if><equals arg1="@{signed}" arg2="false"/><then>
+        <deploy-to local="@{local}" pom="@{name}.pom"/>
+      </then><else>
+        <local name="repo"/>
+        <if><equals arg1="@{local}" arg2="false"/><then>
+          <property name="repo" value="${remote.repository}"/>
+        </then><else>
+          <property name="repo" value="${local.repository}"/>
+        </else></if>
+        <artifact:mvn failonerror="true">
+          <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
+          <arg value="-Durl=${repo}" />
+          <arg value="-DrepositoryId=${repository.credentials.id}" />
+          <arg value="-DpomFile=${path}-pom-filtered.xml" />
+          <arg value=   "-Dfile=${path}-pom-filtered.xml" />
+          <arg value="-Pgpg" />
+          <arg value="-Dgpg.useagent=true" />
+        </artifact:mvn>
+      </else></if>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="deploy">
+    <attribute name="local"  default="false"/>
+    <attribute name="signed" default="false"/>
+
+    <sequential>
+      <deploy-one name="scala-library"     local="@{local}" signed="@{signed}"/>
+      <deploy-one name="scala-reflect"     local="@{local}" signed="@{signed}"/>
+      <deploy-one name="scala-compiler"    local="@{local}" signed="@{signed}"/>
+
+      <!-- TODO modularize compiler.
+      <deploy-one name="scala-compiler-doc"         local="@{local}" signed="@{signed}"/>
+      <deploy-one name="scala-compiler-interactive" local="@{local}" signed="@{signed}"/>
+      -->
+
+      <deploy-one name="scala-actors"      local="@{local}" signed="@{signed}"/>
+      <deploy-one name="scalap"            local="@{local}" signed="@{signed}"/>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="testSuite">
+    <attribute name="dir" default="${partest.dir}"/>
+    <attribute name="srcdir" default="files"/> <!-- TODO: make targets for `pending` and other subdirs -->
+    <attribute name="colors" default="${partest.colors}"/>
+    <attribute name="scalacOpts" default="${scalac.args.optimise}"/>
+    <attribute name="pcp" default="${toString:partest.compilation.path}"/>
+    <attribute name="kinds"/>
+    <sequential>
+      <property name="partest.dir" value="@{dir}" />
+      <partest    srcdir="@{srcdir}"
+                   kinds="@{kinds}"
+                  colors="@{colors}"
+              scalacOpts="@{scalacOpts}"
+         compilationpath="@{pcp}"/>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="bc.run-mima">
+    <attribute name="jar-name"/>
+    <attribute name="prev"/>
+    <attribute name="curr"/>
+    <attribute name="direction"/>
+    <sequential>
+      <echo message="Checking @{direction} binary compatibility for @{jar-name} (against ${bc-reference-version})"/>
+      <java taskname="mima" fork="true" failonerror="true" classname="com.typesafe.tools.mima.cli.Main">
+        <arg value="--prev"/>
+        <arg value="@{prev}"/>
+        <arg value="--curr"/>
+        <arg value="@{curr}"/>
+        <arg value="--filters"/>
+        <arg value="${basedir}/bincompat-@{direction}.whitelist.conf"/>
+        <arg value="--generate-filters"/>
+        <classpath>
+          <path refid="mima.classpath"/>
+        </classpath>
+      </java>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="bc.check">
+    <attribute name="project"/>
+    <sequential>
+      <bc.run-mima jar-name="scala-@{project}" prev="${org.scala-lang:scala-@{project}:jar}" curr="${@{project}.jar}" direction="backward"/>
+      <bc.run-mima jar-name="scala-@{project}" prev="${@{project}.jar}" curr="${org.scala-lang:scala-@{project}:jar}" direction="forward"/>
+    </sequential>
+  </macrodef>
+
+  <macrodef name="tarz">
+    <attribute name="name" description="The tar file name (without extension)."/>
+    <element name="file-sets" description="A sequence of fileset elements to be included in the tar balls." optional="false" implicit="true"/>
+    <sequential>
+      <tar destfile="@{name}.tar" compression="none" longfile="gnu">
+        <file-sets/>
+      </tar>
+      <gzip src="@{name}.tar" destfile="@{name}.tgz"/>
+      <if>
+        <not>
+          <equals arg1="${archives.skipxz}" arg2="true"/>
+        </not>
+        <then>
+          <exec executable="xz" failifexecutionfails="false">
+            <arg line="-k -9e -S .xz @{name}.tar"/>
+          </exec>
+          <move file="@{name}.tar.xz" tofile="@{name}.txz" failonerror="false"/>
+        </then>
+      </if>
+      <delete file="@{name}.tar"/>
+    </sequential>
+  </macrodef>
+</project>
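
To make the cross-versioning macros above concrete, here is a hypothetical Ant fragment (not part of the patch; names follow the scala-xml dependency that build.xml resolves below). Assuming scala.binary.version is 2.11 and no scala-xml.cross.suffix property is supplied, prepareCross sets scala-xml.cross to "_2.11"; after maven-ant-tasks resolves the dependency, propertyForCrossedArtifact copies the generated org.scala-lang.modules:scala-xml_2.11:jar property into a plain scala-xml property:

    <!-- hypothetical usage sketch; requires the macros above plus maven-ant-tasks -->
    <prepareCross name="scala-xml"/>
    <artifact:dependencies pathId="scala-xml.classpath">
      <dependency groupId="org.scala-lang.modules"
                  artifactId="scala-xml${scala-xml.cross}"
                  version="${scala-xml.version.number}"/>
    </artifact:dependencies>
    <propertyForCrossedArtifact name="scala-xml" jar="org.scala-lang.modules:scala-xml"/>

A user can point the build at a differently cross-built module by setting, e.g., scala-xml.cross.suffix=_2.11.0-M6 in build.properties before these macros run.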
diff --git a/build.detach.xml b/build.detach.xml
deleted file mode 100644
index 132c812..0000000
--- a/build.detach.xml
+++ /dev/null
@@ -1,186 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="sabbus" default="build">
-
-  <description>
-SuperSabbus for Scala detach plugin.
-  </description>
-
-  <echo level="info" message="Running SABBUS for ${ant.project.name}..."/>
-
-<!-- ===========================================================================
-END-USER TARGETS
-============================================================================ -->
-  
-  <target name="build" depends="pack.done"
-    description="Builds the Scala detach plugin."/>
-
-  <target name="clean" depends="quick.clean">
-  </target>
-
-  <target name="all.clean" depends="quick.clean, pack.clean">
-  </target>
-
-<!-- ===========================================================================
-PROPERTIES
-============================================================================ -->
-
-  <property environment="env"/>
-  <!-- Prevents system classpath from being used -->
-  <property name="build.sysclasspath" value="ignore"/>
-
-  <!-- Defines the repository layout -->
-  <property name="lib.dir" value="${basedir}/lib"/>
-  <property name="src.dir" value="${basedir}/src"/>
-  <property name="partest.dir" value="${basedir}/test"/>
-
-  <!-- Loads custom properties definitions -->
-  <property file="${basedir}/build.properties"/>
-
-  <!-- Sets location of build folders -->
-  <property name="build.dir" value="${basedir}/build"/>
-  <property name="build-quick.dir" value="${build.dir}/quick"/>
-  <property name="build-pack.dir" value="${build.dir}/pack"/>
-
-  <!-- if ANT_OPTS is already set by the environment, it will be unaltered,
-       but if it is unset it will take this default value. -->
-  <property name="env.ANT_OPTS" value="-Xms1024M -Xmx1024M -Xss1M -XX:MaxPermSize=128M -XX:+UseParallelGC" />
-
-  <property
-      name="scalacfork.jvmargs"
-      value="${env.ANT_OPTS}"/>
-
-  <property name="scalac.args.quick" value="-deprecation"/>
-  <property name="scalac.args.optimise" value=""/>
-
-  <!-- Setting-up Ant contrib tasks -->
-    <taskdef resource="net/sf/antcontrib/antlib.xml" classpath="${lib.dir}/ant/ant-contrib.jar"/>
-
-<!-- ===========================================================================
-QUICK BUILD (QUICK)
-============================================================================ -->
-
-  <target name="quick.clean">
-    <delete includeemptydirs="yes" quiet="yes" failonerror="no">
-      <fileset dir="${build-quick.dir}/classes/detach-library"/>
-      <fileset dir="${build-quick.dir}/classes/detach-plugin"/>
-    </delete>
-  </target>
-
-  <target name="quick.done">
-    <stopwatch name="quick.done.timer"/>
-    <path id="quick.classpath">
-      <pathelement location="${build-quick.dir}/classes/library"/>
-      <pathelement location="${build-quick.dir}/classes/compiler"/>
-      <pathelement location="${lib.dir}/fjbg.jar"/>
-      <pathelement location="${lib.dir}/msil.jar"/>
-      <pathelement location="${lib.dir}/forkjoin.jar"/>
-      <pathelement location="${ant.home}/lib/ant.jar"/>
-    </path>
-    <taskdef
-      resource="scala/tools/ant/sabbus/antlib.xml"
-      classpathref="quick.classpath"
-    />
-    <mkdir dir="${build-quick.dir}/classes/detach-plugin"/>
-    <scalacfork
-      destdir="${build-quick.dir}/classes/detach-plugin"
-      compilerpathref="quick.classpath"
-      params="${scalac.args.quick}"
-      srcdir="${src.dir}/detach/plugin"
-      jvmargs="${scalacfork.jvmargs}">
-      <include name="**/*.scala"/>
-      <compilationpath>
-        <pathelement location="${build-quick.dir}/classes/library"/>
-        <pathelement location="${build-quick.dir}/classes/compiler"/>
-        <pathelement location="${build-quick.dir}/classes/detach-plugin"/>
-        <pathelement location="${lib.dir}/forkjoin.jar"/>
-      </compilationpath>
-    </scalacfork>
-    <copy
-       file="${src.dir}/detach/plugin/scalac-plugin.xml"
-       todir="${build-quick.dir}/classes/detach-plugin"
-    />
-    <mkdir dir="${build-quick.dir}/classes/detach-library"/>
-    <scalacfork
-      destdir="${build-quick.dir}/classes/detach-library"
-      compilerpathref="quick.classpath"
-      params="${scalac.args.quick}"
-      srcdir="${src.dir}/detach/library"
-      jvmargs="${scalacfork.jvmargs}">
-      <include name="**/*.scala"/>
-      <compilationpath>
-        <pathelement location="${build-quick.dir}/classes/library"/>
-        <pathelement location="${lib.dir}/forkjoin.jar"/>
-      </compilationpath>
-    </scalacfork>
-    <touch file="${build-quick.dir}/plugins.complete" verbose="no"/>
-    <stopwatch name="quick.done.timer" action="total"/>
-  </target>
-
-<!-- ===========================================================================
-PACKED QUICK BUILD (PACK)
-============================================================================ -->
-  
-  <target name="pack.start" depends="quick.done"/>
-  
-  <target name="pack.pre-lib" depends="pack.start">
-    <uptodate
-      property="pack.lib.available"
-      targetfile="${build-pack.dir}/lib/scala-detach.jar"
-      srcfile="${build-quick.dir}/plugins.complete"/>
-  </target>
-
-  <target name="pack.lib" depends="pack.pre-lib" unless="pack.lib.available">
-    <mkdir dir="${build-pack.dir}/misc/scala-devel/plugins"/>
-    <jar destfile="${build-pack.dir}/misc/scala-devel/plugins/detach.jar">
-      <fileset dir="${build-quick.dir}/classes/detach-plugin"/>
-    </jar>
-    <mkdir dir="${build-pack.dir}/lib"/>
-    <jar destfile="${build-pack.dir}/lib/scala-detach.jar">
-      <fileset dir="${build-quick.dir}/classes/detach-library">
-        <include name="scala/**"/>
-      </fileset>
-    </jar>
-  </target>
-
-  <target name="pack.done" depends="pack.lib">
-    <path id="pack.classpath">
-      <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
-      <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
-      <pathelement location="${build-pack.dir}/lib/scala-detach.jar"/>
-      <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
-      <pathelement location="${build-pack.dir}/lib/scalap.jar"/>
-      <pathelement location="${ant.home}/lib/ant.jar"/>
-      <pathelement location="${lib.dir}/jline.jar"/>
-    </path>
-    <taskdef resource="scala/tools/ant/antlib.xml" classpathref="pack.classpath"/>
-    <taskdef resource="scala/tools/partest/antlib.xml" classpathref="pack.classpath"/>
-  </target>
-
-  <target name="pack.clean">
-    <delete includeemptydirs="yes" quiet="yes" failonerror="no">
-      <fileset dir="${build-pack.dir}/lib" includes="scala-detach.jar"/>
-      <fileset dir="${build-pack.dir}/misc/scala-devel/plugins" includes="detach.jar"/>
-    </delete>
-  </target>
-
-<!-- ===========================================================================
-TEST SUITE
-============================================================================ -->
-
-  <target name="test.suite" depends="pack.done">
-    <property name="partest.srcdir" value="files" />
-    <partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
-             timeout="2400000"
-             srcdir="${partest.srcdir}"
-             scalacopts="${scalac.args.optimise} -Xpluginsdir ${build-pack.dir}/misc/scala-devel/plugins -Xplugin-require:detach -P:detach:enable">
-      <compilationpath>
-        <path refid="pack.classpath"/>
-        <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
-      </compilationpath>
-      <negtests  dir="${partest.dir}/${partest.srcdir}/detach-neg" includes="*.scala"/>
-      <runtests  dir="${partest.dir}/${partest.srcdir}/detach-run" includes="*.scala"/>
-    </partest>
-  </target>
-
-</project>
diff --git a/build.number b/build.number
index 7c027e7..51674b6 100644
--- a/build.number
+++ b/build.number
@@ -1,7 +1,7 @@
 #Tue Sep 11 19:21:09 CEST 2007
 version.major=2
-version.minor=10
-version.patch=4
+version.minor=11
+version.patch=0
 # This is the -N part of a version.  if it's 0, it's dropped from maven versions.
 version.bnum=0
 
diff --git a/build.number.maven b/build.number.maven
deleted file mode 100644
index eed9f38..0000000
--- a/build.number.maven
+++ /dev/null
@@ -1,3 +0,0 @@
-version.major=2
-version.minor=10
-version.patch=0
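
As the comment in build.number notes, version.bnum is the "-N" part of the version and is dropped when zero; with the values now in build.number (major 2, minor 11, patch 0, bnum 0) the maven version presumably comes out as plain 2.11.0, whereas bnum=1 would presumably yield 2.11.0-1.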
diff --git a/build.xml b/build.xml
old mode 100644
new mode 100755
index a54b033..fec1d94
--- a/build.xml
+++ b/build.xml
@@ -1,16 +1,17 @@
 <?xml version="1.0" encoding="UTF-8"?>
 
-<project name="sabbus" default="build" xmlns:artifact="urn:maven-artifact-ant">
+<project name="sabbus" default="build"
+  xmlns:artifact="urn:maven-artifact-ant"
+  xmlns:rsel="antlib:org.apache.tools.ant.types.resources.selectors">
+  <include file="build-ant-macros.xml" as="macros"/>
+
   <description>
 SuperSabbus for Scala core, builds the scala library and compiler. It can also package it as a simple distribution, tests it for stable bootstrapping and against the Scala test suite.
   </description>
 
 <!-- HINTS
-
  - for faster builds, have a build.properties in the same directory as build.xml that says:
       locker.skip=1
-      starr.use.released=1
-
 -->
 
 <!-- USAGE FROM JENKINS SCRIPTS IS (CURRENTLY) AS FOLLOWS:
@@ -23,8 +24,24 @@ antArgs tend to be:
 scalacArgs examples:
   "-Dscalac.args=\"-Yrangepos\" -Dpartest.scalac_opts=\"-Yrangepos\""
 
-targets exercised:
-  locker.done build-opt nightly test.suite test.continuations.suite test.scaladoc
+supported/exercised targets
+  to publish: nightly publish-opt-nodocs
+  to build: build build-opt locker.done
+  to run tests: test.suite test.scaladoc
+
+DO NOT RELY ON ANY OTHER TARGETS (ok, you're probably ok assuming the ones defined in the first 100 lines of this file)
+
+To build your own Scala distribution, do, e.g.:
+
+  ant publish-local-opt -Dmaven.version.suffix="-foo"
+  cd ~/git
+  hub clone scala/scala-dist
+  cd scala-dist
+  sbt 'set version := "2.11.0-foo"' 'set resolvers += Resolver.mavenLocal' universal:package-bin
+
+NOTE: `ant build` builds the essence of a Scala distribution under build/pack
+  (The only things missing are the docs; see `pack.doc` and `docs.done`.)
+
 -->
 
 <!-- To use Zinc with the ant build:
@@ -48,46 +65,70 @@ TODO:
   <target name="test"      depends="test.done"      description="Runs test suite and bootstrapping test on Scala compiler and library."/>
   <target name="docs"      depends="docs.done"      description="Builds documentation for the Scala library. Scaladoc is in 'build/scaladoc/library'."/>
   <target name="docscomp"  depends="docs.comp"      description="Builds documentation for the Scala compiler and library. Scaladoc is in 'build/scaladoc'."/>
-  <target name="dist" depends="all.clean, all.done" description="Cleans all and builds and tests a new distribution."/>
-  <target name="partialdist" depends="dist.partial" description="Makes a new distribution without documentation, so just for testing."/>
-  <target name="fastdist"    depends="dist.done"    description="Makes a new distribution without testing it or removing partially build elements."/>
 
   <target name="build-opt"       description="Optimized version of build.">       <optimized name="build"/></target>
   <target name="test-opt"        description="Optimized version of test.">        <optimized name="test"/></target>
+  <target name="test-core-opt"   description="Optimized version of test.core.">   <optimized name="test.core"/></target>
+  <target name="test-stab-opt"   description="Optimized version of test.stability.">   <optimized name="test.stability"/></target>
+
+  <target name="all.done" depends="test.done, pack-maven.done"/>
+  <target name="nightly"><optimized name="all.done"/></target>
+  <target name="nightly.checkall"> <antcall target="all.done"> <param name="partest.scalac_opts" value="-Ycheck:all"/></antcall></target>
+
+  <!-- The IDE build requires actors/swing/continuations, so need to publish them during PR validation until they are modules -->
+  <target name="publish-opt-nodocs" description="Publishes Scala (optimized) without generating docs/testing (library/reflect/compiler/actors/swing/continuations).">
+    <antcall target="publish">
+      <param name="docs.skip" value="1"/>
+      <param name="scalac.args.optimise" value="-optimise"/>
+    </antcall>
+  </target>
+  <target name="publish-core-opt-nodocs" description="Builds an untested, undocumented optimised core (library/reflect/compiler) and publishes to maven.">
+    <antcall target="publish-core">
+      <param name="docs.skip" value="1"/>
+      <param name="scalac.args.optimise" value="-optimise"/>
+    </antcall>
+  </target>
+  <target name="publish-core-local-nodocs" description="Builds an untested, undocumented core (library/reflect/compiler) and locally publishes to maven">
+    <antcall target="publish-core-local">
+      <param name="docs.skip" value="1"/>
+    </antcall>
+  </target>
+
+  <!-- prefer the sbt names, but the dotted names are used in jenkins;
+       rename there first before dropping the dotted ones -->
+  <target name="publish-local"  depends="publish.local"/>
+  <target name="publish-local-opt"><optimized name="publish-local"/></target>
+  <target name="publish-signed" depends="publish.signed"/>
+
+
+
+
+
+
+
+
+
+<!-- DEPRECATED -->
+  <target name="dist" depends="all.clean, all.done" description="Cleans all and builds and tests a new distribution."/>
+  <target name="partialdist" depends="pack.done"    description="Makes a new distribution without testing it or removing partially build elements."/>
+  <target name="fastdist"    depends="pack.done, pack.doc" description="Makes a new distribution without testing it or removing partially build elements."/>
   <target name="dist-opt"        description="Optimized version of dist.">        <optimized name="dist"/></target>
   <target name="partialdist-opt" description="Optimized version of partialdist."> <optimized name="partialdist"/></target>
   <target name="fastdist-opt"    description="Optimized version of fastdist.">    <optimized name="fastdist"/></target>
 
   <!-- packaging -->
-  <target name="distpack" depends="dist.done, docs.done">
-    <ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/></target>
-
-  <target name="distpack-maven" depends="dist.done, docs.done">
-    <ant antfile="${src.dir}/build/pack.xml" target="pack-maven.done" inheritall="yes" inheritrefs="yes"/></target>
-
+  <target name="distpack" depends="pack-maven.done"/>
+  <target name="distpack-maven" depends="pack-maven.done"/>
   <target name="distpack-opt" description="Builds an optimised distribution."> <optimized name="distpack"/></target>
   <target name="distpack-maven-opt" description="Builds an optimised maven distribution."><optimized name="distpack-maven"/></target>
+  <target name="distclean"   depends="dist.clean"   description="Removes all distributions. Binaries and documentation are untouched."/>
 
-  <target name="all.done" depends="dist.done, test.done"/>
-
-  <!-- must use depends for all.done, not antcall: need the properties defined in there (dist.dir) -->
-  <target name="nightly-nopt" depends="all.done, docs.done">
-    <ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/></target>
-  <target name="nightly"><optimized name="nightly-nopt"/></target>
-
-  <target name="nightly.checkall">
-    <antcall target="nightly-nopt"> <param name="partest.scalac_opts" value="-Ycheck:all"/></antcall></target>
+  <target name="nightly-nopt" depends="all.done"/>
 
   <target name="clean"       depends="quick.clean"  description="Removes binaries of compiler and library. Locker and distributions are untouched."/>
   <target name="docsclean"   depends="docs.clean"   description="Removes generated documentation. Distributions are untouched."/>
-  <target name="distclean"   depends="dist.clean"   description="Removes all distributions. Binaries and documentation are untouched."/>
 
-  <macrodef name="optimized" >
-    <attribute name="name"/>
-    <sequential>
-      <antcall target="@{name}"><param name="scalac.args.optimise" value="-optimise"/></antcall>
-    </sequential>
-  </macrodef>
+
 
 <!-- ===========================================================================
                                   PROPERTIES
@@ -98,7 +139,7 @@ TODO:
   <property name="build.sysclasspath"     value="ignore"/>
 
   <!-- Defines the repository layout -->
-  <property name="docs.dir"               value="${basedir}/docs"/>
+  <property name="doc.dir"                value="${basedir}/doc"/>
   <property name="lib.dir"                value="${basedir}/lib"/>
   <property name="src.dir"                value="${basedir}/src"/>
   <property name="partest.dir"            value="${basedir}/test"/>
@@ -114,28 +155,22 @@ TODO:
   <!-- Generating version number -->
   <property file="${basedir}/build.number"/>
 
-  <!-- read starr.version -->
-  <property file="${basedir}/starr.number"/>
+  <!-- read versions.properties -->
+  <property file="${basedir}/versions.properties"/>
 
   <!-- Sets location of pre-compiled libraries -->
-  <property name="library.starr.jar"       value="${lib.dir}/scala-library.jar"/>
-  <property name="reflect.starr.jar"       value="${lib.dir}/scala-reflect.jar"/>
-  <property name="compiler.starr.jar"      value="${lib.dir}/scala-compiler.jar"/>
-  <property name="msil.starr.jar"          value="${lib.dir}/msil.jar"/>
-  <property name="jline.jar"               value="${lib.dir}/jline.jar"/>
   <property name="ant.jar"                 value="${ant.home}/lib/ant.jar"/>
-  <property name="scalacheck.jar"          value="${lib.dir}/scalacheck.jar"/>
 
   <!-- Sets location of build folders -->
   <property name="build.dir"               value="${basedir}/build"/>
+  <property name="build-deps.dir"          value="${build.dir}/deps"/>
   <property name="build-libs.dir"          value="${build.dir}/libs"/>
   <property name="build-asm.dir"           value="${build.dir}/asm"/>
-  <property name="build-fjbg.dir"          value="${build-libs.dir}"/>
   <property name="build-forkjoin.dir"      value="${build-libs.dir}"/>
   <property name="build-locker.dir"        value="${build.dir}/locker"/>
-  <property name="build-palo.dir"          value="${build.dir}/palo"/>
   <property name="build-quick.dir"         value="${build.dir}/quick"/>
   <property name="build-pack.dir"          value="${build.dir}/pack"/>
+  <property name="build-manual.dir"        value="${build.dir}/manual"/>
   <property name="build-osgi.dir"          value="${build.dir}/osgi"/>
   <property name="build-junit.dir"         value="${build.dir}/junit"/>
   <property name="build-strap.dir"         value="${build.dir}/strap"/>
@@ -151,7 +186,8 @@ TODO:
   <property name="dists.dir"               value="${basedir}/dists"/>
 
   <property name="copyright.string"        value="Copyright 2002-2013, LAMP/EPFL"/>
-  <property name="partest.version.number"  value="0.9.2"/>
+
+  <property name="jline.version"           value="2.11"/>
 
   <!-- These are NOT the flags used to run SuperSabbus, but the ones written
        into the script runners created with scala.tools.ant.ScalaTool -->
@@ -191,10 +227,35 @@ TODO:
     </touch>
   </target>
 
-  <target name="init" depends="boot">
+  <target name="init.git" depends="boot">
+    <!-- replacestarr needs git.commit.sha, but doesn't want to run the init target (it computes maven.version.number) -->
+    <exec osfamily="unix" executable="tools/get-scala-commit-sha" outputproperty="git.commit.sha" failifexecutionfails="false" />
+    <exec osfamily="windows" executable="cmd.exe" outputproperty="git.commit.sha" failifexecutionfails="false">
+      <arg value="/c"/>
+      <arg value="tools\get-scala-commit-sha.bat"/>
+      <arg value="-p"/>
+    </exec>
+    <exec osfamily="unix" executable="tools/get-scala-commit-date" outputproperty="git.commit.date" failifexecutionfails="false" />
+    <exec osfamily="windows" executable="cmd.exe" outputproperty="git.commit.date" failifexecutionfails="false">
+      <arg value="/c"/>
+      <arg value="tools\get-scala-commit-date.bat"/>
+      <arg value="-p"/>
+    </exec>
+
+    <!-- some default in case something went wrong getting the revision -->
+    <property name="git.commit.sha"          value="unknown"/>
+    <property name="git.commit.date"         value="unknown"/>
+  </target>
+
+  <target name="init" depends="init.git">
     <!-- Set up Ant contrib tasks so we can use <if><then><else> instead of the clunky `unless` attribute -->
     <taskdef resource="net/sf/antcontrib/antlib.xml" classpath="${lib-ant.dir}/ant-contrib.jar"/>
 
+    <property name="scala.ant.min.version" value="1.8.2"/>
+    <if><not><antversion atleast="${scala.ant.min.version}"/></not>
+      <then><fail message="Ant version ${scala.ant.min.version} is required. You are running ${ant.version}"/></then>
+    </if>
+
     <!-- Add our maven ant tasks -->
     <path id="maven-ant-tasks.classpath" path="${lib-ant.dir}/maven-ant-tasks-2.1.1.jar" />
     <typedef resource="org/apache/maven/artifact/ant/antlib.xml" uri="urn:maven-artifact-ant" classpathref="maven-ant-tasks.classpath" />
@@ -217,6 +278,7 @@ TODO:
       <artifact:dependencies pathId="junit.classpath" filesetId="junit.fileset">
         <dependency groupId="junit" artifactId="junit" version="${junit.version}"/>
       </artifact:dependencies>
+      <copy-deps project="junit"/>
 
       <!-- Pax runner -->
       <property name="pax.exam.version" value="2.6.0"/>
@@ -235,38 +297,87 @@ TODO:
       </artifact:dependencies>
 
 
-      <artifact:dependencies pathId="partest.extras.classpath" filesetId="partest.extras.fileset" versionsId="partest.extras.versions">
-        <dependency groupId="com.googlecode.java-diff-utils" artifactId="diffutils" version="1.3.0"/>
+      <artifact:remoteRepository id="sonatype-release" url="https://oss.sonatype.org/content/repositories/releases"/>
+      <artifact:remoteRepository id="extra-repo" url="${extra.repo.url}"/>
+
+      <!-- prepare, for each of the names below, the property "@{name}.cross", set to the
+           necessary cross suffix (usually something like "_2.11.0-M6"). -->
+      <prepareCross name="scala-xml" />
+      <prepareCross name="scala-parser-combinators" />
+      <prepareCross name="scala-continuations-plugin" />
+      <prepareCross name="scala-continuations-library"/>
+      <prepareCross name="scala-swing"/>
+      <prepareCross name="partest"/>
+      <prepareCross name="scalacheck"/>
+
+      <!-- TODO: delay this until absolutely necessary to allow a minimal build; also move the partest dependency out of scaladoc -->
+      <artifact:dependencies pathId="partest.classpath" filesetId="partest.fileset" versionsId="partest.versions">
+        <!-- uncomment the following if you're deploying your own partest locally -->
+        <!-- <localRepository path="${user.home}/.m2/repository"/> -->
+        <!-- so we don't have to wait for artifacts to sync to Maven Central
+             (we don't distribute partest with Scala, so the risk of Sonatype and Maven Central being out of sync is irrelevant):
+          -->
+        <artifact:remoteRepository refid="sonatype-release"/>
+        <artifact:remoteRepository refid="extra-repo"/>
+        <dependency groupId="org.scala-lang.modules" artifactId="scala-partest${partest.cross}" version="${partest.version.number}" />
+      </artifact:dependencies>
+      <copy-deps project="partest"/>
+
+      <artifact:dependencies pathId="scalacheck.classpath" filesetId="scalacheck.fileset" versionsId="scalacheck.versions">
+        <artifact:remoteRepository refid="extra-repo"/>
+        <dependency groupId="org.scalacheck"         artifactId="scalacheck${scalacheck.cross}"    version="${scalacheck.version.number}" />
+      </artifact:dependencies>
+
+      <artifact:dependencies pathId="repl.deps.classpath" filesetId="repl.fileset" versionsId="repl.deps.versions">
+        <dependency groupId="jline" artifactId="jline" version="${jline.version}"/>
+      </artifact:dependencies>
+      <copy-deps project="repl"/>
+
+      <!-- used by the test.osgi target to create OSGi bundles for the xml and parser-combinators jars;
+           sourcesFilesetId and javadocFilesetId must be specified to download those artifact types -->
+      <artifact:dependencies pathId="external-modules.deps.classpath" sourcesFilesetId="external-modules.sources.fileset" javadocFilesetId="external-modules.javadoc.fileset">
+        <artifact:remoteRepository refid="extra-repo"/>
+        <dependency groupId="org.scala-lang.modules" artifactId="scala-xml${scala-xml.cross}" version="${scala-xml.version.number}"/>
+        <dependency groupId="org.scala-lang.modules" artifactId="scala-parser-combinators${scala-parser-combinators.cross}" version="${scala-parser-combinators.version.number}"/>
+        <dependency groupId="org.scala-lang.plugins" artifactId="scala-continuations-plugin${scala-continuations-plugin.cross}"  version="${scala-continuations-plugin.version.number}"/>
+        <dependency groupId="org.scala-lang.plugins" artifactId="scala-continuations-library${scala-continuations-library.cross}" version="${scala-continuations-library.version.number}"/>
+        <dependency groupId="org.scala-lang.modules" artifactId="scala-swing${scala-swing.cross}" version="${scala-swing.version.number}"/>
       </artifact:dependencies>
 
+      <!-- External modules, excluding the core -->
+      <path id="external-modules-nocore">
+        <restrict>
+          <path refid="external-modules.deps.classpath"/>
+          <rsel:not><rsel:or>
+            <rsel:name name="scala-library*.jar"/>
+            <rsel:name name="scala-reflect*.jar"/>
+            <rsel:name name="scala-compiler*.jar"/>
+          </rsel:or></rsel:not>
+        </restrict>
+      </path>
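+      <!-- the <restrict> above keeps everything from external-modules.deps.classpath except jars named
+           scala-library*/scala-reflect*/scala-compiler*, so the core Scala jars that the module poms
+           pull in do not leak onto this path. -->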
+      <copy-deps refid="external-modules-nocore" project="scaladoc"/>
+
+      <propertyForCrossedArtifact name="scala-parser-combinators" jar="org.scala-lang.modules:scala-parser-combinators"/>
+      <propertyForCrossedArtifact name="scala-xml"                jar="org.scala-lang.modules:scala-xml"/>
+      <propertyForCrossedArtifact name="scala-continuations-plugin"  jar="org.scala-lang.plugins:scala-continuations-plugin"/>
+      <propertyForCrossedArtifact name="scala-continuations-library" jar="org.scala-lang.plugins:scala-continuations-library"/>
+      <propertyForCrossedArtifact name="scala-swing"                 jar="org.scala-lang.modules:scala-swing"/>
+
       <!-- BND support -->
       <typedef resource="aQute/bnd/ant/taskdef.properties" classpathref="extra.tasks.classpath" />
 
-      <artifact:remoteRepository id="extra-repo" url="${extra.repo.url}"/>
-
-      <!-- Download STARR via maven if `starr.use.released` is set,
-           and `starr.version` is specified (see the starr.number properties file).
-           Want to slow down STARR changes, using only released versions. -->
-      <if><isset property="starr.use.released"/><then>
-        <echo message="Using Scala ${starr.version} for STARR."/>
-        <artifact:dependencies pathId="starr.core.path">
-          <artifact:remoteRepository refid="extra-repo"/>
-          <dependency groupId="org.scala-lang" artifactId="scala-library" version="${starr.version}"/>
-          <dependency groupId="org.scala-lang" artifactId="scala-reflect" version="${starr.version}"/>
-          <dependency groupId="org.scala-lang" artifactId="scala-compiler" version="${starr.version}"/>
-        </artifact:dependencies></then>
-      <else>
-        <path id="starr.core.path">
-          <pathelement location="${library.starr.jar}"/>
-          <pathelement location="${reflect.starr.jar}"/>
-          <pathelement location="${compiler.starr.jar}"/>
-          <pathelement location="${msil.starr.jar}"/>
-        </path></else>
-      </if>
+      <echo message="Using Scala ${starr.version} for STARR."/>
+      <artifact:dependencies pathId="starr.compiler.path">
+        <artifact:remoteRepository refid="extra-repo"/>
+        <dependency groupId="org.scala-lang" artifactId="scala-library" version="${starr.version}"/>
+        <dependency groupId="org.scala-lang" artifactId="scala-reflect" version="${starr.version}"/>
+        <dependency groupId="org.scala-lang" artifactId="scala-compiler" version="${starr.version}"/>
+      </artifact:dependencies>
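+      <!-- starr.version is read from versions.properties; for local experiments it can be overridden,
+           e.g. via the replacestarr target below, which records a generated version in build.properties. -->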
 
       <property name="maven-deps-done"     value="yep!"/>
     </then></if>
 
+
     <!-- NOTE: ant properties are write-once: second writes are silently discarded; the logic below relies on this -->
 
     <!-- Compute defaults (i.e., if not specified on command-line) for OSGi/maven version suffixes.
@@ -310,24 +421,6 @@ TODO:
       <property name="osgi.version.suffix"     value="${maven.version.suffix}"/>
       <property name="version.suffix"          value="${maven.version.suffix}"/></else></if>
 
-
-    <exec osfamily="unix" executable="tools/get-scala-commit-sha" outputproperty="git.commit.sha" failifexecutionfails="false" />
-    <exec osfamily="windows" executable="cmd.exe" outputproperty="git.commit.sha" failifexecutionfails="false">
-      <arg value="/c"/>
-      <arg value="tools\get-scala-commit-sha.bat"/>
-      <arg value="-p"/>
-    </exec>
-    <exec osfamily="unix" executable="tools/get-scala-commit-date" outputproperty="git.commit.date" failifexecutionfails="false" />
-    <exec osfamily="windows" executable="cmd.exe" outputproperty="git.commit.date" failifexecutionfails="false">
-      <arg value="/c"/>
-      <arg value="tools\get-scala-commit-date.bat"/>
-      <arg value="-p"/>
-    </exec>
-
-    <!-- some default in case something went wrong getting the revision -->
-    <property name="git.commit.sha"          value="unknown"/>
-    <property name="git.commit.date"         value="unknown"/>
-
     <!-- We use the git describe to determine the OSGi modifier for our build. -->
     <property name="maven.version.number"
       value="${version.major}.${version.minor}.${version.patch}${maven.version.suffix}"/>
@@ -340,14 +433,21 @@ TODO:
       <property name="version.number"        value="${version.major}.${version.minor}.${version.patch}${version.suffix}-${git.commit.date}-${git.commit.sha}"/>
     </else></if>
 
+    <!-- some default in case something went wrong getting the revision -->
+    <property name="version.number"       value="-unknown-"/>
+
     <condition property="has.java6">
         <equals arg1="${ant.java.version}" arg2="1.6"/>
     </condition>
     <condition property="has.java7">
         <equals arg1="${ant.java.version}" arg2="1.7"/>
     </condition>
+    <condition property="has.java8">
+        <equals arg1="${ant.java.version}" arg2="1.8"/>
+    </condition>
     <condition property="has.unsupported.jdk">
        <not><or>
+         <isset property="has.java8" />
          <isset property="has.java7" />
          <isset property="has.java6" />
        </or></not>
@@ -355,15 +455,8 @@ TODO:
 
     <fail if="has.unsupported.jdk" message="JDK ${ant.java.version} is not supported by this build!"/>
 
-    <if><isset property="has.java7"/><then>
-      <echo level="warning"> You are using JDK7 for this build.
-        While this will be able to build most of Scala, it will not build the Swing project.
-        You will be unable to create a distribution.
-      </echo>
-    </then></if>
-
     <!-- Allow this to be overridden simply -->
-    <property name="sbt.latest.version"    value="0.12.2"/>
+    <property name="sbt.latest.version"    value="0.12.4"/>
 
     <property name="sbt.src.dir"           value="${build-sbt.dir}/${sbt.latest.version}/src"/>
     <property name="sbt.lib.dir"           value="${build-sbt.dir}/${sbt.latest.version}/lib"/>
@@ -378,7 +471,7 @@ TODO:
     <property name="scalac.args"           value=""/>
     <property name="javac.args"            value=""/>
 
-    <property name="scalac.args.always"    value="" />
+    <property name="scalac.args.always"    value="-feature" />
     <property name="scalac.args.optimise"  value=""/> <!-- scalac.args.optimise is selectively overridden in certain antcall tasks. -->
     <property name="scalac.args.all"       value="${scalac.args.always} ${scalac.args} ${scalac.args.optimise}"/>
     <property name="scalac.args.locker"    value="${scalac.args.all}"/>
@@ -391,10 +484,6 @@ TODO:
       <format property="short" pattern="yyyyMMddHHmmss"/>
     </tstamp>
 
-    <!-- some default in case something went wrong getting the revision -->
-    <property name="version.number"       value="-unknown-"/>
-    <property name="init.avail"           value="yes"/>
-
     <!-- Local libs (developer use.) -->
     <mkdir dir="${lib-extra.dir}"/>
 
@@ -455,27 +544,146 @@ TODO:
     <echo message="canonical suffix: ${version.suffix}" /> -->
     <fail unless="version.suffixes.consistent" message="Version suffixes inconsistent!"/>
 
+
+    <!-- used during releases to bump versions in versions.properties -->
+    <if><isset property="update.versions"/><then>
+      <echo message="Updating `versions.properties`:"/>
+      <echo message="starr.version                              = ${starr.version}"/>
+      <echo message="scala.binary.version                       = ${scala.binary.version}"/>
+      <echo message="scala.full.version                         = ${scala.full.version}"/>
+      <echo message="scala-xml.version.number                   = ${scala-xml.version.number}"/>
+      <echo message="scala-parser-combinators.version.number    = ${scala-parser-combinators.version.number}"/>
+      <echo message="scala-continuations-plugin.version.number  = ${scala-continuations-plugin.version.number}"/>
+      <echo message="scala-continuations-library.version.number = ${scala-continuations-library.version.number}"/>
+      <echo message="scala-swing.version.number                 = ${scala-swing.version.number}"/>
+      <echo message="akka-actor.version.number                  = ${akka-actor.version.number}"/>
+      <echo message="actors-migration.version.number            = ${actors-migration.version.number}"/>
+      <echo message="partest.version.number                     = ${partest.version.number}"/>
+      <echo message="scalacheck.version.number                  = ${scalacheck.version.number}"/>
+
+      <propertyfile file="versions.properties">
+        <entry key="starr.version"                              value="${starr.version}"/>
+        <entry key="scala.binary.version"                       value="${scala.binary.version}"/>
+        <entry key="scala.full.version"                         value="${scala.full.version}"/>
+        <entry key="scala-xml.version.number"                   value="${scala-xml.version.number}"/>
+        <entry key="scala-parser-combinators.version.number"    value="${scala-parser-combinators.version.number}"/>
+        <entry key="scala-continuations-plugin.version.number"  value="${scala-continuations-plugin.version.number}"/>
+        <entry key="scala-continuations-library.version.number" value="${scala-continuations-library.version.number}"/>
+        <entry key="scala-swing.version.number"                 value="${scala-swing.version.number}"/>
+        <entry key="akka-actor.version.number"                  value="${akka-actor.version.number}"/>
+        <entry key="actors-migration.version.number"            value="${actors-migration.version.number}"/>
+        <entry key="partest.version.number"                     value="${partest.version.number}"/>
+        <entry key="scalacheck.version.number"                  value="${scalacheck.version.number}"/>
+      </propertyfile>
+    </then></if>
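+    <!-- usage sketch: passing -Dupdate.versions=... on the command line (any value; the guard above only
+         checks that the property is set) makes the build rewrite versions.properties with the values
+         echoed above; without it the file is left untouched. -->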
+
     <path id="forkjoin.classpath" path="${build-libs.dir}/classes/forkjoin"/>
-    <path id="fjbg.classpath"     path="${build-libs.dir}/classes/fjbg"/>
     <path id="asm.classpath"      path="${build-asm.dir}/classes"/>
     <property name="forkjoin-classes" refid="forkjoin.classpath"/>
-    <property name="fjbg-classes" refid="fjbg.classpath"/>
     <property name="asm-classes" refid="asm.classpath"/>
 
-    <!-- Compilers to use for the various stages.
+    <!-- the following properties fully define staged-docs, staged-pack, make-bundle, copy-bundle and mvn-package for each of the projects -->
+    <property name="library.description"           value="Scala Standard Library"/>
+    <property name="library.docroot"               value="rootdoc.txt"/>
+    <property name="library.skipPackages"          value="scala.concurrent.impl"/>
+
+    <property name="reflect.description"           value="Scala Reflection Library"/>
+    <property name="reflect.skipPackages"          value="scala.reflect.macros.internal:scala.reflect.internal:scala.reflect.io"/>
+
+    <property name="compiler.description"          value="Scala Compiler"/>
+    <property name="compiler.docroot"              value="rootdoc.txt"/>
+
+    <!-- these are not yet used; preparation for the 'TODO modularize compiler' task -->
+    <property name="interactive.description"       value="Scala Interactive Compiler"   />
+    <property name="interactive.package"           value="modules." />
+    <property name="interactive.name"              value="scala-compiler-interactive"/>
+    <property name="interactive.namesuffix"        value="_${scala.binary.version}"/>
+    <property name="interactive.version"           value="${scala-compiler-interactive.version.number}"/>
+    <property name="interactive.targetjar"         value="scala-compiler-interactive_${scala.binary.version}-${scala-compiler-interactive.version.number}.jar"/>
+
+    <property name="scaladoc.description"          value="Scala Documentation Generator"/>
+    <property name="scaladoc.package"              value="modules." />
+    <property name="scaladoc.name"                 value="scala-compiler-doc"         />
+    <property name="scaladoc.namesuffix"           value="_${scala.binary.version}"/>
+    <property name="scaladoc.version"              value="${scala-compiler-doc.version.number}"/>
+    <property name="scaladoc.targetjar"            value="scala-compiler-doc_${scala.binary.version}-${scala-compiler-doc.version.number}.jar"/>
+
+    <property name="actors.description"            value="Scala Actors Library"/>
+
+    <property name="swing.description"             value="Scala Swing Library"/>
+    <property name="swing.package"                 value="modules."/>
+    <property name="swing.jar"                     value="${scala-swing}"/>
+    <property name="swing.src"                     value="false"/>
+    <property name="swing.srcjar"                  value="${scala-swing-sources}"/>
+
+    <property name="continuations-plugin.description" value="Scala Delimited Continuations Compiler Plugin"/>
+    <property name="continuations-plugin.package"     value="plugins." />
+    <property name="continuations-plugin.jar"         value="${scala-continuations-plugin}"/>
+    <property name="continuations-plugin.src"         value="false"/>
+    <property name="continuations-plugin.srcjar"      value="${scala-continuations-plugin-sources}"/>
+
+    <property name="continuations-library.description" value="Scala Delimited Continuations Library"/>
+    <property name="continuations-library.package"     value="plugins." />
+    <property name="continuations-library.jar"         value="${scala-continuations-library}"/>
+    <property name="continuations-library.src"         value="false"/>
+    <property name="continuations-library.srcjar"      value="${scala-continuations-library-sources}"/>
+
+    <property name="parser-combinators.description" value="Scala Parser Combinators Library"/>
+    <property name="parser-combinators.package"    value="modules."/>
+    <property name="parser-combinators.jar"        value="${scala-parser-combinators}"/>
+    <property name="parser-combinators.src"        value="false"/>
+    <property name="parser-combinators.srcjar"     value="${scala-parser-combinators-sources}"/>
+
+    <property name="xml.description"               value="Scala XML Library"/>
+    <property name="xml.package"                   value="modules."/>
+    <property name="xml.jar"                       value="${scala-xml}"/>
+    <property name="xml.src"                       value="false"/>
+    <property name="xml.srcjar"                    value="${scala-xml-sources}"/>
+
+    <property name="scalap.description"            value="Scala Bytecode Parser"/>
+    <property name="scalap.targetjar"              value="scalap.jar"/>
+
+    <property name="partest.description"           value="Scala Compiler Testing Tool"/>
+    <property name="partest-extras.description"    value="Scala Compiler Testing Tool (compiler-specific extras)"/>
+    <property name="partest-javaagent.description" value="Scala Compiler Testing Tool (compiler-specific java agent)"/>
+
+    <!-- projects without project-specific options: asm, forkjoin, manual, bin, repl -->
+    <for list="actors,compiler,interactive,scaladoc,library,parser-combinators,partest,partest-extras,partest-javaagent,reflect,scalap,swing,xml,continuations-plugin,continuations-library" param="project">
+      <sequential>
+        <!-- description is mandatory -->
+        <init-project-prop project="@{project}" name="package"     default=""/> <!-- used by mvn-package, copy-bundle, make-bundle -->
+        <init-project-prop project="@{project}" name="dir"         default=""/> <!-- used by mvn-package -->
+        <init-project-prop project="@{project}" name="name"        default="scala-@{project}"/> <!-- used for defaults in this block and by mvn-package, copy-bundle, make-bundle -->
+        <init-project-prop project="@{project}" name="namesuffix"  default=""/>
+        <init-project-prop project="@{project}" name="version"     default="${osgi.version.number}"/>
+        <init-project-prop project="@{project}" name="targetdir"   default="lib"/>
+        <init-project-prop project="@{project}" name="targetjar"   default="${@{project}.name}.jar"/>
+        <init-project-prop project="@{project}" name="jar"         default="${build-pack.dir}/${@{project}.targetdir}/${@{project}.targetjar}" />
+        <init-project-prop project="@{project}" name="docroot"     default="NOT SET"/>
+        <init-project-prop project="@{project}" name="skipPackages" default=""/>
+        <init-project-prop project="@{project}" name="srcdir"      default="@{project}"/>
+        <init-project-prop project="@{project}" name="src"         default="true"/>
+        <init-project-prop project="@{project}" name="srcjar"      default="${build-osgi.dir}/${@{project}.name}-src.jar"/>
+      </sequential>
+    </for>
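+    <!-- worked example (assuming init-project-prop only fills in a property when it is not already set):
+         for the "actors" project the loop above yields actors.name = "scala-actors",
+         actors.targetjar = "scala-actors.jar" and actors.jar = "${build-pack.dir}/lib/scala-actors.jar",
+         while projects such as swing keep their explicitly set values (e.g. swing.jar = "${scala-swing}"). -->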
 
+
+    <!-- Compilers to use for the various stages.
       There must be a variable of the shape @{stage}.compiler.path  for all @{stage} in starr, locker, quick, strap.
     -->
-    <path id="starr.compiler.path">
-      <path refid="starr.core.path"/>
-      <pathelement location="${lib.dir}/forkjoin.jar"/>
-      <path refid="aux.libs"/>
-    </path>
+
+    <!-- starr is resolved (to starr.compiler.path) in the block protected by maven-deps-done;
+         the maven task must not be executed twice, or you get a java.lang.ClassCastException:
+         org.apache.maven.artifact.ant.RemoteRepository cannot be cast to org.apache.maven.artifact.ant.Repository
+    -->
 
     <!-- To skip locker, use -Dlocker.skip=1 -->
     <if><isset property="locker.skip"/><then>
       <echo message="Using STARR to build the quick stage (skipping locker)."/>
       <path id="locker.compiler.path" refid="starr.compiler.path"/>
+      <!-- this is cheating (we don't know the classpath used to build starr)
+           but should be close enough: -->
+      <path id="locker.compiler.build.path" refid="starr.compiler.path"/>
       <property name="locker.locked" value="locker skipped"/></then>
     <else>
       <path id="locker.compiler.path"><path refid="locker.compiler.build.path"/></path></else></if>
@@ -489,7 +697,9 @@ TODO:
       There must be a variable of the shape @{stage}.@{project}.build.path
       for all @{stage} in locker, quick, strap
       and all @{project} in library, reflect, compiler
-        when stage is quick, @{project} also includes: actors, swing, plugins, scalacheck, partest, scalap
+        when stage is quick, @{project} also includes: actors, repl, scalap
+
+      NOTE: interactive and scaladoc are only used up to quick; they are still packed into the compiler jar
     -->
 
     <!-- LOCKER -->
@@ -499,31 +709,19 @@ TODO:
       <path refid="aux.libs"/>
     </path>
 
-    <path id="locker.actors.build.path">
-      <path refid="locker.library.build.path"/>
-      <pathelement location="${build-locker.dir}/classes/actors"/>
-    </path>
-
-    <path id="locker.msil.build.path">
-      <path refid="locker.compiler.build.path"/>
-      <pathelement location="${build-locker.dir}/classes/msil"/>
-    </path>
-
     <path id="locker.reflect.build.path">
       <path refid="locker.library.build.path"/>
       <pathelement location="${build-locker.dir}/classes/reflect"/>
     </path>
 
+    <if><not><isset property="locker.skip"/></not><then>
     <path id="locker.compiler.build.path">
       <path refid="locker.reflect.build.path"/>
       <pathelement location="${build-locker.dir}/classes/compiler"/>
-      <pathelement location="${build-locker.dir}/classes/msil"/>
       <path refid="asm.classpath"/>
-      <path refid="fjbg.classpath"/>
-      <pathelement location="${jline.jar}"/>
     </path>
-
-    <path id="locker.msil.build.path" refid="locker.compiler.build.path"/>
+    </then></if>
+    <!-- else, locker.compiler.build.path is set above -->
 
     <!-- QUICK -->
     <path id="quick.library.build.path">
@@ -542,90 +740,81 @@ TODO:
       <pathelement location="${build-quick.dir}/classes/reflect"/>
     </path>
 
-    <path id="quick.msil.build.path">
-      <path refid="quick.compiler.build.path"/>
-      <pathelement location="${build-quick.dir}/classes/msil"/>
-    </path>
-
     <path id="quick.compiler.build.path">
       <path refid="quick.reflect.build.path"/>
       <pathelement location="${build-quick.dir}/classes/compiler"/>
-      <pathelement location="${build-quick.dir}/classes/msil"/>
       <path refid="asm.classpath"/>
-      <path refid="fjbg.classpath"/>
-      <pathelement location="${jline.jar}"/>
     </path>
 
-    <path id="quick.swing.build.path">
-      <path refid="quick.library.build.path"/>
-      <path refid="quick.actors.build.path"/>
-      <pathelement location="${build-quick.dir}/classes/swing"/>
+    <path id="quick.repl.build.path">
+      <path refid="quick.compiler.build.path"/>
+      <pathelement location="${build-quick.dir}/classes/repl"/>
+      <path refid="repl.deps.classpath"/>
     </path>
 
-    <path id="quick.plugins.build.path">
+    <path id="quick.scalap.build.path">
       <path refid="quick.compiler.build.path"/>
-      <pathelement location="${build-quick.dir}/classes/continuations-plugin"/>
+      <pathelement location="${build-quick.dir}/classes/scalap"/>
     </path>
 
-    <path id="quick.scalacheck.build.path">
-      <pathelement location="${build-quick.dir}/classes/library"/>
-      <pathelement location="${build-quick.dir}/classes/actors"/>
-      <pathelement location="${build-quick.dir}/classes/scalacheck"/>
+    <path id="quick.partest-extras.build.path">
+      <path refid="asm.classpath"/>
+      <path refid="partest.classpath"/>
+      <path refid="quick.compiler.build.path"/>
+      <pathelement location="${build-quick.dir}/classes/repl"/>
+      <!-- for the java dependency: Profiler.java -->
+      <pathelement location="${build-quick.dir}/classes/partest-extras"/>
     </path>
 
-    <path id="quick.scalap.build.path">
+    <path id="quick.partest-javaagent.build.path">
+      <path refid="asm.classpath"/>
+    </path>
+
+    <path id="quick.scaladoc.build.path">
       <path refid="quick.compiler.build.path"/>
-      <pathelement location="${build-quick.dir}/classes/scalap"/>
-      <pathelement location="${build-quick.dir}/classes/partest"/>
+      <path refid="partest.classpath"/>
+      <path refid="external-modules-nocore"/>
+      <pathelement location="${build-quick.dir}/classes/scaladoc"/>
     </path>
 
-    <path id="quick.partest.build.path">
-      <path refid="quick.scalap.build.path"/>
-      <path refid="partest.extras.classpath"/>
-      <pathelement location="${scalacheck.jar}"/>
+    <path id="quick.interactive.build.path">
+      <path refid="quick.compiler.build.path"/>
+      <pathelement location="${build-quick.dir}/classes/interactive"/>
     </path>
 
     <path id="quick.bin.tool.path">
-      <pathelement location="${build-quick.dir}/classes/library"/>
-      <pathelement location="${build-quick.dir}/classes/actors"/>
-      <pathelement location="${build-quick.dir}/classes/reflect"/>
-      <pathelement location="${build-quick.dir}/classes/compiler"/>
-      <pathelement location="${build-quick.dir}/classes/msil"/>
+      <path refid="quick.repl.build.path"/>
+      <path refid="quick.actors.build.path"/>
       <pathelement location="${build-quick.dir}/classes/scalap"/>
-      <pathelement location="${build-quick.dir}/classes/continuations-library"/>
-      <pathelement location="${jline.jar}"/>
-      <path refid="asm.classpath"/>
-      <path refid="forkjoin.classpath"/>
-      <path refid="aux.libs"/>
-     </path>
+      <pathelement location="${build-quick.dir}/classes/scaladoc"/>
+      <path refid="external-modules-nocore"/>
+    </path>
 
     <!-- PACK -->
-    <!-- also used for docs.* targets TODO: use separate paths for those -->
     <path id="pack.compiler.path">
-      <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
-      <pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
-      <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
-      <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
-      <pathelement location="${build-pack.dir}/lib/scalap.jar"/>
-      <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
+      <pathelement location="${library.jar}"/>
+      <pathelement location="${reflect.jar}"/>
+      <pathelement location="${compiler.jar}"/>
       <pathelement location="${ant.jar}"/>
-      <pathelement location="${jline.jar}"/>
-      <path refid="partest.extras.classpath"/>
+      <path refid="forkjoin.classpath"/>
       <path refid="aux.libs"/>
     </path>
 
     <path id="pack.bin.tool.path">
-      <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
-      <pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
-      <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
-      <pathelement location="${build-pack.dir}/lib/scalap.jar"/>
-      <pathelement location="${build-pack.dir}/lib/jline.jar"/>
+      <pathelement location="${library.jar}"/>
+      <pathelement location="${xml.jar}"/>
+      <pathelement location="${parser-combinators.jar}"/>
+      <pathelement location="${actors.jar}"/>
+      <pathelement location="${reflect.jar}"/>
+      <pathelement location="${compiler.jar}"/>
+      <!-- TODO modularize compiler: <pathelement location="${scaladoc.jar}"/> -->
+      <pathelement location="${scalap.jar}"/>
+      <path refid="repl.deps.classpath"/>
       <path refid="aux.libs"/>
     </path>
 
     <path id="pack.library.files">
       <fileset dir="${build-quick.dir}/classes/library"/>
-      <fileset dir="${build-quick.dir}/classes/continuations-library"/>
       <fileset dir="${forkjoin-classes}"/>
     </path>
 
@@ -635,29 +824,25 @@ TODO:
 
     <path id="pack.compiler.files">
       <fileset dir="${build-quick.dir}/classes/compiler"/>
-      <fileset dir="${build-quick.dir}/classes/msil"/>
+
+      <!-- TODO modularize compiler. Remove the other class dirs as soon as they become modules -->
+      <fileset dir="${build-quick.dir}/classes/scaladoc"/>
+      <fileset dir="${build-quick.dir}/classes/interactive"/>
+      <fileset dir="${build-quick.dir}/classes/repl"/>
       <fileset dir="${asm-classes}"/>
-      <fileset dir="${fjbg-classes}"/>
     </path>
 
-    <path id="pack.swing.files">      <fileset dir="${build-quick.dir}/classes/swing"/> </path>
+    <!-- TODO modularize compiler.
+    <path id="pack.scaladoc.files">   <fileset dir="${build-quick.dir}/classes/scaladoc"/> </path>
+    <path id="pack.interactive.files"><fileset dir="${build-quick.dir}/classes/interactive"/> </path>
+    -->
+
     <path id="pack.reflect.files">    <fileset dir="${build-quick.dir}/classes/reflect"/> </path>
-    <path id="pack.plugins.files">    <fileset dir="${build-quick.dir}/classes/continuations-plugin"/> </path>
-    <path id="pack.scalacheck.files"> <fileset dir="${build-quick.dir}/classes/scalacheck"/> </path>
     <path id="pack.scalap.files">     <fileset dir="${build-quick.dir}/classes/scalap"/>
                                       <fileset file="${src.dir}/scalap/decoder.properties"/> </path>
 
-    <path id="pack.partest.files">
-      <fileset dir="${build-quick.dir}/classes/partest">
-        <exclude name="scala/tools/partest/javaagent/**"/>
-      </fileset>
-    </path>
-
-    <path id="pack.partest-javaagent.files">
-      <fileset dir="${build-quick.dir}/classes/partest">
-        <include name="scala/tools/partest/javaagent/**"/>
-      </fileset>
-    </path>
+    <path id="pack.partest-extras.files"> <fileset dir="${build-quick.dir}/classes/partest-extras"/> </path>
+    <path id="pack.partest-javaagent.files"> <fileset dir="${build-quick.dir}/classes/partest-javaagent"/> </path>
 
     <!-- STRAP -->
     <path id="strap.library.build.path">
@@ -666,11 +851,6 @@ TODO:
       <path refid="aux.libs"/>
     </path>
 
-    <path id="strap.msil.build.path">
-      <path refid="strap.compiler.build.path"/>
-      <pathelement location="${build-strap.dir}/classes/msil"/>
-    </path>
-
     <path id="strap.reflect.build.path">
       <path refid="strap.library.build.path"/>
       <pathelement location="${build-strap.dir}/classes/reflect"/>
@@ -679,37 +859,115 @@ TODO:
     <path id="strap.compiler.build.path">
       <path refid="strap.reflect.build.path"/>
       <pathelement location="${build-strap.dir}/classes/compiler"/>
-      <pathelement location="${build-strap.dir}/classes/msil"/>
       <path refid="asm.classpath"/>
-      <path refid="fjbg.classpath"/>
-      <pathelement location="${jline.jar}"/>
+    </path>
+
+    <!-- DOCS -->
+    <path id="docs.library.build.path">               <path refid="quick.library.build.path"/>  </path>
+    <path id="docs.reflect.build.path">               <path refid="quick.reflect.build.path"/>  </path>
+    <path id="docs.compiler.build.path">              <path refid="quick.compiler.build.path"/> </path>
+    <path id="docs.scaladoc.build.path">              <path refid="quick.scaladoc.build.path"/> </path>
+    <path id="docs.interactive.build.path">           <path refid="quick.interactive.build.path"/> </path>
+    <path id="docs.scalap.build.path">                <path refid="quick.scalap.build.path"/>   </path>
+    <path id="docs.actors.build.path">                <path refid="quick.actors.build.path"/>   </path>
+
+    <!-- run-time classpath for scaladoc. TODO: resolve through maven -->
+    <path id="scaladoc.classpath">
+      <path refid="external-modules-nocore"/>
+      <pathelement location="${library.jar}"/>
+      <pathelement location="${reflect.jar}"/>
+      <pathelement location="${compiler.jar}"/>
+
+      <!-- TODO modularize compiler
+      <pathelement location="${interactive.jar}"/>
+      <pathelement location="${scaladoc.jar}"/>
+      -->
+
+      <pathelement location="${ant.jar}"/>
+      <path refid="aux.libs"/>
+    </path>
+
+    <path id="manual.build.path">
+      <path refid="external-modules-nocore"/> <!-- xml -->
+      <pathelement location="${library.jar}"/>
+      <pathelement location="${build.dir}/manmaker/classes"/>
+      <path refid="aux.libs"/>  <!-- for ant -->
     </path>
 
     <!-- MISC -->
     <path id="sbt.compile.build.path">
-      <path refid="quick.compiler.build.path"/>
+      <path refid="scaladoc.classpath"/>
+      <!-- TODO modularize compiler: bring back when repl leaves compiler jar
+        <pathelement location="${build-quick.dir}/classes/repl"/>
+      -->
       <pathelement location="${sbt.interface.jar}"/>
     </path>
 
-    <path id="manual.classpath">
-      <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
-      <pathelement location="${build.dir}/manmaker/classes"/>
-    </path>
 
-    <path id="partest.classpath">
-      <path refid="pack.compiler.path"/>
-      <path refid="partest.extras.classpath"/>
+    <!--
+       This is the classpath used to run partest, which is what it uses to run the compiler and find other required jars.
+       "What's on the compiler's compilation path when compiling partest tests," you ask?
+        Why, the compiler we're testing, of course, and partest with all its dependencies.
+    -->
+    <path id="partest.compilation.path">
+      <path refid="partest.compilation.path.core"/>
+      <path refid="partest.compilation.path.noncore"/>
     </path>
+    <path id="partest.compilation.path.core">
+      <pathelement location="${library.jar}"/>
+      <pathelement location="${reflect.jar}"/>
+      <pathelement location="${compiler.jar}"/>
+    </path>
+    <path id="partest.compilation.path.noncore">
+
+      <!-- TODO modularize compiler
+      <pathelement location="${scaladoc.jar}"/>
+      <pathelement location="${interactive.jar}"/>
+      -->
 
-    <path id="partest.build.path">
-      <path refid="pack.compiler.path"/>
+      <!-- TODO: move scalap & actors out of repo -->
+      <pathelement location="${scalap.jar}"/>
+      <pathelement location="${actors.jar}"/>
+
+      <!-- partest marks most of its dependencies as provided
+           (but not scala-library, so we filter that one out below),
+           so we supply them ourselves: scala-[library/reflect/compiler] and scalap built here,
+           scala-xml and scala-parser-combinators via external-modules-nocore,
+           scalacheck as part of `partest.classpath` -->
+      <restrict>
+        <path refid="partest.classpath"/>
+        <rsel:not><rsel:or>
+          <rsel:name name="scala-library*.jar"/>
+        </rsel:or></rsel:not>
+      </restrict>
+      <pathelement location="${scala-xml}"/>
+      <pathelement location="${scala-parser-combinators}"/>
+      <!-- <pathelement location="${scala-swing}"/> -->
+
+      <restrict>
+        <path refid="scalacheck.classpath"/>
+        <rsel:not><rsel:or>
+          <rsel:name name="scala-library*.jar"/>
+          <rsel:name name="scala-compiler*.jar"/>
+          <rsel:name name="scala-reflect*.jar"/>
+          <rsel:name name="scala-actors*.jar"/>
+          <rsel:name name="scala-parser-combinators*.jar"/>
+          <rsel:name name="scala-xml*.jar"/>
+        </rsel:or></rsel:not>
+      </restrict>
+
+      <!-- partest classes specific to the core compiler build -->
+      <pathelement location="${partest-extras.jar}"/>
+      <pathelement location="${partest-javaagent.jar}"/>
+
+      <!-- sneaky extras used in tests -->
       <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
-      <pathelement location="${pack.dir}/lib/scala-swing.jar"/> <!-- TODO - segregate swing tests (there can't be many) -->
     </path>
 
     <path id="test.junit.compiler.build.path">
       <pathelement location="${test.junit.classes}"/>
       <path refid="quick.compiler.build.path"/>
+      <path refid="quick.repl.build.path"/>
       <path refid="junit.classpath"/>
     </path>
 
@@ -723,20 +981,9 @@ TODO:
       <path refid="forkjoin.classpath"/>
     </path>
 
-    <path id="palo.bin.tool.path">
-      <pathelement location="${build-palo.dir}/lib/scala-library.jar"/>
-      <pathelement location="${build-palo.dir}/lib/scala-reflect.jar"/>
-      <pathelement location="${build-palo.dir}/lib/scala-compiler.jar"/>
-      <pathelement location="${build-palo.dir}/lib/jline.jar"/>
-    </path>
-
     <path id="test.positions.sub.build.path" path="${build-quick.dir}/classes/library"/>
 
     <!-- TODO: consolidate *.includes -->
-    <patternset id="partest.includes">
-      <include name="**/*.xml"/>
-    </patternset>
-
     <patternset id="lib.includes">
       <include name="**/*.tmpl"/>
       <include name="**/*.xml"/>
@@ -773,381 +1020,57 @@ TODO:
 =============================================================================-->
   <target name="libs.clean">                        <clean build="libs"/> <clean build="asm"/>                            </target>
   <target name="quick.clean"  depends="libs.clean">   <clean build="quick"/> <clean build="pack"/> <clean build="strap"/> </target>
-  <target name="palo.clean"   depends="quick.clean">    <clean build="palo"/>                                             </target>
-  <target name="locker.clean" depends="palo.clean">       <clean build="locker"/>                                         </target>
+  <target name="locker.clean" depends="quick.clean">    <clean build="locker"/>                                           </target>
 
   <target name="docs.clean"> <clean build="docs"/> <delete dir="${build.dir}/manmaker" includeemptydirs="yes" quiet="yes" failonerror="no"/> </target>
   <target name="dist.clean"> <delete dir="${dists.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/> </target>
 
-  <target name="all.clean" depends="locker.clean, docs.clean"> <clean build="sbt"/> <clean build="osgi"/> </target>
+  <target name="junit.clean"> <clean build="junit"/> </target>
+
+  <target name="all.clean" depends="locker.clean, docs.clean, junit.clean">
+    <clean build="sbt"/> <clean build="osgi"/>
+  </target>
 
   <!-- Used by the scala-installer script -->
   <target name="allallclean" depends="all.clean, dist.clean"/>
 
-  <macrodef name="clean">
-    <attribute name="build"/>
-    <sequential>
-      <delete dir="${build-@{build}.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
-    </sequential>
-  </macrodef>
-
 <!-- ===========================================================================
                                   LOCAL DEPENDENCIES
 ============================================================================ -->
-  <macrodef name="simple-javac" >
-    <attribute name="project"/>  <!-- project: fjbg/asm/forkjoin -->
-    <attribute name="args" default=""/>
-    <attribute name="jar"  default="yes"/>
-    <sequential>
-      <uptodate property="@{project}.available" targetfile="${build-libs.dir}/@{project}.complete">
-        <srcfiles dir="${src.dir}/@{project}"/></uptodate>
-      <if><not><isset property="@{project}.available"/></not><then>
-        <stopwatch name="@{project}.timer"/>
-        <mkdir dir="${@{project}-classes}"/>
-        <javac
-          debug="true"
-          srcdir="${src.dir}/@{project}"
-          destdir="${@{project}-classes}"
-          classpath="${@{project}-classes}"
-          includes="**/*.java"
-          target="1.6" source="1.5"
-          compiler="javac1.6">
-            <compilerarg line="${javac.args} @{args}"/>
-        </javac>
-        <if><equals arg1="@{jar}" arg2="yes"/><then>
-          <jar whenmanifestonly="fail" destfile="${build-libs.dir}/@{project}.jar" basedir="${@{project}-classes}"/></then></if>
-        <stopwatch name="@{project}.timer" action="total"/>
-        <mkdir dir="${build-libs.dir}"/>
-        <touch file="${build-libs.dir}/@{project}.complete" verbose="no"/>
-      </then></if>
-    </sequential>
-  </macrodef>
 
   <target name="asm.done"      depends="init"> <simple-javac project="asm" jar="no"/> </target>
-  <target name="fjbg.done"     depends="init"> <simple-javac project="fjbg"/> </target>
   <target name="forkjoin.done" depends="init"> <simple-javac project="forkjoin" args="-XDignore.symbol.file"/></target>
 
-<!-- ===========================================================================
-                                  STAGED COMPILATION MACROS
-============================================================================ -->
-  <macrodef name="staged-javac" >
-    <attribute name="stage"/>    <!-- current stage (locker, quick, strap) -->
-    <attribute name="project"/>  <!-- project: library/reflect/compiler/actors -->
-    <attribute name="destproject" default="@{project}"/> <!-- overrides the output directory; used when building multiple projects into the same directory-->
-    <attribute name="args" default=""/>
-    <attribute name="excludes" default=""/>
-
-    <sequential>
-      <javac
-        debug="true"
-        srcdir="${src.dir}/@{project}"
-        destdir="${build-@{stage}.dir}/classes/@{destproject}"
-        includes="**/*.java"
-        excludes="@{excludes}"
-        target="1.6" source="1.5">
-          <compilerarg line="${javac.args} @{args}"/>
-          <classpath refid="@{stage}.@{destproject}.build.path"/>
-      </javac>
-    </sequential>
-  </macrodef>
-
-  <!-- Zinc assumes a one-to-one correspondence of output folder to set of source files.
-   When compiling different sets of source files in multiple compilations to the same output directory,
-   Zinc thinks source files that appeared in an earlier compilation but are absent in the current one,
-   were deleted and thus deletes the corresponding output files.
-
-   Note that zinc also requires each arg to scalac to be prefixed by -S.
+  <!-- For local development only (the regular build only uses released versions of Scala for STARR).
+    This builds quick (core only) and publishes it with a generated version number,
+    saving it as starr.version in build.properties, so that compiler will be used as STARR in your next build.
+    NOTES:
+      - to speed things up, you can also pass -Dlocker.skip=1
   -->
-  <macrodef name="zinc">
-    <attribute name="compilerpathref"   />
-    <attribute name="destdir"           />
-    <attribute name="srcdir"            />
-    <attribute name="srcpath" default="NOT SET"/>        <!-- needed to compile the library, "NOT SET" is just a convention to denote an optional attribute -->
-    <attribute name="buildpathref"      />
-    <attribute name="params" default="" />
-    <attribute name="java-excludes" default=""/>
-
-    <sequential>
-      <local name="sources"/>
-      <pathconvert pathsep=" " property="sources">
-        <fileset dir="@{srcdir}">
-          <include name="**/*.java"/>
-          <include name="**/*.scala"/>
-          <exclude name="@{java-excludes}"/>
-        </fileset>
-      </pathconvert>
-      <local name="args"/>
-      <local name="sargs"/>
-      <if><not><equals arg1="@{srcpath}" arg2="NOT SET"/></not><then>
-        <property name="args" value="@{params} -sourcepath @{srcpath}"/>
-      </then></if>
-      <property name="args" value="@{params}"/> <!-- default -->
-
-      <!-- HACK: prefix scalac args by -S -->
-      <script language="javascript">
-        project.setProperty("sargs", project.getProperty("args").trim().replaceAll(" ", " -S"));
-      </script>
-
-      <exec osfamily="unix" executable="tools/zinc" failifexecutionfails="true" failonerror="true">
-        <arg line="-nailed -compile-order JavaThenScala -scala-path ${ant.refid:@{compilerpathref}} -d @{destdir} -classpath ${toString:@{buildpathref}} ${sargs} ${sources}"/>
-      </exec>
-    </sequential>
-  </macrodef>
-
-  <macrodef name="staged-scalac" >
-    <attribute name="with"/>           <!-- will use path `@{with}.compiler.path` to locate scalac -->
-    <attribute name="stage"/>          <!-- current stage (locker, quick, strap) -->
-    <attribute name="project"/>        <!-- project: library/reflect/compiler/actors -->
-    <attribute name="srcpath" default="NOT SET"/>        <!-- needed to compile the library -->
-    <attribute name="args" default=""/>                  <!-- additional args -->
-    <attribute name="destproject" default="@{project}"/> <!-- overrides the output directory; used when building multiple projects into the same directory-->
-    <attribute name="srcdir" default="@{project}"/>
-    <attribute name="java-excludes" default=""/>
-
-    <sequential>
-      <!-- TODO: detect zinc anywhere on PATH
-           use zinc for the quick stage if it's available;
-           would use it for locker but something is iffy in sbt: get a class cast error on global phase -->
-      <if><and> <available file="tools/zinc"/>
-                <equals arg1="@{stage}" arg2="quick"/>
-                <not><equals arg1="@{project}" arg2="plugins"/></not> <!-- doesn't work in zinc because it requires the quick compiler, which isn't jarred up-->
-          </and><then>
-        <zinc taskname="Z.@{stage}.@{project}"
-          compilerpathref="@{with}.compiler.path"
-          destdir="${build-@{stage}.dir}/classes/@{destproject}"
-          srcdir="${src.dir}/@{srcdir}"
-          srcpath="@{srcpath}"
-          buildpathref="@{stage}.@{project}.build.path"
-          params="${scalac.args.@{stage}} @{args}"
-          java-excludes="@{java-excludes}"/></then>
-      <else>
-      <if><equals arg1="@{srcpath}" arg2="NOT SET"/><then>
-        <scalacfork taskname="@{stage}.@{project}"
-          jvmargs="${scalacfork.jvmargs}"
-          compilerpathref="@{with}.compiler.path"
-          destdir="${build-@{stage}.dir}/classes/@{destproject}"
-          srcdir="${src.dir}/@{srcdir}"
-          params="${scalac.args.@{stage}} @{args}">
-          <include name="**/*.scala"/>
-          <compilationpath refid="@{stage}.@{project}.build.path"/></scalacfork></then>
-      <else>
-        <scalacfork taskname="@{stage}.@{project}"
-          jvmargs="${scalacfork.jvmargs}"
-          compilerpathref="@{with}.compiler.path"
-          destdir="${build-@{stage}.dir}/classes/@{destproject}"
-          srcdir="${src.dir}/@{srcdir}"
-          srcpath="@{srcpath}"
-          params="${scalac.args.@{stage}} @{args}">
-          <include name="**/*.scala"/>
-          <compilationpath refid="@{stage}.@{project}.build.path"/></scalacfork></else>
-      </if>
-      </else></if>
-    </sequential>
-  </macrodef>
-
-  <macrodef name="staged-uptodate" >
-    <attribute name="stage"/>
-    <attribute name="project"/>
-    <element name="check"/>
-    <element name="do"/>
-
-    <sequential>
-      <uptodate property="@{stage}.@{project}.available" targetfile="${build-@{stage}.dir}/@{project}.complete">
-        <check/>
-      </uptodate>
-      <if><not><isset property="@{stage}.@{project}.available"/></not><then>
-        <do/>
-        <touch file="${build-@{stage}.dir}/@{project}.complete" verbose="no"/>
-      </then></if>
-    </sequential>
-  </macrodef>
-
-  <macrodef name="staged-build" >
-    <attribute name="with"/>           <!-- will use path `@{with}.compiler.path` to locate scalac -->
-    <attribute name="stage"/>          <!-- current stage (locker, quick, strap) -->
-    <attribute name="project"/>        <!-- project: library/reflect/compiler/actors -->
-    <attribute name="srcpath"       default="NOT SET"/>        <!-- needed to compile the library -->
-    <attribute name="args"          default=""/>                  <!-- additional args -->
-    <attribute name="includes"      default="comp.includes"/>
-    <attribute name="java-excludes" default=""/>
-    <attribute name="version"       default=""/> <!-- non-empty for partest and scaladoc: use @{version}.version.number in property file-->
-
-    <sequential>
-      <staged-uptodate stage="@{stage}" project="@{project}">
-        <check><srcfiles dir="${src.dir}/@{project}"/></check>
-        <do>
-          <stopwatch name="@{stage}.@{project}.timer"/>
-          <mkdir dir="${build-@{stage}.dir}/classes/@{project}"/>
-          <staged-javac stage="@{stage}" project="@{project}" excludes="@{java-excludes}"/> <!-- always compile with javac for simplicity and regularity; it's cheap -->
-          <staged-scalac with="@{with}" stage="@{stage}" project="@{project}" srcpath="@{srcpath}" args="@{args}" java-excludes="@{java-excludes}"/>
-          <if><equals arg1="@{version}" arg2=""/><then>
-            <propertyfile file = "${build-@{stage}.dir}/classes/@{project}/@{project}.properties">
-              <entry key       = "version.number" value="${version.number}"/>
-              <entry key       = "maven.version.number" value="${maven.version.number}"/>
-              <entry key       = "osgi.version.number" value="${osgi.version.number}"/>
-              <entry key       = "copyright.string" value="${copyright.string}"/>
-            </propertyfile>
-          </then><else>
-            <propertyfile file = "${build-@{stage}.dir}/classes/@{project}/@{project}.properties">
-              <entry key       = "version.number" value="${@{version}.version.number}"/>
-              <entry key       = "copyright.string" value="${copyright.string}"/>
-            </propertyfile>
-          </else></if>
-          <copy todir="${build-@{stage}.dir}/classes/@{project}">
-            <fileset dir="${src.dir}/@{project}">
-              <patternset refid="@{includes}"/>
-            </fileset>
-          </copy>
-          <stopwatch name="@{stage}.@{project}.timer" action="total"/>
-        </do>
-      </staged-uptodate>
-    </sequential>
-  </macrodef>
-
-  <macrodef name="staged-bin">
-    <attribute name="stage"/>
-    <attribute name="classpathref" default="NOT SET"/>
-    <sequential>
-      <staged-uptodate stage="@{stage}" project="bin">
-        <check>
-          <srcfiles dir="${src.dir}">
-            <include name="compiler/scala/tools/ant/templates/**"/>
-          </srcfiles>
-        </check>
-        <do>
-          <taskdef name="mk-bin" classname="scala.tools.ant.ScalaTool" classpathref="@{stage}.bin.tool.path"/>
-          <mkdir dir="${build-@{stage}.dir}/bin"/>
-          <if><equals arg1="@{classpathref}" arg2="NOT SET"/><then>
-            <mk-bin file="${build-@{stage}.dir}/bin/scala"    class="scala.tools.nsc.MainGenericRunner" javaFlags="${java.flags}"/>
-            <mk-bin file="${build-@{stage}.dir}/bin/scalac"   class="scala.tools.nsc.Main"              javaFlags="${java.flags}"/>
-            <mk-bin file="${build-@{stage}.dir}/bin/scaladoc" class="scala.tools.nsc.ScalaDoc"          javaFlags="${java.flags}"/>
-            <mk-bin file="${build-@{stage}.dir}/bin/fsc"      class="scala.tools.nsc.CompileClient"     javaFlags="${java.flags}"/>
-            <mk-bin file="${build-@{stage}.dir}/bin/scalap"   class="scala.tools.scalap.Main"           javaFlags="${java.flags}"/>
-          </then><else>
-            <mk-bin file="${build-@{stage}.dir}/bin/scala"    class="scala.tools.nsc.MainGenericRunner" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
-            <mk-bin file="${build-@{stage}.dir}/bin/scalac"   class="scala.tools.nsc.Main"              javaFlags="${java.flags}" classpathref="@{classpathref}"/>
-            <mk-bin file="${build-@{stage}.dir}/bin/scaladoc" class="scala.tools.nsc.ScalaDoc"          javaFlags="${java.flags}" classpathref="@{classpathref}"/>
-            <mk-bin file="${build-@{stage}.dir}/bin/fsc"      class="scala.tools.nsc.CompileClient"     javaFlags="${java.flags}" classpathref="@{classpathref}"/>
-            <mk-bin file="${build-@{stage}.dir}/bin/scalap"   class="scala.tools.scalap.Main"           javaFlags="${java.flags}" classpathref="@{classpathref}"/>
-          </else></if>
-          <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scala"/>
-          <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scalac"/>
-          <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scaladoc"/>
-          <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/fsc"/>
-          <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scalap"/>
-        </do>
-      </staged-uptodate>
-    </sequential>
-  </macrodef>
-
-  <macrodef name="staged-pack">
-    <attribute name="project"/>
-    <attribute name="targetdir" default="lib"/>
-    <attribute name="targetjar" default="scala-@{project}.jar"/>
-    <attribute name="destfile"  default="${build-pack.dir}/@{targetdir}/@{targetjar}"/>
-    <attribute name="manifest"  default=""/>
-    <element name="pre"         optional="true"/>
-    <element name="jar-opts"    optional="true"/>
-
-    <sequential>
-      <uptodate property="pack.@{project}.available" targetfile="@{destfile}">
-        <srcresources>
-          <resources refid="pack.@{project}.files"/>
-          <!-- <path><pathelement location="${build-quick.dir}/@{project}.complete"/></path> -->
-        </srcresources>
-      </uptodate>
-      <if><not><isset property="pack.@{project}.available"/></not><then>
-        <mkdir dir="${build-pack.dir}/@{targetdir}"/>
-        <pre/>
-
-        <if><not><equals arg1="@{manifest}" arg2=""/></not><then>
-          <jar whenmanifestonly="fail" destfile="@{destfile}" manifest="@{manifest}"> <!-- update="true" makes no difference on my machine, so starting from scratch-->
-            <jar-opts/>
-            <path refid="pack.@{project}.files"/>
-          </jar></then>
-        <else>
-          <jar whenmanifestonly="fail" destfile="@{destfile}">
-            <jar-opts/>
-            <path refid="pack.@{project}.files"/>
-          </jar>
-        </else></if>
-      </then></if>
-    </sequential>
-  </macrodef>
-
-  <macrodef name="staged-docs">
-    <attribute name="project"/>
-    <attribute name="dir" default="@{project}"/>
-    <attribute name="title"/>
-    <attribute name="docroot" default="NOT SET"/>
-    <element name="includes" implicit="true"/>
-
-    <sequential>
-      <staged-uptodate stage="docs" project="@{project}">
-        <check><srcfiles dir="${src.dir}/@{dir}"/></check>
-        <do>
-          <stopwatch name="docs.@{project}.timer"/>
-          <mkdir dir="${build-docs.dir}/@{project}"/>
-          <if><equals arg1="@{docroot}" arg2="NOT SET"/><then>
-            <!-- TODO: introduce docs.@{project}.build.path for classpathref -->
-            <scaladoc
-              destdir="${build-docs.dir}/@{project}"
-              doctitle="@{title}"
-              docversion="${version.number}"
-              sourcepath="${src.dir}"
-              classpathref="pack.compiler.path"
-              srcdir="${src.dir}/@{dir}"
-              addparams="${scalac.args.all}"
-              implicits="on"
-              diagrams="on"
-              groups="on"
-              rawOutput="${scaladoc.raw.output}"
-              noPrefixes="${scaladoc.no.prefixes}">
-              <includes/>
-            </scaladoc>
-          </then><else>
-            <scaladoc
-              destdir="${build-docs.dir}/@{project}"
-              doctitle="@{title}"
-              docversion="${version.number}"
-              sourcepath="${src.dir}"
-              classpathref="pack.compiler.path"
-              srcdir="${src.dir}/@{dir}"
-              docRootContent="${src.dir}/@{project}/@{docroot}"
-              addparams="${scalac.args.all}"
-              implicits="on"
-              diagrams="on"
-              groups="on"
-              rawOutput="${scaladoc.raw.output}"
-              noPrefixes="${scaladoc.no.prefixes}">
-              <includes/>
-            </scaladoc>
-          </else></if>
-          <stopwatch name="docs.@{project}.timer" action="total"/>
-        </do>
-      </staged-uptodate>
-    </sequential>
-  </macrodef>
+  <target name="replacestarr" depends="init.git" description="Produces a new STARR from current sources. Publishes core locally with a generated version number,
+    stored in build.properties as starr.version (overriding the one in versions.properties).">
+    <antcall target="publish-core-local">
+      <param name="maven.version.suffix" value="-STARR-${git.commit.sha}-SNAPSHOT"/>
+      <param name="docs.skip" value="1"/>
+      <param name="scalac.args.optimise" value="-optimise"/>
+      <param name="update.starr.version" value="alright then"/>
+    </antcall>
+  </target>
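A typical way to exercise this target from the command line might look as follows (the version suffix below is illustrative; the actual value embeds the current git commit sha via ${git.commit.sha}):

    $ ant replacestarr
    # publishes scala-library, scala-reflect and scala-compiler to the local
    # maven repository as e.g. 2.11.0-STARR-abc1234-SNAPSHOT and records that
    # version as starr.version in build.properties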
+
 
 <!-- ===========================================================================
                                   LOCAL REFERENCE BUILD (LOCKER)
 ============================================================================ -->
-  <target name="locker.start"  depends="asm.done, forkjoin.done, fjbg.done">
+  <target name="locker.start"  depends="asm.done, forkjoin.done">
     <condition property="locker.locked"><available file="${build-locker.dir}/locker.locked"/></condition></target>
 
   <target name="locker.lib"    depends="locker.start"     unless="locker.locked">
     <staged-build with="starr" stage="locker" project="library" srcpath="${src.dir}/library" includes="lib.includes"/></target>
 
-  <target name="locker.actors" depends="locker.lib"       unless="locker.locked">
-    <staged-build with="starr" stage="locker" project="actors"/> </target>
-
-  <target name="locker.msil"   depends="locker.lib"       unless="locker.locked">
-    <staged-build with="starr" stage="locker" project="msil" java-excludes="**/tests/**"/> </target>
-
   <target name="locker.reflect" depends="locker.lib"      unless="locker.locked">
     <staged-build with="starr" stage="locker" project="reflect"/></target>
 
-  <target name="locker.comp"    depends="locker.reflect, locker.msil" unless="locker.locked">
+  <target name="locker.comp"    depends="locker.reflect" unless="locker.locked">
     <staged-build with="starr" stage="locker" project="compiler"/></target>
 
   <target name="locker.done"    depends="locker.comp">
@@ -1165,64 +1088,32 @@ TODO:
   <target name="quick.lib"        depends="quick.start">
     <staged-build with="locker"   stage="quick" project="library" srcpath="${src.dir}/library" includes="lib.rootdoc.includes"/></target>
 
-  <target name="quick.actors"     depends="quick.lib">
-    <staged-build with="locker"   stage="quick" project="actors"/> </target>
-
-  <target name="quick.msil"       depends="quick.lib">
-    <staged-build with="locker"   stage="quick" project="msil" java-excludes="**/tests/**"/> </target>
-
   <target name="quick.reflect"    depends="quick.lib">
     <staged-build with="locker"   stage="quick" project="reflect"/> </target>
 
-  <target name="quick.comp"       depends="quick.reflect, quick.msil">
+  <target name="quick.comp"       depends="quick.reflect">
     <staged-build with="locker"   stage="quick" project="compiler"/> </target>
 
-  <target name="quick.scalacheck" depends="quick.actors, quick.lib">
-    <staged-build with="locker"   stage="quick" project="scalacheck" args="-nowarn"/> </target>
+  <target name="quick.repl"       depends="quick.comp">
+    <staged-build with="locker"   stage="quick" project="repl"/> </target>
 
-  <target name="quick.scalap"     depends="quick.comp">
+  <target name="quick.scaladoc"   depends="quick.comp">
+    <staged-build with="locker"   stage="quick" project="scaladoc" version="scaladoc"/> </target>
+
+  <target name="quick.interactive" depends="quick.comp, quick.scaladoc">
+    <staged-build with="locker"    stage="quick" project="interactive"/> </target>
+
+  <target name="quick.scalap"     depends="quick.repl">
     <staged-build with="locker"   stage="quick" project="scalap"/> </target>
 
-  <target name="quick.partest"    depends="quick.scalap, quick.comp, asm.done">
-    <staged-build with="locker"   stage="quick" project="partest" version="partest"/> </target>
+  <target name="quick.actors"     depends="quick.lib">
+    <staged-build with="locker"   stage="quick" project="actors"/> </target>
 
-  <target name="quick.swing"      depends="quick.actors, quick.lib" if="has.java6">
-    <staged-build with="locker"   stage="quick" project="swing"/> </target>
 
-  <target name="quick.plugins"    depends="quick.comp">
-    <staged-uptodate              stage="quick" project="plugins">
-      <check><srcfiles dir="${src.dir}/continuations"/></check>
-      <do>
-        <stopwatch name="quick.plugins.timer"/>
-
-        <mkdir dir="${build-quick.dir}/classes/continuations-plugin"/>
-        <staged-scalac with="locker" stage="quick" project="plugins" srcdir="continuations/plugin" destproject="continuations-plugin"/>
-        <copy
-           file="${src.dir}/continuations/plugin/scalac-plugin.xml"
-           todir="${build-quick.dir}/classes/continuations-plugin"/>
-
-        <!-- not very nice to create jar here but needed to load plugin -->
-        <mkdir dir="${build-quick.dir}/misc/scala-devel/plugins"/>
-        <jar whenmanifestonly="fail" destfile="${build-quick.dir}/misc/scala-devel/plugins/continuations.jar" basedir="${build-quick.dir}/classes/continuations-plugin"/>
-
-        <!-- might split off library part into its own ant target -->
-        <mkdir dir="${build-quick.dir}/classes/continuations-library"/>
-        <!-- TODO: must build with quick to avoid
-        [quick.plugins] error: java.lang.NoClassDefFoundError: scala/tools/nsc/transform/patmat/PatternMatching
-        [quick.plugins] 	at scala.tools.selectivecps.SelectiveCPSTransform.newTransformer(SelectiveCPSTransform.scala:29)
-
-        WHY OH WHY!? scala/tools/nsc/transform/patmat/PatternMatching should be on locker.compiler.path
-        -->
-        <staged-scalac with="quick" stage="quick" project="plugins"
-                       srcdir="continuations/library" destproject="continuations-library"
-                       args="-Xplugin-require:continuations -P:continuations:enable -Xpluginsdir ${build-quick.dir}/misc/scala-devel/plugins"/>
-
-        <stopwatch name="quick.plugins.timer" action="total"/>
-      </do>
-    </staged-uptodate>
-  </target>
 
-  <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.scalacheck, quick.scalap, quick.swing, quick.plugins, quick.partest">
+  <target name="quick.modules" depends="quick.repl, quick.scaladoc, quick.interactive, quick.scalap"/>
+
+  <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.modules">
     <staged-bin stage="quick" classpathref="quick.bin.tool.path"/>
   </target>
 
@@ -1233,17 +1124,18 @@ TODO:
 <!-- ===========================================================================
                                   PACKED QUICK BUILD (PACK)
 ============================================================================ -->
-  <target name="pack.lib"    depends="quick.lib, quick.plugins, forkjoin.done">
-    <staged-pack project="library"/></target>
+  <target name="pack.lib"    depends="quick.lib, forkjoin.done"> <staged-pack project="library"/></target>
 
-  <target name="pack.actors"  depends="quick.lib">                <staged-pack project="actors"/> </target>
-  <target name="pack.swing" if="has.java6" depends="quick.swing"> <staged-pack project="swing"/> </target>
-  <target name="pack.reflect" depends="quick.reflect">            <staged-pack project="reflect"/> </target>
+  <target name="pack.reflect" depends="quick.reflect"> <staged-pack project="reflect"/> </target>
 
-  <target name="pack.comp"    depends="quick.comp, asm.done">
+  <!-- TODO modularize compiler. Remove other quick targets when they become modules. -->
+  <target name="pack.comp"    depends="quick.comp, quick.scaladoc, quick.interactive, quick.repl, asm.done">
     <staged-pack project="compiler" manifest="${build-pack.dir}/META-INF/MANIFEST.MF">
       <pre> <!-- TODO the files copied here do not influence actuality of this target (nor does the manifest) -->
-        <copy file="${jline.jar}" toDir="${build-pack.dir}/lib"/>
+        <copy todir="${build-pack.dir}/lib">
+          <resources refid="repl.fileset"/>
+          <mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper" from="${repl.deps.versions}" to="flatten"/>
+        </copy>
         <copy todir="${build-pack.dir}/lib">
           <fileset dir="${lib-extra.dir}">
             <include name="**/*.jar"/>
@@ -1256,49 +1148,60 @@ TODO:
           <attribute name="Class-Path" value="scala-reflect.jar scala-library.jar"/>
         </manifest>
       </pre>
-      <!-- script api is 2.11-only so far
+      <!-- JSR-223 support introduced in 2.11 -->
       <jar-opts>
         <service type="javax.script.ScriptEngineFactory" provider="scala.tools.nsc.interpreter.IMain$Factory"/>
       </jar-opts>
-      -->
     </staged-pack>
   </target>
 
-  <target name="pack.plugins"    depends="quick.plugins">    <staged-pack project="plugins"    targetdir="misc/scala-devel/plugins" targetjar="continuations.jar"/> </target>
-  <target name="pack.scalacheck" depends="quick.scalacheck"> <staged-pack project="scalacheck" targetjar="scalacheck.jar"/> </target>
+  <!-- TODO modularize compiler. These targets are currently not used.
+  <target name="pack.scaladoc" depends="quick.scaladoc">       <staged-pack project="scaladoc"/> </target>
+  <target name="pack.interactive" depends="quick.interactive"> <staged-pack project="interactive"/> </target>
+  -->
 
-  <target name="pack.partest" depends="quick.partest">
-    <staged-pack project="partest"/>
-     <!-- TODO the manifest should influence actuality of this target -->
-    <staged-pack project="partest-javaagent" manifest="${src.dir}/partest/scala/tools/partest/javaagent/MANIFEST.MF"/>
-  </target>
+  <target name="pack.actors" depends="quick.actors"> <staged-pack project="actors"/> </target>
 
-  <target name="pack.scalap"     depends="quick.scalap">     <staged-pack project="scalap"     targetjar="scalap.jar"/> </target>
+  <target name="pack.scalap"     depends="quick.scalap">     <staged-pack project="scalap"/> </target>
 
-  <target name="pack.bin" depends="pack.comp, pack.lib, pack.actors, pack.partest, pack.plugins, pack.reflect, pack.scalacheck, pack.scalap, pack.swing">
-    <staged-bin stage="pack"/>
-  </target>
+  <target name="pack.core" depends="pack.reflect, pack.comp, pack.lib"/>
 
-  <!-- depend on quick.done so quick.bin is run when pack.done is -->
-  <target name="pack.done" depends="quick.done, pack.bin">
-    <!-- copy dependencies to build/pack/lib, it only takes a second so don't bother with uptodate checks -->
+  <!-- TODO modularize compiler: pack.scaladoc, pack.interactive, -->
+  <target name="pack.modules" depends="pack.actors, pack.scalap">
     <copy todir="${build-pack.dir}/lib">
-      <resources refid="partest.extras.fileset"/>
-      <mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper"
-              from="${partest.extras.versions}" to="flatten"/>
+      <path refid="external-modules-nocore" />
+      <mapper type="flatten" />
     </copy>
+  </target>
+
+  <!-- depends on pack.core for scaladoc -->
+  <target name="scaladoc.task" depends="pack.core, pack.modules" unless="docs.skip">
+    <taskdef resource="scala/tools/ant/antlib.xml" classpathref="scaladoc.classpath"/>
+  </target>
 
-    <taskdef resource="scala/tools/ant/antlib.xml" classpathref="pack.compiler.path"/>
-    <taskdef resource="scala/tools/partest/antlib.xml" classpathref="partest.classpath"/>
+  <target name="pack.partest-extras" depends="quick.comp">
+    <!-- compile compiler-specific parts of partest -->
+    <staged-build with="quick" stage="quick" project="partest-extras" />
+    <staged-build with="quick" stage="quick" project="partest-javaagent" />
+
+    <staged-pack project="partest-extras"/>
+    <staged-pack project="partest-javaagent"
+                 manifest="${src.dir}/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF"/>
   </target>
 
+  <target name="pack.bin" depends="pack.core, pack.modules, pack.partest-extras">
+    <staged-bin stage="pack"/>
+  </target>
+
+  <!-- depend on quick.done so quick.bin is run when pack.done is -->
+  <target name="pack.done" depends="quick.done, pack.bin"/>
+
 
 <!-- ===========================================================================
                                   BOOTSTRAPPING BUILD (STRAP)
 ============================================================================ -->
   <target name="strap.done" depends="pack.done">
     <staged-build with="pack" stage="strap" project="library" srcpath="${src.dir}/library" includes="lib.rootdoc.includes"/>
-    <staged-build with="pack" stage="strap" project="msil" java-excludes="**/tests/**"/>
     <staged-build with="pack" stage="strap" project="reflect"/>
     <staged-build with="pack" stage="strap" project="compiler"/>
   </target>
@@ -1307,90 +1210,111 @@ TODO:
 
 
 <!-- ===========================================================================
-                                  PACKED LOCKER BUILD (PALO)
+                                  OSGi Artifacts
 ============================================================================ -->
-  <target name="palo.done" depends="locker.done">
-    <mkdir dir="${build-palo.dir}/lib"/>
-    <jar whenmanifestonly="fail" destfile="${build-palo.dir}/lib/scala-library.jar">
-      <fileset dir="${build-locker.dir}/classes/library"/>
-      <fileset dir="${forkjoin-classes}"/>
-    </jar>
-    <jar whenmanifestonly="fail" destfile="${build-palo.dir}/lib/scala-reflect.jar" manifest="${basedir}/META-INF/MANIFEST.MF"
-      basedir="${build-locker.dir}/classes/reflect"/>
-    <jar whenmanifestonly="fail" destfile="${build-palo.dir}/lib/scala-compiler.jar" manifest="${basedir}/META-INF/MANIFEST.MF">
-      <fileset dir="${build-locker.dir}/classes/compiler"/>
-      <fileset dir="${asm-classes}"/>
-      <fileset dir="${fjbg-classes}"/>
-    </jar>
-    <copy file="${jline.jar}" toDir="${build-palo.dir}/lib"/>
-  </target>
-
-  <target name="palo.bin" depends="palo.done"> <staged-bin stage="palo"/></target>
+  <!-- This task takes the output of the pack stage and OSGi-fies the jars based on the bnd files in src/build/bnd.
+       This means adding manifests and enforcing the Exports clauses (removing non-exported classes!)
+       These jars are then copied to the distribution and published to maven.
+  -->
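The bnd descriptors referenced above are small manifest-like properties files. A minimal sketch of what one of them could look like (the header values and the @VERSION@ token are illustrative, not copied from src/build/bnd):

    Bundle-Name: Scala Standard Library
    Bundle-SymbolicName: org.scala-lang.scala-library
    ver: @VERSION@
    Bundle-Version: ${ver}
    Export-Package: *;version=${ver}
    Import-Package: *

The make-bundle macro used below (now defined in build-ant-macros.xml) presumably follows the same pattern as the macro it replaces: the @VERSION@ token is filtered in and the bnd task rewrites the packed jar into ${build-osgi.dir}.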
 
 
-<!-- ===========================================================================
-                                  OSGi Artifacts
-============================================================================ -->
-  <target name="osgi.done" depends="pack.done">
+  <target name="osgi.core" depends="pack.core">
     <mkdir dir="${build-osgi.dir}"/>
 
-    <!-- simplify fixing pom versions -->
-    <macrodef name="make-bundle">
-      <attribute name="name" />
-      <attribute name="version" />
-      <sequential>
-        <copy file="${src.dir}/build/bnd/@{name}.bnd" tofile="${build-osgi.dir}/@{name}.bnd" overwrite="true">
-          <filterset>
-            <filter token="VERSION" value="@{version}" />
-          </filterset>
-        </copy>
-        <bnd classpath="${build-pack.dir}/lib/@{name}.jar"
-             eclipse="false"
-             failok="false"
-             exceptions="true"
-             files="${build-osgi.dir}/@{name}.bnd"
-             output="${build-osgi.dir}"/>
-      </sequential>
-    </macrodef>
-    <macrodef name="make-plugin-bundle">
-      <attribute name="name" />
-      <attribute name="version" />
-      <sequential>
-        <copy file="${src.dir}/build/bnd/@{name}.bnd" tofile="${build-osgi.dir}/@{name}.bnd" overwrite="true">
-          <filterset>
-            <filter token="VERSION" value="@{version}" />
-          </filterset>
-        </copy>
-        <bnd classpath="${build-pack.dir}/misc/scala-devel/plugins/@{name}.jar"
-             eclipse="false"
-             failok="false"
-             exceptions="true"
-             files="${build-osgi.dir}/@{name}.bnd"
-             output="${build-osgi.dir}"/>
-      </sequential>
-    </macrodef>
-
-    <uptodate property="osgi.bundles.available" targetfile="${build-osgi.dir}/bundles.complete">
-      <srcfiles dir="${basedir}">
-        <include name="build.xml"/>
-        <include name="src/build/bnd/*.bnd"/>
-      </srcfiles>
+    <uptodate property="osgi.bundles.available" targetfile="${build-osgi.dir}/bundles.core.complete">
+      <srcresources>
+        <fileset dir="${basedir}">
+          <include name="build.xml"/>
+          <include name="build-ant-macros.xml"/>
+          <include name="src/build/bnd/*.bnd"/>
+        </fileset>
+        <filelist>
+          <file name="${library.jar}"/>
+          <file name="${reflect.jar}"/>
+          <file name="${compiler.jar}"/>
+        </filelist>
+      </srcresources>
     </uptodate>
 
     <if><not><isset property="osgi.bundles.available"/></not><then>
-      <stopwatch name="osgi.bundle.timer"/>
-      <make-bundle name="scala-library" version="${osgi.version.number}" />
-      <make-bundle name="scala-actors" version="${osgi.version.number}" />
-      <make-bundle name="scala-reflect" version="${osgi.version.number}" />
-      <make-bundle name="scala-compiler" version="${osgi.version.number}" />
-      <make-plugin-bundle name="continuations" version="${osgi.version.number}" />
-      <touch file="${build-osgi.dir}/bundles.complete" verbose="no"/>
-
-      <if><isset property="has.java6"/><then>
-        <make-bundle name="scala-swing" version="${osgi.version.number}"/></then>
-      </if>
-      <stopwatch name="osgi.bundle.timer" action="total"/></then>
-    </if>
+      <stopwatch name="osgi.core.timer"/>
+      <make-bundle project="library">
+        <fileset dir="${src.dir}/library"/>
+      </make-bundle>
+
+      <make-bundle project="reflect">
+        <fileset dir="${src.dir}/reflect"/>
+      </make-bundle>
+
+      <!-- TODO modularize compiler. Remove the other class dirs as soon as they become modules -->
+      <make-bundle project="compiler">
+        <fileset dir="${src.dir}/compiler"/>
+        <fileset dir="${src.dir}/scaladoc"/>
+        <fileset dir="${src.dir}/interactive"/>
+        <fileset dir="${src.dir}/repl"/>
+      </make-bundle>
+
+      <touch file="${build-osgi.dir}/bundles.core.complete" verbose="no"/>
+      <stopwatch name="osgi.core.timer" action="total"/>
+    </then></if>
+  </target>
+
+  <target name="osgi.done" depends="pack.done, osgi.core">
+    <uptodate property="osgi.all.bundles.available" targetfile="${build-osgi.dir}/bundles.all.complete">
+      <srcresources>
+        <fileset dir="${basedir}">
+          <include name="build.xml"/>
+          <include name="build-ant-macros.xml"/>
+          <include name="src/build/bnd/*.bnd"/>
+        </fileset>
+        <filelist>
+            <!-- TODO modularize compiler
+            <include name="${interactive.jar}"/>
+            <include name="${scaladoc.jar}"/>
+            -->
+
+          <file name="${actors.jar}"/>
+
+          <file name="${continuations-plugin.jar}"/>
+          <file name="${continuations-library.jar}"/>
+          <file name="${parser-combinators.jar}"/>
+          <file name="${xml.jar}"/>
+          <file name="${swing.jar}"/>
+        </filelist>
+      </srcresources>
+    </uptodate>
+
+    <if><not><isset property="osgi.all.bundles.available"/></not><then>
+      <stopwatch name="osgi.all.timer"/>
+
+      <!-- TODO modularize compiler
+      TODO: refactor so that we can restrict exported packages to scala.tools.nsc.doc.*
+           move ant task, partest stuff to other jars,
+           and move scala.tools.nsc.ScalaDoc main class to scala.tools.nsc.doc
+      <make-bundle project="scaladoc">
+        <fileset dir="${src.dir}/scaladoc"/>
+      </make-bundle>
+
+      TODO: refactor so that we can restrict exported packages to scala.tools.nsc.interactive.*
+      <make-bundle project="interactive">
+        <fileset dir="${src.dir}/interactive"/>
+      </make-bundle>
+      -->
+
+      <make-bundle project="actors">
+        <fileset dir="${src.dir}/actors"/>
+      </make-bundle>
+
+
+      <make-bundle project="continuations-plugin"/>
+      <make-bundle project="continuations-library"/>
+      <make-bundle project="parser-combinators"/>
+      <make-bundle project="xml"/>
+      <make-bundle project="swing"/>
+
+      <touch file="${build-osgi.dir}/bundles.all.complete" verbose="no"/>
+      <stopwatch name="osgi.all.timer" action="total"/>
+    </then></if>
   </target>
 
 
@@ -1433,6 +1357,8 @@ TODO:
   <target name="test.osgi" depends="test.osgi.comp">
     <stopwatch name="test.osgi.timer"/>
     <mkdir dir="${test.osgi.classes}"/>
+
+    <echo message="Running OSGi JUnit tests. Output in ${build-osgi.dir}"/>
     <junit fork="yes" haltonfailure="yes">
       <classpath refid="test.osgi.compiler.build.path"/>
       <batchtest fork="yes" todir="${build-osgi.dir}">
@@ -1440,7 +1366,7 @@ TODO:
           <include name="**/*Test.class"/>
         </fileset>
       </batchtest>
-      <formatter type="brief" /> <!-- silenced by having it use a file; I tried for an hour to use other formatters but classpath issues drove me to this usefile="false" -->
+      <formatter type="xml" /> <!-- silenced by having it use a file; I tried for an hour to use other formatters but classpath issues drove me to this usefile="false" -->
     </junit>
     <stopwatch name="test.osgi.timer" action="total"/>
   </target>
@@ -1507,7 +1433,8 @@ TODO:
   <target name="test.junit" depends="test.junit.comp">
     <stopwatch name="test.junit.timer"/>
     <mkdir dir="${test.junit.classes}"/>
-    <junit fork="yes" haltonfailure="yes" showoutput="yes" printsummary="on">
+    <echo message="Note: details of failed tests will be output to ${build-junit.dir}"/>
+    <junit fork="yes" haltonfailure="yes" printsummary="on">
       <classpath refid="test.junit.compiler.build.path"/>
       <batchtest fork="yes" todir="${build-junit.dir}">
         <fileset dir="${test.junit.classes}">
@@ -1519,86 +1446,64 @@ TODO:
     <stopwatch name="test.junit.timer" action="total"/>
   </target>
 
-  <property name="partest.srcdir" value="files" /> <!-- TODO: make targets for `pending` and other subdirs -->
+  <!-- See test/build-partest.xml for the macro(s) being used here. -->
+  <target name="partest.task" depends="pack.done">
+    <!-- Note the classpathref! This is the classpath used to run partest,
+    so it must include the newly built compiler. -->
+    <taskdef
+       classpathref="partest.compilation.path"
+       resource="scala/tools/partest/antlib.xml"/>
+  </target>
 
-  <target name="test.run" depends="pack.done">
-    <partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
-             timeout="1200000"
-             srcdir="${partest.srcdir}"
-             scalacopts="${scalac.args.optimise}">
+  <target name="test.suite.init" depends="partest.task">
+    <!-- read by test/partest to determine classpath used to run partest -->
+    <propertyfile file = "build/pack/partest.properties">
+      <!-- TODO: change "partest.classpath" to "partest.runtime.classpath" or something -->
+      <entry key       = "partest.classpath" value="${toString:partest.compilation.path}"/>
+    </propertyfile>
+  </target>
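For illustration, the generated build/pack/partest.properties is a one-entry properties file along these lines (the jar paths are placeholders standing in for the expanded partest.compilation.path):

    partest.classpath=build/pack/lib/scala-library.jar:build/pack/lib/scala-reflect.jar:build/pack/lib/scala-compiler.jar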
+
+  <target name="test.suite" depends="test.suite.init">
+    <testSuite kinds="pos neg run jvm res scalap scalacheck specialized instrumented"/>
+  </target>
 
-      <compilationpath refid="partest.build.path"/>
-      <runtests  dir="${partest.dir}/${partest.srcdir}/run" includes="*.scala"/>
-      <jvmtests dir="${partest.dir}/${partest.srcdir}/jvm" includes="*.scala"/>
-    </partest>
+  <target name="test.suite.color" depends="test.suite.init">
+    <testSuite colors="8" kinds="pos neg run jvm res scalap scalacheck specialized instrumented"/>
   </target>
 
-  <target name="test.suite" depends="pack.done">
-    <partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
-             timeout="2400000"
-             srcdir="${partest.srcdir}"
-             scalacopts="${scalac.args.optimise}">
-      <compilationpath refid="partest.build.path"/>
-      <postests  dir="${partest.dir}/${partest.srcdir}/pos" includes="*.scala"/>
-      <negtests  dir="${partest.dir}/${partest.srcdir}/neg" includes="*.scala"/>
-      <runtests  dir="${partest.dir}/${partest.srcdir}/run" includes="*.scala"/>
-      <jvmtests dir="${partest.dir}/${partest.srcdir}/jvm" includes="*.scala"/>
-      <residenttests dir="${partest.dir}/${partest.srcdir}/res" includes="*.res"/>
-      <buildmanagertests dir="${partest.dir}/${partest.srcdir}/buildmanager" includes="*"/>
-      <scalaptests dir="${partest.dir}/${partest.srcdir}/scalap" includes="**/*.scala"/>
-      <scalachecktests dir="${partest.dir}/${partest.srcdir}/scalacheck">
-        <include name="*.scala"/>
-      </scalachecktests>
-      <specializedtests dir="${partest.dir}/${partest.srcdir}/specialized">
-        <include name="*.scala"/>
-      </specializedtests>
-      <instrumentedtests dir="${partest.dir}/${partest.srcdir}/instrumented">
-        <include name="*.scala"/>
-      </instrumentedtests>
-    </partest>
+  <target name="test.suite.quick" depends="init, quick.done">
+    <path id="test.suite.path">
+      <path refid="quick.bin.tool.path"/>
+      <path refid="quick.interactive.build.path"/>
+      <path refid="partest.compilation.path.noncore"/>
+    </path>
+    <property name="pcp" value="${toString:test.suite.path}"/>
+    <taskdef classpathref="test.suite.path" resource="scala/tools/partest/antlib.xml"/>
+    <testSuite colors="8" kinds="pos neg run jvm res scalap scalacheck specialized instrumented" pcp="${pcp}"/>
   </target>
 
-  <target name="test.continuations.suite" depends="pack.done">
-    <partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
-             timeout="2400000"
-             srcdir="${partest.srcdir}"
-             scalacopts="${scalac.args.optimise} -Xplugin-require:continuations -P:continuations:enable">
-      <compilerarg value="-Xpluginsdir"/>
-      <compilerarg file="${build-quick.dir}/misc/scala-devel/plugins"/>
-      <compilationpath refid="partest.build.path"/>
-      <negtests  dir="${partest.dir}/${partest.srcdir}/continuations-neg" includes="*.scala"/>
-      <runtests  dir="${partest.dir}/${partest.srcdir}/continuations-run" includes="*.scala"/>
-    </partest>
+  <target name="test.run" depends="test.suite.init">
+    <testSuite kinds="run jvm"/>
   </target>
 
-  <target name="test.scaladoc" depends="pack.done">
-    <partest erroronfailed="yes" scalacopts="${scalac.args.optimise}" showlog="yes">
-      <compilationpath refid="partest.build.path"/>
-      <runtests        dir="${partest.dir}/scaladoc/run"        includes="*.scala" />
-      <scalachecktests dir="${partest.dir}/scaladoc/scalacheck" includes="*.scala" />
-    </partest>
+  <target name="test.scaladoc" depends="test.suite.init">
+    <testSuite kinds="run scalacheck" srcdir="scaladoc"/>
   </target>
 
-  <target name="test.interactive" depends="pack.done">
-    <partest erroronfailed="yes" scalacopts="${scalac.args.optimise}" showlog="yes">
-      <compilationpath refid="partest.build.path"/>
-      <presentationtests dir="${partest.dir}/${partest.srcdir}/presentation">
-        <include name="*/*.scala"/>
-      </presentationtests>
-    </partest>
+  <target name="test.interactive" depends="test.suite.init">
+    <testSuite kinds="presentation"/>
   </target>
 
   <!-- for use in PR validation, where stability is rarely broken, so we're going to use starr for locker,
        and skip test.stability (which requires locker == quick) -->
-  <target name="test.core" depends="test.osgi, test.sbt, test.bc, test.junit, test.interactive, test.continuations.suite, test.scaladoc, test.suite"/>
+  <target name="test.core" depends="test.osgi, test.sbt, test.bc, test.junit, test.interactive, test.scaladoc, test.suite"/>
   <target name="test.done" depends="test.core, test.stability"/>
 
-
 <!-- ===========================================================================
                                   BINARY COMPATIBILITY TESTING
 ============================================================================ -->
   <target name="bc.init" depends="init" unless="maven-deps-done-mima">
-    <property name="bc-reference-version" value="2.10.0"/>
+    <property name="bc-reference-version" value="2.11.0-RC1"/>
 
     <property name="bc-build.dir" value="${build.dir}/bc"/>
     <!-- Obtain mima -->
@@ -1608,65 +1513,22 @@ TODO:
       <dependency groupId="com.typesafe" artifactId="mima-reporter_2.10" version="0.1.6"/>
     </artifact:dependencies>
     <artifact:dependencies pathId="old.bc.classpath">
-      <dependency groupId="org.scala-lang" artifactId="scala-swing" version="${bc-reference-version}"/>
       <dependency groupId="org.scala-lang" artifactId="scala-library" version="${bc-reference-version}"/>
       <dependency groupId="org.scala-lang" artifactId="scala-reflect" version="${bc-reference-version}"/>
     </artifact:dependencies>
     <property name="maven-deps-done-mima" value="true"/>
   </target>
 
-  <macrodef name="bc.run-mima">
-    <attribute name="jar-name"/>
-    <attribute name="prev"/>
-    <attribute name="curr"/>
-    <attribute name="direction"/>
-    <sequential>
-      <echo message="Checking @{direction} binary compatibility for @{jar-name} (against ${bc-reference-version})"/>
-      <java taskname="mima"
-         fork="true"
-         failonerror="true"
-         classname="com.typesafe.tools.mima.cli.Main">
-           <arg value="--prev"/>
-           <arg value="@{prev}"/>
-           <arg value="--curr"/>
-           <arg value="@{curr}"/>
-           <arg value="--filters"/>
-           <arg value="${basedir}/bincompat-@{direction}.whitelist.conf"/>
-           <arg value="--generate-filters"/>
-           <classpath>
-             <path refid="mima.classpath"/>
-           </classpath>
-      </java>
-    </sequential>
-  </macrodef>
-
-  <macrodef name="bc.check">
-    <attribute name="jar-name"/>
-    <sequential>
-        <bc.run-mima
-                jar-name="@{jar-name}"
-                prev="${org.scala-lang:@{jar-name}:jar}"
-                curr="${build-pack.dir}/lib/@{jar-name}.jar"
-                direction="backward"/>
-        <bc.run-mima
-                jar-name="@{jar-name}"
-                prev="${build-pack.dir}/lib/@{jar-name}.jar"
-                curr="${org.scala-lang:@{jar-name}:jar}"
-                direction="forward"/>
-    </sequential>
-  </macrodef>
-
   <target name="test.bc-opt" description="Optimized version of test.bc."> <optimized name="test.bc"/></target>
-  <target name="test.bc" depends="bc.init, pack.lib, pack.reflect, pack.swing">
-    <bc.check jar-name="scala-library"/>
-    <bc.check jar-name="scala-reflect"/>
-    <bc.check jar-name="scala-swing"/>
+  <target name="test.bc" depends="bc.init, pack.lib, pack.reflect">
+    <bc.check project="library"/>
+    <bc.check project="reflect"/>
   </target>
 
 <!-- ===========================================================================
                                   DOCUMENTATION
 ============================================================================ -->
-  <target name="docs.start" depends="pack.done">
+  <target name="docs.start" depends="scaladoc.task" unless="docs.skip">
     <!-- Set the github commit scaladoc sources point to -->
     <!-- For releases, look for the tag with the same name as the maven version -->
     <condition property="scaladoc.git.commit" value="v${maven.version.number}">
@@ -1687,394 +1549,208 @@ TODO:
     <property name="scaladoc.no.prefixes" value="no"/>
   </target>
 
-  <target name="docs.lib" depends="docs.start">
-    <staged-uptodate stage="docs" project="library">
-      <check><srcfiles dir="${src.dir}">
-        <include name="library/**"/>
-        <include name="swing/**"/>
-        <include name="actors/**"/>
-        <include name="reflect/**"/>
-        <include name="continuations/library/**"/>
-      </srcfiles></check>
-      <do>
-        <stopwatch name="docs.lib.timer"/>
-        <mkdir dir="${build-docs.dir}/library"/>
-        <!-- last three attributes not supported by staged-docs: -->
-        <scaladoc
-          destdir="${build-docs.dir}/library"
-          doctitle="Scala Standard Library API (Scaladoc)"
-          docversion="${version.number}"
-          docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
-          sourcepath="${src.dir}"
-          classpathref="pack.compiler.path"
-          addparams="${scalac.args.all}"
-          docRootContent="${src.dir}/library/rootdoc.txt"
-          implicits="on"
-          diagrams="on"
-          groups="on"
-          rawOutput="${scaladoc.raw.output}"
-          noPrefixes="${scaladoc.no.prefixes}"
-          docfooter="epfl"
-          docUncompilable="${src.dir}/library-aux"
-          skipPackages="scala.reflect.macros.internal:scala.reflect.internal:scala.reflect.io:scala.concurrent.impl">
-          <src>
-            <files includes="${src.dir}/actors"/>
-            <files includes="${src.dir}/library"/>
-            <files includes="${src.dir}/reflect"/>
-            <files includes="${src.dir}/swing"/>
-            <files includes="${src.dir}/continuations/library"/>
-          </src>
-          <include name="**/*.scala"/>
-          <exclude name="reflect/Code.scala"/>
-          <exclude name="reflect/Print.scala"/>
-          <exclude name="reflect/Symbol.scala"/>
-          <exclude name="reflect/Tree.scala"/>
-          <exclude name="reflect/Type.scala"/>
-          <exclude name="runtime/*$.scala"/>
-          <exclude name="runtime/ScalaRunTime.scala"/>
-          <exclude name="runtime/StringAdd.scala"/>
-        </scaladoc>
-        <stopwatch name="docs.lib.timer" action="total"/>
-      </do>
-    </staged-uptodate>
+  <target name="docs.lib" depends="docs.start" unless="docs.skip">
+    <staged-docs project="library">
+      <include name="**/*.scala"/>
+      <exclude name="runtime/*$.scala"/>
+      <exclude name="runtime/ScalaRunTime.scala"/>
+      <exclude name="runtime/StringAdd.scala"/>
+    </staged-docs>
+  </target>
+
+  <target name="docs.reflect" depends="docs.start" unless="docs.skip">
+    <staged-docs project="reflect">
+      <include name="**/*.scala"/>
+      <exclude name="reflect/Code.scala"/>
+      <exclude name="reflect/Print.scala"/>
+      <exclude name="reflect/Symbol.scala"/>
+      <exclude name="reflect/Tree.scala"/>
+      <exclude name="reflect/Type.scala"/>
+    </staged-docs>
   </target>
 
-  <target name="docs.comp" depends="docs.start">
-    <staged-docs project="compiler" title="Scala Compiler" docroot="rootdoc.txt">
+  <target name="docs.comp" depends="docs.start" unless="docs.skip">
+    <staged-docs project="compiler">
       <include name="**/*.scala"/>
     </staged-docs>
   </target>
 
-  <target name="docs.jline" depends="docs.start">
-    <staged-docs project="jline" dir="jline/src/main/java" title="Scala JLine">
+  <!-- TODO modularize compiler. These targets are currently not used.
+  <target name="docs.scaladoc" depends="docs.start" unless="docs.skip">
+    <staged-docs project="scaladoc">
       <include name="**/*.scala"/>
-      <include name="**/*.java"/>
     </staged-docs>
   </target>
 
-  <target name="docs.scalap" depends="docs.start">
-    <staged-docs project="scalap" title="Scalap">
+  <target name="docs.interactive" depends="docs.start" unless="docs.skip">
+    <staged-docs project="interactive">
       <include name="**/*.scala"/>
     </staged-docs>
   </target>
+  -->
 
-  <target name="docs.partest" depends="docs.start">
-    <staged-docs project="partest" title="Scala Parallel Testing Framework">
+  <target name="docs.actors" depends="docs.start" unless="docs.skip">
+    <staged-docs project="actors">
       <include name="**/*.scala"/>
     </staged-docs>
   </target>
 
-  <target name="docs.continuations-plugin" depends="docs.start">
-    <staged-docs project="continuations-plugin" dir="continuations/plugin" title="Delimited Continuations Compiler Plugin">
+  <target name="docs.scalap" depends="docs.start" unless="docs.skip">
+    <staged-docs project="scalap">
       <include name="**/*.scala"/>
     </staged-docs>
   </target>
 
-  <target name="docs.man" depends="docs.start">
-    <staged-uptodate stage="docs" project="manual">
+  <target name="docs.core" depends="docs.lib, docs.reflect, docs.comp" unless="docs.skip"/>
+  <!-- TODO modularize compiler:  docs.scaladoc, docs.interactive, -->
+  <target name="docs.done" depends="docs.core, docs.actors, docs.scalap" unless="docs.skip"/>
+
+  <!-- doc/ and man/ -->
+  <target name="pack.doc" depends="scaladoc.task" unless="docs.skip"> <!-- depends on scaladoc.task for scalac taskdef -->
+    <mkdir dir="${build-pack.dir}/doc"/>
+    <copy toDir="${build-pack.dir}/doc" overwrite="true">
+      <fileset dir="${doc.dir}"/>
+    </copy>
+
+    <mkdir dir="${build-pack.dir}/doc/tools"/>
+    <mkdir dir="${build-pack.dir}/man/man1"/>
+    <staged-uptodate stage="manual" project="manual">
       <check><srcfiles dir="${src.dir}/manual"/></check>
       <do>
         <mkdir dir="${build.dir}/manmaker/classes"/>
         <scalac
           destdir="${build.dir}/manmaker/classes"
-          classpathref="pack.compiler.path"
+          classpathref="manual.build.path"
           srcdir="${src.dir}/manual"
           includes="**/*.scala"
-          addparams="${scalac.args.all}"/>
-        <mkdir dir="${build-docs.dir}/manual/man/man1"/>
-        <mkdir dir="${build-docs.dir}/manual/html"/>
-        <mkdir dir="${build-docs.dir}/manual/genman/man1"/>
+          addparams="${scalac.args.all} -language:implicitConversions"/>
+        <mkdir dir="${build-manual.dir}/genman/man1"/>
         <taskdef name="genman"
                  classname="scala.tools.docutil.ManMaker"
-                 classpathref="manual.classpath"/>
+                 classpathref="manual.build.path"/>
         <genman command="fsc, scala, scalac, scaladoc, scalap"
-                htmlout="${build-docs.dir}/manual/html"
-                manout="${build-docs.dir}/manual/genman"/>
-        <!-- On Windows source and target files can't be the same ! -->
-        <fixcrlf
-          srcdir="${build-docs.dir}/manual/genman"
-          destdir="${build-docs.dir}/manual/man"
-          eol="unix" includes="**/*.1"/>
-        <copy todir="${build-docs.dir}/manual/html">
-          <fileset dir="${src.dir}/manual/scala/tools/docutil/resources">
-            <include name="**/*.html"/>
-            <include name="**/*.css"/>
-            <include name="**/*.gif"/>
-            <include name="**/*.png"/>
-          </fileset>
-        </copy>
+                htmlout="${build-pack.dir}/doc/tools"
+                manout="${build-manual.dir}/genman"/>
       </do>
     </staged-uptodate>
-  </target>
-
-  <target name="docs.done" depends="docs.jline, docs.comp, docs.man, docs.lib, docs.scalap, docs.partest, docs.continuations-plugin"/>
-
-
-<!-- ===========================================================================
-                                  DISTRIBUTION
-============================================================================ -->
-  <target name="dist.base" depends="pack.done, osgi.done">
-    <property name="dist.name" value="scala-${version.number}"/>
-    <property name="dist.dir" value="${dists.dir}/${dist.name}"/>
 
-    <macrodef name="copy-bundle">
-      <attribute name="name" />
-      <sequential>
-        <copy file="${build-osgi.dir}/org.scala-lang.@{name}.jar"
-          tofile="${dist.dir}/lib/@{name}.jar"/>
-      </sequential>
-    </macrodef>
-    <macrodef name="copy-plugin-bundle">
-      <attribute name="name" />
-      <sequential>
-        <copy file="${build-osgi.dir}/org.scala-lang.plugins.@{name}.jar"
-          tofile="${dist.dir}/misc/scala-devel/plugins/@{name}.jar"
-          overwrite="yes"/>
-      </sequential>
-    </macrodef>
-
-    <mkdir dir="${dist.dir}/lib"/>
-    <copy toDir="${dist.dir}/lib">
-      <fileset dir="${build-pack.dir}/lib">
-        <include name="jline.jar"/>
-        <include name="scala-partest.jar"/> <!-- needed for maven publish -->
-        <include name="scalap.jar"/>
+    <!-- On Windows source and target files can't be the same! -->
+    <fixcrlf
+      srcdir="${build-manual.dir}/genman"
+      destdir="${build-pack.dir}/man"
+      eol="unix" includes="**/*.1"/>
+    <copy todir="${build-pack.dir}/doc/tools" overwrite="true">
+      <fileset dir="${src.dir}/manual/scala/tools/docutil/resources">
+        <include name="**/*.html"/>
+        <include name="**/*.css"/>
+        <include name="**/*.gif"/>
+        <include name="**/*.png"/>
       </fileset>
     </copy>
-
-    <mkdir dir="${dist.dir}/bin"/>
-    <!-- TODO - Stop being inefficient and don't copy OSGi bundles overtop other jars. -->
-    <copy-bundle name="scala-library"/>
-    <copy-bundle name="scala-reflect"/>
-    <copy-bundle name="scala-swing"/>
-    <copy-bundle name="scala-actors"/>
-    <copy-bundle name="scala-compiler"/>
-    <copy toDir="${dist.dir}/bin">
-      <fileset dir="${build-pack.dir}/bin"/>
-    </copy>
-    <chmod perm="ugo+rx" file="${dist.dir}/bin/scala"/>
-    <chmod perm="ugo+rx" file="${dist.dir}/bin/scalac"/>
-    <chmod perm="ugo+rx" file="${dist.dir}/bin/scaladoc"/>
-    <chmod perm="ugo+rx" file="${dist.dir}/bin/fsc"/>
-    <chmod perm="ugo+rx" file="${dist.dir}/bin/scalap"/>
-    <mkdir dir="${dist.dir}/misc/scala-devel/plugins"/>
-    <copy-plugin-bundle name="continuations"/>
   </target>
 
-  <target name="dist.doc" depends="dist.base, docs.done">
-    <mkdir dir="${dist.dir}/doc"/>
-    <mkdir dir="${dist.dir}/doc/licenses"/>
-    <mkdir dir="${dist.dir}/doc/tools"/>
-    <copy file="${docs.dir}/LICENSE" toDir="${dist.dir}/doc"/>
-    <copy file="${docs.dir}/README"  toDir="${dist.dir}/doc"/>
-    <copy                            toDir="${dist.dir}/doc/licenses">
-      <fileset dir="${docs.dir}/licenses"/>
-    </copy>
-    <copy                            toDir="${dist.dir}/doc/tools">
-      <fileset dir="${build-docs.dir}/manual/html"/>
-    </copy>
-
-    <mkdir dir="${dist.dir}/api"/>
-    <copy toDir="${dist.dir}/api">
-      <fileset dir="${build-docs.dir}/library"/>
-    </copy>
-    <copy file="${src.dir}/swing/doc/README"
-          toFile="${dist.dir}/api/README.scala-swing"/>
+<!-- ===========================================================================
+MAIN DISTRIBUTION PACKAGING
+============================================================================ -->
+  <target name="pack-maven.core" depends="osgi.core, docs.core">
+    <property name="dist.maven" value="${dists.dir}/maven/${version.number}"/>
+    <mkdir dir="${dist.maven}"/>
+
+    <mvn-package project="library"/>
+    <mvn-package project="reflect"/>
+    <mvn-package project="compiler"/>
+
+    <copy tofile="${dist.maven}/scala-library-all/scala-library-all-pom.xml"
+            file="${src.dir}/build/maven/scala-library-all-pom.xml" overwrite="true"/>
+
+    <!-- for replacestarr -->
+    <if><isset property="update.starr.version"/><then>
+      <echo message="From now on, ${maven.version.number} will be used as STARR (`build.properties`'s `starr.version` was modified)."/>
+      <propertyfile file = "build.properties">
+        <entry key       = "starr.version" value="${maven.version.number}"/>
+      </propertyfile>
+    </then></if>
   </target>
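With update.starr.version set (as the replacestarr target does), build.properties afterwards carries an entry of roughly this shape (the version shown is only an example):

    starr.version=2.11.0-STARR-abc1234-SNAPSHOT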
 
+  <target name="pack-maven.done" depends="pack-maven.core, osgi.done, docs.done, pack.bin, pack.doc">
+    <!-- TODO modularize compiler
+    <mvn-package project="interactive"/>
+    <mvn-package project="scaladoc"/>
+    -->
 
-  <target name="dist.man" depends="dist.base">
-    <mkdir dir="${dist.dir}/man"/>
-    <copy toDir="${dist.dir}/man">
-      <fileset dir="${build-docs.dir}/manual/man"/>
-    </copy>
-  </target>
+    <mvn-package project="actors"/>
 
-  <!--
-       A jar-like task that creates an OSGi source bundle. It adds the required MANIFEST.MF headers that allow
-       Eclipse to match sources with the corresponding binaries.
-  -->
-  <macrodef name="osgi.source.bundle">
-    <attribute name="destfile"     description="The jar file name"/>
-    <attribute name="symbolicName" description="The original bundle symbolic name (without .source at the end)"/>
-    <attribute name="bundleName"   description="A value for Bundle-Name, usually a textual description"/>
-    <element   name="file-sets"    description="A sequence of fileset elements to be included in the jar" optional="true" implicit="true"/>
-
-    <sequential>
-      <jar whenmanifestonly="fail" destfile="@{destFile}">
-        <file-sets/>
-        <manifest>
-          <attribute name="Manifest-Version" value="1.0"/>
-          <attribute name="Bundle-Name" value="@{bundleName}"/>
-          <attribute name="Bundle-SymbolicName" value="@{symbolicName}.source"/>
-          <attribute name="Bundle-Version" value="${osgi.version.number}"/>
-          <attribute name="Eclipse-SourceBundle" value="@{symbolicName};version="${osgi.version.number}";roots:="."" />
-        </manifest>
-      </jar>
-    </sequential>
-  </macrodef>
-
-  <target name="dist.src" depends="dist.base">
-    <mkdir dir="${dist.dir}/src"/>
-    <osgi.source.bundle destfile="${dist.dir}/src/scala-library-src.jar"
-                        symbolicName="org.scala-lang.scala-library"
-                        bundleName="Scala Library Sources">
-      <fileset dir="${src.dir}/library"/>
-      <fileset dir="${src.dir}/continuations/library"/>
-    </osgi.source.bundle>
-    <osgi.source.bundle destfile="${dist.dir}/src/scala-reflect-src.jar"
-                        symbolicName="org.scala-lang.scala-reflect"
-                        bundleName="Scala Reflect Sources">
-      <fileset dir="${src.dir}/reflect"/>
-    </osgi.source.bundle>
-    <osgi.source.bundle destfile="${dist.dir}/src/scala-swing-src.jar"
-                        symbolicName="org.scala-lang.scala-swing"
-                        bundleName="Scala Swing Sources">
-      <fileset dir="${src.dir}/swing"/>
-    </osgi.source.bundle>
-    <osgi.source.bundle destfile="${dist.dir}/src/scala-compiler-src.jar"
-                        symbolicName="org.scala-lang.scala-compiler"
-                        bundleName="Scala Compiler Sources">
-      <fileset dir="${src.dir}/compiler"/>
-    </osgi.source.bundle>
-    <jar whenmanifestonly="fail" destfile="${dist.dir}/src/fjbg-src.jar"           basedir="${src.dir}/fjbg"/>
-    <jar whenmanifestonly="fail" destfile="${dist.dir}/src/msil-src.jar"           basedir="${src.dir}/msil"/>
-    <osgi.source.bundle destfile="${dist.dir}/src/scala-actors-src.jar"
-                        symbolicName="org.scala-lang.scala-actors"
-                        bundleName="Scala Actors Sources">
-      <fileset dir="${src.dir}/actors"/>
-    </osgi.source.bundle>
-    <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scalap-src.jar"         basedir="${src.dir}/scalap"/>
-    <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-partest-src.jar"  basedir="${src.dir}/partest"/>
-  </target>
+    <!-- don't bother fitting scalap into the mould: it will move out soon -->
+    <copy tofile="${dist.maven}/scalap/scalap-pom.xml" file="${src.dir}/build/maven/scalap-pom.xml" overwrite="true"/>
+    <copy tofile="${dist.maven}/scalap/scalap.jar" file="${scalap.jar}" overwrite="true"/>
+    <jar destfile="${dist.maven}/scalap/scalap-src.jar" basedir="${src.dir}/scalap" whenmanifestonly="fail"/>
+    <if><not><isset property="docs.skip"/></not><then>
+      <jar destfile="${dist.maven}/scalap/scalap-docs.jar" basedir="${build-docs.dir}/scalap"/>
+    </then></if>
 
-  <target name="dist.partial" depends="dist.base">
-    <if><not><os family="windows"/></not><then>
-      <symlink link="${dists.dir}/latest" resource="${dist.name}" overwrite="yes"/>
-    </then><else> <!-- XXX THIS PROBABLY DOES NOT WORK: copying must happen last during dist.done! is this guaranteed? -->
-      <copydir dest="${dists.dir}/latest" src="${dist.dir}"/>
-    </else></if>
+    <copy tofile="${dist.maven}/scala-dist/scala-dist-pom.xml" file="${src.dir}/build/maven/scala-dist-pom.xml" overwrite="true"/>
+    <jar whenmanifestonly="fail" destfile="${dist.maven}/scala-dist/scala-dist.jar" basedir="${build-pack.dir}">
+      <include name="bin/" />
+      <include name="doc/" />
+      <include name="man/" />
+    </jar>
   </target>
 
-  <target name="dist.done" depends="dist.doc, dist.man, dist.src, dist.partial"/>
-
-
 <!-- ===========================================================================
-                                  STABLE REFERENCE (STARR)
+                                  MAVEN PUBLISHING
 ============================================================================ -->
-<!-- Does not use any properties other than ${basedir}, so that it can
-     run without 'init' (when using 'replacestarrwin') -->
-
-  <target name="starr.start">
-    <fail message="Library in build/pack not available">
-      <condition><not><and>
-        <available file="${build-pack.dir}/lib/scala-library.jar"/>
-      </and></not></condition>
-    </fail>
-    <fail message="Compiler in build/quick not available">
-      <condition><not><and>
-        <available file="${build-quick.dir}/classes/compiler"/>
-        <available file="${build-quick.dir}/compiler.complete"/>
-      </and></not></condition>
-    </fail>
-  </target>
+  <target name="init.maven" depends="init">
+    <property name="remote.snapshot.repository" value="https://oss.sonatype.org/content/repositories/snapshots" />
+    <property name="remote.release.repository"  value="https://oss.sonatype.org/service/local/staging/deploy/maven2" />
 
-  <target name="starr.jars" depends="starr.start">
-    <copy toDir="${lib.dir}" overwrite="yes">
-      <fileset dir="${build-pack.dir}/lib">
-        <include name="scala-library.jar"/>
-        <include name="scala-reflect.jar"/>
-        <include name="scala-compiler.jar"/>
-      </fileset>
-    </copy>
-  </target>
+    <property name="local.snapshot.repository" value="${user.home}/.m2/repository" />
+    <property name="local.release.repository"  value="${user.home}/.m2/repository" />
 
-  <target name="starr.src" depends="starr.jars">
-    <jar whenmanifestonly="fail" destfile="${lib.dir}/scala-library-src.jar">
-      <fileset dir="${src.dir}/library"/>
-      <fileset dir="${src.dir}/swing"/>
-      <fileset dir="${src.dir}/actors"/>
-      <fileset dir="${src.dir}/forkjoin"/>
-    </jar>
-    <jar whenmanifestonly="fail" destfile="${lib.dir}/scala-reflect-src.jar" basedir="${src.dir}/reflect"/>
-    <jar whenmanifestonly="fail" destfile="${lib.dir}/scala-compiler-src.jar">
-      <fileset dir="${src.dir}/compiler"/>
-      <fileset dir="${src.dir}/asm"/>
-    </jar>
-  </target>
+    <property name="repository.credentials.id" value="sonatype-nexus" />
+    <property name="settings.file" value="${user.home}/.m2/settings.xml" />
 
-  <target name="starr.removesha1" depends="starr.src">
-    <!-- remove SHA1 files for no starr, so we don't loose artifacts. -->
-    <delete>
-      <fileset dir="${lib.dir}">
-        <include name="scala-compiler.jar.desired.sha1"/>
-        <include name="scala-reflect.jar.desired.sha1"/>
-        <include name="scala-library.jar.desired.sha1"/>
-        <include name="scala-library-src.jar.desired.sha1"/>
-        <include name="scala-reflect-src.jar.desired.sha1"/>
-        <include name="scala-compiler-src.jar.desired.sha1"/>
-      </fileset>
-    </delete>
+    <if><contains string="${maven.version.number}" substring="-SNAPSHOT"/><then>
+      <property name="remote.repository" value="${remote.snapshot.repository}"/>
+      <property name="local.repository"  value="${local.snapshot.repository}"/>
+    </then><else>
+      <property name="remote.repository" value="${remote.release.repository}"/>
+      <property name="local.repository"  value="${local.release.repository}"/>
+    </else></if>
   </target>
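For example (version values hypothetical): with maven.version.number=2.11.1-SNAPSHOT the deploy targets below publish to https://oss.sonatype.org/content/repositories/snapshots, whereas a release-style maven.version.number=2.11.0 selects the staging repository at https://oss.sonatype.org/service/local/staging/deploy/maven2; the local repository is ${user.home}/.m2/repository in both cases.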
 
-  <target name="starr.done" depends="starr.jars, starr.removesha1"/>
-
-  <target name="replacestarr" description="Replaces the Starr compiler and library by fresh ones built from current sources and tests them.">
-    <fail message="This target is not available on Windows. Use 'ant replacestarrwin' instead.">
-      <condition>
-        <os family="windows"/>
-      </condition>
-    </fail>
-    <!-- needs antcall to enforce ordering -->
-    <antcall target="locker.clean"/>
-    <antcall target="pack.done"/>
-    <antcall target="starr.done"/>
-    <antcall target="locker.clean"/>
-    <antcall target="test.done"/>
+  <target name="publish"        depends="pack-maven.done, init.maven" description="Publishes unsigned artifacts to the maven repo.">
+    <deploy />
+    <deploy-pom name="scala-library-all"/>
+    <deploy-jar name="scala-dist"/>
   </target>
 
-  <target name="replacestarr-opt" description="Replaces the Starr compiler and library by fresh, optimised ones built from current sources and tests them.">
-    <optimized name="replacestarr"/></target>
-
-  <!-- Ant on Windows is not able to delete jar files that are referenced in any <path>.
-       See ticket 1290 on trac. -->
-  <target name="replacestarrwin" description="Creates a new Starr on Windows. Manually execute 'ant locker.clean build' first!">
-    <fail message="This target is only available on Windows. Use 'ant replacestarr' instead.">
-      <condition>
-        <not><os family="windows"/></not>
-      </condition>
-    </fail>
-    <echo message="CAUTION: Make sure to execute 'ant locker.clean build' prior to calling 'replacestarrwin'."/>
-    <antcall target="starr.done"/>
-    <antcall target="locker.clean"/>
-    <antcall target="test.done"/>
+  <target name="publish.local"  depends="pack-maven.done, init.maven" description="Publishes unsigned artifacts to the local maven repo.">
+    <deploy local="true"/>
+    <deploy-pom name="scala-library-all" local="true"/>
+    <deploy-jar name="scala-dist" local="true"/>
   </target>
 
-  <target name="replacestarrwin-opt" description="Creates a new Starr on Windows. Manually execute 'ant locker.clean build' first!">
-    <optimized name="replacestarrwin"/></target>
-
-  <target name="replacelocker" description="Replaces the Locker compiler and library by fresh ones built from current sources."
-    depends="palo.clean, locker.unlock, palo.done"/>
-
-  <target name="replacelocker-opt" description="Replaces the Locker compiler and library by fresh, optimised ones built from current sources.">
-    <optimized name="replacelocker"/></target>
-
-  <target name="buildlocker" description="Does the same for locker as build does for quick." depends="locker.unlock, palo.bin"/>
-  <target name="unlocklocker" description="Same as buildlocker." depends="buildlocker"/> <!-- REMOVE -->
-
-  <target name="fastlocker.lib" description="Buildlocker without extra fuss" depends="locker.unlock, locker.lib">
-    <property name="fastlocker" value="true"/>
+  <target name="publish.signed" depends="pack-maven.done, init.maven" description="Publishes signed artifacts to the remote maven repo.">
+    <deploy signed="true"/>
+    <deploy-pom name="scala-library-all" signed="true"/>
+    <deploy-jar name="scala-dist" signed="true"/>
   </target>
 
-  <target name="fastlocker.reflect" description="Buildlocker without extra fuss" depends="locker.unlock, locker.reflect">
-    <property name="fastlocker" value="true"/>
+  <target name="publish-core"   depends="pack-maven.core, init.maven">
+    <deploy-one name="scala-compiler" />
+    <deploy-one name="scala-library"  />
+    <deploy-one name="scala-reflect"  />
   </target>
 
-  <target name="fastlocker.comp" description="Buildlocker without extra fuss" depends="locker.unlock, locker.comp">
-    <property name="fastlocker" value="true"/>
+  <target name="publish-core-local" depends="pack-maven.core, init.maven">
+    <deploy-one name="scala-compiler" local="true"/>
+    <deploy-one name="scala-library"  local="true"/>
+    <deploy-one name="scala-reflect"  local="true"/>
   </target>
 
-  <target name="fastlocker" description="Buildlocker without extra fuss" depends="fastlocker.comp"/>
+  <target name="publish-core-opt" description="Builds an untested optimised core (library/reflect/compiler) and publishes to maven.">
+    <optimized name="publish-core"/>
+  </target>
 
 <!-- ===========================================================================
                                   VISUALIZATION
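The rewritten publishing logic above selects the deployment repository from the Maven version string: when ${maven.version.number} contains -SNAPSHOT, remote.repository and local.repository point at the snapshot repositories, otherwise at the release ones, and the publish / publish.local / publish.signed targets then deploy the packed artifacts accordingly. A minimal Scala sketch of that selection rule, using placeholder URLs rather than the build's actual repository properties:

    object RepoSelect {
      // Sketch only: mirrors the Ant <if>/<contains> check above.
      // The URLs are placeholders, not the repositories the Scala build deploys to.
      def selectRepository(mavenVersionNumber: String): String =
        if (mavenVersionNumber.contains("-SNAPSHOT")) "https://example.org/maven-snapshots"
        else "https://example.org/maven-releases"

      def main(args: Array[String]): Unit = {
        println(selectRepository("2.11.0-SNAPSHOT")) // -> snapshot repository
        println(selectRepository("2.11.0"))          // -> release repository
      }
    }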
diff --git a/dbuild-meta.json b/dbuild-meta.json
new file mode 100644
index 0000000..90d0104
--- /dev/null
+++ b/dbuild-meta.json
@@ -0,0 +1,100 @@
+{
+    "version": "2.11.0",
+    "subproj": [],
+    "projects": [
+        {
+            "artifacts": [
+                {
+                    "extension": "jar",
+                    "name": "scala-library",
+                    "organization": "org.scala-lang"
+                }
+            ],
+            "dependencies": [],
+            "name": "scala-library",
+            "organization": "org.scala-lang"
+        },
+        {
+            "artifacts": [
+                {
+                    "extension": "jar",
+                    "name": "scala-reflect",
+                    "organization": "org.scala-lang"
+                }
+            ],
+            "dependencies": [
+                {
+                    "extension": "jar",
+                    "name": "scala-library",
+                    "organization": "org.scala-lang"
+                }
+            ],
+            "name": "scala-reflect",
+            "organization": "org.scala-lang"
+        },
+        {
+            "artifacts": [
+                {
+                    "extension": "jar",
+                    "name": "scala-compiler",
+                    "organization": "org.scala-lang"
+                }
+            ],
+            "dependencies": [
+                {
+                    "extension": "jar",
+                    "name": "scala-reflect",
+                    "organization": "org.scala-lang"
+                },
+                {
+                    "extension": "jar",
+                    "name": "scala-xml",
+                    "organization": "org.scala-lang.modules"
+                },
+                {
+                    "extension": "jar",
+                    "name": "scala-parser-combinators",
+                    "organization": "org.scala-lang.modules"
+                }
+            ],
+            "name": "scala-compiler",
+            "organization": "org.scala-lang"
+        },
+        {
+            "artifacts": [
+                {
+                    "extension": "jar",
+                    "name": "scala-actors",
+                    "organization": "org.scala-lang"
+                }
+            ],
+            "dependencies": [
+                {
+                    "extension": "jar",
+                    "name": "scala-library",
+                    "organization": "org.scala-lang"
+                }
+            ],
+            "name": "scala-actors",
+            "organization": "org.scala-lang"
+        },
+        {
+            "artifacts": [
+                {
+                    "extension": "jar",
+                    "name": "scalap",
+                    "organization": "org.scala-lang"
+                }
+            ],
+            "dependencies": [
+                {
+                    "extension": "jar",
+                    "name": "scala-compiler",
+                    "organization": "org.scala-lang"
+                }
+            ],
+            "name": "scalap",
+            "organization": "org.scala-lang"
+        }
+    ]
+}
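The new dbuild-meta.json records, for each core module, the artifacts it publishes and the artifacts it depends on: scala-reflect depends on scala-library, scala-compiler on scala-reflect plus the externalized scala-xml and scala-parser-combinators modules, and scala-actors and scalap hang off the library and compiler respectively. A hypothetical Scala model of that shape, purely illustrative and not the types dbuild itself defines:

    // Illustrative only: case classes mirroring the structure of dbuild-meta.json.
    case class ArtifactRef(organization: String, name: String, extension: String)
    case class ProjectMeta(organization: String,
                           name: String,
                           artifacts: List[ArtifactRef],
                           dependencies: List[ArtifactRef])
    case class BuildMeta(version: String, subproj: List[String], projects: List[ProjectMeta])

    object DbuildMetaExample {
      // The scala-reflect entry above, expressed with the hypothetical model:
      val scalaReflect = ProjectMeta(
        organization = "org.scala-lang",
        name         = "scala-reflect",
        artifacts    = List(ArtifactRef("org.scala-lang", "scala-reflect", "jar")),
        dependencies = List(ArtifactRef("org.scala-lang", "scala-library", "jar"))
      )
    }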
diff --git a/doc/LICENSE.md b/doc/LICENSE.md
new file mode 100644
index 0000000..6b039af
--- /dev/null
+++ b/doc/LICENSE.md
@@ -0,0 +1,68 @@
+Scala is licensed under the [BSD 3-Clause License](http://opensource.org/licenses/BSD-3-Clause).
+
+## Scala License
+
+Copyright (c) 2002-2013 EPFL
+
+Copyright (c) 2011-2013 Typesafe, Inc.
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+  * Redistributions of source code must retain the above copyright notice,
+    this list of conditions and the following disclaimer.
+  * Redistributions in binary form must reproduce the above copyright notice,
+    this list of conditions and the following disclaimer in the documentation
+    and/or other materials provided with the distribution.
+  * Neither the name of the EPFL nor the names of its contributors
+    may be used to endorse or promote products derived from this software
+    without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Other Licenses
+
+This software includes projects with the following licenses,
+which are also included in the `licenses/` directory:
+
+### [Apache License](http://www.apache.org/licenses/LICENSE-2.0.html)
+This license is used by the following third-party libraries:
+
+  * jansi
+
+### [BSD License](http://www.opensource.org/licenses/bsd-license.php)
+This license is used by the following third-party libraries:
+
+  * jline
+
+### [BSD 3-Clause License](http://opensource.org/licenses/BSD-3-Clause)
+This license is used by the following third-party libraries:
+
+  * asm
+
+### [MIT License](http://www.opensource.org/licenses/MIT)
+This license is used by the following third-party libraries:
+
+  * jquery
+  * jquery-ui
+  * jquery-layout
+  * sizzle
+  * tools tooltip
+
+### Public Domain
+The following libraries are freely available in the public domain:
+
+  * forkjoin
+
diff --git a/doc/License.rtf b/doc/License.rtf
new file mode 100644
index 0000000..62ec2d0
--- /dev/null
+++ b/doc/License.rtf
@@ -0,0 +1,65 @@
+{\rtf1\ansi\ansicpg1252\cocoartf1187\cocoasubrtf400
+{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\margl1440\margr1440\vieww25140\viewh18960\viewkind0
+\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural
+
+\f0\fs26 \cf0 Scala is licensed under the {\field{\*\fldinst{HYPERLINK "http://opensource.org/licenses/BSD-3-Clause"}}{\fldrslt BSD 3-Clause License}}.\
+\
+
+\fs48 Scala License
+\fs40 \
+
+\fs26 Copyright (c) 2002-2013 EPFL\
+Copyright (c) 2011-2013 Typesafe, Inc.\
+All rights reserved.\
+\
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:\
+	\'95	Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.\
+	\'95	Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.\
+	\'95	Neither the name of the EPFL nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.\
+\
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \'93AS IS\'94 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR [...]
+
+\fs52 \
+
+\fs48 Other Licenses
+\fs52 \
+
+\fs26 This software includes projects with the following licenses, which are also included in the 
+\fs24 licenses/
+\fs26  directory:\
+
+\fs30 \
+{\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/LICENSE-2.0.html"}}{\fldrslt Apache License}}\
+
+\fs26 This license is used by the following third-party libraries:\
+	\'95	jansi\
+
+\fs30 \
+{\field{\*\fldinst{HYPERLINK "http://www.opensource.org/licenses/bsd-license.php"}}{\fldrslt BSD License}}\
+
+\fs26 This license is used by the following third-party libraries:\
+	\'95	jline\
+
+\fs30 \
+{\field{\*\fldinst{HYPERLINK "http://opensource.org/licenses/BSD-3-Clause"}}{\fldrslt BSD 3-Clause License}}\
+
+\fs26 This license is used by the following third-party libraries:\
+	\'95	asm\
+
+\fs30 \
+{\field{\*\fldinst{HYPERLINK "http://www.opensource.org/licenses/MIT"}}{\fldrslt MIT License}}\
+
+\fs26 This license is used by the following third-party libraries:\
+	\'95	jquery\
+	\'95	jquery-ui\
+	\'95	jquery-layout\
+	\'95	sizzle\
+	\'95	tools tooltip\
+
+\fs30 \
+Public Domain\
+
+\fs26 The following libraries are freely available in the public domain:\
+	\'95	forkjoin}
\ No newline at end of file
diff --git a/doc/README b/doc/README
new file mode 100644
index 0000000..29f64c9
--- /dev/null
+++ b/doc/README
@@ -0,0 +1,36 @@
+Scala Distribution
+------------------
+
+The Scala distribution requires Java 1.6 or above.
+
+Please report bugs at https://issues.scala-lang.org/.
+We welcome contributions at https://github.com/scala/scala!
+
+Scala Tools
+-----------
+
+- scala       Scala interactive interpreter
+- scalac      Scala compiler
+- fsc         Scala resident compiler
+- scaladoc    Scala API documentation generator
+- scalap      Scala classfile decoder
+
+Run the command "scalac -help" to display the list of available
+compiler options.
+
+
+Installation
+------------
+
+Decompress the archive and run the above commands directly from the `bin` directory.
+We recommend adding the full path of the `bin` directory to the `PATH`
+environment variable.
+
+
+Licenses
+--------
+
+Scala is licensed under the standard 3-clause BSD license,
+included in the distribution as the file `doc/LICENSE.md`.
+The licenses of the software included in the Scala distribution can
+be found in the `doc/licenses` directory.
\ No newline at end of file
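The new doc/README describes the basic workflow: put the distribution's bin directory on the PATH, then compile with scalac and run with scala. A minimal illustration of that workflow; the file name Hello.scala and the object it defines are made up for the example and are not part of the distribution:

    // Hello.scala -- hypothetical example.
    // Compile with:  scalac Hello.scala
    // Run with:      scala Hello
    object Hello {
      def main(args: Array[String]): Unit =
        println("Hello from Scala 2.11")
    }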
diff --git a/docs/licenses/apache_jansi.txt b/doc/licenses/apache_jansi.txt
similarity index 100%
rename from docs/licenses/apache_jansi.txt
rename to doc/licenses/apache_jansi.txt
diff --git a/docs/licenses/bsd_asm.txt b/doc/licenses/bsd_asm.txt
similarity index 100%
rename from docs/licenses/bsd_asm.txt
rename to doc/licenses/bsd_asm.txt
diff --git a/docs/licenses/bsd_jline.txt b/doc/licenses/bsd_jline.txt
similarity index 100%
rename from docs/licenses/bsd_jline.txt
rename to doc/licenses/bsd_jline.txt
diff --git a/docs/licenses/mit_jquery-layout.txt b/doc/licenses/mit_jquery-layout.txt
similarity index 100%
rename from docs/licenses/mit_jquery-layout.txt
rename to doc/licenses/mit_jquery-layout.txt
diff --git a/docs/licenses/mit_jquery-ui.txt b/doc/licenses/mit_jquery-ui.txt
similarity index 100%
rename from docs/licenses/mit_jquery-ui.txt
rename to doc/licenses/mit_jquery-ui.txt
diff --git a/docs/licenses/mit_jquery.txt b/doc/licenses/mit_jquery.txt
similarity index 100%
rename from docs/licenses/mit_jquery.txt
rename to doc/licenses/mit_jquery.txt
diff --git a/docs/licenses/mit_sizzle.txt b/doc/licenses/mit_sizzle.txt
similarity index 100%
rename from docs/licenses/mit_sizzle.txt
rename to doc/licenses/mit_sizzle.txt
diff --git a/docs/licenses/mit_tools.tooltip.txt b/doc/licenses/mit_tools.tooltip.txt
similarity index 100%
rename from docs/licenses/mit_tools.tooltip.txt
rename to doc/licenses/mit_tools.tooltip.txt
diff --git a/docs/LICENSE b/docs/LICENSE
deleted file mode 100644
index 4daedef..0000000
--- a/docs/LICENSE
+++ /dev/null
@@ -1,63 +0,0 @@
-Scala is licensed under the [BSD 3-Clause License](http://opensource.org/licenses/BSD-3-Clause).
-
-## Scala License
-
-Copyright (c) 2002-2013 EPFL
-
-Copyright (c) 2011-2013 Typesafe, Inc.
-
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification,
-are permitted provided that the following conditions are met:
-
-    * Redistributions of source code must retain the above copyright notice,
-      this list of conditions and the following disclaimer.
-    * Redistributions in binary form must reproduce the above copyright notice,
-      this list of conditions and the following disclaimer in the documentation
-      and/or other materials provided with the distribution.
-    * Neither the name of the EPFL nor the names of its contributors
-      may be used to endorse or promote products derived from this software
-      without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# Other Licenses
-
-This software includes projects with the following licenses,
-which are also included in the `licenses/` directory:
-
-### [Apache License](http://www.apache.org/licenses/LICENSE-2.0.html)
-This license is used by the following third-party libraries:
-  * jansi
-
-### [BSD License](http://www.opensource.org/licenses/bsd-license.php)
-This license is used by the following third-party libraries:
-  * jline
-
-### [BSD 3-Clause License](http://opensource.org/licenses/BSD-3-Clause)
-This license is used by the following third-party libraries:
-  * asm
-
-### [MIT License](http://www.opensource.org/licenses/MIT)
-This license is used by the following third-party libraries:
-  * jquery
-  * jquery-ui
-  * jquery-layout
-  * sizzle
-  * tools tooltip
-
-### Public Domain
-The following libraries are freely available in the public domain:
-  * forkjoin
-
diff --git a/docs/README b/docs/README
deleted file mode 100644
index 1d5f553..0000000
--- a/docs/README
+++ /dev/null
@@ -1,36 +0,0 @@
-Scala Distribution
-------------------
-
-The Scala distribution requires Java 1.6 or above.
-
-Please report bugs at https://issues.scala-lang.org/.
-We welcome contributions at https://github.com/scala/scala!
-
-Scala Tools
------------
-
-- scala       Scala interactive interpreter
-- scalac      Scala compiler
-- fsc         Scala resident compiler
-- scaladoc    Scala API documentation generator
-- scalap      Scala classfile decoder 
-
-Run the command "scalac -help" to display the list of available
-compiler options.
-
-
-Installation
-------------
-
-Decompress the archive and run the above commands directly from `bin` directory.
-We recommend adding the full path of the `bin` directory to the `PATH`
-environment variable.
-
-
-Licenses
---------
-
-Scala is licensed under the standard 3-clause BSD license,
-included in the distribution as the file `doc/LICENSE`.
-The licenses of the software included in the Scala distribution can
-be found in the `doc/licenses` directory.
\ No newline at end of file
diff --git a/docs/examples/swing/ColorChooserDemo.scala b/docs/examples/swing/ColorChooserDemo.scala
new file mode 100644
index 0000000..1cb2bde
--- /dev/null
+++ b/docs/examples/swing/ColorChooserDemo.scala
@@ -0,0 +1,61 @@
+package examples.swing
+
+import java.awt.{Color, Font, Dimension}
+import swing._
+import event._
+import Swing._
+import BorderPanel._
+
+/**
+ * Demo for ColorChooser.
+ * Based on http://download.oracle.com/javase/tutorial/uiswing/components/colorchooser.html
+ * 
+ * @author andy at hicks.net
+ */
+object ColorChooserDemo extends SimpleSwingApplication {
+  def top = new MainFrame {
+    title = "ColorChooser Demo"
+    size = new Dimension(400, 400)
+    
+    contents = ui
+  }
+
+  def ui = new BorderPanel {
+    val colorChooser = new ColorChooser {
+      reactions += {
+        case ColorChanged(_, c) =>
+          banner.foreground = c
+      }
+    }
+
+    colorChooser.border = TitledBorder(EtchedBorder, "Choose Text Color")
+    
+    val banner = new Label("Welcome to Scala Swing") {
+      horizontalAlignment = Alignment.Center
+      foreground = Color.yellow
+      background = Color.blue
+      opaque = true
+      font = new Font("SansSerif", Font.BOLD, 24)
+    }
+   
+    val bannerArea = new BorderPanel {
+      layout(banner) = Position.Center
+      border = TitledBorder(EtchedBorder, "Banner")
+    }
+    
+    // Display a color selection dialog when button pressed 
+    val selectColor = new Button("Choose Background Color") {
+      reactions += {
+        case ButtonClicked(_) =>
+          ColorChooser.showDialog(this, "Test", Color.red) match {
+            case Some(c) => banner.background = c
+            case None =>
+          }
+      }
+    }
+
+    layout(bannerArea) = Position.North
+    layout(colorChooser) = Position.Center
+    layout(selectColor) = Position.South
+  }
+}
\ No newline at end of file
diff --git a/docs/examples/swing/PopupDemo.scala b/docs/examples/swing/PopupDemo.scala
new file mode 100644
index 0000000..6a9eeb1
--- /dev/null
+++ b/docs/examples/swing/PopupDemo.scala
@@ -0,0 +1,33 @@
+package examples.swing
+
+import swing._
+import event._
+import Swing._
+
+/**
+ * @author John Sullivan
+ * @author Ingo Maier
+ */
+object PopupDemo extends SimpleSwingApplication {
+  def top = new MainFrame {
+    val popupMenu = new PopupMenu {
+      contents += new Menu("menu 1") {
+        contents += new RadioMenuItem("radio 1.1")
+        contents += new RadioMenuItem("radio 1.2")
+      }
+      contents += new Menu("menu 2") {
+        contents += new RadioMenuItem("radio 2.1")
+        contents += new RadioMenuItem("radio 2.2")
+      }
+    }
+    val button = new Button("Show Popup Menu")
+    reactions += {
+      case ButtonClicked(b) => popupMenu.show(b, 0, b.bounds.height)
+      case PopupMenuCanceled(m) => println("Menu " + m + " canceled.")
+    }
+    listenTo(popupMenu)
+    listenTo(button)
+
+    contents = new FlowPanel(button)
+  }
+}
\ No newline at end of file
diff --git a/lib/ant/ant-contrib.jar.desired.sha1 b/lib/ant/ant-contrib.jar.desired.sha1
new file mode 100644
index 0000000..65bcd12
--- /dev/null
+++ b/lib/ant/ant-contrib.jar.desired.sha1
@@ -0,0 +1 @@
+943cd5c8802b2a3a64a010efb86ec19bac142e40 *ant-contrib.jar
diff --git a/lib/ant/ant-dotnet-1.0.jar.desired.sha1 b/lib/ant/ant-dotnet-1.0.jar.desired.sha1
new file mode 100644
index 0000000..d8b6a1c
--- /dev/null
+++ b/lib/ant/ant-dotnet-1.0.jar.desired.sha1
@@ -0,0 +1 @@
+3fc1e35ca8c991fc3488548f7a276bd9053c179d *ant-dotnet-1.0.jar
diff --git a/lib/ant/ant.jar.desired.sha1 b/lib/ant/ant.jar.desired.sha1
new file mode 100644
index 0000000..bcb610d
--- /dev/null
+++ b/lib/ant/ant.jar.desired.sha1
@@ -0,0 +1 @@
+7b456ca6b93900f96e58cc8371f03d90a9c1c8d1 *ant.jar
diff --git a/lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1 b/lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1
new file mode 100644
index 0000000..53f87c3
--- /dev/null
+++ b/lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1
@@ -0,0 +1 @@
+7e50e3e227d834695f1e0bf018a7326e06ee4c86 *maven-ant-tasks-2.1.1.jar
diff --git a/lib/ant/vizant.jar.desired.sha1 b/lib/ant/vizant.jar.desired.sha1
new file mode 100644
index 0000000..998da46
--- /dev/null
+++ b/lib/ant/vizant.jar.desired.sha1
@@ -0,0 +1 @@
+2c61d6e9a912b3253194d5d6d3e1db7e2545ac4b *vizant.jar
diff --git a/lib/fjbg.jar.desired.sha1 b/lib/fjbg.jar.desired.sha1
deleted file mode 100644
index 6f3ccc7..0000000
--- a/lib/fjbg.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8acc87f222210b4a5eb2675477602fc1759e7684 *fjbg.jar
diff --git a/lib/jline.jar.desired.sha1 b/lib/jline.jar.desired.sha1
deleted file mode 100644
index b042613..0000000
--- a/lib/jline.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a5261e70728c1847639e2b47d953441d0b217bcb *jline.jar
diff --git a/lib/msil.jar.desired.sha1 b/lib/msil.jar.desired.sha1
deleted file mode 100644
index 9396b27..0000000
--- a/lib/msil.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d48cb950ceded82a5e0ffae8ef2c68d0923ed00c *msil.jar
diff --git a/lib/scala-compiler-src.jar.desired.sha1 b/lib/scala-compiler-src.jar.desired.sha1
deleted file mode 100644
index 082d86f..0000000
--- a/lib/scala-compiler-src.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-cfa3ee21f76cd5c115bd3bc070a3b401587bafb5 ?scala-compiler-src.jar
diff --git a/lib/scala-compiler.jar.desired.sha1 b/lib/scala-compiler.jar.desired.sha1
deleted file mode 100644
index bb39b4d..0000000
--- a/lib/scala-compiler.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d54b99f215d4d42b3f0b3489fbb1081270700992 ?scala-compiler.jar
diff --git a/lib/scala-library-src.jar.desired.sha1 b/lib/scala-library-src.jar.desired.sha1
deleted file mode 100644
index cd42c23..0000000
--- a/lib/scala-library-src.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8bdac1cdd60b73ff7e12fd2b556355fa10343e2d ?scala-library-src.jar
diff --git a/lib/scala-library.jar.desired.sha1 b/lib/scala-library.jar.desired.sha1
deleted file mode 100644
index 6bdeaa9..0000000
--- a/lib/scala-library.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1e0e39fae15b42e85998740511ec5a3830e26243 ?scala-library.jar
diff --git a/lib/scala-reflect-src.jar.desired.sha1 b/lib/scala-reflect-src.jar.desired.sha1
deleted file mode 100644
index d630c93..0000000
--- a/lib/scala-reflect-src.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d229f4c91ea8ab1a81559b5803efd9b0b1632f0b ?scala-reflect-src.jar
diff --git a/lib/scala-reflect.jar.desired.sha1 b/lib/scala-reflect.jar.desired.sha1
deleted file mode 100644
index a5d6701..0000000
--- a/lib/scala-reflect.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-288f47dbe1002653e030fd25ca500b9ffe1ebd64 ?scala-reflect.jar
diff --git a/project/Build.scala b/project/Build.scala
deleted file mode 100644
index a50a572..0000000
--- a/project/Build.scala
+++ /dev/null
@@ -1,336 +0,0 @@
-import sbt._
-import Keys._
-import partest._
-import ScalaBuildKeys._
-import Release._
-
-
-object ScalaBuild extends Build with Layers with Packaging with Testing {
-
-  // Build wide settings:
-  override lazy val settings = super.settings ++ Versions.settings ++ Seq(
-    autoScalaLibrary := false,
-    resolvers += Resolver.url(
-      "Typesafe nightlies", 
-      url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/")
-    )(Resolver.ivyStylePatterns),
-    resolvers ++= Seq(
-      "junit interface repo" at "https://repository.jboss.org/nexus/content/repositories/scala-tools-releases",
-      ScalaToolsSnapshots
-    ),
-    organization := "org.scala-lang",
-    version <<= Versions.mavenVersion,
-    pomExtra := epflPomExtra
-  ) 
-
-  // Collections of projects to run 'compile' on.
-  lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, swing, forkjoin, fjbg)
-  // Collection of projects to 'package' and 'publish' together.
-  lazy val packagedBinaryProjects = Seq(scalaLibrary, scalaCompiler, swing, actors, continuationsPlugin, jline, scalap)
-  lazy val partestRunProjects = Seq(testsuite, continuationsTestsuite)
-  
-  private def epflPomExtra = (
-    <xml:group>
-      <inceptionYear>2002</inceptionYear>
-      <licenses>
-        <license>
-          <name>BSD-like</name>
-          <url>http://www.scala-lang.org/downloads/license.html</url>
-        </license>
-      </licenses>
-      <scm>
-        <connection>scm:git:git://github.com/scala/scala.git</connection>
-      </scm>
-      <issueManagement>
-        <system>jira</system>
-        <url>http://issues.scala-lang.org</url>
-      </issueManagement>
-    </xml:group>
-  )
-    
-  // Settings used to make sure publishing goes smoothly.
-  def publishSettings: Seq[Setting[_]] = Seq(
-    ivyScala ~= ((is: Option[IvyScala]) => is.map(_.copy(checkExplicit = false))),
-    pomIncludeRepository := (_ => false),
-    publishMavenStyle := true,
-    makePomConfiguration <<= makePomConfiguration apply (_.copy(configurations = Some(Seq(Compile, Default)))),
-    pomExtra := epflPomExtra
-  )
-
-  // Settings for root project.  These are aggregate tasks against the rest of the build.
-  def projectSettings: Seq[Setting[_]] = publishSettings ++ Seq(
-    doc in Compile <<= (doc in documentation in Compile).identity,
-    // These next two aggregate commands on several projects and return results that are to be ignored by remaining tasks.
-    compile in Compile <<= compiledProjects.map(p => compile in p in Compile).join.map(_.head),
-    // TODO - just clean target? i.e. target map IO.deleteRecursively
-    clean <<= (compiledProjects ++ partestRunProjects).map(p => clean in p).dependOn,
-    packageBin in Compile <<= packagedBinaryProjects.map(p => packageBin in p in Compile).join.map(_.head),
-    // TODO - Make sure scalaLibrary has packageDoc + packageSrc from documentation attached...
-    publish <<= packagedBinaryProjects.map(p => publish in p).join.map(_.head),
-    publishLocal <<= packagedBinaryProjects.map(p => publishLocal in p).join.map(_.head),
-    packageDoc in Compile <<= (packageDoc in documentation in Compile).identity,
-    packageSrc in Compile <<= (packageSrc in documentation in Compile).identity,
-    test in Test <<= (runPartest in testsuite, runPartest in continuationsTestsuite, checkSame in testsuite) map { (a,b,c) => () },
-    lockerLock <<= (lockFile in lockerLib, lockFile in lockerComp, compile in Compile in lockerLib, compile in Compile in lockerComp) map { (lib, comp, _, _) =>
-      Seq(lib,comp).foreach(f => IO.touch(f))
-    },
-    lockerUnlock <<= (lockFile in lockerLib, lockFile in lockerComp) map { (lib, comp) =>
-      Seq(lib,comp).foreach(IO.delete)
-    },
-    genBinQuick <<= (genBinQuick in scaladist).identity,
-    makeDist <<= (makeDist in scaladist).identity,
-    makeExplodedDist <<= (makeExplodedDist in scaladist).identity,
-    // Note: We override unmanagedSources so that ~ compile will look at all these sources, then run our aggregated compile...
-    unmanagedSourceDirectories in Compile <<= baseDirectory apply (_ / "src") apply { dir =>
-      Seq("library/scala","actors","compiler","fjbg","swing","continuations/library","forkjoin") map (dir / _)
-    },
-    // TODO - Make exported products == makeDist so we can use this when creating a *real* distribution.
-    commands += Release.pushStarr
-  )
-  // Note: Root project is determined by lowest-alphabetical project that has baseDirectory as file(".").  we use aaa_ to 'win'.
-  lazy val aaa_root = Project("scala", file(".")) settings(projectSettings: _*) settings(ShaResolve.settings: _*)
-
-  // External dependencies used for various projects
-  lazy val externalDeps: Setting[_] = libraryDependencies <<= (sbtVersion)(v => 
-    Seq(
-      "org.apache.ant" % "ant" % "1.8.2",
-      "org.scala-sbt" % "compiler-interface" % v % "provided"
-    )
-  )
-
-  def fixArtifactSrc(dir: File, name: String) = name match {
-    case x if x startsWith "scala-" => dir / "src" / (name drop 6)
-    case x                          => dir / "src" / name
-  }
-
-  // These are setting overrides for most artifacts in the Scala build file.
-  def settingOverrides: Seq[Setting[_]] = publishSettings ++ Seq(
-    crossPaths := false,
-    autoScalaLibrary := false,
-    // Work around a bug where scala-library (and forkjoin) is put on classpath for analysis.
-    classpathOptions := ClasspathOptions.manual,
-    publishArtifact in packageDoc := false,
-    publishArtifact in packageSrc := false,
-    target <<= (baseDirectory, name) apply (_ / "target" / _),
-    (classDirectory in Compile) <<= target(_ / "classes"),
-    javacOptions ++= Seq("-target", "1.5", "-source", "1.5"),
-    scalaSource in Compile <<= (baseDirectory, name) apply fixArtifactSrc,
-    javaSource in Compile <<= (baseDirectory, name) apply fixArtifactSrc,
-    unmanagedJars in Compile := Seq(),
-    // Most libs in the compiler use this order to build.
-    compileOrder in Compile := CompileOrder.JavaThenScala,
-    lockFile <<= target(_ / "compile.lock"),
-    skip in Compile <<= lockFile map (_.exists),
-    lock <<= lockFile map (f => IO.touch(f)),
-    unlock <<= lockFile map IO.delete
-  )
-
-  // --------------------------------------------------------------
-  //  Libraries used by Scalac that change infrequently
-  //  (or hopefully so).
-  // --------------------------------------------------------------
-
-  // Jline nested project.   Compile this sucker once and be done.
-  lazy val jline = Project("jline", file("src/jline"))
-  // Fast Java Bytecode Generator (nested in every scala-compiler.jar)
-  lazy val fjbg = Project("fjbg", file(".")) settings(settingOverrides : _*)
-  // Our wrapped version of msil.
-  lazy val asm = Project("asm", file(".")) settings(settingOverrides : _*)
-  // Forkjoin backport
-  lazy val forkjoin = Project("forkjoin", file(".")) settings(settingOverrides : _*)
-
-  // --------------------------------------------------------------
-  //  The magic kingdom.
-  //  Layered compilation of Scala.
-  //   Stable Reference -> Locker ('Lockable' dev version) -> Quick -> Strap (Binary compatibility testing)
-  // --------------------------------------------------------------
-
-  // Need a report on this...
-  // TODO - Resolve STARR from a repo..
-  lazy val STARR = scalaInstance <<= (appConfiguration, ShaResolve.pullBinaryLibs in ThisBuild) map { (app, _) =>
-    val launcher = app.provider.scalaProvider.launcher
-    val library  = file("lib/scala-library.jar")
-    val compiler = file("lib/scala-compiler.jar")
-    val libJars  = (file("lib") * "*.jar").get filterNot Set(library, compiler)
-    ScalaInstance("starr", library, compiler, launcher, libJars: _*)
-  }
-
-  // Locker is a lockable Scala compiler that can be built of 'current' source to perform rapid development.
-  lazy val (lockerLib, lockerReflect, lockerComp) = makeLayer("locker", STARR, autoLock = true)
-  lazy val locker = Project("locker", file(".")) aggregate(lockerLib, lockerReflect, lockerComp)
-
-  // Quick is the general purpose project layer for the Scala compiler.
-  lazy val (quickLib, quickReflect, quickComp) = makeLayer("quick", makeScalaReference("locker", lockerLib, lockerReflect, lockerComp))
-  lazy val quick = Project("quick", file(".")) aggregate(quickLib, quickReflect, quickComp)
-
-  // Reference to quick scala instance.
-  lazy val quickScalaInstance = makeScalaReference("quick", quickLib, quickReflect, quickComp)
-  def quickScalaLibraryDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickLib in Compile).identity
-  def quickScalaReflectDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickReflect in Compile).identity
-  def quickScalaCompilerDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickComp in Compile).identity
-
-  // Strapp is used to test binary 'sameness' between things built with locker and things built with quick.
-  lazy val (strappLib, strappReflect, strappComp) = makeLayer("strapp", quickScalaInstance)
-
-  // --------------------------------------------------------------
-  //  Projects dependent on layered compilation (quick)
-  // --------------------------------------------------------------
-  def addCheaterDependency(projectName: String): Setting[_] = 
-    pomPostProcess <<= (version, organization, pomPostProcess) apply { (v,o,k) => 
-      val dependency: scala.xml.Node = 
-        <dependency>
-          <groupId>{o}</groupId>
-          <artifactid>{projectName}</artifactid>
-          <version>{v}</version>
-        </dependency>
-      def fixDependencies(node: scala.xml.Node): scala.xml.Node = node match {
-         case <dependencies>{nested@_*}</dependencies> => <dependencies>{dependency}{nested}</dependencies>
-         case x                                        => x
-      }
-      // This is a hack to get around issues where \ and \\ don't work if any of the children are `scala.xml.Group`.
-      def hasDependencies(root: scala.xml.Node): Boolean =
-        (root.child collectFirst {
-          case n: scala.xml.Elem if n.label == "dependencies" => n
-        } isEmpty)
-      // TODO - Keep namespace on project...
-      k andThen { 
-        case n @ <project>{ nested@_*}</project> if hasDependencies(n)   =>
-          <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{nested}<dependencies>{dependency}</dependencies></project>
-        case <project>{ nested@_*}</project>                                       =>
-          <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{ nested map fixDependencies }</project>
-      }
-    }
-
-  // TODO - in sabbus, these all use locker to build...  I think this way is better, but let's farm this idea around.
-  lazy val dependentProjectSettings = settingOverrides ++ Seq(quickScalaInstance, quickScalaLibraryDependency, addCheaterDependency("scala-library"))
-  lazy val actors = Project("scala-actors", file(".")) settings(dependentProjectSettings:_*) dependsOn(forkjoin % "provided")
-  lazy val swing = Project("scala-swing", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided")
-  // This project will generate man pages (in man1 and html) for scala.    
-  lazy val manmakerSettings: Seq[Setting[_]] = dependentProjectSettings :+ externalDeps
-  lazy val manmaker = Project("manual", file(".")) settings(manmakerSettings:_*)
-
-  // Things that compile against the compiler.
-  lazy val compilerDependentProjectSettings = dependentProjectSettings ++ Seq(quickScalaReflectDependency, quickScalaCompilerDependency, addCheaterDependency("scala-compiler"))
-
-  lazy val scalacheck = Project("scalacheck", file(".")) settings(compilerDependentProjectSettings:_*) dependsOn(actors % "provided")
-  lazy val partestSettings = compilerDependentProjectSettings :+ externalDeps
-  lazy val partest = Project("partest", file(".")) settings(partestSettings:_*)  dependsOn(actors,forkjoin,scalap,asm)
-  lazy val scalapSettings = compilerDependentProjectSettings ++ Seq(
-    name := "scalap",
-    exportJars := true
-  )
-  lazy val scalap = Project("scalap", file(".")) settings(scalapSettings:_*)
-
-  // --------------------------------------------------------------
-  //  Continuations plugin + library
-  // --------------------------------------------------------------
-  lazy val continuationsPluginSettings = compilerDependentProjectSettings ++ Seq(
-    scalaSource in Compile <<= baseDirectory(_ / "src/continuations/plugin/"),
-    resourceDirectory in Compile <<= baseDirectory(_ / "src/continuations/plugin/"),
-    exportJars := true,
-    name := "continuations"  // Note: This artifact is directly exported.
-
-  )
-  lazy val continuationsPlugin = Project("continuations-plugin", file(".")) settings(continuationsPluginSettings:_*)
-  lazy val continuationsLibrarySettings = dependentProjectSettings ++ Seq(
-    scalaSource in Compile <<= baseDirectory(_ / "src/continuations/library/"),
-    scalacOptions in Compile <++= (exportedProducts in Compile in continuationsPlugin) map { 
-     case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
-    }
-  )
-  lazy val continuationsLibrary = Project("continuations-library", file(".")) settings(continuationsLibrarySettings:_*)
-
-  // TODO - OSGi Manifest
-
-  // --------------------------------------------------------------
-  //  Real Library Artifact
-  // --------------------------------------------------------------
-  val allSubpathsCopy = (dir: File) => (dir.*** --- dir) x (relativeTo(dir)|flat)
-  def productTaskToMapping(products : Seq[File]) = products flatMap { p => allSubpathsCopy(p) }
-  lazy val packageScalaLibBinTask = Seq(quickLib, continuationsLibrary, forkjoin).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
-  lazy val scalaLibArtifactSettings: Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaLibBinTask)) ++ Seq(
-    name := "scala-library",
-    crossPaths := false,
-    exportJars := true,
-    autoScalaLibrary := false,
-    unmanagedJars in Compile := Seq(),
-    packageDoc in Compile <<= (packageDoc in documentation in Compile).identity,
-    packageSrc in Compile <<= (packageSrc in documentation in Compile).identity,
-    fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
-    quickScalaInstance,
-    target <<= (baseDirectory, name) apply (_ / "target" / _)
-  )
-  lazy val scalaLibrary = Project("scala-library", file(".")) settings(publishSettings:_*) settings(scalaLibArtifactSettings:_*)
-
-  // --------------------------------------------------------------
-  //  Real Reflect Artifact
-  // --------------------------------------------------------------
-
-  lazy val packageScalaReflect = Seq(quickReflect).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
-  lazy val scalaReflectArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaReflect)) ++ Seq(
-    name := "scala-reflect",
-    crossPaths := false,
-    exportJars := true,
-    autoScalaLibrary := false,
-    unmanagedJars in Compile := Seq(),
-    fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
-    quickScalaInstance,
-    target <<= (baseDirectory, name) apply (_ / "target" / _)
-  )
-  lazy val scalaReflect = Project("scala-reflect", file(".")) settings(publishSettings:_*) settings(scalaReflectArtifactSettings:_*) dependsOn(scalaLibrary)
-
-
-  // --------------------------------------------------------------
-  //  Real Compiler Artifact
-  // --------------------------------------------------------------
-  lazy val packageScalaBinTask = Seq(quickComp, fjbg, asm).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
-  lazy val scalaBinArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaBinTask)) ++ Seq(
-    name := "scala-compiler",
-    crossPaths := false,
-    exportJars := true,
-    autoScalaLibrary := false,
-    unmanagedJars in Compile := Seq(),
-    fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
-    quickScalaInstance,
-    target <<= (baseDirectory, name) apply (_ / "target" / _)
-  )
-  lazy val scalaCompiler = Project("scala-compiler", file(".")) settings(publishSettings:_*) settings(scalaBinArtifactSettings:_*) dependsOn(scalaReflect)
-  lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, scalaReflect, scalaCompiler)
-
-  
-  // --------------------------------------------------------------
-  //  Generating Documentation.
-  // --------------------------------------------------------------
-  
-  // TODO - Migrate this into the dist project.
-  // Scaladocs
-  lazy val documentationSettings: Seq[Setting[_]] = dependentProjectSettings ++ Seq(
-    // TODO - Make these work for realz.
-    defaultExcludes in unmanagedSources in Compile := ((".*"  - ".") || HiddenFileFilter ||
-      "reflect/Print.scala" ||
-      "reflect/Symbol.scala" ||
-      "reflect/Tree.scala" ||
-      "reflect/Type.scala" ||
-      "runtime/*$.scala" ||
-      "runtime/ScalaRuntime.scala" ||
-      "runtime/StringAdd.scala" ||
-      "scala/swing/test/*"),
-    sourceFilter in Compile := ("*.scala"),
-    unmanagedSourceDirectories in Compile <<= baseDirectory apply { dir =>
-      Seq(dir / "src" / "library" / "scala", dir / "src" / "actors", dir / "src" / "swing", dir / "src" / "continuations" / "library")
-    },
-    compile := inc.Analysis.Empty,
-    // scaladocOptions in Compile <++= (baseDirectory) map (bd =>
-    //   Seq("-sourcepath", (bd / "src" / "library").getAbsolutePath,
-    //       "-doc-no-compile", (bd / "src" / "library-aux").getAbsolutePath,
-    //       "-doc-source-url", """https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/€{FILE_PATH}.scala#L1""",
-    //       "-doc-root-content", (bd / "compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt").getAbsolutePath
-    //   )),
-    classpathOptions in Compile := ClasspathOptions.manual
-  )
-  lazy val documentation = (
-    Project("documentation", file("."))
-    settings (documentationSettings: _*)
-    dependsOn(quickLib, quickComp, actors, fjbg, forkjoin, swing, continuationsLibrary)
-  )
-}
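The removed sbt build above documents the same layered bootstrap the Ant build uses: a prebuilt STARR reference compiler builds locker from the current sources, locker builds quick, and a strapp layer built with quick is used to check binary sameness. A conceptual Scala sketch of that ordering, grounded only in the comments above and not in any actual build code:

    object BuildLayers {
      // Conceptual sketch of the layer ordering described in the comments above.
      final case class Layer(name: String, builtBy: Option[String])
      val layers = List(
        Layer("starr",  builtBy = None),           // stable reference compiler shipped as jars
        Layer("locker", builtBy = Some("starr")),  // lockable dev compiler built from current sources
        Layer("quick",  builtBy = Some("locker")), // general-purpose build layer
        Layer("strapp", builtBy = Some("quick"))   // used to check binary sameness against quick
      )
      def main(args: Array[String]): Unit =
        layers.foreach(l => println(l.name + " <- " + l.builtBy.getOrElse("(prebuilt)")))
    }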
diff --git a/project/Layers.scala b/project/Layers.scala
deleted file mode 100644
index 35cc79c..0000000
--- a/project/Layers.scala
+++ /dev/null
@@ -1,120 +0,0 @@
-import sbt._
-import Keys._
-import com.jsuereth.git.GitKeys.gitRunner
-import ScalaBuildKeys.lock
-
-/** This trait stores all the helper methods to generate layers in Scala's layered build. */
-trait Layers extends Build {
-  // TODO - Clean this up or use a self-type.
-
-  /** Default SBT overrides needed for layered compilation. */
-  def settingOverrides: Seq[Setting[_]]
-  /** Reference to the jline project */
-  def jline: Project
-  /** Reference to forkjoin library */
-  def forkjoin: Project
-  /** Reference to Fast-Java-Bytecode-Generator library */
-  def fjbg: Project
-  /** Reference to the ASM wrapped project. */
-  def asm: Project
-  /** A setting that adds some external dependencies. */
-  def externalDeps: Setting[_]
-  /** The root project. */
-  def aaa_root: Project
-
-  /** Creates a reference Scala version that can be used to build other projects.   This takes in the raw
-    * library, compiler and fjbg libraries as well as a string representing the layer name (used for compiling the compile-interface).
-    */
-  def makeScalaReference(layer: String, library: Project, reflect: Project, compiler: Project) =
-     scalaInstance <<= (appConfiguration in library,
-                        version in library,
-                        (exportedProducts in library in Compile),
-                        (exportedProducts in reflect in Compile),
-                        (exportedProducts in compiler in Compile),
-                        (exportedProducts in fjbg in Compile),
-                        (fullClasspath in jline in Runtime),
-                        (exportedProducts in asm in Runtime)) map {
-    (app, version: String, lib: Classpath, reflect: Classpath, comp: Classpath, fjbg: Classpath, jline: Classpath, asm: Classpath) =>
-      val launcher = app.provider.scalaProvider.launcher
-      (lib,comp) match {
-         case (Seq(libraryJar), Seq(compilerJar)) =>
-           ScalaInstance(
-             version + "-" + layer + "-",
-             libraryJar.data,
-             compilerJar.data,
-             launcher,
-             ((fjbg.files ++ jline.files ++ asm.files ++ reflect.files):_*))
-         case _ => error("Cannot build a ScalaReference with more than one classpath element")
-      }
-  }
-  
-  /** Creates a "layer" of Scala compilation.  That is, this will build the next version of Scala from a previous version.
-   * Returns the library project and compiler project from the next layer.
-   * Note:  The library and compiler are not *complete* in the sense that they are missing things like "actors" and "fjbg".
-   */
-  def makeLayer(layer: String, referenceScala: Setting[Task[ScalaInstance]], autoLock: Boolean = false) : (Project, Project, Project) = {
-    val autoLockSettings: Seq[Setting[_]] = 
-      if(autoLock) Seq(compile in Compile <<= (compile in Compile, lock) apply { (c, l) => 
-        c flatMapR { cResult =>
-          val result = Result.tryValue(cResult)
-          l mapR { tx => result }
-        }
-      }) 
-      else Seq.empty
-
-
-    val library = Project(layer + "-library", file("."))  settings(settingOverrides: _*) settings(autoLockSettings:_*) settings(
-      version := layer,
-      // TODO - use depends on.
-      unmanagedClasspath in Compile <<= (exportedProducts in forkjoin in Compile).identity,
-      managedClasspath in Compile := Seq(),
-      scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "library"),
-      resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "library"),   
-      defaultExcludes in unmanagedResources := ("*.scala" | "*.java" | "*.disabled"),
-      // TODO - Allow other scalac option settings.
-      scalacOptions in Compile <++= (scalaSource in Compile) map (src => Seq("-sourcepath", src.getAbsolutePath)),
-      resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("library.properties"),
-      referenceScala
-    )
-
-    // Define the reflection
-    val reflect = Project(layer + "-reflect", file(".")) settings(settingOverrides:_*) settings(autoLockSettings:_*) settings(
-      version := layer,
-      scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "reflect"),
-      resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "reflect"),
-      defaultExcludes := ("tests"),
-      defaultExcludes in unmanagedResources := "*.scala",
-      resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("reflect.properties"),
-      // TODO - Use depends on *and* SBT's magic dependency mechanisms...
-      unmanagedClasspath in Compile <<= Seq(forkjoin, library).map(exportedProducts in Compile in _).join.map(_.flatten),
-      externalDeps,
-      referenceScala
-    )
-
-    // Define the compiler
-    val compiler = Project(layer + "-compiler", file(".")) settings(settingOverrides:_*) settings(autoLockSettings:_*) settings(
-      version := layer,
-      scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "compiler"),
-      resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "compiler"),
-      unmanagedSourceDirectories in Compile <+= (baseDirectory) apply (_ / "src" / "msil"),
-      defaultExcludes := ("tests"),
-      defaultExcludes in unmanagedResources := "*.scala",
-      resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("compiler.properties"),
-      // Note, we might be able to use the default task, but for some reason ant was filtering files out.  Not sure what's up, but we'll
-      // stick with that for now.
-      unmanagedResources in Compile <<= (baseDirectory) map {
-        (bd) =>
-          val dirs = Seq(bd / "src" / "compiler")
-          dirs.descendentsExcept( ("*.xml" | "*.html" | "*.gif" | "*.png" | "*.js" | "*.css" | "*.tmpl" | "*.swf" | "*.properties" | "*.txt"),"*.scala").get
-      },
-      // TODO - Use depends on *and* SBT's magic dependency mechanisms...
-      unmanagedClasspath in Compile <<= Seq(forkjoin, library, reflect, fjbg, jline, asm).map(exportedProducts in Compile in _).join.map(_.flatten),
-      externalDeps,
-      referenceScala
-    )
-
-    // Return the generated projects.
-    (library, reflect, compiler)
-  }
-
-}
diff --git a/project/Packaging.scala b/project/Packaging.scala
deleted file mode 100644
index eb4e69f..0000000
--- a/project/Packaging.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-import sbt._
-import Keys._
-import ScalaBuildKeys._
-
-/** All the settings related to *packaging* the built scala software. */
-trait Packaging { self: ScalaBuild.type =>
-
-  // --------------------------------------------------------------
-  //  Packaging a distro
-  // --------------------------------------------------------------
-  lazy val scalaDistSettings: Seq[Setting[_]] = Seq(
-    crossPaths := false,
-    target <<= (baseDirectory, name) apply (_ / "target" / _),
-    scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
-    autoScalaLibrary := false,
-    unmanagedJars in Compile := Seq(),
-    genBinRunner <<= (fullClasspath in quickComp in Runtime) map (new ScalaToolRunner(_)),
-    binDir <<= target(_/"bin"),
-    genBin <<= genBinTask(genBinRunner, binDir, fullClasspath in Runtime, false),
-    binDir in genBinQuick <<= baseDirectory apply (_ / "target" / "bin"),
-    // Configure the classpath this way to avoid having .jar files and previous layers on the classpath.
-    fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,fjbg,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq),
-    fullClasspath in Runtime in genBinQuick <++= (fullClasspath in Compile in jline),
-    genBinQuick <<= genBinTask(genBinRunner, binDir in genBinQuick, fullClasspath in Runtime in genBinQuick, true),
-    runManmakerMan <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitManPage", "man1", ".1"),
-    runManmakerHtml <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitHtml", "doc", ".html"),
-    // TODO - We could *really* clean this up in many ways.   Let's look into making a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap)
-    // a seq of "plugin jars" (continuationsPlugin) and "binaries" (genBin) and "documentation" mappings (genBin) that this can aggregate.
-    // really need to figure out a better way to pull jline + jansi.
-    makeDistMappings <<= (genBin, 
-                          runManmakerMan,
-                          runManmakerHtml,
-                          packageBin in scalaLibrary in Compile, 
-                          packageBin in scalaCompiler in Compile,
-                          packageBin in jline in Compile,
-                          packageBin in continuationsPlugin in Compile,
-                          managedClasspath in jline in Compile,
-                          packageBin in scalap in Compile) map {
-      (binaries, man, html, lib, comp, jline, continuations, jlineDeps, scalap) =>
-        val jlineDepMap: Seq[(File, String)] = jlineDeps.map(_.data).flatMap(_ x Path.flat) map { case(a,b) => a -> ("lib/"+b) }
-        binaries ++ man ++ html ++ jlineDepMap ++ Seq(
-          lib           -> "lib/scala-library.jar",
-          comp          -> "lib/scala-compiler.jar",
-          jline         -> "lib/jline.jar",
-          continuations -> "misc/scala-devel/plugins/continuations.jar",
-          scalap        -> "lib/scalap.jar"
-        )
-    },
-    // Add in some more dependencies
-    makeDistMappings <+= (packageBin in swing in Compile) map (s => s -> "lib/scala-swing.jar"),
-    makeDistMappings <+= (packageBin in scalaReflect in Compile) map (s => s -> "lib/scala-reflect.jar"),
-    makeDist <<= (makeDistMappings, baseDirectory, streams) map { (maps, dir, s) => 
-      s.log.debug("Map = " + maps.mkString("\n")) 
-      val file = dir / "target" / "scala-dist.zip"
-      IO.zip(maps, file)
-      s.log.info("Created " + file.getAbsolutePath)
-      file
-    },
-    makeExplodedDist <<= (makeDistMappings, target, streams) map { (maps, dir, s) => 
-      def sameFile(f: File, f2: File) = f.getCanonicalPath == f2.getCanonicalPath
-      IO.createDirectory(dir)
-      IO.copy(for {
-       (file, name) <- maps
-       val file2 = dir / name
-       if !sameFile(file,file2)
-      } yield (file, file2))
-      // Hack to make binaries be executable.  TODO - Fix for JDK 5 and below...
-      maps map (_._2) filter (_ startsWith "bin/") foreach (dir / _ setExecutable true)
-      dir
-    }
-  )
-  lazy val scaladist = (
-    Project("dist", file("."))
-    settings (scalaDistSettings: _*)
-  )
-
-
-// Helpers to make a distribution
-
-  /** Generates runner scripts for distribution. */
-  def genBinTask(
-    runner: ScopedTask[ScalaToolRunner], 
-    outputDir: ScopedSetting[File], 
-    classpath: ScopedTask[Classpath], 
-    useClasspath: Boolean
-  ): Project.Initialize[sbt.Task[Seq[(File,String)]]] = {
-    (runner, outputDir, classpath, streams) map { (runner, outDir, cp, s) =>
-      IO.createDirectory(outDir)
-      val classToFilename = Seq(
-        "scala.tools.nsc.MainGenericRunner" -> "scala",
-        "scala.tools.nsc.Main"              -> "scalac",
-        "scala.tools.nsc.ScalaDoc"          -> "scaladoc",
-        "scala.tools.nsc.CompileClient"     -> "fsc",
-        "scala.tools.scalap.Main"           -> "scalap"
-      )
-      if (useClasspath) { 
-        val classpath = Build.data(cp).map(_.getCanonicalPath).distinct.mkString(",")
-        s.log.debug("Setting classpath = " + classpath)
-        runner setClasspath classpath
-      }
-      def genBinFiles(cls: String, dest: File) = {
-        runner.setClass(cls)
-        runner.setFile(dest)
-        runner.execute()
-        // TODO - Mark generated files as executable (755 or a+x) that is *not* JDK6 specific...
-        dest.setExecutable(true)
-      }
-      def makeBinMappings(cls: String, binName: String): Seq[(File,String)] = {
-        val file       = outDir / binName
-        val winBinName = binName + ".bat"
-        genBinFiles(cls, file)
-        Seq( file -> ("bin/"+binName), outDir / winBinName -> ("bin/"+winBinName) )
-      }
-      classToFilename.flatMap((makeBinMappings _).tupled)
-    }
-  }
-  /** Creates man pages for distribution. */
-  def runManmakerTask(classpath: ScopedTask[Classpath], scalaRun: ScopedTask[ScalaRun], mainClass: String, dir: String, ext: String): Project.Initialize[Task[Seq[(File,String)]]] =
-    (classpath, scalaRun, streams, target) map { (cp, runner, s, target) =>
-      val binaries = Seq("fsc", "scala", "scalac", "scaladoc", "scalap")
-      binaries map { bin =>
-        val file = target / "man" / dir / (bin + ext)
-        val classname = "scala.man1." + bin
-        IO.createDirectory(file.getParentFile)
-        toError(runner.run(mainClass, Build.data(cp), Seq(classname, file.getAbsolutePath), s.log))   
-        file -> ("man/" + dir + "/" + bin + ext)
-      }
-    }
-}
diff --git a/project/Partest.scala b/project/Partest.scala
deleted file mode 100644
index fbb0a2a..0000000
--- a/project/Partest.scala
+++ /dev/null
@@ -1,141 +0,0 @@
-import sbt._
-
-import Build._
-import Keys._
-import Project.Initialize
-import complete._
-import scala.collection.{ mutable, immutable }
-
-/** This object */
-object partest {
-
-  /** The key for the run-partest task that exists in Scala's test suite. */
-  lazy val runPartest       = TaskKey[Unit]("run-partest", "Runs the partest test suite against the quick.")
-  lazy val runPartestSingle = InputKey[Unit]("run-partest-single", "Runs a single partest test against quick.")
-  lazy val runPartestFailed = TaskKey[Unit]("run-partest-failed", "Runs failed partest tests.")
-  lazy val runPartestGrep   = InputKey[Unit]("run-partest-grep", "Runs a single partest test against quick.")
-  lazy val partestRunner    = TaskKey[PartestRunner]("partest-runner", "Creates a runner that can run partest suites")
-  lazy val partestTests     = TaskKey[Map[String, Seq[File]]]("partest-tests", "Creates a map of test-type to a sequence of the test files/directories to test.")
-  lazy val partestDirs      = SettingKey[Map[String,File]]("partest-dirs", "The map of partest test type to directory associated with that test type")
-
-  lazy val partestTaskSettings: Seq[Setting[_]] = Seq(
-    javaOptions in partestRunner := Seq("-Xmx512M -Xms256M"),
-    partestDirs <<= baseDirectory apply { bd =>
-      partestTestTypes map (kind => kind -> (bd / "test" / "files" / kind)) toMap
-    },
-    partestRunner <<= partestRunnerTask(fullClasspath in Runtime, javaOptions in partestRunner),
-    partestTests <<= partestTestsTask(partestDirs),
-    runPartest <<= runPartestTask(partestRunner, partestTests, scalacOptions in Test),
-    runPartestSingle <<= runSingleTestTask(partestRunner, partestDirs, scalacOptions in Test),
-    runPartestFailed <<= runPartestTask(partestRunner, partestTests, scalacOptions in Test, Seq("--failed"))
-  )
-
-  // What's fun here is that we want "*.scala" files *and* directories in the base directory...
-  def partestResources(base: File, testType: String): PathFinder = testType match {
-    case "res"          => base ** "*.res"
-    case "buildmanager" => base * "*"
-    // TODO - Only allow directories that have "*.scala" children...
-    case _              => base * "*" filter { f => !f.getName.endsWith(".obj") && (f.isDirectory || f.getName.endsWith(".scala")) }
-  }
-  lazy val partestTestTypes = Seq("run", "jvm", "pos", "neg", "buildmanager", "res", "shootout", "scalap", "specialized", "presentation", "scalacheck")
-
-  // TODO - Figure out how to specify only a subset of resources...
-  def partestTestsTask(testDirs: ScopedSetting[Map[String,File]]): Project.Initialize[Task[Map[String, Seq[File]]]] =
-    testDirs map (m => m map { case (kind, dir) => kind -> partestResources(dir, kind).get })
-
-  // TODO - Split partest task into Configurations and build a Task for each Configuration.
-  // *then* mix all of them together for run-testsuite or something clever like this.
-  def runPartestTask(runner: ScopedTask[PartestRunner], testRuns: ScopedTask[Map[String,Seq[File]]], scalacOptions: ScopedTask[Seq[String]], extraArgs: Seq[String] = Seq()): Initialize[Task[Unit]] = {
-    (runner, testRuns, scalacOptions, streams) map {
-      (runner, runs, scalaOpts, s) => runPartestImpl(runner, runs, scalaOpts, s, extraArgs)
-    }
-  }
-  private def runPartestImpl(runner: PartestRunner, runs: Map[String, Seq[File]], scalacOptions: Seq[String], s: TaskStreams, extras: Seq[String] = Seq()): Unit = {
-    val testArgs  = runs.toSeq collect { case (kind, files) if files.nonEmpty => Seq("-" + kind, files mkString ",") } flatten
-    val extraArgs = scalacOptions flatMap (opt => Seq("-scalacoption", opt))
-
-    import collection.JavaConverters._
-    val results = runner run Array(testArgs ++ extraArgs ++ extras: _*) asScala
-    // TODO - save results
-    val failures = results collect {
-      case (path, "FAIL") => path + " [FAILED]"
-      case (path, "TIMEOUT") => path + " [TIMEOUT]"
-    }
-
-    if (failures.isEmpty)
-      s.log.info(""+results.size+" tests passed.")
-    else {
-      failures foreach (s.log error _)
-      error("Test Failures! ("+failures.size+" of "+results.size+")")
-    }
-  }
-
-  def convertTestsForAutoComplete(tests: Map[String, Seq[File]]): (Set[String], Set[String]) =
-    (tests.keys.toSet, tests.values flatMap (_ map cleanFileName) toSet)
-
-  /** Takes a test file, as sent to Partest, and cleans it up for auto-complete. */
-  def cleanFileName(file: File): String = {
-    // TODO - Something intelligent here
-    val TestPattern = ".*/test/(.*)".r
-    file.getCanonicalPath match {
-      case TestPattern(n) => n
-      case _ => file.getName
-    }
-  }
-
-  // TODO - Allow a filter for the second part of this...
-  def runSingleTestParser(testDirs: Map[String, File]): State => Parser[(String, String)] = {
-    import DefaultParsers._
-    state => {
-      Space ~> token(NotSpace examples testDirs.keys.toSet) flatMap { kind =>
-        val files: Set[String] = testDirs get kind match {
-          case Some(dir) =>
-            partestResources(dir, kind).get flatMap (_ relativeTo dir) map (_ getName) toSet
-          case _ =>
-            Set()
-        }
-        Space ~> token(NotSpace examples files) map (kind -> _)
-      }
-    }
-  }
-
-  def runSingleTestTask(runner: ScopedTask[PartestRunner], testDirs: ScopedSetting[Map[String, File]], scalacOptions: ScopedTask[Seq[String]]) : Initialize[InputTask[Unit]] = {
-    import sbinary.DefaultProtocol._
-
-    InputTask(testDirs apply runSingleTestParser) { result =>
-      (runner, result, testDirs, scalacOptions, streams) map {
-        case (r, (kind, filter), dirs, o, s) =>
-        // TODO - Use partest resources somehow to filter the filter correctly....
-        val files: Seq[File] =
-          if (filter == "*") partestResources(dirs(kind), kind).get
-          else (dirs(kind) * filter).get
-
-        runPartestImpl(r, Map(kind -> files), o, s)
-      }
-    }
-  }
-
-  def partestRunnerTask(classpath: ScopedTask[Classpath], javacOptions: TaskKey[Seq[String]]): Project.Initialize[Task[PartestRunner]] =
-   (classpath, javacOptions) map ((cp, opts) => new PartestRunner(Build.data(cp), opts mkString " "))
-}
-
-class PartestRunner(classpath: Seq[File], javaOpts: String) {
-  // Classloader that does *not* have this as parent, for differing Scala version.
-  lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.toURI.toURL).toArray, null)
-  lazy val (mainClass, mainMethod) = try {
-    val c = classLoader.loadClass("scala.tools.partest.nest.SBTRunner")
-    val m = c.getMethod("mainReflect", classOf[Array[String]])
-    (c,m)
-  }
-  lazy val classPathArgs = Seq("-cp", classpath.map(_.getAbsoluteFile).mkString(java.io.File.pathSeparator))
-  def run(args: Array[String]): java.util.Map[String,String] = try {
-    // TODO - undo these settings after running.  Also globals are bad.
-    System.setProperty("partest.java_opts", javaOpts)
-    val allArgs = (classPathArgs ++ args).toArray
-    mainMethod.invoke(null, allArgs).asInstanceOf[java.util.Map[String,String]]
-  } catch {
-    case e =>
-    //error("Could not run Partest: " + e)
-    throw e
-  }
-}
diff --git a/project/Release.scala b/project/Release.scala
deleted file mode 100644
index feab8bd..0000000
--- a/project/Release.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-import sbt._
-import Keys._
-
-object Release {
-
-  // TODO - Just make the STARR artifacts and dump the sha1 files.
-
-  val starrLibs = Seq("scala-library.jar", "scala-reflect.jar", "scala-compiler.jar", "jline.jar")
-
-  val pushStarr = Command.command("new-starr") { (state: State) =>
-    /*val extracted = Project.extract(state)
-    import extracted._
-    // First run tests
-    val (s1, result) = runTask(test in Test, state)
-    // If successful, package artifacts
-    val (s2, distDir) = runTask(makeExplodedDist, s1)
-    // Then copy new libs in place
-    val bd = extracted get baseDirectory
-    for {
-      jarName <- starrLibs
-      jar = distDir / "lib" / jarName
-      if jar.exists
-    } IO.copyFile(jar, bd / "lib" / jarName)
-    // Invalidate SHA1 files.
-    ShaResolve.removeInvalidShaFiles(bd)
-    // Now run tests *again*?
-    s2*/
-    state
-  }
-}
diff --git a/project/RemoteDependencies.scala b/project/RemoteDependencies.scala
deleted file mode 100644
index 705b9dc..0000000
--- a/project/RemoteDependencies.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-import sbt._
-import Keys._
-import ScalaBuildKeys._
-
-
-object RemoteDependencies {
-  def buildSettings(externalProjects: Set[URI], localScala: Setting[_]): Seq[Setting[_]] = Seq(
-    commands += Command.command("fix-uri-projects") { (state: State) =>
-      if(state.get(buildFixed) getOrElse false) state
-      else {
-        // TODO -fix up scalacheck's dependencies!
-        val extracted = Project.extract(state)
-        import extracted._
-        val scalaVersionString = extracted get version
-
-        def fix(s: Setting[_]): Setting[_] = s match {
-          case ScopedExternalSetting(p, scalaInstance.key, setting) if externalProjects(p)        => localScala mapKey Project.mapScope(_ => s.key.scope)
-          // TODO - Fix Actors dependency...
-          //case ScopedExternalSetting(p, libraryDependencies.key, setting) if externalProjects(p)  => fixProjectDeps(s)
-          case s                                                                                  => s
-        }
-        val transformed = session.mergeSettings map ( s => fix(s) )
-        val scopes = transformed collect { case ScopedExternalSetting(p, _, s) if externalProjects(p) => s.key.scope } toSet
-        // Create some fixers so we don't download scala or rely on it.
-        // Also add dependencies that disappear in 2.10 for now...
-        val fixers = for { scope <- scopes
-                           setting <- Seq(autoScalaLibrary := false, 
-                                          crossPaths := false,
-                                          scalaVersion := scalaVersionString)
-                     } yield setting mapKey Project.mapScope(_ => scope)
-        val newStructure = Load.reapply(transformed ++ fixers, structure)
-        Project.setProject(session, newStructure, state).put(buildFixed, true)
-      }
-    },
-    onLoad in Global <<= (onLoad in Global) apply (_ andThen { (state: State) =>
-      "fix-uri-projects" :: state
-    })
-  )
-}
-
-
-
-/** Matcher to make updated remote project references easier. */
-object ScopedExternalSetting {
-  def unapply[T](s: Setting[_]): Option[(URI, AttributeKey[_], Setting[_])] =
-    s.key.scope.project match {
-      case Select(p @ ProjectRef(uri, _)) => Some((uri, s.key.key, s))
-      case _                              => None
-    }
-}
-
-
-
diff --git a/project/Sametest.scala b/project/Sametest.scala
deleted file mode 100644
index 6f12eb2..0000000
--- a/project/Sametest.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-import sbt._
-
-import Build._
-import Keys._
-
-// This code is adapted from scala.tools.ant.Same by Gilles Dubochet.
-object SameTest {
-
-  def checkSameBinaryProjects(lhs: Project, rhs: Project): Project.Initialize[Task[Unit]] =
-    (classDirectory in Compile in lhs, classDirectory in Compile in rhs, 
-     compile in Compile in lhs, compile in Compile in rhs, streams) map { (lhs,rhs, _, _, s) => 
-      // Now we generate a complete set of relative class-file names and then compare them pairwise.
-      def relativeClasses(dir: File) = (dir ** "*.class").get.flatMap(IO.relativize(dir,_).toList)
-      // This code is adapted from SameTask in the compiler.
-      def hasDifferentFiles(filePairs: Seq[(File,File)]): Boolean = {
-        filePairs exists { case (a,b) =>
-          if (!a.canRead || !b.canRead) {
-            s.log.error("Either ["+a+"] or ["+b+"] is missing.")
-            true
-          } else {
-            s.log.debug("Checking for binary differences in ["+a+"] against ["+b+"].")          
-            val diff = !checkSingleFilePair(a,b) 
-            if(diff) s.log.error("["+a+"] differs from ["+b+"]")
-            diff
-          }
-        }
-      }
-      val allClassMappings = (relativeClasses(lhs) ++ relativeClasses(rhs)).distinct
-      val comparisons = allClassMappings.map(f => new File(lhs, f) -> new File(rhs, f))
-      val result = hasDifferentFiles(comparisons)
-      if (result) error("Binary artifacts differ.")
-    }
-
-  val bufferSize = 1024
-
-  // Tests whether two files are binary equivalents of each other.
-  def checkSingleFilePair(originFile: File, destFile: File): Boolean = {
-    Using.fileInputStream(originFile) { originStream =>
-      Using.fileInputStream(destFile) { destStream =>
-        val originBuffer = new Array[Byte](bufferSize)
-        val destBuffer = new Array[Byte](bufferSize)
-        var equalNow = true
-        var originRemaining = originStream.read(originBuffer)
-        var destRemaining = destStream.read(destBuffer)
-        while (originRemaining > 0 && equalNow) {
-          if (originRemaining == destRemaining) {
-            for (idx <- 0 until originRemaining) {
-              equalNow = equalNow && (originBuffer(idx) == destBuffer(idx))
-            }
-          } else {
-            equalNow = false
-          }
-          originRemaining = originStream.read(originBuffer)
-          destRemaining = destStream.read(destBuffer)
-        }
-        if (destRemaining > 0) equalNow = false
-        equalNow
-      }
-    }
-  }
-
-
-}
diff --git a/project/ScalaBuildKeys.scala b/project/ScalaBuildKeys.scala
deleted file mode 100644
index 9e495de..0000000
--- a/project/ScalaBuildKeys.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-import sbt._
-import Keys._
-
-object ScalaBuildKeys {
-  val lockerLock        = TaskKey[Unit]("locker-lock", "Locks the locker layer of the compiler build such that it won't rebuild on changed source files.")
-  val lockerUnlock      = TaskKey[Unit]("locker-unlock", "Unlocks the locker layer of the compiler so that it will be recompiled on changed source files.")
-  val lockFile          = SettingKey[File]("lock-file", "Location of the lock file used when compiling this project.")
-  val lock              = TaskKey[Unit]("lock", "Locks this project so it won't be recompiled.")
-  val unlock            = TaskKey[Unit]("unlock", "Unlocks this project so it will be recompiled.")
-  val makeDist          = TaskKey[File]("make-dist",  "Creates a mini-distribution (scala home directory) for this build in a zip file.")
-  val makeExplodedDist  = TaskKey[File]("make-exploded-dist", "Creates a mini-distribution (scala home directory) for this build in a directory.")
-  val makeDistMappings  = TaskKey[Seq[(File, String)]]("make-dist-mappings", "Creates distribution mappings for creating zips, jars, directories, etc.")
-  val buildFixed        = AttributeKey[Boolean]("build-uri-fixed")
-  val genBinRunner      = TaskKey[ScalaToolRunner]("gen-bin-runner", "Creates a utility to generate script files for Scala.")  
-  val genBin            = TaskKey[Seq[(File,String)]]("gen-bin", "Creates script files for Scala distribution.")
-  val binDir            = SettingKey[File]("binaries-directory", "Directory where binary scripts will be located.")
-  val genBinQuick       = TaskKey[Seq[(File,String)]]("gen-quick-bin", "Creates script files for testing against current Scala build classfiles (not local dist).")
-  val runManmakerMan    = TaskKey[Seq[(File,String)]]("make-man", "Runs the man maker project to generate man pages")
-  val runManmakerHtml   = TaskKey[Seq[(File,String)]]("make-html", "Runs the man maker project to generate html pages")
-  val checkSame         = TaskKey[Unit]("check-same-binaries", "checks whether or not the class files generated by scala are the same.")
-  val checkSameLibrary  = TaskKey[Unit]("check-same-lib-binaries", "checks whether or not the library class files generated by scala are the same.")
-  val checkSameCompiler = TaskKey[Unit]("check-same-comp-binaries", "checks whether or not the compiler class files generated by scala are the same.")
-}
diff --git a/project/ScalaToolRunner.scala b/project/ScalaToolRunner.scala
deleted file mode 100644
index d7338a5..0000000
--- a/project/ScalaToolRunner.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-import sbt._
-import Keys._
-
-/** Reflection helper that runs ScalaTool. 
- * TODO - When SBT is on 2.10.x try to use Dynamic + Reflection. COULD BE FUN.
- */
-class ScalaToolRunner(classpath: Classpath) {
-  // TODO - Don't use the ant task directly...
-  lazy val classLoader        = new java.net.URLClassLoader(classpath.map(_.data.toURI.toURL).toArray, null)
-  lazy val mainClass          = classLoader.loadClass("scala.tools.ant.ScalaTool")
-  lazy val executeMethod      = mainClass.getMethod("execute")
-  lazy val setFileMethod      = mainClass.getMethod("setFile", classOf[java.io.File])
-  lazy val setClassMethod     = mainClass.getMethod("setClass", classOf[String])
-  lazy val setClasspathMethod = mainClass.getMethod("setClassPath", classOf[String])
-  lazy val instance           = mainClass.newInstance()
-    
-  def setClass(cls: String): Unit    = setClassMethod.invoke(instance, cls)
-  def setFile(file: File): Unit      = setFileMethod.invoke(instance, file)
-  def setClasspath(cp: String): Unit = setClasspathMethod.invoke(instance, cp)
-  def execute(): Unit                = executeMethod.invoke(instance)
-}
diff --git a/project/ShaResolve.scala b/project/ShaResolve.scala
deleted file mode 100644
index cea2b2d..0000000
--- a/project/ShaResolve.scala
+++ /dev/null
@@ -1,147 +0,0 @@
-import sbt._
-
-import Build._
-import Keys._
-import Project.Initialize
-import scala.collection.{ mutable, immutable }
-import scala.collection.parallel.CompositeThrowable
-import java.security.MessageDigest
-
-case class Credentials(user: String, pw: String)
-
-/** Helpers to resolve SHA artifacts from typesafe repo. */
-object ShaResolve {
-  import dispatch.{Http,url}
-  val remote_urlbase="http://typesafe.artifactoryonline.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"  
-  
-  val pullBinaryLibs = TaskKey[Unit]("pull-binary-libs", "Pulls binary libs by the SHA key.")
-  val pushBinaryLibs = TaskKey[Unit]("push-binary-libs", "Pushes binary libs whose SHA has changed.")
-  val binaryLibCache = SettingKey[File]("binary-lib-cache", "Location of the cache of binary libs for this scala build.")
-
-  def settings: Seq[Setting[_]] = Seq(
-    binaryLibCache in ThisBuild := file(System.getProperty("user.home")) / ".sbt" / "cache" / "scala",
-    pullBinaryLibs in ThisBuild <<= (baseDirectory, binaryLibCache, streams) map resolveLibs,
-    pushBinaryLibs in ThisBuild <<= (baseDirectory, streams) map getCredentialsAndPushFiles
-  )
-
-  def resolveLibs(dir: File, cacheDir: File, s: TaskStreams): Unit = loggingParallelExceptions(s) {
-     val files = (dir / "test" / "files" ** "*.desired.sha1") +++ (dir / "lib" ** "*.desired.sha1")
-     for {
-       (file, name) <- (files x relativeTo(dir)).par
-       uri = name.dropRight(13).replace('\\', '/')       
-       jar = dir / uri
-       if !jar.exists || !isValidSha(file)
-       sha = getShaFromShafile(file)
-     } pullFile(jar, sha + "/" + uri, cacheDir, sha, s)
-  }
-
-  /** This method removes all SHA1 files that don't match their corresponding JAR. */
-  def removeInvalidShaFiles(dir: File): Unit = {
-    val files = (dir / "test" / "files" ** "*.desired.sha1") +++ (dir / "lib" ** "*.desired.sha1")
-    for {
-      (file, name) <- (files x relativeTo(dir)).par
-      uri = name.dropRight(13).replace('\\', '/')       
-      jar = dir / uri
-      if !jar.exists || !isValidSha(file)
-    } IO.delete(jar)
-  }
-  def getCredentials: Credentials = System.out.synchronized {
-    val user = (SimpleReader.readLine("Please enter your STARR username> ") getOrElse error("No username provided."))
-    val password = (SimpleReader.readLine("Please enter your STARR password> ", Some('*')) getOrElse error("No password provided."))
-    Credentials(user, password)
-  }
-
-  def getCredentialsAndPushFiles(dir: File, s: TaskStreams): Unit =
-    pushFiles(dir, getCredentials, s)
-
-  def pushFiles(dir: File, cred: Credentials, s: TaskStreams): Unit = loggingParallelExceptions(s) {
-    val files = (dir / "test" / "files" ** "*.jar") +++ (dir / "lib" ** "*.jar")
-    for {
-      (jar, name) <- (files x relativeTo(dir)).par
-      shafile = dir / (name + ".desired.sha1")
-      if !shafile.exists || !isValidSha(shafile)
-    } pushFile(jar, name, cred, s)
-  }
-
-  @inline final def loggingParallelExceptions[U](s: TaskStreams)(f: => U): U = try f catch {
-    case t: CompositeThrowable =>
-      s.log.error("Error during parallel execution, GET READY FOR STACK TRACES!!")
-      t.throwables foreach (t2 => s.log.trace(t2))
-      throw t
-  }
-
-  // TODO - Finish this publishing aspect.
-
-  def getShaFromShafile(file: File): String = parseShaFile(file)._2
-
-  // This should calculate the SHA sum of a file the same as the linux process.
-  def calculateSha(file: File): String = {
-    val digest = MessageDigest.getInstance("SHA1")
-    val in = new java.io.FileInputStream(file);
-    val buffer = new Array[Byte](8192)
-    try {
-       def read(): Unit = in.read(buffer) match {
-         case x if x <= 0 => ()
-         case size => digest.update(buffer, 0, size); read()
-       }
-       read()
-    } finally in.close()
-    val sha = convertToHex(digest.digest())
-    sha
-  }
-
-  def convertToHex(data: Array[Byte]): String = {
-    def byteToHex(b: Int) =
-      if ((0 <= b) && (b <= 9)) ('0' + b).toChar
-      else ('a' + (b-10)).toChar
-    val buf = new StringBuffer
-    for (i <- 0 until data.length) {
-      buf append byteToHex((data(i) >>> 4) & 0x0F)
-      buf append byteToHex(data(i) & 0x0F)
-    }
-    buf.toString
-  }
-  // Parses a sha file into a file and a sha.
-  def parseShaFile(file: File): (File, String) =
-    IO.read(file).split("\\s") match {
-       case Array(sha, filename) if filename.startsWith("?") => (new File(file.getParentFile, filename.drop(1)), sha)
-       case Array(sha, filename)                             => (new File(file.getParentFile, filename), sha)
-       case _                                                => error(file.getAbsolutePath + " is an invalid sha file")
-    }
-  
-
-  def isValidSha(file: File): Boolean =
-    try {
-      val (jar, sha) = parseShaFile(file)
-      jar.exists && calculateSha(jar) == sha
-    } catch {
-      case t: Exception => false
-    }
-     
-
-  def pullFile(file: File, uri: String, cacheDir: File, sha: String, s: TaskStreams): Unit = {
-    val cachedFile = cacheDir / uri
-    if (!cachedFile.exists || calculateSha(cachedFile) != sha) {
-      // Ensure the directory for the cache exists.
-      cachedFile.getParentFile.mkdirs()
-      val url = remote_urlbase + "/" + uri
-      val fous = new java.io.FileOutputStream(cachedFile)
-      s.log.info("Pulling [" + cachedFile + "] to cache")
-      try Http(dispatch.url(url) >>> fous) finally fous.close()
-    }
-    s.log.info("Pulling [" + file + "] from local cache")
-    IO.copyFile(cachedFile, file)
-  }
-  
-  // Pushes a file and writes the new .desired.sha1 for git.
-  def pushFile(file: File, uri: String, cred: Credentials, s: TaskStreams): Unit = {
-    val sha = calculateSha(file)
-    val url = remote_urlbase + "/" + sha + "/" + uri
-    val sender = dispatch.url(url).PUT.as(cred.user,cred.pw) <<< (file, "application/java-archive")
-    // TODO - output to logger.
-    Http(sender >>> System.out)
-    val shafile = file.getParentFile / (file.getName + ".desired.sha1")
-    IO.touch(shafile)
-    IO.write(shafile, sha + " ?" + file.getName)
-  }
-}
diff --git a/project/Testing.scala b/project/Testing.scala
deleted file mode 100644
index 5de7211..0000000
--- a/project/Testing.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-import sbt._
-import Keys._
-import partest._
-import SameTest._
-import ScalaBuildKeys._
-
-/** All settings/projects relating to testing. */
-trait Testing { self: ScalaBuild.type =>
-
-  lazy val testsuiteSettings: Seq[Setting[_]] = compilerDependentProjectSettings ++ partestTaskSettings ++ VerifyClassLoad.settings ++ Seq(
-    unmanagedBase <<= baseDirectory / "test/files/lib",
-    fullClasspath in VerifyClassLoad.checkClassLoad <<= (fullClasspath in scalaLibrary in Runtime).identity,
-    autoScalaLibrary := false,
-    checkSameLibrary <<= checkSameBinaryProjects(quickLib, strappLib),
-    checkSameCompiler <<= checkSameBinaryProjects(quickComp, strappComp),
-    checkSame <<= (checkSameLibrary, checkSameCompiler) map ((a,b) => ()),
-    autoScalaLibrary := false
-  )
-  lazy val continuationsTestsuiteSettings: Seq[Setting[_]] = testsuiteSettings ++ Seq(
-    scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map { 
-     case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
-    },
-    partestDirs <<= baseDirectory apply { bd =>
-      def mkFile(name: String) = bd / "test" / "files" / name
-      def mkTestType(name: String) = name.drop("continuations-".length).toString
-      Seq("continuations-neg", "continuations-run") map (t => mkTestType(t) -> mkFile(t)) toMap
-    }
-  )
-  val testsuite = (
-    Project("testsuite", file(".")) 
-    settings (testsuiteSettings:_*)
-    dependsOn (scalaLibrary, scalaCompiler, fjbg, partest, scalacheck)
-  )
-  val continuationsTestsuite = (
-    Project("continuations-testsuite", file("."))
-    settings (continuationsTestsuiteSettings:_*) 
-    dependsOn (partest, scalaLibrary, scalaCompiler, fjbg)
-  )
-
-}
-
diff --git a/project/VerifyClassLoad.scala b/project/VerifyClassLoad.scala
deleted file mode 100644
index c8eebb1..0000000
--- a/project/VerifyClassLoad.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-import sbt._
-
-import Build._
-import Keys._
-
-// This is helper code to validate that generated class files will succeed in bytecode verification at class-load time.
-object VerifyClassLoad {
-  lazy val checkClassLoad: TaskKey[Unit] = TaskKey("check-class-load", "checks whether or not the class files generated by scala are deemed acceptable by classloaders.")
-  lazy val checkClassRunner: TaskKey[ClassVerifyRunner] = TaskKey("check-class-runner", "A wrapper around reflective calls to the VerifyClass class.")
-
-
-  def settings: Seq[Setting[_]] = Seq(
-    checkClassRunner <<= (fullClasspath in Runtime) map (cp => new ClassVerifyRunner(data(cp))),
-    fullClasspath in checkClassLoad := Seq(),
-    checkClassLoad <<= (checkClassRunner, fullClasspath in checkClassLoad, streams) map { (runner, dirs, s) =>
-       import collection.JavaConverters._
-       val results = runner.run(data(dirs).map(_.getAbsolutePath).toArray).asScala
-
-       s.log.info("Processed " + results.size + " classes.")
-       val errors = results.filter(_._2 != null)
-       for( (name, result) <- results; if result != null) {
-         s.log.error(name + " had error: " + result)
-       }
-       if(errors.size > 0) error("Classload validation errors encountered")
-       ()
-    }
-  )
-
-  // TODO - Use 
-  class ClassVerifyRunner(classpath: Seq[File]) {
-    // Classloader that does *not* have this as parent, for differing Scala version.
-    lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.toURI.toURL).toArray, null)
-    lazy val (mainClass, mainMethod) = try {
-      val c = classLoader.loadClass("scala.tools.util.VerifyClass")
-      val m = c.getMethod("run", classOf[Array[String]])
-      (c,m)
-    }
-    def run(args: Array[String]): java.util.Map[String,String] = try {
-      mainMethod.invoke(null, args).asInstanceOf[java.util.Map[String,String]]
-    } catch {
-      case e =>
-      //error("Could not run Partest: " + e)
-      throw e
-    }
-  }
-}
diff --git a/project/Versions.scala b/project/Versions.scala
deleted file mode 100644
index 57e274c..0000000
--- a/project/Versions.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-import sbt._
-import Keys._
-import java.util.Properties
-import scala.util.control.Exception.catching
-import java.lang.{NumberFormatException => NFE}
-import java.io.FileInputStream
-import com.jsuereth.git.GitRunner
-import com.jsuereth.git.GitKeys.gitRunner
-
-case class VersionInfo(canonical: String,
-                       maven: String,
-                       osgi: String)
-
-/** This file is responsible for setting up Scala versioning schemes and updating all the necessary bits. */
-object Versions {
-  val buildNumberFile = SettingKey[File]("scala-build-number-file")
-  // TODO - Make this a setting?
-  val buildNumberProps = SettingKey[BaseBuildNumber]("scala-build-number-props")
-  val buildRelease = SettingKey[Boolean]("scala-build-release", "This is set to true if we're building a release.")
-  val mavenSuffix = SettingKey[String]("scala-maven-suffix", "This is set to whatever maven suffix is required.")
-
-  val gitSha = TaskKey[String]("scala-git-sha", "The sha of the current git commit.")
-  val gitDate = TaskKey[String]("scala-git-date", "The date of the current git commit.")
-
-  val mavenVersion = SettingKey[String]("scala-maven-version", "The maven version number.")
-  val osgiVersion = TaskKey[String]("scala-osgi-version", "The OSGi version number.")
-  val canonicalVersion = TaskKey[String]("scala-canonical-version", "The canonical version number.")
-
-  val scalaVersions = TaskKey[VersionInfo]("scala-version-info", "The scala versions used for this build.")
-  
-
-  
-  def settings: Seq[Setting[_]] = Seq(
-    buildNumberFile <<= baseDirectory apply (_ / "build.number"),
-    buildNumberProps <<= buildNumberFile apply loadBuildNumberProps,
-    buildRelease := Option(System.getProperty("build.release")) map (!_.isEmpty) getOrElse false,
-    mavenSuffix <<= buildRelease apply pickMavenSuffix,
-    mavenVersion <<= (buildNumberProps, mavenSuffix) apply makeMavenVersion,
-    gitSha <<= (gitRunner, baseDirectory, streams) map getGitSha,
-    gitDate <<= (gitRunner, baseDirectory, streams) map getGitDate,
-    osgiVersion <<= (buildNumberProps, gitDate, gitSha) map makeOsgiVersion,
-    canonicalVersion <<= (buildRelease, mavenVersion, buildNumberProps, gitDate, gitSha) map makeCanonicalVersion,
-    scalaVersions <<= (canonicalVersion, mavenVersion, osgiVersion) map VersionInfo.apply
-  )
-
-
-  /** This generates a  properties file, if it does not already exist, with the maximum lastmodified timestamp
-    * of any source file. */
-  def generateVersionPropertiesFile(name: String)(dir: File, versions: VersionInfo, skip: Boolean, s: TaskStreams): Seq[File] = {
-    // TODO - We can probably clean this up by moving caching bits elsewhere perhaps....
-    val target = dir / name        
-    // TODO - Regenerate on triggers, like recompilation or something...
-    def hasSameVersion: Boolean = {
-      val props = new java.util.Properties
-      val in = new java.io.FileInputStream(target)
-      try props.load(in) finally in.close()
-      versions.canonical == (props getProperty "version.number")
-    }
-    if (!target.exists || !(skip || hasSameVersion)) {
-      makeVersionPropertiesFile(target, versions)
-    }
-    target :: Nil
-  }
-  
-  // This creates the *.properties file used to determine the current version of scala at runtime.  TODO - move these somewhere utility like.
-  def makeVersionPropertiesFile(f: File, versions: VersionInfo): Unit =
-    IO.write(f, "version.number = "+versions.canonical+"\n"+
-                "osgi.number = "+versions.osgi+"\n"+
-                "maven.number = "+versions.maven+"\n"+
-                "copyright.string = Copyright 2002-2013, LAMP/EPFL")
-
-  def makeCanonicalVersion(isRelease: Boolean, mvnVersion: String, base: BaseBuildNumber, gitDate: String, gitSha: String): String =
-    if(isRelease) mvnVersion
-    else {
-      val suffix = if(base.bnum > 0) "-%d".format(base.bnum) else ""
-      "%s.%s.%s%s-%s-%s" format (base.major, base.minor, base.patch, suffix, gitDate, gitSha)
-    }
-
-  def makeMavenVersion(base: BaseBuildNumber, suffix: String): String = {
-    val firstSuffix = if(base.bnum > 0) "-%d".format(base.bnum) else ""
-    "%d.%d.%d%s%s" format (base.major, base.minor, base.patch, firstSuffix, suffix)
-  }
-
-  def makeOsgiVersion(base: BaseBuildNumber, gitDate: String, gitSha: String): String = {
-    val suffix = if(base.bnum > 0) "-%d".format(base.bnum) else ""
-    "%s.%s.%s.v%s%s-%s" format (base.major, base.minor, base.patch, gitDate, suffix, gitSha)
-  }
-
-  /** Determines what the maven suffix should be for this build. */
-  def pickMavenSuffix(isRelease: Boolean): String = {
-    def default = if(isRelease) "" else "-SNAPSHOT"
-    Option(System.getProperty("maven.version.suffix")) getOrElse default
-  }
-
-  /** Loads the build.number properties file into SBT. */
-  def loadBuildNumberProps(file: File): BaseBuildNumber = {
-    val fin = new FileInputStream(file)
-    try {
-      val props = new Properties()
-      props.load(fin)
-      def getProp(name: String): Int = 
-        (for {
-          v <- Option(props.getProperty(name))
-          v2 <- catching(classOf[NFE]) opt v.toInt
-        } yield v2) getOrElse sys.error("Could not convert %s to integer!" format (name))
-
-      BaseBuildNumber(
-        major=getProp("version.major"), 
-        minor=getProp("version.minor"),
-        patch=getProp("version.patch"),
-        bnum =getProp("version.bnum")
-      )
-    } finally fin.close()
-  }
-
-
-  def getGitDate(git: GitRunner, baseDirectory: File, s: TaskStreams): String = {
-    val lines = getGitLines("log","-1","--format=\"%ci\"")(git,baseDirectory, s)
-    val line = if(lines.isEmpty) sys.error("Could not retrieve git commit date!") else lines.head
-    // Lines *always* start with " for some reason...
-    line drop 1 split "\\s+" match {
-      case Array(date, time, _*) =>  "%s-%s" format (date.replaceAll("\\-", ""), time.replaceAll(":",""))
-      case _                     => sys.error("Could not parse git date: " + line)
-    }
-  }
-
-  def getGitSha(git: GitRunner, baseDirectory: File, s: TaskStreams): String = {
-    val lines = getGitLines("log","-1","--format=\"%H\"", "HEAD")(git,baseDirectory, s)
-    val line = if(lines.isEmpty) sys.error("Could not retrieve git commit sha!") else lines.head
-    val noquote = if(line startsWith "\"") line drop 1 else line
-    val nog = if(noquote startsWith "g") noquote drop 1 else noquote
-    nog take 10
-  }
-
-  def getGitLines(args: String*)(git: GitRunner, baseDirectory: File, s: TaskStreams): Seq[String] =
-     git(args: _*)(baseDirectory, s.log) split "[\r\n]+"
-}
-
-
-case class BaseBuildNumber(major: Int, minor: Int, patch: Int, bnum: Int) {
-  override def toString = "BaseBuildNumber(%d.%d.%d-%d)" format (major, minor, patch, bnum)
-}
diff --git a/project/plugins.sbt b/project/plugins.sbt
deleted file mode 100644
index fdf37e3..0000000
--- a/project/plugins.sbt
+++ /dev/null
@@ -1,9 +0,0 @@
-resolvers += Resolver.url("Typesafe nightlies", url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/"))(Resolver.ivyStylePatterns)
-
-resolvers += Resolver.url("scalasbt", new URL("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)
-
-resolvers += "jgit-repo" at "http://download.eclipse.org/jgit/maven"
-
-libraryDependencies += "net.databinder" % "dispatch-http_2.9.1" % "0.8.6"
-
-
diff --git a/project/project/Build.scala b/project/project/Build.scala
deleted file mode 100644
index 902e8b0..0000000
--- a/project/project/Build.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-import sbt._
-object PluginDef extends Build {
-  override def projects = Seq(root)
-  lazy val root = Project("plugins", file(".")) dependsOn(proguard, git)
-  lazy val proguard = uri("git://github.com/jsuereth/xsbt-proguard-plugin.git#sbt-0.12")
-  lazy val git = uri("git://github.com/sbt/sbt-git-plugin.git#scala-build")
-}
diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md
new file mode 100644
index 0000000..6c8712c
--- /dev/null
+++ b/spec/01-lexical-syntax.md
@@ -0,0 +1,615 @@
+---
+title: Lexical Syntax
+layout: default
+chapter: 1
+---
+
+# Lexical Syntax
+
+Scala programs are written using the Unicode Basic Multilingual Plane
+(_BMP_) character set; Unicode supplementary characters are not
+presently supported.  This chapter defines the two modes of Scala's
+lexical syntax, the Scala mode and the _XML mode_. If not
+otherwise mentioned, the following descriptions of Scala tokens refer
+to _Scala mode_, and literal characters ‘c’ refer to the ASCII fragment 
+`\u0000` – `\u007F`.
+
+In Scala mode, _Unicode escapes_ are replaced by the corresponding
+Unicode character with the given hexadecimal code.
+
+```ebnf
+UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit
+hexDigit      ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’
+```
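+
+For instance, since the replacement happens before any further lexical
+processing, the following two character literals denote the same character:
+
+```scala
+val a1 = 'A'          // the character 'A' written directly
+val a2 = '\u0041'     // the same character written as a Unicode escape
+```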
+
+<!--
+TODO SI-4583: UnicodeEscape used to allow additional backslashes,
+and there is something in the code `evenSlashPrefix` that alludes to it,
+but I can't make it work nor can I imagine how this would make sense,
+so I removed it for now.
+-->
+
+To construct tokens, characters are distinguished according to the following 
+classes (Unicode general category given in parentheses):
+
+1. Whitespace characters. `\u0020 | \u0009 | \u000D | \u000A`.
+1. Letters, which include lower case letters (`Ll`), upper case letters (`Lu`),
+   titlecase letters (`Lt`), other letters (`Lo`), letter numerals (`Nl`) and the
+   two characters `\u0024 ‘$’` and `\u005F ‘_’`, which both count as upper case
+   letters.
+1. Digits `‘0’ | … | ‘9’`.
+1. Parentheses `‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ `.
+1. Delimiter characters ``‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ ``.
+1. Operator characters. These consist of all printable ASCII characters
+   `\u0020` - `\u007F` which are in none of the sets above, mathematical 
+   symbols (`Sm`) and other symbols (`So`).
+
+## Identifiers
+
+```ebnf
+op       ::=  opchar {opchar} 
+varid    ::=  lower idrest
+plainid  ::=  upper idrest
+           |  varid
+           |  op
+id       ::=  plainid
+           |  ‘`’ stringLiteral ‘`’
+idrest   ::=  {letter | digit} [‘_’ op]
+```
+
+There are three ways to form an identifier. First, an identifier can
+start with a letter which can be followed by an arbitrary sequence of
+letters and digits. This may be followed by underscore ‘_’
+characters and another string composed of either letters and digits or
+of operator characters.  Second, an identifier can start with an operator 
+character followed by an arbitrary sequence of operator characters.
+The preceding two forms are called _plain_ identifiers.  Finally,
+an identifier may also be formed by an arbitrary string between
+back-quotes (host systems may impose some restrictions on which
+strings are legal for identifiers).  The identifier then is composed
+of all characters excluding the backquotes themselves.
+ 
+As usual, a longest match rule applies. For instance, the string
+
+```scala
+big_bob++=`def`
+```
+
+decomposes into the three identifiers `big_bob`, `++=`, and
+`def`. The rules for pattern matching further distinguish between
+_variable identifiers_, which start with a lower case letter, and
+_constant identifiers_, which do not.
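+
+For instance, in the following match expression `Max` is a constant
+identifier and is compared against, whereas `n` is a variable identifier
+and is bound to the value being matched:
+
+```scala
+val Max = 100
+def describe(x: Int) = x match {
+  case Max => "exactly the value of Max"   // constant identifier: compares with 100
+  case n   => "some other value: " + n     // variable identifier: binds the scrutinee
+}
+```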
+
+The ‘\$’ character is reserved for compiler-synthesized identifiers.
+User programs should not define identifiers which contain ‘\$’ characters.
+
+The following names are reserved words instead of being members of the
+syntactic class `id` of lexical identifiers.
+
+```scala
+abstract    case        catch       class       def
+do          else        extends     false       final
+finally     for         forSome     if          implicit
+import      lazy        match       new         null
+object      override    package     private     protected
+return      sealed      super       this        throw       
+trait       try         true        type        val         
+var         while       with        yield
+_    :    =    =>    <-    <:    <%     >:    #    @
+```
+
+The Unicode operators `\u21D2` ‘$\Rightarrow$’ and `\u2190` ‘$\leftarrow$’, which have the ASCII
+equivalents `=>` and `<-`, are also reserved.
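+
+For instance, the following definitions use the Unicode forms in place of
+their ASCII equivalents:
+
+```scala
+val twice = (x: Int) ⇒ 2 * x           // ‘⇒’ instead of ‘=>’
+for (i ← 1 to 3) println(twice(i))     // ‘←’ instead of ‘<-’
+```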
+
+### Example
+Here are examples of identifiers:
+```scala
+    x         Object        maxIndex   p2p      empty_?
+    +         `yield`       αρετη     _y       dot_product_*
+    __system  _MAX_LEN_
+```
+
+### Example
+When one needs to access Java identifiers that are reserved words in Scala, use backquote-enclosed strings.
+For instance, the statement `Thread.yield()` is illegal, since
+`yield` is a reserved word in Scala. However, here's a
+work-around: `` Thread.`yield`() ``
+
+
+## Newline Characters
+
+```ebnf
+semi ::= ‘;’ |  nl {nl}
+```
+
+Scala is a line-oriented language where statements may be terminated by
+semi-colons or newlines. A newline in a Scala source text is treated
+as the special token “nl” if the three following criteria are satisfied:
+
+1. The token immediately preceding the newline can terminate a statement.
+1. The token immediately following the newline can begin a statement.
+1. The token appears in a region where newlines are enabled.
+
+The tokens that can terminate a statement are: literals, identifiers
+and the following delimiters and reserved words:
+
+```scala
+this    null    true    false    return    type    <xml-start>    
+_       )       ]       }
+```
+
+The tokens that can begin a statement are all Scala tokens _except_
+the following delimiters and reserved words:
+
+```scala
+catch    else    extends    finally    forSome    match        
+with    yield    ,    .    ;    :    =    =>    <-    <:    <%    
+>:    #    [    )    ]    }
+```
+
+A `case` token can begin a statement only if followed by a
+`class` or `object` token.
+
+Newlines are enabled in:
+
+1. all of a Scala source file, except for nested regions where newlines
+   are disabled, and
+1. the interval between matching `{` and `}` brace tokens,
+   except for nested regions where newlines are disabled.
+
+Newlines are disabled in:
+
+1. the interval between matching `(` and `)` parenthesis tokens, except for
+   nested regions where newlines are enabled, and
+1. the interval between matching `[` and `]` bracket tokens, except for nested
+   regions where newlines are enabled.
+1. The interval between a `case` token and its matching
+   `=>` token, except for nested regions where newlines are
+   enabled.
+1. Any regions analyzed in [XML mode](#xml-mode).
+
+Note that the brace characters of `{...}` escapes in XML and
+string literals are not tokens, 
+and therefore do not enclose a region where newlines
+are enabled.
+
+Normally, only a single `nl` token is inserted between two
+consecutive non-newline tokens which are on different lines, even if there are multiple lines
+between the two tokens. However, if two tokens are separated by at
+least one completely blank line (i.e. a line which contains no
+printable characters), then two `nl` tokens are inserted.
+
+The Scala grammar (given in full [here](#scala-syntax-summary))
+contains productions where optional `nl` tokens, but not
+semicolons, are accepted. This has the effect that a newline in one of these
+positions does not terminate an expression or statement. These positions can
+be summarized as follows:
+
+Multiple newline tokens are accepted in the following places (note
+that a semicolon in place of the newline would be illegal in every one
+of these cases):
+
+- between the condition of a 
+  [conditional expression](06-expressions.html#conditional-expressions)
+  or [while loop](06-expressions.html#while-loop-expressions) and the next
+  following expression,
+- between the enumerators of a 
+  [for-comprehension](06-expressions.html#for-comprehensions-and-for-loops)
+  and the next following expression, and
+- after the initial `type` keyword in a 
+  [type definition or declaration](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases).
+
+A single new line token is accepted
+
+- in front of an opening brace ‘{’, if that brace is a legal
+  continuation of the current statement or expression,
+- after an [infix operator](06-expressions.html#prefix-infix-and-postfix-operations),
+  if the first token on the next line can start an expression,
+- in front of a [parameter clause](04-basic-declarations-and-definitions.html#function-declarations-and-definitions), and
+- after an [annotation](11-user-defined-annotations.html#user-defined-annotations).
+
+### Example
+
+The newline tokens between the two lines are not
+treated as statement separators.
+
+```scala
+if (x > 0)
+  x = x - 1
+
+while (x > 0)
+  x = x / 2
+
+for (x <- 1 to 10)
+  println(x)
+
+type
+  IntList = List[Int]
+```
+
+### Example
+
+```scala
+new Iterator[Int]
+{
+  private var x = 0
+  def hasNext = true
+  def next = { x += 1; x }
+}
+```
+
+With an additional newline character, the same code is interpreted as
+an object creation followed by a local block:
+
+```scala
+new Iterator[Int]
+
+{
+  private var x = 0
+  def hasNext = true
+  def next = { x += 1; x }
+}
+```
+
+### Example
+
+```scala
+  x < 0 ||
+  x > 10
+```
+
+With an additional newline character, the same code is interpreted as
+two expressions:
+
+```scala
+  x < 0 ||
+
+  x > 10
+```
+
+### Example
+
+```scala
+def func(x: Int)
+        (y: Int) = x + y
+```
+
+With an additional newline character, the same code is interpreted as
+an abstract function definition and a syntactically illegal statement:
+
+```scala
+def func(x: Int)
+
+        (y: Int) = x + y
+```
+
+### Example
+
+```scala
+@serializable
+protected class Data { ... }
+```
+
+With an additional newline character, the same code is interpreted as
+an attribute and a separate statement (which is syntactically
+illegal).
+
+```scala
+@serializable
+
+protected class Data { ... }
+```
+
+
+## Literals
+
+There are literals for integer numbers, floating point numbers,
+characters, booleans, symbols, strings.  The syntax of these literals is in
+each case as in Java.
+
+<!-- TODO 
+  say that we take values from Java, give examples of some lits in
+  particular float and double. 
+-->
+
+```ebnf
+Literal  ::=  [‘-’] integerLiteral
+           |  [‘-’] floatingPointLiteral
+           |  booleanLiteral
+           |  characterLiteral
+           |  stringLiteral
+           |  symbolLiteral
+           |  ‘null’
+```
+
+
+### Integer Literals
+
+```ebnf
+integerLiteral  ::=  (decimalNumeral | hexNumeral | octalNumeral) 
+                       [‘L’ | ‘l’]
+decimalNumeral  ::=  ‘0’ | nonZeroDigit {digit}
+hexNumeral      ::=  ‘0’ ‘x’ hexDigit {hexDigit}
+octalNumeral    ::=  ‘0’ octalDigit {octalDigit}
+digit           ::=  ‘0’ | nonZeroDigit
+nonZeroDigit    ::=  ‘1’ | … | ‘9’
+octalDigit      ::=  ‘0’ | … | ‘7’
+```
+
+Integer literals are usually of type `Int`, or of type
+`Long` when followed by a `L` or
+`l` suffix. Values of type `Int` are all integer
+numbers between $-2\^{31}$ and $2\^{31}-1$, inclusive.  Values of
+type `Long` are all integer numbers between $-2\^{63}$ and
+$2\^{63}-1$, inclusive. A compile-time error occurs if an integer literal
+denotes a number outside these ranges.
+
+However, if the expected type [_pt_](06-expressions.html#expression-typing) of a literal
+in an expression is either `Byte`, `Short`, or `Char`
+and the integer number fits in the numeric range defined by the type,
+then the number is converted to type _pt_ and the literal's type
+is _pt_. The numeric ranges given by these types are:
+
+|                |                          |
+|----------------|--------------------------|
+|`Byte`          | $-2\^7$ to $2\^7-1$      |
+|`Short`         | $-2\^{15}$ to $2\^{15}-1$|
+|`Char`          | $0$ to $2\^{16}-1$       |
+
+
+### Example
+
+```scala
+0          21          0xFFFFFFFF       -42L
+```
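+
+If the expected type is `Byte`, `Short`, or `Char`, the same kind of literal
+may receive the narrower type, for instance:
+
+```scala
+val b: Byte = 100     // 100 fits into Byte, so the literal has type Byte
+val c: Char = 65      // 65 fits into Char; c is the character 'A'
+// val d: Byte = 200  // compile-time error: 200 is outside the range of Byte
+```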
+
+
+### Floating Point Literals
+
+```ebnf
+floatingPointLiteral  ::=  digit {digit} ‘.’ digit {digit} [exponentPart] [floatType]
+                        |  ‘.’ digit {digit} [exponentPart] [floatType]
+                        |  digit {digit} exponentPart [floatType]
+                        |  digit {digit} [exponentPart] floatType
+exponentPart          ::=  (‘E’ | ‘e’) [‘+’ | ‘-’] digit {digit}
+floatType             ::=  ‘F’ | ‘f’ | ‘D’ | ‘d’
+```
+
+Floating point literals are of type `Float` when followed by
+a floating point type suffix `F` or `f`, and are
+of type `Double` otherwise.  The type `Float`
+consists of all IEEE 754 32-bit single-precision binary floating point
+values, whereas the type `Double` consists of all IEEE 754
+64-bit double-precision binary floating point values.
+
+If a floating point literal in a program is followed by a token
+starting with a letter, there must be at least one intervening
+whitespace character between the two tokens.
+
+### Example
+
+```scala
+0.0        1e30f      3.14159f      1.0e-100      .1
+```
+
+### Example
+
+The phrase `1.toString` parses as three different tokens:
+the integer literal `1`, a `.`, and the identifier `toString`.
+
+### Example
+
+`1.` is not a valid floating point literal because the mandatory digit after the `.` is missing.
+
+### Boolean Literals
+
+```ebnf
+booleanLiteral  ::=  ‘true’ | ‘false’
+```
+
+The boolean literals `true` and `false` are
+members of type `Boolean`.
+
+
+### Character Literals
+
+```ebnf
+characterLiteral  ::=  ‘'’ (printableChar | charEscapeSeq) ‘'’
+```
+
+A character literal is a single character enclosed in quotes.
+The character is either a printable unicode character or is described
+by an [escape sequence](#escape-sequences).
+
+### Example
+
+```scala
+'a'    '\u0041'    '\n'    '\t'
+```
+
+Note that `'\u000A'` is _not_ a valid character literal because
+Unicode conversion is done before literal parsing and the Unicode
+character \\u000A (line feed) is not a printable
+character. One can use instead the escape sequence `'\n'` or
+the octal escape `'\12'` ([see here](#escape-sequences)).
+
+
+### String Literals
+
+```ebnf
+stringLiteral  ::=  ‘"’ {stringElement} ‘"’
+stringElement  ::=  printableCharNoDoubleQuote  |  charEscapeSeq
+```
+
+A string literal is a sequence of characters in double quotes.  The
+characters are either printable unicode characters or are described by
+[escape sequences](#escape-sequences). If the string literal
+contains a double quote character, it must be escaped,
+i.e. `"\""`. The value of a string literal is an instance of
+class `String`. 
+
+### Example
+
+```scala
+"Hello,\nWorld!"
+"This string contains a \" character."
+```
+
+#### Multi-Line String Literals
+
+```ebnf
+stringLiteral   ::=  ‘"""’ multiLineChars ‘"""’
+multiLineChars  ::=  {[‘"’] [‘"’] charNoDoubleQuote} {‘"’}
+```
+
+A multi-line string literal is a sequence of characters enclosed in
+triple quotes `""" ... """`. The sequence of characters is
+arbitrary, except that it may contain three or more consecutive quote characters
+only at the very end. Characters
+need not be printable; newlines or other
+control characters are also permitted.  Unicode escapes work as everywhere else, but none
+of the escape sequences [here](#escape-sequences) are interpreted.
+
+### Example
+
+```scala
+  """the present string
+     spans three
+     lines."""
+```
+
+This would produce the string:
+
+```scala
+the present string
+     spans three
+     lines.
+```
+
+The Scala library contains a utility method `stripMargin`
+which can be used to strip leading whitespace from multi-line strings.
+The expression
+
+```scala
+ """the present string
+   |spans three
+   |lines.""".stripMargin
+```
+
+evaluates to
+
+```scala
+the present string
+spans three 
+lines.
+```
+
+Method `stripMargin` is defined in class
+[scala.collection.immutable.StringLike](http://www.scala-lang.org/api/current/index.html#scala.collection.immutable.StringLike). 
+Because there is a predefined
+[implicit conversion](06-expressions.html#implicit-conversions) from `String` to
+`StringLike`, the method is applicable to all strings.
+
+
+### Escape Sequences
+
+The following escape sequences are recognized in character and string literals.
+
+| charEscapeSeq | unicode  | name            | char   |
+|---------------|----------|-----------------|--------|
+| `‘\’ ‘b’`     | `\u0008` | backspace       |  `BS`  |
+| `‘\’ ‘t’`     | `\u0009` | horizontal tab  |  `HT`  |
+| `‘\’ ‘n’`     | `\u000a` | linefeed        |  `LF`  |
+| `‘\’ ‘f’`     | `\u000c` | form feed       |  `FF`  |
+| `‘\’ ‘r’`     | `\u000d` | carriage return |  `CR`  |
+| `‘\’ ‘"’`     | `\u0022` | double quote    |  `"`   |
+| `‘\’ ‘'’`     | `\u0027` | single quote    |  `'`   |
+| `‘\’ ‘\’`     | `\u005c` | backslash       |  `\`   |
+
+
+A character with Unicode between 0 and 255 may also be represented by
+an octal escape, i.e. a backslash ‘\’ followed by a
+sequence of up to three octal characters.
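+
+For instance, the octal escapes below denote the character ‘A’ and the line
+feed character, respectively:
+
+```scala
+'\101'     // the character 'A' (octal 101 = decimal 65)
+'\12'      // the line feed character, equivalent to '\n'
+```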
+
+It is a compile time error if a backslash character in a character or
+string literal does not start a valid escape sequence.
+
+
+### Symbol literals
+
+```ebnf
+symbolLiteral  ::=  ‘'’ plainid
+```
+
+A symbol literal `'x` is a shorthand for the expression
+`scala.Symbol("x")`. `Symbol` is a [case class](05-classes-and-objects.html#case-classes),
+which is defined as follows.
+
+```scala
+package scala
+final case class Symbol private (name: String) {
+  override def toString: String = "'" + name
+}
+```
+
+The `apply` method of `Symbol`'s companion object
+caches weak references to `Symbol`s, thus ensuring that
+identical symbol literals are equivalent with respect to reference
+equality.
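+
+For instance, two occurrences of the same symbol literal evaluate to the
+same instance:
+
+```scala
+val s1 = 'aSymbol
+val s2 = Symbol("aSymbol")
+s1 eq s2               // true: both refer to the cached Symbol instance
+```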
+
+
+## Whitespace and Comments
+
+Tokens may be separated by whitespace characters
+and/or comments. Comments come in two forms:
+
+A single-line comment is a sequence of characters which starts with
+`//` and extends to the end of the line.
+
+A multi-line comment is a sequence of characters between
+`/*` and `*/`. Multi-line comments may be nested,
+but are required to be properly nested.  Therefore, a comment like
+`/* /* */` will be rejected as having an unterminated
+comment.
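+
+For instance, the following multi-line comment is properly nested and is
+therefore legal:
+
+```scala
+/* This comment
+   /* contains a nested comment */
+   and ends here. */
+val answer = 42        // a single-line comment
+```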
+
+
+## XML mode
+
+In order to allow literal inclusion of XML fragments, lexical analysis
+switches from Scala mode to XML mode when encountering an opening
+angle bracket ‘<’ in the following circumstance: The ‘<’ must be
+preceded either by whitespace, an opening parenthesis or an opening
+brace and immediately followed by a character starting an XML name.
+
+```ebnf
+ ( whitespace | ‘(’ | ‘{’ ) ‘<’ (XNameStart | ‘!’ | ‘?’)
+
+  XNameStart ::= ‘_’ | BaseChar | Ideographic // as in W3C XML, but without ‘:’
+```
+
+The scanner switches from XML mode to Scala mode if either
+
+- the XML expression or the XML pattern started by the initial ‘<’ has been 
+  successfully parsed, or if
+- the parser encounters an embedded Scala expression or pattern and 
+  forces the Scanner 
+  back to normal mode, until the Scala expression or pattern is
+  successfully parsed. In this case, since code and XML fragments can be
+  nested, the parser has to maintain a stack that reflects the nesting
+  of XML and Scala expressions adequately.
+
+Note that no Scala tokens are constructed in XML mode, and that comments are interpreted
+as text.
+
+### Example
+
+The following value definition uses an XML literal with two embedded
+Scala expressions:
+
+```scala
+val b = <book>
+          <title>The Scala Language Specification</title>
+          <version>{scalaBook.version}</version>
+          <authors>{scalaBook.authors.mkList("", ", ", "")}</authors>
+        </book>
+```
diff --git a/spec/02-identifiers-names-and-scopes.md b/spec/02-identifiers-names-and-scopes.md
new file mode 100644
index 0000000..bfb743d
--- /dev/null
+++ b/spec/02-identifiers-names-and-scopes.md
@@ -0,0 +1,114 @@
+---
+title: Identifiers, Names and Scopes
+layout: default
+chapter: 2
+---
+
+# Identifiers, Names and Scopes
+
+Names in Scala identify types, values, methods, and classes which are
+collectively called _entities_. Names are introduced by local
+[definitions and declarations](04-basic-declarations-and-definitions.html#basic-declarations-and-definitions),
+[inheritance](05-classes-and-objects.html#class-members),
+[import clauses](04-basic-declarations-and-definitions.html#import-clauses), or
+[package clauses](09-top-level-definitions.html#packagings)
+which are collectively called _bindings_.
+
+Bindings of different kinds have a precedence defined on them:
+
+1. Definitions and declarations that are local, inherited, or made
+   available by a package clause in the same compilation unit where the 
+   definition occurs have highest precedence. 
+1. Explicit imports have next highest precedence.
+1. Wildcard imports  have next highest precedence.
+1. Definitions made available by a package clause not in the
+   compilation unit where the definition occurs have lowest precedence.
+
+
+There are two different name spaces, one for [types](03-types.html#types)
+and one for [terms](06-expressions.html#expressions). The same name may designate a
+type and a term, depending on the context where the name is used.
+
+A binding has a _scope_ in which the entity defined by a single
+name can be accessed using a simple name. Scopes are nested.  A binding
+in some inner scope _shadows_ bindings of lower precedence in the
+same scope as well as bindings of the same or lower precedence in outer
+scopes. 
+
+<!-- TODO: either the example, the spec, or the compiler is wrong
+
+Note that shadowing is only a partial order. In a situation like
+
+```scala
+val x = 1
+{
+  import p.x
+  x
+}
+```
+
+neither binding of `x` shadows the other. Consequently, the
+reference to `x` in the last line of the block above would be ambiguous.
+-->
+
+A reference to an unqualified (type- or term-) identifier $x$ is bound
+by the unique binding, which
+
+- defines an entity with name $x$ in the same namespace as the identifier, and
+- shadows all other bindings that define entities with name $x$ in that 
+  namespace.
+
+It is an error if no such binding exists.  If $x$ is bound by an
+import clause, then the simple name $x$ is taken to be equivalent to
+the qualified name to which $x$ is mapped by the import clause. If $x$
+is bound by a definition or declaration, then $x$ refers to the entity
+introduced by that binding. In that case, the type of $x$ is the type
+of the referenced entity.
+
+A reference to a qualified (type- or term-) identifier $e.x$ refers to
+the member of the type $T$ of $e$ which has the name $x$ in the same
+namespace as the identifier. It is an error if $T$ is not a [value type](03-types.html#value-types).
+The type of $e.x$ is the member type of the referenced entity in $T$.
+
+
+### Example
+
+Assume the following two definitions of objects named `X` in packages `P` and `Q`.
+
+```scala
+package P {
+  object X { val x = 1; val y = 2 }
+}
+
+package Q {
+  object X { val x = true; val y = "" }
+}
+```
+
+The following program illustrates different kinds of bindings and
+precedences between them.
+
+```scala
+package P {                  // `X' bound by package clause
+import Console._             // `println' bound by wildcard import
+object A {
+  println("L4: "+X)          // `X' refers to `P.X' here
+  object B {
+    import Q._               // `X' bound by wildcard import
+    println("L7: "+X)        // `X' refers to `Q.X' here
+    import X._               // `x' and `y' bound by wildcard import
+    println("L8: "+x)        // `x' refers to `Q.X.x' here
+    object C {
+      val x = 3              // `x' bound by local definition
+      println("L12: "+x)     // `x' refers to constant `3' here
+      { import Q.X._         // `x' and `y' bound by wildcard import
+//      println("L14: "+x)   // reference to `x' is ambiguous here
+        import X.y           // `y' bound by explicit import
+        println("L16: "+y)   // `y' refers to `Q.X.y' here
+        { val x = "abc"      // `x' bound by local definition
+          import P.X._       // `x' and `y' bound by wildcard import
+//        println("L19: "+y) // reference to `y' is ambiguous here
+          println("L20: "+x) // `x' refers to string "abc" here
+}}}}}}
+```
+
diff --git a/spec/03-types.md b/spec/03-types.md
new file mode 100644
index 0000000..66ddee8
--- /dev/null
+++ b/spec/03-types.md
@@ -0,0 +1,1056 @@
+---
+title: Types
+layout: default
+chapter: 3
+---
+
+# Types
+
+```ebnf
+  Type              ::=  FunctionArgTypes ‘=>’ Type
+                      |  InfixType [ExistentialClause]
+  FunctionArgTypes  ::=  InfixType
+                      |  ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’
+  ExistentialClause ::=  ‘forSome’ ‘{’ ExistentialDcl 
+                             {semi ExistentialDcl} ‘}’
+  ExistentialDcl    ::=  ‘type’ TypeDcl 
+                      |  ‘val’ ValDcl
+  InfixType         ::=  CompoundType {id [nl] CompoundType}
+  CompoundType      ::=  AnnotType {‘with’ AnnotType} [Refinement]
+                      |  Refinement
+  AnnotType         ::=  SimpleType {Annotation}
+  SimpleType        ::=  SimpleType TypeArgs
+                      |  SimpleType ‘#’ id
+                      |  StableId
+                      |  Path ‘.’ ‘type’
+                      |  ‘(’ Types ‘)’
+  TypeArgs          ::=  ‘[’ Types ‘]’
+  Types             ::=  Type {‘,’ Type}
+```
+
+We distinguish between first-order types and type constructors, which
+take type parameters and yield types. A subset of first-order types
+called _value types_ represents sets of (first-class) values.
+Value types are either _concrete_ or _abstract_. 
+
+Every concrete value type can be represented as a _class type_, i.e. a
+[type designator](#type-designators) that refers to a
+[class or a trait](05-classes-and-objects.html#class-definitions) [^1], or as a
+[compound type](#compound-types) representing an
+intersection of types, possibly with a [refinement](#compound-types)
+that further constrains the types of its members.
+<!-- 
+A shorthand exists for denoting [function types](#function-types) 
+-->
+Abstract value types are introduced by [type parameters](04-basic-declarations-and-definitions.html#type-parameters)
+and [abstract type bindings](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases).
+Parentheses in types can be used for grouping.
+
+[^1]: We assume that objects and packages also implicitly
+      define a class (of the same name as the object or package, but
+      inaccessible to user programs).
+
+Non-value types capture properties of identifiers that 
+[are not values](#non-value-types). For example, a 
+[type constructor](#type-constructors) does not directly specify a type of 
+values. However, when a type constructor is applied to the correct type 
+arguments, it yields a first-order type, which may be a value type. 
+
+Non-value types are expressed indirectly in Scala. E.g., a method type is 
+described by writing down a method signature, which in itself is not a real 
+type, although it  gives rise to a corresponding [method type](#method-types). 
+Type constructors are another example, as one can write 
+`type Swap[m[_, _], a,b] = m[b, a]`, but there is no syntax to write 
+the corresponding anonymous type function directly.
+
+
+## Paths
+
+```ebnf
+Path            ::=  StableId
+                  |  [id ‘.’] this
+StableId        ::=  id
+                  |  Path ‘.’ id
+                  |  [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id
+ClassQualifier  ::= ‘[’ id ‘]’
+```
+
+Paths are not types themselves, but they can be a part of named types
+and in that function play a central role in Scala's type system.
+
+A path is one of the following.
+
+- The empty path ε (which cannot be written explicitly in user programs).
+- $C.$`this`, where $C$ references a class.
+  The path `this` is taken as a shorthand for $C.$`this` where
+  $C$ is the name of the class directly enclosing the reference. 
+- $p.x$ where $p$ is a path and $x$ is a stable member of $p$.
+  _Stable members_ are packages or members introduced by object definitions or 
+  by value definitions of [non-volatile types](#volatile-types).
+- $C.$`super`$.x$ or $C.$`super`$[M].x$
+  where $C$ references a class and $x$ references a 
+  stable member of the super class or designated parent class $M$ of $C$. 
+  The prefix `super` is taken as a shorthand for $C.$`super` where
+  $C$ is the name of the class directly enclosing the reference. 
+
+A _stable identifier_ is a path which ends in an identifier.
+
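+For illustration (a non-normative sketch; the names are assumptions), the
+following definitions give rise to paths built from stable members:
+
+```scala
+object Server {
+  object config { val port = 8080 }   // objects are stable members
+  val host: String = "localhost"      // a value of a non-volatile type is stable
+}
+val p = Server.config.port   // `Server.config` is a path;
+                             // `Server.config.port` is a stable identifier
+```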
+
+## Value Types
+
+Every value in Scala has a type which is of one of the following
+forms.
+
+### Singleton Types
+
+```ebnf
+SimpleType  ::=  Path ‘.’ type
+```
+
+A singleton type is of the form $p.$`type`, where $p$ is a
+path pointing to a value expected to [conform](06-expressions.html#expression-typing)
+to `scala.AnyRef`. The type denotes the set of values
+consisting of `null` and the value denoted by $p$. 
+
+A _stable type_ is either a singleton type or a type which is
+declared to be a subtype of trait `scala.Singleton`.
+
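+For illustration (a non-normative sketch), the singleton type of a stable path
+is inhabited only by the value denoted by that path (and by `null`):
+
+```scala
+object Config { val name = "prod" }
+val c: Config.type = Config   // `Config.type` denotes the singleton type of `Config`
+```
+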
+### Type Projection
+
+```ebnf
+SimpleType  ::=  SimpleType ‘#’ id
+```
+
+A type projection $T$#$x$ references the type member named
+$x$ of type $T$. 
+
+<!--
+The following is no longer necessary:
+If $x$ references an abstract type member, then $T$ must be a 
+[stable type](#singleton-types)
+-->
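+
+For illustration (a non-normative sketch), a type projection selects a type
+member relative to a type rather than to a particular instance:
+
+```scala
+class Graph { class Node }
+def copy(n: Graph#Node): Graph#Node = n   // a Node of any Graph instance
+```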
+
+### Type Designators
+
+```ebnf
+SimpleType  ::=  StableId
+```
+
+A type designator refers to a named value type. It can be simple or
+qualified. All such type designators are shorthands for type projections.
+
+Specifically, the unqualified type name $t$ where $t$ is bound in some
+class, object, or package $C$ is taken as a shorthand for
+$C.$`this.type#`$t$. If $t$ is
+not bound in a class, object, or package, then $t$ is taken as a
+shorthand for ε`.type#`$t$.
+
+A qualified type designator has the form `p.t` where `p` is
+a [path](#paths) and _t_ is a type name. Such a type designator is
+equivalent to the type projection `p.type#t`.
+
+### Example
+
+Some type designators and their expansions are listed below. We assume
+a local type parameter $t$, a value `maintable`
+with a type member `Node` and the standard class `scala.Int`,
+
+| Designator          | Expansion                 |
+|-------------------- | --------------------------|
+|t                    | ε.type#t                  |
+|Int                  | scala.type#Int            |
+|scala.Int            | scala.type#Int            |
+|data.maintable.Node  | data.maintable.type#Node  |
+
+
+
+### Parameterized Types
+
+```ebnf
+SimpleType      ::=  SimpleType TypeArgs
+TypeArgs        ::=  ‘[’ Types ‘]’
+```
+
+A parameterized type $T[ T_1 , \ldots , T_n ]$ consists of a type
+designator $T$ and type arguments $T_1 , \ldots , T_n$ where
+$n \geq 1$. $T$ must refer to a type constructor which takes $n$ type
+parameters $a_1 , \ldots , a_n$.
+
+Say the type parameters have lower bounds $L_1 , \ldots , L_n$ and
+upper bounds $U_1, \ldots, U_n$.  The parameterized type is
+well-formed if each actual type argument
+_conforms to its bounds_, i.e. $\sigma L_i <: T_i <: \sigma U_i$ where $\sigma$ is the
+substitution $[ a_1 := T_1 , \ldots , a_n := T_n ]$.
+
+### Example
+Given the partial type definitions:
+
+```scala
+class TreeMap[A <: Comparable[A], B] { … }
+class List[A] { … }
+class I extends Comparable[I] { … }
+
+class F[M[_], X] { … }
+class S[K <: String] { … }
+class G[M[ Z <: I ], I] { … }
+```
+
+the following parameterized types are well formed:
+
+```scala
+TreeMap[I, String]
+List[I]
+List[List[Boolean]]
+
+F[List, Int]
+G[S, String]
+```
+
+### Example
+
+Given the [above type definitions](#example-parameterized-types),
+the following types are ill-formed:
+
+```scala
+TreeMap[I]            // illegal: wrong number of parameters
+TreeMap[List[I], Int] // illegal: type parameter not within bound
+
+F[Int, Boolean]       // illegal: Int is not a type constructor
+F[TreeMap, Int]       // illegal: TreeMap takes two parameters,
+                      //   F expects a constructor taking one
+G[S, Int]             // illegal: S constrains its parameter to
+                      //   conform to String,
+                      // G expects type constructor with a parameter
+                      //   that conforms to Int
+```
+
+### Tuple Types
+
+```ebnf
+SimpleType    ::=   ‘(’ Types ‘)’
+```
+
+A tuple type $(T_1 , \ldots , T_n)$ is an alias for the
+class `scala.Tuple$_n$[$T_1$, … , $T_n$]`, where $n \geq 2$.
+
+Tuple classes are case classes whose fields can be accessed using
+selectors `_1` , … , `_n`. Their functionality is
+abstracted in a corresponding `Product` trait. The _n_-ary tuple
+class and product trait are defined at least as follows in the
+standard Scala library (they might also add other methods and
+implement other traits).
+
+```scala
+case class Tuple$_n$[+$T_1$, … , +$T_n$]($_1$: $T_1$, … , $_n$: $T_n$)
+extends Product$_n$[$T_1$, … , $T_n$]
+
+trait Product$_n$[+$T_1$, … , +$T_n$] {
+  override def productArity = $n$
+  def $_1$: $T_1$
+  …
+  def $_n$: $T_n$
+}
+```
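+
+For illustration (a non-normative sketch), a tuple type and its class-type
+expansion are interchangeable:
+
+```scala
+val pair: (Int, String) = (1, "one")     // the same type as Tuple2[Int, String]
+val expanded: Tuple2[Int, String] = pair
+val n: Int = pair._1                     // selector provided by the tuple class
+```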
+
+### Annotated Types
+
+```ebnf
+AnnotType  ::=  SimpleType {Annotation}
+```
+
+An annotated type $T$ $a_1, \ldots, a_n$
+attaches [annotations](11-user-defined-annotations.html#user-defined-annotations)
+$a_1 , \ldots , a_n$ to the type $T$.
+
+### Example
+
+The following type adds the `@suspendable` annotation to the type `String`:
+
+```scala
+String @suspendable
+```
+
+
+### Compound Types
+
+```ebnf
+CompoundType    ::=  AnnotType {‘with’ AnnotType} [Refinement]
+                  |  Refinement
+Refinement      ::=  [nl] ‘{’ RefineStat {semi RefineStat} ‘}’
+RefineStat      ::=  Dcl
+                  |  ‘type’ TypeDef
+                  |
+```
+
+A compound type $T_1$ `with` … `with` $T_n \\{ R \\}$
+represents objects with members as given in the component types 
+$T_1 , \ldots , T_n$ and the refinement $\\{ R \\}$. A refinement
+$\\{ R \\}$ contains declarations and type definitions.
+If a declaration or definition overrides a declaration or definition in
+one of the component types $T_1 , \ldots , T_n$, the usual rules for
+[overriding](05-classes-and-objects.html#overriding) apply; otherwise the declaration
+or definition is said to be “structural” [^2].
+
+[^2]: A reference to a structurally defined member (method call or access
+      to a value or variable) may generate binary code that is significantly
+      slower than equivalent code accessing a non-structural member.
+
+Within a method declaration in a structural refinement, the type of
+any value parameter may only refer to type parameters or abstract
+types that are contained inside the refinement. That is, it must refer
+either to a type parameter of the method itself, or to a type
+definition within the refinement. This restriction does not apply to
+the method's result type.
+
+If no refinement is given, the empty refinement is implicitly added,
+i.e. $T_1$ `with` … `with` $T_n$ is a shorthand for $T_1$ `with` … `with` $T_n \\{\\}$.
+
+A compound type may also consist of just a refinement
+$\\{ R \\}$ with no preceding component types. Such a type is
+equivalent to `AnyRef` $\\{ R \\}$.
+
+### Example
+
+The following example shows how to declare and use a method whose
+parameter type contains a refinement with structural declarations.
+
+```scala
+case class Bird(val name: String) extends Object {
+  def fly(height: Int) = …
+  …
+}
+case class Plane(val callsign: String) extends Object {
+  def fly(height: Int) = …
+  …
+}
+def takeoff(
+      runway: Int,
+      r: { val callsign: String; def fly(height: Int) }) = {
+  tower.print(r.callsign + " requests take-off on runway " + runway)
+  tower.read(r.callsign + " is clear for take-off")
+  r.fly(1000)
+}
+val bird = new Bird("Polly the parrot"){ val callsign = name }
+val a380 = new Plane("TZ-987")
+takeoff(42, bird)
+takeoff(89, a380)
+```
+
+Although `Bird` and `Plane` do not share any parent class other than
+`Object`, the parameter _r_ of method `takeoff` is defined using a
+refinement with structural declarations to accept any object that declares
+a value `callsign` and a `fly` method.
+
+ 
+### Infix Types
+
+```ebnf
+InfixType     ::=  CompoundType {id [nl] CompoundType}
+```
+
+An infix type $T_1$ `op` $T_2$ consists of an infix
+operator `op` which gets applied to two type operands $T_1$ and
+$T_2$.  The type is equivalent to the type application 
+`op`$[T_1, T_2]$.  The infix operator `op` may be an
+arbitrary identifier, except for `*`, which is reserved as a postfix modifier
+denoting a [repeated parameter type](04-basic-declarations-and-definitions.html#repeated-parameters).
+
+All type infix operators have the same precedence; parentheses have to
+be used for grouping. The [associativity](06-expressions.html#prefix-infix-and-postfix-operations)
+of a type operator is determined as for term operators: type operators
+ending in a colon ‘:’ are right-associative; all other
+operators are left-associative.
+
+In a sequence of consecutive type infix operations
+$t_0 \, \mathit{op_1} \, t_1 \, \mathit{op_2} \, \ldots \, \mathit{op_n} \, t_n$,
+all operators $\mathit{op}_1 , \ldots , \mathit{op}_n$ must have the same
+associativity. If they are all left-associative, the sequence is
+interpreted as 
+$(\ldots (t_0 \mathit{op_1} t_1) \mathit{op_2} \ldots) \mathit{op_n} t_n$,
+otherwise it is interpreted as 
+$t_0 \mathit{op_1} (t_1 \mathit{op_2} ( \ldots \mathit{op_n} t_n) \ldots)$.
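+
+For illustration (a non-normative sketch; the name `Or` is an assumption), an
+infix type is simply an application of a binary type constructor:
+
+```scala
+type Or[A, B] = Either[A, B]
+type IntOrString = Int Or String   // equivalent to Or[Int, String]
+val v: IntOrString = Left(1)
+```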
+
+### Function Types
+
+```ebnf
+Type              ::=  FunctionArgs ‘=>’ Type
+FunctionArgs      ::=  InfixType
+                    |  ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’
+```
+
+The type $(T_1 , \ldots , T_n) \Rightarrow U$ represents the set of function
+values that take arguments of types $T_1 , \ldots , T_n$ and yield
+results of type $U$.  In the case of exactly one argument type,
+$T \Rightarrow U$ is a shorthand for $(T) \Rightarrow U$.
+An argument type of the form $\Rightarrow T$
+represents a [call-by-name parameter](04-basic-declarations-and-definitions.html#by-name-parameters) of type $T$.
+
+Function types associate to the right, e.g.
+$S \Rightarrow T \Rightarrow U$ is the same as 
+$S \Rightarrow (T \Rightarrow U)$.
+
+Function types are shorthands for class types that define `apply`
+functions.  Specifically, the $n$-ary function type 
+$(T_1 , \ldots , T_n) \Rightarrow U$ is a shorthand for the class type
+`Function$_n$[$T_1$ , … , $T_n$, $U$]`. Such class
+types are defined in the Scala library for $n$ between 0 and 9 as follows.
+
+```scala
+package scala
+trait Function$_n$[-$T_1$ , … , -$T_n$, +$R$] {
+  def apply($x_1$: $T_1$ , … , $x_n$: $T_n$): $R$
+  override def toString = "<function>"
+}
+```
+
+Hence, function types are [covariant](04-basic-declarations-and-definitions.html#variance-annotations) in their
+result type and contravariant in their argument types.
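+
+For illustration (a non-normative sketch), a function type and its expansion to
+the corresponding `Function` class type denote the same values:
+
+```scala
+val inc: Int => Int = x => x + 1      // shorthand for Function1[Int, Int]
+val sameType: Function1[Int, Int] = inc
+val add: (Int, Int) => Int = _ + _    // shorthand for Function2[Int, Int, Int]
+```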
+
+### Existential Types
+
+```ebnf
+Type               ::= InfixType ExistentialClauses
+ExistentialClauses ::= ‘forSome’ ‘{’ ExistentialDcl 
+                       {semi ExistentialDcl} ‘}’
+ExistentialDcl     ::= ‘type’ TypeDcl 
+                    |  ‘val’ ValDcl
+```
+
+An existential type has the form `$T$ forSome { $Q$ }`
+where $Q$ is a sequence of 
+[type declarations](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases).
+
+Let 
+$t_1[\mathit{tps}_1] >: L_1 <: U_1 , \ldots , t_n[\mathit{tps}_n] >: L_n <: U_n$ 
+be the types declared in $Q$ (any of the 
+type parameter sections `[ $\mathit{tps}_i$ ]` might be missing).
+The scope of each type $t_i$ includes the type $T$ and the existential clause 
+$Q$. 
+The type variables $t_i$ are said to be _bound_ in the type 
+`$T$ forSome { $Q$ }`.
+Type variables which occur in a type $T$ but which are not bound in $T$ are said
+to be _free_ in $T$.
+
+A _type instance_ of `$T$ forSome { $Q$ }` 
+is a type $\sigma T$ where $\sigma$ is a substitution over $t_1 , \ldots , t_n$
+such that, for each $i$, $\sigma L_i <: \sigma t_i <: \sigma U_i$.
+The set of values denoted by the existential type `$T$ forSome {$\,Q\,$}`
+is the union of the set of values of all its type instances.
+
+A _skolemization_ of `$T$ forSome { $Q$ }` is
+a type instance $\sigma T$, where $\sigma$ is the substitution
+$[t'_1/t_1 , \ldots , t'_n/t_n]$ and each $t'_i$ is a fresh abstract type
+with lower bound $\sigma L_i$ and upper bound $\sigma U_i$.
+
+#### Simplification Rules
+
+Existential types obey the following four equivalences:
+
+1. Multiple for-clauses in an existential type can be merged. E.g.,
+`$T$ forSome { $Q$ } forSome { $Q'$ }`
+is equivalent to
+`$T$ forSome { $Q$ ; $Q'$}`.
+1. Unused quantifications can be dropped. E.g.,
+`$T$ forSome { $Q$ ; $Q'$}`
+where none of the types defined in $Q'$ are referred to by $T$ or $Q$,
+is equivalent to
+`$T$ forSome {$ Q $}`.
+1. An empty quantification can be dropped. E.g.,
+`$T$ forSome { }` is equivalent to $T$.
+1. An existential type `$T$ forSome { $Q$ }` where $Q$ contains
+a clause `type $t[\mathit{tps}] >: L <: U$` is equivalent 
+to the type `$T'$ forSome { $Q$ }` where $T'$ results from $T$ by replacing 
+every [covariant occurrence](04-basic-declarations-and-definitions.html#variance-annotations) of $t$ in $T$ by $U$ and by
+replacing every contravariant occurrence of $t$ in $T$ by $L$.
+
+
+#### Existential Quantification over Values
+
+As a syntactic convenience, the bindings clause
+in an existential type may also contain
+value declarations `val $x$: $T$`. 
+An existential type `$T$ forSome { $Q$; val $x$: $S\,$;$\,Q'$ }`
+is treated as a shorthand for the type
+`$T'$ forSome { $Q$; type $t$ <: $S$ with Singleton; $Q'$ }`, where $t$ is a 
+fresh type name and $T'$ results from $T$ by replacing every occurrence of 
+`$x$.type` with $t$.
+
+#### Placeholder Syntax for Existential Types
+
+```ebnf
+WildcardType   ::=  ‘_’ TypeBounds
+```
+
+Scala supports a placeholder syntax for existential types.
+A _wildcard type_ is of the form `_$\;$>:$\,L\,$<:$\,U$`. Both bound
+clauses may be omitted. If a lower bound clause `>:$\,L$` is missing, 
+`>:$\,$scala.Nothing`
+is assumed. If an upper bound clause `<:$\,U$` is missing, 
+`<:$\,$scala.Any` is assumed. A wildcard type is a shorthand for an 
+existentially quantified type variable, where the existential quantification is 
+implicit.
+
+A wildcard type must appear as a type argument of a parameterized type.
+Let $T = p.c[\mathit{targs},W,\mathit{targs}']$ be a parameterized type where
+$\mathit{targs}, \mathit{targs}'$ may be empty and
+$W$ is a wildcard type `_$\;$>:$\,L\,$<:$\,U$`. Then $T$ is equivalent to the
+existential type
+
+```scala
+$p.c[\mathit{targs},t,\mathit{targs}']$ forSome { type $t$ >: $L$ <: $U$ }
+```
+
+where $t$ is some fresh type variable. 
+Wildcard types may also appear as parts of [infix types](#infix-types)
+, [function types](#function-types),
+or [tuple types](#tuple-types).
+Their expansion is then the expansion in the equivalent parameterized
+type.
+
+### Example
+
+Assume the class definitions
+
+```scala
+class Ref[T]
+abstract class Outer { type T } .
+```
+
+Here are some examples of existential types:
+
+```scala
+Ref[T] forSome { type T <: java.lang.Number }
+Ref[x.T] forSome { val x: Outer }
+Ref[x_type # T] forSome { type x_type <: Outer with Singleton }
+```
+
+The last two types in this list are equivalent.
+An alternative formulation of the first type above using wildcard syntax is:
+
+```scala
+Ref[_ <: java.lang.Number]
+```
+
+### Example
+
+The type `List[List[_]]` is equivalent to the existential type
+
+```scala
+List[List[t] forSome { type t }] .
+```
+
+### Example
+
+Assume a covariant type
+
+```scala
+class List[+T]
+```
+
+The type
+
+```scala
+List[T] forSome { type T <: java.lang.Number }
+```
+
+is equivalent (by simplification rule 4 above) to
+
+```scala
+List[java.lang.Number] forSome { type T <: java.lang.Number }
+```
+
+which is in turn equivalent (by simplification rules 2 and 3 above) to
+`List[java.lang.Number]`.
+
+
+## Non-Value Types
+
+The types explained in the following do not denote sets of values, nor
+do they appear explicitly in programs. They are introduced in this
+report as the internal types of defined identifiers.
+
+
+### Method Types
+
+A method type is denoted internally as $(\mathit{Ps})U$, where $(\mathit{Ps})$ 
+is a sequence of parameter names and types $(p_1:T_1 , \ldots , p_n:T_n)$
+for some $n \geq 0$ and $U$ is a (value or method) type.  This type
+represents named methods that take arguments named $p_1 , \ldots , p_n$ 
+of types $T_1 , \ldots , T_n$
+and that return a result of type $U$.
+
+Method types associate to the right: $(\mathit{Ps}_1)(\mathit{Ps}_2)U$ is
+treated as $(\mathit{Ps}_1)((\mathit{Ps}_2)U)$.
+
+A special case is the type of a method without any parameters, written
+here as `=> T`. Parameterless methods name expressions
+that are re-evaluated each time the parameterless method name is
+referenced.
+
+Method types do not exist as types of values. If a method name is used
+as a value, its type is [implicitly converted](06-expressions.html#implicit-conversions) to a
+corresponding function type.
+
+###### Example
+
+The declarations
+
+```scala
+def a: Int
+def b (x: Int): Boolean
+def c (x: Int) (y: String, z: String): String
+```
+
+produce the typings
+
+```scala
+a: => Int
+b: (Int) Boolean
+c: (Int) (String, String) String
+```
+
+### Polymorphic Method Types
+
+A polymorphic method type is denoted internally as `[$\mathit{tps}\,$]$T$` where
+`[$\mathit{tps}\,$]` is a type parameter section 
+`[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]` 
+for some $n \geq 0$ and $T$ is a
+(value or method) type.  This type represents named methods that
+take type arguments `$S_1 , \ldots , S_n$` which
+[conform](#parameterized-types) to the lower bounds
+`$L_1 , \ldots , L_n$` and the upper bounds
+`$U_1 , \ldots , U_n$` and that yield results of type $T$.
+
+###### Example
+
+The declarations
+
+```scala
+def empty[A]: List[A]
+def union[A <: Comparable[A]] (x: Set[A], xs: Set[A]): Set[A]
+```
+
+produce the typings
+
+```scala
+empty : [A >: Nothing <: Any] List[A]
+union : [A >: Nothing <: Comparable[A]] (x: Set[A], xs: Set[A]) Set[A]  .
+```
+
+### Type Constructors
+
+A type constructor is represented internally much like a polymorphic method type.
+`[$\pm$ $a_1$ >: $L_1$ <: $U_1 , \ldots , \pm a_n$ >: $L_n$ <: $U_n$] $T$` 
+represents a type that is expected by a 
+[type constructor parameter](04-basic-declarations-and-definitions.html#type-parameters) or an
+[abstract type constructor binding](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases) with
+the corresponding type parameter clause.
+
+###### Example
+
+Consider this fragment of the `Iterable[+X]` class:
+
+```scala
+trait Iterable[+X] {
+  def flatMap[newType[+X] <: Iterable[X], S](f: X => newType[S]): newType[S]
+}
+```
+
+Conceptually, the type constructor `Iterable` is a name for the
+anonymous type `[+X] Iterable[X]`, which may be passed to the
+`newType` type constructor parameter in `flatMap`.
+
+
+<!-- ### Overloaded Types
+
+More than one values or methods are defined in the same scope with the
+same name, we model
+
+An overloaded type consisting of type alternatives $T_1 \commadots T_n (n \geq 2)$ is denoted internally $T_1 \overload \ldots \overload T_n$.
+
+### Example
+```
+def println: Unit
+def println(s: String): Unit = $\ldots$
+def println(x: Float): Unit = $\ldots$
+def println(x: Float, width: Int): Unit = $\ldots$
+def println[A](x: A)(tostring: A => String): Unit = $\ldots$
+```
+define a single function `println` which has an overloaded
+type.
+```
+println:  => Unit $\overload$
+          (String) Unit $\overload$
+          (Float) Unit $\overload$
+          (Float, Int) Unit $\overload$
+          [A] (A) (A => String) Unit
+```
+
+### Example
+```
+def f(x: T): T = $\ldots$
+val f = 0
+```
+define a function `f} which has type `(x: T)T $\overload$ Int`.
+-->
+
+
+## Base Types and Member Definitions
+
+Types of class members depend on the way the members are referenced.
+Central here are three notions, namely:
+1. the notion of the set of base types of a type $T$,
+1. the notion of a type $T$ in some class $C$ seen from some
+   prefix type $S$,
+1. the notion of the set of member bindings of some type $T$.
+
+
+These notions are defined mutually recursively as follows.
+
+1. The set of _base types_ of a type is a set of class types,
+   given as follows.
+  - The base types of a class type $C$ with parents $T_1 , \ldots , T_n$ are
+    $C$ itself, as well as the base types of the compound type
+    `$T_1$ with … with $T_n$ { $R$ }`.
+  - The base types of an aliased type are the base types of its alias.
+  - The base types of an abstract type are the base types of its upper bound.
+  - The base types of a parameterized type 
+    `$C$[$T_1 , \ldots , T_n$]` are the base types
+    of type $C$, where every occurrence of a type parameter $a_i$ 
+    of $C$ has been replaced by the corresponding parameter type $T_i$.
+  - The base types of a singleton type `$p$.type` are the base types of
+    the type of $p$.
+  - The base types of a compound type 
+    `$T_1$ with $\ldots$ with $T_n$ { $R$ }`
+    are the _reduced union_ of the base
+    classes of all $T_i$'s. This means: 
+    Let the multi-set $\mathscr{S}$ be the multi-set-union of the
+    base types of all $T_i$'s.
+    If $\mathscr{S}$ contains several type instances of the same class, say
+    `$S^i$#$C$[$T^i_1 , \ldots , T^i_n$]` $(i \in I)$, then
+    all those instances 
+    are replaced by one of them which conforms to all
+    others. It is an error if no such instance exists. It follows that the 
+    reduced union, if it exists,
+    produces a set of class types, where different types are instances of 
+    different classes.
+  - The base types of a type selection `$S$#$T$` are
+    determined as follows. If $T$ is an alias or abstract type, the
+    previous clauses apply. Otherwise, $T$ must be a (possibly
+    parameterized) class type, which is defined in some class $B$.  Then
+    the base types of `$S$#$T$` are the base types of $T$
+    in $B$ seen from the prefix type $S$.
+  - The base types of an existential type `$T$ forSome { $Q$ }` are
+    all types `$S$ forSome { $Q$ }` where $S$ is a base type of $T$.
+
+1. The notion of a type $T$ _in class $C$ seen from some prefix type $S$_
+   makes sense only if the prefix type $S$
+   has a type instance of class $C$ as a base type, say
+   `$S'$#$C$[$T_1 , \ldots , T_n$]`. Then we define as follows.
+    - If `$S$ = $\epsilon$.type`, then $T$ in $C$ seen from $S$ is 
+      $T$ itself.
+    - Otherwise, if $S$ is an existential type `$S'$ forSome { $Q$ }`, and
+      $T$ in $C$ seen from $S'$ is $T'$, 
+      then $T$ in $C$ seen from $S$ is `$T'$ forSome {$\,Q\,$}`.
+    - Otherwise, if $T$ is the $i$'th type parameter of some class $D$, then
+        - If $S$ has a base type `$D$[$U_1 , \ldots , U_n$]`, for some type 
+          parameters `[$U_1 , \ldots , U_n$]`, then $T$ in $C$ seen from $S$ 
+          is $U_i$.
+        - Otherwise, if $C$ is defined in a class $C'$, then
+          $T$ in $C$ seen from $S$ is the same as $T$ in $C'$ seen from $S'$.
+        - Otherwise, if $C$ is not defined in another class, then  
+          $T$ in $C$ seen from $S$ is $T$ itself.
+    - Otherwise, if $T$ is the singleton type `$D$.this.type` for some class $D$
+      then
+        - If $D$ is a subclass of $C$ and $S$ has a type instance of class $D$ 
+          among its base types, then $T$ in $C$ seen from $S$ is $S$.
+        - Otherwise, if $C$ is defined in a class $C'$, then
+          $T$ in $C$ seen from $S$ is the same as $T$ in $C'$ seen from $S'$.
+        - Otherwise, if $C$ is not defined in another class, then  
+          $T$ in $C$ seen from $S$ is $T$ itself.
+    - If $T$ is some other type, then the described mapping is performed
+      to all its type components.
+
+    If $T$ is a possibly parameterized class type, where $T$'s class
+    is defined in some other class $D$, and $S$ is some prefix type,
+    then we use "$T$ seen from $S$" as a shorthand for
+    "$T$ in $D$ seen from $S$".
+
+1. The _member bindings_ of a type $T$ are
+   1. all bindings $d$ such that there exists a type instance of some class $C$ among the base types of $T$
+     and there exists a definition or declaration $d'$ in $C$
+     such that $d$ results from $d'$ by replacing every
+     type $T'$ in $d'$ by $T'$ in $C$ seen from $T$, and
+   2. all bindings of the type's [refinement](#compound-types), if it has one.
+
+   The _definition_ of a type projection `S#T` is the member
+   binding $d_T$ of the type `T` in `S`. In that case, we also say
+   that `S#T` _is defined by_ $d_T$.
+
+
+
+## Relations between types
+
+We define two relations between types.
+
+|Name             | Symbolically   |Interpretation                                   |
+|-----------------|----------------|-------------------------------------------------|
+|Equivalence      |$T \equiv U$    |$T$ and $U$ are interchangeable in all contexts. |
+|Conformance      |$T <: U$        |Type $T$ conforms to type $U$.                   |
+
+
+### Equivalence
+
+Equivalence $(\equiv)$ between types is the smallest congruence [^congruence] such that
+the following holds:
+
+- If $t$ is defined by a type alias `type $t$ = $T$`, then $t$ is
+  equivalent to $T$.
+- If a path $p$ has a singleton type `$q$.type`, then
+  `$p$.type $\equiv q$.type`.
+- If $O$ is defined by an object definition, and $p$ is a path
+  consisting only of package or object selectors and ending in $O$, then
+  `$O$.this.type $\equiv p$.type`.
+- Two [compound types](#compound-types) are equivalent if the sequences
+  of their component types are pairwise equivalent, and occur in the same order, and
+  their refinements are equivalent. Two refinements are equivalent if they
+  bind the same names and the modifiers, types and bounds of every
+  declared entity are equivalent in both refinements.
+- Two [method types](#method-types) are equivalent if:
+    - neither are implicit, or they both are [^implicit];
+    - they have equivalent result types;
+    - they have the same number of parameters; and
+    - corresponding parameters have equivalent types.
+      Note that the names of parameters do not matter for method type equivalence.
+- Two [polymorphic method types](#polymorphic-method-types) are equivalent if 
+  they have the same number of type parameters, and, after renaming one set of 
+  type parameters by another, the result types as well as lower and upper bounds
+  of corresponding type parameters are equivalent.
+- Two [existential types](#existential-types) 
+  are equivalent if they have the same number of
+  quantifiers, and, after renaming one list of type quantifiers by
+  another, the quantified types as well as lower and upper bounds of
+  corresponding quantifiers are equivalent.
+- Two [type constructors](#type-constructors) are equivalent if they have the 
+  same number of type parameters, and, after renaming one list of type 
+  parameters by another, the result types as well as variances, lower and upper 
+  bounds of corresponding type parameters are equivalent.
+
+
+[^congruence]: A congruence is an equivalence relation which is closed under formation of contexts.
+[^implicit]: A method type is implicit if the parameter section that defines it starts with the `implicit` keyword.
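+
+For illustration (a non-normative sketch), the first equivalence makes a type
+alias interchangeable with its right-hand side:
+
+```scala
+type Name = String           // `Name` is equivalent to `String`
+val n: Name = "scala"
+val s: String = n            // accepted, since Name ≡ String
+def f(x: Name): Int = x.length
+f(s)                         // a String may be passed where a Name is expected
+```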
+
+### Conformance
+
+The conformance relation $(<:)$ is the smallest 
+transitive relation that satisfies the following conditions.
+
+- Conformance includes equivalence. If $T \equiv U$ then $T <: U$.
+- For every value type $T$, `scala.Nothing <: $T$ <: scala.Any`. 
+- For every type constructor $T$ (with any number of type parameters), 
+  `scala.Nothing <: $T$ <: scala.Any`.
+      
+- For every class type $T$ such that `$T$ <: scala.AnyRef` and not 
+  `$T$ <: scala.NotNull` one has `scala.Null <: $T$`.
+- A type variable or abstract type $t$ conforms to its upper bound and
+  its lower bound conforms to $t$. 
+- A class type or parameterized type conforms to any of its base-types.
+- A singleton type `$p$.type` conforms to the type of the path $p$.
+- A singleton type `$p$.type` conforms to the type `scala.Singleton`.
+- A type projection `$T$#$t$` conforms to `$U$#$t$` if $T$ conforms to $U$.
+- A parameterized type `$T$[$T_1$ , … , $T_n$]` conforms to 
+  `$T$[$U_1$ , … , $U_n$]` if
+  the following three conditions hold for $i \in \{ 1 , \ldots , n \}$:
+    1. If the $i$'th type parameter of $T$ is declared covariant, then
+       $T_i <: U_i$.
+    1. If the $i$'th type parameter of $T$ is declared contravariant, then
+       $U_i <: T_i$.
+    1. If the $i$'th type parameter of $T$ is declared neither covariant
+       nor contravariant, then $U_i \equiv T_i$.
+- A compound type `$T_1$ with $\ldots$ with $T_n$ {$R\,$}` conforms to
+  each of its component types $T_i$.
+- If $T <: U_i$ for $i \in \{ 1 , \ldots , n \}$ and for every
+  binding $d$ of a type or value $x$ in $R$ there exists a member
+  binding of $x$ in $T$ which subsumes $d$, then $T$ conforms to the
+  compound type `$U_1$ with $\ldots$ with $U_n$ {$R\,$}`.
+- The existential type `$T$ forSome {$\,Q\,$}` conforms to 
+  $U$ if its [skolemization](#existential-types)
+  conforms to $U$.
+- The type $T$ conforms to the existential type `$U$ forSome {$\,Q\,$}` 
+  if $T$ conforms to one of the [type instances](#existential-types) 
+  of `$U$ forSome {$\,Q\,$}`.
+- If
+  $T_i \equiv T'_i$ for $i \in \{ 1 , \ldots , n\}$ and $U$ conforms to $U'$ 
+  then the method type $(p_1:T_1 , \ldots , p_n:T_n) U$ conforms to
+  $(p'_1:T'_1 , \ldots , p'_n:T'_n) U'$.
+- The polymorphic type
+  $[a_1 >: L_1 <: U_1 , \ldots , a_n >: L_n <: U_n] T$ conforms to the 
+  polymorphic type
+  $[a_1 >: L'_1 <: U'_1 , \ldots , a_n >: L'_n <: U'_n] T'$ if, assuming
+  $L'_1 <: a_1 <: U'_1 , \ldots , L'_n <: a_n <: U'_n$ 
+  one has $T <: T'$ and $L_i <: L'_i$ and $U'_i <: U_i$
+  for $i \in \{ 1 , \ldots , n \}$.
+- Type constructors $T$ and $T'$ follow a similar discipline. We characterize 
+  $T$ and $T'$ by their type parameter clauses
+  $[a_1 , \ldots , a_n]$ and
+  $[a'_1 , \ldots , a'_n ]$, where an $a_i$ or $a'_i$ may include a variance 
+  annotation, a higher-order type parameter clause, and bounds. Then, $T$ 
+  conforms to $T'$ if any list $[t_1 , \ldots , t_n]$ -- with declared 
+  variances, bounds and higher-order type parameter clauses -- of valid type 
+  arguments for $T'$ is also a valid list of type arguments for $T$ and 
+  $T[t_1 , \ldots , t_n] <: T'[t_1 , \ldots , t_n]$. Note that this entails 
+  that:
+    - The bounds on $a_i$ must be weaker than the corresponding bounds declared 
+      for $a'_i$. 
+    - The variance of $a_i$ must match the variance of $a'_i$, where covariance 
+      matches covariance, contravariance matches contravariance and any variance
+      matches invariance.
+    - Recursively, these restrictions apply to the corresponding higher-order
+      type parameter clauses of $a_i$ and $a'_i$.
+
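+For illustration (a non-normative sketch; the class names are assumptions), the
+variance rules for parameterized types above read as follows:
+
+```scala
+class Box[+A]                            // covariant type parameter
+class Sink[-A]                           // contravariant type parameter
+val b: Box[Any]     = new Box[String]    // Box[String] <: Box[Any]
+val s: Sink[String] = new Sink[Any]      // Sink[Any]   <: Sink[String]
+```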
+
+A declaration or definition in some compound type of class type $C$
+_subsumes_ another declaration of the same name in some compound type or class
+type $C'$, if one of the following holds.
+
+- A value declaration or definition that defines a name $x$ with type $T$ 
+  subsumes a value or method declaration that defines $x$ with type $T'$, provided 
+  $T <: T'$.
+- A method declaration or definition that defines a name $x$ with type $T$ 
+  subsumes a method declaration that defines $x$ with type $T'$, provided 
+  $T <: T'$.
+- A type alias
+  `type $t$[$T_1$ , … , $T_n$] = $T$` subsumes a type alias 
+  `type $t$[$T_1$ , … , $T_n$] = $T'$` if $T \equiv T'$. 
+- A type declaration `type $t$[$T_1$ , … , $T_n$] >: $L$ <: $U$` subsumes
+  a type declaration `type $t$[$T_1$ , … , $T_n$] >: $L'$ <: $U'$` if 
+  $L' <: L$ and $U <: U'$.
+- A type or class definition that binds a type name $t$ subsumes an abstract
+  type declaration `type t[$T_1$ , … , $T_n$] >: L <: U` if
+  $L <: t <: U$.
+
+
+The $(<:)$ relation forms a pre-order between types,
+i.e. it is transitive and reflexive. _Least upper bounds_ and
+_greatest lower bounds_ of a set of types
+are understood to be relative to that order.
+
+###### Note
+The least upper bound or greatest lower bound
+of a set of types does not always exist. For instance, consider
+the class definitions
+
+```scala
+class A[+T] {}
+class B extends A[B]
+class C extends A[C]
+```
+
+Then the types `A[Any], A[A[Any]], A[A[A[Any]]], ...` form
+a descending sequence of upper bounds for `B` and `C`. The
+least upper bound would be the infinite limit of that sequence, which
+does not exist as a Scala type. Since cases like this are in general
+impossible to detect, a Scala compiler is free to reject a term
+which has a type specified as a least upper or greatest lower bound
+if that bound would be more complex than some compiler-set
+limit [^4].
+
+The least upper bound or greatest lower bound might also not be
+unique. For instance `A with B` and `B with A` are both
+greatest lower bounds of `A` and `B`. If there are several
+least upper bounds or greatest lower bounds, the Scala compiler is
+free to pick any one of them.
+
+
+[^4]: The current Scala compiler limits the nesting level
+      of parameterization in such bounds to be at most two deeper than the
+      maximum nesting level of the operand types.
+
+
+
+### Weak Conformance
+
+In some situations Scala uses a more general conformance relation. A
+type $S$ _weakly conforms_ to a type $T$, written $S <:_w T$,
+if $S <: T$ or both $S$ and $T$ are primitive number types
+and $S$ precedes $T$ in the following ordering.
+
+```scala
+Byte  $<:_w$ Short 
+Short $<:_w$ Int
+Char  $<:_w$ Int
+Int   $<:_w$ Long
+Long  $<:_w$ Float
+Float $<:_w$ Double
+```
+
+A _weak least upper bound_ is a least upper bound with respect to
+weak conformance.
+
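+For illustration (a non-normative sketch), weak conformance is what allows
+integer and floating-point expressions to be mixed, widening to the weak least
+upper bound:
+
+```scala
+val xs = List(1, 2, 3.5)              // inferred as List[Double]
+val d  = if (xs.isEmpty) 0 else 1.5   // the branches widen to Double
+```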
+
+## Volatile Types
+
+Type volatility approximates the possibility that a type parameter or abstract 
+type instance
+of a type does not have any non-null values.  A value member of a volatile type 
+cannot appear in a [path](#paths).
+  
+A type is _volatile_ if it falls into one of four categories:
+
+A compound type `$T_1$ with … with $T_n$ {$R\,$}`
+is volatile if one of the following three conditions holds.
+
+1. One of $T_2 , \ldots , T_n$ is a type parameter or abstract type, or
+1. $T_1$ is an abstract type and either the refinement $R$
+   or a type $T_j$ for $j > 1$ contributes an abstract member
+   to the compound type, or
+1. one of $T_1 , \ldots , T_n$ is a singleton type.
+
+
+Here, a type $S$ _contributes an abstract member_ to a type $T$ if
+$S$ contains an abstract member that is also a member of $T$.
+A refinement $R$ contributes an abstract member to a type $T$ if $R$
+contains an abstract declaration which is also a member of $T$.
+
+A type designator is volatile if it is an alias of a volatile type, or
+if it designates a type parameter or abstract type that has a volatile type as 
+its upper bound.
+
+A singleton type `$p$.type` is volatile, if the underlying
+type of path $p$ is volatile.
+
+An existential type `$T$ forSome {$\,Q\,$}` is volatile if
+$T$ is volatile.
+
+
+## Type Erasure
+
+A type is called _generic_ if it contains type arguments or type variables.
+_Type erasure_ is a mapping from (possibly generic) types to
+non-generic types. We write $|T|$ for the erasure of type $T$.
+The erasure mapping is defined as follows.
+
+- The erasure of an alias type is the erasure of its right-hand side.
+- The erasure of an abstract type is the erasure of its upper bound.
+- The erasure of the parameterized type `scala.Array$[T_1]$` is
+ `scala.Array$[|T_1|]$`.
+- The erasure of every other parameterized type $T[T_1 , \ldots , T_n]$ is $|T|$.
+- The erasure of a singleton type `$p$.type` is the 
+  erasure of the type of $p$.
+- The erasure of a type projection `$T$#$x$` is `|$T$|#$x$`.
+- The erasure of a compound type 
+  `$T_1$ with $\ldots$ with $T_n$ {$R\,$}` is the erasure of the intersection 
+  dominator of $T_1 , \ldots , T_n$.
+- The erasure of an existential type `$T$ forSome {$\,Q\,$}` is $|T|$.
+
+The _intersection dominator_ of a list of types $T_1 , \ldots , T_n$ is computed
+as follows.
+Let $T_{i_1} , \ldots , T_{i_m}$ be the subsequence of types $T_i$  
+which are not supertypes of some other type $T_j$. 
+If this subsequence contains a type designator $T_c$ that refers to a class
+which is not a trait, 
+the intersection dominator is $T_c$. Otherwise, the intersection
+dominator is the first element of the subsequence, $T_{i_1}$.
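+
+For illustration (a non-normative sketch), erasure drops type arguments at run
+time, while `Array` keeps an erased element type:
+
+```scala
+val ints: List[Int]       = List(1, 2, 3)
+val strings: List[String] = List("a", "b")
+assert(ints.getClass == strings.getClass)   // both erase to List
+
+val arr = Array(1, 2, 3)                    // Array[Int] erases to Array[|Int|]
+assert(arr.getClass.getSimpleName == "int[]")
+```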
+
diff --git a/spec/04-basic-declarations-and-definitions.md b/spec/04-basic-declarations-and-definitions.md
new file mode 100644
index 0000000..ab1f98e
--- /dev/null
+++ b/spec/04-basic-declarations-and-definitions.md
@@ -0,0 +1,945 @@
+---
+title: Basic Declarations and Definitions
+layout: default
+chapter: 4
+---
+
+# Basic Declarations and Definitions
+
+
+```ebnf
+Dcl         ::=  ‘val’ ValDcl
+              |  ‘var’ VarDcl
+              |  ‘def’ FunDcl
+              |  ‘type’ {nl} TypeDcl
+PatVarDef   ::=  ‘val’ PatDef
+              |  ‘var’ VarDef
+Def         ::=  PatVarDef
+              |  ‘def’ FunDef
+              |  ‘type’ {nl} TypeDef
+              |  TmplDef
+```
+
+A _declaration_ introduces names and assigns them types. It can
+form part of a [class definition](05-classes-and-objects.html#templates) or of a
+refinement in a [compound type](03-types.html#compound-types).
+
+A _definition_ introduces names that denote terms or types. It can
+form part of an object or class definition or it can be local to a
+block.  Both declarations and definitions produce _bindings_ that
+associate type names with type definitions or bounds, and that
+associate term names with types.
+
+The scope of a name introduced by a declaration or definition is the
+whole statement sequence containing the binding.  However, there is a
+restriction on forward references in blocks: In a statement sequence
+$s_1 \ldots s_n$ making up a block, if a simple name in $s_i$ refers
+to an entity defined by $s_j$ where $j \geq i$, then for all $s_k$
+between and including $s_i$ and $s_j$,
+
+- $s_k$ cannot be a variable definition.
+- If $s_k$ is a value definition, it must be lazy.
+
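+For illustration (a non-normative sketch), the forward reference below is legal
+only because the value definition it refers to is marked `lazy`:
+
+```scala
+def ok: Int = {
+  def twice = x * 2   // forward reference to `x`, defined below
+  lazy val x = 21     // legal; with a plain `val x` the block would be rejected
+  twice               // evaluates to 42
+}
+```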
+
+<!--
+Every basic definition may introduce several defined names, separated
+by commas. These are expanded according to the following scheme:
+\bda{lcl}
+\VAL;x, y: T = e && \VAL; x: T = e \\
+                 && \VAL; y: T = x \\[0.5em]
+
+\LET;x, y: T = e && \LET; x: T = e \\
+                 && \VAL; y: T = x \\[0.5em]
+
+\DEF;x, y (ps): T = e &\tab\mbox{expands to}\tab& \DEF; x(ps): T = e \\
+                      && \DEF; y(ps): T = x(ps)\\[0.5em]
+
+\VAR;x, y: T := e && \VAR;x: T := e\\
+                  && \VAR;y: T := x\\[0.5em]
+
+\TYPE;t,u = T && \TYPE; t = T\\
+              && \TYPE; u = t\\[0.5em]
+\eda
+
+All definitions have a ``repeated form`` where the initial
+definition keyword is followed by several constituent definitions
+which are separated by commas.  A repeated definition is
+always interpreted as a sequence formed from the
+constituent definitions. E.g. the function definition
+`def f(x) = x, g(y) = y` expands to
+`def f(x) = x; def g(y) = y` and
+the type definition
+`type T, U <: B` expands to
+`type T; type U <: B`.
+}
+\comment{
+If an element in such a sequence introduces only the defined name,
+possibly with some type or value parameters, but leaves out any
+additional parts in the definition, then those parts are implicitly
+copied from the next subsequent sequence element which consists of
+more than just a defined name and parameters. Examples:
+
+
+- []
+The variable declaration `var x, y: Int`
+expands to `var x: Int; var y: Int`.
+- []
+The value definition `val x, y: Int = 1`
+expands to `val x: Int = 1; val y: Int = 1`.
+- []
+The class definition `case class X(), Y(n: Int) extends Z` expands to
+`case class X extends Z; case class Y(n: Int) extends Z`.
+- The object definition `case object Red, Green, Blue extends Color`~
+expands to
+```
+case object Red extends Color
+case object Green extends Color
+case object Blue extends Color .
+```
+-->
+
+
+
+## Value Declarations and Definitions
+
+```ebnf
+Dcl          ::=  ‘val’ ValDcl
+ValDcl       ::=  ids ‘:’ Type
+PatVarDef    ::=  ‘val’ PatDef 
+PatDef       ::=  Pattern2 {‘,’ Pattern2} [‘:’ Type] ‘=’ Expr
+ids          ::=  id {‘,’ id}
+```
+
+A value declaration `val $x$: $T$` introduces $x$ as a name of a value of
+type $T$.  
+
+A value definition `val $x$: $T$ = $e$` defines $x$ as a
+name of the value that results from the evaluation of $e$. 
+If the value definition is not recursive, the type
+$T$ may be omitted, in which case the [packed type](06-expressions.html#expression-typing) of
+expression $e$ is assumed.  If a type $T$ is given, then $e$ is expected to 
+conform to it.
+
+Evaluation of the value definition implies evaluation of its
+right-hand side $e$, unless it has the modifier `lazy`.  The
+effect of the value definition is to bind $x$ to the value of $e$
+converted to type $T$. A `lazy` value definition evaluates
+its right hand side $e$ the first time the value is accessed.
+
+A _constant value definition_ is of the form
+
+```scala
+final val x = e
+```
+
+where `e` is a [constant expression](06-expressions.html#constant-expressions).
+The `final` modifier must be
+present and no type annotation may be given. References to the
+constant value `x` are themselves treated as constant expressions; in the
+generated code they are replaced by the definition's right-hand side `e`.
+
+Value definitions can alternatively have a [pattern](08-pattern-matching.html#patterns)
+as left-hand side.  If $p$ is some pattern other
+than a simple name or a name followed by a colon and a type, then the
+value definition `val $p$ = $e$` is expanded as follows:
+
+1. If the pattern $p$ has bound variables $x_1 , \ldots , x_n$, where $n > 1$:
+
+```scala
+val $\$ x$ = $e$ match {case $p$ => ($x_1 , \ldots , x_n$)}
+val $x_1$ = $\$ x$._1
+$\ldots$
+val $x_n$ = $\$ x$._n  .
+```
+
+Here, $\$ x$ is a fresh name.  
+
+2. If $p$ has a unique bound variable $x$:
+
+```scala
+val $x$ = $e$ match { case $p$ => $x$ }
+```
+
+3. If $p$ has no bound variables:
+
+```scala
+$e$ match { case $p$ => ()}
+```
+
+###### Example
+
+The following are examples of value definitions
+
+```scala
+val pi = 3.1415
+val pi: Double = 3.1415   // equivalent to first definition
+val Some(x) = f()         // a pattern definition
+val x :: xs = mylist      // an infix pattern definition
+```
+
+The last two definitions have the following expansions.
+
+```scala
+val x = f() match { case Some(x) => x }
+
+val x$\$$ = mylist match { case x :: xs => (x, xs) }
+val x = x$\$$._1
+val xs = x$\$$._2
+```
+
+
+The name of any declared or defined value may not end in `_=`.
+
+A value declaration `val $x_1 , \ldots , x_n$: $T$` is a shorthand for the 
+sequence of value declarations `val $x_1$: $T$; ...; val $x_n$: $T$`.
+A value definition `val $p_1 , \ldots , p_n$ = $e$` is a shorthand for the 
+sequence of value definitions `val $p_1$ = $e$; ...; val $p_n$ = $e$`.
+A value definition `val $p_1 , \ldots , p_n: T$ = $e$` is a shorthand for the 
+sequence of value definitions `val $p_1: T$ = $e$; ...; val $p_n: T$ = $e$`.
+
+
+## Variable Declarations and Definitions
+
+```ebnf
+Dcl            ::=  ‘var’ VarDcl
+PatVarDef      ::=  ‘var’ VarDef
+VarDcl         ::=  ids ‘:’ Type
+VarDef         ::=  PatDef
+                 |  ids ‘:’ Type ‘=’ ‘_’
+```
+
+A variable declaration `var $x$: $T$` is equivalent to the declarations
+of both a _getter function_ $x$ *and* a _setter function_ `$x$_=`:
+
+```scala
+def $x$: $T$ 
+def $x$_= ($y$: $T$): Unit
+```
+
+An implementation of a class may _define_ a declared variable
+using a variable definition, or by defining the corresponding setter and getter methods.
+
+A variable definition `var $x$: $T$ = $e$` introduces a
+mutable variable with type $T$ and initial value as given by the
+expression $e$. The type $T$ can be omitted, in which case the type of
+$e$ is assumed. If $T$ is given, then $e$ is expected to 
+[conform to it](06-expressions.html#expression-typing).
+
+Variable definitions can alternatively have a [pattern](08-pattern-matching.html#patterns)
+as left-hand side.  A variable definition
+ `var $p$ = $e$` where $p$ is a pattern other
+than a simple name or a name followed by a colon and a type is expanded in the same way 
+as a [value definition](#value-declarations-and-definitions)
+`val $p$ = $e$`, except that
+the free names in $p$ are introduced as mutable variables, not values.
+
+The name of any declared or defined variable may not end in `_=`.
+
+A variable definition `var $x$: $T$ = _` can appear only as a member of a template.
+It introduces a mutable field with type $T$ and a default initial value.
+The default value depends on the type $T$ as follows:
+
+| default  | type $T$                           |
+|----------|------------------------------------|
+|`0`       | `Int` or one of its subrange types |
+|`0L`      | `Long`                             |
+|`0.0f`    | `Float`                            |
+|`0.0d`    | `Double`                           |
+|`false`   | `Boolean`                          |
+|`()`      | `Unit`                             |
+|`null`    | all other types                    |
+
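+For illustration (a non-normative sketch), the default-initializer form of a
+variable definition:
+
+```scala
+class Buffer {
+  var size: Int = _      // initialized to 0
+  var name: String = _   // initialized to null
+}
+val buf = new Buffer
+assert(buf.size == 0 && buf.name == null)
+```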
+
+When they occur as members of a template, both forms of variable
+definition also introduce a getter function $x$ which returns the
+value currently assigned to the variable, as well as a setter function
+`$x$_=` which changes the value currently assigned to the variable.
+The functions have the same signatures as for a variable declaration.
+The template then has these getter and setter functions as
+members, whereas the original variable cannot be accessed directly as
+a template member.
+
+###### Example
+
+The following example shows how _properties_ can be
+simulated in Scala. It defines a class `TimeOfDayVar` of time
+values with updatable integer fields representing hours, minutes, and
+seconds. Its implementation contains tests that allow only legal
+values to be assigned to these fields. The user code, on the other
+hand, accesses these fields just like normal variables.
+
+```scala
+class TimeOfDayVar {
+  private var h: Int = 0
+  private var m: Int = 0
+  private var s: Int = 0
+
+  def hours              =  h
+  def hours_= (h: Int)   =  if (0 <= h && h < 24) this.h = h
+                            else throw new DateError()
+
+  def minutes            =  m
+  def minutes_= (m: Int) =  if (0 <= m && m < 60) this.m = m
+                            else throw new DateError()
+
+  def seconds            =  s
+  def seconds_= (s: Int) =  if (0 <= s && s < 60) this.s = s
+                            else throw new DateError()
+}
+val d = new TimeOfDayVar
+d.hours = 8; d.minutes = 30; d.seconds = 0
+d.hours = 25                  // throws a DateError exception
+```
+
+
+A variable declaration `var $x_1 , \ldots , x_n$: $T$` is a shorthand for the 
+sequence of variable declarations `var $x_1$: $T$; ...; var $x_n$: $T$`.
+A variable definition `var $x_1 , \ldots , x_n$ = $e$` is a shorthand for the 
+sequence of variable definitions `var $x_1$ = $e$; ...; var $x_n$ = $e$`.
+A variable definition `var $x_1 , \ldots , x_n: T$ = $e$` is a shorthand for 
+the sequence of variable definitions 
+`var $x_1: T$ = $e$; ...; var $x_n: T$ = $e$`.
+
+## Type Declarations and Type Aliases
+
+<!-- TODO: Higher-kinded tdecls should have a separate section -->
+
+```ebnf
+Dcl        ::=  ‘type’ {nl} TypeDcl
+TypeDcl    ::=  id [TypeParamClause] [‘>:’ Type] [‘<:’ Type]
+Def        ::=  ‘type’ {nl} TypeDef
+TypeDef    ::=  id [TypeParamClause] ‘=’ Type
+```
+
+A _type declaration_ `type $t$[$\mathit{tps}\,$] >: $L$ <: $U$` declares
+$t$ to be an abstract type with lower bound type $L$ and upper bound
+type $U$. If the type parameter clause `[$\mathit{tps}\,$]` is omitted, $t$ abstracts over a first-order type, otherwise $t$ stands for a type constructor that accepts type arguments as described by the type parameter clause.
+
+If a type declaration appears as a member declaration of a
+type, implementations of the type may implement $t$ with any type $T$
+for which $L <: T <: U$. It is a compile-time error if
+$L$ does not conform to $U$.  Either or both bounds may be omitted.
+If the lower bound $L$ is absent, the bottom type
+`scala.Nothing` is assumed.  If the upper bound $U$ is absent,
+the top type `scala.Any` is assumed.
+
+A type constructor declaration imposes additional restrictions on the
+concrete types for which $t$ may stand. Besides the bounds $L$ and
+$U$, the type parameter clause may impose higher-order bounds and
+variances, as governed by the [conformance of type constructors](03-types.html#conformance).
+
+The scope of a type parameter extends over the bounds `>: $L$ <: $U$` and the type parameter clause $\mathit{tps}$ itself. A
+higher-order type parameter clause (of an abstract type constructor
+$tc$) has the same kind of scope, restricted to the declaration of the
+type parameter $tc$.
+
+To illustrate nested scoping, these declarations are all equivalent:
+`type t[m[x] <: Bound[x], Bound[x]]`, `type t[m[x] <: Bound[x], Bound[y]]` and
+`type t[m[x] <: Bound[x], Bound[_]]`, as the scope of, e.g., the type parameter
+of $m$ is limited to the declaration of $m$. In all of them, $t$ is an abstract
+type member that abstracts over two type constructors: $m$ stands for a type
+constructor that takes one type parameter and that must be a subtype of $Bound$,
+$t$'s second type constructor parameter.
+
+A _type alias_ `type $t$ = $T$` defines $t$ to be an alias
+name for the type $T$.  The left hand side of a type alias may
+have a type parameter clause, e.g. `type $t$[$\mathit{tps}\,$] = $T$`.  The scope
+of a type parameter extends over the right hand side $T$ and the
+type parameter clause $\mathit{tps}$ itself.  
+
+The scope rules for [definitions](#basic-declarations-and-definitions) 
+and [type parameters](#function-declarations-and-definitions)
+make it possible that a type name appears in its
+own bound or in its right-hand side.  However, it is a static error if
+a type alias refers recursively to the defined type constructor itself.  
+That is, the type $T$ in a type alias `type $t$[$\mathit{tps}\,$] = $T$` may not 
+refer directly or indirectly to the name $t$.  It is also an error if
+an abstract type is directly or indirectly its own upper or lower bound.
+
+###### Example
+
+The following are legal type declarations and definitions:
+
+```scala
+type IntList = List[Integer]
+type T <: Comparable[T]
+type Two[A] = Tuple2[A, A]
+type MyCollection[+X] <: Iterable[X]
+```
+
+The following are illegal:
+
+```scala
+type Abs = Comparable[Abs]      // recursive type alias
+
+type S <: T                     // S, T are bounded by themselves.
+type T <: S
+
+type T >: Comparable[T.That]    // Cannot select from T.
+                                // T is a type, not a value
+type MyCollection <: Iterable   // Type constructor members must explicitly
+                                // state their type parameters.
+```
+
+If a type alias `type $t$[$\mathit{tps}\,$] = $S$` refers to a class type
+$S$, the name $t$ can also be used as a constructor for
+objects of type $S$.
+
+###### Example
+
+The `Predef` object contains a definition which establishes `Pair`
+as an alias of the parameterized class `Tuple2`:
+
+```scala
+type Pair[+A, +B] = Tuple2[A, B]
+object Pair {
+  def apply[A, B](x: A, y: B) = Tuple2(x, y)
+  def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x)
+}
+```
+
+As a consequence, for any two types $S$ and $T$, the type
+`Pair[$S$, $T\,$]` is equivalent to the type `Tuple2[$S$, $T\,$]`.
+`Pair` can also be used as a constructor instead of `Tuple2`, as in:
+
+```scala
+val x: Pair[Int, String] = new Pair(1, "abc")
+```
+
+
+## Type Parameters
+
+```ebnf
+TypeParamClause  ::= ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’
+VariantTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeParam
+TypeParam        ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] [‘:’ Type]
+```
+
+Type parameters appear in type definitions, class definitions, and
+function definitions.  In this section we consider only type parameter
+definitions with lower bounds `>: $L$` and upper bounds
+`<: $U$` whereas a discussion of context bounds
+`: $U$` and view bounds `<% $U$` 
+is deferred to [here](07-implicit-parameters-and-views.html#context-bounds-and-view-bounds).
+
+The most general form of a first-order type parameter is
+`$@a_1 \ldots @a_n$ $\pm$ $t$ >: $L$ <: $U$`.
+Here, $L$ and $U$ are lower and upper bounds that
+constrain possible type arguments for the parameter.  It is a
+compile-time error if $L$ does not conform to $U$. $\pm$ is a _variance_, i.e. an optional prefix of either `+` or
+`-`. One or more annotations may precede the type parameter.
+
+<!--
+The upper bound $U$ in a type parameter clauses may not be a final
+class. The lower bound may not denote a value type.
+
+TODO: Why
+-->
+
+<!--
+TODO: this is a pretty awkward description of scoping and distinctness of binders
+-->
+
+The names of all type parameters must be pairwise different in their enclosing type parameter clause.  The scope of a type parameter includes in each case the whole type parameter clause. Therefore it is possible that a type parameter appears as part of its own bounds or the bounds of other type parameters in the same clause.  However, a type parameter may not be bounded directly or indirectly by itself.
+
+A type constructor parameter adds a nested type parameter clause to the type parameter. The most general form of a type constructor parameter is `$@a_1 \ldots @a_n$ $\pm$ $t[\mathit{tps}\,]$ >: $L$ <: $U$`.
+
+The above scoping restrictions are generalized to the case of nested
+type parameter clauses, which declare higher-order type parameters.
+Higher-order type parameters (the type parameters of a type parameter
+$t$) are only visible in their immediately surrounding parameter clause
+(possibly including clauses at a deeper nesting level) and in the
+bounds of $t$. Therefore, their names must only be pairwise different
+from the names of other visible parameters. Since the names of
+higher-order type parameters are thus often irrelevant, they may be
+denoted with a `_`, which is nowhere visible.
+
+###### Example
+Here are some well-formed type parameter clauses:
+
+```scala
+[S, T]
+[@specialized T, U]
+[Ex <: Throwable]
+[A <: Comparable[B], B <: A]
+[A, B >: A, C >: A <: B]
+[M[X], N[X]]
+[M[_], N[_]] // equivalent to previous clause
+[M[X <: Bound[X]], Bound[_]]
+[M[+X] <: Iterable[X]]
+```
+
+The following type parameter clauses are illegal:
+
+```scala
+[A >: A]                  // illegal, `A' has itself as bound
+[A <: B, B <: C, C <: A]  // illegal, `A' has itself as bound
+[A, B, C >: A <: B]       // illegal lower bound `A' of `C' does
+                          // not conform to upper bound `B'.
+```
+
+
+## Variance Annotations
+
+Variance annotations indicate how instances of parameterized types
+vary with respect to [subtyping](03-types.html#conformance).  A
+‘+’ variance indicates a covariant dependency, a
+‘-’ variance indicates a contravariant dependency, and a
+missing variance indication indicates an invariant dependency.
+
+A variance annotation constrains the way the annotated type variable
+may appear in the type or class which binds the type parameter.  In a
+type definition `type $T$[$\mathit{tps}\,$] = $S$`, or a type 
+declaration `type $T$[$\mathit{tps}\,$] >: $L$ <: $U$`, type parameters labeled
+‘+’ must only appear in covariant position, whereas
+type parameters labeled ‘-’ must only appear in contravariant
+position. Analogously, for a class definition
+`class $C$[$\mathit{tps}\,$]($\mathit{ps}\,$) extends $T$ { $x$: $S$ => ...}`, 
+type parameters labeled
+‘+’ must only appear in covariant position in the
+self type $S$ and the template $T$, whereas type
+parameters labeled ‘-’ must only appear in contravariant
+position. 
+
+The variance position of a type parameter in a type or template is
+defined as follows.  Let the opposite of covariance be contravariance,
+and the opposite of invariance be itself.  The top-level of the type
+or template is always in covariant position. The variance position
+changes at the following constructs.
+
+- The variance position of a method parameter is the opposite of the 
+  variance position of the enclosing parameter clause.
+- The variance position of a type parameter is the opposite of the
+  variance position of the enclosing type parameter clause.
+- The variance position of the lower bound of a type declaration or type parameter 
+  is the opposite of the variance position of the type declaration or parameter.  
+- The type of a mutable variable is always in invariant position.
+- The right-hand side of a type alias is always in invariant position.
+- The prefix $S$ of a type selection `$S$#$T$` is always in invariant position.
+- For a type argument $T$ of a type `$S$[$\ldots T \ldots$ ]`: If the
+  corresponding type parameter is invariant, then $T$ is in
+  invariant position.  If the corresponding type parameter is
+  contravariant, the variance position of $T$ is the opposite of
+  the variance position of the enclosing type `$S$[$\ldots T \ldots$ ]`.
+
+<!-- TODO: handle type aliases --> 
+
+References to the type parameters in 
+[object-private or object-protected values, types, variables, or methods](05-classes-and-objects.html#modifiers) of the class are not
+checked for their variance position. In these members the type parameter may 
+appear anywhere without restricting its legal variance annotations.
+
+###### Example
+The following variance annotation is legal.
+
+```scala
+abstract class P[+A, +B] {
+  def fst: A; def snd: B
+}
+```
+
+With this variance annotation, type instances
+of $P$ subtype covariantly with respect to their arguments.
+For instance,
+
+```scala
+P[IOException, String] <: P[Throwable, AnyRef]
+```
+
+If the members of $P$ are mutable variables,
+the same variance annotation becomes illegal.
+
+```scala
+abstract class Q[+A, +B](x: A, y: B) {
+  var fst: A = x           // **** error: illegal variance:
+  var snd: B = y           // `A', `B' occur in invariant position.
+}
+```
+
+If the mutable variables are object-private, the class definition
+becomes legal again:
+
+```scala
+abstract class R[+A, +B](x: A, y: B) {
+  private[this] var fst: A = x        // OK
+  private[this] var snd: B = y        // OK
+}
+```
+
+###### Example
+
+The following variance annotation is illegal, since $a$ appears
+in contravariant position in the parameter of `append`:
+
+```scala
+abstract class Sequence[+A] {
+  def append(x: Sequence[A]): Sequence[A]
+                  // **** error: illegal variance:
+                  // `A' occurs in contravariant position.
+}
+```
+
+The problem can be avoided by generalizing the type of `append`
+by means of a lower bound:
+
+```scala
+abstract class Sequence[+A] {
+  def append[B >: A](x: Sequence[B]): Sequence[B]
+}
+```
+
+###### Example
+
+```scala
+abstract class OutputChannel[-A] {
+  def write(x: A): Unit
+}
+```
+
+With that annotation, we have that
+`OutputChannel[AnyRef]` conforms to `OutputChannel[String]`.
+That is, a
+channel on which one can write any object can substitute for a channel
+on which one can write only strings.
+
+
+## Function Declarations and Definitions
+
+```ebnf
+Dcl                ::=  ‘def’ FunDcl
+FunDcl             ::=  FunSig ‘:’ Type
+Def                ::=  ‘def’ FunDef
+FunDef             ::=  FunSig [‘:’ Type] ‘=’ Expr 
+FunSig             ::=  id [FunTypeParamClause] ParamClauses
+FunTypeParamClause ::=  ‘[’ TypeParam {‘,’ TypeParam} ‘]’ 
+ParamClauses       ::=  {ParamClause} [[nl] ‘(’ ‘implicit’ Params ‘)’]
+ParamClause        ::=  [nl] ‘(’ [Params] ‘)’} 
+Params             ::=  Param {‘,’ Param}
+Param              ::=  {Annotation} id [‘:’ ParamType] [‘=’ Expr]
+ParamType          ::=  Type 
+                     |  ‘=>’ Type 
+                     |  Type ‘*’
+```
+
+A function declaration has the form `def $f\,\mathit{psig}$: $T$`, where
+$f$ is the function's name, $\mathit{psig}$ is its parameter
+signature and $T$ is its result type. A function definition
+`def $f\,\mathit{psig}$: $T$ = $e$` also includes a _function body_ $e$,
+i.e. an expression which defines the function's result.  A parameter
+signature consists of an optional type parameter clause `[$\mathit{tps}\,$]`,
+followed by zero or more value parameter clauses
+`($\mathit{ps}_1$)$\ldots$($\mathit{ps}_n$)`.  Such a declaration or definition
+introduces a value with a (possibly polymorphic) method type whose
+parameter types and result type are as given.
+
+The type of the function body is expected to [conform](06-expressions.html#expression-typing)
+to the function's declared
+result type, if one is given. If the function definition is not
+recursive, the result type may be omitted, in which case it is
+determined from the packed type of the function body.
+
+A type parameter clause $\mathit{tps}$ consists of one or more 
+[type declarations](#type-declarations-and-type-aliases), which introduce type 
+parameters, possibly with bounds.  The scope of a type parameter includes
+the whole signature, including any of the type parameter bounds as
+well as the function body, if it is present.  
+
+A value parameter clause $\mathit{ps}$ consists of zero or more formal
+parameter bindings such as `$x$: $T$` or `$x: T = e$`, which bind value
+parameters and associate them with their types. Each value parameter
+declaration may optionally define a default argument. The default argument
+expression $e$ is type-checked with an expected type $T'$ obtained
+by replacing all occurrences of the function's type parameters in $T$ by
+the undefined type.
+
+For every parameter $p_{i,j}$ with a default argument a method named
+`$f\$$default$\$$n` is generated which computes the default argument
+expression. Here, $n$ denotes the parameter's position in the method
+declaration. These methods are parametrized by the type parameter clause
+`[$\mathit{tps}\,$]` and all value parameter clauses
+`($\mathit{ps}_1$)$\ldots$($\mathit{ps}_{i-1}$)` preceding $p_{i,j}$.
+The `$f\$$default$\$$n` methods are inaccessible for
+user programs.
+
+The scope of a formal value parameter name $x$ comprises all subsequent 
+parameter clauses, as well as the method return type and the function body, if
+they are given. Both type parameter names and value parameter names must
+be pairwise distinct.
+
+###### Example
+In the method
+
+```scala
+def compare[T](a: T = 0)(b: T = a) = (a == b)
+```
+
+the default expression `0` is type-checked with an undefined expected
+type. When applying `compare()`, the default value `0` is inserted
+and `T` is instantiated to `Int`. The methods computing the default
+arguments have the form:
+
+```scala
+def compare$\$$default$\$$1[T]: Int = 0
+def compare$\$$default$\$$2[T](a: T): T = a
+```
+
+
+### By-Name Parameters
+
+
+```ebnf
+ParamType          ::=  ‘=>’ Type
+```
+
+The type of a value parameter may be prefixed by `=>`, e.g.
+`$x$: => $T$`. The type of such a parameter is then the
+parameterless method type `=> $T$`. This indicates that the
+corresponding argument is not evaluated at the point of function
+application, but instead is evaluated at each use within the
+function. That is, the argument is evaluated using _call-by-name_.
+
+The by-name modifier is disallowed for parameters of classes that
+carry a `val` or `var` prefix, including parameters of case
+classes for which a `val` prefix is implicitly generated. The
+by-name modifier is also disallowed for 
+[implicit parameters](07-implicit-parameters-and-views.html#implicit-parameters).
+
+###### Example
+The declaration
+
+```scala
+def whileLoop (cond: => Boolean) (stat: => Unit): Unit
+```
+
+indicates that both parameters of `whileLoop` are evaluated using
+call-by-name.
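+
+A possible definition and use of this method (a sketch, not part of the
+specification) shows the effect of call-by-name evaluation: both
+arguments are re-evaluated on every iteration.
+
+```scala
+def whileLoop(cond: => Boolean)(stat: => Unit): Unit =
+  if (cond) { stat; whileLoop(cond)(stat) }
+
+var i = 0
+whileLoop (i < 3) {
+  println(i)       // the by-name arguments are evaluated on each pass
+  i += 1
+}                  // prints 0, 1, 2
+```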
+
+
+### Repeated Parameters
+
+```ebnf
+ParamType          ::=  Type ‘*’
+```
+
+The last value parameter of a parameter section may be suffixed by
+“*”, e.g. `(..., $x$:$T$*)`.  The type of such a
+_repeated_ parameter inside the method is then the sequence type
+`scala.Seq[$T$]`.  Methods with repeated parameters
+`$T$*` take a variable number of arguments of type $T$.
+That is, if a method $m$ with type 
+`($p_1:T_1 , \ldots , p_n:T_n, p_s:S$*)$U$` is applied to arguments 
+$(e_1 , \ldots , e_k)$ where $k \geq n$, then $m$ is taken in that application 
+to have type $(p_1:T_1 , \ldots , p_n:T_n, p_s:S , \ldots , p_{s'}:S)U$, with 
+$k - n$ occurrences of type
+$S$ where any parameter names beyond $p_s$ are fresh. The only exception to 
+this rule is if the last argument is
+marked to be a _sequence argument_ via a `_*` type
+annotation. If $m$ above is applied to arguments
+`($e_1 , \ldots , e_n, e'$: _*)`, then the type of $m$ in
+that application is taken to be 
+`($p_1:T_1, \ldots , p_n:T_n,p_{s}:$scala.Seq[$S$])`.
+
+It is not allowed to define any default arguments in a parameter section
+with a repeated parameter.
+
+###### Example
+The following method definition computes the sum of the squares of a
+variable number of integer arguments.
+
+```scala
+def sum(args: Int*) = {
+  var result = 0
+  for (arg <- args) result += arg * arg
+  result
+}
+```
+
+The following applications of this method yield `0`, `1`,
+`6`, in that order.
+
+```scala
+sum()
+sum(1)
+sum(1, 2, 3)
+```
+
+Furthermore, assume the definition:
+
+```scala
+val xs = List(1, 2, 3)
+```
+
+The following application of method `sum` is ill-formed:
+
+```scala
+sum(xs)       // ***** error: expected: Int, found: List[Int]
+```
+
+By contrast, the following application is well-formed and again yields
+the result `6`:
+
+```scala
+sum(xs: _*)
+```
+
+
+### Procedures
+
+```ebnf
+FunDcl   ::=  FunSig
+FunDef   ::=  FunSig [nl] ‘{’ Block ‘}’
+```
+
+Special syntax exists for procedures, i.e. functions that return the
+`Unit` value `()`. 
+A procedure declaration is a function declaration where the result type
+is omitted. The result type is then implicitly completed to the
+`Unit` type. E.g., `def $f$($\mathit{ps}$)` is equivalent to
+`def $f$($\mathit{ps}$): Unit`.
+
+A procedure definition is a function definition where the result type
+and the equals sign are omitted; its defining expression must be a block.
+E.g., `def $f$($\mathit{ps}$) {$\mathit{stats}$}` is equivalent to
+`def $f$($\mathit{ps}$): Unit = {$\mathit{stats}$}`.
+
+###### Example
+Here is a declaration and a definition of a procedure named `write`:
+
+```scala
+trait Writer {
+  def write(str: String)
+}
+object Terminal extends Writer {
+  def write(str: String) { System.out.println(str) }
+}
+```
+
+The code above is implicitly completed to the following code:
+
+```scala
+trait Writer {
+  def write(str: String): Unit
+}
+object Terminal extends Writer {
+  def write(str: String): Unit = { System.out.println(str) }
+}
+```
+
+
+### Method Return Type Inference
+
+A class member definition $m$ that overrides some other function $m'$
+in a base class of $C$ may leave out the return type, even if it is
+recursive. In this case, the return type $R'$ of the overridden
+function $m'$, seen as a member of $C$, is taken as the return type of
+$m$ for each recursive invocation of $m$. That way, a type $R$ for the
+right-hand side of $m$ can be determined, which is then taken as the
+return type of $m$. Note that $R$ may be different from $R'$, as long
+as $R$ conforms to $R'$.
+
+###### Example
+Assume the following definitions:
+
+```scala
+trait I {
+  def factorial(x: Int): Int
+}
+class C extends I {
+  def factorial(x: Int) = if (x == 0) 1 else x * factorial(x - 1)
+}
+```
+
+Here, it is OK to leave out the result type of `factorial`
+in `C`, even though the method is recursive.
+
+
+
+<!-- ## Overloaded Definitions
+\label{sec:overloaded-defs}
+\todo{change}
+
+An overloaded definition is a set of $n > 1$ value or function
+definitions in the same statement sequence that define the same name,
+binding it to types `$T_1 \commadots T_n$`, respectively.
+The individual definitions are called _alternatives_.  Overloaded
+definitions may only appear in the statement sequence of a template.
+Alternatives always need to specify the type of the defined entity
+completely.  It is an error if the types of two alternatives $T_i$ and
+$T_j$ have the same erasure (\sref{sec:erasure}).
+
+\todo{Say something about bridge methods.}
+%This must be a well-formed
+%overloaded type -->
+
+## Import Clauses
+
+```ebnf
+Import          ::= ‘import’ ImportExpr {‘,’ ImportExpr}
+ImportExpr      ::= StableId ‘.’ (id | ‘_’ | ImportSelectors)
+ImportSelectors ::= ‘{’ {ImportSelector ‘,’} 
+                    (ImportSelector | ‘_’) ‘}’
+ImportSelector  ::= id [‘=>’ id | ‘=>’ ‘_’]
+```
+
+An import clause has the form `import $p$.$I$` where $p$ is a 
+[stable identifier](03-types.html#paths) and $I$ is an import expression.
+The import expression determines a set of names of importable members of $p$
+which are made available without qualification.  A member $m$ of $p$ is
+_importable_ if it is not [object-private](05-classes-and-objects.html#modifiers).
+The most general form of an import expression is a list of _import selectors_
+
+```scala
+{ $x_1$ => $y_1 , \ldots , x_n$ => $y_n$, _ } 
+```
+
+for $n \geq 0$, where the final wildcard ‘_’ may be absent.  It
+makes available each importable member `$p$.$x_i$` under the unqualified name
+$y_i$. I.e. every import selector `$x_i$ => $y_i$` renames
+`$p$.$x_i$` to
+$y_i$.  If a final wildcard is present, all importable members $z$ of
+$p$ other than `$x_1 , \ldots , x_n,y_1 , \ldots , y_n$` are also made available
+under their own unqualified names.
+
+Import selectors work in the same way for type and term members. For
+instance, an import clause `import $p$.{$x$ => $y\,$}` renames the term
+name `$p$.$x$` to the term name $y$ and the type name `$p$.$x$`
+to the type name $y$. At least one of these two names must
+reference an importable member of $p$.
+
+If the target in an import selector is a wildcard, the import selector
+hides access to the source member. For instance, the import selector
+`$x$ => _` “renames” $x$ to the wildcard symbol (which is
+inaccessible as a name in user programs), and thereby effectively
+prevents unqualified access to $x$. This is useful if there is a
+final wildcard in the same import selector list, which imports all
+members not mentioned in previous import selectors.
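+
+For example, the following clause (a sketch using the standard
+`scala.util` package) makes all members of `scala.util` except `Random`
+available without qualification:
+
+```scala
+import scala.util.{Random => _, _}  // Random stays hidden; everything else is imported
+```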
+
+The scope of a binding introduced by an import-clause starts
+immediately after the import clause and extends to the end of the
+enclosing block, template, package clause, or compilation unit,
+whichever comes first.
+
+Several shorthands exist. An import selector may be just a simple name
+$x$. In this case, $x$ is imported without renaming, so the
+import selector is equivalent to `$x$ => $x$`. Furthermore, it is
+possible to replace the whole import selector list by a single
+identifier or wildcard. The import clause `import $p$.$x$` is
+equivalent to `import $p$.{$x\,$}`, i.e. it makes available without
+qualification the member $x$ of $p$. The import clause
+`import $p$._` is equivalent to
+`import $p$.{_}`, 
+i.e. it makes available without qualification all members of $p$
+(this is analogous to `import $p$.*` in Java).
+
+An import clause with multiple import expressions
+`import $p_1$.$I_1 , \ldots , p_n$.$I_n$` is interpreted as a
+sequence of import clauses 
+`import $p_1$.$I_1$; $\ldots$; import $p_n$.$I_n$`.
+
+###### Example
+Consider the object definition:
+
+```scala
+object M {
+  def z = 0; def one = 1
+  def add(x: Int, y: Int): Int = x + y
+}
+```
+
+Then the block
+
+```scala
+{ import M.{one, z => zero, _}; add(zero, one) }
+```
+
+is equivalent to the block
+
+```scala
+{ M.add(M.z, M.one) }
+```
diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md
new file mode 100644
index 0000000..70fa3e0
--- /dev/null
+++ b/spec/05-classes-and-objects.md
@@ -0,0 +1,1173 @@
+---
+title: Classes and Objects
+layout: default
+chapter: 5
+---
+
+# Classes and Objects
+
+```ebnf
+TmplDef          ::= [`case'] `class' ClassDef
+                  |  [`case'] `object' ObjectDef
+                  |  `trait' TraitDef
+```
+
+[Classes](#class-definitions) and [objects](#object-definitions)
+are both defined in terms of _templates_.
+
+
+## Templates
+
+```ebnf
+ClassTemplate   ::=  [EarlyDefs] ClassParents [TemplateBody]
+TraitTemplate   ::=  [EarlyDefs] TraitParents [TemplateBody]
+ClassParents    ::=  Constr {`with' AnnotType}
+TraitParents    ::=  AnnotType {`with' AnnotType}
+TemplateBody    ::=  [nl] `{' [SelfType] TemplateStat {semi TemplateStat} `}'
+SelfType        ::=  id [`:' Type] `=>'
+                 |   this `:' Type `=>'
+```
+
+A template defines the type signature, behavior and initial state of a
+trait or class of objects or of a single object. Templates form part of
+instance creation expressions, class definitions, and object
+definitions.  A template 
+`$sc$ with $mt_1$ with $\ldots$ with $mt_n$ { $\mathit{stats}$ }` 
+consists of a constructor invocation $sc$
+which defines the template's _superclass_, trait references
+`$mt_1 , \ldots , mt_n$` $(n \geq 0)$, which define the
+template's _traits_, and a statement sequence $\mathit{stats}$ which
+contains initialization code and additional member definitions for the
+template.
+
+Each trait reference $mt_i$ must denote a [trait](#traits).
+By contrast, the superclass constructor $sc$ normally refers to a
+class which is not a trait. It is possible to write a list of
+parents that starts with a trait reference, e.g.
+`$mt_1$ with $\ldots$ with $mt_n$`. In that case the list
+of parents is implicitly extended to include the supertype of $mt_1$
+as first parent type. The new supertype must have at least one
+constructor that does not take parameters.  In the following, we will
+always assume that this implicit extension has been performed, so that
+the first parent class of a template is a regular superclass
+constructor, not a trait reference.
+
+The list of parents of a template must be well-formed. This means that
+the class denoted by the superclass constructor $sc$ must be a
+subclass of the superclasses of all the traits $mt_1 , \ldots , mt_n$.
+In other words, the non-trait classes inherited by a template form a
+chain in the inheritance hierarchy which starts with the template's
+superclass.
+
+The _least proper supertype_ of a template is the class type or
+[compound type](03-types.html#compound-types) consisting of all its parent
+class types. 
+
+The statement sequence $\mathit{stats}$ contains member definitions that
+define new members or overwrite members in the parent classes.  If the
+template forms part of an abstract class or trait definition, the
+statement part $\mathit{stats}$ may also contain declarations of abstract
+members. If the template forms part of a concrete class definition,
+$\mathit{stats}$ may still contain declarations of abstract type members, but
+not of abstract term members.  Furthermore, $\mathit{stats}$ may in any case
+also contain expressions; these are executed in the order they are
+given as part of the initialization of a template.
+
+The sequence of template statements may be prefixed with a formal
+parameter definition and an arrow, e.g. `$x$ =>`, or
+`$x$:$T$ =>`.  If a formal parameter is given, it can be
+used as an alias for the reference `this` throughout the
+body of the template.  
+If the formal parameter comes with a type $T$, this definition affects
+the _self type_ $S$ of the underlying class or object as follows:  Let $C$ be the type
+of the class or trait or object defining the template.
+If a type $T$ is given for the formal self parameter, $S$
+is the greatest lower bound of $T$ and $C$.
+If no type $T$ is given, $S$ is just $C$.
+Inside the template, the type of `this` is assumed to be $S$.
+
+The self type of a class or object must conform to the self types of
+all classes which are inherited by the template $t$. 
+
+A second form of self type annotation reads just 
+`this: $S$ =>`. It prescribes the type $S$ for `this`
+without introducing an alias name for it. 
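+
+The following sketch (with illustrative names) shows both forms of self
+type annotation; trait `Greeter` can only be mixed into classes that
+also conform to `User`:
+
+```scala
+trait User {
+  def username: String
+}
+trait Greeter { this: User =>         // second form: type only, no alias
+  def greet = "Hello, " + username    // `this` is assumed to be a User
+}
+trait Tweeter { self: User =>         // first form: alias `self` with type User
+  def tweet(msg: String) = println(self.username + ": " + msg)
+}
+class Member(val username: String) extends Greeter with Tweeter with User
+```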
+
+###### Example
+Consider the following class definitions:
+
+```scala
+class Base extends Object {}
+trait Mixin extends Base {}
+object O extends Mixin {}
+```
+
+In this case, the definition of `O` is expanded to:
+
+```scala
+object O extends Base with Mixin {}
+```
+
+
+<!-- TODO: Make all references to Java generic -->
+
+**Inheriting from Java Types** A template may have a Java class as its superclass and Java interfaces as its
+mixins. 
+
+**Template Evaluation** Consider a template `$sc$ with $mt_1$ with $\ldots$ with $mt_n$ { $\mathit{stats}$ }`.
+
+If this is the template of a [trait](#traits) then its _mixin-evaluation_ 
+consists of an evaluation of the statement sequence $\mathit{stats}$.
+
+If this is not a template of a trait, then its _evaluation_
+consists of the following steps.
+
+- First, the superclass constructor $sc$ is 
+  [evaluated](#constructor-invocations).
+- Then, all base classes in the template's [linearization](#class-linearization)
+  up to the template's superclass denoted by $sc$ are
+  mixin-evaluated. Mixin-evaluation happens in reverse order of
+  occurrence in the linearization.
+- Finally the statement sequence $\mathit{stats}\,$ is evaluated.
+
+
+###### Delayed Initialization
+The initialization code of an object or class (but not a trait) that follows 
+the superclass
+constructor invocation and the mixin-evaluation of the template's base
+classes is passed to a special hook, which is inaccessible from user
+code. Normally, that hook simply executes the code that is passed to
+it. But templates inheriting the `scala.DelayedInit` trait
+can override the hook by re-implementing the `delayedInit`
+method, which is defined as follows:
+
+```scala
+def delayedInit(body: => Unit)
+```
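+
+As a non-normative sketch, a trait overriding this hook can run code
+around the initialization statements of any class that mixes it in:
+
+```scala
+trait InitLogging extends DelayedInit {
+  def delayedInit(body: => Unit): Unit = {
+    println("before initialization")
+    body                               // runs the original initialization code
+    println("after initialization")
+  }
+}
+class Service extends InitLogging {
+  println("initializing Service")      // passed to delayedInit by the compiler
+}
+// new Service prints:
+//   before initialization
+//   initializing Service
+//   after initialization
+```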
+
+
+### Constructor Invocations
+
+```ebnf
+Constr  ::=  AnnotType {`(' [Exprs] `)'}
+```
+
+Constructor invocations define the type, members, and initial state of
+objects created by an instance creation expression, or of parts of an
+object's definition which are inherited by a class or object
+definition. A constructor invocation is a function application
+`$x$.$c$[$\mathit{targs}$]($\mathit{args}_1$)$\ldots$($\mathit{args}_n$)`, where $x$ is a 
+[stable identifier](03-types.html#paths), $c$ is a type name which either designates a
+class or defines an alias type for one, $\mathit{targs}$ is a type argument
+list, $\mathit{args}_1 , \ldots , \mathit{args}_n$ are argument lists, and there is a
+constructor of that class which is [applicable](06-expressions.html#function-applications)
+to the given arguments. If the constructor invocation uses named or
+default arguments, it is transformed into a block expression using the
+same transformation as described for [named and default arguments](06-expressions.html#function-applications).
+
+The prefix `$x$.` can be omitted.  A type argument list
+can be given only if the class $c$ takes type parameters.  Even then
+it can be omitted, in which case a type argument list is synthesized
+using [local type inference](06-expressions.html#local-type-inference). If no explicit
+arguments are given, an empty list `()` is implicitly supplied.
+
+An evaluation of a constructor invocation 
+`$x$.$c$[$\mathit{targs}$]($\mathit{args}_1$)$\ldots$($\mathit{args}_n$)`
+consists of the following steps:
+
+- First, the prefix $x$ is evaluated.
+- Then, the arguments $\mathit{args}_1 , \ldots , \mathit{args}_n$ are evaluated from 
+  left to right.
+- Finally, the class being constructed is initialized by evaluating the
+  template of the class referred to by $c$.
+
+### Class Linearization
+
+The classes reachable through transitive closure of the direct
+inheritance relation from a class $C$ are called the _base classes_ of $C$.  Because of mixins, the inheritance relationship
+on base classes forms in general a directed acyclic graph. A
+linearization of this graph is defined as follows.
+
+
+###### Definition: linearization
+Let $C$ be a class with template
+`$C_1$ with ... with $C_n$ { $\mathit{stats}$ }`.
+The _linearization_ of $C$, $\mathcal{L}(C)$ is defined as follows:
+
+$$\mathcal{L}(C) = C, \mathcal{L}(C_n) \; \vec{+} \; \ldots \; \vec{+} \; \mathcal{L}(C_1)$$
+
+Here $\vec{+}$ denotes concatenation where elements of the right operand
+replace identical elements of the left operand:
+
+$$
+\begin{array}{lcll}
+\{a, A\} \;\vec{+}\; B &=& a, (A \;\vec{+}\; B)  &{\bf if} \; a \not\in B \\\\
+                       &=& A \;\vec{+}\; B       &{\bf if} \; a \in B
+\end{array}
+$$
+
+
+###### Example
+Consider the following class definitions.
+
+```scala
+abstract class AbsIterator extends AnyRef { ... }
+trait RichIterator extends AbsIterator { ... }
+class StringIterator extends AbsIterator { ... }
+class Iter extends StringIterator with RichIterator { ... }
+```
+
+Then the linearization of class `Iter` is
+
+```scala
+{ Iter, RichIterator, StringIterator, AbsIterator, AnyRef, Any }
+```
+
+Note that the linearization of a class refines the inheritance
+relation: if $C$ is a subclass of $D$, then $C$ precedes $D$ in any
+linearization where both $C$ and $D$ occur.
+[Linearization](#definition-linearization) also satisfies the property that
+a linearization of a class always contains the linearization of its direct superclass as a suffix.
+
+For instance, the linearization of `StringIterator` is
+
+```scala
+{ StringIterator, AbsIterator, AnyRef, Any }
+```
+
+which is a suffix of the linearization of its subclass `Iter`.
+The same is not true for the linearization of mixins.
+For instance, the linearization of `RichIterator` is
+
+```scala
+{ RichIterator, AbsIterator, AnyRef, Any }
+```
+
+which is not a suffix of the linearization of `Iter`.
+
+
+### Class Members
+
+A class $C$ defined by a template `$C_1$ with $\ldots$ with $C_n$ { $\mathit{stats}$ }`
+can define members in its statement sequence
+$\mathit{stats}$ and can inherit members from all parent classes.  Scala
+adopts Java and C\#'s conventions for static overloading of
+methods. It is thus possible that a class defines and/or inherits
+several methods with the same name.  To decide whether a defined
+member of a class $C$ overrides a member of a parent class, or whether
+the two co-exist as overloaded variants in $C$, Scala uses the
+following definition of _matching_ on members:
+
+###### Definition: matching
+A member definition $M$ _matches_ a member definition $M'$, if $M$
+and $M'$ bind the same name, and one of following holds.
+
+1. Neither $M$ nor $M'$ is a method definition.
+2. $M$ and $M'$ both define monomorphic methods with equivalent argument types.
+3. $M$ defines a parameterless method and $M'$ defines a method
+   with an empty parameter list `()` or _vice versa_.
+4. $M$ and $M'$ both define polymorphic methods with
+   equal numbers of argument types $\overline T$, $\overline T'$
+   and equal numbers of type parameters
+   $\overline t$, $\overline t'$, say, and  $\overline T' = [\overline t'/\overline t]\overline T$.
+
+<!--
+every argument type
+$T_i$ of $M$ is equal to the corresponding argument type $T`_i$ of
+$M`$ where every occurrence of a type parameter $t`$ of $M`$ has been replaced by the corresponding type parameter $t$ of $M$.
+-->
+
+Member definitions fall into two categories: concrete and abstract.
+Members of class $C$ are either _directly defined_ (i.e. they appear in
+$C$'s statement sequence $\mathit{stats}$) or they are _inherited_.  There are two rules
+that determine the set of members of a class, one for each category:
+
+A _concrete member_ of a class $C$ is any concrete definition $M$ in
+some class $C_i \in \mathcal{L}(C)$, except if there is a preceding class
+$C_j \in \mathcal{L}(C)$ where $j < i$ which directly defines a concrete
+member $M'$ matching $M$.
+
+An _abstract member_ of a class $C$ is any abstract definition $M$
+in some class $C_i \in \mathcal{L}(C)$, except if $C$ contains already a
+concrete member $M'$ matching $M$, or if there is a preceding class
+$C_j \in \mathcal{L}(C)$ where $j < i$ which directly defines an abstract
+member $M'$ matching $M$.
+
+This definition also determines the [overriding](#overriding) relationships
+between matching members of a class $C$ and its parents.  
+First, a concrete definition always overrides an abstract definition. 
+Second, for definitions $M$ and $M'$ which are both concrete or both abstract,
+$M$ overrides $M'$ if $M$ appears in a class that precedes (in the
+linearization of $C$) the class in which $M'$ is defined.
+
+It is an error if a template directly defines two matching members. It
+is also an error if a template contains two members (directly defined
+or inherited) with the same name and the same [erased type](03-types.html#type-erasure).
+Finally, a template is not allowed to contain two methods (directly
+defined or inherited) with the same name which both define default arguments.
+
+
+###### Example
+Consider the trait definitions:
+
+```scala
+trait A { def f: Int }
+trait B extends A { def f: Int = 1 ; def g: Int = 2 ; def h: Int = 3 }
+trait C extends A { override def f: Int = 4 ; def g: Int }
+trait D extends B with C { def h: Int }
+```
+
+Then trait `D` has a directly defined abstract member `h`. It
+inherits member `f` from trait `C` and member `g` from
+trait `B`.
+
+
+### Overriding
+
+<!-- TODO: Explain that classes cannot override each other -->
+
+A member $M$ of class $C$ that [matches](#class-members) 
+a non-private member $M'$ of a
+base class of $C$ is said to _override_ that member.  In this case
+the binding of the overriding member $M$ must [subsume](03-types.html#conformance)
+the binding of the overridden member $M'$.
+Furthermore, the following restrictions on modifiers apply to $M$ and
+$M'$:
+
+- $M'$ must not be labeled `final`.
+- $M$ must not be [`private`](#modifiers).
+- If $M$ is labeled `private[$C$]` for some enclosing class or package $C$,
+  then $M'$ must be labeled `private[$C'$]` for some class or package $C'$ where
+  $C'$ equals $C$ or $C'$ is contained in $C$.
+  <!-- TODO: check whether this is accurate -->
+- If $M$ is labeled `protected`, then $M'$ must also be
+  labeled `protected`.
+- If $M'$ is not an abstract member, then $M$ must be labeled `override`.
+  Furthermore, one of two possibilities must hold:
+    - either $M$ is defined in a subclass of the class where $M'$ is defined,
+    - or both $M$ and $M'$ override a third member $M''$ which is defined
+      in a base class of both the classes containing $M$ and $M'$.
+- If $M'$ is [incomplete](#modifiers) in $C$ then $M$ must be
+  labeled `abstract override`.
+- If $M$ and $M'$ are both concrete value definitions, then either none
+  of them is marked `lazy` or both must be marked `lazy`.
+
+A stable member can only be overridden by a stable member.
+For example, this is not allowed:
+
+```scala
+class X { val stable = 1}
+class Y extends X { override var stable = 1 } // error
+```
+
+Another restriction applies to abstract type members: An abstract type
+member with a [volatile type](03-types.html#volatile-types) as its upper
+bound may not override an abstract type member which does not have a
+volatile upper bound.
+
+A special rule concerns parameterless methods. If a parameterless
+method defined as `def $f$: $T$ = ...` or `def $f$ = ...` overrides a method of
+type $()T'$ which has an empty parameter list, then $f$ is also
+assumed to have an empty parameter list.
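+
+For instance (an illustrative sketch), a parameterless definition may
+override a method that has an empty parameter list; the overriding
+member is then also treated as having an empty parameter list:
+
+```scala
+class A {
+  def size(): Int = 0
+}
+class B extends A {
+  override def size: Int = 1   // assumed to have type ()Int
+}
+```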
+
+An overriding method inherits all default arguments from the definition
+in the superclass. By specifying default arguments in the overriding method
+it is possible to add new defaults (if the corresponding parameter in the
+superclass does not have a default) or to override the defaults of the
+superclass (otherwise).
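+
+The following sketch (with illustrative names) adds a default in the
+overriding method for a parameter that had none in the superclass,
+while the existing superclass default is inherited:
+
+```scala
+class A {
+  def pad(s: String, len: Int = 10): String = s + " " * (len - s.length)
+}
+class B extends A {
+  // adds a default for `s`; the default for `len` is inherited from A
+  override def pad(s: String = "", len: Int): String = super.pad(s, len)
+}
+// (new B).pad()  uses s = "" (from B) and len = 10 (inherited from A)
+```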
+
+###### Example
+
+Consider the definitions:
+
+```scala
+trait Root { type T <: Root }
+trait A extends Root { type T <: A }
+trait B extends Root { type T <: B }
+trait C extends A with B
+```
+
+Then the class definition `C` is not well-formed because the
+binding of `T` in `C` is
+`type T <: B`,
+which fails to subsume the binding `type T <: A` of `T`
+in type `A`. The problem can be solved by adding an overriding
+definition of type `T` in class `C`:
+
+```scala
+class C extends A with B { type T <: C }
+```
+
+
+### Inheritance Closure
+
+Let $C$ be a class type. The _inheritance closure_ of $C$ is the
+smallest set $\mathscr{S}$ of types such that
+
+- If $T$ is in $\mathscr{S}$, then every type $T'$ which forms syntactically
+  a part of $T$ is also in $\mathscr{S}$.
+- If $T$ is a class type in $\mathscr{S}$, then all [parents](#templates)
+  of $T$ are also in $\mathscr{S}$.
+
+It is a static error if the inheritance closure of a class type
+consists of an infinite number of types. (This restriction is
+necessary to make subtyping decidable[^kennedy]).
+
+[^kennedy]: Kennedy, Pierce. [On Decidability of Nominal Subtyping with Variance.]( http://research.microsoft.com/pubs/64041/fool2007.pdf) in FOOL 2007
+
+### Early Definitions
+
+```ebnf
+EarlyDefs         ::= `{' [EarlyDef {semi EarlyDef}] `}' `with'
+EarlyDef          ::=  {Annotation} {Modifier} PatVarDef
+```
+
+A template may start with an _early field definition_ clause,
+which serves to define certain field values before the supertype
+constructor is called. In a template
+
+```scala
+{ val $p_1$: $T_1$ = $e_1$
+  ...
+  val $p_n$: $T_n$ = $e_n$
+} with $sc$ with $mt_1$ with $\ldots$ with $mt_n$ { $\mathit{stats}$ }
+```
+
+The initial pattern definitions of $p_1 , \ldots , p_n$ are called
+_early definitions_. They define fields 
+which form part of the template. Every early definition must define
+at least one variable. 
+
+An early definition is type-checked and evaluated in the scope which
+is in effect just before the template being defined, augmented by any
+type parameters of the enclosing class and by any early definitions
+preceding the one being defined. In particular, any reference to
+`this` in the right-hand side of an early definition refers
+to the identity of `this` just outside the template. Consequently, it
+is impossible that an early definition refers to the object being
+constructed by the template, or refers to one of its fields and
+methods, except for any other preceding early definition in the same
+section. Furthermore, references to preceding early definitions
+always refer to the value that's defined there, and do not take into account
+overriding definitions. In other words, a block of early definitions
+is evaluated exactly as if it were a local block containing a number of value
+definitions.
+ 
+
+Early definitions are evaluated in the order they are being defined
+before the superclass constructor of the template is called.
+
+###### Example
+Early definitions are particularly useful for
+traits, which do not have normal constructor parameters. Example:
+
+```scala
+trait Greeting {
+  val name: String
+  val msg = "How are you, "+name
+}
+class C extends {
+  val name = "Bob"
+} with Greeting {
+  println(msg)
+}
+```
+
+In the code above, the field `name` is initialized before the
+constructor of `Greeting` is called. Therefore, field `msg` in
+class `Greeting` is properly initialized to `"How are you, Bob"`.
+
+If `name` had been initialized instead in `C`'s normal class
+body, it would be initialized after the constructor of
+`Greeting`. In that case, `msg` would be initialized to
+`"How are you, <null>"`.
+
+
+## Modifiers
+
+```ebnf
+Modifier          ::=  LocalModifier 
+                    |  AccessModifier
+                    |  `override'
+LocalModifier     ::=  `abstract'
+                    |  `final'
+                    |  `sealed'
+                    |  `implicit'
+                    |  `lazy'
+AccessModifier    ::=  (`private' | `protected') [AccessQualifier]
+AccessQualifier   ::=  `[' (id | `this') `]'
+```
+
+Member definitions may be preceded by modifiers which affect the
+accessibility and usage of the identifiers bound by them.  If several
+modifiers are given, their order does not matter, but the same
+modifier may not occur more than once.  Modifiers preceding a repeated
+definition apply to all constituent definitions.  The rules governing
+the validity and meaning of a modifier are as follows.
+
+#### `private`
+The `private` modifier can be used with any definition or
+declaration in a template.  Such members can be accessed only from
+within the directly enclosing template and its companion module or
+[companion class](#object-definitions). They
+are not inherited by subclasses and they may not override definitions
+in parent classes.
+
+The modifier can be _qualified_ with an identifier $C$ (e.g.
+`private[$C$]`) that must denote a class or package
+enclosing the definition.  Members labeled with such a modifier are
+accessible respectively only from code inside the package $C$ or only
+from code inside the class $C$ and its
+[companion module](#object-definitions).
+
+A different form of qualification is `private[this]`. A member
+$M$ marked with this modifier is called _object-private_; it can be accessed only from within
+the object in which it is defined. That is, a selection $p.M$ is only
+legal if the prefix is `this` or `$O$.this`, for some
+class $O$ enclosing the reference. In addition, the restrictions for
+unqualified `private` apply.
+
+Members marked private without a qualifier are called _class-private_,
+whereas members labeled with `private[this]`
+are called _object-private_.  A member _is private_ if it is
+either class-private or object-private, but not if it is marked
+`private[$C$]` where $C$ is an identifier; in the latter
+case the member is called _qualified private_.
+
+Class-private or object-private members may not be abstract, and may
+not have `protected` or `override` modifiers.
+
+#### `protected`
+The `protected` modifier applies to class member definitions.
+Protected members of a class can be accessed from within
+  - the template of the defining class,
+  - all templates that have the defining class as a base class,
+  - the companion module of any of those classes.
+
+A `protected` modifier can be qualified with an
+identifier $C$ (e.g.  `protected[$C$]`) that must denote a
+class or package enclosing the definition.  Members labeled with such
+a modifier are also accessible respectively from all code inside the
+package $C$ or from all code inside the class $C$ and its
+[companion module](#object-definitions).
+
+A protected identifier $x$ may be used as a member name in a selection
+`$r$.$x$` only if one of the following applies:
+  - The access is within the template defining the member, or, if
+    a qualification $C$ is given, inside the package $C$,
+    or the class $C$, or its companion module, or
+  - $r$ is one of the reserved words `this` and
+    `super`, or
+  - $r$'s type conforms to a type-instance of the
+    class which contains the access.
+
+A different form of qualification is `protected[this]`. A member
+$M$ marked with this modifier is called _object-protected_; it can be accessed only from within
+the object in which it is defined. That is, a selection $p.M$ is only
+legal if the prefix is `this` or `$O$.this`, for some
+class $O$ enclosing the reference. In addition, the restrictions for
+unqualified `protected` apply.
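+
+A small sketch of the `protected[this]` restriction (class and member
+names are illustrative):
+
+```scala
+class Counter {
+  protected[this] var count = 0
+  def increment(): Unit = count += 1    // OK: accessed through `this`
+  // def total(other: Counter) = count + other.count
+  //     // **** error: object-protected member of another instance
+}
+```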
+
+#### `override`
+The `override` modifier applies to class member definitions or declarations.
+It is mandatory for member definitions or declarations that override some
+other concrete member definition in a parent class. If an `override`
+modifier is given, there must be at least one overridden member
+definition or declaration (either concrete or abstract).
+
+#### `abstract override`
+The `override` modifier has an additional significance when
+combined with the `abstract` modifier.  That modifier combination
+is only allowed for value members of traits.
+
+We call a member $M$ of a template _incomplete_ if it is either
+abstract (i.e. defined by a declaration), or it is labeled
+`abstract` and `override` and
+every member overridden by $M$ is again incomplete.
+
+Note that the `abstract override` modifier combination does not
+affect whether a member is concrete or abstract. A
+member is _abstract_ if only a declaration is given for it;
+it is _concrete_ if a full definition is given.
+
+#### `abstract`
+The `abstract` modifier is used in class definitions. It is
+redundant for traits, and mandatory for all other classes which have
+incomplete members.  Abstract classes cannot be
+[instantiated](06-expressions.html#instance-creation-expressions) with a constructor invocation
+unless followed by mixins and/or a refinement which override all
+incomplete members of the class. Only abstract classes and traits can have
+abstract term members.
+
+The `abstract` modifier can also be used in conjunction with
+`override` for class member definitions. In that case the
+previous discussion applies.
+
+#### `final`
+The `final` modifier applies to class member definitions and to
+class definitions. A `final` class member definition may not be
+overridden in subclasses. A `final` class may not be inherited by
+a template. `final` is redundant for object definitions.  Members
+of final classes or objects are implicitly also final, so the
+`final` modifier is generally redundant for them, too. Note, however, that
+[constant value definitions](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) do require
+an explicit `final` modifier, even if they are defined in a final class or
+object. `final` may not be applied to incomplete members, and it may not be
+combined in one modifier list with `sealed`.
+
+#### `sealed`
+The `sealed` modifier applies to class definitions. A
+`sealed` class may not be directly inherited, except if the inheriting
+template is defined in the same source file as the inherited class.
+However, subclasses of a sealed class can be inherited anywhere.
+
+#### `lazy`
+The `lazy` modifier applies to value definitions. A `lazy`
+value is initialized the first time it is accessed (which might never
+happen at all). Attempting to access a lazy value during its
+initialization might lead to looping behavior. If an exception is
+thrown during initialization, the value is considered uninitialized,
+and a later access will retry to evaluate its right hand side.
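+
+As an illustration (not part of the normative text), a `lazy` value's
+right hand side runs at most once, on the first access:
+
+```scala
+lazy val answer = { println("computing"); 42 }
+
+println("before first access")
+println(answer)   // prints "computing", then "42"
+println(answer)   // prints "42" only; the initializer does not run again
+```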
+
+
+###### Example
+The following code illustrates the use of qualified private:
+
+```scala
+package outerpkg.innerpkg
+class Outer {
+  class Inner {
+    private[Outer] def f()
+    private[innerpkg] def g()
+    private[outerpkg] def h()
+  }
+}
+```
+
+Here, accesses to the method `f` can appear anywhere within
+class `Outer`, but not outside it. Accesses to method
+`g` can appear anywhere within the package
+`outerpkg.innerpkg`, as would be the case for
+package-private methods in Java. Finally, accesses to method
+`h` can appear anywhere within package `outerpkg`,
+including packages contained in it.
+
+
+###### Example
+A useful idiom to prevent clients of a class from
+constructing new instances of that class is to declare the class
+`abstract` and `sealed`:
+
+```scala
+object m {
+  abstract sealed class C (x: Int) {
+    def nextC = new C(x + 1) {}
+  }
+  val empty = new C(0) {}
+}
+```
+
+For instance, in the code above clients can create instances of class
+`m.C` only by calling the `nextC` method of an existing `m.C`
+object; it is not possible for clients to create objects of class
+`m.C` directly. Indeed the following two lines are both in error:
+
+```scala
+new m.C(0)    // **** error: C is abstract, so it cannot be instantiated.
+new m.C(0) {} // **** error: illegal inheritance from sealed class.
+```
+
+A similar access restriction can be achieved by marking the primary
+constructor `private` ([example](#example-private-constructor)).
+
+
+## Class Definitions
+
+```ebnf
+TmplDef           ::=  `class' ClassDef 
+ClassDef          ::=  id [TypeParamClause] {Annotation} 
+                       [AccessModifier] ClassParamClauses ClassTemplateOpt 
+ClassParamClauses ::=  {ClassParamClause} 
+                       [[nl] `(' implicit ClassParams `)']
+ClassParamClause  ::=  [nl] `(' [ClassParams] ')'
+ClassParams       ::=  ClassParam {`,' ClassParam}
+ClassParam        ::=  {Annotation} {Modifier} [(`val' | `var')]
+                       id [`:' ParamType] [`=' Expr]
+ClassTemplateOpt  ::=  `extends' ClassTemplate | [[`extends'] TemplateBody]
+```
+
+The most general form of class definition is 
+
+```scala
+class $c$[$\mathit{tps}\,$] $as$ $m$($\mathit{ps}_1$)$\ldots$($\mathit{ps}_n$) extends $t$    $\gap(n \geq 0)$.
+```
+
+Here,
+
+  - $c$ is the name of the class to be defined.
+  - $\mathit{tps}$ is a non-empty list of type parameters of the class
+    being defined.  The scope of a type parameter is the whole class
+    definition including the type parameter section itself.  It is
+    illegal to define two type parameters with the same name.  The type
+    parameter section `[$\mathit{tps}\,$]` may be omitted. A class with a type
+    parameter section is called _polymorphic_, otherwise it is called
+    _monomorphic_.
+  - $as$ is a possibly empty sequence of 
+    [annotations](11-user-defined-annotations.html#user-defined-annotations).
+    If any annotations are given, they apply to the primary constructor of the 
+    class.
+  - $m$ is an [access modifier](#modifiers) such as
+    `private` or `protected`, possibly with a qualification.
+    If such an access modifier is given it applies to the primary constructor of the class.
+  - $(\mathit{ps}_1)\ldots(\mathit{ps}_n)$ are formal value parameter clauses for
+    the _primary constructor_ of the class. The scope of a formal value parameter includes
+    all subsequent parameter sections and the template $t$. However, a formal 
+    value parameter may not form part of the types of any of the parent classes or members of the class template $t$.
+    It is illegal to define two formal value parameters with the same name.
+
+    If no formal parameter sections are given, an empty parameter section `()` is assumed.
+
+    If a formal parameter declaration $x: T$ is preceded by a `val`
+    or `var` keyword, an accessor (getter) [definition](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions)
+    for this parameter is implicitly added to the class.
+
+    The getter introduces a value member $x$ of class $c$ that is defined as an alias of the parameter.
+    If the introducing keyword is `var`, a setter accessor [`$x$_=`](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions) is also implicitly added to the class.
+    An invocation of that setter `$x$_=($e$)` changes the value of the parameter to the result of evaluating $e$.
+
+    The formal parameter declaration may contain modifiers, which then carry over to the accessor definition(s).
+    When access modifiers are given for a parameter, but no `val` or `var` keyword, `val` is assumed.
+    A formal parameter prefixed by `val` or `var` may not at the same time be a [call-by-name parameter](04-basic-declarations-and-definitions.html#by-name-parameters).
+
+  - $t$ is a [template](#templates) of the form
+
+    ``` 
+    $sc$ with $mt_1$ with $\ldots$ with $mt_m$ { $\mathit{stats}$ } // $m \geq 0$
+    ```
+
+    which defines the base classes, behavior and initial state of objects of
+    the class. The extends clause 
+    `extends $sc$ with $mt_1$ with $\ldots$ with $mt_m$` 
+    can be omitted, in which case
+    `extends scala.AnyRef` is assumed.  The class body
+    `{ $\mathit{stats}$ }` may also be omitted, in which case the empty body
+    `{}` is assumed.
+
+
+This class definition defines a type `$c$[$\mathit{tps}\,$]` and a constructor
+which when applied to parameters conforming to types $\mathit{ps}$
+initializes instances of type `$c$[$\mathit{tps}\,$]` by evaluating the template
+$t$.
+
+###### Example
+The following example illustrates `val` and `var` parameters of a class `C`:
+
+```scala
+class C(x: Int, val y: String, var z: List[String])
+val c = new C(1, "abc", List())
+c.z = c.y :: c.z
+```
+
+The following class can be created only from its companion module.
+
+```scala
+object Sensitive {
+  def makeSensitive(credentials: Certificate): Sensitive =
+    if (credentials == Admin) new Sensitive()
+    else throw new SecurityViolationException
+}
+class Sensitive private () {
+  ...
+}
+```
+
+
+### Constructor Definitions
+
+```ebnf
+FunDef         ::= `this' ParamClause ParamClauses 
+                   (`=' ConstrExpr | [nl] ConstrBlock)
+ConstrExpr     ::= SelfInvocation
+                |  ConstrBlock
+ConstrBlock    ::= `{' SelfInvocation {semi BlockStat} `}'
+SelfInvocation ::= `this' ArgumentExprs {ArgumentExprs}
+```
+
+A class may have additional constructors besides the primary
+constructor.  These are defined by constructor definitions of the form
+`def this($\mathit{ps}_1$)$\ldots$($\mathit{ps}_n$) = $e$`.  Such a
+definition introduces an additional constructor for the enclosing
+class, with parameters as given in the formal parameter lists $\mathit{ps}_1
+, \ldots , \mathit{ps}_n$, and whose evaluation is defined by the constructor
+expression $e$.  The scope of each formal parameter is the subsequent
+parameter sections and the constructor
+expression $e$.  A constructor expression is either a self constructor
+invocation `this($\mathit{args}_1$)$\ldots$($\mathit{args}_n$)` or a block
+which begins with a self constructor invocation. The self constructor
+invocation must construct a generic instance of the class. I.e. if the
+class in question has name $C$ and type parameters
+`[$\mathit{tps}\,$]`, then a self constructor invocation must
+generate an instance of `$C$[$\mathit{tps}\,$]`; it is not permitted
+to instantiate formal type parameters.
+
+The signature and the self constructor invocation of a constructor
+definition are type-checked and evaluated in the scope which is in
+effect at the point of the enclosing class definition, augmented by
+any type parameters of the enclosing class and by any 
+[early definitions](#early-definitions) of the enclosing template.
+The rest of the
+constructor expression is type-checked and evaluated as a function
+body in the current class.
+  
+If there are auxiliary constructors of a class $C$, they form together
+with $C$'s primary [constructor](#class-definitions)
+an overloaded constructor
+definition. The usual rules for 
+[overloading resolution](06-expressions.html#overloading-resolution)
+apply for constructor invocations of $C$,
+including for the self constructor invocations in the constructor
+expressions themselves. However, unlike other methods, constructors
+are never inherited.  To prevent infinite cycles of constructor
+invocations, there is the restriction that every self constructor
+invocation must refer to a constructor definition which precedes it
+(i.e. it must refer to either a preceding auxiliary constructor or the
+primary constructor of the class).  
+
+
+###### Example
+Consider the class definition
+
+```scala
+class LinkedList[A]() {
+  var head: A = _
+  var tail: List[A] = null
+  def isEmpty = tail != null
+  def this(head: A) = { this(); this.head = head }
+  def this(head: A, tail: List[A]) = { this(head); this.tail = tail }
+}
+```
+
+This defines a class `LinkedList` with three constructors.  The
+second constructor constructs a singleton list, while the
+third one constructs a list with a given head and tail.
+
+
+## Case Classes
+
+```ebnf
+TmplDef  ::=  `case' `class' ClassDef
+```
+
+If a class definition is prefixed with `case`, the class is said
+to be a _case class_.  
+
+The formal parameters in the first parameter section of a case class
+are called _elements_; they are treated
+specially. First, the value of such a parameter can be extracted as a
+field of a constructor pattern. Second, a `val` prefix is
+implicitly added to such a parameter, unless the parameter already
+carries a `val` or `var` modifier. Hence, an accessor
+definition for the parameter is [generated](#class-definitions).
+
+A case class definition of `$c$[$\mathit{tps}\,$]($\mathit{ps}_1\,$)$\ldots$($\mathit{ps}_n$)` with type
+parameters $\mathit{tps}$ and value parameters $\mathit{ps}$ implicitly
+generates an [extractor object](08-pattern-matching.html#extractor-patterns) which is
+defined as follows:
+
+```scala
+object $c$ {
+  def apply[$\mathit{tps}\,$]($\mathit{ps}_1\,$)$\ldots$($\mathit{ps}_n$): $c$[$\mathit{tps}\,$] = new $c$[$\mathit{Ts}\,$]($\mathit{xs}_1\,$)$\ldots$($\mathit{xs}_n$)
+  def unapply[$\mathit{tps}\,$]($x$: $c$[$\mathit{tps}\,$]) =
+    if (x eq null) scala.None
+    else scala.Some($x.\mathit{xs}_{11}, \ldots , x.\mathit{xs}_{1k}$)
+}
+```
+
+Here, $\mathit{Ts}$ stands for the vector of types defined in the type
+parameter section $\mathit{tps}$,
+each $\mathit{xs}_i$ denotes the parameter names of the parameter
+section $\mathit{ps}_i$, and
+$\mathit{xs}_{11}, \ldots , \mathit{xs}_{1k}$ denote the names of all parameters
+in the first parameter section $\mathit{xs}_1$.
+If a type parameter section is missing in the
+class, it is also missing in the `apply` and
+`unapply` methods.
+The definition of `apply` is omitted if class $c$ is
+`abstract`.
+
+If the case class definition contains an empty value parameter list, the
+`unapply` method returns a `Boolean` instead of an `Option` type and
+is defined as follows:
+
+```scala
+def unapply[$\mathit{tps}\,$]($x$: $c$[$\mathit{tps}\,$]) = x ne null
+```
+
+The name of the `unapply` method is changed to `unapplySeq` if the first
+parameter section $\mathit{ps}_1$ of $c$ ends in a 
+[repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters).
+If a companion object $c$ exists already, no new object is created,
+but the `apply` and `unapply` methods are added to the existing
+object instead.
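+
+For instance (an illustrative sketch; the case class `Elem` is hypothetical), a
+case class whose first parameter section ends in a repeated parameter can be
+matched with a sequence pattern via the generated `unapplySeq`:
+
+```scala
+case class Elem(label: String, children: String*)
+
+Elem("ul", "item1", "item2") match {
+  case Elem(label, kids @ _*) =>   // `kids` is bound to the sequence of children
+    label + " has " + kids.size + " children"
+}
+```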
+
+A method named `copy` is implicitly added to every case class unless the
+class already has a member (directly defined or inherited) with that name, or the
+class has a repeated parameter. The method is defined as follows:
+
+```scala
+def copy[$\mathit{tps}\,$]($\mathit{ps}'_1\,$)$\ldots$($\mathit{ps}'_n$): $c$[$\mathit{tps}\,$] = new $c$[$\mathit{Ts}\,$]($\mathit{xs}_1\,$)$\ldots$($\mathit{xs}_n$)
+```
+
+Again, `$\mathit{Ts}$` stands for the vector of types defined in the type parameter section `$\mathit{tps}$`
+and each `$\mathit{xs}_i$` denotes the parameter names of the parameter section `$\mathit{ps}'_i$`. The value
+parameters `$\mathit{ps}'_{1,j}$` of the first parameter list have the form `$x_{1,j}$:$T_{1,j}$=this.$x_{1,j}$`,
+the other parameters `$\mathit{ps}'_{i,j}$` of the `copy` method are defined as `$x_{i,j}$:$T_{i,j}$`.
+In all cases `$x_{i,j}$` and `$T_{i,j}$` refer to the name and type of the corresponding class parameter
+`$\mathit{ps}_{i,j}$`.
+
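+As an illustration (a sketch; the case class `Person` is hypothetical), the
+generated `copy` method allows a new instance to be created from an existing
+one, overriding selected elements by name:
+
+```scala
+case class Person(name: String, age: Int)
+
+val p = Person("Ann", 30)
+val q = p.copy(age = 31)   // equivalent to Person(p.name, 31); p itself is unchanged
+```
+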
+Every case class implicitly overrides some method definitions of class
+[`scala.AnyRef`](12-the-scala-standard-library.html#root-classes) unless a definition of the same
+method is already given in the case class itself or a concrete
+definition of the same method is given in some base class of the case
+class different from `AnyRef`. In particular:
+
+- Method `equals: (Any)Boolean` is structural equality, where two
+  instances are equal if they both belong to the case class in question and they
+  have equal (with respect to `equals`) constructor arguments (restricted to the class's _elements_, i.e., the first parameter section).
+- Method `hashCode: Int` computes a hash-code. If the hashCode methods
+  of the data structure members map equal (with respect to equals)
+  values to equal hash-codes, then the case class hashCode method does
+  too.
+- Method `toString: String` returns a string representation which
+  contains the name of the class and its elements.
+
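+For instance (a sketch with a hypothetical case class `Point`), these generated
+methods behave as follows:
+
+```scala
+case class Point(x: Int, y: Int)
+
+Point(1, 2) == Point(1, 2)                       // true: structural equality on the elements
+Point(1, 2).hashCode == Point(1, 2).hashCode     // true: equal values yield equal hash-codes
+Point(1, 2).toString                             // "Point(1,2)"
+```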
+
+###### Example
+Here is the definition of abstract syntax for lambda calculus:
+
+```scala
+class Expr
+case class Var   (x: String)          extends Expr
+case class Apply (f: Expr, e: Expr)   extends Expr
+case class Lambda(x: String, e: Expr) extends Expr
+```
+
+This defines a class `Expr` with case classes
+`Var`, `Apply` and `Lambda`. A call-by-value evaluator
+for lambda expressions could then be written as follows.
+
+```scala
+type Env = String => Value
+case class Value(e: Expr, env: Env)
+
+def eval(e: Expr, env: Env): Value = e match {
+  case Var (x) =>
+    env(x)
+  case Apply(f, g) =>
+    val Value(Lambda (x, e1), env1) = eval(f, env)
+    val v = eval(g, env)
+    eval (e1, (y => if (y == x) v else env1(y)))
+  case Lambda(_, _) =>
+    Value(e, env)
+}
+```
+
+It is possible to define further case classes that extend type
+`Expr` in other parts of the program, for instance
+
+```scala
+case class Number(x: Int) extends Expr
+```
+
+This form of extensibility can be excluded by declaring the base class
+`Expr` `sealed`; in this case, all classes that
+directly extend `Expr` must be in the same source file as
+`Expr`.
+
+
+### Traits
+
+```ebnf
+TmplDef          ::=  `trait' TraitDef
+TraitDef         ::=  id [TypeParamClause] TraitTemplateOpt
+TraitTemplateOpt ::=  `extends' TraitTemplate | [[`extends'] TemplateBody]
+```
+
+A trait is a class that is meant to be added to some other class
+as a mixin. Unlike normal classes, traits cannot have
+constructor parameters. Furthermore, no constructor arguments are
+passed to the superclass of the trait. This is not necessary as traits are
+initialized after the superclass is initialized.
+
+Assume a trait $D$ defines some aspect of an instance $x$ of type $C$ (i.e. $D$ is a base class of $C$).
+Then the _actual supertype_ of $D$ in $x$ is the compound type consisting of all the
+base classes in $\mathcal{L}(C)$ that succeed $D$.  The actual supertype gives
+the context for resolving a [`super` reference](06-expressions.html#this-and-super) in a trait.
+Note that the actual supertype depends on the type to which the trait is added in a mixin composition;
+it is not statically known at the time the trait is defined.
+
+If $D$ is not a trait, then its actual supertype is simply its
+least proper supertype (which is statically known).
+
+### Example
+The following trait defines the property
+of being comparable to objects of some type. It contains an abstract
+method `<` and default implementations of the other
+comparison operators `<=`, `>`, and
+`>=`.
+
+```scala
+trait Comparable[T <: Comparable[T]] { self: T =>
+  def < (that: T): Boolean
+  def <=(that: T): Boolean = this < that || this == that
+  def > (that: T): Boolean = that < this
+  def >=(that: T): Boolean = that <= this
+}
+```
+
+###### Example
+Consider an abstract class `Table` that implements maps
+from a type of keys `A` to a type of values `B`. The class
+has a method `set` to enter a new key / value pair into the table,
+and a method `get` that returns an optional value matching a
+given key. Finally, there is a method `apply` which is like
+`get`, except that it returns a given default value if the table
+is undefined for the given key. This class is implemented as follows.
+
+```scala
+abstract class Table[A, B](defaultValue: B) {
+  def get(key: A): Option[B]
+  def set(key: A, value: B)
+  def apply(key: A) = get(key) match {
+    case Some(value) => value
+    case None => defaultValue
+  }
+}
+```
+
+Here is a concrete implementation of the `Table` class.
+
+```scala
+class ListTable[A, B](defaultValue: B) extends Table[A, B](defaultValue) {
+  private var elems: List[(A, B)] = List()
+  def get(key: A) = elems.find(_._1 == key).map(_._2)
+  def set(key: A, value: B) = { elems = (key, value) :: elems }
+}
+```
+
+Here is a trait that prevents concurrent access to the
+`get` and `set` operations of its parent class:
+
+```scala
+trait SynchronizedTable[A, B] extends Table[A, B] {
+  abstract override def get(key: A): Option[B] =
+    synchronized { super.get(key) }
+  abstract override def set(key: A, value: B) =
+    synchronized { super.set(key, value) }
+}
+```
+
+Note that `SynchronizedTable` does not pass an argument to
+its superclass, `Table`, even  though `Table` is defined with a
+formal parameter. Note also that the `super` calls
+in `SynchronizedTable`'s `get` and `set` methods
+statically refer to abstract methods in class `Table`. This is
+legal, as long as the calling method is labeled
+[`abstract override`](#modifiers).
+
+Finally, the following mixin composition creates a synchronized list
+table with strings as keys and integers as values and with a default
+value `0`:
+
+```scala
+object MyTable extends ListTable[String, Int](0) with SynchronizedTable
+```
+
+The object `MyTable` inherits its `get` and `set`
+method from `SynchronizedTable`.  The `super` calls in these
+methods are re-bound to refer to the corresponding implementations in
+`ListTable`, which is the actual supertype of `SynchronizedTable`
+in `MyTable`.
+
+
+## Object Definitions
+
+```ebnf
+ObjectDef       ::=  id ClassTemplate
+```
+
+An object definition defines a single object of a new class. Its 
+most general form is
+`object $m$ extends $t$`. Here,
+$m$ is the name of the object to be defined, and 
+$t$ is a [template](#templates) of the form
+
+```scala
+$sc$ with $mt_1$ with $\ldots$ with $mt_n$ { $\mathit{stats}$ }
+```
+
+which defines the base classes, behavior and initial state of $m$.
+The extends clause `extends $sc$ with $mt_1$ with $\ldots$ with $mt_n$` 
+can be omitted, in which case
+`extends scala.AnyRef` is assumed.  The class body
+`{ $\mathit{stats}$ }` may also be omitted, in which case the empty body
+`{}` is assumed.
+
+The object definition defines a single object (or: _module_)
+conforming to the template $t$.  It is roughly equivalent to the
+following definition of a lazy value:
+
+```scala
+lazy val $m$ = new $sc$ with $mt_1$ with $\ldots$ with $mt_n$ { this: $m.type$ => $\mathit{stats}$ }
+```
+
+Note that the value defined by an object definition is instantiated
+lazily.  The `new $m$\$cls` constructor is evaluated
+not at the point of the object definition, but is instead evaluated
+the first time $m$ is dereferenced during execution of the program
+(which might be never at all). An attempt to dereference $m$ again in
+the course of evaluation of the constructor leads to an infinite loop
+or run-time error.  
+Other threads trying to dereference $m$ while the
+constructor is being evaluated block until evaluation is complete.
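+
+For instance (an illustrative sketch; the object `Config` is hypothetical),
+nothing is printed until the object is first dereferenced:
+
+```scala
+object Config {
+  println("initializing Config")
+  val timeout = 30
+}
+
+Config.timeout   // only now is the constructor run: prints "initializing Config", yields 30
+```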
+
+The expansion given above is not accurate for top-level objects. It
+cannot be, because variable and method definitions cannot appear at the
+top level outside of a [package object](09-top-level-definitions.html#package-objects). Instead,
+top-level objects are translated to static fields.
+
+###### Example
+Classes in Scala do not have static members; however, an equivalent
+effect can be achieved by an accompanying object definition.
+For example:
+
+```scala
+abstract class Point {
+  val x: Double
+  val y: Double
+  def isOrigin = (x == 0.0 && y == 0.0)
+}
+object Point {
+  val origin = new Point() { val x = 0.0; val y = 0.0 }
+}
+```
+
+This defines a class `Point` and an object `Point` which
+contains `origin` as a member.  Note that the double use of the
+name `Point` is legal, since the class definition defines the
+name `Point` in the type name space, whereas the object
+definition defines a name in the term namespace.
+
+This technique is applied by the Scala compiler when interpreting a
+Java class with static members. Such a class $C$ is conceptually seen
+as a pair of a Scala class that contains all instance members of $C$
+and a Scala object that contains all static members of $C$.
+
+Generally, a _companion module_ of a class is an object which has
+the same name as the class and is defined in the same scope and
+compilation unit. Conversely, the class is called the _companion class_
+of the module.
+
+Very much like a concrete class definition, an object definition may
+still contain declarations of abstract type members, but not of
+abstract term members.
diff --git a/spec/06-expressions.md b/spec/06-expressions.md
new file mode 100644
index 0000000..b2144aa
--- /dev/null
+++ b/spec/06-expressions.md
@@ -0,0 +1,1814 @@
+---
+title: Expressions
+layout: default
+chapter: 6
+---
+
+# Expressions
+
+```ebnf
+Expr         ::=  (Bindings | id | `_') `=>' Expr
+               |  Expr1
+Expr1        ::=  `if' `(' Expr `)' {nl} Expr [[semi] `else' Expr]
+               |  `while' `(' Expr `)' {nl} Expr
+               |  `try' (`{' Block `}' | Expr) [`catch' `{' CaseClauses `}'] [`finally' Expr]
+               |  `do' Expr [semi] `while' `(' Expr ')'
+               |  `for' (`(' Enumerators `)' | `{' Enumerators `}') {nl} [`yield'] Expr
+               |  `throw' Expr
+               |  `return' [Expr]
+               |  [SimpleExpr `.'] id `=' Expr
+               |  SimpleExpr1 ArgumentExprs `=' Expr
+               |  PostfixExpr
+               |  PostfixExpr Ascription
+               |  PostfixExpr `match' `{' CaseClauses `}'
+PostfixExpr  ::=  InfixExpr [id [nl]]
+InfixExpr    ::=  PrefixExpr
+               |  InfixExpr id [nl] InfixExpr
+PrefixExpr   ::=  [`-' | `+' | `~' | `!'] SimpleExpr
+SimpleExpr   ::=  `new' (ClassTemplate | TemplateBody)
+               |  BlockExpr
+               |  SimpleExpr1 [`_']
+SimpleExpr1  ::=  Literal
+               |  Path
+               |  `_'
+               |  `(' [Exprs] `)'
+               |  SimpleExpr `.' id
+               |  SimpleExpr TypeArgs
+               |  SimpleExpr1 ArgumentExprs
+               |  XmlExpr
+Exprs        ::=  Expr {`,' Expr}
+BlockExpr    ::=  ‘{’ CaseClauses ‘}’
+               |  ‘{’ Block ‘}’
+Block        ::=  BlockStat {semi BlockStat} [ResultExpr]
+ResultExpr   ::=  Expr1
+               |  (Bindings | ([`implicit'] id | `_') `:' CompoundType) `=>' Block
+Ascription   ::=  `:' InfixType
+               |  `:' Annotation {Annotation}
+               |  `:' `_' `*'
+```
+
+Expressions are composed of operators and operands. Expression forms are
+discussed subsequently in decreasing order of precedence. 
+
+## Expression Typing
+
+The typing of expressions is often relative to some _expected type_  (which might be undefined). When we write "expression $e$ is expected to conform to type $T$", we mean:
+  1. the expected type of $e$ is $T$, and
+  2. the type of expression $e$ must conform to $T$.
+
+The following skolemization rule is applied universally for every
+expression: If the type of an expression would be an existential type
+$T$, then the type of the expression is assumed instead to be a
+[skolemization](03-types.html#existential-types) of $T$.
+
+Skolemization is reversed by type packing. Assume an expression $e$ of
+type $T$ and let $t_1[\mathit{tps}_1] >: L_1 <: U_1 , \ldots , t_n[\mathit{tps}_n] >: L_n <: U_n$ be
+all the type variables created by skolemization of some part of $e$ which are free in $T$.
+Then the _packed type_ of $e$ is
+
+```scala
+$T$ forSome { type $t_1[\mathit{tps}_1] >: L_1 <: U_1$; $\ldots$; type $t_n[\mathit{tps}_n] >: L_n <: U_n$ }.
+```
+
+
+## Literals
+
+```ebnf
+SimpleExpr    ::=  Literal
+```
+
+Typing of literals is as described [here](01-lexical-syntax.html#literals); their
+evaluation is immediate.
+
+
+## The _Null_ Value
+
+The `null` value is of type `scala.Null`, and is thus
+compatible with every reference type.  It denotes a reference value
+which refers to a special “`null`” object. This object
+implements methods in class `scala.AnyRef` as follows:
+
+- `eq($x\,$)` and `==($x\,$)` return `true` iff the
+  argument $x$ is also the "null" object.
+- `ne($x\,$)` and `!=($x\,$)` return `true` iff the
+  argument $x$ is not the "null" object.
+- `isInstanceOf[$T\,$]` always returns `false`.
+- `asInstanceOf[$T\,$]` returns the [default value](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) of type $T$.
+- `##` returns ``0``.
+
+A reference to any other member of the "null" object causes a
+`NullPointerException` to be thrown. 
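+
+For instance (a sketch; `s` is just an illustrative name), assuming a reference
+of type `String` that holds the "null" object:
+
+```scala
+val s: String = null
+
+s == null                  // true
+s.isInstanceOf[String]     // false
+s.asInstanceOf[String]     // null, the default value of type String
+s.##                       // 0
+s.length                   // throws a NullPointerException
+```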
+
+
+## Designators
+
+```ebnf
+SimpleExpr  ::=  Path
+              |  SimpleExpr `.' id
+```
+
+A designator refers to a named term. It can be a _simple name_ or
+a _selection_. 
+
+A simple name $x$ refers to a value as specified 
+[here](02-identifiers-names-and-scopes.html#identifiers-names-and-scopes).
+If $x$ is bound by a definition or declaration in an enclosing class
+or object $C$, it is taken to be equivalent to the selection
+`$C$.this.$x$` where $C$ is taken to refer to the class containing $x$
+even if the type name $C$ is [shadowed](02-identifiers-names-and-scopes.html#identifiers-names-and-scopes) at the
+occurrence of $x$.
+
+If $r$ is a [stable identifier](03-types.html#paths) of type $T$, the selection $r.x$ refers
+statically to a term member $m$ of $r$ that is identified in $T$ by
+the name $x$. 
+
+<!-- There might be several such members, in which
+case overloading resolution (\sref{overloading-resolution}) is applied
+to pick a unique one.}  -->
+
+For other expressions $e$, $e.x$ is typed as
+if it was `{ val $y$ = $e$; $y$.$x$ }`, for some fresh name
+$y$.  
+
+The expected type of a designator's prefix is always undefined.  The
+type of a designator is the type $T$ of the entity it refers to, with
+the following exception: The type of a [path](03-types.html#paths) $p$
+which occurs in a context where a [stable type](03-types.html#singleton-types)
+is required is the singleton type `$p$.type`.
+
+The contexts where a stable type is required are those that satisfy
+one of the following conditions:
+
+1. The path $p$ occurs as the prefix of a selection and it does not
+designate a constant, or
+1. The expected type $\mathit{pt}$ is a stable type, or
+1. The expected type $\mathit{pt}$ is an abstract type with a stable type as lower
+   bound, and the type $T$ of the entity referred to by $p$ does not
+   conform to $\mathit{pt}$, or
+1. The path $p$ designates a module.
+
+
+The selection $e.x$ is evaluated by first evaluating the qualifier
+expression $e$, which yields an object $r$, say. The selection's
+result is then the member of $r$ that is either defined by $m$ or defined
+by a definition overriding $m$. 
+If that member has a type which
+conforms to `scala.NotNull`, the member's value must be initialized
+to a value different from `null`, otherwise a `scala.UninitializedError`
+is thrown.
+ 
+
+## This and Super
+
+```ebnf
+SimpleExpr  ::=  [id `.'] `this'
+              |  [id '.'] `super' [ClassQualifier] `.' id
+```
+
+The expression `this` can appear in the statement part of a
+template or compound type. It stands for the object being defined by
+the innermost template or compound type enclosing the reference. If
+this is a compound type, the type of `this` is that compound type.
+If it is a template of a
+class or object definition with simple name $C$, the type of this
+is the same as the type of `$C$.this`.
+
+The expression `$C$.this` is legal in the statement part of an
+enclosing class or object definition with simple name $C$. It
+stands for the object being defined by the innermost such definition.
+If the expression's expected type is a stable type, or
+`$C$.this` occurs as the prefix of a selection, its type is
+`$C$.this.type`, otherwise it is the self type of class $C$.
+
+A reference `super.$m$` refers statically to a method or type $m$
+in the least proper supertype of the innermost template containing the
+reference.  It evaluates to the member $m'$ in the actual supertype of
+that template which is equal to $m$ or which overrides $m$.  The
+statically referenced member $m$ must be a type or a
+method.  <!-- explanation: so that we need not create several fields for overriding vals -->
+
+If it is
+a method, it must be concrete, or the template
+containing the reference must have a member $m'$ which overrides $m$
+and which is labeled `abstract override`.  
+
+A reference `$C$.super.$m$` refers statically to a method
+or type $m$ in the least proper supertype of the innermost enclosing class or
+object definition named $C$ which encloses the reference. It evaluates
+to the member $m'$ in the actual supertype of that class or object
+which is equal to $m$ or which overrides $m$. The
+statically referenced member $m$ must be a type or a
+method.  If the statically
+referenced member $m$ is a method, it must be concrete, or the innermost enclosing
+class or object definition named $C$ must have a member $m'$ which
+overrides $m$ and which is labeled `abstract override`.
+
+The `super` prefix may be followed by a trait qualifier
+`[$T\,$]`, as in `$C$.super[$T\,$].$x$`. This is
+called a _static super reference_.  In this case, the reference is
+to the type or method of $x$ in the parent trait of $C$ whose simple
+name is $T$. That member must be uniquely defined. If it is a method,
+it must be concrete.
+
+### Example
+Consider the following class definitions
+
+```scala
+class Root { def x = "Root" }
+class A extends Root { override def x = "A" ; def superA = super.x }
+trait B extends Root { override def x = "B" ; def superB = super.x }
+class C extends Root with B {
+  override def x = "C" ; def superC = super.x
+}
+class D extends A with B {
+  override def x = "D" ; def superD = super.x
+}
+```
+
+The linearization of class `C` is `{C, B, Root}` and
+the linearization of class `D` is `{D, B, A, Root}`.
+Then we have:
+
+```scala
+(new A).superA == "Root"
+(new C).superB == "Root"
+(new C).superC == "B"
+(new D).superA == "Root"
+(new D).superB == "A"
+(new D).superD == "B"
+```
+
+Note that the `superB` function returns different results
+depending on whether `B` is mixed in with class `Root` or `A`.
+
+
+## Function Applications
+
+```ebnf
+SimpleExpr    ::=  SimpleExpr1 ArgumentExprs
+ArgumentExprs ::=  `(' [Exprs] `)'
+                |  `(' [Exprs `,'] PostfixExpr `:' `_' `*' ')'
+                |  [nl] BlockExpr
+Exprs         ::=  Expr {`,' Expr}
+```
+
+An application `$f$($e_1 , \ldots , e_m$)` applies the
+function $f$ to the argument expressions $e_1 , \ldots , e_m$. If $f$
+has a method type `($p_1$:$T_1 , \ldots , p_n$:$T_n$)$U$`, the type of
+each argument expression $e_i$ is typed with the
+corresponding parameter type $T_i$ as expected type. Let $S_i$ be the
+type of argument $e_i$ $(i = 1 , \ldots , m)$. If $f$ is a polymorphic method,
+[local type inference](#local-type-inference) is used to determine
+type arguments for $f$. If $f$ has some value type, the application is taken to
+be equivalent to `$f$.apply($e_1 , \ldots , e_m$)`,
+i.e. the application of an `apply` method defined by $f$.
+
+The function $f$ must be _applicable_ to its arguments $e_1
+, \ldots , e_n$ of types $S_1 , \ldots , S_n$.
+
+If $f$ has a method type $(p_1:T_1 , \ldots , p_n:T_n)U$
+we say that an argument expression $e_i$ is a _named_ argument if
+it has the form $x_i=e'_i$ and $x_i$ is one of the parameter names
+$p_1 , \ldots , p_n$. The function $f$ is applicable if all of the following conditions
+hold:
+
+- For every named argument $x_i=e'_i$ the type $S_i$
+  is compatible with the parameter type $T_j$ whose name $p_j$ matches $x_i$.
+- For every positional argument $e_i$ the type $S_i$
+is compatible with $T_i$.
+- If the expected type is defined, the result type $U$ is
+  compatible with it.
+
+If $f$ is a polymorphic method it is applicable if 
+[local type inference](#local-type-inference) can
+determine type arguments so that the instantiated method is applicable. If
+$f$ has some value type it is applicable if it has a method member named
+`apply` which is applicable.
+
+
+Evaluation of `$f$($e_1 , \ldots , e_n$)` usually entails evaluation of
+$f$ and $e_1 , \ldots , e_n$ in that order. Each argument expression
+is converted to the type of its corresponding formal parameter.  After
+that, the application is rewritten to the function's right hand side,
+with actual arguments substituted for formal parameters.  The result
+of evaluating the rewritten right-hand side is finally converted to
+the function's declared result type, if one is given.
+
+The case of a formal parameter with a parameterless
+method type `=>$T$` is treated specially. In this case, the
+corresponding actual argument expression $e$ is not evaluated before the
+application. Instead, every use of the formal parameter on the
+right-hand side of the rewrite rule entails a re-evaluation of $e$. 
+In other words, the evaluation order for
+`=>`-parameters is _call-by-name_ whereas the evaluation
+order for normal parameters is _call-by-value_.
+Furthermore, it is required that $e$'s [packed type](#expression-typing)
+conforms to the parameter type $T$.
+The behavior of by-name parameters is preserved if the application is
+transformed into a block due to named or default arguments. In this case,
+the local value for that parameter has the form `val $y_i$ = () => $e$`
+and the argument passed to the function is `$y_i$()`.
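+
+For instance (an illustrative sketch; the names `twice` and `next` are
+hypothetical), a `=>`-parameter is re-evaluated at each use on the right-hand
+side:
+
+```scala
+var count = 0
+def next(): Int = { count += 1; count }
+
+def twice(x: => Int): Int = x + x   // `x` is a call-by-name parameter
+
+twice(next())   // next() is evaluated twice: the result is 1 + 2 = 3, and count is now 2
+```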
+
+The last argument in an application may be marked as a sequence
+argument, e.g. `$e$: _*`. Such an argument must correspond
+to a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type
+`$S$*` and it must be the only argument matching this
+parameter (i.e. the number of formal parameters and actual arguments
+must be the same). Furthermore, the type of $e$ must conform to
+`scala.Seq[$T$]`, for some type $T$ which conforms to
+$S$. In this case, the argument list is transformed by replacing the
+sequence $e$ with its elements. When the application uses named
+arguments, the vararg parameter has to be specified exactly once.
+
+A function application usually allocates a new frame on the program's
+run-time stack. However, if a local function or a final method calls
+itself as its last action, the call is executed using the stack-frame
+of the caller.
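+
+For instance (a sketch; `length` and `loop` are illustrative names only), the
+recursive call below occurs as the last action of a local function, so it can
+be executed in the caller's stack frame:
+
+```scala
+def length[A](xs: List[A]): Int = {
+  def loop(rest: List[A], acc: Int): Int =
+    if (rest.isEmpty) acc
+    else loop(rest.tail, acc + 1)   // tail call: reuses the current stack frame
+  loop(xs, 0)
+}
+```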
+
+###### Example
+Assume the following function which computes the sum of a
+variable number of arguments:
+
+```scala
+def sum(xs: Int*) = (0 /: xs) ((x, y) => x + y)
+```
+
+Then
+
+```scala
+sum(1, 2, 3, 4)
+sum(List(1, 2, 3, 4): _*)
+```
+
+both yield `10` as result. On the other hand,
+
+```scala
+sum(List(1, 2, 3, 4))
+```
+
+would not typecheck.
+
+
+### Named and Default Arguments
+
+If an application uses named arguments $p = e$ or default
+arguments, the following conditions must hold.
+
+- For every named argument $p_i = e_i$ which appears left of a positional argument
+  in the argument list $e_1 \ldots e_m$, the argument position $i$ coincides with
+  the position of parameter $p_i$ in the parameter list of the applied function.
+- The names $x_i$ of all named arguments are pairwise distinct and no named
+  argument defines a parameter which is already specified by a
+  positional argument.
+- Every formal parameter $p_j:T_j$ which is not specified by either a positional
+  or a named argument has a default argument.
+
+
+If the application uses named or default
+arguments the following transformation is applied to convert it into
+an application without named or default arguments. 
+
+If the function $f$
+has the form `$p.m$[$\mathit{targs}$]` it is transformed into the
+block
+
+```scala
+{ val q = $p$
+  q.$m$[$\mathit{targs}$]
+}
+```
+
+If the function $f$ is itself an application expression the transformation
+is applied recursively on $f$. The result of transforming $f$ is a block of
+the form
+
+```scala
+{ val q = $p$
+  val $x_1$ = expr$_1$
+  $\ldots$
+  val $x_k$ = expr$_k$
+  q.$m$[$\mathit{targs}$]($\mathit{args}_1$)$, \ldots ,$($\mathit{args}_l$)
+}
+```
+
+where every argument in $(\mathit{args}_1) , \ldots , (\mathit{args}_l)$ is a reference to
+one of the values $x_1 , \ldots , x_k$. To integrate the current application
+into the block, first a value definition using a fresh name $y_i$ is created
+for every argument in $e_1 , \ldots , e_m$, which is initialised to $e_i$ for
+positional arguments and to $e'_i$ for named arguments of the form
+`$x_i=e'_i$`. Then, for every parameter which is not specified
+by the argument list, a value definition using a fresh name $z_i$ is created,
+which is initialized using the method computing the 
+[default argument](04-basic-declarations-and-definitions.html#function-declarations-and-definitions) of
+this parameter.
+
+Let $\mathit{args}$ be a permutation of the generated names $y_i$ and $z_i$ such
+that the position of each name matches the position of its corresponding
+parameter in the method type `($p_1:T_1 , \ldots , p_n:T_n$)$U$`.
+The final result of the transformation is a block of the form
+
+```scala
+{ val q = $p$
+  val $x_1$ = expr$_1$
+  $\ldots$
+  val $x_k$ = expr$_k$
+  val $y_1$ = $e_1$
+  $\ldots$
+  val $y_m$ = $e_m$
+  val $z_1$ = $q.m\$default\$i[\mathit{targs}](\mathit{args}_1), \ldots ,(\mathit{args}_l)$
+  $\ldots$
+  val $z_d$ = $q.m\$default\$j[\mathit{targs}](\mathit{args}_1), \ldots ,(\mathit{args}_l)$
+  q.$m$[$\mathit{targs}$]($\mathit{args}_1$)$, \ldots ,$($\mathit{args}_l$)($\mathit{args}$)
+}
+```
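+
+As an illustration (a sketch only; the method `resize` is hypothetical),
+consider an application that passes a single named argument and relies on the
+defaults for the remaining parameters:
+
+```scala
+def resize(width: Int = 120, height: Int, scale: Double = 1.0): String =
+  width + " x " + height + " @ " + scale
+
+resize(height = 80)
+// is expanded, roughly, into a block of the form
+// {
+//   val y1 = 80     // the named argument
+//   val z1 = ...    // computed by the default getter for `width`
+//   val z2 = ...    // computed by the default getter for `scale`
+//   resize(z1, y1, z2)
+// }
+```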
+
+
+## Method Values
+
+```ebnf
+SimpleExpr    ::=  SimpleExpr1 `_'
+```
+
+The expression `$e$ _` is well-formed if $e$ is of method
+type or if $e$ is a call-by-name parameter.  If $e$ is a method with
+parameters, `$e$ _` represents $e$ converted to a function
+type by [eta expansion](#eta-expansion). If $e$ is a
+parameterless method or call-by-name parameter of type 
+`=>$T$`, `$e$ _` represents the function of type
+`() => $T$`, which evaluates $e$ when it is applied to the empty
+parameter list `()`.
+
+###### Example
+The method values in the left column are each equivalent to the [eta-expanded expressions](#eta-expansion) on the right.
+
+| placeholder syntax            | eta-expansion                                                               |
+|------------------------------ | ----------------------------------------------------------------------------|
+|`math.sin _`                   | `x => math.sin(x)`                                                          |
+|`math.pow _`                   | `(x1, x2) => math.pow(x1, x2)`                                              |
+|`val vs = 1 to 9; vs.fold _`   | `(z) => (op) => vs.fold(z)(op)`                                             |
+|`(1 to 9).fold(z)_`            | `{ val eta1 = z; val eta2 = 1 to 9; op => eta2.fold(eta1)(op) }`            |
+|`Some(1).fold(??? : Int)_`     | `{ val eta1 = () => ???; val eta2 = Some(1); op => eta2.fold(eta1())(op) }` |
+
+
+Note that a space is necessary between a method name and the trailing underscore
+because otherwise the underscore would be considered part of the name.
+
+
+
+## Type Applications
+
+```ebnf
+SimpleExpr    ::=  SimpleExpr TypeArgs
+```
+
+A type application `$e$[$T_1 , \ldots , T_n$]` instantiates
+a polymorphic value $e$ of type 
+`[$a_1$ >: $L_1$ <: $U_1, \ldots , a_n$ >: $L_n$ <: $U_n$]$S$` 
+with argument types
+`$T_1 , \ldots , T_n$`.  Every argument type $T_i$ must obey
+the corresponding bounds $L_i$ and $U_i$.  That is, for each $i = 1
+, \ldots , n$, we must have $\sigma L_i <: T_i <: \sigma
+U_i$, where $\sigma$ is the substitution $[a_1 := T_1 , \ldots , a_n
+:= T_n]$.  The type of the application is $\sigma S$.
+
+If the function part $e$ is of some value type, the type application
+is taken to be equivalent to 
+`$e$.apply[$T_1 , \ldots ,$ T$_n$]`, i.e. the application of an `apply` method defined by
+$e$.
+
+Type applications can be omitted if 
+[local type inference](#local-type-inference) can infer best type parameters 
+for a polymorphic function from the types of the actual function arguments
+and the expected result type.
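+
+For instance (an illustrative sketch; `pair` is a hypothetical method), the
+type arguments of a polymorphic method can be given explicitly or left to
+local type inference:
+
+```scala
+def pair[A, B](x: A, y: B): (A, B) = (x, y)
+
+pair[Int, String](1, "one")   // explicit type application
+pair(1, "one")                // type arguments inferred from the value arguments
+```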
+
+
+## Tuples
+
+```ebnf
+SimpleExpr   ::=  `(' [Exprs] `)'
+```
+
+A tuple expression `($e_1 , \ldots , e_n$)` is an alias
+for the class instance creation 
+`scala.Tuple$n$($e_1 , \ldots , e_n$)`, where $n \geq 2$.  
+The empty tuple
+`()` is the unique value of type `scala.Unit`.
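+
+For instance (a sketch), the following expressions are equivalent:
+
+```scala
+val t = (1, "one", true)            // the same as scala.Tuple3(1, "one", true)
+t == scala.Tuple3(1, "one", true)   // true
+val u: Unit = ()                    // the empty tuple is the unit value
+```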
+
+
+## Instance Creation Expressions
+
+```ebnf
+SimpleExpr     ::=  `new' (ClassTemplate | TemplateBody)
+```
+
+A simple instance creation expression is of the form 
+`new $c$` 
+where $c$ is a [constructor invocation](05-classes-and-objects.html#constructor-invocations). Let $T$ be
+the type of $c$. Then $T$ must
+denote a (a type instance of) a non-abstract subclass of
+`scala.AnyRef`. Furthermore, the _concrete self type_ of the
+expression must conform to the [self type](05-classes-and-objects.html#templates) of the class denoted by
+$T$. The concrete self type is normally
+$T$, except if the expression `new $c$` appears as the
+right hand side of a value definition
+
+```scala
+val $x$: $S$ = new $c$
+```
+
+(where the type annotation `: $S$` may be missing).
+In the latter case, the concrete self type of the expression is the
+compound type `$T$ with $x$.type`.
+
+The expression is evaluated by creating a fresh
+object of type $T$ which is initialized by evaluating $c$. The
+type of the expression is $T$.
+
+A general instance creation expression is of the form 
+`new $t$` for some [class template](05-classes-and-objects.html#templates) $t$.
+Such an expression is equivalent to the block
+
+```scala
+{ class $a$ extends $t$; new $a$ }
+```
+
+where $a$ is a fresh name of an _anonymous class_ which is
+inaccessible to user programs.
+
+There is also a shorthand form for creating values of structural
+types: If `{$D$}` is a class body, then 
+`new {$D$}` is equivalent to the general instance creation expression
+`new AnyRef{$D$}`.
+
+###### Example
+Consider the following structural instance creation expression:
+
+```scala
+new { def getName() = "aaron" }
+```
+
+This is a shorthand for the general instance creation expression
+
+```scala
+new AnyRef{ def getName() = "aaron" }
+```
+
+The latter is in turn a shorthand for the block
+
+```scala
+{ class anon\$X extends AnyRef{ def getName() = "aaron" }; new anon\$X }
+```
+
+where `anon\$X` is some freshly created name.
+
+
+## Blocks
+
+```ebnf
+BlockExpr  ::=  ‘{’ CaseClauses ‘}’
+             |  ‘{’ Block ‘}’
+Block      ::=  BlockStat {semi BlockStat} [ResultExpr]
+```
+
+A block expression `{$s_1$; $\ldots$; $s_n$; $e\,$}` is
+constructed from a sequence of block statements $s_1 , \ldots , s_n$
+and a final expression $e$.  The statement sequence may not contain
+two definitions or declarations that bind the same name in the same
+namespace.  The final expression can be omitted, in which
+case the unit value `()` is assumed.
+
+
+The expected type of the final expression $e$ is the expected
+type of the block. The expected type of all preceding statements is
+undefined.
+
+The type of a block `$s_1$; $\ldots$; $s_n$; $e$` is
+`$T$ forSome {$\,Q\,$}`, where $T$ is the type of $e$ and $Q$ 
+contains [existential clauses](03-types.html#existential-types)
+for every value or type name which is free in $T$ 
+and which is defined locally in one of the statements $s_1 , \ldots , s_n$.
+We say the existential clause _binds_ the occurrence of the value or type name.
+Specifically, 
+
+- A locally defined type definition  `type$\;t = T$`
+  is bound by the existential clause `type$\;t >: T <: T$`.
+  It is an error if $t$ carries type parameters. 
+- A locally defined value definition `val$\;x: T = e$` is
+  bound by the existential clause `val$\;x: T$`.
+- A locally defined class definition `class$\;c$ extends$\;t$`
+  is bound by the existential clause `type$\;c <: T$` where
+  $T$ is the least class type or refinement type which is a proper
+  supertype of the type $c$. It is an error if $c$ carries type parameters. 
+- A locally defined object definition `object$\;x\;$extends$\;t$`
+  is bound by the existential clause `val$\;x: T$` where
+  $T$ is the least class type or refinement type which is a proper supertype of the type 
+  `$x$.type`.
+
+Evaluation of the block entails evaluation of its
+statement sequence, followed by an evaluation of the final expression
+$e$, which defines the result of the block.
+
+###### Example
+Assuming a class `Ref[T](x: T)`, the block
+
+```scala
+{ class C extends B {$\ldots$} ; new Ref(new C) }
+```
+
+has the type `Ref[_1] forSome { type _1 <: B }`.
+The block
+
+```scala
+{ class C extends B {$\ldots$} ; new C }
+```
+
+simply has type `B`, because with the rules [here](03-types.html#simplification-rules)
+the existentially quantified type
+`_1 forSome { type _1 <: B }` can be simplified to `B`.
+
+
+## Prefix, Infix, and Postfix Operations
+
+```ebnf
+PostfixExpr     ::=  InfixExpr [id [nl]]
+InfixExpr       ::=  PrefixExpr
+                  |  InfixExpr id [nl] InfixExpr
+PrefixExpr      ::=  [`-' | `+' | `!' | `~'] SimpleExpr 
+```
+
+Expressions can be constructed from operands and operators. 
+
+
+### Prefix Operations
+
+A prefix operation $\mathit{op};e$ consists of a prefix operator $\mathit{op}$, which
+must be one of the identifiers ‘`+`’, ‘`-`’,
+‘`!`’ or ‘`~`’. The expression $\mathit{op};e$ is
+equivalent to the postfix method application
+`e.unary_$\mathit{op}$`.
+
+<!-- TODO: Generalize to arbitrary operators -->
+
+Prefix operators are different from normal function applications in
+that their operand expression need not be atomic. For instance, the
+input sequence `-sin(x)` is read as `-(sin(x))`, whereas the
+function application `negate sin(x)` would be parsed as the
+application of the infix operator `sin` to the operands
+`negate` and `(x)`.
+
+### Postfix Operations
+
+A postfix operator can be an arbitrary identifier. The postfix
+operation $e;\mathit{op}$ is interpreted as $e.\mathit{op}$. 
+
+### Infix Operations
+
+An infix operator can be an arbitrary identifier. Infix operators have
+precedence and associativity defined as follows:
+
+The _precedence_ of an infix operator is determined by the operator's first
+character. Characters are listed below in increasing order of
+precedence, with characters on the same line having the same precedence.
+
+```scala
+(all letters)
+|
+^
+&
+= !
+< >
+:
++ -
+* / %
+(all other special characters)
+```
+
+That is, operators starting with a letter have lowest precedence,
+followed by operators starting with `|`, etc.
+
+There's one exception to this rule, which concerns
+[_assignment operators_](#assignment-operators).
+The precedence of an assignment operator is the same as the one
+of simple assignment `(=)`. That is, it is lower than the
+precedence of any other operator. 
+
+The _associativity_ of an operator is determined by the operator's
+last character.  Operators ending in a colon `:` are
+right-associative. All other operators are left-associative.
+
+Precedence and associativity of operators determine the grouping of
+parts of an expression as follows.
+
+- If there are several infix operations in an
+  expression, then operators with higher precedence bind more closely
+  than operators with lower precedence.
+- If there are consecutive infix
+  operations $e_0; \mathit{op}_1; e_1; \mathit{op}_2 \ldots \mathit{op}_n; e_n$ 
+  with operators $\mathit{op}_1 , \ldots , \mathit{op}_n$ of the same precedence, 
+  then all these operators must
+  have the same associativity. If all operators are left-associative,
+  the sequence is interpreted as
+  $(\ldots(e_0;\mathit{op}_1;e_1);\mathit{op}_2\ldots);\mathit{op}_n;e_n$. 
+  Otherwise, if all operators are right-associative, the
+  sequence is interpreted as
+  $e_0;\mathit{op}_1;(e_1;\mathit{op}_2;(\ldots \mathit{op}_n;e_n)\ldots)$.
+- Postfix operators always have lower precedence than infix
+  operators. E.g. $e_1;\mathit{op}_1;e_2;\mathit{op}_2$ is always equivalent to
+  $(e_1;\mathit{op}_1;e_2);\mathit{op}_2$.
+
+The right-hand operand of a left-associative operator may consist of
+several arguments enclosed in parentheses, e.g. $e;\mathit{op};(e_1,\ldots,e_n)$.
+This expression is then interpreted as $e.\mathit{op}(e_1,\ldots,e_n)$.
+
+A left-associative binary
+operation $e_1;\mathit{op};e_2$ is interpreted as $e_1.\mathit{op}(e_2)$. If $\mathit{op}$ is
+right-associative, the same operation is interpreted as
+`{ val $x$=$e_1$; $e_2$.$\mathit{op}$($x\,$) }`, where $x$ is a fresh
+name. 
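+
+For instance (a sketch), precedence and associativity determine the grouping
+of the following expressions:
+
+```scala
+1 + 2 * 3        // parsed as 1 + (2 * 3), since * has higher precedence than +
+1 :: 2 :: Nil    // :: ends in a colon, so this is parsed as 1 :: (2 :: Nil),
+                 // which is evaluated roughly as { val x = 1; (2 :: Nil).::(x) }
+```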
+
+### Assignment Operators
+
+An assignment operator is an operator symbol (syntax category
+`op` in [Identifiers](01-lexical-syntax.html#identifiers)) that ends in an equals character
+“`=`”, with the exception of operators for which one of 
+the following conditions holds:
+
+1. the operator also starts with an equals character, or
+1. the operator is one of `(<=)`, `(>=)`, `(!=)`.
+
+Assignment operators are treated specially in that they
+can be expanded to assignments if no other interpretation is valid.
+
+Let's consider an assignment operator such as `+=` in an infix
+operation `$l$ += $r$`, where $l$, $r$ are expressions.  
+This operation can be re-interpreted as an operation which corresponds 
+to the assignment
+
+```scala
+$l$ = $l$ + $r$
+```
+
+except that the operation's left-hand-side $l$ is evaluated only once.
+
+The re-interpretation occurs if the following two conditions are fulfilled.
+
+1. The left-hand-side $l$ does not have a member named
+   `+=`, and also cannot be converted by an 
+   [implicit conversion](#implicit-conversions)
+   to a value with a member named `+=`.
+1. The assignment `$l$ = $l$ + $r$` is type-correct.
+   In particular this implies that $l$ refers to a variable or object 
+   that can be assigned to, and that is convertible to a value with a member 
+   named `+`.
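+
+For instance (an illustrative sketch), whether `+=` is re-interpreted as an
+assignment depends on the left-hand side:
+
+```scala
+var counter = 0
+counter += 1      // Int has no member `+=`, so this expands to counter = counter + 1
+
+val buf = new scala.collection.mutable.ListBuffer[Int]
+buf += 1          // ListBuffer defines `+=`, so this is an ordinary method call
+```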
+
+
+## Typed Expressions
+
+```ebnf
+Expr1              ::=  PostfixExpr `:' CompoundType
+```
+
+The typed expression $e: T$ has type $T$. The type of
+expression $e$ is expected to conform to $T$. The result of
+the expression is the value of $e$ converted to type $T$.
+
+###### Example
+Here are examples of well-typed and ill-typed expressions.
+
+```scala
+1: Int               // legal, of type Int
+1: Long              // legal, of type Long
+// 1: string         // ***** illegal
+```
+
+
+## Annotated Expressions
+
+```ebnf
+Expr1              ::=  PostfixExpr `:' Annotation {Annotation} 
+```
+
+An annotated expression `$e$: @$a_1$ $\ldots$ @$a_n$`
+attaches [annotations](11-user-defined-annotations.html#user-defined-annotations) $a_1 , \ldots , a_n$ to the
+expression $e$.
+
+
+## Assignments
+
+```ebnf
+Expr1        ::=  [SimpleExpr `.'] id `=' Expr
+               |  SimpleExpr1 ArgumentExprs `=' Expr
+```
+
+The interpretation of an assignment to a simple variable `$x$ = $e$`
+depends on the definition of $x$. If $x$ denotes a mutable
+variable, then the assignment changes the current value of $x$ to be
+the result of evaluating the expression $e$. The type of $e$ is
+expected to conform to the type of $x$. If $x$ is a parameterless
+function defined in some template, and the same template contains a
+setter function `$x$_=` as member, then the assignment
+`$x$ = $e$` is interpreted as the invocation
+`$x$_=($e\,$)` of that setter function.  Analogously, an
+assignment `$f.x$ = $e$` to a parameterless function $x$
+is interpreted as the invocation `$f.x$_=($e\,$)`.
+
+An assignment `$f$($\mathit{args}\,$) = $e$` with a function application to the
+left of the ‘`=`’ operator is interpreted as 
+`$f.$update($\mathit{args}$, $e\,$)`, i.e.
+the invocation of an `update` function defined by $f$.
+
+###### Example
+Here are some assignment expressions and their equivalent expansions.
+
+| assignment        | expansion              |
+|-------------------|------------------------|
+| `x.f = e`         | `x.f_=(e)`             |
+| `x.f() = e`       | `x.f.update(e)`        |
+| `x.f(i) = e`      | `x.f.update(i, e)`     |
+| `x.f(i, j) = e`   | `x.f.update(i, j, e)`  |
+
+### Example: Imperative Matrix Multiplication
+
+Here is the usual imperative code for matrix multiplication.
+
+```scala
+def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = {
+  val zss: Array[Array[Double]] = new Array(xss.length, yss(0).length)
+  var i = 0
+  while (i < xss.length) {
+    var j = 0
+    while (j < yss(0).length) {
+      var acc = 0.0
+      var k = 0
+      while (k < yss.length) {
+        acc = acc + xss(i)(k) * yss(k)(j)
+        k += 1
+      }
+      zss(i)(j) = acc
+      j += 1
+    }
+    i += 1
+  }
+  zss
+}
+```
+
+Desugaring the array accesses and assignments yields the following
+expanded version:
+
+```scala
+def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = {
+  val zss: Array[Array[Double]] = new Array(xss.length, yss.apply(0).length)
+  var i = 0
+  while (i < xss.length) {
+    var j = 0
+    while (j < yss.apply(0).length) {
+      var acc = 0.0
+      var k = 0
+      while (k < yss.length) {
+        acc = acc + xss.apply(i).apply(k) * yss.apply(k).apply(j)
+        k += 1
+      }
+      zss.apply(i).update(j, acc)
+      j += 1
+    }
+    i += 1
+  }
+  zss
+}
+```
+
+
+## Conditional Expressions
+
+```ebnf
+Expr1          ::=  `if' `(' Expr `)' {nl} Expr [[semi] `else' Expr]
+```
+
+The conditional expression `if ($e_1$) $e_2$ else $e_3$` chooses
+one of the values of $e_2$ and $e_3$, depending on the
+value of $e_1$. The condition $e_1$ is expected to
+conform to type `Boolean`.  The then-part $e_2$ and the
+else-part $e_3$ are both expected to conform to the expected
+type of the conditional expression. The type of the conditional
+expression is the [weak least upper bound](03-types.html#weak-conformance)
+of the types of $e_2$ and
+$e_3$.  A semicolon preceding the `else` symbol of a
+conditional expression is ignored.
+
+The conditional expression is evaluated by evaluating first
+$e_1$. If this evaluates to `true`, the result of
+evaluating $e_2$ is returned, otherwise the result of
+evaluating $e_3$ is returned.
+
+A short form of the conditional expression eliminates the
+else-part. The conditional expression `if ($e_1$) $e_2$` is
+evaluated as if it was `if ($e_1$) $e_2$ else ()`.  
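+
+For instance (a sketch), the type of a conditional expression is the weak
+least upper bound of its branches, and the short form supplies `()` for the
+missing else-part:
+
+```scala
+val n = if (scala.util.Random.nextBoolean()) 1 else 2.0
+                                   // n: Double, the weak least upper bound of Int and Double
+if (n > 1) println("big")          // type Unit; same as if (n > 1) println("big") else ()
+```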
+
+## While Loop Expressions
+
+```ebnf
+Expr1          ::=  `while' `(' Expr ')' {nl} Expr
+```
+
+The while loop expression `while ($e_1$) $e_2$` is typed and
+evaluated as if it was an application of `whileLoop ($e_1$) ($e_2$)` where
+the hypothetical function `whileLoop` is defined as follows.
+
+```scala
+def whileLoop(cond: => Boolean)(body: => Unit): Unit  =
+  if (cond) { body ; whileLoop(cond)(body) } else {}
+```
+
+
+## Do Loop Expressions
+
+```ebnf
+Expr1          ::=  `do' Expr [semi] `while' `(' Expr ')'
+```
+
+The do loop expression `do $e_1$ while ($e_2$)` is typed and
+evaluated as if it was the expression `($e_1$ ; while ($e_2$) $e_1$)`.
+A semicolon preceding the `while` symbol of a do loop expression is ignored.
+
+
+## For Comprehensions and For Loops
+
+```ebnf
+Expr1          ::=  `for' (`(' Enumerators `)' | `{' Enumerators `}') 
+                       {nl} [`yield'] Expr
+Enumerators    ::=  Generator {semi Generator}
+Generator      ::=  Pattern1 `<-' Expr {[semi] Guard | semi Pattern1 `=' Expr}
+Guard          ::=  `if' PostfixExpr
+```
+
+A for loop `for ($\mathit{enums}\,$) $e$` executes expression $e$
+for each binding generated by the enumerators $\mathit{enums}$.  A for
+comprehension `for ($\mathit{enums}\,$) yield $e$` evaluates
+expression $e$ for each binding generated by the enumerators $\mathit{enums}$
+and collects the results. An enumerator sequence always starts with a
+generator; this can be followed by further generators, value
+definitions, or guards.  A _generator_ `$p$ <- $e$`
+produces bindings from an expression $e$ which is matched in some way
+against pattern $p$. A _value definition_ `$p$ = $e$` 
+binds the value name $p$ (or several names in a pattern $p$) to
+the result of evaluating the expression $e$.  A _guard_
+`if $e$` contains a boolean expression which restricts
+enumerated bindings. The precise meaning of generators and guards is
+defined by translation to invocations of four methods: `map`,
+`withFilter`, `flatMap`, and `foreach`. These methods can
+be implemented in different ways for different carrier types.
+
+The translation scheme is as follows.  In a first step, every
+generator `$p$ <- $e$`, where $p$ is not [irrefutable](08-pattern-matching.html#patterns)
+for the type of $e$ is replaced by
+
+```scala
+$p$ <- $e$.withFilter { case $p$ => true; case _ => false }
+```
+
+Then, the following rules are applied repeatedly until all
+comprehensions have been eliminated.
+
+  - A for comprehension 
+    `for ($p$ <- $e\,$) yield $e'$` 
+    is translated to
+    `$e$.map { case $p$ => $e'$ }`.
+  - A for loop
+    `for ($p$ <- $e\,$) $e'$` 
+    is translated to
+    `$e$.foreach { case $p$ => $e'$ }`.
+  - A for comprehension
+
+    ``` 
+    for ($p$ <- $e$; $p'$ <- $e'; \ldots$) yield $e''$
+    ```
+
+    where `$\ldots$` is a (possibly empty)
+    sequence of generators, definitions, or guards,
+    is translated to
+
+    ``` 
+    $e$.flatMap { case $p$ => for ($p'$ <- $e'; \ldots$) yield $e''$ }
+    ```
+
+  - A for loop
+
+    ``` 
+    for ($p$ <- $e$; $p'$ <- $e'; \ldots$) $e''$
+    ```
+
+    where `$\ldots$` is a (possibly empty)
+    sequence of generators, definitions, or guards,
+    is translated to
+
+    ``` 
+    $e$.foreach { case $p$ => for ($p'$ <- $e'; \ldots$) $e''$ }
+    ```
+
+  - A generator `$p$ <- $e$` followed by a guard
+    `if $g$` is translated to a single generator 
+    `$p$ <- $e$.withFilter(($x_1 , \ldots , x_n$) => $g\,$)` where
+    $x_1 , \ldots , x_n$ are the free variables of $p$.
+
+  - A generator `$p$ <- $e$` followed by a value definition 
+    `$p'$ = $e'$` is translated to the following generator of pairs of values, where
+    $x$ and $x'$ are fresh names:
+
+    ``` 
+    ($p$, $p'$) <- for ($x @ p$ <- $e$) yield { val $x' @ p'$ = $e'$; ($x$, $x'$) }
+    ```
+
+
+###### Example
+The following code produces all pairs of numbers between $1$ and $n-1$
+whose sums are prime.
+
+```scala
+for  { i <- 1 until n
+       j <- 1 until i
+       if isPrime(i+j)
+} yield (i, j)
+```
+
+The for comprehension is translated to:
+
+```scala
+(1 until n)
+  .flatMap {
+     case i => (1 until i)
+       .withFilter { j => isPrime(i+j) }
+       .map { case j => (i, j) } }
+```
+
+###### Example
+For comprehensions can be used to express vector
+and matrix algorithms concisely.
+For instance, here is a function to compute the transpose of a given matrix:
+
+<!-- see test/files/run/t0421.scala -->
+
+```scala
+def transpose[A](xss: Array[Array[A]]) = {
+  for (i <- Array.range(0, xss(0).length)) yield
+    for (xs <- xss) yield xs(i)
+}
+```
+
+Here is a function to compute the scalar product of two vectors:
+
+```scala
+def scalprod(xs: Array[Double], ys: Array[Double]) = {
+  var acc = 0.0
+  for ((x, y) <- xs zip ys) acc = acc + x * y
+  acc
+}
+```
+
+Finally, here is a function to compute the product of two matrices.
+Compare with the [imperative version](#example-imperative-matrix-multiplication).
+
+```scala
+def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = {
+  val ysst = transpose(yss)
+  for (xs <- xss) yield
+    for (yst <- ysst) yield
+      scalprod(xs, yst)
+}
+```
+
+The code above makes use of the fact that `map`, `flatMap`,
+`withFilter`, and `foreach` are defined for instances of class
+`scala.Array`.
+
+
+## Return Expressions
+
+```ebnf
+Expr1      ::=  `return' [Expr]
+```
+
+A return expression `return $e$` must occur inside the body of some
+enclosing named method or function. The innermost enclosing named
+method or function in a source program, $f$, must have an explicitly declared result type,
+and the type of $e$ must conform to it.  
+The return expression
+evaluates the expression $e$ and returns its value as the result of
+$f$. The evaluation of any statements or
+expressions following the return expression is omitted. The type of 
+a return expression is `scala.Nothing`.
+
+The expression $e$ may be omitted.  The return expression
+`return` is type-checked and evaluated as if it was `return ()`.
+
+An `apply` method which is generated by the compiler as an
+expansion of an anonymous function does not count as a named function
+in the source program, and therefore is never the target of a return
+expression.
+
+Returning from a nested anonymous function is implemented by throwing
+and catching a `scala.runtime.NonLocalReturnException`.  Any
+exception catches between the point of return and the enclosing
+methods might see the exception.  A key comparison makes sure that
+these exceptions are only caught by the method instance which is
+terminated by the return.
+
+If the return expression is itself part of an anonymous function, it
+is possible that the enclosing instance of $f$ has already returned
+before the return expression is executed. In that case, the thrown
+`scala.runtime.NonLocalReturnException` will not be caught,
+and will propagate up the call stack.
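+
+For instance (a sketch; `firstNegative` is an illustrative name), the
+`return` below occurs inside an anonymous function passed to `foreach`; it is
+implemented by throwing an exception which the enclosing method catches:
+
+```scala
+def firstNegative(xs: List[Int]): Int = {   // explicit result type, as required
+  xs.foreach(x => if (x < 0) return x)
+  0
+}
+```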
+
+
+## Throw Expressions
+
+```ebnf
+Expr1      ::=  `throw' Expr
+```
+
+A throw expression `throw $e$` evaluates the expression
+$e$. The type of this expression must conform to
+`Throwable`.  If $e$ evaluates to an exception
+reference, evaluation is aborted with the thrown exception. If $e$
+evaluates to `null`, evaluation is instead aborted with a
+`NullPointerException`. If there is an active
+[`try` expression](#try-expressions) which handles the thrown
+exception, evaluation resumes with the handler; otherwise the thread
+executing the `throw` is aborted.  The type of a throw expression
+is `scala.Nothing`.
+
+
+## Try Expressions
+
+```ebnf
+Expr1 ::=  `try' `{' Block `}' [`catch' `{' CaseClauses `}'] 
+           [`finally' Expr]
+```
+
+A try expression is of the form `try { $b$ } catch $h$`
+where the handler $h$ is a 
+[pattern matching anonymous function](#pattern-matching-anonymous-functions)
+
+```scala
+{ case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ }
+```
+
+This expression is evaluated by evaluating the block
+$b$.  If evaluation of $b$ does not cause an exception to be
+thrown, the result of $b$ is returned. Otherwise the 
+handler $h$ is applied to the thrown exception.  
+If the handler contains a case matching the thrown exception,
+the first such case is invoked. If the handler contains
+no case matching the thrown exception, the exception is 
+re-thrown. 
+
+Let $\mathit{pt}$ be the expected type of the try expression.  The block
+$b$ is expected to conform to $\mathit{pt}$.  The handler $h$
+is expected to conform to type
+`scala.PartialFunction[scala.Throwable, $\mathit{pt}\,$]`.  The
+type of the try expression is the [weak least upper bound](03-types.html#weak-conformance)
+of the type of $b$
+and the result type of $h$.
+
+A try expression `try { $b$ } finally $e$` evaluates the block
+$b$.  If evaluation of $b$ does not cause an exception to be
+thrown, the expression $e$ is evaluated. If an exception is thrown
+during evaluation of $e$, the evaluation of the try expression is
+aborted with the thrown exception. If no exception is thrown during
+evaluation of $e$, the result of $b$ is returned as the
+result of the try expression. 
+
+If an exception is thrown during evaluation of $b$, the finally block
+$e$ is also evaluated. If another exception $e'$ is thrown
+during evaluation of $e$, evaluation of the try expression is
+aborted with the thrown exception. If no exception is thrown during
+evaluation of $e$, the original exception thrown in $b$ is
+re-thrown once evaluation of $e$ has completed.  The block
+$b$ is expected to conform to the expected type of the try
+expression. The finally expression $e$ is expected to conform to
+type `Unit`.
+
+A try expression `try { $b$ } catch $e_1$ finally $e_2$` 
+is a shorthand
+for  `try { try { $b$ } catch $e_1$ } finally $e_2$`.
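+
+For instance (an illustrative sketch; `parseInt` is a hypothetical method), a
+try expression with both a handler and a finally part:
+
+```scala
+def parseInt(s: String): Int =
+  try { s.trim.toInt }
+  catch { case _: NumberFormatException => 0 }   // the handler is a pattern matching anonymous function
+  finally { println("parse attempted") }         // evaluated whether or not an exception was thrown
+```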
+
+
+## Anonymous Functions
+
+```ebnf
+Expr            ::=  (Bindings | [`implicit'] id | `_') `=>' Expr
+ResultExpr      ::=  (Bindings | ([`implicit'] id | `_') `:' CompoundType) `=>' Block
+Bindings        ::=  `(' Binding {`,' Binding} `)'
+Binding         ::=  (id | `_') [`:' Type]
+```
+
+The anonymous function `($x_1$: $T_1 , \ldots , x_n$: $T_n$) => e` 
+maps parameters $x_i$ of types $T_i$ to a result given
+by expression $e$. The scope of each formal parameter
+$x_i$ is $e$. Formal parameters must have pairwise distinct names.
+
+If the expected type of the anonymous function is of the form
+`scala.Function$n$[$S_1 , \ldots , S_n$, $R\,$]`, the
+expected type of $e$ is $R$ and the type $T_i$ of any of the
+parameters $x_i$ can be omitted, in which
+case `$T_i$ = $S_i$` is assumed.
+If the expected type of the anonymous function is
+some other type, all formal parameter types must be explicitly given,
+and the expected type of $e$ is undefined. The type of the anonymous
+function
+is `scala.Function$n$[$S_1 , \ldots , S_n$, $T\,$]`,
+where $T$ is the [packed type](#expression-typing)
+of $e$. $T$ must be equivalent to a
+type which does not refer to any of the formal parameters $x_i$.
+
+The anonymous function is evaluated as the instance creation expression
+
+```scala
+new scala.Function$n$[$T_1 , \ldots , T_n$, $T$] {
+  def apply($x_1$: $T_1 , \ldots , x_n$: $T_n$): $T$ = $e$
+}
+```
+
+In the case of a single untyped formal parameter, 
+`($x\,$) => $e$` 
+can be abbreviated to `$x$ => $e$`. If an
+anonymous function `($x$: $T\,$) => $e$` with a single
+typed parameter appears as the result expression of a block, it can be
+abbreviated to `$x$: $T$ => $e$`.
+
+A formal parameter may also be a wildcard represented by an underscore `_`. 
+In that case, a fresh name for the parameter is chosen arbitrarily.
+
+A named parameter of an anonymous function may be optionally preceded
+by an `implicit` modifier. In that case the parameter is
+labeled [`implicit`](07-implicit-parameters-and-views.html#implicit-parameters-and-views); however the
+parameter section itself does not count as an implicit parameter
+section in the sense defined [here](07-implicit-parameters-and-views.html#implicit-parameters). Hence, arguments to
+anonymous functions always have to be given explicitly.
+
+###### Example
+Examples of anonymous functions:
+
+```scala
+x => x                             // The identity function
+
+f => g => x => f(g(x))             // Curried function composition
+
+(x: Int,y: Int) => x + y           // A summation function
+
+() => { count += 1; count }        // The function which takes an
+                                   // empty parameter list $()$,
+                                   // increments a non-local variable
+                                   // `count' and returns the new value.
+
+_ => 5                             // The function that ignores its argument
+                                   // and always returns 5.
+```
+
+
+### Placeholder Syntax for Anonymous Functions
+
+```ebnf
+SimpleExpr1  ::=  `_'
+```
+
+An expression (of syntactic category `Expr`)
+may contain embedded underscore symbols `_` at places where identifiers
+are legal. Such an expression represents an anonymous function where subsequent
+occurrences of underscores denote successive parameters.
+
+Define an _underscore section_ to be an expression of the form
+`_:$T$` where $T$ is a type, or else of the form `_`,
+provided the underscore does not appear as the expression part of a
+type ascription `_:$T$`.
+
+An expression $e$ of syntactic category `Expr` _binds_ an underscore section
+$u$, if the following two conditions hold: (1) $e$ properly contains $u$, and
+(2) there is no other expression of syntactic category `Expr` 
+which is properly contained in $e$ and which itself properly contains $u$.
+
+If an expression $e$ binds underscore sections $u_1 , \ldots , u_n$, in this order, it is equivalent to 
+the anonymous function `($u'_1$, ... $u'_n$) => $e'$`
+where each $u_i'$ results from $u_i$ by replacing the underscore with a fresh identifier and
+$e'$ results from $e$ by replacing each underscore section $u_i$ by $u_i'$.
+
+###### Example
+The anonymous functions in the left column use placeholder
+syntax. Each of these is equivalent to the anonymous function on its right.
+
+| | |
+|---------------------------|----------------------------|
+|`_ + 1`                    | `x => x + 1`               |
+|`_ * _`                    | `(x1, x2) => x1 * x2`      |
+|`(_: Int) * 2`             | `(x: Int) => (x: Int) * 2` |
+|`if (_) x else y`          | `z => if (z) x else y`     |
+|`_.map(f)`                 | `x => x.map(f)`            |
+|`_.map(_ + 1)`             | `x => x.map(y => y + 1)`   |
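+
+For illustration, the expansion applies to ordinary collection operations as follows:
+
+```scala
+val xs = List(1, 2, 3)
+val incremented = xs.map(_ + 1)         // expands to xs.map(x => x + 1)
+val total       = xs.reduceLeft(_ + _)  // expands to xs.reduceLeft((x1, x2) => x1 + x2)
+```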
+
+
+## Constant Expressions
+
+Constant expressions are expressions that the Scala compiler can evaluate to a constant.
+The definition of "constant expression" depends on the platform, but they
+include at least the expressions of the following forms:
+
+- A literal of a value class, such as an integer
+- A string literal
+- A class constructed with [`Predef.classOf`](12-the-scala-standard-library.html#the-predef-object)
+- An element of an enumeration from the underlying platform
+- A literal array, of the form
+  `Array$(c_1 , \ldots , c_n)$`,
+  where all of the $c_i$'s are themselves constant expressions
+- An identifier defined by a 
+  [constant value definition](04-basic-declarations-and-definitions.html#value-declarations-and-definitions).
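+
+For illustration, the right-hand sides below are sketches of constant expressions
+of the forms listed above:
+
+```scala
+final val MaxRetries = 3                 // integer literal in a constant value definition
+final val Greeting   = "hello"           // string literal
+val stringClass      = classOf[String]   // Predef.classOf
+val primes           = Array(2, 3, 5, 7) // literal array of constant expressions
+```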
+
+
+## Statements
+
+```ebnf
+BlockStat    ::=  Import
+               |  {Annotation} [‘implicit’ | ‘lazy’] Def
+               |  {Annotation} {LocalModifier} TmplDef
+               |  Expr1
+               |
+TemplateStat ::=  Import
+               |  {Annotation} {Modifier} Def
+               |  {Annotation} {Modifier} Dcl
+               |  Expr
+               | 
+```
+
+Statements occur as parts of blocks and templates.  A statement can be
+an import, a definition or an expression, or it can be empty.
+Statements used in the template of a class definition can also be
+declarations.  An expression that is used as a statement can have an
+arbitrary value type. An expression statement $e$ is evaluated by
+evaluating $e$ and discarding the result of the evaluation. 
+
+<!-- Generalize to implicit coercion? -->
+
+Block statements may be definitions which bind local names in the
+block. The only modifier allowed in all block-local definitions is
+`implicit`. When prefixing a class or object definition,
+modifiers `abstract`, `final`, and `sealed` are also
+permitted.
+
+Evaluation of a statement sequence entails evaluation of the
+statements in the order they are written.
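+
+For illustration, the block below is a sketch containing an import, block-local
+definitions (among them an `implicit` and a `lazy` one), an expression statement,
+and a final result expression; all names are invented:
+
+```scala
+def demo(): Int = {
+  import scala.math.max                             // import statement
+  implicit val factor: Int = 10                     // block-local `implicit` definition
+  lazy val expensive = { println("computed"); 21 }  // block-local `lazy` definition
+  def scaled(x: Int)(implicit k: Int) = x * k
+  println("side effect only")                       // expression statement; result discarded
+  max(scaled(2), expensive)                         // result expression of the block
+}
+```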
+
+
+## Implicit Conversions
+
+Implicit conversions can be applied to expressions whose type does not
+match their expected type, to qualifiers in selections, and to unapplied methods. The
+available implicit conversions are given in the next two sub-sections.
+
+We say a type $T$ is _compatible_ to a type $U$ if $T$ weakly conforms
+to $U$ after applying [eta-expansion](#eta-expansion) and 
+[view applications](07-implicit-parameters-and-views.html#views).
+
+### Value Conversions
+
+The following five implicit conversions can be applied to an
+expression $e$ which has some value type $T$ and which is type-checked with
+some expected type $\mathit{pt}$.
+
+###### Overloading Resolution
+If an expression denotes several possible members of a class, 
+[overloading resolution](#overloading-resolution)
+is applied to pick a unique member.
+
+
+###### Type Instantiation
+An expression $e$ of polymorphic type
+
+```scala
+[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$T$
+```
+
+which does not appear as the function part of
+a type application is converted to a type instance of $T$
+by determining with [local type inference](#local-type-inference)
+instance types `$T_1 , \ldots , T_n$` 
+for the type variables `$a_1 , \ldots , a_n$` and
+implicitly embedding $e$ in the [type application](#type-applications)
+`$e$[$T_1 , \ldots , T_n$]`.
+
+###### Numeric Widening
+If $e$ has a primitive number type which [weakly conforms](03-types.html#weak-conformance)
+to the expected type, it is widened to
+the expected type using one of the numeric conversion methods
+`toShort`, `toChar`, `toInt`, `toLong`,
+`toFloat`, `toDouble` defined [here](12-the-scala-standard-library.html#numeric-value-types).
+
+###### Numeric Literal Narrowing
+If the expected type is `Byte`, `Short` or `Char`, and
+the expression $e$ is an integer literal fitting in the range of that
+type, it is converted to the same literal in that type.
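+
+For illustration, widening and literal narrowing apply as follows:
+
+```scala
+val d: Double = 3       // numeric widening: the Int value 3 is widened via toDouble
+val b: Byte   = 100     // numeric literal narrowing: 100 fits in the range of Byte
+val c: Char   = 65      // narrowing of an integer literal to Char ('A')
+// val bad: Byte = 300  // rejected: 300 is outside the range of Byte
+```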
+
+###### Value Discarding
+If $e$ has some value type and the expected type is `Unit`,
+$e$ is converted to the expected type by embedding it in the 
+term `{ $e$; () }`.
+
+###### View Application
+If none of the previous conversions applies, and $e$'s type
+does not conform to the expected type $\mathit{pt}$, an attempt is made to convert
+$e$ to the expected type with a [view](07-implicit-parameters-and-views.html#views).
+
+###### Dynamic Member Selection
+If none of the previous conversions applies, and $e$ is a prefix
+of a selection $e.x$, and $e$'s type conforms to class `scala.Dynamic`,
+then the selection is rewritten according to the rules for 
+[dynamic member selection](#dynamic-member-selection).
+
+### Method Conversions
+
+The following four implicit conversions can be applied to methods
+which are not applied to some argument list.
+
+###### Evaluation
+A parameterless method $m$ of type `=> $T$` is always converted to
+type $T$ by evaluating the expression to which $m$ is bound.
+
+###### Implicit Application
+If the method takes only implicit parameters, implicit
+arguments are passed following the rules [here](07-implicit-parameters-and-views.html#implicit-parameters).
+
+###### Eta Expansion
+Otherwise, if the method is not a constructor,
+and the expected type $\mathit{pt}$ is a function type
+$(\mathit{Ts}') \Rightarrow T'$, [eta-expansion](#eta-expansion)
+is performed on the expression $e$.
+
+###### Empty Application
+Otherwise, if $e$ has method type $()T$, it is implicitly applied to the empty
+argument list, yielding $e()$.
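+
+For illustration, the following sketch (with invented method names) exercises the
+evaluation, eta-expansion and empty-application conversions:
+
+```scala
+def greeting: String = "hi"                 // parameterless method of type => String
+def twice(x: Int): Int = x * 2
+def stamp(): Long = System.currentTimeMillis()
+
+val s: String     = greeting                // evaluation of the parameterless method
+val f: Int => Int = twice                   // eta-expansion driven by the expected function type
+val t: Long       = stamp                   // empty application: implicitly applied to ()
+```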
+
+### Overloading Resolution
+
+If an identifier or selection $e$ references several members of a
+class, the context of the reference is used to identify a unique
+member.  The way this is done depends on whether or not $e$ is used as
+a function. Let $\mathscr{A}$ be the set of members referenced by $e$.
+
+Assume first that $e$ appears as a function in an application, as in
+`$e$($e_1 , \ldots , e_m$)`.  
+
+One first determines the set of functions that is potentially
+applicable based on the _shape_ of the arguments.
+
+The shape of an argument expression $e$, written  $\mathit{shape}(e)$, is
+a type that is defined as follows:
+
+- For a function expression `($p_1$: $T_1 , \ldots , p_n$: $T_n$) => $b$`:
+  `(Any $, \ldots ,$ Any) => $\mathit{shape}(b)$`, where `Any` occurs $n$ times
+  in the argument type.
+- For a named argument `$n$ = $e$`: $\mathit{shape}(e)$.
+- For all other expressions: `Nothing`.
+
+Let $\mathscr{B}$ be the set of alternatives in $\mathscr{A}$ that are 
+[_applicable_](#function-applications)
+to expressions $(e_1 , \ldots , e_n)$ of types
+$(\mathit{shape}(e_1) , \ldots , \mathit{shape}(e_n))$.
+If there is precisely one
+alternative in $\mathscr{B}$, that alternative is chosen.
+
+Otherwise, let $S_1 , \ldots , S_m$ be the vector of types obtained by
+typing each argument with an undefined expected type.  For every
+member $m$ in $\mathscr{B}$ one determines whether it is 
+applicable to expressions ($e_1 , \ldots , e_m$) of types $S_1
+, \ldots , S_m$.
+It is an error if none of the members in $\mathscr{B}$ is applicable. If there is one
+single applicable alternative, that alternative is chosen. Otherwise, let $\mathscr{CC}$
+be the set of applicable alternatives which don't employ any default argument
+in the application to $e_1 , \ldots , e_m$. It is again an error if $\mathscr{CC}$ is empty.
+Otherwise, one chooses the _most specific_ alternative among the alternatives
+in $\mathscr{CC}$, according to the following definition of being "as specific as", and
+"more specific than":
+
+<!--
+question: given
+  def f(x: Int)
+  val f: { def apply(x: Int) }
+  f(1) // the value is chosen in our current implementation
+ why?
+  - method is as specific as value, because value is applicable to method`s argument types (item 1)
+  - value is as specific as method (item 3, any other type is always as specific..)
+ so the method is not more specific than the value.
+-->
+
+- A parameterized method $m$ of type `($p_1:T_1, \ldots , p_n:T_n$)$U$` is _as specific as_ some other
+  member $m'$ of type $S$ if $m'$ is applicable to arguments
+  `($p_1 , \ldots , p_n\,$)` of
+  types $T_1 , \ldots , T_n$.
+- A polymorphic method of type
+  `[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$T$` is
+  as specific as some other member of type $S$ if $T$ is as 
+  specific as $S$ under the assumption that for
+  $i = 1 , \ldots , n$ each $a_i$ is an abstract type name
+  bounded from below by $L_i$ and from above by $U_i$.
+- A member of any other type is always as specific as a parameterized method
+  or a polymorphic method.
+- Given two members of types $T$ and $U$ which are 
+  neither parameterized nor polymorphic method types, the member of type $T$ is as specific as
+  the member of type $U$ if the existential dual of $T$ conforms to the existential dual of $U$. 
+  Here, the existential dual of a polymorphic type 
+  `[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$T$` is
+  `$T$ forSome { type $a_1$ >: $L_1$ <: $U_1$ $, \ldots ,$ type $a_n$ >: $L_n$ <: $U_n$}`.
+  The existential dual of every other type is the type itself.
+
+The _relative weight_ of an alternative $A$ over an alternative $B$ is a
+number from 0 to 2, defined as the sum of
+
+- 1 if $A$ is as specific as $B$, 0 otherwise, and
+- 1 if $A$ is defined in a class or object which is derived
+  from the class or object defining $B$, 0 otherwise.
+
+A class or object $C$ is _derived_ from a class or object $D$ if one of
+the following holds:
+
+- $C$ is a subclass of $D$, or
+- $C$ is a companion object of a class derived from $D$, or
+- $D$ is a companion object of a class from which $C$ is derived.
+
+An alternative $A$ is _more specific_ than an alternative $B$ if
+the relative weight of $A$ over $B$ is greater than the relative
+weight of $B$ over $A$.
+
+It is an error if there is no alternative in $\mathscr{CC}$ which is more
+specific than all other alternatives in $\mathscr{CC}$.
+
+Assume next that $e$ appears as a function in a type application, as
+in `$e$[$\mathit{targs}\,$]`. Then all alternatives in
+$\mathscr{A}$ which take the same number of type parameters as there are type
+arguments in $\mathit{targs}$ are chosen. It is an error if no such alternative exists.
+If there are several such alternatives, overloading resolution is
+applied again to the whole expression `$e$[$\mathit{targs}\,$]`.  
+
+Assume finally that $e$ does not appear as a function in either
+an application or a type application. If an expected type is given,
+let $\mathscr{B}$ be the set of those alternatives in $\mathscr{A}$ which are
+[compatible](#implicit-conversions) to it. Otherwise, let $\mathscr{B}$ be the same 
+as $\mathscr{A}$.
+We choose in this case the most specific alternative among all
+alternatives in $\mathscr{B}$. It is an error if there is no 
+alternative in $\mathscr{B}$ which is more specific than all other
+alternatives in $\mathscr{B}$.
+
+###### Example
+Consider the following definitions:
+
+```scala
+class A extends B {}
+def f(x: B, y: B) = $\ldots$
+def f(x: A, y: B) = $\ldots$
+val a: A
+val b: B
+```
+
+Then the application `f(b, b)` refers to the first
+definition of $f$ whereas the application `f(a, a)`
+refers to the second.  Assume now we add a third overloaded definition
+
+```scala
+def f(x: B, y: A) = $\ldots$
+```
+
+Then the application `f(a, a)` is rejected for being ambiguous, since
+no most specific applicable signature exists.
+
+
+### Local Type Inference
+
+Local type inference infers type arguments to be passed to expressions
+of polymorphic type. Say $e$ is of type [$a_1$ >: $L_1$ <: $U_1
+, \ldots , a_n$ >: $L_n$ <: $U_n$]$T$ and no explicit type parameters
+are given. 
+
+Local type inference converts this expression to a type
+application `$e$[$T_1 , \ldots , T_n$]`. The choice of the
+type arguments $T_1 , \ldots , T_n$ depends on the context in which
+the expression appears and on the expected type $\mathit{pt}$. 
+There are three cases.
+
+###### Case 1: Selections
+If the expression appears as the prefix of a selection with a name
+$x$, then type inference is _deferred_ to the whole expression
+$e.x$. That is, if $e.x$ has type $S$, it is now treated as having
+type [$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$S$,
+and local type inference is applied in turn to infer type arguments 
+for $a_1 , \ldots , a_n$, using the context in which $e.x$ appears.
+
+###### Case 2: Values
+If the expression $e$ appears as a value without being applied to
+value arguments, the type arguments are inferred by solving a
+constraint system which relates the expression's type $T$ with the
+expected type $\mathit{pt}$. Without loss of generality we can assume that
+$T$ is a value type; if it is a method type we apply 
+[eta-expansion](#eta-expansion) to convert it to a function type. Solving
+means finding a substitution $\sigma$ of types $T_i$ for the type
+parameters $a_i$ such that
+
+- None of inferred types $T_i$ is a [singleton type](03-types.html#singleton-types)
+- All type parameter bounds are respected, i.e.
+  $\sigma L_i <: \sigma a_i$ and $\sigma a_i <: \sigma U_i$ for $i = 1 , \ldots , n$.
+- The expression's type conforms to the expected type, i.e.
+  $\sigma T <: \sigma \mathit{pt}$.
+
+It is a compile time error if no such substitution exists.  
+If several substitutions exist, local-type inference will choose for
+each type variable $a_i$ a minimal or maximal type $T_i$ of the
+solution space.  A _maximal_ type $T_i$ will be chosen if the type
+parameter $a_i$ appears [contravariantly](04-basic-declarations-and-definitions.html#variance-annotations) in the
+type $T$ of the expression.  A _minimal_ type $T_i$ will be chosen
+in all other situations, i.e. if the variable appears covariantly,
+non-variantly or not at all in the type $T$. We call such a substitution
+an _optimal solution_ of the given constraint system for the type $T$.
+
+###### Case 3: Methods
+The last case applies if the expression
+$e$ appears in an application $e(d_1 , \ldots , d_m)$. In that case
+$T$ is a method type $(p_1:R_1 , \ldots , p_m:R_m)T'$. Without loss of
+generality we can assume that the result type $T'$ is a value type; if
+it is a method type we apply [eta-expansion](#eta-expansion) to
+convert it to a function type.  One computes first the types $S_j$ of
+the argument expressions $d_j$, using two alternative schemes.  Each
+argument expression $d_j$ is typed first with the expected type $R_j$,
+in which the type parameters $a_1 , \ldots , a_n$ are taken as type
+constants.  If this fails, the argument $d_j$ is typed instead with an
+expected type $R_j'$ which results from $R_j$ by replacing every type
+parameter in $a_1 , \ldots , a_n$ with _undefined_.
+
+In a second step, type arguments are inferred by solving a constraint
+system which relates the method's type with the expected type
+$\mathit{pt}$ and the argument types $S_1 , \ldots , S_m$. Solving the
+constraint system means
+finding a substitution $\sigma$ of types $T_i$ for the type parameters
+$a_i$ such that
+
+- None of inferred types $T_i$ is a [singleton type](03-types.html#singleton-types)
+- All type parameter bounds are respected, i.e. $\sigma L_i <: \sigma a_i$ and
+  $\sigma a_i <: \sigma U_i$ for $i = 1 , \ldots , n$.
+- The method's result type $T'$ conforms to the expected type, i.e. $\sigma T' <: \sigma \mathit{pt}$.
+- Each argument type [weakly conforms](03-types.html#weak-conformance)
+  to the corresponding formal parameter
+  type, i.e. $\sigma S_j <:_w \sigma R_j$ for $j = 1 , \ldots , m$.
+
+It is a compile time error if no such substitution exists.  If several
+solutions exist, an optimal one for the type $T'$ is chosen.
+
+All or parts of an expected type $\mathit{pt}$ may be undefined. The rules for
+[conformance](03-types.html#conformance) are extended to this case by adding
+the rule that for any type $T$ the following two statements are always
+true: $\mathit{undefined} <: T$ and $T <: \mathit{undefined}$
+
+It is possible that no minimal or maximal solution for a type variable
+exists, in which case a compile-time error results. Because $<:$ is a
+pre-order, it is also possible that a solution set has several optimal
+solutions for a type. In that case, a Scala compiler is free to pick
+any one of them.
+
+###### Example
+Consider the two methods:
+
+```scala
+def cons[A](x: A, xs: List[A]): List[A] = x :: xs
+def nil[B]: List[B] = Nil
+```
+
+and the definition
+
+```scala
+val xs = cons(1, nil)
+```
+
+The application of `cons` is typed with an undefined expected
+type. This application is completed by local type inference to
+`cons[Int](1, nil)`.
+Here, one uses the following
+reasoning to infer the type argument `Int` for the type
+parameter `A`:
+
+First, the argument expressions are typed. The first argument `1`
+has type `Int` whereas the second argument `nil` is
+itself polymorphic. One tries to type-check `nil` with an
+expected type `List[A]`. This leads to the constraint system
+
+```scala
+List[B?] <: List[A]
+```
+
+where we have labeled `B?` with a question mark to indicate
+that it is a variable in the constraint system.
+Because class `List` is covariant, the optimal
+solution of this constraint is
+
+```scala
+B = scala.Nothing
+```
+
+In a second step, one solves the following constraint system for
+the type parameter `A` of `cons`:
+
+```scala
+Int <: A?
+List[scala.Nothing] <: List[A?]
+List[A?] <: $\mathit{undefined}$
+```
+
+The optimal solution of this constraint system is
+
+```scala
+A = Int
+```
+
+so `Int` is the type inferred for `A`.
+
+
+###### Example
+
+Consider now the definition
+
+```scala
+val ys = cons("abc", xs)
+```
+
+where `xs` is of type `List[Int]`, as defined before.
+In this case local type inference proceeds as follows.
+
+First, the argument expressions are typed. The first argument
+`"abc"` has type `String`. The second argument `xs` is
+first tried to be typed with expected type `List[a]`. This fails,
+as `List[Int]` is not a subtype of `List[a]`. Therefore,
+the second strategy is tried; `xs` is now typed with expected type
+`List[$\mathit{undefined}$]`. This succeeds and yields the argument type
+`List[Int]`.
+
+In a second step, one solves the following constraint system for
+the type parameter `A` of `cons`:
+
+```scala
+String <: A?
+List[Int] <: List[A?]
+List[A?] <: $\mathit{undefined}$
+```
+
+The optimal solution of this constraint system is
+
+```scala
+A = scala.Any
+```
+
+so `scala.Any` is the type inferred for `A`.
+
+
+### Eta Expansion
+
+_Eta-expansion_ converts an expression of method type to an
+equivalent expression of function type. It proceeds in two steps.
+
+First, one identifies the maximal sub-expressions of $e$; let's
+say these are $e_1 , \ldots , e_m$. For each of these, one creates a
+fresh name $x_i$. Let $e'$ be the expression resulting from
+replacing every maximal subexpression $e_i$ in $e$ by the
+corresponding fresh name $x_i$. Second, one creates a fresh name $y_i$
+for every argument type $T_i$ of the method ($i = 1 , \ldots ,
+n$). The result of eta-conversion is then:
+
+```scala
+{ val $x_1$ = $e_1$; 
+  $\ldots$ 
+  val $x_m$ = $e_m$; 
+  ($y_1: T_1 , \ldots , y_n: T_n$) => $e'$($y_1 , \ldots , y_n$) 
+}
+```
+
+The behavior of [call-by-name parameters](#function-applications)
+is preserved under eta-expansion: the corresponding actual argument expression,
+a sub-expression of parameterless method type, is not evaluated in the expanded block.
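+
+For illustration, a method may be eta-expanded explicitly with a trailing `_`, or
+implicitly when the expected type is a function type:
+
+```scala
+def add(x: Int, y: Int): Int = x + y
+
+val g = add _                       // explicit eta-expansion, of type (Int, Int) => Int
+val h: (Int, Int) => Int = add      // eta-expansion triggered by the expected function type
+```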
+
+### Dynamic Member Selection
+
+The standard Scala library defines a trait `scala.Dynamic` which defines a member
+`applyDynamic` as follows:
+
+```scala
+package scala
+trait Dynamic {
+  def applyDynamic (name: String, args: Any*): Any
+  ...
+}
+```
+
+Assume a selection of the form $e.x$ where the type of $e$ conforms to `scala.Dynamic`.
+Further assuming the selection is not followed by any function arguments, such an expression can be rewritten under the conditions given [here](#implicit-conversions) to:
+
+```scala
+$e$.applyDynamic("$x$")
+```
+
+If the selection is followed by some arguments, e.g. $e.x(\mathit{args})$, then that expression
+is rewritten to
+
+```scala
+$e$.applyDynamic("$x$", $\mathit{args}$)
+```
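+
+As an illustrative sketch (the class and member names are invented), a receiver whose
+type conforms to `scala.Dynamic` can handle applied selections; the curried
+`applyDynamic` signature used below is one commonly used form:
+
+```scala
+import scala.language.dynamics
+
+class Command extends Dynamic {
+  def applyDynamic(name: String)(args: Any*): String =
+    name + args.mkString("(", ", ", ")")
+}
+
+val c = new Command
+c.copy("a.txt", "b.txt")   // rewritten to c.applyDynamic("copy")("a.txt", "b.txt")
+```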
+
diff --git a/spec/07-implicit-parameters-and-views.md b/spec/07-implicit-parameters-and-views.md
new file mode 100644
index 0000000..1a4d704
--- /dev/null
+++ b/spec/07-implicit-parameters-and-views.md
@@ -0,0 +1,441 @@
+---
+title: Implicit Parameters and Views
+layout: default
+chapter: 7
+---
+
+# Implicit Parameters and Views
+
+## The Implicit Modifier
+
+```ebnf
+LocalModifier  ::= ‘implicit’
+ParamClauses   ::= {ParamClause} [nl] ‘(’ ‘implicit’ Params ‘)’
+```
+
+Template members and parameters labeled with an `implicit`
+modifier can be passed to [implicit parameters](#implicit-parameters)
+and can be used as implicit conversions called [views](#views). 
+The `implicit` modifier is illegal for all
+type members, as well as for [top-level objects](09-top-level-definitions.html#packagings).
+
+### Example
+The following code defines an abstract class of monoids and
+two concrete implementations, `StringMonoid` and
+`IntMonoid`. The two implementations are marked implicit.
+
+```scala
+abstract class Monoid[A] extends SemiGroup[A] {
+  def unit: A
+  def add(x: A, y: A): A
+}
+object Monoids {
+  implicit object stringMonoid extends Monoid[String] {
+    def add(x: String, y: String): String = x.concat(y)
+    def unit: String = ""
+  }
+  implicit object intMonoid extends Monoid[Int] {
+    def add(x: Int, y: Int): Int = x + y
+    def unit: Int = 0
+  }
+}
+```
+
+
+## Implicit Parameters
+
+An implicit parameter list
+`(implicit $p_1$,$\ldots$,$p_n$)` of a method marks the parameters $p_1 , \ldots , p_n$ as
+implicit. A method or constructor can have only one implicit parameter
+list, and it must be the last parameter list given.
+
+A method with implicit parameters can be applied to arguments just
+like a normal method. In this case the `implicit` label has no
+effect. However, if such a method misses arguments for its implicit
+parameters, such arguments will be automatically provided.
+
+The actual arguments that are eligible to be passed to an implicit
+parameter of type $T$ fall into two categories. First, eligible are
+all identifiers $x$ that can be accessed at the point of the method
+call without a prefix and that denote an 
+[implicit definition](#the-implicit-modifier)
+or an implicit parameter.  An eligible
+identifier may thus be a local name, or a member of an enclosing
+template, or it may have been made accessible without a prefix
+through an [import clause](04-basic-declarations-and-definitions.html#import-clauses). If there are no eligible
+identifiers under this rule, then, second, eligible are also all
+`implicit` members of some object that belongs to the implicit
+scope of the implicit parameter's type, $T$.
+
+The _implicit scope_ of a type $T$ consists of all [companion modules](05-classes-and-objects.html#object-definitions) of classes that are associated with the implicit parameter's type.
+Here, we say a class $C$ is _associated_ with a type $T$ if it is a [base class](05-classes-and-objects.html#class-linearization) of some part of $T$.
+
+The _parts_ of a type $T$ are:
+
+- if $T$ is a compound type `$T_1$ with $\ldots$ with $T_n$`,
+  the union of the parts of $T_1 , \ldots , T_n$, as well as $T$ itself;
+- if $T$ is a parameterized type `$S$[$T_1 , \ldots , T_n$]`,
+  the union of the parts of $S$ and $T_1 , \ldots , T_n$;
+- if $T$ is a singleton type `$p$.type`,
+  the parts of the type of $p$;
+- if $T$ is a type projection `$S$#$U$`,
+  the parts of $S$ as well as $T$ itself;
+- if $T$ is a type alias, the parts of its expansion;
+- if $T$ is an abstract type, the parts of its upper bound;
+- if $T$ denotes an implicit conversion to a type with a method with argument types $T_1 , \ldots , T_n$ and result type $U$,
+  the union of the parts of $T_1 , \ldots , T_n$ and $U$;
+- the parts of quantified (existential or universal) and annotated types are defined as the parts of the underlying types (e.g., the parts of `T forSome { ... }` are the parts of `T`);
+- in all other cases, just $T$ itself.
+
+Note that packages are internally represented as classes with companion modules to hold the package members.
+Thus, implicits defined in a package object are part of the implicit scope of a type prefixed by that package.
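+
+For illustration (an invented example), an implicit value placed in the companion
+object of a class is found through the implicit scope of the parameter's type,
+without an import:
+
+```scala
+class Meters(val value: Double)
+object Meters {
+  // part of the implicit scope of every type that has Meters among its parts
+  implicit val ordering: Ordering[Meters] = Ordering.by((m: Meters) => m.value)
+}
+
+def smallest[A](xs: List[A])(implicit ord: Ordering[A]): A = xs.min
+
+smallest(List(new Meters(3.0), new Meters(1.5)))   // Meters.ordering is found implicitly
+```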
+
+If there are several eligible arguments which match the implicit
+parameter's type, a most specific one will be chosen using the rules
+of static [overloading resolution](06-expressions.html#overloading-resolution).
+If the parameter has a default argument and no implicit argument can
+be found the default argument is used.
+
+###### Example
+Assuming the classes from the [`Monoid` example](#example-monoid), here is a
+method which computes the sum of a list of elements using the
+monoid's `add` and `unit` operations.
+
+```scala
+def sum[A](xs: List[A])(implicit m: Monoid[A]): A =
+  if (xs.isEmpty) m.unit
+  else m.add(xs.head, sum(xs.tail))
+```
+
+The monoid in question is marked as an implicit parameter, and can therefore
+be inferred based on the type of the list.
+Consider for instance the call `sum(List(1, 2, 3))`
+in a context where `stringMonoid` and `intMonoid`
+are visible.  We know that the formal type parameter `A` of
+`sum` needs to be instantiated to `Int`. The only
+eligible object which matches the implicit formal parameter type
+`Monoid[Int]` is `intMonoid` so this object will
+be passed as implicit parameter.
+
+
+This discussion also shows that implicit parameters are inferred after
+any type arguments are [inferred](06-expressions.html#local-type-inference).
+
+Implicit methods can themselves have implicit parameters. An example
+is the following method from module `scala.List`, which injects
+lists into the `scala.Ordered` class, provided the element
+type of the list is also convertible to this type.
+
+```scala
+implicit def list2ordered[A](x: List[A])
+  (implicit elem2ordered: A => Ordered[A]): Ordered[List[A]] = 
+  ...
+```
+
+Assume in addition a method
+
+```scala
+implicit def int2ordered(x: Int): Ordered[Int]
+```
+
+that injects integers into the `Ordered` class.  We can now
+define a `sort` method over ordered lists:
+
+```scala
+def sort[A](xs: List[A])(implicit a2ordered: A => Ordered[A]) = ...
+```
+
+We can apply `sort` to a list of lists of integers 
+`yss: List[List[Int]]` 
+as follows:
+
+```scala
+sort(yss)
+```
+
+The call above will be completed by passing two nested implicit arguments:
+
+```scala
+sort(yss)(xs: List[Int] => list2ordered[Int](xs)(int2ordered)) .
+```
+
+The possibility of passing implicit arguments to implicit arguments
+raises the possibility of an infinite recursion.  For instance, one
+might try to define the following method, which injects _every_ type into the 
+`Ordered` class:
+
+```scala
+implicit def magic[A](x: A)(implicit a2ordered: A => Ordered[A]): Ordered[A] = 
+  a2ordered(x)
+```
+
+Now, if one tried to apply
+`sort` to an argument `arg` of a type that did not have
+another injection into the `Ordered` class, one would obtain an infinite
+expansion:
+
+```scala
+sort(arg)(x => magic(x)(x => magic(x)(x => ... )))
+```
+
+To prevent such infinite expansions, the compiler keeps track of 
+a stack of “open implicit types” for which implicit arguments are currently being
+searched. Whenever an implicit argument for type $T$ is searched, the
+“core type” of $T$ is added to the stack. Here, the _core type_
+of $T$ is $T$ with aliases expanded, top-level type [annotations](11-user-defined-annotations.html#user-defined-annotations) and
+[refinements](03-types.html#compound-types) removed, and occurrences
+of top-level existentially bound variables replaced by their upper
+bounds. The core type is removed from the stack once the search for
+the implicit argument either definitely fails or succeeds. Every time a
+core type is added to the stack, it is checked that this type does not
+dominate any of the other types in the set.
+
+Here, a core type $T$ _dominates_ a type $U$ if $T$ is 
+[equivalent](03-types.html#type-equivalence)
+to $U$, or if the top-level type constructors of $T$ and $U$ have a
+common element and $T$ is more complex than $U$.
+
+The set of _top-level type constructors_ $\mathit{ttcs}(T)$ of a type $T$ depends on the form of
+the type:
+
+- For a type designator,  $\mathit{ttcs}(p.c) ~=~ \{c\}$;
+- For a parameterized type,  $\mathit{ttcs}(p.c[\mathit{targs}]) ~=~ \{c\}$;
+- For a singleton type,  $\mathit{ttcs}(p.type) ~=~ \mathit{ttcs}(T)$, provided $p$ has type $T$;
+- For a compound type, `$\mathit{ttcs}(T_1$ with $\ldots$ with $T_n)$` $~=~ \mathit{ttcs}(T_1) \cup \ldots \cup \mathit{ttcs}(T_n)$.
+
+The _complexity_ $\mathit{complexity}(T)$ of a core type is an integer which also depends on the form of
+the type:
+
+- For a type designator, $\mathit{complexity}(p.c) ~=~ 1 + \mathit{complexity}(p)$
+- For a parameterized type, $\mathit{complexity}(p.c[\mathit{targs}]) ~=~ 1 + \Sigma \mathit{complexity}(\mathit{targs})$
+- For a singleton type denoting a package $p$, $\mathit{complexity}(p.type) ~=~ 0$
+- For any other singleton type, $\mathit{complexity}(p.type) ~=~ 1 + \mathit{complexity}(T)$, provided $p$ has type $T$;
+- For a compound type, `$\mathit{complexity}(T_1$ with $\ldots$ with $T_n)$` $= \Sigma\mathit{complexity}(T_i)$
+
+
+###### Example
+When typing `sort(xs)` for some list `xs` of type `List[List[List[Int]]]`,
+the sequence of types for
+which implicit arguments are searched is
+
+```scala
+List[List[Int]] => Ordered[List[List[Int]]],
+List[Int] => Ordered[List[Int]]
+Int => Ordered[Int]
+```
+
+All types share the common type constructor `scala.Function1`,
+but the complexity of each new type is lower than the complexity of the previous types.
+Hence, the code typechecks.
+
+
+###### Example
+Let `ys` be a list of some type which cannot be converted
+to `Ordered`. For instance:
+
+```scala
+val ys = List(new IllegalArgumentException, new ClassCastException, new Error)
+```
+
+Assume that the definition of `magic` above is in scope. Then the sequence
+of types for which implicit arguments are searched is
+
+```scala
+Throwable => Ordered[Throwable],
+Throwable => Ordered[Throwable],
+...
+```
+
+Since the second type in the sequence is equal to the first, the compiler
+will issue an error signalling a divergent implicit expansion.
+
+
+## Views
+
+Implicit parameters and methods can also define implicit conversions
+called views. A _view_ from type $S$ to type $T$ is
+defined by an implicit value which has function type
+`$S$=>$T$` or `(=>$S$)=>$T$` or by a method convertible to a value of that
+type.
+
+Views are applied in three situations:
+
+1.  If an expression $e$ is of type $T$, and $T$ does not conform to the
+    expression's expected type $\mathit{pt}$. In this case an implicit $v$ is
+    searched which is applicable to $e$ and whose result type conforms to
+    $\mathit{pt}$.  The search proceeds as in the case of implicit parameters,
+    where the implicit scope is the one of `$T$ => $\mathit{pt}$`. If
+    such a view is found, the expression $e$ is converted to
+    `$v$($e$)`. 
+1.  In a selection $e.m$ with $e$ of type $T$, if the selector $m$ does
+    not denote an accessible member of $T$.  In this case, a view $v$ is searched
+    which is applicable to $e$ and whose result contains a member named
+    $m$.  The search proceeds as in the case of implicit parameters, where
+    the implicit scope is the one of $T$.  If such a view is found, the
+    selection $e.m$ is converted to `$v$($e$).$m$`.
+1.  In a selection $e.m(\mathit{args})$ with $e$ of type $T$, if the selector
+    $m$ denotes some member(s) of $T$, but none of these members is applicable to the arguments
+    $\mathit{args}$. In this case a view $v$ is searched which is applicable to $e$ 
+    and whose result contains a method $m$ which is applicable to $\mathit{args}$.
+    The search proceeds as in the case of implicit parameters, where
+    the implicit scope is the one of $T$.  If such a view is found, the
+    selection $e.m$ is converted to `$v$($e$).$m(\mathit{args})$`.
+
+
+The implicit view, if it is found, can accept its argument $e$ as a
+call-by-value or as a call-by-name parameter. However, call-by-value
+implicits take precedence over call-by-name implicits.
+
+As for implicit parameters, overloading resolution is applied
+if there are several possible candidates (of either the call-by-value
+or the call-by-name category).
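+
+For illustration of the second situation, the invented view below supplies a member
+that `List[Int]` itself does not have:
+
+```scala
+import scala.language.implicitConversions
+
+class RichCount(xs: List[Int]) {
+  def countEven: Int = xs.count(_ % 2 == 0)
+}
+implicit def listToRichCount(xs: List[Int]): RichCount = new RichCount(xs)
+
+List(1, 2, 3, 4).countEven   // converted to listToRichCount(List(1, 2, 3, 4)).countEven
+```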
+
+### Example
+Class `scala.Ordered[A]` contains a method
+
+```scala
+  def <= [B >: A](that: B)(implicit b2ordered: B => Ordered[B]): Boolean .
+```
+
+Assume two lists `xs` and `ys` of type `List[Int]`
+and assume that the `list2ordered` and `int2ordered`
+methods defined [here](#implicit-parameters) are in scope.
+Then the operation
+
+```scala
+  xs <= ys
+```
+
+is legal, and is expanded to:
+
+```scala
+  list2ordered(xs)(int2ordered).<=
+    (ys)
+    (xs => list2ordered(xs)(int2ordered))
+```
+
+The first application of `list2ordered` converts the list
+`xs` to an instance of class `Ordered`, whereas the second
+occurrence is part of an implicit parameter passed to the `<=`
+method.
+
+
+## Context Bounds and View Bounds
+
+```ebnf
+  TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] 
+                {‘<%’ Type} {‘:’ Type}
+```
+
+A type parameter $A$ of a method or non-trait class may have one or more view
+bounds `$A$ <% $T$`. In this case the type parameter may be
+instantiated to any type $S$ which is convertible by application of a 
+view to the bound $T$.
+
+A type parameter $A$ of a method or non-trait class may also have one
+or more context bounds `$A$ : $T$`. In this case the type parameter may be
+instantiated to any type $S$ for which _evidence_ exists at the
+instantiation point that $S$ satisfies the bound $T$. Such evidence
+consists of an implicit value with type $T[S]$.
+
+A method or class containing type parameters with view or context bounds is treated as being
+equivalent to a method with implicit parameters. Consider first the case of a
+single parameter with view and/or context bounds such as:
+
+```scala
+def $f$[$A$ <% $T_1$ ... <% $T_m$ : $U_1$ ... : $U_n$]($\mathit{ps}$): $R$ = ...
+```
+
+Then the method definition above is expanded to
+
+```scala
+def $f$[$A$]($\mathit{ps}$)(implicit $v_1$: $A$ => $T_1$, ..., $v_m$: $A$ => $T_m$,
+                       $w_1$: $U_1$[$A$], ..., $w_n$: $U_n$[$A$]): $R$ = ...
+```
+
+where the $v_i$ and $w_j$ are fresh names for the newly introduced implicit parameters. These
+parameters are called _evidence parameters_.
+
+If a class or method has several view- or context-bounded type parameters, each
+such type parameter is expanded into evidence parameters in the order
+they appear and all the resulting evidence parameters are concatenated
+in one implicit parameter section.  Since traits do not take
+constructor parameters, this translation does not work for them.
+Consequently, type-parameters in traits may not be view- or context-bounded.
+Also, a method or class with view- or context bounds may not define any
+additional implicit parameters.
+
+###### Example
+The `<=` method from the [`Ordered` example](#example-ordered) can be declared
+more concisely as follows:
+
+```scala
+def <= [B >: A <% Ordered[B]](that: B): Boolean
+```
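+
+Analogously, a single context bound expands into one evidence parameter; an
+illustrative sketch (`maxOf` is an invented method):
+
+```scala
+def maxOf[A: Ordering](xs: List[A]): A = xs.max
+// treated as equivalent to (with a compiler-chosen fresh name for the evidence):
+// def maxOf[A](xs: List[A])(implicit ev: Ordering[A]): A = xs.max
+
+maxOf(List(3, 1, 2))   // the evidence argument Ordering[Int] is supplied implicitly
+```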
+
+## Manifests
+
+
+Manifests are type descriptors that can be automatically generated by
+the Scala compiler as arguments to implicit parameters. The Scala
+standard library contains a hierarchy of four manifest classes, 
+with `OptManifest`
+at the top. Their signatures follow the outline below.
+
+```scala
+trait OptManifest[+T]
+object NoManifest extends OptManifest[Nothing]
+trait ClassManifest[T] extends OptManifest[T]
+trait Manifest[T] extends ClassManifest[T]
+```
+
+If an implicit parameter of a method or constructor is of a subtype $M[T]$ of
+class `OptManifest[T]`, _a manifest is determined for $M[S]$_,
+according to the following rules.
+
+First, if there is already an implicit argument that matches $M[T]$, this
+argument is selected.
+
+Otherwise, let $\mathit{Mobj}$ be the companion object `scala.reflect.Manifest`
+if $M$ is trait `Manifest`, or be
+the companion object `scala.reflect.ClassManifest` otherwise. Let $M'$ be the trait
+`Manifest` if $M$ is trait `Manifest`, or be the trait `OptManifest` otherwise.  
+Then the following rules apply.
+
+1.  If $T$ is a value class or one of the classes `Any`, `AnyVal`, `Object`,
+    `Null`, or `Nothing`,
+    a manifest for it is generated by selecting
+    the corresponding manifest value `Manifest.$T$`, which exists in the
+    `Manifest` module.
+1.  If $T$ is an instance of `Array[$S$]`, a manifest is generated
+    with the invocation `$\mathit{Mobj}$.arrayType[S](m)`, where $m$ is the manifest
+    determined for $M[S]$.
+1.  If $T$ is some other class type $S$#$C[U_1, \ldots, U_n]$ where the prefix
+    type $S$ cannot be statically determined from the class $C$,
+    a manifest is generated with the invocation `$\mathit{Mobj}$.classType[T]($m_0$, classOf[T], $ms$)`
+    where $m_0$ is the manifest determined for $M'[S]$ and $ms$ are the
+    manifests determined for $M'[U_1], \ldots, M'[U_n]$.
+1.  If $T$ is some other class type with type arguments $U_1 , \ldots , U_n$,
+    a manifest is generated 
+    with the invocation `$\mathit{Mobj}$.classType[T](classOf[T], $ms$)`
+    where $ms$ are the
+    manifests determined for $M'[U_1] , \ldots , M'[U_n]$.
+1.  If $T$ is a singleton type `$p$.type`, a manifest is generated with
+    the invocation `$\mathit{Mobj}$.singleType[T]($p$)`.
+1.  If $T$ is a refined type $T' \{ R \}$, a manifest is generated for $T'$.
+    (That is, refinements are never reflected in manifests).
+1.  If $T$ is an intersection type
+    `$T_1$ with $, \ldots ,$ with $T_n$`
+    where $n > 1$, the result depends on whether a full manifest is
+    to be determined or not. 
+    If $M$ is trait `Manifest`, then
+    a manifest is generated with the invocation
+    `Manifest.intersectionType[T]($ms$)` where $ms$ are the manifests
+    determined for $M[T_1] , \ldots , M[T_n]$.
+    Otherwise, if $M$ is trait `ClassManifest`, 
+    then a manifest is generated for the [intersection dominator](03-types.html#type-erasure)
+    of the types $T_1 , \ldots , T_n$.
+1.  If $T$ is some other type, then if $M$ is trait `OptManifest`,
+    a manifest is generated from the designator `scala.reflect.NoManifest`.
+    If $M$ is a type different from `OptManifest`, a static error results.
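+
+For illustration (an invented example), a method with an implicit `Manifest`
+parameter receives a compiler-generated manifest at each call site:
+
+```scala
+def describe[A](x: A)(implicit m: Manifest[A]): String =
+  x.toString + " : " + m.toString
+
+describe(42)               // the compiler supplies Manifest.Int
+describe(List("a", "b"))   // a manifest is generated for List[String]
+```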
+
diff --git a/spec/08-pattern-matching.md b/spec/08-pattern-matching.md
new file mode 100644
index 0000000..7b4d070
--- /dev/null
+++ b/spec/08-pattern-matching.md
@@ -0,0 +1,722 @@
+---
+title: Pattern Matching
+layout: default
+chapter: 8
+---
+
+# Pattern Matching
+
+## Patterns
+
+```ebnf
+  Pattern         ::=  Pattern1 { ‘|’ Pattern1 }
+  Pattern1        ::=  varid ‘:’ TypePat
+                    |  ‘_’ ‘:’ TypePat
+                    |  Pattern2
+  Pattern2        ::=  varid [‘@’ Pattern3]
+                    |  Pattern3
+  Pattern3        ::=  SimplePattern 
+                    |  SimplePattern {id [nl] SimplePattern}
+  SimplePattern   ::=  ‘_’
+                    |  varid
+                    |  Literal
+                    |  StableId
+                    |  StableId ‘(’ [Patterns] ‘)’
+                    |  StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ‘*’ ‘)’
+                    |  ‘(’ [Patterns] ‘)’
+                    |  XmlPattern
+  Patterns        ::=  Pattern {‘,’ Patterns}
+```
+
+A pattern is built from constants, constructors, variables and type
+tests. Pattern matching tests whether a given value (or sequence of values)
+has the shape defined by a pattern, and, if it does, binds the
+variables in the pattern to the corresponding components of the value
+(or sequence of values).  The same variable name may not be bound more
+than once in a pattern.
+
+###### Example
+Some examples of patterns are:
+ 1.  The pattern `ex: IOException` matches all instances of class
+        `IOException`, binding variable `ex` to the instance.
+ 1.  The pattern `Some(x)` matches values of the form `Some($v$)`,
+        binding `x` to the argument value $v$ of the `Some` constructor.
+ 1.  The pattern `(x, _)` matches pairs of values, binding `x` to
+        the first component of the pair. The second component is matched
+        with a wildcard pattern.
+ 1.  The pattern `x :: y :: xs` matches lists of length $\geq 2$,
+        binding `x` to the list's first element, `y` to the list's
+        second element, and `xs` to the remainder.
+ 1.  The pattern `1 | 2 | 3` matches the integers between 1 and 3.
+
+Pattern matching is always done in a context which supplies an
+expected type of the pattern. We distinguish the following kinds of
+patterns.
+
+### Variable Patterns
+
+```ebnf
+  SimplePattern   ::=  `_'
+                    |  varid
+```
+
+A variable pattern $x$ is a simple identifier which starts with a
+lower case letter.  It matches any value, and binds the variable name
+to that value.  The type of $x$ is the expected type of the pattern as
+given from outside.  A special case is the wild-card pattern $\_$
+which is treated as if it was a fresh variable on each occurrence.
+
+### Typed Patterns
+
+
+```ebnf
+  Pattern1        ::=  varid `:' TypePat
+                    |  `_' `:' TypePat
+```
+
+A typed pattern $x: T$ consists of a pattern variable $x$ and a
+type pattern $T$.  The type of $x$ is the type pattern $T$, where 
+each type variable and wildcard is replaced by a fresh, unknown type.
+This pattern matches any value matched by the [type pattern](#type-patterns) 
+$T$; it binds the variable name to
+that value.  
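+
+For illustration, the invented method below uses typed patterns to discriminate on
+the run-time type of its argument:
+
+```scala
+def describe(v: Any): String = v match {
+  case s: String => "a string of length " + s.length   // typed pattern, binds s
+  case _: Int    => "an integer"                        // typed pattern with a wildcard
+  case _         => "something else"
+}
+```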
+
+### Pattern Binders
+
+```ebnf
+  Pattern2        ::=  varid `@' Pattern3
+```
+
+A pattern binder `$x$@$p$` consists of a pattern variable $x$ and a 
+pattern $p$. The type of the variable $x$ is the static type $T$ of the pattern $p$.
+This pattern matches any value $v$ matched by the pattern $p$, 
+provided the run-time type of $v$ is also an instance of $T$, 
+and it binds the variable name to that value.
+
+### Literal Patterns
+
+```ebnf
+  SimplePattern   ::=  Literal
+```
+
+A literal pattern $L$ matches any value that is equal (in terms of
+$==$) to the literal $L$. The type of $L$ must conform to the
+expected type of the pattern.
+
+### Stable Identifier Patterns
+
+```ebnf
+  SimplePattern   ::=  StableId
+```
+
+A stable identifier pattern is a [stable identifier](03-types.html#paths) $r$.
+The type of $r$ must conform to the expected
+type of the pattern. The pattern matches any value $v$ such that
+`$r$ == $v$` (see [here](12-the-scala-standard-library.html#root-classes)).
+
+To resolve the syntactic overlap with a variable pattern, a
+stable identifier pattern may not be a simple name starting with a lower-case
+letter. However, it is possible to enclose such a variable name in
+backquotes; then it is treated as a stable identifier pattern.
+
+###### Example
+Consider the following function definition:
+
+```scala
+def f(x: Int, y: Int) = x match {
+  case y => ...
+}
+```
+
+Here, `y` is a variable pattern, which matches any value.
+If we wanted to turn the pattern into a stable identifier pattern, this
+can be achieved as follows:
+
+```scala
+def f(x: Int, y: Int) = x match {
+  case `y` => ...
+}
+```
+
+Now, the pattern matches the `y` parameter of the enclosing function `f`.
+That is, the match succeeds only if the `x` argument and the `y`
+argument of `f` are equal.
+
+### Constructor Patterns
+
+```ebnf
+SimplePattern   ::=  StableId `(' [Patterns] `)'
+```
+
+A constructor pattern is of the form $c(p_1 , \ldots , p_n)$ where $n
+\geq 0$. It consists of a stable identifier $c$, followed by element
+patterns $p_1 , \ldots , p_n$. The constructor $c$ is a simple or
+qualified name which denotes a [case class](05-classes-and-objects.html#case-classes).
+If the case class is monomorphic, then it
+must conform to the expected type of the pattern, and the formal
+parameter types of $c$'s [primary constructor](05-classes-and-objects.html#class-definitions)
+are taken as the expected types of the element patterns $p_1, \ldots ,
+p_n$.  If the case class is polymorphic, then its type parameters are
+instantiated so that the instantiation of $c$ conforms to the expected
+type of the pattern. The instantiated formal parameter types of $c$'s
+primary constructor are then taken as the expected types of the
+component patterns $p_1, \ldots , p_n$.  The pattern matches all
+objects created from constructor invocations $c(v_1 , \ldots , v_n)$
+where each element pattern $p_i$ matches the corresponding value
+$v_i$.
+
+A special case arises when $c$'s formal parameter types end in a
+repeated parameter. This is further discussed [here](#pattern-sequences).
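+
+For illustration (an invented example):
+
+```scala
+sealed abstract class Shape
+case class Circle(radius: Double) extends Shape
+case class Rect(width: Double, height: Double) extends Shape
+
+def area(s: Shape): Double = s match {
+  case Circle(r)  => math.Pi * r * r   // constructor pattern, binds r
+  case Rect(w, h) => w * h             // constructor pattern, binds w and h
+}
+```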
+
+### Tuple Patterns
+
+```ebnf
+  SimplePattern   ::=  `(' [Patterns] `)'
+```
+
+A tuple pattern `($p_1 , \ldots , p_n$)` is an alias
+for the constructor pattern `scala.Tuple$n$($p_1 , \ldots , p_n$)`, 
+where $n \geq 2$. The empty tuple
+`()` is the unique value of type `scala.Unit`.
+
+### Extractor Patterns
+
+```ebnf
+  SimplePattern   ::=  StableId `(' [Patterns] `)'
+```
+
+An extractor pattern $x(p_1 , \ldots , p_n)$ where $n \geq 0$ is of
+the same syntactic form as a constructor pattern. However, instead of
+a case class, the stable identifier $x$ denotes an object which has a
+member method named `unapply` or `unapplySeq` that matches
+the pattern.
+
+An `unapply` method in an object $x$ _matches_ the pattern
+$x(p_1 , \ldots , p_n)$ if it takes exactly one argument and one of
+the following applies:
+
+* $n=0$ and `unapply`'s result type is `Boolean`. In this case
+  the extractor pattern matches all values $v$ for which 
+  `$x$.unapply($v$)` yields `true`.
+* $n=1$ and `unapply`'s result type is `Option[$T$]`, for some
+  type $T$.  In this case, the (only) argument pattern $p_1$ is typed in
+  turn with expected type $T$.  The extractor pattern matches then all
+  values $v$ for which `$x$.unapply($v$)` yields a value of form
+  `Some($v_1$)`, and $p_1$ matches $v_1$.
+* $n>1$ and `unapply`'s result type is 
+  `Option[($T_1 , \ldots , T_n$)]`, for some
+  types $T_1 , \ldots , T_n$.  In this case, the argument patterns $p_1
+  , \ldots , p_n$ are typed in turn with expected types $T_1 , \ldots ,
+  T_n$.  The extractor pattern matches then all values $v$ for which
+  `$x$.unapply($v$)` yields a value of form
+  `Some(($v_1 , \ldots , v_n$))`, and each pattern
+  $p_i$ matches the corresponding value $v_i$.
+
+An `unapplySeq` method in an object $x$ matches the pattern
+$x(q_1 , \ldots , q_m, p_1 , \ldots , p_n)$ if it takes exactly one argument
+and its result type is of the form `Option[($T_1 , \ldots , T_m$, Seq[S])]` (if `m = 0`, the type `Option[Seq[S]]` is also accepted).
+This case is further discussed [below](#pattern-sequences).
+
+###### Example
+The `Predef` object contains a definition of an
+extractor object `Pair`:
+
+```scala
+object Pair {
+  def apply[A, B](x: A, y: B) = Tuple2(x, y)
+  def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x)
+}
+```
+
+This means that the name `Pair` can be used in place of `Tuple2` for tuple
+formation as well as for deconstruction of tuples in patterns.
+Hence, the following is possible:
+
+```scala
+val x = (1, 2)
+val y = x match {
+  case Pair(i, s) => Pair(s + i, i * i)
+}
+```
+
+### Pattern Sequences
+
+```ebnf
+SimplePattern ::= StableId `(' [Patterns `,'] [varid `@'] `_' `*' `)'
+```
+
+A pattern sequence $p_1 , \ldots , p_n$ appears in two contexts.
+First, in a constructor pattern $c(q_1 , \ldots , q_m, p_1 , \ldots , p_n)$, where $c$ is a case class which has $m+1$ primary constructor parameters,  ending in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type `S*`.
+Second, in an extractor pattern $x(q_1 , \ldots , q_m, p_1 , \ldots , p_n)$ if the extractor object $x$ does not have an `unapply` method,
+but it does define an `unapplySeq` method with a result type conforming to `Option[(T_1, ... , T_m, Seq[S])]` (if `m = 0`, the type `Option[Seq[S]]` is also accepted). The expected type for the patterns $p_i$ is $S$.
+
+The last pattern in a pattern sequence may be a _sequence wildcard_ `_*`. 
+Each element pattern $p_i$ is type-checked with
+$S$ as expected type, unless it is a sequence wildcard. If a final
+sequence wildcard is present, the pattern matches all values $v$ that
+are sequences which start with elements matching patterns
+$p_1 , \ldots , p_{n-1}$.  If no final sequence wildcard is given, the
+pattern matches all values $v$ that are sequences of
+length $n$ which consist of elements matching patterns $p_1 , \ldots ,
+p_n$.
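+
+For illustration, the invented extractor below defines `unapplySeq`, so patterns
+built from it may end in a sequence wildcard:
+
+```scala
+object Words {
+  // hypothetical extractor: yields the whitespace-separated words of a string
+  def unapplySeq(s: String): Option[Seq[String]] = Some(s.split("\\s+").toSeq)
+}
+
+"make it so" match {
+  case Words(first, rest @ _*) => first + " / " + rest.mkString(" ")   // "make / it so"
+}
+```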
+
+### Infix Operation Patterns
+
+```ebnf
+  Pattern3  ::=  SimplePattern {id [nl] SimplePattern}
+```
+
+An infix operation pattern $p;\mathit{op};q$ is a shorthand for the
+constructor or extractor pattern $\mathit{op}(p, q)$.  The precedence and
+associativity of operators in patterns is the same as in 
+[expressions](06-expressions.html#prefix-infix-and-postfix-operations).
+
+An infix operation pattern $p;\mathit{op};(q_1 , \ldots , q_n)$ is a
+shorthand for the constructor or extractor pattern $\mathit{op}(p, q_1
+, \ldots , q_n)$.
+
+### Pattern Alternatives
+
+```ebnf
+  Pattern   ::=  Pattern1 { `|' Pattern1 }
+```
+
+A pattern alternative `$p_1$ | $\ldots$ | $p_n$`
+consists of a number of alternative patterns $p_i$. All alternative
+patterns are type checked with the expected type of the pattern. They
+may not bind variables other than wildcards. The alternative pattern
+matches a value $v$ if at least one of its alternatives matches $v$.
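+
+For illustration (an invented example):
+
+```scala
+def isVowel(c: Char): Boolean = c match {
+  case 'a' | 'e' | 'i' | 'o' | 'u' => true   // alternatives; only wildcards may be bound
+  case _                           => false
+}
+```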
+
+### XML Patterns
+
+XML patterns are treated [here](10-xml-expressions-and-patterns.html#xml-patterns).
+
+### Regular Expression Patterns
+
+Regular expression patterns have been discontinued in Scala from version 2.0.
+
+Later versions of Scala provide a much simplified version of regular
+expression patterns that cover most scenarios of non-text sequence
+processing.  A _sequence pattern_ is a pattern that stands in a
+position where either (1) a pattern of a type `T` which is
+conforming to
+`Seq[A]` for some `A` is expected, or (2) a case
+class constructor that has an iterated formal parameter
+`A*`.  A wildcard star pattern `_*` in the
+rightmost position stands for arbitrarily long sequences. It can be
+bound to variables using `@`, as usual, in which case the variable will have the
+type `Seq[A]`.
+
+### Irrefutable Patterns
+
+A pattern $p$ is _irrefutable_ for a type $T$, if one of the following applies:
+
+1.  $p$ is a variable pattern,
+1.  $p$ is a typed pattern $x: T'$, and $T <: T'$,
+1.  $p$ is a constructor pattern $c(p_1 , \ldots , p_n)$, the type $T$
+    is an instance of class $c$, the [primary constructor](05-classes-and-objects.html#class-definitions)
+    of type $T$ has argument types $T_1 , \ldots , T_n$, and each $p_i$ is 
+    irrefutable for $T_i$.
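+
+Irrefutable patterns appear, for instance, on the left-hand side of value
+definitions; an illustrative sketch:
+
+```scala
+case class Point(x: Int, y: Int)
+
+val (a, b)        = (1, "one")    // tuple pattern, irrefutable for (Int, String)
+val Point(px, py) = Point(3, 4)   // constructor pattern, irrefutable for Point
+```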
+
+## Type Patterns
+
+```ebnf
+  TypePat           ::=  Type
+```
+
+Type patterns consist of types, type variables, and wildcards. 
+A type pattern $T$ is of one of the following  forms:
+
+* A reference to a class $C$, $p.C$, or `$T$#$C$`.  This
+  type pattern matches any non-null instance of the given class. 
+  Note that the prefix of the class, if it is given, is relevant for determining
+  class instances. For instance, the pattern $p.C$ matches only
+  instances of classes $C$ which were created with the path $p$ as
+  prefix.
+
+  The bottom types `scala.Nothing` and `scala.Null` cannot
+  be used as type patterns, because they would match nothing in any case.  
+
+* A singleton type `$p$.type`. This type pattern matches only the value
+  denoted by the path $p$ (that is, a pattern match involves a
+  comparison of the matched value with $p$ using method `eq` in class
+  `AnyRef`).
+* A compound type pattern `$T_1$ with $\ldots$ with $T_n$` where each $T_i$ is a
+  type pattern. This type pattern matches all values that are matched by each of
+  the type patterns $T_i$.
+
+* A parameterized type pattern $T[a_1 , \ldots , a_n]$, where the $a_i$
+  are type variable patterns or wildcards $\_$. 
+  This type pattern matches all values which match $T$ for
+  some arbitrary instantiation of the type variables and wildcards. The
+  bounds or alias type of these type variable are determined as
+  described [here](#type-parameter-inference-in-patterns).
+
+* A parameterized type pattern `scala.Array$[T_1]$`, where
+  $T_1$ is a type pattern. This type pattern matches any non-null instance
+  of type `scala.Array$[U_1]$`, where $U_1$ is a type matched by $T_1$.
+
+
+Types which are not of one of the forms described above are also 
+accepted as type patterns. However, such type patterns will be translated to their
+[erasure](03-types.html#type-erasure).  The Scala
+compiler will issue an "unchecked" warning for these patterns to
+flag the possible loss of type-safety.
+
+A _type variable pattern_ is a simple identifier which starts with
+a lower case letter.
+
+## Type Parameter Inference in Patterns
+
+Type parameter inference is the process of finding bounds for the
+bound type variables in a typed pattern or constructor
+pattern. Inference takes into account the expected type of the
+pattern.
+
+
+### Type parameter inference for typed patterns.
+
+Assume a typed pattern $p: T'$. Let $T$ result from $T'$ where all wildcards in
+$T'$ are renamed to fresh variable names.  Let $a_1 , \ldots , a_n$ be
+the type variables in $T$. These type variables are considered bound
+in the pattern. Let the expected type of the pattern be $\mathit{pt}$.
+
+Type parameter inference constructs first a set of subtype constraints over
+the type variables $a_i$. The initial constraints set $\mathcal{C}_0$ reflects
+just the bounds of these type variables. That is, assuming $T$ has
+bound type variables $a_1 , \ldots , a_n$ which correspond to class
+type parameters $a'_1 , \ldots , a'_n$ with lower bounds $L_1, \ldots , L_n$
+and upper bounds $U_1 , \ldots , U_n$, $\mathcal{C}_0$ contains the constraints 
+
+|             |      |               |                        |
+|-------------|------|---------------|------------------------|
+|$a_i$        | $<:$ | $\sigma U_i$  | $(i = 1, \ldots , n)$  |
+|$\sigma L_i$ | $<:$ | $a_i$         | $(i = 1 , \ldots , n)$ |
+
+
+where $\sigma$ is the substitution $[a'_1 := a_1 , \ldots , a'_n :=
+a_n]$.
+
+The set $\mathcal{C}_0$ is then augmented by further subtype constraints. There are two
+cases.
+
+###### Case 1
+If there exists a substitution $\sigma$ over the type variables $a_i , \ldots , a_n$ such that $\sigma T$ conforms to $\mathit{pt}$, one determines the weakest subtype constraints $\mathcal{C}_1$ over the type variables $a_1, \ldots , a_n$ such that $\mathcal{C}_0 \wedge \mathcal{C}_1$ implies that $T$ conforms to $\mathit{pt}$.
+
+###### Case 2
+Otherwise, if $T$ cannot be made to conform to $\mathit{pt}$ by
+instantiating its type variables, one determines all type variables in
+$\mathit{pt}$ which are defined as type parameters of a method enclosing
+the pattern. Let the set of such type parameters be $b_1 , \ldots ,
+b_m$. Let $\mathcal{C}'_0$ be the subtype constraints reflecting the bounds of the
+type variables $b_i$.  If $T$ denotes an instance type of a final
+class, let $\mathcal{C}_2$ be the weakest set of subtype constraints over the type
+variables $a_1 , \ldots , a_n$ and $b_1 , \ldots , b_m$ such that
+$\mathcal{C}_0 \wedge \mathcal{C}'_0 \wedge \mathcal{C}_2$ implies that $T$ conforms to
+$\mathit{pt}$.  If $T$ does not denote an instance type of a final class,
+let $\mathcal{C}_2$ be the weakest set of subtype constraints over the type variables
+$a_1 , \ldots , a_n$ and $b_1 , \ldots , b_m$ such that $\mathcal{C}_0 \wedge
+\mathcal{C}'_0 \wedge \mathcal{C}_2$ implies that it is possible to construct a type
+$T'$ which conforms to both $T$ and $\mathit{pt}$. It is a static error if
+there is no satisfiable set of constraints $\mathcal{C}_2$ with this property.
+
+The final step consists in choosing type bounds for the type
+variables which imply the established constraint system. The process
+is different for the two cases above.
+
+###### Case 1
+We take $a_i >: L_i <: U_i$ where each $L_i$ is minimal and each $U_i$ is maximal wrt $<:$ such that $a_i >: L_i <: U_i$ for $i = 1, \ldots, n$ implies $\mathcal{C}_0 \wedge \mathcal{C}_1$.
+
+###### Case 2
+We take $a_i >: L_i <: U_i$ and $b_i >: L'_i <: U'_i$ where each $L_i$
+and $L'_j$ is minimal and each $U_i$ and $U'_j$ is maximal such that
+$a_i >: L_i <: U_i$ for $i = 1 , \ldots , n$ and 
+$b_j >: L'_j <: U'_j$ for $j = 1 , \ldots , m$
+implies $\mathcal{C}_0 \wedge \mathcal{C}'_0 \wedge \mathcal{C}_2$.
+
+In both cases, local type inference is permitted to limit the
+complexity of inferred bounds. Minimality and maximality of types have
+to be understood relative to the set of types of acceptable
+complexity.
+
+### Type parameter inference for constructor patterns.
+Assume a constructor pattern $C(p_1 , \ldots , p_n)$ where class $C$
+has type parameters $a_1 , \ldots , a_n$.  These type parameters
+are inferred in the same way as for the typed pattern
+`(_: $C[a_1 , \ldots , a_n]$)`.
+
+###### Example
+Consider the program fragment:
+
+```scala
+val x: Any
+x match {
+  case y: List[a] => ...
+}
+```
+
+Here, the type pattern `List[a]` is matched against the
+expected type `Any`. The pattern binds the type variable
+`a`.  Since `List[a]` conforms to `Any`
+for every type argument, there are no constraints on `a`.
+Hence, `a` is introduced as an abstract type with no
+bounds. The scope of `a` is the right-hand side of its case clause.
+
+On the other hand, if `x` is declared as
+
+```scala
+val x: List[List[String]]
+```
+
+this generates the constraint
+`List[a] <: List[List[String]]`, which simplifies to
+`a <: List[String]`, because `List` is covariant. Hence,
+`a` is introduced with upper bound
+`List[String]`.
+
+###### Example
+Consider the program fragment:
+
+```scala
+val x: Any
+x match {
+  case y: List[String] => ...
+}
+```
+
+Scala does not maintain information about type arguments at run-time,
+so there is no way to check that `x` is a list of strings.
+Instead, the Scala compiler will [erase](03-types.html#type-erasure) the
+pattern to `List[_]`; that is, it will only test whether the
+top-level runtime-class of the value `x` conforms to
+`List`, and the pattern match will succeed if it does.  This
+might lead to a class cast exception later on, in the case where the
+list `x` contains elements other than strings.  The Scala
+compiler will flag this potential loss of type-safety with an
+"unchecked" warning message.
+
+
+###### Example
+Consider the program fragment
+
+```scala
+class Term[A]
+class Number(val n: Int) extends Term[Int]
+def f[B](t: Term[B]): B = t match {
+  case y: Number => y.n
+}
+```
+
+The expected type of the pattern `y: Number` is
+`Term[B]`.  The type `Number` does not conform to
+`Term[B]`; hence Case 2 of the rules above
+applies. This means that `B` is treated as another type
+variable for which subtype constraints are inferred. In our case the
+applicable constraint is `Number <: Term[B]`, which
+entails `B = Int`.  Hence, `B` is treated in
+the case clause as an abstract type with lower and upper bound
+`Int`. Therefore, the right hand side of the case clause,
+`y.n`, of type `Int`, is found to conform to the
+function's declared result type, `B`.
+
+
+## Pattern Matching Expressions
+
+```ebnf
+  Expr            ::=  PostfixExpr `match' `{' CaseClauses `}'
+  CaseClauses     ::=  CaseClause {CaseClause}
+  CaseClause      ::=  `case' Pattern [Guard] `=>' Block
+```
+
+A pattern matching expression
+
+```scala
+e match { case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ }
+```
+
+consists of a selector expression $e$ and a number $n > 0$ of
+cases. Each case consists of a (possibly guarded) pattern $p_i$ and a
+block $b_i$. Each $p_i$ might be complemented by a guard
+`if $e$` where $e$ is a boolean expression. 
+The scope of the pattern
+variables in $p_i$ comprises the pattern's guard and the corresponding block $b_i$.
+
+Let $T$ be the type of the selector expression $e$ and let $a_1
+, \ldots , a_m$ be the type parameters of all methods enclosing 
+the pattern matching expression.  For every $a_i$, let $L_i$ be its
+lower bound and $U_i$ be its upper bound.  Every pattern $p \in \{p_1 , \ldots , p_n\}$
+can be typed in two ways. First, it is attempted
+to type $p$ with $T$ as its expected type. If this fails, $p$ is
+instead typed with a modified expected type $T'$ which results from
+$T$ by replacing every occurrence of a type parameter $a_i$ by
+$\mathit{undefined}$.  If the second step also fails, a compile-time
+error results. If the second step succeeds, let $T_p$ be the type of
+pattern $p$ seen as an expression. One then determines minimal bounds
+$L'_1 , \ldots , L'_m$ and maximal bounds $U'_1 , \ldots , U'_m$ such
+that for all $i$, $L_i <: L'_i$ and $U'_i <: U_i$ and the following
+constraint system is satisfied:
+
+$$L_1 <: a_1 <: U_1\;\wedge\;\ldots\;\wedge\;L_m <: a_m <: U_m \ \Rightarrow\ T_p <: T$$
+
+If no such bounds can be found, a compile time error results.  If such
+bounds are found, the pattern matching clause starting with $p$ is
+then typed under the assumption that each $a_i$ has lower bound $L'_i$
+instead of $L_i$ and has upper bound $U'_i$ instead of $U_i$.
+
+The expected type of every block $b_i$ is the expected type of the
+whole pattern matching expression.  The type of the pattern matching
+expression is then the [weak least upper bound](03-types.html#weak-conformance)
+of the types of all blocks
+$b_i$.
+
+When applying a pattern matching expression to a selector value,
+patterns are tried in sequence until one is found which matches the
+[selector value](#patterns). Say this case is `case $p_i$ => $b_i$`.
+The result of the whole expression is the result of evaluating $b_i$,
+where all pattern variables of $p_i$ are bound to
+the corresponding parts of the selector value.  If no matching pattern
+is found, a `scala.MatchError` exception is thrown.
+
+The pattern in a case may also be followed by a guard suffix
+`if e` with a boolean expression $e$.  The guard expression is
+evaluated if the preceding pattern in the case matches. If the guard
+expression evaluates to `true`, the pattern match succeeds as
+normal. If the guard expression evaluates to `false`, the pattern
+in the case is considered not to match and the search for a matching
+pattern continues.
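+
+For instance (an illustrative sketch, not part of the normative text):
+
+```scala
+def sign(x: Int): Int = x match {
+  case n if n > 0 => 1    // the guard is evaluated only if the pattern `n` matches
+  case 0          => 0
+  case _          => -1
+}
+```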
+
+In the interest of efficiency the evaluation of a pattern matching
+expression may try patterns in some other order than textual
+sequence. This might affect evaluation through
+side effects in guards. However, it is guaranteed that a guard
+expression is evaluated only if the pattern it guards matches.
+
+If the selector of a pattern match is an instance of a
+[`sealed` class](05-classes-and-objects.html#modifiers),
+the compilation of pattern matching can emit warnings which diagnose
+that a given set of patterns is not exhaustive, i.e. that there is a
+possibility of a `MatchError` being raised at run-time. 
+
+### Example
+
+Consider the following definitions of arithmetic terms:
+
+```scala
+abstract class Term[T]
+case class Lit(x: Int) extends Term[Int]
+case class Succ(t: Term[Int]) extends Term[Int]
+case class IsZero(t: Term[Int]) extends Term[Boolean]
+case class If[T](c: Term[Boolean],
+                 t1: Term[T],
+                 t2: Term[T]) extends Term[T]
+```
+
+There are terms to represent numeric literals, incrementation, a zero
+test, and a conditional. Every term carries as a type parameter the
+type of the expression it represents (either `Int` or `Boolean`).
+
+A type-safe evaluator for such terms can be written as follows.
+
+```scala
+def eval[T](t: Term[T]): T = t match {
+  case Lit(n)        => n
+  case Succ(u)       => eval(u) + 1
+  case IsZero(u)     => eval(u) == 0
+  case If(c, u1, u2) => eval(if (eval(c)) u1 else u2)
+}
+```
+
+Note that the evaluator makes crucial use of the fact that type
+parameters of enclosing methods can acquire new bounds through pattern
+matching.
+
+For instance, the type of the pattern in the second case,
+`Succ(u)`, is `Int`. It conforms to the selector type
+`T` only if we assume an upper and lower bound of `Int` for `T`.
+Under the assumption `Int <: T <: Int` we can also
+verify that the type of the right-hand side of the second case, `Int`,
+conforms to its expected type, `T`.
+
+
+## Pattern Matching Anonymous Functions
+
+```ebnf
+  BlockExpr ::= `{' CaseClauses `}'
+```
+
+An anonymous function can be defined by a sequence of cases 
+
+```scala
+{ case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ }
+```
+
+which appear as an expression without a prior `match`.  The
+expected type of such an expression must in part be defined. It must
+be either `scala.Function$k$[$S_1 , \ldots , S_k$, $R$]` for some $k > 0$,
+or `scala.PartialFunction[$S_1$, $R$]`, where the
+argument type(s) $S_1 , \ldots , S_k$ must be fully determined, but the result type
+$R$ may be undetermined.
+
+If the expected type is `scala.Function$k$[$S_1 , \ldots , S_k$, $R$]`,
+the expression is taken to be equivalent to the anonymous function:
+
+```scala
+($x_1: S_1 , \ldots , x_k: S_k$) => ($x_1 , \ldots , x_k$) match { 
+  case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ 
+}
+```
+
+Here, each $x_i$ is a fresh name.
+As was shown [here](06-expressions.html#anonymous-functions), this anonymous function is in turn
+equivalent to the following instance creation expression, where
+ $T$ is the weak least upper bound of the types of all $b_i$.
+
+```scala
+new scala.Function$k$[$S_1 , \ldots , S_k$, $T$] {
+  def apply($x_1: S_1 , \ldots , x_k: S_k$): $T$ = ($x_1 , \ldots , x_k$) match {
+    case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$
+  }
+}
+```
+
+If the expected type is `scala.PartialFunction[$S$, $R$]`,
+the expression is taken to be equivalent to the following instance creation expression:
+
+```scala
+new scala.PartialFunction[$S$, $T$] {
+  def apply($x$: $S$): $T$ = x match {
+    case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$
+  }
+  def isDefinedAt($x$: $S$): Boolean = $x$ match {
+    case $p_1$ => true $\ldots$ case $p_n$ => true
+    case _ => false
+  }
+}
+```
+
+Here, $x$ is a fresh name and $T$ is the weak least upper bound of the
+types of all $b_i$. The final default case in the `isDefinedAt`
+method is omitted if one of the patterns $p_1 , \ldots , p_n$ is
+already a variable or wildcard pattern.
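+
+Such a block of case clauses is commonly written where a
+`scala.PartialFunction` is expected, for example as the argument of
+`collect` on a collection (an illustrative sketch, not part of the
+normative text):
+
+```scala
+val mixed: List[Any] = List(1, "two", 3)
+// The case block expands to a PartialFunction[Any, Int] as described above.
+val ints: List[Int] = mixed.collect { case n: Int => n * 10 }   // List(10, 30)
+```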
+
+###### Example
+Here is a method which uses a fold-left operation
+`/:` to compute the scalar product of
+two vectors:
+
+```scala
+def scalarProduct(xs: Array[Double], ys: Array[Double]) =
+  (0.0 /: (xs zip ys)) {
+    case (a, (b, c)) => a + b * c
+  }
+```
+
+The case clauses in this code are equivalent to the following
+anonymous function:
+
+```scala
+(x, y) => (x, y) match {
+  case (a, (b, c)) => a + b * c
+}
+```
+
diff --git a/spec/09-top-level-definitions.md b/spec/09-top-level-definitions.md
new file mode 100644
index 0000000..b9c78b2
--- /dev/null
+++ b/spec/09-top-level-definitions.md
@@ -0,0 +1,201 @@
+---
+title: Top-Level Definitions
+layout: default
+chapter: 9
+---
+
+# Top-Level Definitions
+
+## Compilation Units
+
+```ebnf
+CompilationUnit  ::=  {‘package’ QualId semi} TopStatSeq
+TopStatSeq       ::=  TopStat {semi TopStat}
+TopStat          ::=  {Annotation} {Modifier} TmplDef
+                   |  Import
+                   |  Packaging
+                   |  PackageObject
+                   |
+QualId           ::=  id {‘.’ id}
+```
+
+A compilation unit consists of a sequence of packagings, import
+clauses, and class and object definitions, which may be preceded by a
+package clause.
+
+A compilation unit 
+
+```scala
+package $p_1$;
+$\ldots$
+package $p_n$;
+$\mathit{stats}$
+```
+
+starting with one or more package
+clauses is equivalent to a compilation unit consisting of the
+packaging 
+
+```scala
+package $p_1$ { $\ldots$
+  package $p_n$ {
+    $\mathit{stats}$
+  } $\ldots$
+}
+```
+
+Every compilation unit implicitly imports the following packages, in the given order:
+ 1. the package `java.lang`,
+ 2. the package `scala`, and
+ 3. the object [`scala.Predef`](12-the-scala-standard-library.html#the-predef-object), unless there is an explicit top-level import that references `scala.Predef`.
+
+Members of a later import in that order hide members of an earlier import.
+
+The exception to the implicit import of `scala.Predef` can be useful to hide, e.g., predefined implicit conversions.
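+
+For example, a compilation unit can start with an explicit `Predef` import
+that hides one member; this is a sketch assuming the `any2stringadd`
+conversion is a member of the implementation's `Predef`:
+
+```scala
+// Top-level import: because it references scala.Predef, the implicit
+// import of Predef is suppressed and only the imported members are visible.
+import scala.Predef.{any2stringadd => _, _}
+
+object Example {
+  val greeting: String = "hello"   // the `String` alias is still brought in by the wildcard
+}
+```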
+
+## Packagings
+
+```ebnf
+Packaging       ::=  ‘package’ QualId [nl] ‘{’ TopStatSeq ‘}’
+```
+
+A package is a special object which defines a set of member classes,
+objects and packages.  Unlike other objects, packages are not introduced
+by a definition.  Instead, the set of members of a package is determined by
+packagings.
+
+A packaging `package $p$ { $\mathit{ds}$ }` injects all
+definitions in $\mathit{ds}$ as members into the package whose qualified name
+is $p$. Members of a package are called _top-level_ definitions.
+If a definition in $\mathit{ds}$ is labeled `private`, it is
+visible only for other members in the package.
+
+Inside the packaging, all members of package $p$ are visible under their
+simple names. However, this rule does not extend to members of enclosing
+packages of $p$ that are designated by a prefix of the path $p$.
+For example, given the packaging
+
+```scala
+package org.net.prj {
+  ...
+}
+```
+
+all members of package `org.net.prj` are visible under their
+simple names, but members of packages `org` or `org.net` require
+explicit qualification or imports.
+
+Selections $p$.$m$ from $p$ as well as imports from $p$
+work as for objects. However, unlike other objects, packages may not
+be used as values. It is illegal to have a package with the same fully
+qualified name as a module or a class.
+
+Top-level definitions outside a packaging are assumed to be injected
+into a special empty package. That package cannot be named and
+therefore cannot be imported. However, members of the empty package
+are visible to each other without qualification.
+
+
+## Package Objects
+
+```ebnf
+PackageObject   ::=  ‘package’ ‘object’ ObjectDef
+```
+
+A package object `package object $p$ extends $t$` adds the
+members of template $t$ to the package $p$. There can be only one
+package object per package. The standard naming convention is to place
+the definition above in a file named `package.scala` that's
+located in the directory corresponding to package $p$.
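+
+For example, following that convention (an illustrative sketch, with
+hypothetical package and member names):
+
+```scala
+// file: org/example/numbers/package.scala
+package org.example
+
+package object numbers {
+  val Zero: Int = 0
+  def double(x: Int): Int = 2 * x   // available as org.example.numbers.double
+}
+```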
+
+The package object should not define a member with the same name as
+one of the top-level objects or classes defined in package $p$. If
+there is a name conflict, the behavior of the program is currently
+undefined. It is expected that this restriction will be lifted in a
+future version of Scala.
+
+
+## Package References
+
+```ebnf
+QualId           ::=  id {‘.’ id}
+```
+
+A reference to a package takes the form of a qualified identifier.
+Like all other references, package references are relative. That is, 
+a package reference starting in a name $p$ will be looked up in the
+closest enclosing scope that defines a member named $p$.
+
+The special predefined name `_root_` refers to the
+outermost root package which contains all top-level packages.  
+
+###### Example
+Consider the following program:
+
+```scala
+package b {
+  class B
+}
+
+package a.b {
+  class A {
+    val x = new _root_.b.B
+  }
+}
+```
+
+Here, the reference `_root_.b.B` refers to class `B` in the
+toplevel package `b`. If the `_root_` prefix had been
+omitted, the name `b` would instead resolve to the package
+`a.b`, and, provided that package does not also
+contain a class `B`, a compile-time error would result.
+
+
+## Programs
+
+A _program_ is a top-level object that has a member method
+_main_ of type `(Array[String])Unit`. Programs can be
+executed from a command shell. The program's command arguments are
+passed to the `main` method as a parameter of type
+`Array[String]`.
+
+The `main` method of a program can be directly defined in the
+object, or it can be inherited. The Scala library defines a special class
+`scala.App` whose body acts as a `main` method.
+An object $m$ inheriting from this class is thus a program,
+which executes the initialization code of the object $m$.
+
+###### Example
+The following example will create a hello world program by defining
+a method `main` in module `test.HelloWorld`.
+
+```scala
+package test
+object HelloWorld {
+  def main(args: Array[String]) { println("Hello World") }
+}
+```
+
+This program can be started by the command
+
+```scala
+scala test.HelloWorld
+```
+
+In a Java environment, the command
+
+```scala
+java test.HelloWorld
+```
+
+would work as well.
+
+`HelloWorld` can also be defined without a `main` method
+by inheriting from `App` instead:
+
+```scala
+package test
+object HelloWorld extends App {
+  println("Hello World")
+}
+```
+
diff --git a/spec/10-xml-expressions-and-patterns.md b/spec/10-xml-expressions-and-patterns.md
new file mode 100644
index 0000000..d8c45ec
--- /dev/null
+++ b/spec/10-xml-expressions-and-patterns.md
@@ -0,0 +1,147 @@
+---
+title: XML Expressions and Patterns
+layout: default
+chapter: 10
+---
+
+# XML Expressions and Patterns
+
+__By Burak Emir__
+
+This chapter describes the syntactic structure of XML expressions and patterns.
+It follows as closely as possible the XML 1.0 specification,
+changes being mandated by the possibility of embedding Scala code fragments.
+
+## XML expressions
+
+XML expressions are expressions generated by the following production, where the 
+opening bracket `<` of the first element must be in a position to start the lexical
+[XML mode](01-lexical-syntax.html#xml-mode).
+
+```ebnf
+XmlExpr ::= XmlContent {Element}
+```
+
+Well-formedness constraints of the XML specification apply, which
+means for instance that start tags and end tags must match, and
+attributes may only be defined once, with the exception of constraints
+related to entity resolution.
+
+The following productions describe Scala's extensible markup language,
+designed as close as possible to the W3C extensible markup language
+standard. Only the productions for attribute values and character data are changed. 
+Scala does not support declarations, CDATA sections or processing instructions.
+Entity references are not resolved at runtime.
+
+```ebnf
+Element       ::=    EmptyElemTag
+                |    STag Content ETag                                       
+
+EmptyElemTag  ::=    ‘<’ Name {S Attribute} [S] ‘/>’                         
+
+STag          ::=    ‘<’ Name {S Attribute} [S] ‘>’                          
+ETag          ::=    ‘</’ Name [S] ‘>’                                        
+Content       ::=    [CharData] {Content1 [CharData]}
+Content1      ::=    XmlContent
+                |    Reference
+                |    ScalaExpr
+XmlContent    ::=    Element
+                |    CDSect
+                |    PI
+                |    Comment
+```
+
+If an XML expression is a single element, its value is a runtime
+representation of an XML node (an instance of a subclass of 
+`scala.xml.Node`). If the XML expression consists of more
+than one element, then its value is a runtime representation of a
+sequence of XML nodes (an instance of a subclass of 
+`scala.Seq[scala.xml.Node]`).
+
+If an XML expression is an entity reference, a CDATA section, a processing
+instruction, or a comment, it is represented by an instance of the
+corresponding Scala runtime class.
+
+By default, leading and trailing whitespace in element content is removed,
+and consecutive occurrences of whitespace are replaced by a single space
+character `\u0020`. This behavior can be changed to preserve all whitespace
+with a compiler option.
+
+```ebnf
+Attribute  ::=    Name Eq AttValue                                    
+
+AttValue      ::=    ‘"’ {CharQ | CharRef} ‘"’
+                |    ‘'’ {CharA | CharRef} ‘'’
+                |    ScalaExpr
+
+ScalaExpr     ::=    Block
+
+CharData      ::=   { CharNoRef } $\mbox{\rm\em without}$ {CharNoRef}`{'CharB {CharNoRef} 
+                                  $\mbox{\rm\em and without}$ {CharNoRef}`]]>'{CharNoRef}
+```
+
+<!-- {% raw  %} stupid liquid borks on the double brace below; brace yourself, liquid! -->
+XML expressions may contain Scala expressions as attribute values or
+within nodes. In the latter case, these are embedded using a single opening 
+brace `{` and ended by a closing brace `}`. To express a single opening brace
+within XML text as generated by CharData, it must be doubled.
+Thus, `{{` represents the XML text `{` and does not introduce an embedded Scala expression.
+<!-- {% endraw %} -->
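+
+For example (an illustrative sketch, assuming the `scala.xml` library is
+available and the value `title` is in scope):
+
+```scala
+val title = "Report"
+val page  = <html><head><title>{ title }</title></head></html>   // embedded Scala expression
+```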
+
+```ebnf
+BaseChar, Char, Comment, CombiningChar, Ideographic, NameChar, S, Reference
+              ::=  $\mbox{\rm\em “as in W3C XML”}$
+
+Char1         ::=  Char $\mbox{\rm\em without}$ ‘<’ | ‘&’
+CharQ         ::=  Char1 $\mbox{\rm\em without}$ ‘"’
+CharA         ::=  Char1 $\mbox{\rm\em without}$ ‘'’
+CharB         ::=  Char1 $\mbox{\rm\em without}$ ‘{’
+
+Name          ::=  XNameStart {NameChar}
+
+XNameStart    ::= ‘_’ | BaseChar | Ideographic 
+                 $\mbox{\rm\em (as in W3C XML, but without }$ ‘:’
+```
+
+## XML patterns
+
+XML patterns are patterns generated by the following production, where
+the opening bracket `<` of the element patterns must be in a position
+to start the lexical [XML mode](01-lexical-syntax.html#xml-mode).
+
+```ebnf
+XmlPattern  ::= ElementPattern 
+```
+
+Well-formedness constraints of the XML specification apply.
+
+An XML pattern has to be a single element pattern. It
+matches exactly those runtime
+representations of an XML tree
+that have the same structure as described by the pattern.
+XML patterns may contain [Scala patterns](08-pattern-matching.html#pattern-matching-expressions).
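+
+For example, the following match extracts the single child of a `title`
+element (an illustrative sketch, assuming the `scala.xml` library):
+
+```scala
+def titleText(node: scala.xml.Node): String = node match {
+  case <title>{ text }</title> => text.toString   // `text` is an embedded Scala pattern
+  case _                       => ""
+}
+```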
+
+Whitespace is treated the same way as in XML expressions.
+
+By default, leading and trailing whitespace in element content is removed,
+and consecutive occurrences of whitespace are replaced by a single space
+character `\u0020`. This behavior can be changed to preserve all whitespace
+with a compiler option.
+
+```ebnf
+ElemPattern   ::=    EmptyElemTagP
+                |    STagP ContentP ETagP                                    
+
+EmptyElemTagP ::=    ‘<’  Name [S] ‘/>’
+STagP         ::=    ‘<’  Name [S] ‘>’                          
+ETagP         ::=    ‘</’ Name [S] ‘>’                                        
+ContentP      ::=    [CharData] {(ElemPattern|ScalaPatterns) [CharData]}
+ContentP1     ::=    ElemPattern
+                |    Reference
+                |    CDSect
+                |    PI
+                |    Comment
+                |    ScalaPatterns
+ScalaPatterns ::=    ‘{’ Patterns ‘}’
+```
+
diff --git a/spec/11-user-defined-annotations.md b/spec/11-user-defined-annotations.md
new file mode 100644
index 0000000..fd7a7f9
--- /dev/null
+++ b/spec/11-user-defined-annotations.md
@@ -0,0 +1,166 @@
+---
+title: User-Defined Annotations
+layout: default
+chapter: 11
+---
+
+# User-Defined Annotations
+
+```ebnf
+  Annotation       ::=  ‘@’ SimpleType {ArgumentExprs}
+  ConstrAnnotation ::=  ‘@’ SimpleType ArgumentExprs
+```
+
+User-defined annotations associate meta-information with definitions.
+A simple annotation has the form `@$c$` or `@$c(a_1 , \ldots , a_n)$`.
+Here, $c$ is a constructor of a class $C$, which must conform
+to the class `scala.Annotation`. 
+
+Annotations may apply to definitions or declarations, types, or
+expressions.  An annotation of a definition or declaration appears in
+front of that definition.  An annotation of a type appears after
+that type. An annotation of an expression $e$ appears after the
+expression $e$, separated by a colon. More than one annotation clause
+may apply to an entity. The order in which these annotations are given
+does not matter.
+
+Examples:
+
+```scala
+ at deprecated("Use D", "1.0") class C { ... } // Class annotation
+ at transient @volatile var m: Int             // Variable annotation
+String @local                               // Type annotation
+(e: @unchecked) match { ... }               // Expression annotation
+```
+
+The meaning of annotation clauses is implementation-dependent. On the
+Java platform, the following annotations have a standard meaning.
+
+  * `@transient` Marks a field to be non-persistent; this is
+    equivalent to the `transient`
+    modifier in Java.
+
+  * `@volatile` Marks a field which can change its value
+    outside the control of the program; this
+    is equivalent to the `volatile`
+    modifier in Java.
+
+  * `@SerialVersionUID(<longlit>)` Attaches a serial version identifier (a
+    `long` constant) to a class.
+    This is equivalent to the following field
+    definition in Java:
+
+    ```
+    private static final long SerialVersionUID = <longlit>;
+    ```
+
+  * `@throws(<classlit>)` A Java compiler checks that a program contains handlers for checked exceptions
+    by analyzing which checked exceptions can result from execution of a method or
+    constructor. For each checked exception which is a possible result, the
+    `throws`
+    clause for the method or constructor must mention the class of that exception
+    or one of the superclasses of the class of that exception.
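+
+For instance, a Scala method can declare a checked exception for Java callers
+with the `@throws` annotation described above (an illustrative sketch):
+
+```scala
+import java.io.{FileInputStream, IOException}
+
+class Reader {
+  @throws(classOf[IOException])            // emitted as `throws IOException` in the class file
+  def firstByte(path: String): Int = {
+    val in = new FileInputStream(path)
+    try in.read() finally in.close()
+  }
+}
+```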
+
+## Java Beans Annotations
+
+  * `@scala.beans.BeanProperty` When prefixed to a definition of some variable `X`, this
+    annotation causes getter and setter methods `getX`, `setX`
+    in the Java bean style to be added in the class containing the
+    variable. The first letter of the variable appears capitalized after
+    the `get` or `set`. When the annotation is added to the
+    definition of an immutable value definition `X`, only a getter is
+    generated. The construction of these methods is part of
+    code-generation; therefore, these methods become visible only once a
+    classfile for the containing class is generated.
+
+  * `@scala.beans.BooleanBeanProperty` This annotation is equivalent to `scala.beans.BeanProperty`, but
+    the generated getter method is named `isX` instead of `getX`.
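+
+For instance (an illustrative sketch):
+
+```scala
+import scala.beans.BeanProperty
+
+class Person(@BeanProperty var name: String)
+// The generated class additionally exposes the Java-bean style accessors
+// `getName(): String` and `setName(name: String): Unit`.
+```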
+
+## Deprecation Annotations
+
+  * `@deprecated(<stringlit>)` Marks a definition as deprecated. Accesses to the
+    defined entity will then cause a deprecated warning mentioning the
+    message `<stringlit>` to be issued from the compiler.  Deprecated
+    warnings are suppressed in code that belongs itself to a definition
+    that is labeled deprecated.
+
+  * `@deprecatedName(name: <symbollit>)` Marks a formal parameter name as deprecated. Invocations of this entity
+    using named parameter syntax referring to the deprecated parameter name cause a deprecation warning.
+
+## Scala Compiler Annotations
+
+  * `@unchecked` When applied to the selector of a `match` expression,
+    this attribute suppresses any warnings about non-exhaustive pattern
+    matches which would otherwise be emitted. For instance, no warnings
+    would be produced for the method definition below.
+
+    ```
+    def f(x: Option[Int]) = (x: @unchecked) match {
+      case Some(y) => y
+    }
+    ```
+
+    Without the `@unchecked` annotation, a Scala compiler could
+    infer that the pattern match is non-exhaustive, and could produce a
+    warning because `Option` is a `sealed` class.
+
+  * `@uncheckedStable` When applied to a value declaration or definition, it allows the defined
+    value to appear in a path, even if its type is [volatile](03-types.html#volatile-types).
+    For instance, the following member definitions are legal:
+
+    ```
+    type A = { type T }
+    type B
+    @uncheckedStable val x: A with B // volatile type
+    val y: x.T                       // OK since `x' is still a path
+    ```
+
+    Without the `@uncheckedStable` annotation, the designator `x`
+    would not be a path since its type `A with B` is volatile. Hence,
+    the reference `x.T` would be malformed.
+
+    When applied to value declarations or definitions that have non-volatile
+    types, the annotation has no effect.
+
+
+  * `@specialized` When applied to the definition of a type parameter, this annotation causes
+    the compiler
+    to generate specialized definitions for primitive types. An optional list of
+    primitive
+    types may be given, in which case specialization takes into account only
+    those types.
+    For instance, the following code would generate specialized traits for
+    `Unit`, `Int` and `Double`
+
+    ```
+    trait Function0[@specialized(Unit, Int, Double) T] {
+      def apply: T
+    }
+    ```
+
+    Whenever the static type of an expression matches a specialized variant of
+    a definition, the compiler will instead use the specialized version.
+    See the [specialization SIP](http://docs.scala-lang.org/sips/completed/scala-specialization.html) for more details of the implementation.
+
+
+Other annotations may be interpreted by platform- or
+application-dependent tools. Class `scala.Annotation` has two
+sub-traits which are used to indicate how these annotations are
+retained. Instances of an annotation class inheriting from trait
+`scala.ClassfileAnnotation` will be stored in the generated class
+files. Instances of an annotation class inheriting from trait
+`scala.StaticAnnotation` will be visible to the Scala type-checker
+in every compilation unit where the annotated symbol is accessed. An
+annotation class can inherit from both `scala.ClassfileAnnotation`
+and `scala.StaticAnnotation`. If an annotation class inherits from
+neither `scala.ClassfileAnnotation` nor
+`scala.StaticAnnotation`, its instances are visible only locally
+during the compilation run that analyzes them.
+
+Classes inheriting from `scala.ClassfileAnnotation` may be
+subject to further restrictions in order to assure that they can be
+mapped to the host environment. In particular, on both the Java and
+the .NET platforms, such classes must be toplevel; i.e. they may not
+be contained in another class or object.  Additionally, on both
+Java and .NET, all constructor arguments must be constant expressions.
+
diff --git a/spec/12-the-scala-standard-library.md b/spec/12-the-scala-standard-library.md
new file mode 100644
index 0000000..9d4d69e
--- /dev/null
+++ b/spec/12-the-scala-standard-library.md
@@ -0,0 +1,849 @@
+---
+title: The Scala Standard Library
+layout: default
+chapter: 12
+---
+
+# The Scala Standard Library
+
+The Scala standard library consists of the package `scala` with a
+number of classes and modules. Some of these classes are described in
+the following.
+
+![Class hierarchy of Scala](public/images/classhierarchy.pdf)
+
+## Root Classes
+
+The root of this hierarchy is formed by class `Any`.
+Every class in a Scala execution environment inherits directly or
+indirectly from this class.  Class `Any` has two direct
+subclasses: `AnyRef` and `AnyVal`.
+
+The subclass `AnyRef` represents all values which are represented
+as objects in the underlying host system. Classes written in other languages
+inherit from `scala.AnyRef`.
+
+The predefined subclasses of class `AnyVal` describe
+values which are not implemented as objects in the underlying host
+system.
+
+User-defined Scala classes which do not explicitly inherit from
+`AnyVal` inherit directly or indirectly from `AnyRef`. They cannot
+inherit from both `AnyRef` and `AnyVal`.
+
+Classes `AnyRef` and `AnyVal` are required to provide only
+the members declared in class `Any`, but implementations may add
+host-specific methods to these classes (for instance, an
+implementation may identify class `AnyRef` with its own root
+class for objects).
+
+The signatures of these root classes are described by the following
+definitions.
+
+```scala
+package scala 
+/** The universal root class */
+abstract class Any {
+
+  /** Defined equality; abstract here */
+  def equals(that: Any): Boolean 
+
+  /** Semantic equality between values */
+  final def == (that: Any): Boolean  =  
+    if (null eq this) null eq that else this equals that
+
+  /** Semantic inequality between values */
+  final def != (that: Any): Boolean  =  !(this == that)
+
+  /** Hash code; abstract here */
+  def hashCode: Int = $\ldots$
+
+  /** Textual representation; abstract here */
+  def toString: String = $\ldots$
+
+  /** Type test; needs to be inlined to work as given */
+  def isInstanceOf[A]: Boolean
+
+  /** Type cast; needs to be inlined to work as given */
+  def asInstanceOf[A]: A = this match {
+    case x: A => x
+    case _ => if (this eq null) this
+              else throw new ClassCastException()
+  }
+}
+
+/** The root class of all value types */
+final class AnyVal extends Any 
+
+/** The root class of all reference types */
+class AnyRef extends Any {
+  def equals(that: Any): Boolean      = this eq that 
+  final def eq(that: AnyRef): Boolean = $\ldots$ // reference equality
+  final def ne(that: AnyRef): Boolean = !(this eq that)
+
+  def hashCode: Int = $\ldots$     // hashCode computed from allocation address
+  def toString: String  = $\ldots$ // toString computed from hashCode and class name
+
+  def synchronized[T](body: => T): T // execute `body` while locking `this`.
+}                           
+
+```
+The type test `$x$.isInstanceOf[$T$]` is equivalent to a typed
+pattern match
+
+```scala
+$x$ match {
+  case _: $T'$ => true
+  case _ => false
+}
+```
+
+where the type $T'$ is the same as $T$ except if $T$ is
+of the form $D$ or $D[\mathit{tps}]$ where $D$ is a type member of some outer class $C$.
+In this case $T'$ is `$C$#$D$` (or `$C$#$D[tps]$`, respectively), whereas $T$ itself would expand to `$C$.this.$D[tps]$`.
+In other words, an `isInstanceOf` test does not check that types have the same enclosing instance.
+
+
+The test `$x$.asInstanceOf[$T$]` is treated specially if $T$ is a
+[numeric value type](#value-classes). In this case the cast will
+be translated to an application of a [conversion method](#numeric-value-types) 
+`x.to$T$`. For non-numeric values $x$ the operation will raise a
+`ClassCastException`.
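+
+For example (an illustrative sketch):
+
+```scala
+val i: Int  = 65
+val l: Long = i.asInstanceOf[Long]   // translated to i.toLong
+val c: Char = i.asInstanceOf[Char]   // translated to i.toChar, yields 'A'
+```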
+
+## Value Classes
+
+Value classes are classes whose instances are not represented as
+objects by the underlying host system.  All value classes inherit from
+class `AnyVal`. Scala implementations need to provide the
+value classes `Unit`, `Boolean`, `Double`, `Float`,
+`Long`, `Int`, `Char`, `Short`, and `Byte`
+(but are free to provide others as well).
+The signatures of these classes are defined in the following.
+
+### Numeric Value Types
+
+Classes `Double`, `Float`,
+`Long`, `Int`, `Char`, `Short`, and `Byte`
+are together called _numeric value types_. Classes `Byte`,
+`Short`, or `Char` are called _subrange types_.
+Subrange types, as well as `Int` and `Long` are called _integer types_, whereas `Float` and `Double` are called _floating point types_.
+
+Numeric value types are ranked in the following partial order:
+
+```scala
+Byte - Short 
+             \
+               Int - Long - Float - Double
+             / 
+        Char 
+```
+
+`Byte` and `Short` are the lowest-ranked types in this order, 
+whereas `Double` is the highest-ranked.  Ranking does _not_
+imply a [conformance relationship](03-types.html#conformance); for
+instance `Int` is not a subtype of `Long`.  However, object
+[`Predef`](#the-predef-object) defines [views](07-implicit-parameters-and-views.html#views)
+from every numeric value type to all higher-ranked numeric value types. 
+Therefore, lower-ranked types are implicitly converted to higher-ranked types
+when required by the [context](06-expressions.html#implicit-conversions).
+
+Given two numeric value types $S$ and $T$, the _operation type_ of
+$S$ and $T$ is defined as follows: If both $S$ and $T$ are subrange
+types then the operation type of $S$ and $T$ is `Int`.  Otherwise
+the operation type of $S$ and $T$ is the larger of the two types wrt
+ranking. Given two numeric values $v$ and $w$ the operation type of
+$v$ and $w$ is the operation type of their run-time types.
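+
+For example, the resulting static types are as follows (an illustrative
+sketch):
+
+```scala
+val b: Byte  = 1
+val s: Short = 2
+val x = b + s    // operation type of two subrange types is Int;  x: Int
+val y = x + 3L   // operation type of Int and Long is Long;       y: Long
+val z = y + 0.5  // operation type of Long and Double is Double;  z: Double
+```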
+
+Any numeric value type $T$ supports the following methods.
+
+  * Comparison methods for equals (`==`), not-equals (`!=`),
+    less-than (`<`), greater-than (`>`), less-than-or-equals
+    (`<=`), greater-than-or-equals (`>=`), which each exist in 7
+    overloaded alternatives. Each alternative takes a parameter of some
+    numeric value type. Its result type is type `Boolean`. The
+    operation is evaluated by converting the receiver and its argument to
+    their operation type and performing the given comparison operation of
+    that type.
+  * Arithmetic methods addition (`+`), subtraction (`-`),
+    multiplication (`*`), division (`/`), and remainder
+    (`%`), which each exist in 7 overloaded alternatives. Each
+    alternative takes a parameter of some numeric value type $U$.  Its
+    result type is the operation type of $T$ and $U$. The operation is
+    evaluated by converting the receiver and its argument to their
+    operation type and performing the given arithmetic operation of that
+    type.
+  * Parameterless arithmetic methods identity (`+`) and negation
+    (`-`), with result type $T$.  The first of these returns the
+    receiver unchanged, whereas the second returns its negation.
+  * Conversion methods `toByte`, `toShort`, `toChar`,
+    `toInt`, `toLong`, `toFloat`, `toDouble` which
+    convert the receiver object to the target type, using the rules of
+    Java's numeric type cast operation. The conversion might truncate the
+    numeric value (as when going from `Long` to `Int` or from
+    `Int` to `Byte`) or it might lose precision (as when going
+    from `Double` to `Float` or when converting between
+    `Long` and `Float`). 
+
+Integer numeric value types support in addition the following operations:
+
+  * Bit manipulation methods bitwise-and (`&`), bitwise-or
+    (`|`), and bitwise-exclusive-or (`^`), which each exist in 5
+    overloaded alternatives. Each alternative takes a parameter of some
+    integer numeric value type $U$. Its result type is the operation type of
+    $T$ and $U$. The operation is evaluated by converting the receiver and
+    its argument to their operation type and performing the given bitwise
+    operation of that type.
+
+  * A parameterless bit-negation method (`~`). Its result type is
+    the receiver type $T$ or `Int`, whichever is larger.
+    The operation is evaluated by converting the receiver to the result
+    type and negating every bit in its value.
+  * Bit-shift methods left-shift (`<<`), arithmetic right-shift
+    (`>>`), and unsigned right-shift (`>>>`). Each of these
+    methods has two overloaded alternatives, which take a parameter $n$
+    of type `Int`, respectively `Long`. The result type of the
+    operation is the receiver type $T$, or `Int`, whichever is larger.
+    The operation is evaluated by converting the receiver to the result
+    type and performing the specified shift by $n$ bits.
+
+Numeric value types also implement operations `equals`,
+`hashCode`, and `toString` from class `Any`.
+
+The `equals` method tests whether the argument is a numeric value
+type. If this is true, it will perform the `==` operation which
+is appropriate for that type. That is, the `equals` method of a
+numeric value type can be thought of being defined as follows:
+
+```scala
+def equals(other: Any): Boolean = other match {
+  case that: Byte   => this == that
+  case that: Short  => this == that
+  case that: Char   => this == that
+  case that: Int    => this == that
+  case that: Long   => this == that
+  case that: Float  => this == that
+  case that: Double => this == that
+  case _ => false
+}
+```
+
+The `hashCode` method returns an integer hashcode that maps equal
+numeric values to equal results. It is guaranteed to be the identity
+for type `Int` and for all subrange types.
+
+The `toString` method displays its receiver as an integer or
+floating point number.
+
+### Example
+
+This is the signature of the numeric value type `Int`:
+
+```scala
+package scala
+abstract sealed class Int extends AnyVal {
+  def == (that: Double): Boolean  // double equality
+  def == (that: Float): Boolean   // float equality
+  def == (that: Long): Boolean    // long equality
+  def == (that: Int): Boolean     // int equality
+  def == (that: Short): Boolean   // int equality
+  def == (that: Byte): Boolean    // int equality
+  def == (that: Char): Boolean    // int equality
+  /* analogous for !=, <, >, <=, >= */
+
+  def + (that: Double): Double    // double addition
+  def + (that: Float): Double     // float addition
+  def + (that: Long): Long        // long addition
+  def + (that: Int): Int          // int addition
+  def + (that: Short): Int        // int addition
+  def + (that: Byte): Int         // int addition
+  def + (that: Char): Int         // int addition
+  /* analogous for -, *, /, % */
+
+  def & (that: Long): Long        // long bitwise and
+  def & (that: Int): Int          // int bitwise and
+  def & (that: Short): Int        // int bitwise and
+  def & (that: Byte): Int         // int bitwise and
+  def & (that: Char): Int         // int bitwise and
+  /* analogous for |, ^ */
+
+  def << (cnt: Int): Int          // int left shift
+  def << (cnt: Long): Int         // long left shift
+  /* analogous for >>, >>> */
+
+  def unary_+ : Int               // int identity
+  def unary_- : Int               // int negation
+  def unary_~ : Int               // int bitwise negation
+
+  def toByte: Byte                // convert to Byte
+  def toShort: Short              // convert to Short
+  def toChar: Char                // convert to Char
+  def toInt: Int                  // convert to Int
+  def toLong: Long                // convert to Long
+  def toFloat: Float              // convert to Float
+  def toDouble: Double            // convert to Double
+}
+```
+
+
+### Class `Boolean`
+
+Class `Boolean` has only two values: `true` and
+`false`. It implements operations as given in the following
+class definition.
+
+```scala
+package scala 
+abstract sealed class Boolean extends AnyVal {
+  def && (p: => Boolean): Boolean = // boolean and
+    if (this) p else false
+  def || (p: => Boolean): Boolean = // boolean or
+    if (this) true else p
+  def &  (x: Boolean): Boolean =    // boolean strict and
+    if (this) x else false
+  def |  (x: Boolean): Boolean =    // boolean strict or
+    if (this) true else x
+  def == (x: Boolean): Boolean =    // boolean equality
+    if (this) x else x.unary_!
+  def != (x: Boolean): Boolean =    // boolean inequality
+    if (this) x.unary_! else x
+  def unary_!: Boolean =            // boolean negation
+    if (this) false else true
+}
+```
+
+The class also implements operations `equals`, `hashCode`,
+and `toString` from class `Any`.
+
+The `equals` method returns `true` if the argument is the
+same boolean value as the receiver, `false` otherwise.  The
+`hashCode` method returns a fixed, implementation-specific hash-code when invoked on `true`, 
+and a different, fixed, implementation-specific hash-code when invoked on `false`. The `toString` method
+returns the receiver converted to a string, i.e. either `"true"` or `"false"`.
+
+### Class `Unit`
+
+Class `Unit` has only one value: `()`. It implements only
+the three methods `equals`, `hashCode`, and `toString`
+from class `Any`.
+
+The `equals` method returns `true` if the argument is the
+unit value `()`, `false` otherwise.  The
+`hashCode` method returns a fixed, implementation-specific hash-code.
+The `toString` method returns `"()"`.
+
+## Standard Reference Classes
+
+This section presents some standard Scala reference classes which are
+treated in a special way by the Scala compiler -- either Scala provides
+syntactic sugar for them, or the Scala compiler generates special code
+for their operations. Other classes in the standard Scala library are
+documented in the Scala library documentation by HTML pages.
+
+### Class `String`
+
+Scala's `String` class is usually derived from the standard String
+class of the underlying host system (and may be identified with
+it). For Scala clients the class is taken to support in each case a
+method
+
+```scala
+def + (that: Any): String 
+```
+
+which concatenates its left operand with the textual representation of its
+right operand.
+
+### The `Tuple` classes
+
+Scala defines tuple classes `Tuple$n$` for $n = 2 , \ldots , 22$.
+These are defined as follows.
+
+```scala
+package scala 
+case class Tuple$n$[+T_1, ..., +T_$n$](_1: T_1, ..., _$n$: T_$n$) {
+  def toString = "(" ++ _1 ++ "," ++ $\ldots$ ++ "," ++ _$n$ ++ ")"
+}
+```
+
+The implicitly imported [`Predef`](#the-predef-object) object defines
+the names `Pair` as an alias of `Tuple2` and `Triple`
+as an alias for `Tuple3`.
+
+### The `Function` Classes
+
+Scala defines function classes `Function$n$` for $n = 1 , \ldots , 22$.
+These are defined as follows.
+
+```scala
+package scala 
+trait Function$n$[-T_1, ..., -T_$n$, +R] {
+  def apply(x_1: T_1, ..., x_$n$: T_$n$): R
+  def toString = "<function>" 
+}
+```
+
+The `PartialFunction` subclass of `Function1` represents functions that (indirectly) specify their domain.
+Use the `isDefinedAt` method to query whether the partial function is defined for a given input (i.e., whether the input is part of the function's domain).
+
+```scala
+class PartialFunction[-A, +B] extends Function1[A, B] {
+  def isDefinedAt(x: A): Boolean
+}
+```
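+
+For example (an illustrative sketch):
+
+```scala
+val reciprocal: PartialFunction[Int, Double] = {
+  case n if n != 0 => 1.0 / n
+}
+reciprocal.isDefinedAt(2)   // true
+reciprocal.isDefinedAt(0)   // false
+```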
+
+The implicitly imported [`Predef`](#the-predef-object) object defines the name 
+`Function` as an alias of `Function1`.
+
+### Class `Array`
+
+All operations on arrays desugar to the corresponding operations of the
+underlying platform. Therefore, the following class definition is given for
+informational purposes only:
+
+```scala
+final class Array[T](_length: Int)
+extends java.io.Serializable with java.lang.Cloneable {
+  def length: Int = $\ldots$
+  def apply(i: Int): T = $\ldots$
+  def update(i: Int, x: T): Unit = $\ldots$
+  override def clone(): Array[T] = $\ldots$
+}
+```
+
+If $T$ is not a type parameter or abstract type, the type `Array[T]`
+is represented as the array type `|T|[]` in the
+underlying host system, where `|T|` is the erasure of `T`.
+If $T$ is a type parameter or abstract type, a different representation might be
+used (it is `Object` on the Java platform).
+
+#### Operations
+
+`length` returns the length of the array, `apply` means subscripting,
+and `update` means element update.
+
+Because of the syntactic sugar for `apply` and `update` operations,
+we have the following correspondences between Scala and Java code for
+operations on an array `xs`:
+
+|_Scala_           |_Java_      |
+|------------------|------------|
+|`xs.length`       |`xs.length` |
+|`xs(i)`           |`xs[i]`     |
+|`xs(i) = e`       |`xs[i] = e` |
+
+Two implicit conversions exist in `Predef` that are frequently applied to arrays:
+a conversion to `scala.collection.mutable.ArrayOps` and a conversion to
+`scala.collection.mutable.WrappedArray` (a subtype of `scala.collection.Seq`).
+
+Both types make many of the standard operations found in the Scala
+collections API available. The conversion to `ArrayOps` is temporary, as all operations
+defined on `ArrayOps` return a value of type `Array`, while the conversion to `WrappedArray`
+is permanent as all operations return a value of type `WrappedArray`.
+The conversion to `ArrayOps` takes priority over the conversion to `WrappedArray`.
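+
+For example (an illustrative sketch):
+
+```scala
+val xs = Array(1, 2, 3)
+val doubled: Array[Int] = xs.map(_ * 2)   // via ArrayOps: the result is again an Array
+val asSeq: Seq[Int]     = xs              // via WrappedArray: the Array seen as a Seq
+```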
+
+Because of the tension between parametrized types in Scala and the ad-hoc
+implementation of arrays in the host-languages, some subtle points
+need to be taken into account when dealing with arrays. These are
+explained in the following.
+
+#### Variance
+
+Unlike arrays in Java, arrays in Scala are _not_
+covariant; that is, $S <: T$ does not imply
+`Array[$S$] $<:$ Array[$T$]` in Scala.  
+However, it is possible to cast an array
+of $S$ to an array of $T$ if such a cast is permitted in the host
+environment.
+
+For instance `Array[String]` does not conform to
+`Array[Object]`, even though `String` conforms to `Object`.
+However, it is possible to cast an expression of type
+`Array[String]` to `Array[Object]`, and this
+cast will succeed without raising a `ClassCastException`. Example:
+
+```scala
+val xs = new Array[String](2)
+// val ys: Array[Object] = xs   // **** error: incompatible types
+val ys: Array[Object] = xs.asInstanceOf[Array[Object]] // OK
+```
+
+The instantiation of an array with a polymorphic element type $T$ requires
+information about type $T$ at runtime.
+This information is synthesized by adding a [context bound](07-implicit-parameters-and-views.html#context-bounds-and-view-bounds)
+of `scala.reflect.ClassTag` to type $T$.
+An example is the
+following implementation of method `mkArray`, which creates
+an array of an arbitrary type $T$, given a sequence of $T$s which
+defines its elements:
+
+```scala
+import reflect.ClassTag
+def mkArray[T : ClassTag](elems: Seq[T]): Array[T] = {
+  val result = new Array[T](elems.length)
+  var i = 0
+  for (elem <- elems) {
+    result(i) = elem
+    i += 1
+  }
+  result
+}
+```
+
+If type $T$ is a type for which the host platform offers a specialized array
+representation, this representation is used.
+
+###### Example
+On the Java Virtual Machine, an invocation of `mkArray(List(1,2,3))`
+will return a primitive array of `int`s, written as `int[]` in Java.
+
+#### Companion object
+
+`Array`'s companion object provides various factory methods for the
+instantiation of single- and multi-dimensional arrays, an extractor method
+[`unapplySeq`](08-pattern-matching.html#extractor-patterns) which enables pattern matching
+over arrays and additional utility methods:
+
+```scala
+package scala
+object Array { 
+  /** copies array elements from `src` to `dest`. */
+  def copy(src: AnyRef, srcPos: Int,
+           dest: AnyRef, destPos: Int, length: Int): Unit = $\ldots$
+
+  /** Returns an array of length 0 */
+  def empty[T: ClassTag]: Array[T] = $\ldots$
+
+  /** Create an array with given elements. */
+  def apply[T: ClassTag](xs: T*): Array[T] = $\ldots$
+
+  /** Creates array with given dimensions */
+  def ofDim[T: ClassTag](n1: Int): Array[T] = $\ldots$
+  /** Creates a 2-dimensional array */
+  def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = $\ldots$
+  $\ldots$
+
+  /** Concatenate all argument arrays into a single array. */
+  def concat[T: ClassTag](xss: Array[T]*): Array[T] = $\ldots$
+
+  /** Returns an array that contains the results of some element computation a number
+    * of times. */
+  def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = $\ldots$
+  /** Returns a two-dimensional array that contains the results of some element
+    * computation a number of times. */
+  def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = $\ldots$
+  $\ldots$
+
+  /** Returns an array containing values of a given function over a range of integer
+    * values starting from 0. */
+  def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = $\ldots$
+  /** Returns a two-dimensional array containing values of a given function
+    * over ranges of integer values starting from `0`. */
+  def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = $\ldots$
+  $\ldots$
+
+  /** Returns an array containing a sequence of increasing integers in a range. */
+  def range(start: Int, end: Int): Array[Int] = $\ldots$
+  /** Returns an array containing equally spaced values in some integer interval. */
+  def range(start: Int, end: Int, step: Int): Array[Int] = $\ldots$
+
+  /** Returns an array containing repeated applications of a function to a start value. */
+  def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = $\ldots$
+
+  /** Enables pattern matching over arrays */
+  def unapplySeq[A](x: Array[A]): Option[IndexedSeq[A]] = Some(x)
+}
+```
+
+## Class `Node`
+
+```scala
+package scala.xml 
+
+trait Node {
+
+  /** the label of this node */
+  def label: String               
+
+  /** attribute axis */
+  def attribute: Map[String, String] 
+
+  /** child axis (all children of this node) */
+  def child: Seq[Node]          
+
+  /** descendant axis (all descendants of this node) */
+  def descendant: Seq[Node] = child.toList.flatMap { 
+    x => x::x.descendant.asInstanceOf[List[Node]] 
+  } 
+
+  /** descendant axis (all descendants of this node) */
+  def descendant_or_self: Seq[Node] = this::child.toList.flatMap { 
+    x => x::x.descendant.asInstanceOf[List[Node]] 
+  } 
+
+  override def equals(x: Any): Boolean = x match {
+    case that:Node => 
+      that.label == this.label && 
+        that.attribute.sameElements(this.attribute) && 
+          that.child.sameElements(this.child)
+    case _ => false
+  } 
+
+  /** XPath style projection function. Returns all children of this node
+   *  that are labeled with 'that'. The document order is preserved.
+   */
+  def \(that: Symbol): NodeSeq = {
+    new NodeSeq({
+      that.name match {
+        case "_" => child.toList
+        case _ =>
+          var res: List[Node] = Nil
+          for (x <- child.iterator if x.label == that.name) {
+            res = x :: res
+          }
+          res.reverse
+      }
+    })
+  }
+
+  /** XPath style projection function. Returns all nodes labeled with the
+   *  name 'that' from the 'descendant_or_self' axis. Document order is preserved.
+   */
+  def \\(that: Symbol): NodeSeq = {
+    new NodeSeq(
+      that.name match {
+        case "_" => this.descendant_or_self
+        case _ => this.descendant_or_self.asInstanceOf[List[Node]].
+          filter(x => x.label == that.name)
+      })
+  }
+
+  /** hashcode for this XML node */
+  override def hashCode = 
+    Utility.hashCode(label, attribute.toList.hashCode, child) 
+
+  /** string representation of this node */
+  override def toString = Utility.toXML(this) 
+
+}
+```
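+
+For illustration, the analogous projections in the `scala.xml` library shipped
+with Scala 2.11 (where, unlike the simplified trait above, `\` and `\\` take a
+`String` label) can be used as follows, assuming the `scala-xml` module is on
+the classpath:
+
+```scala
+val doc = <a><b/><c><b/></c></a>
+
+val directChildren = doc \ "b"    // only the <b/> element directly under <a>
+val allDescendants = doc \\ "b"   // both <b/> elements, in document order
+println(allDescendants.length)    // prints 2
+```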
+
+
+## The `Predef` Object
+
+The `Predef` object defines standard functions and type aliases
+for Scala programs. It is always implicitly imported, so that all its
+defined members are available without qualification. Its definition
+for the JVM environment conforms to the following signature:
+
+```scala
+package scala
+object Predef {
+
+  // classOf ---------------------------------------------------------
+
+  /** Returns the runtime representation of a class type. */
+  def classOf[T]: Class[T] = null  
+   // this is a dummy, classOf is handled by compiler.
+
+  // Standard type aliases ---------------------------------------------
+
+  type String    = java.lang.String
+  type Class[T]  = java.lang.Class[T]
+
+  // Miscellaneous -----------------------------------------------------
+  
+  type Function[-A, +B] = Function1[A, B]
+
+  type Map[A, +B] = collection.immutable.Map[A, B]
+  type Set[A] = collection.immutable.Set[A]
+
+  val Map = collection.immutable.Map
+  val Set = collection.immutable.Set
+
+  // Manifest types, companions, and incantations for summoning ---------
+
+  type ClassManifest[T] = scala.reflect.ClassManifest[T]
+  type Manifest[T]      = scala.reflect.Manifest[T]
+  type OptManifest[T]   = scala.reflect.OptManifest[T]
+  val ClassManifest     = scala.reflect.ClassManifest
+  val Manifest          = scala.reflect.Manifest
+  val NoManifest        = scala.reflect.NoManifest
+  
+  def manifest[T](implicit m: Manifest[T])           = m
+  def classManifest[T](implicit m: ClassManifest[T]) = m
+  def optManifest[T](implicit m: OptManifest[T])     = m
+
+  // Minor variations on identity functions -----------------------------
+  def identity[A](x: A): A         = x    // @see `conforms` for the implicit version
+  def implicitly[T](implicit e: T) = e    // for summoning implicit values from the nether world
+  @inline def locally[T](x: T): T  = x    // to communicate intent and avoid unmoored statements
+
+  // Asserts, Preconditions, Postconditions -----------------------------
+
+  def assert(assertion: Boolean) {
+    if (!assertion)
+      throw new java.lang.AssertionError("assertion failed")
+  }
+
+  def assert(assertion: Boolean, message: => Any) {
+    if (!assertion)
+      throw new java.lang.AssertionError("assertion failed: " + message)
+  }
+
+  def assume(assumption: Boolean) {
+    if (!assumption)
+      throw new IllegalArgumentException("assumption failed")
+  }
+
+  def assume(assumption: Boolean, message: => Any) {
+    if (!assumption)
+      throw new IllegalArgumentException(message.toString)
+  }
+
+  def require(requirement: Boolean) {
+    if (!requirement)
+      throw new IllegalArgumentException("requirement failed")
+  }
+
+  def require(requirement: Boolean, message: => Any) {
+    if (!requirement)
+      throw new IllegalArgumentException("requirement failed: "+ message)
+  }
+```
+
+
+```scala
+  // tupling ---------------------------------------------------------
+
+  type Pair[+A, +B] = Tuple2[A, B]
+  object Pair {
+    def apply[A, B](x: A, y: B) = Tuple2(x, y)
+    def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x)
+  }
+
+  type Triple[+A, +B, +C] = Tuple3[A, B, C]
+  object Triple {
+    def apply[A, B, C](x: A, y: B, z: C) = Tuple3(x, y, z)
+    def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x)
+  }
+
+  // Printing and reading -----------------------------------------------
+
+  def print(x: Any) = Console.print(x)
+  def println() = Console.println()
+  def println(x: Any) = Console.println(x)
+  def printf(text: String, xs: Any*) = Console.print(text.format(xs: _*))
+
+  def readLine(): String = Console.readLine()
+  def readLine(text: String, args: Any*) = Console.readLine(text, args: _*)
+  def readBoolean() = Console.readBoolean()
+  def readByte() = Console.readByte()
+  def readShort() = Console.readShort()
+  def readChar() = Console.readChar()
+  def readInt() = Console.readInt()
+  def readLong() = Console.readLong()
+  def readFloat() = Console.readFloat()
+  def readDouble() = Console.readDouble()
+  def readf(format: String) = Console.readf(format)
+  def readf1(format: String) = Console.readf1(format)
+  def readf2(format: String) = Console.readf2(format)
+  def readf3(format: String) = Console.readf3(format)
+
+  // Implicit conversions -----------------------------------------------
+
+  ...
+}
+```
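+
+For illustration (an informal sketch, not part of `Predef` itself), the
+preconditions above typically guard a method's arguments, `assert` checks
+internal invariants, and the printing helpers delegate to `Console`:
+
+```scala
+def mean(xs: List[Double]): Double = {
+  require(xs.nonEmpty, "mean of an empty list")  // IllegalArgumentException on failure
+  val m = xs.sum / xs.size
+  assert(!m.isNaN)                               // AssertionError on failure (unless elided)
+  m
+}
+
+println("mean = " + mean(List(1.0, 2.0, 3.0)))   // mean = 2.0
+printf("mean = %.1f%n", mean(List(1.0, 2.0, 3.0)))
+```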
+
+
+### Predefined Implicit Definitions
+
+The `Predef` object also contains a number of implicit definitions, which are available by default (because `Predef` is implicitly imported).
+Implicit definitions come in two priorities. High-priority implicits are defined in the `Predef` object itself, whereas low-priority implicits are defined in a class that `Predef` inherits from. The rules of 
+static [overloading resolution](06-expressions.html#overloading-resolution)
+stipulate that, all other things being equal, implicit resolution 
+prefers high-priority implicits over low-priority ones.
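+
+For example (an illustrative sketch), both a high-priority and a low-priority
+conversion apply to `String`: the high-priority `augmentString` (to `StringOps`)
+is chosen for ordinary method calls, while the low-priority `wrapString` (to
+`WrappedString`) is used only where a `Seq[Char]` is explicitly required.
+
+```scala
+val upper = "abc".map(_.toUpper)   // via StringOps: upper is the String "ABC"
+val chars: Seq[Char] = "abc"       // via wrapString: a WrappedString
+```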
+
+The available low-priority implicits include definitions falling into the following categories.
+
+1.  For every primitive type, a wrapper that takes values of that type
+    to instances of a `runtime.Rich*` class. For instance, values of type `Int`
+    can be implicitly converted to instances of class `runtime.RichInt`.
+
+1.  For every array type with elements of primitive type, a wrapper that
+    takes the arrays of that type to instances of a `runtime.WrappedArray` class. For instance, values of type `Array[Float]` can be implicitly converted to instances of class `runtime.WrappedArray[Float]`.
+    There are also generic array wrappers that take elements
+    of type `Array[T]` for arbitrary `T` to `WrappedArray`s.
+
+1.  An implicit conversion from `String` to `WrappedString`.
+
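+For instance (illustrative only):
+
+```scala
+val larger = 3 max 5                        // 5: `max` is provided by the Int wrapper (RichInt)
+val floats: Seq[Float] = Array(1.0f, 2.0f)  // the Array[Float] is wrapped as a WrappedArray[Float]
+```
+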
+
+The available high-priority implicits include definitions falling into the following categories.
+
+  * An implicit wrapper that adds `ensuring` methods 
+    with the following overloaded variants to type `Any`.
+
+    ``` 
+    def ensuring(cond: Boolean): A = { assert(cond); x }
+    def ensuring(cond: Boolean, msg: Any): A = { assert(cond, msg); x }
+    def ensuring(cond: A => Boolean): A = { assert(cond(x)); x }
+    def ensuring(cond: A => Boolean, msg: Any): A = { assert(cond(x), msg); x }
+    ```
+
+  * An implicit wrapper that adds a `->` method with the following implementation
+    to type `Any`.
+
+    ``` 
+    def -> [B](y: B): (A, B) = (x, y)
+    ```
+
+  * For every array type with elements of primitive type, a wrapper that
+    takes the arrays of that type to instances of a `runtime.ArrayOps`
+    class. For instance, values of type `Array[Float]` can be implicitly
+    converted to instances of class `runtime.ArrayOps[Float]`.  There are
+    also generic array wrappers that take elements of type `Array[T]` for
+    arbitrary `T` to `ArrayOps`s.
+
+  * An implicit wrapper that adds `+` and `formatted` methods with the following
+    implementations to type `Any`.
+
+    ``` 
+    def +(other: String) = String.valueOf(self) + other
+    def formatted(fmtstr: String): String = fmtstr format self
+    ```
+
+  * Numeric primitive conversions that implement the transitive closure of the 
+    following mappings:
+
+    ```
+    Byte  -> Short
+    Short -> Int
+    Char  -> Int
+    Int   -> Long
+    Long  -> Float
+    Float -> Double
+    ```
+
+  * Boxing and unboxing conversions between primitive types and their boxed 
+    versions:
+
+    ```
+    Byte    <-> java.lang.Byte
+    Short   <-> java.lang.Short
+    Char    <-> java.lang.Character
+    Int     <-> java.lang.Integer
+    Long    <-> java.lang.Long
+    Float   <-> java.lang.Float
+    Double  <-> java.lang.Double
+    Boolean <-> java.lang.Boolean
+    ```
+
+  * An implicit definition that generates instances of type `T <:< T`, for
+    any type `T`. Here, `<:<` is a class defined as follows.
+
+    ``` 
+    sealed abstract class <:<[-From, +To] extends (From => To)
+    ```
+
+    Implicit parameters of `<:<` types are typically used to implement type constraints.
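+
+A few illustrative uses of these high-priority implicits (assuming only the
+definitions described above):
+
+```scala
+val pair    = "one" -> 1                                  // (String, Int) = (one,1)
+val checked = List(1, 2, 3).ensuring(_.nonEmpty, "empty") // passes, returns the list
+val rounded = 3.14159.formatted("%.2f")                   // "3.14"
+val widened: Long = 42                                    // numeric conversion Int -> Long
+val ev      = implicitly[Int <:< AnyVal]                  // evidence that Int conforms to AnyVal
+```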
+
diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md
new file mode 100644
index 0000000..3eecc26
--- /dev/null
+++ b/spec/13-syntax-summary.md
@@ -0,0 +1,311 @@
+---
+title: Syntax Summary
+layout: default
+chapter: 13
+---
+
+# Syntax Summary
+
+The following descriptions of Scala tokens use literal characters `‘c’` when referring to the ASCII fragment `\u0000` – `\u007F`.
+
+_Unicode escapes_ are used to represent the Unicode character with the given hexadecimal code:
+
+```ebnf
+UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit
+hexDigit      ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’
+```
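+
+For illustration, the following string literals denote the same three-character
+string, since each escape stands for the Unicode character with the given code:
+
+```scala
+val plain   = "ABC"
+val escaped = "\u0041\u0042\u0043"   // also "ABC"
+```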
+
+The lexical syntax of Scala is given by the following grammar in EBNF form:
+
+```ebnf
+whiteSpace       ::=  ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’
+upper            ::=  ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’  // and Unicode category Lu
+lower            ::=  ‘a’ | … | ‘z’ // and Unicode category Ll
+letter           ::=  upper | lower // and Unicode categories Lo, Lt, Nl
+digit            ::=  ‘0’ | … | ‘9’
+paren            ::=  ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’
+delim            ::=  ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’
+opchar           ::= // printableChar not matched by (whiteSpace | upper | lower |
+                     // letter | digit | paren | delim), plus Unicode categories Sm and So
+printableChar    ::= // all characters in [\u0020, \u007F] inclusive
+charEscapeSeq    ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’)
+
+op               ::=  opchar {opchar} 
+varid            ::=  lower idrest
+plainid          ::=  upper idrest
+                 |  varid
+                 |  op
+id               ::=  plainid
+                 |  ‘`’ stringLiteral ‘`’
+idrest           ::=  {letter | digit} [‘_’ op]
+
+integerLiteral   ::=  (decimalNumeral | hexNumeral) [‘L’ | ‘l’]
+decimalNumeral   ::=  ‘0’ | nonZeroDigit {digit}
+hexNumeral       ::=  ‘0’ ‘x’ hexDigit {hexDigit}
+digit            ::=  ‘0’ | nonZeroDigit
+nonZeroDigit     ::=  ‘1’ | … | ‘9’
+
+floatingPointLiteral 
+                 ::=  digit {digit} ‘.’ digit {digit} [exponentPart] [floatType]
+                 |  ‘.’ digit {digit} [exponentPart] [floatType]
+                 |  digit {digit} exponentPart [floatType]
+                 |  digit {digit} [exponentPart] floatType
+exponentPart     ::=  (‘E’ | ‘e’) [‘+’ | ‘-’] digit {digit}
+floatType        ::=  ‘F’ | ‘f’ | ‘D’ | ‘d’
+
+booleanLiteral   ::=  ‘true’ | ‘false’
+
+characterLiteral ::=  ‘'’ (printableChar | charEscapeSeq) ‘'’
+
+stringLiteral    ::=  ‘"’ {stringElement} ‘"’
+                 |  ‘"""’ multiLineChars ‘"""’
+stringElement    ::=  (printableChar except ‘"’)
+                 |  charEscapeSeq
+multiLineChars   ::=  {[‘"’] [‘"’] charNoDoubleQuote} {‘"’}
+
+symbolLiteral    ::=  ‘'’ plainid
+
+comment          ::=  ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’
+                 |  ‘//’ “any sequence of characters up to end of line”
+
+nl               ::=  $\mathit{“new line character”}$
+semi             ::=  ‘;’ |  nl {nl}
+```
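+
+For illustration, here are terms matching several of these token classes:
+
+```scala
+val i   = 42            // integerLiteral (decimal)
+val h   = 0x2A          // integerLiteral (hexadecimal)
+val l   = 42L           // integerLiteral with a Long suffix
+val d   = 1.5e3         // floatingPointLiteral
+val f   = 2.5f          // floatingPointLiteral with a Float suffix
+val b   = true          // booleanLiteral
+val c   = 'A'           // characterLiteral
+val s   = "hello"       // stringLiteral
+val sym = 'label        // symbolLiteral
+```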
+
+The context-free syntax of Scala is given by the following EBNF
+grammar.
+
+```ebnf
+  Literal           ::=  [‘-’] integerLiteral
+                      |  [‘-’] floatingPointLiteral
+                      |  booleanLiteral
+                      |  characterLiteral
+                      |  stringLiteral
+                      |  symbolLiteral
+                      |  ‘null’
+
+  QualId            ::=  id {‘.’ id}
+  ids               ::=  id {‘,’ id}
+
+  Path              ::=  StableId
+                      |  [id ‘.’] ‘this’
+  StableId          ::=  id
+                      |  Path ‘.’ id
+                      |  [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id
+  ClassQualifier    ::=  ‘[’ id ‘]’
+
+  Type              ::=  FunctionArgTypes ‘=>’ Type
+                      |  InfixType [ExistentialClause]
+  FunctionArgTypes  ::= InfixType
+                      | ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’
+  ExistentialClause ::=  ‘forSome’ ‘{’ ExistentialDcl {semi ExistentialDcl} ‘}’
+  ExistentialDcl    ::=  ‘type’ TypeDcl 
+                      |  ‘val’ ValDcl
+  InfixType         ::=  CompoundType {id [nl] CompoundType}
+  CompoundType      ::=  AnnotType {‘with’ AnnotType} [Refinement]
+                      |  Refinement
+  AnnotType         ::=  SimpleType {Annotation}
+  SimpleType        ::=  SimpleType TypeArgs
+                      |  SimpleType ‘#’ id
+                      |  StableId
+                      |  Path ‘.’ ‘type’
+                      |  ‘(’ Types ‘)’
+  TypeArgs          ::=  ‘[’ Types ‘]’
+  Types             ::=  Type {‘,’ Type}
+  Refinement        ::=  [nl] ‘{’ RefineStat {semi RefineStat} ‘}’
+  RefineStat        ::=  Dcl
+                      |  ‘type’ TypeDef
+                      |
+  TypePat           ::=  Type
+
+  Ascription        ::=  ‘:’ InfixType
+                      |  ‘:’ Annotation {Annotation} 
+                      |  ‘:’ ‘_’ ‘*’
+
+  Expr              ::=  (Bindings | [‘implicit’] id | ‘_’) ‘=>’ Expr
+                      |  Expr1
+  Expr1             ::=  ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr]
+                      |  ‘while’ ‘(’ Expr ‘)’ {nl} Expr
+                      |  ‘try’ (‘{’ Block ‘}’ | Expr) [‘catch’ ‘{’ CaseClauses ‘}’] [‘finally’ Expr]
+                      |  ‘do’ Expr [semi] ‘while’ ‘(’ Expr ‘)’
+                      |  ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) {nl} [‘yield’] Expr
+                      |  ‘throw’ Expr
+                      |  ‘return’ [Expr]
+                      |  [SimpleExpr ‘.’] id ‘=’ Expr
+                      |  SimpleExpr1 ArgumentExprs ‘=’ Expr
+                      |  PostfixExpr
+                      |  PostfixExpr Ascription
+                      |  PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’
+  PostfixExpr       ::=  InfixExpr [id [nl]]
+  InfixExpr         ::=  PrefixExpr
+                      |  InfixExpr id [nl] InfixExpr
+  PrefixExpr        ::=  [‘-’ | ‘+’ | ‘~’ | ‘!’] SimpleExpr 
+  SimpleExpr        ::=  ‘new’ (ClassTemplate | TemplateBody)
+                      |  BlockExpr
+                      |  SimpleExpr1 [‘_’]
+  SimpleExpr1       ::=  Literal
+                      |  Path
+                      |  ‘_’
+                      |  ‘(’ [Exprs] ‘)’
+                      |  SimpleExpr ‘.’ id 
+                      |  SimpleExpr TypeArgs
+                      |  SimpleExpr1 ArgumentExprs
+                      |  XmlExpr
+  Exprs             ::=  Expr {‘,’ Expr}
+  ArgumentExprs     ::=  ‘(’ [Exprs] ‘)’
+                      |  ‘(’ [Exprs ‘,’] PostfixExpr ‘:’ ‘_’ ‘*’ ‘)’
+                      |  [nl] BlockExpr
+  BlockExpr         ::=  ‘{’ CaseClauses ‘}’
+                      |  ‘{’ Block ‘}’
+  Block             ::=  BlockStat {semi BlockStat} [ResultExpr]
+  BlockStat         ::=  Import
+                      |  {Annotation} [‘implicit’ | ‘lazy’] Def
+                      |  {Annotation} {LocalModifier} TmplDef
+                      |  Expr1
+                      |
+  ResultExpr        ::=  Expr1
+                      |  (Bindings | ([‘implicit’] id | ‘_’) ‘:’ CompoundType) ‘=>’ Block
+
+  Enumerators       ::=  Generator {semi Generator}
+  Generator         ::=  Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr}
+
+  CaseClauses       ::=  CaseClause { CaseClause }
+  CaseClause        ::=  ‘case’ Pattern [Guard] ‘=>’ Block 
+  Guard             ::=  ‘if’ PostfixExpr
+
+  Pattern           ::=  Pattern1 { ‘|’ Pattern1 }
+  Pattern1          ::=  varid ‘:’ TypePat
+                      |  ‘_’ ‘:’ TypePat
+                      |  Pattern2
+  Pattern2          ::=  varid [‘@’ Pattern3]
+                      |  Pattern3
+  Pattern3          ::=  SimplePattern
+                      |  SimplePattern { id [nl] SimplePattern }
+  SimplePattern     ::=  ‘_’
+                      |  varid
+                      |  Literal
+                      |  StableId
+                      |  StableId ‘(’ [Patterns] ‘)’
+                      |  StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ‘*’ ‘)’
+                      |  ‘(’ [Patterns] ‘)’
+                      |  XmlPattern
+  Patterns          ::=  Pattern [‘,’ Patterns]
+                      |  ‘_’ *
+
+  TypeParamClause   ::=  ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’
+  FunTypeParamClause::=  ‘[’ TypeParam {‘,’ TypeParam} ‘]’
+  VariantTypeParam  ::=  {Annotation} [‘+’ | ‘-’] TypeParam
+  TypeParam         ::=  (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] 
+                         {‘<%’ Type} {‘:’ Type}
+  ParamClauses      ::=  {ParamClause} [[nl] ‘(’ ‘implicit’ Params ‘)’]
+  ParamClause       ::=  [nl] ‘(’ [Params] ‘)’
+  Params            ::=  Param {‘,’ Param}
+  Param             ::=  {Annotation} id [‘:’ ParamType] [‘=’ Expr]
+  ParamType         ::=  Type 
+                      |  ‘=>’ Type 
+                      |  Type ‘*’
+  ClassParamClauses ::=  {ClassParamClause} 
+                         [[nl] ‘(’ ‘implicit’ ClassParams ‘)’]
+  ClassParamClause  ::=  [nl] ‘(’ [ClassParams] ‘)’
+  ClassParams       ::=  ClassParam {‘,’ ClassParam}
+  ClassParam        ::=  {Annotation} {Modifier} [(‘val’ | ‘var’)]
+                         id ‘:’ ParamType [‘=’ Expr]
+  Bindings          ::=  ‘(’ Binding {‘,’ Binding} ‘)’
+  Binding           ::=  (id | ‘_’) [‘:’ Type]
+
+  Modifier          ::=  LocalModifier 
+                      |  AccessModifier
+                      |  ‘override’
+  LocalModifier     ::=  ‘abstract’
+                      |  ‘final’
+                      |  ‘sealed’
+                      |  ‘implicit’
+                      |  ‘lazy’
+  AccessModifier    ::=  (‘private’ | ‘protected’) [AccessQualifier]
+  AccessQualifier   ::=  ‘[’ (id | ‘this’) ‘]’
+
+  Annotation        ::=  ‘@’ SimpleType {ArgumentExprs}
+  ConstrAnnotation  ::=  ‘@’ SimpleType ArgumentExprs
+
+  TemplateBody      ::=  [nl] ‘{’ [SelfType] TemplateStat {semi TemplateStat} ‘}’
+  TemplateStat      ::=  Import
+                      |  {Annotation [nl]} {Modifier} Def
+                      |  {Annotation [nl]} {Modifier} Dcl
+                      |  Expr
+                      |
+  SelfType          ::=  id [‘:’ Type] ‘=>’
+                      |  ‘this’ ‘:’ Type ‘=>’ 
+
+  Import            ::=  ‘import’ ImportExpr {‘,’ ImportExpr}
+  ImportExpr        ::=  StableId ‘.’ (id | ‘_’ | ImportSelectors)
+  ImportSelectors   ::=  ‘{’ {ImportSelector ‘,’} (ImportSelector | ‘_’) ‘}’
+  ImportSelector    ::=  id [‘=>’ id | ‘=>’ ‘_’]
+
+  Dcl               ::=  ‘val’ ValDcl
+                      |  ‘var’ VarDcl
+                      |  ‘def’ FunDcl
+                      |  ‘type’ {nl} TypeDcl
+
+  ValDcl            ::=  ids ‘:’ Type
+  VarDcl            ::=  ids ‘:’ Type
+  FunDcl            ::=  FunSig [‘:’ Type]
+  FunSig            ::=  id [FunTypeParamClause] ParamClauses
+  TypeDcl           ::=  id [TypeParamClause] [‘>:’ Type] [‘<:’ Type]
+
+  PatVarDef         ::=  ‘val’ PatDef
+                      |  ‘var’ VarDef
+  Def               ::=  PatVarDef
+                      |  ‘def’ FunDef
+                      |  ‘type’ {nl} TypeDef
+                      |  TmplDef
+  PatDef            ::=  Pattern2 {‘,’ Pattern2} [‘:’ Type] ‘=’ Expr
+  VarDef            ::=  PatDef
+                      |  ids ‘:’ Type ‘=’ ‘_’
+  FunDef            ::=  FunSig [‘:’ Type] ‘=’ Expr
+                      |  FunSig [nl] ‘{’ Block ‘}’
+                      |  ‘this’ ParamClause ParamClauses 
+                         (‘=’ ConstrExpr | [nl] ConstrBlock)
+  TypeDef           ::=  id [TypeParamClause] ‘=’ Type
+
+  TmplDef           ::=  [‘case’] ‘class’ ClassDef
+                      |  [‘case’] ‘object’ ObjectDef
+                      |  ‘trait’ TraitDef
+  ClassDef          ::=  id [TypeParamClause] {ConstrAnnotation} [AccessModifier] 
+                         ClassParamClauses ClassTemplateOpt 
+  TraitDef          ::=  id [TypeParamClause] TraitTemplateOpt
+  ObjectDef         ::=  id ClassTemplateOpt
+  ClassTemplateOpt  ::=  ‘extends’ ClassTemplate | [[‘extends’] TemplateBody]
+  TraitTemplateOpt  ::=  ‘extends’ TraitTemplate | [[‘extends’] TemplateBody]
+  ClassTemplate     ::=  [EarlyDefs] ClassParents [TemplateBody]
+  TraitTemplate     ::=  [EarlyDefs] TraitParents [TemplateBody]
+  ClassParents      ::=  Constr {‘with’ AnnotType}
+  TraitParents      ::=  AnnotType {‘with’ AnnotType}
+  Constr            ::=  AnnotType {ArgumentExprs}
+  EarlyDefs         ::= ‘{’ [EarlyDef {semi EarlyDef}] ‘}’ ‘with’
+  EarlyDef          ::=  {Annotation [nl]} {Modifier} PatVarDef
+
+  ConstrExpr        ::=  SelfInvocation 
+                      |  ConstrBlock
+  ConstrBlock       ::=  ‘{’ SelfInvocation {semi BlockStat} ‘}’
+  SelfInvocation    ::=  ‘this’ ArgumentExprs {ArgumentExprs}
+
+  TopStatSeq        ::=  TopStat {semi TopStat}
+  TopStat           ::=  {Annotation [nl]} {Modifier} TmplDef
+                      |  Import
+                      |  Packaging
+                      |  PackageObject
+                      |  
+  Packaging         ::=  ‘package’ QualId [nl] ‘{’ TopStatSeq ‘}’
+  PackageObject     ::=  ‘package’ ‘object’ ObjectDef
+
+  CompilationUnit   ::=  {‘package’ QualId semi} TopStatSeq
+```
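+
+For illustration, a small compilation unit that exercises several of the
+productions above (`Packaging`, `Import`, `TmplDef`, `FunDef`, `PatVarDef`,
+`CaseClauses`):
+
+```scala
+package examples {
+  import scala.collection.immutable.List
+
+  case class Point(x: Int, y: Int)
+
+  object Geometry {
+    val origin: Point = Point(0, 0)
+
+    def vertices: List[Point] = List(origin)
+
+    def quadrant(p: Point): Int = p match {
+      case Point(x, y) if x >= 0 && y >= 0 => 1
+      case Point(x, y) if x <  0 && y >= 0 => 2
+      case Point(x, y) if x <  0 && y <  0 => 3
+      case _                               => 4
+    }
+  }
+}
+```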
+
+<!-- TODO add:
+SeqPattern ::= ...
+
+SimplePattern    ::= StableId  [TypePatArgs] [‘(’ [SeqPatterns] ‘)’]
+TypePatArgs ::= ‘[’ TypePatArg {‘,’ TypePatArg} ‘]’
+TypePatArg    ::=  ‘_’ |   varid}
+
+-->
diff --git a/spec/14-references.md b/spec/14-references.md
new file mode 100644
index 0000000..8c169b9
--- /dev/null
+++ b/spec/14-references.md
@@ -0,0 +1,213 @@
+---
+title: References
+layout: default
+chapter: 14
+---
+
+
+# References
+
+TODO (see comments in markdown source)
+
+<!-- TODO
+
+provide a nice reading list to get up to speed with theory,...
+
+## Scala's Foundations
+[@scala-overview-tech-report;
+ at odersky:scala-experiment;
+ at odersky:sca;
+ at odersky-et-al:ecoop03;
+ at odersky-zenger:fool12]
+
+## Learning Scala
+
+## Related Work
+
+%% Article
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+ at article{milner:polymorphism,
+  author	= {Robin Milner},
+  title		= {A {T}heory of {T}ype {P}olymorphism in {P}rogramming},
+  journal	= {Journal of Computer and System Sciences},
+  year		= {1978},
+  month		= {Dec},
+  volume	= {17},
+  pages		= {348-375},
+  folder	= { 2-1}
+}
+
+ at Article{wirth:ebnf,
+  author	= "Niklaus Wirth",
+  title		= "What can we do about the unnecessary diversity of notation
+for syntactic definitions?",
+  journal	= "Comm. ACM",
+  year		= 1977,
+  volume	= 20,
+  pages		= "822-823",
+  month		= nov
+}
+
+
+%% Book
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+ at Book{abelson-sussman:structure,
+  author	= {Harold Abelson and Gerald Jay Sussman and Julie Sussman},
+  title		= {The Structure and Interpretation of Computer Programs, 2nd
+                  edition},
+  publisher	= {MIT Press},
+  address	= {Cambridge, Massachusetts},
+  year		= {1996},
+  url		= {http://mitpress.mit.edu/sicp/full-text/sicp/book/book.html}
+}
+
+ at Book{goldberg-robson:smalltalk-language,
+  author	= "Adele Goldberg and David Robson",
+  title		= "{Smalltalk-80}; The {L}anguage and Its {I}mplementation",
+  publisher	= "Addison-Wesley",
+  year		= "1983",
+  note		= "ISBN 0-201-11371-6"
+}
+
+ at Book{matsumtoto:ruby,
+  author	= {Yukihiro Matsumoto},
+  title		= {Ruby in a {N}utshell},
+  publisher	= {O'Reilly \& Associates},
+  year		= "2001",
+  month		= "nov",
+  note		= "ISBN 0-596-00214-9"
+}
+
+ at Book{rossum:python,
+  author	= {Guido van Rossum and Fred L. Drake},
+  title		= {The {P}ython {L}anguage {R}eference {M}anual},
+  publisher	= {Network Theory Ltd},
+  year		= "2003",
+  month		= "sep",
+  note		= {ISBN 0-954-16178-5\hspace*{\fill}\\
+                  \verb at http://www.python.org/doc/current/ref/ref.html@}
+}
+
+ at Manual{odersky:scala-reference,
+  title =        {The {S}cala {L}anguage {S}pecification, Version 2.4},
+  author =       {Martin Odersky},
+  organization = {EPFL},
+  month =        feb,
+  year =         2007,
+  note =         {http://www.scala-lang.org/docu/manuals.html}
+}
+
+
+%% InProceedings
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+ at InProceedings{odersky-et-al:fool10,
+  author	= {Martin Odersky and Vincent Cremet and Christine R\"ockl
+                  and Matthias Zenger},
+  title		= {A {N}ominal {T}heory of {O}bjects with {D}ependent {T}ypes},
+  booktitle	= {Proc. FOOL 10},
+  year		= 2003,
+  month		= jan,
+  note		= {\hspace*{\fill}\\
+                  \verb at http://www.cis.upenn.edu/~bcpierce/FOOL/FOOL10.html@}
+}
+
+
+%% Misc
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+ at Misc{w3c:dom,
+  author	= {W3C},
+  title		= {Document Object Model ({DOM})},
+  howpublished	= {\hspace*{\fill}\\
+                  \verb at http://www.w3.org/DOM/@}
+}
+
+ at Misc{w3c:xml,
+  author	= {W3C},
+  title		= {Extensible {M}arkup {L}anguage ({XML})},
+  howpublished	= {\hspace*{\fill}\\
+                  \verb at http://www.w3.org/TR/REC-xml@}
+}
+
+ at TechReport{scala-overview-tech-report,
+  author =       {Martin Odersky and al.},
+  title =        {An {O}verview of the {S}cala {P}rogramming {L}anguage},
+  institution =  {EPFL Lausanne, Switzerland},
+  year =         2004,
+  number =       {IC/2004/64}
+}
+
+ at InProceedings{odersky:sca,
+  author =       {Martin Odersky and Matthias Zenger},
+  title =        {Scalable {C}omponent {A}bstractions},
+  booktitle =    {Proc. OOPSLA},
+  year =         2005
+}
+
+ at InProceedings{odersky-et-al:ecoop03,
+  author =       {Martin Odersky and Vincent Cremet and Christine R\"ockl and Matthias Zenger},
+  title =        {A {N}ominal {T}heory of {O}bjects with {D}ependent {T}ypes},
+  booktitle =    {Proc. ECOOP'03},
+  year =         2003,
+  month =        jul,
+  series =       {Springer LNCS}
+}
+
+ at InCollection{cremet-odersky:pilib,
+  author =       {Vincent Cremet and Martin Odersky},
+  title =        {{PiLib} - A {H}osted {L}anguage for {P}i-{C}alculus {S}tyle {C}oncurrency},
+  booktitle =    {Domain-Specific Program Generation},
+  publisher =    {Springer},
+  year =         2005,
+  volume =       3016,
+  series =       {Lecture Notes in Computer Science}
+}
+
+ at InProceedings{odersky-zenger:fool12,
+  author =       {Martin Odersky and Matthias Zenger},
+  title =        {Independently {E}xtensible {S}olutions to the {E}xpression {P}roblem},
+  booktitle =    {Proc. FOOL 12},
+  year =         2005,
+  month =        jan,
+  note =         {\verb at http://homepages.inf.ed.ac.uk/wadler/fool@}
+}
+
+ at InProceedings{odersky:scala-experiment,
+  author =       {Martin Odersky},
+  title =        {The {S}cala {E}xperiment - {C}an {W}e {P}rovide {B}etter {L}anguage {S}upport for {C}omponent {S}ystems?},
+  booktitle =    {Proc. ACM Symposium on Principles of Programming Languages},
+  year =         2006
+}
+
+ at MISC{kennedy-pierce:decidable,
+  author = {Andrew J. Kennedy and Benjamin C. Pierce},
+  title = {On {D}ecidability of {N}ominal {S}ubtyping with {V}ariance},
+  year = {2007},
+  month = jan,
+  note = {FOOL-WOOD '07},
+  short = {http://www.cis.upenn.edu/~bcpierce/papers/variance.pdf}
+}
+
+
+
+-->
\ No newline at end of file
diff --git a/spec/README.md b/spec/README.md
new file mode 100644
index 0000000..4bba86f
--- /dev/null
+++ b/spec/README.md
@@ -0,0 +1,43 @@
+# Scala Language Reference
+
+First of all, the language specification is meant to be correct, precise and clear.
+
+Second, editing, previewing and generating output for the markdown should be simple and easy.
+
+Third, we'd like to support different output formats. An html page per chapter with MathJax seems like a good start, as it satisfies the second requirement, and enables the first one.
+
+## Editing
+
+We use redcarpet 3.1 and Jekyll 2 (currently in alpha) to generate the HTML. Essentially, this is what GitHub Pages uses.
+
+## Building
+
+Travis CI builds the spec automatically on every commit to master and publishes to http://www.scala-lang.org/files/archive/spec/2.11/. 
+
+To preview locally, run `bundle exec jekyll serve -d build/spec/ -s spec/ -w --baseurl=""` (in the root of your checkout of scala/scala),
+and open http://0.0.0.0:4000/. Jekyll will rebuild as you edit the markdown, but make sure to restart it when you change `_config.yml`.
+
+
+## General Advice for editors
+
+- All files must be saved as UTF-8: ensure your editors are configured appropriately.
+- Use the appropriate Unicode characters instead of the LaTeX modifiers for accents, etc.; for example, é instead of `\'e`.
+- MathJax errors will appear within the rendered DOM as span elements with class `mtext` and the style attribute `color: red` applied. It is possible to search for this combination in the development tools of the browser of your choice. In Chrome, CTRL+F / CMD+F within the Inspect Element panel allows you to do this.
+
+
+### Macro replacements:
+
+- While MathJax does support LaTeX-style command definitions, it is recommended not to use them, as they will likely cause issues with preparing the document for PDF or ebook distribution.
+- `\SS` (which I could not find defined within the LaTeX source) seems to be closest to `\mathscr{S}`.
+- `\TYPE` is equivalent to `\boldsymbol{type}`.
+- As MathJax has no support for slanted font (LaTeX command \sl), in all instances this should be replaced with \mathit{}.
+- The macro \U{ABCD} used for Unicode character references can be replaced with \\uABCD.
+- The macro \URange{ABCD}{DCBA} used for Unicode character ranges can be replaced with \\uABCD-\\uDBCA.
+- The macro \commadots can be replaced with ` , … , `.
+- There is no adequate replacement for `\textsc{...}` (small caps) in pandoc markdown. While Unicode contains a number of small capital letters, it is notably missing Q and X, as these glyphs are intended for phonetic spelling; therefore they cannot be reliably used. For now, the best option is to use underscore emphasis and capitalise the text manually, `_LIKE THIS_`.
+
+
+### Unicode Character replacements
+
+- The Unicode left and right single quotation marks (‘ and ’) have been used in place of ` and ', where the quotation marks are intended to be paired. These can be typed on a Mac using Option+] for a left quote and Option+Shift+] for the right quote.
+- Similarly for left and right double quotation marks (“ and ”) in place of ". These can be typed on a Mac using Option+[ and Option+Shift+].
diff --git a/spec/_config.yml b/spec/_config.yml
new file mode 100644
index 0000000..1052dde
--- /dev/null
+++ b/spec/_config.yml
@@ -0,0 +1,10 @@
+baseurl: /files/archive/spec/2.11
+safe: true
+lsi: false
+highlighter: null
+markdown: redcarpet
+encoding: utf-8
+redcarpet:
+  extensions: ["no_intra_emphasis", "fenced_code_blocks", "autolink", "tables", "with_toc_data", "strikethrough", "lax_spacing", "space_after_headers", "superscript", "footnotes"]
+# with_toc_data requires redcarpet 3.1 to get
+# pretty ID attributes for Hn headers (https://github.com/vmg/redcarpet/pull/186)
diff --git a/spec/_includes/numbering.css b/spec/_includes/numbering.css
new file mode 100644
index 0000000..86b9463
--- /dev/null
+++ b/spec/_includes/numbering.css
@@ -0,0 +1,56 @@
+/* based on http://philarcher.org/css/numberheadings.css */
+h1 {
+  /* must reset here */
+  counter-reset: chapter {{ page.chapter }};
+}
+h1:before {
+  /* and must reset again here */
+  counter-reset: chapter {{ page.chapter }};
+  content: "Chapter " counter(chapter);
+  display: block;
+}
+
+h2 {
+  /* must increment here */
+  counter-increment: section;
+  counter-reset: subsection;
+}
+h2:before {
+  /* and must reset again here */
+  counter-reset: chapter {{ page.chapter }};
+
+  content: counter(chapter) "." counter(section) ;
+  display: inline;
+  margin-right: 1em;
+}
+h2:after {
+  /* can only have one counter-reset per tag, so can't do it in h2/h2:before... */
+  counter-reset: example;
+}
+
+h3 {
+  /* must increment here */
+  counter-increment: subsection;
+}
+h3:before {
+  /* and must reset again here */
+  counter-reset: chapter {{ page.chapter }};
+
+  content: counter(chapter) "." counter(section) "." counter(subsection);
+  display: inline;
+  margin-right: 1em;
+}
+
+h3[id*='example'] {
+  /* must increment here */
+  counter-increment: example;
+  display: inline;
+}
+h3[id*='example']:before {
+  /* and must reset again here */
+  counter-reset: chapter {{ page.chapter }};
+
+  content: "Example " counter(chapter) "." counter(section) "." counter(example);
+  display: inline;
+  margin-right: 1em;
+}
diff --git a/spec/_layouts/default.yml b/spec/_layouts/default.yml
new file mode 100644
index 0000000..7f17ba3
--- /dev/null
+++ b/spec/_layouts/default.yml
@@ -0,0 +1,36 @@
+<!DOCTYPE html>
+<html>
+<head>
+  <meta http-equiv='Content-Type' content='text/html; charset=utf-8' />
+  <script type="text/x-mathjax-config">
+  MathJax.Hub.Config({
+    tex2jax: {
+      inlineMath: [['$','$'], ['\\(','\\)']],
+      skipTags: ['script', 'noscript', 'style', 'textarea'],
+      processEscapes: true
+    }
+  });
+  </script>
+  <script type="text/javascript" src="https://c328740.ssl.cf1.rackcdn.com/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
+  <script src="//ajax.googleapis.com/ajax/libs/jquery/1.11.0/jquery.min.js"></script>
+
+  <!-- need to use include to see value of page.chapter variable -->
+  <style type="text/css">
+    {% include numbering.css %}
+  </style>
+  <script type="text/javascript">
+    // clear content of H3 nodes that start with "Example:"
+    // the content is only there to determine ID of the H3 element (redcarpet doesn't let us set css id)
+    $( document ).ready(function(){ $("h3[id*='example']").text("") })
+  </script>
+
+  <link rel="stylesheet" type="text/css" href="public/stylesheets/screen.css">
+
+</head>
+
+<body>
+
+{{ content }}
+
+</body>
+</html>
\ No newline at end of file
diff --git a/spec/_layouts/toc.yml b/spec/_layouts/toc.yml
new file mode 100644
index 0000000..d77ea62
--- /dev/null
+++ b/spec/_layouts/toc.yml
@@ -0,0 +1,15 @@
+<!DOCTYPE html>
+<html>
+<head>
+  <meta http-equiv='Content-Type' content='text/html; charset=utf-8' />
+
+  <link rel="stylesheet" type="text/css" href="public/stylesheets/screen.css">
+
+</head>
+
+<body>
+
+{{ content }}
+
+</body>
+</html>
\ No newline at end of file
diff --git a/spec/id_dsa_travis.enc b/spec/id_dsa_travis.enc
new file mode 100644
index 0000000..a9a4036
--- /dev/null
+++ b/spec/id_dsa_travis.enc
@@ -0,0 +1,15 @@
+U2FsdGVkX1/RKhLZeL93vFQikKRRkoa3rqt6Kbs7cJStmcTI+DohoRUidRaeSULa
++xXQCwaSDs4+l1HdW2R4ZV62AVGhvIeKEZxc449c6qT9+wUd2PKkDghuJCy1dLTo
+2OdFLDeop0X32bsauzPQGWwrpb/Llck4KeKffJq2257Hu6T/HnzSfDnvXbjAsVeH
+ZLeXURAyDAdK9vFmFzFiEEztLkW8E3ZVyrk7Qa3GPNpmATiBdhVM8d0JJptKVgwQ
+mZfhbItLrj490sPd5zpUFKAxJjPoKIa75n/+u4butn+ON97vr7xOy6ElX7HSJUgr
+FJdVJgcO7lki0j+lfJVAP0zLnH80CgOkOJSq0Sso/ofs+lQIobo8fQqIdmoqV3z2
+KpYrgnqap1U2+ekIUKsUxk4LuO8uJhwPeMJs6FoDb+O4Aauqpy9242+P05gWkQVd
+KVWRcHVE7DulS8Fp/o5GXJUdw+rdxvQ/voJ8i0HbYpp6UcmQwBheQMSmqtp5+ML9
+rBiBe2sr7pahqI5NKoF3iZCkZW74ge3/GP2d6m2tpOzD+IfdFDXQ/r8DbK2Dvwvz
+eutOb0zrUtua2e2zvvpVxldPVpXA7A1hE0P3lns9o+TqNhEauTQimQ8/X51BHO6E
+Ap4odrf2odocacY5VC4LFYDO3vat0wSTpi6SxkemUMX5yB7euqwD3ZrMcbpPFR1B
+IU5XxW20NxUo8n+WuMUNkXTgk/Cr4OUiavVv4oLsHkmgD9LN3IYI6Rj/DSCzSbDx
+hyWc7R47iu9f5okQScx62DwVK3AyAuVWer94x0Kj8AcIRwU/VwiXjnZ59I89AKTN
+sjZJw1FfpJPqYs7fPtEiotUdaJHzJH8tiEWFrtOTuOg3h6fy0KJTPVh0WjcGXfb6
+Uh1SEgeHtMSUVhq8nd8LGQ==
diff --git a/spec/index.md b/spec/index.md
new file mode 100644
index 0000000..b47cb03
--- /dev/null
+++ b/spec/index.md
@@ -0,0 +1,71 @@
+---
+title: Scala Language Reference
+layout: toc
+---
+
+# The Scala Language Specification
+# Version 2.11
+
+### Martin Odersky, Philippe Altherr, Vincent Cremet, Gilles Dubochet, Burak Emir, Philipp Haller, Stéphane Micheloud, Nikolay Mihaylov, Adriaan Moors, Lukas Rytz, Michel Schinz, Erik Stenman, Matthias Zenger
+
+### Markdown Conversion by Iain McGinniss.
+
+## Table of Contents
+
+<ol>
+  {% assign sorted_pages = site.pages | sort:"name" %}
+  {% for post in sorted_pages %}
+    <!-- exclude this page from the toc, not sure how to check
+         whether there is no chapter variable in the page  -->
+    {% if post.chapter >= 0 %}
+      <li>
+        <a href="{{site.baseurl}}{{ post.url }}"> {{ post.title }}</a>
+      </li>
+    {% endif %}
+  {% endfor %}
+</ol>
+
+
+## Preface
+
+Scala is a Java-like programming language which unifies
+object-oriented and functional programming.  It is a pure
+object-oriented language in the sense that every value is an
+object. Types and behavior of objects are described by
+classes. Classes can be composed using mixin composition.  Scala is
+designed to work seamlessly with less pure but mainstream
+object-oriented languages like Java.
+
+Scala is a functional language in the sense that every function is a
+value. Nesting of function definitions and higher-order functions are
+naturally supported. Scala also supports a general notion of pattern
+matching which can model the algebraic types used in many functional
+languages.
+
+Scala has been designed to interoperate seamlessly with Java.
+Scala classes can call Java methods, create Java objects, inherit from Java
+classes and implement Java interfaces. None of this requires interface
+definitions or glue code.
+
+Scala has been developed since 2001 in the programming methods
+laboratory at EPFL. Version 1.0 was released in November 2003. This
+document describes the second version of the language, which was
+released in March 2006. It acts as a reference for the language
+definition and some core library modules. It is not intended to teach
+Scala or its concepts; for this there are [other documents](14-references.html).
+
+Scala has been a collective effort of many people. The design and the
+implementation of version 1.0 was completed by Philippe Altherr,
+Vincent Cremet, Gilles Dubochet, Burak Emir, Stéphane Micheloud,
+Nikolay Mihaylov, Michel Schinz, Erik Stenman, Matthias Zenger, and
+the author. Iulian Dragos, Gilles Dubochet, Philipp Haller, Sean
+McDirmid, Lex Spoon, and Geoffrey Washburn joined in the effort to
+develop the second version of the language and tools.  Gilad Bracha,
+Craig Chambers, Erik Ernst, Matthias Felleisen, Shriram Krishnamurthi,
+Gary Leavens, Sebastian Maneth, Erik Meijer, Klaus Ostermann, Didier
+Rémy, Mads Torgersen, and Philip Wadler have shaped the design of
+the language through lively and inspiring discussions and comments on
+previous versions of this document.  The contributors to the Scala
+mailing list have also given very useful feedback that helped us
+improve the language and its tools.
+
diff --git a/spec/public/images/classhierarchy.pdf b/spec/public/images/classhierarchy.pdf
new file mode 100644
index 0000000..58e0501
Binary files /dev/null and b/spec/public/images/classhierarchy.pdf differ
diff --git a/spec/public/stylesheets/screen.css b/spec/public/stylesheets/screen.css
new file mode 100644
index 0000000..725eb0b
--- /dev/null
+++ b/spec/public/stylesheets/screen.css
@@ -0,0 +1,332 @@
+/* from https://gist.github.com/andyferra/2554919 */
+
+body {
+  font-family: Helvetica, arial, sans-serif;
+  font-size: 14px;
+  line-height: 1.6;
+  padding-top: 10px;
+  padding-bottom: 10px;
+  background-color: white;
+  padding: 30px;
+}
+
+body > *:first-child {
+  margin-top: 0 !important;
+}
+body > *:last-child {
+  margin-bottom: 0 !important;
+}
+
+a {
+  color: #4183C4;
+}
+a.absent {
+  color: #cc0000;
+}
+a.anchor {
+  display: block;
+  padding-left: 30px;
+  margin-left: -30px;
+  cursor: pointer;
+  position: absolute;
+  top: 0;
+  left: 0;
+  bottom: 0;
+}
+
+h1, h2, h3, h4, h5, h6 {
+  margin: 20px 0 10px;
+  padding: 0;
+  font-weight: bold;
+  -webkit-font-smoothing: antialiased;
+  cursor: text;
+  position: relative;
+}
+
+h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor, h5:hover a.anchor, h6:hover a.anchor {
+  background: url("../../images/modules/styleguide/para.png") no-repeat 10px center;
+  text-decoration: none;
+}
+
+h1 tt, h1 code {
+  font-size: inherit;
+}
+
+h2 tt, h2 code {
+  font-size: inherit;
+}
+
+h3 tt, h3 code {
+  font-size: inherit;
+}
+
+h4 tt, h4 code {
+  font-size: inherit;
+}
+
+h5 tt, h5 code {
+  font-size: inherit;
+}
+
+h6 tt, h6 code {
+  font-size: inherit;
+}
+
+h1 {
+  font-size: 28px;
+  color: black;
+}
+
+h2 {
+  font-size: 24px;
+  border-bottom: 1px solid #cccccc;
+  color: black;
+}
+
+h3 {
+  font-size: 18px;
+}
+
+h4 {
+  font-size: 16px;
+}
+
+h5 {
+  font-size: 14px;
+}
+
+h6 {
+  color: #777777;
+  font-size: 14px;
+}
+
+p, blockquote, ul, ol, dl, li, table, pre {
+  margin: 15px 0;
+  -moz-font-feature-settings: "onum";
+  -ms-font-feature-settings: "onum";
+  -webkit-font-feature-settings: "onum";
+  font-feature-settings: "onum";
+}
+
+hr {
+  background: transparent url("../../images/modules/pulls/dirty-shade.png") repeat-x 0 0;
+  border: 0 none;
+  color: #cccccc;
+  height: 4px;
+  padding: 0;
+}
+
+body > h2:first-child {
+  margin-top: 0;
+  padding-top: 0;
+}
+body > h1:first-child {
+  margin-top: 0;
+  padding-top: 0;
+}
+body > h1:first-child + h2 {
+  margin-top: 0;
+  padding-top: 0;
+}
+body > h3:first-child, body > h4:first-child, body > h5:first-child, body > h6:first-child {
+  margin-top: 0;
+  padding-top: 0;
+}
+
+a:first-child h1, a:first-child h2, a:first-child h3, a:first-child h4, a:first-child h5, a:first-child h6 {
+  margin-top: 0;
+  padding-top: 0;
+}
+
+h1 p, h2 p, h3 p, h4 p, h5 p, h6 p {
+  margin-top: 0;
+}
+
+li p.first {
+  display: inline-block;
+}
+
+ul, ol {
+  padding-left: 30px;
+}
+
+ul :first-child, ol :first-child {
+  margin-top: 0;
+}
+
+ul :last-child, ol :last-child {
+  margin-bottom: 0;
+}
+
+dl {
+  padding: 0;
+}
+dl dt {
+  font-size: 14px;
+  font-weight: bold;
+  font-style: italic;
+  padding: 0;
+  margin: 15px 0 5px;
+}
+dl dt:first-child {
+  padding: 0;
+}
+dl dt > :first-child {
+  margin-top: 0;
+}
+dl dt > :last-child {
+  margin-bottom: 0;
+}
+dl dd {
+  margin: 0 0 15px;
+  padding: 0 15px;
+}
+dl dd > :first-child {
+  margin-top: 0;
+}
+dl dd > :last-child {
+  margin-bottom: 0;
+}
+
+blockquote {
+  border-left: 4px solid #dddddd;
+  padding: 0 15px;
+  color: #777777;
+}
+blockquote > :first-child {
+  margin-top: 0;
+}
+blockquote > :last-child {
+  margin-bottom: 0;
+}
+
+table {
+  padding: 0;
+}
+table tr {
+  border-top: 1px solid #cccccc;
+  background-color: white;
+  margin: 0;
+  padding: 0;
+}
+table tr:nth-child(2n) {
+  background-color: #f8f8f8;
+}
+table tr th {
+  font-weight: bold;
+  border: 1px solid #cccccc;
+  text-align: left;
+  margin: 0;
+  padding: 6px 13px;
+}
+table tr td {
+  border: 1px solid #cccccc;
+  text-align: left;
+  margin: 0;
+  padding: 6px 13px;
+}
+table tr th :first-child, table tr td :first-child {
+  margin-top: 0;
+}
+table tr th :last-child, table tr td :last-child {
+  margin-bottom: 0;
+}
+
+img {
+  max-width: 100%;
+}
+
+span.frame {
+  display: block;
+  overflow: hidden;
+}
+span.frame > span {
+  border: 1px solid #dddddd;
+  display: block;
+  float: left;
+  overflow: hidden;
+  margin: 13px 0 0;
+  padding: 7px;
+  width: auto;
+}
+span.frame span img {
+  display: block;
+  float: left;
+}
+span.frame span span {
+  clear: both;
+  color: #333333;
+  display: block;
+  padding: 5px 0 0;
+}
+span.align-center {
+  display: block;
+  overflow: hidden;
+  clear: both;
+}
+span.align-center > span {
+  display: block;
+  overflow: hidden;
+  margin: 13px auto 0;
+  text-align: center;
+}
+span.align-center span img {
+  margin: 0 auto;
+  text-align: center;
+}
+span.align-right {
+  display: block;
+  overflow: hidden;
+  clear: both;
+}
+span.align-right > span {
+  display: block;
+  overflow: hidden;
+  margin: 13px 0 0;
+  text-align: right;
+}
+span.align-right span img {
+  margin: 0;
+  text-align: right;
+}
+span.float-left {
+  display: block;
+  margin-right: 13px;
+  overflow: hidden;
+  float: left;
+}
+span.float-left span {
+  margin: 13px 0 0;
+}
+span.float-right {
+  display: block;
+  margin-left: 13px;
+  overflow: hidden;
+  float: right;
+}
+span.float-right > span {
+  display: block;
+  overflow: hidden;
+  margin: 13px auto 0;
+  text-align: right;
+}
+
+.highlight pre {
+  border: 1px solid #eaeaea;
+  background-color: #f8f8f8;
+  border-radius: 3px;
+  line-height: 19px;
+  overflow: auto;
+  padding: 6px 10px;
+  white-space: nowrap;
+}
+
+code {
+  background-color: transparent;
+  border: none;
+  margin: 0;
+  padding: 0;
+  white-space: pre;
+  font-size: 16px;
+}
+
diff --git a/src/actors/scala/actors/AbstractActor.scala b/src/actors/scala/actors/AbstractActor.scala
index 5a4e0d9..28fe689 100644
--- a/src/actors/scala/actors/AbstractActor.scala
+++ b/src/actors/scala/actors/AbstractActor.scala
@@ -15,6 +15,7 @@ import scala.language.higherKinds
  *
  * @define actor actor
  */
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 trait AbstractActor extends OutputChannel[Any] with CanReply[Any, Any] {
 
   type Future[+R] <: scala.actors.Future[R]
@@ -26,5 +27,4 @@ trait AbstractActor extends OutputChannel[Any] with CanReply[Any, Any] {
   private[actors] def unlinkFrom(from: AbstractActor): Unit
 
   private[actors] def exit(from: AbstractActor, reason: AnyRef): Unit
-
 }
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
index 61124b3..75160fa 100644
--- a/src/actors/scala/actors/Actor.scala
+++ b/src/actors/scala/actors/Actor.scala
@@ -18,6 +18,7 @@ import scala.language.implicitConversions
  *
  * @author Philipp Haller
  */
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 object Actor extends Combinators {
 
   /** State of an actor.
@@ -398,6 +399,7 @@ object Actor extends Combinators {
  *  @define channel actor's mailbox
  */
 @SerialVersionUID(-781154067877019505L)
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 trait Actor extends InternalActor with ReplyReactor {
 
   override def start(): Actor = synchronized {
diff --git a/src/actors/scala/actors/ActorRef.scala b/src/actors/scala/actors/ActorRef.scala
index 5c17906..0da167a 100644
--- a/src/actors/scala/actors/ActorRef.scala
+++ b/src/actors/scala/actors/ActorRef.scala
@@ -45,8 +45,9 @@ trait ActorRef {
  * This is what is used to complete a Future that is returned from an ask/? call,
  * when it times out.
  */
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 class AskTimeoutException(message: String, cause: Throwable) extends TimeoutException {
   def this(message: String) = this(message, null: Throwable)
 }
-
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 object PoisonPill
diff --git a/src/actors/scala/actors/CanReply.scala b/src/actors/scala/actors/CanReply.scala
index 3d26477..3f2c53f 100644
--- a/src/actors/scala/actors/CanReply.scala
+++ b/src/actors/scala/actors/CanReply.scala
@@ -17,6 +17,7 @@ import scala.language.higherKinds
  *
  * @define actor `CanReply`
  */
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 trait CanReply[-T, +R] {
 
   type Future[+P] <: () => P
diff --git a/src/actors/scala/actors/Channel.scala b/src/actors/scala/actors/Channel.scala
index 9669ffb..ddf7b32 100644
--- a/src/actors/scala/actors/Channel.scala
+++ b/src/actors/scala/actors/Channel.scala
@@ -23,6 +23,7 @@ import scala.concurrent.SyncVar
  *
  * @author Philipp Haller
  */
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 case class ! [a](ch: Channel[a], msg: a)
 
 /**
@@ -34,6 +35,7 @@ case class ! [a](ch: Channel[a], msg: a)
  * @define actor channel
  * @define channel channel
  */
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 class Channel[Msg](val receiver: InternalActor) extends InputChannel[Msg] with OutputChannel[Msg] with CanReply[Msg, Any] {
 
   type Future[+P] = scala.actors.Future[P]
diff --git a/src/actors/scala/actors/DaemonActor.scala b/src/actors/scala/actors/DaemonActor.scala
index ffe8b75..04a4b4a 100644
--- a/src/actors/scala/actors/DaemonActor.scala
+++ b/src/actors/scala/actors/DaemonActor.scala
@@ -18,6 +18,7 @@ import scheduler.DaemonScheduler
  *
  * @author Erik Engbrecht
  */
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 trait DaemonActor extends Actor {
   override def scheduler: IScheduler = DaemonScheduler
 }
diff --git a/src/actors/scala/actors/Debug.scala b/src/actors/scala/actors/Debug.scala
index cc51dfd..31ef53b 100644
--- a/src/actors/scala/actors/Debug.scala
+++ b/src/actors/scala/actors/Debug.scala
@@ -14,6 +14,7 @@ package scala.actors
  *
  * @author Philipp Haller
  */
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 object Debug extends Logger("") {}
 
 private[actors] class Logger(tag: String) {
diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala
index 3037f82..4421c7a 100644
--- a/src/actors/scala/actors/Future.scala
+++ b/src/actors/scala/actors/Future.scala
@@ -21,6 +21,7 @@ import scala.concurrent.SyncVar
  *
  *  @author Philipp Haller
  */
+ at deprecated("Use the scala.concurrent.Future instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 abstract class Future[+T] extends Responder[T] with Function0[T] {
 
   @volatile
@@ -107,6 +108,7 @@ private class FutureActor[T](fun: SyncVar[T] => Unit, channel: Channel[T]) exten
  *
  *  @author Philipp Haller
  */
+ at deprecated("Use the object scala.concurrent.Future instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 object Futures {
 
   /** Arranges for the asynchronous execution of `body`,
@@ -174,21 +176,21 @@ object Futures {
    *                  or timeout + `System.currentTimeMillis()` is negative.
    */
   def awaitAll(timeout: Long, fts: Future[Any]*): List[Option[Any]] = {
-    var resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new scala.collection.mutable.HashMap[Int, Option[Any]]
+    val resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new scala.collection.mutable.HashMap[Int, Option[Any]]
 
     var cnt = 0
     val mappedFts = fts.map(ft =>
-      Pair({cnt+=1; cnt-1}, ft))
+      ({cnt+=1; cnt-1}, ft))
 
-    val unsetFts = mappedFts.filter((p: Pair[Int, Future[Any]]) => {
+    val unsetFts = mappedFts.filter((p: Tuple2[Int, Future[Any]]) => {
       if (p._2.isSet) { resultsMap(p._1) = Some(p._2()); false }
       else { resultsMap(p._1) = None; true }
     })
 
-    val partFuns = unsetFts.map((p: Pair[Int, Future[Any]]) => {
+    val partFuns = unsetFts.map((p: Tuple2[Int, Future[Any]]) => {
       val FutCh = p._2.inputChannel
-      val singleCase: PartialFunction[Any, Pair[Int, Any]] = {
-        case FutCh ! any => Pair(p._1, any)
+      val singleCase: PartialFunction[Any, Tuple2[Int, Any]] = {
+        case FutCh ! any => (p._1, any)
       }
       singleCase
     })
@@ -199,7 +201,7 @@ object Futures {
     }
     Actor.timer.schedule(timerTask, timeout)
 
-    def awaitWith(partFuns: Seq[PartialFunction[Any, Pair[Int, Any]]]) {
+    def awaitWith(partFuns: Seq[PartialFunction[Any, Tuple2[Int, Any]]]) {
       val reaction: PartialFunction[Any, Unit] = new PartialFunction[Any, Unit] {
         def isDefinedAt(msg: Any) = msg match {
           case TIMEOUT => true
@@ -210,7 +212,7 @@ object Futures {
           case _ => {
             val pfOpt = partFuns find (_ isDefinedAt msg)
             val pf = pfOpt.get // succeeds always
-            val Pair(idx, subres) = pf(msg)
+            val (idx, subres) = pf(msg)
             resultsMap(idx) = Some(subres)
 
             val partFunsRest = partFuns filter (_ != pf)
diff --git a/src/actors/scala/actors/IScheduler.scala b/src/actors/scala/actors/IScheduler.scala
index 35c2d32..9d61d48 100644
--- a/src/actors/scala/actors/IScheduler.scala
+++ b/src/actors/scala/actors/IScheduler.scala
@@ -17,6 +17,7 @@ package scala.actors
  *
  * @author Philipp Haller
  */
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 trait IScheduler {
 
   /** Submits a closure for execution.
diff --git a/src/actors/scala/actors/InputChannel.scala b/src/actors/scala/actors/InputChannel.scala
index 3d7dd7d..d2dd6d2 100644
--- a/src/actors/scala/actors/InputChannel.scala
+++ b/src/actors/scala/actors/InputChannel.scala
@@ -16,6 +16,7 @@ package scala.actors
  *
  * @define channel `InputChannel`
  */
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 trait InputChannel[+Msg] {
 
   /**
diff --git a/src/actors/scala/actors/InternalActor.scala b/src/actors/scala/actors/InternalActor.scala
index ed9e25c..5045ea5 100644
--- a/src/actors/scala/actors/InternalActor.scala
+++ b/src/actors/scala/actors/InternalActor.scala
@@ -524,6 +524,7 @@ private[actors] trait InternalActor extends AbstractActor with InternalReplyReac
  *
  *  @author Philipp Haller
  */
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 case object TIMEOUT
 
 /**
@@ -534,6 +535,7 @@ case object TIMEOUT
  *  @param from   the actor that terminated
  *  @param reason the reason that caused the actor to terminate
  */
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 case class Exit(from: AbstractActor, reason: AnyRef)
 
 /**
diff --git a/src/actors/scala/actors/InternalReplyReactor.scala b/src/actors/scala/actors/InternalReplyReactor.scala
index 3829513..c744984 100644
--- a/src/actors/scala/actors/InternalReplyReactor.scala
+++ b/src/actors/scala/actors/InternalReplyReactor.scala
@@ -12,6 +12,7 @@ import java.util.{TimerTask}
  *
  *  @define actor `ReplyReactor`
  */
+ at deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 trait InternalReplyReactor extends Reactor[Any] with ReactorCanReply {
 
   /* A list of the current senders. The head of the list is
diff --git a/src/actors/scala/actors/KillActorControl.scala b/src/actors/scala/actors/KillActorControl.scala
index 2f1f08e..0f94bbc 100644
--- a/src/actors/scala/actors/KillActorControl.scala
+++ b/src/actors/scala/actors/KillActorControl.scala
@@ -6,8 +6,6 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
 package scala.actors
 
 import scala.util.control.ControlThrowable
diff --git a/src/actors/scala/actors/OutputChannel.scala b/src/actors/scala/actors/OutputChannel.scala
index fd87f81..f0f475e 100644
--- a/src/actors/scala/actors/OutputChannel.scala
+++ b/src/actors/scala/actors/OutputChannel.scala
@@ -15,6 +15,7 @@ package scala.actors
  *
  * @define actor `OutputChannel`
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 trait OutputChannel[-Msg] {
 
   /**
diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala
index f025f6b..aa985b3 100644
--- a/src/actors/scala/actors/Reactor.scala
+++ b/src/actors/scala/actors/Reactor.scala
@@ -52,6 +52,7 @@ private[actors] object Reactor {
  *
  * @define actor reactor
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
 
   /* The $actor's mailbox. */
diff --git a/src/actors/scala/actors/ReplyReactor.scala b/src/actors/scala/actors/ReplyReactor.scala
index a2051d4..01e6da0 100644
--- a/src/actors/scala/actors/ReplyReactor.scala
+++ b/src/actors/scala/actors/ReplyReactor.scala
@@ -7,7 +7,7 @@
 \*                                                                      */
 package scala.actors
 
-@deprecated("Scala Actors are being removed from the standard library. Please refer to the migration guide.", "2.10")
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 trait ReplyReactor extends InternalReplyReactor {
   protected[actors] def sender: OutputChannel[Any] = super.internalSender
 }
diff --git a/src/actors/scala/actors/Scheduler.scala b/src/actors/scala/actors/Scheduler.scala
index dd6c110..67c8e5c 100644
--- a/src/actors/scala/actors/Scheduler.scala
+++ b/src/actors/scala/actors/Scheduler.scala
@@ -9,7 +9,6 @@
 
 package scala.actors
 
-import java.util.concurrent._
 import scheduler.{DelegatingScheduler, ForkJoinScheduler, ResizableThreadPoolScheduler, ThreadPoolConfig}
 
 /**
@@ -18,6 +17,7 @@ import scheduler.{DelegatingScheduler, ForkJoinScheduler, ResizableThreadPoolSch
  *
  * @author Philipp Haller
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 object Scheduler extends DelegatingScheduler {
 
   Debug.info("initializing "+this+"...")
diff --git a/src/actors/scala/actors/SchedulerAdapter.scala b/src/actors/scala/actors/SchedulerAdapter.scala
index fb28b3f..b8e66dd 100644
--- a/src/actors/scala/actors/SchedulerAdapter.scala
+++ b/src/actors/scala/actors/SchedulerAdapter.scala
@@ -18,6 +18,7 @@ package scala.actors
  *
  *  @author Philipp Haller
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 trait SchedulerAdapter extends IScheduler {
 
   /** Submits a <code>Runnable</code> for execution.
diff --git a/src/actors/scala/actors/UncaughtException.scala b/src/actors/scala/actors/UncaughtException.scala
index f225987..02b916a 100644
--- a/src/actors/scala/actors/UncaughtException.scala
+++ b/src/actors/scala/actors/UncaughtException.scala
@@ -20,6 +20,7 @@ package scala.actors
  * @author Philipp Haller
  * @author Erik Engbrecht
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 case class UncaughtException(actor: InternalActor,
                              message: Option[Any],
                              sender: Option[OutputChannel[Any]],
diff --git a/src/actors/scala/actors/package.scala b/src/actors/scala/actors/package.scala
index d176487..ae96086 100644
--- a/src/actors/scala/actors/package.scala
+++ b/src/actors/scala/actors/package.scala
@@ -14,6 +14,7 @@ package scala
  * A starting point for using the actors library would be [[scala.actors.Reactor]],
  * [[scala.actors.ReplyReactor]], or [[scala.actors.Actor]] or their companion objects.
  *
+ * @note As of release 2.10.1, replaced by <code>akka.actor</code> package. For migration of existing actors refer to the Actors Migration Guide.
  */
 package object actors {
 
diff --git a/src/actors/scala/actors/remote/JavaSerializer.scala b/src/actors/scala/actors/remote/JavaSerializer.scala
index 6e9f4a7..7549bbf 100644
--- a/src/actors/scala/actors/remote/JavaSerializer.scala
+++ b/src/actors/scala/actors/remote/JavaSerializer.scala
@@ -39,6 +39,7 @@ extends ObjectInputStream(in) {
 /**
  *  @author Philipp Haller
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 class JavaSerializer(serv: Service, cl: ClassLoader) extends Serializer(serv) {
   def serialize(o: AnyRef): Array[Byte] = {
     val bos = new ByteArrayOutputStream()
diff --git a/src/actors/scala/actors/remote/NetKernel.scala b/src/actors/scala/actors/remote/NetKernel.scala
index 4795ff3..57d7af6 100644
--- a/src/actors/scala/actors/remote/NetKernel.scala
+++ b/src/actors/scala/actors/remote/NetKernel.scala
@@ -43,8 +43,8 @@ private[remote] class NetKernel(service: Service) {
   private val names = new mutable.HashMap[OutputChannel[Any], Symbol]
 
   def register(name: Symbol, a: OutputChannel[Any]): Unit = synchronized {
-    actors += Pair(name, a)
-    names += Pair(a, name)
+    actors(name) = a
+    names(a) = name
   }
 
   def getOrCreateName(from: OutputChannel[Any]) = names.get(from) match {
@@ -79,7 +79,7 @@ private[remote] class NetKernel(service: Service) {
 
   def createProxy(node: Node, sym: Symbol): Proxy = {
     val p = new Proxy(node, sym, this)
-    proxies += Pair((node, sym), p)
+    proxies((node, sym)) = p
     p
   }
 
@@ -99,7 +99,7 @@ private[remote] class NetKernel(service: Service) {
     proxies.synchronized {
       proxies.get((senderNode, senderName)) match {
         case Some(senderProxy) => // do nothing
-        case None              => proxies += Pair((senderNode, senderName), p)
+        case None              => proxies((senderNode, senderName)) = p
       }
     }
 
diff --git a/src/actors/scala/actors/remote/Proxy.scala b/src/actors/scala/actors/remote/Proxy.scala
index 73af1ed..9949b36 100644
--- a/src/actors/scala/actors/remote/Proxy.scala
+++ b/src/actors/scala/actors/remote/Proxy.scala
@@ -118,7 +118,7 @@ private[remote] case class Apply0(rfun: Function2[AbstractActor, Proxy, Unit])
  */
 private[remote] class DelegateActor(creator: Proxy, node: Node, name: Symbol, kernel: NetKernel) extends Actor {
   var channelMap = new mutable.HashMap[Symbol, OutputChannel[Any]]
-  var sessionMap = new mutable.HashMap[OutputChannel[Any], Symbol]
+  var sessionMap = new mutable.HashMap[OutputChannel[_], Symbol]
 
   def act() {
     Actor.loop {
@@ -142,7 +142,7 @@ private[remote] class DelegateActor(creator: Proxy, node: Node, name: Symbol, ke
                 // create a new reply channel...
                 val replyCh = new Channel[Any](this)
                 // ...that maps to session
-                sessionMap += Pair(replyCh, session)
+                sessionMap(replyCh) = session
                 // local send
                 out.send(msg, replyCh)
 
@@ -178,7 +178,7 @@ private[remote] class DelegateActor(creator: Proxy, node: Node, name: Symbol, ke
             // create fresh session ID...
             val fresh = FreshNameCreator.newName(node+"@"+name)
             // ...that maps to reply channel
-            channelMap += Pair(fresh, sender)
+            channelMap(fresh) = sender
             kernel.forward(sender, node, name, msg, fresh)
           } else {
             kernel.forward(sender, node, name, msg, 'nosession)
diff --git a/src/actors/scala/actors/remote/RemoteActor.scala b/src/actors/scala/actors/remote/RemoteActor.scala
index f1644c2..2daf9ce 100644
--- a/src/actors/scala/actors/remote/RemoteActor.scala
+++ b/src/actors/scala/actors/remote/RemoteActor.scala
@@ -38,6 +38,7 @@ package remote
  *
  * @author Philipp Haller
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 object RemoteActor {
 
   private val kernels = new scala.collection.mutable.HashMap[InternalActor, NetKernel]
@@ -63,7 +64,7 @@ object RemoteActor {
     val serv = TcpService(port, cl)
     val kern = serv.kernel
     val s = Actor.self(Scheduler)
-    kernels += Pair(s, kern)
+    kernels(s) = kern
 
     s.onTerminate {
       Debug.info("alive actor "+s+" terminated")
@@ -89,7 +90,7 @@ object RemoteActor {
     val kernel = kernels.get(Actor.self(Scheduler)) match {
       case None =>
         val serv = TcpService(TcpService.generatePort, cl)
-        kernels += Pair(Actor.self(Scheduler), serv.kernel)
+        kernels(Actor.self(Scheduler)) = serv.kernel
         serv.kernel
       case Some(k) =>
         k
@@ -127,4 +128,5 @@ object RemoteActor {
  *
  * @author Philipp Haller
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 case class Node(address: String, port: Int)
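
The NetKernel, Proxy, and RemoteActor hunks above all rewrite `map += Pair(key, value)` as the update syntax `map(key) = value`. A minimal sketch (names are illustrative) showing that the two forms are equivalent for a mutable HashMap:

    import scala.collection.mutable

    object MapUpdateSketch {
      def main(args: Array[String]): Unit = {
        val kernels = new mutable.HashMap[String, Int]
        // m(k) = v desugars to m.update(k, v); same effect as m += ((k, v)),
        // but without the deprecated Pair factory.
        kernels("worker-1") = 4040
        println(kernels.get("worker-1"))   // Some(4040)
      }
    }
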
diff --git a/src/actors/scala/actors/remote/Serializer.scala b/src/actors/scala/actors/remote/Serializer.scala
index e39b01f..7be4aa6 100644
--- a/src/actors/scala/actors/remote/Serializer.scala
+++ b/src/actors/scala/actors/remote/Serializer.scala
@@ -16,6 +16,7 @@ import java.lang.ClassNotFoundException
 
 import java.io.{DataInputStream, DataOutputStream, EOFException, IOException}
 
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 abstract class Serializer(val service: Service) {
   def serialize(o: AnyRef): Array[Byte]
   def deserialize(a: Array[Byte]): AnyRef
diff --git a/src/actors/scala/actors/remote/Service.scala b/src/actors/scala/actors/remote/Service.scala
index 4584cc3..d102df1 100644
--- a/src/actors/scala/actors/remote/Service.scala
+++ b/src/actors/scala/actors/remote/Service.scala
@@ -14,6 +14,7 @@ package remote
  * @version 0.9.10
  * @author Philipp Haller
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 trait Service {
   val kernel = new NetKernel(this)
   val serializer: Serializer
diff --git a/src/actors/scala/actors/remote/TcpService.scala b/src/actors/scala/actors/remote/TcpService.scala
index 028dd3a..69e5c46 100644
--- a/src/actors/scala/actors/remote/TcpService.scala
+++ b/src/actors/scala/actors/remote/TcpService.scala
@@ -24,6 +24,7 @@ import scala.util.Random
  * @version 0.9.9
  * @author Philipp Haller
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 object TcpService {
   private val random = new Random
   private val ports = new mutable.HashMap[Int, TcpService]
@@ -34,7 +35,7 @@ object TcpService {
         service
       case None =>
         val service = new TcpService(port, cl)
-        ports += Pair(port, service)
+        ports(port) = service
         service.start()
         Debug.info("created service at "+service.node)
         service
@@ -66,7 +67,7 @@ object TcpService {
       timeout =>
         try {
           val to = timeout.toInt
-          Debug.info("Using socket timeout $to")
+          Debug.info(s"Using socket timeout $to")
           Some(to)
         } catch {
           case e: NumberFormatException =>
@@ -84,6 +85,7 @@ object TcpService {
  * @version 0.9.10
  * @author Philipp Haller
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 class TcpService(port: Int, cl: ClassLoader) extends Thread with Service {
   val serializer: JavaSerializer = new JavaSerializer(this, cl)
 
@@ -104,9 +106,9 @@ class TcpService(port: Int, cl: ClassLoader) extends Thread with Service {
       // when remote net kernel comes up
       (pendingSends.get(node): @unchecked) match {
         case None =>
-          pendingSends += Pair(node, List(data))
+          pendingSends(node) = List(data)
         case Some(msgs) if msgs.length < TcpService.BufSize =>
-          pendingSends += Pair(node, data :: msgs)
+          pendingSends(node) = data :: msgs
       }
     }
 
@@ -181,7 +183,7 @@ class TcpService(port: Int, cl: ClassLoader) extends Thread with Service {
     new mutable.HashMap[Node, TcpServiceWorker]
 
   private[actors] def addConnection(node: Node, worker: TcpServiceWorker) = synchronized {
-    connections += Pair(node, worker)
+    connections(node) = worker
   }
 
   def getConnection(n: Node) = synchronized {
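
One TcpService hunk above is a genuine bug fix rather than a cleanup: Debug.info("Using socket timeout $to") logged the four characters $to literally, because only the s-prefixed interpolator substitutes values. A tiny sketch of the difference (using println in place of Debug.info):

    object InterpolationSketch {
      def main(args: Array[String]): Unit = {
        val to = 5000
        println("Using socket timeout $to")    // prints: Using socket timeout $to
        println(s"Using socket timeout $to")   // prints: Using socket timeout 5000
      }
    }
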
diff --git a/src/actors/scala/actors/scheduler/ActorGC.scala b/src/actors/scala/actors/scheduler/ActorGC.scala
index 6d9a945..a27799d 100644
--- a/src/actors/scala/actors/scheduler/ActorGC.scala
+++ b/src/actors/scala/actors/scheduler/ActorGC.scala
@@ -23,6 +23,7 @@ import scala.collection.mutable
  * (e.g. act method finishes, exit explicitly called, an exception is thrown),
  * the ActorGC is informed via the `terminated` method.
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 trait ActorGC extends TerminationMonitor {
   self: IScheduler =>
 
diff --git a/src/actors/scala/actors/scheduler/DaemonScheduler.scala b/src/actors/scala/actors/scheduler/DaemonScheduler.scala
index a2d6941..b21a1aa 100644
--- a/src/actors/scala/actors/scheduler/DaemonScheduler.scala
+++ b/src/actors/scala/actors/scheduler/DaemonScheduler.scala
@@ -14,6 +14,7 @@ package scheduler
  *
  * @author Erik Engbrecht
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 object DaemonScheduler extends DelegatingScheduler {
 
   protected def makeNewScheduler(): IScheduler = {
diff --git a/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala b/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala
index 15ce605..37710ec 100644
--- a/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala
+++ b/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala
@@ -8,5 +8,4 @@ private class DrainableForkJoinPool(parallelism: Int, maxPoolSize: Int) extends
 
   override def drainTasksTo(c: Collection[ _ >: ForkJoinTask[_]]): Int =
     super.drainTasksTo(c)
-
 }
diff --git a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
index a1d5666..4d3ebc3 100644
--- a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
@@ -19,6 +19,7 @@ import scala.concurrent.ThreadPoolRunner
  *
  * @author Philipp Haller
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 object ExecutorScheduler {
 
   private def start(sched: ExecutorScheduler): ExecutorScheduler = {
@@ -58,6 +59,7 @@ object ExecutorScheduler {
  *
  * @author Philipp Haller
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 trait ExecutorScheduler extends Thread
                         with IScheduler with TerminationService
                         with ThreadPoolRunner {
diff --git a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
index ce67ffd..75a98db 100644
--- a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
@@ -9,6 +9,7 @@ import scala.concurrent.forkjoin._
  *
  * @author Philipp Haller
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean, fair: Boolean)
       extends Runnable with IScheduler with TerminationMonitor {
 
@@ -62,7 +63,7 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
       while (true) {
         this.synchronized {
           try {
-            wait(CHECK_FREQ)
+            wait(CHECK_FREQ.toLong)
           } catch {
             case _: InterruptedException =>
           }
diff --git a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
index f370d45..342579d 100644
--- a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
@@ -22,6 +22,7 @@ import scala.concurrent.ManagedBlocker
  *
  * @author Philipp Haller
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 class ResizableThreadPoolScheduler(protected val terminate: Boolean,
                                    protected val daemon: Boolean)
   extends Thread with IScheduler with TerminationMonitor {
@@ -102,7 +103,7 @@ class ResizableThreadPoolScheduler(protected val terminate: Boolean,
       while (true) {
         this.synchronized {
           try {
-            wait(CHECK_FREQ)
+            wait(CHECK_FREQ.toLong)
           } catch {
             case _: InterruptedException =>
           }
diff --git a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
index 04d1d2c..03b235f 100644
--- a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
+++ b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
@@ -17,6 +17,7 @@ import scala.collection.mutable
  *
  * @author Philipp Haller
  */
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
 class SingleThreadedScheduler extends IScheduler {
 
   private val tasks = new mutable.Queue[Runnable]
diff --git a/src/actors/scala/actors/scheduler/TerminationService.scala b/src/actors/scala/actors/scheduler/TerminationService.scala
index 280c8f4..ed1805e 100644
--- a/src/actors/scala/actors/scheduler/TerminationService.scala
+++ b/src/actors/scala/actors/scheduler/TerminationService.scala
@@ -39,7 +39,7 @@ private[scheduler] trait TerminationService extends TerminationMonitor {
       while (true) {
         this.synchronized {
           try {
-            wait(CHECK_FREQ)
+            wait(CHECK_FREQ.toLong)
           } catch {
             case _: InterruptedException =>
           }
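
The scheduler hunks in ForkJoinScheduler, ResizableThreadPoolScheduler, and TerminationService all change wait(CHECK_FREQ) to wait(CHECK_FREQ.toLong). Object#wait takes a Long, so the call already worked via implicit Int-to-Long widening; the explicit .toLong presumably just makes that widening visible and keeps builds that warn on numeric widening quiet. A minimal sketch under that assumption (the object and constant below are illustrative, not from the patch):

    object WaitSketch {
      // Assumed Int constant, mirroring the CHECK_FREQ fields in the schedulers above.
      private val CHECK_FREQ: Int = 100

      def pause(): Unit = this.synchronized {
        // Explicit widening; java.lang.Object defines wait(long).
        try wait(CHECK_FREQ.toLong)
        catch { case _: InterruptedException => () }
      }
    }
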
diff --git a/src/android-library/scala/reflect/ScalaBeanInfo.scala b/src/android-library/scala/reflect/ScalaBeanInfo.scala
deleted file mode 100644
index 05c7bce..0000000
--- a/src/android-library/scala/reflect/ScalaBeanInfo.scala
+++ /dev/null
@@ -1 +0,0 @@
-/* ScalaBeanInfo does not exist for the Android target */
diff --git a/src/asm/scala/tools/asm/AnnotationVisitor.java b/src/asm/scala/tools/asm/AnnotationVisitor.java
index b96e730..c806ca7 100644
--- a/src/asm/scala/tools/asm/AnnotationVisitor.java
+++ b/src/asm/scala/tools/asm/AnnotationVisitor.java
@@ -54,8 +54,9 @@ public abstract class AnnotationVisitor {
     /**
      * Constructs a new {@link AnnotationVisitor}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
      */
     public AnnotationVisitor(final int api) {
         this(api, null);
@@ -64,15 +65,17 @@ public abstract class AnnotationVisitor {
     /**
      * Constructs a new {@link AnnotationVisitor}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
-     * @param av the annotation visitor to which this visitor must delegate
-     *        method calls. May be null.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
+     * @param av
+     *            the annotation visitor to which this visitor must delegate
+     *            method calls. May be null.
      */
     public AnnotationVisitor(final int api, final AnnotationVisitor av) {
-        /*if (api != Opcodes.ASM4) {
+        if (api != Opcodes.ASM4) {
             throw new IllegalArgumentException();
-        }*/
+        }
         this.api = api;
         this.av = av;
     }
@@ -80,14 +83,17 @@ public abstract class AnnotationVisitor {
     /**
      * Visits a primitive value of the annotation.
      *
-     * @param name the value name.
-     * @param value the actual value, whose type must be {@link Byte},
-     *        {@link Boolean}, {@link Character}, {@link Short}, {@link Integer}
-     *        , {@link Long}, {@link Float}, {@link Double}, {@link String} or
-     *        {@link Type} or OBJECT or ARRAY sort. This value can also be an
-     *        array of byte, boolean, short, char, int, long, float or double
-     *        values (this is equivalent to using {@link #visitArray visitArray}
-     *        and visiting each array element in turn, but is more convenient).
+     * @param name
+     *            the value name.
+     * @param value
+     *            the actual value, whose type must be {@link Byte},
+     *            {@link Boolean}, {@link Character}, {@link Short},
+     *            {@link Integer} , {@link Long}, {@link Float}, {@link Double},
+     *            {@link String} or {@link Type} or OBJECT or ARRAY sort. This
+     *            value can also be an array of byte, boolean, short, char, int,
+     *            long, float or double values (this is equivalent to using
+     *            {@link #visitArray visitArray} and visiting each array element
+     *            in turn, but is more convenient).
      */
     public void visit(String name, Object value) {
         if (av != null) {
@@ -98,9 +104,12 @@ public abstract class AnnotationVisitor {
     /**
      * Visits an enumeration value of the annotation.
      *
-     * @param name the value name.
-     * @param desc the class descriptor of the enumeration class.
-     * @param value the actual enumeration value.
+     * @param name
+     *            the value name.
+     * @param desc
+     *            the class descriptor of the enumeration class.
+     * @param value
+     *            the actual enumeration value.
      */
     public void visitEnum(String name, String desc, String value) {
         if (av != null) {
@@ -111,12 +120,14 @@ public abstract class AnnotationVisitor {
     /**
      * Visits a nested annotation value of the annotation.
      *
-     * @param name the value name.
-     * @param desc the class descriptor of the nested annotation class.
+     * @param name
+     *            the value name.
+     * @param desc
+     *            the class descriptor of the nested annotation class.
      * @return a visitor to visit the actual nested annotation value, or
-     *         <tt>null</tt> if this visitor is not interested in visiting
-     *         this nested annotation. <i>The nested annotation value must be
-     *         fully visited before calling other methods on this annotation
+     *         <tt>null</tt> if this visitor is not interested in visiting this
+     *         nested annotation. <i>The nested annotation value must be fully
+     *         visited before calling other methods on this annotation
      *         visitor</i>.
      */
     public AnnotationVisitor visitAnnotation(String name, String desc) {
@@ -132,10 +143,11 @@ public abstract class AnnotationVisitor {
      * can be passed as value to {@link #visit visit}. This is what
      * {@link ClassReader} does.
      *
-     * @param name the value name.
+     * @param name
+     *            the value name.
      * @return a visitor to visit the actual array value elements, or
-     *         <tt>null</tt> if this visitor is not interested in visiting
-     *         these values. The 'name' parameters passed to the methods of this
+     *         <tt>null</tt> if this visitor is not interested in visiting these
+     *         values. The 'name' parameters passed to the methods of this
      *         visitor are ignored. <i>All the array values must be visited
      *         before calling other methods on this annotation visitor</i>.
      */
diff --git a/src/asm/scala/tools/asm/AnnotationWriter.java b/src/asm/scala/tools/asm/AnnotationWriter.java
index e530780..8eb5b2e 100644
--- a/src/asm/scala/tools/asm/AnnotationWriter.java
+++ b/src/asm/scala/tools/asm/AnnotationWriter.java
@@ -90,20 +90,20 @@ final class AnnotationWriter extends AnnotationVisitor {
     /**
      * Constructs a new {@link AnnotationWriter}.
      *
-     * @param cw the class writer to which this annotation must be added.
-     * @param named <tt>true<tt> if values are named, <tt>false</tt> otherwise.
-     * @param bv where the annotation values must be stored.
-     * @param parent where the number of annotation values must be stored.
-     * @param offset where in <tt>parent</tt> the number of annotation values must
-     *      be stored.
+     * @param cw
+     *            the class writer to which this annotation must be added.
+     * @param named
+     *            <tt>true<tt> if values are named, <tt>false</tt> otherwise.
+     * @param bv
+     *            where the annotation values must be stored.
+     * @param parent
+     *            where the number of annotation values must be stored.
+     * @param offset
+     *            where in <tt>parent</tt> the number of annotation values must
+     *            be stored.
      */
-    AnnotationWriter(
-        final ClassWriter cw,
-        final boolean named,
-        final ByteVector bv,
-        final ByteVector parent,
-        final int offset)
-    {
+    AnnotationWriter(final ClassWriter cw, final boolean named,
+            final ByteVector bv, final ByteVector parent, final int offset) {
         super(Opcodes.ASM4);
         this.cw = cw;
         this.named = named;
@@ -190,11 +190,8 @@ final class AnnotationWriter extends AnnotationVisitor {
     }
 
     @Override
-    public void visitEnum(
-        final String name,
-        final String desc,
-        final String value)
-    {
+    public void visitEnum(final String name, final String desc,
+            final String value) {
         ++size;
         if (named) {
             bv.putShort(cw.newUTF8(name));
@@ -203,10 +200,8 @@ final class AnnotationWriter extends AnnotationVisitor {
     }
 
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String name,
-        final String desc)
-    {
+    public AnnotationVisitor visitAnnotation(final String name,
+            final String desc) {
         ++size;
         if (named) {
             bv.putShort(cw.newUTF8(name));
@@ -259,7 +254,8 @@ final class AnnotationWriter extends AnnotationVisitor {
      * Puts the annotations of this annotation writer list into the given byte
      * vector.
      *
-     * @param out where the annotations must be put.
+     * @param out
+     *            where the annotations must be put.
      */
     void put(final ByteVector out) {
         int n = 0;
@@ -286,15 +282,15 @@ final class AnnotationWriter extends AnnotationVisitor {
     /**
      * Puts the given annotation lists into the given byte vector.
      *
-     * @param panns an array of annotation writer lists.
-     * @param off index of the first annotation to be written.
-     * @param out where the annotations must be put.
+     * @param panns
+     *            an array of annotation writer lists.
+     * @param off
+     *            index of the first annotation to be written.
+     * @param out
+     *            where the annotations must be put.
      */
-    static void put(
-        final AnnotationWriter[] panns,
-        final int off,
-        final ByteVector out)
-    {
+    static void put(final AnnotationWriter[] panns, final int off,
+            final ByteVector out) {
         int size = 1 + 2 * (panns.length - off);
         for (int i = off; i < panns.length; ++i) {
             size += panns[i] == null ? 0 : panns[i].getSize();
diff --git a/src/asm/scala/tools/asm/Attribute.java b/src/asm/scala/tools/asm/Attribute.java
index 408f21c..ac40a75 100644
--- a/src/asm/scala/tools/asm/Attribute.java
+++ b/src/asm/scala/tools/asm/Attribute.java
@@ -55,7 +55,8 @@ public class Attribute {
     /**
      * Constructs a new empty attribute.
      *
-     * @param type the type of the attribute.
+     * @param type
+     *            the type of the attribute.
      */
     protected Attribute(final String type) {
         this.type = type;
@@ -91,39 +92,39 @@ public class Attribute {
     }
 
     /**
-     * Reads a {@link #type type} attribute. This method must return a <i>new</i>
-     * {@link Attribute} object, of type {@link #type type}, corresponding to
-     * the <tt>len</tt> bytes starting at the given offset, in the given class
-     * reader.
+     * Reads a {@link #type type} attribute. This method must return a
+     * <i>new</i> {@link Attribute} object, of type {@link #type type},
+     * corresponding to the <tt>len</tt> bytes starting at the given offset, in
+     * the given class reader.
      *
-     * @param cr the class that contains the attribute to be read.
-     * @param off index of the first byte of the attribute's content in {@link
-     *        ClassReader#b cr.b}. The 6 attribute header bytes, containing the
-     *        type and the length of the attribute, are not taken into account
-     *        here.
-     * @param len the length of the attribute's content.
-     * @param buf buffer to be used to call
-     *        {@link ClassReader#readUTF8 readUTF8},
-     *        {@link ClassReader#readClass(int,char[]) readClass} or
-     *        {@link ClassReader#readConst readConst}.
-     * @param codeOff index of the first byte of code's attribute content in
-     *        {@link ClassReader#b cr.b}, or -1 if the attribute to be read is
-     *        not a code attribute. The 6 attribute header bytes, containing the
-     *        type and the length of the attribute, are not taken into account
-     *        here.
-     * @param labels the labels of the method's code, or <tt>null</tt> if the
-     *        attribute to be read is not a code attribute.
+     * @param cr
+     *            the class that contains the attribute to be read.
+     * @param off
+     *            index of the first byte of the attribute's content in
+     *            {@link ClassReader#b cr.b}. The 6 attribute header bytes,
+     *            containing the type and the length of the attribute, are not
+     *            taken into account here.
+     * @param len
+     *            the length of the attribute's content.
+     * @param buf
+     *            buffer to be used to call {@link ClassReader#readUTF8
+     *            readUTF8}, {@link ClassReader#readClass(int,char[]) readClass}
+     *            or {@link ClassReader#readConst readConst}.
+     * @param codeOff
+     *            index of the first byte of code's attribute content in
+     *            {@link ClassReader#b cr.b}, or -1 if the attribute to be read
+     *            is not a code attribute. The 6 attribute header bytes,
+     *            containing the type and the length of the attribute, are not
+     *            taken into account here.
+     * @param labels
+     *            the labels of the method's code, or <tt>null</tt> if the
+     *            attribute to be read is not a code attribute.
      * @return a <i>new</i> {@link Attribute} object corresponding to the given
      *         bytes.
      */
-    protected Attribute read(
-        final ClassReader cr,
-        final int off,
-        final int len,
-        final char[] buf,
-        final int codeOff,
-        final Label[] labels)
-    {
+    protected Attribute read(final ClassReader cr, final int off,
+            final int len, final char[] buf, final int codeOff,
+            final Label[] labels) {
         Attribute attr = new Attribute(type);
         attr.value = new byte[len];
         System.arraycopy(cr.b, off, attr.value, 0, len);
@@ -133,30 +134,30 @@ public class Attribute {
     /**
      * Returns the byte array form of this attribute.
      *
-     * @param cw the class to which this attribute must be added. This parameter
-     *        can be used to add to the constant pool of this class the items
-     *        that corresponds to this attribute.
-     * @param code the bytecode of the method corresponding to this code
-     *        attribute, or <tt>null</tt> if this attribute is not a code
-     *        attributes.
-     * @param len the length of the bytecode of the method corresponding to this
-     *        code attribute, or <tt>null</tt> if this attribute is not a code
-     *        attribute.
-     * @param maxStack the maximum stack size of the method corresponding to
-     *        this code attribute, or -1 if this attribute is not a code
-     *        attribute.
-     * @param maxLocals the maximum number of local variables of the method
-     *        corresponding to this code attribute, or -1 if this attribute is
-     *        not a code attribute.
+     * @param cw
+     *            the class to which this attribute must be added. This
+     *            parameter can be used to add to the constant pool of this
+     *            class the items that corresponds to this attribute.
+     * @param code
+     *            the bytecode of the method corresponding to this code
+     *            attribute, or <tt>null</tt> if this attribute is not a code
+     *            attributes.
+     * @param len
+     *            the length of the bytecode of the method corresponding to this
+     *            code attribute, or <tt>null</tt> if this attribute is not a
+     *            code attribute.
+     * @param maxStack
+     *            the maximum stack size of the method corresponding to this
+     *            code attribute, or -1 if this attribute is not a code
+     *            attribute.
+     * @param maxLocals
+     *            the maximum number of local variables of the method
+     *            corresponding to this code attribute, or -1 if this attribute
+     *            is not a code attribute.
      * @return the byte array form of this attribute.
      */
-    protected ByteVector write(
-        final ClassWriter cw,
-        final byte[] code,
-        final int len,
-        final int maxStack,
-        final int maxLocals)
-    {
+    protected ByteVector write(final ClassWriter cw, final byte[] code,
+            final int len, final int maxStack, final int maxLocals) {
         ByteVector v = new ByteVector();
         v.data = value;
         v.length = value.length;
@@ -181,30 +182,30 @@ public class Attribute {
     /**
      * Returns the size of all the attributes in this attribute list.
      *
-     * @param cw the class writer to be used to convert the attributes into byte
-     *        arrays, with the {@link #write write} method.
-     * @param code the bytecode of the method corresponding to these code
-     *        attributes, or <tt>null</tt> if these attributes are not code
-     *        attributes.
-     * @param len the length of the bytecode of the method corresponding to
-     *        these code attributes, or <tt>null</tt> if these attributes are
-     *        not code attributes.
-     * @param maxStack the maximum stack size of the method corresponding to
-     *        these code attributes, or -1 if these attributes are not code
-     *        attributes.
-     * @param maxLocals the maximum number of local variables of the method
-     *        corresponding to these code attributes, or -1 if these attributes
-     *        are not code attributes.
+     * @param cw
+     *            the class writer to be used to convert the attributes into
+     *            byte arrays, with the {@link #write write} method.
+     * @param code
+     *            the bytecode of the method corresponding to these code
+     *            attributes, or <tt>null</tt> if these attributes are not code
+     *            attributes.
+     * @param len
+     *            the length of the bytecode of the method corresponding to
+     *            these code attributes, or <tt>null</tt> if these attributes
+     *            are not code attributes.
+     * @param maxStack
+     *            the maximum stack size of the method corresponding to these
+     *            code attributes, or -1 if these attributes are not code
+     *            attributes.
+     * @param maxLocals
+     *            the maximum number of local variables of the method
+     *            corresponding to these code attributes, or -1 if these
+     *            attributes are not code attributes.
      * @return the size of all the attributes in this attribute list. This size
      *         includes the size of the attribute headers.
      */
-    final int getSize(
-        final ClassWriter cw,
-        final byte[] code,
-        final int len,
-        final int maxStack,
-        final int maxLocals)
-    {
+    final int getSize(final ClassWriter cw, final byte[] code, final int len,
+            final int maxStack, final int maxLocals) {
         Attribute attr = this;
         int size = 0;
         while (attr != null) {
@@ -219,30 +220,30 @@ public class Attribute {
      * Writes all the attributes of this attribute list in the given byte
      * vector.
      *
-     * @param cw the class writer to be used to convert the attributes into byte
-     *        arrays, with the {@link #write write} method.
-     * @param code the bytecode of the method corresponding to these code
-     *        attributes, or <tt>null</tt> if these attributes are not code
-     *        attributes.
-     * @param len the length of the bytecode of the method corresponding to
-     *        these code attributes, or <tt>null</tt> if these attributes are
-     *        not code attributes.
-     * @param maxStack the maximum stack size of the method corresponding to
-     *        these code attributes, or -1 if these attributes are not code
-     *        attributes.
-     * @param maxLocals the maximum number of local variables of the method
-     *        corresponding to these code attributes, or -1 if these attributes
-     *        are not code attributes.
-     * @param out where the attributes must be written.
+     * @param cw
+     *            the class writer to be used to convert the attributes into
+     *            byte arrays, with the {@link #write write} method.
+     * @param code
+     *            the bytecode of the method corresponding to these code
+     *            attributes, or <tt>null</tt> if these attributes are not code
+     *            attributes.
+     * @param len
+     *            the length of the bytecode of the method corresponding to
+     *            these code attributes, or <tt>null</tt> if these attributes
+     *            are not code attributes.
+     * @param maxStack
+     *            the maximum stack size of the method corresponding to these
+     *            code attributes, or -1 if these attributes are not code
+     *            attributes.
+     * @param maxLocals
+     *            the maximum number of local variables of the method
+     *            corresponding to these code attributes, or -1 if these
+     *            attributes are not code attributes.
+     * @param out
+     *            where the attributes must be written.
      */
-    final void put(
-        final ClassWriter cw,
-        final byte[] code,
-        final int len,
-        final int maxStack,
-        final int maxLocals,
-        final ByteVector out)
-    {
+    final void put(final ClassWriter cw, final byte[] code, final int len,
+            final int maxStack, final int maxLocals, final ByteVector out) {
         Attribute attr = this;
         while (attr != null) {
             ByteVector b = attr.write(cw, code, len, maxStack, maxLocals);
diff --git a/src/asm/scala/tools/asm/ByteVector.java b/src/asm/scala/tools/asm/ByteVector.java
index 5081f01..2bc63eb 100644
--- a/src/asm/scala/tools/asm/ByteVector.java
+++ b/src/asm/scala/tools/asm/ByteVector.java
@@ -59,7 +59,8 @@ public class ByteVector {
      * Constructs a new {@link ByteVector ByteVector} with the given initial
      * size.
      *
-     * @param initialSize the initial size of the byte vector to be constructed.
+     * @param initialSize
+     *            the initial size of the byte vector to be constructed.
      */
     public ByteVector(final int initialSize) {
         data = new byte[initialSize];
@@ -69,7 +70,8 @@ public class ByteVector {
      * Puts a byte into this byte vector. The byte vector is automatically
      * enlarged if necessary.
      *
-     * @param b a byte.
+     * @param b
+     *            a byte.
      * @return this byte vector.
      */
     public ByteVector putByte(final int b) {
@@ -86,8 +88,10 @@ public class ByteVector {
      * Puts two bytes into this byte vector. The byte vector is automatically
      * enlarged if necessary.
      *
-     * @param b1 a byte.
-     * @param b2 another byte.
+     * @param b1
+     *            a byte.
+     * @param b2
+     *            another byte.
      * @return this byte vector.
      */
     ByteVector put11(final int b1, final int b2) {
@@ -106,7 +110,8 @@ public class ByteVector {
      * Puts a short into this byte vector. The byte vector is automatically
      * enlarged if necessary.
      *
-     * @param s a short.
+     * @param s
+     *            a short.
      * @return this byte vector.
      */
     public ByteVector putShort(final int s) {
@@ -125,8 +130,10 @@ public class ByteVector {
      * Puts a byte and a short into this byte vector. The byte vector is
      * automatically enlarged if necessary.
      *
-     * @param b a byte.
-     * @param s a short.
+     * @param b
+     *            a byte.
+     * @param s
+     *            a short.
      * @return this byte vector.
      */
     ByteVector put12(final int b, final int s) {
@@ -146,7 +153,8 @@ public class ByteVector {
      * Puts an int into this byte vector. The byte vector is automatically
      * enlarged if necessary.
      *
-     * @param i an int.
+     * @param i
+     *            an int.
      * @return this byte vector.
      */
     public ByteVector putInt(final int i) {
@@ -167,7 +175,8 @@ public class ByteVector {
      * Puts a long into this byte vector. The byte vector is automatically
      * enlarged if necessary.
      *
-     * @param l a long.
+     * @param l
+     *            a long.
      * @return this byte vector.
      */
     public ByteVector putLong(final long l) {
@@ -194,7 +203,8 @@ public class ByteVector {
      * Puts an UTF8 string into this byte vector. The byte vector is
      * automatically enlarged if necessary.
      *
-     * @param s a String.
+     * @param s
+     *            a String.
      * @return this byte vector.
      */
     public ByteVector putUTF8(final String s) {
@@ -259,14 +269,16 @@ public class ByteVector {
      * Puts an array of bytes into this byte vector. The byte vector is
      * automatically enlarged if necessary.
      *
-     * @param b an array of bytes. May be <tt>null</tt> to put <tt>len</tt>
-     *        null bytes into this byte vector.
-     * @param off index of the fist byte of b that must be copied.
-     * @param len number of bytes of b that must be copied.
+     * @param b
+     *            an array of bytes. May be <tt>null</tt> to put <tt>len</tt>
+     *            null bytes into this byte vector.
+     * @param off
+     *            index of the fist byte of b that must be copied.
+     * @param len
+     *            number of bytes of b that must be copied.
      * @return this byte vector.
      */
-    public ByteVector putByteArray(final byte[] b, final int off, final int len)
-    {
+    public ByteVector putByteArray(final byte[] b, final int off, final int len) {
         if (length + len > data.length) {
             enlarge(len);
         }
@@ -280,8 +292,9 @@ public class ByteVector {
     /**
      * Enlarge this byte vector so that it can receive n more bytes.
      *
-     * @param size number of additional bytes that this byte vector should be
-     *        able to receive.
+     * @param size
+     *            number of additional bytes that this byte vector should be
+     *            able to receive.
      */
     private void enlarge(final int size) {
         int length1 = 2 * data.length;
diff --git a/src/asm/scala/tools/asm/ClassReader.java b/src/asm/scala/tools/asm/ClassReader.java
index f3287d4..cc655c1 100644
--- a/src/asm/scala/tools/asm/ClassReader.java
+++ b/src/asm/scala/tools/asm/ClassReader.java
@@ -112,9 +112,8 @@ public class ClassReader {
     public final byte[] b;
 
     /**
-     * The start index of each constant pool item in {@link #b b}, plus one.
-     * The one byte offset skips the constant pool item tag that indicates its
-     * type.
+     * The start index of each constant pool item in {@link #b b}, plus one. The
+     * one byte offset skips the constant pool item tag that indicates its type.
      */
     private final int[] items;
 
@@ -147,7 +146,8 @@ public class ClassReader {
     /**
      * Constructs a new {@link ClassReader} object.
      *
-     * @param b the bytecode of the class to be read.
+     * @param b
+     *            the bytecode of the class to be read.
      */
     public ClassReader(final byte[] b) {
         this(b, 0, b.length);
@@ -156,14 +156,17 @@ public class ClassReader {
     /**
      * Constructs a new {@link ClassReader} object.
      *
-     * @param b the bytecode of the class to be read.
-     * @param off the start offset of the class data.
-     * @param len the length of the class data.
+     * @param b
+     *            the bytecode of the class to be read.
+     * @param off
+     *            the start offset of the class data.
+     * @param len
+     *            the length of the class data.
      */
     public ClassReader(final byte[] b, final int off, final int len) {
         this.b = b;
         // checks the class version
-        if (readShort(6) > Opcodes.V1_7) {
+        if (readShort(off + 6) > Opcodes.V1_7) {
             throw new IllegalArgumentException();
         }
         // parses the constant pool
@@ -176,35 +179,35 @@ public class ClassReader {
             items[i] = index + 1;
             int size;
             switch (b[index]) {
-                case ClassWriter.FIELD:
-                case ClassWriter.METH:
-                case ClassWriter.IMETH:
-                case ClassWriter.INT:
-                case ClassWriter.FLOAT:
-                case ClassWriter.NAME_TYPE:
-                case ClassWriter.INDY:
-                    size = 5;
-                    break;
-                case ClassWriter.LONG:
-                case ClassWriter.DOUBLE:
-                    size = 9;
-                    ++i;
-                    break;
-                case ClassWriter.UTF8:
-                    size = 3 + readUnsignedShort(index + 1);
-                    if (size > max) {
-                        max = size;
-                    }
-                    break;
-                case ClassWriter.HANDLE:
-                    size = 4;
-                    break;
-                // case ClassWriter.CLASS:
-                // case ClassWriter.STR:
-                // case ClassWriter.MTYPE
-                default:
-                    size = 3;
-                    break;
+            case ClassWriter.FIELD:
+            case ClassWriter.METH:
+            case ClassWriter.IMETH:
+            case ClassWriter.INT:
+            case ClassWriter.FLOAT:
+            case ClassWriter.NAME_TYPE:
+            case ClassWriter.INDY:
+                size = 5;
+                break;
+            case ClassWriter.LONG:
+            case ClassWriter.DOUBLE:
+                size = 9;
+                ++i;
+                break;
+            case ClassWriter.UTF8:
+                size = 3 + readUnsignedShort(index + 1);
+                if (size > max) {
+                    max = size;
+                }
+                break;
+            case ClassWriter.HANDLE:
+                size = 4;
+                break;
+            // case ClassWriter.CLASS:
+            // case ClassWriter.STR:
+            // case ClassWriter.MTYPE
+            default:
+                size = 3;
+                break;
             }
             index += size;
         }
@@ -249,8 +252,7 @@ public class ClassReader {
      * @see ClassVisitor#visit(int, int, String, String, String, String[])
      */
     public String getSuperName() {
-        int n = items[readUnsignedShort(header + 4)];
-        return n == 0 ? null : readUTF8(n, new char[maxStringLength]);
+        return readClass(header + 4, new char[maxStringLength]);
     }
 
     /**
@@ -280,7 +282,8 @@ public class ClassReader {
      * Copies the constant pool data into the given {@link ClassWriter}. Should
      * be called before the {@link #accept(ClassVisitor,int)} method.
      *
-     * @param classWriter the {@link ClassWriter} to copy constant pool into.
+     * @param classWriter
+     *            the {@link ClassWriter} to copy constant pool into.
      */
     void copyPool(final ClassWriter classWriter) {
         char[] buf = new char[maxStringLength];
@@ -292,82 +295,63 @@ public class ClassReader {
             Item item = new Item(i);
             int nameType;
             switch (tag) {
-                case ClassWriter.FIELD:
-                case ClassWriter.METH:
-                case ClassWriter.IMETH:
-                    nameType = items[readUnsignedShort(index + 2)];
-                    item.set(tag,
-                            readClass(index, buf),
-                            readUTF8(nameType, buf),
-                            readUTF8(nameType + 2, buf));
-                    break;
-
-                case ClassWriter.INT:
-                    item.set(readInt(index));
-                    break;
-
-                case ClassWriter.FLOAT:
-                    item.set(Float.intBitsToFloat(readInt(index)));
-                    break;
-
-                case ClassWriter.NAME_TYPE:
-                    item.set(tag,
-                            readUTF8(index, buf),
-                            readUTF8(index + 2, buf),
-                            null);
-                    break;
-
-                case ClassWriter.LONG:
-                    item.set(readLong(index));
-                    ++i;
-                    break;
-
-                case ClassWriter.DOUBLE:
-                    item.set(Double.longBitsToDouble(readLong(index)));
-                    ++i;
-                    break;
-
-                case ClassWriter.UTF8: {
-                    String s = strings[i];
-                    if (s == null) {
-                        index = items[i];
-                        s = strings[i] = readUTF(index + 2,
-                                readUnsignedShort(index),
-                                buf);
-                    }
-                    item.set(tag, s, null, null);
+            case ClassWriter.FIELD:
+            case ClassWriter.METH:
+            case ClassWriter.IMETH:
+                nameType = items[readUnsignedShort(index + 2)];
+                item.set(tag, readClass(index, buf), readUTF8(nameType, buf),
+                        readUTF8(nameType + 2, buf));
+                break;
+            case ClassWriter.INT:
+                item.set(readInt(index));
+                break;
+            case ClassWriter.FLOAT:
+                item.set(Float.intBitsToFloat(readInt(index)));
+                break;
+            case ClassWriter.NAME_TYPE:
+                item.set(tag, readUTF8(index, buf), readUTF8(index + 2, buf),
+                        null);
+                break;
+            case ClassWriter.LONG:
+                item.set(readLong(index));
+                ++i;
+                break;
+            case ClassWriter.DOUBLE:
+                item.set(Double.longBitsToDouble(readLong(index)));
+                ++i;
+                break;
+            case ClassWriter.UTF8: {
+                String s = strings[i];
+                if (s == null) {
+                    index = items[i];
+                    s = strings[i] = readUTF(index + 2,
+                            readUnsignedShort(index), buf);
                 }
-                    break;
-
-                case ClassWriter.HANDLE: {
-                    int fieldOrMethodRef = items[readUnsignedShort(index + 1)];
-                    nameType = items[readUnsignedShort(fieldOrMethodRef + 2)];
-                    item.set(ClassWriter.HANDLE_BASE + readByte(index),
-                            readClass(fieldOrMethodRef, buf),
-                            readUTF8(nameType, buf),
-                            readUTF8(nameType + 2, buf));
-
+                item.set(tag, s, null, null);
+                break;
+            }
+            case ClassWriter.HANDLE: {
+                int fieldOrMethodRef = items[readUnsignedShort(index + 1)];
+                nameType = items[readUnsignedShort(fieldOrMethodRef + 2)];
+                item.set(ClassWriter.HANDLE_BASE + readByte(index),
+                        readClass(fieldOrMethodRef, buf),
+                        readUTF8(nameType, buf), readUTF8(nameType + 2, buf));
+                break;
+            }
+            case ClassWriter.INDY:
+                if (classWriter.bootstrapMethods == null) {
+                    copyBootstrapMethods(classWriter, items2, buf);
                 }
-                    break;
-
-
-                case ClassWriter.INDY:
-                    if (classWriter.bootstrapMethods == null) {
-                        copyBootstrapMethods(classWriter, items2, buf);
-                    }
-                    nameType = items[readUnsignedShort(index + 2)];
-                    item.set(readUTF8(nameType, buf),
-                            readUTF8(nameType + 2, buf),
-                            readUnsignedShort(index));
-                    break;
-
-
-                // case ClassWriter.STR:
-                // case ClassWriter.CLASS:
-                // case ClassWriter.MTYPE
-                default:
-                    item.set(tag, readUTF8(index, buf), null, null);
-                    break;
+                nameType = items[readUnsignedShort(index + 2)];
+                item.set(readUTF8(nameType, buf), readUTF8(nameType + 2, buf),
+                        readUnsignedShort(index));
+                break;
+            // case ClassWriter.STR:
+            // case ClassWriter.CLASS:
+            // case ClassWriter.MTYPE
+            default:
+                item.set(tag, readUTF8(index, buf), null, null);
+                break;
             }
 
             int index2 = item.hashCode % items2.length;
@@ -382,77 +366,59 @@ public class ClassReader {
         classWriter.index = ll;
     }
 
-    private void copyBootstrapMethods(ClassWriter classWriter, Item[] items2, char[] buf) {
-        int i, j, k, u, v;
-
-        // skip class header
-        v = header;
-        v += 8 + (readUnsignedShort(v + 6) << 1);
-
-        // skips fields and methods
-        i = readUnsignedShort(v);
-        v += 2;
-        for (; i > 0; --i) {
-            j = readUnsignedShort(v + 6);
-            v += 8;
-            for (; j > 0; --j) {
-                v += 6 + readInt(v + 2);
+    /**
+     * Copies the bootstrap method data into the given {@link ClassWriter}.
+     * Should be called before the {@link #accept(ClassVisitor,int)} method.
+     *
+     * @param classWriter
+     *            the {@link ClassWriter} to copy bootstrap methods into.
+     */
+    private void copyBootstrapMethods(final ClassWriter classWriter,
+            final Item[] items, final char[] c) {
+        // finds the "BootstrapMethods" attribute
+        int u = getAttributes();
+        boolean found = false;
+        for (int i = readUnsignedShort(u); i > 0; --i) {
+            String attrName = readUTF8(u + 2, c);
+            if ("BootstrapMethods".equals(attrName)) {
+                found = true;
+                break;
             }
+            u += 6 + readInt(u + 4);
         }
-        i = readUnsignedShort(v);
-        v += 2;
-        for (; i > 0; --i) {
-            j = readUnsignedShort(v + 6);
-            v += 8;
-            for (; j > 0; --j) {
-                v += 6 + readInt(v + 2);
-            }
+        if (!found) {
+            return;
         }
-
-        // read class attributes
-        i = readUnsignedShort(v);
-        v += 2;
-        for (; i > 0; --i) {
-            String attrName = readUTF8(v, buf);
-            int size = readInt(v + 2);
-            if ("BootstrapMethods".equals(attrName)) {
-                int boostrapMethodCount = readUnsignedShort(v + 6);
-                int x = v + 8;
-                for (j = 0; j < boostrapMethodCount; j++) {
-                    int hashCode = readConst(readUnsignedShort(x), buf).hashCode();
-                    k = readUnsignedShort(x + 2);
-                    u = x + 4;
-                    for(; k > 0; --k) {
-                        hashCode ^= readConst(readUnsignedShort(u), buf).hashCode();
-                        u += 2;
-                    }
-                    Item item = new Item(j);
-                    item.set(x - v - 8, hashCode & 0x7FFFFFFF);
-
-                    int index2 = item.hashCode % items2.length;
-                    item.next = items2[index2];
-                    items2[index2] = item;
-
-                    x = u;
-                }
-
-                classWriter.bootstrapMethodsCount = boostrapMethodCount;
-                ByteVector bootstrapMethods = new ByteVector(size + 62);
-                bootstrapMethods.putByteArray(b, v + 8, size - 2);
-                classWriter.bootstrapMethods = bootstrapMethods;
-                return;
+        // copies the bootstrap methods in the class writer
+        int boostrapMethodCount = readUnsignedShort(u + 8);
+        for (int j = 0, v = u + 10; j < boostrapMethodCount; j++) {
+            int position = v - u - 10;
+            int hashCode = readConst(readUnsignedShort(v), c).hashCode();
+            for (int k = readUnsignedShort(v + 2); k > 0; --k) {
+                hashCode ^= readConst(readUnsignedShort(v + 4), c).hashCode();
+                v += 2;
             }
-            v += 6 + size;
+            v += 4;
+            Item item = new Item(j);
+            item.set(position, hashCode & 0x7FFFFFFF);
+            int index = item.hashCode % items.length;
+            item.next = items[index];
+            items[index] = item;
         }
-
-        // we are in trouble !!!
+        int attrSize = readInt(u + 4);
+        ByteVector bootstrapMethods = new ByteVector(attrSize + 62);
+        bootstrapMethods.putByteArray(b, u + 10, attrSize - 2);
+        classWriter.bootstrapMethodsCount = boostrapMethodCount;
+        classWriter.bootstrapMethods = bootstrapMethods;
     }
 
     /**
      * Constructs a new {@link ClassReader} object.
      *
-     * @param is an input stream from which to read the class.
-     * @throws IOException if a problem occurs during reading.
+     * @param is
+     *            an input stream from which to read the class.
+     * @throws IOException
+     *             if a problem occurs during reading.
      */
     public ClassReader(final InputStream is) throws IOException {
         this(readClass(is, false));
@@ -461,25 +427,30 @@ public class ClassReader {
     /**
      * Constructs a new {@link ClassReader} object.
      *
-     * @param name the binary qualified name of the class to be read.
-     * @throws IOException if an exception occurs during reading.
+     * @param name
+     *            the binary qualified name of the class to be read.
+     * @throws IOException
+     *             if an exception occurs during reading.
      */
     public ClassReader(final String name) throws IOException {
-        this(readClass(ClassLoader.getSystemResourceAsStream(name.replace('.', '/')
-                + ".class"), true));
+        this(readClass(
+                ClassLoader.getSystemResourceAsStream(name.replace('.', '/')
+                        + ".class"), true));
     }
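
Both of these constructors funnel into the byte-array constructor; the String form locates the bytes through ClassLoader.getSystemResourceAsStream, the stream form reads whatever it is given. A short illustration, assuming the org.objectweb.asm package (the file name Foo.class is only a placeholder):

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    import org.objectweb.asm.ClassReader;

    public class ReaderConstruction {
        public static void main(String[] args) throws IOException {
            // By binary name: resolved as "java/lang/Runnable.class" on the
            // system class loader's resource path.
            ClassReader byName = new ClassReader("java.lang.Runnable");

            // From an arbitrary stream, e.g. a class file on disk.
            try (InputStream in = new FileInputStream("Foo.class")) {
                ClassReader fromStream = new ClassReader(in);
                System.out.println(byName.getClassName());
                System.out.println(fromStream.getClassName());
            }
        }
    }
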
 
     /**
      * Reads the bytecode of a class.
      *
-     * @param is an input stream from which to read the class.
-     * @param close true to close the input stream after reading.
+     * @param is
+     *            an input stream from which to read the class.
+     * @param close
+     *            true to close the input stream after reading.
      * @return the bytecode read from the given input stream.
-     * @throws IOException if a problem occurs during reading.
+     * @throws IOException
+     *             if a problem occurs during reading.
      */
     private static byte[] readClass(final InputStream is, boolean close)
-            throws IOException
-    {
+            throws IOException {
         if (is == null) {
             throw new IOException("Class not found");
         }
@@ -520,14 +491,16 @@ public class ClassReader {
     // ------------------------------------------------------------------------
 
     /**
-     * Makes the given visitor visit the Java class of this {@link ClassReader}.
-     * This class is the one specified in the constructor (see
+     * Makes the given visitor visit the Java class of this {@link ClassReader}
+     * . This class is the one specified in the constructor (see
      * {@link #ClassReader(byte[]) ClassReader}).
      *
-     * @param classVisitor the visitor that must visit this class.
-     * @param flags option flags that can be used to modify the default behavior
-     *        of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES},
-     *        {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
+     * @param classVisitor
+     *            the visitor that must visit this class.
+     * @param flags
+     *            option flags that can be used to modify the default behavior
+     *            of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES}
+     *            , {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
      */
     public void accept(final ClassVisitor classVisitor, final int flags) {
         accept(classVisitor, new Attribute[0], flags);
@@ -538,1117 +511,923 @@ public class ClassReader {
      * This class is the one specified in the constructor (see
      * {@link #ClassReader(byte[]) ClassReader}).
      *
-     * @param classVisitor the visitor that must visit this class.
-     * @param attrs prototypes of the attributes that must be parsed during the
-     *        visit of the class. Any attribute whose type is not equal to the
-     *        type of one the prototypes will not be parsed: its byte array
-     *        value will be passed unchanged to the ClassWriter. <i>This may
-     *        corrupt it if this value contains references to the constant pool,
-     *        or has syntactic or semantic links with a class element that has
-     *        been transformed by a class adapter between the reader and the
-     *        writer</i>.
-     * @param flags option flags that can be used to modify the default behavior
-     *        of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES},
-     *        {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
+     * @param classVisitor
+     *            the visitor that must visit this class.
+     * @param attrs
+     *            prototypes of the attributes that must be parsed during the
+     *            visit of the class. Any attribute whose type is not equal to
+     *            the type of one the prototypes will not be parsed: its byte
+     *            array value will be passed unchanged to the ClassWriter.
+     *            <i>This may corrupt it if this value contains references to
+     *            the constant pool, or has syntactic or semantic links with a
+     *            class element that has been transformed by a class adapter
+     *            between the reader and the writer</i>.
+     * @param flags
+     *            option flags that can be used to modify the default behavior
+     *            of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES}
+     *            , {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
      */
-    public void accept(
-        final ClassVisitor classVisitor,
-        final Attribute[] attrs,
-        final int flags)
-    {
-        byte[] b = this.b; // the bytecode array
+    public void accept(final ClassVisitor classVisitor,
+            final Attribute[] attrs, final int flags) {
+        int u = header; // current offset in the class file
         char[] c = new char[maxStringLength]; // buffer used to read strings
-        int i, j, k; // loop variables
-        int u, v, w; // indexes in b
-        Attribute attr;
-
-        int access;
-        String name;
-        String desc;
-        String attrName;
-        String signature;
-        int anns = 0;
-        int ianns = 0;
-        Attribute cattrs = null;
-
-        // visits the header
-        u = header;
-        access = readUnsignedShort(u);
-        name = readClass(u + 2, c);
-        v = items[readUnsignedShort(u + 4)];
-        String superClassName = v == 0 ? null : readUTF8(v, c);
-        String[] implementedItfs = new String[readUnsignedShort(u + 6)];
-        w = 0;
+
+        Context context = new Context();
+        context.attrs = attrs;
+        context.flags = flags;
+        context.buffer = c;
+
+        // reads the class declaration
+        int access = readUnsignedShort(u);
+        String name = readClass(u + 2, c);
+        String superClass = readClass(u + 4, c);
+        String[] interfaces = new String[readUnsignedShort(u + 6)];
         u += 8;
-        for (i = 0; i < implementedItfs.length; ++i) {
-            implementedItfs[i] = readClass(u, c);
+        for (int i = 0; i < interfaces.length; ++i) {
+            interfaces[i] = readClass(u, c);
             u += 2;
         }
 
-        boolean skipCode = (flags & SKIP_CODE) != 0;
-        boolean skipDebug = (flags & SKIP_DEBUG) != 0;
-        boolean unzip = (flags & EXPAND_FRAMES) != 0;
-
-        // skips fields and methods
-        v = u;
-        i = readUnsignedShort(v);
-        v += 2;
-        for (; i > 0; --i) {
-            j = readUnsignedShort(v + 6);
-            v += 8;
-            for (; j > 0; --j) {
-                v += 6 + readInt(v + 2);
-            }
-        }
-        i = readUnsignedShort(v);
-        v += 2;
-        for (; i > 0; --i) {
-            j = readUnsignedShort(v + 6);
-            v += 8;
-            for (; j > 0; --j) {
-                v += 6 + readInt(v + 2);
-            }
-        }
-        // reads the class's attributes
-        signature = null;
+        // reads the class attributes
+        String signature = null;
         String sourceFile = null;
         String sourceDebug = null;
         String enclosingOwner = null;
         String enclosingName = null;
         String enclosingDesc = null;
-        int[] bootstrapMethods = null;  // start indexed of the bsms
+        int anns = 0;
+        int ianns = 0;
+        int innerClasses = 0;
+        Attribute attributes = null;
 
-        i = readUnsignedShort(v);
-        v += 2;
-        for (; i > 0; --i) {
-            attrName = readUTF8(v, c);
+        u = getAttributes();
+        for (int i = readUnsignedShort(u); i > 0; --i) {
+            String attrName = readUTF8(u + 2, c);
             // tests are sorted in decreasing frequency order
             // (based on frequencies observed on typical classes)
             if ("SourceFile".equals(attrName)) {
-                sourceFile = readUTF8(v + 6, c);
+                sourceFile = readUTF8(u + 8, c);
             } else if ("InnerClasses".equals(attrName)) {
-                w = v + 6;
+                innerClasses = u + 8;
             } else if ("EnclosingMethod".equals(attrName)) {
-                enclosingOwner = readClass(v + 6, c);
-                int item = readUnsignedShort(v + 8);
+                enclosingOwner = readClass(u + 8, c);
+                int item = readUnsignedShort(u + 10);
                 if (item != 0) {
                     enclosingName = readUTF8(items[item], c);
                     enclosingDesc = readUTF8(items[item] + 2, c);
                 }
             } else if (SIGNATURES && "Signature".equals(attrName)) {
-                signature = readUTF8(v + 6, c);
-            } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
-                anns = v + 6;
+                signature = readUTF8(u + 8, c);
+            } else if (ANNOTATIONS
+                    && "RuntimeVisibleAnnotations".equals(attrName)) {
+                anns = u + 8;
             } else if ("Deprecated".equals(attrName)) {
                 access |= Opcodes.ACC_DEPRECATED;
             } else if ("Synthetic".equals(attrName)) {
-                access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+                access |= Opcodes.ACC_SYNTHETIC
+                        | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
             } else if ("SourceDebugExtension".equals(attrName)) {
-                int len = readInt(v + 2);
-                sourceDebug = readUTF(v + 6, len, new char[len]);
-            } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
-                ianns = v + 6;
+                int len = readInt(u + 4);
+                sourceDebug = readUTF(u + 8, len, new char[len]);
+            } else if (ANNOTATIONS
+                    && "RuntimeInvisibleAnnotations".equals(attrName)) {
+                ianns = u + 8;
             } else if ("BootstrapMethods".equals(attrName)) {
-                int boostrapMethodCount = readUnsignedShort(v + 6);
-                bootstrapMethods = new int[boostrapMethodCount];
-                int x = v + 8;
-                for (j = 0; j < boostrapMethodCount; j++) {
-                    bootstrapMethods[j] = x;
-                    x += 2 + readUnsignedShort(x + 2) << 1;
+                int[] bootstrapMethods = new int[readUnsignedShort(u + 8)];
+                for (int j = 0, v = u + 10; j < bootstrapMethods.length; j++) {
+                    bootstrapMethods[j] = v;
+                    v += 2 + readUnsignedShort(v + 2) << 1;
                 }
+                context.bootstrapMethods = bootstrapMethods;
             } else {
-                attr = readAttribute(attrs,
-                        attrName,
-                        v + 6,
-                        readInt(v + 2),
-                        c,
-                        -1,
-                        null);
+                Attribute attr = readAttribute(attrs, attrName, u + 8,
+                        readInt(u + 4), c, -1, null);
                 if (attr != null) {
-                    attr.next = cattrs;
-                    cattrs = attr;
+                    attr.next = attributes;
+                    attributes = attr;
                 }
             }
-            v += 6 + readInt(v + 2);
+            u += 6 + readInt(u + 4);
         }
-        // calls the visit method
-        classVisitor.visit(readInt(4),
-                access,
-                name,
-                signature,
-                superClassName,
-                implementedItfs);
-
-        // calls the visitSource method
-        if (!skipDebug && (sourceFile != null || sourceDebug != null)) {
+
+        // visits the class declaration
+        classVisitor.visit(readInt(items[1] - 7), access, name, signature,
+                superClass, interfaces);
+
+        // visits the source and debug info
+        if ((flags & SKIP_DEBUG) == 0
+                && (sourceFile != null || sourceDebug != null)) {
             classVisitor.visitSource(sourceFile, sourceDebug);
         }
 
-        // calls the visitOuterClass method
+        // visits the outer class
         if (enclosingOwner != null) {
-            classVisitor.visitOuterClass(enclosingOwner,
-                    enclosingName,
+            classVisitor.visitOuterClass(enclosingOwner, enclosingName,
                     enclosingDesc);
         }
 
         // visits the class annotations
-        if (ANNOTATIONS) {
-            for (i = 1; i >= 0; --i) {
-                v = i == 0 ? ianns : anns;
-                if (v != 0) {
-                    j = readUnsignedShort(v);
-                    v += 2;
-                    for (; j > 0; --j) {
-                        v = readAnnotationValues(v + 2,
-                                c,
-                                true,
-                                classVisitor.visitAnnotation(readUTF8(v, c), i != 0));
-                    }
-                }
+        if (ANNOTATIONS && anns != 0) {
+            for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) {
+                v = readAnnotationValues(v + 2, c, true,
+                        classVisitor.visitAnnotation(readUTF8(v, c), true));
+            }
+        }
+        if (ANNOTATIONS && ianns != 0) {
+            for (int i = readUnsignedShort(ianns), v = ianns + 2; i > 0; --i) {
+                v = readAnnotationValues(v + 2, c, true,
+                        classVisitor.visitAnnotation(readUTF8(v, c), false));
             }
         }
 
-        // visits the class attributes
-        while (cattrs != null) {
-            attr = cattrs.next;
-            cattrs.next = null;
-            classVisitor.visitAttribute(cattrs);
-            cattrs = attr;
+        // visits the attributes
+        while (attributes != null) {
+            Attribute attr = attributes.next;
+            attributes.next = null;
+            classVisitor.visitAttribute(attributes);
+            attributes = attr;
         }
 
-        // calls the visitInnerClass method
-        if (w != 0) {
-            i = readUnsignedShort(w);
-            w += 2;
-            for (; i > 0; --i) {
-                classVisitor.visitInnerClass(readUnsignedShort(w) == 0
-                        ? null
-                        : readClass(w, c), readUnsignedShort(w + 2) == 0
-                        ? null
-                        : readClass(w + 2, c), readUnsignedShort(w + 4) == 0
-                        ? null
-                        : readUTF8(w + 4, c), readUnsignedShort(w + 6));
-                w += 8;
+        // visits the inner classes
+        if (innerClasses != 0) {
+            int v = innerClasses + 2;
+            for (int i = readUnsignedShort(innerClasses); i > 0; --i) {
+                classVisitor.visitInnerClass(readClass(v, c),
+                        readClass(v + 2, c), readUTF8(v + 4, c),
+                        readUnsignedShort(v + 6));
+                v += 8;
             }
         }
 
-        // visits the fields
-        i = readUnsignedShort(u);
+        // visits the fields and methods
+        u = header + 10 + 2 * interfaces.length;
+        for (int i = readUnsignedShort(u - 2); i > 0; --i) {
+            u = readField(classVisitor, context, u);
+        }
         u += 2;
-        for (; i > 0; --i) {
-            access = readUnsignedShort(u);
-            name = readUTF8(u + 2, c);
-            desc = readUTF8(u + 4, c);
-            // visits the field's attributes and looks for a ConstantValue
-            // attribute
-            int fieldValueItem = 0;
-            signature = null;
-            anns = 0;
-            ianns = 0;
-            cattrs = null;
-
-            j = readUnsignedShort(u + 6);
-            u += 8;
-            for (; j > 0; --j) {
-                attrName = readUTF8(u, c);
-                // tests are sorted in decreasing frequency order
-                // (based on frequencies observed on typical classes)
-                if ("ConstantValue".equals(attrName)) {
-                    fieldValueItem = readUnsignedShort(u + 6);
-                } else if (SIGNATURES && "Signature".equals(attrName)) {
-                    signature = readUTF8(u + 6, c);
-                } else if ("Deprecated".equals(attrName)) {
-                    access |= Opcodes.ACC_DEPRECATED;
-                } else if ("Synthetic".equals(attrName)) {
-                    access |= Opcodes.ACC_SYNTHETIC  | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
-                } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
-                    anns = u + 6;
-                } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
-                    ianns = u + 6;
-                } else {
-                    attr = readAttribute(attrs,
-                            attrName,
-                            u + 6,
-                            readInt(u + 2),
-                            c,
-                            -1,
-                            null);
-                    if (attr != null) {
-                        attr.next = cattrs;
-                        cattrs = attr;
-                    }
+        for (int i = readUnsignedShort(u - 2); i > 0; --i) {
+            u = readMethod(classVisitor, context, u);
+        }
+
+        // visits the end of the class
+        classVisitor.visitEnd();
+    }
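
The flags documented above decide how much of this traversal actually happens: SKIP_CODE stops readMethod from descending into Code attributes, SKIP_DEBUG drops source, line-number and local-variable information, and SKIP_FRAMES ignores stack map frames. A hedged sketch of a read-only pass that uses them (the visitor class MemberLister is made up for the example):

    import java.io.IOException;

    import org.objectweb.asm.ClassReader;
    import org.objectweb.asm.ClassVisitor;
    import org.objectweb.asm.MethodVisitor;
    import org.objectweb.asm.Opcodes;

    // Lists the methods of a class without rebuilding anything; the flags
    // keep readMethod()/readCode() from parsing what this visitor ignores.
    public class MemberLister extends ClassVisitor {
        public MemberLister() {
            super(Opcodes.ASM4);
        }

        @Override
        public MethodVisitor visitMethod(int access, String name, String desc,
                String signature, String[] exceptions) {
            System.out.println(name + desc);
            return null; // no interest in the method body
        }

        public static void main(String[] args) throws IOException {
            new ClassReader("java.lang.Thread").accept(new MemberLister(),
                    ClassReader.SKIP_CODE | ClassReader.SKIP_DEBUG
                            | ClassReader.SKIP_FRAMES);
        }
    }
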
+
+    /**
+     * Reads a field and makes the given visitor visit it.
+     *
+     * @param classVisitor
+     *            the visitor that must visit the field.
+     * @param context
+     *            information about the class being parsed.
+     * @param u
+     *            the start offset of the field in the class file.
+     * @return the offset of the first byte following the field in the class.
+     */
+    private int readField(final ClassVisitor classVisitor,
+            final Context context, int u) {
+        // reads the field declaration
+        char[] c = context.buffer;
+        int access = readUnsignedShort(u);
+        String name = readUTF8(u + 2, c);
+        String desc = readUTF8(u + 4, c);
+        u += 6;
+
+        // reads the field attributes
+        String signature = null;
+        int anns = 0;
+        int ianns = 0;
+        Object value = null;
+        Attribute attributes = null;
+
+        for (int i = readUnsignedShort(u); i > 0; --i) {
+            String attrName = readUTF8(u + 2, c);
+            // tests are sorted in decreasing frequency order
+            // (based on frequencies observed on typical classes)
+            if ("ConstantValue".equals(attrName)) {
+                int item = readUnsignedShort(u + 8);
+                value = item == 0 ? null : readConst(item, c);
+            } else if (SIGNATURES && "Signature".equals(attrName)) {
+                signature = readUTF8(u + 8, c);
+            } else if ("Deprecated".equals(attrName)) {
+                access |= Opcodes.ACC_DEPRECATED;
+            } else if ("Synthetic".equals(attrName)) {
+                access |= Opcodes.ACC_SYNTHETIC
+                        | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+            } else if (ANNOTATIONS
+                    && "RuntimeVisibleAnnotations".equals(attrName)) {
+                anns = u + 8;
+            } else if (ANNOTATIONS
+                    && "RuntimeInvisibleAnnotations".equals(attrName)) {
+                ianns = u + 8;
+            } else {
+                Attribute attr = readAttribute(context.attrs, attrName, u + 8,
+                        readInt(u + 4), c, -1, null);
+                if (attr != null) {
+                    attr.next = attributes;
+                    attributes = attr;
                 }
-                u += 6 + readInt(u + 2);
             }
-            // visits the field
-            FieldVisitor fv = classVisitor.visitField(access,
-                    name,
-                    desc,
-                    signature,
-                    fieldValueItem == 0 ? null : readConst(fieldValueItem, c));
-            // visits the field annotations and attributes
-            if (fv != null) {
-                if (ANNOTATIONS) {
-                    for (j = 1; j >= 0; --j) {
-                        v = j == 0 ? ianns : anns;
-                        if (v != 0) {
-                            k = readUnsignedShort(v);
-                            v += 2;
-                            for (; k > 0; --k) {
-                                v = readAnnotationValues(v + 2,
-                                        c,
-                                        true,
-                                        fv.visitAnnotation(readUTF8(v, c), j != 0));
-                            }
-                        }
-                    }
-                }
-                while (cattrs != null) {
-                    attr = cattrs.next;
-                    cattrs.next = null;
-                    fv.visitAttribute(cattrs);
-                    cattrs = attr;
-                }
-                fv.visitEnd();
+            u += 6 + readInt(u + 4);
+        }
+        u += 2;
+
+        // visits the field declaration
+        FieldVisitor fv = classVisitor.visitField(access, name, desc,
+                signature, value);
+        if (fv == null) {
+            return u;
+        }
+
+        // visits the field annotations
+        if (ANNOTATIONS && anns != 0) {
+            for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) {
+                v = readAnnotationValues(v + 2, c, true,
+                        fv.visitAnnotation(readUTF8(v, c), true));
+            }
+        }
+        if (ANNOTATIONS && ianns != 0) {
+            for (int i = readUnsignedShort(ianns), v = ianns + 2; i > 0; --i) {
+                v = readAnnotationValues(v + 2, c, true,
+                        fv.visitAnnotation(readUTF8(v, c), false));
             }
         }
 
-        // visits the methods
-        i = readUnsignedShort(u);
-        u += 2;
-        for (; i > 0; --i) {
-            int u0 = u + 6;
-            access = readUnsignedShort(u);
-            name = readUTF8(u + 2, c);
-            desc = readUTF8(u + 4, c);
-            signature = null;
-            anns = 0;
-            ianns = 0;
-            int dann = 0;
-            int mpanns = 0;
-            int impanns = 0;
-            cattrs = null;
-            v = 0;
-            w = 0;
-
-            // looks for Code and Exceptions attributes
-            j = readUnsignedShort(u + 6);
-            u += 8;
-            for (; j > 0; --j) {
-                attrName = readUTF8(u, c);
-                int attrSize = readInt(u + 2);
-                u += 6;
-                // tests are sorted in decreasing frequency order
-                // (based on frequencies observed on typical classes)
-                if ("Code".equals(attrName)) {
-                    if (!skipCode) {
-                        v = u;
-                    }
-                } else if ("Exceptions".equals(attrName)) {
-                    w = u;
-                } else if (SIGNATURES && "Signature".equals(attrName)) {
-                    signature = readUTF8(u, c);
-                } else if ("Deprecated".equals(attrName)) {
-                    access |= Opcodes.ACC_DEPRECATED;
-                } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
-                    anns = u;
-                } else if (ANNOTATIONS && "AnnotationDefault".equals(attrName)) {
-                    dann = u;
-                } else if ("Synthetic".equals(attrName)) {
-                    access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
-                } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
-                    ianns = u;
-                } else if (ANNOTATIONS && "RuntimeVisibleParameterAnnotations".equals(attrName))
-                {
-                    mpanns = u;
-                } else if (ANNOTATIONS && "RuntimeInvisibleParameterAnnotations".equals(attrName))
-                {
-                    impanns = u;
-                } else {
-                    attr = readAttribute(attrs,
-                            attrName,
-                            u,
-                            attrSize,
-                            c,
-                            -1,
-                            null);
-                    if (attr != null) {
-                        attr.next = cattrs;
-                        cattrs = attr;
-                    }
+        // visits the field attributes
+        while (attributes != null) {
+            Attribute attr = attributes.next;
+            attributes.next = null;
+            fv.visitAttribute(attributes);
+            attributes = attr;
+        }
+
+        // visits the end of the field
+        fv.visitEnd();
+
+        return u;
+    }
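
As the ConstantValue branch above shows, the value argument passed to visitField is non-null only for fields whose class-file entry carries a ConstantValue attribute (in practice static final compile-time constants), and returning null from visitField makes readField skip the field's annotations and attributes entirely. A small hedged illustration (the class ConstantDumper is made up):

    import java.io.IOException;

    import org.objectweb.asm.ClassReader;
    import org.objectweb.asm.ClassVisitor;
    import org.objectweb.asm.FieldVisitor;
    import org.objectweb.asm.Opcodes;

    // Prints fields that have a ConstantValue attribute; all other fields
    // arrive here with value == null.
    public class ConstantDumper extends ClassVisitor {
        public ConstantDumper() {
            super(Opcodes.ASM4);
        }

        @Override
        public FieldVisitor visitField(int access, String name, String desc,
                String signature, Object value) {
            if (value != null) {
                System.out.println(name + " = " + value);
            }
            return null;
        }

        public static void main(String[] args) throws IOException {
            new ClassReader("java.lang.Integer").accept(new ConstantDumper(),
                    ClassReader.SKIP_CODE | ClassReader.SKIP_DEBUG);
        }
    }
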
+
+    /**
+     * Reads a method and makes the given visitor visit it.
+     *
+     * @param classVisitor
+     *            the visitor that must visit the method.
+     * @param context
+     *            information about the class being parsed.
+     * @param u
+     *            the start offset of the method in the class file.
+     * @return the offset of the first byte following the method in the class.
+     */
+    private int readMethod(final ClassVisitor classVisitor,
+            final Context context, int u) {
+        // reads the method declaration
+        char[] c = context.buffer;
+        int access = readUnsignedShort(u);
+        String name = readUTF8(u + 2, c);
+        String desc = readUTF8(u + 4, c);
+        u += 6;
+
+        // reads the method attributes
+        int code = 0;
+        int exception = 0;
+        String[] exceptions = null;
+        String signature = null;
+        int anns = 0;
+        int ianns = 0;
+        int dann = 0;
+        int mpanns = 0;
+        int impanns = 0;
+        int firstAttribute = u;
+        Attribute attributes = null;
+
+        for (int i = readUnsignedShort(u); i > 0; --i) {
+            String attrName = readUTF8(u + 2, c);
+            // tests are sorted in decreasing frequency order
+            // (based on frequencies observed on typical classes)
+            if ("Code".equals(attrName)) {
+                if ((context.flags & SKIP_CODE) == 0) {
+                    code = u + 8;
                 }
-                u += attrSize;
-            }
-            // reads declared exceptions
-            String[] exceptions;
-            if (w == 0) {
-                exceptions = null;
+            } else if ("Exceptions".equals(attrName)) {
+                exceptions = new String[readUnsignedShort(u + 8)];
+                exception = u + 10;
+                for (int j = 0; j < exceptions.length; ++j) {
+                    exceptions[j] = readClass(exception, c);
+                    exception += 2;
+                }
+            } else if (SIGNATURES && "Signature".equals(attrName)) {
+                signature = readUTF8(u + 8, c);
+            } else if ("Deprecated".equals(attrName)) {
+                access |= Opcodes.ACC_DEPRECATED;
+            } else if (ANNOTATIONS
+                    && "RuntimeVisibleAnnotations".equals(attrName)) {
+                anns = u + 8;
+            } else if (ANNOTATIONS && "AnnotationDefault".equals(attrName)) {
+                dann = u + 8;
+            } else if ("Synthetic".equals(attrName)) {
+                access |= Opcodes.ACC_SYNTHETIC
+                        | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+            } else if (ANNOTATIONS
+                    && "RuntimeInvisibleAnnotations".equals(attrName)) {
+                ianns = u + 8;
+            } else if (ANNOTATIONS
+                    && "RuntimeVisibleParameterAnnotations".equals(attrName)) {
+                mpanns = u + 8;
+            } else if (ANNOTATIONS
+                    && "RuntimeInvisibleParameterAnnotations".equals(attrName)) {
+                impanns = u + 8;
             } else {
-                exceptions = new String[readUnsignedShort(w)];
-                w += 2;
-                for (j = 0; j < exceptions.length; ++j) {
-                    exceptions[j] = readClass(w, c);
-                    w += 2;
+                Attribute attr = readAttribute(context.attrs, attrName, u + 8,
+                        readInt(u + 4), c, -1, null);
+                if (attr != null) {
+                    attr.next = attributes;
+                    attributes = attr;
                 }
             }
+            u += 6 + readInt(u + 4);
+        }
+        u += 2;
 
-            // visits the method's code, if any
-            MethodVisitor mv = classVisitor.visitMethod(access,
-                    name,
-                    desc,
-                    signature,
-                    exceptions);
+        // visits the method declaration
+        MethodVisitor mv = classVisitor.visitMethod(access, name, desc,
+                signature, exceptions);
+        if (mv == null) {
+            return u;
+        }
 
-            if (mv != null) {
-                /*
-                 * if the returned MethodVisitor is in fact a MethodWriter, it
-                 * means there is no method adapter between the reader and the
-                 * writer. If, in addition, the writer's constant pool was
-                 * copied from this reader (mw.cw.cr == this), and the signature
-                 * and exceptions of the method have not been changed, then it
-                 * is possible to skip all visit events and just copy the
-                 * original code of the method to the writer (the access, name
-                 * and descriptor can have been changed, this is not important
-                 * since they are not copied as is from the reader).
-                 */
-                if (WRITER && mv instanceof MethodWriter) {
-                    MethodWriter mw = (MethodWriter) mv;
-                    if (mw.cw.cr == this) {
-                        if (signature == mw.signature) {
-                            boolean sameExceptions = false;
-                            if (exceptions == null) {
-                                sameExceptions = mw.exceptionCount == 0;
-                            } else {
-                                if (exceptions.length == mw.exceptionCount) {
-                                    sameExceptions = true;
-                                    for (j = exceptions.length - 1; j >= 0; --j)
-                                    {
-                                        w -= 2;
-                                        if (mw.exceptions[j] != readUnsignedShort(w))
-                                        {
-                                            sameExceptions = false;
-                                            break;
-                                        }
-                                    }
-                                }
-                            }
-                            if (sameExceptions) {
-                                /*
-                                 * we do not copy directly the code into
-                                 * MethodWriter to save a byte array copy
-                                 * operation. The real copy will be done in
-                                 * ClassWriter.toByteArray().
-                                 */
-                                mw.classReaderOffset = u0;
-                                mw.classReaderLength = u - u0;
-                                continue;
-                            }
+        /*
+         * if the returned MethodVisitor is in fact a MethodWriter, it means
+         * there is no method adapter between the reader and the writer. If, in
+         * addition, the writer's constant pool was copied from this reader
+         * (mw.cw.cr == this), and the signature and exceptions of the method
+         * have not been changed, then it is possible to skip all visit events
+         * and just copy the original code of the method to the writer (the
+         * access, name and descriptor can have been changed, this is not
+         * important since they are not copied as is from the reader).
+         */
+        if (WRITER && mv instanceof MethodWriter) {
+            MethodWriter mw = (MethodWriter) mv;
+            if (mw.cw.cr == this && signature == mw.signature) {
+                boolean sameExceptions = false;
+                if (exceptions == null) {
+                    sameExceptions = mw.exceptionCount == 0;
+                } else if (exceptions.length == mw.exceptionCount) {
+                    sameExceptions = true;
+                    for (int j = exceptions.length - 1; j >= 0; --j) {
+                        exception -= 2;
+                        if (mw.exceptions[j] != readUnsignedShort(exception)) {
+                            sameExceptions = false;
+                            break;
                         }
                     }
                 }
-
-                if (ANNOTATIONS && dann != 0) {
-                    AnnotationVisitor dv = mv.visitAnnotationDefault();
-                    readAnnotationValue(dann, c, null, dv);
-                    if (dv != null) {
-                        dv.visitEnd();
-                    }
-                }
-                if (ANNOTATIONS) {
-                    for (j = 1; j >= 0; --j) {
-                        w = j == 0 ? ianns : anns;
-                        if (w != 0) {
-                            k = readUnsignedShort(w);
-                            w += 2;
-                            for (; k > 0; --k) {
-                                w = readAnnotationValues(w + 2,
-                                        c,
-                                        true,
-                                        mv.visitAnnotation(readUTF8(w, c), j != 0));
-                            }
-                        }
-                    }
+                if (sameExceptions) {
+                    /*
+                     * we do not copy directly the code into MethodWriter to
+                     * save a byte array copy operation. The real copy will be
+                     * done in ClassWriter.toByteArray().
+                     */
+                    mw.classReaderOffset = firstAttribute;
+                    mw.classReaderLength = u - firstAttribute;
+                    return u;
                 }
-                if (ANNOTATIONS && mpanns != 0) {
-                    readParameterAnnotations(mpanns, desc, c, true, mv);
+            }
+        }
+
+        // visits the method annotations
+        if (ANNOTATIONS && dann != 0) {
+            AnnotationVisitor dv = mv.visitAnnotationDefault();
+            readAnnotationValue(dann, c, null, dv);
+            if (dv != null) {
+                dv.visitEnd();
+            }
+        }
+        if (ANNOTATIONS && anns != 0) {
+            for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) {
+                v = readAnnotationValues(v + 2, c, true,
+                        mv.visitAnnotation(readUTF8(v, c), true));
+            }
+        }
+        if (ANNOTATIONS && ianns != 0) {
+            for (int i = readUnsignedShort(ianns), v = ianns + 2; i > 0; --i) {
+                v = readAnnotationValues(v + 2, c, true,
+                        mv.visitAnnotation(readUTF8(v, c), false));
+            }
+        }
+        if (ANNOTATIONS && mpanns != 0) {
+            readParameterAnnotations(mpanns, desc, c, true, mv);
+        }
+        if (ANNOTATIONS && impanns != 0) {
+            readParameterAnnotations(impanns, desc, c, false, mv);
+        }
+
+        // visits the method attributes
+        while (attributes != null) {
+            Attribute attr = attributes.next;
+            attributes.next = null;
+            mv.visitAttribute(attributes);
+            attributes = attr;
+        }
+
+        // visits the method code
+        if (code != 0) {
+            context.access = access;
+            context.name = name;
+            context.desc = desc;
+            mv.visitCode();
+            readCode(mv, context, code);
+        }
+
+        // visits the end of the method
+        mv.visitEnd();
+
+        return u;
+    }
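
The block comment above describes the fast path taken here: when the visitor chain hands back the writer's own MethodWriter and neither the signature nor the declared exceptions were changed, readMethod records only classReaderOffset/classReaderLength and the raw method bytes are copied later, in ClassWriter.toByteArray(). A hedged sketch of the two set-ups, assuming the public ASM API; only the first can take the shortcut, because wrapping the returned MethodVisitor hides the MethodWriter from the instanceof check (CopyPaths is an illustrative name):

    import java.io.IOException;

    import org.objectweb.asm.ClassReader;
    import org.objectweb.asm.ClassVisitor;
    import org.objectweb.asm.ClassWriter;
    import org.objectweb.asm.MethodVisitor;
    import org.objectweb.asm.Opcodes;

    public class CopyPaths {
        public static void main(String[] args) throws IOException {
            ClassReader cr = new ClassReader("java.util.ArrayList");

            // 1) Reader feeds the writer directly: method bodies can be
            //    copied as raw bytes (mw.cw.cr == this in the check above).
            ClassWriter direct = new ClassWriter(cr, 0);
            cr.accept(direct, 0);

            // 2) An adapter that wraps the returned MethodVisitor forces
            //    every instruction to be re-visited and re-serialized.
            ClassWriter rebuilt = new ClassWriter(cr, 0);
            ClassVisitor adapter = new ClassVisitor(Opcodes.ASM4, rebuilt) {
                @Override
                public MethodVisitor visitMethod(int access, String name,
                        String desc, String signature, String[] exceptions) {
                    MethodVisitor mv = super.visitMethod(access, name, desc,
                            signature, exceptions);
                    return new MethodVisitor(Opcodes.ASM4, mv) {};
                }
            };
            cr.accept(adapter, 0);
        }
    }
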
+
+    /**
+     * Reads the bytecode of a method and makes the given visitor visit it.
+     *
+     * @param mv
+     *            the visitor that must visit the method's code.
+     * @param context
+     *            information about the class being parsed.
+     * @param u
+     *            the start offset of the code attribute in the class file.
+     */
+    private void readCode(final MethodVisitor mv, final Context context, int u) {
+        // reads the header
+        byte[] b = this.b;
+        char[] c = context.buffer;
+        int maxStack = readUnsignedShort(u);
+        int maxLocals = readUnsignedShort(u + 2);
+        int codeLength = readInt(u + 4);
+        u += 8;
+
+        // reads the bytecode to find the labels
+        int codeStart = u;
+        int codeEnd = u + codeLength;
+        Label[] labels = new Label[codeLength + 2];
+        readLabel(codeLength + 1, labels);
+        while (u < codeEnd) {
+            int offset = u - codeStart;
+            int opcode = b[u] & 0xFF;
+            switch (ClassWriter.TYPE[opcode]) {
+            case ClassWriter.NOARG_INSN:
+            case ClassWriter.IMPLVAR_INSN:
+                u += 1;
+                break;
+            case ClassWriter.LABEL_INSN:
+                readLabel(offset + readShort(u + 1), labels);
+                u += 3;
+                break;
+            case ClassWriter.LABELW_INSN:
+                readLabel(offset + readInt(u + 1), labels);
+                u += 5;
+                break;
+            case ClassWriter.WIDE_INSN:
+                opcode = b[u + 1] & 0xFF;
+                if (opcode == Opcodes.IINC) {
+                    u += 6;
+                } else {
+                    u += 4;
                 }
-                if (ANNOTATIONS && impanns != 0) {
-                    readParameterAnnotations(impanns, desc, c, false, mv);
+                break;
+            case ClassWriter.TABL_INSN:
+                // skips 0 to 3 padding bytes
+                u = u + 4 - (offset & 3);
+                // reads instruction
+                readLabel(offset + readInt(u), labels);
+                for (int i = readInt(u + 8) - readInt(u + 4) + 1; i > 0; --i) {
+                    readLabel(offset + readInt(u + 12), labels);
+                    u += 4;
                 }
-                while (cattrs != null) {
-                    attr = cattrs.next;
-                    cattrs.next = null;
-                    mv.visitAttribute(cattrs);
-                    cattrs = attr;
+                u += 12;
+                break;
+            case ClassWriter.LOOK_INSN:
+                // skips 0 to 3 padding bytes
+                u = u + 4 - (offset & 3);
+                // reads instruction
+                readLabel(offset + readInt(u), labels);
+                for (int i = readInt(u + 4); i > 0; --i) {
+                    readLabel(offset + readInt(u + 12), labels);
+                    u += 8;
                 }
+                u += 8;
+                break;
+            case ClassWriter.VAR_INSN:
+            case ClassWriter.SBYTE_INSN:
+            case ClassWriter.LDC_INSN:
+                u += 2;
+                break;
+            case ClassWriter.SHORT_INSN:
+            case ClassWriter.LDCW_INSN:
+            case ClassWriter.FIELDORMETH_INSN:
+            case ClassWriter.TYPE_INSN:
+            case ClassWriter.IINC_INSN:
+                u += 3;
+                break;
+            case ClassWriter.ITFMETH_INSN:
+            case ClassWriter.INDYMETH_INSN:
+                u += 5;
+                break;
+            // case MANA_INSN:
+            default:
+                u += 4;
+                break;
             }
+        }
 
-            if (mv != null && v != 0) {
-                int maxStack = readUnsignedShort(v);
-                int maxLocals = readUnsignedShort(v + 2);
-                int codeLength = readInt(v + 4);
-                v += 8;
+        // reads the try catch entries to find the labels, and also visits them
+        for (int i = readUnsignedShort(u); i > 0; --i) {
+            Label start = readLabel(readUnsignedShort(u + 2), labels);
+            Label end = readLabel(readUnsignedShort(u + 4), labels);
+            Label handler = readLabel(readUnsignedShort(u + 6), labels);
+            String type = readUTF8(items[readUnsignedShort(u + 8)], c);
+            mv.visitTryCatchBlock(start, end, handler, type);
+            u += 8;
+        }
+        u += 2;
 
-                int codeStart = v;
-                int codeEnd = v + codeLength;
-
-                mv.visitCode();
-
-                // 1st phase: finds the labels
-                int label;
-                Label[] labels = new Label[codeLength + 2];
-                readLabel(codeLength + 1, labels);
-                while (v < codeEnd) {
-                    w = v - codeStart;
-                    int opcode = b[v] & 0xFF;
-                    switch (ClassWriter.TYPE[opcode]) {
-                        case ClassWriter.NOARG_INSN:
-                        case ClassWriter.IMPLVAR_INSN:
-                            v += 1;
-                            break;
-                        case ClassWriter.LABEL_INSN:
-                            readLabel(w + readShort(v + 1), labels);
-                            v += 3;
-                            break;
-                        case ClassWriter.LABELW_INSN:
-                            readLabel(w + readInt(v + 1), labels);
-                            v += 5;
-                            break;
-                        case ClassWriter.WIDE_INSN:
-                            opcode = b[v + 1] & 0xFF;
-                            if (opcode == Opcodes.IINC) {
-                                v += 6;
-                            } else {
-                                v += 4;
-                            }
-                            break;
-                        case ClassWriter.TABL_INSN:
-                            // skips 0 to 3 padding bytes*
-                            v = v + 4 - (w & 3);
-                            // reads instruction
-                            readLabel(w + readInt(v), labels);
-                            j = readInt(v + 8) - readInt(v + 4) + 1;
-                            v += 12;
-                            for (; j > 0; --j) {
-                                readLabel(w + readInt(v), labels);
-                                v += 4;
-                            }
-                            break;
-                        case ClassWriter.LOOK_INSN:
-                            // skips 0 to 3 padding bytes*
-                            v = v + 4 - (w & 3);
-                            // reads instruction
-                            readLabel(w + readInt(v), labels);
-                            j = readInt(v + 4);
-                            v += 8;
-                            for (; j > 0; --j) {
-                                readLabel(w + readInt(v + 4), labels);
-                                v += 8;
-                            }
-                            break;
-                        case ClassWriter.VAR_INSN:
-                        case ClassWriter.SBYTE_INSN:
-                        case ClassWriter.LDC_INSN:
-                            v += 2;
-                            break;
-                        case ClassWriter.SHORT_INSN:
-                        case ClassWriter.LDCW_INSN:
-                        case ClassWriter.FIELDORMETH_INSN:
-                        case ClassWriter.TYPE_INSN:
-                        case ClassWriter.IINC_INSN:
-                            v += 3;
-                            break;
-                        case ClassWriter.ITFMETH_INSN:
-                        case ClassWriter.INDYMETH_INSN:
-                            v += 5;
-                            break;
-                        // case MANA_INSN:
-                        default:
-                            v += 4;
-                            break;
-                    }
-                }
-                // parses the try catch entries
-                j = readUnsignedShort(v);
-                v += 2;
-                for (; j > 0; --j) {
-                    Label start = readLabel(readUnsignedShort(v), labels);
-                    Label end = readLabel(readUnsignedShort(v + 2), labels);
-                    Label handler = readLabel(readUnsignedShort(v + 4), labels);
-                    int type = readUnsignedShort(v + 6);
-                    if (type == 0) {
-                        mv.visitTryCatchBlock(start, end, handler, null);
-                    } else {
-                        mv.visitTryCatchBlock(start,
-                                end,
-                                handler,
-                                readUTF8(items[type], c));
-                    }
-                    v += 8;
-                }
-                // parses the local variable, line number tables, and code
-                // attributes
-                int varTable = 0;
-                int varTypeTable = 0;
-                int stackMap = 0;
-                int stackMapSize = 0;
-                int frameCount = 0;
-                int frameMode = 0;
-                int frameOffset = 0;
-                int frameLocalCount = 0;
-                int frameLocalDiff = 0;
-                int frameStackCount = 0;
-                Object[] frameLocal = null;
-                Object[] frameStack = null;
-                boolean zip = true;
-                cattrs = null;
-                j = readUnsignedShort(v);
-                v += 2;
-                for (; j > 0; --j) {
-                    attrName = readUTF8(v, c);
-                    if ("LocalVariableTable".equals(attrName)) {
-                        if (!skipDebug) {
-                            varTable = v + 6;
-                            k = readUnsignedShort(v + 6);
-                            w = v + 8;
-                            for (; k > 0; --k) {
-                                label = readUnsignedShort(w);
-                                if (labels[label] == null) {
-                                    readLabel(label, labels).status |= Label.DEBUG;
-                                }
-                                label += readUnsignedShort(w + 2);
-                                if (labels[label] == null) {
-                                    readLabel(label, labels).status |= Label.DEBUG;
-                                }
-                                w += 10;
-                            }
-                        }
-                    } else if ("LocalVariableTypeTable".equals(attrName)) {
-                        varTypeTable = v + 6;
-                    } else if ("LineNumberTable".equals(attrName)) {
-                        if (!skipDebug) {
-                            k = readUnsignedShort(v + 6);
-                            w = v + 8;
-                            for (; k > 0; --k) {
-                                label = readUnsignedShort(w);
-                                if (labels[label] == null) {
-                                    readLabel(label, labels).status |= Label.DEBUG;
-                                }
-                                labels[label].line = readUnsignedShort(w + 2);
-                                w += 4;
-                            }
-                        }
-                    } else if (FRAMES && "StackMapTable".equals(attrName)) {
-                        if ((flags & SKIP_FRAMES) == 0) {
-                            stackMap = v + 8;
-                            stackMapSize = readInt(v + 2);
-                            frameCount = readUnsignedShort(v + 6);
+        // reads the code attributes
+        int varTable = 0;
+        int varTypeTable = 0;
+        boolean zip = true;
+        boolean unzip = (context.flags & EXPAND_FRAMES) != 0;
+        int stackMap = 0;
+        int stackMapSize = 0;
+        int frameCount = 0;
+        Context frame = null;
+        Attribute attributes = null;
+
+        for (int i = readUnsignedShort(u); i > 0; --i) {
+            String attrName = readUTF8(u + 2, c);
+            if ("LocalVariableTable".equals(attrName)) {
+                if ((context.flags & SKIP_DEBUG) == 0) {
+                    varTable = u + 8;
+                    for (int j = readUnsignedShort(u + 8), v = u; j > 0; --j) {
+                        int label = readUnsignedShort(v + 10);
+                        if (labels[label] == null) {
+                            readLabel(label, labels).status |= Label.DEBUG;
                         }
-                        /*
-                         * here we do not extract the labels corresponding to
-                         * the attribute content. This would require a full
-                         * parsing of the attribute, which would need to be
-                         * repeated in the second phase (see below). Instead the
-                         * content of the attribute is read one frame at a time
-                         * (i.e. after a frame has been visited, the next frame
-                         * is read), and the labels it contains are also
-                         * extracted one frame at a time. Thanks to the ordering
-                         * of frames, having only a "one frame lookahead" is not
-                         * a problem, i.e. it is not possible to see an offset
-                         * smaller than the offset of the current insn and for
-                         * which no Label exist.
-                         */
-                        /*
-                         * This is not true for UNINITIALIZED type offsets. We
-                         * solve this by parsing the stack map table without a
-                         * full decoding (see below).
-                         */
-                    } else if (FRAMES && "StackMap".equals(attrName)) {
-                        if ((flags & SKIP_FRAMES) == 0) {
-                            stackMap = v + 8;
-                            stackMapSize = readInt(v + 2);
-                            frameCount = readUnsignedShort(v + 6);
-                            zip = false;
-                        }
-                        /*
-                         * IMPORTANT! here we assume that the frames are
-                         * ordered, as in the StackMapTable attribute, although
-                         * this is not guaranteed by the attribute format.
-                         */
-                    } else {
-                        for (k = 0; k < attrs.length; ++k) {
-                            if (attrs[k].type.equals(attrName)) {
-                                attr = attrs[k].read(this,
-                                        v + 6,
-                                        readInt(v + 2),
-                                        c,
-                                        codeStart - 8,
-                                        labels);
-                                if (attr != null) {
-                                    attr.next = cattrs;
-                                    cattrs = attr;
-                                }
-                            }
+                        label += readUnsignedShort(v + 12);
+                        if (labels[label] == null) {
+                            readLabel(label, labels).status |= Label.DEBUG;
                         }
+                        v += 10;
                     }
-                    v += 6 + readInt(v + 2);
                 }
-
-                // 2nd phase: visits each instruction
-                if (FRAMES && stackMap != 0) {
-                    // creates the very first (implicit) frame from the method
-                    // descriptor
-                    frameLocal = new Object[maxLocals];
-                    frameStack = new Object[maxStack];
-                    if (unzip) {
-                        int local = 0;
-                        if ((access & Opcodes.ACC_STATIC) == 0) {
-                            if ("<init>".equals(name)) {
-                                frameLocal[local++] = Opcodes.UNINITIALIZED_THIS;
-                            } else {
-                                frameLocal[local++] = readClass(header + 2, c);
-                            }
-                        }
-                        j = 1;
-                        loop: while (true) {
-                            k = j;
-                            switch (desc.charAt(j++)) {
-                                case 'Z':
-                                case 'C':
-                                case 'B':
-                                case 'S':
-                                case 'I':
-                                    frameLocal[local++] = Opcodes.INTEGER;
-                                    break;
-                                case 'F':
-                                    frameLocal[local++] = Opcodes.FLOAT;
-                                    break;
-                                case 'J':
-                                    frameLocal[local++] = Opcodes.LONG;
-                                    break;
-                                case 'D':
-                                    frameLocal[local++] = Opcodes.DOUBLE;
-                                    break;
-                                case '[':
-                                    while (desc.charAt(j) == '[') {
-                                        ++j;
-                                    }
-                                    if (desc.charAt(j) == 'L') {
-                                        ++j;
-                                        while (desc.charAt(j) != ';') {
-                                            ++j;
-                                        }
-                                    }
-                                    frameLocal[local++] = desc.substring(k, ++j);
-                                    break;
-                                case 'L':
-                                    while (desc.charAt(j) != ';') {
-                                        ++j;
-                                    }
-                                    frameLocal[local++] = desc.substring(k + 1,
-                                            j++);
-                                    break;
-                                default:
-                                    break loop;
-                            }
+            } else if ("LocalVariableTypeTable".equals(attrName)) {
+                varTypeTable = u + 8;
+            } else if ("LineNumberTable".equals(attrName)) {
+                if ((context.flags & SKIP_DEBUG) == 0) {
+                    for (int j = readUnsignedShort(u + 8), v = u; j > 0; --j) {
+                        int label = readUnsignedShort(v + 10);
+                        if (labels[label] == null) {
+                            readLabel(label, labels).status |= Label.DEBUG;
                         }
-                        frameLocalCount = local;
+                        labels[label].line = readUnsignedShort(v + 12);
+                        v += 4;
                     }
-                    /*
-                     * for the first explicit frame the offset is not
-                     * offset_delta + 1 but only offset_delta; setting the
-                     * implicit frame offset to -1 allow the use of the
-                     * "offset_delta + 1" rule in all cases
-                     */
-                    frameOffset = -1;
-                    /*
-                     * Finds labels for UNINITIALIZED frame types. Instead of
-                     * decoding each element of the stack map table, we look
-                     * for 3 consecutive bytes that "look like" an UNINITIALIZED
-                     * type (tag 8, offset within code bounds, NEW instruction
-                     * at this offset). We may find false positives (i.e. not
-                     * real UNINITIALIZED types), but this should be rare, and
-                     * the only consequence will be the creation of an unneeded
-                     * label. This is better than creating a label for each NEW
-                     * instruction, and faster than fully decoding the whole
-                     * stack map table.
-                     */
-                    for (j = stackMap; j < stackMap + stackMapSize - 2; ++j) {
-                        if (b[j] == 8) { // UNINITIALIZED FRAME TYPE
-                            k = readUnsignedShort(j + 1);
-                            if (k >= 0 && k < codeLength) { // potential offset
-                                if ((b[codeStart + k] & 0xFF) == Opcodes.NEW) { // NEW at this offset
-                                    readLabel(k, labels);
-                                }
-                            }
+                }
+            } else if (FRAMES && "StackMapTable".equals(attrName)) {
+                if ((context.flags & SKIP_FRAMES) == 0) {
+                    stackMap = u + 10;
+                    stackMapSize = readInt(u + 4);
+                    frameCount = readUnsignedShort(u + 8);
+                }
+                /*
+                 * here we do not extract the labels corresponding to the
+                 * attribute content. This would require a full parsing of the
+                 * attribute, which would need to be repeated in the second
+                 * phase (see below). Instead the content of the attribute is
+                 * read one frame at a time (i.e. after a frame has been
+                 * visited, the next frame is read), and the labels it contains
+                 * are also extracted one frame at a time. Thanks to the
+                 * ordering of frames, having only a "one frame lookahead" is
+                 * not a problem, i.e. it is not possible to see an offset
+                 * smaller than the offset of the current insn and for which no
+                 * Label exist.
+                 */
+                /*
+                 * This is not true for UNINITIALIZED type offsets. We solve
+                 * this by parsing the stack map table without a full decoding
+                 * (see below).
+                 */
+            } else if (FRAMES && "StackMap".equals(attrName)) {
+                if ((context.flags & SKIP_FRAMES) == 0) {
+                    zip = false;
+                    stackMap = u + 10;
+                    stackMapSize = readInt(u + 4);
+                    frameCount = readUnsignedShort(u + 8);
+                }
+                /*
+                 * IMPORTANT! here we assume that the frames are ordered, as in
+                 * the StackMapTable attribute, although this is not guaranteed
+                 * by the attribute format.
+                 */
+            } else {
+                for (int j = 0; j < context.attrs.length; ++j) {
+                    if (context.attrs[j].type.equals(attrName)) {
+                        Attribute attr = context.attrs[j].read(this, u + 8,
+                                readInt(u + 4), c, codeStart - 8, labels);
+                        if (attr != null) {
+                            attr.next = attributes;
+                            attributes = attr;
                         }
                     }
                 }
-                v = codeStart;
-                Label l;
-                while (v < codeEnd) {
-                    w = v - codeStart;
-
-                    l = labels[w];
-                    if (l != null) {
-                        mv.visitLabel(l);
-                        if (!skipDebug && l.line > 0) {
-                            mv.visitLineNumber(l.line, l);
+            }
+            u += 6 + readInt(u + 4);
+        }
+        u += 2;
+
+        // generates the first (implicit) stack map frame
+        if (FRAMES && stackMap != 0) {
+            /*
+             * for the first explicit frame the offset is not offset_delta + 1
+             * but only offset_delta; setting the implicit frame offset to -1
+             * allow the use of the "offset_delta + 1" rule in all cases
+             */
+            frame = context;
+            frame.offset = -1;
+            frame.mode = 0;
+            frame.localCount = 0;
+            frame.localDiff = 0;
+            frame.stackCount = 0;
+            frame.local = new Object[maxLocals];
+            frame.stack = new Object[maxStack];
+            if (unzip) {
+                getImplicitFrame(context);
+            }
+            /*
+             * Finds labels for UNINITIALIZED frame types. Instead of decoding
+             * each element of the stack map table, we look for 3 consecutive
+             * bytes that "look like" an UNINITIALIZED type (tag 8, offset
+             * within code bounds, NEW instruction at this offset). We may find
+             * false positives (i.e. not real UNINITIALIZED types), but this
+             * should be rare, and the only consequence will be the creation of
+             * an unneeded label. This is better than creating a label for each
+             * NEW instruction, and faster than fully decoding the whole stack
+             * map table.
+             */
+            for (int i = stackMap; i < stackMap + stackMapSize - 2; ++i) {
+                if (b[i] == 8) { // UNINITIALIZED FRAME TYPE
+                    int v = readUnsignedShort(i + 1);
+                    if (v >= 0 && v < codeLength) {
+                        if ((b[codeStart + v] & 0xFF) == Opcodes.NEW) {
+                            readLabel(v, labels);
                         }
                     }
+                }
+            }
+        }
 
-                    while (FRAMES && frameLocal != null
-                            && (frameOffset == w || frameOffset == -1))
-                    {
-                        // if there is a frame for this offset,
-                        // makes the visitor visit it,
-                        // and reads the next frame if there is one.
-                        if (!zip || unzip) {
-                            mv.visitFrame(Opcodes.F_NEW,
-                                    frameLocalCount,
-                                    frameLocal,
-                                    frameStackCount,
-                                    frameStack);
-                        } else if (frameOffset != -1) {
-                            mv.visitFrame(frameMode,
-                                    frameLocalDiff,
-                                    frameLocal,
-                                    frameStackCount,
-                                    frameStack);
-                        }
+        // visits the instructions
+        u = codeStart;
+        while (u < codeEnd) {
+            int offset = u - codeStart;
+
+            // visits the label and line number for this offset, if any
+            Label l = labels[offset];
+            if (l != null) {
+                mv.visitLabel(l);
+                if ((context.flags & SKIP_DEBUG) == 0 && l.line > 0) {
+                    mv.visitLineNumber(l.line, l);
+                }
+            }
 
-                        if (frameCount > 0) {
-                            int tag, delta, n;
-                            if (zip) {
-                                tag = b[stackMap++] & 0xFF;
-                            } else {
-                                tag = MethodWriter.FULL_FRAME;
-                                frameOffset = -1;
-                            }
-                            frameLocalDiff = 0;
-                            if (tag < MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME)
-                            {
-                                delta = tag;
-                                frameMode = Opcodes.F_SAME;
-                                frameStackCount = 0;
-                            } else if (tag < MethodWriter.RESERVED) {
-                                delta = tag
-                                        - MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME;
-                                stackMap = readFrameType(frameStack,
-                                        0,
-                                        stackMap,
-                                        c,
-                                        labels);
-                                frameMode = Opcodes.F_SAME1;
-                                frameStackCount = 1;
-                            } else {
-                                delta = readUnsignedShort(stackMap);
-                                stackMap += 2;
-                                if (tag == MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
-                                {
-                                    stackMap = readFrameType(frameStack,
-                                            0,
-                                            stackMap,
-                                            c,
-                                            labels);
-                                    frameMode = Opcodes.F_SAME1;
-                                    frameStackCount = 1;
-                                } else if (tag >= MethodWriter.CHOP_FRAME
-                                        && tag < MethodWriter.SAME_FRAME_EXTENDED)
-                                {
-                                    frameMode = Opcodes.F_CHOP;
-                                    frameLocalDiff = MethodWriter.SAME_FRAME_EXTENDED
-                                            - tag;
-                                    frameLocalCount -= frameLocalDiff;
-                                    frameStackCount = 0;
-                                } else if (tag == MethodWriter.SAME_FRAME_EXTENDED)
-                                {
-                                    frameMode = Opcodes.F_SAME;
-                                    frameStackCount = 0;
-                                } else if (tag < MethodWriter.FULL_FRAME) {
-                                    j = unzip ? frameLocalCount : 0;
-                                    for (k = tag
-                                            - MethodWriter.SAME_FRAME_EXTENDED; k > 0; k--)
-                                    {
-                                        stackMap = readFrameType(frameLocal,
-                                                j++,
-                                                stackMap,
-                                                c,
-                                                labels);
-                                    }
-                                    frameMode = Opcodes.F_APPEND;
-                                    frameLocalDiff = tag
-                                            - MethodWriter.SAME_FRAME_EXTENDED;
-                                    frameLocalCount += frameLocalDiff;
-                                    frameStackCount = 0;
-                                } else { // if (tag == FULL_FRAME) {
-                                    frameMode = Opcodes.F_FULL;
-                                    n = frameLocalDiff = frameLocalCount = readUnsignedShort(stackMap);
-                                    stackMap += 2;
-                                    for (j = 0; n > 0; n--) {
-                                        stackMap = readFrameType(frameLocal,
-                                                j++,
-                                                stackMap,
-                                                c,
-                                                labels);
-                                    }
-                                    n = frameStackCount = readUnsignedShort(stackMap);
-                                    stackMap += 2;
-                                    for (j = 0; n > 0; n--) {
-                                        stackMap = readFrameType(frameStack,
-                                                j++,
-                                                stackMap,
-                                                c,
-                                                labels);
-                                    }
-                                }
-                            }
-                            frameOffset += delta + 1;
-                            readLabel(frameOffset, labels);
-
-                            --frameCount;
-                        } else {
-                            frameLocal = null;
-                        }
+            // visits the frame for this offset, if any
+            while (FRAMES && frame != null
+                    && (frame.offset == offset || frame.offset == -1)) {
+                // if there is a frame for this offset, makes the visitor visit
+                // it, and reads the next frame if there is one.
+                if (frame.offset != -1) {
+                    if (!zip || unzip) {
+                        mv.visitFrame(Opcodes.F_NEW, frame.localCount,
+                                frame.local, frame.stackCount, frame.stack);
+                    } else {
+                        mv.visitFrame(frame.mode, frame.localDiff, frame.local,
+                                frame.stackCount, frame.stack);
                     }
+                }
+                if (frameCount > 0) {
+                    stackMap = readFrame(stackMap, zip, unzip, labels, frame);
+                    --frameCount;
+                } else {
+                    frame = null;
+                }
+            }
 
-                    int opcode = b[v] & 0xFF;
-                    switch (ClassWriter.TYPE[opcode]) {
-                        case ClassWriter.NOARG_INSN:
-                            mv.visitInsn(opcode);
-                            v += 1;
-                            break;
-                        case ClassWriter.IMPLVAR_INSN:
-                            if (opcode > Opcodes.ISTORE) {
-                                opcode -= 59; // ISTORE_0
-                                mv.visitVarInsn(Opcodes.ISTORE + (opcode >> 2),
-                                        opcode & 0x3);
-                            } else {
-                                opcode -= 26; // ILOAD_0
-                                mv.visitVarInsn(Opcodes.ILOAD + (opcode >> 2),
-                                        opcode & 0x3);
-                            }
-                            v += 1;
-                            break;
-                        case ClassWriter.LABEL_INSN:
-                            mv.visitJumpInsn(opcode, labels[w
-                                    + readShort(v + 1)]);
-                            v += 3;
-                            break;
-                        case ClassWriter.LABELW_INSN:
-                            mv.visitJumpInsn(opcode - 33, labels[w
-                                    + readInt(v + 1)]);
-                            v += 5;
-                            break;
-                        case ClassWriter.WIDE_INSN:
-                            opcode = b[v + 1] & 0xFF;
-                            if (opcode == Opcodes.IINC) {
-                                mv.visitIincInsn(readUnsignedShort(v + 2),
-                                        readShort(v + 4));
-                                v += 6;
-                            } else {
-                                mv.visitVarInsn(opcode,
-                                        readUnsignedShort(v + 2));
-                                v += 4;
-                            }
-                            break;
-                        case ClassWriter.TABL_INSN:
-                            // skips 0 to 3 padding bytes
-                            v = v + 4 - (w & 3);
-                            // reads instruction
-                            label = w + readInt(v);
-                            int min = readInt(v + 4);
-                            int max = readInt(v + 8);
-                            v += 12;
-                            Label[] table = new Label[max - min + 1];
-                            for (j = 0; j < table.length; ++j) {
-                                table[j] = labels[w + readInt(v)];
-                                v += 4;
-                            }
-                            mv.visitTableSwitchInsn(min,
-                                    max,
-                                    labels[label],
-                                    table);
-                            break;
-                        case ClassWriter.LOOK_INSN:
-                            // skips 0 to 3 padding bytes
-                            v = v + 4 - (w & 3);
-                            // reads instruction
-                            label = w + readInt(v);
-                            j = readInt(v + 4);
-                            v += 8;
-                            int[] keys = new int[j];
-                            Label[] values = new Label[j];
-                            for (j = 0; j < keys.length; ++j) {
-                                keys[j] = readInt(v);
-                                values[j] = labels[w + readInt(v + 4)];
-                                v += 8;
-                            }
-                            mv.visitLookupSwitchInsn(labels[label],
-                                    keys,
-                                    values);
-                            break;
-                        case ClassWriter.VAR_INSN:
-                            mv.visitVarInsn(opcode, b[v + 1] & 0xFF);
-                            v += 2;
-                            break;
-                        case ClassWriter.SBYTE_INSN:
-                            mv.visitIntInsn(opcode, b[v + 1]);
-                            v += 2;
-                            break;
-                        case ClassWriter.SHORT_INSN:
-                            mv.visitIntInsn(opcode, readShort(v + 1));
-                            v += 3;
-                            break;
-                        case ClassWriter.LDC_INSN:
-                            mv.visitLdcInsn(readConst(b[v + 1] & 0xFF, c));
-                            v += 2;
-                            break;
-                        case ClassWriter.LDCW_INSN:
-                            mv.visitLdcInsn(readConst(readUnsignedShort(v + 1),
-                                    c));
-                            v += 3;
-                            break;
-                        case ClassWriter.FIELDORMETH_INSN:
-                        case ClassWriter.ITFMETH_INSN: {
-                            int cpIndex = items[readUnsignedShort(v + 1)];
-                            String iowner = readClass(cpIndex, c);
-                            cpIndex = items[readUnsignedShort(cpIndex + 2)];
-                            String iname = readUTF8(cpIndex, c);
-                            String idesc = readUTF8(cpIndex + 2, c);
-                            if (opcode < Opcodes.INVOKEVIRTUAL) {
-                                mv.visitFieldInsn(opcode, iowner, iname, idesc);
-                            } else {
-                                mv.visitMethodInsn(opcode, iowner, iname, idesc);
-                            }
-                            if (opcode == Opcodes.INVOKEINTERFACE) {
-                                v += 5;
-                            } else {
-                                v += 3;
-                            }
-                            break;
-                        }
-                        case ClassWriter.INDYMETH_INSN: {
-                            int cpIndex = items[readUnsignedShort(v + 1)];
-                            int bsmIndex = bootstrapMethods[readUnsignedShort(cpIndex)];
-                            cpIndex = items[readUnsignedShort(cpIndex + 2)];
-                            String iname = readUTF8(cpIndex, c);
-                            String idesc = readUTF8(cpIndex + 2, c);
-
-                            int mhIndex = readUnsignedShort(bsmIndex);
-                            Handle bsm = (Handle) readConst(mhIndex, c);
-                            int bsmArgCount = readUnsignedShort(bsmIndex + 2);
-                            Object[] bsmArgs = new Object[bsmArgCount];
-                            bsmIndex += 4;
-                            for(int a = 0; a < bsmArgCount; a++) {
-                                int argIndex = readUnsignedShort(bsmIndex);
-                                bsmArgs[a] = readConst(argIndex, c);
-                                bsmIndex += 2;
-                            }
-                            mv.visitInvokeDynamicInsn(iname, idesc, bsm, bsmArgs);
-
-                            v += 5;
-                            break;
-                        }
-                        case ClassWriter.TYPE_INSN:
-                            mv.visitTypeInsn(opcode, readClass(v + 1, c));
-                            v += 3;
-                            break;
-                        case ClassWriter.IINC_INSN:
-                            mv.visitIincInsn(b[v + 1] & 0xFF, b[v + 2]);
-                            v += 3;
-                            break;
-                        // case MANA_INSN:
-                        default:
-                            mv.visitMultiANewArrayInsn(readClass(v + 1, c),
-                                    b[v + 3] & 0xFF);
-                            v += 4;
-                            break;
-                    }
+            // visits the instruction at this offset
+            int opcode = b[u] & 0xFF;
+            switch (ClassWriter.TYPE[opcode]) {
+            case ClassWriter.NOARG_INSN:
+                mv.visitInsn(opcode);
+                u += 1;
+                break;
+            case ClassWriter.IMPLVAR_INSN:
+                if (opcode > Opcodes.ISTORE) {
+                    opcode -= 59; // ISTORE_0
+                    mv.visitVarInsn(Opcodes.ISTORE + (opcode >> 2),
+                            opcode & 0x3);
+                } else {
+                    opcode -= 26; // ILOAD_0
+                    mv.visitVarInsn(Opcodes.ILOAD + (opcode >> 2), opcode & 0x3);
                 }
-                l = labels[codeEnd - codeStart];
-                if (l != null) {
-                    mv.visitLabel(l);
+                u += 1;
+                break;
+            case ClassWriter.LABEL_INSN:
+                mv.visitJumpInsn(opcode, labels[offset + readShort(u + 1)]);
+                u += 3;
+                break;
+            case ClassWriter.LABELW_INSN:
+                mv.visitJumpInsn(opcode - 33, labels[offset + readInt(u + 1)]);
+                u += 5;
+                break;
+            case ClassWriter.WIDE_INSN:
+                opcode = b[u + 1] & 0xFF;
+                if (opcode == Opcodes.IINC) {
+                    mv.visitIincInsn(readUnsignedShort(u + 2), readShort(u + 4));
+                    u += 6;
+                } else {
+                    mv.visitVarInsn(opcode, readUnsignedShort(u + 2));
+                    u += 4;
                 }
-                // visits the local variable tables
-                if (!skipDebug && varTable != 0) {
-                    int[] typeTable = null;
-                    if (varTypeTable != 0) {
-                        k = readUnsignedShort(varTypeTable) * 3;
-                        w = varTypeTable + 2;
-                        typeTable = new int[k];
-                        while (k > 0) {
-                            typeTable[--k] = w + 6; // signature
-                            typeTable[--k] = readUnsignedShort(w + 8); // index
-                            typeTable[--k] = readUnsignedShort(w); // start
-                            w += 10;
-                        }
-                    }
-                    k = readUnsignedShort(varTable);
-                    w = varTable + 2;
-                    for (; k > 0; --k) {
-                        int start = readUnsignedShort(w);
-                        int length = readUnsignedShort(w + 2);
-                        int index = readUnsignedShort(w + 8);
-                        String vsignature = null;
-                        if (typeTable != null) {
-                            for (int a = 0; a < typeTable.length; a += 3) {
-                                if (typeTable[a] == start
-                                        && typeTable[a + 1] == index)
-                                {
-                                    vsignature = readUTF8(typeTable[a + 2], c);
-                                    break;
-                                }
-                            }
-                        }
-                        mv.visitLocalVariable(readUTF8(w + 4, c),
-                                readUTF8(w + 6, c),
-                                vsignature,
-                                labels[start],
-                                labels[start + length],
-                                index);
-                        w += 10;
-                    }
+                break;
+            case ClassWriter.TABL_INSN: {
+                // skips 0 to 3 padding bytes
+                u = u + 4 - (offset & 3);
+                // reads instruction
+                int label = offset + readInt(u);
+                int min = readInt(u + 4);
+                int max = readInt(u + 8);
+                Label[] table = new Label[max - min + 1];
+                u += 12;
+                for (int i = 0; i < table.length; ++i) {
+                    table[i] = labels[offset + readInt(u)];
+                    u += 4;
+                }
+                mv.visitTableSwitchInsn(min, max, labels[label], table);
+                break;
+            }
+            case ClassWriter.LOOK_INSN: {
+                // skips 0 to 3 padding bytes
+                u = u + 4 - (offset & 3);
+                // reads instruction
+                int label = offset + readInt(u);
+                int len = readInt(u + 4);
+                int[] keys = new int[len];
+                Label[] values = new Label[len];
+                u += 8;
+                for (int i = 0; i < len; ++i) {
+                    keys[i] = readInt(u);
+                    values[i] = labels[offset + readInt(u + 4)];
+                    u += 8;
+                }
+                mv.visitLookupSwitchInsn(labels[label], keys, values);
+                break;
+            }
+            case ClassWriter.VAR_INSN:
+                mv.visitVarInsn(opcode, b[u + 1] & 0xFF);
+                u += 2;
+                break;
+            case ClassWriter.SBYTE_INSN:
+                mv.visitIntInsn(opcode, b[u + 1]);
+                u += 2;
+                break;
+            case ClassWriter.SHORT_INSN:
+                mv.visitIntInsn(opcode, readShort(u + 1));
+                u += 3;
+                break;
+            case ClassWriter.LDC_INSN:
+                mv.visitLdcInsn(readConst(b[u + 1] & 0xFF, c));
+                u += 2;
+                break;
+            case ClassWriter.LDCW_INSN:
+                mv.visitLdcInsn(readConst(readUnsignedShort(u + 1), c));
+                u += 3;
+                break;
+            case ClassWriter.FIELDORMETH_INSN:
+            case ClassWriter.ITFMETH_INSN: {
+                int cpIndex = items[readUnsignedShort(u + 1)];
+                String iowner = readClass(cpIndex, c);
+                cpIndex = items[readUnsignedShort(cpIndex + 2)];
+                String iname = readUTF8(cpIndex, c);
+                String idesc = readUTF8(cpIndex + 2, c);
+                if (opcode < Opcodes.INVOKEVIRTUAL) {
+                    mv.visitFieldInsn(opcode, iowner, iname, idesc);
+                } else {
+                    mv.visitMethodInsn(opcode, iowner, iname, idesc);
                 }
-                // visits the other attributes
-                while (cattrs != null) {
-                    attr = cattrs.next;
-                    cattrs.next = null;
-                    mv.visitAttribute(cattrs);
-                    cattrs = attr;
+                if (opcode == Opcodes.INVOKEINTERFACE) {
+                    u += 5;
+                } else {
+                    u += 3;
                 }
-                // visits the max stack and max locals values
-                mv.visitMaxs(maxStack, maxLocals);
+                break;
             }
+            case ClassWriter.INDYMETH_INSN: {
+                int cpIndex = items[readUnsignedShort(u + 1)];
+                int bsmIndex = context.bootstrapMethods[readUnsignedShort(cpIndex)];
+                Handle bsm = (Handle) readConst(readUnsignedShort(bsmIndex), c);
+                int bsmArgCount = readUnsignedShort(bsmIndex + 2);
+                Object[] bsmArgs = new Object[bsmArgCount];
+                bsmIndex += 4;
+                for (int i = 0; i < bsmArgCount; i++) {
+                    bsmArgs[i] = readConst(readUnsignedShort(bsmIndex), c);
+                    bsmIndex += 2;
+                }
+                cpIndex = items[readUnsignedShort(cpIndex + 2)];
+                String iname = readUTF8(cpIndex, c);
+                String idesc = readUTF8(cpIndex + 2, c);
+                mv.visitInvokeDynamicInsn(iname, idesc, bsm, bsmArgs);
+                u += 5;
+                break;
+            }
+            case ClassWriter.TYPE_INSN:
+                mv.visitTypeInsn(opcode, readClass(u + 1, c));
+                u += 3;
+                break;
+            case ClassWriter.IINC_INSN:
+                mv.visitIincInsn(b[u + 1] & 0xFF, b[u + 2]);
+                u += 3;
+                break;
+            // case MANA_INSN:
+            default:
+                mv.visitMultiANewArrayInsn(readClass(u + 1, c), b[u + 3] & 0xFF);
+                u += 4;
+                break;
+            }
+        }
+        if (labels[codeLength] != null) {
+            mv.visitLabel(labels[codeLength]);
+        }
 
-            if (mv != null) {
-                mv.visitEnd();
+        // visits the local variable tables
+        if ((context.flags & SKIP_DEBUG) == 0 && varTable != 0) {
+            int[] typeTable = null;
+            if (varTypeTable != 0) {
+                u = varTypeTable + 2;
+                typeTable = new int[readUnsignedShort(varTypeTable) * 3];
+                for (int i = typeTable.length; i > 0;) {
+                    typeTable[--i] = u + 6; // signature
+                    typeTable[--i] = readUnsignedShort(u + 8); // index
+                    typeTable[--i] = readUnsignedShort(u); // start
+                    u += 10;
+                }
+            }
+            u = varTable + 2;
+            for (int i = readUnsignedShort(varTable); i > 0; --i) {
+                int start = readUnsignedShort(u);
+                int length = readUnsignedShort(u + 2);
+                int index = readUnsignedShort(u + 8);
+                String vsignature = null;
+                if (typeTable != null) {
+                    for (int j = 0; j < typeTable.length; j += 3) {
+                        if (typeTable[j] == start && typeTable[j + 1] == index) {
+                            vsignature = readUTF8(typeTable[j + 2], c);
+                            break;
+                        }
+                    }
+                }
+                mv.visitLocalVariable(readUTF8(u + 4, c), readUTF8(u + 6, c),
+                        vsignature, labels[start], labels[start + length],
+                        index);
+                u += 10;
             }
         }
 
-        // visits the end of the class
-        classVisitor.visitEnd();
+        // visits the code attributes
+        while (attributes != null) {
+            Attribute attr = attributes.next;
+            attributes.next = null;
+            mv.visitAttribute(attributes);
+            attributes = attr;
+        }
+
+        // visits the max stack and max locals values
+        mv.visitMaxs(maxStack, maxLocals);
     }
 
     /**
      * Reads parameter annotations and makes the given visitor visit them.
      *
-     * @param v start offset in {@link #b b} of the annotations to be read.
-     * @param desc the method descriptor.
-     * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
-     *        {@link #readClass(int,char[]) readClass} or
-     *        {@link #readConst readConst}.
-     * @param visible <tt>true</tt> if the annotations to be read are visible
-     *        at runtime.
-     * @param mv the visitor that must visit the annotations.
+     * @param v
+     *            start offset in {@link #b b} of the annotations to be read.
+     * @param desc
+     *            the method descriptor.
+     * @param buf
+     *            buffer to be used to call {@link #readUTF8 readUTF8},
+     *            {@link #readClass(int,char[]) readClass} or {@link #readConst
+     *            readConst}.
+     * @param visible
+     *            <tt>true</tt> if the annotations to be read are visible at
+     *            runtime.
+     * @param mv
+     *            the visitor that must visit the annotations.
      */
-    private void readParameterAnnotations(
-        int v,
-        final String desc,
-        final char[] buf,
-        final boolean visible,
-        final MethodVisitor mv)
-    {
+    private void readParameterAnnotations(int v, final String desc,
+            final char[] buf, final boolean visible, final MethodVisitor mv) {
         int i;
         int n = b[v++] & 0xFF;
         // workaround for a bug in javac (javac compiler generates a parameter
@@ -1679,21 +1458,22 @@ public class ClassReader {
     /**
      * Reads the values of an annotation and makes the given visitor visit them.
      *
-     * @param v the start offset in {@link #b b} of the values to be read
-     *        (including the unsigned short that gives the number of values).
-     * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
-     *        {@link #readClass(int,char[]) readClass} or
-     *        {@link #readConst readConst}.
-     * @param named if the annotation values are named or not.
-     * @param av the visitor that must visit the values.
+     * @param v
+     *            the start offset in {@link #b b} of the values to be read
+     *            (including the unsigned short that gives the number of
+     *            values).
+     * @param buf
+     *            buffer to be used to call {@link #readUTF8 readUTF8},
+     *            {@link #readClass(int,char[]) readClass} or {@link #readConst
+     *            readConst}.
+     * @param named
+     *            if the annotation values are named or not.
+     * @param av
+     *            the visitor that must visit the values.
      * @return the end offset of the annotation values.
      */
-    private int readAnnotationValues(
-        int v,
-        final char[] buf,
-        final boolean named,
-        final AnnotationVisitor av)
-    {
+    private int readAnnotationValues(int v, final char[] buf,
+            final boolean named, final AnnotationVisitor av) {
         int i = readUnsignedShort(v);
         v += 2;
         if (named) {
@@ -1714,210 +1494,371 @@ public class ClassReader {
     /**
      * Reads a value of an annotation and makes the given visitor visit it.
      *
-     * @param v the start offset in {@link #b b} of the value to be read (<i>not
-     *        including the value name constant pool index</i>).
-     * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
-     *        {@link #readClass(int,char[]) readClass} or
-     *        {@link #readConst readConst}.
-     * @param name the name of the value to be read.
-     * @param av the visitor that must visit the value.
+     * @param v
+     *            the start offset in {@link #b b} of the value to be read
+     *            (<i>not including the value name constant pool index</i>).
+     * @param buf
+     *            buffer to be used to call {@link #readUTF8 readUTF8},
+     *            {@link #readClass(int,char[]) readClass} or {@link #readConst
+     *            readConst}.
+     * @param name
+     *            the name of the value to be read.
+     * @param av
+     *            the visitor that must visit the value.
      * @return the end offset of the annotation value.
      */
-    private int readAnnotationValue(
-        int v,
-        final char[] buf,
-        final String name,
-        final AnnotationVisitor av)
-    {
+    private int readAnnotationValue(int v, final char[] buf, final String name,
+            final AnnotationVisitor av) {
         int i;
         if (av == null) {
             switch (b[v] & 0xFF) {
-                case 'e': // enum_const_value
-                    return v + 5;
-                case '@': // annotation_value
-                    return readAnnotationValues(v + 3, buf, true, null);
-                case '[': // array_value
-                    return readAnnotationValues(v + 1, buf, false, null);
-                default:
-                    return v + 3;
+            case 'e': // enum_const_value
+                return v + 5;
+            case '@': // annotation_value
+                return readAnnotationValues(v + 3, buf, true, null);
+            case '[': // array_value
+                return readAnnotationValues(v + 1, buf, false, null);
+            default:
+                return v + 3;
             }
         }
         switch (b[v++] & 0xFF) {
-            case 'I': // pointer to CONSTANT_Integer
-            case 'J': // pointer to CONSTANT_Long
-            case 'F': // pointer to CONSTANT_Float
-            case 'D': // pointer to CONSTANT_Double
-                av.visit(name, readConst(readUnsignedShort(v), buf));
-                v += 2;
-                break;
-            case 'B': // pointer to CONSTANT_Byte
-                av.visit(name,
-                        new Byte((byte) readInt(items[readUnsignedShort(v)])));
-                v += 2;
-                break;
-            case 'Z': // pointer to CONSTANT_Boolean
-                av.visit(name, readInt(items[readUnsignedShort(v)]) == 0
-                        ? Boolean.FALSE
-                        : Boolean.TRUE);
-                v += 2;
-                break;
-            case 'S': // pointer to CONSTANT_Short
-                av.visit(name,
-                        new Short((short) readInt(items[readUnsignedShort(v)])));
-                v += 2;
+        case 'I': // pointer to CONSTANT_Integer
+        case 'J': // pointer to CONSTANT_Long
+        case 'F': // pointer to CONSTANT_Float
+        case 'D': // pointer to CONSTANT_Double
+            av.visit(name, readConst(readUnsignedShort(v), buf));
+            v += 2;
+            break;
+        case 'B': // pointer to CONSTANT_Byte
+            av.visit(name,
+                    new Byte((byte) readInt(items[readUnsignedShort(v)])));
+            v += 2;
+            break;
+        case 'Z': // pointer to CONSTANT_Boolean
+            av.visit(name,
+                    readInt(items[readUnsignedShort(v)]) == 0 ? Boolean.FALSE
+                            : Boolean.TRUE);
+            v += 2;
+            break;
+        case 'S': // pointer to CONSTANT_Short
+            av.visit(name, new Short(
+                    (short) readInt(items[readUnsignedShort(v)])));
+            v += 2;
+            break;
+        case 'C': // pointer to CONSTANT_Char
+            av.visit(name, new Character(
+                    (char) readInt(items[readUnsignedShort(v)])));
+            v += 2;
+            break;
+        case 's': // pointer to CONSTANT_Utf8
+            av.visit(name, readUTF8(v, buf));
+            v += 2;
+            break;
+        case 'e': // enum_const_value
+            av.visitEnum(name, readUTF8(v, buf), readUTF8(v + 2, buf));
+            v += 4;
+            break;
+        case 'c': // class_info
+            av.visit(name, Type.getType(readUTF8(v, buf)));
+            v += 2;
+            break;
+        case '@': // annotation_value
+            v = readAnnotationValues(v + 2, buf, true,
+                    av.visitAnnotation(name, readUTF8(v, buf)));
+            break;
+        case '[': // array_value
+            int size = readUnsignedShort(v);
+            v += 2;
+            if (size == 0) {
+                return readAnnotationValues(v - 2, buf, false,
+                        av.visitArray(name));
+            }
+            switch (this.b[v++] & 0xFF) {
+            case 'B':
+                byte[] bv = new byte[size];
+                for (i = 0; i < size; i++) {
+                    bv[i] = (byte) readInt(items[readUnsignedShort(v)]);
+                    v += 3;
+                }
+                av.visit(name, bv);
+                --v;
                 break;
-            case 'C': // pointer to CONSTANT_Char
-                av.visit(name,
-                        new Character((char) readInt(items[readUnsignedShort(v)])));
-                v += 2;
+            case 'Z':
+                boolean[] zv = new boolean[size];
+                for (i = 0; i < size; i++) {
+                    zv[i] = readInt(items[readUnsignedShort(v)]) != 0;
+                    v += 3;
+                }
+                av.visit(name, zv);
+                --v;
                 break;
-            case 's': // pointer to CONSTANT_Utf8
-                av.visit(name, readUTF8(v, buf));
-                v += 2;
+            case 'S':
+                short[] sv = new short[size];
+                for (i = 0; i < size; i++) {
+                    sv[i] = (short) readInt(items[readUnsignedShort(v)]);
+                    v += 3;
+                }
+                av.visit(name, sv);
+                --v;
                 break;
-            case 'e': // enum_const_value
-                av.visitEnum(name, readUTF8(v, buf), readUTF8(v + 2, buf));
-                v += 4;
+            case 'C':
+                char[] cv = new char[size];
+                for (i = 0; i < size; i++) {
+                    cv[i] = (char) readInt(items[readUnsignedShort(v)]);
+                    v += 3;
+                }
+                av.visit(name, cv);
+                --v;
                 break;
-            case 'c': // class_info
-                av.visit(name, Type.getType(readUTF8(v, buf)));
-                v += 2;
+            case 'I':
+                int[] iv = new int[size];
+                for (i = 0; i < size; i++) {
+                    iv[i] = readInt(items[readUnsignedShort(v)]);
+                    v += 3;
+                }
+                av.visit(name, iv);
+                --v;
                 break;
-            case '@': // annotation_value
-                v = readAnnotationValues(v + 2,
-                        buf,
-                        true,
-                        av.visitAnnotation(name, readUTF8(v, buf)));
+            case 'J':
+                long[] lv = new long[size];
+                for (i = 0; i < size; i++) {
+                    lv[i] = readLong(items[readUnsignedShort(v)]);
+                    v += 3;
+                }
+                av.visit(name, lv);
+                --v;
                 break;
-            case '[': // array_value
-                int size = readUnsignedShort(v);
-                v += 2;
-                if (size == 0) {
-                    return readAnnotationValues(v - 2,
-                            buf,
-                            false,
-                            av.visitArray(name));
+            case 'F':
+                float[] fv = new float[size];
+                for (i = 0; i < size; i++) {
+                    fv[i] = Float
+                            .intBitsToFloat(readInt(items[readUnsignedShort(v)]));
+                    v += 3;
                 }
-                switch (this.b[v++] & 0xFF) {
-                    case 'B':
-                        byte[] bv = new byte[size];
-                        for (i = 0; i < size; i++) {
-                            bv[i] = (byte) readInt(items[readUnsignedShort(v)]);
-                            v += 3;
-                        }
-                        av.visit(name, bv);
-                        --v;
-                        break;
-                    case 'Z':
-                        boolean[] zv = new boolean[size];
-                        for (i = 0; i < size; i++) {
-                            zv[i] = readInt(items[readUnsignedShort(v)]) != 0;
-                            v += 3;
-                        }
-                        av.visit(name, zv);
-                        --v;
-                        break;
-                    case 'S':
-                        short[] sv = new short[size];
-                        for (i = 0; i < size; i++) {
-                            sv[i] = (short) readInt(items[readUnsignedShort(v)]);
-                            v += 3;
-                        }
-                        av.visit(name, sv);
-                        --v;
-                        break;
-                    case 'C':
-                        char[] cv = new char[size];
-                        for (i = 0; i < size; i++) {
-                            cv[i] = (char) readInt(items[readUnsignedShort(v)]);
-                            v += 3;
-                        }
-                        av.visit(name, cv);
-                        --v;
-                        break;
-                    case 'I':
-                        int[] iv = new int[size];
-                        for (i = 0; i < size; i++) {
-                            iv[i] = readInt(items[readUnsignedShort(v)]);
-                            v += 3;
-                        }
-                        av.visit(name, iv);
-                        --v;
-                        break;
-                    case 'J':
-                        long[] lv = new long[size];
-                        for (i = 0; i < size; i++) {
-                            lv[i] = readLong(items[readUnsignedShort(v)]);
-                            v += 3;
-                        }
-                        av.visit(name, lv);
-                        --v;
-                        break;
-                    case 'F':
-                        float[] fv = new float[size];
-                        for (i = 0; i < size; i++) {
-                            fv[i] = Float.intBitsToFloat(readInt(items[readUnsignedShort(v)]));
-                            v += 3;
-                        }
-                        av.visit(name, fv);
-                        --v;
-                        break;
-                    case 'D':
-                        double[] dv = new double[size];
-                        for (i = 0; i < size; i++) {
-                            dv[i] = Double.longBitsToDouble(readLong(items[readUnsignedShort(v)]));
-                            v += 3;
-                        }
-                        av.visit(name, dv);
-                        --v;
-                        break;
-                    default:
-                        v = readAnnotationValues(v - 3,
-                                buf,
-                                false,
-                                av.visitArray(name));
+                av.visit(name, fv);
+                --v;
+                break;
+            case 'D':
+                double[] dv = new double[size];
+                for (i = 0; i < size; i++) {
+                    dv[i] = Double
+                            .longBitsToDouble(readLong(items[readUnsignedShort(v)]));
+                    v += 3;
                 }
+                av.visit(name, dv);
+                --v;
+                break;
+            default:
+                v = readAnnotationValues(v - 3, buf, false, av.visitArray(name));
+            }
         }
         return v;
     }
 
-    private int readFrameType(
-        final Object[] frame,
-        final int index,
-        int v,
-        final char[] buf,
-        final Label[] labels)
-    {
-        int type = b[v++] & 0xFF;
-        switch (type) {
-            case 0:
-                frame[index] = Opcodes.TOP;
-                break;
-            case 1:
-                frame[index] = Opcodes.INTEGER;
-                break;
-            case 2:
-                frame[index] = Opcodes.FLOAT;
+    /**
+     * Computes the implicit frame of the method currently being parsed (as
+     * defined in the given {@link Context}) and stores it in the given context.
+     *
+     * @param frame
+     *            information about the class being parsed.
+     */
+    private void getImplicitFrame(final Context frame) {
+        String desc = frame.desc;
+        Object[] locals = frame.local;
+        int local = 0;
+        if ((frame.access & Opcodes.ACC_STATIC) == 0) {
+            if ("<init>".equals(frame.name)) {
+                locals[local++] = Opcodes.UNINITIALIZED_THIS;
+            } else {
+                locals[local++] = readClass(header + 2, frame.buffer);
+            }
+        }
+        int i = 1;
+        loop: while (true) {
+            int j = i;
+            switch (desc.charAt(i++)) {
+            case 'Z':
+            case 'C':
+            case 'B':
+            case 'S':
+            case 'I':
+                locals[local++] = Opcodes.INTEGER;
                 break;
-            case 3:
-                frame[index] = Opcodes.DOUBLE;
+            case 'F':
+                locals[local++] = Opcodes.FLOAT;
                 break;
-            case 4:
-                frame[index] = Opcodes.LONG;
+            case 'J':
+                locals[local++] = Opcodes.LONG;
                 break;
-            case 5:
-                frame[index] = Opcodes.NULL;
+            case 'D':
+                locals[local++] = Opcodes.DOUBLE;
                 break;
-            case 6:
-                frame[index] = Opcodes.UNINITIALIZED_THIS;
+            case '[':
+                while (desc.charAt(i) == '[') {
+                    ++i;
+                }
+                if (desc.charAt(i) == 'L') {
+                    ++i;
+                    while (desc.charAt(i) != ';') {
+                        ++i;
+                    }
+                }
+                locals[local++] = desc.substring(j, ++i);
                 break;
-            case 7: // Object
-                frame[index] = readClass(v, buf);
-                v += 2;
+            case 'L':
+                while (desc.charAt(i) != ';') {
+                    ++i;
+                }
+                locals[local++] = desc.substring(j + 1, i++);
                 break;
-            default: // Uninitialized
-                frame[index] = readLabel(readUnsignedShort(v), labels);
-                v += 2;
+            default:
+                break loop;
+            }
+        }
+        frame.localCount = local;
+    }
+
+    /**
+     * Reads a stack map frame and stores the result in the given
+     * {@link Context} object.
+     *
+     * @param stackMap
+     *            the start offset of a stack map frame in the class file.
+     * @param zip
+     *            if the stack map frame at stackMap is compressed or not.
+     * @param unzip
+     *            if the stack map frame must be uncompressed.
+     * @param labels
+     *            the labels of the method currently being parsed, indexed by
+     *            their offset. A new label for the parsed stack map frame is
+     *            stored in this array if it does not already exist.
+     * @param frame
+     *            where the parsed stack map frame must be stored.
+     * @return the offset of the first byte following the parsed frame.
+     */
+    private int readFrame(int stackMap, boolean zip, boolean unzip,
+            Label[] labels, Context frame) {
+        char[] c = frame.buffer;
+        int tag;
+        int delta;
+        if (zip) {
+            tag = b[stackMap++] & 0xFF;
+        } else {
+            tag = MethodWriter.FULL_FRAME;
+            frame.offset = -1;
+        }
+        frame.localDiff = 0;
+        if (tag < MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME) {
+            delta = tag;
+            frame.mode = Opcodes.F_SAME;
+            frame.stackCount = 0;
+        } else if (tag < MethodWriter.RESERVED) {
+            delta = tag - MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME;
+            stackMap = readFrameType(frame.stack, 0, stackMap, c, labels);
+            frame.mode = Opcodes.F_SAME1;
+            frame.stackCount = 1;
+        } else {
+            delta = readUnsignedShort(stackMap);
+            stackMap += 2;
+            if (tag == MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED) {
+                stackMap = readFrameType(frame.stack, 0, stackMap, c, labels);
+                frame.mode = Opcodes.F_SAME1;
+                frame.stackCount = 1;
+            } else if (tag >= MethodWriter.CHOP_FRAME
+                    && tag < MethodWriter.SAME_FRAME_EXTENDED) {
+                frame.mode = Opcodes.F_CHOP;
+                frame.localDiff = MethodWriter.SAME_FRAME_EXTENDED - tag;
+                frame.localCount -= frame.localDiff;
+                frame.stackCount = 0;
+            } else if (tag == MethodWriter.SAME_FRAME_EXTENDED) {
+                frame.mode = Opcodes.F_SAME;
+                frame.stackCount = 0;
+            } else if (tag < MethodWriter.FULL_FRAME) {
+                int local = unzip ? frame.localCount : 0;
+                for (int i = tag - MethodWriter.SAME_FRAME_EXTENDED; i > 0; i--) {
+                    stackMap = readFrameType(frame.local, local++, stackMap, c,
+                            labels);
+                }
+                frame.mode = Opcodes.F_APPEND;
+                frame.localDiff = tag - MethodWriter.SAME_FRAME_EXTENDED;
+                frame.localCount += frame.localDiff;
+                frame.stackCount = 0;
+            } else { // if (tag == FULL_FRAME) {
+                frame.mode = Opcodes.F_FULL;
+                int n = readUnsignedShort(stackMap);
+                stackMap += 2;
+                frame.localDiff = n;
+                frame.localCount = n;
+                for (int local = 0; n > 0; n--) {
+                    stackMap = readFrameType(frame.local, local++, stackMap, c,
+                            labels);
+                }
+                n = readUnsignedShort(stackMap);
+                stackMap += 2;
+                frame.stackCount = n;
+                for (int stack = 0; n > 0; n--) {
+                    stackMap = readFrameType(frame.stack, stack++, stackMap, c,
+                            labels);
+                }
+            }
+        }
+        frame.offset += delta + 1;
+        readLabel(frame.offset, labels);
+        return stackMap;
+    }
+
+    /**
+     * Reads a stack map frame type and stores it at the given index in the
+     * given array.
+     *
+     * @param frame
+     *            the array where the parsed type must be stored.
+     * @param index
+     *            the index in 'frame' where the parsed type must be stored.
+     * @param v
+     *            the start offset of the stack map frame type to read.
+     * @param buf
+     *            a buffer to read strings.
+     * @param labels
+     *            the labels of the method currently being parsed, indexed by
+     *            their offset. If the parsed type is an Uninitialized type, a
+     *            new label for the corresponding NEW instruction is stored in
+     *            this array if it does not already exist.
+     * @return the offset of the first byte after the parsed type.
+     */
+    private int readFrameType(final Object[] frame, final int index, int v,
+            final char[] buf, final Label[] labels) {
+        int type = b[v++] & 0xFF;
+        switch (type) {
+        case 0:
+            frame[index] = Opcodes.TOP;
+            break;
+        case 1:
+            frame[index] = Opcodes.INTEGER;
+            break;
+        case 2:
+            frame[index] = Opcodes.FLOAT;
+            break;
+        case 3:
+            frame[index] = Opcodes.DOUBLE;
+            break;
+        case 4:
+            frame[index] = Opcodes.LONG;
+            break;
+        case 5:
+            frame[index] = Opcodes.NULL;
+            break;
+        case 6:
+            frame[index] = Opcodes.UNINITIALIZED_THIS;
+            break;
+        case 7: // Object
+            frame[index] = readClass(v, buf);
+            v += 2;
+            break;
+        default: // Uninitialized
+            frame[index] = readLabel(readUnsignedShort(v), labels);
+            v += 2;
         }
         return v;
     }
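
For orientation, the tag values that readFrame and readFrameType branch on come straight from the JVM specification's StackMapTable attribute. The sketch below is illustrative only (the class and method names are made up); it names the compressed frame forms selected by the frame-type byte and the verification types selected by the one-byte tags handled in the switch above.

    // Illustrative sketch of the JVMS StackMapTable encoding decoded above.
    public final class StackMapTags {

        /** Names the compressed frame form selected by the frame-type byte. */
        static String frameForm(int tag) {
            if (tag < 64)   return "same_frame";                     // offset delta = tag
            if (tag < 128)  return "same_locals_1_stack_item_frame"; // delta = tag - 64
            if (tag < 247)  return "reserved";
            if (tag == 247) return "same_locals_1_stack_item_frame_extended";
            if (tag <= 250) return "chop_frame";                     // drops 251 - tag locals
            if (tag == 251) return "same_frame_extended";
            if (tag <= 254) return "append_frame";                   // adds tag - 251 locals
            return "full_frame";                                     // tag == 255
        }

        /** Names the verification type for a one-byte tag (cases 0..7 above). */
        static String verificationType(int typeTag) {
            switch (typeTag) {
            case 0: return "Top";
            case 1: return "Integer";
            case 2: return "Float";
            case 3: return "Double";
            case 4: return "Long";
            case 5: return "Null";
            case 6: return "UninitializedThis";
            case 7: return "Object";         // followed by a 2-byte class index
            default: return "Uninitialized"; // followed by a 2-byte NEW offset
            }
        }

        public static void main(String[] args) {
            // prints: append_frame / Long
            System.out.println(frameForm(252) + " / " + verificationType(4));
        }
    }
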
@@ -1927,10 +1868,12 @@ public class ClassReader {
      * implementation of this method creates a label for the given offset if it
      * has not been already created.
      *
-     * @param offset a bytecode offset in a method.
-     * @param labels the already created labels, indexed by their offset. If a
-     *        label already exists for offset this method must not create a new
-     *        one. Otherwise it must store the new label in this array.
+     * @param offset
+     *            a bytecode offset in a method.
+     * @param labels
+     *            the already created labels, indexed by their offset. If a
+     *            label already exists for offset this method must not create a
+     *            new one. Otherwise it must store the new label in this array.
      * @return a non null Label, which must be equal to labels[offset].
      */
     protected Label readLabel(int offset, Label[] labels) {
@@ -1941,39 +1884,67 @@ public class ClassReader {
     }
 
     /**
+     * Returns the start index of the attribute_info structure of this class.
+     *
+     * @return the start index of the attribute_info structure of this class.
+     */
+    private int getAttributes() {
+        // skips the header
+        int u = header + 8 + readUnsignedShort(header + 6) * 2;
+        // skips fields and methods
+        for (int i = readUnsignedShort(u); i > 0; --i) {
+            for (int j = readUnsignedShort(u + 8); j > 0; --j) {
+                u += 6 + readInt(u + 12);
+            }
+            u += 8;
+        }
+        u += 2;
+        for (int i = readUnsignedShort(u); i > 0; --i) {
+            for (int j = readUnsignedShort(u + 8); j > 0; --j) {
+                u += 6 + readInt(u + 12);
+            }
+            u += 8;
+        }
+        // the attribute_info structure starts just after the methods
+        return u + 2;
+    }
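+
+The skipping logic in getAttributes follows the fixed class-file layout: after the constant pool come the interfaces, then the fields and methods (each with their own attribute tables), and finally the class-level attributes. Below is a rough standalone sketch of the same walk using plain java.io, with each region named; it assumes an ASM4-era constant pool (no CONSTANT_Dynamic or module entries) and is meant purely for illustration.

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;

    final class ClassFileWalk {
        // Skips to the class-level attribute_info table, mirroring getAttributes().
        static void skipToClassAttributes(byte[] classFile) throws IOException {
            DataInputStream in = new DataInputStream(new ByteArrayInputStream(classFile));
            in.skipBytes(8);                                    // magic, minor_version, major_version
            int cpCount = in.readUnsignedShort();
            for (int i = 1; i < cpCount; i++) {                 // constant pool
                int tag = in.readUnsignedByte();
                switch (tag) {
                case 7: case 8: case 16: in.skipBytes(2); break;     // Class, String, MethodType
                case 15: in.skipBytes(3); break;                     // MethodHandle
                case 3: case 4: case 9: case 10: case 11: case 12:
                case 18: in.skipBytes(4); break;                     // Int, Float, refs, NameAndType, InvokeDynamic
                case 5: case 6: in.skipBytes(8); i++; break;         // Long, Double use two pool slots
                case 1: in.skipBytes(in.readUnsignedShort()); break; // Utf8
                default: throw new IOException("unexpected constant pool tag " + tag);
                }
            }
            in.skipBytes(6);                                    // access_flags, this_class, super_class
            in.skipBytes(in.readUnsignedShort() * 2);           // interfaces
            for (int member = 0; member < 2; member++) {        // fields, then methods
                for (int i = in.readUnsignedShort(); i > 0; i--) {
                    in.skipBytes(6);                            // access_flags, name_index, descriptor_index
                    for (int a = in.readUnsignedShort(); a > 0; a--) {
                        in.skipBytes(2);                        // attribute_name_index
                        in.skipBytes(in.readInt());             // attribute_length bytes of payload
                    }
                }
            }
            // The next u2 is the class-level attributes_count: this is where
            // getAttributes() stops and returns the corresponding offset in b.
        }
    }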
+
+    /**
      * Reads an attribute in {@link #b b}.
      *
-     * @param attrs prototypes of the attributes that must be parsed during the
-     *        visit of the class. Any attribute whose type is not equal to the
-     *        type of one the prototypes is ignored (i.e. an empty
-     *        {@link Attribute} instance is returned).
-     * @param type the type of the attribute.
-     * @param off index of the first byte of the attribute's content in
-     *        {@link #b b}. The 6 attribute header bytes, containing the type
-     *        and the length of the attribute, are not taken into account here
-     *        (they have already been read).
-     * @param len the length of the attribute's content.
-     * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
-     *        {@link #readClass(int,char[]) readClass} or
-     *        {@link #readConst readConst}.
-     * @param codeOff index of the first byte of code's attribute content in
-     *        {@link #b b}, or -1 if the attribute to be read is not a code
-     *        attribute. The 6 attribute header bytes, containing the type and
-     *        the length of the attribute, are not taken into account here.
-     * @param labels the labels of the method's code, or <tt>null</tt> if the
-     *        attribute to be read is not a code attribute.
+     * @param attrs
+     *            prototypes of the attributes that must be parsed during the
+     *            visit of the class. Any attribute whose type is not equal to
+     *            the type of one of the prototypes is ignored (i.e. an empty
+     *            {@link Attribute} instance is returned).
+     * @param type
+     *            the type of the attribute.
+     * @param off
+     *            index of the first byte of the attribute's content in
+     *            {@link #b b}. The 6 attribute header bytes, containing the
+     *            type and the length of the attribute, are not taken into
+     *            account here (they have already been read).
+     * @param len
+     *            the length of the attribute's content.
+     * @param buf
+     *            buffer to be used to call {@link #readUTF8 readUTF8},
+     *            {@link #readClass(int,char[]) readClass} or {@link #readConst
+     *            readConst}.
+     * @param codeOff
+     *            index of the first byte of code's attribute content in
+     *            {@link #b b}, or -1 if the attribute to be read is not a code
+     *            attribute. The 6 attribute header bytes, containing the type
+     *            and the length of the attribute, are not taken into account
+     *            here.
+     * @param labels
+     *            the labels of the method's code, or <tt>null</tt> if the
+     *            attribute to be read is not a code attribute.
      * @return the attribute that has been read, or <tt>null</tt> to skip this
      *         attribute.
      */
-    private Attribute readAttribute(
-        final Attribute[] attrs,
-        final String type,
-        final int off,
-        final int len,
-        final char[] buf,
-        final int codeOff,
-        final Label[] labels)
-    {
+    private Attribute readAttribute(final Attribute[] attrs, final String type,
+            final int off, final int len, final char[] buf, final int codeOff,
+            final Label[] labels) {
         for (int i = 0; i < attrs.length; ++i) {
             if (attrs[i].type.equals(type)) {
                 return attrs[i].read(this, off, len, buf, codeOff, labels);
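
The loop above is where user-supplied attribute prototypes get a chance to parse non-standard attributes. Below is a minimal sketch of such a prototype, assuming ASM's documented Attribute(String) constructor and the protected read(ClassReader, int, int, char[], int, Label[]) hook that the call above delegates to; the attribute name and payload format are invented for the example.

    import scala.tools.asm.Attribute;
    import scala.tools.asm.ClassReader;
    import scala.tools.asm.Label;

    // Prototype for a hypothetical "com.example.Fingerprint" attribute whose
    // payload (made up here) is a single 4-byte hash. readAttribute() compares
    // the prototype's type string against the attribute name in the class file
    // and, on a match, calls read() with 'off' pointing just past the 6-byte
    // attribute header.
    public class FingerprintAttribute extends Attribute {
        public int hash;

        public FingerprintAttribute() {
            super("com.example.Fingerprint");
        }

        @Override
        protected Attribute read(ClassReader cr, int off, int len, char[] buf,
                int codeOff, Label[] labels) {
            FingerprintAttribute result = new FingerprintAttribute();
            result.hash = cr.readInt(off);
            return result;
        }
    }

A prototype like this is typically passed to ClassReader.accept(ClassVisitor, Attribute[], int), which routes it into the matching loop above.
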
@@ -1987,9 +1958,9 @@ public class ClassReader {
     // ------------------------------------------------------------------------
 
     /**
-     *  Returns the number of constant pool items in {@link #b b}.
+     * Returns the number of constant pool items in {@link #b b}.
      *
-     *  @return the number of constant pool items in {@link #b b}.
+     * @return the number of constant pool items in {@link #b b}.
      */
     public int getItemCount() {
         return items.length;
@@ -2000,7 +1971,8 @@ public class ClassReader {
      * one. <i>This method is intended for {@link Attribute} sub classes, and is
      * normally not needed by class generators or adapters.</i>
      *
-     * @param item the index a constant pool item.
+     * @param item
+     *            the index of a constant pool item.
      * @return the start index of the constant pool item in {@link #b b}, plus
      *         one.
      */
@@ -2024,7 +1996,8 @@ public class ClassReader {
      * {@link Attribute} sub classes, and is normally not needed by class
      * generators or adapters.</i>
      *
-     * @param index the start index of the value to be read in {@link #b b}.
+     * @param index
+     *            the start index of the value to be read in {@link #b b}.
      * @return the read value.
      */
     public int readByte(final int index) {
@@ -2032,11 +2005,12 @@ public class ClassReader {
     }
 
     /**
-     * Reads an unsigned short value in {@link #b b}. <i>This method is
-     * intended for {@link Attribute} sub classes, and is normally not needed by
-     * class generators or adapters.</i>
+     * Reads an unsigned short value in {@link #b b}. <i>This method is intended
+     * for {@link Attribute} sub classes, and is normally not needed by class
+     * generators or adapters.</i>
      *
-     * @param index the start index of the value to be read in {@link #b b}.
+     * @param index
+     *            the start index of the value to be read in {@link #b b}.
      * @return the read value.
      */
     public int readUnsignedShort(final int index) {
@@ -2049,7 +2023,8 @@ public class ClassReader {
      * for {@link Attribute} sub classes, and is normally not needed by class
      * generators or adapters.</i>
      *
-     * @param index the start index of the value to be read in {@link #b b}.
+     * @param index
+     *            the start index of the value to be read in {@link #b b}.
      * @return the read value.
      */
     public short readShort(final int index) {
@@ -2062,7 +2037,8 @@ public class ClassReader {
      * {@link Attribute} sub classes, and is normally not needed by class
      * generators or adapters.</i>
      *
-     * @param index the start index of the value to be read in {@link #b b}.
+     * @param index
+     *            the start index of the value to be read in {@link #b b}.
      * @return the read value.
      */
     public int readInt(final int index) {
@@ -2072,11 +2048,12 @@ public class ClassReader {
     }
 
     /**
-     * Reads a signed long value in {@link #b b}. <i>This method is intended
-     * for {@link Attribute} sub classes, and is normally not needed by class
+     * Reads a signed long value in {@link #b b}. <i>This method is intended for
+     * {@link Attribute} sub classes, and is normally not needed by class
      * generators or adapters.</i>
      *
-     * @param index the start index of the value to be read in {@link #b b}.
+     * @param index
+     *            the start index of the value to be read in {@link #b b}.
      * @return the read value.
      */
     public long readLong(final int index) {
@@ -2090,14 +2067,19 @@ public class ClassReader {
      * is intended for {@link Attribute} sub classes, and is normally not needed
      * by class generators or adapters.</i>
      *
-     * @param index the start index of an unsigned short value in {@link #b b},
-     *        whose value is the index of an UTF8 constant pool item.
-     * @param buf buffer to be used to read the item. This buffer must be
-     *        sufficiently large. It is not automatically resized.
+     * @param index
+     *            the start index of an unsigned short value in {@link #b b},
+     *            whose value is the index of an UTF8 constant pool item.
+     * @param buf
+     *            buffer to be used to read the item. This buffer must be
+     *            sufficiently large. It is not automatically resized.
      * @return the String corresponding to the specified UTF8 item.
      */
     public String readUTF8(int index, final char[] buf) {
         int item = readUnsignedShort(index);
+        if (index == 0 || item == 0) {
+            return null;
+        }
         String s = strings[item];
         if (s != null) {
             return s;
@@ -2109,10 +2091,13 @@ public class ClassReader {
     /**
      * Reads UTF8 string in {@link #b b}.
      *
-     * @param index start offset of the UTF8 string to be read.
-     * @param utfLen length of the UTF8 string to be read.
-     * @param buf buffer to be used to read the string. This buffer must be
-     *        sufficiently large. It is not automatically resized.
+     * @param index
+     *            start offset of the UTF8 string to be read.
+     * @param utfLen
+     *            length of the UTF8 string to be read.
+     * @param buf
+     *            buffer to be used to read the string. This buffer must be
+     *            sufficiently large. It is not automatically resized.
      * @return the String corresponding to the specified UTF8 string.
      */
     private String readUTF(int index, final int utfLen, final char[] buf) {
@@ -2125,28 +2110,28 @@ public class ClassReader {
         while (index < endIndex) {
             c = b[index++];
             switch (st) {
-                case 0:
-                    c = c & 0xFF;
-                    if (c < 0x80) {  // 0xxxxxxx
-                        buf[strLen++] = (char) c;
-                    } else if (c < 0xE0 && c > 0xBF) {  // 110x xxxx 10xx xxxx
-                        cc = (char) (c & 0x1F);
-                        st = 1;
-                    } else {  // 1110 xxxx 10xx xxxx 10xx xxxx
-                        cc = (char) (c & 0x0F);
-                        st = 2;
-                    }
-                    break;
+            case 0:
+                c = c & 0xFF;
+                if (c < 0x80) { // 0xxxxxxx
+                    buf[strLen++] = (char) c;
+                } else if (c < 0xE0 && c > 0xBF) { // 110x xxxx 10xx xxxx
+                    cc = (char) (c & 0x1F);
+                    st = 1;
+                } else { // 1110 xxxx 10xx xxxx 10xx xxxx
+                    cc = (char) (c & 0x0F);
+                    st = 2;
+                }
+                break;
 
-                case 1:  // byte 2 of 2-byte char or byte 3 of 3-byte char
-                    buf[strLen++] = (char) ((cc << 6) | (c & 0x3F));
-                    st = 0;
-                    break;
+            case 1: // byte 2 of 2-byte char or byte 3 of 3-byte char
+                buf[strLen++] = (char) ((cc << 6) | (c & 0x3F));
+                st = 0;
+                break;
 
-                case 2:  // byte 2 of 3-byte char
-                    cc = (char) ((cc << 6) | (c & 0x3F));
-                    st = 1;
-                    break;
+            case 2: // byte 2 of 3-byte char
+                cc = (char) ((cc << 6) | (c & 0x3F));
+                st = 1;
+                break;
             }
         }
         return new String(buf, 0, strLen);
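
The three states of the decoder above handle the start byte, the final continuation byte, and the middle byte of a three-byte sequence of the class-file "modified UTF-8" encoding. A self-contained sketch of the same state machine follows, with a worked two-byte example; only the class and method names are invented.

    final class Utf8Sketch {
        // Standalone sketch of the decoder above. Class files use "modified UTF-8":
        // one-, two- and three-byte sequences only (no four-byte form).
        static String decode(byte[] b, int start, int utfLen) {
            char[] buf = new char[utfLen];
            int strLen = 0, st = 0;
            char cc = 0;
            for (int i = start; i < start + utfLen; i++) {
                int c = b[i] & 0xFF;
                switch (st) {
                case 0:
                    if (c < 0x80) {            // 0xxxxxxx: one-byte char
                        buf[strLen++] = (char) c;
                    } else if (c < 0xE0) {     // 110xxxxx: first of two bytes
                        cc = (char) (c & 0x1F);
                        st = 1;
                    } else {                   // 1110xxxx: first of three bytes
                        cc = (char) (c & 0x0F);
                        st = 2;
                    }
                    break;
                case 1:                        // final byte (10xxxxxx): emit the char
                    buf[strLen++] = (char) ((cc << 6) | (c & 0x3F));
                    st = 0;
                    break;
                case 2:                        // middle byte of a three-byte char
                    cc = (char) ((cc << 6) | (c & 0x3F));
                    st = 1;
                    break;
                }
            }
            return new String(buf, 0, strLen);
        }

        public static void main(String[] args) {
            // Worked example: 0xC3 0xA9 -> (0x03 << 6) | 0x29 = 0xE9 = 'é'
            System.out.println(decode(new byte[] { (byte) 0xC3, (byte) 0xA9 }, 0, 2));
        }
    }
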
@@ -2157,10 +2142,12 @@ public class ClassReader {
      * intended for {@link Attribute} sub classes, and is normally not needed by
      * class generators or adapters.</i>
      *
-     * @param index the start index of an unsigned short value in {@link #b b},
-     *        whose value is the index of a class constant pool item.
-     * @param buf buffer to be used to read the item. This buffer must be
-     *        sufficiently large. It is not automatically resized.
+     * @param index
+     *            the start index of an unsigned short value in {@link #b b},
+     *            whose value is the index of a class constant pool item.
+     * @param buf
+     *            buffer to be used to read the item. This buffer must be
+     *            sufficiently large. It is not automatically resized.
      * @return the String corresponding to the specified class item.
      */
     public String readClass(final int index, final char[] buf) {
@@ -2175,9 +2162,11 @@ public class ClassReader {
      * method is intended for {@link Attribute} sub classes, and is normally not
      * needed by class generators or adapters.</i>
      *
-     * @param item the index of a constant pool item.
-     * @param buf buffer to be used to read the item. This buffer must be
-     *        sufficiently large. It is not automatically resized.
+     * @param item
+     *            the index of a constant pool item.
+     * @param buf
+     *            buffer to be used to read the item. This buffer must be
+     *            sufficiently large. It is not automatically resized.
      * @return the {@link Integer}, {@link Float}, {@link Long}, {@link Double},
      *         {@link String}, {@link Type} or {@link Handle} corresponding to
      *         the given constant pool item.
@@ -2185,32 +2174,29 @@ public class ClassReader {
     public Object readConst(final int item, final char[] buf) {
         int index = items[item];
         switch (b[index - 1]) {
-            case ClassWriter.INT:
-                return new Integer(readInt(index));
-            case ClassWriter.FLOAT:
-                return new Float(Float.intBitsToFloat(readInt(index)));
-            case ClassWriter.LONG:
-                return new Long(readLong(index));
-            case ClassWriter.DOUBLE:
-                return new Double(Double.longBitsToDouble(readLong(index)));
-            case ClassWriter.CLASS:
-                return Type.getObjectType(readUTF8(index, buf));
-            case ClassWriter.STR:
-                return readUTF8(index, buf);
-            case ClassWriter.MTYPE:
-                return Type.getMethodType(readUTF8(index, buf));
-
-            //case ClassWriter.HANDLE_BASE + [1..9]:
-            default: {
-                int tag = readByte(index);
-                int[] items = this.items;
-                int cpIndex = items[readUnsignedShort(index + 1)];
-                String owner = readClass(cpIndex, buf);
-                cpIndex = items[readUnsignedShort(cpIndex + 2)];
-                String name = readUTF8(cpIndex, buf);
-                String desc = readUTF8(cpIndex + 2, buf);
-                return new Handle(tag, owner, name, desc);
-            }
+        case ClassWriter.INT:
+            return new Integer(readInt(index));
+        case ClassWriter.FLOAT:
+            return new Float(Float.intBitsToFloat(readInt(index)));
+        case ClassWriter.LONG:
+            return new Long(readLong(index));
+        case ClassWriter.DOUBLE:
+            return new Double(Double.longBitsToDouble(readLong(index)));
+        case ClassWriter.CLASS:
+            return Type.getObjectType(readUTF8(index, buf));
+        case ClassWriter.STR:
+            return readUTF8(index, buf);
+        case ClassWriter.MTYPE:
+            return Type.getMethodType(readUTF8(index, buf));
+        default: // case ClassWriter.HANDLE_BASE + [1..9]:
+            int tag = readByte(index);
+            int[] items = this.items;
+            int cpIndex = items[readUnsignedShort(index + 1)];
+            String owner = readClass(cpIndex, buf);
+            cpIndex = items[readUnsignedShort(cpIndex + 2)];
+            String name = readUTF8(cpIndex, buf);
+            String desc = readUTF8(cpIndex + 2, buf);
+            return new Handle(tag, owner, name, desc);
         }
     }
 }
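
The switch above is the whole mapping from loadable constant pool entries to Java objects. A tiny usage sketch, assuming the caller already knows the pool index of a loadable constant; the helper name is invented, and getMaxStringLength is ASM's usual way to size the scratch buffer.

    import scala.tools.asm.ClassReader;

    final class ConstantSketch {
        // Returns an Integer, Float, Long, Double, String, Type or Handle,
        // depending on the tag of the pool entry, as in the switch above.
        static Object loadConstant(ClassReader cr, int itemIndex) {
            char[] buf = new char[cr.getMaxStringLength()]; // scratch buffer for UTF8 items
            return cr.readConst(itemIndex, buf);
        }
    }
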
diff --git a/src/asm/scala/tools/asm/ClassVisitor.java b/src/asm/scala/tools/asm/ClassVisitor.java
index ae38ae0..3fc364d 100644
--- a/src/asm/scala/tools/asm/ClassVisitor.java
+++ b/src/asm/scala/tools/asm/ClassVisitor.java
@@ -30,11 +30,11 @@
 package scala.tools.asm;
 
 /**
- * A visitor to visit a Java class. The methods of this class must be called
- * in the following order: <tt>visit</tt> [ <tt>visitSource</tt> ] [
+ * A visitor to visit a Java class. The methods of this class must be called in
+ * the following order: <tt>visit</tt> [ <tt>visitSource</tt> ] [
  * <tt>visitOuterClass</tt> ] ( <tt>visitAnnotation</tt> |
- * <tt>visitAttribute</tt> )* ( <tt>visitInnerClass</tt> |
- * <tt>visitField</tt> | <tt>visitMethod</tt> )* <tt>visitEnd</tt>.
+ * <tt>visitAttribute</tt> )* ( <tt>visitInnerClass</tt> | <tt>visitField</tt> |
+ * <tt>visitMethod</tt> )* <tt>visitEnd</tt>.
  *
  * @author Eric Bruneton
  */
@@ -55,8 +55,9 @@ public abstract class ClassVisitor {
     /**
      * Constructs a new {@link ClassVisitor}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
      */
     public ClassVisitor(final int api) {
         this(api, null);
@@ -65,15 +66,17 @@ public abstract class ClassVisitor {
     /**
      * Constructs a new {@link ClassVisitor}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
-     * @param cv the class visitor to which this visitor must delegate method
-     *        calls. May be null.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
+     * @param cv
+     *            the class visitor to which this visitor must delegate method
+     *            calls. May be null.
      */
     public ClassVisitor(final int api, final ClassVisitor cv) {
-        /*if (api != Opcodes.ASM4) {
+        if (api != Opcodes.ASM4) {
             throw new IllegalArgumentException();
-        }*/
+        }
         this.api = api;
         this.cv = cv;
     }
@@ -81,30 +84,30 @@ public abstract class ClassVisitor {
     /**
      * Visits the header of the class.
      *
-     * @param version the class version.
-     * @param access the class's access flags (see {@link Opcodes}). This
-     *        parameter also indicates if the class is deprecated.
-     * @param name the internal name of the class (see
-     *        {@link Type#getInternalName() getInternalName}).
-     * @param signature the signature of this class. May be <tt>null</tt> if
-     *        the class is not a generic one, and does not extend or implement
-     *        generic classes or interfaces.
-     * @param superName the internal of name of the super class (see
-     *        {@link Type#getInternalName() getInternalName}). For interfaces,
-     *        the super class is {@link Object}. May be <tt>null</tt>, but
-     *        only for the {@link Object} class.
-     * @param interfaces the internal names of the class's interfaces (see
-     *        {@link Type#getInternalName() getInternalName}). May be
-     *        <tt>null</tt>.
+     * @param version
+     *            the class version.
+     * @param access
+     *            the class's access flags (see {@link Opcodes}). This parameter
+     *            also indicates if the class is deprecated.
+     * @param name
+     *            the internal name of the class (see
+     *            {@link Type#getInternalName() getInternalName}).
+     * @param signature
+     *            the signature of this class. May be <tt>null</tt> if the class
+     *            is not a generic one, and does not extend or implement generic
+     *            classes or interfaces.
+     * @param superName
+     *            the internal name of the super class (see
+     *            {@link Type#getInternalName() getInternalName}). For
+     *            interfaces, the super class is {@link Object}. May be
+     *            <tt>null</tt>, but only for the {@link Object} class.
+     * @param interfaces
+     *            the internal names of the class's interfaces (see
+     *            {@link Type#getInternalName() getInternalName}). May be
+     *            <tt>null</tt>.
      */
-    public void visit(
-        int version,
-        int access,
-        String name,
-        String signature,
-        String superName,
-        String[] interfaces)
-    {
+    public void visit(int version, int access, String name, String signature,
+            String superName, String[] interfaces) {
         if (cv != null) {
             cv.visit(version, access, name, signature, superName, interfaces);
         }
@@ -113,11 +116,13 @@ public abstract class ClassVisitor {
     /**
      * Visits the source of the class.
      *
-     * @param source the name of the source file from which the class was
-     *        compiled. May be <tt>null</tt>.
-     * @param debug additional debug information to compute the correspondance
-     *        between source and compiled elements of the class. May be
-     *        <tt>null</tt>.
+     * @param source
+     *            the name of the source file from which the class was compiled.
+     *            May be <tt>null</tt>.
+     * @param debug
+     *            additional debug information to compute the correspondence
+     *            between source and compiled elements of the class. May be
+     *            <tt>null</tt>.
      */
     public void visitSource(String source, String debug) {
         if (cv != null) {
@@ -129,16 +134,19 @@ public abstract class ClassVisitor {
      * Visits the enclosing class of the class. This method must be called only
      * if the class has an enclosing class.
      *
-     * @param owner internal name of the enclosing class of the class.
-     * @param name the name of the method that contains the class, or
-     *        <tt>null</tt> if the class is not enclosed in a method of its
-     *        enclosing class.
-     * @param desc the descriptor of the method that contains the class, or
-     *        <tt>null</tt> if the class is not enclosed in a method of its
-     *        enclosing class.
+     * @param owner
+     *            internal name of the enclosing class of the class.
+     * @param name
+     *            the name of the method that contains the class, or
+     *            <tt>null</tt> if the class is not enclosed in a method of its
+     *            enclosing class.
+     * @param desc
+     *            the descriptor of the method that contains the class, or
+     *            <tt>null</tt> if the class is not enclosed in a method of its
+     *            enclosing class.
      */
     public void visitOuterClass(String owner, String name, String desc) {
-        if (cv != null)  {
+        if (cv != null) {
             cv.visitOuterClass(owner, name, desc);
         }
     }
@@ -146,8 +154,10 @@ public abstract class ClassVisitor {
     /**
      * Visits an annotation of the class.
      *
-     * @param desc the class descriptor of the annotation class.
-     * @param visible <tt>true</tt> if the annotation is visible at runtime.
+     * @param desc
+     *            the class descriptor of the annotation class.
+     * @param visible
+     *            <tt>true</tt> if the annotation is visible at runtime.
      * @return a visitor to visit the annotation values, or <tt>null</tt> if
      *         this visitor is not interested in visiting this annotation.
      */
@@ -161,7 +171,8 @@ public abstract class ClassVisitor {
     /**
      * Visits a non standard attribute of the class.
      *
-     * @param attr an attribute.
+     * @param attr
+     *            an attribute.
      */
     public void visitAttribute(Attribute attr) {
         if (cv != null) {
@@ -173,23 +184,22 @@ public abstract class ClassVisitor {
      * Visits information about an inner class. This inner class is not
      * necessarily a member of the class being visited.
      *
-     * @param name the internal name of an inner class (see
-     *        {@link Type#getInternalName() getInternalName}).
-     * @param outerName the internal name of the class to which the inner class
-     *        belongs (see {@link Type#getInternalName() getInternalName}). May
-     *        be <tt>null</tt> for not member classes.
-     * @param innerName the (simple) name of the inner class inside its
-     *        enclosing class. May be <tt>null</tt> for anonymous inner
-     *        classes.
-     * @param access the access flags of the inner class as originally declared
-     *        in the enclosing class.
+     * @param name
+     *            the internal name of an inner class (see
+     *            {@link Type#getInternalName() getInternalName}).
+     * @param outerName
+     *            the internal name of the class to which the inner class
+     *            belongs (see {@link Type#getInternalName() getInternalName}).
+     *            May be <tt>null</tt> for non-member classes.
+     * @param innerName
+     *            the (simple) name of the inner class inside its enclosing
+     *            class. May be <tt>null</tt> for anonymous inner classes.
+     * @param access
+     *            the access flags of the inner class as originally declared in
+     *            the enclosing class.
      */
-    public void visitInnerClass(
-        String name,
-        String outerName,
-        String innerName,
-        int access)
-    {
+    public void visitInnerClass(String name, String outerName,
+            String innerName, int access) {
         if (cv != null) {
             cv.visitInnerClass(name, outerName, innerName, access);
         }
@@ -198,33 +208,32 @@ public abstract class ClassVisitor {
     /**
      * Visits a field of the class.
      *
-     * @param access the field's access flags (see {@link Opcodes}). This
-     *        parameter also indicates if the field is synthetic and/or
-     *        deprecated.
-     * @param name the field's name.
-     * @param desc the field's descriptor (see {@link Type Type}).
-     * @param signature the field's signature. May be <tt>null</tt> if the
-     *        field's type does not use generic types.
-     * @param value the field's initial value. This parameter, which may be
-     *        <tt>null</tt> if the field does not have an initial value, must
-     *        be an {@link Integer}, a {@link Float}, a {@link Long}, a
-     *        {@link Double} or a {@link String} (for <tt>int</tt>,
-     *        <tt>float</tt>, <tt>long</tt> or <tt>String</tt> fields
-     *        respectively). <i>This parameter is only used for static fields</i>.
-     *        Its value is ignored for non static fields, which must be
-     *        initialized through bytecode instructions in constructors or
-     *        methods.
+     * @param access
+     *            the field's access flags (see {@link Opcodes}). This parameter
+     *            also indicates if the field is synthetic and/or deprecated.
+     * @param name
+     *            the field's name.
+     * @param desc
+     *            the field's descriptor (see {@link Type Type}).
+     * @param signature
+     *            the field's signature. May be <tt>null</tt> if the field's
+     *            type does not use generic types.
+     * @param value
+     *            the field's initial value. This parameter, which may be
+     *            <tt>null</tt> if the field does not have an initial value,
+     *            must be an {@link Integer}, a {@link Float}, a {@link Long}, a
+     *            {@link Double} or a {@link String} (for <tt>int</tt>,
+     *            <tt>float</tt>, <tt>long</tt> or <tt>String</tt> fields
+     *            respectively). <i>This parameter is only used for static
+     *            fields</i>. Its value is ignored for non static fields, which
+     *            must be initialized through bytecode instructions in
+     *            constructors or methods.
      * @return a visitor to visit field annotations and attributes, or
-     *         <tt>null</tt> if this class visitor is not interested in
-     *         visiting these annotations and attributes.
+     *         <tt>null</tt> if this class visitor is not interested in visiting
+     *         these annotations and attributes.
      */
-    public FieldVisitor visitField(
-        int access,
-        String name,
-        String desc,
-        String signature,
-        Object value)
-    {
+    public FieldVisitor visitField(int access, String name, String desc,
+            String signature, Object value) {
         if (cv != null) {
             return cv.visitField(access, name, desc, signature, value);
         }
@@ -233,31 +242,31 @@ public abstract class ClassVisitor {
 
     /**
      * Visits a method of the class. This method <i>must</i> return a new
-     * {@link MethodVisitor} instance (or <tt>null</tt>) each time it is
-     * called, i.e., it should not return a previously returned visitor.
+     * {@link MethodVisitor} instance (or <tt>null</tt>) each time it is called,
+     * i.e., it should not return a previously returned visitor.
      *
-     * @param access the method's access flags (see {@link Opcodes}). This
-     *        parameter also indicates if the method is synthetic and/or
-     *        deprecated.
-     * @param name the method's name.
-     * @param desc the method's descriptor (see {@link Type Type}).
-     * @param signature the method's signature. May be <tt>null</tt> if the
-     *        method parameters, return type and exceptions do not use generic
-     *        types.
-     * @param exceptions the internal names of the method's exception classes
-     *        (see {@link Type#getInternalName() getInternalName}). May be
-     *        <tt>null</tt>.
+     * @param access
+     *            the method's access flags (see {@link Opcodes}). This
+     *            parameter also indicates if the method is synthetic and/or
+     *            deprecated.
+     * @param name
+     *            the method's name.
+     * @param desc
+     *            the method's descriptor (see {@link Type Type}).
+     * @param signature
+     *            the method's signature. May be <tt>null</tt> if the method
+     *            parameters, return type and exceptions do not use generic
+     *            types.
+     * @param exceptions
+     *            the internal names of the method's exception classes (see
+     *            {@link Type#getInternalName() getInternalName}). May be
+     *            <tt>null</tt>.
      * @return an object to visit the byte code of the method, or <tt>null</tt>
      *         if this class visitor is not interested in visiting the code of
      *         this method.
      */
-    public MethodVisitor visitMethod(
-        int access,
-        String name,
-        String desc,
-        String signature,
-        String[] exceptions)
-    {
+    public MethodVisitor visitMethod(int access, String name, String desc,
+            String signature, String[] exceptions) {
         if (cv != null) {
             return cv.visitMethod(access, name, desc, signature, exceptions);
         }
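
The calling-order contract and the delegation field cv described above are what make visitor chaining work: each adapter receives the events in that order and forwards whichever ones it does not handle. A minimal sketch of such an adapter (names invented), written against the ASM4 API level that the constructor now enforces:

    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    // Logs every method of the visited class, then delegates so the rest of
    // the chain (e.g. a ClassWriter) still sees the full event stream.
    public class MethodLister extends ClassVisitor {

        public MethodLister(ClassVisitor next) {
            super(Opcodes.ASM4, next);
        }

        @Override
        public MethodVisitor visitMethod(int access, String name, String desc,
                String signature, String[] exceptions) {
            System.out.println("method " + name + desc);
            return super.visitMethod(access, name, desc, signature, exceptions);
        }
    }
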
diff --git a/src/asm/scala/tools/asm/ClassWriter.java b/src/asm/scala/tools/asm/ClassWriter.java
index c7a0736..93ed731 100644
--- a/src/asm/scala/tools/asm/ClassWriter.java
+++ b/src/asm/scala/tools/asm/ClassWriter.java
@@ -66,12 +66,18 @@ public class ClassWriter extends ClassVisitor {
     public static final int COMPUTE_FRAMES = 2;
 
     /**
-     * Pseudo access flag to distinguish between the synthetic attribute and
-     * the synthetic access flag.
+     * Pseudo access flag to distinguish between the synthetic attribute and the
+     * synthetic access flag.
      */
     static final int ACC_SYNTHETIC_ATTRIBUTE = 0x40000;
 
     /**
+     * Factor to convert from ACC_SYNTHETIC_ATTRIBUTE to Opcode.ACC_SYNTHETIC.
+     */
+    static final int TO_ACC_SYNTHETIC = ACC_SYNTHETIC_ATTRIBUTE
+            / Opcodes.ACC_SYNTHETIC;
+
+    /**
      * The type of instructions without any argument.
      */
     static final int NOARG_INSN = 0;
@@ -238,8 +244,8 @@ public class ClassWriter extends ClassVisitor {
 
     /**
      * The base value for all CONSTANT_MethodHandle constant pool items.
-     * Internally, ASM store the 9 variations of CONSTANT_MethodHandle into
-     * 9 different items.
+     * Internally, ASM stores the 9 variations of CONSTANT_MethodHandle into 9
+     * different items.
      */
     static final int HANDLE_BASE = 20;
 
@@ -266,9 +272,8 @@ public class ClassWriter extends ClassVisitor {
     static final int TYPE_MERGED = 32;
 
     /**
-     * The type of BootstrapMethods items. These items are stored in a
-     * special class attribute named BootstrapMethods and
-     * not in the constant pool.
+     * The type of BootstrapMethods items. These items are stored in a special
+     * class attribute named BootstrapMethods and not in the constant pool.
      */
     static final int BSM = 33;
 
@@ -327,10 +332,10 @@ public class ClassWriter extends ClassVisitor {
      * necessarily be stored in the constant pool. This type table is used by
      * the control flow and data flow analysis algorithm used to compute stack
      * map frames from scratch. This array associates to each index <tt>i</tt>
-     * the Item whose index is <tt>i</tt>. All Item objects stored in this
-     * array are also stored in the {@link #items} hash table. These two arrays
-     * allow to retrieve an Item from its index or, conversely, to get the index
-     * of an Item from its value. Each Item stores an internal name in its
+     * the Item whose index is <tt>i</tt>. All Item objects stored in this array
+     * are also stored in the {@link #items} hash table. These two arrays allow one
+     * to retrieve an Item from its index or, conversely, to get the index of an
+     * Item from its value. Each Item stores an internal name in its
      * {@link Item#strVal1} field.
      */
     Item[] typeTable;
@@ -439,16 +444,16 @@ public class ClassWriter extends ClassVisitor {
     /**
      * The fields of this class. These fields are stored in a linked list of
      * {@link FieldWriter} objects, linked to each other by their
-     * {@link FieldWriter#fv} field. This field stores the first element of
-     * this list.
+     * {@link FieldWriter#fv} field. This field stores the first element of this
+     * list.
      */
     FieldWriter firstField;
 
     /**
      * The fields of this class. These fields are stored in a linked list of
      * {@link FieldWriter} objects, linked to each other by their
-     * {@link FieldWriter#fv} field. This field stores the last element of
-     * this list.
+     * {@link FieldWriter#fv} field. This field stores the last element of this
+     * list.
      */
     FieldWriter lastField;
 
@@ -463,8 +468,8 @@ public class ClassWriter extends ClassVisitor {
     /**
      * The methods of this class. These methods are stored in a linked list of
      * {@link MethodWriter} objects, linked to each other by their
-     * {@link MethodWriter#mv} field. This field stores the last element of
-     * this list.
+     * {@link MethodWriter#mv} field. This field stores the last element of this
+     * list.
      */
     MethodWriter lastMethod;
 
@@ -584,8 +589,10 @@ public class ClassWriter extends ClassVisitor {
     /**
      * Constructs a new {@link ClassWriter} object.
      *
-     * @param flags option flags that can be used to modify the default behavior
-     *        of this class. See {@link #COMPUTE_MAXS}, {@link #COMPUTE_FRAMES}.
+     * @param flags
+     *            option flags that can be used to modify the default behavior
+     *            of this class. See {@link #COMPUTE_MAXS},
+     *            {@link #COMPUTE_FRAMES}.
      */
     public ClassWriter(final int flags) {
         super(Opcodes.ASM4);
@@ -606,26 +613,32 @@ public class ClassWriter extends ClassVisitor {
      * "mostly add" bytecode transformations. These optimizations are the
      * following:
      *
-     * <ul> <li>The constant pool from the original class is copied as is in the
-     * new class, which saves time. New constant pool entries will be added at
-     * the end if necessary, but unused constant pool entries <i>won't be
-     * removed</i>.</li> <li>Methods that are not transformed are copied as is
-     * in the new class, directly from the original class bytecode (i.e. without
-     * emitting visit events for all the method instructions), which saves a
-     * <i>lot</i> of time. Untransformed methods are detected by the fact that
-     * the {@link ClassReader} receives {@link MethodVisitor} objects that come
-     * from a {@link ClassWriter} (and not from any other {@link ClassVisitor}
-     * instance).</li> </ul>
+     * <ul>
+     * <li>The constant pool from the original class is copied as is in the new
+     * class, which saves time. New constant pool entries will be added at the
+     * end if necessary, but unused constant pool entries <i>won't be
+     * removed</i>.</li>
+     * <li>Methods that are not transformed are copied as is in the new class,
+     * directly from the original class bytecode (i.e. without emitting visit
+     * events for all the method instructions), which saves a <i>lot</i> of
+     * time. Untransformed methods are detected by the fact that the
+     * {@link ClassReader} receives {@link MethodVisitor} objects that come from
+     * a {@link ClassWriter} (and not from any other {@link ClassVisitor}
+     * instance).</li>
+     * </ul>
      *
-     * @param classReader the {@link ClassReader} used to read the original
-     *        class. It will be used to copy the entire constant pool from the
-     *        original class and also to copy other fragments of original
-     *        bytecode where applicable.
-     * @param flags option flags that can be used to modify the default behavior
-     *        of this class. <i>These option flags do not affect methods that
-     *        are copied as is in the new class. This means that the maximum
-     *        stack size nor the stack frames will be computed for these
-     *        methods</i>. See {@link #COMPUTE_MAXS}, {@link #COMPUTE_FRAMES}.
+     * @param classReader
+     *            the {@link ClassReader} used to read the original class. It
+     *            will be used to copy the entire constant pool from the
+     *            original class and also to copy other fragments of original
+     *            bytecode where applicable.
+     * @param flags
+     *            option flags that can be used to modify the default behavior
+     *            of this class. <i>These option flags do not affect methods
+     *            that are copied as is in the new class. This means that neither
+     *            the maximum stack size nor the stack frames will be computed for
+     *            these methods</i>. See {@link #COMPUTE_MAXS},
+     *            {@link #COMPUTE_FRAMES}.
      */
     public ClassWriter(final ClassReader classReader, final int flags) {
         this(flags);
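
A usage sketch of the optimization this constructor enables: building the writer from the reader lets the original constant pool, and any methods the adapter does not touch, be copied verbatim instead of being re-emitted event by event. The adapter here could be the MethodLister sketch above or any other ClassVisitor; the helper name is invented.

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassWriter;

    final class TransformSketch {
        // "Mostly add" transformation: untouched methods and the whole constant
        // pool are copied as is from the original class bytes.
        static byte[] transform(byte[] classFile, int writerFlags) {
            ClassReader cr = new ClassReader(classFile);
            ClassWriter cw = new ClassWriter(cr, writerFlags); // e.g. 0 or ClassWriter.COMPUTE_MAXS
            cr.accept(new MethodLister(cw), 0);
            return cw.toByteArray();
        }
    }
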
@@ -638,14 +651,9 @@ public class ClassWriter extends ClassVisitor {
     // ------------------------------------------------------------------------
 
     @Override
-    public final void visit(
-        final int version,
-        final int access,
-        final String name,
-        final String signature,
-        final String superName,
-        final String[] interfaces)
-    {
+    public final void visit(final int version, final int access,
+            final String name, final String signature, final String superName,
+            final String[] interfaces) {
         this.version = version;
         this.access = access;
         this.name = newClass(name);
@@ -674,11 +682,8 @@ public class ClassWriter extends ClassVisitor {
     }
 
     @Override
-    public final void visitOuterClass(
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public final void visitOuterClass(final String owner, final String name,
+            final String desc) {
         enclosingMethodOwner = newClass(owner);
         if (name != null && desc != null) {
             enclosingMethod = newNameType(name, desc);
@@ -686,10 +691,8 @@ public class ClassWriter extends ClassVisitor {
     }
 
     @Override
-    public final AnnotationVisitor visitAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public final AnnotationVisitor visitAnnotation(final String desc,
+            final boolean visible) {
         if (!ClassReader.ANNOTATIONS) {
             return null;
         }
@@ -714,12 +717,8 @@ public class ClassWriter extends ClassVisitor {
     }
 
     @Override
-    public final void visitInnerClass(
-        final String name,
-        final String outerName,
-        final String innerName,
-        final int access)
-    {
+    public final void visitInnerClass(final String name,
+            final String outerName, final String innerName, final int access) {
         if (innerClasses == null) {
             innerClasses = new ByteVector();
         }
@@ -731,32 +730,16 @@ public class ClassWriter extends ClassVisitor {
     }
 
     @Override
-    public final FieldVisitor visitField(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final Object value)
-    {
+    public final FieldVisitor visitField(final int access, final String name,
+            final String desc, final String signature, final Object value) {
         return new FieldWriter(this, access, name, desc, signature, value);
     }
 
     @Override
-    public final MethodVisitor visitMethod(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final String[] exceptions)
-    {
-        return new MethodWriter(this,
-                access,
-                name,
-                desc,
-                signature,
-                exceptions,
-                computeMaxs,
-                computeFrames);
+    public final MethodVisitor visitMethod(final int access, final String name,
+            final String desc, final String signature, final String[] exceptions) {
+        return new MethodWriter(this, access, name, desc, signature,
+                exceptions, computeMaxs, computeFrames);
     }
 
     @Override
@@ -773,7 +756,7 @@ public class ClassWriter extends ClassVisitor {
      * @return the bytecode of the class that was built with this class writer.
      */
     public byte[] toByteArray() {
-        if (index > Short.MAX_VALUE) {
+        if (index > 0xFFFF) {
             throw new RuntimeException("Class file too large!");
         }
         // computes the real size of the bytecode of this class
@@ -793,8 +776,9 @@ public class ClassWriter extends ClassVisitor {
             mb = (MethodWriter) mb.mv;
         }
         int attributeCount = 0;
-        if (bootstrapMethods != null) {  // we put it as first argument in order
-                                         // to improve a bit ClassReader.copyBootstrapMethods
+        if (bootstrapMethods != null) {
+            // we put it as the first attribute in order to improve
+            // ClassReader.copyBootstrapMethods a bit
             ++attributeCount;
             size += 8 + bootstrapMethods.length;
             newUTF8("BootstrapMethods");
@@ -824,12 +808,13 @@ public class ClassWriter extends ClassVisitor {
             size += 6;
             newUTF8("Deprecated");
         }
-        if ((access & Opcodes.ACC_SYNTHETIC) != 0
-                && ((version & 0xFFFF) < Opcodes.V1_5 || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0))
-        {
-            ++attributeCount;
-            size += 6;
-            newUTF8("Synthetic");
+        if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+            if ((version & 0xFFFF) < Opcodes.V1_5
+                    || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+                ++attributeCount;
+                size += 6;
+                newUTF8("Synthetic");
+            }
         }
         if (innerClasses != null) {
             ++attributeCount;
@@ -856,9 +841,8 @@ public class ClassWriter extends ClassVisitor {
         ByteVector out = new ByteVector(size);
         out.putInt(0xCAFEBABE).putInt(version);
         out.putShort(index).putByteArray(pool.data, 0, pool.length);
-        int mask = Opcodes.ACC_DEPRECATED
-                | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
-                | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
+        int mask = Opcodes.ACC_DEPRECATED | ACC_SYNTHETIC_ATTRIBUTE
+                | ((access & ACC_SYNTHETIC_ATTRIBUTE) / TO_ACC_SYNTHETIC);
         out.putShort(access & ~mask).putShort(name).putShort(superName);
         out.putShort(interfaceCount);
         for (int i = 0; i < interfaceCount; ++i) {
@@ -877,9 +861,10 @@ public class ClassWriter extends ClassVisitor {
             mb = (MethodWriter) mb.mv;
         }
         out.putShort(attributeCount);
-        if (bootstrapMethods != null) {   // should be the first class attribute ?
+        if (bootstrapMethods != null) {
             out.putShort(newUTF8("BootstrapMethods"));
-            out.putInt(bootstrapMethods.length + 2).putShort(bootstrapMethodsCount);
+            out.putInt(bootstrapMethods.length + 2).putShort(
+                    bootstrapMethodsCount);
             out.putByteArray(bootstrapMethods.data, 0, bootstrapMethods.length);
         }
         if (ClassReader.SIGNATURES && signature != 0) {
@@ -900,10 +885,11 @@ public class ClassWriter extends ClassVisitor {
         if ((access & Opcodes.ACC_DEPRECATED) != 0) {
             out.putShort(newUTF8("Deprecated")).putInt(0);
         }
-        if ((access & Opcodes.ACC_SYNTHETIC) != 0
-                && ((version & 0xFFFF) < Opcodes.V1_5 || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0))
-        {
-            out.putShort(newUTF8("Synthetic")).putInt(0);
+        if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+            if ((version & 0xFFFF) < Opcodes.V1_5
+                    || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+                out.putShort(newUTF8("Synthetic")).putInt(0);
+            }
         }
         if (innerClasses != null) {
             out.putShort(newUTF8("InnerClasses"));
@@ -937,10 +923,11 @@ public class ClassWriter extends ClassVisitor {
      * Adds a number or string constant to the constant pool of the class being
      * built. Does nothing if the constant pool already contains a similar item.
      *
-     * @param cst the value of the constant to be added to the constant pool.
-     *        This parameter must be an {@link Integer}, a {@link Float}, a
-     *        {@link Long}, a {@link Double}, a {@link String} or a
-     *        {@link Type}.
+     * @param cst
+     *            the value of the constant to be added to the constant pool.
+     *            This parameter must be an {@link Integer}, a {@link Float}, a
+     *            {@link Long}, a {@link Double}, a {@link String} or a
+     *            {@link Type}.
      * @return a new or already existing constant item with the given value.
      */
     Item newConstItem(final Object cst) {
@@ -973,12 +960,12 @@ public class ClassWriter extends ClassVisitor {
         } else if (cst instanceof Type) {
             Type t = (Type) cst;
             int s = t.getSort();
-            if (s == Type.ARRAY) {
-                return newClassItem(t.getDescriptor());
-            } else if (s == Type.OBJECT) {
+            if (s == Type.OBJECT) {
                 return newClassItem(t.getInternalName());
-            } else { // s == Type.METHOD
+            } else if (s == Type.METHOD) {
                 return newMethodTypeItem(t.getDescriptor());
+            } else { // s == primitive type or array
+                return newClassItem(t.getDescriptor());
             }
         } else if (cst instanceof Handle) {
             Handle h = (Handle) cst;
@@ -994,9 +981,10 @@ public class ClassWriter extends ClassVisitor {
      * <i>This method is intended for {@link Attribute} sub classes, and is
      * normally not needed by class generators or adapters.</i>
      *
-     * @param cst the value of the constant to be added to the constant pool.
-     *        This parameter must be an {@link Integer}, a {@link Float}, a
-     *        {@link Long}, a {@link Double} or a {@link String}.
+     * @param cst
+     *            the value of the constant to be added to the constant pool.
+     *            This parameter must be an {@link Integer}, a {@link Float}, a
+     *            {@link Long}, a {@link Double} or a {@link String}.
      * @return the index of a new or already existing constant item with the
      *         given value.
      */
@@ -1010,7 +998,8 @@ public class ClassWriter extends ClassVisitor {
      * method is intended for {@link Attribute} sub classes, and is normally not
      * needed by class generators or adapters.</i>
      *
-     * @param value the String value.
+     * @param value
+     *            the String value.
      * @return the index of a new or already existing UTF8 item.
      */
     public int newUTF8(final String value) {
@@ -1030,7 +1019,8 @@ public class ClassWriter extends ClassVisitor {
      * <i>This method is intended for {@link Attribute} sub classes, and is
      * normally not needed by class generators or adapters.</i>
      *
-     * @param value the internal name of the class.
+     * @param value
+     *            the internal name of the class.
      * @return a new or already existing class reference item.
      */
     Item newClassItem(final String value) {
@@ -1050,7 +1040,8 @@ public class ClassWriter extends ClassVisitor {
      * <i>This method is intended for {@link Attribute} sub classes, and is
      * normally not needed by class generators or adapters.</i>
      *
-     * @param value the internal name of the class.
+     * @param value
+     *            the internal name of the class.
      * @return the index of a new or already existing class reference item.
      */
     public int newClass(final String value) {
@@ -1063,7 +1054,8 @@ public class ClassWriter extends ClassVisitor {
      * <i>This method is intended for {@link Attribute} sub classes, and is
      * normally not needed by class generators or adapters.</i>
      *
-     * @param methodDesc method descriptor of the method type.
+     * @param methodDesc
+     *            method descriptor of the method type.
      * @return a new or already existing method type reference item.
      */
     Item newMethodTypeItem(final String methodDesc) {
@@ -1083,7 +1075,8 @@ public class ClassWriter extends ClassVisitor {
      * <i>This method is intended for {@link Attribute} sub classes, and is
      * normally not needed by class generators or adapters.</i>
      *
-     * @param methodDesc method descriptor of the method type.
+     * @param methodDesc
+     *            method descriptor of the method type.
      * @return the index of a new or already existing method type reference
      *         item.
      */
@@ -1097,33 +1090,34 @@ public class ClassWriter extends ClassVisitor {
      * intended for {@link Attribute} sub classes, and is normally not needed by
      * class generators or adapters.</i>
      *
-     * @param tag the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
-     *        {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
-     *        {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
-     *        {@link Opcodes#H_INVOKESTATIC}, {@link Opcodes#H_INVOKESPECIAL},
-     *        {@link Opcodes#H_NEWINVOKESPECIAL} or
-     *        {@link Opcodes#H_INVOKEINTERFACE}.
-     * @param owner the internal name of the field or method owner class.
-     * @param name the name of the field or method.
-     * @param desc the descriptor of the field or method.
+     * @param tag
+     *            the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
+     *            {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
+     *            {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
+     *            {@link Opcodes#H_INVOKESTATIC},
+     *            {@link Opcodes#H_INVOKESPECIAL},
+     *            {@link Opcodes#H_NEWINVOKESPECIAL} or
+     *            {@link Opcodes#H_INVOKEINTERFACE}.
+     * @param owner
+     *            the internal name of the field or method owner class.
+     * @param name
+     *            the name of the field or method.
+     * @param desc
+     *            the descriptor of the field or method.
      * @return a new or an already existing method type reference item.
      */
-    Item newHandleItem(
-        final int tag,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    Item newHandleItem(final int tag, final String owner, final String name,
+            final String desc) {
         key4.set(HANDLE_BASE + tag, owner, name, desc);
         Item result = get(key4);
         if (result == null) {
             if (tag <= Opcodes.H_PUTSTATIC) {
                 put112(HANDLE, tag, newField(owner, name, desc));
             } else {
-                put112(HANDLE, tag, newMethod(owner,
-                        name,
-                        desc,
-                        tag == Opcodes.H_INVOKEINTERFACE));
+                put112(HANDLE,
+                        tag,
+                        newMethod(owner, name, desc,
+                                tag == Opcodes.H_INVOKEINTERFACE));
             }
             result = new Item(index++, key4);
             put(result);
@@ -1132,29 +1126,30 @@ public class ClassWriter extends ClassVisitor {
     }
 
     /**
-     * Adds a handle to the constant pool of the class being
-     * build. Does nothing if the constant pool already contains a similar item.
-     * <i>This method is intended for {@link Attribute} sub classes, and is
-     * normally not needed by class generators or adapters.</i>
+     * Adds a handle to the constant pool of the class being built. Does nothing
+     * if the constant pool already contains a similar item. <i>This method is
+     * intended for {@link Attribute} sub classes, and is normally not needed by
+     * class generators or adapters.</i>
      *
-     * @param tag the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
-     *        {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
-     *        {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
-     *        {@link Opcodes#H_INVOKESTATIC}, {@link Opcodes#H_INVOKESPECIAL},
-     *        {@link Opcodes#H_NEWINVOKESPECIAL} or
-     *        {@link Opcodes#H_INVOKEINTERFACE}.
-     * @param owner the internal name of the field or method owner class.
-     * @param name the name of the field or method.
-     * @param desc the descriptor of the field or method.
+     * @param tag
+     *            the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
+     *            {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
+     *            {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
+     *            {@link Opcodes#H_INVOKESTATIC},
+     *            {@link Opcodes#H_INVOKESPECIAL},
+     *            {@link Opcodes#H_NEWINVOKESPECIAL} or
+     *            {@link Opcodes#H_INVOKEINTERFACE}.
+     * @param owner
+     *            the internal name of the field or method owner class.
+     * @param name
+     *            the name of the field or method.
+     * @param desc
+     *            the descriptor of the field or method.
      * @return the index of a new or already existing method type reference
      *         item.
      */
-    public int newHandle(
-        final int tag,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public int newHandle(final int tag, final String owner, final String name,
+            final String desc) {
         return newHandleItem(tag, owner, name, desc).index;
     }
 
@@ -1164,19 +1159,19 @@ public class ClassWriter extends ClassVisitor {
      * <i>This method is intended for {@link Attribute} sub classes, and is
      * normally not needed by class generators or adapters.</i>
      *
-     * @param name name of the invoked method.
-     * @param desc descriptor of the invoke method.
-     * @param bsm the bootstrap method.
-     * @param bsmArgs the bootstrap method constant arguments.
+     * @param name
+     *            name of the invoked method.
+     * @param desc
+     *            descriptor of the invoked method.
+     * @param bsm
+     *            the bootstrap method.
+     * @param bsmArgs
+     *            the bootstrap method constant arguments.
      *
      * @return a new or an already existing invokedynamic type reference item.
      */
-    Item newInvokeDynamicItem(
-        final String name,
-        final String desc,
-        final Handle bsm,
-        final Object... bsmArgs)
-    {
+    Item newInvokeDynamicItem(final String name, final String desc,
+            final Handle bsm, final Object... bsmArgs) {
         // cache for performance
         ByteVector bootstrapMethods = this.bootstrapMethods;
         if (bootstrapMethods == null) {
@@ -1186,9 +1181,7 @@ public class ClassWriter extends ClassVisitor {
         int position = bootstrapMethods.length; // record current position
 
         int hashCode = bsm.hashCode();
-        bootstrapMethods.putShort(newHandle(bsm.tag,
-                bsm.owner,
-                bsm.name,
+        bootstrapMethods.putShort(newHandle(bsm.tag, bsm.owner, bsm.name,
                 bsm.desc));
 
         int argsLength = bsmArgs.length;
@@ -1250,20 +1243,20 @@ public class ClassWriter extends ClassVisitor {
      * <i>This method is intended for {@link Attribute} sub classes, and is
      * normally not needed by class generators or adapters.</i>
      *
-     * @param name name of the invoked method.
-     * @param desc descriptor of the invoke method.
-     * @param bsm the bootstrap method.
-     * @param bsmArgs the bootstrap method constant arguments.
+     * @param name
+     *            name of the invoked method.
+     * @param desc
+     *            descriptor of the invoked method.
+     * @param bsm
+     *            the bootstrap method.
+     * @param bsmArgs
+     *            the bootstrap method constant arguments.
      *
-     * @return the index of a new or already existing invokedynamic
-     *         reference item.
-     */
-    public int newInvokeDynamic(
-        final String name,
-        final String desc,
-        final Handle bsm,
-        final Object... bsmArgs)
-    {
+     * @return the index of a new or already existing invokedynamic reference
+     *         item.
+     */
+    public int newInvokeDynamic(final String name, final String desc,
+            final Handle bsm, final Object... bsmArgs) {
         return newInvokeDynamicItem(name, desc, bsm, bsmArgs).index;
     }
 
@@ -1271,13 +1264,15 @@ public class ClassWriter extends ClassVisitor {
      * Adds a field reference to the constant pool of the class being built.
      * Does nothing if the constant pool already contains a similar item.
      *
-     * @param owner the internal name of the field's owner class.
-     * @param name the field's name.
-     * @param desc the field's descriptor.
+     * @param owner
+     *            the internal name of the field's owner class.
+     * @param name
+     *            the field's name.
+     * @param desc
+     *            the field's descriptor.
      * @return a new or already existing field reference item.
      */
-    Item newFieldItem(final String owner, final String name, final String desc)
-    {
+    Item newFieldItem(final String owner, final String name, final String desc) {
         key3.set(FIELD, owner, name, desc);
         Item result = get(key3);
         if (result == null) {
@@ -1294,13 +1289,15 @@ public class ClassWriter extends ClassVisitor {
      * <i>This method is intended for {@link Attribute} sub classes, and is
      * normally not needed by class generators or adapters.</i>
      *
-     * @param owner the internal name of the field's owner class.
-     * @param name the field's name.
-     * @param desc the field's descriptor.
+     * @param owner
+     *            the internal name of the field's owner class.
+     * @param name
+     *            the field's name.
+     * @param desc
+     *            the field's descriptor.
      * @return the index of a new or already existing field reference item.
      */
-    public int newField(final String owner, final String name, final String desc)
-    {
+    public int newField(final String owner, final String name, final String desc) {
         return newFieldItem(owner, name, desc).index;
     }
 
@@ -1308,18 +1305,18 @@ public class ClassWriter extends ClassVisitor {
      * Adds a method reference to the constant pool of the class being built.
      * Does nothing if the constant pool already contains a similar item.
      *
-     * @param owner the internal name of the method's owner class.
-     * @param name the method's name.
-     * @param desc the method's descriptor.
-     * @param itf <tt>true</tt> if <tt>owner</tt> is an interface.
+     * @param owner
+     *            the internal name of the method's owner class.
+     * @param name
+     *            the method's name.
+     * @param desc
+     *            the method's descriptor.
+     * @param itf
+     *            <tt>true</tt> if <tt>owner</tt> is an interface.
      * @return a new or already existing method reference item.
      */
-    Item newMethodItem(
-        final String owner,
-        final String name,
-        final String desc,
-        final boolean itf)
-    {
+    Item newMethodItem(final String owner, final String name,
+            final String desc, final boolean itf) {
         int type = itf ? IMETH : METH;
         key3.set(type, owner, name, desc);
         Item result = get(key3);
@@ -1337,18 +1334,18 @@ public class ClassWriter extends ClassVisitor {
      * <i>This method is intended for {@link Attribute} sub classes, and is
      * normally not needed by class generators or adapters.</i>
      *
-     * @param owner the internal name of the method's owner class.
-     * @param name the method's name.
-     * @param desc the method's descriptor.
-     * @param itf <tt>true</tt> if <tt>owner</tt> is an interface.
+     * @param owner
+     *            the internal name of the method's owner class.
+     * @param name
+     *            the method's name.
+     * @param desc
+     *            the method's descriptor.
+     * @param itf
+     *            <tt>true</tt> if <tt>owner</tt> is an interface.
      * @return the index of a new or already existing method reference item.
      */
-    public int newMethod(
-        final String owner,
-        final String name,
-        final String desc,
-        final boolean itf)
-    {
+    public int newMethod(final String owner, final String name,
+            final String desc, final boolean itf) {
         return newMethodItem(owner, name, desc, itf).index;
     }
 
@@ -1356,7 +1353,8 @@ public class ClassWriter extends ClassVisitor {
      * Adds an integer to the constant pool of the class being built. Does
      * nothing if the constant pool already contains a similar item.
      *
-     * @param value the int value.
+     * @param value
+     *            the int value.
      * @return a new or already existing int item.
      */
     Item newInteger(final int value) {
@@ -1374,7 +1372,8 @@ public class ClassWriter extends ClassVisitor {
      * Adds a float to the constant pool of the class being built. Does nothing
      * if the constant pool already contains a similar item.
      *
-     * @param value the float value.
+     * @param value
+     *            the float value.
      * @return a new or already existing float item.
      */
     Item newFloat(final float value) {
@@ -1392,7 +1391,8 @@ public class ClassWriter extends ClassVisitor {
      * Adds a long to the constant pool of the class being built. Does nothing
      * if the constant pool already contains a similar item.
      *
-     * @param value the long value.
+     * @param value
+     *            the long value.
      * @return a new or already existing long item.
      */
     Item newLong(final long value) {
@@ -1411,7 +1411,8 @@ public class ClassWriter extends ClassVisitor {
      * Adds a double to the constant pool of the class being built. Does nothing
      * if the constant pool already contains a similar item.
      *
-     * @param value the double value.
+     * @param value
+     *            the double value.
      * @return a new or already existing double item.
      */
     Item newDouble(final double value) {
@@ -1430,7 +1431,8 @@ public class ClassWriter extends ClassVisitor {
      * Adds a string to the constant pool of the class being built. Does nothing
      * if the constant pool already contains a similar item.
      *
-     * @param value the String value.
+     * @param value
+     *            the String value.
      * @return a new or already existing string item.
      */
     private Item newString(final String value) {
@@ -1450,8 +1452,10 @@ public class ClassWriter extends ClassVisitor {
      * method is intended for {@link Attribute} sub classes, and is normally not
      * needed by class generators or adapters.</i>
      *
-     * @param name a name.
-     * @param desc a type descriptor.
+     * @param name
+     *            a name.
+     * @param desc
+     *            a type descriptor.
      * @return the index of a new or already existing name and type item.
      */
     public int newNameType(final String name, final String desc) {
@@ -1462,8 +1466,10 @@ public class ClassWriter extends ClassVisitor {
      * Adds a name and type to the constant pool of the class being built. Does
      * nothing if the constant pool already contains a similar item.
      *
-     * @param name a name.
-     * @param desc a type descriptor.
+     * @param name
+     *            a name.
+     * @param desc
+     *            a type descriptor.
      * @return a new or already existing name and type item.
      */
     Item newNameTypeItem(final String name, final String desc) {
@@ -1481,7 +1487,8 @@ public class ClassWriter extends ClassVisitor {
      * Adds the given internal name to {@link #typeTable} and returns its index.
      * Does nothing if the type table already contains this internal name.
      *
-     * @param type the internal name to be added to the type table.
+     * @param type
+     *            the internal name to be added to the type table.
      * @return the index of this internal name in the type table.
      */
     int addType(final String type) {
@@ -1498,9 +1505,11 @@ public class ClassWriter extends ClassVisitor {
      * index. This method is used for UNINITIALIZED types, made of an internal
      * name and a bytecode offset.
      *
-     * @param type the internal name to be added to the type table.
-     * @param offset the bytecode offset of the NEW instruction that created
-     *        this UNINITIALIZED type value.
+     * @param type
+     *            the internal name to be added to the type table.
+     * @param offset
+     *            the bytecode offset of the NEW instruction that created this
+     *            UNINITIALIZED type value.
      * @return the index of this internal name in the type table.
      */
     int addUninitializedType(final String type, final int offset) {
@@ -1518,7 +1527,8 @@ public class ClassWriter extends ClassVisitor {
     /**
      * Adds the given Item to {@link #typeTable}.
      *
-     * @param item the value to be added to the type table.
+     * @param item
+     *            the value to be added to the type table.
      * @return the added Item, which is a new Item instance with the same value as
      *         the given Item.
      */
@@ -1544,8 +1554,10 @@ public class ClassWriter extends ClassVisitor {
      * {@link #items} hash table to speedup future calls with the same
      * parameters.
      *
-     * @param type1 index of an internal name in {@link #typeTable}.
-     * @param type2 index of an internal name in {@link #typeTable}.
+     * @param type1
+     *            index of an internal name in {@link #typeTable}.
+     * @param type2
+     *            index of an internal name in {@link #typeTable}.
      * @return the index of the common super type of the two given types.
      */
     int getMergedType(final int type1, final int type2) {
@@ -1572,13 +1584,14 @@ public class ClassWriter extends ClassVisitor {
      * that is currently being generated by this ClassWriter, which can of
      * course not be loaded since it is under construction.
      *
-     * @param type1 the internal name of a class.
-     * @param type2 the internal name of another class.
+     * @param type1
+     *            the internal name of a class.
+     * @param type2
+     *            the internal name of another class.
      * @return the internal name of the common super class of the two given
      *         classes.
      */
-    protected String getCommonSuperClass(final String type1, final String type2)
-    {
+    protected String getCommonSuperClass(final String type1, final String type2) {
         Class<?> c, d;
         ClassLoader classLoader = getClass().getClassLoader();
         try {
@@ -1607,7 +1620,8 @@ public class ClassWriter extends ClassVisitor {
      * Returns the constant pool's hash table item which is equal to the given
      * item.
      *
-     * @param key a constant pool item.
+     * @param key
+     *            a constant pool item.
      * @return the constant pool's hash table item which is equal to the given
      *         item, or <tt>null</tt> if there is no such item.
      */
@@ -1623,7 +1637,8 @@ public class ClassWriter extends ClassVisitor {
      * Puts the given item in the constant pool's hash table. The hash table
      * <i>must</i> not already contain this item.
      *
-     * @param i the item to be added to the constant pool's hash table.
+     * @param i
+     *            the item to be added to the constant pool's hash table.
      */
     private void put(final Item i) {
         if (index + typeCount > threshold) {
@@ -1651,9 +1666,12 @@ public class ClassWriter extends ClassVisitor {
     /**
      * Puts one byte and two shorts into the constant pool.
      *
-     * @param b a byte.
-     * @param s1 a short.
-     * @param s2 another short.
+     * @param b
+     *            a byte.
+     * @param s1
+     *            a short.
+     * @param s2
+     *            another short.
      */
     private void put122(final int b, final int s1, final int s2) {
         pool.put12(b, s1).putShort(s2);
@@ -1662,9 +1680,12 @@ public class ClassWriter extends ClassVisitor {
     /**
      * Puts two bytes and one short into the constant pool.
      *
-     * @param b1 a byte.
-     * @param b2 another byte.
-     * @param s a short.
+     * @param b1
+     *            a byte.
+     * @param b2
+     *            another byte.
+     * @param s
+     *            a short.
      */
     private void put112(final int b1, final int b2, final int s) {
         pool.put11(b1, b2).putShort(s);
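
The constant-pool helpers documented above (newConst, newUTF8, newClass, newMethod, newHandle and friends) all intern their entries, so asking twice for the same value yields the same index. Below is a minimal sketch of that behaviour, assuming the shaded scala.tools.asm classes are on the classpath; the class name demo/C, the name ConstantPoolSketch and the particular constants are purely illustrative.

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.Opcodes;

    public class ConstantPoolSketch {
        public static void main(String[] args) {
            ClassWriter cw = new ClassWriter(0);
            cw.visit(Opcodes.V1_5, Opcodes.ACC_PUBLIC, "demo/C", null,
                    "java/lang/Object", null);
            int utf8 = cw.newUTF8("hello");                      // CONSTANT_Utf8
            int str  = cw.newConst("hello");                     // CONSTANT_String
            int cls  = cw.newClass("java/lang/String");          // CONSTANT_Class
            int meth = cw.newMethod("java/io/PrintStream", "println",
                    "(Ljava/lang/String;)V", false);             // CONSTANT_Methodref
            int mh   = cw.newHandle(Opcodes.H_INVOKEVIRTUAL, "java/io/PrintStream",
                    "println", "(Ljava/lang/String;)V");         // CONSTANT_MethodHandle
            // "Does nothing if the constant pool already contains a similar item":
            // asking again simply returns the index that already exists.
            System.out.println(utf8 == cw.newUTF8("hello"));     // prints true
            System.out.println(str + " " + cls + " " + meth + " " + mh);
        }
    }
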
diff --git a/src/asm/scala/tools/asm/Context.java b/src/asm/scala/tools/asm/Context.java
new file mode 100644
index 0000000..7b3a2ad
--- /dev/null
+++ b/src/asm/scala/tools/asm/Context.java
@@ -0,0 +1,110 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ *    contributors may be used to endorse or promote products derived from
+ *    this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package scala.tools.asm;
+
+/**
+ * Information about a class being parsed in a {@link ClassReader}.
+ *
+ * @author Eric Bruneton
+ */
+class Context {
+
+    /**
+     * Prototypes of the attributes that must be parsed for this class.
+     */
+    Attribute[] attrs;
+
+    /**
+     * The {@link ClassReader} option flags for the parsing of this class.
+     */
+    int flags;
+
+    /**
+     * The buffer used to read strings.
+     */
+    char[] buffer;
+
+    /**
+     * The start index of each bootstrap method.
+     */
+    int[] bootstrapMethods;
+
+    /**
+     * The access flags of the method currently being parsed.
+     */
+    int access;
+
+    /**
+     * The name of the method currently being parsed.
+     */
+    String name;
+
+    /**
+     * The descriptor of the method currently being parsed.
+     */
+    String desc;
+
+    /**
+     * The offset of the latest stack map frame that has been parsed.
+     */
+    int offset;
+
+    /**
+     * The encoding of the latest stack map frame that has been parsed.
+     */
+    int mode;
+
+    /**
+     * The number of locals in the latest stack map frame that has been parsed.
+     */
+    int localCount;
+
+    /**
+     * The number of locals in the latest stack map frame that has been parsed,
+     * minus the number of locals in the previous frame.
+     */
+    int localDiff;
+
+    /**
+     * The local values of the latest stack map frame that has been parsed.
+     */
+    Object[] local;
+
+    /**
+     * The stack size of the latest stack map frame that has been parsed.
+     */
+    int stackCount;
+
+    /**
+     * The stack values of the latest stack map frame that has been parsed.
+     */
+    Object[] stack;
+}
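
Context itself is package-private bookkeeping: client code never constructs one, it only influences the flags field above through the options handed to ClassReader.accept. A rough sketch of that relationship, again assuming the shaded scala.tools.asm classes are available; the class being read and the name ParseFlagsSketch are arbitrary.

    import java.io.IOException;
    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.Opcodes;

    public class ParseFlagsSketch {
        public static void main(String[] args) throws IOException {
            ClassReader cr = new ClassReader("java.lang.Runnable");
            // The flags passed here are what the parser carries around in Context.flags.
            cr.accept(new ClassVisitor(Opcodes.ASM4) { /* observe nothing */ },
                    ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES);
        }
    }
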
diff --git a/src/asm/scala/tools/asm/FieldVisitor.java b/src/asm/scala/tools/asm/FieldVisitor.java
index 9ac0f62..9171f33 100644
--- a/src/asm/scala/tools/asm/FieldVisitor.java
+++ b/src/asm/scala/tools/asm/FieldVisitor.java
@@ -30,9 +30,9 @@
 package scala.tools.asm;
 
 /**
- * A visitor to visit a Java field. The methods of this class must be called
- * in the following order: ( <tt>visitAnnotation</tt> |
- * <tt>visitAttribute</tt> )* <tt>visitEnd</tt>.
+ * A visitor to visit a Java field. The methods of this class must be called in
+ * the following order: ( <tt>visitAnnotation</tt> | <tt>visitAttribute</tt> )*
+ * <tt>visitEnd</tt>.
  *
  * @author Eric Bruneton
  */
@@ -53,8 +53,9 @@ public abstract class FieldVisitor {
     /**
      * Constructs a new {@link FieldVisitor}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
      */
     public FieldVisitor(final int api) {
         this(api, null);
@@ -63,15 +64,17 @@ public abstract class FieldVisitor {
     /**
      * Constructs a new {@link FieldVisitor}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
-     * @param fv the field visitor to which this visitor must delegate method
-     *        calls. May be null.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
+     * @param fv
+     *            the field visitor to which this visitor must delegate method
+     *            calls. May be null.
      */
     public FieldVisitor(final int api, final FieldVisitor fv) {
-        /*if (api != Opcodes.ASM4) {
+        if (api != Opcodes.ASM4) {
             throw new IllegalArgumentException();
-        }*/
+        }
         this.api = api;
         this.fv = fv;
     }
@@ -79,8 +82,10 @@ public abstract class FieldVisitor {
     /**
      * Visits an annotation of the field.
      *
-     * @param desc the class descriptor of the annotation class.
-     * @param visible <tt>true</tt> if the annotation is visible at runtime.
+     * @param desc
+     *            the class descriptor of the annotation class.
+     * @param visible
+     *            <tt>true</tt> if the annotation is visible at runtime.
      * @return a visitor to visit the annotation values, or <tt>null</tt> if
      *         this visitor is not interested in visiting this annotation.
      */
@@ -94,7 +99,8 @@ public abstract class FieldVisitor {
     /**
      * Visits a non standard attribute of the field.
      *
-     * @param attr an attribute.
+     * @param attr
+     *            an attribute.
      */
     public void visitAttribute(Attribute attr) {
         if (fv != null) {
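
The calling contract spelled out in the class comment, ( visitAnnotation | visitAttribute )* visitEnd, is what a delegating subclass relies on. A small illustrative subclass, assuming the shaded scala.tools.asm classes; the logging itself is made up for the example.

    import scala.tools.asm.AnnotationVisitor;
    import scala.tools.asm.FieldVisitor;
    import scala.tools.asm.Opcodes;

    class AnnotationLoggingFieldVisitor extends FieldVisitor {
        AnnotationLoggingFieldVisitor(FieldVisitor fv) {
            super(Opcodes.ASM4, fv);   // other api values now throw, per the hunk above
        }

        @Override
        public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
            System.out.println("field annotation " + desc + " visible=" + visible);
            return super.visitAnnotation(desc, visible);   // delegates to fv if non-null
        }
    }
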
diff --git a/src/asm/scala/tools/asm/FieldWriter.java b/src/asm/scala/tools/asm/FieldWriter.java
index 45ef6d0..02c6059 100644
--- a/src/asm/scala/tools/asm/FieldWriter.java
+++ b/src/asm/scala/tools/asm/FieldWriter.java
@@ -92,21 +92,21 @@ final class FieldWriter extends FieldVisitor {
     /**
      * Constructs a new {@link FieldWriter}.
      *
-     * @param cw the class writer to which this field must be added.
-     * @param access the field's access flags (see {@link Opcodes}).
-     * @param name the field's name.
-     * @param desc the field's descriptor (see {@link Type}).
-     * @param signature the field's signature. May be <tt>null</tt>.
-     * @param value the field's constant value. May be <tt>null</tt>.
+     * @param cw
+     *            the class writer to which this field must be added.
+     * @param access
+     *            the field's access flags (see {@link Opcodes}).
+     * @param name
+     *            the field's name.
+     * @param desc
+     *            the field's descriptor (see {@link Type}).
+     * @param signature
+     *            the field's signature. May be <tt>null</tt>.
+     * @param value
+     *            the field's constant value. May be <tt>null</tt>.
      */
-    FieldWriter(
-        final ClassWriter cw,
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final Object value)
-    {
+    FieldWriter(final ClassWriter cw, final int access, final String name,
+            final String desc, final String signature, final Object value) {
         super(Opcodes.ASM4);
         if (cw.firstField == null) {
             cw.firstField = this;
@@ -131,10 +131,8 @@ final class FieldWriter extends FieldVisitor {
     // ------------------------------------------------------------------------
 
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public AnnotationVisitor visitAnnotation(final String desc,
+            final boolean visible) {
         if (!ClassReader.ANNOTATIONS) {
             return null;
         }
@@ -177,11 +175,12 @@ final class FieldWriter extends FieldVisitor {
             cw.newUTF8("ConstantValue");
             size += 8;
         }
-        if ((access & Opcodes.ACC_SYNTHETIC) != 0
-                && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
-        {
-            cw.newUTF8("Synthetic");
-            size += 6;
+        if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+            if ((cw.version & 0xFFFF) < Opcodes.V1_5
+                    || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+                cw.newUTF8("Synthetic");
+                size += 6;
+            }
         }
         if ((access & Opcodes.ACC_DEPRECATED) != 0) {
             cw.newUTF8("Deprecated");
@@ -208,21 +207,23 @@ final class FieldWriter extends FieldVisitor {
     /**
      * Puts the content of this field into the given byte vector.
      *
-     * @param out where the content of this field must be put.
+     * @param out
+     *            where the content of this field must be put.
      */
     void put(final ByteVector out) {
-        int mask = Opcodes.ACC_DEPRECATED
-                | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
-                | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
+        final int FACTOR = ClassWriter.TO_ACC_SYNTHETIC;
+        int mask = Opcodes.ACC_DEPRECATED | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+                | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / FACTOR);
         out.putShort(access & ~mask).putShort(name).putShort(desc);
         int attributeCount = 0;
         if (value != 0) {
             ++attributeCount;
         }
-        if ((access & Opcodes.ACC_SYNTHETIC) != 0
-                && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
-        {
-            ++attributeCount;
+        if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+            if ((cw.version & 0xFFFF) < Opcodes.V1_5
+                    || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+                ++attributeCount;
+            }
         }
         if ((access & Opcodes.ACC_DEPRECATED) != 0) {
             ++attributeCount;
@@ -244,10 +245,11 @@ final class FieldWriter extends FieldVisitor {
             out.putShort(cw.newUTF8("ConstantValue"));
             out.putInt(2).putShort(value);
         }
-        if ((access & Opcodes.ACC_SYNTHETIC) != 0
-                && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
-        {
-            out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+        if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+            if ((cw.version & 0xFFFF) < Opcodes.V1_5
+                    || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+                out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+            }
         }
         if ((access & Opcodes.ACC_DEPRECATED) != 0) {
             out.putShort(cw.newUTF8("Deprecated")).putInt(0);
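
FieldWriter is never instantiated by user code; it is what ClassWriter.visitField hands back, and the ConstantValue, Synthetic and Deprecated handling reshaped above is driven entirely by the arguments given there. A minimal sketch under that assumption; demo/Constants and the MAGIC field are invented for the example.

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.Opcodes;

    public class FieldAttributesSketch {
        public static void main(String[] args) {
            ClassWriter cw = new ClassWriter(0);
            cw.visit(Opcodes.V1_5, Opcodes.ACC_PUBLIC, "demo/Constants", null,
                    "java/lang/Object", null);
            // The non-null constant becomes a ConstantValue attribute; the access
            // flags feed the Deprecated/Synthetic logic shown in the hunks above.
            cw.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL
                    | Opcodes.ACC_DEPRECATED, "MAGIC", "I", null,
                    Integer.valueOf(42)).visitEnd();
            cw.visitEnd();
            System.out.println(cw.toByteArray().length + " bytes generated");
        }
    }
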
diff --git a/src/asm/scala/tools/asm/Frame.java b/src/asm/scala/tools/asm/Frame.java
index 387b567..bcc3e84 100644
--- a/src/asm/scala/tools/asm/Frame.java
+++ b/src/asm/scala/tools/asm/Frame.java
@@ -80,13 +80,13 @@ final class Frame {
      * table contains only internal type names (array type descriptors are
      * forbidden - dimensions must be represented through the DIM field).
      *
-     * The LONG and DOUBLE types are always represented by using two slots (LONG +
-     * TOP or DOUBLE + TOP), for local variable types as well as in the operand
-     * stack. This is necessary to be able to simulate DUPx_y instructions,
-     * whose effect would be dependent on the actual type values if types were
-     * always represented by a single slot in the stack (and this is not
-     * possible, since actual type values are not always known - cf LOCAL and
-     * STACK type kinds).
+     * The LONG and DOUBLE types are always represented by using two slots (LONG
+     * + TOP or DOUBLE + TOP), for local variable types as well as in the
+     * operand stack. This is necessary to be able to simulate DUPx_y
+     * instructions, whose effect would be dependent on the actual type values
+     * if types were always represented by a single slot in the stack (and this
+     * is not possible, since actual type values are not always known - cf LOCAL
+     * and STACK type kinds).
      */
 
     /**
@@ -117,9 +117,9 @@ final class Frame {
     /**
      * Flag used for LOCAL and STACK types. Indicates that if this type happens
      * to be a long or double type (during the computations of input frames),
-     * then it must be set to TOP because the second word of this value has
-     * been reused to store other data in the basic block. Hence the first word
-     * no longer stores a valid long or double value.
+     * then it must be set to TOP because the second word of this value has been
+     * reused to store other data in the basic block. Hence the first word no
+     * longer stores a valid long or double value.
      */
     static final int TOP_IF_LONG_OR_DOUBLE = 0x800000;
 
@@ -523,7 +523,8 @@ final class Frame {
     /**
      * Returns the output frame local variable type at the given index.
      *
-     * @param local the index of the local that must be returned.
+     * @param local
+     *            the index of the local that must be returned.
      * @return the output frame local variable type at the given index.
      */
     private int get(final int local) {
@@ -545,8 +546,10 @@ final class Frame {
     /**
      * Sets the output frame local variable type at the given index.
      *
-     * @param local the index of the local that must be set.
-     * @param type the value of the local that must be set.
+     * @param local
+     *            the index of the local that must be set.
+     * @param type
+     *            the value of the local that must be set.
      */
     private void set(final int local, final int type) {
         // creates and/or resizes the output local variables array if necessary
@@ -566,7 +569,8 @@ final class Frame {
     /**
      * Pushes a new type onto the output frame stack.
      *
-     * @param type the type that must be pushed.
+     * @param type
+     *            the type that must be pushed.
      */
     private void push(final int type) {
         // creates and/or resizes the output stack array if necessary
@@ -591,10 +595,12 @@ final class Frame {
     /**
      * Pushes a new type onto the output frame stack.
      *
-     * @param cw the ClassWriter to which this label belongs.
-     * @param desc the descriptor of the type to be pushed. Can also be a method
-     *        descriptor (in this case this method pushes its return type onto
-     *        the output frame stack).
+     * @param cw
+     *            the ClassWriter to which this label belongs.
+     * @param desc
+     *            the descriptor of the type to be pushed. Can also be a method
+     *            descriptor (in this case this method pushes its return type
+     *            onto the output frame stack).
      */
     private void push(final ClassWriter cw, final String desc) {
         int type = type(cw, desc);
@@ -609,72 +615,74 @@ final class Frame {
     /**
      * Returns the int encoding of the given type.
      *
-     * @param cw the ClassWriter to which this label belongs.
-     * @param desc a type descriptor.
+     * @param cw
+     *            the ClassWriter to which this label belongs.
+     * @param desc
+     *            a type descriptor.
      * @return the int encoding of the given type.
      */
     private static int type(final ClassWriter cw, final String desc) {
         String t;
         int index = desc.charAt(0) == '(' ? desc.indexOf(')') + 1 : 0;
         switch (desc.charAt(index)) {
-            case 'V':
-                return 0;
+        case 'V':
+            return 0;
+        case 'Z':
+        case 'C':
+        case 'B':
+        case 'S':
+        case 'I':
+            return INTEGER;
+        case 'F':
+            return FLOAT;
+        case 'J':
+            return LONG;
+        case 'D':
+            return DOUBLE;
+        case 'L':
+            // stores the internal name, not the descriptor!
+            t = desc.substring(index + 1, desc.length() - 1);
+            return OBJECT | cw.addType(t);
+            // case '[':
+        default:
+            // extracts the dimensions and the element type
+            int data;
+            int dims = index + 1;
+            while (desc.charAt(dims) == '[') {
+                ++dims;
+            }
+            switch (desc.charAt(dims)) {
             case 'Z':
+                data = BOOLEAN;
+                break;
             case 'C':
+                data = CHAR;
+                break;
             case 'B':
+                data = BYTE;
+                break;
             case 'S':
+                data = SHORT;
+                break;
             case 'I':
-                return INTEGER;
+                data = INTEGER;
+                break;
             case 'F':
-                return FLOAT;
+                data = FLOAT;
+                break;
             case 'J':
-                return LONG;
+                data = LONG;
+                break;
             case 'D':
-                return DOUBLE;
-            case 'L':
-                // stores the internal name, not the descriptor!
-                t = desc.substring(index + 1, desc.length() - 1);
-                return OBJECT | cw.addType(t);
-                // case '[':
+                data = DOUBLE;
+                break;
+            // case 'L':
             default:
-                // extracts the dimensions and the element type
-                int data;
-                int dims = index + 1;
-                while (desc.charAt(dims) == '[') {
-                    ++dims;
-                }
-                switch (desc.charAt(dims)) {
-                    case 'Z':
-                        data = BOOLEAN;
-                        break;
-                    case 'C':
-                        data = CHAR;
-                        break;
-                    case 'B':
-                        data = BYTE;
-                        break;
-                    case 'S':
-                        data = SHORT;
-                        break;
-                    case 'I':
-                        data = INTEGER;
-                        break;
-                    case 'F':
-                        data = FLOAT;
-                        break;
-                    case 'J':
-                        data = LONG;
-                        break;
-                    case 'D':
-                        data = DOUBLE;
-                        break;
-                    // case 'L':
-                    default:
-                        // stores the internal name, not the descriptor
-                        t = desc.substring(dims + 1, desc.length() - 1);
-                        data = OBJECT | cw.addType(t);
-                }
-                return (dims - index) << 28 | data;
+                // stores the internal name, not the descriptor
+                t = desc.substring(dims + 1, desc.length() - 1);
+                data = OBJECT | cw.addType(t);
+            }
+            return (dims - index) << 28 | data;
         }
     }
 
@@ -695,7 +703,8 @@ final class Frame {
     /**
      * Pops the given number of types from the output frame stack.
      *
-     * @param elements the number of types that must be popped.
+     * @param elements
+     *            the number of types that must be popped.
      */
     private void pop(final int elements) {
         if (outputStackTop >= elements) {
@@ -712,9 +721,10 @@ final class Frame {
     /**
      * Pops a type from the output frame stack.
      *
-     * @param desc the descriptor of the type to be popped. Can also be a method
-     *        descriptor (in this case this method pops the types corresponding
-     *        to the method arguments).
+     * @param desc
+     *            the descriptor of the type to be popped. Can also be a method
+     *            descriptor (in this case this method pops the types
+     *            corresponding to the method arguments).
      */
     private void pop(final String desc) {
         char c = desc.charAt(0);
@@ -731,7 +741,8 @@ final class Frame {
      * Adds a new type to the list of types on which a constructor is invoked in
      * the basic block.
      *
-     * @param var a type on a which a constructor is invoked.
+     * @param var
+     *            a type on which a constructor is invoked.
      */
     private void init(final int var) {
         // creates and/or resizes the initializations array if necessary
@@ -752,8 +763,10 @@ final class Frame {
      * Replaces the given type with the appropriate type if it is one of the
      * types on which a constructor is invoked in the basic block.
      *
-     * @param cw the ClassWriter to which this label belongs.
-     * @param t a type
+     * @param cw
+     *            the ClassWriter to which this label belongs.
+     * @param t
+     *            a type
      * @return t or, if t is one of the types on which a constructor is invoked
      *         in the basic block, the type corresponding to this constructor.
      */
@@ -787,17 +800,17 @@ final class Frame {
      * Initializes the input frame of the first basic block from the method
      * descriptor.
      *
-     * @param cw the ClassWriter to which this label belongs.
-     * @param access the access flags of the method to which this label belongs.
-     * @param args the formal parameter types of this method.
-     * @param maxLocals the maximum number of local variables of this method.
+     * @param cw
+     *            the ClassWriter to which this label belongs.
+     * @param access
+     *            the access flags of the method to which this label belongs.
+     * @param args
+     *            the formal parameter types of this method.
+     * @param maxLocals
+     *            the maximum number of local variables of this method.
      */
-    void initInputFrame(
-        final ClassWriter cw,
-        final int access,
-        final Type[] args,
-        final int maxLocals)
-    {
+    void initInputFrame(final ClassWriter cw, final int access,
+            final Type[] args, final int maxLocals) {
         inputLocals = new int[maxLocals];
         inputStack = new int[0];
         int i = 0;
@@ -823,435 +836,435 @@ final class Frame {
     /**
      * Simulates the action of the given instruction on the output stack frame.
      *
-     * @param opcode the opcode of the instruction.
-     * @param arg the operand of the instruction, if any.
-     * @param cw the class writer to which this label belongs.
-     * @param item the operand of the instructions, if any.
+     * @param opcode
+     *            the opcode of the instruction.
+     * @param arg
+     *            the operand of the instruction, if any.
+     * @param cw
+     *            the class writer to which this label belongs.
+     * @param item
+     *            the operand of the instructions, if any.
      */
-    void execute(
-        final int opcode,
-        final int arg,
-        final ClassWriter cw,
-        final Item item)
-    {
+    void execute(final int opcode, final int arg, final ClassWriter cw,
+            final Item item) {
         int t1, t2, t3, t4;
         switch (opcode) {
-            case Opcodes.NOP:
-            case Opcodes.INEG:
-            case Opcodes.LNEG:
-            case Opcodes.FNEG:
-            case Opcodes.DNEG:
-            case Opcodes.I2B:
-            case Opcodes.I2C:
-            case Opcodes.I2S:
-            case Opcodes.GOTO:
-            case Opcodes.RETURN:
-                break;
-            case Opcodes.ACONST_NULL:
-                push(NULL);
-                break;
-            case Opcodes.ICONST_M1:
-            case Opcodes.ICONST_0:
-            case Opcodes.ICONST_1:
-            case Opcodes.ICONST_2:
-            case Opcodes.ICONST_3:
-            case Opcodes.ICONST_4:
-            case Opcodes.ICONST_5:
-            case Opcodes.BIPUSH:
-            case Opcodes.SIPUSH:
-            case Opcodes.ILOAD:
+        case Opcodes.NOP:
+        case Opcodes.INEG:
+        case Opcodes.LNEG:
+        case Opcodes.FNEG:
+        case Opcodes.DNEG:
+        case Opcodes.I2B:
+        case Opcodes.I2C:
+        case Opcodes.I2S:
+        case Opcodes.GOTO:
+        case Opcodes.RETURN:
+            break;
+        case Opcodes.ACONST_NULL:
+            push(NULL);
+            break;
+        case Opcodes.ICONST_M1:
+        case Opcodes.ICONST_0:
+        case Opcodes.ICONST_1:
+        case Opcodes.ICONST_2:
+        case Opcodes.ICONST_3:
+        case Opcodes.ICONST_4:
+        case Opcodes.ICONST_5:
+        case Opcodes.BIPUSH:
+        case Opcodes.SIPUSH:
+        case Opcodes.ILOAD:
+            push(INTEGER);
+            break;
+        case Opcodes.LCONST_0:
+        case Opcodes.LCONST_1:
+        case Opcodes.LLOAD:
+            push(LONG);
+            push(TOP);
+            break;
+        case Opcodes.FCONST_0:
+        case Opcodes.FCONST_1:
+        case Opcodes.FCONST_2:
+        case Opcodes.FLOAD:
+            push(FLOAT);
+            break;
+        case Opcodes.DCONST_0:
+        case Opcodes.DCONST_1:
+        case Opcodes.DLOAD:
+            push(DOUBLE);
+            push(TOP);
+            break;
+        case Opcodes.LDC:
+            switch (item.type) {
+            case ClassWriter.INT:
                 push(INTEGER);
                 break;
-            case Opcodes.LCONST_0:
-            case Opcodes.LCONST_1:
-            case Opcodes.LLOAD:
+            case ClassWriter.LONG:
                 push(LONG);
                 push(TOP);
                 break;
-            case Opcodes.FCONST_0:
-            case Opcodes.FCONST_1:
-            case Opcodes.FCONST_2:
-            case Opcodes.FLOAD:
+            case ClassWriter.FLOAT:
                 push(FLOAT);
                 break;
-            case Opcodes.DCONST_0:
-            case Opcodes.DCONST_1:
-            case Opcodes.DLOAD:
+            case ClassWriter.DOUBLE:
                 push(DOUBLE);
                 push(TOP);
                 break;
-            case Opcodes.LDC:
-                switch (item.type) {
-                    case ClassWriter.INT:
-                        push(INTEGER);
-                        break;
-                    case ClassWriter.LONG:
-                        push(LONG);
-                        push(TOP);
-                        break;
-                    case ClassWriter.FLOAT:
-                        push(FLOAT);
-                        break;
-                    case ClassWriter.DOUBLE:
-                        push(DOUBLE);
-                        push(TOP);
-                        break;
-                    case ClassWriter.CLASS:
-                        push(OBJECT | cw.addType("java/lang/Class"));
-                        break;
-                    case ClassWriter.STR:
-                        push(OBJECT | cw.addType("java/lang/String"));
-                        break;
-                    case ClassWriter.MTYPE:
-                        push(OBJECT | cw.addType("java/lang/invoke/MethodType"));
-                        break;
-                    // case ClassWriter.HANDLE_BASE + [1..9]:
-                    default:
-                        push(OBJECT | cw.addType("java/lang/invoke/MethodHandle"));
-                }
-                break;
-            case Opcodes.ALOAD:
-                push(get(arg));
-                break;
-            case Opcodes.IALOAD:
-            case Opcodes.BALOAD:
-            case Opcodes.CALOAD:
-            case Opcodes.SALOAD:
-                pop(2);
-                push(INTEGER);
-                break;
-            case Opcodes.LALOAD:
-            case Opcodes.D2L:
-                pop(2);
-                push(LONG);
-                push(TOP);
+            case ClassWriter.CLASS:
+                push(OBJECT | cw.addType("java/lang/Class"));
                 break;
-            case Opcodes.FALOAD:
-                pop(2);
-                push(FLOAT);
+            case ClassWriter.STR:
+                push(OBJECT | cw.addType("java/lang/String"));
                 break;
-            case Opcodes.DALOAD:
-            case Opcodes.L2D:
-                pop(2);
-                push(DOUBLE);
-                push(TOP);
+            case ClassWriter.MTYPE:
+                push(OBJECT | cw.addType("java/lang/invoke/MethodType"));
                 break;
-            case Opcodes.AALOAD:
-                pop(1);
-                t1 = pop();
-                push(ELEMENT_OF + t1);
-                break;
-            case Opcodes.ISTORE:
-            case Opcodes.FSTORE:
-            case Opcodes.ASTORE:
-                t1 = pop();
-                set(arg, t1);
-                if (arg > 0) {
-                    t2 = get(arg - 1);
-                    // if t2 is of kind STACK or LOCAL we cannot know its size!
-                    if (t2 == LONG || t2 == DOUBLE) {
-                        set(arg - 1, TOP);
-                    } else if ((t2 & KIND) != BASE) {
-                        set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
-                    }
+            // case ClassWriter.HANDLE_BASE + [1..9]:
+            default:
+                push(OBJECT | cw.addType("java/lang/invoke/MethodHandle"));
+            }
+            break;
+        case Opcodes.ALOAD:
+            push(get(arg));
+            break;
+        case Opcodes.IALOAD:
+        case Opcodes.BALOAD:
+        case Opcodes.CALOAD:
+        case Opcodes.SALOAD:
+            pop(2);
+            push(INTEGER);
+            break;
+        case Opcodes.LALOAD:
+        case Opcodes.D2L:
+            pop(2);
+            push(LONG);
+            push(TOP);
+            break;
+        case Opcodes.FALOAD:
+            pop(2);
+            push(FLOAT);
+            break;
+        case Opcodes.DALOAD:
+        case Opcodes.L2D:
+            pop(2);
+            push(DOUBLE);
+            push(TOP);
+            break;
+        case Opcodes.AALOAD:
+            pop(1);
+            t1 = pop();
+            push(ELEMENT_OF + t1);
+            break;
+        case Opcodes.ISTORE:
+        case Opcodes.FSTORE:
+        case Opcodes.ASTORE:
+            t1 = pop();
+            set(arg, t1);
+            if (arg > 0) {
+                t2 = get(arg - 1);
+                // if t2 is of kind STACK or LOCAL we cannot know its size!
+                if (t2 == LONG || t2 == DOUBLE) {
+                    set(arg - 1, TOP);
+                } else if ((t2 & KIND) != BASE) {
+                    set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
                 }
-                break;
-            case Opcodes.LSTORE:
-            case Opcodes.DSTORE:
-                pop(1);
-                t1 = pop();
-                set(arg, t1);
-                set(arg + 1, TOP);
-                if (arg > 0) {
-                    t2 = get(arg - 1);
-                    // if t2 is of kind STACK or LOCAL we cannot know its size!
-                    if (t2 == LONG || t2 == DOUBLE) {
-                        set(arg - 1, TOP);
-                    } else if ((t2 & KIND) != BASE) {
-                        set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
-                    }
+            }
+            break;
+        case Opcodes.LSTORE:
+        case Opcodes.DSTORE:
+            pop(1);
+            t1 = pop();
+            set(arg, t1);
+            set(arg + 1, TOP);
+            if (arg > 0) {
+                t2 = get(arg - 1);
+                // if t2 is of kind STACK or LOCAL we cannot know its size!
+                if (t2 == LONG || t2 == DOUBLE) {
+                    set(arg - 1, TOP);
+                } else if ((t2 & KIND) != BASE) {
+                    set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
                 }
-                break;
-            case Opcodes.IASTORE:
-            case Opcodes.BASTORE:
-            case Opcodes.CASTORE:
-            case Opcodes.SASTORE:
-            case Opcodes.FASTORE:
-            case Opcodes.AASTORE:
-                pop(3);
-                break;
-            case Opcodes.LASTORE:
-            case Opcodes.DASTORE:
-                pop(4);
-                break;
-            case Opcodes.POP:
-            case Opcodes.IFEQ:
-            case Opcodes.IFNE:
-            case Opcodes.IFLT:
-            case Opcodes.IFGE:
-            case Opcodes.IFGT:
-            case Opcodes.IFLE:
-            case Opcodes.IRETURN:
-            case Opcodes.FRETURN:
-            case Opcodes.ARETURN:
-            case Opcodes.TABLESWITCH:
-            case Opcodes.LOOKUPSWITCH:
-            case Opcodes.ATHROW:
-            case Opcodes.MONITORENTER:
-            case Opcodes.MONITOREXIT:
-            case Opcodes.IFNULL:
-            case Opcodes.IFNONNULL:
-                pop(1);
-                break;
-            case Opcodes.POP2:
-            case Opcodes.IF_ICMPEQ:
-            case Opcodes.IF_ICMPNE:
-            case Opcodes.IF_ICMPLT:
-            case Opcodes.IF_ICMPGE:
-            case Opcodes.IF_ICMPGT:
-            case Opcodes.IF_ICMPLE:
-            case Opcodes.IF_ACMPEQ:
-            case Opcodes.IF_ACMPNE:
-            case Opcodes.LRETURN:
-            case Opcodes.DRETURN:
-                pop(2);
-                break;
-            case Opcodes.DUP:
-                t1 = pop();
-                push(t1);
-                push(t1);
-                break;
-            case Opcodes.DUP_X1:
-                t1 = pop();
-                t2 = pop();
-                push(t1);
-                push(t2);
-                push(t1);
-                break;
-            case Opcodes.DUP_X2:
-                t1 = pop();
-                t2 = pop();
-                t3 = pop();
-                push(t1);
-                push(t3);
-                push(t2);
-                push(t1);
-                break;
-            case Opcodes.DUP2:
-                t1 = pop();
-                t2 = pop();
-                push(t2);
-                push(t1);
-                push(t2);
-                push(t1);
-                break;
-            case Opcodes.DUP2_X1:
-                t1 = pop();
-                t2 = pop();
-                t3 = pop();
-                push(t2);
-                push(t1);
-                push(t3);
-                push(t2);
-                push(t1);
-                break;
-            case Opcodes.DUP2_X2:
-                t1 = pop();
-                t2 = pop();
-                t3 = pop();
-                t4 = pop();
-                push(t2);
-                push(t1);
-                push(t4);
-                push(t3);
-                push(t2);
-                push(t1);
-                break;
-            case Opcodes.SWAP:
+            }
+            break;
+        case Opcodes.IASTORE:
+        case Opcodes.BASTORE:
+        case Opcodes.CASTORE:
+        case Opcodes.SASTORE:
+        case Opcodes.FASTORE:
+        case Opcodes.AASTORE:
+            pop(3);
+            break;
+        case Opcodes.LASTORE:
+        case Opcodes.DASTORE:
+            pop(4);
+            break;
+        case Opcodes.POP:
+        case Opcodes.IFEQ:
+        case Opcodes.IFNE:
+        case Opcodes.IFLT:
+        case Opcodes.IFGE:
+        case Opcodes.IFGT:
+        case Opcodes.IFLE:
+        case Opcodes.IRETURN:
+        case Opcodes.FRETURN:
+        case Opcodes.ARETURN:
+        case Opcodes.TABLESWITCH:
+        case Opcodes.LOOKUPSWITCH:
+        case Opcodes.ATHROW:
+        case Opcodes.MONITORENTER:
+        case Opcodes.MONITOREXIT:
+        case Opcodes.IFNULL:
+        case Opcodes.IFNONNULL:
+            pop(1);
+            break;
+        case Opcodes.POP2:
+        case Opcodes.IF_ICMPEQ:
+        case Opcodes.IF_ICMPNE:
+        case Opcodes.IF_ICMPLT:
+        case Opcodes.IF_ICMPGE:
+        case Opcodes.IF_ICMPGT:
+        case Opcodes.IF_ICMPLE:
+        case Opcodes.IF_ACMPEQ:
+        case Opcodes.IF_ACMPNE:
+        case Opcodes.LRETURN:
+        case Opcodes.DRETURN:
+            pop(2);
+            break;
+        case Opcodes.DUP:
+            t1 = pop();
+            push(t1);
+            push(t1);
+            break;
+        case Opcodes.DUP_X1:
+            t1 = pop();
+            t2 = pop();
+            push(t1);
+            push(t2);
+            push(t1);
+            break;
+        case Opcodes.DUP_X2:
+            t1 = pop();
+            t2 = pop();
+            t3 = pop();
+            push(t1);
+            push(t3);
+            push(t2);
+            push(t1);
+            break;
+        case Opcodes.DUP2:
+            t1 = pop();
+            t2 = pop();
+            push(t2);
+            push(t1);
+            push(t2);
+            push(t1);
+            break;
+        case Opcodes.DUP2_X1:
+            t1 = pop();
+            t2 = pop();
+            t3 = pop();
+            push(t2);
+            push(t1);
+            push(t3);
+            push(t2);
+            push(t1);
+            break;
+        case Opcodes.DUP2_X2:
+            t1 = pop();
+            t2 = pop();
+            t3 = pop();
+            t4 = pop();
+            push(t2);
+            push(t1);
+            push(t4);
+            push(t3);
+            push(t2);
+            push(t1);
+            break;
+        case Opcodes.SWAP:
+            t1 = pop();
+            t2 = pop();
+            push(t1);
+            push(t2);
+            break;
+        case Opcodes.IADD:
+        case Opcodes.ISUB:
+        case Opcodes.IMUL:
+        case Opcodes.IDIV:
+        case Opcodes.IREM:
+        case Opcodes.IAND:
+        case Opcodes.IOR:
+        case Opcodes.IXOR:
+        case Opcodes.ISHL:
+        case Opcodes.ISHR:
+        case Opcodes.IUSHR:
+        case Opcodes.L2I:
+        case Opcodes.D2I:
+        case Opcodes.FCMPL:
+        case Opcodes.FCMPG:
+            pop(2);
+            push(INTEGER);
+            break;
+        case Opcodes.LADD:
+        case Opcodes.LSUB:
+        case Opcodes.LMUL:
+        case Opcodes.LDIV:
+        case Opcodes.LREM:
+        case Opcodes.LAND:
+        case Opcodes.LOR:
+        case Opcodes.LXOR:
+            pop(4);
+            push(LONG);
+            push(TOP);
+            break;
+        case Opcodes.FADD:
+        case Opcodes.FSUB:
+        case Opcodes.FMUL:
+        case Opcodes.FDIV:
+        case Opcodes.FREM:
+        case Opcodes.L2F:
+        case Opcodes.D2F:
+            pop(2);
+            push(FLOAT);
+            break;
+        case Opcodes.DADD:
+        case Opcodes.DSUB:
+        case Opcodes.DMUL:
+        case Opcodes.DDIV:
+        case Opcodes.DREM:
+            pop(4);
+            push(DOUBLE);
+            push(TOP);
+            break;
+        case Opcodes.LSHL:
+        case Opcodes.LSHR:
+        case Opcodes.LUSHR:
+            pop(3);
+            push(LONG);
+            push(TOP);
+            break;
+        case Opcodes.IINC:
+            set(arg, INTEGER);
+            break;
+        case Opcodes.I2L:
+        case Opcodes.F2L:
+            pop(1);
+            push(LONG);
+            push(TOP);
+            break;
+        case Opcodes.I2F:
+            pop(1);
+            push(FLOAT);
+            break;
+        case Opcodes.I2D:
+        case Opcodes.F2D:
+            pop(1);
+            push(DOUBLE);
+            push(TOP);
+            break;
+        case Opcodes.F2I:
+        case Opcodes.ARRAYLENGTH:
+        case Opcodes.INSTANCEOF:
+            pop(1);
+            push(INTEGER);
+            break;
+        case Opcodes.LCMP:
+        case Opcodes.DCMPL:
+        case Opcodes.DCMPG:
+            pop(4);
+            push(INTEGER);
+            break;
+        case Opcodes.JSR:
+        case Opcodes.RET:
+            throw new RuntimeException(
+                    "JSR/RET are not supported with computeFrames option");
+        case Opcodes.GETSTATIC:
+            push(cw, item.strVal3);
+            break;
+        case Opcodes.PUTSTATIC:
+            pop(item.strVal3);
+            break;
+        case Opcodes.GETFIELD:
+            pop(1);
+            push(cw, item.strVal3);
+            break;
+        case Opcodes.PUTFIELD:
+            pop(item.strVal3);
+            pop();
+            break;
+        case Opcodes.INVOKEVIRTUAL:
+        case Opcodes.INVOKESPECIAL:
+        case Opcodes.INVOKESTATIC:
+        case Opcodes.INVOKEINTERFACE:
+            pop(item.strVal3);
+            if (opcode != Opcodes.INVOKESTATIC) {
                 t1 = pop();
-                t2 = pop();
-                push(t1);
-                push(t2);
-                break;
-            case Opcodes.IADD:
-            case Opcodes.ISUB:
-            case Opcodes.IMUL:
-            case Opcodes.IDIV:
-            case Opcodes.IREM:
-            case Opcodes.IAND:
-            case Opcodes.IOR:
-            case Opcodes.IXOR:
-            case Opcodes.ISHL:
-            case Opcodes.ISHR:
-            case Opcodes.IUSHR:
-            case Opcodes.L2I:
-            case Opcodes.D2I:
-            case Opcodes.FCMPL:
-            case Opcodes.FCMPG:
-                pop(2);
-                push(INTEGER);
-                break;
-            case Opcodes.LADD:
-            case Opcodes.LSUB:
-            case Opcodes.LMUL:
-            case Opcodes.LDIV:
-            case Opcodes.LREM:
-            case Opcodes.LAND:
-            case Opcodes.LOR:
-            case Opcodes.LXOR:
-                pop(4);
-                push(LONG);
-                push(TOP);
-                break;
-            case Opcodes.FADD:
-            case Opcodes.FSUB:
-            case Opcodes.FMUL:
-            case Opcodes.FDIV:
-            case Opcodes.FREM:
-            case Opcodes.L2F:
-            case Opcodes.D2F:
-                pop(2);
-                push(FLOAT);
-                break;
-            case Opcodes.DADD:
-            case Opcodes.DSUB:
-            case Opcodes.DMUL:
-            case Opcodes.DDIV:
-            case Opcodes.DREM:
-                pop(4);
-                push(DOUBLE);
-                push(TOP);
-                break;
-            case Opcodes.LSHL:
-            case Opcodes.LSHR:
-            case Opcodes.LUSHR:
-                pop(3);
-                push(LONG);
-                push(TOP);
-                break;
-            case Opcodes.IINC:
-                set(arg, INTEGER);
-                break;
-            case Opcodes.I2L:
-            case Opcodes.F2L:
-                pop(1);
-                push(LONG);
-                push(TOP);
-                break;
-            case Opcodes.I2F:
-                pop(1);
-                push(FLOAT);
-                break;
-            case Opcodes.I2D:
-            case Opcodes.F2D:
-                pop(1);
-                push(DOUBLE);
-                push(TOP);
-                break;
-            case Opcodes.F2I:
-            case Opcodes.ARRAYLENGTH:
-            case Opcodes.INSTANCEOF:
-                pop(1);
-                push(INTEGER);
-                break;
-            case Opcodes.LCMP:
-            case Opcodes.DCMPL:
-            case Opcodes.DCMPG:
-                pop(4);
-                push(INTEGER);
-                break;
-            case Opcodes.JSR:
-            case Opcodes.RET:
-                throw new RuntimeException("JSR/RET are not supported with computeFrames option");
-            case Opcodes.GETSTATIC:
-                push(cw, item.strVal3);
-                break;
-            case Opcodes.PUTSTATIC:
-                pop(item.strVal3);
-                break;
-            case Opcodes.GETFIELD:
-                pop(1);
-                push(cw, item.strVal3);
-                break;
-            case Opcodes.PUTFIELD:
-                pop(item.strVal3);
-                pop();
-                break;
-            case Opcodes.INVOKEVIRTUAL:
-            case Opcodes.INVOKESPECIAL:
-            case Opcodes.INVOKESTATIC:
-            case Opcodes.INVOKEINTERFACE:
-                pop(item.strVal3);
-                if (opcode != Opcodes.INVOKESTATIC) {
-                    t1 = pop();
-                    if (opcode == Opcodes.INVOKESPECIAL
-                            && item.strVal2.charAt(0) == '<')
-                    {
-                        init(t1);
-                    }
+                if (opcode == Opcodes.INVOKESPECIAL
+                        && item.strVal2.charAt(0) == '<') {
+                    init(t1);
                 }
-                push(cw, item.strVal3);
+            }
+            push(cw, item.strVal3);
+            break;
+        case Opcodes.INVOKEDYNAMIC:
+            pop(item.strVal2);
+            push(cw, item.strVal2);
+            break;
+        case Opcodes.NEW:
+            push(UNINITIALIZED | cw.addUninitializedType(item.strVal1, arg));
+            break;
+        case Opcodes.NEWARRAY:
+            pop();
+            switch (arg) {
+            case Opcodes.T_BOOLEAN:
+                push(ARRAY_OF | BOOLEAN);
                 break;
-            case Opcodes.INVOKEDYNAMIC:
-                pop(item.strVal2);
-                push(cw, item.strVal2);
+            case Opcodes.T_CHAR:
+                push(ARRAY_OF | CHAR);
                 break;
-            case Opcodes.NEW:
-                push(UNINITIALIZED | cw.addUninitializedType(item.strVal1, arg));
+            case Opcodes.T_BYTE:
+                push(ARRAY_OF | BYTE);
                 break;
-            case Opcodes.NEWARRAY:
-                pop();
-                switch (arg) {
-                    case Opcodes.T_BOOLEAN:
-                        push(ARRAY_OF | BOOLEAN);
-                        break;
-                    case Opcodes.T_CHAR:
-                        push(ARRAY_OF | CHAR);
-                        break;
-                    case Opcodes.T_BYTE:
-                        push(ARRAY_OF | BYTE);
-                        break;
-                    case Opcodes.T_SHORT:
-                        push(ARRAY_OF | SHORT);
-                        break;
-                    case Opcodes.T_INT:
-                        push(ARRAY_OF | INTEGER);
-                        break;
-                    case Opcodes.T_FLOAT:
-                        push(ARRAY_OF | FLOAT);
-                        break;
-                    case Opcodes.T_DOUBLE:
-                        push(ARRAY_OF | DOUBLE);
-                        break;
-                    // case Opcodes.T_LONG:
-                    default:
-                        push(ARRAY_OF | LONG);
-                        break;
-                }
+            case Opcodes.T_SHORT:
+                push(ARRAY_OF | SHORT);
                 break;
-            case Opcodes.ANEWARRAY:
-                String s = item.strVal1;
-                pop();
-                if (s.charAt(0) == '[') {
-                    push(cw, '[' + s);
-                } else {
-                    push(ARRAY_OF | OBJECT | cw.addType(s));
-                }
+            case Opcodes.T_INT:
+                push(ARRAY_OF | INTEGER);
                 break;
-            case Opcodes.CHECKCAST:
-                s = item.strVal1;
-                pop();
-                if (s.charAt(0) == '[') {
-                    push(cw, s);
-                } else {
-                    push(OBJECT | cw.addType(s));
-                }
+            case Opcodes.T_FLOAT:
+                push(ARRAY_OF | FLOAT);
                 break;
-            // case Opcodes.MULTIANEWARRAY:
+            case Opcodes.T_DOUBLE:
+                push(ARRAY_OF | DOUBLE);
+                break;
+            // case Opcodes.T_LONG:
             default:
-                pop(arg);
-                push(cw, item.strVal1);
+                push(ARRAY_OF | LONG);
                 break;
+            }
+            break;
+        case Opcodes.ANEWARRAY:
+            String s = item.strVal1;
+            pop();
+            if (s.charAt(0) == '[') {
+                push(cw, '[' + s);
+            } else {
+                push(ARRAY_OF | OBJECT | cw.addType(s));
+            }
+            break;
+        case Opcodes.CHECKCAST:
+            s = item.strVal1;
+            pop();
+            if (s.charAt(0) == '[') {
+                push(cw, s);
+            } else {
+                push(OBJECT | cw.addType(s));
+            }
+            break;
+        // case Opcodes.MULTIANEWARRAY:
+        default:
+            pop(arg);
+            push(cw, item.strVal1);
+            break;
         }
     }
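
The reformatted execute() switch above simulates each JVM opcode's effect on the frame's abstract type stack through pop() and push(); for example the DUP_X1 case pops t1 and t2 and pushes them back as t1, t2, t1. A minimal standalone sketch of that transformation (not part of the patch; it uses a plain java.util.ArrayDeque of placeholder type names instead of ASM's packed int representation):

    import java.util.ArrayDeque;
    import java.util.Deque;

    class DupX1Sketch {
        // Mirrors the DUP_X1 case above: pop t1 and t2, then push t1, t2, t1.
        static void dupX1(Deque<String> stack) {
            String t1 = stack.pop();
            String t2 = stack.pop();
            stack.push(t1);
            stack.push(t2);
            stack.push(t1);
        }

        public static void main(String[] args) {
            Deque<String> stack = new ArrayDeque<>();
            stack.push("t2");
            stack.push("t1");
            dupX1(stack);
            System.out.println(stack); // prints [t1, t2, t1], top of stack first
        }
    }
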
 
@@ -1260,10 +1273,13 @@ final class Frame {
      * frames of this basic block. Returns <tt>true</tt> if the input frame of
      * the given label has been changed by this operation.
      *
-     * @param cw the ClassWriter to which this label belongs.
-     * @param frame the basic block whose input frame must be updated.
-     * @param edge the kind of the {@link Edge} between this label and 'label'.
-     *        See {@link Edge#info}.
+     * @param cw
+     *            the ClassWriter to which this label belongs.
+     * @param frame
+     *            the basic block whose input frame must be updated.
+     * @param edge
+     *            the kind of the {@link Edge} between this label and 'label'.
+     *            See {@link Edge#info}.
      * @return <tt>true</tt> if the input frame of the given label has been
      *         changed by this operation.
      */
@@ -1294,7 +1310,8 @@ final class Frame {
                         } else {
                             t = dim + inputStack[nStack - (s & VALUE)];
                         }
-                        if ((s & TOP_IF_LONG_OR_DOUBLE) != 0 && (t == LONG || t == DOUBLE)) {
+                        if ((s & TOP_IF_LONG_OR_DOUBLE) != 0
+                                && (t == LONG || t == DOUBLE)) {
                             t = TOP;
                         }
                     }
@@ -1346,7 +1363,8 @@ final class Frame {
                 } else {
                     t = dim + inputStack[nStack - (s & VALUE)];
                 }
-                if ((s & TOP_IF_LONG_OR_DOUBLE) != 0 && (t == LONG || t == DOUBLE)) {
+                if ((s & TOP_IF_LONG_OR_DOUBLE) != 0
+                        && (t == LONG || t == DOUBLE)) {
                     t = TOP;
                 }
             }
@@ -1363,19 +1381,19 @@ final class Frame {
      * type. Returns <tt>true</tt> if the type array has been modified by this
      * operation.
      *
-     * @param cw the ClassWriter to which this label belongs.
-     * @param t the type with which the type array element must be merged.
-     * @param types an array of types.
-     * @param index the index of the type that must be merged in 'types'.
+     * @param cw
+     *            the ClassWriter to which this label belongs.
+     * @param t
+     *            the type with which the type array element must be merged.
+     * @param types
+     *            an array of types.
+     * @param index
+     *            the index of the type that must be merged in 'types'.
      * @return <tt>true</tt> if the type array has been modified by this
      *         operation.
      */
-    private static boolean merge(
-        final ClassWriter cw,
-        int t,
-        final int[] types,
-        final int index)
-    {
+    private static boolean merge(final ClassWriter cw, int t,
+            final int[] types, final int index) {
         int u = types[index];
         if (u == t) {
             // if the types are equal, merge(u,t)=u, so there is no change
diff --git a/src/asm/scala/tools/asm/Handle.java b/src/asm/scala/tools/asm/Handle.java
index be8f334..5dd06a5 100644
--- a/src/asm/scala/tools/asm/Handle.java
+++ b/src/asm/scala/tools/asm/Handle.java
@@ -66,18 +66,23 @@ public final class Handle {
     /**
      * Constructs a new field or method handle.
      *
-     * @param tag the kind of field or method designated by this Handle. Must be
-     *        {@link Opcodes#H_GETFIELD}, {@link Opcodes#H_GETSTATIC},
-     *        {@link Opcodes#H_PUTFIELD}, {@link Opcodes#H_PUTSTATIC},
-     *        {@link Opcodes#H_INVOKEVIRTUAL}, {@link Opcodes#H_INVOKESTATIC},
-     *        {@link Opcodes#H_INVOKESPECIAL},
-     *        {@link Opcodes#H_NEWINVOKESPECIAL} or
-     *        {@link Opcodes#H_INVOKEINTERFACE}.
-     * @param owner the internal name of the field or method designed by this
-     *        handle.
-     * @param name the name of the field or method designated by this handle.
-     * @param desc the descriptor of the field or method designated by this
-     *        handle.
+     * @param tag
+     *            the kind of field or method designated by this Handle. Must be
+     *            {@link Opcodes#H_GETFIELD}, {@link Opcodes#H_GETSTATIC},
+     *            {@link Opcodes#H_PUTFIELD}, {@link Opcodes#H_PUTSTATIC},
+     *            {@link Opcodes#H_INVOKEVIRTUAL},
+     *            {@link Opcodes#H_INVOKESTATIC},
+     *            {@link Opcodes#H_INVOKESPECIAL},
+     *            {@link Opcodes#H_NEWINVOKESPECIAL} or
+     *            {@link Opcodes#H_INVOKEINTERFACE}.
+     * @param owner
+     *            the internal name of the field or method designated by this
+     *            handle.
+     * @param name
+     *            the name of the field or method designated by this handle.
+     * @param desc
+     *            the descriptor of the field or method designated by this
+     *            handle.
      */
     public Handle(int tag, String owner, String name, String desc) {
         this.tag = tag;
@@ -101,11 +106,9 @@ public final class Handle {
     }
 
     /**
-     * Returns the internal name of the field or method designed by this
-     * handle.
+     * Returns the internal name of the field or method designated by this handle.
      *
-     * @return the internal name of the field or method designed by this
-     *         handle.
+     * @return the internal name of the field or method designated by this handle.
      */
     public String getOwner() {
         return owner;
@@ -138,8 +141,8 @@ public final class Handle {
             return false;
         }
         Handle h = (Handle) obj;
-        return tag == h.tag && owner.equals(h.owner)
-                && name.equals(h.name) && desc.equals(h.desc);
+        return tag == h.tag && owner.equals(h.owner) && name.equals(h.name)
+                && desc.equals(h.desc);
     }
 
     @Override
@@ -149,8 +152,13 @@ public final class Handle {
 
     /**
      * Returns the textual representation of this handle. The textual
-     * representation is: <pre>owner '.' name desc ' ' '(' tag ')'</pre>. As
-     * this format is unambiguous, it can be parsed if necessary.
+     * representation is:
+     *
+     * <pre>
+     * owner '.' name desc ' ' '(' tag ')'
+     * </pre>
+     *
+     * As this format is unambiguous, it can be parsed if necessary.
      */
     @Override
     public String toString() {
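
The Javadoc above fixes the textual representation of a handle as owner '.' name desc ' ' '(' tag ')'. A minimal sketch of what that looks like for a static method handle, assuming the repackaged scala.tools.asm API shown in this patch:

    import scala.tools.asm.Handle;
    import scala.tools.asm.Opcodes;

    class HandleToStringSketch {
        public static void main(String[] args) {
            // A handle to the static method Integer.parseInt(String).
            Handle h = new Handle(Opcodes.H_INVOKESTATIC, "java/lang/Integer",
                    "parseInt", "(Ljava/lang/String;)I");
            // Per the Javadoc above this prints owner '.' name desc ' ' '(' tag ')',
            // e.g. something like "java/lang/Integer.parseInt(Ljava/lang/String;)I (6)".
            System.out.println(h);
        }
    }
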
diff --git a/src/asm/scala/tools/asm/Handler.java b/src/asm/scala/tools/asm/Handler.java
index 9e92bb9..a06cb81 100644
--- a/src/asm/scala/tools/asm/Handler.java
+++ b/src/asm/scala/tools/asm/Handler.java
@@ -72,9 +72,12 @@ class Handler {
      * Removes the range between start and end from the given exception
      * handlers.
      *
-     * @param h an exception handler list.
-     * @param start the start of the range to be removed.
-     * @param end the end of the range to be removed. Maybe null.
+     * @param h
+     *            an exception handler list.
+     * @param start
+     *            the start of the range to be removed.
+     * @param end
+     *            the end of the range to be removed. May be null.
      * @return the exception handler list with the start-end range removed.
      */
     static Handler remove(Handler h, Label start, Label end) {
diff --git a/src/asm/scala/tools/asm/Item.java b/src/asm/scala/tools/asm/Item.java
index 021a0b1..94195a1 100644
--- a/src/asm/scala/tools/asm/Item.java
+++ b/src/asm/scala/tools/asm/Item.java
@@ -53,8 +53,8 @@ final class Item {
      * {@link ClassWriter#METH}, {@link ClassWriter#IMETH},
      * {@link ClassWriter#MTYPE}, {@link ClassWriter#INDY}.
      *
-     * MethodHandle constant 9 variations are stored using a range
-     * of 9 values from {@link ClassWriter#HANDLE_BASE} + 1 to
+     * The 9 MethodHandle constant variations are stored using a range of 9 values
+     * from {@link ClassWriter#HANDLE_BASE} + 1 to
      * {@link ClassWriter#HANDLE_BASE} + 9.
      *
      * Special Item types are used for Items that are stored in the ClassWriter
@@ -115,7 +115,8 @@ final class Item {
      * Constructs an uninitialized {@link Item} for constant pool element at
      * given position.
      *
-     * @param index index of the item to be constructed.
+     * @param index
+     *            index of the item to be constructed.
      */
     Item(final int index) {
         this.index = index;
@@ -124,8 +125,10 @@ final class Item {
     /**
      * Constructs a copy of the given item.
      *
-     * @param index index of the item to be constructed.
-     * @param i the item that must be copied into the item to be constructed.
+     * @param index
+     *            index of the item to be constructed.
+     * @param i
+     *            the item that must be copied into the item to be constructed.
      */
     Item(final int index, final Item i) {
         this.index = index;
@@ -141,7 +144,8 @@ final class Item {
     /**
      * Sets this item to an integer item.
      *
-     * @param intVal the value of this item.
+     * @param intVal
+     *            the value of this item.
      */
     void set(final int intVal) {
         this.type = ClassWriter.INT;
@@ -152,7 +156,8 @@ final class Item {
     /**
      * Sets this item to a long item.
      *
-     * @param longVal the value of this item.
+     * @param longVal
+     *            the value of this item.
      */
     void set(final long longVal) {
         this.type = ClassWriter.LONG;
@@ -163,7 +168,8 @@ final class Item {
     /**
      * Sets this item to a float item.
      *
-     * @param floatVal the value of this item.
+     * @param floatVal
+     *            the value of this item.
      */
     void set(final float floatVal) {
         this.type = ClassWriter.FLOAT;
@@ -174,7 +180,8 @@ final class Item {
     /**
      * Sets this item to a double item.
      *
-     * @param doubleVal the value of this item.
+     * @param doubleVal
+     *            the value of this item.
      */
     void set(final double doubleVal) {
         this.type = ClassWriter.DOUBLE;
@@ -185,49 +192,53 @@ final class Item {
     /**
      * Sets this item to an item that does not hold a primitive value.
      *
-     * @param type the type of this item.
-     * @param strVal1 first part of the value of this item.
-     * @param strVal2 second part of the value of this item.
-     * @param strVal3 third part of the value of this item.
+     * @param type
+     *            the type of this item.
+     * @param strVal1
+     *            first part of the value of this item.
+     * @param strVal2
+     *            second part of the value of this item.
+     * @param strVal3
+     *            third part of the value of this item.
      */
-    void set(
-        final int type,
-        final String strVal1,
-        final String strVal2,
-        final String strVal3)
-    {
+    void set(final int type, final String strVal1, final String strVal2,
+            final String strVal3) {
         this.type = type;
         this.strVal1 = strVal1;
         this.strVal2 = strVal2;
         this.strVal3 = strVal3;
         switch (type) {
-            case ClassWriter.UTF8:
-            case ClassWriter.STR:
-            case ClassWriter.CLASS:
-            case ClassWriter.MTYPE:
-            case ClassWriter.TYPE_NORMAL:
-                hashCode = 0x7FFFFFFF & (type + strVal1.hashCode());
-                return;
-            case ClassWriter.NAME_TYPE:
-                hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
-                        * strVal2.hashCode());
-                return;
-                // ClassWriter.FIELD:
-                // ClassWriter.METH:
-                // ClassWriter.IMETH:
-                // ClassWriter.HANDLE_BASE + 1..9
-            default:
-                hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
-                        * strVal2.hashCode() * strVal3.hashCode());
+        case ClassWriter.UTF8:
+        case ClassWriter.STR:
+        case ClassWriter.CLASS:
+        case ClassWriter.MTYPE:
+        case ClassWriter.TYPE_NORMAL:
+            hashCode = 0x7FFFFFFF & (type + strVal1.hashCode());
+            return;
+        case ClassWriter.NAME_TYPE: {
+            hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
+                    * strVal2.hashCode());
+            return;
+        }
+        // ClassWriter.FIELD:
+        // ClassWriter.METH:
+        // ClassWriter.IMETH:
+        // ClassWriter.HANDLE_BASE + 1..9
+        default:
+            hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
+                    * strVal2.hashCode() * strVal3.hashCode());
         }
     }
 
     /**
      * Sets the item to an InvokeDynamic item.
      *
-     * @param name invokedynamic's name.
-     * @param desc invokedynamic's desc.
-     * @param bsmIndex zero based index into the class attribute BootrapMethods.
+     * @param name
+     *            invokedynamic's name.
+     * @param desc
+     *            invokedynamic's desc.
+     * @param bsmIndex
+     *            zero-based index into the class attribute BootstrapMethods.
      */
     void set(String name, String desc, int bsmIndex) {
         this.type = ClassWriter.INDY;
@@ -241,10 +252,12 @@ final class Item {
     /**
      * Sets the item to a BootstrapMethod item.
      *
-     * @param position position in byte in the class attribute BootrapMethods.
-     * @param hashCode hashcode of the item. This hashcode is processed from
-     *        the hashcode of the bootstrap method and the hashcode of
-     *        all bootstrap arguments.
+     * @param position
+     *            position in bytes in the class attribute BootstrapMethods.
+     * @param hashCode
+     *            hashcode of the item. This hashcode is computed from the
+     *            hashcode of the bootstrap method and the hashcodes of all
+     *            bootstrap arguments.
      */
     void set(int position, int hashCode) {
         this.type = ClassWriter.BSM;
@@ -256,41 +269,42 @@ final class Item {
      * Indicates if the given item is equal to this one. <i>This method assumes
      * that the two items have the same {@link #type}</i>.
      *
-     * @param i the item to be compared to this one. Both items must have the
-     *       same {@link #type}.
+     * @param i
+     *            the item to be compared to this one. Both items must have the
+     *            same {@link #type}.
      * @return <tt>true</tt> if the given item is equal to this one,
      *         <tt>false</tt> otherwise.
      */
     boolean isEqualTo(final Item i) {
         switch (type) {
-            case ClassWriter.UTF8:
-            case ClassWriter.STR:
-            case ClassWriter.CLASS:
-            case ClassWriter.MTYPE:
-            case ClassWriter.TYPE_NORMAL:
-                return i.strVal1.equals(strVal1);
-            case ClassWriter.TYPE_MERGED:
-            case ClassWriter.LONG:
-            case ClassWriter.DOUBLE:
-                return i.longVal == longVal;
-            case ClassWriter.INT:
-            case ClassWriter.FLOAT:
-                return i.intVal == intVal;
-            case ClassWriter.TYPE_UNINIT:
-                return i.intVal == intVal && i.strVal1.equals(strVal1);
-            case ClassWriter.NAME_TYPE:
-                return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2);
-            case ClassWriter.INDY:
-                return i.longVal == longVal && i.strVal1.equals(strVal1)
-                        && i.strVal2.equals(strVal2);
-
-            // case ClassWriter.FIELD:
-            // case ClassWriter.METH:
-            // case ClassWriter.IMETH:
-            // case ClassWriter.HANDLE_BASE + 1..9
-            default:
-                return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2)
-                        && i.strVal3.equals(strVal3);
+        case ClassWriter.UTF8:
+        case ClassWriter.STR:
+        case ClassWriter.CLASS:
+        case ClassWriter.MTYPE:
+        case ClassWriter.TYPE_NORMAL:
+            return i.strVal1.equals(strVal1);
+        case ClassWriter.TYPE_MERGED:
+        case ClassWriter.LONG:
+        case ClassWriter.DOUBLE:
+            return i.longVal == longVal;
+        case ClassWriter.INT:
+        case ClassWriter.FLOAT:
+            return i.intVal == intVal;
+        case ClassWriter.TYPE_UNINIT:
+            return i.intVal == intVal && i.strVal1.equals(strVal1);
+        case ClassWriter.NAME_TYPE:
+            return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2);
+        case ClassWriter.INDY: {
+            return i.longVal == longVal && i.strVal1.equals(strVal1)
+                    && i.strVal2.equals(strVal2);
+        }
+        // case ClassWriter.FIELD:
+        // case ClassWriter.METH:
+        // case ClassWriter.IMETH:
+        // case ClassWriter.HANDLE_BASE + 1..9
+        default:
+            return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2)
+                    && i.strVal3.equals(strVal3);
         }
     }
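
A side note on the hashCode expressions reformatted above: masking with 0x7FFFFFFF clears the sign bit, so the constant pool hash stays non-negative even when the multiplied String hash codes overflow. A standalone illustration (not part of the patch):

    class HashMaskSketch {
        public static void main(String[] args) {
            int raw = "java/lang/Object".hashCode()
                    * "<init>".hashCode() * "()V".hashCode();
            int masked = 0x7FFFFFFF & raw;
            // raw may be negative after integer overflow; masked is always >= 0.
            System.out.println(raw + " -> " + masked);
        }
    }
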
 
diff --git a/src/asm/scala/tools/asm/Label.java b/src/asm/scala/tools/asm/Label.java
index 712c7f2..5d5529c 100644
--- a/src/asm/scala/tools/asm/Label.java
+++ b/src/asm/scala/tools/asm/Label.java
@@ -32,9 +32,9 @@ package scala.tools.asm;
 /**
  * A label represents a position in the bytecode of a method. Labels are used
  * for jump, goto, and switch instructions, and for try catch blocks. A label
- * designates the <i>instruction</i> that is just after. Note however that
- * there can be other elements between a label and the instruction it
- * designates (such as other labels, stack map frames, line numbers, etc.).
+ * designates the <i>instruction</i> that is just after. Note however that there
+ * can be other elements between a label and the instruction it designates (such
+ * as other labels, stack map frames, line numbers, etc.).
  *
  * @author Eric Bruneton
  */
@@ -110,8 +110,8 @@ public class Label {
     /**
      * Field used to associate user information to a label. Warning: this field
      * is used by the ASM tree package. In order to use it with the ASM tree
-     * package you must override the {@link
-     * org.objectweb.asm.tree.MethodNode#getLabelNode} method.
+     * package you must override the
+     * {@link scala.tools.asm.tree.MethodNode#getLabelNode} method.
      */
     public Object info;
 
@@ -154,7 +154,7 @@ public class Label {
      * indicates if this reference uses 2 or 4 bytes, and its absolute value
      * gives the position of the bytecode instruction. This array is also used
      * as a bitset to store the subroutines to which a basic block belongs. This
-     * information is needed in {@linked  MethodWriter#visitMaxs}, after all
+     * information is needed in {@link MethodWriter#visitMaxs}, after all
      * forward references have been resolved. Hence the same array can be used
      * for both purposes without problems.
      */
@@ -177,11 +177,11 @@ public class Label {
      * state of the local variables and the operand stack at the end of each
      * basic block, called the "output frame", <i>relatively</i> to the frame
      * state at the beginning of the basic block, which is called the "input
-     * frame", and which is <i>unknown</i> during this step. The second step,
-     * in {@link MethodWriter#visitMaxs}, is a fix point algorithm that
-     * computes information about the input frame of each basic block, from the
-     * input state of the first basic block (known from the method signature),
-     * and by the using the previously computed relative output frames.
+     * frame", and which is <i>unknown</i> during this step. The second step, in
+     * {@link MethodWriter#visitMaxs}, is a fixed point algorithm that computes
+     * information about the input frame of each basic block, from the input
+     * state of the first basic block (known from the method signature), and by
+     * using the previously computed relative output frames.
      *
      * The algorithm used to compute the maximum stack size only computes the
      * relative output and absolute input stack heights, while the algorithm
@@ -266,11 +266,13 @@ public class Label {
      * generators or adapters.</i>
      *
      * @return the offset corresponding to this label.
-     * @throws IllegalStateException if this label is not resolved yet.
+     * @throws IllegalStateException
+     *             if this label is not resolved yet.
      */
     public int getOffset() {
         if ((status & RESOLVED) == 0) {
-            throw new IllegalStateException("Label offset position has not been resolved yet");
+            throw new IllegalStateException(
+                    "Label offset position has not been resolved yet");
         }
         return position;
     }
@@ -281,21 +283,21 @@ public class Label {
      * directly. Otherwise, a null offset is written and a new forward reference
      * is declared for this label.
      *
-     * @param owner the code writer that calls this method.
-     * @param out the bytecode of the method.
-     * @param source the position of first byte of the bytecode instruction that
-     *        contains this label.
-     * @param wideOffset <tt>true</tt> if the reference must be stored in 4
-     *        bytes, or <tt>false</tt> if it must be stored with 2 bytes.
-     * @throws IllegalArgumentException if this label has not been created by
-     *         the given code writer.
-     */
-    void put(
-        final MethodWriter owner,
-        final ByteVector out,
-        final int source,
-        final boolean wideOffset)
-    {
+     * @param owner
+     *            the code writer that calls this method.
+     * @param out
+     *            the bytecode of the method.
+     * @param source
+     *            the position of first byte of the bytecode instruction that
+     *            contains this label.
+     * @param wideOffset
+     *            <tt>true</tt> if the reference must be stored in 4 bytes, or
+     *            <tt>false</tt> if it must be stored with 2 bytes.
+     * @throws IllegalArgumentException
+     *             if this label has not been created by the given code writer.
+     */
+    void put(final MethodWriter owner, final ByteVector out, final int source,
+            final boolean wideOffset) {
         if ((status & RESOLVED) == 0) {
             if (wideOffset) {
                 addReference(-1 - source, out.length);
@@ -319,25 +321,21 @@ public class Label {
      * yet. For backward references, the offset of the reference can be, and
      * must be, computed and stored directly.
      *
-     * @param sourcePosition the position of the referencing instruction. This
-     *        position will be used to compute the offset of this forward
-     *        reference.
-     * @param referencePosition the position where the offset for this forward
-     *        reference must be stored.
-     */
-    private void addReference(
-        final int sourcePosition,
-        final int referencePosition)
-    {
+     * @param sourcePosition
+     *            the position of the referencing instruction. This position
+     *            will be used to compute the offset of this forward reference.
+     * @param referencePosition
+     *            the position where the offset for this forward reference must
+     *            be stored.
+     */
+    private void addReference(final int sourcePosition,
+            final int referencePosition) {
         if (srcAndRefPositions == null) {
             srcAndRefPositions = new int[6];
         }
         if (referenceCount >= srcAndRefPositions.length) {
             int[] a = new int[srcAndRefPositions.length + 6];
-            System.arraycopy(srcAndRefPositions,
-                    0,
-                    a,
-                    0,
+            System.arraycopy(srcAndRefPositions, 0, a, 0,
                     srcAndRefPositions.length);
             srcAndRefPositions = a;
         }
@@ -351,9 +349,12 @@ public class Label {
      * position becomes known. This method fills in the blanks that were left
      * in the bytecode by each forward reference previously added to this label.
      *
-     * @param owner the code writer that calls this method.
-     * @param position the position of this label in the bytecode.
-     * @param data the bytecode of the method.
+     * @param owner
+     *            the code writer that calls this method.
+     * @param position
+     *            the position of this label in the bytecode.
+     * @param data
+     *            the bytecode of the method.
      * @return <tt>true</tt> if a blank that was left for this label was too
      *         small to store the offset. In such a case the corresponding jump
      *         instruction is replaced with a pseudo instruction (using unused
@@ -361,14 +362,12 @@ public class Label {
      *         instructions will need to be replaced with true instructions with
      *         wider offsets (4 bytes instead of 2). This is done in
      *         {@link MethodWriter#resizeInstructions}.
-     * @throws IllegalArgumentException if this label has already been resolved,
-     *         or if it has not been created by the given code writer.
-     */
-    boolean resolve(
-        final MethodWriter owner,
-        final int position,
-        final byte[] data)
-    {
+     * @throws IllegalArgumentException
+     *             if this label has already been resolved, or if it has not
+     *             been created by the given code writer.
+     */
+    boolean resolve(final MethodWriter owner, final int position,
+            final byte[] data) {
         boolean needUpdate = false;
         this.status |= RESOLVED;
         this.position = position;
@@ -431,7 +430,8 @@ public class Label {
     /**
      * Returns true if this basic block belongs to the given subroutine.
      *
-     * @param id a subroutine id.
+     * @param id
+     *            a subroutine id.
      * @return true if this basic block belongs to the given subroutine.
      */
     boolean inSubroutine(final long id) {
@@ -445,7 +445,8 @@ public class Label {
      * Returns true if this basic block and the given one belong to a common
      * subroutine.
      *
-     * @param block another basic block.
+     * @param block
+     *            another basic block.
      * @return true if this basic block and the given one belong to a common
      *         subroutine.
      */
@@ -464,8 +465,10 @@ public class Label {
     /**
      * Marks this basic block as belonging to the given subroutine.
      *
-     * @param id a subroutine id.
-     * @param nbSubroutines the total number of subroutines in the method.
+     * @param id
+     *            a subroutine id.
+     * @param nbSubroutines
+     *            the total number of subroutines in the method.
      */
     void addToSubroutine(final long id, final int nbSubroutines) {
         if ((status & VISITED) == 0) {
@@ -481,14 +484,16 @@ public class Label {
      * flow graph to find all the blocks that are reachable from the current
      * block WITHOUT following any JSR target.
      *
-     * @param JSR a JSR block that jumps to this subroutine. If this JSR is not
-     *        null it is added to the successor of the RET blocks found in the
-     *        subroutine.
-     * @param id the id of this subroutine.
-     * @param nbSubroutines the total number of subroutines in the method.
-     */
-    void visitSubroutine(final Label JSR, final long id, final int nbSubroutines)
-    {
+     * @param JSR
+     *            a JSR block that jumps to this subroutine. If this JSR is not
+     *            null it is added to the successor of the RET blocks found in
+     *            the subroutine.
+     * @param id
+     *            the id of this subroutine.
+     * @param nbSubroutines
+     *            the total number of subroutines in the method.
+     */
+    void visitSubroutine(final Label JSR, final long id, final int nbSubroutines) {
         // user managed stack of labels, to avoid using a recursive method
         // (recursion can lead to stack overflow with very large methods)
         Label stack = this;
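
The Javadoc reworked above describes forward references: a Label may be the target of a jump before visitLabel() resolves it, and getOffset() throws IllegalStateException until resolution. A minimal usage sketch against the repackaged scala.tools.asm API shown in this patch (COMPUTE_FRAMES is used so stack map frames need not be supplied by hand):

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.Label;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    class LabelForwardRefSketch {
        public static void main(String[] args) {
            ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES);
            cw.visit(Opcodes.V1_6, Opcodes.ACC_PUBLIC, "IsZeroDemo", null,
                    "java/lang/Object", null);
            MethodVisitor mv = cw.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC,
                    "isZero", "(I)Z", null, null);
            Label zero = new Label();
            mv.visitCode();
            mv.visitVarInsn(Opcodes.ILOAD, 0);
            mv.visitJumpInsn(Opcodes.IFEQ, zero);  // forward reference: 'zero' not yet resolved
            mv.visitInsn(Opcodes.ICONST_0);
            mv.visitInsn(Opcodes.IRETURN);
            mv.visitLabel(zero);                   // the label is resolved here
            mv.visitInsn(Opcodes.ICONST_1);
            mv.visitInsn(Opcodes.IRETURN);
            mv.visitMaxs(0, 0);                    // recomputed because of COMPUTE_FRAMES
            mv.visitEnd();
            cw.visitEnd();
            System.out.println(cw.toByteArray().length + " bytes generated");
        }
    }
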
diff --git a/src/asm/scala/tools/asm/MethodVisitor.java b/src/asm/scala/tools/asm/MethodVisitor.java
index a8a859a..e43ca97 100644
--- a/src/asm/scala/tools/asm/MethodVisitor.java
+++ b/src/asm/scala/tools/asm/MethodVisitor.java
@@ -30,19 +30,19 @@
 package scala.tools.asm;
 
 /**
- * A visitor to visit a Java method. The methods of this class must be
- * called in the following order: [ <tt>visitAnnotationDefault</tt> ] (
+ * A visitor to visit a Java method. The methods of this class must be called in
+ * the following order: [ <tt>visitAnnotationDefault</tt> ] (
  * <tt>visitAnnotation</tt> | <tt>visitParameterAnnotation</tt> |
  * <tt>visitAttribute</tt> )* [ <tt>visitCode</tt> ( <tt>visitFrame</tt> |
- * <tt>visit</tt><i>X</i>Insn</tt> | <tt>visitLabel</tt> | <tt>visitTryCatchBlock</tt> |
- * <tt>visitLocalVariable</tt> | <tt>visitLineNumber</tt> )* <tt>visitMaxs</tt> ]
- * <tt>visitEnd</tt>. In addition, the <tt>visit</tt><i>X</i>Insn</tt>
- * and <tt>visitLabel</tt> methods must be called in the sequential order of
- * the bytecode instructions of the visited code, <tt>visitTryCatchBlock</tt>
- * must be called <i>before</i> the labels passed as arguments have been
- * visited, and the <tt>visitLocalVariable</tt> and <tt>visitLineNumber</tt>
- * methods must be called <i>after</i> the labels passed as arguments have been
- * visited.
+ * <tt>visit</tt><i>X</i>Insn</tt> | <tt>visitLabel</tt> |
+ * <tt>visitTryCatchBlock</tt> | <tt>visitLocalVariable</tt> |
+ * <tt>visitLineNumber</tt> )* <tt>visitMaxs</tt> ] <tt>visitEnd</tt>. In
+ * addition, the <tt>visit</tt><i>X</i>Insn</tt> and <tt>visitLabel</tt> methods
+ * must be called in the sequential order of the bytecode instructions of the
+ * visited code, <tt>visitTryCatchBlock</tt> must be called <i>before</i> the
+ * labels passed as arguments have been visited, and the
+ * <tt>visitLocalVariable</tt> and <tt>visitLineNumber</tt> methods must be
+ * called <i>after</i> the labels passed as arguments have been visited.
  *
  * @author Eric Bruneton
  */
@@ -63,8 +63,9 @@ public abstract class MethodVisitor {
     /**
      * Constructs a new {@link MethodVisitor}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
      */
     public MethodVisitor(final int api) {
         this(api, null);
@@ -73,15 +74,17 @@ public abstract class MethodVisitor {
     /**
      * Constructs a new {@link MethodVisitor}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
-     * @param mv the method visitor to which this visitor must delegate method
-     *        calls. May be null.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
+     * @param mv
+     *            the method visitor to which this visitor must delegate method
+     *            calls. May be null.
      */
     public MethodVisitor(final int api, final MethodVisitor mv) {
-        /*if (api != Opcodes.ASM4) {
+        if (api != Opcodes.ASM4) {
             throw new IllegalArgumentException();
-        }*/
+        }
         this.api = api;
         this.mv = mv;
     }
@@ -94,8 +97,8 @@ public abstract class MethodVisitor {
      * Visits the default value of this annotation interface method.
      *
      * @return a visitor to visit the actual default value of this
-     *         annotation interface method, or <tt>null</tt> if this visitor
-     *         is not interested in visiting this default value. The 'name'
+     *         annotation interface method, or <tt>null</tt> if this visitor is
+     *         not interested in visiting this default value. The 'name'
      *         parameters passed to the methods of this annotation visitor are
      *         ignored. Moreover, exactly one visit method must be called on this
      *         annotation visitor, followed by visitEnd.
@@ -110,8 +113,10 @@ public abstract class MethodVisitor {
     /**
      * Visits an annotation of this method.
      *
-     * @param desc the class descriptor of the annotation class.
-     * @param visible <tt>true</tt> if the annotation is visible at runtime.
+     * @param desc
+     *            the class descriptor of the annotation class.
+     * @param visible
+     *            <tt>true</tt> if the annotation is visible at runtime.
      * @return a visitor to visit the annotation values, or <tt>null</tt> if
      *         this visitor is not interested in visiting this annotation.
      */
@@ -125,17 +130,17 @@ public abstract class MethodVisitor {
     /**
      * Visits an annotation of a parameter of this method.
      *
-     * @param parameter the parameter index.
-     * @param desc the class descriptor of the annotation class.
-     * @param visible <tt>true</tt> if the annotation is visible at runtime.
+     * @param parameter
+     *            the parameter index.
+     * @param desc
+     *            the class descriptor of the annotation class.
+     * @param visible
+     *            <tt>true</tt> if the annotation is visible at runtime.
      * @return a visitor to visit the annotation values, or <tt>null</tt> if
      *         this visitor is not interested in visiting this annotation.
      */
-    public AnnotationVisitor visitParameterAnnotation(
-        int parameter,
-        String desc,
-        boolean visible)
-    {
+    public AnnotationVisitor visitParameterAnnotation(int parameter,
+            String desc, boolean visible) {
         if (mv != null) {
             return mv.visitParameterAnnotation(parameter, desc, visible);
         }
@@ -145,7 +150,8 @@ public abstract class MethodVisitor {
     /**
      * Visits a non standard attribute of this method.
      *
-     * @param attr an attribute.
+     * @param attr
+     *            an attribute.
      */
     public void visitAttribute(Attribute attr) {
         if (mv != null) {
@@ -169,57 +175,74 @@ public abstract class MethodVisitor {
      * such as GOTO or THROW, that is the target of a jump instruction, or that
      * starts an exception handler block. The visited types must describe the
      * values of the local variables and of the operand stack elements <i>just
-     * before</i> <b>i</b> is executed. <br> <br> (*) this is mandatory only
-     * for classes whose version is greater than or equal to
-     * {@link Opcodes#V1_6 V1_6}. <br> <br> Packed frames are basically
-     * "deltas" from the state of the previous frame (very first frame is
-     * implicitly defined by the method's parameters and access flags): <ul>
+     * before</i> <b>i</b> is executed.<br>
+     * <br>
+     * (*) this is mandatory only for classes whose version is greater than or
+     * equal to {@link Opcodes#V1_6 V1_6}. <br>
+     * <br>
+     * The frames of a method must be given either in expanded form, or in
+     * compressed form (all frames must use the same format, i.e. you must not
+     * mix expanded and compressed frames within a single method):
+     * <ul>
+     * <li>In expanded form, all frames must have the F_NEW type.</li>
+     * <li>In compressed form, frames are basically "deltas" from the state of
+     * the previous frame:
+     * <ul>
      * <li>{@link Opcodes#F_SAME} representing frame with exactly the same
-     * locals as the previous frame and with the empty stack.</li> <li>{@link Opcodes#F_SAME1}
-     * representing frame with exactly the same locals as the previous frame and
-     * with single value on the stack (<code>nStack</code> is 1 and
-     * <code>stack[0]</code> contains value for the type of the stack item).</li>
+     * locals as the previous frame and with the empty stack.</li>
+     * <li>{@link Opcodes#F_SAME1} representing frame with exactly the same
+     * locals as the previous frame and with single value on the stack (
+     * <code>nStack</code> is 1 and <code>stack[0]</code> contains value for the
+     * type of the stack item).</li>
      * <li>{@link Opcodes#F_APPEND} representing frame with current locals are
      * the same as the locals in the previous frame, except that additional
      * locals are defined (<code>nLocal</code> is 1, 2 or 3 and
      * <code>local</code> elements contains values representing added types).</li>
-     * <li>{@link Opcodes#F_CHOP} representing frame with current locals are
-     * the same as the locals in the previous frame, except that the last 1-3
-     * locals are absent and with the empty stack (<code>nLocals</code> is 1,
-     * 2 or 3). </li> <li>{@link Opcodes#F_FULL} representing complete frame
-     * data.</li> </li> </ul>
+     * <li>{@link Opcodes#F_CHOP} representing frame with current locals are the
+     * same as the locals in the previous frame, except that the last 1-3 locals
+     * are absent and with the empty stack (<code>nLocals</code> is 1, 2 or 3).</li>
+     * <li>{@link Opcodes#F_FULL} representing complete frame data.</li></li>
+     * </ul>
+     * </ul> <br>
+     * In both cases the first frame, corresponding to the method's parameters
+     * and access flags, is implicit and must not be visited. Also, it is
+     * illegal to visit two or more frames for the same code location (i.e., at
+     * least one instruction must be visited between two calls to visitFrame).
      *
-     * @param type the type of this stack map frame. Must be
-     *        {@link Opcodes#F_NEW} for expanded frames, or
-     *        {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND},
-     *        {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or
-     *        {@link Opcodes#F_APPEND}, {@link Opcodes#F_SAME1} for compressed
-     *        frames.
-     * @param nLocal the number of local variables in the visited frame.
-     * @param local the local variable types in this frame. This array must not
-     *        be modified. Primitive types are represented by
-     *        {@link Opcodes#TOP}, {@link Opcodes#INTEGER},
-     *        {@link Opcodes#FLOAT}, {@link Opcodes#LONG},
-     *        {@link Opcodes#DOUBLE},{@link Opcodes#NULL} or
-     *        {@link Opcodes#UNINITIALIZED_THIS} (long and double are
-     *        represented by a single element). Reference types are represented
-     *        by String objects (representing internal names), and uninitialized
-     *        types by Label objects (this label designates the NEW instruction
-     *        that created this uninitialized value).
-     * @param nStack the number of operand stack elements in the visited frame.
-     * @param stack the operand stack types in this frame. This array must not
-     *        be modified. Its content has the same format as the "local" array.
-     * @throws IllegalStateException if a frame is visited just after another
-     *        one, without any instruction between the two (unless this frame
-     *        is a Opcodes#F_SAME frame, in which case it is silently ignored).
+     * @param type
+     *            the type of this stack map frame. Must be
+     *            {@link Opcodes#F_NEW} for expanded frames, or
+     *            {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND},
+     *            {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or
+     *            {@link Opcodes#F_APPEND}, {@link Opcodes#F_SAME1} for
+     *            compressed frames.
+     * @param nLocal
+     *            the number of local variables in the visited frame.
+     * @param local
+     *            the local variable types in this frame. This array must not be
+     *            modified. Primitive types are represented by
+     *            {@link Opcodes#TOP}, {@link Opcodes#INTEGER},
+     *            {@link Opcodes#FLOAT}, {@link Opcodes#LONG},
+     *            {@link Opcodes#DOUBLE},{@link Opcodes#NULL} or
+     *            {@link Opcodes#UNINITIALIZED_THIS} (long and double are
+     *            represented by a single element). Reference types are
+     *            represented by String objects (representing internal names),
+     *            and uninitialized types by Label objects (this label
+     *            designates the NEW instruction that created this uninitialized
+     *            value).
+     * @param nStack
+     *            the number of operand stack elements in the visited frame.
+     * @param stack
+     *            the operand stack types in this frame. This array must not be
+     *            modified. Its content has the same format as the "local"
+     *            array.
+     * @throws IllegalStateException
+     *             if a frame is visited just after another one, without any
+     *             instruction between the two (unless this frame is a
+     *             Opcodes#F_SAME frame, in which case it is silently ignored).
      */
-    public void visitFrame(
-        int type,
-        int nLocal,
-        Object[] local,
-        int nStack,
-        Object[] stack)
-    {
+    public void visitFrame(int type, int nLocal, Object[] local, int nStack,
+            Object[] stack) {
         if (mv != null) {
             mv.visitFrame(type, nLocal, local, nStack, stack);
         }
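
The expanded visitFrame Javadoc above distinguishes expanded (F_NEW) frames from compressed frame deltas such as F_SAME and F_APPEND. A short runnable sketch (not part of the patch) that supplies one compressed frame by hand, using ClassWriter(0) so the writer does not compute frames itself; the jump target needs a frame because the class version is V1_6:

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.Label;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    class VisitFrameSketch {
        public static void main(String[] args) {
            ClassWriter cw = new ClassWriter(0); // frames supplied by hand
            cw.visit(Opcodes.V1_6, Opcodes.ACC_PUBLIC, "AbsDemo", null,
                    "java/lang/Object", null);
            MethodVisitor mv = cw.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC,
                    "abs", "(I)I", null, null);
            Label nonNegative = new Label();
            mv.visitCode();
            mv.visitVarInsn(Opcodes.ILOAD, 0);
            mv.visitJumpInsn(Opcodes.IFGE, nonNegative);
            mv.visitVarInsn(Opcodes.ILOAD, 0);
            mv.visitInsn(Opcodes.INEG);
            mv.visitInsn(Opcodes.IRETURN);
            mv.visitLabel(nonNegative);
            // Compressed frame at the jump target: same locals as before, empty stack.
            mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
            mv.visitVarInsn(Opcodes.ILOAD, 0);
            mv.visitInsn(Opcodes.IRETURN);
            mv.visitMaxs(1, 1);
            mv.visitEnd();
            cw.visitEnd();
            System.out.println(cw.toByteArray().length + " bytes generated");
        }
    }
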
@@ -232,20 +255,22 @@ public abstract class MethodVisitor {
     /**
      * Visits a zero operand instruction.
      *
-     * @param opcode the opcode of the instruction to be visited. This opcode is
-     *        either NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1, ICONST_2,
-     *        ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1, FCONST_0,
-     *        FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, LALOAD, FALOAD,
-     *        DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IASTORE, LASTORE, FASTORE,
-     *        DASTORE, AASTORE, BASTORE, CASTORE, SASTORE, POP, POP2, DUP,
-     *        DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP, IADD, LADD, FADD,
-     *        DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV, LDIV,
-     *        FDIV, DDIV, IREM, LREM, FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL,
-     *        LSHL, ISHR, LSHR, IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR,
-     *        I2L, I2F, I2D, L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B,
-     *        I2C, I2S, LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN,
-     *        FRETURN, DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW,
-     *        MONITORENTER, or MONITOREXIT.
+     * @param opcode
+     *            the opcode of the instruction to be visited. This opcode is
+     *            either NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
+     *            ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1,
+     *            FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD,
+     *            LALOAD, FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD,
+     *            IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE,
+     *            SASTORE, POP, POP2, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1,
+     *            DUP2_X2, SWAP, IADD, LADD, FADD, DADD, ISUB, LSUB, FSUB, DSUB,
+     *            IMUL, LMUL, FMUL, DMUL, IDIV, LDIV, FDIV, DDIV, IREM, LREM,
+     *            FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL, LSHL, ISHR, LSHR,
+     *            IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR, I2L, I2F, I2D,
+     *            L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B, I2C, I2S,
+     *            LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN, FRETURN,
+     *            DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW, MONITORENTER,
+     *            or MONITOREXIT.
      */
     public void visitInsn(int opcode) {
         if (mv != null) {
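
For illustration, a minimal sketch of a few of these zero-operand opcodes, assuming an mv positioned inside a method body; it emits the bytecode of "return 2 + 3;":

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    class ZeroOperandSketch {
        // Emits: iconst_2, iconst_3, iadd, ireturn
        static void emitAdd(MethodVisitor mv) {
            mv.visitInsn(Opcodes.ICONST_2);
            mv.visitInsn(Opcodes.ICONST_3);
            mv.visitInsn(Opcodes.IADD);
            mv.visitInsn(Opcodes.IRETURN);
        }
    }
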
@@ -256,17 +281,20 @@ public abstract class MethodVisitor {
     /**
      * Visits an instruction with a single int operand.
      *
-     * @param opcode the opcode of the instruction to be visited. This opcode is
-     *        either BIPUSH, SIPUSH or NEWARRAY.
-     * @param operand the operand of the instruction to be visited.<br> When
-     *        opcode is BIPUSH, operand value should be between Byte.MIN_VALUE
-     *        and Byte.MAX_VALUE.<br> When opcode is SIPUSH, operand value
-     *        should be between Short.MIN_VALUE and Short.MAX_VALUE.<br> When
-     *        opcode is NEWARRAY, operand value should be one of
-     *        {@link Opcodes#T_BOOLEAN}, {@link Opcodes#T_CHAR},
-     *        {@link Opcodes#T_FLOAT}, {@link Opcodes#T_DOUBLE},
-     *        {@link Opcodes#T_BYTE}, {@link Opcodes#T_SHORT},
-     *        {@link Opcodes#T_INT} or {@link Opcodes#T_LONG}.
+     * @param opcode
+     *            the opcode of the instruction to be visited. This opcode is
+     *            either BIPUSH, SIPUSH or NEWARRAY.
+     * @param operand
+     *            the operand of the instruction to be visited.<br>
+     *            When opcode is BIPUSH, operand value should be between
+     *            Byte.MIN_VALUE and Byte.MAX_VALUE.<br>
+     *            When opcode is SIPUSH, operand value should be between
+     *            Short.MIN_VALUE and Short.MAX_VALUE.<br>
+     *            When opcode is NEWARRAY, operand value should be one of
+     *            {@link Opcodes#T_BOOLEAN}, {@link Opcodes#T_CHAR},
+     *            {@link Opcodes#T_FLOAT}, {@link Opcodes#T_DOUBLE},
+     *            {@link Opcodes#T_BYTE}, {@link Opcodes#T_SHORT},
+     *            {@link Opcodes#T_INT} or {@link Opcodes#T_LONG}.
      */
     public void visitIntInsn(int opcode, int operand) {
         if (mv != null) {
@@ -278,11 +306,13 @@ public abstract class MethodVisitor {
      * Visits a local variable instruction. A local variable instruction is an
      * instruction that loads or stores the value of a local variable.
      *
-     * @param opcode the opcode of the local variable instruction to be visited.
-     *        This opcode is either ILOAD, LLOAD, FLOAD, DLOAD, ALOAD, ISTORE,
-     *        LSTORE, FSTORE, DSTORE, ASTORE or RET.
-     * @param var the operand of the instruction to be visited. This operand is
-     *        the index of a local variable.
+     * @param opcode
+     *            the opcode of the local variable instruction to be visited.
+     *            This opcode is either ILOAD, LLOAD, FLOAD, DLOAD, ALOAD,
+     *            ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET.
+     * @param var
+     *            the operand of the instruction to be visited. This operand is
+     *            the index of a local variable.
      */
     public void visitVarInsn(int opcode, int var) {
         if (mv != null) {
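
For illustration, a minimal sketch combining the int-operand and local-variable forms, assuming local slot 1 is free; it pushes a small constant with BIPUSH, allocates an int array with NEWARRAY, then stores and reloads it:

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    class IntVarInsnSketch {
        // Emits: bipush 10, newarray int, astore 1, aload 1
        static void emitArrayAlloc(MethodVisitor mv) {
            mv.visitIntInsn(Opcodes.BIPUSH, 10);
            mv.visitIntInsn(Opcodes.NEWARRAY, Opcodes.T_INT);
            mv.visitVarInsn(Opcodes.ASTORE, 1);
            mv.visitVarInsn(Opcodes.ALOAD, 1);
        }
    }
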
@@ -294,11 +324,13 @@ public abstract class MethodVisitor {
      * Visits a type instruction. A type instruction is an instruction that
      * takes the internal name of a class as parameter.
      *
-     * @param opcode the opcode of the type instruction to be visited. This
-     *        opcode is either NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
-     * @param type the operand of the instruction to be visited. This operand
-     *        must be the internal name of an object or array class (see {@link
-     *        Type#getInternalName() getInternalName}).
+     * @param opcode
+     *            the opcode of the type instruction to be visited. This opcode
+     *            is either NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
+     * @param type
+     *            the operand of the instruction to be visited. This operand
+     *            must be the internal name of an object or array class (see
+     *            {@link Type#getInternalName() getInternalName}).
      */
     public void visitTypeInsn(int opcode, String type) {
         if (mv != null) {
@@ -310,14 +342,19 @@ public abstract class MethodVisitor {
      * Visits a field instruction. A field instruction is an instruction that
      * loads or stores the value of a field of an object.
      *
-     * @param opcode the opcode of the type instruction to be visited. This
-     *        opcode is either GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
-     * @param owner the internal name of the field's owner class (see {@link
-     *        Type#getInternalName() getInternalName}).
-     * @param name the field's name.
-     * @param desc the field's descriptor (see {@link Type Type}).
+     * @param opcode
+     *            the opcode of the type instruction to be visited. This opcode
+     *            is either GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
+     * @param owner
+     *            the internal name of the field's owner class (see
+     *            {@link Type#getInternalName() getInternalName}).
+     * @param name
+     *            the field's name.
+     * @param desc
+     *            the field's descriptor (see {@link Type Type}).
      */
-    public void visitFieldInsn(int opcode, String owner, String name, String desc) {
+    public void visitFieldInsn(int opcode, String owner, String name,
+            String desc) {
         if (mv != null) {
             mv.visitFieldInsn(opcode, owner, name, desc);
         }
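
For illustration, a minimal sketch of a type instruction and a field instruction, assuming an mv inside a method body; the type operand is an internal name, while the field type uses descriptor syntax:

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    class TypeFieldInsnSketch {
        static void emit(MethodVisitor mv) {
            // checkcast to String: the operand is an internal name
            mv.visitTypeInsn(Opcodes.CHECKCAST, "java/lang/String");
            // read the static field System.out: name plus field descriptor
            mv.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/System", "out",
                    "Ljava/io/PrintStream;");
        }
    }
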
@@ -327,15 +364,20 @@ public abstract class MethodVisitor {
      * Visits a method instruction. A method instruction is an instruction that
      * invokes a method.
      *
-     * @param opcode the opcode of the type instruction to be visited. This
-     *        opcode is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC
-     *        or INVOKEINTERFACE.
-     * @param owner the internal name of the method's owner class (see {@link
-     *        Type#getInternalName() getInternalName}).
-     * @param name the method's name.
-     * @param desc the method's descriptor (see {@link Type Type}).
+     * @param opcode
+     *            the opcode of the type instruction to be visited. This opcode
+     *            is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
+     *            INVOKEINTERFACE.
+     * @param owner
+     *            the internal name of the method's owner class (see
+     *            {@link Type#getInternalName() getInternalName}).
+     * @param name
+     *            the method's name.
+     * @param desc
+     *            the method's descriptor (see {@link Type Type}).
      */
-    public void visitMethodInsn(int opcode, String owner, String name, String desc) {
+    public void visitMethodInsn(int opcode, String owner, String name,
+            String desc) {
         if (mv != null) {
             mv.visitMethodInsn(opcode, owner, name, desc);
         }
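
For illustration, the familiar System.out.println call written with this API, a minimal sketch assuming an mv inside a method body (visitLdcInsn is documented further below):

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    class MethodInsnSketch {
        // Emits the call sequence for System.out.println("hello").
        static void emitPrintln(MethodVisitor mv) {
            mv.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/System", "out",
                    "Ljava/io/PrintStream;");
            mv.visitLdcInsn("hello");
            mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/io/PrintStream",
                    "println", "(Ljava/lang/String;)V");
        }
    }
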
@@ -344,16 +386,21 @@ public abstract class MethodVisitor {
     /**
      * Visits an invokedynamic instruction.
      *
-     * @param name the method's name.
-     * @param desc the method's descriptor (see {@link Type Type}).
-     * @param bsm the bootstrap method.
-     * @param bsmArgs the bootstrap method constant arguments. Each argument
-     *        must be an {@link Integer}, {@link Float}, {@link Long},
-     *        {@link Double}, {@link String}, {@link Type} or {@link Handle}
-     *        value. This method is allowed to modify the content of the array
-     *        so a caller should expect that this array may change.
+     * @param name
+     *            the method's name.
+     * @param desc
+     *            the method's descriptor (see {@link Type Type}).
+     * @param bsm
+     *            the bootstrap method.
+     * @param bsmArgs
+     *            the bootstrap method constant arguments. Each argument must be
+     *            an {@link Integer}, {@link Float}, {@link Long},
+     *            {@link Double}, {@link String}, {@link Type} or {@link Handle}
+     *            value. This method is allowed to modify the content of the
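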
+     *            array, so a caller should expect that this array may change.
      */
-    public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) {
+    public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+            Object... bsmArgs) {
         if (mv != null) {
             mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
         }
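
For illustration, a minimal invokedynamic sketch; the bootstrap class demo/Bootstraps and its bootstrap method are hypothetical names used only to show the Handle and bsmArgs shape:

    import scala.tools.asm.Handle;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    class IndySketch {
        // Emits an invokedynamic whose call site takes no arguments and
        // returns a String; the bootstrap method reference is illustrative.
        static void emitIndy(MethodVisitor mv) {
            Handle bsm = new Handle(Opcodes.H_INVOKESTATIC, "demo/Bootstraps",
                    "bootstrap",
                    "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;"
                            + "Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;");
            mv.visitInvokeDynamicInsn("greet", "()Ljava/lang/String;", bsm);
        }
    }
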
@@ -363,13 +410,15 @@ public abstract class MethodVisitor {
      * Visits a jump instruction. A jump instruction is an instruction that may
      * jump to another instruction.
      *
-     * @param opcode the opcode of the type instruction to be visited. This
-     *        opcode is either IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
-     *        IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ,
-     *        IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
-     * @param label the operand of the instruction to be visited. This operand
-     *        is a label that designates the instruction to which the jump
-     *        instruction may jump.
+     * @param opcode
+     *            the opcode of the type instruction to be visited. This opcode
+     *            is either IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
+     *            IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
+     *            IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
+     * @param label
+     *            the operand of the instruction to be visited. This operand is
+     *            a label that designates the instruction to which the jump
+     *            instruction may jump.
      */
     public void visitJumpInsn(int opcode, Label label) {
         if (mv != null) {
@@ -381,7 +430,8 @@ public abstract class MethodVisitor {
      * Visits a label. A label designates the instruction that will be visited
      * just after it.
      *
-     * @param label a {@link Label Label} object.
+     * @param label
+     *            a {@link Label Label} object.
      */
     public void visitLabel(Label label) {
         if (mv != null) {
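
For illustration, a minimal jump-and-label sketch for "return (slot1 == 0) ? 1 : 0;", assuming the enclosing ClassWriter computes stack map frames:

    import scala.tools.asm.Label;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    class JumpSketch {
        static void emitIsZero(MethodVisitor mv) {
            Label notZero = new Label();
            mv.visitVarInsn(Opcodes.ILOAD, 1);
            mv.visitJumpInsn(Opcodes.IFNE, notZero); // jump if value != 0
            mv.visitInsn(Opcodes.ICONST_1);
            mv.visitInsn(Opcodes.IRETURN);
            mv.visitLabel(notZero);                  // the jump target
            mv.visitInsn(Opcodes.ICONST_0);
            mv.visitInsn(Opcodes.IRETURN);
        }
    }
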
@@ -398,41 +448,44 @@ public abstract class MethodVisitor {
      * future versions of the Java Virtual Machine. To easily detect new
      * constant types, implementations of this method should check for
      * unexpected constant types, like this:
+     *
      * <pre>
      * if (cst instanceof Integer) {
-     *   // ...
+     *     // ...
      * } else if (cst instanceof Float) {
-     *   // ...
+     *     // ...
      * } else if (cst instanceof Long) {
-     *   // ...
-     * } else if (cst instanceof Double) {
-     *   // ...
-     * } else if (cst instanceof String) {
-     *   // ...
-     * } else if (cst instanceof Type) {
-     *   int sort = ((Type) cst).getSort();
-     *   if (sort == Type.OBJECT) {
      *     // ...
-     *   } else if (sort == Type.ARRAY) {
+     * } else if (cst instanceof Double) {
      *     // ...
-     *   } else if (sort == Type.METHOD) {
+     * } else if (cst instanceof String) {
      *     // ...
-     *   } else {
-     *     // throw an exception
-     *   }
+     * } else if (cst instanceof Type) {
+     *     int sort = ((Type) cst).getSort();
+     *     if (sort == Type.OBJECT) {
+     *         // ...
+     *     } else if (sort == Type.ARRAY) {
+     *         // ...
+     *     } else if (sort == Type.METHOD) {
+     *         // ...
+     *     } else {
+     *         // throw an exception
+     *     }
      * } else if (cst instanceof Handle) {
-     *   // ...
+     *     // ...
      * } else {
-     *   // throw an exception
-     * }</pre>
+     *     // throw an exception
+     * }
+     * </pre>
      *
-     * @param cst the constant to be loaded on the stack. This parameter must be
-     *        a non null {@link Integer}, a {@link Float}, a {@link Long}, a
-     *        {@link Double}, a {@link String}, a {@link Type} of OBJECT or ARRAY
-     *        sort for <tt>.class</tt> constants, for classes whose version is
-     *        49.0, a {@link Type} of METHOD sort or a {@link Handle} for
-     *        MethodType and MethodHandle constants, for classes whose version
-     *        is 51.0.
+     * @param cst
+     *            the constant to be loaded on the stack. This parameter must be
+     *            a non-null {@link Integer}, {@link Float}, {@link Long},
+     *            {@link Double} or {@link String}, a {@link Type} of OBJECT or
+     *            ARRAY sort for <tt>.class</tt> constants (only for classes
+     *            whose version is at least 49.0), or a {@link Type} of METHOD
+     *            sort or a {@link Handle} for MethodType and MethodHandle
+     *            constants (only for classes whose version is at least 51.0).
      */
     public void visitLdcInsn(Object cst) {
         if (mv != null) {
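
For illustration, a minimal sketch loading a String constant and a class constant (a Type of OBJECT sort, valid for class file versions of at least 49.0):

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Type;

    class LdcSketch {
        static void emitConstants(MethodVisitor mv) {
            mv.visitLdcInsn("a string constant");
            mv.visitLdcInsn(Type.getType("Ljava/lang/String;"));
        }
    }
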
@@ -443,8 +496,10 @@ public abstract class MethodVisitor {
     /**
      * Visits an IINC instruction.
      *
-     * @param var index of the local variable to be incremented.
-     * @param increment amount to increment the local variable by.
+     * @param var
+     *            index of the local variable to be incremented.
+     * @param increment
+     *            amount to increment the local variable by.
      */
     public void visitIincInsn(int var, int increment) {
         if (mv != null) {
@@ -455,13 +510,18 @@ public abstract class MethodVisitor {
     /**
      * Visits a TABLESWITCH instruction.
      *
-     * @param min the minimum key value.
-     * @param max the maximum key value.
-     * @param dflt beginning of the default handler block.
-     * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
-     *        the beginning of the handler block for the <tt>min + i</tt> key.
+     * @param min
+     *            the minimum key value.
+     * @param max
+     *            the maximum key value.
+     * @param dflt
+     *            beginning of the default handler block.
+     * @param labels
+     *            beginnings of the handler blocks. <tt>labels[i]</tt> is the
+     *            beginning of the handler block for the <tt>min + i</tt> key.
      */
-    public void visitTableSwitchInsn(int min, int max, Label dflt, Label... labels) {
+    public void visitTableSwitchInsn(int min, int max, Label dflt,
+            Label... labels) {
         if (mv != null) {
             mv.visitTableSwitchInsn(min, max, dflt, labels);
         }
@@ -470,10 +530,13 @@ public abstract class MethodVisitor {
     /**
      * Visits a LOOKUPSWITCH instruction.
      *
-     * @param dflt beginning of the default handler block.
-     * @param keys the values of the keys.
-     * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
-     *        the beginning of the handler block for the <tt>keys[i]</tt> key.
+     * @param dflt
+     *            beginning of the default handler block.
+     * @param keys
+     *            the values of the keys.
+     * @param labels
+     *            beginnings of the handler blocks. <tt>labels[i]</tt> is the
+     *            beginning of the handler block for the <tt>keys[i]</tt> key.
      */
     public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) {
         if (mv != null) {
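
For illustration, two minimal sketches, one per switch form, assuming the labels are created and later visited by the caller; TABLESWITCH covers the dense key range min..max, while LOOKUPSWITCH pairs each key with its label:

    import scala.tools.asm.Label;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    class SwitchSketch {
        // labels[i] handles the key min + i.
        static void emitTableSwitch(MethodVisitor mv, Label dflt,
                Label l0, Label l1, Label l2) {
            mv.visitVarInsn(Opcodes.ILOAD, 1);
            mv.visitTableSwitchInsn(0, 2, dflt, l0, l1, l2);
        }

        // labels[i] handles keys[i]; keys may be sparse.
        static void emitLookupSwitch(MethodVisitor mv, Label dflt,
                Label low, Label high) {
            mv.visitVarInsn(Opcodes.ILOAD, 1);
            mv.visitLookupSwitchInsn(dflt, new int[] { 10, 1000 },
                    new Label[] { low, high });
        }
    }
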
@@ -484,8 +547,10 @@ public abstract class MethodVisitor {
     /**
      * Visits a MULTIANEWARRAY instruction.
      *
-     * @param desc an array type descriptor (see {@link Type Type}).
-     * @param dims number of dimensions of the array to allocate.
+     * @param desc
+     *            an array type descriptor (see {@link Type Type}).
+     * @param dims
+     *            number of dimensions of the array to allocate.
      */
     public void visitMultiANewArrayInsn(String desc, int dims) {
         if (mv != null) {
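
For illustration, a minimal sketch of the IINC and MULTIANEWARRAY forms documented above, assuming an int sits in local slot 1:

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    class IincMultiArraySketch {
        static void emit(MethodVisitor mv) {
            // iinc 1, 1 increments local slot 1 in place, without stack traffic
            mv.visitIincInsn(1, 1);
            // multianewarray pops one int size per dimension (here 3 x 4)
            mv.visitInsn(Opcodes.ICONST_3);
            mv.visitInsn(Opcodes.ICONST_4);
            mv.visitMultiANewArrayInsn("[[Ljava/lang/String;", 2);
        }
    }
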
@@ -500,17 +565,22 @@ public abstract class MethodVisitor {
     /**
      * Visits a try catch block.
      *
-     * @param start beginning of the exception handler's scope (inclusive).
-     * @param end end of the exception handler's scope (exclusive).
-     * @param handler beginning of the exception handler's code.
-     * @param type internal name of the type of exceptions handled by the
-     *        handler, or <tt>null</tt> to catch any exceptions (for "finally"
-     *        blocks).
-     * @throws IllegalArgumentException if one of the labels has already been
-     *         visited by this visitor (by the {@link #visitLabel visitLabel}
-     *         method).
+     * @param start
+     *            beginning of the exception handler's scope (inclusive).
+     * @param end
+     *            end of the exception handler's scope (exclusive).
+     * @param handler
+     *            beginning of the exception handler's code.
+     * @param type
+     *            internal name of the type of exceptions handled by the
+     *            handler, or <tt>null</tt> to catch any exceptions (for
+     *            "finally" blocks).
+     * @throws IllegalArgumentException
+     *             if one of the labels has already been visited by this visitor
+     *             (by the {@link #visitLabel visitLabel} method).
      */
-    public void visitTryCatchBlock(Label start, Label end, Label handler, String type) {
+    public void visitTryCatchBlock(Label start, Label end, Label handler,
+            String type) {
         if (mv != null) {
             mv.visitTryCatchBlock(start, end, handler, type);
         }
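
For illustration, a minimal try/catch sketch; the visitTryCatchBlock call precedes the visits of its labels, as required by the IllegalArgumentException note above:

    import scala.tools.asm.Label;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    class TryCatchSketch {
        // Guards the range start..end with a handler for java/lang/Exception;
        // a null type would instead produce a "finally"-style handler.
        static void emitGuarded(MethodVisitor mv) {
            Label start = new Label(), end = new Label(), handler = new Label();
            mv.visitTryCatchBlock(start, end, handler, "java/lang/Exception");
            mv.visitLabel(start);
            // ... protected instructions would be emitted here ...
            mv.visitLabel(end);
            mv.visitInsn(Opcodes.RETURN);
            mv.visitLabel(handler);
            mv.visitInsn(Opcodes.POP); // discard the caught exception
            mv.visitInsn(Opcodes.RETURN);
        }
    }
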
@@ -519,28 +589,28 @@ public abstract class MethodVisitor {
     /**
      * Visits a local variable declaration.
      *
-     * @param name the name of a local variable.
-     * @param desc the type descriptor of this local variable.
-     * @param signature the type signature of this local variable. May be
-     *        <tt>null</tt> if the local variable type does not use generic
-     *        types.
-     * @param start the first instruction corresponding to the scope of this
-     *        local variable (inclusive).
-     * @param end the last instruction corresponding to the scope of this local
-     *        variable (exclusive).
-     * @param index the local variable's index.
-     * @throws IllegalArgumentException if one of the labels has not already
-     *         been visited by this visitor (by the
-     *         {@link #visitLabel visitLabel} method).
+     * @param name
+     *            the name of a local variable.
+     * @param desc
+     *            the type descriptor of this local variable.
+     * @param signature
+     *            the type signature of this local variable. May be
+     *            <tt>null</tt> if the local variable type does not use generic
+     *            types.
+     * @param start
+     *            the first instruction corresponding to the scope of this local
+     *            variable (inclusive).
+     * @param end
+     *            the last instruction corresponding to the scope of this local
+     *            variable (exclusive).
+     * @param index
+     *            the local variable's index.
+     * @throws IllegalArgumentException
+     *             if one of the labels has not already been visited by this
+     *             visitor (by the {@link #visitLabel visitLabel} method).
      */
-    public void visitLocalVariable(
-        String name,
-        String desc,
-        String signature,
-        Label start,
-        Label end,
-        int index)
-    {
+    public void visitLocalVariable(String name, String desc, String signature,
+            Label start, Label end, int index) {
         if (mv != null) {
             mv.visitLocalVariable(name, desc, signature, start, end, index);
         }
@@ -549,12 +619,14 @@ public abstract class MethodVisitor {
     /**
      * Visits a line number declaration.
      *
-     * @param line a line number. This number refers to the source file from
-     *        which the class was compiled.
-     * @param start the first instruction corresponding to this line number.
-     * @throws IllegalArgumentException if <tt>start</tt> has not already been
-     *         visited by this visitor (by the {@link #visitLabel visitLabel}
-     *         method).
+     * @param line
+     *            a line number. This number refers to the source file from
+     *            which the class was compiled.
+     * @param start
+     *            the first instruction corresponding to this line number.
+     * @throws IllegalArgumentException
+     *             if <tt>start</tt> has not already been visited by this
+     *             visitor (by the {@link #visitLabel visitLabel} method).
      */
     public void visitLineNumber(int line, Label start) {
         if (mv != null) {
@@ -566,8 +638,10 @@ public abstract class MethodVisitor {
      * Visits the maximum stack size and the maximum number of local variables
      * of the method.
      *
-     * @param maxStack maximum stack size of the method.
-     * @param maxLocals maximum number of local variables for the method.
+     * @param maxStack
+     *            maximum stack size of the method.
+     * @param maxLocals
+     *            maximum number of local variables for the method.
      */
     public void visitMaxs(int maxStack, int maxLocals) {
         if (mv != null) {
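
For illustration, a minimal end-of-method sketch showing the usual ordering of debug information, visitMaxs and visitEnd, assuming bodyStart and bodyEnd were already passed to visitLabel; with COMPUTE_MAXS or COMPUTE_FRAMES the visitMaxs arguments are recomputed, but the call is still required:

    import scala.tools.asm.Label;
    import scala.tools.asm.MethodVisitor;

    class MethodEndSketch {
        static void finish(MethodVisitor mv, Label bodyStart, Label bodyEnd) {
            mv.visitLineNumber(42, bodyStart); // 42 is an illustrative source line
            mv.visitLocalVariable("count", "I", null, bodyStart, bodyEnd, 1);
            mv.visitMaxs(2, 2);
            mv.visitEnd();
        }
    }
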
diff --git a/src/asm/scala/tools/asm/MethodWriter.java b/src/asm/scala/tools/asm/MethodWriter.java
index 887cb28..87acab1 100644
--- a/src/asm/scala/tools/asm/MethodWriter.java
+++ b/src/asm/scala/tools/asm/MethodWriter.java
@@ -42,7 +42,7 @@ class MethodWriter extends MethodVisitor {
     /**
      * Pseudo access flag used to denote constructors.
      */
-    static final int ACC_CONSTRUCTOR = 262144;
+    static final int ACC_CONSTRUCTOR = 0x80000;
 
     /**
      * Frame has exactly the same locals as the previous stack map frame and
@@ -229,7 +229,7 @@ class MethodWriter extends MethodVisitor {
     private int maxLocals;
 
     /**
-     *  Number of local variables in the current stack map frame.
+     * Number of local variables in the current stack map frame.
      */
     private int currentLocals;
 
@@ -257,11 +257,6 @@ class MethodWriter extends MethodVisitor {
     private int[] previousFrame;
 
     /**
-     * Index of the next element to be added in {@link #frame}.
-     */
-    private int frameIndex;
-
-    /**
      * The current stack map frame. The first element contains the offset of the
      * instruction to which the frame corresponds, the second element is the
      * number of locals and the third one is the number of stack elements. The
@@ -357,7 +352,8 @@ class MethodWriter extends MethodVisitor {
      * A list of labels. This list is the list of basic blocks in the method,
      * i.e. a list of Label objects linked to each other by their
      * {@link Label#successor} field, in the order they are visited by
-     * {@link MethodVisitor#visitLabel}, and starting with the first basic block.
+     * {@link MethodVisitor#visitLabel}, and starting with the first basic
+     * block.
      */
     private Label labels;
 
@@ -396,28 +392,30 @@ class MethodWriter extends MethodVisitor {
     /**
      * Constructs a new {@link MethodWriter}.
      *
-     * @param cw the class writer in which the method must be added.
-     * @param access the method's access flags (see {@link Opcodes}).
-     * @param name the method's name.
-     * @param desc the method's descriptor (see {@link Type}).
-     * @param signature the method's signature. May be <tt>null</tt>.
-     * @param exceptions the internal names of the method's exceptions. May be
-     *        <tt>null</tt>.
-     * @param computeMaxs <tt>true</tt> if the maximum stack size and number
-     *        of local variables must be automatically computed.
-     * @param computeFrames <tt>true</tt> if the stack map tables must be
-     *        recomputed from scratch.
-     */
-    MethodWriter(
-        final ClassWriter cw,
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final String[] exceptions,
-        final boolean computeMaxs,
-        final boolean computeFrames)
-    {
+     * @param cw
+     *            the class writer in which the method must be added.
+     * @param access
+     *            the method's access flags (see {@link Opcodes}).
+     * @param name
+     *            the method's name.
+     * @param desc
+     *            the method's descriptor (see {@link Type}).
+     * @param signature
+     *            the method's signature. May be <tt>null</tt>.
+     * @param exceptions
+     *            the internal names of the method's exceptions. May be
+     *            <tt>null</tt>.
+     * @param computeMaxs
+     *            <tt>true</tt> if the maximum stack size and number of local
+     *            variables must be automatically computed.
+     * @param computeFrames
+     *            <tt>true</tt> if the stack map tables must be recomputed from
+     *            scratch.
+     */
+    MethodWriter(final ClassWriter cw, final int access, final String name,
+            final String desc, final String signature,
+            final String[] exceptions, final boolean computeMaxs,
+            final boolean computeFrames) {
         super(Opcodes.ASM4);
         if (cw.firstMethod == null) {
             cw.firstMethod = this;
@@ -427,6 +425,9 @@ class MethodWriter extends MethodVisitor {
         cw.lastMethod = this;
         this.cw = cw;
         this.access = access;
+        if ("<init>".equals(name)) {
+            this.access |= ACC_CONSTRUCTOR;
+        }
         this.name = cw.newUTF8(name);
         this.desc = cw.newUTF8(desc);
         this.descriptor = desc;
@@ -442,9 +443,6 @@ class MethodWriter extends MethodVisitor {
         }
         this.compute = computeFrames ? FRAMES : (computeMaxs ? MAXS : NOTHING);
         if (computeMaxs || computeFrames) {
-            if (computeFrames && "<init>".equals(name)) {
-                this.access |= ACC_CONSTRUCTOR;
-            }
             // updates maxLocals
             int size = Type.getArgumentsAndReturnSizes(descriptor) >> 2;
             if ((access & Opcodes.ACC_STATIC) != 0) {
@@ -473,10 +471,8 @@ class MethodWriter extends MethodVisitor {
     }
 
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public AnnotationVisitor visitAnnotation(final String desc,
+            final boolean visible) {
         if (!ClassReader.ANNOTATIONS) {
             return null;
         }
@@ -495,11 +491,8 @@ class MethodWriter extends MethodVisitor {
     }
 
     @Override
-    public AnnotationVisitor visitParameterAnnotation(
-        final int parameter,
-        final String desc,
-        final boolean visible)
-    {
+    public AnnotationVisitor visitParameterAnnotation(final int parameter,
+            final String desc, final boolean visible) {
         if (!ClassReader.ANNOTATIONS) {
             return null;
         }
@@ -545,20 +538,18 @@ class MethodWriter extends MethodVisitor {
     }
 
     @Override
-    public void visitFrame(
-        final int type,
-        final int nLocal,
-        final Object[] local,
-        final int nStack,
-        final Object[] stack)
-    {
+    public void visitFrame(final int type, final int nLocal,
+            final Object[] local, final int nStack, final Object[] stack) {
         if (!ClassReader.FRAMES || compute == FRAMES) {
             return;
         }
 
         if (type == Opcodes.F_NEW) {
+            if (previousFrame == null) {
+                visitImplicitFirstFrame();
+            }
             currentLocals = nLocal;
-            startFrame(code.length, nLocal, nStack);
+            int frameIndex = startFrame(code.length, nLocal, nStack);
             for (int i = 0; i < nLocal; ++i) {
                 if (local[i] instanceof String) {
                     frame[frameIndex++] = Frame.OBJECT
@@ -601,48 +592,44 @@ class MethodWriter extends MethodVisitor {
             }
 
             switch (type) {
-                case Opcodes.F_FULL:
-                    currentLocals = nLocal;
-                    stackMap.putByte(FULL_FRAME)
-                            .putShort(delta)
-                            .putShort(nLocal);
-                    for (int i = 0; i < nLocal; ++i) {
-                        writeFrameType(local[i]);
-                    }
-                    stackMap.putShort(nStack);
-                    for (int i = 0; i < nStack; ++i) {
-                        writeFrameType(stack[i]);
-                    }
-                    break;
-                case Opcodes.F_APPEND:
-                    currentLocals += nLocal;
-                    stackMap.putByte(SAME_FRAME_EXTENDED + nLocal)
-                            .putShort(delta);
-                    for (int i = 0; i < nLocal; ++i) {
-                        writeFrameType(local[i]);
-                    }
-                    break;
-                case Opcodes.F_CHOP:
-                    currentLocals -= nLocal;
-                    stackMap.putByte(SAME_FRAME_EXTENDED - nLocal)
+            case Opcodes.F_FULL:
+                currentLocals = nLocal;
+                stackMap.putByte(FULL_FRAME).putShort(delta).putShort(nLocal);
+                for (int i = 0; i < nLocal; ++i) {
+                    writeFrameType(local[i]);
+                }
+                stackMap.putShort(nStack);
+                for (int i = 0; i < nStack; ++i) {
+                    writeFrameType(stack[i]);
+                }
+                break;
+            case Opcodes.F_APPEND:
+                currentLocals += nLocal;
+                stackMap.putByte(SAME_FRAME_EXTENDED + nLocal).putShort(delta);
+                for (int i = 0; i < nLocal; ++i) {
+                    writeFrameType(local[i]);
+                }
+                break;
+            case Opcodes.F_CHOP:
+                currentLocals -= nLocal;
+                stackMap.putByte(SAME_FRAME_EXTENDED - nLocal).putShort(delta);
+                break;
+            case Opcodes.F_SAME:
+                if (delta < 64) {
+                    stackMap.putByte(delta);
+                } else {
+                    stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
+                }
+                break;
+            case Opcodes.F_SAME1:
+                if (delta < 64) {
+                    stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
+                } else {
+                    stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
                             .putShort(delta);
-                    break;
-                case Opcodes.F_SAME:
-                    if (delta < 64) {
-                        stackMap.putByte(delta);
-                    } else {
-                        stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
-                    }
-                    break;
-                case Opcodes.F_SAME1:
-                    if (delta < 64) {
-                        stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
-                    } else {
-                        stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
-                                .putShort(delta);
-                    }
-                    writeFrameType(stack[0]);
-                    break;
+                }
+                writeFrameType(stack[0]);
+                break;
             }
 
             previousFrameOffset = code.length;
@@ -672,8 +659,7 @@ class MethodWriter extends MethodVisitor {
             }
             // if opcode == ATHROW or xRETURN, ends current block (no successor)
             if ((opcode >= Opcodes.IRETURN && opcode <= Opcodes.RETURN)
-                    || opcode == Opcodes.ATHROW)
-            {
+                    || opcode == Opcodes.ATHROW) {
                 noSuccessor();
             }
         }
@@ -731,8 +717,7 @@ class MethodWriter extends MethodVisitor {
             // updates max locals
             int n;
             if (opcode == Opcodes.LLOAD || opcode == Opcodes.DLOAD
-                    || opcode == Opcodes.LSTORE || opcode == Opcodes.DSTORE)
-            {
+                    || opcode == Opcodes.LSTORE || opcode == Opcodes.DSTORE) {
                 n = var + 2;
             } else {
                 n = var + 1;
@@ -784,12 +769,8 @@ class MethodWriter extends MethodVisitor {
     }
 
     @Override
-    public void visitFieldInsn(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitFieldInsn(final int opcode, final String owner,
+            final String name, final String desc) {
         Item i = cw.newFieldItem(owner, name, desc);
         // Label currentBlock = this.currentBlock;
         if (currentBlock != null) {
@@ -800,19 +781,19 @@ class MethodWriter extends MethodVisitor {
                 // computes the stack size variation
                 char c = desc.charAt(0);
                 switch (opcode) {
-                    case Opcodes.GETSTATIC:
-                        size = stackSize + (c == 'D' || c == 'J' ? 2 : 1);
-                        break;
-                    case Opcodes.PUTSTATIC:
-                        size = stackSize + (c == 'D' || c == 'J' ? -2 : -1);
-                        break;
-                    case Opcodes.GETFIELD:
-                        size = stackSize + (c == 'D' || c == 'J' ? 1 : 0);
-                        break;
-                    // case Constants.PUTFIELD:
-                    default:
-                        size = stackSize + (c == 'D' || c == 'J' ? -3 : -2);
-                        break;
+                case Opcodes.GETSTATIC:
+                    size = stackSize + (c == 'D' || c == 'J' ? 2 : 1);
+                    break;
+                case Opcodes.PUTSTATIC:
+                    size = stackSize + (c == 'D' || c == 'J' ? -2 : -1);
+                    break;
+                case Opcodes.GETFIELD:
+                    size = stackSize + (c == 'D' || c == 'J' ? 1 : 0);
+                    break;
+                // case Constants.PUTFIELD:
+                default:
+                    size = stackSize + (c == 'D' || c == 'J' ? -3 : -2);
+                    break;
                 }
                 // updates current and max stack sizes
                 if (size > maxStackSize) {
@@ -826,12 +807,8 @@ class MethodWriter extends MethodVisitor {
     }
 
     @Override
-    public void visitMethodInsn(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitMethodInsn(final int opcode, final String owner,
+            final String name, final String desc) {
         boolean itf = opcode == Opcodes.INVOKEINTERFACE;
         Item i = cw.newMethodItem(owner, name, desc, itf);
         int argSize = i.intVal;
@@ -882,12 +859,8 @@ class MethodWriter extends MethodVisitor {
     }
 
     @Override
-    public void visitInvokeDynamicInsn(
-        final String name,
-        final String desc,
-        final Handle bsm,
-        final Object... bsmArgs)
-    {
+    public void visitInvokeDynamicInsn(final String name, final String desc,
+            final Handle bsm, final Object... bsmArgs) {
         Item i = cw.newInvokeDynamicItem(name, desc, bsm, bsmArgs);
         int argSize = i.intVal;
         // Label currentBlock = this.currentBlock;
@@ -967,8 +940,7 @@ class MethodWriter extends MethodVisitor {
         }
         // adds the instruction to the bytecode of the method
         if ((label.status & Label.RESOLVED) != 0
-                && label.position - code.length < Short.MIN_VALUE)
-        {
+                && label.position - code.length < Short.MIN_VALUE) {
             /*
              * case of a backward jump with an offset < -32768. In this case we
              * automatically replace GOTO with GOTO_W, JSR with JSR_W and IFxxx
@@ -986,8 +958,7 @@ class MethodWriter extends MethodVisitor {
                 if (nextInsn != null) {
                     nextInsn.status |= Label.TARGET;
                 }
-                code.putByte(opcode <= 166
-                        ? ((opcode + 1) ^ 1) - 1
+                code.putByte(opcode <= 166 ? ((opcode + 1) ^ 1) - 1
                         : opcode ^ 1);
                 code.putShort(8); // jump offset
                 code.putByte(200); // GOTO_W
@@ -1082,8 +1053,7 @@ class MethodWriter extends MethodVisitor {
             } else {
                 int size;
                 // computes the stack size variation
-                if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE)
-                {
+                if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE) {
                     size = stackSize + 2;
                 } else {
                     size = stackSize + 1;
@@ -1122,8 +1092,7 @@ class MethodWriter extends MethodVisitor {
         }
         // adds the instruction to the bytecode of the method
         if ((var > 255) || (increment > 127) || (increment < -128)) {
-            code.putByte(196 /* WIDE */)
-                    .put12(Opcodes.IINC, var)
+            code.putByte(196 /* WIDE */).put12(Opcodes.IINC, var)
                     .putShort(increment);
         } else {
             code.putByte(Opcodes.IINC).put11(var, increment);
@@ -1131,12 +1100,8 @@ class MethodWriter extends MethodVisitor {
     }
 
     @Override
-    public void visitTableSwitchInsn(
-        final int min,
-        final int max,
-        final Label dflt,
-        final Label... labels)
-    {
+    public void visitTableSwitchInsn(final int min, final int max,
+            final Label dflt, final Label... labels) {
         // adds the instruction to the bytecode of the method
         int source = code.length;
         code.putByte(Opcodes.TABLESWITCH);
@@ -1151,11 +1116,8 @@ class MethodWriter extends MethodVisitor {
     }
 
     @Override
-    public void visitLookupSwitchInsn(
-        final Label dflt,
-        final int[] keys,
-        final Label[] labels)
-    {
+    public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+            final Label[] labels) {
         // adds the instruction to the bytecode of the method
         int source = code.length;
         code.putByte(Opcodes.LOOKUPSWITCH);
@@ -1214,12 +1176,8 @@ class MethodWriter extends MethodVisitor {
     }
 
     @Override
-    public void visitTryCatchBlock(
-        final Label start,
-        final Label end,
-        final Label handler,
-        final String type)
-    {
+    public void visitTryCatchBlock(final Label start, final Label end,
+            final Label handler, final String type) {
         ++handlerCount;
         Handler h = new Handler();
         h.start = start;
@@ -1236,14 +1194,9 @@ class MethodWriter extends MethodVisitor {
     }
 
     @Override
-    public void visitLocalVariable(
-        final String name,
-        final String desc,
-        final String signature,
-        final Label start,
-        final Label end,
-        final int index)
-    {
+    public void visitLocalVariable(final String name, final String desc,
+            final String signature, final Label start, final Label end,
+            final int index) {
         if (signature != null) {
             if (localVarType == null) {
                 localVarType = new ByteVector();
@@ -1251,8 +1204,7 @@ class MethodWriter extends MethodVisitor {
             ++localVarTypeCount;
             localVarType.putShort(start.position)
                     .putShort(end.position - start.position)
-                    .putShort(cw.newUTF8(name))
-                    .putShort(cw.newUTF8(signature))
+                    .putShort(cw.newUTF8(name)).putShort(cw.newUTF8(signature))
                     .putShort(index);
         }
         if (localVar == null) {
@@ -1261,8 +1213,7 @@ class MethodWriter extends MethodVisitor {
         ++localVarCount;
         localVar.putShort(start.position)
                 .putShort(end.position - start.position)
-                .putShort(cw.newUTF8(name))
-                .putShort(cw.newUTF8(desc))
+                .putShort(cw.newUTF8(name)).putShort(cw.newUTF8(desc))
                 .putShort(index);
         if (compute != NOTHING) {
             // updates max locals
@@ -1294,8 +1245,7 @@ class MethodWriter extends MethodVisitor {
                 Label h = handler.handler.getFirst();
                 Label e = handler.end.getFirst();
                 // computes the kind of the edges to 'h'
-                String t = handler.desc == null
-                        ? "java/lang/Throwable"
+                String t = handler.desc == null ? "java/lang/Throwable"
                         : handler.desc;
                 int kind = Frame.OBJECT | cw.addType(t);
                 // h is an exception handler
@@ -1382,11 +1332,12 @@ class MethodWriter extends MethodVisitor {
                         }
                         code.data[end] = (byte) Opcodes.ATHROW;
                         // emits a frame for this unreachable block
-                        startFrame(start, 0, 1);
-                        frame[frameIndex++] = Frame.OBJECT
+                        int frameIndex = startFrame(start, 0, 1);
+                        frame[frameIndex] = Frame.OBJECT
                                 | cw.addType("java/lang/Throwable");
                         endFrame();
-                        // removes the start-end range from the exception handlers
+                        // removes the start-end range from the exception
+                        // handlers
                         firstHandler = Handler.remove(firstHandler, l, k);
                     }
                 }
@@ -1535,8 +1486,10 @@ class MethodWriter extends MethodVisitor {
     /**
      * Adds a successor to the {@link #currentBlock currentBlock} block.
      *
-     * @param info information about the control flow edge to be added.
-     * @param successor the successor block to be added to the current block.
+     * @param info
+     *            information about the control flow edge to be added.
+     * @param successor
+     *            the successor block to be added to the current block.
      */
     private void addSuccessor(final int info, final Label successor) {
         // creates and initializes an Edge object...
@@ -1573,7 +1526,8 @@ class MethodWriter extends MethodVisitor {
     /**
      * Visits a frame that has been computed from scratch.
      *
-     * @param f the frame that must be visited.
+     * @param f
+     *            the frame that must be visited.
      */
     private void visitFrame(final Frame f) {
         int i, t;
@@ -1606,7 +1560,7 @@ class MethodWriter extends MethodVisitor {
             }
         }
         // visits the frame and its content
-        startFrame(f.owner.position, nLocal, nStack);
+        int frameIndex = startFrame(f.owner.position, nLocal, nStack);
         for (i = 0; nLocal > 0; ++i, --nLocal) {
             t = locals[i];
             frame[frameIndex++] = t;
@@ -1625,15 +1579,78 @@ class MethodWriter extends MethodVisitor {
     }
 
     /**
+     * Visit the implicit first frame of this method.
+     */
+    private void visitImplicitFirstFrame() {
+        // There can be at most descriptor.length() + 1 locals
+        int frameIndex = startFrame(0, descriptor.length() + 1, 0);
+        if ((access & Opcodes.ACC_STATIC) == 0) {
+            if ((access & ACC_CONSTRUCTOR) == 0) {
+                frame[frameIndex++] = Frame.OBJECT | cw.addType(cw.thisName);
+            } else {
+                frame[frameIndex++] = 6; // Opcodes.UNINITIALIZED_THIS;
+            }
+        }
+        int i = 1;
+        loop: while (true) {
+            int j = i;
+            switch (descriptor.charAt(i++)) {
+            case 'Z':
+            case 'C':
+            case 'B':
+            case 'S':
+            case 'I':
+                frame[frameIndex++] = 1; // Opcodes.INTEGER;
+                break;
+            case 'F':
+                frame[frameIndex++] = 2; // Opcodes.FLOAT;
+                break;
+            case 'J':
+                frame[frameIndex++] = 4; // Opcodes.LONG;
+                break;
+            case 'D':
+                frame[frameIndex++] = 3; // Opcodes.DOUBLE;
+                break;
+            case '[':
+                while (descriptor.charAt(i) == '[') {
+                    ++i;
+                }
+                if (descriptor.charAt(i) == 'L') {
+                    ++i;
+                    while (descriptor.charAt(i) != ';') {
+                        ++i;
+                    }
+                }
+                frame[frameIndex++] = Frame.OBJECT
+                        | cw.addType(descriptor.substring(j, ++i));
+                break;
+            case 'L':
+                while (descriptor.charAt(i) != ';') {
+                    ++i;
+                }
+                frame[frameIndex++] = Frame.OBJECT
+                        | cw.addType(descriptor.substring(j + 1, i++));
+                break;
+            default:
+                break loop;
+            }
+        }
+        frame[1] = frameIndex - 3;
+        endFrame();
+    }
+
+    /**
      * Starts the visit of a stack map frame.
      *
-     * @param offset the offset of the instruction to which the frame
-     *        corresponds.
-     * @param nLocal the number of local variables in the frame.
-     * @param nStack the number of stack elements in the frame.
-     */
-    private void startFrame(final int offset, final int nLocal, final int nStack)
-    {
+     * @param offset
+     *            the offset of the instruction to which the frame corresponds.
+     * @param nLocal
+     *            the number of local variables in the frame.
+     * @param nStack
+     *            the number of stack elements in the frame.
+     * @return the index of the next element to be written in this frame.
+     */
+    private int startFrame(final int offset, final int nLocal, final int nStack) {
         int n = 3 + nLocal + nStack;
         if (frame == null || frame.length < n) {
             frame = new int[n];
@@ -1641,7 +1658,7 @@ class MethodWriter extends MethodVisitor {
         frame[0] = offset;
         frame[1] = nLocal;
         frame[2] = nStack;
-        frameIndex = 3;
+        return 3;
     }
 
     /**
@@ -1686,24 +1703,23 @@ class MethodWriter extends MethodVisitor {
         if (cstackSize == 0) {
             k = clocalsSize - localsSize;
             switch (k) {
-                case -3:
-                case -2:
-                case -1:
-                    type = CHOP_FRAME;
-                    localsSize = clocalsSize;
-                    break;
-                case 0:
-                    type = delta < 64 ? SAME_FRAME : SAME_FRAME_EXTENDED;
-                    break;
-                case 1:
-                case 2:
-                case 3:
-                    type = APPEND_FRAME;
-                    break;
+            case -3:
+            case -2:
+            case -1:
+                type = CHOP_FRAME;
+                localsSize = clocalsSize;
+                break;
+            case 0:
+                type = delta < 64 ? SAME_FRAME : SAME_FRAME_EXTENDED;
+                break;
+            case 1:
+            case 2:
+            case 3:
+                type = APPEND_FRAME;
+                break;
             }
         } else if (clocalsSize == localsSize && cstackSize == 1) {
-            type = delta < 63
-                    ? SAME_LOCALS_1_STACK_ITEM_FRAME
+            type = delta < 63 ? SAME_LOCALS_1_STACK_ITEM_FRAME
                     : SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED;
         }
         if (type != FULL_FRAME) {
@@ -1718,36 +1734,34 @@ class MethodWriter extends MethodVisitor {
             }
         }
         switch (type) {
-            case SAME_FRAME:
-                stackMap.putByte(delta);
-                break;
-            case SAME_LOCALS_1_STACK_ITEM_FRAME:
-                stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
-                writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
-                break;
-            case SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED:
-                stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
-                        .putShort(delta);
-                writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
-                break;
-            case SAME_FRAME_EXTENDED:
-                stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
-                break;
-            case CHOP_FRAME:
-                stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
-                break;
-            case APPEND_FRAME:
-                stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
-                writeFrameTypes(3 + localsSize, 3 + clocalsSize);
-                break;
-            // case FULL_FRAME:
-            default:
-                stackMap.putByte(FULL_FRAME)
-                        .putShort(delta)
-                        .putShort(clocalsSize);
-                writeFrameTypes(3, 3 + clocalsSize);
-                stackMap.putShort(cstackSize);
-                writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize);
+        case SAME_FRAME:
+            stackMap.putByte(delta);
+            break;
+        case SAME_LOCALS_1_STACK_ITEM_FRAME:
+            stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
+            writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
+            break;
+        case SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED:
+            stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED).putShort(
+                    delta);
+            writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
+            break;
+        case SAME_FRAME_EXTENDED:
+            stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
+            break;
+        case CHOP_FRAME:
+            stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
+            break;
+        case APPEND_FRAME:
+            stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
+            writeFrameTypes(3 + localsSize, 3 + clocalsSize);
+            break;
+        // case FULL_FRAME:
+        default:
+            stackMap.putByte(FULL_FRAME).putShort(delta).putShort(clocalsSize);
+            writeFrameTypes(3, 3 + clocalsSize);
+            stackMap.putShort(cstackSize);
+            writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize);
         }
     }
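
For orientation, a minimal stand-alone sketch of the frame-kind selection reindented above (illustrative names only; the later check that the locals actually match, which can still force FULL_FRAME, is omitted):

    final class FrameKindSketch {
        // k is (current locals - previous locals), cstackSize the current stack
        // size, delta the bytecode offset since the previous frame.
        static String chooseKind(int k, int cstackSize, int delta) {
            if (cstackSize == 0) {
                if (k >= -3 && k <= -1) return "CHOP_FRAME";
                if (k == 0) return delta < 64 ? "SAME_FRAME" : "SAME_FRAME_EXTENDED";
                if (k >= 1 && k <= 3) return "APPEND_FRAME";
            } else if (k == 0 && cstackSize == 1) {
                return delta < 63 ? "SAME_LOCALS_1_STACK_ITEM_FRAME"
                                  : "SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED";
            }
            return "FULL_FRAME";
        }
    }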
 
@@ -1757,8 +1771,10 @@ class MethodWriter extends MethodVisitor {
      * in {@link Label} to the format used in StackMapTable attributes. In
      * particular, it converts type table indexes to constant pool indexes.
      *
-     * @param start index of the first type in {@link #frame} to write.
-     * @param end index of last type in {@link #frame} to write (exclusive).
+     * @param start
+     *            index of the first type in {@link #frame} to write.
+     * @param end
+     *            index of last type in {@link #frame} to write (exclusive).
      */
     private void writeFrameTypes(final int start, final int end) {
         for (int i = start; i < end; ++i) {
@@ -1767,15 +1783,15 @@ class MethodWriter extends MethodVisitor {
             if (d == 0) {
                 int v = t & Frame.BASE_VALUE;
                 switch (t & Frame.BASE_KIND) {
-                    case Frame.OBJECT:
-                        stackMap.putByte(7)
-                                .putShort(cw.newClass(cw.typeTable[v].strVal1));
-                        break;
-                    case Frame.UNINITIALIZED:
-                        stackMap.putByte(8).putShort(cw.typeTable[v].intVal);
-                        break;
-                    default:
-                        stackMap.putByte(v);
+                case Frame.OBJECT:
+                    stackMap.putByte(7).putShort(
+                            cw.newClass(cw.typeTable[v].strVal1));
+                    break;
+                case Frame.UNINITIALIZED:
+                    stackMap.putByte(8).putShort(cw.typeTable[v].intVal);
+                    break;
+                default:
+                    stackMap.putByte(v);
                 }
             } else {
                 StringBuffer buf = new StringBuffer();
@@ -1789,29 +1805,29 @@ class MethodWriter extends MethodVisitor {
                     buf.append(';');
                 } else {
                     switch (t & 0xF) {
-                        case 1:
-                            buf.append('I');
-                            break;
-                        case 2:
-                            buf.append('F');
-                            break;
-                        case 3:
-                            buf.append('D');
-                            break;
-                        case 9:
-                            buf.append('Z');
-                            break;
-                        case 10:
-                            buf.append('B');
-                            break;
-                        case 11:
-                            buf.append('C');
-                            break;
-                        case 12:
-                            buf.append('S');
-                            break;
-                        default:
-                            buf.append('J');
+                    case 1:
+                        buf.append('I');
+                        break;
+                    case 2:
+                        buf.append('F');
+                        break;
+                    case 3:
+                        buf.append('D');
+                        break;
+                    case 9:
+                        buf.append('Z');
+                        break;
+                    case 10:
+                        buf.append('B');
+                        break;
+                    case 11:
+                        buf.append('C');
+                        break;
+                    case 12:
+                        buf.append('S');
+                        break;
+                    default:
+                        buf.append('J');
                     }
                 }
                 stackMap.putByte(7).putShort(cw.newClass(buf.toString()));
@@ -1880,10 +1896,7 @@ class MethodWriter extends MethodVisitor {
                 size += 8 + stackMap.length;
             }
             if (cattrs != null) {
-                size += cattrs.getSize(cw,
-                        code.data,
-                        code.length,
-                        maxStack,
+                size += cattrs.getSize(cw, code.data, code.length, maxStack,
                         maxLocals);
             }
         }
@@ -1891,11 +1904,12 @@ class MethodWriter extends MethodVisitor {
             cw.newUTF8("Exceptions");
             size += 8 + 2 * exceptionCount;
         }
-        if ((access & Opcodes.ACC_SYNTHETIC) != 0
-                && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
-        {
-            cw.newUTF8("Synthetic");
-            size += 6;
+        if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+            if ((cw.version & 0xFFFF) < Opcodes.V1_5
+                    || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+                cw.newUTF8("Synthetic");
+                size += 6;
+            }
         }
         if ((access & Opcodes.ACC_DEPRECATED) != 0) {
             cw.newUTF8("Deprecated");
@@ -1941,13 +1955,15 @@ class MethodWriter extends MethodVisitor {
     /**
      * Puts the bytecode of this method in the given byte vector.
      *
-     * @param out the byte vector into which the bytecode of this method must be
-     *        copied.
+     * @param out
+     *            the byte vector into which the bytecode of this method must be
+     *            copied.
      */
     final void put(final ByteVector out) {
-        int mask = Opcodes.ACC_DEPRECATED
+        final int FACTOR = ClassWriter.TO_ACC_SYNTHETIC;
+        int mask = ACC_CONSTRUCTOR | Opcodes.ACC_DEPRECATED
                 | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
-                | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
+                | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / FACTOR);
         out.putShort(access & ~mask).putShort(name).putShort(desc);
         if (classReaderOffset != 0) {
             out.putByteArray(cw.cr.b, classReaderOffset, classReaderLength);
@@ -1960,10 +1976,11 @@ class MethodWriter extends MethodVisitor {
         if (exceptionCount > 0) {
             ++attributeCount;
         }
-        if ((access & Opcodes.ACC_SYNTHETIC) != 0
-                && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
-        {
-            ++attributeCount;
+        if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+            if ((cw.version & 0xFFFF) < Opcodes.V1_5
+                    || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+                ++attributeCount;
+            }
         }
         if ((access & Opcodes.ACC_DEPRECATED) != 0) {
             ++attributeCount;
@@ -2005,10 +2022,7 @@ class MethodWriter extends MethodVisitor {
                 size += 8 + stackMap.length;
             }
             if (cattrs != null) {
-                size += cattrs.getSize(cw,
-                        code.data,
-                        code.length,
-                        maxStack,
+                size += cattrs.getSize(cw, code.data, code.length, maxStack,
                         maxLocals);
             }
             out.putShort(cw.newUTF8("Code")).putInt(size);
@@ -2018,10 +2032,8 @@ class MethodWriter extends MethodVisitor {
             if (handlerCount > 0) {
                 Handler h = firstHandler;
                 while (h != null) {
-                    out.putShort(h.start.position)
-                            .putShort(h.end.position)
-                            .putShort(h.handler.position)
-                            .putShort(h.type);
+                    out.putShort(h.start.position).putShort(h.end.position)
+                            .putShort(h.handler.position).putShort(h.type);
                     h = h.next;
                 }
             }
@@ -2068,24 +2080,24 @@ class MethodWriter extends MethodVisitor {
             }
         }
         if (exceptionCount > 0) {
-            out.putShort(cw.newUTF8("Exceptions"))
-                    .putInt(2 * exceptionCount + 2);
+            out.putShort(cw.newUTF8("Exceptions")).putInt(
+                    2 * exceptionCount + 2);
             out.putShort(exceptionCount);
             for (int i = 0; i < exceptionCount; ++i) {
                 out.putShort(exceptions[i]);
             }
         }
-        if ((access & Opcodes.ACC_SYNTHETIC) != 0
-                && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
-        {
-            out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+        if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+            if ((cw.version & 0xFFFF) < Opcodes.V1_5
+                    || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+                out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+            }
         }
         if ((access & Opcodes.ACC_DEPRECATED) != 0) {
             out.putShort(cw.newUTF8("Deprecated")).putInt(0);
         }
         if (ClassReader.SIGNATURES && signature != null) {
-            out.putShort(cw.newUTF8("Signature"))
-                    .putInt(2)
+            out.putShort(cw.newUTF8("Signature")).putInt(2)
                     .putShort(cw.newUTF8(signature));
         }
         if (ClassReader.ANNOTATIONS && annd != null) {
@@ -2128,10 +2140,12 @@ class MethodWriter extends MethodVisitor {
      * 32768, in which case IFEQ 32766 must be replaced with IFNEQ 8 GOTO_W
      * 32765. This, in turn, may require to increase the size of another jump
      * instruction, and so on... All these operations are handled automatically
-     * by this method. <p> <i>This method must be called after all the method
-     * that is being built has been visited</i>. In particular, the
-     * {@link Label Label} objects used to construct the method are no longer
-     * valid after this method has been called.
+     * by this method.
+     * <p>
+     * <i>This method must be called after all the method that is being built
+     * has been visited</i>. In particular, the {@link Label Label} objects used
+     * to construct the method are no longer valid after this method has been
+     * called.
      */
     private void resizeInstructions() {
         byte[] b = code.data; // bytecode of the method
@@ -2181,158 +2195,14 @@ class MethodWriter extends MethodVisitor {
                 int insert = 0; // bytes to be added after this instruction
 
                 switch (ClassWriter.TYPE[opcode]) {
-                    case ClassWriter.NOARG_INSN:
-                    case ClassWriter.IMPLVAR_INSN:
-                        u += 1;
-                        break;
-                    case ClassWriter.LABEL_INSN:
-                        if (opcode > 201) {
-                            // converts temporary opcodes 202 to 217, 218 and
-                            // 219 to IFEQ ... JSR (inclusive), IFNULL and
-                            // IFNONNULL
-                            opcode = opcode < 218 ? opcode - 49 : opcode - 20;
-                            label = u + readUnsignedShort(b, u + 1);
-                        } else {
-                            label = u + readShort(b, u + 1);
-                        }
-                        newOffset = getNewOffset(allIndexes, allSizes, u, label);
-                        if (newOffset < Short.MIN_VALUE
-                                || newOffset > Short.MAX_VALUE)
-                        {
-                            if (!resize[u]) {
-                                if (opcode == Opcodes.GOTO
-                                        || opcode == Opcodes.JSR)
-                                {
-                                    // two additional bytes will be required to
-                                    // replace this GOTO or JSR instruction with
-                                    // a GOTO_W or a JSR_W
-                                    insert = 2;
-                                } else {
-                                    // five additional bytes will be required to
-                                    // replace this IFxxx <l> instruction with
-                                    // IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx
-                                    // is the "opposite" opcode of IFxxx (i.e.,
-                                    // IFNE for IFEQ) and where <l'> designates
-                                    // the instruction just after the GOTO_W.
-                                    insert = 5;
-                                }
-                                resize[u] = true;
-                            }
-                        }
-                        u += 3;
-                        break;
-                    case ClassWriter.LABELW_INSN:
-                        u += 5;
-                        break;
-                    case ClassWriter.TABL_INSN:
-                        if (state == 1) {
-                            // true number of bytes to be added (or removed)
-                            // from this instruction = (future number of padding
-                            // bytes - current number of padding byte) -
-                            // previously over estimated variation =
-                            // = ((3 - newOffset%4) - (3 - u%4)) - u%4
-                            // = (-newOffset%4 + u%4) - u%4
-                            // = -(newOffset & 3)
-                            newOffset = getNewOffset(allIndexes, allSizes, 0, u);
-                            insert = -(newOffset & 3);
-                        } else if (!resize[u]) {
-                            // over estimation of the number of bytes to be
-                            // added to this instruction = 3 - current number
-                            // of padding bytes = 3 - (3 - u%4) = u%4 = u & 3
-                            insert = u & 3;
-                            resize[u] = true;
-                        }
-                        // skips instruction
-                        u = u + 4 - (u & 3);
-                        u += 4 * (readInt(b, u + 8) - readInt(b, u + 4) + 1) + 12;
-                        break;
-                    case ClassWriter.LOOK_INSN:
-                        if (state == 1) {
-                            // like TABL_INSN
-                            newOffset = getNewOffset(allIndexes, allSizes, 0, u);
-                            insert = -(newOffset & 3);
-                        } else if (!resize[u]) {
-                            // like TABL_INSN
-                            insert = u & 3;
-                            resize[u] = true;
-                        }
-                        // skips instruction
-                        u = u + 4 - (u & 3);
-                        u += 8 * readInt(b, u + 4) + 8;
-                        break;
-                    case ClassWriter.WIDE_INSN:
-                        opcode = b[u + 1] & 0xFF;
-                        if (opcode == Opcodes.IINC) {
-                            u += 6;
-                        } else {
-                            u += 4;
-                        }
-                        break;
-                    case ClassWriter.VAR_INSN:
-                    case ClassWriter.SBYTE_INSN:
-                    case ClassWriter.LDC_INSN:
-                        u += 2;
-                        break;
-                    case ClassWriter.SHORT_INSN:
-                    case ClassWriter.LDCW_INSN:
-                    case ClassWriter.FIELDORMETH_INSN:
-                    case ClassWriter.TYPE_INSN:
-                    case ClassWriter.IINC_INSN:
-                        u += 3;
-                        break;
-                    case ClassWriter.ITFMETH_INSN:
-                    case ClassWriter.INDYMETH_INSN:
-                        u += 5;
-                        break;
-                    // case ClassWriter.MANA_INSN:
-                    default:
-                        u += 4;
-                        break;
-                }
-                if (insert != 0) {
-                    // adds a new (u, insert) entry in the allIndexes and
-                    // allSizes arrays
-                    int[] newIndexes = new int[allIndexes.length + 1];
-                    int[] newSizes = new int[allSizes.length + 1];
-                    System.arraycopy(allIndexes,
-                            0,
-                            newIndexes,
-                            0,
-                            allIndexes.length);
-                    System.arraycopy(allSizes, 0, newSizes, 0, allSizes.length);
-                    newIndexes[allIndexes.length] = u;
-                    newSizes[allSizes.length] = insert;
-                    allIndexes = newIndexes;
-                    allSizes = newSizes;
-                    if (insert > 0) {
-                        state = 3;
-                    }
-                }
-            }
-            if (state < 3) {
-                --state;
-            }
-        } while (state != 0);
-
-        // 2nd step:
-        // copies the bytecode of the method into a new bytevector, updates the
-        // offsets, and inserts (or removes) bytes as requested.
-
-        ByteVector newCode = new ByteVector(code.length);
-
-        u = 0;
-        while (u < code.length) {
-            int opcode = b[u] & 0xFF;
-            switch (ClassWriter.TYPE[opcode]) {
                 case ClassWriter.NOARG_INSN:
                 case ClassWriter.IMPLVAR_INSN:
-                    newCode.putByte(opcode);
                     u += 1;
                     break;
                 case ClassWriter.LABEL_INSN:
                     if (opcode > 201) {
-                        // changes temporary opcodes 202 to 217 (inclusive), 218
-                        // and 219 to IFEQ ... JSR (inclusive), IFNULL and
+                        // converts temporary opcodes 202 to 217, 218 and
+                        // 219 to IFEQ ... JSR (inclusive), IFNULL and
                         // IFNONNULL
                         opcode = opcode < 218 ? opcode - 49 : opcode - 20;
                         label = u + readUnsignedShort(b, u + 1);
@@ -2340,100 +2210,78 @@ class MethodWriter extends MethodVisitor {
                         label = u + readShort(b, u + 1);
                     }
                     newOffset = getNewOffset(allIndexes, allSizes, u, label);
-                    if (resize[u]) {
-                        // replaces GOTO with GOTO_W, JSR with JSR_W and IFxxx
-                        // <l> with IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx is
-                        // the "opposite" opcode of IFxxx (i.e., IFNE for IFEQ)
-                        // and where <l'> designates the instruction just after
-                        // the GOTO_W.
-                        if (opcode == Opcodes.GOTO) {
-                            newCode.putByte(200); // GOTO_W
-                        } else if (opcode == Opcodes.JSR) {
-                            newCode.putByte(201); // JSR_W
-                        } else {
-                            newCode.putByte(opcode <= 166
-                                    ? ((opcode + 1) ^ 1) - 1
-                                    : opcode ^ 1);
-                            newCode.putShort(8); // jump offset
-                            newCode.putByte(200); // GOTO_W
-                            // newOffset now computed from start of GOTO_W
-                            newOffset -= 3;
+                    if (newOffset < Short.MIN_VALUE
+                            || newOffset > Short.MAX_VALUE) {
+                        if (!resize[u]) {
+                            if (opcode == Opcodes.GOTO || opcode == Opcodes.JSR) {
+                                // two additional bytes will be required to
+                                // replace this GOTO or JSR instruction with
+                                // a GOTO_W or a JSR_W
+                                insert = 2;
+                            } else {
+                                // five additional bytes will be required to
+                                // replace this IFxxx <l> instruction with
+                                // IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx
+                                // is the "opposite" opcode of IFxxx (i.e.,
+                                // IFNE for IFEQ) and where <l'> designates
+                                // the instruction just after the GOTO_W.
+                                insert = 5;
+                            }
+                            resize[u] = true;
                         }
-                        newCode.putInt(newOffset);
-                    } else {
-                        newCode.putByte(opcode);
-                        newCode.putShort(newOffset);
                     }
                     u += 3;
                     break;
                 case ClassWriter.LABELW_INSN:
-                    label = u + readInt(b, u + 1);
-                    newOffset = getNewOffset(allIndexes, allSizes, u, label);
-                    newCode.putByte(opcode);
-                    newCode.putInt(newOffset);
                     u += 5;
                     break;
                 case ClassWriter.TABL_INSN:
-                    // skips 0 to 3 padding bytes
-                    v = u;
-                    u = u + 4 - (v & 3);
-                    // reads and copies instruction
-                    newCode.putByte(Opcodes.TABLESWITCH);
-                    newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
-                    label = v + readInt(b, u);
-                    u += 4;
-                    newOffset = getNewOffset(allIndexes, allSizes, v, label);
-                    newCode.putInt(newOffset);
-                    j = readInt(b, u);
-                    u += 4;
-                    newCode.putInt(j);
-                    j = readInt(b, u) - j + 1;
-                    u += 4;
-                    newCode.putInt(readInt(b, u - 4));
-                    for (; j > 0; --j) {
-                        label = v + readInt(b, u);
-                        u += 4;
-                        newOffset = getNewOffset(allIndexes, allSizes, v, label);
-                        newCode.putInt(newOffset);
+                    if (state == 1) {
+                        // true number of bytes to be added (or removed)
+                        // from this instruction = (future number of padding
+                        // bytes - current number of padding byte) -
+                        // previously over estimated variation =
+                        // = ((3 - newOffset%4) - (3 - u%4)) - u%4
+                        // = (-newOffset%4 + u%4) - u%4
+                        // = -(newOffset & 3)
+                        newOffset = getNewOffset(allIndexes, allSizes, 0, u);
+                        insert = -(newOffset & 3);
+                    } else if (!resize[u]) {
+                        // over estimation of the number of bytes to be
+                        // added to this instruction = 3 - current number
+                        // of padding bytes = 3 - (3 - u%4) = u%4 = u & 3
+                        insert = u & 3;
+                        resize[u] = true;
                     }
+                    // skips instruction
+                    u = u + 4 - (u & 3);
+                    u += 4 * (readInt(b, u + 8) - readInt(b, u + 4) + 1) + 12;
                     break;
                 case ClassWriter.LOOK_INSN:
-                    // skips 0 to 3 padding bytes
-                    v = u;
-                    u = u + 4 - (v & 3);
-                    // reads and copies instruction
-                    newCode.putByte(Opcodes.LOOKUPSWITCH);
-                    newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
-                    label = v + readInt(b, u);
-                    u += 4;
-                    newOffset = getNewOffset(allIndexes, allSizes, v, label);
-                    newCode.putInt(newOffset);
-                    j = readInt(b, u);
-                    u += 4;
-                    newCode.putInt(j);
-                    for (; j > 0; --j) {
-                        newCode.putInt(readInt(b, u));
-                        u += 4;
-                        label = v + readInt(b, u);
-                        u += 4;
-                        newOffset = getNewOffset(allIndexes, allSizes, v, label);
-                        newCode.putInt(newOffset);
+                    if (state == 1) {
+                        // like TABL_INSN
+                        newOffset = getNewOffset(allIndexes, allSizes, 0, u);
+                        insert = -(newOffset & 3);
+                    } else if (!resize[u]) {
+                        // like TABL_INSN
+                        insert = u & 3;
+                        resize[u] = true;
                     }
+                    // skips instruction
+                    u = u + 4 - (u & 3);
+                    u += 8 * readInt(b, u + 4) + 8;
                     break;
                 case ClassWriter.WIDE_INSN:
                     opcode = b[u + 1] & 0xFF;
                     if (opcode == Opcodes.IINC) {
-                        newCode.putByteArray(b, u, 6);
                         u += 6;
                     } else {
-                        newCode.putByteArray(b, u, 4);
                         u += 4;
                     }
                     break;
                 case ClassWriter.VAR_INSN:
                 case ClassWriter.SBYTE_INSN:
                 case ClassWriter.LDC_INSN:
-                    newCode.putByteArray(b, u, 2);
                     u += 2;
                     break;
                 case ClassWriter.SHORT_INSN:
@@ -2441,19 +2289,178 @@ class MethodWriter extends MethodVisitor {
                 case ClassWriter.FIELDORMETH_INSN:
                 case ClassWriter.TYPE_INSN:
                 case ClassWriter.IINC_INSN:
-                    newCode.putByteArray(b, u, 3);
                     u += 3;
                     break;
                 case ClassWriter.ITFMETH_INSN:
                 case ClassWriter.INDYMETH_INSN:
-                    newCode.putByteArray(b, u, 5);
                     u += 5;
                     break;
-                // case MANA_INSN:
+                // case ClassWriter.MANA_INSN:
                 default:
-                    newCode.putByteArray(b, u, 4);
                     u += 4;
                     break;
+                }
+                if (insert != 0) {
+                    // adds a new (u, insert) entry in the allIndexes and
+                    // allSizes arrays
+                    int[] newIndexes = new int[allIndexes.length + 1];
+                    int[] newSizes = new int[allSizes.length + 1];
+                    System.arraycopy(allIndexes, 0, newIndexes, 0,
+                            allIndexes.length);
+                    System.arraycopy(allSizes, 0, newSizes, 0, allSizes.length);
+                    newIndexes[allIndexes.length] = u;
+                    newSizes[allSizes.length] = insert;
+                    allIndexes = newIndexes;
+                    allSizes = newSizes;
+                    if (insert > 0) {
+                        state = 3;
+                    }
+                }
+            }
+            if (state < 3) {
+                --state;
+            }
+        } while (state != 0);
+
+        // 2nd step:
+        // copies the bytecode of the method into a new bytevector, updates the
+        // offsets, and inserts (or removes) bytes as requested.
+
+        ByteVector newCode = new ByteVector(code.length);
+
+        u = 0;
+        while (u < code.length) {
+            int opcode = b[u] & 0xFF;
+            switch (ClassWriter.TYPE[opcode]) {
+            case ClassWriter.NOARG_INSN:
+            case ClassWriter.IMPLVAR_INSN:
+                newCode.putByte(opcode);
+                u += 1;
+                break;
+            case ClassWriter.LABEL_INSN:
+                if (opcode > 201) {
+                    // changes temporary opcodes 202 to 217 (inclusive), 218
+                    // and 219 to IFEQ ... JSR (inclusive), IFNULL and
+                    // IFNONNULL
+                    opcode = opcode < 218 ? opcode - 49 : opcode - 20;
+                    label = u + readUnsignedShort(b, u + 1);
+                } else {
+                    label = u + readShort(b, u + 1);
+                }
+                newOffset = getNewOffset(allIndexes, allSizes, u, label);
+                if (resize[u]) {
+                    // replaces GOTO with GOTO_W, JSR with JSR_W and IFxxx
+                    // <l> with IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx is
+                    // the "opposite" opcode of IFxxx (i.e., IFNE for IFEQ)
+                    // and where <l'> designates the instruction just after
+                    // the GOTO_W.
+                    if (opcode == Opcodes.GOTO) {
+                        newCode.putByte(200); // GOTO_W
+                    } else if (opcode == Opcodes.JSR) {
+                        newCode.putByte(201); // JSR_W
+                    } else {
+                        newCode.putByte(opcode <= 166 ? ((opcode + 1) ^ 1) - 1
+                                : opcode ^ 1);
+                        newCode.putShort(8); // jump offset
+                        newCode.putByte(200); // GOTO_W
+                        // newOffset now computed from start of GOTO_W
+                        newOffset -= 3;
+                    }
+                    newCode.putInt(newOffset);
+                } else {
+                    newCode.putByte(opcode);
+                    newCode.putShort(newOffset);
+                }
+                u += 3;
+                break;
+            case ClassWriter.LABELW_INSN:
+                label = u + readInt(b, u + 1);
+                newOffset = getNewOffset(allIndexes, allSizes, u, label);
+                newCode.putByte(opcode);
+                newCode.putInt(newOffset);
+                u += 5;
+                break;
+            case ClassWriter.TABL_INSN:
+                // skips 0 to 3 padding bytes
+                v = u;
+                u = u + 4 - (v & 3);
+                // reads and copies instruction
+                newCode.putByte(Opcodes.TABLESWITCH);
+                newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
+                label = v + readInt(b, u);
+                u += 4;
+                newOffset = getNewOffset(allIndexes, allSizes, v, label);
+                newCode.putInt(newOffset);
+                j = readInt(b, u);
+                u += 4;
+                newCode.putInt(j);
+                j = readInt(b, u) - j + 1;
+                u += 4;
+                newCode.putInt(readInt(b, u - 4));
+                for (; j > 0; --j) {
+                    label = v + readInt(b, u);
+                    u += 4;
+                    newOffset = getNewOffset(allIndexes, allSizes, v, label);
+                    newCode.putInt(newOffset);
+                }
+                break;
+            case ClassWriter.LOOK_INSN:
+                // skips 0 to 3 padding bytes
+                v = u;
+                u = u + 4 - (v & 3);
+                // reads and copies instruction
+                newCode.putByte(Opcodes.LOOKUPSWITCH);
+                newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
+                label = v + readInt(b, u);
+                u += 4;
+                newOffset = getNewOffset(allIndexes, allSizes, v, label);
+                newCode.putInt(newOffset);
+                j = readInt(b, u);
+                u += 4;
+                newCode.putInt(j);
+                for (; j > 0; --j) {
+                    newCode.putInt(readInt(b, u));
+                    u += 4;
+                    label = v + readInt(b, u);
+                    u += 4;
+                    newOffset = getNewOffset(allIndexes, allSizes, v, label);
+                    newCode.putInt(newOffset);
+                }
+                break;
+            case ClassWriter.WIDE_INSN:
+                opcode = b[u + 1] & 0xFF;
+                if (opcode == Opcodes.IINC) {
+                    newCode.putByteArray(b, u, 6);
+                    u += 6;
+                } else {
+                    newCode.putByteArray(b, u, 4);
+                    u += 4;
+                }
+                break;
+            case ClassWriter.VAR_INSN:
+            case ClassWriter.SBYTE_INSN:
+            case ClassWriter.LDC_INSN:
+                newCode.putByteArray(b, u, 2);
+                u += 2;
+                break;
+            case ClassWriter.SHORT_INSN:
+            case ClassWriter.LDCW_INSN:
+            case ClassWriter.FIELDORMETH_INSN:
+            case ClassWriter.TYPE_INSN:
+            case ClassWriter.IINC_INSN:
+                newCode.putByteArray(b, u, 3);
+                u += 3;
+                break;
+            case ClassWriter.ITFMETH_INSN:
+            case ClassWriter.INDYMETH_INSN:
+                newCode.putByteArray(b, u, 5);
+                u += 5;
+                break;
+            // case MANA_INSN:
+            default:
+                newCode.putByteArray(b, u, 4);
+                u += 4;
+                break;
             }
         }
 
@@ -2476,8 +2483,7 @@ class MethodWriter extends MethodVisitor {
                      * must therefore never have been called for this label.
                      */
                     u = l.position - 3;
-                    if ((l.status & Label.STORE) != 0 || (u >= 0 && resize[u]))
-                    {
+                    if ((l.status & Label.STORE) != 0 || (u >= 0 && resize[u])) {
                         getNewOffset(allIndexes, allSizes, l);
                         // TODO update offsets in UNINITIALIZED values
                         visitFrame(l.frame);
@@ -2533,10 +2539,11 @@ class MethodWriter extends MethodVisitor {
             b = lineNumber.data;
             u = 0;
             while (u < lineNumber.length) {
-                writeShort(b, u, getNewOffset(allIndexes,
-                        allSizes,
-                        0,
-                        readUnsignedShort(b, u)));
+                writeShort(
+                        b,
+                        u,
+                        getNewOffset(allIndexes, allSizes, 0,
+                                readUnsignedShort(b, u)));
                 u += 4;
             }
         }
@@ -2559,8 +2566,10 @@ class MethodWriter extends MethodVisitor {
     /**
      * Reads an unsigned short value in the given byte array.
      *
-     * @param b a byte array.
-     * @param index the start index of the value to be read.
+     * @param b
+     *            a byte array.
+     * @param index
+     *            the start index of the value to be read.
      * @return the read value.
      */
     static int readUnsignedShort(final byte[] b, final int index) {
@@ -2570,8 +2579,10 @@ class MethodWriter extends MethodVisitor {
     /**
      * Reads a signed short value in the given byte array.
      *
-     * @param b a byte array.
-     * @param index the start index of the value to be read.
+     * @param b
+     *            a byte array.
+     * @param index
+     *            the start index of the value to be read.
      * @return the read value.
      */
     static short readShort(final byte[] b, final int index) {
@@ -2581,8 +2592,10 @@ class MethodWriter extends MethodVisitor {
     /**
      * Reads a signed int value in the given byte array.
      *
-     * @param b a byte array.
-     * @param index the start index of the value to be read.
+     * @param b
+     *            a byte array.
+     * @param index
+     *            the start index of the value to be read.
      * @return the read value.
      */
     static int readInt(final byte[] b, final int index) {
@@ -2593,9 +2606,12 @@ class MethodWriter extends MethodVisitor {
     /**
      * Writes a short value in the given byte array.
      *
-     * @param b a byte array.
-     * @param index where the first byte of the short value must be written.
-     * @param s the value to be written in the given byte array.
+     * @param b
+     *            a byte array.
+     * @param index
+     *            where the first byte of the short value must be written.
+     * @param s
+     *            the value to be written in the given byte array.
      */
     static void writeShort(final byte[] b, final int index, final int s) {
         b[index] = (byte) (s >>> 8);
@@ -2603,32 +2619,34 @@ class MethodWriter extends MethodVisitor {
     }
 
     /**
-     * Computes the future value of a bytecode offset. <p> Note: it is possible
-     * to have several entries for the same instruction in the <tt>indexes</tt>
-     * and <tt>sizes</tt>: two entries (index=a,size=b) and (index=a,size=b')
-     * are equivalent to a single entry (index=a,size=b+b').
+     * Computes the future value of a bytecode offset.
+     * <p>
+     * Note: it is possible to have several entries for the same instruction in
+     * the <tt>indexes</tt> and <tt>sizes</tt>: two entries (index=a,size=b) and
+     * (index=a,size=b') are equivalent to a single entry (index=a,size=b+b').
      *
-     * @param indexes current positions of the instructions to be resized. Each
-     *        instruction must be designated by the index of its <i>last</i>
-     *        byte, plus one (or, in other words, by the index of the <i>first</i>
-     *        byte of the <i>next</i> instruction).
-     * @param sizes the number of bytes to be <i>added</i> to the above
-     *        instructions. More precisely, for each i < <tt>len</tt>,
-     *        <tt>sizes</tt>[i] bytes will be added at the end of the
-     *        instruction designated by <tt>indexes</tt>[i] or, if
-     *        <tt>sizes</tt>[i] is negative, the <i>last</i> |<tt>sizes[i]</tt>|
-     *        bytes of the instruction will be removed (the instruction size
-     *        <i>must not</i> become negative or null).
-     * @param begin index of the first byte of the source instruction.
-     * @param end index of the first byte of the target instruction.
+     * @param indexes
+     *            current positions of the instructions to be resized. Each
+     *            instruction must be designated by the index of its <i>last</i>
+     *            byte, plus one (or, in other words, by the index of the
+     *            <i>first</i> byte of the <i>next</i> instruction).
+     * @param sizes
+     *            the number of bytes to be <i>added</i> to the above
+     *            instructions. More precisely, for each i < <tt>len</tt>,
+     *            <tt>sizes</tt>[i] bytes will be added at the end of the
+     *            instruction designated by <tt>indexes</tt>[i] or, if
+     *            <tt>sizes</tt>[i] is negative, the <i>last</i> |
+     *            <tt>sizes[i]</tt>| bytes of the instruction will be removed
+     *            (the instruction size <i>must not</i> become negative or
+     *            null).
+     * @param begin
+     *            index of the first byte of the source instruction.
+     * @param end
+     *            index of the first byte of the target instruction.
      * @return the future value of the given bytecode offset.
      */
-    static int getNewOffset(
-        final int[] indexes,
-        final int[] sizes,
-        final int begin,
-        final int end)
-    {
+    static int getNewOffset(final int[] indexes, final int[] sizes,
+            final int begin, final int end) {
         int offset = end - begin;
         for (int i = 0; i < indexes.length; ++i) {
             if (begin < indexes[i] && indexes[i] <= end) {
@@ -2645,24 +2663,25 @@ class MethodWriter extends MethodVisitor {
     /**
      * Updates the offset of the given label.
      *
-     * @param indexes current positions of the instructions to be resized. Each
-     *        instruction must be designated by the index of its <i>last</i>
-     *        byte, plus one (or, in other words, by the index of the <i>first</i>
-     *        byte of the <i>next</i> instruction).
-     * @param sizes the number of bytes to be <i>added</i> to the above
-     *        instructions. More precisely, for each i < <tt>len</tt>,
-     *        <tt>sizes</tt>[i] bytes will be added at the end of the
-     *        instruction designated by <tt>indexes</tt>[i] or, if
-     *        <tt>sizes</tt>[i] is negative, the <i>last</i> |<tt>sizes[i]</tt>|
-     *        bytes of the instruction will be removed (the instruction size
-     *        <i>must not</i> become negative or null).
-     * @param label the label whose offset must be updated.
-     */
-    static void getNewOffset(
-        final int[] indexes,
-        final int[] sizes,
-        final Label label)
-    {
+     * @param indexes
+     *            current positions of the instructions to be resized. Each
+     *            instruction must be designated by the index of its <i>last</i>
+     *            byte, plus one (or, in other words, by the index of the
+     *            <i>first</i> byte of the <i>next</i> instruction).
+     * @param sizes
+     *            the number of bytes to be <i>added</i> to the above
+     *            instructions. More precisely, for each i < <tt>len</tt>,
+     *            <tt>sizes</tt>[i] bytes will be added at the end of the
+     *            instruction designated by <tt>indexes</tt>[i] or, if
+     *            <tt>sizes</tt>[i] is negative, the <i>last</i> |
+     *            <tt>sizes[i]</tt>| bytes of the instruction will be removed
+     *            (the instruction size <i>must not</i> become negative or
+     *            null).
+     * @param label
+     *            the label whose offset must be updated.
+     */
+    static void getNewOffset(final int[] indexes, final int[] sizes,
+            final Label label) {
         if ((label.status & Label.RESIZED) == 0) {
             label.position = getNewOffset(indexes, sizes, 0, label.position);
             label.status |= Label.RESIZED;
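
A minimal sketch, not part of the patch and with an illustrative method name, of the offset bookkeeping documented for getNewOffset above: every resized instruction recorded strictly after 'begin' and at or before 'end' shifts the offset by its size entry, and several entries for the same index simply add up.

    // Restates the getNewOffset(int[], int[], int, int) loop shown above.
    static int futureOffset(int[] indexes, int[] sizes, int begin, int end) {
        int offset = end - begin;
        for (int i = 0; i < indexes.length; ++i) {
            if (begin < indexes[i] && indexes[i] <= end) {
                offset += sizes[i];
            }
        }
        return offset;
    }
    // e.g. futureOffset(new int[]{10, 10}, new int[]{2, 3}, 0, 20) == 25:
    // the two entries at index 10 behave like one (index=10, size=5) entry.
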
diff --git a/src/asm/scala/tools/asm/Type.java b/src/asm/scala/tools/asm/Type.java
index bf11071..7821a49 100644
--- a/src/asm/scala/tools/asm/Type.java
+++ b/src/asm/scala/tools/asm/Type.java
@@ -190,13 +190,16 @@ public class Type {
     /**
      * Constructs a reference type.
      *
-     * @param sort the sort of the reference type to be constructed.
-     * @param buf a buffer containing the descriptor of the previous type.
-     * @param off the offset of this descriptor in the previous buffer.
-     * @param len the length of this descriptor.
-     */
-    private Type(final int sort, final char[] buf, final int off, final int len)
-    {
+     * @param sort
+     *            the sort of the reference type to be constructed.
+     * @param buf
+     *            a buffer containing the descriptor of the previous type.
+     * @param off
+     *            the offset of this descriptor in the previous buffer.
+     * @param len
+     *            the length of this descriptor.
+     */
+    private Type(final int sort, final char[] buf, final int off, final int len) {
         this.sort = sort;
         this.buf = buf;
         this.off = off;
@@ -206,7 +209,8 @@ public class Type {
     /**
      * Returns the Java type corresponding to the given type descriptor.
      *
-     * @param typeDescriptor a field or method type descriptor.
+     * @param typeDescriptor
+     *            a field or method type descriptor.
      * @return the Java type corresponding to the given type descriptor.
      */
     public static Type getType(final String typeDescriptor) {
@@ -216,7 +220,8 @@ public class Type {
     /**
      * Returns the Java type corresponding to the given internal name.
      *
-     * @param internalName an internal name.
+     * @param internalName
+     *            an internal name.
      * @return the Java type corresponding to the given internal name.
      */
     public static Type getObjectType(final String internalName) {
@@ -228,7 +233,8 @@ public class Type {
      * Returns the Java type corresponding to the given method descriptor.
      * Equivalent to <code>Type.getType(methodDescriptor)</code>.
      *
-     * @param methodDescriptor a method descriptor.
+     * @param methodDescriptor
+     *            a method descriptor.
      * @return the Java type corresponding to the given method descriptor.
      */
     public static Type getMethodType(final String methodDescriptor) {
@@ -239,18 +245,23 @@ public class Type {
      * Returns the Java method type corresponding to the given argument and
      * return types.
      *
-     * @param returnType the return type of the method.
-     * @param argumentTypes the argument types of the method.
-     * @return the Java type corresponding to the given argument and return types.
+     * @param returnType
+     *            the return type of the method.
+     * @param argumentTypes
+     *            the argument types of the method.
+     * @return the Java type corresponding to the given argument and return
+     *         types.
      */
-    public static Type getMethodType(final Type returnType, final Type... argumentTypes) {
+    public static Type getMethodType(final Type returnType,
+            final Type... argumentTypes) {
         return getType(getMethodDescriptor(returnType, argumentTypes));
     }
 
     /**
      * Returns the Java type corresponding to the given class.
      *
-     * @param c a class.
+     * @param c
+     *            a class.
      * @return the Java type corresponding to the given class.
      */
     public static Type getType(final Class<?> c) {
@@ -282,7 +293,8 @@ public class Type {
     /**
      * Returns the Java method type corresponding to the given constructor.
      *
-     * @param c a {@link Constructor Constructor} object.
+     * @param c
+     *            a {@link Constructor Constructor} object.
      * @return the Java method type corresponding to the given constructor.
      */
     public static Type getType(final Constructor<?> c) {
@@ -292,7 +304,8 @@ public class Type {
     /**
      * Returns the Java method type corresponding to the given method.
      *
-     * @param m a {@link Method Method} object.
+     * @param m
+     *            a {@link Method Method} object.
      * @return the Java method type corresponding to the given method.
      */
     public static Type getType(final Method m) {
@@ -303,7 +316,8 @@ public class Type {
      * Returns the Java types corresponding to the argument types of the given
      * method descriptor.
      *
-     * @param methodDescriptor a method descriptor.
+     * @param methodDescriptor
+     *            a method descriptor.
      * @return the Java types corresponding to the argument types of the given
      *         method descriptor.
      */
@@ -338,7 +352,8 @@ public class Type {
      * Returns the Java types corresponding to the argument types of the given
      * method.
      *
-     * @param method a method.
+     * @param method
+     *            a method.
      * @return the Java types corresponding to the argument types of the given
      *         method.
      */
@@ -355,7 +370,8 @@ public class Type {
      * Returns the Java type corresponding to the return type of the given
      * method descriptor.
      *
-     * @param methodDescriptor a method descriptor.
+     * @param methodDescriptor
+     *            a method descriptor.
      * @return the Java type corresponding to the return type of the given
      *         method descriptor.
      */
@@ -368,7 +384,8 @@ public class Type {
      * Returns the Java type corresponding to the return type of the given
      * method.
      *
-     * @param method a method.
+     * @param method
+     *            a method.
      * @return the Java type corresponding to the return type of the given
      *         method.
      */
@@ -379,12 +396,13 @@ public class Type {
     /**
      * Computes the size of the arguments and of the return value of a method.
      *
-     * @param desc the descriptor of a method.
+     * @param desc
+     *            the descriptor of a method.
      * @return the size of the arguments of the method (plus one for the
      *         implicit this argument), argSize, and the size of its return
      *         value, retSize, packed into a single int i =
-     *         <tt>(argSize << 2) | retSize</tt> (argSize is therefore equal
-     *         to <tt>i >> 2</tt>, and retSize to <tt>i & 0x03</tt>).
+     *         <tt>(argSize << 2) | retSize</tt> (argSize is therefore equal to
+     *         <tt>i >> 2</tt>, and retSize to <tt>i & 0x03</tt>).
      */
     public static int getArgumentsAndReturnSizes(final String desc) {
         int n = 1;
@@ -419,52 +437,54 @@ public class Type {
      * method descriptors, buf is supposed to contain nothing more than the
      * descriptor itself.
      *
-     * @param buf a buffer containing a type descriptor.
-     * @param off the offset of this descriptor in the previous buffer.
+     * @param buf
+     *            a buffer containing a type descriptor.
+     * @param off
+     *            the offset of this descriptor in the previous buffer.
      * @return the Java type corresponding to the given type descriptor.
      */
     private static Type getType(final char[] buf, final int off) {
         int len;
         switch (buf[off]) {
-            case 'V':
-                return VOID_TYPE;
-            case 'Z':
-                return BOOLEAN_TYPE;
-            case 'C':
-                return CHAR_TYPE;
-            case 'B':
-                return BYTE_TYPE;
-            case 'S':
-                return SHORT_TYPE;
-            case 'I':
-                return INT_TYPE;
-            case 'F':
-                return FLOAT_TYPE;
-            case 'J':
-                return LONG_TYPE;
-            case 'D':
-                return DOUBLE_TYPE;
-            case '[':
-                len = 1;
-                while (buf[off + len] == '[') {
-                    ++len;
-                }
-                if (buf[off + len] == 'L') {
-                    ++len;
-                    while (buf[off + len] != ';') {
-                        ++len;
-                    }
-                }
-                return new Type(ARRAY, buf, off, len + 1);
-            case 'L':
-                len = 1;
+        case 'V':
+            return VOID_TYPE;
+        case 'Z':
+            return BOOLEAN_TYPE;
+        case 'C':
+            return CHAR_TYPE;
+        case 'B':
+            return BYTE_TYPE;
+        case 'S':
+            return SHORT_TYPE;
+        case 'I':
+            return INT_TYPE;
+        case 'F':
+            return FLOAT_TYPE;
+        case 'J':
+            return LONG_TYPE;
+        case 'D':
+            return DOUBLE_TYPE;
+        case '[':
+            len = 1;
+            while (buf[off + len] == '[') {
+                ++len;
+            }
+            if (buf[off + len] == 'L') {
+                ++len;
                 while (buf[off + len] != ';') {
                     ++len;
                 }
-                return new Type(OBJECT, buf, off + 1, len - 1);
+            }
+            return new Type(ARRAY, buf, off, len + 1);
+        case 'L':
+            len = 1;
+            while (buf[off + len] != ';') {
+                ++len;
+            }
+            return new Type(OBJECT, buf, off + 1, len - 1);
             // case '(':
-            default:
-                return new Type(METHOD, buf, 0, buf.length);
+        default:
+            return new Type(METHOD, buf, off, buf.length - off);
         }
     }
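
A hedged usage sketch of the descriptor parsing above; the descriptors and the demo class are arbitrary examples, and the printed class names follow from getClassName() further down in this file.

    import scala.tools.asm.Type;

    class TypeDemo { // illustrative only
        public static void main(String[] args) {
            Type d = Type.getType("D");                   // DOUBLE_TYPE via the switch above
            Type a = Type.getType("[Ljava/lang/String;"); // sort ARRAY, element java.lang.String
            System.out.println(d.getClassName());         // "double"
            System.out.println(a.getClassName());         // "java.lang.String[]"
        }
    }
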
 
@@ -475,11 +495,11 @@ public class Type {
     /**
      * Returns the sort of this Java type.
      *
-     * @return {@link #VOID VOID}, {@link #BOOLEAN BOOLEAN},
-     *         {@link #CHAR CHAR}, {@link #BYTE BYTE}, {@link #SHORT SHORT},
-     *         {@link #INT INT}, {@link #FLOAT FLOAT}, {@link #LONG LONG},
-     *         {@link #DOUBLE DOUBLE}, {@link #ARRAY ARRAY},
-     *         {@link #OBJECT OBJECT} or {@link #METHOD METHOD}.
+     * @return {@link #VOID VOID}, {@link #BOOLEAN BOOLEAN}, {@link #CHAR CHAR},
+     *         {@link #BYTE BYTE}, {@link #SHORT SHORT}, {@link #INT INT},
+     *         {@link #FLOAT FLOAT}, {@link #LONG LONG}, {@link #DOUBLE DOUBLE},
+     *         {@link #ARRAY ARRAY}, {@link #OBJECT OBJECT} or {@link #METHOD
+     *         METHOD}.
      */
     public int getSort() {
         return sort;
@@ -517,34 +537,34 @@ public class Type {
      */
     public String getClassName() {
         switch (sort) {
-            case VOID:
-                return "void";
-            case BOOLEAN:
-                return "boolean";
-            case CHAR:
-                return "char";
-            case BYTE:
-                return "byte";
-            case SHORT:
-                return "short";
-            case INT:
-                return "int";
-            case FLOAT:
-                return "float";
-            case LONG:
-                return "long";
-            case DOUBLE:
-                return "double";
-            case ARRAY:
-                StringBuffer b = new StringBuffer(getElementType().getClassName());
-                for (int i = getDimensions(); i > 0; --i) {
-                    b.append("[]");
-                }
-                return b.toString();
-            case OBJECT:
-                return new String(buf, off, len).replace('/', '.');
-            default:
-                return null;
+        case VOID:
+            return "void";
+        case BOOLEAN:
+            return "boolean";
+        case CHAR:
+            return "char";
+        case BYTE:
+            return "byte";
+        case SHORT:
+            return "short";
+        case INT:
+            return "int";
+        case FLOAT:
+            return "float";
+        case LONG:
+            return "long";
+        case DOUBLE:
+            return "double";
+        case ARRAY:
+            StringBuffer b = new StringBuffer(getElementType().getClassName());
+            for (int i = getDimensions(); i > 0; --i) {
+                b.append("[]");
+            }
+            return b.toString();
+        case OBJECT:
+            return new String(buf, off, len).replace('/', '.');
+        default:
+            return null;
         }
     }
 
@@ -613,15 +633,15 @@ public class Type {
      * Returns the descriptor corresponding to the given argument and return
      * types.
      *
-     * @param returnType the return type of the method.
-     * @param argumentTypes the argument types of the method.
+     * @param returnType
+     *            the return type of the method.
+     * @param argumentTypes
+     *            the argument types of the method.
      * @return the descriptor corresponding to the given argument and return
      *         types.
      */
-    public static String getMethodDescriptor(
-        final Type returnType,
-        final Type... argumentTypes)
-    {
+    public static String getMethodDescriptor(final Type returnType,
+            final Type... argumentTypes) {
         StringBuffer buf = new StringBuffer();
         buf.append('(');
         for (int i = 0; i < argumentTypes.length; ++i) {
@@ -636,11 +656,13 @@ public class Type {
      * Appends the descriptor corresponding to this Java type to the given
      * string buffer.
      *
-     * @param buf the string buffer to which the descriptor must be appended.
+     * @param buf
+     *            the string buffer to which the descriptor must be appended.
      */
     private void getDescriptor(final StringBuffer buf) {
         if (this.buf == null) {
-            // descriptor is in byte 3 of 'off' for primitive types (buf == null)
+            // descriptor is in byte 3 of 'off' for primitive types (buf ==
+            // null)
             buf.append((char) ((off & 0xFF000000) >>> 24));
         } else if (sort == OBJECT) {
             buf.append('L');
@@ -661,7 +683,8 @@ public class Type {
      * class is its fully qualified name, as returned by Class.getName(), where
      * '.' are replaced by '/'.
      *
-     * @param c an object or array class.
+     * @param c
+     *            an object or array class.
      * @return the internal name of the given class.
      */
     public static String getInternalName(final Class<?> c) {
@@ -671,7 +694,8 @@ public class Type {
     /**
      * Returns the descriptor corresponding to the given Java type.
      *
-     * @param c an object class, a primitive class or an array class.
+     * @param c
+     *            an object class, a primitive class or an array class.
      * @return the descriptor corresponding to the given class.
      */
     public static String getDescriptor(final Class<?> c) {
@@ -683,7 +707,8 @@ public class Type {
     /**
      * Returns the descriptor corresponding to the given constructor.
      *
-     * @param c a {@link Constructor Constructor} object.
+     * @param c
+     *            a {@link Constructor Constructor} object.
      * @return the descriptor of the given constructor.
      */
     public static String getConstructorDescriptor(final Constructor<?> c) {
@@ -699,7 +724,8 @@ public class Type {
     /**
      * Returns the descriptor corresponding to the given method.
      *
-     * @param m a {@link Method Method} object.
+     * @param m
+     *            a {@link Method Method} object.
      * @return the descriptor of the given method.
      */
     public static String getMethodDescriptor(final Method m) {
@@ -717,8 +743,10 @@ public class Type {
     /**
      * Appends the descriptor of the given class to the given string buffer.
      *
-     * @param buf the string buffer to which the descriptor must be appended.
-     * @param c the class whose descriptor must be computed.
+     * @param buf
+     *            the string buffer to which the descriptor must be appended.
+     * @param c
+     *            the class whose descriptor must be computed.
      */
     private static void getDescriptor(final StringBuffer buf, final Class<?> c) {
         Class<?> d = c;
@@ -783,9 +811,10 @@ public class Type {
      * Returns a JVM instruction opcode adapted to this Java type. This method
      * must not be used for method types.
      *
-     * @param opcode a JVM instruction opcode. This opcode must be one of ILOAD,
-     *        ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL,
-     *        ISHR, IUSHR, IAND, IOR, IXOR and IRETURN.
+     * @param opcode
+     *            a JVM instruction opcode. This opcode must be one of ILOAD,
+     *            ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG,
+     *            ISHL, ISHR, IUSHR, IAND, IOR, IXOR and IRETURN.
      * @return an opcode that is similar to the given opcode, but adapted to
      *         this Java type. For example, if this type is <tt>float</tt> and
      *         <tt>opcode</tt> is IRETURN, this method returns FRETURN.
@@ -809,7 +838,8 @@ public class Type {
     /**
      * Tests if the given object is equal to this type.
      *
-     * @param o the object to be compared to this type.
+     * @param o
+     *            the object to be compared to this type.
      * @return <tt>true</tt> if the given object is equal to this type.
      */
     @Override
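
A hedged sketch of the packed int documented for getArgumentsAndReturnSizes above; the descriptor and the demo class are arbitrary examples, and the expected slot counts are worked out from the javadoc (one slot for the implicit 'this', one for the int, two for the long, two for the double return value).

    import scala.tools.asm.Type;

    class SizesDemo { // illustrative only
        public static void main(String[] args) {
            int packed  = Type.getArgumentsAndReturnSizes("(IJ)D");
            int argSize = packed >> 2;   // 4 = this + int (1 slot) + long (2 slots)
            int retSize = packed & 0x03; // 2 = a double return occupies two slots
            System.out.println(argSize + " " + retSize);
        }
    }
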
diff --git a/src/asm/scala/tools/asm/signature/SignatureReader.java b/src/asm/scala/tools/asm/signature/SignatureReader.java
index 22e6427..9c7c388 100644
--- a/src/asm/scala/tools/asm/signature/SignatureReader.java
+++ b/src/asm/scala/tools/asm/signature/SignatureReader.java
@@ -46,8 +46,9 @@ public class SignatureReader {
     /**
      * Constructs a {@link SignatureReader} for the given signature.
      *
-     * @param signature A <i>ClassSignature</i>, <i>MethodTypeSignature</i>,
-     *        or <i>FieldTypeSignature</i>.
+     * @param signature
+     *            A <i>ClassSignature</i>, <i>MethodTypeSignature</i>, or
+     *            <i>FieldTypeSignature</i>.
      */
     public SignatureReader(final String signature) {
         this.signature = signature;
@@ -58,15 +59,15 @@ public class SignatureReader {
      * {@link SignatureReader}. This signature is the one specified in the
      * constructor (see {@link #SignatureReader(String) SignatureReader}). This
      * method is intended to be called on a {@link SignatureReader} that was
-     * created using a <i>ClassSignature</i> (such as the
+     * created using a <i>ClassSignature</i> (such as the <code>signature</code>
+     * parameter of the {@link scala.tools.asm.ClassVisitor#visit
+     * ClassVisitor.visit} method) or a <i>MethodTypeSignature</i> (such as the
      * <code>signature</code> parameter of the
-     * {@link org.objectweb.asm.ClassVisitor#visit ClassVisitor.visit} method)
-     * or a <i>MethodTypeSignature</i> (such as the <code>signature</code>
-     * parameter of the
-     * {@link org.objectweb.asm.ClassVisitor#visitMethod ClassVisitor.visitMethod}
-     * method).
+     * {@link scala.tools.asm.ClassVisitor#visitMethod
+     * ClassVisitor.visitMethod} method).
      *
-     * @param v the visitor that must visit this signature.
+     * @param v
+     *            the visitor that must visit this signature.
      */
     public void accept(final SignatureVisitor v) {
         String signature = this.signature;
@@ -118,12 +119,12 @@ public class SignatureReader {
      * method is intended to be called on a {@link SignatureReader} that was
      * created using a <i>FieldTypeSignature</i>, such as the
      * <code>signature</code> parameter of the
-     * {@link org.objectweb.asm.ClassVisitor#visitField
-     * ClassVisitor.visitField} or {@link
-     * org.objectweb.asm.MethodVisitor#visitLocalVariable
+     * {@link scala.tools.asm.ClassVisitor#visitField ClassVisitor.visitField}
+     * or {@link scala.tools.asm.MethodVisitor#visitLocalVariable
      * MethodVisitor.visitLocalVariable} methods.
      *
-     * @param v the visitor that must visit this signature.
+     * @param v
+     *            the visitor that must visit this signature.
      */
     public void acceptType(final SignatureVisitor v) {
         parseType(this.signature, 0, v);
@@ -132,98 +133,96 @@ public class SignatureReader {
     /**
      * Parses a field type signature and makes the given visitor visit it.
      *
-     * @param signature a string containing the signature that must be parsed.
-     * @param pos index of the first character of the signature to parsed.
-     * @param v the visitor that must visit this signature.
+     * @param signature
+     *            a string containing the signature that must be parsed.
+     * @param pos
+     *            index of the first character of the signature to be parsed.
+     * @param v
+     *            the visitor that must visit this signature.
      * @return the index of the first character after the parsed signature.
      */
-    private static int parseType(
-        final String signature,
-        int pos,
-        final SignatureVisitor v)
-    {
+    private static int parseType(final String signature, int pos,
+            final SignatureVisitor v) {
         char c;
         int start, end;
         boolean visited, inner;
         String name;
 
         switch (c = signature.charAt(pos++)) {
-            case 'Z':
-            case 'C':
-            case 'B':
-            case 'S':
-            case 'I':
-            case 'F':
-            case 'J':
-            case 'D':
-            case 'V':
-                v.visitBaseType(c);
-                return pos;
+        case 'Z':
+        case 'C':
+        case 'B':
+        case 'S':
+        case 'I':
+        case 'F':
+        case 'J':
+        case 'D':
+        case 'V':
+            v.visitBaseType(c);
+            return pos;
 
-            case '[':
-                return parseType(signature, pos, v.visitArrayType());
+        case '[':
+            return parseType(signature, pos, v.visitArrayType());
 
-            case 'T':
-                end = signature.indexOf(';', pos);
-                v.visitTypeVariable(signature.substring(pos, end));
-                return end + 1;
+        case 'T':
+            end = signature.indexOf(';', pos);
+            v.visitTypeVariable(signature.substring(pos, end));
+            return end + 1;
 
-            default: // case 'L':
-                start = pos;
-                visited = false;
-                inner = false;
-                for (;;) {
-                    switch (c = signature.charAt(pos++)) {
-                        case '.':
-                        case ';':
-                            if (!visited) {
-                                name = signature.substring(start, pos - 1);
-                                if (inner) {
-                                    v.visitInnerClassType(name);
-                                } else {
-                                    v.visitClassType(name);
-                                }
-                            }
-                            if (c == ';') {
-                                v.visitEnd();
-                                return pos;
-                            }
-                            start = pos;
-                            visited = false;
-                            inner = true;
-                            break;
+        default: // case 'L':
+            start = pos;
+            visited = false;
+            inner = false;
+            for (;;) {
+                switch (c = signature.charAt(pos++)) {
+                case '.':
+                case ';':
+                    if (!visited) {
+                        name = signature.substring(start, pos - 1);
+                        if (inner) {
+                            v.visitInnerClassType(name);
+                        } else {
+                            v.visitClassType(name);
+                        }
+                    }
+                    if (c == ';') {
+                        v.visitEnd();
+                        return pos;
+                    }
+                    start = pos;
+                    visited = false;
+                    inner = true;
+                    break;
 
-                        case '<':
-                            name = signature.substring(start, pos - 1);
-                            if (inner) {
-                                v.visitInnerClassType(name);
-                            } else {
-                                v.visitClassType(name);
-                            }
-                            visited = true;
-                            top: for (;;) {
-                                switch (c = signature.charAt(pos)) {
-                                    case '>':
-                                        break top;
-                                    case '*':
-                                        ++pos;
-                                        v.visitTypeArgument();
-                                        break;
-                                    case '+':
-                                    case '-':
-                                        pos = parseType(signature,
-                                                pos + 1,
-                                                v.visitTypeArgument(c));
-                                        break;
-                                    default:
-                                        pos = parseType(signature,
-                                                pos,
-                                                v.visitTypeArgument('='));
-                                        break;
-                                }
-                            }
+                case '<':
+                    name = signature.substring(start, pos - 1);
+                    if (inner) {
+                        v.visitInnerClassType(name);
+                    } else {
+                        v.visitClassType(name);
+                    }
+                    visited = true;
+                    top: for (;;) {
+                        switch (c = signature.charAt(pos)) {
+                        case '>':
+                            break top;
+                        case '*':
+                            ++pos;
+                            v.visitTypeArgument();
+                            break;
+                        case '+':
+                        case '-':
+                            pos = parseType(signature, pos + 1,
+                                    v.visitTypeArgument(c));
+                            break;
+                        default:
+                            pos = parseType(signature, pos,
+                                    v.visitTypeArgument('='));
+                            break;
+                        }
                     }
                 }
+            }
         }
     }
 }
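
The rewrapped Javadoc above spells out the intended usage of SignatureReader: accept() replays a full class or method signature into a SignatureVisitor, while acceptType() handles a lone field type signature. As a minimal, hypothetical sketch (the class name and example signature string are mine, not part of the patch), a SignatureWriter can serve as the visitor to round-trip a method signature:

    import scala.tools.asm.signature.SignatureReader;
    import scala.tools.asm.signature.SignatureWriter;

    public class SignatureRoundTrip {
        public static void main(String[] args) {
            // MethodTypeSignature of: <T> java.util.List<T> singleton(T value)
            String sig = "<T:Ljava/lang/Object;>(TT;)Ljava/util/List<TT;>;";
            SignatureWriter writer = new SignatureWriter();
            new SignatureReader(sig).accept(writer); // replays the parse events into the writer
            System.out.println(writer.toString());   // prints the same signature back
        }
    }
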
diff --git a/src/asm/scala/tools/asm/signature/SignatureVisitor.java b/src/asm/scala/tools/asm/signature/SignatureVisitor.java
index 2fc364e..f38f81f 100644
--- a/src/asm/scala/tools/asm/signature/SignatureVisitor.java
+++ b/src/asm/scala/tools/asm/signature/SignatureVisitor.java
@@ -35,21 +35,21 @@ import scala.tools.asm.Opcodes;
  * A visitor to visit a generic signature. The methods of this interface must be
  * called in one of the three following orders (the last one is the only valid
  * order for a {@link SignatureVisitor} that is returned by a method of this
- * interface): <ul> <li><i>ClassSignature</i> = (
- * <tt>visitFormalTypeParameter</tt>
- *   <tt>visitClassBound</tt>?
- * <tt>visitInterfaceBound</tt>* )* ( <tt>visitSuperClass</tt>
- *   <tt>visitInterface</tt>* )</li>
+ * interface):
+ * <ul>
+ * <li><i>ClassSignature</i> = ( <tt>visitFormalTypeParameter</tt>
+ * <tt>visitClassBound</tt>? <tt>visitInterfaceBound</tt>* )* (
+ * <tt>visitSuperClass</tt> <tt>visitInterface</tt>* )</li>
  * <li><i>MethodSignature</i> = ( <tt>visitFormalTypeParameter</tt>
- *   <tt>visitClassBound</tt>?
- * <tt>visitInterfaceBound</tt>* )* ( <tt>visitParameterType</tt>*
- * <tt>visitReturnType</tt>
- *   <tt>visitExceptionType</tt>* )</li> <li><i>TypeSignature</i> =
- * <tt>visitBaseType</tt> | <tt>visitTypeVariable</tt> |
- * <tt>visitArrayType</tt> | (
+ * <tt>visitClassBound</tt>? <tt>visitInterfaceBound</tt>* )* (
+ * <tt>visitParameterType</tt>* <tt>visitReturnType</tt>
+ * <tt>visitExceptionType</tt>* )</li>
+ * <li><i>TypeSignature</i> = <tt>visitBaseType</tt> |
+ * <tt>visitTypeVariable</tt> | <tt>visitArrayType</tt> | (
  * <tt>visitClassType</tt> <tt>visitTypeArgument</tt>* (
- * <tt>visitInnerClassType</tt> <tt>visitTypeArgument</tt>* )*
- * <tt>visitEnd</tt> ) )</li> </ul>
+ * <tt>visitInnerClassType</tt> <tt>visitTypeArgument</tt>* )* <tt>visitEnd</tt>
+ * ) )</li>
+ * </ul>
  *
  * @author Thomas Hallgren
  * @author Eric Bruneton
@@ -80,8 +80,9 @@ public abstract class SignatureVisitor {
     /**
      * Constructs a new {@link SignatureVisitor}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
      */
     public SignatureVisitor(final int api) {
         this.api = api;
@@ -90,7 +91,8 @@ public abstract class SignatureVisitor {
     /**
      * Visits a formal type parameter.
      *
-     * @param name the name of the formal parameter.
+     * @param name
+     *            the name of the formal parameter.
      */
     public void visitFormalTypeParameter(String name) {
     }
@@ -162,8 +164,9 @@ public abstract class SignatureVisitor {
     /**
      * Visits a signature corresponding to a primitive type.
      *
-     * @param descriptor the descriptor of the primitive type, or 'V' for
-     *        <tt>void</tt>.
+     * @param descriptor
+     *            the descriptor of the primitive type, or 'V' for <tt>void</tt>
+     *            .
      */
     public void visitBaseType(char descriptor) {
     }
@@ -171,7 +174,8 @@ public abstract class SignatureVisitor {
     /**
      * Visits a signature corresponding to a type variable.
      *
-     * @param name the name of the type variable.
+     * @param name
+     *            the name of the type variable.
      */
     public void visitTypeVariable(String name) {
     }
@@ -190,7 +194,8 @@ public abstract class SignatureVisitor {
      * Starts the visit of a signature corresponding to a class or interface
      * type.
      *
-     * @param name the internal name of the class or interface.
+     * @param name
+     *            the internal name of the class or interface.
      */
     public void visitClassType(String name) {
     }
@@ -198,7 +203,8 @@ public abstract class SignatureVisitor {
     /**
      * Visits an inner class.
      *
-     * @param name the local name of the inner class in its enclosing class.
+     * @param name
+     *            the local name of the inner class in its enclosing class.
      */
     public void visitInnerClassType(String name) {
     }
@@ -213,7 +219,8 @@ public abstract class SignatureVisitor {
     /**
      * Visits a type argument of the last visited class or inner class type.
      *
-     * @param wildcard '+', '-' or '='.
+     * @param wildcard
+     *            '+', '-' or '='.
      * @return a non null visitor to visit the signature of the type argument.
      */
     public SignatureVisitor visitTypeArgument(char wildcard) {
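
The visit-order grammar in the reflowed class comment is easier to see with a concrete visitor. A small sketch (the collector class is illustrative, not from the patch): a subclass that only overrides visitClassType() can harvest every class referenced by a type signature, since the other methods default to no-ops that return this for nested visits.

    import java.util.ArrayList;
    import java.util.List;

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.signature.SignatureReader;
    import scala.tools.asm.signature.SignatureVisitor;

    public class ClassTypeCollector extends SignatureVisitor {
        final List<String> names = new ArrayList<String>();

        public ClassTypeCollector() {
            super(Opcodes.ASM4);
        }

        @Override
        public void visitClassType(String name) {
            names.add(name); // internal name, e.g. "java/util/Map"
        }

        public static void main(String[] args) {
            ClassTypeCollector collector = new ClassTypeCollector();
            new SignatureReader("Ljava/util/Map<Ljava/lang/String;Ljava/lang/Integer;>;")
                    .acceptType(collector);
            System.out.println(collector.names); // [java/util/Map, java/lang/String, java/lang/Integer]
        }
    }
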
diff --git a/src/asm/scala/tools/asm/signature/SignatureWriter.java b/src/asm/scala/tools/asm/signature/SignatureWriter.java
index a59fdfd..ebf4fe0 100644
--- a/src/asm/scala/tools/asm/signature/SignatureWriter.java
+++ b/src/asm/scala/tools/asm/signature/SignatureWriter.java
@@ -224,4 +224,4 @@ public class SignatureWriter extends SignatureVisitor {
         }
         argumentStack /= 2;
     }
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/tree/AbstractInsnNode.java b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
index 471f842..411eead 100644
--- a/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
@@ -148,7 +148,8 @@ public abstract class AbstractInsnNode {
     /**
      * Constructs a new {@link AbstractInsnNode}.
      *
-     * @param opcode the opcode of the instruction to be constructed.
+     * @param opcode
+     *            the opcode of the instruction to be constructed.
      */
     protected AbstractInsnNode(final int opcode) {
         this.opcode = opcode;
@@ -197,38 +198,47 @@ public abstract class AbstractInsnNode {
     /**
      * Makes the given code visitor visit this instruction.
      *
-     * @param cv a code visitor.
+     * @param cv
+     *            a code visitor.
      */
     public abstract void accept(final MethodVisitor cv);
 
     /**
      * Returns a copy of this instruction.
      *
-     * @param labels a map from LabelNodes to cloned LabelNodes.
+     * @param labels
+     *            a map from LabelNodes to cloned LabelNodes.
      * @return a copy of this instruction. The returned instruction does not
      *         belong to any {@link InsnList}.
      */
-    public abstract AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels);
+    public abstract AbstractInsnNode clone(
+            final Map<LabelNode, LabelNode> labels);
 
     /**
      * Returns the clone of the given label.
      *
-     * @param label a label.
-     * @param map a map from LabelNodes to cloned LabelNodes.
+     * @param label
+     *            a label.
+     * @param map
+     *            a map from LabelNodes to cloned LabelNodes.
      * @return the clone of the given label.
      */
-    static LabelNode clone(final LabelNode label, final Map<LabelNode, LabelNode> map) {
+    static LabelNode clone(final LabelNode label,
+            final Map<LabelNode, LabelNode> map) {
         return map.get(label);
     }
 
     /**
      * Returns the clones of the given labels.
      *
-     * @param labels a list of labels.
-     * @param map a map from LabelNodes to cloned LabelNodes.
+     * @param labels
+     *            a list of labels.
+     * @param map
+     *            a map from LabelNodes to cloned LabelNodes.
      * @return the clones of the given labels.
      */
-    static LabelNode[] clone(final List<LabelNode> labels, final Map<LabelNode, LabelNode> map) {
+    static LabelNode[] clone(final List<LabelNode> labels,
+            final Map<LabelNode, LabelNode> map) {
         LabelNode[] clones = new LabelNode[labels.size()];
         for (int i = 0; i < clones.length; ++i) {
             clones[i] = map.get(labels.get(i));
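
The clone(labels) contract documented above expects the caller to pre-populate a map from original LabelNodes to fresh ones, so that cloned jump and frame instructions point at the cloned labels. A small sketch of that pattern (the helper name is mine; it assumes freshly allocated labels are acceptable):

    import java.util.HashMap;
    import java.util.Map;

    import scala.tools.asm.tree.AbstractInsnNode;
    import scala.tools.asm.tree.InsnList;
    import scala.tools.asm.tree.LabelNode;
    import scala.tools.asm.tree.MethodNode;

    public class CloneInstructions {
        /** Returns a copy of a method's instruction list with freshly allocated labels. */
        static InsnList copyInstructions(MethodNode method) {
            // First pass: map every original LabelNode to a new one.
            Map<LabelNode, LabelNode> labelMap = new HashMap<LabelNode, LabelNode>();
            for (AbstractInsnNode insn = method.instructions.getFirst(); insn != null; insn = insn.getNext()) {
                if (insn instanceof LabelNode) {
                    labelMap.put((LabelNode) insn, new LabelNode());
                }
            }
            // Second pass: clone each instruction against that map.
            InsnList copy = new InsnList();
            for (AbstractInsnNode insn = method.instructions.getFirst(); insn != null; insn = insn.getNext()) {
                copy.add(insn.clone(labelMap));
            }
            return copy;
        }
    }
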
diff --git a/src/asm/scala/tools/asm/tree/AnnotationNode.java b/src/asm/scala/tools/asm/tree/AnnotationNode.java
index 9f13255..1f4beef 100644
--- a/src/asm/scala/tools/asm/tree/AnnotationNode.java
+++ b/src/asm/scala/tools/asm/tree/AnnotationNode.java
@@ -52,11 +52,11 @@ public class AnnotationNode extends AnnotationVisitor {
      * as two consecutive elements in the list. The name is a {@link String},
      * and the value may be a {@link Byte}, {@link Boolean}, {@link Character},
      * {@link Short}, {@link Integer}, {@link Long}, {@link Float},
-     * {@link Double}, {@link String} or {@link org.objectweb.asm.Type}, or an
+     * {@link Double}, {@link String} or {@link scala.tools.asm.Type}, or an
      * two elements String array (for enumeration values), a
      * {@link AnnotationNode}, or a {@link List} of values of one of the
-     * preceding types. The list may be <tt>null</tt> if there is no name
-     * value pair.
+     * preceding types. The list may be <tt>null</tt> if there is no name value
+     * pair.
      */
     public List<Object> values;
 
@@ -65,7 +65,8 @@ public class AnnotationNode extends AnnotationVisitor {
      * constructor</i>. Instead, they must use the
      * {@link #AnnotationNode(int, String)} version.
      *
-     * @param desc the class descriptor of the annotation class.
+     * @param desc
+     *            the class descriptor of the annotation class.
      */
     public AnnotationNode(final String desc) {
         this(Opcodes.ASM4, desc);
@@ -74,9 +75,11 @@ public class AnnotationNode extends AnnotationVisitor {
     /**
      * Constructs a new {@link AnnotationNode}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
-     * @param desc the class descriptor of the annotation class.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
+     * @param desc
+     *            the class descriptor of the annotation class.
      */
     public AnnotationNode(final int api, final String desc) {
         super(api);
@@ -86,7 +89,8 @@ public class AnnotationNode extends AnnotationVisitor {
     /**
      * Constructs a new {@link AnnotationNode} to visit an array value.
      *
-     * @param values where the visited values must be stored.
+     * @param values
+     *            where the visited values must be stored.
      */
     AnnotationNode(final List<Object> values) {
         super(Opcodes.ASM4);
@@ -109,11 +113,8 @@ public class AnnotationNode extends AnnotationVisitor {
     }
 
     @Override
-    public void visitEnum(
-        final String name,
-        final String desc,
-        final String value)
-    {
+    public void visitEnum(final String name, final String desc,
+            final String value) {
         if (values == null) {
             values = new ArrayList<Object>(this.desc != null ? 2 : 1);
         }
@@ -124,10 +125,8 @@ public class AnnotationNode extends AnnotationVisitor {
     }
 
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String name,
-        final String desc)
-    {
+    public AnnotationVisitor visitAnnotation(final String name,
+            final String desc) {
         if (values == null) {
             values = new ArrayList<Object>(this.desc != null ? 2 : 1);
         }
@@ -166,7 +165,8 @@ public class AnnotationNode extends AnnotationVisitor {
      * recursively, do not contain elements that were introduced in more recent
      * versions of the ASM API than the given version.
      *
-     * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+     * @param api
+     *            an ASM API version. Must be one of {@link Opcodes#ASM4}.
      */
     public void check(final int api) {
         // nothing to do
@@ -175,7 +175,8 @@ public class AnnotationNode extends AnnotationVisitor {
     /**
      * Makes the given visitor visit this annotation.
      *
-     * @param av an annotation visitor. Maybe <tt>null</tt>.
+     * @param av
+     *            an annotation visitor. May be <tt>null</tt>.
      */
     public void accept(final AnnotationVisitor av) {
         if (av != null) {
@@ -193,15 +194,15 @@ public class AnnotationNode extends AnnotationVisitor {
     /**
      * Makes the given visitor visit a given annotation value.
      *
-     * @param av an annotation visitor. Maybe <tt>null</tt>.
-     * @param name the value name.
-     * @param value the actual value.
+     * @param av
+     *            an annotation visitor. May be <tt>null</tt>.
+     * @param name
+     *            the value name.
+     * @param value
+     *            the actual value.
      */
-    static void accept(
-        final AnnotationVisitor av,
-        final String name,
-        final Object value)
-    {
+    static void accept(final AnnotationVisitor av, final String name,
+            final Object value) {
         if (av != null) {
             if (value instanceof String[]) {
                 String[] typeconst = (String[]) value;
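
The values field documented above flattens name/value pairs into consecutive list elements, so consumers step through it two entries at a time. A minimal sketch (the class and method names are illustrative):

    import scala.tools.asm.tree.AnnotationNode;

    public class ReadAnnotationValues {
        /** Prints the name/value pairs stored in an AnnotationNode's flattened values list. */
        static void dump(AnnotationNode annotation) {
            if (annotation.values == null) {
                return; // no name/value pairs
            }
            for (int i = 0; i < annotation.values.size(); i += 2) {
                String name = (String) annotation.values.get(i);
                // Byte, Boolean, Character, Short, Integer, Long, Float, Double, String, Type,
                // a two-element String[] (enum constant), an AnnotationNode, or a List of such values.
                Object value = annotation.values.get(i + 1);
                System.out.println(name + " = " + value);
            }
        }
    }
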
diff --git a/src/asm/scala/tools/asm/tree/ClassNode.java b/src/asm/scala/tools/asm/tree/ClassNode.java
index 64effae..c3d9999 100644
--- a/src/asm/scala/tools/asm/tree/ClassNode.java
+++ b/src/asm/scala/tools/asm/tree/ClassNode.java
@@ -53,33 +53,33 @@ public class ClassNode extends ClassVisitor {
     public int version;
 
     /**
-     * The class's access flags (see {@link org.objectweb.asm.Opcodes}). This
+     * The class's access flags (see {@link scala.tools.asm.Opcodes}). This
      * field also indicates if the class is deprecated.
      */
     public int access;
 
     /**
      * The internal name of the class (see
-     * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+     * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
      */
     public String name;
 
     /**
-     * The signature of the class. Mayt be <tt>null</tt>.
+     * The signature of the class. May be <tt>null</tt>.
      */
     public String signature;
 
     /**
      * The internal of name of the super class (see
-     * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). For
-     * interfaces, the super class is {@link Object}. May be <tt>null</tt>,
-     * but only for the {@link Object} class.
+     * {@link scala.tools.asm.Type#getInternalName() getInternalName}). For
+     * interfaces, the super class is {@link Object}. May be <tt>null</tt>, but
+     * only for the {@link Object} class.
      */
     public String superName;
 
     /**
      * The internal names of the class's interfaces (see
-     * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). This
+     * {@link scala.tools.asm.Type#getInternalName() getInternalName}). This
      * list is a list of {@link String} objects.
      */
     public List<String> interfaces;
@@ -91,7 +91,7 @@ public class ClassNode extends ClassVisitor {
     public String sourceFile;
 
     /**
-     * Debug information to compute the correspondance between source and
+     * Debug information to compute the correspondence between source and
      * compiled elements of the class. May be <tt>null</tt>.
      */
     public String sourceDebug;
@@ -109,8 +109,8 @@ public class ClassNode extends ClassVisitor {
     public String outerMethod;
 
     /**
-     * The descriptor of the method that contains the class, or <tt>null</tt>
-     * if the class is not enclosed in a method.
+     * The descriptor of the method that contains the class, or <tt>null</tt> if
+     * the class is not enclosed in a method.
      */
     public String outerMethodDesc;
 
@@ -118,7 +118,7 @@ public class ClassNode extends ClassVisitor {
      * The runtime visible annotations of this class. This list is a list of
      * {@link AnnotationNode} objects. May be <tt>null</tt>.
      *
-     * @associates org.objectweb.asm.tree.AnnotationNode
+     * @associates scala.tools.asm.tree.AnnotationNode
      * @label visible
      */
     public List<AnnotationNode> visibleAnnotations;
@@ -127,7 +127,7 @@ public class ClassNode extends ClassVisitor {
      * The runtime invisible annotations of this class. This list is a list of
      * {@link AnnotationNode} objects. May be <tt>null</tt>.
      *
-     * @associates org.objectweb.asm.tree.AnnotationNode
+     * @associates scala.tools.asm.tree.AnnotationNode
      * @label invisible
      */
     public List<AnnotationNode> invisibleAnnotations;
@@ -136,7 +136,7 @@ public class ClassNode extends ClassVisitor {
      * The non standard attributes of this class. This list is a list of
      * {@link Attribute} objects. May be <tt>null</tt>.
      *
-     * @associates org.objectweb.asm.Attribute
+     * @associates scala.tools.asm.Attribute
      */
     public List<Attribute> attrs;
 
@@ -144,7 +144,7 @@ public class ClassNode extends ClassVisitor {
      * Informations about the inner classes of this class. This list is a list
      * of {@link InnerClassNode} objects.
      *
-     * @associates org.objectweb.asm.tree.InnerClassNode
+     * @associates scala.tools.asm.tree.InnerClassNode
      */
     public List<InnerClassNode> innerClasses;
 
@@ -152,7 +152,7 @@ public class ClassNode extends ClassVisitor {
      * The fields of this class. This list is a list of {@link FieldNode}
      * objects.
      *
-     * @associates org.objectweb.asm.tree.FieldNode
+     * @associates scala.tools.asm.tree.FieldNode
      */
     public List<FieldNode> fields;
 
@@ -160,7 +160,7 @@ public class ClassNode extends ClassVisitor {
      * The methods of this class. This list is a list of {@link MethodNode}
      * objects.
      *
-     * @associates org.objectweb.asm.tree.MethodNode
+     * @associates scala.tools.asm.tree.MethodNode
      */
     public List<MethodNode> methods;
 
@@ -176,8 +176,9 @@ public class ClassNode extends ClassVisitor {
     /**
      * Constructs a new {@link ClassNode}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
      */
     public ClassNode(final int api) {
         super(api);
@@ -192,14 +193,9 @@ public class ClassNode extends ClassVisitor {
     // ------------------------------------------------------------------------
 
     @Override
-    public void visit(
-        final int version,
-        final int access,
-        final String name,
-        final String signature,
-        final String superName,
-        final String[] interfaces)
-    {
+    public void visit(final int version, final int access, final String name,
+            final String signature, final String superName,
+            final String[] interfaces) {
         this.version = version;
         this.access = access;
         this.name = name;
@@ -217,21 +213,16 @@ public class ClassNode extends ClassVisitor {
     }
 
     @Override
-    public void visitOuterClass(
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitOuterClass(final String owner, final String name,
+            final String desc) {
         outerClass = owner;
         outerMethod = name;
         outerMethodDesc = desc;
     }
 
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public AnnotationVisitor visitAnnotation(final String desc,
+            final boolean visible) {
         AnnotationNode an = new AnnotationNode(desc);
         if (visible) {
             if (visibleAnnotations == null) {
@@ -256,44 +247,25 @@ public class ClassNode extends ClassVisitor {
     }
 
     @Override
-    public void visitInnerClass(
-        final String name,
-        final String outerName,
-        final String innerName,
-        final int access)
-    {
-        InnerClassNode icn = new InnerClassNode(name,
-                outerName,
-                innerName,
+    public void visitInnerClass(final String name, final String outerName,
+            final String innerName, final int access) {
+        InnerClassNode icn = new InnerClassNode(name, outerName, innerName,
                 access);
         innerClasses.add(icn);
     }
 
     @Override
-    public FieldVisitor visitField(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final Object value)
-    {
+    public FieldVisitor visitField(final int access, final String name,
+            final String desc, final String signature, final Object value) {
         FieldNode fn = new FieldNode(access, name, desc, signature, value);
         fields.add(fn);
         return fn;
     }
 
     @Override
-    public MethodVisitor visitMethod(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final String[] exceptions)
-    {
-        MethodNode mn = new MethodNode(access,
-                name,
-                desc,
-                signature,
+    public MethodVisitor visitMethod(final int access, final String name,
+            final String desc, final String signature, final String[] exceptions) {
+        MethodNode mn = new MethodNode(access, name, desc, signature,
                 exceptions);
         methods.add(mn);
         return mn;
@@ -313,7 +285,8 @@ public class ClassNode extends ClassVisitor {
      * contain elements that were introduced in more recent versions of the ASM
      * API than the given version.
      *
-     * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+     * @param api
+     *            an ASM API version. Must be one of {@link Opcodes#ASM4}.
      */
     public void check(final int api) {
         // nothing to do
@@ -322,7 +295,8 @@ public class ClassNode extends ClassVisitor {
     /**
      * Makes the given class visitor visit this class.
      *
-     * @param cv a class visitor.
+     * @param cv
+     *            a class visitor.
      */
     public void accept(final ClassVisitor cv) {
         // visits header
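
For orientation, ClassNode is the root of the tree API: a ClassReader fills its public fields (name, superName, fields, methods, and so on) and accept() replays them into any ClassVisitor. A small usage sketch, not part of the patch, assuming the named class is resolvable on the classpath:

    import java.io.IOException;

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.tree.ClassNode;
    import scala.tools.asm.tree.MethodNode;

    public class ListMethods {
        public static void main(String[] args) throws IOException {
            ClassNode classNode = new ClassNode();
            // ClassReader(String) locates the class file for the given name on the classpath.
            new ClassReader("java.lang.Runnable").accept(classNode, 0);
            System.out.println(classNode.name + " extends " + classNode.superName);
            for (MethodNode method : classNode.methods) {
                System.out.println("  " + method.name + method.desc);
            }
        }
    }
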
diff --git a/src/asm/scala/tools/asm/tree/FieldInsnNode.java b/src/asm/scala/tools/asm/tree/FieldInsnNode.java
index 6b7a6a1..0c94f18 100644
--- a/src/asm/scala/tools/asm/tree/FieldInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/FieldInsnNode.java
@@ -43,7 +43,7 @@ public class FieldInsnNode extends AbstractInsnNode {
 
     /**
      * The internal name of the field's owner class (see
-     * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+     * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
      */
     public String owner;
 
@@ -53,26 +53,27 @@ public class FieldInsnNode extends AbstractInsnNode {
     public String name;
 
     /**
-     * The field's descriptor (see {@link org.objectweb.asm.Type}).
+     * The field's descriptor (see {@link scala.tools.asm.Type}).
      */
     public String desc;
 
     /**
      * Constructs a new {@link FieldInsnNode}.
      *
-     * @param opcode the opcode of the type instruction to be constructed. This
-     *        opcode must be GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
-     * @param owner the internal name of the field's owner class (see
-     *        {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
-     * @param name the field's name.
-     * @param desc the field's descriptor (see {@link org.objectweb.asm.Type}).
+     * @param opcode
+     *            the opcode of the type instruction to be constructed. This
+     *            opcode must be GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
+     * @param owner
+     *            the internal name of the field's owner class (see
+     *            {@link scala.tools.asm.Type#getInternalName()
+     *            getInternalName}).
+     * @param name
+     *            the field's name.
+     * @param desc
+     *            the field's descriptor (see {@link scala.tools.asm.Type}).
      */
-    public FieldInsnNode(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public FieldInsnNode(final int opcode, final String owner,
+            final String name, final String desc) {
         super(opcode);
         this.owner = owner;
         this.name = name;
@@ -82,8 +83,9 @@ public class FieldInsnNode extends AbstractInsnNode {
     /**
      * Sets the opcode of this instruction.
      *
-     * @param opcode the new instruction opcode. This opcode must be GETSTATIC,
-     *        PUTSTATIC, GETFIELD or PUTFIELD.
+     * @param opcode
+     *            the new instruction opcode. This opcode must be GETSTATIC,
+     *            PUTSTATIC, GETFIELD or PUTFIELD.
      */
     public void setOpcode(final int opcode) {
         this.opcode = opcode;
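
The constructor documented above takes the owner as an internal name and the field type as a descriptor. A hedged sketch of building the classic System.out.println sequence with tree nodes (the class and method names are illustrative):

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.FieldInsnNode;
    import scala.tools.asm.tree.InsnList;
    import scala.tools.asm.tree.LdcInsnNode;
    import scala.tools.asm.tree.MethodInsnNode;

    public class PrintlnSnippet {
        /** Builds the instructions for System.out.println("hello") as tree nodes. */
        static InsnList buildPrintln() {
            InsnList insns = new InsnList();
            // GETSTATIC java/lang/System.out : Ljava/io/PrintStream;
            insns.add(new FieldInsnNode(Opcodes.GETSTATIC, "java/lang/System", "out", "Ljava/io/PrintStream;"));
            insns.add(new LdcInsnNode("hello"));
            // INVOKEVIRTUAL java/io/PrintStream.println(Ljava/lang/String;)V
            insns.add(new MethodInsnNode(Opcodes.INVOKEVIRTUAL, "java/io/PrintStream", "println", "(Ljava/lang/String;)V"));
            return insns;
        }
    }
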
diff --git a/src/asm/scala/tools/asm/tree/FieldNode.java b/src/asm/scala/tools/asm/tree/FieldNode.java
index 9a1e170..61b614e 100644
--- a/src/asm/scala/tools/asm/tree/FieldNode.java
+++ b/src/asm/scala/tools/asm/tree/FieldNode.java
@@ -46,7 +46,7 @@ import scala.tools.asm.Opcodes;
 public class FieldNode extends FieldVisitor {
 
     /**
-     * The field's access flags (see {@link org.objectweb.asm.Opcodes}). This
+     * The field's access flags (see {@link scala.tools.asm.Opcodes}). This
      * field also indicates if the field is synthetic and/or deprecated.
      */
     public int access;
@@ -57,7 +57,7 @@ public class FieldNode extends FieldVisitor {
     public String name;
 
     /**
-     * The field's descriptor (see {@link org.objectweb.asm.Type}).
+     * The field's descriptor (see {@link scala.tools.asm.Type}).
      */
     public String desc;
 
@@ -67,8 +67,8 @@ public class FieldNode extends FieldVisitor {
     public String signature;
 
     /**
-     * The field's initial value. This field, which may be <tt>null</tt> if
-     * the field does not have an initial value, must be an {@link Integer}, a
+     * The field's initial value. This field, which may be <tt>null</tt> if the
+     * field does not have an initial value, must be an {@link Integer}, a
      * {@link Float}, a {@link Long}, a {@link Double} or a {@link String}.
      */
     public Object value;
@@ -77,7 +77,7 @@ public class FieldNode extends FieldVisitor {
      * The runtime visible annotations of this field. This list is a list of
      * {@link AnnotationNode} objects. May be <tt>null</tt>.
      *
-     * @associates org.objectweb.asm.tree.AnnotationNode
+     * @associates scala.tools.asm.tree.AnnotationNode
      * @label visible
      */
     public List<AnnotationNode> visibleAnnotations;
@@ -86,7 +86,7 @@ public class FieldNode extends FieldVisitor {
      * The runtime invisible annotations of this field. This list is a list of
      * {@link AnnotationNode} objects. May be <tt>null</tt>.
      *
-     * @associates org.objectweb.asm.tree.AnnotationNode
+     * @associates scala.tools.asm.tree.AnnotationNode
      * @label invisible
      */
     public List<AnnotationNode> invisibleAnnotations;
@@ -95,7 +95,7 @@ public class FieldNode extends FieldVisitor {
      * The non standard attributes of this field. This list is a list of
      * {@link Attribute} objects. May be <tt>null</tt>.
      *
-     * @associates org.objectweb.asm.Attribute
+     * @associates scala.tools.asm.Attribute
      */
     public List<Attribute> attrs;
 
@@ -104,25 +104,25 @@ public class FieldNode extends FieldVisitor {
      * constructor</i>. Instead, they must use the
      * {@link #FieldNode(int, int, String, String, String, Object)} version.
      *
-     * @param access the field's access flags (see
-     *        {@link org.objectweb.asm.Opcodes}). This parameter also indicates
-     *        if the field is synthetic and/or deprecated.
-     * @param name the field's name.
-     * @param desc the field's descriptor (see {@link org.objectweb.asm.Type
-     *        Type}).
-     * @param signature the field's signature.
-     * @param value the field's initial value. This parameter, which may be
-     *        <tt>null</tt> if the field does not have an initial value, must be
-     *        an {@link Integer}, a {@link Float}, a {@link Long}, a
-     *        {@link Double} or a {@link String}.
+     * @param access
+     *            the field's access flags (see
+     *            {@link scala.tools.asm.Opcodes}). This parameter also
+     *            indicates if the field is synthetic and/or deprecated.
+     * @param name
+     *            the field's name.
+     * @param desc
+     *            the field's descriptor (see {@link scala.tools.asm.Type
+     *            Type}).
+     * @param signature
+     *            the field's signature.
+     * @param value
+     *            the field's initial value. This parameter, which may be
+     *            <tt>null</tt> if the field does not have an initial value,
+     *            must be an {@link Integer}, a {@link Float}, a {@link Long}, a
+     *            {@link Double} or a {@link String}.
      */
-    public FieldNode(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final Object value)
-    {
+    public FieldNode(final int access, final String name, final String desc,
+            final String signature, final Object value) {
         this(Opcodes.ASM4, access, name, desc, signature, value);
     }
 
@@ -131,28 +131,28 @@ public class FieldNode extends FieldVisitor {
      * constructor</i>. Instead, they must use the
      * {@link #FieldNode(int, int, String, String, String, Object)} version.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
-     * @param access the field's access flags (see
-     *        {@link org.objectweb.asm.Opcodes}). This parameter also indicates
-     *        if the field is synthetic and/or deprecated.
-     * @param name the field's name.
-     * @param desc the field's descriptor (see {@link org.objectweb.asm.Type
-     *        Type}).
-     * @param signature the field's signature.
-     * @param value the field's initial value. This parameter, which may be
-     *        <tt>null</tt> if the field does not have an initial value, must be
-     *        an {@link Integer}, a {@link Float}, a {@link Long}, a
-     *        {@link Double} or a {@link String}.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
+     * @param access
+     *            the field's access flags (see
+     *            {@link scala.tools.asm.Opcodes}). This parameter also
+     *            indicates if the field is synthetic and/or deprecated.
+     * @param name
+     *            the field's name.
+     * @param desc
+     *            the field's descriptor (see {@link scala.tools.asm.Type
+     *            Type}).
+     * @param signature
+     *            the field's signature.
+     * @param value
+     *            the field's initial value. This parameter, which may be
+     *            <tt>null</tt> if the field does not have an initial value,
+     *            must be an {@link Integer}, a {@link Float}, a {@link Long}, a
+     *            {@link Double} or a {@link String}.
      */
-    public FieldNode(
-        final int api,
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final Object value)
-    {
+    public FieldNode(final int api, final int access, final String name,
+            final String desc, final String signature, final Object value) {
         super(api);
         this.access = access;
         this.name = name;
@@ -166,10 +166,8 @@ public class FieldNode extends FieldVisitor {
     // ------------------------------------------------------------------------
 
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public AnnotationVisitor visitAnnotation(final String desc,
+            final boolean visible) {
         AnnotationNode an = new AnnotationNode(desc);
         if (visible) {
             if (visibleAnnotations == null) {
@@ -207,7 +205,8 @@ public class FieldNode extends FieldVisitor {
      * contain elements that were introduced in more recent versions of the ASM
      * API than the given version.
      *
-     * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+     * @param api
+     *            an ASM API version. Must be one of {@link Opcodes#ASM4}.
      */
     public void check(final int api) {
         // nothing to do
@@ -216,7 +215,8 @@ public class FieldNode extends FieldVisitor {
     /**
      * Makes the given class visitor visit this field.
      *
-     * @param cv a class visitor.
+     * @param cv
+     *            a class visitor.
      */
     public void accept(final ClassVisitor cv) {
         FieldVisitor fv = cv.visitField(access, name, desc, signature, value);
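
As a small illustration of the reflowed constructor Javadoc (the field name and value are mine): a public static final int constant whose initial value goes into the constant pool would be declared like this.

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.ClassNode;
    import scala.tools.asm.tree.FieldNode;

    public class AddConstantField {
        /** Adds the equivalent of `public static final int VERSION = 42;` to a ClassNode. */
        static void addVersionField(ClassNode classNode) {
            int access = Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL;
            // desc "I" is the descriptor for int; signature is null because the type is not generic.
            classNode.fields.add(new FieldNode(access, "VERSION", "I", null, Integer.valueOf(42)));
        }
    }
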
diff --git a/src/asm/scala/tools/asm/tree/FrameNode.java b/src/asm/scala/tools/asm/tree/FrameNode.java
index 66825de..f13fc66 100644
--- a/src/asm/scala/tools/asm/tree/FrameNode.java
+++ b/src/asm/scala/tools/asm/tree/FrameNode.java
@@ -45,8 +45,9 @@ import scala.tools.asm.Opcodes;
  * the target of a jump instruction, or that starts an exception handler block.
  * The stack map frame types must describe the values of the local variables and
  * of the operand stack elements <i>just before</i> <b>i</b> is executed. <br>
- * <br> (*) this is mandatory only for classes whose version is greater than or
- * equal to {@link Opcodes#V1_6 V1_6}.
+ * <br>
+ * (*) this is mandatory only for classes whose version is greater than or equal
+ * to {@link Opcodes#V1_6 V1_6}.
  *
  * @author Eric Bruneton
  */
@@ -83,48 +84,48 @@ public class FrameNode extends AbstractInsnNode {
     /**
      * Constructs a new {@link FrameNode}.
      *
-     * @param type the type of this frame. Must be {@link Opcodes#F_NEW} for
-     *        expanded frames, or {@link Opcodes#F_FULL},
-     *        {@link Opcodes#F_APPEND}, {@link Opcodes#F_CHOP},
-     *        {@link Opcodes#F_SAME} or {@link Opcodes#F_APPEND},
-     *        {@link Opcodes#F_SAME1} for compressed frames.
-     * @param nLocal number of local variables of this stack map frame.
-     * @param local the types of the local variables of this stack map frame.
-     *        Elements of this list can be Integer, String or LabelNode objects
-     *        (for primitive, reference and uninitialized types respectively -
-     *        see {@link MethodVisitor}).
-     * @param nStack number of operand stack elements of this stack map frame.
-     * @param stack the types of the operand stack elements of this stack map
-     *        frame. Elements of this list can be Integer, String or LabelNode
-     *        objects (for primitive, reference and uninitialized types
-     *        respectively - see {@link MethodVisitor}).
+     * @param type
+     *            the type of this frame. Must be {@link Opcodes#F_NEW} for
+     *            expanded frames, or {@link Opcodes#F_FULL},
+     *            {@link Opcodes#F_APPEND}, {@link Opcodes#F_CHOP},
+     *            {@link Opcodes#F_SAME} or {@link Opcodes#F_APPEND},
+     *            {@link Opcodes#F_SAME1} for compressed frames.
+     * @param nLocal
+     *            number of local variables of this stack map frame.
+     * @param local
+     *            the types of the local variables of this stack map frame.
+     *            Elements of this list can be Integer, String or LabelNode
+     *            objects (for primitive, reference and uninitialized types
+     *            respectively - see {@link MethodVisitor}).
+     * @param nStack
+     *            number of operand stack elements of this stack map frame.
+     * @param stack
+     *            the types of the operand stack elements of this stack map
+     *            frame. Elements of this list can be Integer, String or
+     *            LabelNode objects (for primitive, reference and uninitialized
+     *            types respectively - see {@link MethodVisitor}).
      */
-    public FrameNode(
-        final int type,
-        final int nLocal,
-        final Object[] local,
-        final int nStack,
-        final Object[] stack)
-    {
+    public FrameNode(final int type, final int nLocal, final Object[] local,
+            final int nStack, final Object[] stack) {
         super(-1);
         this.type = type;
         switch (type) {
-            case Opcodes.F_NEW:
-            case Opcodes.F_FULL:
-                this.local = asList(nLocal, local);
-                this.stack = asList(nStack, stack);
-                break;
-            case Opcodes.F_APPEND:
-                this.local = asList(nLocal, local);
-                break;
-            case Opcodes.F_CHOP:
-                this.local = Arrays.asList(new Object[nLocal]);
-                break;
-            case Opcodes.F_SAME:
-                break;
-            case Opcodes.F_SAME1:
-                this.stack = asList(1, stack);
-                break;
+        case Opcodes.F_NEW:
+        case Opcodes.F_FULL:
+            this.local = asList(nLocal, local);
+            this.stack = asList(nStack, stack);
+            break;
+        case Opcodes.F_APPEND:
+            this.local = asList(nLocal, local);
+            break;
+        case Opcodes.F_CHOP:
+            this.local = Arrays.asList(new Object[nLocal]);
+            break;
+        case Opcodes.F_SAME:
+            break;
+        case Opcodes.F_SAME1:
+            this.stack = asList(1, stack);
+            break;
         }
     }
 
@@ -136,31 +137,29 @@ public class FrameNode extends AbstractInsnNode {
     /**
      * Makes the given visitor visit this stack map frame.
      *
-     * @param mv a method visitor.
+     * @param mv
+     *            a method visitor.
      */
     @Override
     public void accept(final MethodVisitor mv) {
         switch (type) {
-            case Opcodes.F_NEW:
-            case Opcodes.F_FULL:
-                mv.visitFrame(type,
-                        local.size(),
-                        asArray(local),
-                        stack.size(),
-                        asArray(stack));
-                break;
-            case Opcodes.F_APPEND:
-                mv.visitFrame(type, local.size(), asArray(local), 0, null);
-                break;
-            case Opcodes.F_CHOP:
-                mv.visitFrame(type, local.size(), null, 0, null);
-                break;
-            case Opcodes.F_SAME:
-                mv.visitFrame(type, 0, null, 0, null);
-                break;
-            case Opcodes.F_SAME1:
-                mv.visitFrame(type, 0, null, 1, asArray(stack));
-                break;
+        case Opcodes.F_NEW:
+        case Opcodes.F_FULL:
+            mv.visitFrame(type, local.size(), asArray(local), stack.size(),
+                    asArray(stack));
+            break;
+        case Opcodes.F_APPEND:
+            mv.visitFrame(type, local.size(), asArray(local), 0, null);
+            break;
+        case Opcodes.F_CHOP:
+            mv.visitFrame(type, local.size(), null, 0, null);
+            break;
+        case Opcodes.F_SAME:
+            mv.visitFrame(type, 0, null, 0, null);
+            break;
+        case Opcodes.F_SAME1:
+            mv.visitFrame(type, 0, null, 1, asArray(stack));
+            break;
         }
     }
 
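
The constructor parameters rewrapped above only matter for some frame types, as the switch shows. Two illustrative constructions follow, a compressed F_SAME frame and a full frame with two locals and an empty stack; the class name is hypothetical.

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.FrameNode;

    public class FrameExamples {
        // Compressed frame: locals and stack are unchanged since the previous frame.
        static FrameNode sameFrame() {
            return new FrameNode(Opcodes.F_SAME, 0, null, 0, null);
        }

        // Full frame: two locals (a String reference and an int), empty operand stack.
        static FrameNode fullFrame() {
            return new FrameNode(Opcodes.F_FULL, 2,
                    new Object[] { "java/lang/String", Opcodes.INTEGER },
                    0, new Object[0]);
        }
    }
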
diff --git a/src/asm/scala/tools/asm/tree/IincInsnNode.java b/src/asm/scala/tools/asm/tree/IincInsnNode.java
index 75ac408..f9adf2e 100644
--- a/src/asm/scala/tools/asm/tree/IincInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/IincInsnNode.java
@@ -54,8 +54,10 @@ public class IincInsnNode extends AbstractInsnNode {
     /**
      * Constructs a new {@link IincInsnNode}.
      *
-     * @param var index of the local variable to be incremented.
-     * @param incr increment amount to increment the local variable by.
+     * @param var
+     *            index of the local variable to be incremented.
+     * @param incr
+     *            increment amount to increment the local variable by.
      */
     public IincInsnNode(final int var, final int incr) {
         super(Opcodes.IINC);
@@ -77,4 +79,4 @@ public class IincInsnNode extends AbstractInsnNode {
     public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
         return new IincInsnNode(var, incr);
     }
-}
\ No newline at end of file
+}
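
For completeness, a one-line illustration of the two constructor parameters (local slot index and increment); this corresponds to the bytecode javac emits for `i += 3` when `i` lives in local slot 2. The class name is invented.

    import scala.tools.asm.tree.IincInsnNode;

    public class IncrementExample {
        static IincInsnNode incrementSlotTwoByThree() {
            return new IincInsnNode(2, 3); // IINC 2, 3
        }
    }
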
diff --git a/src/asm/scala/tools/asm/tree/InnerClassNode.java b/src/asm/scala/tools/asm/tree/InnerClassNode.java
index 4579488..aa3810c 100644
--- a/src/asm/scala/tools/asm/tree/InnerClassNode.java
+++ b/src/asm/scala/tools/asm/tree/InnerClassNode.java
@@ -40,14 +40,14 @@ public class InnerClassNode {
 
     /**
      * The internal name of an inner class (see
-     * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+     * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
      */
     public String name;
 
     /**
      * The internal name of the class to which the inner class belongs (see
-     * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). May
-     * be <tt>null</tt>.
+     * {@link scala.tools.asm.Type#getInternalName() getInternalName}). May be
+     * <tt>null</tt>.
      */
     public String outerName;
 
@@ -66,24 +66,23 @@ public class InnerClassNode {
     /**
      * Constructs a new {@link InnerClassNode}.
      *
-     * @param name the internal name of an inner class (see
-     *        {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
-     * @param outerName the internal name of the class to which the inner class
-     *        belongs (see
-     *        {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
-     *        May be <tt>null</tt>.
-     * @param innerName the (simple) name of the inner class inside its
-     *        enclosing class. May be <tt>null</tt> for anonymous inner
-     *        classes.
-     * @param access the access flags of the inner class as originally declared
-     *        in the enclosing class.
+     * @param name
+     *            the internal name of an inner class (see
+     *            {@link scala.tools.asm.Type#getInternalName()
+     *            getInternalName}).
+     * @param outerName
+     *            the internal name of the class to which the inner class
+     *            belongs (see {@link scala.tools.asm.Type#getInternalName()
+     *            getInternalName}). May be <tt>null</tt>.
+     * @param innerName
+     *            the (simple) name of the inner class inside its enclosing
+     *            class. May be <tt>null</tt> for anonymous inner classes.
+     * @param access
+     *            the access flags of the inner class as originally declared in
+     *            the enclosing class.
      */
-    public InnerClassNode(
-        final String name,
-        final String outerName,
-        final String innerName,
-        final int access)
-    {
+    public InnerClassNode(final String name, final String outerName,
+            final String innerName, final int access) {
         this.name = name;
         this.outerName = outerName;
         this.innerName = innerName;
@@ -93,7 +92,8 @@ public class InnerClassNode {
     /**
      * Makes the given class visitor visit this inner class.
      *
-     * @param cv a class visitor.
+     * @param cv
+     *            a class visitor.
      */
     public void accept(final ClassVisitor cv) {
         cv.visitInnerClass(name, outerName, innerName, access);
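
The nullability rules spelled out in the rewrapped Javadoc are easiest to see side by side: a named member class carries all four values, while an anonymous class has neither an outerName nor an innerName. A hypothetical sketch (the pkg/Outer names are invented):

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.InnerClassNode;

    public class InnerClassEntries {
        // Entry for a named member class Outer.Inner.
        static InnerClassNode member() {
            return new InnerClassNode("pkg/Outer$Inner", "pkg/Outer", "Inner", Opcodes.ACC_PUBLIC);
        }

        // Entry for an anonymous class: both outerName and innerName are null.
        static InnerClassNode anonymous() {
            return new InnerClassNode("pkg/Outer$1", null, null, 0);
        }
    }
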
diff --git a/src/asm/scala/tools/asm/tree/InsnList.java b/src/asm/scala/tools/asm/tree/InsnList.java
index dedd3bb..b1e2d97 100644
--- a/src/asm/scala/tools/asm/tree/InsnList.java
+++ b/src/asm/scala/tools/asm/tree/InsnList.java
@@ -73,8 +73,8 @@ public class InsnList {
     /**
      * Returns the first instruction in this list.
      *
-     * @return the first instruction in this list, or <tt>null</tt> if the
-     *         list is empty.
+     * @return the first instruction in this list, or <tt>null</tt> if the list
+     *         is empty.
      */
     public AbstractInsnNode getFirst() {
         return first;
@@ -96,9 +96,11 @@ public class InsnList {
      * time it is called. Once the cache is built, this method run in constant
      * time. This cache is invalidated by all the methods that modify the list.
      *
-     * @param index the index of the instruction that must be returned.
+     * @param index
+     *            the index of the instruction that must be returned.
      * @return the instruction whose index is given.
-     * @throws IndexOutOfBoundsException if (index < 0 || index >= size()).
+     * @throws IndexOutOfBoundsException
+     *             if (index < 0 || index >= size()).
      */
     public AbstractInsnNode get(final int index) {
         if (index < 0 || index >= size) {
@@ -111,11 +113,12 @@ public class InsnList {
     }
 
     /**
-     * Returns <tt>true</tt> if the given instruction belongs to this list.
-     * This method always scans the instructions of this list until it finds the
+     * Returns <tt>true</tt> if the given instruction belongs to this list. This
+     * method always scans the instructions of this list until it finds the
      * given instruction or reaches the end of the list.
      *
-     * @param insn an instruction.
+     * @param insn
+     *            an instruction.
      * @return <tt>true</tt> if the given instruction belongs to this list.
      */
     public boolean contains(final AbstractInsnNode insn) {
@@ -133,7 +136,8 @@ public class InsnList {
      * constant time. The cache is invalidated by all the methods that modify
      * the list.
      *
-     * @param insn an instruction <i>of this list</i>.
+     * @param insn
+     *            an instruction <i>of this list</i>.
      * @return the index of the given instruction in this list. <i>The result of
      *         this method is undefined if the given instruction does not belong
      *         to this list</i>. Use {@link #contains contains} to test if an
@@ -149,7 +153,8 @@ public class InsnList {
     /**
      * Makes the given visitor visit all of the instructions in this list.
      *
-     * @param mv the method visitor that must visit the instructions.
+     * @param mv
+     *            the method visitor that must visit the instructions.
      */
     public void accept(final MethodVisitor mv) {
         AbstractInsnNode insn = first;
@@ -198,9 +203,11 @@ public class InsnList {
     /**
      * Replaces an instruction of this list with another instruction.
      *
-     * @param location an instruction <i>of this list</i>.
-     * @param insn another instruction, <i>which must not belong to any
-     *        {@link InsnList}</i>.
+     * @param location
+     *            an instruction <i>of this list</i>.
+     * @param insn
+     *            another instruction, <i>which must not belong to any
+     *            {@link InsnList}</i>.
      */
     public void set(final AbstractInsnNode location, final AbstractInsnNode insn) {
         AbstractInsnNode next = location.next;
@@ -232,10 +239,19 @@ public class InsnList {
     /**
      * Adds the given instruction to the end of this list.
      *
-     * @param insn an instruction, <i>which must not belong to any
-     *        {@link InsnList}</i>.
+     * @param insn
+     *            an instruction, <i>which must not belong to any
+     *            {@link InsnList}</i>.
      */
     public void add(final AbstractInsnNode insn) {
+        if(insn.prev != null || insn.next != null) {
+            // Adding an instruction that still refers to others (in the same or another InsnList) leads to hard-to-debug bugs.
+            // Initially everything may look OK (e.g. iteration follows `next`, so a stale `prev` isn't noticed).
+            // However, a stale link brings the doubly-linked list into disarray, e.g. upon removing an element,
+            // which results in the `next` of a stale `prev` being updated, among other failure scenarios.
+            // Better fail early.
+            throw new RuntimeException("Instruction " + insn + " already belongs to some InsnList.");
+        }
         ++size;
         if (last == null) {
             first = insn;
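
The new guard added above fails fast when an instruction that still carries prev/next links is re-added. A minimal sketch of what it rules out (the class name is illustrative): once a node sits inside a list with more than one element, it must be cloned or recreated before it can go into another list.

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.InsnList;
    import scala.tools.asm.tree.InsnNode;

    public class OneListPerInstruction {
        public static void main(String[] args) {
            InsnList first = new InsnList();
            InsnNode nop = new InsnNode(Opcodes.NOP);
            first.add(nop);
            first.add(new InsnNode(Opcodes.NOP)); // `nop` now has a non-null `next` link
            InsnList second = new InsnList();
            // second.add(nop);  // would throw: the node still links into `first`
            second.add(new InsnNode(Opcodes.NOP)); // use a fresh node (or a clone) per list instead
        }
    }
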
@@ -252,8 +268,9 @@ public class InsnList {
     /**
      * Adds the given instructions to the end of this list.
      *
-     * @param insns an instruction list, which is cleared during the process.
-     *        This list must be different from 'this'.
+     * @param insns
+     *            an instruction list, which is cleared during the process. This
+     *            list must be different from 'this'.
      */
     public void add(final InsnList insns) {
         if (insns.size == 0) {
@@ -276,8 +293,9 @@ public class InsnList {
     /**
      * Inserts the given instruction at the begining of this list.
      *
-     * @param insn an instruction, <i>which must not belong to any
-     *        {@link InsnList}</i>.
+     * @param insn
+     *            an instruction, <i>which must not belong to any
+     *            {@link InsnList}</i>.
      */
     public void insert(final AbstractInsnNode insn) {
         ++size;
@@ -296,8 +314,9 @@ public class InsnList {
     /**
      * Inserts the given instructions at the begining of this list.
      *
-     * @param insns an instruction list, which is cleared during the process.
-     *        This list must be different from 'this'.
+     * @param insns
+     *            an instruction list, which is cleared during the process. This
+     *            list must be different from 'this'.
      */
     public void insert(final InsnList insns) {
         if (insns.size == 0) {
@@ -320,12 +339,15 @@ public class InsnList {
     /**
      * Inserts the given instruction after the specified instruction.
      *
-     * @param location an instruction <i>of this list</i> after which insn must be
-     *        inserted.
-     * @param insn the instruction to be inserted, <i>which must not belong to
-     *        any {@link InsnList}</i>.
+     * @param location
+     *            an instruction <i>of this list</i> after which insn must be
+     *            inserted.
+     * @param insn
+     *            the instruction to be inserted, <i>which must not belong to
+     *            any {@link InsnList}</i>.
      */
-    public void insert(final AbstractInsnNode location, final AbstractInsnNode insn) {
+    public void insert(final AbstractInsnNode location,
+            final AbstractInsnNode insn) {
         ++size;
         AbstractInsnNode next = location.next;
         if (next == null) {
@@ -343,10 +365,12 @@ public class InsnList {
     /**
      * Inserts the given instructions after the specified instruction.
      *
-     * @param location an instruction <i>of this list</i> after which the
-     *        instructions must be inserted.
-     * @param insns the instruction list to be inserted, which is cleared during
-     *        the process. This list must be different from 'this'.
+     * @param location
+     *            an instruction <i>of this list</i> after which the
+     *            instructions must be inserted.
+     * @param insns
+     *            the instruction list to be inserted, which is cleared during
+     *            the process. This list must be different from 'this'.
      */
     public void insert(final AbstractInsnNode location, final InsnList insns) {
         if (insns.size == 0) {
@@ -371,12 +395,15 @@ public class InsnList {
     /**
      * Inserts the given instruction before the specified instruction.
      *
-     * @param location an instruction <i>of this list</i> before which insn must be
-     *        inserted.
-     * @param insn the instruction to be inserted, <i>which must not belong to
-     *        any {@link InsnList}</i>.
+     * @param location
+     *            an instruction <i>of this list</i> before which insn must be
+     *            inserted.
+     * @param insn
+     *            the instruction to be inserted, <i>which must not belong to
+     *            any {@link InsnList}</i>.
      */
-    public void insertBefore(final AbstractInsnNode location, final AbstractInsnNode insn) {
+    public void insertBefore(final AbstractInsnNode location,
+            final AbstractInsnNode insn) {
         ++size;
         AbstractInsnNode prev = location.prev;
         if (prev == null) {
@@ -394,37 +421,39 @@ public class InsnList {
     /**
      * Inserts the given instructions before the specified instruction.
      *
-     * @param location  an instruction <i>of this list</i> before which the instructions
-     *        must be inserted.
-     * @param insns the instruction list to be inserted, which is cleared during
-     *        the process. This list must be different from 'this'.
+     * @param location
+     *            an instruction <i>of this list</i> before which the
+     *            instructions must be inserted.
+     * @param insns
+     *            the instruction list to be inserted, which is cleared during
+     *            the process. This list must be different from 'this'.
      */
-    public void insertBefore(final AbstractInsnNode location, final InsnList insns) {
+    public void insertBefore(final AbstractInsnNode location,
+            final InsnList insns) {
         if (insns.size == 0) {
             return;
         }
         size += insns.size;
         AbstractInsnNode ifirst = insns.first;
         AbstractInsnNode ilast = insns.last;
-        AbstractInsnNode prev = location .prev;
+        AbstractInsnNode prev = location.prev;
         if (prev == null) {
             first = ifirst;
         } else {
             prev.next = ifirst;
         }
-        location .prev = ilast;
-        ilast.next = location ;
+        location.prev = ilast;
+        ilast.next = location;
         ifirst.prev = prev;
         cache = null;
         insns.removeAll(false);
     }
 
-
-
     /**
      * Removes the given instruction from this list.
      *
-     * @param insn the instruction <i>of this list</i> that must be removed.
+     * @param insn
+     *            the instruction <i>of this list</i> that must be removed.
      */
     public void remove(final AbstractInsnNode insn) {
         --size;
@@ -456,8 +485,9 @@ public class InsnList {
     /**
      * Removes all of the instructions of this list.
      *
-     * @param mark if the instructions must be marked as no longer belonging to
-     *        any {@link InsnList}.
+     * @param mark
+     *            if the instructions must be marked as no longer belonging to
+     *            any {@link InsnList}.
      */
     void removeAll(final boolean mark) {
         if (mark) {
@@ -499,14 +529,14 @@ public class InsnList {
     }
 
     // this class is not generified because it will create bridges
-    private final class InsnListIterator implements ListIterator/*<AbstractInsnNode>*/ {
+    private final class InsnListIterator implements ListIterator {
 
         AbstractInsnNode next;
 
         AbstractInsnNode prev;
 
         InsnListIterator(int index) {
-            if(index==size()) {
+            if (index == size()) {
                 next = null;
                 prev = getLast();
             } else {
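
The fail-early check added to InsnList.add above tightens the contract of the tree API: an AbstractInsnNode may only be added while it belongs to no list. A minimal usage sketch of that contract follows (assumed usage, not part of the patch; it relies on remove() clearing the prev/next links, as the upstream ASM implementation does, and on the scala.tools.asm.tree classes shown in this diff):

    InsnList a = new InsnList();
    InsnList b = new InsnList();
    AbstractInsnNode nop = new InsnNode(Opcodes.NOP);

    a.add(nop);      // fine: nop does not belong to any list yet
    // b.add(nop);   // would now throw RuntimeException: nop still belongs to `a`
    a.remove(nop);   // detaches nop, resetting its prev/next links
    b.add(nop);      // fine again

To move whole instruction sequences, the InsnList overloads documented above (add(InsnList), insert(InsnList), insertBefore(location, InsnList)) are the intended route, since they clear the source list as part of the operation.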
diff --git a/src/asm/scala/tools/asm/tree/InsnNode.java b/src/asm/scala/tools/asm/tree/InsnNode.java
index d4664d2..4d5288c 100644
--- a/src/asm/scala/tools/asm/tree/InsnNode.java
+++ b/src/asm/scala/tools/asm/tree/InsnNode.java
@@ -43,20 +43,22 @@ public class InsnNode extends AbstractInsnNode {
     /**
      * Constructs a new {@link InsnNode}.
      *
-     * @param opcode the opcode of the instruction to be constructed. This
-     *        opcode must be NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
-     *        ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1,
-     *        FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, LALOAD,
-     *        FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IASTORE, LASTORE,
-     *        FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, SASTORE, POP, POP2,
-     *        DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP, IADD, LADD,
-     *        FADD, DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV,
-     *        LDIV, FDIV, DDIV, IREM, LREM, FREM, DREM, INEG, LNEG, FNEG, DNEG,
-     *        ISHL, LSHL, ISHR, LSHR, IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR,
-     *        LXOR, I2L, I2F, I2D, L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F,
-     *        I2B, I2C, I2S, LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN,
-     *        FRETURN, DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW,
-     *        MONITORENTER, or MONITOREXIT.
+     * @param opcode
+     *            the opcode of the instruction to be constructed. This opcode
+     *            must be NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
+     *            ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1,
+     *            FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD,
+     *            LALOAD, FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD,
+     *            IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE,
+     *            SASTORE, POP, POP2, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1,
+     *            DUP2_X2, SWAP, IADD, LADD, FADD, DADD, ISUB, LSUB, FSUB, DSUB,
+     *            IMUL, LMUL, FMUL, DMUL, IDIV, LDIV, FDIV, DDIV, IREM, LREM,
+     *            FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL, LSHL, ISHR, LSHR,
+     *            IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR, I2L, I2F, I2D,
+     *            L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B, I2C, I2S,
+     *            LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN, FRETURN,
+     *            DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW, MONITORENTER,
+     *            or MONITOREXIT.
      */
     public InsnNode(final int opcode) {
         super(opcode);
@@ -70,7 +72,8 @@ public class InsnNode extends AbstractInsnNode {
     /**
      * Makes the given visitor visit this instruction.
      *
-     * @param mv a method visitor.
+     * @param mv
+     *            a method visitor.
      */
     @Override
     public void accept(final MethodVisitor mv) {
diff --git a/src/asm/scala/tools/asm/tree/IntInsnNode.java b/src/asm/scala/tools/asm/tree/IntInsnNode.java
index b61270c..e0aeed4 100644
--- a/src/asm/scala/tools/asm/tree/IntInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/IntInsnNode.java
@@ -48,9 +48,11 @@ public class IntInsnNode extends AbstractInsnNode {
     /**
      * Constructs a new {@link IntInsnNode}.
      *
-     * @param opcode the opcode of the instruction to be constructed. This
-     *        opcode must be BIPUSH, SIPUSH or NEWARRAY.
-     * @param operand the operand of the instruction to be constructed.
+     * @param opcode
+     *            the opcode of the instruction to be constructed. This opcode
+     *            must be BIPUSH, SIPUSH or NEWARRAY.
+     * @param operand
+     *            the operand of the instruction to be constructed.
      */
     public IntInsnNode(final int opcode, final int operand) {
         super(opcode);
@@ -60,8 +62,9 @@ public class IntInsnNode extends AbstractInsnNode {
     /**
      * Sets the opcode of this instruction.
      *
-     * @param opcode the new instruction opcode. This opcode must be BIPUSH,
-     *        SIPUSH or NEWARRAY.
+     * @param opcode
+     *            the new instruction opcode. This opcode must be BIPUSH, SIPUSH
+     *            or NEWARRAY.
      */
     public void setOpcode(final int opcode) {
         this.opcode = opcode;
diff --git a/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
index d993b5a..7ee84b8 100644
--- a/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
@@ -65,17 +65,17 @@ public class InvokeDynamicInsnNode extends AbstractInsnNode {
     /**
      * Constructs a new {@link InvokeDynamicInsnNode}.
      *
-     * @param name invokedynamic name.
-     * @param desc invokedynamic descriptor (see {@link org.objectweb.asm.Type}).
-     * @param bsm the bootstrap method.
-     * @param bsmArgs the boostrap constant arguments.
+     * @param name
+     *            invokedynamic name.
+     * @param desc
+     *            invokedynamic descriptor (see {@link scala.tools.asm.Type}).
+     * @param bsm
+     *            the bootstrap method.
+     * @param bsmArgs
+     *            the bootstrap constant arguments.
      */
-    public InvokeDynamicInsnNode(
-        final String name,
-        final String desc,
-        final Handle bsm,
-        final Object... bsmArgs)
-    {
+    public InvokeDynamicInsnNode(final String name, final String desc,
+            final Handle bsm, final Object... bsmArgs) {
         super(Opcodes.INVOKEDYNAMIC);
         this.name = name;
         this.desc = desc;
@@ -97,4 +97,4 @@ public class InvokeDynamicInsnNode extends AbstractInsnNode {
     public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
         return new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs);
     }
-}
\ No newline at end of file
+}
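
For reference, a hedged sketch of how the reformatted constructor above is typically used (the owner, name and descriptor strings here are made-up placeholders, not taken from the patch):

    Handle bsm = new Handle(Opcodes.H_INVOKESTATIC, "com/example/Bootstrap", "bootstrap",
            "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;");
    AbstractInsnNode indy = new InvokeDynamicInsnNode("apply", "()Ljava/lang/Runnable;", bsm);

The trailing Object... bsmArgs parameter may be left empty, as in this sketch, when the bootstrap method takes no extra constant arguments.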
diff --git a/src/asm/scala/tools/asm/tree/JumpInsnNode.java b/src/asm/scala/tools/asm/tree/JumpInsnNode.java
index 339ebbd..81e1e09 100644
--- a/src/asm/scala/tools/asm/tree/JumpInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/JumpInsnNode.java
@@ -50,13 +50,15 @@ public class JumpInsnNode extends AbstractInsnNode {
     /**
      * Constructs a new {@link JumpInsnNode}.
      *
-     * @param opcode the opcode of the type instruction to be constructed. This
-     *        opcode must be IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
-     *        IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ,
-     *        IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
-     * @param label the operand of the instruction to be constructed. This
-     *        operand is a label that designates the instruction to which the
-     *        jump instruction may jump.
+     * @param opcode
+     *            the opcode of the type instruction to be constructed. This
+     *            opcode must be IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
+     *            IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
+     *            IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
+     * @param label
+     *            the operand of the instruction to be constructed. This operand
+     *            is a label that designates the instruction to which the jump
+     *            instruction may jump.
      */
     public JumpInsnNode(final int opcode, final LabelNode label) {
         super(opcode);
@@ -66,10 +68,11 @@ public class JumpInsnNode extends AbstractInsnNode {
     /**
      * Sets the opcode of this instruction.
      *
-     * @param opcode the new instruction opcode. This opcode must be IFEQ, IFNE,
-     *        IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT,
-     *        IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ, IF_ACMPNE, GOTO, JSR,
-     *        IFNULL or IFNONNULL.
+     * @param opcode
+     *            the new instruction opcode. This opcode must be IFEQ, IFNE,
+     *            IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT,
+     *            IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ, IF_ACMPNE, GOTO,
+     *            JSR, IFNULL or IFNONNULL.
      */
     public void setOpcode(final int opcode) {
         this.opcode = opcode;
diff --git a/src/asm/scala/tools/asm/tree/LabelNode.java b/src/asm/scala/tools/asm/tree/LabelNode.java
index 523a8d6..44c48c1 100644
--- a/src/asm/scala/tools/asm/tree/LabelNode.java
+++ b/src/asm/scala/tools/asm/tree/LabelNode.java
@@ -75,4 +75,4 @@ public class LabelNode extends AbstractInsnNode {
     public void resetLabel() {
         label = null;
     }
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/tree/LdcInsnNode.java b/src/asm/scala/tools/asm/tree/LdcInsnNode.java
index f8d115a..4e328f9 100644
--- a/src/asm/scala/tools/asm/tree/LdcInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/LdcInsnNode.java
@@ -44,16 +44,17 @@ public class LdcInsnNode extends AbstractInsnNode {
     /**
      * The constant to be loaded on the stack. This parameter must be a non null
      * {@link Integer}, a {@link Float}, a {@link Long}, a {@link Double}, a
-     * {@link String} or a {@link org.objectweb.asm.Type}.
+     * {@link String} or a {@link scala.tools.asm.Type}.
      */
     public Object cst;
 
     /**
      * Constructs a new {@link LdcInsnNode}.
      *
-     * @param cst the constant to be loaded on the stack. This parameter must be
-     *        a non null {@link Integer}, a {@link Float}, a {@link Long}, a
-     *        {@link Double} or a {@link String}.
+     * @param cst
+     *            the constant to be loaded on the stack. This parameter must be
+     *            a non null {@link Integer}, a {@link Float}, a {@link Long}, a
+     *            {@link Double} or a {@link String}.
      */
     public LdcInsnNode(final Object cst) {
         super(Opcodes.LDC);
@@ -74,4 +75,4 @@ public class LdcInsnNode extends AbstractInsnNode {
     public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
         return new LdcInsnNode(cst);
     }
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/tree/LineNumberNode.java b/src/asm/scala/tools/asm/tree/LineNumberNode.java
index acc83c8..9947aa7 100644
--- a/src/asm/scala/tools/asm/tree/LineNumberNode.java
+++ b/src/asm/scala/tools/asm/tree/LineNumberNode.java
@@ -55,9 +55,11 @@ public class LineNumberNode extends AbstractInsnNode {
     /**
      * Constructs a new {@link LineNumberNode}.
      *
-     * @param line a line number. This number refers to the source file from
-     *        which the class was compiled.
-     * @param start the first instruction corresponding to this line number.
+     * @param line
+     *            a line number. This number refers to the source file from
+     *            which the class was compiled.
+     * @param start
+     *            the first instruction corresponding to this line number.
      */
     public LineNumberNode(final int line, final LabelNode start) {
         super(-1);
diff --git a/src/asm/scala/tools/asm/tree/LocalVariableNode.java b/src/asm/scala/tools/asm/tree/LocalVariableNode.java
index 51cbd3c..0d8e273 100644
--- a/src/asm/scala/tools/asm/tree/LocalVariableNode.java
+++ b/src/asm/scala/tools/asm/tree/LocalVariableNode.java
@@ -73,24 +73,24 @@ public class LocalVariableNode {
     /**
      * Constructs a new {@link LocalVariableNode}.
      *
-     * @param name the name of a local variable.
-     * @param desc the type descriptor of this local variable.
-     * @param signature the signature of this local variable. May be
-     *        <tt>null</tt>.
-     * @param start the first instruction corresponding to the scope of this
-     *        local variable (inclusive).
-     * @param end the last instruction corresponding to the scope of this local
-     *        variable (exclusive).
-     * @param index the local variable's index.
+     * @param name
+     *            the name of a local variable.
+     * @param desc
+     *            the type descriptor of this local variable.
+     * @param signature
+     *            the signature of this local variable. May be <tt>null</tt>.
+     * @param start
+     *            the first instruction corresponding to the scope of this local
+     *            variable (inclusive).
+     * @param end
+     *            the last instruction corresponding to the scope of this local
+     *            variable (exclusive).
+     * @param index
+     *            the local variable's index.
      */
-    public LocalVariableNode(
-        final String name,
-        final String desc,
-        final String signature,
-        final LabelNode start,
-        final LabelNode end,
-        final int index)
-    {
+    public LocalVariableNode(final String name, final String desc,
+            final String signature, final LabelNode start, final LabelNode end,
+            final int index) {
         this.name = name;
         this.desc = desc;
         this.signature = signature;
@@ -102,14 +102,11 @@ public class LocalVariableNode {
     /**
      * Makes the given visitor visit this local variable declaration.
      *
-     * @param mv a method visitor.
+     * @param mv
+     *            a method visitor.
      */
     public void accept(final MethodVisitor mv) {
-        mv.visitLocalVariable(name,
-                desc,
-                signature,
-                start.getLabel(),
-                end.getLabel(),
-                index);
+        mv.visitLocalVariable(name, desc, signature, start.getLabel(),
+                end.getLabel(), index);
     }
 }
diff --git a/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
index 6d0f971..d2479b4 100644
--- a/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
@@ -64,20 +64,21 @@ public class LookupSwitchInsnNode extends AbstractInsnNode {
     /**
      * Constructs a new {@link LookupSwitchInsnNode}.
      *
-     * @param dflt beginning of the default handler block.
-     * @param keys the values of the keys.
-     * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
-     *        the beginning of the handler block for the <tt>keys[i]</tt> key.
+     * @param dflt
+     *            beginning of the default handler block.
+     * @param keys
+     *            the values of the keys.
+     * @param labels
+     *            beginnings of the handler blocks. <tt>labels[i]</tt> is the
+     *            beginning of the handler block for the <tt>keys[i]</tt> key.
      */
-    public LookupSwitchInsnNode(
-        final LabelNode dflt,
-        final int[] keys,
-        final LabelNode[] labels)
-    {
+    public LookupSwitchInsnNode(final LabelNode dflt, final int[] keys,
+            final LabelNode[] labels) {
         super(Opcodes.LOOKUPSWITCH);
         this.dflt = dflt;
         this.keys = new ArrayList<Integer>(keys == null ? 0 : keys.length);
-        this.labels = new ArrayList<LabelNode>(labels == null ? 0 : labels.length);
+        this.labels = new ArrayList<LabelNode>(labels == null ? 0
+                : labels.length);
         if (keys != null) {
             for (int i = 0; i < keys.length; ++i) {
                 this.keys.add(new Integer(keys[i]));
diff --git a/src/asm/scala/tools/asm/tree/MethodInsnNode.java b/src/asm/scala/tools/asm/tree/MethodInsnNode.java
index c3036bc..bf09f55 100644
--- a/src/asm/scala/tools/asm/tree/MethodInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/MethodInsnNode.java
@@ -43,7 +43,7 @@ public class MethodInsnNode extends AbstractInsnNode {
 
     /**
      * The internal name of the method's owner class (see
-     * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+     * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
      */
     public String owner;
 
@@ -53,27 +53,28 @@ public class MethodInsnNode extends AbstractInsnNode {
     public String name;
 
     /**
-     * The method's descriptor (see {@link org.objectweb.asm.Type}).
+     * The method's descriptor (see {@link scala.tools.asm.Type}).
      */
     public String desc;
 
     /**
      * Constructs a new {@link MethodInsnNode}.
      *
-     * @param opcode the opcode of the type instruction to be constructed. This
-     *        opcode must be INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
-     *        INVOKEINTERFACE.
-     * @param owner the internal name of the method's owner class (see
-     *        {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
-     * @param name the method's name.
-     * @param desc the method's descriptor (see {@link org.objectweb.asm.Type}).
+     * @param opcode
+     *            the opcode of the type instruction to be constructed. This
+     *            opcode must be INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
+     *            INVOKEINTERFACE.
+     * @param owner
+     *            the internal name of the method's owner class (see
+     *            {@link scala.tools.asm.Type#getInternalName()
+     *            getInternalName}).
+     * @param name
+     *            the method's name.
+     * @param desc
+     *            the method's descriptor (see {@link scala.tools.asm.Type}).
      */
-    public MethodInsnNode(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public MethodInsnNode(final int opcode, final String owner,
+            final String name, final String desc) {
         super(opcode);
         this.owner = owner;
         this.name = name;
@@ -83,8 +84,9 @@ public class MethodInsnNode extends AbstractInsnNode {
     /**
      * Sets the opcode of this instruction.
      *
-     * @param opcode the new instruction opcode. This opcode must be
-     *        INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or INVOKEINTERFACE.
+     * @param opcode
+     *            the new instruction opcode. This opcode must be INVOKEVIRTUAL,
+     *            INVOKESPECIAL, INVOKESTATIC or INVOKEINTERFACE.
      */
     public void setOpcode(final int opcode) {
         this.opcode = opcode;
@@ -104,4 +106,4 @@ public class MethodInsnNode extends AbstractInsnNode {
     public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
         return new MethodInsnNode(opcode, owner, name, desc);
     }
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/tree/MethodNode.java b/src/asm/scala/tools/asm/tree/MethodNode.java
index 70ec39e..a161600 100644
--- a/src/asm/scala/tools/asm/tree/MethodNode.java
+++ b/src/asm/scala/tools/asm/tree/MethodNode.java
@@ -81,7 +81,7 @@ public class MethodNode extends MethodVisitor {
      * The runtime visible annotations of this method. This list is a list of
      * {@link AnnotationNode} objects. May be <tt>null</tt>.
      *
-     * @associates org.objectweb.asm.tree.AnnotationNode
+     * @associates scala.tools.asm.tree.AnnotationNode
      * @label visible
      */
     public List<AnnotationNode> visibleAnnotations;
@@ -90,7 +90,7 @@ public class MethodNode extends MethodVisitor {
      * The runtime invisible annotations of this method. This list is a list of
      * {@link AnnotationNode} objects. May be <tt>null</tt>.
      *
-     * @associates org.objectweb.asm.tree.AnnotationNode
+     * @associates scala.tools.asm.tree.AnnotationNode
      * @label invisible
      */
     public List<AnnotationNode> invisibleAnnotations;
@@ -99,7 +99,7 @@ public class MethodNode extends MethodVisitor {
      * The non standard attributes of this method. This list is a list of
      * {@link Attribute} objects. May be <tt>null</tt>.
      *
-     * @associates org.objectweb.asm.Attribute
+     * @associates scala.tools.asm.Attribute
      */
     public List<Attribute> attrs;
 
@@ -117,7 +117,7 @@ public class MethodNode extends MethodVisitor {
      * The runtime visible parameter annotations of this method. These lists are
      * lists of {@link AnnotationNode} objects. May be <tt>null</tt>.
      *
-     * @associates org.objectweb.asm.tree.AnnotationNode
+     * @associates scala.tools.asm.tree.AnnotationNode
      * @label invisible parameters
      */
     public List<AnnotationNode>[] visibleParameterAnnotations;
@@ -126,7 +126,7 @@ public class MethodNode extends MethodVisitor {
      * The runtime invisible parameter annotations of this method. These lists
      * are lists of {@link AnnotationNode} objects. May be <tt>null</tt>.
      *
-     * @associates org.objectweb.asm.tree.AnnotationNode
+     * @associates scala.tools.asm.tree.AnnotationNode
      * @label visible parameters
      */
     public List<AnnotationNode>[] invisibleParameterAnnotations;
@@ -135,7 +135,7 @@ public class MethodNode extends MethodVisitor {
      * The instructions of this method. This list is a list of
      * {@link AbstractInsnNode} objects.
      *
-     * @associates org.objectweb.asm.tree.AbstractInsnNode
+     * @associates scala.tools.asm.tree.AbstractInsnNode
      * @label instructions
      */
     public InsnList instructions;
@@ -144,7 +144,7 @@ public class MethodNode extends MethodVisitor {
      * The try catch blocks of this method. This list is a list of
      * {@link TryCatchBlockNode} objects.
      *
-     * @associates org.objectweb.asm.tree.TryCatchBlockNode
+     * @associates scala.tools.asm.tree.TryCatchBlockNode
      */
     public List<TryCatchBlockNode> tryCatchBlocks;
 
@@ -162,7 +162,7 @@ public class MethodNode extends MethodVisitor {
      * The local variables of this method. This list is a list of
      * {@link LocalVariableNode} objects. May be <tt>null</tt>
      *
-     * @associates org.objectweb.asm.tree.LocalVariableNode
+     * @associates scala.tools.asm.tree.LocalVariableNode
      */
     public List<LocalVariableNode> localVariables;
 
@@ -170,7 +170,7 @@ public class MethodNode extends MethodVisitor {
      * If the accept method has been called on this object.
      */
     private boolean visited;
-    
+
     /**
      * Constructs an uninitialized {@link MethodNode}. <i>Subclasses must not
      * use this constructor</i>. Instead, they must use the
@@ -183,8 +183,9 @@ public class MethodNode extends MethodVisitor {
     /**
      * Constructs an uninitialized {@link MethodNode}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
      */
     public MethodNode(final int api) {
         super(api);
@@ -196,56 +197,55 @@ public class MethodNode extends MethodVisitor {
      * constructor</i>. Instead, they must use the
      * {@link #MethodNode(int, int, String, String, String, String[])} version.
      *
-     * @param access the method's access flags (see {@link Opcodes}). This
-     *        parameter also indicates if the method is synthetic and/or
-     *        deprecated.
-     * @param name the method's name.
-     * @param desc the method's descriptor (see {@link Type}).
-     * @param signature the method's signature. May be <tt>null</tt>.
-     * @param exceptions the internal names of the method's exception classes
-     *        (see {@link Type#getInternalName() getInternalName}). May be
-     *        <tt>null</tt>.
+     * @param access
+     *            the method's access flags (see {@link Opcodes}). This
+     *            parameter also indicates if the method is synthetic and/or
+     *            deprecated.
+     * @param name
+     *            the method's name.
+     * @param desc
+     *            the method's descriptor (see {@link Type}).
+     * @param signature
+     *            the method's signature. May be <tt>null</tt>.
+     * @param exceptions
+     *            the internal names of the method's exception classes (see
+     *            {@link Type#getInternalName() getInternalName}). May be
+     *            <tt>null</tt>.
      */
-    public MethodNode(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final String[] exceptions)
-    {
+    public MethodNode(final int access, final String name, final String desc,
+            final String signature, final String[] exceptions) {
         this(Opcodes.ASM4, access, name, desc, signature, exceptions);
     }
 
     /**
      * Constructs a new {@link MethodNode}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
-     * @param access the method's access flags (see {@link Opcodes}). This
-     *        parameter also indicates if the method is synthetic and/or
-     *        deprecated.
-     * @param name the method's name.
-     * @param desc the method's descriptor (see {@link Type}).
-     * @param signature the method's signature. May be <tt>null</tt>.
-     * @param exceptions the internal names of the method's exception classes
-     *        (see {@link Type#getInternalName() getInternalName}). May be
-     *        <tt>null</tt>.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
+     * @param access
+     *            the method's access flags (see {@link Opcodes}). This
+     *            parameter also indicates if the method is synthetic and/or
+     *            deprecated.
+     * @param name
+     *            the method's name.
+     * @param desc
+     *            the method's descriptor (see {@link Type}).
+     * @param signature
+     *            the method's signature. May be <tt>null</tt>.
+     * @param exceptions
+     *            the internal names of the method's exception classes (see
+     *            {@link Type#getInternalName() getInternalName}). May be
+     *            <tt>null</tt>.
      */
-    public MethodNode(
-        final int api,
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final String[] exceptions)
-    {
+    public MethodNode(final int api, final int access, final String name,
+            final String desc, final String signature, final String[] exceptions) {
         super(api);
         this.access = access;
         this.name = name;
         this.desc = desc;
         this.signature = signature;
-        this.exceptions = new ArrayList<String>(exceptions == null
-                ? 0
+        this.exceptions = new ArrayList<String>(exceptions == null ? 0
                 : exceptions.length);
         boolean isAbstract = (access & Opcodes.ACC_ABSTRACT) != 0;
         if (!isAbstract) {
@@ -274,10 +274,8 @@ public class MethodNode extends MethodVisitor {
     }
 
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public AnnotationVisitor visitAnnotation(final String desc,
+            final boolean visible) {
         AnnotationNode an = new AnnotationNode(desc);
         if (visible) {
             if (visibleAnnotations == null) {
@@ -294,28 +292,27 @@ public class MethodNode extends MethodVisitor {
     }
 
     @Override
-    public AnnotationVisitor visitParameterAnnotation(
-        final int parameter,
-        final String desc,
-        final boolean visible)
-    {
+    public AnnotationVisitor visitParameterAnnotation(final int parameter,
+            final String desc, final boolean visible) {
         AnnotationNode an = new AnnotationNode(desc);
         if (visible) {
             if (visibleParameterAnnotations == null) {
                 int params = Type.getArgumentTypes(this.desc).length;
-                visibleParameterAnnotations = (List<AnnotationNode>[])new List<?>[params];
+                visibleParameterAnnotations = (List<AnnotationNode>[]) new List<?>[params];
             }
             if (visibleParameterAnnotations[parameter] == null) {
-                visibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(1);
+                visibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(
+                        1);
             }
             visibleParameterAnnotations[parameter].add(an);
         } else {
             if (invisibleParameterAnnotations == null) {
                 int params = Type.getArgumentTypes(this.desc).length;
-                invisibleParameterAnnotations = (List<AnnotationNode>[])new List<?>[params];
+                invisibleParameterAnnotations = (List<AnnotationNode>[]) new List<?>[params];
             }
             if (invisibleParameterAnnotations[parameter] == null) {
-                invisibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(1);
+                invisibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(
+                        1);
             }
             invisibleParameterAnnotations[parameter].add(an);
         }
@@ -335,17 +332,10 @@ public class MethodNode extends MethodVisitor {
     }
 
     @Override
-    public void visitFrame(
-        final int type,
-        final int nLocal,
-        final Object[] local,
-        final int nStack,
-        final Object[] stack)
-    {
-        instructions.add(new FrameNode(type, nLocal, local == null
-                ? null
-                : getLabelNodes(local), nStack, stack == null
-                ? null
+    public void visitFrame(final int type, final int nLocal,
+            final Object[] local, final int nStack, final Object[] stack) {
+        instructions.add(new FrameNode(type, nLocal, local == null ? null
+                : getLabelNodes(local), nStack, stack == null ? null
                 : getLabelNodes(stack)));
     }
 
@@ -370,32 +360,20 @@ public class MethodNode extends MethodVisitor {
     }
 
     @Override
-    public void visitFieldInsn(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitFieldInsn(final int opcode, final String owner,
+            final String name, final String desc) {
         instructions.add(new FieldInsnNode(opcode, owner, name, desc));
     }
 
     @Override
-    public void visitMethodInsn(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitMethodInsn(final int opcode, final String owner,
+            final String name, final String desc) {
         instructions.add(new MethodInsnNode(opcode, owner, name, desc));
     }
 
     @Override
-    public void visitInvokeDynamicInsn(
-        String name,
-        String desc,
-        Handle bsm,
-        Object... bsmArgs)
-    {
+    public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+            Object... bsmArgs) {
         instructions.add(new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs));
     }
 
@@ -420,26 +398,16 @@ public class MethodNode extends MethodVisitor {
     }
 
     @Override
-    public void visitTableSwitchInsn(
-        final int min,
-        final int max,
-        final Label dflt,
-        final Label... labels)
-    {
-        instructions.add(new TableSwitchInsnNode(min,
-                max,
-                getLabelNode(dflt),
+    public void visitTableSwitchInsn(final int min, final int max,
+            final Label dflt, final Label... labels) {
+        instructions.add(new TableSwitchInsnNode(min, max, getLabelNode(dflt),
                 getLabelNodes(labels)));
     }
 
     @Override
-    public void visitLookupSwitchInsn(
-        final Label dflt,
-        final int[] keys,
-        final Label[] labels)
-    {
-        instructions.add(new LookupSwitchInsnNode(getLabelNode(dflt),
-                keys,
+    public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+            final Label[] labels) {
+        instructions.add(new LookupSwitchInsnNode(getLabelNode(dflt), keys,
                 getLabelNodes(labels)));
     }
 
@@ -449,33 +417,18 @@ public class MethodNode extends MethodVisitor {
     }
 
     @Override
-    public void visitTryCatchBlock(
-        final Label start,
-        final Label end,
-        final Label handler,
-        final String type)
-    {
+    public void visitTryCatchBlock(final Label start, final Label end,
+            final Label handler, final String type) {
         tryCatchBlocks.add(new TryCatchBlockNode(getLabelNode(start),
-                getLabelNode(end),
-                getLabelNode(handler),
-                type));
+                getLabelNode(end), getLabelNode(handler), type));
     }
 
     @Override
-    public void visitLocalVariable(
-        final String name,
-        final String desc,
-        final String signature,
-        final Label start,
-        final Label end,
-        final int index)
-    {
-        localVariables.add(new LocalVariableNode(name,
-                desc,
-                signature,
-                getLabelNode(start),
-                getLabelNode(end),
-                index));
+    public void visitLocalVariable(final String name, final String desc,
+            final String signature, final Label start, final Label end,
+            final int index) {
+        localVariables.add(new LocalVariableNode(name, desc, signature,
+                getLabelNode(start), getLabelNode(end), index));
     }
 
     @Override
@@ -499,7 +452,8 @@ public class MethodNode extends MethodVisitor {
      * the {@link Label#info} field to store associations between labels and
      * label nodes.
      *
-     * @param l a Label.
+     * @param l
+     *            a Label.
      * @return the LabelNode corresponding to l.
      */
     protected LabelNode getLabelNode(final Label l) {
@@ -539,7 +493,8 @@ public class MethodNode extends MethodVisitor {
      * recursively, do not contain elements that were introduced in more recent
      * versions of the ASM API than the given version.
      *
-     * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+     * @param api
+     *            an ASM API version. Must be one of {@link Opcodes#ASM4}.
      */
     public void check(final int api) {
         // nothing to do
@@ -548,15 +503,13 @@ public class MethodNode extends MethodVisitor {
     /**
      * Makes the given class visitor visit this method.
      *
-     * @param cv a class visitor.
+     * @param cv
+     *            a class visitor.
      */
     public void accept(final ClassVisitor cv) {
         String[] exceptions = new String[this.exceptions.size()];
         this.exceptions.toArray(exceptions);
-        MethodVisitor mv = cv.visitMethod(access,
-                name,
-                desc,
-                signature,
+        MethodVisitor mv = cv.visitMethod(access, name, desc, signature,
                 exceptions);
         if (mv != null) {
             accept(mv);
@@ -566,7 +519,8 @@ public class MethodNode extends MethodVisitor {
     /**
      * Makes the given method visitor visit this method.
      *
-     * @param mv a method visitor.
+     * @param mv
+     *            a method visitor.
      */
     public void accept(final MethodVisitor mv) {
         // visits the method attributes
@@ -588,8 +542,7 @@ public class MethodNode extends MethodVisitor {
             AnnotationNode an = invisibleAnnotations.get(i);
             an.accept(mv.visitAnnotation(an.desc, false));
         }
-        n = visibleParameterAnnotations == null
-                ? 0
+        n = visibleParameterAnnotations == null ? 0
                 : visibleParameterAnnotations.length;
         for (i = 0; i < n; ++i) {
             List<?> l = visibleParameterAnnotations[i];
@@ -601,8 +554,7 @@ public class MethodNode extends MethodVisitor {
                 an.accept(mv.visitParameterAnnotation(i, an.desc, true));
             }
         }
-        n = invisibleParameterAnnotations == null
-                ? 0
+        n = invisibleParameterAnnotations == null ? 0
                 : invisibleParameterAnnotations.length;
         for (i = 0; i < n; ++i) {
             List<?> l = invisibleParameterAnnotations[i];
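
As a brief illustration of the tree API whose signatures are reformatted above (a sketch under assumed usage, not part of the patch): each visitXxxInsn call on a MethodNode is recorded as the corresponding InsnNode subclass in its instructions list, and the whole node can later be replayed onto any MethodVisitor via accept.

    MethodNode mn = new MethodNode(Opcodes.ASM4, Opcodes.ACC_PUBLIC, "answer", "()I", null, null);
    mn.visitCode();
    mn.visitIntInsn(Opcodes.BIPUSH, 42);   // recorded as an IntInsnNode
    mn.visitInsn(Opcodes.IRETURN);         // recorded as an InsnNode
    mn.visitMaxs(1, 1);
    mn.visitEnd();
    // mn.accept(someMethodVisitor);       // someMethodVisitor is a placeholder; accept replays the recorded instructions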
diff --git a/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
index 9dfba77..fe5e883 100644
--- a/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
@@ -42,7 +42,7 @@ import scala.tools.asm.Opcodes;
 public class MultiANewArrayInsnNode extends AbstractInsnNode {
 
     /**
-     * An array type descriptor (see {@link org.objectweb.asm.Type}).
+     * An array type descriptor (see {@link scala.tools.asm.Type}).
      */
     public String desc;
 
@@ -54,8 +54,10 @@ public class MultiANewArrayInsnNode extends AbstractInsnNode {
     /**
      * Constructs a new {@link MultiANewArrayInsnNode}.
      *
-     * @param desc an array type descriptor (see {@link org.objectweb.asm.Type}).
-     * @param dims number of dimensions of the array to allocate.
+     * @param desc
+     *            an array type descriptor (see {@link scala.tools.asm.Type}).
+     * @param dims
+     *            number of dimensions of the array to allocate.
      */
     public MultiANewArrayInsnNode(final String desc, final int dims) {
         super(Opcodes.MULTIANEWARRAY);
@@ -78,4 +80,4 @@ public class MultiANewArrayInsnNode extends AbstractInsnNode {
         return new MultiANewArrayInsnNode(desc, dims);
     }
 
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
index 929ad9b..9b3c2a3 100644
--- a/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
@@ -69,18 +69,18 @@ public class TableSwitchInsnNode extends AbstractInsnNode {
     /**
      * Constructs a new {@link TableSwitchInsnNode}.
      *
-     * @param min the minimum key value.
-     * @param max the maximum key value.
-     * @param dflt beginning of the default handler block.
-     * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
-     *        the beginning of the handler block for the <tt>min + i</tt> key.
+     * @param min
+     *            the minimum key value.
+     * @param max
+     *            the maximum key value.
+     * @param dflt
+     *            beginning of the default handler block.
+     * @param labels
+     *            beginnings of the handler blocks. <tt>labels[i]</tt> is the
+     *            beginning of the handler block for the <tt>min + i</tt> key.
      */
-    public TableSwitchInsnNode(
-        final int min,
-        final int max,
-        final LabelNode dflt,
-        final LabelNode... labels)
-    {
+    public TableSwitchInsnNode(final int min, final int max,
+            final LabelNode dflt, final LabelNode... labels) {
         super(Opcodes.TABLESWITCH);
         this.min = min;
         this.max = max;
@@ -107,9 +107,7 @@ public class TableSwitchInsnNode extends AbstractInsnNode {
 
     @Override
     public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
-        return new TableSwitchInsnNode(min,
-                max,
-                clone(dflt, labels),
-                clone(this.labels, labels));
+        return new TableSwitchInsnNode(min, max, clone(dflt, labels), clone(
+                this.labels, labels));
     }
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
index 375b4cf..ab4fa97 100644
--- a/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
+++ b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
@@ -62,19 +62,19 @@ public class TryCatchBlockNode {
     /**
      * Constructs a new {@link TryCatchBlockNode}.
      *
-     * @param start beginning of the exception handler's scope (inclusive).
-     * @param end end of the exception handler's scope (exclusive).
-     * @param handler beginning of the exception handler's code.
-     * @param type internal name of the type of exceptions handled by the
-     *        handler, or <tt>null</tt> to catch any exceptions (for "finally"
-     *        blocks).
+     * @param start
+     *            beginning of the exception handler's scope (inclusive).
+     * @param end
+     *            end of the exception handler's scope (exclusive).
+     * @param handler
+     *            beginning of the exception handler's code.
+     * @param type
+     *            internal name of the type of exceptions handled by the
+     *            handler, or <tt>null</tt> to catch any exceptions (for
+     *            "finally" blocks).
      */
-    public TryCatchBlockNode(
-        final LabelNode start,
-        final LabelNode end,
-        final LabelNode handler,
-        final String type)
-    {
+    public TryCatchBlockNode(final LabelNode start, final LabelNode end,
+            final LabelNode handler, final String type) {
         this.start = start;
         this.end = end;
         this.handler = handler;
@@ -84,11 +84,11 @@ public class TryCatchBlockNode {
     /**
      * Makes the given visitor visit this try catch block.
      *
-     * @param mv a method visitor.
+     * @param mv
+     *            a method visitor.
      */
     public void accept(final MethodVisitor mv) {
-        mv.visitTryCatchBlock(start.getLabel(), end.getLabel(), handler == null
-                ? null
-                : handler.getLabel(), type);
+        mv.visitTryCatchBlock(start.getLabel(), end.getLabel(),
+                handler == null ? null : handler.getLabel(), type);
     }
 }
diff --git a/src/asm/scala/tools/asm/tree/TypeInsnNode.java b/src/asm/scala/tools/asm/tree/TypeInsnNode.java
index 0b2666c..3210dd6 100644
--- a/src/asm/scala/tools/asm/tree/TypeInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/TypeInsnNode.java
@@ -43,17 +43,19 @@ public class TypeInsnNode extends AbstractInsnNode {
 
     /**
      * The operand of this instruction. This operand is an internal name (see
-     * {@link org.objectweb.asm.Type}).
+     * {@link scala.tools.asm.Type}).
      */
     public String desc;
 
     /**
      * Constructs a new {@link TypeInsnNode}.
      *
-     * @param opcode the opcode of the type instruction to be constructed. This
-     *        opcode must be NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
-     * @param desc the operand of the instruction to be constructed. This
-     *        operand is an internal name (see {@link org.objectweb.asm.Type}).
+     * @param opcode
+     *            the opcode of the type instruction to be constructed. This
+     *            opcode must be NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
+     * @param desc
+     *            the operand of the instruction to be constructed. This operand
+     *            is an internal name (see {@link scala.tools.asm.Type}).
      */
     public TypeInsnNode(final int opcode, final String desc) {
         super(opcode);
@@ -63,8 +65,9 @@ public class TypeInsnNode extends AbstractInsnNode {
     /**
      * Sets the opcode of this instruction.
      *
-     * @param opcode the new instruction opcode. This opcode must be NEW,
-     *        ANEWARRAY, CHECKCAST or INSTANCEOF.
+     * @param opcode
+     *            the new instruction opcode. This opcode must be NEW,
+     *            ANEWARRAY, CHECKCAST or INSTANCEOF.
      */
     public void setOpcode(final int opcode) {
         this.opcode = opcode;
@@ -84,4 +87,4 @@ public class TypeInsnNode extends AbstractInsnNode {
     public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
         return new TypeInsnNode(opcode, desc);
     }
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/tree/VarInsnNode.java b/src/asm/scala/tools/asm/tree/VarInsnNode.java
index 89f572d..5dd9ef6 100644
--- a/src/asm/scala/tools/asm/tree/VarInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/VarInsnNode.java
@@ -51,11 +51,13 @@ public class VarInsnNode extends AbstractInsnNode {
     /**
      * Constructs a new {@link VarInsnNode}.
      *
-     * @param opcode the opcode of the local variable instruction to be
-     *        constructed. This opcode must be ILOAD, LLOAD, FLOAD, DLOAD,
-     *        ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET.
-     * @param var the operand of the instruction to be constructed. This operand
-     *        is the index of a local variable.
+     * @param opcode
+     *            the opcode of the local variable instruction to be
+     *            constructed. This opcode must be ILOAD, LLOAD, FLOAD, DLOAD,
+     *            ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET.
+     * @param var
+     *            the operand of the instruction to be constructed. This operand
+     *            is the index of a local variable.
      */
     public VarInsnNode(final int opcode, final int var) {
         super(opcode);
@@ -65,9 +67,10 @@ public class VarInsnNode extends AbstractInsnNode {
     /**
      * Sets the opcode of this instruction.
      *
-     * @param opcode the new instruction opcode. This opcode must be ILOAD,
-     *        LLOAD, FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE
-     *        or RET.
+     * @param opcode
+     *            the new instruction opcode. This opcode must be ILOAD, LLOAD,
+     *            FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or
+     *            RET.
      */
     public void setOpcode(final int opcode) {
         this.opcode = opcode;
@@ -87,4 +90,4 @@ public class VarInsnNode extends AbstractInsnNode {
     public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
         return new VarInsnNode(opcode, var);
     }
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
index df387b0..0134555 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
@@ -51,9 +51,10 @@ import scala.tools.asm.tree.VarInsnNode;
  * A semantic bytecode analyzer. <i>This class does not fully check that JSR and
  * RET instructions are valid.</i>
  *
- * @param <V> type of the Value used for the analysis.
+ * @param <V>
+ *            type of the Value used for the analysis.
  *
- *  @author Eric Bruneton
+ * @author Eric Bruneton
  */
 public class Analyzer<V extends Value> implements Opcodes {
 
@@ -78,8 +79,9 @@ public class Analyzer<V extends Value> implements Opcodes {
     /**
      * Constructs a new {@link Analyzer}.
      *
-     * @param interpreter the interpreter to be used to symbolically interpret
-     *        the bytecode instructions.
+     * @param interpreter
+     *            the interpreter to be used to symbolically interpret the
+     *            bytecode instructions.
      */
     public Analyzer(final Interpreter<V> interpreter) {
         this.interpreter = interpreter;
@@ -88,26 +90,28 @@ public class Analyzer<V extends Value> implements Opcodes {
     /**
      * Analyzes the given method.
      *
-     * @param owner the internal name of the class to which the method belongs.
-     * @param m the method to be analyzed.
+     * @param owner
+     *            the internal name of the class to which the method belongs.
+     * @param m
+     *            the method to be analyzed.
      * @return the symbolic state of the execution stack frame at each bytecode
      *         instruction of the method. The size of the returned array is
      *         equal to the number of instructions (and labels) of the method. A
      *         given frame is <tt>null</tt> if and only if the corresponding
      *         instruction cannot be reached (dead code).
-     * @throws AnalyzerException if a problem occurs during the analysis.
+     * @throws AnalyzerException
+     *             if a problem occurs during the analysis.
      */
     public Frame<V>[] analyze(final String owner, final MethodNode m)
-            throws AnalyzerException
-    {
+            throws AnalyzerException {
         if ((m.access & (ACC_ABSTRACT | ACC_NATIVE)) != 0) {
-            frames = (Frame<V>[])new Frame<?>[0];
+            frames = (Frame<V>[]) new Frame<?>[0];
             return frames;
         }
         n = m.instructions.size();
         insns = m.instructions;
-        handlers = (List<TryCatchBlockNode>[])new List<?>[n];
-        frames = (Frame<V>[])new Frame<?>[n];
+        handlers = (List<TryCatchBlockNode>[]) new List<?>[n];
+        frames = (Frame<V>[]) new Frame<?>[n];
         subroutines = new Subroutine[n];
         queued = new boolean[n];
         queue = new int[n];
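
A short usage sketch for the analyze method documented above (assumed usage, not part of the patch; BasicInterpreter, BasicValue and Frame come from the same analysis package, while "com/example/Owner" and methodNode are placeholders):

    Analyzer<BasicValue> analyzer = new Analyzer<BasicValue>(new BasicInterpreter());
    // analyze declares AnalyzerException, so call it from code that throws or catches it
    Frame<BasicValue>[] frames = analyzer.analyze("com/example/Owner", methodNode);
    for (int i = 0; i < frames.length; i++) {
        if (frames[i] == null) {
            // instruction i of methodNode.instructions is unreachable (dead code)
        }
    }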
@@ -188,8 +192,7 @@ public class Analyzer<V extends Value> implements Opcodes {
 
                 if (insnType == AbstractInsnNode.LABEL
                         || insnType == AbstractInsnNode.LINE
-                        || insnType == AbstractInsnNode.FRAME)
-                {
+                        || insnType == AbstractInsnNode.FRAME) {
                     merge(insn + 1, f, subroutine);
                     newControlFlowEdge(insn, insn + 1);
                 } else {
@@ -205,8 +208,7 @@ public class Analyzer<V extends Value> implements Opcodes {
                         int jump = insns.indexOf(j.label);
                         if (insnOpcode == JSR) {
                             merge(jump, current, new Subroutine(j.label,
-                                    m.maxLocals,
-                                    j));
+                                    m.maxLocals, j));
                         } else {
                             merge(jump, current, subroutine);
                         }
@@ -235,31 +237,27 @@ public class Analyzer<V extends Value> implements Opcodes {
                         }
                     } else if (insnOpcode == RET) {
                         if (subroutine == null) {
-                            throw new AnalyzerException(insnNode, "RET instruction outside of a sub routine");
+                            throw new AnalyzerException(insnNode,
+                                    "RET instruction outside of a sub routine");
                         }
                         for (int i = 0; i < subroutine.callers.size(); ++i) {
                             JumpInsnNode caller = subroutine.callers.get(i);
                             int call = insns.indexOf(caller);
                             if (frames[call] != null) {
-                                merge(call + 1,
-                                        frames[call],
-                                        current,
-                                        subroutines[call],
-                                        subroutine.access);
+                                merge(call + 1, frames[call], current,
+                                        subroutines[call], subroutine.access);
                                 newControlFlowEdge(insn, call + 1);
                             }
                         }
                     } else if (insnOpcode != ATHROW
-                            && (insnOpcode < IRETURN || insnOpcode > RETURN))
-                    {
+                            && (insnOpcode < IRETURN || insnOpcode > RETURN)) {
                         if (subroutine != null) {
                             if (insnNode instanceof VarInsnNode) {
                                 int var = ((VarInsnNode) insnNode).var;
                                 subroutine.access[var] = true;
                                 if (insnOpcode == LLOAD || insnOpcode == DLOAD
                                         || insnOpcode == LSTORE
-                                        || insnOpcode == DSTORE)
-                                {
+                                        || insnOpcode == DSTORE) {
                                     subroutine.access[var + 1] = true;
                                 }
                             } else if (insnNode instanceof IincInsnNode) {
@@ -292,23 +290,23 @@ public class Analyzer<V extends Value> implements Opcodes {
                     }
                 }
             } catch (AnalyzerException e) {
-                throw new AnalyzerException(e.node, "Error at instruction " + insn
-                        + ": " + e.getMessage(), e);
+                throw new AnalyzerException(e.node, "Error at instruction "
+                        + insn + ": " + e.getMessage(), e);
             } catch (Exception e) {
-                throw new AnalyzerException(insnNode, "Error at instruction " + insn
-                        + ": " + e.getMessage(), e);
+                throw new AnalyzerException(insnNode, "Error at instruction "
+                        + insn + ": " + e.getMessage(), e);
             }
         }
 
         return frames;
     }
 
-    private void findSubroutine(int insn, final Subroutine sub, final List<AbstractInsnNode> calls)
-            throws AnalyzerException
-    {
+    private void findSubroutine(int insn, final Subroutine sub,
+            final List<AbstractInsnNode> calls) throws AnalyzerException {
         while (true) {
             if (insn < 0 || insn >= n) {
-                throw new AnalyzerException(null, "Execution can fall off end of the code");
+                throw new AnalyzerException(null,
+                        "Execution can fall off end of the code");
             }
             if (subroutines[insn] != null) {
                 return;
@@ -352,18 +350,18 @@ public class Analyzer<V extends Value> implements Opcodes {
 
             // if insn does not fall through to the next instruction, return.
             switch (node.getOpcode()) {
-                case GOTO:
-                case RET:
-                case TABLESWITCH:
-                case LOOKUPSWITCH:
-                case IRETURN:
-                case LRETURN:
-                case FRETURN:
-                case DRETURN:
-                case ARETURN:
-                case RETURN:
-                case ATHROW:
-                    return;
+            case GOTO:
+            case RET:
+            case TABLESWITCH:
+            case LOOKUPSWITCH:
+            case IRETURN:
+            case LRETURN:
+            case FRETURN:
+            case DRETURN:
+            case ARETURN:
+            case RETURN:
+            case ATHROW:
+                return;
             }
             insn++;
         }
@@ -387,8 +385,9 @@ public class Analyzer<V extends Value> implements Opcodes {
     /**
      * Returns the exception handlers for the given instruction.
      *
-     * @param insn the index of an instruction of the last recently analyzed
-     *        method.
+     * @param insn
+     *            the index of an instruction of the last recently analyzed
+     *            method.
      * @return a list of {@link TryCatchBlockNode} objects.
      */
     public List<TryCatchBlockNode> getHandlers(final int insn) {
@@ -400,9 +399,12 @@ public class Analyzer<V extends Value> implements Opcodes {
      * execution of control flow analysis loop in #analyze. The default
      * implementation of this method does nothing.
      *
-     * @param owner the internal name of the class to which the method belongs.
-     * @param m the method to be analyzed.
-     * @throws AnalyzerException if a problem occurs.
+     * @param owner
+     *            the internal name of the class to which the method belongs.
+     * @param m
+     *            the method to be analyzed.
+     * @throws AnalyzerException
+     *             if a problem occurs.
      */
     protected void init(String owner, MethodNode m) throws AnalyzerException {
     }
@@ -410,8 +412,10 @@ public class Analyzer<V extends Value> implements Opcodes {
     /**
      * Constructs a new frame with the given size.
      *
-     * @param nLocals the maximum number of local variables of the frame.
-     * @param nStack the maximum stack size of the frame.
+     * @param nLocals
+     *            the maximum number of local variables of the frame.
+     * @param nStack
+     *            the maximum stack size of the frame.
      * @return the created frame.
      */
     protected Frame<V> newFrame(final int nLocals, final int nStack) {
@@ -421,7 +425,8 @@ public class Analyzer<V extends Value> implements Opcodes {
     /**
      * Constructs a new frame that is identical to the given frame.
      *
-     * @param src a frame.
+     * @param src
+     *            a frame.
      * @return the created frame.
      */
     protected Frame<V> newFrame(final Frame<? extends V> src) {
@@ -434,8 +439,10 @@ public class Analyzer<V extends Value> implements Opcodes {
      * control flow graph of a method (this method is called by the
      * {@link #analyze analyze} method during its visit of the method's code).
      *
-     * @param insn an instruction index.
-     * @param successor index of a successor instruction.
+     * @param insn
+     *            an instruction index.
+     * @param successor
+     *            index of a successor instruction.
      */
     protected void newControlFlowEdge(final int insn, final int successor) {
     }
@@ -447,16 +454,16 @@ public class Analyzer<V extends Value> implements Opcodes {
      * method is called by the {@link #analyze analyze} method during its visit
      * of the method's code).
      *
-     * @param insn an instruction index.
-     * @param successor index of a successor instruction.
+     * @param insn
+     *            an instruction index.
+     * @param successor
+     *            index of a successor instruction.
      * @return true if this edge must be considered in the data flow analysis
      *         performed by this analyzer, or false otherwise. The default
      *         implementation of this method always returns true.
      */
-    protected boolean newControlFlowExceptionEdge(
-        final int insn,
-        final int successor)
-    {
+    protected boolean newControlFlowExceptionEdge(final int insn,
+            final int successor) {
         return true;
     }
 
@@ -469,28 +476,25 @@ public class Analyzer<V extends Value> implements Opcodes {
      * the {@link #analyze analyze} method during its visit of the method's
      * code).
      *
-     * @param insn an instruction index.
-     * @param tcb TryCatchBlockNode corresponding to this edge.
+     * @param insn
+     *            an instruction index.
+     * @param tcb
+     *            TryCatchBlockNode corresponding to this edge.
      * @return true if this edge must be considered in the data flow analysis
      *         performed by this analyzer, or false otherwise. The default
      *         implementation of this method delegates to
      *         {@link #newControlFlowExceptionEdge(int, int)
      *         newControlFlowExceptionEdge(int, int)}.
      */
-    protected boolean newControlFlowExceptionEdge(
-        final int insn,
-        final TryCatchBlockNode tcb)
-    {
+    protected boolean newControlFlowExceptionEdge(final int insn,
+            final TryCatchBlockNode tcb) {
         return newControlFlowExceptionEdge(insn, insns.indexOf(tcb.handler));
     }
 
     // -------------------------------------------------------------------------
 
-    private void merge(
-        final int insn,
-        final Frame<V> frame,
-        final Subroutine subroutine) throws AnalyzerException
-    {
+    private void merge(final int insn, final Frame<V> frame,
+            final Subroutine subroutine) throws AnalyzerException {
         Frame<V> oldFrame = frames[insn];
         Subroutine oldSubroutine = subroutines[insn];
         boolean changes;
@@ -518,13 +522,9 @@ public class Analyzer<V extends Value> implements Opcodes {
         }
     }
 
-    private void merge(
-        final int insn,
-        final Frame<V> beforeJSR,
-        final Frame<V> afterRET,
-        final Subroutine subroutineBeforeJSR,
-        final boolean[] access) throws AnalyzerException
-    {
+    private void merge(final int insn, final Frame<V> beforeJSR,
+            final Frame<V> afterRET, final Subroutine subroutineBeforeJSR,
+            final boolean[] access) throws AnalyzerException {
         Frame<V> oldFrame = frames[insn];
         Subroutine oldSubroutine = subroutines[insn];
         boolean changes;
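
The Analyzer above drives a work-list data-flow pass over a method's instruction list and records one Frame per instruction. A minimal sketch of how it is typically invoked (not part of this patch; AnalyzerDemo is a hypothetical name, and it assumes the shaded scala.tools.asm packages behave like upstream ASM 4):

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.tree.ClassNode;
    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.analysis.Analyzer;
    import scala.tools.asm.tree.analysis.AnalyzerException;
    import scala.tools.asm.tree.analysis.BasicInterpreter;
    import scala.tools.asm.tree.analysis.BasicValue;
    import scala.tools.asm.tree.analysis.Frame;

    public class AnalyzerDemo {
        public static void main(String[] args) throws Exception {
            // Load a class from the classpath into the tree API.
            ClassNode cn = new ClassNode();
            new ClassReader("java.lang.String").accept(cn, 0);

            // One Analyzer instance can be reused across methods.
            Analyzer<BasicValue> analyzer =
                    new Analyzer<BasicValue>(new BasicInterpreter());
            for (MethodNode mn : cn.methods) {
                try {
                    // frames[i] is the symbolic state just before instruction i,
                    // or null if that instruction is unreachable (dead code).
                    Frame<BasicValue>[] frames = analyzer.analyze(cn.name, mn);
                    System.out.println(mn.name + ": " + frames.length + " frames");
                } catch (AnalyzerException e) {
                    System.out.println(mn.name + " failed: " + e.getMessage());
                }
            }
        }
    }
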
diff --git a/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
index a89bb35..5e3f51f 100644
--- a/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
+++ b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
@@ -46,17 +46,14 @@ public class AnalyzerException extends Exception {
         this.node = node;
     }
 
-    public AnalyzerException(final AbstractInsnNode node, final String msg, final Throwable exception) {
+    public AnalyzerException(final AbstractInsnNode node, final String msg,
+            final Throwable exception) {
         super(msg, exception);
         this.node = node;
     }
 
-    public AnalyzerException(
-        final AbstractInsnNode node,
-        final String msg,
-        final Object expected,
-        final Value encountered)
-    {
+    public AnalyzerException(final AbstractInsnNode node, final String msg,
+            final Object expected, final Value encountered) {
         super((msg == null ? "Expected " : msg + ": expected ") + expected
                 + ", but found " + encountered);
         this.node = node;
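
The four-argument constructor above is what formats the verifier's mismatch messages. A tiny sketch of the resulting text (not part of this patch; MessageDemo is a hypothetical name):

    import scala.tools.asm.tree.analysis.AnalyzerException;
    import scala.tools.asm.tree.analysis.BasicValue;

    public class MessageDemo {
        public static void main(String[] args) {
            // With a null msg the constructor prefixes "Expected ", so this
            // prints: Expected I, but found F
            AnalyzerException e = new AnalyzerException(null, null,
                    BasicValue.INT_VALUE, BasicValue.FLOAT_VALUE);
            System.out.println(e.getMessage());
        }
    }
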
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
index 64ddcc1..8d6653c 100644
--- a/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
@@ -50,8 +50,7 @@ import scala.tools.asm.tree.TypeInsnNode;
  * @author Bing Ran
  */
 public class BasicInterpreter extends Interpreter<BasicValue> implements
-        Opcodes
-{
+        Opcodes {
 
     public BasicInterpreter() {
         super(ASM4);
@@ -67,292 +66,286 @@ public class BasicInterpreter extends Interpreter<BasicValue> implements
             return BasicValue.UNINITIALIZED_VALUE;
         }
         switch (type.getSort()) {
-            case Type.VOID:
-                return null;
-            case Type.BOOLEAN:
-            case Type.CHAR:
-            case Type.BYTE:
-            case Type.SHORT:
-            case Type.INT:
-                return BasicValue.INT_VALUE;
-            case Type.FLOAT:
-                return BasicValue.FLOAT_VALUE;
-            case Type.LONG:
-                return BasicValue.LONG_VALUE;
-            case Type.DOUBLE:
-                return BasicValue.DOUBLE_VALUE;
-            case Type.ARRAY:
-            case Type.OBJECT:
-                return BasicValue.REFERENCE_VALUE;
-            default:
-                throw new Error("Internal error");
+        case Type.VOID:
+            return null;
+        case Type.BOOLEAN:
+        case Type.CHAR:
+        case Type.BYTE:
+        case Type.SHORT:
+        case Type.INT:
+            return BasicValue.INT_VALUE;
+        case Type.FLOAT:
+            return BasicValue.FLOAT_VALUE;
+        case Type.LONG:
+            return BasicValue.LONG_VALUE;
+        case Type.DOUBLE:
+            return BasicValue.DOUBLE_VALUE;
+        case Type.ARRAY:
+        case Type.OBJECT:
+            return BasicValue.REFERENCE_VALUE;
+        default:
+            throw new Error("Internal error");
         }
     }
 
     @Override
     public BasicValue newOperation(final AbstractInsnNode insn)
-            throws AnalyzerException
-    {
+            throws AnalyzerException {
         switch (insn.getOpcode()) {
-            case ACONST_NULL:
-                return newValue(Type.getObjectType("null"));
-            case ICONST_M1:
-            case ICONST_0:
-            case ICONST_1:
-            case ICONST_2:
-            case ICONST_3:
-            case ICONST_4:
-            case ICONST_5:
+        case ACONST_NULL:
+            return newValue(Type.getObjectType("null"));
+        case ICONST_M1:
+        case ICONST_0:
+        case ICONST_1:
+        case ICONST_2:
+        case ICONST_3:
+        case ICONST_4:
+        case ICONST_5:
+            return BasicValue.INT_VALUE;
+        case LCONST_0:
+        case LCONST_1:
+            return BasicValue.LONG_VALUE;
+        case FCONST_0:
+        case FCONST_1:
+        case FCONST_2:
+            return BasicValue.FLOAT_VALUE;
+        case DCONST_0:
+        case DCONST_1:
+            return BasicValue.DOUBLE_VALUE;
+        case BIPUSH:
+        case SIPUSH:
+            return BasicValue.INT_VALUE;
+        case LDC:
+            Object cst = ((LdcInsnNode) insn).cst;
+            if (cst instanceof Integer) {
                 return BasicValue.INT_VALUE;
-            case LCONST_0:
-            case LCONST_1:
-                return BasicValue.LONG_VALUE;
-            case FCONST_0:
-            case FCONST_1:
-            case FCONST_2:
+            } else if (cst instanceof Float) {
                 return BasicValue.FLOAT_VALUE;
-            case DCONST_0:
-            case DCONST_1:
+            } else if (cst instanceof Long) {
+                return BasicValue.LONG_VALUE;
+            } else if (cst instanceof Double) {
                 return BasicValue.DOUBLE_VALUE;
-            case BIPUSH:
-            case SIPUSH:
-                return BasicValue.INT_VALUE;
-            case LDC:
-                Object cst = ((LdcInsnNode) insn).cst;
-                if (cst instanceof Integer) {
-                    return BasicValue.INT_VALUE;
-                } else if (cst instanceof Float) {
-                    return BasicValue.FLOAT_VALUE;
-                } else if (cst instanceof Long) {
-                    return BasicValue.LONG_VALUE;
-                } else if (cst instanceof Double) {
-                    return BasicValue.DOUBLE_VALUE;
-                } else if (cst instanceof String) {
-                    return newValue(Type.getObjectType("java/lang/String"));
-                } else if (cst instanceof Type) {
-                    int sort = ((Type) cst).getSort();
-                    if (sort == Type.OBJECT || sort == Type.ARRAY) {
-                        return newValue(Type.getObjectType("java/lang/Class"));
-                    } else if (sort == Type.METHOD) {
-                        return newValue(Type.getObjectType("java/lang/invoke/MethodType"));
-                    } else {
-                        throw new IllegalArgumentException("Illegal LDC constant " + cst);
-                    }
-                } else if (cst instanceof Handle) {
-                    return newValue(Type.getObjectType("java/lang/invoke/MethodHandle"));
+            } else if (cst instanceof String) {
+                return newValue(Type.getObjectType("java/lang/String"));
+            } else if (cst instanceof Type) {
+                int sort = ((Type) cst).getSort();
+                if (sort == Type.OBJECT || sort == Type.ARRAY) {
+                    return newValue(Type.getObjectType("java/lang/Class"));
+                } else if (sort == Type.METHOD) {
+                    return newValue(Type
+                            .getObjectType("java/lang/invoke/MethodType"));
                 } else {
-                    throw new IllegalArgumentException("Illegal LDC constant " + cst);
+                    throw new IllegalArgumentException("Illegal LDC constant "
+                            + cst);
                 }
-            case JSR:
-                return BasicValue.RETURNADDRESS_VALUE;
-            case GETSTATIC:
-                return newValue(Type.getType(((FieldInsnNode) insn).desc));
-            case NEW:
-                return newValue(Type.getObjectType(((TypeInsnNode) insn).desc));
-            default:
-                throw new Error("Internal error.");
+            } else if (cst instanceof Handle) {
+                return newValue(Type
+                        .getObjectType("java/lang/invoke/MethodHandle"));
+            } else {
+                throw new IllegalArgumentException("Illegal LDC constant "
+                        + cst);
+            }
+        case JSR:
+            return BasicValue.RETURNADDRESS_VALUE;
+        case GETSTATIC:
+            return newValue(Type.getType(((FieldInsnNode) insn).desc));
+        case NEW:
+            return newValue(Type.getObjectType(((TypeInsnNode) insn).desc));
+        default:
+            throw new Error("Internal error.");
         }
     }
 
     @Override
-    public BasicValue copyOperation(final AbstractInsnNode insn, final BasicValue value)
-            throws AnalyzerException
-    {
+    public BasicValue copyOperation(final AbstractInsnNode insn,
+            final BasicValue value) throws AnalyzerException {
         return value;
     }
 
     @Override
-    public BasicValue unaryOperation(final AbstractInsnNode insn, final BasicValue value)
-            throws AnalyzerException
-    {
+    public BasicValue unaryOperation(final AbstractInsnNode insn,
+            final BasicValue value) throws AnalyzerException {
         switch (insn.getOpcode()) {
-            case INEG:
-            case IINC:
-            case L2I:
-            case F2I:
-            case D2I:
-            case I2B:
-            case I2C:
-            case I2S:
-                return BasicValue.INT_VALUE;
-            case FNEG:
-            case I2F:
-            case L2F:
-            case D2F:
-                return BasicValue.FLOAT_VALUE;
-            case LNEG:
-            case I2L:
-            case F2L:
-            case D2L:
-                return BasicValue.LONG_VALUE;
-            case DNEG:
-            case I2D:
-            case L2D:
-            case F2D:
-                return BasicValue.DOUBLE_VALUE;
-            case IFEQ:
-            case IFNE:
-            case IFLT:
-            case IFGE:
-            case IFGT:
-            case IFLE:
-            case TABLESWITCH:
-            case LOOKUPSWITCH:
-            case IRETURN:
-            case LRETURN:
-            case FRETURN:
-            case DRETURN:
-            case ARETURN:
-            case PUTSTATIC:
-                return null;
-            case GETFIELD:
-                return newValue(Type.getType(((FieldInsnNode) insn).desc));
-            case NEWARRAY:
-                switch (((IntInsnNode) insn).operand) {
-                    case T_BOOLEAN:
-                        return newValue(Type.getType("[Z"));
-                    case T_CHAR:
-                        return newValue(Type.getType("[C"));
-                    case T_BYTE:
-                        return newValue(Type.getType("[B"));
-                    case T_SHORT:
-                        return newValue(Type.getType("[S"));
-                    case T_INT:
-                        return newValue(Type.getType("[I"));
-                    case T_FLOAT:
-                        return newValue(Type.getType("[F"));
-                    case T_DOUBLE:
-                        return newValue(Type.getType("[D"));
-                    case T_LONG:
-                        return newValue(Type.getType("[J"));
-                    default:
-                        throw new AnalyzerException(insn, "Invalid array type");
-                }
-            case ANEWARRAY:
-                String desc = ((TypeInsnNode) insn).desc;
-                return newValue(Type.getType("[" + Type.getObjectType(desc)));
-            case ARRAYLENGTH:
-                return BasicValue.INT_VALUE;
-            case ATHROW:
-                return null;
-            case CHECKCAST:
-                desc = ((TypeInsnNode) insn).desc;
-                return newValue(Type.getObjectType(desc));
-            case INSTANCEOF:
-                return BasicValue.INT_VALUE;
-            case MONITORENTER:
-            case MONITOREXIT:
-            case IFNULL:
-            case IFNONNULL:
-                return null;
+        case INEG:
+        case IINC:
+        case L2I:
+        case F2I:
+        case D2I:
+        case I2B:
+        case I2C:
+        case I2S:
+            return BasicValue.INT_VALUE;
+        case FNEG:
+        case I2F:
+        case L2F:
+        case D2F:
+            return BasicValue.FLOAT_VALUE;
+        case LNEG:
+        case I2L:
+        case F2L:
+        case D2L:
+            return BasicValue.LONG_VALUE;
+        case DNEG:
+        case I2D:
+        case L2D:
+        case F2D:
+            return BasicValue.DOUBLE_VALUE;
+        case IFEQ:
+        case IFNE:
+        case IFLT:
+        case IFGE:
+        case IFGT:
+        case IFLE:
+        case TABLESWITCH:
+        case LOOKUPSWITCH:
+        case IRETURN:
+        case LRETURN:
+        case FRETURN:
+        case DRETURN:
+        case ARETURN:
+        case PUTSTATIC:
+            return null;
+        case GETFIELD:
+            return newValue(Type.getType(((FieldInsnNode) insn).desc));
+        case NEWARRAY:
+            switch (((IntInsnNode) insn).operand) {
+            case T_BOOLEAN:
+                return newValue(Type.getType("[Z"));
+            case T_CHAR:
+                return newValue(Type.getType("[C"));
+            case T_BYTE:
+                return newValue(Type.getType("[B"));
+            case T_SHORT:
+                return newValue(Type.getType("[S"));
+            case T_INT:
+                return newValue(Type.getType("[I"));
+            case T_FLOAT:
+                return newValue(Type.getType("[F"));
+            case T_DOUBLE:
+                return newValue(Type.getType("[D"));
+            case T_LONG:
+                return newValue(Type.getType("[J"));
             default:
-                throw new Error("Internal error.");
+                throw new AnalyzerException(insn, "Invalid array type");
+            }
+        case ANEWARRAY:
+            String desc = ((TypeInsnNode) insn).desc;
+            return newValue(Type.getType("[" + Type.getObjectType(desc)));
+        case ARRAYLENGTH:
+            return BasicValue.INT_VALUE;
+        case ATHROW:
+            return null;
+        case CHECKCAST:
+            desc = ((TypeInsnNode) insn).desc;
+            return newValue(Type.getObjectType(desc));
+        case INSTANCEOF:
+            return BasicValue.INT_VALUE;
+        case MONITORENTER:
+        case MONITOREXIT:
+        case IFNULL:
+        case IFNONNULL:
+            return null;
+        default:
+            throw new Error("Internal error.");
         }
     }
 
     @Override
-    public BasicValue binaryOperation(
-        final AbstractInsnNode insn,
-        final BasicValue value1,
-        final BasicValue value2) throws AnalyzerException
-    {
+    public BasicValue binaryOperation(final AbstractInsnNode insn,
+            final BasicValue value1, final BasicValue value2)
+            throws AnalyzerException {
         switch (insn.getOpcode()) {
-            case IALOAD:
-            case BALOAD:
-            case CALOAD:
-            case SALOAD:
-            case IADD:
-            case ISUB:
-            case IMUL:
-            case IDIV:
-            case IREM:
-            case ISHL:
-            case ISHR:
-            case IUSHR:
-            case IAND:
-            case IOR:
-            case IXOR:
-                return BasicValue.INT_VALUE;
-            case FALOAD:
-            case FADD:
-            case FSUB:
-            case FMUL:
-            case FDIV:
-            case FREM:
-                return BasicValue.FLOAT_VALUE;
-            case LALOAD:
-            case LADD:
-            case LSUB:
-            case LMUL:
-            case LDIV:
-            case LREM:
-            case LSHL:
-            case LSHR:
-            case LUSHR:
-            case LAND:
-            case LOR:
-            case LXOR:
-                return BasicValue.LONG_VALUE;
-            case DALOAD:
-            case DADD:
-            case DSUB:
-            case DMUL:
-            case DDIV:
-            case DREM:
-                return BasicValue.DOUBLE_VALUE;
-            case AALOAD:
-                return BasicValue.REFERENCE_VALUE;
-            case LCMP:
-            case FCMPL:
-            case FCMPG:
-            case DCMPL:
-            case DCMPG:
-                return BasicValue.INT_VALUE;
-            case IF_ICMPEQ:
-            case IF_ICMPNE:
-            case IF_ICMPLT:
-            case IF_ICMPGE:
-            case IF_ICMPGT:
-            case IF_ICMPLE:
-            case IF_ACMPEQ:
-            case IF_ACMPNE:
-            case PUTFIELD:
-                return null;
-            default:
-                throw new Error("Internal error.");
+        case IALOAD:
+        case BALOAD:
+        case CALOAD:
+        case SALOAD:
+        case IADD:
+        case ISUB:
+        case IMUL:
+        case IDIV:
+        case IREM:
+        case ISHL:
+        case ISHR:
+        case IUSHR:
+        case IAND:
+        case IOR:
+        case IXOR:
+            return BasicValue.INT_VALUE;
+        case FALOAD:
+        case FADD:
+        case FSUB:
+        case FMUL:
+        case FDIV:
+        case FREM:
+            return BasicValue.FLOAT_VALUE;
+        case LALOAD:
+        case LADD:
+        case LSUB:
+        case LMUL:
+        case LDIV:
+        case LREM:
+        case LSHL:
+        case LSHR:
+        case LUSHR:
+        case LAND:
+        case LOR:
+        case LXOR:
+            return BasicValue.LONG_VALUE;
+        case DALOAD:
+        case DADD:
+        case DSUB:
+        case DMUL:
+        case DDIV:
+        case DREM:
+            return BasicValue.DOUBLE_VALUE;
+        case AALOAD:
+            return BasicValue.REFERENCE_VALUE;
+        case LCMP:
+        case FCMPL:
+        case FCMPG:
+        case DCMPL:
+        case DCMPG:
+            return BasicValue.INT_VALUE;
+        case IF_ICMPEQ:
+        case IF_ICMPNE:
+        case IF_ICMPLT:
+        case IF_ICMPGE:
+        case IF_ICMPGT:
+        case IF_ICMPLE:
+        case IF_ACMPEQ:
+        case IF_ACMPNE:
+        case PUTFIELD:
+            return null;
+        default:
+            throw new Error("Internal error.");
         }
     }
 
     @Override
-    public BasicValue ternaryOperation(
-        final AbstractInsnNode insn,
-        final BasicValue value1,
-        final BasicValue value2,
-        final BasicValue value3) throws AnalyzerException
-    {
+    public BasicValue ternaryOperation(final AbstractInsnNode insn,
+            final BasicValue value1, final BasicValue value2,
+            final BasicValue value3) throws AnalyzerException {
         return null;
     }
 
     @Override
-    public BasicValue naryOperation(final AbstractInsnNode insn, final List<? extends BasicValue> values)
-            throws AnalyzerException
-    {
+    public BasicValue naryOperation(final AbstractInsnNode insn,
+            final List<? extends BasicValue> values) throws AnalyzerException {
         int opcode = insn.getOpcode();
         if (opcode == MULTIANEWARRAY) {
             return newValue(Type.getType(((MultiANewArrayInsnNode) insn).desc));
-        } else if (opcode == INVOKEDYNAMIC){
-            return newValue(Type.getReturnType(((InvokeDynamicInsnNode) insn).desc));
+        } else if (opcode == INVOKEDYNAMIC) {
+            return newValue(Type
+                    .getReturnType(((InvokeDynamicInsnNode) insn).desc));
         } else {
             return newValue(Type.getReturnType(((MethodInsnNode) insn).desc));
         }
     }
 
     @Override
-    public void returnOperation(
-        final AbstractInsnNode insn,
-        final BasicValue value,
-        final BasicValue expected) throws AnalyzerException
-    {
+    public void returnOperation(final AbstractInsnNode insn,
+            final BasicValue value, final BasicValue expected)
+            throws AnalyzerException {
     }
 
     @Override
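
BasicInterpreter collapses the JVM type system into a handful of BasicValue categories, which is why the switches above treat boolean, byte, char, short and int identically and map every object or array type to a single reference value. A small sketch of that collapse (not part of this patch; BasicInterpreterDemo is a hypothetical name):

    import scala.tools.asm.Type;
    import scala.tools.asm.tree.analysis.BasicInterpreter;
    import scala.tools.asm.tree.analysis.BasicValue;

    public class BasicInterpreterDemo {
        public static void main(String[] args) {
            BasicInterpreter interp = new BasicInterpreter();
            // short and int both collapse to BasicValue.INT_VALUE, printed as "I".
            System.out.println(interp.newValue(Type.SHORT_TYPE));
            System.out.println(interp.newValue(Type.INT_TYPE));
            // Any object or array type collapses to REFERENCE_VALUE, printed as "R".
            System.out.println(interp.newValue(Type.getObjectType("java/lang/String")));
            System.out.println(interp.newValue(Type.getType("[I")));
        }
    }
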
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicValue.java b/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
index 6c449db..439941f 100644
--- a/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
@@ -48,11 +48,14 @@ public class BasicValue implements Value {
 
     public static final BasicValue LONG_VALUE = new BasicValue(Type.LONG_TYPE);
 
-    public static final BasicValue DOUBLE_VALUE = new BasicValue(Type.DOUBLE_TYPE);
+    public static final BasicValue DOUBLE_VALUE = new BasicValue(
+            Type.DOUBLE_TYPE);
 
-    public static final BasicValue REFERENCE_VALUE = new BasicValue(Type.getObjectType("java/lang/Object"));
+    public static final BasicValue REFERENCE_VALUE = new BasicValue(
+            Type.getObjectType("java/lang/Object"));
 
-    public static final BasicValue RETURNADDRESS_VALUE = new BasicValue(Type.VOID_TYPE);
+    public static final BasicValue RETURNADDRESS_VALUE = new BasicValue(
+            Type.VOID_TYPE);
 
     private final Type type;
 
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
index 9297dd9..71666ed 100644
--- a/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
@@ -55,47 +55,41 @@ public class BasicVerifier extends BasicInterpreter {
     }
 
     @Override
-    public BasicValue copyOperation(final AbstractInsnNode insn, final BasicValue value)
-            throws AnalyzerException
-    {
+    public BasicValue copyOperation(final AbstractInsnNode insn,
+            final BasicValue value) throws AnalyzerException {
         Value expected;
         switch (insn.getOpcode()) {
-            case ILOAD:
-            case ISTORE:
-                expected = BasicValue.INT_VALUE;
-                break;
-            case FLOAD:
-            case FSTORE:
-                expected = BasicValue.FLOAT_VALUE;
-                break;
-            case LLOAD:
-            case LSTORE:
-                expected = BasicValue.LONG_VALUE;
-                break;
-            case DLOAD:
-            case DSTORE:
-                expected = BasicValue.DOUBLE_VALUE;
-                break;
-            case ALOAD:
-                if (!value.isReference()) {
-                    throw new AnalyzerException(insn,
-                            null,
-                            "an object reference",
-                            value);
-                }
-                return value;
-            case ASTORE:
-                if (!value.isReference()
-                        && !BasicValue.RETURNADDRESS_VALUE.equals(value))
-                {
-                    throw new AnalyzerException(insn,
-                            null,
-                            "an object reference or a return address",
-                            value);
-                }
-                return value;
-            default:
-                return value;
+        case ILOAD:
+        case ISTORE:
+            expected = BasicValue.INT_VALUE;
+            break;
+        case FLOAD:
+        case FSTORE:
+            expected = BasicValue.FLOAT_VALUE;
+            break;
+        case LLOAD:
+        case LSTORE:
+            expected = BasicValue.LONG_VALUE;
+            break;
+        case DLOAD:
+        case DSTORE:
+            expected = BasicValue.DOUBLE_VALUE;
+            break;
+        case ALOAD:
+            if (!value.isReference()) {
+                throw new AnalyzerException(insn, null, "an object reference",
+                        value);
+            }
+            return value;
+        case ASTORE:
+            if (!value.isReference()
+                    && !BasicValue.RETURNADDRESS_VALUE.equals(value)) {
+                throw new AnalyzerException(insn, null,
+                        "an object reference or a return address", value);
+            }
+            return value;
+        default:
+            return value;
         }
         if (!expected.equals(value)) {
             throw new AnalyzerException(insn, null, expected, value);
@@ -104,91 +98,85 @@ public class BasicVerifier extends BasicInterpreter {
     }
 
     @Override
-    public BasicValue unaryOperation(final AbstractInsnNode insn, final BasicValue value)
-            throws AnalyzerException
-    {
+    public BasicValue unaryOperation(final AbstractInsnNode insn,
+            final BasicValue value) throws AnalyzerException {
         BasicValue expected;
         switch (insn.getOpcode()) {
-            case INEG:
-            case IINC:
-            case I2F:
-            case I2L:
-            case I2D:
-            case I2B:
-            case I2C:
-            case I2S:
-            case IFEQ:
-            case IFNE:
-            case IFLT:
-            case IFGE:
-            case IFGT:
-            case IFLE:
-            case TABLESWITCH:
-            case LOOKUPSWITCH:
-            case IRETURN:
-            case NEWARRAY:
-            case ANEWARRAY:
-                expected = BasicValue.INT_VALUE;
-                break;
-            case FNEG:
-            case F2I:
-            case F2L:
-            case F2D:
-            case FRETURN:
-                expected = BasicValue.FLOAT_VALUE;
-                break;
-            case LNEG:
-            case L2I:
-            case L2F:
-            case L2D:
-            case LRETURN:
-                expected = BasicValue.LONG_VALUE;
-                break;
-            case DNEG:
-            case D2I:
-            case D2F:
-            case D2L:
-            case DRETURN:
-                expected = BasicValue.DOUBLE_VALUE;
-                break;
-            case GETFIELD:
-                expected = newValue(Type.getObjectType(((FieldInsnNode) insn).owner));
-                break;
-            case CHECKCAST:
-                if (!value.isReference()) {
-                    throw new AnalyzerException(insn,
-                            null,
-                            "an object reference",
-                            value);
-                }
-                return super.unaryOperation(insn, value);
-            case ARRAYLENGTH:
-                if (!isArrayValue(value)) {
-                    throw new AnalyzerException(insn,
-                            null,
-                            "an array reference",
-                            value);
-                }
-                return super.unaryOperation(insn, value);
-            case ARETURN:
-            case ATHROW:
-            case INSTANCEOF:
-            case MONITORENTER:
-            case MONITOREXIT:
-            case IFNULL:
-            case IFNONNULL:
-                if (!value.isReference()) {
-                    throw new AnalyzerException(insn,
-                            null,
-                            "an object reference",
-                            value);
-                }
-                return super.unaryOperation(insn, value);
-            case PUTSTATIC:
-                expected = newValue(Type.getType(((FieldInsnNode) insn).desc));
-                break;
-            default:
-                throw new Error("Internal error.");
+        case INEG:
+        case IINC:
+        case I2F:
+        case I2L:
+        case I2D:
+        case I2B:
+        case I2C:
+        case I2S:
+        case IFEQ:
+        case IFNE:
+        case IFLT:
+        case IFGE:
+        case IFGT:
+        case IFLE:
+        case TABLESWITCH:
+        case LOOKUPSWITCH:
+        case IRETURN:
+        case NEWARRAY:
+        case ANEWARRAY:
+            expected = BasicValue.INT_VALUE;
+            break;
+        case FNEG:
+        case F2I:
+        case F2L:
+        case F2D:
+        case FRETURN:
+            expected = BasicValue.FLOAT_VALUE;
+            break;
+        case LNEG:
+        case L2I:
+        case L2F:
+        case L2D:
+        case LRETURN:
+            expected = BasicValue.LONG_VALUE;
+            break;
+        case DNEG:
+        case D2I:
+        case D2F:
+        case D2L:
+        case DRETURN:
+            expected = BasicValue.DOUBLE_VALUE;
+            break;
+        case GETFIELD:
+            expected = newValue(Type
+                    .getObjectType(((FieldInsnNode) insn).owner));
+            break;
+        case CHECKCAST:
+            if (!value.isReference()) {
+                throw new AnalyzerException(insn, null, "an object reference",
+                        value);
+            }
+            return super.unaryOperation(insn, value);
+        case ARRAYLENGTH:
+            if (!isArrayValue(value)) {
+                throw new AnalyzerException(insn, null, "an array reference",
+                        value);
+            }
+            return super.unaryOperation(insn, value);
+        case ARETURN:
+        case ATHROW:
+        case INSTANCEOF:
+        case MONITORENTER:
+        case MONITOREXIT:
+        case IFNULL:
+        case IFNONNULL:
+            if (!value.isReference()) {
+                throw new AnalyzerException(insn, null, "an object reference",
+                        value);
+            }
+            return super.unaryOperation(insn, value);
+        case PUTSTATIC:
+            expected = newValue(Type.getType(((FieldInsnNode) insn).desc));
+            break;
+        default:
+            throw new Error("Internal error.");
         }
         if (!isSubTypeOf(value, expected)) {
             throw new AnalyzerException(insn, null, expected, value);
@@ -197,125 +185,125 @@ public class BasicVerifier extends BasicInterpreter {
     }
 
     @Override
-    public BasicValue binaryOperation(
-        final AbstractInsnNode insn,
-        final BasicValue value1,
-        final BasicValue value2) throws AnalyzerException
-    {
+    public BasicValue binaryOperation(final AbstractInsnNode insn,
+            final BasicValue value1, final BasicValue value2)
+            throws AnalyzerException {
         BasicValue expected1;
         BasicValue expected2;
         switch (insn.getOpcode()) {
-            case IALOAD:
-                expected1 = newValue(Type.getType("[I"));
-                expected2 = BasicValue.INT_VALUE;
-                break;
-            case BALOAD:
-                if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
-                    expected1 = newValue(Type.getType("[Z"));
-                } else {
-                    expected1 = newValue(Type.getType("[B"));
-                }
-                expected2 = BasicValue.INT_VALUE;
-                break;
-            case CALOAD:
-                expected1 = newValue(Type.getType("[C"));
-                expected2 = BasicValue.INT_VALUE;
-                break;
-            case SALOAD:
-                expected1 = newValue(Type.getType("[S"));
-                expected2 = BasicValue.INT_VALUE;
-                break;
-            case LALOAD:
-                expected1 = newValue(Type.getType("[J"));
-                expected2 = BasicValue.INT_VALUE;
-                break;
-            case FALOAD:
-                expected1 = newValue(Type.getType("[F"));
-                expected2 = BasicValue.INT_VALUE;
-                break;
-            case DALOAD:
-                expected1 = newValue(Type.getType("[D"));
-                expected2 = BasicValue.INT_VALUE;
-                break;
-            case AALOAD:
-                expected1 = newValue(Type.getType("[Ljava/lang/Object;"));
-                expected2 = BasicValue.INT_VALUE;
-                break;
-            case IADD:
-            case ISUB:
-            case IMUL:
-            case IDIV:
-            case IREM:
-            case ISHL:
-            case ISHR:
-            case IUSHR:
-            case IAND:
-            case IOR:
-            case IXOR:
-            case IF_ICMPEQ:
-            case IF_ICMPNE:
-            case IF_ICMPLT:
-            case IF_ICMPGE:
-            case IF_ICMPGT:
-            case IF_ICMPLE:
-                expected1 = BasicValue.INT_VALUE;
-                expected2 = BasicValue.INT_VALUE;
-                break;
-            case FADD:
-            case FSUB:
-            case FMUL:
-            case FDIV:
-            case FREM:
-            case FCMPL:
-            case FCMPG:
-                expected1 = BasicValue.FLOAT_VALUE;
-                expected2 = BasicValue.FLOAT_VALUE;
-                break;
-            case LADD:
-            case LSUB:
-            case LMUL:
-            case LDIV:
-            case LREM:
-            case LAND:
-            case LOR:
-            case LXOR:
-            case LCMP:
-                expected1 = BasicValue.LONG_VALUE;
-                expected2 = BasicValue.LONG_VALUE;
-                break;
-            case LSHL:
-            case LSHR:
-            case LUSHR:
-                expected1 = BasicValue.LONG_VALUE;
-                expected2 = BasicValue.INT_VALUE;
-                break;
-            case DADD:
-            case DSUB:
-            case DMUL:
-            case DDIV:
-            case DREM:
-            case DCMPL:
-            case DCMPG:
-                expected1 = BasicValue.DOUBLE_VALUE;
-                expected2 = BasicValue.DOUBLE_VALUE;
-                break;
-            case IF_ACMPEQ:
-            case IF_ACMPNE:
-                expected1 = BasicValue.REFERENCE_VALUE;
-                expected2 = BasicValue.REFERENCE_VALUE;
-                break;
-            case PUTFIELD:
-                FieldInsnNode fin = (FieldInsnNode) insn;
-                expected1 = newValue(Type.getObjectType(fin.owner));
-                expected2 = newValue(Type.getType(fin.desc));
-                break;
-            default:
-                throw new Error("Internal error.");
+        case IALOAD:
+            expected1 = newValue(Type.getType("[I"));
+            expected2 = BasicValue.INT_VALUE;
+            break;
+        case BALOAD:
+            if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
+                expected1 = newValue(Type.getType("[Z"));
+            } else {
+                expected1 = newValue(Type.getType("[B"));
+            }
+            expected2 = BasicValue.INT_VALUE;
+            break;
+        case CALOAD:
+            expected1 = newValue(Type.getType("[C"));
+            expected2 = BasicValue.INT_VALUE;
+            break;
+        case SALOAD:
+            expected1 = newValue(Type.getType("[S"));
+            expected2 = BasicValue.INT_VALUE;
+            break;
+        case LALOAD:
+            expected1 = newValue(Type.getType("[J"));
+            expected2 = BasicValue.INT_VALUE;
+            break;
+        case FALOAD:
+            expected1 = newValue(Type.getType("[F"));
+            expected2 = BasicValue.INT_VALUE;
+            break;
+        case DALOAD:
+            expected1 = newValue(Type.getType("[D"));
+            expected2 = BasicValue.INT_VALUE;
+            break;
+        case AALOAD:
+            expected1 = newValue(Type.getType("[Ljava/lang/Object;"));
+            expected2 = BasicValue.INT_VALUE;
+            break;
+        case IADD:
+        case ISUB:
+        case IMUL:
+        case IDIV:
+        case IREM:
+        case ISHL:
+        case ISHR:
+        case IUSHR:
+        case IAND:
+        case IOR:
+        case IXOR:
+        case IF_ICMPEQ:
+        case IF_ICMPNE:
+        case IF_ICMPLT:
+        case IF_ICMPGE:
+        case IF_ICMPGT:
+        case IF_ICMPLE:
+            expected1 = BasicValue.INT_VALUE;
+            expected2 = BasicValue.INT_VALUE;
+            break;
+        case FADD:
+        case FSUB:
+        case FMUL:
+        case FDIV:
+        case FREM:
+        case FCMPL:
+        case FCMPG:
+            expected1 = BasicValue.FLOAT_VALUE;
+            expected2 = BasicValue.FLOAT_VALUE;
+            break;
+        case LADD:
+        case LSUB:
+        case LMUL:
+        case LDIV:
+        case LREM:
+        case LAND:
+        case LOR:
+        case LXOR:
+        case LCMP:
+            expected1 = BasicValue.LONG_VALUE;
+            expected2 = BasicValue.LONG_VALUE;
+            break;
+        case LSHL:
+        case LSHR:
+        case LUSHR:
+            expected1 = BasicValue.LONG_VALUE;
+            expected2 = BasicValue.INT_VALUE;
+            break;
+        case DADD:
+        case DSUB:
+        case DMUL:
+        case DDIV:
+        case DREM:
+        case DCMPL:
+        case DCMPG:
+            expected1 = BasicValue.DOUBLE_VALUE;
+            expected2 = BasicValue.DOUBLE_VALUE;
+            break;
+        case IF_ACMPEQ:
+        case IF_ACMPNE:
+            expected1 = BasicValue.REFERENCE_VALUE;
+            expected2 = BasicValue.REFERENCE_VALUE;
+            break;
+        case PUTFIELD:
+            FieldInsnNode fin = (FieldInsnNode) insn;
+            expected1 = newValue(Type.getObjectType(fin.owner));
+            expected2 = newValue(Type.getType(fin.desc));
+            break;
+        default:
+            throw new Error("Internal error.");
         }
         if (!isSubTypeOf(value1, expected1)) {
-            throw new AnalyzerException(insn, "First argument", expected1, value1);
+            throw new AnalyzerException(insn, "First argument", expected1,
+                    value1);
         } else if (!isSubTypeOf(value2, expected2)) {
-            throw new AnalyzerException(insn, "Second argument", expected2, value2);
+            throw new AnalyzerException(insn, "Second argument", expected2,
+                    value2);
         }
         if (insn.getOpcode() == AALOAD) {
             return getElementValue(value1);
@@ -325,79 +313,73 @@ public class BasicVerifier extends BasicInterpreter {
     }
 
     @Override
-    public BasicValue ternaryOperation(
-        final AbstractInsnNode insn,
-        final BasicValue value1,
-        final BasicValue value2,
-        final BasicValue value3) throws AnalyzerException
-    {
+    public BasicValue ternaryOperation(final AbstractInsnNode insn,
+            final BasicValue value1, final BasicValue value2,
+            final BasicValue value3) throws AnalyzerException {
         BasicValue expected1;
         BasicValue expected3;
         switch (insn.getOpcode()) {
-            case IASTORE:
-                expected1 = newValue(Type.getType("[I"));
-                expected3 = BasicValue.INT_VALUE;
-                break;
-            case BASTORE:
-                if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
-                    expected1 = newValue(Type.getType("[Z"));
-                } else {
-                    expected1 = newValue(Type.getType("[B"));
-                }
-                expected3 = BasicValue.INT_VALUE;
-                break;
-            case CASTORE:
-                expected1 = newValue(Type.getType("[C"));
-                expected3 = BasicValue.INT_VALUE;
-                break;
-            case SASTORE:
-                expected1 = newValue(Type.getType("[S"));
-                expected3 = BasicValue.INT_VALUE;
-                break;
-            case LASTORE:
-                expected1 = newValue(Type.getType("[J"));
-                expected3 = BasicValue.LONG_VALUE;
-                break;
-            case FASTORE:
-                expected1 = newValue(Type.getType("[F"));
-                expected3 = BasicValue.FLOAT_VALUE;
-                break;
-            case DASTORE:
-                expected1 = newValue(Type.getType("[D"));
-                expected3 = BasicValue.DOUBLE_VALUE;
-                break;
-            case AASTORE:
-                expected1 = value1;
-                expected3 = BasicValue.REFERENCE_VALUE;
-                break;
-            default:
-                throw new Error("Internal error.");
+        case IASTORE:
+            expected1 = newValue(Type.getType("[I"));
+            expected3 = BasicValue.INT_VALUE;
+            break;
+        case BASTORE:
+            if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
+                expected1 = newValue(Type.getType("[Z"));
+            } else {
+                expected1 = newValue(Type.getType("[B"));
+            }
+            expected3 = BasicValue.INT_VALUE;
+            break;
+        case CASTORE:
+            expected1 = newValue(Type.getType("[C"));
+            expected3 = BasicValue.INT_VALUE;
+            break;
+        case SASTORE:
+            expected1 = newValue(Type.getType("[S"));
+            expected3 = BasicValue.INT_VALUE;
+            break;
+        case LASTORE:
+            expected1 = newValue(Type.getType("[J"));
+            expected3 = BasicValue.LONG_VALUE;
+            break;
+        case FASTORE:
+            expected1 = newValue(Type.getType("[F"));
+            expected3 = BasicValue.FLOAT_VALUE;
+            break;
+        case DASTORE:
+            expected1 = newValue(Type.getType("[D"));
+            expected3 = BasicValue.DOUBLE_VALUE;
+            break;
+        case AASTORE:
+            expected1 = value1;
+            expected3 = BasicValue.REFERENCE_VALUE;
+            break;
+        default:
+            throw new Error("Internal error.");
         }
         if (!isSubTypeOf(value1, expected1)) {
-            throw new AnalyzerException(insn, "First argument", "a " + expected1
-                    + " array reference", value1);
+            throw new AnalyzerException(insn, "First argument", "a "
+                    + expected1 + " array reference", value1);
         } else if (!BasicValue.INT_VALUE.equals(value2)) {
             throw new AnalyzerException(insn, "Second argument",
-                    BasicValue.INT_VALUE,
-                    value2);
+                    BasicValue.INT_VALUE, value2);
         } else if (!isSubTypeOf(value3, expected3)) {
-            throw new AnalyzerException(insn, "Third argument", expected3, value3);
+            throw new AnalyzerException(insn, "Third argument", expected3,
+                    value3);
         }
         return null;
     }
 
     @Override
-    public BasicValue naryOperation(final AbstractInsnNode insn, final List<? extends BasicValue> values)
-            throws AnalyzerException
-    {
+    public BasicValue naryOperation(final AbstractInsnNode insn,
+            final List<? extends BasicValue> values) throws AnalyzerException {
         int opcode = insn.getOpcode();
         if (opcode == MULTIANEWARRAY) {
             for (int i = 0; i < values.size(); ++i) {
                 if (!BasicValue.INT_VALUE.equals(values.get(i))) {
-                    throw new AnalyzerException(insn,
-                            null,
-                            BasicValue.INT_VALUE,
-                            values.get(i));
+                    throw new AnalyzerException(insn, null,
+                            BasicValue.INT_VALUE, values.get(i));
                 }
             }
         } else {
@@ -407,22 +389,18 @@ public class BasicVerifier extends BasicInterpreter {
                 Type owner = Type.getObjectType(((MethodInsnNode) insn).owner);
                 if (!isSubTypeOf(values.get(i++), newValue(owner))) {
                     throw new AnalyzerException(insn, "Method owner",
-                            newValue(owner),
-                            values.get(0));
+                            newValue(owner), values.get(0));
                 }
             }
-            String desc = (opcode == INVOKEDYNAMIC)?
-                    ((InvokeDynamicInsnNode) insn).desc:
-                        ((MethodInsnNode) insn).desc;
+            String desc = (opcode == INVOKEDYNAMIC) ? ((InvokeDynamicInsnNode) insn).desc
+                    : ((MethodInsnNode) insn).desc;
             Type[] args = Type.getArgumentTypes(desc);
             while (i < values.size()) {
                 BasicValue expected = newValue(args[j++]);
                 BasicValue encountered = values.get(i++);
                 if (!isSubTypeOf(encountered, expected)) {
-                    throw new AnalyzerException(insn,
-                            "Argument " + j,
-                            expected,
-                            encountered);
+                    throw new AnalyzerException(insn, "Argument " + j,
+                            expected, encountered);
                 }
             }
         }
@@ -430,16 +408,12 @@ public class BasicVerifier extends BasicInterpreter {
     }
 
     @Override
-    public void returnOperation(
-        final AbstractInsnNode insn,
-        final BasicValue value,
-        final BasicValue expected) throws AnalyzerException
-    {
+    public void returnOperation(final AbstractInsnNode insn,
+            final BasicValue value, final BasicValue expected)
+            throws AnalyzerException {
         if (!isSubTypeOf(value, expected)) {
-            throw new AnalyzerException(insn,
-                    "Incompatible return type",
-                    expected,
-                    value);
+            throw new AnalyzerException(insn, "Incompatible return type",
+                    expected, value);
         }
     }
 
@@ -448,12 +422,12 @@ public class BasicVerifier extends BasicInterpreter {
     }
 
     protected BasicValue getElementValue(final BasicValue objectArrayValue)
-            throws AnalyzerException
-    {
+            throws AnalyzerException {
         return BasicValue.REFERENCE_VALUE;
     }
 
-    protected boolean isSubTypeOf(final BasicValue value, final BasicValue expected) {
+    protected boolean isSubTypeOf(final BasicValue value,
+            final BasicValue expected) {
         return value.equals(expected);
     }
 }
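
BasicVerifier layers type checks on top of BasicInterpreter: where the interpreter merely propagates values, the verifier rejects operands whose basic type does not fit the opcode. A sketch of plugging it into the Analyzer (not part of this patch; VerifyDemo is a hypothetical name):

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.tree.ClassNode;
    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.analysis.Analyzer;
    import scala.tools.asm.tree.analysis.AnalyzerException;
    import scala.tools.asm.tree.analysis.BasicValue;
    import scala.tools.asm.tree.analysis.BasicVerifier;

    public class VerifyDemo {
        public static void main(String[] args) throws Exception {
            ClassNode cn = new ClassNode();
            new ClassReader("java.lang.Thread").accept(cn, 0);

            Analyzer<BasicValue> verifier =
                    new Analyzer<BasicValue>(new BasicVerifier());
            for (MethodNode mn : cn.methods) {
                try {
                    verifier.analyze(cn.name, mn);
                } catch (AnalyzerException e) {
                    // Well-formed bytecode never reaches this branch; malformed
                    // code reports messages such as
                    // "Error at instruction 7: Expected I, but found F".
                    System.out.println(cn.name + "." + mn.name + ": " + e.getMessage());
                }
            }
        }
    }
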
diff --git a/src/asm/scala/tools/asm/tree/analysis/Frame.java b/src/asm/scala/tools/asm/tree/analysis/Frame.java
index fe19c2c..0d92edc 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Frame.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Frame.java
@@ -44,10 +44,11 @@ import scala.tools.asm.tree.VarInsnNode;
 /**
  * A symbolic execution stack frame. A stack frame contains a set of local
  * variable slots, and an operand stack. Warning: long and double values are
- * represented by <i>two</i> slots in local variables, and by <i>one</i> slot
- * in the operand stack.
+ * represented by <i>two</i> slots in local variables, and by <i>one</i> slot in
+ * the operand stack.
  *
- * @param <V> type of the Value used for the analysis.
+ * @param <V>
+ *            type of the Value used for the analysis.
  *
  * @author Eric Bruneton
  */
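
The two-slot convention described in the Javadoc above is visible on the values themselves; a one-method sketch (not part of this patch; SlotSizeDemo is a hypothetical name):

    import scala.tools.asm.tree.analysis.BasicValue;

    public class SlotSizeDemo {
        public static void main(String[] args) {
            // getSize() is 1 for int/float/reference values and 2 for long/double,
            // matching the number of local-variable slots they occupy.
            System.out.println(BasicValue.INT_VALUE.getSize());    // 1
            System.out.println(BasicValue.LONG_VALUE.getSize());   // 2
            System.out.println(BasicValue.DOUBLE_VALUE.getSize()); // 2
        }
    }
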
@@ -77,8 +78,10 @@ public class Frame<V extends Value> {
     /**
      * Constructs a new frame with the given size.
      *
-     * @param nLocals the maximum number of local variables of the frame.
-     * @param nStack the maximum stack size of the frame.
+     * @param nLocals
+     *            the maximum number of local variables of the frame.
+     * @param nStack
+     *            the maximum stack size of the frame.
      */
     public Frame(final int nLocals, final int nStack) {
         this.values = (V[]) new Value[nLocals + nStack];
@@ -88,7 +91,8 @@ public class Frame<V extends Value> {
     /**
      * Constructs a new frame that is identical to the given frame.
      *
-     * @param src a frame.
+     * @param src
+     *            a frame.
      */
     public Frame(final Frame<? extends V> src) {
         this(src.locals, src.values.length - src.locals);
@@ -98,7 +102,8 @@ public class Frame<V extends Value> {
     /**
      * Copies the state of the given frame into this frame.
      *
-     * @param src a frame.
+     * @param src
+     *            a frame.
      * @return this frame.
      */
     public Frame<V> init(final Frame<? extends V> src) {
@@ -111,8 +116,9 @@ public class Frame<V extends Value> {
     /**
      * Sets the expected return type of the analyzed method.
      *
-     * @param v the expected return type of the analyzed method, or
-     *        <tt>null</tt> if the method returns void.
+     * @param v
+     *            the expected return type of the analyzed method, or
+     *            <tt>null</tt> if the method returns void.
      */
     public void setReturn(final V v) {
         returnValue = v;
@@ -130,13 +136,16 @@ public class Frame<V extends Value> {
     /**
      * Returns the value of the given local variable.
      *
-     * @param i a local variable index.
+     * @param i
+     *            a local variable index.
      * @return the value of the given local variable.
-     * @throws IndexOutOfBoundsException if the variable does not exist.
+     * @throws IndexOutOfBoundsException
+     *             if the variable does not exist.
      */
     public V getLocal(final int i) throws IndexOutOfBoundsException {
         if (i >= locals) {
-            throw new IndexOutOfBoundsException("Trying to access an inexistant local variable");
+            throw new IndexOutOfBoundsException(
+                    "Trying to access an inexistant local variable");
         }
         return values[i];
     }
@@ -144,15 +153,18 @@ public class Frame<V extends Value> {
     /**
      * Sets the value of the given local variable.
      *
-     * @param i a local variable index.
-     * @param value the new value of this local variable.
-     * @throws IndexOutOfBoundsException if the variable does not exist.
+     * @param i
+     *            a local variable index.
+     * @param value
+     *            the new value of this local variable.
+     * @throws IndexOutOfBoundsException
+     *             if the variable does not exist.
      */
     public void setLocal(final int i, final V value)
-            throws IndexOutOfBoundsException
-    {
+            throws IndexOutOfBoundsException {
         if (i >= locals) {
-            throw new IndexOutOfBoundsException("Trying to access an inexistant local variable "+i);
+            throw new IndexOutOfBoundsException(
+                    "Trying to access an inexistant local variable " + i);
         }
         values[i] = value;
     }
@@ -170,10 +182,11 @@ public class Frame<V extends Value> {
     /**
      * Returns the value of the given operand stack slot.
      *
-     * @param i the index of an operand stack slot.
+     * @param i
+     *            the index of an operand stack slot.
      * @return the value of the given operand stack slot.
-     * @throws IndexOutOfBoundsException if the operand stack slot does not
-     *         exist.
+     * @throws IndexOutOfBoundsException
+     *             if the operand stack slot does not exist.
      */
     public V getStack(final int i) throws IndexOutOfBoundsException {
         return values[i + locals];
@@ -190,11 +203,13 @@ public class Frame<V extends Value> {
      * Pops a value from the operand stack of this frame.
      *
      * @return the value that has been popped from the stack.
-     * @throws IndexOutOfBoundsException if the operand stack is empty.
+     * @throws IndexOutOfBoundsException
+     *             if the operand stack is empty.
      */
     public V pop() throws IndexOutOfBoundsException {
         if (top == 0) {
-            throw new IndexOutOfBoundsException("Cannot pop operand off an empty stack.");
+            throw new IndexOutOfBoundsException(
+                    "Cannot pop operand off an empty stack.");
         }
         return values[--top + locals];
     }
@@ -202,466 +217,469 @@ public class Frame<V extends Value> {
     /**
      * Pushes a value into the operand stack of this frame.
      *
-     * @param value the value that must be pushed into the stack.
-     * @throws IndexOutOfBoundsException if the operand stack is full.
+     * @param value
+     *            the value that must be pushed into the stack.
+     * @throws IndexOutOfBoundsException
+     *             if the operand stack is full.
      */
     public void push(final V value) throws IndexOutOfBoundsException {
         if (top + locals >= values.length) {
-            throw new IndexOutOfBoundsException("Insufficient maximum stack size.");
+            throw new IndexOutOfBoundsException(
+                    "Insufficient maximum stack size.");
         }
         values[top++ + locals] = value;
     }
 
-    public void execute(
-        final AbstractInsnNode insn,
-        final Interpreter<V> interpreter) throws AnalyzerException
-    {
+    public void execute(final AbstractInsnNode insn,
+            final Interpreter<V> interpreter) throws AnalyzerException {
         V value1, value2, value3, value4;
         List<V> values;
         int var;
 
         switch (insn.getOpcode()) {
-            case Opcodes.NOP:
-                break;
-            case Opcodes.ACONST_NULL:
-            case Opcodes.ICONST_M1:
-            case Opcodes.ICONST_0:
-            case Opcodes.ICONST_1:
-            case Opcodes.ICONST_2:
-            case Opcodes.ICONST_3:
-            case Opcodes.ICONST_4:
-            case Opcodes.ICONST_5:
-            case Opcodes.LCONST_0:
-            case Opcodes.LCONST_1:
-            case Opcodes.FCONST_0:
-            case Opcodes.FCONST_1:
-            case Opcodes.FCONST_2:
-            case Opcodes.DCONST_0:
-            case Opcodes.DCONST_1:
-            case Opcodes.BIPUSH:
-            case Opcodes.SIPUSH:
-            case Opcodes.LDC:
-                push(interpreter.newOperation(insn));
-                break;
-            case Opcodes.ILOAD:
-            case Opcodes.LLOAD:
-            case Opcodes.FLOAD:
-            case Opcodes.DLOAD:
-            case Opcodes.ALOAD:
-                push(interpreter.copyOperation(insn,
-                        getLocal(((VarInsnNode) insn).var)));
-                break;
-            case Opcodes.IALOAD:
-            case Opcodes.LALOAD:
-            case Opcodes.FALOAD:
-            case Opcodes.DALOAD:
-            case Opcodes.AALOAD:
-            case Opcodes.BALOAD:
-            case Opcodes.CALOAD:
-            case Opcodes.SALOAD:
-                value2 = pop();
-                value1 = pop();
-                push(interpreter.binaryOperation(insn, value1, value2));
-                break;
-            case Opcodes.ISTORE:
-            case Opcodes.LSTORE:
-            case Opcodes.FSTORE:
-            case Opcodes.DSTORE:
-            case Opcodes.ASTORE:
-                value1 = interpreter.copyOperation(insn, pop());
-                var = ((VarInsnNode) insn).var;
-                setLocal(var, value1);
-                if (value1.getSize() == 2) {
-                    setLocal(var + 1, interpreter.newValue(null));
+        case Opcodes.NOP:
+            break;
+        case Opcodes.ACONST_NULL:
+        case Opcodes.ICONST_M1:
+        case Opcodes.ICONST_0:
+        case Opcodes.ICONST_1:
+        case Opcodes.ICONST_2:
+        case Opcodes.ICONST_3:
+        case Opcodes.ICONST_4:
+        case Opcodes.ICONST_5:
+        case Opcodes.LCONST_0:
+        case Opcodes.LCONST_1:
+        case Opcodes.FCONST_0:
+        case Opcodes.FCONST_1:
+        case Opcodes.FCONST_2:
+        case Opcodes.DCONST_0:
+        case Opcodes.DCONST_1:
+        case Opcodes.BIPUSH:
+        case Opcodes.SIPUSH:
+        case Opcodes.LDC:
+            push(interpreter.newOperation(insn));
+            break;
+        case Opcodes.ILOAD:
+        case Opcodes.LLOAD:
+        case Opcodes.FLOAD:
+        case Opcodes.DLOAD:
+        case Opcodes.ALOAD:
+            push(interpreter.copyOperation(insn,
+                    getLocal(((VarInsnNode) insn).var)));
+            break;
+        case Opcodes.IALOAD:
+        case Opcodes.LALOAD:
+        case Opcodes.FALOAD:
+        case Opcodes.DALOAD:
+        case Opcodes.AALOAD:
+        case Opcodes.BALOAD:
+        case Opcodes.CALOAD:
+        case Opcodes.SALOAD:
+            value2 = pop();
+            value1 = pop();
+            push(interpreter.binaryOperation(insn, value1, value2));
+            break;
+        case Opcodes.ISTORE:
+        case Opcodes.LSTORE:
+        case Opcodes.FSTORE:
+        case Opcodes.DSTORE:
+        case Opcodes.ASTORE:
+            value1 = interpreter.copyOperation(insn, pop());
+            var = ((VarInsnNode) insn).var;
+            setLocal(var, value1);
+            if (value1.getSize() == 2) {
+                setLocal(var + 1, interpreter.newValue(null));
+            }
+            if (var > 0) {
+                Value local = getLocal(var - 1);
+                if (local != null && local.getSize() == 2) {
+                    setLocal(var - 1, interpreter.newValue(null));
                 }
-                if (var > 0) {
-                    Value local = getLocal(var - 1);
-                    if (local != null && local.getSize() == 2) {
-                        setLocal(var - 1, interpreter.newValue(null));
-                    }
+            }
+            break;
+        case Opcodes.IASTORE:
+        case Opcodes.LASTORE:
+        case Opcodes.FASTORE:
+        case Opcodes.DASTORE:
+        case Opcodes.AASTORE:
+        case Opcodes.BASTORE:
+        case Opcodes.CASTORE:
+        case Opcodes.SASTORE:
+            value3 = pop();
+            value2 = pop();
+            value1 = pop();
+            interpreter.ternaryOperation(insn, value1, value2, value3);
+            break;
+        case Opcodes.POP:
+            if (pop().getSize() == 2) {
+                throw new AnalyzerException(insn, "Illegal use of POP");
+            }
+            break;
+        case Opcodes.POP2:
+            if (pop().getSize() == 1) {
+                if (pop().getSize() != 1) {
+                    throw new AnalyzerException(insn, "Illegal use of POP2");
                 }
-                break;
-            case Opcodes.IASTORE:
-            case Opcodes.LASTORE:
-            case Opcodes.FASTORE:
-            case Opcodes.DASTORE:
-            case Opcodes.AASTORE:
-            case Opcodes.BASTORE:
-            case Opcodes.CASTORE:
-            case Opcodes.SASTORE:
-                value3 = pop();
+            }
+            break;
+        case Opcodes.DUP:
+            value1 = pop();
+            if (value1.getSize() != 1) {
+                throw new AnalyzerException(insn, "Illegal use of DUP");
+            }
+            push(value1);
+            push(interpreter.copyOperation(insn, value1));
+            break;
+        case Opcodes.DUP_X1:
+            value1 = pop();
+            value2 = pop();
+            if (value1.getSize() != 1 || value2.getSize() != 1) {
+                throw new AnalyzerException(insn, "Illegal use of DUP_X1");
+            }
+            push(interpreter.copyOperation(insn, value1));
+            push(value2);
+            push(value1);
+            break;
+        case Opcodes.DUP_X2:
+            value1 = pop();
+            if (value1.getSize() == 1) {
                 value2 = pop();
-                value1 = pop();
-                interpreter.ternaryOperation(insn, value1, value2, value3);
-                break;
-            case Opcodes.POP:
-                if (pop().getSize() == 2) {
-                    throw new AnalyzerException(insn, "Illegal use of POP");
-                }
-                break;
-            case Opcodes.POP2:
-                if (pop().getSize() == 1) {
-                    if (pop().getSize() != 1) {
-                        throw new AnalyzerException(insn, "Illegal use of POP2");
+                if (value2.getSize() == 1) {
+                    value3 = pop();
+                    if (value3.getSize() == 1) {
+                        push(interpreter.copyOperation(insn, value1));
+                        push(value3);
+                        push(value2);
+                        push(value1);
+                        break;
                     }
+                } else {
+                    push(interpreter.copyOperation(insn, value1));
+                    push(value2);
+                    push(value1);
+                    break;
                 }
-                break;
-            case Opcodes.DUP:
-                value1 = pop();
-                if (value1.getSize() != 1) {
-                    throw new AnalyzerException(insn, "Illegal use of DUP");
+            }
+            throw new AnalyzerException(insn, "Illegal use of DUP_X2");
+        case Opcodes.DUP2:
+            value1 = pop();
+            if (value1.getSize() == 1) {
+                value2 = pop();
+                if (value2.getSize() == 1) {
+                    push(value2);
+                    push(value1);
+                    push(interpreter.copyOperation(insn, value2));
+                    push(interpreter.copyOperation(insn, value1));
+                    break;
                 }
+            } else {
                 push(value1);
                 push(interpreter.copyOperation(insn, value1));
                 break;
-            case Opcodes.DUP_X1:
-                value1 = pop();
+            }
+            throw new AnalyzerException(insn, "Illegal use of DUP2");
+        case Opcodes.DUP2_X1:
+            value1 = pop();
+            if (value1.getSize() == 1) {
                 value2 = pop();
-                if (value1.getSize() != 1 || value2.getSize() != 1) {
-                    throw new AnalyzerException(insn, "Illegal use of DUP_X1");
-                }
-                push(interpreter.copyOperation(insn, value1));
-                push(value2);
-                push(value1);
-                break;
-            case Opcodes.DUP_X2:
-                value1 = pop();
-                if (value1.getSize() == 1) {
-                    value2 = pop();
-                    if (value2.getSize() == 1) {
-                        value3 = pop();
-                        if (value3.getSize() == 1) {
-                            push(interpreter.copyOperation(insn, value1));
-                            push(value3);
-                            push(value2);
-                            push(value1);
-                            break;
-                        }
-                    } else {
+                if (value2.getSize() == 1) {
+                    value3 = pop();
+                    if (value3.getSize() == 1) {
+                        push(interpreter.copyOperation(insn, value2));
                         push(interpreter.copyOperation(insn, value1));
+                        push(value3);
                         push(value2);
                         push(value1);
                         break;
                     }
                 }
-                throw new AnalyzerException(insn, "Illegal use of DUP_X2");
-            case Opcodes.DUP2:
-                value1 = pop();
-                if (value1.getSize() == 1) {
-                    value2 = pop();
-                    if (value2.getSize() == 1) {
-                        push(value2);
-                        push(value1);
-                        push(interpreter.copyOperation(insn, value2));
-                        push(interpreter.copyOperation(insn, value1));
-                        break;
-                    }
-                } else {
-                    push(value1);
+            } else {
+                value2 = pop();
+                if (value2.getSize() == 1) {
                     push(interpreter.copyOperation(insn, value1));
+                    push(value2);
+                    push(value1);
                     break;
                 }
-                throw new AnalyzerException(insn, "Illegal use of DUP2");
-            case Opcodes.DUP2_X1:
-                value1 = pop();
-                if (value1.getSize() == 1) {
-                    value2 = pop();
-                    if (value2.getSize() == 1) {
-                        value3 = pop();
-                        if (value3.getSize() == 1) {
+            }
+            throw new AnalyzerException(insn, "Illegal use of DUP2_X1");
+        case Opcodes.DUP2_X2:
+            value1 = pop();
+            if (value1.getSize() == 1) {
+                value2 = pop();
+                if (value2.getSize() == 1) {
+                    value3 = pop();
+                    if (value3.getSize() == 1) {
+                        value4 = pop();
+                        if (value4.getSize() == 1) {
                             push(interpreter.copyOperation(insn, value2));
                             push(interpreter.copyOperation(insn, value1));
+                            push(value4);
                             push(value3);
                             push(value2);
                             push(value1);
                             break;
                         }
-                    }
-                } else {
-                    value2 = pop();
-                    if (value2.getSize() == 1) {
+                    } else {
+                        push(interpreter.copyOperation(insn, value2));
                         push(interpreter.copyOperation(insn, value1));
+                        push(value3);
                         push(value2);
                         push(value1);
                         break;
                     }
                 }
-                throw new AnalyzerException(insn, "Illegal use of DUP2_X1");
-            case Opcodes.DUP2_X2:
-                value1 = pop();
-                if (value1.getSize() == 1) {
-                    value2 = pop();
-                    if (value2.getSize() == 1) {
-                        value3 = pop();
-                        if (value3.getSize() == 1) {
-                            value4 = pop();
-                            if (value4.getSize() == 1) {
-                                push(interpreter.copyOperation(insn, value2));
-                                push(interpreter.copyOperation(insn, value1));
-                                push(value4);
-                                push(value3);
-                                push(value2);
-                                push(value1);
-                                break;
-                            }
-                        } else {
-                            push(interpreter.copyOperation(insn, value2));
-                            push(interpreter.copyOperation(insn, value1));
-                            push(value3);
-                            push(value2);
-                            push(value1);
-                            break;
-                        }
-                    }
-                } else {
-                    value2 = pop();
-                    if (value2.getSize() == 1) {
-                        value3 = pop();
-                        if (value3.getSize() == 1) {
-                            push(interpreter.copyOperation(insn, value1));
-                            push(value3);
-                            push(value2);
-                            push(value1);
-                            break;
-                        }
-                    } else {
+            } else {
+                value2 = pop();
+                if (value2.getSize() == 1) {
+                    value3 = pop();
+                    if (value3.getSize() == 1) {
                         push(interpreter.copyOperation(insn, value1));
+                        push(value3);
                         push(value2);
                         push(value1);
                         break;
                     }
-                }
-                throw new AnalyzerException(insn, "Illegal use of DUP2_X2");
-            case Opcodes.SWAP:
-                value2 = pop();
-                value1 = pop();
-                if (value1.getSize() != 1 || value2.getSize() != 1) {
-                    throw new AnalyzerException(insn, "Illegal use of SWAP");
-                }
-                push(interpreter.copyOperation(insn, value2));
-                push(interpreter.copyOperation(insn, value1));
-                break;
-            case Opcodes.IADD:
-            case Opcodes.LADD:
-            case Opcodes.FADD:
-            case Opcodes.DADD:
-            case Opcodes.ISUB:
-            case Opcodes.LSUB:
-            case Opcodes.FSUB:
-            case Opcodes.DSUB:
-            case Opcodes.IMUL:
-            case Opcodes.LMUL:
-            case Opcodes.FMUL:
-            case Opcodes.DMUL:
-            case Opcodes.IDIV:
-            case Opcodes.LDIV:
-            case Opcodes.FDIV:
-            case Opcodes.DDIV:
-            case Opcodes.IREM:
-            case Opcodes.LREM:
-            case Opcodes.FREM:
-            case Opcodes.DREM:
-                value2 = pop();
-                value1 = pop();
-                push(interpreter.binaryOperation(insn, value1, value2));
-                break;
-            case Opcodes.INEG:
-            case Opcodes.LNEG:
-            case Opcodes.FNEG:
-            case Opcodes.DNEG:
-                push(interpreter.unaryOperation(insn, pop()));
-                break;
-            case Opcodes.ISHL:
-            case Opcodes.LSHL:
-            case Opcodes.ISHR:
-            case Opcodes.LSHR:
-            case Opcodes.IUSHR:
-            case Opcodes.LUSHR:
-            case Opcodes.IAND:
-            case Opcodes.LAND:
-            case Opcodes.IOR:
-            case Opcodes.LOR:
-            case Opcodes.IXOR:
-            case Opcodes.LXOR:
-                value2 = pop();
-                value1 = pop();
-                push(interpreter.binaryOperation(insn, value1, value2));
-                break;
-            case Opcodes.IINC:
-                var = ((IincInsnNode) insn).var;
-                setLocal(var, interpreter.unaryOperation(insn, getLocal(var)));
-                break;
-            case Opcodes.I2L:
-            case Opcodes.I2F:
-            case Opcodes.I2D:
-            case Opcodes.L2I:
-            case Opcodes.L2F:
-            case Opcodes.L2D:
-            case Opcodes.F2I:
-            case Opcodes.F2L:
-            case Opcodes.F2D:
-            case Opcodes.D2I:
-            case Opcodes.D2L:
-            case Opcodes.D2F:
-            case Opcodes.I2B:
-            case Opcodes.I2C:
-            case Opcodes.I2S:
-                push(interpreter.unaryOperation(insn, pop()));
-                break;
-            case Opcodes.LCMP:
-            case Opcodes.FCMPL:
-            case Opcodes.FCMPG:
-            case Opcodes.DCMPL:
-            case Opcodes.DCMPG:
-                value2 = pop();
-                value1 = pop();
-                push(interpreter.binaryOperation(insn, value1, value2));
-                break;
-            case Opcodes.IFEQ:
-            case Opcodes.IFNE:
-            case Opcodes.IFLT:
-            case Opcodes.IFGE:
-            case Opcodes.IFGT:
-            case Opcodes.IFLE:
-                interpreter.unaryOperation(insn, pop());
-                break;
-            case Opcodes.IF_ICMPEQ:
-            case Opcodes.IF_ICMPNE:
-            case Opcodes.IF_ICMPLT:
-            case Opcodes.IF_ICMPGE:
-            case Opcodes.IF_ICMPGT:
-            case Opcodes.IF_ICMPLE:
-            case Opcodes.IF_ACMPEQ:
-            case Opcodes.IF_ACMPNE:
-                value2 = pop();
-                value1 = pop();
-                interpreter.binaryOperation(insn, value1, value2);
-                break;
-            case Opcodes.GOTO:
-                break;
-            case Opcodes.JSR:
-                push(interpreter.newOperation(insn));
-                break;
-            case Opcodes.RET:
-                break;
-            case Opcodes.TABLESWITCH:
-            case Opcodes.LOOKUPSWITCH:
-                interpreter.unaryOperation(insn, pop());
-                break;
-            case Opcodes.IRETURN:
-            case Opcodes.LRETURN:
-            case Opcodes.FRETURN:
-            case Opcodes.DRETURN:
-            case Opcodes.ARETURN:
-                value1 = pop();
-                interpreter.unaryOperation(insn, value1);
-                interpreter.returnOperation(insn, value1, returnValue);
-                break;
-            case Opcodes.RETURN:
-                if (returnValue != null) {
-                    throw new AnalyzerException(insn, "Incompatible return type");
-                }
-                break;
-            case Opcodes.GETSTATIC:
-                push(interpreter.newOperation(insn));
-                break;
-            case Opcodes.PUTSTATIC:
-                interpreter.unaryOperation(insn, pop());
-                break;
-            case Opcodes.GETFIELD:
-                push(interpreter.unaryOperation(insn, pop()));
-                break;
-            case Opcodes.PUTFIELD:
-                value2 = pop();
-                value1 = pop();
-                interpreter.binaryOperation(insn, value1, value2);
-                break;
-            case Opcodes.INVOKEVIRTUAL:
-            case Opcodes.INVOKESPECIAL:
-            case Opcodes.INVOKESTATIC:
-            case Opcodes.INVOKEINTERFACE: {
-                values = new ArrayList<V>();
-                String desc = ((MethodInsnNode) insn).desc;
-                for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
-                    values.add(0, pop());
-                }
-                if (insn.getOpcode() != Opcodes.INVOKESTATIC) {
-                    values.add(0, pop());
-                }
-                if (Type.getReturnType(desc) == Type.VOID_TYPE) {
-                    interpreter.naryOperation(insn, values);
                 } else {
-                    push(interpreter.naryOperation(insn, values));
+                    push(interpreter.copyOperation(insn, value1));
+                    push(value2);
+                    push(value1);
+                    break;
                 }
-                break;
             }
-            case Opcodes.INVOKEDYNAMIC: {
-                values = new ArrayList<V>();
-                String desc = ((InvokeDynamicInsnNode) insn).desc;
-                for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
-                    values.add(0, pop());
-                }
-                if (Type.getReturnType(desc) == Type.VOID_TYPE) {
-                    interpreter.naryOperation(insn, values);
-                } else {
-                    push(interpreter.naryOperation(insn, values));
-                }
-                break;
+            throw new AnalyzerException(insn, "Illegal use of DUP2_X2");
+        case Opcodes.SWAP:
+            value2 = pop();
+            value1 = pop();
+            if (value1.getSize() != 1 || value2.getSize() != 1) {
+                throw new AnalyzerException(insn, "Illegal use of SWAP");
             }
-            case Opcodes.NEW:
-                push(interpreter.newOperation(insn));
-                break;
-            case Opcodes.NEWARRAY:
-            case Opcodes.ANEWARRAY:
-            case Opcodes.ARRAYLENGTH:
-                push(interpreter.unaryOperation(insn, pop()));
-                break;
-            case Opcodes.ATHROW:
-                interpreter.unaryOperation(insn, pop());
-                break;
-            case Opcodes.CHECKCAST:
-            case Opcodes.INSTANCEOF:
-                push(interpreter.unaryOperation(insn, pop()));
-                break;
-            case Opcodes.MONITORENTER:
-            case Opcodes.MONITOREXIT:
-                interpreter.unaryOperation(insn, pop());
-                break;
-            case Opcodes.MULTIANEWARRAY:
-                values = new ArrayList<V>();
-                for (int i = ((MultiANewArrayInsnNode) insn).dims; i > 0; --i) {
-                    values.add(0, pop());
-                }
+            push(interpreter.copyOperation(insn, value2));
+            push(interpreter.copyOperation(insn, value1));
+            break;
+        case Opcodes.IADD:
+        case Opcodes.LADD:
+        case Opcodes.FADD:
+        case Opcodes.DADD:
+        case Opcodes.ISUB:
+        case Opcodes.LSUB:
+        case Opcodes.FSUB:
+        case Opcodes.DSUB:
+        case Opcodes.IMUL:
+        case Opcodes.LMUL:
+        case Opcodes.FMUL:
+        case Opcodes.DMUL:
+        case Opcodes.IDIV:
+        case Opcodes.LDIV:
+        case Opcodes.FDIV:
+        case Opcodes.DDIV:
+        case Opcodes.IREM:
+        case Opcodes.LREM:
+        case Opcodes.FREM:
+        case Opcodes.DREM:
+            value2 = pop();
+            value1 = pop();
+            push(interpreter.binaryOperation(insn, value1, value2));
+            break;
+        case Opcodes.INEG:
+        case Opcodes.LNEG:
+        case Opcodes.FNEG:
+        case Opcodes.DNEG:
+            push(interpreter.unaryOperation(insn, pop()));
+            break;
+        case Opcodes.ISHL:
+        case Opcodes.LSHL:
+        case Opcodes.ISHR:
+        case Opcodes.LSHR:
+        case Opcodes.IUSHR:
+        case Opcodes.LUSHR:
+        case Opcodes.IAND:
+        case Opcodes.LAND:
+        case Opcodes.IOR:
+        case Opcodes.LOR:
+        case Opcodes.IXOR:
+        case Opcodes.LXOR:
+            value2 = pop();
+            value1 = pop();
+            push(interpreter.binaryOperation(insn, value1, value2));
+            break;
+        case Opcodes.IINC:
+            var = ((IincInsnNode) insn).var;
+            setLocal(var, interpreter.unaryOperation(insn, getLocal(var)));
+            break;
+        case Opcodes.I2L:
+        case Opcodes.I2F:
+        case Opcodes.I2D:
+        case Opcodes.L2I:
+        case Opcodes.L2F:
+        case Opcodes.L2D:
+        case Opcodes.F2I:
+        case Opcodes.F2L:
+        case Opcodes.F2D:
+        case Opcodes.D2I:
+        case Opcodes.D2L:
+        case Opcodes.D2F:
+        case Opcodes.I2B:
+        case Opcodes.I2C:
+        case Opcodes.I2S:
+            push(interpreter.unaryOperation(insn, pop()));
+            break;
+        case Opcodes.LCMP:
+        case Opcodes.FCMPL:
+        case Opcodes.FCMPG:
+        case Opcodes.DCMPL:
+        case Opcodes.DCMPG:
+            value2 = pop();
+            value1 = pop();
+            push(interpreter.binaryOperation(insn, value1, value2));
+            break;
+        case Opcodes.IFEQ:
+        case Opcodes.IFNE:
+        case Opcodes.IFLT:
+        case Opcodes.IFGE:
+        case Opcodes.IFGT:
+        case Opcodes.IFLE:
+            interpreter.unaryOperation(insn, pop());
+            break;
+        case Opcodes.IF_ICMPEQ:
+        case Opcodes.IF_ICMPNE:
+        case Opcodes.IF_ICMPLT:
+        case Opcodes.IF_ICMPGE:
+        case Opcodes.IF_ICMPGT:
+        case Opcodes.IF_ICMPLE:
+        case Opcodes.IF_ACMPEQ:
+        case Opcodes.IF_ACMPNE:
+            value2 = pop();
+            value1 = pop();
+            interpreter.binaryOperation(insn, value1, value2);
+            break;
+        case Opcodes.GOTO:
+            break;
+        case Opcodes.JSR:
+            push(interpreter.newOperation(insn));
+            break;
+        case Opcodes.RET:
+            break;
+        case Opcodes.TABLESWITCH:
+        case Opcodes.LOOKUPSWITCH:
+            interpreter.unaryOperation(insn, pop());
+            break;
+        case Opcodes.IRETURN:
+        case Opcodes.LRETURN:
+        case Opcodes.FRETURN:
+        case Opcodes.DRETURN:
+        case Opcodes.ARETURN:
+            value1 = pop();
+            interpreter.unaryOperation(insn, value1);
+            interpreter.returnOperation(insn, value1, returnValue);
+            break;
+        case Opcodes.RETURN:
+            if (returnValue != null) {
+                throw new AnalyzerException(insn, "Incompatible return type");
+            }
+            break;
+        case Opcodes.GETSTATIC:
+            push(interpreter.newOperation(insn));
+            break;
+        case Opcodes.PUTSTATIC:
+            interpreter.unaryOperation(insn, pop());
+            break;
+        case Opcodes.GETFIELD:
+            push(interpreter.unaryOperation(insn, pop()));
+            break;
+        case Opcodes.PUTFIELD:
+            value2 = pop();
+            value1 = pop();
+            interpreter.binaryOperation(insn, value1, value2);
+            break;
+        case Opcodes.INVOKEVIRTUAL:
+        case Opcodes.INVOKESPECIAL:
+        case Opcodes.INVOKESTATIC:
+        case Opcodes.INVOKEINTERFACE: {
+            values = new ArrayList<V>();
+            String desc = ((MethodInsnNode) insn).desc;
+            for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
+                values.add(0, pop());
+            }
+            if (insn.getOpcode() != Opcodes.INVOKESTATIC) {
+                values.add(0, pop());
+            }
+            if (Type.getReturnType(desc) == Type.VOID_TYPE) {
+                interpreter.naryOperation(insn, values);
+            } else {
                 push(interpreter.naryOperation(insn, values));
-                break;
-            case Opcodes.IFNULL:
-            case Opcodes.IFNONNULL:
-                interpreter.unaryOperation(insn, pop());
-                break;
-            default:
-                throw new RuntimeException("Illegal opcode "+insn.getOpcode());
+            }
+            break;
+        }
+        case Opcodes.INVOKEDYNAMIC: {
+            values = new ArrayList<V>();
+            String desc = ((InvokeDynamicInsnNode) insn).desc;
+            for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
+                values.add(0, pop());
+            }
+            if (Type.getReturnType(desc) == Type.VOID_TYPE) {
+                interpreter.naryOperation(insn, values);
+            } else {
+                push(interpreter.naryOperation(insn, values));
+            }
+            break;
+        }
+        case Opcodes.NEW:
+            push(interpreter.newOperation(insn));
+            break;
+        case Opcodes.NEWARRAY:
+        case Opcodes.ANEWARRAY:
+        case Opcodes.ARRAYLENGTH:
+            push(interpreter.unaryOperation(insn, pop()));
+            break;
+        case Opcodes.ATHROW:
+            interpreter.unaryOperation(insn, pop());
+            break;
+        case Opcodes.CHECKCAST:
+        case Opcodes.INSTANCEOF:
+            push(interpreter.unaryOperation(insn, pop()));
+            break;
+        case Opcodes.MONITORENTER:
+        case Opcodes.MONITOREXIT:
+            interpreter.unaryOperation(insn, pop());
+            break;
+        case Opcodes.MULTIANEWARRAY:
+            values = new ArrayList<V>();
+            for (int i = ((MultiANewArrayInsnNode) insn).dims; i > 0; --i) {
+                values.add(0, pop());
+            }
+            push(interpreter.naryOperation(insn, values));
+            break;
+        case Opcodes.IFNULL:
+        case Opcodes.IFNONNULL:
+            interpreter.unaryOperation(insn, pop());
+            break;
+        default:
+            throw new RuntimeException("Illegal opcode " + insn.getOpcode());
         }
     }
 
     /**
      * Merges this frame with the given frame.
      *
-     * @param frame a frame.
-     * @param interpreter the interpreter used to merge values.
+     * @param frame
+     *            a frame.
+     * @param interpreter
+     *            the interpreter used to merge values.
      * @return <tt>true</tt> if this frame has been changed as a result of the
      *         merge operation, or <tt>false</tt> otherwise.
-     * @throws AnalyzerException if the frames have incompatible sizes.
+     * @throws AnalyzerException
+     *             if the frames have incompatible sizes.
      */
-    public boolean merge(final Frame<? extends V> frame, final Interpreter<V> interpreter)
-            throws AnalyzerException
-    {
+    public boolean merge(final Frame<? extends V> frame,
+            final Interpreter<V> interpreter) throws AnalyzerException {
         if (top != frame.top) {
             throw new AnalyzerException(null, "Incompatible stack heights");
         }
         boolean changes = false;
         for (int i = 0; i < locals + top; ++i) {
             V v = interpreter.merge(values[i], frame.values[i]);
-            if (v != values[i]) {
+            if (!v.equals(values[i])) {
                 values[i] = v;
                 changes = true;
             }
@@ -672,9 +690,11 @@ public class Frame<V extends Value> {
     /**
      * Merges this frame with the given frame (case of a RET instruction).
      *
-     * @param frame a frame
-     * @param access the local variables that have been accessed by the
-     *        subroutine to which the RET instruction corresponds.
+     * @param frame
+     *            a frame
+     * @param access
+     *            the local variables that have been accessed by the subroutine
+     *            to which the RET instruction corresponds.
      * @return <tt>true</tt> if this frame has been changed as a result of the
      *         merge operation, or <tt>false</tt> otherwise.
      */
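
The Frame javadoc reformatted above keeps the warning that long and double values occupy two local variable slots but only one operand stack slot. A minimal sketch of that convention, using the Frame constructor, setLocal, push, getStack and pop shown in this file, and assuming the BasicValue constants from the same package (not shown in this hunk):

import scala.tools.asm.tree.analysis.BasicValue;
import scala.tools.asm.tree.analysis.Frame;

public class FrameSlotSketch {
    public static void main(String[] args) {
        // Two local variable slots and an operand stack of depth two.
        Frame<BasicValue> frame = new Frame<BasicValue>(2, 2);
        // A long occupies two local slots: the value plus a filler slot.
        frame.setLocal(0, BasicValue.LONG_VALUE);
        frame.setLocal(1, BasicValue.UNINITIALIZED_VALUE);
        // ...but only one operand stack slot.
        frame.push(BasicValue.LONG_VALUE);
        System.out.println(frame.getStack(0)); // descriptor of the long type
        frame.pop();                           // stack is empty again
    }
}

The filler slot at index 1 mirrors what execute() does above for ISTORE-style instructions, which write interpreter.newValue(null) into the slot following a size-2 value.
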
diff --git a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
index 930c8f4..56f4bed 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
@@ -42,7 +42,8 @@ import scala.tools.asm.tree.AbstractInsnNode;
  * various semantic interpreters, without needing to duplicate the code to
  * simulate the transfer of values.
  *
- * @param <V> type of the Value used for the analysis.
+ * @param <V>
+ *            type of the Value used for the analysis.
  *
  * @author Eric Bruneton
  */
@@ -57,12 +58,13 @@ public abstract class Interpreter<V extends Value> {
     /**
      * Creates a new value that represents the given type.
      *
-     * Called for method parameters (including <code>this</code>),
-     * exception handler variable and with <code>null</code> type
-     * for variables reserved by long and double types.
+     * Called for method parameters (including <code>this</code>), exception
+     * handler variable and with <code>null</code> type for variables reserved
+     * by long and double types.
      *
-     * @param type a primitive or reference type, or <tt>null</tt> to
-     *        represent an uninitialized value.
+     * @param type
+     *            a primitive or reference type, or <tt>null</tt> to represent
+     *            an uninitialized value.
      * @return a value that represents the given type. The size of the returned
      *         value must be equal to the size of the given type.
      */
@@ -76,9 +78,11 @@ public abstract class Interpreter<V extends Value> {
      * ICONST_5, LCONST_0, LCONST_1, FCONST_0, FCONST_1, FCONST_2, DCONST_0,
      * DCONST_1, BIPUSH, SIPUSH, LDC, JSR, GETSTATIC, NEW
      *
-     * @param insn the bytecode instruction to be interpreted.
+     * @param insn
+     *            the bytecode instruction to be interpreted.
      * @return the result of the interpretation of the given instruction.
-     * @throws AnalyzerException if an error occured during the interpretation.
+     * @throws AnalyzerException
+     *             if an error occured during the interpretation.
      */
     public abstract V newOperation(AbstractInsnNode insn)
             throws AnalyzerException;
@@ -90,11 +94,14 @@ public abstract class Interpreter<V extends Value> {
      * ILOAD, LLOAD, FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE,
      * ASTORE, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP
      *
-     * @param insn the bytecode instruction to be interpreted.
-     * @param value the value that must be moved by the instruction.
+     * @param insn
+     *            the bytecode instruction to be interpreted.
+     * @param value
+     *            the value that must be moved by the instruction.
      * @return the result of the interpretation of the given instruction. The
      *         returned value must be <tt>equal</tt> to the given value.
-     * @throws AnalyzerException if an error occured during the interpretation.
+     * @throws AnalyzerException
+     *             if an error occured during the interpretation.
      */
     public abstract V copyOperation(AbstractInsnNode insn, V value)
             throws AnalyzerException;
@@ -109,10 +116,13 @@ public abstract class Interpreter<V extends Value> {
      * PUTSTATIC, GETFIELD, NEWARRAY, ANEWARRAY, ARRAYLENGTH, ATHROW, CHECKCAST,
      * INSTANCEOF, MONITORENTER, MONITOREXIT, IFNULL, IFNONNULL
      *
-     * @param insn the bytecode instruction to be interpreted.
-     * @param value the argument of the instruction to be interpreted.
+     * @param insn
+     *            the bytecode instruction to be interpreted.
+     * @param value
+     *            the argument of the instruction to be interpreted.
      * @return the result of the interpretation of the given instruction.
-     * @throws AnalyzerException if an error occured during the interpretation.
+     * @throws AnalyzerException
+     *             if an error occured during the interpretation.
      */
     public abstract V unaryOperation(AbstractInsnNode insn, V value)
             throws AnalyzerException;
@@ -128,11 +138,15 @@ public abstract class Interpreter<V extends Value> {
      * DCMPG, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
      * IF_ACMPEQ, IF_ACMPNE, PUTFIELD
      *
-     * @param insn the bytecode instruction to be interpreted.
-     * @param value1 the first argument of the instruction to be interpreted.
-     * @param value2 the second argument of the instruction to be interpreted.
+     * @param insn
+     *            the bytecode instruction to be interpreted.
+     * @param value1
+     *            the first argument of the instruction to be interpreted.
+     * @param value2
+     *            the second argument of the instruction to be interpreted.
      * @return the result of the interpretation of the given instruction.
-     * @throws AnalyzerException if an error occured during the interpretation.
+     * @throws AnalyzerException
+     *             if an error occured during the interpretation.
      */
     public abstract V binaryOperation(AbstractInsnNode insn, V value1, V value2)
             throws AnalyzerException;
@@ -143,18 +157,20 @@ public abstract class Interpreter<V extends Value> {
      *
      * IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, SASTORE
      *
-     * @param insn the bytecode instruction to be interpreted.
-     * @param value1 the first argument of the instruction to be interpreted.
-     * @param value2 the second argument of the instruction to be interpreted.
-     * @param value3 the third argument of the instruction to be interpreted.
+     * @param insn
+     *            the bytecode instruction to be interpreted.
+     * @param value1
+     *            the first argument of the instruction to be interpreted.
+     * @param value2
+     *            the second argument of the instruction to be interpreted.
+     * @param value3
+     *            the third argument of the instruction to be interpreted.
      * @return the result of the interpretation of the given instruction.
-     * @throws AnalyzerException if an error occured during the interpretation.
+     * @throws AnalyzerException
+     *             if an error occured during the interpretation.
      */
-    public abstract V ternaryOperation(
-        AbstractInsnNode insn,
-        V value1,
-        V value2,
-        V value3) throws AnalyzerException;
+    public abstract V ternaryOperation(AbstractInsnNode insn, V value1,
+            V value2, V value3) throws AnalyzerException;
 
     /**
      * Interprets a bytecode instruction with a variable number of arguments.
@@ -163,14 +179,16 @@ public abstract class Interpreter<V extends Value> {
      * INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC, INVOKEINTERFACE,
      * MULTIANEWARRAY and INVOKEDYNAMIC
      *
-     * @param insn the bytecode instruction to be interpreted.
-     * @param values the arguments of the instruction to be interpreted.
+     * @param insn
+     *            the bytecode instruction to be interpreted.
+     * @param values
+     *            the arguments of the instruction to be interpreted.
      * @return the result of the interpretation of the given instruction.
-     * @throws AnalyzerException if an error occured during the interpretation.
+     * @throws AnalyzerException
+     *             if an error occured during the interpretation.
      */
-    public abstract V naryOperation(
-        AbstractInsnNode insn,
-        List< ? extends V> values) throws AnalyzerException;
+    public abstract V naryOperation(AbstractInsnNode insn,
+            List<? extends V> values) throws AnalyzerException;
 
     /**
      * Interprets a bytecode return instruction. This method is called for the
@@ -178,15 +196,17 @@ public abstract class Interpreter<V extends Value> {
      *
      * IRETURN, LRETURN, FRETURN, DRETURN, ARETURN
      *
-     * @param insn the bytecode instruction to be interpreted.
-     * @param value the argument of the instruction to be interpreted.
-     * @param expected the expected return type of the analyzed method.
-     * @throws AnalyzerException if an error occured during the interpretation.
+     * @param insn
+     *            the bytecode instruction to be interpreted.
+     * @param value
+     *            the argument of the instruction to be interpreted.
+     * @param expected
+     *            the expected return type of the analyzed method.
+     * @throws AnalyzerException
+     *             if an error occured during the interpretation.
      */
-    public abstract void returnOperation(
-        AbstractInsnNode insn,
-        V value,
-        V expected) throws AnalyzerException;
+    public abstract void returnOperation(AbstractInsnNode insn, V value,
+            V expected) throws AnalyzerException;
 
     /**
      * Merges two values. The merge operation must return a value that
@@ -195,8 +215,10 @@ public abstract class Interpreter<V extends Value> {
      * values are integer intervals, the merged value must be an interval that
      * contains the previous ones. Likewise for other types of values).
      *
-     * @param v a value.
-     * @param w another value.
+     * @param v
+     *            a value.
+     * @param w
+     *            another value.
      * @return the merged value. If the merged value is equal to <tt>v</tt>,
      *         this method <i>must</i> return <tt>v</tt>.
      */
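
The merge contract spelled out above (if the merged value equals v, merge must return v) is what Frame.merge relies on now that it compares the result with equals() rather than reference identity. A minimal sketch with BasicInterpreter, the concrete Interpreter that BasicVerifier extends earlier in this patch; the widening-to-uninitialized behaviour described in the comments is upstream ASM's and is assumed here, not shown in this hunk:

import scala.tools.asm.tree.analysis.BasicInterpreter;
import scala.tools.asm.tree.analysis.BasicValue;

public class MergeContractSketch {
    public static void main(String[] args) {
        BasicInterpreter interpreter = new BasicInterpreter();
        // Equal inputs: the contract above requires the first value back.
        BasicValue same = interpreter.merge(BasicValue.INT_VALUE, BasicValue.INT_VALUE);
        System.out.println(same == BasicValue.INT_VALUE);          // true
        // Unequal inputs: BasicInterpreter widens to the uninitialized value,
        // which Frame.merge's equals() comparison then records as a change.
        BasicValue widened = interpreter.merge(BasicValue.INT_VALUE, BasicValue.FLOAT_VALUE);
        System.out.println(widened.equals(BasicValue.INT_VALUE));  // false
    }
}
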
diff --git a/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
index c4f515d..eaecd05 100644
--- a/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
@@ -79,15 +79,15 @@ public class SimpleVerifier extends BasicVerifier {
      * Constructs a new {@link SimpleVerifier} to verify a specific class. This
      * class will not be loaded into the JVM since it may be incorrect.
      *
-     * @param currentClass the class that is verified.
-     * @param currentSuperClass the super class of the class that is verified.
-     * @param isInterface if the class that is verified is an interface.
+     * @param currentClass
+     *            the class that is verified.
+     * @param currentSuperClass
+     *            the super class of the class that is verified.
+     * @param isInterface
+     *            if the class that is verified is an interface.
      */
-    public SimpleVerifier(
-        final Type currentClass,
-        final Type currentSuperClass,
-        final boolean isInterface)
-    {
+    public SimpleVerifier(final Type currentClass,
+            final Type currentSuperClass, final boolean isInterface) {
         this(currentClass, currentSuperClass, null, isInterface);
     }
 
@@ -95,32 +95,25 @@ public class SimpleVerifier extends BasicVerifier {
      * Constructs a new {@link SimpleVerifier} to verify a specific class. This
      * class will not be loaded into the JVM since it may be incorrect.
      *
-     * @param currentClass the class that is verified.
-     * @param currentSuperClass the super class of the class that is verified.
-     * @param currentClassInterfaces the interfaces implemented by the class
-     *        that is verified.
-     * @param isInterface if the class that is verified is an interface.
+     * @param currentClass
+     *            the class that is verified.
+     * @param currentSuperClass
+     *            the super class of the class that is verified.
+     * @param currentClassInterfaces
+     *            the interfaces implemented by the class that is verified.
+     * @param isInterface
+     *            if the class that is verified is an interface.
      */
-    public SimpleVerifier(
-        final Type currentClass,
-        final Type currentSuperClass,
-        final List<Type> currentClassInterfaces,
-        final boolean isInterface)
-    {
-        this(ASM4,
-                currentClass,
-                currentSuperClass,
-                currentClassInterfaces,
+    public SimpleVerifier(final Type currentClass,
+            final Type currentSuperClass,
+            final List<Type> currentClassInterfaces, final boolean isInterface) {
+        this(ASM4, currentClass, currentSuperClass, currentClassInterfaces,
                 isInterface);
     }
 
-    protected SimpleVerifier(
-        final int api,
-        final Type currentClass,
-        final Type currentSuperClass,
-        final List<Type> currentClassInterfaces,
-        final boolean isInterface)
-    {
+    protected SimpleVerifier(final int api, final Type currentClass,
+            final Type currentSuperClass,
+            final List<Type> currentClassInterfaces, final boolean isInterface) {
         super(api);
         this.currentClass = currentClass;
         this.currentSuperClass = currentSuperClass;
@@ -133,7 +126,8 @@ public class SimpleVerifier extends BasicVerifier {
      * classes. This is useful if you are verifying multiple interdependent
      * classes.
      *
-     * @param loader a <code>ClassLoader</code> to use
+     * @param loader
+     *            a <code>ClassLoader</code> to use
      */
     public void setClassLoader(final ClassLoader loader) {
         this.loader = loader;
@@ -148,11 +142,11 @@ public class SimpleVerifier extends BasicVerifier {
         boolean isArray = type.getSort() == Type.ARRAY;
         if (isArray) {
             switch (type.getElementType().getSort()) {
-                case Type.BOOLEAN:
-                case Type.CHAR:
-                case Type.BYTE:
-                case Type.SHORT:
-                    return new BasicValue(type);
+            case Type.BOOLEAN:
+            case Type.CHAR:
+            case Type.BYTE:
+            case Type.SHORT:
+                return new BasicValue(type);
             }
         }
 
@@ -181,8 +175,7 @@ public class SimpleVerifier extends BasicVerifier {
 
     @Override
     protected BasicValue getElementValue(final BasicValue objectArrayValue)
-            throws AnalyzerException
-    {
+            throws AnalyzerException {
         Type arrayType = objectArrayValue.getType();
         if (arrayType != null) {
             if (arrayType.getSort() == Type.ARRAY) {
@@ -196,28 +189,28 @@ public class SimpleVerifier extends BasicVerifier {
     }
 
     @Override
-    protected boolean isSubTypeOf(final BasicValue value, final BasicValue expected) {
+    protected boolean isSubTypeOf(final BasicValue value,
+            final BasicValue expected) {
         Type expectedType = expected.getType();
         Type type = value.getType();
         switch (expectedType.getSort()) {
-            case Type.INT:
-            case Type.FLOAT:
-            case Type.LONG:
-            case Type.DOUBLE:
-                return type.equals(expectedType);
-            case Type.ARRAY:
-            case Type.OBJECT:
-                if ("Lnull;".equals(type.getDescriptor())) {
-                    return true;
-                } else if (type.getSort() == Type.OBJECT
-                        || type.getSort() == Type.ARRAY)
-                {
-                    return isAssignableFrom(expectedType, type);
-                } else {
-                    return false;
-                }
-            default:
-                throw new Error("Internal error");
+        case Type.INT:
+        case Type.FLOAT:
+        case Type.LONG:
+        case Type.DOUBLE:
+            return type.equals(expectedType);
+        case Type.ARRAY:
+        case Type.OBJECT:
+            if ("Lnull;".equals(type.getDescriptor())) {
+                return true;
+            } else if (type.getSort() == Type.OBJECT
+                    || type.getSort() == Type.ARRAY) {
+                return isAssignableFrom(expectedType, type);
+            } else {
+                return false;
+            }
+        default:
+            throw new Error("Internal error");
         }
     }
 
@@ -227,11 +220,9 @@ public class SimpleVerifier extends BasicVerifier {
             Type t = v.getType();
             Type u = w.getType();
             if (t != null
-                    && (t.getSort() == Type.OBJECT || t.getSort() == Type.ARRAY))
-            {
+                    && (t.getSort() == Type.OBJECT || t.getSort() == Type.ARRAY)) {
                 if (u != null
-                        && (u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY))
-                {
+                        && (u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY)) {
                     if ("Lnull;".equals(t.getDescriptor())) {
                         return w;
                     }
@@ -288,7 +279,8 @@ public class SimpleVerifier extends BasicVerifier {
                 return false;
             } else {
                 if (isInterface) {
-                    return u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY;
+                    return u.getSort() == Type.OBJECT
+                            || u.getSort() == Type.ARRAY;
                 }
                 return isAssignableFrom(t, getSuperClass(u));
             }
@@ -318,8 +310,7 @@ public class SimpleVerifier extends BasicVerifier {
         try {
             if (t.getSort() == Type.ARRAY) {
                 return Class.forName(t.getDescriptor().replace('/', '.'),
-                        false,
-                        loader);
+                        false, loader);
             }
             return Class.forName(t.getClassName(), false, loader);
         } catch (ClassNotFoundException e) {
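
In practice SimpleVerifier is driven through the Analyzer class from the same scala.tools.asm.tree.analysis package, which this patch does not touch; the constructor and analyze signature below are assumed from upstream ASM 4, so treat this as a sketch rather than part of the import:

import scala.tools.asm.tree.MethodNode;
import scala.tools.asm.tree.analysis.Analyzer;
import scala.tools.asm.tree.analysis.AnalyzerException;
import scala.tools.asm.tree.analysis.BasicValue;
import scala.tools.asm.tree.analysis.Frame;
import scala.tools.asm.tree.analysis.SimpleVerifier;

public class VerifySketch {
    /** One symbolic frame per instruction, or an AnalyzerException for invalid bytecode. */
    static Frame<BasicValue>[] frames(String ownerInternalName, MethodNode method)
            throws AnalyzerException {
        Analyzer<BasicValue> analyzer =
                new Analyzer<BasicValue>(new SimpleVerifier());
        return analyzer.analyze(ownerInternalName, method);
    }
}

Each returned Frame holds the inferred BasicValue for every local slot and stack slot at that instruction, which is exactly what the execute() switch in Frame.java above computes step by step.
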
diff --git a/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
index 067200b..a68086c 100644
--- a/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
@@ -47,8 +47,7 @@ import scala.tools.asm.tree.MethodInsnNode;
  * @author Eric Bruneton
  */
 public class SourceInterpreter extends Interpreter<SourceValue> implements
-        Opcodes
-{
+        Opcodes {
 
     public SourceInterpreter() {
         super(ASM4);
@@ -70,125 +69,118 @@ public class SourceInterpreter extends Interpreter<SourceValue> implements
     public SourceValue newOperation(final AbstractInsnNode insn) {
         int size;
         switch (insn.getOpcode()) {
-            case LCONST_0:
-            case LCONST_1:
-            case DCONST_0:
-            case DCONST_1:
-                size = 2;
-                break;
-            case LDC:
-                Object cst = ((LdcInsnNode) insn).cst;
-                size = cst instanceof Long || cst instanceof Double ? 2 : 1;
-                break;
-            case GETSTATIC:
-                size = Type.getType(((FieldInsnNode) insn).desc).getSize();
-                break;
-            default:
-                size = 1;
+        case LCONST_0:
+        case LCONST_1:
+        case DCONST_0:
+        case DCONST_1:
+            size = 2;
+            break;
+        case LDC:
+            Object cst = ((LdcInsnNode) insn).cst;
+            size = cst instanceof Long || cst instanceof Double ? 2 : 1;
+            break;
+        case GETSTATIC:
+            size = Type.getType(((FieldInsnNode) insn).desc).getSize();
+            break;
+        default:
+            size = 1;
         }
         return new SourceValue(size, insn);
     }
 
     @Override
-    public SourceValue copyOperation(final AbstractInsnNode insn, final SourceValue value) {
+    public SourceValue copyOperation(final AbstractInsnNode insn,
+            final SourceValue value) {
         return new SourceValue(value.getSize(), insn);
     }
 
     @Override
-    public SourceValue unaryOperation(final AbstractInsnNode insn, final SourceValue value)
-    {
+    public SourceValue unaryOperation(final AbstractInsnNode insn,
+            final SourceValue value) {
         int size;
         switch (insn.getOpcode()) {
-            case LNEG:
-            case DNEG:
-            case I2L:
-            case I2D:
-            case L2D:
-            case F2L:
-            case F2D:
-            case D2L:
-                size = 2;
-                break;
-            case GETFIELD:
-                size = Type.getType(((FieldInsnNode) insn).desc).getSize();
-                break;
-            default:
-                size = 1;
+        case LNEG:
+        case DNEG:
+        case I2L:
+        case I2D:
+        case L2D:
+        case F2L:
+        case F2D:
+        case D2L:
+            size = 2;
+            break;
+        case GETFIELD:
+            size = Type.getType(((FieldInsnNode) insn).desc).getSize();
+            break;
+        default:
+            size = 1;
         }
         return new SourceValue(size, insn);
     }
 
     @Override
-    public SourceValue binaryOperation(
-        final AbstractInsnNode insn,
-        final SourceValue value1,
-        final SourceValue value2)
-    {
+    public SourceValue binaryOperation(final AbstractInsnNode insn,
+            final SourceValue value1, final SourceValue value2) {
         int size;
         switch (insn.getOpcode()) {
-            case LALOAD:
-            case DALOAD:
-            case LADD:
-            case DADD:
-            case LSUB:
-            case DSUB:
-            case LMUL:
-            case DMUL:
-            case LDIV:
-            case DDIV:
-            case LREM:
-            case DREM:
-            case LSHL:
-            case LSHR:
-            case LUSHR:
-            case LAND:
-            case LOR:
-            case LXOR:
-                size = 2;
-                break;
-            default:
-                size = 1;
+        case LALOAD:
+        case DALOAD:
+        case LADD:
+        case DADD:
+        case LSUB:
+        case DSUB:
+        case LMUL:
+        case DMUL:
+        case LDIV:
+        case DDIV:
+        case LREM:
+        case DREM:
+        case LSHL:
+        case LSHR:
+        case LUSHR:
+        case LAND:
+        case LOR:
+        case LXOR:
+            size = 2;
+            break;
+        default:
+            size = 1;
         }
         return new SourceValue(size, insn);
     }
 
     @Override
-    public SourceValue ternaryOperation(
-        final AbstractInsnNode insn,
-        final SourceValue value1,
-        final SourceValue value2,
-        final SourceValue value3)
-    {
+    public SourceValue ternaryOperation(final AbstractInsnNode insn,
+            final SourceValue value1, final SourceValue value2,
+            final SourceValue value3) {
         return new SourceValue(1, insn);
     }
 
     @Override
-    public SourceValue naryOperation(final AbstractInsnNode insn, final List<? extends SourceValue> values) {
+    public SourceValue naryOperation(final AbstractInsnNode insn,
+            final List<? extends SourceValue> values) {
         int size;
         int opcode = insn.getOpcode();
         if (opcode == MULTIANEWARRAY) {
             size = 1;
         } else {
-            String desc = (opcode == INVOKEDYNAMIC)?
-                    ((InvokeDynamicInsnNode) insn).desc:
-                    ((MethodInsnNode) insn).desc;
+            String desc = (opcode == INVOKEDYNAMIC) ? ((InvokeDynamicInsnNode) insn).desc
+                    : ((MethodInsnNode) insn).desc;
             size = Type.getReturnType(desc).getSize();
         }
         return new SourceValue(size, insn);
     }
 
     @Override
-    public void returnOperation(
-        final AbstractInsnNode insn,
-        final SourceValue value,
-        final SourceValue expected)
-    {
+    public void returnOperation(final AbstractInsnNode insn,
+            final SourceValue value, final SourceValue expected) {
     }
 
     @Override
     public SourceValue merge(final SourceValue d, final SourceValue w) {
         if (d.insns instanceof SmallSet && w.insns instanceof SmallSet) {
-            Set<AbstractInsnNode> s = ((SmallSet<AbstractInsnNode>) d.insns).union((SmallSet<AbstractInsnNode>) w.insns);
+            Set<AbstractInsnNode> s = ((SmallSet<AbstractInsnNode>) d.insns)
+                    .union((SmallSet<AbstractInsnNode>) w.insns);
             if (s == d.insns && d.size == w.size) {
                 return d;
             } else {
diff --git a/src/asm/scala/tools/asm/tree/analysis/SourceValue.java b/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
index 57ff212..40d6b68 100644
--- a/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
@@ -48,8 +48,8 @@ public class SourceValue implements Value {
 
     /**
      * The instructions that can produce this value. For example, for the Java
-     * code below, the instructions that can produce the value of <tt>i</tt>
-     * at line 5 are the txo ISTORE instructions at line 1 and 3:
+     * code below, the instructions that can produce the value of <tt>i</tt> at
+     * line 5 are the two ISTORE instructions at lines 1 and 3:
      *
      * <pre>
      * 1: i = 0;
@@ -64,7 +64,7 @@ public class SourceValue implements Value {
     public final Set<AbstractInsnNode> insns;
 
     public SourceValue(final int size) {
-        this(size, SmallSet.<AbstractInsnNode>emptySet());
+        this(size, SmallSet.<AbstractInsnNode> emptySet());
     }
 
     public SourceValue(final int size, final AbstractInsnNode insn) {
@@ -84,7 +84,7 @@ public class SourceValue implements Value {
     @Override
     public boolean equals(final Object value) {
         if (!(value instanceof SourceValue)) {
-        	return false;
+            return false;
         }
         SourceValue v = (SourceValue) value;
         return size == v.size && insns.equals(v.insns);
diff --git a/src/asm/scala/tools/asm/tree/analysis/Subroutine.java b/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
index 038880d..d734bbd 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
@@ -51,11 +51,8 @@ class Subroutine {
     private Subroutine() {
     }
 
-    Subroutine(
-        final LabelNode start,
-        final int maxLocals,
-        final JumpInsnNode caller)
-    {
+    Subroutine(final LabelNode start, final int maxLocals,
+            final JumpInsnNode caller) {
         this.start = start;
         this.access = new boolean[maxLocals];
         this.callers = new ArrayList<JumpInsnNode>();
@@ -90,4 +87,4 @@ class Subroutine {
         }
         return changes;
     }
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/util/ASMifiable.java b/src/asm/scala/tools/asm/util/ASMifiable.java
index 6a31dd5..95cc6e3 100644
--- a/src/asm/scala/tools/asm/util/ASMifiable.java
+++ b/src/asm/scala/tools/asm/util/ASMifiable.java
@@ -34,7 +34,7 @@ import java.util.Map;
 import scala.tools.asm.Label;
 
 /**
- * An {@link org.objectweb.asm.Attribute Attribute} that can print the ASM code
+ * An {@link scala.tools.asm.Attribute Attribute} that can print the ASM code
  * to create an equivalent attribute.
  *
  * @author Eugene Kuleshov
@@ -44,10 +44,13 @@ public interface ASMifiable {
     /**
      * Prints the ASM code to create an attribute equal to this attribute.
      *
-     * @param buf a buffer used for printing Java code.
-     * @param varName name of the variable in a printed code used to store
-     *        attribute instance.
-     * @param labelNames map of label instances to their names.
+     * @param buf
+     *            a buffer used for printing Java code.
+     * @param varName
+     *            name of the variable in a printed code used to store attribute
+     *            instance.
+     * @param labelNames
+     *            map of label instances to their names.
      */
     void asmify(StringBuffer buf, String varName, Map<Label, String> labelNames);
 }
diff --git a/src/asm/scala/tools/asm/util/ASMifier.java b/src/asm/scala/tools/asm/util/ASMifier.java
index 5967c87..7e6b223 100644
--- a/src/asm/scala/tools/asm/util/ASMifier.java
+++ b/src/asm/scala/tools/asm/util/ASMifier.java
@@ -91,11 +91,14 @@ public class ASMifier extends Printer {
     /**
      * Constructs a new {@link ASMifier}.
      *
-     * @param api the ASM API version implemented by this class. Must be one of
-     *        {@link Opcodes#ASM4}.
-     * @param name the name of the visitor variable in the produced code.
-     * @param id identifier of the annotation visitor variable in the produced
-     *        code.
+     * @param api
+     *            the ASM API version implemented by this class. Must be one of
+     *            {@link Opcodes#ASM4}.
+     * @param name
+     *            the name of the visitor variable in the produced code.
+     * @param id
+     *            identifier of the annotation visitor variable in the produced
+     *            code.
      */
     protected ASMifier(final int api, final String name, final int id) {
         super(api);
@@ -105,13 +108,15 @@ public class ASMifier extends Printer {
 
     /**
      * Prints the ASM source code to generate the given class to the standard
-     * output. <p> Usage: ASMifier [-debug] <binary
-     * class name or class file name>
+     * output.
+     * <p>
+     * Usage: ASMifier [-debug] <binary class name or class file name>
      *
-     * @param args the command line arguments.
+     * @param args
+     *            the command line arguments.
      *
-     * @throws Exception if the class cannot be found, or if an IO exception
-     *         occurs.
+     * @throws Exception
+     *             if the class cannot be found, or if an IO exception occurs.
      */
     public static void main(final String[] args) throws Exception {
         int i = 0;
@@ -129,22 +134,21 @@ public class ASMifier extends Printer {
             }
         }
         if (!ok) {
-            System.err.println("Prints the ASM code to generate the given class.");
+            System.err
+                    .println("Prints the ASM code to generate the given class.");
             System.err.println("Usage: ASMifier [-debug] "
                     + "<fully qualified class name or class file name>");
             return;
         }
         ClassReader cr;
         if (args[i].endsWith(".class") || args[i].indexOf('\\') > -1
-                || args[i].indexOf('/') > -1)
-        {
+                || args[i].indexOf('/') > -1) {
             cr = new ClassReader(new FileInputStream(args[i]));
         } else {
             cr = new ClassReader(args[i]);
         }
-        cr.accept(new TraceClassVisitor(null,
-                new ASMifier(),
-                new PrintWriter(System.out)), flags);
+        cr.accept(new TraceClassVisitor(null, new ASMifier(), new PrintWriter(
+                System.out)), flags);
     }
 
     // ------------------------------------------------------------------------
@@ -152,14 +156,9 @@ public class ASMifier extends Printer {
     // ------------------------------------------------------------------------
 
     @Override
-    public void visit(
-        final int version,
-        final int access,
-        final String name,
-        final String signature,
-        final String superName,
-        final String[] interfaces)
-    {
+    public void visit(final int version, final int access, final String name,
+            final String signature, final String superName,
+            final String[] interfaces) {
         String simpleName;
         int n = name.lastIndexOf('/');
         if (n == -1) {
@@ -170,8 +169,8 @@ public class ASMifier extends Printer {
             simpleName = name.substring(n + 1);
         }
         text.add("import java.util.*;\n");
-        text.add("import org.objectweb.asm.*;\n");
-        text.add("import org.objectweb.asm.attrs.*;\n");
+        text.add("import scala.tools.asm.*;\n");
+        text.add("import scala.tools.asm.attrs.*;\n");
         text.add("public class " + simpleName + "Dump implements Opcodes {\n\n");
         text.add("public static byte[] dump () throws Exception {\n\n");
         text.add("ClassWriter cw = new ClassWriter(0);\n");
@@ -182,30 +181,30 @@ public class ASMifier extends Printer {
         buf.setLength(0);
         buf.append("cw.visit(");
         switch (version) {
-            case Opcodes.V1_1:
-                buf.append("V1_1");
-                break;
-            case Opcodes.V1_2:
-                buf.append("V1_2");
-                break;
-            case Opcodes.V1_3:
-                buf.append("V1_3");
-                break;
-            case Opcodes.V1_4:
-                buf.append("V1_4");
-                break;
-            case Opcodes.V1_5:
-                buf.append("V1_5");
-                break;
-            case Opcodes.V1_6:
-                buf.append("V1_6");
-                break;
-            case Opcodes.V1_7:
-                buf.append("V1_7");
-                break;
-            default:
-                buf.append(version);
-                break;
+        case Opcodes.V1_1:
+            buf.append("V1_1");
+            break;
+        case Opcodes.V1_2:
+            buf.append("V1_2");
+            break;
+        case Opcodes.V1_3:
+            buf.append("V1_3");
+            break;
+        case Opcodes.V1_4:
+            buf.append("V1_4");
+            break;
+        case Opcodes.V1_5:
+            buf.append("V1_5");
+            break;
+        case Opcodes.V1_6:
+            buf.append("V1_6");
+            break;
+        case Opcodes.V1_7:
+            buf.append("V1_7");
+            break;
+        default:
+            buf.append(version);
+            break;
         }
         buf.append(", ");
         appendAccess(access | ACCESS_CLASS);
@@ -242,11 +241,8 @@ public class ASMifier extends Printer {
     }
 
     @Override
-    public void visitOuterClass(
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitOuterClass(final String owner, final String name,
+            final String desc) {
         buf.setLength(0);
         buf.append("cw.visitOuterClass(");
         appendConstant(owner);
@@ -259,10 +255,8 @@ public class ASMifier extends Printer {
     }
 
     @Override
-    public ASMifier visitClassAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public ASMifier visitClassAnnotation(final String desc,
+            final boolean visible) {
         return visitAnnotation(desc, visible);
     }
 
@@ -272,12 +266,8 @@ public class ASMifier extends Printer {
     }
 
     @Override
-    public void visitInnerClass(
-        final String name,
-        final String outerName,
-        final String innerName,
-        final int access)
-    {
+    public void visitInnerClass(final String name, final String outerName,
+            final String innerName, final int access) {
         buf.setLength(0);
         buf.append("cw.visitInnerClass(");
         appendConstant(name);
@@ -292,13 +282,8 @@ public class ASMifier extends Printer {
     }
 
     @Override
-    public ASMifier visitField(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final Object value)
-    {
+    public ASMifier visitField(final int access, final String name,
+            final String desc, final String signature, final Object value) {
         buf.setLength(0);
         buf.append("{\n");
         buf.append("fv = cw.visitField(");
@@ -320,13 +305,8 @@ public class ASMifier extends Printer {
     }
 
     @Override
-    public ASMifier visitMethod(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final String[] exceptions)
-    {
+    public ASMifier visitMethod(final int access, final String name,
+            final String desc, final String signature, final String[] exceptions) {
         buf.setLength(0);
         buf.append("{\n");
         buf.append("mv = cw.visitMethod(");
@@ -380,11 +360,8 @@ public class ASMifier extends Printer {
     }
 
     @Override
-    public void visitEnum(
-        final String name,
-        final String desc,
-        final String value)
-    {
+    public void visitEnum(final String name, final String desc,
+            final String value) {
         buf.setLength(0);
         buf.append("av").append(id).append(".visitEnum(");
         appendConstant(buf, name);
@@ -397,10 +374,7 @@ public class ASMifier extends Printer {
     }
 
     @Override
-    public ASMifier visitAnnotation(
-        final String name,
-        final String desc)
-    {
+    public ASMifier visitAnnotation(final String name, final String desc) {
         buf.setLength(0);
         buf.append("{\n");
         buf.append("AnnotationVisitor av").append(id + 1).append(" = av");
@@ -443,10 +417,8 @@ public class ASMifier extends Printer {
     // ------------------------------------------------------------------------
 
     @Override
-    public ASMifier visitFieldAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public ASMifier visitFieldAnnotation(final String desc,
+            final boolean visible) {
         return visitAnnotation(desc, visible);
     }
 
@@ -469,9 +441,7 @@ public class ASMifier extends Printer {
     @Override
     public ASMifier visitAnnotationDefault() {
         buf.setLength(0);
-        buf.append("{\n")
-                .append("av0 = ")
-                .append(name)
+        buf.append("{\n").append("av0 = ").append(name)
                 .append(".visitAnnotationDefault();\n");
         text.add(buf.toString());
         ASMifier a = createASMifier("av", 0);
@@ -481,23 +451,17 @@ public class ASMifier extends Printer {
     }
 
     @Override
-    public ASMifier visitMethodAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public ASMifier visitMethodAnnotation(final String desc,
+            final boolean visible) {
         return visitAnnotation(desc, visible);
     }
 
     @Override
-    public ASMifier visitParameterAnnotation(
-        final int parameter,
-        final String desc,
-        final boolean visible)
-    {
+    public ASMifier visitParameterAnnotation(final int parameter,
+            final String desc, final boolean visible) {
         buf.setLength(0);
-        buf.append("{\n")
-                .append("av0 = ").append(name).append(".visitParameterAnnotation(")
-                .append(parameter)
+        buf.append("{\n").append("av0 = ").append(name)
+                .append(".visitParameterAnnotation(").append(parameter)
                 .append(", ");
         appendConstant(desc);
         buf.append(", ").append(visible).append(");\n");
@@ -519,52 +483,47 @@ public class ASMifier extends Printer {
     }
 
     @Override
-    public void visitFrame(
-        final int type,
-        final int nLocal,
-        final Object[] local,
-        final int nStack,
-        final Object[] stack)
-    {
+    public void visitFrame(final int type, final int nLocal,
+            final Object[] local, final int nStack, final Object[] stack) {
         buf.setLength(0);
         switch (type) {
-            case Opcodes.F_NEW:
-            case Opcodes.F_FULL:
-                declareFrameTypes(nLocal, local);
-                declareFrameTypes(nStack, stack);
-                if (type == Opcodes.F_NEW) {
-                    buf.append(name).append(".visitFrame(Opcodes.F_NEW, ");
-                } else {
-                    buf.append(name).append(".visitFrame(Opcodes.F_FULL, ");
-                }
-                buf.append(nLocal).append(", new Object[] {");
-                appendFrameTypes(nLocal, local);
-                buf.append("}, ").append(nStack).append(", new Object[] {");
-                appendFrameTypes(nStack, stack);
-                buf.append('}');
-                break;
-            case Opcodes.F_APPEND:
-                declareFrameTypes(nLocal, local);
-                buf.append(name).append(".visitFrame(Opcodes.F_APPEND,")
-                        .append(nLocal)
-                        .append(", new Object[] {");
-                appendFrameTypes(nLocal, local);
-                buf.append("}, 0, null");
-                break;
-            case Opcodes.F_CHOP:
-                buf.append(name).append(".visitFrame(Opcodes.F_CHOP,")
-                        .append(nLocal)
-                        .append(", null, 0, null");
-                break;
-            case Opcodes.F_SAME:
-                buf.append(name).append(".visitFrame(Opcodes.F_SAME, 0, null, 0, null");
-                break;
-            case Opcodes.F_SAME1:
-                declareFrameTypes(1, stack);
-                buf.append(name).append(".visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[] {");
-                appendFrameTypes(1, stack);
-                buf.append('}');
-                break;
+        case Opcodes.F_NEW:
+        case Opcodes.F_FULL:
+            declareFrameTypes(nLocal, local);
+            declareFrameTypes(nStack, stack);
+            if (type == Opcodes.F_NEW) {
+                buf.append(name).append(".visitFrame(Opcodes.F_NEW, ");
+            } else {
+                buf.append(name).append(".visitFrame(Opcodes.F_FULL, ");
+            }
+            buf.append(nLocal).append(", new Object[] {");
+            appendFrameTypes(nLocal, local);
+            buf.append("}, ").append(nStack).append(", new Object[] {");
+            appendFrameTypes(nStack, stack);
+            buf.append('}');
+            break;
+        case Opcodes.F_APPEND:
+            declareFrameTypes(nLocal, local);
+            buf.append(name).append(".visitFrame(Opcodes.F_APPEND,")
+                    .append(nLocal).append(", new Object[] {");
+            appendFrameTypes(nLocal, local);
+            buf.append("}, 0, null");
+            break;
+        case Opcodes.F_CHOP:
+            buf.append(name).append(".visitFrame(Opcodes.F_CHOP,")
+                    .append(nLocal).append(", null, 0, null");
+            break;
+        case Opcodes.F_SAME:
+            buf.append(name).append(
+                    ".visitFrame(Opcodes.F_SAME, 0, null, 0, null");
+            break;
+        case Opcodes.F_SAME1:
+            declareFrameTypes(1, stack);
+            buf.append(name).append(
+                    ".visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[] {");
+            appendFrameTypes(1, stack);
+            buf.append('}');
+            break;
         }
         buf.append(");\n");
         text.add(buf.toString());
@@ -573,7 +532,8 @@ public class ASMifier extends Printer {
     @Override
     public void visitInsn(final int opcode) {
         buf.setLength(0);
-        buf.append(name).append(".visitInsn(").append(OPCODES[opcode]).append(");\n");
+        buf.append(name).append(".visitInsn(").append(OPCODES[opcode])
+                .append(");\n");
         text.add(buf.toString());
     }
 
@@ -584,43 +544,35 @@ public class ASMifier extends Printer {
                 .append(".visitIntInsn(")
                 .append(OPCODES[opcode])
                 .append(", ")
-                .append(opcode == Opcodes.NEWARRAY
-                        ? TYPES[operand]
-                        : Integer.toString(operand))
-                .append(");\n");
+                .append(opcode == Opcodes.NEWARRAY ? TYPES[operand] : Integer
+                        .toString(operand)).append(");\n");
         text.add(buf.toString());
     }
 
     @Override
     public void visitVarInsn(final int opcode, final int var) {
         buf.setLength(0);
-        buf.append(name)
-                .append(".visitVarInsn(")
-                .append(OPCODES[opcode])
-                .append(", ")
-                .append(var)
-                .append(");\n");
+        buf.append(name).append(".visitVarInsn(").append(OPCODES[opcode])
+                .append(", ").append(var).append(");\n");
         text.add(buf.toString());
     }
 
     @Override
     public void visitTypeInsn(final int opcode, final String type) {
         buf.setLength(0);
-        buf.append(name).append(".visitTypeInsn(").append(OPCODES[opcode]).append(", ");
+        buf.append(name).append(".visitTypeInsn(").append(OPCODES[opcode])
+                .append(", ");
         appendConstant(type);
         buf.append(");\n");
         text.add(buf.toString());
     }
 
     @Override
-    public void visitFieldInsn(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitFieldInsn(final int opcode, final String owner,
+            final String name, final String desc) {
         buf.setLength(0);
-        buf.append(this.name).append(".visitFieldInsn(").append(OPCODES[opcode]).append(", ");
+        buf.append(this.name).append(".visitFieldInsn(")
+                .append(OPCODES[opcode]).append(", ");
         appendConstant(owner);
         buf.append(", ");
         appendConstant(name);
@@ -631,14 +583,11 @@ public class ASMifier extends Printer {
     }
 
     @Override
-    public void visitMethodInsn(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitMethodInsn(final int opcode, final String owner,
+            final String name, final String desc) {
         buf.setLength(0);
-        buf.append(this.name).append(".visitMethodInsn(").append(OPCODES[opcode]).append(", ");
+        buf.append(this.name).append(".visitMethodInsn(")
+                .append(OPCODES[opcode]).append(", ");
         appendConstant(owner);
         buf.append(", ");
         appendConstant(name);
@@ -649,12 +598,8 @@ public class ASMifier extends Printer {
     }
 
     @Override
-    public void visitInvokeDynamicInsn(
-        String name,
-        String desc,
-        Handle bsm,
-        Object... bsmArgs)
-    {
+    public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+            Object... bsmArgs) {
         buf.setLength(0);
         buf.append(this.name).append(".visitInvokeDynamicInsn(");
         appendConstant(name);
@@ -677,7 +622,8 @@ public class ASMifier extends Printer {
     public void visitJumpInsn(final int opcode, final Label label) {
         buf.setLength(0);
         declareLabel(label);
-        buf.append(name).append(".visitJumpInsn(").append(OPCODES[opcode]).append(", ");
+        buf.append(name).append(".visitJumpInsn(").append(OPCODES[opcode])
+                .append(", ");
         appendLabel(label);
         buf.append(");\n");
         text.add(buf.toString());
@@ -705,34 +651,22 @@ public class ASMifier extends Printer {
     @Override
     public void visitIincInsn(final int var, final int increment) {
         buf.setLength(0);
-        buf.append(name)
-                .append(".visitIincInsn(")
-                .append(var)
-                .append(", ")
-                .append(increment)
-                .append(");\n");
+        buf.append(name).append(".visitIincInsn(").append(var).append(", ")
+                .append(increment).append(");\n");
         text.add(buf.toString());
     }
 
     @Override
-    public void visitTableSwitchInsn(
-        final int min,
-        final int max,
-        final Label dflt,
-        final Label... labels)
-    {
+    public void visitTableSwitchInsn(final int min, final int max,
+            final Label dflt, final Label... labels) {
         buf.setLength(0);
         for (int i = 0; i < labels.length; ++i) {
             declareLabel(labels[i]);
         }
         declareLabel(dflt);
 
-        buf.append(name)
-                .append(".visitTableSwitchInsn(")
-                .append(min)
-                .append(", ")
-                .append(max)
-                .append(", ");
+        buf.append(name).append(".visitTableSwitchInsn(").append(min)
+                .append(", ").append(max).append(", ");
         appendLabel(dflt);
         buf.append(", new Label[] {");
         for (int i = 0; i < labels.length; ++i) {
@@ -744,11 +678,8 @@ public class ASMifier extends Printer {
     }
 
     @Override
-    public void visitLookupSwitchInsn(
-        final Label dflt,
-        final int[] keys,
-        final Label[] labels)
-    {
+    public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+            final Label[] labels) {
         buf.setLength(0);
         for (int i = 0; i < labels.length; ++i) {
             declareLabel(labels[i]);
@@ -780,12 +711,8 @@ public class ASMifier extends Printer {
     }
 
     @Override
-    public void visitTryCatchBlock(
-        final Label start,
-        final Label end,
-        final Label handler,
-        final String type)
-    {
+    public void visitTryCatchBlock(final Label start, final Label end,
+            final Label handler, final String type) {
         buf.setLength(0);
         declareLabel(start);
         declareLabel(end);
@@ -803,14 +730,9 @@ public class ASMifier extends Printer {
     }
 
     @Override
-    public void visitLocalVariable(
-        final String name,
-        final String desc,
-        final String signature,
-        final Label start,
-        final Label end,
-        final int index)
-    {
+    public void visitLocalVariable(final String name, final String desc,
+            final String signature, final Label start, final Label end,
+            final int index) {
         buf.setLength(0);
         buf.append(this.name).append(".visitLocalVariable(");
         appendConstant(name);
@@ -838,12 +760,8 @@ public class ASMifier extends Printer {
     @Override
     public void visitMaxs(final int maxStack, final int maxLocals) {
         buf.setLength(0);
-        buf.append(name)
-                .append(".visitMaxs(")
-                .append(maxStack)
-                .append(", ")
-                .append(maxLocals)
-                .append(");\n");
+        buf.append(name).append(".visitMaxs(").append(maxStack).append(", ")
+                .append(maxLocals).append(");\n");
         text.add(buf.toString());
     }
 
@@ -858,14 +776,9 @@ public class ASMifier extends Printer {
     // Common methods
     // ------------------------------------------------------------------------
 
-    public ASMifier visitAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public ASMifier visitAnnotation(final String desc, final boolean visible) {
         buf.setLength(0);
-        buf.append("{\n")
-                .append("av0 = ")
-                .append(name)
+        buf.append("{\n").append("av0 = ").append(name)
                 .append(".visitAnnotation(");
         appendConstant(desc);
         buf.append(", ").append(visible).append(");\n");
@@ -895,15 +808,16 @@ public class ASMifier extends Printer {
     // Utility methods
     // ------------------------------------------------------------------------
 
-    protected ASMifier createASMifier(final String name, final int id)    {
+    protected ASMifier createASMifier(final String name, final int id) {
         return new ASMifier(Opcodes.ASM4, name, id);
     }
 
     /**
-     * Appends a string representation of the given access modifiers to {@link
-     * #buf buf}.
+     * Appends a string representation of the given access modifiers to
+     * {@link #buf buf}.
      *
-     * @param access some access modifiers.
+     * @param access
+     *            some access modifiers.
      */
     void appendAccess(final int access) {
         boolean first = true;
@@ -945,8 +859,7 @@ public class ASMifier extends Printer {
             first = false;
         }
         if ((access & Opcodes.ACC_VOLATILE) != 0
-                && (access & ACCESS_FIELD) != 0)
-        {
+                && (access & ACCESS_FIELD) != 0) {
             if (!first) {
                 buf.append(" + ");
             }
@@ -954,8 +867,7 @@ public class ASMifier extends Printer {
             first = false;
         }
         if ((access & Opcodes.ACC_BRIDGE) != 0 && (access & ACCESS_CLASS) == 0
-                && (access & ACCESS_FIELD) == 0)
-        {
+                && (access & ACCESS_FIELD) == 0) {
             if (!first) {
                 buf.append(" + ");
             }
@@ -963,8 +875,7 @@ public class ASMifier extends Printer {
             first = false;
         }
         if ((access & Opcodes.ACC_VARARGS) != 0 && (access & ACCESS_CLASS) == 0
-                && (access & ACCESS_FIELD) == 0)
-        {
+                && (access & ACCESS_FIELD) == 0) {
             if (!first) {
                 buf.append(" + ");
             }
@@ -972,8 +883,7 @@ public class ASMifier extends Printer {
             first = false;
         }
         if ((access & Opcodes.ACC_TRANSIENT) != 0
-                && (access & ACCESS_FIELD) != 0)
-        {
+                && (access & ACCESS_FIELD) != 0) {
             if (!first) {
                 buf.append(" + ");
             }
@@ -981,8 +891,7 @@ public class ASMifier extends Printer {
             first = false;
         }
         if ((access & Opcodes.ACC_NATIVE) != 0 && (access & ACCESS_CLASS) == 0
-                && (access & ACCESS_FIELD) == 0)
-        {
+                && (access & ACCESS_FIELD) == 0) {
             if (!first) {
                 buf.append(" + ");
             }
@@ -991,8 +900,7 @@ public class ASMifier extends Printer {
         }
         if ((access & Opcodes.ACC_ENUM) != 0
                 && ((access & ACCESS_CLASS) != 0
-                        || (access & ACCESS_FIELD) != 0 || (access & ACCESS_INNER) != 0))
-        {
+                        || (access & ACCESS_FIELD) != 0 || (access & ACCESS_INNER) != 0)) {
             if (!first) {
                 buf.append(" + ");
             }
@@ -1000,8 +908,7 @@ public class ASMifier extends Printer {
             first = false;
         }
         if ((access & Opcodes.ACC_ANNOTATION) != 0
-                && ((access & ACCESS_CLASS) != 0 || (access & ACCESS_INNER) != 0))
-        {
+                && ((access & ACCESS_CLASS) != 0 || (access & ACCESS_INNER) != 0)) {
             if (!first) {
                 buf.append(" + ");
             }
@@ -1052,8 +959,9 @@ public class ASMifier extends Printer {
      * Appends a string representation of the given constant to the given
      * buffer.
      *
-     * @param cst an {@link Integer}, {@link Float}, {@link Long},
-     *        {@link Double} or {@link String} object. May be <tt>null</tt>.
+     * @param cst
+     *            an {@link Integer}, {@link Float}, {@link Long},
+     *            {@link Double} or {@link String} object. May be <tt>null</tt>.
      */
     protected void appendConstant(final Object cst) {
         appendConstant(buf, cst);
@@ -1063,9 +971,11 @@ public class ASMifier extends Printer {
      * Appends a string representation of the given constant to the given
      * buffer.
      *
-     * @param buf a string buffer.
-     * @param cst an {@link Integer}, {@link Float}, {@link Long},
-     *        {@link Double} or {@link String} object. May be <tt>null</tt>.
+     * @param buf
+     *            a string buffer.
+     * @param cst
+     *            an {@link Integer}, {@link Float}, {@link Long},
+     *            {@link Double} or {@link String} object. May be <tt>null</tt>.
      */
     static void appendConstant(final StringBuffer buf, final Object cst) {
         if (cst == null) {
@@ -1079,14 +989,16 @@ public class ASMifier extends Printer {
         } else if (cst instanceof Handle) {
             buf.append("new Handle(");
             Handle h = (Handle) cst;
-            buf.append("Opcodes.").append(HANDLE_TAG[h.getTag()]).append(", \"");
+            buf.append("Opcodes.").append(HANDLE_TAG[h.getTag()])
+                    .append(", \"");
             buf.append(h.getOwner()).append("\", \"");
             buf.append(h.getName()).append("\", \"");
             buf.append(h.getDesc()).append("\")");
         } else if (cst instanceof Byte) {
             buf.append("new Byte((byte)").append(cst).append(')');
         } else if (cst instanceof Boolean) {
-            buf.append(((Boolean) cst).booleanValue() ? "Boolean.TRUE" : "Boolean.FALSE");
+            buf.append(((Boolean) cst).booleanValue() ? "Boolean.TRUE"
+                    : "Boolean.FALSE");
         } else if (cst instanceof Short) {
             buf.append("new Short((short)").append(cst).append(')');
         } else if (cst instanceof Character) {
@@ -1125,8 +1037,7 @@ public class ASMifier extends Printer {
             char[] v = (char[]) cst;
             buf.append("new char[] {");
             for (int i = 0; i < v.length; i++) {
-                buf.append(i == 0 ? "" : ",")
-                        .append("(char)")
+                buf.append(i == 0 ? "" : ",").append("(char)")
                         .append((int) v[i]);
             }
             buf.append('}');
@@ -1178,27 +1089,27 @@ public class ASMifier extends Printer {
                 appendConstant(o[i]);
             } else if (o[i] instanceof Integer) {
                 switch (((Integer) o[i]).intValue()) {
-                    case 0:
-                        buf.append("Opcodes.TOP");
-                        break;
-                    case 1:
-                        buf.append("Opcodes.INTEGER");
-                        break;
-                    case 2:
-                        buf.append("Opcodes.FLOAT");
-                        break;
-                    case 3:
-                        buf.append("Opcodes.DOUBLE");
-                        break;
-                    case 4:
-                        buf.append("Opcodes.LONG");
-                        break;
-                    case 5:
-                        buf.append("Opcodes.NULL");
-                        break;
-                    case 6:
-                        buf.append("Opcodes.UNINITIALIZED_THIS");
-                        break;
+                case 0:
+                    buf.append("Opcodes.TOP");
+                    break;
+                case 1:
+                    buf.append("Opcodes.INTEGER");
+                    break;
+                case 2:
+                    buf.append("Opcodes.FLOAT");
+                    break;
+                case 3:
+                    buf.append("Opcodes.DOUBLE");
+                    break;
+                case 4:
+                    buf.append("Opcodes.LONG");
+                    break;
+                case 5:
+                    buf.append("Opcodes.NULL");
+                    break;
+                case 6:
+                    buf.append("Opcodes.UNINITIALIZED_THIS");
+                    break;
                 }
             } else {
                 appendLabel((Label) o[i]);
@@ -1211,7 +1122,8 @@ public class ASMifier extends Printer {
      * declaration is of the form "Label lXXX = new Label();". Does nothing if
      * the given label has already been declared.
      *
-     * @param l a label.
+     * @param l
+     *            a label.
      */
     protected void declareLabel(final Label l) {
         if (labelNames == null) {
@@ -1227,10 +1139,11 @@ public class ASMifier extends Printer {
 
     /**
      * Appends the name of the given label to {@link #buf buf}. The given label
-     * <i>must</i> already have a name. One way to ensure this is to always
-     * call {@link #declareLabel declared} before calling this method.
+     * <i>must</i> already have a name. One way to ensure this is to always call
+     * {@link #declareLabel declared} before calling this method.
      *
-     * @param l a label.
+     * @param l
+     *            a label.
      */
     protected void appendLabel(final Label l) {
         buf.append(labelNames.get(l));
diff --git a/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
index 8030c14..f00a8f0 100644
--- a/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
@@ -65,8 +65,7 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
                 || value instanceof byte[] || value instanceof boolean[]
                 || value instanceof char[] || value instanceof short[]
                 || value instanceof int[] || value instanceof long[]
-                || value instanceof float[] || value instanceof double[]))
-        {
+                || value instanceof float[] || value instanceof double[])) {
             throw new IllegalArgumentException("Invalid annotation value");
         }
         if (value instanceof Type) {
@@ -81,11 +80,8 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
     }
 
     @Override
-    public void visitEnum(
-        final String name,
-        final String desc,
-        final String value)
-    {
+    public void visitEnum(final String name, final String desc,
+            final String value) {
         checkEnd();
         checkName(name);
         CheckMethodAdapter.checkDesc(desc, false);
@@ -98,15 +94,12 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
     }
 
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String name,
-        final String desc)
-    {
+    public AnnotationVisitor visitAnnotation(final String name,
+            final String desc) {
         checkEnd();
         checkName(name);
         CheckMethodAdapter.checkDesc(desc, false);
-        return new CheckAnnotationAdapter(av == null
-                ? null
+        return new CheckAnnotationAdapter(av == null ? null
                 : av.visitAnnotation(name, desc));
     }
 
@@ -114,8 +107,7 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
     public AnnotationVisitor visitArray(final String name) {
         checkEnd();
         checkName(name);
-        return new CheckAnnotationAdapter(av == null
-                ? null
+        return new CheckAnnotationAdapter(av == null ? null
                 : av.visitArray(name), false);
     }
 
@@ -130,13 +122,15 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
 
     private void checkEnd() {
         if (end) {
-            throw new IllegalStateException("Cannot call a visit method after visitEnd has been called");
+            throw new IllegalStateException(
+                    "Cannot call a visit method after visitEnd has been called");
         }
     }
 
     private void checkName(final String name) {
         if (named && name == null) {
-            throw new IllegalArgumentException("Annotation value name must not be null");
+            throw new IllegalArgumentException(
+                    "Annotation value name must not be null");
         }
     }
 }
diff --git a/src/asm/scala/tools/asm/util/CheckClassAdapter.java b/src/asm/scala/tools/asm/util/CheckClassAdapter.java
index a455322..0bfa143 100644
--- a/src/asm/scala/tools/asm/util/CheckClassAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckClassAdapter.java
@@ -59,10 +59,10 @@ import scala.tools.asm.tree.analysis.SimpleVerifier;
  * <i>only</i> on its arguments, but does <i>not</i> check the <i>sequence</i>
  * of method calls. For example, the invalid sequence
  * <tt>visitField(ACC_PUBLIC, "i", "I", null)</tt> <tt>visitField(ACC_PUBLIC,
- * "i", "D", null)</tt>
- * will <i>not</i> be detected by this class adapter.
+ * "i", "D", null)</tt> will <i>not</i> be detected by this class adapter.
  *
- * <p><code>CheckClassAdapter</code> can be also used to verify bytecode
+ * <p>
+ * <code>CheckClassAdapter</code> can be also used to verify bytecode
  * transformations in order to make sure transformed bytecode is sane. For
  * example:
  *
@@ -80,19 +80,20 @@ import scala.tools.asm.tree.analysis.SimpleVerifier;
  * </pre>
  *
  * The above code runs transformed bytecode through the
- * <code>CheckClassAdapter</code>. It won't be exactly the same verification
- * as JVM does, but it run data flow analysis for the code of each method and
+ * <code>CheckClassAdapter</code>. It won't be exactly the same verification as
+ * the JVM does, but it runs data flow analysis for the code of each method and
  * checks that expectations are met for each method instruction.
  *
- * <p>If method bytecode has errors, assertion text will show the erroneous
+ * <p>
+ * If method bytecode has errors, assertion text will show the erroneous
  * instruction number and dump of the failed method with information about
  * locals and stack slot for each instruction. For example (format is -
  * insnNumber locals : stack):
  *
  * <pre>
- * org.objectweb.asm.tree.analysis.AnalyzerException: Error at instruction 71: Expected I, but found .
- *   at org.objectweb.asm.tree.analysis.Analyzer.analyze(Analyzer.java:289)
- *   at org.objectweb.asm.util.CheckClassAdapter.verify(CheckClassAdapter.java:135)
+ * scala.tools.asm.tree.analysis.AnalyzerException: Error at instruction 71: Expected I, but found .
+ *   at scala.tools.asm.tree.analysis.Analyzer.analyze(Analyzer.java:289)
+ *   at scala.tools.asm.util.CheckClassAdapter.verify(CheckClassAdapter.java:135)
  * ...
  * remove()V
  * 00000 LinkedBlockingQueue$Itr . . . . . . . .  :
@@ -114,8 +115,9 @@ import scala.tools.asm.tree.analysis.SimpleVerifier;
  * initialized. You can also see that at the beginning of the method (code
  * inserted by the transformation) variable 2 is initialized.
  *
- * <p>Note that when used like that, <code>CheckClassAdapter.verify()</code>
- * can trigger additional class loading, because it is using
+ * <p>
+ * Note that when used like that, <code>CheckClassAdapter.verify()</code> can
+ * trigger additional class loading, because it is using
  * <code>SimpleVerifier</code>.
  *
  * @author Eric Bruneton
@@ -159,13 +161,15 @@ public class CheckClassAdapter extends ClassVisitor {
     private boolean checkDataFlow;
 
     /**
-     * Checks a given class. <p> Usage: CheckClassAdapter <binary
-     * class name or class file name>
+     * Checks a given class.
+     * <p>
+     * Usage: CheckClassAdapter <binary class name or class file name>
      *
-     * @param args the command line arguments.
+     * @param args
+     *            the command line arguments.
      *
-     * @throws Exception if the class cannot be found, or if an IO exception
-     *         occurs.
+     * @throws Exception
+     *             if the class cannot be found, or if an IO exception occurs.
      */
     public static void main(final String[] args) throws Exception {
         if (args.length != 1) {
@@ -187,27 +191,26 @@ public class CheckClassAdapter extends ClassVisitor {
     /**
      * Checks a given class.
      *
-     * @param cr a <code>ClassReader</code> that contains bytecode for the
-     *        analysis.
-     * @param loader a <code>ClassLoader</code> which will be used to load
-     *        referenced classes. This is useful if you are verifiying multiple
-     *        interdependent classes.
-     * @param dump true if bytecode should be printed out not only when errors
-     *        are found.
-     * @param pw write where results going to be printed
+     * @param cr
+     *            a <code>ClassReader</code> that contains bytecode for the
+     *            analysis.
+     * @param loader
+     *            a <code>ClassLoader</code> which will be used to load
+     *            referenced classes. This is useful if you are verifying
+     *            multiple interdependent classes.
+     * @param dump
+     *            true if bytecode should be printed out not only when errors
+     *            are found.
+     * @param pw
+     *            writer where results are going to be printed
      */
-    public static void verify(
-        final ClassReader cr,
-        final ClassLoader loader,
-        final boolean dump,
-        final PrintWriter pw)
-    {
+    public static void verify(final ClassReader cr, final ClassLoader loader,
+            final boolean dump, final PrintWriter pw) {
         ClassNode cn = new ClassNode();
         cr.accept(new CheckClassAdapter(cn, false), ClassReader.SKIP_DEBUG);
 
-        Type syperType = cn.superName == null
-                ? null
-                : Type.getObjectType(cn.superName);
+        Type syperType = cn.superName == null ? null : Type
+                .getObjectType(cn.superName);
         List<MethodNode> methods = cn.methods;
 
         List<Type> interfaces = new ArrayList<Type>();
@@ -217,9 +220,8 @@ public class CheckClassAdapter extends ClassVisitor {
 
         for (int i = 0; i < methods.size(); ++i) {
             MethodNode method = methods.get(i);
-            SimpleVerifier verifier = new SimpleVerifier(Type.getObjectType(cn.name),
-                    syperType,
-                    interfaces,
+            SimpleVerifier verifier = new SimpleVerifier(
+                    Type.getObjectType(cn.name), syperType, interfaces,
                     (cn.access & Opcodes.ACC_INTERFACE) != 0);
             Analyzer<BasicValue> a = new Analyzer<BasicValue>(verifier);
             if (loader != null) {
@@ -241,25 +243,22 @@ public class CheckClassAdapter extends ClassVisitor {
     /**
      * Checks a given class
      *
-     * @param cr a <code>ClassReader</code> that contains bytecode for the
-     *        analysis.
-     * @param dump true if bytecode should be printed out not only when errors
-     *        are found.
-     * @param pw write where results going to be printed
+     * @param cr
+     *            a <code>ClassReader</code> that contains bytecode for the
+     *            analysis.
+     * @param dump
+     *            true if bytecode should be printed out not only when errors
+     *            are found.
+     * @param pw
+     *            writer where results are going to be printed
      */
-    public static void verify(
-        final ClassReader cr,
-        final boolean dump,
-        final PrintWriter pw)
-    {
+    public static void verify(final ClassReader cr, final boolean dump,
+            final PrintWriter pw) {
         verify(cr, null, dump, pw);
     }
 
-    static void printAnalyzerResult(
-        MethodNode method,
-        Analyzer<BasicValue> a,
-        final PrintWriter pw)
-    {
+    static void printAnalyzerResult(MethodNode method, Analyzer<BasicValue> a,
+            final PrintWriter pw) {
         Frame<BasicValue>[] frames = a.getFrames();
         Textifier t = new Textifier();
         TraceMethodVisitor mv = new TraceMethodVisitor(t);
@@ -310,7 +309,8 @@ public class CheckClassAdapter extends ClassVisitor {
      * this constructor</i>. Instead, they must use the
      * {@link #CheckClassAdapter(int, ClassVisitor, boolean)} version.
      *
-     * @param cv the class visitor to which this adapter must delegate calls.
+     * @param cv
+     *            the class visitor to which this adapter must delegate calls.
      */
     public CheckClassAdapter(final ClassVisitor cv) {
         this(cv, true);
@@ -321,33 +321,34 @@ public class CheckClassAdapter extends ClassVisitor {
      * this constructor</i>. Instead, they must use the
      * {@link #CheckClassAdapter(int, ClassVisitor, boolean)} version.
      *
-     * @param cv the class visitor to which this adapter must delegate calls.
-     * @param checkDataFlow <tt>true</tt> to perform basic data flow checks, or
-     *        <tt>false</tt> to not perform any data flow check (see
-     *        {@link CheckMethodAdapter}). This option requires valid maxLocals
-     *        and maxStack values.
+     * @param cv
+     *            the class visitor to which this adapter must delegate calls.
+     * @param checkDataFlow
+     *            <tt>true</tt> to perform basic data flow checks, or
+     *            <tt>false</tt> to not perform any data flow check (see
+     *            {@link CheckMethodAdapter}). This option requires valid
+     *            maxLocals and maxStack values.
      */
-    public CheckClassAdapter(final ClassVisitor cv, final boolean checkDataFlow)
-    {
+    public CheckClassAdapter(final ClassVisitor cv, final boolean checkDataFlow) {
         this(Opcodes.ASM4, cv, checkDataFlow);
     }
 
     /**
      * Constructs a new {@link CheckClassAdapter}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
-     * @param cv the class visitor to which this adapter must delegate calls.
-     * @param checkDataFlow <tt>true</tt> to perform basic data flow checks, or
-     *        <tt>false</tt> to not perform any data flow check (see
-     *        {@link CheckMethodAdapter}). This option requires valid maxLocals
-     *        and maxStack values.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
+     * @param cv
+     *            the class visitor to which this adapter must delegate calls.
+     * @param checkDataFlow
+     *            <tt>true</tt> to perform basic data flow checks, or
+     *            <tt>false</tt> to not perform any data flow check (see
+     *            {@link CheckMethodAdapter}). This option requires valid
+     *            maxLocals and maxStack values.
      */
-    protected CheckClassAdapter(
-        final int api,
-        final ClassVisitor cv,
-        final boolean checkDataFlow)
-    {
+    protected CheckClassAdapter(final int api, final ClassVisitor cv,
+            final boolean checkDataFlow) {
         super(api, cv);
         this.labels = new HashMap<Label, Integer>();
         this.checkDataFlow = checkDataFlow;
@@ -358,14 +359,9 @@ public class CheckClassAdapter extends ClassVisitor {
     // ------------------------------------------------------------------------
 
     @Override
-    public void visit(
-        final int version,
-        final int access,
-        final String name,
-        final String signature,
-        final String superName,
-        final String[] interfaces)
-    {
+    public void visit(final int version, final int access, final String name,
+            final String signature, final String superName,
+            final String[] interfaces) {
         if (start) {
             throw new IllegalStateException("visit must be called only once");
         }
@@ -375,24 +371,25 @@ public class CheckClassAdapter extends ClassVisitor {
                 + Opcodes.ACC_SUPER + Opcodes.ACC_INTERFACE
                 + Opcodes.ACC_ABSTRACT + Opcodes.ACC_SYNTHETIC
                 + Opcodes.ACC_ANNOTATION + Opcodes.ACC_ENUM
-                + Opcodes.ACC_DEPRECATED
-                + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+                + Opcodes.ACC_DEPRECATED + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
         if (name == null || !name.endsWith("package-info")) {
             CheckMethodAdapter.checkInternalName(name, "class name");
         }
         if ("java/lang/Object".equals(name)) {
             if (superName != null) {
-                throw new IllegalArgumentException("The super class name of the Object class must be 'null'");
+                throw new IllegalArgumentException(
+                        "The super class name of the Object class must be 'null'");
             }
         } else {
             CheckMethodAdapter.checkInternalName(superName, "super class name");
         }
         if (signature != null) {
-            CheckMethodAdapter.checkClassSignature(signature);
+            checkClassSignature(signature);
         }
         if ((access & Opcodes.ACC_INTERFACE) != 0) {
             if (!"java/lang/Object".equals(superName)) {
-                throw new IllegalArgumentException("The super class name of interfaces must be 'java/lang/Object'");
+                throw new IllegalArgumentException(
+                        "The super class name of interfaces must be 'java/lang/Object'");
             }
         }
         if (interfaces != null) {
@@ -409,21 +406,20 @@ public class CheckClassAdapter extends ClassVisitor {
     public void visitSource(final String file, final String debug) {
         checkState();
         if (source) {
-            throw new IllegalStateException("visitSource can be called only once.");
+            throw new IllegalStateException(
+                    "visitSource can be called only once.");
         }
         source = true;
         super.visitSource(file, debug);
     }
 
     @Override
-    public void visitOuterClass(
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitOuterClass(final String owner, final String name,
+            final String desc) {
         checkState();
         if (outer) {
-            throw new IllegalStateException("visitOuterClass can be called only once.");
+            throw new IllegalStateException(
+                    "visitOuterClass can be called only once.");
         }
         outer = true;
         if (owner == null) {
@@ -436,12 +432,8 @@ public class CheckClassAdapter extends ClassVisitor {
     }
 
     @Override
-    public void visitInnerClass(
-        final String name,
-        final String outerName,
-        final String innerName,
-        final int access)
-    {
+    public void visitInnerClass(final String name, final String outerName,
+            final String innerName, final int access) {
         checkState();
         CheckMethodAdapter.checkInternalName(name, "class name");
         if (outerName != null) {
@@ -459,52 +451,44 @@ public class CheckClassAdapter extends ClassVisitor {
     }
 
     @Override
-    public FieldVisitor visitField(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final Object value)
-    {
+    public FieldVisitor visitField(final int access, final String name,
+            final String desc, final String signature, final Object value) {
         checkState();
         checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE
                 + Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC
                 + Opcodes.ACC_FINAL + Opcodes.ACC_VOLATILE
                 + Opcodes.ACC_TRANSIENT + Opcodes.ACC_SYNTHETIC
-                + Opcodes.ACC_ENUM + Opcodes.ACC_DEPRECATED
-                + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+                + Opcodes.ACC_ENUM + Opcodes.ACC_DEPRECATED + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
         CheckMethodAdapter.checkUnqualifiedName(version, name, "field name");
         CheckMethodAdapter.checkDesc(desc, false);
         if (signature != null) {
-            CheckMethodAdapter.checkFieldSignature(signature);
+            checkFieldSignature(signature);
         }
         if (value != null) {
             CheckMethodAdapter.checkConstant(value);
         }
-        FieldVisitor av = super.visitField(access, name, desc, signature, value);
+        FieldVisitor av = super
+                .visitField(access, name, desc, signature, value);
         return new CheckFieldAdapter(av);
     }
 
     @Override
-    public MethodVisitor visitMethod(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final String[] exceptions)
-    {
+    public MethodVisitor visitMethod(final int access, final String name,
+            final String desc, final String signature, final String[] exceptions) {
         checkState();
         checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE
                 + Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC
                 + Opcodes.ACC_FINAL + Opcodes.ACC_SYNCHRONIZED
                 + Opcodes.ACC_BRIDGE + Opcodes.ACC_VARARGS + Opcodes.ACC_NATIVE
                 + Opcodes.ACC_ABSTRACT + Opcodes.ACC_STRICT
-                + Opcodes.ACC_SYNTHETIC + Opcodes.ACC_DEPRECATED
-                + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
-        CheckMethodAdapter.checkMethodIdentifier(version, name, "method name");
+                + Opcodes.ACC_SYNTHETIC + Opcodes.ACC_DEPRECATED + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+        if (!"<init>".equals(name) && !"<clinit>".equals(name)) {
+            CheckMethodAdapter.checkMethodIdentifier(version, name,
+                    "method name");
+        }
         CheckMethodAdapter.checkMethodDesc(desc);
         if (signature != null) {
-            CheckMethodAdapter.checkMethodSignature(signature);
+            checkMethodSignature(signature);
         }
         if (exceptions != null) {
             for (int i = 0; i < exceptions.length; ++i) {
@@ -514,27 +498,19 @@ public class CheckClassAdapter extends ClassVisitor {
         }
         CheckMethodAdapter cma;
         if (checkDataFlow) {
-            cma = new CheckMethodAdapter(access,
-                    name,
-                    desc,
-                    super.visitMethod(access, name, desc, signature, exceptions),
-                    labels);
+            cma = new CheckMethodAdapter(access, name, desc, super.visitMethod(
+                    access, name, desc, signature, exceptions), labels);
         } else {
-            cma = new CheckMethodAdapter(super.visitMethod(access,
-                    name,
-                    desc,
-                    signature,
-                    exceptions), labels);
+            cma = new CheckMethodAdapter(super.visitMethod(access, name, desc,
+                    signature, exceptions), labels);
         }
         cma.version = version;
         return cma;
     }
 
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public AnnotationVisitor visitAnnotation(final String desc,
+            final boolean visible) {
         checkState();
         CheckMethodAdapter.checkDesc(desc, false);
         return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
@@ -544,7 +520,8 @@ public class CheckClassAdapter extends ClassVisitor {
     public void visitAttribute(final Attribute attr) {
         checkState();
         if (attr == null) {
-            throw new IllegalArgumentException("Invalid attribute (must not be null)");
+            throw new IllegalArgumentException(
+                    "Invalid attribute (must not be null)");
         }
         super.visitAttribute(attr);
     }
@@ -566,10 +543,12 @@ public class CheckClassAdapter extends ClassVisitor {
      */
     private void checkState() {
         if (!start) {
-            throw new IllegalStateException("Cannot visit member before visit has been called.");
+            throw new IllegalStateException(
+                    "Cannot visit member before visit has been called.");
         }
         if (end) {
-            throw new IllegalStateException("Cannot visit member after visitEnd has been called.");
+            throw new IllegalStateException(
+                    "Cannot visit member after visitEnd has been called.");
         }
     }
 
@@ -578,8 +557,10 @@ public class CheckClassAdapter extends ClassVisitor {
      * method also checks that mutually incompatible flags are not set
      * simultaneously.
      *
-     * @param access the access flags to be checked
-     * @param possibleAccess the valid access flags.
+     * @param access
+     *            the access flags to be checked
+     * @param possibleAccess
+     *            the valid access flags.
      */
     static void checkAccess(final int access, final int possibleAccess) {
         if ((access & ~possibleAccess) != 0) {
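
To make the mutual-exclusion rule concrete, a small sketch (not from the patch; class and field names are made up) of what checkAccess rejects once a member is visited:

    ClassVisitor cca = new CheckClassAdapter(new ClassWriter(0));
    cca.visit(Opcodes.V1_5, Opcodes.ACC_PUBLIC, "p/C", null, "java/lang/Object", null);
    // ACC_PUBLIC and ACC_PRIVATE together trigger
    // IllegalArgumentException("public private and protected are mutually exclusive: 3")
    cca.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_PRIVATE, "f", "I", null, null);
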
@@ -590,14 +571,336 @@ public class CheckClassAdapter extends ClassVisitor {
         int pri = (access & Opcodes.ACC_PRIVATE) == 0 ? 0 : 1;
         int pro = (access & Opcodes.ACC_PROTECTED) == 0 ? 0 : 1;
         if (pub + pri + pro > 1) {
-            throw new IllegalArgumentException("public private and protected are mutually exclusive: "
-                    + access);
+            throw new IllegalArgumentException(
+                    "public private and protected are mutually exclusive: "
+                            + access);
         }
         int fin = (access & Opcodes.ACC_FINAL) == 0 ? 0 : 1;
         int abs = (access & Opcodes.ACC_ABSTRACT) == 0 ? 0 : 1;
         if (fin + abs > 1) {
-            throw new IllegalArgumentException("final and abstract are mutually exclusive: "
-                    + access);
+            throw new IllegalArgumentException(
+                    "final and abstract are mutually exclusive: " + access);
+        }
+    }
+
+    /**
+     * Checks a class signature.
+     *
+     * @param signature
+     *            a string containing the signature that must be checked.
+     */
+    public static void checkClassSignature(final String signature) {
+        // ClassSignature:
+        // FormalTypeParameters? ClassTypeSignature ClassTypeSignature*
+
+        int pos = 0;
+        if (getChar(signature, 0) == '<') {
+            pos = checkFormalTypeParameters(signature, pos);
+        }
+        pos = checkClassTypeSignature(signature, pos);
+        while (getChar(signature, pos) == 'L') {
+            pos = checkClassTypeSignature(signature, pos);
+        }
+        if (pos != signature.length()) {
+            throw new IllegalArgumentException(signature + ": error at index "
+                    + pos);
+        }
+    }
+
+    /**
+     * Checks a method signature.
+     *
+     * @param signature
+     *            a string containing the signature that must be checked.
+     */
+    public static void checkMethodSignature(final String signature) {
+        // MethodTypeSignature:
+        // FormalTypeParameters? ( TypeSignature* ) ( TypeSignature | V ) (
+        // ^ClassTypeSignature | ^TypeVariableSignature )*
+
+        int pos = 0;
+        if (getChar(signature, 0) == '<') {
+            pos = checkFormalTypeParameters(signature, pos);
+        }
+        pos = checkChar('(', signature, pos);
+        while ("ZCBSIFJDL[T".indexOf(getChar(signature, pos)) != -1) {
+            pos = checkTypeSignature(signature, pos);
+        }
+        pos = checkChar(')', signature, pos);
+        if (getChar(signature, pos) == 'V') {
+            ++pos;
+        } else {
+            pos = checkTypeSignature(signature, pos);
+        }
+        while (getChar(signature, pos) == '^') {
+            ++pos;
+            if (getChar(signature, pos) == 'L') {
+                pos = checkClassTypeSignature(signature, pos);
+            } else {
+                pos = checkTypeVariableSignature(signature, pos);
+            }
+        }
+        if (pos != signature.length()) {
+            throw new IllegalArgumentException(signature + ": error at index "
+                    + pos);
+        }
+    }
+
+    /**
+     * Checks a field signature.
+     *
+     * @param signature
+     *            a string containing the signature that must be checked.
+     */
+    public static void checkFieldSignature(final String signature) {
+        int pos = checkFieldTypeSignature(signature, 0);
+        if (pos != signature.length()) {
+            throw new IllegalArgumentException(signature + ": error at index "
+                    + pos);
+        }
+    }
+
+    /**
+     * Checks the formal type parameters of a class or method signature.
+     *
+     * @param signature
+     *            a string containing the signature that must be checked.
+     * @param pos
+     *            index of first character to be checked.
+     * @return the index of the first character after the checked part.
+     */
+    private static int checkFormalTypeParameters(final String signature, int pos) {
+        // FormalTypeParameters:
+        // < FormalTypeParameter+ >
+
+        pos = checkChar('<', signature, pos);
+        pos = checkFormalTypeParameter(signature, pos);
+        while (getChar(signature, pos) != '>') {
+            pos = checkFormalTypeParameter(signature, pos);
+        }
+        return pos + 1;
+    }
+
+    /**
+     * Checks a formal type parameter of a class or method signature.
+     *
+     * @param signature
+     *            a string containing the signature that must be checked.
+     * @param pos
+     *            index of first character to be checked.
+     * @return the index of the first character after the checked part.
+     */
+    private static int checkFormalTypeParameter(final String signature, int pos) {
+        // FormalTypeParameter:
+        // Identifier : FieldTypeSignature? (: FieldTypeSignature)*
+
+        pos = checkIdentifier(signature, pos);
+        pos = checkChar(':', signature, pos);
+        if ("L[T".indexOf(getChar(signature, pos)) != -1) {
+            pos = checkFieldTypeSignature(signature, pos);
+        }
+        while (getChar(signature, pos) == ':') {
+            pos = checkFieldTypeSignature(signature, pos + 1);
+        }
+        return pos;
+    }
+
+    /**
+     * Checks a field type signature.
+     *
+     * @param signature
+     *            a string containing the signature that must be checked.
+     * @param pos
+     *            index of first character to be checked.
+     * @return the index of the first character after the checked part.
+     */
+    private static int checkFieldTypeSignature(final String signature, int pos) {
+        // FieldTypeSignature:
+        // ClassTypeSignature | ArrayTypeSignature | TypeVariableSignature
+        //
+        // ArrayTypeSignature:
+        // [ TypeSignature
+
+        switch (getChar(signature, pos)) {
+        case 'L':
+            return checkClassTypeSignature(signature, pos);
+        case '[':
+            return checkTypeSignature(signature, pos + 1);
+        default:
+            return checkTypeVariableSignature(signature, pos);
         }
     }
+
+    /**
+     * Checks a class type signature.
+     *
+     * @param signature
+     *            a string containing the signature that must be checked.
+     * @param pos
+     *            index of first character to be checked.
+     * @return the index of the first character after the checked part.
+     */
+    private static int checkClassTypeSignature(final String signature, int pos) {
+        // ClassTypeSignature:
+        // L Identifier ( / Identifier )* TypeArguments? ( . Identifier
+        // TypeArguments? )* ;
+
+        pos = checkChar('L', signature, pos);
+        pos = checkIdentifier(signature, pos);
+        while (getChar(signature, pos) == '/') {
+            pos = checkIdentifier(signature, pos + 1);
+        }
+        if (getChar(signature, pos) == '<') {
+            pos = checkTypeArguments(signature, pos);
+        }
+        while (getChar(signature, pos) == '.') {
+            pos = checkIdentifier(signature, pos + 1);
+            if (getChar(signature, pos) == '<') {
+                pos = checkTypeArguments(signature, pos);
+            }
+        }
+        return checkChar(';', signature, pos);
+    }
+
+    /**
+     * Checks the type arguments in a class type signature.
+     *
+     * @param signature
+     *            a string containing the signature that must be checked.
+     * @param pos
+     *            index of first character to be checked.
+     * @return the index of the first character after the checked part.
+     */
+    private static int checkTypeArguments(final String signature, int pos) {
+        // TypeArguments:
+        // < TypeArgument+ >
+
+        pos = checkChar('<', signature, pos);
+        pos = checkTypeArgument(signature, pos);
+        while (getChar(signature, pos) != '>') {
+            pos = checkTypeArgument(signature, pos);
+        }
+        return pos + 1;
+    }
+
+    /**
+     * Checks a type argument in a class type signature.
+     *
+     * @param signature
+     *            a string containing the signature that must be checked.
+     * @param pos
+     *            index of first character to be checked.
+     * @return the index of the first character after the checked part.
+     */
+    private static int checkTypeArgument(final String signature, int pos) {
+        // TypeArgument:
+        // * | ( ( + | - )? FieldTypeSignature )
+
+        char c = getChar(signature, pos);
+        if (c == '*') {
+            return pos + 1;
+        } else if (c == '+' || c == '-') {
+            pos++;
+        }
+        return checkFieldTypeSignature(signature, pos);
+    }
+
+    /**
+     * Checks a type variable signature.
+     *
+     * @param signature
+     *            a string containing the signature that must be checked.
+     * @param pos
+     *            index of first character to be checked.
+     * @return the index of the first character after the checked part.
+     */
+    private static int checkTypeVariableSignature(final String signature,
+            int pos) {
+        // TypeVariableSignature:
+        // T Identifier ;
+
+        pos = checkChar('T', signature, pos);
+        pos = checkIdentifier(signature, pos);
+        return checkChar(';', signature, pos);
+    }
+
+    /**
+     * Checks a type signature.
+     *
+     * @param signature
+     *            a string containing the signature that must be checked.
+     * @param pos
+     *            index of first character to be checked.
+     * @return the index of the first character after the checked part.
+     */
+    private static int checkTypeSignature(final String signature, int pos) {
+        // TypeSignature:
+        // Z | C | B | S | I | F | J | D | FieldTypeSignature
+
+        switch (getChar(signature, pos)) {
+        case 'Z':
+        case 'C':
+        case 'B':
+        case 'S':
+        case 'I':
+        case 'F':
+        case 'J':
+        case 'D':
+            return pos + 1;
+        default:
+            return checkFieldTypeSignature(signature, pos);
+        }
+    }
+
+    /**
+     * Checks an identifier.
+     *
+     * @param signature
+     *            a string containing the signature that must be checked.
+     * @param pos
+     *            index of first character to be checked.
+     * @return the index of the first character after the checked part.
+     */
+    private static int checkIdentifier(final String signature, int pos) {
+        if (!Character.isJavaIdentifierStart(getChar(signature, pos))) {
+            throw new IllegalArgumentException(signature
+                    + ": identifier expected at index " + pos);
+        }
+        ++pos;
+        while (Character.isJavaIdentifierPart(getChar(signature, pos))) {
+            ++pos;
+        }
+        return pos;
+    }
+
+    /**
+     * Checks a single character.
+     *
+     * @param signature
+     *            a string containing the signature that must be checked.
+     * @param pos
+     *            index of first character to be checked.
+     * @return the index of the first character after the checked part.
+     */
+    private static int checkChar(final char c, final String signature, int pos) {
+        if (getChar(signature, pos) == c) {
+            return pos + 1;
+        }
+        throw new IllegalArgumentException(signature + ": '" + c
+                + "' expected at index " + pos);
+    }
+
+    /**
+     * Returns the signature character at the given index.
+     *
+     * @param signature
+     *            a signature.
+     * @param pos
+     *            an index in signature.
+     * @return the character at the given index, or 0 if there is no such
+     *         character.
+     */
+    private static char getChar(final String signature, int pos) {
+        return pos < signature.length() ? signature.charAt(pos) : (char) 0;
+    }
 }
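
The signature checkers added above are public static helpers and can be used on their own; a short sketch with hand-written generic signatures (illustrative values, not taken from the patch):

    // class C<T> extends Object implements Comparable<T>
    CheckClassAdapter.checkClassSignature(
            "<T:Ljava/lang/Object;>Ljava/lang/Object;Ljava/lang/Comparable<TT;>;");
    // <T> T identity(T t)
    CheckClassAdapter.checkMethodSignature("<T:Ljava/lang/Object;>(TT;)TT;");
    // a List<String> field
    CheckClassAdapter.checkFieldSignature("Ljava/util/List<Ljava/lang/String;>;");
    // malformed type argument: throws IllegalArgumentException pointing at the bad index
    CheckClassAdapter.checkFieldSignature("Ljava/util/List<>;");
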
diff --git a/src/asm/scala/tools/asm/util/CheckFieldAdapter.java b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
index bdcbe14..4657605 100644
--- a/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
@@ -46,7 +46,8 @@ public class CheckFieldAdapter extends FieldVisitor {
      * this constructor</i>. Instead, they must use the
      * {@link #CheckFieldAdapter(int, FieldVisitor)} version.
      *
-     * @param fv the field visitor to which this adapter must delegate calls.
+     * @param fv
+     *            the field visitor to which this adapter must delegate calls.
      */
     public CheckFieldAdapter(final FieldVisitor fv) {
         this(Opcodes.ASM4, fv);
@@ -55,19 +56,19 @@ public class CheckFieldAdapter extends FieldVisitor {
     /**
      * Constructs a new {@link CheckFieldAdapter}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
-     * @param fv the field visitor to which this adapter must delegate calls.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
+     * @param fv
+     *            the field visitor to which this adapter must delegate calls.
      */
     protected CheckFieldAdapter(final int api, final FieldVisitor fv) {
         super(api, fv);
     }
 
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public AnnotationVisitor visitAnnotation(final String desc,
+            final boolean visible) {
         checkEnd();
         CheckMethodAdapter.checkDesc(desc, false);
         return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
@@ -77,7 +78,8 @@ public class CheckFieldAdapter extends FieldVisitor {
     public void visitAttribute(final Attribute attr) {
         checkEnd();
         if (attr == null) {
-            throw new IllegalArgumentException("Invalid attribute (must not be null)");
+            throw new IllegalArgumentException(
+                    "Invalid attribute (must not be null)");
         }
         super.visitAttribute(attr);
     }
@@ -91,7 +93,8 @@ public class CheckFieldAdapter extends FieldVisitor {
 
     private void checkEnd() {
         if (end) {
-            throw new IllegalStateException("Cannot call a visit method after visitEnd has been called");
+            throw new IllegalStateException(
+                    "Cannot call a visit method after visitEnd has been called");
         }
     }
 }
diff --git a/src/asm/scala/tools/asm/util/CheckMethodAdapter.java b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
index 7549765..9da01c9 100644
--- a/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
@@ -58,7 +58,7 @@ import scala.tools.asm.tree.analysis.BasicVerifier;
  * arguments - such as the fact that the given opcode is correct for a given
  * visit method. This adapter can also perform some basic data flow checks (more
  * precisely those that can be performed without the full class hierarchy - see
- * {@link org.objectweb.asm.tree.analysis.BasicVerifier}). For instance in a
+ * {@link scala.tools.asm.tree.analysis.BasicVerifier}). For instance in a
  * method whose signature is <tt>void m ()</tt>, the invalid instruction
  * IRETURN, or the invalid sequence IADD L2I will be detected if the data flow
  * checks are enabled. These checks are enabled by using the
@@ -75,6 +75,11 @@ public class CheckMethodAdapter extends MethodVisitor {
     public int version;
 
     /**
+     * The access flags of the method.
+     */
+    private int access;
+
+    /**
      * <tt>true</tt> if the visitCode method has been called.
      */
     private boolean startCode;
@@ -107,6 +112,21 @@ public class CheckMethodAdapter extends MethodVisitor {
     private Set<Label> usedLabels;
 
     /**
+     * Number of visited frames in expanded form.
+     */
+    private int expandedFrames;
+
+    /**
+     * Number of visited frames in compressed form.
+     */
+    private int compressedFrames;
+
+    /**
+     * Number of instructions before the last visited frame.
+     */
+    private int lastFrame = -1;
+
+    /**
      * The exception handler ranges. Each pair of list element contains the
      * start and end labels of an exception handler block.
      */
@@ -352,7 +372,8 @@ public class CheckMethodAdapter extends MethodVisitor {
      * <i>Subclasses must not use this constructor</i>. Instead, they must use
      * the {@link #CheckMethodAdapter(int, MethodVisitor, Map)} version.
      *
-     * @param mv the method visitor to which this adapter must delegate calls.
+     * @param mv
+     *            the method visitor to which this adapter must delegate calls.
      */
     public CheckMethodAdapter(final MethodVisitor mv) {
         this(mv, new HashMap<Label, Integer>());
@@ -365,13 +386,13 @@ public class CheckMethodAdapter extends MethodVisitor {
      * <i>Subclasses must not use this constructor</i>. Instead, they must use
      * the {@link #CheckMethodAdapter(int, MethodVisitor, Map)} version.
      *
-     * @param mv the method visitor to which this adapter must delegate calls.
-     * @param labels a map of already visited labels (in other methods).
+     * @param mv
+     *            the method visitor to which this adapter must delegate calls.
+     * @param labels
+     *            a map of already visited labels (in other methods).
      */
-    public CheckMethodAdapter(
-        final MethodVisitor mv,
-        final Map<Label, Integer> labels)
-    {
+    public CheckMethodAdapter(final MethodVisitor mv,
+            final Map<Label, Integer> labels) {
         this(Opcodes.ASM4, mv, labels);
     }
 
@@ -380,14 +401,13 @@ public class CheckMethodAdapter extends MethodVisitor {
      * will not perform any data flow check (see
      * {@link #CheckMethodAdapter(int,String,String,MethodVisitor,Map)}).
      *
-     * @param mv the method visitor to which this adapter must delegate calls.
-     * @param labels a map of already visited labels (in other methods).
+     * @param mv
+     *            the method visitor to which this adapter must delegate calls.
+     * @param labels
+     *            a map of already visited labels (in other methods).
      */
-    protected CheckMethodAdapter(
-        final int api,
-        final MethodVisitor mv,
-        final Map<Label, Integer> labels)
-    {
+    protected CheckMethodAdapter(final int api, final MethodVisitor mv,
+            final Map<Label, Integer> labels) {
         super(api, mv);
         this.labels = labels;
         this.usedLabels = new HashSet<Label>();
@@ -400,30 +420,32 @@ public class CheckMethodAdapter extends MethodVisitor {
      * signature is <tt>void m ()</tt>, the invalid instruction IRETURN, or the
      * invalid sequence IADD L2I will be detected.
      *
-     * @param access the method's access flags.
-     * @param name the method's name.
-     * @param desc the method's descriptor (see {@link Type Type}).
-     * @param cmv the method visitor to which this adapter must delegate calls.
-     * @param labels a map of already visited labels (in other methods).
+     * @param access
+     *            the method's access flags.
+     * @param name
+     *            the method's name.
+     * @param desc
+     *            the method's descriptor (see {@link Type Type}).
+     * @param cmv
+     *            the method visitor to which this adapter must delegate calls.
+     * @param labels
+     *            a map of already visited labels (in other methods).
      */
-    public CheckMethodAdapter(
-        final int access,
-        final String name,
-        final String desc,
-        final MethodVisitor cmv,
-        final Map<Label, Integer> labels)
-    {
+    public CheckMethodAdapter(final int access, final String name,
+            final String desc, final MethodVisitor cmv,
+            final Map<Label, Integer> labels) {
         this(new MethodNode(access, name, desc, null, null) {
             @Override
             public void visitEnd() {
-                Analyzer<BasicValue> a = new Analyzer<BasicValue>(new BasicVerifier());
+                Analyzer<BasicValue> a = new Analyzer<BasicValue>(
+                        new BasicVerifier());
                 try {
                     a.analyze("dummy", this);
                 } catch (Exception e) {
                     if (e instanceof IndexOutOfBoundsException
-                            && maxLocals == 0 && maxStack == 0)
-                    {
-                        throw new RuntimeException("Data flow checking option requires valid, non zero maxLocals and maxStack values.");
+                            && maxLocals == 0 && maxStack == 0) {
+                        throw new RuntimeException(
+                                "Data flow checking option requires valid, non zero maxLocals and maxStack values.");
                     }
                     e.printStackTrace();
                     StringWriter sw = new StringWriter();
@@ -435,15 +457,13 @@ public class CheckMethodAdapter extends MethodVisitor {
                 }
                 accept(cmv);
             }
-        },
-                labels);
+        }, labels);
+        this.access = access;
     }
 
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public AnnotationVisitor visitAnnotation(final String desc,
+            final boolean visible) {
         checkEndMethod();
         checkDesc(desc, false);
         return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
@@ -456,68 +476,68 @@ public class CheckMethodAdapter extends MethodVisitor {
     }
 
     @Override
-    public AnnotationVisitor visitParameterAnnotation(
-        final int parameter,
-        final String desc,
-        final boolean visible)
-    {
+    public AnnotationVisitor visitParameterAnnotation(final int parameter,
+            final String desc, final boolean visible) {
         checkEndMethod();
         checkDesc(desc, false);
-        return new CheckAnnotationAdapter(super.visitParameterAnnotation(parameter,
-                desc,
-                visible));
+        return new CheckAnnotationAdapter(super.visitParameterAnnotation(
+                parameter, desc, visible));
     }
 
     @Override
     public void visitAttribute(final Attribute attr) {
         checkEndMethod();
         if (attr == null) {
-            throw new IllegalArgumentException("Invalid attribute (must not be null)");
+            throw new IllegalArgumentException(
+                    "Invalid attribute (must not be null)");
         }
         super.visitAttribute(attr);
     }
 
     @Override
     public void visitCode() {
+        if ((access & Opcodes.ACC_ABSTRACT) != 0) {
+            throw new RuntimeException("Abstract methods cannot have code");
+        }
         startCode = true;
         super.visitCode();
     }
 
     @Override
-    public void visitFrame(
-        final int type,
-        final int nLocal,
-        final Object[] local,
-        final int nStack,
-        final Object[] stack)
-    {
+    public void visitFrame(final int type, final int nLocal,
+            final Object[] local, final int nStack, final Object[] stack) {
+        if (insnCount == lastFrame) {
+            throw new IllegalStateException(
+                    "At most one frame can be visited at a given code location.");
+        }
+        lastFrame = insnCount;
         int mLocal;
         int mStack;
         switch (type) {
-            case Opcodes.F_NEW:
-            case Opcodes.F_FULL:
-                mLocal = Integer.MAX_VALUE;
-                mStack = Integer.MAX_VALUE;
-                break;
+        case Opcodes.F_NEW:
+        case Opcodes.F_FULL:
+            mLocal = Integer.MAX_VALUE;
+            mStack = Integer.MAX_VALUE;
+            break;
 
-            case Opcodes.F_SAME:
-                mLocal = 0;
-                mStack = 0;
-                break;
+        case Opcodes.F_SAME:
+            mLocal = 0;
+            mStack = 0;
+            break;
 
-            case Opcodes.F_SAME1:
-                mLocal = 0;
-                mStack = 1;
-                break;
+        case Opcodes.F_SAME1:
+            mLocal = 0;
+            mStack = 1;
+            break;
 
-            case Opcodes.F_APPEND:
-            case Opcodes.F_CHOP:
-                mLocal = 3;
-                mStack = 0;
-                break;
+        case Opcodes.F_APPEND:
+        case Opcodes.F_CHOP:
+            mLocal = 3;
+            mStack = 0;
+            break;
 
-            default:
-                throw new IllegalArgumentException("Invalid frame type " + type);
+        default:
+            throw new IllegalArgumentException("Invalid frame type " + type);
         }
 
         if (nLocal > mLocal) {
@@ -531,19 +551,29 @@ public class CheckMethodAdapter extends MethodVisitor {
 
         if (type != Opcodes.F_CHOP) {
             if (nLocal > 0 && (local == null || local.length < nLocal)) {
-                throw new IllegalArgumentException("Array local[] is shorter than nLocal");
+                throw new IllegalArgumentException(
+                        "Array local[] is shorter than nLocal");
             }
             for (int i = 0; i < nLocal; ++i) {
                 checkFrameValue(local[i]);
             }
         }
         if (nStack > 0 && (stack == null || stack.length < nStack)) {
-            throw new IllegalArgumentException("Array stack[] is shorter than nStack");
+            throw new IllegalArgumentException(
+                    "Array stack[] is shorter than nStack");
         }
         for (int i = 0; i < nStack; ++i) {
             checkFrameValue(stack[i]);
         }
-
+        if (type == Opcodes.F_NEW) {
+            ++expandedFrames;
+        } else {
+            ++compressedFrames;
+        }
+        if (expandedFrames > 0 && compressedFrames > 0) {
+            throw new RuntimeException(
+                    "Expanded and compressed frames must not be mixed.");
+        }
         super.visitFrame(type, nLocal, local, nStack, stack);
     }
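
A minimal sketch of how a caller stays inside the new frame rules (classBytes is again an assumed byte[] input): asking ClassReader for expanded frames means the adapter only ever sees F_NEW frames, so expanded and compressed forms cannot end up mixed.

    ClassReader cr = new ClassReader(classBytes);
    ClassWriter cw = new ClassWriter(0);
    cr.accept(new CheckClassAdapter(cw, false), ClassReader.EXPAND_FRAMES);
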
 
@@ -562,18 +592,19 @@ public class CheckMethodAdapter extends MethodVisitor {
         checkEndCode();
         checkOpcode(opcode, 1);
         switch (opcode) {
-            case Opcodes.BIPUSH:
-                checkSignedByte(operand, "Invalid operand");
-                break;
-            case Opcodes.SIPUSH:
-                checkSignedShort(operand, "Invalid operand");
-                break;
-            // case Constants.NEWARRAY:
-            default:
-                if (operand < Opcodes.T_BOOLEAN || operand > Opcodes.T_LONG) {
-                    throw new IllegalArgumentException("Invalid operand (must be an array type code T_...): "
-                            + operand);
-                }
+        case Opcodes.BIPUSH:
+            checkSignedByte(operand, "Invalid operand");
+            break;
+        case Opcodes.SIPUSH:
+            checkSignedShort(operand, "Invalid operand");
+            break;
+        // case Constants.NEWARRAY:
+        default:
+            if (operand < Opcodes.T_BOOLEAN || operand > Opcodes.T_LONG) {
+                throw new IllegalArgumentException(
+                        "Invalid operand (must be an array type code T_...): "
+                                + operand);
+            }
         }
         super.visitIntInsn(opcode, operand);
         ++insnCount;
@@ -596,20 +627,16 @@ public class CheckMethodAdapter extends MethodVisitor {
         checkOpcode(opcode, 3);
         checkInternalName(type, "type");
         if (opcode == Opcodes.NEW && type.charAt(0) == '[') {
-            throw new IllegalArgumentException("NEW cannot be used to create arrays: "
-                    + type);
+            throw new IllegalArgumentException(
+                    "NEW cannot be used to create arrays: " + type);
         }
         super.visitTypeInsn(opcode, type);
         ++insnCount;
     }
 
     @Override
-    public void visitFieldInsn(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitFieldInsn(final int opcode, final String owner,
+            final String name, final String desc) {
         checkStartCode();
         checkEndCode();
         checkOpcode(opcode, 4);
@@ -621,16 +648,14 @@ public class CheckMethodAdapter extends MethodVisitor {
     }
 
     @Override
-    public void visitMethodInsn(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitMethodInsn(final int opcode, final String owner,
+            final String name, final String desc) {
         checkStartCode();
         checkEndCode();
         checkOpcode(opcode, 5);
-        checkMethodIdentifier(version, name, "name");
+        if (opcode != Opcodes.INVOKESPECIAL || !"<init>".equals(name)) {
+            checkMethodIdentifier(version, name, "name");
+        }
         checkInternalName(owner, "owner");
         checkMethodDesc(desc);
         super.visitMethodInsn(opcode, owner, name, desc);
@@ -638,19 +663,14 @@ public class CheckMethodAdapter extends MethodVisitor {
     }
 
     @Override
-    public void visitInvokeDynamicInsn(
-        String name,
-        String desc,
-        Handle bsm,
-        Object... bsmArgs)
-    {
+    public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+            Object... bsmArgs) {
         checkStartCode();
         checkEndCode();
         checkMethodIdentifier(version, name, "name");
         checkMethodDesc(desc);
         if (bsm.getTag() != Opcodes.H_INVOKESTATIC
-                && bsm.getTag() != Opcodes.H_NEWINVOKESPECIAL)
-        {
+                && bsm.getTag() != Opcodes.H_NEWINVOKESPECIAL) {
             throw new IllegalArgumentException("invalid handle tag "
                     + bsm.getTag());
         }
@@ -705,12 +725,8 @@ public class CheckMethodAdapter extends MethodVisitor {
     }
 
     @Override
-    public void visitTableSwitchInsn(
-        final int min,
-        final int max,
-        final Label dflt,
-        final Label... labels)
-    {
+    public void visitTableSwitchInsn(final int min, final int max,
+            final Label dflt, final Label... labels) {
         checkStartCode();
         checkEndCode();
         if (max < min) {
@@ -720,7 +736,8 @@ public class CheckMethodAdapter extends MethodVisitor {
         checkLabel(dflt, false, "default label");
         checkNonDebugLabel(dflt);
         if (labels == null || labels.length != max - min + 1) {
-            throw new IllegalArgumentException("There must be max - min + 1 labels");
+            throw new IllegalArgumentException(
+                    "There must be max - min + 1 labels");
         }
         for (int i = 0; i < labels.length; ++i) {
             checkLabel(labels[i], false, "label at index " + i);
@@ -734,17 +751,15 @@ public class CheckMethodAdapter extends MethodVisitor {
     }
 
     @Override
-    public void visitLookupSwitchInsn(
-        final Label dflt,
-        final int[] keys,
-        final Label[] labels)
-    {
+    public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+            final Label[] labels) {
         checkEndCode();
         checkStartCode();
         checkLabel(dflt, false, "default label");
         checkNonDebugLabel(dflt);
         if (keys == null || labels == null || keys.length != labels.length) {
-            throw new IllegalArgumentException("There must be the same number of keys and labels");
+            throw new IllegalArgumentException(
+                    "There must be the same number of keys and labels");
         }
         for (int i = 0; i < labels.length; ++i) {
             checkLabel(labels[i], false, "label at index " + i);
@@ -764,28 +779,26 @@ public class CheckMethodAdapter extends MethodVisitor {
         checkEndCode();
         checkDesc(desc, false);
         if (desc.charAt(0) != '[') {
-            throw new IllegalArgumentException("Invalid descriptor (must be an array type descriptor): "
-                    + desc);
+            throw new IllegalArgumentException(
+                    "Invalid descriptor (must be an array type descriptor): "
+                            + desc);
         }
         if (dims < 1) {
-            throw new IllegalArgumentException("Invalid dimensions (must be greater than 0): "
-                    + dims);
+            throw new IllegalArgumentException(
+                    "Invalid dimensions (must be greater than 0): " + dims);
         }
         if (dims > desc.lastIndexOf('[') + 1) {
-            throw new IllegalArgumentException("Invalid dimensions (must not be greater than dims(desc)): "
-                    + dims);
+            throw new IllegalArgumentException(
+                    "Invalid dimensions (must not be greater than dims(desc)): "
+                            + dims);
         }
         super.visitMultiANewArrayInsn(desc, dims);
         ++insnCount;
     }
 
     @Override
-    public void visitTryCatchBlock(
-        final Label start,
-        final Label end,
-        final Label handler,
-        final String type)
-    {
+    public void visitTryCatchBlock(final Label start, final Label end,
+            final Label handler, final String type) {
         checkStartCode();
         checkEndCode();
         checkLabel(start, false, "start label");
@@ -795,9 +808,9 @@ public class CheckMethodAdapter extends MethodVisitor {
         checkNonDebugLabel(end);
         checkNonDebugLabel(handler);
         if (labels.get(start) != null || labels.get(end) != null
-                || labels.get(handler) != null)
-        {
-            throw new IllegalStateException("Try catch blocks must be visited before their labels");
+                || labels.get(handler) != null) {
+            throw new IllegalStateException(
+                    "Try catch blocks must be visited before their labels");
         }
         if (type != null) {
             checkInternalName(type, "type");
@@ -808,14 +821,9 @@ public class CheckMethodAdapter extends MethodVisitor {
     }
 
     @Override
-    public void visitLocalVariable(
-        final String name,
-        final String desc,
-        final String signature,
-        final Label start,
-        final Label end,
-        final int index)
-    {
+    public void visitLocalVariable(final String name, final String desc,
+            final String signature, final Label start, final Label end,
+            final int index) {
         checkStartCode();
         checkEndCode();
         checkUnqualifiedName(version, name, "name");
@@ -826,7 +834,8 @@ public class CheckMethodAdapter extends MethodVisitor {
         int s = labels.get(start).intValue();
         int e = labels.get(end).intValue();
         if (e < s) {
-            throw new IllegalArgumentException("Invalid start and end labels (end must be greater than start)");
+            throw new IllegalArgumentException(
+                    "Invalid start and end labels (end must be greater than start)");
         }
         super.visitLocalVariable(name, desc, signature, start, end, index);
     }
@@ -850,14 +859,16 @@ public class CheckMethodAdapter extends MethodVisitor {
                 throw new IllegalStateException("Undefined label used");
             }
         }
-        for (int i = 0; i < handlers.size(); ) {
+        for (int i = 0; i < handlers.size();) {
             Integer start = labels.get(handlers.get(i++));
             Integer end = labels.get(handlers.get(i++));
             if (start == null || end == null) {
-                throw new IllegalStateException("Undefined try catch block labels");
+                throw new IllegalStateException(
+                        "Undefined try catch block labels");
             }
             if (end.intValue() <= start.intValue()) {
-                throw new IllegalStateException("Emty try catch block handler range");
+                throw new IllegalStateException(
+                        "Emty try catch block handler range");
             }
         }
         checkUnsignedShort(maxStack, "Invalid max stack");
@@ -879,7 +890,8 @@ public class CheckMethodAdapter extends MethodVisitor {
      */
     void checkStartCode() {
         if (!startCode) {
-            throw new IllegalStateException("Cannot visit instructions before visitCode has been called.");
+            throw new IllegalStateException(
+                    "Cannot visit instructions before visitCode has been called.");
         }
     }
 
@@ -888,7 +900,8 @@ public class CheckMethodAdapter extends MethodVisitor {
      */
     void checkEndCode() {
         if (endCode) {
-            throw new IllegalStateException("Cannot visit instructions after visitMaxs has been called.");
+            throw new IllegalStateException(
+                    "Cannot visit instructions after visitMaxs has been called.");
         }
     }
 
@@ -897,21 +910,22 @@ public class CheckMethodAdapter extends MethodVisitor {
      */
     void checkEndMethod() {
         if (endMethod) {
-            throw new IllegalStateException("Cannot visit elements after visitEnd has been called.");
+            throw new IllegalStateException(
+                    "Cannot visit elements after visitEnd has been called.");
         }
     }
 
     /**
      * Checks a stack frame value.
      *
-     * @param value the value to be checked.
+     * @param value
+     *            the value to be checked.
      */
     void checkFrameValue(final Object value) {
         if (value == Opcodes.TOP || value == Opcodes.INTEGER
                 || value == Opcodes.FLOAT || value == Opcodes.LONG
                 || value == Opcodes.DOUBLE || value == Opcodes.NULL
-                || value == Opcodes.UNINITIALIZED_THIS)
-        {
+                || value == Opcodes.UNINITIALIZED_THIS) {
             return;
         }
         if (value instanceof String) {
@@ -929,8 +943,10 @@ public class CheckMethodAdapter extends MethodVisitor {
     /**
      * Checks that the type of the given opcode is equal to the given type.
      *
-     * @param opcode the opcode to be checked.
-     * @param type the expected opcode type.
+     * @param opcode
+     *            the opcode to be checked.
+     * @param type
+     *            the expected opcode type.
      */
     static void checkOpcode(final int opcode, final int type) {
         if (opcode < 0 || opcode > 199 || TYPE[opcode] != type) {
@@ -941,8 +957,10 @@ public class CheckMethodAdapter extends MethodVisitor {
     /**
      * Checks that the given value is a signed byte.
      *
-     * @param value the value to be checked.
-     * @param msg an message to be used in case of error.
+     * @param value
+     *            the value to be checked.
+     * @param msg
+     *            a message to be used in case of error.
      */
     static void checkSignedByte(final int value, final String msg) {
         if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
@@ -954,8 +972,10 @@ public class CheckMethodAdapter extends MethodVisitor {
     /**
      * Checks that the given value is a signed short.
      *
-     * @param value the value to be checked.
-     * @param msg an message to be used in case of error.
+     * @param value
+     *            the value to be checked.
+     * @param msg
+     *            a message to be used in case of error.
      */
     static void checkSignedShort(final int value, final String msg) {
         if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) {
@@ -967,8 +987,10 @@ public class CheckMethodAdapter extends MethodVisitor {
     /**
      * Checks that the given value is an unsigned short.
      *
-     * @param value the value to be checked.
-     * @param msg an message to be used in case of error.
+     * @param value
+     *            the value to be checked.
+     * @param msg
+     *            a message to be used in case of error.
      */
     static void checkUnsignedShort(final int value, final String msg) {
         if (value < 0 || value > 65535) {
@@ -981,13 +1003,13 @@ public class CheckMethodAdapter extends MethodVisitor {
      * Checks that the given value is an {@link Integer}, a {@link Float}, a
      * {@link Long}, a {@link Double} or a {@link String}.
      *
-     * @param cst the value to be checked.
+     * @param cst
+     *            the value to be checked.
      */
     static void checkConstant(final Object cst) {
         if (!(cst instanceof Integer) && !(cst instanceof Float)
                 && !(cst instanceof Long) && !(cst instanceof Double)
-                && !(cst instanceof String))
-        {
+                && !(cst instanceof String)) {
             throw new IllegalArgumentException("Invalid constant: " + cst);
         }
     }
@@ -999,19 +1021,21 @@ public class CheckMethodAdapter extends MethodVisitor {
                 throw new IllegalArgumentException("Illegal LDC constant value");
             }
             if (s != Type.METHOD && (version & 0xFFFF) < Opcodes.V1_5) {
-                throw new IllegalArgumentException("ldc of a constant class requires at least version 1.5");
+                throw new IllegalArgumentException(
+                        "ldc of a constant class requires at least version 1.5");
             }
             if (s == Type.METHOD && (version & 0xFFFF) < Opcodes.V1_7) {
-                throw new IllegalArgumentException("ldc of a method type requires at least version 1.7");
+                throw new IllegalArgumentException(
+                        "ldc of a method type requires at least version 1.7");
             }
         } else if (cst instanceof Handle) {
             if ((version & 0xFFFF) < Opcodes.V1_7) {
-                throw new IllegalArgumentException("ldc of a handle requires at least version 1.7");
+                throw new IllegalArgumentException(
+                        "ldc of a handle requires at least version 1.7");
             }
             int tag = ((Handle) cst).getTag();
             if (tag < Opcodes.H_GETFIELD || tag > Opcodes.H_INVOKEINTERFACE) {
-                throw new IllegalArgumentException("invalid handle tag "
-                        + tag);
+                throw new IllegalArgumentException("invalid handle tag " + tag);
             }
         } else {
             checkConstant(cst);
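
A sketch of the version gate above; it assumes a CheckMethodAdapter wrapping a fresh MethodNode, with both set up only for illustration:

    CheckMethodAdapter mv = new CheckMethodAdapter(new MethodNode());
    mv.version = Opcodes.V1_4;
    mv.visitCode();
    // ldc of a Class constant needs a class file version of at least 1.5, so this
    // throws IllegalArgumentException("ldc of a constant class requires at least version 1.5")
    mv.visitLdcInsn(Type.getType("Ljava/lang/String;"));
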
@@ -1021,15 +1045,15 @@ public class CheckMethodAdapter extends MethodVisitor {
     /**
      * Checks that the given string is a valid unqualified name.
      *
-     * @param version the class version.
-     * @param name the string to be checked.
-     * @param msg a message to be used in case of error.
+     * @param version
+     *            the class version.
+     * @param name
+     *            the string to be checked.
+     * @param msg
+     *            a message to be used in case of error.
      */
-    static void checkUnqualifiedName(
-        int version,
-        final String name,
-        final String msg)
-    {
+    static void checkUnqualifiedName(int version, final String name,
+            final String msg) {
         if ((version & 0xFFFF) < Opcodes.V1_5) {
             checkIdentifier(name, msg);
         } else {
@@ -1045,8 +1069,10 @@ public class CheckMethodAdapter extends MethodVisitor {
     /**
      * Checks that the given string is a valid Java identifier.
      *
-     * @param name the string to be checked.
-     * @param msg a message to be used in case of error.
+     * @param name
+     *            the string to be checked.
+     * @param msg
+     *            a message to be used in case of error.
      */
     static void checkIdentifier(final String name, final String msg) {
         checkIdentifier(name, 0, -1, msg);
@@ -1055,21 +1081,20 @@ public class CheckMethodAdapter extends MethodVisitor {
     /**
      * Checks that the given substring is a valid Java identifier.
      *
-     * @param name the string to be checked.
-     * @param start index of the first character of the identifier (inclusive).
-     * @param end index of the last character of the identifier (exclusive). -1
-     *        is equivalent to <tt>name.length()</tt> if name is not
-     *        <tt>null</tt>.
-     * @param msg a message to be used in case of error.
+     * @param name
+     *            the string to be checked.
+     * @param start
+     *            index of the first character of the identifier (inclusive).
+     * @param end
+     *            index of the last character of the identifier (exclusive). -1
+     *            is equivalent to <tt>name.length()</tt> if name is not
+     *            <tt>null</tt>.
+     * @param msg
+     *            a message to be used in case of error.
      */
-    static void checkIdentifier(
-        final String name,
-        final int start,
-        final int end,
-        final String msg)
-    {
-        if (name == null || (end == -1 ? name.length() <= start : end <= start))
-        {
+    static void checkIdentifier(final String name, final int start,
+            final int end, final String msg) {
+        if (name == null || (end == -1 ? name.length() <= start : end <= start)) {
             throw new IllegalArgumentException("Invalid " + msg
                     + " (must not be null or empty)");
         }
@@ -1087,25 +1112,21 @@ public class CheckMethodAdapter extends MethodVisitor {
     }
 
     /**
-     * Checks that the given string is a valid Java identifier or is equal to
-     * '<init>' or '<clinit>'.
+     * Checks that the given string is a valid Java identifier.
      *
-     * @param version the class version.
-     * @param name the string to be checked.
-     * @param msg a message to be used in case of error.
+     * @param version
+     *            the class version.
+     * @param name
+     *            the string to be checked.
+     * @param msg
+     *            a message to be used in case of error.
      */
-    static void checkMethodIdentifier(
-        int version,
-        final String name,
-        final String msg)
-    {
+    static void checkMethodIdentifier(int version, final String name,
+            final String msg) {
         if (name == null || name.length() == 0) {
             throw new IllegalArgumentException("Invalid " + msg
                     + " (must not be null or empty)");
         }
-        if ("<init>".equals(name) || "<clinit>".equals(name)) {
-            return;
-        }
         if ((version & 0xFFFF) >= Opcodes.V1_5) {
             for (int i = 0; i < name.length(); ++i) {
                 if (".;[/<>".indexOf(name.charAt(i)) != -1) {
@@ -1116,17 +1137,19 @@ public class CheckMethodAdapter extends MethodVisitor {
             return;
         }
         if (!Character.isJavaIdentifierStart(name.charAt(0))) {
-            throw new IllegalArgumentException("Invalid "
-                    + msg
-                    + " (must be a '<init>', '<clinit>' or a valid Java identifier): "
-                    + name);
+            throw new IllegalArgumentException(
+                    "Invalid "
+                            + msg
+                            + " (must be a '<init>', '<clinit>' or a valid Java identifier): "
+                            + name);
         }
         for (int i = 1; i < name.length(); ++i) {
             if (!Character.isJavaIdentifierPart(name.charAt(i))) {
-                throw new IllegalArgumentException("Invalid "
-                        + msg
-                        + " (must be '<init>' or '<clinit>' or a valid Java identifier): "
-                        + name);
+                throw new IllegalArgumentException(
+                        "Invalid "
+                                + msg
+                                + " (must be '<init>' or '<clinit>' or a valid Java identifier): "
+                                + name);
             }
         }
     }
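
With the special case removed here, '<init>' and '<clinit>' are accepted only where the callers explicitly allow them (CheckClassAdapter.visitMethod, and visitMethodInsn for INVOKESPECIAL). A sketch, where mv stands for a hypothetical CheckMethodAdapter positioned inside a method body:

    mv.visitMethodInsn(Opcodes.INVOKESPECIAL, "p/C", "<init>", "()V");  // accepted
    mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "p/C", "<init>", "()V");  // rejected by the identifier check
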
@@ -1134,8 +1157,10 @@ public class CheckMethodAdapter extends MethodVisitor {
     /**
      * Checks that the given string is a valid internal class name.
      *
-     * @param name the string to be checked.
-     * @param msg a message to be used in case of error.
+     * @param name
+     *            the string to be checked.
+     * @param msg
+     *            a message to be used in case of error.
      */
     static void checkInternalName(final String name, final String msg) {
         if (name == null || name.length() == 0) {
@@ -1152,19 +1177,19 @@ public class CheckMethodAdapter extends MethodVisitor {
     /**
      * Checks that the given substring is a valid internal class name.
      *
-     * @param name the string to be checked.
-     * @param start index of the first character of the identifier (inclusive).
-     * @param end index of the last character of the identifier (exclusive). -1
-     *        is equivalent to <tt>name.length()</tt> if name is not
-     *        <tt>null</tt>.
-     * @param msg a message to be used in case of error.
+     * @param name
+     *            the string to be checked.
+     * @param start
+     *            index of the first character of the identifier (inclusive).
+     * @param end
+     *            index of the last character of the identifier (exclusive). -1
+     *            is equivalent to <tt>name.length()</tt> if name is not
+     *            <tt>null</tt>.
+     * @param msg
+     *            a message to be used in case of error.
      */
-    static void checkInternalName(
-        final String name,
-        final int start,
-        final int end,
-        final String msg)
-    {
+    static void checkInternalName(final String name, final int start,
+            final int end, final String msg) {
         int max = end == -1 ? name.length() : end;
         try {
             int begin = start;
@@ -1178,18 +1203,21 @@ public class CheckMethodAdapter extends MethodVisitor {
                 begin = slash + 1;
             } while (slash != max);
         } catch (IllegalArgumentException _) {
-            throw new IllegalArgumentException("Invalid "
-                    + msg
-                    + " (must be a fully qualified class name in internal form): "
-                    + name);
+            throw new IllegalArgumentException(
+                    "Invalid "
+                            + msg
+                            + " (must be a fully qualified class name in internal form): "
+                            + name);
         }
     }
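
As a concrete illustration (assuming the vendored scala.tools.asm.Type mirrors org.objectweb.asm.Type), internal names use '/' rather than '.' as the package separator, which is exactly what the slash-splitting loop above enforces:

    import scala.tools.asm.Type;

    class InternalNameDemo {
        public static void main(String[] args) {
            // Prints "java/lang/String" -- a valid internal name.
            // "java.lang.String" would be rejected by checkInternalName.
            System.out.println(Type.getInternalName(String.class));
        }
    }
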
 
     /**
      * Checks that the given string is a valid type descriptor.
      *
-     * @param desc the string to be checked.
-     * @param canBeVoid <tt>true</tt> if <tt>V</tt> can be considered valid.
+     * @param desc
+     *            the string to be checked.
+     * @param canBeVoid
+     *            <tt>true</tt> if <tt>V</tt> can be considered valid.
      */
     static void checkDesc(final String desc, final boolean canBeVoid) {
         int end = checkDesc(desc, 0, canBeVoid);
@@ -1201,75 +1229,77 @@ public class CheckMethodAdapter extends MethodVisitor {
     /**
     * Checks that the given substring is a valid type descriptor.
      *
-     * @param desc the string to be checked.
-     * @param start index of the first character of the identifier (inclusive).
-     * @param canBeVoid <tt>true</tt> if <tt>V</tt> can be considered valid.
+     * @param desc
+     *            the string to be checked.
+     * @param start
+     *            index of the first character of the identifier (inclusive).
+     * @param canBeVoid
+     *            <tt>true</tt> if <tt>V</tt> can be considered valid.
     * @return the index of the last character of the type descriptor, plus one.
      */
-    static int checkDesc(
-        final String desc,
-        final int start,
-        final boolean canBeVoid)
-    {
+    static int checkDesc(final String desc, final int start,
+            final boolean canBeVoid) {
         if (desc == null || start >= desc.length()) {
-            throw new IllegalArgumentException("Invalid type descriptor (must not be null or empty)");
+            throw new IllegalArgumentException(
+                    "Invalid type descriptor (must not be null or empty)");
         }
         int index;
         switch (desc.charAt(start)) {
-            case 'V':
-                if (canBeVoid) {
-                    return start + 1;
-                } else {
-                    throw new IllegalArgumentException("Invalid descriptor: "
-                            + desc);
-                }
-            case 'Z':
-            case 'C':
-            case 'B':
-            case 'S':
-            case 'I':
-            case 'F':
-            case 'J':
-            case 'D':
+        case 'V':
+            if (canBeVoid) {
                 return start + 1;
-            case '[':
-                index = start + 1;
-                while (index < desc.length() && desc.charAt(index) == '[') {
-                    ++index;
-                }
-                if (index < desc.length()) {
-                    return checkDesc(desc, index, false);
-                } else {
-                    throw new IllegalArgumentException("Invalid descriptor: "
-                            + desc);
-                }
-            case 'L':
-                index = desc.indexOf(';', start);
-                if (index == -1 || index - start < 2) {
-                    throw new IllegalArgumentException("Invalid descriptor: "
-                            + desc);
-                }
-                try {
-                    checkInternalName(desc, start + 1, index, null);
-                } catch (IllegalArgumentException _) {
-                    throw new IllegalArgumentException("Invalid descriptor: "
-                            + desc);
-                }
-                return index + 1;
-            default:
+            } else {
+                throw new IllegalArgumentException("Invalid descriptor: "
+                        + desc);
+            }
+        case 'Z':
+        case 'C':
+        case 'B':
+        case 'S':
+        case 'I':
+        case 'F':
+        case 'J':
+        case 'D':
+            return start + 1;
+        case '[':
+            index = start + 1;
+            while (index < desc.length() && desc.charAt(index) == '[') {
+                ++index;
+            }
+            if (index < desc.length()) {
+                return checkDesc(desc, index, false);
+            } else {
                 throw new IllegalArgumentException("Invalid descriptor: "
                         + desc);
+            }
+        case 'L':
+            index = desc.indexOf(';', start);
+            if (index == -1 || index - start < 2) {
+                throw new IllegalArgumentException("Invalid descriptor: "
+                        + desc);
+            }
+            try {
+                checkInternalName(desc, start + 1, index, null);
+            } catch (IllegalArgumentException _) {
+                throw new IllegalArgumentException("Invalid descriptor: "
+                        + desc);
+            }
+            return index + 1;
+        default:
+            throw new IllegalArgumentException("Invalid descriptor: " + desc);
         }
     }
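
To make the descriptor grammar above concrete, a hedged sketch (again assuming the vendored Type helper matches org.objectweb.asm.Type) of a few shapes checkDesc accepts:

    import scala.tools.asm.Type;

    class DescriptorDemo {
        public static void main(String[] args) {
            System.out.println(Type.getDescriptor(int.class));      // "I"
            System.out.println(Type.getDescriptor(long[][].class)); // "[[J"
            System.out.println(Type.getDescriptor(String.class));   // "Ljava/lang/String;"
            // "V" (void) is accepted only where canBeVoid is true,
            // i.e. as the return type of a method descriptor.
        }
    }
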
 
     /**
      * Checks that the given string is a valid method descriptor.
      *
-     * @param desc the string to be checked.
+     * @param desc
+     *            the string to be checked.
      */
     static void checkMethodDesc(final String desc) {
         if (desc == null || desc.length() == 0) {
-            throw new IllegalArgumentException("Invalid method descriptor (must not be null or empty)");
+            throw new IllegalArgumentException(
+                    "Invalid method descriptor (must not be null or empty)");
         }
         if (desc.charAt(0) != '(' || desc.length() < 3) {
             throw new IllegalArgumentException("Invalid descriptor: " + desc);
@@ -1291,322 +1321,18 @@ public class CheckMethodAdapter extends MethodVisitor {
     }
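
A hedged example of the method descriptor shape validated here, obtained via reflection (assuming the vendored Type helper provides getMethodDescriptor as org.objectweb.asm.Type does):

    import scala.tools.asm.Type;

    class MethodDescDemo {
        public static void main(String[] args) throws Exception {
            // Prints "(Ljava/lang/String;)Ljava/lang/Class;" -- parameter
            // descriptors in parentheses followed by the return descriptor.
            System.out.println(Type.getMethodDescriptor(
                    Class.class.getMethod("forName", String.class)));
        }
    }
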
 
     /**
-     * Checks a class signature.
-     *
-     * @param signature a string containing the signature that must be checked.
-     */
-    static void checkClassSignature(final String signature) {
-        // ClassSignature:
-        // FormalTypeParameters? ClassTypeSignature ClassTypeSignature*
-
-        int pos = 0;
-        if (getChar(signature, 0) == '<') {
-            pos = checkFormalTypeParameters(signature, pos);
-        }
-        pos = checkClassTypeSignature(signature, pos);
-        while (getChar(signature, pos) == 'L') {
-            pos = checkClassTypeSignature(signature, pos);
-        }
-        if (pos != signature.length()) {
-            throw new IllegalArgumentException(signature + ": error at index "
-                    + pos);
-        }
-    }
-
-    /**
-     * Checks a method signature.
-     *
-     * @param signature a string containing the signature that must be checked.
-     */
-    static void checkMethodSignature(final String signature) {
-        // MethodTypeSignature:
-        // FormalTypeParameters? ( TypeSignature* ) ( TypeSignature | V ) (
-        // ^ClassTypeSignature | ^TypeVariableSignature )*
-
-        int pos = 0;
-        if (getChar(signature, 0) == '<') {
-            pos = checkFormalTypeParameters(signature, pos);
-        }
-        pos = checkChar('(', signature, pos);
-        while ("ZCBSIFJDL[T".indexOf(getChar(signature, pos)) != -1) {
-            pos = checkTypeSignature(signature, pos);
-        }
-        pos = checkChar(')', signature, pos);
-        if (getChar(signature, pos) == 'V') {
-            ++pos;
-        } else {
-            pos = checkTypeSignature(signature, pos);
-        }
-        while (getChar(signature, pos) == '^') {
-            ++pos;
-            if (getChar(signature, pos) == 'L') {
-                pos = checkClassTypeSignature(signature, pos);
-            } else {
-                pos = checkTypeVariableSignature(signature, pos);
-            }
-        }
-        if (pos != signature.length()) {
-            throw new IllegalArgumentException(signature + ": error at index "
-                    + pos);
-        }
-    }
-
-    /**
-     * Checks a field signature.
-     *
-     * @param signature a string containing the signature that must be checked.
-     */
-    static void checkFieldSignature(final String signature) {
-        int pos = checkFieldTypeSignature(signature, 0);
-        if (pos != signature.length()) {
-            throw new IllegalArgumentException(signature + ": error at index "
-                    + pos);
-        }
-    }
-
-    /**
-     * Checks the formal type parameters of a class or method signature.
-     *
-     * @param signature a string containing the signature that must be checked.
-     * @param pos index of first character to be checked.
-     * @return the index of the first character after the checked part.
-     */
-    private static int checkFormalTypeParameters(final String signature, int pos)
-    {
-        // FormalTypeParameters:
-        // < FormalTypeParameter+ >
-
-        pos = checkChar('<', signature, pos);
-        pos = checkFormalTypeParameter(signature, pos);
-        while (getChar(signature, pos) != '>') {
-            pos = checkFormalTypeParameter(signature, pos);
-        }
-        return pos + 1;
-    }
-
-    /**
-     * Checks a formal type parameter of a class or method signature.
-     *
-     * @param signature a string containing the signature that must be checked.
-     * @param pos index of first character to be checked.
-     * @return the index of the first character after the checked part.
-     */
-    private static int checkFormalTypeParameter(final String signature, int pos)
-    {
-        // FormalTypeParameter:
-        // Identifier : FieldTypeSignature? (: FieldTypeSignature)*
-
-        pos = checkIdentifier(signature, pos);
-        pos = checkChar(':', signature, pos);
-        if ("L[T".indexOf(getChar(signature, pos)) != -1) {
-            pos = checkFieldTypeSignature(signature, pos);
-        }
-        while (getChar(signature, pos) == ':') {
-            pos = checkFieldTypeSignature(signature, pos + 1);
-        }
-        return pos;
-    }
-
-    /**
-     * Checks a field type signature.
-     *
-     * @param signature a string containing the signature that must be checked.
-     * @param pos index of first character to be checked.
-     * @return the index of the first character after the checked part.
-     */
-    private static int checkFieldTypeSignature(final String signature, int pos)
-    {
-        // FieldTypeSignature:
-        // ClassTypeSignature | ArrayTypeSignature | TypeVariableSignature
-        //
-        // ArrayTypeSignature:
-        // [ TypeSignature
-
-        switch (getChar(signature, pos)) {
-            case 'L':
-                return checkClassTypeSignature(signature, pos);
-            case '[':
-                return checkTypeSignature(signature, pos + 1);
-            default:
-                return checkTypeVariableSignature(signature, pos);
-        }
-    }
-
-    /**
-     * Checks a class type signature.
-     *
-     * @param signature a string containing the signature that must be checked.
-     * @param pos index of first character to be checked.
-     * @return the index of the first character after the checked part.
-     */
-    private static int checkClassTypeSignature(final String signature, int pos)
-    {
-        // ClassTypeSignature:
-        // L Identifier ( / Identifier )* TypeArguments? ( . Identifier
-        // TypeArguments? )* ;
-
-        pos = checkChar('L', signature, pos);
-        pos = checkIdentifier(signature, pos);
-        while (getChar(signature, pos) == '/') {
-            pos = checkIdentifier(signature, pos + 1);
-        }
-        if (getChar(signature, pos) == '<') {
-            pos = checkTypeArguments(signature, pos);
-        }
-        while (getChar(signature, pos) == '.') {
-            pos = checkIdentifier(signature, pos + 1);
-            if (getChar(signature, pos) == '<') {
-                pos = checkTypeArguments(signature, pos);
-            }
-        }
-        return checkChar(';', signature, pos);
-    }
-
-    /**
-     * Checks the type arguments in a class type signature.
-     *
-     * @param signature a string containing the signature that must be checked.
-     * @param pos index of first character to be checked.
-     * @return the index of the first character after the checked part.
-     */
-    private static int checkTypeArguments(final String signature, int pos) {
-        // TypeArguments:
-        // < TypeArgument+ >
-
-        pos = checkChar('<', signature, pos);
-        pos = checkTypeArgument(signature, pos);
-        while (getChar(signature, pos) != '>') {
-            pos = checkTypeArgument(signature, pos);
-        }
-        return pos + 1;
-    }
-
-    /**
-     * Checks a type argument in a class type signature.
-     *
-     * @param signature a string containing the signature that must be checked.
-     * @param pos index of first character to be checked.
-     * @return the index of the first character after the checked part.
-     */
-    private static int checkTypeArgument(final String signature, int pos) {
-        // TypeArgument:
-        // * | ( ( + | - )? FieldTypeSignature )
-
-        char c = getChar(signature, pos);
-        if (c == '*') {
-            return pos + 1;
-        } else if (c == '+' || c == '-') {
-            pos++;
-        }
-        return checkFieldTypeSignature(signature, pos);
-    }
-
-    /**
-     * Checks a type variable signature.
-     *
-     * @param signature a string containing the signature that must be checked.
-     * @param pos index of first character to be checked.
-     * @return the index of the first character after the checked part.
-     */
-    private static int checkTypeVariableSignature(
-        final String signature,
-        int pos)
-    {
-        // TypeVariableSignature:
-        // T Identifier ;
-
-        pos = checkChar('T', signature, pos);
-        pos = checkIdentifier(signature, pos);
-        return checkChar(';', signature, pos);
-    }
-
-    /**
-     * Checks a type signature.
-     *
-     * @param signature a string containing the signature that must be checked.
-     * @param pos index of first character to be checked.
-     * @return the index of the first character after the checked part.
-     */
-    private static int checkTypeSignature(final String signature, int pos) {
-        // TypeSignature:
-        // Z | C | B | S | I | F | J | D | FieldTypeSignature
-
-        switch (getChar(signature, pos)) {
-            case 'Z':
-            case 'C':
-            case 'B':
-            case 'S':
-            case 'I':
-            case 'F':
-            case 'J':
-            case 'D':
-                return pos + 1;
-            default:
-                return checkFieldTypeSignature(signature, pos);
-        }
-    }
-
-    /**
-     * Checks an identifier.
-     *
-     * @param signature a string containing the signature that must be checked.
-     * @param pos index of first character to be checked.
-     * @return the index of the first character after the checked part.
-     */
-    private static int checkIdentifier(final String signature, int pos) {
-        if (!Character.isJavaIdentifierStart(getChar(signature, pos))) {
-            throw new IllegalArgumentException(signature
-                    + ": identifier expected at index " + pos);
-        }
-        ++pos;
-        while (Character.isJavaIdentifierPart(getChar(signature, pos))) {
-            ++pos;
-        }
-        return pos;
-    }
-
-    /**
-     * Checks a single character.
-     *
-     * @param signature a string containing the signature that must be checked.
-     * @param pos index of first character to be checked.
-     * @return the index of the first character after the checked part.
-     */
-    private static int checkChar(final char c, final String signature, int pos)
-    {
-        if (getChar(signature, pos) == c) {
-            return pos + 1;
-        }
-        throw new IllegalArgumentException(signature + ": '" + c
-                + "' expected at index " + pos);
-    }
-
-    /**
-     * Returns the signature character at the given index.
-     *
-     * @param signature a signature.
-     * @param pos an index in signature.
-     * @return the character at the given index, or 0 if there is no such
-     *         character.
-     */
-    private static char getChar(final String signature, int pos) {
-        return pos < signature.length() ? signature.charAt(pos) : (char) 0;
-    }
-
-    /**
      * Checks that the given label is not null. This method can also check that
      * the label has been visited.
      *
-     * @param label the label to be checked.
-     * @param checkVisited <tt>true</tt> to check that the label has been
-     *        visited.
-     * @param msg a message to be used in case of error.
+     * @param label
+     *            the label to be checked.
+     * @param checkVisited
+     *            <tt>true</tt> to check that the label has been visited.
+     * @param msg
+     *            a message to be used in case of error.
      */
-    void checkLabel(
-        final Label label,
-        final boolean checkVisited,
-        final String msg)
-    {
+    void checkLabel(final Label label, final boolean checkVisited,
+            final String msg) {
         if (label == null) {
             throw new IllegalArgumentException("Invalid " + msg
                     + " (must not be null)");
@@ -1620,7 +1346,8 @@ public class CheckMethodAdapter extends MethodVisitor {
     /**
      * Checks that the given label is not a label used only for debug purposes.
      *
-     * @param label the label to be checked.
+     * @param label
+     *            the label to be checked.
      */
     private static void checkNonDebugLabel(final Label label) {
         Field f = getLabelStatusField();
@@ -1631,7 +1358,8 @@ public class CheckMethodAdapter extends MethodVisitor {
             throw new Error("Internal error");
         }
         if ((status & 0x01) != 0) {
-            throw new IllegalArgumentException("Labels used for debug info cannot be reused for control flow");
+            throw new IllegalArgumentException(
+                    "Labels used for debug info cannot be reused for control flow");
         }
     }
 
@@ -1653,7 +1381,8 @@ public class CheckMethodAdapter extends MethodVisitor {
     /**
      * Returns the field of the Label class whose name is given.
      *
-     * @param name a field name.
+     * @param name
+     *            a field name.
      * @return the field of the Label class whose name is given, or null.
      */
     private static Field getLabelField(final String name) {
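
In practice CheckMethodAdapter instances are created by CheckClassAdapter rather than used directly; a hedged sketch of the usual wiring, assuming the vendored CheckClassAdapter mirrors org.objectweb.asm.util.CheckClassAdapter:

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.util.CheckClassAdapter;

    class VerifyDemo {
        public static void main(String[] args) throws Exception {
            ClassReader cr = new ClassReader("java.lang.Runnable");
            ClassWriter cw = new ClassWriter(0);
            // Each visited method is checked by a CheckMethodAdapter, so
            // malformed names, descriptors or labels fail fast with the
            // IllegalArgumentExceptions defined above.
            cr.accept(new CheckClassAdapter(cw), 0);
        }
    }
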
diff --git a/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
index 3a6c3e7..e69302b 100644
--- a/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
@@ -41,19 +41,22 @@ public class CheckSignatureAdapter extends SignatureVisitor {
 
     /**
      * Type to be used to check class signatures. See
-     * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+     * {@link #CheckSignatureAdapter(int, SignatureVisitor)
+     * CheckSignatureAdapter}.
      */
     public static final int CLASS_SIGNATURE = 0;
 
     /**
      * Type to be used to check method signatures. See
-     * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+     * {@link #CheckSignatureAdapter(int, SignatureVisitor)
+     * CheckSignatureAdapter}.
      */
     public static final int METHOD_SIGNATURE = 1;
 
     /**
     * Type to be used to check type signatures. See
-     * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+     * {@link #CheckSignatureAdapter(int, SignatureVisitor)
+     * CheckSignatureAdapter}.
      */
     public static final int TYPE_SIGNATURE = 2;
 
@@ -101,11 +104,13 @@ public class CheckSignatureAdapter extends SignatureVisitor {
      * not use this constructor</i>. Instead, they must use the
      * {@link #CheckSignatureAdapter(int, int, SignatureVisitor)} version.
      *
-     * @param type the type of signature to be checked. See
-     *        {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
-     *        {@link #TYPE_SIGNATURE}.
-     * @param sv the visitor to which this adapter must delegate calls. May be
-     *        <tt>null</tt>.
+     * @param type
+     *            the type of signature to be checked. See
+     *            {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
+     *            {@link #TYPE_SIGNATURE}.
+     * @param sv
+     *            the visitor to which this adapter must delegate calls. May be
+     *            <tt>null</tt>.
      */
     public CheckSignatureAdapter(final int type, final SignatureVisitor sv) {
         this(Opcodes.ASM4, type, sv);
@@ -114,19 +119,19 @@ public class CheckSignatureAdapter extends SignatureVisitor {
     /**
      * Creates a new {@link CheckSignatureAdapter} object.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
-     * @param type the type of signature to be checked. See
-     *        {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
-     *        {@link #TYPE_SIGNATURE}.
-     * @param sv the visitor to which this adapter must delegate calls. May be
-     *        <tt>null</tt>.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
+     * @param type
+     *            the type of signature to be checked. See
+     *            {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
+     *            {@link #TYPE_SIGNATURE}.
+     * @param sv
+     *            the visitor to which this adapter must delegate calls. May be
+     *            <tt>null</tt>.
      */
-    protected CheckSignatureAdapter(
-        final int api,
-        final int type,
-        final SignatureVisitor sv)
-    {
+    protected CheckSignatureAdapter(final int api, final int type,
+            final SignatureVisitor sv) {
         super(api);
         this.type = type;
         this.state = EMPTY;
@@ -138,8 +143,7 @@ public class CheckSignatureAdapter extends SignatureVisitor {
     @Override
     public void visitFormalTypeParameter(final String name) {
         if (type == TYPE_SIGNATURE
-                || (state != EMPTY && state != FORMAL && state != BOUND))
-        {
+                || (state != EMPTY && state != FORMAL && state != BOUND)) {
             throw new IllegalStateException();
         }
         CheckMethodAdapter.checkIdentifier(name, "formal type parameter");
@@ -172,8 +176,7 @@ public class CheckSignatureAdapter extends SignatureVisitor {
 
     @Override
     public SignatureVisitor visitSuperclass() {
-        if (type != CLASS_SIGNATURE || (state & (EMPTY | FORMAL | BOUND)) == 0)
-        {
+        if (type != CLASS_SIGNATURE || (state & (EMPTY | FORMAL | BOUND)) == 0) {
             throw new IllegalArgumentException();
         }
         state = SUPER;
@@ -195,8 +198,7 @@ public class CheckSignatureAdapter extends SignatureVisitor {
     @Override
     public SignatureVisitor visitParameterType() {
         if (type != METHOD_SIGNATURE
-                || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0)
-        {
+                || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0) {
             throw new IllegalArgumentException();
         }
         state = PARAM;
@@ -207,8 +209,7 @@ public class CheckSignatureAdapter extends SignatureVisitor {
     @Override
     public SignatureVisitor visitReturnType() {
         if (type != METHOD_SIGNATURE
-                || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0)
-        {
+                || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0) {
             throw new IllegalArgumentException();
         }
         state = RETURN;
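
For reference, a hedged example of how this adapter is normally driven (assuming the vendored SignatureReader matches org.objectweb.asm.signature.SignatureReader):

    import scala.tools.asm.signature.SignatureReader;
    import scala.tools.asm.util.CheckSignatureAdapter;

    class SignatureCheckDemo {
        public static void main(String[] args) {
            // Class signature of e.g. "class Box<E> implements java.util.List<E>".
            String sig = "<E:Ljava/lang/Object;>Ljava/lang/Object;Ljava/util/List<TE;>;";
            // A null delegate means the adapter only validates the signature.
            new SignatureReader(sig).accept(new CheckSignatureAdapter(
                    CheckSignatureAdapter.CLASS_SIGNATURE, null));
        }
    }
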
diff --git a/src/asm/scala/tools/asm/util/Printer.java b/src/asm/scala/tools/asm/util/Printer.java
index c39fd54..86e0f9e 100644
--- a/src/asm/scala/tools/asm/util/Printer.java
+++ b/src/asm/scala/tools/asm/util/Printer.java
@@ -52,14 +52,14 @@ public abstract class Printer {
 
     /**
     * The names of the <code>operand</code> parameter values of the
-     * {@link org.objectweb.asm.MethodVisitor#visitIntInsn} method when
+     * {@link scala.tools.asm.MethodVisitor#visitIntInsn} method when
      * <code>opcode</code> is <code>NEWARRAY</code>.
      */
     public static final String[] TYPES;
 
     /**
      * The names of the <code>tag</code> field values for
-     * {@link org.objectweb.asm.Handle}.
+     * {@link scala.tools.asm.Handle}.
      */
     public static final String[] HANDLE_TAG;
 
@@ -103,8 +103,8 @@ public abstract class Printer {
         }
 
         s = "H_GETFIELD,H_GETSTATIC,H_PUTFIELD,H_PUTSTATIC,"
-          + "H_INVOKEVIRTUAL,H_INVOKESTATIC,H_INVOKESPECIAL,"
-          + "H_NEWINVOKESPECIAL,H_INVOKEINTERFACE,";
+                + "H_INVOKEVIRTUAL,H_INVOKESTATIC,H_INVOKESPECIAL,"
+                + "H_NEWINVOKESPECIAL,H_INVOKEINTERFACE,";
         HANDLE_TAG = new String[10];
         j = 0;
         i = 1;
@@ -149,81 +149,58 @@ public abstract class Printer {
     }
 
     /**
-     * Class header.
-     * See {@link org.objectweb.asm.ClassVisitor#visit}.
+     * Class header. See {@link scala.tools.asm.ClassVisitor#visit}.
      */
-    public abstract void visit(
-        final int version,
-        final int access,
-        final String name,
-        final String signature,
-        final String superName,
-        final String[] interfaces);
+    public abstract void visit(final int version, final int access,
+            final String name, final String signature, final String superName,
+            final String[] interfaces);
 
     /**
-     * Class source.
-     * See {@link org.objectweb.asm.ClassVisitor#visitSource}.
+     * Class source. See {@link scala.tools.asm.ClassVisitor#visitSource}.
      */
     public abstract void visitSource(final String file, final String debug);
 
     /**
-     * Class outer class.
-     * See {@link org.objectweb.asm.ClassVisitor#visitOuterClass}.
+     * Class outer class. See
+     * {@link scala.tools.asm.ClassVisitor#visitOuterClass}.
      */
-    public abstract void visitOuterClass(
-        final String owner,
-        final String name,
-        final String desc);
+    public abstract void visitOuterClass(final String owner, final String name,
+            final String desc);
 
     /**
-     * Class annotation.
-     * See {@link org.objectweb.asm.ClassVisitor#visitAnnotation}.
+     * Class annotation. See
+     * {@link scala.tools.asm.ClassVisitor#visitAnnotation}.
      */
-    public abstract Printer visitClassAnnotation(
-        final String desc,
-        final boolean visible);
+    public abstract Printer visitClassAnnotation(final String desc,
+            final boolean visible);
 
     /**
-     * Class attribute.
-     * See {@link org.objectweb.asm.ClassVisitor#visitAttribute}.
+     * Class attribute. See
+     * {@link scala.tools.asm.ClassVisitor#visitAttribute}.
      */
     public abstract void visitClassAttribute(final Attribute attr);
 
     /**
-     * Class inner name.
-     * See {@link org.objectweb.asm.ClassVisitor#visitInnerClass}.
+     * Class inner name. See
+     * {@link scala.tools.asm.ClassVisitor#visitInnerClass}.
      */
-    public abstract void visitInnerClass(
-        final String name,
-        final String outerName,
-        final String innerName,
-        final int access);
+    public abstract void visitInnerClass(final String name,
+            final String outerName, final String innerName, final int access);
 
     /**
-     * Class field.
-     * See {@link org.objectweb.asm.ClassVisitor#visitField}.
+     * Class field. See {@link scala.tools.asm.ClassVisitor#visitField}.
      */
-    public abstract Printer visitField(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final Object value);
+    public abstract Printer visitField(final int access, final String name,
+            final String desc, final String signature, final Object value);
 
     /**
-     * Class method.
-     * See {@link org.objectweb.asm.ClassVisitor#visitMethod}.
+     * Class method. See {@link scala.tools.asm.ClassVisitor#visitMethod}.
      */
-    public abstract Printer visitMethod(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final String[] exceptions);
+    public abstract Printer visitMethod(final int access, final String name,
+            final String desc, final String signature, final String[] exceptions);
 
     /**
-     * Class end.
-     * See {@link org.objectweb.asm.ClassVisitor#visitEnd}.
+     * Class end. See {@link scala.tools.asm.ClassVisitor#visitEnd}.
      */
     public abstract void visitClassEnd();
 
@@ -232,37 +209,31 @@ public abstract class Printer {
     // ------------------------------------------------------------------------
 
     /**
-     * Annotation value.
-     * See {@link org.objectweb.asm.AnnotationVisitor#visit}.
+     * Annotation value. See {@link scala.tools.asm.AnnotationVisitor#visit}.
      */
     public abstract void visit(final String name, final Object value);
 
     /**
-     * Annotation enum value.
-     * See {@link org.objectweb.asm.AnnotationVisitor#visitEnum}.
+     * Annotation enum value. See
+     * {@link scala.tools.asm.AnnotationVisitor#visitEnum}.
      */
-    public abstract void visitEnum(
-        final String name,
-        final String desc,
-        final String value);
+    public abstract void visitEnum(final String name, final String desc,
+            final String value);
 
     /**
-     * Nested annotation value.
-     * See {@link org.objectweb.asm.AnnotationVisitor#visitAnnotation}.
+     * Nested annotation value. See
+     * {@link scala.tools.asm.AnnotationVisitor#visitAnnotation}.
      */
-    public abstract Printer visitAnnotation(
-        final String name,
-        final String desc);
+    public abstract Printer visitAnnotation(final String name, final String desc);
 
     /**
-     * Annotation array value.
-     * See {@link org.objectweb.asm.AnnotationVisitor#visitArray}.
+     * Annotation array value. See
+     * {@link scala.tools.asm.AnnotationVisitor#visitArray}.
      */
     public abstract Printer visitArray(final String name);
 
     /**
-     * Annotation end.
-     * See {@link org.objectweb.asm.AnnotationVisitor#visitEnd}.
+     * Annotation end. See {@link scala.tools.asm.AnnotationVisitor#visitEnd}.
      */
     public abstract void visitAnnotationEnd();
 
@@ -271,22 +242,20 @@ public abstract class Printer {
     // ------------------------------------------------------------------------
 
     /**
-     * Field annotation.
-     * See {@link org.objectweb.asm.FieldVisitor#visitAnnotation}.
+     * Field annotation. See
+     * {@link scala.tools.asm.FieldVisitor#visitAnnotation}.
      */
-    public abstract Printer visitFieldAnnotation(
-        final String desc,
-        final boolean visible);
+    public abstract Printer visitFieldAnnotation(final String desc,
+            final boolean visible);
 
     /**
-     * Field attribute.
-     * See {@link org.objectweb.asm.FieldVisitor#visitAttribute}.
+     * Field attribute. See
+     * {@link scala.tools.asm.FieldVisitor#visitAttribute}.
      */
     public abstract void visitFieldAttribute(final Attribute attr);
 
     /**
-     * Field end.
-     * See {@link org.objectweb.asm.FieldVisitor#visitEnd}.
+     * Field end. See {@link scala.tools.asm.FieldVisitor#visitEnd}.
      */
     public abstract void visitFieldEnd();
 
@@ -295,193 +264,161 @@ public abstract class Printer {
     // ------------------------------------------------------------------------
 
     /**
-     * Method default annotation.
-     * See {@link org.objectweb.asm.MethodVisitor#visitAnnotationDefault}.
+     * Method default annotation. See
+     * {@link scala.tools.asm.MethodVisitor#visitAnnotationDefault}.
      */
     public abstract Printer visitAnnotationDefault();
 
     /**
-     * Method annotation.
-     * See {@link org.objectweb.asm.MethodVisitor#visitAnnotation}.
+     * Method annotation. See
+     * {@link scala.tools.asm.MethodVisitor#visitAnnotation}.
      */
-    public abstract Printer visitMethodAnnotation(
-        final String desc,
-        final boolean visible);
+    public abstract Printer visitMethodAnnotation(final String desc,
+            final boolean visible);
 
     /**
-     * Method parameter annotation.
-     * See {@link org.objectweb.asm.MethodVisitor#visitParameterAnnotation}.
+     * Method parameter annotation. See
+     * {@link scala.tools.asm.MethodVisitor#visitParameterAnnotation}.
      */
-    public abstract Printer visitParameterAnnotation(
-        final int parameter,
-        final String desc,
-        final boolean visible);
+    public abstract Printer visitParameterAnnotation(final int parameter,
+            final String desc, final boolean visible);
 
     /**
-     * Method attribute.
-     * See {@link org.objectweb.asm.MethodVisitor#visitAttribute}.
+     * Method attribute. See
+     * {@link scala.tools.asm.MethodVisitor#visitAttribute}.
      */
     public abstract void visitMethodAttribute(final Attribute attr);
 
     /**
-     * Method start.
-     * See {@link org.objectweb.asm.MethodVisitor#visitCode}.
+     * Method start. See {@link scala.tools.asm.MethodVisitor#visitCode}.
      */
     public abstract void visitCode();
 
     /**
-     * Method stack frame.
-     * See {@link org.objectweb.asm.MethodVisitor#visitFrame}.
+     * Method stack frame. See
+     * {@link scala.tools.asm.MethodVisitor#visitFrame}.
      */
-    public abstract void visitFrame(
-        final int type,
-        final int nLocal,
-        final Object[] local,
-        final int nStack,
-        final Object[] stack);
+    public abstract void visitFrame(final int type, final int nLocal,
+            final Object[] local, final int nStack, final Object[] stack);
 
     /**
-     * Method instruction.
-     * See {@link org.objectweb.asm.MethodVisitor#visitInsn}.
+     * Method instruction. See {@link scala.tools.asm.MethodVisitor#visitInsn}.
      */
     public abstract void visitInsn(final int opcode);
 
     /**
-     * Method instruction.
-     * See {@link org.objectweb.asm.MethodVisitor#visitIntInsn}.
+     * Method instruction. See
+     * {@link scala.tools.asm.MethodVisitor#visitIntInsn}.
      */
     public abstract void visitIntInsn(final int opcode, final int operand);
 
     /**
-     * Method instruction.
-     * See {@link org.objectweb.asm.MethodVisitor#visitVarInsn}.
+     * Method instruction. See
+     * {@link scala.tools.asm.MethodVisitor#visitVarInsn}.
      */
     public abstract void visitVarInsn(final int opcode, final int var);
 
     /**
-     * Method instruction.
-     * See {@link org.objectweb.asm.MethodVisitor#visitTypeInsn}.
+     * Method instruction. See
+     * {@link scala.tools.asm.MethodVisitor#visitTypeInsn}.
      */
     public abstract void visitTypeInsn(final int opcode, final String type);
 
     /**
-     * Method instruction.
-     * See {@link org.objectweb.asm.MethodVisitor#visitFieldInsn}.
+     * Method instruction. See
+     * {@link scala.tools.asm.MethodVisitor#visitFieldInsn}.
      */
-    public abstract void visitFieldInsn(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc);
+    public abstract void visitFieldInsn(final int opcode, final String owner,
+            final String name, final String desc);
 
     /**
-     * Method instruction.
-     * See {@link org.objectweb.asm.MethodVisitor#visitMethodInsn}.
+     * Method instruction. See
+     * {@link scala.tools.asm.MethodVisitor#visitMethodInsn}.
      */
-    public abstract void visitMethodInsn(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc);
+    public abstract void visitMethodInsn(final int opcode, final String owner,
+            final String name, final String desc);
 
     /**
-     * Method instruction.
-     * See {@link org.objectweb.asm.MethodVisitor#visitInvokeDynamicInsn}.
+     * Method instruction. See
+     * {@link scala.tools.asm.MethodVisitor#visitInvokeDynamicInsn}.
      */
-    public abstract void visitInvokeDynamicInsn(
-        String name,
-        String desc,
-        Handle bsm,
-        Object... bsmArgs);
+    public abstract void visitInvokeDynamicInsn(String name, String desc,
+            Handle bsm, Object... bsmArgs);
 
     /**
-     * Method instruction.
-     * See {@link org.objectweb.asm.MethodVisitor#visitJumpInsn}.
+     * Method instruction. See
+     * {@link scala.tools.asm.MethodVisitor#visitJumpInsn}.
      */
     public abstract void visitJumpInsn(final int opcode, final Label label);
 
     /**
-     * Method label.
-     * See {@link org.objectweb.asm.MethodVisitor#visitLabel}.
+     * Method label. See {@link scala.tools.asm.MethodVisitor#visitLabel}.
      */
     public abstract void visitLabel(final Label label);
 
     /**
-     * Method instruction.
-     * See {@link org.objectweb.asm.MethodVisitor#visitLdcInsn}.
+     * Method instruction. See
+     * {@link scala.tools.asm.MethodVisitor#visitLdcInsn}.
      */
     public abstract void visitLdcInsn(final Object cst);
 
     /**
-     * Method instruction.
-     * See {@link org.objectweb.asm.MethodVisitor#visitIincInsn}.
+     * Method instruction. See
+     * {@link scala.tools.asm.MethodVisitor#visitIincInsn}.
      */
     public abstract void visitIincInsn(final int var, final int increment);
 
     /**
-     * Method instruction.
-     * See {@link org.objectweb.asm.MethodVisitor#visitTableSwitchInsn}.
+     * Method instruction. See
+     * {@link scala.tools.asm.MethodVisitor#visitTableSwitchInsn}.
      */
-    public abstract void visitTableSwitchInsn(
-        final int min,
-        final int max,
-        final Label dflt,
-        final Label... labels);
+    public abstract void visitTableSwitchInsn(final int min, final int max,
+            final Label dflt, final Label... labels);
 
     /**
-     * Method instruction.
-     * See {@link org.objectweb.asm.MethodVisitor#visitLookupSwitchInsn}.
+     * Method instruction. See
+     * {@link scala.tools.asm.MethodVisitor#visitLookupSwitchInsn}.
      */
-    public abstract void visitLookupSwitchInsn(
-        final Label dflt,
-        final int[] keys,
-        final Label[] labels);
+    public abstract void visitLookupSwitchInsn(final Label dflt,
+            final int[] keys, final Label[] labels);
 
     /**
-     * Method instruction.
-     * See {@link org.objectweb.asm.MethodVisitor#visitMultiANewArrayInsn}.
+     * Method instruction. See
+     * {@link scala.tools.asm.MethodVisitor#visitMultiANewArrayInsn}.
      */
-    public abstract void visitMultiANewArrayInsn(
-        final String desc,
-        final int dims);
+    public abstract void visitMultiANewArrayInsn(final String desc,
+            final int dims);
 
     /**
-     * Method exception handler.
-     * See {@link org.objectweb.asm.MethodVisitor#visitTryCatchBlock}.
+     * Method exception handler. See
+     * {@link scala.tools.asm.MethodVisitor#visitTryCatchBlock}.
      */
-    public abstract void visitTryCatchBlock(
-        final Label start,
-        final Label end,
-        final Label handler,
-        final String type);
+    public abstract void visitTryCatchBlock(final Label start, final Label end,
+            final Label handler, final String type);
 
     /**
-     * Method debug info.
-     * See {@link org.objectweb.asm.MethodVisitor#visitLocalVariable}.
+     * Method debug info. See
+     * {@link scala.tools.asm.MethodVisitor#visitLocalVariable}.
      */
-    public abstract void visitLocalVariable(
-        final String name,
-        final String desc,
-        final String signature,
-        final Label start,
-        final Label end,
-        final int index);
+    public abstract void visitLocalVariable(final String name,
+            final String desc, final String signature, final Label start,
+            final Label end, final int index);
 
     /**
-     * Method debug info.
-     * See {@link org.objectweb.asm.MethodVisitor#visitLineNumber}.
+     * Method debug info. See
+     * {@link scala.tools.asm.MethodVisitor#visitLineNumber}.
      */
     public abstract void visitLineNumber(final int line, final Label start);
 
     /**
-     * Method max stack and max locals.
-     * See {@link org.objectweb.asm.MethodVisitor#visitMaxs}.
+     * Method max stack and max locals. See
+     * {@link scala.tools.asm.MethodVisitor#visitMaxs}.
      */
     public abstract void visitMaxs(final int maxStack, final int maxLocals);
 
     /**
-     * Method end.
-     * See {@link org.objectweb.asm.MethodVisitor#visitEnd}.
+     * Method end. See {@link scala.tools.asm.MethodVisitor#visitEnd}.
      */
     public abstract void visitMethodEnd();
 
@@ -497,7 +434,8 @@ public abstract class Printer {
     /**
      * Prints the text constructed by this visitor.
      *
-     * @param pw the print writer to be used.
+     * @param pw
+     *            the print writer to be used.
      */
     public void print(final PrintWriter pw) {
         printList(pw, text);
@@ -506,8 +444,10 @@ public abstract class Printer {
     /**
      * Appends a quoted string to a given buffer.
      *
-     * @param buf the buffer where the string must be added.
-     * @param s the string to be added.
+     * @param buf
+     *            the buffer where the string must be added.
+     * @param s
+     *            the string to be added.
      */
     public static void appendString(final StringBuffer buf, final String s) {
         buf.append('\"');
@@ -541,9 +481,11 @@ public abstract class Printer {
     /**
      * Prints the given string tree.
      *
-     * @param pw the writer to be used to print the tree.
-     * @param l a string tree, i.e., a string list that can contain other string
-     *        lists, and so on recursively.
+     * @param pw
+     *            the writer to be used to print the tree.
+     * @param l
+     *            a string tree, i.e., a string list that can contain other
+     *            string lists, and so on recursively.
      */
     static void printList(final PrintWriter pw, final List<?> l) {
         for (int i = 0; i < l.size(); ++i) {
diff --git a/src/asm/scala/tools/asm/util/SignatureChecker.java b/src/asm/scala/tools/asm/util/SignatureChecker.java
deleted file mode 100644
index 71f0d80..0000000
--- a/src/asm/scala/tools/asm/util/SignatureChecker.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- */
-
-package scala.tools.asm.util;
-
-import scala.tools.asm.util.CheckMethodAdapter;
-import scala.tools.asm.MethodVisitor;
-
-/**
- * A subclass of ASM's CheckMethodAdapter for the sole purpose of accessing some protected methods there.
- *
- */
-public class SignatureChecker extends CheckMethodAdapter {
-
-    public SignatureChecker(final MethodVisitor mv) {
-        super(mv);
-    }
-
-    /**
-     * Checks a class signature.
-     *
-     * @param signature a string containing the signature that must be checked.
-     */
-    public static void checkClassSignature(final String signature) {
-      CheckMethodAdapter.checkClassSignature(signature);
-    }
-
-    /**
-     * Checks a method signature.
-     *
-     * @param signature a string containing the signature that must be checked.
-     */
-    public static void checkMethodSignature(final String signature) {
-      CheckMethodAdapter.checkMethodSignature(signature);
-    }
-
-    /**
-     * Checks a field signature.
-     *
-     * @param signature a string containing the signature that must be checked.
-     */
-    public static void checkFieldSignature(final String signature) {
-      CheckMethodAdapter.checkFieldSignature(signature);
-    }
-
-}
diff --git a/src/asm/scala/tools/asm/util/Textifiable.java b/src/asm/scala/tools/asm/util/Textifiable.java
index b80d013..85e051e 100644
--- a/src/asm/scala/tools/asm/util/Textifiable.java
+++ b/src/asm/scala/tools/asm/util/Textifiable.java
@@ -34,7 +34,7 @@ import java.util.Map;
 import scala.tools.asm.Label;
 
 /**
- * An {@link org.objectweb.asm.Attribute Attribute} that can print a readable
+ * An {@link scala.tools.asm.Attribute Attribute} that can print a readable
  * representation of itself.
  *
  * Implementations should construct readable output from an attribute data
@@ -47,8 +47,10 @@ public interface Textifiable {
     /**
      * Build a human readable representation of this attribute.
      *
-     * @param buf a buffer used for printing Java code.
-     * @param labelNames map of label instances to their names.
+     * @param buf
+     *            a buffer used for printing Java code.
+     * @param labelNames
+     *            map of label instances to their names.
      */
     void textify(StringBuffer buf, Map<Label, String> labelNames);
 }
diff --git a/src/asm/scala/tools/asm/util/Textifier.java b/src/asm/scala/tools/asm/util/Textifier.java
index 8d40ebd..a5c4f67 100644
--- a/src/asm/scala/tools/asm/util/Textifier.java
+++ b/src/asm/scala/tools/asm/util/Textifier.java
@@ -149,22 +149,24 @@ public class Textifier extends Printer {
     /**
      * Constructs a new {@link Textifier}.
      *
-     * @param api the ASM API version implemented by this visitor. Must be one
-     *        of {@link Opcodes#ASM4}.
+     * @param api
+     *            the ASM API version implemented by this visitor. Must be one
+     *            of {@link Opcodes#ASM4}.
      */
     protected Textifier(final int api) {
         super(api);
     }
 
     /**
-     * Prints a disassembled view of the given class to the standard output. <p>
-     * Usage: Textifier [-debug] <binary class name or class
-     * file name >
+     * Prints a disassembled view of the given class to the standard output.
+     * <p>
+     * Usage: Textifier [-debug] <binary class name or class file name>
      *
-     * @param args the command line arguments.
+     * @param args
+     *            the command line arguments.
      *
-     * @throws Exception if the class cannot be found, or if an IO exception
-     *         occurs.
+     * @throws Exception
+     *             if the class cannot be found, or if an IO exception occurs.
      */
     public static void main(final String[] args) throws Exception {
         int i = 0;
@@ -182,21 +184,20 @@ public class Textifier extends Printer {
             }
         }
         if (!ok) {
-            System.err.println("Prints a disassembled view of the given class.");
+            System.err
+                    .println("Prints a disassembled view of the given class.");
             System.err.println("Usage: Textifier [-debug] "
                     + "<fully qualified class name or class file name>");
             return;
         }
         ClassReader cr;
         if (args[i].endsWith(".class") || args[i].indexOf('\\') > -1
-                || args[i].indexOf('/') > -1)
-        {
+                || args[i].indexOf('/') > -1) {
             cr = new ClassReader(new FileInputStream(args[i]));
         } else {
             cr = new ClassReader(args[i]);
         }
-        cr.accept(new TraceClassVisitor(new PrintWriter(System.out)),
-                flags);
+        cr.accept(new TraceClassVisitor(new PrintWriter(System.out)), flags);
     }
 
     // ------------------------------------------------------------------------
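
The main method above boils down to the following hedged sketch (assuming the vendored TraceClassVisitor mirrors org.objectweb.asm.util.TraceClassVisitor):

    import java.io.PrintWriter;
    import scala.tools.asm.ClassReader;
    import scala.tools.asm.util.TraceClassVisitor;

    class TextifyDemo {
        public static void main(String[] args) throws Exception {
            // Prints a Textifier-style disassembly of java.lang.Runnable to stdout.
            new ClassReader("java.lang.Runnable")
                    .accept(new TraceClassVisitor(new PrintWriter(System.out)), 0);
        }
    }
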
@@ -204,38 +205,27 @@ public class Textifier extends Printer {
     // ------------------------------------------------------------------------
 
     @Override
-    public void visit(
-        final int version,
-        final int access,
-        final String name,
-        final String signature,
-        final String superName,
-        final String[] interfaces)
-    {
+    public void visit(final int version, final int access, final String name,
+            final String signature, final String superName,
+            final String[] interfaces) {
         int major = version & 0xFFFF;
         int minor = version >>> 16;
         buf.setLength(0);
-        buf.append("// class version ")
-                .append(major)
-                .append('.')
-                .append(minor)
-                .append(" (")
-                .append(version)
-                .append(")\n");
+        buf.append("// class version ").append(major).append('.').append(minor)
+                .append(" (").append(version).append(")\n");
         if ((access & Opcodes.ACC_DEPRECATED) != 0) {
             buf.append("// DEPRECATED\n");
         }
-        buf.append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+        buf.append("// access flags 0x")
+                .append(Integer.toHexString(access).toUpperCase()).append('\n');
 
         appendDescriptor(CLASS_SIGNATURE, signature);
         if (signature != null) {
             TraceSignatureVisitor sv = new TraceSignatureVisitor(access);
             SignatureReader r = new SignatureReader(signature);
             r.accept(sv);
-            buf.append("// declaration: ")
-                    .append(name)
-                    .append(sv.getDeclaration())
-                    .append('\n');
+            buf.append("// declaration: ").append(name)
+                    .append(sv.getDeclaration()).append('\n');
         }
 
         appendAccess(access & ~Opcodes.ACC_SUPER);
@@ -269,15 +259,11 @@ public class Textifier extends Printer {
     public void visitSource(final String file, final String debug) {
         buf.setLength(0);
         if (file != null) {
-            buf.append(tab)
-                    .append("// compiled from: ")
-                    .append(file)
+            buf.append(tab).append("// compiled from: ").append(file)
                     .append('\n');
         }
         if (debug != null) {
-            buf.append(tab)
-                    .append("// debug info: ")
-                    .append(debug)
+            buf.append(tab).append("// debug info: ").append(debug)
                     .append('\n');
         }
         if (buf.length() > 0) {
@@ -286,11 +272,8 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public void visitOuterClass(
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitOuterClass(final String owner, final String name,
+            final String desc) {
         buf.setLength(0);
         buf.append(tab).append("OUTERCLASS ");
         appendDescriptor(INTERNAL_NAME, owner);
@@ -304,10 +287,8 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public Textifier visitClassAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public Textifier visitClassAnnotation(final String desc,
+            final boolean visible) {
         text.add("\n");
         return visitAnnotation(desc, visible);
     }
@@ -319,15 +300,13 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public void visitInnerClass(
-        final String name,
-        final String outerName,
-        final String innerName,
-        final int access)
-    {
+    public void visitInnerClass(final String name, final String outerName,
+            final String innerName, final int access) {
         buf.setLength(0);
         buf.append(tab).append("// access flags 0x");
-        buf.append(Integer.toHexString(access & ~Opcodes.ACC_SUPER).toUpperCase()).append('\n');
+        buf.append(
+                Integer.toHexString(access & ~Opcodes.ACC_SUPER).toUpperCase())
+                .append('\n');
         buf.append(tab);
         appendAccess(access);
         buf.append("INNERCLASS ");
@@ -341,19 +320,15 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public Textifier visitField(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final Object value)
-    {
+    public Textifier visitField(final int access, final String name,
+            final String desc, final String signature, final Object value) {
         buf.setLength(0);
         buf.append('\n');
         if ((access & Opcodes.ACC_DEPRECATED) != 0) {
             buf.append(tab).append("// DEPRECATED\n");
         }
-        buf.append(tab).append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+        buf.append(tab).append("// access flags 0x")
+                .append(Integer.toHexString(access).toUpperCase()).append('\n');
         if (signature != null) {
             buf.append(tab);
             appendDescriptor(FIELD_SIGNATURE, signature);
@@ -361,10 +336,8 @@ public class Textifier extends Printer {
             TraceSignatureVisitor sv = new TraceSignatureVisitor(0);
             SignatureReader r = new SignatureReader(signature);
             r.acceptType(sv);
-            buf.append(tab)
-                    .append("// declaration: ")
-                    .append(sv.getDeclaration())
-                    .append('\n');
+            buf.append(tab).append("// declaration: ")
+                    .append(sv.getDeclaration()).append('\n');
         }
 
         buf.append(tab);
@@ -390,19 +363,15 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public Textifier visitMethod(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final String[] exceptions)
-    {
+    public Textifier visitMethod(final int access, final String name,
+            final String desc, final String signature, final String[] exceptions) {
         buf.setLength(0);
         buf.append('\n');
         if ((access & Opcodes.ACC_DEPRECATED) != 0) {
             buf.append(tab).append("// DEPRECATED\n");
         }
-        buf.append(tab).append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+        buf.append(tab).append("// access flags 0x")
+                .append(Integer.toHexString(access).toUpperCase()).append('\n');
 
         if (signature != null) {
             buf.append(tab);
@@ -415,12 +384,8 @@ public class Textifier extends Printer {
             String genericReturn = v.getReturnType();
             String genericExceptions = v.getExceptions();
 
-            buf.append(tab)
-                    .append("// declaration: ")
-                    .append(genericReturn)
-                    .append(' ')
-                    .append(name)
-                    .append(genericDecl);
+            buf.append(tab).append("// declaration: ").append(genericReturn)
+                    .append(' ').append(name).append(genericDecl);
             if (genericExceptions != null) {
                 buf.append(" throws ").append(genericExceptions);
             }
@@ -593,11 +558,8 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public void visitEnum(
-        final String name,
-        final String desc,
-        final String value)
-    {
+    public void visitEnum(final String name, final String desc,
+            final String value) {
         buf.setLength(0);
         appendComa(valueNumber++);
         if (name != null) {
@@ -609,10 +571,7 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public Textifier visitAnnotation(
-        final String name,
-        final String desc)
-    {
+    public Textifier visitAnnotation(final String name, final String desc) {
         buf.setLength(0);
         appendComa(valueNumber++);
         if (name != null) {
@@ -629,9 +588,7 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public Textifier visitArray(
-        final String name)
-    {
+    public Textifier visitArray(final String name) {
         buf.setLength(0);
         appendComa(valueNumber++);
         if (name != null) {
@@ -654,10 +611,8 @@ public class Textifier extends Printer {
     // ------------------------------------------------------------------------
 
     @Override
-    public Textifier visitFieldAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public Textifier visitFieldAnnotation(final String desc,
+            final boolean visible) {
         return visitAnnotation(desc, visible);
     }
 
@@ -684,19 +639,14 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public Textifier visitMethodAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public Textifier visitMethodAnnotation(final String desc,
+            final boolean visible) {
         return visitAnnotation(desc, visible);
     }
 
     @Override
-    public Textifier visitParameterAnnotation(
-        final int parameter,
-        final String desc,
-        final boolean visible)
-    {
+    public Textifier visitParameterAnnotation(final int parameter,
+            final String desc, final boolean visible) {
         buf.setLength(0);
         buf.append(tab2).append('@');
         appendDescriptor(FIELD_DESCRIPTOR, desc);
@@ -730,40 +680,35 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public void visitFrame(
-        final int type,
-        final int nLocal,
-        final Object[] local,
-        final int nStack,
-        final Object[] stack)
-    {
+    public void visitFrame(final int type, final int nLocal,
+            final Object[] local, final int nStack, final Object[] stack) {
         buf.setLength(0);
         buf.append(ltab);
         buf.append("FRAME ");
         switch (type) {
-            case Opcodes.F_NEW:
-            case Opcodes.F_FULL:
-                buf.append("FULL [");
-                appendFrameTypes(nLocal, local);
-                buf.append("] [");
-                appendFrameTypes(nStack, stack);
-                buf.append(']');
-                break;
-            case Opcodes.F_APPEND:
-                buf.append("APPEND [");
-                appendFrameTypes(nLocal, local);
-                buf.append(']');
-                break;
-            case Opcodes.F_CHOP:
-                buf.append("CHOP ").append(nLocal);
-                break;
-            case Opcodes.F_SAME:
-                buf.append("SAME");
-                break;
-            case Opcodes.F_SAME1:
-                buf.append("SAME1 ");
-                appendFrameTypes(1, stack);
-                break;
+        case Opcodes.F_NEW:
+        case Opcodes.F_FULL:
+            buf.append("FULL [");
+            appendFrameTypes(nLocal, local);
+            buf.append("] [");
+            appendFrameTypes(nStack, stack);
+            buf.append(']');
+            break;
+        case Opcodes.F_APPEND:
+            buf.append("APPEND [");
+            appendFrameTypes(nLocal, local);
+            buf.append(']');
+            break;
+        case Opcodes.F_CHOP:
+            buf.append("CHOP ").append(nLocal);
+            break;
+        case Opcodes.F_SAME:
+            buf.append("SAME");
+            break;
+        case Opcodes.F_SAME1:
+            buf.append("SAME1 ");
+            appendFrameTypes(1, stack);
+            break;
         }
         buf.append('\n');
         text.add(buf.toString());
@@ -782,20 +727,15 @@ public class Textifier extends Printer {
         buf.append(tab2)
                 .append(OPCODES[opcode])
                 .append(' ')
-                .append(opcode == Opcodes.NEWARRAY
-                        ? TYPES[operand]
-                        : Integer.toString(operand))
-                .append('\n');
+                .append(opcode == Opcodes.NEWARRAY ? TYPES[operand] : Integer
+                        .toString(operand)).append('\n');
         text.add(buf.toString());
     }
 
     @Override
     public void visitVarInsn(final int opcode, final int var) {
         buf.setLength(0);
-        buf.append(tab2)
-                .append(OPCODES[opcode])
-                .append(' ')
-                .append(var)
+        buf.append(tab2).append(OPCODES[opcode]).append(' ').append(var)
                 .append('\n');
         text.add(buf.toString());
     }
@@ -810,12 +750,8 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public void visitFieldInsn(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitFieldInsn(final int opcode, final String owner,
+            final String name, final String desc) {
         buf.setLength(0);
         buf.append(tab2).append(OPCODES[opcode]).append(' ');
         appendDescriptor(INTERNAL_NAME, owner);
@@ -826,12 +762,8 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public void visitMethodInsn(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitMethodInsn(final int opcode, final String owner,
+            final String name, final String desc) {
         buf.setLength(0);
         buf.append(tab2).append(OPCODES[opcode]).append(' ');
         appendDescriptor(INTERNAL_NAME, owner);
@@ -842,12 +774,8 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public void visitInvokeDynamicInsn(
-        String name,
-        String desc,
-        Handle bsm,
-        Object... bsmArgs)
-    {
+    public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+            Object... bsmArgs) {
         buf.setLength(0);
         buf.append(tab2).append("INVOKEDYNAMIC").append(' ');
         buf.append(name);
@@ -855,11 +783,11 @@ public class Textifier extends Printer {
         buf.append(" [");
         appendHandle(bsm);
         buf.append(tab3).append("// arguments:");
-        if(bsmArgs.length == 0) {
+        if (bsmArgs.length == 0) {
             buf.append(" none");
         } else {
             buf.append('\n').append(tab3);
-            for(int i = 0; i < bsmArgs.length; i++) {
+            for (int i = 0; i < bsmArgs.length; i++) {
                 Object cst = bsmArgs[i];
                 if (cst instanceof String) {
                     Printer.appendString(buf, (String) cst);
@@ -915,22 +843,14 @@ public class Textifier extends Printer {
     @Override
     public void visitIincInsn(final int var, final int increment) {
         buf.setLength(0);
-        buf.append(tab2)
-                .append("IINC ")
-                .append(var)
-                .append(' ')
-                .append(increment)
-                .append('\n');
+        buf.append(tab2).append("IINC ").append(var).append(' ')
+                .append(increment).append('\n');
         text.add(buf.toString());
     }
 
     @Override
-    public void visitTableSwitchInsn(
-        final int min,
-        final int max,
-        final Label dflt,
-        final Label... labels)
-    {
+    public void visitTableSwitchInsn(final int min, final int max,
+            final Label dflt, final Label... labels) {
         buf.setLength(0);
         buf.append(tab2).append("TABLESWITCH\n");
         for (int i = 0; i < labels.length; ++i) {
@@ -945,11 +865,8 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public void visitLookupSwitchInsn(
-        final Label dflt,
-        final int[] keys,
-        final Label[] labels)
-    {
+    public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+            final Label[] labels) {
         buf.setLength(0);
         buf.append(tab2).append("LOOKUPSWITCH\n");
         for (int i = 0; i < labels.length; ++i) {
@@ -973,12 +890,8 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public void visitTryCatchBlock(
-        final Label start,
-        final Label end,
-        final Label handler,
-        final String type)
-    {
+    public void visitTryCatchBlock(final Label start, final Label end,
+            final Label handler, final String type) {
         buf.setLength(0);
         buf.append(tab2).append("TRYCATCHBLOCK ");
         appendLabel(start);
@@ -993,14 +906,9 @@ public class Textifier extends Printer {
     }
 
     @Override
-    public void visitLocalVariable(
-        final String name,
-        final String desc,
-        final String signature,
-        final Label start,
-        final Label end,
-        final int index)
-    {
+    public void visitLocalVariable(final String name, final String desc,
+            final String signature, final Label start, final Label end,
+            final int index) {
         buf.setLength(0);
         buf.append(tab2).append("LOCALVARIABLE ").append(name).append(' ');
         appendDescriptor(FIELD_DESCRIPTOR, desc);
@@ -1017,10 +925,8 @@ public class Textifier extends Printer {
             TraceSignatureVisitor sv = new TraceSignatureVisitor(0);
             SignatureReader r = new SignatureReader(signature);
             r.acceptType(sv);
-            buf.append(tab2)
-                    .append("// declaration: ")
-                    .append(sv.getDeclaration())
-                    .append('\n');
+            buf.append(tab2).append("// declaration: ")
+                    .append(sv.getDeclaration()).append('\n');
         }
         text.add(buf.toString());
     }
@@ -1056,14 +962,13 @@ public class Textifier extends Printer {
     /**
      * Prints a disassembled view of the given annotation.
      *
-     * @param desc the class descriptor of the annotation class.
-     * @param visible <tt>true</tt> if the annotation is visible at runtime.
+     * @param desc
+     *            the class descriptor of the annotation class.
+     * @param visible
+     *            <tt>true</tt> if the annotation is visible at runtime.
      * @return a visitor to visit the annotation values.
      */
-    public Textifier visitAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public Textifier visitAnnotation(final String desc, final boolean visible) {
         buf.setLength(0);
         buf.append(tab).append('@');
         appendDescriptor(FIELD_DESCRIPTOR, desc);
@@ -1078,7 +983,8 @@ public class Textifier extends Printer {
     /**
      * Prints a disassembled view of the given attribute.
      *
-     * @param attr an attribute.
+     * @param attr
+     *            an attribute.
      */
     public void visitAttribute(final Attribute attr) {
         buf.setLength(0);
@@ -1111,15 +1017,16 @@ public class Textifier extends Printer {
      * Appends an internal name, a type descriptor or a type signature to
      * {@link #buf buf}.
      *
-     * @param type indicates if desc is an internal name, a field descriptor, a
-     *        method descriptor, a class signature, ...
-     * @param desc an internal name, type descriptor, or type signature. May be
-     *        <tt>null</tt>.
+     * @param type
+     *            indicates if desc is an internal name, a field descriptor, a
+     *            method descriptor, a class signature, ...
+     * @param desc
+     *            an internal name, type descriptor, or type signature. May be
+     *            <tt>null</tt>.
      */
     protected void appendDescriptor(final int type, final String desc) {
         if (type == CLASS_SIGNATURE || type == FIELD_SIGNATURE
-                || type == METHOD_SIGNATURE)
-        {
+                || type == METHOD_SIGNATURE) {
             if (desc != null) {
                 buf.append("// signature ").append(desc).append('\n');
             }
@@ -1132,7 +1039,8 @@ public class Textifier extends Printer {
      * Appends the name of the given label to {@link #buf buf}. Creates a new
      * label name if the given label does not yet have one.
      *
-     * @param l a label.
+     * @param l
+     *            a label.
      */
     protected void appendLabel(final Label l) {
         if (labelNames == null) {
@@ -1149,40 +1057,42 @@ public class Textifier extends Printer {
     /**
      * Appends the information about the given handle to {@link #buf buf}.
      *
-     * @param h a handle, non null.
+     * @param h
+     *            a handle, non null.
      */
     protected void appendHandle(final Handle h) {
         buf.append('\n').append(tab3);
         int tag = h.getTag();
-        buf.append("// handle kind 0x").append(Integer.toHexString(tag)).append(" : ");
+        buf.append("// handle kind 0x").append(Integer.toHexString(tag))
+                .append(" : ");
         switch (tag) {
-            case Opcodes.H_GETFIELD:
-                buf.append("GETFIELD");
-                break;
-            case Opcodes.H_GETSTATIC:
-                buf.append("GETSTATIC");
-                break;
-            case Opcodes.H_PUTFIELD:
-                buf.append("PUTFIELD");
-                break;
-            case Opcodes.H_PUTSTATIC:
-                buf.append("PUTSTATIC");
-                break;
-            case Opcodes.H_INVOKEINTERFACE:
-                buf.append("INVOKEINTERFACE");
-                break;
-            case Opcodes.H_INVOKESPECIAL:
-                buf.append("INVOKESPECIAL");
-                break;
-            case Opcodes.H_INVOKESTATIC:
-                buf.append("INVOKESTATIC");
-                break;
-            case Opcodes.H_INVOKEVIRTUAL:
-                buf.append("INVOKEVIRTUAL");
-                break;
-            case Opcodes.H_NEWINVOKESPECIAL:
-                buf.append("NEWINVOKESPECIAL");
-                break;
+        case Opcodes.H_GETFIELD:
+            buf.append("GETFIELD");
+            break;
+        case Opcodes.H_GETSTATIC:
+            buf.append("GETSTATIC");
+            break;
+        case Opcodes.H_PUTFIELD:
+            buf.append("PUTFIELD");
+            break;
+        case Opcodes.H_PUTSTATIC:
+            buf.append("PUTSTATIC");
+            break;
+        case Opcodes.H_INVOKEINTERFACE:
+            buf.append("INVOKEINTERFACE");
+            break;
+        case Opcodes.H_INVOKESPECIAL:
+            buf.append("INVOKESPECIAL");
+            break;
+        case Opcodes.H_INVOKESTATIC:
+            buf.append("INVOKESTATIC");
+            break;
+        case Opcodes.H_INVOKEVIRTUAL:
+            buf.append("INVOKEVIRTUAL");
+            break;
+        case Opcodes.H_NEWINVOKESPECIAL:
+            buf.append("NEWINVOKESPECIAL");
+            break;
         }
         buf.append('\n');
         buf.append(tab3);
@@ -1195,10 +1105,11 @@ public class Textifier extends Printer {
     }
 
     /**
-     * Appends a string representation of the given access modifiers to {@link
-     * #buf buf}.
+     * Appends a string representation of the given access modifiers to
+     * {@link #buf buf}.
      *
-     * @param access some access modifiers.
+     * @param access
+     *            some access modifiers.
      */
     private void appendAccess(final int access) {
         if ((access & Opcodes.ACC_PUBLIC) != 0) {
@@ -1231,6 +1142,9 @@ public class Textifier extends Printer {
         if ((access & Opcodes.ACC_STRICT) != 0) {
             buf.append("strictfp ");
         }
+        if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+            buf.append("synthetic ");
+        }
         if ((access & Opcodes.ACC_ENUM) != 0) {
             buf.append("enum ");
         }
@@ -1256,27 +1170,27 @@ public class Textifier extends Printer {
                 }
             } else if (o[i] instanceof Integer) {
                 switch (((Integer) o[i]).intValue()) {
-                    case 0:
-                        appendDescriptor(FIELD_DESCRIPTOR, "T");
-                        break;
-                    case 1:
-                        appendDescriptor(FIELD_DESCRIPTOR, "I");
-                        break;
-                    case 2:
-                        appendDescriptor(FIELD_DESCRIPTOR, "F");
-                        break;
-                    case 3:
-                        appendDescriptor(FIELD_DESCRIPTOR, "D");
-                        break;
-                    case 4:
-                        appendDescriptor(FIELD_DESCRIPTOR, "J");
-                        break;
-                    case 5:
-                        appendDescriptor(FIELD_DESCRIPTOR, "N");
-                        break;
-                    case 6:
-                        appendDescriptor(FIELD_DESCRIPTOR, "U");
-                        break;
+                case 0:
+                    appendDescriptor(FIELD_DESCRIPTOR, "T");
+                    break;
+                case 1:
+                    appendDescriptor(FIELD_DESCRIPTOR, "I");
+                    break;
+                case 2:
+                    appendDescriptor(FIELD_DESCRIPTOR, "F");
+                    break;
+                case 3:
+                    appendDescriptor(FIELD_DESCRIPTOR, "D");
+                    break;
+                case 4:
+                    appendDescriptor(FIELD_DESCRIPTOR, "J");
+                    break;
+                case 5:
+                    appendDescriptor(FIELD_DESCRIPTOR, "N");
+                    break;
+                case 6:
+                    appendDescriptor(FIELD_DESCRIPTOR, "U");
+                    break;
                 }
             } else {
                 appendLabel((Label) o[i]);
diff --git a/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
index f112609..33e7cf0 100644
--- a/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
@@ -58,33 +58,26 @@ public final class TraceAnnotationVisitor extends AnnotationVisitor {
     }
 
     @Override
-    public void visitEnum(
-        final String name,
-        final String desc,
-        final String value)
-    {
+    public void visitEnum(final String name, final String desc,
+            final String value) {
         p.visitEnum(name, desc, value);
         super.visitEnum(name, desc, value);
     }
 
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String name,
-        final String desc)
-    {
+    public AnnotationVisitor visitAnnotation(final String name,
+            final String desc) {
         Printer p = this.p.visitAnnotation(name, desc);
-        AnnotationVisitor av = this.av == null
-                ? null
-                : this.av.visitAnnotation(name, desc);
+        AnnotationVisitor av = this.av == null ? null : this.av
+                .visitAnnotation(name, desc);
         return new TraceAnnotationVisitor(av, p);
     }
 
     @Override
     public AnnotationVisitor visitArray(final String name) {
         Printer p = this.p.visitArray(name);
-        AnnotationVisitor av = this.av == null
-                ? null
-                : this.av.visitArray(name);
+        AnnotationVisitor av = this.av == null ? null : this.av
+                .visitArray(name);
         return new TraceAnnotationVisitor(av, p);
     }
 
diff --git a/src/asm/scala/tools/asm/util/TraceClassVisitor.java b/src/asm/scala/tools/asm/util/TraceClassVisitor.java
index bb830b7..ff7a017 100644
--- a/src/asm/scala/tools/asm/util/TraceClassVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceClassVisitor.java
@@ -42,30 +42,41 @@ import scala.tools.asm.Opcodes;
  * A {@link ClassVisitor} that prints the classes it visits with a
  * {@link Printer}. This class visitor can be used in the middle of a class
  * visitor chain to trace the class that is visited at a given point in this
- * chain. This may be useful for debugging purposes. <p> The trace printed when
- * visiting the <tt>Hello</tt> class is the following: <p> <blockquote>
- * 
- * <pre> // class version 49.0 (49) // access flags 0x21 public class Hello {
- * 
+ * chain. This may be useful for debugging purposes.
+ * <p>
+ * The trace printed when visiting the <tt>Hello</tt> class is the following:
+ * <p>
+ * <blockquote>
+ *
+ * <pre>
+ * // class version 49.0 (49) // access flags 0x21 public class Hello {
+ *
  * // compiled from: Hello.java
- * 
+ *
  * // access flags 0x1 public <init> ()V ALOAD 0 INVOKESPECIAL
  * java/lang/Object <init> ()V RETURN MAXSTACK = 1 MAXLOCALS = 1
- * 
+ *
  * // access flags 0x9 public static main ([Ljava/lang/String;)V GETSTATIC
  * java/lang/System out Ljava/io/PrintStream; LDC "hello"
  * INVOKEVIRTUAL java/io/PrintStream println (Ljava/lang/String;)V RETURN
- * MAXSTACK = 2 MAXLOCALS = 1 } </pre>
- * 
- * </blockquote> where <tt>Hello</tt> is defined by: <p> <blockquote>
- * 
- * <pre> public class Hello {
- * 
- * public static void main(String[] args) {
- * System.out.println("hello"); } } </pre>
- * 
+ * MAXSTACK = 2 MAXLOCALS = 1 }
+ * </pre>
+ *
+ * </blockquote> where <tt>Hello</tt> is defined by:
+ * <p>
+ * <blockquote>
+ *
+ * <pre>
+ * public class Hello {
+ *
+ *     public static void main(String[] args) {
+ *         System.out.println("hello");
+ *     }
+ * }
+ * </pre>
+ *
  * </blockquote>
- * 
+ *
  * @author Eric Bruneton
  * @author Eugene Kuleshov
  */
@@ -83,8 +94,9 @@ public final class TraceClassVisitor extends ClassVisitor {
 
     /**
      * Constructs a new {@link TraceClassVisitor}.
-     * 
-     * @param pw the print writer to be used to print the class.
+     *
+     * @param pw
+     *            the print writer to be used to print the class.
      */
     public TraceClassVisitor(final PrintWriter pw) {
         this(null, pw);
@@ -92,10 +104,12 @@ public final class TraceClassVisitor extends ClassVisitor {
 
     /**
      * Constructs a new {@link TraceClassVisitor}.
-     * 
-     * @param cv the {@link ClassVisitor} to which this visitor delegates calls.
-     *        May be <tt>null</tt>.
-     * @param pw the print writer to be used to print the class.
+     *
+     * @param cv
+     *            the {@link ClassVisitor} to which this visitor delegates
+     *            calls. May be <tt>null</tt>.
+     * @param pw
+     *            the print writer to be used to print the class.
      */
     public TraceClassVisitor(final ClassVisitor cv, final PrintWriter pw) {
         this(cv, new Textifier(), pw);
@@ -103,33 +117,28 @@ public final class TraceClassVisitor extends ClassVisitor {
 
     /**
      * Constructs a new {@link TraceClassVisitor}.
-     * 
-     * @param cv the {@link ClassVisitor} to which this visitor delegates calls.
-     *        May be <tt>null</tt>.
-     * @param p the object that actually converts visit events into text.
-     * @param pw the print writer to be used to print the class. May be null if
-     *        you simply want to use the result via
-     *        {@link Printer#getText()}, instead of printing it.
+     *
+     * @param cv
+     *            the {@link ClassVisitor} to which this visitor delegates
+     *            calls. May be <tt>null</tt>.
+     * @param p
+     *            the object that actually converts visit events into text.
+     * @param pw
+     *            the print writer to be used to print the class. May be null if
+     *            you simply want to use the result via
+     *            {@link Printer#getText()}, instead of printing it.
      */
-    public TraceClassVisitor(
-        final ClassVisitor cv,
-        final Printer p,
-        final PrintWriter pw)
-    {
+    public TraceClassVisitor(final ClassVisitor cv, final Printer p,
+            final PrintWriter pw) {
         super(Opcodes.ASM4, cv);
         this.pw = pw;
         this.p = p;
     }
 
     @Override
-    public void visit(
-        final int version,
-        final int access,
-        final String name,
-        final String signature,
-        final String superName,
-        final String[] interfaces)
-    {
+    public void visit(final int version, final int access, final String name,
+            final String signature, final String superName,
+            final String[] interfaces) {
         p.visit(version, access, name, signature, superName, interfaces);
         super.visit(version, access, name, signature, superName, interfaces);
     }
@@ -141,20 +150,15 @@ public final class TraceClassVisitor extends ClassVisitor {
     }
 
     @Override
-    public void visitOuterClass(
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitOuterClass(final String owner, final String name,
+            final String desc) {
         p.visitOuterClass(owner, name, desc);
         super.visitOuterClass(owner, name, desc);
     }
 
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public AnnotationVisitor visitAnnotation(final String desc,
+            final boolean visible) {
         Printer p = this.p.visitClassAnnotation(desc, visible);
         AnnotationVisitor av = cv == null ? null : cv.visitAnnotation(desc,
                 visible);
@@ -168,55 +172,28 @@ public final class TraceClassVisitor extends ClassVisitor {
     }
 
     @Override
-    public void visitInnerClass(
-        final String name,
-        final String outerName,
-        final String innerName,
-        final int access)
-    {
+    public void visitInnerClass(final String name, final String outerName,
+            final String innerName, final int access) {
         p.visitInnerClass(name, outerName, innerName, access);
         super.visitInnerClass(name, outerName, innerName, access);
     }
 
     @Override
-    public FieldVisitor visitField(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final Object value)
-    {
-        Printer p = this.p.visitField(access,
-                name,
-                desc,
-                signature,
-                value);
-        FieldVisitor fv = cv == null ? null : cv.visitField(access,
-                name,
-                desc,
-                signature,
-                value);
+    public FieldVisitor visitField(final int access, final String name,
+            final String desc, final String signature, final Object value) {
+        Printer p = this.p.visitField(access, name, desc, signature, value);
+        FieldVisitor fv = cv == null ? null : cv.visitField(access, name, desc,
+                signature, value);
         return new TraceFieldVisitor(fv, p);
     }
 
     @Override
-    public MethodVisitor visitMethod(
-        final int access,
-        final String name,
-        final String desc,
-        final String signature,
-        final String[] exceptions)
-    {
-        Printer p = this.p.visitMethod(access,
-                name,
-                desc,
-                signature,
-                exceptions);
-        MethodVisitor mv = cv == null ? null : cv.visitMethod(access,
-                name,
-                desc,
-                signature,
+    public MethodVisitor visitMethod(final int access, final String name,
+            final String desc, final String signature, final String[] exceptions) {
+        Printer p = this.p.visitMethod(access, name, desc, signature,
                 exceptions);
+        MethodVisitor mv = cv == null ? null : cv.visitMethod(access, name,
+                desc, signature, exceptions);
         return new TraceMethodVisitor(mv, p);
     }
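
The reworked Javadoc above illustrates the trace that a TraceClassVisitor/Textifier pair prints for a small Hello class. As a point of reference only, a minimal driver might look like the following Scala sketch (assumptions: a compiled Hello.class is on the classpath, and the repackaged ASM classes live under scala.tools.asm as in this tree):

    import java.io.PrintWriter
    import scala.tools.asm.ClassReader
    import scala.tools.asm.util.TraceClassVisitor

    // Read Hello.class from the classpath and print its disassembly to stdout.
    val reader  = new ClassReader("Hello")
    val printer = new TraceClassVisitor(new PrintWriter(System.out))
    reader.accept(printer, 0)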
 
diff --git a/src/asm/scala/tools/asm/util/TraceFieldVisitor.java b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
index f537e83..9547a70 100644
--- a/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
@@ -37,7 +37,7 @@ import scala.tools.asm.Opcodes;
 /**
  * A {@link FieldVisitor} that prints the fields it visits with a
  * {@link Printer}.
- * 
+ *
  * @author Eric Bruneton
  */
 public final class TraceFieldVisitor extends FieldVisitor {
@@ -52,12 +52,10 @@ public final class TraceFieldVisitor extends FieldVisitor {
         super(Opcodes.ASM4, fv);
         this.p = p;
     }
-        
+
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public AnnotationVisitor visitAnnotation(final String desc,
+            final boolean visible) {
         Printer p = this.p.visitFieldAnnotation(desc, visible);
         AnnotationVisitor av = fv == null ? null : fv.visitAnnotation(desc,
                 visible);
diff --git a/src/asm/scala/tools/asm/util/TraceMethodVisitor.java b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
index 9aabf20..9034567 100644
--- a/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
@@ -56,10 +56,8 @@ public final class TraceMethodVisitor extends MethodVisitor {
     }
 
     @Override
-    public AnnotationVisitor visitAnnotation(
-        final String desc,
-        final boolean visible)
-    {
+    public AnnotationVisitor visitAnnotation(final String desc,
+            final boolean visible) {
         Printer p = this.p.visitMethodAnnotation(desc, visible);
         AnnotationVisitor av = mv == null ? null : mv.visitAnnotation(desc,
                 visible);
@@ -80,17 +78,11 @@ public final class TraceMethodVisitor extends MethodVisitor {
     }
 
     @Override
-    public AnnotationVisitor visitParameterAnnotation(
-        final int parameter,
-        final String desc,
-        final boolean visible)
-    {
-        Printer p = this.p.visitParameterAnnotation(parameter,
-                desc,
-                visible);
-        AnnotationVisitor av = mv == null
-                ? null
-                : mv.visitParameterAnnotation(parameter, desc, visible);
+    public AnnotationVisitor visitParameterAnnotation(final int parameter,
+            final String desc, final boolean visible) {
+        Printer p = this.p.visitParameterAnnotation(parameter, desc, visible);
+        AnnotationVisitor av = mv == null ? null : mv.visitParameterAnnotation(
+                parameter, desc, visible);
         return new TraceAnnotationVisitor(av, p);
     }
 
@@ -101,13 +93,8 @@ public final class TraceMethodVisitor extends MethodVisitor {
     }
 
     @Override
-    public void visitFrame(
-        final int type,
-        final int nLocal,
-        final Object[] local,
-        final int nStack,
-        final Object[] stack)
-    {
+    public void visitFrame(final int type, final int nLocal,
+            final Object[] local, final int nStack, final Object[] stack) {
         p.visitFrame(type, nLocal, local, nStack, stack);
         super.visitFrame(type, nLocal, local, nStack, stack);
     }
@@ -137,34 +124,22 @@ public final class TraceMethodVisitor extends MethodVisitor {
     }
 
     @Override
-    public void visitFieldInsn(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitFieldInsn(final int opcode, final String owner,
+            final String name, final String desc) {
         p.visitFieldInsn(opcode, owner, name, desc);
         super.visitFieldInsn(opcode, owner, name, desc);
     }
 
     @Override
-    public void visitMethodInsn(
-        final int opcode,
-        final String owner,
-        final String name,
-        final String desc)
-    {
+    public void visitMethodInsn(final int opcode, final String owner,
+            final String name, final String desc) {
         p.visitMethodInsn(opcode, owner, name, desc);
         super.visitMethodInsn(opcode, owner, name, desc);
     }
 
     @Override
-    public void visitInvokeDynamicInsn(
-        String name,
-        String desc,
-        Handle bsm,
-        Object... bsmArgs)
-    {
+    public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+            Object... bsmArgs) {
         p.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
         super.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
     }
@@ -194,22 +169,15 @@ public final class TraceMethodVisitor extends MethodVisitor {
     }
 
     @Override
-    public void visitTableSwitchInsn(
-        final int min,
-        final int max,
-        final Label dflt,
-        final Label... labels)
-    {
+    public void visitTableSwitchInsn(final int min, final int max,
+            final Label dflt, final Label... labels) {
         p.visitTableSwitchInsn(min, max, dflt, labels);
         super.visitTableSwitchInsn(min, max, dflt, labels);
     }
 
     @Override
-    public void visitLookupSwitchInsn(
-        final Label dflt,
-        final int[] keys,
-        final Label[] labels)
-    {
+    public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+            final Label[] labels) {
         p.visitLookupSwitchInsn(dflt, keys, labels);
         super.visitLookupSwitchInsn(dflt, keys, labels);
     }
@@ -221,25 +189,16 @@ public final class TraceMethodVisitor extends MethodVisitor {
     }
 
     @Override
-    public void visitTryCatchBlock(
-        final Label start,
-        final Label end,
-        final Label handler,
-        final String type)
-    {
+    public void visitTryCatchBlock(final Label start, final Label end,
+            final Label handler, final String type) {
         p.visitTryCatchBlock(start, end, handler, type);
         super.visitTryCatchBlock(start, end, handler, type);
     }
 
     @Override
-    public void visitLocalVariable(
-        final String name,
-        final String desc,
-        final String signature,
-        final Label start,
-        final Label end,
-        final int index)
-    {
+    public void visitLocalVariable(final String name, final String desc,
+            final String signature, final Label start, final Label end,
+            final int index) {
         p.visitLocalVariable(name, desc, signature, start, end, index);
         super.visitLocalVariable(name, desc, signature, start, end, index);
     }
diff --git a/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
index a37b759..1e23c7e 100644
--- a/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
@@ -117,8 +117,7 @@ public final class TraceSignatureVisitor extends SignatureVisitor {
 
     @Override
     public SignatureVisitor visitInterface() {
-        separator = seenInterface ? ", " : isInterface
-                ? " extends "
+        separator = seenInterface ? ", " : isInterface ? " extends "
                 : " implements ";
         seenInterface = true;
         startType();
@@ -165,34 +164,34 @@ public final class TraceSignatureVisitor extends SignatureVisitor {
     @Override
     public void visitBaseType(final char descriptor) {
         switch (descriptor) {
-            case 'V':
-                declaration.append("void");
-                break;
-            case 'B':
-                declaration.append("byte");
-                break;
-            case 'J':
-                declaration.append("long");
-                break;
-            case 'Z':
-                declaration.append("boolean");
-                break;
-            case 'I':
-                declaration.append("int");
-                break;
-            case 'S':
-                declaration.append("short");
-                break;
-            case 'C':
-                declaration.append("char");
-                break;
-            case 'F':
-                declaration.append("float");
-                break;
-            // case 'D':
-            default:
-                declaration.append("double");
-                break;
+        case 'V':
+            declaration.append("void");
+            break;
+        case 'B':
+            declaration.append("byte");
+            break;
+        case 'J':
+            declaration.append("long");
+            break;
+        case 'Z':
+            declaration.append("boolean");
+            break;
+        case 'I':
+            declaration.append("int");
+            break;
+        case 'S':
+            declaration.append("short");
+            break;
+        case 'C':
+            declaration.append("char");
+            break;
+        case 'F':
+            declaration.append("float");
+            break;
+        // case 'D':
+        default:
+            declaration.append("double");
+            break;
         }
         endType();
     }
diff --git a/src/build/bnd/continuations.bnd b/src/build/bnd/continuations.bnd
deleted file mode 100644
index 748502f..0000000
--- a/src/build/bnd/continuations.bnd
+++ /dev/null
@@ -1,5 +0,0 @@
-Bundle-Name: Scala Continuations Plugin
-Bundle-SymbolicName: org.scala-lang.plugins.continuations
-ver: @VERSION@
-Bundle-Version: ${ver}
-Export-Package: *;version=${ver}
diff --git a/src/build/bnd/scala-compiler-doc.bnd b/src/build/bnd/scala-compiler-doc.bnd
new file mode 100644
index 0000000..4910e5f
--- /dev/null
+++ b/src/build/bnd/scala-compiler-doc.bnd
@@ -0,0 +1,6 @@
+Bundle-Name: Scala Documentation Generator
+Bundle-SymbolicName: org.scala-lang.modules.scala-compiler-doc_@SCALA_BINARY_VERSION@
+ver: @SCALA_COMPILER_DOC_VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
+Import-Package: *
diff --git a/src/build/bnd/scala-compiler-interactive.bnd b/src/build/bnd/scala-compiler-interactive.bnd
new file mode 100644
index 0000000..34d2f29
--- /dev/null
+++ b/src/build/bnd/scala-compiler-interactive.bnd
@@ -0,0 +1,6 @@
+Bundle-Name: Scala Interactive Compiler
+Bundle-SymbolicName: org.scala-lang.modules.scala-compiler-interactive_@SCALA_BINARY_VERSION@
+ver: @SCALA_COMPILER_INTERACTIVE_VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
+Import-Package: *
diff --git a/src/build/bnd/scala-compiler.bnd b/src/build/bnd/scala-compiler.bnd
index c289843..dc30513 100644
--- a/src/build/bnd/scala-compiler.bnd
+++ b/src/build/bnd/scala-compiler.bnd
@@ -3,6 +3,6 @@ Bundle-SymbolicName: org.scala-lang.scala-compiler
 ver: @VERSION@
 Bundle-Version: ${ver}
 Export-Package: *;version=${ver}
-Import-Package: scala.tools.jline.*;resolution:=optional, \
+Import-Package: jline.*;resolution:=optional, \
                 org.apache.tools.ant.*;resolution:=optional, \
                 *
diff --git a/src/build/bnd/scala-continuations-library.bnd b/src/build/bnd/scala-continuations-library.bnd
new file mode 100644
index 0000000..bb505b6
--- /dev/null
+++ b/src/build/bnd/scala-continuations-library.bnd
@@ -0,0 +1,5 @@
+Bundle-Name: Scala Delimited Continuations Library
+Bundle-SymbolicName: org.scala-lang.plugins.scala-continuations-library
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
diff --git a/src/build/bnd/scala-continuations-plugin.bnd b/src/build/bnd/scala-continuations-plugin.bnd
new file mode 100644
index 0000000..cd66614
--- /dev/null
+++ b/src/build/bnd/scala-continuations-plugin.bnd
@@ -0,0 +1,5 @@
+Bundle-Name: Scala Delimited Continuations Compiler Plugin
+Bundle-SymbolicName: org.scala-lang.plugins.scala-continuations-plugin
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
diff --git a/src/build/bnd/scala-parser-combinators.bnd b/src/build/bnd/scala-parser-combinators.bnd
new file mode 100644
index 0000000..6ffc3b2
--- /dev/null
+++ b/src/build/bnd/scala-parser-combinators.bnd
@@ -0,0 +1,5 @@
+Bundle-Name: Scala Parser Combinators Library
+Bundle-SymbolicName: org.scala-lang.modules.scala-parser-combinators
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
diff --git a/src/build/bnd/scala-swing.bnd b/src/build/bnd/scala-swing.bnd
index eeacb9b..7cccb13 100644
--- a/src/build/bnd/scala-swing.bnd
+++ b/src/build/bnd/scala-swing.bnd
@@ -1,5 +1,5 @@
 Bundle-Name: Scala Swing
-Bundle-SymbolicName: org.scala-lang.scala-swing
+Bundle-SymbolicName: org.scala-lang.modules.scala-swing
 ver: @VERSION@
 Bundle-Version: ${ver}
 Export-Package: *;version=${ver}
diff --git a/src/build/bnd/scala-xml.bnd b/src/build/bnd/scala-xml.bnd
new file mode 100644
index 0000000..5d64c05
--- /dev/null
+++ b/src/build/bnd/scala-xml.bnd
@@ -0,0 +1,5 @@
+Bundle-Name: Scala XML Library
+Bundle-SymbolicName: org.scala-lang.modules.scala-xml
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
diff --git a/src/build/dbuild-meta-json-gen.scala b/src/build/dbuild-meta-json-gen.scala
new file mode 100644
index 0000000..d1d4c12
--- /dev/null
+++ b/src/build/dbuild-meta-json-gen.scala
@@ -0,0 +1,47 @@
+// use this script to generate dbuild-meta.json
+// make sure the version is specified correctly,
+// update the dependency structure and
+// check out distributed-build and run `sbt console`:
+// TODO: also generate build.xml and eclipse config from a similar data-structure
+
+import distributed.project.model._
+
+val meta =
+  ExtractedBuildMeta("2.11.0", Seq(
+    Project("scala-library", "org.scala-lang",
+      Seq(ProjectRef("scala-library", "org.scala-lang")),
+      Seq.empty), // TODO: forkjoin
+    Project("scala-reflect", "org.scala-lang",
+      Seq(ProjectRef("scala-reflect", "org.scala-lang")),
+      Seq(ProjectRef("scala-library", "org.scala-lang"))),
+    Project("scala-compiler", "org.scala-lang",
+      Seq(ProjectRef("scala-compiler", "org.scala-lang")),
+      Seq(ProjectRef("scala-reflect", "org.scala-lang"),
+          ProjectRef("scala-xml", "org.scala-lang.modules"),
+          ProjectRef("scala-parser-combinators", "org.scala-lang.modules")
+          // asm
+         )),
+
+    // Project("scala-repl", "org.scala-lang",
+    //   Seq(ProjectRef("scala-repl", "org.scala-lang")),
+    //   Seq(ProjectRef("scala-compiler", "org.scala-lang"))), // jline
+
+    // Project("scala-interactive", "org.scala-lang",
+    //   Seq(ProjectRef("scala-interactive", "org.scala-lang")),
+    //   Seq(ProjectRef("scala-compiler", "org.scala-lang"), ProjectRef("scaladoc", "org.scala-lang"))),
+
+    Project("scala-actors", "org.scala-lang",
+      Seq(ProjectRef("scala-actors", "org.scala-lang")),
+      Seq(ProjectRef("scala-library", "org.scala-lang"))),
+
+    // Project("scaladoc", "org.scala-lang",
+    //   Seq(ProjectRef("scaladoc", "org.scala-lang")),
+    //   Seq(ProjectRef("scala-compiler", "org.scala-lang"),ProjectRef("scala-partest", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"), ProjectRef("scala-parser-combinators", "org.scala-lang"))),
+
+    Project("scalap", "org.scala-lang",
+      Seq(ProjectRef("scalap", "org.scala-lang")),
+      Seq(ProjectRef("scala-compiler", "org.scala-lang")))
+
+  ))
+
+println(Utils.writeValue(meta))
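
The comments at the top of this script describe the intended workflow: load it in an sbt console of a distributed-build checkout (for instance via the REPL's :load command) and copy the printed JSON into dbuild-meta.json. A hypothetical variation, assuming the script's meta and Utils values are already in scope in that console, writes the file directly instead of printing it:

    import java.nio.file.{Files, Paths}
    // Serialize the build metadata and write it straight to dbuild-meta.json.
    Files.write(Paths.get("dbuild-meta.json"),
      Utils.writeValue(meta).toString.getBytes("UTF-8"))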
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index b9511c1..ed436fe2 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -63,7 +63,7 @@ object genprod extends App {
     def header = """
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -113,8 +113,8 @@ object FunctionZero extends Function(0) {
 
 object FunctionOne extends Function(1) {
   override def classAnnotation    = "@annotation.implicitNotFound(msg = \"No implicit view available from ${T1} => ${R}.\")\n"
-  override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Float, scala.Double/*, scala.AnyRef*/) "
-  override def covariantSpecs     = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double/*, scala.AnyRef*/) "
+  override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Float, scala.Double) "
+  override def covariantSpecs     = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) "
 
   override def descriptiveComment = "  " + functionNTemplate.format("succ", "anonfun1",
 """
@@ -319,6 +319,7 @@ class Tuple(val i: Int) extends Group("Tuple") with Arity {
  *  @constructor  Create a new tuple with {i} elements.{idiomatic}
 {params}
  */
+ at deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class {className}{covariantArgs}({fields})
   extends {Product.className(i)}{invariantArgs}
 {{
diff --git a/src/build/maven/continuations-plugin-pom.xml b/src/build/maven/continuations-plugin-pom.xml
deleted file mode 100644
index 9abb0a3..0000000
--- a/src/build/maven/continuations-plugin-pom.xml
+++ /dev/null
@@ -1,62 +0,0 @@
-<project
-        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>org.scala-lang.plugins</groupId>
-	<artifactId>continuations</artifactId>
-	<packaging>jar</packaging>
-	<version>@VERSION@</version>
-  <name>Scala Continuations Plugin</name>
-  <description>Delimited continuations compilation for Scala</description>
-	<url>http://www.scala-lang.org/</url>
-	<inceptionYear>2010</inceptionYear>
-	<organization>
-		<name>LAMP/EPFL</name>
-		<url>http://lamp.epfl.ch/</url>
-	</organization>
-	<licenses>
-		<license>
-			<name>BSD-like</name>
-			<url>http://www.scala-lang.org/downloads/license.html
-			</url>
-			<distribution>repo</distribution>
-		</license>
-	</licenses>
-	<scm>
-                <connection>scm:git:git://github.com/scala/scala.git</connection>
-                <url>https://github.com/scala/scala.git</url>
-	</scm>
-	<issueManagement>
-                <system>JIRA</system>
-                <url>https://issues.scala-lang.org/</url>
-	</issueManagement>
-
-	<dependencies>
-		<dependency>
-			<groupId>org.scala-lang</groupId>
-			<artifactId>scala-compiler</artifactId>
-			<version>@VERSION@</version>
-		</dependency>
-	</dependencies>
-	<distributionManagement>
-		<repository>
-			<id>scala-tools.org</id>
-			<url>@RELEASE_REPOSITORY@</url>
-		</repository>
-		<snapshotRepository>
-			<id>scala-tools.org</id>
-			<url>@SNAPSHOT_REPOSITORY@</url>
-			<uniqueVersion>false</uniqueVersion>
-		</snapshotRepository>
-	</distributionManagement>
-  <developers>
-    <developer>
-      <id>lamp</id>
-      <name>EPFL LAMP</name>
-    </developer>
-    <developer>
-      <id>Typesafe</id>
-      <name>Typesafe, Inc.</name>
-    </developer>
-  </developers>
-</project>
diff --git a/src/build/maven/jline-pom.xml b/src/build/maven/jline-pom.xml
deleted file mode 100644
index 0d6e801..0000000
--- a/src/build/maven/jline-pom.xml
+++ /dev/null
@@ -1,68 +0,0 @@
-<project
-        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>org.scala-lang</groupId>
-	<artifactId>jline</artifactId>
-	<packaging>jar</packaging>
-	<version>@VERSION@</version>
-  <name>jline</name>
-  <description>Like readline, but better</description>
-	<url>http://www.scala-lang.org/</url>
-	<inceptionYear>2011</inceptionYear>
-	<organization>
-		<name>LAMP/EPFL</name>
-		<url>http://lamp.epfl.ch/</url>
-	</organization>
-	<licenses>
-		<license>
-			<name>BSD-like</name>
-			<url>http://www.scala-lang.org/downloads/license.html
-			</url>
-			<distribution>repo</distribution>
-		</license>
-    <license>
-      <name>The BSD License</name>
-      <url>http://www.opensource.org/licenses/bsd-license.php</url>
-      <distribution>repo</distribution>
-    </license>
-	</licenses>
-	<scm>
-                <connection>scm:git:git://github.com/scala/scala.git</connection>
-                <url>https://github.com/scala/scala.git</url>
-	</scm>
-	<issueManagement>
-                <system>JIRA</system>
-                <url>https://issues.scala-lang.org/</url>
-	</issueManagement>
-
-	<dependencies>
-    <dependency>
-        <groupId>org.fusesource.jansi</groupId>
-        <artifactId>jansi</artifactId>
-        <version>1.4</version>
-        <!--<scope>provided</scope>-->
-    </dependency>
-	</dependencies>
-	<distributionManagement>
-		<repository>
-			<id>scala-tools.org</id>
-			<url>@RELEASE_REPOSITORY@</url>
-		</repository>
-		<snapshotRepository>
-			<id>scala-tools.org</id>
-			<url>@SNAPSHOT_REPOSITORY@</url>
-			<uniqueVersion>false</uniqueVersion>
-		</snapshotRepository>
-	</distributionManagement>
-  <developers>
-    <developer>
-      <id>lamp</id>
-      <name>EPFL LAMP</name>
-    </developer>
-    <developer>
-      <id>Typesafe</id>
-      <name>Typesafe, Inc.</name>
-    </developer>
-  </developers>
-</project>
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
deleted file mode 100644
index 8da1d76..0000000
--- a/src/build/maven/maven-deploy.xml
+++ /dev/null
@@ -1,295 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="sabbus-maven-deploy" xmlns:artifact="urn:maven-artifact-ant">
-
-  <description>
-    SuperSabbus extension for deploying a distribution to Maven. THIS FILE IS MEANT TO BE RUN STANDALONE IN THE MAVEN "distpack" DIRECTORY
-  </description>
-  <target name="init.properties">
-    <!-- Pull in properties from build -->
-    <property file="build.properties" />
-    <!-- Initialize specific properties -->
-    <property name="remote.snapshot.repository" value="https://oss.sonatype.org/content/repositories/snapshots" />
-    <property name="remote.release.repository" value="https://oss.sonatype.org/service/local/staging/deploy/maven2" />
-
-    <property name="local.snapshot.repository" value="${user.home}/.m2/repository" />
-    <property name="local.release.repository" value="${user.home}/.m2/repository" />
-    <property name="repository.credentials.id" value="sonatype-nexus" />
-    <property name="settings.file" value="${user.home}/.m2/settings.xml" />
-    <condition property="version.is.snapshot">
-      <contains string="${maven.version.number}" substring="-SNAPSHOT"/>
-    </condition>
-    
-    <echo>Using server[${repository.credentials.id}] for maven repository credentials.
-       Please make sure that your ~/.m2/settings.xml has the needed username/password for this server id
-     </echo>
-
-    
-  </target>
-
-  <target name="init.maven" depends="init.properties">
-
-    <!-- Add our maven ant tasks -->
-    <path id="maven-ant-tasks.classpath" path="maven-ant-tasks-2.1.1.jar" />
-    <typedef resource="org/apache/maven/artifact/ant/antlib.xml" uri="urn:maven-artifact-ant" classpathref="maven-ant-tasks.classpath" />
-
-    <!-- simplify fixing pom versions -->
-    <macrodef name="make-pom">
-      <attribute name="name" />
-      <attribute name="version" />
-      <sequential>
-        <copy file="@{name}/@{name}-pom.xml" tofile="@{name}/@{name}-pom-fixed.xml" overwrite="true">
-          <filterset>
-            <filter token="VERSION" value="@{version}" />
-            <filter token="RELEASE_REPOSITORY" value="${remote.release.repository}" />
-            <filter token="SNAPSHOT_REPOSITORY" value="${remote.snapshot.repository}" />
-          </filterset>
-        </copy>
-        <artifact:pom id="@{name}.pom" file="@{name}/@{name}-pom-fixed.xml" />
-      </sequential>
-    </macrodef>
-
-    <macrodef name="make-pom-plugin">
-      <attribute name="name" />
-      <attribute name="version" />
-      <sequential>
-        <copy file="plugins/@{name}/@{name}-plugin-pom.xml" tofile="plugins/@{name}/@{name}-pom-fixed.xml" overwrite="true">
-          <filterset>
-            <filter token="VERSION" value="@{version}" />
-            <filter token="RELEASE_REPOSITORY" value="${remote.release.repository}" />
-            <filter token="SNAPSHOT_REPOSITORY" value="${remote.snapshot.repository}" />
-          </filterset>
-        </copy>
-        <artifact:pom id="plugin-@{name}.pom" file="plugins/@{name}/@{name}-pom-fixed.xml" />
-      </sequential>
-    </macrodef>
-  </target>
-  <!-- macros for local deployment -->
-  <target name="deploy.local.init" depends="init.maven">
-    <!-- Deploy single artifact locally -->
-    <macrodef name="deploy-local">
-      <attribute name="name" />
-      <attribute name="version" />
-      <attribute name="repository" />
-      <element name="extra-attachments" optional="yes" />
-      <sequential>
-        <make-pom name="@{name}" version="@{version}" />
-        <artifact:install file="@{name}/@{name}.jar">
-          <artifact:pom refid="@{name}.pom" />
-          <artifact:localRepository path="@{repository}" id="${repository.credentials.id}" />
-          <artifact:attach type="jar" file="@{name}/@{name}-src.jar" classifier="sources" />
-          <artifact:attach type="jar" file="@{name}/@{name}-docs.jar" classifier="javadoc" />
-          <extra-attachments />
-        </artifact:install>
-      </sequential>
-    </macrodef>
-
-    <!-- Deploy compiler plugins -->
-    <macrodef name="deploy-local-plugin">
-        <attribute name="name" />
-        <attribute name="version" />
-        <attribute name="repository" />
-        <element name="extra-attachments" optional="yes" />
-        <sequential>
-          <make-pom-plugin name="@{name}" version="@{version}" />
-          <artifact:install file="plugins/@{name}/@{name}.jar">
-            <artifact:pom refid="plugin-@{name}.pom" />
-            <artifact:attach type="jar" file="plugins/@{name}/@{name}-src.jar" classifier="sources" />
-            <artifact:attach type="jar" file="plugins/@{name}/@{name}-docs.jar" classifier="javadoc" />
-            <artifact:localRepository path="@{repository}" id="${repository.credentials.id}" />
-            <extra-attachments />
-          </artifact:install>
-        </sequential>
-      </macrodef>
-
-
-    <!-- Deploy all artifacts locally -->
-    <macrodef name="deploy-local-all">
-      <attribute name="repository" />
-      <attribute name="version" />
-      <sequential>
-        <deploy-local name="scala-library" version="@{version}" repository="@{repository}" />
-        <deploy-local name="scala-compiler" version="@{version}" repository="@{repository}" />
-        <deploy-local-plugin name="continuations" version="@{version}" repository="@{repository}"/>
-        <deploy-local name="scala-reflect" version="@{version}" repository="@{repository}" />
-        <deploy-local name="scala-actors" version="@{version}" repository="@{repository}" />
-        <deploy-local name="scala-swing" version="@{version}" repository="@{repository}"/>
-        <deploy-local name="scalap" version="@{version}" repository="@{repository}"/>
-        <deploy-local name="scala-partest" version="@{version}" repository="@{repository}"/>
-        <deploy-local name="jline" version="@{version}" repository="@{repository}"/>
-      </sequential>
-    </macrodef>
-  </target>
-
-  <!-- macros for remote deployment -->
-  <target name="deploy.remote.init" depends="init.maven">
-    <!-- Deploy single artifact locally -->
-    <macrodef name="deploy-remote">
-      <attribute name="name" />
-      <attribute name="repository" />
-      <attribute name="version" />
-      <element name="extra-attachments" optional="yes" />
-      <sequential>
-        <make-pom name="@{name}" version="@{version}" />
-        <artifact:deploy file="@{name}/@{name}.jar" settingsFile="${settings.file}">
-          <artifact:pom refid="@{name}.pom" />
-          <artifact:remoteRepository url="@{repository}" id="${repository.credentials.id}" />
-          <artifact:attach type="jar" file="@{name}/@{name}-src.jar" classifier="sources" />
-          <artifact:attach type="jar" file="@{name}/@{name}-docs.jar" classifier="javadoc" />
-          <extra-attachments />
-        </artifact:deploy>
-      </sequential>
-    </macrodef>
-
-    <!-- Deploy compiler plugins -->
-    <macrodef name="deploy-remote-plugin">
-        <attribute name="name" />
-        <attribute name="version" />
-        <attribute name="repository" />
-        <element name="extra-attachments" optional="yes" />
-        <sequential>
-          <make-pom-plugin name="@{name}" version="@{version}" />
-          <artifact:deploy file="plugins/@{name}/@{name}.jar" settingsFile="${settings.file}">
-            <artifact:pom refid="plugin-@{name}.pom" />
-            <artifact:attach type="jar" file="plugins/@{name}/@{name}-src.jar" classifier="sources" />
-            <artifact:attach type="jar" file="plugins/@{name}/@{name}-docs.jar" classifier="javadoc" />
-            <artifact:remoteRepository url="@{repository}" id="${repository.credentials.id}" />
-            <extra-attachments />
-          </artifact:deploy>
-        </sequential>
-      </macrodef>
-
-    <!-- Deploy all artifacts remotely -->
-    <macrodef name="deploy-remote-all">
-      <attribute name="repository" />
-      <attribute name="version" />
-      <sequential>
-        <deploy-remote name="scala-library" version="@{version}" repository="@{repository}"/>
-        <deploy-remote name="jline" version="@{version}" repository="@{repository}"/>
-        <deploy-remote name="scala-reflect" version="@{version}" repository="@{repository}"/>
-        <deploy-remote name="scala-compiler" version="@{version}" repository="@{repository}" />
-        <deploy-remote name="scala-swing" version="@{version}" repository="@{repository}"/>
-        <deploy-remote name="scala-actors" version="@{version}" repository="@{repository}"/>
-        <deploy-remote name="scalap" version="@{version}" repository="@{repository}"/>
-        <deploy-remote name="scala-partest" version="@{version}" repository="@{repository}"/>
-        <deploy-remote-plugin name="continuations" version="@{version}" repository="@{repository}"/> 
-      </sequential>
-    </macrodef>
-
-    <!-- IDE needs swing/actors/continuations -->
-    <macrodef name="deploy-remote-core">
-      <attribute name="repository" />
-      <attribute name="version" />
-      <sequential>
-        <deploy-remote name="scala-library" version="@{version}" repository="@{repository}"/>
-        <deploy-remote name="scala-reflect" version="@{version}" repository="@{repository}"/>
-        <deploy-remote name="scala-compiler" version="@{version}" repository="@{repository}" />
-        <deploy-remote name="jline" version="@{version}" repository="@{repository}"/>
-        <deploy-remote name="scala-swing" version="@{version}" repository="@{repository}"/>
-        <deploy-remote name="scala-actors" version="@{version}" repository="@{repository}"/>
-        <deploy-remote-plugin name="continuations" version="@{version}" repository="@{repository}"/>
-      </sequential>
-    </macrodef>
-
-    <!-- PGP Signed deployment -->
-    <macrodef name="deploy-remote-signed-single">
-      <attribute name="pom" />
-      <attribute name="repository" />
-      <attribute name="jar" />
-      <attribute name="srcjar" />
-      <attribute name="docjar" />
-      <sequential>
-        <artifact:mvn>
-          <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
-          <arg value="-Durl=@{repository}" />
-          <arg value="-DrepositoryId=${repository.credentials.id}" />
-          <arg value="-DpomFile=@{pom}" />
-          <arg value="-Dfile=@{jar}" />
-          <arg value="-Dsources=@{srcjar}" />
-          <arg value="-Djavadoc=@{docjar}" />
-          <arg value="-Pgpg" />
-          <arg value="-Dgpg.useagent=true" />
-        </artifact:mvn>
-      </sequential>
-    </macrodef>
-    <macrodef name="deploy-remote-signed">
-      <attribute name="name" />
-      <attribute name="repository" />
-      <attribute name="version" />
-      <element name="extra-attachments" optional="yes" />
-      <sequential>
-        <make-pom name="@{name}" version="@{version}" />
-        <deploy-remote-signed-single
-           pom="@{name}/@{name}-pom-fixed.xml"
-           repository="@{repository}"
-           jar="@{name}/@{name}.jar"
-           srcjar="@{name}/@{name}-src.jar"
-           docjar="@{name}/@{name}-docs.jar" />
-      </sequential>
-    </macrodef>
-    <macrodef name="deploy-remote-plugin-signed">
-      <attribute name="name" />
-      <attribute name="repository" />
-      <attribute name="version" />
-      <element name="extra-attachments" optional="yes" />
-      <sequential>
-        <make-pom-plugin name="@{name}" version="@{version}" />
-        <deploy-remote-signed-single
-           pom="plugins/@{name}/@{name}-pom-fixed.xml"
-           repository="@{repository}"
-           jar="plugins/@{name}/@{name}.jar"
-           srcjar="plugins/@{name}/@{name}-src.jar"
-           docjar="plugins/@{name}/@{name}-docs.jar" />
-      </sequential>
-    </macrodef>
-    <macrodef name="deploy-remote-signed-all">
-      <attribute name="repository" />
-      <attribute name="version" />
-      <sequential>
-        <deploy-remote-plugin-signed name="continuations" version="@{version}" repository="@{repository}"/> 
-        <deploy-remote-signed name="scala-library" version="@{version}" repository="@{repository}"/>
-        <deploy-remote-signed name="jline" version="@{version}" repository="@{repository}"/>
-        <deploy-remote-signed name="scala-reflect" version="@{version}" repository="@{repository}"/>
-        <deploy-remote-signed name="scala-compiler" version="@{version}" repository="@{repository}" />
-        <deploy-remote-signed name="scala-swing" version="@{version}" repository="@{repository}"/>
-        <deploy-remote-signed name="scala-actors" version="@{version}" repository="@{repository}"/>
-        <deploy-remote-signed name="scalap" version="@{version}" repository="@{repository}"/>
-        <deploy-remote-signed name="scala-partest" version="@{version}" repository="@{repository}"/>
-      </sequential>
-    </macrodef>
-  </target>
-
-  <!-- Local Targets -->
-  <target name="deploy.snapshot.local" depends="deploy.local.init" if="version.is.snapshot" description="Deploys the bundled snapshot of the Scala Lanaguage to a local maven repository">
-    <deploy-local-all version="${maven.version.number}" repository="${local.snapshot.repository}" />
-  </target>
-
-  <target name="deploy.release.local" depends="deploy.local.init" unless="version.is.snapshot" description="Deploys the bundled files as a release into the local Maven repository">
-    <deploy-local-all version="${maven.version.number}" repository="${local.release.repository}" />
-  </target>
-  <target name="deploy.local" depends="deploy.snapshot.local, deploy.release.local" description="Deploys the bundle files to the local maven repo."/>
-
-  <!-- Remote Signed Targets -->
-  <target name="deploy.signed.snapshot" depends="deploy.remote.init" if="version.is.snapshot" description="Deploys the bundled files as a snapshot into the desired remote Maven repository">
-      <deploy-remote-signed-all version="${maven.version.number}" repository="${remote.snapshot.repository}" />
-  </target>
-
-  <target name="deploy.signed.release" depends="deploy.remote.init" unless="version.is.snapshot" description="Deploys the bundled files as a release into the desired remote Maven repository">
-    <deploy-remote-signed-all version="${maven.version.number}" repository="${remote.release.repository}" />
-  </target>
-  <target name="deploy.signed" depends="deploy.signed.release, deploy.signed.snapshot" description="Deploys signed bundles to remote repo"/>
-  <!-- Remote unsigned targets -->
-  <target name="deploy.snapshot" depends="deploy.remote.init" if="version.is.snapshot" description="Deploys the bundled files as a snapshot into the desired remote Maven repository">
-      <deploy-remote-all version="${maven.version.number}" repository="${remote.snapshot.repository}" />
-  </target>
-
-  <!-- for PR validation -->
-  <target name="deploy-core.snapshot" depends="deploy.remote.init">
-      <deploy-remote-core version="${maven.version.number}" repository="${remote.snapshot.repository}" />
-  </target>
-
-  <target name="deploy.release" depends="deploy.remote.init" unless="version.is.snapshot" description="Deploys the bundled files as a release into the desired remote Maven repository">
-    <deploy-remote-all version="${maven.version.number}" repository="${remote.release.repository}" />
-  </target>
-  <target name="deploy" depends="deploy.snapshot, deploy.release" description="Deploys unsigned artifacts to the maven repo."/>
-</project>
diff --git a/src/build/maven/scala-actors-pom.xml b/src/build/maven/scala-actors-pom.xml
index 3d37ef8..a0ebcec 100644
--- a/src/build/maven/scala-actors-pom.xml
+++ b/src/build/maven/scala-actors-pom.xml
@@ -1,56 +1,43 @@
-<project
-        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>org.scala-lang</groupId>
-	<artifactId>scala-actors</artifactId>
-	<packaging>jar</packaging>
-	<version>@VERSION@</version>
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.scala-lang</groupId>
+  <artifactId>scala-actors</artifactId>
+  <packaging>jar</packaging>
+  <version>@VERSION@</version>
   <name>Scala Actors library</name>
   <description>Deprecated Actors Library for Scala</description>
-  <url>http://www.scala-lang.org/</url>  
+  <url>http://www.scala-lang.org/</url>
   <inceptionYear>2006</inceptionYear>
   <organization>
-      <name>LAMP/EPFL</name>
-      <url>http://lamp.epfl.ch/</url>
-   </organization>
-   <licenses>
-      <license>
-         <name>BSD-like</name>
-         <url>http://www.scala-lang.org/downloads/license.html
-         </url>
-         <distribution>repo</distribution>
-      </license>
-   </licenses>
-   <scm>
-      <connection>scm:git:git://github.com/scala/scala.git</connection>
-      <url>https://github.com/scala/scala.git</url>
-   </scm>
-   <issueManagement>
-      <system>JIRA</system>
-      <url>https://issues.scala-lang.org/</url>
-   </issueManagement>	
-   <properties>
-       <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
-   </properties>
-	<dependencies>
-		<dependency>
-			<groupId>org.scala-lang</groupId>
-			<artifactId>scala-library</artifactId>
-			<version>@VERSION@</version>
-		</dependency>
-	</dependencies>
-	<distributionManagement>
-		<repository>
-			<id>scala-tools.org</id>
-			<url>@RELEASE_REPOSITORY@</url>
-		</repository>
-		<snapshotRepository>
-			<id>scala-tools.org</id>
-			<url>@SNAPSHOT_REPOSITORY@</url>
-			<uniqueVersion>false</uniqueVersion>
-		</snapshotRepository>
-	</distributionManagement>
+    <name>LAMP/EPFL</name>
+    <url>http://lamp.epfl.ch/</url>
+  </organization>
+  <licenses>
+    <license>
+      <name>BSD 3-Clause</name>
+      <url>http://www.scala-lang.org/license.html</url>
+      <distribution>repo</distribution>
+    </license>
+  </licenses>
+  <scm>
+    <connection>scm:git:git://github.com/scala/scala.git</connection>
+    <url>https://github.com/scala/scala.git</url>
+  </scm>
+  <issueManagement>
+    <system>JIRA</system>
+    <url>https://issues.scala-lang.org/</url>
+  </issueManagement>
+  <properties>
+    <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+  </properties>
+  <dependencies>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-library</artifactId>
+      <version>@VERSION@</version>
+    </dependency>
+  </dependencies>
   <developers>
     <developer>
       <id>lamp</id>
diff --git a/src/build/maven/scala-compiler-doc-pom.xml b/src/build/maven/scala-compiler-doc-pom.xml
new file mode 100644
index 0000000..8572e55
--- /dev/null
+++ b/src/build/maven/scala-compiler-doc-pom.xml
@@ -0,0 +1,58 @@
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.scala-lang.modules</groupId>
+  <artifactId>scala-compiler-doc_@SCALA_BINARY_VERSION@</artifactId>
+  <packaging>jar</packaging>
+  <version>@SCALA_COMPILER_DOC_VERSION@</version>
+  <name>Scala Documentation Generator</name>
+  <description>Documentation generator for the Scala Programming Language</description>
+  <url>http://www.scala-lang.org/</url>
+  <inceptionYear>2002</inceptionYear>
+  <organization>
+    <name>LAMP/EPFL</name>
+    <url>http://lamp.epfl.ch/</url>
+  </organization>
+  <licenses>
+    <license>
+      <name>BSD 3-Clause</name>
+      <url>http://www.scala-lang.org/license.html</url>
+      <distribution>repo</distribution>
+    </license>
+  </licenses>
+  <scm>
+    <connection>scm:git:git://github.com/scala/scala.git</connection>
+    <url>https://github.com/scala/scala.git</url>
+  </scm>
+  <issueManagement>
+    <system>JIRA</system>
+    <url>https://issues.scala-lang.org/</url>
+  </issueManagement>
+  <dependencies>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-compiler</artifactId>
+      <version>@VERSION@</version>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang.modules</groupId>
+      <artifactId>scala-xml_@SCALA_BINARY_VERSION@</artifactId>
+      <version>@XML_VERSION@</version>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang.modules</groupId>
+      <artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId>
+      <version>@PARSER_COMBINATORS_VERSION@</version>
+    </dependency>
+  </dependencies>
+  <developers>
+    <developer>
+      <id>lamp</id>
+      <name>EPFL LAMP</name>
+    </developer>
+    <developer>
+      <id>Typesafe</id>
+      <name>Typesafe, Inc.</name>
+    </developer>
+  </developers>
+</project>
diff --git a/src/build/maven/scala-compiler-interactive-pom.xml b/src/build/maven/scala-compiler-interactive-pom.xml
new file mode 100644
index 0000000..ad8192b
--- /dev/null
+++ b/src/build/maven/scala-compiler-interactive-pom.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.scala-lang.modules</groupId>
+  <artifactId>scala-compiler-interactive_@SCALA_BINARY_VERSION@</artifactId>
+  <packaging>jar</packaging>
+  <version>@SCALA_COMPILER_INTERACTIVE_VERSION@</version>
+  <name>Scala Interactive Compiler</name>
+  <description>Interactive Compiler for the Scala Programming Language</description>
+  <url>http://www.scala-lang.org/</url>
+  <inceptionYear>2002</inceptionYear>
+  <organization>
+    <name>LAMP/EPFL</name>
+    <url>http://lamp.epfl.ch/</url>
+  </organization>
+  <licenses>
+    <license>
+      <name>BSD 3-Clause</name>
+      <url>http://www.scala-lang.org/license.html</url>
+      <distribution>repo</distribution>
+    </license>
+  </licenses>
+  <scm>
+    <connection>scm:git:git://github.com/scala/scala.git</connection>
+    <url>https://github.com/scala/scala.git</url>
+  </scm>
+  <issueManagement>
+    <system>JIRA</system>
+    <url>https://issues.scala-lang.org/</url>
+  </issueManagement>
+  <dependencies>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-compiler</artifactId>
+      <version>@VERSION@</version>
+    </dependency>
+  </dependencies>
+  <developers>
+    <developer>
+      <id>lamp</id>
+      <name>EPFL LAMP</name>
+    </developer>
+    <developer>
+      <id>Typesafe</id>
+      <name>Typesafe, Inc.</name>
+    </developer>
+  </developers>
+</project>
diff --git a/src/build/maven/scala-compiler-pom.xml b/src/build/maven/scala-compiler-pom.xml
index fedc34a..8ca18f6 100644
--- a/src/build/maven/scala-compiler-pom.xml
+++ b/src/build/maven/scala-compiler-pom.xml
@@ -1,65 +1,62 @@
-<project
-        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>org.scala-lang</groupId>
-	<artifactId>scala-compiler</artifactId>
-	<packaging>jar</packaging>
-	<version>@VERSION@</version>
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.scala-lang</groupId>
+  <artifactId>scala-compiler</artifactId>
+  <packaging>jar</packaging>
+  <version>@VERSION@</version>
   <name>Scala Compiler</name>
   <description>Compiler for the Scala Programming Language</description>
-	<url>http://www.scala-lang.org/</url>
-	<inceptionYear>2002</inceptionYear>
-	<organization>
-		<name>LAMP/EPFL</name>
-		<url>http://lamp.epfl.ch/</url>
-	</organization>
-	<licenses>
-		<license>
-			<name>BSD-like</name>
-			<url>http://www.scala-lang.org/downloads/license.html
-			</url>
-			<distribution>repo</distribution>
-		</license>
-	</licenses>
-	<scm>
-                <connection>scm:git:git://github.com/scala/scala.git</connection>
-                <url>https://github.com/scala/scala.git</url>
-	</scm>
-	<issueManagement>
-                <system>JIRA</system>
-                <url>https://issues.scala-lang.org/</url>
-	</issueManagement>
-
-	<dependencies>
-		<dependency>
-			<groupId>org.scala-lang</groupId>
-			<artifactId>scala-library</artifactId>
-			<version>@VERSION@</version>
-		</dependency>
-		<dependency>
-			<groupId>org.scala-lang</groupId>
-			<artifactId>scala-reflect</artifactId>
-			<version>@VERSION@</version>
-		</dependency>
+  <url>http://www.scala-lang.org/</url>
+  <inceptionYear>2002</inceptionYear>
+  <organization>
+    <name>LAMP/EPFL</name>
+    <url>http://lamp.epfl.ch/</url>
+  </organization>
+  <licenses>
+    <license>
+      <name>BSD 3-Clause</name>
+      <url>http://www.scala-lang.org/license.html</url>
+      <distribution>repo</distribution>
+    </license>
+  </licenses>
+  <scm>
+    <connection>scm:git:git://github.com/scala/scala.git</connection>
+    <url>https://github.com/scala/scala.git</url>
+  </scm>
+  <issueManagement>
+    <system>JIRA</system>
+    <url>https://issues.scala-lang.org/</url>
+  </issueManagement>
+  <dependencies>
     <dependency>
       <groupId>org.scala-lang</groupId>
-      <artifactId>jline</artifactId>
+      <artifactId>scala-library</artifactId>
+      <version>@VERSION@</version>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-reflect</artifactId>
       <version>@VERSION@</version>
+    </dependency>
+    <!-- TODO modularize compiler: these dependencies will disappear when the compiler is modularized -->
+    <dependency> <!-- for scala-compiler-doc -->
+      <groupId>org.scala-lang.modules</groupId>
+      <artifactId>scala-xml_@SCALA_BINARY_VERSION@</artifactId>
+      <version>@XML_VERSION@</version>
+    </dependency>
+    <dependency> <!-- for scala-compiler-doc -->
+      <groupId>org.scala-lang.modules</groupId>
+      <artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId>
+      <version>@PARSER_COMBINATORS_VERSION@</version>
+    </dependency>
+    <dependency> <!-- for scala-compiler-repl; once it moves there, make it required -->
+      <groupId>jline</groupId>
+      <artifactId>jline</artifactId>
+      <version>@JLINE_VERSION@</version>
       <optional>true</optional>
     </dependency>
-	</dependencies>
-	<distributionManagement>
-		<repository>
-			<id>scala-tools.org</id>
-			<url>@RELEASE_REPOSITORY@</url>
-		</repository>
-		<snapshotRepository>
-			<id>scala-tools.org</id>
-			<url>@SNAPSHOT_REPOSITORY@</url>
-			<uniqueVersion>false</uniqueVersion>
-		</snapshotRepository>
-	</distributionManagement>
+  </dependencies>
   <developers>
     <developer>
       <id>lamp</id>
diff --git a/src/build/maven/scala-dist-pom.xml b/src/build/maven/scala-dist-pom.xml
new file mode 100644
index 0000000..22a24de
--- /dev/null
+++ b/src/build/maven/scala-dist-pom.xml
@@ -0,0 +1,70 @@
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.scala-lang</groupId>
+  <artifactId>scala-dist</artifactId>
+  <packaging>jar</packaging>
+  <version>@VERSION@</version>
+  <name>Scala Distribution Artifacts</name>
+  <description>The Artifacts Distributed with Scala</description>
+  <url>http://www.scala-lang.org/</url>
+  <inceptionYear>2002</inceptionYear>
+  <organization>
+    <name>LAMP/EPFL</name>
+    <url>http://lamp.epfl.ch/</url>
+  </organization>
+  <licenses>
+    <license>
+      <name>BSD 3-Clause</name>
+      <url>http://www.scala-lang.org/license.html</url>
+      <distribution>repo</distribution>
+    </license>
+  </licenses>
+  <scm>
+    <connection>scm:git:git://github.com/scala/scala.git</connection>
+    <url>https://github.com/scala/scala.git</url>
+  </scm>
+  <issueManagement>
+    <system>JIRA</system>
+    <url>https://issues.scala-lang.org/</url>
+  </issueManagement>
+  <dependencies>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-library-all</artifactId>
+      <version>@VERSION@</version>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-compiler</artifactId>
+      <version>@VERSION@</version>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang.plugins</groupId>
+      <!-- plugins are fully cross-versioned. But, we don't publish with 2.11.0-SNAPSHOT, instead use full version of the last non-snapshot version -->
+      <artifactId>scala-continuations-plugin_@SCALA_FULL_VERSION@</artifactId>
+      <version>@CONTINUATIONS_PLUGIN_VERSION@</version>
+    </dependency>
+    <!-- duplicated from scala-compiler, where it's optional,
+    so that resolving scala-dist's transitive dependencies does not include jline,
+    even though we need to include it in the dist, but macros depending on the compiler
+    shouldn't have to require jline...
+    another reason to modularize and move the dependency to scala-compiler-repl
+    TODO: remove duplication once we have the scala-compiler-repl module -->
+    <dependency>
+      <groupId>jline</groupId>
+      <artifactId>jline</artifactId>
+      <version>@JLINE_VERSION@</version>
+    </dependency>
+  </dependencies>
+  <developers>
+    <developer>
+      <id>lamp</id>
+      <name>EPFL LAMP</name>
+    </developer>
+    <developer>
+      <id>Typesafe</id>
+      <name>Typesafe, Inc.</name>
+    </developer>
+  </developers>
+</project>
diff --git a/src/build/maven/scala-dotnet-library-pom.xml b/src/build/maven/scala-dotnet-library-pom.xml
deleted file mode 100644
index 007e8be..0000000
--- a/src/build/maven/scala-dotnet-library-pom.xml
+++ /dev/null
@@ -1,45 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.scala-lang</groupId>
-  <artifactId>scala-dotnet-library</artifactId>
-  <version>@VERSION@</version>
-  <name>Class Library</name>
-  <packaging>dotnet:library</packaging>
-  
-   <url>http://www.scala-lang.org/</url>
-   <inceptionYear>2002</inceptionYear>
-   <organization>
-      <name>LAMP/EPFL</name>
-      <url>http://lamp.epfl.ch/</url>
-   </organization>
-   <licenses>
-      <license>
-         <name>BSD-like</name>
-         <url>http://www.scala-lang.org/downloads/license.html
-         </url>
-         <distribution>repo</distribution>
-      </license>
-   </licenses>
-   <scm>
-      <connection>scm:git:git://github.com/scala/scala.git</connection>
-      <url>https://github.com/scala/scala.git</url>
-   </scm>
-   <issueManagement>
-      <system>JIRA</system>
-      <url>https://issues.scala-lang.org/</url>
-   </issueManagement>
-   <distributionManagement>
-      <repository>
-         <id>scala-tools.org</id>
-         <url>@RELEASE_REPOSITORY@</url>
-      </repository>
-      <snapshotRepository>
-         <id>scala-tools.org</id>
-         <url>@SNAPSHOT_REPOSITORY@</url>
-         <uniqueVersion>false</uniqueVersion>
-      </snapshotRepository>
-   </distributionManagement>
-</project>
diff --git a/src/build/maven/scala-library-all-pom.xml b/src/build/maven/scala-library-all-pom.xml
new file mode 100644
index 0000000..3fcf207
--- /dev/null
+++ b/src/build/maven/scala-library-all-pom.xml
@@ -0,0 +1,88 @@
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.scala-lang</groupId>
+  <artifactId>scala-library-all</artifactId>
+  <packaging>pom</packaging>
+  <version>@VERSION@</version>
+  <name>Scala Library Powerpack</name>
+  <description>The Scala Standard Library and Official Modules</description>
+  <url>http://www.scala-lang.org/</url>
+  <inceptionYear>2002</inceptionYear>
+  <organization>
+    <name>LAMP/EPFL</name>
+    <url>http://lamp.epfl.ch/</url>
+  </organization>
+  <licenses>
+    <license>
+      <name>BSD 3-Clause</name>
+      <url>http://www.scala-lang.org/license.html</url>
+      <distribution>repo</distribution>
+    </license>
+  </licenses>
+  <scm>
+    <connection>scm:git:git://github.com/scala/scala.git</connection>
+    <url>https://github.com/scala/scala.git</url>
+  </scm>
+  <issueManagement>
+    <system>JIRA</system>
+    <url>https://issues.scala-lang.org/</url>
+  </issueManagement>
+  <dependencies>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-library</artifactId>
+      <version>@VERSION@</version>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-reflect</artifactId>
+      <version>@VERSION@</version>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang.modules</groupId>
+      <artifactId>scala-xml_@SCALA_BINARY_VERSION@</artifactId>
+      <version>@XML_VERSION@</version>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang.modules</groupId>
+      <artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId>
+      <version>@PARSER_COMBINATORS_VERSION@</version>
+    </dependency>
+    <!--
+         the continuations plugin is a dependency of scala-dist, as scala-library-all should be
+         a drop-in replacement for scala-library, and as such should not (indirectly)
+         depend on plugins/the compiler.
+    -->
+    <dependency>
+      <groupId>org.scala-lang.plugins</groupId>
+      <artifactId>scala-continuations-library_@SCALA_BINARY_VERSION@</artifactId>
+      <version>@CONTINUATIONS_LIBRARY_VERSION@</version>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang.modules</groupId>
+      <artifactId>scala-swing_@SCALA_BINARY_VERSION@</artifactId>
+      <version>@SCALA_SWING_VERSION@</version>
+    </dependency>
+    <dependency>
+      <groupId>com.typesafe.akka</groupId>
+      <artifactId>akka-actor_@SCALA_BINARY_VERSION@</artifactId>
+      <version>@AKKA_ACTOR_VERSION@</version>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-actors-migration_@SCALA_BINARY_VERSION@</artifactId>
+      <version>@ACTORS_MIGRATION_VERSION@</version>
+    </dependency>
+  </dependencies>
+  <developers>
+    <developer>
+      <id>lamp</id>
+      <name>EPFL LAMP</name>
+    </developer>
+    <developer>
+      <id>Typesafe</id>
+      <name>Typesafe, Inc.</name>
+    </developer>
+  </developers>
+</project>
diff --git a/src/build/maven/scala-library-pom.xml b/src/build/maven/scala-library-pom.xml
index fc9964a..78fc05a 100644
--- a/src/build/maven/scala-library-pom.xml
+++ b/src/build/maven/scala-library-pom.xml
@@ -1,56 +1,38 @@
-<project
-        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>org.scala-lang</groupId>
-	<artifactId>scala-library</artifactId>
-	<packaging>jar</packaging>
-	<version>@VERSION@</version>
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.scala-lang</groupId>
+  <artifactId>scala-library</artifactId>
+  <packaging>jar</packaging>
+  <version>@VERSION@</version>
   <name>Scala Library</name>
   <description>Standard library for the Scala Programming Language</description>
-	<url>http://www.scala-lang.org/</url>
-   <inceptionYear>2002</inceptionYear>
-   <organization>
-      <name>LAMP/EPFL</name>
-      <url>http://lamp.epfl.ch/</url>
-   </organization>
-   <licenses>
-      <license>
-         <name>BSD-like</name>
-         <url>http://www.scala-lang.org/downloads/license.html
-         </url>
-         <distribution>repo</distribution>
-      </license>
-   </licenses>
-   <scm>
-      <connection>scm:git:git://github.com/scala/scala.git</connection>
-      <url>https://github.com/scala/scala.git</url>
-   </scm>
-   <issueManagement>
-      <system>JIRA</system>
-      <url>https://issues.scala-lang.org/</url>
-   </issueManagement>
-   <properties>
-       <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
-   </properties>
-   <dependencies>
-      <!--<dependency>
-         <groupId>com.typesafe</groupId>
-         <artifactId>config</artifactId>
-         <version>0.4.0</version>
-     </dependency>-->
+  <url>http://www.scala-lang.org/</url>
+  <inceptionYear>2002</inceptionYear>
+  <organization>
+    <name>LAMP/EPFL</name>
+    <url>http://lamp.epfl.ch/</url>
+  </organization>
+  <licenses>
+    <license>
+      <name>BSD 3-Clause</name>
+      <url>http://www.scala-lang.org/license.html</url>
+      <distribution>repo</distribution>
+    </license>
+  </licenses>
+  <scm>
+    <connection>scm:git:git://github.com/scala/scala.git</connection>
+    <url>https://github.com/scala/scala.git</url>
+  </scm>
+  <issueManagement>
+    <system>JIRA</system>
+    <url>https://issues.scala-lang.org/</url>
+  </issueManagement>
+  <properties>
+    <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+  </properties>
+  <dependencies>
    </dependencies>
-   <distributionManagement>
-      <repository>
-         <id>scala-tools.org</id>
-         <url>@RELEASE_REPOSITORY@</url>
-      </repository>
-      <snapshotRepository>
-         <id>scala-tools.org</id>
-         <url>@SNAPSHOT_REPOSITORY@</url>
-         <uniqueVersion>false</uniqueVersion>
-      </snapshotRepository>
-  </distributionManagement>
   <developers>
     <developer>
       <id>lamp</id>
diff --git a/src/build/maven/scala-partest-pom.xml b/src/build/maven/scala-partest-pom.xml
deleted file mode 100644
index ac05f24..0000000
--- a/src/build/maven/scala-partest-pom.xml
+++ /dev/null
@@ -1,62 +0,0 @@
-<project
-        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>org.scala-lang</groupId>
-	<artifactId>scala-partest</artifactId>
-	<packaging>jar</packaging>
-	<version>@VERSION@</version>
-  <name>Parallel Test Framework</name>
-  <description>testing framework for the Scala compiler.</description>
-	<url>http://www.scala-lang.org/</url>
-	<inceptionYear>2002</inceptionYear>
-	<organization>
-		<name>LAMP/EPFL</name>
-		<url>http://lamp.epfl.ch/</url>
-	</organization>
-	<licenses>
-		<license>
-			<name>BSD-like</name>
-			<url>http://www.scala-lang.org/downloads/license.html
-			</url>
-			<distribution>repo</distribution>
-		</license>
-	</licenses>
-	<scm>
-                <connection>scm:git:git://github.com/scala/scala.git</connection>
-                <url>https://github.com/scala/scala.git</url>
-	</scm>
-	<issueManagement>
-                <system>JIRA</system>
-                <url>https://issues.scala-lang.org/</url>
-	</issueManagement>
-
-	<dependencies>
-		<dependency>
-			<groupId>org.scala-lang</groupId>
-			<artifactId>scala-compiler</artifactId>
-			<version>@VERSION@</version>
-		</dependency>
-	</dependencies>
-	<distributionManagement>
-		<repository>
-			<id>scala-tools.org</id>
-			<url>@RELEASE_REPOSITORY@</url>
-		</repository>
-		<snapshotRepository>
-			<id>scala-tools.org</id>
-			<url>@SNAPSHOT_REPOSITORY@</url>
-			<uniqueVersion>false</uniqueVersion>
-		</snapshotRepository>
-	</distributionManagement>
-  <developers>
-    <developer>
-      <id>lamp</id>
-      <name>EPFL LAMP</name>
-    </developer>
-    <developer>
-      <id>Typesafe</id>
-      <name>Typesafe, Inc.</name>
-    </developer>
-  </developers>
-</project>
diff --git a/src/build/maven/scala-reflect-pom.xml b/src/build/maven/scala-reflect-pom.xml
index 56d2ffc..c21caef 100644
--- a/src/build/maven/scala-reflect-pom.xml
+++ b/src/build/maven/scala-reflect-pom.xml
@@ -1,56 +1,43 @@
-<project
-        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>org.scala-lang</groupId>
-	<artifactId>scala-reflect</artifactId>
-	<packaging>jar</packaging>
-	<version>@VERSION@</version>
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.scala-lang</groupId>
+  <artifactId>scala-reflect</artifactId>
+  <packaging>jar</packaging>
+  <version>@VERSION@</version>
   <name>Scala Compiler</name>
   <description>Compiler for the Scala Programming Language</description>
-	<url>http://www.scala-lang.org/</url>
-	<inceptionYear>2002</inceptionYear>
-	<organization>
-		<name>LAMP/EPFL</name>
-		<url>http://lamp.epfl.ch/</url>
-	</organization>
-	<licenses>
-		<license>
-			<name>BSD-like</name>
-			<url>http://www.scala-lang.org/downloads/license.html
-			</url>
-			<distribution>repo</distribution>
-		</license>
-	</licenses>
-	<scm>
-                <connection>scm:git:git://github.com/scala/scala.git</connection>
-                <url>https://github.com/scala/scala.git</url>
-	</scm>
-	<issueManagement>
-                <system>JIRA</system>
-                <url>https://issues.scala-lang.org/</url>
-	</issueManagement>
+  <url>http://www.scala-lang.org/</url>
+  <inceptionYear>2002</inceptionYear>
+  <organization>
+    <name>LAMP/EPFL</name>
+    <url>http://lamp.epfl.ch/</url>
+  </organization>
+  <licenses>
+    <license>
+      <name>BSD 3-Clause</name>
+      <url>http://www.scala-lang.org/license.html</url>
+      <distribution>repo</distribution>
+    </license>
+  </licenses>
+  <scm>
+    <connection>scm:git:git://github.com/scala/scala.git</connection>
+    <url>https://github.com/scala/scala.git</url>
+  </scm>
+  <issueManagement>
+    <system>JIRA</system>
+    <url>https://issues.scala-lang.org/</url>
+  </issueManagement>
   <properties>
-      <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+    <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
   </properties>
-	<dependencies>
-		<dependency>
-			<groupId>org.scala-lang</groupId>
-			<artifactId>scala-library</artifactId>
-			<version>@VERSION@</version>
-		</dependency>
-	</dependencies>
-	<distributionManagement>
-		<repository>
-			<id>scala-tools.org</id>
-			<url>@RELEASE_REPOSITORY@</url>
-		</repository>
-		<snapshotRepository>
-			<id>scala-tools.org</id>
-			<url>@SNAPSHOT_REPOSITORY@</url>
-			<uniqueVersion>false</uniqueVersion>
-		</snapshotRepository>
-	</distributionManagement>
+  <dependencies>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-library</artifactId>
+      <version>@VERSION@</version>
+    </dependency>
+  </dependencies>
   <developers>
     <developer>
       <id>lamp</id>
diff --git a/src/build/maven/scala-swing-pom.xml b/src/build/maven/scala-swing-pom.xml
deleted file mode 100644
index 5099fe1..0000000
--- a/src/build/maven/scala-swing-pom.xml
+++ /dev/null
@@ -1,64 +0,0 @@
-<project
-        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>org.scala-lang</groupId>
-	<artifactId>scala-swing</artifactId>
-	<packaging>jar</packaging>
-	<version>@VERSION@</version>
-  <name>Scala Swing library</name>
-  <description>Swing for Scala</description>
-	<url>http://www.scala-lang.org/</url>
-   <inceptionYear>2002</inceptionYear>
-   <organization>
-      <name>LAMP/EPFL</name>
-      <url>http://lamp.epfl.ch/</url>
-   </organization>
-   <licenses>
-      <license>
-         <name>BSD-like</name>
-         <url>http://www.scala-lang.org/downloads/license.html
-         </url>
-         <distribution>repo</distribution>
-      </license>
-   </licenses>
-   <scm>
-      <connection>scm:git:git://github.com/scala/scala.git</connection>
-      <url>https://github.com/scala/scala.git</url>
-   </scm>
-   <issueManagement>
-      <system>JIRA</system>
-      <url>https://issues.scala-lang.org/</url>
-   </issueManagement>	
-   <properties>
-       <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
-   </properties>
-	<dependencies>
-		<dependency>
-			<groupId>org.scala-lang</groupId>
-			<artifactId>scala-library</artifactId>
-			<version>@VERSION@</version>
-		</dependency>
-	</dependencies>
-	<distributionManagement>
-		<repository>
-			<id>scala-tools.org</id>
-			<url>@RELEASE_REPOSITORY@</url>
-		</repository>
-		<snapshotRepository>
-			<id>scala-tools.org</id>
-			<url>@SNAPSHOT_REPOSITORY@</url>
-			<uniqueVersion>false</uniqueVersion>
-		</snapshotRepository>
-	</distributionManagement>
-  <developers>
-    <developer>
-      <id>lamp</id>
-      <name>EPFL LAMP</name>
-    </developer>
-    <developer>
-      <id>Typesafe</id>
-      <name>Typesafe, Inc.</name>
-    </developer>
-  </developers>
-</project>
diff --git a/src/build/maven/scalap-pom.xml b/src/build/maven/scalap-pom.xml
index 50c08e8..236ac99 100644
--- a/src/build/maven/scalap-pom.xml
+++ b/src/build/maven/scalap-pom.xml
@@ -1,54 +1,40 @@
-<project
-        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>org.scala-lang</groupId>
-	<artifactId>scalap</artifactId>
-	<packaging>jar</packaging>
-	<version>@VERSION@</version>
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.scala-lang</groupId>
+  <artifactId>scalap</artifactId>
+  <packaging>jar</packaging>
+  <version>@VERSION@</version>
   <name>Scalap</name>
   <description>bytecode analysis tool</description>
-	<url>http://www.scala-lang.org/</url>
-	<inceptionYear>2002</inceptionYear>
-	<organization>
-		<name>LAMP/EPFL</name>
-		<url>http://lamp.epfl.ch/</url>
-	</organization>
-	<licenses>
-		<license>
-			<name>BSD-like</name>
-			<url>http://www.scala-lang.org/downloads/license.html
-			</url>
-			<distribution>repo</distribution>
-		</license>
-	</licenses>
-	<scm>
-                <connection>scm:git:git://github.com/scala/scala.git</connection>
-                <url>https://github.com/scala/scala.git</url>
-	</scm>
-	<issueManagement>
-                <system>JIRA</system>
-                <url>https://issues.scala-lang.org/</url>
-	</issueManagement>
-
-	<dependencies>
-		<dependency>
-			<groupId>org.scala-lang</groupId>
-			<artifactId>scala-compiler</artifactId>
-			<version>@VERSION@</version>
-		</dependency>
-	</dependencies>
-	<distributionManagement>
-		<repository>
-			<id>scala-tools.org</id>
-			<url>@RELEASE_REPOSITORY@</url>
-		</repository>
-		<snapshotRepository>
-			<id>scala-tools.org</id>
-			<url>@SNAPSHOT_REPOSITORY@</url>
-			<uniqueVersion>false</uniqueVersion>
-		</snapshotRepository>
-	</distributionManagement>
+  <url>http://www.scala-lang.org/</url>
+  <inceptionYear>2002</inceptionYear>
+  <organization>
+    <name>LAMP/EPFL</name>
+    <url>http://lamp.epfl.ch/</url>
+  </organization>
+  <licenses>
+    <license>
+      <name>BSD 3-Clause</name>
+      <url>http://www.scala-lang.org/license.html</url>
+      <distribution>repo</distribution>
+    </license>
+  </licenses>
+  <scm>
+    <connection>scm:git:git://github.com/scala/scala.git</connection>
+    <url>https://github.com/scala/scala.git</url>
+  </scm>
+  <issueManagement>
+    <system>JIRA</system>
+    <url>https://issues.scala-lang.org/</url>
+  </issueManagement>
+  <dependencies>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-compiler</artifactId>
+      <version>@VERSION@</version>
+    </dependency>
+  </dependencies>
   <developers>
     <developer>
       <id>lamp</id>
diff --git a/src/build/pack.xml b/src/build/pack.xml
deleted file mode 100644
index 8aedd3f..0000000
--- a/src/build/pack.xml
+++ /dev/null
@@ -1,274 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="sabbus-pack">
-
-  <description>
-    SuperSabbus extension for packaging a distribution. THIS FILE IS NOT STAND-ALONE AND SHOULD ONLY BE USED THROUGH ENTRY POINTS IN SUPERSABBUS.
-  </description>
-  
-<!-- ===========================================================================
-PROPERTIES
-============================================================================ -->
-  
-  <property file="${basedir}/build.number.maven"/>
-  <!-- the maven stuff requires version.major, version.minor and version.patch properties.
-       the "get-scala-revision" script only returns "version.number" -->
-  
-<!-- ===========================================================================
-MAIN DISTRIBUTION PACKAGING
-============================================================================ -->
-  
-  <target name="pack-archives.start">
-    <mkdir dir="${dists.dir}/archives"/>
-  </target>
-  
-  <target name="pack-archives.tar" depends="pack-archives.start">
-    <tar destfile="${dists.dir}/archives/${dist.name}.tar"
-         compression="none" longfile="gnu">
-      <tarfileset dir="${dist.dir}" prefix="${dist.name}" includes="bin/**" mode="755"/>
-      <tarfileset dir="${dist.dir}" prefix="${dist.name}" excludes="bin/**"/>
-    </tar>
-    <gzip src="${dists.dir}/archives/${dist.name}.tar" destfile="${dists.dir}/archives/${dist.name}.tgz"/>
-    <if>
-      <not><equals arg1="${archives.skipxz}" arg2="true" /></not>
-      <then>
-        <exec executable="xz" failifexecutionfails="false">
-          <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}.tar"/>
-        </exec>
-        <move file="${dists.dir}/archives/${dist.name}.tar.xz" tofile="${dists.dir}/archives/${dist.name}.txz" failonerror="false"/>
-      </then>
-    </if>
-    <delete file="${dists.dir}/archives/${dist.name}.tar" />
-    <checksum fileext=".md5">
-      <fileset dir="${dists.dir}/archives">
-        <include name="${dist.name}.t?z"/>
-      </fileset>
-    </checksum>
-  </target>
-  
-  <target name="pack-archives.zip" depends="pack-archives.tar">
-    <zip destfile="${dists.dir}/archives/${dist.name}.zip">
-      <zipfileset prefix="${dist.name}" dir="${dist.dir}"/>
-    </zip>
-    <checksum file="${dists.dir}/archives/${dist.name}.zip" fileext=".md5"/>
-  </target>
-
-  <target name="pack-devel-docs.tar" depends="pack-archives.zip">
-    <tar destfile="${dists.dir}/archives/${dist.name}-devel-docs.tar"
-         compression="none" longfile="gnu">
-      <tarfileset dir="${dist.dir}/api" prefix="${dist.name}-devel-docs"/>
-    </tar>
-    <gzip src="${dists.dir}/archives/${dist.name}-devel-docs.tar" destfile="${dists.dir}/archives/${dist.name}-devel-docs.tgz"/>
-    <if>
-      <not><equals arg1="${archives.skipxz}" arg2="true" /></not>
-      <then>
-        <exec executable="xz" failifexecutionfails="false">
-          <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}-devel-docs.tar"/>
-        </exec>
-        <move file="${dists.dir}/archives/${dist.name}-devel-docs.tar.xz" tofile="${dists.dir}/archives/${dist.name}-devel-docs.txz" failonerror="false"/>
-      </then>
-    </if>
-    <delete file="${dists.dir}/archives/${dist.name}-devel-docs.tar" />
-    <checksum fileext=".md5">
-      <fileset dir="${dists.dir}/archives">
-        <include name="${dist.name}-devel-docs.t?z"/>
-      </fileset>
-    </checksum>
-  </target>
-  
-  <target name="pack-archives.src" depends="pack-devel-docs.tar">
-    <tar destfile="${dists.dir}/archives/${dist.name}-sources.tar"
-         compression="none" longfile="gnu">
-      <tarfileset dir="${basedir}" prefix="${dist.name}-sources">
-        <exclude name="bin/**"/>
-        <exclude name="build/**"/>
-        <exclude name="debian/**"/>
-        <exclude name="dists/**"/>
-        <exclude name="logs/**"/>
-        <exclude name="sandbox/**"/>
-        <exclude name="test/partest"/>
-        <exclude name=".git"/>
-      </tarfileset>
-      <tarfileset dir="${basedir}" prefix="${dist.name}-sources" filemode="755">
-        <include name="test/partest"/>
-      </tarfileset>
-    </tar>
-    <gzip src="${dists.dir}/archives/${dist.name}-sources.tar" destfile="${dists.dir}/archives/${dist.name}-sources.tgz"/>
-    <if>
-      <not><equals arg1="${archives.skipxz}" arg2="true" /></not>
-      <then>
-        <exec executable="xz" failifexecutionfails="false">
-          <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}-sources.tar"/>
-        </exec>
-        <move file="${dists.dir}/archives/${dist.name}-sources.tar.xz" tofile="${dists.dir}/archives/${dist.name}-sources.txz" failonerror="false"/>
-      </then>
-    </if>
-    <delete file="${dists.dir}/archives/${dist.name}-sources.tar" />
-    <checksum fileext=".md5">
-      <fileset dir="${dists.dir}/archives">
-        <include name="${dist.name}-sources.t?z"/>
-      </fileset>
-    </checksum>
-  </target>
-
-  <target name="pack-archives.latest.unix" depends="pack-archives.src" unless="os.win">
-    <!-- be sure to use a relative symlink to make the distribution portable,
-        `resource` is relative to directory of `link` -->
-    <symlink link="${dists.dir}/archives/scala-latest-sources.tgz"
-             resource="scala-${version.number}-sources.tgz"
-             overwrite="yes"/>
-  </target>
-
-  <target name="pack-archives.latest.win" depends="pack-archives.src" if="os.win">
-    <copy tofile="${dists.dir}/archives/scala-latest-sources.tgz">
-      <fileset dir="${dists.dir}/archives">
-         <include name="scala-${version.number}-sources.tgz"/>
-      </fileset>
-    </copy>
-  </target>
-  
-  <target name="pack-archives.done" depends="pack-archives.src, pack-archives.latest.win, pack-archives.latest.unix"/>
-
-  <target name="pack-maven.start">
-    <mkdir dir="${dists.dir}/maven/${version.number}"/>
-  </target>
-
-  <target name="pack-maven.libs" depends="pack-maven.start">
-    <macrodef name="mvn-copy-lib">
-      <attribute name="mvn.artifact.name"/>
-      <sequential>
-        <mkdir dir="${dists.dir}/maven/${version.number}/@{mvn.artifact.name}"/>
-        <copy todir="${dists.dir}/maven/${version.number}/@{mvn.artifact.name}">
-          <fileset dir="${dist.dir}/lib/">
-            <filename name="@{mvn.artifact.name}.jar"/>
-          </fileset>
-          <fileset dir="${src.dir}/build/maven/">
-            <filename name="@{mvn.artifact.name}-pom.xml"/>
-          </fileset>
-          <fileset dir="${dist.dir}/src/">
-            <filename name="@{mvn.artifact.name}-src.jar"/>
-          </fileset>
-        </copy>
-      </sequential>
-    </macrodef>
-    <mvn-copy-lib mvn.artifact.name="jline"/>
-    <mvn-copy-lib mvn.artifact.name="scala-library"/>
-    <mvn-copy-lib mvn.artifact.name="scala-reflect"/>
-    <mvn-copy-lib mvn.artifact.name="scala-compiler"/>
-    <mvn-copy-lib mvn.artifact.name="scala-swing"/>
-    <mvn-copy-lib mvn.artifact.name="scala-actors"/>
-    <mvn-copy-lib mvn.artifact.name="scala-partest"/>
-    <mvn-copy-lib mvn.artifact.name="scalap"/>
-  </target>
-
-  <target name="pack-maven.plugins" depends="pack-maven.start">
-    <macrodef name="mvn-copy-plugin">
-      <attribute name="mvn.artifact.name"/>
-      <sequential>
-        <mkdir dir="${dists.dir}/maven/${version.number}/plugins/@{mvn.artifact.name}"/>
-        <copy todir="${dists.dir}/maven/${version.number}/plugins/@{mvn.artifact.name}">
-          <fileset dir="${dist.dir}/misc/scala-devel/plugins/">
-            <filename name="@{mvn.artifact.name}.jar"/>
-          </fileset>
-          <fileset dir="${src.dir}/build/maven/">
-            <filename name="@{mvn.artifact.name}-plugin-pom.xml"/>
-          </fileset>
-        </copy>
-      </sequential>
-    </macrodef>
-    <mvn-copy-plugin mvn.artifact.name="continuations"/>
-  </target>
-
-  <target name="pack-maven.srcs" depends="pack-maven.libs">
-    <!-- Add missing src jars. -->
-    <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/jline/jline-src.jar"
-         basedir="${src.dir}/jline/src/main/java">
-      <include name="**/*"/>
-    </jar>
-
-
-    <!-- Continuations plugin -->
-    <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-src.jar"
-         basedir="${src.dir}/continuations/plugin">
-      <include name="**/*"/>
-    </jar>
-  </target>
-
-  <target name="pack-maven.docs" depends="pack-maven.libs, pack-maven.plugins">
-    <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/jline/jline-docs.jar"
-         basedir="${build-docs.dir}/jline">
-      <include name="**/*"/>
-    </jar>
-    <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"
-         basedir="${build-docs.dir}/library">
-      <include name="**/*"/>
-    </jar>
-    <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-compiler/scala-compiler-docs.jar"
-         basedir="${build-docs.dir}/compiler">
-      <include name="**/*"/>
-    </jar>
-    <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scalap/scalap-docs.jar"
-         basedir="${build-docs.dir}/scalap">
-      <include name="**/*"/>
-    </jar>
-    <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-partest/scala-partest-docs.jar"
-         basedir="${build-docs.dir}/partest">
-      <include name="**/*"/>
-    </jar>
-    <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-docs.jar"
-         basedir="${build-docs.dir}/continuations-plugin">
-      <include name="**/*"/>
-    </jar>
-
-    <!-- TODO - Scala swing and actors should maybe have their own jar, but creating it is SLOW. -->
-    <copy tofile="${dists.dir}/maven/${version.number}/scala-swing/scala-swing-docs.jar"
-          file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
-    <copy tofile="${dists.dir}/maven/${version.number}/scala-actors/scala-actors-docs.jar"
-          file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
-    <copy tofile="${dists.dir}/maven/${version.number}/scala-reflect/scala-reflect-docs.jar"
-          file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
-  </target>
-
-  <target name="pack-maven.latest.unix" depends="pack-maven.docs" unless="os.win">
-    <symlink link="${dists.dir}/maven/latest"
-             resource="${version.number}"
-             overwrite="yes"/>
-  </target>
-
-  <target name="pack-maven.latest.win" depends="pack-maven.docs" if="os.win">
-    <copy todir="${dists.dir}/maven/latest">
-      <fileset dir="${dists.dir}/maven/${version.number}"/>
-    </copy>
-  </target>
-
-  <target name="pack-maven.scripts" depends="pack-maven.latest.unix,pack-maven.latest.win,pack-maven.srcs">
-    <copy todir="${dists.dir}/maven/${version.number}"
-          file="${lib-ant.dir}/maven-ant-tasks-2.1.1.jar"/>
-    <copy tofile="${dists.dir}/maven/${version.number}/build.xml"
-          file="${src.dir}/build/maven/maven-deploy.xml"/>
-    <!-- export properties for use when deploying -->
-    <echoproperties destfile="${dists.dir}/maven/${version.number}/build.properties"/>
-  </target>
-
-  <target name="pack-maven.done" depends="pack-maven.scripts"/>
-
-<!-- ===========================================================================
-MISCELLANEOUS
-============================================================================ -->
-
-  <target name="pack-all.done" depends="pack-archives.done, pack-maven.done"/>
-
-<!-- ===========================================================================
-MISCELLANEOUS
-============================================================================ -->
-
-  <target name="graph.init">
-    <echo message="${basedir}/lib/ant/vizant.jar"/>
-    <taskdef name="vizant" classname="vizant.Vizant" classpath="${basedir}/../../lib/ant/vizant.jar"/>
-  </target>
-  
-  <target name="graph.pack" depends="graph.init">
-    <vizant antfile="${ant.file}" outfile="${ant.project.name}.dot"/>
-  </target>
-  
-</project>
diff --git a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
new file mode 100644
index 0000000..1413065
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
@@ -0,0 +1,95 @@
+package scala.reflect.macros
+package compiler
+
+import scala.tools.nsc.Global
+
+abstract class DefaultMacroCompiler extends Resolvers
+                                       with Validators
+                                       with Errors {
+  val global: Global
+  import global._
+  import analyzer._
+  import treeInfo._
+  import definitions._
+  val runDefinitions = currentRun.runDefinitions
+  import runDefinitions.{Predef_???, _}
+
+  val typer: global.analyzer.Typer
+  val context = typer.context
+
+  val macroDdef: DefDef
+  lazy val macroDef = macroDdef.symbol
+
+  case class MacroImplRefCompiler(untypedMacroImplRef: Tree, isImplBundle: Boolean) extends Resolver with Validator with Error
+  private case class MacroImplResolutionException(pos: Position, msg: String) extends Exception
+  def abort(pos: Position, msg: String) = throw MacroImplResolutionException(pos, msg)
+
+  /** Resolves a macro impl reference provided in the right-hand side of the given macro definition.
+   *
+   *  Acceptable shapes of the right-hand side:
+   *    1) [<static object>].<method name>[[<type args>]] // vanilla macro impl ref
+   *    2) [<macro bundle>].<method name>[[<type args>]]  // shiny new macro bundle impl ref
+   *
+   *  Produces a tree, which represents a reference to a macro implementation if everything goes well,
+   *  otherwise reports found errors and returns EmptyTree. The resulting tree should have the following format:
+   *
+   *    qualifier.method[targs]
+   *
+   *  Qualifier here might be omitted (local macro defs), be a static object (vanilla macro defs)
+   *  or be a dummy instance of a macro bundle (e.g. new MyMacro(???).expand).
+   */
+  def resolveMacroImpl: Tree = {
+    def tryCompile(compiler: MacroImplRefCompiler): scala.util.Try[Tree] = {
+      try { compiler.validateMacroImplRef(); scala.util.Success(compiler.macroImplRef) }
+      catch { case ex: MacroImplResolutionException => scala.util.Failure(ex) }
+    }
+    val vanillaImplRef = MacroImplRefCompiler(macroDdef.rhs.duplicate, isImplBundle = false)
+    val (maybeBundleRef, methName, targs) = macroDdef.rhs.duplicate match {
+      case Applied(Select(Applied(RefTree(qual, bundleName), _, Nil), methName), targs, Nil) =>
+        (RefTree(qual, bundleName.toTypeName), methName, targs)
+      case Applied(Ident(methName), targs, Nil) =>
+        (Ident(context.owner.enclClass), methName, targs)
+      case _ =>
+        (EmptyTree, TermName(""), Nil)
+    }
+    val bundleImplRef = MacroImplRefCompiler(
+      atPos(macroDdef.rhs.pos)(gen.mkTypeApply(Select(New(maybeBundleRef, List(List(Ident(Predef_???)))), methName), targs)),
+      isImplBundle = true
+    )
+    val vanillaResult = tryCompile(vanillaImplRef)
+    val bundleResult = tryCompile(bundleImplRef)
+
+    def ensureUnambiguousSuccess() = {
+      // we now face a hard choice of whether to report ambiguity:
+      //   1) when there are eponymous methods in both bundle and object
+      //   2) when both references to eponymous methods are resolved successfully
+      // doing #1 would cause less confusion in the long run, but it would also cause more frequent source incompatibilities
+      // e.g. it would fail to compile https://github.com/ReifyIt/basis
+      // therefore here we go for #2
+      // if (vanillaImplRef.looksCredible && bundleImplRef.looksCredible) MacroImplAmbiguousError()
+      if (vanillaResult.isSuccess && bundleResult.isSuccess) MacroImplAmbiguousError()
+    }
+
+    def reportMostAppropriateFailure() = {
+      typer.silent(_.typedTypeConstructor(maybeBundleRef)) match {
+        case SilentResultValue(result) if looksLikeMacroBundleType(result.tpe) =>
+          val bundle = result.tpe.typeSymbol
+          if (!isMacroBundleType(bundle.tpe)) MacroBundleWrongShapeError()
+          if (!bundle.owner.isStaticOwner) MacroBundleNonStaticError()
+          bundleResult.get
+        case _ =>
+          vanillaResult.get
+      }
+    }
+
+    try {
+      if (vanillaResult.isSuccess || bundleResult.isSuccess) ensureUnambiguousSuccess()
+      if (vanillaResult.isFailure && bundleResult.isFailure) reportMostAppropriateFailure()
+      vanillaResult.orElse(bundleResult).get
+    } catch {
+      case MacroImplResolutionException(pos, msg) =>
+        context.error(pos, msg)
+        EmptyTree
+    }
+  }
+}
\ No newline at end of file
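
    For reference, a minimal sketch (not part of the upstream patch) of the two reference
    shapes that resolveMacroImpl accepts, assuming scala-reflect and quasiquotes are on the
    classpath; the names VanillaImpls, BundleImpls and Macros are illustrative only:

        import scala.language.experimental.macros
        import scala.reflect.macros.blackbox.Context

        object VanillaImpls {
          // shape 1: vanilla impl living in a static object
          def helloImpl(c: Context)(): c.Expr[Unit] = {
            import c.universe._
            c.Expr[Unit](q"""println("hello")""")
          }
        }

        class BundleImpls(val c: Context) {
          // shape 2: macro bundle, the Context arrives through the constructor
          def hello(): c.Expr[Unit] = {
            import c.universe._
            c.Expr[Unit](q"""println("hello")""")
          }
        }

        object Macros {
          def hello1(): Unit = macro VanillaImpls.helloImpl  // [<static object>].<method name>
          def hello2(): Unit = macro BundleImpls.hello       // [<macro bundle>].<method name>
        }
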
diff --git a/src/compiler/scala/reflect/macros/compiler/Errors.scala b/src/compiler/scala/reflect/macros/compiler/Errors.scala
new file mode 100644
index 0000000..cc4508e
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/Errors.scala
@@ -0,0 +1,154 @@
+package scala.reflect.macros
+package compiler
+
+import scala.compat.Platform.EOL
+import scala.reflect.macros.util.Traces
+
+trait Errors extends Traces {
+  self: DefaultMacroCompiler =>
+
+  import global._
+  import analyzer._
+  import definitions._
+  import treeInfo._
+  import typer.TyperErrorGen._
+  import typer.infer.InferErrorGen._
+  import runDefinitions._
+  def globalSettings = global.settings
+
+  private def implRefError(message: String) = {
+    val Applied(culprit, _, _) = macroDdef.rhs
+    abort(culprit.pos, message)
+  }
+
+  private def bundleRefError(message: String) = {
+    val Applied(core, _, _) = macroDdef.rhs
+    val culprit = core match {
+      case Select(Applied(core, _, _), _) => core
+      case _ => core
+    }
+    abort(culprit.pos, message)
+  }
+
+  def MacroImplAmbiguousError() = implRefError(
+    "macro implementation reference is ambiguous: makes sense both as\n"+
+    "a macro bundle method reference and a vanilla object method reference")
+
+  def MacroBundleNonStaticError() = bundleRefError("macro bundles must be static")
+
+  def MacroBundleWrongShapeError() = bundleRefError("macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter")
+
+  trait Error {
+    self: MacroImplRefCompiler =>
+
+    // sanity check errors
+
+    def MacroImplReferenceWrongShapeError() = implRefError(
+      "macro implementation reference has wrong shape. required:\n"+
+      "macro [<static object>].<method name>[[<type args>]] or\n" +
+      "macro [<macro bundle>].<method name>[[<type args>]]")
+
+    def MacroImplWrongNumberOfTypeArgumentsError() = {
+      val diagnostic = if (macroImpl.typeParams.length > targs.length) "has too few type arguments" else "has too many type arguments"
+      implRefError(s"macro implementation reference $diagnostic for " + treeSymTypeMsg(macroImplRef))
+    }
+
+    private def macroImplementationWording =
+      if (isImplBundle) "bundle implementation"
+      else "macro implementation"
+
+    def MacroImplNotPublicError() = implRefError(s"${macroImplementationWording} must be public")
+
+    def MacroImplOverloadedError() = implRefError(s"${macroImplementationWording} cannot be overloaded")
+
+    def MacroImplNonTagImplicitParameters(params: List[Symbol]) = implRefError(s"${macroImplementationWording}s cannot have implicit parameters other than WeakTypeTag evidences")
+
+    // compatibility errors
+
+    // helpers
+
+    private def lengthMsg(flavor: String, violation: String, extra: Symbol) = {
+      val noun = if (flavor == "value") "parameter" else "type parameter"
+      val message = noun + " lists have different length, " + violation + " extra " + noun
+      val suffix = if (extra ne NoSymbol) " " + extra.defString else ""
+      message + suffix
+    }
+
+    private def abbreviateCoreAliases(s: String): String = {
+      val coreAliases = List("WeakTypeTag", "Expr", "Tree")
+      coreAliases.foldLeft(s)((res, x) => res.replace("c.universe." + x, "c." + x))
+    }
+
+    private def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean, untype: Boolean) = {
+      def preprocess(tpe: Type) = if (untype) untypeMetalevel(tpe) else tpe
+      var pssPart = (pss map (ps => ps map (p => p.defStringSeenAs(preprocess(p.info))) mkString ("(", ", ", ")"))).mkString
+      if (abbreviate) pssPart = abbreviateCoreAliases(pssPart)
+      var retPart = preprocess(restpe).toString
+      if (abbreviate || macroDdef.tpt.tpe == null) retPart = abbreviateCoreAliases(retPart)
+      pssPart + ": " + retPart
+    }
+
+    // not exactly an error generator, but very related
+    // and I dearly wanted to push it away from Macros.scala
+    private def checkConforms(slot: String, rtpe: Type, atpe: Type) = {
+      val verbose = macroDebugVerbose
+
+      def check(rtpe: Type, atpe: Type): Boolean = {
+        def success() = { if (verbose) println(rtpe + " <: " + atpe + "?" + EOL + "true"); true }
+        (rtpe, atpe) match {
+          case _ if rtpe eq atpe => success()
+          case (TypeRef(_, RepeatedParamClass, rtpe :: Nil), TypeRef(_, RepeatedParamClass, atpe :: Nil)) => check(rtpe, atpe)
+          case (ExprClassOf(_), TreeType()) if rtpe.prefix =:= atpe.prefix => success()
+          case (SubtreeType(), ExprClassOf(_)) if rtpe.prefix =:= atpe.prefix => success()
+          case _ => rtpe <:< atpe
+        }
+      }
+
+      val ok =
+        if (verbose) withTypesExplained(check(rtpe, atpe))
+        else check(rtpe, atpe)
+      if (!ok) {
+        if (!verbose) explainTypes(rtpe, atpe)
+        val msg = {
+          val ss = Seq(rtpe, atpe) map (tpe => abbreviateCoreAliases(tpe.toString))
+          s"type mismatch for $slot: ${ss(0)} does not conform to ${ss(1)}"
+        }
+        compatibilityError(msg)
+      }
+    }
+
+    private def compatibilityError(message: String) =
+      implRefError(
+        s"${macroImplementationWording} has incompatible shape:"+
+        "\n required: " + showMeth(rparamss, rret, abbreviate = true, untype = false) +
+        "\n or      : " + showMeth(rparamss, rret, abbreviate = true, untype = true) +
+        "\n found   : " + showMeth(aparamss, aret, abbreviate = false, untype = false) +
+        "\n" + message)
+
+    def MacroImplParamssMismatchError() = compatibilityError("number of parameter sections differ")
+
+    def MacroImplExtraParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(lengthMsg("value", "found", aparams(rparams.length)))
+
+    def MacroImplMissingParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(abbreviateCoreAliases(lengthMsg("value", "required", rparams(aparams.length))))
+
+    def checkMacroImplParamTypeMismatch(atpe: Type, rparam: Symbol) = checkConforms("parameter " + rparam.name, rparam.tpe, atpe)
+
+    def checkMacroImplResultTypeMismatch(atpe: Type, rret: Type) = checkConforms("return type", atpe, rret)
+
+    def MacroImplParamNameMismatchError(aparam: Symbol, rparam: Symbol) = compatibilityError("parameter names differ: " + rparam.name + " != " + aparam.name)
+
+    def MacroImplVarargMismatchError(aparam: Symbol, rparam: Symbol) = {
+      def fail(paramName: Name) = compatibilityError("types incompatible for parameter " + paramName + ": corresponding is not a vararg parameter")
+      if (isRepeated(rparam) && !isRepeated(aparam)) fail(rparam.name)
+      if (!isRepeated(rparam) && isRepeated(aparam)) fail(aparam.name)
+    }
+
+    def MacroImplTargMismatchError(atargs: List[Type], atparams: List[Symbol]) =
+      compatibilityError(NotWithinBoundsErrorMessage("", atargs, atparams, macroDebugVerbose || settings.explaintypes.value))
+
+    def MacroImplTparamInstantiationError(atparams: List[Symbol], e: NoInstance) = {
+      val badps = atparams map (_.defString) mkString ", "
+      compatibilityError(f"type parameters $badps cannot be instantiated%n${e.getMessage}")
+    }
+  }
+}
diff --git a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
new file mode 100644
index 0000000..4484c23
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
@@ -0,0 +1,35 @@
+package scala.reflect.macros
+package compiler
+
+import scala.reflect.internal.Flags._
+import scala.reflect.macros.TypecheckException
+
+trait Resolvers {
+  self: DefaultMacroCompiler =>
+
+  import global._
+  import analyzer._
+  import definitions._
+  import treeInfo._
+  import gen._
+  import runDefinitions._
+
+  trait Resolver {
+    self: MacroImplRefCompiler =>
+
+    val isImplBundle: Boolean
+    val isImplMethod = !isImplBundle
+
+    lazy val looksCredible: Boolean = {
+      val Applied(core, _, _) = untypedMacroImplRef
+      typer.silent(_.typed(markMacroImplRef(core)), reportAmbiguousErrors = false).nonEmpty
+    }
+
+    lazy val (macroImplRef, isBlackbox, macroImplOwner, macroImpl, targs) =
+      typer.silent(_.typed(markMacroImplRef(untypedMacroImplRef)), reportAmbiguousErrors = false) match {
+        case SilentResultValue(macroImplRef @ MacroImplReference(_, isBlackbox, owner, meth, targs)) => (macroImplRef, isBlackbox, owner, meth, targs)
+        case SilentResultValue(macroImplRef) => MacroImplReferenceWrongShapeError()
+        case SilentTypeError(err) => abort(err.errPos, err.errMsg)
+      }
+  }
+}
diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala
new file mode 100644
index 0000000..a146818
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala
@@ -0,0 +1,201 @@
+package scala.reflect.macros
+package compiler
+
+import scala.reflect.internal.Flags._
+
+trait Validators {
+  self: DefaultMacroCompiler =>
+
+  import global._
+  import analyzer._
+  import definitions._
+  import runDefinitions.{Predef_???, _}
+
+  trait Validator {
+    self: MacroImplRefCompiler =>
+
+    def validateMacroImplRef() = {
+      sanityCheck()
+      if (macroImpl != Predef_???) checkMacroDefMacroImplCorrespondence()
+    }
+
+    private def sanityCheck() = {
+      if (!macroImpl.isMethod) MacroImplReferenceWrongShapeError()
+      if (macroImpl.typeParams.length != targs.length) MacroImplWrongNumberOfTypeArgumentsError()
+      if (!macroImpl.isPublic) MacroImplNotPublicError()
+      if (macroImpl.isOverloaded) MacroImplOverloadedError()
+      val implicitParams = aparamss.flatten filter (_.isImplicit)
+      if (implicitParams.nonEmpty) MacroImplNonTagImplicitParameters(implicitParams)
+      val effectiveOwner = if (isImplMethod) macroImplOwner else macroImplOwner.owner
+      val effectivelyStatic = effectiveOwner.isStaticOwner || effectiveOwner.moduleClass.isStaticOwner
+      val correctBundleness = if (isImplMethod) macroImplOwner.isModuleClass else macroImplOwner.isClass && !macroImplOwner.isModuleClass
+      if (!effectivelyStatic || !correctBundleness) MacroImplReferenceWrongShapeError()
+    }
+
+    private def checkMacroDefMacroImplCorrespondence() = {
+      val atvars = atparams map freshVar
+      def atpeToRtpe(atpe: Type) = atpe.substSym(aparamss.flatten, rparamss.flatten).instantiateTypeParams(atparams, atvars)
+
+      // we only check strict correspondence between value parameter lists
+      // type parameters of macro defs and macro impls don't have to coincide with each other
+      if (aparamss.length != rparamss.length) MacroImplParamssMismatchError()
+      map2(aparamss, rparamss)((aparams, rparams) => {
+        if (aparams.length < rparams.length) MacroImplMissingParamsError(aparams, rparams)
+        if (rparams.length < aparams.length) MacroImplExtraParamsError(aparams, rparams)
+      })
+
+      try {
+        // cannot fuse this map2 and the map2 above because if aparamss.flatten != rparamss.flatten
+        // then `atpeToRtpe` is going to fail with an unsound substitution
+        map2(aparamss.flatten, rparamss.flatten)((aparam, rparam) => {
+          if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam)
+          if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam)
+          val aparamtpe = aparam.tpe match {
+            case MacroContextType(tpe) => tpe
+            case tpe => tpe
+          }
+          checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam)
+        })
+
+        checkMacroImplResultTypeMismatch(atpeToRtpe(aret), rret)
+
+        val maxLubDepth = lubDepth(aparamss.flatten map (_.tpe)) max lubDepth(rparamss.flatten map (_.tpe))
+        val atargs = solvedTypes(atvars, atparams, atparams map varianceInType(aret), upper = false, maxLubDepth)
+        val boundsOk = typer.silent(_.infer.checkBounds(macroDdef, NoPrefix, NoSymbol, atparams, atargs, ""))
+        boundsOk match {
+          case SilentResultValue(true) => // do nothing, success
+          case SilentResultValue(false) | SilentTypeError(_) => MacroImplTargMismatchError(atargs, atparams)
+        }
+      } catch {
+        case ex: NoInstance => MacroImplTparamInstantiationError(atparams, ex)
+      }
+    }
+
+    // aXXX (e.g. aparamss) => characteristics of the actual macro impl signature extracted from the macro impl ("a" stands for "actual")
+    // rXXX (e.g. rparamss) => characteristics of the reference macro impl signature synthesized from the macro def ("r" stands for "reference")
+    // FIXME: cannot write this concisely because of SI-7507
+    //lazy val MacroImplSig(atparams, aparamss, aret) = macroImplSig
+    //lazy val MacroImplSig(_, rparamss, rret) = referenceMacroImplSig
+    lazy val atparams = macroImplSig.tparams
+    lazy val aparamss = macroImplSig.paramss
+    lazy val aret = macroImplSig.ret
+    lazy val rparamss = referenceMacroImplSig.paramss
+    lazy val rret = referenceMacroImplSig.ret
+
+    // Technically this can be just an alias to MethodType, but promoting it to a first-class entity
+    // provides better encapsulation and convenient syntax for pattern matching.
+    private case class MacroImplSig(tparams: List[Symbol], paramss: List[List[Symbol]], ret: Type) {
+      private def tparams_s = if (tparams.isEmpty) "" else tparams.map(_.defString).mkString("[", ", ", "]")
+      private def paramss_s = paramss map (ps => ps.map(s => s"${s.name}: ${s.tpe_*}").mkString("(", ", ", ")")) mkString ""
+      override def toString = "MacroImplSig(" + tparams_s + paramss_s + ret + ")"
+    }
+
+    /** An actual macro implementation signature extracted from a macro implementation method.
+     *
+     *  For the following macro impl:
+     *    def fooBar[T: c.WeakTypeTag]
+     *           (c: scala.reflect.macros.blackbox.Context)
+     *           (xs: c.Expr[List[T]])
+     *           : c.Expr[T] = ...
+     *
+     *  This function will return:
+     *    (c: scala.reflect.macros.blackbox.Context)(xs: c.Expr[List[T]])c.Expr[T]
+     *
+     *  Note that type tag evidence parameters are not included in the result.
+     *  Type tag context bounds for macro impl tparams are optional.
+     *  Therefore compatibility checks ignore such parameters, and we don't need to bother about them here.
+     *
+     *  This method cannot be reduced to just macroImpl.info, because macro implementations might
+     *  come in different shapes. If the implementation is an apply method of a *box.Macro-compatible object,
+     *  then it won't have (c: *box.Context) in its parameters, but will rather refer to *boxMacro.c.
+     *
+     *  @param macroImpl The macro implementation symbol
+     */
+    private lazy val macroImplSig: MacroImplSig = {
+      val tparams = macroImpl.typeParams
+      val paramss = transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => NoSymbol)
+      val ret = macroImpl.info.finalResultType
+      MacroImplSig(tparams, paramss, ret)
+    }
+
+    /** A reference macro implementation signature extracted from a given macro definition.
+     *
+     *  For the following macro def:
+     *    def foo[T](xs: List[T]): T = macro fooBar
+     *
+     *  This function will return:
+     *    (c: scala.reflect.macros.blackbox.Context)(xs: c.Expr[List[T]])c.Expr[T] or
+     *    (c: scala.reflect.macros.whitebox.Context)(xs: c.Expr[List[T]])c.Expr[T]
+     *
+     *  Note that type tag evidence parameters are not included in the result.
+     *  Type tag context bounds for macro impl tparams are optional.
+     *  Therefore compatibility checks ignore such parameters, and we don't need to bother about them here.
+     *
+     *  Also note that we need a DefDef, not the corresponding MethodSymbol, because that symbol would be of no use for us.
+     *  Macro signatures are verified when typechecking macro defs, which means that at that moment inspecting macroDef.info
+     *  means asking for cyclic reference errors.
+     *
+     *  We need macro implementation symbol as well, because the return type of the macro definition might be omitted,
+     *  and in that case we'd need to infer it from the return type of the macro implementation. Luckily for us, we can
+     *  use that symbol without a risk of running into cycles.
+     *
+     *  @param typer     Typechecker of `macroDdef`
+     *  @param macroDdef The macro definition tree
+     *  @param macroImpl The macro implementation symbol
+     */
+    private lazy val referenceMacroImplSig: MacroImplSig = {
+      // had to move method's body to an object because of the recursive dependencies between sigma and param
+      object SigGenerator {
+        val cache = scala.collection.mutable.Map[Symbol, Symbol]()
+        val ctxTpe = if (isBlackbox) BlackboxContextClass.tpe else WhiteboxContextClass.tpe
+        val ctxPrefix =
+          if (isImplMethod) singleType(NoPrefix, makeParam(nme.macroContext, macroDdef.pos, ctxTpe, SYNTHETIC))
+          else singleType(ThisType(macroImpl.owner), macroImpl.owner.tpe.member(nme.c))
+        val paramss =
+          if (isImplMethod) List(ctxPrefix.termSymbol) :: mmap(macroDdef.vparamss)(param)
+          else mmap(macroDdef.vparamss)(param)
+        val macroDefRet =
+          if (!macroDdef.tpt.isEmpty) typer.typedType(macroDdef.tpt).tpe
+          else computeMacroDefTypeFromMacroImplRef(macroDdef, macroImplRef) orElse AnyTpe
+        val implReturnType = sigma(increaseMetalevel(ctxPrefix, macroDefRet))
+
+        object SigmaTypeMap extends TypeMap {
+          def mapPrefix(pre: Type) = pre match {
+            case ThisType(sym) if sym == macroDef.owner =>
+              singleType(singleType(ctxPrefix, MacroContextPrefix), ExprValue)
+            case SingleType(NoPrefix, sym) =>
+              mfind(macroDdef.vparamss)(_.symbol == sym).fold(pre)(p => singleType(singleType(NoPrefix, param(p)), ExprValue))
+            case _ =>
+              mapOver(pre)
+          }
+          def apply(tp: Type): Type = tp match {
+            case TypeRef(pre, sym, args) =>
+              val pre1  = mapPrefix(pre)
+              val args1 = mapOverArgs(args, sym.typeParams)
+              if ((pre eq pre1) && (args eq args1)) tp
+              else typeRef(pre1, sym, args1)
+            case _ =>
+              mapOver(tp)
+          }
+        }
+        def sigma(tpe: Type): Type = SigmaTypeMap(tpe)
+
+        def makeParam(name: Name, pos: Position, tpe: Type, flags: Long) =
+          macroDef.newValueParameter(name.toTermName, pos, flags) setInfo tpe
+        def param(tree: Tree): Symbol = (
+          cache.getOrElseUpdate(tree.symbol, {
+            val sym = tree.symbol
+            assert(sym.isTerm, s"sym = $sym, tree = $tree")
+            makeParam(sym.name, sym.pos, sigma(increaseMetalevel(ctxPrefix, sym.tpe)), sym.flags)
+          })
+        )
+      }
+
+      import SigGenerator._
+      macroLogVerbose(s"generating macroImplSigs for: $macroDdef")
+      val result = MacroImplSig(macroDdef.tparams map (_.symbol), paramss, implReturnType)
+      macroLogVerbose(s"result is: $result")
+      result
+    }
+  }
+}
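
    To make the aXXX/rXXX correspondence above concrete, here is a minimal def/impl pair
    (a sketch, not taken from the upstream sources) whose actual and reference signatures
    line up as the checks require; FooBarImpls and FooBar are illustrative names:

        import scala.language.experimental.macros
        import scala.reflect.macros.blackbox.Context

        object FooBarImpls {
          // actual signature: (c: Context)(xs: c.Expr[List[T]])c.Expr[T]
          def fooBar[T: c.WeakTypeTag](c: Context)(xs: c.Expr[List[T]]): c.Expr[T] = {
            import c.universe._
            c.Expr[T](q"${xs.tree}.head")
          }
        }

        object FooBar {
          // the reference signature is synthesized from this macro def
          def foo[T](xs: List[T]): T = macro FooBarImpls.fooBar[T]
        }
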
diff --git a/src/compiler/scala/reflect/macros/contexts/Aliases.scala b/src/compiler/scala/reflect/macros/contexts/Aliases.scala
new file mode 100644
index 0000000..cc64d97
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Aliases.scala
@@ -0,0 +1,35 @@
+package scala.reflect.macros
+package contexts
+
+trait Aliases {
+  self: Context =>
+
+  override type Symbol = universe.Symbol
+  override type Type = universe.Type
+  override type Name = universe.Name
+  override type TermName = universe.TermName
+  override type TypeName = universe.TypeName
+  override type Tree = universe.Tree
+  override type Position = universe.Position
+  override type Scope = universe.Scope
+  override type Modifiers = universe.Modifiers
+
+  override type Expr[+T] = universe.Expr[T]
+  override val Expr = universe.Expr
+  def Expr[T: WeakTypeTag](tree: Tree): Expr[T] = universe.Expr[T](mirror, universe.FixedMirrorTreeCreator(mirror, tree))
+
+  override type WeakTypeTag[T] = universe.WeakTypeTag[T]
+  override type TypeTag[T] = universe.TypeTag[T]
+  override val WeakTypeTag = universe.WeakTypeTag
+  override val TypeTag = universe.TypeTag
+  def WeakTypeTag[T](tpe: Type): WeakTypeTag[T] = universe.WeakTypeTag[T](mirror, universe.FixedMirrorTypeCreator(mirror, tpe))
+  def TypeTag[T](tpe: Type): TypeTag[T] = universe.TypeTag[T](mirror, universe.FixedMirrorTypeCreator(mirror, tpe))
+  override def weakTypeTag[T](implicit attag: WeakTypeTag[T]) = attag
+  override def typeTag[T](implicit ttag: TypeTag[T]) = ttag
+  override def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
+  override def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
+
+  implicit class RichOpenImplicit(oi: universe.analyzer.OpenImplicit) {
+    def toImplicitCandidate = ImplicitCandidate(oi.info.pre, oi.info.sym, oi.pt, oi.tree)
+  }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/contexts/Context.scala b/src/compiler/scala/reflect/macros/contexts/Context.scala
new file mode 100644
index 0000000..f3dd29d
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Context.scala
@@ -0,0 +1,30 @@
+package scala.reflect.macros
+package contexts
+
+import scala.tools.nsc.Global
+
+abstract class Context extends scala.reflect.macros.blackbox.Context
+                          with scala.reflect.macros.whitebox.Context
+                          with Aliases
+                          with Enclosures
+                          with Names
+                          with Reifiers
+                          with FrontEnds
+                          with Infrastructure
+                          with Typers
+                          with Parsers
+                          with Evals
+                          with ExprUtils
+                          with Traces
+                          with Internals {
+
+  val universe: Global
+
+  val mirror: universe.Mirror = universe.rootMirror
+
+  val callsiteTyper: universe.analyzer.Typer
+
+  val prefix: Expr[PrefixType]
+
+  val expandee: Tree
+}
diff --git a/src/compiler/scala/reflect/macros/contexts/Enclosures.scala b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala
new file mode 100644
index 0000000..5e93181
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala
@@ -0,0 +1,32 @@
+package scala.reflect.macros
+package contexts
+
+import scala.reflect.{ClassTag, classTag}
+
+trait Enclosures {
+  self: Context =>
+
+  import universe._
+
+  private lazy val site       = callsiteTyper.context
+  private lazy val enclTrees  = site.enclosingContextChain map (_.tree)
+  private lazy val enclPoses  = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition)
+
+  private def lenientEnclosure[T <: Tree : ClassTag]: Tree = enclTrees collectFirst { case x: T => x } getOrElse EmptyTree
+  private def strictEnclosure[T <: Tree : ClassTag]: T = enclTrees collectFirst { case x: T => x } getOrElse (throw new EnclosureException(classTag[T].runtimeClass, enclTrees))
+
+  // vals are eager to simplify debugging
+  // after all we wouldn't save that much time by making them lazy
+  val macroApplication: Tree                      = expandee
+  def enclosingPackage: PackageDef                = strictEnclosure[PackageDef]
+  val enclosingClass: Tree                        = lenientEnclosure[ImplDef]
+  def enclosingImpl: ImplDef                      = strictEnclosure[ImplDef]
+  def enclosingTemplate: Template                 = strictEnclosure[Template]
+  val enclosingImplicits: List[ImplicitCandidate] = site.openImplicits.map(_.toImplicitCandidate)
+  val enclosingMacros: List[Context]              = this :: universe.analyzer.openMacros // include self
+  val enclosingMethod: Tree                       = lenientEnclosure[DefDef]
+  def enclosingDef: DefDef                        = strictEnclosure[DefDef]
+  val enclosingPosition: Position                 = if (enclPoses.isEmpty) NoPosition else enclPoses.head.pos
+  val enclosingUnit: CompilationUnit              = universe.currentRun.currentUnit
+  val enclosingRun: Run                           = universe.currentRun
+}
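
    A sketch (not from the upstream sources) of how a macro impl typically consumes these
    enclosures, here just the position of the expansion site:

        import scala.reflect.macros.blackbox.Context

        object WhereAmI {
          def lineImpl(c: Context): c.Expr[Int] = {
            import c.universe._
            // enclosingPosition picks the first real position among the enclosing macro applications
            c.Expr[Int](Literal(Constant(c.enclosingPosition.line)))
          }
        }
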
diff --git a/src/compiler/scala/reflect/macros/contexts/Evals.scala b/src/compiler/scala/reflect/macros/contexts/Evals.scala
new file mode 100644
index 0000000..a715af9
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Evals.scala
@@ -0,0 +1,23 @@
+package scala.reflect.macros
+package contexts
+
+import scala.reflect.runtime.{universe => ru}
+import scala.tools.reflect.ToolBox
+
+trait Evals {
+  self: Context =>
+
+  private lazy val evalMirror = ru.runtimeMirror(universe.analyzer.defaultMacroClassloader)
+  private lazy val evalToolBox = evalMirror.mkToolBox()
+  private lazy val evalImporter = ru.internal.createImporter(universe).asInstanceOf[ru.Importer { val from: universe.type }]
+
+  def eval[T](expr: Expr[T]): T = {
+    expr.tree match {
+      case global.Literal(global.Constant(value)) =>
+        value.asInstanceOf[T]
+      case _ =>
+        val imported = evalImporter.importTree(expr.tree)
+        evalToolBox.eval(imported).asInstanceOf[T]
+    }
+  }
+}
\ No newline at end of file
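
    A sketch of the usual way to drive c.eval from a macro impl (illustrative only, not part
    of the patch): untypecheck a duplicate of the argument tree before evaluating it, so the
    toolbox can re-typecheck it from scratch:

        import scala.language.experimental.macros
        import scala.reflect.macros.blackbox.Context

        object CompileTimeEval {
          def lengthOf(s: String): Int = macro lengthOfImpl

          def lengthOfImpl(c: Context)(s: c.Expr[String]): c.Expr[Int] = {
            import c.universe._
            // plain literals take the fast path above; anything else goes through the toolbox
            val value: String = c.eval(c.Expr[String](c.untypecheck(s.tree.duplicate)))
            c.Expr[Int](Literal(Constant(value.length)))
          }
        }
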
diff --git a/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala b/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala
new file mode 100644
index 0000000..4846325
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala
@@ -0,0 +1,34 @@
+package scala.reflect.macros
+package contexts
+
+trait ExprUtils {
+  self: Context =>
+
+  import universe._
+
+  def literalNull = Expr[Null](Literal(Constant(null)))(TypeTag.Null)
+
+  def literalUnit = Expr[Unit](Literal(Constant(())))(TypeTag.Unit)
+
+  def literalTrue = Expr[Boolean](Literal(Constant(true)))(TypeTag.Boolean)
+
+  def literalFalse = Expr[Boolean](Literal(Constant(false)))(TypeTag.Boolean)
+
+  def literal(x: Boolean) = Expr[Boolean](Literal(Constant(x)))(TypeTag.Boolean)
+
+  def literal(x: Byte) = Expr[Byte](Literal(Constant(x)))(TypeTag.Byte)
+
+  def literal(x: Short) = Expr[Short](Literal(Constant(x)))(TypeTag.Short)
+
+  def literal(x: Int) = Expr[Int](Literal(Constant(x)))(TypeTag.Int)
+
+  def literal(x: Long) = Expr[Long](Literal(Constant(x)))(TypeTag.Long)
+
+  def literal(x: Float) = Expr[Float](Literal(Constant(x)))(TypeTag.Float)
+
+  def literal(x: Double) = Expr[Double](Literal(Constant(x)))(TypeTag.Double)
+
+  def literal(x: String) = Expr[String](Literal(Constant(x)))(TypeTag[String](definitions.StringClass.toTypeConstructor))
+
+  def literal(x: Char) = Expr[Char](Literal(Constant(x)))(TypeTag.Char)
+}
diff --git a/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala b/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala
new file mode 100644
index 0000000..fda05de
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala
@@ -0,0 +1,22 @@
+package scala.reflect.macros
+package contexts
+
+import scala.reflect.macros.runtime.AbortMacroException
+
+trait FrontEnds {
+  self: Context =>
+
+  def echo(pos: Position, msg: String): Unit = universe.reporter.echo(pos, msg)
+
+  def info(pos: Position, msg: String, force: Boolean): Unit = universe.reporter.info(pos, msg, force)
+
+  def hasWarnings: Boolean = universe.reporter.hasErrors
+
+  def hasErrors: Boolean = universe.reporter.hasErrors
+
+  def warning(pos: Position, msg: String): Unit = callsiteTyper.context.warning(pos, msg)
+
+  def error(pos: Position, msg: String): Unit = callsiteTyper.context.error(pos, msg)
+
+  def abort(pos: Position, msg: String): Nothing = throw new AbortMacroException(pos, msg)
+}
diff --git a/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
new file mode 100644
index 0000000..df7aa4d
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
@@ -0,0 +1,16 @@
+package scala.reflect.macros
+package contexts
+
+trait Infrastructure {
+  self: Context =>
+
+  def settings: List[String] = {
+    val us = universe.settings
+    import us._
+    userSetSettings collectFirst { case x: MultiStringSetting if x.name == XmacroSettings.name => x.value } getOrElse Nil
+  }
+
+  def compilerSettings: List[String] = universe.settings.recreateArgs
+
+  def classPath: List[java.net.URL] = global.classPath.asURLs
+}
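
    A sketch (not from the upstream sources) of how the settings plumbing is meant to be used:
    values passed with -Xmacro-settings on the compiler command line surface as c.settings
    inside macro impls; SettingsDemo and the sample option names are illustrative:

        import scala.language.experimental.macros
        import scala.reflect.macros.blackbox.Context

        object SettingsDemo {
          // compile clients with: scalac -Xmacro-settings:verbose,mode=fast ...
          def debug(): Unit = macro debugImpl

          def debugImpl(c: Context)(): c.Expr[Unit] = {
            import c.universe._
            val verbose = c.settings.contains("verbose")   // c.settings == List("verbose", "mode=fast")
            if (verbose) c.info(c.enclosingPosition, "expanding with verbose macro settings", force = false)
            c.Expr[Unit](Literal(Constant(())))
          }
        }
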
diff --git a/src/compiler/scala/reflect/macros/contexts/Internals.scala b/src/compiler/scala/reflect/macros/contexts/Internals.scala
new file mode 100644
index 0000000..8c784d7
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Internals.scala
@@ -0,0 +1,47 @@
+package scala.reflect.macros
+package contexts
+
+trait Internals extends scala.tools.nsc.transform.TypingTransformers {
+  self: Context =>
+
+  import global._
+
+  lazy val internal: ContextInternalApi = new global.SymbolTableInternal with ContextInternalApi {
+    val enclosingOwner = callsiteTyper.context.owner
+
+    class HofTransformer(hof: (Tree, TransformApi) => Tree) extends Transformer {
+      val api = new TransformApi {
+        def recur(tree: Tree): Tree = hof(tree, this)
+        def default(tree: Tree): Tree = superTransform(tree)
+      }
+      def superTransform(tree: Tree) = super.transform(tree)
+      override def transform(tree: Tree): Tree = hof(tree, api)
+    }
+
+    def transform(tree: Tree)(transformer: (Tree, TransformApi) => Tree): Tree = new HofTransformer(transformer).transform(tree)
+
+    class HofTypingTransformer(hof: (Tree, TypingTransformApi) => Tree) extends TypingTransformer(callsiteTyper.context.unit) { self =>
+      currentOwner = callsiteTyper.context.owner
+      curTree = EmptyTree
+      localTyper = global.analyzer.newTyper(callsiteTyper.context.make(unit = callsiteTyper.context.unit))
+
+      val api = new TypingTransformApi {
+        def recur(tree: Tree): Tree = hof(tree, this)
+        def default(tree: Tree): Tree = superTransform(tree)
+        def atOwner[T](owner: Symbol)(op: => T): T = self.atOwner(owner)(op)
+        def atOwner[T](tree: Tree, owner: Symbol)(op: => T): T = self.atOwner(tree, owner)(op)
+        def currentOwner: Symbol = self.currentOwner
+        def typecheck(tree: Tree): Tree = localTyper.typed(tree)
+      }
+      def superTransform(tree: Tree) = super.transform(tree)
+      override def transform(tree: Tree): Tree = hof(tree, api)
+    }
+
+    def typingTransform(tree: Tree)(transformer: (Tree, TypingTransformApi) => Tree): Tree = new HofTypingTransformer(transformer).transform(tree)
+
+    def typingTransform(tree: Tree, owner: Symbol)(transformer: (Tree, TypingTransformApi) => Tree): Tree = {
+      val trans = new HofTypingTransformer(transformer)
+      trans.atOwner(owner)(trans.transform(tree))
+    }
+  }
+}
\ No newline at end of file
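
    A sketch of what the HofTypingTransformer enables on the user side (illustrative, not from
    upstream): c.internal.typingTransform hands every node to a caller-supplied function
    together with an api for recursing, defaulting and local typechecking:

        import scala.reflect.macros.blackbox.Context

        object Rewriter {
          // bump every Int literal in an already-typechecked tree, keeping it typechecked
          def bumpInts(c: Context)(tree: c.Tree): c.Tree = {
            import c.universe._
            c.internal.typingTransform(tree)((t, api) => t match {
              case Literal(Constant(i: Int)) => api.typecheck(Literal(Constant(i + 1)))
              case _                         => api.default(t)
            })
          }
        }
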
diff --git a/src/compiler/scala/reflect/macros/contexts/Names.scala b/src/compiler/scala/reflect/macros/contexts/Names.scala
new file mode 100644
index 0000000..5a5bb42
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Names.scala
@@ -0,0 +1,43 @@
+package scala.reflect.macros
+package contexts
+
+trait Names {
+  self: Context =>
+
+  import global._
+
+  def freshNameCreator = globalFreshNameCreator
+
+  def fresh(): String =
+    freshName()
+
+  def fresh(name: String): String =
+    freshName(name)
+
+  def fresh[NameType <: Name](name: NameType): NameType =
+    freshName[NameType](name)
+
+  def freshName(): String =
+    freshName(nme.FRESH_PREFIX)
+
+  def freshName(name: String): String = {
+    // In comparison with the first version of freshName, current "fresh" names
+    // at least can't clash with legible user-written identifiers and are much less likely to clash with each other.
+    // It is still not good enough however, because the counter gets reset every time we create a new Global.
+    //
+    // This would most certainly cause problems if Scala featured something like introduceTopLevel,
+    // but even for def macros this can lead to unexpected troubles. Imagine that one Global
+    // creates a term of an anonymous type with a member featuring a "fresh" name, and then another Global
+    // imports that term with a wildcard and then generates a "fresh" name of its own. Given unlucky
+    // circumstances these "fresh" names might end up clashing.
+    //
+    // TODO: hopefully SI-7823 will provide an ultimate answer to this problem.
+    // In the meanwhile I will also keep open the original issue: SI-6879 "c.freshName is broken".
+    val prefix = if (name.endsWith("$")) name else name + "$" // SI-8425
+    val sortOfUniqueSuffix = freshNameCreator.newName(nme.FRESH_SUFFIX)
+    prefix + sortOfUniqueSuffix
+  }
+
+  def freshName[NameType <: Name](name: NameType): NameType =
+    name.mapName(freshName(_)).asInstanceOf[NameType]
+}
\ No newline at end of file
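
    A sketch of the intended use (not part of the patch): a macro impl asks for a fresh term
    name before introducing a synthetic val, so the generated binding cannot clash with
    user-written identifiers; FreshDemo is an illustrative name:

        import scala.language.experimental.macros
        import scala.reflect.macros.blackbox.Context

        object FreshDemo {
          def twice(x: Int): Int = macro twiceImpl

          def twiceImpl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
            import c.universe._
            val tmp = TermName(c.freshName("tmp"))   // e.g. "tmp$macro$1" under the scheme above
            c.Expr[Int](q"val $tmp = ${x.tree}; $tmp + $tmp")
          }
        }
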
diff --git a/src/compiler/scala/reflect/macros/contexts/Parsers.scala b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
new file mode 100644
index 0000000..88cfea8
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
@@ -0,0 +1,20 @@
+package scala.reflect.macros
+package contexts
+
+import scala.tools.nsc.reporters.StoreReporter
+
+trait Parsers {
+  self: Context =>
+  import global._
+
+  def parse(code: String) = {
+    val sreporter = new StoreReporter()
+    val unit = new CompilationUnit(newSourceFile(code, "<macro>")) { override def reporter = sreporter }
+    val parser = newUnitParser(unit)
+    val tree = gen.mkTreeOrBlock(parser.parseStatsOrPackages())
+    sreporter.infos.foreach {
+      case sreporter.Info(pos, msg, sreporter.ERROR) => throw ParseException(pos, msg)
+    }
+    tree
+  }
+}
\ No newline at end of file
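
    A sketch of the call-site view (illustrative, not from upstream): c.parse turns a snippet
    of source into an untyped tree, throwing ParseException at the offending position when the
    snippet does not parse:

        import scala.reflect.macros.blackbox.Context

        object ParseDemo {
          def bannerImpl(c: Context): c.Tree = {
            // the result is untyped; the enclosing expansion is typechecked as usual
            c.parse("""println("=" * 40)""")
          }
        }
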
diff --git a/src/compiler/scala/reflect/macros/contexts/Reifiers.scala b/src/compiler/scala/reflect/macros/contexts/Reifiers.scala
new file mode 100644
index 0000000..ecef1c7
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Reifiers.scala
@@ -0,0 +1,77 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Gilles Dubochet
+ */
+
+package scala.reflect.macros
+package contexts
+
+trait Reifiers {
+  self: Context =>
+
+  val global: universe.type = universe
+  import universe._
+  import definitions._
+
+  def reifyTree(universe: Tree, mirror: Tree, tree: Tree): Tree = {
+    assert(ExprClass != NoSymbol)
+    val result = scala.reflect.reify.`package`.reifyTree(self.universe)(callsiteTyper, universe, mirror, tree)
+    logFreeVars(enclosingPosition, result)
+    result
+  }
+
+  def reifyType(universe: Tree, mirror: Tree, tpe: Type, concrete: Boolean = false): Tree = {
+    assert(TypeTagsClass != NoSymbol)
+    val result = scala.reflect.reify.`package`.reifyType(self.universe)(callsiteTyper, universe, mirror, tpe, concrete)
+    logFreeVars(enclosingPosition, result)
+    result
+  }
+
+  def reifyRuntimeClass(tpe: Type, concrete: Boolean = true): Tree =
+    scala.reflect.reify.`package`.reifyRuntimeClass(universe)(callsiteTyper, tpe, concrete = concrete)
+
+  def reifyEnclosingRuntimeClass: Tree =
+    scala.reflect.reify.`package`.reifyEnclosingRuntimeClass(universe)(callsiteTyper)
+
+  def unreifyTree(tree: Tree): Tree = {
+    assert(ExprSplice != NoSymbol)
+    Select(tree, ExprSplice)
+  }
+
+  // fixme: if I put utils here, then "global" from utils' early initialization syntax
+  // and "global" that comes from here conflict with each other when incrementally compiling
+  // the problem is that both are pickled with the same owner - trait Reifiers
+  // and this upsets the compiler, so that oftentimes it throws assertion failures
+  // Martin knows the details
+  //
+  // object utils extends {
+  //   val global: self.global.type = self.global
+  //   val typer: global.analyzer.Typer = self.callsiteTyper
+  // } with scala.reflect.reify.utils.Utils
+  // import utils._
+
+  private def logFreeVars(position: Position, reification: Tree): Unit = {
+    object utils extends {
+      val global: self.global.type = self.global
+      val typer: global.analyzer.Typer = self.callsiteTyper
+    } with scala.reflect.reify.utils.Utils
+    import utils._
+
+    def logFreeVars(symtab: SymbolTable): Unit =
+      // logging free vars only when they are untyped prevents avalanches of duplicate messages
+      symtab.syms map (sym => symtab.symDef(sym)) foreach {
+        case FreeTermDef(_, _, binding, _, origin) if universe.settings.logFreeTerms && binding.tpe == null =>
+          reporter.echo(position, "free term: %s %s".format(showRaw(binding), origin))
+        case FreeTypeDef(_, _, binding, _, origin) if universe.settings.logFreeTypes && binding.tpe == null =>
+          reporter.echo(position, "free type: %s %s".format(showRaw(binding), origin))
+        case _ =>
+          // do nothing
+      }
+
+    if (universe.settings.logFreeTerms || universe.settings.logFreeTypes)
+      reification match {
+        case ReifiedTree(_, _, symtab, _, _, _, _) => logFreeVars(symtab)
+        case ReifiedType(_, _, symtab, _, _, _) => logFreeVars(symtab)
+      }
+  }
+}
diff --git a/src/compiler/scala/reflect/macros/contexts/Traces.scala b/src/compiler/scala/reflect/macros/contexts/Traces.scala
new file mode 100644
index 0000000..df47f6b
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Traces.scala
@@ -0,0 +1,8 @@
+package scala.reflect.macros
+package contexts
+
+trait Traces extends util.Traces {
+  self: Context =>
+
+  def globalSettings = universe.settings
+}
diff --git a/src/compiler/scala/reflect/macros/contexts/Typers.scala b/src/compiler/scala/reflect/macros/contexts/Typers.scala
new file mode 100644
index 0000000..28c1e3d
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Typers.scala
@@ -0,0 +1,53 @@
+package scala.reflect.macros
+package contexts
+
+trait Typers {
+  self: Context =>
+
+  def openMacros: List[Context] = this :: universe.analyzer.openMacros
+
+  def openImplicits: List[ImplicitCandidate] = callsiteTyper.context.openImplicits.map(_.toImplicitCandidate)
+
+  type TypecheckMode = scala.reflect.internal.Mode
+  val TypecheckMode = scala.reflect.internal.Mode
+  val TERMmode = TypecheckMode.EXPRmode
+  val TYPEmode = TypecheckMode.TYPEmode | TypecheckMode.FUNmode
+  val PATTERNmode = TypecheckMode.PATTERNmode
+
+  /**
+   * @see [[scala.tools.reflect.ToolBox.typeCheck]]
+   */
+  def typecheck(tree: Tree, mode: TypecheckMode = TERMmode, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree = {
+    macroLogVerbose("typechecking %s with expected type %s, implicit views = %s, macros = %s".format(tree, pt, !withImplicitViewsDisabled, !withMacrosDisabled))
+    val context = callsiteTyper.context
+    val withImplicitFlag = if (!withImplicitViewsDisabled) (context.withImplicitsEnabled[Tree] _) else (context.withImplicitsDisabled[Tree] _)
+    val withMacroFlag = if (!withMacrosDisabled) (context.withMacrosEnabled[Tree] _) else (context.withMacrosDisabled[Tree] _)
+    def withContext(tree: => Tree) = withImplicitFlag(withMacroFlag(tree))
+    def withWrapping(tree: Tree)(op: Tree => Tree) = if (mode == TERMmode) universe.wrappingIntoTerm(tree)(op) else op(tree)
+    def typecheckInternal(tree: Tree) = callsiteTyper.silent(_.typed(universe.duplicateAndKeepPositions(tree), mode, pt), reportAmbiguousErrors = false)
+    withWrapping(tree)(wrappedTree => withContext(typecheckInternal(wrappedTree) match {
+      case universe.analyzer.SilentResultValue(result) =>
+        macroLogVerbose(result)
+        result
+      case error @ universe.analyzer.SilentTypeError(_) =>
+        macroLogVerbose(error.err.errMsg)
+        if (!silent) throw new TypecheckException(error.err.errPos, error.err.errMsg)
+        universe.EmptyTree
+    }))
+  }
+
+  def inferImplicitValue(pt: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree = {
+    macroLogVerbose("inferring implicit value of type %s, macros = %s".format(pt, !withMacrosDisabled))
+    universe.analyzer.inferImplicit(universe.EmptyTree, pt, false, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg))
+  }
+
+  def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree = {
+    macroLogVerbose("inferring implicit view from %s to %s for %s, macros = %s".format(from, to, tree, !withMacrosDisabled))
+    val viewTpe = universe.appliedType(universe.definitions.FunctionClass(1).toTypeConstructor, List(from, to))
+    universe.analyzer.inferImplicit(tree, viewTpe, true, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg))
+  }
+
+  def resetLocalAttrs(tree: Tree): Tree = universe.resetAttrs(universe.duplicateAndKeepPositions(tree))
+
+  def untypecheck(tree: Tree): Tree = resetLocalAttrs(tree)
+}
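
    A sketch (not from the upstream sources) of the two most common entry points above:
    typechecking a synthesized tree and summoning an implicit at the call site; the Ordering
    lookup and the error message are illustrative:

        import scala.language.experimental.macros
        import scala.reflect.macros.blackbox.Context

        object TyperDemo {
          def minOf[T](xs: List[T]): T = macro minOfImpl[T]

          def minOfImpl[T: c.WeakTypeTag](c: Context)(xs: c.Expr[List[T]]): c.Expr[T] = {
            import c.universe._
            val tpe = weakTypeOf[T]
            // silent = true by default: EmptyTree comes back when no implicit is found
            val ordering = c.inferImplicitValue(weakTypeOf[Ordering[T]])
            if (ordering.isEmpty) c.abort(c.enclosingPosition, s"no implicit Ordering[$tpe] in scope")
            c.Expr[T](c.typecheck(q"${xs.tree}.min($ordering)"))
          }
        }
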
diff --git a/src/compiler/scala/reflect/macros/runtime/Aliases.scala b/src/compiler/scala/reflect/macros/runtime/Aliases.scala
deleted file mode 100644
index 96cf50e..0000000
--- a/src/compiler/scala/reflect/macros/runtime/Aliases.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait Aliases {
-  self: Context =>
-
-  override type Symbol = universe.Symbol
-  override type Type = universe.Type
-  override type Name = universe.Name
-  override type TermName = universe.TermName
-  override type TypeName = universe.TypeName
-  override type Tree = universe.Tree
-  override type Position = universe.Position
-  override type Scope = universe.Scope
-  override type Modifiers = universe.Modifiers
-
-  override type Expr[+T] = universe.Expr[T]
-  override val Expr = universe.Expr
-  def Expr[T: WeakTypeTag](tree: Tree): Expr[T] = universe.Expr[T](mirror, universe.FixedMirrorTreeCreator(mirror, tree))
-
-  override type WeakTypeTag[T] = universe.WeakTypeTag[T]
-  override type TypeTag[T] = universe.TypeTag[T]
-  override val WeakTypeTag = universe.WeakTypeTag
-  override val TypeTag = universe.TypeTag
-  def WeakTypeTag[T](tpe: Type): WeakTypeTag[T] = universe.WeakTypeTag[T](mirror, universe.FixedMirrorTypeCreator(mirror, tpe))
-  def TypeTag[T](tpe: Type): TypeTag[T] = universe.TypeTag[T](mirror, universe.FixedMirrorTypeCreator(mirror, tpe))
-  override def weakTypeTag[T](implicit attag: WeakTypeTag[T]) = attag
-  override def typeTag[T](implicit ttag: TypeTag[T]) = ttag
-  override def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
-  override def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
-
-  type ImplicitCandidate = (Type, Tree)
-  implicit class RichOpenImplicit(oi: universe.analyzer.OpenImplicit) {
-    def toImplicitCandidate = (oi.pt, oi.tree)
-  }
-}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/Context.scala b/src/compiler/scala/reflect/macros/runtime/Context.scala
deleted file mode 100644
index 8e8b0fc..0000000
--- a/src/compiler/scala/reflect/macros/runtime/Context.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-import scala.tools.nsc.Global
-
-abstract class Context extends scala.reflect.macros.Context
-                         with Aliases
-                         with Enclosures
-                         with Names
-                         with Reifiers
-                         with FrontEnds
-                         with Infrastructure
-                         with Typers
-                         with Parsers
-                         with Evals
-                         with ExprUtils
-                         with Traces {
-
-  val universe: Global
-
-  val mirror: universe.Mirror = universe.rootMirror
-
-  val callsiteTyper: universe.analyzer.Typer
-
-  val prefix: Expr[PrefixType]
-
-  val expandee: Tree
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala b/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
deleted file mode 100644
index 2a4a22f..0000000
--- a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait Enclosures {
-  self: Context =>
-
-  import universe._
-  import mirror._
-
-  private def site       = callsiteTyper.context
-  private def enclTrees  = site.enclosingContextChain map (_.tree)
-  private def enclPoses  = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition)
-
-  // vals are eager to simplify debugging
-  // after all we wouldn't save that much time by making them lazy
-  val macroApplication: Tree                      = expandee
-  val enclosingClass: Tree                        = enclTrees collectFirst { case x: ImplDef => x } getOrElse EmptyTree
-  val enclosingImplicits: List[ImplicitCandidate] = site.openImplicits.map(_.toImplicitCandidate)
-  val enclosingMacros: List[Context]              = this :: universe.analyzer.openMacros // include self
-  val enclosingMethod: Tree                       = site.enclMethod.tree
-  val enclosingPosition: Position                 = if (enclPoses.isEmpty) NoPosition else enclPoses.head.pos
-  val enclosingUnit: CompilationUnit              = universe.currentRun.currentUnit
-  val enclosingRun: Run                           = universe.currentRun
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/Evals.scala b/src/compiler/scala/reflect/macros/runtime/Evals.scala
deleted file mode 100644
index 1f7b5f2..0000000
--- a/src/compiler/scala/reflect/macros/runtime/Evals.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-import scala.reflect.runtime.{universe => ru}
-import scala.tools.reflect.ToolBox
-
-trait Evals {
-  self: Context =>
-
-  private lazy val evalMirror = ru.runtimeMirror(universe.analyzer.macroClassloader)
-  private lazy val evalToolBox = evalMirror.mkToolBox()
-  private lazy val evalImporter = ru.mkImporter(universe).asInstanceOf[ru.Importer { val from: universe.type }]
-
-  def eval[T](expr: Expr[T]): T = {
-    val imported = evalImporter.importTree(expr.tree)
-    evalToolBox.eval(imported).asInstanceOf[T]
-  }
-}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala b/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala
deleted file mode 100644
index 672699f..0000000
--- a/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait ExprUtils {
-  self: Context =>
-
-  import universe._
-  import mirror._
-
-  def literalNull = Expr[Null](Literal(Constant(null)))(TypeTag.Null)
-
-  def literalUnit = Expr[Unit](Literal(Constant(())))(TypeTag.Unit)
-
-  def literalTrue = Expr[Boolean](Literal(Constant(true)))(TypeTag.Boolean)
-
-  def literalFalse = Expr[Boolean](Literal(Constant(false)))(TypeTag.Boolean)
-
-  def literal(x: Boolean) = Expr[Boolean](Literal(Constant(x)))(TypeTag.Boolean)
-
-  def literal(x: Byte) = Expr[Byte](Literal(Constant(x)))(TypeTag.Byte)
-
-  def literal(x: Short) = Expr[Short](Literal(Constant(x)))(TypeTag.Short)
-
-  def literal(x: Int) = Expr[Int](Literal(Constant(x)))(TypeTag.Int)
-
-  def literal(x: Long) = Expr[Long](Literal(Constant(x)))(TypeTag.Long)
-
-  def literal(x: Float) = Expr[Float](Literal(Constant(x)))(TypeTag.Float)
-
-  def literal(x: Double) = Expr[Double](Literal(Constant(x)))(TypeTag.Double)
-
-  def literal(x: String) = Expr[String](Literal(Constant(x)))(TypeTag[String](definitions.StringClass.toTypeConstructor))
-
-  def literal(x: Char) = Expr[Char](Literal(Constant(x)))(TypeTag.Char)
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala b/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala
deleted file mode 100644
index a6a198e..0000000
--- a/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait FrontEnds {
-  self: Context =>
-
-  def echo(pos: Position, msg: String): Unit = universe.reporter.echo(pos, msg)
-
-  def info(pos: Position, msg: String, force: Boolean): Unit = universe.reporter.info(pos, msg, force)
-
-  def hasWarnings: Boolean = universe.reporter.hasErrors
-
-  def hasErrors: Boolean = universe.reporter.hasErrors
-
-  def warning(pos: Position, msg: String): Unit = callsiteTyper.context.warning(pos, msg)
-
-  def error(pos: Position, msg: String): Unit = callsiteTyper.context.error(pos, msg)
-
-  def abort(pos: Position, msg: String): Nothing = throw new AbortMacroException(pos, msg)
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala b/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
deleted file mode 100644
index 7781693..0000000
--- a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait Infrastructure {
-  self: Context =>
-
-  def settings: List[String] = {
-    val us = universe.settings
-    import us._
-    userSetSettings collectFirst { case x: MultiStringSetting if x.name == XmacroSettings.name => x.value } getOrElse Nil
-  }
-
-  def compilerSettings: List[String] = universe.settings.recreateArgs
-
-  def classPath: List[java.net.URL] = global.classPath.asURLs
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala
new file mode 100644
index 0000000..be114ef
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala
@@ -0,0 +1,38 @@
+package scala.reflect.macros
+package runtime
+
+import scala.reflect.runtime.ReflectionUtils
+import scala.reflect.macros.blackbox.{Context => BlackboxContext}
+import scala.reflect.macros.whitebox.{Context => WhiteboxContext}
+import java.lang.reflect.{Constructor => jConstructor}
+
+trait JavaReflectionRuntimes {
+  self: scala.tools.nsc.typechecker.Analyzer =>
+
+  trait JavaReflectionResolvers {
+    self: MacroRuntimeResolver =>
+
+    def resolveJavaReflectionRuntime(classLoader: ClassLoader): MacroRuntime = {
+      val implClass = Class.forName(className, true, classLoader)
+      val implMeths = implClass.getMethods.find(_.getName == methName)
+      // relies on the fact that macro impls cannot be overloaded
+      // so every methName can resolve to at most one method
+      val implMeth = implMeths getOrElse { throw new NoSuchMethodException(s"$className.$methName") }
+      macroLogVerbose(s"successfully loaded macro impl as ($implClass, $implMeth)")
+      args => {
+        val implObj =
+          if (isBundle) {
+            def isMacroContext(clazz: Class[_]) = clazz == classOf[BlackboxContext] || clazz == classOf[WhiteboxContext]
+            def isBundleCtor(ctor: jConstructor[_]) = ctor.getParameterTypes match {
+              case Array(param) if isMacroContext(param) => true
+              case _ => false
+            }
+            val Array(bundleCtor) = implClass.getConstructors.filter(isBundleCtor)
+            bundleCtor.newInstance(args.c)
+          } else ReflectionUtils.staticSingletonInstance(implClass)
+        val implArgs = if (isBundle) args.others else args.c +: args.others
+        implMeth.invoke(implObj, implArgs.asInstanceOf[Seq[AnyRef]]: _*)
+      }
+    }
+  }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
new file mode 100644
index 0000000..5fd9c0d
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
@@ -0,0 +1,75 @@
+package scala.reflect.macros
+package runtime
+
+import scala.reflect.internal.Flags._
+import scala.reflect.runtime.ReflectionUtils
+
+trait MacroRuntimes extends JavaReflectionRuntimes {
+  self: scala.tools.nsc.typechecker.Analyzer =>
+
+  import global._
+  import definitions._
+
+  /** Produces a function that can be used to invoke macro implementation for a given macro definition:
+   *    1) Looks up macro implementation symbol in this universe.
+   *    2) Loads its enclosing class from the macro classloader.
+   *    3) Loads the companion of that enclosing class from the macro classloader.
+   *    4) Resolves macro implementation within the loaded companion.
+   *
+   *  @return Requested runtime if macro implementation can be loaded successfully from either of the mirrors,
+   *          `null` otherwise.
+   */
+  def macroRuntime(expandee: Tree): MacroRuntime = pluginsMacroRuntime(expandee)
+
+  /** Default implementation of `macroRuntime`.
+   *  Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroRuntime for more details)
+   */
+  private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]
+  def standardMacroRuntime(expandee: Tree): MacroRuntime = {
+    val macroDef = expandee.symbol
+    macroLogVerbose(s"looking for macro implementation: $macroDef")
+    if (fastTrack contains macroDef) {
+      macroLogVerbose("macro expansion is serviced by a fast track")
+      fastTrack(macroDef)
+    } else {
+      macroRuntimesCache.getOrElseUpdate(macroDef, new MacroRuntimeResolver(macroDef).resolveRuntime())
+    }
+  }
+
+  /** Macro classloader that is used to resolve and run macro implementations.
+   *  Loads classes from -cp (aka the library classpath).
+   *  Is also capable of detecting REPL and reusing its classloader.
+   *
+   *  When -Xmacro-jit is enabled, we sometimes fall back to on-the-fly compilation of macro implementations,
+   *  which compiles implementations into a virtual directory (very much like REPL does) and then conjures
+   *  a classloader mapped to that virtual directory.
+   */
+  lazy val defaultMacroClassloader: ClassLoader = findMacroClassLoader()
+
+  /** Abstracts away resolution of macro runtimes.
+   */
+  type MacroRuntime = MacroArgs => Any
+  class MacroRuntimeResolver(val macroDef: Symbol) extends JavaReflectionResolvers {
+    val binding = loadMacroImplBinding(macroDef).get
+    val isBundle = binding.isBundle
+    val className = binding.className
+    val methName = binding.methName
+
+    def resolveRuntime(): MacroRuntime = {
+      if (className == Predef_???.owner.javaClassName && methName == Predef_???.name.encoded) {
+        args => throw new AbortMacroException(args.c.enclosingPosition, "macro implementation is missing")
+      } else {
+        try {
+          macroLogVerbose(s"resolving macro implementation as $className.$methName (isBundle = $isBundle)")
+          macroLogVerbose(s"classloader is: ${ReflectionUtils.show(defaultMacroClassloader)}")
+          resolveJavaReflectionRuntime(defaultMacroClassloader)
+        } catch {
+          case ex: Exception =>
+            macroLogVerbose(s"macro runtime failed to load: ${ex.toString}")
+            macroDef setFlag IS_ERROR
+            null
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
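For orientation, a minimal sketch of the two implementation shapes the resolver above has to handle, assuming the public 2.11 macro API (scala.reflect.macros.blackbox.Context); the names HelloImpls, HelloBundle and Hello are illustrative only:

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    // Shape 1: a static impl in an object; the runtime is obtained by loading
    // the object's singleton instance and invoking the method reflectively.
    object HelloImpls {
      def impl(c: Context)(): c.Tree = {
        import c.universe._
        q"""println("hello")"""
      }
    }

    // Shape 2: a macro bundle; the runtime instantiates the class, passing the
    // macro context to its single constructor parameter.
    class HelloBundle(val c: Context) {
      def impl(): c.Tree = {
        import c.universe._
        q"""println("hello")"""
      }
    }

    object Hello {
      def hello(): Unit = macro HelloImpls.impl
      def helloViaBundle(): Unit = macro HelloBundle.impl
    }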
diff --git a/src/compiler/scala/reflect/macros/runtime/Names.scala b/src/compiler/scala/reflect/macros/runtime/Names.scala
deleted file mode 100644
index ee9f3a5..0000000
--- a/src/compiler/scala/reflect/macros/runtime/Names.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait Names {
-  self: Context =>
-
-  lazy val freshNameCreator = callsiteTyper.context.unit.fresh
-
-  def fresh(): String =
-    freshNameCreator.newName()
-
-  def fresh(name: String): String =
-    freshNameCreator.newName(name)
-
-  def fresh[NameType <: Name](name: NameType): NameType =
-    name.mapName(freshNameCreator.newName(_)).asInstanceOf[NameType]
-}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/Parsers.scala b/src/compiler/scala/reflect/macros/runtime/Parsers.scala
deleted file mode 100644
index 566bcde..0000000
--- a/src/compiler/scala/reflect/macros/runtime/Parsers.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-import scala.language.existentials
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.ToolBoxError
-
-trait Parsers {
-  self: Context =>
-
-  def parse(code: String): Tree =
-    // todo. provide decent implementation
-    // see `Typers.typedUseCase` for details
-    try {
-      import scala.reflect.runtime.{universe => ru}
-      val parsed = ru.rootMirror.mkToolBox().parse(code)
-      val importer = universe.mkImporter(ru)
-      importer.importTree(parsed)
-    } catch {
-      case ToolBoxError(msg, cause) =>
-        // todo. provide a position
-        throw new ParseException(universe.NoPosition, msg)
-    }
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/Reifiers.scala b/src/compiler/scala/reflect/macros/runtime/Reifiers.scala
deleted file mode 100644
index 8bb388b..0000000
--- a/src/compiler/scala/reflect/macros/runtime/Reifiers.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Gilles Dubochet
- */
-
-package scala.reflect.macros
-package runtime
-
-trait Reifiers {
-  self: Context =>
-
-  val global: universe.type = universe
-  import universe._
-  import definitions._
-
-  def reifyTree(universe: Tree, mirror: Tree, tree: Tree): Tree = {
-    assert(ExprClass != NoSymbol)
-    val result = scala.reflect.reify.`package`.reifyTree(self.universe)(callsiteTyper, universe, mirror, tree)
-    logFreeVars(enclosingPosition, result)
-    result
-  }
-
-  def reifyType(universe: Tree, mirror: Tree, tpe: Type, concrete: Boolean = false): Tree = {
-    assert(TypeTagsClass != NoSymbol)
-    val result = scala.reflect.reify.`package`.reifyType(self.universe)(callsiteTyper, universe, mirror, tpe, concrete)
-    logFreeVars(enclosingPosition, result)
-    result
-  }
-
-  def reifyRuntimeClass(tpe: Type, concrete: Boolean = true): Tree =
-    scala.reflect.reify.`package`.reifyRuntimeClass(universe)(callsiteTyper, tpe, concrete = concrete)
-
-  def reifyEnclosingRuntimeClass: Tree =
-    scala.reflect.reify.`package`.reifyEnclosingRuntimeClass(universe)(callsiteTyper)
-
-  def unreifyTree(tree: Tree): Tree = {
-    assert(ExprSplice != NoSymbol)
-    Select(tree, ExprSplice)
-  }
-
-  // fixme: if I put utils here, then "global" from utils' early initialization syntax
-  // and "global" that comes from here conflict with each other when incrementally compiling
-  // the problem is that both are pickled with the same owner - trait Reifiers
-  // and this upsets the compiler, so that oftentimes it throws assertion failures
-  // Martin knows the details
-  //
-  // object utils extends {
-  //   val global: self.global.type = self.global
-  //   val typer: global.analyzer.Typer = self.callsiteTyper
-  // } with scala.reflect.reify.utils.Utils
-  // import utils._
-
-  private def logFreeVars(position: Position, reification: Tree): Unit = {
-    object utils extends {
-      val global: self.global.type = self.global
-      val typer: global.analyzer.Typer = self.callsiteTyper
-    } with scala.reflect.reify.utils.Utils
-    import utils._
-
-    def logFreeVars(symtab: SymbolTable): Unit =
-      // logging free vars only when they are untyped prevents avalanches of duplicate messages
-      symtab.syms map (sym => symtab.symDef(sym)) foreach {
-        case FreeTermDef(_, _, binding, _, origin) if universe.settings.logFreeTerms.value && binding.tpe == null =>
-          reporter.echo(position, "free term: %s %s".format(showRaw(binding), origin))
-        case FreeTypeDef(_, _, binding, _, origin) if universe.settings.logFreeTypes.value && binding.tpe == null =>
-          reporter.echo(position, "free type: %s %s".format(showRaw(binding), origin))
-        case _ =>
-          // do nothing
-      }
-
-    if (universe.settings.logFreeTerms.value || universe.settings.logFreeTypes.value)
-      reification match {
-        case ReifiedTree(_, _, symtab, _, _, _, _) => logFreeVars(symtab)
-        case ReifiedType(_, _, symtab, _, _, _) => logFreeVars(symtab)
-      }
-  }
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/Traces.scala b/src/compiler/scala/reflect/macros/runtime/Traces.scala
deleted file mode 100644
index 0238e9f..0000000
--- a/src/compiler/scala/reflect/macros/runtime/Traces.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait Traces extends util.Traces {
-  self: Context =>
-
-  def globalSettings = universe.settings
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/Typers.scala b/src/compiler/scala/reflect/macros/runtime/Typers.scala
deleted file mode 100644
index a51bee0..0000000
--- a/src/compiler/scala/reflect/macros/runtime/Typers.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait Typers {
-  self: Context =>
-
-  def openMacros: List[Context] = this :: universe.analyzer.openMacros
-
-  def openImplicits: List[ImplicitCandidate] = callsiteTyper.context.openImplicits.map(_.toImplicitCandidate)
-
-  /**
-   * @see [[scala.tools.reflect.Toolbox.typeCheck]]
-   */
-  def typeCheck(tree: Tree, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree = {
-    macroLogVerbose("typechecking %s with expected type %s, implicit views = %s, macros = %s".format(tree, pt, !withImplicitViewsDisabled, !withMacrosDisabled))
-    val context = callsiteTyper.context
-    val wrapper1 = if (!withImplicitViewsDisabled) (context.withImplicitsEnabled[Tree] _) else (context.withImplicitsDisabled[Tree] _)
-    val wrapper2 = if (!withMacrosDisabled) (context.withMacrosEnabled[Tree] _) else (context.withMacrosDisabled[Tree] _)
-    def wrapper (tree: => Tree) = wrapper1(wrapper2(tree))
-    // if you get a "silent mode is not available past typer" here
-    // don't rush to change the typecheck not to use the silent method when the silent parameter is false
-    // typechecking uses silent anyways (e.g. in typedSelect), so you'll only waste your time
-    // I'd advise fixing the root cause: finding why the context is not set to report errors
-    // (also see reflect.runtime.ToolBoxes.typeCheckExpr for a workaround that might work for you)
-    wrapper(callsiteTyper.silent(_.typed(tree, universe.analyzer.EXPRmode, pt), reportAmbiguousErrors = false) match {
-      case universe.analyzer.SilentResultValue(result) =>
-        macroLogVerbose(result)
-        result
-      case error @ universe.analyzer.SilentTypeError(_) =>
-        macroLogVerbose(error.err.errMsg)
-        if (!silent) throw new TypecheckException(error.err.errPos, error.err.errMsg)
-        universe.EmptyTree
-    })
-  }
-
-  def inferImplicitValue(pt: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree = {
-    macroLogVerbose("inferring implicit value of type %s, macros = %s".format(pt, !withMacrosDisabled))
-    universe.analyzer.inferImplicit(universe.EmptyTree, pt, false, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg))
-  }
-
-  def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree = {
-    macroLogVerbose("inferring implicit view from %s to %s for %s, macros = %s".format(from, to, tree, !withMacrosDisabled))
-    val viewTpe = universe.appliedType(universe.definitions.FunctionClass(1).toTypeConstructor, List(from, to))
-    universe.analyzer.inferImplicit(tree, viewTpe, true, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg))
-  }
-
-  def resetAllAttrs(tree: Tree): Tree = universe.resetAllAttrs(tree)
-
-  def resetLocalAttrs(tree: Tree): Tree = universe.resetLocalAttrs(tree)
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/package.scala b/src/compiler/scala/reflect/macros/runtime/package.scala
new file mode 100644
index 0000000..9ef8200
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/package.scala
@@ -0,0 +1,5 @@
+package scala.reflect.macros
+
+package object runtime {
+  type Context = scala.reflect.macros.contexts.Context
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/util/Helpers.scala b/src/compiler/scala/reflect/macros/util/Helpers.scala
new file mode 100644
index 0000000..bddc42d
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/util/Helpers.scala
@@ -0,0 +1,96 @@
+package scala.reflect.macros
+package util
+
+import scala.tools.nsc.typechecker.Analyzer
+
+trait Helpers {
+  self: Analyzer =>
+
+  import global._
+  import definitions._
+
+  /** Transforms the parameter lists of a macro impl.
+   *  The `transform` function is invoked only for WeakTypeTag evidence parameters.
+   *
+   *  The transformer takes two arguments: a value parameter from the parameter list
+   *  and a type parameter that is witnessed by the value parameter.
+   *
+   *  If the transformer returns NoSymbol, the value parameter is excluded from the result.
+   *  If the transformer returns a different symbol, that symbol is included in the result in place of the value parameter.
+   *
+   *  Despite being highly esoteric, this function significantly simplifies signature analysis.
+   *  For example, it can be used to strip macroImpl.paramss of the evidence parameters (necessary when checking def <-> impl correspondence)
+   *  or to streamline creation of the list of macro arguments.
+   */
+  def transformTypeTagEvidenceParams(macroImplRef: Tree, transform: (Symbol, Symbol) => Symbol): List[List[Symbol]] = {
+    val runDefinitions = currentRun.runDefinitions
+    import runDefinitions._
+
+    val MacroContextUniverse = definitions.MacroContextUniverse
+    val treeInfo.MacroImplReference(isBundle, _, _, macroImpl, _) = macroImplRef
+    val paramss = macroImpl.paramss
+    val ContextParam = paramss match {
+      case Nil | _ :+ Nil                                       => NoSymbol // no implicit parameters in the signature => nothing to do
+      case _ if isBundle                                        => macroImpl.owner.tpe member nme.c
+      case (cparam :: _) :: _ if isMacroContextType(cparam.tpe) => cparam
+      case _                                                    => NoSymbol // no context parameter in the signature => nothing to do
+    }
+    def transformTag(param: Symbol): Symbol = param.tpe.dealias match {
+      case TypeRef(SingleType(SingleType(_, ContextParam), MacroContextUniverse), WeakTypeTagClass, targ :: Nil) => transform(param, targ.typeSymbol)
+      case _                                                                                                     => param
+    }
+    ContextParam match {
+      case NoSymbol => paramss
+      case _        =>
+        paramss.last map transformTag filter (_.exists) match {
+          case Nil         => paramss.init
+          case transformed => paramss.init :+ transformed
+        }
+    }
+  }
+
+  /** Increases metalevel of the type, i.e. transforms:
+   *    * T to c.Expr[T]
+   *
+   *  @see Metalevels.scala for more information and examples about metalevels
+   */
+  def increaseMetalevel(pre: Type, tp: Type): Type = {
+    val runDefinitions = currentRun.runDefinitions
+    import runDefinitions._
+
+    transparentShallowTransform(RepeatedParamClass, tp) {
+      case tp => typeRef(pre, MacroContextExprClass, List(tp))
+    }
+  }
+
+  /** Transforms c.Expr[T] types into c.Tree and leaves the rest unchanged.
+   */
+  def untypeMetalevel(tp: Type): Type = {
+    val runDefinitions = currentRun.runDefinitions
+    import runDefinitions._
+
+    transparentShallowTransform(RepeatedParamClass, tp) {
+      case ExprClassOf(_) => typeRef(tp.prefix, TreesTreeType, Nil)
+      case tp => tp
+    }
+  }
+
+  /** Decreases metalevel of the type, i.e. transforms:
+   *    * c.Expr[T] to T
+   *    * Nothing to Nothing
+   *    * Anything else to NoType
+   *
+   *  @see Metalevels.scala for more information and examples about metalevels
+   */
+  def decreaseMetalevel(tp: Type): Type = {
+    val runDefinitions = currentRun.runDefinitions
+    import runDefinitions._
+    transparentShallowTransform(RepeatedParamClass, tp) {
+      case ExprClassOf(runtimeType) => runtimeType
+      // special-casing Nothing here is a useful convention
+      // that enables no-hassle prototyping with `macro ???` and `macro { ...; ??? }`
+      case nothing if nothing =:= NothingTpe => NothingTpe
+      case _ => NoType
+    }
+  }
+}
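To make the metalevel and evidence-parameter bookkeeping concrete, a minimal sketch of a macro def/impl pair, assuming the public 2.11 macro API (the object and method names are illustrative): the impl's types sit one metalevel above the def's (Int vs. c.Expr[Int]), the trailing implicit WeakTypeTag list is exactly the kind of parameter transformTypeTagEvidenceParams operates on, and the Nothing case corresponds to the `macro ???` prototyping convention mentioned above.

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object MetalevelExample {
      // metalevel 0: the macro def talks about plain Int
      def twice(x: Int): Int = macro twiceImpl

      // metalevel 1: the impl talks about c.Expr[Int]; increaseMetalevel describes
      // the Int => c.Expr[Int] direction, decreaseMetalevel the way back
      def twiceImpl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
        import c.universe._
        c.Expr[Int](q"${x.tree} + ${x.tree}")
      }

      // an implicit WeakTypeTag evidence list on the impl is what
      // transformTypeTagEvidenceParams strips when checking correspondence
      def nameOf[T]: String = macro nameOfImpl[T]
      def nameOfImpl[T](c: Context)(implicit tt: c.WeakTypeTag[T]): c.Expr[String] = {
        import c.universe._
        c.Expr[String](Literal(Constant(weakTypeOf[T].toString)))
      }

      // the Nothing special case is what lets a stub impl (Predef.???) typecheck,
      // enabling signature-first prototyping
      def todo(x: Int): Int = macro ???
    }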
diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala
index 2e57bc5..860dfd7 100644
--- a/src/compiler/scala/reflect/reify/Errors.scala
+++ b/src/compiler/scala/reflect/reify/Errors.scala
@@ -7,7 +7,6 @@ trait Errors {
   self: Reifier =>
 
   import global._
-  import definitions._
 
   def defaultErrorPosition = {
     val stack = currents collect { case t: Tree if t.pos != NoPosition => t.pos }
@@ -22,11 +21,6 @@ trait Errors {
     throw new ReificationException(defaultErrorPosition, msg)
   }
 
-  def CannotReifySymbol(sym: Symbol) = {
-    val msg = "implementation restriction: cannot reify symbol %s (%s)".format(sym, sym.accurateKindString)
-    throw new ReificationException(defaultErrorPosition, msg)
-  }
-
   def CannotReifyCompoundTypeTreeWithNonEmptyBody(ctt: CompoundTypeTree) = {
     val msg = "implementation restriction: cannot reify refinement type trees with non-empty bodies"
     throw new ReificationException(ctt.pos, msg)
diff --git a/src/compiler/scala/reflect/reify/Phases.scala b/src/compiler/scala/reflect/reify/Phases.scala
index 1710cae..4572cae 100644
--- a/src/compiler/scala/reflect/reify/Phases.scala
+++ b/src/compiler/scala/reflect/reify/Phases.scala
@@ -10,7 +10,6 @@ trait Phases extends Reshape
   self: Reifier =>
 
   import global._
-  import definitions._
 
   private var alreadyRun = false
 
@@ -26,7 +25,7 @@ trait Phases extends Reshape
     if (reifyDebug) println("[reshape phase]")
     tree = reshape.transform(tree)
     if (reifyDebug) println("[interlude]")
-    if (reifyDebug) println("reifee = " + (if (opt.showTrees) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString))
+    if (reifyDebug) println("reifee = " + (if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString))
 
     if (reifyDebug) println("[calculate phase]")
     calculate.traverse(tree)
@@ -41,4 +40,4 @@ trait Phases extends Reshape
 
     result
   }
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index 47669f5..b1cc797 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -6,10 +6,11 @@ import scala.reflect.macros.UnexpectedReificationException
 import scala.reflect.reify.utils.Utils
 
 /** Given a tree or a type, generate a tree that when executed at runtime produces the original tree or type.
- *  See more info in the comments to ``reify'' in scala.reflect.api.Universe.
+ *  See more info in the comments to `reify` in scala.reflect.api.Universe.
  *
- *  @author Martin Odersky
- *  @version 2.10
+ *  @author   Martin Odersky
+ *  @version  2.10
+ *  @since    2.10
  */
 abstract class Reifier extends States
                           with Phases
@@ -19,6 +20,8 @@ abstract class Reifier extends States
   val global: Global
   import global._
   import definitions._
+  private val runDefinitions = currentRun.runDefinitions
+  import runDefinitions._
 
   val typer: global.analyzer.Typer
   val universe: Tree
@@ -31,20 +34,20 @@ abstract class Reifier extends States
     this.asInstanceOf[Reifier { val global: Reifier.this.global.type }]
   override def hasReifier = true
 
-  /**
-   *  For ``reifee'' and other reification parameters, generate a tree of the form
-   *
+  /** For `reifee` and other reification parameters, generate a tree of the form
+   *  {{{
    *    {
-   *      val $u: universe.type = <[ universe ]>
-   *      val $m: $u.Mirror = <[ mirror ]>
-   *      $u.Expr[T](rtree)       // if data is a Tree
-   *      $u.TypeTag[T](rtree)    // if data is a Type
+   *      val \$u: universe.type = <[ universe ]>
+   *      val \$m: \$u.Mirror = <[ mirror ]>
+   *      \$u.Expr[T](rtree)       // if data is a Tree
+   *      \$u.TypeTag[T](rtree)    // if data is a Type
    *    }
+   *  }}}
    *
    *  where
    *
-   *    - `universe` is the tree that represents the universe the result will be bound to
-   *    - `mirror` is the tree that represents the mirror the result will be initially bound to
+   *    - `universe` is the tree that represents the universe the result will be bound to.
+   *    - `mirror` is the tree that represents the mirror the result will be initially bound to.
    *    - `rtree` is code that generates `reifee` at runtime.
    *    - `T` is the type that corresponds to `data`.
    *
@@ -57,7 +60,7 @@ abstract class Reifier extends States
 
       val result = reifee match {
         case tree: Tree =>
-          reifyTrace("reifying = ")(if (opt.showTrees) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)
+          reifyTrace("reifying = ")(if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)
           reifyTrace("reifee is located at: ")(tree.pos)
           reifyTrace("universe = ")(universe)
           reifyTrace("mirror = ")(mirror)
@@ -83,7 +86,7 @@ abstract class Reifier extends States
           throw new Error("reifee %s of type %s is not supported".format(reifee, if (reifee == null) "null" else reifee.getClass.toString))
       }
 
-      // todo. why do we resetAllAttrs?
+      // todo. why do we reset attrs?
       //
       // typically we do some preprocessing before reification and
       // the code emitted/moved around during preprocessing is very hard to typecheck, so we leave it as it is
@@ -106,15 +109,15 @@ abstract class Reifier extends States
       //
       // todo. this is a common problem with non-trivial macros in our current macro system
       // needs to be solved some day
-      // maybe try `resetLocalAttrs` once the dust settles
+      // upd. a new hope: https://groups.google.com/forum/#!topic/scala-internals/TtCTPlj_qcQ
       var importantSymbols = Set[Symbol](
         NothingClass, AnyClass, SingletonClass, PredefModule, ScalaRunTimeModule, TypeCreatorClass, TreeCreatorClass, MirrorClass,
-        ApiUniverseClass, JavaUniverseClass, ReflectRuntimePackage, ReflectRuntimeCurrentMirror)
+        ApiUniverseClass, JavaUniverseClass, ReflectRuntimePackage, runDefinitions.ReflectRuntimeCurrentMirror)
       importantSymbols ++= importantSymbols map (_.companionSymbol)
       importantSymbols ++= importantSymbols map (_.moduleClass)
       importantSymbols ++= importantSymbols map (_.linkedClassOfClass)
       def isImportantSymbol(sym: Symbol): Boolean = sym != null && sym != NoSymbol && importantSymbols(sym)
-      val untyped = resetAllAttrs(result, leaveAlone = {
+      val untyped = brutallyResetAttrs(result, leaveAlone = {
         case ValDef(_, u, _, _) if u == nme.UNIVERSE_SHORT => true
         case ValDef(_, m, _, _) if m == nme.MIRROR_SHORT => true
         case tree if symtab.syms contains tree.symbol => true
@@ -140,4 +143,4 @@ abstract class Reifier extends States
         throw new UnexpectedReificationException(defaultErrorPosition, "reification crashed", ex)
     }
   }
-}
\ No newline at end of file
+}
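On the library side, this generated shape is what backs the user-facing reification entry points; a minimal sketch, assuming scala-reflect is on the classpath:

    import scala.reflect.runtime.{universe => ru}

    object ReifyExample extends App {
      // reify yields an Expr: a tree for `1 + 2` bound to a universe and a mirror,
      // i.e. roughly the { val $u = ...; val $m = ...; $u.Expr[T](rtree) } block above
      val expr: ru.Expr[Int] = ru.reify { 1 + 2 }
      println(ru.showRaw(expr.tree))

      // the type flavour of the same machinery yields a TypeTag
      val tag: ru.TypeTag[List[Int]] = ru.typeTag[List[Int]]
      println(tag.tpe)
    }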
diff --git a/src/compiler/scala/reflect/reify/States.scala b/src/compiler/scala/reflect/reify/States.scala
index 58455c9..65f3f42 100644
--- a/src/compiler/scala/reflect/reify/States.scala
+++ b/src/compiler/scala/reflect/reify/States.scala
@@ -4,7 +4,6 @@ trait States {
   self: Reifier =>
 
   import global._
-  import definitions._
 
   /** Encapsulates reifier state
    *
@@ -36,7 +35,7 @@ trait States {
       if (!value && concrete) {
         current match {
           case tpe: Type => CannotReifyWeakType(s" having unresolved type parameter $tpe")
-          case sym: Symbol => CannotReifyWeakType(s" referring to local ${sym.kindString} ${sym.fullName}")
+          case sym: Symbol => CannotReifyWeakType(s" referring to ${sym.kindString} ${sym.fullName} local to the reifee")
           case _ => CannotReifyWeakType("")
         }
       }
diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala
index cbaee41..093c2be 100644
--- a/src/compiler/scala/reflect/reify/Taggers.scala
+++ b/src/compiler/scala/reflect/reify/Taggers.scala
@@ -1,14 +1,15 @@
 package scala.reflect.reify
 
 import scala.reflect.macros.{ReificationException, UnexpectedReificationException, TypecheckException}
-import scala.reflect.macros.runtime.Context
+import scala.reflect.macros.contexts.Context
 
 abstract class Taggers {
   val c: Context
 
   import c.universe._
   import definitions._
-  import treeBuild._
+  private val runDefinitions = currentRun.runDefinitions
+  import runDefinitions._
 
   val coreTags = Map(
     ByteTpe -> nme.Byte,
@@ -59,18 +60,18 @@ abstract class Taggers {
     val result =
       tpe match {
         case coreTpe if coreTags contains coreTpe =>
-          val ref = if (tagModule.owner.isPackageClass) Ident(tagModule) else Select(prefix, tagModule.name)
+          val ref = if (tagModule.isTopLevel) Ident(tagModule) else Select(prefix, tagModule.name)
           Select(ref, coreTags(coreTpe))
         case _ =>
           translatingReificationErrors(materializer)
       }
-    try c.typeCheck(result)
+    try c.typecheck(result)
     catch { case terr @ TypecheckException(pos, msg) => failTag(result, terr) }
   }
 
   def materializeExpr(universe: Tree, mirror: Tree, expr: Tree): Tree = {
     val result = translatingReificationErrors(c.reifyTree(universe, mirror, expr))
-    try c.typeCheck(result)
+    try c.typecheck(result)
     catch { case terr @ TypecheckException(pos, msg) => failExpr(result, terr) }
   }
 
diff --git a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
index dec491a..ce26232 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
@@ -5,7 +5,6 @@ trait GenAnnotationInfos {
   self: Reifier =>
 
   import global._
-  import definitions._
 
   // usually annotations are reified as their originals from Modifiers
   // however, when reifying free and tough types, we're forced to reify annotation infos as is
@@ -39,17 +38,10 @@ trait GenAnnotationInfos {
       }
     }
 
-    def reifyClassfileAnnotArg(arg: ClassfileAnnotArg): Tree = arg match {
-      case LiteralAnnotArg(const) =>
-        mirrorFactoryCall(nme.LiteralAnnotArg, reifyProduct(const))
-      case ArrayAnnotArg(args) =>
-        mirrorFactoryCall(nme.ArrayAnnotArg, scalaFactoryCall(nme.Array, args map reifyClassfileAnnotArg: _*))
-      case NestedAnnotArg(ann) =>
-        mirrorFactoryCall(nme.NestedAnnotArg, reifyAnnotationInfo(ann))
-    }
-
     // if you reify originals of anns, you get SO when trying to reify AnnotatedTypes, so screw it - after all, it's not that important
-    val reifiedAssocs = ann.assocs map (assoc => scalaFactoryCall(nme.Tuple2, reify(assoc._1), reifyClassfileAnnotArg(assoc._2)))
-    mirrorFactoryCall(nme.Annotation, reify(ann.atp), mkList(reifiedArgs), mkListMap(reifiedAssocs))
+    val Apply(Select(New(tpt), name), args) = annotationToTree(ann)
+    val reifiedAtp = mirrorCall(nme.Select, mirrorCall(nme.New, mirrorCall(nme.TypeTree, reifyType(tpt.tpe))), reify(name))
+    val reifiedAnnRepr = mirrorCall(nme.Apply, reifiedAtp, reifyList(args))
+    mirrorFactoryCall(nme.Annotation, reifiedAnnRepr)
   }
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenNames.scala b/src/compiler/scala/reflect/reify/codegen/GenNames.scala
index 4abf88f..4266c6f 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenNames.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenNames.scala
@@ -5,10 +5,9 @@ trait GenNames {
   self: Reifier =>
 
   import global._
-  import definitions._
 
   def reifyName(name: Name) = {
-    val factory = if (name.isTypeName) nme.nmeNewTypeName else nme.nmeNewTermName
+    val factory = if (name.isTypeName) nme.TypeName else nme.TermName
     mirrorCall(factory, Literal(Constant(name.toString)))
   }
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala
index 8c5db04..1d151c5 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala
@@ -5,7 +5,6 @@ trait GenPositions {
   self: Reifier =>
 
   import global._
-  import definitions._
 
   // we do not reify positions because this inflates resulting trees, but doesn't buy us anything
   // where would one use positions? right, in error messages
@@ -14,4 +13,4 @@ trait GenPositions {
   // however both macros and toolboxes have their own means to report errors in synthetic trees
   def reifyPosition(pos: Position): Tree =
     reifyMirrorObject(NoPosition)
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
index 47c966e..52ddcb1 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
@@ -7,14 +7,13 @@ trait GenSymbols {
   self: Reifier =>
 
   import global._
-  import definitions._
 
   /** Symbol table of the reifee.
    *
    *  Keeps track of auxiliary symbols that are necessary for this reification session.
    *  These include:
    *    1) Free vars (terms, types and existentials),
-   *    2) Non-locatable symbols (sometimes, e.g. for RefinedTypes, we need to reify these; to do that we create their local copies in the reificode)
+   *    2) Non-locatable symbols (sometimes, e.g. for RefinedTypes, we need to reify these; to do that we create their copies in the reificode)
    *    3) Non-locatable symbols that are referred by #1, #2 and #3
    *
    *  Exposes three main methods:
@@ -43,7 +42,7 @@ trait GenSymbols {
     else if (sym.isPackage)
       mirrorMirrorCall(nme.staticPackage, reify(sym.fullName))
     else if (sym.isLocatable) {
-      /** This is a fancy conundrum that stems from the fact that Scala allows
+      /*  This is a fancy conundrum that stems from the fact that Scala allows
        *  packageless packages and packageless objects with the same names in the same program.
        *
        *  For more details read the docs to staticModule and staticPackage.
@@ -91,17 +90,17 @@ trait GenSymbols {
           mirrorBuildCall(nme.selectTerm, rowner, rname)
       }
     } else {
-      // todo. make sure that free methods and free local defs work correctly
+      // todo. make sure that free methods work correctly
       if (sym.isExistential) reifySymDef(sym)
       else if (sym.isTerm) reifyFreeTerm(Ident(sym))
-      else reifyFreeType(Ident(sym))
+      else reifyFreeType(Ident(sym)) // TODO: reify refinement classes
     }
   }
 
   def reifyFreeTerm(binding: Tree): Tree =
     reifyIntoSymtab(binding.symbol) { sym =>
       if (reifyDebug) println("Free term" + (if (sym.isCapturedVariable) " (captured)" else "") + ": " + sym + "(" + sym.accurateKindString + ")")
-      val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name + (if (sym.isType) nme.REIFY_FREE_THIS_SUFFIX else ""))
+      val name = newTermName("" + nme.REIFY_FREE_PREFIX + sym.name + (if (sym.isType) nme.REIFY_FREE_THIS_SUFFIX else ""))
       // We need to note whether the free value being reified is stable or not to guide subsequent reflective compilation.
       // Here's why reflection compilation needs our help.
       //
@@ -132,9 +131,9 @@ trait GenSymbols {
       if (sym.isCapturedVariable) {
         assert(binding.isInstanceOf[Ident], showRaw(binding))
         val capturedBinding = referenceCapturedVariable(sym)
-        Reification(name, capturedBinding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), capturedBinding, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+        Reification(name, capturedBinding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), capturedBinding, mirrorBuildCall(nme.FlagsRepr, reify(sym.flags)), reify(origin(sym))))
       } else {
-        Reification(name, binding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), binding, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+        Reification(name, binding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), binding, mirrorBuildCall(nme.FlagsRepr, reify(sym.flags)), reify(origin(sym))))
       }
     }
 
@@ -142,16 +141,16 @@ trait GenSymbols {
     reifyIntoSymtab(binding.symbol) { sym =>
       if (reifyDebug) println("Free type: %s (%s)".format(sym, sym.accurateKindString))
       state.reificationIsConcrete = false
-      val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name)
-      Reification(name, binding, mirrorBuildCall(nme.newFreeType, reify(sym.name.toString), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+      val name: TermName = nme.REIFY_FREE_PREFIX append sym.name
+      Reification(name, binding, mirrorBuildCall(nme.newFreeType, reify(sym.name.toString), mirrorBuildCall(nme.FlagsRepr, reify(sym.flags)), reify(origin(sym))))
     }
 
   def reifySymDef(sym: Symbol): Tree =
     reifyIntoSymtab(sym) { sym =>
       if (reifyDebug) println("Sym def: %s (%s)".format(sym, sym.accurateKindString))
-      val name = newTermName(nme.REIFY_SYMDEF_PREFIX + sym.name)
+      val name: TermName = nme.REIFY_SYMDEF_PREFIX append sym.name
       def reifiedOwner = if (sym.owner.isLocatable) reify(sym.owner) else reifySymDef(sym.owner)
-      Reification(name, Ident(sym), mirrorBuildCall(nme.newNestedSymbol, reifiedOwner, reify(sym.name), reify(sym.pos), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(sym.isClass)))
+      Reification(name, Ident(sym), mirrorBuildCall(nme.newNestedSymbol, reifiedOwner, reify(sym.name), reify(sym.pos), mirrorBuildCall(nme.FlagsRepr, reify(sym.flags)), reify(sym.isClass)))
     }
 
   case class Reification(name: Name, binding: Tree, tree: Tree)
@@ -173,7 +172,7 @@ trait GenSymbols {
       val reification = reificode(sym)
       import reification.{name, binding}
       val tree = reification.tree updateAttachment ReifyBindingAttachment(binding)
-      state.symtab += (sym, name, tree)
+      state.symtab += (sym, name.toTermName, tree)
     }
     fromSymtab
   }
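The free terms this symbol table tracks arise whenever a reified expression closes over a value that has no static path; a minimal sketch, assuming scala-reflect is on the classpath:

    import scala.reflect.runtime.{universe => ru}
    import ru._

    object FreeTermExample extends App {
      def build(): Expr[Int] = {
        val x = 42
        // `x` is local to this method and has no static path, so the reifier
        // must capture it as a free term rather than reify a reference to it
        reify { x + 1 }
      }

      println(showRaw(build().tree))
    }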
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index 9894e35..743fe13 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -15,7 +15,7 @@ trait GenTrees {
 
   /**
    *  Reify a tree.
-   *  For internal use only, use ``reified'' instead.
+   *  For internal use only, use `reified` instead.
    */
   def reifyTree(tree: Tree): Tree = {
     assert(tree != null, "tree is null")
@@ -29,42 +29,29 @@ trait GenTrees {
 
     // the idea behind the new reincarnation of reifier is a simple maxim:
     //
-    //   never call ``reifyType'' to reify a tree
+    //   never call `reifyType` to reify a tree
     //
     // this works because the stuff we are reifying was once represented with trees only
     // and lexical scope information can be fully captured by reifying symbols
     //
-    // to enable this idyll, we work hard in the ``Reshape'' phase
+    // to enable this idyll, we work hard in the `Reshape` phase
     // which replaces all types with equivalent trees and works around non-idempotencies of the typechecker
     //
     // why bother? because this brings method to the madness
     // the first prototype of reification reified all types and symbols for all trees => this quickly became unwieldy
-    // the second prototype reified external types, but avoided reifying local ones => this created an ugly irregularity
+    // the second prototype reified external types, but avoided reifying ones local to the reifee => this created an ugly irregularity
     // current approach is uniform and compact
-    var rtree = tree match {
-      case global.EmptyTree =>
-        reifyMirrorObject(EmptyTree)
-      case global.emptyValDef =>
-        mirrorBuildSelect(nme.emptyValDef)
-      case FreeDef(_, _, _, _, _) =>
-        reifyNestedFreeDef(tree)
-      case FreeRef(_, _) =>
-        reifyNestedFreeRef(tree)
-      case BoundTerm(tree) =>
-        reifyBoundTerm(tree)
-      case BoundType(tree) =>
-        reifyBoundType(tree)
-      case Literal(const @ Constant(_)) =>
-        mirrorCall(nme.Literal, reifyProduct(const))
-      case Import(expr, selectors) =>
-        mirrorCall(nme.Import, reify(expr), mkList(selectors map reifyProduct))
-      case _ =>
-        reifyProduct(tree)
+    var rtree: Tree = tree match {
+      case FreeDef(_, _, _, _, _) => reifyNestedFreeDef(tree)
+      case FreeRef(_, _)          => reifyNestedFreeRef(tree)
+      case BoundTerm(tree)        => reifyBoundTerm(tree)
+      case BoundType(tree)        => reifyBoundType(tree)
+      case _                      => reifyTreeSyntactically(tree)
     }
 
     // usually we don't reify symbols/types, because they can be re-inferred during subsequent reflective compilation
-    // however, reification of AnnotatedTypes is special. see ``reifyType'' to find out why.
-    if (reifyTreeSymbols && tree.hasSymbol) {
+    // however, reification of AnnotatedTypes is special. see `reifyType` to find out why.
+    if (reifyTreeSymbols && tree.hasSymbolField) {
       if (reifyDebug) println("reifying symbol %s for tree %s".format(tree.symbol, tree))
       rtree = mirrorBuildCall(nme.setSymbol, rtree, reify(tree.symbol))
     }
@@ -76,21 +63,34 @@ trait GenTrees {
     rtree
   }
 
+  def reifyTreeSyntactically(tree: Tree): Tree = tree match {
+    case global.EmptyTree             => reifyMirrorObject(EmptyTree)
+    case global.noSelfType            => mirrorSelect(nme.noSelfType)
+    case global.pendingSuperCall      => mirrorSelect(nme.pendingSuperCall)
+    case Literal(const @ Constant(_)) => mirrorCall(nme.Literal, reifyProduct(const))
+    case Import(expr, selectors)      => mirrorCall(nme.Import, reify(expr), mkList(selectors map reifyProduct))
+    case _                            => reifyProduct(tree)
+  }
+
+  def reifyFlags(flags: FlagSet) =
+    if (flags != 0) reifyBuildCall(nme.FlagsRepr, flags) else mirrorSelect(nme.NoFlags)
+
   def reifyModifiers(m: global.Modifiers) =
-    mirrorFactoryCall(nme.Modifiers, mirrorBuildCall(nme.flagsFromBits, reify(m.flags)), reify(m.privateWithin), reify(m.annotations))
+    if (m == NoMods) mirrorSelect(nme.NoMods)
+    else mirrorFactoryCall(nme.Modifiers, reifyFlags(m.flags), reify(m.privateWithin), reify(m.annotations))
 
   private def spliceTree(tree: Tree): Tree = {
     tree match {
       case TreeSplice(splicee) =>
         if (reifyDebug) println("splicing " + tree)
 
-        // see ``Metalevels'' for more info about metalevel breaches
+        // see `Metalevels` for more info about metalevel breaches
         // and about how we deal with splices that contain them
-        val isMetalevelBreach = splicee exists (sub => sub.hasSymbol && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
-        val isRuntimeEval = splicee exists (sub => sub.hasSymbol && sub.symbol == ExprSplice)
+        val isMetalevelBreach = splicee exists (sub => sub.hasSymbolField && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
+        val isRuntimeEval = splicee exists (sub => sub.hasSymbolField && sub.symbol == ExprSplice)
         if (isMetalevelBreach || isRuntimeEval) {
           // we used to convert dynamic splices into runtime evals transparently, but we no longer do that
-          // why? see comments in ``Metalevels''
+          // why? see comments in `Metalevels`
           // if (reifyDebug) println("splicing has failed: cannot splice when facing a metalevel breach")
           // EmptyTree
           CannotReifyRuntimeSplice(tree)
@@ -100,9 +100,9 @@ trait GenTrees {
             // we intentionally don't care about the prefix (the first underscore in the `ReifiedTree` pattern match)
             case ReifiedTree(_, _, inlinedSymtab, rtree, _, _, _) =>
               if (reifyDebug) println("inlining the splicee")
-              // all free vars local to the enclosing reifee should've already been inlined by ``Metalevels''
+              // all free vars local to the enclosing reifee should've already been inlined by `Metalevels`
               for (sym <- inlinedSymtab.syms if sym.isLocalToReifee)
-                abort("local free var, should have already been inlined by Metalevels: " + inlinedSymtab.symDef(sym))
+                abort("free var local to the reifee, should have already been inlined by Metalevels: " + inlinedSymtab.symDef(sym))
               state.symtab ++= inlinedSymtab
               rtree
             case tree =>
@@ -129,32 +129,32 @@ trait GenTrees {
         else if (sym.isClass && !sym.isModuleClass) {
           if (reifyDebug) println("This for %s, reified as freeVar".format(sym))
           if (reifyDebug) println("Free: " + sym)
-          mirrorBuildCall(nme.Ident, reifyFreeTerm(This(sym)))
+          mirrorBuildCall(nme.mkIdent, reifyFreeTerm(This(sym)))
         }
         else {
           if (reifyDebug) println("This for %s, reified as This".format(sym))
-          mirrorBuildCall(nme.This, reify(sym))
+          mirrorBuildCall(nme.mkThis, reify(sym))
         }
 
       case Ident(name) =>
         if (sym == NoSymbol) {
           // this sometimes happens, e.g. for binds that don't have a body
           // or for untyped code generated during previous phases
-          // (see a comment in Reifiers about the latter, starting with "why do we resetAllAttrs?")
+          // (see a comment in Reifiers about the latter, starting with "why do we reset attrs?")
           mirrorCall(nme.Ident, reify(name))
         }
         else if (!sym.isLocalToReifee) {
           if (sym.isVariable && sym.owner.isTerm) {
             captureVariable(sym) // Note order dependency: captureVariable needs to come before reification here.
-            mirrorCall(nme.Select, mirrorBuildCall(nme.Ident, reify(sym)), reify(nme.elem))
+            mirrorCall(nme.Select, mirrorBuildCall(nme.mkIdent, reify(sym)), reify(nme.elem))
           }
-          else mirrorBuildCall(nme.Ident, reify(sym))
+          else mirrorBuildCall(nme.mkIdent, reify(sym))
         }
         else mirrorCall(nme.Ident, reify(name))
 
       case Select(qual, name) =>
         if (qual.symbol != null && qual.symbol.isPackage) {
-          mirrorBuildCall(nme.Ident, reify(sym))
+          mirrorBuildCall(nme.mkIdent, reify(sym))
         } else {
           val effectiveName = if (sym != null && sym != NoSymbol) sym.name else name
           reifyProduct(Select(qual, effectiveName))
@@ -173,7 +173,7 @@ trait GenTrees {
       assert(tpe != null, "unexpected: bound type that doesn't have a tpe: " + showRaw(tree))
 
       // if a symbol or a type of the scrutinee are local to reifee
-      // (e.g. point to a locally declared class or to a path-dependent thingie that depends on a local variable)
+      // (e.g. point to a locally declared class or to a path-dependent thingie that depends on a variable defined within the reifee)
       // then we can reify the scrutinee as a symless AST and that will definitely be hygienic
       // why? because then typechecking of a scrutinee doesn't depend on the environment external to the quasiquote
       // otherwise we need to reify the corresponding type
@@ -187,7 +187,7 @@ trait GenTrees {
 
           if (spliced == EmptyTree) {
             if (reifyDebug) println("splicing failed: reify as is")
-            mirrorBuildCall(nme.TypeTree, reify(tpe))
+            mirrorBuildCall(nme.mkTypeTree, reify(tpe))
           }
           else spliced match {
             case TypeRefToFreeType(freeType) =>
@@ -195,7 +195,7 @@ trait GenTrees {
               Ident(freeType)
             case _ =>
               if (reifyDebug) println("splicing succeeded: " + spliced)
-              mirrorBuildCall(nme.TypeTree, spliced)
+              mirrorBuildCall(nme.mkTypeTree, spliced)
           }
         }
         else tree match {
@@ -207,10 +207,10 @@ trait GenTrees {
             mirrorCall(nme.SelectFromTypeTree, reify(qual), reify(name))
           case _ if sym.isLocatable =>
             if (reifyDebug) println(s"tpe is locatable: reify as Ident($sym)")
-            mirrorBuildCall(nme.Ident, reify(sym))
+            mirrorBuildCall(nme.mkIdent, reify(sym))
           case _ =>
             if (reifyDebug) println(s"tpe is not locatable: reify as TypeTree($tpe)")
-            mirrorBuildCall(nme.TypeTree, reify(tpe))
+            mirrorBuildCall(nme.mkTypeTree, reify(tpe))
         }
       }
     }
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
index bb7e1f9..d007df7 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
@@ -6,10 +6,12 @@ trait GenTypes {
 
   import global._
   import definitions._
+  private val runDefinitions = currentRun.runDefinitions
+  import runDefinitions.{ReflectRuntimeUniverse, ReflectRuntimeCurrentMirror, _}
 
   /**
    *  Reify a type.
-   *  For internal use only, use ``reified'' instead.
+   *  For internal use only, use `reified` instead.
    */
   def reifyType(tpe: Type): Tree = {
     assert(tpe != null, "tpe is null")
@@ -32,9 +34,9 @@ trait GenTypes {
     if (tsym.isClass && tpe == tsym.typeConstructor && tsym.isStatic)
       Select(Select(reify(tsym), nme.asType), nme.toTypeConstructor)
     else tpe match {
-      case tpe @ NoType =>
+      case tpe : NoType.type =>
         reifyMirrorObject(tpe)
-      case tpe @ NoPrefix =>
+      case tpe : NoPrefix.type =>
         reifyMirrorObject(tpe)
       case tpe @ ThisType(root) if root.isRoot =>
         mirrorBuildCall(nme.thisPrefix, mirrorMirrorSelect(nme.RootClass))
@@ -43,22 +45,22 @@ trait GenTypes {
       case tpe @ ThisType(clazz) if clazz.isModuleClass && clazz.isStatic =>
         val module = reify(clazz.sourceModule)
         val moduleClass = Select(Select(module, nme.asModule), nme.moduleClass)
-        mirrorFactoryCall(nme.ThisType, moduleClass)
-      case tpe @ ThisType(_) =>
-        reifyProduct(tpe)
+        mirrorBuildCall(nme.ThisType, moduleClass)
+      case tpe @ ThisType(sym) =>
+        reifyBuildCall(nme.ThisType, sym)
       case tpe @ SuperType(thistpe, supertpe) =>
-        reifyProduct(tpe)
+        reifyBuildCall(nme.SuperType, thistpe, supertpe)
       case tpe @ SingleType(pre, sym) =>
-        reifyProduct(tpe)
+        reifyBuildCall(nme.SingleType, pre, sym)
       case tpe @ ConstantType(value) =>
-        mirrorFactoryCall(nme.ConstantType, reifyProduct(value))
+        mirrorBuildCall(nme.ConstantType, reifyProduct(value))
       case tpe @ TypeRef(pre, sym, args) =>
-        reifyProduct(tpe)
+        reifyBuildCall(nme.TypeRef, pre, sym, args)
       case tpe @ TypeBounds(lo, hi) =>
-        reifyProduct(tpe)
+        reifyBuildCall(nme.TypeBounds, lo, hi)
       case tpe @ NullaryMethodType(restpe) =>
-        reifyProduct(tpe)
-      case tpe @ AnnotatedType(anns, underlying, selfsym) =>
+        reifyBuildCall(nme.NullaryMethodType, restpe)
+      case tpe @ AnnotatedType(anns, underlying) =>
         reifyAnnotatedType(tpe)
       case _ =>
         reifyToughType(tpe)
@@ -73,7 +75,6 @@ trait GenTypes {
       if (reifyDebug) println("splicing " + tpe)
 
       val tagFlavor = if (concrete) tpnme.TypeTag.toString else tpnme.WeakTypeTag.toString
-      val key = (tagFlavor, tpe.typeSymbol)
       // if this fails, it might produce the dreaded "erroneous or inaccessible type" error
       // to find out the whereabouts of the error run scalac with -Ydebug
       if (reifyDebug) println("launching implicit search for %s.%s[%s]".format(universe, tagFlavor, tpe))
@@ -118,7 +119,8 @@ trait GenTypes {
             // todo. write a test for this
             if (ReflectRuntimeUniverse == NoSymbol) CannotConvertManifestToTagWithoutScalaReflect(tpe, manifestInScope)
             val cm = typer.typed(Ident(ReflectRuntimeCurrentMirror))
-            val tagTree = gen.mkMethodCall(ReflectRuntimeUniverse, nme.manifestToTypeTag, List(tpe), List(cm, manifestInScope))
+            val internal = gen.mkAttributedSelect(gen.mkAttributedRef(ReflectRuntimeUniverse), UniverseInternal)
+            val tagTree = gen.mkMethodCall(Select(internal, nme.manifestToTypeTag), List(tpe), List(cm, manifestInScope))
             Select(Apply(Select(tagTree, nme.in), List(Ident(nme.MIRROR_SHORT))), nme.tpe)
           case _ =>
             EmptyTree
@@ -156,13 +158,13 @@ trait GenTypes {
    */
   private def reifySemiConcreteTypeMember(tpe: Type): Tree = tpe match {
     case tpe @ TypeRef(pre @ SingleType(prepre, presym), sym, args) if sym.isAbstractType && !sym.isExistential =>
-      return mirrorFactoryCall(nme.TypeRef, reify(pre), mirrorBuildCall(nme.selectType, reify(sym.owner), reify(sym.name.toString)), reify(args))
+      mirrorBuildCall(nme.TypeRef, reify(pre), mirrorBuildCall(nme.selectType, reify(sym.owner), reify(sym.name.toString)), reify(args))
   }
 
   /** Reify an annotated type, i.e. the one that makes us deal with AnnotationInfos */
   private def reifyAnnotatedType(tpe: AnnotatedType): Tree = {
-    val AnnotatedType(anns, underlying, selfsym) = tpe
-    mirrorFactoryCall(nme.AnnotatedType, mkList(anns map reifyAnnotationInfo), reify(underlying), reify(selfsym))
+    val AnnotatedType(anns, underlying) = tpe
+    mirrorBuildCall(nme.AnnotatedType, mkList(anns map reifyAnnotationInfo), reify(underlying))
   }
 
   /** Reify a tough type, i.e. the one that leads to creation of auxiliary symbols */
@@ -171,25 +173,25 @@ trait GenTypes {
 
     def reifyScope(scope: Scope): Tree = {
       scope foreach reifySymDef
-      mirrorCall(nme.newScopeWith, scope.toList map reify: _*)
+      mirrorBuildCall(nme.newScopeWith, scope.toList map reify: _*)
     }
 
     tpe match {
       case tpe @ RefinedType(parents, decls) =>
         reifySymDef(tpe.typeSymbol)
-        mirrorFactoryCall(tpe, reify(parents), reifyScope(decls), reify(tpe.typeSymbol))
+        mirrorBuildCall(nme.RefinedType, reify(parents), reifyScope(decls), reify(tpe.typeSymbol))
       case tpe @ ExistentialType(tparams, underlying) =>
         tparams foreach reifySymDef
-        mirrorFactoryCall(tpe, reify(tparams), reify(underlying))
+        reifyBuildCall(nme.ExistentialType, tparams, underlying)
       case tpe @ ClassInfoType(parents, decls, clazz) =>
         reifySymDef(clazz)
-        mirrorFactoryCall(tpe, reify(parents), reifyScope(decls), reify(tpe.typeSymbol))
+        mirrorBuildCall(nme.ClassInfoType, reify(parents), reifyScope(decls), reify(tpe.typeSymbol))
       case tpe @ MethodType(params, restpe) =>
         params foreach reifySymDef
-        mirrorFactoryCall(tpe, reify(params), reify(restpe))
+        reifyBuildCall(nme.MethodType, params, restpe)
       case tpe @ PolyType(tparams, underlying) =>
         tparams foreach reifySymDef
-        mirrorFactoryCall(tpe, reify(tparams), reify(underlying))
+        reifyBuildCall(nme.PolyType, tparams, underlying)
       case _ =>
         throw new Error("internal error: %s (%s) is not supported".format(tpe, tpe.kind))
     }
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index 49877b4..4512b2c 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -5,7 +5,10 @@ trait GenUtils {
   self: Reifier =>
 
   import global._
+  import treeInfo._
   import definitions._
+  private val runDefinitions = currentRun.runDefinitions
+  import runDefinitions._
 
   def reifyList(xs: List[Any]): Tree =
     mkList(xs map reify)
@@ -31,41 +34,35 @@ trait GenUtils {
   def call(fname: String, args: Tree*): Tree =
     Apply(termPath(fname), args.toList)
 
-  def mirrorSelect(name: String): Tree =
-    termPath(nme.UNIVERSE_PREFIX + name)
+  def mirrorSelect(name: String): Tree   = termPath(nme.UNIVERSE_PREFIX + name)
+  def mirrorSelect(name: TermName): Tree = mirrorSelect(name.toString)
 
-  def mirrorBuildSelect(name: String): Tree =
-    termPath(nme.UNIVERSE_BUILD_PREFIX + name)
-
-  def mirrorMirrorSelect(name: String): Tree =
-    termPath(nme.MIRROR_PREFIX + name)
+  def mirrorMirrorSelect(name: TermName): Tree =
+    termPath("" + nme.MIRROR_PREFIX + name)
 
   def mirrorCall(name: TermName, args: Tree*): Tree =
-    call("" + (nme.UNIVERSE_PREFIX append name), args: _*)
-
-  def mirrorCall(name: String, args: Tree*): Tree =
-    call(nme.UNIVERSE_PREFIX + name, args: _*)
+    call("" + nme.UNIVERSE_PREFIX + name, args: _*)
 
   def mirrorBuildCall(name: TermName, args: Tree*): Tree =
-    call("" + (nme.UNIVERSE_BUILD_PREFIX append name), args: _*)
+    call("" + nme.UNIVERSE_BUILD_PREFIX + name, args: _*)
 
-  def mirrorBuildCall(name: String, args: Tree*): Tree =
-    call(nme.UNIVERSE_BUILD_PREFIX + name, args: _*)
+  def reifyBuildCall(name: TermName, args: Any*) =
+      mirrorBuildCall(name, args map reify: _*)
 
   def mirrorMirrorCall(name: TermName, args: Tree*): Tree =
-    call("" + (nme.MIRROR_PREFIX append name), args: _*)
-
-  def mirrorMirrorCall(name: String, args: Tree*): Tree =
-    call(nme.MIRROR_PREFIX + name, args: _*)
+    call("" + nme.MIRROR_PREFIX + name, args: _*)
 
   def mirrorFactoryCall(value: Product, args: Tree*): Tree =
     mirrorFactoryCall(value.productPrefix, args: _*)
 
-  def mirrorFactoryCall(prefix: String, args: Tree*): Tree =
-    mirrorCall(prefix, args: _*)
+  def mirrorFactoryCall(prefix: TermName, args: Tree*): Tree =
+    mirrorCall("" + prefix, args: _*)
+
+  def scalaFactoryCall(name: TermName, args: Tree*): Tree =
+    call(s"scala.$name.apply", args: _*)
 
   def scalaFactoryCall(name: String, args: Tree*): Tree =
-    call("scala." + name + ".apply", args: _*)
+    scalaFactoryCall(name: TermName, args: _*)
 
   def mkList(args: List[Tree]): Tree =
     scalaFactoryCall("collection.immutable.List", args: _*)
@@ -91,22 +88,6 @@ trait GenUtils {
   /** An (unreified) path that refers to term definition with given fully qualified name */
   def termPath(fullname: String): Tree = path(fullname, newTermName)
 
-  /** An (unreified) path that refers to type definition with given fully qualified name */
-  def typePath(fullname: String): Tree = path(fullname, newTypeName)
-
-  def isTough(tpe: Type) = {
-    def isTough(tpe: Type) = tpe match {
-      case _: RefinedType => true
-      case _: ExistentialType => true
-      case _: ClassInfoType => true
-      case _: MethodType => true
-      case _: PolyType => true
-      case _ => false
-    }
-
-    tpe != null && (tpe exists isTough)
-  }
-
   object TypedOrAnnotated {
     def unapply(tree: Tree): Option[Tree] = tree match {
       case ty @ Typed(_, _) =>
@@ -118,15 +99,6 @@ trait GenUtils {
     }
   }
 
-  def isAnnotated(tpe: Type) = {
-    def isAnnotated(tpe: Type) = tpe match {
-      case _: AnnotatedType => true
-      case _ => false
-    }
-
-    tpe != null && (tpe exists isAnnotated)
-  }
-
   def isSemiConcreteTypeMember(tpe: Type) = tpe match {
     case TypeRef(SingleType(_, _), sym, _) if sym.isAbstractType && !sym.isExistential => true
     case _ => false
@@ -145,4 +117,4 @@ trait GenUtils {
     if (origin == "") origin = "of unknown origin"
     origin
   }
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala
index 6777bb0..eea63d8 100644
--- a/src/compiler/scala/reflect/reify/package.scala
+++ b/src/compiler/scala/reflect/reify/package.scala
@@ -1,11 +1,11 @@
-package scala.reflect
+package scala
+package reflect
 
-import scala.language.implicitConversions
-import scala.reflect.macros.{Context, ReificationException, UnexpectedReificationException}
+import scala.reflect.macros.ReificationException
 import scala.tools.nsc.Global
 
 package object reify {
-  private def mkReifier(global1: Global)(typer: global1.analyzer.Typer, universe: global1.Tree, mirror: global1.Tree, reifee: Any, concrete: Boolean = false): Reifier { val global: global1.type } = {
+  private def mkReifier(global1: Global)(typer: global1.analyzer.Typer, universe: global1.Tree, mirror: global1.Tree, reifee: Any, concrete: Boolean): Reifier { val global: global1.type } = {
     val typer1: typer.type = typer
     val universe1: universe.type = universe
     val mirror1: mirror.type = mirror
@@ -24,14 +24,15 @@ package object reify {
 
   private[reify] def mkDefaultMirrorRef(global: Global)(universe: global.Tree, typer0: global.analyzer.Typer): global.Tree = {
     import global._
-    import definitions._
+    import definitions.JavaUniverseClass
+
     val enclosingErasure = {
       val rClassTree = reifyEnclosingRuntimeClass(global)(typer0)
       // HACK around SI-6259
       // If we're in the constructor of an object or otherwise don't have easy access to `this`, we have no good way to grab
       // the class of that object.  Instead, we construct an anonymous class and grab its class file, assuming
       // this is enough to get the correct class loader for the class we *want* a mirror for, the object itself.
-      rClassTree orElse Apply(Select(treeBuilder.makeAnonymousNew(Nil), sn.GetClass), Nil)
+      rClassTree orElse Apply(Select(gen.mkAnonymousNew(Nil), sn.GetClass), Nil)
     }
     // JavaUniverse is defined in scala-reflect.jar, so we must be very careful in case someone reifies stuff having only scala-library.jar on the classpath
     val isJavaUniverse = JavaUniverseClass != NoSymbol && universe.tpe <:< JavaUniverseClass.toTypeConstructor
@@ -59,14 +60,14 @@ package object reify {
       if (concrete) throw new ReificationException(enclosingMacroPosition, "tpe %s is an unresolved spliceable type".format(tpe))
     }
 
-    tpe match {
+    tpe.dealiasWiden match {
       case TypeRef(_, ArrayClass, componentTpe :: Nil) =>
         val componentErasure = reifyRuntimeClass(global)(typer0, componentTpe, concrete)
-        gen.mkMethodCall(arrayClassMethod, List(componentErasure))
+        gen.mkMethodCall(currentRun.runDefinitions.arrayClassMethod, List(componentErasure))
       case _ =>
         var erasure = tpe.erasure
         if (tpe.typeSymbol.isDerivedValueClass && global.phase.id < global.currentRun.erasurePhase.id) erasure = tpe
-        gen.mkNullaryCall(Predef_classOf, List(erasure))
+        gen.mkNullaryCall(currentRun.runDefinitions.Predef_classOf, List(erasure))
     }
   }
 
@@ -74,7 +75,6 @@ package object reify {
   // a class/object body, this will return an EmptyTree.
   def reifyEnclosingRuntimeClass(global: Global)(typer0: global.analyzer.Typer): global.Tree = {
     import global._
-    import definitions._
     def isThisInScope = typer0.context.enclosingContextChain exists (_.tree.isInstanceOf[ImplDef])
     if (isThisInScope) {
       val enclosingClasses = typer0.context.enclosingContextChain map (_.tree) collect { case classDef: ClassDef => classDef }
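
(Editorial sketch, not part of the patch.) The SI-6259 workaround described in the comment above can be pictured with ordinary runtime reflection: an anonymous class instantiated in the same scope is loaded by the same class loader as the enclosing object, which is all a runtime mirror needs. The object name below is invented, and the snippet assumes scala-reflect 2.11 on the classpath.

    object MirrorExample {
      // during object construction `this.getClass` may be awkward to reach,
      // but an anonymous class created right here is loaded by the same class loader
      val loader: ClassLoader = (new {}).getClass.getClassLoader
      val mirror = scala.reflect.runtime.universe.runtimeMirror(loader)
    }
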
diff --git a/src/compiler/scala/reflect/reify/phases/Calculate.scala b/src/compiler/scala/reflect/reify/phases/Calculate.scala
index 4d1e22a..a0035d7 100644
--- a/src/compiler/scala/reflect/reify/phases/Calculate.scala
+++ b/src/compiler/scala/reflect/reify/phases/Calculate.scala
@@ -5,7 +5,6 @@ trait Calculate {
   self: Reifier =>
 
   import global._
-  import definitions._
 
   implicit class RichCalculateSymbol(sym: Symbol) {
     def metalevel: Int = { assert(sym != null && sym != NoSymbol); localSymbols.getOrElse(sym, 0) }
@@ -27,10 +26,10 @@ trait Calculate {
     }
 
   /**
-   *  Merely traverses the reifiee and records local symbols along with their metalevels.
+   *  Merely traverses the reifee and records symbols local to the reifee along with their metalevels.
    */
   val calculate = new Traverser {
-    // see the explanation of metalevels in ``Metalevels''
+    // see the explanation of metalevels in `Metalevels`
     var currMetalevel = 1
 
     override def traverse(tree: Tree): Unit = tree match {
diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
index fbbd12a..c692633 100644
--- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala
+++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
@@ -1,16 +1,17 @@
 package scala.reflect.reify
 package phases
 
+import scala.collection.{ mutable }
+
 trait Metalevels {
   self: Reifier =>
 
   import global._
-  import definitions._
 
   /**
    *  Makes sense of cross-stage bindings.
    *
-   *  ================
+   *  ----------------
    *
    *  Analysis of cross-stage bindings becomes convenient if we introduce the notion of metalevels.
    *  Metalevel of a tree is a number that gets incremented every time you reify something and gets decremented when you splice something.
@@ -32,27 +33,27 @@ trait Metalevels {
    *
   *  1) symbol.metalevel < curr_metalevel. In this case the reifier will generate a free variable
    *  that captures both the name of the symbol (to be compiled successfully) and its value (to be run successfully).
-   *  For example, x in Example 1 will be reified as follows: Ident(newFreeVar("x", IntClass.tpe, x))
+   *  For example, x in Example 1 will be reified as follows: Ident(newFreeVar("x", IntTpe, x))
    *
    *  2) symbol.metalevel > curr_metalevel. This leads to a metalevel breach that violates intuitive perception of splicing.
    *  As defined in macro spec, splicing takes a tree and inserts it into another tree - as simple as that.
    *  However, how exactly do we do that in the case of y.splice? In this very scenario we can use dataflow analysis and inline it,
    *  but what if y were a var, and what if it were calculated randomly at runtime?
    *
-   *  This question has a genuinely simple answer. Sure, we cannot resolve such splices statically (i.e. during macro expansion of ``reify''),
+   *  This question has a genuinely simple answer. Sure, we cannot resolve such splices statically (i.e. during macro expansion of `reify`),
   *  but now we have runtime toolboxes, so no one stops us from picking up that reified tree and evaluating it at runtime
-   *  (in fact, this is something that ``Expr.splice'' does transparently).
+   *  (in fact, this is something that `Expr.splice` does transparently).
    *
   *  This is akin to the early vs. late binding dilemma.
   *  The former is faster; moreover, the latter (implemented with reflection) might not work because of visibility issues or might not be available on all platforms.
   *  But the latter still has its uses, so I'm allowing metalevel breaches, but introducing the -Xlog-runtime-evals to log them.
    *
-   *  upd. We no longer do that. In case of a runaway ``splice'' inside a `reify`, one will get a static error.
+   *  upd. We no longer do that. In case of a runaway `splice` inside a `reify`, one will get a static error.
    *  Why? Unfortunately, the cute idea of transparently converting between static and dynamic splices has failed.
   *  1) Runtime eval that services dynamic splices requires scala-compiler.jar, which might not be on the library classpath
   *  2) Runtime eval incurs a severe performance penalty, so it's better to be explicit about it
    *
-   *  ================
+   *  ----------------
    *
    *  As we can see, the only problem is the fact that lhs'es of `splice` can be code blocks that can capture variables from the outside.
   *  Code inside the lhs of a `splice` is not reified, while the code from the enclosing reify is.
@@ -71,7 +72,7 @@ trait Metalevels {
    *  Since the result of the inner reify is wrapped in a splice, it won't be reified
    *  together with the other parts of the outer reify, but will be inserted into that result verbatim.
    *
-   *  The inner reify produces an Expr[Int] that wraps Ident(freeVar("x", IntClass.tpe, x)).
+   *  The inner reify produces an Expr[Int] that wraps Ident(freeVar("x", IntTpe, x)).
    *  However the freevar the reification points to will vanish when the compiler processes the outer reify.
    *  That's why we need to replace that freevar with a regular symbol that will point to reified x.
    *
@@ -102,7 +103,7 @@ trait Metalevels {
    */
   val metalevels = new Transformer {
     var insideSplice = false
-    var inlineableBindings = scala.collection.mutable.Map[TermName, Tree]()
+    val inlineableBindings = mutable.Map[TermName, Tree]()
 
     def withinSplice[T](op: => T) = {
       val old = insideSplice
@@ -124,7 +125,7 @@ trait Metalevels {
         withinSplice { super.transform(TreeSplice(ReifiedTree(universe, mirror, symtab1, rtree, tpe, rtpe, concrete))) }
       case TreeSplice(splicee) =>
         if (reifyDebug) println("entering splice: " + splicee)
-        val breaches = splicee filter (sub => sub.hasSymbol && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
+        val breaches = splicee filter (sub => sub.hasSymbolField && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
         if (!insideSplice && breaches.nonEmpty) {
           // we used to convert dynamic splices into runtime evals transparently, but we no longer do that
           // why? see comments above
@@ -135,7 +136,7 @@ trait Metalevels {
         } else {
           withinSplice { super.transform(tree) }
         }
-      // todo. also inline usages of ``inlineableBindings'' in the symtab itself
+      // todo. also inline usages of `inlineableBindings` in the symtab itself
       // e.g. a free$Foo can well use free$x, if Foo is path-dependent w.r.t x
       // FreeRef(_, _) check won't work, because metalevels of symbol table and body are different, hence, freerefs in symbol table look different from freerefs in body
       case FreeRef(_, name) if inlineableBindings contains name =>
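
(Editorial sketch, not part of the patch.) The metalevel rules documented in the comment above can be observed through the public reflection API. A minimal sketch, assuming scala-reflect 2.11; the names x, e, wrap and y are illustrative only.

    import scala.reflect.runtime.universe._

    val x = 2                              // metalevel 0
    val e: Expr[Int] = reify(x + 2)        // inside reify the metalevel is 1, so x
                                           // (level 0 < 1) is captured as a free variable,
                                           // roughly Ident(newFreeVar("x", IntTpe, x))

    def wrap(y: Expr[Int]): Expr[Int] = reify(y.splice * 2)
    // y.splice drops back to metalevel 0: the tree held by y is inserted into the
    // reified result verbatim instead of being reified again
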
diff --git a/src/compiler/scala/reflect/reify/phases/Reify.scala b/src/compiler/scala/reflect/reify/phases/Reify.scala
index 8e13a45..143424d 100644
--- a/src/compiler/scala/reflect/reify/phases/Reify.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reify.scala
@@ -2,7 +2,6 @@ package scala.reflect.reify
 package phases
 
 import scala.runtime.ScalaRunTime.isAnyVal
-import scala.runtime.ScalaRunTime.isTuple
 import scala.reflect.reify.codegen._
 
 trait Reify extends GenSymbols
@@ -16,7 +15,6 @@ trait Reify extends GenSymbols
   self: Reifier =>
 
   import global._
-  import definitions._
 
   private object reifyStack {
     def currents: List[Any] = state.reifyStack
@@ -37,7 +35,7 @@ trait Reify extends GenSymbols
 
   /**
    *  Reifies any supported value.
-   *  For internal use only, use ``reified'' instead.
+   *  For internal use only, use `reified` instead.
    */
   def reify(reifee: Any): Tree = reifyStack.push(reifee)(reifee match {
     // before adding some case here, in global scope, please, consider
@@ -59,4 +57,4 @@ trait Reify extends GenSymbols
     case _                        =>
       throw new Error("reifee %s of type %s is not supported".format(reifee, reifee.getClass))
   })
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index 535a933..6c073c0 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -8,6 +8,9 @@ trait Reshape {
 
   import global._
   import definitions._
+  import treeInfo.Unapplied
+  private val runDefinitions = currentRun.runDefinitions
+  import runDefinitions._
 
   /**
    *  Rolls back certain changes that were introduced during typechecking of the reifee.
@@ -48,13 +51,13 @@ trait Reshape {
           val Template(parents, self, body) = impl
           var body1 = trimAccessors(classDef, reshapeLazyVals(body))
           body1 = trimSyntheticCaseClassMembers(classDef, body1)
-          var impl1 = Template(parents, self, body1).copyAttrs(impl)
+          val impl1 = Template(parents, self, body1).copyAttrs(impl)
           ClassDef(mods, name, params, impl1).copyAttrs(classDef)
         case moduledef @ ModuleDef(mods, name, impl) =>
           val Template(parents, self, body) = impl
           var body1 = trimAccessors(moduledef, reshapeLazyVals(body))
           body1 = trimSyntheticCaseClassMembers(moduledef, body1)
-          var impl1 = Template(parents, self, body1).copyAttrs(impl)
+          val impl1 = Template(parents, self, body1).copyAttrs(impl)
           ModuleDef(mods, name, impl1).copyAttrs(moduledef)
         case template @ Template(parents, self, body) =>
           val discardedParents = parents collect { case tt: TypeTree => tt } filter isDiscarded
@@ -65,22 +68,9 @@ trait Reshape {
         case block @ Block(stats, expr) =>
           val stats1 = reshapeLazyVals(trimSyntheticCaseClassCompanions(stats))
           Block(stats1, expr).copyAttrs(block)
-        case unapply @ UnApply(fun, args) =>
-          def extractExtractor(tree: Tree): Tree = {
-            val Apply(fun, args) = tree
-            args match {
-              case List(Ident(special)) if special == nme.SELECTOR_DUMMY =>
-                val Select(extractor, flavor) = fun
-                assert(flavor == nme.unapply || flavor == nme.unapplySeq)
-                extractor
-              case _ =>
-                extractExtractor(fun)
-            }
-          }
-
+        case unapply @ UnApply(Unapplied(Select(fun, nme.unapply | nme.unapplySeq)), args) =>
           if (reifyDebug) println("unapplying unapply: " + tree)
-          val fun1 = extractExtractor(fun)
-          Apply(fun1, args).copyAttrs(unapply)
+          Apply(fun, args).copyAttrs(unapply)
         case _ =>
           tree
       }
@@ -89,8 +79,8 @@ trait Reshape {
     }
 
     private def undoMacroExpansion(tree: Tree): Tree =
-      tree.attachments.get[MacroExpansionAttachment] match {
-        case Some(MacroExpansionAttachment(original)) =>
+      tree.attachments.get[analyzer.MacroExpansionAttachment] match {
+        case Some(analyzer.MacroExpansionAttachment(original, _)) =>
           def mkImplicitly(tp: Type) = atPos(tree.pos)(
             gen.mkNullaryCall(Predef_implicitly, List(tp))
           )
@@ -116,7 +106,6 @@ trait Reshape {
 
     private def toPreTyperModifiers(mods: Modifiers, sym: Symbol) = {
       if (!sym.annotations.isEmpty) {
-        val Modifiers(flags, privateWithin, annotations) = mods
         val postTyper = sym.annotations filter (_.original != EmptyTree)
         if (reifyDebug && !postTyper.isEmpty) println("reify symbol annotations for: " + sym)
         if (reifyDebug && !postTyper.isEmpty) println("originals are: " + sym.annotations)
@@ -131,8 +120,8 @@ trait Reshape {
      *
      *  NB: This is the trickiest part of reification!
      *
-     *  In most cases, we're perfectly fine to reify a Type itself (see ``reifyType'').
-     *  However if the type involves a symbol declared inside the quasiquote (i.e. registered in ``boundSyms''),
+     *  In most cases, we're perfectly fine to reify a Type itself (see `reifyType`).
+     *  However if the type involves a symbol declared inside the quasiquote (i.e. registered in `boundSyms`),
      *  then we cannot reify it, or otherwise subsequent reflective compilation will fail.
      *
      *  Why will it fail? Because reified deftrees (e.g. ClassDef(...)) will generate fresh symbols during that compilation,
@@ -140,7 +129,7 @@ trait Reshape {
      *  https://issues.scala-lang.org/browse/SI-5230
      *
      *  To deal with this unpleasant fact, we need to fall back from types to equivalent trees (after all, parser trees don't contain any types, just trees, so it should be possible).
-     *  Luckily, these original trees get preserved for us in the ``original'' field when Trees get transformed into TypeTrees.
+     *  Luckily, these original trees get preserved for us in the `original` field when Trees get transformed into TypeTrees.
      *  And if an original of a type tree is empty, we can safely assume that this type is non-essential (e.g. was inferred/generated by the compiler).
      *  In that case the type can be omitted (e.g. reified as an empty TypeTree), since it will be inferred again later on.
      *
@@ -157,8 +146,8 @@ trait Reshape {
      *  upd. There are also problems with CompoundTypeTrees. I had to use attachments to retain necessary information.
      *
      *  upd. Recently I went ahead and started using original for all TypeTrees, regardless of whether they refer to local symbols or not.
-     *  As a result, ``reifyType'' is never called directly by tree reification (and, wow, it seems to work great!).
-     *  The only usage of ``reifyType'' now is for servicing typetags, however, I have some ideas how to get rid of that as well.
+     *  As a result, `reifyType` is never called directly by tree reification (and, wow, it seems to work great!).
+     *  The only usage of `reifyType` now is for servicing typetags, however, I have some ideas how to get rid of that as well.
      */
     private def isDiscarded(tt: TypeTree) = tt.original == null
     private def toPreTyperTypeTree(tt: TypeTree): Tree = {
@@ -168,7 +157,7 @@ trait Reshape {
         // if this assumption fails, please, don't be quick to add postprocessing here (like I did before)
         // but rather try to fix this in Typer, so that it produces quality originals (like it's done for typedAnnotated)
         if (reifyDebug) println("TypeTree, essential: %s (%s)".format(tt.tpe, tt.tpe.kind))
-        if (reifyDebug) println("verdict: rolled back to original %s".format(tt.original))
+        if (reifyDebug) println("verdict: rolled back to original %s".format(tt.original.toString.replaceAll("\\s+", " ")))
         transform(tt.original)
       } else {
         // type is deemed to be non-essential
@@ -182,7 +171,7 @@ trait Reshape {
     private def toPreTyperCompoundTypeTree(ctt: CompoundTypeTree): Tree = {
       val CompoundTypeTree(tmpl @ Template(parents, self, stats)) = ctt
       if (stats.nonEmpty) CannotReifyCompoundTypeTreeWithNonEmptyBody(ctt)
-      assert(self eq emptyValDef, self)
+      assert(self eq noSelfType, self)
       val att = tmpl.attachments.get[CompoundTypeTreeOriginalAttachment]
       val CompoundTypeTreeOriginalAttachment(parents1, stats1) = att.getOrElse(CompoundTypeTreeOriginalAttachment(parents, stats))
       CompoundTypeTree(Template(parents1, self, stats1))
@@ -232,13 +221,10 @@ trait Reshape {
       val args = if (ann.assocs.isEmpty) {
         ann.args
       } else {
-        def toScalaAnnotation(jann: ClassfileAnnotArg): Tree = jann match {
-          case LiteralAnnotArg(const) =>
-            Literal(const)
-          case ArrayAnnotArg(arr) =>
-            Apply(Ident(definitions.ArrayModule), arr.toList map toScalaAnnotation)
-          case NestedAnnotArg(ann) =>
-            toPreTyperAnnotation(ann)
+        def toScalaAnnotation(jann: ClassfileAnnotArg): Tree = (jann: @unchecked) match {
+          case LiteralAnnotArg(const) => Literal(const)
+          case ArrayAnnotArg(arr)     => Apply(Ident(definitions.ArrayModule), arr.toList map toScalaAnnotation)
+          case NestedAnnotArg(ann)    => toPreTyperAnnotation(ann)
         }
 
         ann.assocs map { case (nme, arg) => AssignOrNamedArg(Ident(nme), toScalaAnnotation(arg)) }
@@ -255,9 +241,9 @@ trait Reshape {
         case _ => rhs // unit or trait case
       }
       val DefDef(mods0, name0, _, _, tpt0, rhs0) = ddef
-      val name1 = nme.dropLocalSuffix(name0)
+      val name1 = name0.dropLocal
       val Modifiers(flags0, privateWithin0, annotations0) = mods0
-      var flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD)
+      val flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD)
       val mods1 = Modifiers(flags1, privateWithin0, annotations0) setPositions mods0.positions
       val mods2 = toPreTyperModifiers(mods1, ddef.symbol)
       ValDef(mods2, name1, tpt0, extractRhs(rhs0))
@@ -272,9 +258,11 @@ trait Reshape {
 
         def detectBeanAccessors(prefix: String): Unit = {
           if (defdef.name.startsWith(prefix)) {
-            var name = defdef.name.toString.substring(prefix.length)
+            val name = defdef.name.toString.substring(prefix.length)
             def uncapitalize(s: String) = if (s.length == 0) "" else { val chars = s.toCharArray; chars(0) = chars(0).toLower; new String(chars) }
-            def findValDef(name: String) = (symdefs.values collect { case vdef: ValDef if nme.dropLocalSuffix(vdef.name).toString == name => vdef }).headOption
+            def findValDef(name: String) = symdefs.values collectFirst {
+              case vdef: ValDef if vdef.name.dropLocal string_== name => vdef
+            }
             val valdef = findValDef(name).orElse(findValDef(uncapitalize(name))).orNull
             if (valdef != null) accessors(valdef) = accessors.getOrElse(valdef, Nil) :+ defdef
           }
@@ -282,13 +270,13 @@ trait Reshape {
         detectBeanAccessors("get")
         detectBeanAccessors("set")
         detectBeanAccessors("is")
-      });
+      })
 
-      var stats1 = stats flatMap {
+      val stats1 = stats flatMap {
         case vdef @ ValDef(mods, name, tpt, rhs) if !mods.isLazy =>
           val mods1 = if (accessors.contains(vdef)) {
             val ddef = accessors(vdef)(0) // any accessor will do
-            val Modifiers(flags, privateWithin, annotations) = mods
+            val Modifiers(flags, _, annotations) = mods
             var flags1 = flags & ~LOCAL
             if (!ddef.symbol.isPrivate) flags1 = flags1 & ~PRIVATE
             val privateWithin1 = ddef.mods.privateWithin
@@ -298,8 +286,8 @@ trait Reshape {
             mods
           }
           val mods2 = toPreTyperModifiers(mods1, vdef.symbol)
-          val name1 = nme.dropLocalSuffix(name)
-          val vdef1 = ValDef(mods2, name1, tpt, rhs)
+          val name1 = name.dropLocal
+          val vdef1 = ValDef(mods2, name1.toTermName, tpt, rhs)
           if (reifyDebug) println("resetting visibility of field: %s => %s".format(vdef, vdef1))
           Some(vdef1) // no copyAttrs here, because new ValDef and old symbols are now out of sync
         case ddef: DefDef if !ddef.mods.isLazy =>
@@ -331,7 +319,8 @@ trait Reshape {
             case Some(ddef) =>
               toPreTyperLazyVal(ddef)
             case None       =>
-              CannotReifyInvalidLazyVal(vdef)
+              if (reifyDebug) println("couldn't find corresponding lazy val accessor")
+              vdef
           }
           if (reifyDebug) println(s"reconstructed lazy val is $vdef1")
           vdef1::Nil
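
(Editorial sketch, not part of the patch.) The TypeTree.original mechanism that the comment above relies on can be inspected with a toolbox. This assumes scala-compiler (for scala.tools.reflect.ToolBox) and quasiquotes are available; the value names are invented.

    import scala.reflect.runtime.universe._
    import scala.tools.reflect.ToolBox

    val tb = scala.reflect.runtime.currentMirror.mkToolBox()
    val typed = tb.typecheck(q"{ val xs: List[Int] = List(1); xs }")
    // TypeTrees the user wrote keep their pre-typer tree in `original`;
    // compiler-inferred TypeTrees have a null `original` and count as non-essential
    typed.collect { case tt: TypeTree => Option(tt.original) }
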
diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala
index 59cd4e5..4ec4de2 100644
--- a/src/compiler/scala/reflect/reify/utils/Extractors.scala
+++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala
@@ -11,7 +11,7 @@ trait Extractors {
   // Example of a reified tree for `reify(List(1, 2))`:
   // (also contains an example of a reified type as a third argument to the constructor of Expr)
   // {
-  //   val $u: reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
+  //   val $u: scala.reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
   //   val $m: $u.Mirror = $u.runtimeMirror(Test.this.getClass().getClassLoader());
   //   $u.Expr[List[Int]]($m, {
   //     final class $treecreator1 extends scala.reflect.api.TreeCreator {
@@ -75,12 +75,12 @@ trait Extractors {
       newTypeName(global.currentUnit.fresh.newName(flavor.toString)),
       List(),
       Template(List(Ident(reifierBase)),
-      emptyValDef,
+      noSelfType,
       List(
         DefDef(NoMods, nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(())))),
         DefDef(NoMods,
           reifierName,
-          List(TypeDef(Modifiers(PARAM), tparamu, List(), TypeBoundsTree(Ident(NothingClass), CompoundTypeTree(Template(List(Ident(reifierUniverse), Ident(SingletonClass)), emptyValDef, List()))))),
+          List(TypeDef(Modifiers(PARAM), tparamu, List(), TypeBoundsTree(Ident(NothingClass), CompoundTypeTree(Template(List(Ident(reifierUniverse), Ident(SingletonClass)), noSelfType, List()))))),
           List(List(ValDef(Modifiers(PARAM), nme.MIRROR_UNTYPED, AppliedTypeTree(Ident(MirrorClass), List(Ident(tparamu))), EmptyTree))),
           reifierTpt, reifierBody))))
     Block(tpec, ApplyConstructor(Ident(tpec.name), List()))
@@ -164,6 +164,16 @@ trait Extractors {
     }
   }
 
+  // abstract over possible additional .apply select
+  // which is sometimes inserted after desugaring of calls
+  object ApplyCall {
+    def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match {
+      case Apply(Select(id, nme.apply), args) => Some((id, args))
+      case Apply(id, args) => Some((id, args))
+      case _ => None
+    }
+  }
+
   sealed abstract class FreeDefExtractor(acceptTerms: Boolean, acceptTypes: Boolean) {
     def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = {
       def acceptFreeTermFactory(name: Name) = {
@@ -173,13 +183,13 @@ trait Extractors {
       tree match {
         case
           ValDef(_, name, _, Apply(
-            Select(Select(uref1 @ Ident(_), build1), freeTermFactory),
+            Select(Select(Select(uref1 @ Ident(_), internal1), rs1), freeTermFactory),
             _ :+
-            Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))) :+
+            ApplyCall(Select(Select(Select(uref2 @ Ident(_), internal2), rs2), flagsRepr), List(Literal(Constant(flags: Long)))) :+
             Literal(Constant(origin: String))))
-        if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && acceptFreeTermFactory(freeTermFactory) &&
-           uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
-          Some(uref1, name, reifyBinding(tree), flags, origin)
+        if uref1.name == nme.UNIVERSE_SHORT && internal1 == nme.internal && rs1 == nme.reificationSupport && acceptFreeTermFactory(freeTermFactory) &&
+           uref2.name == nme.UNIVERSE_SHORT && internal2 == nme.internal && rs2 == nme.reificationSupport && flagsRepr == nme.FlagsRepr =>
+          Some((uref1, name, reifyBinding(tree), flags, origin))
         case _ =>
           None
       }
@@ -191,8 +201,8 @@ trait Extractors {
 
   object FreeRef {
     def unapply(tree: Tree): Option[(Tree, TermName)] = tree match {
-      case Apply(Select(Select(uref @ Ident(_), build), ident), List(Ident(name: TermName)))
-      if build == nme.build && ident == nme.Ident && name.startsWith(nme.REIFY_FREE_PREFIX) =>
+      case Apply(Select(Select(Select(uref @ Ident(_), internal), rs), mkIdent), List(Ident(name: TermName)))
+      if internal == nme.internal && rs == nme.reificationSupport && mkIdent == nme.mkIdent && name.startsWith(nme.REIFY_FREE_PREFIX) =>
         Some((uref, name))
       case _ =>
         None
@@ -203,15 +213,15 @@ trait Extractors {
     def unapply(tree: Tree): Option[(Tree, TermName, Long, Boolean)] = tree match {
       case
         ValDef(_, name, _, Apply(
-          Select(Select(uref1 @ Ident(_), build1), newNestedSymbol),
+          Select(Select(Select(uref1 @ Ident(_), internal1), rs1), newNestedSymbol),
           List(
             _,
             _,
             _,
-            Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))),
+            ApplyCall(Select(Select(Select(uref2 @ Ident(_), internal2), rs2), flagsRepr), List(Literal(Constant(flags: Long)))),
             Literal(Constant(isClass: Boolean)))))
-      if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newNestedSymbol == nme.newNestedSymbol &&
-         uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
+      if uref1.name == nme.UNIVERSE_SHORT && internal1 == nme.internal && rs1 == nme.reificationSupport && newNestedSymbol == nme.newNestedSymbol &&
+         uref2.name == nme.UNIVERSE_SHORT && internal2 == nme.internal && rs2 == nme.reificationSupport && flagsRepr == nme.FlagsRepr =>
         Some((uref1, name, flags, isClass))
       case _ =>
         None
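
(Editorial sketch, not part of the patch.) The ApplyCall extractor added above folds two tree shapes into one case. A hedged usage sketch with invented trees; without such an extractor a caller has to spell out both shapes.

    import scala.reflect.runtime.universe._

    val plain     = q"f(1, 2)"         // Apply(Ident(f), args)
    val desugared = q"f.apply(1, 2)"   // Apply(Select(Ident(f), apply), args)

    def parts(t: Tree) = t match {
      case Apply(Select(fn, TermName("apply")), args) => (fn, args)  // explicit .apply
      case Apply(fn, args)                            => (fn, args)  // plain call
    }
    // ApplyCall collapses the two cases, so reifier code can match once: case ApplyCall(fn, args) => ...
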
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
index aca18c7..3b91d28 100644
--- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -11,8 +11,6 @@ trait NodePrinters {
   self: Utils =>
 
   import global._
-  import definitions._
-  import Flag._
 
   object reifiedNodeToString extends (Tree => String) {
     def apply(tree: Tree): String = {
@@ -25,8 +23,8 @@ trait NodePrinters {
       // Rolling a full-fledged, robust TreePrinter would be several times more code.
       // Also as of late we have tests that ensure that UX won't be broken by random changes to the reifier.
       val lines = (tree.toString.split(EOL) drop 1 dropRight 1).toList splitAt 2
-      var (List(universe, mirror), reification) = lines
-      reification = (for (line <- reification) yield {
+      val (List(universe, mirror), reification0) = lines
+      val reification = (for (line <- reification0) yield {
         var s = line substring 2
         s = s.replace(nme.UNIVERSE_PREFIX.toString, "")
         s = s.replace(".apply", "")
@@ -34,12 +32,12 @@ trait NodePrinters {
         s = "List\\[List\\[.*?\\].*?\\]".r.replaceAllIn(s, "List")
         s = "List\\[.*?\\]".r.replaceAllIn(s, "List")
         s = s.replace("immutable.this.Nil", "List()")
-        s = """build\.flagsFromBits\((\d+)[lL]\)""".r.replaceAllIn(s, m => {
+        s = """internal\.reificationSupport\.FlagsRepr\((\d+)[lL]\)""".r.replaceAllIn(s, m => {
           flagsAreUsed = true
           show(m.group(1).toLong)
         })
-        s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()")
-        s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
+        s = s.replace("Modifiers(0L, TypeName(\"\"), List())", "Modifiers()")
+        s = """Modifiers\((\d+)[lL], TypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
           val buf = new scala.collection.mutable.ListBuffer[String]
 
           val annotations = m.group(3)
@@ -48,7 +46,7 @@ trait NodePrinters {
 
           val privateWithin = "" + m.group(2)
           if (buf.nonEmpty || privateWithin != "")
-            buf.append("newTypeName(\"" + privateWithin + "\")")
+            buf.append("TypeName(\"" + privateWithin + "\")")
 
           val bits = m.group(1)
           if (buf.nonEmpty || bits != "0L") {
@@ -73,14 +71,13 @@ trait NodePrinters {
         s.trim
       })
 
-      val printout = scala.collection.mutable.ListBuffer[String]();
+      val printout = scala.collection.mutable.ListBuffer[String]()
       printout += universe.trim
       if (mirrorIsUsed) printout += mirror.replace("Mirror[", "scala.reflect.api.Mirror[").trim
-      val imports = scala.collection.mutable.ListBuffer[String]();
-      imports += nme.UNIVERSE_SHORT
-      // if (buildIsUsed) imports += nme.build
-      if (mirrorIsUsed) imports += nme.MIRROR_SHORT
-      if (flagsAreUsed) imports += nme.Flag
+      val imports = scala.collection.mutable.ListBuffer[String]()
+      imports += nme.UNIVERSE_SHORT.toString
+      if (mirrorIsUsed) imports += nme.MIRROR_SHORT.toString
+      if (flagsAreUsed) imports += nme.Flag.toString
       printout += s"""import ${imports map (_ + "._") mkString ", "}"""
 
       val name = if (isExpr) "tree" else "tpe"
diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
index dbb0836..b6ae3b8 100644
--- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
+++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
@@ -8,8 +8,6 @@ trait SymbolTables {
   self: Utils =>
 
   import global._
-  import definitions._
-  import Flag._
 
   class SymbolTable private[SymbolTable] (
     private[SymbolTable] val symtab: immutable.ListMap[Symbol, Tree] = immutable.ListMap[Symbol, Tree](),
@@ -17,9 +15,6 @@ trait SymbolTables {
     private[SymbolTable] val original: Option[List[Tree]] = None) {
 
     def syms: List[Symbol] = symtab.keys.toList
-    def isConcrete: Boolean = symtab.values forall (sym => !FreeTypeDef.unapply(sym).isDefined)
-
-//    def aliases: Map[Symbol, List[TermName]] = aliases.distinct groupBy (_._1) mapValues (_ map (_._2))
 
     def symDef(sym: Symbol): Tree =
       symtab.getOrElse(sym, EmptyTree)
@@ -89,11 +84,6 @@ trait SymbolTables {
       add(ValDef(NoMods, freshName(name0), TypeTree(), reification) updateAttachment bindingAttachment)
     }
 
-    private def add(sym: Symbol, name: TermName): SymbolTable = {
-      if (!(syms contains sym)) error("cannot add an alias to a symbol not in the symbol table")
-      add(sym, name, EmptyTree)
-    }
-
     private def remove(sym: Symbol): SymbolTable = {
       val newSymtab = symtab - sym
       val newAliases = aliases filter (_._1 != sym)
@@ -107,7 +97,7 @@ trait SymbolTables {
       newSymtab = newSymtab map { case ((sym, tree)) =>
         val ValDef(mods, primaryName, tpt, rhs) = tree
         val tree1 =
-          if (!(newAliases contains (sym, primaryName))) {
+          if (!(newAliases contains ((sym, primaryName)))) {
             val primaryName1 = newAliases.find(_._1 == sym).get._2
             ValDef(mods, primaryName1, tpt, rhs).copyAttrs(tree)
           } else tree
@@ -143,7 +133,7 @@ trait SymbolTables {
       var result = new SymbolTable(original = Some(encoded))
       encoded foreach (entry => (entry.attachments.get[ReifyBindingAttachment], entry.attachments.get[ReifyAliasAttachment]) match {
         case (Some(ReifyBindingAttachment(_)), _) => result += entry
-        case (_, Some(ReifyAliasAttachment(sym, alias))) => result = new SymbolTable(result.symtab, result.aliases :+ (sym, alias))
+        case (_, Some(ReifyAliasAttachment(sym, alias))) => result = new SymbolTable(result.symtab, result.aliases :+ ((sym, alias)))
         case _ => // do nothing, this is boilerplate that can easily be recreated by subsequent `result.encode`
       })
       result
@@ -172,7 +162,7 @@ trait SymbolTables {
               else if (isFreeTerm) sym.tpe
               else sym.info
             } else NoType
-          val rset = reifier.mirrorBuildCall(nme.setTypeSignature, currtab.symRef(sym), reifier.reify(signature))
+          val rset = reifier.mirrorBuildCall(nme.setInfo, currtab.symRef(sym), reifier.reify(signature))
           // `Symbol.annotations` doesn't initialize the symbol, so we don't need to do anything special here
           // also since we call `sym.info` a few lines above, by now the symbol will be initialized (if possible)
           // so the annotations will be filled in and will be waiting to be reified (unless symbol initialization is prohibited as described above)
@@ -214,4 +204,4 @@ trait SymbolTables {
       }
     }
   }
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/ant/Pack200Task.scala b/src/compiler/scala/tools/ant/Pack200Task.scala
index 255efe5..3c1bc8c 100644
--- a/src/compiler/scala/tools/ant/Pack200Task.scala
+++ b/src/compiler/scala/tools/ant/Pack200Task.scala
@@ -65,7 +65,7 @@ class Pack200Task extends ScalaMatchingTask {
 
   /** Set the flag to specify if file reordering should be performed. Reordering
     * is used to remove empty packages and improve pack200 optimization.
-    * @param keep
+    * @param x
     *         `'''true'''` to retain file ordering.
     *         `'''false'''` to optimize directory structure (DEFAULT).  */
   def setKeepFileOrder(x: Boolean) { keepFileOrder = x }
@@ -99,8 +99,8 @@ class Pack200Task extends ScalaMatchingTask {
   private def getFileList: List[File] = {
     var files: List[File] = Nil
     val fs = getImplicitFileSet
-    var ds = fs.getDirectoryScanner(getProject())
-    var dir = fs.getDir(getProject())
+    val ds = fs.getDirectoryScanner(getProject())
+    val dir = fs.getDir(getProject())
     for (filename <- ds.getIncludedFiles()
          if filename.toLowerCase.endsWith(".jar")) {
       val file = new File(dir, filename)
diff --git a/src/compiler/scala/tools/ant/Same.scala b/src/compiler/scala/tools/ant/Same.scala
index e53679f..6036b23 100644
--- a/src/compiler/scala/tools/ant/Same.scala
+++ b/src/compiler/scala/tools/ant/Same.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.tools.ant
+package scala
+package tools.ant
 
 import java.io.{File, FileInputStream}
 
@@ -32,7 +33,7 @@ import org.apache.tools.ant.types.Mapper
  *
  * @author  Gilles Dubochet
  * @version 1.0 */
-class Same extends ScalaMatchingTask {
+@deprecated("Use diff", "2.11.0") class Same extends ScalaMatchingTask {
 /*============================================================================*\
 **                             Ant user-properties                            **
 \*============================================================================*/
@@ -110,7 +111,7 @@ class Same extends ScalaMatchingTask {
 \*============================================================================*/
 
   override def execute() = {
-    validateAttributes
+    validateAttributes()
     val mapper = getMapper
     allEqualNow = true
     val originNames: Array[String] = getDirectoryScanner(origin.get).getIncludedFiles
diff --git a/src/compiler/scala/tools/ant/ScalaTool.scala b/src/compiler/scala/tools/ant/ScalaTool.scala
index 57d24f6..bb6a933 100644
--- a/src/compiler/scala/tools/ant/ScalaTool.scala
+++ b/src/compiler/scala/tools/ant/ScalaTool.scala
@@ -108,7 +108,7 @@ class ScalaTool extends ScalaMatchingTask {
    *  for general purpose scripts, as this does not assume all elements are
    *  relative to the Ant `basedir`.  Additionally, the platform specific
    *  demarcation of any script variables (e.g. `${SCALA_HOME}` or
-   * `%SCALA_HOME%`) can be specified in a platform independant way (e.g.
+   * `%SCALA_HOME%`) can be specified in a platform independent way (e.g.
    * `@SCALA_HOME@`) and automatically translated for you.
    */
   def setClassPath(input: String) {
@@ -139,7 +139,7 @@ class ScalaTool extends ScalaMatchingTask {
       val st = s.trim
       val stArray = st.split("=", 2)
       if (stArray.length == 2) {
-        if (input != "") List(Pair(stArray(0), stArray(1))) else Nil
+        if (input != "") List((stArray(0), stArray(1))) else Nil
       }
       else
         buildError("Property " + st + " is not formatted properly.")
@@ -170,7 +170,7 @@ class ScalaTool extends ScalaMatchingTask {
 
     private def getProperties: String =
       properties.map({
-        case Pair(name,value) => "-D" + name + "=\"" + value + "\""
+        case (name,value) => "-D" + name + "=\"" + value + "\""
       }).mkString("", " ", "")
 
 /*============================================================================*\
@@ -190,13 +190,13 @@ class ScalaTool extends ScalaMatchingTask {
       val builder = new StringBuilder()
 
       while (chars.hasNext) {
-        val char = chars.next
+        val char = chars.next()
         if (char == '@') {
-          var char = chars.next
+          var char = chars.next()
           val token = new StringBuilder()
           while (chars.hasNext && char != '@') {
             token.append(char)
-            char = chars.next
+            char = chars.next()
           }
           if (token.toString == "")
             builder.append('@')
@@ -212,13 +212,13 @@ class ScalaTool extends ScalaMatchingTask {
       val builder = new StringBuilder()
 
       while (chars.hasNext) {
-        val char = chars.next
+        val char = chars.next()
         if (char == '@') {
-          var char = chars.next
+          var char = chars.next()
           val token = new StringBuilder()
           while (chars.hasNext && char != '@') {
             token.append(char)
-            char = chars.next
+            char = chars.next()
           }
           if (tokens.contains(token.toString))
             builder.append(tokens(token.toString))
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index 73d09e8..1747405 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -55,8 +55,6 @@ import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
  *  - `usejavacp`,
  *  - `failonerror`,
  *  - `scalacdebugging`,
- *  - `assemname`,
- *  - `assemrefs`.
  *
  *  It also takes the following parameters as nested elements:
  *  - `src` (for `srcdir`),
@@ -93,13 +91,13 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
     val values = List("namer", "typer", "pickler", "refchecks",
                       "uncurry", "tailcalls", "specialize", "explicitouter",
                       "erasure", "lazyvals", "lambdalift", "constructors",
-                      "flatten", "mixin", "cleanup", "icode", "inliner",
+                      "flatten", "mixin", "delambdafy", "cleanup", "icode", "inliner",
                       "closelim", "dce", "jvm", "terminal")
   }
 
   /** Defines valid values for the `target` property. */
   object Target extends PermissibleValue {
-    val values = List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil")
+    val values = List("jvm-1.5", "jvm-1.6", "jvm-1.7")
   }
 
   /** Defines valid values for the `deprecation` and `unchecked` properties. */
@@ -169,11 +167,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
   /** Indicates whether compilation errors will fail the build; defaults to true. */
   protected var failonerror: Boolean = true
 
-  // Name of the output assembly (only relevant with -target:msil)
-  protected var assemname: Option[String] = None
-  // List of assemblies referenced by the program (only relevant with -target:msil)
-  protected var assemrefs: Option[String] = None
-
   /** Prints out the files being compiled by the scalac ant task
    *  (not only the number of files). */
   protected var scalacDebugging: Boolean = false
@@ -420,9 +413,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
    *  @param input The specified flag */
   def setScalacdebugging(input: Boolean) { scalacDebugging = input }
 
-  def setAssemname(input: String) { assemname = Some(input) }
-  def setAssemrefs(input: String) { assemrefs = Some(input) }
-
   /** Sets the `compilerarg` as a nested compilerarg Ant parameter.
    *  @return A compiler argument to be configured. */
   def createCompilerArg(): ImplementationSpecificArgument = {
@@ -505,7 +495,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
     path.map(asString) mkString File.pathSeparator
 
   /** Transforms a file into a Scalac-readable string.
-   *  @param path A file to convert.
+   *  @param file A file to convert.
    *  @return     A string-representation of the file like `/x/k/a.scala`. */
   protected def asString(file: File): String =
     file.getAbsolutePath()
@@ -518,7 +508,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
     new Settings(error)
 
   protected def newGlobal(settings: Settings, reporter: Reporter) =
-    new Global(settings, reporter)
+    Global(settings, reporter)
 
 /*============================================================================*\
 **                           The big execute method                           **
@@ -612,9 +602,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
     if (!unchecked.isEmpty) settings.unchecked.value = unchecked.get
     if (!usejavacp.isEmpty) settings.usejavacp.value = usejavacp.get
 
-    if (!assemname.isEmpty) settings.assemname.value = assemname.get
-    if (!assemrefs.isEmpty) settings.assemrefs.value = assemrefs.get
-
     val jvmargs = scalacCompilerArgs.getArgs filter (_ startsWith "-J")
     if (!jvmargs.isEmpty) settings.jvmargs.value = jvmargs.toList
     val defines = scalacCompilerArgs.getArgs filter (_ startsWith "-D")
@@ -685,7 +672,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
 
         file
       }
-      val res = execWithArgFiles(java, List(writeSettings.getAbsolutePath))
+      val res = execWithArgFiles(java, List(writeSettings().getAbsolutePath))
       if (failonerror && res != 0)
         buildError("Compilation failed because of an internal compiler error;"+
               " see the error output for details.")
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala
deleted file mode 100644
index 7fc8117..0000000
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ /dev/null
@@ -1,695 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala Ant Tasks                      **
-**    / __/ __// _ | / /  / _ |    (c) 2005-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.tools.ant
-
-import java.io.File
-
-import org.apache.tools.ant.Project
-import org.apache.tools.ant.types.{Path, Reference}
-import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper}
-
-import scala.tools.nsc.Global
-import scala.tools.nsc.doc.Settings
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-
-/** An Ant task to document Scala code.
- *
- *  This task can take the following parameters as attributes:
- *  - `srcdir` (mandatory),
- *  - `srcref`,
- *  - `destdir`,
- *  - `classpath`,
- *  - `classpathref`,
- *  - `sourcepath`,
- *  - `sourcepathref`,
- *  - `bootclasspath`,
- *  - `bootclasspathref`,
- *  - `extdirs`,
- *  - `extdirsref`,
- *  - `encoding`,
- *  - `doctitle`,
- *  - `header`,
- *  - `footer`,
- *  - `top`,
- *  - `bottom`,
- *  - `addparams`,
- *  - `deprecation`,
- *  - `docgenerator`,
- *  - `docrootcontent`,
- *  - `unchecked`,
- *  - `nofail`,
- *  - `skipPackages`.
- *
- *  It also takes the following parameters as nested elements:
- *  - `src` (for srcdir),
- *  - `classpath`,
- *  - `sourcepath`,
- *  - `bootclasspath`,
- *  - `extdirs`.
- *
- *  @author Gilles Dubochet, Stephane Micheloud
- */
-class Scaladoc extends ScalaMatchingTask {
-
-  /** The unique Ant file utilities instance to use in this task. */
-  private val fileUtils = FileUtils.getFileUtils()
-
-/*============================================================================*\
-**                             Ant user-properties                            **
-\*============================================================================*/
-
-  abstract class PermissibleValue {
-    val values: List[String]
-    def isPermissible(value: String): Boolean =
-      (value == "") || values.exists(_.startsWith(value))
-  }
-
-  /** Defines valid values for the `deprecation` and
-   *  `unchecked` properties.
-   */
-  object Flag extends PermissibleValue {
-    val values = List("yes", "no", "on", "off")
-    def getBooleanValue(value: String, flagName: String): Boolean =
-      if (Flag.isPermissible(value))
-        return ("yes".equals(value) || "on".equals(value))
-      else
-        buildError("Unknown " + flagName + " flag '" + value + "'")
-  }
-
-  /** The directories that contain source files to compile. */
-  private var origin: Option[Path] = None
-  /** The directory to put the compiled files in. */
-  private var destination: Option[File] = None
-
-  /** The class path to use for this compilation. */
-  private var classpath: Option[Path] = None
-  /** The source path to use for this compilation. */
-  private var sourcepath: Option[Path] = None
-  /** The boot class path to use for this compilation. */
-  private var bootclasspath: Option[Path] = None
-  /** The external extensions path to use for this compilation. */
-  private var extdirs: Option[Path] = None
-
-  /** The character encoding of the files to compile. */
-  private var encoding: Option[String] = None
-
-  /** The fully qualified name of a doclet class, which will be used to generate the documentation. */
-  private var docgenerator: Option[String] = None
-
-  /** The file from which the documentation content of the root package will be taken */
-  private var docrootcontent: Option[File] = None
-
-  /** The document title of the generated HTML documentation. */
-  private var doctitle: Option[String] = None
-
-  /** The document footer of the generated HTML documentation. */
-  private var docfooter: Option[String] = None
-
-  /** The document version, to be added to the title. */
-  private var docversion: Option[String] = None
-
-  /** Instruct the compiler to generate links to sources */
-  private var docsourceurl: Option[String] = None
-
-  /** Point scaladoc at uncompilable sources. */
-  private var docUncompilable: Option[String] = None
-
-  /** Instruct the compiler to use additional parameters */
-  private var addParams: String = ""
-
-  /** Instruct the compiler to generate deprecation information. */
-  private var deprecation: Boolean = false
-
-  /** Instruct the compiler to generate unchecked information. */
-  private var unchecked: Boolean = false
-
-  /** Instruct the ant task not to fail in the event of errors */
-  private var nofail: Boolean = false
-
-  /** Instruct the scaladoc tool to document implicit conversions */
-  private var docImplicits: Boolean = false
-
-  /** Instruct the scaladoc tool to document all (including impossible) implicit conversions */
-  private var docImplicitsShowAll: Boolean = false
-
-  /** Instruct the scaladoc tool to output implicits debugging information */
-  private var docImplicitsDebug: Boolean = false
-
-  /** Instruct the scaladoc tool to create diagrams */
-  private var docDiagrams: Boolean = false
-
-  /** Instruct the scaladoc tool to output diagram creation debugging information */
-  private var docDiagramsDebug: Boolean = false
-
-  /** Instruct the scaladoc tool to use the binary given to create diagrams */
-  private var docDiagramsDotPath: Option[String] = None
-
-  /** Instruct the scaladoc to produce textual ouput from html pages, for easy diff-ing */
-  private var docRawOutput: Boolean = false
-
-  /** Instruct the scaladoc not to generate prefixes */
-  private var docNoPrefixes: Boolean = false
-
-  /** Instruct the scaladoc tool to group similar functions together */
-  private var docGroups: Boolean = false
-
-  /** Instruct the scaladoc tool to skip certain packages */
-  private var docSkipPackages: String = ""
-
-/*============================================================================*\
-**                             Properties setters                             **
-\*============================================================================*/
-
-  /** Sets the `srcdir` attribute. Used by [[http://ant.apache.org Ant]].
-   *
-   *  @param input The value of `origin`.
-   */
-  def setSrcdir(input: Path) {
-    if (origin.isEmpty) origin = Some(input)
-    else origin.get.append(input)
-  }
-
-  /** Sets the `origin` as a nested src Ant parameter.
-   *
-   *  @return An origin path to be configured.
-   */
-  def createSrc(): Path = {
-    if (origin.isEmpty) origin = Some(new Path(getProject))
-    origin.get.createPath()
-  }
-
-  /** Sets the `origin` as an external reference Ant parameter.
-   *
-   *  @param input A reference to an origin path.
-   */
-  def setSrcref(input: Reference) {
-    createSrc().setRefid(input)
-  }
-
-  /** Sets the `destdir` attribute. Used by [[http://ant.apache.org Ant]].
-   *
-   *  @param input The value of `destination`.
-   */
-  def setDestdir(input: File) {
-    destination = Some(input)
-  }
-
-  /** Sets the `classpath` attribute. Used by [[http://ant.apache.org Ant]].
-   *
-   *  @param input The value of `classpath`.
-   */
-  def setClasspath(input: Path) {
-    if (classpath.isEmpty) classpath = Some(input)
-    else classpath.get.append(input)
-  }
-
-  /** Sets the `classpath` as a nested classpath Ant parameter.
-   *
-   *  @return A class path to be configured.
-   */
-  def createClasspath(): Path = {
-    if (classpath.isEmpty) classpath = Some(new Path(getProject))
-    classpath.get.createPath()
-  }
-
-  /** Sets the `classpath` as an external reference Ant parameter.
-   *
-   *  @param input A reference to a class path.
-   */
-  def setClasspathref(input: Reference) =
-    createClasspath().setRefid(input)
-
-  /** Sets the `sourcepath` attribute. Used by [[http://ant.apache.org Ant]].
-   *
-   *  @param input The value of `sourcepath`.
-   */
-  def setSourcepath(input: Path) =
-    if (sourcepath.isEmpty) sourcepath = Some(input)
-    else sourcepath.get.append(input)
-
-  /** Sets the `sourcepath` as a nested sourcepath Ant parameter.
-   *
-   *  @return A source path to be configured.
-   */
-  def createSourcepath(): Path = {
-    if (sourcepath.isEmpty) sourcepath = Some(new Path(getProject))
-    sourcepath.get.createPath()
-  }
-
-  /** Sets the `sourcepath` as an external reference Ant parameter.
-   *
-   *  @param input A reference to a source path.
-   */
-  def setSourcepathref(input: Reference) =
-    createSourcepath().setRefid(input)
-
-  /** Sets the `bootclasspath` attribute. Used by [[http://ant.apache.org Ant]].
-   *
-   *  @param input The value of `bootclasspath`.
-   */
-  def setBootclasspath(input: Path) =
-    if (bootclasspath.isEmpty) bootclasspath = Some(input)
-    else bootclasspath.get.append(input)
-
-  /** Sets the `bootclasspath` as a nested `sourcepath` Ant parameter.
-   *
-   *  @return A source path to be configured.
-   */
-  def createBootclasspath(): Path = {
-    if (bootclasspath.isEmpty) bootclasspath = Some(new Path(getProject))
-    bootclasspath.get.createPath()
-  }
-
-  /** Sets the `bootclasspath` as an external reference Ant parameter.
-   *
-   *  @param input A reference to a source path.
-   */
-  def setBootclasspathref(input: Reference) {
-    createBootclasspath().setRefid(input)
-  }
-
-  /** Sets the external extensions path attribute. Used by [[http://ant.apache.org Ant]].
-   *
-   *  @param input The value of `extdirs`.
-   */
-  def setExtdirs(input: Path) {
-    if (extdirs.isEmpty) extdirs = Some(input)
-    else extdirs.get.append(input)
-  }
-
-  /** Sets the `extdirs` as a nested sourcepath Ant parameter.
-   *
-   *  @return An extensions path to be configured.
-   */
-  def createExtdirs(): Path = {
-    if (extdirs.isEmpty) extdirs = Some(new Path(getProject))
-    extdirs.get.createPath()
-  }
-
-  /** Sets the `extdirs` as an external reference Ant parameter.
-   *
-   *  @param input A reference to an extensions path.
-   */
-  def setExtdirsref(input: Reference) {
-    createExtdirs().setRefid(input)
-  }
-
-  /** Sets the `encoding` attribute. Used by Ant.
-   *
-   *  @param input The value of `encoding`.
-   */
-  def setEncoding(input: String) {
-    encoding = Some(input)
-  }
-
-  /** Sets the `docgenerator` attribute.
-   *
-   *  @param input A fully qualified class name of a doclet.
-   */
-  def setDocgenerator(input: String) {
-    docgenerator = Some(input)
-  }
-
-  /**
-   * Sets the `docrootcontent` attribute.
-   *
-   * @param input The file from which the documentation content of the root
-   * package will be taken.
-   */
-  def setDocrootcontent(input : File) {
-    docrootcontent = Some(input)
-  }
-
-  /** Sets the `docversion` attribute.
-   *
-   *  @param input The value of `docversion`.
-   */
-  def setDocversion(input: String) {
-    docversion = Some(input)
-  }
-
-  /** Sets the `docsourceurl` attribute.
-   *
-   *  @param input The value of `docsourceurl`.
-   */
-  def setDocsourceurl(input: String) {
-    docsourceurl = Some(input)
-  }
-
-  /** Sets the `doctitle` attribute.
-   *
-   *  @param input The value of `doctitle`.
-   */
-  def setDoctitle(input: String) {
-    doctitle = Some(input)
-  }
-
-  /** Sets the `docfooter` attribute.
-   *
-   *  @param input The value of `docfooter`.
-   */
-  def setDocfooter(input: String) {
-    docfooter = Some(input)
-  }
-
-  /** Set the `addparams` info attribute.
-   *
-   *  @param input The value for `addparams`.
-   */
-  def setAddparams(input: String) {
-    addParams = input
-  }
-
-  /** Set the `deprecation` info attribute.
-   *
-   *  @param input One of the flags `yes/no` or `on/off`.
-   */
-  def setDeprecation(input: String) {
-    if (Flag.isPermissible(input))
-      deprecation = "yes".equals(input) || "on".equals(input)
-    else
-      buildError("Unknown deprecation flag '" + input + "'")
-  }
-
-  /** Set the `unchecked` info attribute.
-   *
-   *  @param input One of the flags `yes/no` or `on/off`.
-   */
-  def setUnchecked(input: String) {
-    if (Flag.isPermissible(input))
-      unchecked = "yes".equals(input) || "on".equals(input)
-    else
-      buildError("Unknown unchecked flag '" + input + "'")
-  }
-
-  def setDocUncompilable(input: String) {
-    docUncompilable = Some(input)
-  }
-
-  /** Set the `nofail` info attribute.
-   *
-   *  @param input One of the flags `yes/no` or `on/off`. Default if no/off.
-   */
-  def setNoFail(input: String) =
-      nofail = Flag.getBooleanValue(input, "nofail")
-
-  /** Set the `implicits` info attribute.
-   *  @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
-  def setImplicits(input: String) =
-    docImplicits = Flag.getBooleanValue(input, "implicits")
-
-  /** Set the `implicitsShowAll` info attribute to enable scaladoc to show all implicits, including those impossible to
-   *  convert to from the default scope
-   *  @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
-  def setImplicitsShowAll(input: String) =
-    docImplicitsShowAll = Flag.getBooleanValue(input, "implicitsShowAll")
-
-  /** Set the `implicitsDebug` info attribute so scaladoc outputs implicit conversion debug information
-   *  @param input One of the flags `yes/no` or `on/off`. Default is no/off. */
-  def setImplicitsDebug(input: String) =
-    docImplicitsDebug = Flag.getBooleanValue(input, "implicitsDebug")
-
-  /** Set the `diagrams` bit so Scaladoc adds diagrams to the documentation
-   *  @param input One of the flags `yes/no` or `on/off`. Default is no/off. */
-  def setDiagrams(input: String) =
-    docDiagrams = Flag.getBooleanValue(input, "diagrams")
-
-  /** Set the `diagramsDebug` bit so Scaladoc outputs diagram building debug information
-   *  @param input One of the flags `yes/no` or `on/off`. Default is no/off. */
-  def setDiagramsDebug(input: String) =
-    docDiagramsDebug = Flag.getBooleanValue(input, "diagramsDebug")
-
-  /** Set the `diagramsDotPath` attribute to the path where graphviz dot can be found (including the binary file name,
-   *  eg: /usr/bin/dot) */
-  def setDiagramsDotPath(input: String) =
-    docDiagramsDotPath = Some(input)
-
-  /** Set the `rawOutput` bit so Scaladoc also outputs text from each html file
-   *  @param input One of the flags `yes/no` or `on/off`. Default is no/off. */
-  def setRawOutput(input: String) =
-    docRawOutput = Flag.getBooleanValue(input, "rawOutput")
-
-  /** Set the `noPrefixes` bit to prevent Scaladoc from generating prefixes in
-   *  front of types -- may lead to confusion, but significantly speeds up the generation.
-   *  @param input One of the flags `yes/no` or `on/off`. Default is no/off. */
-  def setNoPrefixes(input: String) =
-    docNoPrefixes = Flag.getBooleanValue(input, "noPrefixes")
-
-  /** Instruct the scaladoc tool to group similar functions together */
-  def setGroups(input: String) =
-    docGroups = Flag.getBooleanValue(input, "groups")
-
-  /** Instruct the scaladoc tool to skip certain packages.
-   *  @param input A colon-delimited list of fully qualified package names that will be skipped from scaladoc.
-   */
-  def setSkipPackages(input: String) =
-    docSkipPackages = input
-
-/*============================================================================*\
-**                             Properties getters                             **
-\*============================================================================*/
-
-  /** Gets the value of the `classpath` attribute in a
-   *  Scala-friendly form.
-   *
-   *  @return The class path as a list of files.
-   */
-  private def getClasspath: List[File] =
-    if (classpath.isEmpty) buildError("Member 'classpath' is empty.")
-    else classpath.get.list().toList map nameToFile
-
-  /** Gets the value of the `origin` attribute in a Scala-friendly
-   *  form.
-   *
-   *  @return The origin path as a list of files.
-   */
-  private def getOrigin: List[File] =
-    if (origin.isEmpty) buildError("Member 'origin' is empty.")
-    else origin.get.list().toList map nameToFile
-
-  /** Gets the value of the `destination` attribute in a
-   *  Scala-friendly form.
-   *
-   *  @return The destination as a file.
-   */
-  private def getDestination: File =
-    if (destination.isEmpty) buildError("Member 'destination' is empty.")
-    else existing(getProject resolveFile destination.get.toString)
-
-  /** Gets the value of the `sourcepath` attribute in a
-   *  Scala-friendly form.
-   *
-   *  @return The source path as a list of files.
-   */
-  private def getSourcepath: List[File] =
-    if (sourcepath.isEmpty) buildError("Member 'sourcepath' is empty.")
-    else sourcepath.get.list().toList map nameToFile
-
-  /** Gets the value of the `bootclasspath` attribute in a
-   *  Scala-friendly form.
-   *
-   *  @return The boot class path as a list of files.
-   */
-  private def getBootclasspath: List[File] =
-    if (bootclasspath.isEmpty) buildError("Member 'bootclasspath' is empty.")
-    else bootclasspath.get.list().toList map nameToFile
-
-  /** Gets the value of the `extdirs` attribute in a
-   *  Scala-friendly form.
-   *
-   *  @return The extensions path as a list of files.
-   */
-  private def getExtdirs: List[File] =
-    if (extdirs.isEmpty) buildError("Member 'extdirs' is empty.")
-    else extdirs.get.list().toList map nameToFile
-
-/*============================================================================*\
-**                       Compilation and support methods                      **
-\*============================================================================*/
-
-  /** This is a forwarding method to circumvent bug #281 in Scala 2. Remove when
-   *  the bug has been corrected.
-   */
-  override protected def getDirectoryScanner(baseDir: java.io.File) =
-    super.getDirectoryScanner(baseDir)
-
-  /** Transforms a string name into a file relative to the provided base
-   *  directory.
-   *
-   *  @param base A file pointing to the location relative to which the name
-   *              will be resolved.
-   *  @param name A relative or absolute path to the file as a string.
-   *  @return     A file created from the name and the base file.
-   */
-  private def nameToFile(base: File)(name: String): File =
-    existing(fileUtils.resolveFile(base, name))
-
-  /** Transforms a string name into a file relative to the build root
-   *  directory.
-   *
-   *  @param name A relative or absolute path to the file as a string.
-   *  @return     A file created from the name.
-   */
-  private def nameToFile(name: String): File =
-    existing(getProject resolveFile name)
-
-  /** Tests if a file exists and prints a warning in case it doesn't. Always
-   *  returns the file, even if it doesn't exist.
-   *
-   *  @param file A file to test for existence.
-   *  @return     The same file.
-   */
-  private def existing(file: File): File = {
-    if (!file.exists())
-      log("Element '" + file.toString + "' does not exist.",
-          Project.MSG_WARN)
-    file
-  }
-
-  /** Transforms a path into a Scalac-readable string.
-   *
-   *  @param path A path to convert.
-   *  @return     A string-representation of the path like `a.jar:b.jar`.
-   */
-  private def asString(path: List[File]): String =
-    path.map(asString).mkString("", File.pathSeparator, "")
-
-  /** Transforms a file into a Scalac-readable string.
-   *
-   *  @param file A file to convert.
-   *  @return     A string-representation of the file like `/x/k/a.scala`.
-   */
-  private def asString(file: File): String =
-    file.getAbsolutePath()
-
-/*============================================================================*\
-**                           The big execute method                           **
-\*============================================================================*/
-
-  /** Initializes settings and source files */
-  protected def initialize: Pair[Settings, List[File]] = {
-    // Tests if all mandatory attributes are set and valid.
-    if (origin.isEmpty) buildError("Attribute 'srcdir' is not set.")
-    if (getOrigin.isEmpty) buildError("Attribute 'srcdir' is not set.")
-    if (!destination.isEmpty && !destination.get.isDirectory())
-      buildError("Attribute 'destdir' does not refer to an existing directory.")
-    if (destination.isEmpty) destination = Some(getOrigin.head)
-
-    val mapper = new GlobPatternMapper()
-    mapper setTo "*.html"
-    mapper setFrom "*.scala"
-
-    // Scans source directories to build up the compile list.
-    // If force is false, only files where the .class file in destination is
-    // older than the .scala file will be used.
-    val sourceFiles: List[File] =
-      for {
-        originDir <- getOrigin
-        originFile <- {
-          val includedFiles =
-            getDirectoryScanner(originDir).getIncludedFiles()
-          val list = includedFiles.toList
-          if (list.length > 0)
-            log(
-              "Documenting " + list.length + " source file" +
-              (if (list.length > 1) "s" else "") +
-              (" to " + getDestination.toString)
-            )
-          else
-            log("No files selected for documentation", Project.MSG_VERBOSE)
-
-          list
-        }
-      } yield {
-        log(originFile, Project.MSG_DEBUG)
-        nameToFile(originDir)(originFile)
-      }
-
-    def decodeEscapes(s: String): String = {
-      // In Ant script characters '<' and '>' must be encoded when
-      // used in attribute values, e.g. for attributes "doctitle", "header", ..
-      // in task Scaladoc you may write:
-      //   doctitle="&lt;div&gt;Scala&lt;/div&gt;"
-      // so we have to decode them here.
-      s.replaceAll("&lt;", "<").replaceAll("&gt;",">")
-       .replaceAll("&amp;", "&").replaceAll("&quot;", "\"")
-    }
-
-    // Builds-up the compilation settings for Scalac with the existing Ant
-    // parameters.
-    val docSettings = new Settings(buildError)
-    docSettings.outdir.value = asString(destination.get)
-    if (!classpath.isEmpty)
-      docSettings.classpath.value = asString(getClasspath)
-    if (!sourcepath.isEmpty)
-      docSettings.sourcepath.value = asString(getSourcepath)
-    /*else if (origin.get.size() > 0)
-      settings.sourcepath.value = origin.get.list()(0)*/
-    if (!bootclasspath.isEmpty)
-      docSettings.bootclasspath.value = asString(getBootclasspath)
-    if (!extdirs.isEmpty) docSettings.extdirs.value = asString(getExtdirs)
-    if (!encoding.isEmpty) docSettings.encoding.value = encoding.get
-    if (!doctitle.isEmpty) docSettings.doctitle.value = decodeEscapes(doctitle.get)
-    if (!docfooter.isEmpty) docSettings.docfooter.value = decodeEscapes(docfooter.get)
-    if (!docversion.isEmpty) docSettings.docversion.value = decodeEscapes(docversion.get)
-    if (!docsourceurl.isEmpty) docSettings.docsourceurl.value = decodeEscapes(docsourceurl.get)
-    if (!docUncompilable.isEmpty) docSettings.docUncompilable.value = decodeEscapes(docUncompilable.get)
-
-    docSettings.deprecation.value = deprecation
-    docSettings.unchecked.value = unchecked
-    docSettings.docImplicits.value = docImplicits
-    docSettings.docImplicitsDebug.value = docImplicitsDebug
-    docSettings.docImplicitsShowAll.value = docImplicitsShowAll
-    docSettings.docDiagrams.value = docDiagrams
-    docSettings.docDiagramsDebug.value = docDiagramsDebug
-    docSettings.docRawOutput.value = docRawOutput
-    docSettings.docNoPrefixes.value = docNoPrefixes
-    docSettings.docGroups.value = docGroups
-    docSettings.docSkipPackages.value = docSkipPackages
-    if(!docDiagramsDotPath.isEmpty) docSettings.docDiagramsDotPath.value = docDiagramsDotPath.get
-
-    if (!docgenerator.isEmpty) docSettings.docgenerator.value = docgenerator.get
-    if (!docrootcontent.isEmpty) docSettings.docRootContent.value = docrootcontent.get.getAbsolutePath()
-    log("Scaladoc params = '" + addParams + "'", Project.MSG_DEBUG)
-
-    docSettings processArgumentString addParams
-    Pair(docSettings, sourceFiles)
-  }
-
-  def safeBuildError(message: String): Unit = if (nofail) log(message) else buildError(message)
-
-  /** Performs the documentation generation. */
-  override def execute() = {
-    val Pair(docSettings, sourceFiles) = initialize
-    val reporter = new ConsoleReporter(docSettings)
-    try {
-      val docProcessor = new scala.tools.nsc.doc.DocFactory(reporter, docSettings)
-      docProcessor.document(sourceFiles.map (_.toString))
-      if (reporter.ERROR.count > 0)
-        safeBuildError(
-          "Document failed with " +
-          reporter.ERROR.count + " error" +
-          (if (reporter.ERROR.count > 1) "s" else "") +
-          "; see the documenter error output for details.")
-      else if (reporter.WARNING.count > 0)
-        log(
-          "Document succeeded with " +
-          reporter.WARNING.count + " warning" +
-          (if (reporter.WARNING.count > 1) "s" else "") +
-          "; see the documenter output for details.")
-      reporter.printSummary()
-    } catch {
-      case exception: Throwable =>
-        exception.printStackTrace()
-        val msg = Option(exception.getMessage) getOrElse "no error message provided"
-        safeBuildError(s"Document failed because of an internal documenter error ($msg); see the error output for details.")
-    }
-  }
-}
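
The execute method removed above shows the wiring the Ant task used: a doc Settings instance, a ConsoleReporter, and a DocFactory that documents the collected source files. A minimal standalone sketch of the same wiring outside Ant (the output directory and source path below are illustrative assumptions, not values from this patch):

    import scala.tools.nsc.doc.{ DocFactory, Settings }
    import scala.tools.nsc.reporters.ConsoleReporter

    object ScaladocSketch {
      def main(args: Array[String]): Unit = {
        // The error handler plays the role of buildError in the Ant task.
        val docSettings = new Settings(msg => sys.error(msg))
        docSettings.outdir.value = "target/api"              // assumed output directory
        val reporter = new ConsoleReporter(docSettings)
        // document() takes the source file names, exactly as execute() does above.
        new DocFactory(reporter, docSettings).document(List("src/Example.scala"))
        reporter.printSummary()
      }
    }
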
diff --git a/src/compiler/scala/tools/ant/antlib.xml b/src/compiler/scala/tools/ant/antlib.xml
index 78159e6..7885534 100644
--- a/src/compiler/scala/tools/ant/antlib.xml
+++ b/src/compiler/scala/tools/ant/antlib.xml
@@ -11,8 +11,6 @@
              classname="scala.tools.ant.Scaladoc"/>
     <taskdef name="scalatool"
              classname="scala.tools.ant.ScalaTool"/>
-    <taskdef name="same"
-             classname="scala.tools.ant.Same"/>
     <taskdef name="pack200"
              classname="scala.tools.ant.Pack200Task"/>
 </antlib>
diff --git a/src/compiler/scala/tools/ant/sabbus/Break.scala b/src/compiler/scala/tools/ant/sabbus/Break.scala
index 0b6701b..b170cea 100644
--- a/src/compiler/scala/tools/ant/sabbus/Break.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Break.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.tools.ant.sabbus
+package scala
+package tools.ant.sabbus
 
 import org.apache.tools.ant.Task
 
diff --git a/src/compiler/scala/tools/ant/sabbus/Compilers.scala b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
index b199423..a0aad49 100644
--- a/src/compiler/scala/tools/ant/sabbus/Compilers.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
@@ -27,7 +27,7 @@ object Compilers extends scala.collection.DefaultMap[String, Compiler] {
     if (debug) println("Making compiler " + id)
     if (debug) println("  memory before: " + freeMemoryString)
     val comp = new Compiler(classpath, settings)
-    container += Pair(id, comp)
+    container(id) = comp
     if (debug) println("  memory after: " + freeMemoryString)
     comp
   }
diff --git a/src/compiler/scala/tools/ant/sabbus/Make.scala b/src/compiler/scala/tools/ant/sabbus/Make.scala
index 5274594..027a828 100644
--- a/src/compiler/scala/tools/ant/sabbus/Make.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Make.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.tools.ant.sabbus
+package scala
+package tools.ant.sabbus
 
 import java.io.File
 import org.apache.tools.ant.Task
diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
index 9cdf484..595b45a 100644
--- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.tools.ant
+package scala
+package tools.ant
 package sabbus
 
 import java.io.{ File, FileWriter }
@@ -80,7 +81,7 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
 
   private def createMapper() = {
     val mapper = new GlobPatternMapper()
-    val extension = if (isMSIL) "*.msil" else "*.class"
+    val extension = "*.class"
     mapper setTo extension
     mapper setFrom "*.scala"
 
@@ -104,9 +105,6 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
     sourcePath foreach (settings.sourcepath = _)
     settings.extraParams = extraArgsFlat
 
-    if (isMSIL)
-      settings.sourcedir = sourceDir
-
     val mapper = createMapper()
 
     val includedFiles: Array[File] =
@@ -117,12 +115,12 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
         mapper
       ) map (x => new File(sourceDir, x))
 
-    /** Nothing to do. */
+    /* Nothing to do. */
     if (includedFiles.isEmpty && argfile.isEmpty)
       return
 
     if (includedFiles.nonEmpty)
-      log("Compiling %d file%s to %s".format(includedFiles.size, plural(includedFiles.size), destinationDir))
+      log("Compiling %d file%s to %s".format(includedFiles.length, plural(includedFiles.length), destinationDir))
 
     argfile foreach (x => log("Using argfile file: @" + x))
 
diff --git a/src/compiler/scala/tools/ant/sabbus/Settings.scala b/src/compiler/scala/tools/ant/sabbus/Settings.scala
index fde61e9..a86af73 100644
--- a/src/compiler/scala/tools/ant/sabbus/Settings.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Settings.scala
@@ -10,7 +10,7 @@ package scala.tools.ant.sabbus
 
 import java.io.File
 
-import org.apache.tools.ant.types.{Path, Reference}
+import org.apache.tools.ant.types.Path
 
 class Settings {
 
@@ -93,4 +93,18 @@ class Settings {
     case _ => false
   }
 
+  override lazy val hashCode: Int = Seq[Any](
+    gBf,
+    uncheckedBf,
+    classpathBf,
+    sourcepathBf,
+    sourcedirBf,
+    bootclasspathBf,
+    extdirsBf,
+    dBf,
+    encodingBf,
+    targetBf,
+    optimiseBf,
+    extraParamsBf
+  ).##
 }
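
The hashCode added here funnels the settings' backing fields through Seq[Any](...).##, so that settings which compare equal also hash identically. A small standalone illustration of the idiom (Conf is a made-up class, not the sabbus Settings):

    class Conf(val debug: Boolean, val target: String, val extra: List[String]) {
      override def equals(other: Any): Boolean = other match {
        case that: Conf => debug == that.debug && target == that.target && extra == that.extra
        case _          => false
      }
      // Same idiom as above: hash exactly the fields compared in equals.
      override lazy val hashCode: Int = Seq[Any](debug, target, extra).##
    }
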
diff --git a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
index 6bb1aaa..b061bcf 100644
--- a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
+++ b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
@@ -98,6 +98,4 @@ trait TaskArgs extends CompilationPathProperty {
     val parts = a.getParts
     if(parts eq null) Seq[String]() else parts.toSeq
   }
-
-  def isMSIL = compTarget exists (_ == "msil")
 }
diff --git a/src/compiler/scala/tools/ant/sabbus/Use.scala b/src/compiler/scala/tools/ant/sabbus/Use.scala
index 2c97232..a8736f2 100644
--- a/src/compiler/scala/tools/ant/sabbus/Use.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Use.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.tools.ant
+package scala
+package tools.ant
 package sabbus
 
 import java.io.File
@@ -53,9 +54,9 @@ class Use extends ScalaMatchingTask {
         compiler.settings.d,
         mapper
       ) map (new File(sourceDir.get, _))
-    if (includedFiles.size > 0)
+    if (includedFiles.length > 0)
       try {
-        log("Compiling " + includedFiles.size + " file" + (if (includedFiles.size > 1) "s" else "") + " to " + compiler.settings.d.getAbsolutePath)
+        log("Compiling " + includedFiles.length + " file" + (if (includedFiles.length > 1) "s" else "") + " to " + compiler.settings.d.getAbsolutePath)
         val (errors, warnings) = compiler.compile(includedFiles)
         if (errors > 0)
           sys.error("Compilation failed with " + errors + " error" + (if (errors > 1) "s" else "") + ".")
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
old mode 100644
new mode 100755
index f1c6c52..f58223a
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -73,8 +73,7 @@ SEP=":"
 
 # Possible additional command line options
 WINDOWS_OPT=""
-EMACS_OPT=""
-[[ -n "$EMACS" ]] && EMACS_OPT="-Denv.emacs=$EMACS"
+EMACS_OPT="-Denv.emacs=$EMACS"
 
 # Remove spaces from SCALA_HOME on windows
 if [[ -n "$cygwin" ]]; then
@@ -102,6 +101,9 @@ if [[ -n "$cygwin" ]]; then
         format=windows
     fi
     SCALA_HOME="$(cygpath --$format "$SCALA_HOME")"
+    if [[ -n "$JAVA_HOME" ]]; then
+        JAVA_HOME="$(cygpath --$format "$JAVA_HOME")"
+    fi
     TOOL_CLASSPATH="$(cygpath --path --$format "$TOOL_CLASSPATH")"
 elif [[ -n "$mingw" ]]; then
     SCALA_HOME="$(cmd //c echo "$SCALA_HOME")"
@@ -112,7 +114,7 @@ if [[ -n "$cygwin$mingw" ]]; then
     case "$TERM" in
         rxvt* | xterm*)
             stty -icanon min 1 -echo
-            WINDOWS_OPT="-Djline.terminal=scala.tools.jline.UnixTerminal"
+            WINDOWS_OPT="-Djline.terminal=unix"
         ;;
     esac
 fi
@@ -142,6 +144,10 @@ classpathArgs () {
   fi
 }
 
+# SI-8358, SI-8368 -- the default should really be false,
+# but I don't want to flip the default during 2.11's RC cycle
+OVERRIDE_USEJAVACP="-Dscala.usejavacp=true"
+
 while [[ $# -gt 0 ]]; do
   case "$1" in
     -D*)
@@ -149,6 +155,8 @@ while [[ $# -gt 0 ]]; do
       # need it, e.g. communicating with a server compiler.
       java_args=("${java_args[@@]}" "$1")
       scala_args=("${scala_args[@@]}" "$1")
+      # respect user-supplied -Dscala.usejavacp
+      case "$1" in -Dscala.usejavacp*) OVERRIDE_USEJAVACP="";; esac
       shift
       ;;
     -J*)
@@ -197,8 +205,8 @@ execCommand \
   "${java_args[@@]}" \
   $(classpathArgs) \
   -Dscala.home="$SCALA_HOME" \
-  -Dscala.usejavacp=true \
-  $EMACS_OPT \
+  $OVERRIDE_USEJAVACP \
+  "$EMACS_OPT" \
   $WINDOWS_OPT \
   @properties@ @class@ @toolflags@ "$@@"
 
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index 8441f3a..cf0e003 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -25,6 +25,10 @@ shift
 
 :notoolcp
 
+rem SI-8358, SI-8368 -- the default should really be false,
+rem but I don't want to flip the default during 2.11's RC cycle
+set _OVERRIDE_USEJAVACP="-Dscala.usejavacp=true"
+
 rem We keep in _JAVA_PARAMS all -J-prefixed and -D-prefixed arguments
 set _JAVA_PARAMS=
 
@@ -45,6 +49,10 @@ if "%_TEST_PARAM:~0,2%"=="-J" (
 )
 
 if "%_TEST_PARAM:~0,2%"=="-D" (
+  rem Only match beginning of the -D option. The relevant bit is 17 chars long.
+  if "%_TEST_PARAM:~0,17%"=="-Dscala.usejavacp" (
+    set _OVERRIDE_USEJAVACP=
+  )
   rem test if this was double-quoted property "-Dprop=42"
   for /F "delims== tokens=1-2" %%G in ("%_TEST_PARAM%") DO (
     if not "%%G" == "%_TEST_PARAM%" (
@@ -126,7 +134,7 @@ if "%_TOOL_CLASSPATH%"=="" (
 
 if not "%_LINE_TOOLCP%"=="" call :add_cpath "%_LINE_TOOLCP%"
 
-set _PROPS=-Dscala.home="!_SCALA_HOME!" -Denv.emacs="%EMACS%" -Dscala.usejavacp=true @properties@
+set _PROPS=-Dscala.home="!_SCALA_HOME!" -Denv.emacs="%EMACS%" %_OVERRIDE_USEJAVACP% @properties@
 
 rem echo "%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %*
 "%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %*
diff --git a/src/compiler/scala/tools/cmd/CommandLine.scala b/src/compiler/scala/tools/cmd/CommandLine.scala
index 75f96d3..781cc56 100644
--- a/src/compiler/scala/tools/cmd/CommandLine.scala
+++ b/src/compiler/scala/tools/cmd/CommandLine.scala
@@ -16,27 +16,27 @@ trait CommandLineConfig {
 /** An instance of a command line, parsed according to a Spec.
  */
 class CommandLine(val spec: Reference, val originalArgs: List[String]) extends CommandLineConfig {
-  def this(spec: Reference, line: String) = this(spec, Parser tokenize line)
+  def this(spec: Reference, line: String) = this(spec, CommandLineParser tokenize line)
   def this(spec: Reference, args: Array[String]) = this(spec, args.toList)
 
-  import spec.{ isAnyOption, isUnaryOption, isBinaryOption, isExpandOption }
+  import spec.{ isUnaryOption, isBinaryOption, isExpandOption }
 
   val Terminator = "--"
   val ValueForUnaryOption = "true"  // so if --opt is given, x(--opt) = true
 
-  def mapForUnary(opt: String) = Map(opt -> ValueForUnaryOption)
+  def mapForUnary(opt: String) = Map(fromOpt(opt) -> ValueForUnaryOption)
   def errorFn(msg: String) = println(msg)
 
   /** argMap is option -> argument (or "" if it is a unary argument)
    *  residualArgs are what is left after removing the options and their args.
    */
-  lazy val (argMap, residualArgs) = {
+  lazy val (argMap, residualArgs): (Map[String, String], List[String]) = {
     val residualBuffer = new ListBuffer[String]
 
     def loop(args: List[String]): Map[String, String] = {
       def residual(xs: List[String]) = { residualBuffer ++= xs ; Map[String, String]() }
 
-      /** Returns Some(List(args)) if this option expands to an
+      /*  Returns Some(List(args)) if this option expands to an
        *  argument list and it's not returning only the same arg.
        */
       def expand(s1: String) = {
@@ -48,7 +48,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C
         else None
       }
 
-      /** Assumes known options have all been ruled out already. */
+      /* Assumes known options have all been ruled out already. */
       def isUnknown(opt: String) =
         onlyKnownOptions && (opt startsWith "-") && {
           errorFn("Option '%s' not recognized.".format(opt))
@@ -72,7 +72,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C
 
           if (x2 == Terminator)         mapForUnary(x1) ++ residual(xs)
           else if (isUnaryOption(x1))   mapForUnary(x1) ++ loop(args.tail)
-          else if (isBinaryOption(x1))  Map(x1 -> x2) ++ loop(xs)
+          else if (isBinaryOption(x1))  Map(fromOpt(x1) -> x2) ++ loop(xs)
           else if (isUnknown(x1))       loop(args.tail)
           else                          residual(List(x1)) ++ loop(args.tail)
       }
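
With mapForUnary and the binary case now keying through fromOpt, argMap stores options under their bare names, without the leading "--". A small self-contained sketch of that normalization, using a made-up two-option spec rather than a real Reference:

    object ArgMapSketch {
      def fromOpt(s: String) = s stripPrefix "--"            // same normalization the parser now applies
      val unary  = Set("--verbose")                          // hypothetical unary option
      val binary = Set("--out")                              // hypothetical binary option

      def loop(args: List[String]): Map[String, String] = args match {
        case x :: rest      if unary(x)  => Map(fromOpt(x) -> "true") ++ loop(rest)
        case x :: v :: rest if binary(x) => Map(fromOpt(x) -> v) ++ loop(rest)
        case _ :: rest                   => loop(rest)
        case Nil                         => Map()
      }

      def main(args: Array[String]): Unit =
        println(loop(List("--verbose", "--out", "target", "extra")))
        // Map(verbose -> true, out -> target): keys carry no "--" prefix
    }
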
diff --git a/src/compiler/scala/tools/cmd/CommandLineParser.scala b/src/compiler/scala/tools/cmd/CommandLineParser.scala
new file mode 100644
index 0000000..6132eff
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/CommandLineParser.scala
@@ -0,0 +1,72 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+
+import scala.annotation.tailrec
+
+/** A simple (overly so) command line parser.
+ *  !!! This needs a thorough test suite to make sure quoting is
+ *  done correctly and portably.
+ */
+object CommandLineParser {
+  // splits a string into a quoted prefix and the rest of the string,
+  // taking escaping into account (using \)
+  // `"abc"def` will match as `DoubleQuoted(abc, def)`
+  private class QuotedExtractor(quote: Char) {
+    def unapply(in: String): Option[(String, String)] = {
+      val del = quote.toString
+      if (in startsWith del) {
+        var escaped = false
+        val (quoted, next) = (in substring 1) span {
+          case `quote` if !escaped => false
+          case '\\'    if !escaped => escaped = true; true
+          case _                   => escaped = false; true
+        }
+        // the only way to get out of the above loop is with an empty next or !escaped
+        // require(next.isEmpty || !escaped)
+        if (next startsWith del) Some((quoted, next substring 1))
+        else None
+      } else None
+    }
+  }
+  private object DoubleQuoted extends QuotedExtractor('"')
+  private object SingleQuoted extends QuotedExtractor('\'')
+  private val Word = """(\S+)(.*)""".r
+
+  // parse `in` for an argument, return it and the remainder of the input (or an error message)
+  // (argument may be in single/double quotes, taking escaping into account, quotes are stripped)
+  private def argument(in: String): Either[String, (String, String)] = in match {
+    case DoubleQuoted(arg, rest) => Right((arg, rest))
+    case SingleQuoted(arg, rest) => Right((arg, rest))
+    case Word(arg, rest)         => Right((arg, rest))
+    case _                       => Left(s"Illegal argument: $in")
+  }
+
+  // parse a list of whitespace-separated arguments (ignoring whitespace in quoted arguments)
+  @tailrec private def commandLine(in: String, accum: List[String] = Nil): Either[String, (List[String], String)] = {
+    val trimmed = in.trim
+    if (trimmed.isEmpty) Right((accum.reverse, ""))
+    else argument(trimmed) match {
+      case Right((arg, next)) =>
+        (next span Character.isWhitespace) match {
+          case("", rest) if rest.nonEmpty => Left("Arguments should be separated by whitespace.") // TODO: can this happen?
+          case(ws, rest)                  => commandLine(rest, arg :: accum)
+        }
+      case Left(msg) => Left(msg)
+    }
+  }
+
+  class ParseException(msg: String) extends RuntimeException(msg)
+
+  def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x))
+  def tokenize(line: String, errorFn: String => Unit): List[String] = {
+    commandLine(line) match {
+      case Right((args, _)) => args
+      case Left(msg)        => errorFn(msg) ; Nil
+    }
+  }
+}
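
The new tokenizer splits on whitespace but keeps single- or double-quoted arguments together, with the surrounding quotes stripped. A quick usage sketch (the sample command line is invented):

    import scala.tools.cmd.CommandLineParser

    object TokenizeSketch {
      def main(args: Array[String]): Unit = {
        val tokens = CommandLineParser.tokenize("""run "hello world" 'one two' --flag""")
        println(tokens)   // List(run, hello world, one two, --flag)
      }
    }
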
diff --git a/src/compiler/scala/tools/cmd/Demo.scala b/src/compiler/scala/tools/cmd/Demo.scala
deleted file mode 100644
index af81884..0000000
--- a/src/compiler/scala/tools/cmd/Demo.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools
-package cmd
-
-/** A sample command specification for illustrative purposes.
- *  First take advantage of the meta-options:
- *
- *    // this command creates an executable runner script "demo"
- *    % scala scala.tools.cmd.Demo --self-update demo
- *
- *    // this one creates and sources a completion file - note backticks
- *    % `./demo --bash`
- *
- *    // and now you have a runner with working completion
- *    % ./demo --<tab>
- *       --action           --defint           --int
- *       --bash             --defstr           --str
- *       --defenv           --self-update      --unary
- *
- *  The normal option configuration is plausibly self-explanatory.
- */
-trait DemoSpec extends Spec with Meta.StdOpts with Interpolation {
-  lazy val referenceSpec  = DemoSpec
-  lazy val programInfo    = Spec.Info("demo", "Usage: demo [<options>]", "scala.tools.cmd.Demo")
-
-  help("""Usage: demo [<options>]""")
-  heading("Unary options:")
-
-  val optIsUnary      = "unary"         / "a unary option"              --?  ;
-  ("action" / "a body which may be run") --> println("Hello, I am the --action body.")
-
-  heading("Binary options:")
-  val optopt          = "str"       / "an optional String"        --|
-  val optoptInt       = ("int"      / "an optional Int") .        --^[Int]
-  val optEnv          = "defenv"    / "an optional String"        defaultToEnv  "PATH"
-  val optDefault      = "defstr"    / "an optional String"        defaultTo     "default"
-  val optDefaultInt   = "defint"    / "an optional Int"           defaultTo     -1
-  val optExpand       = "alias"     / "an option which expands"   expandTo      ("--int", "15")
-}
-
-object DemoSpec extends DemoSpec with Property {
-  lazy val propMapper = new PropertyMapper(DemoSpec)
-
-  type ThisCommandLine = SpecCommandLine
-  def creator(args: List[String]) =
-    new SpecCommandLine(args) {
-      override def errorFn(msg: String) = { println("Error: " + msg) ; sys.exit(0) }
-    }
-}
-
-class Demo(args: List[String]) extends {
-  val parsed = DemoSpec(args: _*)
-} with DemoSpec with Instance {
-  import java.lang.reflect._
-
-  def helpMsg = DemoSpec.helpMsg
-  def demoSpecMethods = this.getClass.getMethods.toList
-  private def isDemo(m: Method) = (m.getName startsWith "opt") && !(m.getName contains "$") && (m.getParameterTypes.isEmpty)
-
-  def demoString(ms: List[Method]) = {
-    val longest   = ms map (_.getName.length) max
-    val formatStr = "    %-" + longest + "s: %s"
-    val xs        = ms map (m => formatStr.format(m.getName, m.invoke(this)))
-
-    xs mkString ("Demo(\n  ", "\n  ", "\n)\n")
-  }
-
-  override def toString = demoString(demoSpecMethods filter isDemo)
-}
-
-object Demo {
-  def main(args: Array[String]): Unit = {
-    val runner = new Demo(args.toList)
-
-    if (args.isEmpty)
-      println(runner.helpMsg)
-
-    println(runner)
-  }
-}
diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala
index cba2e99..0b074ef 100644
--- a/src/compiler/scala/tools/cmd/FromString.scala
+++ b/src/compiler/scala/tools/cmd/FromString.scala
@@ -7,42 +7,34 @@ package scala.tools
 package cmd
 
 import nsc.io.{ Path, File, Directory }
-import scala.reflect.runtime.{universe => ru}
-import scala.tools.reflect.StdRuntimeTags._
+import scala.reflect.OptManifest
 
 /** A general mechanism for defining how a command line argument
  *  (always a String) is transformed into an arbitrary type.  A few
  *  example instances are in the companion object, but in general
  *  either IntFromString will suffice or you'll want custom transformers.
  */
-abstract class FromString[+T](implicit t: ru.TypeTag[T]) extends PartialFunction[String, T] {
+abstract class FromString[+T](implicit m: OptManifest[T]) extends PartialFunction[String, T] {
   def apply(s: String): T
   def isDefinedAt(s: String): Boolean = true
   def zero: T = apply("")
 
-  def targetString: String = t.toString
+  def targetString: String = m.toString
 }
 
 object FromString {
-  // We need these because we clash with the String => Path implicits.
-  private def toFile(s: String) = new File(new java.io.File(s))
+  // We need this because we clash with the String => Path implicits.
   private def toDir(s: String)  = new Directory(new java.io.File(s))
 
   /** Path related stringifiers.
    */
-  val ExistingFile: FromString[File] = new FromString[File]()(tagOfFile) {
-    override def isDefinedAt(s: String) = toFile(s).isFile
-    def apply(s: String): File =
-      if (isDefinedAt(s)) toFile(s)
-      else cmd.runAndExit(println("'%s' is not an existing file." format s))
-  }
-  val ExistingDir: FromString[Directory] = new FromString[Directory]()(tagOfDirectory) {
+  val ExistingDir: FromString[Directory] = new FromString[Directory] {
     override def isDefinedAt(s: String) = toDir(s).isDirectory
     def apply(s: String): Directory =
       if (isDefinedAt(s)) toDir(s)
       else cmd.runAndExit(println("'%s' is not an existing directory." format s))
   }
-  def ExistingDirRelativeTo(root: Directory) = new FromString[Directory]()(tagOfDirectory) {
+  def ExistingDirRelativeTo(root: Directory) = new FromString[Directory] {
     private def resolve(s: String) = (toDir(s) toAbsoluteWithRoot root).toDirectory
     override def isDefinedAt(s: String) = resolve(s).isDirectory
     def apply(s: String): Directory =
@@ -53,19 +45,19 @@ object FromString {
   /** Argument expander, i.e. turns single argument "foo bar baz" into argument
    *  list "foo", "bar", "baz".
    */
-  val ArgumentsFromString: FromString[List[String]] = new FromString[List[String]]()(tagOfListOfString) {
+  val ArgumentsFromString: FromString[List[String]] = new FromString[List[String]] {
     def apply(s: String) = toArgs(s)
   }
 
   /** Identity.
    */
-  implicit val StringFromString: FromString[String] = new FromString[String]()(tagOfString) {
+  implicit val StringFromString: FromString[String] = new FromString[String] {
     def apply(s: String): String = s
   }
 
   /** Implicit as the most likely to be useful as-is.
    */
-  implicit val IntFromString: FromString[Int] = new FromString[Int]()(tagOfInt) {
+  implicit val IntFromString: FromString[Int] = new FromString[Int] {
     override def isDefinedAt(s: String)   = safeToInt(s).isDefined
     def apply(s: String)                  = safeToInt(s).get
     def safeToInt(s: String): Option[Int] = try Some(java.lang.Integer.parseInt(s)) catch { case _: NumberFormatException => None }
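
After this change a FromString still behaves as a PartialFunction from the raw command-line string to the target type; only the evidence switched from a TypeTag to an OptManifest. A hedged sketch of a custom instance (the on/off convention is an assumption for illustration, not something this patch defines):

    import scala.tools.cmd.FromString

    object OnOffSketch {
      // Accepts only "on"/"off" (case-insensitively) and maps them to Boolean.
      val OnOffFromString: FromString[Boolean] = new FromString[Boolean] {
        override def isDefinedAt(s: String) = Set("on", "off")(s.toLowerCase)
        def apply(s: String): Boolean       = s.equalsIgnoreCase("on")
      }

      def main(args: Array[String]): Unit =
        println((OnOffFromString("on"), OnOffFromString.isDefinedAt("maybe")))   // (true,false)
    }
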
diff --git a/src/compiler/scala/tools/cmd/Interpolation.scala b/src/compiler/scala/tools/cmd/Interpolation.scala
index abffd6b..d1c798b 100644
--- a/src/compiler/scala/tools/cmd/Interpolation.scala
+++ b/src/compiler/scala/tools/cmd/Interpolation.scala
@@ -3,7 +3,8 @@
  * @author  Paul Phillips
  */
 
-package scala.tools
+package scala
+package tools
 package cmd
 
 /** Interpolation logic for generated files.  The idea is to be
diff --git a/src/compiler/scala/tools/cmd/Opt.scala b/src/compiler/scala/tools/cmd/Opt.scala
index 2c19312..df3d0c4 100644
--- a/src/compiler/scala/tools/cmd/Opt.scala
+++ b/src/compiler/scala/tools/cmd/Opt.scala
@@ -26,10 +26,10 @@ object Opt {
   trait Implicit {
     def name: String
     def programInfo: Info
-    protected def opt = toOpt(name)
+    protected def opt = fromOpt(name)
 
     def --? : Boolean                       // --opt is set
-    def --> (body: => Unit): Unit           // if --opt is set, execute body
+    def --> (body: => Unit): Boolean        // if --opt is set, execute body
     def --| : Option[String]                // --opt <arg: String> is optional, result is Option[String]
     def --^[T: FromString] : Option[T]      // --opt <arg: T> is optional, result is Option[T]
 
@@ -51,7 +51,7 @@ object Opt {
     import options._
 
     def --?                             = { addUnary(opt) ; false }
-    def --> (body: => Unit)             = { addUnary(opt) }
+    def --> (body: => Unit)             = { addUnary(opt) ; false }
     def --|                             = { addBinary(opt) ; None }
     def --^[T: FromString]              = { addBinary(opt) ; None }
 
@@ -65,7 +65,7 @@ object Opt {
 
   class Instance(val programInfo: Info, val parsed: CommandLine, val name: String) extends Implicit with Error {
     def --?                             = parsed isSet opt
-    def --> (body: => Unit)             = if (parsed isSet opt) body
+    def --> (body: => Unit)             = { val isSet = parsed isSet opt ; if (isSet) body ; isSet }
     def --|                             = parsed get opt
     def --^[T: FromString]              = {
       val fs = implicitly[FromString[T]]
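
Together with the Reference changes, the option DSL now registers names through fromOpt, and --> reports whether its body actually ran, mirroring --?. A sketch of how a spec uses these combinators, modelled on the DemoSpec deleted earlier in this patch (the option names are illustrative):

    // Inside a Spec trait, where the OptionMagic implicit is in scope:
    //   val optVerbose = "verbose" / "print extra output"        --?        // Boolean: was --verbose given?
    //   val optOut     = "out"     / "an optional output dir"    --|        // Option[String]
    //   val optCount   = ("count"  / "an optional Int") .        --^[Int]   // Option[Int]
    //   val ranInit    = ("init"   / "a body which may be run") --> println("init ran")   // now Boolean
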
diff --git a/src/compiler/scala/tools/cmd/Parser.scala b/src/compiler/scala/tools/cmd/Parser.scala
deleted file mode 100644
index 6e2afa4..0000000
--- a/src/compiler/scala/tools/cmd/Parser.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package cmd
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.input.CharArrayReader.EofCh
-
-/** A simple (overly so) command line parser.
- *  !!! This needs a thorough test suite to make sure quoting is
- *  done correctly and portably.
- */
-trait ParserUtil extends Parsers {
-  class ParserPlus[+T](underlying: Parser[T]) {
-    def !~>[U](p: => Parser[U]): Parser[U] = (underlying ~! p) ^^ { case a~b  => b }
-    def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! p) ^^ { case a~b  => a }
-  }
-  protected implicit def parser2parserPlus[T](p: Parser[T]): ParserPlus[T] = new ParserPlus(p)
-}
-
-object Parser extends RegexParsers with ParserUtil {
-  override def skipWhitespace = false
-
-  def elemExcept(xs: Elem*): Parser[Elem] = elem("elemExcept", x => x != EofCh && !(xs contains x))
-  def elemOf(xs: Elem*): Parser[Elem]     = elem("elemOf", xs contains _)
-  def escaped(ch: Char): Parser[String] = "\\" + ch
-  def mkQuoted(ch: Char): Parser[String] = (
-      elem(ch) !~> rep(escaped(ch) | elemExcept(ch)) <~ ch ^^ (_.mkString)
-    | failure("Unmatched %s in input." format ch)
-  )
-
-  /** Apparently windows can't deal with the quotes sticking around. */
-  lazy val squoted: Parser[String] = mkQuoted('\'')   // ^^ (x => "'%s'" format x)
-  lazy val dquoted: Parser[String] = mkQuoted('"')    // ^^ (x => "\"" + x + "\"")
-  lazy val token: Parser[String]   = """\S+""".r
-
-  lazy val argument: Parser[String] = squoted | dquoted | token
-  lazy val commandLine: Parser[List[String]] = phrase(repsep(argument, whiteSpace))
-
-  class ParseException(msg: String) extends RuntimeException(msg)
-
-  def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x))
-  def tokenize(line: String, errorFn: String => Unit): List[String] = {
-    parse(commandLine, line.trim) match {
-      case Success(args, _)     => args
-      case NoSuccess(msg, rest) => errorFn(msg) ; Nil
-    }
-  }
-}
diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala
index bcbb454..62b6c89 100644
--- a/src/compiler/scala/tools/cmd/Reference.scala
+++ b/src/compiler/scala/tools/cmd/Reference.scala
@@ -23,14 +23,13 @@ trait Reference extends Spec {
   def helpMsg     = options.helpMsg
   def propertyArgs: List[String] = Nil
 
-  def isUnaryOption(s: String)  = unary contains toOpt(s)
-  def isBinaryOption(s: String) = binary contains toOpt(s)
-  def isExpandOption(s: String) = expansionMap contains toOpt(s)
-  def isAnyOption(s: String)    = isUnaryOption(s) || isBinaryOption(s) || isExpandOption(s)
+  def isUnaryOption(s: String)  = unary contains fromOpt(s)
+  def isBinaryOption(s: String) = binary contains fromOpt(s)
+  def isExpandOption(s: String) = expansionMap contains fromOpt(s)
 
-  def expandArg(arg: String)      = expansionMap.getOrElse(fromOpt(arg), List(arg))
+  def expandArg(arg: String): List[String] = expansionMap.getOrElse(fromOpt(arg), List(arg))
 
-  protected def help(str: => String)        = addHelp(() => str)
+  protected def help(str: => String): Unit = addHelp(() => str)
 
   type ThisCommandLine <: CommandLine
 
@@ -46,7 +45,7 @@ object Reference {
   val MaxLine = 80
 
   class Accumulators() {
-    private var _help     = new ListBuffer[() => String]
+    private val _help    = new ListBuffer[() => String]
     private var _unary   = List[String]()
     private var _binary  = List[String]()
     private var _expand  = Map[String, List[String]]()
@@ -54,20 +53,20 @@ object Reference {
     def helpFormatStr     = "    %-" + longestArg + "s %s"
     def defaultFormatStr  = (" " * (longestArg + 7)) + "%s"
 
-    def addUnary(s: String)   = _unary +:= s
-    def addBinary(s: String)  = _binary +:= s
+    def addUnary(s: String): Unit  = _unary +:= s
+    def addBinary(s: String): Unit = _binary +:= s
 
     def addExpand(opt: String, expanded: List[String]) =
       _expand += (opt -> expanded)
 
-    def mapHelp(g: String => String) = {
+    def mapHelp(g: String => String): Unit = {
       val idx = _help.length - 1
       val f = _help(idx)
 
       _help(idx) = () => g(f())
     }
 
-    def addHelp(f: () => String)      = _help += f
+    def addHelp(f: () => String): Unit = _help += f
     def addHelpAlias(f: () => String) = mapHelp { s =>
       val str = "alias for '%s'" format f()
       def noHelp = (helpFormatStr.format("", "")).length == s.length
@@ -75,13 +74,13 @@ object Reference {
 
       s + str2
     }
-    def addHelpDefault(f: () => String) = mapHelp { s =>
+    def addHelpDefault(f: () => String): Unit = mapHelp { s =>
       val str = "(default: %s)" format f()
 
       if (s.length + str.length < MaxLine) s + " " + str
       else defaultFormatStr.format(s, str)
     }
-    def addHelpEnvDefault(name: String) = mapHelp { s =>
+    def addHelpEnvDefault(name: String): Unit = mapHelp { s =>
       val line1     = "%s (default: %s)".format(s, name)
       val envNow    = envOrNone(name) map ("'" + _ + "'") getOrElse "unset"
       val line2     = defaultFormatStr.format("Currently " + envNow)
diff --git a/src/compiler/scala/tools/cmd/Spec.scala b/src/compiler/scala/tools/cmd/Spec.scala
index b761601..a1cb31f 100644
--- a/src/compiler/scala/tools/cmd/Spec.scala
+++ b/src/compiler/scala/tools/cmd/Spec.scala
@@ -15,7 +15,7 @@ trait Spec {
   def programInfo: Spec.Info
 
   protected def help(str: => String): Unit
-  protected def heading(str: => String): Unit = help("\n  " + str)
+  protected def heading(str: => String): Unit = help(s"\n  $str")
 
   type OptionMagic <: Opt.Implicit
   protected implicit def optionMagicAdditions(s: String): OptionMagic
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index dbd2195..842851b 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -6,24 +6,23 @@
 package scala.tools.cmd
 package gen
 
-/** Code generation of the AnyVal types and their companions.
- */
+/** Code generation of the AnyVal types and their companions. */
 trait AnyValReps {
   self: AnyVals =>
 
-  sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String) extends AnyValRep(name,repr,javaEquiv) {
+  sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String)
+      extends AnyValRep(name,repr,javaEquiv) {
 
-    case class Op(val op : String, val doc : String)
+    case class Op(op : String, doc : String)
 
     private def companionCoercions(tos: AnyValRep*) = {
       tos.toList map (to =>
-        """implicit def @javaequiv at 2%s(x: @name@): %s = x.to%s""".format(to.javaEquiv, to.name, to.name)
+        s"implicit def @javaequiv at 2${to.javaEquiv}(x: @name@): ${to.name} = x.to${to.name}"
       )
     }
-    def coercionCommentExtra = ""
-    def coercionComment = """
-  /** Language mandated coercions from @name@ to "wider" types.%s
-   */""".format(coercionCommentExtra)
+    def coercionComment =
+"""/** Language mandated coercions from @name@ to "wider" types. */
+import scala.language.implicitConversions"""
 
     def implicitCoercions: List[String] = {
       val coercions = this match {
@@ -35,18 +34,14 @@ trait AnyValReps {
         case _     => Nil
       }
       if (coercions.isEmpty) Nil
-      else coercionComment :: coercions
+      else coercionComment.lines.toList ++ coercions
     }
 
     def isCardinal: Boolean = isIntegerType(this)
     def unaryOps = {
       val ops = List(
-        Op("+", "/**\n" +
-                " * Returns this value, unmodified.\n" +
-                " */"),
-        Op("-", "/**\n" +
-                " * Returns the negation of this value.\n" +
-                " */"))
+        Op("+", "/** Returns this value, unmodified. */"),
+        Op("-", "/** Returns the negation of this value. */"))
 
       if(isCardinal)
         Op("~", "/**\n" +
@@ -95,7 +90,7 @@ trait AnyValReps {
                      "  */"))
       else Nil
 
-    def shiftOps            =
+    def shiftOps =
       if (isCardinal)
         List(
           Op("<<",  "/**\n" +
@@ -127,20 +122,20 @@ trait AnyValReps {
                        "  */"))
       else Nil
 
-    def comparisonOps       = List(
-      Op("==", "/**\n  * Returns `true` if this value is equal to x, `false` otherwise.\n  */"),
-      Op("!=", "/**\n  * Returns `true` if this value is not equal to x, `false` otherwise.\n  */"),
-      Op("<",  "/**\n  * Returns `true` if this value is less than x, `false` otherwise.\n  */"),
-      Op("<=", "/**\n  * Returns `true` if this value is less than or equal to x, `false` otherwise.\n  */"),
-      Op(">",  "/**\n  * Returns `true` if this value is greater than x, `false` otherwise.\n  */"),
-      Op(">=", "/**\n  * Returns `true` if this value is greater than or equal to x, `false` otherwise.\n  */"))
+    def comparisonOps = List(
+      Op("==", "/** Returns `true` if this value is equal to x, `false` otherwise. */"),
+      Op("!=", "/** Returns `true` if this value is not equal to x, `false` otherwise. */"),
+      Op("<",  "/** Returns `true` if this value is less than x, `false` otherwise. */"),
+      Op("<=", "/** Returns `true` if this value is less than or equal to x, `false` otherwise. */"),
+      Op(">",  "/** Returns `true` if this value is greater than x, `false` otherwise. */"),
+      Op(">=", "/** Returns `true` if this value is greater than or equal to x, `false` otherwise. */"))
 
     def otherOps = List(
-      Op("+", "/**\n  * Returns the sum of this value and `x`.\n  */"),
-      Op("-", "/**\n  * Returns the difference of this value and `x`.\n  */"),
-      Op("*", "/**\n  * Returns the product of this value and `x`.\n  */"),
-      Op("/", "/**\n  * Returns the quotient of this value and `x`.\n  */"),
-      Op("%", "/**\n  * Returns the remainder of the division of this value by `x`.\n  */"))
+      Op("+", "/** Returns the sum of this value and `x`. */"),
+      Op("-", "/** Returns the difference of this value and `x`. */"),
+      Op("*", "/** Returns the product of this value and `x`. */"),
+      Op("/", "/** Returns the quotient of this value and `x`. */"),
+      Op("%", "/** Returns the remainder of the division of this value by `x`. */"))
 
     // Given two numeric value types S and T , the operation type of S and T is defined as follows:
     // If both S and T are subrange types then the operation type of S and T is Int.
@@ -183,7 +178,7 @@ trait AnyValReps {
     }
     def objectLines = {
       val comp = if (isCardinal) cardinalCompanion else floatingCompanion
-      (comp + allCompanions + "\n" + nonUnitCompanions).trim.lines.toList ++ implicitCoercions map interpolate
+      interpolate(comp + allCompanions + "\n" + nonUnitCompanions).trim.lines.toList ++ (implicitCoercions map interpolate)
     }
 
     /** Makes a set of binary operations based on the given set of ops, args, and resultFn.
@@ -209,11 +204,14 @@ trait AnyValReps {
     )
 
     def lcname = name.toLowerCase
+    def boxedSimpleName = this match {
+      case C => "Character"
+      case I => "Integer"
+      case _ => name
+    }
     def boxedName = this match {
       case U => "scala.runtime.BoxedUnit"
-      case C => "java.lang.Character"
-      case I => "java.lang.Integer"
-      case _ => "java.lang." + name
+      case _ => "java.lang." + boxedSimpleName
     }
     def zeroRep = this match {
       case L => "0L"
@@ -228,7 +226,13 @@ trait AnyValReps {
     def indentN(s: String) = s.lines map indent mkString "\n"
 
     def boxUnboxImpls = Map(
+      "@boxRunTimeDoc@" -> """
+ *  Runtime implementation determined by `scala.runtime.BoxesRunTime.boxTo%s`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *""".format(boxedSimpleName),
       "@boxImpl@"   -> "%s.valueOf(x)".format(boxedName),
+      "@unboxRunTimeDoc@" -> """
+ *  Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxTo%s`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *""".format(name),
       "@unboxImpl@" -> "x.asInstanceOf[%s].%sValue()".format(boxedName, lcname),
       "@unboxDoc@"  -> "the %s resulting from calling %sValue() on `x`".format(name, lcname)
     )
@@ -269,8 +273,7 @@ trait AnyValReps {
 }
 
 trait AnyValTemplates {
-  def headerTemplate = ("""
-/*                     __                                               *\
+  def headerTemplate = """/*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
 **    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
@@ -278,12 +281,13 @@ trait AnyValTemplates {
 **                          |/                                          **
 \*                                                                      */
 
-%s
-package scala
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
 
-import scala.language.implicitConversions
+package scala
 
-""".trim.format(timestampString) + "\n\n")
+"""
 
   def classDocTemplate = ("""
 /** `@name@`@representation@ (equivalent to Java's `@javaequiv@` primitive type) is a
@@ -295,11 +299,9 @@ import scala.language.implicitConversions
  */
 """.trim + "\n")
 
-  def timestampString = "// DO NOT EDIT, CHANGES WILL BE LOST.\n"
-
   def allCompanions = """
 /** Transform a value type into a boxed reference type.
- *
+ *@boxRunTimeDoc@
  *  @param  x   the @name@ to be boxed
  *  @return     a @boxed@ offering `x` as its underlying value.
  */
@@ -308,27 +310,24 @@ def box(x: @name@): @boxed@ = @boxImpl@
 /** Transform a boxed type into a value type.  Note that this
  *  method is not typesafe: it accepts any Object, but will throw
  *  an exception if the argument is not a @boxed@.
- *
+ *@unboxRunTimeDoc@
  *  @param  x   the @boxed@ to be unboxed.
  *  @throws     ClassCastException  if the argument is not a @boxed@
  *  @return     @unboxDoc@
  */
 def unbox(x: java.lang.Object): @name@ = @unboxImpl@
 
-/** The String representation of the scala.@name@ companion object.
- */
+/** The String representation of the scala.@name@ companion object. */
 override def toString = "object scala. at name@"
 """
 
   def nonUnitCompanions = ""  // todo
 
   def cardinalCompanion = """
-/** The smallest value representable as a @name@.
- */
+/** The smallest value representable as a @name@. */
 final val MinValue = @boxed@.MIN_VALUE
 
-/** The largest value representable as a @name@.
- */
+/** The largest value representable as a @name@. */
 final val MaxValue = @boxed@.MAX_VALUE
 """
 
@@ -363,18 +362,16 @@ class AnyVals extends AnyValReps with AnyValTemplates {
   object D extends AnyValNum("Double",  Some("64-bit IEEE-754 floating point number"), "double")
   object Z extends AnyValRep("Boolean", None,                                          "boolean") {
     def classLines = """
-/**
- * Negates a Boolean expression.
- *
- * - `!a` results in `false` if and only if `a` evaluates to `true` and
- * - `!a` results in `true` if and only if `a` evaluates to `false`.
- *
- * @return the negated expression
- */
+/** Negates a Boolean expression.
+  *
+  * - `!a` results in `false` if and only if `a` evaluates to `true` and
+  * - `!a` results in `true` if and only if `a` evaluates to `false`.
+  *
+  * @return the negated expression
+  */
 def unary_! : Boolean
 
-/**
-  * Compares two Boolean expressions and returns `true` if they evaluate to the same value.
+/** Compares two Boolean expressions and returns `true` if they evaluate to the same value.
   *
   * `a == b` returns `true` if and only if
   *  - `a` and `b` are `true` or
@@ -391,8 +388,7 @@ def ==(x: Boolean): Boolean
   */
 def !=(x: Boolean): Boolean
 
-/**
-  * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
   *
   * `a || b` returns `true` if and only if
   *  - `a` is `true` or
@@ -405,8 +401,7 @@ def !=(x: Boolean): Boolean
   */
 def ||(x: Boolean): Boolean
 
-/**
-  * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if both of them evaluate to true.
   *
   * `a && b` returns `true` if and only if
   *  - `a` and `b` are `true`.
@@ -421,8 +416,7 @@ def &&(x: Boolean): Boolean
 // def ||(x: => Boolean): Boolean
 // def &&(x: => Boolean): Boolean
 
-/**
-  * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
   *
   * `a | b` returns `true` if and only if
   *  - `a` is `true` or
@@ -433,8 +427,7 @@ def &&(x: Boolean): Boolean
   */
 def |(x: Boolean): Boolean
 
-/**
-  * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if both of them evaluate to true.
   *
   * `a & b` returns `true` if and only if
   *  - `a` and `b` are `true`.
@@ -443,8 +436,7 @@ def |(x: Boolean): Boolean
   */
 def &(x: Boolean): Boolean
 
-/**
-  * Compares two Boolean expressions and returns `true` if they evaluate to a different value.
+/** Compares two Boolean expressions and returns `true` if they evaluate to a different value.
   *
   * `a ^ b` returns `true` if and only if
   *  - `a` is `true` and `b` is `false` or
@@ -471,7 +463,9 @@ override def getClass(): Class[Boolean] = null
     def objectLines = interpolate(allCompanions).lines.toList
 
     override def boxUnboxImpls = Map(
+      "@boxRunTimeDoc@" -> "",
       "@boxImpl@"   -> "scala.runtime.BoxedUnit.UNIT",
+      "@unboxRunTimeDoc@" -> "",
       "@unboxImpl@" -> "()",
       "@unboxDoc@"  -> "the Unit value ()"
     )
@@ -488,5 +482,3 @@ override def getClass(): Class[Boolean] = null
 
   def make() = values map (x => (x.name, x.make()))
 }
-
-object AnyVals extends AnyVals { }
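
The templates above are keyed by @...@ placeholders (@name@, @boxed@, @boxImpl@, and so on) that the generator expands once per value class. A minimal sketch of that substitution idea, using a hypothetical expand helper rather than the real interpolate machinery:

    object TemplateSketch {
      // Hypothetical stand-in for the interpolation step: replace each placeholder with its binding.
      def expand(template: String, bindings: Map[String, String]): String =
        bindings.foldLeft(template) { case (t, (key, value)) => t.replace(key, value) }

      def main(args: Array[String]): Unit = {
        val intBindings = Map("@name@" -> "Int", "@boxed@" -> "java.lang.Integer")
        println(expand("final val MinValue = @boxed@.MIN_VALUE   // smallest @name@", intBindings))
        // final val MinValue = java.lang.Integer.MIN_VALUE   // smallest Int
      }
    }
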
diff --git a/src/compiler/scala/tools/cmd/gen/Codegen.scala b/src/compiler/scala/tools/cmd/gen/Codegen.scala
index 4ca9b6c..c3aa527 100644
--- a/src/compiler/scala/tools/cmd/gen/Codegen.scala
+++ b/src/compiler/scala/tools/cmd/gen/Codegen.scala
@@ -6,11 +6,9 @@
 package scala.tools.cmd
 package gen
 
-import scala.language.postfixOps
-
 class Codegen(args: List[String]) extends {
   val parsed = CodegenSpec(args: _*)
-} with CodegenSpec with Instance { }
+} with CodegenSpec with Instance
 
 object Codegen {
   def echo(msg: String) = Console println msg
@@ -23,7 +21,7 @@ object Codegen {
       return println (CodegenSpec.helpMsg)
 
     val out = outDir getOrElse { return println("--out is required.") }
-    val all = genall || (!anyvals && !products)
+    val all = genall || !anyvals
 
     echo("Generating sources into " + out)
 
@@ -31,7 +29,7 @@ object Codegen {
       val av = new AnyVals { }
 
       av.make() foreach { case (name, code ) =>
-        val file = out / (name + ".scala") toFile;
+        val file = (out / (name + ".scala")).toFile
         echo("Writing: " + file)
         file writeAll code
       }
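
The behavioural point in this hunk is that `(out / (name + ".scala")).toFile` is now an ordinary method call rather than a postfix expression, so the `scala.language.postfixOps` import and the trailing semicolon can go. The write loop itself is simple; a self-contained sketch of the same idea using java.nio (paths and contents are illustrative, not the generator's actual output) could be:

    import java.nio.file.{ Files, Path, Paths }
    import java.nio.charset.StandardCharsets

    object WriteGenerated {
      // Write each (name, code) pair to <outDir>/<name>.scala.
      def writeAll(outDir: Path, sources: Map[String, String]): Unit = {
        Files.createDirectories(outDir)
        sources foreach { case (name, code) =>
          val file = outDir.resolve(name + ".scala")   // plain call, no postfix operator needed
          println("Writing: " + file)
          Files.write(file, code.getBytes(StandardCharsets.UTF_8))
        }
      }

      def main(args: Array[String]): Unit =
        writeAll(Paths.get("target", "generated"), Map("Demo" -> "object Demo"))
    }
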
diff --git a/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala b/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
index 903517c..4b4a1e4 100644
--- a/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
+++ b/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
@@ -12,17 +12,11 @@ trait CodegenSpec extends Spec with Meta.StdOpts with Interpolation {
   def referenceSpec       = CodegenSpec
   def programInfo         = Spec.Info("codegen", "", "scala.tools.cmd.gen.Codegen")
 
-  import FromString.ExistingDir
-
   help("Usage: codegen [<options>]")
 
-  // val inDir    = "in" / "directory containing templates" --^ ExistingDir
   val outDir   = "out" / "directory for generated files" --^ ExistingDir
-  // val install  = "install" / "write source files directly to src/library/scala"
   val anyvals  = "anyvals" / "generate sources for AnyVal types" --?
-  val products = "products" / "generate sources for ProductN, FunctionN, etc." --?
   val genall   = "all" / "generate sources for everything" --?
-  val stamp    = "stamp" / "add a timestamp to the generated files" --?
 }
 
 object CodegenSpec extends CodegenSpec with Reference {
diff --git a/src/compiler/scala/tools/cmd/package.scala b/src/compiler/scala/tools/cmd/package.scala
index d605eca..9754bec 100644
--- a/src/compiler/scala/tools/cmd/package.scala
+++ b/src/compiler/scala/tools/cmd/package.scala
@@ -3,7 +3,8 @@
  * @author Paul Phillips
  */
 
-package scala.tools
+package scala
+package tools
 
 package object cmd {
   def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
@@ -12,19 +13,19 @@ package object cmd {
   implicit def implicitConversions = scala.language.implicitConversions
   implicit def postfixOps = scala.language.postfixOps
 
-  private[cmd] def debug(msg: String) = println(msg)
+  private[cmd] def debug(msg: String): Unit = println(msg)
 
   def runAndExit(body: => Unit): Nothing = {
     body
     sys.exit(0)
   }
 
-  def toOpt(s: String)              = if (s startsWith "--") s else "--" + s
-  def fromOpt(s: String)            = s stripPrefix "--"
-  def toArgs(line: String)          = Parser tokenize line
-  def fromArgs(args: List[String])  = args mkString " "
+  def toOpt(s: String): String             = if (s startsWith "--") s else "--" + s
+  def fromOpt(s: String): String           = s stripPrefix "--"
+  def toArgs(line: String): List[String]   = CommandLineParser tokenize line
+  def fromArgs(args: List[String]): String = args mkString " "
 
-  def stripQuotes(s: String) = {
+  def stripQuotes(s: String): String = {
     def isQuotedBy(c: Char) = s.length > 0 && s.head == c && s.last == c
     if (List('"', '\'') exists isQuotedBy) s.tail.init else s
   }
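
The helpers in this package object now carry explicit result types, but their behaviour is unchanged. Copied out of context for experimentation (these stand-alone copies are for illustration only), they behave like this:

    object CmdHelpersDemo {
      def toOpt(s: String): String   = if (s startsWith "--") s else "--" + s
      def fromOpt(s: String): String = s stripPrefix "--"
      def stripQuotes(s: String): String = {
        // A string counts as quoted only if the same quote character opens and closes it.
        def isQuotedBy(c: Char) = s.length > 0 && s.head == c && s.last == c
        if (List('"', '\'') exists isQuotedBy) s.tail.init else s
      }

      def main(args: Array[String]): Unit = {
        println(toOpt("verbose"))         // --verbose
        println(fromOpt("--verbose"))     // verbose
        println(stripQuotes("'a b c'"))   // a b c
      }
    }
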
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 355a1fd..c2caed7 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -5,12 +5,12 @@
 
 package scala.tools.nsc
 
-import util.FreshNameCreator
-import scala.reflect.internal.util.{ Position, NoPosition, BatchSourceFile, SourceFile, NoSourceFile }
+import scala.reflect.internal.util.{ SourceFile, NoSourceFile, FreshNameCreator }
 import scala.collection.mutable
 import scala.collection.mutable.{ LinkedHashSet, ListBuffer }
+import scala.tools.nsc.reporters.Reporter
 
-trait CompilationUnits { self: Global =>
+trait CompilationUnits { global: Global =>
 
   /** An object representing a missing compilation unit.
    */
@@ -26,34 +26,57 @@ trait CompilationUnits { self: Global =>
   class CompilationUnit(val source: SourceFile) extends CompilationUnitContextApi { self =>
 
     /** the fresh name creator */
-    var fresh: FreshNameCreator = new FreshNameCreator.Default
-
-    def freshTermName(prefix: String): TermName = newTermName(fresh.newName(prefix))
-    def freshTypeName(prefix: String): TypeName = newTypeName(fresh.newName(prefix))
+    implicit val fresh: FreshNameCreator     = new FreshNameCreator
+    def freshTermName(prefix: String = "x$") = global.freshTermName(prefix)
+    def freshTypeName(prefix: String)        = global.freshTypeName(prefix)
 
     /** the content of the compilation unit in tree form */
     var body: Tree = EmptyTree
 
+    /** The position of the first xml literal encountered while parsing this compilation unit.
+     * NoPosition if there were none. Write-once.
+     */
+    private[this] var _firstXmlPos: Position = NoPosition
+
+    /** Record that we encountered XML. Should only be called once. */
+    protected[nsc] def encounteredXml(pos: Position) = _firstXmlPos = pos
+
+    /** Does this unit contain XML? */
+    def hasXml = _firstXmlPos ne NoPosition
+
+    /** Position of first XML literal in this unit. */
+    def firstXmlPos = _firstXmlPos
+
     def exists = source != NoSourceFile && source != null
 
-//    def parseSettings() = {
-//      val argsmarker = "SCALAC_ARGS"
-//      if(comments nonEmpty) {
-//        val pragmas = comments find (_.text.startsWith("//#")) // only parse first one
-//        pragmas foreach { p =>
-//          val i = p.text.indexOf(argsmarker)
-//          if(i > 0)
-//        }
-//      }
-//    }
     /** Note: depends now contains toplevel classes.
      *  To get their sourcefiles, you need to dereference with .sourcefile
      */
-    val depends = mutable.HashSet[Symbol]()
+    private[this] val _depends = mutable.HashSet[Symbol]()
+    // SBT compatibility (SI-6875)
+    //
+    // imagine we have a file named A.scala, which defines a trait named Foo and a module named Main
+    // Main contains a call to a macro, which calls compileLate to define a mock for Foo
+    // compileLate creates a virtual file Virt35af32.scala, which contains a class named FooMock extending Foo,
+    // and macro expansion instantiates FooMock. the stage is now set. let's see what happens next.
+    //
+    // without this workaround in scalac or without being patched itself, sbt will think that
+    // * Virt35af32 depends on A (because it extends Foo from A)
+    // * A depends on Virt35af32 (because it contains a macro expansion referring to FooMock from Virt35af32)
+    //
+    // after compiling A.scala, SBT will notice that it has a new source file named Virt35af32.
+    // it will also think that this file hasn't yet been compiled and since A depends on it
+    // it will think that A needs to be recompiled.
+    //
+    // recompilation will lead to another macro expansion. that expansion might in turn choose to create a fresh mock,
+    // producing another virtual file, say, Virtee509a, which will again trick SBT into thinking that A needs a recompile,
+    // which will lead to another macro expansion, which will produce another virtual file and so on
+    def depends = if (exists && !source.file.isVirtual) _depends else mutable.HashSet[Symbol]()
 
     /** so we can relink
      */
-    val defined = mutable.HashSet[Symbol]()
+    private[this] val _defined = mutable.HashSet[Symbol]()
+    def defined = if (exists && !source.file.isVirtual) _defined else mutable.HashSet[Symbol]()
 
     /** Synthetic definitions generated by namer, eliminated by typer.
      */
@@ -67,7 +90,7 @@ trait CompilationUnits { self: Global =>
         debuglog(s"removing synthetic $sym from $self")
         map -= sym
       }
-      def get(sym: Symbol): Option[Tree] = logResultIf[Option[Tree]](s"found synthetic for $sym in $self", _.isDefined) {
+      def get(sym: Symbol): Option[Tree] = debuglogResultIf[Option[Tree]](s"found synthetic for $sym in $self", _.isDefined) {
         map get sym
       }
       def keys: Iterable[Symbol] = map.keys
@@ -75,6 +98,11 @@ trait CompilationUnits { self: Global =>
       override def toString = map.toString
     }
 
+    // namer calls typer.computeType(rhs) on DefDef / ValDef when tpt is empty. the result
+    // is cached here and re-used in typedDefDef / typedValDef
+    // Also used to cache imports type-checked by namer.
+    val transformed = new mutable.AnyRefMap[Tree, Tree]
+
     /** things to check at end of compilation unit */
     val toCheck = new ListBuffer[() => Unit]
 
@@ -95,6 +123,8 @@ trait CompilationUnits { self: Global =>
      */
     val icode: LinkedHashSet[icodes.IClass] = new LinkedHashSet
 
+    def reporter = global.reporter
+
     def echo(pos: Position, msg: String) =
       reporter.echo(pos, msg)
 
@@ -123,18 +153,5 @@ trait CompilationUnits { self: Global =>
     lazy val isJava = source.file.name.endsWith(".java")
 
     override def toString() = source.toString()
-
-    def clear() {
-      fresh = new FreshNameCreator.Default
-      body = EmptyTree
-      depends.clear()
-      defined.clear()
-      synthetics.clear()
-      toCheck.clear()
-      checkedFeatures = Set()
-      icode.clear()
-    }
   }
 }
-
-
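
The long comment above explains the SI-6875 workaround: for a unit whose source is a virtual file (such as one produced by compileLate during macro expansion), `depends` and `defined` hand back an empty set, so sbt never records dependencies on the synthetic source and never schedules the spurious recompile. A much-simplified sketch of that guarding idea, with invented stand-ins for SourceFile and Symbol, is:

    object VirtualGuardDemo {
      import scala.collection.mutable
      final case class Source(name: String, isVirtual: Boolean)

      final class DemoUnit(val source: Source) {
        private[this] val _depends = mutable.HashSet[String]()
        def recordDependency(sym: String): Unit = _depends += sym
        // Expose dependencies only for real, on-disk sources.
        def depends: mutable.HashSet[String] =
          if (!source.isVirtual) _depends else mutable.HashSet[String]()
      }

      def main(args: Array[String]): Unit = {
        val real    = new DemoUnit(Source("A.scala", isVirtual = false))
        val virtual = new DemoUnit(Source("Virt35af32.scala", isVirtual = true))
        real.recordDependency("Foo"); virtual.recordDependency("Foo")
        println(real.depends)      // Set(Foo)
        println(virtual.depends)   // Set() -- invisible to the build tool
      }
    }
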
diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala
index 731f692..3017d8c 100644
--- a/src/compiler/scala/tools/nsc/CompileClient.scala
+++ b/src/compiler/scala/tools/nsc/CompileClient.scala
@@ -3,12 +3,12 @@
  * @author  Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 
-import java.io.{ BufferedReader, File, InputStreamReader, PrintWriter }
 import settings.FscSettings
 import scala.tools.util.CompileOutputCommon
-import sys.SystemProperties.preferIPv4Stack
+import scala.sys.SystemProperties.preferIPv4Stack
 
 /** The client part of the fsc offline compiler.  Instead of compiling
  *  things itself, it sends requests to a CompileServer.
@@ -27,12 +27,12 @@ class StandardCompileClient extends HasCompileSocket with CompileOutputCommon {
     val settings     = new FscSettings(Console.println)
     val command      = new OfflineCompilerCommand(args.toList, settings)
     val shutdown     = settings.shutdown.value
-    val extraVmArgs  = if (settings.preferIPv4.value) List("-D%s=true".format(preferIPv4Stack.key)) else Nil
+    val extraVmArgs  = if (settings.preferIPv4) List("-D%s=true".format(preferIPv4Stack.key)) else Nil
 
     val vmArgs  = settings.jvmargs.unparse ++ settings.defines.unparse ++ extraVmArgs
     val fscArgs = args.toList ++ command.extraFscArgs
 
-    if (settings.version.value) {
+    if (settings.version) {
       Console println versionMsg
       return true
     }
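
Two small things change here: the now-unused java.io imports go away, and boolean settings such as settings.preferIPv4 and settings.version are used directly as conditions (the setting converts to its Boolean value). The forwarded JVM flag is built from the standard java.net.preferIPv4Stack property; a sketch of just that piece (the surrounding names are illustrative):

    object VmArgsDemo {
      // Standard JVM property controlling the IPv4 preference.
      val preferIPv4Key = "java.net.preferIPv4Stack"

      def extraVmArgs(preferIPv4: Boolean): List[String] =
        if (preferIPv4) List("-D%s=true".format(preferIPv4Key)) else Nil

      def main(args: Array[String]): Unit = {
        println(extraVmArgs(preferIPv4 = true))    // List(-Djava.net.preferIPv4Stack=true)
        println(extraVmArgs(preferIPv4 = false))   // List()
      }
    }
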
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index 7a0a072..6f068e1 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -5,7 +5,7 @@
 
 package scala.tools.nsc
 
-import java.io.{ BufferedOutputStream, FileOutputStream, PrintStream }
+import java.io.PrintStream
 import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
 import scala.reflect.internal.util.FakePos //Position
 import scala.tools.util.SocketServer
@@ -29,8 +29,6 @@ class StandardCompileServer extends SocketServer {
   var shutdown = false
   var verbose = false
 
-  val versionMsg = "Fast " + Properties.versionMsg
-
   val MaxCharge = 0.8
 
   private val runtime = Runtime.getRuntime()
@@ -58,9 +56,6 @@ class StandardCompileServer extends SocketServer {
     (totalMemory - freeMemory).toDouble / maxMemory.toDouble > MaxCharge
   }
 
-  protected def newOfflineCompilerCommand(arguments: List[String], settings: FscSettings): OfflineCompilerCommand =
-    new OfflineCompilerCommand(arguments, settings)
-
   /** Problematically, Settings are only considered equal if every setting
    *  is exactly equal.  In fsc this immediately breaks down because the randomly
    *  chosen temporary outdirs differ between client and server.  Among other
@@ -90,9 +85,9 @@ class StandardCompileServer extends SocketServer {
     if (input == null || password != guessedPassword)
       return
 
-    val args        = input.split("\0", -1).toList
+    val args        = input.split("\u0000", -1).toList
     val newSettings = new FscSettings(fscError)
-    val command     = newOfflineCompilerCommand(args, newSettings)
+    val command     = new OfflineCompilerCommand(args, newSettings)
     this.verbose    = newSettings.verbose.value
 
     info("Settings after normalizing paths: " + newSettings)
@@ -120,7 +115,7 @@ class StandardCompileServer extends SocketServer {
 
     reporter = new ConsoleReporter(newSettings, in, out) {
       // disable prompts, so that compile server cannot block
-      override def displayPrompt = ()
+      override def displayPrompt() = ()
     }
     def isCompilerReusable: Boolean = {
       if (compiler == null) {
@@ -162,7 +157,7 @@ class StandardCompileServer extends SocketServer {
       }
     }
     reporter.printSummary()
-    if (isMemoryFullEnough) {
+    if (isMemoryFullEnough()) {
       info("Nulling out compiler due to memory utilization.")
       clearCompiler()
     }
@@ -177,9 +172,9 @@ object CompileServer extends StandardCompileServer {
   private def createRedirect(filename: String) =
     new PrintStream((redirectDir / filename).createFile().bufferedOutput())
 
-  def main(args: Array[String]) = 
+  def main(args: Array[String]) =
     execute(() => (), args)
-  
+
   /**
    * Used for internal testing. The callback is called upon
    * server start, notifying the caller that the server is
@@ -204,7 +199,7 @@ object CompileServer extends StandardCompileServer {
         compileSocket setPort port
         startupCallback()
         run()
-    
+
         compileSocket deletePort port
       }
     }
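
Besides switching the argument separator to the explicit escape "\u0000" and dropping the unused newOfflineCompilerCommand hook, the server keeps its memory-pressure check: when more than MaxCharge of the maximum heap is in use it nulls out the cached compiler. The arithmetic is plain Runtime accounting; a stand-alone sketch:

    object MemoryCheckDemo {
      val MaxCharge = 0.8   // same threshold as the compile server

      // True when the used fraction of the maximum heap exceeds MaxCharge.
      def isMemoryFullEnough(): Boolean = {
        val runtime = Runtime.getRuntime()
        val used    = runtime.totalMemory() - runtime.freeMemory()
        used.toDouble / runtime.maxMemory().toDouble > MaxCharge
      }

      def main(args: Array[String]): Unit =
        println("memory pressure high: " + isMemoryFullEnough())
    }
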
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index 4051bda..c4f06b5 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -5,13 +5,9 @@
 
 package scala.tools.nsc
 
-import java.io.{ IOException, FileNotFoundException, PrintWriter, FileOutputStream }
-import java.io.{ BufferedReader, FileReader }
-import java.util.regex.Pattern
-import java.net._
+import java.io.{ FileNotFoundException, PrintWriter, FileOutputStream }
 import java.security.SecureRandom
 import io.{ File, Path, Directory, Socket }
-import scala.util.control.Exception.catching
 import scala.tools.util.CompileOutputCommon
 import scala.reflect.internal.util.StringOps.splitWhere
 import scala.sys.process._
@@ -28,7 +24,7 @@ trait HasCompileSocket {
 
     sock.applyReaderAndWriter { (in, out) =>
       out println (compileSocket getPassword sock.getPort())
-      out println (args mkString "\0")
+      out println (args mkString "\u0000")
 
       def loop(): Boolean = in.readLine() match {
         case null => noErrors
@@ -117,7 +113,7 @@ class CompileSocket extends CompileOutputCommon {
    */
   def getPort(vmArgs: String): Int = {
     val maxPolls = 300
-    val sleepTime = 25
+    val sleepTime = 25L
 
     var attempts = 0
     var port = pollPort()
@@ -156,9 +152,9 @@ class CompileSocket extends CompileOutputCommon {
     * cannot be established.
     */
   def getOrCreateSocket(vmArgs: String, create: Boolean = true): Option[Socket] = {
-    val maxMillis = 10 * 1000   // try for 10 seconds
-    val retryDelay = 50
-    val maxAttempts = maxMillis / retryDelay
+    val maxMillis = 10L * 1000   // try for 10 seconds
+    val retryDelay = 50L
+    val maxAttempts = (maxMillis / retryDelay).toInt
 
     def getsock(attempts: Int): Option[Socket] = attempts match {
       case 0    => warn("Unable to establish connection to compilation daemon") ; None
@@ -190,7 +186,7 @@ class CompileSocket extends CompileOutputCommon {
     catch { case _: NumberFormatException => None }
 
   def getSocket(serverAdr: String): Socket = (
-    for ((name, portStr) <- splitWhere(serverAdr, _ == ':', true) ; port <- parseInt(portStr)) yield
+    for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield
       getSocket(name, port)
   ) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr)
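
The sleep and retry constants become Longs so the arithmetic stays in Long, and the boolean passed to splitWhere is now named (doDropIndex = true), documenting that the ':' itself is dropped from the port substring. The address parsing can be sketched without the internal splitWhere helper (the method names here are illustrative):

    object ServerAddrDemo {
      // Split "host:port" at the first ':' and parse the port, dropping the separator.
      def parse(serverAdr: String): Option[(String, Int)] =
        serverAdr.indexOf(':') match {
          case -1 => None
          case i  =>
            val (name, portStr) = (serverAdr.take(i), serverAdr.drop(i + 1))
            try Some((name, portStr.toInt))
            catch { case _: NumberFormatException => None }
        }

      def main(args: Array[String]): Unit = {
        println(parse("localhost:3927"))   // Some((localhost,3927))
        println(parse("malformed"))        // None
      }
    }
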
 
diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala
index e994150..bab0768 100644
--- a/src/compiler/scala/tools/nsc/CompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala
@@ -5,7 +5,6 @@
 
 package scala.tools.nsc
 
-import scala.collection.mutable.ListBuffer
 import io.File
 
 /** A class representing command line info for scalac */
@@ -15,9 +14,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
 
   type Setting = Settings#Setting
 
-  /** file extensions of files that the compiler can process */
-  lazy val fileEndings = Properties.fileEndings
-
   private val processArgumentsResult =
     if (shouldProcessArguments) processArguments
     else (true, Nil)
@@ -31,7 +27,7 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
     |-- Notes on option parsing --
     |Boolean settings are always false unless set.
     |Where multiple values are accepted, they should be comma-separated.
-    |  example: -Xplugin:plugin1,plugin2
+    |  example: -Xplugin:option1,option2
     |<phases> means one or a comma-separated list of:
     |  (partial) phase names, phase ids, phase id ranges, or the string "all".
     |  example: -Xprint:all prints all phases.
@@ -41,8 +37,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
   """.stripMargin.trim + "\n"
 
   def shortUsage = "Usage: %s <options> <source files>" format cmdName
-  def createUsagePreface(shouldExplain: Boolean) =
-    if (shouldExplain) shortUsage + "\n" + explainAdvanced else ""
 
   /** Creates a help message for a subset of options based on cond */
   def createUsageMsg(cond: Setting => Boolean): String = {
@@ -82,27 +76,27 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
   }
 
   /** Messages explaining usage and options */
-  def usageMsg    = createUsageMsg("where possible standard", false, _.isStandard)
-  def xusageMsg   = createUsageMsg("Possible advanced", true, _.isAdvanced)
-  def yusageMsg   = createUsageMsg("Possible private", true, _.isPrivate)
-
-  // If any of these settings is set, the compiler shouldn't start;
-  // an informative message of some sort should be printed instead.
-  def shouldStopWithInfo = {
-    import settings.{ Setting => _, _ }
-    Set[BooleanSetting](help, Xhelp, Yhelp, showPlugins, showPhases) exists (_.value)
-  }
+  def usageMsg    = createUsageMsg("where possible standard", shouldExplain = false, _.isStandard)
+  def xusageMsg   = createUsageMsg("Possible advanced", shouldExplain = true, _.isAdvanced)
+  def yusageMsg   = createUsageMsg("Possible private", shouldExplain = true, _.isPrivate)
+
+  /** For info settings, compiler should just print a message and quit. */
+  def shouldStopWithInfo = settings.isInfo
 
   def getInfoMessage(global: Global): String = {
     import settings._
-    if (help.value)               usageMsg + global.pluginOptionsHelp
-    else if (Xhelp.value)         xusageMsg
-    else if (Yhelp.value)         yusageMsg
-    else if (showPlugins.value)   global.pluginDescriptions
-    else if (showPhases.value)    global.phaseDescriptions + (
-      if (debug.value) "\n" + global.phaseFlagDescriptions else ""
+    if (help)               usageMsg + global.pluginOptionsHelp
+    else if (Xhelp)         xusageMsg
+    else if (Yhelp)         yusageMsg
+    else if (showPlugins)   global.pluginDescriptions
+    else if (showPhases)    global.phaseDescriptions + (
+      if (debug) "\n" + global.phaseFlagDescriptions else ""
     )
-    else                          ""
+    else if (genPhaseGraph.isSetByUser) {
+      val components = global.phaseNames  // global.phaseDescriptors // one initializes
+      s"Phase graph of ${components.size} components output to ${genPhaseGraph.value}*.dot."
+    }
+    else                    ""
   }
 
   /**
@@ -128,6 +122,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
       case x                      => List(x)
     }
 
-    settings.processArguments(expandedArguments, true)
+    settings.processArguments(expandedArguments, processAll = true)
   }
 }
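
A recurring pattern in this file (and the ones that follow) is passing boolean arguments by name, e.g. createUsageMsg(..., shouldExplain = true, ...) and settings.processArguments(expandedArguments, processAll = true), so the call site says what the flag means. A tiny sketch of the readability difference, with invented signatures:

    object NamedArgsDemo {
      def createUsageMsg(label: String, shouldExplain: Boolean, verbose: Boolean): String =
        label +
          (if (shouldExplain) " (with advanced notes)" else "") +
          (if (verbose) " [verbose]" else "")

      def main(args: Array[String]): Unit = {
        // Positional booleans are opaque at the call site...
        println(createUsageMsg("standard", false, true))
        // ...naming them makes the intent readable, as in the hunks above.
        println(createUsageMsg("standard", shouldExplain = false, verbose = true))
      }
    }
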
diff --git a/src/compiler/scala/tools/nsc/CompilerRun.scala b/src/compiler/scala/tools/nsc/CompilerRun.scala
deleted file mode 100644
index 6746b08..0000000
--- a/src/compiler/scala/tools/nsc/CompilerRun.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Martin Odersky
- */
-
-package scala.tools.nsc
-
-class CompilerRun {
-  def firstPhase: Phase = NoPhase
-  def terminalPhase: Phase = NoPhase
-  def namerPhase: Phase = NoPhase
-  def typerPhase: Phase = NoPhase
-  def refchecksPhase: Phase = NoPhase
-  def explicitouterPhase: Phase = NoPhase
-  def erasurePhase: Phase = NoPhase
-  def flattenPhase: Phase = NoPhase
-  def mixinPhase: Phase = NoPhase
-  def icodePhase: Phase = NoPhase
-  def phaseNamed(name: String): Phase = NoPhase
-}
-
diff --git a/src/compiler/scala/tools/nsc/ConsoleWriter.scala b/src/compiler/scala/tools/nsc/ConsoleWriter.scala
index 5c5606e..6c16d19 100644
--- a/src/compiler/scala/tools/nsc/ConsoleWriter.scala
+++ b/src/compiler/scala/tools/nsc/ConsoleWriter.scala
@@ -13,9 +13,9 @@ import java.io.Writer
  *  @version 1.0
  */
 class ConsoleWriter extends Writer {
-  def close = flush
+  def close() = flush()
 
-  def flush = Console.flush
+  def flush() = Console.flush()
 
   def write(cbuf: Array[Char], off: Int, len: Int) {
     if (len > 0)
diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala
index 814bd58..3ac27a4 100644
--- a/src/compiler/scala/tools/nsc/Driver.scala
+++ b/src/compiler/scala/tools/nsc/Driver.scala
@@ -1,11 +1,12 @@
-package scala.tools.nsc
+package scala
+package tools.nsc
 
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
+import scala.tools.nsc.reporters.ConsoleReporter
 import Properties.{ versionString, copyrightString, residentPromptString }
-import scala.reflect.internal.util.{ BatchSourceFile, FakePos }
+import scala.reflect.internal.util.FakePos
 
 abstract class Driver {
-  
+
   val prompt = residentPromptString
 
   val versionMsg = "Scala compiler " +
@@ -41,7 +42,7 @@ abstract class Driver {
     command  = new CompilerCommand(args.toList, ss)
     settings = command.settings
 
-    if (settings.version.value) {
+    if (settings.version) {
       reporter.echo(versionMsg)
     } else if (processSettingsHook()) {
       val compiler = newCompiler()
@@ -68,4 +69,4 @@ abstract class Driver {
     sys.exit(if (reporter.hasErrors) 1 else 0)
   }
 
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/EvalLoop.scala b/src/compiler/scala/tools/nsc/EvalLoop.scala
index c4147fa..15a296c 100644
--- a/src/compiler/scala/tools/nsc/EvalLoop.scala
+++ b/src/compiler/scala/tools/nsc/EvalLoop.scala
@@ -14,7 +14,7 @@ trait EvalLoop {
   def loop(action: (String) => Unit) {
     @tailrec def inner() {
       Console.print(prompt)
-      val line = try Console.readLine catch { case _: EOFException => null }
+      val line = try Console.readLine() catch { case _: EOFException => null }
       if (line != null && line != "") {
         action(line)
         inner()
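
The only change here is the explicit empty-parameter call Console.readLine(), but the loop's shape is worth seeing in one piece: prompt, read, dispatch, recurse, and stop on end-of-input or an empty line. A self-contained sketch of the same shape (using scala.io.StdIn for the read):

    import scala.annotation.tailrec

    object EvalLoopDemo {
      val prompt = "> "

      def loop(action: String => Unit): Unit = {
        @tailrec def inner(): Unit = {
          Console.print(prompt)
          val line = scala.io.StdIn.readLine()   // null at end of input
          if (line != null && line != "") {
            action(line)
            inner()   // tail call, so the loop cannot grow the stack
          }
        }
        inner()
      }

      def main(args: Array[String]): Unit = loop(line => println("echo: " + line))
    }
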
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
index c8fd598..e710222 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
@@ -26,7 +26,7 @@ extends CompilerCommand(args, settings) {
   // change CompilerCommand behavior
   override def shouldProcessArguments: Boolean = false
 
-  private lazy val (_ok, targetAndArguments) = settings.processArguments(args, false)
+  private lazy val (_ok, targetAndArguments) = settings.processArguments(args, processAll = false)
   override def ok = _ok
   private def guessHowToRun(target: String): GenericRunnerCommand.HowToRun = {
     if (!ok) Error
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index 9c2db11..ad75d02 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -39,7 +39,4 @@ class GenericRunnerSettings(error: String => Unit) extends Settings(error) {
   val nc = BooleanSetting(
       "-nc",
       "do not use the fsc compilation daemon") withAbbreviation "-nocompdaemon"
-
-  @deprecated("Use `nc` instead", "2.9.0") def nocompdaemon = nc
-  @deprecated("Use `save` instead", "2.9.0") def savecompiled = save
 }
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index aea3e0d..35eab94 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -3,22 +3,24 @@
  * @author  Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools
+package nsc
 
 import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
 import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
+import java.util.UUID._
 import scala.compat.Platform.currentTime
-import scala.tools.util.PathResolver
 import scala.collection.{ mutable, immutable }
 import io.{ SourceReader, AbstractFile, Path }
 import reporters.{ Reporter, ConsoleReporter }
-import util.{ Exceptional, ClassPath, MergedClassPath, StatisticsInfo, ScalaClassLoader, returning }
-import scala.reflect.internal.util.{ NoPosition, OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
+import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString }
+import scala.reflect.ClassTag
+import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
 import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
-import settings.{ AestheticSettings }
+import scala.reflect.io.VirtualFile
 import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers }
 import symtab.classfile.Pickler
-import dependencies.DependencyAnalysis
 import plugins.Plugins
 import ast._
 import ast.parser._
@@ -26,13 +28,13 @@ import typechecker._
 import transform.patmat.PatternMatching
 import transform._
 import backend.icode.{ ICodes, GenICode, ICodeCheckers }
-import backend.{ ScalaPrimitives, Platform, MSILPlatform, JavaPlatform }
-import backend.jvm.{GenJVM, GenASM}
-import backend.opt.{ Inliners, InlineExceptionHandlers, ClosureElimination, DeadCodeElimination }
+import backend.{ ScalaPrimitives, Platform, JavaPlatform }
+import backend.jvm.GenBCode
+import backend.jvm.GenASM
+import backend.opt.{ Inliners, InlineExceptionHandlers, ConstantOptimization, ClosureElimination, DeadCodeElimination }
 import backend.icode.analysis._
 import scala.language.postfixOps
-import scala.reflect.internal.StdAttachments
-import scala.reflect.ClassTag
+import scala.tools.nsc.ast.{TreeGen => AstTreeGen}
 
 class Global(var currentSettings: Settings, var reporter: Reporter)
     extends SymbolTable
@@ -47,12 +49,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
   // the mirror --------------------------------------------------
 
   override def isCompilerUniverse = true
+  override val useOffsetPositions = !currentSettings.Yrangepos
+
+  type RuntimeClass = java.lang.Class[_]
+  implicit val RuntimeClassTag: ClassTag[RuntimeClass] = ClassTag[RuntimeClass](classOf[RuntimeClass])
 
   class GlobalMirror extends Roots(NoSymbol) {
     val universe: self.type = self
-    def rootLoader: LazyType = platform.rootLoader
+    def rootLoader: LazyType = new loaders.PackageLoader(classPath)
     override def toString = "compiler mirror"
   }
+  implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[GlobalMirror])
 
   lazy val rootMirror: Mirror = {
     val rm = new GlobalMirror
@@ -69,51 +76,49 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
 
   override def settings = currentSettings
 
+  /** Switch to turn on detailed type logs */
+  var printTypings = settings.Ytyperdebug.value
+
   def this(reporter: Reporter) =
     this(new Settings(err => reporter.error(null, err)), reporter)
 
   def this(settings: Settings) =
     this(settings, new ConsoleReporter(settings))
 
-  def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = gen.mkAttributedQualifier(tpe, termSym)
-
   def picklerPhase: Phase = if (currentRun.isDefined) currentRun.picklerPhase else NoPhase
 
+  def erasurePhase: Phase = if (currentRun.isDefined) currentRun.erasurePhase else NoPhase
+
   // platform specific elements
 
-  type ThisPlatform = Platform { val global: Global.this.type }
+  protected class GlobalPlatform extends {
+    val global: Global.this.type = Global.this
+    val settings: Settings = Global.this.settings
+  } with JavaPlatform
 
-  lazy val platform: ThisPlatform =
-    if (forMSIL) new { val global: Global.this.type = Global.this } with MSILPlatform
-    else new { val global: Global.this.type = Global.this } with JavaPlatform
+  type ThisPlatform = JavaPlatform { val global: Global.this.type }
+  lazy val platform: ThisPlatform  = new GlobalPlatform
 
-  type PlatformClassPath = ClassPath[platform.BinaryRepr]
+  type PlatformClassPath = ClassPath[AbstractFile]
   type OptClassPath = Option[PlatformClassPath]
 
   def classPath: PlatformClassPath = platform.classPath
 
   // sub-components --------------------------------------------------
 
-  /** Generate ASTs */
-  type TreeGen = scala.tools.nsc.ast.TreeGen
-
   /** Tree generation, usually based on existing symbols. */
   override object gen extends {
     val global: Global.this.type = Global.this
-  } with TreeGen {
+  } with AstTreeGen {
     def mkAttributedCast(tree: Tree, pt: Type): Tree =
       typer.typed(mkCast(tree, pt))
   }
 
-  /** Trees fresh from the oven, mostly for use by the parser. */
-  object treeBuilder extends {
-    val global: Global.this.type = Global.this
-  } with TreeBuilder {
-    def freshName(prefix: String): Name               = freshTermName(prefix)
-    def freshTermName(prefix: String): TermName       = currentUnit.freshTermName(prefix)
-    def freshTypeName(prefix: String): TypeName       = currentUnit.freshTypeName(prefix)
-    def o2p(offset: Int): Position                    = new OffsetPosition(currentUnit.source, offset)
-    def r2p(start: Int, mid: Int, end: Int): Position = rangePos(currentUnit.source, start, mid, end)
+  /** A spare instance of TreeBuilder left for backwards compatibility. */
+  lazy val treeBuilder: TreeBuilder { val global: Global.this.type } = new TreeBuilder {
+    val global: Global.this.type = Global.this;
+    def unit = currentUnit
+    def source = currentUnit.source
   }
 
   /** Fold constants */
@@ -136,6 +141,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     val global: Global.this.type = Global.this
   } with OverridingPairs
 
+  type SymbolPair = overridingPairs.SymbolPair
+
   // Optimizer components
 
   /** ICode analysis for optimization */
@@ -173,7 +180,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
         if (lastPrintedSource == source)
           println(": tree is unchanged since " + lastPrintedPhase)
         else {
-          lastPrintedPhase = phase.prev // since we're running inside "afterPhase"
+          lastPrintedPhase = phase.prev // since we're running inside "exitingPhase"
           lastPrintedSource = source
           println("")
           println(source)
@@ -224,25 +231,33 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
 
   // not deprecated yet, but a method called "error" imported into
   // nearly every trait really must go.  For now using globalError.
-  def error(msg: String)                = globalError(msg)
-  def inform(msg: String)               = reporter.echo(msg)
-  override def globalError(msg: String) = reporter.error(NoPosition, msg)
-  override def warning(msg: String)     =
-    if (settings.fatalWarnings.value) globalError(msg)
-    else reporter.warning(NoPosition, msg)
+  def error(msg: String) = globalError(msg)
+
+  override def inform(msg: String)      = inform(NoPosition, msg)
+  override def globalError(msg: String) = globalError(NoPosition, msg)
+  override def warning(msg: String)     = warning(NoPosition, msg)
+  override def deprecationWarning(pos: Position, msg: String) = currentUnit.deprecationWarning(pos, msg)
+
+  def globalError(pos: Position, msg: String) = reporter.error(pos, msg)
+  def warning(pos: Position, msg: String)     = if (settings.fatalWarnings) globalError(pos, msg) else reporter.warning(pos, msg)
+  def inform(pos: Position, msg: String)      = reporter.echo(pos, msg)
 
   // Getting in front of Predef's asserts to supplement with more info.
   // This has the happy side effect of masking the one argument forms
   // of assert and require (but for now I've reproduced them here,
   // because there are a million to fix.)
   @inline final def assert(assertion: Boolean, message: => Any) {
-    Predef.assert(assertion, supplementErrorMessage("" + message))
+    // calling Predef.assert would send a freshly allocated closure wrapping the one received as argument.
+    if (!assertion)
+      throw new java.lang.AssertionError("assertion failed: "+ supplementErrorMessage("" + message))
   }
   @inline final def assert(assertion: Boolean) {
     assert(assertion, "")
   }
   @inline final def require(requirement: Boolean, message: => Any) {
-    Predef.require(requirement, supplementErrorMessage("" + message))
+    // calling Predef.require would send a freshly allocated closure wrapping the one received as argument.
+    if (!requirement)
+      throw new IllegalArgumentException("requirement failed: "+ supplementErrorMessage("" + message))
   }
   @inline final def require(requirement: Boolean) {
     require(requirement, "")
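
The reworked assert and require test the condition before touching the by-name message, so no second closure is allocated just to hand the message on to Predef, and the message string is only built on failure. A stand-alone sketch of the same idea (supplementErrorMessage is stubbed here; in Global it appends compiler context):

    object AssertDemo {
      // Stub standing in for Global.supplementErrorMessage.
      def supplementErrorMessage(msg: String): String = msg + " (while compiling ...)"

      // Check first, build the message only if the check fails.
      @inline final def assertOrFail(assertion: Boolean, message: => Any): Unit =
        if (!assertion)
          throw new java.lang.AssertionError("assertion failed: " + supplementErrorMessage("" + message))

      def main(args: Array[String]): Unit = {
        assertOrFail(1 + 1 == 2, "arithmetic broke")      // passes; message never evaluated
        try assertOrFail(1 + 1 == 3, "arithmetic broke")
        catch { case e: AssertionError => println(e.getMessage) }
      }
    }
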
@@ -255,30 +270,31 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
   }
 
   @inline final def ifDebug(body: => Unit) {
-    if (settings.debug.value)
+    if (settings.debug)
       body
   }
-  // Warnings issued only under -Ydebug.  For messages which should reach
-  // developer ears, but are not adequately actionable by users.
-  @inline final override def debugwarn(msg: => String) {
-    if (settings.debug.value)
-      warning(msg)
-  }
 
-  private def elapsedMessage(msg: String, start: Long) =
-    msg + " in " + (currentTime - start) + "ms"
+  override protected def isDeveloper = settings.developer || super.isDeveloper
+
+  /** This is for WARNINGS which should reach the ears of scala developers
+   *  whenever they occur, but are not useful for normal users. They should
+   *  be precise, explanatory, and infrequent. Please don't use this as a
+   *  logging mechanism. !!! is prefixed to all messages issued via this route
+   *  to make them visually distinct.
+   */
+  @inline final override def devWarning(msg: => String): Unit = devWarning(NoPosition, msg)
+  @inline final def devWarning(pos: Position, msg: => String) {
+    def pos_s = if (pos eq NoPosition) "" else s" [@ $pos]"
+    if (isDeveloper)
+      warning(pos, "!!! " + msg)
+    else
+      log(s"!!!$pos_s $msg") // such warnings always at least logged
+  }
 
   def informComplete(msg: String): Unit    = reporter.withoutTruncating(inform(msg))
-  def informProgress(msg: String)          = if (opt.verbose) inform("[" + msg + "]")
-  def inform[T](msg: String, value: T): T  = returning(value)(x => inform(msg + x))
-  def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start))
 
   def logError(msg: String, t: Throwable): Unit = ()
 
-  def logAfterEveryPhase[T](msg: String)(op: => T) {
-    log("Running operation '%s' after every phase.\n".format(msg) + describeAfterEveryPhase(op))
-  }
-
   override def shouldLogAtThisPhase = settings.log.isSetByUser && (
     (settings.log containsPhase globalPhase) || (settings.log containsPhase phase)
   )
@@ -289,7 +305,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
   }
 
   @inline final override def debuglog(msg: => String) {
-    if (settings.debug.value)
+    if (settings.debug)
       log(msg)
   }
 
@@ -302,7 +318,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
 
   private val reader: SourceReader = {
     val defaultEncoding = Properties.sourceEncoding
-    val defaultReader   = Properties.sourceReader
 
     def loadCharset(name: String) =
       try Some(Charset.forName(name))
@@ -315,7 +330,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
           None
       }
 
-    val charset = opt.encoding flatMap loadCharset getOrElse {
+    val charset = ( if (settings.encoding.isSetByUser) Some(settings.encoding.value) else None ) flatMap loadCharset getOrElse {
       settings.encoding.value = defaultEncoding // A mandatory charset
       Charset.forName(defaultEncoding)
     }
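
With the opt helper object gone, the reader asks the encoding setting directly (isSetByUser) and otherwise falls back to the default source encoding; loadCharset swallows the two charset exceptions and reports the fallback. That fallback logic in isolation (the default value here is illustrative; the compiler reads Properties.sourceEncoding):

    import java.nio.charset.{ Charset, IllegalCharsetNameException, UnsupportedCharsetException }

    object CharsetFallbackDemo {
      val defaultEncoding = "UTF-8"

      def loadCharset(name: String): Option[Charset] =
        try Some(Charset.forName(name))
        catch {
          case _: IllegalCharsetNameException | _: UnsupportedCharsetException =>
            println(name + " is not a valid or supported charset, using " + defaultEncoding)
            None
        }

      def main(args: Array[String]): Unit = {
        val charset = loadCharset("no-such-charset") getOrElse Charset.forName(defaultEncoding)
        println(charset)   // UTF-8
      }
    }
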
@@ -330,62 +345,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
       }
     }
 
-    opt.sourceReader flatMap loadReader getOrElse {
+    ( if (settings.sourceReader.isSetByUser) Some(settings.sourceReader.value) else None ) flatMap loadReader getOrElse {
       new SourceReader(charset.newDecoder(), reporter)
     }
   }
 
-  if (!dependencyAnalysis.off)
-    dependencyAnalysis.loadDependencyAnalysis()
-
-  if (opt.verbose || opt.logClasspath) {
+  if (settings.verbose || settings.Ylogcp) {
     // Uses the "do not truncate" inform
     informComplete("[search path for source files: " + classPath.sourcepaths.mkString(",") + "]")
     informComplete("[search path for class files: " + classPath.asClasspathString + "]")
   }
 
-  object opt extends AestheticSettings {
-    def settings = Global.this.settings
-
-    // protected implicit lazy val globalPhaseOrdering: Ordering[Phase] = Ordering[Int] on (_.id)
-    def isActive(ph: Settings#PhasesSetting)  = ph containsPhase globalPhase
-    def wasActive(ph: Settings#PhasesSetting) = ph containsPhase globalPhase.prev
-
-    // Allows for syntax like scalac -Xshow-class Random at erasure,typer
-    private def splitClassAndPhase(str: String, term: Boolean): Name = {
-      def mkName(s: String) = if (term) newTermName(s) else newTypeName(s)
-      (str indexOf '@') match {
-        case -1   => mkName(str)
-        case idx  =>
-          val phasePart = str drop (idx + 1)
-          settings.Yshow.tryToSetColon(phasePart split ',' toList)
-          mkName(str take idx)
-      }
-    }
-
-    // behavior
-
-    // debugging
-    def checkPhase = wasActive(settings.check)
-    def logPhase   = isActive(settings.log)
-
-    // Write *.icode files right after GenICode when -Xprint-icode was given.
-    def writeICodeAtICode = settings.writeICode.isSetByUser && isActive(settings.writeICode)
-
-    // showing/printing things
-    def browsePhase   = isActive(settings.browse)
-    def echoFilenames = opt.debug && (opt.verbose || currentRun.size < 5)
-    def noShow        = settings.Yshow.isDefault
-    def printLate     = settings.printLate.value
-    def printPhase    = isActive(settings.Xprint)
-    def showNames     = List(showClass, showObject).flatten
-    def showPhase     = isActive(settings.Yshow)
-    def showSymbols   = settings.Yshowsyms.value
-    def showTrees     = settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value
-    val showClass     = optSetting[String](settings.Xshowcls) map (x => splitClassAndPhase(x, false))
-    val showObject    = optSetting[String](settings.Xshowobj) map (x => splitClassAndPhase(x, true))
-  }
-
   // The current division between scala.reflect.* and scala.tools.nsc.* is pretty
   // clunky.  It is often difficult to have a setting influence something without having
   // to create it on that side.  For this one my strategy is a constant def at the file
@@ -394,12 +364,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
   // Here comes another one...
   override protected val enableTypeVarExperimentals = settings.Xexperimental.value
 
-  // True if -Xscript has been set, indicating a script run.
-  def isScriptRun = opt.script.isDefined
-
-  def getSourceFile(f: AbstractFile): BatchSourceFile =
-    if (isScriptRun) ScriptSourceFile(f, reader read f)
-    else new BatchSourceFile(f, reader read f)
+  def getSourceFile(f: AbstractFile): BatchSourceFile = new BatchSourceFile(f, reader read f)
 
   def getSourceFile(name: String): SourceFile = {
     val f = AbstractFile.getFile(name)
@@ -408,9 +373,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     getSourceFile(f)
   }
 
-  lazy val loaders = new SymbolLoaders {
+  lazy val loaders = new {
     val global: Global.this.type = Global.this
-  }
+    val platform: Global.this.platform.type = Global.this.platform
+  } with GlobalSymbolLoaders
 
   /** Returns the mirror that loaded given symbol */
   def mirrorThatLoaded(sym: Symbol): Mirror = rootMirror
@@ -453,7 +419,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
       if ((unit ne null) && unit.exists)
         lastSeenSourceFile = unit.source
 
-      if (opt.echoFilenames)
+      if (settings.debug && (settings.verbose || currentRun.size < 5))
         inform("[running phase " + name + " on " + unit + "]")
 
       val unit0 = currentUnit
@@ -463,7 +429,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
           currentRun.informUnitStarting(this, unit)
           apply(unit)
         }
-        currentRun.advanceUnit
+        currentRun.advanceUnit()
       } finally {
         //assert(currentUnit == unit)
         currentRun.currentUnit = unit0
@@ -471,16 +437,16 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     }
   }
 
-  /** Switch to turn on detailed type logs */
-  var printTypings = settings.Ytyperdebug.value
-  var printInfers = settings.Yinferdebug.value
-
   // phaseName = "parser"
-  object syntaxAnalyzer extends {
+  lazy val syntaxAnalyzer = new {
     val global: Global.this.type = Global.this
+  } with SyntaxAnalyzer {
     val runsAfter = List[String]()
     val runsRightAfter = None
-  } with SyntaxAnalyzer
+    override val initial = true
+  }
+
+  import syntaxAnalyzer.{ UnitScanner, UnitParser }
 
   // !!! I think we're overdue for all these phase objects being lazy vals.
   // There's no way for a Global subclass to provide a custom typer
@@ -498,9 +464,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
   object patmat extends {
     val global: Global.this.type = Global.this
     val runsAfter = List("typer")
-    // patmat doesn't need to be right after typer, as long as we run before supperaccesors
-    // (sbt does need to run right after typer, so don't conflict)
     val runsRightAfter = None
+    // patmat doesn't need to be right after typer, as long as we run before superaccessors
+    // (sbt does need to run right after typer, so don't conflict)
   } with PatternMatching
 
   // phaseName = "superaccessors"
@@ -567,7 +533,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
   } with Erasure
 
   // phaseName = "posterasure"
-  object postErasure extends {
+  override object postErasure extends {
     val global: Global.this.type = Global.this
     val runsAfter = List("erasure")
     val runsRightAfter = Some("erasure")
@@ -615,6 +581,13 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     val runsRightAfter = None
   } with CleanUp
 
+  // phaseName = "delambdafy"
+  object delambdafy extends {
+    val global: Global.this.type = Global.this
+    val runsAfter = List("cleanup")
+    val runsRightAfter = None
+  } with Delambdafy
+
   // phaseName = "icode"
   object genicode extends {
     val global: Global.this.type = Global.this
@@ -629,7 +602,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     val runsRightAfter = None
   } with Inliners
 
-  // phaseName = "inlineExceptionHandlers"
+  // phaseName = "inlinehandlers"
   object inlineExceptionHandlers extends {
     val global: Global.this.type = Global.this
     val runsAfter = List("inliner")
@@ -639,23 +612,23 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
   // phaseName = "closelim"
   object closureElimination extends {
     val global: Global.this.type = Global.this
-    val runsAfter = List("inlineExceptionHandlers")
+    val runsAfter = List("inlinehandlers")
     val runsRightAfter = None
   } with ClosureElimination
 
-  // phaseName = "dce"
-  object deadCode extends {
+  // phaseName = "constopt"
+  object constantOptimization extends {
     val global: Global.this.type = Global.this
     val runsAfter = List("closelim")
     val runsRightAfter = None
-  } with DeadCodeElimination
+  } with ConstantOptimization
 
-  // phaseName = "jvm", FJBG-based version
-  object genJVM extends {
+  // phaseName = "dce"
+  object deadCode extends {
     val global: Global.this.type = Global.this
-    val runsAfter = List("dce")
+    val runsAfter = List("closelim")
     val runsRightAfter = None
-  } with GenJVM
+  } with DeadCodeElimination
 
   // phaseName = "jvm", ASM-based version
   object genASM extends {
@@ -664,40 +637,31 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     val runsRightAfter = None
   } with GenASM
 
-  // This phase is optional: only added if settings.make option is given.
-  // phaseName = "dependencyAnalysis"
-  object dependencyAnalysis extends {
+  // phaseName = "bcode"
+  object genBCode extends {
     val global: Global.this.type = Global.this
-    val runsAfter = List("jvm")
+    val runsAfter = List("dce")
     val runsRightAfter = None
-  } with DependencyAnalysis
+  } with GenBCode
 
   // phaseName = "terminal"
   object terminal extends {
     val global: Global.this.type = Global.this
+  } with SubComponent {
     val phaseName = "terminal"
-    val runsAfter = List("jvm", "msil")
+    val runsAfter = List("jvm")
     val runsRightAfter = None
-  } with SubComponent {
-    private var cache: Option[GlobalPhase] = None
-    def reset(): Unit = cache = None
+    override val terminal = true
 
-    def newPhase(prev: Phase): GlobalPhase =
-      cache getOrElse returning(new TerminalPhase(prev))(x => cache = Some(x))
-
-    class TerminalPhase(prev: Phase) extends GlobalPhase(prev) {
-      def name = "terminal"
+    def newPhase(prev: Phase): GlobalPhase = {
+      new TerminalPhase(prev)
+    }
+    private class TerminalPhase(prev: Phase) extends GlobalPhase(prev) {
+      def name = phaseName
       def apply(unit: CompilationUnit) {}
     }
   }
 
-  // phaseName = "SAMPLE PHASE"
-  object sampleTransform extends {
-    val global: Global.this.type = Global.this
-    val runsAfter = List[String]()
-    val runsRightAfter = None
-  } with SampleTransform
-
   /** The checkers are for validating the compiler data structures
    *  at phase boundaries.
    */
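
All of these phase objects, including the new delambdafy and genBCode components, rely on early-definition syntax: the block between `extends` and `with` is initialized before the mixed-in component's own body runs, so `global`, `runsAfter` and `runsRightAfter` are already set when the component needs them. A minimal, self-contained illustration of that 2.11-era syntax (the trait and names are invented):

    object EarlyDefsDemo {
      trait Component {
        val name: String
        val runsAfter: List[String]
        // Evaluated while Component initializes, so name and runsAfter must already be set.
        val description = name + " runs after: " + runsAfter.mkString(", ")
      }

      // Early definitions: the block before `with` is initialized first.
      object delambdafyLike extends {
        val name      = "delambdafy"
        val runsAfter = List("cleanup")
      } with Component

      def main(args: Array[String]): Unit =
        println(delambdafyLike.description)   // delambdafy runs after: cleanup
    }
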
@@ -721,7 +685,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
   /** Add the internal compiler phases to the phases set.
    *  This implementation creates a description map at the same time.
    */
-  protected def computeInternalPhases() {
+  protected def computeInternalPhases(): Unit = {
     // Note: this fits -Xshow-phases into 80 column width, which it is
     // desirable to preserve.
     val phs = List(
@@ -737,20 +701,22 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
       uncurry                 -> "uncurry, translate function values to anonymous classes",
       tailCalls               -> "replace tail calls by jumps",
       specializeTypes         -> "@specialized-driven class and method specialization",
-      explicitOuter           -> "this refs to outer pointers, translate patterns",
+      explicitOuter           -> "this refs to outer pointers",
       erasure                 -> "erase types, add interfaces for traits",
       postErasure             -> "clean up erased inline classes",
       lazyVals                -> "allocate bitmaps, translate lazy vals into lazified defs",
       lambdaLift              -> "move nested functions to top level",
       constructors            -> "move field definitions into constructors",
       mixer                   -> "mixin composition",
+      delambdafy              -> "remove lambdas",
       cleanup                 -> "platform-specific cleanups, generate reflective calls",
       genicode                -> "generate portable intermediate code",
       inliner                 -> "optimization: do inlining",
       inlineExceptionHandlers -> "optimization: inline exception handlers",
       closureElimination      -> "optimization: eliminate uncalled closures",
+      constantOptimization    -> "optimization: optimize null and other constants",
       deadCode                -> "optimization: eliminate dead code",
-      terminal                -> "The last phase in the compiler chain"
+      terminal                -> "the last phase during a compilation run"
     )
 
     phs foreach (addToPhasesSet _).tupled
@@ -768,13 +734,21 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
 
   // sequences the phase assembly
   protected def computePhaseDescriptors: List[SubComponent] = {
-    computeInternalPhases()       // Global.scala
-    computePlatformPhases()       // backend/Platform.scala
-    computePluginPhases()         // plugins/Plugins.scala
-    buildCompilerFromPhasesSet()  // PhaseAssembly.scala
+    /** Allow phases to opt out of the phase assembly. */
+    def cullPhases(phases: List[SubComponent]) = {
+      val enabled = if (settings.debug && settings.isInfo) phases else phases filter (_.enabled)
+      def isEnabled(q: String) = enabled exists (_.phaseName == q)
+      val (satisfied, unhappy) = enabled partition (_.requires forall isEnabled)
+      unhappy foreach (u => globalError(s"Phase '${u.phaseName}' requires: ${u.requires filterNot isEnabled}"))
+      satisfied   // they're happy now, but they may need an unhappy phase that was booted
+    }
+    computeInternalPhases()             // Global.scala
+    computePlatformPhases()             // backend/Platform.scala
+    computePluginPhases()               // plugins/Plugins.scala
+    cullPhases(computePhaseAssembly())  // PhaseAssembly.scala
   }
 
-  /* The phase descriptor list */
+  /* The phase descriptor list. Components that are phase factories. */
   lazy val phaseDescriptors: List[SubComponent] = computePhaseDescriptors
 
   /* The set of phase objects that is the basis for the compiler phase chain */
@@ -792,86 +766,91 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     phaseDescriptors map (_.phaseName)
   }
 
-  /** A description of the phases that will run */
-  def phaseDescriptions: String = {
-    val width = phaseNames map (_.length) max
-    val fmt   = "%" + width + "s  %2s  %s\n"
+  /** A description of the phases that will run in this configuration, or all if -Ydebug. */
+  def phaseDescriptions: String = phaseHelp("description", elliptically = true, phasesDescMap)
 
-    val line1 = fmt.format("phase name", "id", "description")
-    val line2 = fmt.format("----------", "--", "-----------")
-    val descs = phaseDescriptors.zipWithIndex map {
-      case (ph, idx) => fmt.format(ph.phaseName, idx + 1, phasesDescMap(ph))
+  /** Summary of the per-phase values of nextFlags and newFlags, shown under -Xshow-phases -Ydebug. */
+  def phaseFlagDescriptions: String = {
+    def fmt(ph: SubComponent) = {
+      def fstr1 = if (ph.phaseNewFlags == 0L) "" else "[START] " + Flags.flagsToString(ph.phaseNewFlags)
+      def fstr2 = if (ph.phaseNextFlags == 0L) "" else "[END] " + Flags.flagsToString(ph.phaseNextFlags)
+      if (ph.initial) Flags.flagsToString(Flags.InitialFlags)
+      else if (ph.phaseNewFlags != 0L && ph.phaseNextFlags != 0L) fstr1 + " " + fstr2
+      else fstr1 + fstr2
     }
-    line1 :: line2 :: descs mkString
+    phaseHelp("new flags", elliptically = false, fmt)
   }
-  /** Summary of the per-phase values of nextFlags and newFlags, shown
-   *  with -Xshow-phases if -Ydebug also given.
+
+  /** Emit a verbose phase table.
+   *  The table includes the phase id in the current assembly,
+   *  or "oo" to indicate a skipped phase, or "xx" to indicate
+   *  a disabled phase.
+   *
+   *  @param title descriptive header
+   *  @param elliptically whether to truncate the description with an ellipsis (...)
+   *  @param describe how to describe a component
    */
-  def phaseFlagDescriptions: String = {
-    val width = phaseNames map (_.length) max
-    val fmt   = "%" + width + "s  %2s  %s\n"
-
-    val line1 = fmt.format("phase name", "id", "new flags")
-    val line2 = fmt.format("----------", "--", "---------")
-    val descs = phaseDescriptors.zipWithIndex map {
-      case (ph, idx) =>
-        def fstr1 = if (ph.phaseNewFlags == 0L) "" else "[START] " + Flags.flagsToString(ph.phaseNewFlags)
-        def fstr2 = if (ph.phaseNextFlags == 0L) "" else "[END] " + Flags.flagsToString(ph.phaseNextFlags)
-        val fstr = (
-          if (ph.ownPhase.id == 1) Flags.flagsToString(Flags.InitialFlags)
-          else if (ph.phaseNewFlags != 0L && ph.phaseNextFlags != 0L) fstr1 + " " + fstr2
-          else fstr1 + fstr2
-        )
-        fmt.format(ph.phaseName, idx + 1, fstr)
+  def phaseHelp(title: String, elliptically: Boolean, describe: SubComponent => String) = {
+    val Limit   = 16    // phase names should not be absurdly long
+    val MaxCol  = 80    // because some of us edit on green screens
+    val maxName = phaseNames map (_.length) max
+    val width   = maxName min Limit
+    val maxDesc = MaxCol - (width + 6)  // descriptions not novels
+    val fmt     = if (settings.verbose || !elliptically) s"%${maxName}s  %2s  %s%n"
+                  else s"%${width}.${width}s  %2s  %.${maxDesc}s%n"
+
+    val line1 = fmt.format("phase name", "id", title)
+    val line2 = fmt.format("----------", "--", "-" * title.length)
+
+    // built-in string precision merely truncates
+    import java.util.{ Formattable, FormattableFlags, Formatter }
+    def dotfmt(s: String) = new Formattable {
+      def elliptically(s: String, max: Int) = (
+        if (max < 0 || s.length <= max) s
+        else if (max < 4) s.take(max)
+        else s.take(max - 3) + "..."
+      )
+      override def formatTo(formatter: Formatter, flags: Int, width: Int, precision: Int) {
+        val p = elliptically(s, precision)
+        val w = if (width > 0 && p.length < width) {
+          import FormattableFlags.LEFT_JUSTIFY
+          val leftly = (flags & LEFT_JUSTIFY) == LEFT_JUSTIFY
+          val sb = new StringBuilder
+          def pad() = 1 to width - p.length foreach (_ => sb.append(' '))
+          if (!leftly) pad()
+          sb.append(p)
+          if (leftly) pad()
+          sb.toString
+        } else p
+        formatter.out.append(w)
+      }
     }
-    line1 :: line2 :: descs mkString
+
+    // phase id in run, or suitable icon
+    def idOf(p: SubComponent) = (
+      if (settings.skip contains p.phaseName) "oo"   // (currentRun skipPhase p.phaseName)
+      else if (!p.enabled) "xx"
+      else p.ownPhase.id.toString
+    )
+    def mkText(p: SubComponent) = {
+      val (name, text) = if (elliptically) (dotfmt(p.phaseName), dotfmt(describe(p)))
+                         else (p.phaseName, describe(p))
+      fmt.format(name, idOf(p), text)
+    }
+    line1 :: line2 :: (phaseDescriptors map mkText) mkString
   }
 
   /** Returns List of (phase, value) pairs, including only those
    *  where the value compares unequal to the previous phase's value.
    */
-  def afterEachPhase[T](op: => T): List[(Phase, T)] = {
+  def afterEachPhase[T](op: => T): List[(Phase, T)] = { // used in tests
     phaseDescriptors.map(_.ownPhase).filterNot(_ eq NoPhase).foldLeft(List[(Phase, T)]()) { (res, ph) =>
-      val value = afterPhase(ph)(op)
+      val value = exitingPhase(ph)(op)
       if (res.nonEmpty && res.head._2 == value) res
       else ((ph, value)) :: res
     } reverse
   }
 
-  /** Returns List of ChangeAfterPhase objects, encapsulating those
-   *  phase transitions where the result of the operation gave a different
-   *  list than it had when run during the previous phase.
-   */
-  def changesAfterEachPhase[T](op: => List[T]): List[ChangeAfterPhase[T]] = {
-    val ops = ((NoPhase, Nil)) :: afterEachPhase(op)
-
-    ops sliding 2 map {
-      case (_, before) :: (ph, after) :: Nil =>
-        val lost   = before filterNot (after contains _)
-        val gained = after filterNot (before contains _)
-        ChangeAfterPhase(ph, lost, gained)
-      case _ => ???
-    } toList
-  }
-  private def numberedPhase(ph: Phase) = "%2d/%s".format(ph.id, ph.name)
-
-  case class ChangeAfterPhase[+T](ph: Phase, lost: List[T], gained: List[T]) {
-    private def mkStr(what: String, xs: List[_]) = (
-      if (xs.isEmpty) ""
-      else xs.mkString(what + " after " + numberedPhase(ph) + " {\n  ", "\n  ", "\n}\n")
-    )
-    override def toString = mkStr("Lost", lost) + mkStr("Gained", gained)
-  }
-
-  def describeAfterEachPhase[T](op: => T): List[String] =
-    afterEachPhase(op) map { case (ph, t) => "[after %-15s] %s".format(numberedPhase(ph), t) }
-
-  def describeAfterEveryPhase[T](op: => T): String =
-    describeAfterEachPhase(op) map ("  " + _ + "\n") mkString
-
-  def printAfterEachPhase[T](op: => T): Unit =
-    describeAfterEachPhase(op) foreach (m => println("  " + m))
-
   // ------------ Invalidations ---------------------------------
 
   /** Is given package class a system package class that cannot be invalidated?
@@ -885,8 +864,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
 
   /** Invalidates packages that contain classes defined in a classpath entry, and
    *  rescans that entry.
-   *  @param path  A fully qualified name that refers to a directory or jar file that's
-   *               an entry on the classpath.
+   *  @param paths  Fully qualified names that refer to directories or jar files that are
+   *                entries on the classpath.
    *  First, causes the classpath entry referred to by `path` to be rescanned, so that
    *  any new files or deleted files or changes in subpackages are picked up.
    *  Second, invalidates any packages for which one of the following conditions is met:
@@ -980,7 +959,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
              invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]) {
     ifDebug(informProgress(s"syncing $root, $oldEntries -> $newEntries"))
 
-    val getName: ClassPath[platform.BinaryRepr] => String = (_.name)
+    val getName: ClassPath[AbstractFile] => String = (_.name)
     def hasClasses(cp: OptClassPath) = cp.isDefined && cp.get.classes.nonEmpty
     def invalidateOrRemove(root: ClassSymbol) = {
       allEntries match {
@@ -1064,17 +1043,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
      * Then, fsc -Xexperimental clears the nsc project between successive runs of `fsc`.
      */
 
-  /** Remove the current run when not needed anymore. Used by the build
-   *  manager to save on the memory foot print. The current run holds on
-   *  to all compilation units, which in turn hold on to trees.
-   */
-  private [nsc] def dropRun() {
-    curRun = null
-  }
-
   object typeDeconstruct extends {
     val global: Global.this.type = Global.this
-  } with interpreter.StructuredTypeStrings
+  } with typechecker.StructuredTypeStrings
 
   /** There are common error conditions where when the exception hits
    *  here, currentRun.currentUnit is null.  This robs us of the knowledge
@@ -1093,102 +1064,101 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
   def currentRun: Run              = curRun
   def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit
   def currentSource: SourceFile    = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile
+  def currentFreshNameCreator      = currentUnit.fresh
 
-  // TODO - trim these to the absolute minimum.
-  @inline final def afterErasure[T](op: => T): T        = afterPhase(currentRun.erasurePhase)(op)
-  @inline final def afterPostErasure[T](op: => T): T    = afterPhase(currentRun.posterasurePhase)(op)
-  @inline final def afterExplicitOuter[T](op: => T): T  = afterPhase(currentRun.explicitouterPhase)(op)
-  @inline final def afterFlatten[T](op: => T): T        = afterPhase(currentRun.flattenPhase)(op)
-  @inline final def afterIcode[T](op: => T): T          = afterPhase(currentRun.icodePhase)(op)
-  @inline final def afterMixin[T](op: => T): T          = afterPhase(currentRun.mixinPhase)(op)
-  @inline final def afterPickler[T](op: => T): T        = afterPhase(currentRun.picklerPhase)(op)
-  @inline final def afterRefchecks[T](op: => T): T      = afterPhase(currentRun.refchecksPhase)(op)
-  @inline final def afterSpecialize[T](op: => T): T     = afterPhase(currentRun.specializePhase)(op)
-  @inline final def afterTyper[T](op: => T): T          = afterPhase(currentRun.typerPhase)(op)
-  @inline final def afterUncurry[T](op: => T): T        = afterPhase(currentRun.uncurryPhase)(op)
-  @inline final def beforeErasure[T](op: => T): T       = beforePhase(currentRun.erasurePhase)(op)
-  @inline final def beforeExplicitOuter[T](op: => T): T = beforePhase(currentRun.explicitouterPhase)(op)
-  @inline final def beforeFlatten[T](op: => T): T       = beforePhase(currentRun.flattenPhase)(op)
-  @inline final def beforeIcode[T](op: => T): T         = beforePhase(currentRun.icodePhase)(op)
-  @inline final def beforeMixin[T](op: => T): T         = beforePhase(currentRun.mixinPhase)(op)
-  @inline final def beforePickler[T](op: => T): T       = beforePhase(currentRun.picklerPhase)(op)
-  @inline final def beforeRefchecks[T](op: => T): T     = beforePhase(currentRun.refchecksPhase)(op)
-  @inline final def beforeSpecialize[T](op: => T): T    = beforePhase(currentRun.specializePhase)(op)
-  @inline final def beforeTyper[T](op: => T): T         = beforePhase(currentRun.typerPhase)(op)
-  @inline final def beforeUncurry[T](op: => T): T       = beforePhase(currentRun.uncurryPhase)(op)
-
-  def explainContext(c: analyzer.Context): String = (
-    if (c == null) "" else (
-     """| context owners: %s
-        |
-        |Enclosing block or template:
-        |%s""".format(
-          c.owner.ownerChain.takeWhile(!_.isPackageClass).mkString(" -> "),
-          nodePrinters.nodeToString(c.enclClassOrMethod.tree)
-        )
-    )
+  def isGlobalInitialized = (
+       definitions.isDefinitionsInitialized
+    && rootMirror.isMirrorInitialized
   )
-  // Owners up to and including the first package class.
+  override def isPastTyper = (
+       (curRun ne null)
+    && isGlobalInitialized // defense against init order issues
+    && (globalPhase.id > currentRun.typerPhase.id)
+  )
+
+  // TODO - trim these to the absolute minimum.
+  @inline final def exitingErasure[T](op: => T): T        = exitingPhase(currentRun.erasurePhase)(op)
+  @inline final def exitingPostErasure[T](op: => T): T    = exitingPhase(currentRun.posterasurePhase)(op)
+  @inline final def exitingExplicitOuter[T](op: => T): T  = exitingPhase(currentRun.explicitouterPhase)(op)
+  @inline final def exitingFlatten[T](op: => T): T        = exitingPhase(currentRun.flattenPhase)(op)
+  @inline final def exitingMixin[T](op: => T): T          = exitingPhase(currentRun.mixinPhase)(op)
+  @inline final def exitingDelambdafy[T](op: => T): T     = exitingPhase(currentRun.delambdafyPhase)(op)
+  @inline final def exitingPickler[T](op: => T): T        = exitingPhase(currentRun.picklerPhase)(op)
+  @inline final def exitingRefchecks[T](op: => T): T      = exitingPhase(currentRun.refchecksPhase)(op)
+  @inline final def exitingSpecialize[T](op: => T): T     = exitingPhase(currentRun.specializePhase)(op)
+  @inline final def exitingTyper[T](op: => T): T          = exitingPhase(currentRun.typerPhase)(op)
+  @inline final def exitingUncurry[T](op: => T): T        = exitingPhase(currentRun.uncurryPhase)(op)
+  @inline final def enteringErasure[T](op: => T): T       = enteringPhase(currentRun.erasurePhase)(op)
+  @inline final def enteringExplicitOuter[T](op: => T): T = enteringPhase(currentRun.explicitouterPhase)(op)
+  @inline final def enteringFlatten[T](op: => T): T       = enteringPhase(currentRun.flattenPhase)(op)
+  @inline final def enteringIcode[T](op: => T): T         = enteringPhase(currentRun.icodePhase)(op)
+  @inline final def enteringMixin[T](op: => T): T         = enteringPhase(currentRun.mixinPhase)(op)
+  @inline final def enteringDelambdafy[T](op: => T): T    = enteringPhase(currentRun.delambdafyPhase)(op)
+  @inline final def enteringPickler[T](op: => T): T       = enteringPhase(currentRun.picklerPhase)(op)
+  @inline final def enteringSpecialize[T](op: => T): T    = enteringPhase(currentRun.specializePhase)(op)
+  @inline final def enteringTyper[T](op: => T): T         = enteringPhase(currentRun.typerPhase)(op)
+  @inline final def enteringUncurry[T](op: => T): T       = enteringPhase(currentRun.uncurryPhase)(op)
+
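
For orientation, a hedged sketch of how the renamed helpers read at a call site. It assumes an initialized scala.tools.nsc.Global with a current Run; the object and method names are illustrative, not part of the patch.

    import scala.tools.nsc.Global

    object PhaseLens {
      // info of `sym` as it stands entering erasure vs. after erasure has transformed it
      def erasureViews(global: Global)(sym: global.Symbol): (String, String) =
        (global.enteringErasure(sym.info.toString), global.exitingErasure(sym.info.toString))
    }
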
+  // Owners which aren't package classes.
   private def ownerChainString(sym: Symbol): String = (
     if (sym == null) ""
-    else sym.ownerChain.span(!_.isPackageClass) match {
-      case (xs, pkg :: _) => (xs :+ pkg) mkString " -> "
-      case _              => sym.ownerChain mkString " -> " // unlikely
-    }
+    else sym.ownerChain takeWhile (!_.isPackageClass) mkString " -> "
   )
+
   private def formatExplain(pairs: (String, Any)*): String = (
     pairs.toList collect { case (k, v) if v != null => "%20s: %s".format(k, v) } mkString "\n"
   )
 
-  def explainTree(t: Tree): String = formatExplain(
-  )
-
   /** Don't want to introduce new errors trying to report errors,
    *  so swallow exceptions.
    */
-  override def supplementErrorMessage(errorMessage: String): String =
+  override def supplementErrorMessage(errorMessage: String): String = {
     if (currentRun.supplementedError) errorMessage
     else try {
+      currentRun.supplementedError = true
       val tree      = analyzer.lastTreeToTyper
       val sym       = tree.symbol
       val tpe       = tree.tpe
-      val enclosing = lastSeenContext.enclClassOrMethod.tree
+      val site      = lastSeenContext.enclClassOrMethod.owner
+      val pos_s     = if (tree.pos.isDefined) s"line ${tree.pos.line} of ${tree.pos.source.file}" else "<unknown>"
+      val context_s = try {
+        // Taking 3 before, 3 after the fingered line.
+        val start = 0 max (tree.pos.line - 3)
+        val xs = scala.reflect.io.File(tree.pos.source.file.file).lines drop start take 7
+        val strs = xs.zipWithIndex map { case (line, idx) => f"${start + idx}%6d $line" }
+        strs.mkString("== Source file context for tree position ==\n\n", "\n", "")
+      }
+      catch { case t: Exception => devWarning("" + t) ; "<Cannot read source file>" }
 
       val info1 = formatExplain(
         "while compiling"    -> currentSource.path,
-        "during phase"       -> ( if (globalPhase eq phase) phase else "global=%s, atPhase=%s".format(globalPhase, phase) ),
+        "during phase"       -> ( if (globalPhase eq phase) phase else "globalPhase=%s, enteringPhase=%s".format(globalPhase, phase) ),
         "library version"    -> scala.util.Properties.versionString,
         "compiler version"   -> Properties.versionString,
         "reconstructed args" -> settings.recreateArgs.mkString(" ")
       )
       val info2 = formatExplain(
         "last tree to typer" -> tree.summaryString,
+        "tree position"      -> pos_s,
+        "tree tpe"           -> tpe,
         "symbol"             -> Option(sym).fold("null")(_.debugLocationString),
-        "symbol definition"  -> Option(sym).fold("null")(_.defString),
-        "tpe"                -> tpe,
+        "symbol definition"  -> Option(sym).fold("null")(s => s.defString + s" (a ${s.shortSymbolClass})"),
+        "symbol package"     -> sym.enclosingPackage.fullName,
         "symbol owners"      -> ownerChainString(sym),
-        "context owners"     -> ownerChainString(lastSeenContext.owner)
+        "call site"          -> (site.fullLocationString + " in " + site.enclosingPackage)
       )
-      val info3: List[String] = (
-           ( List("== Enclosing template or block ==", nodePrinters.nodeToString(enclosing).trim) )
-        ++ ( if (tpe eq null) Nil else List("== Expanded type of tree ==", typeDeconstruct.show(tpe)) )
-        ++ ( if (!opt.debug) Nil else List("== Current unit body ==", nodePrinters.nodeToString(currentUnit.body)) )
-        ++ ( List(errorMessage) )
-      )
-
-      currentRun.supplementedError = true
-
-      ("\n" + info1) :: info2 :: info3 mkString "\n\n"
+      ("\n  " + errorMessage + "\n" + info1) :: info2 :: context_s :: Nil mkString "\n\n"
     }
     catch { case _: Exception | _: TypeError => errorMessage }
+  }
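
The source-context block built above reduces to a small window computation; here is a standalone sketch of the same idea, with illustrative names and a 1-based error line.

    // Show the offending line with up to three lines on either side, numbered like compiler output.
    def contextWindow(lines: Vector[String], errorLine: Int): String = {
      val start = 0 max (errorLine - 4)            // errorLine is 1-based, lines is 0-indexed
      lines.slice(start, start + 7).zipWithIndex
        .map { case (line, idx) => f"${start + idx + 1}%6d $line" }
        .mkString("\n")
    }
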
 
   /** The id of the currently active run
    */
   override def currentRunId = curRunId
 
   def echoPhaseSummary(ph: Phase) = {
-    /** Only output a summary message under debug if we aren't echoing each file. */
-    if (opt.debug && !opt.echoFilenames)
+    /* Only output a summary message under debug if we aren't echoing each file. */
+    if (settings.debug && !(settings.verbose || currentRun.size < 5))
       inform("[running phase " + ph.name + " on " + currentRun.size +  " compilation units]")
   }
 
@@ -1196,19 +1166,29 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
   class ConditionalWarning(what: String, option: Settings#BooleanSetting) {
     val warnings = mutable.LinkedHashMap[Position, String]()
     def warn(pos: Position, msg: String) =
-      if (option.value) reporter.warning(pos, msg)
+      if (option) reporter.warning(pos, msg)
       else if (!(warnings contains pos)) warnings += ((pos, msg))
     def summarize() =
-      if (option.isDefault && warnings.nonEmpty)
-        reporter.warning(NoPosition, "there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name))
+      if (warnings.nonEmpty && (option.isDefault || settings.fatalWarnings))
+        warning("there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name))
   }
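
A standalone sketch of the deferral pattern ConditionalWarning implements: report immediately when the option is enabled, otherwise keep one message per position and emit a single summary at the end. Names are illustrative, not the compiler's own.

    import scala.collection.mutable

    final class DeferredWarnings(what: String, enabled: Boolean, report: String => Unit) {
      private val collected = mutable.LinkedHashMap[Int, String]()   // position -> first message
      def warn(pos: Int, msg: String): Unit =
        if (enabled) report(msg)
        else if (!(collected contains pos)) collected(pos) = msg
      def summarize(): Unit =
        if (collected.nonEmpty)
          report(s"there were ${collected.size} $what warning(s); re-run with the relevant option for details")
    }
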
 
-  def newUnitParser(code: String)      = new syntaxAnalyzer.UnitParser(newCompilationUnit(code))
-  def newUnitScanner(code: String)     = new syntaxAnalyzer.UnitScanner(newCompilationUnit(code))
-  def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code))
-  def newSourceFile(code: String)      = new BatchSourceFile("<console>", code)
+  def newSourceFile(code: String, filename: String = "<console>") =
+    new BatchSourceFile(filename, code)
+
+  def newCompilationUnit(code: String, filename: String = "<console>") =
+    new CompilationUnit(newSourceFile(code, filename))
 
-  /** A Run is a single execution of the compiler on a sets of units
+  def newUnitScanner(unit: CompilationUnit): UnitScanner =
+    new UnitScanner(unit)
+
+  def newUnitParser(unit: CompilationUnit): UnitParser =
+    new UnitParser(unit)
+
+  def newUnitParser(code: String, filename: String = "<console>"): UnitParser =
+    newUnitParser(newCompilationUnit(code, filename))
+
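
A hedged usage sketch for the new string-based factories, assuming scala-compiler is on the classpath; the object name is illustrative.

    import scala.tools.nsc.{ Global, Settings }
    import scala.tools.nsc.reporters.ConsoleReporter

    object ParseSnippet extends App {
      val settings = new Settings
      settings.usejavacp.value = true                   // reuse the JVM classpath
      val global = new Global(settings, new ConsoleReporter(settings))
      new global.Run()                                  // initializes definitions and phases
      val tree = global.newUnitParser("class C { def f = 1 }").parse()
      println(global.showRaw(tree))
    }
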
+  /** A Run is a single execution of the compiler on a set of units.
    */
   class Run extends RunContextApi {
     /** Have been running into too many init order issues with Run
@@ -1227,9 +1207,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     val inlinerWarnings = new ConditionalWarning("inliner", settings.YinlinerWarnings)
     val allConditionalWarnings = List(deprecationWarnings0, uncheckedWarnings0, featureWarnings, inlinerWarnings)
 
-    // for sbt's benefit
-    def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList
-    def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList
+    def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList // used in sbt
+    def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList // used in sbt
 
     var reportedFeature = Set[Symbol]()
 
@@ -1239,10 +1218,26 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     /** Have we already supplemented the error message of a compiler crash? */
     private[nsc] final var supplementedError = false
 
-    /** To be initialized from firstPhase. */
-    private var terminalPhase: Phase = NoPhase
+    private class SyncedCompilationBuffer { self =>
+      private val underlying = new mutable.ArrayBuffer[CompilationUnit]
+      def size = synchronized { underlying.size }
+      def +=(cu: CompilationUnit): this.type = { synchronized { underlying += cu }; this }
+      def head: CompilationUnit = synchronized{ underlying.head }
+      def apply(i: Int): CompilationUnit = synchronized { underlying(i) }
+      def iterator: Iterator[CompilationUnit] = new collection.AbstractIterator[CompilationUnit] {
+        private var used = 0
+        def hasNext = self.synchronized{ used < underlying.size }
+        def next = self.synchronized {
+          if (!hasNext) throw new NoSuchElementException("next on empty Iterator")
+          used += 1
+          underlying(used-1)
+        }
+      }
+      def toList: List[CompilationUnit] = synchronized{ underlying.toList }
+    }
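
A standalone sketch of the same buffer pattern, stripped of compiler types: appends and reads share one lock, and the iterator re-checks the size on every step, so elements added mid-iteration are still seen.

    import scala.collection.mutable

    final class SyncedBuffer[A] { self =>
      private val underlying = new mutable.ArrayBuffer[A]
      def +=(a: A): this.type = { synchronized { underlying += a }; this }
      def size: Int = synchronized { underlying.size }
      def iterator: Iterator[A] = new Iterator[A] {
        private var used = 0
        def hasNext: Boolean = self.synchronized { used < underlying.size }
        def next(): A = self.synchronized {
          if (used >= underlying.size) throw new NoSuchElementException("next on empty Iterator")
          used += 1
          underlying(used - 1)
        }
      }
    }
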
+
+    private val unitbuf = new SyncedCompilationBuffer
 
-    private val unitbuf = new mutable.ListBuffer[CompilationUnit]
     val compiledFiles   = new mutable.HashSet[String]
 
     /** A map from compiled top-level symbols to their source files */
@@ -1251,64 +1246,99 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     /** A map from compiled top-level symbols to their picklers */
     val symData = new mutable.HashMap[Symbol, PickleBuffer]
 
-    private var phasec: Int       = 0   // phases completed
-    private var unitc: Int        = 0   // units completed this phase
-    private var _unitbufSize = 0
+    private var phasec: Int  = 0   // phases completed
+    private var unitc: Int   = 0   // units completed this phase
 
-    def size = _unitbufSize
+    def size = unitbuf.size
     override def toString = "scalac Run for:\n  " + compiledFiles.toList.sorted.mkString("\n  ")
 
     // Calculate where to stop based on settings -Ystop-before or -Ystop-after.
-    // Slightly complicated logic due to wanting -Ystop-before:parser to fail rather
-    // than mysteriously running to completion.
+    // The result is the phase to stop at BEFORE running it.
     private lazy val stopPhaseSetting = {
-      val result = phaseDescriptors sliding 2 collectFirst {
-        case xs if xs exists (settings.stopBefore contains _.phaseName) => if (settings.stopBefore contains xs.head.phaseName) xs.head else xs.last
-        case xs if settings.stopAfter contains xs.head.phaseName        => xs.last
+      def isBefore(pd: SubComponent) = settings.stopBefore contains pd.phaseName
+      phaseDescriptors sliding 2 collectFirst {
+        case xs if xs exists isBefore
+                => (xs find isBefore).get
+        case xs if settings.stopAfter contains xs.head.phaseName
+                => xs.last
       }
-      if (result exists (_.phaseName == "parser"))
-        globalError("Cannot stop before parser phase.")
-
-      result
     }
-    // The phase to stop BEFORE running.
+    /** Should we stop right before entering the given phase? */
     protected def stopPhase(name: String) = stopPhaseSetting exists (_.phaseName == name)
+    /** Should we skip the given phase? */
     protected def skipPhase(name: String) = settings.skip contains name
 
-    /** As definitions.init requires phase != NoPhase, and calling phaseDescriptors.head
-     *  will force init, there is some jockeying herein regarding init order: instead of
-     *  taking the head descriptor we create a parser phase directly.
-     */
     private val firstPhase = {
-      /** Initialization. */
+      // Initialization.  definitions.init requires phase != NoPhase
+      import scala.reflect.internal.SomePhase
       curRunId += 1
       curRun = this
-
-      /** Set phase to a newly created syntaxAnalyzer and call definitions.init. */
-      val parserPhase: Phase = syntaxAnalyzer.newPhase(NoPhase)
-      phase = parserPhase
+      phase = SomePhase
+      phaseWithId(phase.id) = phase
       definitions.init()
 
-      // Flush the cache in the terminal phase: the chain could have been built
-      // before without being used. (This happens in the interpreter.)
-      terminal.reset
-
-      // Each subcomponent supplies a phase, which are chained together.
-      //   If -Ystop:phase is given, neither that phase nor any beyond it is added.
-      //   If -Yskip:phase is given, that phase will be skipped.
-      val phaseLinks = {
-        val phs = (
-          phaseDescriptors.tail
-            takeWhile (pd => !stopPhase(pd.phaseName))
-            filterNot (pd =>  skipPhase(pd.phaseName))
-        )
+      // the components to use, omitting those named by -Yskip and stopping at the -Ystop phase
+      val components = {
+        // stop on a dime, but this test fails if pd is after the stop phase
+        def unstoppable(pd: SubComponent) = {
+          val stoppable = stopPhase(pd.phaseName)
+          if (stoppable && pd.initial) {
+            globalError(s"Cannot stop before initial phase '${pd.phaseName}'.")
+            true
+          } else
+            !stoppable
+        }
+        // skip a component for -Yskip or if not enabled
+        def skippable(pd: SubComponent) = {
+          val skippable = skipPhase(pd.phaseName)
+          if (skippable && (pd.initial || pd.terminal)) {
+            globalError(s"Cannot skip an initial or terminal phase '${pd.phaseName}'.")
+            false
+          } else
+            skippable || !pd.enabled
+        }
+        val phs = phaseDescriptors takeWhile unstoppable filterNot skippable
         // Ensure there is a terminal phase at the end, since -Ystop may have limited the phases.
-        if (phs.isEmpty || (phs.last ne terminal)) phs :+ terminal
-        else phs
+        if (phs.isEmpty || !phs.last.terminal) {
+          val t = if (phaseDescriptors.last.terminal) phaseDescriptors.last else terminal
+          phs :+ t
+        } else phs
       }
-      // Link them together.
-      phaseLinks.foldLeft(parserPhase)((chain, ph) => ph newPhase chain)
-      parserPhase
+      // Create phases and link them together. We supply the previous, and the ctor sets prev.next.
+      val last  = components.foldLeft(NoPhase: Phase)((prev, c) => c newPhase prev)
+      // rewind (Iterator.iterate(last)(_.prev) dropWhile (_.prev ne NoPhase)).next
+      val first = { var p = last ; while (p.prev ne NoPhase) p = p.prev ; p }
+      val ss    = settings
+
+      // As a final courtesy, see if the settings make any sense at all.
+      // If a setting selects no phase, it's a mistake. If a name prefix
+      // doesn't select a unique phase, that might be surprising too.
+      def checkPhaseSettings(including: Boolean, specs: Seq[String]*) = {
+        def isRange(s: String) = s.forall(c => c.isDigit || c == '-')
+        def isSpecial(s: String) = (s == "all" || isRange(s))
+        val setting = new ss.PhasesSetting("fake","fake")
+        for (p <- specs.flatten.to[Set]) {
+          setting.value = List(p)
+          val count = (
+            if (including) first.iterator count (setting containsPhase _)
+            else phaseDescriptors count (setting contains _.phaseName)
+          )
+          if (count == 0) warning(s"'$p' specifies no phase")
+          if (count > 1 && !isSpecial(p)) warning(s"'$p' selects $count phases")
+          if (!including && isSpecial(p)) globalError(s"-Yskip and -Ystop values must name phases: '$p'")
+          setting.clear()
+        }
+      }
+      // phases that are excluded; for historical reasons, these settings only select by phase name
+      val exclusions = List(ss.stopBefore, ss.stopAfter, ss.skip)
+      val inclusions = ss.visibleSettings collect {
+        case s: ss.PhasesSetting if !(exclusions contains s) => s.value
+      }
+      checkPhaseSettings(including = true, inclusions.toSeq: _*)
+      checkPhaseSettings(including = false, exclusions map (_.value): _*)
+
+      phase = first   //parserPhase
+      first
     }
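
A minimal standalone sketch of the chaining fold above, with toy types: constructing a phase wires prev.next, foldLeft over the components yields the last phase, and rewinding along prev recovers the first.

    object PhaseChainDemo extends App {
      final class P(val name: String, val prev: P) {
        var next: P = this                              // self-loop until a successor is created
        if (prev != null) prev.next = this
      }
      val last  = List("parser", "namer", "typer").foldLeft(null: P)((prev, n) => new P(n, prev))
      val first = { var p = last; while (p.prev != null) p = p.prev; p }
      println(first.name)                               // parser
    }
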
 
     /** Reset all classes contained in current project, as determined by
@@ -1318,11 +1348,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     def resetProjectClasses(root: Symbol): Unit = try {
       def unlink(sym: Symbol) =
         if (sym != NoSymbol) root.info.decls.unlink(sym)
-      if (settings.verbose.value) inform("[reset] recursing in "+root)
+      if (settings.verbose) inform("[reset] recursing in "+root)
       val toReload = mutable.Set[String]()
       for (sym <- root.info.decls) {
         if (sym.isInitialized && clearOnNextRun(sym))
-          if (sym.isPackage) {
+          if (sym.hasPackageFlag) {
             resetProjectClasses(sym.moduleClass)
             openPackageModule(sym.moduleClass)
           } else {
@@ -1338,7 +1368,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
       for (fullname <- toReload)
         classPath.findClass(fullname) match {
           case Some(classRep) =>
-            if (settings.verbose.value) inform("[reset] reinit "+fullname)
+            if (settings.verbose) inform("[reset] reinit "+fullname)
             loaders.initializeFromClassPath(root, classRep)
           case _ =>
         }
@@ -1347,8 +1377,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
         // this handler should not be necessary, but it seems that `fsc`
         // eats exceptions if they appear here. Need to find out the cause for
         // this and fix it.
-        inform("[reset] exception happened: "+ex);
-        ex.printStackTrace();
+        inform("[reset] exception happened: "+ex)
+        ex.printStackTrace()
         throw ex
     }
 
@@ -1374,14 +1404,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     def advancePhase() {
       unitc = 0
       phasec += 1
-      refreshProgress
+      refreshProgress()
     }
     /** take note that a phase on a unit is completed
      *  (for progress reporting)
      */
     def advanceUnit() {
       unitc += 1
-      refreshProgress
+      refreshProgress()
     }
 
     def cancel() { reporter.cancelled = true }
@@ -1402,7 +1432,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     val namerPhase                   = phaseNamed("namer")
     // val packageobjectsPhase          = phaseNamed("packageobjects")
     val typerPhase                   = phaseNamed("typer")
-    val inlineclassesPhase           = phaseNamed("inlineclasses")
+    // val inlineclassesPhase           = phaseNamed("inlineclasses")
     // val superaccessorsPhase          = phaseNamed("superaccessors")
     val picklerPhase                 = phaseNamed("pickler")
     val refchecksPhase               = phaseNamed("refchecks")
@@ -1415,22 +1445,20 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     val erasurePhase                 = phaseNamed("erasure")
     val posterasurePhase             = phaseNamed("posterasure")
     // val lazyvalsPhase                = phaseNamed("lazyvals")
-    val lambdaliftPhase              = phaseNamed("lambdalift")
+    // val lambdaliftPhase              = phaseNamed("lambdalift")
     // val constructorsPhase            = phaseNamed("constructors")
     val flattenPhase                 = phaseNamed("flatten")
     val mixinPhase                   = phaseNamed("mixin")
+    val delambdafyPhase              = phaseNamed("delambdafy")
     val cleanupPhase                 = phaseNamed("cleanup")
     val icodePhase                   = phaseNamed("icode")
     val inlinerPhase                 = phaseNamed("inliner")
-    val inlineExceptionHandlersPhase = phaseNamed("inlineExceptionHandlers")
+    val inlineExceptionHandlersPhase = phaseNamed("inlinehandlers")
     val closelimPhase                = phaseNamed("closelim")
     val dcePhase                     = phaseNamed("dce")
-    val jvmPhase                     = phaseNamed("jvm")
-    // val msilPhase                    = phaseNamed("msil")
+    // val jvmPhase                     = phaseNamed("jvm")
 
     def runIsAt(ph: Phase)   = globalPhase.id == ph.id
-    def runIsPast(ph: Phase) = globalPhase.id > ph.id
-    // def runIsAtBytecodeGen   = (runIsAt(jvmPhase) || runIsAt(msilPhase))
     def runIsAtOptimiz       = {
       runIsAt(inlinerPhase)                 || // listing phases in full for robustness when -Ystop-after has been given.
       runIsAt(inlineExceptionHandlersPhase) ||
@@ -1446,7 +1474,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     /** add unit to be compiled in this run */
     private def addUnit(unit: CompilationUnit) {
       unitbuf += unit
-      _unitbufSize += 1 // counting as they're added so size is cheap
       compiledFiles += unit.source.file.path
     }
     private def checkDeprecatedSettings(unit: CompilationUnit) {
@@ -1462,18 +1489,21 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     /* !!! Note: changing this to unitbuf.toList.iterator breaks a bunch
        of tests in tests/res.  This is bad, it means the resident compiler
        relies on an iterator of a mutable data structure reflecting changes
-       made to the underlying structure (in whatever accidental way it is
-       currently depending upon.)
+       made to the underlying structure.
      */
     def units: Iterator[CompilationUnit] = unitbuf.iterator
 
     def registerPickle(sym: Symbol): Unit = ()
 
     /** does this run compile given class, module, or case factory? */
+    // NOTE: Early initialized members temporarily typechecked before the enclosing class, see typedPrimaryConstrBody!
+    //       Here we work around that wrinkle by claiming that a top-level, early-initialized member is compiled in
+    //       *every* run. This approximation works because this method is exclusively called with `this` == `currentRun`.
     def compiles(sym: Symbol): Boolean =
       if (sym == NoSymbol) false
       else if (symSource.isDefinedAt(sym)) true
-      else if (!sym.owner.isPackageClass) compiles(sym.enclosingTopLevelClass)
+      else if (sym.isTopLevel && sym.isEarlyInitialized) true
+      else if (!sym.isTopLevel) compiles(sym.enclosingTopLevelClass)
       else if (sym.isModuleClass) compiles(sym.sourceModule)
       else false
 
@@ -1494,13 +1524,29 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
 
       if (canCheck) {
         phase = globalPhase
-        if (globalPhase.id >= icodePhase.id) icodeChecker.checkICodes
-        else treeChecker.checkTrees
+        if (globalPhase.id >= icodePhase.id) icodeChecker.checkICodes()
+        else treeChecker.checkTrees()
       }
     }
 
-    private def showMembers() =
-      opt.showNames foreach (x => showDef(x, opt.declsOnly, globalPhase))
+    private def showMembers() = {
+      // Allows for syntax like scalac -Xshow-class Random@erasure,typer
+      def splitClassAndPhase(str: String, term: Boolean): Name = {
+        def mkName(s: String) = if (term) newTermName(s) else newTypeName(s)
+        (str indexOf '@') match {
+          case -1   => mkName(str)
+          case idx  =>
+            val phasePart = str drop (idx + 1)
+            settings.Yshow.tryToSetColon(phasePart split ',' toList)
+            mkName(str take idx)
+        }
+      }
+      if (settings.Xshowcls.isSetByUser)
+        showDef(splitClassAndPhase(settings.Xshowcls.value, term = false), declsOnly = false, globalPhase)
+
+      if (settings.Xshowobj.isSetByUser)
+        showDef(splitClassAndPhase(settings.Xshowobj.value, term = true), declsOnly = false, globalPhase)
+    }
 
     // Similarly, this will only be created under -Yshow-syms.
     object trackerFactory extends SymbolTrackers {
@@ -1508,7 +1554,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
       lazy val trackers = currentRun.units.toList map (x => SymbolTracker(x))
       def snapshot() = {
         inform("\n[[symbol layout at end of " + phase + "]]")
-        afterPhase(phase) {
+        exitingPhase(phase) {
           trackers foreach { t =>
             t.snapshot()
             inform(t.show("Heading from " + phase.prev.name + " to " + phase.name))
@@ -1518,6 +1564,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     }
 
     def reportCompileErrors() {
+      if (!reporter.hasErrors && reporter.hasWarnings && settings.fatalWarnings)
+        globalError("No warnings can be incurred under -Xfatal-warnings.")
+
       if (reporter.hasErrors) {
         for ((sym, file) <- symSource.iterator) {
           sym.reset(new loaders.SourcefileLoader(file))
@@ -1526,7 +1575,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
         }
       }
       else {
-        allConditionalWarnings foreach (_.summarize)
+        allConditionalWarnings foreach (_.summarize())
 
         if (seenMacroExpansionsFallingBack)
           warning("some macros could not be expanded and code fell back to overridden methods;"+
@@ -1535,38 +1584,31 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
       }
     }
 
-    /** Compile list of source files */
-    def compileSources(_sources: List[SourceFile]) {
-      val depSources = dependencyAnalysis calculateFiles _sources.distinct
-      val sources    = coreClassesFirst(depSources)
-      // there is a problem already, e.g. a plugin was passed a bad option
-      if (reporter.hasErrors)
-        return
+    /** Caching member symbols that are def-s in Definitions because they might change from Run to Run. */
+    val runDefinitions: definitions.RunDefinitions = new definitions.RunDefinitions
 
-      // nothing to compile, but we should still report use of deprecated options
-      if (sources.isEmpty) {
+    /** Compile list of source files,
+     *  unless there is a problem already,
+     *  such as a plugin having been passed a bad option.
+     */
+    def compileSources(sources: List[SourceFile]) = if (!reporter.hasErrors) {
+
+      def checkDeprecations() = {
         checkDeprecatedSettings(newCompilationUnit(""))
         reportCompileErrors()
-        return
       }
 
-      compileUnits(sources map (new CompilationUnit(_)), firstPhase)
-    }
+      val units = sources map scripted map (new CompilationUnit(_))
 
-    def compileUnits(units: List[CompilationUnit], fromPhase: Phase) {
-      try compileUnitsInternal(units, fromPhase)
-      catch { case ex: Throwable =>
-        val shown = if (settings.verbose.value) {
-          val pw = new java.io.PrintWriter(new java.io.StringWriter)
-          ex.printStackTrace(pw)
-          pw.toString
-        } else ex.getClass.getName
-        // ex.printStackTrace(Console.out) // DEBUG for fsc, note that error stacktraces do not print in fsc
-        globalError(supplementErrorMessage("uncaught exception during compilation: " + shown))
-        throw ex
+      units match {
+        case Nil => checkDeprecations()   // nothing to compile, report deprecated options
+        case _   => compileUnits(units, firstPhase)
       }
     }
 
+    def compileUnits(units: List[CompilationUnit], fromPhase: Phase): Unit =
+      compileUnitsInternal(units, fromPhase)
+
     private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) {
       doInvalidation()
 
@@ -1580,67 +1622,66 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
      while (globalPhase.hasNext && !reporter.hasErrors) {
         val startTime = currentTime
         phase = globalPhase
-        globalPhase.run
+        globalPhase.run()
 
         // progress update
         informTime(globalPhase.description, startTime)
-
-        if (opt.writeICodeAtICode || (opt.printPhase && runIsAtOptimiz)) {
+        val shouldWriteIcode = (
+             (settings.writeICode.isSetByUser && (settings.writeICode containsPhase globalPhase))
+          || (!settings.Xprint.doAllPhases && (settings.Xprint containsPhase globalPhase) && runIsAtOptimiz)
+        )
+        if (shouldWriteIcode) {
           // Write *.icode files when -Xprint-icode or -Xprint:<some-optimiz-phase> was given.
           writeICode()
-        } else if (opt.printPhase || opt.printLate && runIsAt(cleanupPhase)) {
+        } else if ((settings.Xprint containsPhase globalPhase) || settings.printLate && runIsAt(cleanupPhase)) {
           // print trees
-          if (opt.showTrees) nodePrinters.printAll()
+          if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) nodePrinters.printAll()
           else printAllUnits()
         }
 
         // print the symbols presently attached to AST nodes
-        if (opt.showSymbols)
+        if (settings.Yshowsyms)
           trackerFactory.snapshot()
 
         // print members
-        if (opt.showPhase)
+        if (settings.Yshow containsPhase globalPhase)
           showMembers()
 
         // browse trees with swing tree viewer
-        if (opt.browsePhase)
+        if (settings.browse containsPhase globalPhase)
           treeBrowser browse (phase.name, units)
 
         // move the pointer
         globalPhase = globalPhase.next
 
         // run tree/icode checkers
-        if (opt.checkPhase)
+        if (settings.check containsPhase globalPhase.prev)
           runCheckers()
 
         // output collected statistics
-        if (opt.printStats)
+        if (settings.Ystatistics)
           statistics.print(phase)
 
-        advancePhase
+        advancePhase()
       }
 
       if (traceSymbolActivity)
         units map (_.body) foreach (traceSymbols recordSymbolsInTree _)
 
       // In case no phase was specified for -Xshow-class/object, show it now for sure.
-      if (opt.noShow)
+      if (settings.Yshow.isDefault)
         showMembers()
 
       reportCompileErrors()
       symSource.keys foreach (x => resetPackageClass(x.owner))
       informTime("total", startTime)
 
-      // record dependency data
-      if (!dependencyAnalysis.off)
-        dependencyAnalysis.saveDependencyAnalysis()
-
       // Clear any sets or maps created via perRunCaches.
       perRunCaches.clearAll()
 
       // Reset project
       if (!stopPhase("namer")) {
-        atPhase(namerPhase) {
+        enteringPhase(namerPhase) {
           resetProjectClasses(RootClass)
         }
       }
@@ -1656,7 +1697,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
     def compile(filenames: List[String]) {
       try {
         val sources: List[SourceFile] =
-          if (isScriptRun && filenames.size > 1) returning(Nil)(_ => globalError("can only compile one script at a time"))
+          if (settings.script.isSetByUser && filenames.size > 1) returning(Nil)(_ => globalError("can only compile one script at a time"))
           else filenames map getSourceFile
 
         compileSources(sources)
@@ -1664,12 +1705,18 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
       catch { case ex: IOException => globalError(ex.getMessage()) }
     }
 
+    /** If this compilation is scripted, convert the source to a script source. */
+    private def scripted(s: SourceFile) = s match {
+      case b: BatchSourceFile if settings.script.isSetByUser => ScriptSourceFile(b)
+      case _ => s
+    }
+
     /** Compile abstract file until `globalPhase`, but at least
      *  to phase "namer".
      */
     def compileLate(file: AbstractFile) {
       if (!compiledFiles(file.path))
-        compileLate(new CompilationUnit(getSourceFile(file)))
+        compileLate(new CompilationUnit(scripted(getSourceFile(file))))
     }
 
     /** Compile abstract file until `globalPhase`, but at least to phase "namer".
@@ -1680,8 +1727,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
       if (firstPhase ne null) { // we might get here during initialization, if a source is newer than the binary
         val maxId = math.max(globalPhase.id, typerPhase.id)
         firstPhase.iterator takeWhile (_.id < maxId) foreach (ph =>
-          atPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit))
-        refreshProgress
+          enteringPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit))
+        refreshProgress()
       }
     }
 
@@ -1689,56 +1736,16 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
      *  is needed for?)
      */
     private def resetPackageClass(pclazz: Symbol) {
-      atPhase(firstPhase) {
-        pclazz.setInfo(atPhase(typerPhase)(pclazz.info))
+      enteringPhase(firstPhase) {
+        pclazz.setInfo(enteringPhase(typerPhase)(pclazz.info))
       }
       if (!pclazz.isRoot) resetPackageClass(pclazz.owner)
     }
-
-    /**
-     * Re-orders the source files to
-     *  1. This Space Intentionally Left Blank
-     *  2. LowPriorityImplicits / EmbeddedControls (i.e. parents of Predef)
-     *  3. the rest
-     *
-     * 1 is to avoid cyclic reference errors.
-     * 2 is due to the following. When completing "Predef" (*), typedIdent is called
-     * for its parents (e.g. "LowPriorityImplicits"). typedIdent checks whether
-     * the symbol reallyExists, which tests if the type of the symbol after running
-     * its completer is != NoType.
-     * If the "namer" phase has not yet run for "LowPriorityImplicits", the symbol
-     * has a SourcefileLoader as type. Calling "doComplete" on it does nothing at
-     * all, because the source file is part of the files to be compiled anyway.
-     * So the "reallyExists" test will return "false".
-     * Only after the namer, the symbol has a lazy type which actually computes
-     * the info, and "reallyExists" behaves as expected.
-     * So we need to make sure that the "namer" phase is run on predef's parents
-     * before running it on predef.
-     *
-     * (*) Predef is completed early when calling "mkAttributedRef" during the
-     *   addition of "import Predef._" to sourcefiles. So this situation can't
-     *   happen for user classes.
-     *
-     */
-    private def coreClassesFirst(files: List[SourceFile]) = {
-      val goLast = 4
-      def rank(f: SourceFile) = {
-        if (f.file.container.name != "scala") goLast
-        else f.file.name match {
-          case "LowPriorityImplicits.scala"   => 2
-          case "StandardEmbeddings.scala"     => 2
-          case "EmbeddedControls.scala"       => 2
-          case "Predef.scala"                 => 3 /* Predef.scala before Any.scala, etc. */
-          case _                              => goLast
-        }
-      }
-      files sortBy rank
-    }
   } // class Run
 
   def printAllUnits() {
     print("[[syntax trees at end of %25s]]".format(phase))
-    afterPhase(phase)(currentRun.units foreach { unit =>
+    exitingPhase(phase)(currentRun.units foreach { unit =>
       nodePrinters showUnit unit
     })
   }
@@ -1747,7 +1754,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
    */
   def showDef(fullName: Name, declsOnly: Boolean, ph: Phase) = {
     val boringOwners = Set[Symbol](definitions.AnyClass, definitions.AnyRefClass, definitions.ObjectClass)
-    def phased[T](body: => T): T = afterPhase(ph)(body)
+    def phased[T](body: => T): T = exitingPhase(ph)(body)
     def boringMember(sym: Symbol) = boringOwners(sym.owner)
     def symString(sym: Symbol) = if (sym.isTerm) sym.defString else sym.toString
 
@@ -1792,10 +1799,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
   private def writeICode() {
     val printer = new icodes.TextPrinter(null, icodes.linearizer)
     icodes.classes.values.foreach((cls) => {
-      val suffix = if (cls.symbol.hasModuleFlag) "$.icode" else ".icode"
-      var file = getFile(cls.symbol, suffix)
-//      if (file.exists())
-//        file = new File(file.getParentFile(), file.getName() + "1")
+      val moduleSfx = if (cls.symbol.hasModuleFlag) "$" else ""
+      val phaseSfx  = if (settings.debug) phase else "" // only for debugging, appending the full phasename breaks windows build
+      val file      = getFile(cls.symbol, s"$moduleSfx$phaseSfx.icode")
+
       try {
         val stream = new FileOutputStream(file)
         printer.setWriter(new PrintWriter(stream, true))
@@ -1803,25 +1810,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
         informProgress("wrote " + file)
       } catch {
         case ex: IOException =>
-          if (opt.debug) ex.printStackTrace()
+          if (settings.debug) ex.printStackTrace()
         globalError("could not write file " + file)
       }
     })
   }
-  // In order to not outright break code which overrides onlyPresentation (like sbt 0.7.5.RC0)
-  // I restored and deprecated it.  That would be enough to avoid the compilation
-  // failure, but the override wouldn't accomplish anything.  So now forInteractive
-  // and forScaladoc default to onlyPresentation, which is the same as defaulting
-  // to false except in old code.  The downside is that this leaves us calling a
-  // deprecated method: but I see no simple way out, so I leave it for now.
-  def forJVM           = opt.jvm
-  override def forMSIL = opt.msil
-  def forInteractive   = onlyPresentation
-  def forScaladoc      = onlyPresentation
   def createJavadoc    = false
-
-  @deprecated("Use forInteractive or forScaladoc, depending on what you're after", "2.9.0")
-  def onlyPresentation = false
 }
 
 object Global {
diff --git a/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala b/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala
new file mode 100644
index 0000000..6921548
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala
@@ -0,0 +1,30 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala
+package tools
+package nsc
+
+/**
+ * Symbol loaders implementation that wires dependencies using Global.
+ */
+abstract class GlobalSymbolLoaders extends symtab.SymbolLoaders {
+  val global: Global
+  val symbolTable: global.type = global
+  val platform: symbolTable.platform.type
+  import global._
+  def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol = {
+    def lookup = sym.info.member(name)
+    // if loading during initialization of `definitions` typerPhase is not yet set.
+    // in that case we simply load the member at the current phase
+    if (currentRun.typerPhase eq null)
+      lookup
+    else
+      enteringTyper { lookup }
+  }
+
+  protected def compileLate(srcfile: io.AbstractFile): Unit =
+    currentRun.compileLate(srcfile)
+}
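
A hedged sketch of the fallback above as a free-standing helper (illustrative name, assumes an initialized Global): use the typer phase when a Run with a typer phase exists, otherwise evaluate at whatever phase is current.

    object TyperPhaseFallback {
      import scala.tools.nsc.Global
      def atTyperIfPossible[T](global: Global)(op: => T): T =
        if ((global.currentRun eq null) || (global.currentRun.typerPhase eq null)) op
        else global.enteringTyper(op)
    }
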
diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala
index 7d112df..a66ee57 100644
--- a/src/compiler/scala/tools/nsc/Main.scala
+++ b/src/compiler/scala/tools/nsc/Main.scala
@@ -2,80 +2,26 @@
  * Copyright 2005-2013 LAMP/EPFL
  * @author  Martin Odersky
  */
+package scala.tools
+package nsc
 
-package scala.tools.nsc
-
-import java.io.File
-import File.pathSeparator
-
-import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager }
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.reflect.internal.util.{ BatchSourceFile, FakePos } //{Position}
-import Properties.msilLibPath
+import scala.language.postfixOps
 
 /** The main class for NSC, a compiler for the programming
- *  language Scala. 
+ *  language Scala.
  */
-object Main extends Driver with EvalLoop {
-
-  def resident(compiler: Global) {
-    loop { line =>
-      val args = line.split(' ').toList
-      val command = new CompilerCommand(args, new Settings(scalacError))
-      compiler.reporter.reset()
-      new compiler.Run() compile command.files
-    }
+class MainClass extends Driver with EvalLoop {
+  def resident(compiler: Global): Unit = loop { line =>
+    val command = new CompilerCommand(line split "\\s+" toList, new Settings(scalacError))
+    compiler.reporter.reset()
+    new compiler.Run() compile command.files
   }
 
-  override def processSettingsHook(): Boolean =
-    if (settings.Yidedebug.value) {
-      settings.Xprintpos.value = true
-      settings.Yrangepos.value = true
-      val compiler = new interactive.Global(settings, reporter)
-      import compiler.{ reporter => _, _ }
-
-      val sfs = command.files map getSourceFile
-      val reloaded = new interactive.Response[Unit]
-      askReload(sfs, reloaded)
-
-      reloaded.get.right.toOption match {
-        case Some(ex) => reporter.cancelled = true // Causes exit code to be non-0
-        case None => reporter.reset() // Causes other compiler errors to be ignored
-      }
-      askShutdown
-      false
-    }
-    else if (settings.Ybuilderdebug.value != "none") {
-      def fileSet(files : List[String]) = Set.empty ++ (files map AbstractFile.getFile)
-
-      val buildManager = settings.Ybuilderdebug.value match {
-        case "simple"   => new SimpleBuildManager(settings)
-        case _          => new RefinedBuildManager(settings)
-      }
-      buildManager.addSourceFiles(fileSet(command.files))
-
-      // enter resident mode
-      loop { line =>
-        val args = line.split(' ').toList
-        val command = new CompilerCommand(args.toList, settings)
-        buildManager.update(fileSet(command.files), Set.empty)
-      }
-      false
-    }
-    else {
-      if (settings.target.value == "msil")
-        msilLibPath foreach (x => settings.assemrefs.value += (pathSeparator + x))
-      true
-    }
-
-  override def newCompiler(): Global =
-    if (settings.Yrangepos.value) new Global(settings, reporter) with interactive.RangePositions
-    else Global(settings, reporter)
-
+  override def newCompiler(): Global = Global(settings, reporter)
   override def doCompile(compiler: Global) {
-    if (settings.resident.value)
-      resident(compiler)
+    if (settings.resident) resident(compiler)
     else super.doCompile(compiler)
   }
 }
+
+object Main extends MainClass { }
diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala
index f18ff19..03190a6 100644
--- a/src/compiler/scala/tools/nsc/MainBench.scala
+++ b/src/compiler/scala/tools/nsc/MainBench.scala
@@ -5,28 +5,20 @@
 
 package scala.tools.nsc
 
-import java.io.File
-import File.pathSeparator
-
-import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager }
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.reflect.internal.util.{ BatchSourceFile, FakePos } //{Position}
-import Properties.{ versionString, copyrightString, residentPromptString, msilLibPath }
 import scala.reflect.internal.util.Statistics
 
 /** The main class for NSC, a compiler for the programming
  *  language Scala.
  */
 object MainBench extends Driver with EvalLoop {
-  
+
   lazy val theCompiler = Global(settings, reporter)
-  
+
   override def newCompiler() = theCompiler
-  
+
   val NIter = 50
   val NBest = 10
-  
+
   override def main(args: Array[String]) = {
     val times = new Array[Long](NIter)
     var start = System.nanoTime()
diff --git a/src/compiler/scala/tools/nsc/MainGenericRunner.scala b/src/compiler/scala/tools/nsc/MainGenericRunner.scala
deleted file mode 100644
index e4a20b4..0000000
--- a/src/compiler/scala/tools/nsc/MainGenericRunner.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2006-2013 LAMP/EPFL
- * @author  Lex Spoon
- */
-
-package scala.tools.nsc
-
-import java.net.URL
-import scala.tools.util.PathResolver
-import io.{ File }
-import util.{ ClassPath, ScalaClassLoader }
-import Properties.{ versionString, copyrightString }
-import interpreter.{ ILoop }
-import GenericRunnerCommand._
-
-object JarRunner extends CommonRunner {
-  def runJar(settings: GenericRunnerSettings, jarPath: String, arguments: Seq[String]): Either[Throwable, Boolean] = {
-    val jar       = new io.Jar(jarPath)
-    val mainClass = jar.mainClass getOrElse sys.error("Cannot find main class for jar: " + jarPath)
-    val jarURLs   = ClassPath expandManifestPath jarPath
-    val urls      = if (jarURLs.isEmpty) File(jarPath).toURL +: settings.classpathURLs else jarURLs
-
-    if (settings.Ylogcp.value) {
-      Console.err.println("Running jar with these URLs as the classpath:")
-      urls foreach println
-    }
-
-    runAndCatch(urls, mainClass, arguments)
-  }
-}
-
-/** An object that runs Scala code.  It has three possible
-  * sources for the code to run: pre-compiled code, a script file,
-  * or interactive entry.
-  */
-class MainGenericRunner {
-  def errorFn(ex: Throwable): Boolean = {
-    ex.printStackTrace()
-    false
-  }
-  def errorFn(str: String): Boolean = {
-    Console.err println str
-    false
-  }
-
-  def process(args: Array[String]): Boolean = {
-    val command = new GenericRunnerCommand(args.toList, (x: String) => errorFn(x))
-    import command.{ settings, howToRun, thingToRun }
-    def sampleCompiler = new Global(settings)   // def so its not created unless needed
-
-    if (!command.ok)                      return errorFn("\n" + command.shortUsageMsg)
-    else if (settings.version.value)      return errorFn("Scala code runner %s -- %s".format(versionString, copyrightString))
-    else if (command.shouldStopWithInfo)  return errorFn(command getInfoMessage sampleCompiler)
-
-    def isE   = !settings.execute.isDefault
-    def dashe = settings.execute.value
-
-    def isI   = !settings.loadfiles.isDefault
-    def dashi = settings.loadfiles.value
-
-    // Deadlocks on startup under -i unless we disable async.
-    if (isI)
-      settings.Yreplsync.value = true
-
-    def combinedCode  = {
-      val files   = if (isI) dashi map (file => File(file).slurp()) else Nil
-      val str     = if (isE) List(dashe) else Nil
-
-      files ++ str mkString "\n\n"
-    }
-
-    def runTarget(): Either[Throwable, Boolean] = howToRun match {
-      case AsObject =>
-        ObjectRunner.runAndCatch(settings.classpathURLs, thingToRun, command.arguments)
-      case AsScript =>
-        ScriptRunner.runScriptAndCatch(settings, thingToRun, command.arguments)
-      case AsJar    =>
-        JarRunner.runJar(settings, thingToRun, command.arguments)
-      case Error =>
-        Right(false)
-      case _  =>
-        // We start the repl when no arguments are given.
-        Right(new ILoop process settings)
-    }
-
-    /** If -e and -i were both given, we want to execute the -e code after the
-     *  -i files have been included, so they are read into strings and prepended to
-     *  the code given in -e.  The -i option is documented to only make sense
-     *  interactively so this is a pretty reasonable assumption.
-     *
-     *  This all needs a rewrite though.
-     */
-    if (isE) {
-      ScriptRunner.runCommand(settings, combinedCode, thingToRun +: command.arguments)
-    }
-    else runTarget() match {
-      case Left(ex) => errorFn(ex)
-      case Right(b) => b
-    }
-  }
-}
-
-object MainGenericRunner extends MainGenericRunner {
-  def main(args: Array[String]) {
-    if (!process(args))
-      sys.exit(1)
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
index 50cd51d..84eb688 100644
--- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala
+++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 
 import scala.tools.nsc.reporters.ConsoleReporter
 
@@ -21,11 +22,11 @@ object MainTokenMetric {
     var totale = 0
     for (source <- fnames) {
       val s = new UnitScanner(new CompilationUnit(compiler.getSourceFile(source)))
-      s.nextToken
+      s.nextToken()
       var i = 0
       while (s.token != EOF) {
         i += 1
-        s.nextToken
+        s.nextToken()
       }
       Console.println(i.toString + " " + source.toString())
       totale += i
@@ -42,9 +43,9 @@ object MainTokenMetric {
       tokenMetric(compiler, command.files)
     } catch {
       case ex @ FatalError(msg) =>
-        if (command.settings.debug.value)
-          ex.printStackTrace();
-      reporter.error(null, "fatal error: " + msg)
+        if (command.settings.debug)
+          ex.printStackTrace()
+        reporter.error(null, "fatal error: " + msg)
     }
   }
 
diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala
index f512351..95264ae 100644
--- a/src/compiler/scala/tools/nsc/ObjectRunner.scala
+++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala
@@ -8,15 +8,9 @@ package scala.tools.nsc
 
 import java.net.URL
 import util.ScalaClassLoader
-import java.lang.reflect.InvocationTargetException
 import util.Exceptional.unwrap
 
 trait CommonRunner {
-  /** Check whether a class with the specified name
-   *  exists on the specified class path. */
-  def classExists(urls: List[URL], objectName: String): Boolean =
-    ScalaClassLoader.classExists(urls, objectName)
-
   /** Run a given object, specified by name, using a
    *  specified classpath and argument list.
    *
diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
index caf6ad1..899aa93 100644
--- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
@@ -26,8 +26,8 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext
       // instead of whatever it's supposed to be doing.
       val baseDirectory = {
         val pwd = System.getenv("PWD")
-        if (pwd != null && !isWin) Directory(pwd)
-        else Directory.Current getOrElse Directory("/")
+        if (pwd == null || isWin) Directory.Current getOrElse Directory("/")
+        else Directory(pwd)
       }
       currentDir.value = baseDirectory.path
     }
@@ -39,7 +39,7 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext
 
   override def cmdName = "fsc"
   override def usageMsg = (
-    createUsageMsg("where possible fsc", false, x => x.isStandard && settings.isFscSpecific(x.name)) +
+    createUsageMsg("where possible fsc", shouldExplain = false, x => x.isStandard && settings.isFscSpecific(x.name)) +
     "\n\nStandard scalac options also available:" +
     createUsageMsg(x => x.isStandard && !settings.isFscSpecific(x.name))
   )
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index cff3590..cfb4cd2 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -6,15 +6,12 @@
 
 package scala.tools.nsc
 
-import java.io.{ BufferedWriter, FileWriter }
 import scala.collection.mutable
 import scala.language.postfixOps
 
-/**
- * PhaseAssembly
- * Trait made to separate the constraint solving of the phase order from
- * the rest of the compiler. See SIP 00002
- *
+/** Converts an unordered morass of components into an order that
+ *  satisfies their mutual constraints.
+ *  @see SIP 00002. You have read SIP 00002?
  */
 trait PhaseAssembly {
   self: Global =>
@@ -23,18 +20,16 @@ trait PhaseAssembly {
    * Auxiliary data structure for solving the constraint system
    * The dependency graph container with helper methods for node and edge creation
    */
-  class DependencyGraph {
+  private class DependencyGraph {
 
-    /**
-     * Simple edge with to and from refs
-     */
-    class Edge(var frm: Node, var to: Node, var hard: Boolean)
+    /** Simple edge with to and from refs */
+    case class Edge(var frm: Node, var to: Node, var hard: Boolean)
 
     /**
      * Simple node with name and object ref for the phase object,
      * also sets of in and out going dependencies
      */
-    class Node(name: String) {
+    case class Node(name: String) {
       val phasename = name
       var phaseobj: Option[List[SubComponent]] = None
       val after = new mutable.HashSet[Edge]()
@@ -51,11 +46,11 @@ trait PhaseAssembly {
     val nodes = new mutable.HashMap[String,Node]()
     val edges = new mutable.HashSet[Edge]()
 
-    /* Given a phase object, get the node for this phase object. If the
-     * node object does not exist, then create it.
+    /** Given a phase object, get the node for this phase object. If the
+     *  node object does not exist, then create it.
      */
     def getNodeByPhase(phs: SubComponent): Node = {
-      var node: Node = getNodeByPhase(phs.phaseName)
+      val node: Node = getNodeByPhase(phs.phaseName)
       node.phaseobj match {
         case None =>
           node.phaseobj = Some(List[SubComponent](phs))
@@ -75,7 +70,7 @@ trait PhaseAssembly {
      * list of the nodes
      */
     def softConnectNodes(frm: Node, to: Node) {
-      var e = new Edge(frm, to, false)
+      val e = new Edge(frm, to, false)
       this.edges += e
 
       frm.after += e
@@ -87,7 +82,7 @@ trait PhaseAssembly {
      * list of the nodes
      */
     def hardConnectNodes(frm: Node, to: Node) {
-      var e = new Edge(frm, to, true)
+      val e = new Edge(frm, to, true)
       this.edges += e
 
       frm.after += e
@@ -105,9 +100,8 @@ trait PhaseAssembly {
      */
     def collapseHardLinksAndLevels(node: Node, lvl: Int) {
       if (node.visited) {
-        throw new FatalError(
-          "Cycle in compiler phase dependencies detected, phase " +
-          node.phasename + " reacted twice!")
+        dump("phase-cycle")
+        throw new FatalError(s"Cycle in phase dependencies detected at ${node.phasename}, created phase-cycle.dot")
       }
 
       if (node.level < lvl) node.level = lvl
@@ -140,7 +134,8 @@ trait PhaseAssembly {
       var hardlinks = edges.filter(_.hard)
       for (hl <- hardlinks) {
         if (hl.frm.after.size > 1) {
-          throw new FatalError("phase " + hl.frm.phasename + " want to run right after " + hl.to.phasename + ", but some phase has declared to run before " + hl.frm.phasename + ". Re-run with -Xgenerate-phase-graph <filename> to better see the problem.")
+          dump("phase-order")
+          throw new FatalError(s"Phase ${hl.frm.phasename} can't follow ${hl.to.phasename}, created phase-order.dot")
         }
       }
 
@@ -149,23 +144,17 @@ trait PhaseAssembly {
         rerun = false
         hardlinks = edges.filter(_.hard)
         for (hl <- hardlinks) {
-          var sanity = Nil ++ hl.to.before.filter(_.hard)
+          val sanity = Nil ++ hl.to.before.filter(_.hard)
           if (sanity.length == 0) {
             throw new FatalError("There is no runs right after dependency, where there should be one! This is not supposed to happen!")
           } else if (sanity.length > 1) {
-            var msg = "Multiple phases want to run right after the phase " + sanity.head.to.phasename + "\n"
-            msg += "Phases: "
-            sanity = sanity sortBy (_.frm.phasename)
-            for (edge <- sanity) {
-              msg += edge.frm.phasename + ", "
-            }
-            msg += "\nRe-run with -Xgenerate-phase-graph <filename> to better see the problem."
-            throw new FatalError(msg)
-
+            dump("phase-order")
+            val following = (sanity map (_.frm.phasename)).sorted mkString ","
+            throw new FatalError(s"Multiple phases want to run right after ${sanity.head.to.phasename}; followers: $following; created phase-order.dot")
           } else {
 
-            var promote = hl.to.before.filter(e => (!e.hard))
-            hl.to.before.clear
+            val promote = hl.to.before.filter(e => (!e.hard))
+            hl.to.before.clear()
             sanity foreach (edge => hl.to.before += edge)
             for (edge <- promote) {
               rerun = true
@@ -182,7 +171,7 @@ trait PhaseAssembly {
 
     /** Remove all nodes in the given graph, that have no phase object
      *  Make sure to clean up all edges when removing the node object
-     *  <code>Inform</code> with warnings, if an external phase has a
+     *  `Inform` with warnings, if an external phase has a
      *  dependency on something that is dropped.
      */
     def removeDanglingNodes() {
@@ -199,39 +188,38 @@ trait PhaseAssembly {
         }
       }
     }
+
+    def dump(title: String = "phase-assembly") = graphToDotFile(this, s"$title.dot")
   }
 
-  /* Method called from computePhaseDescriptors in class Global
-   */
-  def buildCompilerFromPhasesSet(): List[SubComponent] = {
+
+  /** Called by Global#computePhaseDescriptors to compute phase order. */
+  def computePhaseAssembly(): List[SubComponent] = {
 
     // Add all phases in the set to the graph
     val graph = phasesSetToDepGraph(phasesSet)
 
+    val dot = if (settings.genPhaseGraph.isSetByUser) Some(settings.genPhaseGraph.value) else None
+
     // Output the phase dependency graph at this stage
-    if (settings.genPhaseGraph.value != "")
-      graphToDotFile(graph, settings.genPhaseGraph.value + "1.dot")
+    def dump(stage: Int) = dot foreach (n => graphToDotFile(graph, s"$n-$stage.dot"))
+
+    dump(1)
 
     // Remove nodes without phaseobj
     graph.removeDanglingNodes()
 
-    // Output the phase dependency graph at this stage
-    if (settings.genPhaseGraph.value != "")
-      graphToDotFile(graph, settings.genPhaseGraph.value + "2.dot")
+    dump(2)
 
     // Validate and Enforce hardlinks / runsRightAfter and promote nodes down the tree
     graph.validateAndEnforceHardlinks()
 
-    // Output the phase dependency graph at this stage
-    if (settings.genPhaseGraph.value != "")
-      graphToDotFile(graph, settings.genPhaseGraph.value + "3.dot")
+    dump(3)
 
     // test for cycles, assign levels and collapse hard links into nodes
     graph.collapseHardLinksAndLevels(graph.getNodeByPhase("parser"), 1)
 
-    // Output the phase dependency graph at this stage
-    if (settings.genPhaseGraph.value != "")
-      graphToDotFile(graph, settings.genPhaseGraph.value + "4.dot")
+    dump(4)
 
     // assemble the compiler
     graph.compilerPhaseList()
@@ -245,7 +233,7 @@ trait PhaseAssembly {
 
     for (phs <- phsSet) {
 
-      var fromnode = graph.getNodeByPhase(phs)
+      val fromnode = graph.getNodeByPhase(phs)
 
       phs.runsRightAfter match {
         case None =>
@@ -288,16 +276,11 @@ trait PhaseAssembly {
     sbuf.append("digraph G {\n")
     for (edge <- graph.edges) {
       sbuf.append("\"" + edge.frm.allPhaseNames + "(" + edge.frm.level + ")" + "\"->\"" + edge.to.allPhaseNames + "(" + edge.to.level + ")" + "\"")
-      if (! edge.frm.phaseobj.get.head.internal) {
-               extnodes += edge.frm
-      }
-      edge.frm.phaseobj match { case None => null case Some(ln) => if(ln.size > 1) fatnodes += edge.frm }
-      edge.to.phaseobj match { case None => null case Some(ln) => if(ln.size > 1) fatnodes += edge.to }
-      if (edge.hard) {
-        sbuf.append(" [color=\"#0000ff\"]\n")
-      } else {
-        sbuf.append(" [color=\"#000000\"]\n")
-      }
+      if (!edge.frm.phaseobj.get.head.internal) extnodes += edge.frm
+      edge.frm.phaseobj foreach (phobjs => if (phobjs.tail.nonEmpty) fatnodes += edge.frm )
+      edge.to.phaseobj foreach (phobjs => if (phobjs.tail.nonEmpty) fatnodes += edge.to )
+      val color = if (edge.hard) "#0000ff" else "#000000"
+      sbuf.append(s""" [color="$color"]\n""")
     }
     for (node <- extnodes) {
       sbuf.append("\"" + node.allPhaseNames + "(" + node.level + ")" + "\" [color=\"#00ff00\"]\n")
@@ -306,10 +289,7 @@ trait PhaseAssembly {
       sbuf.append("\"" + node.allPhaseNames + "(" + node.level + ")" + "\" [color=\"#0000ff\"]\n")
     }
     sbuf.append("}\n")
-    var out = new BufferedWriter(new FileWriter(filename))
-    out.write(sbuf.toString)
-    out.flush()
-    out.close()
+    import reflect.io._
+    for (d <- settings.outputDirs.getSingleOutput if !d.isVirtual) Path(d.file) / File(filename) writeAll sbuf.toString
   }
-
 }
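
The reworked PhaseAssembly keeps the same overall scheme: build a dependency graph from each component's constraints, drop dangling nodes, enforce hard links, then assign levels while detecting cycles (dumping a .dot file per stage when the phase-graph setting is given). A minimal sketch of that level-assignment-with-cycle-check idea over plain phase names, independent of SubComponent (the constraint map and phase names below are illustrative):

    object PhaseOrder {
      // For each phase, the phases it must run (somewhere) after.
      def order(runsAfter: Map[String, List[String]]): List[String] = {
        val level = scala.collection.mutable.Map.empty[String, Int]

        def assign(p: String, visiting: Set[String]): Int = level.get(p) match {
          case Some(l) => l
          case None =>
            if (visiting(p)) sys.error(s"Cycle in phase dependencies detected at $p")
            val deps = runsAfter.getOrElse(p, Nil)
            val lvl  = if (deps.isEmpty) 1 else deps.map(assign(_, visiting + p)).max + 1
            level(p) = lvl
            lvl
        }

        runsAfter.keys.foreach(assign(_, Set.empty))
        level.toList.sortBy(_._2).map(_._1)
      }

      def main(args: Array[String]): Unit = {
        val constraints = Map(
          "parser" -> Nil,
          "namer"  -> List("parser"),
          "typer"  -> List("namer")
        )
        println(order(constraints)) // List(parser, namer, typer)
      }
    }

The real assembly additionally distinguishes soft edges (runsAfter) from hard edges (runsRightAfter) and collapses hard-linked phases into a single node before levelling, which is what the dumped phase-cycle.dot and phase-order.dot files are meant to help debug.
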
diff --git a/src/compiler/scala/tools/nsc/Phases.scala b/src/compiler/scala/tools/nsc/Phases.scala
deleted file mode 100644
index 0901ade..0000000
--- a/src/compiler/scala/tools/nsc/Phases.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Martin Odersky
- */
-
-package scala.tools.nsc
-
-import symtab.Flags
-import scala.reflect.internal.util.TableDef
-import scala.language.postfixOps
-
-@deprecated("Scheduled for removal as being a dead-code in the compiler.", "2.10.1")
-object Phases {
-  val MaxPhases = 64
-
-  /** A class for tracking something about each phase.
-   */
-  class Model[T] {
-    case class Cell(ph: Phase, value: T) {
-      def name = ph.name
-      def id = ph.id
-    }
-    val values                            = new Array[Cell](MaxPhases + 1)
-    def results                           = values filterNot (_ == null)
-    def apply(ph: Phase): T               = values(ph.id).value
-    def update(ph: Phase, value: T): Unit = values(ph.id) = Cell(ph, value)
-  }
-  /** A class for recording the elapsed time of each phase in the
-   *  interests of generating a classy and informative table.
-   */
-  class TimingModel extends Model[Long] {
-    var total: Long = 0
-    def table() = {
-      total = results map (_.value) sum;
-      new Format.Table(results sortBy (-_.value))
-    }
-    object Format extends TableDef[Cell] {
-      >> ("phase"   -> (_.name)) >+ "  "
-      << ("id"      -> (_.id))  >+ "  "
-      >> ("ms"      -> (_.value)) >+ "  "
-      << ("share"   -> (_.value.toDouble * 100 / total formatted "%.2f"))
-    }
-    def formatted = "" + table()
-  }
-}
-
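
The deleted Phases.scala above carried a deprecated TimingModel that recorded elapsed time per phase and rendered it as a table with a share-of-total column. A small sketch of the same bookkeeping, kept independent of the compiler's Phase and TableDef types (everything below is illustrative, not the removed API):

    object PhaseTiming {
      final case class Cell(name: String, millis: Long)

      // Record one timing per phase and render a small share-of-total table.
      def table(cells: Seq[Cell]): String = {
        val total = cells.map(_.millis).sum.max(1L)
        cells.sortBy(-_.millis)
          .map(c => f"${c.name}%-12s ${c.millis}%6d ms ${c.millis * 100.0 / total}%6.2f%%")
          .mkString("\n")
      }

      def main(args: Array[String]): Unit =
        println(table(Seq(Cell("parser", 120), Cell("typer", 480), Cell("erasure", 200))))
    }
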
diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala
index 55fd196..ed5fda9 100644
--- a/src/compiler/scala/tools/nsc/Properties.scala
+++ b/src/compiler/scala/tools/nsc/Properties.scala
@@ -12,14 +12,9 @@ object Properties extends scala.util.PropertiesTrait {
   protected def pickJarBasedOn = classOf[Global]
 
   // settings based on jar properties
-  def fileEndingString     = scalaPropOrElse("file.ending", ".scala|.java")
   def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ")
   def shellPromptString    = scalaPropOrElse("shell.prompt", "\nscala> ")
 
-  // settings based on system properties
-  def msilLibPath          = propOrNone("msil.libpath")
-
   // derived values
   def isEmacsShell         = propOrEmpty("env.emacs") != ""
-  def fileEndings          = fileEndingString.split("""\|""").toList
 }
diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/compiler/scala/tools/nsc/ScalaDoc.scala
deleted file mode 100644
index ba434bc..0000000
--- a/src/compiler/scala/tools/nsc/ScalaDoc.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/* scaladoc, a documentation generator for Scala
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Martin Odersky
- * @author  Geoffrey Washburn
- */
-
-package scala.tools.nsc
-
-import java.io.File.pathSeparator
-import scala.tools.nsc.doc.DocFactory
-import scala.tools.nsc.reporters.ConsoleReporter
-import scala.reflect.internal.util.FakePos
-import Properties.msilLibPath
-
-/** The main class for scaladoc, a front-end for the Scala compiler
- *  that generates documentation from source files.
- */
-class ScalaDoc {
-  val versionMsg = "Scaladoc %s -- %s".format(Properties.versionString, Properties.copyrightString)
-
-  def process(args: Array[String]): Boolean = {
-    var reporter: ConsoleReporter = null
-    val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\n  scaladoc -help  gives more information"),
-                                       msg => reporter.printMessage(msg))
-    reporter = new ConsoleReporter(docSettings) {
-      // need to do this so that the Global instance doesn't trash all the
-      // symbols just because there was an error
-      override def hasErrors = false
-    }
-    val command = new ScalaDoc.Command(args.toList, docSettings)
-    def hasFiles = command.files.nonEmpty || docSettings.uncompilableFiles.nonEmpty
-
-    if (docSettings.version.value)
-      reporter.echo(versionMsg)
-    else if (docSettings.Xhelp.value)
-      reporter.echo(command.xusageMsg)
-    else if (docSettings.Yhelp.value)
-      reporter.echo(command.yusageMsg)
-    else if (docSettings.showPlugins.value)
-      reporter.warning(null, "Plugins are not available when using Scaladoc")
-    else if (docSettings.showPhases.value)
-      reporter.warning(null, "Phases are restricted when using Scaladoc")
-    else if (docSettings.help.value || !hasFiles)
-      reporter.echo(command.usageMsg)
-    else try {
-      if (docSettings.target.value == "msil")
-        msilLibPath foreach (x => docSettings.assemrefs.value += (pathSeparator + x))
-
-      new DocFactory(reporter, docSettings) document command.files
-    }
-    catch {
-      case ex @ FatalError(msg) =>
-        if (docSettings.debug.value) ex.printStackTrace()
-        reporter.error(null, "fatal error: " + msg)
-    }
-    finally reporter.printSummary()
-
-    // not much point in returning !reporter.hasErrors when it has
-    // been overridden with constant false.
-    true
-  }
-}
-
-object ScalaDoc extends ScalaDoc {
-  class Command(arguments: List[String], settings: doc.Settings) extends CompilerCommand(arguments, settings) {
-    override def cmdName = "scaladoc"
-    override def usageMsg = (
-      createUsageMsg("where possible scaladoc", false, x => x.isStandard && settings.isScaladocSpecific(x.name)) +
-      "\n\nStandard scalac options also available:" +
-      createUsageMsg(x => x.isStandard && !settings.isScaladocSpecific(x.name))
-    )
-  }
-
-  def main(args: Array[String]): Unit = sys exit {
-    if (process(args)) 0 else 1
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index 107c4b3..c2d62db 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -3,11 +3,11 @@
  * @author  Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 
 import io.{ Directory, File, Path }
 import java.io.IOException
-import java.net.URL
 import scala.tools.nsc.reporters.{Reporter,ConsoleReporter}
 import util.Exceptional.unwrap
 
@@ -19,7 +19,7 @@ import util.Exceptional.unwrap
  *    exec scala "$0" "$@"
  *    !#
  *    Console.println("Hello, world!")
- *    argv.toList foreach Console.println
+ *    args.toList foreach Console.println
  *  </pre>
  *  <p>And here is a batch file example on Windows XP:</p>
  *  <pre>
@@ -29,7 +29,7 @@ import util.Exceptional.unwrap
  *    goto :eof
  *    ::!#
  *    Console.println("Hello, world!")
- *    argv.toList foreach Console.println
+ *    args.toList foreach Console.println
  *  </pre>
  *
  *  @author  Lex Spoon
@@ -49,25 +49,12 @@ class ScriptRunner extends HasCompileSocket {
     case x  => x
   }
 
-  def isScript(settings: Settings) = settings.script.value != ""
-
   /** Choose a jar filename to hold the compiled version of a script. */
   private def jarFileFor(scriptFile: String)= File(
     if (scriptFile endsWith ".jar") scriptFile
     else scriptFile.stripSuffix(".scala") + ".jar"
   )
 
-  /** Read the entire contents of a file as a String. */
-  private def contentsOfFile(filename: String) = File(filename).slurp()
-
-  /** Split a fully qualified object name into a
-   *  package and an unqualified object name */
-  private def splitObjectName(fullname: String): (Option[String], String) =
-    (fullname lastIndexOf '.') match {
-      case -1   => (None, fullname)
-      case idx  => (Some(fullname take idx), fullname drop (idx + 1))
-    }
-
   /** Compile a script using the fsc compilation daemon.
    */
   private def compileWithDaemon(settings: GenericRunnerSettings, scriptFileIn: String) = {
@@ -98,8 +85,8 @@ class ScriptRunner extends HasCompileSocket {
   {
     def mainClass = scriptMain(settings)
 
-    /** Compiles the script file, and returns the directory with the compiled
-     *  class files, if the compilation succeeded.
+    /* Compiles the script file, and returns the directory with the compiled
+     * class files, if the compilation succeeded.
      */
     def compile: Option[Directory] = {
       val compiledPath = Directory makeTemp "scalascript"
@@ -109,9 +96,9 @@ class ScriptRunner extends HasCompileSocket {
 
       settings.outdir.value = compiledPath.path
 
-      if (settings.nc.value) {
-        /** Setting settings.script.value informs the compiler this is not a
-         *  self contained compilation unit.
+      if (settings.nc) {
+        /* Setting settings.script.value informs the compiler this is not a
+         * self contained compilation unit.
          */
         settings.script.value = mainClass
         val reporter = new ConsoleReporter(settings)
@@ -124,11 +111,11 @@ class ScriptRunner extends HasCompileSocket {
       else None
     }
 
-    /** The script runner calls sys.exit to communicate a return value, but this must
-     *  not take place until there are no non-daemon threads running.  Tickets #1955, #2006.
+    /* The script runner calls sys.exit to communicate a return value, but this must
+     * not take place until there are no non-daemon threads running.  Tickets #1955, #2006.
      */
     util.waitingForThreads {
-      if (settings.save.value) {
+      if (settings.save) {
         val jarFile = jarFileFor(scriptFile)
         def jarOK   = jarFile.canRead && (jarFile isFresher File(scriptFile))
 
diff --git a/src/compiler/scala/tools/nsc/SubComponent.scala b/src/compiler/scala/tools/nsc/SubComponent.scala
index a0468a2..b21d156 100644
--- a/src/compiler/scala/tools/nsc/SubComponent.scala
+++ b/src/compiler/scala/tools/nsc/SubComponent.scala
@@ -19,19 +19,30 @@ abstract class SubComponent {
   /** The name of the phase */
   val phaseName: String
 
-  /** List of phase names, this phase should run after  */
+  /** Names of phases that must run before this phase. */
   val runsAfter: List[String]
 
-  /** List of phase names, this phase should run before  */
+  /** Names of phases that must run after this phase. Default is `Nil`. */
   val runsBefore: List[String] = Nil
 
-  /** Phase name this phase will attach itself to, not allowing any phase to come between it
-   * and the phase name declared  */
+  /** Name of the phase that this phase must follow immediately. */
   val runsRightAfter: Option[String]
 
-  /** Internal flag to tell external from internal phases */
+  /** Names of phases required by this component. Default is `Nil`. */
+  val requires: List[String] = Nil
+
+  /** Is this component enabled? Default is true. */
+  def enabled: Boolean = true
+
+  /** True if this phase is not provided by a plug-in. */
   val internal: Boolean = true
 
+  /** True if this phase runs before all other phases. Usually, `parser`. */
+  val initial: Boolean = false
+
+  /** True if this phase runs after all other phases. Usually, `terminal`. */
+  val terminal: Boolean = false
+
   /** SubComponents are added to a HashSet; two phases are considered the same if they have the same name. */
   override def hashCode() = phaseName.hashCode()
 
@@ -47,8 +58,8 @@ abstract class SubComponent {
   private var ownPhaseCache: WeakReference[Phase] = new WeakReference(null)
   private var ownPhaseRunId = global.NoRunId
 
-  @inline final def beforeOwnPhase[T](op: => T) = global.beforePhase(ownPhase)(op)
-  @inline final def afterOwnPhase[T](op: => T)  = global.afterPhase(ownPhase)(op)
+  @inline final def beforeOwnPhase[T](op: => T) = global.enteringPhase(ownPhase)(op)
+  @inline final def afterOwnPhase[T](op: => T)  = global.exitingPhase(ownPhase)(op)
 
   /** The phase corresponding to this subcomponent in the current compiler run */
   def ownPhase: Phase = {
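
The expanded SubComponent above is where a phase declares its ordering constraints: runsAfter and runsBefore are soft constraints, runsRightAfter is a hard one, and the new requires, enabled, initial and terminal members refine how the component participates. A hedged sketch of what such a declaration looks like, using a stripped-down stand-in trait and a made-up phase name rather than the real SubComponent:

    object PhaseConstraints {
      // A stripped-down stand-in for SubComponent's ordering contract.
      trait Component {
        val phaseName: String
        val runsAfter: List[String]              // must come somewhere after these
        val runsBefore: List[String] = Nil       // must come somewhere before these
        val runsRightAfter: Option[String]       // must come immediately after this one
        def enabled: Boolean = true
      }

      // Example: a hypothetical lint phase squeezed in after typer, before patmat.
      object lint extends Component {
        val phaseName               = "lint"
        val runsAfter               = List("typer")
        override val runsBefore     = List("patmat")
        val runsRightAfter          = None
      }

      def main(args: Array[String]): Unit =
        println(s"${lint.phaseName}: after=${lint.runsAfter} before=${lint.runsBefore}")
    }
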
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 6e39fc9..6d9b41e 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -7,10 +7,7 @@ package scala.tools.nsc
 package ast
 
 import symtab._
-import reporters._
-import scala.reflect.internal.util.{Position, NoPosition}
 import util.DocStrings._
-import scala.reflect.internal.Chars._
 import scala.collection.mutable
 
 /*
@@ -21,8 +18,14 @@ trait DocComments { self: Global =>
 
   val cookedDocComments = mutable.HashMap[Symbol, String]()
 
-  /** The raw doc comment map */
-  val docComments = mutable.HashMap[Symbol, DocComment]()
+  /** The raw doc comment map
+   *
+   * In the IDE, background compilation runs are interrupted by
+   * reloading new source files. This map is weak so that cached
+   * symbols (e.g. in baseTypeSeq) do not keep their docs alive
+   * between periodic doc reloads, which avoids memory leaks.
+   */
+  val docComments = mutable.WeakHashMap[Symbol, DocComment]()
 
   def clearDocComments() {
     cookedDocComments.clear()
@@ -30,11 +33,6 @@ trait DocComments { self: Global =>
     defs.clear()
   }
 
-  /** Associate comment with symbol `sym` at position `pos`. */
-  def docComment(sym: Symbol, docStr: String, pos: Position = NoPosition) =
-    if ((sym ne null) && (sym ne NoSymbol))
-      docComments += (sym -> DocComment(docStr, pos))
-
   /** The raw doc comment of symbol `sym`, as it appears in the source text, "" if missing.
    */
   def rawDocComment(sym: Symbol): String =
@@ -96,11 +94,6 @@ trait DocComments { self: Global =>
     expandVariables(cookedDocComment(sym, docStr), sym, site1)
   }
 
-  /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing.
-   *  @param sym  The symbol for which doc comment is returned (site is always the containing class)
-   */
-  def expandedDocComment(sym: Symbol): String = expandedDocComment(sym, sym.enclClass)
-
   /** The list of use cases of doc comment of symbol `sym` seen as a member of class
    *  `site`. Each use case consists of a synthetic symbol (which is entered nowhere else),
    *  of an expanded doc comment string, and of its position.
@@ -129,12 +122,6 @@ trait DocComments { self: Global =>
     getDocComment(sym) map getUseCases getOrElse List()
   }
 
-  def useCases(sym: Symbol): List[(Symbol, String, Position)] = useCases(sym, sym.enclClass)
-
-  /** Returns the javadoc format of doc comment string `s`, including wiki expansion
-   */
-  def toJavaDoc(s: String): String = expandWiki(s)
-
   private val wikiReplacements = List(
     ("""(\n\s*\*?)(\s*\n)"""    .r, """$1 <p>$2"""),
     ("""<([^\w/])"""            .r, """<$1"""),
@@ -275,7 +262,7 @@ trait DocComments { self: Global =>
               cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2))
             case None =>
               reporter.info(sym.pos, "The \"" + getSectionHeader + "\" annotation of the " + sym +
-                  " comment contains @inheritdoc, but the corresponding section in the parent is not defined.", true)
+                  " comment contains @inheritdoc, but the corresponding section in the parent is not defined.", force = true)
               "<invalid inheritdoc annotation>"
           }
 
@@ -313,7 +300,6 @@ trait DocComments { self: Global =>
   /** Lookup definition of variable.
    *
    *  @param vble  The variable for which a definition is searched
-   *  @param owner The current owner in which variable definitions are searched.
    *  @param site  The class for which doc comments are generated
    */
   def lookupVariable(vble: String, site: Symbol): Option[String] = site match {
@@ -330,12 +316,12 @@ trait DocComments { self: Global =>
   }
 
   /** Expand variable occurrences in string `str`, until a fix point is reached or
-   *  a expandLimit is exceeded.
+   *  an expandLimit is exceeded.
    *
-   *  @param str   The string to be expanded
-   *  @param sym   The symbol for which doc comments are generated
-   *  @param site  The class for which doc comments are generated
-   *  @return      Expanded string
+   *  @param initialStr   The string to be expanded
+   *  @param sym          The symbol for which doc comments are generated
+   *  @param site         The class for which doc comments are generated
+   *  @return             Expanded string
    */
   protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol): String = {
     val expandLimit = 10
@@ -372,7 +358,10 @@ trait DocComments { self: Global =>
             case vname  =>
               lookupVariable(vname, site) match {
                 case Some(replacement) => replaceWith(replacement)
-                case None              => reporter.warning(sym.pos, "Variable " + vname + " undefined in comment for " + sym + " in " + site)
+                case None              =>
+                  val pos = docCommentPos(sym)
+                  val loc = pos withPoint (pos.start + vstart + 1)
+                  reporter.warning(loc, s"Variable $vname undefined in comment for $sym in $site")
               }
             }
         }
@@ -470,7 +459,7 @@ trait DocComments { self: Global =>
         //val (classes, pkgs) = site.ownerChain.span(!_.isPackageClass)
         //val sites = (classes ::: List(pkgs.head, rootMirror.RootClass)))
         //findIn(sites)
-        findIn(site.ownerChain ::: List(definitions.EmptyPackage))
+        findIn(site.ownerChain ::: List(rootMirror.EmptyPackage))
       }
 
       def getType(str: String, variable: String): Type = {
@@ -507,7 +496,7 @@ trait DocComments { self: Global =>
         result
       }
 
-      /**
+      /*
        * work around the backticks issue suggested by Simon in
        * https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/z7s1CCRCz74
        * ideally, we'd have a removeWikiSyntax method in the CommentFactory to completely eliminate the wiki markup
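
expandVariables above substitutes variables found via lookupVariable repeatedly, until the string stops changing or expandLimit (10) passes have been made. A self-contained sketch of that bounded fixpoint expansion over a plain definitions map; the regex-based substitution is a simplification of the compiler's character-level scan:

    import scala.util.matching.Regex

    object DocVariableExpansion {
      private val Variable    = """\$(\w+)""".r
      private val expandLimit = 10

      // Expand $name occurrences until a fixed point or the expansion limit.
      def expand(str: String, defs: Map[String, String]): String = {
        def once(s: String): String =
          Variable.replaceAllIn(s, m =>
            Regex.quoteReplacement(defs.getOrElse(m.group(1), m.matched)))

        var current = str
        var passes  = 0
        var next    = once(current)
        while (next != current && passes < expandLimit) {
          current = next
          next = once(current)
          passes += 1
        }
        if (next != current)
          sys.error(s"Variables still expanding after $expandLimit passes (recursive definition?)")
        current
      }

      def main(args: Array[String]): Unit =
        println(expand("See $Coll for details", Map("Coll" -> "scala.collection.immutable.List")))
    }
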
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index deea4de..9c8e13a 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -32,7 +32,7 @@ abstract class NodePrinters {
   }
 
   trait DefaultPrintAST extends PrintAST {
-    val printPos = settings.Xprintpos.value || settings.Yposdebug.value
+    val printPos = settings.Xprintpos || settings.Yposdebug
 
     def showNameAndPos(tree: NameTree) = showPosition(tree) + showName(tree.name)
     def showDefTreeName(tree: DefTree) = showName(tree.name)
@@ -100,9 +100,9 @@ abstract class NodePrinters {
 
     def stringify(tree: Tree): String = {
       buf.clear()
-      if (settings.XshowtreesStringified.value) buf.append(tree.toString + EOL)
-      if (settings.XshowtreesCompact.value) {
-        buf.append(showRaw(tree, printIds = settings.uniqid.value, printTypes = settings.printtypes.value))
+      if (settings.XshowtreesStringified) buf.append(tree.toString + EOL)
+      if (settings.XshowtreesCompact) {
+        buf.append(showRaw(tree, printIds = settings.uniqid, printTypes = settings.printtypes))
       } else {
         level = 0
         traverse(tree)
@@ -168,6 +168,13 @@ abstract class NodePrinters {
       }
     }
 
+    def typeApplyCommon(tree: Tree, fun: Tree, args: List[Tree]) {
+      printMultiline(tree) {
+        traverse(fun)
+        traverseList("[]", "type argument")(args)
+      }
+    }
+
     def treePrefix(tree: Tree) = showPosition(tree) + tree.productPrefix
     def printMultiline(tree: Tree)(body: => Unit) {
       printMultiline(treePrefix(tree), showAttributes(tree))(body)
@@ -203,9 +210,11 @@ abstract class NodePrinters {
       showPosition(tree)
 
       tree match {
-        case AppliedTypeTree(tpt, args) => applyCommon(tree, tpt, args)
-        case ApplyDynamic(fun, args)    => applyCommon(tree, fun, args)
-        case Apply(fun, args)           => applyCommon(tree, fun, args)
+        case ApplyDynamic(fun, args)      => applyCommon(tree, fun, args)
+        case Apply(fun, args)             => applyCommon(tree, fun, args)
+
+        case TypeApply(fun, args)         => typeApplyCommon(tree, fun, args)
+        case AppliedTypeTree(tpt, args)   => typeApplyCommon(tree, tpt, args)
 
         case Throw(Ident(name)) =>
           printSingle(tree, name)
@@ -273,7 +282,7 @@ abstract class NodePrinters {
             traverseList("[]", "type parameter")(tparams)
             vparamss match {
               case Nil        => println("Nil")
-              case Nil :: Nil => println("List(Nil)")
+              case ListOfNil  => println("List(Nil)")
               case ps  :: Nil =>
                 printLine("", "1 parameter list")
                 ps foreach traverse
@@ -312,11 +321,6 @@ abstract class NodePrinters {
           }
         case This(qual) =>
           printSingle(tree, qual)
-        case TypeApply(fun, args) =>
-          printMultiline(tree) {
-            traverse(fun)
-            traverseList("[]", "type argument")(args)
-          }
         case tt @ TypeTree() =>
           println(showTypeTree(tt))
 
diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala
index d8fb632..beab801 100644
--- a/src/compiler/scala/tools/nsc/ast/Positions.scala
+++ b/src/compiler/scala/tools/nsc/ast/Positions.scala
@@ -1,16 +1,9 @@
 package scala.tools.nsc
 package ast
 
-import scala.reflect.internal.util.{ SourceFile, Position, OffsetPosition, NoPosition }
-
 trait Positions extends scala.reflect.internal.Positions {
   self: Global =>
 
-  def rangePos(source: SourceFile, start: Int, point: Int, end: Int) =
-    new OffsetPosition(source, point)
-
-  def validatePositions(tree: Tree) {}
-
   class ValidatingPosAssigner extends PosAssigner {
     var pos: Position = _
     override def traverse(t: Tree) {
@@ -20,7 +13,7 @@ trait Positions extends scala.reflect.internal.Positions {
         // When we prune due to encountering a position, traverse the
         // pruned children so we can warn about those lacking positions.
         t.children foreach { c =>
-          if ((c eq EmptyTree) || (c eq emptyValDef)) ()
+          if (!c.canHaveAttrs) ()
           else if (c.pos == NoPosition) {
             reporter.warning(t.pos, " Positioned tree has unpositioned child in phase " + globalPhase)
             inform("parent: " + treeSymStatus(t))
@@ -32,6 +25,6 @@ trait Positions extends scala.reflect.internal.Positions {
   }
 
   override protected[this] lazy val posAssigner: PosAssigner =
-    if (settings.Yrangepos.value && settings.debug.value || settings.Yposdebug.value) new ValidatingPosAssigner
+    if (settings.Yrangepos && settings.debug || settings.Yposdebug) new ValidatingPosAssigner
     else new DefaultPosAssigner
 }
diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala
index 83222a2..f3def3c 100644
--- a/src/compiler/scala/tools/nsc/ast/Printers.scala
+++ b/src/compiler/scala/tools/nsc/ast/Printers.scala
@@ -7,8 +7,6 @@ package scala.tools.nsc
 package ast
 
 import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
-import symtab.Flags._
-import symtab.SymbolTable
 
 trait Printers extends scala.reflect.internal.Printers { this: Global =>
 
@@ -22,7 +20,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
         printTree(
             if (tree.isDef && tree.symbol != NoSymbol && tree.symbol.isInitialized) {
               tree match {
-                case ClassDef(_, _, _, impl @ Template(ps, emptyValDef, body))
+                case ClassDef(_, _, _, impl @ Template(ps, noSelfType, body))
                 if (tree.symbol.thisSym != tree.symbol) =>
                   ClassDef(tree.symbol, Template(ps, ValDef(tree.symbol.thisSym), body))
                 case ClassDef(_, _, _, impl)           => ClassDef(tree.symbol, impl)
@@ -44,7 +42,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
     }
   }
 
-  // overflow cases missing from TreePrinter in reflect.api
+  // overflow cases missing from TreePrinter in scala.reflect.api
   override def xprintTree(treePrinter: super.TreePrinter, tree: Tree) = tree match {
     case DocDef(comment, definition) =>
       treePrinter.print(comment.raw)
@@ -130,7 +128,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
         case Select(qualifier, name) =>
           printTree(qualifier)
           print(".")
-          print(quotedName(name, true))
+          print(quotedName(name, decode = true))
 
         // target.toString() ==> target.toString
         case Apply(fn, Nil)   => printTree(fn)
@@ -154,7 +152,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
         // If thenp or elsep has only one statement, it doesn't need more than one line.
         case If(cond, thenp, elsep) =>
           def ifIndented(x: Tree) = {
-            indent ; println() ; printTree(x) ; undent
+            indent() ; println() ; printTree(x) ; undent()
           }
 
           val List(thenStmts, elseStmts) = List(thenp, elsep) map allStatements
@@ -168,128 +166,27 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
 
           if (elseStmts.nonEmpty) {
             print(" else")
-            indent ; println()
+            indent() ; println()
             elseStmts match {
               case List(x)  => printTree(x)
               case _        => printTree(elsep)
             }
-            undent ; println()
+            undent() ; println()
           }
         case _        => s()
       }
     }
   }
 
-  /** This must guarantee not to force any evaluation, so we can learn
-   *  a little bit about trees in the midst of compilation without altering
-   *  the natural course of events.
-   */
-  class SafeTreePrinter(out: PrintWriter) extends TreePrinter(out) {
-
-    private def default(t: Tree) = t.getClass.getName.reverse.takeWhile(_ != '.').reverse
-    private def params(trees: List[Tree]): String = trees map safe mkString ", "
-
-    private def safe(name: Name): String = name.decode
-    private def safe(tree: Tree): String = tree match {
-      case Apply(fn, args)        => "%s(%s)".format(safe(fn), params(args))
-      case Select(qual, name)     => safe(qual) + "." + safe(name)
-      case This(qual)             => safe(qual) + ".this"
-      case Ident(name)            => safe(name)
-      case Literal(value)         => value.stringValue
-      case _                      => "(?: %s)".format(default(tree))
-    }
-
-    override def printTree(tree: Tree) { print(safe(tree)) }
-  }
-
-  class TreeMatchTemplate {
-    // non-trees defined in Trees
-    //
-    // case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
-    // case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position])
-    //
-    def apply(t: Tree): Unit = t match {
-      // eliminated by typer
-      case Annotated(annot, arg)  =>
-      case AssignOrNamedArg(lhs, rhs) =>
-      case DocDef(comment, definition) =>
-      case Import(expr, selectors) =>
-
-      // eliminated by refchecks
-      case ModuleDef(mods, name, impl) =>
-      case TypeTreeWithDeferredRefCheck() =>
-
-      // eliminated by erasure
-      case TypeDef(mods, name, tparams, rhs) =>
-      case Typed(expr, tpt) =>
-
-      // eliminated by cleanup
-      case ApplyDynamic(qual, args) =>
-
-      // eliminated by explicitouter
-      case Alternative(trees) =>
-      case Bind(name, body) =>
-      case CaseDef(pat, guard, body) =>
-      case Star(elem) =>
-      case UnApply(fun, args) =>
-
-      // eliminated by lambdalift
-      case Function(vparams, body) =>
-
-      // eliminated by uncurry
-      case AppliedTypeTree(tpt, args) =>
-      case CompoundTypeTree(templ) =>
-      case ExistentialTypeTree(tpt, whereClauses) =>
-      case SelectFromTypeTree(qual, selector) =>
-      case SingletonTypeTree(ref) =>
-      case TypeBoundsTree(lo, hi) =>
-
-      // survivors
-      case Apply(fun, args) =>
-      case ArrayValue(elemtpt, trees) =>
-      case Assign(lhs, rhs) =>
-      case Block(stats, expr) =>
-      case ClassDef(mods, name, tparams, impl) =>
-      case DefDef(mods, name, tparams, vparamss, tpt, rhs)  =>
-      case EmptyTree =>
-      case Ident(name) =>
-      case If(cond, thenp, elsep) =>
-      case LabelDef(name, params, rhs) =>
-      case Literal(value) =>
-      case Match(selector, cases) =>
-      case New(tpt) =>
-      case PackageDef(pid, stats) =>
-      case Return(expr) =>
-      case Select(qualifier, selector) =>
-      case Super(qual, mix) =>
-      case Template(parents, self, body) =>
-      case This(qual) =>
-      case Throw(expr) =>
-      case Try(block, catches, finalizer) =>
-      case TypeApply(fun, args) =>
-      case TypeTree() =>
-      case ValDef(mods, name, tpt, rhs) =>
-
-      // missing from the Trees comment
-      case Parens(args) =>                          // only used during parsing
-      case SelectFromArray(qual, name, erasure) =>  // only used during erasure
-    }
-  }
-
-  def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
-  def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
-  def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, true, true, true)
+  def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes, settings.uniqid, settings.Yshowsymowners, settings.Yshowsymkinds)
+  def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes, settings.uniqid, settings.Yshowsymowners, settings.Yshowsymkinds)
+  def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, true, true, true, true)
 
   def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
-  def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream))
-  def newStandardTreePrinter(): TreePrinter = newStandardTreePrinter(new PrintWriter(ConsoleWriter))
-
   def newCompactTreePrinter(writer: PrintWriter): CompactTreePrinter = new CompactTreePrinter(writer)
-  def newCompactTreePrinter(stream: OutputStream): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(stream))
-  def newCompactTreePrinter(): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(ConsoleWriter))
 
   override def newTreePrinter(writer: PrintWriter): TreePrinter =
-    if (settings.Ycompacttrees.value) newCompactTreePrinter(writer)
+    if (settings.Ycompacttrees) newCompactTreePrinter(writer)
     else newStandardTreePrinter(writer)
   override def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream))
   override def newTreePrinter(): TreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter))
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index 5c95409..eafecf9 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package ast
 
 import java.awt.{List => awtList, _}
@@ -16,8 +17,6 @@ import javax.swing.tree._
 
 import scala.concurrent.Lock
 import scala.text._
-import symtab.Flags._
-import symtab.SymbolTable
 import scala.language.implicitConversions
 
 /**
@@ -34,7 +33,7 @@ abstract class TreeBrowsers {
 
   val borderSize = 10
 
-  def create(): SwingBrowser = new SwingBrowser();
+  def create(): SwingBrowser = new SwingBrowser()
 
   /** Pseudo tree class, so that all JTree nodes are treated uniformly */
   case class ProgramTree(units: List[UnitTree]) extends Tree {
@@ -50,21 +49,6 @@ abstract class TreeBrowsers {
    * Java Swing pretty printer for Scala abstract syntax trees.
    */
   class SwingBrowser {
-
-    def browse(t: Tree): Tree = {
-      val tm = new ASTTreeModel(t)
-
-      val frame = new BrowserFrame()
-      frame.setTreeModel(tm)
-
-      val lock = new Lock()
-      frame.createFrame(lock)
-
-      // wait for the frame to be closed
-      lock.acquire
-      t
-    }
-
     def browse(pName: String, units: Iterator[CompilationUnit]): Unit =
       browse(pName, units.toList)
 
@@ -83,7 +67,7 @@ abstract class TreeBrowsers {
       frame.createFrame(lock)
 
       // wait for the frame to be closed
-      lock.acquire
+      lock.acquire()
     }
   }
 
@@ -171,8 +155,8 @@ abstract class TreeBrowsers {
       _setExpansionState(root, new TreePath(root.getModel.getRoot))
     }
 
-    def expandAll(subtree: JTree) = setExpansionState(subtree, true)
-    def collapseAll(subtree: JTree) = setExpansionState(subtree, false)
+    def expandAll(subtree: JTree) = setExpansionState(subtree, expand = true)
+    def collapseAll(subtree: JTree) = setExpansionState(subtree, expand = false)
 
 
     /** Create a frame that displays the AST.
@@ -184,14 +168,14 @@ abstract class TreeBrowsers {
      * especially symbols/types would change while the window is visible.
      */
     def createFrame(lock: Lock): Unit = {
-      lock.acquire // keep the lock until the user closes the window
+      lock.acquire() // keep the lock until the user closes the window
 
       frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE)
 
       frame.addWindowListener(new WindowAdapter() {
         /** Release the lock, so compilation may resume after the window is closed. */
-        override def windowClosed(e: WindowEvent): Unit = lock.release
-      });
+        override def windowClosed(e: WindowEvent): Unit = lock.release()
+      })
 
       jTree = new JTree(treeModel) {
         /** Return the string for a tree node. */
@@ -253,7 +237,7 @@ abstract class TreeBrowsers {
           putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_Q, menuKey + shiftKey, false))
           override def actionPerformed(e: ActionEvent) {
             closeWindow()
-            global.currentRun.cancel
+            global.currentRun.cancel()
           }
         }
       )
@@ -509,7 +493,7 @@ abstract class TreeBrowsers {
     /** Return a textual representation of this t's symbol */
     def symbolText(t: Tree): String = {
       val prefix =
-        if (t.hasSymbol)  "[has] "
+        if (t.hasSymbolField)  "[has] "
         else if (t.isDef) "[defines] "
         else ""
 
@@ -529,11 +513,10 @@ abstract class TreeBrowsers {
      * attributes */
     def symbolAttributes(t: Tree): String = {
       val s = t.symbol
-      var att = ""
 
       if ((s ne null) && (s != NoSymbol)) {
-        var str = flagsToString(s.flags)
-        if (s.isStaticMember) str = str + " isStatic ";
+        var str = s.flagString
+        if (s.isStaticMember) str = str + " isStatic "
         (str + " annotations: " + s.annotations.mkString("", " ", "")
           + (if (s.isTypeSkolem) "\ndeSkolemized annotations: " + s.deSkolemize.annotations.mkString("", " ", "") else ""))
       }
@@ -636,7 +619,7 @@ abstract class TreeBrowsers {
                         toDocument(result) :: ")")
         )
 
-      case AnnotatedType(annots, tp, _) =>
+      case AnnotatedType(annots, tp) =>
         Document.group(
           Document.nest(4, "AnnotatedType(" :/:
                         annots.mkString("[", ",", "]") :/:
@@ -649,7 +632,7 @@ abstract class TreeBrowsers {
                 Document.group("(" :/: symsToDocument(tparams) :/: "), ") :/:
                 toDocument(result) :: ")"))
 
-      case global.analyzer.ImportType(expr) =>
+      case ImportType(expr) =>
         "ImportType(" + expr.toString + ")"
 
 
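
createFrame above pauses the compiler while the Swing AST browser is open: the caller holds a Lock, and a WindowAdapter releases it when the window is closed. A minimal sketch of the same block-until-closed pattern, using a CountDownLatch in place of scala.concurrent.Lock (the frame contents are illustrative):

    import java.awt.event.{WindowAdapter, WindowEvent}
    import java.util.concurrent.CountDownLatch
    import javax.swing.{JFrame, JLabel, WindowConstants}

    object BlockingBrowser {
      // Show a frame and block the calling thread until the user closes it.
      def browse(title: String): Unit = {
        val closed = new CountDownLatch(1)
        val frame  = new JFrame(title)
        frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE)
        frame.addWindowListener(new WindowAdapter {
          // Release the "lock" once the window has been disposed.
          override def windowClosed(e: WindowEvent): Unit = closed.countDown()
        })
        frame.getContentPane.add(new JLabel("  Close this window to resume  "))
        frame.pack()
        frame.setVisible(true)
        closed.await() // resume (e.g. continue compilation) only after close
      }

      def main(args: Array[String]): Unit = {
        browse("AST browser stand-in")
        println("window closed, carrying on")
      }
    }
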
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 9a5b92e..6dda30b 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -7,7 +7,6 @@
 package scala.tools.nsc
 package ast
 
-import PartialFunction._
 import symtab.Flags
 import scala.language.implicitConversions
 
@@ -21,7 +20,6 @@ trait TreeDSL {
 
   import global._
   import definitions._
-  import gen.{ scalaDot }
 
   object CODE {
     // Add a null check to a Tree => Tree function
@@ -31,30 +29,16 @@ trait TreeDSL {
     def returning[T](x: T)(f: T => Unit): T = util.returning(x)(f)
 
     object LIT extends (Any => Literal) {
+      def typed(x: Any)   = apply(x) setType ConstantType(Constant(x))
       def apply(x: Any)   = Literal(Constant(x))
-      def unapply(x: Any) = condOpt(x) { case Literal(Constant(value)) => value }
     }
 
-    // You might think these could all be vals, but empirically I have found that
-    // at least in the case of UNIT the compiler breaks if you re-use trees.
-    // However we need stable identifiers to have attractive pattern matching.
-    // So it's inconsistent until I devise a better way.
-    val TRUE          = LIT(true)
-    val FALSE         = LIT(false)
-    val ZERO          = LIT(0)
-    def NULL          = LIT(null)
-    def UNIT          = LIT(())
-
-    // for those preferring boring, predictable lives, without the thrills of tree-sharing
-    // (but with the perk of typed trees)
-    def TRUE_typed  = LIT(true) setType ConstantType(Constant(true))
-    def FALSE_typed = LIT(false) setType ConstantType(Constant(false))
-
-    object WILD {
-      def empty               = Ident(nme.WILDCARD)
-      def apply(tpe: Type)    = Ident(nme.WILDCARD) setType tpe
-      def unapply(other: Any) = cond(other) { case Ident(nme.WILDCARD) => true }
-    }
+    // Boring, predictable trees.
+    def TRUE  = LIT typed true
+    def FALSE = LIT typed false
+    def ZERO  = LIT(0)
+    def NULL  = LIT(null)
+    def UNIT  = LIT(())
 
     def fn(lhs: Tree, op:   Name, args: Tree*)  = Apply(Select(lhs, op), args.toList)
     def fn(lhs: Tree, op: Symbol, args: Tree*)  = Apply(Select(lhs, op), args.toList)
@@ -82,19 +66,15 @@ trait TreeDSL {
         if (opSym == NoSymbol) ANY_==(other)
         else fn(target, opSym, other)
       }
-      def ANY_EQ  (other: Tree)     = OBJ_EQ(other AS ObjectClass.tpe)
+      def ANY_EQ  (other: Tree)     = OBJ_EQ(other AS ObjectTpe)
       def ANY_==  (other: Tree)     = fn(target, Any_==, other)
       def ANY_!=  (other: Tree)     = fn(target, Any_!=, other)
-      def OBJ_==  (other: Tree)     = fn(target, Object_==, other)
-      def OBJ_!=  (other: Tree)     = fn(target, Object_!=, other)
       def OBJ_EQ  (other: Tree)     = fn(target, Object_eq, other)
       def OBJ_NE  (other: Tree)     = fn(target, Object_ne, other)
 
-      def INT_|   (other: Tree)     = fn(target, getMember(IntClass, nme.OR), other)
-      def INT_&   (other: Tree)     = fn(target, getMember(IntClass, nme.AND), other)
       def INT_>=  (other: Tree)     = fn(target, getMember(IntClass, nme.GE), other)
       def INT_==  (other: Tree)     = fn(target, getMember(IntClass, nme.EQ), other)
-      def INT_!=  (other: Tree)     = fn(target, getMember(IntClass, nme.NE), other)
+      def INT_-   (other: Tree)     = fn(target, getMember(IntClass, nme.MINUS), other)
 
       // generic operations on ByteClass, IntClass, LongClass
       def GEN_|   (other: Tree, kind: ClassSymbol)  = fn(target, getMember(kind, nme.OR), other)
@@ -102,37 +82,28 @@ trait TreeDSL {
       def GEN_==  (other: Tree, kind: ClassSymbol)  = fn(target, getMember(kind, nme.EQ), other)
       def GEN_!=  (other: Tree, kind: ClassSymbol)  = fn(target, getMember(kind, nme.NE), other)
 
-      def BOOL_&& (other: Tree)     = fn(target, Boolean_and, other)
-      def BOOL_|| (other: Tree)     = fn(target, Boolean_or, other)
-
       /** Apply, Select, Match **/
       def APPLY(params: Tree*)      = Apply(target, params.toList)
       def APPLY(params: List[Tree]) = Apply(target, params)
-      def MATCH(cases: CaseDef*)    = Match(target, cases.toList)
 
       def DOT(member: Name)         = SelectStart(Select(target, member))
       def DOT(sym: Symbol)          = SelectStart(Select(target, sym))
 
       /** Assignment */
+      // !!! This method is responsible for some tree sharing, but a diligent
+      // reviewer pointed out that we shouldn't blindly duplicate these trees
+      // as there might be DefTrees nested beneath them.  It's not entirely
+      // clear how to proceed, so for now it retains the non-duplicating behavior.
       def ===(rhs: Tree)            = Assign(target, rhs)
 
-      /** Methods for sequences **/
-      def DROP(count: Int): Tree =
-        if (count == 0) target
-        else (target DOT nme.drop)(LIT(count))
-
       /** Casting & type tests -- working our way toward understanding exactly
        *  what differs between the different forms of IS and AS.
        *
        *  See ticket #2168 for one illustration of AS vs. AS_ANY.
        */
       def AS(tpe: Type)       = gen.mkAsInstanceOf(target, tpe, any = true, wrapInApply = false)
-      def IS(tpe: Type)       = gen.mkIsInstanceOf(target, tpe, true)
-      def IS_OBJ(tpe: Type)   = gen.mkIsInstanceOf(target, tpe, false)
+      def IS_OBJ(tpe: Type)   = gen.mkIsInstanceOf(target, tpe, any = false)
 
-      // XXX having some difficulty expressing nullSafe in a way that doesn't freak out value types
-      // def TOSTRING()          = nullSafe(fn(_: Tree, nme.toString_), LIT("null"))(target)
-      def TOSTRING()          = fn(target, nme.toString_)
       def GETCLASS()          = fn(target, Object_getClass)
     }
 
@@ -145,98 +116,6 @@ trait TreeDSL {
       def ==>(body: Tree): CaseDef  = CaseDef(pat, guard, body)
     }
 
-    /** VODD, if it's not obvious, means ValOrDefDef.  This is the
-     *  common code between a tree based on a pre-existing symbol and
-     *  one being built from scratch.
-     */
-    trait VODDStart {
-      def name: Name
-      def defaultMods: Modifiers
-      def defaultTpt: Tree
-      def defaultPos: Position
-
-      type ResultTreeType <: ValOrDefDef
-      def mkTree(rhs: Tree): ResultTreeType
-      def ===(rhs: Tree): ResultTreeType
-
-      private var _mods: Modifiers = null
-      private var _tpt: Tree = null
-      private var _pos: Position = null
-
-      def withType(tp: Type): this.type = {
-        _tpt = TypeTree(tp)
-        this
-      }
-      def withFlags(flags: Long*): this.type = {
-        if (_mods == null)
-          _mods = defaultMods
-
-        _mods = flags.foldLeft(_mods)(_ | _)
-        this
-      }
-      def withPos(pos: Position): this.type = {
-        _pos = pos
-        this
-      }
-
-      final def mods = if (_mods == null) defaultMods else _mods
-      final def tpt  = if (_tpt == null) defaultTpt else _tpt
-      final def pos  = if (_pos == null) defaultPos else _pos
-    }
-    trait SymVODDStart extends VODDStart {
-      def sym: Symbol
-      def symType: Type
-
-      def name        = sym.name
-      def defaultMods = Modifiers(sym.flags)
-      def defaultTpt  = TypeTree(symType) setPos sym.pos.focus
-      def defaultPos  = sym.pos
-
-      final def ===(rhs: Tree): ResultTreeType =
-        atPos(pos)(mkTree(rhs) setSymbol sym)
-    }
-    trait ValCreator {
-      self: VODDStart =>
-
-      type ResultTreeType = ValDef
-      def mkTree(rhs: Tree): ValDef = ValDef(mods, name, tpt, rhs)
-    }
-    trait DefCreator {
-      self: VODDStart =>
-
-      def tparams: List[TypeDef]
-      def vparamss: List[List[ValDef]]
-
-      type ResultTreeType = DefDef
-      def mkTree(rhs: Tree): DefDef = DefDef(mods, name, tparams, vparamss, tpt, rhs)
-    }
-
-    class DefSymStart(val sym: Symbol) extends SymVODDStart with DefCreator {
-      def symType  = sym.tpe.finalResultType
-      def tparams  = sym.typeParams map TypeDef
-      def vparamss = mapParamss(sym)(ValDef)
-    }
-    class ValSymStart(val sym: Symbol) extends SymVODDStart with ValCreator {
-      def symType = sym.tpe
-    }
-
-    trait TreeVODDStart extends VODDStart {
-      def defaultMods = NoMods
-      def defaultTpt  = TypeTree()
-      def defaultPos  = NoPosition
-
-      final def ===(rhs: Tree): ResultTreeType =
-        if (pos == NoPosition) mkTree(rhs)
-        else atPos(pos)(mkTree(rhs))
-    }
-
-    class ValTreeStart(val name: Name) extends TreeVODDStart with ValCreator {
-    }
-    class DefTreeStart(val name: Name) extends TreeVODDStart with DefCreator {
-      def tparams: List[TypeDef] = Nil
-      def vparamss: List[List[ValDef]] = ListOfNil
-    }
-
     class IfStart(cond: Tree, thenp: Tree) {
       def THEN(x: Tree)     = new IfStart(cond, x)
       def ELSE(elsep: Tree) = If(cond, thenp, elsep)
@@ -244,84 +123,29 @@ trait TreeDSL {
     }
     class TryStart(body: Tree, catches: List[CaseDef], fin: Tree) {
       def CATCH(xs: CaseDef*) = new TryStart(body, xs.toList, fin)
-      def FINALLY(x: Tree)    = Try(body, catches, x)
       def ENDTRY              = Try(body, catches, fin)
     }
 
     def CASE(pat: Tree): CaseStart  = new CaseStart(pat, EmptyTree)
-    def DEFAULT: CaseStart          = new CaseStart(WILD.empty, EmptyTree)
-
-    class SymbolMethods(target: Symbol) {
-      def BIND(body: Tree) = Bind(target, body)
-      def IS_NULL()  = REF(target) OBJ_EQ NULL
-      def NOT_NULL() = REF(target) OBJ_NE NULL
-
-      def GET() = fn(REF(target), nme.get)
-
-      // name of nth indexed argument to a method (first parameter list), defaults to 1st
-      def ARG(idx: Int = 0) = Ident(target.paramss.head(idx))
-      def ARGS = target.paramss.head
-      def ARGNAMES = ARGS map Ident
-    }
-
-    /** Top level accessible. */
-    def MATCHERROR(arg: Tree) = Throw(MatchErrorClass.tpe, arg)
-    def THROW(sym: Symbol, msg: Tree): Throw = Throw(sym.tpe, msg.TOSTRING())
+    def DEFAULT: CaseStart          = new CaseStart(Ident(nme.WILDCARD), EmptyTree)
 
     def NEW(tpt: Tree, args: Tree*): Tree   = New(tpt, List(args.toList))
-    def NEW(sym: Symbol, args: Tree*): Tree = New(sym.tpe, args: _*)
-
-    def DEF(name: Name, tp: Type): DefTreeStart     = DEF(name) withType tp
-    def DEF(name: Name): DefTreeStart               = new DefTreeStart(name)
-    def DEF(sym: Symbol): DefSymStart               = new DefSymStart(sym)
-
-    def VAL(name: Name, tp: Type): ValTreeStart     = VAL(name) withType tp
-    def VAL(name: Name): ValTreeStart               = new ValTreeStart(name)
-    def VAL(sym: Symbol): ValSymStart               = new ValSymStart(sym)
-
-    def VAR(name: Name, tp: Type): ValTreeStart     = VAL(name, tp) withFlags Flags.MUTABLE
-    def VAR(name: Name): ValTreeStart               = VAL(name) withFlags Flags.MUTABLE
-    def VAR(sym: Symbol): ValSymStart               = VAL(sym) withFlags Flags.MUTABLE
-
-    def LAZYVAL(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.LAZY
-    def LAZYVAL(name: Name): ValTreeStart           = VAL(name) withFlags Flags.LAZY
-    def LAZYVAL(sym: Symbol): ValSymStart           = VAL(sym) withFlags Flags.LAZY
 
-    def AND(guards: Tree*) =
-      if (guards.isEmpty) EmptyTree
-      else guards reduceLeft gen.mkAnd
-
-    def OR(guards: Tree*) =
-      if (guards.isEmpty) EmptyTree
-      else guards reduceLeft gen.mkOr
+    def NOT(tree: Tree)   = Select(tree, Boolean_not)
+    def AND(guards: Tree*) = if (guards.isEmpty) EmptyTree else guards reduceLeft gen.mkAnd
 
     def IF(tree: Tree)    = new IfStart(tree, EmptyTree)
     def TRY(tree: Tree)   = new TryStart(tree, Nil, EmptyTree)
     def BLOCK(xs: Tree*)  = Block(xs.init.toList, xs.last)
-    def NOT(tree: Tree)   = Select(tree, Boolean_not)
-    def SOME(xs: Tree*)   = Apply(SomeClass.companionSymbol, makeTupleTerm(xs.toList, true))
+    def SOME(xs: Tree*)   = Apply(SomeClass.companionSymbol, gen.mkTuple(xs.toList))
 
     /** Typed trees from symbols. */
-    def THIS(sym: Symbol)             = gen.mkAttributedThis(sym)
-    def ID(sym: Symbol)               = gen.mkAttributedIdent(sym)
-    def REF(sym: Symbol)              = gen.mkAttributedRef(sym)
-    def REF(pre: Type, sym: Symbol)   = gen.mkAttributedRef(pre, sym)
-
-    def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
-      case Nil                        => UNIT
-      case List(tree) if flattenUnary => tree
-      case _                          => Apply(TupleClass(trees.length).companionModule, trees: _*)
-    }
-    def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
-      case Nil                        => gen.scalaUnitConstr
-      case List(tree) if flattenUnary => tree
-      case _                          => AppliedTypeTree(REF(TupleClass(trees.length)), trees)
-    }
+    def REF(sym: Symbol)            = gen.mkAttributedRef(sym)
+    def REF(pre: Type, sym: Symbol) = gen.mkAttributedRef(pre, sym)
 
     /** Implicits - some of these should probably disappear **/
     implicit def mkTreeMethods(target: Tree): TreeMethods = new TreeMethods(target)
     implicit def mkTreeMethodsFromSymbol(target: Symbol): TreeMethods = new TreeMethods(Ident(target))
-    implicit def mkSymbolMethodsFromSymbol(target: Symbol): SymbolMethods = new SymbolMethods(target)
 
     /** (foo DOT bar) might be simply a Select, but more likely it is to be immediately
      *  followed by an Apply.  We don't want to add an actual apply method to arbitrary
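For orientation, a minimal sketch of how the TreeDSL combinators kept by this hunk (IF/THEN/ELSE, NOT, REF, BLOCK) are typically strung together inside a compiler component. The enclosing method, the CODE._ import and the two symbols are illustrative assumptions, not something this patch adds.

    // Sketch only: assumes a component that mixes in TreeDSL, with
    // `import global._` and `import CODE._` in scope, and symbols
    // flagSym and valueSym obtained elsewhere.
    def cachedRead(flagSym: Symbol, valueSym: Symbol, compute: Tree): Tree =
      BLOCK(
        IF (NOT(REF(flagSym))) THEN compute ELSE EmptyTree,
        REF(valueSym)
      )
    // builds roughly: { if (!flag) compute; value }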
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 99b82d9..0575b97 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -8,7 +8,6 @@ package ast
 
 import scala.collection.mutable.ListBuffer
 import symtab.Flags._
-import symtab.SymbolTable
 import scala.language.postfixOps
 
 /** XXX to resolve: TreeGen only assumes global is a SymbolTable, but
@@ -20,31 +19,28 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
   import global._
   import definitions._
 
-  def mkCheckInit(tree: Tree): Tree = {
-    val tpe =
-      if (tree.tpe != null || !tree.hasSymbol) tree.tpe
-      else tree.symbol.tpe
-
-    if (!global.phase.erasedTypes && settings.warnSelectNullable.value &&
-        tpe <:< NotNullClass.tpe && !tpe.isNotNull)
-      mkRuntimeCall(nme.checkInitialized, List(tree))
-    else
-      tree
-  }
-
-  /** Builds a fully attributed wildcard import node.
+  /** Builds a fully attributed, synthetic wildcard import node.
    */
-  def mkWildcardImport(pkg: Symbol): Import = {
-    assert(pkg ne null, this)
-    val qual = gen.mkAttributedStableRef(pkg)
+  def mkWildcardImport(pkg: Symbol): Import =
+    mkImportFromSelector(pkg, ImportSelector.wildList)
+
+  /** Builds a fully attributed, synthetic import node.
+    * import `qualSym`.{`name` => `toName`}
+    */
+  def mkImport(qualSym: Symbol, name: Name, toName: Name): Import =
+    mkImportFromSelector(qualSym, ImportSelector(name, 0, toName, 0) :: Nil)
+
+  private def mkImportFromSelector(qualSym: Symbol, selector: List[ImportSelector]): Import = {
+    assert(qualSym ne null, this)
+    val qual = gen.mkAttributedStableRef(qualSym)
     val importSym = (
       NoSymbol
         newImport NoPosition
           setFlag SYNTHETIC
-          setInfo analyzer.ImportType(qual)
+          setInfo ImportType(qual)
     )
     val importTree = (
-      Import(qual, ImportSelector.wildList)
+      Import(qual, selector)
         setSymbol importSym
           setType NoType
     )
@@ -52,120 +48,23 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
   }
 
   // wrap the given expression in a SoftReference so it can be gc-ed
-  def mkSoftRef(expr: Tree): Tree = atPos(expr.pos)(New(SoftReferenceClass.tpe, expr))
-
-  // annotate the expression with @unchecked
-  def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) {
-    // This can't be "Annotated(New(UncheckedClass), expr)" because annotations
-    // are very picky about things and it crashes the compiler with "unexpected new".
-    Annotated(New(scalaDot(UncheckedClass.name), ListOfNil), expr)
-  }
-  // if it's a Match, mark the selector unchecked; otherwise nothing.
-  def mkUncheckedMatch(tree: Tree) = tree match {
-    case Match(selector, cases) => atPos(tree.pos)(Match(mkUnchecked(selector), cases))
-    case _                      => tree
-  }
-
-  def mkSynthSwitchSelector(expr: Tree): Tree = atPos(expr.pos) {
-    // This can't be "Annotated(New(SwitchClass), expr)" because annotations
-    // are very picky about things and it crashes the compiler with "unexpected new".
-    Annotated(Ident(nme.synthSwitch), expr)
-  }
-
-  // TODO: would be so much nicer if we would know during match-translation (i.e., type checking)
-  // whether we should emit missingCase-style apply (and isDefinedAt), instead of transforming trees post-factum
-  class MatchMatcher {
-    def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = unknownTree(orig)
-    def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = unknownTree(orig)
-    def caseVirtualizedMatchOpt(orig: Tree, prologue: List[Tree], cases: List[Tree], matchEndDef: Tree, wrap: Tree => Tree): Tree = unknownTree(orig)
-
-    def genVirtualizedMatch(prologue: List[Tree], cases: List[Tree], matchEndDef: Tree): Tree = Block(prologue ++ cases, matchEndDef)
-
-    def apply(matchExpr: Tree): Tree = matchExpr match {
-      // old-style match or virtpatmat switch
-      case Match(selector, cases) => // println("simple match: "+ (selector, cases) + "for:\n"+ matchExpr )
-        caseMatch(matchExpr, selector, cases, identity)
-      // old-style match or virtpatmat switch
-      case Block((vd: ValDef) :: Nil, orig@Match(selector, cases)) => // println("block match: "+ (selector, cases, vd) + "for:\n"+ matchExpr )
-        caseMatch(matchExpr, selector, cases, m => copyBlock(matchExpr, List(vd), m))
-      // virtpatmat
-      case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if opt.virtPatmat => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr )
-        caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher)
-      // optimized version of virtpatmat
-      case Block(stats, matchEndDef) if opt.virtPatmat && (stats forall treeInfo.hasSynthCaseSymbol) =>
-        // the assumption is once we encounter a case, the remainder of the block will consist of cases
-        // the prologue may be empty, usually it is the valdef that stores the scrut
-        val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
-        caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, identity)
-      // optimized version of virtpatmat
-      case Block(outerStats, orig@Block(stats, matchEndDef)) if opt.virtPatmat && (stats forall treeInfo.hasSynthCaseSymbol) =>
-        val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
-        caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, m => copyBlock(matchExpr, outerStats, m))
-      case other =>
-        unknownTree(other)
-    }
-
-    def unknownTree(t: Tree): Tree = throw new MatchError(t)
-    def copyBlock(orig: Tree, stats: List[Tree], expr: Tree): Block = Block(stats, expr)
-
-    def dropSyntheticCatchAll(cases: List[CaseDef]): List[CaseDef] =
-      if (!opt.virtPatmat) cases
-      else cases filter {
-             case CaseDef(pat, EmptyTree, Throw(Apply(Select(New(exTpt), nme.CONSTRUCTOR), _))) if (treeInfo.isWildcardArg(pat) && (exTpt.tpe.typeSymbol eq MatchErrorClass)) => false
-             case CaseDef(pat, guard, body) => true
-           }
-  }
-
-  def mkCached(cvar: Symbol, expr: Tree): Tree = {
-    val cvarRef = mkUnattributedRef(cvar)
-    Block(
-      List(
-        If(Apply(Select(cvarRef, nme.eq), List(Literal(Constant(null)))),
-           Assign(cvarRef, expr),
-           EmptyTree)),
-      cvarRef
-    )
+  def mkSoftRef(expr: Tree): Tree = atPos(expr.pos) {
+    val constructor = SoftReferenceClass.info.nonPrivateMember(nme.CONSTRUCTOR).suchThat(_.paramss.flatten.size == 1)
+    NewFromConstructor(constructor, expr)
   }
 
   // Builds a tree of the form "{ lhs = rhs ; lhs  }"
   def mkAssignAndReturn(lhs: Symbol, rhs: Tree): Tree = {
-    val lhsRef = mkUnattributedRef(lhs)
+    def lhsRef = if (lhs.owner.isClass) Select(This(lhs.owner), lhs) else Ident(lhs)
     Block(Assign(lhsRef, rhs) :: Nil, lhsRef)
   }
 
-  def mkModuleVarDef(accessor: Symbol) = {
-    val inClass    = accessor.owner.isClass
-    val extraFlags = if (inClass) PrivateLocal | SYNTHETIC else 0
-
-    val mval = (
-      accessor.owner.newVariable(nme.moduleVarName(accessor.name), accessor.pos.focus, MODULEVAR | extraFlags)
-        setInfo accessor.tpe.finalResultType
-        addAnnotation VolatileAttr
-    )
-    if (inClass)
-      mval.owner.info.decls enter mval
-
-    ValDef(mval)
-  }
-
-  // def m: T = { if (m$ eq null) m$ = new m$class(...) m$ }
-  // where (...) are eventual outer accessors
-  def mkCachedModuleAccessDef(accessor: Symbol, mvar: Symbol) =
-    DefDef(accessor, mkCached(mvar, newModule(accessor, mvar.tpe)))
-
-  def mkModuleAccessDef(accessor: Symbol, msym: Symbol) =
-    DefDef(accessor, Select(This(msym.owner), msym))
-
   def newModule(accessor: Symbol, tpe: Type) = {
     val ps = tpe.typeSymbol.primaryConstructor.info.paramTypes
     if (ps.isEmpty) New(tpe)
     else New(tpe, This(accessor.owner.enclClass))
   }
 
-  // def m: T;
-  def mkModuleAccessDcl(accessor: Symbol) =
-    DefDef(accessor setFlag lateDEFERRED, EmptyTree)
-
   def mkRuntimeCall(meth: Name, args: List[Tree]): Tree =
     mkRuntimeCall(meth, Nil, args)
 
@@ -206,7 +105,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
       else AppliedTypeTree(Ident(clazz), targs map TypeTree)
     ))
   }
-  def mkSuperSelect = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
 
   def wildcardStar(tree: Tree) =
     atPos(tree.pos) { Typed(tree, Ident(tpnme.WILDCARD_STAR)) }
@@ -244,7 +142,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
    *    x.asInstanceOf[`pt`]()   if after uncurry but before erasure
    *    x.$asInstanceOf[`pt`]()  if at or after erasure
    */
-  def mkCast(tree: Tree, pt: Type): Tree = {
+  override def mkCast(tree: Tree, pt: Type): Tree = {
     debuglog("casting " + tree + ":" + tree.tpe + " to " + pt + " at phase: " + phase)
     assert(!tree.tpe.isInstanceOf[MethodType], tree)
     assert(pt eq pt.normalize, tree +" : "+ debugString(pt) +" ~>"+ debugString(pt.normalize))
@@ -267,25 +165,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
     else
       mkCast(tree, pt)
 
-  def mkZeroContravariantAfterTyper(tp: Type): Tree = {
-    // contravariant -- for replacing an argument in a method call
-    // must use subtyping, as otherwise we miss types like `Any with Int`
-    val tree =
-      if      (NullClass.tpe    <:< tp) Literal(Constant(null))
-      else if (UnitClass.tpe    <:< tp) Literal(Constant())
-      else if (BooleanClass.tpe <:< tp) Literal(Constant(false))
-      else if (FloatClass.tpe   <:< tp) Literal(Constant(0.0f))
-      else if (DoubleClass.tpe  <:< tp) Literal(Constant(0.0d))
-      else if (ByteClass.tpe    <:< tp) Literal(Constant(0.toByte))
-      else if (ShortClass.tpe   <:< tp) Literal(Constant(0.toShort))
-      else if (IntClass.tpe     <:< tp) Literal(Constant(0))
-      else if (LongClass.tpe    <:< tp) Literal(Constant(0L))
-      else if (CharClass.tpe    <:< tp) Literal(Constant(0.toChar))
-      else mkCast(Literal(Constant(null)), tp)
-
-    tree
-  }
-
   /** Translate names in Select/Ident nodes to type names.
    */
   def convertToTypeName(tree: Tree): Option[RefTree] = tree match {
@@ -307,7 +186,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
    */
   private def mkPackedValDef(expr: Tree, owner: Symbol, name: Name): (ValDef, () => Ident) = {
     val packedType = typer.packedType(expr, owner)
-    val sym = owner.newValue(name, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType
+    val sym = owner.newValue(name.toTermName, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType
 
     (ValDef(sym, expr), () => Ident(sym) setPos sym.pos.focus setType expr.tpe)
   }
@@ -368,4 +247,53 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
       attrThis,
       If(cond, Block(syncBody: _*), EmptyTree)) ::
       stats: _*)
+
+  /** Creates a tree representing new Object { stats }.
+   *  To make sure an anonymous subclass of Object is created,
+   *  if there are no stats, a () is added.
+   */
+  def mkAnonymousNew(stats: List[Tree]): Tree = {
+    val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
+    mkNew(Nil, noSelfType, stats1, NoPosition, NoPosition)
+  }
+
+  /**
+   * Create a method based on a Function
+   *
+   * Used both under `-Ydelambdafy:method` to create a lifted function and
+   * under `-Ydelambdafy:inline` to create the apply method on the anonymous
+   * class.
+   *
+   * It creates a method definition with value params cloned from the
+   * original lambda. Then it calls a supplied function to create
+   * the body and types the result. Finally,
+   * everything is wrapped up in a DefDef.
+   *
+   * @param owner The owner for the new method
+   * @param name name for the new method
+   * @param additionalFlags flags to be put on the method in addition to FINAL
+   */
+  def mkMethodFromFunction(localTyper: analyzer.Typer)
+                          (fun: Function, owner: Symbol, name: TermName, additionalFlags: FlagSet = NoFlags) = {
+    val funParams = fun.vparams map (_.symbol)
+    val formals :+ restpe = fun.tpe.typeArgs
+
+    val methSym = owner.newMethod(name, fun.pos, FINAL | additionalFlags)
+
+    val paramSyms = map2(formals, fun.vparams) {
+      (tp, vparam) => methSym.newSyntheticValueParam(tp, vparam.name)
+    }
+
+    methSym setInfo MethodType(paramSyms, restpe.deconst)
+
+    fun.body.substituteSymbols(funParams, paramSyms)
+    fun.body changeOwner (fun.symbol -> methSym)
+
+    val methDef = DefDef(methSym, fun.body)
+
+    // Have to repack the type to avoid mismatches when existentials
+    // appear in the result - see SI-4869.
+    methDef.tpt setType localTyper.packedType(fun.body, methSym).deconst
+    methDef
+  }
 }
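To make the tree shape built by mkAssignAndReturn above concrete outside the compiler, a hedged sketch using the public runtime-reflection constructors; the compiler itself goes through the attributed factories in this file, so this is only an approximation for inspection in a 2.11 REPL.

    import scala.reflect.runtime.universe._

    // The shape mkAssignAndReturn(lhs, rhs) produces: { lhs = rhs; lhs }
    val lhsRef = Ident(TermName("cache"))
    val rhs    = Literal(Constant(42))
    val tree   = Block(List(Assign(lhsRef, rhs)), lhsRef)

    // showCode(tree) prints roughly: { cache = 42; cache }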
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index cbbb4c8..0731d78 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -6,10 +6,6 @@
 package scala.tools.nsc
 package ast
 
-import scala.reflect.internal.HasFlags
-import scala.reflect.internal.Flags._
-import symtab._
-
 /** This class ...
  *
  *  @author Martin Odersky
@@ -18,8 +14,65 @@ import symtab._
 abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
   val global: Global
   import global._
+  import definitions._
+
+  // arg1.op(arg2) returns (arg1, op.symbol, arg2)
+  object BinaryOp {
+    def unapply(t: Tree): Option[(Tree, Symbol, Tree)] = t match {
+      case Apply(sel @ Select(arg1, _), arg2 :: Nil) => Some((arg1, sel.symbol, arg2))
+      case _                                         => None
+    }
+  }
+  // recv.op[T1, ...] returns (recv, op.symbol, type argument types)
+  object TypeApplyOp {
+    def unapply(t: Tree): Option[(Tree, Symbol, List[Type])] = t match {
+      case TypeApply(sel @ Select(recv, _), targs) => Some((recv, sel.symbol, targs map (_.tpe)))
+      case _                                       => None
+    }
+  }
+
+  // x.asInstanceOf[T] returns (x, typeOf[T])
+  object AsInstanceOf {
+    def unapply(t: Tree): Option[(Tree, Type)] = t match {
+      case Apply(TypeApplyOp(recv, Object_asInstanceOf, tpe :: Nil), Nil) => Some((recv, tpe))
+      case _                                                              => None
+    }
+  }
 
-  import definitions.ThrowableClass
+  // Extractors for value classes.
+  object ValueClass {
+    def isValueClass(tpe: Type)                  = enteringErasure(tpe.typeSymbol.isDerivedValueClass)
+    def valueUnbox(tpe: Type)                    = enteringErasure(tpe.typeSymbol.derivedValueClassUnbox)
+
+    // B.unbox. Returns B.
+    object Unbox {
+      def unapply(t: Tree): Option[Tree] = t match {
+        case Apply(sel @ Select(ref, _), Nil) if valueUnbox(ref.tpe) == sel.symbol => Some(ref)
+        case _                                                                     => None
+      }
+    }
+    // new B(v). Returns B and v.
+    object Box {
+      def unapply(t: Tree): Option[(Tree, Type)] = t match {
+        case Apply(sel @ Select(New(tpt), nme.CONSTRUCTOR), v :: Nil) => Some((v, tpt.tpe.finalResultType))
+        case _                                                        => None
+      }
+    }
+    // (new B(v)).unbox. returns v.
+    object BoxAndUnbox {
+      def unapply(t: Tree): Option[Tree] = t match {
+        case Unbox(Box(v, tpe)) if isValueClass(tpe) => Some(v)
+        case _                                       => None
+      }
+    }
+    // new B(v1) op new B(v2) where op is == or !=. Returns v1, op, v2.
+    object BoxAndCompare {
+      def unapply(t: Tree): Option[(Tree, Symbol, Tree)] = t match {
+        case BinaryOp(Box(v1, tpe1), op @ (Object_== | Object_!=), Box(v2, tpe2)) if isValueClass(tpe1) && tpe1 =:= tpe2 => Some((v1, op, v2))
+        case _                                                                                                           => None
+      }
+    }
+  }
 
   /** Is tree legal as a member definition of an interface?
    */
@@ -34,15 +87,4 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
     case DocDef(_, definition) => isPureDef(definition)
     case _ => super.isPureDef(tree)
   }
-
- /** Does the list of trees start with a definition of
-   *  a class or module with the given name (ignoring imports)?
-   */
-  override def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match {
-    case ClassDef(_, `name`, _, _) :: Nil => true
-    case _ => super.firstDefinesClassOrObject(trees, name)
-  }
-
-  def isInterface(mods: HasFlags, body: List[Tree]) =
-    mods.isTrait && (body forall isInterfaceMember)
 }
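A hedged sketch of how extractors such as BinaryOp, AsInstanceOf and ValueClass.BoxAndUnbox above are typically consumed by a later transform. The enclosing component, the method name peephole and the particular rewrites are illustrative assumptions, not rules installed by this patch.

    // Sketch only: assumes a component with `val global: Global` and
    // `import global._, definitions._, treeInfo._` in scope.
    def peephole(tree: Tree): Tree = tree match {
      // (new B(v)).unbox  ==>  v   (B a value class)
      case ValueClass.BoxAndUnbox(v) => v
      // x.asInstanceOf[T]() where x already has type T: the cast is redundant
      case AsInstanceOf(recv, tpe) if recv.tpe =:= tpe => recv
      case _ => tree
    }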
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 0a12737..3652f51 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -6,6 +6,7 @@
 package scala.tools.nsc
 package ast
 
+import scala.reflect.ClassTag
 import scala.reflect.internal.Flags.BYNAMEPARAM
 import scala.reflect.internal.Flags.DEFAULTPARAM
 import scala.reflect.internal.Flags.IMPLICIT
@@ -16,24 +17,6 @@ import scala.reflect.internal.Flags.TRAIT
 import scala.compat.Platform.EOL
 
 trait Trees extends scala.reflect.internal.Trees { self: Global =>
-
-  def treeLine(t: Tree): String =
-    if (t.pos.isDefined && t.pos.isRange) t.pos.lineContent.drop(t.pos.column - 1).take(t.pos.end - t.pos.start + 1)
-    else t.summaryString
-
-  def treeStatus(t: Tree, enclosingTree: Tree = null) = {
-    val parent = if (enclosingTree eq null) "        " else " P#%5s".format(enclosingTree.id)
-
-    "[L%4s%8s] #%-6s %-15s %-10s // %s".format(t.pos.safeLine, parent, t.id, t.pos.show, t.shortClass, treeLine(t))
-  }
-  def treeSymStatus(t: Tree) = {
-    val line = if (t.pos.isDefined) "line %-4s".format(t.pos.safeLine) else "         "
-    "#%-5s %s %-10s // %s".format(t.id, line, t.shortClass,
-      if (t.symbol ne NoSymbol) "(" + t.symbol.fullLocationString + ")"
-      else treeLine(t)
-    )
-  }
-
   // --- additional cases --------------------------------------------------------
   /** Only used during parsing */
   case class Parens(args: List[Tree]) extends Tree
@@ -65,69 +48,11 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
 
   // --- factory methods ----------------------------------------------------------
 
-    /** Generates a template with constructor corresponding to
-   *
-   *  constrmods (vparams1_) ... (vparams_n) preSuper { presupers }
-   *  extends superclass(args_1) ... (args_n) with mixins { self => body }
-   *
-   *  This gets translated to
-   *
-   *  extends superclass with mixins { self =>
-   *    presupers' // presupers without rhs
-   *    vparamss   // abstract fields corresponding to value parameters
-   *    def <init>(vparamss) {
-   *      presupers
-   *      super.<init>(args)
-   *    }
-   *    body
-   *  }
+  /** Factory method for a primary constructor super call `super.<init>(args_1)...(args_n)`
    */
-  def Template(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): Template = {
-    /* Add constructor to template */
-
-    // create parameters for <init> as synthetic trees.
-    var vparamss1 = mmap(vparamss) { vd =>
-      atPos(vd.pos.focus) {
-        val mods = Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM) | PARAM | PARAMACCESSOR)
-        ValDef(mods withAnnotations vd.mods.annotations, vd.name, vd.tpt.duplicate, vd.rhs.duplicate)
-      }
-    }
-    val (edefs, rest) = body span treeInfo.isEarlyDef
-    val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
-    val gvdefs = evdefs map {
-      case vdef @ ValDef(_, _, tpt, _) =>
-        copyValDef(vdef)(
-        // atPos for the new tpt is necessary, since the original tpt might have no position
-        // (when missing type annotation for ValDef for example), so even though setOriginal modifies the
-        // position of TypeTree, it would still be NoPosition. That's what the author meant.
-        tpt = atPos(vdef.pos.focus)(TypeTree() setOriginal tpt setPos tpt.pos.focus),
-        rhs = EmptyTree
-      )
-    }
-    val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods | PRESUPER) }
-
-    val constrs = {
-      if (constrMods hasFlag TRAIT) {
-        if (body forall treeInfo.isInterfaceMember) List()
-        else List(
-          atPos(wrappingPos(superPos, lvdefs)) (
-            DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant())))))
-      } else {
-        // convert (implicit ... ) to ()(implicit ... ) if it's the only parameter section
-        if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
-          vparamss1 = List() :: vparamss1;
-        val superRef: Tree = atPos(superPos)(gen.mkSuperSelect)
-        val superCall = (superRef /: argss) (Apply.apply)
-        List(
-          atPos(wrappingPos(superPos, lvdefs ::: argss.flatten)) (
-            DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
-      }
-    }
-    constrs foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus=false))
-    // Field definitions for the class - remove defaults.
-    val fieldDefs = vparamss.flatten map (vd => copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree))
-
-    Template(parents, self, gvdefs ::: fieldDefs ::: constrs ::: etdefs ::: rest)
+  def PrimarySuperCall(argss: List[List[Tree]]): Tree = argss match {
+    case Nil        => Apply(gen.mkSuperInitCall, Nil)
+    case xs :: rest => rest.foldLeft(Apply(gen.mkSuperInitCall, xs): Tree)(Apply.apply)
   }
 
   /** Construct class definition with given class symbol, value parameters,
@@ -137,21 +62,17 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
    *  @param constrMods the modifiers for the class constructor, i.e. as in `class C private (...)`
    *  @param vparamss   the value parameters -- if they have symbols they
    *                    should be owned by `sym`
-   *  @param argss      the supercall arguments
    *  @param body       the template statements without primary constructor
    *                    and value parameter fields.
    */
-  def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): ClassDef = {
+  def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): ClassDef = {
     // "if they have symbols they should be owned by `sym`"
-    assert(
-      mforall(vparamss)(p => (p.symbol eq NoSymbol) || (p.symbol.owner == sym)),
-      ((mmap(vparamss)(_.symbol), sym))
-    )
+    assert(mforall(vparamss)(_.symbol.owner == sym), (mmap(vparamss)(_.symbol), sym))
 
     ClassDef(sym,
-      Template(sym.info.parents map TypeTree,
-               if (sym.thisSym == sym || phase.erasedTypes) emptyValDef else ValDef(sym.thisSym),
-               constrMods, vparamss, argss, body, superPos))
+      gen.mkTemplate(sym.info.parents map TypeTree,
+                    if (sym.thisSym == sym || phase.erasedTypes) noSelfType else ValDef(sym.thisSym),
+                    constrMods, vparamss, body, superPos))
   }
 
  // --- subcomponents --------------------------------------------------
@@ -160,8 +81,6 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
     val global: Trees.this.type = self
   } with TreeInfo
 
-  lazy val treePrinter = newTreePrinter()
-
   // --- additional cases in operations ----------------------------------
 
   override protected def xtraverse(traverser: Traverser, tree: Tree): Unit = tree match {
@@ -184,6 +103,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
     def InjectDerivedValue(tree: Tree, arg: Tree): InjectDerivedValue
     def TypeTreeWithDeferredRefCheck(tree: Tree): TypeTreeWithDeferredRefCheck
   }
+  implicit val TreeCopierTag: ClassTag[TreeCopier] = ClassTag[TreeCopier](classOf[TreeCopier])
 
   def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
   def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
@@ -227,7 +147,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
       try unit.body = transform(unit.body)
       catch {
         case ex: Exception =>
-          println(supplementErrorMessage("unhandled exception while transforming "+unit))
+          log(supplementErrorMessage("unhandled exception while transforming "+unit))
           throw ex
       }
     }
@@ -258,14 +178,34 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
     }
   }
 
-  /** resets symbol and tpe fields in a tree, @see ResetAttrs
-   */
-//  def resetAllAttrs[A<:Tree](x:A): A = { new ResetAttrsTraverser().traverse(x); x }
-//  def resetLocalAttrs[A<:Tree](x:A): A = { new ResetLocalAttrsTraverser().traverse(x); x }
-
-  def resetAllAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(false, leaveAlone).transform(x)
-  def resetLocalAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(true, leaveAlone).transform(x)
-  def resetLocalAttrsKeepLabels(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(true, leaveAlone, true).transform(x)
+  // Finally, no one calls resetAllAttrs anymore, so I'm removing it from the compiler.
+  // Even though I'm doing that with great pleasure, I'll leave its body here to warn future generations about what happened in the past.
+  //
+  // So what actually happened in the past is that we used to have two flavors of resetAttrs: resetAllAttrs and resetLocalAttrs.
+  // resetAllAttrs destroyed all symbols and types in the tree in order to reset its state to something suitable for retypechecking
+  // and/or embedding into bigger trees / different lexical scopes. (Btw here's some background on why people would want to use
+  // reset attrs in the first place: https://groups.google.com/forum/#!topic/scala-internals/TtCTPlj_qcQ).
+  //
+  // However resetAllAttrs was more of a poison than a treatment, because along with the locally defined symbols that cause
+  // almost every (maybe even every) case of tree corruption, it erased external bindings that sometimes could not be restored.
+  // This is how we came up with resetLocalAttrs that left external bindings alone, and that was a big step forward.
+  // Then slowly but steadily we've evicted all usages of resetAllAttrs from our codebase in favor of resetLocalAttrs
+  // and have been living happily ever after.
+  //
+  // def resetAllAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(localOnly = false, leaveAlone).transform(x)
+
+  // upd. Unfortunately this didn't work out quite as we expected. The last two users of resetAllAttrs:
+  // reification and typedLabelDef broke in very weird ways when we replaced resetAllAttrs with resetLocalAttrs
+  // (see SI-8316 change from resetAllAttrs to resetLocalAttrs in reifiers broke Slick and
+  // SI-8318 NPE in mixin in scala-continuations for more information).
+  // Given that we're supposed to release 2.11.0-RC1 in less than a week, I'm temporarily reinstating resetAllAttrs
+  // until we have time to better understand what's going on. In order to dissuade people from using it,
+  // it now comes with a new, ridiculous name.
+  /** @see ResetAttrs */
+  def brutallyResetAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(brutally = true, leaveAlone).transform(x)
+
+  /** @see ResetAttrs */
+  def resetAttrs(x: Tree): Tree = new ResetAttrs(brutally = false, leaveAlone = null).transform(x)
 
   /** A transformer which resets symbol and tpe fields of all nodes in a given tree,
    *  with special treatment of:
@@ -276,8 +216,10 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
    *
    *  (bq:) This transformer has mutable state and should be discarded after use
    */
-  private class ResetAttrs(localOnly: Boolean, leaveAlone: Tree => Boolean = null, keepLabels: Boolean = false) {
-    val debug = settings.debug.value
+  private class ResetAttrs(brutally: Boolean, leaveAlone: Tree => Boolean) {
+    // this used to be based on -Ydebug, but the need for logging in this code is so situational
+    // that I've reverted to a hard-coded constant here.
+    val debug = false
     val trace = scala.tools.nsc.util.trace when debug
 
     val locals = util.HashSet[Symbol](8)
@@ -298,6 +240,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
           registerLocal(sym.moduleClass)
           registerLocal(sym.companionClass)
           registerLocal(sym.companionModule)
+          registerLocal(sym.deSkolemize)
           sym match {
             case sym: TermSymbol => registerLocal(sym.referenced)
             case _ => ;
@@ -324,6 +267,8 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
         else
           super.transform {
             tree match {
+              case tree if !tree.canHaveAttrs =>
+                tree
               case tpt: TypeTree =>
                 if (tpt.original != null)
                   transform(tpt.original)
@@ -331,9 +276,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
                   val refersToLocalSymbols = tpt.tpe != null && (tpt.tpe exists (tp => locals contains tp.typeSymbol))
                   val isInferred = tpt.wasEmpty
                   if (refersToLocalSymbols || isInferred) {
-                    val dupl = tpt.duplicate
-                    dupl.tpe = null
-                    dupl
+                    tpt.duplicate.clearType()
                   } else {
                     tpt
                   }
@@ -358,42 +301,29 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
                 // vetoXXX local variables declared below describe the conditions under which we cannot erase symbols.
                 //
                 // The first reason to not erase symbols is the threat of non-idempotency (SI-5464).
-                // Here we take care of labels (SI-5562) and references to package classes (SI-5705).
+                // Here we take care of references to package classes (SI-5705).
                 // There are other non-idempotencies, but they are not worked around yet.
                 //
-                // The second reason has to do with the fact that resetAttrs itself has limited usefulness.
-                //
-                // First of all, why do we need resetAttrs? For one, it's absolutely required to move trees around.
-                // One cannot just take a typed tree from one lexical context and transplant it somewhere else.
-                // Most likely symbols defined by those trees will become borked and the compiler will blow up (SI-5797).
-                // To work around we just erase all symbols and types and then hope that we'll be able to correctly retypecheck.
-                // For ones who're not affected by scalac Stockholm syndrome, this might seem to be an extremely naive fix, but well...
-                //
-                // Of course, sometimes erasing everything won't work, because if a given identifier got resolved to something
-                // in one lexical scope, it can get resolved to something else.
-                //
-                // What do we do in these cases? Enter the workaround for the workaround: resetLocalAttrs, which only destroys
-                // locally defined symbols, but doesn't touch references to stuff declared outside of a given tree.
-                // That's what localOnly and vetoScope are for.
+                // The second reason has to do with the fact that resetAttrs needs to be less destructive.
+                // Erasing locally-defined symbols is useful to prevent tree corruption, but erasing external bindings is not,
+                // therefore we want to retain those bindings, especially given that restoring them can be impossible
+                // if we move these trees into lexical contexts different from their original locations.
                 if (dupl.hasSymbol) {
                   val sym = dupl.symbol
-                  val vetoScope = localOnly && !(locals contains sym)
-                  val vetoLabel = keepLabels && sym.isLabel
+                  val vetoScope = !brutally && !(locals contains sym) && !(locals contains sym.deSkolemize)
                   val vetoThis = dupl.isInstanceOf[This] && sym.isPackageClass
-                  if (!(vetoScope || vetoLabel || vetoThis)) dupl.symbol = NoSymbol
+                  if (!(vetoScope || vetoThis)) dupl.symbol = NoSymbol
                 }
-                dupl.tpe = null
-                dupl
+                dupl.clearType()
             }
           }
       }
     }
 
     def transform(x: Tree): Tree = {
-      if (localOnly)
       new MarkLocals().traverse(x)
 
-      if (localOnly && debug) {
+      if (debug) {
         assert(locals.size == orderedLocals.size)
         val msg = orderedLocals.toList filter {_ != NoSymbol} map {"  " + _} mkString EOL
         trace("locals (%d total): %n".format(orderedLocals.size))(msg)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala b/src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala
new file mode 100644
index 0000000..5fcb028
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala
@@ -0,0 +1,112 @@
+package scala.tools.nsc
+package ast.parser
+
+/** Common code between Scala's Tokens and JavaTokens. */
+abstract class CommonTokens {
+
+  def isIdentifier(code: Int): Boolean
+  def isLiteral(code: Int): Boolean
+
+  /** special tokens */
+  final val EMPTY = -3
+  final val UNDEF = -2
+  final val ERROR = -1
+  final val EOF = 0
+
+  /** literals */
+  final val CHARLIT = 1
+  final val INTLIT = 2
+  final val LONGLIT = 3
+  final val FLOATLIT = 4
+  final val DOUBLELIT = 5
+  final val STRINGLIT = 6
+
+  /** keywords */
+  final val NEW = 20
+  final val THIS = 21
+  final val SUPER = 23
+
+  final val NULL = 24
+  final val TRUE = 25
+  final val FALSE = 26
+
+  // J: INSTANCEOF = 27
+  // J: CONST = 28
+
+  /** modifiers */
+  // S: IMPLICIT = 40
+  // S: OVERRIDE = 41
+  // J: PUBLIC = 42
+  final val PROTECTED = 43
+  final val PRIVATE = 44
+  // S: SEALED = 45
+  final val ABSTRACT = 46
+  // J: DEFAULT = 47
+  // J: STATIC = 48
+  final val FINAL = 49
+  // J: TRANSIENT = 50
+  // J: VOLATILE = 51
+  // J: SYNCHRONIZED = 52
+  // J: NATIVE = 53
+  // J: STRICTFP = 54
+  // S: LAZY = 55
+  // J: THROWS = 56
+  // S: MACRO = 57
+
+  /** templates */
+  final val PACKAGE = 60
+  final val IMPORT = 61
+  final val CLASS = 62
+  // S: CASECLASS = 63
+  // S: OBJECT = 64
+  // S: CASEOBJECT = 65
+  // S: TRAIT, J: INTERFACE = 66
+  // J: ENUM = 67
+  final val EXTENDS = 68
+  // S: WITH, J: IMPLEMENTS = 69
+  // S: TYPE = 70
+  // S: FORSOME = 71
+  // S: DEF = 72
+  // S: VAL = 73
+  // S: VAR = 74
+
+  /** control structures */
+  final val IF = 80
+  // S: THEN = 81
+  final val ELSE = 82
+  final val WHILE = 83
+  final val DO = 84
+  final val FOR = 85
+  // S: YIELD = 86
+  // J: BREAK = 87
+  // J: CONTINUE = 88
+  // J: GOTO = 89
+  final val THROW = 90
+  final val TRY = 91
+  final val CATCH = 92
+  final val FINALLY = 93
+  // J: SWITCH = 94
+  // S: MATCH = 95
+  final val CASE = 96
+  final val RETURN = 97
+  // J: ASSERT = 98
+
+  /** parentheses */
+  final val LPAREN = 100
+  final val RPAREN = 101
+  final val LBRACKET = 102
+  final val RBRACKET = 103
+  final val LBRACE = 104
+  final val RBRACE = 105
+
+  /** special symbols */
+  final val COMMA = 120
+  final val SEMI = 121
+  final val DOT = 122
+  final val COLON = 123
+  final val EQUALS = 124
+  final val AT = 125
+  // S: <special symbols> = 130 - 139
+  // J: <special symbols> = 140 - 179
+  // J: <primitive types> = 180 - 189
+}
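CommonTokens above leaves isIdentifier and isLiteral abstract; here is a hedged sketch of what a concrete token table can look like, assuming the class is on the classpath. DemoTokens and the identifier codes 10 and 11 are illustrative stand-ins, not the values defined by the real Tokens or JavaTokens objects.

    // Illustrative only: the real Scala and Java token objects define many
    // more constants and their own code ranges.
    object DemoTokens extends CommonTokens {
      final val IDENTIFIER       = 10
      final val BACKQUOTED_IDENT = 11

      def isIdentifier(code: Int) = code == IDENTIFIER || code == BACKQUOTED_IDENT
      def isLiteral(code: Int)    = code >= CHARLIT && code <= STRINGLIT
    }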
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index 553a208..d3f495f 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -10,10 +10,7 @@ import scala.collection.mutable
 import mutable.{ Buffer, ArrayBuffer, ListBuffer }
 import scala.util.control.ControlThrowable
 import scala.tools.nsc.util.CharArrayReader
-import scala.reflect.internal.util.SourceFile
-import scala.xml.{ Text, TextBuffer }
-import scala.xml.parsing.MarkupParserCommon
-import scala.xml.Utility.{ isNameStart, isNameChar, isSpace }
+import scala.tools.nsc.ast.parser.xml.{MarkupParserCommon, Utility}
 import scala.reflect.internal.Chars.{ SU, LF }
 
 // XXX/Note: many/most of the functions in here are almost direct cut and pastes
@@ -26,12 +23,6 @@ import scala.reflect.internal.Chars.{ SU, LF }
 // I rewrote most of these, but not as yet the library versions: so if you are
 // tempted to touch any of these, please be aware of that situation and try not
 // to let it get any worse.  -- paulp
-
-/** This trait ...
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
 trait MarkupParsers {
   self: Parsers =>
 
@@ -50,8 +41,8 @@ trait MarkupParsers {
   import global._
 
   class MarkupParser(parser: SourceFileParser, final val preserveWS: Boolean) extends MarkupParserCommon {
-
-    import Tokens.{ EMPTY, LBRACE, RBRACE }
+    import Utility.{ isNameStart, isSpace }
+    import Tokens.{ LBRACE, RBRACE }
 
     type PositionType = Position
     type InputType    = CharArrayReader
@@ -89,7 +80,7 @@ trait MarkupParsers {
 
     var xEmbeddedBlock = false
 
-    private var debugLastStartElement = new mutable.Stack[(Int, String)]
+    private val debugLastStartElement = new mutable.Stack[(Int, String)]
     private def debugLastPos = debugLastStartElement.top._1
     private def debugLastElem = debugLastStartElement.top._2
 
@@ -107,7 +98,7 @@ trait MarkupParsers {
      */
     def xCheckEmbeddedBlock: Boolean = {
       // attentions, side-effect, used in xText
-      xEmbeddedBlock = (ch == '{') && { nextch; (ch != '{') }
+      xEmbeddedBlock = (ch == '{') && { nextch(); (ch != '{') }
       xEmbeddedBlock
     }
 
@@ -123,8 +114,7 @@ trait MarkupParsers {
       while (isNameStart(ch)) {
         val start = curOffset
         val key = xName
-        xEQ
-        val delim = ch
+        xEQ()
         val mid = curOffset
         val value: Tree = ch match {
           case '"' | '\'' =>
@@ -137,7 +127,7 @@ trait MarkupParsers {
             }
 
           case '{'  =>
-            nextch
+            nextch()
             xEmbeddedExpr
           case SU =>
             throw TruncatedXMLControl
@@ -150,7 +140,7 @@ trait MarkupParsers {
 
         aMap(key) = value
         if (ch != '/' && ch != '>')
-          xSpace
+          xSpace()
       }
       aMap
     }
@@ -181,22 +171,31 @@ trait MarkupParsers {
       xTakeUntil(handle.comment, () => r2p(start, start, curOffset), "-->")
     }
 
-    def appendText(pos: Position, ts: Buffer[Tree], txt: String) {
-      val toAppend =
-        if (preserveWS) Seq(txt)
-        else TextBuffer.fromString(txt).toText map (_.text)
+    def appendText(pos: Position, ts: Buffer[Tree], txt: String): Unit = {
+      def append(t: String) = ts append handle.text(pos, t)
+
+      if (preserveWS) append(txt)
+      else {
+        val sb = new StringBuilder()
+
+        txt foreach { c =>
+          if (!isSpace(c)) sb append c
+          else if (sb.isEmpty || !isSpace(sb.last)) sb append ' '
+        }
 
-      toAppend foreach (t => ts append handle.text(pos, t))
+        val trimmed = sb.toString.trim
+        if (!trimmed.isEmpty) append(trimmed)
+      }
     }
 
     /** adds entity/character to ts as side-effect
      *  @precond ch == '&'
      */
     def content_AMP(ts: ArrayBuffer[Tree]) {
-      nextch
+      nextch()
       val toAppend = ch match {
         case '#' => // CharacterRef
-          nextch
+          nextch()
           val theChar = handle.text(tmppos, xCharRef)
           xToken(';')
           theChar
@@ -219,17 +218,14 @@ trait MarkupParsers {
 
     /** Returns true if it encounters an end tag (without consuming it),
      *  appends trees to ts as side-effect.
-     *
-     *  @param ts ...
-     *  @return   ...
      */
     private def content_LT(ts: ArrayBuffer[Tree]): Boolean = {
       if (ch == '/')
         return true   // end tag
 
       val toAppend = ch match {
-        case '!'    => nextch ; if (ch =='[') xCharData else xComment // CDATA or Comment
-        case '?'    => nextch ; xProcInstr                            // PI
+        case '!'    => nextch() ; if (ch =='[') xCharData else xComment // CDATA or Comment
+        case '?'    => nextch() ; xProcInstr                            // PI
         case _      => element                                        // child node
       }
 
@@ -246,7 +242,7 @@ trait MarkupParsers {
           tmppos = o2p(curOffset)
           ch match {
             // end tag, cdata, comment, pi or child node
-            case '<'  => nextch ; if (content_LT(ts)) return ts
+            case '<'  => nextch() ; if (content_LT(ts)) return ts
             // either the character '{' or an embedded scala block }
             case '{'  => content_BRACE(tmppos, ts)  // }
             // EntityRef or CharRef
@@ -268,7 +264,7 @@ trait MarkupParsers {
       val (qname, attrMap) = xTag(())
       if (ch == '/') { // empty element
         xToken("/>")
-        handle.element(r2p(start, start, curOffset), qname, attrMap, true, new ListBuffer[Tree])
+        handle.element(r2p(start, start, curOffset), qname, attrMap, empty = true, new ListBuffer[Tree])
       }
       else { // handle content
         xToken('>')
@@ -278,11 +274,11 @@ trait MarkupParsers {
         debugLastStartElement.push((start, qname))
         val ts = content
         xEndTag(qname)
-        debugLastStartElement.pop
+        debugLastStartElement.pop()
         val pos = r2p(start, start, curOffset)
         qname match {
           case "xml:group" => handle.group(pos, ts)
-          case _ => handle.element(pos, qname, attrMap, false, ts)
+          case _ => handle.element(pos, qname, attrMap, empty = false, ts)
         }
       }
     }
@@ -297,12 +293,12 @@ trait MarkupParsers {
 
       while (ch != SU) {
         if (ch == '}') {
-          if (charComingAfter(nextch) == '}') nextch
+          if (charComingAfter(nextch()) == '}') nextch()
           else errorBraces()
         }
 
         buf append ch
-        nextch
+        nextch()
         if (xCheckEmbeddedBlock || ch == '<' ||  ch == '&')
           return done
       }
@@ -349,12 +345,12 @@ trait MarkupParsers {
         content_LT(ts)
 
         // parse more XML ?
-        if (charComingAfter(xSpaceOpt) == '<') {
-          xSpaceOpt
+        if (charComingAfter(xSpaceOpt()) == '<') {
+          xSpaceOpt()
           while (ch == '<') {
-            nextch
+            nextch()
             ts append element
-            xSpaceOpt
+            xSpaceOpt()
           }
           handle.makeXMLseq(r2p(start, start, curOffset), ts)
         }
@@ -375,7 +371,7 @@ trait MarkupParsers {
         saving[Boolean, Tree](handle.isPattern, handle.isPattern = _) {
           handle.isPattern = true
           val tree = xPattern
-          xSpaceOpt
+          xSpaceOpt()
           tree
         }
       },
@@ -410,13 +406,13 @@ trait MarkupParsers {
      *                  | Name [S] '/' '>'
      */
     def xPattern: Tree = {
-      var start = curOffset
+      val start = curOffset
       val qname = xName
       debugLastStartElement.push((start, qname))
-      xSpaceOpt
+      xSpaceOpt()
 
       val ts = new ArrayBuffer[Tree]
-      val isEmptyTag = (ch == '/') && { nextch ; true }
+      val isEmptyTag = (ch == '/') && { nextch() ; true }
       xToken('>')
 
       if (!isEmptyTag) {
@@ -426,13 +422,13 @@ trait MarkupParsers {
           if (xEmbeddedBlock) ts ++= xScalaPatterns
           else ch match {
             case '<'  => // tag
-              nextch
+              nextch()
               if (ch != '/') ts append xPattern   // child
               else return false                   // terminate
 
             case '{'  => // embedded Scala patterns
               while (ch == '{') {
-                nextch
+                nextch()
                 ts ++= xScalaPatterns
               }
               assert(!xEmbeddedBlock, "problem with embedded block")
@@ -450,7 +446,7 @@ trait MarkupParsers {
 
         while (doPattern) { }  // call until false
         xEndTag(qname)
-        debugLastStartElement.pop
+        debugLastStartElement.pop()
       }
 
       handle.makeXMLpat(r2p(start, start, curOffset), qname, ts)
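The rewritten appendText above replaces the old TextBuffer-based whitespace handling with an inline loop; the following hedged, standalone sketch reproduces that coalescing so it can be tried outside the parser, with Character.isWhitespace standing in for the parser's Utility.isSpace.

    // Collapse runs of whitespace into single spaces, then trim: the same
    // effect as the loop in appendText when preserveWS is false.
    def coalesce(txt: String): String = {
      val sb = new StringBuilder
      txt foreach { c =>
        if (!Character.isWhitespace(c)) sb append c
        else if (sb.isEmpty || !Character.isWhitespace(sb.last)) sb append ' '
      }
      sb.toString.trim
    }

    // coalesce(" hello \n  world ") == "hello world"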
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index b9e4109..9e631fe 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -9,12 +9,12 @@
 package scala.tools.nsc
 package ast.parser
 
-import scala.collection.mutable.{ListBuffer, StringBuilder}
-import scala.reflect.internal.{ ModifierFlags => Flags }
+import scala.collection.{ mutable, immutable }
+import mutable.{ ListBuffer, StringBuilder }
+import scala.reflect.internal.{ Precedence, ModifierFlags => Flags }
 import scala.reflect.internal.Chars.{ isScalaLetter }
-import scala.reflect.internal.util.{ SourceFile, OffsetPosition }
+import scala.reflect.internal.util.{ SourceFile, Position, FreshNameCreator }
 import Tokens._
-import util.FreshNameCreator
 
 /** Historical note: JavaParsers started life as a direct copy of Parsers
  *  but at a time when that Parsers had been replaced by a different one.
@@ -25,20 +25,23 @@ import util.FreshNameCreator
  *  the beginnings of a campaign against this latest incursion by Cutty
  *  McPastington and his army of very similar soldiers.
  */
-trait ParsersCommon extends ScannersCommon {
+trait ParsersCommon extends ScannersCommon { self =>
   val global : Global
-  import global._
+  // the use of currentUnit in the parser should be avoided as it might
+  // cause unexpected behaviour when you work with two units at the
+  // same time; use Parser.unit instead
+  import global.{currentUnit => _, _}
+
+  def newLiteral(const: Any) = Literal(Constant(const))
+  def literalUnit            = gen.mkSyntheticUnit()
 
   /** This is now an abstract class, only to work around the optimizer:
    *  methods in traits are never inlined.
    */
   abstract class ParserCommon {
     val in: ScannerCommon
-    def freshName(prefix: String): Name
-    def freshTermName(prefix: String): TermName
-    def freshTypeName(prefix: String): TypeName
-    def deprecationWarning(off: Int, msg: String): Unit
-    def accept(token: Int): Int
+    def deprecationWarning(off: Offset, msg: String): Unit
+    def accept(token: Token): Int
 
     /** Methods inParensOrError and similar take a second argument which, should
      *  the next token not be the expected opener (e.g. LPAREN) will be returned
@@ -56,7 +59,7 @@ trait ParsersCommon extends ScannersCommon {
       if (in.token == LPAREN) inParens(body)
       else { accept(LPAREN) ; alt }
 
-    @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, Literal(Constant()))
+    @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, literalUnit)
     @inline final def inParensOrNil[T](body: => List[T]): List[T] = inParensOrError(body, Nil)
 
     @inline final def inBraces[T](body: => T): T = {
@@ -70,7 +73,7 @@ trait ParsersCommon extends ScannersCommon {
       else { accept(LBRACE) ; alt }
 
     @inline final def inBracesOrNil[T](body: => List[T]): List[T] = inBracesOrError(body, Nil)
-    @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, Literal(Constant()))
+    @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, literalUnit)
     @inline final def dropAnyBraces[T](body: => T): T =
       if (in.token == LBRACE) inBraces(body)
       else body
@@ -94,7 +97,7 @@ trait ParsersCommon extends ScannersCommon {
  *  <ol>
  *    <li>
  *      Places all pattern variables in Bind nodes. In a pattern, for
- *      identifiers <code>x</code>:<pre>
+ *      identifiers `x`:<pre>
  *                 x  => x @ _
  *               x:T  => x @ (_ : T)</pre>
  *    </li>
@@ -130,7 +133,9 @@ self =>
   val global: Global
   import global._
 
-  case class OpInfo(operand: Tree, operator: Name, offset: Offset)
+  case class OpInfo(lhs: Tree, operator: TermName, targs: List[Tree], offset: Offset) {
+    def precedence = Precedence(operator.toString)
+  }
 
   class SourceFileParser(val source: SourceFile) extends Parser {
 
@@ -141,37 +146,36 @@ self =>
       if (source.isSelfContained) () => compilationUnit()
       else () => scriptBody()
 
-    def newScanner = new SourceFileScanner(source)
+    def newScanner(): Scanner = new SourceFileScanner(source)
 
-    val in = newScanner
+    val in = newScanner()
     in.init()
 
-    private val globalFresh = new FreshNameCreator.Default
-
-    def freshName(prefix: String): Name = freshTermName(prefix)
-    def freshTermName(prefix: String): TermName = newTermName(globalFresh.newName(prefix))
-    def freshTypeName(prefix: String): TypeName = newTypeName(globalFresh.newName(prefix))
-
-    def o2p(offset: Int): Position = new OffsetPosition(source, offset)
-    def r2p(start: Int, mid: Int, end: Int): Position = rangePos(source, start, mid, end)
+    def unit = global.currentUnit
 
     // suppress warnings; silent abort on errors
-    def warning(offset: Int, msg: String) {}
-    def deprecationWarning(offset: Int, msg: String) {}
+    def warning(offset: Offset, msg: String) {}
+    def deprecationWarning(offset: Offset, msg: String) {}
 
-    def syntaxError(offset: Int, msg: String): Unit = throw new MalformedInput(offset, msg)
+    def syntaxError(offset: Offset, msg: String): Unit = throw new MalformedInput(offset, msg)
     def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg)
 
-    /** the markup parser */
-    lazy val xmlp = new MarkupParser(this, preserveWS = true)
-
     object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices
       val global: self.global.type = self.global
-      def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix)
     }
 
-    def xmlLiteral : Tree = xmlp.xLiteral
-    def xmlLiteralPattern : Tree = xmlp.xLiteralPattern
+    /** the markup parser
+     * The first time this lazy val is accessed, we assume we were trying to parse an xml literal.
+     * The current position is recorded for later error reporting if it turns out
+     * that we don't have the xml library on the compilation classpath.
+     */
+    private[this] lazy val xmlp = {
+      unit.encounteredXml(o2p(in.offset))
+      new MarkupParser(this, preserveWS = true)
+    }
+
+    def xmlLiteral() : Tree = xmlp.xLiteral
+    def xmlLiteralPattern() : Tree = xmlp.xLiteralPattern
   }
 
   class OutlineParser(source: SourceFile) extends SourceFileParser(source) {
@@ -192,23 +196,19 @@ self =>
 
     override def blockExpr(): Tree = skipBraces(EmptyTree)
 
-    override def templateBody(isPre: Boolean) = skipBraces((emptyValDef, EmptyTree.asList))
+    override def templateBody(isPre: Boolean) = skipBraces((noSelfType, EmptyTree.asList))
   }
 
-  class UnitParser(val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) {
-
-    def this(unit: global.CompilationUnit) = this(unit, List())
+  class UnitParser(override val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) { uself =>
+    def this(unit: global.CompilationUnit) = this(unit, Nil)
 
-    override def newScanner = new UnitScanner(unit, patches)
+    override def newScanner() = new UnitScanner(unit, patches)
 
-    override def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
-    override def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
-
-    override def warning(offset: Int, msg: String) {
+    override def warning(offset: Offset, msg: String) {
       unit.warning(o2p(offset), msg)
     }
 
-    override def deprecationWarning(offset: Int, msg: String) {
+    override def deprecationWarning(offset: Offset, msg: String) {
       unit.deprecationWarning(o2p(offset), msg)
     }
 
@@ -219,13 +219,14 @@ self =>
       try body
       finally smartParsing = saved
     }
+    def withPatches(patches: List[BracePatch]): UnitParser = new UnitParser(unit, patches)
 
     val syntaxErrors = new ListBuffer[(Int, String)]
     def showSyntaxErrors() =
       for ((offset, msg) <- syntaxErrors)
         unit.error(o2p(offset), msg)
 
-    override def syntaxError(offset: Int, msg: String) {
+    override def syntaxError(offset: Offset, msg: String) {
       if (smartParsing) syntaxErrors += ((offset, msg))
       else unit.error(o2p(offset), msg)
     }
@@ -244,14 +245,15 @@ self =>
       if (syntaxErrors.isEmpty) firstTry
       else in.healBraces() match {
         case Nil      => showSyntaxErrors() ; firstTry
-        case patches  => new UnitParser(unit, patches).parse()
+        case patches  => (this withPatches patches).parse()
       }
     }
   }
 
-  final val Local = 0
-  final val InBlock = 1
-  final val InTemplate = 2
+  type Location = Int
+  final val Local: Location = 0
+  final val InBlock: Location = 1
+  final val InTemplate: Location = 2
 
   // These symbols may not yet be loaded (e.g. in the ide) so don't go
   // through definitions to obtain the names.
@@ -268,20 +270,57 @@ self =>
 
   import nme.raw
 
-  abstract class Parser extends ParserCommon {
+  abstract class Parser extends ParserCommon { parser =>
     val in: Scanner
+    def unit: CompilationUnit
+    def source: SourceFile
 
-    def freshName(prefix: String): Name
-    def freshTermName(prefix: String): TermName
-    def freshTypeName(prefix: String): TypeName
-    def o2p(offset: Int): Position
-    def r2p(start: Int, mid: Int, end: Int): Position
+    /** Scoping operator used to temporarily look into the future.
+     *  Backs up scanner data before evaluating a block and restores it after.
+     */
+    @inline final def lookingAhead[T](body: => T): T = {
+      val saved = new ScannerData {} copyFrom in
+      in.nextToken()
+      try body finally in copyFrom saved
+    }
+
+    /** Perform an operation while peeking ahead.
+     *  Pushback if the operation yields an empty tree or blows to pieces.
+     */
+    @inline def peekingAhead(tree: =>Tree): Tree = {
+      @inline def peekahead() = {
+        in.prev copyFrom in
+        in.nextToken()
+      }
+      @inline def pushback() = {
+        in.next copyFrom in
+        in copyFrom in.prev
+      }
+      peekahead()
+      // try it, in case it is recoverable
+      val res = try tree catch { case e: Exception => pushback() ; throw e }
+      if (res.isEmpty) pushback()
+      res
+    }
+
+    class ParserTreeBuilder extends TreeBuilder {
+      val global: self.global.type = self.global
+      def unit = parser.unit
+      def source = parser.source
+    }
+    val treeBuilder = new ParserTreeBuilder
+    import treeBuilder.{global => _, unit => _, source => _, fresh => _, _}
+
+    implicit def fresh: FreshNameCreator = unit.fresh
+
+    def o2p(offset: Offset): Position                          = Position.offset(source, offset)
+    def r2p(start: Offset, mid: Offset, end: Offset): Position = rangePos(source, start, mid, end)
+    def r2p(start: Offset, mid: Offset): Position              = r2p(start, mid, in.lastOffset max start)
+    def r2p(offset: Offset): Position                          = r2p(offset, offset)
 
     /** whether a non-continuable syntax error has been seen */
     private var lastErrorOffset : Int = -1
 
-    import treeBuilder.{global => _, _}
-
     /** The types of the context bounds of type parameters of the surrounding class
      */
     private var classContextBounds: List[Tree] = Nil
@@ -291,6 +330,7 @@ self =>
       finally classContextBounds = saved
     }
 
+
     /** Are we inside the Scala package? Set for files that start with package scala
      */
     private var inScalaPackage = false
@@ -299,112 +339,108 @@ self =>
       inScalaPackage = false
       currentPackage = ""
     }
-    private lazy val primitiveNames: Set[Name] = tpnme.ScalaValueNames.toSet
-
-    private def inScalaRootPackage       = inScalaPackage && currentPackage == "scala"
-    private def isScalaArray(name: Name) = inScalaRootPackage && name == tpnme.Array
-    private def isPrimitiveType(name: Name) = inScalaRootPackage && primitiveNames(name)
+    private def inScalaRootPackage = inScalaPackage && currentPackage == "scala"
 
     def parseStartRule: () => Tree
 
-    /** This is the general parse entry point.
-     */
-    def parse(): Tree = {
-      val t = parseStartRule()
+    def parseRule[T](rule: this.type => T): T = {
+      val t = rule(this)
       accept(EOF)
       t
     }
 
+    /** This is the general parse entry point.
+     */
+    def parse(): Tree = parseRule(_.parseStartRule())
+
+    /** These are alternative entry points for repl, script runner, toolbox and parsing in macros.
+     */
+    def parseStats(): List[Tree] = parseRule(_.templateStats())
+    def parseStatsOrPackages(): List[Tree] = parseRule(_.templateOrTopStatSeq())
+
     /** This is the parse entry point for code which is not self-contained, e.g.
      *  a script which is a series of template statements.  They will be
      *  swaddled in Trees until the AST is equivalent to the one returned
      *  by compilationUnit().
      */
     def scriptBody(): Tree = {
-      val stmts = templateStats()
-      accept(EOF)
+      val stmts = parseStats()
 
       def mainModuleName = newTermName(settings.script.value)
-      /** If there is only a single object template in the file and it has a
-       *  suitable main method, we will use it rather than building another object
-       *  around it.  Since objects are loaded lazily the whole script would have
-       *  been a no-op, so we're not taking much liberty.
+      /* If there is only a single object template in the file and it has a
+       * suitable main method, we will use it rather than building another object
+       * around it.  Since objects are loaded lazily the whole script would have
+       * been a no-op, so we're not taking much liberty.
        */
       def searchForMain(): Option[Tree] = {
-        /** Have to be fairly liberal about what constitutes a main method since
-         *  nothing has been typed yet - for instance we can't assume the parameter
-         *  type will look exactly like "Array[String]" as it could have been renamed
-         *  via import, etc.
+        /* Have to be fairly liberal about what constitutes a main method since
+         * nothing has been typed yet - for instance we can't assume the parameter
+         * type will look exactly like "Array[String]" as it could have been renamed
+         * via import, etc.
          */
         def isMainMethod(t: Tree) = t match {
           case DefDef(_, nme.main, Nil, List(_), _, _)  => true
           case _                                        => false
         }
-        /** For now we require there only be one top level object. */
+        /* For now we require there only be one top level object. */
         var seenModule = false
         val newStmts = stmts collect {
           case t @ Import(_, _) => t
           case md @ ModuleDef(mods, name, template) if !seenModule && (md exists isMainMethod) =>
             seenModule = true
-            /** This slightly hacky situation arises because we have no way to communicate
-             *  back to the scriptrunner what the name of the program is.  Even if we were
-             *  willing to take the sketchy route of settings.script.value = progName, that
-             *  does not work when using fsc.  And to find out in advance would impose a
-             *  whole additional parse.  So instead, if the actual object's name differs from
-             *  what the script is expecting, we transform it to match.
+            /* This slightly hacky situation arises because we have no way to communicate
+             * back to the scriptrunner what the name of the program is.  Even if we were
+             * willing to take the sketchy route of settings.script.value = progName, that
+             * does not work when using fsc.  And to find out in advance would impose a
+             * whole additional parse.  So instead, if the actual object's name differs from
+             * what the script is expecting, we transform it to match.
              */
             if (name == mainModuleName) md
             else treeCopy.ModuleDef(md, mods, mainModuleName, template)
           case _ =>
-            /** If we see anything but the above, fail. */
+            /* If we see anything but the above, fail. */
             return None
         }
-        Some(makePackaging(0, emptyPkg, newStmts))
+        Some(makeEmptyPackage(0, newStmts))
       }
 
       if (mainModuleName == newTermName(ScriptRunner.defaultScriptMain))
         searchForMain() foreach { return _ }
 
-      /** Here we are building an AST representing the following source fiction,
+      /*  Here we are building an AST representing the following source fiction,
        *  where `moduleName` is from -Xscript (defaults to "Main") and <stmts> are
        *  the result of parsing the script file.
        *
        *  {{{
        *  object moduleName {
-       *    def main(argv: Array[String]): Unit = {
-       *      val args = argv
+       *    def main(args: Array[String]): Unit =
        *      new AnyRef {
        *        stmts
        *      }
-       *    }
        *  }
        *  }}}
        */
-      import definitions._
-
-      def emptyPkg    = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
       def emptyInit   = DefDef(
         NoMods,
         nme.CONSTRUCTOR,
         Nil,
         ListOfNil,
         TypeTree(),
-        Block(List(Apply(gen.mkSuperSelect, Nil)), Literal(Constant(())))
+        Block(List(Apply(gen.mkSuperInitCall, Nil)), literalUnit)
       )
 
       // def main
       def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String)))
-      def mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.argv, mainParamType, EmptyTree))
-      def mainSetArgv   = List(ValDef(NoMods, nme.args, TypeTree(), Ident(nme.argv)))
-      def mainDef       = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, makeAnonymousNew(stmts)))
+      def mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.args, mainParamType, EmptyTree))
+      def mainDef       = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), gen.mkAnonymousNew(stmts))
 
       // object Main
       def moduleName  = newTermName(ScriptRunner scriptMain settings)
-      def moduleBody  = Template(List(atPos(o2p(in.offset))(scalaAnyRefConstr)), emptyValDef, List(emptyInit, mainDef))
+      def moduleBody  = Template(atInPos(scalaAnyRefConstr) :: Nil, noSelfType, List(emptyInit, mainDef))
       def moduleDef   = ModuleDef(NoMods, moduleName, moduleBody)
 
       // package <empty> { ... }
-      makePackaging(0, emptyPkg, List(moduleDef))
+      makeEmptyPackage(0, moduleDef :: Nil)
     }
 
 /* --------------- PLACEHOLDERS ------------------------------------------- */
@@ -429,13 +465,13 @@ self =>
 
       placeholderParams match {
         case vd :: _ =>
-          syntaxError(vd.pos, "unbound placeholder parameter", false)
+          syntaxError(vd.pos, "unbound placeholder parameter", skipIt = false)
           placeholderParams = List()
         case _ =>
       }
       placeholderTypes match {
         case td :: _ =>
-          syntaxError(td.pos, "unbound wildcard type", false)
+          syntaxError(td.pos, "unbound wildcard type", skipIt = false)
           placeholderTypes = List()
         case _ =>
       }
@@ -468,7 +504,7 @@ self =>
 
 /* ------------- ERROR HANDLING ------------------------------------------- */
 
-    var assumedClosingParens = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+    val assumedClosingParens = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
 
     private var inFunReturnType = false
     @inline private def fromWithinReturnType[T](body: => T): T = {
@@ -478,7 +514,7 @@ self =>
       finally inFunReturnType = saved
     }
 
-    protected def skip(targetToken: Int) {
+    protected def skip(targetToken: Token) {
       var nparens = 0
       var nbraces = 0
       while (true) {
@@ -506,17 +542,17 @@ self =>
         in.nextToken()
       }
     }
-    def warning(offset: Int, msg: String): Unit
+    def warning(offset: Offset, msg: String): Unit
     def incompleteInputError(msg: String): Unit
     private def syntaxError(pos: Position, msg: String, skipIt: Boolean) {
       syntaxError(pos pointOrElse in.offset, msg, skipIt)
     }
-    def syntaxError(offset: Int, msg: String): Unit
+    def syntaxError(offset: Offset, msg: String): Unit
     def syntaxError(msg: String, skipIt: Boolean) {
       syntaxError(in.offset, msg, skipIt)
     }
 
-    def syntaxError(offset: Int, msg: String, skipIt: Boolean) {
+    def syntaxError(offset: Offset, msg: String, skipIt: Boolean) {
       if (offset > lastErrorOffset) {
         syntaxError(offset, msg)
         // no more errors on this token.
@@ -534,15 +570,19 @@ self =>
       else
         syntaxError(in.offset, msg, skipIt)
     }
+    def syntaxErrorOrIncompleteAnd[T](msg: String, skipIt: Boolean)(and: T): T = {
+      syntaxErrorOrIncomplete(msg, skipIt)
+      and
+    }
 
-    def expectedMsg(token: Int): String =
-      token2string(token) + " expected but " +token2string(in.token) + " found."
+    def expectedMsgTemplate(exp: String, fnd: String) = s"$exp expected but $fnd found."
+    def expectedMsg(token: Token): String = expectedMsgTemplate(token2string(token), token2string(in.token))
 
     /** Consume one token of the specified type, or signal an error if it is not there. */
-    def accept(token: Int): Int = {
+    def accept(token: Token): Offset = {
       val offset = in.offset
       if (in.token != token) {
-        syntaxErrorOrIncomplete(expectedMsg(token), false)
+        syntaxErrorOrIncomplete(expectedMsg(token), skipIt = false)
         if ((token == RPAREN || token == RBRACE || token == RBRACKET))
           if (in.parenBalance(token) + assumedClosingParens(token) < 0)
             assumedClosingParens(token) += 1
@@ -568,25 +608,16 @@ self =>
       if (!isStatSeqEnd)
         acceptStatSep()
 
-    def errorTypeTree    = TypeTree() setType ErrorType setPos o2p(in.offset)
-    def errorTermTree    = Literal(Constant(null)) setPos o2p(in.offset)
-    def errorPatternTree = Ident(nme.WILDCARD) setPos o2p(in.offset)
+    def errorTypeTree    = setInPos(TypeTree() setType ErrorType)
+    def errorTermTree    = setInPos(newLiteral(null))
+    def errorPatternTree = setInPos(Ident(nme.WILDCARD))
 
     /** Check that type parameter is not by name or repeated. */
     def checkNotByNameOrVarargs(tpt: Tree) = {
       if (treeInfo isByNameParamType tpt)
-        syntaxError(tpt.pos, "no by-name parameter type allowed here", false)
+        syntaxError(tpt.pos, "no by-name parameter type allowed here", skipIt = false)
       else if (treeInfo isRepeatedParamType tpt)
-        syntaxError(tpt.pos, "no * parameter type allowed here", false)
-    }
-
-    /** Check that tree is a legal clause of a forSome. */
-    def checkLegalExistential(t: Tree) = t match {
-      case TypeDef(_, _, _, TypeBoundsTree(_, _)) |
-           ValDef(_, _, _, EmptyTree) | EmptyTree =>
-             ;
-      case _ =>
-        syntaxError(t.pos, "not a legal existential clause", false)
+        syntaxError(tpt.pos, "no * parameter type allowed here", skipIt = false)
     }
 
 /* -------------- TOKEN CLASSES ------------------------------------------- */
@@ -597,6 +628,8 @@ self =>
       case _ => false
     }
 
+    def isAnnotation: Boolean = in.token == AT
+
     def isLocalModifier: Boolean = in.token match {
       case ABSTRACT | FINAL | SEALED | IMPLICIT | LAZY => true
       case _ => false
@@ -617,20 +650,25 @@ self =>
       case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT => true
       case _ => false
     }
+
+    def isIdentExcept(except: Name) = isIdent && in.name != except
+    def isIdentOf(name: Name)       = isIdent && in.name == name
+
     def isUnaryOp = isIdent && raw.isUnary(in.name)
     def isRawStar = isIdent && in.name == raw.STAR
     def isRawBar  = isIdent && in.name == raw.BAR
 
     def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT
+    def isMacro = in.token == IDENTIFIER && in.name == nme.MACROkw
 
-    def isLiteralToken(token: Int) = token match {
+    def isLiteralToken(token: Token) = token match {
       case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
            STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL => true
       case _                                                        => false
     }
     def isLiteral = isLiteralToken(in.token)
 
-    def isExprIntroToken(token: Int): Boolean = isLiteralToken(token) || (token match {
+    def isExprIntroToken(token: Token): Boolean = isLiteralToken(token) || (token match {
       case IDENTIFIER | BACKQUOTED_IDENT |
            THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE |
            DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true
@@ -639,17 +677,17 @@ self =>
 
     def isExprIntro: Boolean = isExprIntroToken(in.token)
 
-    def isTypeIntroToken(token: Int): Boolean = token match {
+    def isTypeIntroToken(token: Token): Boolean = token match {
       case IDENTIFIER | BACKQUOTED_IDENT | THIS |
            SUPER | USCORE | LPAREN | AT => true
       case _ => false
     }
 
-    def isTypeIntro: Boolean = isTypeIntroToken(in.token)
-
     def isStatSeqEnd = in.token == RBRACE || in.token == EOF
 
-    def isStatSep(token: Int): Boolean =
+    def isCaseDefEnd = in.token == RBRACE || in.token == CASE || in.token == EOF
+
+    def isStatSep(token: Token): Boolean =
       token == NEWLINE || token == NEWLINES || token == SEMI
 
     def isStatSep: Boolean = isStatSep(in.token)
@@ -657,42 +695,20 @@ self =>
 
 /* --------- COMMENT AND ATTRIBUTE COLLECTION ----------------------------- */
 
-    /** Join the comment associated with a definition. */
-    def joinComment(trees: => List[Tree]): List[Tree] = {
-      val doc = in.flushDoc
-      if ((doc ne null) && doc.raw.length > 0) {
-        val joined = trees map {
-          t =>
-            DocDef(doc, t) setPos {
-              if (t.pos.isDefined) {
-                val pos = doc.pos.withEnd(t.pos.endOrPoint)
-                // always make the position transparent
-                pos.makeTransparent
-              } else {
-                t.pos
-              }
-            }
-        }
-        joined.find(_.pos.isOpaqueRange) foreach {
-          main =>
-            val mains = List(main)
-            joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) }
-        }
-        joined
-      }
-      else trees
-    }
+    /** A hook for joining the comment associated with a definition.
+     *  Overridden by scaladoc.
+     */
+    def joinComment(trees: => List[Tree]): List[Tree] = trees
 
 /* ---------- TREE CONSTRUCTION ------------------------------------------- */
 
-    def atPos[T <: Tree](offset: Int)(t: T): T =
-      global.atPos(r2p(offset, offset, in.lastOffset max offset))(t)
-    def atPos[T <: Tree](start: Int, point: Int)(t: T): T =
-      global.atPos(r2p(start, point, in.lastOffset max start))(t)
-    def atPos[T <: Tree](start: Int, point: Int, end: Int)(t: T): T =
-      global.atPos(r2p(start, point, end))(t)
-    def atPos[T <: Tree](pos: Position)(t: T): T =
-      global.atPos(pos)(t)
+    def atPos[T <: Tree](offset: Offset)(t: T): T                            = atPos(r2p(offset))(t)
+    def atPos[T <: Tree](start: Offset, point: Offset)(t: T): T              = atPos(r2p(start, point))(t)
+    def atPos[T <: Tree](start: Offset, point: Offset, end: Offset)(t: T): T = atPos(r2p(start, point, end))(t)
+    def atPos[T <: Tree](pos: Position)(t: T): T                             = global.atPos(pos)(t)
+
+    def atInPos[T <: Tree](t: T): T  = atPos(o2p(in.offset))(t)
+    def setInPos[T <: Tree](t: T): T = t setPos o2p(in.offset)
 
     /** Convert tree to formal parameter list. */
     def convertToParams(tree: Tree): List[ValDef] = tree match {
@@ -705,29 +721,40 @@ self =>
       def removeAsPlaceholder(name: Name) {
         placeholderParams = placeholderParams filter (_.name != name)
       }
+      def errorParam = makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.end))
       tree match {
         case Ident(name) =>
           removeAsPlaceholder(name)
-          makeParam(name, TypeTree() setPos o2p(tree.pos.endOrPoint))
+          makeParam(name.toTermName, TypeTree() setPos o2p(tree.pos.end))
         case Typed(Ident(name), tpe) if tpe.isType => // get the ident!
           removeAsPlaceholder(name)
-          makeParam(name, tpe)
+          makeParam(name.toTermName, tpe)
+        case build.SyntacticTuple(as) =>
+          val arity = as.length
+          val example = analyzer.exampleTuplePattern(as map { case Ident(name) => name; case _ => nme.EMPTY })
+          val msg =
+            sm"""|not a legal formal parameter.
+                 |Note: Tuples cannot be directly destructured in method or function parameters.
+                 |      Either create a single parameter accepting the Tuple${arity},
+                 |      or consider a pattern matching anonymous function: `{ case $example => ... }`"""
+          syntaxError(tree.pos, msg, skipIt = false)
+          errorParam
         case _ =>
-          syntaxError(tree.pos, "not a legal formal parameter", false)
-          makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.endOrPoint))
+          syntaxError(tree.pos, "not a legal formal parameter", skipIt = false)
+          errorParam
       }
     }
 
     /** Convert (qual)ident to type identifier. */
     def convertToTypeId(tree: Tree): Tree = atPos(tree.pos) {
       convertToTypeName(tree) getOrElse {
-        syntaxError(tree.pos, "identifier expected", false)
+        syntaxError(tree.pos, "identifier expected", skipIt = false)
         errorTypeTree
       }
     }
 
     /** {{{ part { `sep` part } }}}, or if sepFirst is true, {{{ { `sep` part } }}}. */
-    final def tokenSeparated[T](separator: Int, sepFirst: Boolean, part: => T): List[T] = {
+    final def tokenSeparated[T](separator: Token, sepFirst: Boolean, part: => T): List[T] = {
       val ts = new ListBuffer[T]
       if (!sepFirst)
         ts += part
@@ -740,7 +767,7 @@ self =>
     }
     @inline final def commaSeparated[T](part: => T): List[T] = tokenSeparated(COMMA, sepFirst = false, part)
     @inline final def caseSeparated[T](part: => T): List[T] = tokenSeparated(CASE, sepFirst = true, part)
-    @inline final def readAnnots[T](part: => T): List[T] = tokenSeparated(AT, sepFirst = true, part)
+    def readAnnots(part: => Tree): List[Tree] = tokenSeparated(AT, sepFirst = true, part)
 
 /* --------- OPERAND/OPERATOR STACK --------------------------------------- */
 
@@ -751,53 +778,72 @@ self =>
 
     var opstack: List[OpInfo] = Nil
 
-    def precedence(operator: Name): Int =
-      if (operator eq nme.ERROR) -1
-      else {
-        val firstCh = operator.startChar
-        if (isScalaLetter(firstCh)) 1
-        else if (nme.isOpAssignmentName(operator)) 0
-        else firstCh match {
-          case '|'             => 2
-          case '^'             => 3
-          case '&'             => 4
-          case '=' | '!'       => 5
-          case '<' | '>'       => 6
-          case ':'             => 7
-          case '+' | '-'       => 8
-          case '*' | '/' | '%' => 9
-          case _               => 10
-        }
-      }
+    @deprecated("Use `scala.reflect.internal.Precedence`", "2.11.0")
+    def precedence(operator: Name): Int = Precedence(operator.toString).level
 
-    def checkSize(kind: String, size: Int, max: Int) {
-      if (size > max) syntaxError("too many "+kind+", maximum = "+max, false)
+    private def opHead = opstack.head
+    private def headPrecedence = opHead.precedence
+    private def popOpInfo(): OpInfo = try opHead finally opstack = opstack.tail
+    private def pushOpInfo(top: Tree): Unit = {
+      val name   = in.name
+      val offset = in.offset
+      ident()
+      val targs = if (in.token == LBRACKET) exprTypeArgs() else Nil
+      val opinfo = OpInfo(top, name, targs, offset)
+      opstack ::= opinfo
     }
 
-    def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) =
+    def checkHeadAssoc(leftAssoc: Boolean) = checkAssoc(opHead.offset, opHead.operator, leftAssoc)
+    def checkAssoc(offset: Offset, op: Name, leftAssoc: Boolean) = (
       if (treeInfo.isLeftAssoc(op) != leftAssoc)
-        syntaxError(
-          offset, "left- and right-associative operators with same precedence may not be mixed", false)
-
-    def reduceStack(isExpr: Boolean, base: List[OpInfo], top0: Tree, prec: Int, leftAssoc: Boolean): Tree = {
-      var top = top0
-      if (opstack != base && precedence(opstack.head.operator) == prec)
-        checkAssoc(opstack.head.offset, opstack.head.operator, leftAssoc)
-      while (opstack != base &&
-             (prec < precedence(opstack.head.operator) ||
-              leftAssoc && prec == precedence(opstack.head.operator))) {
-        val opinfo = opstack.head
-        opstack = opstack.tail
-        val opPos = r2p(opinfo.offset, opinfo.offset, opinfo.offset+opinfo.operator.length)
-        val lPos = opinfo.operand.pos
-        val start = if (lPos.isDefined) lPos.startOrPoint else  opPos.startOrPoint
-        val rPos = top.pos
-        val end = if (rPos.isDefined) rPos.endOrPoint else opPos.endOrPoint
-        top = atPos(start, opinfo.offset, end) {
-          makeBinop(isExpr, opinfo.operand, opinfo.operator, top, opPos)
+        syntaxError(offset, "left- and right-associative operators with same precedence may not be mixed", skipIt = false)
+    )
+
+    def finishPostfixOp(start: Int, base: List[OpInfo], opinfo: OpInfo): Tree = {
+      if (opinfo.targs.nonEmpty)
+        syntaxError(opinfo.offset, "type application is not allowed for postfix operators")
+
+      val od = stripParens(reduceExprStack(base, opinfo.lhs))
+      makePostfixSelect(start, opinfo.offset, od, opinfo.operator)
+    }
+
+    def finishBinaryOp(isExpr: Boolean, opinfo: OpInfo, rhs: Tree): Tree = {
+      import opinfo._
+      val operatorPos: Position = Position.range(rhs.pos.source, offset, offset, offset + operator.length)
+      val pos                   = lhs.pos union rhs.pos union operatorPos withPoint offset
+
+      atPos(pos)(makeBinop(isExpr, lhs, operator, rhs, operatorPos, opinfo.targs))
+    }
+
+    def reduceExprStack(base: List[OpInfo], top: Tree): Tree    = reduceStack(isExpr = true, base, top)
+    def reducePatternStack(base: List[OpInfo], top: Tree): Tree = reduceStack(isExpr = false, base, top)
+
+    def reduceStack(isExpr: Boolean, base: List[OpInfo], top: Tree): Tree = {
+      val opPrecedence = if (isIdent) Precedence(in.name.toString) else Precedence(0)
+      val leftAssoc    = !isIdent || (treeInfo isLeftAssoc in.name)
+
+      reduceStack(isExpr, base, top, opPrecedence, leftAssoc)
+    }
+
+    def reduceStack(isExpr: Boolean, base: List[OpInfo], top: Tree, opPrecedence: Precedence, leftAssoc: Boolean): Tree = {
+      def isDone          = opstack == base
+      def lowerPrecedence = !isDone && (opPrecedence < headPrecedence)
+      def samePrecedence  = !isDone && (opPrecedence == headPrecedence)
+      def canReduce       = lowerPrecedence || leftAssoc && samePrecedence
+
+      if (samePrecedence)
+        checkHeadAssoc(leftAssoc)
+
+      def loop(top: Tree): Tree = if (canReduce) {
+        val info = popOpInfo()
+        if (!isExpr && info.targs.nonEmpty) {
+          syntaxError(info.offset, "type application is not allowed in pattern")
+          info.targs.foreach(_.setType(ErrorType))
         }
-      }
-      top
+        loop(finishBinaryOp(isExpr, info, top))
+      } else top
+
+      loop(top)
     }
 
 /* -------- IDENTIFIERS AND LITERALS ------------------------------------------- */
@@ -814,7 +860,7 @@ self =>
       def argType(): Tree
       def functionArgType(): Tree
 
-      private def tupleInfixType(start: Int) = {
+      private def tupleInfixType(start: Offset) = {
         in.nextToken()
         if (in.token == RPAREN) {
           in.nextToken()
@@ -827,7 +873,7 @@ self =>
             atPos(start, in.skipToken()) { makeFunctionTypeTree(ts, typ()) }
           else {
             ts foreach checkNotByNameOrVarargs
-            val tuple = atPos(start) { makeTupleType(ts, flattenUnary = true) }
+            val tuple = atPos(start) { makeTupleType(ts) }
             infixTypeRest(
               compoundTypeRest(
                 annotTypeRest(
@@ -839,9 +885,14 @@ self =>
         }
       }
       private def makeExistentialTypeTree(t: Tree) = {
-        val whereClauses = refinement()
-        whereClauses foreach checkLegalExistential
-        ExistentialTypeTree(t, whereClauses)
+        // EmptyTrees in the result of refinement() stand for parse errors
+        // so it's okay for us to filter them out here
+        ExistentialTypeTree(t, refinement() flatMap {
+          case t @ TypeDef(_, _, _, TypeBoundsTree(_, _)) => Some(t)
+          case t @ ValDef(_, _, _, EmptyTree) => Some(t)
+          case EmptyTree => None
+          case _ => syntaxError(t.pos, "not a legal existential clause", skipIt = false); None
+        })
       }
 
       /** {{{
@@ -889,7 +940,7 @@ self =>
       def simpleType(): Tree = {
         val start = in.offset
         simpleTypeRest(in.token match {
-          case LPAREN   => atPos(start)(makeTupleType(inParens(types()), flattenUnary = true))
+          case LPAREN   => atPos(start)(makeTupleType(inParens(types())))
           case USCORE   => wildcardType(in.skipToken())
           case _        =>
             path(thisOK = false, typeOK = true) match {
@@ -904,11 +955,11 @@ self =>
         val nameOffset = in.offset
         val name       = identForType(skipIt = false)
         val point      = if (name == tpnme.ERROR) hashOffset else nameOffset
-        atPos(t.pos.startOrPoint, point)(SelectFromTypeTree(t, name))
+        atPos(t.pos.start, point)(SelectFromTypeTree(t, name))
       }
       def simpleTypeRest(t: Tree): Tree = in.token match {
         case HASH     => simpleTypeRest(typeProjection(t))
-        case LBRACKET => simpleTypeRest(atPos(t.pos.startOrPoint, t.pos.point)(AppliedTypeTree(t, typeArgs())))
+        case LBRACKET => simpleTypeRest(atPos(t.pos.start, t.pos.point)(AppliedTypeTree(t, typeArgs())))
         case _        => t
       }
 
@@ -918,32 +969,34 @@ self =>
        *  }}}
        */
       def compoundType(): Tree = compoundTypeRest(
-        if (in.token == LBRACE) atPos(o2p(in.offset))(scalaAnyRefConstr)
+        if (in.token == LBRACE) atInPos(scalaAnyRefConstr)
         else annotType()
       )
 
       def compoundTypeRest(t: Tree): Tree = {
-        var ts = new ListBuffer[Tree] += t
+        val ts = new ListBuffer[Tree] += t
         while (in.token == WITH) {
           in.nextToken()
           ts += annotType()
         }
         newLineOptWhenFollowedBy(LBRACE)
-        atPos(t.pos.startOrPoint) {
-          if (in.token == LBRACE) {
-            // Warn if they are attempting to refine Unit; we can't be certain it's
-            // scala.Unit they're refining because at this point all we have is an
-            // identifier, but at a later stage we lose the ability to tell an empty
-            // refinement from no refinement at all.  See bug #284.
-            for (Ident(name) <- ts) name.toString match {
-              case "Unit" | "scala.Unit"  =>
-                warning("Detected apparent refinement of Unit; are you missing an '=' sign?")
-              case _ =>
-            }
-            CompoundTypeTree(Template(ts.toList, emptyValDef, refinement()))
-          }
-          else
-            makeIntersectionTypeTree(ts.toList)
+        val types         = ts.toList
+        val braceOffset   = in.offset
+        val hasRefinement = in.token == LBRACE
+        val refinements   = if (hasRefinement) refinement() else Nil
+        // Warn if they are attempting to refine Unit; we can't be certain it's
+        // scala.Unit they're refining because at this point all we have is an
+        // identifier, but at a later stage we lose the ability to tell an empty
+        // refinement from no refinement at all.  See bug #284.
+        if (hasRefinement) types match {
+          case Ident(name) :: Nil if name endsWith "Unit" => warning(braceOffset, "Detected apparent refinement of Unit; are you missing an '=' sign?")
+          case _                                          =>
+        }
+        // The second case includes an empty refinement - refinements is empty, but
+        // it still gets a CompoundTypeTree.
+        ts.toList match {
+          case tp :: Nil if !hasRefinement => tp  // single type, no refinement, already positioned
+          case tps                         => atPos(t.pos.start)(CompoundTypeTree(Template(tps, noSelfType, refinements)))
         }
       }
 
@@ -955,7 +1008,7 @@ self =>
           val op = identForType()
           val tycon = atPos(opOffset) { Ident(op) }
           newLineOptWhenFollowing(isTypeIntroToken)
-          def mkOp(t1: Tree) = atPos(t.pos.startOrPoint, opOffset) { AppliedTypeTree(tycon, List(t, t1)) }
+          def mkOp(t1: Tree) = atPos(t.pos.start, opOffset) { AppliedTypeTree(tycon, List(t, t1)) }
           if (leftAssoc)
             infixTypeRest(mkOp(compoundType()), InfixMode.LeftOp)
           else
@@ -979,15 +1032,15 @@ self =>
     }
 
     /** Assumed (provisionally) to be TermNames. */
-    def ident(skipIt: Boolean): Name =
+    def ident(skipIt: Boolean): Name = (
       if (isIdent) {
         val name = in.name.encode
         in.nextToken()
         name
-      } else {
-        syntaxErrorOrIncomplete(expectedMsg(IDENTIFIER), skipIt)
-        nme.ERROR
       }
+      else syntaxErrorOrIncompleteAnd(expectedMsg(IDENTIFIER), skipIt)(nme.ERROR)
+    )
+
     def ident(): Name = ident(skipIt = true)
     def rawIdent(): Name = try in.name finally in.nextToken()
 
@@ -995,11 +1048,13 @@ self =>
     def identForType(): TypeName = ident().toTypeName
     def identForType(skipIt: Boolean): TypeName = ident(skipIt).toTypeName
 
+    def identOrMacro(): Name = if (isMacro) rawIdent() else ident()
+
     def selector(t: Tree): Tree = {
       val point = in.offset
       //assert(t.pos.isDefined, t)
       if (t != EmptyTree)
-        Select(t, ident(skipIt = false)) setPos r2p(t.pos.startOrPoint, point, in.lastOffset)
+        Select(t, ident(skipIt = false)) setPos r2p(t.pos.start, point, in.lastOffset)
       else
         errorTermTree // has already been reported
     }
@@ -1053,10 +1108,10 @@ self =>
       t
     }
 
-    def selectors(t: Tree, typeOK: Boolean, dotOffset: Int): Tree =
+    def selectors(t: Tree, typeOK: Boolean, dotOffset: Offset): Tree =
       if (typeOK && in.token == TYPE) {
         in.nextToken()
-        atPos(t.pos.startOrPoint, dotOffset) { SingletonTypeTree(t) }
+        atPos(t.pos.start, dotOffset) { SingletonTypeTree(t) }
       }
       else {
         val t1 = selector(t)
@@ -1111,72 +1166,90 @@ self =>
      *                  | null
      *  }}}
      */
-    def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Int = in.offset): Tree = {
-      atPos(start) {
-        def finish(value: Any): Tree = {
-          val t = Literal(Constant(value))
-          in.nextToken()
-          t
-        }
-        if (in.token == SYMBOLLIT)
-          Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
-        else if (in.token == INTERPOLATIONID)
-          interpolatedString(inPattern = inPattern)
-        else finish(in.token match {
-          case CHARLIT   => in.charVal
-          case INTLIT    => in.intVal(isNegated).toInt
-          case LONGLIT   => in.intVal(isNegated)
-          case FLOATLIT  => in.floatVal(isNegated).toFloat
-          case DOUBLELIT => in.floatVal(isNegated)
-          case STRINGLIT | STRINGPART => in.strVal.intern()
-          case TRUE      => true
-          case FALSE     => false
-          case NULL      => null
-          case _         =>
-            syntaxErrorOrIncomplete("illegal literal", true)
-            null
+    def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Offset = in.offset): Tree = atPos(start) {
+      def finish(value: Any): Tree = try newLiteral(value) finally in.nextToken()
+      if (in.token == SYMBOLLIT)
+        Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
+      else if (in.token == INTERPOLATIONID)
+        interpolatedString(inPattern = inPattern)
+      else finish(in.token match {
+        case CHARLIT                => in.charVal
+        case INTLIT                 => in.intVal(isNegated).toInt
+        case LONGLIT                => in.intVal(isNegated)
+        case FLOATLIT               => in.floatVal(isNegated).toFloat
+        case DOUBLELIT              => in.floatVal(isNegated)
+        case STRINGLIT | STRINGPART => in.strVal.intern()
+        case TRUE                   => true
+        case FALSE                  => false
+        case NULL                   => null
+        case _                      => syntaxErrorOrIncompleteAnd("illegal literal", skipIt = true)(null)
+      })
+    }
+
+    /** Handle placeholder syntax.
+     *  If evaluating the tree produces placeholders, then make it a function.
+     */
+    private def withPlaceholders(tree: =>Tree, isAny: Boolean): Tree = {
+      val savedPlaceholderParams = placeholderParams
+      placeholderParams = List()
+      var res = tree
+      if (placeholderParams.nonEmpty && !isWildcard(res)) {
+        res = atPos(res.pos)(Function(placeholderParams.reverse, res))
+        if (isAny) placeholderParams foreach (_.tpt match {
+          case tpt @ TypeTree() => tpt setType definitions.AnyTpe
+          case _                => // some ascription
         })
+        placeholderParams = List()
       }
+      placeholderParams = placeholderParams ::: savedPlaceholderParams
+      res
     }
 
-    private def stringOp(t: Tree, op: TermName) = {
-      val str = in.strVal
+    /** Consume a USCORE and create a fresh synthetic placeholder param. */
+    private def freshPlaceholder(): Tree = {
+      val start = in.offset
+      val pname = freshTermName()
       in.nextToken()
-      if (str.length == 0) t
-      else atPos(t.pos.startOrPoint) {
-        Apply(Select(t, op), List(Literal(Constant(str))))
-      }
+      val id = atPos(start)(Ident(pname))
+      val param = atPos(id.pos.focus)(gen.mkSyntheticParam(pname.toTermName))
+      placeholderParams = param :: placeholderParams
+      id
     }
 
-    private def interpolatedString(inPattern: Boolean = false): Tree = atPos(in.offset) {
-      val start = in.offset
-      val interpolator = in.name
+    private def interpolatedString(inPattern: Boolean): Tree = {
+      def errpolation() = syntaxErrorOrIncompleteAnd("error in interpolated string: identifier or block expected",
+                                                     skipIt = true)(EmptyTree)
+      // Like Swiss cheese, with holes
+      def stringCheese: Tree = atPos(in.offset) {
+        val start = in.offset
+        val interpolator = in.name.encoded // ident() for INTERPOLATIONID
 
-      val partsBuf = new ListBuffer[Tree]
-      val exprBuf = new ListBuffer[Tree]
-      in.nextToken()
-      while (in.token == STRINGPART) {
-        partsBuf += literal()
-        exprBuf += {
-          if (inPattern) dropAnyBraces(pattern())
-          else {
-            if (in.token == IDENTIFIER) atPos(in.offset)(Ident(ident()))
-            else if(in.token == LBRACE) expr()
-            else if(in.token == THIS) { in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY)) }
-            else {
-               syntaxErrorOrIncomplete("error in interpolated string: identifier or block expected", true)
-               EmptyTree
+        val partsBuf = new ListBuffer[Tree]
+        val exprBuf = new ListBuffer[Tree]
+        in.nextToken()
+        while (in.token == STRINGPART) {
+          partsBuf += literal()
+          exprBuf += (
+            if (inPattern) dropAnyBraces(pattern())
+            else in.token match {
+              case IDENTIFIER => atPos(in.offset)(Ident(ident()))
+              //case USCORE   => freshPlaceholder()  // ifonly etapolation
+              case LBRACE     => expr()              // dropAnyBraces(expr0(Local))
+              case THIS       => in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY))
+              case _          => errpolation()
             }
-          }
+          )
         }
-      }
-      if (in.token == STRINGLIT) partsBuf += literal()
+        if (in.token == STRINGLIT) partsBuf += literal()
 
-      val t1 = atPos(o2p(start)) { Ident(nme.StringContext) }
-      val t2 = atPos(start) { Apply(t1, partsBuf.toList) }
-      t2 setPos t2.pos.makeTransparent
-      val t3 = Select(t2, interpolator) setPos t2.pos
-      atPos(start) { Apply(t3, exprBuf.toList) }
+        val t1 = atPos(o2p(start)) { Ident(nme.StringContext) }
+        val t2 = atPos(start) { Apply(t1, partsBuf.toList) }
+        t2 setPos t2.pos.makeTransparent
+        val t3 = Select(t2, interpolator) setPos t2.pos
+        atPos(start) { Apply(t3, exprBuf.toList) }
+      }
+      if (inPattern) stringCheese
+      else withPlaceholders(stringCheese, isAny = true) // strinterpolator params are Any* by definition
     }
 
 /* ------------- NEW LINES ------------------------------------------------- */
@@ -1190,12 +1263,12 @@ self =>
         in.nextToken()
     }
 
-    def newLineOptWhenFollowedBy(token: Int) {
+    def newLineOptWhenFollowedBy(token: Token) {
       // note: next is defined here because current == NEWLINE
       if (in.token == NEWLINE && in.next.token == token) newLineOpt()
     }
 
-    def newLineOptWhenFollowing(p: Int => Boolean) {
+    def newLineOptWhenFollowing(p: Token => Boolean) {
       // note: next is defined here because current == NEWLINE
       if (in.token == NEWLINE && p(in.next.token)) newLineOpt()
     }
@@ -1210,7 +1283,7 @@ self =>
       if (in.token == COLON) { in.nextToken(); typ() }
       else TypeTree()
 
-    def typeOrInfixType(location: Int): Tree =
+    def typeOrInfixType(location: Location): Tree =
       if (location == Local) typ()
       else startInfixType()
 
@@ -1221,7 +1294,7 @@ self =>
      *  WildcardType ::= `_' TypeBounds
      *  }}}
      */
-    def wildcardType(start: Int) = {
+    def wildcardType(start: Offset) = {
       val pname = freshTypeName("_$")
       val t = atPos(start)(Ident(pname))
       val bounds = typeBounds()
@@ -1232,15 +1305,6 @@ self =>
 
 /* ----------- EXPRESSIONS ------------------------------------------------ */
 
-    /** {{{
-     *  EqualsExpr ::= `=' Expr
-     *  }}}
-     */
-    def equalsExpr(): Tree = {
-      accept(EQUALS)
-      expr()
-    }
-
     def condExpr(): Tree = {
       if (in.token == LPAREN) {
         in.nextToken()
@@ -1249,14 +1313,14 @@ self =>
         r
       } else {
         accept(LPAREN)
-        Literal(Constant(true))
+        newLiteral(true)
       }
     }
 
     /* Hook for the IDE; unlike expression parsing, this can be stubbed.
      * Don't use it for any tree that can be inspected in the parser!
      */
-    def statement(location: Int): Tree = expr(location) // !!! still needed?
+    def statement(location: Location): Tree = expr(location) // !!! still needed?
 
     /** {{{
      *  Expr       ::= (Bindings | [`implicit'] Id | `_')  `=>' Expr
@@ -1283,27 +1347,16 @@ self =>
      */
     def expr(): Tree = expr(Local)
 
-    def expr(location: Int): Tree = {
-      var savedPlaceholderParams = placeholderParams
-      placeholderParams = List()
-      var res = expr0(location)
-      if (!placeholderParams.isEmpty && !isWildcard(res)) {
-        res = atPos(res.pos){ Function(placeholderParams.reverse, res) }
-        placeholderParams = List()
-      }
-      placeholderParams = placeholderParams ::: savedPlaceholderParams
-      res
-    }
-
+    def expr(location: Location): Tree = withPlaceholders(expr0(location), isAny = false)
 
-    def expr0(location: Int): Tree = (in.token: @scala.annotation.switch) match {
+    def expr0(location: Location): Tree = (in.token: @scala.annotation.switch) match {
       case IF =>
         def parseIf = atPos(in.skipToken()) {
           val cond = condExpr()
           newLinesOpt()
           val thenp = expr()
           val elsep = if (in.token == ELSE) { in.nextToken(); expr() }
-          else Literal(Constant())
+          else literalUnit
           If(cond, thenp, elsep)
         }
         parseIf
@@ -1345,14 +1398,13 @@ self =>
         parseWhile
       case DO =>
         def parseDo = {
-          val start = in.offset
           atPos(in.skipToken()) {
             val lname: Name = freshTermName(nme.DO_WHILE_PREFIX)
             val body = expr()
             if (isStatSep) in.nextToken()
             accept(WHILE)
             val cond = condExpr()
-            makeDoWhile(lname, body, cond)
+            makeDoWhile(lname.toTermName, body, cond)
           }
         }
         parseDo
@@ -1365,9 +1417,9 @@ self =>
           newLinesOpt()
           if (in.token == YIELD) {
             in.nextToken()
-            makeForYield(enums, expr())
+            gen.mkFor(enums, gen.Yield(expr()))
           } else {
-            makeFor(enums, expr())
+            gen.mkFor(enums, expr())
           }
         }
         def adjustStart(tree: Tree) =
@@ -1378,7 +1430,7 @@ self =>
       case RETURN =>
         def parseReturn =
           atPos(in.skipToken()) {
-            Return(if (isExprIntro) expr() else Literal(Constant()))
+            Return(if (isExprIntro) expr() else literalUnit)
           }
         parseReturn
       case THROW =>
@@ -1395,7 +1447,7 @@ self =>
           if (in.token == EQUALS) {
             t match {
               case Ident(_) | Select(_, _) | Apply(_, _) =>
-                t = atPos(t.pos.startOrPoint, in.skipToken()) { makeAssign(t, expr()) }
+                t = atPos(t.pos.start, in.skipToken()) { gen.mkAssign(t, expr()) }
               case _ =>
             }
           } else if (in.token == COLON) {
@@ -1406,16 +1458,16 @@ self =>
               val uscorePos = in.skipToken()
               if (isIdent && in.name == nme.STAR) {
                 in.nextToken()
-                t = atPos(t.pos.startOrPoint, colonPos) {
+                t = atPos(t.pos.start, colonPos) {
                   Typed(t, atPos(uscorePos) { Ident(tpnme.WILDCARD_STAR) })
                 }
               } else {
-                syntaxErrorOrIncomplete("`*' expected", true)
+                syntaxErrorOrIncomplete("`*' expected", skipIt = true)
               }
-            } else if (in.token == AT) {
+            } else if (isAnnotation) {
               t = (t /: annotations(skipNewLines = false))(makeAnnotated)
             } else {
-              t = atPos(t.pos.startOrPoint, colonPos) {
+              t = atPos(t.pos.start, colonPos) {
                 val tpt = typeOrInfixType(location)
                 if (isWildcard(t))
                   (placeholderParams: @unchecked) match {
@@ -1428,18 +1480,18 @@ self =>
               }
             }
           } else if (in.token == MATCH) {
-            t = atPos(t.pos.startOrPoint, in.skipToken())(Match(stripParens(t), inBracesOrNil(caseClauses())))
+            t = atPos(t.pos.start, in.skipToken())(Match(stripParens(t), inBracesOrNil(caseClauses())))
           }
           // in order to allow anonymous functions as statements (as opposed to expressions) inside
           // templates, we have to disambiguate them from self type declarations - bug #1565
           // The case still missed is unparenthesized single argument, like "x: Int => x + 1", which
           // may be impossible to distinguish from a self-type and so remains an error.  (See #1564)
           def lhsIsTypedParamList() = t match {
-            case Parens(xs) if xs forall (_.isInstanceOf[Typed]) => true
+            case Parens(xs) if xs.forall(isTypedParam) => true
             case _ => false
           }
           if (in.token == ARROW && (location != InTemplate || lhsIsTypedParamList)) {
-            t = atPos(t.pos.startOrPoint, in.skipToken()) {
+            t = atPos(t.pos.start, in.skipToken()) {
               Function(convertToParams(t), if (location != InBlock) expr() else block())
             }
           }
@@ -1448,12 +1500,14 @@ self =>
         parseOther
     }
 
+    def isTypedParam(t: Tree) = t.isInstanceOf[Typed]
+
     /** {{{
      *  Expr ::= implicit Id => Expr
      *  }}}
      */
 
-    def implicitClosure(start: Int, location: Int): Tree = {
+    def implicitClosure(start: Offset, location: Location): Tree = {
       val param0 = convertToParam {
         atPos(in.offset) {
           Ident(ident()) match {
@@ -1479,28 +1533,19 @@ self =>
     def postfixExpr(): Tree = {
       val start = in.offset
       val base  = opstack
-      var top   = prefixExpr()
 
-      while (isIdent) {
-        top = reduceStack(isExpr = true, base, top, precedence(in.name), leftAssoc = treeInfo.isLeftAssoc(in.name))
-        val op = in.name
-        opstack = OpInfo(top, op, in.offset) :: opstack
-        ident()
+      def loop(top: Tree): Tree = if (!isIdent) top else {
+        pushOpInfo(reduceExprStack(base, top))
         newLineOptWhenFollowing(isExprIntroToken)
-        if (isExprIntro) {
-          val next = prefixExpr()
-          if (next == EmptyTree)
-            return reduceStack(isExpr = true, base, top, 0, leftAssoc = true)
-          top = next
-        } else {
-          // postfix expression
-          val topinfo = opstack.head
-          opstack = opstack.tail
-          val od = stripParens(reduceStack(isExpr = true, base, topinfo.operand, 0, leftAssoc = true))
-          return makePostfixSelect(start, topinfo.offset, od, topinfo.operator)
-        }
+        if (isExprIntro)
+          prefixExpr() match {
+            case EmptyTree => reduceExprStack(base, top)
+            case next      => loop(next)
+          }
+        else finishPostfixOp(start, base, popOpInfo())
       }
-      reduceStack(isExpr = true, base, top, 0, leftAssoc = true)
+
+      reduceExprStack(base, loop(prefixExpr()))
     }
 
     /** {{{
@@ -1510,7 +1555,7 @@ self =>
     def prefixExpr(): Tree = {
       if (isUnaryOp) {
         atPos(in.offset) {
-          val name = nme.toUnaryName(rawIdent())
+          val name = nme.toUnaryName(rawIdent().toTermName)
           if (name == nme.UNARY_- && isNumericLit)
             simpleExprRest(literal(isNegated = true), canApply = true)
           else
@@ -1544,15 +1589,9 @@ self =>
           case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER =>
             path(thisOK = true, typeOK = false)
           case USCORE =>
-            val start = in.offset
-            val pname = freshName("x$")
-            in.nextToken()
-            val id = atPos(start) (Ident(pname))
-            val param = atPos(id.pos.focus){ makeSyntheticParam(pname) }
-            placeholderParams = param :: placeholderParams
-            id
+            freshPlaceholder()
           case LPAREN =>
-            atPos(in.offset)(makeParens(commaSeparated(expr)))
+            atPos(in.offset)(makeParens(commaSeparated(expr())))
           case LBRACE =>
             canApply = false
             blockExpr()
@@ -1561,12 +1600,11 @@ self =>
             val nstart = in.skipToken()
             val npos = r2p(nstart, nstart, in.lastOffset)
             val tstart = in.offset
-            val (parents, argss, self, stats) = template(isTrait = false)
+            val (parents, self, stats) = template()
             val cpos = r2p(tstart, tstart, in.lastOffset max tstart)
-            makeNew(parents, self, stats, argss, npos, cpos)
+            gen.mkNew(parents, self, stats, npos, cpos)
           case _ =>
-            syntaxErrorOrIncomplete("illegal start of simple expression", true)
-            errorTermTree
+            syntaxErrorOrIncompleteAnd("illegal start of simple expression", skipIt = true)(errorTermTree)
         }
       simpleExprRest(t, canApply = canApply)
     }
@@ -1583,14 +1621,14 @@ self =>
             case Ident(_) | Select(_, _) | Apply(_, _) =>
               var app: Tree = t1
               while (in.token == LBRACKET)
-                app = atPos(app.pos.startOrPoint, in.offset)(TypeApply(app, exprTypeArgs()))
+                app = atPos(app.pos.start, in.offset)(TypeApply(app, exprTypeArgs()))
 
               simpleExprRest(app, canApply = true)
             case _ =>
               t1
           }
         case LPAREN | LBRACE if (canApply) =>
-          val app = atPos(t.pos.startOrPoint, in.offset) {
+          val app = atPos(t.pos.start, in.offset) {
             // look for anonymous function application like (f _)(x) and
             // translate to (f _).apply(x), bug #460
             val sel = t match {
@@ -1603,7 +1641,7 @@ self =>
           }
           simpleExprRest(app, canApply = true)
         case USCORE =>
-          atPos(t.pos.startOrPoint, in.skipToken()) {
+          atPos(t.pos.start, in.skipToken()) {
             Typed(stripParens(t), Function(Nil, EmptyTree))
           }
         case _ =>
@@ -1617,14 +1655,9 @@ self =>
      *  }}}
      */
     def argumentExprs(): List[Tree] = {
-      def args(): List[Tree] = commaSeparated {
-        val maybeNamed = isIdent
-        expr() match {
-          case a @ Assign(id, rhs) if maybeNamed =>
-            atPos(a.pos) { AssignOrNamedArg(id, rhs) }
-          case e => e
-        }
-      }
+      def args(): List[Tree] = commaSeparated(
+        if (isIdent) treeInfo.assignmentToMaybeNamedArg(expr()) else expr()
+      )
       in.token match {
         case LBRACE   => List(blockExpr())
         case LPAREN   => inParens(if (in.token == RPAREN) Nil else args())
@@ -1655,13 +1688,16 @@ self =>
      */
     def block(): Tree = makeBlock(blockStatSeq())
 
+    def caseClause(): CaseDef =
+      atPos(in.offset)(makeCaseDef(pattern(), guard(), caseBlock()))
+
     /** {{{
      *  CaseClauses ::= CaseClause {CaseClause}
      *  CaseClause  ::= case Pattern [Guard] `=>' Block
      *  }}}
      */
     def caseClauses(): List[CaseDef] = {
-      val cases = caseSeparated { atPos(in.offset)(makeCaseDef(pattern(), guard(), caseBlock())) }
+      val cases = caseSeparated { caseClause() }
       if (cases.isEmpty)  // trigger error if there are no cases
         accept(CASE)
 
@@ -1687,22 +1723,25 @@ self =>
      *                |  val Pattern1 `=' Expr
      *  }}}
      */
-    def enumerators(): List[Enumerator] = {
-      val enums = new ListBuffer[Enumerator]
-      generator(enums, eqOK = false)
+    def enumerators(): List[Tree] = {
+      val enums = new ListBuffer[Tree]
+      enums ++= enumerator(isFirst = true)
       while (isStatSep) {
         in.nextToken()
-        if (in.token == IF) enums += makeFilter(in.offset, guard())
-        else generator(enums, eqOK = true)
+        enums ++= enumerator(isFirst = false)
       }
       enums.toList
     }
 
+    def enumerator(isFirst: Boolean, allowNestedIf: Boolean = true): List[Tree] =
+      if (in.token == IF && !isFirst) makeFilter(in.offset, guard()) :: Nil
+      else generator(!isFirst, allowNestedIf)
+
     /** {{{
      *  Generator ::= Pattern1 (`<-' | `=') Expr [Guard]
      *  }}}
      */
-    def generator(enums: ListBuffer[Enumerator], eqOK: Boolean) {
+    def generator(eqOK: Boolean, allowNestedIf: Boolean = true): List[Tree] = {
       val start  = in.offset
       val hasVal = in.token == VAL
       if (hasVal)
@@ -1720,13 +1759,22 @@ self =>
       if (hasEq && eqOK) in.nextToken()
       else accept(LARROW)
       val rhs = expr()
-      enums += makeGenerator(r2p(start, point, in.lastOffset max start), pat, hasEq, rhs)
-      // why max above? IDE stress tests have shown that lastOffset could be less than start,
+
+      def loop(): List[Tree] =
+        if (in.token != IF) Nil
+        else makeFilter(in.offset, guard()) :: loop()
+
+      val tail =
+        if (allowNestedIf) loop()
+        else Nil
+
+      // why max? IDE stress tests have shown that lastOffset could be less than start,
       // I guess this happens if instead of a for-expression we sit on a closing paren.
-      while (in.token == IF) enums += makeFilter(in.offset, guard())
+      val genPos = r2p(start, point, in.lastOffset max start)
+      gen.mkGenerator(genPos, pat, hasEq, rhs) :: tail
     }
 
-    def makeFilter(start: Int, tree: Tree) = Filter(r2p(start, tree.pos.point, tree.pos.endOrPoint), tree)
+    def makeFilter(start: Offset, tree: Tree) = gen.Filter(tree).setPos(r2p(start, tree.pos.point, tree.pos.end))
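    The reworked enumerator/generator code above returns a list of trees per enumerator and attaches any directly following `if' guards to the generator that precedes them. A minimal sketch of the source forms being parsed, in plain Scala (the values are illustrative only):

        val xs = List(1, 2, 3, 4)
        // one generator followed by two guards; both filters attach to the generator (allowNestedIf)
        val filtered = for (x <- xs if x > 1 if x % 2 == 0) yield x        // List(2, 4)
        // an `=' binding (the eqOK case) mixed in with a generator
        val paired   = for (x <- xs; doubled = x * 2) yield (x, doubled)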
 
 /* -------- PATTERNS ------------------------------------------- */
 
@@ -1749,10 +1797,12 @@ self =>
             in.nextToken()
             if (in.token == SUBTYPE || in.token == SUPERTYPE) wildcardType(start)
             else atPos(start) { Bind(tpnme.WILDCARD, EmptyTree) }
-          case IDENTIFIER if nme.isVariableName(in.name) =>
-            atPos(start) { Bind(identForType(), EmptyTree) }
           case _ =>
-            typ()
+            typ() match {
+              case Ident(name: TypeName) if nme.isVariableName(name) =>
+                atPos(start) { Bind(name, EmptyTree) }
+              case t => t
+            }
         }
       }
 
@@ -1792,7 +1842,7 @@ self =>
       def pattern1(): Tree = pattern2() match {
         case p @ Ident(name) if in.token == COLON =>
           if (treeInfo.isVarPattern(p))
-            atPos(p.pos.startOrPoint, in.skipToken())(Typed(p, compoundType()))
+            atPos(p.pos.start, in.skipToken())(Typed(p, compoundType()))
           else {
             syntaxError(in.offset, "Pattern variables must start with a lower-case letter. (SLS 8.1.1.)")
             p
@@ -1808,7 +1858,6 @@ self =>
        *  }}}
        */
       def pattern2(): Tree = {
-        val nameOffset = in.offset
         val p = pattern3()
 
         if (in.token != AT) p
@@ -1818,7 +1867,7 @@ self =>
             pattern3()
           case Ident(name) if treeInfo.isVarPattern(p) =>
             in.nextToken()
-            atPos(p.pos.startOrPoint) { Bind(name, pattern3()) }
+            atPos(p.pos.start) { Bind(name, pattern3()) }
           case _ => p
         }
       }
@@ -1829,71 +1878,51 @@ self =>
        *  }}}
        */
       def pattern3(): Tree = {
-        var top = simplePattern(badPattern3)
-        // after peekahead
-        def acceptWildStar() = atPos(top.pos.startOrPoint, in.prev.offset)(Star(stripParens(top)))
-        def peekahead() = {
-          in.prev copyFrom in
-          in.nextToken()
-        }
-        def pushback() = {
-          in.next copyFrom in
-          in copyFrom in.prev
-        }
+        val top = simplePattern(badPattern3)
+        val base = opstack
         // See SI-3189, SI-4832 for motivation. Cf SI-3480 for counter-motivation.
-        // TODO: dredge out the remnants of regexp patterns.
-        // /{/ peek for _*) or _*} (for xml escape)
-        if (isSequenceOK) {
-          top match {
-            case Ident(nme.WILDCARD) if (isRawStar) =>
-              peekahead()
-              in.token match {
-                case RBRACE if (isXML) => return acceptWildStar()
-                case RPAREN if (!isXML) => return acceptWildStar()
-                case _ => pushback()
-              }
-            case _ =>
-          }
+        def isCloseDelim = in.token match {
+          case RBRACE => isXML
+          case RPAREN => !isXML
+          case _      => false
         }
-        val base = opstack
-        while (isIdent && in.name != raw.BAR) {
-          top = reduceStack(isExpr = false, base, top, precedence(in.name), leftAssoc = treeInfo.isLeftAssoc(in.name))
-          val op = in.name
-          opstack = OpInfo(top, op, in.offset) :: opstack
-          ident()
-          top = simplePattern(badPattern3)
+        def checkWildStar: Tree = top match {
+          case Ident(nme.WILDCARD) if isSequenceOK && isRawStar => peekingAhead (
+            if (isCloseDelim) atPos(top.pos.start, in.prev.offset)(Star(stripParens(top)))
+            else EmptyTree
+          )
+          case _ => EmptyTree
+        }
+        def loop(top: Tree): Tree = reducePatternStack(base, top) match {
+          case next if isIdentExcept(raw.BAR) => pushOpInfo(next) ; loop(simplePattern(badPattern3))
+          case next                           => next
         }
-        stripParens(reduceStack(isExpr = false, base, top, 0, leftAssoc = true))
+        checkWildStar orElse stripParens(loop(top))
       }
+
       def badPattern3(): Tree = {
-        def isComma = in.token == COMMA
-        def isAnyBrace = in.token == RPAREN || in.token == RBRACE
-        val badStart = "illegal start of simple pattern"
+        def isComma                = in.token == COMMA
+        def isDelimiter            = in.token == RPAREN || in.token == RBRACE
+        def isCommaOrDelimiter     = isComma || isDelimiter
+        val (isUnderscore, isStar) = opstack match {
+          case OpInfo(Ident(nme.WILDCARD), nme.STAR, _, _) :: _ => (true,   true)
+          case OpInfo(_, nme.STAR, _, _) :: _                   => (false,  true)
+          case _                                                => (false, false)
+        }
+        def isSeqPatternClose = isUnderscore && isStar && isSequenceOK && isDelimiter
+        val preamble = "bad simple pattern:"
+        val subtext = (isUnderscore, isStar, isSequenceOK) match {
+          case (true,  true, true)  if isComma            => "bad use of _* (a sequence pattern must be the last pattern)"
+          case (true,  true, true)  if isDelimiter        => "bad brace or paren after _*"
+          case (true,  true, false) if isDelimiter        => "bad use of _* (sequence pattern not allowed)"
+          case (false, true, true)  if isDelimiter        => "use _* to match a sequence"
+          case (false, true, _)     if isCommaOrDelimiter => "trailing * is not a valid pattern"
+          case _                                          => null
+        }
+        val msg = if (subtext != null) s"$preamble $subtext" else "illegal start of simple pattern"
         // better recovery if we don't skip delims of patterns
-        var skip = !(isComma || isAnyBrace)
-        val msg = if (!opstack.isEmpty && opstack.head.operator == nme.STAR) {
-            opstack.head.operand match {
-              case Ident(nme.WILDCARD) =>
-                if (isSequenceOK && isComma)
-                  "bad use of _* (a sequence pattern must be the last pattern)"
-                else if (isSequenceOK && isAnyBrace) {
-                  skip = true  // do skip bad paren; scanner may skip bad brace already
-                  "bad brace or paren after _*"
-                } else if (!isSequenceOK && isAnyBrace)
-                  "bad use of _* (sequence pattern not allowed)"
-                else badStart
-              case _ =>
-                if (isSequenceOK && isAnyBrace)
-                  "use _* to match a sequence"
-                else if (isComma || isAnyBrace)
-                  "trailing * is not a valid pattern"
-                else badStart
-            }
-          } else {
-            badStart
-          }
-        syntaxErrorOrIncomplete(msg, skip)
-        errorPatternTree
+        val skip = !isCommaOrDelimiter || isSeqPatternClose
+        syntaxErrorOrIncompleteAnd(msg, skip)(errorPatternTree)
       }
 
       /** {{{
@@ -1909,19 +1938,15 @@ self =>
        *
        * XXX: Hook for IDE
        */
-      def simplePattern(): Tree = {
+      def simplePattern(): Tree = (
         // simple diagnostics for this entry point
-        def badStart(): Tree = {
-          syntaxErrorOrIncomplete("illegal start of simple pattern", true)
-          errorPatternTree
-        }
-        simplePattern(badStart)
-      }
+        simplePattern(() => syntaxErrorOrIncompleteAnd("illegal start of simple pattern", skipIt = true)(errorPatternTree))
+      )
       def simplePattern(onError: () => Tree): Tree = {
         val start = in.offset
         in.token match {
           case IDENTIFIER | BACKQUOTED_IDENT | THIS =>
-            var t = stableId()
+            val t = stableId()
             in.token match {
               case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT =>
                 t match {
@@ -1983,7 +2008,6 @@ self =>
 
     /** Default entry points into some pattern contexts. */
     def pattern(): Tree = noSeq.pattern()
-    def patterns(): List[Tree] = noSeq.patterns()
     def seqPatterns(): List[Tree] = seqOK.patterns()
     def xmlSeqPatterns(): List[Tree] = xmlSeqOK.patterns() // Called from xml parser
     def argumentPatterns(): List[Tree] = inParens {
@@ -1997,16 +2021,16 @@ self =>
     /** Drop `private` modifier when followed by a qualifier.
      *  Contract `abstract` and `override` to ABSOVERRIDE
      */
-    private def normalize(mods: Modifiers): Modifiers =
+    private def normalizeModifers(mods: Modifiers): Modifiers =
       if (mods.isPrivate && mods.hasAccessBoundary)
-        normalize(mods &~ Flags.PRIVATE)
+        normalizeModifers(mods &~ Flags.PRIVATE)
       else if (mods hasAllFlags (Flags.ABSTRACT | Flags.OVERRIDE))
-        normalize(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE)
+        normalizeModifers(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE)
       else
         mods
 
     private def addMod(mods: Modifiers, mod: Long, pos: Position): Modifiers = {
-      if (mods hasFlag mod) syntaxError(in.offset, "repeated modifier", false)
+      if (mods hasFlag mod) syntaxError(in.offset, "repeated modifier", skipIt = false)
       in.nextToken()
       (mods | mod) withPosition (mod, pos)
     }
@@ -2023,7 +2047,7 @@ self =>
       if (in.token == LBRACKET) {
         in.nextToken()
         if (mods.hasAccessBoundary)
-          syntaxError("duplicate private/protected qualifier", false)
+          syntaxError("duplicate private/protected qualifier", skipIt = false)
         result = if (in.token == THIS) { in.nextToken(); mods | Flags.LOCAL }
                  else Modifiers(mods.flags, identForType())
         accept(RBRACKET)
@@ -2046,7 +2070,7 @@ self =>
      *  AccessModifier ::= (private | protected) [AccessQualifier]
      *  }}}
      */
-    def accessModifierOpt(): Modifiers = normalize {
+    def accessModifierOpt(): Modifiers = normalizeModifers {
       in.token match {
         case m @ (PRIVATE | PROTECTED)  => in.nextToken() ; accessQualifierOpt(Modifiers(flagTokens(m)))
         case _                          => NoMods
@@ -2060,7 +2084,7 @@ self =>
      *              |  override
      *  }}}
      */
-    def modifiers(): Modifiers = normalize {
+    def modifiers(): Modifiers = normalizeModifers {
       def loop(mods: Modifiers): Modifiers = in.token match {
         case PRIVATE | PROTECTED =>
           loop(accessQualifierOpt(addMod(mods, flagTokens(in.token), tokenRange(in))))
@@ -2105,7 +2129,7 @@ self =>
     def annotationExpr(): Tree = atPos(in.offset) {
       val t = exprSimpleType()
       if (in.token == LPAREN) New(t, multipleArgumentExprs())
-      else New(t, ListOfNil)
+      else New(t, Nil)
     }
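    With the change above, an annotation written without parentheses produces New(t, Nil) rather than New(t, ListOfNil), keeping it distinct from one carrying an explicit argument list. A minimal sketch of the two shapes, in plain Scala (the `marker' annotation is illustrative only):

        class marker extends scala.annotation.StaticAnnotation
        @SerialVersionUID(1L) class WithArgs extends Serializable   // LPAREN follows: multipleArgumentExprs()
        @marker class Bare                                           // no parentheses: New(t, Nil)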
 
 /* -------- PARAMETERS ------------------------------------------- */
@@ -2124,56 +2148,6 @@ self =>
     def paramClauses(owner: Name, contextBounds: List[Tree], ofCaseClass: Boolean): List[List[ValDef]] = {
       var implicitmod = 0
       var caseParam = ofCaseClass
-      def param(): ValDef = {
-        val start = in.offset
-        val annots = annotations(skipNewLines = false)
-        var mods = Modifiers(Flags.PARAM)
-        if (owner.isTypeName) {
-          mods = modifiers() | Flags.PARAMACCESSOR
-          if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", false)
-          in.token match {
-            case v @ (VAL | VAR) =>
-              mods = mods withPosition (in.token, tokenRange(in))
-              if (v == VAR) mods |= Flags.MUTABLE
-              in.nextToken()
-            case _ =>
-              if (mods.flags != Flags.PARAMACCESSOR) accept(VAL)
-              if (!caseParam) mods |= Flags.PrivateLocal
-          }
-          if (caseParam) mods |= Flags.CASEACCESSOR
-        }
-        val nameOffset = in.offset
-        val name = ident()
-        var bynamemod = 0
-        val tpt =
-          if (settings.YmethodInfer.value && !owner.isTypeName && in.token != COLON) {
-            TypeTree()
-          } else { // XX-METHOD-INFER
-            accept(COLON)
-            if (in.token == ARROW) {
-              if (owner.isTypeName && !mods.hasLocalFlag)
-                syntaxError(
-                  in.offset,
-                  (if (mods.isMutable) "`var'" else "`val'") +
-                  " parameters may not be call-by-name", false)
-              else if (implicitmod != 0)
-                syntaxError(
-                  in.offset,
-                  "implicit parameters may not be call-by-name", false)
-              else bynamemod = Flags.BYNAMEPARAM
-            }
-            paramType()
-          }
-        val default =
-          if (in.token == EQUALS) {
-            in.nextToken()
-            mods |= Flags.DEFAULTPARAM
-            expr()
-          } else EmptyTree
-        atPos(start, if (name == nme.ERROR) start else nameOffset) {
-          ValDef((mods | implicitmod | bynamemod) withAnnotations annots, name, tpt, default)
-        }
-      }
       def paramClause(): List[ValDef] = {
         if (in.token == RPAREN)
           return Nil
@@ -2182,14 +2156,14 @@ self =>
           in.nextToken()
           implicitmod = Flags.IMPLICIT
         }
-        commaSeparated(param())
+        commaSeparated(param(owner, implicitmod, caseParam))
       }
       val vds = new ListBuffer[List[ValDef]]
       val start = in.offset
       newLineOptWhenFollowedBy(LPAREN)
       if (ofCaseClass && in.token != LPAREN)
-        deprecationWarning(in.lastOffset, "case classes without a parameter list have been deprecated;\n"+
-                           "use either case objects or case classes with `()' as parameter list.")
+        syntaxError(in.lastOffset, "case classes without a parameter list are not allowed;\n"+
+                                   "use either case objects or case classes with an explicit `()' as a parameter list.")
       while (implicitmod == 0 && in.token == LPAREN) {
         in.nextToken()
         vds += paramClause()
@@ -2200,9 +2174,9 @@ self =>
       val result = vds.toList
       if (owner == nme.CONSTRUCTOR && (result.isEmpty || (result.head take 1 exists (_.mods.isImplicit)))) {
         in.token match {
-          case LBRACKET   => syntaxError(in.offset, "no type parameters allowed here", false)
+          case LBRACKET   => syntaxError(in.offset, "no type parameters allowed here", skipIt = false)
           case EOF        => incompleteInputError("auxiliary constructor needs non-implicit parameter list")
-          case _          => syntaxError(start, "auxiliary constructor needs non-implicit parameter list", false)
+          case _          => syntaxError(start, "auxiliary constructor needs non-implicit parameter list", skipIt = false)
         }
       }
       addEvidenceParams(owner, result, contextBounds)
@@ -2224,12 +2198,63 @@ self =>
           if (isRawStar) {
             in.nextToken()
             if (useStartAsPosition) atPos(start)(repeatedApplication(t))
-            else atPos(t.pos.startOrPoint, t.pos.point)(repeatedApplication(t))
+            else atPos(t.pos.start, t.pos.point)(repeatedApplication(t))
           }
           else t
       }
     }
 
+    def param(owner: Name, implicitmod: Int, caseParam: Boolean): ValDef = {
+      val start = in.offset
+      val annots = annotations(skipNewLines = false)
+      var mods = Modifiers(Flags.PARAM)
+      if (owner.isTypeName) {
+        mods = modifiers() | Flags.PARAMACCESSOR
+        if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", skipIt = false)
+        in.token match {
+          case v @ (VAL | VAR) =>
+            mods = mods withPosition (in.token.toLong, tokenRange(in))
+            if (v == VAR) mods |= Flags.MUTABLE
+            in.nextToken()
+          case _ =>
+            if (mods.flags != Flags.PARAMACCESSOR) accept(VAL)
+            if (!caseParam) mods |= Flags.PrivateLocal
+        }
+        if (caseParam) mods |= Flags.CASEACCESSOR
+      }
+      val nameOffset = in.offset
+      val name = ident()
+      var bynamemod = 0
+      val tpt =
+        if ((settings.YmethodInfer && !owner.isTypeName) && in.token != COLON) {
+          TypeTree()
+        } else { // XX-METHOD-INFER
+          accept(COLON)
+          if (in.token == ARROW) {
+            if (owner.isTypeName && !mods.isLocalToThis)
+              syntaxError(
+                in.offset,
+                (if (mods.isMutable) "`var'" else "`val'") +
+                " parameters may not be call-by-name", skipIt = false)
+            else if (implicitmod != 0)
+              syntaxError(
+                in.offset,
+                "implicit parameters may not be call-by-name", skipIt = false)
+            else bynamemod = Flags.BYNAMEPARAM
+          }
+          paramType()
+        }
+      val default =
+        if (in.token == EQUALS) {
+          in.nextToken()
+          mods |= Flags.DEFAULTPARAM
+          expr()
+        } else EmptyTree
+      atPos(start, if (name == nme.ERROR) start else nameOffset) {
+        ValDef((mods | implicitmod.toLong | bynamemod) withAnnotations annots, name.toTermName, tpt, default)
+      }
+    }
+
     /** {{{
      *  TypeParamClauseOpt    ::= [TypeParamClause]
      *  TypeParamClause       ::= `[' VariantTypeParam {`,' VariantTypeParam} `]']
@@ -2253,7 +2278,7 @@ self =>
           }
         }
         val nameOffset = in.offset
-        // TODO AM: freshName(o2p(in.skipToken()), "_$$"), will need to update test suite
+        // TODO AM: freshTermName(o2p(in.skipToken()), "_$$"), will need to update test suite
         val pname: TypeName = wildcardOrIdent().toTypeName
         val param = atPos(start, nameOffset) {
           val tparams = typeParamClauseOpt(pname, null) // @M TODO null --> no higher-order context bounds for now
@@ -2261,9 +2286,10 @@ self =>
         }
         if (contextBoundBuf ne null) {
           while (in.token == VIEWBOUND) {
-            contextBoundBuf += atPos(in.skipToken()) {
-              makeFunctionTypeTree(List(Ident(pname)), typ())
-            }
+            val msg = "Use an implicit parameter instead.\nExample: Instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`."
+            if (settings.future)
+              deprecationWarning(in.offset, s"View bounds are deprecated. $msg")
+            contextBoundBuf += atPos(in.skipToken())(makeFunctionTypeTree(List(Ident(pname)), typ()))
           }
           while (in.token == COLON) {
             contextBoundBuf += atPos(in.skipToken()) {
@@ -2283,16 +2309,18 @@ self =>
      *  }}}
      */
     def typeBounds(): TypeBoundsTree = {
-      val t = TypeBoundsTree(
-        bound(SUPERTYPE, tpnme.Nothing),
-        bound(SUBTYPE, tpnme.Any)
-      )
-      t setPos wrappingPos(List(t.hi, t.lo))
+      val lo      = bound(SUPERTYPE)
+      val hi      = bound(SUBTYPE)
+      val t       = TypeBoundsTree(lo, hi)
+      val defined = List(t.hi, t.lo) filter (_.pos.isDefined)
+
+      if (defined.nonEmpty)
+        t setPos wrappingPos(defined)
+      else
+        t setPos o2p(in.offset)
     }
 
-    def bound(tok: Int, default: TypeName): Tree =
-      if (in.token == tok) { in.nextToken(); typ() }
-      else atPos(o2p(in.lastOffset)) { rootScalaDot(default) }
+    def bound(tok: Token): Tree = if (in.token == tok) { in.nextToken(); typ() } else EmptyTree
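    With bound() now yielding EmptyTree for an absent bound, typeBounds() only wraps a position around bounds that were actually written, instead of synthesizing the old scala.Nothing / scala.Any defaults at parse time. For illustration, in plain Scala (the member names are illustrative only):

        trait Container {
          type Elem                    // neither bound written: both sides parse to EmptyTree
          type Key <: AnyRef           // only the upper bound is present
          type Acc >: Null <: AnyRef   // both bounds explicit
        }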
 
 /* -------- DEFS ------------------------------------------- */
 
@@ -2326,8 +2354,8 @@ self =>
         accept(DOT)
         result
       }
-      /** Walks down import `foo.bar.baz.{ ... }` until it ends at a
-       *  an underscore, a left brace, or an undotted identifier.
+      /* Walks down import `foo.bar.baz.{ ... }` until it ends at
+       * an underscore, a left brace, or an undotted identifier.
        */
       def loop(expr: Tree): Tree = {
         expr setPos expr.pos.makeTransparent
@@ -2411,9 +2439,9 @@ self =>
      *           | type [nl] TypeDcl
      *  }}}
      */
-    def defOrDcl(pos: Int, mods: Modifiers): List[Tree] = {
+    def defOrDcl(pos: Offset, mods: Modifiers): List[Tree] = {
       if (mods.isLazy && in.token != VAL)
-        syntaxError("lazy not allowed here. Only vals can be lazy", false)
+        syntaxError("lazy not allowed here. Only vals can be lazy", skipIt = false)
       in.token match {
         case VAL =>
           patDefOrDcl(pos, mods withPosition(VAL, tokenRange(in)))
@@ -2462,17 +2490,15 @@ self =>
           EmptyTree
         }
       def mkDefs(p: Tree, tp: Tree, rhs: Tree): List[Tree] = {
-        //Console.println("DEBUG: p = "+p.toString()); // DEBUG
-        val trees =
-          makePatDef(newmods,
-                     if (tp.isEmpty) p
-                     else Typed(p, tp) setPos (p.pos union tp.pos),
-                     rhs)
+        val trees = {
+          val pat = if (tp.isEmpty) p else Typed(p, tp) setPos (p.pos union tp.pos)
+          makePatDef(newmods, pat, rhs)
+        }
         if (newmods.isDeferred) {
           trees match {
             case List(ValDef(_, _, _, EmptyTree)) =>
-              if (mods.isLazy) syntaxError(p.pos, "lazy values may not be abstract", false)
-            case _ => syntaxError(p.pos, "pattern definition may not be abstract", false)
+              if (mods.isLazy) syntaxError(p.pos, "lazy values may not be abstract", skipIt = false)
+            case _ => syntaxError(p.pos, "pattern definition may not be abstract", skipIt = false)
           }
         }
         trees
@@ -2522,7 +2548,7 @@ self =>
      *  }}}
      */
     def funDefOrDcl(start : Int, mods: Modifiers): Tree = {
-      in.nextToken
+      in.nextToken()
       if (in.token == THIS) {
         atPos(start, in.skipToken()) {
           val vparamss = paramClauses(nme.CONSTRUCTOR, classContextBounds map (_.duplicate), ofCaseClass = false)
@@ -2536,12 +2562,12 @@ self =>
       }
       else {
         val nameOffset = in.offset
-        val name = ident()
+        val name = identOrMacro()
         funDefRest(start, nameOffset, mods, name)
       }
     }
 
-    def funDefRest(start: Int, nameOffset: Int, mods: Modifiers, name: Name): Tree = {
+    def funDefRest(start: Offset, nameOffset: Offset, mods: Modifiers, name: Name): Tree = {
       val result = atPos(start, if (name.toTermName == nme.ERROR) start else nameOffset) {
         var newmods = mods
         // contextBoundBuf is for context bounded type parameters of the form
@@ -2554,16 +2580,22 @@ self =>
         var restype = fromWithinReturnType(typedOpt())
         val rhs =
           if (isStatSep || in.token == RBRACE) {
-            if (restype.isEmpty) restype = scalaUnitConstr
+            if (restype.isEmpty) {
+              if (settings.future)
+                deprecationWarning(in.lastOffset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit`.")
+              restype = scalaUnitConstr
+            }
             newmods |= Flags.DEFERRED
             EmptyTree
           } else if (restype.isEmpty && in.token == LBRACE) {
+            if (settings.future)
+              deprecationWarning(in.offset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit =`.")
             restype = scalaUnitConstr
             blockExpr()
           } else {
             if (in.token == EQUALS) {
               in.nextTokenAllow(nme.MACROkw)
-              if (in.token == IDENTIFIER && in.name == nme.MACROkw) {
+              if (isMacro) {
                 in.nextToken()
                 newmods |= Flags.MACRO
               }
@@ -2572,7 +2604,7 @@ self =>
             }
             expr()
           }
-        DefDef(newmods, name, tparams, vparamss, restype, rhs)
+        DefDef(newmods, name.toTermName, tparams, vparamss, restype, rhs)
       }
       signalParseProgress(result.pos)
       result
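    The two deprecation warnings added above fire, under settings.future, on procedure syntax. For illustration, the form being flagged and the replacement the warning suggests, in plain Scala (the method names are illustrative only):

        // procedure syntax: no result type and no `=' before the body
        def log(msg: String) { println(msg) }
        // the suggested replacement, with an explicit `: Unit ='
        def logTyped(msg: String): Unit = { println(msg) }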
@@ -2585,7 +2617,7 @@ self =>
      */
     def constrExpr(vparamss: List[List[ValDef]]): Tree =
       if (in.token == LBRACE) constrBlock(vparamss)
-      else Block(List(selfInvocation(vparamss)), Literal(Constant()))
+      else Block(selfInvocation(vparamss) :: Nil, literalUnit)
 
     /** {{{
      *  SelfInvocation  ::= this ArgumentExprs {ArgumentExprs}
@@ -2615,7 +2647,7 @@ self =>
           else Nil
         }
         accept(RBRACE)
-        Block(stats, Literal(Constant()))
+        Block(stats, literalUnit)
       }
 
     /** {{{
@@ -2624,11 +2656,10 @@ self =>
      *  TypeDcl ::= type Id [TypeParamClause] TypeBounds
      *  }}}
      */
-    def typeDefOrDcl(start: Int, mods: Modifiers): Tree = {
+    def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = {
       in.nextToken()
       newLinesOpt()
       atPos(start, in.offset) {
-        val nameOffset = in.offset
         val name = identForType()
         // @M! a type alias as well as an abstract type may declare type parameters
         val tparams = typeParamClauseOpt(name, null)
@@ -2636,11 +2667,10 @@ self =>
           case EQUALS =>
             in.nextToken()
             TypeDef(mods, name, tparams, typ())
-          case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE =>
+          case t if t == SUPERTYPE || t == SUBTYPE || t == COMMA || t == RBRACE || isStatSep(t) =>
             TypeDef(mods | Flags.DEFERRED, name, tparams, typeBounds())
           case _ =>
-            syntaxErrorOrIncomplete("`=', `>:', or `<:' expected", true)
-            EmptyTree
+            syntaxErrorOrIncompleteAnd("`=', `>:', or `<:' expected", skipIt = true)(EmptyTree)
         }
       }
     }
@@ -2659,8 +2689,8 @@ self =>
      *            |  [override] trait TraitDef
      *  }}}
      */
-    def tmplDef(pos: Int, mods: Modifiers): Tree = {
-      if (mods.isLazy) syntaxError("classes cannot be lazy", false)
+    def tmplDef(pos: Offset, mods: Modifiers): Tree = {
+      if (mods.isLazy) syntaxError("classes cannot be lazy", skipIt = false)
       in.token match {
         case TRAIT =>
           classDef(pos, (mods | Flags.TRAIT | Flags.ABSTRACT) withPosition (Flags.TRAIT, tokenRange(in)))
@@ -2673,8 +2703,7 @@ self =>
         case CASEOBJECT =>
           objectDef(pos, (mods | Flags.CASE) withPosition (Flags.CASE, tokenRange(in.prev /*scanner skips on 'case' to 'object', thus take prev*/)))
         case _ =>
-          syntaxErrorOrIncomplete("expected start of definition", true)
-          EmptyTree
+          syntaxErrorOrIncompleteAnd("expected start of definition", skipIt = true)(EmptyTree)
       }
     }
 
@@ -2684,8 +2713,8 @@ self =>
      *  TraitDef ::= Id [TypeParamClause] RequiresTypeOpt TraitTemplateOpt
      *  }}}
      */
-    def classDef(start: Int, mods: Modifiers): ClassDef = {
-      in.nextToken
+    def classDef(start: Offset, mods: Modifiers): ClassDef = {
+      in.nextToken()
       val nameOffset = in.offset
       val name = identForType()
       atPos(start, if (name == tpnme.ERROR) start else nameOffset) {
@@ -2693,12 +2722,13 @@ self =>
           val contextBoundBuf = new ListBuffer[Tree]
           val tparams = typeParamClauseOpt(name, contextBoundBuf)
           classContextBounds = contextBoundBuf.toList
-          val tstart = (in.offset :: classContextBounds.map(_.pos.startOrPoint)).min
+          val tstart = (in.offset :: classContextBounds.map(_.pos.start)).min
           if (!classContextBounds.isEmpty && mods.isTrait) {
-            syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", false)
+            val viewBoundsExist = if (settings.future) "" else " nor view bounds `<% ...'"
+            syntaxError(s"traits cannot have type parameters with context bounds `: ...'$viewBoundsExist", skipIt = false)
             classContextBounds = List()
           }
-          val constrAnnots = constructorAnnotations()
+          val constrAnnots = if (!mods.isTrait) constructorAnnotations() else Nil
           val (constrMods, vparamss) =
             if (mods.isTrait) (Modifiers(Flags.TRAIT), List())
             else (accessModifierOpt(), paramClauses(name, classContextBounds, ofCaseClass = mods.isCase))
@@ -2706,11 +2736,10 @@ self =>
           if (mods.isTrait) {
             if (settings.YvirtClasses && in.token == SUBTYPE) mods1 |= Flags.DEFERRED
           } else if (in.token == SUBTYPE) {
-            syntaxError("classes are not allowed to be virtual", false)
+            syntaxError("classes are not allowed to be virtual", skipIt = false)
           }
           val template = templateOpt(mods1, name, constrMods withAnnotations constrAnnots, vparamss, tstart)
-          if (isInterface(mods1, template.body)) mods1 |= Flags.INTERFACE
-          val result = ClassDef(mods1, name, tparams, template)
+          val result = gen.mkClassDef(mods1, name, tparams, template)
           // Context bounds generate implicit parameters (part of the template) with types
           // from tparams: we need to ensure these don't overlap
           if (!classContextBounds.isEmpty)
@@ -2724,16 +2753,49 @@ self =>
      *  ObjectDef       ::= Id ClassTemplateOpt
      *  }}}
      */
-    def objectDef(start: Int, mods: Modifiers): ModuleDef = {
-      in.nextToken
+    def objectDef(start: Offset, mods: Modifiers): ModuleDef = {
+      in.nextToken()
       val nameOffset = in.offset
       val name = ident()
       val tstart = in.offset
       atPos(start, if (name == nme.ERROR) start else nameOffset) {
         val mods1 = if (in.token == SUBTYPE) mods | Flags.DEFERRED else mods
         val template = templateOpt(mods1, name, NoMods, Nil, tstart)
-        ModuleDef(mods1, name, template)
+        ModuleDef(mods1, name.toTermName, template)
+      }
+    }
+
+    /** Create a tree representing a package object, converting
+     *  {{{
+     *    package object foo { ... }
+     *  }}}
+     *  to
+     *  {{{
+     *    package foo {
+     *      object `package` { ... }
+     *    }
+     *  }}}
+     */
+    def packageObjectDef(start: Offset): PackageDef = {
+      val defn   = objectDef(in.offset, NoMods)
+      val pidPos = o2p(defn.pos.startOrPoint)
+      val pkgPos = r2p(start, pidPos.point)
+      gen.mkPackageObject(defn, pidPos, pkgPos)
+    }
+    def packageOrPackageObject(start: Offset): Tree = (
+      if (in.token == OBJECT)
+        joinComment(packageObjectDef(start) :: Nil).head
+      else {
+        in.flushDoc
+        makePackaging(start, pkgQualId(), inBracesOrNil(topStatSeq()))
       }
+    )
+    // TODO - eliminate this and use "def packageObjectDef" (see the call site of
+    // this method for a brief elaboration).
+    def makePackageObject(start: Offset, objDef: ModuleDef): PackageDef = objDef match {
+      case ModuleDef(mods, name, impl) =>
+        makePackaging(
+          start, atPos(o2p(objDef.pos.start)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl)))
     }
 
     /** {{{
@@ -2741,20 +2803,19 @@ self =>
      *  TraitParents       ::= AnnotType {with AnnotType}
      *  }}}
      */
-    def templateParents(isTrait: Boolean): (List[Tree], List[List[Tree]]) = {
-      val parents = new ListBuffer[Tree] += startAnnotType()
-      val argss = (
-        // TODO: the insertion of ListOfNil here is where "new Foo" becomes
-        // indistinguishable from "new Foo()".
-        if (in.token == LPAREN && !isTrait) multipleArgumentExprs()
-        else ListOfNil
-      )
-
-      while (in.token == WITH) {
-        in.nextToken()
-        parents += startAnnotType()
+    def templateParents(): List[Tree] = {
+      val parents = new ListBuffer[Tree]
+      def readAppliedParent() = {
+        val start = in.offset
+        val parent = startAnnotType()
+        parents += (in.token match {
+          case LPAREN => atPos(start)((parent /: multipleArgumentExprs())(Apply.apply))
+          case _      => parent
+        })
       }
-      (parents.toList, argss)
+      readAppliedParent()
+      while (in.token == WITH) { in.nextToken(); readAppliedParent() }
+      parents.toList
     }
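    readAppliedParent() above folds each argument list onto the parent type as it is parsed, so constructor arguments now live on the parent trees themselves rather than in a separate argss list. A minimal sketch of a declaration exercising this, in plain Scala (the class names are illustrative only):

        class Base(x: Int)(y: Int)
        trait Mix
        class C extends Base(1)(2) with Mix   // both argument lists are applied to Base directly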
 
     /** {{{
@@ -2764,79 +2825,75 @@ self =>
      *  EarlyDef      ::= Annotations Modifiers PatDef
      *  }}}
      */
-    def template(isTrait: Boolean): (List[Tree], List[List[Tree]], ValDef, List[Tree]) = {
+    def template(): (List[Tree], ValDef, List[Tree]) = {
       newLineOptWhenFollowedBy(LBRACE)
       if (in.token == LBRACE) {
         // @S: pre template body cannot stub like post body can!
         val (self, body) = templateBody(isPre = true)
-        if (in.token == WITH && self.isEmpty) {
-          val earlyDefs: List[Tree] = body flatMap {
-            case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
-              List(copyValDef(vdef)(mods = mods | Flags.PRESUPER))
-            case tdef @ TypeDef(mods, name, tparams, rhs) =>
-              List(treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs))
-            case docdef @ DocDef(comm, rhs) =>
-              List(treeCopy.DocDef(docdef, comm, rhs))
-            case stat if !stat.isEmpty =>
-              syntaxError(stat.pos, "only type definitions and concrete field definitions allowed in early object initialization section", false)
-              List()
-            case _ => List()
-          }
+        if (in.token == WITH && (self eq noSelfType)) {
+          val earlyDefs: List[Tree] = body.map(ensureEarlyDef).filter(_.nonEmpty)
           in.nextToken()
-          val (parents, argss) = templateParents(isTrait = isTrait)
-          val (self1, body1) = templateBodyOpt(traitParentSeen = isTrait)
-          (parents, argss, self1, earlyDefs ::: body1)
+          val parents = templateParents()
+          val (self1, body1) = templateBodyOpt(parenMeansSyntaxError = false)
+          (parents, self1, earlyDefs ::: body1)
         } else {
-          (List(), ListOfNil, self, body)
+          (List(), self, body)
         }
       } else {
-        val (parents, argss) = templateParents(isTrait = isTrait)
-        val (self, body) = templateBodyOpt(traitParentSeen = isTrait)
-        (parents, argss, self, body)
-      }
+        val parents = templateParents()
+        val (self, body) = templateBodyOpt(parenMeansSyntaxError = false)
+        (parents, self, body)
+      }
+    }
+
+    def ensureEarlyDef(tree: Tree): Tree = tree match {
+      case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
+        copyValDef(vdef)(mods = mods | Flags.PRESUPER)
+      case tdef @ TypeDef(mods, name, tparams, rhs) =>
+        deprecationWarning(tdef.pos.point, "early type members are deprecated. Move them to the regular body: the semantics are the same.")
+        treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs)
+      case docdef @ DocDef(comm, rhs) =>
+        treeCopy.DocDef(docdef, comm, rhs)
+      case stat if !stat.isEmpty =>
+        syntaxError(stat.pos, "only concrete field definitions allowed in early object initialization section", skipIt = false)
+        EmptyTree
+      case _ =>
+        EmptyTree
     }
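    ensureEarlyDef above keeps concrete field definitions (and doc comments) in an early-initializer section, deprecates early type members, and reports an error for anything else. For illustration, an accepted shape in plain Scala (the names are illustrative only):

        trait Named {
          val name: String
          val greeting = "Hello, " + name
        }
        // a concrete val in the early initializer is accepted as an early definition;
        // an early `type' member here would now draw a deprecation warning
        class Greeter extends { val name = "world" } with Named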
 
-    def isInterface(mods: Modifiers, body: List[Tree]): Boolean =
-      mods.isTrait && (body forall treeInfo.isInterfaceMember)
-
     /** {{{
      *  ClassTemplateOpt ::= `extends' ClassTemplate | [[`extends'] TemplateBody]
      *  TraitTemplateOpt ::= TraitExtends TraitTemplate | [[`extends'] TemplateBody] | `<:' TemplateBody
      *  TraitExtends     ::= `extends' | `<:'
      *  }}}
      */
-    def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Int): Template = {
-      val (parents0, argss, self, body) = (
+    def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Offset): Template = {
+      val (parents, self, body) = (
         if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait) {
           in.nextToken()
-          template(isTrait = mods.isTrait)
+          template()
         }
         else {
           newLineOptWhenFollowedBy(LBRACE)
-          val (self, body) = templateBodyOpt(traitParentSeen = false)
-          (List(), ListOfNil, self, body)
+          val (self, body) = templateBodyOpt(parenMeansSyntaxError = mods.isTrait || name.isTermName)
+          (List(), self, body)
         }
       )
-      def anyrefParents() = {
-        val caseParents = if (mods.isCase) List(productConstr, serializableConstr) else Nil
-        parents0 ::: caseParents match {
-          case Nil  => List(atPos(o2p(in.offset))(scalaAnyRefConstr))
-          case ps   => ps
-        }
-      }
       def anyvalConstructor() = (
         // Not a well-formed constructor, has to be finished later - see note
         // regarding AnyVal constructor in AddInterfaces.
-        DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, Literal(Constant())))
+        DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, literalUnit))
       )
-      val tstart0 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart
+      val parentPos = o2p(in.offset)
+      val tstart1 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart
 
-      atPos(tstart0) {
+      atPos(tstart1) {
         // Exclude only the 9 primitives plus AnyVal.
         if (inScalaRootPackage && ScalaValueClassNames.contains(name))
-          Template(parents0, self, anyvalConstructor :: body)
+          Template(parents, self, anyvalConstructor :: body)
         else
-          Template(anyrefParents, self, constrMods, vparamss, argss, body, o2p(tstart))
+          gen.mkTemplate(gen.mkParents(mods, parents, parentPos),
+                         self, constrMods, vparamss, body, o2p(tstart))
       }
     }
 
@@ -2851,15 +2908,16 @@ self =>
       case (self, Nil)  => (self, EmptyTree.asList)
       case result       => result
     }
-    def templateBodyOpt(traitParentSeen: Boolean): (ValDef, List[Tree]) = {
+    def templateBodyOpt(parenMeansSyntaxError: Boolean): (ValDef, List[Tree]) = {
       newLineOptWhenFollowedBy(LBRACE)
       if (in.token == LBRACE) {
         templateBody(isPre = false)
       } else {
-        if (in.token == LPAREN)
-          syntaxError((if (traitParentSeen) "parents of traits" else "traits or objects")+
-                      " may not have parameters", true)
-        (emptyValDef, List())
+        if (in.token == LPAREN) {
+          if (parenMeansSyntaxError) syntaxError(s"traits or objects may not have parameters", skipIt = true)
+          else abort("unexpected opening parenthesis")
+        }
+        (noSelfType, List())
       }
     }
 
@@ -2872,45 +2930,24 @@ self =>
 /* -------- STATSEQS ------------------------------------------- */
 
   /** Create a tree representing a packaging. */
-    def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
+    def makePackaging(start: Offset, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
       case x: RefTree => atPos(start, pkg.pos.point)(PackageDef(x, stats))
     }
-/*
-        pkg match {
-          case id @ Ident(_) =>
-            PackageDef(id, stats)
-          case Select(qual, name) => // drop this to flatten packages
-            makePackaging(start, qual, List(PackageDef(Ident(name), stats)))
-        }
-      }
-*/
 
-    /** Create a tree representing a package object, converting
-     *  {{{
-     *    package object foo { ... }
-     *  }}}
-     *  to
-     *  {{{
-     *    package foo {
-     *      object `package` { ... }
-     *    }
-     *  }}}
-     */
-    def makePackageObject(start: Int, objDef: ModuleDef): PackageDef = objDef match {
-      case ModuleDef(mods, name, impl) =>
-        makePackaging(
-          start, atPos(o2p(objDef.pos.startOrPoint)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl)))
-    }
+    def makeEmptyPackage(start: Offset, stats: List[Tree]): PackageDef = (
+      makePackaging(start, atPos(start, start, start)(Ident(nme.EMPTY_PACKAGE_NAME)), stats)
+    )
 
-    /** {{{
-     *  Packaging ::= package QualId [nl] `{' TopStatSeq `}'
-     *  }}}
-     */
-    def packaging(start: Int): Tree = {
-      val nameOffset = in.offset
-      val pkg = pkgQualId()
-      val stats = inBracesOrNil(topStatSeq())
-      makePackaging(start, pkg, stats)
+    def statSeq(stat: PartialFunction[Token, List[Tree]], errorMsg: String = "illegal start of definition"): List[Tree] = {
+      val stats = new ListBuffer[Tree]
+      def default(tok: Token) =
+        if (isStatSep) Nil
+        else syntaxErrorOrIncompleteAnd(errorMsg, skipIt = true)(Nil)
+      while (!isStatSeqEnd) {
+        stats ++= stat.applyOrElse(in.token, default)
+        acceptStatSepOpt()
+      }
+      stats.toList
     }
 
     /** {{{
@@ -2922,54 +2959,25 @@ self =>
      *            |
      *  }}}
      */
-    def topStatSeq(): List[Tree] = {
-      val stats = new ListBuffer[Tree]
-      while (!isStatSeqEnd) {
-        stats ++= (in.token match {
-          case PACKAGE  =>
-            val start = in.skipToken()
-            if (in.token == OBJECT)
-              joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods))))
-            else {
-              in.flushDoc
-              List(packaging(start))
-            }
-          case IMPORT =>
-            in.flushDoc
-            importClause()
-          case x if x == AT || isTemplateIntro || isModifier =>
-            joinComment(List(topLevelTmplDef))
-          case _ =>
-            if (!isStatSep)
-              syntaxErrorOrIncomplete("expected class or object definition", true)
-            Nil
-        })
-        acceptStatSepOpt()
-      }
-      stats.toList
-    }
-
-    /** Informal - for the repl and other direct parser accessors.
-     */
-    def templateStats(): List[Tree] = templateStatSeq(isPre = false)._2 match {
-      case Nil    => EmptyTree.asList
-      case stats  => stats
+    def topStatSeq(): List[Tree] = statSeq(topStat, errorMsg = "expected class or object definition")
+    def topStat: PartialFunction[Token, List[Tree]] = {
+      case PACKAGE  =>
+        packageOrPackageObject(in.skipToken()) :: Nil
+      case IMPORT =>
+        in.flushDoc
+        importClause()
+      case _ if isAnnotation || isTemplateIntro || isModifier =>
+        joinComment(topLevelTmplDef :: Nil)
     }
 
     /** {{{
-     *  TemplateStatSeq  ::= [id [`:' Type] `=>'] TemplateStat {semi TemplateStat}
-     *  TemplateStat     ::= Import
-     *                     | Annotations Modifiers Def
-     *                     | Annotations Modifiers Dcl
-     *                     | Expr1
-     *                     | super ArgumentExprs {ArgumentExprs}
-     *                     |
+     *  TemplateStatSeq  ::= [id [`:' Type] `=>'] TemplateStats
      *  }}}
      * @param isPre specifies whether in early initializer (true) or not (false)
      */
     def templateStatSeq(isPre : Boolean): (ValDef, List[Tree]) = checkNoEscapingPlaceholders {
-      var self: ValDef = emptyValDef
-      val stats = new ListBuffer[Tree]
+      var self: ValDef = noSelfType
+      var firstOpt: Option[Tree] = None
       if (isExprIntro) {
         in.flushDoc
         val first = expr(InTemplate) // @S: first statement is potentially converted so cannot be stubbed.
@@ -2986,27 +2994,37 @@ self =>
           }
           in.nextToken()
         } else {
-          stats += first
+          firstOpt = Some(first)
           acceptStatSepOpt()
         }
       }
-      while (!isStatSeqEnd) {
-        if (in.token == IMPORT) {
-          in.flushDoc
-          stats ++= importClause()
-        } else if (isExprIntro) {
-          in.flushDoc
-          stats += statement(InTemplate)
-        } else if (isDefIntro || isModifier || in.token == AT) {
-          stats ++= joinComment(nonLocalDefOrDcl)
-        } else if (!isStatSep) {
-          syntaxErrorOrIncomplete("illegal start of definition", true)
-        }
-        acceptStatSepOpt()
-      }
-      (self, stats.toList)
+      (self, firstOpt ++: templateStats())
+    }
+
+    /** {{{
+     *  TemplateStats    ::= TemplateStat {semi TemplateStat}
+     *  TemplateStat     ::= Import
+     *                     | Annotations Modifiers Def
+     *                     | Annotations Modifiers Dcl
+     *                     | Expr1
+     *                     | super ArgumentExprs {ArgumentExprs}
+     *                     |
+     *  }}}
+     */
+    def templateStats(): List[Tree] = statSeq(templateStat)
+    def templateStat: PartialFunction[Token, List[Tree]] = {
+      case IMPORT =>
+        in.flushDoc
+        importClause()
+      case _ if isDefIntro || isModifier || isAnnotation =>
+        joinComment(nonLocalDefOrDcl)
+      case _ if isExprIntro =>
+        in.flushDoc
+        statement(InTemplate) :: Nil
     }
 
+    def templateOrTopStatSeq(): List[Tree] = statSeq(templateStat.orElse(topStat))
+
     /** {{{
      *  RefineStatSeq    ::= RefineStat {semi RefineStat}
      *  RefineStat       ::= Dcl
@@ -3017,19 +3035,23 @@ self =>
     def refineStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
       val stats = new ListBuffer[Tree]
       while (!isStatSeqEnd) {
-        if (isDclIntro) { // don't IDE hook
-          stats ++= joinComment(defOrDcl(in.offset, NoMods))
-        } else if (!isStatSep) {
-          syntaxErrorOrIncomplete(
-            "illegal start of declaration"+
-            (if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
-             else ""), true)
-        }
+        stats ++= refineStat()
         if (in.token != RBRACE) acceptStatSep()
       }
       stats.toList
     }
 
+    def refineStat(): List[Tree] =
+      if (isDclIntro) { // don't IDE hook
+        joinComment(defOrDcl(in.offset, NoMods))
+      } else if (!isStatSep) {
+        syntaxErrorOrIncomplete(
+          "illegal start of declaration"+
+          (if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
+           else ""), skipIt = true)
+        Nil
+      } else Nil
+
     /** overridable IDE hook for local definitions of blockStatSeq
      *  Here's an idea how to fill in start and end positions.
     def localDef : List[Tree] = {
@@ -3047,13 +3069,13 @@ self =>
     def localDef(implicitMod: Int): List[Tree] = {
       val annots = annotations(skipNewLines = true)
       val pos = in.offset
-      val mods = (localModifiers() | implicitMod) withAnnotations annots
+      val mods = (localModifiers() | implicitMod.toLong) withAnnotations annots
       val defs =
         if (!(mods hasFlag ~(Flags.IMPLICIT | Flags.LAZY))) defOrDcl(pos, mods)
         else List(tmplDef(pos, mods))
 
       in.token match {
-        case RBRACE | CASE  => defs :+ (Literal(Constant()) setPos o2p(in.offset))
+        case RBRACE | CASE  => defs :+ setInPos(literalUnit)
         case _              => defs
       }
     }
@@ -3069,16 +3091,12 @@ self =>
      */
     def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
       val stats = new ListBuffer[Tree]
-      while (!isStatSeqEnd && in.token != CASE) {
+      while (!isStatSeqEnd && !isCaseDefEnd) {
         if (in.token == IMPORT) {
           stats ++= importClause()
           acceptStatSepOpt()
         }
-        else if (isExprIntro) {
-          stats += statement(InBlock)
-          if (in.token != RBRACE && in.token != CASE) acceptStatSep()
-        }
-        else if (isDefIntro || isLocalModifier || in.token == AT) {
+        else if (isDefIntro || isLocalModifier || isAnnotation) {
           if (in.token == IMPLICIT) {
             val start = in.skipToken()
             if (isIdent) stats += implicitClosure(start, InBlock)
@@ -3088,12 +3106,16 @@ self =>
           }
           acceptStatSepOpt()
         }
+        else if (isExprIntro) {
+          stats += statement(InBlock)
+          if (!isCaseDefEnd) acceptStatSep()
+        }
         else if (isStatSep) {
           in.nextToken()
         }
         else {
           val addendum = if (isModifier) " (no modifiers allowed here)" else ""
-          syntaxErrorOrIncomplete("illegal start of statement" + addendum, true)
+          syntaxErrorOrIncomplete("illegal start of statement" + addendum, skipIt = true)
         }
       }
       stats.toList
@@ -3103,7 +3125,7 @@ self =>
      *  CompilationUnit ::= {package QualId semi} TopStatSeq
      *  }}}
      */
-    def compilationUnit(): Tree = checkNoEscapingPlaceholders {
+    def compilationUnit(): PackageDef = checkNoEscapingPlaceholders {
       def topstats(): List[Tree] = {
         val ts = new ListBuffer[Tree]
         while (in.token == SEMI) in.nextToken()
@@ -3111,13 +3133,15 @@ self =>
         if (in.token == PACKAGE) {
           in.nextToken()
           if (in.token == OBJECT) {
+            // TODO - this next line is supposed to be
+            //    ts += packageObjectDef(start)
+            // but this broke a scaladoc test (run/diagrams-filtering.scala) somehow.
             ts ++= joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods))))
             if (in.token != EOF) {
               acceptStatSep()
               ts ++= topStatSeq()
             }
           } else {
-            val nameOffset = in.offset
             in.flushDoc
             val pkg = pkgQualId()
 
@@ -3140,17 +3164,17 @@ self =>
 
       resetPackage()
       topstats() match {
-        case List(stat @ PackageDef(_, _)) => stat
-        case stats =>
+        case (stat @ PackageDef(_, _)) :: Nil => stat
+        case stats                            =>
           val start =
             if (stats forall (_ == EmptyTree)) 0
             else {
               val wpos = wrappingPos(stats)
-              if (wpos.isDefined) wpos.startOrPoint
+              if (wpos.isDefined) wpos.start
               else 0
             }
 
-          makePackaging(start, atPos(start, start, start) { Ident(nme.EMPTY_PACKAGE_NAME) }, stats)
+          makeEmptyPackage(start, stats)
       }
     }
   }
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 8d295a2..e8d4670 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -5,13 +5,15 @@
 package scala.tools.nsc
 package ast.parser
 
-import scala.tools.nsc.util.CharArrayReader
+import scala.tools.nsc.util.{ CharArrayReader, CharArrayReaderData }
 import scala.reflect.internal.util._
 import scala.reflect.internal.Chars._
 import Tokens._
-import scala.annotation.switch
-import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
-import scala.xml.Utility.{ isNameStart }
+import scala.annotation.{ switch, tailrec }
+import scala.collection.{ mutable, immutable }
+import mutable.{ ListBuffer, ArrayBuffer }
+import scala.tools.nsc.ast.parser.xml.Utility.isNameStart
+import scala.language.postfixOps
 
 /** See Parsers.scala / ParsersCommon for some explanation of ScannersCommon.
  */
@@ -19,20 +21,24 @@ trait ScannersCommon {
   val global : Global
   import global._
 
+  /** Offset into source character array */
+  type Offset = Int
+
+  type Token = Int
+
   trait CommonTokenData {
-    def token: Int
+    def token: Token
     def name: TermName
   }
 
   trait ScannerCommon extends CommonTokenData {
     // things to fill in, in addition to buf, decodeUni which come from CharArrayReader
-    def warning(off: Int, msg: String): Unit
-    def error  (off: Int, msg: String): Unit
-    def incompleteInputError(off: Int, msg: String): Unit
-    def deprecationWarning(off: Int, msg: String): Unit
+    def error(off: Offset, msg: String): Unit
+    def incompleteInputError(off: Offset, msg: String): Unit
+    def deprecationWarning(off: Offset, msg: String): Unit
   }
 
-  def createKeywordArray(keywords: Seq[(Name, Int)], defaultToken: Int): (Int, Array[Int]) = {
+  def createKeywordArray(keywords: Seq[(Name, Token)], defaultToken: Token): (Token, Array[Token]) = {
     val names = keywords sortBy (_._1.start) map { case (k, v) => (k.start, v) }
     val low   = names.head._1
     val high  = names.last._1
@@ -47,16 +53,10 @@ trait Scanners extends ScannersCommon {
   val global : Global
   import global._
 
-  /** Offset into source character array */
-  type Offset = Int
-
-  /** An undefined offset */
-  val NoOffset: Offset = -1
-
   trait TokenData extends CommonTokenData {
 
     /** the next token */
-    var token: Int = EMPTY
+    var token: Token = EMPTY
 
     /** the offset of the first character of the current token */
     var offset: Offset = 0
@@ -73,24 +73,105 @@ trait Scanners extends ScannersCommon {
     /** the base of a number */
     var base: Int = 0
 
-    def copyFrom(td: TokenData) = {
+    def copyFrom(td: TokenData): this.type = {
       this.token = td.token
       this.offset = td.offset
       this.lastOffset = td.lastOffset
       this.name = td.name
       this.strVal = td.strVal
       this.base = td.base
+      this
+    }
+  }
+
+  /** An interface to most of the mutable data in Scanner, defined in TokenData
+   *  and CharArrayReader (plus the next and prev fields), with copyFrom functionality
+   *  to back up and restore data (used by quasiquotes' lookingAhead).
+   */
+  trait ScannerData extends TokenData with CharArrayReaderData {
+    /** we need one token lookahead and one token history
+     */
+    val next: TokenData = new TokenData{}
+    val prev: TokenData = new TokenData{}
+
+    def copyFrom(sd: ScannerData): this.type = {
+      this.next copyFrom sd.next
+      this.prev copyFrom sd.prev
+      super[CharArrayReaderData].copyFrom(sd)
+      super[TokenData].copyFrom(sd)
+      this
     }
   }
 
-  abstract class Scanner extends CharArrayReader with TokenData with ScannerCommon {
+  abstract class Scanner extends CharArrayReader with TokenData with ScannerData with ScannerCommon {
     private def isDigit(c: Char) = java.lang.Character isDigit c
 
-    def isAtEnd = charOffset >= buf.length
+    private var openComments = 0
+    protected def putCommentChar(): Unit = nextChar()
 
-    def flush = { charOffset = offset; nextChar(); this }
+    @tailrec private def skipLineComment(): Unit = ch match {
+      case SU | CR | LF =>
+      case _            => nextChar() ; skipLineComment()
+    }
+    private def maybeOpen() {
+      putCommentChar()
+      if (ch == '*') {
+        putCommentChar()
+        openComments += 1
+      }
+    }
+    private def maybeClose(): Boolean = {
+      putCommentChar()
+      (ch == '/') && {
+        putCommentChar()
+        openComments -= 1
+        openComments == 0
+      }
+    }
+    @tailrec final def skipNestedComments(): Unit = ch match {
+      case '/' => maybeOpen() ; skipNestedComments()
+      case '*' => if (!maybeClose()) skipNestedComments()
+      case SU  => incompleteInputError("unclosed comment")
+      case _   => putCommentChar() ; skipNestedComments()
+    }
+    def skipDocComment(): Unit = skipNestedComments()
+    def skipBlockComment(): Unit = skipNestedComments()
 
-    def resume(lastCode: Int) = {
+    private def skipToCommentEnd(isLineComment: Boolean) {
+      nextChar()
+      if (isLineComment) skipLineComment()
+      else {
+        openComments = 1
+        val isDocComment = (ch == '*') && { nextChar(); true }
+        if (isDocComment) {
+          // Check for the amazing corner case of /**/
+          if (ch == '/')
+            nextChar()
+          else
+            skipDocComment()
+        }
+        else skipBlockComment()
+      }
+    }
+
+    /** @pre ch == '/'
+     *  Returns true if a comment was skipped.
+     */
+    def skipComment(): Boolean = ch match {
+      case '/' | '*' => skipToCommentEnd(isLineComment = ch == '/') ; true
+      case _         => false
+    }
+    def flushDoc(): DocComment = null
+
+    /** To prevent doc comments attached to expressions from leaking out of scope
+     *  onto the next documentable entity, they are discarded upon passing a right
+     *  brace, bracket, or parenthesis.
+     */
+    def discardDocBuffer(): Unit = ()
+
+    def isAtEnd = charOffset >= buf.length
+
+    def resume(lastCode: Token) = {
       token = lastCode
       if (next.token != EMPTY && !reporter.hasErrors)
         syntaxError("unexpected end of input: possible missing '}' in XML block")
@@ -98,10 +179,6 @@ trait Scanners extends ScannersCommon {
       nextToken()
     }
 
-    /** the last error offset
-     */
-    var errOffset: Offset = NoOffset
-
     /** A character buffer for literals
      */
     val cbuf = new StringBuilder
@@ -119,7 +196,7 @@ trait Scanners extends ScannersCommon {
     protected def emitIdentifierDeprecationWarnings = true
 
     /** Clear buffer and set name and token */
-    private def finishNamed(idtoken: Int = IDENTIFIER) {
+    private def finishNamed(idtoken: Token = IDENTIFIER) {
       name = newTermName(cbuf.toString)
       cbuf.clear()
       token = idtoken
@@ -127,8 +204,12 @@ trait Scanners extends ScannersCommon {
         val idx = name.start - kwOffset
         if (idx >= 0 && idx < kwArray.length) {
           token = kwArray(idx)
-          if (token == IDENTIFIER && allowIdent != name && emitIdentifierDeprecationWarnings)
-            deprecationWarning(name+" is now a reserved word; usage as an identifier is deprecated")
+          if (token == IDENTIFIER && allowIdent != name) {
+            if (name == nme.MACROkw)
+              syntaxError(s"$name is now a reserved word; usage as an identifier is disallowed")
+            else if (emitIdentifierDeprecationWarnings)
+              deprecationWarning(s"$name is now a reserved word; usage as an identifier is deprecated")
+          }
         }
       }
     }
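
With the change to finishNamed above, `macro` used as a plain identifier is now a
hard error, while other soft keywords such as `then` still only draw a deprecation
warning. Illustrative source, assuming default settings:

    object ReservedWords {
      // val macro = 1       // error: macro is now a reserved word; usage as an identifier is disallowed
      val `macro` = 1        // backquoting still works
      val then    = 2        // deprecation warning only: then is now a reserved word ...
    }
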
@@ -139,29 +220,6 @@ trait Scanners extends ScannersCommon {
       cbuf.clear()
     }
 
-    /** Should doc comments be built? */
-    def buildDocs: Boolean = forScaladoc
-
-    /** holder for the documentation comment
-     */
-    var docComment: DocComment = null
-
-    def flushDoc: DocComment = {
-      val ret = docComment
-      docComment = null
-      ret
-    }
-
-    protected def foundComment(value: String, start: Int, end: Int) = ()
-    protected def foundDocComment(value: String, start: Int, end: Int) = ()
-
-    private class TokenData0 extends TokenData
-
-    /** we need one token lookahead and one token history
-     */
-    val next : TokenData = new TokenData0
-    val prev : TokenData = new TokenData0
-
     /** a stack of tokens which indicates whether line-ends can be statement separators
      *  also used for keeping track of nesting levels.
      *  We keep track of the closing symbol of a region. This can be
@@ -173,7 +231,7 @@ trait Scanners extends ScannersCommon {
      *            (the STRINGLIT appears twice in succession on the stack iff the
      *             expression is a multiline string literal).
      */
-    var sepRegions: List[Int] = List()
+    var sepRegions: List[Token] = List()
 
 // Get next token ------------------------------------------------------------
 
@@ -227,12 +285,15 @@ trait Scanners extends ScannersCommon {
         case RBRACE =>
           while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
             sepRegions = sepRegions.tail
-          if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
-          docComment = null
+          if (!sepRegions.isEmpty)
+            sepRegions = sepRegions.tail
+
+          discardDocBuffer()
         case RBRACKET | RPAREN =>
           if (!sepRegions.isEmpty && sepRegions.head == lastToken)
             sepRegions = sepRegions.tail
-          docComment = null
+
+          discardDocBuffer()
         case ARROW =>
           if (!sepRegions.isEmpty && sepRegions.head == lastToken)
             sepRegions = sepRegions.tail
@@ -262,11 +323,11 @@ trait Scanners extends ScannersCommon {
         next.token = EMPTY
       }
 
-      /** Insert NEWLINE or NEWLINES if
-       *  - we are after a newline
-       *  - we are within a { ... } or on toplevel (wrt sepRegions)
-       *  - the current token can start a statement and the one before can end it
-       *  insert NEWLINES if we are past a blank line, NEWLINE otherwise
+      /* Insert NEWLINE or NEWLINES if
+       * - we are after a newline
+       * - we are within a { ... } or on toplevel (wrt sepRegions)
+       * - the current token can start a statement and the one before can end it
+       * insert NEWLINES if we are past a blank line, NEWLINE otherwise
        */
       if (!applyBracePatch() && afterLineEnd() && inLastOfStat(lastToken) && inFirstOfStat(token) &&
           (sepRegions.isEmpty || sepRegions.head == RBRACE)) {
@@ -328,7 +389,7 @@ trait Scanners extends ScannersCommon {
 //              println("blank line found at "+lastOffset+":"+(lastOffset to idx).map(buf(_)).toList)
               return true
             }
-	    if (idx == end) return false
+            if (idx == end) return false
           } while (ch <= ' ')
         }
         idx += 1; ch = buf(idx)
@@ -375,7 +436,7 @@ trait Scanners extends ScannersCommon {
                 getOperatorRest()
             }
           }
-          fetchLT
+          fetchLT()
         case '~' | '!' | '@' | '#' | '%' |
              '^' | '*' | '+' | '-' | /*'<' | */
              '>' | '?' | ':' | '=' | '&' |
@@ -399,20 +460,11 @@ trait Scanners extends ScannersCommon {
               nextChar()
               base = 16
             } else {
-              /**
-               * What should leading 0 be in the future? It is potentially dangerous
-               *  to let it be base-10 because of history.  Should it be an error? Is
-               *  there a realistic situation where one would need it?
-               */
-              if (isDigit(ch)) {
-                if (opt.future) syntaxError("Non-zero numbers may not have a leading zero.")
-                else deprecationWarning("Treating numbers with a leading zero as octal is deprecated.")
-              }
               base = 8
             }
             getNumber()
           }
-          fetchZero
+          fetchZero()
         case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
           base = 10
           getNumber()
@@ -423,14 +475,17 @@ trait Scanners extends ScannersCommon {
             if (token == INTERPOLATIONID) {
               nextRawChar()
               if (ch == '\"') {
-                nextRawChar()
-                if (ch == '\"') {
+                val lookahead = lookaheadReader
+                lookahead.nextChar()
+                if (lookahead.ch == '\"') {
+                  nextRawChar()                        // now eat it
                   offset += 3
                   nextRawChar()
                   getStringPart(multiLine = true)
                   sepRegions = STRINGPART :: sepRegions // indicate string part
                   sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part
                 } else {
+                  nextChar()
                   token = STRINGLIT
                   strVal = ""
                 }
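
The added lookahead lets the scanner tell the empty interpolated string apart from
the opening of a triple-quoted interpolation without over-consuming characters.
Both forms, for reference:

    object InterpolatedForms {
      val empty = s""                            // INTERPOLATIONID + an empty STRINGLIT
      val multi = s"""first ${1 + 1}
                     |second""".stripMargin      // multi-line interpolation (STRINGPART pieces)
    }
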
@@ -455,7 +510,7 @@ trait Scanners extends ScannersCommon {
               }
             }
           }
-          fetchDoubleQuote
+          fetchDoubleQuote()
         case '\'' =>
           def fetchSingleQuote() = {
             nextChar()
@@ -474,7 +529,7 @@ trait Scanners extends ScannersCommon {
               }
             }
           }
-          fetchSingleQuote
+          fetchSingleQuote()
         case '.' =>
           nextChar()
           if ('0' <= ch && ch <= '9') {
@@ -519,72 +574,16 @@ trait Scanners extends ScannersCommon {
               nextChar()
               getOperatorRest()
             } else {
-              syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch: Int)) + "'")
+              syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch.toInt)) + "'")
               nextChar()
             }
           }
-          fetchOther
-      }
-    }
-
-    private def skipComment(): Boolean = {
-
-      if (ch == '/' || ch == '*') {
-
-        val comment = new StringBuilder("/")
-        def appendToComment() = comment.append(ch)
-
-        if (ch == '/') {
-          do {
-        	appendToComment()
-            nextChar()
-          } while ((ch != CR) && (ch != LF) && (ch != SU))
-        } else {
-          docComment = null
-          var openComments = 1
-          appendToComment()
-          nextChar()
-          appendToComment()
-          var buildingDocComment = false
-          if (ch == '*' && buildDocs) {
-            buildingDocComment = true
-          }
-          while (openComments > 0) {
-            do {
-              do {
-                if (ch == '/') {
-                  nextChar(); appendToComment()
-                  if (ch == '*') {
-                    nextChar(); appendToComment()
-                    openComments += 1
-                  }
-                }
-                if (ch != '*' && ch != SU) {
-                  nextChar(); appendToComment()
-                }
-              } while (ch != '*' && ch != SU)
-              while (ch == '*') {
-                nextChar(); appendToComment()
-              }
-            } while (ch != '/' && ch != SU)
-            if (ch == '/') nextChar()
-            else incompleteInputError("unclosed comment")
-            openComments -= 1
-          }
-
-          if (buildingDocComment)
-            foundDocComment(comment.toString, offset, charOffset - 2)
-        }
-
-        foundComment(comment.toString, offset, charOffset - 2)
-        true
-      } else {
-        false
+          fetchOther()
       }
     }
 
     /** Can token start a statement? */
-    def inFirstOfStat(token: Int) = token match {
+    def inFirstOfStat(token: Token) = token match {
       case EOF | CATCH | ELSE | EXTENDS | FINALLY | FORSOME | MATCH | WITH | YIELD |
            COMMA | SEMI | NEWLINE | NEWLINES | DOT | COLON | EQUALS | ARROW | LARROW |
            SUBTYPE | VIEWBOUND | SUPERTYPE | HASH | RPAREN | RBRACKET | RBRACE | LBRACKET =>
@@ -594,7 +593,7 @@ trait Scanners extends ScannersCommon {
     }
 
     /** Can token end a statement? */
-    def inLastOfStat(token: Int) = token match {
+    def inLastOfStat(token: Token) = token match {
       case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT | STRINGLIT | SYMBOLLIT |
            IDENTIFIER | BACKQUOTED_IDENT | THIS | NULL | TRUE | FALSE | RETURN | USCORE |
            TYPE | XMLSTART | RPAREN | RBRACKET | RBRACE =>
@@ -709,7 +708,7 @@ trait Scanners extends ScannersCommon {
       }
     }
 
-    @annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
+    @scala.annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
       def finishStringPart() = {
         setStrVal()
         token = STRINGPART
@@ -739,6 +738,10 @@ trait Scanners extends ScannersCommon {
           finishStringPart()
           nextRawChar()
           next.token = LBRACE
+        } else if (ch == '_') {
+          finishStringPart()
+          nextRawChar()
+          next.token = USCORE
         } else if (Character.isUnicodeIdentifierStart(ch)) {
           finishStringPart()
           do {
@@ -803,6 +806,7 @@ trait Scanners extends ScannersCommon {
       if (ch == '\\') {
         nextChar()
         if ('0' <= ch && ch <= '7') {
+          val start = charOffset - 2
           val leadch: Char = ch
           var oct: Int = digit2int(ch, 8)
           nextChar()
@@ -814,6 +818,12 @@ trait Scanners extends ScannersCommon {
               nextChar()
             }
           }
+          val alt = if (oct == LF) "\\n" else "\\u%04x" format oct
+          def msg(what: String) = s"Octal escape literals are $what, use $alt instead."
+          if (settings.future)
+            syntaxError(start, msg("unsupported"))
+          else
+            deprecationWarning(start, msg("deprecated"))
           putChar(oct.toChar)
         } else {
           ch match {
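
The new check above turns octal escape sequences in character and string literals
into a deprecation warning (an error under settings.future, i.e. -Xfuture),
pointing at the equivalent \n or \uXXXX spelling. For example:

    object OctalEscapes {
      // val nl  = "\012"    // deprecated (error under -Xfuture): use "\n" instead
      // val del = '\177'    // deprecated: use '\u007f' instead
      val nl  = "\n"
      val del = '\u007f'
    }
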
@@ -895,7 +905,7 @@ trait Scanners extends ScannersCommon {
      */
     def intVal(negated: Boolean): Long = {
       if (token == CHARLIT && !negated) {
-        charVal
+        charVal.toLong
       } else {
         var value: Long = 0
         val divider = if (base == 10) 1 else 2
@@ -923,7 +933,7 @@ trait Scanners extends ScannersCommon {
       }
     }
 
-    def intVal: Long = intVal(false)
+    def intVal: Long = intVal(negated = false)
 
     /** Convert current strVal, base to double value
     */
@@ -943,9 +953,8 @@ trait Scanners extends ScannersCommon {
         }
         if (value > limit)
           syntaxError("floating point number too large")
-        if (isDeprecatedForm) {
-          deprecationWarning("This lexical syntax is deprecated.  From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.")
-        }
+        if (isDeprecatedForm)
+          syntaxError("floating point number is missing digit after dot")
 
         if (negated) -value else value
       } catch {
@@ -955,7 +964,7 @@ trait Scanners extends ScannersCommon {
       }
     }
 
-    def floatVal: Double = floatVal(false)
+    def floatVal: Double = floatVal(negated = false)
 
     def checkNoLetter() {
       if (isIdentifierPart(ch) && ch >= ' ')
@@ -966,14 +975,19 @@ trait Scanners extends ScannersCommon {
     */
     protected def getNumber() {
       val base1 = if (base < 10) 10 else base
-      // read 8,9's even if format is octal, produce a malformed number error afterwards.
+      // Read 8,9's even if format is octal, produce a malformed number error afterwards.
+      // At this point, we have already read the first digit, so to tell an innocent 0 apart
+      // from an octal literal 0123... (which we want to disallow), we check whether there
+      // are any additional digits coming after the first one we have already read.
+      var notSingleZero = false
       while (digit2int(ch, base1) >= 0) {
         putChar(ch)
         nextChar()
+        notSingleZero = true
       }
       token = INTLIT
 
-      /** When we know for certain it's a number after using a touch of lookahead */
+      /* When we know for certain it's a number after using a touch of lookahead */
       def restOfNumber() = {
         putChar(ch)
         nextChar()
@@ -986,6 +1000,9 @@ trait Scanners extends ScannersCommon {
         if (base <= 10 && isEfd)
           getFraction()
         else {
+          // Checking for base == 8 is not enough, because base = 8 is set
+          // as soon as a 0 is read in `case '0'` of method fetchToken.
+          if (base == 8 && notSingleZero) syntaxError("Non-zero integral values may not have a leading zero.")
           setStrVal()
           if (isL) {
             nextChar()
@@ -1001,10 +1018,8 @@ trait Scanners extends ScannersCommon {
         val lookahead = lookaheadReader
         val c = lookahead.getc()
 
-        /** As of scala 2.11, it isn't a number unless c here is a digit, so
-         *  opt.future excludes the rest of the logic.
-         */
-        if (opt.future && !isDigit(c))
+        /* Prohibit 1. */
+        if (!isDigit(c))
           return setStrVal()
 
         val isDefinitelyNumber = (c: @switch) match {
@@ -1012,16 +1027,16 @@ trait Scanners extends ScannersCommon {
           case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9'  =>
             true
 
-          /** Backquoted idents like 22.`foo`. */
+          /* Backquoted idents like 22.`foo`. */
           case '`' =>
             return setStrVal()  /** Note the early return */
 
-          /** These letters may be part of a literal, or a method invocation on an Int.
+          /* These letters may be part of a literal, or a method invocation on an Int.
            */
           case 'd' | 'D' | 'f' | 'F' =>
             !isIdentifierPart(lookahead.getc())
 
-          /** A little more special handling for e.g. 5e7 */
+          /* A little more special handling for e.g. 5e7 */
           case 'e' | 'E' =>
             val ch = lookahead.getc()
             !isIdentifierPart(ch) || (isDigit(ch) || ch == '+' || ch == '-')
@@ -1058,7 +1073,6 @@ trait Scanners extends ScannersCommon {
     def syntaxError(off: Offset, msg: String) {
       error(off, msg)
       token = ERROR
-      errOffset = off
     }
 
     /** generate an error at the current token offset
@@ -1071,7 +1085,6 @@ trait Scanners extends ScannersCommon {
     def incompleteInputError(msg: String) {
       incompleteInputError(offset, msg)
       token = EOF
-      errOffset = offset
     }
 
     override def toString() = token match {
@@ -1114,7 +1127,7 @@ trait Scanners extends ScannersCommon {
     def applyBracePatch(): Boolean = false
 
     /** overridden in UnitScanners */
-    def parenBalance(token: Int) = 0
+    def parenBalance(token: Token) = 0
 
     /** overridden in UnitScanners */
     def healBraces(): List[BracePatch] = List()
@@ -1129,7 +1142,7 @@ trait Scanners extends ScannersCommon {
 
   // ------------- keyword configuration -----------------------------------
 
-  private val allKeywords = List[(Name, Int)](
+  private val allKeywords = List[(Name, Token)](
     nme.ABSTRACTkw  -> ABSTRACT,
     nme.CASEkw      -> CASE,
     nme.CATCHkw     -> CATCH,
@@ -1183,8 +1196,8 @@ trait Scanners extends ScannersCommon {
     nme.MACROkw     -> IDENTIFIER,
     nme.THENkw      -> IDENTIFIER)
 
-  private var kwOffset: Int = -1
-  private val kwArray: Array[Int] = {
+  private var kwOffset: Offset = -1
+  private val kwArray: Array[Token] = {
     val (offset, arr) = createKeywordArray(allKeywords, IDENTIFIER)
     kwOffset = offset
     arr
@@ -1195,7 +1208,7 @@ trait Scanners extends ScannersCommon {
 // Token representation ----------------------------------------------------
 
   /** Returns the string representation of given token. */
-  def token2string(token: Int): String = (token: @switch) match {
+  def token2string(token: Token): String = (token: @switch) match {
     case IDENTIFIER | BACKQUOTED_IDENT => "identifier"
     case CHARLIT => "character literal"
     case INTLIT => "integer literal"
@@ -1226,17 +1239,16 @@ trait Scanners extends ScannersCommon {
       }
   }
 
-  class MalformedInput(val offset: Int, val msg: String) extends Exception
+  class MalformedInput(val offset: Offset, val msg: String) extends Exception
 
   /** A scanner for a given source file, not necessarily attached to a compilation unit.
    *  Useful for looking inside source files that are not currently compiled to see what's there.
    */
   class SourceFileScanner(val source: SourceFile) extends Scanner {
     val buf = source.content
-    override val decodeUni: Boolean = !settings.nouescape.value
+    override val decodeUni: Boolean = !settings.nouescape
 
     // suppress warnings, throw exception on errors
-    def warning(off: Offset, msg: String): Unit = ()
     def deprecationWarning(off: Offset, msg: String): Unit = ()
     def error  (off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
     def incompleteInputError(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
@@ -1244,10 +1256,9 @@ trait Scanners extends ScannersCommon {
 
   /** A scanner over a given compilation unit
    */
-  class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
+  class UnitScanner(val unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
     def this(unit: CompilationUnit) = this(unit, List())
 
-    override def warning(off: Offset, msg: String)              = unit.warning(unit.position(off), msg)
     override def deprecationWarning(off: Offset, msg: String)   = unit.deprecationWarning(unit.position(off), msg)
     override def error  (off: Offset, msg: String)              = unit.error(unit.position(off), msg)
     override def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg)
@@ -1256,7 +1267,7 @@ trait Scanners extends ScannersCommon {
 
     lazy val parensAnalyzer = new ParensAnalyzer(unit, List())
 
-    override def parenBalance(token: Int) = parensAnalyzer.balance(token)
+    override def parenBalance(token: Token) = parensAnalyzer.balance(token)
 
     override def healBraces(): List[BracePatch] = {
       var patches: List[BracePatch] = List()
@@ -1293,23 +1304,21 @@ trait Scanners extends ScannersCommon {
         }
       }
     }
-
-    override def foundComment(value: String, start: Int, end: Int) {
-      val pos = new RangePosition(unit.source, start, start, end)
-      unit.comment(pos, value)
-    }
-
-    override def foundDocComment(value: String, start: Int, end: Int) {
-      val docPos = new RangePosition(unit.source, start, start, end)
-      docComment = new DocComment(value, docPos)
-      unit.comment(docPos, value)
-    }
   }
 
   class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) {
-    var balance = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+    val balance = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+
+    /** The source code with braces and line starts annotated with [NN] showing the index */
+    private def markedSource = {
+      val code   = unit.source.content
+      val braces = code.indices filter (idx => "{}\n" contains code(idx)) toSet;
+      val mapped = code.indices map (idx => if (braces(idx)) s"${code(idx)}[$idx]" else "" + code(idx))
+      mapped.mkString("")
+    }
 
     init()
+    log(s"ParensAnalyzer for ${unit.source} of length ${unit.source.content.length}\n```\n$markedSource\n```")
 
     /** The offset of the first token on this line, or next following line if blank
      */
@@ -1385,23 +1394,30 @@ trait Scanners extends ScannersCommon {
           bpbuf += current
         }
       }
+      def bracePairString(bp: BracePair, indent: Int): String = {
+        val rangeString = {
+          import bp._
+          val lline = line(loff)
+          val rline = line(roff)
+          val tokens = List(lline, lindent, rline, rindent) map (n => if (n < 0) "??" else "" + n)
+          "%s:%s to %s:%s".format(tokens: _*)
+        }
+        val outer  = (" " * indent) + rangeString
+        val inners = bp.nested map (bracePairString(_, indent + 2))
 
-      def printBP(bp: BracePair, indent: Int) {
-        println(" "*indent+line(bp.loff)+":"+bp.lindent+" to "+line(bp.roff)+":"+bp.rindent)
-        if (bp.nested.nonEmpty)
-          for (bp1 <- bp.nested) {
-            printBP(bp1, indent + 2)
-          }
+        if (inners.isEmpty) outer
+        else inners.mkString(outer + "\n", "\n", "")
       }
-//      println("lineStart = "+lineStart)//DEBUG
-//      println("bracepairs = ")
-//      for (bp <- bpbuf.toList) printBP(bp, 0)
+      def bpString    = bpbuf.toList map ("\n" + bracePairString(_, 0)) mkString ""
+      def startString = lineStart.mkString("line starts: [", ", ", "]")
+
+      log(s"\n$startString\n$bpString")
       bpbuf.toList
     }
 
     var tabSeen = false
 
-    def line(offset: Int): Int = {
+    def line(offset: Offset): Int = {
       def findLine(lo: Int, hi: Int): Int = {
         val mid = (lo + hi) / 2
         if (offset < lineStart(mid)) findLine(lo, mid - 1)
@@ -1412,7 +1428,7 @@ trait Scanners extends ScannersCommon {
       else findLine(0, lineStart.length - 1)
     }
 
-    def column(offset: Int): Int = {
+    def column(offset: Offset): Int = {
       var col = 0
       var i = offset - 1
       while (i >= 0 && buf(i) != CR && buf(i) != LF) {
@@ -1429,18 +1445,6 @@ trait Scanners extends ScannersCommon {
                         else bp :: insertPatch(bps, patch)
     }
 
-    def leftColumn(offset: Int) =
-      if (offset == -1) -1 else column(lineStart(line(offset)))
-
-    def rightColumn(offset: Int, default: Int) =
-      if (offset == -1) -1
-      else {
-        val rlin = line(offset)
-        if (lineStart(rlin) == offset) column(offset)
-        else if (rlin + 1 < lineStart.length) column(lineStart(rlin + 1))
-        else default
-      }
-
     def insertRBrace(): List[BracePatch] = {
       def insert(bps: List[BracePair]): List[BracePatch] = bps match {
         case List() => patches
@@ -1455,7 +1459,7 @@ trait Scanners extends ScannersCommon {
               while (lin < lineStart.length && column(lineStart(lin)) > lindent)
                 lin += 1
               if (lin < lineStart.length) {
-                val patches1 = insertPatch(patches, BracePatch(lineStart(lin), true))
+                val patches1 = insertPatch(patches, BracePatch(lineStart(lin), inserted = true))
                 //println("patch for "+bp+"/"+imbalanceMeasure+"/"+new ParensAnalyzer(unit, patches1).imbalanceMeasure)
                 /*if (improves(patches1))*/
                 patches1
@@ -1476,27 +1480,16 @@ trait Scanners extends ScannersCommon {
           else {
             val patches1 = delete(nested)
             if (patches1 ne patches) patches1
-            else insertPatch(patches, BracePatch(roff, false))
+            else insertPatch(patches, BracePatch(roff, inserted = false))
           }
       }
       delete(bracePairs)
     }
 
-    def imbalanceMeasure: Int = {
-      def measureList(bps: List[BracePair]): Int =
-        (bps map measure).sum
-      def measure(bp: BracePair): Int =
-        (if (bp.lindent != bp.rindent) 1 else 0) + measureList(bp.nested)
-      measureList(bracePairs)
-    }
-
-    def improves(patches1: List[BracePatch]): Boolean =
-      imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure
-
     // don't emit deprecation warnings about identifiers like `macro` or `then`
     // when skimming through the source file trying to heal braces
     override def emitIdentifierDeprecationWarnings = false
 
-    override def error(offset: Int, msg: String) {}
+    override def error(offset: Offset, msg: String) {}
   }
 }
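
Throughout the file, bare Int parameters are replaced by the Token and Offset
aliases; presumably these are plain type aliases (something like `type Token = Int`)
introduced earlier in ScannersCommon, so the change is purely about self-documenting
signatures. A minimal sketch of the pattern, with illustrative names:

    object AliasSketch {
      type Token  = Int      // hypothetical alias, mirroring what the patch relies on
      type Offset = Int
      final val EOF: Token = 0

      def isEof(token: Token): Boolean             = token == EOF
      def report(off: Offset, msg: String): String = s"error at $off: $msg"
    }
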
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index e8ef670..1abc0c8 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -7,11 +7,8 @@ package scala.tools.nsc
 package ast.parser
 
 import scala.collection.{ mutable, immutable }
-import scala.xml.{ EntityRef, Text }
-import scala.xml.XML.{ xmlns }
 import symtab.Flags.MUTABLE
 import scala.reflect.internal.util.StringOps.splitWhere
-import scala.language.implicitConversions
 
 /** This class builds instances of `Tree` that represent XML.
  *
@@ -133,7 +130,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
       case (Some(pre), rest)  => (const(pre), const(rest))
       case _                  => (wild, const(n))
     }
-    mkXML(pos, true, prepat, labpat, null, null, false, args)
+    mkXML(pos, isPattern = true, prepat, labpat, null, null, empty = false, args)
   }
 
   protected def convertToTextPat(t: Tree): Tree = t match {
@@ -144,14 +141,12 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
     (buf map convertToTextPat).toList
 
   def parseAttribute(pos: Position, s: String): Tree = {
-    val ts = scala.xml.Utility.parseAttributeValue(s) map {
-      case Text(s)      => text(pos, s)
-      case EntityRef(s) => entityRef(pos, s)
-    }
-    ts.length match {
-      case 0 => gen.mkNil
-      case 1 => ts.head
-      case _ => makeXMLseq(pos, ts.toList)
+    import xml.Utility.parseAttributeValue
+
+    parseAttributeValue(s, text(pos, _), entityRef(pos, _)) match {
+      case Nil      => gen.mkNil
+      case t :: Nil => t
+      case ts       => makeXMLseq(pos, ts.toList)
     }
   }
 
@@ -169,7 +164,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
   }
 
   /** Returns (Some(prefix) | None, rest) based on position of ':' */
-  def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', true) match {
+  def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', doDropIndex = true) match {
     case Some((pre, rest))  => (Some(pre), rest)
     case _                  => (None, name)
   }
@@ -197,9 +192,9 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
       uri1
     }
 
-    /** Extract all the namespaces from the attribute map. */
+    /* Extract all the namespaces from the attribute map. */
     val namespaces: List[Tree] =
-      for (z <- attrMap.keys.toList ; if z startsWith xmlns) yield {
+      for (z <- attrMap.keys.toList ; if z startsWith "xmlns") yield {
         val ns = splitPrefix(z) match {
           case (Some(_), rest)  => rest
           case _                => null
@@ -247,7 +242,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
 
     val body = mkXML(
       pos.makeTransparent,
-      false,
+      isPattern = false,
       const(pre),
       const(newlabel),
       makeSymbolicAttrs,
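
For reference, splitPrefix (used above both for element patterns and for the xmlns
handling) splits a qualified XML name at the first ':'. A rough stand-in with the
same shape, not the compiler's own implementation:

    object SplitPrefixSketch {
      def splitPrefix(name: String): (Option[String], String) =
        name.indexOf(':') match {
          case -1 => (None, name)
          case i  => (Some(name.substring(0, i)), name.substring(i + 1))
        }

      val ns    = splitPrefix("xmlns:scala")   // (Some(xmlns),scala)
      val plain = splitPrefix("title")         // (None,title)
    }
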
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
index 8a9ce89..3a695c6 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
@@ -11,26 +11,98 @@ import javac._
 /** An nsc sub-component.
  */
 abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParsers with Scanners with JavaParsers with JavaScanners {
+  import global._
 
   val phaseName = "parser"
-
   def newPhase(prev: Phase): StdPhase = new ParserPhase(prev)
 
-  class ParserPhase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) {
+  abstract class MemberDefTraverser extends Traverser {
+    def onMember(defn: MemberDef): Unit
+
+    private var depth: Int = 0
+    private def lower[T](body: => T): T = {
+      depth += 1
+      try body finally depth -= 1
+    }
+    def currentDepth = depth
+
+    /** Prune this tree and all trees beneath it. Can be overridden. */
+    def prune(md: MemberDef): Boolean = (
+         md.mods.isSynthetic
+      || md.mods.isParamAccessor
+      || nme.isConstructorName(md.name)
+      || (md.name containsName nme.ANON_CLASS_NAME)
+    )
+
+    override def traverse(t: Tree): Unit = t match {
+      case md: MemberDef if prune(md) =>
+      case md @ PackageDef(_, stats)  => traverseTrees(stats)
+      case md: ImplDef                => onMember(md) ; lower(traverseTrees(md.impl.body))
+      case md: ValOrDefDef            => onMember(md) ; lower(traverse(md.rhs))
+      case _                          => super.traverse(t)
+    }
+  }
+
+  class MemberPosReporter(unit: CompilationUnit) extends MemberDefTraverser {
+    private var outputFn: MemberDef => String = outputForScreen
+    val path = unit.source.file.path
+
+    // If a single line, outputs the line; if it spans multiple lines
+    // outputs NN,NN with start and end lines, e.g. 15,25.
+    def outputPos(md: MemberDef): String = {
+      val pos   = md.pos
+      val start = pos.focusStart.line
+      val end   = pos.focusEnd.line
+
+      if (start == end) "" + start else s"$start,$end"
+    }
+    def outputForSed(md: MemberDef): String = {
+      val pos_s = "%-12s" format outputPos(md) + "p"
+      s"$pos_s $path    # ${md.keyword} ${md.name}"
+    }
+    def outputForScreen(md: MemberDef): String = {
+      val pos_s = "%-20s" format " " * currentDepth + outputPos(md)
+      s"$pos_s ${md.keyword} ${md.name}"
+    }
+
+    def onMember(md: MemberDef) = println(outputFn(md))
+    // It recognizes "sed" and "anything else".
+    def show(style: String) {
+      if (style == "sed") {
+        outputFn = outputForSed
+        traverse(unit.body)
+      }
+      else {
+        outputFn = outputForScreen
+        println(path)
+        traverse(unit.body)
+      }
+      println("")
+    }
+  }
+
+  private def initialUnitBody(unit: CompilationUnit): Tree = {
+    if (unit.isJava) new JavaUnitParser(unit).parse()
+    else if (global.reporter.incompleteHandled) newUnitParser(unit).parse()
+    else newUnitParser(unit).smartParse()
+  }
+
+  class ParserPhase(prev: Phase) extends StdPhase(prev) {
     override val checkable = false
     override val keepsTypeParams = false
 
-    def apply(unit: global.CompilationUnit) {
-      import global._
+    def apply(unit: CompilationUnit) {
       informProgress("parsing " + unit)
-      unit.body =
-        if (unit.isJava) new JavaUnitParser(unit).parse()
-        else if (reporter.incompleteHandled) new UnitParser(unit).parse()
-        else new UnitParser(unit).smartParse()
+      // if the body is already filled in, don't overwrite it
+      // otherwise compileLate is going to overwrite bodies of synthetic source files
+      if (unit.body == EmptyTree)
+        unit.body = initialUnitBody(unit)
 
-      if (settings.Yrangepos.value && !reporter.hasErrors)
+      if (settings.Yrangepos && !reporter.hasErrors)
         validatePositions(unit.body)
+
+      if (settings.Ymemberpos.isSetByUser)
+        new MemberPosReporter(unit) show (style = settings.Ymemberpos.value)
     }
   }
 }
-
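
The new MemberPosReporter prints one line per non-synthetic member definition of the
freshly parsed unit; with the "sed" style selected through settings.Ymemberpos, each
line pairs a line range (suffixed with sed's p command) with the file path.
Hypothetical output for a small file (path and member names invented), following
outputForSed/outputPos above:

    3p           src/library/Example.scala    # object Example
    5,9p         src/library/Example.scala    # class Widget
    6p           src/library/Example.scala    # def render
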
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
index c3fd414..e624aec 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
@@ -6,159 +6,57 @@
 package scala.tools.nsc
 package ast.parser
 
-import scala.annotation.switch
-
-/** Common code between JavaTokens and Tokens.  Not as much (and not as concrete)
- *  as one might like because JavaTokens for no clear reason chose new numbers for
- *  identical token sets.
- */
-abstract class Tokens {
-  import scala.reflect.internal.Chars._
-
-  /** special tokens */
-  final val EMPTY = -3
-  final val UNDEF = -2
-  final val ERROR = -1
-  final val EOF = 0
-
-  /** literals */
-  final val CHARLIT = 1
-  final val INTLIT = 2
-  final val LONGLIT = 3
-  final val FLOATLIT = 4
-  final val DOUBLELIT = 5
-  final val STRINGLIT = 6
-
-  def LPAREN: Int
-  def RBRACE: Int
-
-  def isIdentifier(code: Int): Boolean
-  def isLiteral(code: Int): Boolean
-  def isKeyword(code: Int): Boolean
-  def isSymbol(code: Int): Boolean
-
-  final def isSpace(at: Char)       = at == ' ' || at == '\t'
-  final def isNewLine(at: Char)     = at == CR || at == LF || at == FF
-  final def isBrace(code: Int)      = code >= LPAREN && code <= RBRACE
-  final def isOpenBrace(code: Int)  = isBrace(code) && (code % 2 == 0)
-  final def isCloseBrace(code: Int) = isBrace(code) && (code % 2 == 1)
-}
-
-object Tokens extends Tokens {
-  final val STRINGPART = 7  // a part of an interpolated string
+object Tokens extends CommonTokens {
+  final val STRINGPART = 7 // a part of an interpolated string
   final val SYMBOLLIT = 8
   final val INTERPOLATIONID = 9 // the lead identifier of an interpolated string
 
-  def isLiteral(code: Int) =
-    code >= CHARLIT && code <= INTERPOLATIONID
-
+  def isLiteral(code: Int) = code >= CHARLIT && code <= INTERPOLATIONID
 
   /** identifiers */
   final val IDENTIFIER = 10
   final val BACKQUOTED_IDENT = 11
-  def isIdentifier(code: Int) =
-    code >= IDENTIFIER && code <= BACKQUOTED_IDENT
-
-  @switch def canBeginExpression(code: Int) = code match {
-    case IDENTIFIER|BACKQUOTED_IDENT|USCORE       => true
-    case LBRACE|LPAREN|LBRACKET|COMMENT           => true
-    case IF|DO|WHILE|FOR|NEW|TRY|THROW            => true
-    case NULL|THIS|TRUE|FALSE                     => true
-    case code                                     => isLiteral(code)
-  }
-
-  /** keywords */
-  final val IF = 20
-  final val FOR = 21
-  final val ELSE = 22
-  final val THIS = 23
-  final val NULL = 24
-  final val NEW = 25
-  final val WITH = 26
-  final val SUPER = 27
-  final val CASE = 28
-  final val CASECLASS = 29
-  final val CASEOBJECT = 30
-  final val VAL = 31
-  final val ABSTRACT = 32
-  final val FINAL = 33
-  final val PRIVATE = 34
-  final val PROTECTED = 35
-  final val OVERRIDE = 36
-  final val IMPLICIT = 37
-  final val VAR = 38
-  final val DEF = 39
-  final val TYPE = 40
-  final val EXTENDS = 41
-  final val TRUE = 42
-  final val FALSE = 43
-  final val OBJECT = 44
-  final val CLASS = 45
-
-  final val IMPORT = 46
-  final val PACKAGE = 47
-  final val YIELD = 48
-  final val DO = 49
-  final val TRAIT = 50
-  final val SEALED = 51
-  final val THROW = 52
-  final val TRY = 53
-  final val CATCH = 54
-  final val FINALLY = 55
-  final val WHILE = 56
-  final val RETURN = 57
-  final val MATCH = 58
-  final val FORSOME = 59
-  final val LAZY = 61
-  final val MACRO = 62 // not yet used in 2.10
-  final val THEN = 63  // not yet used in 2.10
-
-  def isKeyword(code: Int) =
-    code >= IF && code <= LAZY
-
-  @switch def isDefinition(code: Int) = code match {
-    case CLASS|TRAIT|OBJECT => true
-    case CASECLASS|CASEOBJECT => true
-    case DEF|VAL|VAR => true
-    case TYPE => true
-    case _ => false
-  }
+  def isIdentifier(code: Int) = code == IDENTIFIER || code == BACKQUOTED_IDENT // used by ide
+
+  /** modifiers */
+  final val IMPLICIT = 40
+  final val OVERRIDE = 41
+  final val SEALED = 45
+  final val LAZY = 55
+  final val MACRO = 57
+
+  /** templates */
+  final val CASECLASS = 63
+  final val OBJECT = 64
+  final val CASEOBJECT = 65
+  final val TRAIT = 66
+  final val WITH = 69
+  final val TYPE = 70
+  final val FORSOME = 71
+  final val DEF = 72
+  final val VAL = 73
+  final val VAR = 74
+
+  /** control structures */
+  final val THEN = 81
+  final val YIELD = 86
+  final val MATCH = 95
 
   /** special symbols */
-  final val COMMA = 70
-  final val SEMI = 71
-  final val DOT = 72
-  final val USCORE = 73
-  final val COLON = 74
-  final val EQUALS = 75
-  final val LARROW = 76
-  final val ARROW = 77
-  final val NEWLINE = 78
-  final val NEWLINES = 79
-  final val SUBTYPE = 80
-  final val SUPERTYPE = 81
-  final val HASH = 82
-  final val AT = 83
-  final val VIEWBOUND = 84
-
-  def isSymbol(code: Int) =
-    code >= COMMA && code <= VIEWBOUND
-
-  /** parenthesis */
-  final val LPAREN = 90
-  final val RPAREN = 91
-  final val LBRACKET = 92
-  final val RBRACKET = 93
-  final val LBRACE = 94
-  final val RBRACE = 95
-
-  /** XML mode */
-  final val XMLSTART = 96
+  final val HASH = 130
+  final val USCORE = 131
+  final val ARROW = 132
+  final val LARROW = 133
+  final val SUBTYPE = 134
+  final val SUPERTYPE = 135
+  final val VIEWBOUND = 136
+  final val NEWLINE = 137
+  final val NEWLINES = 138
+  final val XMLSTART = 139
 
   /** for IDE only */
-  final val COMMENT = 97
-
-  final val WHITESPACE = 105
-  final val IGNORE = 106
-  final val ESCAPE = 109
+  final val COMMENT = 200
+  final val WHITESPACE = 201
+  final val IGNORE = 202
+  final val ESCAPE = 203
 }
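
With the renumbering above, the classification predicates stay simple range or
identity checks. A small illustration of how they behave, assuming scala-compiler
is on the classpath:

    object TokenChecks {
      import scala.tools.nsc.ast.parser.Tokens._

      val id1 = isIdentifier(IDENTIFIER)         // true
      val id2 = isIdentifier(BACKQUOTED_IDENT)   // true
      val li1 = isLiteral(STRINGLIT)             // true: CHARLIT <= code <= INTERPOLATIONID
      val li2 = isLiteral(IDENTIFIER)            // false: 10 is outside the literal range
    }
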
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 1412bff..6e5a3f6 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -8,119 +8,29 @@ package ast.parser
 
 import symtab.Flags._
 import scala.collection.mutable.ListBuffer
+import scala.reflect.internal.util.{Position, SourceFile, FreshNameCreator}
 
 /** Methods for building trees, used in the parser.  All the trees
  *  returned by this class must be untyped.
  */
 abstract class TreeBuilder {
-
   val global: Global
   import global._
 
-  def freshName(): Name = freshName("x$")
-  def freshTermName(): TermName = freshTermName("x$")
+  def unit: CompilationUnit
+  def source: SourceFile
 
-  def freshName(prefix: String): Name
-  def freshTermName(prefix: String): TermName
-  def freshTypeName(prefix: String): TypeName
-  def o2p(offset: Int): Position
-  def r2p(start: Int, point: Int, end: Int): Position
+  implicit def fresh: FreshNameCreator              = unit.fresh
+  def o2p(offset: Int): Position                    = Position.offset(source, offset)
+  def r2p(start: Int, mid: Int, end: Int): Position = rangePos(source, start, mid, end)
 
-  def rootId(name: Name)       = gen.rootId(name)
   def rootScalaDot(name: Name) = gen.rootScalaDot(name)
   def scalaDot(name: Name)     = gen.scalaDot(name)
   def scalaAnyRefConstr        = scalaDot(tpnme.AnyRef)
-  def scalaAnyValConstr        = scalaDot(tpnme.AnyVal)
-  def scalaAnyConstr           = scalaDot(tpnme.Any)
   def scalaUnitConstr          = scalaDot(tpnme.Unit)
-  def productConstr            = scalaDot(tpnme.Product)
-  def productConstrN(n: Int)   = scalaDot(newTypeName("Product" + n))
-  def serializableConstr       = scalaDot(tpnme.Serializable)
 
   def convertToTypeName(t: Tree) = gen.convertToTypeName(t)
 
-  /** Convert all occurrences of (lower-case) variables in a pattern as follows:
-   *    x                  becomes      x @ _
-   *    x: T               becomes      x @ (_: T)
-   */
-  private object patvarTransformer extends Transformer {
-    override def transform(tree: Tree): Tree = tree match {
-      case Ident(name) if (treeInfo.isVarPattern(tree) && name != nme.WILDCARD) =>
-        atPos(tree.pos)(Bind(name, atPos(tree.pos.focus) (Ident(nme.WILDCARD))))
-      case Typed(id @ Ident(name), tpt) if (treeInfo.isVarPattern(id) && name != nme.WILDCARD) =>
-        atPos(tree.pos.withPoint(id.pos.point)) {
-          Bind(name, atPos(tree.pos.withStart(tree.pos.point)) {
-            Typed(Ident(nme.WILDCARD), tpt)
-          })
-        }
-      case Apply(fn @ Apply(_, _), args) =>
-        treeCopy.Apply(tree, transform(fn), transformTrees(args))
-      case Apply(fn, args) =>
-        treeCopy.Apply(tree, fn, transformTrees(args))
-      case Typed(expr, tpt) =>
-        treeCopy.Typed(tree, transform(expr), tpt)
-      case Bind(name, body) =>
-        treeCopy.Bind(tree, name, transform(body))
-      case Alternative(_) | Star(_) =>
-        super.transform(tree)
-      case _ =>
-        tree
-    }
-  }
-
-  /** Traverse pattern and collect all variable names with their types in buffer
-   *  The variables keep their positions; whereas the pattern is converted to be
-   *  synthetic for all nodes that contain a variable position.
-   */
-  class GetVarTraverser extends Traverser {
-    val buf = new ListBuffer[(Name, Tree, Position)]
-
-    def namePos(tree: Tree, name: Name): Position =
-      if (!tree.pos.isRange || name.containsName(nme.raw.DOLLAR)) tree.pos.focus
-      else {
-        val start = tree.pos.start
-        val end = start + name.decode.length
-        r2p(start, start, end)
-      }
-
-    override def traverse(tree: Tree): Unit = {
-      def seenName(name: Name)     = buf exists (_._1 == name)
-      def add(name: Name, t: Tree) = if (!seenName(name)) buf += ((name, t, namePos(tree, name)))
-      val bl = buf.length
-
-      tree match {
-        case Bind(nme.WILDCARD, _)          =>
-          super.traverse(tree)
-
-        case Bind(name, Typed(tree1, tpt))  =>
-          val newTree = if (treeInfo.mayBeTypePat(tpt)) TypeTree() else tpt.duplicate
-          add(name, newTree)
-          traverse(tree1)
-
-        case Bind(name, tree1)              =>
-          // can assume only name range as position, as otherwise might overlap
-          // with binds embedded in pattern tree1
-          add(name, TypeTree())
-          traverse(tree1)
-
-        case _ =>
-          super.traverse(tree)
-      }
-      if (buf.length > bl)
-        tree setPos tree.pos.makeTransparent
-    }
-    def apply(tree: Tree) = {
-      traverse(tree)
-      buf.toList
-    }
-  }
-
-  /** Returns list of all pattern variables, possibly with their types,
-   *  without duplicates
-   */
-  private def getVariables(tree: Tree): List[(Name, Tree, Position)] =
-    new GetVarTraverser apply tree
-
   def byNameApplication(tpe: Tree): Tree =
     AppliedTypeTree(rootScalaDot(tpnme.BYNAME_PARAM_CLASS_NAME), List(tpe))
   def repeatedApplication(tpe: Tree): Tree =
@@ -129,25 +39,12 @@ abstract class TreeBuilder {
   def makeImportSelector(name: Name, nameOffset: Int): ImportSelector =
     ImportSelector(name, nameOffset, name, nameOffset)
 
-  private def makeTuple(trees: List[Tree], isType: Boolean): Tree = {
-    val tupString = "Tuple" + trees.length
-    Apply(scalaDot(if (isType) newTypeName(tupString) else newTermName(tupString)), trees)
-  }
+  def makeTupleTerm(elems: List[Tree]) = gen.mkTuple(elems)
 
-  def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
-    case Nil => Literal(Constant())
-    case List(tree) if flattenUnary => tree
-    case _ => makeTuple(trees, false)
-  }
-
-  def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
-    case Nil => scalaUnitConstr
-    case List(tree) if flattenUnary => tree
-    case _ => AppliedTypeTree(scalaDot(newTypeName("Tuple" + trees.length)), trees)
-  }
+  def makeTupleType(elems: List[Tree]) = gen.mkTupleType(elems)
 
   def stripParens(t: Tree) = t match {
-    case Parens(ts) => atPos(t.pos) { makeTupleTerm(ts, true) }
+    case Parens(ts) => atPos(t.pos) { makeTupleTerm(ts) }
     case _ => t
   }
 
@@ -157,323 +54,67 @@ abstract class TreeBuilder {
   def makeSelfDef(name: TermName, tpt: Tree): ValDef =
     ValDef(Modifiers(PRIVATE), name, tpt, EmptyTree)
 
-  /** If tree is a variable pattern, return Some("its name and type").
-   *  Otherwise return none */
-  private def matchVarPattern(tree: Tree): Option[(Name, Tree)] = {
-    def wildType(t: Tree): Option[Tree] = t match {
-      case Ident(x) if x.toTermName == nme.WILDCARD             => Some(TypeTree())
-      case Typed(Ident(x), tpt) if x.toTermName == nme.WILDCARD => Some(tpt)
-      case _                                                    => None
-    }
-    tree match {
-      case Ident(name)             => Some((name, TypeTree()))
-      case Bind(name, body)        => wildType(body) map (x => (name, x))
-      case Typed(Ident(name), tpt) => Some((name, tpt))
-      case _                       => None
-    }
-  }
-
   /** Create tree representing (unencoded) binary operation expression or pattern. */
-  def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position): Tree = {
-    def mkNamed(args: List[Tree]) =
-      if (isExpr) args map {
-        case a @ Assign(id @ Ident(name), rhs) =>
-          atPos(a.pos) { AssignOrNamedArg(id, rhs) }
-        case e => e
-      } else args
+  def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position, targs: List[Tree] = Nil): Tree = {
+    require(isExpr || targs.isEmpty || targs.exists(_.isErroneous), s"Incompatible args to makeBinop: !isExpr but targs=$targs")
+
+    def mkSelection(t: Tree) = {
+      def sel = atPos(opPos union t.pos)(Select(stripParens(t), op.encode))
+      if (targs.isEmpty) sel else atPos(left.pos)(TypeApply(sel, targs))
+    }
+    def mkNamed(args: List[Tree]) = if (isExpr) args map treeInfo.assignmentToMaybeNamedArg else args
     val arguments = right match {
       case Parens(args) => mkNamed(args)
-      case _ => List(right)
+      case _            => List(right)
     }
     if (isExpr) {
       if (treeInfo.isLeftAssoc(op)) {
-        Apply(atPos(opPos union left.pos) { Select(stripParens(left), op.encode) }, arguments)
+        Apply(mkSelection(left), arguments)
       } else {
         val x = freshTermName()
         Block(
-          List(ValDef(Modifiers(SYNTHETIC), x, TypeTree(), stripParens(left))),
-          Apply(atPos(opPos union right.pos) { Select(stripParens(right), op.encode) }, List(Ident(x))))
+          List(ValDef(Modifiers(SYNTHETIC | ARTIFACT), x, TypeTree(), stripParens(left))),
+          Apply(mkSelection(right), List(Ident(x))))
       }
     } else {
       Apply(Ident(op.encode), stripParens(left) :: arguments)
     }
   }
 
-  /** Creates a tree representing new Object { stats }.
-   *  To make sure an anonymous subclass of Object is created,
-   *  if there are no stats, a () is added.
-   */
-  def makeAnonymousNew(stats: List[Tree]): Tree = {
-    val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
-    makeNew(Nil, emptyValDef, stats1, ListOfNil, NoPosition, NoPosition)
-  }
-
-  /** Create positioned tree representing an object creation <new parents { stats }
-   *  @param npos  the position of the new
-   *  @param cpos  the position of the anonymous class starting with parents
-   */
-  def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree], argss: List[List[Tree]],
-              npos: Position, cpos: Position): Tree =
-    if (parents.isEmpty)
-      makeNew(List(scalaAnyRefConstr), self, stats, argss, npos, cpos)
-    else if (parents.tail.isEmpty && stats.isEmpty)
-      atPos(npos union cpos) { New(parents.head, argss) }
-    else {
-      val x = tpnme.ANON_CLASS_NAME
-      atPos(npos union cpos) {
-        Block(
-          List(
-            atPos(cpos) {
-              ClassDef(
-                Modifiers(FINAL), x, Nil,
-                Template(parents, self, NoMods, ListOfNil, argss, stats, cpos.focus))
-            }),
-          atPos(npos) {
-            New(
-              Ident(x) setPos npos.focus,
-              ListOfNil)
-          }
-        )
-      }
-    }
-
-  /** Create a tree representing an assignment <lhs = rhs> */
-  def makeAssign(lhs: Tree, rhs: Tree): Tree = lhs match {
-    case Apply(fn, args) =>
-      Apply(atPos(fn.pos) { Select(fn, nme.update) }, args ::: List(rhs))
-    case _ =>
-      Assign(lhs, rhs)
-  }
-
   /** Tree for `od op`, start is start0 if od.pos is borked. */
   def makePostfixSelect(start0: Int, end: Int, od: Tree, op: Name): Tree = {
-    val start = if (od.pos.isDefined) od.pos.startOrPoint else start0
+    val start = if (od.pos.isDefined) od.pos.start else start0
     atPos(r2p(start, end, end + op.length)) { new PostfixSelect(od, op.encode) }
   }
 
-  /** A type tree corresponding to (possibly unary) intersection type */
-  def makeIntersectionTypeTree(tps: List[Tree]): Tree =
-    if (tps.tail.isEmpty) tps.head
-    else CompoundTypeTree(Template(tps, emptyValDef, Nil))
-
   /** Create tree representing a while loop */
   def makeWhile(startPos: Int, cond: Tree, body: Tree): Tree = {
     val lname = freshTermName(nme.WHILE_PREFIX)
     def default = wrappingPos(List(cond, body)) match {
-      case p if p.isDefined => p.endOrPoint
+      case p if p.isDefined => p.end
       case _                => startPos
     }
     val continu = atPos(o2p(body.pos pointOrElse default)) { Apply(Ident(lname), Nil) }
-    val rhs = If(cond, Block(List(body), continu), Literal(Constant()))
+    val rhs = If(cond, Block(List(body), continu), Literal(Constant(())))
     LabelDef(lname, Nil, rhs)
   }
 
   /** Create tree representing a do-while loop */
   def makeDoWhile(lname: TermName, body: Tree, cond: Tree): Tree = {
     val continu = Apply(Ident(lname), Nil)
-    val rhs = Block(List(body), If(cond, continu, Literal(Constant())))
+    val rhs = Block(List(body), If(cond, continu, Literal(Constant(()))))
     LabelDef(lname, Nil, rhs)
   }
 
   /** Create block of statements `stats`  */
-  def makeBlock(stats: List[Tree]): Tree =
-    if (stats.isEmpty) Literal(Constant())
-    else if (!stats.last.isTerm) Block(stats, Literal(Constant()))
-    else if (stats.length == 1) stats.head
-    else Block(stats.init, stats.last)
-
-  def makeFilter(tree: Tree, condition: Tree, scrutineeName: String): Tree = {
-    val cases = List(
-      CaseDef(condition, EmptyTree, Literal(Constant(true))),
-      CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
-    )
-    val matchTree = makeVisitor(cases, false, scrutineeName)
-
-    atPos(tree.pos)(Apply(Select(tree, nme.withFilter), matchTree :: Nil))
-  }
-
-  /** Create tree for for-comprehension generator <val pat0 <- rhs0> */
-  def makeGenerator(pos: Position, pat: Tree, valeq: Boolean, rhs: Tree): Enumerator = {
-    val pat1 = patvarTransformer.transform(pat)
-    val rhs1 =
-      if (valeq || treeInfo.isVarPatternDeep(pat)) rhs
-      else makeFilter(rhs, pat1.duplicate, nme.CHECK_IF_REFUTABLE_STRING)
-
-    if (valeq) ValEq(pos, pat1, rhs1)
-    else ValFrom(pos, pat1, rhs1)
-  }
+  def makeBlock(stats: List[Tree]): Tree = gen.mkBlock(stats)
 
   def makeParam(pname: TermName, tpe: Tree) =
     ValDef(Modifiers(PARAM), pname, tpe, EmptyTree)
 
-  def makeSyntheticParam(pname: TermName) =
-    ValDef(Modifiers(PARAM | SYNTHETIC), pname, TypeTree(), EmptyTree)
-
   def makeSyntheticTypeParam(pname: TypeName, bounds: Tree) =
     TypeDef(Modifiers(DEFERRED | SYNTHETIC), pname, Nil, bounds)
 
-  abstract class Enumerator { def pos: Position }
-  case class ValFrom(pos: Position, pat: Tree, rhs: Tree) extends Enumerator
-  case class ValEq(pos: Position, pat: Tree, rhs: Tree) extends Enumerator
-  case class Filter(pos: Position, test: Tree) extends Enumerator
-
-  /** Create tree for for-comprehension <for (enums) do body> or
-  *   <for (enums) yield body> where mapName and flatMapName are chosen
-  *  corresponding to whether this is a for-do or a for-yield.
-  *  The creation performs the following rewrite rules:
-  *
-  *  1.
-  *
-  *    for (P <- G) E   ==>   G.foreach (P => E)
-  *
-  *     Here and in the following (P => E) is interpreted as the function (P => E)
-  *     if P is a variable pattern and as the partial function { case P => E } otherwise.
-  *
-  *  2.
-  *
-  *    for (P <- G) yield E  ==>  G.map (P => E)
-  *
-  *  3.
-  *
-  *    for (P_1 <- G_1; P_2 <- G_2; ...) ...
-  *      ==>
-  *    G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...)
-  *
-  *  4.
-  *
-  *    for (P <- G; E; ...) ...
-  *      =>
-  *    for (P <- G.filter (P => E); ...) ...
-  *
-  *  5. For N < MaxTupleArity:
-  *
-  *    for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...)
-  *      ==>
-  *    for (TupleN(P_1, P_2, ... P_N) <-
-  *      for (x_1 @ P_1 <- G) yield {
-  *        val x_2 @ P_2 = E_2
-  *        ...
-  *        val x_N & P_N = E_N
-  *        TupleN(x_1, ..., x_N)
-  *      } ...)
-  *
-  *    If any of the P_i are variable patterns, the corresponding `x_i @ P_i' is not generated
-  *    and the variable constituting P_i is used instead of x_i
-  *
-  *  @param mapName      The name to be used for maps (either map or foreach)
-  *  @param flatMapName  The name to be used for flatMaps (either flatMap or foreach)
-  *  @param enums        The enumerators in the for expression
-  *  @param body          The body of the for expression
-  */
-  private def makeFor(mapName: TermName, flatMapName: TermName, enums: List[Enumerator], body: Tree): Tree = {
-
-    /** make a closure pat => body.
-     *  The closure is assigned a transparent position with the point at pos.point and
-     *  the limits given by pat and body.
-     */
-    def makeClosure(pos: Position, pat: Tree, body: Tree): Tree = {
-      def splitpos = wrappingPos(List(pat, body)).withPoint(pos.point).makeTransparent
-      matchVarPattern(pat) match {
-        case Some((name, tpt)) =>
-          Function(
-            List(atPos(pat.pos) { ValDef(Modifiers(PARAM), name.toTermName, tpt, EmptyTree) }),
-            body) setPos splitpos
-        case None =>
-          atPos(splitpos) {
-            makeVisitor(List(CaseDef(pat, EmptyTree, body)), false)
-          }
-      }
-    }
-
-    /** Make an application  qual.meth(pat => body) positioned at `pos`.
-     */
-    def makeCombination(pos: Position, meth: TermName, qual: Tree, pat: Tree, body: Tree): Tree =
-      Apply(Select(qual, meth) setPos qual.pos, List(makeClosure(pos, pat, body))) setPos pos
-
-    /** Optionally, if pattern is a `Bind`, the bound name, otherwise None.
-     */
-    def patternVar(pat: Tree): Option[Name] = pat match {
-      case Bind(name, _) => Some(name)
-      case _ => None
-    }
-
-    /** If `pat` is not yet a `Bind` wrap it in one with a fresh name
-     */
-    def makeBind(pat: Tree): Tree = pat match {
-      case Bind(_, _) => pat
-      case _ => Bind(freshName(), pat) setPos pat.pos
-    }
-
-    /** A reference to the name bound in Bind `pat`.
-     */
-    def makeValue(pat: Tree): Tree = pat match {
-      case Bind(name, _) => Ident(name) setPos pat.pos.focus
-    }
-
-    /** The position of the closure that starts with generator at position `genpos`.
-     */
-    def closurePos(genpos: Position) = {
-      val end = body.pos match {
-        case NoPosition => genpos.point
-        case bodypos => bodypos.endOrPoint
-      }
-      r2p(genpos.startOrPoint, genpos.point, end)
-    }
-
-//    val result =
-    enums match {
-      case ValFrom(pos, pat, rhs) :: Nil =>
-        makeCombination(closurePos(pos), mapName, rhs, pat, body)
-      case ValFrom(pos, pat, rhs) :: (rest @ (ValFrom(_,  _, _) :: _)) =>
-        makeCombination(closurePos(pos), flatMapName, rhs, pat,
-                        makeFor(mapName, flatMapName, rest, body))
-      case ValFrom(pos, pat, rhs) :: Filter(_, test) :: rest =>
-        makeFor(mapName, flatMapName,
-                ValFrom(pos, pat, makeCombination(rhs.pos union test.pos, nme.withFilter, rhs, pat.duplicate, test)) :: rest,
-                body)
-      case ValFrom(pos, pat, rhs) :: rest =>
-        val valeqs = rest.take(definitions.MaxTupleArity - 1).takeWhile(_.isInstanceOf[ValEq]);
-        assert(!valeqs.isEmpty)
-        val rest1 = rest.drop(valeqs.length)
-        val pats = valeqs map { case ValEq(_, pat, _) => pat }
-        val rhss = valeqs map { case ValEq(_, _, rhs) => rhs }
-        val defpat1 = makeBind(pat)
-        val defpats = pats map makeBind
-        val pdefs = (defpats, rhss).zipped flatMap makePatDef
-        val ids = (defpat1 :: defpats) map makeValue
-        val rhs1 = makeForYield(
-          List(ValFrom(pos, defpat1, rhs)),
-          Block(pdefs, atPos(wrappingPos(ids)) { makeTupleTerm(ids, true) }) setPos wrappingPos(pdefs))
-        val allpats = (pat :: pats) map (_.duplicate)
-        val vfrom1 = ValFrom(r2p(pos.startOrPoint, pos.point, rhs1.pos.endOrPoint), atPos(wrappingPos(allpats)) { makeTuple(allpats, false) } , rhs1)
-        makeFor(mapName, flatMapName, vfrom1 :: rest1, body)
-      case _ =>
-        EmptyTree //may happen for erroneous input
-    }
-//    println("made for "+result)
-//    result
-  }
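As a source-level illustration of the rewrite rules above (user-level Scala, not TreeBuilder calls; the names `pairs` and `pairsDesugared` are illustrative only), a small for-yield with a generator, a guard, and a value definition desugars roughly as follows:

    val pairs =
      for {
        x <- List(1, 2, 3)     // generator        -> flatMap / map (rules 2, 3)
        if x % 2 == 1          // guard            -> withFilter    (rule 4)
        y = x * 10             // value definition -> tuple trick   (rule 5)
        z <- List(y, y + 1)    // last generator   -> map           (rule 2)
      } yield (x, z)

    // is rewritten, roughly, to:
    val pairsDesugared =
      List(1, 2, 3)
        .withFilter(x => x % 2 == 1)
        .map { x => val y = x * 10; (x, y) }
        .flatMap { case (x, y) => List(y, y + 1).map(z => (x, z)) }

    // both evaluate to List((1,10), (1,11), (3,30), (3,31))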
-
-  /** Create tree for for-do comprehension <for (enums) body> */
-  def makeFor(enums: List[Enumerator], body: Tree): Tree =
-    makeFor(nme.foreach, nme.foreach, enums, body)
-
-  /** Create tree for for-yield comprehension <for (enums) yield body> */
-  def makeForYield(enums: List[Enumerator], body: Tree): Tree =
-    makeFor(nme.map, nme.flatMap, enums, body)
-
-  /** Create tree for a lifted expression XX-LIFTING
-   */
-  def makeLifted(gs: List[ValFrom], body: Tree): Tree = {
-    def combine(gs: List[ValFrom]): ValFrom = (gs: @unchecked) match {
-      case g :: Nil => g
-      case ValFrom(pos1, pat1, rhs1) :: gs2 =>
-        val ValFrom(pos2, pat2, rhs2) = combine(gs2)
-        ValFrom(pos1, makeTuple(List(pat1, pat2), false), Apply(Select(rhs1, nme.zip), List(rhs2)))
-    }
-    makeForYield(List(combine(gs)), body)
-  }
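The lifted form pairs the generators with `zip` instead of nesting them; a rough source-level equivalent (illustrative names only):

    // for the generators (x <- xs, y <- ys), makeLifted builds the equivalent of
    val xs = List(1, 2, 3)
    val ys = List("a", "b", "c")
    val lifted = xs.zip(ys).map { case (x, y) => (x, y) }   // the body here is just (x, y)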
-
   /** Create tree for a pattern alternative */
   def makeAlternative(ts: List[Tree]): Tree = {
     def alternatives(t: Tree): List[Tree] = t match {
@@ -483,21 +124,9 @@ abstract class TreeBuilder {
     Alternative(ts flatMap alternatives)
   }
 
-  /** Create visitor <x => x match cases> */
-  def makeVisitor(cases: List[CaseDef], checkExhaustive: Boolean): Tree =
-    makeVisitor(cases, checkExhaustive, "x$")
-
-  /** Create visitor <x => x match cases> */
-  def makeVisitor(cases: List[CaseDef], checkExhaustive: Boolean, prefix: String): Tree = {
-    val x   = freshTermName(prefix)
-    val id  = Ident(x)
-    val sel = if (checkExhaustive) id else gen.mkUnchecked(id)
-    Function(List(makeSyntheticParam(x)), Match(sel, cases))
-  }
-
   /** Create tree for case definition <case pat if guard => rhs> */
   def makeCaseDef(pat: Tree, guard: Tree, rhs: Tree): CaseDef =
-    CaseDef(patvarTransformer.transform(pat), guard, rhs)
+    CaseDef(gen.patvarTransformer.transform(pat), guard, rhs)
 
   /** Creates tree representing:
    *    { case x: Throwable =>
@@ -506,9 +135,9 @@ abstract class TreeBuilder {
    *    }
    */
   def makeCatchFromExpr(catchExpr: Tree): CaseDef = {
-    val binder   = freshTermName("x")
+    val binder   = freshTermName()
     val pat      = Bind(binder, Typed(Ident(nme.WILDCARD), Ident(tpnme.Throwable)))
-    val catchDef = ValDef(NoMods, freshTermName("catchExpr"), TypeTree(), catchExpr)
+    val catchDef = ValDef(Modifiers(ARTIFACT), freshTermName("catchExpr"), TypeTree(), catchExpr)
     val catchFn  = Ident(catchDef.name)
     val body     = atPos(catchExpr.pos.makeTransparent)(Block(
       List(catchDef),
@@ -521,79 +150,8 @@ abstract class TreeBuilder {
     makeCaseDef(pat, EmptyTree, body)
   }
 
-  /** Create tree for pattern definition <val pat0 = rhs> */
-  def makePatDef(pat: Tree, rhs: Tree): List[Tree] =
-    makePatDef(Modifiers(0), pat, rhs)
-
-  /** Create tree for pattern definition <mods val pat0 = rhs> */
-  def makePatDef(mods: Modifiers, pat: Tree, rhs: Tree): List[Tree] = matchVarPattern(pat) match {
-    case Some((name, tpt)) =>
-      List(atPos(pat.pos union rhs.pos) {
-        ValDef(mods, name.toTermName, tpt, rhs)
-      })
-
-    case None =>
-      //  in case there is exactly one variable x_1 in pattern
-      //  val/var p = e  ==>  val/var x_1 = e.match (case p => (x_1))
-      //
-      //  in case there are zero or more than one variables in pattern
-      //  val/var p = e  ==>  private synthetic val t$ = e.match (case p => (x_1, ..., x_N))
-      //                  val/var x_1 = t$._1
-      //                  ...
-      //                  val/var x_N = t$._N
-
-      val rhsUnchecked = gen.mkUnchecked(rhs)
-
-      // TODO: clean this up -- there is too much information packed into makePatDef's `pat` argument
-      // when it's a simple identifier (case Some((name, tpt)) -- above),
-      // pat should have the type ascription that was specified by the user
-      // however, in `case None` (here), we must be careful not to generate illegal pattern trees (such as `(a, b): Tuple2[Int, String]`)
-      // i.e., this must hold: pat1 match { case Typed(expr, tp) => assert(expr.isInstanceOf[Ident]) case _ => }
-      // if we encounter such an erroneous pattern, we strip off the type ascription from pat and propagate the type information to rhs
-      val (pat1, rhs1) = patvarTransformer.transform(pat) match {
-        // move the Typed ascription to the rhs
-        case Typed(expr, tpt) if !expr.isInstanceOf[Ident] =>
-          val rhsTypedUnchecked =
-            if (tpt.isEmpty) rhsUnchecked
-            else Typed(rhsUnchecked, tpt) setPos (rhs.pos union tpt.pos)
-          (expr, rhsTypedUnchecked)
-        case ok =>
-          (ok, rhsUnchecked)
-      }
-      val vars = getVariables(pat1)
-      val matchExpr = atPos((pat1.pos union rhs.pos).makeTransparent) {
-        Match(
-          rhs1,
-          List(
-            atPos(pat1.pos) {
-              CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident.apply, true))
-            }
-          ))
-      }
-      vars match {
-        case List((vname, tpt, pos)) =>
-          List(atPos(pat.pos union pos union rhs.pos) {
-            ValDef(mods, vname.toTermName, tpt, matchExpr)
-          })
-        case _ =>
-          val tmp = freshTermName()
-          val firstDef =
-            atPos(matchExpr.pos) {
-              ValDef(Modifiers(PrivateLocal | SYNTHETIC | (mods.flags & LAZY)),
-                     tmp, TypeTree(), matchExpr)
-            }
-          var cnt = 0
-          val restDefs = for ((vname, tpt, pos) <- vars) yield atPos(pos) {
-            cnt += 1
-            ValDef(mods, vname.toTermName, tpt, Select(Ident(tmp), newTermName("_" + cnt)))
-          }
-          firstDef :: restDefs
-      }
-  }
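A hedged, source-level sketch of the expansion described in the comments above; the real transformation works on trees and uses fresh synthetic names, so `t` and `PatDefExpansionSketch` here are purely illustrative:

    object PatDefExpansionSketch {
      // What the user writes:
      //   val (a, b) = (1, "one")

      // Roughly what makePatDef produces for the multi-variable case:
      private val t = ((1, "one"): @unchecked) match { case (a, b) => (a, b) }
      val a = t._1
      val b = t._2
    }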
-
   /** Create a tree representing the function type (argtpes) => restpe */
-  def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
-    AppliedTypeTree(rootScalaDot(newTypeName("Function" + argtpes.length)), argtpes ::: List(restpe))
+  def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree = gen.mkFunctionTypeTree(argtpes, restpe)
 
   /** Append implicit parameter section if `contextBounds` nonempty */
   def addEvidenceParams(owner: Name, vparamss: List[List[ValDef]], contextBounds: List[Tree]): List[List[ValDef]] = {
@@ -610,4 +168,6 @@ abstract class TreeBuilder {
         vparamss ::: List(evidenceParams)
     }
   }
+
+  def makePatDef(mods: Modifiers, pat: Tree, rhs: Tree) = gen.mkPatDef(mods, pat, rhs)
 }
diff --git a/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala b/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala
new file mode 100644
index 0000000..82dce9f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala
@@ -0,0 +1,211 @@
+/*                     __                                               *\
+**     ________ ___   / /  ___     Scala API                            **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
+**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
+** /____/\___/_/ |_/____/_/ | |                                         **
+**                          |/                                          **
+\*                                                                      */
+
+package scala.tools.nsc.ast.parser.xml
+
+/** This is not a public trait - it contains common code shared
+ *  between the library-level XML parser and the compiler's.
+ *  All members should be accessed through those parsers.
+ */
+private[scala] trait MarkupParserCommon {
+  import Utility._
+  import scala.reflect.internal.Chars.SU
+
+  protected def unreachable = scala.sys.error("Cannot be reached.")
+
+  type PositionType     // Int, Position
+  type ElementType      // NodeSeq, Tree
+  type NamespaceType    // NamespaceBinding, Any
+  type AttributesType   // (MetaData, NamespaceBinding), mutable.Map[String, Tree]
+
+  def mkAttributes(name: String, pscope: NamespaceType): AttributesType
+  def mkProcInstr(position: PositionType, name: String, text: String): ElementType
+
+  /** parse a start or empty tag.
+   *  [40] STag         ::= '<' Name { S Attribute } [S]
+   *  [44] EmptyElemTag ::= '<' Name { S Attribute } [S]
+   */
+  protected def xTag(pscope: NamespaceType): (String, AttributesType) = {
+    val name = xName
+    xSpaceOpt()
+
+    (name, mkAttributes(name, pscope))
+  }
+
+  /** '<?' ProcInstr ::= Name [S ({Char} - ({Char} '?>' {Char}))] '?>'
+   *
+   * see [15]
+   */
+  def xProcInstr: ElementType = {
+    val n = xName
+    xSpaceOpt()
+    xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>")
+  }
+
+  /** Attribute value, terminated by either `'` or `"`. The value may not contain `<`.
+   *  @param endCh either `'` or `"`
+   */
+  def xAttributeValue(endCh: Char): String = {
+    val buf = new StringBuilder
+    while (ch != endCh) {
+      // well-formedness constraint
+      if (ch == '<') return errorAndResult("'<' not allowed in attrib value", "")
+      else if (ch == SU) truncatedError("")
+      else buf append ch_returning_nextch
+    }
+    ch_returning_nextch
+    // @todo: normalize attribute value
+    buf.toString
+  }
+
+  /** [42]  '<' xmlEndTag ::=  '<' '/' Name S? '>'
+   */
+  def xEndTag(startName: String) {
+    xToken('/')
+    if (xName != startName)
+      errorNoEnd(startName)
+
+    xSpaceOpt()
+    xToken('>')
+  }
+
+  /** actually, Name ::= (Letter | '_' | ':') (NameChar)*  but starting with ':' cannot happen
+   *  Name ::= (Letter | '_') (NameChar)*
+   *
+   *  see  [5] of XML 1.0 specification
+   *
+   *  pre-condition:  ch != ':' // assured by definition of XMLSTART token
+   *  post-condition: the name neither starts nor ends in ':'
+   */
+  def xName: String = {
+    if (ch == SU)
+      truncatedError("")
+    else if (!isNameStart(ch))
+      return errorAndResult("name expected, but char '%s' cannot start a name" format ch, "")
+
+    val buf = new StringBuilder
+
+    do buf append ch_returning_nextch
+    while (isNameChar(ch))
+
+    if (buf.last == ':') {
+      reportSyntaxError( "name cannot end in ':'" )
+      buf.toString dropRight 1
+    }
+    else buf.toString
+  }
+
+  /** CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
+   *            | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
+   *
+   * see [66]
+   */
+  def xCharRef(ch: () => Char, nextch: () => Unit): String =
+    Utility.parseCharRef(ch, nextch, reportSyntaxError _, truncatedError _)
+
+  def xCharRef(it: Iterator[Char]): String = {
+    var c = it.next()
+    Utility.parseCharRef(() => c, () => { c = it.next() }, reportSyntaxError _, truncatedError _)
+  }
+
+  def xCharRef: String = xCharRef(() => ch, () => nextch())
+
+  /** Create a lookahead reader which does not influence the input */
+  def lookahead(): BufferedIterator[Char]
+
+  /** The library and compiler parsers had the interesting distinction of
+   *  different behavior for nextch (a function for which there are a total
+   *  of two plausible behaviors, so we know the design space was fully
+   *  explored.) One of them returned the value of nextch before the increment
+   *  and one of them the new value.  So to unify code we have to at least
+   *  temporarily abstract over the nextchs.
+   */
+  def ch: Char
+  def nextch(): Unit
+  protected def ch_returning_nextch: Char
+  def eof: Boolean
+
+  // def handle: HandleType
+  var tmppos: PositionType
+
+  def xHandleError(that: Char, msg: String): Unit
+  def reportSyntaxError(str: String): Unit
+  def reportSyntaxError(pos: Int, str: String): Unit
+
+  def truncatedError(msg: String): Nothing
+  def errorNoEnd(tag: String): Nothing
+
+  protected def errorAndResult[T](msg: String, x: T): T = {
+    reportSyntaxError(msg)
+    x
+  }
+
+  def xToken(that: Char) {
+    if (ch == that) nextch()
+    else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch))
+  }
+  def xToken(that: Seq[Char]) { that foreach xToken }
+
+  /** scan [S] '=' [S]*/
+  def xEQ() = { xSpaceOpt(); xToken('='); xSpaceOpt() }
+
+  /** skip optional space S? */
+  def xSpaceOpt() = while (isSpace(ch) && !eof) nextch()
+
+  /** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
+  def xSpace() =
+    if (isSpace(ch)) { nextch(); xSpaceOpt() }
+    else xHandleError(ch, "whitespace expected")
+
+  /** Apply a function and return the passed value */
+  def returning[T](x: T)(f: T => Unit): T = { f(x); x }
+
+  /** Execute body with a variable saved and restored after execution */
+  def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = {
+    val saved = getter
+    try body
+    finally setter(saved)
+  }
+
+  /** Take characters from input stream until given String "until"
+   *  is seen.  Once seen, the accumulated characters are passed
+   *  along with the current Position to the supplied handler function.
+   */
+  protected def xTakeUntil[T](
+    handler: (PositionType, String) => T,
+    positioner: () => PositionType,
+    until: String): T =
+  {
+    val sb = new StringBuilder
+    val head = until.head
+    val rest = until.tail
+
+    while (true) {
+      if (ch == head && peek(rest))
+        return handler(positioner(), sb.toString)
+      else if (ch == SU)
+        truncatedError("")  // throws TruncatedXMLControl in compiler
+
+      sb append ch
+      nextch()
+    }
+    unreachable
+  }
+
+  /** Create a non-destructive lookahead reader and see if the head
+   *  of the input would match the given String.  If yes, return true
+   *  and drop the entire String from input; if no, return false
+   *  and leave input unchanged.
+   */
+  private def peek(lookingFor: String): Boolean =
+    (lookahead() take lookingFor.length sameElements lookingFor.iterator) && {
+      // drop the chars from the real reader (all lookahead + orig)
+      (0 to lookingFor.length) foreach (_ => nextch())
+      true
+    }
+}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala b/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala
new file mode 100755
index 0000000..6dcfa17
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala
@@ -0,0 +1,163 @@
+/*                     __                                               *\
+**     ________ ___   / /  ___     Scala API                            **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
+**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
+** /____/\___/_/ |_/____/_/ | |                                         **
+**                          |/                                          **
+\*                                                                      */
+
+package scala.tools.nsc.ast.parser.xml
+
+import scala.collection.mutable
+
+
+/**
+ * The `Utility` object provides utility functions for processing instances
+ * of bound and unbound XML classes, as well as escaping text nodes.
+ *
+ * @author Burak Emir
+ */
+object Utility {
+  import scala.reflect.internal.Chars.SU
+
+  private val unescMap = Map(
+    "lt"    -> '<',
+    "gt"    -> '>',
+    "amp"   -> '&',
+    "quot"  -> '"',
+    "apos"  -> '\''
+  )
+
+  /**
+   * Appends the unescaped string to `s`: `amp` becomes `&`,
+   * `lt` becomes `<`, etc.
+   *
+   * @return    `'''null'''` if `ref` was not a predefined entity.
+   */
+  private final def unescape(ref: String, s: StringBuilder): StringBuilder =
+    ((unescMap get ref) map (s append _)).orNull
+
+  def parseAttributeValue[T](value: String, text: String => T, entityRef: String => T): List[T] = {
+    val sb  = new StringBuilder
+    var rfb: StringBuilder = null
+    val nb = new mutable.ListBuffer[T]()
+
+    val it = value.iterator
+    while (it.hasNext) {
+      var c = it.next()
+      // entity! flush buffer into text node
+      if (c == '&') {
+        c = it.next()
+        if (c == '#') {
+          c = it.next()
+          val theChar = parseCharRef ({ ()=> c },{ () => c = it.next() },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)})
+          sb.append(theChar)
+        }
+        else {
+          if (rfb eq null) rfb = new StringBuilder()
+          rfb append c
+          c = it.next()
+          while (c != ';') {
+            rfb.append(c)
+            c = it.next()
+          }
+          val ref = rfb.toString()
+          rfb.clear()
+          unescape(ref,sb) match {
+            case null =>
+              if (!sb.isEmpty) {  // flush buffer
+                nb += text(sb.toString())
+                sb.clear()
+              }
+              nb += entityRef(ref) // add entityref
+            case _ =>
+          }
+        }
+      }
+      else sb append c
+    }
+
+    if(!sb.isEmpty) // flush buffer
+      nb += text(sb.toString())
+
+    nb.toList
+  }
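A rough usage sketch for `parseAttributeValue`, assuming the compiler classes are on the classpath; `Left`/`Right` merely tag text runs versus unresolved entity references:

    import scala.tools.nsc.ast.parser.xml.Utility

    val parts = Utility.parseAttributeValue[Either[String, String]](
      "1 &lt; 2 &amp; &undefined; 3",
      text => Left(text),    // contiguous text, with predefined entities unescaped
      ref  => Right(ref))    // entity references that are not predefined
    // parts == List(Left("1 < 2 & "), Right("undefined"), Left(" 3"))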
+
+  /**
+   * {{{
+   *   CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
+   *             | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
+   * }}}
+   * See [66]
+   */
+  def parseCharRef(ch: () => Char, nextch: () => Unit, reportSyntaxError: String => Unit, reportTruncatedError: String => Unit): String = {
+    val hex  = (ch() == 'x') && { nextch(); true }
+    val base = if (hex) 16 else 10
+    var i = 0
+    while (ch() != ';') {
+      ch() match {
+        case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
+          i = i * base + ch().asDigit
+        case 'a' | 'b' | 'c' | 'd' | 'e' | 'f'
+           | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' =>
+          if (! hex)
+            reportSyntaxError("hex char not allowed in decimal char ref\n" +
+                              "Did you mean to write &#x ?")
+          else
+            i = i * base + ch().asDigit
+        case SU =>
+          reportTruncatedError("")
+        case _ =>
+          reportSyntaxError("character '" + ch() + "' not allowed in char ref\n")
+      }
+      nextch()
+    }
+    new String(Array(i), 0, 1)
+  }
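For illustration, `parseCharRef` expects the cursor to sit on the first character after "&#" and stops at the terminating ';'. A minimal sketch driving it from an iterator, with error handlers that simply throw:

    val it = "x41;".iterator   // reference body after "&#": hexadecimal 0x41
    var c = it.next()
    val decoded = Utility.parseCharRef(
      () => c, () => c = it.next(),
      msg => throw new RuntimeException(msg),
      msg => throw new RuntimeException(msg))
    // decoded == "A"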
+
+  /** {{{
+   *  (#x20 | #x9 | #xD | #xA)
+   *  }}} */
+  final def isSpace(ch: Char): Boolean = ch match {
+    case '\u0009' | '\u000A' | '\u000D' | '\u0020' => true
+    case _                                         => false
+  }
+
+  /** {{{
+   *  NameChar ::= Letter | Digit | '.' | '-' | '_' | ':'
+   *             | CombiningChar | Extender
+   *  }}}
+   *  See [4] and Appendix B of XML 1.0 specification.
+  */
+  def isNameChar(ch: Char) = {
+    import java.lang.Character._
+    // The constants represent groups Mc, Me, Mn, Lm, and Nd.
+
+    isNameStart(ch) || (getType(ch).toByte match {
+      case COMBINING_SPACING_MARK |
+              ENCLOSING_MARK | NON_SPACING_MARK |
+              MODIFIER_LETTER | DECIMAL_DIGIT_NUMBER => true
+      case _                                         => ".-:" contains ch
+    })
+  }
+
+  /** {{{
+   *  NameStart ::= ( Letter | '_' )
+   *  }}}
+   *  where Letter means a character in one of the Unicode general
+   *  categories `{ Ll, Lu, Lo, Lt, Nl }`.
+   *
+   *  We do not allow a name to start with `:`.
+   *  See [3] and Appendix B of XML 1.0 specification
+   */
+  def isNameStart(ch: Char) = {
+    import java.lang.Character._
+
+    getType(ch).toByte match {
+      case LOWERCASE_LETTER |
+              UPPERCASE_LETTER | OTHER_LETTER |
+              TITLECASE_LETTER | LETTER_NUMBER => true
+      case _                                   => ch == '_'
+    }
+  }
+}
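As a small example of how the two predicates combine, a namespace-less XML name can be checked by testing its first character with `isNameStart` and the rest with `isNameChar`; `isValidXmlName` below is a hypothetical helper, not part of this object:

    import scala.tools.nsc.ast.parser.xml.Utility

    def isValidXmlName(s: String): Boolean =
      s.nonEmpty && Utility.isNameStart(s.head) && s.tail.forall(Utility.isNameChar)

    // isValidXmlName("foo-bar") == true
    // isValidXmlName("1foo")    == false  (a digit cannot start a name)
    // isValidXmlName(":foo")    == false  (':' is deliberately rejected as a start character)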
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index fc5d437..32b5a98 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -7,50 +7,34 @@ package scala.tools.nsc
 package backend
 
 import io.AbstractFile
-import util.{ClassPath,JavaClassPath,MergedClassPath,DeltaClassPath}
-import util.ClassPath.{ JavaContext, DefaultJavaContext }
+import util.{ClassPath,MergedClassPath,DeltaClassPath}
 import scala.tools.util.PathResolver
 
 trait JavaPlatform extends Platform {
+  val global: Global
+  override val symbolTable: global.type = global
   import global._
   import definitions._
 
-  type BinaryRepr = AbstractFile
+  private var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
 
-  private var currentClassPath: Option[MergedClassPath[BinaryRepr]] = None
-
-  def classPath: ClassPath[BinaryRepr] = {
+  def classPath: ClassPath[AbstractFile] = {
     if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result)
     currentClassPath.get
   }
 
   /** Update classpath with a substituted subentry */
-  def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]]) =
+  def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]) =
     currentClassPath = Some(new DeltaClassPath(currentClassPath.get, subst))
 
-  def rootLoader = new loaders.PackageLoader(classPath.asInstanceOf[ClassPath[platform.BinaryRepr]])
-    // [Martin] Why do we need a cast here?
-    // The problem is that we cannot specify at this point that global.platform should be of type JavaPlatform.
-    // So we cannot infer that global.platform.BinaryRepr is AbstractFile.
-    // Ideally, we should be able to write at the top of the JavaPlatform trait:
-    //   val global: Global { val platform: JavaPlatform }
-    //   import global._
-    // Right now, this does nothing because the concrete definition of platform in Global
-    // replaces the tighter abstract definition here. If we had DOT typing rules, the two
-    // types would be conjoined and everything would work out. Yet another reason to push for DOT.
-
-  private def depAnalysisPhase =
-    if (settings.make.isDefault) Nil
-    else List(dependencyAnalysis)
-
   private def classEmitPhase =
-    if (settings.target.value == "jvm-1.5-fjbg") genJVM
+    if (settings.isBCodeActive) genBCode
     else genASM
 
   def platformPhases = List(
     flatten,        // get rid of inner classes
     classEmitPhase  // generate .class files
-  ) ++ depAnalysisPhase
+  )
 
   lazy val externalEquals          = getDecl(BoxesRunTimeClass, nme.equals_)
   lazy val externalEqualsNumNum    = getDecl(BoxesRunTimeClass, nme.equalsNumNum)
@@ -71,10 +55,7 @@ trait JavaPlatform extends Platform {
     (sym isNonBottomSubClass BoxedBooleanClass)
   }
 
-  def newClassLoader(bin: AbstractFile): loaders.SymbolLoader =
-    new loaders.ClassfileLoader(bin)
-
-  def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean = true
+  def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean = true
 
   def needCompile(bin: AbstractFile, src: AbstractFile) =
     src.lastModified >= bin.lastModified
diff --git a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala b/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
deleted file mode 100644
index 4493685..0000000
--- a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package backend
-
-import ch.epfl.lamp.compiler.{ msil => msillib }
-import util.{ ClassPath, MsilClassPath }
-import msil.GenMSIL
-import io.{ AbstractFile, MsilFile }
-
-trait MSILPlatform extends Platform {
-  import global._
-  import definitions.{ ComparatorClass, BoxedNumberClass, getMember }
-
-  type BinaryRepr = MsilFile
-
-  if (settings.verbose.value)
-    inform("[AssemRefs = " + settings.assemrefs.value + "]")
-
-  // phaseName = "msil"
-  object genMSIL extends {
-    val global: MSILPlatform.this.global.type = MSILPlatform.this.global
-    val runsAfter = List[String]("dce")
-    val runsRightAfter = None
-  } with GenMSIL
-
-  lazy val classPath = MsilClassPath.fromSettings(settings)
-  def rootLoader = new loaders.PackageLoader(classPath.asInstanceOf[ClassPath[platform.BinaryRepr]])
-    // See discussion in JavaPlatForm for why we need a cast here.
-
-  /** Update classpath with a substituted subentry */
-  def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]]) =
-    throw new UnsupportedOperationException("classpath invalidations not supported on MSIL")
-
-  def platformPhases = List(
-    genMSIL   // generate .msil files
-  )
-
-  lazy val externalEquals = getMember(ComparatorClass.companionModule, nme.equals_)
-  def isMaybeBoxed(sym: Symbol) = sym isNonBottomSubClass BoxedNumberClass
-
-  def newClassLoader(bin: MsilFile): loaders.SymbolLoader =  new loaders.MsilFileLoader(bin)
-
-  /**
-   * Tells whether a class should be loaded and entered into the package
-   * scope. On .NET, this method returns `false` for all synthetic classes
-   * (anonymous classes, implementation classes, module classes), their
-   * symtab is encoded in the pickle of another class.
-   */
-  def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean = {
-    if (cls.binary.isDefined) {
-      val typ = cls.binary.get.msilType
-      if (typ.IsDefined(loaders.clrTypes.SCALA_SYMTAB_ATTR, false)) {
-        val attrs = typ.GetCustomAttributes(loaders.clrTypes.SCALA_SYMTAB_ATTR, false)
-        assert(attrs.length == 1, attrs.length)
-        val a = attrs(0).asInstanceOf[msillib.Attribute]
-        // symtab_constr takes a byte array argument (the pickle), i.e. typ has a pickle.
-        // otherwise, symtab_default_constr was used, which marks typ as scala-synthetic.
-        a.getConstructor() == loaders.clrTypes.SYMTAB_CONSTR
-      } else true // always load non-scala types
-    } else true // always load source
-  }
-
-  def needCompile(bin: MsilFile, src: AbstractFile) =
-    false // always use compiled file on .net
-}
diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala
index e2b22c0..499f8a9 100644
--- a/src/compiler/scala/tools/nsc/backend/Platform.scala
+++ b/src/compiler/scala/tools/nsc/backend/Platform.scala
@@ -12,20 +12,14 @@ import io.AbstractFile
 /** The platform dependent pieces of Global.
  */
 trait Platform {
-  val global: Global
-  import global._
-
-  /** The binary classfile representation type */
-  type BinaryRepr
+  val symbolTable: symtab.SymbolTable
+  import symbolTable._
 
   /** The compiler classpath. */
-  def classPath: ClassPath[BinaryRepr]
-
-  /** The root symbol loader. */
-  def rootLoader: LazyType
+  def classPath: ClassPath[AbstractFile]
 
   /** Update classpath with a substitution that maps entries to entries */
-  def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]])
+  def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]])
 
   /** Any platform-specific phases. */
   def platformPhases: List[SubComponent]
@@ -36,16 +30,13 @@ trait Platform {
   /** The various ways a boxed primitive might materialize at runtime. */
   def isMaybeBoxed(sym: Symbol): Boolean
 
-  /** Create a new class loader to load class file `bin` */
-  def newClassLoader(bin: BinaryRepr): loaders.SymbolLoader
-
   /**
    * Tells whether a class should be loaded and entered into the package
    * scope. On .NET, this method returns `false` for all synthetic classes
    * (anonymous classes, implementation classes, module classes), their
    * symtab is encoded in the pickle of another class.
    */
-  def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean
+  def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean
 
   /**
    * Tells whether a class with both a binary and a source representation
@@ -53,6 +44,6 @@ trait Platform {
    * on the JVM similar to javac, i.e. if the source file is newer than the classfile,
    * a re-compile is triggered. On .NET by contrast classfiles always take precedence.
    */
-  def needCompile(bin: BinaryRepr, src: AbstractFile): Boolean
+  def needCompile(bin: AbstractFile, src: AbstractFile): Boolean
 }
 
diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index 8cbb5bc..b8ddb65 100644
--- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -3,10 +3,10 @@
  * @author  Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package backend
 
-import scala.tools.nsc.backend.icode._
 import scala.collection.{ mutable, immutable }
 
 /** Scala primitive operations are represented as methods in `Any` and
@@ -442,15 +442,17 @@ abstract class ScalaPrimitives {
   }
 
   def addPrimitives(cls: Symbol, method: Name, code: Int) {
-    val tpe = cls.info
-    val sym = tpe.member(method)
-    if (sym == NoSymbol)
-      inform("Unknown primitive method " + cls + "." + method)
-    for (s <- sym.alternatives)
-      addPrimitive(
-        s,
-        if (code == ADD && s.info.paramTypes.head == definitions.StringClass.tpe) CONCAT
-        else code)
+    val alts = (cls.info member method).alternatives
+    if (alts.isEmpty)
+      inform(s"Unknown primitive method $cls.$method")
+    else alts foreach (s =>
+      addPrimitive(s,
+        s.info.paramTypes match {
+          case tp :: _ if code == ADD && tp =:= StringTpe => CONCAT
+          case _                                          => code
+        }
+      )
+    )
   }
 
   def isCoercion(code: Int): Boolean = (code >= B2B) && (code <= D2D)
@@ -495,8 +497,8 @@ abstract class ScalaPrimitives {
   def isArraySet(code: Int): Boolean = code match {
     case ZARRAY_SET | BARRAY_SET | SARRAY_SET | CARRAY_SET |
          IARRAY_SET | LARRAY_SET | FARRAY_SET | DARRAY_SET |
-         OARRAY_SET | UPDATE => true;
-    case _ => false;
+         OARRAY_SET | UPDATE => true
+    case _ => false
   }
 
   /** Check whether the given code is a comparison operator */
@@ -515,7 +517,7 @@ abstract class ScalaPrimitives {
          DIV | MOD       => true; // binary
     case OR  | XOR | AND |
          LSL | LSR | ASR => true; // bitwise
-    case _ => false;
+    case _ => false
   }
 
   def isLogicalOp(code: Int): Boolean = code match {
@@ -565,7 +567,7 @@ abstract class ScalaPrimitives {
     import definitions._
     val code = getPrimitive(fun)
 
-    def elementType = beforeTyper {
+    def elementType = enteringTyper {
       val arrayParent = tpe :: tpe.parents collectFirst {
         case TypeRef(_, ArrayClass, elem :: Nil) => elem
       }
diff --git a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
index 798a80e..45ca39f 100644
--- a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
+++ b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
@@ -6,8 +6,7 @@
 package scala.tools.nsc
 package backend
 
-import scala.tools.nsc.ast._
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
 
 /**
  * Simple implementation of a worklist algorithm. A processing
@@ -32,8 +31,6 @@ trait WorklistAlgorithm {
    * Run the iterative algorithm until the worklist is empty.
    * The initializer is run once before the loop starts and should
    * initialize the worklist.
-   *
-   * @param initWorklist ...
    */
   def run(initWorklist: => Unit) = {
     initWorklist
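A minimal standalone illustration of the worklist idea described above (not this trait's API; just the general pattern of draining a queue that may grow while it is being processed):

    import scala.collection.mutable

    val worklist = mutable.Queue(1, 2, 3)
    while (worklist.nonEmpty) {
      val n = worklist.dequeue()
      if (n < 10) worklist += n * 2   // processing an element may enqueue more work
    }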
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index d50d4cd..f955169 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -8,8 +8,7 @@ package backend
 package icode
 
 import scala.collection.{ mutable, immutable }
-import mutable.{ ListBuffer, ArrayBuffer }
-import scala.reflect.internal.util.{ Position, NoPosition }
+import mutable.ListBuffer
 import backend.icode.analysis.ProgramPoint
 import scala.language.postfixOps
 
@@ -17,8 +16,7 @@ trait BasicBlocks {
   self: ICodes =>
 
   import opcodes._
-  import global.{ ifDebug, settings, log, nme }
-  import nme.isExceptionResultName
+  import global._
 
   /** Override Array creation for efficiency (to not go through reflection). */
   private implicit val instructionTag: scala.reflect.ClassTag[Instruction] = new scala.reflect.ClassTag[Instruction] {
@@ -38,7 +36,7 @@ trait BasicBlocks {
 
     import BBFlags._
 
-    def code = method.code
+    def code = if (method eq null) NoCode else method.code
 
     private final class SuccessorList() {
       private var successors: List[BasicBlock] = Nil
@@ -68,10 +66,10 @@ trait BasicBlocks {
           addBlock(scratchBlocks.head)
           scratchBlocks = scratchBlocks.tail
         }
-        /** Return a list of successors for 'b' that come from exception handlers
-         *  covering b's (non-exceptional) successors. These exception handlers
-         *  might not cover 'b' itself. This situation corresponds to an
-         *  exception being thrown as the first thing of one of b's successors.
+        /* Return a list of successors for 'b' that come from exception handlers
+         * covering b's (non-exceptional) successors. These exception handlers
+         * might not cover 'b' itself. This situation corresponds to an
+         * exception being thrown as the first thing of one of b's successors.
          */
         while (scratchHandlers ne Nil) {
           val handler = scratchHandlers.head
@@ -122,7 +120,7 @@ trait BasicBlocks {
     def closed: Boolean = hasFlag(CLOSED)
     def closed_=(b: Boolean) = if (b) setFlag(CLOSED) else resetFlag(CLOSED)
 
-    /** When set, the <code>emit</code> methods will be ignored. */
+    /** When set, the `emit` methods will be ignored. */
     def ignore: Boolean = hasFlag(IGNORING)
     def ignore_=(b: Boolean) = if (b) setFlag(IGNORING) else resetFlag(IGNORING)
 
@@ -260,13 +258,9 @@ trait BasicBlocks {
       }
     }
 
-    /** Replaces <code>oldInstr</code> with <code>is</code>. It does not update
+    /** Replaces `oldInstr` with `is`. It does not update
      *  the position field in the newly inserted instructions, so it behaves
      *  differently than the one-instruction versions of this function.
-     *
-     *  @param iold ..
-     *  @param is   ..
-     *  @return     ..
      */
     def replaceInstruction(oldInstr: Instruction, is: List[Instruction]): Boolean = {
       assert(closed, "Instructions can be replaced only after the basic block is closed")
@@ -280,17 +274,7 @@ trait BasicBlocks {
       }
     }
 
-    /** Insert instructions in 'is' immediately after index 'idx'. */
-    def insertAfter(idx: Int, is: List[Instruction]) {
-      assert(closed, "Instructions can be replaced only after the basic block is closed")
-
-      instrs = instrs.patch(idx + 1, is, 0)
-      code.touched = true
-    }
-
     /** Removes instructions found at the given positions.
-     *
-     *  @param positions ...
      */
     def removeInstructionsAt(positions: Int*) {
       assert(closed, this)
@@ -311,8 +295,6 @@ trait BasicBlocks {
     }
 
     /** Replaces all instructions found in the map.
-     *
-     *  @param map ...
      */
     def subst(map: Map[Instruction, Instruction]): Unit =
       if (!closed)
@@ -344,21 +326,17 @@ trait BasicBlocks {
      *  is closed, which sets the DIRTYSUCCS flag.
      */
     def emit(instr: Instruction, pos: Position) {
-/*      if (closed) {
-        print()
-        Console.println("trying to emit: " + instr)
-      } */
       assert(!closed || ignore, this)
 
       if (ignore) {
-        if (settings.debug.value) {
-          /** Trying to pin down what it's likely to see after a block has been
-           *  put into ignore mode so we hear about it if there's a problem.
+        if (settings.debug) {
+          /* Trying to pin down what it's likely to see after a block has been
+           * put into ignore mode so we hear about it if there's a problem.
            */
           instr match {
-            case JUMP(_) | RETURN(_) | THROW(_) | SCOPE_EXIT(_)               => // ok
-            case STORE_LOCAL(local) if isExceptionResultName(local.sym.name)  => // ok
-            case x => log("Ignoring instruction, possibly at our peril, at " + pos + ": " + x)
+            case JUMP(_) | RETURN(_) | THROW(_) | SCOPE_EXIT(_)                  => // ok
+            case STORE_LOCAL(local) if nme.isExceptionResultName(local.sym.name) => // ok
+            case x                                                               => log("Ignoring instruction, possibly at our peril, at " + pos + ": " + x)
           }
         }
       }
@@ -403,7 +381,6 @@ trait BasicBlocks {
     /** Close the block */
     def close() {
       assert(!closed || ignore, this)
-      assert(instructionList.nonEmpty, "Empty block: " + this)
       if (ignore && closed) { // redundant `ignore &&` for clarity -- we should never be in state `!ignore && closed`
         // not doing anything to this block is important...
         // because the else branch reverses innocent blocks, which is wrong when they're in ignore mode (and closed)
@@ -413,9 +390,38 @@ trait BasicBlocks {
         setFlag(DIRTYSUCCS)
         instructionList = instructionList.reverse
         instrs = instructionList.toArray
+        if (instructionList.isEmpty) {
+          debuglog(s"Removing empty block $this")
+          code removeBlock this
+        }
+      }
+    }
+
+    /**
+     * If `cond` is true, closes this block, enters ignore mode, and removes the block from
+     * the enclosing method's code. Used to allow a block to be started and then cancelled
+     * when it is discovered to be unreachable.
+     */
+    def killIf(cond: Boolean) {
+      if (!settings.YdisableUnreachablePrevention && cond) {
+        debuglog(s"Killing block $this")
+        assert(instructionList.isEmpty, s"Killing a non empty block $this")
+        // only checked under debug because fetching predecessor list is moderately expensive
+        if (settings.debug)
+          assert(predecessors.isEmpty, s"Killing block $this which is referred to from ${predecessors.mkString}")
+
+        close()
+        enterIgnoreMode()
       }
     }
 
+    /**
+     * Same as killIf but with the logic of the condition reversed
+     */
+    def killUnless(cond: Boolean) {
+      this killIf !cond
+    }
+
     def open() {
       assert(closed, this)
       closed = false
@@ -441,20 +447,11 @@ trait BasicBlocks {
       ignore = true
     }
 
-    def exitIgnoreMode() {
-      assert(ignore, "Exit ignore mode when not in ignore mode: " + this)
-      ignore = false
-    }
-
     /** Return the last instruction of this basic block. */
     def lastInstruction =
       if (closed) instrs(instrs.length - 1)
       else instructionList.head
 
-    def firstInstruction =
-      if (closed) instrs(0)
-      else instructionList.last
-
     def exceptionSuccessors: List[BasicBlock] =
       exceptionSuccessorsForBlock(this)
 
@@ -474,16 +471,17 @@ trait BasicBlocks {
 
     def directSuccessors: List[BasicBlock] =
       if (isEmpty) Nil else lastInstruction match {
-        case JUMP(whereto)              => whereto :: Nil
-        case CJUMP(succ, fail, _, _)    => fail :: succ :: Nil
-        case CZJUMP(succ, fail, _, _)   => fail :: succ :: Nil
-        case SWITCH(_, labels)          => labels
-        case RETURN(_)                  => Nil
-        case THROW(_)                   => Nil
-        case _ =>
+        case JUMP(whereto)            => whereto :: Nil
+        case CJUMP(succ, fail, _, _)  => fail :: succ :: Nil
+        case CZJUMP(succ, fail, _, _) => fail :: succ :: Nil
+        case SWITCH(_, labels)        => labels
+        case RETURN(_)                => Nil
+        case THROW(_)                 => Nil
+        case _                        =>
           if (closed)
-            dumpClassesAndAbort("The last instruction is not a control flow instruction: " + lastInstruction)
-          else Nil
+            devWarning(s"$lastInstruction/${lastInstruction.getClass.getName} is not a control flow instruction")
+
+          Nil
       }
 
     /** Returns the predecessors of this block.     */
@@ -502,17 +500,6 @@ trait BasicBlocks {
 
     override def hashCode = label * 41 + code.hashCode
 
-    // Instead of it, rather use a printer
-    def print() { print(java.lang.System.out) }
-
-    def print(out: java.io.PrintStream) {
-      out.println("block #"+label+" :")
-      foreach(i => out.println("  " + i))
-      out.print("Successors: ")
-      successors.foreach((x: BasicBlock) => out.print(" "+x.label.toString()))
-      out.println()
-    }
-
     private def succString = if (successors.isEmpty) "[S: N/A]" else successors.distinct.mkString("[S: ", ", ", "]")
     private def predString = if (predecessors.isEmpty) "[P: N/A]" else predecessors.distinct.mkString("[P: ", ", ", "]")
 
@@ -532,18 +519,6 @@ trait BasicBlocks {
 }
 
 object BBFlags {
-  val flagMap = Map[Int, String](
-    LOOP_HEADER -> "loopheader",
-    IGNORING    -> "ignore",
-    EX_HEADER   -> "exheader",
-    CLOSED      -> "closed",
-    DIRTYSUCCS  -> "dirtysuccs",
-    DIRTYPREDS  -> "dirtypreds"
-  )
-  def flagsToString(flags: Int) = {
-    flagMap collect { case (bit, name) if (bit & flags) != 0 => "<" + name + ">" } mkString " "
-  }
-
   /** This block is a loop header (was translated from a while). */
   final val LOOP_HEADER = (1 << 0)
 
@@ -561,4 +536,16 @@ object BBFlags {
 
   /** Code has been changed, recompute predecessors. */
   final val DIRTYPREDS  = (1 << 5)
+
+  val flagMap = Map[Int, String](
+    LOOP_HEADER -> "loopheader",
+    IGNORING    -> "ignore",
+    EX_HEADER   -> "exheader",
+    CLOSED      -> "closed",
+    DIRTYSUCCS  -> "dirtysuccs",
+    DIRTYPREDS  -> "dirtypreds"
+  )
+  def flagsToString(flags: Int) = {
+    flagMap collect { case (bit, name) if (bit & flags) != 0 => "<" + name + ">" } mkString " "
+  }
 }
diff --git a/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
index 0856f2f..8bcdb6d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
@@ -3,10 +3,8 @@
  * @author  Martin Odersky
  */
 
-
 package scala.tools.nsc
 package backend
 package icode
 
 class CheckerException(s: String) extends Exception(s)
-
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
index 2cebf7a..7243264 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
 package backend
 package icode
 
-import scala.collection.{ mutable, immutable }
+import scala.collection.immutable
 
 /**
  * Exception handlers are pieces of code that `handle` exceptions on
@@ -24,14 +24,11 @@ trait ExceptionHandlers {
 
   class ExceptionHandler(val method: IMethod, val label: TermName, val cls: Symbol, val pos: Position) {
     def loadExceptionClass = if (cls == NoSymbol) ThrowableClass else cls
-    private var _startBlock: BasicBlock = _;
-    var finalizer: Finalizer = _;
-
-    /** Needed for the MSIL backend. */
-    var resultKind: TypeKind = _;
+    private var _startBlock: BasicBlock = _
+    var finalizer: Finalizer = _
 
     def setStartBlock(b: BasicBlock) = {
-      _startBlock = b;
+      _startBlock = b
       b.exceptionHandlerStart = true
     }
     def startBlock = _startBlock
@@ -49,11 +46,11 @@ trait ExceptionHandlers {
 
     /** The body of this exception handler. May contain 'dead' blocks (which will not
       * make it into generated code because linearizers may not include them) */
-    var blocks: List[BasicBlock] = Nil;
+    var blocks: List[BasicBlock] = Nil
 
-    def addBlock(b: BasicBlock): Unit = blocks = b :: blocks;
+    def addBlock(b: BasicBlock): Unit = blocks = b :: blocks
 
-    override def toString() = "exh_" + label + "(" + cls.simpleName + ")";
+    override def toString() = "exh_" + label + "(" + cls.simpleName + ")"
 
     /** A standard copy constructor */
     def this(other: ExceptionHandler) = {
@@ -71,10 +68,4 @@ trait ExceptionHandlers {
     override def toString() = "finalizer_" + label
     override def dup: Finalizer = new Finalizer(method, label, pos)
   }
-
-  object NoFinalizer extends Finalizer(null, newTermNameCached("<no finalizer>"), NoPosition) {
-    override def startBlock: BasicBlock             = sys.error("NoFinalizer cannot have a start block.");
-    override def setStartBlock(b: BasicBlock): Unit = sys.error("NoFinalizer cannot have a start block.");
-    override def dup = this
-  }
 }
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 71a5b85..1cea4be 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -4,7 +4,8 @@
  */
 
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package backend
 package icode
 
@@ -13,10 +14,8 @@ import scala.collection.mutable.{ ListBuffer, Buffer }
 import scala.tools.nsc.symtab._
 import scala.annotation.switch
 import PartialFunction._
-import scala.language.postfixOps
 
-/** This class ...
- *
+/**
  *  @author  Iulian Dragos
  *  @version 1.0
  */
@@ -24,12 +23,7 @@ abstract class GenICode extends SubComponent  {
   import global._
   import icodes._
   import icodes.opcodes._
-  import definitions.{
-    ArrayClass, ObjectClass, ThrowableClass, StringClass, StringModule, AnyRefClass,
-    Object_equals, Object_isInstanceOf, Object_asInstanceOf, ScalaRunTimeModule,
-    BoxedNumberClass, BoxedCharacterClass,
-    getMember
-  }
+  import definitions._
   import scalaPrimitives.{
     isArrayOp, isComparisonOp, isLogicalOp,
     isUniversalEqualityOp, isReferenceEqualityOp
@@ -41,7 +35,7 @@ abstract class GenICode extends SubComponent  {
   override def newPhase(prev: Phase) = new ICodePhase(prev)
 
   @inline private def debugassert(cond: => Boolean, msg: => Any) {
-    if (settings.debug.value)
+    if (settings.debug)
       assert(cond, msg)
   }
 
@@ -52,14 +46,17 @@ abstract class GenICode extends SubComponent  {
     var unit: CompilationUnit = NoCompilationUnit
 
     override def run() {
-      scalaPrimitives.init
-      classes.clear()
+      if (!settings.isBCodeActive) {
+        scalaPrimitives.init()
+        classes.clear()
+      }
       super.run()
     }
 
     override def apply(unit: CompilationUnit): Unit = {
+      if (settings.isBCodeActive) { return }
       this.unit = unit
-      unit.icode.clear
+      unit.icode.clear()
       informProgress("Generating icode for " + unit)
       gen(unit.body)
       this.unit = NoCompilationUnit
@@ -77,7 +74,7 @@ abstract class GenICode extends SubComponent  {
      *  it is the host class; otherwise the symbol's owner.
      */
     def findHostClass(selector: Type, sym: Symbol) = selector member sym.name match {
-      case NoSymbol   => log(s"Rejecting $selector as host class for $sym") ; sym.owner
+      case NoSymbol   => debuglog(s"Rejecting $selector as host class for $sym") ; sym.owner
       case _          => selector.typeSymbol
     }
 
@@ -93,7 +90,7 @@ abstract class GenICode extends SubComponent  {
         debuglog("Generating class: " + tree.symbol.fullName)
         val outerClass = ctx.clazz
         ctx setClass (new IClass(tree.symbol) setCompilationUnit unit)
-        addClassFields(ctx, tree.symbol);
+        addClassFields(ctx, tree.symbol)
         classes += (tree.symbol -> ctx.clazz)
         unit.icode += ctx.clazz
         gen(impl, ctx)
@@ -121,7 +118,7 @@ abstract class GenICode extends SubComponent  {
         m.native = m.symbol.hasAnnotation(definitions.NativeAttr)
 
         if (!m.isAbstractMethod && !m.native) {
-          ctx1 = genLoad(rhs, ctx1, m.returnType);
+          ctx1 = genLoad(rhs, ctx1, m.returnType)
 
           // reverse the order of the local variables, to match the source-order
           m.locals = m.locals.reverse
@@ -131,7 +128,7 @@ abstract class GenICode extends SubComponent  {
             case Return(_) => ()
             case EmptyTree =>
               globalError("Concrete method has no definition: " + tree + (
-                if (settings.debug.value) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")"
+                if (settings.debug) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")"
                 else "")
               )
             case _ => if (ctx1.bb.isEmpty)
@@ -139,7 +136,7 @@ abstract class GenICode extends SubComponent  {
             else
               ctx1.bb.closeWith(RETURN(m.returnType))
           }
-          if (!ctx1.bb.closed) ctx1.bb.close
+          if (!ctx1.bb.closed) ctx1.bb.close()
           prune(ctx1.method)
         } else
           ctx1.method.setCode(NoCode)
@@ -160,18 +157,13 @@ abstract class GenICode extends SubComponent  {
      * and not produce any value. Use genLoad for expressions which leave
      * a value on top of the stack.
      *
-     * @param tree ...
-     * @param ctx  ...
      * @return a new context. This is necessary for control flow instructions
      *         which may change the current basic block.
      */
     private def genStat(tree: Tree, ctx: Context): Context = tree match {
       case Assign(lhs @ Select(_, _), rhs) =>
         val isStatic = lhs.symbol.isStaticMember
-        var ctx1 = if (isStatic) ctx
-                   else if (forMSIL && msil_IsValuetypeInstField(lhs.symbol))
-                     msil_genLoadQualifierAddress(lhs, ctx)
-                   else genLoadQualifier(lhs, ctx)
+        var ctx1 = if (isStatic) ctx else genLoadQualifier(lhs, ctx)
 
         ctx1 = genLoad(rhs, ctx1, toTypeKind(lhs.symbol.info))
         ctx1.bb.emit(STORE_FIELD(lhs.symbol, isStatic), tree.pos)
@@ -188,12 +180,12 @@ abstract class GenICode extends SubComponent  {
     }
 
     private def genThrow(expr: Tree, ctx: Context): (Context, TypeKind) = {
-      require(expr.tpe <:< ThrowableClass.tpe, expr.tpe)
+      require(expr.tpe <:< ThrowableTpe, expr.tpe)
 
       val thrownKind = toTypeKind(expr.tpe)
       val ctx1       = genLoad(expr, ctx, thrownKind)
       ctx1.bb.emit(THROW(expr.tpe.typeSymbol), expr.pos)
-      ctx1.bb.enterIgnoreMode
+      ctx1.bb.enterIgnoreMode()
 
       (ctx1, NothingReference)
     }
@@ -231,10 +223,10 @@ abstract class GenICode extends SubComponent  {
 
         // binary operation
         case rarg :: Nil =>
-          resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil);
+          resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil)
           if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code))
             assert(resKind.isIntegralType | resKind == BOOL,
-                 resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1);
+                 resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1)
 
           ctx1 = genLoad(larg, ctx1, resKind)
           ctx1 = genLoad(rarg,
@@ -264,11 +256,6 @@ abstract class GenICode extends SubComponent  {
     }
 
     /** Generate primitive array operations.
-     *
-     *  @param tree ...
-     *  @param ctx  ...
-     *  @param code ...
-     *  @return     ...
      */
     private def genArrayOp(tree: Tree, ctx: Context, code: Int, expectedType: TypeKind): (Context, TypeKind) = {
       import scalaPrimitives._
@@ -283,14 +270,19 @@ abstract class GenICode extends SubComponent  {
       if (scalaPrimitives.isArrayGet(code)) {
         // load argument on stack
         debugassert(args.length == 1,
-                 "Too many arguments for array get operation: " + tree);
+                 "Too many arguments for array get operation: " + tree)
         ctx1 = genLoad(args.head, ctx1, INT)
         generatedType = elem
         ctx1.bb.emit(LOAD_ARRAY_ITEM(elementType), tree.pos)
+        // it's tempting to just drop array loads of type Null instead
+        // of adapting them but array accesses can cause
+        // ArrayIndexOutOfBounds so we can't. Besides, Array[Null]
+        // probably isn't common enough to figure out an optimization
+        adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
       }
       else if (scalaPrimitives.isArraySet(code)) {
         debugassert(args.length == 2,
-                 "Too many arguments for array set operation: " + tree);
+                 "Too many arguments for array set operation: " + tree)
         ctx1 = genLoad(args.head, ctx1, INT)
         ctx1 = genLoad(args.tail.head, ctx1, toTypeKind(args.tail.head.tpe))
         // the following line should really be here, but because of bugs in erasure
@@ -308,11 +300,8 @@ abstract class GenICode extends SubComponent  {
     }
     private def genSynchronized(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
       val Apply(fun, args) = tree
-      val monitor = ctx.makeLocal(tree.pos, ObjectClass.tpe, "monitor")
+      val monitor = ctx.makeLocal(tree.pos, ObjectTpe, "monitor")
       var monitorResult: Local = null
-
-      // if the synchronized block returns a result, store it in a local variable. just leaving
-      // it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks)
       val argTpe = args.head.tpe
       val hasResult = expectedType != UNIT
       if (hasResult)
@@ -345,7 +334,7 @@ abstract class GenICode extends SubComponent  {
               MONITOR_EXIT() setPos tree.pos,
               THROW(ThrowableClass)
             ))
-            exhCtx.bb.enterIgnoreMode
+            exhCtx.bb.enterIgnoreMode()
             exhCtx
           })), EmptyTree, tree)
 
@@ -359,9 +348,9 @@ abstract class GenICode extends SubComponent  {
     private def genLoadIf(tree: If, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
       val If(cond, thenp, elsep) = tree
 
-      var thenCtx = ctx.newBlock
-      var elseCtx = ctx.newBlock
-      val contCtx = ctx.newBlock
+      var thenCtx = ctx.newBlock()
+      var elseCtx = ctx.newBlock()
+      val contCtx = ctx.newBlock()
 
       genCond(cond, ctx, thenCtx, elseCtx)
 
@@ -386,12 +375,14 @@ abstract class GenICode extends SubComponent  {
         "I produce UNIT in a context where " + expectedType + " is expected!")
 
       // alternatives may be already closed by a tail-recursive jump
+      val contReachable = !(thenCtx.bb.ignore && elseCtx.bb.ignore)
       thenCtx.bb.closeWith(JUMP(contCtx.bb))
       elseCtx.bb.closeWith(
           if (elsep == EmptyTree) JUMP(contCtx.bb)
           else JUMP(contCtx.bb) setPos tree.pos
         )
 
+      contCtx.bb killUnless contReachable
       (contCtx, resKind)
     }
     private def genLoadTry(tree: Try, ctx: Context, setGeneratedType: TypeKind => Unit): Context = {
@@ -414,8 +405,8 @@ abstract class GenICode extends SubComponent  {
 
               (pat.symbol.tpe.typeSymbol, kind, {
                 ctx: Context =>
-                  ctx.bb.emit(STORE_LOCAL(exception), pat.pos);
-                  genLoad(body, ctx, kind);
+                  ctx.bb.emit(STORE_LOCAL(exception), pat.pos)
+                  genLoad(body, ctx, kind)
               })
           }
         }
@@ -432,7 +423,7 @@ abstract class GenICode extends SubComponent  {
 
     private def genPrimitiveOp(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
       val sym = tree.symbol
-      val Apply(fun @ Select(receiver, _), args) = tree
+      val Apply(fun @ Select(receiver, _), _) = tree
       val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
 
       if (scalaPrimitives.isArithmeticOp(code))
@@ -444,7 +435,7 @@ abstract class GenICode extends SubComponent  {
       else if (isArrayOp(code))
         genArrayOp(tree, ctx, code, expectedType)
       else if (isLogicalOp(code) || isComparisonOp(code)) {
-        val trueCtx, falseCtx, afterCtx = ctx.newBlock
+        val trueCtx, falseCtx, afterCtx = ctx.newBlock()
 
         genCond(tree, ctx, trueCtx, falseCtx)
         trueCtx.bb.emitOnly(
@@ -471,132 +462,6 @@ abstract class GenICode extends SubComponent  {
     }
 
     /**
-     * forMSIL
-     */
-    private def msil_IsValuetypeInstMethod(msym: Symbol) = (
-      loaders.clrTypes.methods get msym exists (mMSIL =>
-        mMSIL.IsInstance && mMSIL.DeclaringType.IsValueType
-      )
-    )
-    private def msil_IsValuetypeInstField(fsym: Symbol) = (
-      loaders.clrTypes.fields get fsym exists (fMSIL =>
-        !fMSIL.IsStatic && fMSIL.DeclaringType.IsValueType
-      )
-    )
-
-    /**
-     * forMSIL: Adds a local var, the emitted code requires one more slot on the stack as on entry
-     */
-    private def msil_genLoadZeroOfNonEnumValuetype(ctx: Context, kind: TypeKind, pos: Position, leaveAddressOnStackInstead: Boolean) {
-      val REFERENCE(clssym) = kind
-      assert(loaders.clrTypes.isNonEnumValuetype(clssym), clssym)
-      val local = ctx.makeLocal(pos, clssym.tpe, "tmp")
-      ctx.method.addLocal(local)
-      ctx.bb.emit(CIL_LOAD_LOCAL_ADDRESS(local), pos)
-      ctx.bb.emit(CIL_INITOBJ(kind), pos)
-      val instr = if (leaveAddressOnStackInstead)
-                    CIL_LOAD_LOCAL_ADDRESS(local)
-                  else
-                    LOAD_LOCAL(local)
-      ctx.bb.emit(instr, pos)
-    }
-
-    /**
-     * forMSIL
-     */
-    private def msil_genLoadAddressOf(tree: Tree, ctx: Context, expectedType: TypeKind, butRawValueIsAlsoGoodEnough: Boolean): Context = {
-      var generatedType = expectedType
-      var addressTaken = false
-      debuglog("at line: " + (if (tree.pos.isDefined) tree.pos.line else tree.pos))
-
-      var resCtx: Context = tree match {
-
-        // emits CIL_LOAD_FIELD_ADDRESS
-        case Select(qualifier, selector) if (!tree.symbol.isModule) =>
-          addressTaken = true
-          val sym = tree.symbol
-          generatedType = toTypeKind(sym.info)
-
-          if (sym.isStaticMember) {
-            ctx.bb.emit(CIL_LOAD_FIELD_ADDRESS(sym, true), tree.pos)
-            ctx
-          } else {
-            val ctx1 = genLoadQualifier(tree, ctx)
-            ctx1.bb.emit(CIL_LOAD_FIELD_ADDRESS(sym, false), tree.pos)
-            ctx1
-          }
-
-        // emits CIL_LOAD_LOCAL_ADDRESS
-        case Ident(name) if (!tree.symbol.isPackage && !tree.symbol.isModule)=>
-          addressTaken = true
-          val sym = tree.symbol
-          try {
-            val Some(l) = ctx.method.lookupLocal(sym)
-            ctx.bb.emit(CIL_LOAD_LOCAL_ADDRESS(l), tree.pos)
-            generatedType = l.kind // actually, should be "V&" but the callsite is aware of this
-          } catch {
-            case ex: MatchError =>
-              abort("symbol " + sym + " does not exist in " + ctx.method)
-          }
-          ctx
-
-        // emits CIL_LOAD_ARRAY_ITEM_ADDRESS
-        case Apply(fun, args) =>
-          if (isPrimitive(fun.symbol)) {
-
-            val sym = tree.symbol
-            val Apply(fun @ Select(receiver, _), args) = tree
-            val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
-
-            if (isArrayOp(code)) {
-              val arrayObj = receiver
-              val k = toTypeKind(arrayObj.tpe)
-              val ARRAY(elementType) = k
-              if (scalaPrimitives.isArrayGet(code)) {
-                var ctx1 = genLoad(arrayObj, ctx, k)
-                // load argument on stack
-                debugassert(args.length == 1, "Too many arguments for array get operation: " + tree)
-                ctx1 = genLoad(args.head, ctx1, INT)
-                generatedType = elementType // actually "managed pointer to element type" but the callsite is aware of this
-                ctx1.bb.emit(CIL_LOAD_ARRAY_ITEM_ADDRESS(elementType), tree.pos)
-                addressTaken = true
-                ctx1
-              } else null
-            } else null
-          } else null
-
-        case This(qual) =>
-          /* TODO: this case handler is a placeholder for the time when Level 2 support for valuetypes is in place,
-             in particular when invoking other methods on this where this is a valuetype value (boxed or not).
-             As receiver, a managed pointer is expected, and a plain ldarg.0 achieves just that. */
-          addressTaken = true
-          genLoad(tree, ctx, expectedType)
-
-        case _ =>
-          null /* A method returning ByRef won't pass peverify, so I guess this case handler is dead code.
-                  Even if it's not, the code below to handler !addressTaken below. */
-      }
-
-      if (!addressTaken) {
-        resCtx = genLoad(tree, ctx, expectedType)
-        if (!butRawValueIsAlsoGoodEnough) {
-          // raw value on stack (must be an intermediate result, e.g. returned by method call), take address
-          addressTaken = true
-          val boxType = expectedType // toTypeKind(expectedType /* TODO FIXME */)
-          resCtx.bb.emit(BOX(boxType), tree.pos)
-          resCtx.bb.emit(CIL_UNBOX(boxType), tree.pos)
-        }
-      }
-
-      // emit conversion
-      if (generatedType != expectedType)
-        abort("Unexpected tree in msil_genLoadAddressOf: " + tree + " at: " + tree.pos)
-
-      resCtx
-    }
-
-
-    /**
      * Generate code for trees that produce values on the stack
      *
      * @param tree The tree to be translated
@@ -613,7 +478,11 @@ abstract class GenICode extends SubComponent  {
       val resCtx: Context = tree match {
         case LabelDef(name, params, rhs) =>
           def genLoadLabelDef = {
-            val ctx1 = ctx.newBlock
+            val ctx1 = ctx.newBlock() // note: we cannot kill ctx1 if ctx is in ignore mode because
+                                      // label defs can be the target of jumps from other locations.
+                                      // that means label defs can lead to unreachable code without
+                                      // proper reachability analysis
+
             if (nme.isLoopHeaderLabel(name))
               ctx1.bb.loopHeader = true
 
@@ -627,7 +496,7 @@ abstract class GenICode extends SubComponent  {
                 val pair = (tree.symbol -> (new Label(tree.symbol) anchor ctx1.bb setParams (params map (_.symbol))))
                 debuglog("Adding label " + tree.symbol.fullLocationString + " in genLoad.")
                 ctx1.labels += pair
-                ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)));
+                ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)))
             }
 
             ctx.bb.closeWith(JUMP(ctx1.bb), tree.pos)
@@ -645,13 +514,13 @@ abstract class GenICode extends SubComponent  {
               val local = ctx.method.addLocal(new Local(sym, toTypeKind(sym.info), false))
 
               if (rhs == EmptyTree) {
-                debuglog("Uninitialized variable " + tree + " at: " + (tree.pos));
+                debuglog("Uninitialized variable " + tree + " at: " + (tree.pos))
                 ctx.bb.emit(getZeroOf(local.kind))
               }
 
               var ctx1 = ctx
               if (rhs != EmptyTree)
-                ctx1 = genLoad(rhs, ctx, local.kind);
+                ctx1 = genLoad(rhs, ctx, local.kind)
 
               ctx1.bb.emit(STORE_LOCAL(local), tree.pos)
               ctx1.scope.add(local)
@@ -695,7 +564,8 @@ abstract class GenICode extends SubComponent  {
                   // we have to run this without the same finalizer in
                   // the list, otherwise infinite recursion happens for
                   // finalizers that contain 'return'
-                  val fctx = finalizerCtx.newBlock
+                  val fctx = finalizerCtx.newBlock()
+                  fctx.bb killIf ctx1.bb.ignore
                   ctx1.bb.closeWith(JUMP(fctx.bb))
                   ctx1 = genLoad(f1, fctx, UNIT)
               }
@@ -708,7 +578,7 @@ abstract class GenICode extends SubComponent  {
             }
             adapt(returnedKind, ctx1.method.returnType, ctx1, tree.pos)
             ctx1.bb.emit(RETURN(ctx.method.returnType), tree.pos)
-            ctx1.bb.enterIgnoreMode
+            ctx1.bb.enterIgnoreMode()
             generatedType = expectedType
             ctx1
           }
@@ -760,7 +630,7 @@ abstract class GenICode extends SubComponent  {
             } else {
               genCast(l, r, ctx1, cast)
             }
-            generatedType = if (cast) r else BOOL;
+            generatedType = if (cast) r else BOOL
             ctx1
           }
           genLoadApply1
@@ -773,7 +643,7 @@ abstract class GenICode extends SubComponent  {
         // on the stack (contrary to what the type in the AST says).
         case Apply(fun @ Select(Super(_, mix), _), args) =>
           def genLoadApply2 = {
-            debuglog("Call to super: " + tree);
+            debuglog("Call to super: " + tree)
             val invokeStyle = SuperCall(mix)
             // if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
 
@@ -819,47 +689,31 @@ abstract class GenICode extends SubComponent  {
                 debugassert(ctor.owner == cls,
                             "Symbol " + ctor.owner.fullName + " is different than " + tpt)
 
-                val ctx2 = if (forMSIL && loaders.clrTypes.isNonEnumValuetype(cls)) {
-                  /* parameterful constructors are the only possible custom constructors,
-                     a default constructor can't be defined for valuetypes, CLR dixit */
-                  val isDefaultConstructor = args.isEmpty
-                  if (isDefaultConstructor) {
-                    msil_genLoadZeroOfNonEnumValuetype(ctx, rt, tree.pos, leaveAddressOnStackInstead = false)
-                    ctx
-                  } else {
-                    val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
-                    ctx1.bb.emit(CIL_NEWOBJ(ctor), tree.pos)
-                    ctx1
-                  }
-                } else {
-                  val nw = NEW(rt)
-                  ctx.bb.emit(nw, tree.pos)
-                  ctx.bb.emit(DUP(generatedType))
-                  val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
-
-                  val init = CALL_METHOD(ctor, Static(true))
-                  nw.init = init
-                  ctx1.bb.emit(init, tree.pos)
-                  ctx1
-                }
-                ctx2
+                val nw = NEW(rt)
+                ctx.bb.emit(nw, tree.pos)
+                ctx.bb.emit(DUP(generatedType))
+                val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
 
+                val init = CALL_METHOD(ctor, Static(onInstance = true))
+                nw.init = init
+                ctx1.bb.emit(init, tree.pos)
+                ctx1
               case _ =>
                 abort("Cannot instantiate " + tpt + " of kind: " + generatedType)
             }
           }
           genLoadApply3
 
-        case Apply(fun @ _, List(expr)) if (definitions.isBox(fun.symbol)) =>
+        case Apply(fun @ _, List(expr)) if currentRun.runDefinitions.isBox(fun.symbol) =>
           def genLoadApply4 = {
-            debuglog("BOX : " + fun.symbol.fullName);
+            debuglog("BOX : " + fun.symbol.fullName)
             val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
             val nativeKind = toTypeKind(expr.tpe)
-            if (settings.Xdce.value) {
+            if (settings.Xdce) {
               // we store this boxed value to a local, even if not really needed.
               // boxing optimization might use it, and dead code elimination will
               // take care of unnecessary stores
-              var loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed")
+              val loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed")
               ctx1.bb.emit(STORE_LOCAL(loc1))
               ctx1.bb.emit(LOAD_LOCAL(loc1))
             }
@@ -869,7 +723,7 @@ abstract class GenICode extends SubComponent  {
           }
           genLoadApply4
 
-        case Apply(fun @ _, List(expr)) if (definitions.isUnbox(fun.symbol)) =>
+        case Apply(fun @ _, List(expr)) if (currentRun.runDefinitions.isUnbox(fun.symbol)) =>
           debuglog("UNBOX : " + fun.symbol.fullName)
           val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
           val boxType = toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe)
@@ -877,12 +731,6 @@ abstract class GenICode extends SubComponent  {
           ctx1.bb.emit(UNBOX(boxType), expr.pos)
           ctx1
 
-        case Apply(fun @ _, List(expr)) if (forMSIL && loaders.clrTypes.isAddressOf(fun.symbol)) =>
-          debuglog("ADDRESSOF : " + fun.symbol.fullName);
-          val ctx1 = msil_genLoadAddressOf(expr, ctx, toTypeKind(expr.tpe), butRawValueIsAlsoGoodEnough = false)
-          generatedType = toTypeKind(fun.symbol.tpe.resultType)
-          ctx1
-
         case app @ Apply(fun, args) =>
           def genLoadApply6 = {
             val sym = fun.symbol
@@ -893,7 +741,7 @@ abstract class GenICode extends SubComponent  {
                 resolveForwardLabel(ctx.defdef, ctx, sym)
                 ctx.labels.get(sym) match {
                   case Some(l) =>
-                    log("Forward jump for " + sym.fullLocationString + ": scan found label " + l)
+                    debuglog("Forward jump for " + sym.fullLocationString + ": scan found label " + l)
                     l
                   case _       =>
                     abort("Unknown label target: " + sym + " at: " + (fun.pos) + ": ctx: " + ctx)
@@ -908,35 +756,28 @@ abstract class GenICode extends SubComponent  {
               // (if it's not in ignore mode, double-closing is an error)
               val ctx1 = genLoadLabelArguments(args, label, ctx)
               ctx1.bb.emitOnly(if (label.anchored) JUMP(label.block) else PJUMP(label))
-              ctx1.bb.enterIgnoreMode
+              ctx1.bb.enterIgnoreMode()
               ctx1
             } else if (isPrimitive(sym)) { // primitive method call
               val (newCtx, resKind) = genPrimitiveOp(app, ctx, expectedType)
               generatedType = resKind
               newCtx
             } else {  // normal method call
-              debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember);
+              debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember)
               val invokeStyle =
                 if (sym.isStaticMember)
-                  Static(false)
+                  Static(onInstance = false)
                 else if (sym.isPrivate || sym.isClassConstructor)
-                  Static(true)
+                  Static(onInstance = true)
                 else
                   Dynamic
 
-              var ctx1 =
-                if (invokeStyle.hasInstance) {
-                  if (forMSIL && !(invokeStyle.isInstanceOf[SuperCall]) && msil_IsValuetypeInstMethod(sym))
-                    msil_genLoadQualifierAddress(fun, ctx)
-                  else
-                    genLoadQualifier(fun, ctx)
-                } else ctx
-
+              var ctx1 = if (invokeStyle.hasInstance) genLoadQualifier(fun, ctx) else ctx
               ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx1)
               val cm = CALL_METHOD(sym, invokeStyle)
 
-              /** In a couple cases, squirrel away a little extra information in the
-               *  CALL_METHOD for use by GenJVM.
+              /* In a couple cases, squirrel away a little extra information in the
+               * CALL_METHOD for use by GenASM.
                */
               fun match {
                 case Select(qual, _) =>
@@ -957,14 +798,15 @@ abstract class GenICode extends SubComponent  {
               ctx1.method.updateRecursive(sym)
               generatedType =
                 if (sym.isClassConstructor) UNIT
-                else toTypeKind(sym.info.resultType);
+                else toTypeKind(sym.info.resultType)
+              // deal with methods that return Null
+              adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
               ctx1
             }
           }
           genLoadApply6
 
         case ApplyDynamic(qual, args) =>
-          assert(!forMSIL, tree)
           // TODO - this is where we'd catch dynamic applies for invokedynamic.
           sys.error("No invokedynamic support yet.")
           // val ctx1 = genLoad(qual, ctx, ObjectReference)
@@ -1002,17 +844,23 @@ abstract class GenICode extends SubComponent  {
             val sym = tree.symbol
             generatedType = toTypeKind(sym.info)
             val hostClass = findHostClass(qualifier.tpe, sym)
-            log(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
+            debuglog(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
+            val qualSafeToElide = treeInfo isQualifierSafeToElide qualifier
+
+            def genLoadQualUnlessElidable: Context =
+              if (qualSafeToElide) ctx else genLoadQualifier(tree, ctx)
 
             if (sym.isModule) {
-              genLoadModule(ctx, tree)
-            }
-            else if (sym.isStaticMember) {
-              ctx.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos)
-              ctx
+              genLoadModule(genLoadQualUnlessElidable, tree)
             } else {
-              val ctx1 = genLoadQualifier(tree, ctx)
-              ctx1.bb.emit(LOAD_FIELD(sym, false) setHostClass hostClass, tree.pos)
+              val isStatic = sym.isStaticMember
+              val ctx1 = if (isStatic) genLoadQualUnlessElidable
+                         else          genLoadQualifier(tree, ctx)
+              ctx1.bb.emit(LOAD_FIELD(sym, isStatic) setHostClass hostClass, tree.pos)
+              // it's tempting to drop field accesses of type Null instead of adapting them,
+              // but a field access can trigger static class initialization, so we can't. Besides,
+              // fields of type Null probably aren't common enough to be worth a dedicated optimization
+              adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
               ctx1
             }
           }
@@ -1021,7 +869,7 @@ abstract class GenICode extends SubComponent  {
         case Ident(name) =>
           def genLoadIdent = {
             val sym = tree.symbol
-            if (!sym.isPackage) {
+            if (!sym.hasPackageFlag) {
               if (sym.isModule) {
                 genLoadModule(ctx, tree)
                 generatedType = toTypeKind(sym.info)
@@ -1044,16 +892,16 @@ abstract class GenICode extends SubComponent  {
           def genLoadLiteral = {
             if (value.tag != UnitTag) (value.tag, expectedType) match {
               case (IntTag, LONG) =>
-                ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos);
+                ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos)
                 generatedType = LONG
               case (FloatTag, DOUBLE) =>
-                ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos);
+                ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos)
                 generatedType = DOUBLE
               case (NullTag, _) =>
-                ctx.bb.emit(CONSTANT(value), tree.pos);
+                ctx.bb.emit(CONSTANT(value), tree.pos)
                 generatedType = NullReference
               case _ =>
-                ctx.bb.emit(CONSTANT(value), tree.pos);
+                ctx.bb.emit(CONSTANT(value), tree.pos)
                 generatedType = toTypeKind(tree.tpe)
             }
             ctx
@@ -1061,10 +909,10 @@ abstract class GenICode extends SubComponent  {
           genLoadLiteral
 
         case Block(stats, expr) =>
-          ctx.enterScope
+          ctx.enterScope()
           var ctx1 = genStat(stats, ctx)
           ctx1 = genLoad(expr, ctx1, expectedType)
-          ctx1.exitScope
+          ctx1.exitScope()
           ctx1
 
         case Typed(Super(_, _), _) =>
@@ -1101,9 +949,11 @@ abstract class GenICode extends SubComponent  {
 
         case Match(selector, cases) =>
           def genLoadMatch = {
-            debuglog("Generating SWITCH statement.");
-            var ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
-            val afterCtx = ctx1.newBlock
+            debuglog("Generating SWITCH statement.")
+            val ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
+            val afterCtx = ctx1.newBlock()
+            afterCtx.bb killIf ctx1.bb.ignore
+            var afterCtxReachable = false
             var caseCtx: Context  = null
             generatedType = toTypeKind(tree.tpe)
 
@@ -1113,7 +963,8 @@ abstract class GenICode extends SubComponent  {
 
             for (caze @ CaseDef(pat, guard, body) <- cases) {
               assert(guard == EmptyTree, guard)
-              val tmpCtx = ctx1.newBlock
+              val tmpCtx = ctx1.newBlock()
+              tmpCtx.bb killIf ctx1.bb.ignore
               pat match {
                 case Literal(value) =>
                   tags = value.intValue :: tags
@@ -1135,12 +986,15 @@ abstract class GenICode extends SubComponent  {
               }
 
               caseCtx = genLoad(body, tmpCtx, generatedType)
+              afterCtxReachable ||= !caseCtx.bb.ignore
               // close the block unless it's already been closed by the body, which closes the block if it ends in a jump (which is emitted to have alternatives share their body)
               caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos)
             }
+            afterCtxReachable ||= (default == afterCtx.bb)
             ctx1.bb.emitOnly(
               SWITCH(tags.reverse map (x => List(x)), (default :: targets).reverse) setPos tree.pos
             )
+            afterCtx.bb killUnless afterCtxReachable
             afterCtx
           }
           genLoadMatch
@@ -1155,34 +1009,77 @@ abstract class GenICode extends SubComponent  {
       }
 
       // emit conversion
-      if (generatedType != expectedType)
-        adapt(generatedType, expectedType, resCtx, tree.pos)
+      if (generatedType != expectedType) {
+        tree match {
+          case Literal(Constant(null)) if generatedType == NullReference && expectedType != UNIT =>
+            // literal null on the stack (as opposed to a boxed null, see SI-8233):
+            // we can bypass `adapt`, which would otherwise emit a redundant [DROP, CONSTANT(null)],
+            // except in one case: when the expected type is UNIT (unboxed), where we still need to emit a DROP
+          case _ =>
+            adapt(generatedType, expectedType, resCtx, tree.pos)
+        }
+      }
 
       resCtx
     }
 
+    /**
+     * If we have a method call, field load, or array element load of type Null then
+     * we need to convince the JVM that we have a null value because in Scala
+     * land Null is a subtype of all ref types, but in JVM land scala.runtime.Null$
+     * is not. Note we don't have to adapt loads of locals because the JVM type
+     * system for locals does have a null type which it tracks internally. As
+     * long as we adapt these other things, the JVM will know that a Scala local of
+     * type Null is holding a null.
+     */
+    private def adaptNullRef(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
+      debuglog(s"GenICode#adaptNullRef($from, $to, $ctx, $pos)")
+
+      // Don't need to adapt null to unit because we'll just drop it anyway. Don't
+      // need to adapt to Object or AnyRef because the JVM is happy with
+      // upcasting Null to them.
+      // We do have to adapt from NullReference to NullReference because we could be storing
+      // this value into a local of type Null and we want the JVM to see that it's
+      // a null value so we don't have to also adapt local loads.
+      if (from == NullReference && to != UNIT && to != ObjectReference && to != AnyRefReference) {
+        assert(to.isRefOrArrayType, s"Attempt to adapt a null to a non reference type $to.")
+        // adapt by dropping what we've got and pushing a null which
+        // will convince the JVM we really do have null
+        ctx.bb.emit(DROP(from), pos)
+        ctx.bb.emit(CONSTANT(Constant(null)), pos)
+      }
+    }
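    // A minimal illustration (hypothetical user code, assuming a member of type Null):
    //   class C { val n: Null = null }
    //   def s(c: C): String = c.n
    // The field load pushes a value of JVM type scala.runtime.Null$, which is not a
    // subtype of java.lang.String, so adaptNullRef drops it and pushes a plain `null`.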
+
     private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
       // An awful lot of bugs explode here - let's leave ourselves more clues.
       // A typical example is an overloaded type assigned after typer.
-      log(s"GenICode#adapt($from, $to, $ctx, $pos)")
+      debuglog(s"GenICode#adapt($from, $to, $ctx, $pos)")
 
-      val conforms = (from <:< to) || (from == NullReference && to == NothingReference)
       def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
-      def checkAssertions() {
-        def msg = s"Can't convert from $from to $to in unit ${unit.source} at $pos"
-        debugassert(from != UNIT, msg)
-        assert(!from.isReferenceType && !to.isReferenceType, msg)
-      }
-      if (conforms) from match {
-        case NothingReference                                          => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode
-        case NullReference                                             => ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null))))
-        case ThrowableReference if !(ThrowableClass.tpe <:< to.toType) => ctx.bb.emit(CHECK_CAST(to)) // downcast throwables
-        case BYTE | SHORT | CHAR | INT if to == LONG                   => coerce(INT, LONG)           // widen subrange types
-        case _                                                         => ()
-      }
-      else to match {
-        case UNIT => ctx.bb.emit(DROP(from), pos)           // value discarding
-        case _    => checkAssertions() ; coerce(from, to)   // other primitive coercions
+
+      (from, to) match {
+        // The JVM doesn't have a Nothing equivalent, so it doesn't know that a method of type Nothing can't actually return. So for instance, with
+        //    def f: String = ???
+        // we need
+        //   0:	getstatic	#25; //Field scala/Predef$.MODULE$:Lscala/Predef$;
+        //   3:	invokevirtual	#29; //Method scala/Predef$.$qmark$qmark$qmark:()Lscala/runtime/Nothing$;
+        //   6:	athrow
+        // So this case tacks on the athrow, which makes the JVM happy because Nothing's runtime class (scala.runtime.Nothing$) is declared as a subclass of Throwable
+        case (NothingReference, _) =>
+          ctx.bb.emit(THROW(ThrowableClass))
+          ctx.bb.enterIgnoreMode()
+        case (NullReference, REFERENCE(_)) =>
+          // SI-8223 we can't assume that the stack contains a `null`, it might contain a Null$
+          ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null))))
+        case _ if from isAssignabledTo to =>
+          ()
+        case (_, UNIT) =>
+          ctx.bb.emit(DROP(from), pos)
+        // otherwise we'd better be doing a primitive -> primitive coercion or there's a problem
+        case _ if !from.isRefOrArrayType && !to.isRefOrArrayType =>
+          coerce(from, to)
+        case _ =>
+          assert(false, s"Can't convert from $from to $to in unit ${unit.source} at $pos")
       }
     }
 
@@ -1195,15 +1092,6 @@ abstract class GenICode extends SubComponent  {
           abort("Unknown qualifier " + tree)
       }
 
-    /** forMSIL */
-    private def msil_genLoadQualifierAddress(tree: Tree, ctx: Context): Context =
-      tree match {
-        case Select(qualifier, _) =>
-          msil_genLoadAddressOf(qualifier, ctx, toTypeKind(qualifier.tpe), butRawValueIsAlsoGoodEnough = false)
-        case _ =>
-          abort("Unknown qualifier " + tree)
-      }
-
     /**
      * Generate code that loads args into label parameters.
      */
@@ -1250,7 +1138,9 @@ abstract class GenICode extends SubComponent  {
         if (!tree.symbol.isPackageClass) tree.symbol
         else tree.symbol.info.member(nme.PACKAGE) match {
           case NoSymbol => abort("Cannot use package as value: " + tree)
-          case s        => debugwarn("Bug: found package class where package object expected.  Converting.") ; s.moduleClass
+          case s        =>
+            devWarning(s"Found ${tree.symbol} where a package object is required. Converting to ${s.moduleClass}")
+            s.moduleClass
         }
       )
       debuglog("LOAD_MODULE from %s: %s".format(tree.shortClass, sym))
@@ -1384,18 +1274,14 @@ abstract class GenICode extends SubComponent  {
     // }
 
     /** Generate string concatenation.
-     *
-     *  @param tree ...
-     *  @param ctx  ...
-     *  @return     ...
      */
     def genStringConcat(tree: Tree, ctx: Context): Context = {
       liftStringConcat(tree) match {
         // Optimization for expressions of the form "" + x.  We can avoid the StringBuilder.
-        case List(Literal(Constant("")), arg) if !forMSIL =>
+        case List(Literal(Constant("")), arg) =>
           debuglog("Rewriting \"\" + x as String.valueOf(x) for: " + arg)
           val ctx1 = genLoad(arg, ctx, ObjectReference)
-          ctx1.bb.emit(CALL_METHOD(String_valueOf, Static(false)), arg.pos)
+          ctx1.bb.emit(CALL_METHOD(String_valueOf, Static(onInstance = false)), arg.pos)
           ctx1
         case concatenations =>
           debuglog("Lifted string concatenations for " + tree + "\n to: " + concatenations)
@@ -1420,7 +1306,7 @@ abstract class GenICode extends SubComponent  {
       }
 
       val ctx1 = genLoad(tree, ctx, ObjectReference)
-      ctx1.bb.emit(CALL_METHOD(hashMethod, Static(false)))
+      ctx1.bb.emit(CALL_METHOD(hashMethod, Static(onInstance = false)))
       ctx1
     }
 
@@ -1443,6 +1329,8 @@ abstract class GenICode extends SubComponent  {
     /** Some useful equality helpers.
      */
     def isNull(t: Tree) = cond(t) { case Literal(Constant(null)) => true }
+    def isLiteral(t: Tree) = cond(t) { case Literal(_) => true }
+    def isNonNullExpr(t: Tree) = isLiteral(t) || ((t.symbol ne null) && t.symbol.isModule)
 
     /* If l or r is constant null, returns the other ; otherwise null */
     def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null
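    // For illustration (hypothetical source): for `x eq null`, ifOneIsNull picks the tree for `x`,
    // letting genComparisonOp close the block with a single CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
    // rather than loading a null constant and comparing two operands.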
@@ -1476,9 +1364,17 @@ abstract class GenICode extends SubComponent  {
     private def genCond(tree: Tree,
                         ctx: Context,
                         thenCtx: Context,
-                        elseCtx: Context): Unit =
+                        elseCtx: Context): Boolean =
     {
-      def genComparisonOp(l: Tree, r: Tree, code: Int) {
+      /**
+       * Generate the de-sugared comparison mechanism that will underlie an '=='
+       *
+       * @param l       left-hand side of the '=='
+       * @param r       right-hand side of the '=='
+       * @param code    the comparison operator to use
+       * @return true if either branch can continue normally to a follow on block, false otherwise
+       */
+      def genComparisonOp(l: Tree, r: Tree, code: Int): Boolean = {
         val op: TestOp = code match {
           case scalaPrimitives.LT => LT
           case scalaPrimitives.LE => LE
@@ -1494,27 +1390,33 @@ abstract class GenICode extends SubComponent  {
         lazy val nonNullSide = ifOneIsNull(l, r)
         if (isReferenceEqualityOp(code) && nonNullSide != null) {
           val ctx1 = genLoad(nonNullSide, ctx, ObjectReference)
+          val branchesReachable = !ctx1.bb.ignore
           ctx1.bb.emitOnly(
             CZJUMP(thenCtx.bb, elseCtx.bb, op, ObjectReference)
           )
+          branchesReachable
         }
         else {
           val kind = getMaxType(l.tpe :: r.tpe :: Nil)
           var ctx1 = genLoad(l, ctx, kind)
           ctx1 = genLoad(r, ctx1, kind)
+          val branchesReachable = !ctx1.bb.ignore
 
           ctx1.bb.emitOnly(
             CJUMP(thenCtx.bb, elseCtx.bb, op, kind) setPos r.pos
           )
+          branchesReachable
         }
       }
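      // For illustration (hypothetical source): for `if (x > 0) a else b` with `x: Int`,
      // the general branch above loads `x` and `0` and closes the block with
      //   CJUMP(thenCtx.bb, elseCtx.bb, GT, INT)
      // and returns false only if loading the operands already left the block in ignore mode.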
 
-      debuglog("Entering genCond with tree: " + tree);
+      debuglog("Entering genCond with tree: " + tree)
 
       // the default emission
-      def default() = {
+      def default(): Boolean = {
         val ctx1 = genLoad(tree, ctx, BOOL)
+        val branchesReachable = !ctx1.bb.ignore
         ctx1.bb.closeWith(CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL) setPos tree.pos)
+        branchesReachable
       }
 
       tree match {
@@ -1526,11 +1428,12 @@ abstract class GenICode extends SubComponent  {
           lazy val Select(lhs, _) = fun
           lazy val rhs = args.head
 
-          def genZandOrZor(and: Boolean) = {
-            val ctxInterm = ctx.newBlock
+          def genZandOrZor(and: Boolean): Boolean = {
+            val ctxInterm = ctx.newBlock()
 
-            if (and) genCond(lhs, ctx, ctxInterm, elseCtx)
+            val branchesReachable = if (and) genCond(lhs, ctx, ctxInterm, elseCtx)
             else genCond(lhs, ctx, thenCtx, ctxInterm)
+            ctxInterm.bb killUnless branchesReachable
 
             genCond(rhs, ctxInterm, thenCtx, elseCtx)
           }
@@ -1553,10 +1456,10 @@ abstract class GenICode extends SubComponent  {
               else if (isComparisonOp(code))
                 genComparisonOp(lhs, rhs, code)
               else
-                default
+                default()
           }
 
-        case _ => default
+        case _ => default()
       }
     }
 
@@ -1569,17 +1472,18 @@ abstract class GenICode extends SubComponent  {
      * @param ctx     current context
      * @param thenCtx target context if the comparison yields true
      * @param elseCtx target context if the comparison yields false
+     * @return true if either branch can continue normally to a follow on block, false otherwise
      */
-    def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Unit = {
+    def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Boolean = {
       def getTempLocal = ctx.method.lookupLocal(nme.EQEQ_LOCAL_VAR) getOrElse {
-        ctx.makeLocal(l.pos, AnyRefClass.tpe, nme.EQEQ_LOCAL_VAR)
+        ctx.makeLocal(l.pos, AnyRefTpe, nme.EQEQ_LOCAL_VAR.toString)
       }
 
-      /** True if the equality comparison is between values that require the use of the rich equality
-        * comparator (scala.runtime.Comparator.equals). This is the case when either side of the
-        * comparison might have a run-time type subtype of java.lang.Number or java.lang.Character.
-        * When it is statically known that both sides are equal and subtypes of Number of Character,
-        * not using the rich equality is possible (their own equals method will do ok.)*/
+      /* True if the equality comparison is between values that require the use of the rich equality
+       * comparator (scala.runtime.Comparator.equals). This is the case when either side of the
+       * comparison might have a run-time type that is a subtype of java.lang.Number or java.lang.Character.
+       * When it is statically known that both sides have the same final type and are subtypes of Number
+       * or Character, the rich equality check can be skipped (their own equals method will do). */
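      // For illustration (hypothetical values): after erasure both operands below are typed
      // Object, which isMaybeBoxed accepts, so the rich comparator is needed for numeric equality:
      //   val a: Any = 1     // boxed to java.lang.Integer
      //   val b: Any = 1L    // boxed to java.lang.Long
      //   a == b             // true in Scala; Integer.equals(Long) alone would answer false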
       def mustUseAnyComparator: Boolean = {
         def areSameFinals = l.tpe.isFinalType && r.tpe.isFinalType && (l.tpe =:= r.tpe)
         !areSameFinals && isMaybeBoxed(l.tpe.typeSymbol) && isMaybeBoxed(r.tpe.typeSymbol)
@@ -1587,49 +1491,72 @@ abstract class GenICode extends SubComponent  {
 
       if (mustUseAnyComparator) {
         // when -optimise is on we call the @inline-version of equals, found in ScalaRunTime
-        val equalsMethod =
-          if (!settings.optimise.value) {
-            def default = platform.externalEquals
-            platform match {
-              case x: JavaPlatform =>
-                import x._
-                  if (l.tpe <:< BoxedNumberClass.tpe) {
-                    if (r.tpe <:< BoxedNumberClass.tpe) externalEqualsNumNum
-                    else if (r.tpe <:< BoxedCharacterClass.tpe) externalEqualsNumChar
-                    else externalEqualsNumObject
-                  }
-                  else default
-
-              case _ => default
-            }
-          }
-          else {
+        val equalsMethod: Symbol = {
+          if (!settings.optimise) {
+            if (l.tpe <:< BoxedNumberClass.tpe) {
+              if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum
+              else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar
+              else platform.externalEqualsNumObject
+            } else platform.externalEquals
+          } else {
             ctx.bb.emit(LOAD_MODULE(ScalaRunTimeModule))
             getMember(ScalaRunTimeModule, nme.inlinedEquals)
           }
+        }
 
         val ctx1 = genLoad(l, ctx, ObjectReference)
         val ctx2 = genLoad(r, ctx1, ObjectReference)
+        val branchesReachable = !ctx2.bb.ignore
         ctx2.bb.emitOnly(
-          CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(false)),
+          CALL_METHOD(equalsMethod, if (settings.optimise) Dynamic else Static(onInstance = false)),
           CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
         )
+        branchesReachable
       }
       else {
-        if (isNull(l))
+        if (isNull(l)) {
           // null == expr -> expr eq null
-          genLoad(r, ctx, ObjectReference).bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
-        else if (isNull(r)) {
+          val ctx1 = genLoad(r, ctx, ObjectReference)
+          val branchesReachable = !ctx1.bb.ignore
+          ctx1.bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
+          branchesReachable
+        } else if (isNull(r)) {
           // expr == null -> expr eq null
-          genLoad(l, ctx, ObjectReference).bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
+          val ctx1 = genLoad(l, ctx, ObjectReference)
+          val branchesReachable = !ctx1.bb.ignore
+          ctx1.bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
+          branchesReachable
+        } else if (isNonNullExpr(l)) {
+          // Avoid null check if L is statically non-null.
+          //
+          // "" == expr -> "".equals(expr)
+          // Nil == expr -> Nil.equals(expr)
+          //
+          // Common enough (through pattern matching) to treat this specially here rather than
+          // hoping that -Yconst-opt is enabled. The impossible branches for null checks lead
+          // to spurious "branch not covered" warnings in Jacoco code coverage.
+          var ctx1 = genLoad(l, ctx, ObjectReference)
+          val branchesReachable = !ctx1.bb.ignore
+          ctx1 = genLoad(r, ctx1, ObjectReference)
+          ctx1.bb emitOnly(
+            CALL_METHOD(Object_equals, Dynamic),
+            CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
+          )
+          branchesReachable
         } else {
           val eqEqTempLocal = getTempLocal
           var ctx1 = genLoad(l, ctx, ObjectReference)
-          lazy val nonNullCtx = ctx1.newBlock
+          val branchesReachable = !ctx1.bb.ignore
+          lazy val nonNullCtx = {
+            val block = ctx1.newBlock()
+            block.bb killUnless branchesReachable
+            block
+          }
 
           // l == r -> if (l eq null) r eq null else l.equals(r)
           ctx1 = genLoad(r, ctx1, ObjectReference)
-          val nullCtx = ctx1.newBlock
+          val nullCtx = ctx1.newBlock()
+          nullCtx.bb killUnless branchesReachable
 
           ctx1.bb.emitOnly(
             STORE_LOCAL(eqEqTempLocal) setPos l.pos,
@@ -1646,6 +1573,7 @@ abstract class GenICode extends SubComponent  {
             CALL_METHOD(Object_equals, Dynamic),
             CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
           )
+          branchesReachable
         }
       }
     }
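    // A sketch of the general case above (hypothetical source): for
    //   def same(a: AnyRef, b: AnyRef) = a == b
    // neither operand is a literal null or statically non-null, so the emitted blocks implement
    //   if (a eq null) b eq null else a.equals(b)
    // with `b` parked in eqEqTempLocal across the null check on `a`.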
@@ -1658,12 +1586,12 @@ abstract class GenICode extends SubComponent  {
       debugassert(ctx.clazz.symbol eq cls,
                "Classes are not the same: " + ctx.clazz.symbol + ", " + cls)
 
-      /** Non-method term members are fields, except for module members. Module
-       *  members can only happen on .NET (no flatten) for inner traits. There,
-       *  a module symbol is generated (transformInfo in mixin) which is used
-       *  as owner for the members of the implementation class (so that the
-       *  backend emits them as static).
-       *  No code is needed for this module symbol.
+      /* Non-method term members are fields, except for module members. Module
+       * members can only happen on .NET (no flatten) for inner traits. There,
+       * a module symbol is generated (transformInfo in mixin) which is used
+       * as owner for the members of the implementation class (so that the
+       * backend emits them as static).
+       * No code is needed for this module symbol.
        */
       for (f <- cls.info.decls ; if !f.isMethod && f.isTerm && !f.isModule)
         ctx.clazz addField new IField(f)
@@ -1701,8 +1629,6 @@ abstract class GenICode extends SubComponent  {
      *  If the block consists of a single unconditional jump, prune
      *  it by replacing the instructions in the predecessor to jump
      *  directly to the JUMP target of the block.
-     *
-     *  @param method ...
      */
     def prune(method: IMethod) = {
       var changed = false
@@ -1714,14 +1640,14 @@ abstract class GenICode extends SubComponent  {
           case _ => None
         }
         if (block.size == 1 && optCont.isDefined) {
-          val Some(cont) = optCont;
-          val pred = block.predecessors;
-          debuglog("Preds: " + pred + " of " + block + " (" + optCont + ")");
+          val Some(cont) = optCont
+          val pred = block.predecessors
+          debuglog("Preds: " + pred + " of " + block + " (" + optCont + ")")
           pred foreach { p =>
             changed = true
             p.lastInstruction match {
               case CJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
-                debuglog("Pruning empty if branch.");
+                debuglog("Pruning empty if branch.")
                 p.replaceInstruction(p.lastInstruction,
                                      if (block == succ)
                                        if (block == fail)
@@ -1734,7 +1660,7 @@ abstract class GenICode extends SubComponent  {
                                        abort("Could not find block in preds: " + method + " " + block + " " + pred + " " + p))
 
               case CZJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
-                debuglog("Pruning empty ifz branch.");
+                debuglog("Pruning empty ifz branch.")
                 p.replaceInstruction(p.lastInstruction,
                                      if (block == succ)
                                        if (block == fail)
@@ -1747,12 +1673,12 @@ abstract class GenICode extends SubComponent  {
                                        abort("Could not find block in preds"))
 
               case JUMP(b) if (b == block) =>
-                debuglog("Pruning empty JMP branch.");
+                debuglog("Pruning empty JMP branch.")
                 val replaced = p.replaceInstruction(p.lastInstruction, JUMP(cont))
                 debugassert(replaced, "Didn't find p.lastInstruction")
 
               case SWITCH(tags, labels) if (labels contains block) =>
-                debuglog("Pruning empty SWITCH branch.");
+                debuglog("Pruning empty SWITCH branch.")
                 p.replaceInstruction(p.lastInstruction,
                                      SWITCH(tags, labels map (l => if (l == block) cont else l)))
 
@@ -1768,7 +1694,7 @@ abstract class GenICode extends SubComponent  {
               e.covered = e.covered filter (_ != block)
               e.blocks  = e.blocks filter (_ != block)
               if (e.startBlock eq block)
-                e setStartBlock cont;
+                e setStartBlock cont
             }
           }
         }
@@ -1780,7 +1706,7 @@ abstract class GenICode extends SubComponent  {
         method.blocks foreach prune0
       } while (changed)
 
-      debuglog("Prune fixpoint reached in " + n + " iterations.");
+      debuglog("Prune fixpoint reached in " + n + " iterations.")
     }
 
     def getMaxType(ts: List[Type]): TypeKind =
@@ -1822,9 +1748,7 @@ abstract class GenICode extends SubComponent  {
         t match {
           case t @ Apply(_, args) if sym.isLabel && !boundLabels(sym) =>
             val newSym = getLabel(sym.pos, sym.name)
-            val tree = Apply(global.gen.mkAttributedRef(newSym), transformTrees(args)) setPos t.pos
-            tree.tpe = t.tpe
-            tree
+            Apply(global.gen.mkAttributedRef(newSym), transformTrees(args)) setPos t.pos setType t.tpe
 
           case t @ LabelDef(name, params, rhs) =>
             val newSym = getLabel(t.pos, name)
@@ -1845,7 +1769,7 @@ abstract class GenICode extends SubComponent  {
 
     /////////////////////// Context ////////////////////////////////
 
-    abstract class Cleanup(val value: AnyRef) {
+    sealed abstract class Cleanup(val value: AnyRef) {
       def contains(x: AnyRef) = value == x
     }
     case class MonitorRelease(m: Local) extends Cleanup(m) { }
@@ -1954,22 +1878,11 @@ abstract class GenICode extends SubComponent  {
       }
 
       def addFinalizer(f: Tree, ctx: Context): this.type = {
-        cleanups = Finalizer(f, ctx) :: cleanups;
-        this
-      }
-
-      def removeFinalizer(f: Tree): this.type = {
-        assert(cleanups.head contains f,
-               "Illegal nesting of cleanup operations: " + cleanups + " while exiting finalizer " + f);
-        cleanups = cleanups.tail
+        cleanups = Finalizer(f, ctx) :: cleanups
         this
       }
 
       /** Prepare a new context upon entry into a method.
-       *
-       *  @param m ...
-       *  @param d ...
-       *  @return  ...
        */
       def enterMethod(m: IMethod, d: DefDef): Context = {
         val ctx1 = new Context(this) setMethod(m)
@@ -1978,13 +1891,13 @@ abstract class GenICode extends SubComponent  {
         ctx1.bb = ctx1.method.startBlock
         ctx1.defdef = d
         ctx1.scope = EmptyScope
-        ctx1.enterScope
+        ctx1.enterScope()
         ctx1
       }
 
       /** Return a new context for a new basic block. */
       def newBlock(): Context = {
-        val block = method.code.newBlock
+        val block = method.code.newBlock()
         handlers foreach (_ addCoveredBlock block)
         currentExceptionHandlers foreach (_ addBlock block)
         block.varsInScope.clear()
@@ -2008,13 +1921,12 @@ abstract class GenICode extends SubComponent  {
        * 'covered' by this exception handler (in addition to the
        * previously active handlers).
        */
-      private def newExceptionHandler(cls: Symbol, resultKind: TypeKind, pos: Position): ExceptionHandler = {
+      private def newExceptionHandler(cls: Symbol, pos: Position): ExceptionHandler = {
         handlerCount += 1
         val exh = new ExceptionHandler(method, newTermNameCached("" + handlerCount), cls, pos)
-        exh.resultKind = resultKind
         method.addHandler(exh)
         handlers = exh :: handlers
-        debuglog("added handler: " + exh);
+        debuglog("added handler: " + exh)
 
         exh
       }
@@ -2024,7 +1936,7 @@ abstract class GenICode extends SubComponent  {
       private def addActiveHandler(exh: ExceptionHandler) {
         handlerCount += 1
         handlers = exh :: handlers
-        debuglog("added handler: " + exh);
+        debuglog("added handler: " + exh)
       }
 
       /** Return a new context for generating code for the given
@@ -2032,7 +1944,7 @@ abstract class GenICode extends SubComponent  {
        */
       private def enterExceptionHandler(exh: ExceptionHandler): Context = {
         currentExceptionHandlers ::= exh
-        val ctx = newBlock
+        val ctx = newBlock()
         exh.setStartBlock(ctx.bb)
         ctx
       }
@@ -2041,16 +1953,6 @@ abstract class GenICode extends SubComponent  {
         currentExceptionHandlers = currentExceptionHandlers.tail
       }
 
-      /** Remove the given handler from the list of active exception handlers. */
-      def removeActiveHandler(exh: ExceptionHandler): Unit = {
-        assert(handlerCount > 0 && handlers.head == exh,
-               "Wrong nesting of exception handlers." + this + " for " + exh)
-        handlerCount -= 1
-        handlers = handlers.tail
-        debuglog("removed handler: " + exh);
-
-      }
-
       /** Clone the current context */
       def dup: Context = new Context(this)
 
@@ -2069,23 +1971,55 @@ abstract class GenICode extends SubComponent  {
        * It returns the resulting context, with the same active handlers as
        * before the call. Use it like:
        *
-       * <code> ctx.Try( ctx => {
+       * ` ctx.Try( ctx => {
        *   ctx.bb.emit(...) // protected block
        * }, (ThrowableClass,
        *   ctx => {
        *     ctx.bb.emit(...); // exception handler
        *   }), (AnotherExceptionClass,
        *   ctx => {...
-       *   } ))</code>
+       *   } ))`
+       *
+       *   The resulting structure will look something like
+       *
+       *   outer:
+       *     // this 'useless' jump will be removed later,
+       *     // for now it separates the try body's blocks from previous
+       *     // code since the try body needs its own exception handlers
+       *     JUMP body
+       *
+       *   body:
+       *     [ try body ]
+       *     JUMP normalExit
+       *
+       *   catch[i]:
+       *     [ handler[i] body ]
+       *     JUMP normalExit
+       *
+       *   catchAll:
+       *     STORE exception
+       *     [ finally body ]
+       *     THROW exception
+       *
+       *   normalExit:
+       *     [ finally body ]
+       *
+       *  each catch[i] will cover body.  catchAll will cover both body and each catch[i]
+       *  Additional finally copies are created on the emission of every RETURN in the try body and exception handlers.
+       *
+       *  This could result in unreachable code which has to be cleaned up later, e.g. if the try and all the exception
+       *  handlers always end in RETURN then there will be no "normal" flow out of the try/catch/finally.
+       *  Later reachability analysis will remove unreachable code.
        */
       def Try(body: Context => Context,
               handlers: List[(Symbol, TypeKind, Context => Context)],
               finalizer: Tree,
-              tree: Tree) = if (forMSIL) TryMsil(body, handlers, finalizer, tree) else {
+              tree: Tree) = {
 
-        val outerCtx = this.dup       // context for generating exception handlers, covered by finalizer
+        val outerCtx = this.dup       // context for generating exception handlers, covered by the catch-all finalizer
         val finalizerCtx = this.dup   // context for generating finalizer handler
-        val afterCtx = outerCtx.newBlock
+        val normalExitCtx = outerCtx.newBlock() // context where flow will go on a "normal" (non-return, non-throw) exit from a try or catch handler
+        var normalExitReachable = false
         var tmp: Local = null
         val kind = toTypeKind(tree.tpe)
         val guardResult = kind != UNIT && mayCleanStack(finalizer)
@@ -2099,7 +2033,8 @@ abstract class GenICode extends SubComponent  {
         }
 
         def emitFinalizer(ctx: Context): Context = if (!finalizer.isEmpty) {
-          val ctx1 = finalizerCtx.dup.newBlock
+          val ctx1 = finalizerCtx.dup.newBlock()
+          ctx1.bb killIf ctx.bb.ignore
           ctx.bb.closeWith(JUMP(ctx1.bb))
 
           if (guardResult) {
@@ -2112,107 +2047,53 @@ abstract class GenICode extends SubComponent  {
         } else ctx
 
 
-        val finalizerExh = if (finalizer != EmptyTree) Some({
-          val exh = outerCtx.newExceptionHandler(NoSymbol, toTypeKind(finalizer.tpe), finalizer.pos) // finalizer covers exception handlers
-          this.addActiveHandler(exh)  // .. and body aswell
-          val ctx = finalizerCtx.enterExceptionHandler(exh)
-          val exception = ctx.makeLocal(finalizer.pos, ThrowableClass.tpe, "exc")
-          loadException(ctx, exh, finalizer.pos)
-          ctx.bb.emit(STORE_LOCAL(exception));
-          val ctx1 = genLoad(finalizer, ctx, UNIT);
-          ctx1.bb.emit(LOAD_LOCAL(exception));
-          ctx1.bb.emit(THROW(ThrowableClass));
-          ctx1.bb.enterIgnoreMode;
-          ctx1.bb.close
-          finalizerCtx.endHandler()
-          exh
-        }) else None
-
-        val exhs = handlers.map { case (sym, kind, handler) =>  // def genWildcardHandler(sym: Symbol): (Symbol, TypeKind, Context => Context) =
-            val exh = this.newExceptionHandler(sym, kind, tree.pos)
-            var ctx1 = outerCtx.enterExceptionHandler(exh)
-            ctx1.addFinalizer(finalizer, finalizerCtx)
-            loadException(ctx1, exh, tree.pos)
-            ctx1 = handler(ctx1)
-            // emit finalizer
-            val ctx2 = emitFinalizer(ctx1)
-            ctx2.bb.closeWith(JUMP(afterCtx.bb))
-            outerCtx.endHandler()
-            exh
+        // Generate the catch-all exception handler that deals with uncaught exceptions coming
+        // from the try or exception handlers. It catches the exception, runs the finally code, then rethrows
+        // the exception
+        if (settings.YdisableUnreachablePrevention || !outerCtx.bb.ignore) {
+          if (finalizer != EmptyTree) {
+            val exh = outerCtx.newExceptionHandler(NoSymbol, finalizer.pos) // finalizer covers exception handlers
+            this.addActiveHandler(exh)  // .. and the body as well
+            val exhStartCtx = finalizerCtx.enterExceptionHandler(exh)
+            exhStartCtx.bb killIf outerCtx.bb.ignore
+            val exception = exhStartCtx.makeLocal(finalizer.pos, ThrowableTpe, "exc")
+            loadException(exhStartCtx, exh, finalizer.pos)
+            exhStartCtx.bb.emit(STORE_LOCAL(exception))
+            val exhEndCtx = genLoad(finalizer, exhStartCtx, UNIT)
+            exhEndCtx.bb.emit(LOAD_LOCAL(exception))
+            exhEndCtx.bb.closeWith(THROW(ThrowableClass))
+            exhEndCtx.bb.enterIgnoreMode()
+            finalizerCtx.endHandler()
           }
-        val bodyCtx = this.newBlock
-        if (finalizer != EmptyTree)
-          bodyCtx.addFinalizer(finalizer, finalizerCtx)
-
-        var finalCtx = body(bodyCtx)
-        finalCtx = emitFinalizer(finalCtx)
-
-        outerCtx.bb.closeWith(JUMP(bodyCtx.bb))
-
-        finalCtx.bb.closeWith(JUMP(afterCtx.bb))
 
-        afterCtx
-      }
-
-
-      /** try-catch-finally blocks are actually simpler to emit in MSIL, because there
-       *  is support for `finally` in bytecode.
-       *
-       *  A
-       *    try { .. } catch { .. } finally { .. }
-       *  block is de-sugared into
-       *    try { try { ..} catch { .. } } finally { .. }
-       *
-       *  In ICode `finally` block is represented exactly the same as an exception handler,
-       *  but with `NoSymbol` as the exception class. The covered blocks are all blocks of
-       *  the `try { .. } catch { .. }`.
-       *
-       *  Also, TryMsil does not enter any Finalizers into the `cleanups`, because the
-       *  CLI takes care of running the finalizer when seeing a `leave` statement inside
-       *  a try / catch.
-       */
-      def TryMsil(body: Context => Context,
-                  handlers: List[(Symbol, TypeKind, (Context => Context))],
-                  finalizer: Tree,
-                  tree: Tree) = {
-
-        val outerCtx = this.dup       // context for generating exception handlers, covered by finalizer
-        val finalizerCtx = this.dup   // context for generating finalizer handler
-        val afterCtx = outerCtx.newBlock
-
-        if (finalizer != EmptyTree) {
-          // finalizer is covers try and all catch blocks, i.e.
-          //   try { try { .. } catch { ..} } finally { .. }
-          val exh = outerCtx.newExceptionHandler(NoSymbol, UNIT, tree.pos)
-          this.addActiveHandler(exh)
-          val ctx = finalizerCtx.enterExceptionHandler(exh)
-          loadException(ctx, exh, tree.pos)
-          val ctx1 = genLoad(finalizer, ctx, UNIT)
-          // need jump for the ICode to be valid. MSIL backend will emit `Endfinally` instead.
-          ctx1.bb.closeWith(JUMP(afterCtx.bb))
-          finalizerCtx.endHandler()
-        }
-
-        for (handler <- handlers) {
-          val exh = this.newExceptionHandler(handler._1, handler._2, tree.pos)
-          var ctx1 = outerCtx.enterExceptionHandler(exh)
-          loadException(ctx1, exh, tree.pos)
-          ctx1 = handler._3(ctx1)
-          // msil backend will emit `Leave` to jump out of a handler
-          ctx1.bb.closeWith(JUMP(afterCtx.bb))
-          outerCtx.endHandler()
+          // Generate each exception handler
+          for ((sym, kind, handler) <- handlers) {
+            val exh = this.newExceptionHandler(sym, tree.pos)
+            val exhStartCtx = outerCtx.enterExceptionHandler(exh)
+            exhStartCtx.bb killIf outerCtx.bb.ignore
+            exhStartCtx.addFinalizer(finalizer, finalizerCtx)
+            loadException(exhStartCtx, exh, tree.pos)
+            val exhEndCtx = handler(exhStartCtx)
+            normalExitReachable ||= !exhEndCtx.bb.ignore
+            exhEndCtx.bb.closeWith(JUMP(normalExitCtx.bb))
+            outerCtx.endHandler()
+          }
         }
 
-        val bodyCtx = this.newBlock
+        val bodyCtx = this.newBlock()
+        bodyCtx.bb killIf outerCtx.bb.ignore
+        if (finalizer != EmptyTree)
+          bodyCtx.addFinalizer(finalizer, finalizerCtx)
 
-        val finalCtx = body(bodyCtx)
+        val bodyEndCtx = body(bodyCtx)
 
         outerCtx.bb.closeWith(JUMP(bodyCtx.bb))
 
-        // msil backend will emit `Leave` to jump out of a try-block
-        finalCtx.bb.closeWith(JUMP(afterCtx.bb))
+        normalExitReachable ||= !bodyEndCtx.bb.ignore
+        normalExitCtx.bb killUnless normalExitReachable
+        bodyEndCtx.bb.closeWith(JUMP(normalExitCtx.bb))
 
-        afterCtx
+        emitFinalizer(normalExitCtx)
       }
     }
   }
@@ -2246,7 +2127,7 @@ abstract class GenICode extends SubComponent  {
 
       /** Add an instruction that refers to this label. */
       def addCallingInstruction(i: Instruction) =
-        toPatch = i :: toPatch;
+        toPatch = i :: toPatch
 
       /**
        * Patch the code by replacing pseudo call instructions with
@@ -2301,14 +2182,13 @@ abstract class GenICode extends SubComponent  {
      * by a real JUMP instruction when all labels are resolved.
      */
     abstract class PseudoJUMP(label: Label) extends Instruction {
-      override def toString(): String = "PJUMP " + label.symbol
-
+      override def toString = s"PJUMP(${label.symbol})"
       override def consumed = 0
       override def produced = 0
 
       // register with the given label
       if (!label.anchored)
-        label.addCallingInstruction(this);
+        label.addCallingInstruction(this)
     }
 
     case class PJUMP(whereto: Label) extends PseudoJUMP(whereto)
@@ -2338,7 +2218,6 @@ abstract class GenICode extends SubComponent  {
     val locals: ListBuffer[Local] = new ListBuffer
 
     def add(l: Local)     = locals += l
-    def remove(l: Local)  = locals -= l
 
     /** Return all locals that are in scope. */
     def varsInScope: Buffer[Local] = outer.varsInScope.clone() ++= locals
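
For context, the catch-all handler generated in the hunk above follows the usual lowering of `finally` on the JVM: an exception escaping the body (or one of the handlers) is caught, the finalizer runs, and the exception is rethrown, while the normal exit path runs the finalizer separately via emitFinalizer. A rough source-level sketch of that shape (illustrative only; `lowered` is a made-up helper, not part of GenICode):

    // Rough illustration of `try { body } finally { fin }` as the catch-all handler sees it
    def lowered[A](body: => A, fin: => Unit): A = {
      val result =
        try body
        catch {
          case exc: Throwable =>
            fin          // exceptional path: run the finalizer ...
            throw exc    // ... then rethrow; the handler block then enters ignore mode
        }
      fin                // normal exit: covered separately by emitFinalizer(normalExitCtx)
      result
    }
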
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
index f05def3..0cdf629 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
@@ -9,7 +9,6 @@ package icode
 
 import scala.collection.mutable
 import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.symtab._
 
 abstract class ICodeCheckers {
   val global: Global
@@ -49,7 +48,7 @@ abstract class ICodeCheckers {
    *  @author  Iulian Dragos
    *  @version 1.0, 06/09/2005
    *
-   *  @todo Better checks for <code>MONITOR_ENTER/EXIT</code>
+   *  @todo Better checks for `MONITOR_ENTER/EXIT`
    *        Better checks for local var initializations
    *
    *  @todo Iulian says: I think there's some outdated logic in the checker.
@@ -95,7 +94,7 @@ abstract class ICodeCheckers {
     }
 
     def checkICodes(): Unit = {
-      if (settings.verbose.value)
+      if (settings.verbose)
       println("[[consistency check at the beginning of phase " + globalPhase.name + "]]")
       classes.values foreach check
     }
@@ -103,7 +102,6 @@ abstract class ICodeCheckers {
     private def posStr(p: Position) =
       if (p.isDefined) p.line.toString else "<??>"
 
-    private def indent(s: String, spaces: Int): String = indent(s, " " * spaces)
     private def indent(s: String, prefix: String): String = {
       val lines = s split "\\n"
       lines map (prefix + _) mkString "\n"
@@ -121,11 +119,11 @@ abstract class ICodeCheckers {
       clasz = cls
 
       for (f1 <- cls.fields ; f2 <- cls.fields ; if f1 < f2)
-        if (isConfict(f1, f2, false))
+        if (isConfict(f1, f2, canOverload = false))
           icodeError("Repetitive field name: " + f1.symbol.fullName)
 
       for (m1 <- cls.methods ; m2 <- cls.methods ; if m1 < m2)
-        if (isConfict(m1, m2, true))
+        if (isConfict(m1, m2, canOverload = true))
           icodeError("Repetitive method: " + m1.symbol.fullName)
 
       clasz.methods foreach check
@@ -170,12 +168,11 @@ abstract class ICodeCheckers {
       val preds = bl.predecessors
 
       def hasNothingType(s: TypeStack) = s.nonEmpty && (s.head == NothingReference)
-      def hasNullType(s: TypeStack) = s.nonEmpty && (s.head == NullReference)
 
-      /** XXX workaround #1: one stack empty, the other has BoxedUnit.
-       *  One example where this arises is:
+      /* XXX workaround #1: one stack empty, the other has BoxedUnit.
+       * One example where this arises is:
        *
-       *  def f(b: Boolean): Unit = synchronized { if (b) () }
+       * def f(b: Boolean): Unit = synchronized { if (b) () }
        */
       def allUnits(s: TypeStack)   = s.types forall (_ == BoxedUnitReference)
 
@@ -184,10 +181,10 @@ abstract class ICodeCheckers {
         case (x1, x2) if f(x2)  => x1
       }
 
-      /** XXX workaround #2: different stacks heading into an exception
-       *  handler which will clear them anyway.  Examples where it arises:
+      /* XXX workaround #2: different stacks heading into an exception
+       * handler which will clear them anyway.  Examples where it arises:
        *
-       *  var bippy: Int = synchronized { if (b) 5 else 10 }
+       * var bippy: Int = synchronized { if (b) 5 else 10 }
        */
       def isHandlerBlock() = bl.exceptionHandlerStart
 
@@ -211,7 +208,7 @@ abstract class ICodeCheckers {
               if (s1.length != s2.length) {
                 if (allUnits(s1) && allUnits(s2))
                   workaround("Ignoring mismatched boxed units")
-                else if (isHandlerBlock)
+                else if (isHandlerBlock())
                   workaround("Ignoring mismatched stacks entering exception handler")
                 else
                   throw new CheckerException(incompatibleString)
@@ -236,8 +233,8 @@ abstract class ICodeCheckers {
       }
 
       if (preds.nonEmpty) {
-        in(bl) = (preds map out.apply) reduceLeft meet2;
-        log("Input changed for block: " + bl +" to: " + in(bl));
+        in(bl) = (preds map out.apply) reduceLeft meet2
+        log("Input changed for block: " + bl +" to: " + in(bl))
       }
     }
 
@@ -296,7 +293,7 @@ abstract class ICodeCheckers {
         else prefix + " with initial stack " + initial.types.mkString("[", ", ", "]")
       })
 
-      var stack = new TypeStack(initial)
+      val stack = new TypeStack(initial)
       def checkStack(len: Int) {
         if (stack.length < len)
           ICodeChecker.this.icodeError("Expected at least " + len + " elements on the stack", stack)
@@ -324,14 +321,14 @@ abstract class ICodeCheckers {
       def popStackN(num: Int, instrFn: () => String = defaultInstrPrinter) = {
         List.range(0, num) map { _ =>
           val res = _popStack
-          printStackString(false, res, instrFn())
+          printStackString(isPush = false, res, instrFn())
           res
         }
       }
       def pushStackN(xs: Seq[TypeKind], instrFn: () => String) = {
         xs foreach { x =>
           stack push x
-          printStackString(true, x, instrFn())
+          printStackString(isPush = true, x, instrFn())
         }
       }
 
@@ -339,7 +336,7 @@ abstract class ICodeCheckers {
       def popStack2    = { checkStack(2) ; (popStackN(2): @unchecked) match { case List(x, y) => (x, y) } }
       def popStack3    = { checkStack(3) ; (popStackN(3): @unchecked) match { case List(x, y, z) => (x, y, z) } }
 
-      /** Called by faux instruction LOAD_EXCEPTION to wipe out the stack. */
+      /* Called by faux instruction LOAD_EXCEPTION to wipe out the stack. */
       def clearStack() = {
         if (stack.nonEmpty)
           logChecker("Wiping out the " + stack.length + " element stack for exception handler: " + stack)
@@ -354,7 +351,7 @@ abstract class ICodeCheckers {
       def typeError(k1: TypeKind, k2: TypeKind) {
         icodeError("\n  expected: " + k1 + "\n     found: " + k2)
       }
-      def isSubtype(k1: TypeKind, k2: TypeKind) = (k1 <:< k2) || {
+      def isSubtype(k1: TypeKind, k2: TypeKind) = (k1 isAssignabledTo k2) || {
         import platform.isMaybeBoxed
 
         (k1, k2) match {
@@ -369,11 +366,6 @@ abstract class ICodeCheckers {
         }
       }
 
-      /** Return true if k1 is a subtype of any of the following types,
-       *  according to the somewhat relaxed subtyping standards in effect here.
-       */
-      def isOneOf(k1: TypeKind, kinds: TypeKind*) = kinds exists (k => isSubtype(k1, k))
-
       def subtypeTest(k1: TypeKind, k2: TypeKind): Unit =
         if (isSubtype(k1, k2)) ()
         else typeError(k2, k1)
@@ -381,20 +373,19 @@ abstract class ICodeCheckers {
       for (instr <- b) {
         this.instruction = instr
 
-        def checkLocal(local: Local): Unit = {
-          method lookupLocal local.sym.name getOrElse {
-            icodeError(" " + local + " is not defined in method " + method)
-          }
+        def checkLocal(local: Local) {
+          if ((method lookupLocal local.sym.name).isEmpty)
+            icodeError(s" $local is not defined in method $method")
         }
         def checkField(obj: TypeKind, field: Symbol): Unit = obj match {
           case REFERENCE(sym) =>
             if (sym.info.member(field.name) == NoSymbol)
-              icodeError(" " + field + " is not defined in class " + clasz);
+              icodeError(" " + field + " is not defined in class " + clasz)
           case _ =>
-            icodeError(" expected reference type, but " + obj + " found");
+            icodeError(" expected reference type, but " + obj + " found")
         }
 
-        /** Checks that tpe is a subtype of one of the allowed types */
+        /* Checks that tpe is a subtype of one of the allowed types */
         def checkType(tpe: TypeKind, allowed: TypeKind*) = (
           if (allowed exists (k => isSubtype(tpe, k))) ()
           else icodeError(tpe + " is not one of: " + allowed.mkString("{ ", ", ", " }"))
@@ -402,16 +393,14 @@ abstract class ICodeCheckers {
         def checkNumeric(tpe: TypeKind) =
           checkType(tpe, BYTE, CHAR, SHORT, INT, LONG, FLOAT, DOUBLE)
 
-        /** Checks that the 2 topmost elements on stack are of the
-         *  kind TypeKind.
-         */
+        /* Checks that the 2 topmost elements on stack are of the kind TypeKind. */
         def checkBinop(kind: TypeKind) {
           val (a, b) = popStack2
           checkType(a, kind)
           checkType(b, kind)
         }
 
-        /** Check that arguments on the stack match method params. */
+        /* Check that arguments on the stack match method params. */
         def checkMethodArgs(method: Symbol) {
           val params = method.info.paramTypes
           checkStack(params.length)
@@ -421,21 +410,18 @@ abstract class ICodeCheckers {
           )
         }
 
-        /** Checks that the object passed as receiver has a method
-         *  <code>method</code> and that it is callable from the current method.
-         *
-         *  @param receiver ...
-         *  @param method   ...
+        /* Checks that the object passed as receiver has a method
+         * `method` and that it is callable from the current method.
          */
         def checkMethod(receiver: TypeKind, method: Symbol) =
           receiver match {
             case REFERENCE(sym) =>
               checkBool(sym.info.member(method.name) != NoSymbol,
-                        "Method " + method + " does not exist in " + sym.fullName);
+                        "Method " + method + " does not exist in " + sym.fullName)
               if (method.isPrivate)
                 checkBool(method.owner == clasz.symbol,
                           "Cannot call private method of " + method.owner.fullName
-                          + " from " + clasz.symbol.fullName);
+                          + " from " + clasz.symbol.fullName)
               else if (method.isProtected) {
                 val isProtectedOK = (
                   (clasz.symbol isSubClass method.owner) ||
@@ -444,7 +430,7 @@ abstract class ICodeCheckers {
 
                 checkBool(isProtectedOK,
                           "Cannot call protected method of " + method.owner.fullName
-                          + " from " + clasz.symbol.fullName);
+                          + " from " + clasz.symbol.fullName)
               }
 
             case ARRAY(_) =>
@@ -458,7 +444,7 @@ abstract class ICodeCheckers {
         def checkBool(cond: Boolean, msg: String) =
           if (!cond) icodeError(msg)
 
-        if (settings.debug.value) {
+        if (settings.debug) {
           log("PC: " + instr)
           log("stack: " + stack)
           log("================")
@@ -476,8 +462,8 @@ abstract class ICodeCheckers {
                 subtypeTest(elem, kind)
                 pushStack(elem)
               case (a, b) =>
-                icodeError(" expected and INT and a array reference, but " +
-                    a + ", " + b + " found");
+                icodeError(" expected an INT and an array reference, but " +
+                    a + ", " + b + " found")
             }
 
          case LOAD_LOCAL(local) =>
@@ -495,10 +481,10 @@ abstract class ICodeCheckers {
 
          case LOAD_MODULE(module) =>
            checkBool((module.isModule || module.isModuleClass),
-                     "Expected module: " + module + " flags: " + Flags.flagsToString(module.flags));
-           pushStack(toTypeKind(module.tpe));
+                     "Expected module: " + module + " flags: " + module.flagString)
+           pushStack(toTypeKind(module.tpe))
 
-         case STORE_THIS(kind) =>
+          case STORE_THIS(kind) =>
            val actualType = popStack
            if (actualType.isReferenceType) subtypeTest(actualType, kind)
            else icodeError("Expected this reference but found: " + actualType)
@@ -510,7 +496,7 @@ abstract class ICodeCheckers {
                subtypeTest(k, elem)
              case (a, b, c) =>
                 icodeError(" expected and array reference, and int and " + kind +
-                      " but " + a + ", " + b + ", " + c + " found");
+                      " but " + a + ", " + b + ", " + c + " found")
            }
 
          case STORE_LOCAL(local) =>
@@ -606,7 +592,7 @@ abstract class ICodeCheckers {
              case x if style.hasInstance  => x + 1
              case x                       => x
            }
-           if (style == Static(true))
+           if (style == Static(onInstance = true))
              checkBool(method.isPrivate || method.isConstructor, "Static call to non-private method.")
 
           checkStack(paramCount)
@@ -665,7 +651,7 @@ abstract class ICodeCheckers {
           case RETURN(kind) =>
             val top = popStack
             if (kind.isValueType) checkType(top, kind)
-            else checkBool(!top.isValueType, "" + kind + " is a reference type, but " + top + " is not");
+            else checkBool(!top.isValueType, "" + kind + " is a reference type, but " + top + " is not")
 
           case THROW(clasz) =>
             checkType(popStack, toTypeKind(clasz.tpe))
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
index 9320108..bc35a9e 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
@@ -8,10 +8,9 @@ package backend
 package icode
 
 import java.io.PrintWriter
-import scala.collection.mutable
-import scala.tools.nsc.symtab._
 import analysis.{ Liveness, ReachingDefinitions }
 import scala.tools.nsc.symtab.classfile.ICodeReader
+import scala.reflect.io.AbstractFile
 
 /** Glue together ICode parts.
  *
@@ -30,14 +29,14 @@ abstract class ICodes extends AnyRef
                                  with Repository
 {
   val global: Global
-  import global.{ log, definitions, settings, perRunCaches }
+  import global.{ log, definitions, settings, perRunCaches, devWarning }
 
   /** The ICode representation of classes */
   val classes = perRunCaches.newMap[global.Symbol, IClass]()
 
   /** Debugging flag */
   def shouldCheckIcode = settings.check contains global.genicode.phaseName
-  def checkerDebug(msg: String) = if (shouldCheckIcode && global.opt.debug) println(msg)
+  def checkerDebug(msg: String) = if (shouldCheckIcode && global.settings.debug) println(msg)
 
   /** The ICode linearizer. */
   val linearizer: Linearizer = settings.Xlinearizer.value match {
@@ -84,7 +83,7 @@ abstract class ICodes extends AnyRef
         // Something is leaving open/empty blocks around (see SI-4840) so
         // let's not kill the deal unless it's nonempty.
         if (b.isEmpty) {
-          log("!!! Found open but empty block while inlining " + m + ": removing from block list.")
+          devWarning(s"Found open but empty block while inlining $m: removing from block list.")
           m.code removeBlock b
         }
         else dumpMethodAndAbort(m, b)
@@ -106,10 +105,15 @@ abstract class ICodes extends AnyRef
   lazy val NullReference: TypeKind      = REFERENCE(definitions.NullClass)
   lazy val ObjectReference: TypeKind    = REFERENCE(definitions.ObjectClass)
   lazy val StringReference: TypeKind    = REFERENCE(definitions.StringClass)
-  lazy val ThrowableReference: TypeKind = REFERENCE(definitions.ThrowableClass)
 
   object icodeReader extends ICodeReader {
     lazy val global: ICodes.this.global.type = ICodes.this.global
+    import global._
+    def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol =
+      global.loaders.lookupMemberAtTyperPhaseIfPossible(sym, name)
+    lazy val symbolTable: global.type = global
+    lazy val loaders: global.loaders.type = global.loaders
+    def classPath: util.ClassPath[AbstractFile] = ICodes.this.global.platform.classPath
   }
 
   /** A phase which works on icode. */
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
index a38eab4..54be9d1 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
@@ -4,11 +4,11 @@
  */
 
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package backend
 package icode
 
-import scala.tools.nsc.ast._
 import scala.collection.{ mutable, immutable }
 import mutable.ListBuffer
 
@@ -36,15 +36,15 @@ trait Linearizers {
     var blocks: List[BasicBlock] = Nil
 
     def linearize(m: IMethod): List[BasicBlock] = {
-      val b = m.startBlock;
-      blocks = Nil;
+      val b = m.startBlock
+      blocks = Nil
 
       run {
-        worklist pushAll (m.exh map (_.startBlock));
-        worklist.push(b);
+        worklist pushAll (m.exh map (_.startBlock))
+        worklist.push(b)
       }
 
-      blocks.reverse;
+      blocks.reverse
     }
 
     def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
@@ -56,30 +56,30 @@ trait Linearizers {
     /** Linearize another subtree and append it to the existing blocks. */
     def linearize(startBlock: BasicBlock): List[BasicBlock] = {
       //blocks = startBlock :: Nil;
-      run( { worklist.push(startBlock); } );
-      blocks.reverse;
+      run( { worklist.push(startBlock); } )
+      blocks.reverse
     }
 
     def processElement(b: BasicBlock) =
       if (b.nonEmpty) {
-        add(b);
+        add(b)
         b.lastInstruction match {
           case JUMP(whereto) =>
-            add(whereto);
+            add(whereto)
           case CJUMP(success, failure, _, _) =>
-            add(success);
-            add(failure);
+            add(success)
+            add(failure)
           case CZJUMP(success, failure, _, _) =>
-            add(success);
-            add(failure);
+            add(success)
+            add(failure)
           case SWITCH(_, labels) =>
-            add(labels);
-          case RETURN(_) => ();
-          case THROW(clasz) =>   ();
+            add(labels)
+          case RETURN(_) => ()
+          case THROW(clasz) =>   ()
         }
       }
 
-    def dequeue: Elem = worklist.pop;
+    def dequeue: Elem = worklist.pop()
 
     /**
      * Prepend b to the list, if not already scheduled.
@@ -89,25 +89,25 @@ trait Linearizers {
       if (blocks.contains(b))
         ()
       else {
-        blocks = b :: blocks;
-        worklist push b;
+        blocks = b :: blocks
+        worklist push b
       }
     }
 
-    def add(bs: List[BasicBlock]): Unit = bs foreach add;
+    def add(bs: List[BasicBlock]): Unit = bs foreach add
   }
 
   /**
    * Linearize code using a depth first traversal.
    */
   class DepthFirstLinerizer extends Linearizer {
-    var blocks: List[BasicBlock] = Nil;
+    var blocks: List[BasicBlock] = Nil
 
     def linearize(m: IMethod): List[BasicBlock] = {
-      blocks = Nil;
+      blocks = Nil
 
-      dfs(m.startBlock);
-      m.exh foreach (b => dfs(b.startBlock));
+      dfs(m.startBlock)
+      m.exh foreach (b => dfs(b.startBlock))
 
       blocks.reverse
     }
@@ -120,7 +120,7 @@ trait Linearizers {
 
     def dfs(b: BasicBlock): Unit =
       if (b.nonEmpty && add(b))
-        b.successors foreach dfs;
+        b.successors foreach dfs
 
     /**
      * Prepend b to the list, if not already scheduled.
@@ -129,7 +129,7 @@ trait Linearizers {
      */
     def add(b: BasicBlock): Boolean =
       !(blocks contains b) && {
-        blocks = b :: blocks;
+        blocks = b :: blocks
         true
       }
   }
@@ -145,12 +145,12 @@ trait Linearizers {
     val added = new mutable.BitSet
 
     def linearize(m: IMethod): List[BasicBlock] = {
-      blocks = Nil;
+      blocks = Nil
       visited.clear()
-      added.clear;
+      added.clear()
 
-      m.exh foreach (b => rpo(b.startBlock));
-      rpo(m.startBlock);
+      m.exh foreach (b => rpo(b.startBlock))
+      rpo(m.startBlock)
 
       // if the start block has predecessors, it won't be the first one
       // in the linearization, so we need to enforce it here
@@ -171,7 +171,7 @@ trait Linearizers {
 
     def rpo(b: BasicBlock): Unit =
       if (b.nonEmpty && !visited(b)) {
-        visited += b;
+        visited += b
         b.successors foreach rpo
         add(b)
       }
@@ -185,7 +185,7 @@ trait Linearizers {
 
       if (!added(b.label)) {
         added += b.label
-        blocks = b :: blocks;
+        blocks = b :: blocks
       }
     }
   }
@@ -198,142 +198,4 @@ trait Linearizers {
     def linearize(m: IMethod): List[BasicBlock] = m.blocks
     def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = sys.error("not implemented")
   }
-
-  /** The MSIL linearizer is used only for methods with at least one exception handler.
-   *  It makes sure that all the blocks belonging to a `try`, `catch` or `finally` block
-   *  are emitted in an order that allows the lexical nesting of try-catch-finally, just
-   *  like in the source code.
-   */
-  class MSILLinearizer extends Linearizer {
-    /** The MSIL linearizer first calls a NormalLInearizer. This is because the ILGenerator checks
-     *  the stack size before emitting instructions. For instance, to emit a `store`, there needs
-     *  to be some value on the stack. This can blow up in situations like this:
-     *       ...
-     *       jump 3
-     *    4: store_local 0
-     *       jump 5
-     *    3: load_value
-     *       jump 4
-     *    5: ...
-     *  here, 3 must be scheduled first.
-     *
-     *  The NormalLinearizer also removes dead blocks (blocks without predecessor). This is important
-     *  in the following example:
-     *     try { throw new Exception }
-     *     catch { case e => throw e }
-     *  which adds a dead block containing just a "throw" (which, again, would blow up code generation
-     *  because of the stack size; there's no value on the stack when emitting that `throw`)
-     */
-    val normalLinearizer = new NormalLinearizer()
-
-    def linearize(m: IMethod): List[BasicBlock] = {
-
-      val handlersByCovered = m.exh.groupBy(_.covered)
-
-      // number of basic blocks covered by the entire try-catch expression
-      def size(covered: scala.collection.immutable.Set[BasicBlock]) = {
-        val hs = handlersByCovered(covered)
-        covered.size + (hs :\ 0)((h, s) => h.blocks.length + s)
-      }
-
-      val tryBlocks = handlersByCovered.keys.toList sortBy size
-      var result    = normalLinearizer.linearize(m)
-      val frozen    = mutable.HashSet[BasicBlock](result.head)
-
-      for (tryBlock <- tryBlocks) {
-        result = groupBlocks(m, result, handlersByCovered(tryBlock), frozen)
-      }
-      result
-    }
-
-    /** @param handlers a list of handlers covering the same blocks (same try, multiple catches)
-     *  @param frozen blocks can't be moved (fist block of a method, blocks directly following a try-catch)
-     */
-    def groupBlocks(method: IMethod, blocks: List[BasicBlock], handlers: List[ExceptionHandler], frozen: mutable.HashSet[BasicBlock]) = {
-      assert(blocks.head == method.startBlock, method)
-
-      // blocks before the try, and blocks for the try
-      val beforeAndTry = new ListBuffer[BasicBlock]()
-      // blocks for the handlers
-      val catches = handlers map (_ => new ListBuffer[BasicBlock]())
-      // blocks to be put at the end
-      val after = new ListBuffer[BasicBlock]()
-
-      var beforeTry = true
-      val head = handlers.head
-
-      for (b <- blocks) {
-        if (head covers b) {
-          beforeTry = false
-          beforeAndTry += b
-        } else {
-          val handlerIndex = handlers.indexWhere(_.blocks.contains(b))
-          if (handlerIndex >= 0) {
-            catches(handlerIndex) += b
-          } else if (beforeTry) {
-            beforeAndTry += b
-          } else {
-            after += b
-          }
-        }
-      }
-
-      // reorder the blocks in "catches" so that the "firstBlock" is actually first
-      (catches, handlers).zipped foreach { (lb, handler) =>
-        lb -= handler.startBlock
-        handler.startBlock +=: lb
-      }
-
-      // The first block emitted after a try-catch must be the one that the try / catch
-      // blocks jump to (because in msil, these jumps cannot be emitted manually)
-      var firstAfter: Option[BasicBlock] = None
-
-      // Find the (hopefully) unique successor, look at the try and all catch blocks
-      var blks = head.covered.toList :: handlers.map(_.blocks)
-      while (firstAfter.isEmpty && !blks.isEmpty) {
-        val b = blks.head
-        blks = blks.tail
-
-        val leaving = leavingBlocks(b)
-        // no leaving blocks when the try or catch ends with THROW or RET
-        if (!leaving.isEmpty) {
-          assert(leaving.size <= 1, leaving)
-          firstAfter = Some(leaving.head)
-        }
-      }
-      if (firstAfter.isDefined) {
-        val b = firstAfter.get
-        if (frozen(b)) {
-          assert(after contains b, b +", "+ method)
-        } else {
-          frozen += b
-          if (beforeAndTry contains b) {
-            beforeAndTry -= b
-          } else {
-            assert(after contains b, after)
-            after -= b
-          }
-          b +=: after
-        }
-      }
-
-      for (lb <- catches) { beforeAndTry ++= lb }
-      beforeAndTry ++= after
-      beforeAndTry.toList
-    }
-
-    /** Returns all direct successors of `blocks` wich are not part
-     *  that list, i.e. successors outside the `blocks` list.
-     */
-    private def leavingBlocks(blocks: List[BasicBlock]) = {
-      val res = new mutable.HashSet[BasicBlock]()
-      for (b <- blocks; s <- b.directSuccessors; if (!blocks.contains(s)))
-        res += s
-      res
-    }
-
-    def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
-      sys.error("not implemented")
-    }
-  }
 }
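
The ReversePostOrderLinearizer kept above orders blocks by visiting successors first and prepending each block once its successors have been handled. A minimal standalone sketch of that traversal, under the assumption of a toy `Block` type (not the compiler's BasicBlock):

    final case class Block(label: Int, var successors: List[Block] = Nil)

    def reversePostOrder(start: Block): List[Block] = {
      val visited = scala.collection.mutable.Set[Int]()
      var order: List[Block] = Nil            // prepending after the recursive calls yields reverse post-order
      def rpo(b: Block): Unit =
        if (visited.add(b.label)) {           // add returns false if the block was already visited
          b.successors foreach rpo
          order = b :: order
        }
      rpo(start)
      order
    }
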
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 00bcf60..6414658 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -3,14 +3,13 @@
  * @author  Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package backend
 package icode
 
-import java.io.PrintWriter
 import scala.collection.{ mutable, immutable }
 import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
-import symtab.Flags.{ DEFERRED }
 
 trait ReferenceEquality {
   override def hashCode = System.identityHashCode(this)
@@ -22,7 +21,7 @@ trait Members {
 
   import global._
 
-  object NoCode extends Code(null, "NoCode") {
+  object NoCode extends Code(null, TermName("NoCode")) {
     override def blocksList: List[BasicBlock] = Nil
   }
 
@@ -30,8 +29,8 @@ trait Members {
    * This class represents the intermediate code of a method or
    * other multi-block piece of code, like exception handlers.
    */
-  class Code(method: IMethod, name: String) {
-    def this(method: IMethod) = this(method, method.symbol.decodedName.toString.intern)
+  class Code(method: IMethod, name: Name) {
+    def this(method: IMethod) = this(method, method.symbol.name)
     /** The set of all blocks */
     val blocks = mutable.ListBuffer[BasicBlock]()
 
@@ -48,27 +47,33 @@ trait Members {
 
     def touched = _touched
     def touched_=(b: Boolean): Unit = {
-      if (b)
-        blocks foreach (_.touched = true)
+      @annotation.tailrec def loop(xs: List[BasicBlock]) {
+        xs match {
+          case Nil     =>
+          case x :: xs => x.touched = true ; loop(xs)
+        }
+      }
+      if (b) loop(blocks.toList)
 
       _touched = b
     }
 
     // Constructor code
-    startBlock = newBlock
+    startBlock = newBlock()
 
     def removeBlock(b: BasicBlock) {
-      if (settings.debug.value) {
-        assert(blocks forall (p => !(p.successors contains b)),
-          "Removing block that is still referenced in method code " + b + "preds: " + b.predecessors
-        )
-        assert(b != startBlock || b.successors.length == 1,
-          "Removing start block with more than one successor."
-        )
+      if (settings.debug) {
+        // only do this sanity check when debug is turned on because it's moderately expensive
+        val referers = blocks filter (_.successors contains b)
+        assert(referers.isEmpty, s"Trying to remove block $b (with preds ${b.predecessors.mkString}) but it is still referred to from block(s) ${referers.mkString}")
       }
 
-      if (b == startBlock)
+      if (b == startBlock) {
+        assert(b.successors.length == 1,
+          s"Removing start block ${b} with ${b.successors.length} successors (${b.successors.mkString})."
+        )
         startBlock = b.successors.head
+      }
 
       blocks -= b
       assert(!blocks.contains(b))
@@ -77,7 +82,7 @@ trait Members {
     }
 
     /** This methods returns a string representation of the ICode */
-    override def toString = "ICode '" + name + "'";
+    override def toString = "ICode '" + name.decoded + "'"
 
     /* Compute a unique new label */
     def nextLabel: Int = {
@@ -89,8 +94,8 @@ trait Members {
      */
     def newBlock(): BasicBlock = {
       touched = true
-      val block = new BasicBlock(nextLabel, method);
-      blocks += block;
+      val block = new BasicBlock(nextLabel, method)
+      blocks += block
       block
     }
   }
@@ -103,6 +108,14 @@ trait Members {
       if (symbol eq other.symbol) 0
       else if (symbol isLess other.symbol) -1
       else 1
+
+    override def equals(other: Any): Boolean =
+      other match {
+        case other: IMember => (this compare other) == 0
+        case _ => false
+      }
+
+    override def hashCode = symbol.##
   }
 
   /** Represent a class in ICode */
@@ -112,25 +125,23 @@ trait Members {
     var cunit: CompilationUnit = _
 
     def addField(f: IField): this.type = {
-      fields = f :: fields;
+      fields = f :: fields
       this
     }
 
     def addMethod(m: IMethod): this.type = {
-      methods = m :: methods;
+      methods = m :: methods
       this
     }
 
     def setCompilationUnit(unit: CompilationUnit): this.type = {
-      this.cunit = unit;
+      this.cunit = unit
       this
     }
 
     override def toString() = symbol.fullName
 
-    def lookupField(s: Symbol)  = fields find (_.symbol == s)
     def lookupMethod(s: Symbol) = methods find (_.symbol == s)
-    def lookupMethod(s: Name)   = methods find (_.symbol.name == s)
 
     /* returns this methods static ctor if it has one. */
     def lookupStaticCtor: Option[IMethod] = methods find (_.symbol.isStaticConstructor)
@@ -154,14 +165,13 @@ trait Members {
   class IMethod(val symbol: Symbol) extends IMember {
     var code: Code = NoCode
 
-    def newBlock() = code.newBlock
+    def newBlock() = code.newBlock()
     def startBlock = code.startBlock
     def lastBlock  = { assert(blocks.nonEmpty, symbol); blocks.last }
     def blocks = code.blocksList
     def linearizedBlocks(lin: Linearizer = self.linearizer): List[BasicBlock] = lin linearize this
 
     def foreachBlock[U](f: BasicBlock  => U): Unit = blocks foreach f
-    def foreachInstr[U](f: Instruction => U): Unit = foreachBlock(_.toList foreach f)
 
     var native = false
 
@@ -181,7 +191,7 @@ trait Members {
 
     def hasCode = code ne NoCode
     def setCode(code: Code): IMethod = {
-      this.code = code;
+      this.code = code
       this
     }
 
@@ -199,7 +209,6 @@ trait Members {
       }
 
     def addLocals(ls: List[Local]) = ls foreach addLocal
-    def addParams(as: List[Local]) = as foreach addParam
 
     def lookupLocal(n: Name): Option[Local]     = locals find (_.sym.name == n)
     def lookupLocal(sym: Symbol): Option[Local] = locals find (_.sym == sym)
@@ -214,28 +223,7 @@ trait Members {
 
     override def toString() = symbol.fullName
 
-    def matchesSignature(other: IMethod) = {
-      (symbol.name == other.symbol.name) &&
-      (params corresponds other.params)(_.kind == _.kind) &&
-      (returnType == other.returnType)
-    }
-
     import opcodes._
-    def checkLocals(): Unit = {
-      def localsSet = (code.blocks flatMap { bb =>
-        bb.iterator collect {
-          case LOAD_LOCAL(l)  => l
-          case STORE_LOCAL(l) => l
-        }
-      }).toSet
-
-      if (hasCode) {
-        log("[checking locals of " + this + "]")
-        locals filterNot localsSet foreach { l =>
-          log("Local " + l + " is not declared in " + this)
-        }
-      }
-    }
 
     /** Merge together blocks that have a single successor which has a
      * single predecessor. Exception handlers are taken into account (they
@@ -247,10 +235,10 @@ trait Members {
       val nextBlock: mutable.Map[BasicBlock, BasicBlock] = mutable.HashMap.empty
       for (b <- code.blocks.toList
         if b.successors.length == 1;
-        succ = b.successors.head;
-        if succ ne b;
-        if succ.predecessors.length == 1;
-        if succ.predecessors.head eq b;
+        succ = b.successors.head
+        if succ ne b
+        if succ.predecessors.length == 1
+        if succ.predecessors.head eq b
         if !(exh.exists { (e: ExceptionHandler) =>
             (e.covers(succ) && !e.covers(b)) || (e.covers(b) && !e.covers(succ)) })) {
           nextBlock(b) = succ
@@ -259,10 +247,10 @@ trait Members {
       var bb = code.startBlock
       while (!nextBlock.isEmpty) {
         if (nextBlock.isDefinedAt(bb)) {
-          bb.open
+          bb.open()
           var succ = bb
           do {
-            succ = nextBlock(succ);
+            succ = nextBlock(succ)
             val lastInstr = bb.lastInstruction
             /* Ticket SI-5672
              * Besides removing the control-flow instruction at the end of `bb` (usually a JUMP), we have to pop any values it pushes.
@@ -273,7 +261,7 @@ trait Members {
             val oldTKs = lastInstr.consumedTypes
             assert(lastInstr.consumed == oldTKs.size, "Someone forgot to override consumedTypes() in " +  lastInstr)
 
-              bb.removeLastInstruction
+              bb.removeLastInstruction()
               for(tk <- oldTKs.reverse) { bb.emit(DROP(tk), lastInstr.pos) }
               succ.toList foreach { i => bb.emit(i, i.pos) }
               code.removeBlock(succ)
@@ -281,9 +269,9 @@ trait Members {
 
             nextBlock -= bb
           } while (nextBlock.isDefinedAt(succ))
-          bb.close
+          bb.close()
         } else
-          bb = nextBlock.keysIterator.next
+          bb = nextBlock.keysIterator.next()
       }
       checkValid(this)
     }
@@ -298,15 +286,6 @@ trait Members {
   class Local(val sym: Symbol, val kind: TypeKind, val arg: Boolean) {
     var index: Int = -1
 
-    /** Starting PC for this local's visibility range. */
-    var start: Int = _
-
-    /** Ending PC for this local's visibility range. */
-    var end: Int = _
-
-    /** PC-based ranges for this local variable's visibility */
-    var ranges: List[(Int, Int)] = Nil
-
     override def equals(other: Any): Boolean = other match {
       case x: Local => sym == x.sym
       case _        => false
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index a3a0edb..076f84c 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -3,13 +3,11 @@
  * @author  Martin Odersky
  */
 
-
-
-package scala.tools.nsc
+package scala
+package tools.nsc
 package backend
 package icode
 
-import scala.tools.nsc.ast._
 import scala.reflect.internal.util.{Position,NoPosition}
 
 /*
@@ -67,7 +65,7 @@ import scala.reflect.internal.util.{Position,NoPosition}
  * in the source files.
  */
 trait Opcodes { self: ICodes =>
-  import global.{Symbol, NoSymbol, Type, Name, Constant};
+  import global.{Symbol, NoSymbol, Name, Constant}
 
   // categories of ICode instructions
   final val localsCat =  1
@@ -111,17 +109,11 @@ trait Opcodes { self: ICodes =>
     // Vlad: I wonder why we keep producedTypes around -- it looks like an useless thing to have
     def producedTypes: List[TypeKind] = Nil
 
-    /** This method returns the difference of size of the stack when the instruction is used */
-    def difference = produced-consumed
-
     /** The corresponding position in the source file */
     private var _pos: Position = NoPosition
 
     def pos: Position = _pos
 
-    /** Used by dead code elimination. */
-    var useful: Boolean = false
-
     def setPos(p: Position): this.type = {
       _pos = p
       this
@@ -133,13 +125,6 @@ trait Opcodes { self: ICodes =>
   }
 
   object opcodes {
-
-    def mayThrow(i: Instruction): Boolean = i match {
-      case LOAD_LOCAL(_) | STORE_LOCAL(_) | CONSTANT(_) | THIS(_) | CZJUMP(_, _, _, _)
-              | DROP(_) | DUP(_) | RETURN(_) | LOAD_EXCEPTION(_) | JUMP(_) | CJUMP(_, _, _, _) => false
-      case _ => true
-    }
-
     /** Loads "this" on top of the stack.
      * Stack: ...
      *    ->: ...:ref
@@ -211,7 +196,7 @@ trait Opcodes { self: ICodes =>
     case class LOAD_FIELD(field: Symbol, isStatic: Boolean) extends Instruction {
       /** Returns a string representation of this instruction */
       override def toString(): String =
-        "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString());
+        "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString())
 
       override def consumed = if (isStatic) 0 else 1
       override def produced = 1
@@ -273,16 +258,17 @@ trait Opcodes { self: ICodes =>
     case class STORE_FIELD(field: Symbol, isStatic: Boolean) extends Instruction {
       /** Returns a string representation of this instruction */
       override def toString(): String =
-        "STORE_FIELD "+field + (if (isStatic) " (static)" else " (dynamic)");
+        "STORE_FIELD "+field + (if (isStatic) " (static)" else " (dynamic)")
 
-      override def consumed = if(isStatic) 1 else 2;
-      override def produced = 0;
+      override def consumed = if(isStatic) 1 else 2
+
+      override def produced = 0
 
       override def consumedTypes =
         if (isStatic)
           toTypeKind(field.tpe) :: Nil
         else
-          REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil;
+          REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil
 
       override def category = fldsCat
     }
@@ -409,19 +395,19 @@ trait Opcodes { self: ICodes =>
 
       override def category = mthdsCat
     }
-    
+
     /**
      * A place holder entry that allows us to parse class files with invoke dynamic
      * instructions. Because the compiler doesn't yet really understand the
      * behavior of invokeDynamic, this op acts as a poison pill. Any attempt to analyze
      * this instruction will cause a failure. The only optimization that
      * should ever look at non-Scala generated icode is the inliner, and it
-     * has been modified to not examine any method with invokeDynamic 
+     * has been modified to not examine any method with invokeDynamic
      * instructions. So if this poison pill ever causes problems then
      * there's been a serious misunderstanding
      */
     // TODO do the real thing
-    case class INVOKE_DYNAMIC(poolEntry: Char) extends Instruction {
+    case class INVOKE_DYNAMIC(poolEntry: Int) extends Instruction {
       private def error = sys.error("INVOKE_DYNAMIC is not fully implemented and should not be analyzed")
       override def consumed = error
       override def produced = error
@@ -455,10 +441,12 @@ trait Opcodes { self: ICodes =>
      */
     case class NEW(kind: REFERENCE) extends Instruction {
       /** Returns a string representation of this instruction */
-      override def toString(): String = "NEW "+ kind;
+      override def toString(): String = "NEW "+ kind
+
+      override def consumed = 0
+
+      override def produced = 1
 
-      override def consumed = 0;
-      override def produced = 1;
       override def producedTypes = kind :: Nil
 
       /** The corresponding constructor call. */
@@ -474,11 +462,13 @@ trait Opcodes { self: ICodes =>
      */
     case class CREATE_ARRAY(elem: TypeKind, dims: Int) extends Instruction {
       /** Returns a string representation of this instruction */
-      override def toString(): String ="CREATE_ARRAY "+elem + " x " + dims;
+      override def toString(): String ="CREATE_ARRAY "+elem + " x " + dims
+
+      override def consumed = dims
 
-      override def consumed = dims;
       override def consumedTypes = List.fill(dims)(INT)
-      override def produced = 1;
+      override def produced = 1
+
       override def producedTypes = ARRAY(elem) :: Nil
 
       override def category = arraysCat
@@ -567,7 +557,7 @@ trait Opcodes { self: ICodes =>
       override def toString(): String = (
         "CJUMP (" + kind + ")" +
         cond + " ? "+successBlock.label+" : "+failureBlock.label
-      );
+      )
 
       override def consumed = 2
       override def produced = 0
@@ -590,7 +580,7 @@ trait Opcodes { self: ICodes =>
       override def toString(): String = (
         "CZJUMP (" + kind + ")" +
         cond + " ? "+successBlock.label+" : "+failureBlock.label
-      );
+      )
 
       override def consumed = 1
       override def produced = 0
@@ -682,10 +672,11 @@ trait Opcodes { self: ICodes =>
      */
     case class MONITOR_EXIT() extends Instruction {
       /** Returns a string representation of this instruction */
-      override def toString(): String ="MONITOR_EXIT";
+      override def toString(): String ="MONITOR_EXIT"
 
-      override def consumed = 1;
-      override def produced = 0;
+      override def consumed = 1
+
+      override def produced = 0
 
       override def consumedTypes = ObjectReference :: Nil
 
@@ -772,74 +763,5 @@ trait Opcodes { self: ICodes =>
       override def isSuper = true
       override def toString(): String = { "super(" + mix + ")" }
     }
-
-
-    // CLR backend
-
-    case class CIL_LOAD_LOCAL_ADDRESS(local: Local) extends Instruction {
-      /** Returns a string representation of this instruction */
-      override def toString(): String = "CIL_LOAD_LOCAL_ADDRESS "+local  //+isArgument?" (argument)":"";
-
-      override def consumed = 0
-      override def produced = 1
-
-      override def producedTypes = msil_mgdptr(local.kind) :: Nil
-
-      override def category = localsCat
-    }
-
-    case class CIL_LOAD_FIELD_ADDRESS(field: Symbol, isStatic: Boolean) extends Instruction {
-      /** Returns a string representation of this instruction */
-      override def toString(): String =
-        "CIL_LOAD_FIELD_ADDRESS " + (if (isStatic) field.fullName else field.toString)
-
-      override def consumed = if (isStatic) 0 else 1
-      override def produced = 1
-
-      override def consumedTypes = if (isStatic) Nil else REFERENCE(field.owner) :: Nil;
-      override def producedTypes = msil_mgdptr(REFERENCE(field.owner)) :: Nil;
-
-      override def category = fldsCat
-    }
-
-    case class CIL_LOAD_ARRAY_ITEM_ADDRESS(kind: TypeKind) extends Instruction {
-      /** Returns a string representation of this instruction */
-      override def toString(): String = "CIL_LOAD_ARRAY_ITEM_ADDRESS (" + kind + ")"
-
-      override def consumed = 2
-      override def produced = 1
-
-      override def consumedTypes = ARRAY(kind) :: INT :: Nil
-      override def producedTypes = msil_mgdptr(kind) :: Nil
-
-      override def category = arraysCat
-    }
-
-    case class CIL_UNBOX(valueType: TypeKind) extends Instruction {
-      override def toString(): String = "CIL_UNBOX " + valueType
-      override def consumed = 1
-      override def consumedTypes = ObjectReferenceList // actually consumes a 'boxed valueType'
-      override def produced = 1
-      override def producedTypes = msil_mgdptr(valueType) :: Nil
-      override def category = objsCat
-    }
-
-    case class CIL_INITOBJ(valueType: TypeKind) extends Instruction {
-      override def toString(): String = "CIL_INITOBJ " + valueType
-      override def consumed = 1
-      override def consumedTypes = ObjectReferenceList // actually consumes a managed pointer
-      override def produced = 0
-      override def category = objsCat
-    }
-
-    case class CIL_NEWOBJ(method: Symbol) extends Instruction {
-      override def toString(): String = "CIL_NEWOBJ " + hostClass.fullName + method.fullName
-      var hostClass: Symbol = method.owner;
-      override def consumed = method.tpe.paramTypes.length
-      override def consumedTypes = method.tpe.paramTypes map toTypeKind
-      override def produced = 1
-      override def producedTypes = toTypeKind(method.tpe.resultType) :: Nil
-      override def category = objsCat
-    }
   }
 }
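
Each ICode instruction above declares how many operand-stack slots it consumes and produces; checkers and optimizers fold these numbers over a block to track stack depth. A simplified sketch of that bookkeeping (the `Instr` type here is assumed purely for illustration):

    final case class Instr(name: String, consumed: Int, produced: Int)

    def stackDepthAfter(instrs: List[Instr], start: Int = 0): Either[String, Int] =
      instrs.foldLeft[Either[String, Int]](Right(start)) {
        case (Right(depth), i) if depth < i.consumed =>
          Left(s"stack underflow at $i (depth $depth, needs ${i.consumed})")
        case (Right(depth), i) =>
          Right(depth - i.consumed + i.produced)
        case (err, _) => err
      }
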
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
index c857904..f81c42d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
@@ -6,9 +6,9 @@
 
 package scala.tools.nsc
 package backend
-package icode;
+package icode
 
-import java.io.PrintWriter;
+import java.io.PrintWriter
 
 trait Primitives { self: ICodes =>
 
@@ -51,12 +51,12 @@ trait Primitives { self: ICodes =>
   // type : (src) => dst
   // range: src,dst <- { Ix, Ux, Rx }
   // jvm  : i2{l, f, d}, l2{i, f, d}, f2{i, l, d}, d2{i, l, f}, i2{b, c, s}
-  case class Conversion(src: TypeKind, dst: TypeKind) extends Primitive;
+  case class Conversion(src: TypeKind, dst: TypeKind) extends Primitive
 
   // type : (Array[REF]) => I4
   // range: type <- { BOOL, Ix, Ux, Rx, REF }
   // jvm  : arraylength
-  case class ArrayLength(kind: TypeKind) extends Primitive;
+  case class ArrayLength(kind: TypeKind) extends Primitive
 
   // type : (buf,el) => buf
   // range: lf,rg <- { BOOL, Ix, Ux, Rx, REF, STR }
@@ -76,25 +76,10 @@ trait Primitives { self: ICodes =>
 
   /** Pretty printer for primitives */
   class PrimitivePrinter(out: PrintWriter) {
-
     def print(s: String): PrimitivePrinter = {
       out.print(s)
       this
     }
-
-    def print(o: AnyRef): PrimitivePrinter = print(o.toString())
-
-    def printPrimitive(prim: Primitive) = prim match {
-      case Negation(kind) =>
-        print("!")
-
-      case Test(op, kind, zero) =>
-        print(op).print(kind)
-
-      case Comparison(op, kind) =>
-        print(op).print("(").print(kind)
-
-    }
   }
 
   /** This class represents a comparison operation. */
@@ -243,9 +228,9 @@ trait Primitives { self: ICodes =>
 
     /** Returns a string representation of this operation. */
     override def toString(): String = this match {
-      case AND => return "AND"
-      case OR  => return "OR"
-      case XOR => return "XOR"
+      case AND => "AND"
+      case OR  => "OR"
+      case XOR => "XOR"
       case _  => throw new RuntimeException("LogicalOp unknown case")
     }
   }
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
index 6cac641..1fe33f7 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
@@ -8,13 +8,9 @@ package backend
 package icode
 
 import java.io.PrintWriter
-import scala.tools.nsc.symtab.Flags
-import scala.reflect.internal.util.Position
 
 trait Printers { self: ICodes =>
   import global._
-  import global.icodes.opcodes._
-  import global.icodes._
 
   class TextPrinter(writer: PrintWriter, lin: Linearizer) {
     private var margin = 0
@@ -31,15 +27,15 @@ trait Printers { self: ICodes =>
     def print(o: Any) { print(o.toString()) }
 
     def println(s: String) {
-      print(s);
-      println
+      print(s)
+      println()
     }
 
     def println() {
       out.println()
       var i = 0
       while (i < margin) {
-        print(" ");
+        print(" ")
         i += 1
       }
     }
@@ -57,26 +53,26 @@ trait Printers { self: ICodes =>
     }
 
     def printClass(cls: IClass) {
-      print(cls.symbol.toString()); print(" extends ");
-      printList(cls.symbol.info.parents, ", ");
-      indent; println(" {");
-      println("// fields:");
-      cls.fields.foreach(printField); println;
-      println("// methods");
-      cls.methods.foreach(printMethod);
-      undent; println;
+      print(cls.symbol.toString()); print(" extends ")
+      printList(cls.symbol.info.parents, ", ")
+      indent(); println(" {")
+      println("// fields:")
+      cls.fields.foreach(printField); println()
+      println("// methods")
+      cls.methods.foreach(printMethod)
+      undent(); println()
       println("}")
     }
 
     def printField(f: IField) {
-      print(f.symbol.keyString); print(" ");
-      print(f.symbol.nameString); print(": ");
-      println(f.symbol.info.toString());
+      print(f.symbol.keyString); print(" ")
+      print(f.symbol.nameString); print(": ")
+      println(f.symbol.info.toString())
     }
 
     def printMethod(m: IMethod) {
-      print("def "); print(m.symbol.name);
-      print("("); printList(printParam)(m.params, ", "); print(")");
+      print("def "); print(m.symbol.name)
+      print("("); printList(printParam)(m.params, ", "); print(")")
       print(": "); print(m.symbol.info.resultType)
 
       if (!m.isAbstractMethod) {
@@ -84,40 +80,40 @@ trait Printers { self: ICodes =>
         println("locals: " + m.locals.mkString("", ", ", ""))
         println("startBlock: " + m.startBlock)
         println("blocks: " + m.code.blocks.mkString("[", ",", "]"))
-        println
+        println()
         lin.linearize(m) foreach printBlock
         println("}")
 
-        indent; println("Exception handlers: ")
+        indent(); println("Exception handlers: ")
         m.exh foreach printExceptionHandler
 
-        undent; println
+        undent(); println()
       } else
-        println
+        println()
     }
 
     def printParam(p: Local) {
-      print(p.sym.name); print(": "); print(p.sym.info);
+      print(p.sym.name); print(": "); print(p.sym.info)
       print(" ("); print(p.kind); print(")")
     }
 
     def printExceptionHandler(e: ExceptionHandler) {
-      indent;
-      println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock);
-      println("consisting of blocks: " + e.blocks);
-      undent;
-      println("with finalizer: " + e.finalizer);
-//      linearizer.linearize(e.startBlock) foreach printBlock;
+      indent()
+      println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock)
+      println("consisting of blocks: " + e.blocks)
+      undent()
+      println("with finalizer: " + e.finalizer)
+      //      linearizer.linearize(e.startBlock) foreach printBlock;
     }
 
     def printBlock(bb: BasicBlock) {
       print(bb.label)
       if (bb.loopHeader) print("[loop header]")
-      print(": ");
-      if (settings.debug.value) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString)
-      indent; println
+      print(": ")
+      if (settings.debug) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString)
+      indent(); println()
       bb.toList foreach printInstruction
-      undent; println
+      undent(); println()
     }
 
     def printInstruction(i: Instruction) {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
index e73015c..10d57df 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
@@ -26,17 +26,6 @@ trait Repository {
   /** The icode of the given class, if available */
   def icode(sym: Symbol): Option[IClass] = (classes get sym) orElse (loaded get sym)
 
-  /** The icode of the given class. If not available, it loads
-   *  its bytecode.
-   */
-  def icode(sym: Symbol, force: Boolean): IClass =
-    icode(sym) getOrElse {
-      log("loading " + sym)
-      load(sym)
-      assert(available(sym))
-      loaded(sym)
-    }
-
   /** Load bytecode for given symbol. */
   def load(sym: Symbol): Boolean = {
     try {
@@ -50,7 +39,7 @@ trait Repository {
     } catch {
       case e: Throwable => // possible exceptions are MissingRequirementError, IOException and TypeError -> no better common supertype
         log("Failed to load %s. [%s]".format(sym.fullName, e.getMessage))
-        if (settings.debug.value) { e.printStackTrace }
+        if (settings.debug) { e.printStackTrace }
 
         false
     }
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
index 4f8fda8..a6d0d3b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
@@ -55,7 +55,7 @@ trait TypeKinds { self: ICodes =>
 
     def toType: Type = reversePrimitiveMap get this map (_.tpe) getOrElse {
       this match {
-        case REFERENCE(cls) => cls.tpe
+        case REFERENCE(cls) => cls.tpe_*
         case ARRAY(elem)    => arrayType(elem.toType)
         case _              => abort("Unknown type kind.")
       }
@@ -66,7 +66,6 @@ trait TypeKinds { self: ICodes =>
     def isValueType               = false
     def isBoxedType               = false
     final def isRefOrArrayType    = isReferenceType || isArrayType
-    final def isRefArrayOrBoxType = isRefOrArrayType || isBoxedType
     final def isNothingType       = this == NothingReference
     final def isNullType          = this == NullReference
     final def isInterfaceType     = this match {
@@ -89,10 +88,19 @@ trait TypeKinds { self: ICodes =>
     final def isNumericType: Boolean = isIntegralType | isRealType
 
     /** Simple subtyping check */
-    def <:<(other: TypeKind): Boolean = (this eq other) || (this match {
-      case BOOL | BYTE | SHORT | CHAR => other == INT || other == LONG
-      case _                          => this eq other
-    })
+    def <:<(other: TypeKind): Boolean
+
+    /**
+     * `this` is directly assignable to `other` if no coercion or
+     * casting is needed to convert `this` to `other`. It's a distinct
+     * relationship from <:< because, on the JVM, BOOL, BYTE, CHAR and
+     * SHORT need no coercion to INT; yet even though JVM arrays
+     * are covariant, ARRAY[SHORT] is not a subtype of ARRAY[INT].
+     */
+    final def isAssignabledTo(other: TypeKind): Boolean = other match {
+      case INT  => this.isIntSizedType
+      case _    => this <:< other
+    }
 
     /** Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?) */
     def isWideType: Boolean = false
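
For illustration, the subtyping/assignability split introduced in this hunk can be sketched outside the compiler. The `Kind` model below is a hypothetical stand-in for the ICode `TypeKind`s, not the compiler's own classes:

// Hypothetical miniature model of the TypeKind relations above (not the compiler's classes).
sealed trait Kind {
  def isIntSizedType: Boolean = this match {
    case BoolK | ByteK | CharK | ShortK | IntK => true
    case _                                     => false
  }
  // Strict subtyping: a primitive kind is only a subtype of itself here.
  def <:<(other: Kind): Boolean = this eq other
  // Assignability: int-sized kinds fit an INT slot without coercion.
  final def isAssignableTo(other: Kind): Boolean = other match {
    case IntK => this.isIntSizedType
    case _    => this <:< other
  }
}
case object BoolK  extends Kind
case object ByteK  extends Kind
case object CharK  extends Kind
case object ShortK extends Kind
case object IntK   extends Kind

object AssignabilityDemo extends App {
  println(ShortK isAssignableTo IntK) // true: no coercion needed on the JVM
  println(ShortK <:< IntK)            // false: SHORT is not a subtype of INT
}

On the JVM an int-sized value fits an INT slot without coercion, which is why the assignability check special-cases INT while `<:<` stays strict.
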
@@ -112,10 +120,9 @@ trait TypeKinds { self: ICodes =>
     override def toString = {
       this.getClass.getName stripSuffix "$" dropWhile (_ != '$') drop 1
     }
+    def <:<(other: TypeKind): Boolean = this eq other
   }
 
-  var lubs0 = 0
-
   /**
    * The least upper bound of two typekinds. They have to be either
    * REFERENCE or ARRAY kinds.
@@ -123,24 +130,23 @@ trait TypeKinds { self: ICodes =>
    * The lub is based on the lub of scala types.
    */
   def lub(a: TypeKind, b: TypeKind): TypeKind = {
-    /** The compiler's lub calculation does not order classes before traits.
-     *  This is apparently not wrong but it is inconvenient, and causes the
-     *  icode checker to choke when things don't match up.  My attempts to
-     *  alter the calculation at the compiler level were failures, so in the
-     *  interests of a working icode checker I'm making the adjustment here.
+    /* The compiler's lub calculation does not order classes before traits.
+     * This is apparently not wrong but it is inconvenient, and causes the
+     * icode checker to choke when things don't match up.  My attempts to
+     * alter the calculation at the compiler level were failures, so in the
+     * interests of a working icode checker I'm making the adjustment here.
      *
-     *  Example where we'd like a different answer:
+     * Example where we'd like a different answer:
      *
-     *    abstract class Tom
-     *    case object Bob extends Tom
-     *    case object Harry extends Tom
-     *    List(Bob, Harry)  // compiler calculates "Product with Tom" rather than "Tom with Product"
+     *   abstract class Tom
+     *   case object Bob extends Tom
+     *   case object Harry extends Tom
+     *   List(Bob, Harry)  // compiler calculates "Product with Tom" rather than "Tom with Product"
      *
-     *  Here we make the adjustment by rewinding to a pre-erasure state and
-     *  sifting through the parents for a class type.
+     * Here we make the adjustment by rewinding to a pre-erasure state and
+     * sifting through the parents for a class type.
      */
-    def lub0(tk1: TypeKind, tk2: TypeKind): Type = beforeUncurry {
-      import definitions._
+    def lub0(tk1: TypeKind, tk2: TypeKind): Type = enteringUncurry {
       val tp = global.lub(List(tk1.toType, tk2.toType))
       val (front, rest) = tp.parents span (_.typeSymbol.isTrait)
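
The Tom/Bob/Harry example quoted in the comment above can be tried directly; a minimal restatement (the lub's exact printed form may vary, but in 2.11 the traits come before the class):

// The example from the comment above, restated as runnable code. With 2.11's
// lub the inferred element type lists the traits before the class, e.g.
// Product with Serializable with Tom rather than Tom with ...
abstract class Tom
case object Bob extends Tom
case object Harry extends Tom

object LubOrderingDemo extends App {
  val xs = List(Bob, Harry)
  println(xs) // List(Bob, Harry); the interesting part is the statically inferred element type
}
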
 
@@ -284,7 +290,7 @@ trait TypeKinds { self: ICodes =>
     }
 
     /** Checks subtyping relationship. */
-    override def <:<(other: TypeKind) = isNothingType || (other match {
+    def <:<(other: TypeKind) = isNothingType || (other match {
       case REFERENCE(cls2)  => cls.tpe <:< cls2.tpe
       case ARRAY(_)         => cls == NullClass
       case _                => false
@@ -298,7 +304,7 @@ trait TypeKinds { self: ICodes =>
     else ARRAY(ArrayN(elem, dims - 1))
   }
 
-  final case class ARRAY(val elem: TypeKind) extends TypeKind {
+  final case class ARRAY(elem: TypeKind) extends TypeKind {
     override def toString    = "ARRAY[" + elem + "]"
     override def isArrayType = true
     override def dimensions  = 1 + elem.dimensions
@@ -322,7 +328,7 @@ trait TypeKinds { self: ICodes =>
 
     /** Array subtyping is covariant, as in Java. Necessary for checking
      *  code that interacts with Java. */
-    override def <:<(other: TypeKind) = other match {
+    def <:<(other: TypeKind) = other match {
       case ARRAY(elem2)                         => elem <:< elem2
       case REFERENCE(AnyRefClass | ObjectClass) => true // TODO: platform dependent!
       case _                                    => false
@@ -340,7 +346,7 @@ trait TypeKinds { self: ICodes =>
     }
 
     /** Checks subtyping relationship. */
-    override def <:<(other: TypeKind) = other match {
+    def <:<(other: TypeKind) = other match {
       case BOXED(`kind`)                        => true
       case REFERENCE(AnyRefClass | ObjectClass) => true // TODO: platform dependent!
       case _                                    => false
@@ -353,6 +359,7 @@ trait TypeKinds { self: ICodes =>
   */
   case object ConcatClass extends TypeKind {
     override def toString = "ConcatClass"
+    def <:<(other: TypeKind): Boolean = this eq other
 
     /**
      * Approximate `lub`. The common type of two references is
@@ -363,19 +370,16 @@ trait TypeKinds { self: ICodes =>
       case REFERENCE(_) => AnyRefReference
       case _            => uncomparable(other)
     }
-
-    /** Checks subtyping relationship. */
-    override def <:<(other: TypeKind) = this eq other
   }
 
   ////////////////// Conversions //////////////////////////////
 
   /** Return the TypeKind of the given type
    *
-   *  Call to .normalize fixes #3003 (follow type aliases). Otherwise,
+   *  Call to dealiasWiden fixes #3003 (follow type aliases). Otherwise,
    *  arrayOrClassType below would return ObjectReference.
    */
-  def toTypeKind(t: Type): TypeKind = t.normalize match {
+  def toTypeKind(t: Type): TypeKind = t.dealiasWiden match {
     case ThisType(ArrayClass)            => ObjectReference
     case ThisType(sym)                   => REFERENCE(sym)
     case SingleType(_, sym)              => primitiveOrRefType(sym)
@@ -389,7 +393,7 @@ trait TypeKinds { self: ICodes =>
     // if the first two cases exist because they do or as a defensive measure, but
     // at the time I added it, RefinedTypes were indeed reaching here.
     case ExistentialType(_, t)           => toTypeKind(t)
-    case AnnotatedType(_, t, _)          => toTypeKind(t)
+    case AnnotatedType(_, t)             => toTypeKind(t)
     case RefinedType(parents, _)         => parents map toTypeKind reduceLeft lub
     // For sure WildcardTypes shouldn't reach here either, but when
     // debugging such situations this may come in handy.
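
`dealiasWiden` is compiler-internal, but the alias-following behaviour it provides can be observed through the public reflection API. A small sketch, where the alias `StrList` is made up for illustration:

import scala.reflect.runtime.universe._

object DealiasDemo extends App {
  type StrList = List[String]      // made-up alias for illustration
  println(typeOf[StrList])         // the alias itself
  println(typeOf[StrList].dealias) // List[String], the underlying type the backend needs to see
}
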
@@ -431,11 +435,4 @@ trait TypeKinds { self: ICodes =>
     primitiveTypeMap.getOrElse(sym, newReference(sym))
   private def primitiveOrClassType(sym: Symbol, targs: List[Type]) =
     primitiveTypeMap.getOrElse(sym, arrayOrClassType(sym, targs))
-
-  def msil_mgdptr(tk: TypeKind): TypeKind = (tk: @unchecked) match {
-    case REFERENCE(cls)  => REFERENCE(loaders.clrTypes.mdgptrcls4clssym(cls))
-    // TODO have ready class-symbols for the by-ref versions of built-in valuetypes
-    case _ => abort("cannot obtain a managed pointer for " + tk)
-  }
-
 }
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
index 23d3d05..57d51da 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
@@ -15,15 +15,11 @@ package icode
 trait TypeStacks {
   self: ICodes =>
 
-  import opcodes._
-
   /* This class simulates the type of the operand
    * stack of the ICode.
    */
   type Rep = List[TypeKind]
 
-  object NoTypeStack extends TypeStack(Nil) { }
-
   class TypeStack(var types: Rep) {
     if (types.nonEmpty)
       checkerDebug("Created " + this)
@@ -71,14 +67,6 @@ trait TypeStacks {
 
     def apply(n: Int): TypeKind = types(n)
 
-    /**
-     * A TypeStack agrees with another one if they have the same
-     * length and each type kind agrees position-wise. Two
-     * types agree if one is a subtype of the other.
-     */
-    def agreesWith(other: TypeStack): Boolean =
-      (types corresponds other.types)((t1, t2) => t1 <:< t2 || t2 <:< t1)
-
     /* This method returns a String representation of the stack */
     override def toString() =
       if (types.isEmpty) "[]"
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index 53111d0..9d48d7a 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package backend.icode.analysis
 
 import scala.collection.{ mutable, immutable }
@@ -26,12 +27,8 @@ abstract class CopyPropagation {
   case object This extends Location
 
   /** Values that can be on the stack. */
-  abstract class Value {
-    def isRecord = false
-  }
-  case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value {
-    override def isRecord = true
-  }
+  sealed abstract class Value { }
+  case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value { }
   /** The value of some location in memory. */
   case class Deref(l: Location) extends Value
 
@@ -91,16 +88,6 @@ abstract class CopyPropagation {
         loop(l) getOrElse Deref(LocalVar(l))
       }
 
-      /* Return the binding for the given field of the given record */
-      def getBinding(r: Record, f: Symbol): Value = {
-        assert(r.bindings contains f, "Record " + r + " does not contain a field " + f)
-
-        r.bindings(f) match {
-          case Deref(LocalVar(l)) => getBinding(l)
-          case target             => target
-        }
-      }
-
       /** Return a local which contains the same value as this field, if any.
        * If the field holds a reference to a local, the returned value is the
        * binding of that local.
@@ -137,7 +124,7 @@ abstract class CopyPropagation {
       }
 
       override def toString(): String =
-        "\nBindings: " + bindings + "\nStack: " + stack;
+        "\nBindings: " + bindings + "\nStack: " + stack
 
       def dup: State = {
         val b: Bindings = mutable.HashMap()
@@ -178,7 +165,7 @@ abstract class CopyPropagation {
         val resBindings = mutable.HashMap[Location, Value]()
 
         for ((k, v) <- a.bindings if b.bindings.isDefinedAt(k) && v == b.bindings(k))
-          resBindings += (k -> v);
+          resBindings += (k -> v)
         new State(resBindings, resStack)
       }
     }
@@ -203,20 +190,20 @@ abstract class CopyPropagation {
           debuglog("CopyAnalysis added point: " + b)
         }
         m.exh foreach { e =>
-          in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack);
+          in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack)
         }
 
         // first block is special: it's not bottom, but a precisely defined state with no bindings
-        in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil);
+        in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil)
       }
     }
 
     override def run() {
       forwardAnalysis(blockTransfer)
-      if (settings.debug.value) {
+      if (settings.debug) {
         linearizer.linearize(method).foreach(b => if (b != method.startBlock)
           assert(in(b) != lattice.bottom,
-            "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
+            "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"))
       }
     }
 
@@ -241,7 +228,7 @@ abstract class CopyPropagation {
 
         case CONSTANT(k) =>
           if (k.tag != UnitTag)
-            out.stack = Const(k) :: out.stack;
+            out.stack = Const(k) :: out.stack
 
         case LOAD_ARRAY_ITEM(_) =>
           out.stack = (Unknown :: out.stack.drop(2))
@@ -290,14 +277,14 @@ abstract class CopyPropagation {
               v match {
                 case Deref(LocalVar(other)) =>
                   if (other != local)
-                    out.bindings += (LocalVar(local) -> v);
+                    out.bindings += (LocalVar(local) -> v)
                 case _ =>
                   out.bindings += (LocalVar(local) -> v)
               }
             case Nil =>
               sys.error("Incorrect icode in " + method + ". Expecting something on the stack.")
           }
-          out.stack = out.stack drop 1;
+          out.stack = out.stack drop 1
 
         case STORE_THIS(_) =>
           cleanReferencesTo(out, This)
@@ -305,14 +292,14 @@ abstract class CopyPropagation {
 
         case STORE_FIELD(field, isStatic) =>
           if (isStatic)
-            out.stack = out.stack.drop(1);
+            out.stack = out.stack.drop(1)
           else {
-            out.stack = out.stack.drop(2);
-            cleanReferencesTo(out, Field(AllRecords, field));
+            out.stack = out.stack.drop(2)
+            cleanReferencesTo(out, Field(AllRecords, field))
             in.stack match {
               case v :: Record(_, bindings) :: vs =>
                 bindings += (field -> v)
-              case _ => ();
+              case _ => ()
             }
           }
 
@@ -322,7 +309,7 @@ abstract class CopyPropagation {
 
         case CALL_METHOD(method, style) => style match {
           case Dynamic =>
-            out = simulateCall(in, method, false)
+            out = simulateCall(in, method, static = false)
 
           case Static(onInstance) =>
             if (onInstance) {
@@ -333,19 +320,19 @@ abstract class CopyPropagation {
                   case Record(_, bindings) =>
                     for (v <- out.stack.take(method.info.paramTypes.length + 1)
                          if v ne obj) {
-                       bindings ++= getBindingsForPrimaryCtor(in, method);
+                       bindings ++= getBindingsForPrimaryCtor(in, method)
                     }
                   case _ => ()
                 }
                 // put the Record back on the stack and remove the 'returned' value
                 out.stack = out.stack.drop(1 + method.info.paramTypes.length)
               } else
-                out = simulateCall(in, method, false)
+                out = simulateCall(in, method, static = false)
             } else
-              out = simulateCall(in, method, true)
+              out = simulateCall(in, method, static = true)
 
           case SuperCall(_) =>
-            out = simulateCall(in, method, false)
+            out = simulateCall(in, method, static = false)
         }
 
         case BOX(tpe) =>
@@ -404,7 +391,7 @@ abstract class CopyPropagation {
           out.stack = out.stack.head :: out.stack
 
         case MONITOR_ENTER() =>
-          out.stack = out.stack.drop(1);
+          out.stack = out.stack.drop(1)
 
         case MONITOR_EXIT() =>
           out.stack = out.stack.drop(1)
@@ -452,7 +439,7 @@ abstract class CopyPropagation {
           case Deref(loc1) if (loc1 == target) => false
           case Boxed(loc1) if (loc1 == target) => false
           case rec @ Record(_, _) =>
-            cleanRecord(rec);
+            cleanRecord(rec)
             true
           case _ => true
         }) &&
@@ -463,22 +450,17 @@ abstract class CopyPropagation {
       }
     }
 
-    /** Update the state <code>s</code> after the call to <code>method</code>.
+    /** Update the state `s` after the call to `method`.
      *  The stack elements are dropped and replaced by the result of the call.
      *  If the method is impure, all bindings to record fields are cleared.
-     *
-     *  @param state  ...
-     *  @param method ...
-     *  @param static ...
-     *  @return       ...
      */
     final def simulateCall(state: copyLattice.State, method: Symbol, static: Boolean): copyLattice.State = {
-      val out = new copyLattice.State(state.bindings, state.stack);
-      out.stack = out.stack.drop(method.info.paramTypes.length + (if (static) 0 else 1));
-      if (method.info.resultType != definitions.UnitClass.tpe && !method.isConstructor)
-        out.stack = Unknown :: out.stack;
+      val out = new copyLattice.State(state.bindings, state.stack)
+      out.stack = out.stack.drop(method.info.paramTypes.length + (if (static) 0 else 1))
+      if (method.info.resultType != definitions.UnitTpe && !method.isConstructor)
+        out.stack = Unknown :: out.stack
       if (!isPureMethod(method))
-        invalidateRecords(out);
+        invalidateRecords(out)
       out
     }
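
The stack effect that `simulateCall` implements -- pop the arguments (plus the receiver for instance calls), then push one `Unknown` unless the callee returns Unit or is a constructor -- can be modelled on a plain list. A minimal sketch with made-up names, not the analysis' own types:

object SimulateCallSketch extends App {
  // Made-up stand-ins for the analysis' abstract values.
  sealed trait Value
  case object Unknown extends Value

  // Mirrors the stack effect of simulateCall: drop the arguments (and the
  // receiver for instance calls), then push Unknown if the call leaves a value.
  def callEffect(stack: List[Value], paramCount: Int, static: Boolean,
                 returnsUnit: Boolean, isConstructor: Boolean): List[Value] = {
    val popped = stack.drop(paramCount + (if (static) 0 else 1))
    if (!returnsUnit && !isConstructor) Unknown :: popped else popped
  }

  // An instance call taking two arguments and returning a value:
  println(callEffect(List(Unknown, Unknown, Unknown), paramCount = 2,
                     static = false, returnsUnit = false, isConstructor = false))
  // prints List(Unknown): receiver and args popped, one result pushed
}
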
 
@@ -519,8 +501,8 @@ abstract class CopyPropagation {
      *  they are passed on the stack. It works for primary constructors.
      */
     private def getBindingsForPrimaryCtor(in: copyLattice.State, ctor: Symbol): mutable.Map[Symbol, Value] = {
-      val paramAccessors = ctor.owner.constrParamAccessors;
-      var values         = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1);
+      val paramAccessors = ctor.owner.constrParamAccessors
+      var values         = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1)
       val bindings       = mutable.HashMap[Symbol, Value]()
 
       debuglog("getBindings for: " + ctor + " acc: " + paramAccessors)
@@ -546,24 +528,22 @@ abstract class CopyPropagation {
 //               + " having acc: " + (paramAccessors map (_.tpe))+ " vs. params" + paramTypes
 //               + "\n\t failed at pos " + i + " with " + p.tpe + " == " + paramTypes(i))
         if (p.tpe == paramTypes(i))
-          bindings += (p -> values.head);
-        values = values.tail;
+          bindings += (p -> values.head)
+        values = values.tail
       }
 
       debuglog("\t" + bindings)
       bindings
     }
 
-    /** Is symbol <code>m</code> a pure method?
-     *
-     *  @param m ...
-     *  @return  ...
+    /** Is symbol `m` a pure method?
      */
     final def isPureMethod(m: Symbol): Boolean =
       m.isGetter // abstract getters are still pure, as we 'know'
 
     final override def toString() = (
-      method.blocks map { b =>
+      if (method eq null) List("<null>")
+      else method.blocks map { b =>
         "\nIN(%s):\t Bindings: %s".format(b.label, in(b).bindings) +
         "\nIN(%s):\t Stack: %s".format(b.label, in(b).stack)
       }
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
index 04c3eed..a378998 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
@@ -4,7 +4,8 @@
  */
 
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package backend.icode.analysis
 
 import scala.collection.{ mutable, immutable }
@@ -30,16 +31,7 @@ trait DataFlowAnalysis[L <: SemiLattice] {
   /* Implement this function to initialize the worklist.  */
   def init(f: => Unit): Unit = {
     iterations = 0
-    in.clear; out.clear; worklist.clear; visited.clear;
-    f
-  }
-
-  /** Reinitialize, but keep the old solutions. Should be used when reanalyzing the
-   *  same method, after some code transformation.
-   */
-  def reinit(f: => Unit): Unit = {
-    iterations = 0
-    worklist.clear; visited.clear;
+    in.clear(); out.clear(); worklist.clear(); visited.clear()
     f
   }
 
@@ -55,7 +47,7 @@ trait DataFlowAnalysis[L <: SemiLattice] {
     while (!worklist.isEmpty) {
       if (stat) iterations += 1
       //Console.println("worklist in: " + worklist);
-      val point = worklist.iterator.next; worklist -= point; visited += point;
+      val point = worklist.iterator.next(); worklist -= point; visited += point
       //Console.println("taking out point: " + point + " worklist out: " + worklist);
       val output = f(point, in(point))
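
This loop is the usual worklist iteration of a forward data-flow analysis. Stripped of the lattice machinery it looks roughly like the sketch below (names and the trivial merge are illustrative only; the real analysis lubs the inputs coming from a point's predecessors):

object WorklistSketch extends App {
  // Minimal forward data-flow loop in the shape of forwardAnalysis above:
  // pick a point, run the transfer function on its current input, and if the
  // result changed, propagate it to the successors and requeue them.
  def fixpoint[P](entry: P, succs: P => List[P], transfer: (P, Int) => Int): Map[P, Int] = {
    var in  = Map(entry -> 0)
    var out = Map.empty[P, Int]
    val worklist = scala.collection.mutable.Queue(entry)
    while (worklist.nonEmpty) {
      val point  = worklist.dequeue()
      val output = transfer(point, in.getOrElse(point, 0))
      if (out.get(point) != Some(output)) {
        out += point -> output
        for (s <- succs(point)) { in += s -> output; worklist += s }
      }
    }
    out
  }

  // A three-node chain 1 -> 2 -> 3 whose transfer function just increments.
  println(fixpoint(1, (p: Int) => if (p < 3) List(p + 1) else Nil, (_: Int, v: Int) => v + 1))
}
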
 
@@ -82,17 +74,13 @@ trait DataFlowAnalysis[L <: SemiLattice] {
       sys.error("Could not find element " + e.getMessage)
   }
 
-  /** ...
-   *
-   *  @param f ...
-   */
   def backwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit =
     while (worklist.nonEmpty) {
       if (stat) iterations += 1
       val point = worklist.head
       worklist -= point
 
-      out(point) = lattice.lub(point.successors map in.apply, false) // TODO check for exception handlers
+      out(point) = lattice.lub(point.successors map in.apply, exceptional = false) // TODO check for exception handlers
       val input = f(point, out(point))
 
       if ((lattice.bottom == in(point)) || input != in(point)) {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
index abda639..939641c 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
@@ -69,15 +69,15 @@ abstract class Liveness {
         case STORE_LOCAL(local) if (!genSet(local))  => killSet = killSet + local
         case _ => ()
       }
-      Pair(genSet, killSet)
+      (genSet, killSet)
     }
 
     override def run() {
       backwardAnalysis(blockTransfer)
-      if (settings.debug.value) {
+      if (settings.debug) {
         linearizer.linearize(method).foreach(b => if (b != method.startBlock)
           assert(lattice.bottom != in(b),
-            "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
+            "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"))
       }
     }
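
The gen/kill sets built in `blockTransfer` feed the standard liveness equation: a local is live on entry to a block if it is read there (gen) or live on exit and not overwritten (kill). A short sketch of that equation with made-up variable names:

object LivenessSketch extends App {
  // A local is live on entry to a block if it is read in the block before
  // being overwritten (gen), or live on exit and not overwritten (kill):
  //   liveIn = gen union (liveOut diff kill)
  def liveIn(gen: Set[String], kill: Set[String], liveOut: Set[String]): Set[String] =
    gen union (liveOut diff kill)

  println(liveIn(gen = Set("x"), kill = Set("y"), liveOut = Set("y", "z"))) // Set(x, z)
}
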
 
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
index 2717c43..fecd48e 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
@@ -51,8 +51,8 @@ abstract class ReachingDefinitions {
           // it'd be nice not to call zip with mismatched sequences because
           // it makes it harder to spot the real problems.
           val result = (a.stack, b.stack).zipped map (_ ++ _)
-          if (settings.debug.value && (a.stack.length != b.stack.length))
-            debugwarn("Mismatched stacks in ReachingDefinitions#lub2: " + a.stack + ", " + b.stack + ", returning " + result)
+          if (settings.debug && (a.stack.length != b.stack.length))
+            devWarning(s"Mismatched stacks in ReachingDefinitions#lub2: ${a.stack}, ${b.stack}, returning $result")
           result
         }
       )
@@ -141,13 +141,13 @@ abstract class ReachingDefinitions {
 
     override def run() {
       forwardAnalysis(blockTransfer)
-      if (settings.debug.value) {
+      if (settings.debug) {
         linearizer.linearize(method).foreach(b => if (b != method.startBlock)
           assert(lattice.bottom != in(b),
             "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? " + in(b)
                  + ": bot: " + lattice.bottom
                  + "\nin(b) == bottom: " + (in(b) == lattice.bottom)
-                 + "\nbottom == in(b): " + (lattice.bottom == in(b))));
+                 + "\nbottom == in(b): " + (lattice.bottom == in(b))))
       }
     }
 
@@ -155,7 +155,7 @@ abstract class ReachingDefinitions {
     import lattice.IState
     def updateReachingDefinition(b: BasicBlock, idx: Int, rd: ListSet[Definition]): ListSet[Definition] = {
       val STORE_LOCAL(local) = b(idx)
-      var tmp = local
+      val tmp = local
       (rd filter { case (l, _, _) => l != tmp }) + ((tmp, b, idx))
     }
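
`updateReachingDefinition` kills the local's previous definitions and records the new one. A small stand-alone sketch of the same update, using a plain tuple for a definition (names are illustrative):

object ReachingDefsSketch extends App {
  // A definition is (localName, blockLabel, instrIndex); storing to a local
  // removes that local's previous definitions and adds the new one.
  type Definition = (String, String, Int)

  def update(rd: Set[Definition], local: String, block: String, idx: Int): Set[Definition] =
    rd.filter { case (l, _, _) => l != local } + ((local, block, idx))

  println(update(Set(("x", "B1", 0), ("y", "B1", 1)), "x", "B2", 3))
  // Set((y,B1,1), (x,B2,3))
}
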
 
@@ -197,7 +197,7 @@ abstract class ReachingDefinitions {
     def findDefs(bb: BasicBlock, idx: Int, m: Int, depth: Int): List[(BasicBlock, Int)] = if (idx > 0) {
       assert(bb.closed, bb)
 
-      var instrs = bb.getArray
+      val instrs = bb.getArray
       var res: List[(BasicBlock, Int)] = Nil
       var i = idx
       var n = m
@@ -240,7 +240,8 @@ abstract class ReachingDefinitions {
       findDefs(bb, idx, m, 0)
 
     override def toString: String = {
-      method.code.blocks map { b =>
+      if (method eq null) "<null>"
+      else method.code.blocks map { b =>
         "  entry(%s) = %s\n".format(b, in(b)) +
         "   exit(%s) = %s\n".format(b, out(b))
       } mkString ("ReachingDefinitions {\n", "\n", "\n}")
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index b2ecb43..676ee12 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -3,10 +3,12 @@
  * @author  Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package backend.icode.analysis
 
 import scala.collection.{mutable, immutable}
+import java.util.concurrent.TimeUnit
 
 /** A data-flow analysis on types, that works on `ICode`.
  *
@@ -68,7 +70,6 @@ abstract class TypeFlowAnalysis {
    *  names to types and a type stack.
    */
   object typeFlowLattice extends SemiLattice {
-    import icodes._
     type Elem = IState[VarBinding, icodes.TypeStack]
 
     val top    = new Elem(new VarBinding, typeStackLattice.top)
@@ -132,15 +133,15 @@ abstract class TypeFlowAnalysis {
       init(m)
     }
 
-    def run = {
-      timer.start
+    def run() = {
+      timer.start()
       // icodes.lubs0 = 0
       forwardAnalysis(blockTransfer)
-      val t = timer.stop
-      if (settings.debug.value) {
+      timer.stop
+      if (settings.debug) {
         linearizer.linearize(method).foreach(b => if (b != method.startBlock)
           assert(visited.contains(b),
-            "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited));
+            "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited))
       }
       // log("" + method.symbol.fullName + " ["  + method.code.blocks.size + " blocks] "
       //     + "\n\t" + iterations + " iterations: " + t + " ms."
@@ -168,7 +169,7 @@ abstract class TypeFlowAnalysis {
       val bindings = out.vars
       val stack = out.stack
 
-      if (settings.debug.value) {
+      if (settings.debug) {
         // Console.println("[before] Stack: " + stack);
         // Console.println(i);
       }
@@ -208,7 +209,7 @@ abstract class TypeFlowAnalysis {
             case Test(_, kind, zero) =>
               stack.pop
               if (!zero) { stack.pop }
-              stack push BOOL;
+              stack push BOOL
 
             case Comparison(_, _) => stack.pop2; stack push INT
 
@@ -269,36 +270,6 @@ abstract class TypeFlowAnalysis {
       out
     } // interpret
 
-
-    class SimulatedStack {
-      private var types: List[InferredType] = Nil
-      private var depth = 0
-
-      /** Remove and return the topmost element on the stack. If the
-       *  stack is empty, return a reference to a negative index on the
-       *  stack, meaning it refers to elements pushed by a predecessor block.
-       */
-      def pop: InferredType = types match {
-        case head :: rest =>
-          types = rest
-          head
-        case _ =>
-          depth -= 1
-          TypeOfStackPos(depth)
-      }
-
-      def pop2: (InferredType, InferredType) = {
-        (pop, pop)
-      }
-
-      def push(t: InferredType) {
-        depth += 1
-        types = types ::: List(t)
-      }
-
-      def push(k: TypeKind) { push(Const(k)) }
-    }
-
 	abstract class InferredType {
       /** Return the type kind pointed by this inferred type. */
       def getKind(in: lattice.Elem): icodes.TypeKind = this match {
@@ -326,7 +297,6 @@ abstract class TypeFlowAnalysis {
 	class TransferFunction(consumed: Int, gens: List[Gen]) extends (lattice.Elem => lattice.Elem) {
 	  def apply(in: lattice.Elem): lattice.Elem = {
         val out = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
-        val bindings = out.vars
         val stack = out.stack
 
         out.stack.pop(consumed)
@@ -387,9 +357,9 @@ abstract class TypeFlowAnalysis {
 
     override def run {
 
-      timer.start
+      timer.start()
       forwardAnalysis(blockTransfer)
-      val t = timer.stop
+      timer.stop
 
       /* Now that `forwardAnalysis(blockTransfer)` has finished, all inlining candidates can be found in `remainingCALLs`,
          whose keys are callsites and whose values are pieces of information about the typestack just before the callsite in question.
@@ -399,7 +369,7 @@ abstract class TypeFlowAnalysis {
         preCandidates += rc._2.bb
       }
 
-      if (settings.debug.value) {
+      if (settings.debug) {
         for(b <- callerLin; if (b != method.startBlock) && preCandidates(b)) {
           assert(visited.contains(b),
                  "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited)
@@ -428,7 +398,7 @@ abstract class TypeFlowAnalysis {
     override def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
       var result = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
 
-      val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null;
+      val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null
       var isPastLast = false
 
       var instrs = b.toList
@@ -445,11 +415,11 @@ abstract class TypeFlowAnalysis {
           }
           val concreteMethod = inliner.lookupImplFor(msym, receiver)
           val isCandidate = {
-            ( inliner.isClosureClass(receiver) || concreteMethod.isEffectivelyFinal || receiver.isEffectivelyFinal ) &&
+            ( inliner.isClosureClass(receiver) || concreteMethod.isEffectivelyFinalOrNotOverridden || receiver.isEffectivelyFinalOrNotOverridden ) &&
             !blackballed(concreteMethod)
           }
           if(isCandidate) {
-            remainingCALLs += Pair(cm, CallsiteInfo(b, receiver, result.stack.length, concreteMethod))
+            remainingCALLs(cm) = CallsiteInfo(b, receiver, result.stack.length, concreteMethod)
           } else {
             remainingCALLs.remove(cm)
             isOnWatchlist.remove(cm)
@@ -531,7 +501,7 @@ abstract class TypeFlowAnalysis {
     }
 
     private def isReceiverKnown(cm: opcodes.CALL_METHOD): Boolean = {
-      cm.method.isEffectivelyFinal && cm.method.owner.isEffectivelyFinal
+      cm.method.isEffectivelyFinalOrNotOverridden && cm.method.owner.isEffectivelyFinalOrNotOverridden
     }
 
     private def putOnRadar(blocks: Traversable[BasicBlock]) {
@@ -546,9 +516,6 @@ abstract class TypeFlowAnalysis {
       relevantBBs ++= blocks
     }
 
-    /* the argument is also included in the result */
-    private def transitivePreds(b: BasicBlock): Set[BasicBlock] = { transitivePreds(List(b)) }
-
     /* those BBs in the argument are also included in the result */
     private def transitivePreds(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
       val result = mutable.Set.empty[BasicBlock]
@@ -562,19 +529,6 @@ abstract class TypeFlowAnalysis {
       result.toSet
     }
 
-    /* those BBs in the argument are also included in the result */
-    private def transitiveSuccs(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
-      val result = mutable.Set.empty[BasicBlock]
-      var toVisit: List[BasicBlock] = starters.toList.distinct
-      while(toVisit.nonEmpty) {
-        val h   = toVisit.head
-        toVisit = toVisit.tail
-        result += h
-        for(p <- h.successors; if !result(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
-      }
-      result.toSet
-    }
-
     /* A basic block B is "on the perimeter" of the current control-flow subgraph if none of its successors belongs to that subgraph.
      * In that case, for the purposes of inlining, we're interested in the typestack right before the last inline candidate in B, not in those afterwards.
      * In particular we can do without computing the outflow at B. */
@@ -646,10 +600,10 @@ abstract class TypeFlowAnalysis {
         return
       } else if(staleOut.isEmpty && inlined.isEmpty && staleIn.isEmpty) {
         // this promotes invoking reinit if in doubt, no performance degradation will ensue!
-        return;
+        return
       }
 
-      worklist.clear // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit.
+      worklist.clear() // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit.
 
       // asserts conveying an idea what CFG shapes arrive here:
       //   staleIn foreach (p => assert( !in.isDefinedAt(p), p))
@@ -685,12 +639,6 @@ abstract class TypeFlowAnalysis {
       if(!worklist.contains(b)) { worklist += b }
     }
 
-    /* this is not a general purpose method to add to the worklist,
-     * because the assert is expected to hold only when called from MTFAGrowable.reinit() */
-    private def enqueue(bs: Traversable[BasicBlock]) {
-      bs foreach enqueue
-    }
-
     private def blankOut(blocks: scala.collection.Set[BasicBlock]) {
       blocks foreach { b =>
         in(b)  = typeFlowLattice.bottom
@@ -719,14 +667,14 @@ abstract class TypeFlowAnalysis {
     override def forwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit = {
       while (!worklist.isEmpty && relevantBBs.nonEmpty) {
         if (stat) iterations += 1
-        val point = worklist.iterator.next; worklist -= point;
+        val point = worklist.iterator.next(); worklist -= point
         if(relevantBBs(point)) {
           shrinkedWatchlist = false
           val output = f(point, in(point))
-          visited += point;
+          visited += point
           if(isOnPerimeter(point)) {
             if(shrinkedWatchlist && !isWatching(point)) {
-              relevantBBs -= point;
+              relevantBBs -= point
               populatePerimeter()
             }
           } else {
@@ -761,19 +709,15 @@ abstract class TypeFlowAnalysis {
 
     private var lastStart = 0L
 
-    def reset() {
-      millis = 0L
-    }
-
     def start() {
-      lastStart = System.currentTimeMillis
+      lastStart = System.nanoTime()
     }
 
     /** Stop the timer and return the number of milliseconds since the last
      * call to start. The 'millis' field is increased by the elapsed time.
      */
     def stop: Long = {
-      val elapsed = System.currentTimeMillis - lastStart
+      val elapsed = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - lastStart)
       millis += elapsed
       elapsed
     }
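
The timer now uses the monotonic `System.nanoTime` with a `TimeUnit` conversion instead of `currentTimeMillis`, so elapsed times are not skewed by wall-clock adjustments. A stand-alone sketch of the same stopwatch idea:

import java.util.concurrent.TimeUnit

// A stopwatch in the spirit of the Timer change above: nanoTime is monotonic,
// so elapsed times do not jump when the wall clock is adjusted.
class Stopwatch {
  private var lastStart = 0L
  var millis = 0L
  def start(): Unit = { lastStart = System.nanoTime() }
  def stop(): Long = {
    val elapsed = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - lastStart)
    millis += elapsed
    elapsed
  }
}

object StopwatchDemo extends App {
  val t = new Stopwatch
  t.start()
  Thread.sleep(10)
  println(s"elapsed: ${t.stop()} ms (total ${t.millis} ms)")
}
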
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
new file mode 100644
index 0000000..53142fb
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
@@ -0,0 +1,1234 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+
+package scala
+package tools.nsc
+package backend
+package jvm
+
+import scala.collection.{ mutable, immutable }
+import scala.annotation.switch
+
+import scala.tools.asm
+
+/*
+ *
+ *  @author  Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+ *  @version 1.0
+ *
+ */
+abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
+  import global._
+  import definitions._
+
+  /*
+   * Functionality to build the body of ASM MethodNode, except for `synchronized` and `try` expressions.
+   */
+  abstract class PlainBodyBuilder(cunit: CompilationUnit) extends PlainSkelBuilder(cunit) {
+
+    import icodes.TestOp
+    import icodes.opcodes.InvokeStyle
+
+    /*  If the selector type has a member with the right name,
+     *  it is the host class; otherwise the symbol's owner.
+     */
+    def findHostClass(selector: Type, sym: Symbol) = selector member sym.name match {
+      case NoSymbol   => debuglog(s"Rejecting $selector as host class for $sym") ; sym.owner
+      case _          => selector.typeSymbol
+    }
+
+    /* ---------------- helper utils for generating methods and code ---------------- */
+
+    def emit(opc: Int) { mnode.visitInsn(opc) }
+
+    def emitZeroOf(tk: BType) {
+      (tk.sort: @switch) match {
+        case asm.Type.BOOLEAN => bc.boolconst(false)
+        case asm.Type.BYTE  |
+             asm.Type.SHORT |
+             asm.Type.CHAR  |
+             asm.Type.INT     => bc.iconst(0)
+        case asm.Type.LONG    => bc.lconst(0)
+        case asm.Type.FLOAT   => bc.fconst(0)
+        case asm.Type.DOUBLE  => bc.dconst(0)
+        case asm.Type.VOID    => ()
+        case _ => emit(asm.Opcodes.ACONST_NULL)
+      }
+    }
+
+    /*
+     * Emits code that adds nothing to the operand stack.
+     * Two main cases: `tree` is an assignment,
+     * otherwise an `adapt()` to UNIT is performed if needed.
+     */
+    def genStat(tree: Tree) {
+      lineNumber(tree)
+      tree match {
+        case Assign(lhs @ Select(_, _), rhs) =>
+          val isStatic = lhs.symbol.isStaticMember
+          if (!isStatic) { genLoadQualifier(lhs) }
+          genLoad(rhs, symInfoTK(lhs.symbol))
+          lineNumber(tree)
+          fieldStore(lhs.symbol)
+
+        case Assign(lhs, rhs) =>
+          val s = lhs.symbol
+          val Local(tk, _, idx, _) = locals.getOrMakeLocal(s)
+          genLoad(rhs, tk)
+          lineNumber(tree)
+          bc.store(idx, tk)
+
+        case _ =>
+          genLoad(tree, UNIT)
+      }
+    }
+
+    def genThrow(expr: Tree): BType = {
+      val thrownKind = tpeTK(expr)
+      // `throw null` is valid although scala.Null (as defined in src/library-aux) isn't a subtype of Throwable.
+      // Similarly for scala.Nothing (again, as defined in src/library-aux).
+      assert(thrownKind.isNullType || thrownKind.isNothingType || exemplars.get(thrownKind).isSubtypeOf(ThrowableReference))
+      genLoad(expr, thrownKind)
+      lineNumber(expr)
+      emit(asm.Opcodes.ATHROW) // ICode enters here into enterIgnoreMode, we'll rely instead on DCE at ClassNode level.
+
+      RT_NOTHING // always returns the same, the invoker should know :)
+    }
+
+    /* Generate code for primitive arithmetic operations. */
+    def genArithmeticOp(tree: Tree, code: Int): BType = {
+      val Apply(fun @ Select(larg, _), args) = tree
+      var resKind = tpeTK(larg)
+
+      assert(resKind.isNumericType || (resKind == BOOL),
+             s"$resKind is not a numeric or boolean type [operation: ${fun.symbol}]")
+
+      import scalaPrimitives._
+
+      args match {
+        // unary operation
+        case Nil =>
+          genLoad(larg, resKind)
+          code match {
+            case POS => () // nothing
+            case NEG => bc.neg(resKind)
+            case NOT => bc.genPrimitiveArithmetic(icodes.NOT, resKind)
+            case _ => abort(s"Unknown unary operation: ${fun.symbol.fullName} code: $code")
+          }
+
+        // binary operation
+        case rarg :: Nil =>
+          resKind = maxType(tpeTK(larg), tpeTK(rarg))
+          if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code)) {
+            assert(resKind.isIntegralType || (resKind == BOOL),
+                   s"$resKind incompatible with arithmetic modulo operation.")
+          }
+
+          genLoad(larg, resKind)
+          genLoad(rarg, // check .NET size of shift arguments!
+                  if (scalaPrimitives.isShiftOp(code)) INT else resKind)
+
+          (code: @switch) match {
+            case ADD => bc add resKind
+            case SUB => bc sub resKind
+            case MUL => bc mul resKind
+            case DIV => bc div resKind
+            case MOD => bc rem resKind
+
+            case OR  | XOR | AND => bc.genPrimitiveLogical(code, resKind)
+
+            case LSL | LSR | ASR => bc.genPrimitiveShift(code, resKind)
+
+            case _                   => abort(s"Unknown primitive: ${fun.symbol}[$code]")
+          }
+
+        case _ =>
+          abort(s"Too many arguments for primitive function: $tree")
+      }
+      lineNumber(tree)
+      resKind
+    }
+
+    /* Generate primitive array operations. */
+    def genArrayOp(tree: Tree, code: Int, expectedType: BType): BType = {
+      val Apply(Select(arrayObj, _), args) = tree
+      val k = tpeTK(arrayObj)
+      genLoad(arrayObj, k)
+      val elementType = typeOfArrayOp.getOrElse(code, abort(s"Unknown operation on arrays: $tree code: $code"))
+
+      var generatedType = expectedType
+
+      if (scalaPrimitives.isArrayGet(code)) {
+        // load argument on stack
+        assert(args.length == 1, s"Too many arguments for array get operation: $tree")
+        genLoad(args.head, INT)
+        generatedType = k.getComponentType
+        bc.aload(elementType)
+      }
+      else if (scalaPrimitives.isArraySet(code)) {
+        args match {
+          case a1 :: a2 :: Nil =>
+            genLoad(a1, INT)
+            genLoad(a2)
+            // the following line should really be here, but because of bugs in erasure
+            // we pretend we generate whatever type is expected from us.
+            //generatedType = UNIT
+            bc.astore(elementType)
+          case _ =>
+            abort(s"Too many arguments for array set operation: $tree")
+        }
+      }
+      else {
+        generatedType = INT
+        emit(asm.Opcodes.ARRAYLENGTH)
+      }
+      lineNumber(tree)
+
+      generatedType
+    }
+
+    def genLoadIf(tree: If, expectedType: BType): BType = {
+      val If(condp, thenp, elsep) = tree
+
+      val success = new asm.Label
+      val failure = new asm.Label
+
+      val hasElse = !elsep.isEmpty
+      val postIf  = if (hasElse) new asm.Label else failure
+
+      genCond(condp, success, failure)
+
+      val thenKind      = tpeTK(thenp)
+      val elseKind      = if (!hasElse) UNIT else tpeTK(elsep)
+      def hasUnitBranch = (thenKind == UNIT || elseKind == UNIT)
+      val resKind       = if (hasUnitBranch) UNIT else tpeTK(tree)
+
+      markProgramPoint(success)
+      genLoad(thenp, resKind)
+      if (hasElse) { bc goTo postIf }
+      markProgramPoint(failure)
+      if (hasElse) {
+        genLoad(elsep, resKind)
+        markProgramPoint(postIf)
+      }
+
+      resKind
+    }
+
+    def genPrimitiveOp(tree: Apply, expectedType: BType): BType = {
+      val sym = tree.symbol
+      val Apply(fun @ Select(receiver, _), _) = tree
+      val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
+
+      import scalaPrimitives.{isArithmeticOp, isArrayOp, isLogicalOp, isComparisonOp}
+
+      if (isArithmeticOp(code))                genArithmeticOp(tree, code)
+      else if (code == scalaPrimitives.CONCAT) genStringConcat(tree)
+      else if (code == scalaPrimitives.HASH)   genScalaHash(receiver)
+      else if (isArrayOp(code))                genArrayOp(tree, code, expectedType)
+      else if (isLogicalOp(code) || isComparisonOp(code)) {
+        val success, failure, after = new asm.Label
+        genCond(tree, success, failure)
+        // success block
+          markProgramPoint(success)
+          bc boolconst true
+          bc goTo after
+        // failure block
+          markProgramPoint(failure)
+          bc boolconst false
+        // after
+        markProgramPoint(after)
+
+        BOOL
+      }
+      else if (code == scalaPrimitives.SYNCHRONIZED)
+        genSynchronized(tree, expectedType)
+      else if (scalaPrimitives.isCoercion(code)) {
+        genLoad(receiver)
+        lineNumber(tree)
+        genCoercion(code)
+        coercionTo(code)
+      }
+      else abort(
+        s"Primitive operation not handled yet: ${sym.fullName}(${fun.symbol.simpleName}) at: ${tree.pos}"
+      )
+    }
+
+    def genLoad(tree: Tree) {
+      genLoad(tree, tpeTK(tree))
+    }
+
+    /* Generate code for trees that produce values on the stack */
+    def genLoad(tree: Tree, expectedType: BType) {
+      var generatedType = expectedType
+
+      lineNumber(tree)
+
+      tree match {
+        case lblDf : LabelDef => genLabelDef(lblDf, expectedType)
+
+        case ValDef(_, nme.THIS, _, _) =>
+          debuglog("skipping trivial assign to _$this: " + tree)
+
+        case ValDef(_, _, _, rhs) =>
+          val sym = tree.symbol
+          /* most of the time, !locals.contains(sym), unless the current activation of genLoad() is being called
+             while duplicating a finalizer that contains this ValDef. */
+          val Local(tk, _, idx, isSynth) = locals.getOrMakeLocal(sym)
+          if (rhs == EmptyTree) { emitZeroOf(tk) }
+          else { genLoad(rhs, tk) }
+          bc.store(idx, tk)
+          if (!isSynth) { // there are case <synthetic> ValDef's emitted by patmat
+            varsInScope ::= (sym -> currProgramPoint())
+          }
+          generatedType = UNIT
+
+        case t : If =>
+          generatedType = genLoadIf(t, expectedType)
+
+        case r : Return =>
+          genReturn(r)
+          generatedType = expectedType
+
+        case t : Try =>
+          generatedType = genLoadTry(t)
+
+        case Throw(expr) =>
+          generatedType = genThrow(expr)
+
+        case New(tpt) =>
+          abort(s"Unexpected New(${tpt.summaryString}/$tpt) reached GenBCode.\n" +
+                "  Call was genLoad" + ((tree, expectedType)))
+
+        case app : Apply =>
+          generatedType = genApply(app, expectedType)
+
+        case ApplyDynamic(qual, args) => sys.error("No invokedynamic support yet.")
+
+        case This(qual) =>
+          val symIsModuleClass = tree.symbol.isModuleClass
+          assert(tree.symbol == claszSymbol || symIsModuleClass,
+                 s"Trying to access the this of another class: tree.symbol = ${tree.symbol}, class symbol = $claszSymbol compilation unit: $cunit")
+          if (symIsModuleClass && tree.symbol != claszSymbol) {
+            generatedType = genLoadModule(tree)
+          }
+          else {
+            mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
+            generatedType =
+              if (tree.symbol == ArrayClass) ObjectReference
+              else brefType(thisName) // inner class (if any) for claszSymbol already tracked.
+          }
+
+        case Select(Ident(nme.EMPTY_PACKAGE_NAME), module) =>
+          assert(tree.symbol.isModule, s"Selection of non-module from empty package: $tree sym: ${tree.symbol} at: ${tree.pos}")
+          genLoadModule(tree)
+
+        case Select(qualifier, selector) =>
+          val sym = tree.symbol
+          generatedType = symInfoTK(sym)
+          val hostClass = findHostClass(qualifier.tpe, sym)
+          debuglog(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
+          val qualSafeToElide = treeInfo isQualifierSafeToElide qualifier
+
+          def genLoadQualUnlessElidable() { if (!qualSafeToElide) { genLoadQualifier(tree) } }
+
+          if (sym.isModule) {
+            genLoadQualUnlessElidable()
+            genLoadModule(tree)
+          }
+          else if (sym.isStaticMember) {
+            genLoadQualUnlessElidable()
+            fieldLoad(sym, hostClass)
+          }
+          else {
+            genLoadQualifier(tree)
+            fieldLoad(sym, hostClass)
+          }
+
+        case Ident(name) =>
+          val sym = tree.symbol
+          if (!sym.hasPackageFlag) {
+            val tk = symInfoTK(sym)
+            if (sym.isModule) { genLoadModule(tree) }
+            else { locals.load(sym) }
+            generatedType = tk
+          }
+
+        case Literal(value) =>
+          if (value.tag != UnitTag) (value.tag, expectedType) match {
+            case (IntTag,   LONG  ) => bc.lconst(value.longValue);       generatedType = LONG
+            case (FloatTag, DOUBLE) => bc.dconst(value.doubleValue);     generatedType = DOUBLE
+            case (NullTag,  _     ) => bc.emit(asm.Opcodes.ACONST_NULL); generatedType = RT_NULL
+            case _                  => genConstant(value);               generatedType = tpeTK(tree)
+          }
+
+        case blck : Block => genBlock(blck, expectedType)
+
+        case Typed(Super(_, _), _) => genLoad(This(claszSymbol), expectedType)
+
+        case Typed(expr, _) => genLoad(expr, expectedType)
+
+        case Assign(_, _) =>
+          generatedType = UNIT
+          genStat(tree)
+
+        case av : ArrayValue =>
+          generatedType = genArrayValue(av)
+
+        case mtch : Match =>
+          generatedType = genMatch(mtch)
+
+        case EmptyTree => if (expectedType != UNIT) { emitZeroOf(expectedType) }
+
+        case _ => abort(s"Unexpected tree in genLoad: $tree/${tree.getClass} at: ${tree.pos}")
+      }
+
+      // emit conversion
+      if (generatedType != expectedType) {
+        adapt(generatedType, expectedType)
+      }
+
+    } // end of GenBCode.genLoad()
+
+    // ---------------- field load and store ----------------
+
+    /*
+     * must-single-thread
+     */
+    def fieldLoad( field: Symbol, hostClass: Symbol = null) {
+      fieldOp(field, isLoad = true,  hostClass)
+    }
+    /*
+     * must-single-thread
+     */
+    def fieldStore(field: Symbol, hostClass: Symbol = null) {
+      fieldOp(field, isLoad = false, hostClass)
+    }
+
+    /*
+     * must-single-thread
+     */
+    private def fieldOp(field: Symbol, isLoad: Boolean, hostClass: Symbol) {
+      // LOAD_FIELD.hostClass , CALL_METHOD.hostClass , and #4283
+      val owner      =
+        if (hostClass == null) internalName(field.owner)
+        else                  internalName(hostClass)
+      val fieldJName = field.javaSimpleName.toString
+      val fieldDescr = symInfoTK(field).getDescriptor
+      val isStatic   = field.isStaticMember
+      val opc =
+        if (isLoad) { if (isStatic) asm.Opcodes.GETSTATIC else asm.Opcodes.GETFIELD }
+        else        { if (isStatic) asm.Opcodes.PUTSTATIC else asm.Opcodes.PUTFIELD }
+      mnode.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
+
+    }
+
+    // ---------------- emitting constant values ----------------
+
+    /*
+     * For const.tag in {ClazzTag, EnumTag}
+     *   must-single-thread
+     * Otherwise it's safe to call from multiple threads.
+     */
+    def genConstant(const: Constant) {
+      (const.tag: @switch) match {
+
+        case BooleanTag => bc.boolconst(const.booleanValue)
+
+        case ByteTag    => bc.iconst(const.byteValue)
+        case ShortTag   => bc.iconst(const.shortValue)
+        case CharTag    => bc.iconst(const.charValue)
+        case IntTag     => bc.iconst(const.intValue)
+
+        case LongTag    => bc.lconst(const.longValue)
+        case FloatTag   => bc.fconst(const.floatValue)
+        case DoubleTag  => bc.dconst(const.doubleValue)
+
+        case UnitTag    => ()
+
+        case StringTag  =>
+          assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
+          mnode.visitLdcInsn(const.stringValue) // `stringValue` special-cases null, but not for a const with StringTag
+
+        case NullTag    => emit(asm.Opcodes.ACONST_NULL)
+
+        case ClazzTag   =>
+          val toPush: BType = {
+            val kind = toTypeKind(const.typeValue)
+            if (kind.isValueType) classLiteral(kind)
+            else kind
+          }
+          mnode.visitLdcInsn(toPush.toASMType)
+
+        case EnumTag   =>
+          val sym       = const.symbolValue
+          val ownerName = internalName(sym.owner)
+          val fieldName = sym.javaSimpleName.toString
+          val fieldDesc = toTypeKind(sym.tpe.underlying).getDescriptor
+          mnode.visitFieldInsn(
+            asm.Opcodes.GETSTATIC,
+            ownerName,
+            fieldName,
+            fieldDesc
+          )
+
+        case _ => abort(s"Unknown constant value: $const")
+      }
+    }
+
+    private def genLabelDef(lblDf: LabelDef, expectedType: BType) {
+      // duplication of LabelDefs contained in `finally`-clauses is handled when emitting RETURN. No bookkeeping for that required here.
+      // no need to call index() over lblDf.params, on first access that magic happens (moreover, no LocalVariableTable entries needed for them).
+      markProgramPoint(programPoint(lblDf.symbol))
+      lineNumber(lblDf)
+      genLoad(lblDf.rhs, expectedType)
+    }
+
+    private def genReturn(r: Return) {
+      val Return(expr) = r
+      val returnedKind = tpeTK(expr)
+      genLoad(expr, returnedKind)
+      adapt(returnedKind, returnType)
+      val saveReturnValue = (returnType != UNIT)
+      lineNumber(r)
+
+      cleanups match {
+        case Nil =>
+          // not an assertion: !shouldEmitCleanup (at least not yet, pendingCleanups() may still have to run and reset `shouldEmitCleanup`).
+          bc emitRETURN returnType
+        case nextCleanup :: rest =>
+          if (saveReturnValue) {
+            if (insideCleanupBlock) {
+              cunit.warning(r.pos, "Return statement found in finally-clause, discarding its return-value in favor of that of a more deeply nested return.")
+              bc drop returnType
+            } else {
+              // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted.
+              if (earlyReturnVar == null) {
+                earlyReturnVar = locals.makeLocal(returnType, "earlyReturnVar")
+              }
+              locals.store(earlyReturnVar)
+            }
+          }
+          bc goTo nextCleanup
+          shouldEmitCleanup = true
+      }
+
+    } // end of genReturn()
+
+    private def genApply(app: Apply, expectedType: BType): BType = {
+      var generatedType = expectedType
+      lineNumber(app)
+      app match {
+
+        case Apply(TypeApply(fun, targs), _) =>
+
+          val sym = fun.symbol
+          val cast = sym match {
+            case Object_isInstanceOf  => false
+            case Object_asInstanceOf  => true
+            case _                    => abort(s"Unexpected type application $fun[sym: ${sym.fullName}] in: $app")
+          }
+
+          val Select(obj, _) = fun
+          val l = tpeTK(obj)
+          val r = tpeTK(targs.head)
+
+          def genTypeApply(): BType = {
+            genLoadQualifier(fun)
+
+            if (l.isValueType && r.isValueType)
+              genConversion(l, r, cast)
+            else if (l.isValueType) {
+              bc drop l
+              if (cast) {
+                mnode.visitTypeInsn(asm.Opcodes.NEW, classCastExceptionReference.getInternalName)
+                bc dup ObjectReference
+                emit(asm.Opcodes.ATHROW)
+              } else {
+                bc boolconst false
+              }
+            }
+            else if (r.isValueType && cast) {
+              abort(s"Erasure should have added an unboxing operation to prevent this cast. Tree: $app")
+            }
+            else if (r.isValueType) {
+              bc isInstance classLiteral(r)
+            }
+            else {
+              genCast(r, cast)
+            }
+
+            if (cast) r else BOOL
+          } // end of genTypeApply()
+
+          generatedType = genTypeApply()
+
+        // 'super' call: Note: since constructors are supposed to
+        // return an instance of what they construct, we have to take
+        // special care. On the JVM they are 'void', and Scala forbids (syntactically)
+        // calling super constructors explicitly and/or using their 'returned' value.
+        // Therefore, we can ignore this fact and generate code that leaves nothing
+        // on the stack (contrary to what the type in the AST says).
+        case Apply(fun @ Select(Super(_, mix), _), args) =>
+          val invokeStyle = icodes.opcodes.SuperCall(mix)
+          // if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
+          mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
+          genLoadArguments(args, paramTKs(app))
+          genCallMethod(fun.symbol, invokeStyle, pos = app.pos)
+          generatedType = asmMethodType(fun.symbol).getReturnType
+
+        // 'new' constructor call: Note: since constructors are
+        // thought to return an instance of what they construct,
+        // we have to 'simulate' it by DUPlicating the freshly created
+        // instance (on JVM, <init> methods return VOID).
+        case Apply(fun @ Select(New(tpt), nme.CONSTRUCTOR), args) =>
+          val ctor = fun.symbol
+          assert(ctor.isClassConstructor, s"'new' call to non-constructor: ${ctor.name}")
+
+          generatedType = tpeTK(tpt)
+          assert(generatedType.isRefOrArrayType, s"Non reference type cannot be instantiated: $generatedType")
+
+          generatedType match {
+            case arr if generatedType.isArray =>
+              genLoadArguments(args, paramTKs(app))
+              val dims     = arr.getDimensions
+              var elemKind = arr.getElementType
+              val argsSize = args.length
+              if (argsSize > dims) {
+                cunit.error(app.pos, s"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)")
+              }
+              if (argsSize < dims) {
+                /* In one step:
+                 *   elemKind = new BType(BType.ARRAY, arr.off + argsSize, arr.len - argsSize)
+                 * however the above does not enter a TypeName for each nested array in chrs.
+                 */
+                for (i <- args.length until dims) elemKind = arrayOf(elemKind)
+              }
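+              // A single supplied dimension is emitted via `bc newarray`; for several dimensions a
+              // MULTIANEWARRAY instruction is emitted, whose descriptor carries `argsSize` leading '[' characters.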
+              (argsSize : @switch) match {
+                case 1 => bc newarray elemKind
+                case _ =>
+                  val descr = ('[' * argsSize) + elemKind.getDescriptor // denotes the same as: arrayN(elemKind, argsSize).getDescriptor
+                  mnode.visitMultiANewArrayInsn(descr, argsSize)
+              }
+
+            case rt if generatedType.hasObjectSort =>
+              assert(exemplar(ctor.owner).c == rt, s"Symbol ${ctor.owner.fullName} is different from $rt")
+              mnode.visitTypeInsn(asm.Opcodes.NEW, rt.getInternalName)
+              bc dup generatedType
+              genLoadArguments(args, paramTKs(app))
+              genCallMethod(ctor, icodes.opcodes.Static(onInstance = true))
+
+            case _ =>
+              abort(s"Cannot instantiate $tpt of kind: $generatedType")
+          }
+
+        case Apply(fun @ _, List(expr)) if currentRun.runDefinitions.isBox(fun.symbol) =>
+          val nativeKind = tpeTK(expr)
+          genLoad(expr, nativeKind)
+          val MethodNameAndType(mname, mdesc) = asmBoxTo(nativeKind)
+          bc.invokestatic(BoxesRunTime.getInternalName, mname, mdesc)
+          generatedType = boxResultType(fun.symbol) // was toTypeKind(fun.symbol.tpe.resultType)
+
+        case Apply(fun @ _, List(expr)) if currentRun.runDefinitions.isUnbox(fun.symbol) =>
+          genLoad(expr)
+          val boxType = unboxResultType(fun.symbol) // was toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe)
+          generatedType = boxType
+          val MethodNameAndType(mname, mdesc) = asmUnboxTo(boxType)
+          bc.invokestatic(BoxesRunTime.getInternalName, mname, mdesc)
+
+        case app @ Apply(fun, args) =>
+          val sym = fun.symbol
+
+          if (sym.isLabel) {  // jump to a label
+            genLoadLabelArguments(args, labelDef(sym), app.pos)
+            bc goTo programPoint(sym)
+          } else if (isPrimitive(sym)) { // primitive method call
+            generatedType = genPrimitiveOp(app, expectedType)
+          } else {  // normal method call
+
+            def genNormalMethodCall() {
+
+              val invokeStyle =
+                if (sym.isStaticMember) icodes.opcodes.Static(onInstance = false)
+                else if (sym.isPrivate || sym.isClassConstructor) icodes.opcodes.Static(onInstance = true)
+                else icodes.opcodes.Dynamic;
+
+              if (invokeStyle.hasInstance) {
+                genLoadQualifier(fun)
+              }
+
+              genLoadArguments(args, paramTKs(app))
+
+              // In "a couple of cases", squirrel away extra information (hostClass, targetTypeKind). TODO Document what "a couple of cases" refers to.
+              var hostClass:      Symbol = null
+              var targetTypeKind: BType  = null
+              fun match {
+                case Select(qual, _) =>
+                  val qualSym = findHostClass(qual.tpe, sym)
+                  if (qualSym == ArrayClass) {
+                    targetTypeKind = tpeTK(qual)
+                    log(s"Stored target type kind for ${sym.fullName} as $targetTypeKind")
+                  }
+                  else {
+                    hostClass = qualSym
+                    if (qual.tpe.typeSymbol != qualSym) {
+                      log(s"Precisified host class for $sym from ${qual.tpe.typeSymbol.fullName} to ${qualSym.fullName}")
+                    }
+                  }
+
+                case _ =>
+              }
+              if ((targetTypeKind != null) && (sym == definitions.Array_clone) && invokeStyle.isDynamic) {
+                val target: String = targetTypeKind.getInternalName
+                bc.invokevirtual(target, "clone", "()Ljava/lang/Object;")
+              }
+              else {
+                genCallMethod(sym, invokeStyle, hostClass, app.pos)
+              }
+
+            } // end of genNormalMethodCall()
+
+            genNormalMethodCall()
+
+            generatedType = asmMethodType(sym).getReturnType
+          }
+
+      }
+
+      generatedType
+    } // end of genApply()
+
+    private def genArrayValue(av: ArrayValue): BType = {
+      val ArrayValue(tpt @ TypeTree(), elems) = av
+
+      val elmKind       = tpeTK(tpt)
+      val generatedType = arrayOf(elmKind)
+
+      lineNumber(av)
+      bc iconst   elems.length
+      bc newarray elmKind
+
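+      // Populate the array: for each element, duplicate the array reference, push the index,
+      // evaluate the element, and store it with the element-kind-appropriate array-store instruction.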
+      var i = 0
+      var rest = elems
+      while (!rest.isEmpty) {
+        bc dup     generatedType
+        bc iconst  i
+        genLoad(rest.head, elmKind)
+        bc astore  elmKind
+        rest = rest.tail
+        i = i + 1
+      }
+
+      generatedType
+    }
+
+    /*
+     * A Match node contains one or more case clauses,
+     * each case clause lists one or more Int values to use as keys, and a code block.
+     * The exception is the "default" case clause which (if it exists) doesn't list any Int key.
+     *
+     * On a first pass over the case clauses, we flatten the keys and their targets (the latter represented with asm.Labels).
+     * That representation allows JCodeMethodV to emit a lookupswitch or a tableswitch.
+     *
+     * On a second pass, we emit the switch blocks, one for each different target.
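+     *
+     * For example (illustrative only): a selector matched against the keys 1, 2 and 7 plus a wildcard
+     * yields flatKeys List(1, 2, 7), one target label per switch block (1 and 2 sharing a label when they
+     * appear as an Alternative with a single body), and a default label; emitSWITCH then picks a
+     * tableswitch or lookupswitch depending on key density (MIN_SWITCH_DENSITY).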
+     */
+    private def genMatch(tree: Match): BType = {
+      lineNumber(tree)
+      genLoad(tree.selector, INT)
+      val generatedType = tpeTK(tree)
+
+      var flatKeys: List[Int]       = Nil
+      var targets:  List[asm.Label] = Nil
+      var default:  asm.Label       = null
+      var switchBlocks: List[Tuple2[asm.Label, Tree]] = Nil
+
+      // collect switch blocks and their keys, but don't emit any switch-block yet.
+      for (caze @ CaseDef(pat, guard, body) <- tree.cases) {
+        assert(guard == EmptyTree, guard)
+        val switchBlockPoint = new asm.Label
+        switchBlocks ::= (switchBlockPoint, body)
+        pat match {
+          case Literal(value) =>
+            flatKeys ::= value.intValue
+            targets  ::= switchBlockPoint
+          case Ident(nme.WILDCARD) =>
+            assert(default == null, s"multiple default targets in a Match node, at ${tree.pos}")
+            default = switchBlockPoint
+          case Alternative(alts) =>
+            alts foreach {
+              case Literal(value) =>
+                flatKeys ::= value.intValue
+                targets  ::= switchBlockPoint
+              case _ =>
+                abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.pos}")
+            }
+          case _ =>
+            abort(s"Invalid pattern in Match node: $tree at: ${tree.pos}")
+        }
+      }
+      bc.emitSWITCH(mkArrayReverse(flatKeys), mkArray(targets.reverse), default, MIN_SWITCH_DENSITY)
+
+      // emit switch-blocks.
+      val postMatch = new asm.Label
+      for (sb <- switchBlocks.reverse) {
+        val (caseLabel, caseBody) = sb
+        markProgramPoint(caseLabel)
+        genLoad(caseBody, generatedType)
+        bc goTo postMatch
+      }
+
+      markProgramPoint(postMatch)
+      generatedType
+    }
+
+    def genBlock(tree: Block, expectedType: BType) {
+      val Block(stats, expr) = tree
+      val savedScope = varsInScope
+      varsInScope = Nil
+      stats foreach genStat
+      genLoad(expr, expectedType)
+      val end = currProgramPoint()
+      if (emitVars) { // add entries to LocalVariableTable JVM attribute
+        for ((sym, start) <- varsInScope.reverse) { emitLocalVarScope(sym, start, end) }
+      }
+      varsInScope = savedScope
+    }
+
+    def adapt(from: BType, to: BType) {
+      if (!conforms(from, to)) {
+        to match {
+          case UNIT => bc drop from
+          case _    => bc.emitT2T(from, to)
+        }
+      } else if (from.isNothingType) {
+        emit(asm.Opcodes.ATHROW) // here ICode would enter enterIgnoreMode; we rely instead on DCE at the ClassNode level.
+      } else if (from.isNullType) {
+        bc drop from
+        mnode.visitInsn(asm.Opcodes.ACONST_NULL)
+      }
+      else (from, to) match  {
+        case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => bc.emitT2T(INT, LONG)
+        case _ => ()
+      }
+    }
+
+    /* Emit code to load the qualifier of `tree` on top of the stack. */
+    def genLoadQualifier(tree: Tree) {
+      lineNumber(tree)
+      tree match {
+        case Select(qualifier, _) => genLoad(qualifier)
+        case _                    => abort(s"Unknown qualifier $tree")
+      }
+    }
+
+    /* Generate code that loads args into label parameters. */
+    def genLoadLabelArguments(args: List[Tree], lblDef: LabelDef, gotoPos: Position) {
+
+      val aps = {
+        val params: List[Symbol] = lblDef.params.map(_.symbol)
+        assert(args.length == params.length, s"Wrong number of arguments in call to label at: $gotoPos")
+
+        def isTrivial(kv: (Tree, Symbol)) = kv match {
+          case (This(_), p) if p.name == nme.THIS     => true
+          case (arg @ Ident(_), p) if arg.symbol == p => true
+          case _                                      => false
+        }
+
+        (args zip params) filterNot isTrivial
+      }
+
+      // first push *all* arguments. This makes sure multiple uses of the same labelDef-var will all denote the (previous) value.
+      aps foreach { case (arg, param) => genLoad(arg, locals(param).tk) } // `locals` is known to contain `param` because `genDefDef()` visited `labelDefsAtOrUnder`
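+      // e.g. for a LabelDef with params (x, y) jumped to as label(y, x), pushing both old values before
+      // any store is what makes the swap come out right; storing eagerly would clobber one of them.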
+
+      // second assign one by one to the LabelDef's variables.
+      aps.reverse foreach {
+        case (_, param) =>
+          // TODO FIXME a "this" param results from tail-call xform. If so, the `else` branch seems perfectly fine. And the `then` branch must be wrong.
+          if (param.name == nme.THIS) mnode.visitVarInsn(asm.Opcodes.ASTORE, 0)
+          else locals.store(param)
+      }
+
+    }
+
+    def genLoadArguments(args: List[Tree], btpes: List[BType]) {
+      (args zip btpes) foreach { case (arg, btpe) => genLoad(arg, btpe) }
+    }
+
+    def genLoadModule(tree: Tree): BType = {
+      val module = (
+        if (!tree.symbol.isPackageClass) tree.symbol
+        else tree.symbol.info.member(nme.PACKAGE) match {
+          case NoSymbol => abort(s"SI-5604: Cannot use package as value: $tree")
+          case s        => abort(s"SI-5604: found package class where package object expected: $tree")
+        }
+      )
+      lineNumber(tree)
+      genLoadModule(module)
+      symInfoTK(module)
+    }
+
+    def genLoadModule(module: Symbol) {
+      def inStaticMethod = methSymbol != null && methSymbol.isStaticMember
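+      // Within the module class' own instance methods (except readResolve) the module instance is just `this`;
+      // everywhere else it is read from the static MODULE$ field.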
+      if (claszSymbol == module.moduleClass && jMethodName != "readResolve" && !inStaticMethod) {
+        mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
+      } else {
+        val mbt  = symInfoTK(module)
+        mnode.visitFieldInsn(
+          asm.Opcodes.GETSTATIC,
+          mbt.getInternalName /* + "$" */ ,
+          strMODULE_INSTANCE_FIELD,
+          mbt.getDescriptor // for nostalgics: toTypeKind(module.tpe).getDescriptor
+        )
+      }
+    }
+
+    def genConversion(from: BType, to: BType, cast: Boolean) {
+      if (cast) { bc.emitT2T(from, to) }
+      else {
+        bc drop from
+        bc boolconst (from == to)
+      }
+    }
+
+    def genCast(to: BType, cast: Boolean) {
+      if (cast) { bc checkCast  to }
+      else      { bc isInstance to }
+    }
+
+    /* Is the given symbol a primitive operation? */
+    def isPrimitive(fun: Symbol): Boolean = scalaPrimitives.isPrimitive(fun)
+
+    /* Generate coercion denoted by "code" */
+    def genCoercion(code: Int) {
+      import scalaPrimitives._
+      (code: @switch) match {
+        case B2B | S2S | C2C | I2I | L2L | F2F | D2D => ()
+        case _ =>
+          val from = coercionFrom(code)
+          val to   = coercionTo(code)
+          bc.emitT2T(from, to)
+      }
+    }
+
+    def genStringConcat(tree: Tree): BType = {
+      lineNumber(tree)
+      liftStringConcat(tree) match {
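+        // liftStringConcat flattens the left-leaning chain of `+` applications so that a single chained
+        // concatenation (genStartConcat / genStringConcat / genEndConcat, typically realized via a
+        // java.lang.StringBuilder) can be emitted for the whole expression.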
+
+        // Optimization for expressions of the form "" + x.  We can avoid the StringBuilder.
+        case List(Literal(Constant("")), arg) =>
+          genLoad(arg, ObjectReference)
+          genCallMethod(String_valueOf, icodes.opcodes.Static(onInstance = false))
+
+        case concatenations =>
+          bc.genStartConcat
+          for (elem <- concatenations) {
+            val kind = tpeTK(elem)
+            genLoad(elem, kind)
+            bc.genStringConcat(kind)
+          }
+          bc.genEndConcat
+
+      }
+
+      StringReference
+    }
+
+    def genCallMethod(method: Symbol, style: InvokeStyle, hostClass0: Symbol = null, pos: Position = NoPosition) {
+
+      val siteSymbol = claszSymbol
+      val hostSymbol = if (hostClass0 == null) method.owner else hostClass0;
+      val methodOwner = method.owner
+      // info calls so that types are up to date; erasure may add lateINTERFACE to traits
+      hostSymbol.info ; methodOwner.info
+
+      def needsInterfaceCall(sym: Symbol) = (
+           sym.isInterface
+        || sym.isJavaDefined && sym.isNonBottomSubClass(definitions.ClassfileAnnotationClass)
+      )
+
+      // whether to reference the type of the receiver or
+      // the type of the method owner
+      val useMethodOwner = (
+           style != icodes.opcodes.Dynamic
+        || hostSymbol.isBottomClass
+        || methodOwner == definitions.ObjectClass
+      )
+      val receiver = if (useMethodOwner) methodOwner else hostSymbol
+      val bmOwner  = asmClassType(receiver)
+      val jowner   = bmOwner.getInternalName
+      val jname    = method.javaSimpleName.toString
+      val bmType   = asmMethodType(method)
+      val mdescr   = bmType.getDescriptor
+
+      def initModule() {
+        // we initialize the MODULE$ field immediately after the super ctor
+        if (!isModuleInitialized &&
+            jMethodName == INSTANCE_CONSTRUCTOR_NAME &&
+            jname == INSTANCE_CONSTRUCTOR_NAME &&
+            isStaticModule(siteSymbol)) {
+          isModuleInitialized = true
+          mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
+          mnode.visitFieldInsn(
+            asm.Opcodes.PUTSTATIC,
+            thisName,
+            strMODULE_INSTANCE_FIELD,
+            "L" + thisName + ";"
+          )
+        }
+      }
+
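+      // Opcode selection: Static(onInstance = true) becomes INVOKESPECIAL (private methods, constructors),
+      // Static(onInstance = false) becomes INVOKESTATIC, Dynamic becomes INVOKEINTERFACE or INVOKEVIRTUAL
+      // depending on the receiver, and SuperCall becomes INVOKESPECIAL followed, where applicable,
+      // by initialization of the MODULE$ field (see initModule above).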
+      if (style.isStatic) {
+        if (style.hasInstance) { bc.invokespecial  (jowner, jname, mdescr) }
+        else                   { bc.invokestatic   (jowner, jname, mdescr) }
+      }
+      else if (style.isDynamic) {
+        if (needsInterfaceCall(receiver)) { bc.invokeinterface(jowner, jname, mdescr) }
+        else                              { bc.invokevirtual  (jowner, jname, mdescr) }
+      }
+      else {
+        assert(style.isSuper, s"An unknown InvokeStyle: $style")
+        bc.invokespecial(jowner, jname, mdescr)
+        initModule()
+      }
+
+    } // end of genCallMethod()
+
+    /* Generate the scala ## method. */
+    def genScalaHash(tree: Tree): BType = {
+      genLoadModule(ScalaRunTimeModule) // TODO why load ScalaRunTimeModule if ## has InvokeStyle of Static(false) ?
+      genLoad(tree, ObjectReference)
+      genCallMethod(hashMethodSym, icodes.opcodes.Static(onInstance = false))
+
+      INT
+    }
+
+    /*
+     * Returns a list of trees that each should be concatenated, from left to right.
+     * It turns a chained call like "a".+("b").+("c") into a list of arguments.
+     */
+    def liftStringConcat(tree: Tree): List[Tree] = tree match {
+      case Apply(fun @ Select(larg, method), rarg) =>
+        if (isPrimitive(fun.symbol) &&
+            scalaPrimitives.getPrimitive(fun.symbol) == scalaPrimitives.CONCAT)
+          liftStringConcat(larg) ::: rarg
+        else
+          tree :: Nil
+      case _ =>
+        tree :: Nil
+    }
+
+    /* Some useful equality helpers. */
+    def isNull(t: Tree) = {
+      t match {
+        case Literal(Constant(null)) => true
+        case _ => false
+      }
+    }
+
+    /* If l or r is constant null, returns the other; otherwise null */
+    def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null
+
+    /* Emit code to compare the two top-most stack values using the 'op' operator. */
+    private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType) {
+      if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+        bc.emitIF_ICMP(op, success)
+      } else if (tk.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+        bc.emitIF_ACMP(op, success)
+      } else {
+        (tk: @unchecked) match {
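+          // Wide and floating-point comparisons first collapse both operands into an int via xCMP,
+          // after which a plain IF<op> jump suffices. FCMPG/FCMPL (resp. DCMPG/DCMPL) differ only in the
+          // value pushed for NaN; the variant is chosen so that NaN operands behave as IEEE 754 mandates.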
+          case LONG   => emit(asm.Opcodes.LCMP)
+          case FLOAT  =>
+            if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.FCMPG)
+            else emit(asm.Opcodes.FCMPL)
+          case DOUBLE =>
+            if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.DCMPG)
+            else emit(asm.Opcodes.DCMPL)
+        }
+        bc.emitIF(op, success)
+      }
+      bc goTo failure
+    }
+
+    /* Emits code to compare (and consume) stack-top and zero using the 'op' operator */
+    private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType) {
+      if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+        bc.emitIF(op, success)
+      } else if (tk.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+        // @unchecked because references aren't compared with GT, GE, LT, LE.
+        (op : @unchecked) match {
+          case icodes.EQ => bc emitIFNULL    success
+          case icodes.NE => bc emitIFNONNULL success
+        }
+      } else {
+        (tk: @unchecked) match {
+          case LONG   =>
+            emit(asm.Opcodes.LCONST_0)
+            emit(asm.Opcodes.LCMP)
+          case FLOAT  =>
+            emit(asm.Opcodes.FCONST_0)
+            if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.FCMPG)
+            else emit(asm.Opcodes.FCMPL)
+          case DOUBLE =>
+            emit(asm.Opcodes.DCONST_0)
+            if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.DCMPG)
+            else emit(asm.Opcodes.DCMPL)
+        }
+        bc.emitIF(op, success)
+      }
+      bc goTo failure
+    }
+
+    val testOpForPrimitive: Array[TestOp] = Array(
+      icodes.EQ, icodes.NE, icodes.EQ, icodes.NE, icodes.LT, icodes.LE, icodes.GE, icodes.GT
+    )
+
+    /*
+     * Generate code for conditional expressions.
+     * The two jump targets, `success` and `failure`, correspond to the then-branch and the else-branch of the test respectively.
+     */
+    private def genCond(tree: Tree, success: asm.Label, failure: asm.Label) {
+
+      def genComparisonOp(l: Tree, r: Tree, code: Int) {
+        val op: TestOp = testOpForPrimitive(code - scalaPrimitives.ID)
+        // special-case reference (in)equality test for null (null eq x, x eq null)
+        var nonNullSide: Tree = null
+        if (scalaPrimitives.isReferenceEqualityOp(code) &&
+            { nonNullSide = ifOneIsNull(l, r); nonNullSide != null }
+        ) {
+          genLoad(nonNullSide, ObjectReference)
+          genCZJUMP(success, failure, op, ObjectReference)
+        }
+        else {
+          val tk = maxType(tpeTK(l), tpeTK(r))
+          genLoad(l, tk)
+          genLoad(r, tk)
+          genCJUMP(success, failure, op, tk)
+        }
+      }
+
+      def default() = {
+        genLoad(tree, BOOL)
+        genCZJUMP(success, failure, icodes.NE, BOOL)
+      }
+
+      lineNumber(tree)
+      tree match {
+
+        case Apply(fun, args) if isPrimitive(fun.symbol) =>
+          import scalaPrimitives.{ ZNOT, ZAND, ZOR, EQ, getPrimitive }
+
+          // lhs and rhs of test
+          lazy val Select(lhs, _) = fun
+          val rhs = if (args.isEmpty) EmptyTree else args.head; // args.isEmpty only for ZNOT
+
+          def genZandOrZor(and: Boolean) { // TODO WRONG
+            // reaching "keepGoing" indicates the rhs should be evaluated too (ie not short-circuited).
+            val keepGoing = new asm.Label
+
+            if (and) genCond(lhs, keepGoing, failure)
+            else     genCond(lhs, success,   keepGoing)
+
+            markProgramPoint(keepGoing)
+            genCond(rhs, success, failure)
+          }
+
+          getPrimitive(fun.symbol) match {
+            case ZNOT   => genCond(lhs, failure, success)
+            case ZAND   => genZandOrZor(and = true)
+            case ZOR    => genZandOrZor(and = false)
+            case code   =>
+              // TODO !!!!!!!!!! isReferenceType, in the sense of TypeKind? (ie non-array, non-boxed, non-nothing, may be null)
+              if (scalaPrimitives.isUniversalEqualityOp(code) && tpeTK(lhs).hasObjectSort) {
+                // `lhs` has reference type
+                if (code == EQ) genEqEqPrimitive(lhs, rhs, success, failure)
+                else            genEqEqPrimitive(lhs, rhs, failure, success)
+              }
+              else if (scalaPrimitives.isComparisonOp(code))
+                genComparisonOp(lhs, rhs, code)
+              else
+                default
+          }
+
+        case _ => default
+      }
+
+    } // end of genCond()
+
+    /*
+     * Generate the "==" code for object references. It is equivalent to
+     * if (l eq null) r eq null else l.equals(r);
+     *
+     * @param l       left-hand-side  of the '=='
+     * @param r       right-hand-side of the '=='
+     */
+    def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label) {
+
+      /* True if the equality comparison is between values that require the use of the rich equality
+       * comparator (scala.runtime.Comparator.equals). This is the case when either side of the
+       * comparison might have a run-time type that is a subtype of java.lang.Number or java.lang.Character.
+       * When it is statically known that both sides have the same type and are subtypes of Number or Character,
+       * not using the rich equality is possible (their own equals method will do).
+       */
+      val mustUseAnyComparator: Boolean = {
+        val areSameFinals = l.tpe.isFinalType && r.tpe.isFinalType && (l.tpe =:= r.tpe)
+
+        !areSameFinals && platform.isMaybeBoxed(l.tpe.typeSymbol) && platform.isMaybeBoxed(r.tpe.typeSymbol)
+      }
+
+      if (mustUseAnyComparator) {
+        val equalsMethod: Symbol = {
+          if (l.tpe <:< BoxedNumberClass.tpe) {
+            if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum
+            else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar
+            else platform.externalEqualsNumObject
+          } else platform.externalEquals
+        }
+        genLoad(l, ObjectReference)
+        genLoad(r, ObjectReference)
+        genCallMethod(equalsMethod, icodes.opcodes.Static(onInstance = false))
+        genCZJUMP(success, failure, icodes.NE, BOOL)
+      }
+      else {
+        if (isNull(l)) {
+          // null == expr -> expr eq null
+          genLoad(r, ObjectReference)
+          genCZJUMP(success, failure, icodes.EQ, ObjectReference)
+        } else if (isNull(r)) {
+          // expr == null -> expr eq null
+          genLoad(l, ObjectReference)
+          genCZJUMP(success, failure, icodes.EQ, ObjectReference)
+        } else {
+          // l == r -> if (l eq null) r eq null else l.equals(r)
+          val eqEqTempLocal = locals.makeLocal(AnyRefReference, nme.EQEQ_LOCAL_VAR.toString)
+          val lNull    = new asm.Label
+          val lNonNull = new asm.Label
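+          // Stack discipline: load l then r, stash r in a temp local, and test l against null;
+          // on the null branch compare the stashed r with null, on the non-null branch call l.equals(r).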
+
+          genLoad(l, ObjectReference)
+          genLoad(r, ObjectReference)
+          locals.store(eqEqTempLocal)
+          bc dup ObjectReference
+          genCZJUMP(lNull, lNonNull, icodes.EQ, ObjectReference)
+
+          markProgramPoint(lNull)
+          bc drop ObjectReference
+          locals.load(eqEqTempLocal)
+          genCZJUMP(success, failure, icodes.EQ, ObjectReference)
+
+          markProgramPoint(lNonNull)
+          locals.load(eqEqTempLocal)
+          genCallMethod(Object_equals, icodes.opcodes.Dynamic)
+          genCZJUMP(success, failure, icodes.NE, BOOL)
+        }
+      }
+    }
+
+
+    def genSynchronized(tree: Apply, expectedType: BType): BType
+    def genLoadTry(tree: Try): BType
+
+  }
+
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala
new file mode 100644
index 0000000..cc3265c
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala
@@ -0,0 +1,716 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package backend.jvm
+
+import scala.tools.asm
+import scala.annotation.switch
+import scala.collection.{ immutable, mutable }
+
+/*
+ *  Immutable representations of bytecode-level types.
+ *
+ *  @author  Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded
+ *  @version 1.0
+ *
+ */
+abstract class BCodeGlue extends SubComponent {
+
+  import global._
+
+  object BType {
+
+    import global.chrs
+
+    // ------------- sorts -------------
+
+    val VOID   : Int =  0
+    val BOOLEAN: Int =  1
+    val CHAR   : Int =  2
+    val BYTE   : Int =  3
+    val SHORT  : Int =  4
+    val INT    : Int =  5
+    val FLOAT  : Int =  6
+    val LONG   : Int =  7
+    val DOUBLE : Int =  8
+    val ARRAY  : Int =  9
+    val OBJECT : Int = 10
+    val METHOD : Int = 11
+
+    // ------------- primitive types -------------
+
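+    // For primitive types `off` is not an offset into chrs but a packed word, mirroring asm.Type:
+    // byte 3 holds the descriptor character, byte 2 the opcode offset for most instructions,
+    // byte 1 the opcode offset for IALOAD/IASTORE, and byte 0 the size in stack slots
+    // (see getDescriptor, getOpcode and getSize below).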
+    val VOID_TYPE    = new BType(VOID,    ('V' << 24) | (5 << 16) | (0 << 8) | 0, 1)
+    val BOOLEAN_TYPE = new BType(BOOLEAN, ('Z' << 24) | (0 << 16) | (5 << 8) | 1, 1)
+    val CHAR_TYPE    = new BType(CHAR,    ('C' << 24) | (0 << 16) | (6 << 8) | 1, 1)
+    val BYTE_TYPE    = new BType(BYTE,    ('B' << 24) | (0 << 16) | (5 << 8) | 1, 1)
+    val SHORT_TYPE   = new BType(SHORT,   ('S' << 24) | (0 << 16) | (7 << 8) | 1, 1)
+    val INT_TYPE     = new BType(INT,     ('I' << 24) | (0 << 16) | (0 << 8) | 1, 1)
+    val FLOAT_TYPE   = new BType(FLOAT,   ('F' << 24) | (2 << 16) | (2 << 8) | 1, 1)
+    val LONG_TYPE    = new BType(LONG,    ('J' << 24) | (1 << 16) | (1 << 8) | 2, 1)
+    val DOUBLE_TYPE  = new BType(DOUBLE,  ('D' << 24) | (3 << 16) | (3 << 8) | 2, 1)
+
+    /*
+     * Returns the Java type corresponding to the given type descriptor.
+     *
+     * @param off the offset of this descriptor in the chrs buffer.
+     * @return the Java type corresponding to the given type descriptor.
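+     *         For example, with chrs holding "[Ljava/lang/String;" at `off` the result is a BType of
+     *         sort ARRAY spanning the whole descriptor, whereas "Ljava/lang/String;" yields a BType of
+     *         sort OBJECT spanning only the internal name between 'L' and ';'.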
+     *
+     * can-multi-thread
+     */
+    def getType(off: Int): BType = {
+      var len = 0
+      chrs(off) match {
+        case 'V' => VOID_TYPE
+        case 'Z' => BOOLEAN_TYPE
+        case 'C' => CHAR_TYPE
+        case 'B' => BYTE_TYPE
+        case 'S' => SHORT_TYPE
+        case 'I' => INT_TYPE
+        case 'F' => FLOAT_TYPE
+        case 'J' => LONG_TYPE
+        case 'D' => DOUBLE_TYPE
+        case '[' =>
+          len = 1
+          while (chrs(off + len) == '[') {
+            len += 1
+          }
+          if (chrs(off + len) == 'L') {
+            len += 1
+            while (chrs(off + len) != ';') {
+              len += 1
+            }
+          }
+          new BType(ARRAY, off, len + 1)
+        case 'L' =>
+          len = 1
+          while (chrs(off + len) != ';') {
+            len += 1
+          }
+          new BType(OBJECT, off + 1, len - 1)
+        // case '(':
+        case _ =>
+          assert(chrs(off) == '(')
+          var resPos = off + 1
+          while (chrs(resPos) != ')') { resPos += 1 }
+          val resType = getType(resPos + 1)
+          val len = resPos - off + 1 + resType.len;
+          new BType(
+            METHOD,
+            off,
+            if (resType.hasObjectSort) {
+              len + 2 // "+ 2" accounts for the "L ... ;" in a descriptor for a non-array reference.
+            } else {
+              len
+            }
+          )
+      }
+    }
+
+    /* Params denote an internal name.
+     *  can-multi-thread
+     */
+    def getObjectType(index: Int, length: Int): BType = {
+      val sort = if (chrs(index) == '[') ARRAY else OBJECT;
+      new BType(sort, index, length)
+    }
+
+    /*
+     * @param methodDescriptor a method descriptor.
+     *
+     * must-single-thread
+     */
+    def getMethodType(methodDescriptor: String): BType = {
+      val n = global.newTypeName(methodDescriptor)
+      new BType(BType.METHOD, n.start, n.length) // TODO assert isValidMethodDescriptor
+    }
+
+    /*
+     * Returns the Java method type corresponding to the given argument and return types.
+     *
+     * @param returnType the return type of the method.
+     * @param argumentTypes the argument types of the method.
+     * @return the Java type corresponding to the given argument and return types.
+     *
+     * must-single-thread
+     */
+    def getMethodType(returnType: BType, argumentTypes: Array[BType]): BType = {
+      val n = global.newTypeName(getMethodDescriptor(returnType, argumentTypes))
+      new BType(BType.METHOD, n.start, n.length)
+    }
+
+    /*
+     * Returns the Java types corresponding to the argument types of the method descriptor whose first argument starts at idx0.
+     *
+     * @param idx0 index into chrs of the first argument.
+     * @return the Java types corresponding to the argument types of the given method descriptor.
+     *
+     * can-multi-thread
+     */
+    private def getArgumentTypes(idx0: Int): Array[BType] = {
+      assert(chrs(idx0 - 1) == '(', "doesn't look like a method descriptor.")
+      val args = new Array[BType](getArgumentCount(idx0))
+      var off = idx0
+      var size = 0
+      while (chrs(off) != ')') {
+        args(size) = getType(off)
+        off += args(size).len
+        if (args(size).sort == OBJECT) { off += 2 }
+        // debug: assert("LVZBSCIJFD[)".contains(chrs(off)))
+        size += 1
+      }
+      // debug: var check = 0; while (check < args.length) { assert(args(check) != null); check += 1 }
+      args
+    }
+
+    /*
+     * Returns the number of argument types of this method type, whose first argument starts at idx0.
+     *
+     * @param idx0 index into chrs of the first argument.
+     * @return the number of argument types of this method type.
+     *
+     * can-multi-thread
+     */
+    private def getArgumentCount(idx0: Int): Int = {
+      assert(chrs(idx0 - 1) == '(', "doesn't look like a method descriptor.")
+      var off  = idx0
+      var size = 0
+      var keepGoing = true
+      while (keepGoing) {
+        val car = chrs(off)
+        off += 1
+        if (car == ')') {
+          keepGoing = false
+        } else if (car == 'L') {
+          while (chrs(off) != ';') { off += 1 }
+          off += 1
+          size += 1
+        } else if (car != '[') {
+          size += 1
+        }
+      }
+
+      size
+    }
+
+    /*
+     * Returns the Java type corresponding to the return type of the given
+     * method descriptor.
+     *
+     * @param methodDescriptor a method descriptor.
+     * @return the Java type corresponding to the return type of the given method descriptor.
+     *
+     * must-single-thread
+     */
+    def getReturnType(methodDescriptor: String): BType = {
+      val n     = global.newTypeName(methodDescriptor)
+      val delta = n.pos(')') // `delta` is relative to the Name's zero-based start position, not a valid index into chrs.
+      assert(delta < n.length, s"not a valid method descriptor: $methodDescriptor")
+      getType(n.start + delta + 1)
+    }
+
+    /*
+     * Returns the descriptor corresponding to the given argument and return types.
+     * Note: no BType is created here for the resulting method descriptor,
+     *       if that's desired the invoker is responsible for that.
+     *
+     * @param returnType the return type of the method.
+     * @param argumentTypes the argument types of the method.
+     * @return the descriptor corresponding to the given argument and return types.
+     *
+     * can-multi-thread
+     */
+    def getMethodDescriptor(
+        returnType: BType,
+        argumentTypes: Array[BType]): String =
+    {
+      val buf = new StringBuffer()
+      buf.append('(')
+      var i = 0
+      while (i < argumentTypes.length) {
+        argumentTypes(i).getDescriptor(buf)
+        i += 1
+      }
+      buf.append(')')
+      returnType.getDescriptor(buf)
+      buf.toString()
+    }
+
+  } // end of object BType
+
+  /*
+   * Based on ASM's Type class. Namer's chrs is used in this class for the same purposes as the `buf` char array in asm.Type.
+   *
+   * All methods of this class can-multi-thread
+   */
+  final class BType(val sort: Int, val off: Int, val len: Int) {
+
+    import global.chrs
+
+    /*
+     * can-multi-thread
+     */
+    def toASMType: scala.tools.asm.Type = {
+      import scala.tools.asm
+      // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
+      (sort: @switch) match {
+        case asm.Type.VOID    => asm.Type.VOID_TYPE
+        case asm.Type.BOOLEAN => asm.Type.BOOLEAN_TYPE
+        case asm.Type.CHAR    => asm.Type.CHAR_TYPE
+        case asm.Type.BYTE    => asm.Type.BYTE_TYPE
+        case asm.Type.SHORT   => asm.Type.SHORT_TYPE
+        case asm.Type.INT     => asm.Type.INT_TYPE
+        case asm.Type.FLOAT   => asm.Type.FLOAT_TYPE
+        case asm.Type.LONG    => asm.Type.LONG_TYPE
+        case asm.Type.DOUBLE  => asm.Type.DOUBLE_TYPE
+        case asm.Type.ARRAY   |
+             asm.Type.OBJECT  => asm.Type.getObjectType(getInternalName)
+        case asm.Type.METHOD  => asm.Type.getMethodType(getDescriptor)
+      }
+    }
+
+    /*
+     * Unlike for ICode's REFERENCE, isBoxedType(t) implies isReferenceType(t).
+     * Also, `isReferenceType(RT_NOTHING) == true`, and similarly for RT_NULL.
+     * Use isNullType() and isNothingType() to detect Nothing and Null.
+     *
+     * can-multi-thread
+     */
+    def hasObjectSort = (sort == BType.OBJECT)
+
+    /*
+     * Returns the number of dimensions of this array type. This method should
+     * only be used for an array type.
+     *
+     * @return the number of dimensions of this array type.
+     *
+     * can-multi-thread
+     */
+    def getDimensions: Int = {
+      var i = 1
+      while (chrs(off + i) == '[') {
+        i += 1
+      }
+      i
+    }
+
+    /*
+     * Returns the (ultimate) element type of this array type.
+     * This method should only be used for an array type.
+     *
+     * @return Returns the type of the elements of this array type.
+     *
+     * can-multi-thread
+     */
+    def getElementType: BType = {
+      assert(isArray, s"Asked for the element type of a non-array type: $this")
+      BType.getType(off + getDimensions)
+    }
+
+    /*
+     * Returns the internal name of the class corresponding to this object or
+     * array type. The internal name of a class is its fully qualified name (as
+     * returned by Class.getName(), where '.' are replaced by '/'). This method
+     * should only be used for an object or array type.
+     *
+     * @return the internal name of the class corresponding to this object type.
+     *
+     * can-multi-thread
+     */
+    def getInternalName: String = {
+      new String(chrs, off, len)
+    }
+
+    /*
+     * @return the suffix of the internal name after the last '/' (if a '/' is present), the internal name otherwise.
+     *
+     * can-multi-thread
+     */
+    def getSimpleName: String = {
+      assert(hasObjectSort, s"not of object sort: $toString")
+      val iname = getInternalName
+      val idx = iname.lastIndexOf('/')
+      if (idx == -1) iname
+      else iname.substring(idx + 1)
+    }
+
+    /*
+     * Returns the argument types of methods of this type.
+     * This method should only be used for method types.
+     *
+     * @return the argument types of methods of this type.
+     *
+     * can-multi-thread
+     */
+    def getArgumentTypes: Array[BType] = {
+      BType.getArgumentTypes(off + 1)
+    }
+
+    /*
+     * Returns the return type of methods of this type.
+     * This method should only be used for method types.
+     *
+     * @return the return type of methods of this type.
+     *
+     * can-multi-thread
+     */
+    def getReturnType: BType = {
+      assert(chrs(off) == '(', s"doesn't look like a method descriptor: $toString")
+      var resPos = off + 1
+      while (chrs(resPos) != ')') { resPos += 1 }
+      BType.getType(resPos + 1)
+    }
+
+    // ------------------------------------------------------------------------
+    // Inspector methods
+    // ------------------------------------------------------------------------
+
+    def isPrimitiveOrVoid = (sort <  BType.ARRAY) // can-multi-thread
+    def isValueType       = (sort <  BType.ARRAY) // can-multi-thread
+    def isArray           = (sort == BType.ARRAY) // can-multi-thread
+    def isUnitType        = (sort == BType.VOID)  // can-multi-thread
+
+    def isRefOrArrayType   = { hasObjectSort ||  isArray    } // can-multi-thread
+    def isNonUnitValueType = { isValueType   && !isUnitType } // can-multi-thread
+
+    def isNonSpecial  = { !isValueType && !isArray && !isPhantomType   } // can-multi-thread
+    def isNothingType = { (this == RT_NOTHING) || (this == CT_NOTHING) } // can-multi-thread
+    def isNullType    = { (this == RT_NULL)    || (this == CT_NULL)    } // can-multi-thread
+    def isPhantomType = { isNothingType || isNullType } // can-multi-thread
+
+    /*
+     * can-multi-thread
+     */
+    def isBoxed = {
+      this match {
+        case BOXED_UNIT  | BOXED_BOOLEAN | BOXED_CHAR   |
+             BOXED_BYTE  | BOXED_SHORT   | BOXED_INT    |
+             BOXED_FLOAT | BOXED_LONG    | BOXED_DOUBLE
+          => true
+        case _
+          => false
+      }
+    }
+
+    /* On the JVM,
+     *    BOOL, BYTE, CHAR, SHORT, and INT
+     *  are like Ints for the purpose of lub calculation.
+     *
+     * can-multi-thread
+     */
+    def isIntSizedType = {
+      (sort : @switch) match {
+        case BType.BOOLEAN | BType.CHAR  |
+             BType.BYTE    | BType.SHORT | BType.INT
+          => true
+        case _
+          => false
+      }
+    }
+
+    /* On the JVM, similar to isIntSizedType except that BOOL isn't integral while LONG is.
+     *
+     * can-multi-thread
+     */
+    def isIntegralType = {
+      (sort : @switch) match {
+        case BType.CHAR  |
+             BType.BYTE  | BType.SHORT | BType.INT |
+             BType.LONG
+          => true
+        case _
+          => false
+      }
+    }
+
+    /* On the JVM, FLOAT and DOUBLE.
+     *
+     * can-multi-thread
+     */
+    def isRealType = { (sort == BType.FLOAT ) || (sort == BType.DOUBLE) }
+
+    def isNumericType = (isIntegralType || isRealType) // can-multi-thread
+
+    /* Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?)
+     *
+     * can-multi-thread
+     */
+    def isWideType = (getSize == 2)
+
+    /*
+     * Element vs. Component type of an array:
+     * Quoting from the JVMS, Sec. 2.4 "Reference Types and Values"
+     *
+     *   An array type consists of a component type with a single dimension (whose
+     *   length is not given by the type). The component type of an array type may itself be
+     *   an array type. If, starting from any array type, one considers its component type,
+     *   and then (if that is also an array type) the component type of that type, and so on,
+     *   eventually one must reach a component type that is not an array type; this is called
+     *   the element type of the array type. The element type of an array type is necessarily
+     *   either a primitive type, or a class type, or an interface type.
+     *
+     */
+
+    /* The type of items this array holds.
+     *
+     * can-multi-thread
+     */
+    def getComponentType: BType = {
+      assert(isArray, s"Asked for the component type of a non-array type: $this")
+      BType.getType(off + 1)
+    }
+
+    // ------------------------------------------------------------------------
+    // Conversion to type descriptors
+    // ------------------------------------------------------------------------
+
+    /*
+     * @return the descriptor corresponding to this Java type.
+     *
+     * can-multi-thread
+     */
+    def getDescriptor: String = {
+      val buf = new StringBuffer()
+      getDescriptor(buf)
+      buf.toString()
+    }
+
+    /*
+     * Appends the descriptor corresponding to this Java type to the given string buffer.
+     *
+     * @param buf the string buffer to which the descriptor must be appended.
+     *
+     * can-multi-thread
+     */
+    private def getDescriptor(buf: StringBuffer) {
+      if (isPrimitiveOrVoid) {
+        // descriptor is in byte 3 of 'off' for primitive types (buf == null)
+        buf.append(((off & 0xFF000000) >>> 24).asInstanceOf[Char])
+      } else if (sort == BType.OBJECT) {
+        buf.append('L')
+        buf.append(chrs, off, len)
+        buf.append(';')
+      } else { // sort == ARRAY || sort == METHOD
+        buf.append(chrs, off, len)
+      }
+    }
+
+    // ------------------------------------------------------------------------
+    // Corresponding size and opcodes
+    // ------------------------------------------------------------------------
+
+    /*
+     * Returns the size of values of this type.
+     * This method must not be used for method types.
+     *
+     * @return the size of values of this type, i.e., 2 for <tt>long</tt> and
+     *         <tt>double</tt>, 0 for <tt>void</tt> and 1 otherwise.
+     *
+     * can-multi-thread
+     */
+    def getSize: Int = {
+      // the size is in byte 0 of 'off' for primitive types (buf == null)
+      if (isPrimitiveOrVoid) (off & 0xFF) else 1
+    }
+
+    /*
+     * Returns a JVM instruction opcode adapted to this Java type. This method
+     * must not be used for method types.
+     *
+     * @param opcode a JVM instruction opcode. This opcode must be one of ILOAD,
+     *        ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL,
+     *        ISHR, IUSHR, IAND, IOR, IXOR and IRETURN.
+     * @return an opcode that is similar to the given opcode, but adapted to
+     *         this Java type. For example, if this type is <tt>float</tt> and
+     *         <tt>opcode</tt> is IRETURN, this method returns FRETURN.
+     *
+     * can-multi-thread
+     */
+    def getOpcode(opcode: Int): Int = {
+      import scala.tools.asm.Opcodes
+      if (opcode == Opcodes.IALOAD || opcode == Opcodes.IASTORE) {
+        // the offset for IALOAD or IASTORE is in byte 1 of 'off' for
+        // primitive types (buf == null)
+        opcode + (if (isPrimitiveOrVoid) (off & 0xFF00) >> 8 else 4)
+      } else {
+        // the offset for other instructions is in byte 2 of 'off' for
+        // primitive types (buf == null)
+        opcode + (if (isPrimitiveOrVoid) (off & 0xFF0000) >> 16 else 4)
+      }
+    }
+
+    // ------------------------------------------------------------------------
+    // Equals, hashCode and toString
+    // ------------------------------------------------------------------------
+
+    /*
+     * Tests if the given object is equal to this type.
+     *
+     * @param o the object to be compared to this type.
+     * @return <tt>true</tt> if the given object is equal to this type.
+     *
+     * can-multi-thread
+     */
+    override def equals(o: Any): Boolean = {
+      if (!(o.isInstanceOf[BType])) {
+        return false
+      }
+      val t = o.asInstanceOf[BType]
+      if (this eq t) {
+        return true
+      }
+      if (sort != t.sort) {
+        return false
+      }
+      if (sort >= BType.ARRAY) {
+        if (len != t.len) {
+          return false
+        }
+        // sort checked already
+        if (off == t.off) {
+          return true
+        }
+        var i = 0
+        while (i < len) {
+          if (chrs(off + i) != chrs(t.off + i)) {
+            return false
+          }
+          i += 1
+        }
+        // If we reach here, we could update the largest of (this.off, t.off) to match the other, so as to simplify future == comparisons.
+        // But that would require a var rather than val.
+      }
+      true
+    }
+
+    /*
+     * @return a hash code value for this type.
+     *
+     * can-multi-thread
+     */
+    override def hashCode(): Int = {
+      var hc = 13 * sort;
+      if (sort >= BType.ARRAY) {
+        var i = off
+        val end = i + len
+        while (i < end) {
+          hc = 17 * (hc + chrs(i))
+          i += 1
+        }
+      }
+      hc
+    }
+
+    /*
+     * @return the descriptor of this type.
+     *
+     * can-multi-thread
+     */
+    override def toString: String = { getDescriptor }
+
+  }
+
+  /*
+   * Creates a TypeName and the BType token for it.
+   * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that.
+   *
+   * must-single-thread
+   */
+  def brefType(iname: String): BType = { brefType(newTypeName(iname.toCharArray(), 0, iname.length())) }
+
+  /*
+   * Creates a BType token for the TypeName received as argument.
+   * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that.
+   *
+   *  can-multi-thread
+   */
+  def brefType(iname: TypeName): BType = { BType.getObjectType(iname.start, iname.length) }
+
+  // due to keyboard economy only
+  val UNIT   = BType.VOID_TYPE
+  val BOOL   = BType.BOOLEAN_TYPE
+  val CHAR   = BType.CHAR_TYPE
+  val BYTE   = BType.BYTE_TYPE
+  val SHORT  = BType.SHORT_TYPE
+  val INT    = BType.INT_TYPE
+  val LONG   = BType.LONG_TYPE
+  val FLOAT  = BType.FLOAT_TYPE
+  val DOUBLE = BType.DOUBLE_TYPE
+
+  val BOXED_UNIT    = brefType("java/lang/Void")
+  val BOXED_BOOLEAN = brefType("java/lang/Boolean")
+  val BOXED_BYTE    = brefType("java/lang/Byte")
+  val BOXED_SHORT   = brefType("java/lang/Short")
+  val BOXED_CHAR    = brefType("java/lang/Character")
+  val BOXED_INT     = brefType("java/lang/Integer")
+  val BOXED_LONG    = brefType("java/lang/Long")
+  val BOXED_FLOAT   = brefType("java/lang/Float")
+  val BOXED_DOUBLE  = brefType("java/lang/Double")
+
+  /*
+   * RT_NOTHING and RT_NULL exist at run-time only.
+   * They are the bytecode-level manifestation (in method signatures only) of what shows up as NothingClass resp. NullClass in Scala ASTs.
+   * Therefore, when RT_NOTHING or RT_NULL are to be emitted,
+   * a mapping is needed: the internal names of NothingClass and NullClass can't be emitted as-is.
+   */
+  val RT_NOTHING = brefType("scala/runtime/Nothing$")
+  val RT_NULL    = brefType("scala/runtime/Null$")
+  val CT_NOTHING = brefType("scala/Nothing") // TODO needed?
+  val CT_NULL    = brefType("scala/Null")    // TODO needed?
+
+  val srBooleanRef = brefType("scala/runtime/BooleanRef")
+  val srByteRef    = brefType("scala/runtime/ByteRef")
+  val srCharRef    = brefType("scala/runtime/CharRef")
+  val srIntRef     = brefType("scala/runtime/IntRef")
+  val srLongRef    = brefType("scala/runtime/LongRef")
+  val srFloatRef   = brefType("scala/runtime/FloatRef")
+  val srDoubleRef  = brefType("scala/runtime/DoubleRef")
+
+  /*  Map from type kinds to the Java reference types.
+   *  Useful when pushing class literals onto the operand stack (ldc instruction taking a class literal).
+   *  @see Predef.classOf
+   *  @see genConstant()
+   */
+  val classLiteral = immutable.Map[BType, BType](
+    UNIT   -> BOXED_UNIT,
+    BOOL   -> BOXED_BOOLEAN,
+    BYTE   -> BOXED_BYTE,
+    SHORT  -> BOXED_SHORT,
+    CHAR   -> BOXED_CHAR,
+    INT    -> BOXED_INT,
+    LONG   -> BOXED_LONG,
+    FLOAT  -> BOXED_FLOAT,
+    DOUBLE -> BOXED_DOUBLE
+  )
+
+  case class MethodNameAndType(mname: String, mdesc: String)
+
+  val asmBoxTo: Map[BType, MethodNameAndType] = {
+    Map(
+      BOOL   -> MethodNameAndType("boxToBoolean",   "(Z)Ljava/lang/Boolean;"  ) ,
+      BYTE   -> MethodNameAndType("boxToByte",      "(B)Ljava/lang/Byte;"     ) ,
+      CHAR   -> MethodNameAndType("boxToCharacter", "(C)Ljava/lang/Character;") ,
+      SHORT  -> MethodNameAndType("boxToShort",     "(S)Ljava/lang/Short;"    ) ,
+      INT    -> MethodNameAndType("boxToInteger",   "(I)Ljava/lang/Integer;"  ) ,
+      LONG   -> MethodNameAndType("boxToLong",      "(J)Ljava/lang/Long;"     ) ,
+      FLOAT  -> MethodNameAndType("boxToFloat",     "(F)Ljava/lang/Float;"    ) ,
+      DOUBLE -> MethodNameAndType("boxToDouble",    "(D)Ljava/lang/Double;"   )
+    )
+  }
+
+  val asmUnboxTo: Map[BType, MethodNameAndType] = {
+    Map(
+      BOOL   -> MethodNameAndType("unboxToBoolean", "(Ljava/lang/Object;)Z") ,
+      BYTE   -> MethodNameAndType("unboxToByte",    "(Ljava/lang/Object;)B") ,
+      CHAR   -> MethodNameAndType("unboxToChar",    "(Ljava/lang/Object;)C") ,
+      SHORT  -> MethodNameAndType("unboxToShort",   "(Ljava/lang/Object;)S") ,
+      INT    -> MethodNameAndType("unboxToInt",     "(Ljava/lang/Object;)I") ,
+      LONG   -> MethodNameAndType("unboxToLong",    "(Ljava/lang/Object;)J") ,
+      FLOAT  -> MethodNameAndType("unboxToFloat",   "(Ljava/lang/Object;)F") ,
+      DOUBLE -> MethodNameAndType("unboxToDouble",  "(Ljava/lang/Object;)D")
+    )
+  }
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
new file mode 100644
index 0000000..359e5d6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
@@ -0,0 +1,1204 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package backend.jvm
+
+import scala.tools.asm
+import scala.annotation.switch
+import scala.collection.{ immutable, mutable }
+import scala.tools.nsc.io.AbstractFile
+
+/*
+ *  Traits encapsulating functionality to convert Scala AST Trees into ASM ClassNodes.
+ *
+ *  @author  Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded
+ *  @version 1.0
+ *
+ */
+abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
+
+  import global._
+
+  /*
+   * must-single-thread
+   */
+  def getFileForClassfile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
+    getFile(base, clsName, suffix)
+  }
+
+  /*
+   * must-single-thread
+   */
+  def getOutFolder(csym: Symbol, cName: String, cunit: CompilationUnit): _root_.scala.tools.nsc.io.AbstractFile = {
+    try {
+      outputDirectory(csym)
+    } catch {
+      case ex: Throwable =>
+        cunit.error(cunit.body.pos, s"Couldn't create file for class $cName\n${ex.getMessage}")
+        null
+    }
+  }
+
+  var pickledBytes = 0 // statistics
+
+  // -----------------------------------------------------------------------------------------
+  // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM)
+  // Background:
+  //  http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf
+  //  http://comments.gmane.org/gmane.comp.java.vm.languages/2293
+  //  https://issues.scala-lang.org/browse/SI-3872
+  // -----------------------------------------------------------------------------------------
+
+  /*
+   * can-multi-thread
+   */
+  def firstCommonSuffix(as: List[Tracked], bs: List[Tracked]): BType = {
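+    // Walks the two chains, dropping heads in lock-step, until the head of one chain occurs in the other;
+    // both chains are expected to end in Object, so the loop terminates with the first common suffix.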
+    var chainA = as
+    var chainB = bs
+    var fcs: Tracked = null
+    do {
+      if      (chainB contains chainA.head) fcs = chainA.head
+      else if (chainA contains chainB.head) fcs = chainB.head
+      else {
+        chainA = chainA.tail
+        chainB = chainB.tail
+      }
+    } while (fcs == null)
+    fcs.c
+  }
+
+  /*  An `asm.ClassWriter` that uses `jvmWiseLUB()` to compute the internal name of the least common
+   *  ancestor of the types given by inameA and inameB. That name is what ASM needs to know in order to
+   *  compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow
+   */
+  final class CClassWriter(flags: Int) extends asm.ClassWriter(flags) {
+
+    /*
+     *  This method is thread re-entrant because chrs never grows during its operation (that's because all TypeNames being looked up have already been entered).
+     *  To stress this point, rather than using `newTypeName()` we use `lookupTypeName()`
+     *
+     *  can-multi-thread
+     */
+    override def getCommonSuperClass(inameA: String, inameB: String): String = {
+      val a = brefType(lookupTypeName(inameA.toCharArray))
+      val b = brefType(lookupTypeName(inameB.toCharArray))
+      val lca = jvmWiseLUB(a, b)
+      val lcaName = lca.getInternalName // don't call javaName because that side-effects innerClassBuffer.
+      assert(lcaName != "scala/Any")
+
+      lcaName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things.
+    }
+
+  }
+
+  /*
+   *  Finding the least upper bound in agreement with the bytecode verifier (given two internal names handed out by ASM)
+   *  Background:
+   *    http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf
+   *    http://comments.gmane.org/gmane.comp.java.vm.languages/2293
+   *    https://issues.scala-lang.org/browse/SI-3872
+   *
+   *  can-multi-thread
+   */
+  def jvmWiseLUB(a: BType, b: BType): BType = {
+
+    assert(a.isNonSpecial, s"jvmWiseLUB() received a non-plain-class $a")
+    assert(b.isNonSpecial, s"jvmWiseLUB() received a non-plain-class $b")
+
+    val ta = exemplars.get(a)
+    val tb = exemplars.get(b)
+
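+    // Rules in line with the verifier: two interfaces reduce to whichever is a subtype of the other
+    // (ObjectReference otherwise); an interface paired with a class reduces to the interface if the class
+    // implements it (ObjectReference otherwise); two classes reduce to their first common superclass.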
+    val res = (ta.isInterface, tb.isInterface) match {
+      case (true, true) =>
+        // exercised by test/files/run/t4761.scala
+        if      (tb.isSubtypeOf(ta.c)) ta.c
+        else if (ta.isSubtypeOf(tb.c)) tb.c
+        else ObjectReference
+      case (true, false) =>
+        if (tb.isSubtypeOf(a)) a else ObjectReference
+      case (false, true) =>
+        if (ta.isSubtypeOf(b)) b else ObjectReference
+      case _ =>
+        firstCommonSuffix(ta :: ta.superClasses, tb :: tb.superClasses)
+    }
+    assert(res.isNonSpecial, "jvmWiseLUB() returned a non-plain-class.")
+    res
+  }
+
+  /*
+   * must-single-thread
+   */
+  object isJavaEntryPoint {
+
+    /*
+     * must-single-thread
+     */
+    def apply(sym: Symbol, csymCompUnit: CompilationUnit): Boolean = {
+      def fail(msg: String, pos: Position = sym.pos) = {
+        csymCompUnit.warning(sym.pos,
+          sym.name +
+          s" has a main method with parameter type Array[String], but ${sym.fullName('.')} will not be a runnable program.\n  Reason: $msg"
+          // TODO: make this next claim true, if possible
+          //   by generating valid main methods as static in module classes
+          //   not sure what the jvm allows here
+          // + "  You can still run the program by calling it as " + sym.javaSimpleName + " instead."
+        )
+        false
+      }
+      def failNoForwarder(msg: String) = {
+        fail(s"$msg, which means no static forwarder can be generated.\n")
+      }
+      val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil
+      val hasApproximate = possibles exists { m =>
+        m.info match {
+          case MethodType(p :: Nil, _) => p.tpe.typeSymbol == definitions.ArrayClass
+          case _                       => false
+        }
+      }
+      // At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
+      hasApproximate && {
+        // Before erasure so we can identify generic mains.
+        enteringErasure {
+          val companion     = sym.linkedClassOfClass
+
+          if (definitions.hasJavaMainMethod(companion))
+            failNoForwarder("companion contains its own main method")
+          else if (companion.tpe.member(nme.main) != NoSymbol)
+            // this is only because forwarders aren't smart enough yet
+            failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
+          else if (companion.isTrait)
+            failNoForwarder("companion is a trait")
+          // Now either succeed, or issue some additional warnings for things which look like
+          // attempts to be java main methods.
+          else (possibles exists definitions.isJavaMainMethod) || {
+            possibles exists { m =>
+              m.info match {
+                case PolyType(_, _) =>
+                  fail("main methods cannot be generic.")
+                case MethodType(params, res) =>
+                  if (res.typeSymbol :: params exists (_.isAbstractType))
+                    fail("main methods cannot refer to type parameters or abstract types.", m.pos)
+                  else
+                    definitions.isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos)
+                case tp =>
+                  fail(s"don't know what this is: $tp", m.pos)
+              }
+            }
+          }
+        }
+      }
+    }
+
+  }
+
+  /*
+   * must-single-thread
+   */
+  def initBytecodeWriter(entryPoints: List[Symbol]): BytecodeWriter = {
+    settings.outputDirs.getSingleOutput match {
+      case Some(f) if f hasExtension "jar" =>
+        // If no main class was specified, see if there's only one
+        // entry point among the classes going into the jar.
+        if (settings.mainClass.isDefault) {
+          entryPoints map (_.fullName('.')) match {
+            case Nil      =>
+              log("No Main-Class designated or discovered.")
+            case name :: Nil =>
+              log(s"Unique entry point: setting Main-Class to $name")
+              settings.mainClass.value = name
+            case names =>
+              log(s"No Main-Class due to multiple entry points:\n  ${names.mkString("\n  ")}")
+          }
+        }
+        else log(s"Main-Class was specified: ${settings.mainClass.value}")
+
+        new DirectToJarfileWriter(f.file)
+
+      case _ => factoryNonJarBytecodeWriter()
+    }
+  }
+
+  /*
+   *  must-single-thread
+   */
+  def fieldSymbols(cls: Symbol): List[Symbol] = {
+    for (f <- cls.info.decls.toList;
+         if !f.isMethod && f.isTerm && !f.isModule)
+    yield f
+  }
+
+  /*
+   * can-multi-thread
+   */
+  def methodSymbols(cd: ClassDef): List[Symbol] = {
+    cd.impl.body collect { case dd: DefDef => dd.symbol }
+  }
+
+  /*
+   * Populates the InnerClasses JVM attribute with `refedInnerClasses`.
+   * In addition to inner classes mentioned somewhere in `jclass` (the class file being emitted),
+   * `refedInnerClasses` should contain those inner classes defined as direct member classes of `jclass`
+   * but otherwise not mentioned in `jclass`.
+   *
+   * `refedInnerClasses` may contain duplicates, and need not contain the enclosing inner classes
+   * of each inner class it lists (those are looked up for consistency).
+   *
+   * This method serializes the entries of the InnerClasses JVM attribute in an appropriate order,
+   * not necessarily the order given by `refedInnerClasses`.
+   *
+   * can-multi-thread
+   */
+  final def addInnerClassesASM(jclass: asm.ClassVisitor, refedInnerClasses: Iterable[BType]) {
+    // used to detect duplicates.
+    val seen = mutable.Map.empty[String, String]
+    // result without duplicates, not yet sorted.
+    val result = mutable.Set.empty[InnerClassEntry]
+
+    for(s: BType           <- refedInnerClasses;
+        e: InnerClassEntry <- exemplars.get(s).innersChain) {
+
+      assert(e.name != null, "saveInnerClassesFor() is broken.") // documentation
+      val doAdd = seen.get(e.name) match {
+        // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
+        case Some(prevOName) =>
+          // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
+          // i.e. for them it must be the case that oname == java/lang/Thread
+          assert(prevOName == e.outerName, "duplicate")
+          false
+        case None => true
+      }
+
+      if (doAdd) {
+        seen   += (e.name -> e.outerName)
+        result += e
+      }
+
+    }
+    // sorting ensures inner classes are listed after their enclosing class thus satisfying the Eclipse Java compiler
+    for(e <- result.toList sortBy (_.name.toString)) {
+      jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.access)
+    }
+
+  } // end of method addInnerClassesASM()
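+  // Note on the sort above: e.g. "p/Outer" precedes "p/Outer$Inner" lexicographically, so an enclosing
+  // class's entry is always listed before the entries of its member classes.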
+
+  /*
+   * The custom class file attribute (JVMS 4.7.1) "ScalaSig" is used only as a marker;
+   * i.e., the pickle itself is contained in a custom annotation, see:
+   *   (1) `addAnnotations()`,
+   *   (2) SID # 10 (draft) - Storage of pickled Scala signatures in class files, http://www.scala-lang.org/sid/10
+   *   (3) SID # 5 - Internals of Scala Annotations, http://www.scala-lang.org/sid/5
+   * That annotation is unrelated to the "java-generic-signature" (JVMS 4.7.9)
+   * other than both ending up encoded as attributes (JVMS 4.7),
+   * with the caveat that the "ScalaSig" attribute is associated with classes only,
+   * while the "Signature" attribute can be associated with classes, methods, and fields.
+   *
+   */
+  trait BCPickles {
+
+    import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
+
+    val versionPickle = {
+      val vp = new PickleBuffer(new Array[Byte](16), -1, 0)
+      assert(vp.writeIndex == 0, vp)
+      vp writeNat PickleFormat.MajorVersion
+      vp writeNat PickleFormat.MinorVersion
+      vp writeNat 0
+      vp
+    }
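+    // The bytes above encode just the pickle format version (major, minor) followed by a zero entry
+    // count; this is the payload carried by the "ScalaSig" marker attribute, see pickleMarkerLocal below.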
+
+    /*
+     * can-multi-thread
+     */
+    def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = {
+      val dest = new Array[Byte](len);
+      System.arraycopy(b, offset, dest, 0, len);
+      new asm.CustomAttr(name, dest)
+    }
+
+    /*
+     * can-multi-thread
+     */
+    def pickleMarkerLocal = {
+      createJAttribute(tpnme.ScalaSignatureATTR.toString, versionPickle.bytes, 0, versionPickle.writeIndex)
+    }
+
+    /*
+     * can-multi-thread
+     */
+    def pickleMarkerForeign = {
+      createJAttribute(tpnme.ScalaATTR.toString, new Array[Byte](0), 0, 0)
+    }
+
+    /*  Returns a ScalaSignature annotation if it must be added to this class, none otherwise.
+     *  This annotation must be added to the class' annotations list when generating them.
+     *
+     *  Depending on whether the returned option is defined, it adds to `jclass` one of:
+     *    (a) the ScalaSig marker attribute
+     *        (indicating that a scala-signature-annotation aka pickle is present in this class); or
+     *    (b) the Scala marker attribute
+     *        (indicating that a scala-signature-annotation aka pickle is to be found in another file).
+     *
+     *
+     *  @param jclassName The class file that is being readied.
+     *  @param sym    The symbol for which the signature has been entered in the symData map.
+     *                This is different from the symbol
+     *                that is being generated in the case of a mirror class.
+     *  @return       An option that is:
+     *                - defined and contains an AnnotationInfo of the ScalaSignature type,
+     *                  instantiated with the pickle signature for sym.
+     *                - empty if the jclass/sym pair must not contain a pickle.
+     *
+     *  must-single-thread
+     */
+    def getAnnotPickle(jclassName: String, sym: Symbol): Option[AnnotationInfo] = {
+      currentRun.symData get sym match {
+        case Some(pickle) if !nme.isModuleName(newTermName(jclassName)) =>
+          val scalaAnnot = {
+            val sigBytes = ScalaSigBytes(pickle.bytes.take(pickle.writeIndex))
+            AnnotationInfo(sigBytes.sigAnnot, Nil, (nme.bytes, sigBytes) :: Nil)
+          }
+          pickledBytes += pickle.writeIndex
+          currentRun.symData -= sym
+          currentRun.symData -= sym.companionSymbol
+          Some(scalaAnnot)
+        case _ =>
+          None
+      }
+    }
+
+  } // end of trait BCPickles
+
+  trait BCInnerClassGen {
+
+    def debugLevel = settings.debuginfo.indexOfChoice
+
+    val emitSource = debugLevel >= 1
+    val emitLines  = debugLevel >= 2
+    val emitVars   = debugLevel >= 3
+
+    /*
+     *  Contains class-symbols that:
+     *    (a) are known to denote inner classes
+     *    (b) are mentioned somewhere in the class being generated.
+     *
+     *  In other words, the lifetime of `innerClassBufferASM` is associated to "the class being generated".
+     */
+    val innerClassBufferASM = mutable.Set.empty[BType]
+
+    /*
+     *  Tracks (if needed) the inner class given by `sym`.
+     *
+     *  must-single-thread
+     */
+    final def internalName(sym: Symbol): String = { asmClassType(sym).getInternalName }
+
+    /*
+     *  Tracks (if needed) the inner class given by `sym`.
+     *
+     *  must-single-thread
+     */
+    final def asmClassType(sym: Symbol): BType = {
+      assert(
+        hasInternalName(sym),
+        {
+          val msg0 = if (sym.isAbstractType) "An AbstractTypeSymbol (SI-7122) " else "A symbol ";
+          msg0 + s"has reached the bytecode emitter, for which no JVM-level internal name can be found: ${sym.fullName}"
+        }
+      )
+      val phantOpt = phantomTypeMap.get(sym)
+      if (phantOpt.isDefined) {
+        return phantOpt.get
+      }
+      val tracked = exemplar(sym)
+      val tk = tracked.c
+      if (tracked.isInnerClass) {
+        innerClassBufferASM += tk
+      }
+
+      tk
+    }
+
+    /*
+     *  Returns the BType for the given type.
+     *  Tracks (if needed) the inner class given by `t`.
+     *
+     * must-single-thread
+     */
+    final def toTypeKind(t: Type): BType = {
+
+      /* Interfaces have to be handled delicately to avoid introducing spurious errors,
+       *  but if we treat them all as AnyRef we lose too much information.
+       */
+      def newReference(sym0: Symbol): BType = {
+        assert(!primitiveTypeMap.contains(sym0), "Use primitiveTypeMap instead.")
+        assert(sym0 != definitions.ArrayClass,   "Use arrayOf() instead.")
+
+        if (sym0 == definitions.NullClass)    return RT_NULL;
+        if (sym0 == definitions.NothingClass) return RT_NOTHING;
+
+        val sym = (
+          if (!sym0.isPackageClass) sym0
+          else sym0.info.member(nme.PACKAGE) match {
+            case NoSymbol => abort(s"SI-5604: Cannot use package as value: ${sym0.fullName}")
+            case s        => abort(s"SI-5604: found package class where package object expected: $s")
+          }
+        )
+
+        // Can't call .toInterface (at this phase) or we trip an assertion.
+        // See PackratParser#grow for a method which fails with an apparent mismatch
+        // between "object PackratParsers$class" and "trait PackratParsers"
+        if (sym.isImplClass) {
+          // pos/spec-List.scala is the sole failure if we don't check for NoSymbol
+          val traitSym = sym.owner.info.decl(tpnme.interfaceName(sym.name))
+          if (traitSym != NoSymbol) {
+            // this tracks the inner class in innerClassBufferASM, if needed.
+            return asmClassType(traitSym)
+          }
+        }
+
+        assert(hasInternalName(sym), s"Invoked for a symbol lacking JVM internal name: ${sym.fullName}")
+        assert(!phantomTypeMap.contains(sym), "phantom types not supposed to reach here.")
+
+        val tracked = exemplar(sym)
+        val tk = tracked.c
+        if (tracked.isInnerClass) {
+          innerClassBufferASM += tk
+        }
+
+        tk
+      }
+
+      def primitiveOrRefType(sym: Symbol): BType = {
+        assert(sym != definitions.ArrayClass, "Use primitiveOrArrayOrRefType() instead.")
+
+        primitiveTypeMap.getOrElse(sym, newReference(sym))
+      }
+
+      def primitiveOrRefType2(sym: Symbol): BType = {
+        primitiveTypeMap.get(sym) match {
+          case Some(pt) => pt
+          case None =>
+            sym match {
+              case definitions.NullClass    => RT_NULL
+              case definitions.NothingClass => RT_NOTHING
+              case _ if sym.isClass         => newReference(sym)
+              case _ =>
+                assert(sym.isType, sym) // it must be compiling Array[a]
+                ObjectReference
+            }
+        }
+      }
+
+      import definitions.ArrayClass
+
+      // Call to .normalize fixes #3003 (follow type aliases). Otherwise, primitiveOrArrayOrRefType() would return ObjectReference.
+      t.normalize match {
+
+        case ThisType(sym) =>
+          if (sym == ArrayClass) ObjectReference
+          else                   phantomTypeMap.getOrElse(sym, exemplar(sym).c)
+
+        case SingleType(_, sym) => primitiveOrRefType(sym)
+
+        case _: ConstantType    => toTypeKind(t.underlying)
+
+        case TypeRef(_, sym, args)    =>
+          if (sym == ArrayClass) arrayOf(toTypeKind(args.head))
+          else                   primitiveOrRefType2(sym)
+
+        case ClassInfoType(_, _, sym) =>
+          assert(sym != ArrayClass, "ClassInfoType to ArrayClass!")
+          primitiveOrRefType(sym)
+
+        // !!! Iulian says types which make no sense after erasure should not reach here, which includes the ExistentialType, AnnotatedType, RefinedType.
+        case ExistentialType(_, t)   => toTypeKind(t) // TODO shouldn't get here but the following does: akka-actor/src/main/scala/akka/util/WildcardTree.scala
+        case AnnotatedType(_, w)     => toTypeKind(w) // TODO test/files/jvm/annotations.scala causes an AnnotatedType to reach here.
+        case RefinedType(parents, _) => parents map toTypeKind reduceLeft jvmWiseLUB
+
+        // For sure WildcardTypes shouldn't reach here either, but when debugging such situations this may come in handy.
+        // case WildcardType    => REFERENCE(ObjectClass)
+        case norm => abort(
+          s"Unknown type: $t, $norm [${t.getClass}, ${norm.getClass}] TypeRef? ${t.isInstanceOf[TypeRef]}"
+        )
+      }
+
+    } // end of method toTypeKind()
+
+    /*
+     * must-single-thread
+     */
+    def asmMethodType(msym: Symbol): BType = {
+      assert(msym.isMethod, s"not a method-symbol: $msym")
+      val resT: BType =
+        if (msym.isClassConstructor || msym.isConstructor) BType.VOID_TYPE
+        else toTypeKind(msym.tpe.resultType);
+      BType.getMethodType( resT, mkArray(msym.tpe.paramTypes map toTypeKind) )
+    }
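+    // E.g. for `def f(x: Int): String` the resulting BType has descriptor (I)Ljava/lang/String;,
+    // while any constructor gets V as its return type.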
+
+    /*
+     *  Returns all direct member inner classes of `csym`,
+     *  thus making sure they get entries in the InnerClasses JVM attribute
+     *  even if otherwise not mentioned in the class being built.
+     *
+     *  must-single-thread
+     */
+    final def trackMemberClasses(csym: Symbol, lateClosuresBTs: List[BType]): List[BType] = {
+      val lateInnerClasses = exitingErasure {
+        for (sym <- List(csym, csym.linkedClassOfClass); memberc <- sym.info.decls.map(innerClassSymbolFor) if memberc.isClass)
+        yield memberc
+      }
+      // as a precaution, do the following outside the above `exitingErasure` otherwise funny internal names might be computed.
+      val result = for(memberc <- lateInnerClasses) yield {
+        val tracked = exemplar(memberc)
+        val memberCTK = tracked.c
+        assert(tracked.isInnerClass, s"saveInnerClassesFor() says this was no inner-class after all: ${memberc.fullName}")
+
+        memberCTK
+      }
+
+      exemplar(csym).directMemberClasses = result
+
+      result
+    }
+
+    /*
+     *  Tracks (if needed) the inner class given by `t`.
+     *
+     *  must-single-thread
+     */
+    final def descriptor(t: Type):   String = { toTypeKind(t).getDescriptor   }
+
+    /*
+     *  Tracks (if needed) the inner class given by `sym`.
+     *
+     *  must-single-thread
+     */
+    final def descriptor(sym: Symbol): String = { asmClassType(sym).getDescriptor }
+
+  } // end of trait BCInnerClassGen
+
+  trait BCAnnotGen extends BCInnerClassGen {
+
+    import genASM.{ubytesToCharArray, arrEncode}
+
+    /*
+     *  can-multi-thread
+     */
+    private def strEncode(sb: ScalaSigBytes): String = {
+      val ca = ubytesToCharArray(sb.sevenBitsMayBeZero)
+      new java.lang.String(ca)
+      // debug val bvA = new asm.ByteVector; bvA.putUTF8(s)
+      // debug val enc: Array[Byte] = scala.reflect.internal.pickling.ByteCodecs.encode(bytes)
+      // debug assert(enc(idx) == bvA.getByte(idx + 2))
+      // debug assert(bvA.getLength == enc.size + 2)
+    }
+
+    /*
+     * For an arg that is a LiteralAnnotArg(const) with const.tag in {ClazzTag, EnumTag},
+     * as well as for an arg that is a NestedAnnotArg,
+     *   must-single-thread
+     * Otherwise it's safe to call from multiple threads.
+     */
+    def emitArgument(av:   asm.AnnotationVisitor,
+                     name: String,
+                     arg:  ClassfileAnnotArg) {
+      (arg: @unchecked) match {
+
+        case LiteralAnnotArg(const) =>
+          if (const.isNonUnitAnyVal) { av.visit(name, const.value) }
+          else {
+            const.tag match {
+              case StringTag  =>
+                assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
+                av.visit(name, const.stringValue)  // `stringValue` special-cases null, but that execution path isn't exercised for a const with StringTag
+              case ClazzTag   => av.visit(name, toTypeKind(const.typeValue).toASMType)
+              case EnumTag =>
+                val edesc  = descriptor(const.tpe) // the class descriptor of the enumeration class.
+                val evalue = const.symbolValue.name.toString // the name of the actual enumeration value.
+                av.visitEnum(name, edesc, evalue)
+            }
+          }
+
+        case sb @ ScalaSigBytes(bytes) =>
+          // see http://www.scala-lang.org/sid/10 (Storage of pickled Scala signatures in class files)
+          // also JVMS Sec. 4.7.16.1 The element_value structure and JVMS Sec. 4.4.7 The CONSTANT_Utf8_info Structure.
+          if (sb.fitsInOneString) {
+            av.visit(name, strEncode(sb))
+          } else {
+            val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name)
+            for(arg <- genASM.arrEncode(sb)) { arrAnnotV.visit(name, arg) }
+            arrAnnotV.visitEnd()
+          }
+          // For the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape.
+
+        case ArrayAnnotArg(args) =>
+          val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name)
+          for(arg <- args) { emitArgument(arrAnnotV, null, arg) }
+          arrAnnotV.visitEnd()
+
+        case NestedAnnotArg(annInfo) =>
+          val AnnotationInfo(typ, args, assocs) = annInfo
+          assert(args.isEmpty, args)
+          val desc = descriptor(typ) // the class descriptor of the nested annotation class
+          val nestedVisitor = av.visitAnnotation(name, desc)
+          emitAssocs(nestedVisitor, assocs)
+      }
+    }
+
+    /* Whether an annotation should be emitted as a Java annotation
+     *   .initialize: if 'annot' is read from pickle, atp might be un-initialized
+     *
+     * must-single-thread
+     */
+    private def shouldEmitAnnotation(annot: AnnotationInfo) =
+      annot.symbol.initialize.isJavaDefined &&
+      annot.matches(definitions.ClassfileAnnotationClass) &&
+      annot.args.isEmpty &&
+      !annot.matches(definitions.DeprecatedAttr)
+
+    /*
+     * In general,
+     *   must-single-thread
+     * but not necessarily always.
+     */
+    def emitAssocs(av: asm.AnnotationVisitor, assocs: List[(Name, ClassfileAnnotArg)]) {
+      for ((name, value) <- assocs) {
+        emitArgument(av, name.toString(), value)
+      }
+      av.visitEnd()
+    }
+
+    /*
+     * must-single-thread
+     */
+    def emitAnnotations(cw: asm.ClassVisitor, annotations: List[AnnotationInfo]) {
+      for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+        val AnnotationInfo(typ, args, assocs) = annot
+        assert(args.isEmpty, args)
+        val av = cw.visitAnnotation(descriptor(typ), true)
+        emitAssocs(av, assocs)
+      }
+    }
+
+    /*
+     * must-single-thread
+     */
+    def emitAnnotations(mw: asm.MethodVisitor, annotations: List[AnnotationInfo]) {
+      for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+        val AnnotationInfo(typ, args, assocs) = annot
+        assert(args.isEmpty, args)
+        val av = mw.visitAnnotation(descriptor(typ), true)
+        emitAssocs(av, assocs)
+      }
+    }
+
+    /*
+     * must-single-thread
+     */
+    def emitAnnotations(fw: asm.FieldVisitor, annotations: List[AnnotationInfo]) {
+      for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+        val AnnotationInfo(typ, args, assocs) = annot
+        assert(args.isEmpty, args)
+        val av = fw.visitAnnotation(descriptor(typ), true)
+        emitAssocs(av, assocs)
+      }
+    }
+
+    /*
+     * must-single-thread
+     */
+    def emitParamAnnotations(jmethod: asm.MethodVisitor, pannotss: List[List[AnnotationInfo]]) {
+      val annotationss = pannotss map (_ filter shouldEmitAnnotation)
+      if (annotationss forall (_.isEmpty)) return
+      for ((annots, idx) <- annotationss.zipWithIndex;
+           annot <- annots) {
+        val AnnotationInfo(typ, args, assocs) = annot
+        assert(args.isEmpty, args)
+        val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), true)
+        emitAssocs(pannVisitor, assocs)
+      }
+    }
+
+  } // end of trait BCAnnotGen
+
+  trait BCJGenSigGen {
+
+    def getCurrentCUnit(): CompilationUnit
+
+    /* @return
+     *   - `null` if no Java signature is to be added (`null` is what ASM expects in these cases).
+     *   - otherwise the signature in question
+     *
+     * must-single-thread
+     */
+    def getGenericSignature(sym: Symbol, owner: Symbol): String = genASM.getGenericSignature(sym, owner, getCurrentCUnit())
+
+  } // end of trait BCJGenSigGen
+
+  trait BCForwardersGen extends BCAnnotGen with BCJGenSigGen {
+
+    // -----------------------------------------------------------------------------------------
+    // Static forwarders (related to mirror classes but also present in
+    // a plain class lacking companion module, for details see `isCandidateForForwarders`).
+    // -----------------------------------------------------------------------------------------
+
+    val ExcludedForwarderFlags = genASM.ExcludedForwarderFlags
+
+    /* Adds a @remote annotation, actual use unknown.
+     *
+     * Invoked from genMethod() and addForwarder().
+     *
+     * must-single-thread
+     */
+    def addRemoteExceptionAnnot(isRemoteClass: Boolean, isJMethodPublic: Boolean, meth: Symbol) {
+      val needsAnnotation = (
+        (  isRemoteClass ||
+           isRemote(meth) && isJMethodPublic
+        ) && !(meth.throwsAnnotations contains definitions.RemoteExceptionClass)
+      )
+      if (needsAnnotation) {
+        val c   = Constant(definitions.RemoteExceptionClass.tpe)
+        val arg = Literal(c) setType c.tpe
+        meth.addAnnotation(appliedType(definitions.ThrowsClass, c.tpe), arg)
+      }
+    }
+
+    /* Add a forwarder for method m. Used only from addForwarders().
+     *
+     * must-single-thread
+     */
+    private def addForwarder(isRemoteClass: Boolean, jclass: asm.ClassVisitor, module: Symbol, m: Symbol) {
+      val moduleName     = internalName(module)
+      val methodInfo     = module.thisType.memberInfo(m)
+      val paramJavaTypes: List[BType] = methodInfo.paramTypes map toTypeKind
+      // val paramNames     = 0 until paramJavaTypes.length map ("x_" + _)
+
+      /* Forwarders must not be marked final,
+       *  as the JVM will not allow redefinition of a final static method,
+       *  and we don't know what classes might be subclassing the companion class.  See SI-4827.
+       */
+      // TODO: evaluate the other flags we might be dropping on the floor here.
+      // TODO: ACC_SYNTHETIC ?
+      val flags = PublicStatic | (
+        if (m.isVarargsMethod) asm.Opcodes.ACC_VARARGS else 0
+      )
+
+      // TODO needed? for(ann <- m.annotations) { ann.symbol.initialize }
+      val jgensig = genASM.staticForwarderGenericSignature(m, module, getCurrentCUnit())
+      addRemoteExceptionAnnot(isRemoteClass, hasPublicBitSet(flags), m)
+      val (throws, others) = m.annotations partition (_.symbol == definitions.ThrowsClass)
+      val thrownExceptions: List[String] = getExceptions(throws)
+
+      val jReturnType = toTypeKind(methodInfo.resultType)
+      val mdesc = BType.getMethodType(jReturnType, mkArray(paramJavaTypes)).getDescriptor
+      val mirrorMethodName = m.javaSimpleName.toString
+      val mirrorMethod: asm.MethodVisitor = jclass.visitMethod(
+        flags,
+        mirrorMethodName,
+        mdesc,
+        jgensig,
+        mkArray(thrownExceptions)
+      )
+
+      emitAnnotations(mirrorMethod, others)
+      emitParamAnnotations(mirrorMethod, m.info.params.map(_.annotations))
+
+      mirrorMethod.visitCode()
+
+      mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, strMODULE_INSTANCE_FIELD, descriptor(module))
+
+      var index = 0
+      for(jparamType <- paramJavaTypes) {
+        mirrorMethod.visitVarInsn(jparamType.getOpcode(asm.Opcodes.ILOAD), index)
+        assert(jparamType.sort != BType.METHOD, jparamType)
+        index += jparamType.getSize
+      }
+
+      mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, asmMethodType(m).getDescriptor)
+      mirrorMethod.visitInsn(jReturnType.getOpcode(asm.Opcodes.IRETURN))
+
+      mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+      mirrorMethod.visitEnd()
+
+    }
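+    // The forwarder emitted above for, say, `def bar(x: Int): String` on `object Foo` is roughly
+    // equivalent to the Java source `public static String bar(int x) { return Foo$.MODULE$.bar(x); }`
+    // placed in the companion (or mirror) class Foo.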
+
+    /* Add forwarders for all methods defined in `module` that don't conflict
+     *  with methods in the companion class of `module`. A conflict arises when
+     *  a method with the same name is defined both in a class and its companion object:
+     *  method signature is not taken into account.
+     *
+     * must-single-thread
+     */
+    def addForwarders(isRemoteClass: Boolean, jclass: asm.ClassVisitor, jclassName: String, moduleClass: Symbol) {
+      assert(moduleClass.isModuleClass, moduleClass)
+      debuglog(s"Dumping mirror class for object: $moduleClass")
+
+      val linkedClass  = moduleClass.companionClass
+      lazy val conflictingNames: Set[Name] = {
+        (linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name }).toSet
+      }
+      debuglog(s"Potentially conflicting names for forwarders: $conflictingNames")
+
+      for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, symtab.Flags.METHOD)) {
+        if (m.isType || m.isDeferred || (m.owner eq definitions.ObjectClass) || m.isConstructor)
+          debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass'")
+        else if (conflictingNames(m.name))
+          log(s"No forwarder for $m due to conflict with ${linkedClass.info.member(m.name)}")
+        else if (m.hasAccessBoundary)
+          log(s"No forwarder for non-public member $m")
+        else {
+          log(s"Adding static forwarder for '$m' from $jclassName to '$moduleClass'")
+          addForwarder(isRemoteClass, jclass, moduleClass, m)
+        }
+      }
+    }
+
+    /*
+     * Quoting from JVMS 4.7.5 The Exceptions Attribute
+     *   "The Exceptions attribute indicates which checked exceptions a method may throw.
+     *    There may be at most one Exceptions attribute in each method_info structure."
+     *
+     * The contents of that attribute are determined by the `String[] exceptions` argument to ASM's ClassVisitor.visitMethod()
+     * This method returns such list of internal names.
+     *
+     * must-single-thread
+     */
+    def getExceptions(excs: List[AnnotationInfo]): List[String] = {
+      for (ThrownException(exc) <- excs.distinct)
+      yield internalName(exc)
+    }
+
+  } // end of trait BCForwardersGen
+
+  trait BCClassGen extends BCInnerClassGen {
+
+    // Used as threshold above which a tableswitch bytecode instruction is preferred over a lookupswitch.
+    // There's a space tradeoff between these multi-branch instructions (details in the JVM spec).
+    // The particular value in use for `MIN_SWITCH_DENSITY` reflects a heuristic.
+    val MIN_SWITCH_DENSITY = 0.7
+
+    /*
+     *  must-single-thread
+     */
+    def serialVUID(csym: Symbol): Option[Long] = csym getAnnotation definitions.SerialVersionUIDAttr collect {
+      case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue
+    }
+
+    /*
+     *  Add public static final field serialVersionUID with value `id`
+     *
+     *  can-multi-thread
+     */
+    def addSerialVUID(id: Long, jclass: asm.ClassVisitor) {
+      // add static serialVersionUID field if `clasz` annotated with `@SerialVersionUID(uid: Long)`
+      jclass.visitField(
+        PublicStaticFinal,
+        "serialVersionUID",
+        "J",
+        null, // no java-generic-signature
+        new java.lang.Long(id)
+      ).visitEnd()
+    }
+
+    /*
+     * @param owner internal name of the enclosing class of the class.
+     *
+     * @param name the name of the method that contains the class.
+     *
+     * @param methodType the method type (a BType) of the method that contains the class.
+     */
+    case class EnclMethodEntry(owner: String, name: String, methodType: BType)
+
+    /*
+     * @return null if the current class is not internal to a method
+     *
+     * Quoting from JVMS 4.7.7 The EnclosingMethod Attribute
+     *   A class must have an EnclosingMethod attribute if and only if it is a local class or an anonymous class.
+     *   A class may have no more than one EnclosingMethod attribute.
+     *
+     * must-single-thread
+     */
+    def getEnclosingMethodAttribute(clazz: Symbol): EnclMethodEntry = { // JVMS 4.7.7
+
+      def newEEE(eClass: Symbol, m: Symbol) = {
+        EnclMethodEntry(
+          internalName(eClass),
+          m.javaSimpleName.toString,
+          asmMethodType(m)
+        )
+      }
+
+      var res: EnclMethodEntry = null
+      val sym = clazz.originalEnclosingMethod
+      if (sym.isMethod) {
+        debuglog(s"enclosing method for $clazz is $sym (in ${sym.enclClass})")
+        res = newEEE(sym.enclClass, sym)
+      } else if (clazz.isAnonymousClass) {
+        val enclClass = clazz.rawowner
+        assert(enclClass.isClass, enclClass)
+        val sym = enclClass.primaryConstructor
+        if (sym == NoSymbol) {
+          log(s"Ran out of room looking for an enclosing method for $clazz: no constructor here: $enclClass.")
+        } else {
+          debuglog(s"enclosing method for $clazz is $sym (in $enclClass)")
+          res = newEEE(enclClass, sym)
+        }
+      }
+
+      res
+    }
+
+  } // end of trait BCClassGen
+
+  /* basic functionality for class file building of plain, mirror, and beaninfo classes. */
+  abstract class JBuilder extends BCInnerClassGen {
+
+  } // end of class JBuilder
+
+  /* functionality for building plain and mirror classes */
+  abstract class JCommonBuilder
+    extends JBuilder
+    with    BCAnnotGen
+    with    BCForwardersGen
+    with    BCPickles { }
+
+  /* builder of mirror classes */
+  class JMirrorBuilder extends JCommonBuilder {
+
+    private var cunit: CompilationUnit = _
+    def getCurrentCUnit(): CompilationUnit = cunit;
+
+    /* Generate a mirror class for a top-level module. A mirror class is a class
+     *  containing only static methods that forward to the corresponding method
+     *  on the MODULE instance of the given Scala object.  It will only be
+     *  generated if there is no companion class: if there is, an attempt will
+     *  instead be made to add the forwarder methods to the companion class.
+     *
+     *  must-single-thread
+     */
+    def genMirrorClass(modsym: Symbol, cunit: CompilationUnit): asm.tree.ClassNode = {
+      assert(modsym.companionClass == NoSymbol, modsym)
+      innerClassBufferASM.clear()
+      this.cunit = cunit
+      val moduleName = internalName(modsym) // + "$"
+      val mirrorName = moduleName.substring(0, moduleName.length() - 1)
+
+      val flags = (asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL)
+      val mirrorClass = new asm.tree.ClassNode
+      mirrorClass.visit(
+        classfileVersion,
+        flags,
+        mirrorName,
+        null /* no java-generic-signature */,
+        JAVA_LANG_OBJECT.getInternalName,
+        EMPTY_STRING_ARRAY
+      )
+
+      if (emitSource) {
+        mirrorClass.visitSource("" + cunit.source,
+                                null /* SourceDebugExtension */)
+      }
+
+      val ssa = getAnnotPickle(mirrorName, modsym.companionSymbol)
+      mirrorClass.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
+      emitAnnotations(mirrorClass, modsym.annotations ++ ssa)
+
+      addForwarders(isRemote(modsym), mirrorClass, mirrorName, modsym)
+
+      innerClassBufferASM ++= trackMemberClasses(modsym, Nil /* TODO what about Late-Closure-Classes */ )
+      addInnerClassesASM(mirrorClass, innerClassBufferASM.toList)
+
+      mirrorClass.visitEnd()
+
+      ("" + modsym.name) // this side-effect is necessary, really.
+
+      mirrorClass
+    }
+
+  } // end of class JMirrorBuilder
+
+  /* builder of bean info classes */
+  class JBeanInfoBuilder extends JBuilder {
+
+    /*
+     * Generate a bean info class that describes the given class.
+     *
+     * @author Ross Judson (ross.judson at soletta.com)
+     *
+     * must-single-thread
+     */
+    def genBeanInfoClass(cls: Symbol, cunit: CompilationUnit, fieldSymbols: List[Symbol], methodSymbols: List[Symbol]): asm.tree.ClassNode = {
+
+      def javaSimpleName(s: Symbol): String = { s.javaSimpleName.toString }
+
+      innerClassBufferASM.clear()
+
+      val flags = mkFlags(
+        javaFlags(cls),
+        if (isDeprecated(cls)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+      )
+
+      val beanInfoName  = (internalName(cls) + "BeanInfo")
+      val beanInfoClass = new asm.tree.ClassNode
+      beanInfoClass.visit(
+        classfileVersion,
+        flags,
+        beanInfoName,
+        null, // no java-generic-signature
+        "scala/beans/ScalaBeanInfo",
+        EMPTY_STRING_ARRAY
+      )
+
+      beanInfoClass.visitSource(
+        cunit.source.toString,
+        null /* SourceDebugExtension */
+      )
+
+      var fieldList = List[String]()
+
+      for (f <- fieldSymbols if f.hasGetter;
+           g = f.getter(cls);
+           s = f.setter(cls);
+           if g.isPublic && !(f.name startsWith "$")
+          ) {
+        // inserting $outer breaks the bean
+        fieldList = javaSimpleName(f) :: javaSimpleName(g) :: (if (s != NoSymbol) javaSimpleName(s) else null) :: fieldList
+      }
+
+      val methodList: List[String] =
+        for (m <- methodSymbols
+             if !m.isConstructor &&
+                m.isPublic &&
+                !(m.name startsWith "$") &&
+                !m.isGetter &&
+                !m.isSetter)
+        yield javaSimpleName(m)
+
+      val constructor = beanInfoClass.visitMethod(
+        asm.Opcodes.ACC_PUBLIC,
+        INSTANCE_CONSTRUCTOR_NAME,
+        "()V",
+        null, // no java-generic-signature
+        EMPTY_STRING_ARRAY // no throwable exceptions
+      )
+
+      val stringArrayJType: BType = arrayOf(JAVA_LANG_STRING)
+      val conJType: BType =
+        BType.getMethodType(
+          BType.VOID_TYPE,
+          Array(exemplar(definitions.ClassClass).c, stringArrayJType, stringArrayJType)
+        )
+
+      def push(lst: List[String]) {
+        var fi = 0
+        for (f <- lst) {
+          constructor.visitInsn(asm.Opcodes.DUP)
+          constructor.visitLdcInsn(new java.lang.Integer(fi))
+          if (f == null) { constructor.visitInsn(asm.Opcodes.ACONST_NULL) }
+          else           { constructor.visitLdcInsn(f) }
+          constructor.visitInsn(JAVA_LANG_STRING.getOpcode(asm.Opcodes.IASTORE))
+          fi += 1
+        }
+      }
+
+      constructor.visitCode()
+
+      constructor.visitVarInsn(asm.Opcodes.ALOAD, 0)
+      // push the class
+      constructor.visitLdcInsn(exemplar(cls).c)
+
+      // push the string array of field information
+      constructor.visitLdcInsn(new java.lang.Integer(fieldList.length))
+      constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName)
+      push(fieldList)
+
+      // push the string array of method information
+      constructor.visitLdcInsn(new java.lang.Integer(methodList.length))
+      constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName)
+      push(methodList)
+
+      // invoke the superclass constructor, which will do the
+      // necessary java reflection and create Method objects.
+      constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.getDescriptor)
+      constructor.visitInsn(asm.Opcodes.RETURN)
+
+      constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+      constructor.visitEnd()
+
+      innerClassBufferASM ++= trackMemberClasses(cls, Nil /* TODO what about Late-Closure-Classes */ )
+      addInnerClassesASM(beanInfoClass, innerClassBufferASM.toList)
+
+      beanInfoClass.visitEnd()
+
+      beanInfoClass
+    }
+
+  } // end of class JBeanInfoBuilder
+
+  trait JAndroidBuilder {
+    self: BCInnerClassGen =>
+
+    /* From the reference documentation of the Android SDK:
+     *  The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`.
+     *  Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`,
+     *  which is an object implementing the `Parcelable.Creator` interface.
+     */
+    val androidFieldName = newTermName("CREATOR")
+
+    /*
+     * must-single-thread
+     */
+    def isAndroidParcelableClass(sym: Symbol) =
+      (AndroidParcelableInterface != NoSymbol) &&
+      (sym.parentSymbols contains AndroidParcelableInterface)
+
+    /*
+     * must-single-thread
+     */
+    def legacyAddCreatorCode(clinit: asm.MethodVisitor, cnode: asm.tree.ClassNode, thisName: String) {
+      // this tracks the inner class in innerClassBufferASM, if needed.
+      val androidCreatorType = asmClassType(AndroidCreatorClass)
+      val tdesc_creator = androidCreatorType.getDescriptor
+
+      cnode.visitField(
+        PublicStaticFinal,
+        "CREATOR",
+        tdesc_creator,
+        null, // no java-generic-signature
+        null  // no initial value
+      ).visitEnd()
+
+      val moduleName = (thisName + "$")
+
+      // GETSTATIC `moduleName`.MODULE$ : `moduleName`;
+      clinit.visitFieldInsn(
+        asm.Opcodes.GETSTATIC,
+        moduleName,
+        strMODULE_INSTANCE_FIELD,
+        "L" + moduleName + ";"
+      )
+
+      // INVOKEVIRTUAL `moduleName`.CREATOR() : android.os.Parcelable$Creator;
+      val bt = BType.getMethodType(androidCreatorType, Array.empty[BType])
+      clinit.visitMethodInsn(
+        asm.Opcodes.INVOKEVIRTUAL,
+        moduleName,
+        "CREATOR",
+        bt.getDescriptor
+      )
+
+      // PUTSTATIC `thisName`.CREATOR;
+      clinit.visitFieldInsn(
+        asm.Opcodes.PUTSTATIC,
+        thisName,
+        "CREATOR",
+        tdesc_creator
+      )
+    }
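+    // The <clinit> instructions emitted above amount to the Java source
+    //   CREATOR = <thisName>$.MODULE$.CREATOR();
+    // i.e. the static CREATOR field of the class is initialized from its companion object.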
+
+  } // end of trait JAndroidBuilder
+
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
new file mode 100644
index 0000000..c3492b7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
@@ -0,0 +1,725 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package backend.jvm
+
+import scala.tools.asm
+import scala.annotation.switch
+import scala.collection.{ immutable, mutable }
+import collection.convert.Wrappers.JListWrapper
+
+/*
+ *  A high-level facade to the ASM API for bytecode generation.
+ *
+ *  @author  Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded
+ *  @version 1.0
+ *
+ */
+abstract class BCodeIdiomatic extends BCodeGlue {
+
+  import global._
+
+  val classfileVersion: Int = settings.target.value match {
+    case "jvm-1.5"     => asm.Opcodes.V1_5
+    case "jvm-1.6"     => asm.Opcodes.V1_6
+    case "jvm-1.7"     => asm.Opcodes.V1_7
+  }
+
+  val majorVersion: Int = (classfileVersion & 0xFF)
+  val emitStackMapFrame = (majorVersion >= 50)
+
+  def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
+
+  val extraProc: Int = mkFlags(
+    asm.ClassWriter.COMPUTE_MAXS,
+    if (emitStackMapFrame) asm.ClassWriter.COMPUTE_FRAMES else 0
+  )
+
+  val StringBuilderClassName = "scala/collection/mutable/StringBuilder"
+
+  val CLASS_CONSTRUCTOR_NAME    = "<clinit>"
+  val INSTANCE_CONSTRUCTOR_NAME = "<init>"
+
+  val ObjectReference   = brefType("java/lang/Object")
+  val AnyRefReference   = ObjectReference
+  val objArrayReference = arrayOf(ObjectReference)
+
+  val JAVA_LANG_OBJECT  = ObjectReference
+  val JAVA_LANG_STRING  = brefType("java/lang/String")
+
+  var StringBuilderReference: BType = null
+
+  val EMPTY_STRING_ARRAY   = Array.empty[String]
+  val EMPTY_INT_ARRAY      = Array.empty[Int]
+  val EMPTY_LABEL_ARRAY    = Array.empty[asm.Label]
+  val EMPTY_BTYPE_ARRAY    = Array.empty[BType]
+
+  /* can-multi-thread */
+  final def mkArray(xs: List[BType]): Array[BType] = {
+    if (xs.isEmpty) { return EMPTY_BTYPE_ARRAY }
+    val a = new Array[BType](xs.size); xs.copyToArray(a); a
+  }
+  /* can-multi-thread */
+  final def mkArray(xs: List[String]): Array[String] = {
+    if (xs.isEmpty) { return EMPTY_STRING_ARRAY }
+    val a = new Array[String](xs.size); xs.copyToArray(a); a
+  }
+  /* can-multi-thread */
+  final def mkArray(xs: List[asm.Label]): Array[asm.Label] = {
+    if (xs.isEmpty) { return EMPTY_LABEL_ARRAY }
+    val a = new Array[asm.Label](xs.size); xs.copyToArray(a); a
+  }
+
+  /*
+   * can-multi-thread
+   */
+  final def mkArrayReverse(xs: List[String]): Array[String] = {
+    val len = xs.size
+    if (len == 0) { return EMPTY_STRING_ARRAY }
+    val a = new Array[String](len)
+    var i = len - 1
+    var rest = xs
+    while (!rest.isEmpty) {
+      a(i) = rest.head
+      rest = rest.tail
+      i -= 1
+    }
+    a
+  }
+
+  /*
+   * can-multi-thread
+   */
+  final def mkArrayReverse(xs: List[Int]): Array[Int] = {
+    val len = xs.size
+    if (len == 0) { return EMPTY_INT_ARRAY }
+    val a = new Array[Int](len)
+    var i = len - 1
+    var rest = xs
+    while (!rest.isEmpty) {
+      a(i) = rest.head
+      rest = rest.tail
+      i -= 1
+    }
+    a
+  }
+
+  /*
+   * The type of 1-dimensional arrays of `elem` type.
+   * The invoker is responsible for tracking (if needed) the inner class given by the elem BType.
+   *
+   * must-single-thread
+   */
+  final def arrayOf(elem: BType): BType = {
+    assert(!(elem.isUnitType), s"The element type of an array can't be: $elem")
+    brefType("[" + elem.getDescriptor)
+  }
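+  // E.g. arrayOf(INT) has descriptor [I and arrayOf(JAVA_LANG_STRING) has descriptor [Ljava/lang/String;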
+
+  /* Just a namespace for utilities that encapsulate MethodVisitor idioms.
+   *  In the ASM world, org.objectweb.asm.commons.InstructionAdapter plays a similar role,
+   *  but the methods here allow choosing when to transition from ICode to ASM types
+   *  (including not at all, e.g. for performance).
+   */
+  abstract class JCodeMethodN {
+
+    def jmethod: asm.MethodVisitor
+
+    import asm.Opcodes;
+    import icodes.opcodes.{ Static, Dynamic,  SuperCall }
+
+    final def emit(opc: Int) { jmethod.visitInsn(opc) }
+
+    /*
+     * can-multi-thread
+     */
+    final def genPrimitiveArithmetic(op: icodes.ArithmeticOp, kind: BType) {
+
+      import icodes.{ ADD, SUB, MUL, DIV, REM, NOT }
+
+      op match {
+
+        case ADD => add(kind)
+        case SUB => sub(kind)
+        case MUL => mul(kind)
+        case DIV => div(kind)
+        case REM => rem(kind)
+
+        case NOT =>
+          if (kind.isIntSizedType) {
+            emit(Opcodes.ICONST_M1)
+            emit(Opcodes.IXOR)
+          } else if (kind == LONG) {
+            jmethod.visitLdcInsn(new java.lang.Long(-1))
+            jmethod.visitInsn(Opcodes.LXOR)
+          } else {
+            abort(s"Impossible to negate an $kind")
+          }
+
+        case _ =>
+          abort(s"Unknown arithmetic primitive $op")
+      }
+
+    } // end of method genPrimitiveArithmetic()
+
+    /*
+     * can-multi-thread
+     */
+    final def genPrimitiveLogical(op: /* LogicalOp */ Int, kind: BType) {
+
+      import scalaPrimitives.{ AND, OR, XOR }
+
+      ((op, kind): @unchecked) match {
+        case (AND, LONG) => emit(Opcodes.LAND)
+        case (AND, INT)  => emit(Opcodes.IAND)
+        case (AND, _)    =>
+          emit(Opcodes.IAND)
+          if (kind != BOOL) { emitT2T(INT, kind) }
+
+        case (OR, LONG) => emit(Opcodes.LOR)
+        case (OR, INT)  => emit(Opcodes.IOR)
+        case (OR, _) =>
+          emit(Opcodes.IOR)
+          if (kind != BOOL) { emitT2T(INT, kind) }
+
+        case (XOR, LONG) => emit(Opcodes.LXOR)
+        case (XOR, INT)  => emit(Opcodes.IXOR)
+        case (XOR, _) =>
+          emit(Opcodes.IXOR)
+          if (kind != BOOL) { emitT2T(INT, kind) }
+      }
+
+    } // end of method genPrimitiveLogical()
+
+    /*
+     * can-multi-thread
+     */
+    final def genPrimitiveShift(op: /* ShiftOp */ Int, kind: BType) {
+
+      import scalaPrimitives.{ LSL, ASR, LSR }
+
+      ((op, kind): @unchecked) match {
+        case (LSL, LONG) => emit(Opcodes.LSHL)
+        case (LSL, INT)  => emit(Opcodes.ISHL)
+        case (LSL, _) =>
+          emit(Opcodes.ISHL)
+          emitT2T(INT, kind)
+
+        case (ASR, LONG) => emit(Opcodes.LSHR)
+        case (ASR, INT)  => emit(Opcodes.ISHR)
+        case (ASR, _) =>
+          emit(Opcodes.ISHR)
+          emitT2T(INT, kind)
+
+        case (LSR, LONG) => emit(Opcodes.LUSHR)
+        case (LSR, INT)  => emit(Opcodes.IUSHR)
+        case (LSR, _) =>
+          emit(Opcodes.IUSHR)
+          emitT2T(INT, kind)
+      }
+
+    } // end of method genPrimitiveShift()
+
+    /*
+     * can-multi-thread
+     */
+    final def genStartConcat {
+      jmethod.visitTypeInsn(Opcodes.NEW, StringBuilderClassName)
+      jmethod.visitInsn(Opcodes.DUP)
+      invokespecial(
+        StringBuilderClassName,
+        INSTANCE_CONSTRUCTOR_NAME,
+        "()V"
+      )
+    }
+
+    /*
+     * can-multi-thread
+     */
+    final def genStringConcat(el: BType) {
+
+      val jtype =
+        if (el.isArray || el.hasObjectSort) JAVA_LANG_OBJECT
+        else el;
+
+      val bt = BType.getMethodType(StringBuilderReference, Array(jtype))
+
+      invokevirtual(StringBuilderClassName, "append", bt.getDescriptor)
+    }
+
+    /*
+     * can-multi-thread
+     */
+    final def genEndConcat {
+      invokevirtual(StringBuilderClassName, "toString", "()Ljava/lang/String;")
+    }
+
+    /*
+     * Emits one or more conversion instructions based on the types given as arguments.
+     *
+     * @param from The type of the value to be converted into another type.
+     * @param to   The type the value will be converted into.
+     *
+     * can-multi-thread
+     */
+    final def emitT2T(from: BType, to: BType) {
+
+      assert(
+        from.isNonUnitValueType && to.isNonUnitValueType,
+        s"Cannot emit primitive conversion from $from to $to"
+      )
+
+          def pickOne(opcs: Array[Int]) { // TODO index on to.sort
+            val chosen = (to: @unchecked) match {
+              case BYTE   => opcs(0)
+              case SHORT  => opcs(1)
+              case CHAR   => opcs(2)
+              case INT    => opcs(3)
+              case LONG   => opcs(4)
+              case FLOAT  => opcs(5)
+              case DOUBLE => opcs(6)
+            }
+            if (chosen != -1) { emit(chosen) }
+          }
+
+      if (from == to) { return }
+      // the only conversion involving BOOL that is allowed is (BOOL -> BOOL)
+      assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to")
+
+      // We're done with BOOL already
+      (from.sort: @switch) match {
+
+        // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
+
+        case asm.Type.BYTE  => pickOne(JCodeMethodN.fromByteT2T)
+        case asm.Type.SHORT => pickOne(JCodeMethodN.fromShortT2T)
+        case asm.Type.CHAR  => pickOne(JCodeMethodN.fromCharT2T)
+        case asm.Type.INT   => pickOne(JCodeMethodN.fromIntT2T)
+
+        case asm.Type.FLOAT  =>
+          import asm.Opcodes.{ F2L, F2D, F2I }
+          (to.sort: @switch) match {
+            case asm.Type.LONG    => emit(F2L)
+            case asm.Type.DOUBLE  => emit(F2D)
+            case _                => emit(F2I); emitT2T(INT, to)
+          }
+
+        case asm.Type.LONG   =>
+          import asm.Opcodes.{ L2F, L2D, L2I }
+          (to.sort: @switch) match {
+            case asm.Type.FLOAT   => emit(L2F)
+            case asm.Type.DOUBLE  => emit(L2D)
+            case _                => emit(L2I); emitT2T(INT, to)
+          }
+
+        case asm.Type.DOUBLE =>
+          import asm.Opcodes.{ D2L, D2F, D2I }
+          (to.sort: @switch) match {
+            case asm.Type.FLOAT   => emit(D2F)
+            case asm.Type.LONG    => emit(D2L)
+            case _                => emit(D2I); emitT2T(INT, to)
+          }
+      }
+    } // end of emitT2T()
+
+    // can-multi-thread
+    final def boolconst(b: Boolean) { iconst(if (b) 1 else 0) }
+
+    // can-multi-thread
+    final def iconst(cst: Int) {
+      if (cst >= -1 && cst <= 5) {
+        emit(Opcodes.ICONST_0 + cst)
+      } else if (cst >= java.lang.Byte.MIN_VALUE && cst <= java.lang.Byte.MAX_VALUE) {
+        jmethod.visitIntInsn(Opcodes.BIPUSH, cst)
+      } else if (cst >= java.lang.Short.MIN_VALUE && cst <= java.lang.Short.MAX_VALUE) {
+        jmethod.visitIntInsn(Opcodes.SIPUSH, cst)
+      } else {
+        jmethod.visitLdcInsn(new Integer(cst))
+      }
+    }
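+    // iconst() picks the shortest encoding: ICONST_M1..ICONST_5 (1 byte) for -1..5, BIPUSH (2 bytes)
+    // for the byte range, SIPUSH (3 bytes) for the short range, and otherwise an LDC referencing a
+    // constant-pool entry.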
+
+    // can-multi-thread
+    final def lconst(cst: Long) {
+      if (cst == 0L || cst == 1L) {
+        emit(Opcodes.LCONST_0 + cst.asInstanceOf[Int])
+      } else {
+        jmethod.visitLdcInsn(new java.lang.Long(cst))
+      }
+    }
+
+    // can-multi-thread
+    final def fconst(cst: Float) {
+      val bits: Int = java.lang.Float.floatToIntBits(cst)
+      if (bits == 0L || bits == 0x3f800000 || bits == 0x40000000) { // 0..2
+        emit(Opcodes.FCONST_0 + cst.asInstanceOf[Int])
+      } else {
+        jmethod.visitLdcInsn(new java.lang.Float(cst))
+      }
+    }
+
+    // can-multi-thread
+    final def dconst(cst: Double) {
+      val bits: Long = java.lang.Double.doubleToLongBits(cst)
+      if (bits == 0L || bits == 0x3ff0000000000000L) { // +0.0d and 1.0d
+        emit(Opcodes.DCONST_0 + cst.asInstanceOf[Int])
+      } else {
+        jmethod.visitLdcInsn(new java.lang.Double(cst))
+      }
+    }
+
+    // can-multi-thread
+    final def newarray(elem: BType) {
+      if (elem.isRefOrArrayType || elem.isPhantomType ) {
+        /* phantom type at play in `Array(null)`, SI-1513. On the other hand, Array(()) has element type `scala.runtime.BoxedUnit` which hasObjectSort. */
+        jmethod.visitTypeInsn(Opcodes.ANEWARRAY, elem.getInternalName)
+      } else {
+        val rand = {
+          // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
+          (elem.sort: @switch) match {
+            case asm.Type.BOOLEAN => Opcodes.T_BOOLEAN
+            case asm.Type.BYTE    => Opcodes.T_BYTE
+            case asm.Type.SHORT   => Opcodes.T_SHORT
+            case asm.Type.CHAR    => Opcodes.T_CHAR
+            case asm.Type.INT     => Opcodes.T_INT
+            case asm.Type.LONG    => Opcodes.T_LONG
+            case asm.Type.FLOAT   => Opcodes.T_FLOAT
+            case asm.Type.DOUBLE  => Opcodes.T_DOUBLE
+          }
+        }
+        jmethod.visitIntInsn(Opcodes.NEWARRAY, rand)
+      }
+    }
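+    // E.g. newarray(INT) emits NEWARRAY T_INT, whereas newarray(JAVA_LANG_STRING) emits
+    // ANEWARRAY java/lang/String.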
+
+
+    final def load( idx: Int, tk: BType) { emitVarInsn(Opcodes.ILOAD,  idx, tk) } // can-multi-thread
+    final def store(idx: Int, tk: BType) { emitVarInsn(Opcodes.ISTORE, idx, tk) } // can-multi-thread
+
+    final def aload( tk: BType) { emitTypeBased(JCodeMethodN.aloadOpcodes,  tk) } // can-multi-thread
+    final def astore(tk: BType) { emitTypeBased(JCodeMethodN.astoreOpcodes, tk) } // can-multi-thread
+
+    final def neg(tk: BType) { emitPrimitive(JCodeMethodN.negOpcodes, tk) } // can-multi-thread
+    final def add(tk: BType) { emitPrimitive(JCodeMethodN.addOpcodes, tk) } // can-multi-thread
+    final def sub(tk: BType) { emitPrimitive(JCodeMethodN.subOpcodes, tk) } // can-multi-thread
+    final def mul(tk: BType) { emitPrimitive(JCodeMethodN.mulOpcodes, tk) } // can-multi-thread
+    final def div(tk: BType) { emitPrimitive(JCodeMethodN.divOpcodes, tk) } // can-multi-thread
+    final def rem(tk: BType) { emitPrimitive(JCodeMethodN.remOpcodes, tk) } // can-multi-thread
+
+    // can-multi-thread
+    final def invokespecial(owner: String, name: String, desc: String) {
+      jmethod.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, desc)
+    }
+    // can-multi-thread
+    final def invokestatic(owner: String, name: String, desc: String) {
+      jmethod.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, desc)
+    }
+    // can-multi-thread
+    final def invokeinterface(owner: String, name: String, desc: String) {
+      jmethod.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, desc)
+    }
+    // can-multi-thread
+    final def invokevirtual(owner: String, name: String, desc: String) {
+      jmethod.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, desc)
+    }
+
+    // can-multi-thread
+    final def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) }
+    // can-multi-thread
+    final def emitIF(cond: icodes.TestOp, label: asm.Label)      { jmethod.visitJumpInsn(cond.opcodeIF,     label) }
+    // can-multi-thread
+    final def emitIF_ICMP(cond: icodes.TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) }
+    // can-multi-thread
+    final def emitIF_ACMP(cond: icodes.TestOp, label: asm.Label) {
+      assert((cond == icodes.EQ) || (cond == icodes.NE), cond)
+      val opc = (if (cond == icodes.EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE)
+      jmethod.visitJumpInsn(opc, label)
+    }
+    // can-multi-thread
+    final def emitIFNONNULL(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNONNULL, label) }
+    // can-multi-thread
+    final def emitIFNULL   (label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNULL,    label) }
+
+    // can-multi-thread
+    final def emitRETURN(tk: BType) {
+      if (tk == UNIT) { emit(Opcodes.RETURN) }
+      else            { emitTypeBased(JCodeMethodN.returnOpcodes, tk)      }
+    }
+
+    /* Emits one of tableswitch or lookupswitch.
+     *
+     * can-multi-thread
+     */
+    final def emitSWITCH(keys: Array[Int], branches: Array[asm.Label], defaultBranch: asm.Label, minDensity: Double) {
+      assert(keys.length == branches.length)
+
+      // For empty keys, it makes sense to emit LOOKUPSWITCH with defaultBranch only,
+      // similar to what javac emits for a switch statement consisting only of a default case.
+      if (keys.length == 0) {
+        jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches)
+        return
+      }
+
+      // sort `keys` by increasing key, keeping `branches` in sync. TODO FIXME use quicksort
+      var i = 1
+      while (i < keys.length) {
+        var j = 1
+        while (j <= keys.length - i) {
+          if (keys(j) < keys(j - 1)) {
+            val tmp     = keys(j)
+            keys(j)     = keys(j - 1)
+            keys(j - 1) = tmp
+            val tmpL        = branches(j)
+            branches(j)     = branches(j - 1)
+            branches(j - 1) = tmpL
+          }
+          j += 1
+        }
+        i += 1
+      }
+
+      // check for duplicate keys to avoid "VerifyError: unsorted lookupswitch" (SI-6011)
+      i = 1
+      while (i < keys.length) {
+        if (keys(i-1) == keys(i)) {
+          abort("duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.")
+        }
+        i += 1
+      }
+
+      val keyMin = keys(0)
+      val keyMax = keys(keys.length - 1)
+
+      val isDenseEnough: Boolean = {
+        /* Calculate in long to guard against overflow. TODO what overflow? */
+        val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double]
+        val klenD:     Double = keys.length
+        val kdensity:  Double = (klenD / keyRangeD)
+
+        kdensity >= minDensity
+      }
+
+      if (isDenseEnough) {
+        // use a table in which holes are filled with defaultBranch.
+        val keyRange    = (keyMax - keyMin + 1)
+        val newBranches = new Array[asm.Label](keyRange)
+        var oldPos = 0
+        var i = 0
+        while (i < keyRange) {
+          val key = keyMin + i;
+          if (keys(oldPos) == key) {
+            newBranches(i) = branches(oldPos)
+            oldPos += 1
+          } else {
+            newBranches(i) = defaultBranch
+          }
+          i += 1
+        }
+        assert(oldPos == keys.length, "emitSWITCH")
+        jmethod.visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches: _*)
+      } else {
+        jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches)
+      }
+    }
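+
+    /* A worked example of the density decision above (key values are hypothetical, for illustration only):
+     * for keys (1, 3, 4, 5) the key range is 5 and the density is 4/5 = 0.8, so with minDensity = 0.5
+     * a TABLESWITCH is emitted whose slot for the missing key 2 points to defaultBranch;
+     * for keys (1, 1000) the density is 2/1000 = 0.002, which falls back to LOOKUPSWITCH.
+     */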
+
+    // internal helpers -- not part of the public API of `jcode`
+    // don't make these private, otherwise inlining will suffer
+
+    // can-multi-thread
+    final def emitVarInsn(opc: Int, idx: Int, tk: BType) {
+      assert((opc == Opcodes.ILOAD) || (opc == Opcodes.ISTORE), opc)
+      jmethod.visitVarInsn(tk.getOpcode(opc), idx)
+    }
+
+    // ---------------- array load and store ----------------
+
+    // can-multi-thread
+    final def emitTypeBased(opcs: Array[Int], tk: BType) {
+      assert(tk != UNIT, tk)
+      val opc = {
+        if (tk.isRefOrArrayType) {  opcs(0) }
+        else if (tk.isIntSizedType) {
+          (tk: @unchecked) match {
+            case BOOL | BYTE     => opcs(1)
+            case SHORT           => opcs(2)
+            case CHAR            => opcs(3)
+            case INT             => opcs(4)
+          }
+        } else {
+          (tk: @unchecked) match {
+            case LONG            => opcs(5)
+            case FLOAT           => opcs(6)
+            case DOUBLE          => opcs(7)
+          }
+        }
+      }
+      emit(opc)
+    }
+
+    // ---------------- primitive operations ----------------
+
+     // can-multi-thread
+    final def emitPrimitive(opcs: Array[Int], tk: BType) {
+      val opc = {
+        // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
+        (tk.sort: @switch) match {
+          case asm.Type.LONG   => opcs(1)
+          case asm.Type.FLOAT  => opcs(2)
+          case asm.Type.DOUBLE => opcs(3)
+          case _               => opcs(0)
+        }
+      }
+      emit(opc)
+    }
+
+    // can-multi-thread
+    final def drop(tk: BType) { emit(if (tk.isWideType) Opcodes.POP2 else Opcodes.POP) }
+
+    // can-multi-thread
+    final def dup(tk: BType)  { emit(if (tk.isWideType) Opcodes.DUP2 else Opcodes.DUP) }
+
+    // ---------------- type checks and casts ----------------
+
+    // can-multi-thread
+    final def isInstance(tk: BType) {
+      jmethod.visitTypeInsn(Opcodes.INSTANCEOF, tk.getInternalName)
+    }
+
+    // can-multi-thread
+    final def checkCast(tk: BType) {
+      assert(tk.isRefOrArrayType, s"checkcast on primitive type: $tk")
+      // TODO ICode also requires: but that's too much, right? assert(!isBoxedType(tk),     "checkcast on boxed type: " + tk)
+      jmethod.visitTypeInsn(Opcodes.CHECKCAST, tk.getInternalName)
+    }
+
+  } // end of class JCodeMethodN
+
+  /* Constant-valued val-members of JCodeMethodN, hosted in the companion object so as to avoid re-initializing them multiple times. */
+  object JCodeMethodN {
+
+    import asm.Opcodes._
+
+    // ---------------- conversions ----------------
+
+    val fromByteT2T  = { Array( -1,  -1, I2C,  -1, I2L, I2F, I2D) } // do nothing for (BYTE -> SHORT) and for (BYTE -> INT)
+    val fromCharT2T  = { Array(I2B, I2S,  -1,  -1, I2L, I2F, I2D) } // for (CHAR  -> INT) do nothing
+    val fromShortT2T = { Array(I2B,  -1, I2C,  -1, I2L, I2F, I2D) } // for (SHORT -> INT) do nothing
+    val fromIntT2T   = { Array(I2B, I2S, I2C,  -1, I2L, I2F, I2D) }
+
+    // ---------------- array load and store ----------------
+
+    val aloadOpcodes  = { Array(AALOAD,  BALOAD,  SALOAD,  CALOAD,  IALOAD,  LALOAD,  FALOAD,  DALOAD)  }
+    val astoreOpcodes = { Array(AASTORE, BASTORE, SASTORE, CASTORE, IASTORE, LASTORE, FASTORE, DASTORE) }
+    val returnOpcodes = { Array(ARETURN, IRETURN, IRETURN, IRETURN, IRETURN, LRETURN, FRETURN, DRETURN) }
+
+    // ---------------- primitive operations ----------------
+
+    val negOpcodes: Array[Int] = { Array(INEG, LNEG, FNEG, DNEG) }
+    val addOpcodes: Array[Int] = { Array(IADD, LADD, FADD, DADD) }
+    val subOpcodes: Array[Int] = { Array(ISUB, LSUB, FSUB, DSUB) }
+    val mulOpcodes: Array[Int] = { Array(IMUL, LMUL, FMUL, DMUL) }
+    val divOpcodes: Array[Int] = { Array(IDIV, LDIV, FDIV, DDIV) }
+    val remOpcodes: Array[Int] = { Array(IREM, LREM, FREM, DREM) }
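+
+    /* How the dispatch helpers in class JCodeMethodN index into these arrays (illustration only):
+     * `add(LONG)` goes through emitPrimitive(addOpcodes, LONG), whose switch selects opcs(1) = LADD,
+     * while `aload(INT)` goes through emitTypeBased(aloadOpcodes, INT), which selects opcs(4) = IALOAD.
+     */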
+
+  } // end of object JCodeMethodN
+
+  // ---------------- adapted from scalaPrimitives ----------------
+
+  /* Given a coercion primitive `code`, reports the source TypeKind of that coercion.
+   * To find the dst TypeKind, `ScalaPrimitives.generatedKind(code)` can be used.
+   *
+   * can-multi-thread
+   */
+  final def coercionFrom(code: Int): BType = {
+    import scalaPrimitives._
+    (code: @switch) match {
+      case B2B | B2C | B2S | B2I | B2L | B2F | B2D => BYTE
+      case S2B | S2S | S2C | S2I | S2L | S2F | S2D => SHORT
+      case C2B | C2S | C2C | C2I | C2L | C2F | C2D => CHAR
+      case I2B | I2S | I2C | I2I | I2L | I2F | I2D => INT
+      case L2B | L2S | L2C | L2I | L2L | L2F | L2D => LONG
+      case F2B | F2S | F2C | F2I | F2L | F2F | F2D => FLOAT
+      case D2B | D2S | D2C | D2I | D2L | D2F | D2D => DOUBLE
+    }
+  }
+
+  /* If `code` is a coercion primitive, reports the result (destination) TypeKind.
+   *
+   * can-multi-thread
+   */
+  final def coercionTo(code: Int): BType = {
+    import scalaPrimitives._
+    (code: @scala.annotation.switch) match {
+      case B2B | C2B | S2B | I2B | L2B | F2B | D2B => BYTE
+      case B2C | C2C | S2C | I2C | L2C | F2C | D2C => CHAR
+      case B2S | C2S | S2S | I2S | L2S | F2S | D2S => SHORT
+      case B2I | C2I | S2I | I2I | L2I | F2I | D2I => INT
+      case B2L | C2L | S2L | I2L | L2L | F2L | D2L => LONG
+      case B2F | C2F | S2F | I2F | L2F | F2F | D2F => FLOAT
+      case B2D | C2D | S2D | I2D | L2D | F2D | D2D => DOUBLE
+    }
+  }
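+
+  /* Worked example for the two tables above: for the coercion primitive I2L,
+   * coercionFrom(I2L) yields INT and coercionTo(I2L) yields LONG,
+   * ie the primitive converts an INT operand into a LONG result.
+   */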
+
+  final val typeOfArrayOp: Map[Int, BType] = {
+    import scalaPrimitives._
+    Map(
+      (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL))   ++
+      (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE))   ++
+      (List(SARRAY_LENGTH, SARRAY_GET, SARRAY_SET) map (_ -> SHORT))  ++
+      (List(CARRAY_LENGTH, CARRAY_GET, CARRAY_SET) map (_ -> CHAR))   ++
+      (List(IARRAY_LENGTH, IARRAY_GET, IARRAY_SET) map (_ -> INT))    ++
+      (List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG))   ++
+      (List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT))  ++
+      (List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++
+      (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectReference)) : _*
+    )
+  }
+
+  /*
+   * Collects (in `result`) all LabelDef nodes enclosed (directly or not) by each node it visits.
+   *
+   * In other words, this traverser prepares a map giving
+   * all labelDefs (the entry-value) having a Tree node (the entry-key) as ancestor.
+   * The entry-value for a LabelDef entry-key always contains the entry-key.
+   *
+   */
+  class LabelDefsFinder extends Traverser {
+    val result = mutable.Map.empty[Tree, List[LabelDef]]
+    var acc: List[LabelDef] = Nil
+
+    /*
+     * can-multi-thread
+     */
+    override def traverse(tree: Tree) {
+      val saved = acc
+      acc = Nil
+      super.traverse(tree)
+      // acc contains all LabelDefs found under (but not at) `tree`
+      tree match {
+        case lblDf: LabelDef => acc ::= lblDf
+        case _               => ()
+      }
+      if (acc.isEmpty) {
+        acc = saved
+      } else {
+        result += (tree -> acc)
+        acc = acc ::: saved
+      }
+    }
+  }
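+
+  /* For illustration (hypothetical labels L1, L2): given a Block whose only statement is a LabelDef L1
+   * whose rhs in turn contains a LabelDef L2, traversing the Block yields
+   *   result(L2)    == List(L2)
+   *   result(L1)    == List(L1, L2)
+   *   result(Block) == List(L1, L2)
+   * ie each entry lists the LabelDefs at or under the entry-key, and a LabelDef is listed under itself.
+   */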
+
+  implicit class InsnIterMethodNode(mnode: asm.tree.MethodNode) {
+    @inline final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit) { mnode.instructions.foreachInsn(f) }
+  }
+
+  implicit class InsnIterInsnList(lst: asm.tree.InsnList) {
+
+    @inline final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit) {
+      val insnIter = lst.iterator()
+      while (insnIter.hasNext) {
+        f(insnIter.next())
+      }
+    }
+  }
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
new file mode 100644
index 0000000..360ce58
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
@@ -0,0 +1,724 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+
+package scala
+package tools.nsc
+package backend
+package jvm
+
+import scala.collection.{ mutable, immutable }
+import scala.tools.nsc.symtab._
+import scala.annotation.switch
+
+import scala.tools.asm
+
+/*
+ *
+ *  @author  Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+ *  @version 1.0
+ *
+ */
+abstract class BCodeSkelBuilder extends BCodeHelpers {
+  import global._
+
+  /*
+   * There's a dedicated PlainClassBuilder for each CompilationUnit,
+   * which simplifies the initialization of per-class data structures in `genPlainClass()`, which in turn delegates to `initJClass()`.
+   *
+   * The entry-point to emitting bytecode instructions is `genDefDef()` where the per-method data structures are initialized,
+   * including `resetMethodBookkeeping()` and `initJMethod()`.
+   * Once that's been done, and assuming the method being visited isn't abstract, `emitNormalMethodBody()` populates
+   * the ASM MethodNode instance with ASM AbstractInsnNodes.
+   *
+   * Given that CleanUp delivers trees that produce values on the stack,
+   * the entry-point to all-things instruction-emit is `genLoad()`.
+   * There, an operation taking N arguments results in recursively emitting instructions to load each of them,
+   * followed by emitting instructions to process those arguments (to be found at run-time on the operand-stack).
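+   * (For example, under this scheme an Int-valued `x + 1` is emitted as a load of `x`, a load of the
+   * constant 1, and finally the IADD instruction that consumes both operands.)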
+   *
+   * In a few cases the above recipe deserves more details, as provided in the documentation for:
+   *   - `genLoadTry()`
+   *   - `genSynchronized()`
+   *   - `jumpDest` , `cleanups` , `labelDefsAtOrUnder`
+   */
+  abstract class PlainSkelBuilder(cunit: CompilationUnit)
+    extends BCClassGen
+    with    BCAnnotGen
+    with    BCInnerClassGen
+    with    JAndroidBuilder
+    with    BCForwardersGen
+    with    BCPickles
+    with    BCJGenSigGen {
+
+    // Strangely, this limit can't be found in the ASM code: the JVM allows at most 255 parameter slots, 1 of which is reserved for "this"
+    final val MaximumJvmParameters = 254
+
+    // current class
+    var cnode: asm.tree.ClassNode  = null
+    var thisName: String           = null // the internal name of the class being emitted
+
+    var claszSymbol: Symbol        = null
+    var isCZParcelable             = false
+    var isCZStaticModule           = false
+    var isCZRemote                 = false
+
+    /* ---------------- idiomatic way to ask questions to typer ---------------- */
+
+    def paramTKs(app: Apply): List[BType] = {
+      val Apply(fun, _)  = app
+      val funSym = fun.symbol
+      (funSym.info.paramTypes map toTypeKind) // this tracks mentioned inner classes (in innerClassBufferASM)
+    }
+
+    def symInfoTK(sym: Symbol): BType = {
+      toTypeKind(sym.info) // this tracks mentioned inner classes (in innerClassBufferASM)
+    }
+
+    def tpeTK(tree: Tree): BType = { toTypeKind(tree.tpe) }
+
+    def log(msg: => AnyRef) {
+      global synchronized { global.log(msg) }
+    }
+
+    override def getCurrentCUnit(): CompilationUnit = { cunit }
+
+    /* ---------------- helper utils for generating classes and fields ---------------- */
+
+    def genPlainClass(cd: ClassDef) {
+      assert(cnode == null, "GenBCode detected nested methods.")
+      innerClassBufferASM.clear()
+
+      claszSymbol       = cd.symbol
+      isCZParcelable    = isAndroidParcelableClass(claszSymbol)
+      isCZStaticModule  = isStaticModule(claszSymbol)
+      isCZRemote        = isRemote(claszSymbol)
+      thisName          = internalName(claszSymbol)
+
+      cnode = new asm.tree.ClassNode()
+
+      initJClass(cnode)
+
+      val hasStaticCtor = methodSymbols(cd) exists (_.isStaticConstructor)
+      if (!hasStaticCtor) {
+        // but needs one ...
+        if (isCZStaticModule || isCZParcelable) {
+          fabricateStaticInit()
+        }
+      }
+
+      val optSerial: Option[Long] = serialVUID(claszSymbol)
+      if (optSerial.isDefined) { addSerialVUID(optSerial.get, cnode)}
+
+      addClassFields()
+
+      innerClassBufferASM ++= trackMemberClasses(claszSymbol, Nil)
+
+      gen(cd.impl)
+
+      assert(cd.symbol == claszSymbol, "Someone messed up BCodePhase.claszSymbol during genPlainClass().")
+
+    } // end of method genPlainClass()
+
+    /*
+     * must-single-thread
+     */
+    private def initJClass(jclass: asm.ClassVisitor) {
+
+      val ps = claszSymbol.info.parents
+      val superClass: String = if (ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else internalName(ps.head.typeSymbol);
+      val ifaces: Array[String] = {
+        val arrIfacesTr: Array[Tracked] = exemplar(claszSymbol).ifaces
+        val arrIfaces = new Array[String](arrIfacesTr.length)
+        var i = 0
+        while (i < arrIfacesTr.length) {
+          val ifaceTr = arrIfacesTr(i)
+          val bt = ifaceTr.c
+          if (ifaceTr.isInnerClass) { innerClassBufferASM += bt }
+          arrIfaces(i) = bt.getInternalName
+          i += 1
+        }
+        arrIfaces
+      }
+      // `internalName()` tracks inner classes.
+
+      val flags = mkFlags(
+        javaFlags(claszSymbol),
+        if (isDeprecated(claszSymbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+      )
+
+      val thisSignature = getGenericSignature(claszSymbol, claszSymbol.owner)
+      cnode.visit(classfileVersion, flags,
+                  thisName, thisSignature,
+                  superClass, ifaces)
+
+      if (emitSource) {
+        cnode.visitSource(cunit.source.toString, null /* SourceDebugExtension */)
+      }
+
+      val enclM = getEnclosingMethodAttribute(claszSymbol)
+      if (enclM != null) {
+        val EnclMethodEntry(className, methodName, methodType) = enclM
+        cnode.visitOuterClass(className, methodName, methodType.getDescriptor)
+      }
+
+      val ssa = getAnnotPickle(thisName, claszSymbol)
+      cnode.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
+      emitAnnotations(cnode, claszSymbol.annotations ++ ssa)
+
+      if (isCZStaticModule || isCZParcelable) {
+
+        if (isCZStaticModule) { addModuleInstanceField() }
+
+      } else {
+
+        val skipStaticForwarders = (claszSymbol.isInterface || settings.noForwarders)
+        if (!skipStaticForwarders) {
+          val lmoc = claszSymbol.companionModule
+          // add static forwarders if there are no name conflicts; see bugs #363 and #1735
+          if (lmoc != NoSymbol) {
+            // it must be a top level class (name contains no $s)
+            val isCandidateForForwarders = {
+              exitingPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
+            }
+            if (isCandidateForForwarders) {
+              log(s"Adding static forwarders from '$claszSymbol' to implementations in '$lmoc'")
+              addForwarders(isRemote(claszSymbol), cnode, thisName, lmoc.moduleClass)
+            }
+          }
+        }
+
+      }
+
+      // the invoker is responsible for adding a class-static constructor.
+
+    } // end of method initJClass
+
+    /*
+     * can-multi-thread
+     */
+    private def addModuleInstanceField() {
+      val fv =
+        cnode.visitField(PublicStaticFinal, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
+                         strMODULE_INSTANCE_FIELD,
+                         "L" + thisName + ";",
+                         null, // no java-generic-signature
+                         null  // no initial value
+        )
+
+      fv.visitEnd()
+    }
+
+    /*
+     * must-single-thread
+     */
+    private def fabricateStaticInit() {
+
+      val clinit: asm.MethodVisitor = cnode.visitMethod(
+        PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
+        CLASS_CONSTRUCTOR_NAME,
+        "()V",
+        null, // no java-generic-signature
+        null  // no throwable exceptions
+      )
+      clinit.visitCode()
+
+      /* "legacy static initialization" */
+      if (isCZStaticModule) {
+        clinit.visitTypeInsn(asm.Opcodes.NEW, thisName)
+        clinit.visitMethodInsn(asm.Opcodes.INVOKESPECIAL,
+                               thisName, INSTANCE_CONSTRUCTOR_NAME, "()V")
+      }
+      if (isCZParcelable) { legacyAddCreatorCode(clinit, cnode, thisName) }
+      clinit.visitInsn(asm.Opcodes.RETURN)
+
+      clinit.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+      clinit.visitEnd()
+    }
+
+    def addClassFields() {
+      /*  Non-method term members are fields, except for module members. Module
+       *  members can only happen on .NET (no flatten) for inner traits. There,
+       *  a module symbol is generated (transformInfo in mixin) which is used
+       *  as owner for the members of the implementation class (so that the
+       *  backend emits them as static).
+       *  No code is needed for this module symbol.
+       */
+      for (f <- fieldSymbols(claszSymbol)) {
+        val javagensig = getGenericSignature(f, claszSymbol)
+        val flags = mkFlags(
+          javaFieldFlags(f),
+          if (isDeprecated(f)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+        )
+
+        val jfield = new asm.tree.FieldNode(
+          flags,
+          f.javaSimpleName.toString,
+          symInfoTK(f).getDescriptor,
+          javagensig,
+          null // no initial value
+        )
+        cnode.fields.add(jfield)
+        emitAnnotations(jfield, f.annotations)
+      }
+
+    } // end of method addClassFields()
+
+    // current method
+    var mnode: asm.tree.MethodNode = null
+    var jMethodName: String        = null
+    var isMethSymStaticCtor        = false
+    var returnType: BType          = null
+    var methSymbol: Symbol         = null
+    // in GenASM this is local to genCode(), ie should get false whenever a new method is emitted (including fabricated ones eg addStaticInit())
+    var isModuleInitialized        = false
+    // used by genLoadTry() and genSynchronized()
+    var earlyReturnVar: Symbol     = null
+    var shouldEmitCleanup          = false
+    var insideCleanupBlock         = false
+    // line numbers
+    var lastEmittedLineNr          = -1
+
+    object bc extends JCodeMethodN {
+      override def jmethod = PlainSkelBuilder.this.mnode
+    }
+
+    /* ---------------- Part 1 of program points, ie Labels in the ASM world ---------------- */
+
+    /*
+     *  A jump is represented as an Apply node whose symbol denotes a LabelDef, the target of the jump.
+     *  The `jumpDest` map is used to:
+     *    (a) find the asm.Label for the target, given an Apply node's symbol;
+     *    (b) anchor an asm.Label in the instruction stream, given a LabelDef node.
+     *  In other words, (a) is necessary when visiting a jump-source, and (b) when visiting a jump-target.
+     *  A related map is `labelDef`: it has the same keys as `jumpDest` but its values are LabelDef nodes not asm.Labels.
+     *
+     */
+    var jumpDest: immutable.Map[ /* LabelDef */ Symbol, asm.Label ] = null
+    def programPoint(labelSym: Symbol): asm.Label = {
+      assert(labelSym.isLabel, s"trying to map a non-label symbol to an asm.Label, at: ${labelSym.pos}")
+      jumpDest.getOrElse(labelSym, {
+        val pp = new asm.Label
+        jumpDest += (labelSym -> pp)
+        pp
+      })
+    }
+
+    /*
+     *  A program point may be lexically nested (at some depth)
+     *    (a) in the try-clause of a try-with-finally expression
+     *    (b) in a synchronized block.
+     *  Each of the constructs above establishes a "cleanup block" to execute upon
+     *  normal-exit, early-return, and abrupt-termination of the instructions it encloses.
+     *
+     *  The `cleanups` LIFO queue represents the nesting of active (for the current program point)
+     *  pending cleanups. For each such cleanup an asm.Label indicates the start of its cleanup-block.
+     *  At any given time during traversal of the method body,
+     *  the head of `cleanups` denotes the cleanup-block for the closest enclosing try-with-finally or synchronized-expression.
+     *
+     *  `cleanups` is used:
+     *
+     *    (1) upon visiting a Return statement.
+     *        In case of pending cleanups, we can't just emit a RETURN instruction, but must instead:
+     *          - store the result (if any) in `earlyReturnVar`, and
+     *          - jump to the next pending cleanup.
+     *        See `genReturn()`
+     *
+     *    (2) upon emitting a try-with-finally or a synchronized-expr,
+     *        In these cases, the targets of the above jumps are emitted,
+     *        provided an early exit was actually encountered somewhere in the protected clauses.
+     *        See `genLoadTry()` and `genSynchronized()`
+     *
+     *  The code thus emitted for jumps and targets covers the early-return case.
+     *  The case of abrupt (ie exceptional) termination is covered by exception handlers
+     *  emitted for that purpose as described in `genLoadTry()` and `genSynchronized()`.
+     */
+    var cleanups: List[asm.Label] = Nil
+    def registerCleanup(finCleanup: asm.Label) {
+      if (finCleanup != null) { cleanups = finCleanup :: cleanups }
+    }
+    def unregisterCleanup(finCleanup: asm.Label) {
+      if (finCleanup != null) {
+        assert(cleanups.head eq finCleanup,
+               s"Bad nesting of cleanup operations: $cleanups trying to unregister: $finCleanup")
+        cleanups = cleanups.tail
+      }
+    }
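+
+    /* For illustration (hypothetical labels): in `try { try { return expr } finally { g() } } finally { h() }`,
+     * while the inner return is being emitted `cleanups` is List(labelG, labelH) (innermost first).
+     * Instead of a RETURN, the emitter stores the value in `earlyReturnVar` and jumps to labelG;
+     * the cleanup section for g() then jumps on to labelH, whose cleanup section loads `earlyReturnVar`
+     * and only then emits the actual RETURN (see `pendingCleanups()` in BCodeSyncAndTry).
+     */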
+
+    /* ---------------- local variables and params ---------------- */
+
+    case class Local(tk: BType, name: String, idx: Int, isSynth: Boolean)
+
+    /*
+     * Bookkeeping for method-local vars and method-params.
+     */
+    object locals {
+
+      private val slots = mutable.Map.empty[Symbol, Local] // (local-or-param-sym -> Local(BType, name, idx, isSynth))
+
+      private var nxtIdx = -1 // next available index for local-var
+
+      def reset(isStaticMethod: Boolean) {
+        slots.clear()
+        nxtIdx = if (isStaticMethod) 0 else 1
+      }
+
+      def contains(locSym: Symbol): Boolean = { slots.contains(locSym) }
+
+      def apply(locSym: Symbol): Local = { slots.apply(locSym) }
+
+      /* Make a fresh local variable, ensuring a unique name.
+       * The invoker must make sure inner classes are tracked for the sym's tpe.
+       */
+      def makeLocal(tk: BType, name: String): Symbol = {
+        val locSym = methSymbol.newVariable(cunit.freshTermName(name), NoPosition, Flags.SYNTHETIC) // setInfo tpe
+        makeLocal(locSym, tk)
+        locSym
+      }
+
+      def makeLocal(locSym: Symbol): Local = {
+        makeLocal(locSym, symInfoTK(locSym))
+      }
+
+      def getOrMakeLocal(locSym: Symbol): Local = {
+        // `getOrElse` below has the same effect as `getOrElseUpdate` because `makeLocal()` adds an entry to the `locals` map.
+        slots.getOrElse(locSym, makeLocal(locSym))
+      }
+
+      private def makeLocal(sym: Symbol, tk: BType): Local = {
+        assert(!slots.contains(sym), "attempt to create duplicate local var.")
+        assert(nxtIdx != -1, "not a valid start index")
+        val loc = Local(tk, sym.javaSimpleName.toString, nxtIdx, sym.isSynthetic)
+        slots += (sym -> loc)
+        assert(tk.getSize > 0, "makeLocal called for a symbol whose type is Unit.")
+        nxtIdx += tk.getSize
+        loc
+      }
+
+      // not to be confused with `fieldStore` and `fieldLoad`, which take a field-symbol rather than a local's symbol.
+      def store(locSym: Symbol) {
+        val Local(tk, _, idx, _) = slots(locSym)
+        bc.store(idx, tk)
+      }
+
+      def load(locSym: Symbol) {
+        val Local(tk, _, idx, _) = slots(locSym)
+        bc.load(idx, tk)
+      }
+
+    }
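+
+    /* Slot assignment example (sketch, for a hypothetical instance method `def f(x: Long, s: String)`):
+     * reset(isStaticMethod = false) sets nxtIdx to 1 (slot 0 holds the receiver);
+     * makeLocal for `x` (LONG, size 2) is assigned index 1 and bumps nxtIdx to 3;
+     * makeLocal for `s` (a reference, size 1) is assigned index 3.
+     */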
+
+    /* ---------------- Part 2 of program points, ie Labels in the ASM world ---------------- */
+
+    /*
+     *  The semantics of try-with-finally and synchronized-expr require their cleanup code
+     *  to be present in three forms in the emitted bytecode:
+     *    (a) as normal-exit code, reached via fall-through from the last program point being protected,
+     *    (b) as code reached upon early-return from an enclosed return statement.
+     *        The only difference between (a) and (b) is their next program-point:
+     *          the former must continue with fall-through while
+     *          the latter must continue to the next early-return cleanup (if any, otherwise return from the method).
+     *        Otherwise they are identical.
+     *    (c) as exception-handler, reached via exceptional control flow,
+     *        which rethrows the caught exception once it's done with the cleanup code.
+     *
+     *  A particular cleanup may in general contain LabelDefs. Care is needed when duplicating such jump-targets,
+     *  so as to preserve agreement with the (also duplicated) jump-sources.
+     *  This is achieved based on the bookkeeping provided by two maps:
+     *    - `labelDefsAtOrUnder` lists all LabelDefs enclosed by a given Tree node (the key)
+     *    - `labelDef` provides the LabelDef node whose symbol is used as key.
+     *       As a sidenote, a related map is `jumpDest`: it has the same keys as `labelDef` but its values are asm.Labels not LabelDef nodes.
+     *
+     *  Details in `emitFinalizer()`, which is invoked from `genLoadTry()` and `genSynchronized()`.
+     */
+    var labelDefsAtOrUnder: scala.collection.Map[Tree, List[LabelDef]] = null
+    var labelDef: scala.collection.Map[Symbol, LabelDef] = null // (LabelDef-sym -> LabelDef)
+
+    // bookkeeping the scopes of non-synthetic local vars, to emit debug info (`emitVars`).
+    var varsInScope: List[Tuple2[Symbol, asm.Label]] = null // (local-var-sym -> start-of-scope)
+
+    // helpers around program-points.
+    def lastInsn: asm.tree.AbstractInsnNode = {
+      mnode.instructions.getLast
+    }
+    def currProgramPoint(): asm.Label = {
+      lastInsn match {
+        case labnode: asm.tree.LabelNode => labnode.getLabel
+        case _ =>
+          val pp = new asm.Label
+          mnode visitLabel pp
+          pp
+      }
+    }
+    def markProgramPoint(lbl: asm.Label) {
+      val skip = (lbl == null) || isAtProgramPoint(lbl)
+      if (!skip) { mnode visitLabel lbl }
+    }
+    def isAtProgramPoint(lbl: asm.Label): Boolean = {
+      (lastInsn match { case labnode: asm.tree.LabelNode => (labnode.getLabel == lbl); case _ => false } )
+    }
+    def lineNumber(tree: Tree) {
+      if (!emitLines || !tree.pos.isDefined) return;
+      val nr = tree.pos.finalPosition.line
+      if (nr != lastEmittedLineNr) {
+        lastEmittedLineNr = nr
+        lastInsn match {
+          case lnn: asm.tree.LineNumberNode =>
+            // overwrite previous landmark as no instructions have been emitted for it
+            lnn.line = nr
+          case _ =>
+            mnode.visitLineNumber(nr, currProgramPoint())
+        }
+      }
+    }
+
+    // on entering a method
+    def resetMethodBookkeeping(dd: DefDef) {
+      locals.reset(isStaticMethod = methSymbol.isStaticMember)
+      jumpDest = immutable.Map.empty[ /* LabelDef */ Symbol, asm.Label ]
+      // populate labelDefsAtOrUnder
+      val ldf = new LabelDefsFinder
+      ldf.traverse(dd.rhs)
+      labelDefsAtOrUnder = ldf.result.withDefaultValue(Nil)
+      labelDef = labelDefsAtOrUnder(dd.rhs).map(ld => (ld.symbol -> ld)).toMap
+      // check previous invocation of genDefDef exited as many varsInScope as it entered.
+      assert(varsInScope == null, "Unbalanced entering/exiting of GenBCode's genBlock().")
+      // check previous invocation of genDefDef unregistered as many cleanups as it registered.
+      assert(cleanups == Nil, "Previous invocation of genDefDef didn't unregister as many cleanups as it registered.")
+      isModuleInitialized = false
+      earlyReturnVar      = null
+      shouldEmitCleanup   = false
+
+      lastEmittedLineNr = -1
+    }
+
+    /* ---------------- top-down traversal invoking ASM Tree API along the way ---------------- */
+
+    def gen(tree: Tree) {
+      tree match {
+        case EmptyTree => ()
+
+        case _: ModuleDef => abort(s"Modules should have been eliminated by refchecks: $tree")
+
+        case ValDef(mods, name, tpt, rhs) => () // fields are added in `genPlainClass()`, via `addClassFields()`
+
+        case dd : DefDef => genDefDef(dd)
+
+        case Template(_, _, body) => body foreach gen
+
+        case _ => abort(s"Illegal tree in gen: $tree")
+      }
+    }
+
+    /*
+     * must-single-thread
+     */
+    def initJMethod(flags: Int, paramAnnotations: List[List[AnnotationInfo]]) {
+
+      val jgensig = getGenericSignature(methSymbol, claszSymbol)
+      addRemoteExceptionAnnot(isCZRemote, hasPublicBitSet(flags), methSymbol)
+      val (excs, others) = methSymbol.annotations partition (_.symbol == definitions.ThrowsClass)
+      val thrownExceptions: List[String] = getExceptions(excs)
+
+      val bytecodeName =
+        if (isMethSymStaticCtor) CLASS_CONSTRUCTOR_NAME
+        else jMethodName
+
+      val mdesc = asmMethodType(methSymbol).getDescriptor
+      mnode = cnode.visitMethod(
+        flags,
+        bytecodeName,
+        mdesc,
+        jgensig,
+        mkArray(thrownExceptions)
+      ).asInstanceOf[asm.tree.MethodNode]
+
+      // TODO param names: (m.params map (p => javaName(p.sym)))
+
+      emitAnnotations(mnode, others)
+      emitParamAnnotations(mnode, paramAnnotations)
+
+    } // end of method initJMethod
+
+
+    def genDefDef(dd: DefDef) {
+      // the only method whose implementation is not emitted: getClass()
+      if (definitions.isGetClass(dd.symbol)) { return }
+      assert(mnode == null, "GenBCode detected nested method.")
+
+      methSymbol  = dd.symbol
+      jMethodName = methSymbol.javaSimpleName.toString
+      returnType  = asmMethodType(dd.symbol).getReturnType
+      isMethSymStaticCtor = methSymbol.isStaticConstructor
+
+      resetMethodBookkeeping(dd)
+
+      // add method-local vars for params
+      val DefDef(_, _, _, vparamss, _, rhs) = dd
+      assert(vparamss.isEmpty || vparamss.tail.isEmpty, s"Malformed parameter list: $vparamss")
+      val params = if (vparamss.isEmpty) Nil else vparamss.head
+      for (p <- params) { locals.makeLocal(p.symbol) }
+      // debug assert((params.map(p => locals(p.symbol).tk)) == asmMethodType(methSymbol).getArgumentTypes.toList, "debug")
+
+      if (params.size > MaximumJvmParameters) {
+        // SI-7324
+        cunit.error(methSymbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.")
+        return
+      }
+
+      val isNative         = methSymbol.hasAnnotation(definitions.NativeAttr)
+      val isAbstractMethod = (methSymbol.isDeferred || methSymbol.owner.isInterface)
+      val flags = mkFlags(
+        javaFlags(methSymbol),
+        if (claszSymbol.isInterface) asm.Opcodes.ACC_ABSTRACT   else 0,
+        if (methSymbol.isStrictFP)   asm.Opcodes.ACC_STRICT     else 0,
+        if (isNative)                asm.Opcodes.ACC_NATIVE     else 0, // native methods of objects are generated in mirror classes
+        if (isDeprecated(methSymbol)) asm.Opcodes.ACC_DEPRECATED else 0  // ASM pseudo access flag
+      )
+
+      // TODO needed? for(ann <- m.symbol.annotations) { ann.symbol.initialize }
+      initJMethod(flags, params.map(p => p.symbol.annotations))
+
+      /* Add method-local vars for LabelDef-params.
+       *
+       * This makes sure that:
+       *   (1) upon visiting any "forward-jumping" Apply (ie visited before its target LabelDef), and after
+       *   (2) grabbing the corresponding param symbols,
+       * those param-symbols can be used to access method-local vars.
+       *
+       * When duplicating a finally-contained LabelDef, another program-point is needed for the copy (each such copy has its own asm.Label),
+       * but the same vars (given by the LabelDef's params) can be reused,
+       * because no LabelDef ends up nested within itself after such duplication.
+       */
+      for(ld <- labelDefsAtOrUnder(dd.rhs); ldp <- ld.params; if !locals.contains(ldp.symbol)) {
+        // the tail-calls xform results in symbols shared btw method-params and labelDef-params, thus the guard above.
+        locals.makeLocal(ldp.symbol)
+      }
+
+      if (!isAbstractMethod && !isNative) {
+
+        def emitNormalMethodBody() {
+          val veryFirstProgramPoint = currProgramPoint()
+          genLoad(rhs, returnType)
+
+          rhs match {
+            case Block(_, Return(_)) => ()
+            case Return(_) => ()
+            case EmptyTree =>
+              globalError("Concrete method has no definition: " + dd + (
+                if (settings.debug) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")"
+                else "")
+              )
+            case _ =>
+              bc emitRETURN returnType
+          }
+          if (emitVars) {
+            // add entries to LocalVariableTable JVM attribute
+            val onePastLastProgramPoint = currProgramPoint()
+            val hasStaticBitSet = ((flags & asm.Opcodes.ACC_STATIC) != 0)
+            if (!hasStaticBitSet) {
+              mnode.visitLocalVariable(
+                "this",
+                "L" + thisName + ";",
+                null,
+                veryFirstProgramPoint,
+                onePastLastProgramPoint,
+                0
+              )
+            }
+            for (p <- params) { emitLocalVarScope(p.symbol, veryFirstProgramPoint, onePastLastProgramPoint, force = true) }
+          }
+
+          if (isMethSymStaticCtor) { appendToStaticCtor(dd) }
+        } // end of emitNormalMethodBody()
+
+        lineNumber(rhs)
+        emitNormalMethodBody()
+
+        // Note we don't invoke visitMaxs, thus there are no FrameNodes among mnode.instructions.
+        // The only non-instruction nodes to be found are LabelNode and LineNumberNode.
+      }
+      mnode = null
+    } // end of method genDefDef()
+
+    /*
+     *  must-single-thread
+     *
+     *  TODO document, explain interplay with `fabricateStaticInit()`
+     */
+    private def appendToStaticCtor(dd: DefDef) {
+
+      def insertBefore(
+            location: asm.tree.AbstractInsnNode,
+            i0: asm.tree.AbstractInsnNode,
+            i1: asm.tree.AbstractInsnNode) {
+        if (i0 != null) {
+          mnode.instructions.insertBefore(location, i0.clone(null))
+          mnode.instructions.insertBefore(location, i1.clone(null))
+        }
+      }
+
+      // collect all return instructions
+      var rets: List[asm.tree.AbstractInsnNode] = Nil
+      mnode foreachInsn { i => if (i.getOpcode() == asm.Opcodes.RETURN) { rets ::= i  } }
+      if (rets.isEmpty) { return }
+
+      var insnModA: asm.tree.AbstractInsnNode = null
+      var insnModB: asm.tree.AbstractInsnNode = null
+      // call object's private ctor from static ctor
+      if (isCZStaticModule) {
+        // NEW `moduleName`
+        val className = internalName(methSymbol.enclClass)
+        insnModA      = new asm.tree.TypeInsnNode(asm.Opcodes.NEW, className)
+        // INVOKESPECIAL <init>
+        val callee = methSymbol.enclClass.primaryConstructor
+        val jname  = callee.javaSimpleName.toString
+        val jowner = internalName(callee.owner)
+        val jtype  = asmMethodType(callee).getDescriptor
+        insnModB   = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESPECIAL, jowner, jname, jtype)
+      }
+
+      var insnParcA: asm.tree.AbstractInsnNode = null
+      var insnParcB: asm.tree.AbstractInsnNode = null
+      // android creator code
+      if (isCZParcelable) {
+        // add a static field ("CREATOR") to this class to cache android.os.Parcelable$Creator
+        val andrFieldDescr = asmClassType(AndroidCreatorClass).getDescriptor
+        cnode.visitField(
+          asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL,
+          "CREATOR",
+          andrFieldDescr,
+          null,
+          null
+        )
+        // INVOKESTATIC CREATOR(): android.os.Parcelable$Creator; -- TODO where does this Android method come from?
+        val callee = definitions.getMember(claszSymbol.companionModule, androidFieldName)
+        val jowner = internalName(callee.owner)
+        val jname  = callee.javaSimpleName.toString
+        val jtype  = asmMethodType(callee).getDescriptor
+        insnParcA  = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESTATIC, jowner, jname, jtype)
+        // PUTSTATIC `thisName`.CREATOR;
+        insnParcB  = new asm.tree.FieldInsnNode(asm.Opcodes.PUTSTATIC, thisName, "CREATOR", andrFieldDescr)
+      }
+
+      // insert a few instructions for initialization before each return instruction
+      for(r <- rets) {
+        insertBefore(r, insnModA,  insnModB)
+        insertBefore(r, insnParcA, insnParcB)
+      }
+
+    }
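+
+    /* Net effect for a static module (illustration): every RETURN in <clinit> ends up preceded by
+     *   NEW <module class>
+     *   INVOKESPECIAL <module class>.<init> ()V
+     * so the module instance is constructed before the static initializer completes.
+     */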
+
+    def emitLocalVarScope(sym: Symbol, start: asm.Label, end: asm.Label, force: Boolean = false) {
+      val Local(tk, name, idx, isSynth) = locals(sym)
+      if (force || !isSynth) {
+        mnode.visitLocalVariable(name, tk.getDescriptor, null, start, end, idx)
+      }
+    }
+
+    def genLoad(tree: Tree, expectedType: BType)
+
+  } // end of class PlainSkelBuilder
+
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
new file mode 100644
index 0000000..9ddb7a3
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
@@ -0,0 +1,395 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+
+package scala
+package tools.nsc
+package backend
+package jvm
+
+import scala.collection.{ mutable, immutable }
+import scala.annotation.switch
+
+import scala.tools.asm
+
+/*
+ *
+ *  @author  Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+ *  @version 1.0
+ *
+ */
+abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
+  import global._
+
+
+  /*
+   * Functionality to lower `synchronized` and `try` expressions.
+   */
+  abstract class SyncAndTryBuilder(cunit: CompilationUnit) extends PlainBodyBuilder(cunit) {
+
+    def genSynchronized(tree: Apply, expectedType: BType): BType = {
+      val Apply(fun, args) = tree
+      val monitor = locals.makeLocal(ObjectReference, "monitor")
+      val monCleanup = new asm.Label
+
+      // if the synchronized block returns a result, store it in a local variable.
+      // Just leaving it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks).
+      val hasResult = (expectedType != UNIT)
+      val monitorResult: Symbol = if (hasResult) locals.makeLocal(tpeTK(args.head), "monitorResult") else null;
+
+      /* ------ (1) pushing and entering the monitor, also keeping a reference to it in a local var. ------ */
+      genLoadQualifier(fun)
+      bc dup ObjectReference
+      locals.store(monitor)
+      emit(asm.Opcodes.MONITORENTER)
+
+      /* ------ (2) Synchronized block.
+       *            Reached by fall-through from (1).
+       *            Protected by:
+       *            (2.a) the EH-version of the monitor-exit, and
+       *            (2.b) whatever protects the whole synchronized expression.
+       * ------
+       */
+      val startProtected = currProgramPoint()
+      registerCleanup(monCleanup)
+      genLoad(args.head, expectedType /* toTypeKind(tree.tpe.resultType) */)
+      unregisterCleanup(monCleanup)
+      if (hasResult) { locals.store(monitorResult) }
+      nopIfNeeded(startProtected)
+      val endProtected = currProgramPoint()
+
+      /* ------ (3) monitor-exit after normal, non-early-return, termination of (2).
+       *            Reached by fall-through from (2).
+       *            Protected by whatever protects the whole synchronized expression.
+       * ------
+       */
+      locals.load(monitor)
+      emit(asm.Opcodes.MONITOREXIT)
+      if (hasResult) { locals.load(monitorResult) }
+      val postHandler = new asm.Label
+      bc goTo postHandler
+
+      /* ------ (4) exception-handler version of monitor-exit code.
+       *            Reached upon abrupt termination of (2).
+       *            Protected by whatever protects the whole synchronized expression.
+       * ------
+       */
+      protect(startProtected, endProtected, currProgramPoint(), ThrowableReference)
+      locals.load(monitor)
+      emit(asm.Opcodes.MONITOREXIT)
+      emit(asm.Opcodes.ATHROW)
+
+      /* ------ (5) cleanup version of monitor-exit code.
+       *            Reached upon early-return from (2).
+       *            Protected by whatever protects the whole synchronized expression.
+       * ------
+       */
+      if (shouldEmitCleanup) {
+        markProgramPoint(monCleanup)
+        locals.load(monitor)
+        emit(asm.Opcodes.MONITOREXIT)
+        pendingCleanups()
+      }
+
+      /* ------ (6) normal exit of the synchronized expression.
+       *            Reached after normal, non-early-return, termination of (3).
+       *            Protected by whatever protects the whole synchronized expression.
+       * ------
+       */
+      mnode visitLabel postHandler
+
+      lineNumber(tree)
+
+      expectedType
+    }
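+
+    /* Rough shape of the code emitted above for `monitor.synchronized { body }` with a non-Unit result
+     * (labels and local names are illustrative only):
+     *          <load monitor>; DUP; <store into m>; MONITORENTER
+     *   try  : <body>; <store result into res>
+     *          <load m>; MONITOREXIT; <load res>; GOTO post
+     *   catch: <load m>; MONITOREXIT; ATHROW        // handler protecting the try range above
+     *   post : ...
+     */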
+
+    /*
+     *  Detects whether no instructions have been emitted since label `lbl` and if so emits a NOP.
+     *  Useful to avoid emitting an empty try-block protected by exception handlers,
+     *  which results in "java.lang.ClassFormatError: Illegal exception table range". See SI-6102.
+     */
+    def nopIfNeeded(lbl: asm.Label) {
+      val noInstructionEmitted = isAtProgramPoint(lbl)
+      if (noInstructionEmitted) { emit(asm.Opcodes.NOP) }
+    }
+
+    /*
+     *  Emitting try-catch is easy, emitting try-catch-finally not quite so.
+     *  A finally-block (which always has type Unit, thus leaving the operand stack unchanged)
+     *  affects control-transfer from protected regions, as follows:
+     *
+     *    (a) `return` statement:
+     *
+     *        First, the value to return (if any) is evaluated.
+     *        Afterwards, all enclosing finally-blocks are run, from innermost to outermost.
+     *        Only then is the return value (if any) returned.
+     *
+     *        Some terminology:
+     *          (a.1) Executing a return statement that is protected
+     *                by one or more finally-blocks is called "early return"
+     *          (a.2) the chain of code sections (a code section for each enclosing finally-block)
+     *                to run upon early returns is called "cleanup chain"
+     *
+     *        As an additional spin, consider a return statement in a finally-block.
+     *        In this case, the value to return depends on how control arrived at that statement:
+     *        in case it arrived via a previous return, the previous return enjoys priority:
+     *        the value to return is given by that statement.
+     *
+     *    (b) A finally-block protects both the try-clause and the catch-clauses.
+     *
+     *           Sidenote:
+     *             A try-clause may contain an empty block. On CLR, a finally-block has special semantics
+     *             regarding Abort interruptions; but on the JVM it's safe to elide an exception-handler
+     *             that protects an "empty" range ("empty" as in "containing NOPs only",
+     *             see `asm.optimiz.DanglingExcHandlers` and SI-6720).
+     *
+     *        This means a finally-block indicates instructions that can be reached:
+     *          (b.1) Upon normal (non-early-returning) completion of the try-clause or a catch-clause
+     *                In this case, the next-program-point is that following the try-catch-finally expression.
+     *          (b.2) Upon early-return initiated in the try-clause or a catch-clause
+     *                In this case, the next-program-point is the enclosing cleanup section (if any), otherwise return.
+     *          (b.3) Upon abrupt termination (due to unhandled exception) of the try-clause or a catch-clause
+     *                In this case, the unhandled exception must be re-thrown after running the finally-block.
+     *
+     *    (c) finally-blocks are implicit to `synchronized` (a finally-block is added to just release the lock)
+     *        that's why `genSynchronized()` too emits cleanup-sections.
+     *
+     *  A number of code patterns can be emitted to realize the intended semantics.
+     *
+     *  A popular alternative (GenICode, javac) consists in duplicating the cleanup-chain at each early-return position.
+     *  The principle at work being that once control is transferred to a cleanup-section,
+     *  control will always stay within the cleanup-chain.
+     *  That is, barring an exception being thrown in a cleanup-section, in which case the enclosing try-block
+     *  (reached via abrupt termination) takes over.
+     *
+     *  The observations above hint at another code layout, less verbose, for the cleanup-chain.
+     *
+     *  The code layout that GenBCode emits takes into account that once a cleanup section has been reached,
+     *  jumping to the next cleanup-section (and so on, until the outermost one) realizes the correct semantics.
+     *
+     *  There is still code duplication in that two cleanup-chains are needed (but this is unavoidable, anyway):
+     *  one for normal control flow and another chain consisting of exception handlers.
+     *  The in-line comments below refer to them as
+     *    - "early-return-cleanups" and
+     *    - "exception-handler-version-of-finally-block" respectively.
+     *
+     */
+    def genLoadTry(tree: Try): BType = {
+
+      val Try(block, catches, finalizer) = tree
+      val kind = tpeTK(tree)
+
+      val caseHandlers: List[EHClause] =
+        for (CaseDef(pat, _, caseBody) <- catches) yield {
+          pat match {
+            case Typed(Ident(nme.WILDCARD), tpt)  => NamelessEH(tpeTK(tpt), caseBody)
+            case Ident(nme.WILDCARD)              => NamelessEH(ThrowableReference,  caseBody)
+            case Bind(_, _)                       => BoundEH   (pat.symbol, caseBody)
+          }
+        }
+
+      // ------ (0) locals used later ------
+
+      /*
+       * `postHandlers` is a program point denoting:
+       *     (a) the finally-clause conceptually reached via fall-through from try-catch-finally
+       *         (in case a finally-block is present); or
+       *     (b) the program point right after the try-catch
+       *         (in case there's no finally-block).
+       * The name choice emphasizes that the code section lies "after all exception handlers",
+       * where "all exception handlers" includes those derived from catch-clauses as well as from finally-blocks.
+       */
+      val postHandlers = new asm.Label
+
+      val hasFinally   = (finalizer != EmptyTree)
+
+      /*
+       * used in the finally-clause reached via fall-through from try-catch, if any.
+       */
+      val guardResult  = hasFinally && (kind != UNIT) && mayCleanStack(finalizer)
+
+      /*
+       * please notice `tmp` has type tree.tpe, while `earlyReturnVar` has the method return type.
+       * Because those two types can be different, dedicated vars are needed.
+       */
+      val tmp          = if (guardResult) locals.makeLocal(tpeTK(tree), "tmp") else null;
+
+      /*
+       * upon early return from the try-body or one of its EHs (but not the EH-version of the finally-clause)
+       * AND hasFinally, a cleanup is needed.
+       */
+      val finCleanup   = if (hasFinally) new asm.Label else null
+
+      /* ------ (1) try-block, protected by:
+       *                       (1.a) the EHs due to case-clauses,   emitted in (2),
+       *                       (1.b) the EH  due to finally-clause, emitted in (3.A)
+       *                       (1.c) whatever protects the whole try-catch-finally expression.
+       * ------
+       */
+
+      val startTryBody = currProgramPoint()
+      registerCleanup(finCleanup)
+      genLoad(block, kind)
+      unregisterCleanup(finCleanup)
+      nopIfNeeded(startTryBody)
+      val endTryBody = currProgramPoint()
+      bc goTo postHandlers
+
+      /* ------ (2) One EH for each case-clause (this does not include the EH-version of the finally-clause)
+       *            An EH in (2) is reached upon abrupt termination of (1).
+       *            An EH in (2) is protected by:
+       *                         (2.a) the EH-version of the finally-clause, if any.
+       *                         (2.b) whatever protects the whole try-catch-finally expression.
+       * ------
+       */
+
+      for (ch <- caseHandlers) {
+
+        // (2.a) emit case clause proper
+        val startHandler = currProgramPoint()
+        var endHandler: asm.Label = null
+        var excType: BType = null
+        registerCleanup(finCleanup)
+        ch match {
+          case NamelessEH(typeToDrop, caseBody) =>
+            bc drop typeToDrop
+            genLoad(caseBody, kind) // adapts caseBody to `kind`, thus it can be stored, if `guardResult`, in `tmp`.
+            nopIfNeeded(startHandler)
+            endHandler = currProgramPoint()
+            excType = typeToDrop
+
+          case BoundEH   (patSymbol,  caseBody) =>
+            // test/files/run/contrib674.scala: a local-var already exists for patSymbol.
+            // rather than creating on first-access, we do it right away to emit debug-info for the created local var.
+            val Local(patTK, _, patIdx, _) = locals.getOrMakeLocal(patSymbol)
+            bc.store(patIdx, patTK)
+            genLoad(caseBody, kind)
+            nopIfNeeded(startHandler)
+            endHandler = currProgramPoint()
+            emitLocalVarScope(patSymbol, startHandler, endHandler)
+            excType = patTK
+        }
+        unregisterCleanup(finCleanup)
+        // (2.b)  mark the try-body as protected by this case clause.
+        protect(startTryBody, endTryBody, startHandler, excType)
+        // (2.c) emit jump to the program point where the finally-clause-for-normal-exit starts, or in effect `after` if no finally-clause was given.
+        bc goTo postHandlers
+
+      }
+
+      /* ------ (3.A) The exception-handler-version of the finally-clause.
+       *              Reached upon abrupt termination of (1) or one of the EHs in (2).
+       *              Protected only by whatever protects the whole try-catch-finally expression.
+       * ------
+       */
+
+      // a note on terminology: this is not "postHandlers", despite appearances.
+      // "postHandlers" refers to the source-code view, and from that perspective both (3.A) and (3.B) are invisible implementation artifacts.
+      if (hasFinally) {
+        nopIfNeeded(startTryBody)
+        val finalHandler = currProgramPoint() // version of the finally-clause reached via unhandled exception.
+        protect(startTryBody, finalHandler, finalHandler, null)
+        val Local(eTK, _, eIdx, _) = locals(locals.makeLocal(ThrowableReference, "exc"))
+        bc.store(eIdx, eTK)
+        emitFinalizer(finalizer, null, isDuplicate = true)
+        bc.load(eIdx, eTK)
+        emit(asm.Opcodes.ATHROW)
+      }
+
+      /* ------ (3.B) Cleanup-version of the finally-clause.
+       *              Reached upon early RETURN from (1) or upon early RETURN from one of the EHs in (2)
+       *              (and only from there, ie reached only upon early RETURN from
+       *               program regions bracketed by registerCleanup/unregisterCleanup).
+       *              Protected only by whatever protects the whole try-catch-finally expression.
+       *
+       *              Given that control arrives to a cleanup section only upon early RETURN,
+       *              the value to return (if any) is always available. Therefore, a further RETURN
+       *              found in a cleanup section is always ignored (a warning is displayed, @see `genReturn()`).
+       *              In order for `genReturn()` to know whether the return statement is enclosed in a cleanup section,
+       *              the variable `insideCleanupBlock` is used.
+       * ------
+       */
+
+      // this is not "postHandlers" either.
+      // `shouldEmitCleanup` can be set, and at the same time this try expression may lack a finally-clause.
+      // In other words, all combinations of (hasFinally, shouldEmitCleanup) are valid.
+      if (hasFinally && shouldEmitCleanup) {
+        val savedInsideCleanup = insideCleanupBlock
+        insideCleanupBlock = true
+        markProgramPoint(finCleanup)
+        // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted.
+        emitFinalizer(finalizer, null, isDuplicate = true)
+        pendingCleanups()
+        insideCleanupBlock = savedInsideCleanup
+      }
+
+      /* ------ (4) finally-clause-for-normal-nonEarlyReturn-exit
+       *            Reached upon normal, non-early-return termination of (1) or of an EH in (2).
+       *            Protected only by whatever protects the whole try-catch-finally expression.
+       * TODO explain what happens upon RETURN contained in (4)
+       * ------
+       */
+
+      markProgramPoint(postHandlers)
+      if (hasFinally) {
+        emitFinalizer(finalizer, tmp, isDuplicate = false) // the only invocation of emitFinalizer with `isDuplicate == false`
+      }
+
+      kind
+    } // end of genLoadTry()
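+
+    /* Putting the numbered sections above together, `try { a } catch { case e: E => b } finally { f }`
+     * is laid out roughly as follows (illustration only):
+     *   (1)   a; GOTO postHandlers
+     *   (2)   handler for E: b; GOTO postHandlers
+     *   (3.A) handler for any Throwable: f (duplicate copy); ATHROW
+     *   (3.B) early-return cleanup, present only if `a` or `b` returns early: f (duplicate copy);
+     *         jump to the next pending cleanup, or RETURN if there is none
+     *   (4)   postHandlers: f (the single non-duplicate copy); fall through
+     */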
+
+    /* if no more pending cleanups, all that remains to do is return. Otherwise jump to the next (outer) pending cleanup. */
+    private def pendingCleanups() {
+      cleanups match {
+        case Nil =>
+          if (earlyReturnVar != null) {
+            locals.load(earlyReturnVar)
+            bc.emitRETURN(locals(earlyReturnVar).tk)
+          } else {
+            bc emitRETURN UNIT
+          }
+          shouldEmitCleanup = false
+
+        case nextCleanup :: _ =>
+          bc goTo nextCleanup
+      }
+    }
+
+    def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: BType) {
+      val excInternalName: String =
+        if (excType == null) null
+        else excType.getInternalName
+      assert(start != end, "protecting a range of zero instructions leads to illegal class format. Solution: add a NOP to that range.")
+      mnode.visitTryCatchBlock(start, end, handler, excInternalName)
+    }
+
+    /* `tmp` (if non-null) is the symbol of the local-var used to preserve the result of the try-body, see `guardResult` */
+    def emitFinalizer(finalizer: Tree, tmp: Symbol, isDuplicate: Boolean) {
+      var saved: immutable.Map[ /* LabelDef */ Symbol, asm.Label ] = null
+      if (isDuplicate) {
+        saved = jumpDest
+        for(ldef <- labelDefsAtOrUnder(finalizer)) {
+          jumpDest -= ldef.symbol
+        }
+      }
+      // when duplicating, the above guarantees new asm.Labels are used for LabelDefs contained in the finalizer (their vars are reused, that's ok)
+      if (tmp != null) { locals.store(tmp) }
+      genLoad(finalizer, UNIT)
+      if (tmp != null) { locals.load(tmp)  }
+      if (isDuplicate) {
+        jumpDest = saved
+      }
+    }
+
+    /* Does this tree have a try-catch block? */
+    def mayCleanStack(tree: Tree): Boolean = tree exists { t => t.isInstanceOf[Try] }
+
+    trait EHClause
+    case class NamelessEH(typeToDrop: BType,  caseBody: Tree) extends EHClause
+    case class BoundEH    (patSymbol: Symbol, caseBody: Tree) extends EHClause
+
+  }
+
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala
new file mode 100644
index 0000000..1eca699
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala
@@ -0,0 +1,880 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package backend.jvm
+
+import scala.tools.asm
+import scala.collection.{ immutable, mutable }
+
+/*
+ *  Utilities to mediate between types as represented in Scala ASTs and ASM trees.
+ *
+ *  @author  Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded
+ *  @version 1.0
+ *
+ */
+abstract class BCodeTypes extends BCodeIdiomatic {
+
+  import global._
+
+  // when compiling the Scala library, some assertions don't hold (e.g., scala.Boolean has null superClass although it's not an interface)
+  val isCompilingStdLib = !(settings.sourcepath.isDefault)
+
+  val srBoxedUnit  = brefType("scala/runtime/BoxedUnit")
+
+  // special names
+  var StringReference             : BType = null
+  var ThrowableReference          : BType = null
+  var jlCloneableReference        : BType = null // java/lang/Cloneable
+  var jlNPEReference              : BType = null // java/lang/NullPointerException
+  var jioSerializableReference    : BType = null // java/io/Serializable
+  var scalaSerializableReference  : BType = null // scala/Serializable
+  var classCastExceptionReference : BType = null // java/lang/ClassCastException
+
+  /* A map from scala primitive type-symbols to BTypes */
+  var primitiveTypeMap: Map[Symbol, BType] = null
+  /* A map from scala type-symbols for Nothing and Null to (runtime version) BTypes */
+  var phantomTypeMap:   Map[Symbol, BType] = null
+  /* Maps the method symbol for a box method to the boxed type of the result.
+   *  For example, the method symbol for `Byte.box()` is mapped to the BType `Ljava/lang/Byte;`. */
+  var boxResultType:    Map[Symbol, BType] = null
+  /* Maps the method symbol for an unbox method to the primitive type of the result.
+   *  For example, the method symbol for `Byte.unbox()` is mapped to the BType BYTE. */
+  var unboxResultType:  Map[Symbol, BType] = null
+
+  var hashMethodSym: Symbol = null // scala.runtime.ScalaRunTime.hash
+
+  var AndroidParcelableInterface: Symbol = null
+  var AndroidCreatorClass       : Symbol = null // this is an inner class, use asmType() to get hold of its BType while tracking in innerClassBufferASM
+
+  var BeanInfoAttr: Symbol = null
+
+  /* The Object => String overload. */
+  var String_valueOf: Symbol = null
+
+  var ArrayInterfaces: Set[Tracked] = null
+
+  // scala.FunctionX and scala.runtime.AbstractFunctionX
+  val FunctionReference                 = new Array[Tracked](definitions.MaxFunctionArity + 1)
+  val AbstractFunctionReference         = new Array[Tracked](definitions.MaxFunctionArity + 1)
+  val abstractFunctionArityMap = mutable.Map.empty[BType, Int]
+
+  var PartialFunctionReference:         BType = null // scala.PartialFunction
+  var AbstractPartialFunctionReference: BType = null // scala.runtime.AbstractPartialFunction
+
+  var BoxesRunTime: BType = null
+
+  /*
+   * must-single-thread
+   */
+  def initBCodeTypes() {
+    import definitions._
+
+    primitiveTypeMap =
+      Map(
+        UnitClass     -> UNIT,
+        BooleanClass  -> BOOL,
+        CharClass     -> CHAR,
+        ByteClass     -> BYTE,
+        ShortClass    -> SHORT,
+        IntClass      -> INT,
+        LongClass     -> LONG,
+        FloatClass    -> FLOAT,
+        DoubleClass   -> DOUBLE
+      )
+
+    phantomTypeMap =
+      Map(
+        NothingClass -> RT_NOTHING, // mapped on purpose to RT_NOTHING, erasing the compile-time vs. runtime distinction for Nothing.
+        NullClass    -> RT_NULL     // ditto for Null.
+      )
+
+    boxResultType =
+      for((csym, msym) <- currentRun.runDefinitions.boxMethod)
+      yield (msym -> classLiteral(primitiveTypeMap(csym)))
+
+    unboxResultType =
+      for((csym, msym) <- currentRun.runDefinitions.unboxMethod)
+      yield (msym -> primitiveTypeMap(csym))
+
+    // boxed classes are looked up in the `exemplars` map by jvmWiseLUB().
+    // Other than that, they aren't needed in `exemplars` (e.g., `isSubtypeOf()` special-cases boxed classes, and similarly for other queries).
+    val boxedClasses = List(BoxedBooleanClass, BoxedCharacterClass, BoxedByteClass, BoxedShortClass, BoxedIntClass, BoxedLongClass, BoxedFloatClass, BoxedDoubleClass)
+    for(csym <- boxedClasses) {
+      val key = brefType(csym.javaBinaryName.toTypeName)
+      val tr  = buildExemplar(key, csym)
+      symExemplars.put(csym, tr)
+      exemplars.put(tr.c, tr)
+    }
+
+    // reversePrimitiveMap = (primitiveTypeMap map { case (s, pt) => (s.tpe, pt) } map (_.swap)).toMap
+
+    hashMethodSym = getMember(ScalaRunTimeModule, nme.hash_)
+
+    // TODO avoid going through missingHook for every line in the REPL: https://github.com/scala/scala/commit/8d962ed4ddd310cc784121c426a2e3f56a112540
+    AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
+    AndroidCreatorClass        = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
+
+    // the following can't be eager vals in Phase constructors:
+    // that might cause cycles before Global has finished initialization.
+    BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
+
+    String_valueOf = {
+      getMember(StringModule, nme.valueOf) filter (sym =>
+        sym.info.paramTypes match {
+          case List(pt) => pt.typeSymbol == ObjectClass
+          case _        => false
+        }
+      )
+    }
+
+    exemplar(JavaCloneableClass)
+    exemplar(JavaSerializableClass)
+    exemplar(SerializableClass)
+
+    StringReference             = exemplar(StringClass).c
+    StringBuilderReference      = exemplar(StringBuilderClass).c
+    ThrowableReference          = exemplar(ThrowableClass).c
+    jlCloneableReference        = exemplar(JavaCloneableClass).c
+    jlNPEReference              = exemplar(NullPointerExceptionClass).c
+    jioSerializableReference    = exemplar(JavaSerializableClass).c
+    scalaSerializableReference  = exemplar(SerializableClass).c
+    classCastExceptionReference = exemplar(ClassCastExceptionClass).c
+
+    /*
+     *  The bytecode emitter special-cases String concatenation, in that three methods of `JCodeMethodN`
+     *  ( `genStartConcat()` , `genStringConcat()` , and `genEndConcat()` )
+     *  don't obtain the method descriptor of the callee via `asmMethodType()` (as normally done)
+     *  but directly emit callsites on StringBuilder using literal constants for method descriptors.
+     *  In order to make sure those method descriptors are available as BTypes, they are initialized here.
+     */
+    BType.getMethodType("()V")                   // necessary for JCodeMethodN.genStartConcat
+    BType.getMethodType("()Ljava/lang/String;")  // necessary for JCodeMethodN.genEndConcat
+
+    PartialFunctionReference         = exemplar(PartialFunctionClass).c
+    AbstractPartialFunctionReference = exemplar(AbstractPartialFunctionClass).c
+    for(idx <- 0 to definitions.MaxFunctionArity) {
+      FunctionReference(idx)         = exemplar(FunctionClass(idx))
+      AbstractFunctionReference(idx) = exemplar(AbstractFunctionClass(idx))
+      abstractFunctionArityMap      += (AbstractFunctionReference(idx).c -> idx)
+    }
+
+    // later a few analyses (e.g. refreshInnerClasses) will look up BTypes based on descriptors in instructions
+    // we make sure those BTypes can be found via lookup as opposed to creating them on the fly.
+    BoxesRunTime = brefType("scala/runtime/BoxesRunTime")
+    asmBoxTo.values   foreach { mnat: MethodNameAndType => BType.getMethodType(mnat.mdesc) }
+    asmUnboxTo.values foreach { mnat: MethodNameAndType => BType.getMethodType(mnat.mdesc) }
+
+  }
+
+  /*
+   * must-single-thread
+   */
+  def clearBCodeTypes() {
+    symExemplars.clear()
+    exemplars.clear()
+  }
+
+  val PublicStatic      = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC
+  val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL
+
+  val strMODULE_INSTANCE_FIELD = nme.MODULE_INSTANCE_FIELD.toString
+
+  // ------------------------------------------------
+  // accessory maps tracking the isInterface, innerClasses, superClass, and supportedInterfaces relations,
+  // so that `conforms()` can be answered without resorting to typer.
+  // ------------------------------------------------
+
+  val exemplars       = new java.util.concurrent.ConcurrentHashMap[BType,  Tracked]
+  val symExemplars    = new java.util.concurrent.ConcurrentHashMap[Symbol, Tracked]
+
+  /*
+   *  Typically, a question about a BType can be answered only by using the BType as lookup key in one or more maps.
+   *  A `Tracked` object saves time by holding together information required to answer those questions:
+   *
+   *    - `sc`     denotes the bytecode-level superclass if any, null otherwise
+   *
+   *    - `ifaces` denotes the interfaces explicitly declared.
+   *               Not included are those transitively supported, but the utility method `allLeafIfaces()` can be used for that.
+   *
+   *    - `innersChain` denotes the containing classes for a non-package-level class `c`, null otherwise.
+   *               Note: the optimizer may inline anonymous closures, thus eliding those inner classes
+   *               (no physical class file is emitted for elided classes).
+   *               Before committing `innersChain` to bytecode, cross-check with the list of elided classes (SI-6546).
+   *
+   *  All methods of this class can-multi-thread
+   */
+  case class Tracked(c: BType, flags: Int, sc: Tracked, ifaces: Array[Tracked], innersChain: Array[InnerClassEntry]) {
+
+    // not a case-field because we initialize it only for JVM classes we emit.
+    private var _directMemberClasses: List[BType] = null
+
+    def directMemberClasses: List[BType] = {
+      assert(_directMemberClasses != null, s"getter directMemberClasses() invoked too early for $c")
+      _directMemberClasses
+    }
+
+    def directMemberClasses_=(bs: List[BType]) {
+      if (_directMemberClasses != null) {
+        // TODO we enter here when both mirror class and plain class are emitted for the same ModuleClassSymbol.
+        assert(_directMemberClasses == bs.sortBy(_.off))
+      }
+      _directMemberClasses = bs.sortBy(_.off)
+    }
+
+    /* `isCompilingStdLib` saves the day when compiling:
+     *     (1) scala.Nothing (the test `c.isNonSpecial` fails for it)
+     *     (2) scala.Boolean (it has null superClass and is not an interface)
+     */
+    assert(c.isNonSpecial || isCompilingStdLib /*(1)*/, s"non well-formed plain-type: $this")
+    assert(
+        if (sc == null) { (c == ObjectReference) || isInterface || isCompilingStdLib /*(2)*/ }
+        else            { (c != ObjectReference) && !sc.isInterface }
+      , "non well-formed plain-type: " + this
+    )
+    assert(ifaces.forall(i => i.c.isNonSpecial && i.isInterface), s"non well-formed plain-type: $this")
+
+    import asm.Opcodes._
+    def hasFlags(mask: Int) = (flags & mask) != 0
+    def isInterface  = hasFlags(ACC_INTERFACE)
+    def isFinal      = hasFlags(ACC_FINAL)
+    def isInnerClass = { innersChain != null }
+    def isLambda = {
+      // ie isLCC || isTraditionalClosureClass
+      isFinal && (c.getSimpleName.contains(tpnme.ANON_FUN_NAME.toString)) && isFunctionType(c)
+    }
+
+    /* can-multi-thread */
+    def superClasses: List[Tracked] = {
+      if (sc == null) Nil else sc :: sc.superClasses
+    }
+
+    /* can-multi-thread */
+    def isSubtypeOf(other: BType): Boolean = {
+      assert(other.isNonSpecial, "so called special cases have to be handled in BCodeTypes.conforms()")
+
+      if (c == other) return true;
+
+      val otherIsIface = exemplars.get(other).isInterface
+
+      if (this.isInterface) {
+        if (other == ObjectReference) return true;
+        if (!otherIsIface) return false;
+      }
+      else {
+        if (sc != null && sc.isSubtypeOf(other)) return true;
+        if (!otherIsIface) return false;
+      }
+
+      var idx = 0
+      while (idx < ifaces.length) {
+        if (ifaces(idx).isSubtypeOf(other)) return true;
+        idx += 1
+      }
+
+      false
+    }
+
+    /*
+     *  The `ifaces` field lists only those interfaces declared by `c`.
+     *  From the set of all supported interfaces, this method discards those which are supertypes of others in the set.
+     */
+    def allLeafIfaces: Set[Tracked] = {
+      if (sc == null) { ifaces.toSet }
+      else { minimizeInterfaces(ifaces.toSet ++ sc.allLeafIfaces) }
+    }
+
+    /*
+     *  This type may not support in its entirety the interface given by the argument; however, it may support some of its super-interfaces.
+     *  We visualize each such supported subset of the argument's functionality as a "branch". This method returns all such branches.
+     *
+     *  In other words, letting Ri denote a branch supported by `ib`,
+     *  this method returns all Ri such that this <:< Ri, with each Ri maximally deep.
+     */
+    def supportedBranches(ib: Tracked): Set[Tracked] = {
+      assert(ib.isInterface, s"Non-interface argument: $ib")
+
+      val result: Set[Tracked] =
+        if (this.isSubtypeOf(ib.c)) { Set(ib) }
+        else { ib.ifaces.toSet[Tracked].flatMap( bi => supportedBranches(bi) ) }
+
+      checkAllInterfaces(result)
+
+      result
+    }
+
+    override def toString = { c.toString }
+
+  }
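+
+  /* Illustrative example (assumed source, for orientation only): for a package-level
+   *     class Foo extends java.io.Serializable
+   * the corresponding exemplar is, roughly,
+   *     Tracked(c = Foo's BType, flags = ACC_PUBLIC | ACC_SUPER, sc = Tracked for java/lang/Object,
+   *             ifaces = Array(Tracked for java/io/Serializable), innersChain = null)
+   * i.e. `sc` is the bytecode-level superclass, `ifaces` the declared interfaces,
+   * and `innersChain` is null because Foo is not an inner class.
+   */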
+
+  /* must-single-thread */
+  final def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
+
+  /* must-single-thread */
+  final def hasInternalName(sym: Symbol) = { sym.isClass || (sym.isModule && !sym.isMethod) }
+
+  /* must-single-thread */
+  def getSuperInterfaces(csym: Symbol): List[Symbol] = {
+
+    // Additional interface parents based on annotations and other cues
+    def newParentForAttr(ann: AnnotationInfo): Symbol = ann.symbol match {
+      case definitions.RemoteAttr => definitions.RemoteInterfaceClass
+      case _                      => NoSymbol
+    }
+
+    /* Drop redundant interfaces (which are implemented by some other parent) from the immediate parents.
+     *  In other words, no two interfaces in the result are related by subtyping.
+     *  This method works on Symbols, a similar one (not duplicate) works on Tracked instances.
+     */
+    def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = {
+      var rest   = lstIfaces
+      var leaves = List.empty[Symbol]
+      while (!rest.isEmpty) {
+        val candidate = rest.head
+        val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
+        if (!nonLeaf) {
+          leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
+        }
+        rest = rest.tail
+      }
+
+      leaves
+    }
+
+    val superInterfaces0: List[Symbol] = csym.mixinClasses
+    val superInterfaces = existingSymbols(superInterfaces0 ++ csym.annotations.map(newParentForAttr)).distinct
+
+    assert(!superInterfaces.contains(NoSymbol), s"found NoSymbol among: ${superInterfaces.mkString}")
+    assert(superInterfaces.forall(s => s.isInterface || s.isTrait), s"found non-interface among: ${superInterfaces.mkString}")
+
+    minimizeInterfaces(superInterfaces)
+  }
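+
+  /* Illustrative example: if the collected super-interfaces were {I1, I2} with trait I2 extending
+   * trait I1, then minimizeInterfaces keeps only I2; I1 is dropped because it is already implied by I2.
+   */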
+
+  /*
+   * Records the superClass and supportedInterfaces relations,
+   * so that afterwards queries can be answered without resorting to typer.
+   * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that.
+   * On the other hand, this method does record the inner-class status of the argument, via `buildExemplar()`.
+   *
+   * must-single-thread
+   */
+  final def exemplar(csym0: Symbol): Tracked = {
+    assert(csym0 != NoSymbol, "NoSymbol can't be tracked")
+
+    val csym = {
+      if (csym0.isJavaDefined && csym0.isModuleClass) csym0.linkedClassOfClass
+      else if (csym0.isModule) csym0.moduleClass
+      else csym0 // we track only module-classes and plain-classes
+    }
+
+    assert(!primitiveTypeMap.contains(csym) || isCompilingStdLib, s"primitive types not tracked here: ${csym.fullName}")
+    assert(!phantomTypeMap.contains(csym), s"phantom types not tracked here: ${csym.fullName}")
+
+    val opt = symExemplars.get(csym)
+    if (opt != null) {
+      return opt
+    }
+
+    val key = brefType(csym.javaBinaryName.toTypeName)
+    assert(key.isNonSpecial || isCompilingStdLib, s"Not a class to track: ${csym.fullName}")
+
+    // TODO accommodate the fix for SI-5031 of https://github.com/scala/scala/commit/0527b2549bcada2fda2201daa630369b377d0877
+    // TODO Weaken this assertion? buildExemplar() needs to be updated, too. In the meantime, pos/t5031_3 has been moved to test/disabled/pos.
+    val whatWasInExemplars = exemplars.get(key)
+    assert(whatWasInExemplars == null, "Maps `symExemplars` and `exemplars` got out of sync.")
+    val tr = buildExemplar(key, csym)
+    symExemplars.put(csym, tr)
+    if (csym != csym0) { symExemplars.put(csym0, tr) }
+    exemplars.put(tr.c, tr) // tr.c is the hash-consed, internalized, canonical representative for csym's key.
+    tr
+  }
+
+  val EMPTY_TRACKED_ARRAY  = Array.empty[Tracked]
+
+  /*
+   * must-single-thread
+   */
+  private def buildExemplar(key: BType, csym: Symbol): Tracked = {
+    val sc =
+      if (csym.isImplClass) definitions.ObjectClass
+      else csym.superClass
+    assert(
+      if (csym == definitions.ObjectClass)
+        sc == NoSymbol
+      else if (csym.isInterface)
+        sc == definitions.ObjectClass
+      else
+        ((sc != NoSymbol) && !sc.isInterface) || isCompilingStdLib,
+      "superClass out of order"
+    )
+    val ifaces    = getSuperInterfaces(csym) map exemplar
+    val ifacesArr =
+      if (ifaces.isEmpty) EMPTY_TRACKED_ARRAY
+      else {
+        val arr = new Array[Tracked](ifaces.size)
+        ifaces.copyToArray(arr)
+        arr
+      }
+
+    val flags = mkFlags(
+      javaFlags(csym),
+      if (isDeprecated(csym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+    )
+
+    val tsc = if (sc == NoSymbol) null else exemplar(sc)
+
+    val innersChain = saveInnerClassesFor(csym, key)
+
+    Tracked(key, flags, tsc, ifacesArr, innersChain)
+  }
+
+  // ---------------- utilities around interfaces represented by Tracked instances. ----------------
+
+  /*  Drop redundant interfaces (those which are implemented by some other interface in the set).
+   *  In other words, no two interfaces in the result are related by subtyping.
+   *  This method works on Tracked elements, a similar one (not duplicate) works on Symbols.
+   */
+  def minimizeInterfaces(lstIfaces: Set[Tracked]): Set[Tracked] = {
+    checkAllInterfaces(lstIfaces)
+    var rest   = lstIfaces.toList
+    var leaves = List.empty[Tracked]
+    while (!rest.isEmpty) {
+      val candidate = rest.head
+      val nonLeaf = leaves exists { leaf => leaf.isSubtypeOf(candidate.c) }
+      if (!nonLeaf) {
+        leaves = candidate :: (leaves filterNot { leaf => candidate.isSubtypeOf(leaf.c) })
+      }
+      rest = rest.tail
+    }
+
+    leaves.toSet
+  }
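+
+  /* Illustrative example: for a set holding the Tracked instances of java/util/List and
+   * java/util/Collection (List being a subinterface of Collection), the result keeps only
+   * the Tracked instance of java/util/List; Collection is dropped as it is implied by a leaf.
+   */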
+
+  def allInterfaces(is: Iterable[Tracked]): Boolean = { is forall { i => i.isInterface } }
+  def nonInterfaces(is: Iterable[Tracked]): Iterable[Tracked] = { is filterNot { i => i.isInterface } }
+
+  def checkAllInterfaces(ifaces: Iterable[Tracked]) {
+    assert(allInterfaces(ifaces), s"Non-interfaces: ${nonInterfaces(ifaces).mkString}")
+  }
+
+  /*
+   * Subtype check `a <:< b` on BTypes that takes into account the JVM built-in numeric promotions (e.g. BYTE to INT).
+   * Its operation can be visualized more easily in terms of the Java bytecode type hierarchy.
+   * This method used to be called, in the ICode world, TypeKind.<:<()
+   *
+   * can-multi-thread
+   */
+  final def conforms(a: BType, b: BType): Boolean = {
+    if (a.isArray) { // may be null
+      /* Array subtyping is covariant here, as in Java bytecode. Also necessary for Java interop. */
+      if ((b == jlCloneableReference)     ||
+          (b == jioSerializableReference) ||
+          (b == AnyRefReference))    { true  }
+      else if (b.isArray)            { conforms(a.getComponentType, b.getComponentType) }
+      else                           { false }
+    }
+    else if (a.isBoxed) { // may be null
+      if (b.isBoxed)                 { a == b }
+      else if (b == AnyRefReference) { true   }
+      else if (!(b.hasObjectSort))   { false  }
+      else                           { exemplars.get(a).isSubtypeOf(b) } // e.g., java/lang/Double conforms to java/lang/Number
+    }
+    else if (a.isNullType) { // known to be null
+      if (b.isNothingType)      { false }
+      else if (b.isValueType)   { false }
+      else                      { true  }
+    }
+    else if (a.isNothingType) { // known to be Nothing
+      true
+    }
+    else if (a.isUnitType) {
+      b.isUnitType
+    }
+    else if (a.hasObjectSort) { // may be null
+      if (a.isNothingType)      { true  }
+      else if (b.hasObjectSort) { exemplars.get(a).isSubtypeOf(b) }
+      else if (b.isArray)       { a.isNullType } // documentation only, because `if(a.isNullType)` (above) covers this case already.
+      else                      { false }
+    }
+    else {
+
+      def msg = s"(a: $a, b: $b)"
+
+      assert(a.isNonUnitValueType, s"a isn't a non-Unit value type. $msg")
+      assert(b.isValueType, s"b isn't a value type. $msg")
+
+      (a eq b) || (a match {
+        case BOOL | BYTE | SHORT | CHAR => b == INT || b == LONG // TODO Actually, BOOL does NOT conform to LONG. Even with adapt().
+        case _                          => a == b
+      })
+    }
+  }
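+
+  /* A few sample answers, read off the branches above (illustrative only):
+   *   - conforms(BYTE, INT) == true (JVM numeric promotion), while conforms(INT, BYTE) == false;
+   *   - an array BType conforms to jlCloneableReference, jioSerializableReference and AnyRefReference;
+   *   - a boxed BType conforms to itself, to AnyRefReference, and to its bytecode-level supertypes
+   *     via `exemplars` (e.g. java/lang/Double conforms to java/lang/Number, as noted above).
+   */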
+
+  /* The maxValueType of (Char, Byte) and of (Char, Short) is Int, to encompass the negative values of Byte and Short. See ticket #2087.
+   *
+   * can-multi-thread
+   */
+  def maxValueType(a: BType, other: BType): BType = {
+    assert(a.isValueType, "maxValueType() is defined only for 1st arg valuetypes (2nd arg doesn't matter).")
+
+    def uncomparable: Nothing = {
+      abort(s"Uncomparable BTypes: $a with $other")
+    }
+
+    if (a.isNothingType)      return other;
+    if (other.isNothingType)  return a;
+    if (a == other)           return a;
+
+    a match {
+
+      case UNIT => uncomparable
+      case BOOL => uncomparable
+
+      case BYTE =>
+        if (other == CHAR)             INT
+        else if (other.isNumericType)  other
+        else                           uncomparable
+
+      case SHORT =>
+        other match {
+          case BYTE                          => SHORT
+          case CHAR                          => INT
+          case INT  | LONG  | FLOAT | DOUBLE => other
+          case _                             => uncomparable
+        }
+
+      case CHAR =>
+        other match {
+          case BYTE | SHORT                 => INT
+          case INT  | LONG | FLOAT | DOUBLE => other
+          case _                            => uncomparable
+        }
+
+      case INT =>
+        other match {
+          case BYTE | SHORT | CHAR   => INT
+          case LONG | FLOAT | DOUBLE => other
+          case _                     => uncomparable
+        }
+
+      case LONG =>
+        if (other.isIntegralType)   LONG
+        else if (other.isRealType)  DOUBLE
+        else                        uncomparable
+
+      case FLOAT =>
+        if (other == DOUBLE)           DOUBLE
+        else if (other.isNumericType)  FLOAT
+        else                           uncomparable
+
+      case DOUBLE =>
+        if (other.isNumericType)  DOUBLE
+        else                      uncomparable
+
+      case _ => uncomparable
+    }
+  }
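+
+  /* Sample results, read off the match above (illustrative only):
+   *   maxValueType(CHAR, BYTE) == INT and maxValueType(CHAR, SHORT) == INT (see the ticket above),
+   *   maxValueType(INT, FLOAT) == FLOAT, and maxValueType(LONG, FLOAT) == DOUBLE.
+   */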
+
+  /* Takes promotions of numeric primitives into account.
+   *
+   *  can-multi-thread
+   */
+  final def maxType(a: BType, other: BType): BType = {
+    if (a.isValueType) { maxValueType(a, other) }
+    else {
+      if (a.isNothingType)     return other;
+      if (other.isNothingType) return a;
+      if (a == other)          return a;
+       // Approximate `lub`. The common type of two references is always AnyRef.
+       // For 'real' least upper bound w.r.t. subclassing use method 'lub'.
+      assert(a.isArray || a.isBoxed || a.hasObjectSort, s"This is not a valuetype and it's not something else, what is it? $a")
+      // TODO For some reason, ICode thinks `REFERENCE(...).maxType(BOXED(whatever))` is `uncomparable`. Here, that has maxType AnyRefReference.
+      //      BTW, when swapping arguments, ICode says BOXED(whatever).maxType(REFERENCE(...)) == AnyRefReference, so I guess the above was an oversight in REFERENCE.maxType()
+      if (other.isRefOrArrayType) { AnyRefReference }
+      else                        { abort(s"Uncomparable BTypes: $a with $other") }
+    }
+  }
+
+  /*
+   *  Whether the argument is a subtype of
+   *    scala.PartialFunction[-A, +B] extends (A => B)
+   *  N.B.: this method returns true for a scala.runtime.AbstractPartialFunction
+   *
+   *  can-multi-thread
+   */
+  def isPartialFunctionType(t: BType): Boolean = {
+    (t.hasObjectSort) && exemplars.get(t).isSubtypeOf(PartialFunctionReference)
+  }
+
+  /*
+   *  Whether the argument is a subtype of scala.FunctionX where 0 <= X <= definitions.MaxFunctionArity
+   *
+   *  can-multi-thread
+   */
+  def isFunctionType(t: BType): Boolean = {
+    if (!t.hasObjectSort) return false
+    var idx = 0
+    val et: Tracked = exemplars.get(t)
+    while (idx <= definitions.MaxFunctionArity) {
+      if (et.isSubtypeOf(FunctionReference(idx).c)) {
+        return true
+      }
+      idx += 1
+    }
+    false
+  }
+
+  /*
+   * must-single-thread
+   */
+  def isTopLevelModule(sym: Symbol): Boolean = {
+    exitingPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
+  }
+
+  /*
+   * must-single-thread
+   */
+  def isStaticModule(sym: Symbol): Boolean = {
+    sym.isModuleClass && !sym.isImplClass && !sym.isLifted
+  }
+
+  // ---------------------------------------------------------------------
+  // ---------------- InnerClasses attribute (JVMS 4.7.6) ----------------
+  // ---------------------------------------------------------------------
+
+  val INNER_CLASSES_FLAGS =
+    (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE   | asm.Opcodes.ACC_PROTECTED |
+     asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT  | asm.Opcodes.ACC_FINAL)
+
+  /*
+   * @param name the internal name of an inner class.
+   * @param outerName the internal name of the class to which the inner class belongs.
+   *                  May be `null` for non-member inner classes (ie for a Java local class or a Java anonymous class).
+   * @param innerName the (simple) name of the inner class inside its enclosing class. It's `null` for anonymous inner classes.
+   * @param access the access flags of the inner class as originally declared in the enclosing class.
+   */
+  case class InnerClassEntry(name: String, outerName: String, innerName: String, access: Int) {
+    assert(name != null, "Null isn't good as class name in an InnerClassEntry.")
+  }
+
+  /* For a given symbol, return the symbol corresponding to the class that should be declared as an inner class.
+   *
+   *  For example:
+   *  class A {
+   *    class B
+   *    object C
+   *  }
+   *
+   *  then this method will return:
+   *    NoSymbol for A,
+   *    the same symbol for A.B (corresponding to A$B class), and
+   *    A$C$ symbol for A.C.
+   *
+   * must-single-thread
+   */
+  def innerClassSymbolFor(s: Symbol): Symbol =
+    if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol
+
+  /*
+   *  Computes the chain of inner-class (over the is-member-of relation) for the given argument.
+   *  The resulting chain will be cached in `exemplars`.
+   *
+   *  The chain thus cached is valid during this compiler run, see in contrast
+   *  `innerClassBufferASM` for a cache that is valid only for the class being emitted.
+   *
+   *  The argument can be any symbol, but given that this method is invoked only from `buildExemplar()`,
+   *  in practice it has been vetted to be a class-symbol.
+   *
+   *  Returns:
+   *
+   *    - a non-empty array of entries for an inner-class argument.
+   *      The array's first element is the outermost top-level class,
+   *      the array's last element corresponds to csym.
+   *
+   *    - null otherwise.
+   *
+   *  This method does not add to `innerClassBufferASM`, use instead `exemplar()` for that.
+   *
+   *  must-single-thread
+   */
+  final def saveInnerClassesFor(csym: Symbol, csymTK: BType): Array[InnerClassEntry] = {
+
+    val ics = innerClassSymbolFor(csym)
+    if (ics == NoSymbol) {
+      return null
+    }
+    assert(ics == csym, s"Disagreement between innerClassSymbolFor() and exemplar()'s tracked symbol for the same input: ${csym.fullName}")
+
+    var chain: List[Symbol] = Nil
+    var x = ics
+    while (x ne NoSymbol) {
+      assert(x.isClass, s"not a class symbol: ${x.fullName}")
+      val isInner = !x.rawowner.isPackageClass
+      if (isInner) {
+        chain ::= x
+        x = innerClassSymbolFor(x.rawowner)
+      } else {
+        x = NoSymbol
+      }
+    }
+
+    // now that we have all of `ics`, `csym`, and soon the inner-classes chain, it's too tempting not to cache.
+    if (chain.isEmpty) { null }
+    else {
+      val arr = new Array[InnerClassEntry](chain.size)
+      (chain map toInnerClassEntry).copyToArray(arr)
+
+      arr
+    }
+  }
+
+  /*
+   * must-single-thread
+   */
+  private def toInnerClassEntry(innerSym: Symbol): InnerClassEntry = {
+
+    /* The outer name for this inner class. Note that it returns null
+     *  when the inner class should not get an index in the constant pool.
+     *  That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
+     */
+    def outerName(innerSym: Symbol): Name = {
+      if (innerSym.originalEnclosingMethod != NoSymbol)
+        null
+      else {
+        val outerName = innerSym.rawowner.javaBinaryName
+        if (isTopLevelModule(innerSym.rawowner)) nme.stripModuleSuffix(outerName)
+        else outerName
+      }
+    }
+
+    def innerName(innerSym: Symbol): String = {
+      if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
+        null
+      else
+        innerSym.rawname + innerSym.moduleSuffix
+    }
+
+    val flagsWithFinal: Int = mkFlags(
+      if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
+      javaFlags(innerSym),
+      if (isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
+    ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
+    val flags = if (innerSym.isModuleClass) flagsWithFinal & ~asm.Opcodes.ACC_FINAL else flagsWithFinal // For SI-5676, object overriding.
+
+    val jname = innerSym.javaBinaryName.toString // never null
+    val oname = { // null when method-enclosed
+      val on = outerName(innerSym)
+      if (on == null) null else on.toString
+    }
+    val iname = { // null for anonymous inner class
+      val in = innerName(innerSym)
+      if (in == null) null else in.toString
+    }
+
+    InnerClassEntry(jname, oname, iname, flags)
+  }
+
+  // --------------------------------------------
+  // ---------------- Java flags ----------------
+  // --------------------------------------------
+
+  /*
+   * can-multi-thread
+   */
+  final def hasPublicBitSet(flags: Int) = ((flags & asm.Opcodes.ACC_PUBLIC) != 0)
+
+  /*
+   * must-single-thread
+   */
+  final def isRemote(s: Symbol) = (s hasAnnotation definitions.RemoteAttr)
+
+  /*
+   * Return the Java modifiers for the given symbol.
+   * Java modifiers for classes:
+   *  - public, abstract, final, strictfp (not used)
+   * for interfaces:
+   *  - the same as for classes, without 'final'
+   * for fields:
+   *  - public, private (*)
+   *  - static, final
+   * for methods:
+   *  - the same as for fields, plus:
+   *  - abstract, synchronized (not used), strictfp (not used), native (not used)
+   *
+   *  (*) protected cannot be used, since inner classes 'see' protected members,
+   *      and they would fail verification after being lifted.
+   *
+   * must-single-thread
+   */
+  def javaFlags(sym: Symbol): Int = {
+    // constructors of module classes should be private
+    // PP: why are they only being marked private at this stage and not earlier?
+    val privateFlag =
+      sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner))
+
+    // Final: the only fields which can receive ACC_FINAL are eager vals.
+    // Neither vars nor lazy vals can, because:
+    //
+    // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3
+    // "Another problem is that the specification allows aggressive
+    // optimization of final fields. Within a thread, it is permissible to
+    // reorder reads of a final field with those modifications of a final
+    // field that do not take place in the constructor."
+    //
+    // A var or lazy val which is marked final still has meaning to the
+    // scala compiler. The word final is heavily overloaded unfortunately;
+    // for us it means "not overridable". At present you can't override
+    // vars regardless; this may change.
+    //
+    // The logic does not check .isFinal (which checks flags for the FINAL flag,
+    // and includes symbols marked lateFINAL); instead it inspects rawflags so
+    // that lateFINAL is excluded. Such symbols are eligible for inlining, but to
+    // avoid breaking proxy software which depends on subclassing, we do not
+    // emit ACC_FINAL.
+    // Nested objects won't receive ACC_FINAL in order to allow for their overriding.
+
+    val finalFlag = (
+         (((sym.rawflags & symtab.Flags.FINAL) != 0) || isTopLevelModule(sym))
+      && !sym.enclClass.isInterface
+      && !sym.isClassConstructor
+      && !sym.isMutable // lazy vals and vars both
+    )
+
+    // Primitives are "abstract final" to prohibit instantiation
+    // without having to provide any implementations, but that is an
+    // illegal combination of modifiers at the bytecode level, so we
+    // suppress ACC_FINAL when ACC_ABSTRACT is present.
+    import asm.Opcodes._
+    mkFlags(
+      if (privateFlag) ACC_PRIVATE else ACC_PUBLIC,
+      if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
+      if (sym.isInterface) ACC_INTERFACE else 0,
+      if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
+      if (sym.isStaticMember) ACC_STATIC else 0,
+      if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
+      if (sym.isArtifact) ACC_SYNTHETIC else 0,
+      if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
+      if (sym.hasEnumFlag) ACC_ENUM else 0,
+      if (sym.isVarargsMethod) ACC_VARARGS else 0,
+      if (sym.hasFlag(symtab.Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0
+    )
+  }
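+
+  /* Worked example (illustrative only): for a top-level `object Foo`, the module class gets
+   * ACC_PUBLIC | ACC_FINAL | ACC_SUPER (isTopLevelModule implies finalFlag), while its primary
+   * constructor gets ACC_PRIVATE (constructors of top-level module classes are made private here).
+   */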
+
+  /*
+   * must-single-thread
+   */
+  def javaFieldFlags(sym: Symbol) = {
+    javaFlags(sym) | mkFlags(
+      if (sym hasAnnotation definitions.TransientAttr) asm.Opcodes.ACC_TRANSIENT else 0,
+      if (sym hasAnnotation definitions.VolatileAttr)  asm.Opcodes.ACC_VOLATILE  else 0,
+      if (sym.isMutable) 0 else asm.Opcodes.ACC_FINAL
+    )
+  }
+
+} // end of class BCodeTypes
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index fb1f45f..8e6c092 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -6,14 +6,14 @@
 package scala.tools.nsc
 package backend.jvm
 
-import java.io.{ DataOutputStream, FileOutputStream, OutputStream, File => JFile }
+import java.io.{ DataOutputStream, FileOutputStream, IOException, OutputStream, File => JFile }
 import scala.tools.nsc.io._
-import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.util.JavapClass
-import java.util.jar.{ JarEntry, JarOutputStream, Attributes }
-import Attributes.Name
+import java.util.jar.Attributes.Name
 import scala.language.postfixOps
 
+/** Can't output a file due to the state of the file system. */
+class FileConflictException(msg: String, val file: AbstractFile) extends IOException(msg)
+
 /** For the last mile: turning generated bytecode in memory into
  *  something you can use.  Has implementations for writing to class
  *  files, jars, and disassembled/javap output.
@@ -22,22 +22,37 @@ trait BytecodeWriters {
   val global: Global
   import global._
 
-  private def outputDirectory(sym: Symbol): AbstractFile = (
-    settings.outputDirs.outputDirFor(beforeFlatten(sym.sourceFile))
-  )
-  private def getFile(base: AbstractFile, /*cls.getName()*/ clsName: String, suffix: String): AbstractFile = {
+  def outputDirectory(sym: Symbol): AbstractFile =
+    settings.outputDirs outputDirFor enteringFlatten(sym.sourceFile)
+
+  /**
+   * @param clsName cls.getName
+   */
+  def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
+    def ensureDirectory(dir: AbstractFile): AbstractFile =
+      if (dir.isDirectory) dir
+      else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory", dir)
     var dir = base
     val pathParts = clsName.split("[./]").toList
-    for (part <- pathParts.init) {
-      dir = dir.subdirectoryNamed(part)
-    }
-    dir.fileNamed(pathParts.last + suffix)
+    for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part
+    ensureDirectory(dir) fileNamed pathParts.last + suffix
   }
-  private def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
+  def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
     getFile(outputDirectory(sym), clsName, suffix)
 
+  def factoryNonJarBytecodeWriter(): BytecodeWriter = {
+    val emitAsmp  = settings.Ygenasmp.isSetByUser
+    val doDump    = settings.Ydumpclasses.isSetByUser
+    (emitAsmp, doDump) match {
+      case (false, false) => new ClassBytecodeWriter { }
+      case (false, true ) => new ClassBytecodeWriter with DumpBytecodeWriter { }
+      case (true,  false) => new ClassBytecodeWriter with AsmpBytecodeWriter
+      case (true,  true ) => new ClassBytecodeWriter with AsmpBytecodeWriter with DumpBytecodeWriter { }
+    }
+  }
+
   trait BytecodeWriter {
-    def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol): Unit
+    def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit
     def close(): Unit = ()
   }
 
@@ -48,7 +63,9 @@ trait BytecodeWriters {
     )
     val writer = new Jar(jfile).jarWriter(jarMainAttrs: _*)
 
-    def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
+    def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+      assert(outfile == null,
+             "The outfile formal param is there just because ClassBytecodeWriter overrides this method and uses it.")
       val path = jclassName + ".class"
       val out  = writer.newOutputStream(path)
 
@@ -60,33 +77,47 @@ trait BytecodeWriters {
     override def close() = writer.close()
   }
 
-  trait JavapBytecodeWriter extends BytecodeWriter {
-    val baseDir = Directory(settings.Ygenjavap.value).createDirectory()
-
-    def emitJavap(bytes: Array[Byte], javapFile: io.File) {
-      val pw    = javapFile.printWriter()
-      val javap = new JavapClass(ScalaClassLoader.appLoader, pw) {
-        override def findBytes(path: String): Array[Byte] = bytes
+  /*
+   * The ASM textual representation for bytecode overcomes disadvantages of javap output in three areas:
+   *    (a) pickle dingbats undecipherable to the naked eye;
+   *    (b) two constant pools, while having identical contents, are displayed differently due to physical layout;
+   *    (c) stack maps (classfile version 50 and up) are displayed in encoded form by javap,
+   *        whereas their expansion by ASM is more readable.
+   *
+   */
+  trait AsmpBytecodeWriter extends BytecodeWriter {
+    import scala.tools.asm
+
+    private val baseDir = Directory(settings.Ygenasmp.value).createDirectory()
+
+    private def emitAsmp(jclassBytes: Array[Byte], asmpFile: io.File) {
+      val pw = asmpFile.printWriter()
+      try {
+        val cnode = new asm.tree.ClassNode()
+        val cr    = new asm.ClassReader(jclassBytes)
+        cr.accept(cnode, 0)
+        val trace = new scala.tools.asm.util.TraceClassVisitor(new java.io.PrintWriter(new java.io.StringWriter()))
+        cnode.accept(trace)
+        trace.p.print(pw)
       }
-
-      try javap(Seq("-verbose", "dummy")) foreach (_.show())
       finally pw.close()
     }
-    abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
-      super.writeClass(label, jclassName, jclassBytes, sym)
 
-      val bytes     = getFile(sym, jclassName, ".class").toByteArray
-      val segments  = jclassName.split("[./]")
-      val javapFile = segments.foldLeft(baseDir: Path)(_ / _) changeExtension "javap" toFile;
+    abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+      super.writeClass(label, jclassName, jclassBytes, outfile)
+
+      val segments = jclassName.split("[./]")
+      val asmpFile = segments.foldLeft(baseDir: Path)(_ / _) changeExtension "asmp" toFile;
 
-      javapFile.parent.createDirectory()
-      emitJavap(bytes, javapFile)
+      asmpFile.parent.createDirectory()
+      emitAsmp(jclassBytes, asmpFile)
     }
   }
 
   trait ClassBytecodeWriter extends BytecodeWriter {
-    def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
-      val outfile   = getFile(sym, jclassName, ".class")
+    def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+      assert(outfile != null,
+             "Precisely this override requires its invoker to hand out a non-null AbstractFile.")
       val outstream = new DataOutputStream(outfile.bufferedOutput)
 
       try outstream.write(jclassBytes, 0, jclassBytes.length)
@@ -98,11 +129,11 @@ trait BytecodeWriters {
   trait DumpBytecodeWriter extends BytecodeWriter {
     val baseDir = Directory(settings.Ydumpclasses.value).createDirectory()
 
-    abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
-      super.writeClass(label, jclassName, jclassBytes, sym)
+    abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+      super.writeClass(label, jclassName, jclassBytes, outfile)
 
       val pathName = jclassName
-      var dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile;
+      val dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile;
       dumpFile.parent.createDirectory()
       val outstream = new DataOutputStream(new FileOutputStream(dumpFile.path))
 
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 3712745..a389816 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -3,17 +3,16 @@
  * @author  Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package backend.jvm
 
-import java.nio.ByteBuffer
 import scala.collection.{ mutable, immutable }
 import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
 import scala.tools.nsc.symtab._
-import scala.tools.nsc.io.AbstractFile
-
 import scala.tools.asm
 import asm.Label
+import scala.annotation.tailrec
 
 /**
  *  @author  Iulian Dragos (version 1.0, FJBG-based implementation)
@@ -21,17 +20,31 @@ import asm.Label
  *
  * Documentation at http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/2012Q2/GenASM.pdf
  */
-abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
+abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { self =>
   import global._
   import icodes._
   import icodes.opcodes._
   import definitions._
 
+  // Strangely I can't find this in the asm code.
+  // The JVM limit is 255 parameter slots, but we reserve 1 for "this".
+  final val MaximumJvmParameters = 254
+
   val phaseName = "jvm"
 
   /** Create a new phase */
   override def newPhase(p: Phase): Phase = new AsmPhase(p)
 
+  /** From the reference documentation of the Android SDK:
+   *  The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`.
+   *  Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`,
+   *  which is an object implementing the `Parcelable.Creator` interface.
+   */
+  private val androidFieldName = newTermName("CREATOR")
+
+  private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
+  private lazy val AndroidCreatorClass        = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
+
   /** JVM code generation phase
    */
   class AsmPhase(prev: Phase) extends ICodePhase(prev) {
@@ -39,7 +52,25 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
     override def erasedTypes = true
     def apply(cls: IClass) = sys.error("no implementation")
 
-    val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
+    // An AsmPhase starts and ends within a Run, thus the caches in question will get populated and cleared within a Run, too (SI-7422).
+    javaNameCache.clear()
+    javaNameCache ++= List(
+      NothingClass        -> binarynme.RuntimeNothing,
+      RuntimeNothingClass -> binarynme.RuntimeNothing,
+      NullClass           -> binarynme.RuntimeNull,
+      RuntimeNullClass    -> binarynme.RuntimeNull
+    )
+
+    // unlike javaNameCache, reverseJavaName contains entries only for class symbols and their internal names.
+    reverseJavaName.clear()
+    reverseJavaName ++= List(
+      binarynme.RuntimeNothing.toString() -> RuntimeNothingClass, // RuntimeNothingClass is the bytecode-level return type of Scala methods with Nothing return-type.
+      binarynme.RuntimeNull.toString()    -> RuntimeNullClass
+    )
+
+    // Lazy val; can't have eager vals in Phase constructors which may
+    // cause cycles before Global has finished initialization.
+    lazy val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
 
     private def initBytecodeWriter(entryPoints: List[IClass]): BytecodeWriter = {
       settings.outputDirs.getSingleOutput match {
@@ -61,29 +92,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
           new DirectToJarfileWriter(f.file)
 
-        case _                               =>
-          if (settings.Ygenjavap.isDefault) {
-            if(settings.Ydumpclasses.isDefault)
-              new ClassBytecodeWriter { }
-            else
-              new ClassBytecodeWriter with DumpBytecodeWriter { }
-          }
-          else new ClassBytecodeWriter with JavapBytecodeWriter { }
-
-          // TODO A ScalapBytecodeWriter could take asm.util.Textifier as starting point.
-          //      Three areas where javap ouput is less than ideal (e.g. when comparing versions of the same classfile) are:
-          //        (a) unreadable pickle;
-          //        (b) two constant pools, while having identical contents, are displayed differently due to physical layout.
-          //        (c) stack maps (classfile version 50 and up) are displayed in encoded form by javap, their expansion makes more sense instead.
+        case _ => factoryNonJarBytecodeWriter()
       }
     }
 
     override def run() {
 
-      if (settings.debug.value)
+      if (settings.debug)
         inform("[running phase " + name + " on icode]")
 
-      if (settings.Xdce.value)
+      if (settings.Xdce)
         for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
           log(s"Optimizer eliminated ${sym.fullNameString}")
           deadCode.elidedClosures += sym
@@ -100,41 +118,41 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
           "Such classes will overwrite one another on case-insensitive filesystems.")
       }
 
-      debuglog("Created new bytecode generator for " + classes.size + " classes.")
+      debuglog(s"Created new bytecode generator for ${classes.size} classes.")
       val bytecodeWriter  = initBytecodeWriter(sortedClasses filter isJavaEntryPoint)
-      val plainCodeGen    = new JPlainBuilder(bytecodeWriter)
-      val mirrorCodeGen   = new JMirrorBuilder(bytecodeWriter)
-      val beanInfoCodeGen = new JBeanInfoBuilder(bytecodeWriter)
-
-      while(!sortedClasses.isEmpty) {
-        val c = sortedClasses.head
+      val needsOutfile    = bytecodeWriter.isInstanceOf[ClassBytecodeWriter]
+      val plainCodeGen    = new JPlainBuilder(   bytecodeWriter, needsOutfile)
+      val mirrorCodeGen   = new JMirrorBuilder(  bytecodeWriter, needsOutfile)
+      val beanInfoCodeGen = new JBeanInfoBuilder(bytecodeWriter, needsOutfile)
 
+      def emitFor(c: IClass) {
         if (isStaticModule(c.symbol) && isTopLevelModule(c.symbol)) {
-          if (c.symbol.companionClass == NoSymbol) {
-            mirrorCodeGen.genMirrorClass(c.symbol, c.cunit)
-          } else {
-            log("No mirror class for module with linked class: " + c.symbol.fullName)
-          }
+          if (c.symbol.companionClass == NoSymbol)
+            mirrorCodeGen genMirrorClass (c.symbol, c.cunit)
+          else
+            log(s"No mirror class for module with linked class: ${c.symbol.fullName}")
         }
+        plainCodeGen genClass c
+        if (c.symbol hasAnnotation BeanInfoAttr) beanInfoCodeGen genBeanInfoClass c
+      }
 
-        plainCodeGen.genClass(c)
-
-        if (c.symbol hasAnnotation BeanInfoAttr) {
-          beanInfoCodeGen.genBeanInfoClass(c)
+      while (!sortedClasses.isEmpty) {
+        val c = sortedClasses.head
+        try emitFor(c)
+        catch {
+          case e: FileConflictException =>
+            c.cunit.error(c.symbol.pos, s"error writing ${c.symbol}: ${e.getMessage}")
         }
-
         sortedClasses = sortedClasses.tail
         classes -= c.symbol // GC opportunity
       }
 
       bytecodeWriter.close()
-      classes.clear()
-      reverseJavaName.clear()
 
       /* don't javaNameCache.clear() because that causes the following tests to fail:
        *   test/files/run/macro-repl-dontexpand.scala
        *   test/files/jvm/interpreter.scala
-       * TODO but why? what use could javaNameCache possibly see once GenJVM is over?
+       * TODO but why? what use could javaNameCache possibly see once GenASM is over?
        */
 
       /* TODO After emitting all class files (e.g., in a separate compiler phase) ASM can perform bytecode verification:
@@ -153,19 +171,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
   var pickledBytes = 0 // statistics
 
-  // Don't put this in per run caches. Contains entries for classes as well as members.
-  val javaNameCache = new mutable.WeakHashMap[Symbol, Name]() ++= List(
-    NothingClass        -> binarynme.RuntimeNothing,
-    RuntimeNothingClass -> binarynme.RuntimeNothing,
-    NullClass           -> binarynme.RuntimeNull,
-    RuntimeNullClass    -> binarynme.RuntimeNull
-  )
+  val javaNameCache = perRunCaches.newAnyRefMap[Symbol, Name]()
 
   // unlike javaNameCache, reverseJavaName contains entries only for class symbols and their internal names.
-  val reverseJavaName = mutable.Map.empty[String, Symbol] ++= List(
-    binarynme.RuntimeNothing.toString() -> RuntimeNothingClass, // RuntimeNothingClass is the bytecode-level return type of Scala methods with Nothing return-type.
-    binarynme.RuntimeNull.toString()    -> RuntimeNullClass
-  )
+  val reverseJavaName = perRunCaches.newAnyRefMap[String, Symbol]()
 
   private def mkFlags(args: Int*)         = args.foldLeft(0)(_ | _)
   private def hasPublicBitSet(flags: Int) = (flags & asm.Opcodes.ACC_PUBLIC) != 0
@@ -235,6 +244,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
       if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
       if (sym.isArtifact) ACC_SYNTHETIC else 0,
       if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
+      if (sym.hasEnumFlag) ACC_ENUM else 0,
       if (sym.isVarargsMethod) ACC_VARARGS else 0,
       if (sym.hasFlag(Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0
     )
@@ -249,7 +259,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
   }
 
   def isTopLevelModule(sym: Symbol): Boolean =
-    afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
+    exitingPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
 
   def isStaticModule(sym: Symbol): Boolean = {
     sym.isModuleClass && !sym.isImplClass && !sym.isLifted
@@ -284,7 +294,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
   def inameToSymbol(iname: String): Symbol = {
     val name = global.newTypeName(iname)
     val res0 =
-      if (nme.isModuleName(name)) rootMirror.getModule(nme.stripModuleSuffix(name))
+      if (nme.isModuleName(name)) rootMirror.getModuleByName(name.dropModule)
       else                        rootMirror.getClassByName(name.replace('/', '.')) // TODO fails for inner classes (but this hasn't been tested).
     assert(res0 != NoSymbol)
     val res = jsymbol(res0)
@@ -326,7 +336,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
     assert(a.isClass)
     assert(b.isClass)
 
-    val res = Pair(a.isInterface, b.isInterface) match {
+    val res = (a.isInterface, b.isInterface) match {
       case (true, true) =>
         global.lub(List(a.tpe, b.tpe)).typeSymbol // TODO assert == firstCommonSuffix of resp. parents
       case (true, false) =>
@@ -369,7 +379,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
   private val classfileVersion: Int = settings.target.value match {
     case "jvm-1.5"     => asm.Opcodes.V1_5
-    case "jvm-1.5-asm" => asm.Opcodes.V1_5
     case "jvm-1.6"     => asm.Opcodes.V1_6
     case "jvm-1.7"     => asm.Opcodes.V1_7
   }
@@ -397,9 +406,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
   }
 
   /** basic functionality for class file building */
-  abstract class JBuilder(bytecodeWriter: BytecodeWriter) {
+  abstract class JBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) {
 
-    val EMPTY_JTYPE_ARRAY  = Array.empty[asm.Type]
     val EMPTY_STRING_ARRAY = Array.empty[String]
 
     val mdesc_arglessvoid = "()V"
@@ -409,7 +417,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
     val INNER_CLASSES_FLAGS =
       (asm.Opcodes.ACC_PUBLIC    | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
-       asm.Opcodes.ACC_STATIC    | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
+       asm.Opcodes.ACC_STATIC    | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_FINAL)
 
     // -----------------------------------------------------------------------------------------
     // factory methods
@@ -444,8 +452,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
     }
 
     def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = {
-      val dest = new Array[Byte](len);
-      System.arraycopy(b, offset, dest, 0, len);
+      val dest = new Array[Byte](len)
+      System.arraycopy(b, offset, dest, 0, len)
       new asm.CustomAttr(name, dest)
     }
 
@@ -456,7 +464,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
     def writeIfNotTooBig(label: String, jclassName: String, jclass: asm.ClassWriter, sym: Symbol) {
       try {
         val arr = jclass.toByteArray()
-        bytecodeWriter.writeClass(label, jclassName, arr, sym)
+        val outF: scala.tools.nsc.io.AbstractFile = {
+          if(needsOutfile) getFile(sym, jclassName, ".class") else null
+        }
+        bytecodeWriter.writeClass(label, jclassName, arr, outF)
       } catch {
         case e: java.lang.RuntimeException if e != null && (e.getMessage contains "too large!") =>
           reporter.error(sym.pos,
@@ -467,7 +478,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
     /** Specialized array conversion to prevent calling
      *  java.lang.reflect.Array.newInstance via TraversableOnce.toArray
      */
-    def mkArray(xs: Traversable[asm.Type]):  Array[asm.Type]  = { val a = new Array[asm.Type](xs.size); xs.copyToArray(a); a }
     def mkArray(xs: Traversable[String]):    Array[String]    = { val a = new Array[String](xs.size);   xs.copyToArray(a); a }
 
     // -----------------------------------------------------------------------------------------
@@ -510,14 +520,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
      */
     def javaName(sym: Symbol): String = {
 
-        /**
+        /*
          * Checks if the given symbol corresponds to an inner class/object and adds it to innerClassBuffer.
          *
          * Note: This method is called recursively, thus making sure that we add the complete chain
          * of inner classes, all the way up to the root class.
          */
         def collectInnerClass(s: Symbol): Unit = {
-          // TODO: some beforeFlatten { ... } which accounts for
+          // TODO: some enteringFlatten { ... } which accounts for
           // being nested in parameterized classes (if we're going to selectively flatten.)
           val x = innerClassSymbolFor(s)
           if(x ne NoSymbol) {
@@ -532,7 +542,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
       collectInnerClass(sym)
 
-      var hasInternalName = (sym.isClass || (sym.isModule && !sym.isMethod))
+      val hasInternalName = sym.isClass || sym.isModuleNotMethod
       val cachedJN = javaNameCache.getOrElseUpdate(sym, {
         if (hasInternalName) { sym.javaBinaryName }
         else                 { sym.javaSimpleName }
@@ -542,12 +552,18 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
         val internalName = cachedJN.toString()
         val trackedSym = jsymbol(sym)
         reverseJavaName.get(internalName) match {
-          case None         =>
+          case Some(oldsym) if oldsym.exists && trackedSym.exists =>
+            assert(
+              // In contrast, neither NothingClass nor NullClass show up bytecode-level.
+              (oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass) || (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule)),
+              s"""|Different class symbols have the same bytecode-level internal name:
+                  |     name: $internalName
+                  |   oldsym: ${oldsym.fullNameString}
+                  |  tracked: ${trackedSym.fullNameString}
+              """.stripMargin
+            )
+          case _ =>
             reverseJavaName.put(internalName, trackedSym)
-          case Some(oldsym) =>
-            assert((oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass) ||
-                    (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule)), // In contrast, neither NothingClass nor NullClass show up bytecode-level.
-                   "how can getCommonSuperclass() do its job if different class symbols get the same bytecode-level internal name: " + internalName)
         }
       }
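
For orientation: the internal names recorded in reverseJavaName use '/' as the package separator, and a module class keeps its '$' suffix while the mirror class uses the bare name. A small, self-contained illustration (a throwaway demo object, not part of this patch):

    object InternalNameDemo extends App {
      // Class#getName uses dots; ASM-style internal names use slashes.
      println(classOf[scala.collection.Seq[_]].getName)                    // scala.collection.Seq
      println(classOf[scala.collection.Seq[_]].getName.replace('.', '/'))  // scala/collection/Seq
      println(scala.Predef.getClass.getName)                               // scala.Predef$ (the module class)
    }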
 
@@ -589,7 +605,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
     def javaType(s: Symbol): asm.Type = {
       if (s.isMethod) {
-        val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType);
+        val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType)
         asm.Type.getMethodType( resT, (s.tpe.paramTypes map javaType): _*)
       } else { javaType(s.tpe) }
     }
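
The call above assembles an ASM method type from the erased result and parameter types. A minimal sketch of the same API, assuming scala-compiler.jar (which bundles the scala.tools.asm fork of ASM) is on the classpath:

    import scala.tools.asm
    // A method (Int, String) => Boolean erases to the descriptor "(ILjava/lang/String;)Z".
    val mt = asm.Type.getMethodType(
      asm.Type.BOOLEAN_TYPE,
      asm.Type.INT_TYPE,
      asm.Type.getObjectType("java/lang/String"))
    assert(mt.getDescriptor == "(ILjava/lang/String;)Z")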
@@ -599,9 +615,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
     def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
 
     def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
-      /** The outer name for this inner class. Note that it returns null
-       *  when the inner class should not get an index in the constant pool.
-       *  That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
+      /* The outer name for this inner class. Note that it returns null
+       * when the inner class should not get an index in the constant pool.
+       * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
        */
       def outerName(innerSym: Symbol): String = {
         if (innerSym.originalEnclosingMethod != NoSymbol)
@@ -620,7 +636,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
           innerSym.rawname + innerSym.moduleSuffix
 
       // add inner classes which might not have been referenced yet
-      afterErasure {
+      exitingErasure {
         for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
           innerClassBuffer += m
       }
@@ -635,11 +651,12 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
         // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
         for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
-          val flags = mkFlags(
+          val flagsWithFinal: Int = mkFlags(
             if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
             javaFlags(innerSym),
             if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
           ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
+          val flags = if (innerSym.isModuleClass) flagsWithFinal & ~asm.Opcodes.ACC_FINAL else flagsWithFinal // For SI-5676, object overriding.
           val jname = javaName(innerSym)  // never null
           val oname = outerName(innerSym) // null when method-enclosed
           val iname = innerName(innerSym) // null for anonymous inner class
@@ -683,7 +700,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
 
   /** functionality for building plain and mirror classes */
-  abstract class JCommonBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+  abstract class JCommonBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JBuilder(bytecodeWriter, needsOutfile) {
 
     def debugLevel = settings.debuginfo.indexOfChoice
 
@@ -786,139 +803,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
       annot.args.isEmpty &&
       !annot.matches(DeprecatedAttr)
 
-    // @M don't generate java generics sigs for (members of) implementation
-    // classes, as they are monomorphic (TODO: ok?)
-    private def needsGenericSignature(sym: Symbol) = !(
-      // PP: This condition used to include sym.hasExpandedName, but this leads
-      // to the total loss of generic information if a private member is
-      // accessed from a closure: both the field and the accessor were generated
-      // without it.  This is particularly bad because the availability of
-      // generic information could disappear as a consequence of a seemingly
-      // unrelated change.
-         settings.Ynogenericsig.value
-      || sym.isArtifact
-      || sym.isLiftedMethod
-      || sym.isBridge
-      || (sym.ownerChain exists (_.isImplClass))
-    )
-
     def getCurrentCUnit(): CompilationUnit
 
-    /** @return
-     *   - `null` if no Java signature is to be added (`null` is what ASM expects in these cases).
-     *   - otherwise the signature in question
-     */
-    def getGenericSignature(sym: Symbol, owner: Symbol): String = {
-
-      if (!needsGenericSignature(sym)) { return null }
-
-      val memberTpe = beforeErasure(owner.thisType.memberInfo(sym))
-
-      val jsOpt: Option[String] = erasure.javaSig(sym, memberTpe)
-      if (jsOpt.isEmpty) { return null }
-
-      val sig = jsOpt.get
-      log(sig) // This seems useful enough in the general case.
-
-          def wrap(op: => Unit) = {
-            try   { op; true }
-            catch { case _: Throwable => false }
-          }
-
-      if (settings.Xverify.value) {
-        // Run the signature parser to catch bogus signatures.
-        val isValidSignature = wrap {
-          // Alternative: scala.tools.reflect.SigParser (frontend to sun.reflect.generics.parser.SignatureParser)
-          import scala.tools.asm.util.SignatureChecker
-          if (sym.isMethod)    { SignatureChecker checkMethodSignature sig } // requires asm-util.jar
-          else if (sym.isTerm) { SignatureChecker checkFieldSignature  sig }
-          else                 { SignatureChecker checkClassSignature  sig }
-        }
-
-        if(!isValidSignature) {
-          getCurrentCUnit().warning(sym.pos,
-              """|compiler bug: created invalid generic signature for %s in %s
-                 |signature: %s
-                 |if this is reproducible, please report bug at https://issues.scala-lang.org/
-              """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig))
-          return null
-        }
-      }
-
-      if ((settings.check containsName phaseName)) {
-        val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe))
-        val bytecodeTpe = owner.thisType.memberInfo(sym)
-        if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
-          getCurrentCUnit().warning(sym.pos,
-              """|compiler bug: created generic signature for %s in %s that does not conform to its erasure
-                 |signature: %s
-                 |original type: %s
-                 |normalized type: %s
-                 |erasure type: %s
-                 |if this is reproducible, please report bug at http://issues.scala-lang.org/
-              """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe))
-           return null
-        }
-      }
-
-      sig
-    }
-
-    def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = {
-      val ca = new Array[Char](bytes.size)
-      var idx = 0
-      while(idx < bytes.size) {
-        val b: Byte = bytes(idx)
-        assert((b & ~0x7f) == 0)
-        ca(idx) = b.asInstanceOf[Char]
-        idx += 1
-      }
-
-      ca
-    }
-
-    private def arrEncode(sb: ScalaSigBytes): Array[String] = {
-      var strs: List[String]  = Nil
-      val bSeven: Array[Byte] = sb.sevenBitsMayBeZero
-      // chop into slices of at most 65535 bytes, counting 0x00 as taking two bytes (as per JVMS 4.4.7 The CONSTANT_Utf8_info Structure)
-      var prevOffset = 0
-      var offset     = 0
-      var encLength  = 0
-      while(offset < bSeven.size) {
-        val deltaEncLength = (if(bSeven(offset) == 0) 2 else 1)
-        val newEncLength = encLength.toLong + deltaEncLength
-        if(newEncLength >= 65535) {
-          val ba     = bSeven.slice(prevOffset, offset)
-          strs     ::= new java.lang.String(ubytesToCharArray(ba))
-          encLength  = 0
-          prevOffset = offset
-        } else {
-          encLength += deltaEncLength
-          offset    += 1
-        }
-      }
-      if(prevOffset < offset) {
-        assert(offset == bSeven.length)
-        val ba = bSeven.slice(prevOffset, offset)
-        strs ::= new java.lang.String(ubytesToCharArray(ba))
-      }
-      assert(strs.size > 1, "encode instead as one String via strEncode()") // TODO too strict?
-      strs.reverse.toArray
-    }
-
-    private def strEncode(sb: ScalaSigBytes): String = {
-      val ca = ubytesToCharArray(sb.sevenBitsMayBeZero)
-      new java.lang.String(ca)
-      // debug val bvA = new asm.ByteVector; bvA.putUTF8(s)
-      // debug val enc: Array[Byte] = scala.reflect.internal.pickling.ByteCodecs.encode(bytes)
-      // debug assert(enc(idx) == bvA.getByte(idx + 2))
-      // debug assert(bvA.getLength == enc.size + 2)
-    }
+    def getGenericSignature(sym: Symbol, owner: Symbol) = self.getGenericSignature(sym, owner, getCurrentCUnit())
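
For context, the generic signature computed here is the JVM Signature attribute (JVMS 4.3.4); for a method such as the one below, the emitted string has roughly the shape shown in the comment (illustrative example, not produced by this exact code path):

    class SigExample {
      def first[A](xs: List[A]): A = xs.head
      // Signature attribute (roughly): "<A:Ljava/lang/Object;>(Lscala/collection/immutable/List<TA;>;)TA;"
    }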
 
     def emitArgument(av:   asm.AnnotationVisitor,
                      name: String,
                      arg:  ClassfileAnnotArg) {
-      arg match {
+      (arg: @unchecked) match {
 
         case LiteralAnnotArg(const) =>
           if(const.isNonUnitAnyVal) { av.visit(name, const.value) }
@@ -998,7 +890,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
     def emitParamAnnotations(jmethod: asm.MethodVisitor, pannotss: List[List[AnnotationInfo]]) {
       val annotationss = pannotss map (_ filter shouldEmitAnnotation)
       if (annotationss forall (_.isEmpty)) return
-      for (Pair(annots, idx) <- annotationss.zipWithIndex;
+      for ((annots, idx) <- annotationss.zipWithIndex;
            annot <- annots) {
         val AnnotationInfo(typ, args, assocs) = annot
         assert(args.isEmpty, args)
@@ -1036,9 +928,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
       val paramJavaTypes: List[asm.Type] = methodInfo.paramTypes map javaType
       // val paramNames     = 0 until paramJavaTypes.length map ("x_" + _)
 
-      /** Forwarders must not be marked final,
-       *  as the JVM will not allow redefinition of a final static method,
-       *  and we don't know what classes might be subclassing the companion class.  See SI-4827.
+      /* Forwarders must not be marked final,
+       * as the JVM will not allow redefinition of a final static method,
+       * and we don't know what classes might be subclassing the companion class.  See SI-4827.
        */
       // TODO: evaluate the other flags we might be dropping on the floor here.
       // TODO: ACC_SYNTHETIC ?
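
Concretely, a static forwarder delegates to the singleton instance and is deliberately left non-final so the JVM does not reject redefinition in subclasses of the companion class (SI-4827). A sketch of the shape involved (hypothetical names):

    class Foo                                      // the companion class receives the static forwarder
    object Foo { def bar(x: Int): Int = x + 1 }
    // Bytecode-level effect, roughly:
    //   class Foo gains: public static int bar(int x) { return Foo$.MODULE$.bar(x); }  // not ACC_FINAL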
@@ -1047,7 +939,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
       )
 
       // TODO needed? for(ann <- m.annotations) { ann.symbol.initialize }
-      val jgensig = if (m.isDeferred) null else getGenericSignature(m, module); // only add generic signature if method concrete; bug #1745
+      val jgensig = staticForwarderGenericSignature(m, module, getCurrentCUnit())
       addRemoteExceptionAnnot(isRemoteClass, hasPublicBitSet(flags), m)
       val (throws, others) = m.annotations partition (_.symbol == ThrowsClass)
       val thrownExceptions: List[String] = getExceptions(throws)
@@ -1101,7 +993,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
       debuglog("Dumping mirror class for object: " + moduleClass)
 
       val linkedClass  = moduleClass.companionClass
-      val linkedModule = linkedClass.companionSymbol
       lazy val conflictingNames: Set[Name] = {
         (linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name }).toSet
       }
@@ -1109,13 +1000,13 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
       for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) {
         if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor)
-          debuglog("No forwarder for '%s' from %s to '%s'".format(m, jclassName, moduleClass))
+          debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass'")
         else if (conflictingNames(m.name))
-          log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
+          log(s"No forwarder for $m due to conflict with " + linkedClass.info.member(m.name))
         else if (m.hasAccessBoundary)
           log(s"No forwarder for non-public member $m")
         else {
-          log("Adding static forwarder for '%s' from %s to '%s'".format(m, jclassName, moduleClass))
+          debuglog(s"Adding static forwarder for '$m' from $jclassName to '$moduleClass'")
           addForwarder(isRemoteClass, jclass, moduleClass, m)
         }
       }
@@ -1127,16 +1018,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
   trait JAndroidBuilder {
     self: JPlainBuilder =>
 
-    /** From the reference documentation of the Android SDK:
-     *  The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`.
-     *  Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`,
-     *  which is an object implementing the `Parcelable.Creator` interface.
-     */
-    private val androidFieldName = newTermName("CREATOR")
-
-    private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
-    private lazy val AndroidCreatorClass        = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
-
     def isAndroidParcelableClass(sym: Symbol) =
       (AndroidParcelableInterface != NoSymbol) &&
       (sym.parentSymbols contains AndroidParcelableInterface)
@@ -1144,13 +1025,13 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
     /* Typestate: should be called before emitting fields (because it adds an IField to the current IClass). */
     def addCreatorCode(block: BasicBlock) {
       val fieldSymbol = (
-        clasz.symbol.newValue(newTermName(androidFieldName), NoPosition, Flags.STATIC | Flags.FINAL)
+        clasz.symbol.newValue(androidFieldName, NoPosition, Flags.STATIC | Flags.FINAL)
           setInfo AndroidCreatorClass.tpe
       )
       val methodSymbol = definitions.getMember(clasz.symbol.companionModule, androidFieldName)
       clasz addField new IField(fieldSymbol)
-      block emit CALL_METHOD(methodSymbol, Static(false))
-      block emit STORE_FIELD(fieldSymbol, true)
+      block emit CALL_METHOD(methodSymbol, Static(onInstance = false))
+      block emit STORE_FIELD(fieldSymbol, isStatic = true)
     }
 
     def legacyAddCreatorCode(clinit: asm.MethodVisitor) {
@@ -1159,7 +1040,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
       jclass.visitField(
         PublicStaticFinal,
-        androidFieldName,
+        androidFieldName.toString,
         tdesc_creator,
         null, // no java-generic-signature
         null  // no initial value
@@ -1179,7 +1060,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
       clinit.visitMethodInsn(
         asm.Opcodes.INVOKEVIRTUAL,
         moduleName,
-        androidFieldName,
+        androidFieldName.toString,
         asm.Type.getMethodDescriptor(creatorType, Array.empty[asm.Type]: _*)
       )
 
@@ -1187,7 +1068,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
       clinit.visitFieldInsn(
         asm.Opcodes.PUTSTATIC,
         thisName,
-        androidFieldName,
+        androidFieldName.toString,
         tdesc_creator
       )
     }
@@ -1244,8 +1125,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
   case class BlockInteval(start: BasicBlock, end: BasicBlock)
 
   /** builder of plain classes */
-  class JPlainBuilder(bytecodeWriter: BytecodeWriter)
-    extends JCommonBuilder(bytecodeWriter)
+  class JPlainBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean)
+    extends JCommonBuilder(bytecodeWriter, needsOutfile)
     with    JAndroidBuilder {
 
     val MIN_SWITCH_DENSITY = 0.7
@@ -1268,15 +1149,13 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
     private def getSuperInterfaces(c: IClass): Array[String] = {
 
         // Additional interface parents based on annotations and other cues
-        def newParentForAttr(attr: Symbol): Option[Symbol] = attr match {
-          case SerializableAttr => Some(SerializableClass)
-          case CloneableAttr    => Some(CloneableClass)
-          case RemoteAttr       => Some(RemoteInterfaceClass)
-          case _                => None
+        def newParentForAttr(ann: AnnotationInfo): Symbol = ann.symbol match {
+          case RemoteAttr       => RemoteInterfaceClass
+          case _                => NoSymbol
         }
 
-        /** Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
-         *  This is important on Android because there is otherwise an interface explosion.
+        /* Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
+         * This is important on Android because there is otherwise an interface explosion.
          */
         def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = {
           var rest   = lstIfaces
@@ -1294,8 +1173,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
         }
 
       val ps = c.symbol.info.parents
-      val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses;
-      val superInterfaces = (superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol))).distinct
+      val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses
+      val superInterfaces = existingSymbols(superInterfaces0 ++ c.symbol.annotations.map(newParentForAttr)).distinct
 
       if(superInterfaces.isEmpty) EMPTY_STRING_ARRAY
       else mkArray(minimizeInterfaces(superInterfaces) map javaName)
@@ -1319,7 +1198,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
       thisName = javaName(c.symbol) // the internal name of the class being emitted
 
       val ps = c.symbol.info.parents
-      val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol);
+      val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol)
 
       val ifaces = getSuperInterfaces(c)
 
@@ -1366,14 +1245,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
         for (constructor <- c.lookupStaticCtor) {
           addStaticInit(Some(constructor))
         }
-        val skipStaticForwarders = (c.symbol.isInterface || settings.noForwarders.value)
+        val skipStaticForwarders = (c.symbol.isInterface || settings.noForwarders)
         if (!skipStaticForwarders) {
           val lmoc = c.symbol.companionModule
           // add static forwarders if there are no name conflicts; see bugs #363 and #1735
           if (lmoc != NoSymbol) {
             // it must be a top level class (name contains no $s)
             val isCandidateForForwarders = {
-              afterPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
+              exitingPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
             }
             if (isCandidateForForwarders) {
               log("Adding static forwarders from '%s' to implementations in '%s'".format(c.symbol, lmoc))
@@ -1397,38 +1276,13 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
       }
 
       clasz.fields  foreach genField
-      clasz.methods foreach { im =>
-        if (im.symbol.isBridge && isRedundantBridge(im, clasz))
-          // We can't backport the erasure fix of SI-7120 to 2.10.x, but we can detect and delete
-          // bridge methods with identical signatures to their targets.
-          //
-          // NOTE: this backstop only implemented here in the ASM backend, and is not implemented in the FJBG backend.
-          debugwarn(s"Discarding redundant bridge method: ${im.symbol.debugLocationString}. See SI-8114.")
-        else 
-          genMethod(im, c.symbol.isInterface)
-      }
+      clasz.methods foreach { im => genMethod(im, c.symbol.isInterface) }
 
       addInnerClasses(clasz.symbol, jclass)
       jclass.visitEnd()
       writeIfNotTooBig("" + c.symbol.name, thisName, jclass, c.symbol)
     }
 
-    private def isRedundantBridge(bridge: IMethod, owner: IClass): Boolean = {
-      def lastCalledMethod: Option[Symbol] = bridge.code.instructions.reverseIterator.collectFirst {
-        case CALL_METHOD(meth, _) => meth
-      }
-      def hasSameSignatureAsBridge(targetMethod: Symbol): Boolean = {
-        val targetIMethod = clasz.methods find (m => m.symbol == targetMethod)
-        // Important to compare the IMethod#paramss, rather then the erased MethodTypes, as
-        // due to the bug SI-7120, these are out of sync. For example, in the `applyOrElse`
-        // method in run/t8114.scala, the method symbol info has a parameter of type `Long`,
-        // but the IMethod parameter has type `Object`. The latter comes from the info of the
-        // symbol representing the parameter ValDef in the tree, which is incorrectly erased.
-        targetIMethod exists (m => bridge.matchesSignature(m))
-      }
-      lastCalledMethod exists hasSameSignatureAsBridge
-    }
-
     /**
      * @param owner internal name of the enclosing class of the class.
      *
@@ -1458,7 +1312,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
         assert(enclClass.isClass, enclClass)
         val sym = enclClass.primaryConstructor
         if (sym == NoSymbol) {
-          log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(enclClass, clazz))
+          log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(enclClass))
         } else {
           debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
           res = EnclMethodEntry(javaName(enclClass), javaName(sym), javaType(sym))
@@ -1503,12 +1357,17 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
           sym.owner.isSynthetic &&
           sym.owner.tpe.parents.exists { t =>
             val TypeRef(_, sym, _) = t
-            FunctionClass contains sym
+            FunctionClass.seq contains sym
           }
         }
 
       if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return
 
+      if (m.params.size > MaximumJvmParameters) {
+        getCurrentCUnit().error(m.symbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.")
+        return
+      }
+
       debuglog("Generating method " + m.symbol.fullName)
       method = m
       computeLocalVarsIndex(m)
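
Background for the MaximumJvmParameters guard above: the JVM caps a method's parameter slots at 255, where `this` occupies one slot and long/double parameters occupy two (JVMS 4.3.3). A rough slot count, sketched with a hypothetical helper:

    // Hypothetical helper: counts JVM parameter slots for an instance method.
    def jvmParamSlots(paramTypes: Seq[String]): Int =
      1 /* this */ + paramTypes.map { case "Long" | "Double" => 2; case _ => 1 }.sum
    // e.g. jvmParamSlots(Seq.fill(254)("Int")) == 255, the most the verifier accepts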
@@ -1633,19 +1492,20 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
           if (isStaticModule(clasz.symbol)) {
             // call object's private ctor from static ctor
             lastBlock emit NEW(REFERENCE(m.symbol.enclClass))
-            lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(true))
+            lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(onInstance = true))
           }
 
           if (isParcelableClass) { addCreatorCode(lastBlock) }
 
           lastBlock emit RETURN(UNIT)
-          lastBlock.close
+          lastBlock.close()
 
-       	  method = m
+          method = m
        	  jmethod = clinitMethod
           jMethodName = CLASS_CONSTRUCTOR_NAME
           jmethod.visitCode()
-       	  genCode(m, false, true)
+          computeLocalVarsIndex(m)
+          genCode(m, emitVars = false, isStatic = true)
           jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
           jmethod.visitEnd()
 
@@ -1680,8 +1540,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
         case BooleanTag => jcode.boolconst(const.booleanValue)
 
-        case ByteTag    => jcode.iconst(const.byteValue)
-        case ShortTag   => jcode.iconst(const.shortValue)
+        case ByteTag    => jcode.iconst(const.byteValue.toInt)
+        case ShortTag   => jcode.iconst(const.shortValue.toInt)
         case CharTag    => jcode.iconst(const.charValue)
         case IntTag     => jcode.iconst(const.intValue)
 
@@ -1701,7 +1561,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
           val kind = toTypeKind(const.typeValue)
           val toPush: asm.Type =
             if (kind.isValueType) classLiteral(kind)
-            else javaType(kind);
+            else javaType(kind)
           mv.visitLdcInsn(toPush)
 
         case EnumTag   =>
@@ -1724,15 +1584,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
      */
     object jcode {
 
-      import asm.Opcodes;
-
-      def aconst(cst: AnyRef) {
-        if (cst == null) { jmethod.visitInsn(Opcodes.ACONST_NULL) }
-        else             { jmethod.visitLdcInsn(cst) }
-      }
+      import asm.Opcodes
 
       final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) }
 
+      def iconst(cst: Char) { iconst(cst.toInt) }
       def iconst(cst: Int) {
         if (cst >= -1 && cst <= 5) {
           jmethod.visitInsn(Opcodes.ICONST_0 + cst)
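
The branch above covers the single-byte ICONST_M1..ICONST_5 forms; the usual strategy for the remaining ranges is BIPUSH, then SIPUSH, then LDC. A hypothetical mnemonic chooser mirroring that standard JVM practice:

    def intConstOpcode(cst: Int): String =
      if (cst >= -1 && cst <= 5) "ICONST_n"   // single-byte forms ICONST_M1 .. ICONST_5
      else if (cst.isValidByte)  "BIPUSH"     // one operand byte
      else if (cst.isValidShort) "SIPUSH"     // two operand bytes
      else                       "LDC"        // constant-pool entry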
@@ -1824,8 +1680,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
       }
 
       def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) }
-      def emitIF(cond: TestOp, label: asm.Label)      { jmethod.visitJumpInsn(cond.opcodeIF,     label) }
-      def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) }
+      def emitIF(cond: TestOp, label: asm.Label)      { jmethod.visitJumpInsn(cond.opcodeIF(),     label) }
+      def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP(), label) }
       def emitIF_ACMP(cond: TestOp, label: asm.Label) {
         assert((cond == EQ) || (cond == NE), cond)
         val opc = (if(cond == EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE)
@@ -1881,9 +1737,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
         val keyMax = keys(keys.length - 1)
 
         val isDenseEnough: Boolean = {
-          /** Calculate in long to guard against overflow. TODO what overflow??? */
+          /* Calculate in long to guard against overflow. TODO what overflow??? */
           val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double]
-          val klenD:     Double = keys.length
+          val klenD:     Double = keys.length.toDouble
           val kdensity:  Double = (klenD / keyRangeD)
 
           kdensity >= minDensity
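
Worked example of the density test: keys (1, 2, 3, 10) span a key range of 10, so kdensity = 4.0 / 10 = 0.4, which is below MIN_SWITCH_DENSITY (0.7); a sparse LOOKUPSWITCH is emitted instead of padding a TABLESWITCH with default-branch holes.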
@@ -1893,10 +1749,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
           // use a table in which holes are filled with defaultBranch.
           val keyRange    = (keyMax - keyMin + 1)
           val newBranches = new Array[asm.Label](keyRange)
-          var oldPos = 0;
+          var oldPos = 0
           var i = 0
           while(i < keyRange) {
-            val key = keyMin + i;
+            val key = keyMin + i
             if (keys(oldPos) == key) {
               newBranches(i) = branches(oldPos)
               oldPos += 1
@@ -2016,7 +1872,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
       // Part 2 of genCode(): demarcating exception handler boundaries (visitTryCatchBlock() must be invoked before visitLabel() in genBlock())
       // ------------------------------------------------------------------------------------------------------------
 
-        /**Generate exception handlers for the current method.
+        /* Generate exception handlers for the current method.
          *
          * Quoting from the JVMS 4.7.3 The Code Attribute
          * The items of the Code_attribute structure are as follows:
@@ -2039,16 +1895,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
          */
         def genExceptionHandlers() {
 
-          /** Return a list of pairs of intervals where the handler is active.
-           *  Each interval is closed on both ends, ie. inclusive both in the left and right endpoints: [start, end].
-           *  Preconditions:
-           *    - e.covered non-empty
-           *  Postconditions for the result:
-           *    - always non-empty
-           *    - intervals are sorted as per `linearization`
-           *    - the argument's `covered` blocks have been grouped into maximally contiguous intervals,
-           *      ie. between any two intervals in the result there is a non-empty gap.
-           *    - each of the `covered` blocks in the argument is contained in some interval in the result
+          /* Return a list of pairs of intervals where the handler is active.
+           * Each interval is closed on both ends, ie. inclusive both in the left and right endpoints: [start, end].
+           * Preconditions:
+           *   - e.covered non-empty
+           * Postconditions for the result:
+           *   - always non-empty
+           *   - intervals are sorted as per `linearization`
+           *   - the argument's `covered` blocks have been grouped into maximally contiguous intervals,
+           *     ie. between any two intervals in the result there is a non-empty gap.
+           *   - each of the `covered` blocks in the argument is contained in some interval in the result
            */
           def intervals(e: ExceptionHandler): List[BlockInteval] = {
             assert(e.covered.nonEmpty, e)
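
For example, if a handler covers the linearized blocks 1, 2, 3 and 6, 7, the result is the two closed intervals [1, 3] and [6, 7]. A minimal sketch of the grouping on plain Ints instead of BasicBlocks (hypothetical helper):

    def contiguousRuns(sorted: List[Int]): List[(Int, Int)] =
      sorted.foldLeft(List.empty[(Int, Int)]) {
        case ((lo, hi) :: rest, x) if x == hi + 1 => (lo, x) :: rest
        case (acc, x)                             => (x, x) :: acc
      }.reverse
    // contiguousRuns(List(1, 2, 3, 6, 7)) == List((1, 3), (6, 7))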
@@ -2095,7 +1951,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
           // TODO in that case, ExceptionHandler.cls doesn't go through javaName(). What if cls is an inner class?
           for (e <- this.method.exh ; if e.covered.nonEmpty ; p <- intervals(e)) {
             debuglog("Adding exception handler " + e + " at block: " + e.startBlock + " for " + method +
-                     " from: " + p.start + " to: " + p.end + " catching: " + e.cls);
+                     " from: " + p.start + " to: " + p.end + " catching: " + e.cls)
             val cls: String = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
                               else javaName(e.cls)
             jmethod.visitTryCatchBlock(labels(p.start), linNext(p.end), labels(e.startBlock), cls)
@@ -2119,8 +1975,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
           def overlaps(that: Interval): Boolean = { !(this.precedes(that) || that.precedes(this)) }
 
           def mergeWith(that: Interval): Interval = {
-            val newStart = if(this.start <= that.start) this.lstart else that.lstart;
-            val newEnd   = if(this.end   <= that.end)   that.lend   else this.lend;
+            val newStart = if(this.start <= that.start) this.lstart else that.lstart
+            val newEnd   = if(this.end   <= that.end)   that.lend   else this.lend
             Interval(newStart, newEnd)
           }
 
@@ -2176,7 +2032,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
           def getMerged(): scala.collection.Map[Local, List[Interval]] = {
             // TODO should but isn't: unbalanced start(s) of scope(s)
-            val shouldBeEmpty = pending filter { p => val Pair(k, st) = p; st.nonEmpty };
+            val shouldBeEmpty = pending filter { p => val (_, st) = p; st.nonEmpty }
             val merged = mutable.Map[Local, List[Interval]]()
             def addToMerged(lv: Local, start: Label, end: Label) {
               val intv   = Interval(start, end)
@@ -2189,15 +2045,15 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
                  (b) take the latest end (onePastLast if none available)
                  (c) merge the thus made-up interval
              */
-            for(Pair(k, st) <- shouldBeEmpty) {
+            for((k, st) <- shouldBeEmpty) {
               var start = st.toList.sortBy(_.getOffset).head
               if(merged.isDefinedAt(k)) {
                 val balancedStart = merged(k).head.lstart
                 if(balancedStart.getOffset < start.getOffset) {
-                  start = balancedStart;
+                  start = balancedStart
                 }
               }
-              val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend;
+              val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend
               val end = endOpt.getOrElse(onePastLast)
               addToMerged(k, start, end)
             }
@@ -2226,25 +2082,25 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
         }
         // adding non-param locals
         var anonCounter = 0
-        var fltnd: List[Triple[String, Local, Interval]] = Nil
-        for(Pair(local, ranges) <- scoping.getMerged()) {
+        var fltnd: List[Tuple3[String, Local, Interval]] = Nil
+        for((local, ranges) <- scoping.getMerged()) {
           var name = javaName(local.sym)
           if (name == null) {
-            anonCounter += 1;
+            anonCounter += 1
             name = "<anon" + anonCounter + ">"
           }
           for(intrvl <- ranges) {
-            fltnd ::= Triple(name, local, intrvl)
+            fltnd ::= (name, local, intrvl)
           }
         }
         // quest for deterministic output that Map.toList doesn't provide (so that ant test.stability doesn't complain).
         val srtd = fltnd.sortBy { kr =>
-          val Triple(name: String, local: Local, intrvl: Interval) = kr
+          val (name: String, _, intrvl: Interval) = kr
 
-          Triple(intrvl.start, intrvl.end - intrvl.start, name)  // ie sort by (start, length, name)
+          (intrvl.start, intrvl.end - intrvl.start, name)  // ie sort by (start, length, name)
         }
 
-        for(Triple(name, local, Interval(start, end)) <- srtd) {
+        for((name, local, Interval(start, end)) <- srtd) {
           jmethod.visitLocalVariable(name, descriptor(local.kind), null, start, end, indexOf(local))
         }
         // "There may be no more than one LocalVariableTable attribute per local variable in the Code attribute"
@@ -2270,13 +2126,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
         case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
       }
 
-      def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = {
-        target.isPublic || target.isProtected && {
-          (site.enclClass isSubClass target.enclClass) ||
-          (site.enclosingPackage == target.privateWithin)
-        }
-      } // end of genCode()'s isAccessibleFrom()
-
       def genCallMethod(call: CALL_METHOD) {
         val CALL_METHOD(method, style) = call
         val siteSymbol  = clasz.symbol
@@ -2345,7 +2194,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
               lastLineNr = currentLineNr
               val lineLab = new asm.Label
               jmethod.visitLabel(lineLab)
-              lnEntries ::= LineNumberEntry(currentLineNr, lineLab)
+              lnEntries ::= LineNumberEntry(iPos.finalPosition.line, lineLab)
             }
           }
 
@@ -2359,6 +2208,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
         import asm.Opcodes
         (instr.category: @scala.annotation.switch) match {
 
+
           case icodes.localsCat =>
           def genLocalInstr() = (instr: @unchecked) match {
             case THIS(_) => jmethod.visitVarInsn(Opcodes.ALOAD, 0)
@@ -2390,15 +2240,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
                 scoping.popScope(lv, end, instr.pos)
               }
           }
-          genLocalInstr
+          genLocalInstr()
 
           case icodes.stackCat =>
           def genStackInstr() = (instr: @unchecked) match {
 
             case LOAD_MODULE(module) =>
               // assert(module.isModule, "Expected module: " + module)
-              debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
-              if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) {
+              debuglog("generating LOAD_MODULE for: " + module + " flags: " + module.flagString)
+              def inStaticMethod = this.method != null && this.method.symbol.isStaticMember
+              if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString && !inStaticMethod) {
                 jmethod.visitVarInsn(Opcodes.ALOAD, 0)
               } else {
                 jmethod.visitFieldInsn(
@@ -2414,7 +2265,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
             case LOAD_EXCEPTION(_) => ()
           }
-          genStackInstr
+          genStackInstr()
 
           case icodes.constCat => genConstant(jmethod, instr.asInstanceOf[CONSTANT].constant)
 
@@ -2448,11 +2299,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
               }
 
           }
-          genCastInstr
+          genCastInstr()
 
           case icodes.objsCat =>
           def genObjsInstr() = (instr: @unchecked) match {
-
             case BOX(kind) =>
               val MethodNameAndType(mname, mdesc) = jBoxTo(kind)
               jcode.invokestatic(BoxesRunTime, mname, mdesc)
@@ -2468,14 +2318,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
             case MONITOR_ENTER() => emit(Opcodes.MONITORENTER)
             case MONITOR_EXIT() => emit(Opcodes.MONITOREXIT)
           }
-          genObjsInstr
+          genObjsInstr()
 
           case icodes.fldsCat =>
           def genFldsInstr() = (instr: @unchecked) match {
 
             case lf @ LOAD_FIELD(field, isStatic) =>
-              var owner = javaName(lf.hostClass)
-              debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags))
+              val owner = javaName(lf.hostClass)
+              debuglog("LOAD_FIELD with owner: " + owner + " flags: " + field.owner.flagString)
               val fieldJName = javaName(field)
               val fieldDescr = descriptor(field)
               val opc = if (isStatic) Opcodes.GETSTATIC else Opcodes.GETFIELD
@@ -2489,12 +2339,12 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
               jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
 
           }
-          genFldsInstr
+          genFldsInstr()
 
           case icodes.mthdsCat =>
           def genMethodsInstr() = (instr: @unchecked) match {
 
-            /** Special handling to access native Array.clone() */
+            /* Special handling to access native Array.clone() */
             case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
               val target: String = javaType(call.targetTypeKind).getInternalName
               jcode.invokevirtual(target, "clone", mdesc_arrayClone)
@@ -2502,7 +2352,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
             case call @ CALL_METHOD(method, style) => genCallMethod(call)
 
           }
-          genMethodsInstr
+          genMethodsInstr()
 
           case icodes.arraysCat =>
           def genArraysInstr() = (instr: @unchecked) match {
@@ -2511,7 +2361,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
             case CREATE_ARRAY(elem, 1) => jcode newarray elem
             case CREATE_ARRAY(elem, dims) => jmethod.visitMultiANewArrayInsn(descriptor(ArrayN(elem, dims)), dims)
           }
-          genArraysInstr
+          genArraysInstr()
 
           case icodes.jumpsCat =>
           def genJumpInstr() = (instr: @unchecked) match {
@@ -2528,7 +2378,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
               while (restTagss.nonEmpty) {
                 val currLabel = labels(restBranches.head)
                 for (cTag <- restTagss.head) {
-                  flatKeys(k) = cTag;
+                  flatKeys(k) = cTag
                   flatBranches(k) = currLabel
                   k += 1
                 }
@@ -2541,27 +2391,19 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
               jcode.emitSWITCH(flatKeys, flatBranches, defaultLabel, MIN_SWITCH_DENSITY)
 
             case JUMP(whereto) =>
-              if (nextBlock != whereto) {
+              if (nextBlock != whereto)
                 jcode goTo labels(whereto)
-              } else if (m.exh.exists(eh => eh.covers(b))) {
                 // SI-6102: Determine whether eliding this JUMP results in an empty range being covered by some EH.
                 // If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range"
-                val isSthgLeft = b.toList.exists {
-                  case _: LOAD_EXCEPTION => false
-                  case _: SCOPE_ENTER => false
-                  case _: SCOPE_EXIT => false
-                  case _: JUMP => false
-                  case _ => true
-                }
-                if (!isSthgLeft) {
-                  emit(asm.Opcodes.NOP)
-                }
+              else if (newNormal.isJumpOnly(b) && m.exh.exists(eh => eh.covers(b))) {
+                debugwarn("Had a jump only block that wasn't collapsed")
+                emit(asm.Opcodes.NOP)
               }
 
             case CJUMP(success, failure, cond, kind) =>
               if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
                 if (nextBlock == success) {
-                  jcode.emitIF_ICMP(cond.negate, labels(failure))
+                  jcode.emitIF_ICMP(cond.negate(), labels(failure))
                   // .. and fall through to success label
                 } else {
                   jcode.emitIF_ICMP(cond, labels(success))
@@ -2569,7 +2411,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
                 }
               } else if (kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
                 if (nextBlock == success) {
-                  jcode.emitIF_ACMP(cond.negate, labels(failure))
+                  jcode.emitIF_ACMP(cond.negate(), labels(failure))
                   // .. and fall through to success label
                 } else {
                   jcode.emitIF_ACMP(cond, labels(success))
@@ -2586,7 +2428,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
                     else emit(Opcodes.DCMPL)
                 }
                 if (nextBlock == success) {
-                  jcode.emitIF(cond.negate, labels(failure))
+                  jcode.emitIF(cond.negate(), labels(failure))
                   // .. and fall through to success label
                 } else {
                   jcode.emitIF(cond, labels(success))
@@ -2597,7 +2439,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
             case CZJUMP(success, failure, cond, kind) =>
               if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
                 if (nextBlock == success) {
-                  jcode.emitIF(cond.negate, labels(failure))
+                  jcode.emitIF(cond.negate(), labels(failure))
                 } else {
                   jcode.emitIF(cond, labels(success))
                   if (nextBlock != failure) { jcode goTo labels(failure) }
@@ -2633,7 +2475,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
                     else emit(Opcodes.DCMPL)
                 }
                 if (nextBlock == success) {
-                  jcode.emitIF(cond.negate, labels(failure))
+                  jcode.emitIF(cond.negate(), labels(failure))
                 } else {
                   jcode.emitIF(cond, labels(success))
                   if (nextBlock != failure) { jcode goTo labels(failure) }
@@ -2641,26 +2483,25 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
               }
 
           }
-          genJumpInstr
+          genJumpInstr()
 
           case icodes.retCat =>
           def genRetInstr() = (instr: @unchecked) match {
             case RETURN(kind) => jcode emitRETURN kind
             case THROW(_) => emit(Opcodes.ATHROW)
           }
-          genRetInstr
+          genRetInstr()
         }
       }
 
-      /**
+      /*
        * Emits one or more conversion instructions based on the types given as arguments.
        *
        * @param from The type of the value to be converted into another type.
        * @param to   The type the value will be converted into.
        */
       def emitT2T(from: TypeKind, to: TypeKind) {
-        assert(isNonUnitValueTK(from), from)
-        assert(isNonUnitValueTK(to),   to)
+        assert(isNonUnitValueTK(from) && isNonUnitValueTK(to), s"Cannot emit primitive conversion from $from to $to")
 
             def pickOne(opcs: Array[Int]) {
               val chosen = (to: @unchecked) match {
@@ -2676,10 +2517,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
             }
 
         if(from == to) { return }
-        if((from == BOOL) || (to == BOOL)) {
-          // the only conversion involving BOOL that is allowed is (BOOL -> BOOL)
-          throw new Error("inconvertible types : " + from.toString() + " -> " + to.toString())
-        }
+        // the only conversion involving BOOL that is allowed is (BOOL -> BOOL)
+        assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to")
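
As a reference for the cases below: the JVM has no single-opcode conversions from long, float, or double directly to the sub-int types, so those conversions are emitted as a chain through INT. A few illustrative chains (standard JVM opcodes):

    // INT    -> BYTE  : I2B
    // LONG   -> SHORT : L2I, I2S   (there is no L2S opcode)
    // DOUBLE -> FLOAT : D2F
    // FLOAT  -> LONG  : F2L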
 
         if(from.isIntSizedType) { // BYTE, CHAR, SHORT, and INT. (we're done with BOOL already)
 
@@ -2727,7 +2566,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
       def genPrimitive(primitive: Primitive, pos: Position) {
 
-        import asm.Opcodes;
+        import asm.Opcodes
 
         primitive match {
 
@@ -2758,7 +2597,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
                 abort("Unknown arithmetic primitive " + primitive)
             }
             }
-            genArith
+            genArith()
 
           // TODO Logical's 2nd elem should be declared ValueTypeKind, to better approximate its allowed values (isIntSized, its comments appears to convey)
           // TODO GenICode uses `toTypeKind` to define that elem, `toValueTypeKind` would be needed instead.
@@ -2790,7 +2629,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
                     if (kind != BOOL) { emitT2T(INT, kind) }
                 }
             }
-            genLogical
+            genLogical()
 
           case Shift(op, kind) =>
             def genShift() = op match {
@@ -2819,7 +2658,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
                     emitT2T(INT, kind)
                 }
             }
-            genShift
+            genShift()
 
           case Comparison(op, kind) =>
             def genCompare() = op match {
@@ -2839,12 +2678,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
                 }
             }
-            genCompare
+            genCompare()
 
           case Conversion(src, dst) =>
             debuglog("Converting from: " + src + " to: " + dst)
-            if (dst == BOOL) { println("Illegal conversion at: " + clasz + " at: " + pos.source + ":" + pos.line) }
-            else { emitT2T(src, dst) }
+            emitT2T(src, dst)
 
           case ArrayLength(_) => emit(Opcodes.ARRAYLENGTH)
 
@@ -2893,15 +2731,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
     ////////////////////// local vars ///////////////////////
 
-    // def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe))
-
     def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1
 
-    // def indexOf(m: IMethod, sym: Symbol): Int = {
-    //   val Some(local) = m lookupLocal sym
-    //   indexOf(local)
-    // }
-
     final def indexOf(local: Local): Int = {
       assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ")
       local.index
@@ -2912,7 +2743,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
      * *Does not assume the parameters come first!*
      */
     def computeLocalVarsIndex(m: IMethod) {
-      var idx = if (m.symbol.isStaticMember) 0 else 1;
+      var idx = if (m.symbol.isStaticMember) 0 else 1
 
       for (l <- m.params) {
         debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
@@ -2931,10 +2762,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
 
   /** builder of mirror classes */
-  class JMirrorBuilder(bytecodeWriter: BytecodeWriter) extends JCommonBuilder(bytecodeWriter) {
+  class JMirrorBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JCommonBuilder(bytecodeWriter, needsOutfile) {
 
     private var cunit: CompilationUnit = _
-    def getCurrentCUnit(): CompilationUnit = cunit;
+    def getCurrentCUnit(): CompilationUnit = cunit
 
     /** Generate a mirror class for a top-level module. A mirror class is a class
      *  containing only static methods that forward to the corresponding method
@@ -2956,7 +2787,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
                                      JAVA_LANG_OBJECT.getInternalName,
                                      EMPTY_STRING_ARRAY)
 
-      log("Dumping mirror class for '%s'".format(mirrorName))
+      log(s"Dumping mirror class for '$mirrorName'")
 
       // typestate: entering mode with valid call sequences:
       //   [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )*
@@ -2979,13 +2810,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
       mirrorClass.visitEnd()
       writeIfNotTooBig("" + modsym.name, mirrorName, mirrorClass, modsym)
     }
-
-
   } // end of class JMirrorBuilder
 
 
   /** builder of bean info classes */
-  class JBeanInfoBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+  class JBeanInfoBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JBuilder(bytecodeWriter, needsOutfile) {
 
     /**
      * Generate a bean info class that describes the given class.
@@ -3027,8 +2856,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
 
       for (f <- clasz.fields if f.symbol.hasGetter;
 	         g = f.symbol.getter(clasz.symbol);
-	         s = f.symbol.setter(clasz.symbol);
-	         if g.isPublic && !(f.symbol.name startsWith "$")
+	         s = f.symbol.setter(clasz.symbol)
+           if g.isPublic && !(f.symbol.name startsWith "$")
           ) {
              // inserting $outer breaks the bean
              fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList
@@ -3117,111 +2946,50 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
    * TODO Eventually, these utilities should be moved to IMethod and reused from normalize() (there's nothing JVM-specific about them).
    */
   object newNormal {
-
-    def startsWithJump(b: BasicBlock): Boolean = { assert(b.nonEmpty, "empty block"); b.firstInstruction.isInstanceOf[JUMP] }
-
-    /** Prune from an exception handler those covered blocks which are jump-only. */
-    private def coverWhatCountsOnly(m: IMethod): Boolean = {
-      assert(m.hasCode, "code-less method")
-
-      var wasReduced = false
-      for(h <- m.exh) {
-        val shouldntCover = (h.covered filter startsWithJump)
-        if(shouldntCover.nonEmpty) {
-          wasReduced = true
-          h.covered --= shouldntCover // not removing any block on purpose.
-        }
-      }
-
-      wasReduced
+    /**
+     * True if a block is "jump only" which is defined
+     * as being a block that consists only of 0 or more instructions that
+     * won't make it to the JVM followed by a JUMP.
+     */
+    def isJumpOnly(b: BasicBlock): Boolean = {
+      val nonICode = firstNonIcodeOnlyInstructions(b)
+      // by definition a block has to have a jump, conditional jump, return, or throw
+      assert(nonICode.hasNext, "empty block")
+      nonICode.next.isInstanceOf[JUMP]
     }
 
-    /** An exception handler is pruned provided any of the following holds:
-     *   (1) it covers nothing (for example, this may result after removing unreachable blocks)
-     *   (2) each block it covers is of the form: JUMP(_)
-     * Return true iff one or more ExceptionHandlers were removed.
-     *
-     * A caveat: removing an exception handler, for whatever reason, means that its handler code (even if unreachable)
-     * won't be able to cause a class-loading-exception. As a result, behavior can be different.
+    /**
+     * Returns the instructions in a block that follow all ICode-only instructions,
+     * where an ICode-only instruction is one that won't make it to the JVM.
      */
-    private def elimNonCoveringExh(m: IMethod): Boolean = {
-      assert(m.hasCode, "code-less method")
-
-        def isRedundant(eh: ExceptionHandler): Boolean = {
-          (eh.cls != NoSymbol) && ( // TODO `eh.isFinallyBlock` more readable than `eh.cls != NoSymbol`
-                eh.covered.isEmpty
-            || (eh.covered forall startsWithJump)
-          )
-        }
-
-      var wasReduced = false
-      val toPrune = (m.exh.toSet filter isRedundant)
-      if(toPrune.nonEmpty) {
-        wasReduced = true
-        for(h <- toPrune; r <- h.blocks) { m.code.removeBlock(r) } // TODO m.code.removeExh(h)
-        m.exh = (m.exh filterNot toPrune)
-      }
-
-      wasReduced
+    private def firstNonIcodeOnlyInstructions(b: BasicBlock): Iterator[Instruction] = {
+	  def isICodeOnlyInstruction(i: Instruction) = i match {
+	    case LOAD_EXCEPTION(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) => true
+	    case _ => false
+	  }
+	  b.iterator dropWhile isICodeOnlyInstruction
     }
 
-    private def isJumpOnly(b: BasicBlock): Option[BasicBlock] = {
-      b.toList match {
-        case JUMP(whereto) :: rest =>
-          assert(rest.isEmpty, "A block contains instructions after JUMP (looks like enterIgnoreMode() was itself ignored.)")
+    /**
+     * Returns the target of a block that is "jump only" which is defined
+     * as being a block that consists only of 0 or more instructions that
+     * won't make it to the JVM followed by a JUMP.
+     *
+     * @param b The basic block to examine
+     * @return Some(target) if b is a "jump only" block or None if it's not
+     */
+    private def getJumpOnlyTarget(b: BasicBlock): Option[BasicBlock] = {
+      val nonICode = firstNonIcodeOnlyInstructions(b)
+              // by definition a block has to have a jump, conditional jump, return, or throw
+      assert(nonICode.nonEmpty, "empty block")
+      nonICode.next match {
+        case JUMP(whereto) =>
+          assert(!nonICode.hasNext, "A block contains instructions after JUMP (looks like enterIgnoreMode() was itself ignored.)")
           Some(whereto)
         case _ => None
       }
     }
 
-    private def directSuccStar(b: BasicBlock): List[BasicBlock] = { directSuccStar(List(b)) }
-
-    /** Transitive closure of successors potentially reachable due to normal (non-exceptional) control flow.
-       Those BBs in the argument are also included in the result */
-    private def directSuccStar(starters: Traversable[BasicBlock]): List[BasicBlock] = {
-      val result = new mutable.ListBuffer[BasicBlock]
-      var toVisit: List[BasicBlock] = starters.toList.distinct
-      while(toVisit.nonEmpty) {
-        val h   = toVisit.head
-        toVisit = toVisit.tail
-        result += h
-        for(p <- h.directSuccessors; if !result.contains(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
-      }
-      result.toList
-    }
-
-    /** Returns:
-     *  for single-block self-loops, the pair (start, Nil)
-     *  for other cycles,            the pair (backedge-target, basic-blocks-in-the-cycle-except-backedge-target)
-     *  otherwise a pair consisting of:
-     *    (a) the endpoint of a (single or multi-hop) chain of JUMPs
-     *        (such endpoint does not start with a JUMP and therefore is not part of the chain); and
-     *    (b) the chain (ie blocks to be removed when collapsing the chain of jumps).
-     *  Precondition: the BasicBlock given as argument starts with an unconditional JUMP.
-     */
-    private def finalDestination(start: BasicBlock): (BasicBlock, List[BasicBlock]) = {
-      assert(startsWithJump(start), "not the start of a (single or multi-hop) chain of JUMPs.")
-      var hops: List[BasicBlock] = Nil
-      var prev = start
-      var done = false
-      do {
-        done = isJumpOnly(prev) match {
-          case Some(dest) =>
-            if (dest == start) { return (start, hops) } // leave infinite-loops in place
-            hops ::= prev
-            if (hops.contains(dest)) {
-              // leave infinite-loops in place
-              return (dest, hops filterNot (dest eq _))
-            }
-            prev = dest;
-            false
-          case None => true
-        }
-      } while(!done)
-
-      (prev, hops)
-    }
-
     /**
      * Collapse a chain of "jump-only" blocks such as:
      *
@@ -3237,7 +3005,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
      *  In more detail:
      *    Starting at each of the entry points (m.startBlock, the start block of each exception handler)
      *    rephrase those control-flow instructions targeting a jump-only block (which jumps to a final destination D) to target D.
-     *    The blocks thus skipped are also removed from IMethod.blocks.
+     *    The blocks thus skipped become eligible to be removed by the reachability analyzer.
      *
      *  Rationale for this normalization:
      *    test/files/run/private-inline.scala after -optimize is chock full of
@@ -3248,108 +3016,309 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
      *    and thus ranges with identical (start, end) (i.e, identical after GenJVM omitted the JUMPs in question)
      *    could be weeded out to avoid "java.lang.ClassFormatError: Illegal exception table range"
      *    Now that visitTryCatchBlock() must be called before Labels are resolved,
-     *    this method gets rid of the BasicBlocks described above (to recap, consisting of just a JUMP).
+     *    this normalization renders the BasicBlocks described above (to recap, consisting of just a JUMP) unreachable.
      */
-    private def collapseJumpOnlyBlocks(m: IMethod): Boolean = {
+    private def collapseJumpOnlyBlocks(m: IMethod) {
       assert(m.hasCode, "code-less method")
 
-          /* "start" is relative in a cycle, but we call this helper with the "first" entry-point we found. */
-          def realTarget(jumpStart: BasicBlock): Map[BasicBlock, BasicBlock] = {
-            assert(startsWithJump(jumpStart), "not part of a jump-chain")
-            val Pair(dest, redundants) = finalDestination(jumpStart)
-            (for(skipOver <- redundants) yield Pair(skipOver, dest)).toMap
-          }
+      def rephraseGotos(detour: mutable.Map[BasicBlock, BasicBlock]) {
+        def lookup(b: BasicBlock) = detour.getOrElse(b, b)
 
-          def rephraseGotos(detour: Map[BasicBlock, BasicBlock]) {
-            for(Pair(oldTarget, newTarget) <- detour.iterator) {
-              if(m.startBlock == oldTarget) {
-                m.code.startBlock = newTarget
-              }
-              for(eh <- m.exh; if eh.startBlock == oldTarget) {
-                eh.setStartBlock(newTarget)
-              }
-              for(b <- m.blocks; if !detour.isDefinedAt(b)) {
-                val idxLast = (b.size - 1)
-                b.lastInstruction match {
-                  case JUMP(whereto) =>
-                    if (whereto == oldTarget) {
-                      b.replaceInstruction(idxLast, JUMP(newTarget))
-                    }
-                  case CJUMP(succ, fail, cond, kind) =>
-                    if ((succ == oldTarget) || (fail == oldTarget)) {
-                      b.replaceInstruction(idxLast, CJUMP(detour.getOrElse(succ, succ),
-                                                          detour.getOrElse(fail, fail),
-                                                          cond, kind))
-                    }
-                  case CZJUMP(succ, fail, cond, kind) =>
-                    if ((succ == oldTarget) || (fail == oldTarget)) {
-                      b.replaceInstruction(idxLast, CZJUMP(detour.getOrElse(succ, succ),
-                                                           detour.getOrElse(fail, fail),
-                                                           cond, kind))
-                    }
-                  case SWITCH(tags, labels) =>
-                    if(labels exists (detour.isDefinedAt(_))) {
-                      val newLabels = (labels map { lab => detour.getOrElse(lab, lab) })
-                      b.replaceInstruction(idxLast, SWITCH(tags, newLabels))
-                    }
-                  case _ => ()
-                }
-              }
+        m.code.startBlock = lookup(m.code.startBlock)
+
+        for(eh <- m.exh)
+          eh.setStartBlock(lookup(eh.startBlock))
+
+        for (b <- m.blocks) {
+          def replaceLastInstruction(i: Instruction) = {
+            if (b.lastInstruction != i) {
+              val idxLast = b.size - 1
+	          debuglog(s"In block $b, replacing last instruction ${b.lastInstruction} with ${i}")
+	          b.replaceInstruction(idxLast, i)
             }
           }
 
-          /* remove from all containers that may contain a reference to */
-          def elide(redu: BasicBlock) {
-            assert(m.startBlock != redu, "startBlock should have been re-wired by now")
-            m.code.removeBlock(redu);
+          b.lastInstruction match {
+            case JUMP(whereto) =>
+              replaceLastInstruction(JUMP(lookup(whereto)))
+            case CJUMP(succ, fail, cond, kind) =>
+              replaceLastInstruction(CJUMP(lookup(succ), lookup(fail), cond, kind))
+            case CZJUMP(succ, fail, cond, kind)  =>
+              replaceLastInstruction(CZJUMP(lookup(succ), lookup(fail), cond, kind))
+            case SWITCH(tags, labels) =>
+              val newLabels = (labels map lookup)
+              replaceLastInstruction(SWITCH(tags, newLabels))
+            case _ => ()
           }
+        }
+      }
 
-      var wasReduced = false
-      val entryPoints: List[BasicBlock] = m.startBlock :: (m.exh map (_.startBlock));
-
-      var elided     = mutable.Set.empty[BasicBlock] // debug
-      var newTargets = mutable.Set.empty[BasicBlock] // debug
-
-      for (ep <- entryPoints) {
-        var reachable = directSuccStar(ep) // this list may contain blocks belonging to jump-chains that we'll skip over
-        while(reachable.nonEmpty) {
-          val h = reachable.head
-          reachable = reachable.tail
-          if(startsWithJump(h)) {
-            val detour = realTarget(h)
-            if(detour.nonEmpty) {
-              wasReduced = true
-              reachable = (reachable filterNot (detour.keySet.contains(_)))
-              rephraseGotos(detour)
-              detour.keySet foreach elide
-              elided     ++= detour.keySet
-              newTargets ++= detour.values
-            }
+      /*
+       * Computes a mapping from each jump-only block to its
+       * final destination, which is either a non-jump-only
+       * block or, if the block is part of a jump-only cycle,
+       * the block itself.
+       */
+      def computeDetour: mutable.Map[BasicBlock, BasicBlock] = {
+        // fetch the jump only blocks and their immediate destinations
+        val pairs = for {
+          block <- m.blocks.toIterator
+          target <- getJumpOnlyTarget(block)
+        } yield(block, target)
+
+        // mapping from a jump-only block to our current knowledge of its
+        // final destination. Initially it's just jump block to immediate jump
+        // target
+        val detour = mutable.Map[BasicBlock, BasicBlock](pairs.toSeq:_*)
+
+        // for each jump-only block find its final destination
+        // taking advantage of the destinations we found for previous
+        // blocks
+        for (key <- detour.keySet) {
+          // we use Robert Floyd's classic Tortoise and Hare algorithm
+          @tailrec
+          def findDestination(tortoise: BasicBlock, hare: BasicBlock): BasicBlock = {
+            if (tortoise == hare)
+              // cycle detected, map key to key
+              key
+            else if (detour contains hare) {
+              // advance hare once
+              val hare1 = detour(hare)
+              // make sure we can advance hare a second time
+              if (detour contains hare1)
+                // advance tortoise once and hare a second time
+                findDestination(detour(tortoise), detour(hare1))
+              else
+                // hare1 is not in the map so it's not a jump-only block, it's the destination
+                hare1
+            } else
+              // hare is not in the map so it's not a jump-only block, it's the destination
+              hare
           }
+          // update the mapping for key based on its final destination
+          detour(key) = findDestination(key, detour(key))
         }
+        detour
       }
-      assert(newTargets.intersect(elided).isEmpty, "contradiction: we just elided the final destionation of a jump-chain")
 
-      wasReduced
+      val detour = computeDetour
+      rephraseGotos(detour)
+
+      if (settings.debug) {
+        val (remappings, cycles) = detour partition {case (source, target) => source != target}
+        for ((source, target) <- remappings) {
+		   debuglog(s"Will elide jump only block $source because it can be jumped around to get to $target.")
+		   if (m.startBlock == source) debugwarn("startBlock should have been re-wired by now")
+        }
+        val sources = remappings.keySet
+        val targets = remappings.values.toSet
+        val intersection = sources intersect targets
+
+        if (intersection.nonEmpty) debugwarn(s"contradiction: we seem to have some source and target overlap in blocks ${intersection.mkString}. Map was ${detour.mkString}")
+
+        for ((source, _) <- cycles) {
+          debuglog(s"Block $source is in a do-nothing infinite loop. Did the user write 'while(true){}'?")
+        }
+      }
+    }
+
+    /**
+     * Removes all blocks that are unreachable in a method using a standard reachability analysis.
+     */
+    def elimUnreachableBlocks(m: IMethod) {
+      assert(m.hasCode, "code-less method")
+
+      // assume nothing is reachable until we prove it can be reached
+      val reachable = mutable.Set[BasicBlock]()
+
+      // the set of blocks that we know are reachable but have
+      // yet to be marked reachable, initially only the start block
+      val worklist = mutable.Set(m.startBlock)
+
+      while (worklist.nonEmpty) {
+        val block = worklist.head
+        worklist remove block
+        // we know that one is reachable
+        reachable add block
+        // so are its successors, so go back around and add the ones we still
+        // think are unreachable
+        worklist ++= (block.successors filterNot reachable)
+      }
+
+      // exception handlers need to be told not to cover unreachable blocks
+      // and exception handlers that no longer cover any blocks need to be
+      // removed entirely
+      val unusedExceptionHandlers = mutable.Set[ExceptionHandler]()
+      for (exh <- m.exh) {
+        exh.covered = exh.covered filter reachable
+        if (exh.covered.isEmpty) {
+          unusedExceptionHandlers += exh
+        }
+      }
+
+      // remove the unused exception handler references
+      if (settings.debug)
+        for (exh <- unusedExceptionHandlers) debuglog(s"eliding exception handler $exh because it does not cover any reachable blocks")
+      m.exh = m.exh filterNot unusedExceptionHandlers
+
+      // everything not in the reachable set is unreachable, unused, and unloved. buh bye
+      for (b <- m.blocks filterNot reachable) {
+    	  debuglog(s"eliding block $b because it is unreachable")
+    	  m.code removeBlock b
+      }
     }
 
     def normalize(m: IMethod) {
       if(!m.hasCode) { return }
       collapseJumpOnlyBlocks(m)
-      var wasReduced = false;
-      do {
-        wasReduced = false
-        // Prune from an exception handler those covered blocks which are jump-only.
-        wasReduced |= coverWhatCountsOnly(m); icodes.checkValid(m) // TODO should be unnecessary now that collapseJumpOnlyBlocks(m) is in place
-        // Prune exception handlers covering nothing.
-        wasReduced |= elimNonCoveringExh(m);  icodes.checkValid(m)
-
-        // TODO see note in genExceptionHandlers about an ExceptionHandler.covered containing dead blocks (newNormal should remove them, but, where do those blocks come from?)
-      } while (wasReduced)
-
-      // TODO this would be a good time to remove synthetic local vars seeing no use, don't forget to call computeLocalVarsIndex() afterwards.
+      if (settings.optimise)
+        elimUnreachableBlocks(m)
+      icodes checkValid m
+    }
+
+  }
+
+  // @M don't generate java generics sigs for (members of) implementation
+  // classes, as they are monomorphic (TODO: ok?)
+  private def needsGenericSignature(sym: Symbol) = !(
+    // PP: This condition used to include sym.hasExpandedName, but this leads
+    // to the total loss of generic information if a private member is
+    // accessed from a closure: both the field and the accessor were generated
+    // without it.  This is particularly bad because the availability of
+    // generic information could disappear as a consequence of a seemingly
+    // unrelated change.
+       settings.Ynogenericsig
+    || sym.isArtifact
+    || sym.isLiftedMethod
+    || sym.isBridge
+    || (sym.ownerChain exists (_.isImplClass))
+  )
+
+  final def staticForwarderGenericSignature(sym: Symbol, moduleClass: Symbol, unit: CompilationUnit): String = {
+    if (sym.isDeferred) null // only add generic signature if method concrete; bug #1745
+    else {
+      // SI-3452 Static forwarder generation uses the same erased signature as the method it forwards to.
+      // By rights, it should use the signature as-seen-from the module class, and add suitable
+      // primitive and value-class boxing/unboxing.
+      // But for now, just like we did in mixin, we just avoid writing a wrong generic signature
+      // (one that doesn't erase to the actual signature). See run/t3452b for a test case.
+      val memberTpe = enteringErasure(moduleClass.thisType.memberInfo(sym))
+      val erasedMemberType = erasure.erasure(sym)(memberTpe)
+      if (erasedMemberType =:= sym.info)
+        getGenericSignature(sym, moduleClass, memberTpe, unit)
+      else null
+    }
+  }
+
+  /** @return
+   *   - `null` if no Java signature is to be added (`null` is what ASM expects in these cases).
+   *   - otherwise the signature in question
+   */
+  def getGenericSignature(sym: Symbol, owner: Symbol, unit: CompilationUnit): String = {
+    val memberTpe = enteringErasure(owner.thisType.memberInfo(sym))
+    getGenericSignature(sym, owner, memberTpe, unit)
+  }
+  def getGenericSignature(sym: Symbol, owner: Symbol, memberTpe: Type, unit: CompilationUnit): String = {
+    if (!needsGenericSignature(sym)) { return null }
+
+    val jsOpt: Option[String] = erasure.javaSig(sym, memberTpe)
+    if (jsOpt.isEmpty) { return null }
+
+    val sig = jsOpt.get
+    log(sig) // This seems useful enough in the general case.
+
+        def wrap(op: => Unit) = {
+          try   { op; true }
+          catch { case _: Throwable => false }
+        }
+
+    if (settings.Xverify) {
+      // Run the signature parser to catch bogus signatures.
+      val isValidSignature = wrap {
+        // Alternative: scala.tools.reflect.SigParser (frontend to sun.reflect.generics.parser.SignatureParser)
+        import scala.tools.asm.util.CheckClassAdapter
+        if (sym.isMethod)    { CheckClassAdapter checkMethodSignature sig } // requires asm-util.jar
+        else if (sym.isTerm) { CheckClassAdapter checkFieldSignature  sig }
+        else                 { CheckClassAdapter checkClassSignature  sig }
+      }
+
+      if(!isValidSignature) {
+        unit.warning(sym.pos,
+            """|compiler bug: created invalid generic signature for %s in %s
+               |signature: %s
+               |if this is reproducible, please report bug at https://issues.scala-lang.org/
+            """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig))
+        return null
+      }
     }
 
+    if ((settings.check containsName phaseName)) {
+      val normalizedTpe = enteringErasure(erasure.prepareSigMap(memberTpe))
+      val bytecodeTpe = owner.thisType.memberInfo(sym)
+      if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
+        unit.warning(sym.pos,
+            """|compiler bug: created generic signature for %s in %s that does not conform to its erasure
+               |signature: %s
+               |original type: %s
+               |normalized type: %s
+               |erasure type: %s
+               |if this is reproducible, please report bug at http://issues.scala-lang.org/
+            """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe))
+         return null
+      }
+    }
+
+    sig
   }
 
+  def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = {
+    val ca = new Array[Char](bytes.length)
+    var idx = 0
+    while(idx < bytes.length) {
+      val b: Byte = bytes(idx)
+      assert((b & ~0x7f) == 0)
+      ca(idx) = b.asInstanceOf[Char]
+      idx += 1
+    }
+
+    ca
+  }
+
+  final def arrEncode(sb: ScalaSigBytes): Array[String] = {
+    var strs: List[String]  = Nil
+    val bSeven: Array[Byte] = sb.sevenBitsMayBeZero
+    // chop into slices of at most 65535 bytes, counting 0x00 as taking two bytes (as per JVMS 4.4.7 The CONSTANT_Utf8_info Structure)
+    var prevOffset = 0
+    var offset     = 0
+    var encLength  = 0
+    while(offset < bSeven.length) {
+      val deltaEncLength = (if(bSeven(offset) == 0) 2 else 1)
+      val newEncLength = encLength.toLong + deltaEncLength
+      if(newEncLength >= 65535) {
+        val ba     = bSeven.slice(prevOffset, offset)
+        strs     ::= new java.lang.String(ubytesToCharArray(ba))
+        encLength  = 0
+        prevOffset = offset
+      } else {
+        encLength += deltaEncLength
+        offset    += 1
+      }
+    }
+    if(prevOffset < offset) {
+      assert(offset == bSeven.length)
+      val ba = bSeven.slice(prevOffset, offset)
+      strs ::= new java.lang.String(ubytesToCharArray(ba))
+    }
+    assert(strs.size > 1, "encode instead as one String via strEncode()") // TODO too strict?
+    strs.reverse.toArray
+  }
+
+  private def strEncode(sb: ScalaSigBytes): String = {
+    val ca = ubytesToCharArray(sb.sevenBitsMayBeZero)
+    new java.lang.String(ca)
+    // debug val bvA = new asm.ByteVector; bvA.putUTF8(s)
+    // debug val enc: Array[Byte] = scala.reflect.internal.pickling.ByteCodecs.encode(bytes)
+    // debug assert(enc(idx) == bvA.getByte(idx + 2))
+    // debug assert(bvA.getLength == enc.size + 2)
+  }
 }
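
[Editor's note] The computeDetour helper in the hunk above applies Floyd's tortoise-and-hare cycle detection to the map of jump-only blocks. The following is a minimal, self-contained sketch of that lookup, not part of this patch: block ids are plain Ints and a hypothetical `next` map stands in for `detour`; all names are illustrative.

    import scala.annotation.tailrec

    object JumpChainSketch {
      /* `next` maps a jump-only block to its immediate jump target; `start`
       * is assumed to be a key of `next`. The result is the final
       * non-jump-only destination, or `start` itself when the chain runs
       * into a cycle (infinite loops are left in place). */
      def finalDestination(start: Int, next: Map[Int, Int]): Int = {
        @tailrec
        def walk(tortoise: Int, hare: Int): Int =
          if (tortoise == hare) start                 // hare caught the tortoise: cycle
          else next.get(hare) match {
            case None        => hare                  // hare left the map: real destination
            case Some(hare1) => next.get(hare1) match {
              case None        => hare1               // one more hop reaches the destination
              case Some(hare2) => walk(next(tortoise), hare2) // tortoise +1, hare +2
            }
          }
        walk(start, next(start))
      }

      def main(args: Array[String]): Unit = {
        println(finalDestination(1, Map(1 -> 2, 2 -> 3))) // 3: block 3 is not jump-only
        println(finalDestination(1, Map(1 -> 2, 2 -> 1))) // 1: two-block cycle, kept as-is
      }
    }
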
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
deleted file mode 100644
index 72b7e35..0000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Stephane Micheloud
- */
-
-
-package scala.tools.nsc
-package backend.jvm
-
-import ch.epfl.lamp.fjbg._
-import symtab.Flags
-
-trait GenAndroid {
-  self: GenJVM =>
-
-  import global._
-  import icodes._
-  import opcodes._
-
-  /** From the reference documentation of the Android SDK:
-   *  The `Parcelable` interface identifies classes whose instances can be
-   *  written to and restored from a `Parcel`. Classes implementing the
-   *  `Parcelable` interface must also have a static field called `CREATOR`,
-   *  which is an object implementing the `Parcelable.Creator` interface.
-   */
-  private val fieldName = newTermName("CREATOR")
-
-  private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
-  private lazy val AndroidCreatorClass        = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
-
-  def isAndroidParcelableClass(sym: Symbol) =
-    (AndroidParcelableInterface != NoSymbol) &&
-    (sym.parentSymbols contains AndroidParcelableInterface)
-
-  def addCreatorCode(codegen: BytecodeGenerator, block: BasicBlock) {
-    import codegen._
-    val fieldSymbol = (
-      clasz.symbol.newValue(newTermName(fieldName), NoPosition, Flags.STATIC | Flags.FINAL)
-        setInfo AndroidCreatorClass.tpe
-    )
-    val methodSymbol = definitions.getMember(clasz.symbol.companionModule, fieldName)
-    clasz addField new IField(fieldSymbol)
-    block emit CALL_METHOD(methodSymbol, Static(false))
-    block emit STORE_FIELD(fieldSymbol, true)
-  }
-
-  def legacyAddCreatorCode(codegen: BytecodeGenerator, clinit: JExtendedCode) {
-    import codegen._
-    val creatorType = javaType(AndroidCreatorClass)
-    jclass.addNewField(PublicStaticFinal,
-                       fieldName,
-                       creatorType)
-    val moduleName = javaName(clasz.symbol)+"$"
-    clinit.emitGETSTATIC(moduleName,
-                         nme.MODULE_INSTANCE_FIELD.toString,
-                         new JObjectType(moduleName))
-    clinit.emitINVOKEVIRTUAL(moduleName, fieldName,
-                             new JMethodType(creatorType, Array()))
-    clinit.emitPUTSTATIC(jclass.getName(), fieldName, creatorType)
-  }
-
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
new file mode 100644
index 0000000..1931004
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
@@ -0,0 +1,381 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+
+package scala
+package tools.nsc
+package backend
+package jvm
+
+import scala.collection.{ mutable, immutable }
+import scala.annotation.switch
+
+import scala.tools.asm
+
+/*
+ *  Prepare in-memory representations of classfiles using the ASM Tree API, and serialize them to disk.
+ *
+ *  Three pipelines are at work, each taking work items from a queue dedicated to that pipeline:
+ *
+ *  (There's another pipeline so to speak, the one that populates queue-1 by traversing a CompilationUnit until ClassDefs are found,
+ *   but the "interesting" pipelines are the ones described below)
+ *
+ *    (1) In the first queue, an item consists of a ClassDef along with its arrival position.
+ *        This position is needed at the time classfiles are serialized to disk,
+ *        so as to emit classfiles in the same order CleanUp handed them over.
+ *        As a result, two runs of the compiler on the same files produce jars that are identical on a byte basis.
+ *        See `ant test.stability`
+ *
+ *    (2) The second queue contains items where a ClassDef has been lowered into:
+ *          (a) an optional mirror class,
+ *          (b) a plain class, and
+ *          (c) an optional bean class.
+ *
+ *    (3) The third queue contains items ready for serialization.
+ *        It's a priority queue that follows the original arrival order,
+ *        so as to emit identical jars on repeated compilation of the same sources.
+ *
+ *  Plain, mirror, and bean classes are built respectively by PlainClassBuilder, JMirrorBuilder, and JBeanInfoBuilder.
+ *
+ *  @author  Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+ *  @version 1.0
+ *
+ */
+abstract class GenBCode extends BCodeSyncAndTry {
+  import global._
+
+  val phaseName = "jvm"
+
+  override def newPhase(prev: Phase) = new BCodePhase(prev)
+
+  final class PlainClassBuilder(cunit: CompilationUnit) extends SyncAndTryBuilder(cunit)
+
+  class BCodePhase(prev: Phase) extends StdPhase(prev) {
+
+    override def name = phaseName
+    override def description = "Generate bytecode from ASTs using the ASM library"
+    override def erasedTypes = true
+
+    private var bytecodeWriter  : BytecodeWriter   = null
+    private var mirrorCodeGen   : JMirrorBuilder   = null
+    private var beanInfoCodeGen : JBeanInfoBuilder = null
+
+    /* ---------------- q1 ---------------- */
+
+    case class Item1(arrivalPos: Int, cd: ClassDef, cunit: CompilationUnit) {
+      def isPoison = { arrivalPos == Int.MaxValue }
+    }
+    private val poison1 = Item1(Int.MaxValue, null, null)
+    private val q1 = new java.util.LinkedList[Item1]
+
+    /* ---------------- q2 ---------------- */
+
+    case class Item2(arrivalPos:   Int,
+                     mirror:       asm.tree.ClassNode,
+                     plain:        asm.tree.ClassNode,
+                     bean:         asm.tree.ClassNode,
+                     outFolder:    scala.tools.nsc.io.AbstractFile) {
+      def isPoison = { arrivalPos == Int.MaxValue }
+    }
+
+    private val poison2 = Item2(Int.MaxValue, null, null, null, null)
+    private val q2 = new _root_.java.util.LinkedList[Item2]
+
+    /* ---------------- q3 ---------------- */
+
+    /*
+     *  An item of queue-3 (the last queue before serializing to disk) contains three of these
+     *  (one for each of mirror, plain, and bean classes).
+     *
+     *  @param jclassName  internal name of the class
+     *  @param jclassBytes bytecode emitted for the class SubItem3 represents
+     */
+    case class SubItem3(
+      jclassName:  String,
+      jclassBytes: Array[Byte]
+    )
+
+    case class Item3(arrivalPos: Int,
+                     mirror:     SubItem3,
+                     plain:      SubItem3,
+                     bean:       SubItem3,
+                     outFolder:  scala.tools.nsc.io.AbstractFile) {
+
+      def isPoison  = { arrivalPos == Int.MaxValue }
+    }
+    private val i3comparator = new java.util.Comparator[Item3] {
+      override def compare(a: Item3, b: Item3) = {
+        if (a.arrivalPos < b.arrivalPos) -1
+        else if (a.arrivalPos == b.arrivalPos) 0
+        else 1
+      }
+    }
+    private val poison3 = Item3(Int.MaxValue, null, null, null, null)
+    private val q3 = new java.util.PriorityQueue[Item3](1000, i3comparator)
+
+    /*
+     *  Pipeline that takes ClassDefs from queue-1, lowers them into an intermediate form, placing them on queue-2
+     */
+    class Worker1(needsOutFolder: Boolean) {
+
+      val caseInsensitively = mutable.Map.empty[String, Symbol]
+
+      def run() {
+        while (true) {
+          val item = q1.poll
+          if (item.isPoison) {
+            q2 add poison2
+            return
+          }
+          else {
+            try   { visit(item) }
+            catch {
+              case ex: Throwable =>
+                ex.printStackTrace()
+                error(s"Error while emitting ${item.cunit.source}\n${ex.getMessage}")
+            }
+          }
+        }
+      }
+
+      /*
+       *  Checks for duplicate internal names case-insensitively,
+       *  builds ASM ClassNodes for mirror, plain, and bean classes;
+       *  enqueues them in queue-2.
+       *
+       */
+      def visit(item: Item1) {
+        val Item1(arrivalPos, cd, cunit) = item
+        val claszSymbol = cd.symbol
+
+        // GenASM checks this before classfiles are emitted, https://github.com/scala/scala/commit/e4d1d930693ac75d8eb64c2c3c69f2fc22bec739
+        val lowercaseJavaClassName = claszSymbol.javaClassName.toLowerCase
+        caseInsensitively.get(lowercaseJavaClassName) match {
+          case None =>
+            caseInsensitively.put(lowercaseJavaClassName, claszSymbol)
+          case Some(dupClassSym) =>
+            item.cunit.warning(
+              claszSymbol.pos,
+              s"Class ${claszSymbol.javaClassName} differs only in case from ${dupClassSym.javaClassName}. " +
+              "Such classes will overwrite one another on case-insensitive filesystems."
+            )
+        }
+
+        // -------------- mirror class, if needed --------------
+        val mirrorC =
+          if (isStaticModule(claszSymbol) && isTopLevelModule(claszSymbol)) {
+            if (claszSymbol.companionClass == NoSymbol) {
+              mirrorCodeGen.genMirrorClass(claszSymbol, cunit)
+            } else {
+              log(s"No mirror class for module with linked class: ${claszSymbol.fullName}")
+              null
+            }
+          } else null
+
+        // -------------- "plain" class --------------
+        val pcb = new PlainClassBuilder(cunit)
+        pcb.genPlainClass(cd)
+        val outF = if (needsOutFolder) getOutFolder(claszSymbol, pcb.thisName, cunit) else null;
+        val plainC = pcb.cnode
+
+        // -------------- bean info class, if needed --------------
+        val beanC =
+          if (claszSymbol hasAnnotation BeanInfoAttr) {
+            beanInfoCodeGen.genBeanInfoClass(
+              claszSymbol, cunit,
+              fieldSymbols(claszSymbol),
+              methodSymbols(cd)
+            )
+          } else null
+
+          // ----------- hand over to pipeline-2
+
+        val item2 =
+          Item2(arrivalPos,
+                mirrorC, plainC, beanC,
+                outF)
+
+        q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done.
+
+      } // end of method visit(Item1)
+
+    } // end of class BCodePhase.Worker1
+
+    /*
+     *  Pipeline that takes ClassNodes from queue-2. The unit of work depends on the optimization level:
+     *
+     *    (a) no optimization involves:
+     *          - converting the plain ClassNode to byte array and placing it on queue-3
+     */
+    class Worker2 {
+
+      def run() {
+        while (true) {
+          val item = q2.poll
+          if (item.isPoison) {
+            q3 add poison3
+            return
+          }
+          else {
+            try   { addToQ3(item) }
+            catch {
+              case ex: Throwable =>
+                ex.printStackTrace()
+                error(s"Error while emitting ${item.plain.name}\n${ex.getMessage}")
+            }
+          }
+        }
+      }
+
+      private def addToQ3(item: Item2) {
+
+        def getByteArray(cn: asm.tree.ClassNode): Array[Byte] = {
+          val cw = new CClassWriter(extraProc)
+          cn.accept(cw)
+          cw.toByteArray
+        }
+
+        val Item2(arrivalPos, mirror, plain, bean, outFolder) = item
+
+        val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror))
+        val plainC  = SubItem3(plain.name, getByteArray(plain))
+        val beanC   = if (bean == null)   null else SubItem3(bean.name, getByteArray(bean))
+
+        q3 add Item3(arrivalPos, mirrorC, plainC, beanC, outFolder)
+
+      }
+
+    } // end of class BCodePhase.Worker2
+
+    var arrivalPos = 0
+
+    /*
+     *  A run of the BCodePhase phase comprises:
+     *
+     *    (a) set-up steps (most notably supporting maps in `BCodeTypes`,
+     *        but also "the" writer where class files in byte-array form go)
+     *
+     *    (b) building of ASM ClassNodes, their optimization and serialization.
+     *
+     *    (c) tear down (closing the classfile-writer and clearing maps)
+     *
+     */
+    override def run() {
+
+      arrivalPos = 0 // just in case
+      scalaPrimitives.init
+      initBCodeTypes()
+
+      // initBytecodeWriter invokes fullName, thus we have to run it before the typer-dependent thread is activated.
+      bytecodeWriter  = initBytecodeWriter(cleanup.getEntryPoints)
+      mirrorCodeGen   = new JMirrorBuilder
+      beanInfoCodeGen = new JBeanInfoBuilder
+
+      val needsOutfileForSymbol = bytecodeWriter.isInstanceOf[ClassBytecodeWriter]
+      buildAndSendToDisk(needsOutfileForSymbol)
+
+      // closing output files.
+      bytecodeWriter.close()
+
+      /* TODO Bytecode can be verified (now that all classfiles have been written to disk)
+       *
+       * (1) asm.util.CheckAdapter.verify()
+       *       public static void verify(ClassReader cr, ClassLoader loader, boolean dump, PrintWriter pw)
+       *     passing a custom ClassLoader to verify inter-dependent classes.
+       *     Alternatively,
+       *       - an offline-bytecode verifier could be used (e.g. Maxine brings one as separate tool).
+       *       - -Xverify:all
+       *
+       * (2) if requested, check-java-signatures, over and beyond the syntactic checks in `getGenericSignature()`
+       *
+       */
+
+      // clearing maps
+      clearBCodeTypes()
+    }
+
+    /*
+     *  Sequentially:
+     *    (a) place all ClassDefs in queue-1
+     *    (b) dequeue one at a time from queue-1, convert it to ASM ClassNode, place in queue-2
+     *    (c) dequeue one at a time from queue-2, convert it to byte-array,    place in queue-3
+     *    (d) serialize to disk by draining queue-3.
+     */
+    private def buildAndSendToDisk(needsOutFolder: Boolean) {
+
+      feedPipeline1()
+      (new Worker1(needsOutFolder)).run()
+      (new Worker2).run()
+      drainQ3()
+
+    }
+
+    /* Feed pipeline-1: place all ClassDefs on q1, recording their arrival position. */
+    private def feedPipeline1() {
+      super.run()
+      q1 add poison1
+    }
+
+    /* Pipeline that writes classfile representations to disk. */
+    private def drainQ3() {
+
+      def sendToDisk(cfr: SubItem3, outFolder: scala.tools.nsc.io.AbstractFile) {
+        if (cfr != null){
+          val SubItem3(jclassName, jclassBytes) = cfr
+          try {
+            val outFile =
+              if (outFolder == null) null
+              else getFileForClassfile(outFolder, jclassName, ".class")
+            bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, outFile)
+          }
+          catch {
+            case e: FileConflictException =>
+              error(s"error writing $jclassName: ${e.getMessage}")
+          }
+        }
+      }
+
+      var moreComing = true
+      // `expected` denotes the arrivalPos whose Item3 should be serialized next
+      var expected = 0
+
+      while (moreComing) {
+        val incoming = q3.poll
+        moreComing   = !incoming.isPoison
+        if (moreComing) {
+          val item = incoming
+          val outFolder = item.outFolder
+          sendToDisk(item.mirror, outFolder)
+          sendToDisk(item.plain,  outFolder)
+          sendToDisk(item.bean,   outFolder)
+          expected += 1
+        }
+      }
+
+      // we're done
+      assert(q1.isEmpty, s"Some ClassDefs remained in the first queue: $q1")
+      assert(q2.isEmpty, s"Some classfiles remained in the second queue: $q2")
+      assert(q3.isEmpty, s"Some classfiles weren't written to disk: $q3")
+
+    }
+
+    override def apply(cunit: CompilationUnit): Unit = {
+
+      def gen(tree: Tree) {
+        tree match {
+          case EmptyTree            => ()
+          case PackageDef(_, stats) => stats foreach gen
+          case cd: ClassDef         =>
+            q1 add Item1(arrivalPos, cd, cunit)
+            arrivalPos += 1
+        }
+      }
+
+      gen(cunit.body)
+    }
+
+  } // end of class BCodePhase
+
+} // end of class GenBCode
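
[Editor's note] The header comment of GenBCode.scala above describes three queues handed from one pipeline stage to the next, with a "poison" item signalling the end of input. The following is a stripped-down, hypothetical sketch of that hand-off, not part of this patch: single-threaded, with Int work items instead of ClassDefs, and all names illustrative.

    object PipelineSketch {
      private val poison = Int.MaxValue
      private val q1 = new java.util.LinkedList[Int]      // raw work items
      private val q2 = new java.util.LinkedList[String]   // "lowered" items

      /* Stage 1: drain q1 until the poison item arrives, then poison q2 so
       * the next stage also knows when to stop. */
      private def worker1(): Unit = {
        while (true) {
          val item = q1.poll
          if (item == poison) { q2 add "<poison>"; return }
          q2 add s"classfile-$item"
        }
      }

      def main(args: Array[String]): Unit = {
        (1 to 3) foreach (q1 add _)
        q1 add poison                       // signal end-of-input exactly once
        worker1()
        var moreComing = true
        while (moreComing) {                // Stage 2: "serialize" by printing
          val out = q2.poll
          moreComing = out != "<poison>"
          if (moreComing) println(s"writing $out")
        }
      }
    }
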
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
deleted file mode 100644
index 36b294b..0000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ /dev/null
@@ -1,1921 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Iulian Dragos
- */
-
-package scala.tools.nsc
-package backend.jvm
-
-import java.io.{ByteArrayOutputStream, DataOutputStream, OutputStream }
-import java.nio.ByteBuffer
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
-import scala.tools.nsc.symtab._
-import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
-import scala.reflect.internal.ClassfileConstants._
-import ch.epfl.lamp.fjbg._
-import JAccessFlags._
-import JObjectType.{ JAVA_LANG_STRING, JAVA_LANG_OBJECT }
-import java.util.jar.{ JarEntry, JarOutputStream }
-import scala.tools.nsc.io.AbstractFile
-import scala.language.postfixOps
-
-/** This class ...
- *
- *  @author  Iulian Dragos
- *  @version 1.0
- *
- */
-abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with BytecodeWriters with GenJVMASM {
-  import global._
-  import icodes._
-  import icodes.opcodes._
-  import definitions._
-
-  val phaseName = "jvm"
-
-  /** Create a new phase */
-  override def newPhase(p: Phase): Phase = new JvmPhase(p)
-
-  /** JVM code generation phase
-   */
-  class JvmPhase(prev: Phase) extends ICodePhase(prev) {
-    def name = phaseName
-    override def erasedTypes = true
-    def apply(cls: IClass) = sys.error("no implementation")
-
-    override def run() {
-      // we reinstantiate the bytecode generator at each run, to allow the GC
-      // to collect everything
-      if (settings.debug.value)
-        inform("[running phase " + name + " on icode]")
-
-      if (settings.Xdce.value)
-        for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
-          log(s"Optimizer eliminated ${sym.fullNameString}")
-          icodes.classes -= sym
-        }
-
-      // For predictably ordered error messages.
-      val sortedClasses = classes.values.toList sortBy ("" + _.symbol.fullName)
-      val entryPoints   = sortedClasses filter isJavaEntryPoint
-
-      val bytecodeWriter = settings.outputDirs.getSingleOutput match {
-        case Some(f) if f hasExtension "jar" =>
-          // If no main class was specified, see if there's only one
-          // entry point among the classes going into the jar.
-          if (settings.mainClass.isDefault) {
-            entryPoints map (_.symbol fullName '.') match {
-              case Nil      =>
-                log("No Main-Class designated or discovered.")
-              case name :: Nil =>
-                log("Unique entry point: setting Main-Class to " + name)
-                settings.mainClass.value = name
-              case names =>
-                log("No Main-Class due to multiple entry points:\n  " + names.mkString("\n  "))
-            }
-          }
-          else log("Main-Class was specified: " + settings.mainClass.value)
-
-          new DirectToJarfileWriter(f.file)
-
-        case _                               =>
-          if (settings.Ygenjavap.isDefault) {
-            if(settings.Ydumpclasses.isDefault)
-              new ClassBytecodeWriter { }
-            else
-              new ClassBytecodeWriter with DumpBytecodeWriter { }
-          }
-          else new ClassBytecodeWriter with JavapBytecodeWriter { }
-      }
-
-      val codeGenerator = new BytecodeGenerator(bytecodeWriter)
-      debuglog("Created new bytecode generator for " + classes.size + " classes.")
-
-      sortedClasses foreach { c =>
-        try codeGenerator.genClass(c)
-        catch {
-          case e: JCode.CodeSizeTooBigException =>
-            log("Skipped class %s because it has methods that are too long.".format(c))
-        }
-      }
-
-      bytecodeWriter.close()
-      classes.clear()
-    }
-  }
-
-  var pickledBytes = 0 // statistics
-
-  /**
-   * Java bytecode generator.
-   *
-   */
-  class BytecodeGenerator(bytecodeWriter: BytecodeWriter) extends BytecodeUtil {
-    def this() = this(new ClassBytecodeWriter { })
-    def debugLevel = settings.debuginfo.indexOfChoice
-    import bytecodeWriter.writeClass
-
-    val MIN_SWITCH_DENSITY = 0.7
-    val INNER_CLASSES_FLAGS =
-      (ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED | ACC_STATIC | ACC_INTERFACE | ACC_ABSTRACT)
-
-    val PublicStatic      = ACC_PUBLIC | ACC_STATIC
-    val PublicStaticFinal = ACC_PUBLIC | ACC_STATIC | ACC_FINAL
-
-    val StringBuilderClassName = javaName(definitions.StringBuilderClass)
-    val BoxesRunTime = "scala.runtime.BoxesRunTime"
-
-    val StringBuilderType = new JObjectType(StringBuilderClassName)               // TODO use ASMType.getObjectType
-    val toStringType      = new JMethodType(JAVA_LANG_STRING, JType.EMPTY_ARRAY)  // TODO use ASMType.getMethodType
-    val arrayCloneType    = new JMethodType(JAVA_LANG_OBJECT, JType.EMPTY_ARRAY)
-    val MethodTypeType    = new JObjectType("java.dyn.MethodType")
-    val JavaLangClassType = new JObjectType("java.lang.Class")
-    val MethodHandleType  = new JObjectType("java.dyn.MethodHandle")
-
-    // Scala attributes
-    val BeanInfoAttr        = rootMirror.getRequiredClass("scala.beans.BeanInfo")
-    val BeanInfoSkipAttr    = rootMirror.getRequiredClass("scala.beans.BeanInfoSkip")
-    val BeanDisplayNameAttr = rootMirror.getRequiredClass("scala.beans.BeanDisplayName")
-    val BeanDescriptionAttr = rootMirror.getRequiredClass("scala.beans.BeanDescription")
-
-    // Additional interface parents based on annotations and other cues
-    def newParentForAttr(attr: Symbol): Option[Symbol] = attr match {
-      case SerializableAttr => Some(SerializableClass)
-      case CloneableAttr    => Some(JavaCloneableClass)
-      case RemoteAttr       => Some(RemoteInterfaceClass)
-      case _                => None
-    }
-
-    val versionPickle = {
-      val vp = new PickleBuffer(new Array[Byte](16), -1, 0)
-      assert(vp.writeIndex == 0, vp)
-      vp writeNat PickleFormat.MajorVersion
-      vp writeNat PickleFormat.MinorVersion
-      vp writeNat 0
-      vp
-    }
-
-    private def helperBoxTo(kind: ValueTypeKind): Tuple2[String, JMethodType] = {
-      val boxedType = definitions.boxedClass(kind.toType.typeSymbol)
-      val mtype = new JMethodType(javaType(boxedType), Array(javaType(kind)))
-
-      Pair("boxTo" + boxedType.decodedName, mtype)
-    }
-
-    private val jBoxTo: Map[TypeKind, Tuple2[String, JMethodType]] = Map(
-      BOOL   -> helperBoxTo(BOOL)  ,
-      BYTE   -> helperBoxTo(BYTE)  ,
-      CHAR   -> helperBoxTo(CHAR)  ,
-      SHORT  -> helperBoxTo(SHORT) ,
-      INT    -> helperBoxTo(INT)   ,
-      LONG   -> helperBoxTo(LONG)  ,
-      FLOAT  -> helperBoxTo(FLOAT) ,
-      DOUBLE -> helperBoxTo(DOUBLE)
-    )
-
-    private def helperUnboxTo(kind: ValueTypeKind): Tuple2[String, JMethodType] = {
-      val mtype = new JMethodType(javaType(kind), Array(JAVA_LANG_OBJECT))
-      val mname = "unboxTo" + kind.toType.typeSymbol.decodedName
-
-      Pair(mname, mtype)
-    }
-
-    private val jUnboxTo: Map[TypeKind, Tuple2[String, JMethodType]] = Map(
-      BOOL   -> helperUnboxTo(BOOL)  ,
-      BYTE   -> helperUnboxTo(BYTE)  ,
-      CHAR   -> helperUnboxTo(CHAR)  ,
-      SHORT  -> helperUnboxTo(SHORT) ,
-      INT    -> helperUnboxTo(INT)   ,
-      LONG   -> helperUnboxTo(LONG)  ,
-      FLOAT  -> helperUnboxTo(FLOAT) ,
-      DOUBLE -> helperUnboxTo(DOUBLE)
-    )
-
-    var clasz: IClass = _
-    var method: IMethod = _
-    var jclass: JClass = _
-    var jmethod: JMethod = _
-    // var jcode: JExtendedCode = _
-
-    def isParcelableClass = isAndroidParcelableClass(clasz.symbol)
-    def isRemoteClass = clasz.symbol hasAnnotation RemoteAttr
-    def serialVUID = clasz.symbol getAnnotation SerialVersionUIDAttr collect {
-      case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue
-    }
-
-    val fjbgContext = new FJBGContext(49, 0)
-
-    val emitSource = debugLevel >= 1
-    val emitLines  = debugLevel >= 2
-    val emitVars   = debugLevel >= 3
-
-    // bug had phase with wrong name; leaving enabled for brief pseudo deprecation
-    private val checkSignatures = (
-         (settings.check containsName phaseName)
-      || (settings.check.value contains "genjvm") && {
-            global.warning("This option will be removed: please use -Ycheck:%s, not -Ycheck:genjvm." format phaseName)
-            true
-         }
-    )
-
-    /** For given symbol return a symbol corresponding to a class that should be declared as inner class.
-     *
-     *  For example:
-     *  class A {
-     *    class B
-     *    object C
-     *  }
-     *
-     *  then method will return NoSymbol for A, the same symbol for A.B (corresponding to A$B class) and A$C$ symbol
-     *  for A.C.
-     */
-    private def innerClassSymbolFor(s: Symbol): Symbol =
-      if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol
-
-    override def javaName(sym: Symbol): String = { // TODO Miguel says: check whether a single pass over `icodes.classes` can populate `innerClassBuffer` faster.
-      /**
-       * Checks if given symbol corresponds to inner class/object and add it to innerClassBuffer
-       *
-       * Note: This method is called recursively thus making sure that we add complete chain
-       * of inner class all until root class.
-       */
-      def collectInnerClass(s: Symbol): Unit = {
-        // TODO: some beforeFlatten { ... } which accounts for
-        // being nested in parameterized classes (if we're going to selectively flatten.)
-        val x = innerClassSymbolFor(s)
-        if(x ne NoSymbol) {
-          assert(x.isClass, "not an inner-class symbol")
-          val isInner = !x.rawowner.isPackageClass
-          if (isInner) {
-            innerClassBuffer += x
-            collectInnerClass(x.rawowner)
-          }
-        }
-      }
-      collectInnerClass(sym)
-
-      super.javaName(sym)
-    }
-
-    /** Write a class to disk, adding the Scala signature (pickled type
-     *  information) and inner classes.
-     *
-     * @param jclass The FJBG class, where code was emitted
-     * @param sym    The corresponding symbol, used for looking up pickled information
-     */
-    def emitClass(jclass: JClass, sym: Symbol) {
-      addInnerClasses(jclass)
-      writeClass("" + sym.name, jclass.getName(), toByteArray(jclass), sym)
-    }
-
-    /** Returns the ScalaSignature annotation if it must be added to this class,
-     *  none otherwise; furthermore, it adds to `jclass` the ScalaSig marker
-     *  attribute (marking that a scala signature annotation is present) or the
-     *  Scala marker attribute (marking that the signature for this class is in
-     *  another file). The annotation that is returned by this method must be
-     *  added to the class' annotations list when generating them.
-     *
-     *  @param jclass The class file that is being readied.
-     *  @param sym    The symbol for which the signature has been entered in
-     *                the symData map. This is different than the symbol
-     *                that is being generated in the case of a mirror class.
-     *  @return       An option that is:
-     *                - defined and contains an annotation info of the
-     *                  ScalaSignature type, instantiated with the pickle
-     *                  signature for sym (a ScalaSig marker attribute has
-     *                  been written);
-     *                - undefined if the jclass/sym couple must not contain a
-     *                  signature (a Scala marker attribute has been written).
-     */
-    def scalaSignatureAddingMarker(jclass: JClass, sym: Symbol): Option[AnnotationInfo] =
-      currentRun.symData get sym match {
-        case Some(pickle) if !nme.isModuleName(newTermName(jclass.getName)) =>
-          val scalaAttr =
-            fjbgContext.JOtherAttribute(jclass, jclass, tpnme.ScalaSignatureATTR.toString,
-                                        versionPickle.bytes, versionPickle.writeIndex)
-          jclass addAttribute scalaAttr
-          val scalaAnnot = {
-            val sigBytes = ScalaSigBytes(pickle.bytes.take(pickle.writeIndex))
-            AnnotationInfo(sigBytes.sigAnnot, Nil, List((nme.bytes, sigBytes)))
-          }
-          pickledBytes += pickle.writeIndex
-          currentRun.symData -= sym
-          currentRun.symData -= sym.companionSymbol
-          Some(scalaAnnot)
-        case _ =>
-          val markerAttr =
-            fjbgContext.JOtherAttribute(jclass, jclass, tpnme.ScalaATTR.toString, new Array[Byte](0), 0)
-          jclass addAttribute markerAttr
-          None
-      }
-
-    private var innerClassBuffer = mutable.LinkedHashSet[Symbol]()
-
-    /** Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
-     *  This is important on Android because there is otherwise an interface explosion.
-     */
-    private def minimizeInterfaces(interfaces: List[Symbol]): List[Symbol] = {
-      var rest   = interfaces
-      var leaves = List.empty[Symbol]
-      while(!rest.isEmpty) {
-        val candidate = rest.head
-        val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
-        if(!nonLeaf) {
-          leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
-        }
-        rest = rest.tail
-      }
-
-      leaves
-    }
-
-    def genClass(c: IClass) {
-      clasz = c
-      innerClassBuffer.clear()
-
-      val name    = javaName(c.symbol)
-
-      val ps = c.symbol.info.parents
-
-      val superClass: Symbol = if(ps.isEmpty) ObjectClass else ps.head.typeSymbol;
-
-      val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses;
-      val superInterfaces = superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol)) distinct
-
-      val ifaces =
-        if(superInterfaces.isEmpty) JClass.NO_INTERFACES
-        else mkArray(minimizeInterfaces(superInterfaces) map javaName)
-
-      jclass = fjbgContext.JClass(javaFlags(c.symbol),
-                                  name,
-                                  javaName(superClass),
-                                  ifaces,
-                                  c.cunit.source.toString)
-
-      if (isStaticModule(c.symbol) || serialVUID != None || isParcelableClass) {
-        if (isStaticModule(c.symbol))
-          addModuleInstanceField
-        addStaticInit(jclass, c.lookupStaticCtor)
-
-        if (isTopLevelModule(c.symbol)) {
-          if (c.symbol.companionClass == NoSymbol)
-            generateMirrorClass(c.symbol, c.cunit.source)
-          else
-            log("No mirror class for module with linked class: " +
-                c.symbol.fullName)
-        }
-      }
-      else {
-        c.lookupStaticCtor foreach (constructor => addStaticInit(jclass, Some(constructor)))
-
-        // it must be a top level class (name contains no $s)
-        def isCandidateForForwarders(sym: Symbol): Boolean =
-          afterPickler {
-            !(sym.name.toString contains '$') && sym.hasModuleFlag && !sym.isImplClass && !sym.isNestedClass
-          }
-
-        // At some point this started throwing lots of exceptions as a compile was finishing.
-        // error: java.lang.AssertionError:
-        //   assertion failed: List(object package$CompositeThrowable, object package$CompositeThrowable)
-        // ...is the one I've seen repeatedly.  Suppressing.
-        val lmoc = (
-          try c.symbol.companionModule
-          catch { case x: AssertionError =>
-            Console.println("Suppressing failed assert: " + x)
-            NoSymbol
-          }
-        )
-        // add static forwarders if there are no name conflicts; see bugs #363 and #1735
-        if (lmoc != NoSymbol && !c.symbol.isInterface) {
-          if (isCandidateForForwarders(lmoc) && !settings.noForwarders.value) {
-            log("Adding static forwarders from '%s' to implementations in '%s'".format(c.symbol, lmoc))
-            addForwarders(jclass, lmoc.moduleClass)
-          }
-        }
-      }
-
-      clasz.fields foreach genField
-      clasz.methods foreach genMethod
-
-      val ssa = scalaSignatureAddingMarker(jclass, c.symbol)
-      addGenericSignature(jclass, c.symbol, c.symbol.owner)
-      addAnnotations(jclass, c.symbol.annotations ++ ssa)
-      addEnclosingMethodAttribute(jclass, c.symbol)
-      emitClass(jclass, c.symbol)
-
-      if (c.symbol hasAnnotation BeanInfoAttr)
-        genBeanInfoClass(c)
-    }
-
-    private def addEnclosingMethodAttribute(jclass: JClass, clazz: Symbol) {
-      val sym = clazz.originalEnclosingMethod
-      if (sym.isMethod) {
-        debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, sym.enclClass))
-        jclass addAttribute fjbgContext.JEnclosingMethodAttribute(
-          jclass,
-          javaName(sym.enclClass),
-          javaName(sym),
-          javaType(sym)
-        )
-      } else if (clazz.isAnonymousClass) {
-        val enclClass = clazz.rawowner
-        assert(enclClass.isClass, enclClass)
-        val sym = enclClass.primaryConstructor
-        if (sym == NoSymbol)
-          log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(
-            enclClass, clazz)
-          )
-        else {
-          debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
-          jclass addAttribute fjbgContext.JEnclosingMethodAttribute(
-            jclass,
-            javaName(enclClass),
-            javaName(sym),
-            javaType(sym).asInstanceOf[JMethodType]
-          )
-        }
-      }
-    }
-
-    private def toByteArray(jc: JClass): Array[Byte] = {
-      val bos = new java.io.ByteArrayOutputStream()
-      val dos = new java.io.DataOutputStream(bos)
-      jc.writeTo(dos)
-      dos.close()
-      bos.toByteArray
-    }
-
-    /**
-     * Generate a bean info class that describes the given class.
-     *
-     * @author Ross Judson (ross.judson at soletta.com)
-     */
-    def genBeanInfoClass(c: IClass) {
-      val description = c.symbol getAnnotation BeanDescriptionAttr
-      // informProgress(description.toString)
-
-      val beanInfoClass = fjbgContext.JClass(javaFlags(c.symbol),
-            javaName(c.symbol) + "BeanInfo",
-            "scala/beans/ScalaBeanInfo",
-            JClass.NO_INTERFACES,
-            c.cunit.source.toString)
-
-      var fieldList = List[String]()
-      for (f <- clasz.fields if f.symbol.hasGetter;
-           g = f.symbol.getter(c.symbol);
-           s = f.symbol.setter(c.symbol);
-           if g.isPublic && !(f.symbol.name startsWith "$"))  // inserting $outer breaks the bean
-        fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList
-      val methodList =
-        for (m <- clasz.methods
-             if !m.symbol.isConstructor &&
-             m.symbol.isPublic &&
-             !(m.symbol.name startsWith "$") &&
-             !m.symbol.isGetter &&
-             !m.symbol.isSetter) yield javaName(m.symbol)
-
-      val constructor = beanInfoClass.addNewMethod(ACC_PUBLIC, "<init>", JType.VOID, new Array[JType](0), new Array[String](0))
-      val jcode = constructor.getCode().asInstanceOf[JExtendedCode]
-      val strKind = new JObjectType(javaName(StringClass))
-      val stringArrayKind = new JArrayType(strKind)
-      val conType = new JMethodType(JType.VOID, Array(javaType(ClassClass), stringArrayKind, stringArrayKind))
-
-      def push(lst:Seq[String]) {
-        var fi = 0
-        for (f <- lst) {
-          jcode.emitDUP()
-          jcode emitPUSH fi
-          if (f != null)
-            jcode emitPUSH f
-          else
-            jcode.emitACONST_NULL()
-          jcode emitASTORE strKind
-          fi += 1
-        }
-      }
-
-      jcode.emitALOAD_0()
-      // push the class
-      jcode emitPUSH javaType(c.symbol).asInstanceOf[JReferenceType]
-
-      // push the string array of field information
-      jcode emitPUSH fieldList.length
-      jcode emitANEWARRAY strKind
-      push(fieldList)
-
-      // push the string array of method information
-      jcode emitPUSH methodList.length
-      jcode emitANEWARRAY strKind
-      push(methodList)
-
-      // invoke the superclass constructor, which will do the
-      // necessary java reflection and create Method objects.
-      jcode.emitINVOKESPECIAL("scala/beans/ScalaBeanInfo", "<init>", conType)
-      jcode.emitRETURN()
-
-      // write the bean information class file.
-      writeClass("BeanInfo ", beanInfoClass.getName(), toByteArray(beanInfoClass), c.symbol)
-    }
-
-    /** Add the given 'throws' attributes to jmethod */
-    def addExceptionsAttribute(jmethod: JMethod, excs: List[AnnotationInfo]) {
-      if (excs.isEmpty) return
-
-      val cpool = jmethod.getConstantPool
-      val buf: ByteBuffer = ByteBuffer.allocate(512)
-      var nattr = 0
-
-      // put some random value; the actual number is determined at the end
-      buf putShort 0xbaba.toShort
-
-      for (ThrownException(exc) <- excs.distinct) {
-        buf.putShort(
-          cpool.addClass(
-            javaName(exc)).shortValue)
-        nattr += 1
-      }
-
-      assert(nattr > 0, nattr)
-      buf.putShort(0, nattr.toShort)
-      addAttribute(jmethod, tpnme.ExceptionsATTR, buf)
-    }
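(The 0xbaba short written first is only a placeholder: the entry count is unknown until the loop finishes, so it is patched in afterwards with an absolute putShort. A self-contained sketch of that write-then-patch pattern, with illustrative names:)

    import java.nio.ByteBuffer

    def writeEntries(entries: List[Int]): Array[Byte] = {
      val buf = ByteBuffer.allocate(512)
      val countPos = buf.position()
      buf.putShort(0xbaba.toShort)                     // placeholder count
      entries foreach { e => buf.putShort(e.toShort) } // the actual entries
      buf.putShort(countPos, entries.length.toShort)   // patch in the real count
      buf.array.take(buf.position())
    }
    // writeEntries(List(1, 2, 3)).length == 8  (2 bytes of count + 3 * 2 bytes of entries)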
-
-    /** Whether an annotation should be emitted as a Java annotation
-     *   .initialize: if 'annot' is read from pickle, atp might be un-initialized
-     */
-    private def shouldEmitAnnotation(annot: AnnotationInfo) =
-      annot.symbol.initialize.isJavaDefined &&
-      annot.matches(ClassfileAnnotationClass) &&
-      annot.args.isEmpty
-
-    private def emitJavaAnnotations(cpool: JConstantPool, buf: ByteBuffer, annotations: List[AnnotationInfo]): Int = {
-      def emitArgument(arg: ClassfileAnnotArg): Unit = arg match {
-        case LiteralAnnotArg(const) =>
-          const.tag match {
-            case BooleanTag =>
-              buf put 'Z'.toByte
-              buf putShort cpool.addInteger(if(const.booleanValue) 1 else 0).toShort
-            case ByteTag    =>
-              buf put 'B'.toByte
-              buf putShort cpool.addInteger(const.byteValue).toShort
-            case ShortTag   =>
-              buf put 'S'.toByte
-              buf putShort cpool.addInteger(const.shortValue).toShort
-            case CharTag    =>
-              buf put 'C'.toByte
-              buf putShort cpool.addInteger(const.charValue).toShort
-            case IntTag     =>
-              buf put 'I'.toByte
-              buf putShort cpool.addInteger(const.intValue).toShort
-            case LongTag    =>
-              buf put 'J'.toByte
-              buf putShort cpool.addLong(const.longValue).toShort
-            case FloatTag   =>
-              buf put 'F'.toByte
-              buf putShort cpool.addFloat(const.floatValue).toShort
-            case DoubleTag  =>
-              buf put 'D'.toByte
-              buf putShort cpool.addDouble(const.doubleValue).toShort
-            case StringTag  =>
-              buf put 's'.toByte
-              buf putShort cpool.addUtf8(const.stringValue).toShort
-            case ClazzTag   =>
-              buf put 'c'.toByte
-              buf putShort cpool.addUtf8(javaType(const.typeValue).getSignature()).toShort
-            case EnumTag =>
-              buf put 'e'.toByte
-              buf putShort cpool.addUtf8(javaType(const.tpe).getSignature()).toShort
-              buf putShort cpool.addUtf8(const.symbolValue.name.toString).toShort
-          }
-
-        case sb @ ScalaSigBytes(bytes) if !sb.isLong =>
-          buf put 's'.toByte
-          buf putShort cpool.addUtf8(sb.encodedBytes).toShort
-
-        case sb @ ScalaSigBytes(bytes) if sb.isLong =>
-          buf put '['.toByte
-          val stringCount = (sb.encodedBytes.length / 65534) + 1
-          buf putShort stringCount.toShort
-          for (i <- 0 until stringCount) {
-            buf put 's'.toByte
-            val j = i * 65535
-            val string = sb.encodedBytes.slice(j, j + 65535)
-            buf putShort cpool.addUtf8(string).toShort
-          }
-
-        case ArrayAnnotArg(args) =>
-          buf put '['.toByte
-          buf putShort args.length.toShort
-          args foreach emitArgument
-
-        case NestedAnnotArg(annInfo) =>
-          buf put '@'.toByte
-          emitAnnotation(annInfo)
-      }
-
-      def emitAnnotation(annotInfo: AnnotationInfo) {
-        val AnnotationInfo(typ, args, assocs) = annotInfo
-        val jtype = javaType(typ)
-        buf putShort cpool.addUtf8(jtype.getSignature()).toShort
-        assert(args.isEmpty, args)
-        buf putShort assocs.length.toShort
-        for ((name, value) <- assocs) {
-          buf putShort cpool.addUtf8(name.toString).toShort
-          emitArgument(value)
-        }
-      }
-
-      var nannots = 0
-      val pos = buf.position()
-
-      // put some random value; the actual number of annotations is determined at the end
-      buf putShort 0xbaba.toShort
-
-      for (annot <- annotations if shouldEmitAnnotation(annot)) {
-        nannots += 1
-        emitAnnotation(annot)
-      }
-
-      // save the number of annotations
-      buf.putShort(pos, nannots.toShort)
-      nannots
-    }
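(The sb.isLong branch above exists because a single constant-pool UTF-8 entry is capped at 65535 bytes, so an over-long pickled signature is written as an array of string chunks that are concatenated again on the reading side. A rough sketch of the splitting; the chunk size here is illustrative:)

    def chunkSignature(encoded: String, chunkSize: Int = 65535): List[String] =
      if (encoded.isEmpty) List("") else encoded.grouped(chunkSize).toList
    // chunkSignature("ab" * 40000).map(_.length) == List(65535, 14465)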
-
-    // @M don't generate java generics sigs for (members of) implementation
-    // classes, as they are monomorphic (TODO: ok?)
-    private def needsGenericSignature(sym: Symbol) = !(
-      // PP: This condition used to include sym.hasExpandedName, but this leads
-      // to the total loss of generic information if a private member is
-      // accessed from a closure: both the field and the accessor were generated
-      // without it.  This is particularly bad because the availability of
-      // generic information could disappear as a consequence of a seemingly
-      // unrelated change.
-         settings.Ynogenericsig.value
-      || sym.isArtifact
-      || sym.isLiftedMethod
-      || sym.isBridge
-      || (sym.ownerChain exists (_.isImplClass))
-    )
-    def addGenericSignature(jmember: JMember, sym: Symbol, owner: Symbol) {
-      if (needsGenericSignature(sym)) {
-        val memberTpe = beforeErasure(owner.thisType.memberInfo(sym))
-
-        erasure.javaSig(sym, memberTpe) foreach { sig =>
-          // This seems useful enough in the general case.
-          log(sig)
-          if (checkSignatures) {
-            val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe))
-            val bytecodeTpe = owner.thisType.memberInfo(sym)
-            if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
-              clasz.cunit.warning(sym.pos,
-                  """|compiler bug: created generic signature for %s in %s that does not conform to its erasure
-                     |signature: %s
-                     |original type: %s
-                     |normalized type: %s
-                     |erasure type: %s
-                     |if this is reproducible, please report bug at https://issues.scala-lang.org/
-                  """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe))
-               return
-            }
-          }
-          val index = jmember.getConstantPool.addUtf8(sig).toShort
-          if (opt.verboseDebug)
-            beforeErasure(println("add generic sig "+sym+":"+sym.info+" ==> "+sig+" @ "+index))
-
-          val buf = ByteBuffer.allocate(2)
-          buf putShort index
-          addAttribute(jmember, tpnme.SignatureATTR, buf)
-        }
-      }
-    }
-
-    def addAnnotations(jmember: JMember, annotations: List[AnnotationInfo]) {
-      if (annotations exists (_ matches definitions.DeprecatedAttr)) {
-        val attr = jmember.getContext().JOtherAttribute(
-          jmember.getJClass(), jmember, tpnme.DeprecatedATTR.toString,
-          new Array[Byte](0), 0)
-        jmember addAttribute attr
-      }
-
-      val toEmit = annotations filter shouldEmitAnnotation
-      if (toEmit.isEmpty) return
-
-      val buf: ByteBuffer = ByteBuffer.allocate(2048)
-      emitJavaAnnotations(jmember.getConstantPool, buf, toEmit)
-      addAttribute(jmember, tpnme.RuntimeAnnotationATTR, buf)
-    }
-
-    def addParamAnnotations(jmethod: JMethod, pannotss: List[List[AnnotationInfo]]) {
-      val annotations = pannotss map (_ filter shouldEmitAnnotation)
-      if (annotations forall (_.isEmpty)) return
-
-      val buf: ByteBuffer = ByteBuffer.allocate(2048)
-
-      // number of parameters
-      buf.put(annotations.length.toByte)
-      for (annots <- annotations)
-        emitJavaAnnotations(jmethod.getConstantPool, buf, annots)
-
-      addAttribute(jmethod, tpnme.RuntimeParamAnnotationATTR, buf)
-    }
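(The parameter-annotations attribute begins with a one-byte parameter count followed by one annotations block per parameter, including parameters whose filtered list is empty, which is why the method writes annotations.length first and then loops over every list. Roughly, per JVMS 4.7.18:)

    // RuntimeVisibleParameterAnnotations layout, as emitted above:
    //   u1 num_parameters
    //   { u2 num_annotations; annotation annotations[num_annotations]; } x num_parameters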
-
-    def addAttribute(jmember: JMember, name: Name, buf: ByteBuffer) {
-      if (buf.position() < 2)
-        return
-
-      val length = buf.position()
-      val arr = buf.array().slice(0, length)
-
-      val attr = jmember.getContext().JOtherAttribute(jmember.getJClass(),
-                                                      jmember,
-                                                      name.toString,
-                                                      arr,
-                                                      length)
-      jmember addAttribute attr
-    }
-
-    def addInnerClasses(jclass: JClass) {
-      /** The outer name for this inner class. Note that it returns null
-       *  when the inner class should not get an index in the constant pool.
-       *  That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
-       */
-      def outerName(innerSym: Symbol): String = {
-        if (innerSym.originalEnclosingMethod != NoSymbol)
-          null
-        else {
-          val outerName = javaName(innerSym.rawowner)
-          if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
-          else outerName
-        }
-      }
-
-      def innerName(innerSym: Symbol): String =
-        if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
-          null
-        else
-          innerSym.rawname + innerSym.moduleSuffix
-
-      // add inner classes which might not have been referenced yet
-      afterErasure {
-        for (sym <- List(clasz.symbol, clasz.symbol.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
-          innerClassBuffer += m
-      }
-
-      val allInners = innerClassBuffer.toList
-      if (allInners.nonEmpty) {
-        debuglog(clasz.symbol.fullName('.') + " contains " + allInners.size + " inner classes.")
-        val innerClassesAttr = jclass.getInnerClasses()
-        // sort them so inner classes succeed their enclosing class
-        // to satisfy the Eclipse Java compiler
-        for (innerSym <- allInners sortBy (_.name.length)) {
-          val flags = {
-            val staticFlag = if (innerSym.rawowner.hasModuleFlag) ACC_STATIC else 0
-            (javaFlags(innerSym) | staticFlag) & INNER_CLASSES_FLAGS
-          }
-          val jname = javaName(innerSym)
-          val oname = outerName(innerSym)
-          val iname = innerName(innerSym)
-
-          // Mimicking javap inner class output
-          debuglog(
-            if (oname == null || iname == null) "//class " + jname
-            else "//%s=class %s of class %s".format(iname, jname, oname)
-          )
-
-          innerClassesAttr.addEntry(jname, oname, iname, flags)
-        }
-      }
-    }
-
-    def genField(f: IField) {
-      debuglog("Adding field: " + f.symbol.fullName)
-
-      val jfield = jclass.addNewField(
-        javaFieldFlags(f.symbol),
-        javaName(f.symbol),
-        javaType(f.symbol.tpe)
-      )
-
-      addGenericSignature(jfield, f.symbol, clasz.symbol)
-      addAnnotations(jfield, f.symbol.annotations)
-    }
-
-    def genMethod(m: IMethod) {
-      if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return
-
-      debuglog("Generating method " + m.symbol.fullName)
-      method = m
-      endPC.clear
-      computeLocalVarsIndex(m)
-
-      var resTpe = javaType(m.symbol.tpe.resultType)
-      if (m.symbol.isClassConstructor)
-        resTpe = JType.VOID
-
-      var flags = javaFlags(m.symbol)
-      if (jclass.isInterface)
-        flags |= ACC_ABSTRACT
-
-      if (m.symbol.isStrictFP)
-        flags |= ACC_STRICT
-
-      // native methods of objects are generated in mirror classes
-      if (method.native)
-        flags |= ACC_NATIVE
-
-      jmethod = jclass.addNewMethod(flags,
-                                    javaName(m.symbol),
-                                    resTpe,
-                                    mkArray(m.params map (p => javaType(p.kind))),
-                                    mkArray(m.params map (p => javaName(p.sym))))
-
-      addRemoteException(jmethod, m.symbol)
-
-      if (!jmethod.isAbstract() && !method.native) {
-        val jcode = jmethod.getCode().asInstanceOf[JExtendedCode]
-
-        // add a fake local for debugging purposes
-        if (emitVars && isClosureApply(method.symbol)) {
-          val outerField = clasz.symbol.info.decl(nme.OUTER_LOCAL)
-          if (outerField != NoSymbol) {
-            log("Adding fake local to represent outer 'this' for closure " + clasz)
-            val _this = new Local(
-              method.symbol.newVariable(nme.FAKE_LOCAL_THIS), toTypeKind(outerField.tpe), false)
-            m.locals = m.locals ::: List(_this)
-            computeLocalVarsIndex(m) // since we added a new local, we need to recompute indexes
-
-            jcode.emitALOAD_0()
-            jcode.emitGETFIELD(javaName(clasz.symbol),
-                               javaName(outerField),
-                               javaType(outerField))
-            jcode.emitSTORE(indexOf(_this), javaType(_this.kind))
-          }
-        }
-
-        for (local <- m.locals if ! m.params.contains(local)) {
-          debuglog("add local var: " + local)
-          jmethod.addNewLocalVariable(javaType(local.kind), javaName(local.sym))
-        }
-
-        genCode(m)
-        if (emitVars)
-          genLocalVariableTable(m, jcode)
-      }
-
-      addGenericSignature(jmethod, m.symbol, clasz.symbol)
-      val (excs, others) = m.symbol.annotations partition (_.symbol == ThrowsClass)
-      addExceptionsAttribute(jmethod, excs)
-      addAnnotations(jmethod, others)
-      addParamAnnotations(jmethod, m.params.map(_.sym.annotations))
-
-      // check for code size
-      try jmethod.freeze()
-      catch {
-        case e: JCode.CodeSizeTooBigException =>
-          clasz.cunit.error(m.symbol.pos, "Code size exceeds JVM limits: %d".format(e.codeSize))
-          throw e
-      }
-    }
-
-    /** Adds a @remote annotation, actual use unknown.
-     */
-    private def addRemoteException(jmethod: JMethod, meth: Symbol) {
-      val needsAnnotation = (
-        (isRemoteClass || (meth hasAnnotation RemoteAttr) && jmethod.isPublic)
-          && !(meth.throwsAnnotations contains RemoteExceptionClass)
-      )
-      if (needsAnnotation) {
-        val c   = Constant(RemoteExceptionClass.tpe)
-        val arg = Literal(c) setType c.tpe
-        meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
-      }
-    }
-
-    private def isClosureApply(sym: Symbol): Boolean = {
-      (sym.name == nme.apply) &&
-      sym.owner.isSynthetic &&
-      sym.owner.tpe.parents.exists { t =>
-        val TypeRef(_, sym, _) = t
-        FunctionClass contains sym
-      }
-    }
-
-    def addModuleInstanceField() {
-      jclass.addNewField(PublicStaticFinal,
-                        nme.MODULE_INSTANCE_FIELD.toString,
-                        jclass.getType())
-    }
-
-    def addStaticInit(cls: JClass, mopt: Option[IMethod]) {
-      val clinitMethod = cls.addNewMethod(PublicStatic,
-                                          "<clinit>",
-                                          JType.VOID,
-                                          JType.EMPTY_ARRAY,
-                                          new Array[String](0))
-      val clinit = clinitMethod.getCode().asInstanceOf[JExtendedCode]
-
-      mopt match {
-        case Some(m) =>
-          val oldLastBlock = m.lastBlock
-          val lastBlock = m.newBlock()
-          oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
-
-          if (isStaticModule(clasz.symbol)) {
-            // call object's private ctor from static ctor
-            lastBlock emit NEW(REFERENCE(m.symbol.enclClass))
-            lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(true))
-          }
-
-          // add serialVUID code
-          serialVUID foreach { value =>
-            import Flags._, definitions._
-            val fieldName = "serialVersionUID"
-            val fieldSymbol = clasz.symbol.newValue(newTermName(fieldName), NoPosition, STATIC | FINAL) setInfo LongClass.tpe
-            clasz addField new IField(fieldSymbol)
-            lastBlock emit CONSTANT(Constant(value))
-            lastBlock emit STORE_FIELD(fieldSymbol, true)
-          }
-
-          if (isParcelableClass)
-            addCreatorCode(BytecodeGenerator.this, lastBlock)
-
-          lastBlock emit RETURN(UNIT)
-          lastBlock.close
-
-          method = m
-          jmethod = clinitMethod
-          genCode(m)
-        case None =>
-          legacyStaticInitializer(cls, clinit)
-      }
-    }
-
-    private def legacyStaticInitializer(cls: JClass, clinit: JExtendedCode) {
-      if (isStaticModule(clasz.symbol)) {
-        clinit emitNEW cls.getName()
-        clinit.emitINVOKESPECIAL(cls.getName(),
-                                 JMethod.INSTANCE_CONSTRUCTOR_NAME,
-                                 JMethodType.ARGLESS_VOID_FUNCTION)
-      }
-
-      serialVUID foreach { value =>
-        val fieldName = "serialVersionUID"
-        jclass.addNewField(PublicStaticFinal, fieldName, JType.LONG)
-        clinit emitPUSH value
-        clinit.emitPUTSTATIC(jclass.getName(), fieldName, JType.LONG)
-      }
-
-      if (isParcelableClass)
-        legacyAddCreatorCode(BytecodeGenerator.this, clinit)
-
-      clinit.emitRETURN()
-    }
-
-    /** Add a forwarder for method m */
-    def addForwarder(jclass: JClass, module: Symbol, m: Symbol) {
-      val moduleName     = javaName(module)
-      val methodInfo     = module.thisType.memberInfo(m)
-      val paramJavaTypes = methodInfo.paramTypes map javaType
-      val paramNames     = 0 until paramJavaTypes.length map ("x_" + _)
-      // TODO: evaluate the other flags we might be dropping on the floor here.
-      val flags = PublicStatic | (
-        if (m.isVarargsMethod) ACC_VARARGS else 0
-      )
-
-      /** Forwarders must not be marked final, as the JVM will not allow
-       *  redefinition of a final static method, and we don't know what classes
-       *  might be subclassing the companion class.  See SI-4827.
-       */
-      val mirrorMethod = jclass.addNewMethod(
-        flags,
-        javaName(m),
-        javaType(methodInfo.resultType),
-        mkArray(paramJavaTypes),
-        mkArray(paramNames))
-      val mirrorCode = mirrorMethod.getCode().asInstanceOf[JExtendedCode]
-      mirrorCode.emitGETSTATIC(moduleName,
-                               nme.MODULE_INSTANCE_FIELD.toString,
-                               new JObjectType(moduleName))
-
-      var i = 0
-      var index = 0
-      var argTypes = mirrorMethod.getArgumentTypes()
-      while (i < argTypes.length) {
-        mirrorCode.emitLOAD(index, argTypes(i))
-        index += argTypes(i).getSize()
-        i += 1
-      }
-
-      mirrorCode.emitINVOKEVIRTUAL(moduleName, mirrorMethod.getName, javaType(m).asInstanceOf[JMethodType])
-      mirrorCode emitRETURN mirrorMethod.getReturnType()
-
-      addRemoteException(mirrorMethod, m)
-      // only add generic signature if the method is concrete; bug #1745
-      if (!m.isDeferred)
-        addGenericSignature(mirrorMethod, m, module)
-
-      val (throws, others) = m.annotations partition (_.symbol == ThrowsClass)
-      addExceptionsAttribute(mirrorMethod, throws)
-      addAnnotations(mirrorMethod, others)
-      addParamAnnotations(mirrorMethod, m.info.params.map(_.annotations))
-    }
-
-    /** Add forwarders for all methods defined in `module` that don't conflict
-     *  with methods in the companion class of `module`. A conflict arises when
-     *  a method with the same name is defined both in a class and its companion
-     *  object: method signature is not taken into account.
-     */
-    def addForwarders(jclass: JClass, moduleClass: Symbol) {
-      assert(moduleClass.isModuleClass, moduleClass)
-      debuglog("Dumping mirror class for object: " + moduleClass)
-
-      val className    = jclass.getName
-      val linkedClass  = moduleClass.companionClass
-      val linkedModule = linkedClass.companionSymbol
-      lazy val conflictingNames: Set[Name] = {
-        linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name } toSet
-      }
-      debuglog("Potentially conflicting names for forwarders: " + conflictingNames)
-
-      for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) {
-        if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor)
-          debuglog("No forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
-        else if (conflictingNames(m.name))
-          log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
-        else {
-          log("Adding static forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
-          addForwarder(jclass, moduleClass, m)
-        }
-      }
-    }
-
-    /** Generate a mirror class for a top-level module. A mirror class is a class
-     *  containing only static methods that forward to the corresponding method
-     *  on the MODULE instance of the given Scala object.  It will only be
-     *  generated if there is no companion class: if there is, an attempt will
-     *  instead be made to add the forwarder methods to the companion class.
-     */
-    def generateMirrorClass(clasz: Symbol, sourceFile: SourceFile) {
-      import JAccessFlags._
-      /* We need to save the inner classes buffer and create a new one to make sure
-       * that we do not confuse inner classes of the class we mirror with inner
-       * classes of the class we are mirroring. These two sets can be different
-       * as seen in this case:
-       *
-       *  class A {
-       *   class B
-       *   def b: B = new B
-       *  }
-       *  object C extends A
-       *
-       *  Here the mirror class of C has a static forwarder for the (inherited) method `b`,
-       *  therefore it refers to class `B` and needs an InnerClasses entry. However,
-       *  the real class for `C` (named `C$`) is empty and does not refer to `B`,
-       *  and thus does not need an InnerClasses entry for it.
-       *
-       *  NOTE: This logic has been refactored in GenASM and everything is
-       *  implemented in a much cleaner way by having two separate buffers.
-       */
-      val savedInnerClasses = innerClassBuffer
-      innerClassBuffer = mutable.LinkedHashSet[Symbol]()
-      val moduleName = javaName(clasz) // + "$"
-      val mirrorName = moduleName.substring(0, moduleName.length() - 1)
-      val mirrorClass = fjbgContext.JClass(ACC_SUPER | ACC_PUBLIC | ACC_FINAL,
-                                           mirrorName,
-                                           JAVA_LANG_OBJECT.getName,
-                                           JClass.NO_INTERFACES,
-                                           "" + sourceFile)
-
-      log("Dumping mirror class for '%s'".format(mirrorClass.getName))
-      addForwarders(mirrorClass, clasz)
-      val ssa = scalaSignatureAddingMarker(mirrorClass, clasz.companionSymbol)
-      addAnnotations(mirrorClass, clasz.annotations ++ ssa)
-      emitClass(mirrorClass, clasz)
-      innerClassBuffer = savedInnerClasses
-    }
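(As a source-level intuition for what generateMirrorClass emits, illustrative only since the generator writes bytecode directly: for a top-level object with no companion class, the module class holds the implementation and the mirror class carries only static forwarders.)

    // object Foo { def bar(x: Int): Int = x + 1 }
    //
    // module class (holds the implementation and the singleton instance):
    //   public final class Foo$ { public static final Foo$ MODULE$ = ...; public int bar(int x) { ... } }
    //
    // mirror class (static forwarders only):
    //   public final class Foo { public static int bar(int x) { return Foo$.MODULE$.bar(x); } }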
-
-    var linearization: List[BasicBlock] = Nil
-    var isModuleInitialized = false
-
-    /**
-     *  @param m ...
-     */
-    def genCode(m: IMethod) {
-      val jcode = jmethod.getCode.asInstanceOf[JExtendedCode]
-
-      def makeLabels(bs: List[BasicBlock]) = {
-        debuglog("Making labels for: " + method)
-
-        mutable.HashMap(bs map (_ -> jcode.newLabel) : _*)
-      }
-
-      isModuleInitialized = false
-
-      linearization = linearizer.linearize(m)
-      val labels = makeLabels(linearization)
-
-      var nextBlock: BasicBlock = linearization.head
-
-      def genBlocks(l: List[BasicBlock]): Unit = l match {
-        case Nil => ()
-        case x :: Nil => nextBlock = null; genBlock(x)
-        case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
-      }
-
-      /** Generate exception handlers for the current method. */
-      def genExceptionHandlers() {
-
-        /** Return a list of pairs of intervals where the handler is active.
-         *  The intervals in the list have to be inclusive in the beginning and
-         *  exclusive in the end: [start, end).
-         */
-        def ranges(e: ExceptionHandler): List[(Int, Int)] = {
-          var covered = e.covered
-          var ranges: List[(Int, Int)] = Nil
-          var start = -1
-          var end = -1
-
-          linearization foreach { b =>
-            if (! (covered contains b) ) {
-              if (start >= 0) { // we're inside a handler range
-                end = labels(b).getAnchor()
-                ranges ::= ((start, end))
-                start = -1
-              }
-            } else {
-              if (start < 0)  // we're not inside a handler range
-                start = labels(b).getAnchor()
-
-              end = endPC(b)
-              covered -= b
-            }
-          }
-
-          /* Add the last interval. Note that since the intervals are
-           * open-ended to the right, we have to give a number past the actual
-           * code!
-           */
-          if (start >= 0) {
-            ranges ::= ((start, jcode.getPC()))
-          }
-
-          if (!covered.isEmpty)
-            debuglog("Some covered blocks were not found in method: " + method +
-                  " covered: " + covered + " not in " + linearization)
-          ranges
-        }
-
-        for (e <- this.method.exh ; p <- ranges(e).sortBy(_._1)) {
-          if (p._1 < p._2) {
-            debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
-                  " from: " + p._1 + " to: " + p._2 + " catching: " + e.cls);
-            val cls = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
-                      else javaName(e.cls)
-            jcode.addExceptionHandler(p._1, p._2,
-                                      labels(e.startBlock).getAnchor(),
-                                      cls)
-          } else
-            log("Empty exception range: " + p)
-        }
-      }
-
-      def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = {
-        target.isPublic || target.isProtected && {
-          (site.enclClass isSubClass target.enclClass) ||
-          (site.enclosingPackage == target.privateWithin)
-        }
-      }
-
-      def genCallMethod(call: CALL_METHOD) {
-        val CALL_METHOD(method, style) = call
-        val siteSymbol  = clasz.symbol
-        val hostSymbol  = call.hostClass
-        val methodOwner = method.owner
-        // info calls so that types are up to date; erasure may add lateINTERFACE to traits
-        hostSymbol.info ; methodOwner.info
-
-        def needsInterfaceCall(sym: Symbol) = (
-             sym.isInterface
-          || sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
-        )
-        // whether to reference the type of the receiver or
-        // the type of the method owner
-        val useMethodOwner = (
-             style != Dynamic
-          || hostSymbol.isBottomClass
-          || methodOwner == ObjectClass
-        )
-        val receiver = if (useMethodOwner) methodOwner else hostSymbol
-        val jowner   = javaName(receiver)
-        val jname    = javaName(method)
-        val jtype    = javaType(method).asInstanceOf[JMethodType]
-
-        def dbg(invoke: String) {
-          debuglog("%s %s %s.%s:%s".format(invoke, receiver.accessString, jowner, jname, jtype))
-        }
-
-        def initModule() {
-          // we initialize the MODULE$ field immediately after the super ctor
-          if (isStaticModule(siteSymbol) && !isModuleInitialized &&
-              jmethod.getName() == JMethod.INSTANCE_CONSTRUCTOR_NAME &&
-              jname == JMethod.INSTANCE_CONSTRUCTOR_NAME) {
-            isModuleInitialized = true
-            jcode.emitALOAD_0()
-            jcode.emitPUTSTATIC(jclass.getName(),
-                                nme.MODULE_INSTANCE_FIELD.toString,
-                                jclass.getType())
-          }
-        }
-
-        style match {
-          case Static(true)                            => dbg("invokespecial");    jcode.emitINVOKESPECIAL(jowner, jname, jtype)
-          case Static(false)                           => dbg("invokestatic");      jcode.emitINVOKESTATIC(jowner, jname, jtype)
-          case Dynamic if needsInterfaceCall(receiver) => dbg("invokeinterface"); jcode.emitINVOKEINTERFACE(jowner, jname, jtype)
-          case Dynamic                                 => dbg("invokevirtual");    jcode.emitINVOKEVIRTUAL(jowner, jname, jtype)
-          case SuperCall(_)                            =>
-            dbg("invokespecial")
-            jcode.emitINVOKESPECIAL(jowner, jname, jtype)
-            initModule()
-        }
-      }
-
-      def genBlock(b: BasicBlock) {
-        labels(b).anchorToNext()
-
-        debuglog("Generating code for block: " + b + " at pc: " + labels(b).getAnchor())
-        var lastMappedPC = 0
-        var lastLineNr = 0
-        var crtPC = 0
-
-        /** local variables whose scope appears in this block. */
-        val varsInBlock: mutable.Set[Local] = new mutable.HashSet
-        val lastInstr = b.lastInstruction
-
-        for (instr <- b) {
-          instr match {
-            case THIS(clasz)           => jcode.emitALOAD_0()
-
-            case CONSTANT(const)       => genConstant(jcode, const)
-
-            case LOAD_ARRAY_ITEM(kind) =>
-              if(kind.isRefOrArrayType) { jcode.emitAALOAD() }
-              else {
-                (kind: @unchecked) match {
-                  case UNIT            => throw new IllegalArgumentException("invalid type for aload " + kind)
-                  case BOOL | BYTE     => jcode.emitBALOAD()
-                  case SHORT           => jcode.emitSALOAD()
-                  case CHAR            => jcode.emitCALOAD()
-                  case INT             => jcode.emitIALOAD()
-                  case LONG            => jcode.emitLALOAD()
-                  case FLOAT           => jcode.emitFALOAD()
-                  case DOUBLE          => jcode.emitDALOAD()
-                }
-              }
-
-            case LOAD_LOCAL(local)     => jcode.emitLOAD(indexOf(local), javaType(local.kind))
-
-            case lf @ LOAD_FIELD(field, isStatic) =>
-              var owner = javaName(lf.hostClass)
-              debuglog("LOAD_FIELD with owner: " + owner +
-                    " flags: " + Flags.flagsToString(field.owner.flags))
-              val fieldJName = javaName(field)
-              val fieldJType = javaType(field)
-              if (isStatic) jcode.emitGETSTATIC(owner, fieldJName, fieldJType)
-              else          jcode.emitGETFIELD( owner, fieldJName, fieldJType)
-
-            case LOAD_MODULE(module) =>
-              // assert(module.isModule, "Expected module: " + module)
-              debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
-              if (clasz.symbol == module.moduleClass && jmethod.getName() != nme.readResolve.toString)
-                jcode.emitALOAD_0()
-              else
-                jcode.emitGETSTATIC(javaName(module) /* + "$" */ ,
-                                    nme.MODULE_INSTANCE_FIELD.toString,
-                                    javaType(module))
-
-            case STORE_ARRAY_ITEM(kind) =>
-              if(kind.isRefOrArrayType) { jcode.emitAASTORE() }
-              else {
-                (kind: @unchecked) match {
-                  case UNIT            => throw new IllegalArgumentException("invalid type for astore " + kind)
-                  case BOOL | BYTE     => jcode.emitBASTORE()
-                  case SHORT           => jcode.emitSASTORE()
-                  case CHAR            => jcode.emitCASTORE()
-                  case INT             => jcode.emitIASTORE()
-                  case LONG            => jcode.emitLASTORE()
-                  case FLOAT           => jcode.emitFASTORE()
-                  case DOUBLE          => jcode.emitDASTORE()
-                }
-              }
-
-            case STORE_LOCAL(local) =>
-              jcode.emitSTORE(indexOf(local), javaType(local.kind))
-
-            case STORE_THIS(_) =>
-              // this only works for impl classes because the self parameter comes first
-              // in the method signature. If that changes, this code has to be revisited.
-              jcode.emitASTORE_0()
-
-            case STORE_FIELD(field, isStatic) =>
-              val owner = javaName(field.owner)
-              val fieldJName = javaName(field)
-              val fieldJType = javaType(field)
-              if (isStatic) jcode.emitPUTSTATIC(owner, fieldJName, fieldJType)
-              else          jcode.emitPUTFIELD( owner, fieldJName, fieldJType)
-
-            case CALL_PRIMITIVE(primitive) => genPrimitive(primitive, instr.pos)
-
-            /** Special handling to access native Array.clone() */
-            case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
-              val target: String = javaType(call.targetTypeKind).getSignature()
-              jcode.emitINVOKEVIRTUAL(target, "clone", arrayCloneType)
-
-            case call @ CALL_METHOD(method, style) => genCallMethod(call)
-
-            case BOX(kind) =>
-              val Pair(mname, mtype) = jBoxTo(kind)
-              jcode.emitINVOKESTATIC(BoxesRunTime, mname, mtype)
-
-            case UNBOX(kind) =>
-              val Pair(mname, mtype) = jUnboxTo(kind)
-              jcode.emitINVOKESTATIC(BoxesRunTime, mname, mtype)
-
-            case NEW(REFERENCE(cls)) =>
-              val className = javaName(cls)
-              jcode emitNEW className
-
-            case CREATE_ARRAY(elem, 1) =>
-              if(elem.isRefOrArrayType) { jcode emitANEWARRAY javaType(elem).asInstanceOf[JReferenceType] }
-              else                      { jcode emitNEWARRAY  javaType(elem) }
-
-            case CREATE_ARRAY(elem, dims) =>
-              jcode.emitMULTIANEWARRAY(javaType(ArrayN(elem, dims)).asInstanceOf[JReferenceType], dims)
-
-            case IS_INSTANCE(tpe) =>
-              tpe match {
-                case REFERENCE(cls) => jcode emitINSTANCEOF new JObjectType(javaName(cls))
-                case ARRAY(elem)    => jcode emitINSTANCEOF new JArrayType(javaType(elem))
-                case _              => abort("Unknown reference type in IS_INSTANCE: " + tpe)
-              }
-
-            case CHECK_CAST(tpe) =>
-              tpe match {
-                case REFERENCE(cls) => if (cls != ObjectClass) { jcode emitCHECKCAST new JObjectType(javaName(cls)) } // No need to checkcast for Objects
-                case ARRAY(elem)    => jcode emitCHECKCAST new JArrayType(javaType(elem))
-                case _              => abort("Unknown reference type in CHECK_CAST: " + tpe)
-              }
-
-            case SWITCH(tags, branches) =>
-              val tagArray = new Array[Array[Int]](tags.length)
-              var caze = tags
-              var i = 0
-
-              while (i < tagArray.length) {
-                tagArray(i) = new Array[Int](caze.head.length)
-                caze.head.copyToArray(tagArray(i), 0)
-                i += 1
-                caze = caze.tail
-              }
-              val branchArray = jcode.newLabels(tagArray.length)
-              i = 0
-              while (i < branchArray.length) {
-                branchArray(i) = labels(branches(i))
-                i += 1
-              }
-              debuglog("Emitting SWITCH:\ntags: " + tags + "\nbranches: " + branches)
-              jcode.emitSWITCH(tagArray,
-                               branchArray,
-                               labels(branches.last),
-                               MIN_SWITCH_DENSITY)
-              ()
-
-            case JUMP(whereto) =>
-              if (nextBlock != whereto)
-                jcode.emitGOTO_maybe_W(labels(whereto), false) // default to short jumps
-
-            case CJUMP(success, failure, cond, kind) =>
-              if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
-                if (nextBlock == success) {
-                  jcode.emitIF_ICMP(conds(cond.negate()), labels(failure))
-                  // .. and fall through to success label
-                } else {
-                  jcode.emitIF_ICMP(conds(cond), labels(success))
-                  if (nextBlock != failure)
-                    jcode.emitGOTO_maybe_W(labels(failure), false)
-                }
-              } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
-                if (nextBlock == success) {
-                  jcode.emitIF_ACMP(conds(cond.negate()), labels(failure))
-                  // .. and fall through to success label
-                } else {
-                  jcode.emitIF_ACMP(conds(cond), labels(success))
-                  if (nextBlock != failure)
-                    jcode.emitGOTO_maybe_W(labels(failure), false)
-                }
-              } else {
-                (kind: @unchecked) match {
-                  case LONG   => jcode.emitLCMP()
-                  case FLOAT  =>
-                    if (cond == LT || cond == LE) jcode.emitFCMPG()
-                    else jcode.emitFCMPL()
-                  case DOUBLE =>
-                    if (cond == LT || cond == LE) jcode.emitDCMPG()
-                    else jcode.emitDCMPL()
-                }
-                if (nextBlock == success) {
-                  jcode.emitIF(conds(cond.negate()), labels(failure))
-                  // .. and fall through to success label
-                } else {
-                  jcode.emitIF(conds(cond), labels(success));
-                  if (nextBlock != failure)
-                    jcode.emitGOTO_maybe_W(labels(failure), false)
-                }
-              }
-
-            case CZJUMP(success, failure, cond, kind) =>
-              if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
-                if (nextBlock == success) {
-                  jcode.emitIF(conds(cond.negate()), labels(failure))
-                } else {
-                  jcode.emitIF(conds(cond), labels(success))
-                  if (nextBlock != failure)
-                    jcode.emitGOTO_maybe_W(labels(failure), false)
-                }
-              } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
-                val Success = success
-                val Failure = failure
-                (cond, nextBlock) match {
-                  case (EQ, Success) => jcode emitIFNONNULL labels(failure)
-                  case (NE, Failure) => jcode emitIFNONNULL labels(success)
-                  case (EQ, Failure) => jcode emitIFNULL    labels(success)
-                  case (NE, Success) => jcode emitIFNULL    labels(failure)
-                  case (EQ, _) =>
-                    jcode emitIFNULL labels(success)
-                    jcode.emitGOTO_maybe_W(labels(failure), false)
-                  case (NE, _) =>
-                    jcode emitIFNONNULL labels(success)
-                    jcode.emitGOTO_maybe_W(labels(failure), false)
-                  case _ =>
-                }
-              } else {
-                (kind: @unchecked) match {
-                  case LONG   =>
-                    jcode.emitLCONST_0()
-                    jcode.emitLCMP()
-                  case FLOAT  =>
-                    jcode.emitFCONST_0()
-                    if (cond == LT || cond == LE) jcode.emitFCMPG()
-                    else jcode.emitFCMPL()
-                  case DOUBLE =>
-                    jcode.emitDCONST_0()
-                    if (cond == LT || cond == LE) jcode.emitDCMPG()
-                    else jcode.emitDCMPL()
-                }
-                if (nextBlock == success) {
-                  jcode.emitIF(conds(cond.negate()), labels(failure))
-                } else {
-                  jcode.emitIF(conds(cond), labels(success))
-                  if (nextBlock != failure)
-                    jcode.emitGOTO_maybe_W(labels(failure), false)
-                }
-              }
-
-            case RETURN(kind) => jcode emitRETURN javaType(kind)
-
-            case THROW(_)     => jcode.emitATHROW()
-
-            case DROP(kind) =>
-              if(kind.isWideType) jcode.emitPOP2()
-              else                jcode.emitPOP()
-
-            case DUP(kind) =>
-              if(kind.isWideType) jcode.emitDUP2()
-              else                jcode.emitDUP()
-
-            case MONITOR_ENTER() => jcode.emitMONITORENTER()
-
-            case MONITOR_EXIT()  => jcode.emitMONITOREXIT()
-
-            case SCOPE_ENTER(lv) =>
-              varsInBlock += lv
-              lv.start = jcode.getPC()
-
-            case SCOPE_EXIT(lv) =>
-              if (varsInBlock(lv)) {
-                lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
-                varsInBlock -= lv
-              }
-              else if (b.varsInScope(lv)) {
-                lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
-                b.varsInScope -= lv
-              }
-              else dumpMethodAndAbort(method, "Illegal local var nesting")
-
-            case LOAD_EXCEPTION(_) =>
-              ()
-          }
-
-          crtPC = jcode.getPC()
-
-          // assert(instr.pos.source.isEmpty || instr.pos.source.get == (clasz.cunit.source), "sources don't match")
-          // val crtLine = instr.pos.line.get(lastLineNr);
-
-          val crtLine = try {
-            if (instr.pos == NoPosition) lastLineNr else (instr.pos).line // check NoPosition to avoid costly exception
-          } catch {
-            case _: UnsupportedOperationException =>
-              log("Warning: wrong position in: " + method)
-              lastLineNr
-          }
-
-          if (instr eq lastInstr) { endPC(b) = jcode.getPC() }
-
-          //System.err.println("CRTLINE: " + instr.pos + " " +
-          //           /* (if (instr.pos < clasz.cunit.source.content.length) clasz.cunit.source.content(instr.pos) else '*') + */ " " + crtLine);
-
-          if (crtPC > lastMappedPC) {
-            jcode.completeLineNumber(lastMappedPC, crtPC, crtLine)
-            lastMappedPC = crtPC
-            lastLineNr   = crtLine
-          }
-        }
-
-        // local vars that survived this basic block
-        for (lv <- varsInBlock) {
-          lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
-        }
-        for (lv <- b.varsInScope) {
-          lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
-        }
-      }
-
-
-      /**
-       *  @param primitive ...
-       *  @param pos       ...
-       */
-      def genPrimitive(primitive: Primitive, pos: Position) {
-        primitive match {
-          case Negation(kind) =>
-            if(kind.isIntSizedType) { jcode.emitINEG() }
-            else {
-              kind match {
-                case LONG   => jcode.emitLNEG()
-                case FLOAT  => jcode.emitFNEG()
-                case DOUBLE => jcode.emitDNEG()
-                case _ => abort("Impossible to negate a " + kind)
-              }
-            }
-
-          case Arithmetic(op, kind) =>
-            op match {
-              case ADD =>
-                if(kind.isIntSizedType) { jcode.emitIADD() }
-                else {
-                  (kind: @unchecked) match {
-                    case LONG   => jcode.emitLADD()
-                    case FLOAT  => jcode.emitFADD()
-                    case DOUBLE => jcode.emitDADD()
-                  }
-                }
-
-              case SUB =>
-                if(kind.isIntSizedType) { jcode.emitISUB() }
-                else {
-                  (kind: @unchecked) match {
-                    case LONG   => jcode.emitLSUB()
-                    case FLOAT  => jcode.emitFSUB()
-                    case DOUBLE => jcode.emitDSUB()
-                  }
-                }
-
-              case MUL =>
-                if(kind.isIntSizedType) { jcode.emitIMUL() }
-                else {
-                  (kind: @unchecked) match {
-                    case LONG   => jcode.emitLMUL()
-                    case FLOAT  => jcode.emitFMUL()
-                    case DOUBLE => jcode.emitDMUL()
-                  }
-                }
-
-              case DIV =>
-                if(kind.isIntSizedType) { jcode.emitIDIV() }
-                else {
-                  (kind: @unchecked) match {
-                    case LONG   => jcode.emitLDIV()
-                    case FLOAT  => jcode.emitFDIV()
-                    case DOUBLE => jcode.emitDDIV()
-                  }
-                }
-
-              case REM =>
-                if(kind.isIntSizedType) { jcode.emitIREM() }
-                else {
-                  (kind: @unchecked) match {
-                    case LONG   => jcode.emitLREM()
-                    case FLOAT  => jcode.emitFREM()
-                    case DOUBLE => jcode.emitDREM()
-                  }
-                }
-
-              case NOT =>
-                if(kind.isIntSizedType) {
-                  jcode.emitPUSH(-1)
-                  jcode.emitIXOR()
-                } else if(kind == LONG) {
-                  jcode.emitPUSH(-1l)
-                  jcode.emitLXOR()
-                } else {
-                  abort("Impossible to negate an " + kind)
-                }
-
-              case _ =>
-                abort("Unknown arithmetic primitive " + primitive)
-            }
-
-          case Logical(op, kind) => ((op, kind): @unchecked) match {
-            case (AND, LONG) => jcode.emitLAND()
-            case (AND, INT)  => jcode.emitIAND()
-            case (AND, _)    =>
-              jcode.emitIAND()
-              if (kind != BOOL)
-                jcode.emitT2T(javaType(INT), javaType(kind));
-
-            case (OR, LONG) => jcode.emitLOR()
-            case (OR, INT)  => jcode.emitIOR()
-            case (OR, _) =>
-              jcode.emitIOR()
-              if (kind != BOOL)
-                jcode.emitT2T(javaType(INT), javaType(kind));
-
-            case (XOR, LONG) => jcode.emitLXOR()
-            case (XOR, INT)  => jcode.emitIXOR()
-            case (XOR, _) =>
-              jcode.emitIXOR()
-              if (kind != BOOL)
-                jcode.emitT2T(javaType(INT), javaType(kind));
-          }
-
-          case Shift(op, kind) => ((op, kind): @unchecked) match {
-            case (LSL, LONG) => jcode.emitLSHL()
-            case (LSL, INT)  => jcode.emitISHL()
-            case (LSL, _) =>
-              jcode.emitISHL()
-              jcode.emitT2T(javaType(INT), javaType(kind))
-
-            case (ASR, LONG) => jcode.emitLSHR()
-            case (ASR, INT)  => jcode.emitISHR()
-            case (ASR, _) =>
-              jcode.emitISHR()
-              jcode.emitT2T(javaType(INT), javaType(kind))
-
-            case (LSR, LONG) => jcode.emitLUSHR()
-            case (LSR, INT)  => jcode.emitIUSHR()
-            case (LSR, _) =>
-              jcode.emitIUSHR()
-              jcode.emitT2T(javaType(INT), javaType(kind))
-          }
-
-          case Comparison(op, kind) => ((op, kind): @unchecked) match {
-            case (CMP, LONG)    => jcode.emitLCMP()
-            case (CMPL, FLOAT)  => jcode.emitFCMPL()
-            case (CMPG, FLOAT)  => jcode.emitFCMPG()
-            case (CMPL, DOUBLE) => jcode.emitDCMPL()
-            case (CMPG, DOUBLE) => jcode.emitDCMPG()
-          }
-
-          case Conversion(src, dst) =>
-            debuglog("Converting from: " + src + " to: " + dst)
-            if (dst == BOOL) {
-              println("Illegal conversion at: " + clasz + " at: " + pos.source + ":" + pos.line)
-            } else
-              jcode.emitT2T(javaType(src), javaType(dst))
-
-          case ArrayLength(_) =>
-            jcode.emitARRAYLENGTH()
-
-          case StartConcat =>
-            jcode emitNEW StringBuilderClassName
-            jcode.emitDUP()
-            jcode.emitINVOKESPECIAL(StringBuilderClassName,
-                                    JMethod.INSTANCE_CONSTRUCTOR_NAME,
-                                    JMethodType.ARGLESS_VOID_FUNCTION)
-
-          case StringConcat(el) =>
-            val jtype = el match {
-              case REFERENCE(_) | ARRAY(_) => JAVA_LANG_OBJECT
-              case _ => javaType(el)
-            }
-            jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
-                                    "append",
-                                    new JMethodType(StringBuilderType,
-                                    Array(jtype)))
-          case EndConcat =>
-            jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
-                                    "toString",
-                                    toStringType)
-
-          case _ =>
-            abort("Unimplemented primitive " + primitive)
-        }
-      }
-
-      // genCode starts here
-      genBlocks(linearization)
-
-      if (this.method.exh != Nil)
-        genExceptionHandlers;
-    }
-
-
-    /** Emit a Local variable table for debugging purposes.
-     *  Synthetic locals are skipped. All variables are method-scoped.
-     */
-    private def genLocalVariableTable(m: IMethod, jcode: JCode) {
-      val vars = m.locals filterNot (_.sym.isSynthetic)
-      if (vars.isEmpty) return
-
-      val pool = jclass.getConstantPool
-      val pc = jcode.getPC()
-      var anonCounter = 0
-      var entries = 0
-      vars.foreach { lv =>
-        lv.ranges = mergeEntries(lv.ranges.reverse);
-        entries += lv.ranges.length
-      }
-      if (!jmethod.isStatic()) entries += 1
-
-      val lvTab = ByteBuffer.allocate(2 + 10 * entries)
-      def emitEntry(name: String, signature: String, idx: Short, start: Short, end: Short) {
-        lvTab putShort start
-        lvTab putShort end
-        lvTab putShort pool.addUtf8(name).toShort
-        lvTab putShort pool.addUtf8(signature).toShort
-        lvTab putShort idx
-      }
-
-      lvTab.putShort(entries.toShort)
-
-      if (!jmethod.isStatic()) {
-        emitEntry("this", jclass.getType().getSignature(), 0, 0.toShort, pc.toShort)
-      }
-
-      for (lv <- vars) {
-        val name = if (javaName(lv.sym) eq null) {
-          anonCounter += 1
-          "<anon" + anonCounter + ">"
-        } else javaName(lv.sym)
-
-        val index = indexOf(lv).toShort
-        val tpe   = javaType(lv.kind).getSignature()
-        for ((start, end) <- lv.ranges) {
-          emitEntry(name, tpe, index, start.toShort, (end - start).toShort)
-        }
-      }
-      val attr =
-        fjbgContext.JOtherAttribute(jclass,
-                                    jcode,
-                                    tpnme.LocalVariableTableATTR.toString,
-                                    lvTab.array())
-      jcode addAttribute attr
-    }
-
-
-    /** For each basic block, the first PC address following it. */
-    val endPC = new mutable.HashMap[BasicBlock, Int]
-
-    ////////////////////// local vars ///////////////////////
-
-    def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe))
-
-    def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1
-
-    def indexOf(m: IMethod, sym: Symbol): Int = {
-      val Some(local) = m lookupLocal sym
-      indexOf(local)
-    }
-
-    def indexOf(local: Local): Int = {
-      assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ")
-      local.index
-    }
-
-    /**
-     * Compute the indexes of each local variable of the given
-     * method. *Does not assume the parameters come first!*
-     */
-    def computeLocalVarsIndex(m: IMethod) {
-      var idx = if (m.symbol.isStaticMember) 0 else 1;
-
-      for (l <- m.params) {
-        debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
-        l.index = idx
-        idx += sizeOf(l.kind)
-      }
-
-      for (l <- m.locals if !(m.params contains l)) {
-        debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
-        l.index = idx
-        idx += sizeOf(l.kind)
-      }
-    }
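A small self-contained sketch (hypothetical helper, not part of the backend) of the slot assignment performed above: instance methods reserve slot 0 for `this`, and wide kinds (LONG, DOUBLE) occupy two slots.

    def assignSlots(isStatic: Boolean, kindSizes: List[Int]): List[Int] = {
      var idx = if (isStatic) 0 else 1       // slot 0 is `this` for instance methods
      kindSizes map { size => val slot = idx; idx += size; slot }
    }

    // e.g. an instance method taking (Int, Long, String):
    assignSlots(isStatic = false, List(1, 2, 1))   // == List(1, 2, 4); the Long uses slots 2 and 3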
-
-    ////////////////////// Utilities ////////////////////////
-
-    /** Merge adjacent ranges. */
-    private def mergeEntries(ranges: List[(Int, Int)]): List[(Int, Int)] =
-      (ranges.foldLeft(Nil: List[(Int, Int)]) { (collapsed: List[(Int, Int)], p: (Int, Int)) => (collapsed, p) match {
-        case (Nil, _) => List(p)
-        case ((s1, e1) :: rest, (s2, e2)) if (e1 == s2) => (s1, e2) :: rest
-        case _ => p :: collapsed
-      }}).reverse
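For example, with hypothetical ranges (already in ascending order, as arranged by the reversal in genLocalVariableTable above):

      // mergeEntries(List((0, 4), (4, 10), (10, 14)))  == List((0, 14))
      // mergeEntries(List((0, 4), (8, 12)))            == List((0, 4), (8, 12))   // gap preserved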
-  }
-
-  private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
-
-  /**
-   * Return the Java modifiers for the given symbol.
-   * Java modifiers for classes:
-   *  - public, abstract, final, strictfp (not used)
-   * for interfaces:
-   *  - the same as for classes, without 'final'
-   * for fields:
-   *  - public, private (*)
-   *  - static, final
-   * for methods:
-   *  - the same as for fields, plus:
-   *  - abstract, synchronized (not used), strictfp (not used), native (not used)
-   *
-   *  (*) protected cannot be used, since inner classes 'see' protected members,
-   *      and they would fail verification after being lifted.
-   */
-  def javaFlags(sym: Symbol): Int = {
-    // constructors of module classes should be private
-    // PP: why are they only being marked private at this stage and not earlier?
-    val privateFlag =
-      sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner))
-
-    // Final: the only fields which can receive ACC_FINAL are eager vals.
-    // Neither vars nor lazy vals can, because:
-    //
-    // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3
-    // "Another problem is that the specification allows aggressive
-    // optimization of final fields. Within a thread, it is permissible to
-    // reorder reads of a final field with those modifications of a final
-    // field that do not take place in the constructor."
-    //
-    // A var or lazy val which is marked final still has meaning to the
-    // scala compiler.  The word final is heavily overloaded unfortunately;
-    // for us it means "not overridable".  At present you can't override
-    // vars regardless; this may change.
-    //
-    // The logic does not check .isFinal (which checks flags for the FINAL flag,
-    // and includes symbols marked lateFINAL); instead it inspects rawflags so
-    // we can exclude lateFINAL.  Such symbols are eligible for inlining, but to
-    // avoid breaking proxy software which depends on subclassing, we do not
-    // emit ACC_FINAL.
-    // Nested objects won't receive ACC_FINAL in order to allow for their overriding.
-
-    val finalFlag = (
-         (((sym.rawflags & Flags.FINAL) != 0) || isTopLevelModule(sym))
-      && !sym.enclClass.isInterface
-      && !sym.isClassConstructor
-      && !sym.isMutable   // lazy vals and vars both
-    )
-
-    // Primitives are "abstract final" to prohibit instantiation
-    // without having to provide any implementations, but that is an
-    // illegal combination of modifiers at the bytecode level so
-    // suppress final if abstract is present.
-    mkFlags(
-      if (privateFlag) ACC_PRIVATE else ACC_PUBLIC,
-      if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
-      if (sym.isInterface) ACC_INTERFACE else 0,
-      if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
-      if (sym.isStaticMember) ACC_STATIC else 0,
-      if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
-      if (sym.isArtifact) ACC_SYNTHETIC else 0,
-      if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
-      if (sym.isVarargsMethod) ACC_VARARGS else 0,
-      if (sym.hasFlag(Flags.SYNCHRONIZED)) JAVA_ACC_SYNCHRONIZED else 0
-    )
-  }
-  def javaFieldFlags(sym: Symbol) = (
-    javaFlags(sym) | mkFlags(
-      if (sym hasAnnotation TransientAttr) ACC_TRANSIENT else 0,
-      if (sym hasAnnotation VolatileAttr) ACC_VOLATILE else 0,
-      if (sym.isMutable) 0 else ACC_FINAL
-    )
-  )
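A quick illustration of how mkFlags composes the bits above; the numeric values are the usual class-file access flags, shown here only for the example:

  // ACC_PUBLIC = 0x0001, ACC_FINAL = 0x0010 (zero arguments simply drop out of the OR)
  mkFlags(0x0001, 0, 0x0010, 0)   // == 0x0011, i.e. a public final member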
-
-  def isTopLevelModule(sym: Symbol): Boolean =
-    afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
-
-  def isStaticModule(sym: Symbol): Boolean = {
-    sym.isModuleClass && !sym.isImplClass && !sym.isLifted
-  }
-
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
index 540935f..01c4ff5 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
@@ -5,38 +5,23 @@
 
 package scala.tools.nsc
 package backend.jvm
-import scala.tools.nsc.io.AbstractFile
 import scala.tools.nsc.symtab._
 
-/** Code shared between the legagy backend [[scala.tools.nsc.backend.jvm.GenJVM]]
-  * and the new backend [[scala.tools.nsc.backend.jvm.GenASM]]. There should be
-  * more here, but for now I'm starting with the refactorings that are either
-  * straightforward to review or necessary for maintenance.
-  */
+/** Code shared between the erstwhile legacy backend (aka GenJVM)
+ *  and the new backend [[scala.tools.nsc.backend.jvm.GenASM]]. There should be
+ *  more here, but for now I'm starting with the refactorings that are either
+ *  straightforward to review or necessary for maintenance.
+ */
 trait GenJVMASM {
   val global: Global
   import global._
   import icodes._
   import definitions._
 
-  protected def outputDirectory(sym: Symbol): AbstractFile =
-    settings.outputDirs outputDirFor beforeFlatten(sym.sourceFile)
-
-  protected def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
-    var dir = base
-    val pathParts = clsName.split("[./]").toList
-    for (part <- pathParts.init) {
-      dir = dir.subdirectoryNamed(part)
-    }
-    dir.fileNamed(pathParts.last + suffix)
-  }
-  protected def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
-    getFile(outputDirectory(sym), clsName, suffix)
-
-  protected val ExcludedForwarderFlags = {
+  val ExcludedForwarderFlags = {
     import Flags._
     // Should include DEFERRED but this breaks findMember.
-    ( CASE | SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO )
+    ( SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO )
   }
 
   protected def isJavaEntryPoint(icls: IClass) = {
@@ -65,9 +50,8 @@ trait GenJVMASM {
     // At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
     hasApproximate && {
       // Before erasure so we can identify generic mains.
-      beforeErasure {
+      enteringErasure {
         val companion     = sym.linkedClassOfClass
-        val companionMain = companion.tpe.member(nme.main)
 
         if (hasJavaMainMethod(companion))
           failNoForwarder("companion contains its own main method")
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
deleted file mode 100644
index e002a61..0000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Iulian Dragos
- */
-
-package scala.tools.nsc
-package backend.jvm
-
-import scala.collection.{ mutable, immutable }
-import ch.epfl.lamp.fjbg._
-
-trait GenJVMUtil {
-  self: GenJVM =>
-
-  import global._
-  import icodes._
-  import icodes.opcodes._
-  import definitions._
-
-  /** Map from type kinds to the Java reference types. It is used for
-   *  loading class constants. @see Predef.classOf.
-   */
-  val classLiteral = immutable.Map[TypeKind, JObjectType](
-    UNIT   -> new JObjectType("java.lang.Void"),
-    BOOL   -> new JObjectType("java.lang.Boolean"),
-    BYTE   -> new JObjectType("java.lang.Byte"),
-    SHORT  -> new JObjectType("java.lang.Short"),
-    CHAR   -> new JObjectType("java.lang.Character"),
-    INT    -> new JObjectType("java.lang.Integer"),
-    LONG   -> new JObjectType("java.lang.Long"),
-    FLOAT  -> new JObjectType("java.lang.Float"),
-    DOUBLE -> new JObjectType("java.lang.Double")
-  )
-
-  // Don't put this in per run caches.
-  private val javaNameCache = new mutable.WeakHashMap[Symbol, Name]() ++= List(
-    NothingClass        -> binarynme.RuntimeNothing,
-    RuntimeNothingClass -> binarynme.RuntimeNothing,
-    NullClass           -> binarynme.RuntimeNull,
-    RuntimeNullClass    -> binarynme.RuntimeNull
-  )
-
-    /** This trait may be used by tools that need access to
-   *  utility methods like javaName and javaType. (for instance,
-   *  the Eclipse plugin uses it).
-   */
-  trait BytecodeUtil {
-
-    val conds = immutable.Map[TestOp, Int](
-      EQ -> JExtendedCode.COND_EQ,
-      NE -> JExtendedCode.COND_NE,
-      LT -> JExtendedCode.COND_LT,
-      GT -> JExtendedCode.COND_GT,
-      LE -> JExtendedCode.COND_LE,
-      GE -> JExtendedCode.COND_GE
-    )
-
-    /** Specialized array conversion to prevent calling
-     *  java.lang.reflect.Array.newInstance via TraversableOnce.toArray
-     */
-
-    def mkArray(xs: Traversable[JType]): Array[JType] = { val a = new Array[JType](xs.size); xs.copyToArray(a); a }
-    def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a }
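For instance, the parameter-type array in javaType(s: Symbol) below is built this way; xs.toArray would instead allocate the array through the ClassTag, i.e. via java.lang.reflect.Array.newInstance:

    // mkArray(s.tpe.paramTypes map javaType)   // pre-sized Array[JType], no reflective allocation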
-
-     *  Return a name of this symbol that can be used on the Java
-     *  platform.  It removes spaces from names.
-     *
-     *  Special handling:
-     *    scala.Nothing erases to scala.runtime.Nothing$
-     *       scala.Null erases to scala.runtime.Null$
-     *
-     *  This is needed because they are not real classes, and they mean
-     *  'abrupt termination upon evaluation of that expression' or null respectively.
-     *  This handling is done already in GenICode, but here we need to remove
-     *  references from method signatures to these types, because such classes can
-     *  not exist in the classpath: the type checker will be very confused.
-     */
-    def javaName(sym: Symbol): String =
-      javaNameCache.getOrElseUpdate(sym, {
-        if (sym.isClass || (sym.isModule && !sym.isMethod))
-          sym.javaBinaryName
-        else
-          sym.javaSimpleName
-      }).toString
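As an illustration of the special handling described above:

    // javaName(NothingClass) yields the binary name of scala.runtime.Nothing$
    // javaName(NullClass)    yields the binary name of scala.runtime.Null$
    // any other symbol falls through to javaBinaryName (classes/modules) or javaSimpleName.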
-
-    def javaType(t: TypeKind): JType = (t: @unchecked) match {
-      case UNIT            => JType.VOID
-      case BOOL            => JType.BOOLEAN
-      case BYTE            => JType.BYTE
-      case SHORT           => JType.SHORT
-      case CHAR            => JType.CHAR
-      case INT             => JType.INT
-      case LONG            => JType.LONG
-      case FLOAT           => JType.FLOAT
-      case DOUBLE          => JType.DOUBLE
-      case REFERENCE(cls)  => new JObjectType(javaName(cls))
-      case ARRAY(elem)     => new JArrayType(javaType(elem))
-    }
-
-    def javaType(t: Type): JType = javaType(toTypeKind(t))
-
-    def javaType(s: Symbol): JType =
-      if (s.isMethod)
-        new JMethodType(
-          if (s.isClassConstructor) JType.VOID else javaType(s.tpe.resultType),
-          mkArray(s.tpe.paramTypes map javaType)
-        )
-      else
-        javaType(s.tpe)
-
-    protected def genConstant(jcode: JExtendedCode, const: Constant) {
-      const.tag match {
-        case UnitTag    => ()
-        case BooleanTag => jcode emitPUSH const.booleanValue
-        case ByteTag    => jcode emitPUSH const.byteValue
-        case ShortTag   => jcode emitPUSH const.shortValue
-        case CharTag    => jcode emitPUSH const.charValue
-        case IntTag     => jcode emitPUSH const.intValue
-        case LongTag    => jcode emitPUSH const.longValue
-        case FloatTag   => jcode emitPUSH const.floatValue
-        case DoubleTag  => jcode emitPUSH const.doubleValue
-        case StringTag  => jcode emitPUSH const.stringValue
-        case NullTag    => jcode.emitACONST_NULL()
-        case ClazzTag   =>
-          val kind = toTypeKind(const.typeValue)
-          val toPush =
-            if (kind.isValueType) classLiteral(kind)
-            else javaType(kind).asInstanceOf[JReferenceType]
-
-          jcode emitPUSH toPush
-
-        case EnumTag   =>
-          val sym = const.symbolValue
-          jcode.emitGETSTATIC(javaName(sym.owner),
-                              javaName(sym),
-                              javaType(sym.tpe.underlying))
-        case _         =>
-          abort("Unknown constant value: " + const)
-      }
-    }
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
deleted file mode 100644
index aaffaa8..0000000
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ /dev/null
@@ -1,2358 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Nikolay Mihaylov
- */
-
-
-package scala.tools.nsc
-package backend.msil
-
-import java.io.{File, IOException}
-import java.nio.{ByteBuffer, ByteOrder}
-import scala.collection.{ mutable, immutable }
-import scala.tools.nsc.symtab._
-
-import ch.epfl.lamp.compiler.msil.{Type => MsilType, _}
-import ch.epfl.lamp.compiler.msil.emit._
-import ch.epfl.lamp.compiler.msil.util.PECustomMod
-import scala.language.postfixOps
-
-abstract class GenMSIL extends SubComponent {
-  import global._
-  import loaders.clrTypes
-  import clrTypes.{types, constructors, methods, fields}
-  import icodes._
-  import icodes.opcodes._
-
-  val x = loaders
-
-  /** Create a new phase */
-  override def newPhase(p: Phase) = new MsilPhase(p)
-
-  val phaseName = "msil"
-  /** MSIL code generation phase
-   */
-  class MsilPhase(prev: Phase) extends GlobalPhase(prev) {
-    def name = phaseName
-    override def newFlags = phaseNewFlags
-
-    override def erasedTypes = true
-
-    override def run() {
-      if (settings.debug.value) inform("[running phase " + name + " on icode]")
-
-      val codeGenerator = new BytecodeGenerator
-
-      //classes is ICodes.classes, a HashMap[Symbol, IClass]
-      classes.values foreach codeGenerator.findEntryPoint
-      if( opt.showClass.isDefined && (codeGenerator.entryPoint == null) ) { // TODO introduce dedicated setting instead
-        val entryclass = opt.showClass.get.toString
-        warning("Couldn't find entry class " + entryclass)
-      }
-
-      codeGenerator.initAssembly
-
-      val classesSorted = classes.values.toList.sortBy(c => c.symbol.id) // simplifies comparing cross-compiler vs. .exe output
-      classesSorted foreach codeGenerator.createTypeBuilder
-      classesSorted foreach codeGenerator.createClassMembers
-
-      try {
-        classesSorted foreach codeGenerator.genClass
-      } finally {
-        codeGenerator.writeAssembly
-      }
-    }
-
-    override def apply(unit: CompilationUnit) {
-      abort("MSIL works on icode classes, not on compilation units!")
-    }
-  }
-
-  /**
-   * MSIL bytecode generator.
-   *
-   */
-  class BytecodeGenerator {
-
-    val MODULE_INSTANCE_NAME = "MODULE$"
-
-    import clrTypes.{VOID => MVOID, BOOLEAN => MBOOL, BYTE => MBYTE, SHORT => MSHORT,
-                   CHAR => MCHAR, INT => MINT, LONG => MLONG, FLOAT => MFLOAT,
-                   DOUBLE => MDOUBLE, OBJECT => MOBJECT, STRING => MSTRING,
-                   STRING_ARRAY => MSTRING_ARRAY,
-                   SYMTAB_CONSTR => SYMTAB_ATTRIBUTE_CONSTRUCTOR,
-                   SYMTAB_DEFAULT_CONSTR => SYMTAB_ATTRIBUTE_EMPTY_CONSTRUCTOR}
-
-    val EXCEPTION = clrTypes.getType("System.Exception")
-    val MBYTE_ARRAY = clrTypes.mkArrayType(MBYTE)
-
-    val ICLONEABLE = clrTypes.getType("System.ICloneable")
-    val MEMBERWISE_CLONE = MOBJECT.GetMethod("MemberwiseClone", MsilType.EmptyTypes)
-
-    val MMONITOR       = clrTypes.getType("System.Threading.Monitor")
-    val MMONITOR_ENTER = MMONITOR.GetMethod("Enter", Array(MOBJECT))
-    val MMONITOR_EXIT  = MMONITOR.GetMethod("Exit", Array(MOBJECT))
-
-    val MSTRING_BUILDER = clrTypes.getType("System.Text.StringBuilder")
-    val MSTRING_BUILDER_CONSTR = MSTRING_BUILDER.GetConstructor(MsilType.EmptyTypes)
-    val MSTRING_BUILDER_TOSTRING = MSTRING_BUILDER.GetMethod("ToString",
-                                                             MsilType.EmptyTypes)
-
-    val TYPE_FROM_HANDLE =
-      clrTypes.getType("System.Type").GetMethod("GetTypeFromHandle", Array(clrTypes.getType("System.RuntimeTypeHandle")))
-
-    val INT_PTR = clrTypes.getType("System.IntPtr")
-
-    val JOBJECT = definitions.ObjectClass
-    val JSTRING = definitions.StringClass
-
-    val SystemConvert = clrTypes.getType("System.Convert")
-
-    val objParam = Array(MOBJECT)
-
-    val toBool:   MethodInfo = SystemConvert.GetMethod("ToBoolean", objParam) // see comment in emitUnbox
-    val toSByte:  MethodInfo = SystemConvert.GetMethod("ToSByte",   objParam)
-    val toShort:  MethodInfo = SystemConvert.GetMethod("ToInt16",   objParam)
-    val toChar:   MethodInfo = SystemConvert.GetMethod("ToChar",    objParam)
-    val toInt:    MethodInfo = SystemConvert.GetMethod("ToInt32",   objParam)
-    val toLong:   MethodInfo = SystemConvert.GetMethod("ToInt64",   objParam)
-    val toFloat:  MethodInfo = SystemConvert.GetMethod("ToSingle",  objParam)
-    val toDouble: MethodInfo = SystemConvert.GetMethod("ToDouble",  objParam)
-
-    //val boxedUnit: FieldInfo = msilType(definitions.BoxedUnitModule.info).GetField("UNIT")
-    val boxedUnit: FieldInfo = fields(definitions.BoxedUnit_UNIT)
-
-    // Scala attributes
-    // symtab.Definitions -> object (singleton..)
-    val SerializableAttr = definitions.SerializableAttr.tpe
-    val CloneableAttr    = definitions.CloneableAttr.tpe
-    val TransientAtt     = definitions.TransientAttr.tpe
-    // remoting: the architectures are too different, no mapping (no portable code
-    // possible)
-
-    // java instance methods that are mapped to static methods in .net
-    // these will need to be called with OpCodes.Call (not Callvirt)
-    val dynToStatMapped = mutable.HashSet[Symbol]()
-
-    initMappings()
-
-    /** Create the mappings between java and .net classes and methods */
-    private def initMappings() {
-      mapType(definitions.AnyClass, MOBJECT)
-      mapType(definitions.AnyRefClass, MOBJECT)
-      //mapType(definitions.NullClass, clrTypes.getType("scala.AllRef$"))
-      //mapType(definitions.NothingClass, clrTypes.getType("scala.All$"))
-      // FIXME: for some reason the upper two lines map to null
-      mapType(definitions.NullClass, EXCEPTION)
-      mapType(definitions.NothingClass, EXCEPTION)
-
-      mapType(definitions.BooleanClass, MBOOL)
-      mapType(definitions.ByteClass, MBYTE)
-      mapType(definitions.ShortClass, MSHORT)
-      mapType(definitions.CharClass, MCHAR)
-      mapType(definitions.IntClass, MINT)
-      mapType(definitions.LongClass, MLONG)
-      mapType(definitions.FloatClass, MFLOAT)
-      mapType(definitions.DoubleClass, MDOUBLE)
-    }
-
-    var clasz: IClass = _
-    var method: IMethod = _
-
-    var massembly: AssemblyBuilder = _
-    var mmodule: ModuleBuilder = _
-    var mcode: ILGenerator = _
-
-    var assemName: String = _
-    var firstSourceName = ""
-    var outDir: File = _
-    var srcPath: File = _
-    var moduleName: String = _
-
-    def initAssembly() {
-
-      assemName = settings.assemname.value
-
-      if (assemName == "") {
-        if (entryPoint != null) {
-          assemName = msilName(entryPoint.enclClass)
-          // remove the $ at the end (from module-name)
-          assemName = assemName.substring(0, assemName.length() - 1)
-        } else {
-          // assuming filename of first source file
-          assert(firstSourceName.endsWith(".scala"), firstSourceName)
-          assemName = firstSourceName.substring(0, firstSourceName.length() - 6)
-        }
-      } else {
-        if (assemName.endsWith(".msil"))
-          assemName = assemName.substring(0, assemName.length()-5)
-        if (assemName.endsWith(".il"))
-          assemName = assemName.substring(0, assemName.length()-3)
-        val f: File = new File(assemName)
-        assemName = f.getName()
-      }
-
-      outDir = new File(settings.outdir.value)
-
-      srcPath = new File(settings.sourcedir.value)
-
-      val assemblyName = new AssemblyName()
-      assemblyName.Name = assemName
-      massembly = AssemblyBuilderFactory.DefineDynamicAssembly(assemblyName)
-
-      moduleName = assemName // + (if (entryPoint == null) ".dll" else ".exe")
-      // filename here: .dll or .exe (in both parameters), second: give absolute-path
-      mmodule = massembly.DefineDynamicModule(moduleName,
-                                              new File(outDir, moduleName).getAbsolutePath())
-      assert (mmodule != null)
-    }
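Hypothetical examples of the assembly-name derivation above:

    // entry point found in object Hello         -> assemName == "Hello"   (trailing '$' dropped)
    // settings.assemname == "out/MyApp.msil"    -> assemName == "MyApp"
    // no entry point, first source "Foo.scala"  -> assemName == "Foo"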
-
-
-    /**
-     * Form of the custom Attribute parameter (Ecma-335.pdf)
-     *      - p. 163 for CustomAttrib Form,
-     *      - p. 164 for FixedArg Form (Array and Element) (if array or not is known!)
-     *  !! least significant byte first if values longer than one byte !!
-     *
-     * 1: Prolog (unsigned int16, value 0x0001) -> symtab[0] = 0x01, symtab[1] = 0x00
-     * 2: FixedArgs (directly the data, get number and types from related constructor)
-     *  2.1: length of the array (unsigned int32, 4 bytes, least significant first)
-     *  2.2: the byte array data
-     * 3: NumNamed (unsigned int16, number of named fields and properties, 0x0000)
-     */
-    def addSymtabAttribute(sym: Symbol, tBuilder: TypeBuilder) {
-      def addMarker() {
-        val markerSymtab = new Array[Byte](4)
-        markerSymtab(0) = 1.toByte
-        tBuilder.SetCustomAttribute(SYMTAB_ATTRIBUTE_EMPTY_CONSTRUCTOR, markerSymtab)
-      }
-
-      // both conditions are needed (why exactly..?)
-      if (tBuilder.Name.endsWith("$") || sym.isModuleClass) {
-        addMarker()
-      } else {
-        currentRun.symData.get(sym) match {
-          case Some(pickle) =>
-            var size = pickle.writeIndex
-            val symtab = new Array[Byte](size + 8)
-            symtab(0) = 1.toByte
-            for (i <- 2 until 6) {
-              symtab(i) = (size & 0xff).toByte
-              size = size >> 8
-            }
-            java.lang.System.arraycopy(pickle.bytes, 0, symtab, 6, pickle.writeIndex)
-
-            tBuilder.SetCustomAttribute(SYMTAB_ATTRIBUTE_CONSTRUCTOR, symtab)
-
-            currentRun.symData -= sym
-            currentRun.symData -= sym.companionSymbol
-
-          case _ =>
-            addMarker()
-        }
-      }
-    }
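A worked example of the blob produced above, for a hypothetical 3-byte pickle Array(0x41, 0x42, 0x43):

    // index : 0    1    2    3    4    5    6    7    8    9    10
    // value : 0x01 0x00 0x03 0x00 0x00 0x00 0x41 0x42 0x43 0x00 0x00
    //         |-prolog-| |-- length, little-endian --| |- pickle -| |-NumNamed-|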
-
-    /**
-     * Mutates `member` adding CLR attributes (if any) based on sym.annotations.
-     * Please notice that CLR custom modifiers are a different beast (see customModifiers below)
-     * and thus shouldn't be added by this method.
-     */
-    def addAttributes(member: ICustomAttributeSetter, annotations: List[AnnotationInfo]) {
-      val attributes = annotations.map(_.atp.typeSymbol).collect {
-        case definitions.TransientAttr => null // TODO this is just an example
-      }
-      return // TODO: implement at some point
-    }
-
-    /**
-     * What's a CLR custom modifier? Intro available as source comments in compiler.msil.CustomModifier.
-     * It's basically a marker associated with a location (think of FieldInfo, ParameterInfo, and PropertyInfo)
-     * and thus that marker (be it optional or required) becomes part of the signature of that location.
-     * Some annotations will become CLR attributes (see addAttributes above), others custom modifiers (this method).
-     */
-    def customModifiers(annotations: List[AnnotationInfo]): Array[CustomModifier] = {
-      annotations.map(_.atp.typeSymbol).collect {
-        case definitions.VolatileAttr  => new CustomModifier(true, CustomModifier.VolatileMarker)
-      } toArray
-    }
-
-
-
-    /*
-      debuglog("creating annotations: " + annotations + " for member : " + member)
-      for (annot@ AnnotationInfo(typ, annArgs, nvPairs) <- annotations ;
-           if annot.isConstant)
-           //!typ.typeSymbol.isJavaDefined
-      {
-//        assert(consts.length <= 1,
-//               "too many constant arguments for annotations; "+consts.toString())
-
-        // Problem / TODO having the symbol of the annotations type would be nicer
-        // (i hope that type.typeSymbol is the same as the one in types2create)
-        // AND: this will crash if the annotations Type is already compiled (-> not a typeBuilder)
-        // when this is solved, types2create will be the same as icodes.classes, thus superfluous
-        val annType: TypeBuilder = getType(typ.typeSymbol).asInstanceOf[TypeBuilder]
-//        val annType: MsilType = getType(typ.typeSymbol)
-
-        // Problem / TODO: i have no idea which constructor is used. This
-        // information should be available in AnnotationInfo.
-        annType.CreateType() // else, GetConstructors can't be used
-        val constr: ConstructorInfo = annType.GetConstructors()(0)
-        // prevent a second call of CreateType, only needed because there's no
-        // other way than GetConstructors()(0) to get the constructor, if there's
-        // no constructor symbol available.
-
-        val args: Array[Byte] =
-          getAttributeArgs(
-            annArgs map (_.constant.get),
-            (for((n,v) <- nvPairs) yield (n, v.constant.get)))
-        member.SetCustomAttribute(constr, args)
-      }
-    } */
-
-/*    def getAttributeArgs(consts: List[Constant], nvPairs: List[(Name, Constant)]): Array[Byte] = {
-      val buf = ByteBuffer.allocate(2048) // FIXME: this may be not enough!
-      buf.order(ByteOrder.LITTLE_ENDIAN)
-      buf.putShort(1.toShort) // signature
-
-      def emitSerString(str: String) = {
-          // this is wrong, it has to be the length of the UTF-8 byte array, which
-          // may be longer (see clr-book on page 302)
-//          val length: Int = str.length
-            val strBytes: Array[Byte] = try {
-              str.getBytes("UTF-8")
-            } catch {
-              case _: Error => abort("could not get byte-array for string: " + str)
-            }
-            val length: Int = strBytes.length //this length is stored big-endian
-            if (length < 128)
-              buf.put(length.toByte)
-            else if (length < (1<<14)) {
-              buf.put(((length >> 8) | 0x80).toByte) // the bits 14 and 15 of length are '0'
-              buf.put((length | 0xff).toByte)
-            } else if (length < (1 << 29)) {
-              buf.put(((length >> 24) | 0xc0).toByte)
-              buf.put(((length >> 16) & 0xff).toByte)
-              buf.put(((length >>  8) & 0xff).toByte)
-              buf.put(((length      ) & 0xff).toByte)
-            } else
-              abort("string too long for attribute parameter: " + length)
-            buf.put(strBytes)
-      }
-
-      def emitConst(const: Constant): Unit = const.tag match {
-        case BooleanTag => buf.put((if (const.booleanValue) 1 else 0).toByte)
-        case ByteTag => buf.put(const.byteValue)
-        case ShortTag => buf.putShort(const.shortValue)
-        case CharTag => buf.putChar(const.charValue)
-        case IntTag => buf.putInt(const.intValue)
-        case LongTag => buf.putLong(const.longValue)
-        case FloatTag => buf.putFloat(const.floatValue)
-        case DoubleTag => buf.putDouble(const.doubleValue)
-        case StringTag =>
-          val str: String = const.stringValue
-          if (str == null) {
-            buf.put(0xff.toByte)
-          } else {
-            emitSerString(str)
-          }
-        case ArrayTag =>
-          val arr: Array[Constant] = const.arrayValue
-          if (arr == null) {
-            buf.putInt(0xffffffff)
-          } else {
-            buf.putInt(arr.length)
-            arr.foreach(emitConst)
-          }
-
-        // TODO: other Tags: NoTag, UnitTag, ClazzTag, EnumTag, ArrayTag ???
-
-        case _ => abort("could not handle attribute argument: " + const)
-      }
-
-      consts foreach emitConst
-      buf.putShort(nvPairs.length.toShort)
-      def emitNamedArg(nvPair: (Name, Constant)) {
-        // the named argument is a property of the attribute (it can't be a field, since
-        //  all fields in scala are private)
-        buf.put(0x54.toByte)
-
-        def emitType(c: Constant) = c.tag match { // type of the constant, Ecma-335.pdf, page 151
-          case BooleanTag => buf.put(0x02.toByte)
-          case ByteTag =>    buf.put(0x05.toByte)
-          case ShortTag =>   buf.put(0x06.toByte)
-          case CharTag =>    buf.put(0x07.toByte)
-          case IntTag =>     buf.put(0x08.toByte)
-          case LongTag =>    buf.put(0x0a.toByte)
-          case FloatTag =>   buf.put(0x0c.toByte)
-          case DoubleTag =>  buf.put(0x0d.toByte)
-          case StringTag =>  buf.put(0x0e.toByte)
-
-          // TODO: other Tags: NoTag, UnitTag, ClazzTag, EnumTag ???
-
-          // ArrayTag falls in here
-          case _ => abort("could not handle attribute argument: " + c)
-        }
-
-        val cnst: Constant = nvPair._2
-        if (cnst.tag == ArrayTag) {
-          buf.put(0x1d.toByte)
-          emitType(cnst.arrayValue(0)) // FIXME: will crash if array length = 0
-        } else if (cnst.tag == EnumTag) {
-          buf.put(0x55.toByte)
-          // TODO: put a SerString (don't know what exactly, names of the enums somehow..)
-              } else {
-          buf.put(0x51.toByte)
-          emitType(cnst)
-        }
-
-        emitSerString(nvPair._1.toString)
-        emitConst(nvPair._2)
-      }
-
-      val length = buf.position()
-      buf.array().slice(0, length)
-    } */
-
-    def writeAssembly() {
-      if (entryPoint != null) {
-        assert(entryPoint.enclClass.isModuleClass, entryPoint.enclClass)
-        val mainMethod = methods(entryPoint)
-        val stringArrayTypes: Array[MsilType] = Array(MSTRING_ARRAY)
-        val globalMain = mmodule.DefineGlobalMethod(
-          "Main", MethodAttributes.Public | MethodAttributes.Static,
-          MVOID, stringArrayTypes)
-        globalMain.DefineParameter(0, ParameterAttributes.None, "args")
-        massembly.SetEntryPoint(globalMain)
-        val code = globalMain.GetILGenerator()
-        val moduleField = getModuleInstanceField(entryPoint.enclClass)
-        code.Emit(OpCodes.Ldsfld, moduleField)
-        code.Emit(OpCodes.Ldarg_0)
-        code.Emit(OpCodes.Callvirt, mainMethod)
-        code.Emit(OpCodes.Ret)
-      }
-      createTypes()
-      var outDirName: String = null
-      try {
-        if (settings.Ygenjavap.isDefault) { // we reuse the JVM-sounding setting because it's conceptually similar
-          outDirName = outDir.getPath()
-          massembly.Save(outDirName + "\\" + assemName + ".msil") /* use SingleFileILPrinterVisitor */
-        } else {
-          outDirName = srcPath.getPath()
-          massembly.Save(settings.Ygenjavap.value, outDirName)  /* use MultipleFilesILPrinterVisitor */
-        }
-      } catch {
-        case e:IOException => abort("Could not write to " + outDirName + ": " + e.getMessage())
-      }
-    }
-
-    private def createTypes() {
-      for (sym <- classes.keys) {
-        val iclass   = classes(sym)
-        val tBuilder = types(sym).asInstanceOf[TypeBuilder]
-
-        debuglog("Calling CreatType for " + sym + ", " + tBuilder.toString)
-
-        tBuilder.CreateType()
-        tBuilder.setSourceFilepath(iclass.cunit.source.file.path)
-      }
-    }
-
-    private[GenMSIL] def ilasmFileName(iclass: IClass) : String = {
-      // method.sourceFile contains just the filename
-      iclass.cunit.source.file.toString.replace("\\", "\\\\")
-    }
-
-    private[GenMSIL] def genClass(iclass: IClass) {
-      val sym = iclass.symbol
-      debuglog("Generating class " + sym + " flags: " + Flags.flagsToString(sym.flags))
-      clasz = iclass
-
-      val tBuilder = getType(sym).asInstanceOf[TypeBuilder]
-      if (isCloneable(sym)) {
-        // FIXME: why is there no nme.clone_ ?
-        // "Clone": if the code is non-portable, "Clone" is defined, not "clone"
-        // TODO: improve condition (should override AnyRef.clone)
-        if (iclass.methods.forall(m => {
-          !((m.symbol.name.toString != "clone" || m.symbol.name.toString != "Clone") &&
-            m.symbol.tpe.paramTypes.length != 0)
-        })) {
-          debuglog("auto-generating cloneable method for " + sym)
-          val attrs: Short = (MethodAttributes.Public | MethodAttributes.Virtual |
-                              MethodAttributes.HideBySig).toShort
-          val cloneMethod = tBuilder.DefineMethod("Clone", attrs, MOBJECT,
-                                                  MsilType.EmptyTypes)
-          val clCode = cloneMethod.GetILGenerator()
-          clCode.Emit(OpCodes.Ldarg_0)
-          clCode.Emit(OpCodes.Call, MEMBERWISE_CLONE)
-          clCode.Emit(OpCodes.Ret)
-        }
-      }
-
-      val line = sym.pos.line
-      tBuilder.setPosition(line, ilasmFileName(iclass))
-
-      if (isTopLevelModule(sym)) {
-        if (sym.companionClass == NoSymbol)
-          generateMirrorClass(sym)
-        else
-          log("No mirror class for module with linked class: " +
-              sym.fullName)
-      }
-
-      addSymtabAttribute(sym, tBuilder)
-      addAttributes(tBuilder, sym.annotations)
-
-      if (iclass.symbol != definitions.ArrayClass)
-        iclass.methods foreach genMethod
-
-    } //genClass
-
-
-    private def genMethod(m: IMethod) {
-      debuglog("Generating method " + m.symbol + " flags: " + Flags.flagsToString(m.symbol.flags) +
-            " owner: " + m.symbol.owner)
-      method = m
-      localBuilders.clear
-      computeLocalVarsIndex(m)
-
-      if (m.symbol.isClassConstructor) {
-        mcode = constructors(m.symbol).asInstanceOf[ConstructorBuilder].GetILGenerator()
-      } else {
-        val mBuilder = methods(m.symbol).asInstanceOf[MethodBuilder]
-        if (!mBuilder.IsAbstract())
-          try {
-            mcode = mBuilder.GetILGenerator()
-          } catch {
-            case e: Exception =>
-              java.lang.System.out.println("m.symbol       = " + Flags.flagsToString(m.symbol.flags) + " " + m.symbol)
-              java.lang.System.out.println("m.symbol.owner = " + Flags.flagsToString(m.symbol.owner.flags) + " " + m.symbol.owner)
-              java.lang.System.out.println("mBuilder       = " + mBuilder)
-              java.lang.System.out.println("mBuilder.DeclaringType = " +
-                                 TypeAttributes.toString(mBuilder.DeclaringType.Attributes) +
-                                 "::" + mBuilder.DeclaringType)
-              throw e
-          }
-          else
-            mcode = null
-      }
-
-      if (mcode != null) {
-        for (local <- m.locals ; if !(m.params contains local)) {
-          debuglog("add local var: " + local + ", of kind " + local.kind)
-          val t: MsilType = msilType(local.kind)
-          val localBuilder = mcode.DeclareLocal(t)
-          localBuilder.SetLocalSymInfo(msilName(local.sym))
-          localBuilders(local) = localBuilder
-        }
-        genCode(m)
-      }
-
-    }
-
-    /** Special linearizer for methods with at least one exception handler. This
-     *  linearizer brings all basic blocks in the right order so that nested
-     *  try-catch and try-finally blocks can be emitted.
-     */
-    val msilLinearizer = new MSILLinearizer()
-
-    val labels = mutable.HashMap[BasicBlock, Label]()
-
-    /* when emitting .line, it's enough to include the full filename just once per method, thus reducing filesize.
-     * this scheme relies on the fact that the entry block is emitted first. */
-    var dbFilenameSeen = false
-
-    def genCode(m: IMethod) {
-
-      def makeLabels(blocks: List[BasicBlock]) = {
-        debuglog("Making labels for: " + method)
-        for (bb <- blocks) labels(bb) = mcode.DefineLabel()
-      }
-
-      labels.clear
-
-      var linearization = if(!m.exh.isEmpty) msilLinearizer.linearize(m)
-                          else linearizer.linearize(m)
-
-      if (!m.exh.isEmpty)
-        linearization = computeExceptionMaps(linearization, m)
-
-      makeLabels(linearization)
-
-      // debug val blocksInM = m.code.blocks.toList.sortBy(bb => bb.label)
-      // debug val blocksInL = linearization.sortBy(bb => bb.label)
-      // debug val MButNotL  = (blocksInM.toSet) diff (blocksInL.toSet) // if non-empty, a jump to B fails to find a label for B (case CJUMP, case CZJUMP)
-      // debug if(!MButNotL.isEmpty) { }
-
-      dbFilenameSeen = false
-      genBlocks(linearization)
-
-      // RETURN inside exception blocks are replaced by Leave. The target of the
-      // leave is a `Ret` outside any exception block (generated here).
-      if (handlerReturnMethod == m) {
-        mcode.MarkLabel(handlerReturnLabel)
-        if (handlerReturnKind != UNIT)
-          mcode.Emit(OpCodes.Ldloc, handlerReturnLocal)
-        mcode.Emit(OpCodes.Ret)
-      }
-
-      beginExBlock.clear()
-      beginCatchBlock.clear()
-      endExBlock.clear()
-      endFinallyLabels.clear()
-    }
-
-    def genBlocks(blocks: List[BasicBlock], previous: BasicBlock = null) {
-      blocks match {
-        case Nil => ()
-        case x :: Nil => genBlock(x, prev = previous, next = null)
-        case x :: y :: ys => genBlock(x, prev = previous, next = y); genBlocks(y :: ys, previous = x)
-      }
-    }
-
-    // the try blocks starting at a certain BasicBlock
-    val beginExBlock = mutable.HashMap[BasicBlock, List[ExceptionHandler]]()
-
-    // the catch blocks starting / ending at a certain BasicBlock
-    val beginCatchBlock = mutable.HashMap[BasicBlock, ExceptionHandler]()
-    val endExBlock = mutable.HashMap[BasicBlock, List[ExceptionHandler]]()
-
-    /** When emitting the code (genBlock), the number of currently active try / catch
-     *  blocks. When seeing a `RETURN` inside a try / catch, we need to
-     *   - store the result in a local (if it's not UNIT)
-     *   - emit `Leave handlerReturnLabel` instead of the Return
-     *   - emit code at the end: load the local and return its value
-     */
-    var currentHandlers = new mutable.Stack[ExceptionHandler]
-    // The IMethod the Local/Label/Kind below belong to
-    var handlerReturnMethod: IMethod = _
-    // Stores the result when returning inside an exception block
-    var handlerReturnLocal: LocalBuilder = _
-    // Label for a return instruction outside any exception block
-    var handlerReturnLabel: Label = _
-    // The result kind.
-    var handlerReturnKind: TypeKind = _
-    def returnFromHandler(kind: TypeKind): (LocalBuilder, Label) = {
-      if (handlerReturnMethod != method) {
-        handlerReturnMethod = method
-        if (kind != UNIT) {
-          handlerReturnLocal = mcode.DeclareLocal(msilType(kind))
-          handlerReturnLocal.SetLocalSymInfo("$handlerReturn")
-        }
-        handlerReturnLabel = mcode.DefineLabel()
-        handlerReturnKind = kind
-      }
-      (handlerReturnLocal, handlerReturnLabel)
-    }
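Roughly, in pseudo-CIL, the rewriting this supports (see the comment above) is:

    //   a `return x` inside a protected region becomes:
    //     stloc  $handlerReturn
    //     leave  handlerReturnLabel
    //   and genCode emits, once per method, after all blocks:
    //     handlerReturnLabel: ldloc $handlerReturn; ret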
-
-    /** For try/catch nested inside a finally, we can't use `Leave OutsideFinally`, the
-     *  Leave target has to be inside the finally (and it has to be the `endfinally` instruction).
-     *  So for every finalizer, we have a label which marks the place of the `endfinally`,
-     *  nested try/catch blocks will leave there.
-     */
-    val endFinallyLabels = mutable.HashMap[ExceptionHandler, Label]()
-
-    /** Computes which blocks are the beginning / end of a try or catch block */
-    private def computeExceptionMaps(blocks: List[BasicBlock], m: IMethod): List[BasicBlock] = {
-      val visitedBlocks = new mutable.HashSet[BasicBlock]()
-
-      // handlers which have not been introduced so far
-      var openHandlers = m.exh
-
-
-      /** Example
-       *   try {
-       *     try {
-       *         // *1*
-       *     } catch {
-       *       case h1 =>
-       *     }
-       *   } catch {
-       *     case h2 =>
-       *     case h3 =>
-       *       try {
-       *
-       *       } catch {
-       *         case h4 =>  // *2*
-       *         case h5 =>
-       *       }
-       *   }
-       */
-
-      // Stack of nested try blocks. Each block has a List of ExceptionHandler (multiple
-      // catch statements). Example *1*: Stack(List(h2, h3), List(h1))
-      val currentTryHandlers = new mutable.Stack[List[ExceptionHandler]]()
-
-      // Stack of nested catch blocks. The head of the list is the current catch block. The
-      // tail is all following catch blocks. Example *2*: Stack(List(h3), List(h4, h5))
-      val currentCatchHandlers = new mutable.Stack[List[ExceptionHandler]]()
-
-      for (b <- blocks) {
-
-        // are we past the current catch blocks?
-        def endHandlers(): List[ExceptionHandler] = {
-          var res: List[ExceptionHandler] = Nil
-          if (!currentCatchHandlers.isEmpty) {
-            val handler = currentCatchHandlers.top.head
-            if (!handler.blocks.contains(b)) {
-              // all blocks of the handler are either visited, or not part of the linearization (i.e. dead)
-              assert(handler.blocks.forall(b => visitedBlocks.contains(b) || !blocks.contains(b)),
-                     "Bad linearization of basic blocks inside catch. Found block not part of the handler\n"+
-                     b.fullString +"\nwhile in catch-part of\n"+ handler)
-
-              val rest = currentCatchHandlers.pop.tail
-              if (rest.isEmpty) {
-                // all catch blocks of that exception handler are covered
-                res = handler :: endHandlers()
-              } else {
-                // there are more catch blocks for that try (handlers covering the same)
-                currentCatchHandlers.push(rest)
-                beginCatchBlock(b) = rest.head
-              }
-            }
-          }
-          res
-        }
-        val end = endHandlers()
-        if (!end.isEmpty) endExBlock(b) = end
-
-        // are we past the current try block?
-        if (!currentTryHandlers.isEmpty) {
-          val handler = currentTryHandlers.top.head
-          if (!handler.covers(b)) {
-            // all of the covered blocks are visited, or not part of the linearization
-            assert(handler.covered.forall(b => visitedBlocks.contains(b) || !blocks.contains(b)),
-                   "Bad linearization of basic blocks inside try. Found non-covered block\n"+
-                   b.fullString +"\nwhile in try-part of\n"+ handler)
-
-            assert(handler.startBlock == b,
-                   "Bad linearization of basic blocks. The entry block of a catch does not directly follow the try\n"+
-                   b.fullString +"\n"+ handler)
-
-            val handlers = currentTryHandlers.pop
-            currentCatchHandlers.push(handlers)
-            beginCatchBlock(b) = handler
-          }
-        }
-
-        // are there try blocks starting at b?
-        val (newHandlers, stillOpen) = openHandlers.partition(_.covers(b))
-        openHandlers = stillOpen
-
-        val newHandlersBySize = newHandlers.groupBy(_.covered.size)
-        // big handlers first, smaller ones are nested inside the try of the big one
-        // (checked by the assertions below)
-        val sizes = newHandlersBySize.keys.toList.sortWith(_ > _)
-
-        val beginHandlers = new mutable.ListBuffer[ExceptionHandler]
-        for (s <- sizes) {
-          val sHandlers = newHandlersBySize(s)
-          for (h <- sHandlers) {
-            assert(h.covered == sHandlers.head.covered,
-                   "bad nesting of exception handlers. same size, but not covering same blocks\n"+
-                   h +"\n"+ sHandlers.head)
-            assert(h.resultKind == sHandlers.head.resultKind,
-                   "bad nesting of exception handlers. same size, but the same resultKind\n"+
-                   h +"\n"+ sHandlers.head)
-          }
-          for (bigger <- beginHandlers; h <- sHandlers) {
-            assert(h.covered.subsetOf(bigger.covered),
-                   "bad nesting of exception handlers. try blocks of smaller handler are not nested in bigger one.\n"+
-                   h +"\n"+ bigger)
-            assert(h.blocks.toSet.subsetOf(bigger.covered),
-                   "bad nesting of exception handlers. catch blocks of smaller handler are not nested in bigger one.\n"+
-                   h +"\n"+ bigger)
-          }
-          beginHandlers += sHandlers.head
-          currentTryHandlers.push(sHandlers)
-        }
-        beginExBlock(b) = beginHandlers.toList
-        visitedBlocks += b
-      }
-
-      // if there are handlers left (i.e. handlers covering nothing, or a
-      // non-existent (dead) block), remove their catch-blocks.
-      val liveBlocks = if (openHandlers.isEmpty) blocks else {
-        blocks.filter(b => openHandlers.forall(h => !h.blocks.contains(b)))
-      }
-
-      /** There might be open handlers, but no more blocks. This happens when a try/catch ends
-       *  with `throw` or `return`
-       *     def foo() { try { .. throw } catch { _ => .. throw } }
-       *
-       *  In this case we need some code after the catch block for the auto-generated
-       *  `leave` instruction. So we're adding a (dead) `throw new Exception`.
-       */
-      val rest = currentCatchHandlers.map(handlers => {
-        assert(handlers.length == 1, handlers)
-        handlers.head
-      }).toList
-
-      if (rest.isEmpty) {
-        liveBlocks
-      } else {
-        val b = m.code.newBlock
-        b.emit(Seq(
-          NEW(REFERENCE(definitions.ThrowableClass)),
-          DUP(REFERENCE(definitions.ObjectClass)),
-          CALL_METHOD(definitions.ThrowableClass.primaryConstructor, Static(true)),
-          THROW(definitions.ThrowableClass)
-        ))
-        b.close
-        endExBlock(b) = rest
-        liveBlocks ::: List(b)
-      }
-    }
-
-    /**
-     *  @param block the BasicBlock to emit code for
-     *  @param next  the following BasicBlock, `null` if `block` is the last one
-     */
-    def genBlock(block: BasicBlock, prev: BasicBlock, next: BasicBlock) {
-
-      def loadLocalOrAddress(local: Local, msg : String , loadAddr : Boolean) {
-        debuglog(msg + " for " + local)
-        val isArg = local.arg
-        val i = local.index
-        if (isArg)
-          loadArg(mcode, loadAddr)(i)
-        else
-          loadLocal(i, local, mcode, loadAddr)
-      }
-
-      def loadFieldOrAddress(field: Symbol, isStatic: Boolean, msg: String, loadAddr : Boolean) {
-        debuglog(msg + " with owner: " + field.owner +
-              " flags: " + Flags.flagsToString(field.owner.flags))
-        var fieldInfo = fields.get(field) match {
-          case Some(fInfo) => fInfo
-          case None =>
-            val fInfo = getType(field.owner).GetField(msilName(field))
-            fields(field) = fInfo
-            fInfo
-        }
-        if (fieldInfo.IsVolatile) {
-          mcode.Emit(OpCodes.Volatile)
-        }
-        if (!fieldInfo.IsLiteral) {
-          if (loadAddr) {
-            mcode.Emit(if (isStatic) OpCodes.Ldsflda else OpCodes.Ldflda, fieldInfo)
-          } else {
-            mcode.Emit(if (isStatic) OpCodes.Ldsfld else OpCodes.Ldfld, fieldInfo)
-          }
-        } else {
-          assert(!loadAddr, "can't take AddressOf a literal field (not even with readonly. prefix) because no memory was allocated to such field ...")
-          // TODO the above can be overcome by loading the value, boxing, and finally unboxing. An address to a copy of the raw value will be on the stack.
-         /*  We perform `field inlining' as required by CLR.
-          *  Emit as for a CONSTANT ICode stmt, with the twist that the constant value is available
-          *  as a java.lang.Object and its .NET type allows constant initialization in CLR, i.e. that type
-          *  is one of I1, I2, I4, I8, R4, R8, CHAR, BOOLEAN, STRING, or CLASS (in this last case,
-          *  only accepting nullref as value). See Table 9-1 in Lidin's book on ILAsm. */
-          val value = fieldInfo.getValue()
-          if (value == null) {
-            mcode.Emit(OpCodes.Ldnull)
-          } else {
-            val typ = if (fieldInfo.FieldType.IsEnum) fieldInfo.FieldType.getUnderlyingType
-                      else fieldInfo.FieldType
-            if (typ == clrTypes.STRING) {
-              mcode.Emit(OpCodes.Ldstr, value.asInstanceOf[String])
-            } else if (typ == clrTypes.BOOLEAN) {
-                mcode.Emit(if (value.asInstanceOf[Boolean]) OpCodes.Ldc_I4_1
-                           else OpCodes.Ldc_I4_0)
-            } else if (typ == clrTypes.BYTE || typ == clrTypes.UBYTE) {
-              loadI4(value.asInstanceOf[Byte], mcode)
-            } else if (typ == clrTypes.SHORT || typ == clrTypes.USHORT) {
-              loadI4(value.asInstanceOf[Int], mcode)
-            } else if (typ == clrTypes.CHAR) {
-              loadI4(value.asInstanceOf[Char], mcode)
-            } else if (typ == clrTypes.INT || typ == clrTypes.UINT) {
-              loadI4(value.asInstanceOf[Int], mcode)
-            } else if (typ == clrTypes.LONG || typ == clrTypes.ULONG) {
-              mcode.Emit(OpCodes.Ldc_I8, value.asInstanceOf[Long])
-            } else if (typ == clrTypes.FLOAT) {
-              mcode.Emit(OpCodes.Ldc_R4, value.asInstanceOf[Float])
-            } else if (typ == clrTypes.DOUBLE) {
-              mcode.Emit(OpCodes.Ldc_R8, value.asInstanceOf[Double])
-            } else {
-              /* TODO one more case is described in Partition II, 16.2: bytearray(...) */
-              abort("Unknown type for static literal field: " + fieldInfo)
-            }
-          }
-        }
-      }
-
-      /** Creating objects works differently on .NET. On the JVM
-       *  - NEW(type) => reference on Stack
-       *  - DUP, load arguments, CALL_METHOD(constructor)
-       *
-       * On .NET, the NEW and DUP are ignored, but we emit a special method call
-       *  - load arguments
-       *  - NewObj(constructor) => reference on stack
-       *
-       * This variable tells whether the previous instruction was a NEW,
-       * we expect a DUP which is not emitted. */
-      var previousWasNEW = false
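      // Roughly, for a hypothetical `new C(args)`:
      //   JVM-style ICode : NEW C; DUP; <load args>; CALL_METHOD(C.<init>, Static(true))
      //   emitted CIL     : <load args>; newobj C::.ctor(...)
      // NEW records previousWasNEW, the expected DUP is checked (and not emitted),
      // and the constructor call is turned into a single Newobj (see CALL_METHOD below).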
-
-      var lastLineNr: Int = 0
-      var lastPos: Position = NoPosition
-
-
-      // EndExceptionBlock must happen before MarkLabel because it adds the
-      // Leave instruction. Otherwise, labels(block) points to the Leave
-      // (inside the catch) instead of the instruction afterwards.
-      for (handlers <- endExBlock.get(block); exh <- handlers) {
-        currentHandlers.pop()
-        for (l <- endFinallyLabels.get(exh))
-          mcode.MarkLabel(l)
-        mcode.EndExceptionBlock()
-      }
-
-      mcode.MarkLabel(labels(block))
-      debuglog("Generating code for block: " + block)
-
-      for (handler <- beginCatchBlock.get(block)) {
-        if (!currentHandlers.isEmpty && currentHandlers.top.covered == handler.covered) {
-          currentHandlers.pop()
-          currentHandlers.push(handler)
-        }
-        if (handler.cls == NoSymbol) {
-          // `finally` blocks are represented the same as `catch`, but with no catch-type
-          mcode.BeginFinallyBlock()
-        } else {
-          val t = getType(handler.cls)
-          mcode.BeginCatchBlock(t)
-        }
-      }
-      for (handlers <- beginExBlock.get(block); exh <- handlers) {
-        currentHandlers.push(exh)
-        mcode.BeginExceptionBlock()
-      }
-
-      for (instr <- block) {
-        try {
-          val currentLineNr = instr.pos.line
-          val skip = if(instr.pos.isRange) instr.pos.sameRange(lastPos) else (currentLineNr == lastLineNr);
-          if(!skip || !dbFilenameSeen) {
-            val fileName = if(dbFilenameSeen) "" else {dbFilenameSeen = true; ilasmFileName(clasz)};
-            if(instr.pos.isRange) {
-              val startLine = instr.pos.focusStart.line
-              val endLine   = instr.pos.focusEnd.line
-              val startCol  = instr.pos.focusStart.column
-              val endCol    = instr.pos.focusEnd.column
-              mcode.setPosition(startLine, endLine, startCol, endCol, fileName)
-            } else {
-              mcode.setPosition(instr.pos.line, fileName)
-            }
-            lastLineNr = currentLineNr
-            lastPos = instr.pos
-          }
-        } catch { case _: UnsupportedOperationException => () }
-
-        if (previousWasNEW)
-          assert(instr.isInstanceOf[DUP], block)
-
-        instr match {
-          case THIS(clasz) =>
-            mcode.Emit(OpCodes.Ldarg_0)
-
-          case CONSTANT(const) =>
-            const.tag match {
-              case UnitTag    => ()
-              case BooleanTag => mcode.Emit(if (const.booleanValue) OpCodes.Ldc_I4_1
-                                            else OpCodes.Ldc_I4_0)
-              case ByteTag    => loadI4(const.byteValue, mcode)
-              case ShortTag   => loadI4(const.shortValue, mcode)
-              case CharTag    => loadI4(const.charValue, mcode)
-              case IntTag     => loadI4(const.intValue, mcode)
-              case LongTag    => mcode.Emit(OpCodes.Ldc_I8, const.longValue)
-              case FloatTag   => mcode.Emit(OpCodes.Ldc_R4, const.floatValue)
-              case DoubleTag  => mcode.Emit(OpCodes.Ldc_R8, const.doubleValue)
-              case StringTag  => mcode.Emit(OpCodes.Ldstr, const.stringValue)
-              case NullTag    => mcode.Emit(OpCodes.Ldnull)
-              case ClazzTag   =>
-                mcode.Emit(OpCodes.Ldtoken, msilType(const.typeValue))
-                mcode.Emit(OpCodes.Call, TYPE_FROM_HANDLE)
-              case _          => abort("Unknown constant value: " + const)
-            }
-
-          case LOAD_ARRAY_ITEM(kind) =>
-            (kind: @unchecked) match {
-              case BOOL           => mcode.Emit(OpCodes.Ldelem_I1)
-              case BYTE           => mcode.Emit(OpCodes.Ldelem_I1) // I1 for System.SByte, i.e. a scala.Byte
-              case SHORT          => mcode.Emit(OpCodes.Ldelem_I2)
-              case CHAR           => mcode.Emit(OpCodes.Ldelem_U2)
-              case INT            => mcode.Emit(OpCodes.Ldelem_I4)
-              case LONG           => mcode.Emit(OpCodes.Ldelem_I8)
-              case FLOAT          => mcode.Emit(OpCodes.Ldelem_R4)
-              case DOUBLE         => mcode.Emit(OpCodes.Ldelem_R8)
-              case REFERENCE(cls) => mcode.Emit(OpCodes.Ldelem_Ref)
-              case ARRAY(elem)    => mcode.Emit(OpCodes.Ldelem_Ref)
-
-              // case UNIT is not possible: an Array[Unit] will be an
-              //  Array[scala.runtime.BoxedUnit] (-> case REFERENCE)
-            }
-
-          case LOAD_LOCAL(local) => loadLocalOrAddress(local, "load_local", false)
-
-          case CIL_LOAD_LOCAL_ADDRESS(local) => loadLocalOrAddress(local, "cil_load_local_address", true)
-
-          case LOAD_FIELD(field, isStatic) => loadFieldOrAddress(field, isStatic, "load_field", false)
-
-          case CIL_LOAD_FIELD_ADDRESS(field, isStatic) => loadFieldOrAddress(field, isStatic, "cil_load_field_address", true)
-
-          case CIL_LOAD_ARRAY_ITEM_ADDRESS(kind) => mcode.Emit(OpCodes.Ldelema, msilType(kind))
-
-          case CIL_NEWOBJ(msym) =>
-            assert(msym.isClassConstructor)
-            val constructorInfo: ConstructorInfo = getConstructor(msym)
-            mcode.Emit(OpCodes.Newobj, constructorInfo)
-
-          case LOAD_MODULE(module) =>
-            debuglog("Generating LOAD_MODULE for: " + showsym(module))
-            mcode.Emit(OpCodes.Ldsfld, getModuleInstanceField(module))
-
-          case STORE_ARRAY_ITEM(kind) =>
-            (kind: @unchecked) match {
-              case BOOL           => mcode.Emit(OpCodes.Stelem_I1)
-              case BYTE           => mcode.Emit(OpCodes.Stelem_I1)
-              case SHORT          => mcode.Emit(OpCodes.Stelem_I2)
-              case CHAR           => mcode.Emit(OpCodes.Stelem_I2)
-              case INT            => mcode.Emit(OpCodes.Stelem_I4)
-              case LONG           => mcode.Emit(OpCodes.Stelem_I8)
-              case FLOAT          => mcode.Emit(OpCodes.Stelem_R4)
-              case DOUBLE         => mcode.Emit(OpCodes.Stelem_R8)
-              case REFERENCE(cls) => mcode.Emit(OpCodes.Stelem_Ref)
-              case ARRAY(elem)    => mcode.Emit(OpCodes.Stelem_Ref) // @TODO: test this! (occurs when calling a Array[Object]* vararg param method)
-
-              // case UNIT not possible (see comment at LOAD_ARRAY_ITEM)
-            }
-
-          case STORE_LOCAL(local) =>
-            val isArg = local.arg
-            val i = local.index
-            debuglog("store_local for " + local + ", index " + i)
-
-            // there are some locals defined by the compiler that
-            // are isArg and need to be stored.
-            if (isArg) {
-              if (i >= -128 && i <= 127)
-                mcode.Emit(OpCodes.Starg_S, i)
-              else
-                mcode.Emit(OpCodes.Starg, i)
-            } else {
-              i match {
-                case 0 => mcode.Emit(OpCodes.Stloc_0)
-                case 1 => mcode.Emit(OpCodes.Stloc_1)
-                case 2 => mcode.Emit(OpCodes.Stloc_2)
-                case 3 => mcode.Emit(OpCodes.Stloc_3)
-                case _      =>
-                  if (i >= -128 && i <= 127)
-                    mcode.Emit(OpCodes.Stloc_S, localBuilders(local))
-                  else
-                    mcode.Emit(OpCodes.Stloc, localBuilders(local))
-              }
-            }
-
-          case STORE_THIS(_) =>
-            // this only works for impl classes because the self parameter comes first
-            // in the method signature. If that changes, this code has to be revisited.
-            mcode.Emit(OpCodes.Starg_S, 0)
-
-          case STORE_FIELD(field, isStatic) =>
-            val fieldInfo = fields.get(field) match {
-              case Some(fInfo) => fInfo
-              case None =>
-                val fInfo = getType(field.owner).GetField(msilName(field))
-                fields(field) = fInfo
-                fInfo
-            }
-            mcode.Emit(if (isStatic) OpCodes.Stsfld else OpCodes.Stfld, fieldInfo)
-
-          case CALL_PRIMITIVE(primitive) =>
-            genPrimitive(primitive, instr.pos)
-
-          case CALL_METHOD(msym, style) =>
-            if (msym.isClassConstructor) {
-              val constructorInfo: ConstructorInfo = getConstructor(msym)
-              (style: @unchecked) match {
-                // normal constructor calls are Static..
-                case Static(_) =>
-                  if (method.symbol.isClassConstructor && method.symbol.owner == msym.owner)
-                    // we're generating a constructor (method: IMethod is a constructor), and we're
-                    // calling another constructor of the same class.
-
-                    // @LUC TODO: this can probably break, namely when having: class A { def this() { new A() } }
-                    // instead, the CALL_METHOD should carry additional information indicating whether
-                    // it's an instance-creation constructor call or not.
-                    mcode.Emit(OpCodes.Call, constructorInfo)
-                  else
-                    mcode.Emit(OpCodes.Newobj, constructorInfo)
-                case SuperCall(_) =>
-                  mcode.Emit(OpCodes.Call, constructorInfo)
-                  if (isStaticModule(clasz.symbol) &&
-                      notInitializedModules.contains(clasz.symbol) &&
-                      method.symbol.isClassConstructor)
-                    {
-                      notInitializedModules -= clasz.symbol
-                      mcode.Emit(OpCodes.Ldarg_0)
-                      mcode.Emit(OpCodes.Stsfld, getModuleInstanceField(clasz.symbol))
-                    }
-              }
-
-            } else {
-
-              var doEmit = true
-              getTypeOpt(msym.owner) match {
-                case Some(typ) if (typ.IsEnum) => {
-                  def negBool() = {
-                    mcode.Emit(OpCodes.Ldc_I4_0)
-                    mcode.Emit(OpCodes.Ceq)
-                  }
-                  doEmit = false
-                  val name = msym.name
-                  if (name eq nme.EQ)       { mcode.Emit(OpCodes.Ceq) }
-                  else if (name eq nme.NE)  { mcode.Emit(OpCodes.Ceq); negBool }
-                  else if (name eq nme.LT)  { mcode.Emit(OpCodes.Clt) }
-                  else if (name eq nme.LE)  { mcode.Emit(OpCodes.Cgt); negBool }
-                  else if (name eq nme.GT)  { mcode.Emit(OpCodes.Cgt) }
-                  else if (name eq nme.GE)  { mcode.Emit(OpCodes.Clt); negBool }
-                  else if (name eq nme.OR)  { mcode.Emit(OpCodes.Or) }
-                  else if (name eq nme.AND) { mcode.Emit(OpCodes.And) }
-                  else if (name eq nme.XOR) { mcode.Emit(OpCodes.Xor) }
-                  else
-                    doEmit = true
-                }
-                case _ => ()
-              }
-
-              // method: implicit view(FunctionX[PType0, PType1, ...,PTypeN, ResType]):DelegateType
-              val (isDelegateView, paramType, resType) = beforeTyper {
-                msym.tpe match {
-                  case MethodType(params, resultType)
-                  if (params.length == 1 && msym.name == nme.view_) =>
-                    val paramType = params(0).tpe
-                    val isDel = definitions.isCorrespondingDelegate(resultType, paramType)
-                    (isDel, paramType, resultType)
-                  case _ => (false, null, null)
-                }
-              }
-              if (doEmit && isDelegateView) {
-                doEmit = false
-                createDelegateCaller(paramType, resType)
-              }
-
-              if (doEmit &&
-                  (msym.name == nme.PLUS || msym.name == nme.MINUS)
-                  && clrTypes.isDelegateType(msilType(msym.owner.tpe)))
-                {
-                doEmit = false
-                val methodInfo: MethodInfo = getMethod(msym)
-                // call it as a static method, even if the compiler (symbol) thinks it's virtual
-                mcode.Emit(OpCodes.Call, methodInfo)
-                mcode.Emit(OpCodes.Castclass, msilType(msym.owner.tpe))
-              }
-
-              if (doEmit && definitions.Delegate_scalaCallers.contains(msym)) {
-                doEmit = false
-                val methodSym: Symbol = definitions.Delegate_scalaCallerTargets(msym)
-                val delegateType: Type = msym.tpe match {
-                  case MethodType(_, retType) => retType
-                  case _ => abort("not a method type: " + msym.tpe)
-                }
-                val methodInfo: MethodInfo = getMethod(methodSym)
-                val delegCtor = msilType(delegateType).GetConstructor(Array(MOBJECT, INT_PTR))
-                if (methodSym.isStatic) {
-                  mcode.Emit(OpCodes.Ldftn, methodInfo)
-                } else {
-                  mcode.Emit(OpCodes.Dup)
-                  mcode.Emit(OpCodes.Ldvirtftn, methodInfo)
-                }
-                mcode.Emit(OpCodes.Newobj, delegCtor)
-              }
-
-              if (doEmit) {
-                val methodInfo: MethodInfo = getMethod(msym)
-                (style: @unchecked) match {
-                  case SuperCall(_) =>
-                    mcode.Emit(OpCodes.Call, methodInfo)
-                  case Dynamic =>
-                    // methodInfo.DeclaringType is null for global methods
-                    val isValuetypeMethod = (methodInfo.DeclaringType ne null) && (methodInfo.DeclaringType.IsValueType)
-                    val isValuetypeVirtualMethod = isValuetypeMethod && (methodInfo.IsVirtual)
-                    if (dynToStatMapped(msym)) {
-                      mcode.Emit(OpCodes.Call, methodInfo)
-                    } else if (isValuetypeVirtualMethod) {
-                      mcode.Emit(OpCodes.Constrained, methodInfo.DeclaringType)
-                      mcode.Emit(OpCodes.Callvirt, methodInfo)
-                    } else if (isValuetypeMethod) {
-                      // otherwise error "Callvirt on a value type method" ensues
-                      mcode.Emit(OpCodes.Call, methodInfo)
-                    } else {
-                      mcode.Emit(OpCodes.Callvirt, methodInfo)
-                    }
-                  case Static(_) =>
-                    if (methodInfo.IsVirtual && !mcode.Ldarg0WasJustEmitted) {
-                      mcode.Emit(OpCodes.Callvirt, methodInfo)
-                    } else mcode.Emit(OpCodes.Call, methodInfo)
-                }
-              }
-            }
-
-          case BOX(boxType) =>
-            emitBox(mcode, boxType)
-
-          case UNBOX(boxType) =>
-            emitUnbox(mcode, boxType)
-
-          case CIL_UNBOX(boxType) =>
-            mcode.Emit(OpCodes.Unbox, msilType(boxType))
-
-          case CIL_INITOBJ(valueType) =>
-            mcode.Emit(OpCodes.Initobj, msilType(valueType))
-
-          case NEW(REFERENCE(cls)) =>
-            // the next instruction must be a DUP, see comment on `var previousWasNEW`
-            previousWasNEW = true
-
-          // works also for arrays and reference-types
-          case CREATE_ARRAY(elem, dims) =>
-            // TODO: handle multi dimensional arrays
-            assert(dims == 1, "Can't handle multi dimensional arrays")
-            mcode.Emit(OpCodes.Newarr, msilType(elem))
-
-          // works for arrays and reference-types
-          case IS_INSTANCE(tpe) =>
-            mcode.Emit(OpCodes.Isinst, msilType(tpe))
-            mcode.Emit(OpCodes.Ldnull)
-            mcode.Emit(OpCodes.Ceq)
-            mcode.Emit(OpCodes.Ldc_I4_0)
-            mcode.Emit(OpCodes.Ceq)
-
-          // works for arrays and reference-types
-          // quoting the Scala reference: "S <: T does not imply
-          //  Array[S] <: Array[T] in Scala. However, it is possible
-          //  to cast an array of S to an array of T if such a cast
-          //  is permitted in the host environment."
-          case CHECK_CAST(tpknd) =>
-            val tMSIL = msilType(tpknd)
-            mcode.Emit(OpCodes.Castclass, tMSIL)
-
-          // no SWITCH is generated when there's
-          //  - a default case ("case _ => ...") in the matching expr
-          //  - OR is used ("case 1 | 2 => ...")
-          case SWITCH(tags, branches) =>
-            // tags is List[List[Int]]; a list of integers for every label.
-            //    if the int on stack is 4, and 4 is in the second list => jump
-            //    to second label
-            // branches is List[BasicBlock]
-            //    the labels to jump to (the last one is the default one)
-
-            val switchLocal = mcode.DeclareLocal(MINT)
-            // several switch variables will appear with the same name in the
-            //  assembly code, but this causes no trouble
-            switchLocal.SetLocalSymInfo("$switch_var")
-
-            mcode.Emit(OpCodes.Stloc, switchLocal)
-            var i = 0
-            for (l <- tags) {
-              var targetLabel = labels(branches(i))
-              for (i <- l) {
-                mcode.Emit(OpCodes.Ldloc, switchLocal)
-                loadI4(i, mcode)
-                mcode.Emit(OpCodes.Beq, targetLabel)
-              }
-              i += 1
-            }
-            val defaultTarget = labels(branches(i))
-            if (next != branches(i))
-              mcode.Emit(OpCodes.Br, defaultTarget)
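            // A minimal worked example of this lowering, assuming hypothetical
            // tags List(List(1, 2), List(5)) and branches List(b1, b2, bDefault):
            //   ldloc $switch_var; ldc.i4.1; beq b1
            //   ldloc $switch_var; ldc.i4.2; beq b1
            //   ldloc $switch_var; ldc.i4.5; beq b2
            //   br bDefault   (omitted when bDefault is the block emitted next)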
-
-          case JUMP(whereto) =>
-            val (leaveHandler, leaveFinally, lfTarget) = leavesHandler(block, whereto)
-            if (leaveHandler) {
-              if (leaveFinally) {
-                if (lfTarget.isDefined) mcode.Emit(OpCodes.Leave, lfTarget.get)
-                else mcode.Emit(OpCodes.Endfinally)
-              } else
-                mcode.Emit(OpCodes.Leave, labels(whereto))
-            } else if (next != whereto)
-              mcode.Emit(OpCodes.Br, labels(whereto))
-
-          case CJUMP(success, failure, cond, kind) =>
-            // cond is TestOp (see Primitives.scala), and can take
-            // values EQ, NE, LT, GE, LE, GT
-            // kind is TypeKind
-            val isFloat = kind == FLOAT || kind == DOUBLE
-            val emit = (c: TestOp, l: Label) => emitBr(c, l, isFloat)
-            emitCondBr(block, cond, success, failure, next, emit)
-
-          case CZJUMP(success, failure, cond, kind) =>
-            emitCondBr(block, cond, success, failure, next, emitBrBool(_, _))
-
-          case RETURN(kind) =>
-            if (currentHandlers.isEmpty)
-              mcode.Emit(OpCodes.Ret)
-            else {
-              val (local, label) = returnFromHandler(kind)
-              if (kind != UNIT)
-                mcode.Emit(OpCodes.Stloc, local)
-              mcode.Emit(OpCodes.Leave, label)
-            }
-
-          case THROW(_) =>
-            mcode.Emit(OpCodes.Throw)
-
-          case DROP(kind) =>
-            mcode.Emit(OpCodes.Pop)
-
-          case DUP(kind) =>
-            // see comment on `var previousWasNEW`
-            if (!previousWasNEW)
-              mcode.Emit(OpCodes.Dup)
-            else
-              previousWasNEW = false
-
-          case MONITOR_ENTER() =>
-            mcode.Emit(OpCodes.Call, MMONITOR_ENTER)
-
-          case MONITOR_EXIT() =>
-            mcode.Emit(OpCodes.Call, MMONITOR_EXIT)
-
-          case SCOPE_ENTER(_) | SCOPE_EXIT(_) | LOAD_EXCEPTION(_) =>
-            ()
-        }
-
-      } // end for (instr <- b) { .. }
-    } // end genBlock
-
-    def genPrimitive(primitive: Primitive, pos: Position) {
-      primitive match {
-        case Negation(kind) =>
-          kind match {
-            // CHECK: is it possible to get this for BOOL? If so, verify.
-            case BOOL | BYTE | CHAR | SHORT | INT | LONG | FLOAT | DOUBLE =>
-              mcode.Emit(OpCodes.Neg)
-
-            case _ => abort("Impossible to negate a " + kind)
-          }
-
-        case Arithmetic(op, kind) =>
-          op match {
-            case ADD => mcode.Emit(OpCodes.Add)
-            case SUB => mcode.Emit(OpCodes.Sub)
-            case MUL => mcode.Emit(OpCodes.Mul)
-            case DIV => mcode.Emit(OpCodes.Div)
-            case REM => mcode.Emit(OpCodes.Rem)
-            case NOT => mcode.Emit(OpCodes.Not) //bitwise complement (one's complement)
-            case _ => abort("Unknown arithmetic primitive " + primitive )
-          }
-
-        case Logical(op, kind) => op match {
-          case AND => mcode.Emit(OpCodes.And)
-          case OR => mcode.Emit(OpCodes.Or)
-          case XOR => mcode.Emit(OpCodes.Xor)
-        }
-
-        case Shift(op, kind) => op match {
-          case LSL => mcode.Emit(OpCodes.Shl)
-          case ASR => mcode.Emit(OpCodes.Shr)
-          case LSR => mcode.Emit(OpCodes.Shr_Un)
-        }
-
-        case Conversion(src, dst) =>
-          debuglog("Converting from: " + src + " to: " + dst)
-
-          dst match {
-            case BYTE =>   mcode.Emit(OpCodes.Conv_I1) // I1 for System.SByte, i.e. a scala.Byte
-            case SHORT =>  mcode.Emit(OpCodes.Conv_I2)
-            case CHAR =>   mcode.Emit(OpCodes.Conv_U2)
-            case INT =>    mcode.Emit(OpCodes.Conv_I4)
-            case LONG =>   mcode.Emit(OpCodes.Conv_I8)
-            case FLOAT =>  mcode.Emit(OpCodes.Conv_R4)
-            case DOUBLE => mcode.Emit(OpCodes.Conv_R8)
-            case _ =>
-              Console.println("Illegal conversion at: " + clasz +
-                              " at: " + pos.source + ":" + pos.line)
-          }
-
-        case ArrayLength(_) =>
-          mcode.Emit(OpCodes.Ldlen)
-
-        case StartConcat =>
-          mcode.Emit(OpCodes.Newobj, MSTRING_BUILDER_CONSTR)
-
-
-        case StringConcat(el) =>
-          val elemType : MsilType = el match {
-            case REFERENCE(_) | ARRAY(_) => MOBJECT
-            case _ => msilType(el)
-          }
-
-          val argTypes:Array[MsilType] = Array(elemType)
-          val stringBuilderAppend = MSTRING_BUILDER.GetMethod("Append", argTypes )
-          mcode.Emit(OpCodes.Callvirt,  stringBuilderAppend)
-
-        case EndConcat =>
-          mcode.Emit(OpCodes.Callvirt, MSTRING_BUILDER_TOSTRING)
-
-        case _ =>
-          abort("Unimplemented primitive " + primitive)
-      }
-    } // end genPrimitive
-
-
-    ////////////////////// loading ///////////////////////
-
-    def loadI4(value: Int, code: ILGenerator): Unit = value match {
-      case -1 => code.Emit(OpCodes.Ldc_I4_M1)
-      case 0  => code.Emit(OpCodes.Ldc_I4_0)
-      case 1  => code.Emit(OpCodes.Ldc_I4_1)
-      case 2  => code.Emit(OpCodes.Ldc_I4_2)
-      case 3  => code.Emit(OpCodes.Ldc_I4_3)
-      case 4  => code.Emit(OpCodes.Ldc_I4_4)
-      case 5  => code.Emit(OpCodes.Ldc_I4_5)
-      case 6  => code.Emit(OpCodes.Ldc_I4_6)
-      case 7  => code.Emit(OpCodes.Ldc_I4_7)
-      case 8  => code.Emit(OpCodes.Ldc_I4_8)
-      case _  =>
-        if (value >= -128 && value <= 127)
-          code.Emit(OpCodes.Ldc_I4_S, value)
-        else
-          code.Emit(OpCodes.Ldc_I4, value)
-    }
-
-    def loadArg(code: ILGenerator, loadAddr: Boolean)(i: Int) =
-      if (loadAddr) {
-        if (i >= -128 && i <= 127)
-          code.Emit(OpCodes.Ldarga_S, i)
-        else
-          code.Emit(OpCodes.Ldarga, i)
-      } else {
-        i match {
-          case 0 => code.Emit(OpCodes.Ldarg_0)
-          case 1 => code.Emit(OpCodes.Ldarg_1)
-          case 2 => code.Emit(OpCodes.Ldarg_2)
-          case 3 => code.Emit(OpCodes.Ldarg_3)
-          case _      =>
-            if (i >= -128 && i <= 127)
-              code.Emit(OpCodes.Ldarg_S, i)
-            else
-              code.Emit(OpCodes.Ldarg, i)
-        }
-      }
-
-    def loadLocal(i: Int, local: Local, code: ILGenerator, loadAddr: Boolean) =
-      if (loadAddr) {
-        if (i >= -128 && i <= 127)
-          code.Emit(OpCodes.Ldloca_S, localBuilders(local))
-        else
-          code.Emit(OpCodes.Ldloca, localBuilders(local))
-      } else {
-        i match {
-          case 0 => code.Emit(OpCodes.Ldloc_0)
-          case 1 => code.Emit(OpCodes.Ldloc_1)
-          case 2 => code.Emit(OpCodes.Ldloc_2)
-          case 3 => code.Emit(OpCodes.Ldloc_3)
-          case _      =>
-            if (i >= -128 && i <= 127)
-              code.Emit(OpCodes.Ldloc_S, localBuilders(local))
-            else
-              code.Emit(OpCodes.Ldloc, localBuilders(local))
-        }
-      }
-
-    ////////////////////// branches ///////////////////////
-
-    /** Returns a Triple (Boolean, Boolean, Option[Label])
-     *   - whether the jump leaves some exception block (try / catch / finally)
-     *   - whether it leaves a finally handler (finally block, but not its try / catch)
-     *   - a label where to jump for leaving the finally handler
-     *     . None to leave directly using `endfinally`
-     *     . Some(label) to emit `leave label` (for try / catch inside a finally handler)
-     */
-    def leavesHandler(from: BasicBlock, to: BasicBlock): (Boolean, Boolean, Option[Label]) =
-      if (currentHandlers.isEmpty) (false, false, None)
-      else {
-        val h = currentHandlers.head
-        val leaveHead = { h.covers(from) != h.covers(to) ||
-                          h.blocks.contains(from) != h.blocks.contains(to) }
-        if (leaveHead) {
-          // we leave the innermost exception block.
-          // find out if we also leave some `finally` handler
-          currentHandlers.find(e => {
-            e.cls == NoSymbol && e.blocks.contains(from) != e.blocks.contains(to)
-          }) match {
-            case Some(finallyHandler) =>
-              if (h == finallyHandler) {
-                // the finally handler is the innermost, so we can emit `endfinally` directly
-                (true, true, None)
-              } else {
-                // we need to `Leave` to the `endfinally` of the next outer finally handler
-                val l = endFinallyLabels.getOrElseUpdate(finallyHandler, mcode.DefineLabel())
-                (true, true, Some(l))
-              }
-            case None =>
-              (true, false, None)
-          }
-        } else (false, false, None)
-      }
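    // Illustrative outcomes of leavesHandler, assuming hypothetical handler layouts:
    //   jumping out of a plain catch handler           -> (true, false, None), emitted as `leave target`
    //   jumping out of the innermost finally handler   -> (true, true, None), emitted as `endfinally`
    //   jumping out of a try/catch nested in a finally -> (true, true, Some(label)), emitted as `leave label`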
-
-    def emitCondBr(block: BasicBlock, cond: TestOp, success: BasicBlock, failure: BasicBlock,
-                   next: BasicBlock, emitBrFun: (TestOp, Label) => Unit) {
-      val (sLeaveHandler, sLeaveFinally, slfTarget) = leavesHandler(block, success)
-      val (fLeaveHandler, fLeaveFinally, flfTarget) = leavesHandler(block, failure)
-
-      if (sLeaveHandler || fLeaveHandler) {
-        val sLabelOpt = if (sLeaveHandler) {
-          val leaveSLabel = mcode.DefineLabel()
-          emitBrFun(cond, leaveSLabel)
-          Some(leaveSLabel)
-        } else {
-          emitBrFun(cond, labels(success))
-          None
-        }
-
-        if (fLeaveHandler) {
-          if (fLeaveFinally) {
-            if (flfTarget.isDefined) mcode.Emit(OpCodes.Leave, flfTarget.get)
-            else mcode.Emit(OpCodes.Endfinally)
-          } else
-            mcode.Emit(OpCodes.Leave, labels(failure))
-        } else
-          mcode.Emit(OpCodes.Br, labels(failure))
-
-        sLabelOpt.map(l => {
-          mcode.MarkLabel(l)
-          if (sLeaveFinally) {
-            if (slfTarget.isDefined) mcode.Emit(OpCodes.Leave, slfTarget.get)
-            else mcode.Emit(OpCodes.Endfinally)
-          } else
-            mcode.Emit(OpCodes.Leave, labels(success))
-        })
-      } else {
-        if (next == success) {
-          emitBrFun(cond.negate, labels(failure))
-        } else {
-          emitBrFun(cond, labels(success))
-          if (next != failure) {
-            mcode.Emit(OpCodes.Br, labels(failure))
-          }
-        }
-      }
-    }
-
-    def emitBr(condition: TestOp, dest: Label, isFloat: Boolean) {
-      condition match {
-        case EQ => mcode.Emit(OpCodes.Beq, dest)
-        case NE => mcode.Emit(OpCodes.Bne_Un, dest)
-        case LT => mcode.Emit(if (isFloat) OpCodes.Blt_Un else OpCodes.Blt, dest)
-        case GE => mcode.Emit(if (isFloat) OpCodes.Bge_Un else OpCodes.Bge, dest)
-        case LE => mcode.Emit(if (isFloat) OpCodes.Ble_Un else OpCodes.Ble, dest)
-        case GT => mcode.Emit(if (isFloat) OpCodes.Bgt_Un else OpCodes.Bgt, dest)
-      }
-    }
-
-    def emitBrBool(cond: TestOp, dest: Label) {
-      (cond: @unchecked) match {
-        // EQ -> Brfalse, NE -> Brtrue; this is because we come from
-        // a CZJUMP. If the value on the stack is 0 (e.g. a boolean
-        // method returned false), and we are in the case EQ, then
-        // we need to emit Brfalse (EQ Zero means false). vice versa
-        case EQ => mcode.Emit(OpCodes.Brfalse, dest)
-        case NE => mcode.Emit(OpCodes.Brtrue, dest)
-      }
-    }
-
-    ////////////////////// local vars ///////////////////////
-
-    /**
-     * Compute the indexes of each local variable of the given
-     * method.
-     */
-    def computeLocalVarsIndex(m: IMethod) {
-      var idx = if (m.symbol.isStaticMember) 0 else 1
-
-      val params = m.params
-      for (l <- params) {
-        debuglog("Index value for parameter " + l + ": " + idx)
-        l.index = idx
-        idx += 1 // sizeOf(l.kind)
-      }
-
-      val locvars = m.locals filterNot (params contains)
-      idx = 0
-
-      for (l <- locvars) {
-        debuglog("Index value for local variable " + l + ": " + idx)
-        l.index = idx
-        idx += 1 // sizeOf(l.kind)
-      }
-
-    }
-
-    ////////////////////// Utilities ////////////////////////
-
-    /** Return a name of this symbol that can be used on the .NET
-     * platform. It removes spaces from names.
-     *
-     * Special handling: scala.All and scala.AllRef are 'erased' to
-     * scala.All$ and scala.AllRef$. This is needed because they are
-     * not real classes, and they mean 'abrupt termination upon evaluation
-     * of that expression' or 'null' respectively. This handling is
-     * done already in GenICode, but here we need to remove references
-     * from method signatures to these types, because such classes cannot
-     * exist on the classpath: the type checker would be very confused.
-     */
-    def msilName(sym: Symbol): String = {
-      val suffix = sym.moduleSuffix
-      // Flags.JAVA: "symbol was not defined by a scala-class" (java, or .net-class)
-
-      if (sym == definitions.NothingClass)
-        return "scala.runtime.Nothing$"
-      else if (sym == definitions.NullClass)
-        return "scala.runtime.Null$"
-
-      (if (sym.isClass || (sym.isModule && !sym.isMethod)) {
-        if (sym.isNestedClass) sym.simpleName
-        else sym.fullName
-       } else
-         sym.simpleName.toString.trim()) + suffix
-    }
-
-
-    ////////////////////// flags ///////////////////////
-
-    def msilTypeFlags(sym: Symbol): Int = {
-      var mf: Int = TypeAttributes.AutoLayout | TypeAttributes.AnsiClass
-
-      if(sym.isNestedClass) {
-        mf = mf | (if (sym hasFlag Flags.PRIVATE) TypeAttributes.NestedPrivate else TypeAttributes.NestedPublic)
-      } else {
-        mf = mf | (if (sym hasFlag Flags.PRIVATE) TypeAttributes.NotPublic else TypeAttributes.Public)
-      }
-      mf = mf | (if (sym hasFlag Flags.ABSTRACT) TypeAttributes.Abstract else 0)
-      mf = mf | (if (sym.isTrait && !sym.isImplClass) TypeAttributes.Interface else TypeAttributes.Class)
-      mf = mf | (if (sym isFinal) TypeAttributes.Sealed else 0)
-
-      sym.annotations foreach { a => a match {
-        case AnnotationInfo(SerializableAttr, _, _) =>
-          // TODO: add the Serializable TypeAttribute also if the annotation
-          // System.SerializableAttribute is present (.net annotation, not scala)
-          //  Best way to do it: compare with
-          //  definitions.getClass("System.SerializableAttribute").tpe
-          //  when frontend available
-          mf = mf | TypeAttributes.Serializable
-        case _ => ()
-      }}
-
-      mf
-      // static: not possible (or?)
-    }
-
-    def msilMethodFlags(sym: Symbol): Short = {
-      var mf: Int = MethodAttributes.HideBySig |
-        (if (sym hasFlag Flags.PRIVATE) MethodAttributes.Private
-         else MethodAttributes.Public)
-
-      if (!sym.isClassConstructor) {
-        if (sym.isStaticMember)
-          mf = mf | FieldAttributes.Static // coincidentally, same value as for MethodAttributes.Static ...
-        else {
-          mf = mf | MethodAttributes.Virtual
-          if (sym.isFinal && !getType(sym.owner).IsInterface)
-            mf = mf | MethodAttributes.Final
-          if (sym.isDeferred || getType(sym.owner).IsInterface)
-            mf = mf | MethodAttributes.Abstract
-        }
-      }
-
-      if (sym.isStaticMember) {
-        mf = mf | MethodAttributes.Static
-      }
-
-      // constructors of module classes should be private
-      if (sym.isPrimaryConstructor && isTopLevelModule(sym.owner)) {
-        mf |= MethodAttributes.Private
-        mf &= ~(MethodAttributes.Public)
-      }
-
-      mf.toShort
-    }
-
-    def msilFieldFlags(sym: Symbol): Short = {
-      var mf: Int =
-        if (sym hasFlag Flags.PRIVATE) FieldAttributes.Private
-        else if (sym hasFlag Flags.PROTECTED) FieldAttributes.FamORAssem
-        else FieldAttributes.Public
-
-      if (sym hasFlag Flags.FINAL)
-        mf = mf | FieldAttributes.InitOnly
-
-      if (sym.isStaticMember)
-        mf = mf | FieldAttributes.Static
-
-      // TRANSIENT: "not serialized", VOLATILE: doesn't exist on .net
-      // TODO: add this annotation also if the class has the custom attribute
-      // System.NotSerializedAttribute
-      sym.annotations.foreach( a => a match {
-        case AnnotationInfo(TransientAtt, _, _) =>
-          mf = mf | FieldAttributes.NotSerialized
-        case _ => ()
-      })
-
-      mf.toShort
-    }
-
-    ////////////////////// builders, types ///////////////////////
-
-    var entryPoint: Symbol = _
-
-    val notInitializedModules = mutable.HashSet[Symbol]()
-
-    // TODO: create fields also in def createType, and not in genClass,
-    // add a getField method (it only works as it is because fields are never
-    // accessed from outside a class)
-
-    val localBuilders = mutable.HashMap[Local, LocalBuilder]()
-
-    private[GenMSIL] def findEntryPoint(cls: IClass) {
-
-      def isEntryPoint(sym: Symbol):Boolean = {
-        if (isStaticModule(sym.owner) && msilName(sym) == "main")
-          if (sym.tpe.paramTypes.length == 1) {
-            toTypeKind(sym.tpe.paramTypes(0)) match {
-              case ARRAY(elem) =>
-                if (elem.toType.typeSymbol == definitions.StringClass) {
-                  return true
-                }
-              case _ => ()
-            }
-          }
-        false
-      }
-
-      if((entryPoint == null) && opt.showClass.isDefined) {  // TODO introduce dedicated setting instead
-        val entryclass = opt.showClass.get.toString
-        val cfn = cls.symbol.fullName
-        if(cfn == entryclass) {
-          for (m <- cls.methods; if isEntryPoint(m.symbol)) { entryPoint = m.symbol }
-          if(entryPoint == null) { warning("Couldn't find main method in class " + cfn) }
-        }
-      }
-
-      if (firstSourceName == "")
-        if (cls.symbol.sourceFile != null) // is null for nested classes
-          firstSourceName = cls.symbol.sourceFile.name
-    }
-
-    // #####################################################################
-    // get and create types
-
-    private def msilType(t: TypeKind): MsilType = (t: @unchecked) match {
-      case UNIT           => MVOID
-      case BOOL           => MBOOL
-      case BYTE           => MBYTE
-      case SHORT          => MSHORT
-      case CHAR           => MCHAR
-      case INT            => MINT
-      case LONG           => MLONG
-      case FLOAT          => MFLOAT
-      case DOUBLE         => MDOUBLE
-      case REFERENCE(cls) => getType(cls)
-      case ARRAY(elem)    =>
-        msilType(elem) match {
-          // For type builders, cannot call "clrTypes.mkArrayType" because this looks up
-          // the type "tp" in the assembly (not in the HashMap "types" of the backend).
-          // This can fail for nested types because the builders are not complete yet.
-          case tb: TypeBuilder => tb.MakeArrayType()
-          case tp: MsilType => clrTypes.mkArrayType(tp)
-        }
-    }
-
-    private def msilType(tpe: Type): MsilType = msilType(toTypeKind(tpe))
-
-    private def msilParamTypes(sym: Symbol): Array[MsilType] = {
-      sym.tpe.paramTypes.map(msilType).toArray
-    }
-
-    def getType(sym: Symbol) = getTypeOpt(sym).getOrElse(abort(showsym(sym)))
-
-    /**
-     * Get an MSIL type from a symbol. First look in the clrTypes.types map, then
-     * look up the name using clrTypes.getType
-     */
-    def getTypeOpt(sym: Symbol): Option[MsilType] = {
-      val tmp = types.get(sym)
-      tmp match {
-        case typ @ Some(_) => typ
-        case None =>
-          def typeString(sym: Symbol): String = {
-            val s = if (sym.isNestedClass) typeString(sym.owner) +"+"+ sym.simpleName
-                    else sym.fullName
-            if (sym.isModuleClass && !sym.isTrait) s + "$" else s
-          }
-          val name = typeString(sym)
-          val typ = clrTypes.getType(name)
-          if (typ == null)
-            None
-          else {
-            types(sym) = typ
-            Some(typ)
-          }
-      }
-    }
-
-    def mapType(sym: Symbol, mType: MsilType) {
-      assert(mType != null, showsym(sym))
-      types(sym) = mType
-    }
-
-    def createTypeBuilder(iclass: IClass) {
-      /**
-       * First look in the clrTypes.types map; if that fails, check whether it's a class being compiled; otherwise
-       * look it up by name (clrTypes.getType calls the static method msil.Type.GetType(fullname)).
-       */
-      def msilTypeFromSym(sym: Symbol): MsilType = {
-        types.get(sym).getOrElse {
-          classes.get(sym) match {
-            case Some(iclass) =>
-              msilTypeBuilderFromSym(sym)
-            case None =>
-              getType(sym)
-          }
-        }
-      }
-
-      def msilTypeBuilderFromSym(sym: Symbol): TypeBuilder = {
-        if(!(types.contains(sym) && types(sym).isInstanceOf[TypeBuilder])){
-          val iclass = classes(sym)
-          assert(iclass != null)
-          createTypeBuilder(iclass)
-        }
-        types(sym).asInstanceOf[TypeBuilder]
-      }
-
-      val sym = iclass.symbol
-      if (types.contains(sym) && types(sym).isInstanceOf[TypeBuilder])
-        return
-
-      def isInterface(s: Symbol) = s.isTrait && !s.isImplClass
-      val parents: List[Type] =
-        if (sym.info.parents.isEmpty) List(definitions.ObjectClass.tpe)
-        else sym.info.parents.distinct
-
-      val superType : MsilType = if (isInterface(sym)) null else msilTypeFromSym(parents.head.typeSymbol)
-      debuglog("super type: " + parents(0).typeSymbol + ", msil type: " + superType)
-
-      val interfaces: Array[MsilType] =
-        parents.tail.map(p => msilTypeFromSym(p.typeSymbol)).toArray
-      if (parents.length > 1) {
-        if (settings.debug.value) {
-          log("interfaces:")
-          for (i <- 0.until(interfaces.length)) {
-            log("  type: " + parents(i + 1).typeSymbol + ", msil type: " + interfaces(i))
-          }
-        }
-      }
-
-      val tBuilder = if (sym.isNestedClass) {
-        val ownerT = msilTypeBuilderFromSym(sym.owner).asInstanceOf[TypeBuilder]
-        ownerT.DefineNestedType(msilName(sym), msilTypeFlags(sym), superType, interfaces)
-      } else {
-        mmodule.DefineType(msilName(sym), msilTypeFlags(sym), superType, interfaces)
-      }
-      mapType(sym, tBuilder)
-    } // createTypeBuilder
-
-    def createClassMembers(iclass: IClass) {
-      try {
-        createClassMembers0(iclass)
-      }
-      catch {
-        case e: Throwable =>
-          java.lang.System.err.println(showsym(iclass.symbol))
-          java.lang.System.err.println("with methods = " + iclass.methods)
-          throw e
-      }
-    }
-
-    def createClassMembers0(iclass: IClass) {
-
-      val mtype = getType(iclass.symbol).asInstanceOf[TypeBuilder]
-
-      for (ifield <- iclass.fields) {
-        val sym = ifield.symbol
-        debuglog("Adding field: " + sym.fullName)
-
-        var attributes = msilFieldFlags(sym)
-        val fieldTypeWithCustomMods =
-          new PECustomMod(msilType(sym.tpe),
-                          customModifiers(sym.annotations))
-        val fBuilder = mtype.DefineField(msilName(sym),
-                                         fieldTypeWithCustomMods,
-                                         attributes)
-        fields(sym) = fBuilder
-        addAttributes(fBuilder, sym.annotations)
-      } // all iclass.fields iterated over
-
-      if (isStaticModule(iclass.symbol)) {
-        val sc = iclass.lookupStaticCtor
-        if (sc.isDefined) {
-          val m = sc.get
-          val oldLastBlock = m.lastBlock
-          val lastBlock = m.newBlock()
-          oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
-          // call object's private ctor from static ctor
-          lastBlock.emit(CIL_NEWOBJ(iclass.symbol.primaryConstructor))
-          lastBlock.emit(DROP(toTypeKind(iclass.symbol.tpe)))
-          lastBlock emit RETURN(UNIT)
-          lastBlock.close
-        }
-      }
-
-      if (iclass.symbol != definitions.ArrayClass) {
-      for (m: IMethod <- iclass.methods) {
-        val sym = m.symbol
-        debuglog("Creating MethodBuilder for " + Flags.flagsToString(sym.flags) + " " +
-              sym.owner.fullName + "::" + sym.name)
-
-        val ownerType = getType(sym.enclClass).asInstanceOf[TypeBuilder]
-        assert(mtype == ownerType, "mtype = " + mtype + "; ownerType = " + ownerType)
-        var paramTypes = msilParamTypes(sym)
-        val attr = msilMethodFlags(sym)
-
-        if (m.symbol.isClassConstructor) {
-          val constr =
-            ownerType.DefineConstructor(attr, CallingConventions.Standard, paramTypes)
-          for (i <- 0.until(paramTypes.length)) {
-            constr.DefineParameter(i, ParameterAttributes.None, msilName(m.params(i).sym))
-          }
-          mapConstructor(sym, constr)
-          addAttributes(constr, sym.annotations)
-        } else {
-          var resType = msilType(m.returnType)
-          val method =
-            ownerType.DefineMethod(msilName(sym), attr, resType, paramTypes)
-          for (i <- 0.until(paramTypes.length)) {
-            method.DefineParameter(i, ParameterAttributes.None, msilName(m.params(i).sym))
-          }
-          if (!methods.contains(sym))
-            mapMethod(sym, method)
-          addAttributes(method, sym.annotations)
-          debuglog("\t created MethodBuilder " + method)
-        }
-      }
-      } // method builders created for non-array iclass
-
-      if (isStaticModule(iclass.symbol)) {
-        addModuleInstanceField(iclass.symbol)
-        notInitializedModules += iclass.symbol
-        if (iclass.lookupStaticCtor.isEmpty) {
-          addStaticInit(iclass.symbol)
-        }
-      }
-
-    } // createClassMembers0
-
-    private def isTopLevelModule(sym: Symbol): Boolean =
-      beforeRefchecks {
-        sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass
-      }
-
-    // if the module is lifted it does not need to be initialized in
-    // its static constructor, and the MODULE$ field is not required.
-    // the outer class will take care of it.
-    private def isStaticModule(sym: Symbol): Boolean = {
-      // .net inner classes: removed '!sym.hasFlag(Flags.LIFTED)', added
-      // 'sym.isStatic'. -> no longer compatible without skipping flatten!
-      sym.isModuleClass && sym.isStatic && !sym.isImplClass
-    }
-
-    private def isCloneable(sym: Symbol): Boolean = {
-      !sym.annotations.forall( a => a match {
-        case AnnotationInfo(CloneableAttr, _, _) => false
-        case _ => true
-      })
-    }
-
-    private def addModuleInstanceField(sym: Symbol) {
-      debuglog("Adding Module-Instance Field for " + showsym(sym))
-      val tBuilder = getType(sym).asInstanceOf[TypeBuilder]
-      val fb = tBuilder.DefineField(MODULE_INSTANCE_NAME,
-                           tBuilder,
-                           (FieldAttributes.Public |
-                            //FieldAttributes.InitOnly |
-                            FieldAttributes.Static).toShort)
-      fields(sym) = fb
-    }
-
-
-    // the symbol may be an object-symbol (module-symbol), or a module-class-symbol
-    private def getModuleInstanceField(sym: Symbol): FieldInfo = {
-      assert(sym.isModule || sym.isModuleClass, "Expected module: " + showsym(sym))
-
-      // when called by LOAD_MODULE, the corresponding type may not
-      // exist yet -> make a getType
-      val moduleClassSym = if (sym.isModule) sym.moduleClass else sym
-
-      // TODO: get module field for modules not defined in the
-      // source currently compiling (e.g. Console)
-
-      fields get moduleClassSym match {
-        case Some(sym) => sym
-        case None =>
-          //val mclass = types(moduleClassSym)
-          val nameInMetadata = nestingAwareFullClassname(moduleClassSym)
-          val mClass = clrTypes.getType(nameInMetadata)
-          val mfield = mClass.GetField("MODULE$")
-          assert(mfield ne null, "module not found " + showsym(moduleClassSym))
-          fields(moduleClassSym) = mfield
-          mfield
-      }
-
-      //fields(moduleClassSym)
-    }
-
-    def nestingAwareFullClassname(csym: Symbol) : String = {
-      val suffix = csym.moduleSuffix
-      val res = if (csym.isNestedClass)
-        nestingAwareFullClassname(csym.owner) + "+" + csym.encodedName
-      else
-        csym.fullName
-      res + suffix
-    }
-
-    /** Adds a static initializer which creates an instance of the module
-     *  class (calls the primary constructor). A special primary constructor
-     *  will be generated (notInitializedModules) which stores the new instance
-     *  in the MODULE$ field right after the super call.
-     */
-    private def addStaticInit(sym: Symbol) {
-      val tBuilder = getType(sym).asInstanceOf[TypeBuilder]
-
-      val staticInit = tBuilder.DefineConstructor(
-        (MethodAttributes.Static | MethodAttributes.Public).toShort,
-        CallingConventions.Standard,
-        MsilType.EmptyTypes)
-
-      val sicode = staticInit.GetILGenerator()
-
-      val instanceConstructor = constructors(sym.primaryConstructor)
-
-      // there are no constructor parameters. Assuming the constructor takes no parameters
-      // is fine: we call (in the static constructor) the constructor of the module class,
-      // which takes no arguments - an object definition cannot take constructor arguments.
-      sicode.Emit(OpCodes.Newobj, instanceConstructor)
-      // the stsfld is done in the instance constructor, just after the super call.
-      sicode.Emit(OpCodes.Pop)
-
-      sicode.Emit(OpCodes.Ret)
-    }
-
-    private def generateMirrorClass(sym: Symbol) {
-      val tBuilder = getType(sym)
-      assert(sym.isModuleClass, "Can't generate Mirror-Class for the Non-Module class " + sym)
-      debuglog("Dumping mirror class for object: " + sym)
-      val moduleName = msilName(sym)
-      val mirrorName = moduleName.substring(0, moduleName.length() - 1)
-      val mirrorTypeBuilder = mmodule.DefineType(mirrorName,
-                                                 TypeAttributes.Class |
-                                                 TypeAttributes.Public |
-                                                 TypeAttributes.Sealed,
-                                                 MOBJECT,
-                                                 MsilType.EmptyTypes)
-
-      val iclass = classes(sym)
-
-      for (m <- sym.tpe.nonPrivateMembers
-           if m.owner != definitions.ObjectClass && !m.isProtected &&
-           m.isMethod && !m.isClassConstructor && !m.isStaticMember && !m.isCase &&
-           !m.isDeferred)
-        {
-          debuglog("   Mirroring method: " + m)
-          val paramTypes = msilParamTypes(m)
-          val paramNames: Array[String] = new Array[String](paramTypes.length)
-          for (i <- 0 until paramTypes.length)
-            paramNames(i) = "x_" + i
-
-          // CHECK: verify if getMethodName is better than msilName
-          val mirrorMethod = mirrorTypeBuilder.DefineMethod(msilName(m),
-                                                            (MethodAttributes.Public |
-                                                            MethodAttributes.Static).toShort,
-                                                            msilType(m.tpe.resultType),
-                                                            paramTypes)
-
-          var i = 0
-          while (i < paramTypes.length) {
-            mirrorMethod.DefineParameter(i, ParameterAttributes.None, paramNames(i))
-            i += 1
-          }
-
-          val mirrorCode = mirrorMethod.GetILGenerator()
-          mirrorCode.Emit(OpCodes.Ldsfld, getModuleInstanceField(sym))
-          val mInfo = getMethod(m)
-          for (paramidx <- 0.until(paramTypes.length)) {
-            val mInfoParams = mInfo.GetParameters
-            val loadAddr = mInfoParams(paramidx).ParameterType.IsByRef
-            loadArg(mirrorCode, loadAddr)(paramidx)
-          }
-
-          mirrorCode.Emit(OpCodes.Callvirt, getMethod(m))
-          mirrorCode.Emit(OpCodes.Ret)
-        }
-
-      addSymtabAttribute(sym.sourceModule, mirrorTypeBuilder)
-
-      mirrorTypeBuilder.CreateType()
-      mirrorTypeBuilder.setSourceFilepath(iclass.cunit.source.file.path)
-    }
-
-
-    // #####################################################################
-    // delegate callers
-
-    var delegateCallers: TypeBuilder = _
-    var nbDelegateCallers: Int = 0
-
-    private def initDelegateCallers() = {
-      delegateCallers = mmodule.DefineType("$DelegateCallers", TypeAttributes.Public |
-                                          TypeAttributes.Sealed)
-    }
-
-    private def createDelegateCaller(functionType: Type, delegateType: Type) = {
-      if (delegateCallers == null)
-        initDelegateCallers()
-      // create a field and store the function-object
-      val mFunctionType: MsilType = msilType(functionType)
-      val anonfunField: FieldBuilder = delegateCallers.DefineField(
-        "$anonfunField$$" + nbDelegateCallers, mFunctionType,
-        (FieldAttributes.InitOnly | FieldAttributes.Public | FieldAttributes.Static).toShort)
-      mcode.Emit(OpCodes.Stsfld, anonfunField)
-
-
-      // create the static caller method and the delegate object
-      val (params, returnType) = delegateType.member(nme.apply).tpe match {
-        case MethodType(delParams, delReturn) => (delParams, delReturn)
-        case _ => abort("not a delegate type: "  + delegateType)
-      }
-      val caller: MethodBuilder = delegateCallers.DefineMethod(
-        "$delegateCaller$$" + nbDelegateCallers,
-        (MethodAttributes.Final | MethodAttributes.Public | MethodAttributes.Static).toShort,
-        msilType(returnType), (params map (_.tpe)).map(msilType).toArray)
-      for (i <- 0 until params.length)
-        caller.DefineParameter(i, ParameterAttributes.None, "arg" + i) // FIXME: use name of parameter symbol
-      val delegCtor = msilType(delegateType).GetConstructor(Array(MOBJECT, INT_PTR))
-      mcode.Emit(OpCodes.Ldnull)
-      mcode.Emit(OpCodes.Ldftn, caller)
-      mcode.Emit(OpCodes.Newobj, delegCtor)
-
-
-      // create the static caller method body
-      val functionApply: MethodInfo = getMethod(functionType.member(nme.apply))
-      val dcode: ILGenerator = caller.GetILGenerator()
-      dcode.Emit(OpCodes.Ldsfld, anonfunField)
-      for (i <- 0 until params.length) {
-        loadArg(dcode, false /* TODO confirm whether passing actual as-is to formal is correct wrt the ByRef attribute of the param */)(i)
-        emitBox(dcode, toTypeKind(params(i).tpe))
-      }
-      dcode.Emit(OpCodes.Callvirt, functionApply)
-      emitUnbox(dcode, toTypeKind(returnType))
-      dcode.Emit(OpCodes.Ret)
-
-      nbDelegateCallers = nbDelegateCallers + 1
-
-    } //def createDelegateCaller
-
-    def emitBox(code: ILGenerator, boxType: TypeKind) = (boxType: @unchecked) match {
-      // doesn't make sense, unit as parameter..
-      case UNIT   => code.Emit(OpCodes.Ldsfld, boxedUnit)
-      case BOOL | BYTE | SHORT | CHAR | INT | LONG | FLOAT | DOUBLE =>
-        code.Emit(OpCodes.Box, msilType(boxType))
-      case REFERENCE(cls) if clrTypes.isValueType(cls) =>
-        code.Emit(OpCodes.Box, (msilType(boxType)))
-      case REFERENCE(_) | ARRAY(_) =>
-        warning("Tried to BOX a non-valuetype.")
-        ()
-    }
-
-    def emitUnbox(code: ILGenerator, boxType: TypeKind) = (boxType: @unchecked) match {
-      case UNIT   => code.Emit(OpCodes.Pop)
-      /* (1) it's essential to keep the code emitted here (as of now plain calls to System.Convert.ToBlaBla methods)
-             behaviorally equivalent w.r.t. the BoxesRunTime.unboxToBlaBla methods
-             (case null: that's easy, case boxed: track changes to unboxBlaBla)
-         (2) See also: asInstanceOf to cast from Any to number,
-             tracked in http://lampsvn.epfl.ch/trac/scala/ticket/4437  */
-      case BOOL   => code.Emit(OpCodes.Call, toBool)
-      case BYTE   => code.Emit(OpCodes.Call, toSByte)
-      case SHORT  => code.Emit(OpCodes.Call, toShort)
-      case CHAR   => code.Emit(OpCodes.Call, toChar)
-      case INT    => code.Emit(OpCodes.Call, toInt)
-      case LONG   => code.Emit(OpCodes.Call, toLong)
-      case FLOAT  => code.Emit(OpCodes.Call, toFloat)
-      case DOUBLE => code.Emit(OpCodes.Call, toDouble)
-      case REFERENCE(cls) if clrTypes.isValueType(cls) =>
-        code.Emit(OpCodes.Unbox, msilType(boxType))
-        code.Emit(OpCodes.Ldobj, msilType(boxType))
-      case REFERENCE(_) | ARRAY(_) =>
-        warning("Tried to UNBOX a non-valuetype.")
-        ()
-    }
-
-    // #####################################################################
-    // get and create methods / constructors
-
-    def getConstructor(sym: Symbol): ConstructorInfo = constructors.get(sym) match {
-      case Some(constr) => constr
-      case None =>
-        val mClass = getType(sym.owner)
-        val constr = mClass.GetConstructor(msilParamTypes(sym))
-        if (constr eq null) {
-          java.lang.System.out.println("Cannot find constructor " + sym.owner + "::" + sym.name)
-          java.lang.System.out.println("scope = " + sym.owner.tpe.decls)
-          abort(sym.fullName)
-        }
-        else {
-          mapConstructor(sym, constr)
-          constr
-        }
-    }
-
-    def mapConstructor(sym: Symbol, cInfo: ConstructorInfo) = {
-      constructors(sym) = cInfo
-    }
-
-    private def getMethod(sym: Symbol): MethodInfo = {
-
-      methods.get(sym) match {
-        case Some(method) => method
-        case None =>
-          val mClass = getType(sym.owner)
-          try {
-            val method = mClass.GetMethod(msilName(sym), msilParamTypes(sym),
-                                          msilType(sym.tpe.resultType))
-            if (method eq null) {
-              java.lang.System.out.println("Cannot find method " + sym.owner + "::" + msilName(sym))
-              java.lang.System.out.println("scope = " + sym.owner.tpe.decls)
-              abort(sym.fullName)
-            }
-            else {
-              mapMethod(sym, method)
-              method
-            }
-          }
-          catch {
-            case e: Exception =>
-              Console.println("While looking up " + mClass + "::" + sym.nameString)
-            Console.println("\t" + showsym(sym))
-            throw e
-          }
-      }
-    }
-
-    /*
-     * add a mapping between sym and mInfo
-     */
-    private def mapMethod(sym: Symbol, mInfo: MethodInfo) {
-      assert (mInfo != null, mInfo)
-      methods(sym) = mInfo
-    }
-
-    /*
-     * add mapping between sym and method with newName, paramTypes of newClass
-     */
-    private def mapMethod(sym: Symbol, newClass: MsilType, newName: String, paramTypes: Array[MsilType]) {
-      val methodInfo = newClass.GetMethod(newName, paramTypes)
-      assert(methodInfo != null, "Can't find mapping for " + sym + " -> " +
-             newName + "(" + paramTypes + ")")
-      mapMethod(sym, methodInfo)
-      if (methodInfo.IsStatic)
-        dynToStatMapped += sym
-    }
-
-    /*
-     * add mapping between method with name and paramTypes of clazz to
-     * method with newName and newParamTypes of newClass (used for instance
-     * for "wait")
-     */
-    private def mapMethod(
-      clazz: Symbol, name: Name, paramTypes: Array[Type],
-      newClass: MsilType, newName: String, newParamTypes: Array[MsilType]) {
-        val methodSym = lookupMethod(clazz, name, paramTypes)
-        assert(methodSym != null, "cannot find method " + name + "(" +
-               paramTypes + ")" + " in class " + clazz)
-        mapMethod(methodSym, newClass, newName, newParamTypes)
-      }
-
-    /*
-     * add mapping for member with name and paramTypes to member
-     * newName of newClass (same parameters)
-     */
-    private def mapMethod(
-      clazz: Symbol, name: Name, paramTypes: Array[Type],
-      newClass: MsilType, newName: String) {
-        mapMethod(clazz, name, paramTypes, newClass, newName, paramTypes map msilType)
-      }
-
-    /*
-     * add mapping for all methods with name of clazz to the corresponding
-     * method (same parameters) with newName of newClass
-     */
-    private def mapMethod(
-      clazz: Symbol, name: Name,
-      newClass: MsilType, newName: String) {
-        val memberSym: Symbol = clazz.tpe.member(name)
-        memberSym.tpe match {
-          // alternatives: List[Symbol]
-          case OverloadedType(_, alternatives) =>
-            alternatives.foreach(s => mapMethod(s, newClass, newName, msilParamTypes(s)))
-
-          // paramTypes: List[Type], resType: Type
-          case MethodType(params, resType) =>
-            mapMethod(memberSym, newClass, newName, msilParamTypes(memberSym))
-
-          case _ =>
-            abort("member not found: " + clazz + ", " + name)
-        }
-      }
-
-
-    /*
-     * find the method in clazz with name and paramTypes
-     */
-    private def lookupMethod(clazz: Symbol, name: Name, paramTypes: Array[Type]): Symbol = {
-      val memberSym = clazz.tpe.member(name)
-      memberSym.tpe match {
-        case OverloadedType(_, alternatives) =>
-          alternatives.find(s => {
-            var i: Int = 0
-            var typesOK: Boolean = true
-            if (paramTypes.length == s.tpe.paramTypes.length) {
-              while(i < paramTypes.length) {
-                if (paramTypes(i) != s.tpe.paramTypes(i))
-                  typesOK = false
-                i += 1
-              }
-            } else {
-              typesOK = false
-            }
-            typesOK
-          }) match {
-            case Some(sym) => sym
-            case None => abort("member of " + clazz + ", " + name + "(" +
-                               paramTypes + ") not found")
-          }
-
-        case MethodType(_, _) => memberSym
-
-        case _ => abort("member not found: " + name + " of " + clazz)
-      }
-    }
-
-    private def showsym(sym: Symbol): String = (sym.toString +
-      "\n  symbol = " + Flags.flagsToString(sym.flags) + " " + sym +
-      "\n  owner  = " + Flags.flagsToString(sym.owner.flags) + " " + sym.owner
-    )
-
-  } // class BytecodeGenerator
-
-} // class GenMSIL
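The emitCondBr logic above avoids emitting a redundant `br` when the branch target is the block that will be laid out immediately afterwards. A minimal stand-alone sketch of that fall-through rule, using simplified placeholder types rather than the compiler's own TestOp and BasicBlock:

    object FallThroughBranch {
      sealed trait Cond { def negate: Cond }
      case object EQ extends Cond { def negate = NE }
      case object NE extends Cond { def negate = EQ }

      // Branch instructions for `if (cond) goto success else goto failure`,
      // given which block the code generator lays out immediately afterwards.
      def emit(cond: Cond, success: String, failure: String, next: String): List[String] =
        if (next == success) List(s"b${cond.negate} $failure")        // fall through to success
        else if (next == failure) List(s"b$cond $success")            // fall through to failure
        else List(s"b$cond $success", s"br $failure")                 // explicit jump needed

      def main(args: Array[String]): Unit = {
        println(emit(EQ, "L1", "L2", next = "L1")) // List(bNE L2)
        println(emit(EQ, "L1", "L2", next = "L3")) // List(bEQ L1, br L2)
      }
    }
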
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index 23f932b..c49f238 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
 package backend.opt
 
 import scala.tools.nsc.backend.icode.analysis.LubException
-import scala.tools.nsc.symtab._
 
 /**
  *  @author Iulian Dragos
@@ -19,6 +18,8 @@ abstract class ClosureElimination extends SubComponent {
 
   val phaseName = "closelim"
 
+  override val enabled: Boolean = settings.Xcloselim
+
   /** Create a new phase */
   override def newPhase(p: Phase) = new ClosureEliminationPhase(p)
 
@@ -72,8 +73,10 @@ abstract class ClosureElimination extends SubComponent {
     def name = phaseName
     val closser = new ClosureElim
 
-    override def apply(c: IClass): Unit =
-      closser analyzeClass c
+    override def apply(c: IClass): Unit = {
+      if (closser ne null)
+        closser analyzeClass c
+    }
   }
 
   /**
@@ -83,7 +86,7 @@ abstract class ClosureElimination extends SubComponent {
    *
    */
   class ClosureElim {
-    def analyzeClass(cls: IClass): Unit = if (settings.Xcloselim.value) {
+    def analyzeClass(cls: IClass): Unit = if (settings.Xcloselim) {
       log(s"Analyzing ${cls.methods.size} methods in $cls.")
       cls.methods foreach { m =>
         analyzeMethod(m)
@@ -97,7 +100,7 @@ abstract class ClosureElimination extends SubComponent {
     /* Some embryonic copy propagation. */
     def analyzeMethod(m: IMethod): Unit = try {if (m.hasCode) {
       cpp.init(m)
-      cpp.run
+      cpp.run()
 
       m.linearizedBlocks() foreach { bb =>
         var info = cpp.in(bb)
@@ -109,7 +112,7 @@ abstract class ClosureElimination extends SubComponent {
               val t = info.getBinding(l)
               t match {
               	case Deref(This) | Const(_) =>
-                  bb.replaceInstruction(i, valueToInstruction(t));
+                  bb.replaceInstruction(i, valueToInstruction(t))
                   debuglog(s"replaced $i with $t")
 
                 case _ =>
@@ -120,7 +123,7 @@ abstract class ClosureElimination extends SubComponent {
 
             case LOAD_FIELD(f, false) /* if accessible(f, m.symbol) */ =>
               def replaceFieldAccess(r: Record) {
-                val Record(cls, bindings) = r
+                val Record(cls, _) = r
                 info.getFieldNonRecordValue(r, f) foreach { v =>
                         bb.replaceInstruction(i, DROP(REFERENCE(cls)) :: valueToInstruction(v) :: Nil)
                         debuglog(s"replaced $i with $v")
@@ -188,28 +191,20 @@ abstract class ClosureElimination extends SubComponent {
       case Boxed(LocalVar(v)) =>
         LOAD_LOCAL(v)
     }
-
-    /** is field 'f' accessible from method 'm'? */
-    def accessible(f: Symbol, m: Symbol): Boolean =
-      f.isPublic || (f.isProtected && (f.enclosingPackageClass == m.enclosingPackageClass))
   } /* class ClosureElim */
 
 
   /** Peephole optimization. */
   abstract class PeepholeOpt {
-
-    private var method: IMethod = NoIMethod
-
     /** Concrete implementations will perform their optimizations here */
     def peep(bb: BasicBlock, i1: Instruction, i2: Instruction): Option[List[Instruction]]
 
     var liveness: global.icodes.liveness.LivenessAnalysis = null
 
     def apply(m: IMethod): Unit = if (m.hasCode) {
-      method = m
       liveness = new global.icodes.liveness.LivenessAnalysis
       liveness.init(m)
-      liveness.run
+      liveness.run()
       m foreachBlock transformBlock
     }
 
@@ -235,7 +230,7 @@ abstract class ClosureElimination extends SubComponent {
           h = t.head
           t = t.tail
         }
-      } while (redo);
+      } while (redo)
       b fromList newInstructions
     }
   }
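
The ClosureElim pass retained above performs what its comment calls "embryonic copy propagation": when the analysis proves that a loaded local can only hold `this` or a constant, the LOAD_LOCAL is replaced by an instruction that produces that value directly. A toy, self-contained sketch of the same idea over a made-up straight-line instruction set (illustrative names, not ICode):

    // Toy copy propagation over a made-up instruction set (not scalac ICode).
    object CopyPropSketch {
      sealed trait Inst
      case class Const(n: Int)        extends Inst
      case class Store(local: String) extends Inst
      case class Load(local: String)  extends Inst
      case object Add                 extends Inst

      // One forward pass over straight-line code: remember locals that currently
      // hold a known constant and rewrite loads of such locals into that constant.
      def propagate(code: List[Inst]): List[Inst] = {
        val known = scala.collection.mutable.Map.empty[String, Int]
        var lastConst: Option[Int] = None
        code.map {
          case c @ Const(n) =>
            lastConst = Some(n); c
          case s @ Store(l) =>
            lastConst match {
              case Some(n) => known(l) = n
              case None    => known -= l
            }
            lastConst = None; s
          case Load(l) if known.contains(l) =>
            lastConst = Some(known(l))
            Const(known(l))                  // the actual replacement
          case other =>
            lastConst = None; other
        }
      }

      def main(args: Array[String]): Unit = {
        println(propagate(List(Const(1), Store("x"), Load("x"), Load("x"), Add)))
        // List(Const(1), Store(x), Const(1), Const(1), Add)
      }
    }
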
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
new file mode 100644
index 0000000..1fadcb8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
@@ -0,0 +1,625 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  James Iry
+ */
+
+package scala
+package tools.nsc
+package backend.opt
+
+import scala.tools.nsc.backend.icode.analysis.LubException
+import scala.annotation.tailrec
+
+/**
+ * ConstantOptimization uses abstract interpretation to approximate for
+ * each instruction what constants a variable or stack slot might hold
+ * or cannot hold. From this it will eliminate unreachable conditionals
+ * where only one branch is reachable, e.g. to eliminate unnecessary
+ * null checks.
+ *
+ * With some more work it could be extended to
+ * - cache stable values (final fields, modules) in locals
+ * - replace the copy propagation in ClosureElimination
+ * - fold constants
+ * - eliminate unnecessary stores and loads
+ * - propagate knowledge gathered from conditionals for further optimization
+ */
+abstract class ConstantOptimization extends SubComponent {
+  import global._
+  import icodes._
+  import icodes.opcodes._
+
+  val phaseName = "constopt"
+
+  /** Create a new phase */
+  override def newPhase(p: Phase) = new ConstantOptimizationPhase(p)
+
+  override val enabled: Boolean = settings.YconstOptimization
+
+  /**
+   * The constant optimization phase.
+   */
+  class ConstantOptimizationPhase(prev: Phase) extends ICodePhase(prev) {
+
+    def name = phaseName
+
+    override def apply(c: IClass) {
+      if (settings.YconstOptimization) {
+        val analyzer = new ConstantOptimizer
+        analyzer optimizeClass c
+      }
+    }
+  }
+
+  class ConstantOptimizer {
+    def optimizeClass(cls: IClass) {
+      log(s"Analyzing ${cls.methods.size} methods in $cls.")
+      cls.methods foreach { m =>
+        optimizeMethod(m)
+      }
+    }
+
+    def optimizeMethod(m: IMethod) {
+      if (m.hasCode) {
+        log(s"Analyzing ${m.symbol}")
+        val replacementInstructions = interpretMethod(m)
+        for (block <- m.blocks) {
+          if (replacementInstructions contains block) {
+            val instructions = replacementInstructions(block)
+            block.replaceInstruction(block.lastInstruction, instructions)
+          }
+        }
+      }
+    }
+
+    /**
+     * A single possible (or impossible) datum that can be held in Contents
+     */
+    private sealed abstract class Datum
+    /**
+     * A constant datum
+     */
+    private case class Const(c: Constant) extends Datum {
+      def isIntAssignable = c.tag >= BooleanTag && c.tag <= IntTag
+      def toInt = c.tag match {
+        case BooleanTag => if (c.booleanValue) 1 else 0
+        case _ => c.intValue
+      }
+
+      /**
+       * True if this constant would compare to other as true under primitive eq
+       */
+      override def equals(other: Any) = other match {
+        case oc @ Const(o) => (this eq oc) || (if (this.isIntAssignable && oc.isIntAssignable) this.toInt == oc.toInt else c.value == o.value)
+        case _ => false
+      }
+
+      /**
+       * Hash code consistent with equals
+       */
+      override def hashCode = if (this.isIntAssignable) this.toInt else c.hashCode
+
+    }
+    /**
+     * A datum that has been Boxed via a BOX instruction
+     */
+    private case class Boxed(c: Datum) extends Datum
+
+    /**
+     * The knowledge we have about the abstract state of one location in terms
+     * of what constants it might or cannot hold. Forms a lower
+     * lattice where lower elements in the lattice indicate less knowledge.
+     *
+     * With the following partial ordering (where '>' indicates more precise knowledge)
+     *
+     * Possible(xs) > Possible(xs + y)
+     * Possible(xs) > Impossible(ys)
+     * Impossible(xs + y) > Impossible(xs)
+     *
+     * and the following merges, which indicate merging knowledge from two paths through
+     * the code,
+     *
+     * // if left must be 1 or 2 and right must be 2 or 3, then the merge must be a 1, 2 or 3
+     * Possible(xs) merge Possible(ys) => Possible(xs union ys)
+     *
+     * // Left says can't be 2 or 3, right says can't be 3 or 4
+     * // then it's not 3 (it could be 2 from the right or 4 from the left)
+     * Impossible(xs) merge Impossible(ys) => Impossible(xs intersect ys)
+     *
+     * // Left says it can't be 2 or 3, right says it must be 3 or 4, then
+     * // it can't be 2 (left rules out 4 and right says 3 is possible)
+     * Impossible(xs) merge Possible(ys) => Impossible(xs -- ys)
+     *
+     * Intuitively, Possible(empty) says that a location can't hold anything,
+     * it's uninitialized. However, Possible(empty) never appears in the code.
+     *
+     * Conversely, Impossible(empty) says nothing is impossible, it could be
+     * anything. Impossible(empty) is given a synonym UNKNOWN and is used
+     * for, e.g., the result of an arbitrary method call.
+     */
+    private sealed abstract class Contents {
+      /**
+       * Join this Contents with another coming from another path. Join enforces
+       * the lattice structure. It is symmetrical and never moves upward in the
+       * lattice
+       */
+      final def merge(other: Contents): Contents = if (this eq other) this else (this, other) match {
+        case (Possible(possible1), Possible(possible2)) =>
+          Possible(possible1 union possible2)
+        case (Impossible(impossible1), Impossible(impossible2)) =>
+          Impossible(impossible1 intersect impossible2)
+        case (Impossible(impossible), Possible(possible)) =>
+          Impossible(impossible -- possible)
+        case (Possible(possible), Impossible(impossible)) =>
+          Impossible(impossible -- possible)
+      }
+      // TODO we could have more fine-grained knowledge, e.g. know that 0 < x < 3. But for now equality/inequality is a good start.
+      def mightEqual(other: Contents): Boolean
+      def mightNotEqual(other: Contents): Boolean
+    }
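
The three merge rules in the comment above are easiest to check on concrete sets. A standalone model of the same lattice over Set[Int] (illustrative only; the real code merges Datum values):

    // Standalone model of the Possible/Impossible merge rules over Set[Int].
    object ContentsLatticeSketch {
      sealed trait Contents {
        def merge(other: Contents): Contents = (this, other) match {
          case (Possible(a),   Possible(b))   => Possible(a union b)
          case (Impossible(a), Impossible(b)) => Impossible(a intersect b)
          case (Impossible(a), Possible(b))   => Impossible(a -- b)
          case (Possible(a),   Impossible(b)) => Impossible(b -- a)
        }
      }
      case class Possible(values: Set[Int])   extends Contents  // must be one of these
      case class Impossible(values: Set[Int]) extends Contents  // cannot be any of these

      def main(args: Array[String]): Unit = {
        // one path proves the value is in {1,2}, the other proves {2,3}: joined, it is in {1,2,3}
        println(Possible(Set(1, 2)) merge Possible(Set(2, 3)))     // Possible(Set(1, 2, 3))
        // one path rules out {2,3}, the other rules out {3,4}: only 3 stays ruled out
        println(Impossible(Set(2, 3)) merge Impossible(Set(3, 4))) // Impossible(Set(3))
        // one path rules out {2,3}, the other proves {3,4}: only 2 stays ruled out
        println(Impossible(Set(2, 3)) merge Possible(Set(3, 4)))   // Impossible(Set(2))
      }
    }
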
+    private def SingleImpossible(x: Datum) = new Impossible(Set(x))
+
+    /**
+     * The location is known to have one of a set of values.
+     */
+    private case class Possible(possible: Set[Datum]) extends Contents {
+      assert(possible.nonEmpty, "Contradiction: had an empty possible set indicating an uninitialized location")
+      def mightEqual(other: Contents): Boolean = (this eq other) || (other match {
+        // two Possibles might be equal if they have any possible members in common
+        case Possible(possible2) => (possible intersect possible2).nonEmpty
+        // a possible can be equal to an impossible if the impossible doesn't rule
+        // out all the possibilities
+        case Impossible(possible2) => (possible -- possible2).nonEmpty
+      })
+      def mightNotEqual(other: Contents): Boolean = (this ne other) && (other match {
+        // two Possibles might not be equal if either has possible members that the other doesn't
+        case Possible(possible2) => (possible -- possible2).nonEmpty || (possible2 -- possible).nonEmpty
+        case Impossible(_) => true
+      })
+    }
+    private def SinglePossible(x: Datum) = new Possible(Set(x))
+
+    /**
+     * The location is known not to hold any of a set of values (e.g. null).
+     */
+    private case class Impossible(impossible: Set[Datum]) extends Contents {
+      def mightEqual(other: Contents): Boolean = (this eq other) || (other match {
+        case Possible(_) => other mightEqual this
+        case _ => true
+      })
+      def mightNotEqual(other: Contents): Boolean = (this eq other) || (other match {
+        case Possible(_) => other mightNotEqual this
+        case _ => true
+      })
+    }
+
+    /**
+     * Our entire knowledge about the contents of all variables and the stack. It forms
+     * a lattice primarily driven by the lattice structure of Contents.
+     *
+     * In addition to the rules of contents, State has the following properties:
+     * - The merge of two sets of locals holds the merges of locals found in the intersection
+     * of the two sets of locals. Locals not found in a
+     * locals map are thus possibly uninitialized and attempting to load them results
+     * in an error.
+     * - The stack heights of two states must match otherwise it's an error to merge them
+     *
+     * State is immutable in order to aid in structure sharing of local maps and stacks
+     */
+    private case class State(locals: Map[Local, Contents], stack: List[Contents]) {
+      def mergeLocals(olocals: Map[Local, Contents]): Map[Local, Contents] = if (locals eq olocals) locals else Map((for {
+        key <- (locals.keySet intersect olocals.keySet).toSeq
+      } yield (key, locals(key) merge olocals(key))): _*)
+
+      def merge(other: State): State = if (this eq other) this else {
+        @tailrec def mergeStacks(l: List[Contents], r: List[Contents], out: List[Contents]): List[Contents] = (l, r) match {
+          case (Nil, Nil) => out.reverse
+          case (l, r) if l eq r => out.reverse ++ l
+          case (lhead :: ltail, rhead :: rtail) => mergeStacks(ltail, rtail, (lhead merge rhead) :: out)
+          case _ => sys.error("Mismatched stack heights")
+        }
+
+        val newLocals = mergeLocals(other.locals)
+
+        val newStack = if (stack eq other.stack) stack else mergeStacks(stack, other.stack, Nil)
+        State(newLocals, newStack)
+      }
+
+      /**
+       * Peek at the top of the stack without modifying it. Error if the stack is empty
+       */
+      def peek(n: Int): Contents = stack(n)
+      /**
+       * Push contents onto a stack
+       */
+      def push(contents: Contents): State = this copy (stack = contents :: stack)
+      /**
+       * Drop n elements from the stack
+       */
+      def drop(number: Int): State = this copy (stack = stack drop number)
+      /**
+       * Store the top of the stack into the specified local. An error if the stack
+       * is empty
+       */
+      def store(variable: Local): State = {
+        val contents = stack.head
+        val newVariables = locals + ((variable, contents))
+        new State(newVariables, stack.tail)
+      }
+      /**
+       * Load the specified local onto the top of the stack. An error if the local is uninitialized.
+       */
+      def load(variable: Local): State = {
+        val contents: Contents = locals.getOrElse(variable, sys.error(s"$variable is not initialized"))
+        push(contents)
+      }
+      /**
+       * A copy of this State with an empty stack
+       */
+      def cleanStack: State = if (stack.isEmpty) this else this copy (stack = Nil)
+    }
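
Concretely, interpreting a short sequence such as CONSTANT(1); STORE_LOCAL(x); LOAD_LOCAL(x) threads knowledge through the stack and the locals map exactly as the State operations above describe. A simplified trace with String locals and Set[Int] standing in for Contents (hypothetical types, not the compiler's):

    // Simplified trace of the State operations above, with String locals and
    // Set[Int] standing in for Contents (illustrative, not the compiler's types).
    object StateTraceSketch {
      case class State(locals: Map[String, Set[Int]], stack: List[Set[Int]]) {
        def push(c: Set[Int]): State = copy(stack = c :: stack)
        def store(l: String): State  = State(locals + (l -> stack.head), stack.tail)
        def load(l: String): State   = push(locals.getOrElse(l, sys.error(s"$l is not initialized")))
      }

      def main(args: Array[String]): Unit = {
        val s0 = State(Map.empty, Nil)
        val s1 = s0.push(Set(1))   // CONSTANT(1)    -> stack: [{1}]
        val s2 = s1.store("x")     // STORE_LOCAL(x) -> locals: x -> {1}, stack: []
        val s3 = s2.load("x")      // LOAD_LOCAL(x)  -> stack: [{1}] again
        println(s3)                // State(Map(x -> Set(1)),List(Set(1)))
      }
    }
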
+
+    // some precomputed constants
+    private val NULL = Const(Constant(null: Any))
+    private val UNKNOWN = Impossible(Set.empty)
+    private val NOT_NULL = SingleImpossible(NULL)
+    private val CONST_UNIT = SinglePossible(Const(Constant(())))
+    private val CONST_FALSE = SinglePossible(Const(Constant(false)))
+    private val CONST_ZERO_BYTE = SinglePossible(Const(Constant(0: Byte)))
+    private val CONST_ZERO_SHORT = SinglePossible(Const(Constant(0: Short)))
+    private val CONST_ZERO_CHAR = SinglePossible(Const(Constant(0: Char)))
+    private val CONST_ZERO_INT = SinglePossible(Const(Constant(0: Int)))
+    private val CONST_ZERO_LONG = SinglePossible(Const(Constant(0: Long)))
+    private val CONST_ZERO_FLOAT = SinglePossible(Const(Constant(0.0f)))
+    private val CONST_ZERO_DOUBLE = SinglePossible(Const(Constant(0.0d)))
+    private val CONST_NULL = SinglePossible(NULL)
+
+    /**
+     * Given a TypeKind, figure out what '0' for it means in order to interpret CZJUMP
+     */
+    private def getZeroOf(k: TypeKind): Contents = k match {
+      case UNIT => CONST_UNIT
+      case BOOL => CONST_FALSE
+      case BYTE => CONST_ZERO_BYTE
+      case SHORT => CONST_ZERO_SHORT
+      case CHAR => CONST_ZERO_CHAR
+      case INT => CONST_ZERO_INT
+      case LONG => CONST_ZERO_LONG
+      case FLOAT => CONST_ZERO_FLOAT
+      case DOUBLE => CONST_ZERO_DOUBLE
+      case REFERENCE(_) => CONST_NULL
+      case ARRAY(_) => CONST_NULL
+      case BOXED(_) => CONST_NULL
+      case ConcatClass => abort("no zero of ConcatClass")
+    }
+
+    // normal locals can't be null, so we use null to mean the magic 'this' local
+    private val THIS_LOCAL: Local = null
+
+    /**
+     * interpret a single instruction to find its impact on the abstract state
+     */
+    private def interpretInst(in: State, inst: Instruction): State = {
+      // pop the consumed number of values off the `in` state's stack, producing a new state
+      def dropConsumed: State = in drop inst.consumed
+
+      inst match {
+        case THIS(_) =>
+          in load THIS_LOCAL
+
+        case CONSTANT(k) =>
+          // treat NaN as UNKNOWN because NaN must never equal NaN
+          val const = if (k.isNaN) UNKNOWN
+          else SinglePossible(Const(k))
+          in push const
+
+        case LOAD_ARRAY_ITEM(_) | LOAD_FIELD(_, _) | CALL_PRIMITIVE(_) =>
+          dropConsumed push UNKNOWN
+
+        case LOAD_LOCAL(local) =>
+          // TODO if a local is known to hold a constant then we can replace this instruction with a push of that constant
+          in load local
+
+        case STORE_LOCAL(local) =>
+          in store local
+
+        case STORE_THIS(_) =>
+          // if a local is already known to have a constant and we're replacing with the same constant then we can
+          // replace this with a drop
+          in store THIS_LOCAL
+
+        case CALL_METHOD(_, _) =>
+          // TODO we could special case implementations of equals that are known, e.g. String#equals
+          // We could turn Possible(string constants).equals(Possible(string constants) into an eq check
+          // We could turn nonConstantString.equals(constantString) into constantString.equals(nonConstantString)
+          //  and eliminate the null check that likely precedes this call
+          val initial = dropConsumed
+          (0 until inst.produced).foldLeft(initial) { case (know, _) => know push UNKNOWN }
+
+        case BOX(_) =>
+          val value = in peek 0
+          // we simulate boxing by, um, boxing the possible/impossible contents
+          // so if we have Possible(1,2) originally then we'll end up with
+          // a Possible(Boxed(1), Boxed(2))
+          // Similarly, if we know the input is not a 0 then we'll know the
+          // output is not a Boxed(0)
+          val newValue = value match {
+            case Possible(values) => Possible(values map Boxed)
+            case Impossible(values) => Impossible(values map Boxed)
+          }
+          dropConsumed push newValue
+
+        case UNBOX(_) =>
+          val value = in peek 0
+          val newValue = value match {
+            // if we have a Possible, then all the possibilities
+            // should themselves be Boxes. In that
+            // case we can merge them to figure out what the UNBOX will produce
+            case Possible(inners) =>
+              assert(inners.nonEmpty, "Empty possible set indicating an uninitialized location")
+              val sanitized: Set[Contents] = (inners map {
+                case Boxed(content) => SinglePossible(content)
+                case _ => UNKNOWN
+              })
+              sanitized reduce (_ merge _)
+            // if we have an impossible then the thing that's impossible
+            // should be a box. We'll unbox that to see what we get
+            case unknown @ Impossible(inners) =>
+              if (inners.isEmpty) {
+                unknown
+              } else {
+                val sanitized: Set[Contents] = (inners map {
+                  case Boxed(content) => SingleImpossible(content)
+                  case _ => UNKNOWN
+                })
+                sanitized reduce (_ merge _)
+              }
+          }
+          dropConsumed push newValue
+
+        case LOAD_MODULE(_) | NEW(_) | LOAD_EXCEPTION(_) =>
+          in push NOT_NULL
+
+        case CREATE_ARRAY(_, _) =>
+          dropConsumed push NOT_NULL
+
+        case IS_INSTANCE(_) =>
+          // TODO IS_INSTANCE is going to be followed by a C(Z)JUMP
+          // and if IS_INSTANCE/C(Z)JUMP the branch for "true" can
+          // know that whatever was checked was not a null
+          // see the TODO on CJUMP for more information about propagating null
+          // information
+          // TODO if the top of stack is guaranteed null then we can eliminate this IS_INSTANCE check and
+          // replace with a constant false, but how often is a knowable null checked for instanceof?
+          // TODO we could track type information and statically know to eliminate IS_INSTANCE
+          // which might be a nice win under specialization
+          dropConsumed push UNKNOWN // it's actually a Possible(true, false) but since the following instruction
+        // will be a conditional jump comparing to true or false, there is
+        // nothing to be gained by being more precise
+
+        case CHECK_CAST(_) =>
+          // TODO we could track type information and statically know to eliminate CHECK_CAST
+          // but that's probably not a huge win
+          in
+
+        case DUP(_) =>
+          val value = in peek 0
+          in push value
+
+        case DROP(_) | MONITOR_ENTER() | MONITOR_EXIT() | STORE_ARRAY_ITEM(_) | STORE_FIELD(_, _) =>
+          dropConsumed
+
+        case SCOPE_ENTER(_) | SCOPE_EXIT(_) =>
+          in
+
+        case JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | RETURN(_) | THROW(_) | SWITCH(_, _) =>
+          dumpClassesAndAbort("Unexpected block ending instruction: " + inst)
+      }
+    }
+    /**
+     * interpret the last instruction of a block, which will be a jump, a conditional branch, a throw, or a return.
+     * It will result in a map from target blocks to the input state computed for that block. It
+     * also computes a replacement list of instructions
+     */
+    private def interpretLast(in: State, inst: Instruction): (Map[BasicBlock, State], List[Instruction]) = {
+      def canSwitch(in1: Contents, tagSet: List[Int]) = {
+        in1 mightEqual Possible(tagSet.toSet map { tag: Int => Const(Constant(tag)) })
+      }
+
+      /* common code for interpreting CJUMP and CZJUMP */
+      def interpretConditional(kind: TypeKind, val1: Contents, val2: Contents, success: BasicBlock, failure: BasicBlock, cond: TestOp): (Map[BasicBlock, State], List[Instruction]) = {
+        // TODO use reaching analysis to update the state in the two branches
+        // e.g. if the comparison was checking null equality on local x
+        // then in the success branch we know x is null and
+        // on the failure branch we know it is not
+        // in fact, with copy propagation we could propagate that knowledge
+        // back through a chain of locations
+        //
+        // TODO if we do all that we need to be careful in the
+        // case that success and failure are the same target block
+        // because we're using a Map and don't want one possible state to clobber the other
+        // alternatively, maybe we should just replace the conditional with a jump if both targets are the same
+
+        def mightEqual = val1 mightEqual val2
+        def mightNotEqual = val1 mightNotEqual val2
+        def guaranteedEqual = mightEqual && !mightNotEqual
+
+        def succPossible = cond match {
+          case EQ => mightEqual
+          case NE => mightNotEqual
+          case LT | GT => !guaranteedEqual // if the two are guaranteed to be equal then they can't be LT/GT
+          case LE | GE => true
+        }
+
+        def failPossible = cond match {
+          case EQ => mightNotEqual
+          case NE => mightEqual
+          case LT | GT => true
+          case LE | GE => !guaranteedEqual // if the two are guaranteed to be equal then they must be LE/GE
+        }
+
+        val out = in drop inst.consumed
+
+        var result = Map[BasicBlock, State]()
+        if (succPossible) {
+          result += ((success, out))
+        }
+
+        if (failPossible) {
+          result += ((failure, out))
+        }
+
+        val replacements = if (result.size == 1) List.fill(inst.consumed)(DROP(kind)) :+ JUMP(result.keySet.head)
+        else inst :: Nil
+
+        (result, replacements)
+      }
+
+      inst match {
+        case JUMP(whereto) =>
+          (Map((whereto, in)), inst :: Nil)
+
+        case CJUMP(success, failure, cond, kind) =>
+          val in1 = in peek 0
+          val in2 = in peek 1
+          interpretConditional(kind, in1, in2, success, failure, cond)
+
+        case CZJUMP(success, failure, cond, kind) =>
+          val in1 = in peek 0
+          val in2 = getZeroOf(kind)
+          interpretConditional(kind, in1, in2, success, failure, cond)
+
+        case SWITCH(tags, labels) =>
+          val in1 = in peek 0
+          val reachableNormalLabels = tags zip labels collect { case (tagSet, label) if canSwitch(in1, tagSet) => label }
+          val reachableLabels = if (tags.isEmpty) {
+            assert(labels.size == 1, s"When SWITCH node has empty array of tags it should have just one (default) label: $labels")
+            labels
+          } else if (labels.lengthCompare(tags.length) > 0) {
+            // if we've got an extra label then it's the default
+            val defaultLabel = labels.last
+            // see if the default is reachable by seeing if the input might be out of the set
+            // of all tags
+            val allTags = Possible(tags.flatten.toSet map { tag: Int => Const(Constant(tag)) })
+            if (in1 mightNotEqual allTags) {
+              reachableNormalLabels :+ defaultLabel
+            } else {
+              reachableNormalLabels
+            }
+          } else {
+            reachableNormalLabels
+          }
+          // TODO similar to the comment in interpretConditional, we should update the State going into each
+          // branch based on which tag is being matched. Also, just like interpretConditional, if target blocks
+          // are the same we need to merge State rather than clobber
+
+          // alternatively, maybe we should simplify the SWITCH to not have the same target labels
+          val newState = in drop inst.consumed
+          val result = Map(reachableLabels map { label => (label, newState) }: _*)
+          if (reachableLabels.size == 1) (result, DROP(INT) :: JUMP(reachableLabels.head) :: Nil)
+          else (result, inst :: Nil)
+
+        // these instructions don't have target blocks
+        // (exceptions are assumed to be reachable from all instructions)
+        case RETURN(_) | THROW(_) =>
+          (Map.empty, inst :: Nil)
+
+        case _ =>
+          dumpClassesAndAbort("Unexpected non-block ending instruction: " + inst)
+      }
+    }
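
For the null-check case named in the file's header comment: if the stack top is known NOT_NULL (Impossible(Set(null))) and a CZJUMP compares it for EQ against the reference zero value, mightEqual is false, so only the failure branch survives and the conditional is rewritten into DROPs plus an unconditional JUMP. The branch-reachability decision of interpretConditional, restated with plain booleans for EQ/NE only (a sketch, not the compiler's code):

    // Restating interpretConditional's branch-reachability decision with plain
    // booleans, EQ/NE only (a sketch; not the compiler's code).
    object BranchPruneSketch {
      sealed trait Cond
      case object EQ extends Cond
      case object NE extends Cond

      // returns (success branch reachable, failure branch reachable)
      def reachable(cond: Cond, mightEqual: Boolean, mightNotEqual: Boolean): (Boolean, Boolean) = {
        val succ = cond match {
          case EQ => mightEqual
          case NE => mightNotEqual
        }
        val fail = cond match {
          case EQ => mightNotEqual
          case NE => mightEqual
        }
        (succ, fail)
      }

      def main(args: Array[String]): Unit = {
        // stack top proven non-null, compared EQ against null: it cannot be equal,
        // so only the failure branch remains and the CZJUMP can become DROP + JUMP(failure).
        println(reachable(EQ, mightEqual = false, mightNotEqual = true))   // (false,true)
      }
    }
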
+
+    /**
+     * Analyze a single block to find how it transforms an input state into a states for its successor blocks
+     * Also computes a list of instructions to be used to replace its last instruction
+     */
+    private def interpretBlock(in: State, block: BasicBlock): (Map[BasicBlock, State], Map[BasicBlock, State], List[Instruction]) = {
+      debuglog(s"interpreting block $block")
+      // number of instructions excluding the last one
+      val normalCount = block.size - 1
+
+      val exceptionState = in.cleanStack
+      var normalExitState = in
+      var idx = 0
+      while (idx < normalCount) {
+        val inst = block(idx)
+        normalExitState = interpretInst(normalExitState, inst)
+        if (normalExitState.locals ne exceptionState.locals)
+          exceptionState.copy(locals = exceptionState mergeLocals normalExitState.locals)
+        idx += 1
+      }
+
+      val pairs = block.exceptionSuccessors map { b => (b, exceptionState) }
+      val exceptionMap = Map(pairs: _*)
+
+      val (normalExitMap, newInstructions) = interpretLast(normalExitState, block.lastInstruction)
+
+      (normalExitMap, exceptionMap, newInstructions)
+    }
+
+    /**
+     * Analyze a single method to find replacement instructions
+     */
+    private def interpretMethod(m: IMethod): Map[BasicBlock, List[Instruction]] = {
+      import scala.collection.mutable.{ Set => MSet, Map => MMap }
+
+      debuglog(s"interpreting method $m")
+      var iterations = 0
+
+      // initially we know that 'this' is not null and the params are initialized to some unknown value
+      val initThis: Iterator[(Local, Contents)] = if (m.isStatic) Iterator.empty else Iterator.single((THIS_LOCAL, NOT_NULL))
+      val initOtherLocals: Iterator[(Local, Contents)] = m.params.iterator map { param => (param, UNKNOWN) }
+      val initialLocals: Map[Local, Contents] = Map((initThis ++ initOtherLocals).toSeq: _*)
+      val initialState = State(initialLocals, Nil)
+
+      // worklist of basic blocks to process, initially the start block
+      val worklist = MSet(m.startBlock)
+      // worklist of exception basic blocks. They're kept in a separate set so they can be
+      // processed after normal flow basic blocks. That's because exception basic blocks
+      // are more likely to have multiple predecessors and queueing them for later
+      // increases the chances that they'll only need to be interpreted once
+      val exceptionlist = MSet[BasicBlock]()
+      // our current best guess at what the input state is for each block
+      // initially we only know about the start block
+      val inputState = MMap[BasicBlock, State]((m.startBlock, initialState))
+
+      // update the inputState map based on new information from interpreting a block
+      // When the input state of a block changes, add it back to the work list to be
+      // reinterpreted
+      def updateInputStates(outputStates: Map[BasicBlock, State], worklist: MSet[BasicBlock]) {
+        for ((block, newState) <- outputStates) {
+          val oldState = inputState get block
+          val updatedState = oldState map (x => x merge newState) getOrElse newState
+          if (oldState != Some(updatedState)) {
+            worklist add block
+            inputState(block) = updatedState
+          }
+        }
+      }
+
+      // the instructions to be used as the last instructions on each block
+      val replacements = MMap[BasicBlock, List[Instruction]]()
+
+      while (worklist.nonEmpty || exceptionlist.nonEmpty) {
+        if (worklist.isEmpty) {
+          // once the worklist is empty, start processing exception blocks
+          val block = exceptionlist.head
+          exceptionlist remove block
+          worklist add block
+        } else {
+          iterations += 1
+          val block = worklist.head
+          worklist remove block
+          val (normalExitMap, exceptionMap, newInstructions) = interpretBlock(inputState(block), block)
+
+          updateInputStates(normalExitMap, worklist)
+          updateInputStates(exceptionMap, exceptionlist)
+          replacements(block) = newInstructions
+        }
+      }
+
+      debuglog(s"method $m with ${m.blocks.size} reached fixpoint in $iterations iterations")
+      replacements.toMap
+    }
+  }
+}
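
interpretMethod above is a standard worklist fixpoint: the freshly computed input state of each successor block is merged with the previous guess, and the successor is requeued whenever that guess changes. Stripped of ICode specifics, the loop has roughly this shape (block and state types here are illustrative; states are joined by set union):

    // Generic worklist fixpoint over a block graph, mirroring interpretMethod's
    // structure (illustrative types; states here are just Set[Int] joined by union).
    object WorklistFixpointSketch {
      type Block = String
      type State = Set[Int]

      def fixpoint(start: Block,
                   initial: State,
                   transfer: (Block, State) => Map[Block, State]): Map[Block, State] = {
        val input    = scala.collection.mutable.Map(start -> initial)
        val worklist = scala.collection.mutable.Set(start)
        while (worklist.nonEmpty) {
          val block = worklist.head
          worklist -= block
          for ((succ, out) <- transfer(block, input(block))) {
            val merged = input.get(succ).fold(out)(_ union out)   // join with previous guess
            if (!input.get(succ).contains(merged)) {              // changed -> reprocess successor
              input(succ) = merged
              worklist += succ
            }
          }
        }
        input.toMap
      }

      def main(args: Array[String]): Unit = {
        // A feeds B and C, which both feed D; D's input is the join of both paths.
        val result = fixpoint("A", Set(1), {
          case ("A", s) => Map("B" -> (s + 2), "C" -> (s + 3))
          case ("B", s) => Map("D" -> s)
          case ("C", s) => Map("D" -> s)
          case (_, _)   => Map.empty
        })
        println(result("D"))   // Set(1, 2, 3)
      }
    }
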
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index db56f61..90c37ba 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc
 package backend.opt
 
 import scala.collection.{ mutable, immutable }
-import symtab._
 
 /**
  */
@@ -23,6 +22,8 @@ abstract class DeadCodeElimination extends SubComponent {
 
   val phaseName = "dce"
 
+  override val enabled: Boolean = settings.Xdce
+
   /** Create a new phase */
   override def newPhase(p: Phase) = new DeadCodeEliminationPhase(p)
 
@@ -34,7 +35,7 @@ abstract class DeadCodeElimination extends SubComponent {
     val dce = new DeadCode()
 
     override def apply(c: IClass) {
-      if (settings.Xdce.value)
+      if (settings.Xdce && (dce ne null))
         dce.analyzeClass(c)
     }
   }
@@ -61,7 +62,7 @@ abstract class DeadCodeElimination extends SubComponent {
       }
     }
 
-    val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis;
+    val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis
 
     /** Use-def chain: give the reaching definitions at the beginning of given instruction. */
     var defs: immutable.Map[InstrLoc, immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
@@ -89,13 +90,15 @@ abstract class DeadCodeElimination extends SubComponent {
 
     def dieCodeDie(m: IMethod) {
       if (m.hasCode) {
-        debuglog("dead code elimination on " + m);
+        debuglog("dead code elimination on " + m)
         dropOf.clear()
         localStores.clear()
         clobbers.clear()
         m.code.blocks.clear()
+        m.code.touched = true
         accessedLocals = m.params.reverse
         m.code.blocks ++= linearizer.linearize(m)
+        m.code.touched = true
         collectRDef(m)
         mark()
         sweep(m)
@@ -111,17 +114,17 @@ abstract class DeadCodeElimination extends SubComponent {
 
     /** collect reaching definitions and initial useful instructions for this method. */
     def collectRDef(m: IMethod): Unit = if (m.hasCode) {
-      defs = immutable.HashMap.empty; worklist.clear(); useful.clear();
-      rdef.init(m);
-      rdef.run;
+      defs = immutable.HashMap.empty; worklist.clear(); useful.clear()
+      rdef.init(m)
+      rdef.run()
 
       m foreachBlock { bb =>
         useful(bb) = new mutable.BitSet(bb.size)
-        var rd = rdef.in(bb);
-        for (Pair(i, idx) <- bb.toList.zipWithIndex) {
+        var rd = rdef.in(bb)
+        for ((i, idx) <- bb.toList.zipWithIndex) {
 
           // utility for adding to worklist
-          def moveToWorkList() = moveToWorkListIf(true)
+          def moveToWorkList() = moveToWorkListIf(cond = true)
 
           // utility for (conditionally) adding to worklist
           def moveToWorkListIf(cond: Boolean) =
@@ -136,8 +139,8 @@ abstract class DeadCodeElimination extends SubComponent {
           i match {
 
             case LOAD_LOCAL(_) =>
-              defs = defs + Pair(((bb, idx)), rd.vars)
-              moveToWorkListIf(false)
+              defs = defs + (((bb, idx), rd.vars))
+              moveToWorkListIf(cond = false)
 
             case STORE_LOCAL(l) =>
               /* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it
@@ -166,7 +169,7 @@ abstract class DeadCodeElimination extends SubComponent {
 
             case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
                  THROW(_)   | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
-                 LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() =>
+                 LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() | CHECK_CAST(_) =>
               moveToWorkList()
 
             case CALL_METHOD(m1, _) if isSideEffecting(m1) =>
@@ -188,8 +191,10 @@ abstract class DeadCodeElimination extends SubComponent {
                 }
               }
               moveToWorkListIf(necessary)
+            case LOAD_MODULE(sym) if isLoadNeeded(sym) =>
+              moveToWorkList() // SI-4859 Module initialization might side-effect.
             case _ => ()
-              moveToWorkListIf(false)
+              moveToWorkListIf(cond = false)
           }
           rd = rdef.interpret(bb, idx, rd)
         }
@@ -223,7 +228,7 @@ abstract class DeadCodeElimination extends SubComponent {
         // worklist so we also mark their reaching defs as useful - see SI-7060
         if (!useful(bb)(idx)) {
           useful(bb) += idx
-          dropOf.get(bb, idx) foreach {
+          dropOf.get((bb, idx)) foreach {
             for ((bb1, idx1) <- _) {
               /*
                * SI-7060: A drop that we now mark as useful can be reached via several paths,
@@ -345,13 +350,13 @@ abstract class DeadCodeElimination extends SubComponent {
       m foreachBlock { bb =>
         debuglog(bb + ":")
         val oldInstr = bb.toList
-        bb.open
-        bb.clear
-        for (Pair(i, idx) <- oldInstr.zipWithIndex) {
+        bb.open()
+        bb.clear()
+        for ((i, idx) <- oldInstr.zipWithIndex) {
           if (useful(bb)(idx)) {
             debuglog(" * " + i + " is useful")
             bb.emit(i, i.pos)
-            compensations.get(bb, idx) match {
+            compensations.get((bb, idx)) match {
               case Some(is) => is foreach bb.emit
               case None => ()
             }
@@ -379,7 +384,7 @@ abstract class DeadCodeElimination extends SubComponent {
           }
         }
 
-        if (bb.nonEmpty) bb.close
+        if (bb.nonEmpty) bb.close()
         else log(s"empty block encountered in $m")
       }
     }
@@ -418,13 +423,6 @@ abstract class DeadCodeElimination extends SubComponent {
       compensations
     }
 
-    private def withClosed[a](bb: BasicBlock)(f: => a): a = {
-      if (bb.nonEmpty) bb.close
-      val res = f
-      if (bb.nonEmpty) bb.open
-      res
-    }
-
     private def findInstruction(bb: BasicBlock, i: Instruction): InstrLoc = {
       for (b <- linearizer.linearizeAt(method, bb)) {
         val idx = b.toList indexWhere (_ eq i)
@@ -435,7 +433,7 @@ abstract class DeadCodeElimination extends SubComponent {
     }
 
     private def isPure(sym: Symbol) = (
-         (sym.isGetter && sym.isEffectivelyFinal && !sym.isLazy)
+         (sym.isGetter && sym.isEffectivelyFinalOrNotOverridden && !sym.isLazy)
       || (sym.isPrimaryConstructor && (sym.enclosingPackage == RuntimePackage || inliner.isClosureClass(sym.owner)))
     )
     /** Is 'sym' a side-effecting method? TODO: proper analysis.  */
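
The dce phase changed above is a mark-and-sweep over instructions: collectRDef seeds the worklist with unconditionally useful instructions (returns, jumps, field stores, side-effecting calls, and now CHECK_CAST and needed LOAD_MODULEs), mark() follows the reaching-definitions chains to the definitions they depend on, and sweep() drops everything unmarked. A drastically simplified, self-contained sketch of that shape for straight-line code, with made-up instruction names:

    // Toy mark-and-sweep dead code elimination on straight-line code
    // (made-up instructions; the real pass works on ICode basic blocks
    // with reaching-definitions chains).
    object DceSketch {
      sealed trait Inst { def uses: Set[String]; def defines: Option[String]; def sideEffecting: Boolean }
      case class Assign(target: String, uses: Set[String]) extends Inst {
        def defines = Some(target); def sideEffecting = false
      }
      case class Return(uses: Set[String]) extends Inst {
        def defines = None; def sideEffecting = true
      }

      def eliminate(code: Vector[Inst]): Vector[Inst] = {
        val useful   = scala.collection.mutable.Set.empty[Int]
        val worklist = scala.collection.mutable.Queue(code.indices.filter(i => code(i).sideEffecting): _*)
        while (worklist.nonEmpty) {
          val i = worklist.dequeue()
          if (useful.add(i)) {
            // mark the closest preceding definition of every local this instruction uses
            for (v <- code(i).uses; d <- (0 until i).reverse.find(j => code(j).defines.contains(v)))
              worklist.enqueue(d)
          }
        }
        code.indices.collect { case i if useful(i) => code(i) }.toVector   // sweep
      }

      def main(args: Array[String]): Unit = {
        val code = Vector(
          Assign("a", Set()),      // kept: feeds b
          Assign("dead", Set()),   // dropped: never used
          Assign("b", Set("a")),   // kept: feeds the return
          Return(Set("b")))
        println(eliminate(code).mkString(", "))   // Assign(a,Set()), Assign(b,Set(a)), Return(Set(b))
      }
    }
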
diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
index ab238af..235e954 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
@@ -4,7 +4,8 @@
 
 package scala.tools.nsc
 package backend.opt
-import scala.util.control.Breaks._
+
+import java.util.concurrent.TimeUnit
 
 /**
   * This optimization phase inlines the exception handlers so that further phases can optimize the code better
@@ -53,11 +54,13 @@ abstract class InlineExceptionHandlers extends SubComponent {
   import icodes._
   import icodes.opcodes._
 
-  val phaseName = "inlineExceptionHandlers"
+  val phaseName = "inlinehandlers"
 
   /** Create a new phase */
   override def newPhase(p: Phase) = new InlineExceptionHandlersPhase(p)
 
+  override def enabled = settings.inlineHandlers
+
   /**
     * Inlining Exception Handlers
     */
@@ -70,9 +73,9 @@ abstract class InlineExceptionHandlers extends SubComponent {
      * -some exception handler duplicates expect the exception on the stack while others expect it in a local
      *   => Option[Local]
      */
-    private val handlerCopies = perRunCaches.newMap[BasicBlock, Option[(Option[Local], BasicBlock)]]
+    private val handlerCopies = perRunCaches.newMap[BasicBlock, Option[(Option[Local], BasicBlock)]]()
     /* This map is the inverse of handlerCopies, used to compute the stack of duplicate blocks */
-    private val handlerCopiesInverted = perRunCaches.newMap[BasicBlock, (BasicBlock, TypeKind)]
+    private val handlerCopiesInverted = perRunCaches.newMap[BasicBlock, (BasicBlock, TypeKind)]()
     private def handlerLocal(bb: BasicBlock): Option[Local] =
       for (v <- handlerCopies get bb ; (local, block) <- v ; l <- local) yield l
 
@@ -89,13 +92,13 @@ abstract class InlineExceptionHandlers extends SubComponent {
 
     /** Apply exception handler inlining to a class */
     override def apply(c: IClass): Unit =
-      if (settings.inlineHandlers.value) {
-        val startTime = System.currentTimeMillis
+      if (settings.inlineHandlers) {
+        val startTime = System.nanoTime()
         currentClass = c
 
         debuglog("Starting InlineExceptionHandlers on " + c)
         c.methods foreach applyMethod
-        debuglog("Finished InlineExceptionHandlers on " + c + "... " + (System.currentTimeMillis - startTime) + "ms")
+        debuglog("Finished InlineExceptionHandlers on " + c + "... " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime) + "ms")
         currentClass = null
       }
 
@@ -263,7 +266,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
       if (analyzedMethod eq NoIMethod) {
         analyzedMethod = bblock.method
         tfa.init(bblock.method)
-        tfa.run
+        tfa.run()
         log("      performed tfa on method: " + bblock.method)
 
         for (block <- bblock.method.blocks.sortBy(_.label))
@@ -358,7 +361,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
           }
           val caughtException = toTypeKind(caughtClass.tpe)
           // copy the exception handler code once again, dropping the LOAD_EXCEPTION
-          val copy = handler.code.newBlock
+          val copy = handler.code.newBlock()
           copy.emitOnly((handler.iterator drop dropCount).toSeq: _*)
 
           // extend the handlers of the handler to the copy
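
One small but deliberate change above: phase timing now uses System.nanoTime with TimeUnit.NANOSECONDS.toMillis instead of System.currentTimeMillis, since nanoTime is monotonic and unaffected by wall-clock adjustments. The measurement pattern in isolation:

    import java.util.concurrent.TimeUnit

    // Monotonic elapsed-time measurement, as used in the diff above.
    object ElapsedTimeSketch {
      def timedMillis[T](body: => T): (T, Long) = {
        val start  = System.nanoTime()
        val result = body
        (result, TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start))
      }

      def main(args: Array[String]): Unit = {
        val (sum, ms) = timedMillis((1 to 1000000).map(_.toLong).sum)
        println(s"sum=$sum in ${ms}ms")
      }
    }
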
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 498db78..f6de522 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -49,7 +49,10 @@ abstract class Inliners extends SubComponent {
 
   val phaseName = "inliner"
 
+  override val enabled: Boolean = settings.inline
+
   /** Debug - for timing the inliner. */
+  /****
   private def timed[T](s: String, body: => T): T = {
     val t1 = System.currentTimeMillis()
     val res = body
@@ -60,6 +63,7 @@ abstract class Inliners extends SubComponent {
 
     res
   }
+  ****/
 
   /** Look up implementation of method 'sym in 'clazz'.
    */
@@ -68,18 +72,18 @@ abstract class Inliners extends SubComponent {
     def needsLookup = (
          (clazz != NoSymbol)
       && (clazz != sym.owner)
-      && !sym.isEffectivelyFinal
-      && clazz.isEffectivelyFinal
+      && !sym.isEffectivelyFinalOrNotOverridden
+      && clazz.isEffectivelyFinalOrNotOverridden
     )
     def lookup(clazz: Symbol): Symbol = {
       // println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
       assert(clazz != NoSymbol, "Walked up past Object.superClass looking for " + sym +
                                 ", most likely this reveals the TFA at fault (receiver and callee don't match).")
       if (sym.owner == clazz || isBottomType(clazz)) sym
-      else sym.overridingSymbol(clazz) match {
-        case NoSymbol  => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
-        case imp       => imp
-      }
+      else sym.overridingSymbol(clazz) orElse (
+        if (sym.owner.isTrait) sym
+        else lookup(clazz.superClass)
+      )
     }
     if (needsLookup) {
       val concreteMethod = lookup(clazz)
@@ -193,7 +197,7 @@ abstract class Inliners extends SubComponent {
     private var currentIClazz: IClass = _
     private def warn(pos: Position, msg: String) = currentIClazz.cunit.inlinerWarning(pos, msg)
 
-    private def ownedName(sym: Symbol): String = afterUncurry {
+    private def ownedName(sym: Symbol): String = exitingUncurry {
       val count = (
         if (!sym.isMethod) 1
         else if (sym.owner.isAnonymousFunction) 3
@@ -230,7 +234,7 @@ abstract class Inliners extends SubComponent {
 
       val hasRETURN = containsRETURN(incm.code.blocksList) || (incm.exh exists { eh => containsRETURN(eh.blocks) })
       var a: analysis.MethodTFA = null
-      if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run }
+      if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run() }
 
       if(forceable) { recentTFAs.put(incm.symbol, (hasRETURN, a)) }
 
@@ -240,7 +244,7 @@ abstract class Inliners extends SubComponent {
     def clearCaches() {
       // methods
       NonPublicRefs.usesNonPublics.clear()
-      recentTFAs.clear
+      recentTFAs.clear()
       tfa.knownUnsafe.clear()
       tfa.knownSafe.clear()
       tfa.knownNever.clear()
@@ -263,7 +267,7 @@ abstract class Inliners extends SubComponent {
     }
 
     def analyzeClass(cls: IClass): Unit =
-      if (settings.inline.value) {
+      if (settings.inline) {
         inlineLog("class", s"${cls.symbol.decodedName}", s"analyzing ${cls.methods.size} methods in $cls")
 
         this.currentIClazz = cls
@@ -279,7 +283,7 @@ abstract class Inliners extends SubComponent {
       }
 
     val tfa   = new analysis.MTFAGrowable()
-    tfa.stat  = global.opt.printStats
+    tfa.stat  = global.settings.Ystatistics.value
     val staleOut      = new mutable.ListBuffer[BasicBlock]
     val splicedBlocks = mutable.Set.empty[BasicBlock]
     val staleIn       = mutable.Set.empty[BasicBlock]
@@ -317,11 +321,11 @@ abstract class Inliners extends SubComponent {
      * */
     def analyzeMethod(m: IMethod): Unit = {
       // m.normalize
-      if (settings.debug.value)
+      if (settings.debug)
         inlineLog("caller", ownedName(m.symbol), "in " + m.symbol.owner.fullName)
 
-      var sizeBeforeInlining  = m.code.blockCount
-      var instrBeforeInlining = m.code.instructionCount
+      val sizeBeforeInlining  = m.code.blockCount
+      val instrBeforeInlining = m.code.instructionCount
       var retry = false
       var count = 0
 
@@ -340,7 +344,7 @@ abstract class Inliners extends SubComponent {
         inlineWithoutTFA(inputBlocks, callsites)
       }
 
-      /**
+      /*
        *  Inline straightforward callsites (those that can be inlined without a TFA).
        *
        *  To perform inlining, all we need to know is listed as formal params in `analyzeInc()`:
@@ -358,10 +362,10 @@ abstract class Inliners extends SubComponent {
         for(x <- inputBlocks; easyCake = callsites(x); if easyCake.nonEmpty) {
           breakable {
             for(ocm <- easyCake) {
-              assert(ocm.method.isEffectivelyFinal && ocm.method.owner.isEffectivelyFinal)
+              assert(ocm.method.isEffectivelyFinalOrNotOverridden && ocm.method.owner.isEffectivelyFinalOrNotOverridden)
               if(analyzeInc(ocm, x, ocm.method.owner, -1, ocm.method)) {
                 inlineCount += 1
-                break
+                break()
               }
             }
           }
@@ -370,7 +374,7 @@ abstract class Inliners extends SubComponent {
         inlineCount
       }
 
-      /**
+      /*
        *  Decides whether it's feasible and desirable to inline the body of the method given by `concreteMethod`
        *  at the program point given by `i` (a callsite). The boolean result indicates whether inlining was performed.
        *
@@ -380,8 +384,8 @@ abstract class Inliners extends SubComponent {
         val shouldWarn = hasInline(i.method)
 
         def warnNoInline(reason: String): Boolean = {
-          def msg = "Could not inline required method %s because %s.".format(i.method.originalName.decode, reason)
-          if (settings.debug.value)
+          def msg = "Could not inline required method %s because %s.".format(i.method.unexpandedName.decode, reason)
+          if (settings.debug)
             inlineLog("fail", i.method.fullName, reason)
           if (shouldWarn)
             warn(i.pos, msg)
@@ -405,8 +409,8 @@ abstract class Inliners extends SubComponent {
 
         def isCandidate = (
              isClosureClass(receiver)
-          || concreteMethod.isEffectivelyFinal
-          || receiver.isEffectivelyFinal
+          || concreteMethod.isEffectivelyFinalOrNotOverridden
+          || receiver.isEffectivelyFinalOrNotOverridden
         )
 
         def isApply     = concreteMethod.name == nme.apply
@@ -421,7 +425,7 @@ abstract class Inliners extends SubComponent {
         debuglog("Treating " + i
               + "\n\treceiver: " + receiver
               + "\n\ticodes.available: " + isAvailable
-              + "\n\tconcreteMethod.isEffectivelyFinal: " + concreteMethod.isEffectivelyFinal)
+              + "\n\tconcreteMethod.isEffectivelyFinalOrNotOverridden: " + concreteMethod.isEffectivelyFinalOrNotOverridden)
 
         if (!isCandidate) warnNoInline("it can be overridden")
         else if (!isAvailable) warnNoInline("bytecode unavailable")
@@ -439,7 +443,6 @@ abstract class Inliners extends SubComponent {
               case DontInlineHere(msg)                       => warnNoInline(msg)
               case NeverSafeToInline                         => false
               case InlineableAtThisCaller                    => true
-              case inl @ FeasibleInline(_, _) if !inl.isSafe => false
               case FeasibleInline(required, toPublicize)     =>
                 for (f <- toPublicize) {
                   inlineLog("access", f, "making public")
@@ -477,9 +480,9 @@ abstract class Inliners extends SubComponent {
        * As a whole, both `preInline()` invocations amount to priming the inlining process,
        * so that the first TFA that is run afterwards is able to gain more information as compared to a cold-start.
        */
-      val totalPreInlines = {
-        val firstRound = preInline(true)
-        if(firstRound == 0) 0 else (firstRound + preInline(false))
+      /*val totalPreInlines = */ { // Val name commented out to emphasize it is never used
+        val firstRound = preInline(isFirstRound = true)
+        if(firstRound == 0) 0 else (firstRound + preInline(isFirstRound = false))
       }
       staleOut.clear()
       splicedBlocks.clear()
@@ -511,7 +514,7 @@ abstract class Inliners extends SubComponent {
             for (cm <- cms; if tfa.remainingCALLs.isDefinedAt(cm)) {
               val analysis.CallsiteInfo(_, receiver, stackLength, concreteMethod) = tfa.remainingCALLs(cm)
               if (analyzeInc(cm, bb, receiver, stackLength, concreteMethod)) {
-                break
+                break()
               }
             }
           }
@@ -563,13 +566,12 @@ abstract class Inliners extends SubComponent {
       while (retry && count < MAX_INLINE_RETRY)
 
       for(inlFail <- tfa.warnIfInlineFails) {
-        warn(inlFail.pos, "At the end of the day, could not inline @inline-marked method " + inlFail.method.originalName.decode)
+        warn(inlFail.pos, "At the end of the day, could not inline @inline-marked method " + inlFail.method.unexpandedName.decode)
       }
 
-      m.normalize
+      m.normalize()
       if (sizeBeforeInlining > 0) {
         val instrAfterInlining = m.code.instructionCount
-        val prefix = if ((instrAfterInlining > 2 * instrBeforeInlining) && (instrAfterInlining > 200)) "!!" else ""
         val inlinings = caller.inlinedCalls
         if (inlinings > 0) {
           val s1      = s"instructions $instrBeforeInlining -> $instrAfterInlining"
@@ -584,13 +586,13 @@ abstract class Inliners extends SubComponent {
 
     private def isHigherOrderMethod(sym: Symbol) = (
          sym.isMethod
-      && beforeExplicitOuter(sym.info.paramTypes exists isFunctionType) // was "at erasurePhase.prev"
+      && enteringExplicitOuter(sym.info.paramTypes exists isFunctionType) // was "at erasurePhase.prev"
     )
 
     /** Should method 'sym' being called in 'receiver' be loaded from disk? */
     def shouldLoadImplFor(sym: Symbol, receiver: Symbol): Boolean = {
       def alwaysLoad    = (receiver.enclosingPackage == RuntimePackage) || (receiver == PredefModule.moduleClass)
-      def loadCondition = sym.isEffectivelyFinal && isMonadicMethod(sym) && isHigherOrderMethod(sym)
+      def loadCondition = sym.isEffectivelyFinalOrNotOverridden && isMonadicMethod(sym) && isHigherOrderMethod(sym)
 
       val res = hasInline(sym) || alwaysLoad || loadCondition
       debuglog("shouldLoadImplFor: " + receiver + "." + sym + ": " + res)
@@ -601,7 +603,6 @@ abstract class Inliners extends SubComponent {
       override def toString = m.toString
 
       val sym           = m.symbol
-      val name          = sym.name
       def owner         = sym.owner
       def paramTypes    = sym.info.paramTypes
       def minimumStack  = paramTypes.length + 1
@@ -617,13 +618,11 @@ abstract class Inliners extends SubComponent {
       def length        = blocks.length
       def openBlocks    = blocks filterNot (_.closed)
       def instructions  = m.code.instructions
-      // def linearized    = linearizer linearize m
 
       def isSmall         = (length <= SMALL_METHOD_SIZE) && blocks(0).length < 10
       def isLarge         = length > MAX_INLINE_SIZE
       def isRecursive     = m.recursive
       def hasHandlers     = handlers.nonEmpty || m.bytecodeHasEHs
-      def hasClosureParam = paramTypes exists (tp => isByNameParamType(tp) || isFunctionType(tp))
 
       def isSynchronized         = sym.hasFlag(Flags.SYNCHRONIZED)
       def hasNonFinalizerHandler = handlers exists {
@@ -681,9 +680,18 @@ abstract class Inliners extends SubComponent {
         }
         */
 
-        def checkField(f: Symbol)   = check(f, f.isPrivate && !canMakePublic(f))
-        def checkSuper(n: Symbol)   = check(n, n.isPrivate || !n.isClassConstructor)
-        def checkMethod(n: Symbol)  = check(n, n.isPrivate)
+
+        def isPrivateForInlining(sym: Symbol): Boolean = {
+          if (sym.isJavaDefined) {
+            def check(sym: Symbol) = !(sym.isPublic || sym.isProtected)
+            check(sym) || check(sym.owner) // SI-7582 Must check the enclosing class *and* the symbol for Java.
+          }
+          else sym.isPrivate // Scala never emits package-private bytecode
+        }
+
+        def checkField(f: Symbol)   = check(f, isPrivateForInlining(f) && !canMakePublic(f))
+        def checkSuper(n: Symbol)   = check(n, isPrivateForInlining(n) || !n.isClassConstructor)
+        def checkMethod(n: Symbol)  = check(n, isPrivateForInlining(n))
 
         def getAccess(i: Instruction) = i match {
           case CALL_METHOD(n, SuperCall(_)) => checkSuper(n)
@@ -729,17 +737,11 @@ abstract class Inliners extends SubComponent {
      *   - either log the reason for failure --- case (b) ---,
      *   - or perform inlining --- case (a) ---.
      */
-    sealed abstract class InlineSafetyInfo {
-      def isSafe   = false
-      def isUnsafe = !isSafe
-    }
+    sealed abstract class InlineSafetyInfo
     case object NeverSafeToInline           extends InlineSafetyInfo
-    case object InlineableAtThisCaller      extends InlineSafetyInfo { override def isSafe = true }
+    case object InlineableAtThisCaller      extends InlineSafetyInfo
     case class  DontInlineHere(msg: String) extends InlineSafetyInfo
-    case class  FeasibleInline(accessNeeded: NonPublicRefs.Value,
-                               toBecomePublic: List[Symbol]) extends InlineSafetyInfo {
-      override def isSafe = true
-    }
+    case class  FeasibleInline(accessNeeded: NonPublicRefs.Value, toBecomePublic: List[Symbol]) extends InlineSafetyInfo
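
With the isSafe/isUnsafe flags gone, callers of InlineSafetyInfo match exhaustively on the sealed cases, mirroring the simplified match visible earlier in this diff. Roughly, with a trimmed-down hierarchy and placeholder bodies (a sketch, not the compiler's code):

    // Matching on a sealed safety ADT instead of boolean flags (placeholder bodies).
    object InlineSafetySketch {
      sealed abstract class InlineSafetyInfo
      case object NeverSafeToInline      extends InlineSafetyInfo
      case object InlineableAtThisCaller extends InlineSafetyInfo
      case class DontInlineHere(msg: String) extends InlineSafetyInfo
      case class FeasibleInline(toBecomePublic: List[String]) extends InlineSafetyInfo

      def decide(info: InlineSafetyInfo): Boolean = info match {
        case NeverSafeToInline      => false
        case DontInlineHere(msg)    => println(s"not inlining: $msg"); false
        case InlineableAtThisCaller => true
        case FeasibleInline(toPublic) =>
          toPublic.foreach(f => println(s"making $f public"))
          true
      }

      def main(args: Array[String]): Unit =
        println(decide(FeasibleInline(List("Foo.secret"))))   // making Foo.secret public / true
    }
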
 
     case class AccessReq(
       accessNeeded:   NonPublicRefs.Value,
@@ -791,7 +793,7 @@ abstract class Inliners extends SubComponent {
 
         val varsInScope = mutable.HashSet[Local]() ++= block.varsInScope
 
-        /** Side effects varsInScope when it sees SCOPE_ENTERs. */
+        /* Side effects varsInScope when it sees SCOPE_ENTERs. */
         def instrBeforeFilter(i: Instruction): Boolean = {
           i match { case SCOPE_ENTER(l) => varsInScope += l ; case _ => () }
           i ne instr
@@ -804,7 +806,7 @@ abstract class Inliners extends SubComponent {
         // store the '$this' into the special local
         val inlinedThis = newLocal("$inlThis", REFERENCE(ObjectClass))
 
-        /** buffer for the returned value */
+        /* buffer for the returned value */
         val retVal = inc.m.returnType match {
           case UNIT  => null
           case x     => newLocal("$retVal", x)
@@ -812,9 +814,9 @@ abstract class Inliners extends SubComponent {
 
         val inlinedLocals = mutable.HashMap.empty[Local, Local]
 
-        /** Add a new block in the current context. */
+        /* Add a new block in the current context. */
         def newBlock() = {
-          val b = caller.m.code.newBlock
+          val b = caller.m.code.newBlock()
           activeHandlers foreach (_ addCoveredBlock b)
           if (retVal ne null) b.varsInScope += retVal
           b.varsInScope += inlinedThis
@@ -829,7 +831,7 @@ abstract class Inliners extends SubComponent {
           handler
         }
 
-        /** alfa-rename `l` in caller's context. */
+        /* alfa-rename `l` in caller's context. */
         def dupLocal(l: Local): Local = {
           val sym = caller.sym.newVariable(freshName(l.sym.name.toString), l.sym.pos)
           // sym.setInfo(l.sym.tpe)
@@ -840,10 +842,10 @@ abstract class Inliners extends SubComponent {
 
         val afterBlock = newBlock()
 
-        /** Map from nw.init instructions to their matching NEW call */
+        /* Map from nw.init instructions to their matching NEW call */
         val pending: mutable.Map[Instruction, NEW] = new mutable.HashMap
 
-        /** Map an instruction from the callee to one suitable for the caller. */
+        /* Map an instruction from the callee to one suitable for the caller. */
         def map(i: Instruction): Instruction = {
           def assertLocal(l: Local) = {
             assert(caller.locals contains l, "Could not find local '" + l + "' in locals, nor in inlinedLocals: " + inlinedLocals)
@@ -872,7 +874,7 @@ abstract class Inliners extends SubComponent {
               r
 
             case CALL_METHOD(meth, Static(true)) if meth.isClassConstructor =>
-              CALL_METHOD(meth, Static(true))
+              CALL_METHOD(meth, Static(onInstance = true))
 
             case _ => i.clone()
           }
@@ -893,8 +895,8 @@ abstract class Inliners extends SubComponent {
         }
 
         // re-emit the instructions before the call
-        block.open
-        block.clear
+        block.open()
+        block.clear()
         block emit instrBefore
 
         // store the arguments into special locals
@@ -903,7 +905,7 @@ abstract class Inliners extends SubComponent {
 
         // jump to the start block of the callee
         blockEmit(JUMP(inlinedBlock(inc.m.startBlock)))
-        block.close
+        block.close()
 
         // duplicate the other blocks in the callee
         val calleeLin = inc.m.linearizedBlocks()
@@ -926,11 +928,11 @@ abstract class Inliners extends SubComponent {
             emitInlined(map(i))
             info = if(hasRETURN) a.interpret(info, i) else null
           }
-          inlinedBlock(bb).close
+          inlinedBlock(bb).close()
         }
 
         afterBlock emit instrAfter
-        afterBlock.close
+        afterBlock.close()
 
         staleIn        += afterBlock
         splicedBlocks ++= (calleeLin map inlinedBlock)
@@ -938,7 +940,7 @@ abstract class Inliners extends SubComponent {
         // add exception handlers of the callee
         caller addHandlers (inc.handlers map translateExh)
         assert(pending.isEmpty, "Pending NEW elements: " + pending)
-        if (settings.debug.value) icodes.checkValid(caller.m)
+        if (settings.debug) icodes.checkValid(caller.m)
       }
 
       def isStampedForInlining(stackLength: Int): InlineSafetyInfo = {
@@ -973,7 +975,7 @@ abstract class Inliners extends SubComponent {
           }
 
           if(sameSymbols) { // TODO but this also amounts to recursive, ie should lead to adding to tfa.knownNever, right?
-            tfa.knownUnsafe += inc.sym;
+            tfa.knownUnsafe += inc.sym
             return DontInlineHere("sameSymbols (ie caller == callee)")
           }
 
@@ -1032,7 +1034,6 @@ abstract class Inliners extends SubComponent {
         case Public     => true
       }
       private def sameSymbols = caller.sym == inc.sym
-      private def sameOwner   = caller.owner == inc.owner
 
       /** Gives green light for inlining (which may still be vetoed later). Heuristics:
        *   - it's bad to make the caller larger (> SMALL_METHOD_SIZE) if it was small
@@ -1048,9 +1049,9 @@ abstract class Inliners extends SubComponent {
         if (caller.isInClosure)           score -= 2
         else if (caller.inlinedCalls < 1) score -= 1 // only monadic methods can trigger the first inline
 
-        if (inc.isSmall) score += 1;
+        if (inc.isSmall) score += 1
         // if (inc.hasClosureParam) score += 2
-        if (inc.isLarge) score -= 1;
+        if (inc.isLarge) score -= 1
         if (caller.isSmall && isLargeSum) {
           score -= 1
           debuglog(s"inliner score decreased to $score because small caller $caller would become large")
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
deleted file mode 100644
index 7f5f412..0000000
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ /dev/null
@@ -1,227 +0,0 @@
-package scala.tools.nsc
-package dependencies
-
-import symtab.Flags
-
-import scala.collection._
-
-/** A component that describes the possible changes between successive
- *  compilations of a class.
- */
-abstract class Changes {
-
-  /** A compiler instance used to compile files on demand. */
-  val compiler: Global
-
-  import compiler._
-  import symtab.Flags._
-
-  abstract class Change
-
-  private lazy val annotationsChecked =
-    List(definitions.SpecializedClass) // Any others that should be checked?
-
-  private val flagsToCheck = IMPLICIT | FINAL | PRIVATE | PROTECTED | SEALED |
-                             OVERRIDE | CASE | ABSTRACT | DEFERRED | METHOD |
-                             MODULE | INTERFACE | PARAM | BYNAMEPARAM | CONTRAVARIANT |
-                             DEFAULTPARAM | ACCESSOR | LAZY | SPECIALIZED
-
-  /** Are the new modifiers more restrictive than the old ones? */
-  private def moreRestrictive(from: Long, to: Long): Boolean =
-    ((((to & PRIVATE) != 0L) && (from & PRIVATE) == 0L)
-     || (((to & PROTECTED) != 0L) && (from & PROTECTED) == 0L))
-
-  /** Check if flags have changed. */
-  private def modifiedFlags(from: Long, to: Long): Boolean =
-    (from & IMPLICIT) != (to & IMPLICIT)
-
-  /** An entity in source code, either a class or a member definition.
-   *  Name is fully-qualified.
-   */
-  abstract class Entity
-  case class Class(name: String) extends Entity
-  case class Definition(name: String) extends Entity
-
-  case class Added(e: Entity) extends Change
-  case class Removed(e: Entity) extends Change
-  case class Changed(e: Entity)(implicit val reason: String) extends Change {
-    override def toString = "Changed(" + e + ")[" + reason + "]"
-  }
-  case class ParentChanged(e: Entity) extends Change
-
-  private val changedTypeParams = new mutable.HashSet[String]
-
-  private def sameParameterSymbolNames(sym1: Symbol, sym2: Symbol): Boolean =
-  	sameSymbol(sym1, sym2, true) || sym2.encodedName.startsWith(sym1.encodedName + nme.NAME_JOIN_STRING) // see #3140
-  private def sameSymbol(sym1: Symbol, sym2: Symbol, simple: Boolean = false): Boolean =
-    if (simple) sym1.encodedName == sym2.encodedName else sym1.fullName == sym2.fullName
-  private def sameFlags(sym1: Symbol, sym2: Symbol): Boolean =
-    	(sym1.flags & flagsToCheck) == (sym2.flags & flagsToCheck)
-  private def sameAnnotations(sym1: Symbol, sym2: Symbol): Boolean =
-    annotationsChecked.forall(a =>
-      (sym1.hasAnnotation(a) == sym2.hasAnnotation(a)))
-
-  private def sameType(tp1: Type, tp2: Type)(implicit strict: Boolean) = {
-    def typeOf(tp: Type): String = tp.toString + "[" + tp.getClass + "]"
-    val res = sameType0(tp1, tp2)
-    //if (!res) println("\t different types: " + typeOf(tp1) + " : " + typeOf(tp2))
-    res
-  }
-
-  private def sameType0(tp1: Type, tp2: Type)(implicit strict: Boolean): Boolean = ((tp1, tp2) match {
-    /*case (ErrorType, _) => false
-    case (WildcardType, _) => false
-    case (_, ErrorType) => false
-    case (_, WildcardType) => false
-    */
-    case (NoType, _) => false
-    case (NoPrefix, NoPrefix) => true
-    case (_, NoType) => false
-    case (_, NoPrefix) => false
-
-    case (ThisType(sym1), ThisType(sym2))
-      if sameSymbol(sym1, sym2) => true
-
-    case (SingleType(pre1, sym1), SingleType(pre2, sym2))
-      if sameType(pre1, pre2) && sameSymbol(sym1, sym2) => true
-    case (ConstantType(value1), ConstantType(value2)) =>
-      value1 == value2
-    case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
-      val testSymbols =
-        if (!sameSymbol(sym1, sym2)) {
-          val v = (!strict && sym1.isType && sym2.isType && sameType(sym1.info, sym2.info))
-          if (v) changedTypeParams += sym1.fullName
-          v
-        } else
-          !sym1.isTypeParameter || !changedTypeParams.contains(sym1.fullName)
-
-      testSymbols && sameType(pre1, pre2) &&
-        (sym1.variance == sym2.variance) &&
-        ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
-           sameTypes(args1, args2))
-         // @M! normalize reduces higher-kinded case to PolyType's
-
-    case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) =>
-      def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
-        sym2 =>
-          var e1 = s1.lookupEntry(sym2.name)
-          (e1 ne null) && {
-            var isEqual = false
-            while (!isEqual && (e1 ne null)) {
-              isEqual = sameType(e1.sym.info, sym2.info)
-              e1 = s1.lookupNextEntry(e1)
-            }
-            isEqual
-          }
-      }
-      sameTypes(parents1, parents2) && isSubScope(ref1, ref2) && isSubScope(ref2, ref1)
-    case (mt1 @ MethodType(params1, res1), mt2 @ MethodType(params2, res2)) =>
-      // new dependent types: probably fix this, use substSym as done for PolyType
-      sameTypes(tp1.paramTypes, tp2.paramTypes) &&
-      (tp1.params corresponds tp2.params)((t1, t2) => sameParameterSymbolNames(t1, t2) && sameFlags(t1, t2)) &&
-      sameType(res1, res2) &&
-      mt1.isImplicit == mt2.isImplicit
-    case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
-      sameTypeParams(tparams1, tparams2) && sameType(res1, res2)
-    case (NullaryMethodType(res1), NullaryMethodType(res2)) =>
-      sameType(res1, res2)
-    case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
-      sameTypeParams(tparams1, tparams2)(false) && sameType(res1, res2)(false)
-    case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
-      sameType(lo1, lo2) && sameType(hi1, hi2)
-    case (BoundedWildcardType(bounds), _) =>
-      bounds containsType tp2
-    case (_, BoundedWildcardType(bounds)) =>
-      bounds containsType tp1
-    case (AnnotatedType(_,_,_), _) =>
-      annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) &&
-      sameType(tp1.withoutAnnotations, tp2.withoutAnnotations)
-    case (_, AnnotatedType(_,_,_)) =>
-      annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) &&
-      sameType(tp1.withoutAnnotations, tp2.withoutAnnotations)
-    case (_: SingletonType, _: SingletonType) =>
-      var origin1 = tp1
-      while (origin1.underlying.isInstanceOf[SingletonType]) {
-        assert(origin1 ne origin1.underlying, origin1)
-        origin1 = origin1.underlying
-      }
-      var origin2 = tp2
-      while (origin2.underlying.isInstanceOf[SingletonType]) {
-        assert(origin2 ne origin2.underlying, origin2)
-        origin2 = origin2.underlying
-      }
-      ((origin1 ne tp1) || (origin2 ne tp2)) && sameType(origin1, origin2)
-    case _ =>
-      false
-    }) || {
-      val tp1n = normalizePlus(tp1)
-      val tp2n = normalizePlus(tp2)
-      ((tp1n ne tp1) || (tp2n ne tp2)) && sameType(tp1n, tp2n)
-    }
-
-  private def sameTypeParams(tparams1: List[Symbol], tparams2: List[Symbol])(implicit strict: Boolean) =
-    sameTypes(tparams1 map (_.info), tparams2 map (_.info)) &&
-    sameTypes(tparams1 map (_.tpe), tparams2 map (_.tpe)) &&
-    (tparams1 corresponds tparams2)((t1, t2) => sameAnnotations(t1, t2))
-
-  private def sameTypes(tps1: List[Type], tps2: List[Type])(implicit strict: Boolean) =
-    (tps1 corresponds tps2)(sameType(_, _))
-
-  /** Return the list of changes between 'from' and 'toSym.info'.
-   */
-  def changeSet(from: Type, toSym: Symbol): List[Change] = {
-    implicit val defaultReason = "types"
-    implicit val defaultStrictTypeRefTest = true
-
-    val to = toSym.info
-    changedTypeParams.clear
-    def omitSymbols(s: Symbol): Boolean = !s.hasFlag(LOCAL | LIFTED | PRIVATE | SYNTHETIC)
-    val cs = new mutable.ListBuffer[Change]
-
-    if ((from.parents zip to.parents) exists { case (t1, t2) => !sameType(t1, t2) })
-      cs += Changed(toEntity(toSym))(from.parents.zip(to.parents).toString)
-    if (!sameTypeParams(from.typeParams, to.typeParams)(false))
-      cs += Changed(toEntity(toSym))(" tparams: " + from.typeParams.zip(to.typeParams))
-
-    // new members not yet visited
-    val newMembers = mutable.HashSet[Symbol]()
-    newMembers ++= to.decls.iterator filter omitSymbols
-
-    for (o <- from.decls.iterator filter omitSymbols) {
-      val n = to.decl(o.name)
-      newMembers -= n
-
-      if (o.isClass)
-        cs ++= changeSet(o.info, n)
-      else if (n == NoSymbol)
-        cs += Removed(toEntity(o))
-      else {
-        val newSym =
-            o match {
-              case _:TypeSymbol if o.isAliasType =>
-                n.suchThat(ov => sameType(ov.info, o.info))
-              case _                             =>
-                n.suchThat(ov => sameType(ov.tpe, o.tpe))
-             }
-        if (newSym == NoSymbol || moreRestrictive(o.flags, newSym.flags) || modifiedFlags(o.flags, newSym.flags))
-          cs += Changed(toEntity(o))(n + " changed from " + o.tpe + " to " + n.tpe + " flags: " + Flags.flagsToString(o.flags))
-        else if (newSym.isGetter && (o.accessed(from).hasFlag(MUTABLE) != newSym.accessed.hasFlag(MUTABLE)))
-          // o.owner is already updated to newSym.owner
-          // so o.accessed will return the accessed for the new owner
-          cs += Changed(toEntity(o))(o.accessed(from) + " changed to " + newSym.accessed)
-        else
-          newMembers -= newSym
-      }
-    }: Unit // Give the type explicitly until #2281 is fixed
-
-    cs ++= (newMembers map (Added compose toEntity))
-    cs.toList
-  }
-  def removeChangeSet(sym: Symbol): Change = Removed(toEntity(sym))
-  def changeChangeSet(sym: Symbol, msg: String): Change = Changed(toEntity(sym))(msg)
-  def parentChangeSet(sym: Symbol): Change = ParentChanged(toEntity(sym))
-
-  private def toEntity(sym: Symbol): Entity =
-    if (sym.isClass) Class(sym.fullName)
-    else Definition(sym.fullName)
-}
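
A minimal standalone sketch of the `moreRestrictive` check removed above: a member counts as more restrictive when it gains a PRIVATE or PROTECTED bit it did not have before (the bit masks below are placeholders; the real code imports the constants from symtab.Flags):

    object RestrictionCheck {
      // Placeholder masks for illustration; the compiler uses symtab.Flags._ values.
      val PRIVATE   = 0x1L
      val PROTECTED = 0x2L

      def moreRestrictive(from: Long, to: Long): Boolean =
        (((to & PRIVATE) != 0L) && ((from & PRIVATE) == 0L)) ||
        (((to & PROTECTED) != 0L) && ((from & PROTECTED) == 0L))
    }
    // RestrictionCheck.moreRestrictive(from = 0L, to = RestrictionCheck.PRIVATE)  // true
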
diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
deleted file mode 100644
index cdde768..0000000
--- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
+++ /dev/null
@@ -1,254 +0,0 @@
-package scala.tools.nsc
-package dependencies
-
-import io.Path
-import scala.collection._
-import symtab.Flags
-import scala.tools.nsc.io.AbstractFile
-import scala.reflect.internal.util.SourceFile
-
-trait DependencyAnalysis extends SubComponent with Files {
-  import global._
-
-  val phaseName = "dependencyAnalysis"
-
-  def off                  = settings.make.isDefault || settings.make.value == "all"
-  def shouldCheckClasspath = settings.make.value != "transitivenocp"
-
-  def newPhase(prev: Phase) = new AnalysisPhase(prev)
-
-  private def depPath = Path(settings.dependenciesFile.value)
-  def loadDependencyAnalysis(): Boolean = (
-    depPath.path != "none" && depPath.isFile && loadFrom(
-      AbstractFile.getFile(depPath),
-      path => AbstractFile.getFile(depPath.parent resolve Path(path))
-    )
-  )
-  def saveDependencyAnalysis(): Unit = {
-    if (!depPath.exists)
-      dependenciesFile = AbstractFile.getFile(depPath.createFile())
-
-    /** The directory where file lookup should start */
-    val rootPath = depPath.parent.normalize
-    saveDependencies(
-      file => rootPath.relativize(Path(file.file).normalize).path
-    )
-  }
-
-  lazy val maxDepth = settings.make.value match {
-    case "changed"   => 0
-    case "immediate" => 1
-    case _           => Int.MaxValue
-  }
-
-  // todo: order-insensitive checking, and also timestamp checking?
-  def validateClasspath(cp1: String, cp2: String): Boolean = cp1 == cp2
-
-  def nameToFile(src: AbstractFile, name: String) =
-    settings.outputDirs.outputDirFor(src)
-      .lookupPathUnchecked(name.toString.replace(".", java.io.File.separator) + ".class", false)
-
-  private var depFile: Option[AbstractFile] = None
-
-  def dependenciesFile_=(file: AbstractFile) {
-    assert(file ne null)
-    depFile = Some(file)
-  }
-
-  def dependenciesFile: Option[AbstractFile] = depFile
-
-  def classpath = settings.classpath.value
-  def newDeps = new FileDependencies(classpath)
-
-  var dependencies = newDeps
-
-  def managedFiles = dependencies.dependencies.keySet
-
-  /** Top level definitions per source file. */
-  val definitions: mutable.Map[AbstractFile, List[Symbol]] =
-    new mutable.HashMap[AbstractFile, List[Symbol]] {
-      override def default(f: AbstractFile) = Nil
-  }
-
-  /** External references used by source file. */
-  val references: mutable.Map[AbstractFile, immutable.Set[String]] =
-    new mutable.HashMap[AbstractFile, immutable.Set[String]] {
-      override def default(f: AbstractFile) = immutable.Set()
-    }
-
-  /** External references for inherited members used in the source file */
-  val inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] =
-    new mutable.HashMap[AbstractFile, immutable.Set[Inherited]] {
-      override def default(f: AbstractFile) = immutable.Set()
-    }
-
-  /** Write dependencies to the current file. */
-  def saveDependencies(fromFile: AbstractFile => String) =
-    if(dependenciesFile.isDefined)
-      dependencies.writeTo(dependenciesFile.get, fromFile)
-
-  /** Load dependencies from the given file and save the file reference for
-   *  future saves.
-   */
-  def loadFrom(f: AbstractFile, toFile: String => AbstractFile): Boolean = {
-    dependenciesFile = f
-    FileDependencies.readFrom(f, toFile) match {
-      case Some(fd) =>
-        val success = if (shouldCheckClasspath) validateClasspath(fd.classpath, classpath) else true
-        dependencies = if (success) fd else {
-          if (settings.debug.value)
-            println("Classpath has changed. Nuking dependencies")
-          newDeps
-        }
-
-        success
-      case None => false
-    }
-  }
-
-  def calculateFiles(files: List[SourceFile]): List[SourceFile] =
-    if (off) files
-    else if (dependencies.isEmpty) {
-      println("No known dependencies. Compiling " +
-              (if (settings.debug.value) files.mkString(", ") else "everything"))
-      files
-    } else {
-      val (direct, indirect) = dependencies.invalidatedFiles(maxDepth);
-      val filtered = files.filter(x => {
-        val f = x.file.absolute
-        direct(f) || indirect(f) || !dependencies.containsFile(f);
-      })
-      filtered match {
-        case Nil => println("No changes to recompile");
-        case x => println("Recompiling " + (
-          if(settings.debug.value) x.mkString(", ") else x.length + " files")
-        )
-      }
-      filtered
-    }
-
-  case class Inherited(qualifier: String, member: Name)
-
-  class AnalysisPhase(prev: Phase) extends StdPhase(prev) {
-
-    override def cancelled(unit: CompilationUnit) =
-      super.cancelled(unit) && !unit.isJava
-
-    def apply(unit : global.CompilationUnit) {
-      val f = unit.source.file.file
-      // When we're passed strings by the interpreter
-      // they  have no source file. We simply ignore this case
-      // as irrelevant to dependency analysis.
-      if (f != null){
-        val source: AbstractFile = unit.source.file;
-        for (d <- unit.icode){
-          val name = d.toString
-          d.symbol match {
-            case s : ModuleClassSymbol =>
-              val isTopLevelModule = afterPickler { !s.isImplClass && !s.isNestedClass }
-
-              if (isTopLevelModule && (s.companionModule != NoSymbol)) {
-                dependencies.emits(source, nameToFile(unit.source.file, name))
-              }
-              dependencies.emits(source, nameToFile(unit.source.file, name + "$"))
-            case _ =>
-              dependencies.emits(source, nameToFile(unit.source.file, name))
-          }
-        }
-
-        dependencies.reset(source)
-        for (d <- unit.depends; if (d.sourceFile != null)){
-          dependencies.depends(source, d.sourceFile)
-        }
-      }
-
-      // find all external references in this compilation unit
-      val file = unit.source.file
-      references += file -> immutable.Set.empty[String]
-      inherited += file -> immutable.Set.empty[Inherited]
-
-      val buf = new mutable.ListBuffer[Symbol]
-
-      (new Traverser {
-        override def traverse(tree: Tree) {
-          if ((tree.symbol ne null)
-              && (tree.symbol != NoSymbol)
-              && (!tree.symbol.isPackage)
-              && (!tree.symbol.isJavaDefined)
-              && (!tree.symbol.tpe.isError)
-              && ((tree.symbol.sourceFile eq null)
-                  || (tree.symbol.sourceFile.path != file.path))
-              && (!tree.symbol.isClassConstructor)) {
-            updateReferences(tree.symbol.fullName)
-            // was "at uncurryPhase.prev", which is actually non-deterministic
-            // because the continuations plugin may or may not supply uncurry's
-            // immediately preceding phase.
-            beforeRefchecks(checkType(tree.symbol.tpe))
-          }
-
-          tree match {
-            case cdef: ClassDef if !cdef.symbol.hasPackageFlag &&
-                                   !cdef.symbol.isAnonymousFunction =>
-              if (cdef.symbol != NoSymbol) buf += cdef.symbol
-              // was "at erasurePhase.prev"
-              beforeExplicitOuter {
-                for (s <- cdef.symbol.info.decls)
-                  s match {
-                    case ts: TypeSymbol if !ts.isClass =>
-                      checkType(s.tpe)
-                    case _ =>
-                  }
-              }
-              super.traverse(tree)
-
-            case ddef: DefDef =>
-              // was "at typer.prev"
-              beforeTyper { checkType(ddef.symbol.tpe) }
-              super.traverse(tree)
-            case a @ Select(q, n) if ((a.symbol != NoSymbol) && (q.symbol != null)) => // #2556
-              if (!a.symbol.isConstructor &&
-                  !a.symbol.owner.isPackageClass &&
-                  !isSameType(q.tpe, a.symbol.owner.tpe))
-                  inherited += file ->
-                    (inherited(file) + Inherited(q.symbol.tpe.resultType.safeToString, n))
-              super.traverse(tree)
-            case _            =>
-              super.traverse(tree)
-          }
-        }
-
-        def checkType(tpe: Type): Unit =
-          tpe match {
-            case t: MethodType =>
-              checkType(t.resultType)
-              for (s <- t.params) checkType(s.tpe)
-
-            case t: TypeRef    =>
-              if (t.sym.isAliasType) {
-                  updateReferences(t.typeSymbolDirect.fullName)
-                  checkType(t.typeSymbolDirect.info)
-              }
-              updateReferences(t.typeSymbol.fullName)
-              for (tp <- t.args) checkType(tp)
-
-            case t: PolyType   =>
-              checkType(t.resultType)
-              updateReferences(t.typeSymbol.fullName)
-
-            case t: NullaryMethodType =>
-              checkType(t.resultType)
-              updateReferences(t.typeSymbol.fullName)
-
-            case t             =>
-              updateReferences(t.typeSymbol.fullName)
-          }
-
-        def updateReferences(s: String): Unit =
-          references += file -> (references(file) + s)
-
-      }).apply(unit.body)
-
-      definitions(unit.source.file) = buf.toList
-    }
-  }
-}
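
For reference, the depth passed to `invalidatedFiles` follows directly from the `-make` mode read by `maxDepth` above; a small standalone sketch of that mapping:

    // Mirrors `maxDepth` above: how far invalidation propagates per -make mode.
    def depthFor(makeMode: String): Int = makeMode match {
      case "changed"   => 0             // only files whose outputs are stale
      case "immediate" => 1             // plus their direct dependents
      case _           => Int.MaxValue  // full transitive closure
    }
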
diff --git a/src/compiler/scala/tools/nsc/dependencies/Files.scala b/src/compiler/scala/tools/nsc/dependencies/Files.scala
deleted file mode 100644
index 194351a..0000000
--- a/src/compiler/scala/tools/nsc/dependencies/Files.scala
+++ /dev/null
@@ -1,177 +0,0 @@
-package scala.tools.nsc
-package dependencies
-
-import java.io.{InputStream, OutputStream, PrintStream, InputStreamReader, BufferedReader}
-import io.{AbstractFile, PlainFile, VirtualFile}
-
-import scala.collection._
-
-
-trait Files { self : SubComponent =>
-
-  class FileDependencies(val classpath: String) {
-    import FileDependencies._
-
-    class Tracker extends mutable.OpenHashMap[AbstractFile, mutable.Set[AbstractFile]] {
-      override def default(key: AbstractFile) = {
-        this(key) = new mutable.HashSet[AbstractFile]
-        this(key)
-      }
-    }
-
-    val dependencies = new Tracker
-    val targets =  new Tracker
-
-    def isEmpty = dependencies.isEmpty && targets.isEmpty
-
-    def emits(source: AbstractFile, result: AbstractFile) =
-      targets(source) += result
-    def depends(from: AbstractFile, on: AbstractFile) =
-      dependencies(from) += on
-
-    def reset(file: AbstractFile) = dependencies -= file
-
-    def cleanEmpty = {
-      dependencies foreach {case (_, value) =>
-                               value retain (x => x.exists && (x ne removedFile))}
-      dependencies retain ((key, value) => key.exists && !value.isEmpty)
-      targets foreach {case (_, value) => value retain (_.exists)}
-      targets retain ((key, value) => key.exists && !value.isEmpty)
-    }
-
-    def containsFile(f: AbstractFile) = targets.contains(f.absolute)
-
-    def invalidatedFiles(maxDepth: Int) = {
-      val direct = new mutable.HashSet[AbstractFile]
-
-      for ((file, products) <- targets) {
-        // This looks a bit odd. It may seem like one should invalidate a file
-        // if *any* of its dependencies are older than it. The forall is there
-        // to deal with the fact that a) Some results might have been orphaned
-        // and b) Some files might not need changing.
-        direct(file) ||= products.forall(d => d.lastModified < file.lastModified)
-      }
-
-      val indirect = dependentFiles(maxDepth, direct)
-
-      for ((source, targets) <- targets
-           if direct(source) || indirect(source) || (source eq removedFile)) {
-        targets foreach (_.delete)
-        targets -= source
-      }
-
-      (direct, indirect)
-    }
-
-    /** Return the set of files that depend on the given changed files.
-     *  It computes the transitive closure up to the given depth.
-     */
-    def dependentFiles(depth: Int, changed: Set[AbstractFile]): Set[AbstractFile] = {
-      val indirect = new mutable.HashSet[AbstractFile]
-      val newInvalidations = new mutable.HashSet[AbstractFile]
-
-      def invalid(file: AbstractFile) =
-        indirect(file) || changed(file) || (file eq removedFile)
-
-      def go(i: Int) : Unit = if(i > 0) {
-        newInvalidations.clear
-        for((target, depends) <- dependencies if !invalid(target);
-            d <- depends)
-          newInvalidations(target) ||= invalid(d)
-
-        indirect ++= newInvalidations
-        if (!newInvalidations.isEmpty) go(i - 1)
-      }
-
-      go(depth)
-
-      indirect --= changed
-    }
-
-    def writeTo(file: AbstractFile, fromFile: AbstractFile => String): Unit =
-      writeToFile(file)(out => writeTo(new PrintStream(out), fromFile))
-
-    def writeTo(print: PrintStream, fromFile: AbstractFile => String): Unit = {
-      def emit(tracker: Tracker) =
-        for ((f, ds) <- tracker; d <- ds) print.println(fromFile(f) + arrow + fromFile(d))
-
-      cleanEmpty
-      print.println(classpath)
-      print.println(separator)
-      emit(dependencies)
-      print.println(separator)
-      emit(targets)
-    }
-  }
-
-  object FileDependencies {
-    private val separator:String = "-------"
-    private val arrow = " -> "
-    private val removedFile = new VirtualFile("removed")
-
-    private def validLine(l: String) = (l != null) && (l != separator)
-
-    def readFrom(file: AbstractFile, toFile: String => AbstractFile): Option[FileDependencies] =
-      readFromFile(file) { in =>
-        val reader = new BufferedReader(new InputStreamReader(in))
-        val it = new FileDependencies(reader.readLine)
-
-        def readLines(valid: Boolean)(f: (AbstractFile, AbstractFile) => Unit): Boolean = {
-          var continue = valid
-          var line: String = null
-          while (continue && {line = reader.readLine; validLine(line)}) {
-            line.split(arrow) match {
-              case Array(from, on) => f(toFile(from), toFile(on))
-              case _ =>
-                global.inform("Parse error: Unrecognised string " + line)
-                continue = false
-            }
-          }
-          continue
-        }
-
-        reader.readLine
-
-        val dResult = readLines(true)(
-          (_, _) match {
-            case (null, _)          => // fromFile is removed, it's ok
-            case (fromFile, null)   =>
-              // onFile is removed, should recompile fromFile
-              it.depends(fromFile, removedFile)
-            case (fromFile, onFile) => it.depends(fromFile, onFile)
-          })
-
-        readLines(dResult)(
-          (_, _) match {
-            case (null, null)             =>
-              // source and target are all removed, it's ok
-            case (null, targetFile)       =>
-              // source is removed, should remove relative target later
-              it.emits(removedFile, targetFile)
-            case (_, null)                =>
-              // it may have been cleaned outside, or removed during the last phase
-            case (sourceFile, targetFile) => it.emits(sourceFile, targetFile)
-          })
-
-        Some(it)
-      }
-  }
-
-  def writeToFile[T](file: AbstractFile)(f: OutputStream => T) : T = {
-    val out = file.bufferedOutput
-    try {
-      f(out)
-    } finally {
-      out.close
-    }
-  }
-
-  def readFromFile[T](file: AbstractFile)(f: InputStream => T) : T = {
-    val in = file.input
-    try{
-      f(in)
-    } finally {
-      in.close
-    }
-  }
-}
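
Concretely, `writeTo` above serializes the tracker state as a small plain-text file: the classpath on the first line, a separator, the dependency edges ("from -> on"), another separator, and finally the emitted-target edges ("source -> classfile"). An invented example:

    /home/user/project/classes:/usr/share/scala/lib/scala-library.jar
    -------
    src/A.scala -> src/B.scala
    -------
    src/A.scala -> classes/A.class
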
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
deleted file mode 100644
index a091b04..0000000
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author  David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-
-import scala.util.control.ControlThrowable
-import reporters.Reporter
-import scala.reflect.internal.util.{ NoPosition, BatchSourceFile}
-import io.{ File, Directory }
-import DocParser.Parsed
-
-/** A documentation processor controls the process of generating Scala
-  * documentation, which is as follows.
-  *
-  * * A simplified compiler instance (with only the front-end phases enabled)
-  *   is created, and additional ''sourceless'' comments are registered.
-  * * Documentable files are compiled, thereby filling the compiler's symbol table.
-  * * A documentation model is extracted from the post-compilation symbol table.
-  * * A generator is used to transform the model into the correct final format (HTML).
-  *
-  * A processor contains a single compiler instantiated from the processor's
-  * `settings`. Each call to `document` uses the same compiler instance with
-  * the same symbol table. In particular, this implies that the scaladoc site
-  * obtained from a call to `run` will contain documentation about files compiled
-  * during previous calls to the same processor's `run` method.
-  *
-  * @param reporter The reporter to which both documentation and compilation errors will be reported.
-  * @param settings The settings to be used by the documenter and compiler for generating documentation.
-  *
-  * @author Gilles Dubochet */
-class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor =>
-  /** The unique compiler instance used by this processor and constructed from its `settings`. */
-  object compiler extends Global(settings, reporter) with interactive.RangePositions {
-    override protected def computeInternalPhases() {
-      phasesSet += syntaxAnalyzer
-      phasesSet += analyzer.namerFactory
-      phasesSet += analyzer.packageObjects
-      phasesSet += analyzer.typerFactory
-    }
-    override def forScaladoc = true
-  }
-
-  /** Creates a scaladoc site for all symbols defined in this call's `source`,
-    * as well as those defined in `sources` of previous calls to the same processor.
-    * @param source The list of paths (relative to the compiler's source path,
-    *        or absolute) of files to document or the source code. */
-  def makeUniverse(source: Either[List[String], String]): Option[Universe] = {
-    assert(settings.docformat.value == "html")
-    source match {
-      case Left(files) =>
-        new compiler.Run() compile files
-      case Right(sourceCode) =>
-        new compiler.Run() compileSources List(new BatchSourceFile("newSource", sourceCode))
-    }
-
-    if (reporter.hasErrors)
-      return None
-
-    val extraTemplatesToDocument: Set[compiler.Symbol] = {
-      if (settings.docUncompilable.isDefault) Set()
-      else {
-        val uncompilable = new {
-          val global: compiler.type = compiler
-          val settings = processor.settings
-        } with Uncompilable { }
-
-        compiler.docComments ++= uncompilable.comments
-        docdbg("" + uncompilable)
-
-        uncompilable.templates
-      }
-    }
-
-    val modelFactory = (
-      new { override val global: compiler.type = compiler }
-        with model.ModelFactory(compiler, settings)
-        with model.ModelFactoryImplicitSupport
-        with model.ModelFactoryTypeSupport
-        with model.diagram.DiagramFactory
-        with model.CommentFactory
-        with model.TreeFactory
-        with model.MemberLookup {
-          override def templateShouldDocument(sym: compiler.Symbol, inTpl: DocTemplateImpl) =
-            extraTemplatesToDocument(sym) || super.templateShouldDocument(sym, inTpl)
-        }
-    )
-
-    modelFactory.makeModel match {
-      case Some(madeModel) =>
-        if (!settings.scaladocQuietRun)
-          println("model contains " + modelFactory.templatesCount + " documentable templates")
-        Some(madeModel)
-      case None =>
-        if (!settings.scaladocQuietRun)
-          println("no documentable class found in compilation units")
-        None
-    }
-  }
-
-  object NoCompilerRunException extends ControlThrowable { }
-
-  val documentError: PartialFunction[Throwable, Unit] = {
-    case NoCompilerRunException =>
-      reporter.info(null, "No documentation generated with unsucessful compiler run", false)
-    case _: ClassNotFoundException =>
-      ()
-  }
-
-  /** Generate document(s) for all `files` containing scaladoc documentation.
-    * @param files The list of paths (relative to the compiler's source path, or absolute) of files to document. */
-  def document(files: List[String]) {
-    def generate() = {
-      import doclet._
-      val docletClass    = Class.forName(settings.docgenerator.value) // default is html.Doclet
-      val docletInstance = docletClass.newInstance().asInstanceOf[Generator]
-
-      docletInstance match {
-        case universer: Universer =>
-          val universe = makeUniverse(Left(files)) getOrElse { throw NoCompilerRunException }
-          universer setUniverse universe
-
-          docletInstance match {
-            case indexer: Indexer => indexer setIndex model.IndexModelFactory.makeIndex(universe)
-            case _                => ()
-          }
-        case _ => ()
-      }
-      docletInstance.generate
-    }
-
-    try generate()
-    catch documentError
-  }
-
-  private[doc] def docdbg(msg: String) {
-    if (settings.Ydocdebug.value)
-      println(msg)
-  }
-}
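
A hypothetical driver for the factory above, using only the constructor and the `document` method shown in this file (the settings/reporter wiring and the file path are invented for illustration):

    import scala.tools.nsc.{ doc, reporters }

    val settings = new doc.Settings(Console println _)
    val reporter = new reporters.ConsoleReporter(settings)
    new doc.DocFactory(reporter, settings) document List("src/Widget.scala")
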
diff --git a/src/compiler/scala/tools/nsc/doc/DocParser.scala b/src/compiler/scala/tools/nsc/doc/DocParser.scala
deleted file mode 100644
index 27c995e..0000000
--- a/src/compiler/scala/tools/nsc/doc/DocParser.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools
-package nsc
-package doc
-
-import reporters._
-import scala.reflect.internal.util._
-import interactive.RangePositions
-import DocParser.Parsed
-
-/** A very minimal global customized for extracting `DocDefs`.  It stops
- *  right after parsing so it can read `DocDefs` from source code which would
- *  otherwise cause the compiler to go haywire.
- */
-class DocParser(settings: nsc.Settings, reporter: Reporter)
-        extends Global(settings, reporter)
-           with RangePositions {
-
-  def this(settings: Settings) = this(settings, new ConsoleReporter(settings))
-  def this() = this(new Settings(Console println _))
-
-  // the usual global initialization
-  locally { new Run() }
-
-  override def forScaladoc = true
-  override protected def computeInternalPhases() {
-    phasesSet += syntaxAnalyzer
-  }
-
-  /** Returns a list of `DocParser.Parseds`, which hold the DocDefs found
-   *  in the given code along with the surrounding trees.
-   */
-  def docDefs(code: String) = {
-    def loop(enclosing: List[Tree], tree: Tree): List[Parsed] = tree match {
-      case x: PackageDef => x.stats flatMap (t => loop(enclosing :+ x, t))
-      case x: DocDef     => new Parsed(enclosing, x) :: loop(enclosing :+ x.definition, x.definition)
-      case x             => x.children flatMap (t => loop(enclosing, t))
-    }
-    loop(Nil, docUnit(code))
-  }
-
-  /** A compilation unit containing parsed source.
-   */
-  def docUnit(code: String) = {
-    val unit    = new CompilationUnit(new BatchSourceFile("<console>", code))
-    val scanner = new syntaxAnalyzer.UnitParser(unit)
-
-    scanner.compilationUnit()
-  }
-}
-
-/** Since the DocParser's whole reason for existing involves trashing a
- *  global, it is designed to bottle up general `Global#Tree` types rather
- *  than path dependent ones.  The recipient will have to deal.
- */
-object DocParser {
-  type Tree    = Global#Tree
-  type DefTree = Global#DefTree
-  type DocDef  = Global#DocDef
-  type Name    = Global#Name
-
-  class Parsed(val enclosing: List[Tree], val docDef: DocDef) {
-    def nameChain: List[Name] = (enclosing :+ docDef.definition) collect { case x: DefTree => x.name }
-    def raw: String           = docDef.comment.raw
-
-    override def toString = (
-      nameChain.init.map(x => if (x.isTypeName) x + "#" else x + ".").mkString + nameChain.last
-    )
-  }
-}
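
And a hypothetical use of the parser above, relying only on the no-argument constructor and `docDefs` defined in this file (the source snippet is made up):

    val parser = new scala.tools.nsc.doc.DocParser()
    val parsed = parser.docDefs("/** A widget. */ class Widget")
    parsed foreach (p => println(p.nameChain.mkString(".") + " -> " + p.raw))
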
diff --git a/src/compiler/scala/tools/nsc/doc/Index.scala b/src/compiler/scala/tools/nsc/doc/Index.scala
deleted file mode 100644
index f9b9eec..0000000
--- a/src/compiler/scala/tools/nsc/doc/Index.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Martin Odersky
- */
-
-package scala.tools.nsc.doc
-
-import scala.collection._
-
-
-trait Index {
-
-  type SymbolMap = SortedMap[String, SortedSet[model.MemberEntity]]
-
-  def firstLetterIndex: Map[Char, SymbolMap]
-
-}
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
deleted file mode 100644
index 8c0628c..0000000
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ /dev/null
@@ -1,365 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Martin Odersky
- */
-
-package scala.tools.nsc
-package doc
-
-import java.io.File
-import scala.language.postfixOps
-
-/** An extended version of compiler settings, with additional Scaladoc-specific options.
-  * @param error A function that prints a string to the appropriate error stream
-  * @param printMsg A function that prints the string plainly, without the extra boilerplate of an error message */
-class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) extends scala.tools.nsc.Settings(error) {
-
-  /** A setting that defines in which format the documentation is output. ''Note:'' this setting is currently always
-    * `html`. */
-  val docformat = ChoiceSetting (
-    "-doc-format",
-    "format",
-    "Selects in which format documentation is rendered",
-    List("html"),
-    "html"
-  )
-
-  /** A setting that defines the overall title of the documentation, typically the name of the library being
-    * documented. ''Note:'' This setting is currently not used. */
-  val doctitle = StringSetting (
-    "-doc-title",
-    "title",
-    "The overall name of the Scaladoc site",
-    ""
-  )
-
-  /** A setting that defines the overall version number of the documentation, typically the version of the library being
-    * documented. ''Note:'' This setting is currently not used. */
-  val docversion = StringSetting (
-    "-doc-version",
-    "version",
-    "An optional version number, to be appended to the title",
-    ""
-  )
-
-  val docfooter = StringSetting (
-    "-doc-footer",
-    "footer",
-    "A footer on every ScalaDoc page, by default the EPFL/Typesafe copyright notice. Can be overridden with a custom footer.",
-    ""
-  )
-
-  val docUncompilable = StringSetting (
-    "-doc-no-compile",
-    "path",
-    "A directory containing sources which should be parsed, no more (e.g. AnyRef.scala)",
-    ""
-  )
-
-  lazy val uncompilableFiles = docUncompilable.value match {
-    case ""     => Nil
-    case path   => io.Directory(path).deepFiles filter (_ hasExtension "scala") toList
-  }
-
-  /** A setting that defines a URL to be concatenated with source locations to show a link to source files.
-   * If needed, the sourcepath option can be used to exclude an undesired initial part of the link to the sources. */
-  val docsourceurl = StringSetting (
-    "-doc-source-url",
-    "url",
-    "A URL pattern used to build links to template sources; use variables, for example: ?{TPL_NAME} ('Seq'), ?{TPL_OWNER} ('scala.collection'), ?{FILE_PATH} ('scala/collection/Seq')",
-    ""
-  )
-
-  val docExternalDoc = MultiStringSetting (
-    "-doc-external-doc",
-    "external-doc",
-    "comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies."
-  )
-
-  val useStupidTypes = BooleanSetting (
-    "-Yuse-stupid-types",
-    "Print the types of inherited members as seen from their original definition context. Hint: you don't want to do that!"
-  )
-
-  val docgenerator = StringSetting (
-    "-doc-generator",
-    "class-name",
-    "The fully qualified name of a doclet class, which will be used to generate the documentation",
-    "scala.tools.nsc.doc.html.Doclet"
-  )
-
-  val docRootContent = PathSetting (
-    "-doc-root-content",
-    "The file from which the root package documentation should be imported.",
-    ""
-  )
-
-  val docImplicits = BooleanSetting (
-    "-implicits",
-    "Document members inherited by implicit conversions."
-  )
-
-  val docImplicitsDebug = BooleanSetting (
-    "-implicits-debug",
-    "Show debugging information for members inherited by implicit conversions."
-  )
-
-  val docImplicitsShowAll = BooleanSetting (
-    "-implicits-show-all",
-    "Show members inherited by implicit conversions that are impossible in the default scope. " +
-    "(for example conversions that require Numeric[String] to be in scope)"
-  )
-
-  val docImplicitsSoundShadowing = BooleanSetting (
-    "-implicits-sound-shadowing",
-    "Use a sound implicit shadowing calculation. Note: this interacts badly with usecases, so " +
-    "only use it if you haven't defined usecase for implicitly inherited members."
-  )
-
-  val docImplicitsHide = MultiStringSetting (
-	  "-implicits-hide",
-    "implicit(s)",
-    "Hide the members inherited by the given comma separated, fully qualified implicit conversions. Add dot (.) to include default conversions."
-  )
-
-  val docDiagrams = BooleanSetting (
-    "-diagrams",
-    "Create inheritance diagrams for classes, traits and packages."
-  )
-
-  val docDiagramsDebug = BooleanSetting (
-    "-diagrams-debug",
-    "Show debugging information for the diagram creation process."
-  )
-
-  val docDiagramsDotPath = PathSetting (
-    "-diagrams-dot-path",
-    "The path to the dot executable used to generate the inheritance diagrams. Eg: /usr/bin/dot",
-    "dot" // by default, just pick up the system-wide dot
-  )
-
-  /** The maximum number of normal classes to show in the diagram */
-  val docDiagramsMaxNormalClasses = IntSetting(
-    "-diagrams-max-classes",
-    "The maximum number of superclasses or subclasses to show in a diagram",
-    15,
-    None,
-    _ => None
-  )
-
-  /** The maximum number of implicit classes to show in the diagram */
-  val docDiagramsMaxImplicitClasses = IntSetting(
-    "-diagrams-max-implicits",
-    "The maximum number of implicitly converted classes to show in a diagram",
-    10,
-    None,
-    _ => None
-  )
-
-  val docDiagramsDotTimeout = IntSetting(
-    "-diagrams-dot-timeout",
-    "The timeout before the graphviz dot util is forcefully closed, in seconds (default: 10)",
-    10,
-    None,
-    _ => None
-  )
-
-  val docDiagramsDotRestart = IntSetting(
-    "-diagrams-dot-restart",
-    "The number of times to restart a malfunctioning dot process before disabling diagrams (default: 5)",
-    5,
-    None,
-    _ => None
-  )
-
-  val docRawOutput = BooleanSetting (
-    "-raw-output",
-    "For each html file, create another .html.raw file containing only the text. (can be used for quickly diffing two scaladoc outputs)"
-  )
-
-  val docNoPrefixes = BooleanSetting (
-    "-no-prefixes",
-    "Prevents generating prefixes in types, possibly creating ambiguous references, but significantly speeding up scaladoc."
-  )
-
-  val docNoLinkWarnings = BooleanSetting (
-    "-no-link-warnings",
-    "Avoid warnings for ambiguous and incorrect links."
-  )
-
-  val docSkipPackages = StringSetting (
-    "-skip-packages",
-    "<package1>:...:<packageN>",
-    "A colon-delimited list of fully qualified package names that will be skipped from scaladoc.",
-    ""
-  )
-
-  val docExpandAllTypes = BooleanSetting (
-    "-expand-all-types",
-    "Expand all type aliases and abstract types into full template pages. (locally this can be done with the @template annotation)"
-  )
-
-  val docExternalUrls = MultiStringSetting (
-    "-external-urls",
-    "externalUrl(s)",
-    "(deprecated) comma-separated list of package_names=doc_URL for external dependencies, where package names are ':'-separated"
-  )
-
-  val docGroups = BooleanSetting (
-    "-groups",
-    "Group similar functions together (based on the @group annotation)"
-  )
-
-  // Somewhere slightly before r18708 scaladoc stopped building unless the
-  // self-type check was suppressed.  I hijacked the slotted-for-removal-anyway
-  // suppress-vt-warnings option and renamed it for this purpose.
-  noSelfCheck.value = true
-
-  // For improved help output.
-  def scaladocSpecific = Set[Settings#Setting](
-    docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
-    docDiagrams, docDiagramsDebug, docDiagramsDotPath,
-    docDiagramsDotTimeout, docDiagramsDotRestart,
-    docImplicits, docImplicitsDebug, docImplicitsShowAll, docImplicitsHide,
-    docDiagramsMaxNormalClasses, docDiagramsMaxImplicitClasses,
-    docNoPrefixes, docNoLinkWarnings, docRawOutput, docSkipPackages,
-    docExpandAllTypes, docGroups
-  )
-  val isScaladocSpecific: String => Boolean = scaladocSpecific map (_.name)
-
-  override def isScaladoc = true
-
-  // set by the testsuite, when checking test output
-  var scaladocQuietRun = false
-
-  lazy val skipPackageNames =
-    if (docSkipPackages.value == "")
-      Set[String]()
-    else
-      docSkipPackages.value.toLowerCase.split(':').toSet
-
-  def skipPackage(qname: String) =
-    skipPackageNames(qname.toLowerCase)
-
-  lazy val hiddenImplicits: Set[String] = {
-    if (docImplicitsHide.value.isEmpty) hardcoded.commonConversionTargets
-    else docImplicitsHide.value.toSet flatMap { name: String =>
-      if(name == ".") hardcoded.commonConversionTargets
-      else Set(name)
-    }
-  }
-
-  def appendIndex(url: String): String = url.stripSuffix("index.html").stripSuffix("/") + "/index.html"
-
-  // Deprecated together with 'docExternalUrls' option.
-  lazy val extUrlPackageMapping: Map[String, String] = (Map.empty[String, String] /: docExternalUrls.value) {
-    case (map, binding) =>
-      val idx = binding indexOf "="
-      val pkgs = binding substring (0, idx) split ":"
-      val url = appendIndex(binding substring (idx + 1))
-      map ++ (pkgs map (_ -> url))
-  }
-
-  lazy val extUrlMapping: Map[String, String] = docExternalDoc.value flatMap { s =>
-    val idx = s.indexOf("#")
-    if (idx > 0) {
-      val (first, last) = s.splitAt(idx)
-      Some(new File(first).getAbsolutePath -> appendIndex(last.substring(1)))
-    } else {
-      error(s"Illegal -doc-external-doc option; expected a pair with '#' separator, found: '$s'")
-      None
-    }
-  } toMap
-
-  /**
-   *  This is the hardcoded area of Scaladoc. This is where "undesirable" stuff gets eliminated. I know it's not pretty,
-   *  but ultimately scaladoc has to be useful. :)
-   */
-  object hardcoded {
-
-    /** The common context bounds and some human-readable explanations. Feel free to add more explanations.
-     *  `<root>.scala.package.Numeric` is the type class
-     *  `tparam` is the name of the type parameter it gets (this only describes type classes with 1 type param)
-     *  the function result should be a human-readable description of the type class
-     */
-    val knownTypeClasses: Map[String, String => String] = Map() +
-      ("scala.math.Numeric"                     -> ((tparam: String) => tparam + " is a numeric class, such as Int, Long, Float or Double")) +
-      ("scala.math.Integral"                    -> ((tparam: String) => tparam + " is an integral numeric class, such as Int or Long")) +
-      ("scala.math.Fractional"                  -> ((tparam: String) => tparam + " is a fractional numeric class, such as Float or Double")) +
-      ("scala.reflect.Manifest"                 -> ((tparam: String) => tparam + " is accompanied by a Manifest, which is a runtime representation of its type that survives erasure")) +
-      ("scala.reflect.ClassManifest"            -> ((tparam: String) => tparam + " is accompanied by a ClassManifest, which is a runtime representation of its type that survives erasure")) +
-      ("scala.reflect.OptManifest"              -> ((tparam: String) => tparam + " is accompanied by an OptManifest, which can be either a runtime representation of its type or the NoManifest, which means the runtime type is not available")) +
-      ("scala.reflect.ClassTag"                 -> ((tparam: String) => tparam + " is accompanied by a ClassTag, which is a runtime representation of its type that survives erasure")) +
-      ("scala.reflect.api.TypeTags.WeakTypeTag" -> ((tparam: String) => tparam + " is accompanied by an WeakTypeTag, which is a runtime representation of its type that survives erasure")) +
-      ("scala.reflect.api.TypeTags.TypeTag"     -> ((tparam: String) => tparam + " is accompanied by a TypeTag, which is a runtime representation of its type that survives erasure"))
-
-    /**
-     * Set of classes to exclude from index and diagrams
-     * TODO: Should be configurable
-     */
-    def isExcluded(qname: String) = {
-      ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") ||
-         qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction")
-       ) && !(
-        qname == "scala.Tuple1" || qname == "scala.Tuple2" ||
-        qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" ||
-        qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" ||
-        qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" ||
-        qname == "scala.runtime.AbstractFunction2"
-      )
-     )
-    }
-
-    /** Common conversion targets that affect any class in Scala */
-    val commonConversionTargets = Set(
-      "scala.Predef.any2stringfmt",
-      "scala.Predef.any2stringadd",
-      "scala.Predef.any2ArrowAssoc",
-      "scala.Predef.any2Ensuring",
-      "scala.collection.TraversableOnce.alternateImplicit")
-
-    /** There's a reason all these are specialized by hand but documenting each of them is beyond the point */
-    val arraySkipConversions = List(
-      "scala.Predef.refArrayOps",
-      "scala.Predef.intArrayOps",
-      "scala.Predef.doubleArrayOps",
-      "scala.Predef.longArrayOps",
-      "scala.Predef.floatArrayOps",
-      "scala.Predef.charArrayOps",
-      "scala.Predef.byteArrayOps",
-      "scala.Predef.shortArrayOps",
-      "scala.Predef.booleanArrayOps",
-      "scala.Predef.unitArrayOps",
-      "scala.LowPriorityImplicits.wrapRefArray",
-      "scala.LowPriorityImplicits.wrapIntArray",
-      "scala.LowPriorityImplicits.wrapDoubleArray",
-      "scala.LowPriorityImplicits.wrapLongArray",
-      "scala.LowPriorityImplicits.wrapFloatArray",
-      "scala.LowPriorityImplicits.wrapCharArray",
-      "scala.LowPriorityImplicits.wrapByteArray",
-      "scala.LowPriorityImplicits.wrapShortArray",
-      "scala.LowPriorityImplicits.wrapBooleanArray",
-      "scala.LowPriorityImplicits.wrapUnitArray",
-      "scala.LowPriorityImplicits.genericWrapArray")
-
-    // included as names as here we don't have access to a Global with Definitions :(
-    def valueClassList = List("unit", "boolean", "byte", "short", "char", "int", "long", "float", "double")
-    def valueClassFilterPrefixes = List("scala.LowPriorityImplicits", "scala.Predef")
-
-    /** Dirty, dirty, dirty hack: the value params conversions can all kick in -- and they are disambiguated by priority
-     *  but showing priority in scaladoc would make no sense -- so we have to manually remove the conversions that we
-     *  know will never get a chance to kick in. Anyway, DIRTY DIRTY DIRTY! */
-    def valueClassFilter(value: String, conversionName: String): Boolean = {
-      val valueName = value.toLowerCase
-      val otherValues = valueClassList.filterNot(_ == valueName)
-
-      for (prefix <- valueClassFilterPrefixes)
-        if (conversionName.startsWith(prefix))
-          for (otherValue <- otherValues)
-            if (conversionName.startsWith(prefix + "." + otherValue))
-              return false
-
-      true
-    }
-  }
-}
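
As a concrete illustration of `appendIndex` above, all of the following spellings normalize to the same value when called on a Settings instance (URLs invented for the example):

    appendIndex("http://example.org/api/current")             // "http://example.org/api/current/index.html"
    appendIndex("http://example.org/api/current/")             // same
    appendIndex("http://example.org/api/current/index.html")   // same
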
diff --git a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
deleted file mode 100644
index d3e5c86..0000000
--- a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package doc
-import scala.language.implicitConversions
-import scala.language.postfixOps
-
-/** Some glue between DocParser (which reads source files which can't be compiled)
- *  and the scaladoc model.
- */
-trait Uncompilable {
-  val global: Global
-  val settings: Settings
-
-  import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, Name, DocComment, NoSymbol }
-  import global.definitions.AnyRefClass
-  import global.rootMirror.RootClass
-
-  private implicit def translateName(name: Global#Name) =
-    if (name.isTypeName) newTypeName("" + name) else newTermName("" + name)
-
-  def docSymbol(p: DocParser.Parsed) = p.nameChain.foldLeft(RootClass: Symbol)(_.tpe member _)
-  def docDefs(code: String)          = new DocParser(settings, reporter) docDefs code
-  def docPairs(code: String)         = docDefs(code) map (p => (docSymbol(p), new DocComment(p.raw)))
-
-  lazy val pairs = files flatMap { f =>
-    val comments = docPairs(f.slurp())
-    if (settings.verbose.value)
-      inform("Found %d doc comments in parse-only file %s: %s".format(comments.size, f, comments.map(_._1).mkString(", ")))
-
-    comments
-  }
-  def files     = settings.uncompilableFiles
-  def symbols   = pairs map (_._1)
-  def templates = symbols filter (x => x.isClass || x.isTrait || x == AnyRefClass/* which is now a type alias */) toSet
-  def comments = {
-    if (settings.debug.value || settings.verbose.value)
-      inform("Found %d uncompilable files: %s".format(files.size, files mkString ", "))
-
-    if (pairs.isEmpty)
-      warning("no doc comments read from " + settings.docUncompilable.value)
-
-    pairs
-  }
-  override def toString = pairs.size + " uncompilable symbols:\n" + (
-    symbols filterNot (_ == NoSymbol) map (x => "  " + x.owner.fullName + " " + x.defString) mkString "\n"
-  )
-}
diff --git a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
deleted file mode 100755
index f509c63..0000000
--- a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
+++ /dev/null
@@ -1,955 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2012 LAMP/EPFL
- * @author  Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package base
-
-import base.comment._
-import reporters.Reporter
-import scala.collection._
-import scala.util.matching.Regex
-import scala.annotation.switch
-import scala.reflect.internal.util.{NoPosition, Position}
-import scala.language.postfixOps
-
-/** The comment parser transforms raw comment strings into `Comment` objects.
-  * Call `parse` to run the parser. Note that the parser is stateless and
-  * should only be built once for a given Scaladoc run.
-  *
-  * @param reporter The reporter on which user messages (error, warnings) should be printed.
-  *
-  * @author Manohar Jonnalagedda
-  * @author Gilles Dubochet */
-trait CommentFactoryBase { this: MemberLookupBase =>
-
-  val global: Global
-  import global.{ reporter, definitions, Symbol }
-
-  /* Creates comments with necessary arguments */
-  def createComment (
-    body0:           Option[Body]     = None,
-    authors0:        List[Body]       = List.empty,
-    see0:            List[Body]       = List.empty,
-    result0:         Option[Body]     = None,
-    throws0:         Map[String,Body] = Map.empty,
-    valueParams0:    Map[String,Body] = Map.empty,
-    typeParams0:     Map[String,Body] = Map.empty,
-    version0:        Option[Body]     = None,
-    since0:          Option[Body]     = None,
-    todo0:           List[Body]       = List.empty,
-    deprecated0:     Option[Body]     = None,
-    note0:           List[Body]       = List.empty,
-    example0:        List[Body]       = List.empty,
-    constructor0:    Option[Body]     = None,
-    source0:         Option[String]   = None,
-    inheritDiagram0: List[String]     = List.empty,
-    contentDiagram0: List[String]     = List.empty,
-    group0:          Option[Body]     = None,
-    groupDesc0:      Map[String,Body] = Map.empty,
-    groupNames0:     Map[String,Body] = Map.empty,
-    groupPrio0:      Map[String,Body] = Map.empty
-  ) : Comment = new Comment{
-    val body           = if(body0 isDefined) body0.get else Body(Seq.empty)
-    val authors        = authors0
-    val see            = see0
-    val result         = result0
-    val throws         = throws0
-    val valueParams    = valueParams0
-    val typeParams     = typeParams0
-    val version        = version0
-    val since          = since0
-    val todo           = todo0
-    val deprecated     = deprecated0
-    val note           = note0
-    val example        = example0
-    val constructor    = constructor0
-    val source         = source0
-    val inheritDiagram = inheritDiagram0
-    val contentDiagram = contentDiagram0
-    val groupDesc      = groupDesc0
-    val group          =
-      group0 match {
-        case Some(Body(List(Paragraph(Chain(List(Summary(Text(groupId)))))))) => Some(groupId.toString.trim)
-        case _                                                                => None
-      }
-    val groupPrio      = groupPrio0 flatMap {
-      case (group, body) =>
-        try {
-          body match {
-            case Body(List(Paragraph(Chain(List(Summary(Text(prio))))))) => List(group -> prio.trim.toInt)
-            case _                                                       => List()
-          }
-        } catch {
-          case _: java.lang.NumberFormatException => List()
-        }
-    }
-    val groupNames     = groupNames0 flatMap {
-      case (group, body) =>
-        try {
-          body match {
-            case Body(List(Paragraph(Chain(List(Summary(Text(name))))))) if (!name.trim.contains("\n")) => List(group -> (name.trim))
-            case _                                                       => List()
-          }
-        } catch {
-          case _: java.lang.NumberFormatException => List()
-        }
-    }
-
-  }
-
-  private val endOfText = '\u0003'
-  private val endOfLine = '\u000A'
-
-  /** Something that should not have happened, happened, and Scaladoc should exit. */
-  private def oops(msg: String): Nothing =
-    throw FatalError("program logic: " + msg)
-
-  /** The body of a line, dropping the (optional) start star-marker,
-    * one leading whitespace and all trailing whitespace. */
-  private val CleanCommentLine =
-    new Regex("""(?:\s*\*\s?)?(.*)""")
-
-  /** Dangerous HTML tags that should be replaced by something safer,
-    * such as wiki syntax, or that should be dropped. */
-  private val DangerousTags =
-    new Regex("""<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>|<!--.*-->""")
-
-  /** Maps a dangerous HTML tag to a safe wiki replacement, or an empty string
-    * if it cannot be salvaged. */
-  private def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match {
-    case "p" | "div" => "\n\n"
-    case "h1"  => "\n= "
-    case "/h1" => " =\n"
-    case "h2"  => "\n== "
-    case "/h2" => " ==\n"
-    case "h3"  => "\n=== "
-    case "/h3" => " ===\n"
-    case "h4" | "h5" | "h6" => "\n==== "
-    case "/h4" | "/h5" | "/h6" => " ====\n"
-    case "li" => "\n *  - "
-    case _ => ""
-  }
-
-  /** Javadoc tags that should be replaced by something useful, such as wiki
-    * syntax, or that should be dropped. */
-  private val JavadocTags =
-    new Regex("""\{\@(code|docRoot|inheritDoc|link|linkplain|literal|value)([^}]*)\}""")
-
-  /** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */
-  private def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match {
-    case "code" => "`" + mtch.group(2) + "`"
-    case "docRoot"  => ""
-    case "inheritDoc" => ""
-    case "link"  => "`" + mtch.group(2) + "`"
-    case "linkplain" => "`" + mtch.group(2) + "`"
-    case "literal"  => mtch.group(2)
-    case "value" => "`" + mtch.group(2) + "`"
-    case _ => ""
-  }
-
-  /** Safe HTML tags that can be kept. */
-  private val SafeTags =
-    new Regex("""((&\w+;)|(&#\d+;)|(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|cite|code|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>))""")
-
-  private val safeTagMarker = '\u000E'
-
-  /** A Scaladoc tag not linked to a symbol and not followed by text */
-  private val SingleTagRegex =
-    new Regex("""\s*@(\S+)\s*""")
-
-  /** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */
-  private val SimpleTagRegex =
-    new Regex("""\s*@(\S+)\s+(.*)""")
-
-  /** A Scaladoc tag linked to a symbol. Returns the name of the tag, the name
-    * of the symbol, and the rest of the line. */
-  private val SymbolTagRegex =
-    new Regex("""\s*@(param|tparam|throws|groupdesc|groupname|groupprio)\s+(\S*)\s*(.*)""")
-
-  /** The start of a scaladoc code block */
-  private val CodeBlockStartRegex =
-    new Regex("""(.*?)((?:\{\{\{)|(?:\u000E<pre(?: [^>]*)?>\u000E))(.*)""")
-
-  /** The end of a scaladoc code block */
-  private val CodeBlockEndRegex =
-    new Regex("""(.*?)((?:\}\}\})|(?:\u000E</pre>\u000E))(.*)""")
-
-  /** A key used for a tag map. The key is built from the name of the tag and
-    * from the linked symbol if the tag has one.
-    * Equality on tag keys is structural. */
-  private sealed abstract class TagKey {
-    def name: String
-  }
-
-  private final case class SimpleTagKey(name: String) extends TagKey
-  private final case class SymbolTagKey(name: String, symbol: String) extends TagKey
-
-  /** Parses a raw comment string into a `Comment` object.
-    * @param comment The expanded comment string (including start and end markers) to be parsed.
-    * @param src     The raw comment source string.
-    * @param pos     The position of the comment in source. */
-  protected def parseAtSymbol(comment: String, src: String, pos: Position, siteOpt: Option[Symbol] = None): Comment = {
-    /** The cleaned raw comment as a list of lines. Cleaning removes comment
-      * start and end markers, line start markers and unnecessary whitespace. */
-    def clean(comment: String): List[String] = {
-      def cleanLine(line: String): String = {
-        //replaceAll removes trailing whitespaces
-        line.replaceAll("""\s+$""", "") match {
-          case CleanCommentLine(ctl) => ctl
-          case tl => tl
-        }
-      }
-      val strippedComment = comment.trim.stripPrefix("/*").stripSuffix("*/")
-      val safeComment = DangerousTags.replaceAllIn(strippedComment, { htmlReplacement(_) })
-      val javadoclessComment = JavadocTags.replaceAllIn(safeComment, { javadocReplacement(_) })
-      val markedTagComment =
-        SafeTags.replaceAllIn(javadoclessComment, { mtch =>
-          java.util.regex.Matcher.quoteReplacement(safeTagMarker + mtch.matched + safeTagMarker)
-        })
-      markedTagComment.lines.toList map (cleanLine(_))
-    }
-
-    /** Parses a comment (in the form of a list of lines) to a `Comment`
-      * instance, recursively on lines. To do so, it splits the whole comment
-      * into main body and tag bodies, then runs the `WikiParser` on each body
-      * before creating the comment instance.
-      *
-      * @param docBody     The body of the comment parsed until now.
-      * @param tags        All tags parsed until now.
-      * @param lastTagKey  The last parsed tag, or `None` if the tag section hasn't started. Lines that are not tagged
-      *                    are part of the previous tag or, if none exists, of the body.
-      * @param remaining   The lines that must still recursively be parsed.
-      * @param inCodeBlock Whether the next line is part of a code block (in which tags must not be parsed). */
-    def parse0 (
-      docBody: StringBuilder,
-      tags: Map[TagKey, List[String]],
-      lastTagKey: Option[TagKey],
-      remaining: List[String],
-      inCodeBlock: Boolean
-    ): Comment = remaining match {
-
-      case CodeBlockStartRegex(before, marker, after) :: ls if (!inCodeBlock) =>
-        if (!before.trim.isEmpty && !after.trim.isEmpty)
-          parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, false)
-        else if (!before.trim.isEmpty)
-          parse0(docBody, tags, lastTagKey, before :: marker :: ls, false)
-        else if (!after.trim.isEmpty)
-          parse0(docBody, tags, lastTagKey, marker :: after :: ls, true)
-        else lastTagKey match {
-          case Some(key) =>
-            val value =
-              ((tags get key): @unchecked) match {
-                case Some(b :: bs) => (b + endOfLine + marker) :: bs
-                case None => oops("lastTagKey set when no tag exists for key")
-              }
-            parse0(docBody, tags + (key -> value), lastTagKey, ls, true)
-          case None =>
-            parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, true)
-        }
-
-      case CodeBlockEndRegex(before, marker, after) :: ls =>
-        if (!before.trim.isEmpty && !after.trim.isEmpty)
-          parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, true)
-        else if (!before.trim.isEmpty)
-          parse0(docBody, tags, lastTagKey, before :: marker :: ls, true)
-        else if (!after.trim.isEmpty)
-          parse0(docBody, tags, lastTagKey, marker :: after :: ls, false)
-        else lastTagKey match {
-          case Some(key) =>
-            val value =
-              ((tags get key): @unchecked) match {
-                case Some(b :: bs) => (b + endOfLine + marker) :: bs
-                case None => oops("lastTagKey set when no tag exists for key")
-              }
-            parse0(docBody, tags + (key -> value), lastTagKey, ls, false)
-          case None =>
-            parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, false)
-        }
-
-      case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) =>
-        val key = SymbolTagKey(name, sym)
-        val value = body :: tags.getOrElse(key, Nil)
-        parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
-
-      case SimpleTagRegex(name, body) :: ls if (!inCodeBlock) =>
-        val key = SimpleTagKey(name)
-        val value = body :: tags.getOrElse(key, Nil)
-        parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
-
-      case SingleTagRegex(name) :: ls if (!inCodeBlock) =>
-        val key = SimpleTagKey(name)
-        val value = "" :: tags.getOrElse(key, Nil)
-        parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
-
-      case line :: ls if (lastTagKey.isDefined) =>
-        val key = lastTagKey.get
-        val value =
-          ((tags get key): @unchecked) match {
-            case Some(b :: bs) => (b + endOfLine + line) :: bs
-            case None => oops("lastTagKey set when no tag exists for key")
-          }
-        parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock)
-
-      case line :: ls =>
-        if (docBody.length > 0) docBody append endOfLine
-        docBody append line
-        parse0(docBody, tags, lastTagKey, ls, inCodeBlock)
-
-      case Nil =>
-        // Take the {inheritance, content} diagram keys aside, as they don't need any parsing
-        val inheritDiagramTag = SimpleTagKey("inheritanceDiagram")
-        val contentDiagramTag = SimpleTagKey("contentDiagram")
-
-        val inheritDiagramText: List[String] = tags.get(inheritDiagramTag) match {
-          case Some(list) => list
-          case None => List.empty
-        }
-
-        val contentDiagramText: List[String] = tags.get(contentDiagramTag) match {
-          case Some(list) => list
-          case None => List.empty
-        }
-
-        val stripTags=List(inheritDiagramTag, contentDiagramTag, SimpleTagKey("template"), SimpleTagKey("documentable"))
-        val tagsWithoutDiagram = tags.filterNot(pair => stripTags.contains(pair._1))
-
-        val bodyTags: mutable.Map[TagKey, List[Body]] =
-          mutable.Map(tagsWithoutDiagram mapValues {tag => tag map (parseWikiAtSymbol(_, pos, siteOpt))} toSeq: _*)
-
-        def oneTag(key: SimpleTagKey): Option[Body] =
-          ((bodyTags remove key): @unchecked) match {
-            case Some(r :: rs) =>
-              if (!rs.isEmpty) reporter.warning(pos, "Only one '@" + key.name + "' tag is allowed")
-              Some(r)
-            case None => None
-          }
-
-        def allTags(key: SimpleTagKey): List[Body] =
-          (bodyTags remove key) getOrElse Nil
-
-        def allSymsOneTag(key: TagKey): Map[String, Body] = {
-          val keys: Seq[SymbolTagKey] =
-            bodyTags.keys.toSeq flatMap {
-              case stk: SymbolTagKey if (stk.name == key.name) => Some(stk)
-              case stk: SimpleTagKey if (stk.name == key.name) =>
-                reporter.warning(pos, "Tag '@" + stk.name + "' must be followed by a symbol name")
-                None
-              case _ => None
-            }
-          val pairs: Seq[(String, Body)] =
-            for (key <- keys) yield {
-              val bs = (bodyTags remove key).get
-              if (bs.length > 1)
-                reporter.warning(pos, "Only one '@" + key.name + "' tag for symbol " + key.symbol + " is allowed")
-              (key.symbol, bs.head)
-            }
-          Map.empty[String, Body] ++ pairs
-        }
-
-        val com = createComment (
-          body0           = Some(parseWikiAtSymbol(docBody.toString, pos, siteOpt)),
-          authors0        = allTags(SimpleTagKey("author")),
-          see0            = allTags(SimpleTagKey("see")),
-          result0         = oneTag(SimpleTagKey("return")),
-          throws0         = allSymsOneTag(SimpleTagKey("throws")),
-          valueParams0    = allSymsOneTag(SimpleTagKey("param")),
-          typeParams0     = allSymsOneTag(SimpleTagKey("tparam")),
-          version0        = oneTag(SimpleTagKey("version")),
-          since0          = oneTag(SimpleTagKey("since")),
-          todo0           = allTags(SimpleTagKey("todo")),
-          deprecated0     = oneTag(SimpleTagKey("deprecated")),
-          note0           = allTags(SimpleTagKey("note")),
-          example0        = allTags(SimpleTagKey("example")),
-          constructor0    = oneTag(SimpleTagKey("constructor")),
-          source0         = Some(clean(src).mkString("\n")),
-          inheritDiagram0 = inheritDiagramText,
-          contentDiagram0 = contentDiagramText,
-          group0          = oneTag(SimpleTagKey("group")),
-          groupDesc0      = allSymsOneTag(SimpleTagKey("groupdesc")),
-          groupNames0     = allSymsOneTag(SimpleTagKey("groupname")),
-          groupPrio0      = allSymsOneTag(SimpleTagKey("groupprio"))
-        )
-
-        for ((key, _) <- bodyTags)
-          reporter.warning(pos, "Tag '@" + key.name + "' is not recognised")
-
-        com
-
-    }
-
-    parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), false)
-
-  }
-
-  /** Parses a string containing wiki syntax into a `Comment` object.
-    * Note that the string is assumed to be clean:
-    *  - Removed Scaladoc start and end markers.
-    *  - Removed start-of-line star and one whitespace afterwards (if present).
-    *  - Removed all end-of-line whitespace.
-    *  - Only `endOfLine` is used to mark line endings. */
-  def parseWikiAtSymbol(string: String, pos: Position, siteOpt: Option[Symbol]): Body = new WikiParser(string, pos, siteOpt).document()
-
-  /** TODO
-    *
-    * @author Ingo Maier
-    * @author Manohar Jonnalagedda
-    * @author Gilles Dubochet */
-  protected final class WikiParser(val buffer: String, pos: Position, siteOpt: Option[Symbol]) extends CharReader(buffer) { wiki =>
-    var summaryParsed = false
-
-    def document(): Body = {
-      val blocks = new mutable.ListBuffer[Block]
-      while (char != endOfText)
-        blocks += block()
-      Body(blocks.toList)
-    }
-
-    /* BLOCKS */
-
-    /** {{{ block ::= code | title | hrule | para }}} */
-    def block(): Block = {
-      if (checkSkipInitWhitespace("{{{"))
-        code()
-      else if (checkSkipInitWhitespace('='))
-        title()
-      else if (checkSkipInitWhitespace("----"))
-        hrule()
-      else if (checkList)
-        listBlock
-      else {
-        para()
-      }
-    }
-
-    /** listStyle ::= '-' spc | '1.' spc | 'I.' spc | 'i.' spc | 'A.' spc | 'a.' spc
-      * Characters used to build lists and their constructors */
-    protected val listStyles = Map[String, (Seq[Block] => Block)]( // TODO Should this be defined at some list companion?
-      "- "  -> ( UnorderedList(_) ),
-      "1. " -> ( OrderedList(_,"decimal") ),
-      "I. " -> ( OrderedList(_,"upperRoman") ),
-      "i. " -> ( OrderedList(_,"lowerRoman") ),
-      "A. " -> ( OrderedList(_,"upperAlpha") ),
-      "a. " -> ( OrderedList(_,"lowerAlpha") )
-    )
-
-    /** Checks if the current line is formed with more than one space and one of the listStyles */
-    def checkList =
-      (countWhitespace > 0) && (listStyles.keys exists { checkSkipInitWhitespace(_) })
-
-    /** {{{
-      * nListBlock ::= nLine { mListBlock }
-      *      nLine ::= nSpc listStyle para '\n'
-      * }}}
-      * Where n and m stand for the number of spaces. When `m > n`, a new list is nested. */
-    def listBlock: Block = {
-
-      /** Consumes one list item block and returns it, or None if the next block
-        * is not a list item or belongs to a different list. */
-      def listLine(indent: Int, style: String): Option[Block] =
-        if (countWhitespace > indent && checkList)
-          Some(listBlock)
-        else if (countWhitespace != indent || !checkSkipInitWhitespace(style))
-          None
-        else {
-          jumpWhitespace()
-          jump(style)
-          val p = Paragraph(inline(false))
-          blockEnded("end of list line ")
-          Some(p)
-        }
-
-      /** Consumes all list item blocks (possibly with nested lists) of the
-        * same list and returns the list block. */
-      def listLevel(indent: Int, style: String): Block = {
-        val lines = mutable.ListBuffer.empty[Block]
-        var line: Option[Block] = listLine(indent, style)
-        while (line.isDefined) {
-          lines += line.get
-          line = listLine(indent, style)
-        }
-        val constructor = listStyles(style)
-        constructor(lines)
-      }
-
-      val indent = countWhitespace
-      val style = (listStyles.keys find { checkSkipInitWhitespace(_) }).getOrElse(listStyles.keys.head)
-      listLevel(indent, style)
-    }
-
-    def code(): Block = {
-      jumpWhitespace()
-      jump("{{{")
-      val str = readUntil("}}}")
-      if (char == endOfText)
-        reportError(pos, "unclosed code block")
-      else
-        jump("}}}")
-      blockEnded("code block")
-      Code(normalizeIndentation(str))
-    }
-
-    /** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) '\n' }}} */
-    def title(): Block = {
-      jumpWhitespace()
-      val inLevel = repeatJump('=')
-      val text = inline(check("=" * inLevel))
-      val outLevel = repeatJump('=', inLevel)
-      if (inLevel != outLevel)
-        reportError(pos, "unbalanced or unclosed heading")
-      blockEnded("heading")
-      Title(text, inLevel)
-    }
-
-    /** {{{ hrule ::= "----" { '-' } '\n' }}} */
-    def hrule(): Block = {
-      jumpWhitespace()
-      repeatJump('-')
-      blockEnded("horizontal rule")
-      HorizontalRule()
-    }
-
-    /** {{{ para ::= inline '\n' }}} */
-    def para(): Block = {
-      val p =
-        if (summaryParsed)
-          Paragraph(inline(false))
-        else {
-          val s = summary()
-          val r =
-            if (checkParaEnded) List(s) else List(s, inline(false))
-          summaryParsed = true
-          Paragraph(Chain(r))
-        }
-      while (char == endOfLine && char != endOfText)
-        nextChar()
-      p
-    }
-
-    /* INLINES */
-
-    val OPEN_TAG = "^<([A-Za-z]+)( [^>]*)?(/?)>$".r
-    val CLOSE_TAG = "^</([A-Za-z]+)>$".r
-    private def readHTMLFrom(begin: HtmlTag): String = {
-      val list = mutable.ListBuffer.empty[String]
-      val stack = mutable.ListBuffer.empty[String]
-
-      begin.close match {
-        case Some(HtmlTag(CLOSE_TAG(s))) =>
-          stack += s
-        case _ =>
-          return ""
-      }
-
-      do {
-        val str = readUntil { char == safeTagMarker || char == endOfText }
-        nextChar()
-
-        list += str
-
-        str match {
-          case OPEN_TAG(s, _, standalone) => {
-            if (standalone != "/") {
-              stack += s
-            }
-          }
-          case CLOSE_TAG(s) => {
-            if (s == stack.last) {
-              stack.remove(stack.length-1)
-            }
-          }
-          case _ => ;
-        }
-      } while (stack.length > 0 && char != endOfText)
-
-      list mkString ""
-    }
-
-    def inline(isInlineEnd: => Boolean): Inline = {
-
-      def inline0(): Inline = {
-        if (char == safeTagMarker) {
-          val tag = htmlTag()
-          HtmlTag(tag.data + readHTMLFrom(tag))
-        }
-        else if (check("'''")) bold()
-        else if (check("''")) italic()
-        else if (check("`"))  monospace()
-        else if (check("__")) underline()
-        else if (check("^"))  superscript()
-        else if (check(",,")) subscript()
-        else if (check("[[")) link()
-        else {
-          val str = readUntil { char == safeTagMarker || check("''") || char == '`' || check("__") || char == '^' || check(",,") || check("[[") || isInlineEnd || checkParaEnded || char == endOfLine }
-          Text(str)
-        }
-      }
-
-      val inlines: List[Inline] = {
-        val iss = mutable.ListBuffer.empty[Inline]
-        iss += inline0()
-        while (!isInlineEnd && !checkParaEnded) {
-          val skipEndOfLine = if (char == endOfLine) {
-            nextChar()
-            true
-          } else {
-            false
-          }
-
-          val current = inline0()
-          (iss.last, current) match {
-            case (Text(t1), Text(t2)) if skipEndOfLine =>
-              iss.update(iss.length - 1, Text(t1 + endOfLine + t2))
-            case (i1, i2) if skipEndOfLine =>
-              iss ++= List(Text(endOfLine.toString), i2)
-            case _ => iss += current
-          }
-        }
-        iss.toList
-      }
-
-      inlines match {
-        case Nil => Text("")
-        case i :: Nil => i
-        case is => Chain(is)
-      }
-
-    }
-
-    def htmlTag(): HtmlTag = {
-      jump(safeTagMarker)
-      val read = readUntil(safeTagMarker)
-      if (char != endOfText) jump(safeTagMarker)
-      HtmlTag(read)
-    }
-
-    def bold(): Inline = {
-      jump("'''")
-      val i = inline(check("'''"))
-      jump("'''")
-      Bold(i)
-    }
-
-    def italic(): Inline = {
-      jump("''")
-      val i = inline(check("''"))
-      jump("''")
-      Italic(i)
-    }
-
-    def monospace(): Inline = {
-      jump("`")
-      val i = inline(check("`"))
-      jump("`")
-      Monospace(i)
-    }
-
-    def underline(): Inline = {
-      jump("__")
-      val i = inline(check("__"))
-      jump("__")
-      Underline(i)
-    }
-
-    def superscript(): Inline = {
-      jump("^")
-      val i = inline(check("^"))
-      if (jump("^")) {
-        Superscript(i)
-      } else {
-        Chain(Seq(Text("^"), i))
-      }
-    }
-
-    def subscript(): Inline = {
-      jump(",,")
-      val i = inline(check(",,"))
-      jump(",,")
-      Subscript(i)
-    }
-
-    def summary(): Inline = {
-      val i = inline(check("."))
-      Summary(
-        if (jump("."))
-          Chain(List(i, Text(".")))
-        else
-          i
-      )
-    }
-
-    def link(): Inline = {
-      val SchemeUri = """([a-z]+:.*)""".r
-      jump("[[")
-      var parens = 2 + repeatJump('[')
-      val start = "[" * parens
-      val stop  = "]" * parens
-      //println("link with " + parens + " matching parens")
-      val target = readUntil { check(stop) || check(" ") }
-      val title =
-        if (!check(stop)) Some({
-          jump(" ")
-          inline(check(stop))
-        })
-        else None
-      jump(stop)
-
-      (target, title) match {
-        case (SchemeUri(uri), optTitle) =>
-          Link(uri, optTitle getOrElse Text(uri))
-        case (qualName, optTitle) =>
-          makeEntityLink(optTitle getOrElse Text(target), pos, target, siteOpt)
-      }
-    }
-
-    /* UTILITY */
-
-    /** {{{ eol ::= { whitespace } '\n' }}} */
-    def blockEnded(blockType: String): Unit = {
-      if (char != endOfLine && char != endOfText) {
-        reportError(pos, "no additional content on same line after " + blockType)
-        jumpUntil(endOfLine)
-      }
-      while (char == endOfLine)
-        nextChar()
-    }
-
-    /**
-     *  Eliminates the (common) leading spaces in all lines, based on the first line
-     *  For indented pieces of code, it reduces the indent to the least whitespace prefix:
-     *    {{{
-     *       indented example
-     *       another indented line
-     *       if (condition)
-     *         then do something;
-     *       ^ this is the least whitespace prefix
-     *    }}}
-     */
-    def normalizeIndentation(_code: String): String = {
-
-      var code = _code.trim
-      var maxSkip = Integer.MAX_VALUE
-      var crtSkip = 0
-      var wsArea = true
-      var index = 0
-      var firstLine = true
-      var emptyLine = true
-
-      while (index < code.length) {
-        code(index) match {
-          case ' ' =>
-            if (wsArea)
-              crtSkip += 1
-          case c =>
-            wsArea = (c == '\n')
-            maxSkip = if (firstLine || emptyLine) maxSkip else if (maxSkip <= crtSkip) maxSkip else crtSkip
-            crtSkip = if (c == '\n') 0 else crtSkip
-            firstLine = if (c == '\n') false else firstLine
-            emptyLine = if (c == '\n') true else false
-        }
-        index += 1
-      }
-
-      if (maxSkip == 0)
-        code
-      else {
-        index = 0
-        val builder = new StringBuilder
-        while (index < code.length) {
-          builder.append(code(index))
-          if (code(index) == '\n') {
-            // we want to skip as many spaces as are available; if there are fewer spaces
-            // (as on empty lines), do not over-consume them
-            index += 1
-            val limit = index + maxSkip
-            while ((index < code.length) && (code(index) == ' ') && index < limit)
-              index += 1
-          }
-          else
-            index += 1
-        }
-        builder.toString
-      }
-    }
-
-    def checkParaEnded(): Boolean = {
-      (char == endOfText) ||
-      ((char == endOfLine) && {
-        val poff = offset
-        nextChar() // read EOL
-        val ok = {
-          checkSkipInitWhitespace(endOfLine) ||
-          checkSkipInitWhitespace('=') ||
-          checkSkipInitWhitespace("{{{") ||
-          checkList ||
-          checkSkipInitWhitespace('\u003D')
-        }
-        offset = poff
-        ok
-      })
-    }
-
-    def reportError(pos: Position, message: String) {
-      reporter.warning(pos, message)
-    }
-  }
-
-  protected sealed class CharReader(buffer: String) { reader =>
-
-    var offset: Int = 0
-    def char: Char =
-      if (offset >= buffer.length) endOfText else buffer charAt offset
-
-    final def nextChar() {
-      offset += 1
-    }
-
-    final def check(chars: String): Boolean = {
-      val poff = offset
-      val ok = jump(chars)
-      offset = poff
-      ok
-    }
-
-    def checkSkipInitWhitespace(c: Char): Boolean = {
-      val poff = offset
-      jumpWhitespace()
-      val ok = jump(c)
-      offset = poff
-      ok
-    }
-
-    def checkSkipInitWhitespace(chars: String): Boolean = {
-      val poff = offset
-      jumpWhitespace()
-      val (ok0, chars0) =
-        if (chars.charAt(0) == ' ')
-          (offset > poff, chars substring 1)
-        else
-          (true, chars)
-      val ok = ok0 && jump(chars0)
-      offset = poff
-      ok
-    }
-
-    def countWhitespace: Int = {
-      var count = 0
-      val poff = offset
-      while (isWhitespace(char) && char != endOfText) {
-        nextChar()
-        count += 1
-      }
-      offset = poff
-      count
-    }
-
-    /* JUMPERS */
-
-    /** jumps a character and consumes it
-      * @return true only if the correct character has been jumped */
-    final def jump(ch: Char): Boolean = {
-      if (char == ch) {
-        nextChar()
-        true
-      }
-      else false
-    }
-
-    /** jumps all the characters in chars, consuming them in the process.
-      * @return true only if the correct characters have been jumped */
-    final def jump(chars: String): Boolean = {
-      var index = 0
-      while (index < chars.length && char == chars.charAt(index) && char != endOfText) {
-        nextChar()
-        index += 1
-      }
-      index == chars.length
-    }
-
-    final def repeatJump(c: Char, max: Int = Int.MaxValue): Int = {
-      var count = 0
-      while (jump(c) && count < max)
-        count += 1
-      count
-    }
-
-    final def jumpUntil(ch: Char): Int = {
-      var count = 0
-      while (char != ch && char != endOfText) {
-        nextChar()
-        count += 1
-      }
-      count
-    }
-
-    final def jumpUntil(chars: String): Int = {
-      assert(chars.length > 0)
-      var count = 0
-      val c = chars.charAt(0)
-      while (!check(chars) && char != endOfText) {
-        nextChar()
-        while (char != c && char != endOfText) {
-          nextChar()
-          count += 1
-        }
-      }
-      count
-    }
-
-    final def jumpUntil(pred: => Boolean): Int = {
-      var count = 0
-      while (!pred && char != endOfText) {
-        nextChar()
-        count += 1
-      }
-      count
-    }
-
-    def jumpWhitespace() = jumpUntil(!isWhitespace(char))
-
-    /* READERS */
-
-    final def readUntil(c: Char): String = {
-      withRead {
-        while (char != c && char != endOfText) {
-          nextChar()
-        }
-      }
-    }
-
-    final def readUntil(chars: String): String = {
-      assert(chars.length > 0)
-      withRead {
-        val c = chars.charAt(0)
-        while (!check(chars) && char != endOfText) {
-          nextChar()
-          while (char != c && char != endOfText)
-            nextChar()
-        }
-      }
-    }
-
-    final def readUntil(pred: => Boolean): String = {
-      withRead {
-        while (char != endOfText && !pred) {
-          nextChar()
-        }
-      }
-    }
-
-    private def withRead(read: => Unit): String = {
-      val start = offset
-      read
-      buffer.substring(start, offset)
-    }
-
-
-    /* CHARS CLASSES */
-
-    def isWhitespace(c: Char) = c == ' ' || c == '\t'
-
-  }
-
-}
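
To see how the tag regexes above classify cleaned comment lines, here is a minimal standalone sketch (a hypothetical demo object, not part of the compiler) that replays the SimpleTagRegex and SymbolTagRegex patterns on two sample lines:

    object TagRegexDemo {
      // Same patterns as SimpleTagRegex and SymbolTagRegex above, applied to lines
      // after clean() has stripped the leading " * " markers.
      private val SimpleTag = """\s*@(\S+)\s+(.*)""".r
      private val SymbolTag = """\s*@(param|tparam|throws|groupdesc|groupname|groupprio)\s+(\S*)\s*(.*)""".r

      def main(args: Array[String]): Unit = {
        "@param x the value to wrap" match {
          case SymbolTag(name, sym, rest) => println(s"$name -> $sym: $rest")  // param -> x: the value to wrap
        }
        "@since 2.10.0" match {
          case SimpleTag(name, rest) => println(s"$name -> $rest")             // since -> 2.10.0
        }
      }
    }

parse0 applies these patterns line by line (outside code blocks) to decide whether a line opens a new tag or continues the previous one.
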
diff --git a/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala b/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala
deleted file mode 100755
index c111798..0000000
--- a/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- */
-
-package scala.tools.nsc
-package doc
-package base
-
-import scala.collection._
-
-sealed trait LinkTo
-final case class LinkToMember[Mbr, Tpl](mbr: Mbr, tpl: Tpl) extends LinkTo
-final case class LinkToTpl[Tpl](tpl: Tpl) extends LinkTo
-final case class LinkToExternal(name: String, url: String) extends LinkTo
-final case class Tooltip(name: String) extends LinkTo
diff --git a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
deleted file mode 100755
index cdcfeaa..0000000
--- a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
+++ /dev/null
@@ -1,206 +0,0 @@
-package scala.tools.nsc
-package doc
-package base
-
-import comment._
-
-/** This trait extracts all required information for documentation from compilation units.
- *  The base trait has been extracted to allow getting light-weight documentation
- *  for a particular symbol in the IDE. */
-trait MemberLookupBase {
-
-  val global: Global
-  import global._
-
-  def internalLink(sym: Symbol, site: Symbol): Option[LinkTo]
-  def chooseLink(links: List[LinkTo]): LinkTo
-  def toString(link: LinkTo): String
-  def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal]
-  def warnNoLink: Boolean
-
-  import global._
-  import rootMirror.{RootPackage, EmptyPackage}
-
-  private def isRoot(s: Symbol) = s.isRootSymbol || s.isEmptyPackage || s.isEmptyPackageClass
-
-  def makeEntityLink(title: Inline, pos: Position, query: String, siteOpt: Option[Symbol]) =
-    new EntityLink(title) { lazy val link = memberLookup(pos, query, siteOpt) }
-
-  private var showExplanation = true
-  private def explanation: String =
-    if (showExplanation) {
-      showExplanation = false
-      """
-      |Quick crash course on using Scaladoc links
-      |==========================================
-      |Disambiguating terms and types: Prefix terms with '$' and types with '!' in case both names are in use:
-      | - [[scala.collection.immutable.List!.apply class List's apply method]] and
-      | - [[scala.collection.immutable.List$.apply object List's apply method]]
-      |Disambiguating overloaded members: If a term is overloaded, you can indicate the first part of its signature followed by *:
-      | - [[[scala.collection.immutable.List$.fill[A](Int)(⇒A):List[A]* Fill with a single parameter]]]
-      | - [[[scala.collection.immutable.List$.fill[A](Int,Int)(⇒A):List[List[A]]* Fill with two parameters]]]
-      |Notes:
-      | - you can use any number of matching square brackets to avoid interference with the signature
-      | - you can use \\. to escape dots in prefixes (don't forget to use * at the end to match the signature!)
-      | - you can use \\# to escape hashes, otherwise they will be considered as delimiters, like dots.""".stripMargin
-    } else ""
-
-  def memberLookup(pos: Position, query: String, siteOpt: Option[Symbol]): LinkTo = {
-    var members = breakMembers(query)
-
-    // (1) First look in the root package, as most of the links are qualified
-    val fromRoot = lookupInRootPackage(pos, members)
-
-    // (2) Or recursively go into each containing template.
-    val fromParents = siteOpt.fold(Stream.empty[Symbol]) { s =>
-      Stream.iterate(s)(_.owner)
-    }.takeWhile (!isRoot(_)).map {
-      lookupInTemplate(pos, members, _)
-    }
-
-    val syms = (fromRoot +: fromParents) find (!_.isEmpty) getOrElse Nil
-
-    val links = syms flatMap { case (sym, site) => internalLink(sym, site) } match {
-      case Nil =>
-        // (3) Look at external links
-        syms.flatMap { case (sym, owner) =>
-          // reconstruct the original link
-          def linkName(sym: Symbol) = {
-            def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.isPackage) "$" else "")
-            val packageSuffix = if (sym.isPackage) ".package" else ""
-
-            sym.ownerChain.reverse.filterNot(isRoot(_)).map(nameString(_)).mkString(".") + packageSuffix
-          }
-
-          if (sym.isClass || sym.isModule || sym.isTrait || sym.isPackage)
-            findExternalLink(sym, linkName(sym))
-          else if (owner.isClass || owner.isModule || owner.isTrait || owner.isPackage)
-            findExternalLink(sym, linkName(owner) + "@" + externalSignature(sym))
-          else
-            None
-        }
-      case links => links
-    }
-    links match {
-      case Nil =>
-        if (warnNoLink)
-          reporter.warning(pos, "Could not find any member to link for \"" + query + "\".")
-        // (4) if we still haven't found anything, create a tooltip
-        Tooltip(query)
-      case List(l) => l
-      case links => 
-        val chosen = chooseLink(links)
-        def linkToString(link: LinkTo) = {
-          val chosenInfo =
-            if (link == chosen) " [chosen]" else ""
-          toString(link) + chosenInfo + "\n"
-        }
-        if (warnNoLink) {
-          val allLinks = links.map(linkToString).mkString
-          reporter.warning(pos,
-            s"""The link target \"$query\" is ambiguous. Several members fit the target:
-            |$allLinks
-            |$explanation""".stripMargin)
-        }
-        chosen
-    }
-  }
-
-  private sealed trait SearchStrategy
-  private case object BothTypeAndTerm extends SearchStrategy
-  private case object OnlyType extends SearchStrategy
-  private case object OnlyTerm extends SearchStrategy
-
-  private def lookupInRootPackage(pos: Position, members: List[String]) =
-    lookupInTemplate(pos, members, EmptyPackage) ::: lookupInTemplate(pos, members, RootPackage)
-
-  private def lookupInTemplate(pos: Position, members: List[String], container: Symbol): List[(Symbol, Symbol)] = {
-    // Maintaining compatibility with previous links is a bit tricky here:
-    // we have a preference for term names for all terms except for the last, where we prefer a class:
-    // How to do this:
-    //  - at each step we do a DFS search with the preferred strategy
-    //  - if the search doesn't return any members, we backtrack on the last decision
-    //     * we look for terms with the last member's name
-    //     * we look for types with the same name, all the way up
-    val result = members match {
-      case Nil => Nil
-      case mbrName::Nil =>
-        var syms = lookupInTemplate(pos, mbrName, container, OnlyType) map ((_, container))
-        if (syms.isEmpty)
-          syms = lookupInTemplate(pos, mbrName, container, OnlyTerm) map ((_, container))
-        syms
-
-      case tplName::rest =>
-        def completeSearch(syms: List[Symbol]) =
-          syms flatMap (lookupInTemplate(pos, rest, _))
-
-        completeSearch(lookupInTemplate(pos, tplName, container, OnlyTerm)) match {
-          case Nil => completeSearch(lookupInTemplate(pos, tplName, container, OnlyType))
-          case syms => syms
-      }
-    }
-    //println("lookupInTemplate(" + members + ", " + container + ") => " + result)
-    result
-  }
-
-  private def lookupInTemplate(pos: Position, member: String, container: Symbol, strategy: SearchStrategy): List[Symbol] = {
-    val name = member.stripSuffix("$").stripSuffix("!").stripSuffix("*")
-    def signatureMatch(sym: Symbol): Boolean = externalSignature(sym).startsWith(name)
-
-    // We need to clean up the bogus classes created by the .class file parser. For example, [[scala.Predef]] resolves
-    // to (bogus) class scala.Predef loaded by the class loader -- which we need to eliminate by looking at the info
-    // and removing NoType classes
-    def cleanupBogusClasses(syms: List[Symbol]) = { syms.filter(_.info != NoType) }
-
-    def syms(name: Name) = container.info.nonPrivateMember(name.encodedName).alternatives
-    def termSyms = cleanupBogusClasses(syms(newTermName(name)))
-    def typeSyms = cleanupBogusClasses(syms(newTypeName(name)))
-
-    val result = if (member.endsWith("$"))
-      termSyms
-    else if (member.endsWith("!"))
-      typeSyms
-    else if (member.endsWith("*"))
-      cleanupBogusClasses(container.info.nonPrivateDecls) filter signatureMatch
-    else
-      strategy match {
-        case BothTypeAndTerm => termSyms ::: typeSyms
-        case OnlyType => typeSyms
-        case OnlyTerm => termSyms
-      }
-
-    //println("lookupInTemplate(" + member + ", " + container + ") => " + result)
-    result
-  }
-
-  private def breakMembers(query: String): List[String] = {
-    // Okay, how does this work? Well: you split on . but you don't want to split on \. => thus the ugly regex
-    // query.split((?<=[^\\\\])\\.).map(_.replaceAll("\\."))
-    // The same code, just faster:
-    var members = List[String]()
-    var index = 0
-    var last_index = 0
-    val length = query.length
-    while (index < length) {
-      if ((query.charAt(index) == '.' || query.charAt(index) == '#') &&
-          ((index == 0) || (query.charAt(index-1) != '\\'))) {
-
-        val member = query.substring(last_index, index).replaceAll("\\\\([#\\.])", "$1")
-        // we want to allow javadoc-style links [[#member]] -- which requires us to remove empty members from the first
-        // element in the list
-        if ((member != "") || (!members.isEmpty))
-          members ::= member
-        last_index = index + 1
-      }
-      index += 1
-    }
-    if (last_index < length)
-      members ::= query.substring(last_index, length).replaceAll("\\\\\\.", ".")
-    members.reverse
-  }
-
-  def externalSignature(sym: Symbol) = {
-    sym.info // force it, otherwise we see lazy types
-    (sym.nameString + sym.signatureString).replaceAll("\\s", "")
-  }
-}
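
The splitting rule that breakMembers implements is easiest to see on concrete queries; the following standalone sketch (a hypothetical re-statement of the rule, not the compiler's private method) shows the expected segments:

    object BreakMembersDemo {
      // Split a link query on unescaped '.' or '#', unescape "\." and "\#",
      // and drop a leading empty segment (javadoc-style [[#member]]).
      def breakMembers(query: String): List[String] = {
        val parts = scala.collection.mutable.ListBuffer.empty[String]
        val current = new StringBuilder
        var i = 0
        while (i < query.length) {
          val c = query.charAt(i)
          if (c == '\\' && i + 1 < query.length && (query.charAt(i + 1) == '.' || query.charAt(i + 1) == '#')) {
            current append query.charAt(i + 1); i += 2
          } else if (c == '.' || c == '#') {
            if (current.nonEmpty || parts.nonEmpty) parts += current.toString
            current.clear(); i += 1
          } else {
            current append c; i += 1
          }
        }
        if (current.nonEmpty) parts += current.toString
        parts.toList
      }

      def main(args: Array[String]): Unit = {
        println(breakMembers("scala.collection.immutable.List$.apply"))  // List(scala, collection, immutable, List$, apply)
        println(breakMembers("#member"))                                  // List(member)
        println(breakMembers("""my\.package.Clazz"""))                    // List(my.package, Clazz)
      }
    }
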
diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala b/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
deleted file mode 100755
index eb0d751..0000000
--- a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author  Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package base
-package comment
-
-import scala.collection._
-
-import java.net.URL
-
-/** A body of text. A comment has a single body, which is composed of
-  * at least one block. Inside every body is exactly one summary (see
-  * [[scala.tools.nsc.doc.model.comment.Summary]]). */
-final case class Body(blocks: Seq[Block]) {
-
-  /** The summary text of the comment body. */
-  lazy val summary: Option[Inline] = {
-    def summaryInBlock(block: Block): Seq[Inline] = block match {
-      case Title(text, _)        => summaryInInline(text)
-      case Paragraph(text)       => summaryInInline(text)
-      case UnorderedList(items)  => items flatMap summaryInBlock
-      case OrderedList(items, _) => items flatMap summaryInBlock
-      case DefinitionList(items) => items.values.toSeq flatMap summaryInBlock
-      case _                     => Nil
-    }
-    def summaryInInline(text: Inline): Seq[Inline] = text match {
-      case Summary(text)     => List(text)
-      case Chain(items)      => items flatMap summaryInInline
-      case Italic(text)      => summaryInInline(text)
-      case Bold(text)        => summaryInInline(text)
-      case Underline(text)   => summaryInInline(text)
-      case Superscript(text) => summaryInInline(text)
-      case Subscript(text)   => summaryInInline(text)
-      case Link(_, title)    => summaryInInline(title)
-      case _                 => Nil
-    }
-    (blocks flatMap { summaryInBlock(_) }).toList match {
-      case Nil => None
-      case inline :: Nil => Some(inline)
-      case inlines => Some(Chain(inlines))
-    }
-  }
-}
-
-/** A block-level element of text, such as a paragraph or code block. */
-sealed abstract class Block
-
-final case class Title(text: Inline, level: Int) extends Block
-final case class Paragraph(text: Inline) extends Block
-final case class Code(data: String) extends Block
-final case class UnorderedList(items: Seq[Block]) extends Block
-final case class OrderedList(items: Seq[Block], style: String) extends Block
-final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block
-final case class HorizontalRule() extends Block
-
-/** A section of text inside a block, possibly with formatting. */
-sealed abstract class Inline
-
-final case class Chain(items: Seq[Inline]) extends Inline
-final case class Italic(text: Inline) extends Inline
-final case class Bold(text: Inline) extends Inline
-final case class Underline(text: Inline) extends Inline
-final case class Superscript(text: Inline) extends Inline
-final case class Subscript(text: Inline) extends Inline
-final case class Link(target: String, title: Inline) extends Inline
-final case class Monospace(text: Inline) extends Inline
-final case class Text(text: String) extends Inline
-abstract class EntityLink(val title: Inline) extends Inline { def link: LinkTo }
-object EntityLink {
-  def apply(title: Inline, linkTo: LinkTo) = new EntityLink(title) { def link: LinkTo = linkTo }
-  def unapply(el: EntityLink): Option[(Inline, LinkTo)] = Some((el.title, el.link))
-}
-final case class HtmlTag(data: String) extends Inline {
-  private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r
-  private val (isEnd, tagName) = data match {
-    case Pattern(s1, s2) =>
-      (! s1.isEmpty, Some(s2.toLowerCase))
-    case _ =>
-      (false, None)
-  }
-
-  def canClose(open: HtmlTag) = {
-    isEnd && tagName == open.tagName
-  }
-
-  private val TagsNotToClose = Set("br", "img")
-  def close = tagName collect { case name if !TagsNotToClose(name) => HtmlTag(s"</$name>") }
-}
-
-/** The summary of a comment, usually its first sentence. There must be exactly one summary per body. */
-final case class Summary(text: Inline) extends Inline
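
Assuming scala-compiler (which provides the scala.tools.nsc.doc.base.comment package above) is on the classpath, a small sketch of how these classes fit together and what Body.summary extracts:

    import scala.tools.nsc.doc.base.comment._

    object BodySummaryDemo extends App {
      // One paragraph whose first sentence is wrapped in Summary, as the wiki
      // parser produces it.
      val body = Body(Seq(Paragraph(Chain(List(
        Summary(Text("Wraps a value.")),
        Text(" See the user guide for details."))))))

      println(body.summary)   // Some(Text(Wraps a value.))
    }
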
diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala b/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala
deleted file mode 100644
index 2b28164..0000000
--- a/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala
+++ /dev/null
@@ -1,134 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author  Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package base
-package comment
-
-import scala.collection._
-
-/** A Scaladoc comment and all its tags.
-  *
-  * '''Note:''' the only instantiation site of this class is in [[CommentFactory]].
-  *
-  * @author Manohar Jonnalagedda
-  * @author Gilles Dubochet */
-abstract class Comment {
-
-  /** The main body of the comment that describes what the entity does and is.  */
-  def body: Body
-
-  private def closeHtmlTags(inline: Inline) = {
-    val stack = mutable.ListBuffer.empty[HtmlTag]
-    def scan(i: Inline) {
-      i match {
-        case Chain(list) =>
-          list foreach scan
-        case tag: HtmlTag => {
-          if (stack.length > 0 && tag.canClose(stack.last)) {
-            stack.remove(stack.length-1)
-          } else {
-            tag.close match {
-              case Some(t) =>
-                stack += t
-              case None =>
-                ;
-            }
-          }
-        }
-        case _ =>
-          ;
-      }
-    }
-    scan(inline)
-    Chain(List(inline) ++ stack.reverse)
-  }
-
-  /** A shorter version of the body. Usually, this is the first sentence of the body. */
-  def short: Inline = {
-    body.summary match {
-      case Some(s) =>
-        closeHtmlTags(s)
-      case _ =>
-        Text("")
-    }
-  }
-
-  /** A list of authors. The empty list is used when no author is defined. */
-  def authors: List[Body]
-
-  /** A list of other resources to see, including links to other entities or
-    * to external documentation. The empty list is used when no other resource
-    * is mentioned. */
-  def see: List[Body]
-
-  /** A description of the result of the entity. Typically, this provides additional
-    * information on the domain of the result, contractual post-conditions, etc. */
-  def result: Option[Body]
-
-  /** A map of exceptions that the entity can throw when accessed, and a
-    * description of what they mean. */
-  def throws: Map[String, Body]
-
-  /** A map of value parameters, and a description of what they are. Typically,
-    * this provides additional information on the domain of the parameters,
-    * contractual pre-conditions, etc. */
-  def valueParams: Map[String, Body]
-
-  /** A map of type parameters, and a description of what they are. Typically,
-    * this provides additional information on the domain of the parameters. */
-  def typeParams: Map[String, Body]
-
-  /** The version number of the entity. There is no formatting or further
-    * meaning attached to this value. */
-  def version: Option[Body]
-
-  /** A version number of a containing entity where this member-entity was introduced. */
-  def since: Option[Body]
-
-  /** An annotation as to expected changes on this entity. */
-  def todo: List[Body]
-
-  /** Whether the entity is deprecated. Using the `@deprecated` Scala attribute
-    * is preferable to using this Scaladoc tag. */
-  def deprecated: Option[Body]
-
-  /** An additional note concerning the contract of the entity. */
-  def note: List[Body]
-
-  /** A usage example related to the entity. */
-  def example: List[Body]
-
-  /** The comment as it appears in the source text. */
-  def source: Option[String]
-
-  /** A description for the primary constructor */
-  def constructor: Option[Body]
-
-  /** A set of diagram directives for the inheritance diagram */
-  def inheritDiagram: List[String]
-
-  /** A set of diagram directives for the content diagram */
-  def contentDiagram: List[String]
-
-  /** The group this member is part of */
-  def group: Option[String]
-
-  /** Member group descriptions */
-  def groupDesc: Map[String,Body]
-
-  /** Member group names (overriding the short tag) */
-  def groupNames: Map[String,String]
-
-  /** Member group priorities */
-  def groupPrio: Map[String,Int]
-
-  override def toString =
-    body.toString + "\n" +
-    (authors map ("@author " + _.toString)).mkString("\n") +
-    (result map ("@return " + _.toString)).mkString("\n") +
-    (version map ("@version " + _.toString)).mkString
-}
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala b/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala
deleted file mode 100644
index 735b79c..0000000
--- a/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package scala.tools.nsc.doc
-package doclet
-
-import scala.collection._
-
-/** Custom Scaladoc generators must extend the `Generator` class. A custom generator can be selected in Scaladoc
-  * using the `-doc-generator` command line option.
-  * The `Generator` class does not provide data about the documented code. A number of data provider traits can be used
-  * to configure what data is actually available to the generator:
-  *  - A `Universer` provides a `Universe` data structure representing the interfaces and comments of the documented
-  *    program.
-  *  - An `Indexer` provides precalculated indexing information about a universe.
-  * Implementing this class only requires defining the method `generateImpl`. */
-abstract class Generator {
-
-  /** A series of tests that must be true before generation can be done. This is used by data provider traits to
-    * confirm that they have been correctly initialised before allowing generation to proceed. */
-  protected val checks: mutable.Set[()=>Boolean] =
-    mutable.Set.empty[()=>Boolean]
-
-  /** Outputs documentation (as a side effect). */
-  def generate(): Unit = {
-    assert(checks forall { check => check() })
-    generateImpl
-  }
-
-  /** Outputs documentation (as a side effect). This method is called only if all `checks` are true. */
-  protected def generateImpl(): Unit
-
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala b/src/compiler/scala/tools/nsc/doc/html/Doclet.scala
deleted file mode 100644
index 3aa3e87..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author  David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc.doc
-package html
-
-import doclet._
-
-/** The default doclet used by the scaladoc command line tool
-  * when no custom doclet is specified. */
-class Doclet extends Generator with Universer with Indexer {
-
-  def generateImpl() {
-    new html.HtmlFactory(universe, index).generate
-  }
-
-}
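
Taken together, Generator and the default Doclet above sketch the contract for a user-provided generator. A minimal hypothetical one (assuming the Universer and Indexer traits expose universe and index exactly as the default Doclet uses them, selected with the -doc-generator option mentioned above) could look like this:

    package mydocs   // hypothetical package

    import scala.tools.nsc.doc.doclet.{ Generator, Indexer, Universer }
    import scala.tools.nsc.doc.model.Package

    // Hypothetical custom generator: instead of producing HTML, it prints the
    // qualified name of every documented package.
    // Invocation: scaladoc -doc-generator mydocs.PackageLister <sources>
    class PackageLister extends Generator with Universer with Indexer {
      protected def generateImpl(): Unit = {
        def walk(pkg: Package): Unit = {
          println(pkg.qualifiedName)
          pkg.packages foreach walk
        }
        walk(universe.rootPackage)
      }
    }
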
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
deleted file mode 100644
index 4630c3d..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ /dev/null
@@ -1,152 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author  David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package html
-
-import model._
-import java.io.{ File => JFile }
-import io.{ Streamable, Directory }
-import scala.collection._
-import page.diagram._
-
-import html.page.diagram.DiagramGenerator
-
-/** A class that can generate Scaladoc sites to some fixed root folder.
-  * @author David Bernard
-  * @author Gilles Dubochet */
-class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
-
-  /** The character encoding to be used for generated Scaladoc sites.
-    * This value is currently always UTF-8. */
-  def encoding: String = "UTF-8"
-
-  def siteRoot: JFile = new JFile(universe.settings.outdir.value)
-
-  def libResources = List(
-    "index.js",
-    "jquery-ui.js",
-    "jquery.js",
-    "jquery.layout.js",
-    "scheduler.js",
-    "diagrams.js",
-    "template.js",
-    "tools.tooltip.js",
-    "modernizr.custom.js",
-
-    "index.css",
-    "ref-index.css",
-    "template.css",
-    "diagrams.css",
-
-    "class.png",
-    "class_big.png",
-    "class_diagram.png",
-    "object.png",
-    "object_big.png",
-    "object_diagram.png",
-    "package.png",
-    "package_big.png",
-    "trait.png",
-    "trait_big.png",
-    "trait_diagram.png",
-    "type.png",
-    "type_big.png",
-    "type_diagram.png",
-
-    "class_to_object_big.png",
-    "object_to_class_big.png",
-    "trait_to_object_big.png",
-    "object_to_trait_big.png",
-    "type_to_object_big.png",
-    "object_to_type_big.png",
-
-    "arrow-down.png",
-    "arrow-right.png",
-    "filter_box_left.png",
-    "filter_box_left2.gif",
-    "filter_box_right.png",
-    "filterbg.gif",
-    "filterboxbarbg.gif",
-    "filterboxbg.gif",
-
-    "constructorsbg.gif",
-    "defbg-blue.gif",
-    "defbg-green.gif",
-    "filterboxbarbg.png",
-    "fullcommenttopbg.gif",
-    "ownderbg2.gif",
-    "ownerbg.gif",
-    "ownerbg2.gif",
-    "packagesbg.gif",
-    "signaturebg.gif",
-    "signaturebg2.gif",
-    "typebg.gif",
-    "conversionbg.gif",
-    "valuemembersbg.gif",
-
-    "navigation-li-a.png",
-    "navigation-li.png",
-    "remove.png",
-    "selected-right.png",
-    "selected.png",
-    "selected2-right.png",
-    "selected2.png",
-    "selected-right-implicits.png",
-    "selected-implicits.png",
-    "unselected.png"
-  )
-
-  /** Generates the Scaladoc site for a model into the site root.
-    * A scaladoc site is a set of HTML and related files
-    * that document a model extracted from a compiler run.
-    * @param model The model to generate in the form of a sequence of packages. */
-  def generate() {
-
-    def copyResource(subPath: String) {
-      val bytes = new Streamable.Bytes {
-        val p = "/scala/tools/nsc/doc/html/resource/" + subPath
-        val inputStream = getClass.getResourceAsStream(p)
-        assert(inputStream != null, p)
-      }.toByteArray
-      val dest = Directory(siteRoot) / subPath
-      dest.parent.createDirectory()
-      val out = dest.toFile.bufferedOutput()
-      try out.write(bytes, 0, bytes.length)
-      finally out.close()
-    }
-
-    DiagramGenerator.initialize(universe.settings)
-
-    libResources foreach (s => copyResource("lib/" + s))
-
-    new page.Index(universe, index) writeFor this
-    new page.IndexScript(universe, index) writeFor this
-
-    writeTemplates(_ writeFor this)
-
-    for (letter <- index.firstLetterIndex) {
-      new html.page.ReferenceIndex(letter._1, index, universe) writeFor this
-    }
-
-    DiagramGenerator.cleanup()
-  }
-
-  def writeTemplates(writeForThis: HtmlPage => Unit) {
-    val written = mutable.HashSet.empty[DocTemplateEntity]
-    val diagramGenerator: DiagramGenerator = new DotDiagramGenerator(universe.settings)
-
-    def writeTemplate(tpl: DocTemplateEntity) {
-      if (!(written contains tpl)) {
-        writeForThis(new page.Template(universe, diagramGenerator, tpl))
-        written += tpl
-        tpl.templates collect { case d: DocTemplateEntity => d } map writeTemplate
-      }
-    }
-
-    writeTemplate(universe.rootPackage)
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
deleted file mode 100644
index 69da322..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ /dev/null
@@ -1,224 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author  David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package html
-
-import base._
-import base.comment._
-import model._
-
-import scala.xml.{XML, NodeSeq}
-import scala.xml.dtd.{DocType, PublicID}
-import scala.collection._
-import java.io.Writer
-
-/** An html page that is part of a Scaladoc site.
-  * @author David Bernard
-  * @author Gilles Dubochet */
-abstract class HtmlPage extends Page { thisPage =>
-  /** The title of this page. */
-  protected def title: String
-
-  /** The page description */
-  protected def description: String =
-    // unless overridden, will display the title in a spaced format, keeping - and .
-    title.replaceAll("[^a-zA-Z0-9\\.\\-]+", " ").replaceAll("\\-+", " - ").replaceAll(" +", " ")
-
-  /** The page keywords */
-  protected def keywords: String =
-    // unless overridden, same as description, minus the " - "
-    description.replaceAll(" - ", " ")
-
-  /** Additional header elements (links, scripts, meta tags, etc.) required for this page. */
-  protected def headers: NodeSeq
-
-  /** The body of this page. */
-  def body: NodeSeq
-
-  def writeFor(site: HtmlFactory) {
-    val doctype =
-      DocType("html", PublicID("-//W3C//DTD XHTML 1.1//EN", "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"), Nil)
-    val html =
-      <html>
-        <head>
-          <title>{ title }</title>
-          <meta name="description" content={ description }/>
-          <meta name="keywords" content={ keywords }/>
-          <meta http-equiv="content-type" content={ "text/html; charset=" + site.encoding }/>
-          { headers }
-        </head>
-        { body }
-      </html>
-
-    writeFile(site) { (w: Writer) =>
-      w.write("<?xml version='1.0' encoding='" + site.encoding + "'?>\n")
-      w.write(doctype.toString + "\n")
-      w.write(xml.Xhtml.toXhtml(html))
-    }
-
-    if (site.universe.settings.docRawOutput.value)
-      writeFile(site, ".raw") {
-        // we're only interested in the body, as this will go into the diff
-        _.write(body.text)
-      }
-
-    //XML.save(pageFile.getPath, html, site.encoding, xmlDecl = false, doctype = doctype)
-  }
-
-  /** Transforms an optional comment into a styled HTML tree representing its body if it is defined, or into an empty
-    * node sequence if it is not. */
-  def commentToHtml(comment: Option[Comment]): NodeSeq =
-    (comment map (commentToHtml(_))) getOrElse NodeSeq.Empty
-
-  /** Transforms a comment into a styled HTML tree representing its body. */
-  def commentToHtml(comment: Comment): NodeSeq =
-    bodyToHtml(comment.body)
-
-  def bodyToHtml(body: Body): NodeSeq =
-    body.blocks flatMap (blockToHtml(_))
-
-  def blockToHtml(block: Block): NodeSeq = block match {
-    case Title(in, 1) => <h3>{ inlineToHtml(in) }</h3>
-    case Title(in, 2) => <h4>{ inlineToHtml(in) }</h4>
-    case Title(in, 3) => <h5>{ inlineToHtml(in) }</h5>
-    case Title(in, _) => <h6>{ inlineToHtml(in) }</h6>
-    case Paragraph(in) => <p>{ inlineToHtml(in) }</p>
-    case Code(data) =>
-      <pre>{ SyntaxHigh(data) }</pre> //<pre>{ scala.xml.Text(data) }</pre>
-    case UnorderedList(items) =>
-      <ul>{ listItemsToHtml(items) }</ul>
-    case OrderedList(items, listStyle) =>
-      <ol class={ listStyle }>{ listItemsToHtml(items) }</ol>
-    case DefinitionList(items) =>
-      <dl>{items map { case (t, d) => <dt>{ inlineToHtml(t) }</dt><dd>{ blockToHtml(d) }</dd> } }</dl>
-    case HorizontalRule() =>
-      <hr/>
-  }
-
-  def listItemsToHtml(items: Seq[Block]) =
-    items.foldLeft(xml.NodeSeq.Empty){ (xmlList, item) =>
-      item match {
-        case OrderedList(_, _) | UnorderedList(_) =>  // html requires sub ULs to be put into the last LI
-          xmlList.init ++ <li>{ xmlList.last.child ++ blockToHtml(item) }</li>
-        case Paragraph(inline) =>
-          xmlList :+ <li>{ inlineToHtml(inline) }</li>  // LIs are blocks, no need to use Ps
-        case block =>
-          xmlList :+ <li>{ blockToHtml(block) }</li>
-      }
-  }
-
-  def inlineToHtml(inl: Inline): NodeSeq = inl match {
-    case Chain(items) => items flatMap (inlineToHtml(_))
-    case Italic(in) => <i>{ inlineToHtml(in) }</i>
-    case Bold(in) => <b>{ inlineToHtml(in) }</b>
-    case Underline(in) => <u>{ inlineToHtml(in) }</u>
-    case Superscript(in) => <sup>{ inlineToHtml(in) }</sup>
-    case Subscript(in) => <sub>{ inlineToHtml(in) }</sub>
-    case Link(raw, title) => <a href={ raw } target="_blank">{ inlineToHtml(title) }</a>
-    case Monospace(in) => <code>{ inlineToHtml(in) }</code>
-    case Text(text) => scala.xml.Text(text)
-    case Summary(in) => inlineToHtml(in)
-    case HtmlTag(tag) => scala.xml.Unparsed(tag)
-    case EntityLink(target, link) => linkToHtml(target, link, true)
-  }
-
-  def linkToHtml(text: Inline, link: LinkTo, hasLinks: Boolean) = link match {
-    case LinkToTpl(dtpl: TemplateEntity) =>
-      if (hasLinks)
-        <a href={ relativeLinkTo(dtpl) } class="extype" name={ dtpl.qualifiedName }>{ inlineToHtml(text) }</a>
-      else
-        <span class="extype" name={ dtpl.qualifiedName }>{ inlineToHtml(text) }</span>
-    case LinkToMember(mbr: MemberEntity, inTpl: TemplateEntity) =>
-      if (hasLinks)
-        <a href={ relativeLinkTo(inTpl) + "#" + mbr.signature } class="extmbr" name={ mbr.qualifiedName }>{ inlineToHtml(text) }</a>
-      else
-        <span class="extmbr" name={ mbr.qualifiedName }>{ inlineToHtml(text) }</span>
-    case Tooltip(tooltip) =>
-      <span class="extype" name={ tooltip }>{ inlineToHtml(text) }</span>
-    case LinkToExternal(name, url) =>
-      <a href={ url } class="extype" target="_top">{ inlineToHtml(text) }</a>
-    case _ =>
-      inlineToHtml(text)
-  }
-
-  def typeToHtml(tpes: List[model.TypeEntity], hasLinks: Boolean): NodeSeq = tpes match {
-    case Nil =>
-      NodeSeq.Empty
-    case List(tpe) =>
-      typeToHtml(tpe, hasLinks)
-    case tpe :: rest =>
-      typeToHtml(tpe, hasLinks) ++ scala.xml.Text(" with ") ++ typeToHtml(rest, hasLinks)
-  }
-
-  def typeToHtml(tpe: model.TypeEntity, hasLinks: Boolean): NodeSeq = {
-    val string = tpe.name
-    def toLinksOut(inPos: Int, starts: List[Int]): NodeSeq = {
-      if (starts.isEmpty && (inPos == string.length))
-        NodeSeq.Empty
-      else if (starts.isEmpty)
-        scala.xml.Text(string.slice(inPos, string.length))
-      else if (inPos == starts.head)
-        toLinksIn(inPos, starts)
-      else {
-        scala.xml.Text(string.slice(inPos, starts.head)) ++ toLinksIn(starts.head, starts)
-      }
-    }
-    def toLinksIn(inPos: Int, starts: List[Int]): NodeSeq = {
-      val (link, width) = tpe.refEntity(inPos)
-      val text = comment.Text(string.slice(inPos, inPos + width))
-      linkToHtml(text, link, hasLinks) ++ toLinksOut(inPos + width, starts.tail)
-    }
-    if (hasLinks)
-      toLinksOut(0, tpe.refEntity.keySet.toList)
-    else
-      scala.xml.Text(string)
-  }
-
-  def typesToHtml(tpess: List[model.TypeEntity], hasLinks: Boolean, sep: NodeSeq): NodeSeq = tpess match {
-    case Nil         => NodeSeq.Empty
-    case tpe :: Nil  => typeToHtml(tpe, hasLinks)
-    case tpe :: tpes => typeToHtml(tpe, hasLinks) ++ sep ++ typesToHtml(tpes, hasLinks, sep)
-  }
-
-  def hasPage(e: DocTemplateEntity) = {
-    e.isPackage || e.isTrait || e.isClass || e.isObject || e.isCaseClass
-  }
-
-  /** Returns the HTML code that represents the template in `tpl` as a hyperlinked name. */
-  def templateToHtml(tpl: TemplateEntity, name: String = null) = tpl match {
-    case dTpl: DocTemplateEntity =>
-      if (hasPage(dTpl)) {
-        <a href={ relativeLinkTo(dTpl) } class="extype" name={ dTpl.qualifiedName }>{ if (name eq null) dTpl.name else name }</a>
-      } else {
-        scala.xml.Text(if (name eq null) dTpl.name else name)
-      }
-    case ndTpl: NoDocTemplate =>
-      scala.xml.Text(if (name eq null) ndTpl.name else name)
-  }
-
-  /** Returns the HTML code that represents the templates in `tpls` as a list of hyperlinked names. */
-  def templatesToHtml(tplss: List[TemplateEntity], sep: NodeSeq): NodeSeq = tplss match {
-    case Nil         => NodeSeq.Empty
-    case tpl :: Nil  => templateToHtml(tpl)
-    case tpl :: tpls => templateToHtml(tpl) ++ sep ++ templatesToHtml(tpls, sep)
-  }
-
-  /** Returns the _big image name corresponding to the DocTemplateEntity (upper left icon) */
-  def docEntityKindToBigImage(ety: DocTemplateEntity) =
-    if (ety.isTrait && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "trait_to_object_big.png"
-    else if (ety.isTrait) "trait_big.png"
-    else if (ety.isClass && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "class_to_object_big.png"
-    else if (ety.isClass) "class_big.png"
-    else if ((ety.isAbstractType || ety.isAliasType) && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "type_to_object_big.png"
-    else if ((ety.isAbstractType || ety.isAliasType)) "type_big.png"
-    else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isClass) "object_to_class_big.png"
-    else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isTrait) "object_to_trait_big.png"
-    else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && (ety.companion.get.isAbstractType || ety.companion.get.isAliasType)) "object_to_trait_big.png"
-    else if (ety.isObject) "object_big.png"
-    else if (ety.isPackage) "package_big.png"
-    else "class_big.png"  // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/Page.scala b/src/compiler/scala/tools/nsc/doc/html/Page.scala
deleted file mode 100644
index 62166f7..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/Page.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author  David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc.doc.html
-
-import scala.tools.nsc.doc.model._
-import java.io.{FileOutputStream, File}
-import scala.reflect.NameTransformer
-import java.nio.channels.Channels
-import java.io.Writer
-
-abstract class Page {
-  thisPage =>
-
-  /** The path of this page, relative to the API site. `path.tail` is a list
-    * of folder names leading to this page (from closest package to
-    * one-above-root package), `path.head` is the file name of this page.
-    * Note that `path` has a length of at least one. */
-  def path: List[String]
-
-  def absoluteLinkTo(path: List[String]) = path.reverse.mkString("/")
-
-  def createFileOutputStream(site: HtmlFactory, suffix: String = "") = {
-    val file = new File(site.siteRoot, absoluteLinkTo(thisPage.path) + suffix)
-    val folder = file.getParentFile
-    if (! folder.exists) {
-      folder.mkdirs
-    }
-    new FileOutputStream(file.getPath)
-  }
-
-  def writeFile(site: HtmlFactory, suffix: String = "")(fn: Writer => Unit) = {
-    val fos = createFileOutputStream(site, suffix)
-    val w = Channels.newWriter(fos.getChannel, site.encoding)
-    try {
-      fn(w)
-    }
-    finally {
-      w.close()
-      fos.close()
-    }
-  }
-
-  /** Writes this page as a file. The file's location is relative to the
-    * generator's site root, and the encoding is also defined by the generator.
-    * @param site The generator that is writing this page. */
-  def writeFor(site: HtmlFactory): Unit
-
-  def kindToString(mbr: MemberEntity) =
-    mbr match {
-      case c: Class => if (c.isCaseClass) "case class" else "class"
-      case _: Trait => "trait"
-      case _: Package => "package"
-      case _: Object => "object"
-      case _: AbstractType => "type"
-      case _: AliasType => "type"
-      case _: Constructor => "new"
-      case v: Def => "def"
-      case v: Val if (v.isLazyVal) => "lazy val"
-      case v: Val if (v.isVal) => "val"
-      case v: Val if (v.isVar) => "var"
-      case _ => sys.error("Cannot create kind for: " + mbr + " of class " + mbr.getClass)
-    }
-
-  def templateToPath(tpl: TemplateEntity): List[String] = {
-    def doName(tpl: TemplateEntity): String =
-      (if (tpl.inPackageObject) "package$$" else "") + NameTransformer.encode(tpl.name) + (if (tpl.isObject) "$" else "")
-    def downPacks(pack: Package): List[String] =
-      if (pack.isRootPackage) Nil else (doName(pack) :: downPacks(pack.inTemplate))
-    def downInner(nme: String, tpl: TemplateEntity): (String, Package) = {
-      tpl.inTemplate match {
-        case inPkg: Package => (nme + ".html", inPkg)
-        case inTpl => downInner(doName(inTpl) + "$" + nme, inTpl)
-      }
-    }
-    val (file, pack) =
-      tpl match {
-        case p: Package => ("package.html", p)
-        case _ => downInner(doName(tpl), tpl)
-      }
-    file :: downPacks(pack)
-  }
-
-  /** A relative link from this page to some destination class entity.
-    * @param destClass The class or object entity that the link will point to. */
-  def relativeLinkTo(destClass: TemplateEntity): String =
-    relativeLinkTo(templateToPath(destClass))
-
-  /** A relative link from this page to some destination page in the Scaladoc site.
-    * @param destPage The page that the link will point to. */
-  def relativeLinkTo(destPage: HtmlPage): String = {
-    relativeLinkTo(destPage.path)
-  }
-
-  /** A relative link from this page to some destination path.
-    * @param destPath The path that the link will point to. */
-  def relativeLinkTo(destPath: List[String]): String = {
-    def relativize(from: List[String], to: List[String]): List[String] = (from, to) match {
-      case (f :: fs, t :: ts) if (f == t) => // both paths are identical to that point
-        relativize(fs, ts)
-      case (fss, tss) =>
-        List.fill(fss.length - 1)("..") ::: tss
-    }
-    relativize(thisPage.path.reverse, destPath.reverse).mkString("/")
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
deleted file mode 100644
index 6fdaaed..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ /dev/null
@@ -1,286 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2010-2013 LAMP/EPFL
- * @author  Stephane Micheloud
- */
-
-package scala.tools.nsc.doc.html
-
-import scala.xml.NodeSeq
-import scala.annotation.tailrec
-
-/** Highlight the syntax of Scala code appearing in a `{{{` wiki block
-  * (see method `HtmlPage.blockToHtml`).
-  *
-  * @author Stephane Micheloud
-  * @version 1.0
-  */
-private[html] object SyntaxHigh {
-
-  /** Reserved words, sorted alphabetically
-    * (see [[scala.reflect.internal.StdNames]]) */
-  val reserved = Array(
-    "abstract", "case", "catch", "class", "def",
-    "do", "else", "extends", "false", "final", "finally",
-    "for", "if", "implicit", "import", "lazy", "match",
-    "new", "null", "object", "override", "package",
-    "private", "protected", "return", "sealed", "super",
-    "this", "throw", "trait", "true", "try", "type",
-    "val", "var", "while", "with", "yield")
-
-  /** Annotations, sorted alphabetically */
-  val annotations = Array(
-    "BeanProperty", "SerialVersionUID",
-    "beanGetter", "beanSetter", "bridge", "cloneable",
-    "deprecated", "deprecatedName",
-    "elidable", "field", "getter", "inline",
-    "migration", "native", "noinline", "param",
-    "remote", "setter", "specialized", "strictfp", "switch",
-    "tailrec", "throws", "transient",
-    "unchecked", "uncheckedStable", "uncheckedVariance",
-    "varargs", "volatile")
-
-  /** Standard library classes/objects, sorted alphabetically */
-  val standards = Array (
-    "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Application", "Array",
-    "Boolean", "Byte", "Char", "Class", "ClassTag", "ClassManifest",
-    "Console", "Double", "Enumeration", "Float", "Function", "Int",
-    "List", "Long", "Manifest", "Map",
-    "NoManifest", "None", "Nothing", "Null", "Object", "Option", "OptManifest",
-    "Pair", "Predef",
-    "Seq", "Set", "Short", "Some", "String", "Symbol",
-    "Triple", "TypeTag", "Unit")
-
-  def apply(data: String): NodeSeq = {
-    val buf = data.getBytes
-    val out = new StringBuilder
-
-    def compare(offset: Int, key: String): Int = {
-      var i = offset
-      var j = 0
-      val l = key.length
-      while (i < buf.length && j < l) {
-        val bch = buf(i).toChar
-        val kch = key charAt j
-        if (bch < kch) return -1
-        else if (bch > kch) return 1
-        i += 1
-        j += 1
-      }
-      if (j < l) -1
-      else if (i < buf.length &&
-               ('A' <= buf(i) && buf(i) <= 'Z' ||
-                'a' <= buf(i) && buf(i) <= 'z' ||
-                '0' <= buf(i) && buf(i) <= '9' ||
-                buf(i) == '_')) 1
-      else 0
-    }
-
-    def lookup(a: Array[String], i: Int): Int = {
-      var lo = 0
-      var hi = a.length - 1
-      while (lo <= hi) {
-        val m = (hi + lo) / 2
-        val d = compare(i, a(m))
-        if (d < 0) hi = m - 1
-        else if (d > 0) lo = m + 1
-        else return m
-      }
-      -1
-    }
-
-    def comment(i: Int): String = {
-      val out = new StringBuilder("/")
-      def line(i: Int): Int =
-        if (i == buf.length || buf(i) == '\n') i
-        else {
-          out append buf(i).toChar
-          line(i+1)
-        }
-      var level = 0
-      def multiline(i: Int, star: Boolean): Int = {
-        if (i == buf.length) return i
-        val ch = buf(i).toChar
-        out append ch
-        ch match {
-          case '*' =>
-            if (star) level += 1
-            multiline(i+1, !star)
-          case '/' =>
-            if (star) {
-              if (level > 0) level -= 1
-              if (level == 0) i else multiline(i+1, true)
-            } else
-              multiline(i+1, false)
-          case _ =>
-            multiline(i+1, false)
-        }
-      }
-      if (buf(i) == '/') line(i) else multiline(i, true)
-      out.toString
-    }
-
-    /* e.g. `val endOfLine = '\u000A'`*/
-    def charlit(j: Int): String = {
-      val out = new StringBuilder("'")
-      def charlit0(i: Int, bslash: Boolean): Int = {
-        if (i == buf.length) i
-        else if (i > j+6) { out setLength 0; j }
-        else {
-          val ch = buf(i).toChar
-          out append ch
-          ch match {
-            case '\\' =>
-              charlit0(i+1, true)
-            case '\'' if !bslash =>
-              i
-            case _ =>
-              if (bslash && '0' <= ch && ch <= '9') charlit0(i+1, true)
-              else charlit0(i+1, false)
-          }
-        }
-      }
-      charlit0(j, false)
-      out.toString
-    }
-
-    def strlit(i: Int): String = {
-      val out = new StringBuilder("\"")
-      def strlit0(i: Int, bslash: Boolean): Int = {
-        if (i == buf.length) return i
-        val ch = buf(i).toChar
-        out append ch
-        ch match {
-          case '\\' =>
-            strlit0(i+1, true)
-          case '"' if !bslash =>
-            i
-          case _ =>
-            strlit0(i+1, false)
-        }
-      }
-      strlit0(i, false)
-      out.toString
-    }
-
-    def numlit(i: Int): String = {
-      val out = new StringBuilder
-      def intg(i: Int): Int = {
-        if (i == buf.length) return i
-        val ch = buf(i).toChar
-        ch match {
-          case '.' =>
-            out append ch
-            frac(i+1)
-          case _ =>
-            if (Character.isDigit(ch)) {
-              out append ch
-              intg(i+1)
-            } else i
-        }
-      }
-      def frac(i: Int): Int = {
-        if (i == buf.length) return i
-        val ch = buf(i).toChar
-        ch match {
-          case 'e' | 'E' =>
-            out append ch
-            expo(i+1, false)
-          case _ =>
-            if (Character.isDigit(ch)) {
-              out append ch
-              frac(i+1)
-            } else i
-        }
-      }
-      def expo(i: Int, signed: Boolean): Int = {
-        if (i == buf.length) return i
-        val ch = buf(i).toChar
-        ch match {
-          case '+' | '-' if !signed =>
-            out append ch
-            expo(i+1, true)
-          case _ =>
-            if (Character.isDigit(ch)) {
-              out append ch
-              expo(i+1, signed)
-            } else i
-        }
-      }
-      intg(i)
-      out.toString
-    }
-
-    @tailrec def parse(pre: String, i: Int): Unit = {
-      out append pre
-      if (i == buf.length) return
-      buf(i) match {
-        case '\n' =>
-          parse("\n", i+1)
-        case ' ' =>
-          parse(" ", i+1)
-        case '&' =>
-          parse("&", i+1)
-        case '<' if i+1 < buf.length =>
-          val ch = buf(i+1).toChar
-          if (ch == '-' || ch == ':' || ch == '%')
-            parse("<span class=\"kw\"><"+ch+"</span>", i+2)
-          else
-            parse("<", i+1)
-        case '>' =>
-          if (i+1 < buf.length && buf(i+1) == ':')
-            parse("<span class=\"kw\">>:</span>", i+2)
-          else
-            parse(">", i+1)
-        case '=' =>
-          if (i+1 < buf.length && buf(i+1) == '>')
-            parse("<span class=\"kw\">=></span>", i+2)
-          else
-            parse(buf(i).toChar.toString, i+1)
-        case '/' =>
-          if (i+1 < buf.length && (buf(i+1) == '/' || buf(i+1) == '*')) {
-            val c = comment(i+1)
-            parse("<span class=\"cmt\">"+c+"</span>", i+c.length)
-          } else
-            parse(buf(i).toChar.toString, i+1)
-        case '\'' =>
-          val s = charlit(i+1)
-          if (s.length > 0)
-            parse("<span class=\"lit\">"+s+"</span>", i+s.length)
-          else
-            parse(buf(i).toChar.toString, i+1)
-        case '"' =>
-          val s = strlit(i+1)
-          parse("<span class=\"lit\">"+s+"</span>", i+s.length)
-        case '@' =>
-          val k = lookup(annotations, i+1)
-          if (k >= 0)
-            parse("<span class=\"ano\">@"+annotations(k)+"</span>", i+annotations(k).length+1)
-          else
-            parse(buf(i).toChar.toString, i+1)
-        case _ =>
-          if (i == 0 || (i >= 1 && !Character.isJavaIdentifierPart(buf(i-1).toChar))) {
-            if (Character.isDigit(buf(i)) ||
-                (buf(i) == '.' && i + 1 < buf.length && Character.isDigit(buf(i+1)))) {
-              val s = numlit(i)
-              parse("<span class=\"num\">"+s+"</span>", i+s.length)
-            } else {
-              val k = lookup(reserved, i)
-              if (k >= 0)
-                parse("<span class=\"kw\">"+reserved(k)+"</span>", i+reserved(k).length)
-              else {
-                val k = lookup(standards, i)
-                if (k >= 0)
-                  parse("<span class=\"std\">"+standards(k)+"</span>", i+standards(k).length)
-                else
-                  parse(buf(i).toChar.toString, i+1)
-              }
-            }
-          } else
-            parse(buf(i).toChar.toString, i+1)
-      }
-    }
-
-    parse("", 0)
-    scala.xml.Unparsed(out.toString)
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
deleted file mode 100644
index 8802d7c..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author  David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package html
-package page
-
-import model._
-
-import scala.collection._
-import scala.xml._
-import scala.util.parsing.json.{JSONObject, JSONArray}
-
-class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
-
-  def path = List("index.html")
-
-  def title = {
-    val s = universe.settings
-    ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) +
-    ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
-  }
-
-  val headers =
-    <xml:group>
-      <link href={ relativeLinkTo{List("index.css", "lib")} }  media="screen" type="text/css" rel="stylesheet"/>
-    </xml:group>
-
-  private val scripts = {
-    val sources =
-      (List("jquery.js", "jquery-ui.js", "jquery.layout.js", "scheduler.js", "index.js").map {
-        x => relativeLinkTo(List(x, "lib"))
-      }) :+ "index.js"
-
-    sources map {
-      src => <script defer="defer" type="text/javascript" src={src}></script>
-    }
-  }
-
-  val body =
-    <body>
-      <div id="library">
-        <img class='class icon' src={ relativeLinkTo{List("class.png", "lib")} }/>
-        <img class='trait icon' src={ relativeLinkTo{List("trait.png", "lib")} }/>
-        <img class='object icon' src={ relativeLinkTo{List("object.png", "lib")} }/>
-        <img class='package icon' src={ relativeLinkTo{List("package.png", "lib")} }/>
-      </div>
-      { browser }
-      <div id="content" class="ui-layout-center">
-        <iframe id="template" name="template" src={ relativeLinkTo{List("package.html")} }/>
-      </div>
-      { scripts }
-    </body>
-
-  def letters: NodeSeq =
-    '_' +: ('a' to 'z') map {
-      char => {
-        val label = if (char == '_') '#' else char.toUpper
-
-        index.firstLetterIndex.get(char) match {
-          case Some(_) =>
-            <a target="template" href={ "index/index-" + char + ".html" }>{
-              label
-            }</a>
-          case None => <span>{ label }</span>
-        }
-      }
-    }
-
-  def browser =
-    <div id="browser" class="ui-layout-west">
-      <div class="ui-west-center">
-      <div id="filter">
-          <div id="textfilter"></div>
-          <div id="letters">{ letters }</div>
-      </div>
-      <div class="pack" id="tpl">{
-        def packageElem(pack: model.Package): NodeSeq = {
-          <xml:group>
-            { if (!pack.isRootPackage)
-                <a class="tplshow" href={ relativeLinkTo(pack) } target="template">{ pack.qualifiedName }</a>
-              else NodeSeq.Empty
-            }
-            <ol class="templates">{
-              val tpls: Map[String, Seq[DocTemplateEntity]] =
-                (pack.templates collect {
-                  case t: DocTemplateEntity if !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName) => t
-                }) groupBy (_.name)
-
-              val placeholderSeq: NodeSeq = <div class="placeholder"></div>
-
-              def createLink(entity: DocTemplateEntity, includePlaceholder: Boolean, includeText: Boolean) = {
-                val entityType = kindToString(entity)
-                val linkContent = (
-                  { if (includePlaceholder) placeholderSeq else NodeSeq.Empty }
-                  ++
-                  { if (includeText) <span class="tplLink">{ Text(packageQualifiedName(entity)) }</span> else NodeSeq.Empty }
-                )
-                <a class="tplshow" href={ relativeLinkTo(entity) } target="template"><span class={ entityType }>({ Text(entityType) })</span>{ linkContent }</a>
-              }
-
-              for (tn <- tpls.keySet.toSeq sortBy (_.toLowerCase)) yield {
-                val entities = tpls(tn)
-                val row = (entities find (e => e.isPackage || e.isObject), entities find (e => e.isTrait || e.isClass))
-
-                val itemContents = row match {
-                  case (Some(obj), None) => createLink(obj, includePlaceholder = true, includeText = true)
-
-                  case (maybeObj, Some(template)) =>
-                    val firstLink = maybeObj match {
-                      case Some(obj) => createLink(obj, includePlaceholder = false, includeText = false)
-                      case None => placeholderSeq
-                    }
-
-                    firstLink ++ createLink(template, includePlaceholder = false, includeText = true)
-
-                  case _ => // FIXME: this default case should not be necessary. For some reason AnyRef is not a package, object, trait, or class
-                    val entry = entities.head
-                    placeholderSeq ++ createLink(entry, includePlaceholder = false, includeText = true)
-                }
-
-                <li title={ entities.head.qualifiedName }>{ itemContents }</li>
-              }
-            }</ol>
-            <ol class="packages"> {
-              for (sp <- pack.packages sortBy (_.name.toLowerCase)) yield
-                <li class="pack" title={ sp.qualifiedName }>{ packageElem(sp) }</li>
-            }</ol>
-          </xml:group>
-        }
-        packageElem(universe.rootPackage)
-      }</div></div>
-    </div>
-
-  def packageQualifiedName(ety: DocTemplateEntity): String =
-    if (ety.inTemplate.isPackage) ety.name
-    else (packageQualifiedName(ety.inTemplate) + "." + ety.name)
-
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
deleted file mode 100644
index a205e02..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author  David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc.doc.html.page
-
-import scala.tools.nsc.doc
-import scala.tools.nsc.doc.model.{Package, DocTemplateEntity}
-import scala.tools.nsc.doc.html.{Page, HtmlFactory}
-import java.nio.channels.Channels
-import scala.util.parsing.json.{JSONObject, JSONArray}
-
-class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
-  def path = List("index.js")
-
-  override def writeFor(site: HtmlFactory) {
-    writeFile(site) {
-      _.write("Index.PACKAGES = " + packages.toString() + ";")
-    }
-  }
-
-  val packages = {
-    val pairs = allPackagesWithTemplates.toIterable.map(_ match {
-      case (pack, templates) => {
-        val merged = mergeByQualifiedName(templates)
-
-        val ary = merged.keys.toList.sortBy(_.toLowerCase).map(key => {
-          val pairs = merged(key).map(
-            t => kindToString(t) -> relativeLinkTo(t)
-          ) :+ ("name" -> key)
-
-          JSONObject(scala.collection.immutable.Map(pairs : _*))
-        })
-
-        pack.qualifiedName -> JSONArray(ary)
-      }
-    }).toSeq
-
-    JSONObject(scala.collection.immutable.Map(pairs : _*))
-  }
-
-  def mergeByQualifiedName(source: List[DocTemplateEntity]) = {
-    var result = Map[String, List[DocTemplateEntity]]()
-
-    for (t <- source) {
-      val k = t.qualifiedName
-      result += k -> (result.getOrElse(k, List()) :+ t)
-    }
-
-    result
-  }
-
-  def allPackages = {
-    def f(parent: Package): List[Package] = {
-      parent.packages.flatMap(
-        p => f(p) :+ p
-      )
-    }
-    f(universe.rootPackage).sortBy(_.toString)
-  }
-
-  def allPackagesWithTemplates = {
-    Map(allPackages.map((key) => {
-      key -> key.templates.collect {
-        case t: DocTemplateEntity if !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName) => t
-      }
-    }) : _*)
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala b/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
deleted file mode 100755
index a74c2ee..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author  Pedro Furlanetto
- */
-
-package scala.tools.nsc
-package doc
-package html
-package page
-import doc.model._
-
-class ReferenceIndex(letter: Char, index: doc.Index, universe: Universe) extends HtmlPage {
-
-  def path = List("index-"+letter+".html", "index")
-
-  def title = {
-    val s = universe.settings
-    ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) +
-    ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
-  }
-
-  def headers =
-    <xml:group>
-      <link href={ relativeLinkTo(List("ref-index.css", "lib")) }  media="screen" type="text/css" rel="stylesheet"/>
-      <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
-    </xml:group>
-
-
-  private def entry(name: String, methods: Iterable[MemberEntity]) = {
-    val occurrences = methods.map(method => {
-      val html = templateToHtml(method.inDefinitionTemplates.head)
-      if (method.deprecation.isDefined) {
-        <strike>{ html }</strike>
-      } else {
-        html
-      }
-    }).toList.distinct
-
-    <div class="entry">
-      <div class="name">{
-        if (methods.find { ! _.deprecation.isDefined } != None)
-          name
-        else
-          <strike>{ name }</strike>
-      }</div>
-      <div class="occurrences">{
-        for (owner <- occurrences) yield owner ++ scala.xml.Text(" ")
-      }</div>
-    </div>
-  }
-
-  def body =
-    <body>{
-      for(groups <- index.firstLetterIndex(letter)) yield
-        entry(groups._1, groups._2.view)
-    }</body>
-
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
deleted file mode 100644
index 68289b7..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
+++ /dev/null
@@ -1,128 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author  David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package html
-package page
-
-import model._
-import scala.xml.{NodeSeq, Unparsed}
-import java.io.File
-
-class Source(sourceFile: File) extends HtmlPage {
-
-  val path = List("source.html")
-
-  val title = "Scaladoc: page source"
-
-  val headers =
-    NodeSeq.Empty
-
-  val body =
-    <body>
-      <h1>Page source is not implemented yet</h1>
-    </body>
-
-    /*
-
-
-    def readTextFromSrcDir(subPath: String) :Option[String] = {
-      readTextFromFile(new File(sourceDir, subPath))
-    }
-
-    def readTextFromFile(f : File) :Option[String] = {
-      if (f.exists) {
-        Some(Source.fromFile(f)(Codec.default).getLines().mkString(""))
-      } else {
-        None
-      }
-    }
-
-
-    def writeTextToFile(f : File, txt : String, header: Option[String], footer: Option[String]) {
-      val out = new FileOutputStream(f)
-      try {
-        val enc = "UTF-8"
-        header.foreach(s => out.write(s.getBytes(enc)))
-        out.write(txt.getBytes(enc))
-        footer.foreach(s => out.write(s.getBytes(enc)))
-      } finally {
-        try {
-          out.close()
-        } catch {
-          case _ => //ignore
-        }
-      }
-    }
-
-    trait SourceHtmlizer {
-      def scalaToHtml(src :File) : Option[File]
-    }
-
-    lazy val sourceHtmlizer : SourceHtmlizer = {
-      if (cfg.htmlizeSource) {
-        new SourceHtmlizer {
-
-          val inDir: File = cfg.sourcedir
-          val outDir: File = cfg.outputdir
-
-          private def relativize(uri: URI, from: URI) = linkHelper.relativize(uri, from).getOrElse("__notFound__" + uri.getPath)
-
-          def header(dest: URI) = Some("""
-          <html>
-          <head>
-            <link href='""" + relativize(new URI("site:/_highlighter/SyntaxHighlighter.css"), dest) + """' rel='stylesheet' type='text/css'/>
-            <script language='javascript' src='""" + relativize(new URI("site:/_highlighter/shAll.js"), dest) + """'></script>
-          </head>
-          <body>
-            <pre name="code" class="scala" style="width:100%">
-        """)
-
-          def footer(dest: URI) = Some("""</pre>
-            <script language='javascript'>
-              dp.SyntaxHighlighter.ClipboardSwf = '""" + relativize(new URI("site:/_highlighter/clipboard.swf"), dest) + """';
-              dp.SyntaxHighlighter.HighlightAll('code');
-            </script>
-          </body>
-          </html>
-        """)
-
-          //TODO: escape the source code
-          def scalaToHtml(src :File) = {
-            val dest = new File(outDir, fileHelper.relativePathUnderDir(src, inDir) + ".html")
-            if (!dest.exists || dest.lastModified < src.lastModified) {
-
-              //we need to verify whether the directory we are trying to write to has already been created or not
-              if(!dest.getParentFile.exists) dest.getParentFile.mkdirs
-
-              val uri = linkHelper.uriFor(dest).get
-              var txt = fileHelper.readTextFromFile(src).getOrElse("")
-              txt = txt.replace("<", "&lt;")
-              fileHelper.writeTextToFile(dest, txt, header(uri), footer(uri))
-            }
-            Some(dest)
-          }
-
-          def copyResources() {
-            val loader = this.getClass().getClassLoader()
-            val buf = new Array[Byte](1024)
-            def copyResource(name: String) = fileHelper.copyResource("/scala/tools/nsc/doc/html/resource/", name, outDir, loader, buf)
-            copyResource("_highlighter/clipboard.swf")
-            copyResource("_highlighter/shAll.js")
-            copyResource("_highlighter/SyntaxHighlighter.css")
-          }
-
-          copyResources()
-        }
-      } else {
-        new SourceHtmlizer {
-          def scalaToHtml(src :File) = None
-        }
-      }
-    }
-    */
-
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
deleted file mode 100644
index 63c77e7..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ /dev/null
@@ -1,977 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author  David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package html
-package page
-
-import base._
-import base.comment._
-
-import model._
-import model.diagram._
-import diagram._
-
-import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
-import scala.language.postfixOps
-
-class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemplateEntity) extends HtmlPage {
-
-  val path =
-    templateToPath(tpl)
-
-  def title = {
-    val s = universe.settings
-
-    tpl.name +
-    ( if (!s.doctitle.isDefault) " - " + s.doctitle.value else "" ) +
-    ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" ) +
-    " - " + tpl.qualifiedName
-  }
-
-  val headers =
-    <xml:group>
-      <link href={ relativeLinkTo{List("template.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
-      <link href={ relativeLinkTo{List("diagrams.css", "lib")} } media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
-      <script type="text/javascript">
-         if(top === self) {{
-            var url = '{ val p = templateToPath(tpl); "../" * (p.size - 1) + "index.html" }';
-            var hash = '{ val p = templateToPath(tpl); (p.tail.reverse ::: List(p.head.replace(".html", ""))).mkString(".") }';
-            var anchor = window.location.hash;
-            var anchor_opt = '';
-            if (anchor.length { scala.xml.Unparsed(">=") /* unless we use Unparsed, it gets escaped and crashes the script */ } 1)
-              anchor_opt = '@' + anchor.substring(1);
-            window.location.href = url + '#' + hash + anchor_opt;
-         }}
-   	  </script>
-    </xml:group>
-
-  private val scripts = {
-    val sources = {
-      val default = List("jquery.js", "jquery-ui.js", "tools.tooltip.js", "template.js")
-      val forDiagrams = List("modernizr.custom.js", "diagrams.js")
-
-      (default ++ (if (universe.settings.docDiagrams.value) forDiagrams else Nil)) map {
-        x => x.replace('.', '-') -> relativeLinkTo(List(x, "lib"))
-      }
-    }
-
-    sources map {
-      case (id, src) =>
-        <script defer="defer" type="text/javascript" id={id} src={src}></script>
-    }
-  }
-
-  val valueMembers =
-    tpl.methods ++ tpl.values ++ tpl.templates.filter(x => x.isObject || x.isPackage) sorted
-
-  val (absValueMembers, nonAbsValueMembers) =
-    valueMembers partition (_.isAbstract)
-
-  val (deprValueMembers, nonDeprValueMembers) =
-    nonAbsValueMembers partition (_.deprecation.isDefined)
-
-  val (concValueMembers, shadowedImplicitMembers) =
-    nonDeprValueMembers partition (!_.isShadowedOrAmbiguousImplicit)
-
-  val typeMembers =
-    tpl.abstractTypes ++ tpl.aliasTypes ++ tpl.templates.filter(x => x.isTrait || x.isClass) sorted (implicitly[Ordering[MemberEntity]])
-
-  val constructors = (tpl match {
-    case cls: Class => (cls.constructors: List[MemberEntity]).sorted
-    case _ => Nil
-  })
-
-  /* For the body there is a special case for AnyRef; otherwise AnyRef appears
-   * like a package/object. This problem should be fixed; this implementation
-   * is just a patch. */
-  val body = {
-    val templateName = if (tpl.isRootPackage) "root package" else tpl.name
-    val displayName = tpl.companion match {
-      case Some(companion) if (companion.visibility.isPublic && companion.inSource != None) =>
-        <a href={relativeLinkTo(companion)} title="Go to companion">{ templateName }</a>
-      case _ =>
-        templateName
-    }
-    val owner = {
-      if (tpl.isRootPackage || tpl.inTemplate.isRootPackage)
-        NodeSeq.Empty
-      else
-        <p id="owner">{ templatesToHtml(tpl.inTemplate.toRoot.reverse.tail, scala.xml.Text(".")) }</p>
-    }
-
-    <body class={ if (tpl.isType) "type" else "value" }>
-      <div id="definition">
-        {
-          tpl.companion match {
-            case Some(companion) if (companion.visibility.isPublic && companion.inSource != None) =>
-              <a href={relativeLinkTo(companion)} title="Go to companion"><img src={ relativeLinkTo(List(docEntityKindToBigImage(tpl), "lib")) }/></a>
-            case _ =>
-              <img src={ relativeLinkTo(List(docEntityKindToBigImage(tpl), "lib")) }/>
-        }}
-        { owner }
-        <h1>{ displayName }</h1>
-      </div>
-
-      { signature(tpl, true) }
-      { memberToCommentHtml(tpl, tpl.inTemplate, true) }
-
-      <div id="mbrsel">
-        <div id='textfilter'><span class='pre'/><span class='input'><input id='mbrsel-input' type='text' accesskey='/'/></span><span class='post'/></div>
-        { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty && (!universe.settings.docGroups.value || (tpl.members.map(_.group).distinct.length == 1)))
-            NodeSeq.Empty
-          else
-            <div id="order">
-              <span class="filtertype">Ordering</span>
-              <ol>
-                {
-                  if (!universe.settings.docGroups.value || (tpl.members.map(_.group).distinct.length == 1))
-                    NodeSeq.Empty
-                  else
-                    <li class="group out"><span>Grouped</span></li>
-                }
-                <li class="alpha in"><span>Alphabetic</span></li>
-                {
-                  if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty)
-                    NodeSeq.Empty
-                  else
-                    <li class="inherit out"><span>By inheritance</span></li>
-                }
-              </ol>
-            </div>
-        }
-        { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else
-          {
-            if (!tpl.linearizationTemplates.isEmpty)
-              <div id="ancestors">
-                <span class="filtertype">Inherited<br/>
-                </span>
-                <ol id="linearization">
-                  { (tpl :: tpl.linearizationTemplates).map(wte => <li class="in" name={ wte.qualifiedName }><span>{ wte.name }</span></li>) }
-                </ol>
-              </div>
-            else NodeSeq.Empty
-          } ++ {
-            if (!tpl.conversions.isEmpty)
-              <div id="ancestors">
-                <span class="filtertype">Implicitly<br/>
-                </span>
-                <ol id="implicits"> {
-                  tpl.conversions.map { conv =>
-                    val name = conv.conversionQualifiedName
-                    val hide = universe.settings.hiddenImplicits(name)
-                    <li class="in" name={ name } data-hidden={ hide.toString }><span>{ "by " + conv.conversionShortName }</span></li>
-                  }
-                }
-                </ol>
-              </div>
-            else NodeSeq.Empty
-          } ++
-          <div id="ancestors">
-            <span class="filtertype"></span>
-            <ol>
-              <li class="hideall out"><span>Hide All</span></li>
-              <li class="showall in"><span>Show all</span></li>
-            </ol>
-            <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#members" target="_blank">Learn more about member selection</a>
-          </div>
-        }
-        {
-          <div id="visbl">
-            <span class="filtertype">Visibility</span>
-            <ol><li class="public in"><span>Public</span></li><li class="all out"><span>All</span></li></ol>
-          </div>
-        }
-      </div>
-
-      <div id="template">
-        <div id="allMembers">
-        { if (constructors.isEmpty) NodeSeq.Empty else
-            <div id="constructors" class="members">
-              <h3>Instance Constructors</h3>
-              <ol>{ constructors map (memberToHtml(_, tpl)) }</ol>
-            </div>
-        }
-
-        { if (typeMembers.isEmpty) NodeSeq.Empty else
-            <div id="types" class="types members">
-              <h3>Type Members</h3>
-              <ol>{ typeMembers map (memberToHtml(_, tpl)) }</ol>
-            </div>
-        }
-
-        { if (absValueMembers.isEmpty) NodeSeq.Empty else
-            <div id="values" class="values members">
-              <h3>Abstract Value Members</h3>
-              <ol>{ absValueMembers map (memberToHtml(_, tpl)) }</ol>
-            </div>
-        }
-
-        { if (concValueMembers.isEmpty) NodeSeq.Empty else
-            <div id="values" class="values members">
-              <h3>{ if (absValueMembers.isEmpty) "Value Members" else "Concrete Value Members" }</h3>
-              <ol>{ concValueMembers map (memberToHtml(_, tpl)) }</ol>
-            </div>
-        }
-
-        { if (shadowedImplicitMembers.isEmpty) NodeSeq.Empty else
-            <div id="values" class="values members">
-              <h3>Shadowed Implicit Value Members</h3>
-              <ol>{ shadowedImplicitMembers map (memberToHtml(_, tpl)) }</ol>
-            </div>
-        }
-
-        { if (deprValueMembers.isEmpty) NodeSeq.Empty else
-            <div id="values" class="values members">
-              <h3>Deprecated Value Members</h3>
-              <ol>{ deprValueMembers map (memberToHtml(_, tpl)) }</ol>
-            </div>
-        }
-        </div>
-
-        <div id="inheritedMembers">
-        {
-          // linearization
-          NodeSeq fromSeq (for ((superTpl, superType) <- (tpl.linearizationTemplates zip tpl.linearizationTypes)) yield
-            <div class="parent" name={ superTpl.qualifiedName }>
-              <h3>Inherited from {
-                typeToHtmlWithStupidTypes(tpl, superTpl, superType)
-              }</h3>
-            </div>
-          )
-        }
-        {
-          // implicitly inherited
-          NodeSeq fromSeq (for (conversion <- (tpl.conversions)) yield
-            <div class="conversion" name={ conversion.conversionQualifiedName }>
-              <h3>Inherited by implicit conversion { conversion.conversionShortName } from
-                { typeToHtml(tpl.resultType, true) } to { typeToHtml(conversion.targetType, true) }
-              </h3>
-            </div>
-          )
-        }
-        </div>
-
-        <div id="groupedMembers">
-        {
-          val allGroups = tpl.members.map(_.group).distinct
-          val orderedGroups = allGroups.map(group => (tpl.groupPriority(group), group)).sorted.map(_._2)
-          // linearization
-          NodeSeq fromSeq (for (group <- orderedGroups) yield
-            <div class="group" name={ group }>
-              <h3>{ tpl.groupName(group) }</h3>
-              {
-                tpl.groupDescription(group) match {
-                  case Some(body) => <div class="comment cmt">{ bodyToHtml(body) }</div>
-                  case _ => NodeSeq.Empty
-                }
-              }
-            </div>
-          )
-        }
-        </div>
-
-      </div>
-
-      <div id="tooltip" ></div>
-
-      {
-        if (Set("epfl", "EPFL").contains(tpl.universe.settings.docfooter.value))
-          <div id="footer">Scala programming documentation. Copyright (c) 2003-2013 <a href="http://www.epfl.ch" target="_top">EPFL</a>, with contributions from <a href="http://typesafe.com" target="_top">Typesafe</a>.</div>
-        else
-          <div id="footer"> { tpl.universe.settings.docfooter.value } </div>
-      }
-      { scripts }
-    </body>
-  }
-
-  def memberToHtml(mbr: MemberEntity, inTpl: DocTemplateEntity): NodeSeq = {
-    val memberComment = memberToCommentHtml(mbr, inTpl, false)
-    <li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
-      data-isabs={ mbr.isAbstract.toString }
-      fullComment={ if(memberComment.filter(_.label=="div").isEmpty) "no" else "yes" }
-      group={ mbr.group }>
-      <a id={ mbr.signature }/>
-      <a id={ mbr.signatureCompat }/>
-      { signature(mbr, false) }
-      { memberComment }
-    </li>
-  }
-
-  def memberToCommentHtml(mbr: MemberEntity, inTpl: DocTemplateEntity, isSelf: Boolean): NodeSeq = {
-    mbr match {
-      case dte: DocTemplateEntity if isSelf =>
-        // comment of class itself
-        <xml:group>
-          <div id="comment" class="fullcommenttop">{ memberToCommentBodyHtml(mbr, inTpl, isSelf = true) }</div>
-        </xml:group>
-      case dte: DocTemplateEntity if mbr.comment.isDefined =>
-        // comment of inner, documented class (only short comment, full comment is on the class' own page)
-        memberToInlineCommentHtml(mbr, isSelf)
-      case _ =>
-        // comment of non-class member or non-documented inner class
-        val commentBody = memberToCommentBodyHtml(mbr, inTpl, isSelf = false)
-        if (commentBody.isEmpty)
-          NodeSeq.Empty
-        else {
-          val shortComment = memberToShortCommentHtml(mbr, isSelf)
-          val longComment = memberToUseCaseCommentHtml(mbr, isSelf) ++ memberToCommentBodyHtml(mbr, inTpl, isSelf)
-
-          val includedLongComment = if (shortComment.text.trim == longComment.text.trim)
-            NodeSeq.Empty
-          else
-            <div class="fullcomment">{ longComment }</div>
-
-          shortComment ++ includedLongComment
-        }
-    }
-  }
-
-  def memberToUseCaseCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq = {
-    mbr match {
-      case nte: NonTemplateMemberEntity if nte.isUseCase =>
-        inlineToHtml(comment.Text("[use case] "))
-      case _ => NodeSeq.Empty
-    }
-  }
-
-  def memberToShortCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq =
-    mbr.comment.fold(NodeSeq.Empty) { comment =>
-      <p class="shortcomment cmt">{ memberToUseCaseCommentHtml(mbr, isSelf) }{ inlineToHtml(comment.short) }</p>
-    }
-
-  def memberToInlineCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq =
-    <p class="comment cmt">{ inlineToHtml(mbr.comment.get.short) }</p>
-
-  def memberToCommentBodyHtml(mbr: MemberEntity, inTpl: DocTemplateEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
-    val s = universe.settings
-
-    val memberComment =
-      if (mbr.comment.isEmpty) NodeSeq.Empty
-      else <div class="comment cmt">{ commentToHtml(mbr.comment) }</div>
-
-    val paramComments = {
-      val prs: List[ParameterEntity] = mbr match {
-        case cls: Class => cls.typeParams ::: cls.valueParams.flatten
-        case trt: Trait => trt.typeParams
-        case dfe: Def => dfe.typeParams ::: dfe.valueParams.flatten
-        case ctr: Constructor => ctr.valueParams.flatten
-        case _ => Nil
-      }
-
-      def paramCommentToHtml(prs: List[ParameterEntity], comment: Comment): NodeSeq = prs match {
-
-        case (tp: TypeParam) :: rest =>
-          val paramEntry: NodeSeq = {
-            <dt class="tparam">{ tp.name }</dt><dd class="cmt">{ bodyToHtml(comment.typeParams(tp.name)) }</dd>
-          }
-          paramEntry ++ paramCommentToHtml(rest, comment)
-
-        case (vp: ValueParam) :: rest  =>
-          val paramEntry: NodeSeq = {
-            <dt class="param">{ vp.name }</dt><dd class="cmt">{ bodyToHtml(comment.valueParams(vp.name)) }</dd>
-          }
-          paramEntry ++ paramCommentToHtml(rest, comment)
-
-        case _ =>
-          NodeSeq.Empty
-      }
-
-      mbr.comment.fold(NodeSeq.Empty) { comment =>
-        val cmtedPrs = prs filter {
-          case tp: TypeParam => comment.typeParams isDefinedAt tp.name
-          case vp: ValueParam => comment.valueParams isDefinedAt vp.name
-        }
-        if (cmtedPrs.isEmpty && comment.result.isEmpty) NodeSeq.Empty
-        else {
-          <dl class="paramcmts block">{
-            paramCommentToHtml(cmtedPrs, comment) ++ (
-            comment.result match {
-              case None => NodeSeq.Empty
-              case Some(cmt) =>
-                <dt>returns</dt><dd class="cmt">{ bodyToHtml(cmt) }</dd>
-            })
-          }</dl>
-        }
-      }
-    }
-
-    val implicitInformation = mbr.byConversion match {
-      case Some(conv) =>
-        <dt class="implicit">Implicit information</dt> ++
-        {
-          val targetType = typeToHtml(conv.targetType, true)
-          val conversionMethod = conv.convertorMethod match {
-            case Left(member) => Text(member.name)
-            case Right(name)  => Text(name)
-          }
-
-          // strip off the package object endings, they make things harder to follow
-          val conversionOwnerQualifiedName = conv.convertorOwner.qualifiedName.stripSuffix(".package")
-          val conversionOwner = templateToHtml(conv.convertorOwner, conversionOwnerQualifiedName)
-
-          val constraintText = conv.constraints match {
-            case Nil =>
-              NodeSeq.Empty
-            case List(constraint) =>
-              scala.xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint) ++ scala.xml.Text(".")
-            case List(constraint1, constraint2) =>
-              scala.xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint1) ++
-                scala.xml.Text(" and at the same time ") ++ constraintToHtml(constraint2) ++ scala.xml.Text(".")
-            case constraints =>
-              <br/> ++ "This conversion will take place only if all of the following constraints are met:" ++ <br/> ++ {
-                var index = 0
-                constraints map { constraint => scala.xml.Text({ index += 1; index } + ". ") ++ constraintToHtml(constraint) ++ <br/> }
-              }
-          }
-
-          <dd>
-            This member is added by an implicit conversion from { typeToHtml(inTpl.resultType, true) } to
-            { targetType } performed by method { conversionMethod } in { conversionOwner }.
-            { constraintText }
-          </dd>
-        } ++ {
-          if (mbr.isShadowedOrAmbiguousImplicit) {
-            // These are the members that are shadowing or ambiguating the current implicit
-            // see ImplicitMemberShadowing trait for more information
-            val shadowingSuggestion = {
-              val params = mbr match {
-                case d: Def => d.valueParams map (_ map (_ name) mkString("(", ", ", ")")) mkString
-                case _      => "" // no parameters
-              }
-              <br/> ++ scala.xml.Text("To access this member you can use a ") ++
-              <a href="http://stackoverflow.com/questions/2087250/what-is-the-purpose-of-type-ascription-in-scala"
-                target="_blank">type ascription</a> ++ scala.xml.Text(":") ++
-              <br/> ++ <div class="cmt"><pre>{"(" + Template.lowerFirstLetter(tpl.name) + ": " + conv.targetType.name + ")." + mbr.name + params }</pre></div>
-            }
-
-            val shadowingWarning: NodeSeq =
-              if (mbr.isShadowedImplicit)
-                  scala.xml.Text("This implicitly inherited member is shadowed by one or more members in this " +
-                  "class.") ++ shadowingSuggestion
-              else if (mbr.isAmbiguousImplicit)
-                  scala.xml.Text("This implicitly inherited member is ambiguous. One or more implicitly " +
-                  "inherited members have similar signatures, so calling this member may produce an ambiguous " +
-                  "implicit conversion compiler error.") ++ shadowingSuggestion
-              else NodeSeq.Empty
-
-            <dt class="implicit">Shadowing</dt> ++
-            <dd>{ shadowingWarning }</dd>
-
-          } else NodeSeq.Empty
-        }
-      case _ =>
-        NodeSeq.Empty
-    }
-
-    // --- start attributes block vals
-    val attributes: NodeSeq = {
-      val fvs: List[comment.Paragraph] = visibility(mbr).toList
-      if (fvs.isEmpty || isReduced) NodeSeq.Empty
-      else {
-        <dt>Attributes</dt>
-        <dd>{ fvs map { fv => { inlineToHtml(fv.text) ++ scala.xml.Text(" ") } } }</dd>
-      }
-    }
-
-    val definitionClasses: NodeSeq = {
-      val inDefTpls = mbr.inDefinitionTemplates
-      if ((inDefTpls.tail.isEmpty && (inDefTpls.head == inTpl)) || isReduced) NodeSeq.Empty
-      else {
-        <dt>Definition Classes</dt>
-        <dd>{ templatesToHtml(inDefTpls, scala.xml.Text(" → ")) }</dd>
-      }
-    }
-
-    val fullSignature: NodeSeq = {
-      mbr match {
-        case nte: NonTemplateMemberEntity if nte.isUseCase =>
-          <div class="full-signature-block toggleContainer">
-            <span class="toggle">Full Signature</span>
-            <div class="hiddenContent full-signature-usecase">{ signature(nte.useCaseOf.get,true) }</div>
-          </div>
-        case _ => NodeSeq.Empty
-      }
-    }
-
-    val selfType: NodeSeq = mbr match {
-      case dtpl: DocTemplateEntity if (isSelf && !dtpl.selfType.isEmpty && !isReduced) =>
-        <dt>Self Type</dt>
-        <dd>{ typeToHtml(dtpl.selfType.get, hasLinks = true) }</dd>
-      case _ => NodeSeq.Empty
-    }
-
-    val annotations: NodeSeq = {
-      // A list of annotations which don't show their arguments, e.g. because they are shown separately.
-      val annotationsWithHiddenArguments = List("deprecated", "Deprecated", "migration")
-
-      def showArguments(annotation: Annotation) =
-        !(annotationsWithHiddenArguments.contains(annotation.qualifiedName))
-
-      if (!mbr.annotations.isEmpty) {
-        <dt>Annotations</dt>
-        <dd>{
-            mbr.annotations.map { annot =>
-              <xml:group>
-                <span class="name">@{ templateToHtml(annot.annotationClass) }</span>{
-                  if (showArguments(annot)) argumentsToHtml(annot.arguments) else NodeSeq.Empty
-                }
-              </xml:group>
-            }
-          }
-        </dd>
-      } else NodeSeq.Empty
-    }
-
-    val sourceLink: NodeSeq = mbr match {
-      case dtpl: DocTemplateEntity if (isSelf && dtpl.sourceUrl.isDefined && dtpl.inSource.isDefined && !isReduced) =>
-        val (absFile, line) = dtpl.inSource.get
-        <dt>Source</dt>
-        <dd>{ <a href={ dtpl.sourceUrl.get.toString } target="_blank">{ Text(absFile.file.getName) }</a> }</dd>
-      case _ => NodeSeq.Empty
-    }
-
-    val deprecation: NodeSeq =
-      mbr.deprecation match {
-        case Some(deprecation) if !isReduced =>
-          <dt>Deprecated</dt>
-          <dd class="cmt">{ bodyToHtml(deprecation) }</dd>
-        case _ => NodeSeq.Empty
-      }
-
-    val migration: NodeSeq =
-      mbr.migration match {
-        case Some(migration) if !isReduced =>
-          <dt>Migration</dt>
-          <dd class="cmt">{ bodyToHtml(migration) }</dd>
-        case _ => NodeSeq.Empty
-      }
-
-    val mainComment: NodeSeq = mbr.comment match {
-      case Some(comment) if (! isReduced) =>
-        def orEmpty[T](it: Iterable[T])(gen:  =>NodeSeq): NodeSeq =
-          if (it.isEmpty) NodeSeq.Empty else gen
-
-        val example =
-          orEmpty(comment.example) {
-            <div class="block">Example{ if (comment.example.length > 1) "s" else ""}:
-               <ol>{
-                 val exampleXml: List[NodeSeq] = for (ex <- comment.example) yield
-                   <li class="cmt">{ bodyToHtml(ex) }</li>
-                 exampleXml.reduceLeft(_ ++ Text(", ") ++ _)
-              }</ol>
-            </div>
-	  }
-
-        val version: NodeSeq =
-          orEmpty(comment.version) {
-            <dt>Version</dt>
-            <dd>{ for(body <- comment.version.toList) yield bodyToHtml(body) }</dd>
-          }
-
-        val sinceVersion: NodeSeq =
-          orEmpty(comment.since) {
-            <dt>Since</dt>
-            <dd>{ for(body <- comment.since.toList) yield bodyToHtml(body) }</dd>
-          }
-
-        val note: NodeSeq =
-          orEmpty(comment.note) {
-            <dt>Note</dt>
-            <dd>{
-              val noteXml: List[NodeSeq] =  for(note <- comment.note ) yield <span class="cmt">{bodyToHtml(note)}</span>
-              noteXml.reduceLeft(_ ++ Text(", ") ++ _)
-            }</dd>
-          }
-
-        val seeAlso: NodeSeq =
-          orEmpty(comment.see) {
-            <dt>See also</dt>
-            <dd>{
-              val seeXml: List[NodeSeq] = for(see <- comment.see ) yield <span class="cmt">{bodyToHtml(see)}</span>
-              seeXml.reduceLeft(_ ++ _)
-            }</dd>
-          }
-
-        val exceptions: NodeSeq =
-          orEmpty(comment.throws) {
-            <dt>Exceptions thrown</dt>
-            <dd>{
-              val exceptionsXml: List[NodeSeq] =
-                for((name, body) <- comment.throws.toList.sortBy(_._1) ) yield
-                  <span class="cmt">{Text(name) ++ bodyToHtml(body)}</span>
-              exceptionsXml.reduceLeft(_ ++ Text("") ++ _)
-            }</dd>
-          }
-
-        val todo: NodeSeq =
-          orEmpty(comment.todo) {
-            <dt>To do</dt>
-            <dd>{
-              val todoXml: List[NodeSeq] = (for(todo <- comment.todo ) yield <span class="cmt">{bodyToHtml(todo)}</span> )
-              todoXml.reduceLeft(_ ++ Text(", ") ++ _)
-            }</dd>
-          }
-
-        example ++ version ++ sinceVersion ++ exceptions ++ todo ++ note ++ seeAlso
-
-      case _ => NodeSeq.Empty
-    }
-    // end attributes block vals ---
-
-    val attributesInfo = implicitInformation ++ attributes ++ definitionClasses ++ fullSignature ++ selfType ++ annotations ++ deprecation ++ migration ++ sourceLink ++ mainComment
-    val attributesBlock =
-      if (attributesInfo.isEmpty)
-        NodeSeq.Empty
-      else
-        <dl class="attributes block"> { attributesInfo }</dl>
-
-    val linearization = mbr match {
-      case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.linearizationTemplates.nonEmpty =>
-        <div class="toggleContainer block">
-          <span class="toggle">Linear Supertypes</span>
-          <div class="superTypes hiddenContent">{
-            typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = scala.xml.Text(", "))
-          }</div>
-        </div>
-      case _ => NodeSeq.Empty
-    }
-
-    val subclasses = mbr match {
-      case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.allSubClasses.nonEmpty =>
-        <div class="toggleContainer block">
-          <span class="toggle">Known Subclasses</span>
-          <div class="subClasses hiddenContent">{
-            templatesToHtml(dtpl.allSubClasses.sortBy(_.name), scala.xml.Text(", "))
-          }</div>
-        </div>
-      case _ => NodeSeq.Empty
-    }
-
-    def createDiagram(f: DocTemplateEntity => Option[Diagram], description: String, id: String): NodeSeq =
-      if (s.docDiagrams.value) mbr match {
-        case dtpl: DocTemplateEntity if isSelf && !isReduced =>
-          val diagram = f(dtpl)
-          if (diagram.isDefined) {
-            val s = universe.settings
-            val diagramSvg = generator.generate(diagram.get, tpl, this)
-            if (diagramSvg != NodeSeq.Empty) {
-              <div class="toggleContainer block diagram-container" id={ id + "-container"}>
-                <span class="toggle diagram-link">{ description }</span>
-                <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#diagrams" target="_blank" class="diagram-help">Learn more about scaladoc diagrams</a>
-                <div class="diagram" id={ id }>{
-                  diagramSvg
-                }</div>
-              </div>
-            } else NodeSeq.Empty
-          } else NodeSeq.Empty
-        case _ => NodeSeq.Empty
-      } else NodeSeq.Empty // diagrams not generated
-
-    val typeHierarchy = createDiagram(_.inheritanceDiagram, "Type Hierarchy", "inheritance-diagram")
-    val contentHierarchy = createDiagram(_.contentDiagram, "Content Hierarchy", "content-diagram")
-
-    memberComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses ++ typeHierarchy ++ contentHierarchy
-  }
-
-  def boundsToHtml(hi: Option[TypeEntity], lo: Option[TypeEntity], hasLinks: Boolean): NodeSeq = {
-    def bound0(bnd: Option[TypeEntity], pre: String): NodeSeq = bnd match {
-      case None => NodeSeq.Empty
-      case Some(tpe) => scala.xml.Text(pre) ++ typeToHtml(tpe, hasLinks)
-    }
-    bound0(lo, " >: ") ++ bound0(hi, " <: ")
-  }
-
-  def visibility(mbr: MemberEntity): Option[comment.Paragraph] = {
-    import comment._
-    import comment.{ Text => CText }
-    mbr.visibility match {
-      case PrivateInInstance() =>
-        Some(Paragraph(CText("private[this]")))
-      case PrivateInTemplate(owner) if (owner == mbr.inTemplate) =>
-        Some(Paragraph(CText("private")))
-      case PrivateInTemplate(owner) =>
-        Some(Paragraph(Chain(List(CText("private["), EntityLink(comment.Text(owner.qualifiedName), LinkToTpl(owner)), CText("]")))))
-      case ProtectedInInstance() =>
-        Some(Paragraph(CText("protected[this]")))
-      case ProtectedInTemplate(owner) if (owner == mbr.inTemplate) =>
-        Some(Paragraph(CText("protected")))
-      case ProtectedInTemplate(owner) =>
-        Some(Paragraph(Chain(List(CText("protected["), EntityLink(comment.Text(owner.qualifiedName), LinkToTpl(owner)), CText("]")))))
-      case Public() =>
-        None
-    }
-  }
-
-  /** name, tparams, params, result */
-  def signature(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
-    def inside(hasLinks: Boolean, nameLink: String = ""): NodeSeq =
-      <xml:group>
-      <span class="modifier_kind">
-        <span class="modifier">{ mbr.flags.map(flag => inlineToHtml(flag.text) ++ scala.xml.Text(" ")) }</span>
-        <span class="kind">{ kindToString(mbr) }</span>
-      </span>
-      <span class="symbol">
-        {
-          val nameClass =
-            if (mbr.isImplicitlyInherited)
-              if (mbr.isShadowedOrAmbiguousImplicit)
-                "implicit shadowed"
-              else
-                "implicit"
-            else
-              "name"
-
-          val nameHtml = {
-            val value = if (mbr.isConstructor) tpl.name else mbr.name
-            val span = if (mbr.deprecation.isDefined)
-              <span class={ nameClass + " deprecated"} title={"Deprecated: "+bodyToStr(mbr.deprecation.get)}>{ value }</span>
-            else
-              <span class={ nameClass }>{ value }</span>
-            val encoded = scala.reflect.NameTransformer.encode(value)
-            if (encoded != value) {
-              span % new UnprefixedAttribute("title",
-                                             "gt4s: " + encoded +
-                                             span.attribute("title").map(
-                                               node => ". " + node
-                                             ).getOrElse(""),
-                                             scala.xml.Null)
-            } else {
-              span
-            }
-          }
-          if (!nameLink.isEmpty)
-            <a href={nameLink}>{nameHtml}</a>
-          else nameHtml
-        }{
-          def tparamsToHtml(mbr: Any): NodeSeq = mbr match {
-            case hk: HigherKinded =>
-              val tpss = hk.typeParams
-              if (tpss.isEmpty) NodeSeq.Empty else {
-                def tparam0(tp: TypeParam): NodeSeq =
-                  <span name={ tp.name }>{ tp.variance + tp.name }{ tparamsToHtml(tp) }{ boundsToHtml(tp.hi, tp.lo, hasLinks)}</span>
-                def tparams0(tpss: List[TypeParam]): NodeSeq = (tpss: @unchecked) match {
-                  case tp :: Nil => tparam0(tp)
-                  case tp :: tps => tparam0(tp) ++ Text(", ") ++ tparams0(tps)
-                }
-                <span class="tparams">[{ tparams0(tpss) }]</span>
-              }
-            case _ => NodeSeq.Empty
-          }
-          tparamsToHtml(mbr)
-        }{
-          if (isReduced) NodeSeq.Empty else {
-            def paramsToHtml(vlsss: List[List[ValueParam]]): NodeSeq = {
-              def param0(vl: ValueParam): NodeSeq =
-                // note the }{ on the following lines: they are necessary to avoid undesired whitespace in the output
-                <span name={ vl.name }>{
-                  Text(vl.name)
-                }{ Text(": ") ++ typeToHtml(vl.resultType, hasLinks) }{
-                  vl.defaultValue match {
-                    case Some(v) => Text(" = ") ++ treeToHtml(v)
-                    case None => NodeSeq.Empty
-                  }
-                }</span>
-
-              def params0(vlss: List[ValueParam]): NodeSeq = vlss match {
-                case Nil => NodeSeq.Empty
-                case vl :: Nil => param0(vl)
-                case vl :: vls => param0(vl) ++ Text(", ") ++ params0(vls)
-              }
-              def implicitCheck(vlss: List[ValueParam]): NodeSeq = vlss match {
-                case vl :: vls => if(vl.isImplicit) { <span class="implicit">implicit </span> } else Text("")
-                case _ => Text("")
-              }
-              vlsss map { vlss => <span class="params">({implicitCheck(vlss) ++ params0(vlss) })</span> }
-            }
-            mbr match {
-              case cls: Class => paramsToHtml(cls.valueParams)
-              case ctr: Constructor => paramsToHtml(ctr.valueParams)
-              case dfe: Def => paramsToHtml(dfe.valueParams)
-              case _ => NodeSeq.Empty
-            }
-          }
-        }{ if (isReduced) NodeSeq.Empty else {
-          mbr match {
-            case tme: MemberEntity if (tme.isDef || tme.isVal || tme.isLazyVal || tme.isVar) =>
-              <span class="result">: { typeToHtml(tme.resultType, hasLinks) }</span>
-
-            case abt: MemberEntity with AbstractType =>
-              val b2s = boundsToHtml(abt.hi, abt.lo, hasLinks)
-              if (b2s != NodeSeq.Empty)
-                <span class="result">{ b2s }</span>
-              else NodeSeq.Empty
-
-            case alt: MemberEntity with AliasType =>
-              <span class="result"> = { typeToHtml(alt.alias, hasLinks) }</span>
-
-            case tpl: MemberTemplateEntity if !tpl.parentTypes.isEmpty =>
-              <span class="result"> extends { typeToHtml(tpl.parentTypes.map(_._2), hasLinks) }</span>
-
-            case _ => NodeSeq.Empty
-          }
-        }}
-      </span>
-      </xml:group>
-    mbr match {
-      case dte: DocTemplateEntity if !isSelf =>
-        <h4 class="signature">{ inside(hasLinks = true, nameLink = relativeLinkTo(dte)) }</h4>
-      case _ if isSelf =>
-        <h4 id="signature" class="signature">{ inside(hasLinks = true) }</h4>
-      case _ =>
-        <h4 class="signature">{ inside(hasLinks = true) }</h4>
-    }
-
-  }
-
-  /** */
-  def treeToHtml(tree: TreeEntity): NodeSeq = {
-
-    /** Makes the text presentable in the HTML page: newlines and basic indentation.
-     * Change this function if you want to improve the pretty printing of default values.
-     */
-    def codeStringToXml(text: String): NodeSeq = {
-      var goodLookingXml: NodeSeq = NodeSeq.Empty
-      var indent = 0
-      for (c <- text) c match {
-        case '{' => indent+=1
-          goodLookingXml ++= Text("{")
-        case '}' => indent-=1
-          goodLookingXml ++= Text("}")
-        case '\n' =>
-          goodLookingXml++= <br/> ++ indentation
-        case _ => goodLookingXml ++= Text(c.toString)
-      }
-      def indentation:NodeSeq = {
-        var indentXml = NodeSeq.Empty
-        for (x <- 1 to indent) indentXml ++= Text("  ")
-        indentXml
-      }
-      goodLookingXml
-    }
-
-    var index = 0
-    val str = tree.expression
-    val length = str.length
-    var myXml: NodeSeq = NodeSeq.Empty
-    for ((from, (member, to)) <- tree.refEntity.toSeq) {
-      if (index < from) {
-        myXml ++= codeStringToXml(str.substring(index,from))
-        index = from
-      }
-      if (index == from) {
-        member match {
-          case mbr: DocTemplateEntity =>
-            val link = relativeLinkTo(mbr)
-            myXml ++= <span class="name"><a href={link}>{str.substring(from, to)}</a></span>
-          case mbr: MemberEntity =>
-            val anchor = "#" + mbr.signature
-            val link = relativeLinkTo(mbr.inTemplate)
-            myXml ++= <span class="name"><a href={link ++ anchor}>{str.substring(from, to)}</a></span>
-        }
-        index = to
-      }
-    }
-
-    if (index <= length-1)
-      myXml ++= codeStringToXml(str.substring(index, length ))
-
-    if (length < 36)
-      <span class="symbol">{ myXml }</span>
-    else
-      <span class="defval" name={ myXml }>{ "..." }</span>
-  }
-
-  private def argumentsToHtml(argss: List[ValueArgument]): NodeSeq = {
-    def argumentsToHtml0(argss: List[ValueArgument]): NodeSeq = argss match {
-      case Nil         => NodeSeq.Empty
-      case arg :: Nil  => argumentToHtml(arg)
-      case arg :: args => argumentToHtml(arg) ++ scala.xml.Text(", ") ++ argumentsToHtml0(args)
-    }
-    <span class="args">({ argumentsToHtml0(argss) })</span>
-  }
-
-  private def argumentToHtml(arg: ValueArgument): NodeSeq = {
-    <span>
-      {
-        arg.parameter match {
-          case Some(param) => Text(param.name + " = ")
-          case None => NodeSeq.Empty
-        }
-      }
-      { treeToHtml(arg.value) }
-    </span>
-  }
-
-  private def bodyToStr(body: comment.Body): String =
-    body.blocks flatMap (blockToStr(_)) mkString ""
-
-  private def blockToStr(block: comment.Block): String = block match {
-    case comment.Paragraph(in) => inlineToStr(in)
-    case _ => block.toString
-  }
-
-  private def inlineToStr(inl: comment.Inline): String = inl match {
-    case comment.Chain(items) => items flatMap (inlineToStr(_)) mkString ""
-    case comment.Italic(in) => inlineToStr(in)
-    case comment.Bold(in) => inlineToStr(in)
-    case comment.Underline(in) => inlineToStr(in)
-    case comment.Monospace(in) => inlineToStr(in)
-    case comment.Text(text) => text
-    case comment.Summary(in) => inlineToStr(in)
-    case _ => inl.toString
-  }
-
-  private def typeToHtmlWithStupidTypes(tpl: TemplateEntity, superTpl: TemplateEntity, superType: TypeEntity): NodeSeq =
-    if (tpl.universe.settings.useStupidTypes.value)
-      superTpl match {
-        case dtpl: DocTemplateEntity =>
-          val sig = signature(dtpl, false, true) \ "_"
-          sig
-        case tpl: TemplateEntity =>
-          Text(tpl.name)
-      }
-    else
-      typeToHtml(superType, true)
-
-  private def constraintToHtml(constraint: Constraint): NodeSeq = constraint match {
-    case ktcc: KnownTypeClassConstraint =>
-      scala.xml.Text(ktcc.typeExplanation(ktcc.typeParamName) + " (" + ktcc.typeParamName + ": ") ++
-        templateToHtml(ktcc.typeClassEntity) ++ scala.xml.Text(")")
-    case tcc: TypeClassConstraint =>
-      scala.xml.Text(tcc.typeParamName + " is ") ++
-        <a href="http://stackoverflow.com/questions/2982276/what-is-a-context-bound-in-scala" target="_blank">
-        context-bounded</a> ++ scala.xml.Text(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++
-        templateToHtml(tcc.typeClassEntity) ++ scala.xml.Text(")")
-    case impl: ImplicitInScopeConstraint =>
-      scala.xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, true) ++ scala.xml.Text(" is in scope")
-    case eq: EqualTypeParamConstraint =>
-      scala.xml.Text(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++
-        typeToHtml(eq.rhs, true) ++ scala.xml.Text(")")
-    case bt: BoundedTypeParamConstraint =>
-      scala.xml.Text(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " +
-        bt.upperBound.name + " (" + bt.typeParamName + " >: ") ++
-        typeToHtml(bt.lowerBound, true) ++ scala.xml.Text(" <: ") ++
-        typeToHtml(bt.upperBound, true) ++ scala.xml.Text(")")
-    case lb: LowerBoundedTypeParamConstraint =>
-      scala.xml.Text(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++
-        typeToHtml(lb.lowerBound, true) ++ scala.xml.Text(")")
-    case ub: UpperBoundedTypeParamConstraint =>
-      scala.xml.Text(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++
-        typeToHtml(ub.upperBound, true) ++ scala.xml.Text(")")
-  }
-}
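// Illustration only (hypothetical names): the "context-bounded" wording rendered by
// constraintToHtml above refers to a context bound, which desugars to an implicit
// evidence parameter, as this minimal sketch shows.
object ContextBoundSketch {
  def smallest[T: Ordering](xs: List[T]): T = xs.min                               // T is context-bounded by Ordering
  def smallestDesugared[T](xs: List[T])(implicit ev: Ordering[T]): T = xs.min(ev)  // equivalent desugaring
}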
-
-object Template {
-  /* Vlad: Lesson learned the hard way: don't put any stateful code that references the model here,
-   * it won't be garbage collected and you'll end up filling the heap with garbage */
-
-  def lowerFirstLetter(s: String) = if (s.length >= 1) s.substring(0,1).toLowerCase() + s.substring(1) else s
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
deleted file mode 100644
index ec00cac..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/**
- * @author Vlad Ureche
- */
-package scala.tools.nsc.doc
-package html.page.diagram
-
-object DiagramStats {
-
-  class TimeTracker(title: String) {
-    var totalTime: Long = 0L
-    var maxTime: Long = 0L
-    var instances: Int = 0
-
-    def addTime(ms: Long) = {
-      if (maxTime < ms)
-        maxTime = ms
-      totalTime += ms
-      instances += 1
-    }
-
-    def printStats(print: String => Unit) = {
-      if (instances == 0)
-        print(title + ": no stats gathered")
-      else {
-        print("  " + title)
-        print("  " + "=" * title.length)
-        print("    count:        " + instances + " items")
-        print("    total time:   " + totalTime + " ms")
-        print("    average time: " + (totalTime/instances) + " ms")
-        print("    maximum time: " + maxTime + " ms")
-        print("")
-      }
-    }
-  }
-
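// Usage sketch only (not in the original file): how a TimeTracker is fed and reported.
//   val tracker = new TimeTracker("example phase")
//   tracker.addTime(12)
//   tracker.addTime(30)
//   tracker.printStats(println)   // count: 2 items, total: 42 ms, average: 21 ms, maximum: 30 ms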
-  private[this] val filterTrack = new TimeTracker("diagrams model filtering")
-  private[this] val modelTrack = new TimeTracker("diagrams model generation")
-  private[this] val dotGenTrack = new TimeTracker("dot diagram generation")
-  private[this] val dotRunTrack = new TimeTracker("dot process running")
-  private[this] val svgTrack = new TimeTracker("svg processing")
-  private[this] var brokenImages = 0
-  private[this] var fixedImages = 0
-
-  def printStats(settings: Settings) = {
-    if (settings.docDiagramsDebug.value) {
-      settings.printMsg("\nDiagram generation running time breakdown:\n")
-      filterTrack.printStats(settings.printMsg)
-      modelTrack.printStats(settings.printMsg)
-      dotGenTrack.printStats(settings.printMsg)
-      dotRunTrack.printStats(settings.printMsg)
-      svgTrack.printStats(settings.printMsg)
-      println("  Broken images: " + brokenImages)
-      println("  Fixed images: " + fixedImages)
-      println("")
-    }
-  }
-
-  def addFilterTime(ms: Long) = filterTrack.addTime(ms)
-  def addModelTime(ms: Long) = modelTrack.addTime(ms)
-  def addDotGenerationTime(ms: Long) = dotGenTrack.addTime(ms)
-  def addDotRunningTime(ms: Long) = dotRunTrack.addTime(ms)
-  def addSvgTime(ms: Long) = svgTrack.addTime(ms)
-
-  def addBrokenImage(): Unit = brokenImages += 1
-  def addFixedImage(): Unit = fixedImages += 1
-}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
deleted file mode 100644
index 8473678..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ /dev/null
@@ -1,511 +0,0 @@
-/**
- * @author Damien Obrist
- * @author Vlad Ureche
- */
-package scala.tools.nsc
-package doc
-package html
-package page
-package diagram
-
-import scala.xml.{NodeSeq, XML, PrefixedAttribute, Elem, MetaData, Null, UnprefixedAttribute}
-import scala.collection.immutable._
-import javax.xml.parsers.SAXParser
-import model._
-import model.diagram._
-
-class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
-
-  // the page where the diagram will be embedded
-  private var page: HtmlPage = null
-  // path to the "lib" folder relative to the page
-  private var pathToLib: String = null
-  // maps nodes to unique indices
-  private var node2Index: Map[Node, Int] = null
-  // maps an index to its corresponding node
-  private var index2Node: Map[Int, Node] = null
-  // true if the current diagram is a class diagram
-  private var isInheritanceDiagram = false
-  // incoming implicit nodes (needed for determining the CSS class of a node)
-  private var incomingImplicitNodes: List[Node] = List()
-  // the suffix used when there are too many classes to show
-  private final val MultiSuffix = " classes/traits"
-  // used to generate unique node and edge ids (i.e. avoid conflicts with multiple diagrams)
-  private var counter = 0
-
-  def generate(diagram: Diagram, template: DocTemplateEntity, page: HtmlPage):NodeSeq = {
-    counter = counter + 1;
-    this.page = page
-    pathToLib = "../" * (page.templateToPath(template).size - 1) + "lib/"
-    val dot = generateDot(diagram)
-    val result = generateSVG(dot, template)
-    // clean things up a bit, so we don't leave garbage on the heap
-    this.page = null
-    node2Index = null
-    index2Node = null
-    incomingImplicitNodes = List()
-    result
-  }
-
-  /**
-   * Generates a dot string for a given diagram.
-   */
-  private def generateDot(d: Diagram) = {
-    // inheritance nodes (all nodes except thisNode and implicit nodes)
-    var nodes: List[Node] = null
-    // inheritance edges (all edges except implicit edges)
-    var edges: List[(Node, List[Node])] = null
-
-    // timing
-    var tDot = -System.currentTimeMillis
-
-    // variables specific to class diagrams:
-    // current node of a class diagram
-    var thisNode:Node = null
-    var subClasses = List[Node]()
-    var superClasses = List[Node]()
-    var incomingImplicits = List[Node]()
-    var outgoingImplicits = List[Node]()
-    isInheritanceDiagram = false
-
-    d match {
-      case InheritanceDiagram(_thisNode, _superClasses, _subClasses, _incomingImplicits, _outgoingImplicits) =>
-
-        def textTypeEntity(text: String) =
-          new TypeEntity {
-            val name = text
-            def refEntity: SortedMap[Int, (base.LinkTo, Int)] = SortedMap()
-          }
-
-        // it seems dot chokes on node names over 8000 chars, so let's limit the size of the string
-        // conservatively, we'll limit it to 4000, to be sure:
-        def limitSize(str: String) = if (str.length > 4000) str.substring(0, 3996) + " ..." else str
-
-        // avoid overcrowding the diagram:
-        //   if there are too many super / sub / implicit nodes, represent
-    //   them by one node with a corresponding tooltip
-        superClasses = if (_superClasses.length > settings.docDiagramsMaxNormalClasses.value) {
-          val superClassesTooltip = Some(limitSize(_superClasses.map(_.tpe.name).mkString(", ")))
-          List(NormalNode(textTypeEntity(_superClasses.length + MultiSuffix), None)(superClassesTooltip))
-        } else _superClasses
-
-        subClasses = if (_subClasses.length > settings.docDiagramsMaxNormalClasses.value) {
-          val subClassesTooltip = Some(limitSize(_subClasses.map(_.tpe.name).mkString(", ")))
-          List(NormalNode(textTypeEntity(_subClasses.length + MultiSuffix), None)(subClassesTooltip))
-        } else _subClasses
-
-        incomingImplicits = if (_incomingImplicits.length > settings.docDiagramsMaxImplicitClasses.value) {
-          val incomingImplicitsTooltip = Some(limitSize(_incomingImplicits.map(_.tpe.name).mkString(", ")))
-          List(ImplicitNode(textTypeEntity(_incomingImplicits.length + MultiSuffix), None)(incomingImplicitsTooltip))
-        } else _incomingImplicits
-
-        outgoingImplicits = if (_outgoingImplicits.length > settings.docDiagramsMaxImplicitClasses.value) {
-          val outgoingImplicitsTooltip = Some(limitSize(_outgoingImplicits.map(_.tpe.name).mkString(", ")))
-          List(ImplicitNode(textTypeEntity(_outgoingImplicits.length + MultiSuffix), None)(outgoingImplicitsTooltip))
-        } else _outgoingImplicits
-
-        thisNode = _thisNode
-        nodes = List()
-        edges = (thisNode -> superClasses) :: subClasses.map(_ -> List(thisNode))
-        node2Index = (thisNode::subClasses:::superClasses:::incomingImplicits:::outgoingImplicits).zipWithIndex.toMap
-        isInheritanceDiagram = true
-        incomingImplicitNodes = incomingImplicits
-      case _ =>
-        nodes = d.nodes
-        edges = d.edges
-        node2Index = d.nodes.zipWithIndex.toMap
-        incomingImplicitNodes = List()
-    }
-    index2Node = node2Index map {_.swap}
-
-    val implicitsDot = {
-      if (!isInheritanceDiagram) ""
-      else {
-        // dot cluster containing thisNode
-        val thisCluster = "subgraph clusterThis {\n" +
-          "style=\"invis\"\n" +
-          node2Dot(thisNode) +
-        "}"
-        // dot cluster containing incoming implicit nodes, if any
-        val incomingCluster = {
-          if(incomingImplicits.isEmpty) ""
-          else "subgraph clusterIncoming {\n" +
-            "style=\"invis\"\n" +
-            incomingImplicits.reverse.map(n => node2Dot(n)).mkString +
-            (if (incomingImplicits.size > 1)
-              incomingImplicits.map(n => "node" + node2Index(n)).mkString(" -> ") +
-              " [constraint=\"false\", style=\"invis\", minlen=\"0.0\"];\n"
-            else "") +
-          "}"
-        }
-        // dot cluster containing outgoing implicit nodes, if any
-        val outgoingCluster = {
-          if(outgoingImplicits.isEmpty) ""
-          else "subgraph clusterOutgoing {\n" +
-            "style=\"invis\"\n" +
-            outgoingImplicits.reverse.map(n => node2Dot(n)).mkString +
-            (if (outgoingImplicits.size > 1)
-              outgoingImplicits.map(n => "node" + node2Index(n)).mkString(" -> ") +
-              " [constraint=\"false\", style=\"invis\", minlen=\"0.0\"];\n"
-            else "") +
-          "}"
-        }
-
-        // assemble clusters into another cluster
-        val incomingTooltip = incomingImplicits.map(_.name).mkString(", ") + " can be implicitly converted to " + thisNode.name
-        val outgoingTooltip =  thisNode.name + " can be implicitly converted to " + outgoingImplicits.map(_.name).mkString(", ")
-        "subgraph clusterAll {\n" +
-      	"style=\"invis\"\n" +
-          outgoingCluster + "\n" +
-      	  thisCluster + "\n" +
-      	  incomingCluster + "\n" +
-      	  // incoming implicit edge
-      	  (if (!incomingImplicits.isEmpty) {
-      	    val n = incomingImplicits.last
-      	    "node" + node2Index(n) +" -> node" + node2Index(thisNode) +
-      	    " [id=\"" + cssClass(n, thisNode) + "|" + node2Index(n) + "_" + node2Index(thisNode) + "\", tooltip=\"" + incomingTooltip + "\"" +
-      	    ", constraint=\"false\", minlen=\"2\", ltail=\"clusterIncoming\", lhead=\"clusterThis\", label=\"implicitly\"];\n"
-      	  } else "") +
-      	  // outgoing implicit edge
-      	  (if (!outgoingImplicits.isEmpty) {
-      	    val n = outgoingImplicits.head
-      	    "node" + node2Index(thisNode) + " -> node" + node2Index(n) +
-      	    " [id=\"" + cssClass(thisNode, n) + "|" + node2Index(thisNode) + "_" + node2Index(n) + "\", tooltip=\"" + outgoingTooltip + "\"" +
-      	    ", constraint=\"false\", minlen=\"2\", ltail=\"clusterThis\", lhead=\"clusterOutgoing\", label=\"implicitly\"];\n"
-      	  } else "") +
-        "}"
-      }
-    }
-
-    // assemble graph
-    val graph = "digraph G {\n" +
-      // graph / node / edge attributes
-      graphAttributesStr +
-      "node [" + nodeAttributesStr + "];\n" +
-      "edge [" + edgeAttributesStr + "];\n" +
-      implicitsDot + "\n" +
-      // inheritance nodes
-      nodes.map(n => node2Dot(n)).mkString +
-      subClasses.map(n => node2Dot(n)).mkString +
-      superClasses.map(n => node2Dot(n)).mkString +
-      // inheritance edges
-      edges.map{ case (from, tos) => tos.map(to => {
-        val id = "graph" + counter + "_" + node2Index(to) + "_" + node2Index(from)
-        // the X -> Y edge is inverted twice to keep the diagram flowing the right way
-        // that is, an edge from node X to Y will result in a dot instruction nodeY -> nodeX [dir="back"]
-        "node" + node2Index(to) + " -> node" + node2Index(from) +
-        " [id=\"" + cssClass(to, from) + "|" + id + "\", " +
-        "tooltip=\"" + from.name + (if (from.name.endsWith(MultiSuffix)) " are subtypes of " else " is a subtype of ") +
-          to.name + "\", dir=\"back\", arrowtail=\"empty\"];\n"
-      }).mkString}.mkString +
-    "}"
-
-    tDot += System.currentTimeMillis
-    DiagramStats.addDotGenerationTime(tDot)
-
-    graph
-  }
-
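// A hypothetical sketch (not produced by this file) of the shape of the dot text that
// generateDot assembles above; node names, ids and attribute values are illustrative only.
object DotOutputSketch {
  val sketch: String =
    """digraph G {
      |rankdir="TB";
      |node [shape="rectangle", style="filled", fontname="Arial", fontsize="10.00"];
      |edge [color="#d4d4d4", arrowsize="0.5", fontname="Arial"];
      |node0 [label=<<TABLE BORDER="0" CELLBORDER="0"><TR><TD VALIGN="MIDDLE">MyClass</TD></TR></TABLE>>, id="this class|graph1_0"];
      |node1 [label=<<TABLE BORDER="0" CELLBORDER="0"><TR><TD VALIGN="MIDDLE">AnyRef</TD></TR></TABLE>>, id="class|graph1_1"];
      |node1 -> node0 [id="inheritance|graph1_1_0", tooltip="MyClass is a subtype of AnyRef", dir="back", arrowtail="empty"];
      |}""".stripMargin
}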
-  /**
-   * Generates the dot string of a given node.
-   */
-  private def node2Dot(node: Node) = {
-
-    // escape HTML characters in node names
-    def escape(name: String) = name.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;");
-
-    // assemble node attributes in a map
-    var attr = scala.collection.mutable.Map[String, String]()
-
-    // link
-    node.doctpl match {
-      case Some(tpl) => attr += "URL" -> (page.relativeLinkTo(tpl) + "#inheritance-diagram")
-      case _ =>
-    }
-
-    // tooltip
-    node.tooltip match {
-      case Some(text) => attr += "tooltip" -> text
-      // show full name where available (instead of TraversableOps[A] show scala.collection.parallel.TraversableOps[A])
-      case None if node.tpl.isDefined => attr += "tooltip" -> node.tpl.get.qualifiedName
-      case _ =>
-    }
-
-    // styles
-    if(node.isImplicitNode)
-      attr ++= implicitStyle
-    else if(node.isOutsideNode)
-      attr ++= outsideStyle
-    else if(node.isTraitNode)
-      attr ++= traitStyle
-    else if(node.isClassNode)
-      attr ++= classStyle
-    else if(node.isObjectNode)
-      attr ++= objectStyle
-    else if(node.isTypeNode)
-      attr ++= typeStyle
-    else
-      attr ++= defaultStyle
-
-    // HTML label
-    var name = escape(node.name)
-    var img = ""
-    if(node.isTraitNode)
-      img = "trait_diagram.png"
-    else if(node.isClassNode)
-      img = "class_diagram.png"
-    else if(node.isObjectNode)
-      img = "object_diagram.png"
-    else if(node.isTypeNode)
-      img = "type_diagram.png"
-
-    if(!img.equals("")) {
-      img = "<TD><IMG SCALE=\"TRUE\" SRC=\"" + settings.outdir.value + "/lib/" + img + "\" /></TD>"
-      name = name + " "
-    }
-    val label = "<<TABLE BORDER=\"0\" CELLBORDER=\"0\">" +
-    		       "<TR>" + img + "<TD VALIGN=\"MIDDLE\">" + name + "</TD></TR>" +
-    		    "</TABLE>>"
-
-    // dot does not allow specifying a CSS class, therefore
-    // set the id to "{class}|{id}", which will be used in
-    // the transform method
-    val id = "graph" + counter + "_" + node2Index(node)
-    attr += ("id" -> (cssClass(node) + "|" + id))
-
-    // return dot string
-    "node" + node2Index(node) + " [label=" + label + "," + flatten(attr.toMap) + "];\n"
-  }
-
-  /**
-   * Returns the CSS class for an edge connecting node1 and node2.
-   */
-  private def cssClass(node1: Node, node2: Node): String = {
-    if (node1.isImplicitNode && node2.isThisNode)
-      "implicit-incoming"
-    else if (node1.isThisNode && node2.isImplicitNode)
-      "implicit-outgoing"
-    else
-      "inheritance"
-  }
-
-  /**
-   * Returns the CSS class for a node.
-   */
-  private def cssClass(node: Node): String =
-    if (node.isImplicitNode && incomingImplicitNodes.contains(node))
-      "implicit-incoming" + cssBaseClass(node, "", " ")
-    else if (node.isImplicitNode)
-      "implicit-outgoing" + cssBaseClass(node, "", " ")
-    else if (node.isThisNode)
-      "this" + cssBaseClass(node, "", " ")
-    else if (node.isOutsideNode)
-      "outside" + cssBaseClass(node, "", " ")
-    else
-      cssBaseClass(node, "default", "")
-
-  private def cssBaseClass(node: Node, default: String, space: String) =
-    if (node.isClassNode)
-      space + "class"
-    else if (node.isTraitNode)
-      space + "trait"
-    else if (node.isObjectNode)
-      space + "object"
-    else if (node.isTypeNode)
-      space + "type"
-    else
-      default
-
-  /**
-   * Calls dot with a given dot string and returns the SVG output.
-   */
-  private def generateSVG(dotInput: String, template: DocTemplateEntity) = {
-    val dotOutput = DiagramGenerator.getDotRunner.feedToDot(dotInput, template)
-    var tSVG = -System.currentTimeMillis
-
-    val result = if (dotOutput != null) {
-      val src = scala.io.Source.fromString(dotOutput);
-      try {
-        val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false)
-        val doc = cpa.document()
-        if (doc != null)
-          transform(doc.docElem)
-        else
-          NodeSeq.Empty
-      } catch {
-        case exc: Exception =>
-          if (settings.docDiagramsDebug.value) {
-            settings.printMsg("\n\n**********************************************************************")
-            settings.printMsg("Encountered an error while generating page for " + template.qualifiedName)
-            settings.printMsg(dotInput.toString.split("\n").mkString("\nDot input:\n\t","\n\t",""))
-            settings.printMsg(dotOutput.toString.split("\n").mkString("\nDot output:\n\t","\n\t",""))
-            settings.printMsg(exc.getStackTrace.mkString("\nException: " + exc.toString + ":\n\tat ", "\n\tat ",""))
-            settings.printMsg("\n\n**********************************************************************")
-          } else {
-            settings.printMsg("\nThe diagram for " + template.qualifiedName + " could not be created due to an internal error.")
-            settings.printMsg("Use " + settings.docDiagramsDebug.name + " for more information and please file this as a bug.")
-          }
-          NodeSeq.Empty
-      }
-    } else
-      NodeSeq.Empty
-
-    tSVG += System.currentTimeMillis
-    DiagramStats.addSvgTime(tSVG)
-
-    result
-  }
-
-  /**
-   * Transforms the SVG generated by dot:
-   * - adds a class attribute to the SVG element
-   * - changes the path of the node images from absolute to relative
-   * - assigns id and class attributes to nodes and edges
-   * - removes title elements
-   */
-  private def transform(e:scala.xml.Node): scala.xml.Node = e match {
-    // add an id and class attribute to the SVG element
-    case Elem(prefix, "svg", attribs, scope, child @ _*) => {
-      val klass = if (isInheritanceDiagram) "class-diagram" else "package-diagram"
-      Elem(prefix, "svg", attribs, scope, child map(x => transform(x)) : _*) %
-      new UnprefixedAttribute("id", "graph" + counter, Null) %
-      new UnprefixedAttribute("class", klass, Null)
-    }
-    // change the path of the node images from absolute to relative
-    case img @ <image></image> => {
-      val href = (img \ "@{http://www.w3.org/1999/xlink}href").toString
-      val file = href.substring(href.lastIndexOf("/") + 1, href.size)
-      img.asInstanceOf[Elem] %
-      new PrefixedAttribute("xlink", "href", pathToLib + file, Null)
-    }
-    // assign id and class attributes to edges and nodes:
-    // the id attribute generated by dot has the format: "{class}|{id}"
-    case g @ Elem(prefix, "g", attribs, scope, children @ _*) if (List("edge", "node").contains((g \ "@class").toString)) => {
-      var res = new Elem(prefix, "g", attribs, scope, (children map(x => transform(x))): _*)
-      val dotId = (g \ "@id").toString
-      if (dotId.count(_ == '|') == 1) {
-        val Array(klass, id) = dotId.toString.split("\\|")
-        /* Sometimes dot "forgets" to add the image -- that's very annoying, but it seems pretty random, and simple
-         * tests like executing it 20K times and diffing the output don't trigger the bug -- so it's up to us to place the image
-         * back in the node */
-        val kind = getKind(klass)
-        if (kind != "")
-          if (((g \ "a" \ "image").isEmpty)) {
-            DiagramStats.addBrokenImage()
-            val xposition = getPosition(g, "x", -22)
-            val yposition = getPosition(g, "y", -11.3334)
-            if (xposition.isDefined && yposition.isDefined) {
-              val imageNode = <image xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href={ ("./lib/" + kind + "_diagram.png") } width="16px" height="16px" preserveAspectRatio="xMinYMin meet" x={ xposition.get.toString } y={ yposition.get.toString }/>
-              val anchorNode = (g \ "a") match {
-                case Seq(Elem(prefix, "a", attribs, scope, children @ _*)) =>
-                  transform(new Elem(prefix, "a", attribs, scope, (children ++ imageNode): _*))
-                case _ =>
-                  g \ "a"
-              }
-              res = new Elem(prefix, "g", attribs, scope, anchorNode: _*)
-              DiagramStats.addFixedImage()
-            }
-          }
-        res % new UnprefixedAttribute("id", id, Null) %
-        new UnprefixedAttribute("class", (g \ "@class").toString + " " + klass, Null)
-      }
-      else res
-    }
-    // remove titles
-    case <title>{ _* }</title> =>
-      scala.xml.Text("")
-    // apply recursively
-    case Elem(prefix, label, attribs, scope, child @ _*) =>
-      Elem(prefix, label, attribs, scope, child map(x => transform(x)) : _*)
-    case x => x
-  }
-
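// Illustration of the "{class}|{id}" round trip described in node2Dot and consumed in
// transform above; the concrete values are hypothetical.
object IdSplitSketch {
  val dotId = "this class|graph3_7"
  val Array(klass, id) = dotId.split("\\|")   // klass == "this class", id == "graph3_7"
}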
-  def getKind(klass: String): String =
-    if (klass.contains("class")) "class"
-    else if (klass.contains("trait")) "trait"
-    else if (klass.contains("object")) "object"
-    else ""
-
-  def getPosition(g: scala.xml.Node, axis: String, offset: Double): Option[Double] = {
-    val node = g \ "a" \ "text" \ ("@" + axis)
-    if (node.isEmpty)
-      None
-    else
-      Some(node.toString.toDouble + offset)
-  }
-
-  /* graph / node / edge attributes */
-
-  private val graphAttributes: Map[String, String] = Map(
-      "compound" -> "true",
-      "rankdir" -> "TB"
-  )
-
-  private val nodeAttributes = Map(
-    "shape" -> "rectangle",
-    "style" -> "filled",
-    "penwidth" -> "1",
-    "margin" -> "0.08,0.01",
-    "width" -> "0.0",
-    "height" -> "0.0",
-    "fontname" -> "Arial",
-    "fontsize" -> "10.00"
-  )
-
-  private val edgeAttributes = Map(
-    "color" -> "#d4d4d4",
-    "arrowsize" -> "0.5",
-    "fontcolor" -> "#aaaaaa",
-    "fontsize" -> "10.00",
-    "fontname" -> "Arial"
-  )
-
-  private val defaultStyle = Map(
-    "color" -> "#ababab",
-    "fillcolor" -> "#e1e1e1",
-    "fontcolor" -> "#7d7d7d",
-    "margin" -> "0.1,0.04"
-  )
-
-  private val implicitStyle = Map(
-    "color" -> "#ababab",
-    "fillcolor" -> "#e1e1e1",
-    "fontcolor" -> "#7d7d7d"
-  )
-
-  private val outsideStyle = Map(
-    "color" -> "#ababab",
-    "fillcolor" -> "#e1e1e1",
-    "fontcolor" -> "#7d7d7d"
-  )
-
-  private val traitStyle = Map(
-    "color" -> "#37657D",
-    "fillcolor" -> "#498AAD",
-    "fontcolor" -> "#ffffff"
-  )
-
-  private val classStyle = Map(
-    "color" -> "#115F3B",
-    "fillcolor" -> "#0A955B",
-    "fontcolor" -> "#ffffff"
-  )
-
-  private val objectStyle = Map(
-    "color" -> "#102966",
-    "fillcolor" -> "#3556a7",
-    "fontcolor" -> "#ffffff"
-  )
-
-  private val typeStyle = Map(
-    "color" -> "#115F3B",
-    "fillcolor" -> "#0A955B",
-    "fontcolor" -> "#ffffff"
-  )
-
-  private def flatten(attributes: Map[String, String]) = attributes.map{ case (key, value) => key + "=\"" + value + "\"" }.mkString(", ")
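// Illustration only: flatten(Map("shape" -> "rectangle", "penwidth" -> "1"))
// yields the dot attribute list  shape="rectangle", penwidth="1"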
-
-  private val graphAttributesStr = graphAttributes.map{ case (key, value) => key + "=\"" + value + "\";\n" }.mkString
-  private val nodeAttributesStr = flatten(nodeAttributes)
-  private val edgeAttributesStr = flatten(edgeAttributes)
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
deleted file mode 100644
index 5cdd5c7..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
+++ /dev/null
@@ -1,228 +0,0 @@
-package scala.tools.nsc
-package doc
-package html
-package page
-package diagram
-
-import java.io.InputStream
-import java.io.OutputStream
-import java.io.InputStreamReader
-import java.io.OutputStreamWriter
-import java.io.BufferedWriter
-import java.io.BufferedReader
-import java.io.IOException
-import scala.sys.process._
-import scala.concurrent.SyncVar
-
-import model._
-import model.diagram._
-
-/** This class takes care of running the graphviz dot utility */
-class DotRunner(settings: doc.Settings) {
-
-  private[this] var dotRestarts = 0
-  private[this] var dotProcess: DotProcess  = null
-
-  def feedToDot(dotInput: String, template: DocTemplateEntity): String = {
-
-    if (dotProcess == null) {
-      if (dotRestarts < settings.docDiagramsDotRestart.value) {
-        if (dotRestarts != 0)
-          settings.printMsg("Graphviz will be restarted...\n")
-        dotRestarts += 1
-        dotProcess = new DotProcess(settings)
-      } else
-        return null
-    }
-
-    val tStart = System.currentTimeMillis
-    val result = dotProcess.feedToDot(dotInput, template.qualifiedName)
-    val tFinish = System.currentTimeMillis
-    DiagramStats.addDotRunningTime(tFinish - tStart)
-
-    if (result == null) {
-      dotProcess.cleanup()
-      dotProcess = null
-      if (dotRestarts == settings.docDiagramsDotRestart.value) {
-        settings.printMsg("\n")
-        settings.printMsg("**********************************************************************")
-        settings.printMsg("Diagrams will be disabled for this run because the graphviz dot tool")
-        settings.printMsg("has malfunctioned too many times. These scaladoc flags may help:")
-        settings.printMsg("")
-        val baseList = List(settings.docDiagramsDebug,
-                            settings.docDiagramsDotPath,
-                            settings.docDiagramsDotRestart,
-                            settings.docDiagramsDotTimeout)
-        val width    = (baseList map (_.helpSyntax.length)).max
-        def helpStr(s: doc.Settings#Setting) = ("%-" + width + "s") format (s.helpSyntax) + "  " + s.helpDescription
-        baseList.foreach((sett: doc.Settings#Setting) => settings.printMsg(helpStr(sett)))
-        settings.printMsg("\nPlease note that graphviz package version 2.26 or above is required.")
-        settings.printMsg("**********************************************************************\n\n")
-
-      }
-    }
-
-    result
-  }
-
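// A condensed, hypothetical sketch of the restart policy implemented above: retry a
// failing external tool a bounded number of times, then give up and report nothing.
object RestartPolicySketch {
  def runWithRestarts[A](maxRestarts: Int)(attempt: () => Option[A]): Option[A] = {
    var restarts = 0
    var result: Option[A] = None
    while (result.isEmpty && restarts < maxRestarts) {
      restarts += 1
      result = attempt()   // on failure the caller would also tear down and recreate the process
    }
    result
  }
}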
-  def cleanup() =
-    if (dotProcess != null)
-      dotProcess.cleanup()
-}
-
-class DotProcess(settings: doc.Settings) {
-
-  @volatile var error: Boolean = false           // signal an error
-  val inputString = new SyncVar[String]                 // used for the dot process input
-  val outputString = new SyncVar[String]                // used for the dot process output
-  val errorBuffer: StringBuffer = new StringBuffer() // buffer used for both dot process error console AND logging
-
-  // set in only one place, in the main thread
-  var process: Process = null
-  var templateName: String = ""
-  var templateInput: String = ""
-
-  def feedToDot(input: String, template: String): String = {
-
-    templateName = template
-    templateInput = input
-
-    try {
-
-      // process creation
-      if (process == null) {
-        val procIO = new ProcessIO(inputFn(_), outputFn(_), errorFn(_))
-        val processBuilder: ProcessBuilder = Seq(settings.docDiagramsDotPath.value, "-Tsvg")
-        process = processBuilder.run(procIO)
-      }
-
-      // pass the input and wait for the output
-      assert(!inputString.isSet)
-      assert(!outputString.isSet)
-      inputString.put(input)
-      var result = outputString.take(settings.docDiagramsDotTimeout.value * 1000)
-      if (error) result = null
-
-      result
-
-    } catch {
-      case exc: Throwable =>
-        errorBuffer.append("  Main thread in " + templateName + ": " +
-          (if (exc.isInstanceOf[NoSuchElementException]) "Timeout" else "Exception: " + exc))
-        error = true
-        return null
-    }
-  }
-
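// A stripped-down, hypothetical sketch (not the real implementation) of the SyncVar
// handshake used above: the main thread puts a request, a worker thread takes it and
// puts the reply back, and the main thread waits for it with a timeout, as feedToDot
// does with the dot process.
object SyncVarHandshakeSketch {
  import scala.concurrent.SyncVar

  val request  = new SyncVar[String]
  val response = new SyncVar[String]

  def startWorker(): Unit = new Thread(new Runnable {
    def run(): Unit = {
      val in = request.take()        // blocks until the main thread puts input
      response.put(in.toUpperCase)   // hand the result back
    }
  }).start()

  def roundTrip(msg: String): String = {
    startWorker()
    request.put(msg)
    response.take(5000)              // wait at most 5 s; throws if the worker never answers
  }
}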
-  def cleanup(): Unit = {
-
-    // we'll need to know if there was any error for reporting
-    val _error = error
-
-    if (process != null) {
-      // if there's no error, this should exit cleanly
-      if (!error) feedToDot("<finish>", "<finishing>")
-
-      // just in case there's any thread hanging, this will take it out of the loop
-      error = true
-      process.destroy()
-      // we'll need to unblock the input again
-      if (!inputString.isSet) inputString.put("")
-      if (outputString.isSet) outputString.take()
-    }
-
-    if (_error) {
-      if (settings.docDiagramsDebug.value) {
-        settings.printMsg("\n**********************************************************************")
-        settings.printMsg("The graphviz dot diagram tool has malfunctioned and will be restarted.")
-        settings.printMsg("\nThe following is the log of the failure:")
-        settings.printMsg(errorBuffer.toString)
-        settings.printMsg("  Cleanup: Last template: " + templateName)
-        settings.printMsg("  Cleanup: Last dot input: \n    " + templateInput.replaceAll("\n","\n    ") + "\n")
-        settings.printMsg("  Cleanup: Dot path: " + settings.docDiagramsDotPath.value)
-        if (process != null)
-          settings.printMsg("  Cleanup: Dot exit code: " + process.exitValue)
-        settings.printMsg("**********************************************************************")
-      } else {
-        // we shouldn't just sit there for 50s not reporting anything, no?
-        settings.printMsg("Graphviz dot encountered an error when generating the diagram for:")
-        settings.printMsg(templateName)
-        settings.printMsg("These are usually spurious errors, but if you notice a persistant error on")
-        settings.printMsg("a diagram, please use the " + settings.docDiagramsDebug.name + " flag and report a bug with the output.")
-      }
-    }
-  }
-
-  /* The standard input passing function */
-  private[this] def inputFn(stdin: OutputStream): Unit = {
-    val writer = new BufferedWriter(new OutputStreamWriter(stdin))
-    try {
-      var input = inputString.take()
-
-      while (!error) {
-        if (input == "<finish>") {
-          // empty => signal to finish
-          stdin.close()
-          return
-        } else {
-          // send output to dot
-          writer.write(input + "\n\n")
-          writer.flush()
-        }
-
-        if (!error) input = inputString.take()
-      }
-      stdin.close()
-    } catch {
-      case exc: Throwable =>
-        error = true
-        stdin.close()
-        errorBuffer.append("  Input thread in " + templateName + ": Exception: " + exc + "\n")
-    }
-  }
-
-  private[this] def outputFn(stdOut: InputStream): Unit = {
-    val reader = new BufferedReader(new InputStreamReader(stdOut))
-    var buffer: StringBuilder = new StringBuilder()
-    try {
-      var line = reader.readLine
-      while (!error && line != null) {
-        buffer.append(line + "\n")
-        // signal the last element in the svg (only for output)
-        if (line == "</svg>") {
-          outputString.put(buffer.toString)
-          buffer.setLength(0)
-        }
-        if (error) { stdOut.close(); return }
-        line = reader.readLine
-      }
-      assert(!outputString.isSet)
-      outputString.put(buffer.toString)
-      stdOut.close()
-    } catch {
-      case exc: Throwable =>
-        error = true
-        stdOut.close()
-        errorBuffer.append("  Output thread in " + templateName + ": Exception: " + exc + "\n")
-    }
-  }
-
-  private[this] def errorFn(stdErr: InputStream): Unit = {
-    val reader = new BufferedReader(new InputStreamReader(stdErr))
-    var buffer: StringBuilder = new StringBuilder()
-    try {
-      var line = reader.readLine
-      while (line != null) {
-        errorBuffer.append("  DOT <error console>: " + line + "\n")
-        error = true
-        line = reader.readLine
-      }
-      stdErr.close()
-    } catch {
-      case exc: Throwable =>
-        error = true
-        stdErr.close()
-        errorBuffer.append("  Error thread in " + templateName + ": Exception: " + exc + "\n")
-    }
-  }
-}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
deleted file mode 100644
index 96689ae..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
+++ /dev/null
@@ -1,536 +0,0 @@
-// © 2009–2010 EPFL/LAMP
-// code by Gilles Dubochet with contributions by Johannes Rudolph and "spiros"
-
-var topLevelTemplates = undefined;
-var topLevelPackages = undefined;
-
-var scheduler = undefined;
-
-var kindFilterState = undefined;
-var focusFilterState = undefined;
-
-var title = $(document).attr('title');
-
-var lastFragment = "";
-
-$(document).ready(function() {
-    $('body').layout({
-        west__size: '20%',
-        center__maskContents: true
-    });
-    $('#browser').layout({
-        center__paneSelector: ".ui-west-center"
-        //,center__initClosed:true
-        ,north__paneSelector: ".ui-west-north"
-    });
-    $('iframe').bind("load", function(){
-        var subtitle = $(this).contents().find('title').text();
-        $(document).attr('title', (title ? title + " - " : "") + subtitle);
-
-        setUrlFragmentFromFrameSrc();
-    });
-
-    // workaround for IE's iframe sizing lack of smartness
-    if($.browser.msie) {
-        function fixIFrame() {
-            $('iframe').height($(window).height() )
-        }
-        $('iframe').bind("load",fixIFrame)
-        $('iframe').bind("resize",fixIFrame)
-    }
-
-    scheduler = new Scheduler();
-    scheduler.addLabel("init", 1);
-    scheduler.addLabel("focus", 2);
-    scheduler.addLabel("filter", 4);
-
-    prepareEntityList();
-
-    configureTextFilter();
-    configureKindFilter();
-    configureEntityList();
-
-    setFrameSrcFromUrlFragment();
-
-    // If the url fragment changes, adjust the src of iframe "template".
-    $(window).bind('hashchange', function() {
-      if(lastFragment != window.location.hash) {
-        lastFragment = window.location.hash;
-        setFrameSrcFromUrlFragment();
-      }
-    });
-});
-
-// Set the iframe's src according to the fragment of the current url.
-// fragment = "#scala.Either" => iframe url = "scala/Either.html"
-// fragment = "#scala.Either at isRight:Boolean" => iframe url = "scala/Either.html#isRight:Boolean"
-function setFrameSrcFromUrlFragment() {
-  var fragment = location.hash.slice(1);
-  if(fragment) {
-    var loc = fragment.split("@")[0].replace(/\./g, "/");
-    if(loc.indexOf(".html") < 0) loc += ".html";
-    if(fragment.indexOf('@') > 0) loc += ("#" + fragment.split("@", 2)[1]);
-    frames["template"].location.replace(loc);
-  }
-  else
-    frames["template"].location.replace("package.html");
-}
-
-// Set the url fragment according to the src of the iframe "template".
-// iframe url = "scala/Either.html"  =>  url fragment = "#scala.Either"
-// iframe url = "scala/Either.html#isRight:Boolean"  =>  url fragment = "#scala.Either at isRight:Boolean"
-function setUrlFragmentFromFrameSrc() {
-  try {
-    var commonLength = location.pathname.lastIndexOf("/");
-    var frameLocation = frames["template"].location;
-    var relativePath = frameLocation.pathname.slice(commonLength + 1);
-
-    if(!relativePath || frameLocation.pathname.indexOf("/") < 0)
-      return;
-
-    // Add #, remove ".html" and replace "/" with "."
-    fragment = "#" + relativePath.replace(/\.html$/, "").replace(/\//g, ".");
-
-    // Add the frame's hash after an @
-    if(frameLocation.hash) fragment += ("@" + frameLocation.hash.slice(1));
-
-    // Use replace to not add history items
-    lastFragment = fragment;
-    location.replace(fragment);
-  }
-  catch(e) {
-    // Chrome doesn't allow reading the iframe's location when
-    // used on the local file system.
-  }
-}
-
-var Index = {};
-
-(function (ns) {
-    function openLink(t, type) {
-        var href;
-        if (type == 'object') {
-            href = t['object'];
-        } else {
-            href = t['class'] || t['trait'] || t['case class'] || t['type'];
-        }
-        return [
-            '<a class="tplshow" target="template" href="',
-            href,
-            '"><img width="13" height="13" class="',
-            type,
-            ' icon" src="lib/',
-            type,
-            '.png" />'
-        ].join('');
-    }
-
-    function createPackageHeader(pack) {
-        return [
-            '<li class="pack">',
-            '<a class="packfocus">focus</a><a class="packhide">hide</a>',
-            '<a class="tplshow" target="template" href="',
-            pack.replace(/\./g, '/'),
-            '/package.html">',
-            pack,
-            '</a></li>'
-        ].join('');
-    };
-
-    function createListItem(template) {
-        var inner = '';
-
-
-        if (template.object) {
-            inner += openLink(template, 'object');
-        }
-
-        if (template['class'] || template['trait'] || template['case class'] || template['type']) {
-            inner += (inner == '') ?
-                '<div class="placeholder" />' : '</a>';
-            inner += openLink(template, template['trait'] ? 'trait' : template['type'] ? 'type' : 'class');
-        } else {
-            inner += '<div class="placeholder"/>';
-        }
-
-        return [
-            '<li>',
-            inner,
-            '<span class="tplLink">',
-            template.name.replace(/^.*\./, ''),
-            '</span></a></li>'
-        ].join('');
-    }
-
-
-    ns.createPackageTree = function (pack, matched, focused) {
-        var html = $.map(matched, function (child, i) {
-            return createListItem(child);
-        }).join('');
-
-        var header;
-        if (focused && pack == focused) {
-            header = '';
-        } else {
-            header = createPackageHeader(pack);
-        }
-
-        return [
-            '<ol class="packages">',
-            header,
-            '<ol class="templates">',
-            html,
-            '</ol></ol>'
-        ].join('');
-    }
-
-    ns.keys = function (obj) {
-        var result = [];
-        var key;
-        for (key in obj) {
-            result.push(key);
-        }
-        return result;
-    }
-
-    var hiddenPackages = {};
-
-    function subPackages(pack) {
-        return $.grep($('#tpl ol.packages'), function (element, index) {
-            // Use a distinct name: the original shadowed `pack`, so the
-            // comparison was always made against the element's own name.
-            var subPack = $('li.pack > .tplshow', element).text();
-            return subPack.indexOf(pack + '.') == 0;
-        });
-    }
-
-    ns.hidePackage = function (ol) {
-        var selected = $('li.pack > .tplshow', ol).text();
-        hiddenPackages[selected] = true;
-
-        $('ol.templates', ol).hide();
-
-        $.each(subPackages(selected), function (index, element) {
-            $(element).hide();
-        });
-    }
-
-    ns.showPackage = function (ol, state) {
-        var selected = $('li.pack > .tplshow', ol).text();
-        hiddenPackages[selected] = false;
-
-        $('ol.templates', ol).show();
-
-        $.each(subPackages(selected), function (index, element) {
-            $(element).show();
-
-            // When the filter is in "packs" state,
-            // we don't want to show the `.templates`
-            var key = $('li.pack > .tplshow', element).text();
-            if (hiddenPackages[key] || state == 'packs') {
-                $('ol.templates', element).hide();
-            }
-        });
-    }
-
-})(Index);
-
-function configureEntityList() {
-    kindFilterSync();
-    configureHideFilter();
-    configureFocusFilter();
-    textFilter();
-}
-
-/* Updates the list of entities (i.e. the content of the #tpl element) from the raw form generated by Scaladoc to a
-   form suitable for display. In particular, it adds the class, object and other kind icons, and it configures links
-   to open in the right frame. Furthermore, it sets the two reference top-level entity lists (topLevelTemplates and
-   topLevelPackages) that serve as the reference for resetting the list when needed.
-   Be advised: this function should only be called once, on page load. */
-function prepareEntityList() {
-    var classIcon = $("#library > img.class");
-    var traitIcon = $("#library > img.trait");
-    var typeIcon = $("#library > img.type");
-    var objectIcon = $("#library > img.object");
-    var packageIcon = $("#library > img.package");
-
-    $('#tpl li.pack > a.tplshow').attr("target", "template");
-    $('#tpl li.pack').each(function () {
-        $("span.class", this).each(function() { $(this).replaceWith(classIcon.clone()); });
-        $("span.trait", this).each(function() { $(this).replaceWith(traitIcon.clone()); });
-        $("span.type", this).each(function() { $(this).replaceWith(typeIcon.clone()); });
-        $("span.object", this).each(function() { $(this).replaceWith(objectIcon.clone()); });
-        $("span.package", this).each(function() { $(this).replaceWith(packageIcon.clone()); });
-    });
-    $('#tpl li.pack')
-        .prepend("<a class='packhide'>hide</a>")
-        .prepend("<a class='packfocus'>focus</a>");
-}
-
-/* Handles all key presses while scrolling around with keyboard shortcuts in left panel */
-function keyboardScrolldownLeftPane() {
-    scheduler.add("init", function() {
-        $("#textfilter input").blur();
-        var $items = $("#tpl li");
-        $items.first().addClass('selected');
-
-        $(window).bind("keydown", function(e) {
-            var $old = $items.filter('.selected'),
-                $new;
-
-            switch ( e.keyCode ) {
-
-            case 9: // tab
-                $old.removeClass('selected');
-                break;
-
-            case 13: // enter
-                $old.removeClass('selected');
-                var $url = $old.children().filter('a:last').attr('href');
-                $("#template").attr("src",$url);
-                break;
-
-            case 27: // escape
-                $old.removeClass('selected');
-                $(window).unbind(e);
-                $("#textfilter input").focus();
-
-                break;
-
-            case 38: // up
-                $new = $old.prev();
-
-                if (!$new.length) {
-                    $new = $old.parent().prev();
-                }
-
-                if ($new.is('ol') && $new.children(':last').is('ol')) {
-                    $new = $new.children().children(':last');
-                } else if ($new.is('ol')) {
-                    $new = $new.children(':last');
-                }
-
-                break;
-
-            case 40: // down
-                $new = $old.next();
-                if (!$new.length) {
-                    $new = $old.parent().parent().next();
-                }
-                if ($new.is('ol')) {
-                    $new = $new.children(':first');
-                }
-                break;
-            }
-
-            if ($new.is('li')) {
-                $old.removeClass('selected');
-                $new.addClass('selected');
-            } else if (e.keyCode == 38) {
-                $(window).unbind(e);
-                $("#textfilter input").focus();
-            }
-        });
-    });
-}
-
-/* Configures the text filter  */
-function configureTextFilter() {
-    scheduler.add("init", function() {
-        $("#textfilter").append("<span class='pre'/><span class='input'><input id='index-input' type='text' accesskey='/'/></span><span class='post'/>");
-        var input = $("#textfilter input");
-        resizeFilterBlock();
-        input.bind('keyup', function(event) {
-            if (event.keyCode == 27) { // escape
-                input.attr("value", "");
-            }
-            if (event.keyCode == 40) { // down arrow
-                $(window).unbind("keydown");
-                keyboardScrolldownLeftPane();
-                return false;
-            }
-            textFilter();
-        });
-        input.bind('keydown', function(event) {
-            if (event.keyCode == 9) { // tab
-                $("#template").contents().find("#mbrsel-input").focus();
-                input.attr("value", "");
-                return false;
-            }
-            textFilter();
-        });
-        input.focus(function(event) { input.select(); });
-    });
-    scheduler.add("init", function() {
-        $("#textfilter > .post").click(function(){
-            $("#textfilter input").attr("value", "");
-            textFilter();
-        });
-    });
-}
-
-function compilePattern(query) {
-    var escaped = query.replace(/([\.\*\+\?\|\(\)\[\]\\])/g, '\\$1');
-
-    if (query.toLowerCase() != query) {
-        // Regexp that matches CamelCase subbits: "BiSe" is
-        // "[a-z]*Bi[a-z]*Se" and matches "BitSet", "ABitSet", ...
-        return new RegExp(escaped.replace(/([A-Z])/g,"[a-z]*$1"));
-    }
-    else { // if query is all lower case make a normal case insensitive search
-        return new RegExp(escaped, "i");
-    }
-}
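
For clarity, the same camel-case-aware pattern compilation as a small Scala sketch (a transcription for illustration only): a query containing upper-case letters, such as "BiSe", becomes "[a-z]*Bi[a-z]*Se" and matches "BitSet", while an all-lower-case query falls back to a plain case-insensitive search.

    import scala.util.matching.Regex

    def compilePattern(query: String): Regex = {
      // Escape regex metacharacters, then widen camel-case humps.
      val escaped = query.replaceAll("""([.*+?|()\[\]\\])""", """\\$1""")
      if (query != query.toLowerCase) escaped.replaceAll("([A-Z])", "[a-z]*$1").r
      else ("(?i)" + escaped).r
    }

    // compilePattern("BiSe").findFirstIn("BitSet")  // Some("BitSe")
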
-
-// Filters the displayed templates and packages according to the query currently entered in the text filter
-// input. This function should be made less blocking.
-function textFilter() {
-    scheduler.clear("filter");
-
-    $('#tpl').html('');
-
-    var query = $("#textfilter input").attr("value") || '';
-    var queryRegExp = compilePattern(query);
-
-    var index = 0;
-
-    var searchLoop = function () {
-        var packages = Index.keys(Index.PACKAGES).sort();
-
-        while (packages[index]) {
-            var pack = packages[index];
-            var children = Index.PACKAGES[pack];
-            index++;
-
-            if (focusFilterState) {
-                if (pack == focusFilterState ||
-                    pack.indexOf(focusFilterState + '.') == 0) {
-                    ;
-                } else {
-                    continue;
-                }
-            }
-
-            var matched = $.grep(children, function (child, i) {
-                return queryRegExp.test(child.name);
-            });
-
-            if (matched.length > 0) {
-                $('#tpl').append(Index.createPackageTree(pack, matched,
-                                                         focusFilterState));
-                scheduler.add('filter', searchLoop);
-                return;
-            }
-        }
-
-        $('#tpl a.packfocus').click(function () {
-            focusFilter($(this).parent().parent());
-        });
-        configureHideFilter();
-    };
-
-    scheduler.add('filter', searchLoop);
-}
-
-/* Configures the hide tool by adding the hide link to all packages. */
-function configureHideFilter() {
-    $('#tpl li.pack a.packhide').click(function () {
-        var packhide = $(this)
-        var action = packhide.text();
-
-        var ol = $(this).parent().parent();
-
-        if (action == "hide") {
-            Index.hidePackage(ol);
-            packhide.text("show");
-        }
-        else {
-            Index.showPackage(ol, kindFilterState);
-            packhide.text("hide");
-        }
-        return false;
-    });
-}
-
-/* Configures the focus tool by adding the focus bar in the filter box (initially hidden), and by adding the focus
-   link to all packages. */
-function configureFocusFilter() {
-    scheduler.add("init", function() {
-        focusFilterState = null;
-        if ($("#focusfilter").length == 0) {
-            $("#filter").append("<div id='focusfilter'>focused on <span class='focuscoll'></span> <a class='focusremove'><img class='icon' src='lib/remove.png'/></a></div>");
-            $("#focusfilter > .focusremove").click(function(event) {
-                textFilter();
-
-                $("#focusfilter").hide();
-                $("#kindfilter").show();
-                resizeFilterBlock();
-                focusFilterState = null;
-            });
-            $("#focusfilter").hide();
-            resizeFilterBlock();
-        }
-    });
-    scheduler.add("init", function() {
-        $('#tpl li.pack a.packfocus').click(function () {
-            focusFilter($(this).parent());
-            return false;
-        });
-    });
-}
-
-/* Focuses the entity index on a specific package. To do so, it will copy the sub-templates and sub-packages of the
-   focused package into the top-level templates and packages position of the index; top-level entries outside the
-   focused package are no longer shown.
-     @param package The <li> element that corresponds to the package in the entity index */
-function focusFilter(package) {
-    scheduler.clear("filter");
-
-    var currentFocus = $('li.pack > .tplshow', package).text();
-    $("#focusfilter > .focuscoll").empty();
-    $("#focusfilter > .focuscoll").append(currentFocus);
-
-    $("#focusfilter").show();
-    $("#kindfilter").hide();
-    resizeFilterBlock();
-    focusFilterState = currentFocus;
-    kindFilterSync();
-
-    textFilter();
-}
-
-function configureKindFilter() {
-    scheduler.add("init", function() {
-        kindFilterState = "all";
-        $("#filter").append("<div id='kindfilter'><a>display packages only</a></div>");
-        $("#kindfilter > a").click(function(event) { kindFilter("packs"); });
-        resizeFilterBlock();
-    });
-}
-
-function kindFilter(kind) {
-    if (kind == "packs") {
-        kindFilterState = "packs";
-        kindFilterSync();
-        $("#kindfilter > a").replaceWith("<a>display all entities</a>");
-        $("#kindfilter > a").click(function(event) { kindFilter("all"); });
-    }
-    else {
-        kindFilterState = "all";
-        kindFilterSync();
-        $("#kindfilter > a").replaceWith("<a>display packages only</a>");
-        $("#kindfilter > a").click(function(event) { kindFilter("packs"); });
-    }
-}
-
-/* Applies the kind filter. */
-function kindFilterSync() {
-    if (kindFilterState == "all" || focusFilterState != null) {
-        $("#tpl a.packhide").text('hide');
-        $("#tpl ol.templates").show();
-    } else {
-        $("#tpl a.packhide").text('show');
-        $("#tpl ol.templates").hide();
-    }
-}
-
-function resizeFilterBlock() {
-    $("#tpl").css("top", $("#filter").outerHeight(true));
-}
diff --git a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala
deleted file mode 100644
index 9ba8914..0000000
--- a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala
+++ /dev/null
@@ -1,114 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author  Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package model
-
-import base.comment._
-
-import reporters.Reporter
-import scala.collection._
-import scala.reflect.internal.util.{NoPosition, Position}
-import scala.language.postfixOps
-
-/** The comment parser transforms raw comment strings into `Comment` objects.
-  * Call `parse` to run the parser. Note that the parser is stateless and
-  * should only be built once for a given Scaladoc run.
-  *
-  * @param reporter The reporter on which user messages (error, warnings) should be printed.
-  *
-  * @author Manohar Jonnalagedda
-  * @author Gilles Dubochet */
-trait CommentFactory extends base.CommentFactoryBase {
-  thisFactory: ModelFactory with CommentFactory with MemberLookup =>
-
-  val global: Global
-  import global.{ reporter, definitions, Symbol }
-
-  protected val commentCache = mutable.HashMap.empty[(Symbol, TemplateImpl), Comment]
-
-  def addCommentBody(sym: Symbol, inTpl: TemplateImpl, docStr: String, docPos: global.Position): Symbol = {
-    commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos, None)
-    sym
-  }
-
-  def comment(sym: Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl): Option[Comment] = {
-    val key = (sym, inTpl)
-    if (commentCache isDefinedAt key)
-      Some(commentCache(key))
-    else {
-      val c = defineComment(sym, currentTpl, inTpl)
-      if (c isDefined) commentCache += (sym, inTpl) -> c.get
-      c
-    }
-  }
-
-  /** A comment is usually created by the parser, however for some special
-    * cases we have to pass some of the `inTpl` comments (those of the parent class, for example)
-    * on to the comment of the symbol.
-    * This function handles some of those cases: param accessors and the primary constructor. */
-  def defineComment(sym: Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl):Option[Comment] = {
-
-    // Param accessor case:
-    // we just need the @param argument; we put it into the body
-    if( sym.isParamAccessor &&
-        inTpl.comment.isDefined &&
-        inTpl.comment.get.valueParams.isDefinedAt(sym.encodedName)) {
-      val comContent = Some(inTpl.comment.get.valueParams(sym.encodedName))
-      Some(createComment(body0 = comContent))
-    }
-
-    // Primary constructor case
-    // We need some content from the class definition: @constructor for the body,
-    // plus @param and @deprecated; more can be added if necessary
-    else if (sym.isPrimaryConstructor && inTpl.comment.isDefined ) {
-      val tplComment = inTpl.comment.get
-      // If there is nothing to put into the comment there is no need to create it
-      if(tplComment.constructor.isDefined ||
-        tplComment.throws != Map.empty ||
-        tplComment.valueParams != Map.empty ||
-        tplComment.typeParams != Map.empty ||
-        tplComment.deprecated.isDefined
-        )
-        Some(createComment( body0 = tplComment.constructor,
-                            throws0 = tplComment.throws,
-                            valueParams0 = tplComment.valueParams,
-                            typeParams0 = tplComment.typeParams,
-                            deprecated0 = tplComment.deprecated
-                            ))
-      else None
-    }
-
-    // Other comment cases:
-    // the parse function will create the comment
-    else {
-      val rawComment = global.expandedDocComment(sym, inTpl.sym).trim
-      if (rawComment != "") {
-        val tplOpt = if (currentTpl.isDefined) currentTpl else Some(inTpl)
-        val c = parse(rawComment, global.rawDocComment(sym), global.docCommentPos(sym), tplOpt)
-        Some(c)
-      }
-      else None
-    }
-
-  }
-
-  protected def parse(comment: String, src: String, pos: Position, inTplOpt: Option[DocTemplateImpl] = None): Comment = {
-    assert(!inTplOpt.isDefined || inTplOpt.get != null)
-    parseAtSymbol(comment, src, pos, inTplOpt map (_.sym))
-  }
-
-  /** Parses a string containing wiki syntax into a `Comment` object.
-    * Note that the string is assumed to be clean:
-    *  - Removed Scaladoc start and end markers.
-    *  - Removed start-of-line star and one whitespace afterwards (if present).
-    *  - Removed all end-of-line whitespace.
-    *  - Only `endOfLine` is used to mark line endings. */
-  def parseWiki(string: String, pos: Position, inTplOpt: Option[DocTemplateImpl]): Body = {
-    assert(!inTplOpt.isDefined || inTplOpt.get != null)
-    parseWikiAtSymbol(string,pos, inTplOpt map (_.sym))
-  }
-}
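
The caching in `comment` above follows a simple memoise-on-miss pattern. A minimal, self-contained sketch of that pattern (hypothetical names, not part of the compiler API):

    import scala.collection.mutable

    // Look the key up in the cache; only run the expensive computation on a
    // miss, and remember a defined result for subsequent calls.
    class CachedLookup[K, V](compute: K => Option[V]) {
      private val cache = mutable.HashMap.empty[K, V]
      def apply(key: K): Option[V] =
        cache.get(key) orElse {
          val computed = compute(key)
          computed.foreach(v => cache(key) = v)
          computed
        }
    }
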
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
deleted file mode 100644
index cbc1a23..0000000
--- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala
+++ /dev/null
@@ -1,631 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Manohar Jonnalagedda
- * @author Gilles Dubochet
- */
-
-package scala.tools.nsc
-package doc
-package model
-
-import scala.collection._
-import base.comment._
-import diagram._
-
-/** An entity in a Scaladoc universe. Entities are declarations in the program and correspond to symbols in the
-  * compiler. Entities model the following Scala concepts:
-  *  - classes and traits;
-  *  - objects and packages;
-  *  - constructors;
-  *  - methods;
-  *  - values, lazy values, and variables;
-  *  - abstract type members and type aliases;
-  *  - type and value parameters;
-  *  - annotations. */
-trait Entity {
-
-  /** A unique identifier which, similar to a symbol, allows this entity to be tracked. */
-  def id: Int
-
-  /** The name of the entity. Note that the name does not qualify this entity uniquely; use its `qualifiedName`
-    * instead. */
-  def name : String
-
-  /** The qualified name of the entity. This is this entity's name preceded by the qualified name of the template
-    * of which this entity is a member. The qualified name is unique to this entity. */
-  def qualifiedName: String
-
-  /** The template of which this entity is a member. */
-  def inTemplate: TemplateEntity
-
-  /** The list of entities such that each is a member of the entity that follows it; the first entity is always this
-    * entity, the last the root package entity. */
-  def toRoot: List[Entity]
-
-  /** The qualified name of this entity. */
-  override def toString = qualifiedName
-
-  /** The Scaladoc universe of which this entity is a member. */
-  def universe: Universe
-
-  /** The annotations attached to this entity, if any. */
-  def annotations: List[Annotation]
-
-  /** The kind of the entity */
-  def kind: String
-
-  /** Whether or not the template was defined in a package object */
-  def inPackageObject: Boolean
-
-  /** Indicates whether this entity lives in the types namespace (classes, traits, abstract/alias types) */
-  def isType: Boolean
-
-  /** Indicates whether this entity lives in the terms namespace (objects, packages, methods, values) */
-  def isTerm: Boolean
-}
-
-object Entity {
-  private def isDeprecated(x: Entity) = x match {
-    case x: MemberEntity  => x.deprecation.isDefined
-    case _                => false
-  }
-  /** Ordering deprecated things last. */
-  implicit lazy val EntityOrdering: Ordering[Entity] =
-    Ordering[(Boolean, String)] on (x => (isDeprecated(x), x.name))
-}
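
As an illustration of the ordering defined above (a toy example, not part of the patch): tupling a deprecation flag with the name sorts non-deprecated entries first, because `false` orders before `true`.

    object EntityOrderingDemo extends App {
      // (name, isDeprecated) pairs standing in for entities.
      val entries = List(("Vector", true), ("Array", false), ("List", false))
      val sorted  = entries.sortBy { case (name, deprecated) => (deprecated, name) }
      println(sorted)  // List((Array,false), (List,false), (Vector,true))
    }
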
-
-/** A template, which is either a class, trait, object or package. Depending on whether documentation is available
-  * or not, the template will be modeled as a [scala.tools.nsc.doc.model.NoDocTemplate] or a
-  * [scala.tools.nsc.doc.model.DocTemplateEntity]. */
-trait TemplateEntity extends Entity {
-
-  /** Whether this template is a package (including the root package). */
-  def isPackage: Boolean
-
-  /** Whether this template is the root package. */
-  def isRootPackage: Boolean
-
-  /** Whether this template is a trait. */
-  def isTrait: Boolean
-
-  /** Whether this template is a class. */
-  def isClass: Boolean
-
-  /** Whether this template is an object. */
-  def isObject: Boolean
-
-  /** Whether documentation is available for this template. */
-  def isDocTemplate: Boolean
-
-  /** Whether this template is a member template for which no separate documentation page is generated. */
-  def isNoDocMemberTemplate: Boolean
-
-  /** Whether this template is a case class. */
-  def isCaseClass: Boolean
-
-  /** The self-type of this template, if it differs from the template type. */
-  def selfType : Option[TypeEntity]
-}
-
-
-/** An entity that is a member of a template. All entities, including templates, are member of another entity
-  * except for parameters and annotations. Note that all members of a template are modelled, including those that are
-  * inherited and not declared locally. */
-trait MemberEntity extends Entity {
-
-  /** The comment attached to this member, if any. */
-  def comment: Option[Comment]
-
-  /** The group this member is from */
-  def group: String
-
-  /** The template of which this entity is a member. */
-  def inTemplate: DocTemplateEntity
-
-  /** The list of entities such that each is a member of the entity that follows it; the first entity is always this
-    * member, the last the root package entity. */
-  def toRoot: List[MemberEntity]
-
-  /** The templates in which this member has been declared. The first element of the list is the template that contains
-    * the currently active declaration of this member; subsequent elements are declarations that have been overridden. If
-    * the first element is equal to `inTemplate`, the member is declared locally; if not, it has been inherited. All
-    * elements of this list are in the linearization of `inTemplate`. */
-  def inDefinitionTemplates: List[TemplateEntity]
-
-  /** The qualified name of the member in its currently active declaration template. */
-  def definitionName: String
-
-  /** The visibility of this member. Note that members with restricted visibility may not be modeled in some
-    * universes. */
-  def visibility: Visibility
-
-  /** The flags that have been set for this entity. The following flags are supported: `implicit`, `sealed`, `abstract`,
-    * and `final`. */
-  def flags: List[Paragraph]
-
-  /** Some deprecation message if this member is deprecated, or none otherwise. */
-  def deprecation: Option[Body]
-
-  /** Some migration warning if this member has a migration annotation, or none otherwise. */
-  def migration: Option[Body]
-
-  @deprecated("Use `inDefinitionTemplates` instead", "2.9.0")
-  def inheritedFrom: List[TemplateEntity]
-
-  /** For members representing values: the type of the value returned by this member; for members
-    * representing types: the type itself. */
-  def resultType: TypeEntity
-
-  /** Whether this member is a method. */
-  def isDef: Boolean
-
-  /** Whether this member is a value (this excludes lazy values). */
-  def isVal: Boolean
-
-  /** Whether this member is a lazy value. */
-  def isLazyVal: Boolean
-
-  /** Whether this member is a variable. */
-  def isVar: Boolean
-
-  /** Whether this member is a constructor. */
-  def isConstructor: Boolean
-
-  /** Whether this member is an alias type. */
-  def isAliasType: Boolean
-
-  /** Whether this member is an abstract type. */
-  def isAbstractType: Boolean
-
-  /** Whether this member is a template. */
-  def isTemplate: Boolean
-
-  /** Whether this member is implicit.  */
-  def isImplicit: Boolean
-
-  /** Whether this member is abstract. */
-  def isAbstract: Boolean
-
-  /** If this symbol is a use case, the useCaseOf will contain the member it was derived from, containing the full
-    * signature and the complete parameter descriptions. */
-  def useCaseOf: Option[MemberEntity]
-
-  /** If this member originates from an implicit conversion, we set the implicit information to the correct origin */
-  def byConversion: Option[ImplicitConversion]
-
-  /** The identity of this member, used for linking */
-  def signature: String
-
-  /** Compatibility signature, will be removed from future versions */
-  def signatureCompat: String
-
-  /** Indicates whether the member is inherited by implicit conversion */
-  def isImplicitlyInherited: Boolean
-
-  /** Indicates whether there is another member with the same name in the template that will take precedence */
-  def isShadowedImplicit: Boolean
-
-  /** Indicates whether there are other implicitly inherited members that have similar signatures (and thus they all
-   *  become ambiguous) */
-  def isAmbiguousImplicit: Boolean
-
-  /** Indicates whether the implicitly inherited member is shadowed or ambiguous in its template */
-  def isShadowedOrAmbiguousImplicit: Boolean
-}
-
-object MemberEntity {
-  // Oh contravariance, contravariance, wherefore art thou contravariance?
-  // Note: the above works for both the commonly misunderstood meaning of the line and the real one.
-  implicit lazy val MemberEntityOrdering: Ordering[MemberEntity] = Entity.EntityOrdering on (x => x)
-}
-
-/** An entity that is parameterized by types */
-trait HigherKinded {
-
-  /** The type parameters of this entity. */
-  def typeParams: List[TypeParam]
-}
-
-
-/** A template (class, trait, object or package) which is referenced in the universe, but for which no further
-  * documentation is available. Only templates for which a source file is given are documented by Scaladoc. */
-trait NoDocTemplate extends TemplateEntity {
-  def kind =
-    if (isClass) "class"
-    else if (isTrait) "trait"
-    else if (isObject) "object"
-    else ""
-}
-
-/** An inherited template that was not documented in its original owner - example:
- *  in classpath:  trait T { class C } -- T (and implicitly C) are not documented
- *  in the source: trait U extends T -- C appears in U as a MemberTemplateImpl
- *    -- that is, U has a member for it but C doesn't get its own page */
-trait MemberTemplateEntity extends TemplateEntity with MemberEntity with HigherKinded {
-
-  /** The value parameters of this case class, or an empty list if this class is not a case class. As case class value
-    * parameters cannot be curried, the outer list has exactly one element. */
-  def valueParams: List[List[ValueParam]]
-
-  /** The direct super-type of this template
-      e.g: {{{class A extends B[C[Int]] with D[E]}}} will have two direct parents: class B and D
-      NOTE: we are dropping the refinement here! */
-  def parentTypes: List[(TemplateEntity, TypeEntity)]
-}
-
-/** A template (class, trait, object or package) for which documentation is available. Only templates for which
-  * a source file is given are documented by Scaladoc. */
-trait DocTemplateEntity extends MemberTemplateEntity {
-
-  /** The list of templates such that each is a member of the template that follows it; the first template is always
-    * this template, the last the root package entity. */
-  def toRoot: List[DocTemplateEntity]
-
-  /** The source file in which the current template is defined and the line where the definition starts, if they exist.
-    * A source file exists for all templates, except for those that are generated synthetically by Scaladoc. */
-  def inSource: Option[(io.AbstractFile, Int)]
-
-  /** An HTTP address at which the source of this template is available, if it is available. An address is available
-    * only if the `docsourceurl` setting has been set. */
-  def sourceUrl: Option[java.net.URL]
-
-  /** All class, trait and object templates which are part of this template's linearization, in linearization order.
-    * This template's linearization contains all of its direct and indirect super-classes and super-traits. */
-  def linearizationTemplates: List[TemplateEntity]
-
-  /** All instantiated types which are part of this template's linearization, in linearization order.
-    * This template's linearization contains all of its direct and indirect super-types. */
-  def linearizationTypes: List[TypeEntity]
-
-  /** All class, trait and object templates for which this template is a direct or indirect super-class or super-trait.
-   *  Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
-  def allSubClasses: List[DocTemplateEntity]
-
-  /** All class, trait and object templates for which this template is a *direct* super-class or super-trait.
-   *  Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
-  def directSubClasses: List[DocTemplateEntity]
-
-  /** All members of this template. If this template is a package, only templates for which documentation is available
-    * in the universe (`DocTemplateEntity`) are listed. */
-  def members: List[MemberEntity]
-
-  /** All templates that are members of this template. If this template is a package, only templates for which
-    * documentation is available  in the universe (`DocTemplateEntity`) are listed. */
-  def templates: List[TemplateEntity with MemberEntity]
-
-  /** All methods that are members of this template. */
-  def methods: List[Def]
-
-  /** All values, lazy values and variables that are members of this template. */
-  def values: List[Val]
-
-  /** All abstract types that are members of this template. */
-  def abstractTypes: List[AbstractType]
-
-  /** All type aliases that are members of this template. */
-  def aliasTypes: List[AliasType]
-
-  /** The primary constructor of this class, if it has been defined. */
-  def primaryConstructor: Option[Constructor]
-
-  /** All constructors of this class, including the primary constructor. */
-  def constructors: List[Constructor]
-
-  /** The companion of this template, or none. If a class and an object are defined as a pair of the same name, the
-    * other entity of the pair is the companion. */
-  def companion: Option[DocTemplateEntity]
-
-  /** The implicit conversions this template (class or trait, objects and packages are not affected) */
-  def conversions: List[ImplicitConversion]
-
-  /** The shadowing information for the implicitly added members */
-  def implicitsShadowing: Map[MemberEntity, ImplicitMemberShadowing]
-
-  /** Classes that can be implicitly converted to this class */
-  def incomingImplicitlyConvertedClasses: List[(DocTemplateEntity, ImplicitConversion)]
-
-  /** Classes to which this class can be implicitly converted.
-      NOTE: Some classes might not be included in the Scaladoc run, so they will be NoDocTemplateEntities */
-  def outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity, ImplicitConversion)]
-
-  /** If this template takes place in inheritance and implicit conversion relations, it will be shown in this diagram */
-  def inheritanceDiagram: Option[Diagram]
-
-  /** If this template contains other templates, such as classes and traits, they will be shown in this diagram */
-  def contentDiagram: Option[Diagram]
-
-  /** Returns the group description taken either from this template or its linearizationTypes */
-  def groupDescription(group: String): Option[Body]
-
-  /** Returns the group priority taken either from this template or its linearizationTypes */
-  def groupPriority(group: String): Int
-
-  /** Returns the group name taken either from this template or its linearizationTypes */
-  def groupName(group: String): String
-}
-
-/** A trait template. */
-trait Trait extends MemberTemplateEntity {
-  def kind = "trait"
-}
-
-/** A class template. */
-trait Class extends MemberTemplateEntity {
-  override def kind = "class"
-}
-
-/** An object template. */
-trait Object extends MemberTemplateEntity {
-  def kind = "object"
-}
-
-/** A package template. A package is in the universe if it is declared as a package object, or if it
-  * contains at least one template. */
-trait Package extends DocTemplateEntity {
-
-  /** The package of which this package is a member. */
-  def inTemplate: Package
-
-  /** The list of packages such that each is a member of the package that follows it; the first package is always this
-    * package, the last the root package. */
-  def toRoot: List[Package]
-
-  /** All packages that are members of this package. */
-  def packages: List[Package]
-
-  override def kind = "package"
-}
-
-
-/** The root package, which contains directly or indirectly all members in the universe. A universe
-  * contains exactly one root package. */
-trait RootPackage extends Package
-
-
-/** A non-template member (method, value, lazy value, variable, constructor, alias type, and abstract type). */
-trait NonTemplateMemberEntity extends MemberEntity {
-
-  /** Whether this member is a use case. A use case is a member which does not exist in the documented code.
-    * It corresponds to a real member, and provides a simplified, yet compatible signature for that member. */
-  def isUseCase: Boolean
-
-  /** Whether this member is a bridge member. A bridge member only exists for binary compatibility reasons
-    * and should not appear in Scaladoc. */
-  def isBridge: Boolean
-}
-
-
-/** A method (`def`) of a template. */
-trait Def extends NonTemplateMemberEntity with HigherKinded {
-
-  /** The value parameters of this method. Each parameter block of a curried method is an element of the list.
-    * Each parameter block is a list of value parameters. */
-  def valueParams : List[List[ValueParam]]
-
-  def kind = "method"
-}
-
-
-/** A constructor of a class. */
-trait Constructor extends NonTemplateMemberEntity {
-
-  /** Whether this is the primary constructor of a class. The primary constructor is defined syntactically as part of
-    * the declaration of the class. */
-  def isPrimary: Boolean
-
-  /** The value parameters of this constructor. As constructors cannot be curried, the outer list has exactly one
-    * element. */
-  def valueParams : List[List[ValueParam]]
-
-  def kind = "constructor"
-}
-
-
-/** A value (`val`), lazy val (`lazy val`) or variable (`var`) of a template. */
-trait Val extends NonTemplateMemberEntity {
-  def kind = "[lazy] value/variable"
-}
-
-
-/** An abstract type member of a template. */
-trait AbstractType extends MemberTemplateEntity with HigherKinded {
-
-  /** The lower bound for this abstract type, if it has been defined. */
-  def lo: Option[TypeEntity]
-
-  /** The upper bound for this abstract type, if it has been defined. */
-  def hi: Option[TypeEntity]
-
-  def kind = "abstract type"
-}
-
-
-/** A type alias of a template. */
-trait AliasType extends MemberTemplateEntity with HigherKinded {
-
-  /** The type aliased by this type alias. */
-  def alias: TypeEntity
-
-  def kind = "type alias"
-}
-
-
-/** A parameter to an entity. */
-trait ParameterEntity {
-
-  def name: String
-}
-
-
-/** A type parameter to a class, trait, or method. */
-trait TypeParam extends ParameterEntity with HigherKinded {
-
-  /** The variance of this type parameter. Valid values are "+", "-", and the empty string. */
-  def variance: String
-
-  /** The lower bound for this type parameter, if it has been defined. */
-  def lo: Option[TypeEntity]
-
-  /** The upper bound for this type parameter, if it has been defined. */
-  def hi: Option[TypeEntity]
-}
-
-
-/** A value parameter to a constructor or method. */
-trait ValueParam extends ParameterEntity {
-
-  /** The type of this value parameter. */
-  def resultType: TypeEntity
-
-  /** The default value of this value parameter, if it has been defined. */
-  def defaultValue: Option[TreeEntity]
-
-  /** Whether this value parameter is implicit. */
-  def isImplicit: Boolean
-}
-
-
-/** An annotation to an entity. */
-trait Annotation extends Entity {
-
-  /** The class of this annotation. */
-  def annotationClass: TemplateEntity
-
-  /** The arguments passed to the constructor of the annotation class. */
-  def arguments: List[ValueArgument]
-
-  def kind = "annotation"
-}
-
-/** A trait that signals that the member results from an implicit conversion */
-trait ImplicitConversion {
-
-  /** The source of the implicit conversion */
-  def source: DocTemplateEntity
-
-  /** The result type after the conversion */
-  def targetType: TypeEntity
-
-  /** The template corresponding to the result type of the conversion, if one exists.
-   *  Note: not all target types have a corresponding template. Examples include conversions resulting in refinement
-   *  types, so always check that the option is defined.
-   */
-  def targetTemplate: Option[TemplateEntity]
-
-  /** The components of the implicit conversion type parents */
-  def targetTypeComponents: List[(TemplateEntity, TypeEntity)]
-
-  /** The entity for the method that performed the conversion, if it's documented (or just its name, otherwise) */
-  def convertorMethod: Either[MemberEntity, String]
-
-  /** A short name of the conversion */
-  def conversionShortName: String
-
-  /** A qualified name uniquely identifying the conversion (currently: the conversion method's qualified name) */
-  def conversionQualifiedName: String
-
-  /** The entity that performed the conversion */
-  def convertorOwner: TemplateEntity
-
-  /** The constraints that the transformation puts on the type parameters */
-  def constraints: List[Constraint]
-
-  /** The members inherited by this implicit conversion */
-  def members: List[MemberEntity]
-
-  /** Is this a hidden implicit conversion (as specified in the settings) */
-  def isHiddenConversion: Boolean
-}
-
-/** Shadowing captures the information that the member is shadowed by some other members.
- *  There are two cases of implicitly added member shadowing:
- *  1) shadowing from an original class member (the class already has that member):
- *     in this case, it won't be possible to call the member directly; the type checker will fail attempting to adapt
- *     the call arguments (or, if they fit, it will call the original class' method)
- *  2) shadowing from other possible implicit conversions:
- *     this will result in an ambiguous implicit conversion error
- */
-trait ImplicitMemberShadowing {
-  /** The members that shadow the current entry; use .inTemplate to get to the template name */
-  def shadowingMembers: List[MemberEntity]
-
-  /** The members that make this implicit conversion ambiguous.
-      Note: for ambiguatingMembers you have the following invariant:
-      assert(ambiguatingMembers.forall(_.byConversion.isDefined)) */
-  def ambiguatingMembers: List[MemberEntity]
-
-  def isShadowed: Boolean = !shadowingMembers.isEmpty
-  def isAmbiguous: Boolean = !ambiguatingMembers.isEmpty
-}
-
-/** A trait that encapsulates a constraint necessary for implicit conversion */
-trait Constraint
-
-/** A constraint involving a type parameter which must be in scope */
-trait ImplicitInScopeConstraint extends Constraint {
-  /** The type of the implicit value required */
-  def implicitType: TypeEntity
-
-  /** toString for debugging */
-  override def toString = "an implicit _: " + implicitType.name + " must be in scope"
-}
-
-trait TypeClassConstraint extends ImplicitInScopeConstraint with TypeParamConstraint {
-  /** Type class name */
-  def typeClassEntity: TemplateEntity
-
-  /** toString for debugging */
-  override def toString = typeParamName + " is a class of type " + typeClassEntity.qualifiedName + " (" +
-    typeParamName + ": " + typeClassEntity.name + ")"
-}
-
-trait KnownTypeClassConstraint extends TypeClassConstraint {
-  /** Type explanation, takes the type parameter name and generates the explanation */
-  def typeExplanation: (String) => String
-
-  /** toString for debugging */
-  override def toString = typeExplanation(typeParamName) + " (" + typeParamName + ": " + typeClassEntity.name + ")"
-}
-
-/** A constraint involving a type parameter */
-trait TypeParamConstraint extends Constraint {
-  /** The type parameter involved */
-  def typeParamName: String
-}
-
-trait EqualTypeParamConstraint extends TypeParamConstraint {
-  /** The rhs */
-  def rhs: TypeEntity
-  /** toString for debugging */
-  override def toString = typeParamName + " is " + rhs.name + " (" + typeParamName + " =:= " + rhs.name + ")"
-}
-
-trait BoundedTypeParamConstraint extends TypeParamConstraint {
-  /** The lower bound */
-  def lowerBound: TypeEntity
-
-  /** The upper bound */
-  def upperBound: TypeEntity
-
-  /** toString for debugging */
-  override def toString = typeParamName + " is a superclass of " + lowerBound.name + " and a subclass of " +
-    upperBound.name + " (" + typeParamName + " >: " + lowerBound.name + " <: " + upperBound.name + ")"
-}
-
-trait LowerBoundedTypeParamConstraint extends TypeParamConstraint {
-  /** The lower bound */
-  def lowerBound: TypeEntity
-
-  /** toString for debugging */
-  override def toString = typeParamName + " is a superclass of " + lowerBound.name + " (" + typeParamName + " >: " +
-    lowerBound.name + ")"
-}
-
-trait UpperBoundedTypeParamConstraint extends TypeParamConstraint {
-  /** The upper bound */
-  def upperBound: TypeEntity
-
-  /** toString for debugging */
-  override def toString = typeParamName + " is a subclass of " + upperBound.name + " (" + typeParamName + " <: " +
-    upperBound.name + ")"
-}
diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
deleted file mode 100755
index 4ee6daf..0000000
--- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author  Pedro Furlanetto
- */
-
-package scala.tools.nsc
-package doc
-package model
-
-import scala.collection._
-
-object IndexModelFactory {
-
-  def makeIndex(universe: Universe): Index = new Index {
-
-    lazy val firstLetterIndex: Map[Char, SymbolMap] = {
-
-      object result extends mutable.HashMap[Char,SymbolMap] {
-
-        /* Owner template ordering */
-        implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase }
-        /* symbol name ordering */
-        implicit def orderingMap = math.Ordering.String
-
-        def addMember(d: MemberEntity) = {
-          val firstLetter = {
-            val ch = d.name.head.toLower
-            if(ch.isLetterOrDigit) ch else '_'
-          }
-          val letter = this.get(firstLetter).getOrElse {
-            immutable.SortedMap[String, SortedSet[MemberEntity]]()
-          }
-          val members = letter.get(d.name).getOrElse {
-            SortedSet.empty[MemberEntity](Ordering.by { _.toString })
-          } + d
-          this(firstLetter) = letter + (d.name -> members)
-        }
-      }
-
-      //@scala.annotation.tailrec // TODO
-      def gather(owner: DocTemplateEntity): Unit =
-        for(m <- owner.members if m.inDefinitionTemplates.isEmpty || m.inDefinitionTemplates.head == owner)
-          m match {
-            case tpl: DocTemplateEntity =>
-              result.addMember(tpl)
-              gather(tpl)
-            case non: MemberEntity if !non.isConstructor =>
-              result.addMember(non)
-            case x @ _ =>
-          }
-
-      gather(universe.rootPackage)
-
-      result.toMap
-
-    }
-
-  }
-
-}
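
A minimal sketch of the first-letter bucketing performed by `addMember` above, detached from the Scaladoc model types (illustrative only):

    // Group names by their lower-cased first character; anything that is not
    // a letter or digit (e.g. "::") ends up in the '_' bucket.
    def firstLetterIndex(names: List[String]): Map[Char, List[String]] =
      names.groupBy { name =>
        val ch = name.head.toLower
        if (ch.isLetterOrDigit) ch else '_'
      }

    // firstLetterIndex(List("List", "Map", "::", "min"))
    //   == Map('l' -> List("List"), 'm' -> List("Map", "min"), '_' -> List("::"))
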
diff --git a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
deleted file mode 100644
index 23259a4..0000000
--- a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-package scala.tools.nsc
-package doc
-package model
-
-import base._
-
-/** This trait extracts all required information for documentation from compilation units */
-trait MemberLookup extends base.MemberLookupBase {
-  thisFactory: ModelFactory =>
-
-  import global._
-  import definitions.{ NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass }
-
-  override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] =
-    findTemplateMaybe(sym) match {
-      case Some(tpl) => Some(LinkToTpl(tpl))
-      case None =>
-        findTemplateMaybe(site) flatMap { inTpl =>
-          inTpl.members find (_.asInstanceOf[EntityImpl].sym == sym) map (LinkToMember(_, inTpl))
-        }
-    }
-
-  override def chooseLink(links: List[LinkTo]): LinkTo = {
-    val mbrs = links.collect {
-      case lm @ LinkToMember(mbr: MemberEntity, _) => (mbr, lm)
-    }
-    if (mbrs.isEmpty)
-      links.head
-    else
-      mbrs.min(Ordering[MemberEntity].on[(MemberEntity, LinkTo)](_._1))._2
-  }
-
-  override def toString(link: LinkTo) = link match {
-    case LinkToTpl(tpl: EntityImpl) => tpl.sym.toString
-    case LinkToMember(mbr: EntityImpl, inTpl: EntityImpl) =>
-      mbr.sym.signatureString + " in " + inTpl.sym.toString
-    case _ => link.toString
-  }
-
-  override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = {
-    val sym1 =
-      if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass
-      else if (sym.isPackage) 
-        /* Get package object which has associatedFile ne null */
-        sym.info.member(newTermName("package"))
-      else sym
-    Option(sym1.associatedFile) flatMap (_.underlyingSource) flatMap { src =>
-      val path = src.path
-      settings.extUrlMapping get path map { url =>
-        LinkToExternal(name, url + "#" + name)
-      }
-    } orElse {
-      // Deprecated option.
-      settings.extUrlPackageMapping find {
-        case (pkg, _) => name startsWith pkg
-      } map {
-        case (_, url) => LinkToExternal(name, url + "#" + name)
-      }
-    }
-  }
-
-  override def warnNoLink = !settings.docNoLinkWarnings.value
-}
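
The external-link resolution in `findExternalLink` above boils down to a path-to-URL lookup plus an anchor. A stripped-down sketch with a hypothetical mapping and signature (not the actual Settings API):

    // Resolve the source path of a symbol against a user-supplied
    // path -> base-URL mapping and append the entity name as an anchor.
    def externalLink(extUrlMapping: Map[String, String],
                     sourcePath: String, name: String): Option[String] =
      extUrlMapping.get(sourcePath).map(url => url + "#" + name)

    // externalLink(Map("/deps/scala-library.jar" -> "http://www.scala-lang.org/api/current"),
    //              "/deps/scala-library.jar", "scala.Option")
    //   == Some("http://www.scala-lang.org/api/current#scala.Option")
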
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
deleted file mode 100644
index d9b173b..0000000
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ /dev/null
@@ -1,1103 +0,0 @@
-/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */
-
-package scala.tools.nsc
-package doc
-package model
-
-import base._
-import base.comment._
-import diagram._
-
-import scala.collection._
-import scala.util.matching.Regex
-
-import symtab.Flags
-
-import io._
-
-import model.{ RootPackage => RootPackageEntity }
-
-/** This trait extracts all required information for documentation from compilation units */
-class ModelFactory(val global: Global, val settings: doc.Settings) {
-  thisFactory: ModelFactory
-               with ModelFactoryImplicitSupport
-               with ModelFactoryTypeSupport
-               with DiagramFactory
-               with CommentFactory
-               with TreeFactory
-               with MemberLookup =>
-
-  import global._
-  import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass }
-  import rootMirror.{ RootPackage, RootClass, EmptyPackage }
-
-  // Defaults for member grouping, that may be overridden by the template
-  val defaultGroup = "Ungrouped"
-  val defaultGroupName = "Ungrouped"
-  val defaultGroupDesc = None
-  val defaultGroupPriority = 1000
-
-  def templatesCount = docTemplatesCache.count(_._2.isDocTemplate) - droppedPackages.size
-
-  private var _modelFinished = false
-  def modelFinished: Boolean = _modelFinished
-  private var universe: Universe = null
-
-  private def dbg(msg: String) = if (sys.props contains "scala.scaladoc.debug") println(msg)
-  protected def closestPackage(sym: Symbol) = {
-    if (sym.isPackage || sym.isPackageClass) sym
-    else sym.enclosingPackage
-  }
-
-  private def printWithoutPrefix(memberSym: Symbol, templateSym: Symbol) = {
-    dbg(
-      "memberSym " + memberSym + " templateSym " + templateSym + " encls = " +
-      closestPackage(memberSym) + ", " + closestPackage(templateSym)
-    )
-    memberSym.isOmittablePrefix || (closestPackage(memberSym) == closestPackage(templateSym))
-  }
-
-  def makeModel: Option[Universe] = {
-    val universe = new Universe { thisUniverse =>
-      thisFactory.universe = thisUniverse
-      val settings = thisFactory.settings
-      val rootPackage = modelCreation.createRootPackage
-    }
-    _modelFinished = true
-    // complete the links between model entities, everything that couldn't have been done before
-    universe.rootPackage.completeModel
-
-    Some(universe) filter (_.rootPackage != null)
-  }
-
-  // state:
-  var ids = 0
-  private val droppedPackages = mutable.Set[PackageImpl]()
-  protected val docTemplatesCache = new mutable.LinkedHashMap[Symbol, DocTemplateImpl]
-  protected val noDocTemplatesCache = new mutable.LinkedHashMap[Symbol, NoDocTemplateImpl]
-  def packageDropped(tpl: DocTemplateImpl) = tpl match {
-    case p: PackageImpl => droppedPackages(p)
-    case _ => false
-  }
-
-  def optimize(str: String): String =
-    if (str.length < 16) str.intern else str
-
-  /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
-
-  abstract class EntityImpl(val sym: Symbol, val inTpl: TemplateImpl) extends Entity {
-    val id = { ids += 1; ids }
-    val name = optimize(sym.nameString)
-    val universe = thisFactory.universe
-
-    // Debugging:
-    // assert(id != 36, sym + "  " + sym.getClass)
-    //println("Creating entity #" + id + " [" + kind + " " + qualifiedName + "] for sym " + sym.kindString + " " + sym.ownerChain.reverse.map(_.name).mkString("."))
-
-    def inTemplate: TemplateImpl = inTpl
-    def toRoot: List[EntityImpl] = this :: inTpl.toRoot
-    def qualifiedName = name
-    def annotations = sym.annotations.map(makeAnnotation)
-    def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject
-    def isType = sym.name.isTypeName
-    def isTerm = sym.name.isTermName
-  }
-
-  trait TemplateImpl extends EntityImpl with TemplateEntity {
-    override def qualifiedName: String =
-      if (inTemplate == null || inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
-    def isPackage = sym.isPackage
-    def isTrait = sym.isTrait
-    def isClass = sym.isClass && !sym.isTrait
-    def isObject = sym.isModule && !sym.isPackage
-    def isCaseClass = sym.isCaseClass
-    def isRootPackage = false
-    def isNoDocMemberTemplate = false
-    def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this))
-  }
-
-  abstract class MemberImpl(sym: Symbol, inTpl: DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
-    lazy val comment = {
-      // If the current tpl is a DocTemplate, we consider itself as the root for resolving link targets (instead of the
-      // package the class is in) -- so people can refer to methods directly [[foo]], instead of using [[MyClass.foo]]
-      // in the doc comment of MyClass
-      val thisTpl = this match {
-        case d: DocTemplateImpl => Some(d)
-        case _ => None
-      }
-      if (inTpl != null) thisFactory.comment(sym, thisTpl, inTpl) else None
-    }
-    def group = if (comment.isDefined) comment.get.group.getOrElse(defaultGroup) else defaultGroup
-    override def inTemplate = inTpl
-    override def toRoot: List[MemberImpl] = this :: inTpl.toRoot
-    def inDefinitionTemplates = this match {
-        case mb: NonTemplateMemberEntity if (mb.useCaseOf.isDefined) =>
-          mb.useCaseOf.get.inDefinitionTemplates
-        case _ =>
-          if (inTpl == null)
-            List(makeRootPackage)
-          else
-            makeTemplate(sym.owner)::(sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
-      }
-    def visibility = {
-      if (sym.isPrivateLocal) PrivateInInstance()
-      else if (sym.isProtectedLocal) ProtectedInInstance()
-      else {
-        val qual =
-          if (sym.hasAccessBoundary)
-            Some(makeTemplate(sym.privateWithin))
-          else None
-        if (sym.isPrivate) PrivateInTemplate(inTpl)
-        else if (sym.isProtected) ProtectedInTemplate(qual getOrElse inTpl)
-        else if (qual.isDefined) PrivateInTemplate(qual.get)
-        else Public()
-      }
-    }
-    def flags = {
-      val fgs = mutable.ListBuffer.empty[Paragraph]
-      if (sym.isImplicit) fgs += Paragraph(Text("implicit"))
-      if (sym.isSealed) fgs += Paragraph(Text("sealed"))
-      if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract"))
-      /* Resetting the DEFERRED flag is a little trick here for refined types: (example from scala.collection)
-       * {{{
-       *     implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] {
-       *       def isParallel = ...
-       * }}}
-       * the type the method returns is TraversableOps, which has all-abstract symbols. But in reality, it couldn't have
-       * any abstract terms, otherwise it would fail compilation. So we reset the DEFERRED flag. */
-      if (!sym.isTrait && (sym hasFlag Flags.DEFERRED) && (!isImplicitlyInherited)) fgs += Paragraph(Text("abstract"))
-      if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final"))
-      fgs.toList
-    }
-    def deprecation =
-      if (sym.isDeprecated)
-        Some((sym.deprecationMessage, sym.deprecationVersion) match {
-          case (Some(msg), Some(ver)) => parseWiki("''(Since version " + ver + ")'' " + msg, NoPosition, Some(inTpl))
-          case (Some(msg), None) => parseWiki(msg, NoPosition, Some(inTpl))
-          case (None, Some(ver)) =>  parseWiki("''(Since version " + ver + ")''", NoPosition, Some(inTpl))
-          case (None, None) => Body(Nil)
-        })
-      else
-        comment flatMap { _.deprecated }
-    def migration =
-      if(sym.hasMigrationAnnotation)
-        Some((sym.migrationMessage, sym.migrationVersion) match {
-          case (Some(msg), Some(ver)) => parseWiki("''(Changed in version " + ver + ")'' " + msg, NoPosition, Some(inTpl))
-          case (Some(msg), None) => parseWiki(msg, NoPosition, Some(inTpl))
-          case (None, Some(ver)) =>  parseWiki("''(Changed in version " + ver + ")''", NoPosition, Some(inTpl))
-          case (None, None) => Body(Nil)
-        })
-      else
-        None
-    def inheritedFrom =
-      if (inTemplate.sym == this.sym.owner || inTemplate.sym.isPackage) Nil else
-        makeTemplate(this.sym.owner) :: (sym.allOverriddenSymbols map { os => makeTemplate(os.owner) })
-    def resultType = {
-      def resultTpe(tpe: Type): Type = tpe match { // similar to finalResultType, except that it leaves singleton types alone
-        case PolyType(_, res) => resultTpe(res)
-        case MethodType(_, res) => resultTpe(res)
-        case NullaryMethodType(res) => resultTpe(res)
-        case _ => tpe
-      }
-      val tpe = if (!isImplicitlyInherited) sym.tpe else byConversion.get.toType memberInfo sym
-      makeTypeInTemplateContext(resultTpe(tpe), inTemplate, sym)
-    }
-    def isDef = false
-    def isVal = false
-    def isLazyVal = false
-    def isVar = false
-    def isImplicit = sym.isImplicit
-    def isConstructor = false
-    def isAliasType = false
-    def isAbstractType = false
-    def isAbstract =
-      // for the explanation of conversion == null see comment on flags
-      ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED)) && (!isImplicitlyInherited)) ||
-      sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic
-    def isTemplate = false
-    def signature = externalSignature(sym)
-    lazy val signatureCompat = {
-
-      def defParams(mbr: Any): String = mbr match {
-        case d: MemberEntity with Def =>
-          val paramLists: List[String] =
-            if (d.valueParams.isEmpty) Nil
-            else d.valueParams map (ps => ps map (_.resultType.name) mkString ("(",",",")"))
-          paramLists.mkString
-        case _ => ""
-      }
-
-      def tParams(mbr: Any): String = mbr match {
-        case hk: HigherKinded if !hk.typeParams.isEmpty =>
-          def boundsToString(hi: Option[TypeEntity], lo: Option[TypeEntity]): String = {
-            def bound0(bnd: Option[TypeEntity], pre: String): String = bnd match {
-              case None => ""
-              case Some(tpe) => pre ++ tpe.toString
-            }
-            bound0(hi, "<:") ++ bound0(lo, ">:")
-          }
-          "[" + hk.typeParams.map(tp => tp.variance + tp.name + tParams(tp) + boundsToString(tp.hi, tp.lo)).mkString(", ") + "]"
-        case _ => ""
-      }
-
-      (name + tParams(this) + defParams(this) +":"+ resultType.name).replaceAll("\\s","") // no spaces allowed, they break links
-    }
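-    // For illustration (hypothetical member, not taken from any particular class): a method
-    // declared as `def map[B](f: A => B): C[B]` yields the compact string "map[B](A=>B):C[B]"
-    // -- all whitespace is stripped above so the signature can be embedded in links.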
-    // these only apply for NonTemplateMemberEntities
-    def useCaseOf: Option[MemberEntity] = None
-    def byConversion: Option[ImplicitConversionImpl] = None
-    def isImplicitlyInherited = false
-    def isShadowedImplicit    = false
-    def isAmbiguousImplicit   = false
-    def isShadowedOrAmbiguousImplicit = false
-  }
-
-  /** A template that is not documented at all. The class is instantiated during lookups, to indicate that the class
-   *  exists, but should not be documented (either it's not included in the source or it's not visible)
-   */
-  class NoDocTemplateImpl(sym: Symbol, inTpl: TemplateImpl) extends EntityImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with NoDocTemplate {
-    assert(modelFinished)
-    assert(!(noDocTemplatesCache isDefinedAt sym))
-    noDocTemplatesCache += (sym -> this)
-    def isDocTemplate = false
-  }
-
-  /** An inherited template that was not documented in its original owner - example:
-   *  in classpath:  trait T { class C } -- T (and implicitly C) are not documented
-   *  in the source: trait U extends T -- C appears in U as a MemberTemplateImpl -- that is, U has a member for it
-   *  but C doesn't get its own page
-   */
-  abstract class MemberTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with MemberTemplateEntity {
-    // no templates cache for this class, each owner gets its own instance
-    override def isTemplate = true
-    def isDocTemplate = false
-    override def isNoDocMemberTemplate = true
-    lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
-    def valueParams: List[List[ValueParam]] = Nil /** TODO, these are now only computed for DocTemplates */
-
-    // Seems unused
-    // def parentTemplates =
-    //   if (sym.isPackage || sym == AnyClass)
-    //     List()
-    //   else
-    //     sym.tpe.parents.flatMap { tpe: Type =>
-    //       val tSym = tpe.typeSymbol
-    //       if (tSym != NoSymbol)
-    //         List(makeTemplate(tSym))
-    //       else
-    //         List()
-    //     } filter (_.isInstanceOf[DocTemplateEntity])
-
-    def parentTypes =
-      if (sym.isPackage || sym == AnyClass) List() else {
-        val tps = (this match {
-          case a: AliasType => sym.tpe.dealias.parents
-          case a: AbstractType => sym.info.bounds match {
-            case TypeBounds(lo, RefinedType(parents, decls)) => parents
-            case TypeBounds(lo, hi) => hi :: Nil
-            case _ => Nil
-          }
-          case _ => sym.tpe.parents
-        }) map { _.asSeenFrom(sym.thisType, sym) }
-        makeParentTypes(RefinedType(tps, EmptyScope), Some(this), inTpl)
-      }
-  }
-
-   /** The instantiation of `TemplateImpl` triggers the creation of the following entities:
-    *  All ancestors of the template and all non-package members.
-    */
-  abstract class DocTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberTemplateImpl(sym, inTpl) with DocTemplateEntity {
-    assert(!modelFinished)
-    assert(!(docTemplatesCache isDefinedAt sym), sym)
-    docTemplatesCache += (sym -> this)
-
-    if (settings.verbose.value)
-      inform("Creating doc template for " + sym)
-
-    override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
-
-    protected def inSourceFromSymbol(symbol: Symbol) =
-      if (symbol.sourceFile != null && ! symbol.isSynthetic)
-        Some((symbol.sourceFile, symbol.pos.line))
-      else
-        None
-
-    def inSource = inSourceFromSymbol(sym)
-
-    def sourceUrl = {
-      def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/")
-      val assumedSourceRoot  = fixPath(settings.sourcepath.value) stripSuffix "/"
-
-      if (!settings.docsourceurl.isDefault)
-        inSource map { case (file, _) =>
-          val filePath = fixPath(file.path).replaceFirst("^" + assumedSourceRoot, "").stripSuffix(".scala")
-          val tplOwner = this.inTemplate.qualifiedName
-          val tplName = this.name
-          val patches = new Regex("""€\{(FILE_PATH|TPL_OWNER|TPL_NAME)\}""")
-          def substitute(name: String): String = name match {
-            case "FILE_PATH" => filePath
-            case "TPL_OWNER" => tplOwner
-            case "TPL_NAME" => tplName
-          }
-          val patchedString = patches.replaceAllIn(settings.docsourceurl.value, m => java.util.regex.Matcher.quoteReplacement(substitute(m.group(1))) )
-          new java.net.URL(patchedString)
-        }
-      else None
-    }
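-    // For example (all values hypothetical): with -doc-source-url set to
-    //   https://example.org/src€{FILE_PATH}.scala#€{TPL_NAME}
-    // a template Foo compiled from <sourcepath>/com/example/Foo.scala is linked to
-    //   https://example.org/src/com/example/Foo.scala#Foo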
-
-    protected def linearizationFromSymbol(symbol: Symbol): List[(TemplateEntity, TypeEntity)] = {
-      symbol.ancestors map { ancestor =>
-        val typeEntity = makeType(symbol.info.baseType(ancestor), this)
-        val tmplEntity = makeTemplate(ancestor) match {
-          case tmpl: DocTemplateImpl  => tmpl registerSubClass this ; tmpl
-          case tmpl                   => tmpl
-        }
-        (tmplEntity, typeEntity)
-      }
-    }
-
-    lazy val linearization = linearizationFromSymbol(sym)
-    def linearizationTemplates = linearization map { _._1 }
-    def linearizationTypes = linearization map { _._2 }
-
-    /* Subclass cache */
-    private lazy val subClassesCache = (
-      if (sym == AnyRefClass) null
-      else mutable.ListBuffer[DocTemplateEntity]()
-    )
-    def registerSubClass(sc: DocTemplateEntity): Unit = {
-      if (subClassesCache != null)
-        subClassesCache += sc
-    }
-    def allSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList
-    def directSubClasses = allSubClasses.filter(_.parentTypes.map(_._1).contains(this))
-
-    /* Implicitly convertible class cache */
-    private var implicitlyConvertibleClassesCache: mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)] = null
-    def registerImplicitlyConvertibleClass(dtpl: DocTemplateImpl, conv: ImplicitConversionImpl): Unit = {
-      if (implicitlyConvertibleClassesCache == null)
-        implicitlyConvertibleClassesCache = mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)]()
-      implicitlyConvertibleClassesCache += ((dtpl, conv))
-    }
-
-    def incomingImplicitlyConvertedClasses: List[(DocTemplateImpl, ImplicitConversionImpl)] =
-      if (implicitlyConvertibleClassesCache == null)
-        List()
-      else
-        implicitlyConvertibleClassesCache.toList
-
-    // the implicit conversions are generated eagerly, but the members generated by implicit conversions are added
-    // lazily, on completeModel
-    val conversions: List[ImplicitConversionImpl] =
-      if (settings.docImplicits.value) makeImplicitConversions(sym, this) else Nil
-
-    // members as given by the compiler
-    lazy val memberSyms      = sym.info.members.filter(s => membersShouldDocument(s, this)).toList
-
-    // the inherited templates (classes, traits or objects)
-    var memberSymsLazy  = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this))
-    // the direct members (methods, values, vars, types and directly contained templates)
-    var memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_))
-    // the members generated by the symbols in memberSymsEager
-    val ownMembers      = (memberSymsEager.flatMap(makeMember(_, None, this)))
-
-    // all the members that are documented PLUS the members inherited by implicit conversions
-    var members: List[MemberImpl] = ownMembers
-
-    def templates       = members collect { case c: TemplateEntity with MemberEntity => c }
-    def methods         = members collect { case d: Def => d }
-    def values          = members collect { case v: Val => v }
-    def abstractTypes   = members collect { case t: AbstractType => t }
-    def aliasTypes      = members collect { case t: AliasType => t }
-
-    /**
-     * This is the final point in the core model creation: no DocTemplates are created after the model has finished, but
-     * inherited templates and implicit members are added to the members at this point.
-     */
-    def completeModel(): Unit = {
-      // DFS completion
-      // since alias types and abstract types have no members of their own, there's no reason for them to call completeModel
-      if (!sym.isAliasType && !sym.isAbstractType)
-        for (member <- members)
-          member match {
-            case d: DocTemplateImpl => d.completeModel
-            case _ =>
-          }
-
-      members :::= memberSymsLazy.map(modelCreation.createLazyTemplateMember(_, this))
-
-      // compute linearization to register subclasses
-      linearization
-      outgoingImplicitlyConvertedClasses
-
-      // the members generated by the symbols in memberSymsEager PLUS the members from the usecases
-      val allMembers = ownMembers ::: ownMembers.flatMap(_.useCaseOf.map(_.asInstanceOf[MemberImpl])).distinct
-      implicitsShadowing = makeShadowingTable(allMembers, conversions, this)
-      // finally, add the members generated by implicit conversions
-      members :::= conversions.flatMap(_.memberImpls)
-    }
-
-    var implicitsShadowing = Map[MemberEntity, ImplicitMemberShadowing]()
-
-    lazy val outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity, ImplicitConversionImpl)] =
-      conversions flatMap (conv =>
-        if (!implicitExcluded(conv.conversionQualifiedName))
-          conv.targetTypeComponents map {
-            case pair@(template, tpe) =>
-              template match {
-                case d: DocTemplateImpl if (d != this) => d.registerImplicitlyConvertibleClass(this, conv)
-                case _ => // nothing
-              }
-              (pair._1, pair._2, conv)
-          }
-        else List()
-      )
-
-    override def isTemplate = true
-    override def isDocTemplate = true
-    private[this] lazy val companionSymbol =
-      if (sym.isAliasType || sym.isAbstractType) {
-        inTpl.sym.info.member(sym.name.toTermName) match {
-          case NoSymbol => NoSymbol
-          case s =>
-            s.info match {
-              case ot: OverloadedType =>
-                NoSymbol
-              case _ =>
-                // that's to navigate from val Foo: FooExtractor to FooExtractor :)
-                s.info.resultType.typeSymbol
-            }
-        }
-      }
-      else
-        sym.companionSymbol
-
-    def companion =
-      companionSymbol match {
-        case NoSymbol => None
-        case comSym if !isEmptyJavaObject(comSym) && (comSym.isClass || comSym.isModule) =>
-          makeTemplate(comSym) match {
-            case d: DocTemplateImpl => Some(d)
-            case _ => None
-          }
-        case _ => None
-      }
-
-    def constructors: List[MemberImpl with Constructor] = if (isClass) members collect { case d: Constructor => d } else Nil
-    def primaryConstructor: Option[MemberImpl with Constructor] = if (isClass) constructors find { _.isPrimary } else None
-    override def valueParams =
-      // we don't want params on a class (non case class) signature
-      if (isCaseClass) primaryConstructor match {
-        case Some(const) => const.sym.paramss map (_ map (makeValueParam(_, this)))
-        case None => List()
-      }
-      else List.empty
-
-    // These are generated on demand; make sure you don't call them more than once
-    def inheritanceDiagram = makeInheritanceDiagram(this)
-    def contentDiagram = makeContentDiagram(this)
-
-    def groupSearch[T](extractor: Comment => Option[T]): Option[T] = {
-      val comments = comment +: linearizationTemplates.collect { case dtpl: DocTemplateImpl => dtpl.comment }
-      comments.flatten.map(extractor).flatten.headOption orElse {
-        Option(inTpl) flatMap (_.groupSearch(extractor))
-      }
-    }
-
-    def groupDescription(group: String): Option[Body] = groupSearch(_.groupDesc.get(group)) orElse { if (group == defaultGroup) defaultGroupDesc else None }
-    def groupPriority(group: String): Int = groupSearch(_.groupPrio.get(group)) getOrElse { if (group == defaultGroup) defaultGroupPriority else 0 }
-    def groupName(group: String): String = groupSearch(_.groupNames.get(group)) getOrElse { if (group == defaultGroup) defaultGroupName else group }
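-    // For example (hypothetical): if a base trait's comment declares "@groupname Ops Operations"
-    // and a subclass does not, groupName("Ops") on the subclass still resolves to "Operations",
-    // since groupSearch walks the linearization and then the enclosing templates.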
-  }
-
-  abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
-    override def inTemplate = inTpl
-    override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
-    override lazy val (inSource, linearization) = {
-      val representive = sym.info.members.find {
-        s => s.isPackageObject
-      } getOrElse sym
-      (inSourceFromSymbol(representive), linearizationFromSymbol(representive))
-    }
-    def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p }
-  }
-
-  abstract class RootPackageImpl(sym: Symbol) extends PackageImpl(sym, null) with RootPackageEntity
-
-  abstract class NonTemplateMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl],
-                                       override val useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl)
-           extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity {
-    override lazy val comment = {
-      val inRealTpl =
-        /* Variable precedence order for implicitly added members: take the variable definitions from ...
-         * 1. the target of the implicit conversion
-         * 2. the definition template (owner)
-         * 3. the current template
-         */
-        if (conversion.isDefined) findTemplateMaybe(conversion.get.toType.typeSymbol) match {
-          case Some(d) if d != makeRootPackage => d //in case of NoSymbol, it will give us the root package
-          case _ => findTemplateMaybe(sym.owner) match {
-            case Some(d) if d != makeRootPackage => d //in case of NoSymbol, it will give us the root package
-            case _ => inTpl
-          }
-        } else inTpl
-      if (inRealTpl != null) thisFactory.comment(sym, None, inRealTpl) else None
-    }
-
-    override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name)
-    lazy val definitionName = {
-      // this contrived name is here just to satisfy some older tests -- if you decide to remove it, be my guest, and
-      // also remove property("package object") from test/scaladoc/scalacheck/HtmlFactoryTest.scala so you don't break
-      // the test suite...
-      val packageObject = if (inPackageObject) ".package" else ""
-      if (!conversion.isDefined) optimize(inDefinitionTemplates.head.qualifiedName + packageObject + "#" + name)
-      else                       optimize(conversion.get.conversionQualifiedName + packageObject + "#" + name)
-    }
-    def isBridge = sym.isBridge
-    def isUseCase = useCaseOf.isDefined
-    override def byConversion: Option[ImplicitConversionImpl] = conversion
-    override def isImplicitlyInherited = { assert(modelFinished); conversion.isDefined }
-    override def isShadowedImplicit    = isImplicitlyInherited && inTpl.implicitsShadowing.get(this).map(_.isShadowed).getOrElse(false)
-    override def isAmbiguousImplicit   = isImplicitlyInherited && inTpl.implicitsShadowing.get(this).map(_.isAmbiguous).getOrElse(false)
-    override def isShadowedOrAmbiguousImplicit = isShadowedImplicit || isAmbiguousImplicit
-  }
-
-  abstract class NonTemplateParamMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl],
-                                            useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl)
-           extends NonTemplateMemberImpl(sym, conversion, useCaseOf, inTpl) {
-    def valueParams = {
-      val info = if (!isImplicitlyInherited) sym.info else conversion.get.toType memberInfo sym
-      info.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
-        if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl)
-      }}
-    }
-  }
-
-  abstract class ParameterImpl(val sym: Symbol, val inTpl: TemplateImpl) extends ParameterEntity {
-    val name = optimize(sym.nameString)
-  }
-
-  private trait AliasImpl {
-    def sym: Symbol
-    def inTpl: TemplateImpl
-    def alias = makeTypeInTemplateContext(sym.tpe.dealias, inTpl, sym)
-  }
-
-  private trait TypeBoundsImpl {
-    def sym: Symbol
-    def inTpl: TemplateImpl
-    def lo = sym.info.bounds match {
-      case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass =>
-        Some(makeTypeInTemplateContext(appliedType(lo, sym.info.typeParams map {_.tpe}), inTpl, sym))
-      case _ => None
-    }
-    def hi = sym.info.bounds match {
-      case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass =>
-        Some(makeTypeInTemplateContext(appliedType(hi, sym.info.typeParams map {_.tpe}), inTpl, sym))
-      case _ => None
-    }
-  }
-
-  trait HigherKindedImpl extends HigherKinded {
-    def sym: Symbol
-    def inTpl: TemplateImpl
-    def typeParams =
-      sym.typeParams map (makeTypeParam(_, inTpl))
-  }
-  /* ============== MAKER METHODS ============== */
-
-  /** This method makes it easier to work with the different kinds of symbols created by scalac by stripping down the
-   * package object abstraction and placing members directly in the package.
-   *
-   * Here's the explanation of what we do. The code:
-   *
-   * package foo {
-   *   object `package` {
-   *     class Bar
-   *   }
-   * }
-   *
-   * will yield this Symbol structure:
-   *                                       +---------+ (2)
-   *                                       |         |
-   * +---------------+         +---------- v ------- | ---+                              +--------+ (2)
-   * | package foo#1 <---(1)---- module class foo#2  |    |                              |        |
-   * +---------------+         | +------------------ | -+ |         +------------------- v ---+   |
-   *                           | | package object foo#3 <-----(1)---- module class package#4  |   |
-   *                           | +----------------------+ |         | +---------------------+ |   |
-   *                           +--------------------------+         | | class package$Bar#5 | |   |
-   *                                                                | +----------------- | -+ |   |
-   *                                                                +------------------- | ---+   |
-   *                                                                                     |        |
-   *                                                                                     +--------+
-   * (1) sourceModule
-   * (2) you get out of owners with .owner
-   *
-   * and normalizeTemplate(Bar.owner) will get us the package, instead of the module class of the package object.
-   */
-  def normalizeTemplate(aSym: Symbol): Symbol = aSym match {
-    case null | rootMirror.EmptyPackage | NoSymbol =>
-      normalizeTemplate(RootPackage)
-    case ObjectClass =>
-      normalizeTemplate(AnyRefClass)
-    case _ if aSym.isPackageObject =>
-      normalizeTemplate(aSym.owner)
-    case _ if aSym.isModuleClass =>
-      normalizeTemplate(aSym.sourceModule)
-    case _ =>
-      aSym
-  }
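-  // A sketch of the cases above, applied to the `foo`/`Bar` example:
-  //   normalizeTemplate(Bar.owner)                    // module class package#4
-  //     -> normalizeTemplate(package object foo#3)    // module class => sourceModule
-  //     -> normalizeTemplate(module class foo#2)      // package object => owner
-  //     -> normalizeTemplate(package foo#1)           // module class => sourceModule
-  //     -> package foo#1                              // plain package, returned as-is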
-
-  /**
-   * These are all model construction methods. Please do not use them directly: they call each other recursively,
-   * starting from makeModel. On the other hand, makeTemplate, makeAnnotation, makeMember and makeType should only be used
-   * after the model has been created (modelFinished=true), otherwise assertions will start failing.
-   */
-  object modelCreation {
-
-    def createRootPackage: PackageImpl = docTemplatesCache.get(RootPackage) match {
-      case Some(root: PackageImpl) => root
-      case _ => modelCreation.createTemplate(RootPackage, null) match {
-        case Some(root: PackageImpl) => root
-        case _ => sys.error("Scaladoc: Unable to create root package!")
-      }
-    }
-
-    /**
-     *  Create a template, either a package, class, trait or object
-     */
-    def createTemplate(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = {
-      // don't call this after the model finished!
-      assert(!modelFinished)
-
-      def createRootPackageComment: Option[Comment] =
-        if(settings.docRootContent.isDefault) None
-        else {
-          import Streamable._
-          Path(settings.docRootContent.value) match {
-            case f : File => {
-              val rootComment = closing(f.inputStream)(is => parse(slurp(is), "", NoPosition, Option(inTpl)))
-              Some(rootComment)
-            }
-            case _ => None
-          }
-        }
-
-      def createDocTemplate(bSym: Symbol, inTpl: DocTemplateImpl): DocTemplateImpl = {
-        assert(!modelFinished) // only created BEFORE the model is finished
-        if (bSym.isAliasType && bSym != AnyRefClass)
-          new DocTemplateImpl(bSym, inTpl) with AliasImpl with AliasType { override def isAliasType = true }
-        else if (bSym.isAbstractType)
-          new DocTemplateImpl(bSym, inTpl) with TypeBoundsImpl with AbstractType { override def isAbstractType = true }
-        else if (bSym.isModule)
-          new DocTemplateImpl(bSym, inTpl) with Object {}
-        else if (bSym.isTrait)
-          new DocTemplateImpl(bSym, inTpl) with Trait {}
-        else if (bSym.isClass || bSym == AnyRefClass)
-          new DocTemplateImpl(bSym, inTpl) with Class {}
-        else
-          sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a documentable template.")
-      }
-
-      val bSym = normalizeTemplate(aSym)
-      if (docTemplatesCache isDefinedAt bSym)
-        return Some(docTemplatesCache(bSym))
-
-      /* Three cases of templates:
-       * (1) root package -- special cased for bootstrapping
-       * (2) package
-       * (3) class/object/trait
-       */
-      if (bSym == RootPackage) // (1)
-        Some(new RootPackageImpl(bSym) {
-          override lazy val comment = createRootPackageComment
-          override val name = "root"
-          override def inTemplate = this
-          override def toRoot = this :: Nil
-          override def qualifiedName = "_root_"
-          override def inheritedFrom = Nil
-          override def isRootPackage = true
-          override lazy val memberSyms =
-            (bSym.info.members ++ EmptyPackage.info.members).toList filter { s =>
-              s != EmptyPackage && s != RootPackage
-            }
-        })
-      else if (bSym.isPackage) // (2)
-        if (settings.skipPackage(makeQualifiedName(bSym)))
-          None
-        else
-          inTpl match {
-            case inPkg: PackageImpl =>
-              val pack = new PackageImpl(bSym, inPkg) {}
-              // Used to check package pruning works:
-              //println(pack.qualifiedName)
-              if (pack.templates.filter(_.isDocTemplate).isEmpty && pack.memberSymsLazy.isEmpty) {
-                droppedPackages += pack
-                None
-              } else
-                Some(pack)
-            case _ =>
-              sys.error("'" + bSym + "' must be in a package")
-          }
-      else {
-        // no class inheritance at this point
-        assert(inOriginalOwner(bSym, inTpl), bSym + " in " + inTpl)
-        Some(createDocTemplate(bSym, inTpl))
-      }
-    }
-
-    /**
-     *  After the model is completed, no more DocTemplateEntities are created.
-     *  Therefore any symbol that still appears is:
-     *   - MemberTemplateEntity (created here)
-     *   - NoDocTemplateEntity (created in makeTemplate)
-     */
-    def createLazyTemplateMember(aSym: Symbol, inTpl: DocTemplateImpl): MemberImpl = {
-
-      // Code is duplicated because the anonymous classes are created statically
-      def createNoDocMemberTemplate(bSym: Symbol, inTpl: DocTemplateImpl): MemberTemplateImpl = {
-        assert(modelFinished) // only created AFTER the model is finished
-        if (bSym.isModule || (bSym.isAliasType && bSym.tpe.typeSymbol.isModule))
-          new MemberTemplateImpl(bSym, inTpl) with Object {}
-        else if (bSym.isTrait || (bSym.isAliasType && bSym.tpe.typeSymbol.isTrait))
-          new MemberTemplateImpl(bSym, inTpl) with Trait {}
-        else if (bSym.isClass || (bSym.isAliasType && bSym.tpe.typeSymbol.isClass))
-          new MemberTemplateImpl(bSym, inTpl) with Class {}
-        else
-          sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a member template.")
-      }
-
-      assert(modelFinished)
-      val bSym = normalizeTemplate(aSym)
-
-      if (docTemplatesCache isDefinedAt bSym)
-        docTemplatesCache(bSym)
-      else
-        docTemplatesCache.get(bSym.owner) match {
-          case Some(inTpl) =>
-            val mbrs = inTpl.members.collect({ case mbr: MemberImpl if mbr.sym == bSym => mbr })
-            assert(mbrs.length == 1)
-            mbrs.head
-          case _ =>
-            // move the class completely to the new location
-            createNoDocMemberTemplate(bSym, inTpl)
-        }
-    }
-  }
-
-  /** Get the root package */
-  def makeRootPackage: PackageImpl = docTemplatesCache(RootPackage).asInstanceOf[PackageImpl]
-
-  // TODO: Should be able to override the type
-  def makeMember(aSym: Symbol, conversion: Option[ImplicitConversionImpl], inTpl: DocTemplateImpl): List[MemberImpl] = {
-
-    def makeMember0(bSym: Symbol, useCaseOf: Option[MemberImpl]): Option[MemberImpl] = {
-      if (bSym.isGetter && bSym.isLazy)
-          Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val {
-            override lazy val comment = // The analyser does not duplicate the lazy val's DocDef when it introduces its accessor.
-              thisFactory.comment(bSym.accessed, None, inTpl.asInstanceOf[DocTemplateImpl]) // This hack should be removed after analyser is fixed.
-            override def isLazyVal = true
-          })
-      else if (bSym.isGetter && bSym.accessed.isMutable)
-        Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val {
-          override def isVar = true
-        })
-      else if (bSym.isMethod && !bSym.hasAccessorFlag && !bSym.isConstructor && !bSym.isModule) {
-        val cSym = { // This unsightly hack closes issue #4086.
-          if (bSym == definitions.Object_synchronized) {
-            val cSymInfo = (bSym.info: @unchecked) match {
-              case PolyType(ts, MethodType(List(bp), mt)) =>
-                val cp = bp.cloneSymbol.setPos(bp.pos).setInfo(definitions.byNameType(bp.info))
-                PolyType(ts, MethodType(List(cp), mt))
-            }
-            bSym.cloneSymbol.setPos(bSym.pos).setInfo(cSymInfo)
-          }
-          else bSym
-        }
-        Some(new NonTemplateParamMemberImpl(cSym, conversion, useCaseOf, inTpl) with HigherKindedImpl with Def {
-          override def isDef = true
-        })
-      }
-      else if (bSym.isConstructor)
-        if (conversion.isDefined)
-          None // don't list constructors inherited by implicit conversion
-        else
-          Some(new NonTemplateParamMemberImpl(bSym, conversion, useCaseOf, inTpl) with Constructor {
-            override def isConstructor = true
-            def isPrimary = sym.isPrimaryConstructor
-          })
-      else if (bSym.isGetter) // Scala field accessor or Java field
-        Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val {
-          override def isVal = true
-        })
-      else if (bSym.isAbstractType && !typeShouldDocument(bSym, inTpl))
-        Some(new MemberTemplateImpl(bSym, inTpl) with TypeBoundsImpl with AbstractType {
-          override def isAbstractType = true
-        })
-      else if (bSym.isAliasType && !typeShouldDocument(bSym, inTpl))
-        Some(new MemberTemplateImpl(bSym, inTpl) with AliasImpl with AliasType {
-          override def isAliasType = true
-        })
-      else if (!modelFinished && (bSym.isPackage || templateShouldDocument(bSym, inTpl)))
-        modelCreation.createTemplate(bSym, inTpl)
-      else
-        None
-    }
-
-    if (!localShouldDocument(aSym) || aSym.isModuleClass || aSym.isPackageObject || aSym.isMixinConstructor)
-      Nil
-    else {
-      val allSyms = useCases(aSym, inTpl.sym) map { case (bSym, bComment, bPos) =>
-        docComments.put(bSym, DocComment(bComment, bPos)) // put the comment in the list, don't parse it yet, closes SI-4898
-        bSym
-      }
-
-      val member = makeMember0(aSym, None)
-      if (allSyms.isEmpty)
-        member.toList
-      else
-        // Use cases replace the original definitions - SI-5054
-        allSyms flatMap { makeMember0(_, member) }
-    }
-  }
-
-  def findMember(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = {
-    val tplSym = normalizeTemplate(aSym.owner)
-    inTpl.members.find(_.sym == aSym)
-  }
-
-  @deprecated("Use `findLinkTarget` instead.", "2.10.0")
-  def findTemplate(query: String): Option[DocTemplateImpl] = {
-    assert(modelFinished)
-    docTemplatesCache.values find { (tpl: DocTemplateImpl) => tpl.qualifiedName == query && !packageDropped(tpl) && !tpl.isObject }
-  }
-
-  def findTemplateMaybe(aSym: Symbol): Option[DocTemplateImpl] = {
-    assert(modelFinished)
-    docTemplatesCache.get(normalizeTemplate(aSym)).filterNot(packageDropped(_))
-  }
-
-  def makeTemplate(aSym: Symbol): TemplateImpl = makeTemplate(aSym, None)
-
-  def makeTemplate(aSym: Symbol, inTpl: Option[TemplateImpl]): TemplateImpl = {
-    assert(modelFinished)
-
-    def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl = {
-      val bSym = normalizeTemplate(aSym)
-      noDocTemplatesCache.get(bSym) match {
-        case Some(noDocTpl) => noDocTpl
-        case None => new NoDocTemplateImpl(bSym, inTpl)
-      }
-    }
-
-    findTemplateMaybe(aSym) match {
-      case Some(dtpl) =>
-        dtpl
-      case None =>
-        val bSym = normalizeTemplate(aSym)
-        makeNoDocTemplate(bSym, if (inTpl.isDefined) inTpl.get else makeTemplate(bSym.owner))
-    }
-  }
-
-  def makeAnnotation(annot: AnnotationInfo): scala.tools.nsc.doc.model.Annotation = {
-    val aSym = annot.symbol
-    new EntityImpl(aSym, makeTemplate(aSym.owner)) with scala.tools.nsc.doc.model.Annotation {
-      lazy val annotationClass =
-        makeTemplate(annot.symbol)
-      val arguments = {
-        val paramsOpt: Option[List[ValueParam]] = annotationClass match {
-          case aClass: DocTemplateEntity with Class =>
-            val constr = aClass.constructors collectFirst {
-              case c: MemberImpl if c.sym == annot.original.symbol => c
-            }
-            constr flatMap (_.valueParams.headOption)
-          case _ => None
-        }
-        val argTrees = annot.args map makeTree
-        paramsOpt match {
-          case Some (params) =>
-            params zip argTrees map { case (param, tree) =>
-              new ValueArgument {
-                def parameter = Some(param)
-                def value = tree
-              }
-            }
-          case None => 
-            argTrees map { tree =>
-              new ValueArgument {
-                def parameter = None
-                def value = tree
-              }
-            }
-        }
-      }
-    }
-  }
-
-  /** */
-  def makeTypeParam(aSym: Symbol, inTpl: TemplateImpl): TypeParam =
-    new ParameterImpl(aSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with TypeParam {
-      def variance: String = {
-        if (sym hasFlag Flags.COVARIANT) "+"
-        else if (sym hasFlag Flags.CONTRAVARIANT) "-"
-        else ""
-      }
-    }
-
-  /** */
-  def makeValueParam(aSym: Symbol, inTpl: DocTemplateImpl): ValueParam = {
-    makeValueParam(aSym, inTpl, aSym.nameString)
-  }
-
-
-  /** */
-  def makeValueParam(aSym: Symbol, inTpl: DocTemplateImpl, newName: String): ValueParam =
-    new ParameterImpl(aSym, inTpl) with ValueParam {
-      override val name = newName
-      def defaultValue =
-        if (aSym.hasDefault) {
-          // units.filter should return only one element
-          (currentRun.units filter (_.source.file == aSym.sourceFile)).toList match {
-            case List(unit) =>
-              // SI-4922 `sym == aSym` is insufficient if `aSym` is a clone of the symbol
-              //         of the parameter in the tree, as can happen with type parametric methods.
-              def isCorrespondingParam(sym: Symbol) = (
-                sym != null &&
-                sym != NoSymbol &&
-                sym.owner == aSym.owner &&
-                sym.name == aSym.name &&
-                sym.isParamWithDefault
-              )
-              unit.body find (t => isCorrespondingParam(t.symbol)) collect {
-                case ValDef(_,_,_,rhs) if rhs ne EmptyTree  => makeTree(rhs)
-              }
-            case _ => None
-          }
-        }
-        else None
-      def resultType =
-        makeTypeInTemplateContext(aSym.tpe, inTpl, aSym)
-      def isImplicit = aSym.isImplicit
-    }
-
-  /** */
-  def makeTypeInTemplateContext(aType: Type, inTpl: TemplateImpl, dclSym: Symbol): TypeEntity = {
-    def ownerTpl(sym: Symbol): Symbol =
-      if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
-    val tpe =
-      if (thisFactory.settings.useStupidTypes.value) aType else {
-        def ownerTpl(sym: Symbol): Symbol =
-          if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
-        val fixedSym = if (inTpl.sym.isModule) inTpl.sym.moduleClass else inTpl.sym
-        aType.asSeenFrom(fixedSym.thisType, ownerTpl(dclSym))
-      }
-    makeType(tpe, inTpl)
-  }
-
-  /** Get the types of the parents of the current class, ignoring the refinements */
-  def makeParentTypes(aType: Type, tpl: Option[MemberTemplateImpl], inTpl: TemplateImpl): List[(TemplateEntity, TypeEntity)] = aType match {
-    case RefinedType(parents, defs) =>
-      val ignoreParents = Set[Symbol](AnyClass, AnyRefClass, ObjectClass)
-      val filtParents =
-        // we don't want to expose too many links to AnyRef, as that would just be redundant information
-        if (tpl.isDefined && { val sym = tpl.get.sym; (!sym.isModule && parents.length < 2) || (sym == AnyValClass) || (sym == AnyRefClass) || (sym == AnyClass) })
-          parents
-        else
-          parents.filterNot((p: Type) => ignoreParents(p.typeSymbol))
-
-      /** Returns:
-       *   - a DocTemplate if the type's symbol is documented
-       *   - a NoDocTemplateMember if the type's symbol is not documented in its parent but in another template
-       *   - a NoDocTemplate if the type's symbol is not documented at all */
-      def makeTemplateOrMemberTemplate(parent: Type): TemplateImpl = {
-        def noDocTemplate = makeTemplate(parent.typeSymbol)
-        findTemplateMaybe(parent.typeSymbol) match {
-          case Some(tpl) => tpl
-          case None => parent match {
-            case TypeRef(pre, sym, args) =>
-              findTemplateMaybe(pre.typeSymbol) match {
-                case Some(tpl) => findMember(parent.typeSymbol, tpl).collect({case t: TemplateImpl => t}).getOrElse(noDocTemplate)
-                case None => noDocTemplate
-              }
-            case _ => noDocTemplate
-          }
-        }
-      }
-
-      filtParents.map(parent => {
-        val templateEntity = makeTemplateOrMemberTemplate(parent)
-        val typeEntity = makeType(parent, inTpl)
-        (templateEntity, typeEntity)
-      })
-    case _ =>
-      List((makeTemplate(aType.typeSymbol), makeType(aType, inTpl)))
-  }
-
-  def makeQualifiedName(sym: Symbol, relativeTo: Option[Symbol] = None): String = {
-    val stop = if (relativeTo.isDefined) relativeTo.get.ownerChain.toSet else Set[Symbol]()
-    var sym1 = sym
-    var path = new StringBuilder()
-    // var path = List[Symbol]()
-
-    while ((sym1 != NoSymbol) && (path.isEmpty || !stop(sym1))) {
-      val sym1Norm = normalizeTemplate(sym1)
-      if (!sym1.sourceModule.isPackageObject && sym1Norm != RootPackage) {
-        if (path.length != 0)
-          path.insert(0, ".")
-        path.insert(0, sym1Norm.nameString)
-        // path::= sym1Norm
-      }
-      sym1 = sym1.owner
-    }
-
-    optimize(path.toString)
-    //path.mkString(".")
-  }
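-  // For example (hypothetical symbols): for a class bar.baz.Qux this returns "bar.baz.Qux";
-  // a class defined inside a package object gets the same plain package prefix, because
-  // package-object owners are skipped when building the path.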
-
-  def inOriginalOwner(aSym: Symbol, inTpl: TemplateImpl): Boolean =
-    normalizeTemplate(aSym.owner) == normalizeTemplate(inTpl.sym)
-
-  def templateShouldDocument(aSym: Symbol, inTpl: DocTemplateImpl): Boolean =
-    (aSym.isTrait || aSym.isClass || aSym.isModule || typeShouldDocument(aSym, inTpl)) &&
-    localShouldDocument(aSym) &&
-    !isEmptyJavaObject(aSym) &&
-    // either it's inside the original owner or we can document it later:
-    (!inOriginalOwner(aSym, inTpl) || (aSym.isPackageClass || (aSym.sourceFile != null)))
-
-  def membersShouldDocument(sym: Symbol, inTpl: TemplateImpl) = {
-    // pruning modules that shouldn't be documented
-    // Why Symbol.isInitialized? Well, because we need to avoid exploring all the space available to scaladoc
-    // from the classpath -- scaladoc is a hog, it will explore everything starting from the root package unless we
-    // somehow prune the tree. And isInitialized is a good heuristic for pruning -- if the package was not explored
-    // during typer and refchecks, it's not necessary for the current application and there's no need to explore it.
-    (!sym.isModule || sym.moduleClass.isInitialized) &&
-    // documenting only public and protected members
-    localShouldDocument(sym) &&
-    // Only this class's constructors are part of its members, inherited constructors are not.
-    (!sym.isConstructor || sym.owner == inTpl.sym) &&
-    // If the @bridge annotation overrides a normal member, show it
-    !isPureBridge(sym)
-  }
-
-  def isEmptyJavaObject(aSym: Symbol): Boolean =
-    aSym.isModule && aSym.isJavaDefined &&
-    aSym.info.members.exists(s => localShouldDocument(s) && (!s.isConstructor || s.owner == aSym))
-
-  def localShouldDocument(aSym: Symbol): Boolean =
-    !aSym.isPrivate && (aSym.isProtected || aSym.privateWithin == NoSymbol) && !aSym.isSynthetic
-
-  /** Filter '@bridge' methods only if *they don't override non-bridge methods*. See SI-5373 for details */
-  def isPureBridge(sym: Symbol) = sym.isBridge && sym.allOverriddenSymbols.forall(_.isBridge)
-
-  // the classes that are excluded from the index should also be excluded from the diagrams
-  def classExcluded(clazz: TemplateEntity): Boolean = settings.hardcoded.isExcluded(clazz.qualifiedName)
-
-  // the implicit conversions that are excluded from the pages should not appear in the diagram
-  def implicitExcluded(convertorMethod: String): Boolean = settings.hiddenImplicits(convertorMethod)
-
-  // whether or not to create a page for an {abstract,alias} type
-  def typeShouldDocument(bSym: Symbol, inTpl: DocTemplateImpl) =
-    (settings.docExpandAllTypes.value && (bSym.sourceFile != null)) ||
-    (bSym.isAliasType || bSym.isAbstractType) &&
-    { val rawComment = global.expandedDocComment(bSym, inTpl.sym)
-      rawComment.contains("@template") || rawComment.contains("@documentable") }
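-  // For example (hypothetical): an alias documented as
-  //   /** Builder pipeline. @template */
-  //   type Pipeline = List[Stage]
-  // gets its own page, while the same alias without @template or @documentable is only
-  // listed as a member of its enclosing template.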
-}
-
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
deleted file mode 100644
index f88251b..0000000
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
+++ /dev/null
@@ -1,609 +0,0 @@
-/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL
- *
- * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
- *
- * @author Vlad Ureche
- * @author Adriaan Moors
- */
-
-package scala.tools.nsc
-package doc
-package model
-
-import scala.collection._
-import scala.util.matching.Regex
-
-import symtab.Flags
-import io._
-
-import model.{ RootPackage => RootPackageEntity }
-
-/**
- * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
- *
- * Let's take this as an example:
- * {{{
- *    object Test {
- *      class A
- *
- *      class B {
- *        def foo = 1
- *      }
- *
- *      class C extends B {
- *        def bar = 2
- *        class D
- *      }
- *
- *      implicit def conv(a: A) = new C
- *    }
- * }}}
- *
- * Overview:
- * - scaladoc-ing the above classes, `A` will get two more methods: foo and bar, over its default methods
- * - the nested classes (specifically `D` above), abstract types, type aliases and constructor members are not added to
- * `A` (see makeMember0 in ModelFactory, last 3 cases)
- * - the members added by implicit conversion are always listed under the implicit conversion, not under the class they
- * actually come from (`foo` will be listed as coming from the implicit conversion to `C` instead of `B`) - see
- * `definitionName` in MemberImpl
- *
- * Internals:
- * TODO: Give an overview here
- */
-trait ModelFactoryImplicitSupport {
-  thisFactory: ModelFactory with ModelFactoryTypeSupport with CommentFactory with TreeFactory =>
-
-  import global._
-  import global.analyzer._
-  import global.definitions._
-  import rootMirror.{RootPackage, RootClass, EmptyPackage, EmptyPackageClass}
-  import settings.hardcoded
-
-  // debugging:
-  val DEBUG: Boolean = settings.docImplicitsDebug.value
-  val ERROR: Boolean = true // currently we show all errors
-  @inline final def debug(msg: => String) = if (DEBUG) settings.printMsg(msg)
-  @inline final def error(msg: => String) = if (ERROR) settings.printMsg(msg)
-
-  /** This is a flag that indicates whether to eliminate implicits that cannot be satisfied within the current scope.
-   * For example, if an implicit conversion requires that there is a Numeric[T] in scope:
-   *  {{{
-   *     class A[T]
-   *     class B extends A[Int]
-   *     class C extends A[String]
-   *     implicit def pimpA[T: Numeric](a: A[T]): D
-   *  }}}
-   *  For B, no constraints are generated as Numeric[Int] is already in the default scope. On the other hand, for the
-   *  conversion from C to D, depending on -implicits-show-all, the conversion can:
-   *   - not be generated at all, since there's no Numeric[String] in scope (if run without -implicits-show-all)
-   *   - be generated with a *weird* constraint, Numeric[String], as the user might add it by hand (if the flag is enabled)
-   */
-  class ImplicitNotFound(tpe: Type) extends Exception("No implicit of type " + tpe + " found in scope.")
-
-  /* ============== MAKER METHODS ============== */
-
-  /**
-   *  Make the implicit conversion objects
-   *
-   *  A word about the scope of the implicit conversions: currently we look at a very basic context composed of the
-   *  default Scala imports (Predef._ for example) and the companion object of the current class, if one exists. In the
-   *  future we might want to extend this to more complex scopes.
-   */
-  def makeImplicitConversions(sym: Symbol, inTpl: DocTemplateImpl): List[ImplicitConversionImpl] =
-    // Nothing and Null are somewhat special -- they can be transformed by any implicit conversion available in scope.
-    // But we don't want that, so we'll simply refuse to find implicit conversions for Nothing and Null
-    if (!(sym.isClass || sym.isTrait || sym == AnyRefClass) || sym == NothingClass || sym == NullClass) Nil
-    else {
-      var context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit)
-
-      val results = global.analyzer.allViewsFrom(sym.tpe, context, sym.typeParams)
-      var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl))
-      // also keep empty conversions, so they appear in diagrams
-      // conversions = conversions.filter(!_.members.isEmpty)
-
-      // Filter out specialized conversions from array
-      if (sym == ArrayClass)
-        conversions = conversions.filterNot((conv: ImplicitConversionImpl) =>
-          hardcoded.arraySkipConversions.contains(conv.conversionQualifiedName))
-
-      // Filter out non-sensical conversions from value types
-      if (isPrimitiveValueType(sym.tpe))
-        conversions = conversions.filter((ic: ImplicitConversionImpl) =>
-          hardcoded.valueClassFilter(sym.nameString, ic.conversionQualifiedName))
-
-      // Put the visible conversions in front
-      val (ownConversions, commonConversions) =
-        conversions.partition(!_.isHiddenConversion)
-
-      ownConversions ::: commonConversions
-    }
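-  // For example (hypothetical): when documenting a class Matrix whose companion object defines
-  // `implicit def toRich(m: Matrix): RichMatrix`, the search above surfaces a conversion to
-  // RichMatrix in addition to the conversions provided by the default Predef scope.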
-
-  /** makeImplicitConversion performs the heavier lifting to get the implicit listing:
-   * - for each possible conversion function (also called view)
-   *    * figures out the final result of the view (to what is our class transformed?)
-   *    * figures out the necessary constraints on the type parameters (such as T <: Int) and the context (such as Numeric[T])
-   *    * lists all inherited members
-   *
-   * What, in detail:
-   *  - say we start from a class A[T1, T2, T3, T4]
-   *  - we have an implicit function (view) in scope:
-   *     def pimpA[T3 <: Long, T4](a: A[Int, Foo[Bar[X]], T3, T4])(implicit ev1: TypeTag[T4], ev2: Numeric[T4]): PimpedA
-   *  - A is converted to PimpedA ONLY if a couple of constraints are satisfied:
-   *     * T1 must be equal to Int
-   *     * T2 must be equal to Foo[Bar[X]]
-   *     * T3 must be upper bounded by Long
-   *     * there must be evidence of Numeric[T4] and a TypeTag[T4] within scope
-   *  - the final type is PimpedA and A therefore inherits a couple of members from PimpedA
-   *
-   * How?
-   * some notes:
-   *  - Scala's type inference will want to solve all type parameters down to actual types, but we only want constraints
-   * to maintain generality
-   *  - therefore, allViewsFrom wraps type parameters into "untouchable" type variables that only gather constraints,
-   * but are never solved down to a type
-   *  - these must be reverted back to the type parameters and the constraints must be extracted and simplified (this is
-   * done by uniteConstraints and boundedTParamsConstraints; be sure to check them out)
-   *  - we also need to transform implicit parameters in the view's signature into constraints, such that Numeric[T4]
-   * appears as a constraint
-   */
-  def makeImplicitConversion(sym: Symbol, result: SearchResult, constrs: List[TypeConstraint], context: Context, inTpl: DocTemplateImpl): List[ImplicitConversionImpl] =
-    if (result.tree == EmptyTree) Nil
-    else {
-      // `result` will contain the type of the view (= implicit conversion method)
-      // the search introduces untouchable type variables, but we want to get back to type parameters
-      val viewFullType = result.tree.tpe
-      // set the previously implicit parameters to being explicit
-
-      val (viewSimplifiedType, viewImplicitTypes) = removeImplicitParameters(viewFullType)
-
-      // TODO: Isolate this corner case :) - Predef.<%< and put it in the testsuite
-      if (viewSimplifiedType.params.length != 1) {
-        // This is known to be caused by the `<%<` object in Predef:
-        // {{{
-        //    sealed abstract class <%<[-From, +To] extends (From => To) with Serializable
-        //    object <%< {
-        //      implicit def conformsOrViewsAs[A <% B, B]: A <%< B = new (A <%< B) {def apply(x: A) = x}
-        //    }
-        // }}}
-        // so we just won't generate an implicit conversion for implicit methods that only take implicit parameters
-        return Nil
-      }
-
-      // type the view application so we get the exact type of the result (not the formal type)
-      val viewTree = result.tree.setType(viewSimplifiedType)
-      val appliedTree = new ApplyImplicitView(viewTree, List(Ident("<argument>") setType viewTree.tpe.paramTypes.head))
-      val appliedTreeTyped: Tree = {
-        val newContext = context.makeImplicit(context.ambiguousErrors)
-        newContext.macrosEnabled = false
-        val newTyper = global.analyzer.newTyper(newContext)
-        newTyper.silent(_.typed(appliedTree, global.analyzer.EXPRmode, WildcardType), false) match {
-
-          case global.analyzer.SilentResultValue(t: Tree) => t
-          case global.analyzer.SilentTypeError(err) =>
-            global.reporter.warning(sym.pos, err.toString)
-            return Nil
-        }
-      }
-
-      // now we have the final type:
-      val toType = wildcardToNothing(typeVarToOriginOrWildcard(appliedTreeTyped.tpe.finalResultType))
-
-      try {
-        // Transform bound constraints into scaladoc constraints
-        val implParamConstraints = makeImplicitConstraints(viewImplicitTypes, sym, context, inTpl)
-        val boundsConstraints = makeBoundedConstraints(sym.typeParams, constrs, inTpl)
-        // TODO: no substitution constraints appear in the library and compiler scaladoc. Maybe they can be removed?
-        val substConstraints = makeSubstitutionConstraints(result.subst, inTpl)
-        val constraints = implParamConstraints ::: boundsConstraints ::: substConstraints
-
-        List(new ImplicitConversionImpl(sym, result.tree.symbol, toType, constraints, inTpl))
-      } catch {
-        case i: ImplicitNotFound =>
-          //println("  Eliminating: " + toType)
-          Nil
-      }
-    }
-
-  def makeImplicitConstraints(types: List[Type], sym: Symbol, context: Context, inTpl: DocTemplateImpl): List[Constraint] =
-    types.flatMap((tpe:Type) => {
-      // TODO: Before creating constraints, map typeVarToOriginOrWildcard on the implicitTypes
-      val implType = typeVarToOriginOrWildcard(tpe)
-      val qualifiedName = makeQualifiedName(implType.typeSymbol)
-
-      var available: Option[Boolean] = None
-
-      // see: https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/gm_fr0RKzC4
-      //
-      // println(implType + " => " + implType.isTrivial)
-      // var tpes: List[Type] = List(implType)
-      // while (!tpes.isEmpty) {
-      //   val tpe = tpes.head
-      //   tpes = tpes.tail
-      //   tpe match {
-      //     case TypeRef(pre, sym, args) =>
-      //       tpes = pre :: args ::: tpes
-      //       println(tpe + " => " + tpe.isTrivial)
-      //     case _ =>
-      //       println(tpe + " (of type" + tpe.getClass + ") => " + tpe.isTrivial)
-      //   }
-      // }
-      // println("\n")
-
-      // look for type variables in the type. If there are none, we can decide if the implicit is there or not
-      if (implType.isTrivial) {
-        try {
-          context.flushBuffer() /* any errors here should not prevent future findings */
-          // TODO: Not sure this is the right thing to do -- seems similar to what scalac should be doing
-          val context2 = context.make(context.unit, context.tree, sym.owner, context.scope, context.imports)
-          val search = inferImplicit(EmptyTree, tpe, false, false, context2, false)
-          context.flushBuffer() /* any errors here should not prevent future findings */
-
-          available = Some(search.tree != EmptyTree)
-        } catch {
-          case _: TypeError =>
-        }
-      }
-
-      available match {
-        case Some(true) =>
-          Nil
-        case Some(false) if (!settings.docImplicitsShowAll.value) =>
-          // if -implicits-show-all is not set, we get rid of impossible conversions (such as Numeric[String])
-          throw new ImplicitNotFound(implType)
-        case _ =>
-          val typeParamNames = sym.typeParams.map(_.name)
-
-          // TODO: This is maybe the worst hack I ever did - it's as dirty as hell, but it seems to work, so until I
-          // learn more about symbols, it'll have to do.
-          implType match {
-            case TypeRef(pre, sym, List(TypeRef(NoPrefix, targ, Nil))) if (typeParamNames contains targ.name) =>
-              hardcoded.knownTypeClasses.get(qualifiedName) match {
-                case Some(explanation) =>
-                  List(new KnownTypeClassConstraint {
-                    val typeParamName = targ.nameString
-                    lazy val typeExplanation = explanation
-                    lazy val typeClassEntity = makeTemplate(sym)
-                    lazy val implicitType: TypeEntity = makeType(implType, inTpl)
-                  })
-                case None =>
-                  List(new TypeClassConstraint {
-                    val typeParamName = targ.nameString
-                    lazy val typeClassEntity = makeTemplate(sym)
-                    lazy val implicitType: TypeEntity = makeType(implType, inTpl)
-                  })
-              }
-            case _ =>
-              List(new ImplicitInScopeConstraint{
-                lazy val implicitType: TypeEntity = makeType(implType, inTpl)
-              })
-          }
-      }
-    })
-
-  def makeSubstitutionConstraints(subst: TreeTypeSubstituter, inTpl: DocTemplateImpl): List[Constraint] =
-    (subst.from zip subst.to) map {
-      case (from, to) =>
-        new EqualTypeParamConstraint {
-          error("Scaladoc implicits: Unexpected type substitution constraint from: " + from + " to: " + to)
-          val typeParamName = from.toString
-          val rhs = makeType(to, inTpl)
-        }
-    }
-
-  def makeBoundedConstraints(tparams: List[Symbol], constrs: List[TypeConstraint], inTpl: DocTemplateImpl): List[Constraint] =
-    (tparams zip constrs) flatMap {
-      case (tparam, constr) => {
-        uniteConstraints(constr) match {
-          case (loBounds, upBounds) => (loBounds filter (_ != NothingClass.tpe), upBounds filter (_ != AnyClass.tpe)) match {
-            case (Nil, Nil) =>
-              Nil
-            case (List(lo), List(up)) if (lo == up) =>
-              List(new EqualTypeParamConstraint {
-                val typeParamName = tparam.nameString
-                lazy val rhs = makeType(lo, inTpl)
-              })
-            case (List(lo), List(up)) =>
-              List(new BoundedTypeParamConstraint {
-                val typeParamName = tparam.nameString
-                lazy val lowerBound = makeType(lo, inTpl)
-                lazy val upperBound = makeType(up, inTpl)
-              })
-            case (List(lo), Nil) =>
-              List(new LowerBoundedTypeParamConstraint {
-                val typeParamName = tparam.nameString
-                lazy val lowerBound = makeType(lo, inTpl)
-              })
-            case (Nil, List(up)) =>
-              List(new UpperBoundedTypeParamConstraint {
-                val typeParamName = tparam.nameString
-                lazy val upperBound = makeType(up, inTpl)
-              })
-            case other =>
-              // this is likely an error on the lub/glb side
-              error("Scaladoc implicits: Error computing lub/glb for: " + (tparam, constr) + ":\n" + other)
-              Nil
-          }
-        }
-      }
-    }
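-  // Applied to the pimpA example above (a sketch): the bound T3 <: Long comes back as a single
-  // upper bound, so it is rendered as an UpperBoundedTypeParamConstraint; a bound that collapses
-  // to one type on both sides (lo == up) is rendered as an EqualTypeParamConstraint instead.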
-
-  /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
-
-  class ImplicitConversionImpl(
-    val sym: Symbol,
-    val convSym: Symbol,
-    val toType: Type,
-    val constrs: List[Constraint],
-    inTpl: DocTemplateImpl)
-      extends ImplicitConversion {
-
-    def source: DocTemplateEntity = inTpl
-
-    def targetType: TypeEntity = makeType(toType, inTpl)
-
-    def convertorOwner: TemplateEntity =
-      if (convSym != NoSymbol)
-        makeTemplate(convSym.owner)
-      else {
-        error("Scaladoc implicits: " + toString + " = NoSymbol!")
-        makeRootPackage
-      }
-
-    def targetTemplate: Option[TemplateEntity] = toType match {
-      // @Vlad: I'm being extra conservative in template creation -- I don't want to create templates for complex types
-      // such as refinement types because the template can't represent the type correctly (a template corresponds to a
-      // package, class, trait or object)
-      case t: TypeRef => Some(makeTemplate(t.sym))
-      case RefinedType(parents, decls) => None
-      case _ => error("Scaladoc implicits: Could not create template for: " + toType + " of type " + toType.getClass); None
-    }
-
-    def targetTypeComponents: List[(TemplateEntity, TypeEntity)] = makeParentTypes(toType, None, inTpl)
-
-    def convertorMethod: Either[MemberEntity, String] = {
-      var convertor: MemberEntity = null
-
-      convertorOwner match {
-        case doc: DocTemplateImpl =>
-          val convertors = members.collect { case m: MemberImpl if m.sym == convSym => m }
-          if (convertors.length == 1)
-            convertor = convertors.head
-        case _ =>
-      }
-      if (convertor ne null)
-        Left(convertor)
-      else
-        Right(convSym.nameString)
-    }
-
-    def conversionShortName = convSym.nameString
-
-    def conversionQualifiedName = makeQualifiedName(convSym)
-
-    lazy val constraints: List[Constraint] = constrs
-
-    lazy val memberImpls: List[MemberImpl] = {
-      // Obtain the members inherited by the implicit conversion
-      val memberSyms = toType.members.filter(implicitShouldDocument(_)).toList
-      val existingSyms = sym.info.members
-
-      // Debugging part :)
-      debug(sym.nameString + "\n" + "=" * sym.nameString.length())
-      debug(" * conversion " + convSym + " from " + sym.tpe + " to " + toType)
-
-      debug("   -> full type: " + toType)
-      if (constraints.length != 0) {
-        debug("   -> constraints: ")
-        constraints foreach { constr => debug("      - " + constr) }
-      }
-      debug("   -> members:")
-      memberSyms foreach (sym => debug("      - "+ sym.decodedName +" : " + sym.info))
-      debug("")
-
-      memberSyms.flatMap({ aSym =>
-        // we can't just pick up nodes from the original template, although that would be very convenient:
-        // they need the byConversion field to be attached to themselves and the types to be transformed by
-        // asSeenFrom
-
-        // at the same time, the member itself is in the inTpl, not in the new template -- but should pick up
-        // variables from the old template. Ugly huh? We'll always create the member inTpl, but it will change
-        // the template when expanding variables in the comment :)
-        makeMember(aSym, Some(this), inTpl)
-      })
-    }
-
-    lazy val members: List[MemberEntity] = memberImpls
-
-    def isHiddenConversion = settings.hiddenImplicits(conversionQualifiedName)
-
-    override def toString = "Implicit conversion from " + sym.tpe + " to " + toType + " done by " + convSym
-  }
-
-  /* ========================= HELPER METHODS ========================== */
-  /**
-   *  Computes the shadowing table for all the members in the implicit conversions
-   *  @param mbrs All template's members, including usecases and full signature members
-   *  @param convs All the conversions the template takes part in
-   *  @param inTpl the usual :)
-   */
-  def makeShadowingTable(mbrs: List[MemberImpl],
-                         convs: List[ImplicitConversionImpl],
-                         inTpl: DocTemplateImpl): Map[MemberEntity, ImplicitMemberShadowing] = {
-    assert(modelFinished)
-
-    var shadowingTable = Map[MemberEntity, ImplicitMemberShadowing]()
-
-    for (conv <- convs) {
-      val otherConvs = convs.filterNot(_ == conv)
-
-      for (member <- conv.memberImpls) {
-        // for each member in our list
-        val sym1 = member.sym
-        val tpe1 = conv.toType.memberInfo(sym1)
-
-        // check if it's shadowed by a member in the original class
-        var shadowedBySyms: List[Symbol] = List()
-        for (mbr <- mbrs) {
-          val sym2 = mbr.sym
-          if (sym1.name == sym2.name) {
-            val shadowed = !settings.docImplicitsSoundShadowing.value || {
-              val tpe2 = inTpl.sym.info.memberInfo(sym2)
-              !isDistinguishableFrom(tpe1, tpe2)
-            }
-            if (shadowed)
-              shadowedBySyms ::= sym2
-          }
-        }
-
-        val shadowedByMembers = mbrs.filter((mb: MemberImpl) => shadowedBySyms.contains(mb.sym))
-
-        // check if it's shadowed by another member
-        var ambiguousByMembers: List[MemberEntity] = List()
-        for (conv <- otherConvs)
-          for (member2 <- conv.memberImpls) {
-            val sym2 = member2.sym
-            if (sym1.name == sym2.name) {
-              val tpe2 = conv.toType.memberInfo(sym2)
-              // Ambiguity should be an equivalence relation
-              val ambiguated = !isDistinguishableFrom(tpe1, tpe2) || !isDistinguishableFrom(tpe2, tpe1)
-              if (ambiguated)
-                ambiguousByMembers ::= member2
-            }
-          }
-
-        // we finally have the shadowing info
-        val shadowing = new ImplicitMemberShadowing {
-          def shadowingMembers: List[MemberEntity] = shadowedByMembers
-          def ambiguatingMembers: List[MemberEntity] = ambiguousByMembers
-        }
-
-        shadowingTable += (member -> shadowing)
-      }
-    }
-
-    shadowingTable
-  }
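
A minimal, self-contained sketch of the ambiguity case recorded above (the class names B, Ext1 and Ext2 are hypothetical; only the shape of the conflict matters):

    import scala.language.implicitConversions

    class B
    class Ext1(b: B) { def bar: Int = 1 }
    class Ext2(b: B) { def bar: Int = 2 }

    object Ambiguity {
      implicit def toExt1(b: B): Ext1 = new Ext1(b)
      implicit def toExt2(b: B): Ext2 = new Ext2(b)

      // (new B).bar   // does not compile: both conversions provide `bar`, so the
      //               // call is ambiguous - exactly what ambiguatingMembers records
    }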
-
-
-  /**
-   * uniteConstraints takes a TypeConstraint instance and simplifies the constraints inside
-   *
-   * Normally TypeConstraint contains multiple lower and upper bounds, and we want to reduce this to a lower and an
-   * upper bound. Here are a couple of catches we need to be aware of:
-   *  - before finding a view (implicit method in scope that maps class A[T1,T2,.. Tn] to something else) the type
-   * parameters are transformed into "untouchable" type variables so that type inference does not attempt to
-   * fully solve them down to a type but rather constrains them on both sides just enough for the view to be
-   * applicable -- now, we want to transform those type variables back to the original type parameters
-   *  - some of the bounds fail type inference and therefore refer to Nothing => when performing unification (lub, glb)
-   * they start looking ugly => we (unsoundly) transform Nothing to WildcardType so we fool the unification algorithms
-   * into thinking there's nothing there
-   *  - we don't want the wildcard types surviving the unification so we replace them back to Nothings
-   */
-  def uniteConstraints(constr: TypeConstraint): (List[Type], List[Type]) =
-    try {
-      (List(wildcardToNothing(lub(constr.loBounds map typeVarToOriginOrWildcard))),
-       List(wildcardToNothing(glb(constr.hiBounds map typeVarToOriginOrWildcard))))
-    } catch {
-      // does this actually ever happen? (probably when type vars occur in the bounds)
-      case x: Throwable => (constr.loBounds.distinct, constr.hiBounds.distinct)
-    }
-
-  /**
-   *  Make implicits explicit - Not used currently
-   */
-  object implicitToExplicit extends TypeMap {
-    def apply(tp: Type): Type = mapOver(tp) match {
-      case MethodType(params, resultType) =>
-        MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType)
-      case other =>
-        other
-    }
-  }
-
-  /**
-   * removeImplicitParameters transforms implicit parameters from the view result type into constraints and
-   * returns the simplified type of the view
-   *
-   * for the example view:
-   *   implicit def pimpMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
-   * the implicit view result type is:
-   *   (a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
-   * and the simplified type will be:
-   *   MyClass[T] => PimpedMyClass[T]
-   */
-  def removeImplicitParameters(viewType: Type): (Type, List[Type]) = {
-
-    val params = viewType.paramss.flatten
-    val (normalParams, implParams) = params.partition(!_.isImplicit)
-    val simplifiedType = MethodType(normalParams, viewType.finalResultType)
-    val implicitTypes = implParams.map(_.tpe)
-
-    (simplifiedType, implicitTypes)
-  }
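
For reference, the view in the comment above corresponds to ordinary enrichment code along these lines; this is a self-contained sketch reusing the MyClass/PimpedMyClass/pimpMyClass names from the comment, with hypothetical bodies, to show where the implicit Numeric evidence ends up:

    import scala.language.implicitConversions

    class MyClass[T](val value: T)

    // The implicit evidence parameter does not survive as a parameter of the documented
    // conversion; scaladoc surfaces it as a constraint on T instead.
    class PimpedMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]) {
      def doubled: T = ev.plus(a.value, a.value)
    }

    object Enrichment {
      implicit def pimpMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T] =
        new PimpedMyClass(a)

      val ok = new MyClass(21).doubled      // compiles: Numeric[Int] is available
      // val ko = new MyClass("a").doubled  // does not compile: no Numeric[String]
    }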
-
-  /**
-   * typeVarsToOriginOrWildcard transforms the "untouchable" type variables into either their origins (the original
-   * type parameters) or into wildcard types if nothing matches
-   */
-  object typeVarToOriginOrWildcard extends TypeMap {
-    def apply(tp: Type): Type = mapOver(tp) match {
-      case tv: TypeVar =>
-        if (tv.constr.inst.typeSymbol == NothingClass)
-          WildcardType
-        else
-          tv.origin //appliedType(tv.origin.typeConstructor, tv.typeArgs map this)
-      case other =>
-        if (other.typeSymbol == NothingClass)
-          WildcardType
-        else
-          other
-    }
-  }
-
-  /**
-   * wildcardToNothing transforms wildcard types back to Nothing
-   */
-  object wildcardToNothing extends TypeMap {
-    def apply(tp: Type): Type = mapOver(tp) match {
-      case WildcardType =>
-        NothingClass.tpe
-      case other =>
-        other
-    }
-  }
-
-  /** implicitShouldDocument decides whether a member inherited by implicit conversion should be documented */
-  def implicitShouldDocument(aSym: Symbol): Boolean = {
-    // We shouldn't document:
-    // - constructors
-    // - common methods (in Any, AnyRef, Object) as they are automatically removed
-    // - private and protected members (not accessible following an implicit conversion)
-    // - members starting with _ (usually reserved for internal stuff)
-    localShouldDocument(aSym) && (!aSym.isConstructor) && (aSym.owner != AnyValClass) &&
-    (aSym.owner != AnyClass) && (aSym.owner != ObjectClass) &&
-    (!aSym.isProtected) && (!aSym.isPrivate) && (!aSym.name.startsWith("_")) &&
-    (aSym.isMethod || aSym.isGetter || aSym.isSetter) &&
-    (aSym.nameString != "getClass")
-  }
-
-  /* To put it very bluntly: checks if you can call the implicitly added method with t1 when t2 is already there in the
-   * class. We assume the names of the two members coincide.
-   *
-   * The trick here is that the resultType does not matter - the condition for removal is that the paramss have the same
-   * structure (A => B => C may not override (A, B) => C) and that all the parameter types of the implicit
-   * conversion's member are subtypes of the corresponding parameter types of the parent member. */
-  def isDistinguishableFrom(t1: Type, t2: Type): Boolean = {
-    // Vlad: I tried using matches but it's not exactly what we need:
-    // (p: AnyRef)AnyRef matches (t: String)AnyRef returns false -- but we want that to be true
-    // !(t1 matches t2)
-    if (t1.paramss.map(_.length) == t2.paramss.map(_.length)) {
-      for ((t1p, t2p) <- t1.paramss.flatten zip t2.paramss.flatten)
-       if (!isSubType(t1 memberInfo t1p, t2 memberInfo t2p))
-         return true // if on the corresponding parameter you give a type that is in t1 but not in t2
-                     // def foo(a: Either[Int, Double]): Int = 3
-                     // def foo(b: Left[T1]): Int = 6
-                     // a.foo(Right(4.5d)) prints out 3 :)
-      false
-    } else true // the member structure is different foo(3, 5) vs foo(3)(5)
-  }
-}
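
The Either/Left scenario from the comment can be reproduced as a small standalone example; the names A, RichA and enrich are hypothetical, while the foo signatures mirror the ones above:

    import scala.language.implicitConversions

    class A {
      def foo(a: Either[Int, Double]): Int = 3
    }

    // An enrichment adding a member with the same name but a narrower parameter type.
    class RichA(a: A) {
      def foo(b: Left[Int, Double]): Int = 6
    }

    object Shadowing {
      implicit def enrich(a: A): RichA = new RichA(a)

      // The existing member applies, so the implicit view is never tried:
      // the enriched foo is not distinguishable and is reported as shadowed.
      val r = (new A).foo(Right(4.5d))   // yields 3
    }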
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
deleted file mode 100644
index 844a509..0000000
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
+++ /dev/null
@@ -1,326 +0,0 @@
-/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */
-
-package scala.tools.nsc
-package doc
-package model
-
-import base._
-import diagram._
-
-import scala.collection._
-import scala.util.matching.Regex
-
-import symtab.Flags
-
-import io._
-
-import model.{ RootPackage => RootPackageEntity }
-
-/** This trait extracts all required information for documentation from compilation units */
-trait ModelFactoryTypeSupport {
-  thisFactory: ModelFactory
-               with ModelFactoryImplicitSupport
-               with ModelFactoryTypeSupport
-               with DiagramFactory
-               with CommentFactory
-               with TreeFactory
-               with MemberLookup =>
-
-  import global._
-  import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass }
-  import rootMirror.{ RootPackage, RootClass, EmptyPackage }
-
-  protected val typeCache = new mutable.LinkedHashMap[Type, TypeEntity]
-
-  /** */
-  def makeType(aType: Type, inTpl: TemplateImpl): TypeEntity = {
-    def templatePackage = closestPackage(inTpl.sym)
-
-    def createTypeEntity = new TypeEntity {
-      private var nameBuffer = new StringBuilder
-      private var refBuffer = new immutable.TreeMap[Int, (LinkTo, Int)]
-      private def appendTypes0(types: List[Type], sep: String): Unit = types match {
-        case Nil =>
-        case tp :: Nil =>
-          appendType0(tp)
-        case tp :: tps =>
-          appendType0(tp)
-          nameBuffer append sep
-          appendTypes0(tps, sep)
-      }
-
-      private def appendType0(tpe: Type): Unit = tpe match {
-        /* Type refs */
-        case tp: TypeRef if definitions.isFunctionType(tp) =>
-          val args = tp.normalize.typeArgs
-          nameBuffer append '('
-          appendTypes0(args.init, ", ")
-          nameBuffer append ") ⇒ "
-          appendType0(args.last)
-        case tp: TypeRef if definitions.isScalaRepeatedParamType(tp) =>
-          appendType0(tp.args.head)
-          nameBuffer append '*'
-        case tp: TypeRef if definitions.isByNameParamType(tp) =>
-          nameBuffer append "⇒ "
-          appendType0(tp.args.head)
-        case tp: TypeRef if definitions.isTupleType(tp) =>
-          val args = tp.normalize.typeArgs
-          nameBuffer append '('
-          appendTypes0(args, ", ")
-          nameBuffer append ')'
-        case TypeRef(pre, aSym, targs) =>
-          val preSym = pre.widen.typeSymbol
-
-          // SI-3314/SI-4888: Classes, Traits and Types can be inherited from a template to another:
-          // class Enum { abstract class Value }
-          // class Day extends Enum { object Mon extends Value /*...*/ }
-          // ===> in such cases we have several options:
-          // (0) if there's no inheritance taking place (Enum#Value) we can link to the template directly
-          // (1) if we generate the doc template for Day, we can link to the correct member
-          // (2) If the symbol comes from an external library for which we know the documentation URL, point to it.
-          // (3) if we don't generate the doc template, we should at least indicate the correct prefix in the tooltip
-          val bSym = normalizeTemplate(aSym)
-          val owner =
-            if ((preSym != NoSymbol) &&                  /* it needs a prefix */
-                (preSym != bSym.owner) &&                /* prefix is different from owner */
-                (aSym == bSym))                          /* normalization doesn't play tricks on us */
-              preSym
-            else
-              bSym.owner
-
-          val link =
-            findTemplateMaybe(bSym) match {
-              case Some(bTpl) if owner == bSym.owner =>
-                // (0) the owner's class is linked AND has a template - lovely
-                bTpl match {
-                  case dtpl: DocTemplateEntity => new LinkToTpl(dtpl)
-                  case _ => new Tooltip(bTpl.qualifiedName)
-                }
-              case _ =>
-                val oTpl = findTemplateMaybe(owner)
-                (oTpl, oTpl flatMap (findMember(bSym, _))) match {
-                  case (Some(oTpl), Some(bMbr)) =>
-                    // (1) the member is found in the owner's doc template - link to it there
-                    LinkToMember(bMbr, oTpl)
-                  case _ =>
-                    val name = makeQualifiedName(bSym)
-                    if (!bSym.owner.isPackage)
-                      Tooltip(name)
-                    else
-                      findExternalLink(bSym, name).getOrElse (
-                        // (3) if we could find neither the owner nor an external URL to link to, show a tooltip with the qualified name
-                        Tooltip(name)
-                      )
-                }
-            }
-
-          // SI-4360 Showing prefixes when necessary
-          // We check whether there's any directly accessible type with the same name in the current template OR if the
-          // type is inherited from one template to another. There may be multiple symbols with the same name in scope,
-          // but we won't show the prefix if our symbol is among them, only if *it's not* -- that's equal to showing
-          // the prefix only for ambiguous references, not for overloaded ones.
-          def needsPrefix: Boolean = {
-            if ((owner != bSym.owner || preSym.isRefinementClass) && (normalizeTemplate(owner) != inTpl.sym))
-              return true
-            // don't get tricked into prefixing method type params and existentials:
-            // I tried several tricks, but adding the method for which I'm creating the type simply won't scale,
-            // as ValueParams are independent of their parent member, and I really don't want to add this information to
-            // all terms, as we're already over the allowed memory footprint
-            if (aSym.isTypeParameterOrSkolem || aSym.isExistentiallyBound /* existential or existential skolem */)
-              return false
-
-            for (tpl <- inTpl.sym.ownerChain) {
-              tpl.info.member(bSym.name) match {
-                case NoSymbol =>
-                  // No syms with that name, look further inside the owner chain
-                case sym =>
-                  // Symbol found -- either the correct symbol, another one OR an overloaded alternative
-                  if (sym == bSym)
-                    return false
-                  else sym.info match {
-                    case OverloadedType(owner, alternatives) =>
-                      return alternatives.contains(bSym)
-                    case _ =>
-                      return true
-                  }
-              }
-            }
-            // if it's not found in the owner chain, we can safely leave out the prefix
-            false
-          }
-
-          val prefix =
-            if (!settings.docNoPrefixes.value && needsPrefix && (bSym != AnyRefClass /* which we normalize */)) {
-              if (!owner.isRefinementClass) {
-                val qName = makeQualifiedName(owner, Some(inTpl.sym))
-                if (qName != "") qName + "." else ""
-              }
-              else {
-                nameBuffer append "("
-                appendType0(pre)
-                nameBuffer append ")#"
-                "" // we already appended the prefix
-              }
-            } else ""
-
-          //DEBUGGING:
-          //if (makeQualifiedName(bSym) == "pack1.A") println("needsPrefix(" + bSym + ", " + owner + ", " + inTpl.qualifiedName + ") => " + needsPrefix + "  and prefix=" + prefix)
-
-          val name = prefix + bSym.nameString
-          val pos0 = nameBuffer.length
-          refBuffer += pos0 -> ((link, name.length))
-          nameBuffer append name
-
-          if (!targs.isEmpty) {
-            nameBuffer append '['
-            appendTypes0(targs, ", ")
-            nameBuffer append ']'
-          }
-        /* Refined types */
-        case RefinedType(parents, defs) =>
-          val ignoreParents = Set[Symbol](AnyClass, ObjectClass)
-          val filtParents = parents filterNot (x => ignoreParents(x.typeSymbol)) match {
-            case Nil    => parents
-            case ps     => ps
-          }
-          appendTypes0(filtParents, " with ")
-          // XXX Still todo: properly printing refinements.
-          // Since I didn't know how to go about displaying a multi-line type, I went with
-          // printing single method refinements (which should be the most common) and printing
-          // the number of members if there are more.
-          defs.toList match {
-            case Nil      => ()
-            case x :: Nil => nameBuffer append (" { " + x.defString + " }")
-            case xs       => nameBuffer append (" { ... /* %d definitions in type refinement */ }" format xs.size)
-          }
-        /* Eval-by-name types */
-        case NullaryMethodType(result) =>
-          nameBuffer append '⇒'
-          appendType0(result)
-
-        /* Polymorphic types */
-        case PolyType(tparams, result) => assert(tparams.nonEmpty)
-          def typeParamsToString(tps: List[Symbol]): String = if (tps.isEmpty) "" else
-            tps.map{tparam =>
-              tparam.varianceString + tparam.name + typeParamsToString(tparam.typeParams)
-            }.mkString("[", ", ", "]")
-          nameBuffer append typeParamsToString(tparams)
-          appendType0(result)
-
-        case et @ ExistentialType(quantified, underlying) =>
-
-          def appendInfoStringReduced(sym: Symbol, tp: Type): Unit = {
-            if (sym.isType && !sym.isAliasType && !sym.isClass) {
-                tp match {
-                  case PolyType(tparams, _) =>
-                    nameBuffer append "["
-                    appendTypes0(tparams.map(_.tpe), ", ")
-                    nameBuffer append "]"
-                  case _ =>
-                }
-                tp.resultType match {
-                  case rt @ TypeBounds(_, _) =>
-                    appendType0(rt)
-                  case rt                    =>
-                    nameBuffer append " <: "
-                    appendType0(rt)
-                }
-            } else {
-              // fallback to the Symbol infoString
-              nameBuffer append sym.infoString(tp)
-            }
-          }
-
-          def appendClauses = {
-            nameBuffer append " forSome {"
-            var first = true
-            val qset = quantified.toSet
-            for (sym <- quantified) {
-              if (!first) { nameBuffer append ", " } else first = false
-              if (sym.isSingletonExistential) {
-                nameBuffer append "val "
-                nameBuffer append tpnme.dropSingletonName(sym.name)
-                nameBuffer append ": "
-                appendType0(dropSingletonType(sym.info.bounds.hi))
-              } else {
-                if (sym.flagString != "") nameBuffer append (sym.flagString + " ")
-                if (sym.keyString != "") nameBuffer append (sym.keyString + " ")
-                nameBuffer append sym.varianceString
-                nameBuffer append sym.nameString
-                appendInfoStringReduced(sym, sym.info)
-              }
-            }
-            nameBuffer append "}"
-          }
-
-          underlying match {
-            case TypeRef(pre, sym, args) if et.isRepresentableWithWildcards =>
-              appendType0(typeRef(pre, sym, Nil))
-              nameBuffer append "["
-              var first = true
-              val qset = quantified.toSet
-              for (arg <- args) {
-                if (!first) { nameBuffer append ", " } else first = false
-                arg match {
-                  case TypeRef(_, sym, _) if (qset contains sym) =>
-                    nameBuffer append "_"
-                    appendInfoStringReduced(sym, sym.info)
-                  case arg =>
-                    appendType0(arg)
-                }
-              }
-              nameBuffer append "]"
-            case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) =>
-              nameBuffer append "("
-              appendType0(underlying)
-              nameBuffer append ")"
-              appendClauses
-            case _ =>
-              appendType0(underlying)
-              appendClauses
-          }
-
-        case tb @ TypeBounds(lo, hi) =>
-          if (tb.lo != TypeBounds.empty.lo) {
-            nameBuffer append " >: "
-            appendType0(lo)
-          }
-          if (tb.hi != TypeBounds.empty.hi) {
-            nameBuffer append " <: "
-            appendType0(hi)
-          }
-        // case tpen: ThisType | SingleType | SuperType =>
-        //   if (tpen.isInstanceOf[ThisType] && tpen.asInstanceOf[ThisType].sym.isEffectiveRoot) {
-        //     appendType0 typeRef(NoPrefix, sym, Nil)
-        //   } else {
-        //     val underlying =
-        //     val pre = underlying.typeSymbol.skipPackageObject
-        //     if (pre.isOmittablePrefix) pre.fullName + ".type"
-        //     else prefixString + "type"
-        case tpen @ ThisType(sym) =>
-          appendType0(typeRef(NoPrefix, sym, Nil))
-          nameBuffer append ".this"
-          if (!tpen.underlying.typeSymbol.skipPackageObject.isOmittablePrefix) nameBuffer append ".type"
-        case tpen @ SuperType(thistpe, supertpe) =>
-          nameBuffer append "super["
-          appendType0(supertpe)
-          nameBuffer append "]"
-        case tpen @ SingleType(pre, sym) =>
-          appendType0(typeRef(pre, sym, Nil))
-          if (!tpen.underlying.typeSymbol.skipPackageObject.isOmittablePrefix) nameBuffer append ".type"
-        case tpen =>
-          nameBuffer append tpen.toString
-      }
-      appendType0(aType)
-      val refEntity = refBuffer
-      val name = optimize(nameBuffer.toString)
-      nameBuffer = null
-    }
-
-    // SI-4360: Entity caching depends on both the type AND the template it's in, as the prefixes might change for the
-    // same type based on the template the type is shown in.
-    if (settings.docNoPrefixes.value)
-      typeCache.getOrElseUpdate(aType, createTypeEntity)
-    else createTypeEntity
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
deleted file mode 100755
index fdad84d..0000000
--- a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-package scala.tools.nsc
-package doc
-package model
-
-import scala.collection._
-import scala.reflect.internal.util.{RangePosition, OffsetPosition, SourceFile}
-
-/** The goal of this trait is, using makeTree,
-  * to browse a tree in order to
-  * 1- obtain the String of the complete tree (tree.expression)
-  * 2- fill in references to create hyperlinks later in html.pageTemplate
-  *
-  * It is applied in ModelFactory => makeTree
-  *
-  */
-
-trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
-
-  val global: Global
-  import global._
-
-  def makeTree(rhs: Tree): TreeEntity = {
-
-    var expr = new StringBuilder
-    var refs = new immutable.TreeMap[Int, (Entity, Int)] // start, (Entity to be linked to , end)
-
-    rhs.pos match {
-      case pos: RangePosition => {
-        val source: SourceFile = pos.source
-        val firstIndex = pos.startOrPoint
-        val lastIndex = pos.endOrPoint
-
-        assert(firstIndex < lastIndex, "Invalid position indices for tree " + rhs + " (" + firstIndex + ", " + lastIndex + ")")
-        expr.appendAll(source.content, firstIndex, lastIndex - firstIndex)
-
-        val traverser = new Traverser {
-
-          /** Finds the Entity on which we will later create a link
-           * and stores it in tree.refs with its position
-           */
-          def makeLink(rhs: Tree){
-            var start = pos.startOrPoint - firstIndex
-            val end = pos.endOrPoint - firstIndex
-            if(start != end) {
-              var asym = rhs.symbol
-              if (asym.isClass) makeTemplate(asym) match{
-                case docTmpl: DocTemplateImpl =>
-                  refs += ((start, (docTmpl,end)))
-                case _ =>
-              }
-              else if (asym.isTerm && asym.owner.isClass){
-                if (asym.isSetter) asym = asym.getter(asym.owner)
-                makeTemplate(asym.owner) match {
-                  case docTmpl: DocTemplateImpl =>
-                    val mbrs: Option[MemberImpl] = findMember(asym, docTmpl)
-                    mbrs foreach { mbr => refs += ((start, (mbr,end))) }
-                  case _ =>
-                }
-              }
-            }
-          }
-          /**
-           * Goes through the tree and makes links when a Select occurs.
-           * The case of New(_) is ignored because the object we want to create a link on
-           * will be reached through recursion and we don't want a link on the "new" string.
-           * If a link is not created, its case is probably not handled here.
-           */
-          override def traverse(tree: Tree) = tree match {
-            case Select(qualifier, name) =>
-              qualifier match {
-                case New(_) =>
-                case _ => makeLink(tree)
-              }
-              traverse(qualifier)
-            case Ident(_) => makeLink(tree)
-            case _ =>
-              super.traverse(tree)
-          }
-        }
-
-        traverser.traverse(rhs)
-
-        new TreeEntity {
-          val expression = expr.toString
-          val refEntity = refs
-        }
-      }
-      case _ =>
-        new TreeEntity {
-          val expression = rhs.toString
-          val refEntity = new immutable.TreeMap[Int, (Entity, Int)]
-        }
-    }
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala
deleted file mode 100644
index c2aa1f1..0000000
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala
+++ /dev/null
@@ -1,146 +0,0 @@
-package scala.tools.nsc.doc
-package model
-package diagram
-
-import model._
-
-/**
- *  The diagram base classes
- *
- *  @author Damien Obrist
- *  @author Vlad Ureche
- */
-abstract class Diagram {
-  def nodes: List[Node]
-  def edges: List[(Node, List[Node])]
-  def isContentDiagram = false     // Implemented by ContentDiagram
-  def isInheritanceDiagram = false // Implemented by InheritanceDiagram
-  def depthInfo: DepthInfo
-}
-
-case class ContentDiagram(nodes:List[/*Class*/Node], edges:List[(Node, List[Node])]) extends Diagram {
-  override def isContentDiagram = true
-  lazy val depthInfo = new ContentDiagramDepth(this)
-}
-
-/** A class diagram */
-case class InheritanceDiagram(thisNode: ThisNode,
-                        superClasses: List[/*Class*/Node],
-                        subClasses: List[/*Class*/Node],
-                        incomingImplicits: List[ImplicitNode],
-                        outgoingImplicits: List[ImplicitNode]) extends Diagram {
-  def nodes = thisNode :: superClasses ::: subClasses ::: incomingImplicits ::: outgoingImplicits
-  def edges = (thisNode -> (superClasses ::: outgoingImplicits)) ::
-              (subClasses ::: incomingImplicits).map(_ -> List(thisNode))
-
-  override def isInheritanceDiagram = true
-  lazy val depthInfo = new DepthInfo {
-    def maxDepth = 3
-    def nodeDepth(node: Node) =
-      if (node == thisNode) 1
-      else if (superClasses.contains(node)) 0
-      else if (subClasses.contains(node)) 2
-      else if (incomingImplicits.contains(node) || outgoingImplicits.contains(node)) 1
-      else -1
-  }
-}
-
-trait DepthInfo {
-  /** Gives the maximum depth */
-  def maxDepth: Int
-  /** Gives the depth of any node in the diagram or -1 if the node is not in the diagram */
-  def nodeDepth(node: Node): Int
-}
-
-abstract class Node {
-  def name = tpe.name
-  def tpe: TypeEntity
-  def tpl: Option[TemplateEntity]
-  /** shortcut to get a DocTemplateEntity */
-  def doctpl: Option[DocTemplateEntity] = tpl match {
-    case Some(tpl) => tpl match {
-      case d: DocTemplateEntity => Some(d)
-      case _ => None
-    }
-    case _ => None
-  }
-  /* shortcuts to find the node type without matching */
-  def isThisNode = false
-  def isNormalNode = false
-  def isClassNode = if (tpl.isDefined) (tpl.get.isClass || tpl.get.qualifiedName == "scala.AnyRef") else false
-  def isTraitNode = if (tpl.isDefined) tpl.get.isTrait else false
-  def isObjectNode= if (tpl.isDefined) tpl.get.isObject else false
-  def isTypeNode  = if (doctpl.isDefined) doctpl.get.isAbstractType || doctpl.get.isAliasType else false
-  def isOtherNode = !(isClassNode || isTraitNode || isObjectNode || isTypeNode)
-  def isImplicitNode = false
-  def isOutsideNode = false
-  def tooltip: Option[String]
-}
-
-// different matchers, allowing you to use the pattern matcher against any node
-// NOTE: A ThisNode or ImplicitNode can at the same time be ClassNode/TraitNode/OtherNode, not exactly according to
-// case class specification -- thus a complete match would be:
-//   node match {
-//     case ThisNode(tpe, _) =>     /* case for this node, you can still use .isClass, .isTrait and .isOther */
-//     case ImplicitNode(tpe, _) => /* case for an implicit node, you can still use .isClass, .isTrait and .isOther */
-//     case _ => node match {
-//       case ClassNode(tpe, _) =>  /* case for a non-this, non-implicit Class node */
-//       case TraitNode(tpe, _) =>  /* case for a non-this, non-implicit Trait node */
-//       case OtherNode(tpe, _) =>  /* case for a non-this, non-implicit Other node */
-//     }
-//   }
-object Node        { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = Some((n.tpe, n.tpl)) }
-object ClassNode   { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isClassNode)   Some((n.tpe, n.tpl)) else None }
-object TraitNode   { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isTraitNode)   Some((n.tpe, n.tpl)) else None }
-object TypeNode    { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isTypeNode)    Some((n.tpe, n.tpl)) else None }
-object ObjectNode  { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isObjectNode)  Some((n.tpe, n.tpl)) else None }
-object OutsideNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOutsideNode) Some((n.tpe, n.tpl)) else None }
-object OtherNode   { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOtherNode)   Some((n.tpe, n.tpl)) else None }
-
-
-
-/** The node for the current class */
-case class ThisNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isThisNode = true }
-
-/** The usual node */
-case class NormalNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isNormalNode = true }
-
-/** A class or trait the thisnode can be converted to by an implicit conversion
- *  TODO: I think it makes more sense to use the tpe links to templates instead of the TemplateEntity for implicit nodes
- *  since some implicit conversions convert the class to complex types that cannot be represented as a single template
- */
-case class ImplicitNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isImplicitNode = true }
-
-/** An outside node is shown in packages when a class from a different package makes it to the package diagram due to
- * its relation to a class in the template (see @contentDiagram hideInheritedNodes annotation) */
-case class OutsideNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isOutsideNode = true }
-
-
-// Computing and offering node depth information
-class ContentDiagramDepth(pack: ContentDiagram) extends DepthInfo {
-  private[this] var _maxDepth = 0
-  private[this] var _nodeDepth = Map[Node, Int]()
-  private[this] var seedNodes = Set[Node]()
-  private[this] val invertedEdges: Map[Node, List[Node]] =
-    pack.edges.flatMap({case (node: Node, outgoing: List[Node]) => outgoing.map((_, node))}).groupBy(_._1).map({case (k, values) => (k, values.map(_._2))}).withDefaultValue(Nil)
-  private[this] val directEdges: Map[Node, List[Node]] = pack.edges.toMap.withDefaultValue(Nil)
-
-  // seed base nodes, to minimize noise - they can't all have parents, else there would only be cycles
-  seedNodes ++= pack.nodes.filter(directEdges(_).isEmpty)
-
-  while (!seedNodes.isEmpty) {
-    var newSeedNodes = Set[Node]()
-    for (node <- seedNodes) {
-      val depth = 1 + (-1 :: directEdges(node).map(_nodeDepth.getOrElse(_, -1))).max
-      if (depth != _nodeDepth.getOrElse(node, -1)) {
-        _nodeDepth += (node -> depth)
-        newSeedNodes ++= invertedEdges(node)
-        if (depth > _maxDepth) _maxDepth = depth
-      }
-    }
-    seedNodes = newSeedNodes
-  }
-
-  val maxDepth = _maxDepth
-  def nodeDepth(node: Node) = _nodeDepth.getOrElse(node, -1)
-}
\ No newline at end of file
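
A rough, standalone sketch of the same seed-and-propagate depth computation over a toy parent map (node names hypothetical), useful for seeing what depthInfo ends up reporting:

    object DepthSketch extends App {
      type Node = String
      // node -> parents (direct edges): C extends B extends A, D extends A
      val directEdges: Map[Node, List[Node]] =
        Map("A" -> Nil, "B" -> List("A"), "C" -> List("B"), "D" -> List("A")).withDefaultValue(Nil)
      val invertedEdges: Map[Node, List[Node]] =
        directEdges.toList.flatMap { case (n, ps) => ps.map(_ -> n) }
          .groupBy(_._1).map { case (k, vs) => k -> vs.map(_._2) }.withDefaultValue(Nil)

      var depth = Map.empty[Node, Int]
      var seeds = directEdges.keySet.filter(directEdges(_).isEmpty)  // parentless nodes start at depth 0
      while (seeds.nonEmpty) {
        var next = Set.empty[Node]
        for (n <- seeds) {
          val d = 1 + (-1 :: directEdges(n).map(depth.getOrElse(_, -1))).max
          if (d != depth.getOrElse(n, -1)) { depth += n -> d; next ++= invertedEdges(n) }
        }
        seeds = next
      }
      println(depth)  // roughly Map(A -> 0, B -> 1, D -> 1, C -> 2)
    }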
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
deleted file mode 100644
index cd60865..0000000
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
+++ /dev/null
@@ -1,261 +0,0 @@
-package scala.tools.nsc.doc
-package model
-package diagram
-
-import model._
-import java.util.regex.{Pattern, Matcher}
-import scala.util.matching.Regex
-
-// statistics
-import  html.page.diagram.DiagramStats
-
-/**
- *  This trait takes care of parsing @{inheritance, content}Diagram annotations
- *
- *  @author Damien Obrist
- *  @author Vlad Ureche
- */
-trait DiagramDirectiveParser {
-  this: ModelFactory with DiagramFactory with CommentFactory with TreeFactory =>
-
-  import this.global.definitions.AnyRefClass
-
-  ///// DIAGRAM FILTERS //////////////////////////////////////////////////////////////////////////////////////////////
-
-  /**
-   *  The DiagramFilter trait directs the diagram engine about the way the diagram should be displayed
-   *
-   *  Vlad: There's an explanation I owe to people using diagrams and not finding a way to hide a specific class from
-   *  all diagrams at once. So why did I choose to allow you to control the diagrams only at class level? The
-   *  reason is that you would break separate scaladoc compilation:
-   *  If you have an "@diagram hideMyClass" annotation in class A and you run scaladoc on it along with its subclass B,
-   *  A will not appear in B's diagram. But if you run scaladoc only on B, A's comment will not be parsed and the
-   *  instructions to hide class A from all diagrams will not be available.
-   *  diagrams of each class locally. The problem does not appear with scalac, as scalac stores all its necessary
-   *  information (like scala signatures) serialized in the .class file. But we couldn't store doc comments in the class
-   *  file, could we? (Turns out we could, but that's another story)
-   *
-   *  Any flaming for this decision should go to scala-internals@googlegroups.com
-   */
-  trait DiagramFilter {
-    /** A flag to hide the diagram completely */
-    def hideDiagram: Boolean
-    /** Hide incoming implicit conversions (for type hierarchy diagrams) */
-    def hideIncomingImplicits: Boolean
-    /** Hide outgoing implicit conversions (for type hierarchy diagrams) */
-    def hideOutgoingImplicits: Boolean
-    /** Hide superclasses (for type hierarchy diagrams) */
-    def hideSuperclasses: Boolean
-    /** Hide subclasses (for type hierarchy diagrams) */
-    def hideSubclasses: Boolean
-    /** Hide nodes inherited from other objects/traits/packages (for content diagrams) */
-    def hideInheritedNodes: Boolean
-    /** Hide a node from the diagram */
-    def hideNode(clazz: Node): Boolean
-    /** Hide an edge from the diagram */
-    def hideEdge(clazz1: Node, clazz2: Node): Boolean
-  }
-
-  /** Main entry point into this trait: generate the filter for inheritance diagrams */
-  def makeInheritanceDiagramFilter(template: DocTemplateImpl): DiagramFilter = {
-
-    val defaultFilter =
-      if (template.isClass || template.isTrait || template.sym == AnyRefClass)
-        FullDiagram
-      else
-        NoDiagramAtAll
-
-    if (template.comment.isDefined)
-      makeDiagramFilter(template, template.comment.get.inheritDiagram, defaultFilter, true)
-    else
-      defaultFilter
-  }
-
-  /** Main entry point into this trait: generate the filter for content diagrams */
-  def makeContentDiagramFilter(template: DocTemplateImpl): DiagramFilter = {
-    val defaultFilter = if (template.isPackage || template.isObject) FullDiagram else NoDiagramAtAll
-    if (template.comment.isDefined)
-      makeDiagramFilter(template, template.comment.get.contentDiagram, defaultFilter, false)
-    else
-      defaultFilter
-  }
-
-  protected var tFilter = 0l
-  protected var tModel = 0l
-
-  /** Show the entire diagram, no filtering */
-  case object FullDiagram extends DiagramFilter {
-    val hideDiagram: Boolean = false
-    val hideIncomingImplicits: Boolean = false
-    val hideOutgoingImplicits: Boolean = false
-    val hideSuperclasses: Boolean = false
-    val hideSubclasses: Boolean = false
-    val hideInheritedNodes: Boolean = false
-    def hideNode(clazz: Node): Boolean = false
-    def hideEdge(clazz1: Node, clazz2: Node): Boolean = false
-  }
-
-  /** Hide the diagram completely, no need for special filtering */
-  case object NoDiagramAtAll extends DiagramFilter {
-    val hideDiagram: Boolean = true
-    val hideIncomingImplicits: Boolean = true
-    val hideOutgoingImplicits: Boolean = true
-    val hideSuperclasses: Boolean = true
-    val hideSubclasses: Boolean = true
-    val hideInheritedNodes: Boolean = true
-    def hideNode(clazz: Node): Boolean = true
-    def hideEdge(clazz1: Node, clazz2: Node): Boolean = true
-  }
-
-  /** The AnnotationDiagramFilter case class directs the diagram engine according to an annotation
-   *  TODO: Should document the annotation; for now see parseDiagramAnnotation in ModelFactory.scala */
-  case class AnnotationDiagramFilter(hideDiagram: Boolean,
-                                             hideIncomingImplicits: Boolean,
-                                             hideOutgoingImplicits: Boolean,
-                                             hideSuperclasses: Boolean,
-                                             hideSubclasses: Boolean,
-                                             hideInheritedNodes: Boolean,
-                                             hideNodesFilter: List[Pattern],
-                                             hideEdgesFilter: List[(Pattern, Pattern)]) extends DiagramFilter {
-
-    private[this] def getName(n: Node): String =
-      if (n.tpl.isDefined)
-        n.tpl.get.qualifiedName
-      else
-        n.name
-
-    def hideNode(clazz: Node): Boolean = {
-      val qualifiedName = getName(clazz)
-      for (hideFilter <- hideNodesFilter)
-        if (hideFilter.matcher(qualifiedName).matches) {
-          // println(hideFilter + ".matcher(" + qualifiedName + ").matches = " + hideFilter.matcher(qualifiedName).matches)
-          return true
-        }
-      false
-    }
-
-    def hideEdge(clazz1: Node, clazz2: Node): Boolean = {
-      val clazz1Name = getName(clazz1)
-      val clazz2Name = getName(clazz2)
-      for ((clazz1Filter, clazz2Filter) <- hideEdgesFilter) {
-        if (clazz1Filter.matcher(clazz1Name).matches &&
-            clazz2Filter.matcher(clazz2Name).matches) {
-          // println(clazz1Filter + ".matcher(" + clazz1Name + ").matches = " + clazz1Filter.matcher(clazz1Name).matches)
-          // println(clazz2Filter + ".matcher(" + clazz2Name + ").matches = " + clazz2Filter.matcher(clazz2Name).matches)
-          return true
-        }
-      }
-      false
-    }
-  }
-
-  // TODO: This could certainly be improved -- right now the only regex is *, but there's no way to match a single identifier
-  private val NodeSpecRegex = "\\\"[A-Za-z\\*][A-Za-z\\.\\*]*\\\""
-  private val NodeSpecPattern = Pattern.compile(NodeSpecRegex)
-  private val EdgeSpecRegex = "\\(" + NodeSpecRegex + "\\s*\\->\\s*" + NodeSpecRegex + "\\)"
-  private val EdgeSpecPattern = Pattern.compile(EdgeSpecRegex)
-  // And the composed regexes:
-  private val HideNodesRegex = new Regex("^hideNodes(\\s*" + NodeSpecRegex + ")+$")
-  private val HideEdgesRegex = new Regex("^hideEdges(\\s*" + EdgeSpecRegex + ")+$")
-
-  private def makeDiagramFilter(template: DocTemplateImpl,
-                                directives: List[String],
-                                defaultFilter: DiagramFilter,
-                                isInheritanceDiagram: Boolean): DiagramFilter = directives match {
-
-    // if there are no specific diagram directives, return the default filter (either FullDiagram or NoDiagramAtAll)
-    case Nil =>
-      defaultFilter
-
-    // compute the exact filters. By including the annotation, the diagram is automatically added
-    case _ =>
-      tFilter -= System.currentTimeMillis
-      var hideDiagram0: Boolean = false
-      var hideIncomingImplicits0: Boolean = false
-      var hideOutgoingImplicits0: Boolean = false
-      var hideSuperclasses0: Boolean = false
-      var hideSubclasses0: Boolean = false
-      var hideInheritedNodes0: Boolean = false
-      var hideNodesFilter0: List[Pattern] = Nil
-      var hideEdgesFilter0: List[(Pattern, Pattern)] = Nil
-
-      def warning(message: String) = {
-        // we need the position from the package object (well, ideally its comment, but yeah ...)
-        val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym
-        assert((sym != global.NoSymbol) || (sym == global.definitions.RootPackage))
-        global.reporter.warning(sym.pos, message)
-      }
-
-      def preparePattern(className: String) =
-        "^" + className.stripPrefix("\"").stripSuffix("\"").replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*") + "$"
-
-      // separate entries:
-      val entries = directives.foldRight("")(_ + " " + _).split(",").map(_.trim)
-      for (entry <- entries)
-        entry match {
-          case "hideDiagram" =>
-              hideDiagram0 = true
-          case "hideIncomingImplicits" if isInheritanceDiagram =>
-              hideIncomingImplicits0 = true
-          case "hideOutgoingImplicits" if isInheritanceDiagram  =>
-              hideOutgoingImplicits0 = true
-          case "hideSuperclasses" if isInheritanceDiagram =>
-              hideSuperclasses0 = true
-          case "hideSubclasses" if isInheritanceDiagram =>
-              hideSubclasses0 = true
-          case "hideInheritedNodes" if !isInheritanceDiagram =>
-              hideInheritedNodes0 = true
-          case HideNodesRegex(last) =>
-            val matcher = NodeSpecPattern.matcher(entry)
-            while (matcher.find()) {
-              val classPattern = Pattern.compile(preparePattern(matcher.group()))
-              hideNodesFilter0 ::= classPattern
-            }
-          case HideEdgesRegex(last) =>
-            val matcher = NodeSpecPattern.matcher(entry)
-            while (matcher.find()) {
-              val class1Pattern = Pattern.compile(preparePattern(matcher.group()))
-              assert(matcher.find()) // it's got to be there, just matched it!
-              val class2Pattern = Pattern.compile(preparePattern(matcher.group()))
-              hideEdgesFilter0 ::= ((class1Pattern, class2Pattern))
-            }
-          case "" =>
-            // don't need to do anything about it
-          case _ =>
-            warning("Could not understand diagram annotation in " + template.kind + " " + template.qualifiedName +
-              ": unmatched entry \"" + entry + "\".\n" +
-              "  This could be because:\n" +
-              "   - you forgot to separate entries by commas\n" +
-              "   - you used a tag that is not allowed in the current context (like @contentDiagram hideSuperclasses)\n"+
-              "   - you did not use one of the allowed tags (see docs.scala-lang.org for scaladoc annotations)")
-        }
-      val result =
-        if  (hideDiagram0)
-          NoDiagramAtAll
-        else if ((hideNodesFilter0.isEmpty) &&
-                 (hideEdgesFilter0.isEmpty) &&
-                 (hideIncomingImplicits0 == false) &&
-                 (hideOutgoingImplicits0 == false) &&
-                 (hideSuperclasses0 == false) &&
-                 (hideSubclasses0 == false) &&
-                 (hideInheritedNodes0 == false) &&
-                 (hideDiagram0 == false))
-          FullDiagram
-        else
-          AnnotationDiagramFilter(
-            hideDiagram = hideDiagram0,
-            hideIncomingImplicits = hideIncomingImplicits0,
-            hideOutgoingImplicits = hideOutgoingImplicits0,
-            hideSuperclasses = hideSuperclasses0,
-            hideSubclasses = hideSubclasses0,
-            hideInheritedNodes = hideInheritedNodes0,
-            hideNodesFilter = hideNodesFilter0,
-            hideEdgesFilter = hideEdgesFilter0)
-
-      if (settings.docDiagramsDebug.value && result != NoDiagramAtAll && result != FullDiagram)
-        settings.printMsg(template.kind + " " + template.qualifiedName + " filter: " + result)
-      tFilter += System.currentTimeMillis
-
-      result
-  }
-}
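
For orientation, here is a hypothetical sketch of how such directives appear in user scaladoc comments (type names invented; the @inheritanceDiagram/@contentDiagram tag spellings follow the trait's header comment):

    /** A base type whose inheritance diagram hides superclasses and a few noisy nodes.
     *
     *  @inheritanceDiagram hideSuperclasses, hideNodes "scala.Serializable" "*.internal.*"
     */
    trait Widget

    /** A container whose content diagram drops inherited nodes and one specific edge.
     *
     *  @contentDiagram hideInheritedNodes, hideEdges ("*.Widget" -> "scala.AnyRef")
     */
    object widgets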
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
deleted file mode 100644
index cb54a73..0000000
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
+++ /dev/null
@@ -1,271 +0,0 @@
-package scala.tools.nsc.doc
-package model
-package diagram
-
-import model._
-import scala.collection.mutable
-
-// statistics
-import  html.page.diagram.DiagramStats
-
-import scala.collection.immutable.SortedMap
-
-/**
- *  This trait takes care of generating the diagram for classes and packages
- *
- *  @author Damien Obrist
- *  @author Vlad Ureche
- */
-trait DiagramFactory extends DiagramDirectiveParser {
-  this: ModelFactory with ModelFactoryTypeSupport with DiagramFactory with CommentFactory with TreeFactory =>
-
-  import this.global.definitions._
-  import this.global._
-
-  // the following can be used for hardcoding different relations into the diagram, for bootstrapping purposes
-  def aggregationNode(text: String) =
-    NormalNode(new TypeEntity { val name = text; val refEntity = SortedMap[Int, (base.LinkTo, Int)]() }, None)()
-
-  /** Create the inheritance diagram for this template */
-  def makeInheritanceDiagram(tpl: DocTemplateImpl): Option[Diagram] = {
-
-    tFilter = 0
-    tModel = -System.currentTimeMillis
-
-    // the diagram filter
-    val diagramFilter = makeInheritanceDiagramFilter(tpl)
-
-    def implicitTooltip(from: DocTemplateEntity, to: TemplateEntity, conv: ImplicitConversion) =
-      Some(from.qualifiedName + " can be implicitly converted to " + conv.targetType + " by the implicit method "
-        + conv.conversionShortName + " in " + conv.convertorOwner.kind + " " + conv.convertorOwner.qualifiedName)
-
-    val result =
-      if (diagramFilter == NoDiagramAtAll)
-        None
-      else {
-        // the main node
-        val thisNode = ThisNode(tpl.resultType, Some(tpl))(Some(tpl.qualifiedName + " (this " + tpl.kind + ")"))
-
-        // superclasses
-        var superclasses: List[Node] =
-          tpl.parentTypes.collect {
-            case p: (TemplateEntity, TypeEntity) if !classExcluded(p._1) => NormalNode(p._2, Some(p._1))()
-          }.reverse
-
-        // incoming implicit conversions
-        lazy val incomingImplicitNodes = tpl.incomingImplicitlyConvertedClasses.map {
-          case (incomingTpl, conv) =>
-            ImplicitNode(makeType(incomingTpl.sym.tpe, tpl), Some(incomingTpl))(implicitTooltip(from=incomingTpl, to=tpl, conv=conv))
-        }
-
-        // subclasses
-        var subclasses: List[Node] =
-          tpl.directSubClasses.collect {
-            case d: TemplateImpl if !classExcluded(d) => NormalNode(makeType(d.sym.tpe, tpl), Some(d))()
-          }.sortBy(_.tpl.get.name)(implicitly[Ordering[String]].reverse)
-
-        // outgoing implicit conversions
-        lazy val outgoingImplicitNodes = tpl.outgoingImplicitlyConvertedClasses.map {
-          case (outgoingTpl, outgoingType, conv) =>
-            ImplicitNode(outgoingType, Some(outgoingTpl))(implicitTooltip(from=tpl, to=tpl, conv=conv))
-        }
-
-        // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams.
-        // Currently, it's possible to leave nodes and edges out, but there's no way to create new nodes and edges
-        // The implementation would need to add the annotations and the logic to select nodes (or create new ones)
-        // and add edges to the diagram -- I bet it wouldn't take too long for someone to do it (one or two days
-        // at most) and it would be a great add to the diagrams.
-        if (tpl.sym == AnyRefClass)
-          subclasses = List(aggregationNode("All user-defined classes and traits"))
-
-        val filteredSuperclasses = if (diagramFilter.hideSuperclasses) Nil else superclasses
-        val filteredIncomingImplicits = if (diagramFilter.hideIncomingImplicits) Nil else incomingImplicitNodes
-        val filteredSubclasses = if (diagramFilter.hideSubclasses) Nil else subclasses
-        val filteredImplicitOutgoingNodes = if (diagramFilter.hideOutgoingImplicits) Nil else outgoingImplicitNodes
-
-        // final diagram filter
-        filterDiagram(InheritanceDiagram(thisNode, filteredSuperclasses.reverse, filteredSubclasses.reverse, filteredIncomingImplicits, filteredImplicitOutgoingNodes), diagramFilter)
-      }
-
-    tModel += System.currentTimeMillis
-    DiagramStats.addFilterTime(tFilter)
-    DiagramStats.addModelTime(tModel-tFilter)
-
-    result
-  }
-
-  /** Create the content diagram for this template */
-  def makeContentDiagram(pack: DocTemplateImpl): Option[Diagram] = {
-
-    tFilter = 0
-    tModel = -System.currentTimeMillis
-
-    // the diagram filter
-    val diagramFilter = makeContentDiagramFilter(pack)
-
-    val result =
-      if (diagramFilter == NoDiagramAtAll)
-        None
-      else {
-        var mapNodes = Map[TemplateEntity, Node]()
-        var nodesShown = Set[TemplateEntity]()
-        var edgesAll = List[(TemplateEntity, List[TemplateEntity])]()
-
-        // nodesAll is the entire set of classes and traits in the package; it is the superset of nodes in the diagram.
-        // We collect classes, traits and objects without a companion, which are usually used as values (e.g. scala.None).
-        val nodesAll = pack.members collect {
-          case d: TemplateEntity if ((!diagramFilter.hideInheritedNodes) || (d.inTemplate == pack)) => d
-        }
-
-        def listSuperClasses(member: MemberTemplateImpl) = {
-          // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to add nodes to diagrams.
-          (pack.sym, member.sym) match {
-            case (ScalaPackage, NullClass) =>
-              List(makeTemplate(AnyRefClass))
-            case (ScalaPackage, NothingClass) =>
-              (List(NullClass) ::: ScalaValueClasses) map { makeTemplate(_) }
-            case _ =>
-              member.parentTypes map {
-                case (template, tpe) => template
-              } filter {
-                nodesAll.contains(_)
-              }
-          }
-        }
-
-        // for each node, add its subclasses
-        for (node <- nodesAll if !classExcluded(node)) {
-          node match {
-            case dnode: MemberTemplateImpl =>
-              val superClasses = listSuperClasses(dnode)
-
-              if (!superClasses.isEmpty) {
-                nodesShown += dnode
-                nodesShown ++= superClasses
-              }
-              edgesAll ::= dnode -> superClasses
-            case _ =>
-          }
-
-          mapNodes += node -> (
-            if (node.inTemplate == pack && (node.isDocTemplate || node.isAbstractType || node.isAliasType))
-              NormalNode(node.resultType, Some(node))()
-            else
-              OutsideNode(node.resultType, Some(node))()
-          )
-        }
-
-        if (nodesShown.isEmpty)
-          None
-        else {
-          val nodes = nodesAll.filter(nodesShown.contains(_)).flatMap(mapNodes.get(_))
-          val edges = edgesAll.map {
-            case (entity, superClasses) => {
-              (mapNodes(entity), superClasses flatMap { mapNodes.get(_) })
-            }
-          } filterNot {
-            case (node, superClassNodes) => superClassNodes.isEmpty
-          }
-
-          val diagram =
-            // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams.
-            if (pack.sym == ScalaPackage) {
-              // Tried it, but it doesn't look good:
-              // var anyRefSubtypes: List[Node] = List(mapNodes(makeTemplate(AnyRefClass)))
-              // var dirty = true
-              // do {
-              //   val length = anyRefSubtypes.length
-              //   anyRefSubtypes :::= edges.collect { case p: (Node, List[Node]) if p._2.exists(anyRefSubtypes.contains(_)) => p._1 }
-              //   anyRefSubtypes = anyRefSubtypes.distinct
-              //   dirty = (anyRefSubtypes.length != length)
-              // } while (dirty)
-              // println(anyRefSubtypes)
-              val anyRefSubtypes = Nil
-              val allAnyRefTypes = aggregationNode("All AnyRef subtypes")
-              val nullTemplate = makeTemplate(NullClass)
-              if (nullTemplate.isDocTemplate)
-                ContentDiagram(allAnyRefTypes::nodes, (mapNodes(nullTemplate), allAnyRefTypes::anyRefSubtypes)::edges.filterNot(_._1.tpl == Some(nullTemplate)))
-              else
-                ContentDiagram(nodes, edges)
-            } else
-              ContentDiagram(nodes, edges)
-
-          filterDiagram(diagram, diagramFilter)
-        }
-      }
-
-    tModel += System.currentTimeMillis
-    DiagramStats.addFilterTime(tFilter)
-    DiagramStats.addModelTime(tModel-tFilter)
-
-    result
-  }
-
-  /** Diagram filtering logic */
-  private def filterDiagram(diagram: Diagram, diagramFilter: DiagramFilter): Option[Diagram] = {
-    tFilter -= System.currentTimeMillis
-
-    val result =
-      if (diagramFilter == FullDiagram)
-        Some(diagram)
-      else if (diagramFilter == NoDiagramAtAll)
-        None
-      else {
-        // Final diagram, with the filtered nodes and edges
-        diagram match {
-          case InheritanceDiagram(thisNode, _, _, _, _) if diagramFilter.hideNode(thisNode) =>
-            None
-
-          case InheritanceDiagram(thisNode, superClasses, subClasses, incomingImplicits, outgoingImplicits) =>
-
-            def hideIncoming(node: Node): Boolean =
-              diagramFilter.hideNode(node) || diagramFilter.hideEdge(node, thisNode)
-
-            def hideOutgoing(node: Node): Boolean =
-              diagramFilter.hideNode(node) || diagramFilter.hideEdge(thisNode, node)
-
-            // println(thisNode)
-            // println(superClasses.map(cl => "super: " + cl + "  " + hideOutgoing(cl)).mkString("\n"))
-            // println(subClasses.map(cl => "sub: " + cl + "  " + hideIncoming(cl)).mkString("\n"))
-            Some(InheritanceDiagram(thisNode,
-                             superClasses.filterNot(hideOutgoing(_)),
-                             subClasses.filterNot(hideIncoming(_)),
-                             incomingImplicits.filterNot(hideIncoming(_)),
-                             outgoingImplicits.filterNot(hideOutgoing(_))))
-
-          case ContentDiagram(nodes0, edges0) =>
-            // Filter out all edges that:
-            // (1) are sources of hidden classes
-            // (2) are manually hidden by the user
-            // (3) are destinations of hidden classes
-            val edges: List[(Node, List[Node])] =
-              diagram.edges.flatMap({
-                case (source, dests) if !diagramFilter.hideNode(source) =>
-                  val dests2 = dests.collect({ case dest if (!(diagramFilter.hideEdge(source, dest) || diagramFilter.hideNode(dest))) => dest })
-                  if (dests2 != Nil)
-                    List((source, dests2))
-                  else
-                    Nil
-                case _ => Nil
-              })
-
-            // Only show the non-isolated nodes
-            // TODO: Decide if we really want to hide package members, I'm not sure that's a good idea (!!!)
-            // TODO: Does .distinct cause any stability issues?
-            val sourceNodes = edges.map(_._1)
-            val sinkNodes = edges.map(_._2).flatten
-            val nodes = (sourceNodes ::: sinkNodes).distinct
-            Some(ContentDiagram(nodes, edges))
-        }
-      }
-
-    tFilter += System.currentTimeMillis
-
-    // eliminate all empty diagrams
-    if (result.isDefined && result.get.edges.forall(_._2.isEmpty))
-      None
-    else
-      result
-  }
-
-}
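
For reference, the ContentDiagram branch of filterDiagram above drops edges whose source or destination is hidden and then keeps only the nodes that still take part in some edge. Below is a self-contained Scala sketch of that pruning over plain strings; the node names and the hidden set are invented for illustration, and hideEdge is ignored for brevity.

    object EdgePruneSketch {
      def main(args: Array[String]): Unit = {
        val hidden = Set("C")                                        // hypothetical hidden nodes
        val edges0 = List("A" -> List("B", "C"), "C" -> List("D"))
        // Drop hidden sources, drop hidden destinations, drop edges that end up empty.
        val edges = edges0.collect {
          case (src, dests) if !hidden(src) => src -> dests.filterNot(hidden)
        }.filterNot(_._2.isEmpty)
        // Keep only the non-isolated nodes, mirroring the last step of the ContentDiagram case.
        val nodes = (edges.map(_._1) ::: edges.flatMap(_._2)).distinct
        println(nodes)  // List(A, B)
        println(edges)  // List((A,List(B)))
      }
    }
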
diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
deleted file mode 100644
index 3e7ac57..0000000
--- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Iulian Dragos
- * @author Hubert Plocinicak
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection._
-
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.reflect.internal.util.FakePos
-
-import dependencies._
-import io.AbstractFile
-import scala.language.implicitConversions
-
-trait BuildManager {
-
-  /** Add the given source files to the managed build process. */
-  def addSourceFiles(files: Set[AbstractFile])
-
-  /** Remove the given files from the managed build process. */
-  def removeFiles(files: Set[AbstractFile])
-
-  /** The given files have been modified by the user. Recompile
-   *  them and their dependent files.
-   */
-  def update(added: Set[AbstractFile], removed: Set[AbstractFile])
-
-  /** Notification that the supplied set of files is being built */
-  def buildingFiles(included: Set[AbstractFile]) {}
-
-  /** Load saved dependency information. */
-  def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean
-
-  /** Save dependency information to `file`. */
-  def saveTo(file: AbstractFile, fromFile: AbstractFile => String)
-
-  def compiler: scala.tools.nsc.Global
-
-  /** Delete classfiles derived from the supplied set of sources */
-  def deleteClassfiles(sources : Set[AbstractFile]) {
-    val targets = compiler.dependencyAnalysis.dependencies.targets
-    for(source <- sources; cf <- targets(source))
-      cf.delete
-  }
-}
-
-
-/** Simple driver for testing the build manager. It presents
- *  the user with a 'resident compiler' prompt. Each line is
- *  interpreted as a set of files that have changed. The builder
- *  then derives the dependent files and recompiles them.
- */
-object BuildManagerTest extends EvalLoop {
-
-  def prompt = "builder > "
-
-  private def buildError(msg: String) {
-    println(msg + "\n  scalac -help  gives more information")
-  }
-
-  def main(args: Array[String]) {
-    implicit def filesToSet(fs: List[String]): Set[AbstractFile] = {
-      def partition(s: String, r: Tuple2[List[AbstractFile], List[String]]) = {
-        val v = AbstractFile.getFile(s)
-        if (v == null) (r._1, s::r._2) else (v::r._1, r._2)
-      }
-      val result =  fs.foldRight((List[AbstractFile](), List[String]()))(partition)
-      if (!result._2.isEmpty)
-        Console.err.println("No such file(s): " + result._2.mkString(","))
-      Set.empty ++ result._1
-    }
-
-    val settings = new Settings(buildError)
-    settings.Ybuildmanagerdebug.value = true
-    val command = new CompilerCommand(args.toList, settings)
-//    settings.make.value = "off"
-//    val buildManager: BuildManager = new SimpleBuildManager(settings)
-    val buildManager: BuildManager = new RefinedBuildManager(settings)
-
-    buildManager.addSourceFiles(command.files)
-
-    // enter resident mode
-    loop { line =>
-      val args = line.split(' ').toList
-      val command = new CompilerCommand(args, settings)
-      buildManager.update(command.files, Set.empty)
-    }
-
-  }
-}
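
The filesToSet conversion in BuildManagerTest above folds over the argument list, splitting names into resolved files and unknown ones before seeding the build manager. The same partitioning pattern as a self-contained Scala sketch, with java.io.File standing in for AbstractFile and hypothetical input paths:

    import java.io.File

    object PartitionFilesSketch {
      def main(args: Array[String]): Unit = {
        val names = List("build.xml", "no-such-file.scala")          // hypothetical inputs
        // Fold right, accumulating (resolved files, unknown names), as filesToSet does.
        val (found, missing) =
          names.foldRight((List.empty[File], List.empty[String])) {
            case (name, (fs, bad)) =>
              val f = new File(name)
              if (f.exists) (f :: fs, bad) else (fs, name :: bad)
          }
        if (missing.nonEmpty)
          Console.err.println("No such file(s): " + missing.mkString(","))
        println("managed sources: " + found.mkString(", "))
      }
    }
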
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
deleted file mode 100644
index 8d12581..0000000
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ /dev/null
@@ -1,481 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import scala.util.control.ControlThrowable
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.symtab._
-import scala.tools.nsc.ast._
-import scala.tools.nsc.util.FailedInterrupt
-import scala.tools.nsc.util.EmptyAction
-import scala.tools.nsc.util.WorkScheduler
-import scala.reflect.internal.util.{SourceFile, Position}
-import scala.tools.nsc.util.InterruptReq
-
-/** Interface of the interactive compiler to a client such as an IDE.
- *  The model of the presentation compiler consists of the following parts:
- *
- *  unitOfFile: The map from sourcefiles to loaded units. A sourcefile/unit is loaded if it occurs in that map.
- *
- *  manipulated by: removeUnitOf, reloadSources.
- *
- *  A call to reloadSources will add the given sources to the loaded units, and
- *  start a new background compiler pass to compile all loaded units (with the indicated sources first).
- *  Each background compiler pass has its own typer run.
- *  The background compiler thread can be interrupted each time an AST node is
- *  completely typechecked in the following ways:
-
- *  1. by a new call to reloadSources. This starts a new background compiler pass with a new typer run.
- *  2. by a call to askTypeTree. This starts a new typer run if the forceReload parameter = true
- *  3. by a call to askTypeAt, askTypeCompletion, askScopeCompletion, askToDoFirst, askLinkPos, askLastType.
- *  4. by raising an exception in the scheduler.
- *  5. by passing a high-priority action wrapped in ask { ... }.
- *
- *  Actions under 1-3 can themselves be interrupted if they involve typechecking
- *  AST nodes. High-priority actions under 5 cannot; they always run to completion.
- *  So these high-priority actions should be short.
- *
- *  Normally, an interrupted action continues after the interrupting action is finished.
- *  However, if the interrupting action created a new typer run, the interrupted
- *  action is aborted. If there's an outstanding response, it will be set to
- *  a Right value with a FreshRunReq exception.
- */
-trait CompilerControl { self: Global =>
-
-  import syntaxAnalyzer.UnitParser
-
-  type Response[T] = scala.tools.nsc.interactive.Response[T]
-
-  /** The scheduler by which client and compiler communicate
-   *  Must be initialized before starting compilerRunner
-   */
-  @volatile protected[interactive] var scheduler = new WorkScheduler
-
-  /** Return the compilation unit attached to a source file, or None
-   *  if source is not loaded.
-   */
-  def getUnitOf(s: SourceFile): Option[RichCompilationUnit] = getUnit(s)
-
-  /** Run operation `op` on a compilation unit associated with given `source`.
-   *  If source has a loaded compilation unit, this one is passed to `op`.
-   *  Otherwise a new compilation unit is created, but not added to the set of loaded units.
-   */
-  def onUnitOf[T](source: SourceFile)(op: RichCompilationUnit => T): T =
-    op(unitOfFile.getOrElse(source.file, new RichCompilationUnit(source)))
-
-  /** The compilation unit corresponding to a source file;
-   *  if it does not yet exist, create a new one atomically.
-   *  Note: We want to get rid of this operation as it messes with compiler invariants.
-   */
-  @deprecated("use getUnitOf(s) or onUnitOf(s) instead", "2.10.0")
-  def unitOf(s: SourceFile): RichCompilationUnit = getOrCreateUnitOf(s)
-
-  /** The compilation unit corresponding to a position */
-  @deprecated("use getUnitOf(pos.source) or onUnitOf(pos.source) instead", "2.10.0")
-  def unitOf(pos: Position): RichCompilationUnit = getOrCreateUnitOf(pos.source)
-
-  /** Removes the CompilationUnit corresponding to the given SourceFile
-   *  from consideration for recompilation.
-   */
-  def removeUnitOf(s: SourceFile): Option[RichCompilationUnit] = { toBeRemoved += s.file; unitOfFile get s.file }
-
-  /** Returns the top level classes and objects that were deleted
-   * in the editor since last time recentlyDeleted() was called.
-   */
-  def recentlyDeleted(): List[Symbol] = deletedTopLevelSyms.synchronized {
-    val result = deletedTopLevelSyms
-    deletedTopLevelSyms.clear()
-    result.toList
-  }
-
-  /** Locate smallest tree that encloses position
-   *  @pre Position must be loaded
-   */
-  def locateTree(pos: Position): Tree = onUnitOf(pos.source) { unit => new Locator(pos) locateIn unit.body }
-
-  /** Locates smallest context that encloses position as an optional value.
-   */
-  def locateContext(pos: Position): Option[Context] =
-    for (unit <- getUnit(pos.source); cx <- locateContext(unit.contexts, pos)) yield cx
-
-  /** Returns the smallest context that contains given `pos`, throws FatalError if none exists.
-   */
-  def doLocateContext(pos: Position): Context = locateContext(pos) getOrElse {
-    throw new FatalError("no context found for "+pos)
-  }
-
-  private def postWorkItem(item: WorkItem) =
-    if (item.onCompilerThread) item() else scheduler.postWorkItem(item)
-
-  /** Makes sure a set of compilation units is loaded and parsed.
-   *  Returns () to syncvar `response` on completion.
-   *  Afterwards a new background compiler run is started with
-   *  the given sources at the head of the list of to-be-compiled sources.
-   */
-  def askReload(sources: List[SourceFile], response: Response[Unit]) = {
-    val superseded = scheduler.dequeueAll {
-      case ri: ReloadItem if ri.sources == sources => Some(ri)
-      case _ => None
-    }
-    superseded.foreach(_.response.set())
-    postWorkItem(new ReloadItem(sources, response))
-  }
-
-  /** Removes source files and toplevel symbols, and issues a new typer run.
-   *  Returns () to syncvar `response` on completion.
-   */
-  def askFilesDeleted(sources: List[SourceFile], response: Response[Unit]) = {
-    postWorkItem(new FilesDeletedItem(sources, response))
-  }
-
-  /** Sets sync var `response` to the smallest fully attributed tree that encloses position `pos`.
- *  Note: Unlike for most other ask... operations, the source file belonging to `pos` need not be loaded.
-   */
-  def askTypeAt(pos: Position, response: Response[Tree]) =
-    postWorkItem(new AskTypeAtItem(pos, response))
-
-  /** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`.
-   *  @pre `source` needs to be loaded.
-   *
-   *  @note Deprecated because of race conditions in the typechecker when the background compiler
-   *        is interrupted while typing the same `source`.
-   *  @see  SI-6578
-   */
-  @deprecated("Use `askLoadedTyped` instead to avoid race conditions in the typechecker", "2.10.1")
-  def askType(source: SourceFile, forceReload: Boolean, response: Response[Tree]) =
-    postWorkItem(new AskTypeItem(source, forceReload, response))
-
-  /** Sets sync var `response` to the position of the definition of the given link in
-   *  the given sourcefile.
-   *
-   *  @param   sym      The symbol referenced by the link (might come from a classfile)
-   *  @param   source   The source file that's supposed to contain the definition
-   *  @param   response A response that will be set to the following:
- *                    If `source` contains a definition that is referenced by the given link,
-   *                    the position of that definition, otherwise NoPosition.
-   *  Note: This operation does not automatically load `source`. If `source`
-   *  is unloaded, it stays that way.
-   */
-  def askLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) =
-    postWorkItem(new AskLinkPosItem(sym, source, response))
-
-  /** Sets sync var `response` to doc comment information for a given symbol.
-   *
-   *  @param   sym        The symbol whose doc comment should be retrieved (might come from a classfile)
-   *  @param   source     The source file that's supposed to contain the definition
-   *  @param   site       The symbol where 'sym' is observed
-   *  @param   fragments  All symbols that can contribute to the generated documentation
-   *                      together with their source files.
-   *  @param   response   A response that will be set to the following:
-   *                      If `source` contains a definition of a given symbol that has a doc comment,
-   *                      the (expanded, raw, position) triplet for a comment, otherwise ("", "", NoPosition).
-   *  Note: This operation does not automatically load sources that are not yet loaded.
-   */
-  def askDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]): Unit =
-    postWorkItem(new AskDocCommentItem(sym, source, site, fragments, response))
-
-  @deprecated("Use method that accepts fragments", "2.10.2")
-  def askDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]): Unit =
-    askDocComment(sym, source, site, (sym,source)::Nil, response)
-
-  /** Sets sync var `response` to list of members that are visible
-   *  as members of the tree enclosing `pos`, possibly reachable by an implicit.
-   *  @pre  source is loaded
-   */
-  def askTypeCompletion(pos: Position, response: Response[List[Member]]) =
-    postWorkItem(new AskTypeCompletionItem(pos, response))
-
-  /** Sets sync var `response` to list of members that are visible
-   *  as members of the scope enclosing `pos`.
-   *  @pre  source is loaded
-   */
-  def askScopeCompletion(pos: Position, response: Response[List[Member]]) =
-    postWorkItem(new AskScopeCompletionItem(pos, response))
-
-  /** Asks for the unit corresponding to the given source file to be processed first on the present and subsequent type checking passes.
-   *  If the file is in the 'crashedFiles' ignore list, it is removed from that list and typechecked normally.
-   */
-  def askToDoFirst(source: SourceFile) =
-    postWorkItem(new AskToDoFirstItem(source))
-
-  /** If source is not yet loaded, loads it, and starts a new run, otherwise
-   * continues with current pass.
-   * Waits until source is fully type checked and returns body in response.
-   * @param source     The source file that needs to be fully typed.
-   * @param keepLoaded Whether to keep that file in the PC if it was not loaded before. If
-   *                   the file is already loaded, this flag is ignored.
-   * @param response   The response, which is set to the fully attributed tree of `source`.
-   *                   If the unit corresponding to `source` has been removed in the meantime,
-   *                   a NoSuchUnitError is raised in the response.
-   */
-  def askLoadedTyped(source:SourceFile, keepLoaded: Boolean, response: Response[Tree]): Unit =
-    postWorkItem(new AskLoadedTypedItem(source, keepLoaded, response))
-
-  final def askLoadedTyped(source: SourceFile, response: Response[Tree]): Unit =
-    askLoadedTyped(source, false, response)
-
-  /** If source is not yet loaded, get an outline view with askParsedEntered.
-   *  If source is loaded, wait for it to be typechecked.
-   *  In both cases, set response to parsed (and possibly typechecked) tree.
-   *  @param keepSrcLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
-   */
-  def askStructure(keepSrcLoaded: Boolean)(source: SourceFile, response: Response[Tree]) = {
-    getUnit(source) match {
-      case Some(_) => askLoadedTyped(source, keepSrcLoaded, response)
-      case None => askParsedEntered(source, keepSrcLoaded, response)
-    }
-  }
-
-  /** Set sync var `response` to the parse tree of `source` with all top-level symbols entered.
-   *  @param source       The source file to be analyzed
-   *  @param keepLoaded   If set to `true`, source file will be kept as a loaded unit afterwards.
-   *                      If keepLoaded is `false` the operation is run at low priority, only after
-   *                      everything is brought up to date in a regular type checker run.
-   *  @param response     The response.
-   */
-  def askParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) =
-    postWorkItem(new AskParsedEnteredItem(source, keepLoaded, response))
-
-  /** Set sync var `response` to a pair consisting of
- *                  - the fully qualified name of the first top-level object definition in the file,
-   *                    or "" if there are no object definitions.
-   *                  - the text of the instrumented program which, when run,
-   *                    prints its output and all defined values in a comment column.
-   *
-   *  @param source       The source file to be analyzed
-   *  @param keepLoaded   If set to `true`, source file will be kept as a loaded unit afterwards.
-   *                      If keepLoaded is `false` the operation is run at low priority, only after
-   *                      everything is brought up to date in a regular type checker run.
-   *  @param response     The response.
-   */
-  @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
-  def askInstrumented(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) =
-    postWorkItem(new AskInstrumentedItem(source, line, response))
-
-  /** Cancels the current compiler run and starts a fresh one where everything will be re-typechecked
-   *  (but not re-loaded).
-   */
-  def askReset() = scheduler raise (new FreshRunReq)
-
-  /** Tells the compile server to shutdown, and not to restart again */
-  def askShutdown() = scheduler raise ShutdownReq
-
-  @deprecated("use parseTree(source) instead", "2.10.0") // deleted 2nd parameter, as this has to run on 2.8 also.
-  def askParse(source: SourceFile, response: Response[Tree]) = respond(response) {
-    parseTree(source)
-  }
-
-  /** Returns parse tree for source `source`. No symbols are entered. Syntax errors are reported.
-   *
-   *  This method is thread-safe and as such can safely run outside of the presentation
-   *  compiler thread.
-   */
-  def parseTree(source: SourceFile): Tree = {
-    new UnitParser(new CompilationUnit(source)).parse()
-  }
-
-  /** Asks for a computation to be done quickly on the presentation compiler thread */
-  def ask[A](op: () => A): A = if (self.onCompilerThread) op() else scheduler doQuickly op
-
-  /** Asks for a computation to be done on presentation compiler thread, returning
-   *  a response with the result or an exception
-   */
-  def askForResponse[A](op: () => A): Response[A] = {
-    val r = new Response[A]
-    if (self.onCompilerThread) {
-      try   { r set op() }
-      catch { case exc: Throwable => r raise exc }
-      r
-    } else {
-      val ir = scheduler askDoQuickly op
-      ir onComplete {
-        case Left(result) => r set result
-        case Right(exc)   => r raise exc
-      }
-      r
-    }
-  }
-
-  def onCompilerThread = Thread.currentThread == compileRunner
-
-  /** Info given for every member found by completion
-   */
-  abstract class Member {
-    val sym: Symbol
-    val tpe: Type
-    val accessible: Boolean
-    def implicitlyAdded = false
-  }
-
-  case class TypeMember(
-    sym: Symbol,
-    tpe: Type,
-    accessible: Boolean,
-    inherited: Boolean,
-    viaView: Symbol) extends Member {
-    override def implicitlyAdded = viaView != NoSymbol
-  }
-
-  case class ScopeMember(
-    sym: Symbol,
-    tpe: Type,
-    accessible: Boolean,
-    viaImport: Tree) extends Member
-
-  // items that get sent to scheduler
-
-  abstract class WorkItem extends (() => Unit) {
-    val onCompilerThread = self.onCompilerThread
-
-    /** Raise a MissingResponse if the work item carries a response. */
-    def raiseMissing(): Unit
-  }
-
-  case class ReloadItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem {
-    def apply() = reload(sources, response)
-    override def toString = "reload "+sources
-
-    def raiseMissing() =
-      response raise new MissingResponse
-  }
-
-  case class FilesDeletedItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem {
-    def apply() = filesDeleted(sources, response)
-    override def toString = "files deleted "+sources
-
-    def raiseMissing() =
-      response raise new MissingResponse
-  }
-
-  case class AskTypeAtItem(val pos: Position, response: Response[Tree]) extends WorkItem {
-    def apply() = self.getTypedTreeAt(pos, response)
-    override def toString = "typeat "+pos.source+" "+pos.show
-
-    def raiseMissing() =
-      response raise new MissingResponse
-  }
-
-  case class AskTypeItem(val source: SourceFile, val forceReload: Boolean, response: Response[Tree]) extends WorkItem {
-    def apply() = self.getTypedTree(source, forceReload, response)
-    override def toString = "typecheck"
-
-    def raiseMissing() =
-      response raise new MissingResponse
-  }
-
-  case class AskTypeCompletionItem(val pos: Position, response: Response[List[Member]]) extends WorkItem {
-    def apply() = self.getTypeCompletion(pos, response)
-    override def toString = "type completion "+pos.source+" "+pos.show
-
-    def raiseMissing() =
-      response raise new MissingResponse
-  }
-
-  case class AskScopeCompletionItem(val pos: Position, response: Response[List[Member]]) extends WorkItem {
-    def apply() = self.getScopeCompletion(pos, response)
-    override def toString = "scope completion "+pos.source+" "+pos.show
-
-    def raiseMissing() =
-      response raise new MissingResponse
-  }
-
-  class AskToDoFirstItem(val source: SourceFile) extends WorkItem {
-    def apply() = {
-      moveToFront(List(source))
-      enableIgnoredFile(source.file)
-    }
-    override def toString = "dofirst "+source
-
-    def raiseMissing() = ()
-  }
-
-  case class AskLinkPosItem(val sym: Symbol, val source: SourceFile, response: Response[Position]) extends WorkItem {
-    def apply() = self.getLinkPos(sym, source, response)
-    override def toString = "linkpos "+sym+" in "+source
-
-    def raiseMissing() =
-      response raise new MissingResponse
-  }
-
-  case class AskDocCommentItem(val sym: Symbol, val source: SourceFile, val site: Symbol, val fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]) extends WorkItem {
-    def apply() = self.getDocComment(sym, source, site, fragments, response)
-    override def toString = "doc comment "+sym+" in "+source+" with fragments:"+fragments.mkString("(", ",", ")")
-
-    def raiseMissing() =
-      response raise new MissingResponse
-  }
-
-  case class AskLoadedTypedItem(val source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) extends WorkItem {
-    def apply() = self.waitLoadedTyped(source, response, keepLoaded, this.onCompilerThread)
-    override def toString = "wait loaded & typed "+source
-
-    def raiseMissing() =
-      response raise new MissingResponse
-  }
-
-  case class AskParsedEnteredItem(val source: SourceFile, val keepLoaded: Boolean, response: Response[Tree]) extends WorkItem {
-    def apply() = self.getParsedEntered(source, keepLoaded, response, this.onCompilerThread)
-    override def toString = "getParsedEntered "+source+", keepLoaded = "+keepLoaded
-
-    def raiseMissing() =
-      response raise new MissingResponse
-  }
-
-  @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
-  case class AskInstrumentedItem(val source: SourceFile, line: Int, response: Response[(String, Array[Char])]) extends WorkItem {
-    def apply() = self.getInstrumented(source, line, response)
-    override def toString = "getInstrumented "+source
-
-    def raiseMissing() =
-      response raise new MissingResponse
-  }
-
-  /** A do-nothing work scheduler that responds immediately with MissingResponse.
-   *
-   *  Used during compiler shutdown.
-   */
-  class NoWorkScheduler extends WorkScheduler {
-
-    override def postWorkItem(action: Action) = synchronized {
-      action match {
-        case w: WorkItem => w.raiseMissing()
-        case e: EmptyAction => // do nothing
-        case _ => println("don't know what to do with this " + action.getClass)
-      }
-    }
-
-    override def doQuickly[A](op: () => A): A = {
-      throw new FailedInterrupt(new Exception("Posted a work item to a compiler that's shutting down"))
-    }
-
-    override def askDoQuickly[A](op: () => A): InterruptReq { type R = A } = {
-      val ir = new InterruptReq {
-        type R = A
-        val todo = () => throw new MissingResponse
-      }
-      ir.execute()
-      ir
-    }
-
-  }
-
-}
-
-  // ---------------- Interpreted exceptions -------------------
-
-/** Signals a request for a fresh background compiler run.
- *  Note: The class has to stay top-level so that the PresentationCompilerThread may access it.
- */
-class FreshRunReq extends ControlThrowable
-
-/** Signals a request for a shutdown of the presentation compiler.
- *  Note: The object has to stay top-level so that the PresentationCompilerThread may access it.
- */
-object ShutdownReq extends ControlThrowable
-
-class NoSuchUnitError(file: AbstractFile) extends Exception("no unit found for file "+file)
-
-class MissingResponse extends Exception("response missing")
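
The CompilerControl API above is driven from a client thread: the client posts ask* work items together with a Response and then blocks on Response.get, which yields Left(result) on success or Right(exception) on failure. A rough client-side sketch of that flow follows; it assumes a 2.10.x scala-compiler jar on the classpath, and the sample source, the usejavacp setting and the println reporting are illustrative assumptions rather than anything prescribed by this commit.

    import scala.tools.nsc.Settings
    import scala.tools.nsc.reporters.ConsoleReporter
    import scala.tools.nsc.interactive.{Global, Response}
    import scala.reflect.internal.util.BatchSourceFile

    object PresentationCompilerSketch {
      def main(args: Array[String]): Unit = {
        val settings = new Settings
        settings.usejavacp.value = true                    // assumption: scala-library is on the JVM classpath
        val compiler = new Global(settings, new ConsoleReporter(settings))

        val source = new BatchSourceFile("Sample.scala", "object Sample { val x = 42 }")

        // Load the source and wait until the reload item has been processed.
        val loaded = new Response[Unit]
        compiler.askReload(List(source), loaded)
        loaded.get                                         // blocks; Left(()) once the reload ran

        // Ask for the fully attributed tree of the loaded unit.
        val typed = new Response[compiler.Tree]
        compiler.askLoadedTyped(source, typed)
        typed.get match {
          case Left(tree) => println("typed tree: " + tree)
          case Right(ex)  => println("presentation compiler failed: " + ex)
        }

        compiler.askShutdown()
      }
    }
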
diff --git a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala b/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
deleted file mode 100644
index 4a61a98..0000000
--- a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
+++ /dev/null
@@ -1,165 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection.mutable.ArrayBuffer
-import scala.annotation.tailrec
-
-trait ContextTrees { self: Global =>
-
-  type Context = analyzer.Context
-  lazy val NoContext = analyzer.NoContext
-  type Contexts = ArrayBuffer[ContextTree]
-
-  /** A context tree contains contexts that are indexed by positions.
-   *  It satisfies the following properties:
-   *  1. All contexts come from compiling the same unit.
-   *  2. Child contexts have parent contexts in their outer chain.
-   *  3. The `pos` field of a context is the same as `context.tree.pos`, unless that
-   *     position is transparent. In that case, `pos` equals the position of
-   *     one of the solid descendants of `context.tree`.
-   *  4. Children of a context have non-overlapping increasing positions.
-   *  5. No context in the tree has a transparent position.
-   */
-  class ContextTree(val pos: Position, val context: Context, val children: ArrayBuffer[ContextTree]) {
-    def this(pos: Position, context: Context) = this(pos, context, new ArrayBuffer[ContextTree])
-    override def toString = "ContextTree("+pos+", "+children+")"
-  }
-
-  /** Returns the most precise context possible for the given `pos`.
-   *
-   *  It looks for the finest ContextTree containing `pos`, and then looks inside
-   *  this ContextTree for a child ContextTree located immediately before `pos`.
-   *  If such a child exists, returns its context, otherwise returns the context of
-   *  the parent ContextTree.
-   *
-   *  This is required to always return a context which contains all the imports
-   *  declared up to `pos` (see SI-7280 for a test case).
-   *
-   *  Can return None if `pos` is before any valid Scala code.
-   */
-  def locateContext(contexts: Contexts, pos: Position): Option[Context] = synchronized {
-    @tailrec
-    def locateFinestContextTree(context: ContextTree): ContextTree = {
-      if (context.pos includes pos) {
-        locateContextTree(context.children, pos) match {
-          case Some(x) =>
-            locateFinestContextTree(x)
-          case None =>
-            context
-        }
-      } else {
-        context
-      }
-    }
-    locateContextTree(contexts, pos) map locateFinestContextTree map (_.context)
-  }
-
-  /** Returns the ContextTree containing `pos`, or the ContextTree positioned just before `pos`,
-   *  or None if `pos` is located before all ContextTrees.
-   */
-  def locateContextTree(contexts: Contexts, pos: Position): Option[ContextTree] = {
-    if (contexts.isEmpty) None
-    else {
-      @tailrec
-      def loop(lo: Int, hi: Int, previousSibling: Option[ContextTree]): Option[ContextTree] = {
-        if (pos properlyPrecedes contexts(lo).pos)
-          previousSibling
-        else if (contexts(hi).pos properlyPrecedes pos)
-          Some(contexts(hi))
-        else {
-          val mid = (lo + hi) / 2
-          val midpos = contexts(mid).pos
-          if (midpos includes pos)
-            Some(contexts(mid))
-          else if (midpos properlyPrecedes pos)
-            loop(mid + 1, hi, Some(contexts(mid)))
-          else
-            loop(lo, mid, previousSibling)
-        }
-      }
-      loop(0, contexts.length - 1, None)
-    }
-  }
-
-  /** Insert a context at correct position into a buffer of context trees.
-   *  If the `context` has a transparent position, add it multiple times
-   *  at the positions of all its solid descendant trees.
-   */
-  def addContext(contexts: Contexts, context: Context): Unit = {
-    val cpos = context.tree.pos
-    if (cpos.isTransparent)
-      for (t <- context.tree.children flatMap solidDescendants)
-        addContext(contexts, context, t.pos)
-    else
-      addContext(contexts, context, cpos)
-  }
-
-  /** Insert a context with non-transparent position `cpos`
-   *  at correct position into a buffer of context trees.
-   */
-  def addContext(contexts: Contexts, context: Context, cpos: Position): Unit = synchronized {
-    try {
-      if (!cpos.isRange) {}
-      else if (contexts.isEmpty) contexts += new ContextTree(cpos, context)
-      else {
-        val hi = contexts.length - 1
-        if (contexts(hi).pos precedes cpos)
-          contexts += new ContextTree(cpos, context)
-        else if (contexts(hi).pos properlyIncludes cpos) // fast path w/o search
-          addContext(contexts(hi).children, context, cpos)
-        else if (cpos precedes contexts(0).pos)
-          new ContextTree(cpos, context) +=: contexts
-        else {
-          def insertAt(idx: Int): Boolean = {
-            val oldpos = contexts(idx).pos
-            if (oldpos sameRange cpos) {
-              contexts(idx) = new ContextTree(cpos, context, contexts(idx).children)
-              true
-            } else if (oldpos includes cpos) {
-              addContext(contexts(idx).children, context, cpos)
-              true
-            } else if (cpos includes oldpos) {
-              val start = contexts.indexWhere(cpos includes _.pos)
-              val last = contexts.lastIndexWhere(cpos includes _.pos)
-              contexts(start) = new ContextTree(cpos, context, contexts.slice(start, last + 1))
-              contexts.remove(start + 1, last - start)
-              true
-            } else false
-          }
-          def loop(lo: Int, hi: Int) {
-            if (hi - lo > 1) {
-              val mid = (lo + hi) / 2
-              val midpos = contexts(mid).pos
-              if (cpos precedes midpos)
-                loop(lo, mid)
-              else if (midpos precedes cpos)
-                loop(mid, hi)
-              else
-                addContext(contexts(mid).children, context, cpos)
-            } else if (!insertAt(lo) && !insertAt(hi)) {
-              val lopos = contexts(lo).pos
-              val hipos = contexts(hi).pos
-              if ((lopos precedes cpos) && (cpos precedes hipos))
-                contexts.insert(hi, new ContextTree(cpos, context))
-              else
-                inform("internal error? skewed positions: "+lopos+" !< "+cpos+" !< "+hipos)
-            }
-          }
-          loop(0, hi)
-        }
-      }
-    } catch {
-      case ex: Throwable =>
-        println(ex)
-        ex.printStackTrace()
-        println("failure inserting "+cpos+" into "+contexts+"/"+contexts(contexts.length - 1).pos+"/"+
-                (contexts(contexts.length - 1).pos includes cpos))
-        throw ex
-    }
-  }
-}
-
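
locateContextTree above binary-searches the children buffer, which addContext keeps sorted and non-overlapping. The same lookup over plain integer spans, as a self-contained Scala sketch in which Span is a hypothetical stand-in for Position:

    object IntervalLocateSketch {
      // Hypothetical stand-in for Position: the half-open range [start, end).
      final case class Span(start: Int, end: Int) {
        def includes(p: Int): Boolean         = start <= p && p < end
        def properlyPrecedes(p: Int): Boolean = end <= p
      }

      /** Find the span containing `p`, or the span just before it, mirroring locateContextTree. */
      def locate(spans: Vector[Span], p: Int): Option[Span] =
        if (spans.isEmpty) None
        else {
          @annotation.tailrec
          def loop(lo: Int, hi: Int, prev: Option[Span]): Option[Span] =
            if (p < spans(lo).start) prev                           // before the remaining range
            else if (spans(hi).properlyPrecedes(p)) Some(spans(hi)) // after the remaining range
            else {
              val mid = (lo + hi) / 2
              if (spans(mid).includes(p)) Some(spans(mid))
              else if (spans(mid).properlyPrecedes(p)) loop(mid + 1, hi, Some(spans(mid)))
              else loop(lo, mid, prev)
            }
          loop(0, spans.length - 1, None)
        }

      def main(args: Array[String]): Unit = {
        val spans = Vector(Span(0, 10), Span(12, 20), Span(25, 30))
        println(locate(spans, 15))  // Some(Span(12,20)) -- inside the second span
        println(locate(spans, 22))  // Some(Span(12,20)) -- in the gap just after it
        println(locate(spans, -1))  // None              -- before all spans
      }
    }
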
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
deleted file mode 100644
index d6fa42b..0000000
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ /dev/null
@@ -1,1214 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter }
-import scala.collection.mutable
-import mutable.{LinkedHashMap, SynchronizedMap, HashSet, SynchronizedSet}
-import scala.concurrent.SyncVar
-import scala.util.control.ControlThrowable
-import scala.tools.nsc.io.{ AbstractFile, LogReplay, Logger, NullLogger, Replayer }
-import scala.tools.nsc.util.{ WorkScheduler, MultiHashMap }
-import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, RangePosition, NoPosition }
-import scala.tools.nsc.reporters._
-import scala.tools.nsc.symtab._
-import scala.tools.nsc.ast._
-import scala.tools.nsc.io.Pickler._
-import scala.tools.nsc.typechecker.DivergentImplicit
-import scala.annotation.tailrec
-import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
-import scala.annotation.elidable
-import scala.language.implicitConversions
-
-/** The main class of the presentation compiler in an interactive environment such as an IDE
- */
-class Global(settings: Settings, _reporter: Reporter, projectName: String = "")  extends {
-  /* Is the compiler initializing? Early def, so that the field is true during the
-   *  execution of the super constructor.
-   */
-  private var initializing = true
-} with scala.tools.nsc.Global(settings, _reporter)
-  with CompilerControl
-  with RangePositions
-  with ContextTrees
-  with RichCompilationUnits
-  with ScratchPadMaker
-  with Picklers {
-
-  import definitions._
-
-  val debugIDE: Boolean = settings.YpresentationDebug.value
-  val verboseIDE: Boolean = settings.YpresentationVerbose.value
-
-  private def replayName = settings.YpresentationReplay.value
-  private def logName = settings.YpresentationLog.value
-  private def afterTypeDelay = settings.YpresentationDelay.value
-  private final val SleepTime = 10
-
-  val log =
-    if (replayName != "") new Replayer(new FileReader(replayName))
-    else if (logName != "") new Logger(new FileWriter(logName))
-    else NullLogger
-
-  import log.logreplay
-  debugLog("logger: " + log.getClass + " writing to " + (new java.io.File(logName)).getAbsolutePath)
-  debugLog("classpath: "+classPath)
-
-  private var curTime = System.nanoTime
-  private def timeStep = {
-    val last = curTime
-    curTime = System.nanoTime
-    ", delay = " + (curTime - last) / 1000000 + "ms"
-  }
-
-  /** Print msg only when debugIDE is true. */
-  @inline final def debugLog(msg: => String) =
-    if (debugIDE) println("[%s] %s".format(projectName, msg))
-
-  /** Inform with msg only when verboseIDE is true. */
-  @inline final def informIDE(msg: => String) =
-    if (verboseIDE) println("[%s][%s]".format(projectName, msg))
-
-  override def forInteractive = true
-
-  /** A map of all loaded files to the rich compilation units that correspond to them.
-   */
-  val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with
-                       SynchronizedMap[AbstractFile, RichCompilationUnit] {
-    override def put(key: AbstractFile, value: RichCompilationUnit) = {
-      val r = super.put(key, value)
-      if (r.isEmpty) debugLog("added unit for "+key)
-      r
-    }
-    override def remove(key: AbstractFile) = {
-      val r = super.remove(key)
-      if (r.nonEmpty) debugLog("removed unit for "+key)
-      r
-    }
-  }
-
-  /** A set containing all those files that need to be removed.
-   *  Units are removed by getUnit, typically once a unit has finished compiling.
-   */
-  protected val toBeRemoved: mutable.Set[AbstractFile] =
-    new HashSet[AbstractFile] with SynchronizedSet[AbstractFile]
-
-  /** A set containing all those files that need to be removed after a full background compiler run
-   */
-  protected val toBeRemovedAfterRun: mutable.Set[AbstractFile] =
-    new HashSet[AbstractFile] with SynchronizedSet[AbstractFile]
-
-  class ResponseMap extends MultiHashMap[SourceFile, Response[Tree]] {
-    override def += (binding: (SourceFile, Set[Response[Tree]])) = {
-      assert(interruptsEnabled, "delayed operation within an ask")
-      super.+=(binding)
-    }
-  }
-
-  /** A map that associates with each abstract file the set of responses that are waiting
-   *  (via waitLoadedTyped) for the unit associated with the abstract file to be loaded and completely typechecked.
-   */
-  protected val waitLoadedTypeResponses = new ResponseMap
-
-  /** A map that associates with each abstract file the set of responses that are waiting
-   *  (via build) for the unit associated with the abstract file to be parsed and entered
-   */
-  protected var getParsedEnteredResponses = new ResponseMap
-
-  private def cleanResponses(rmap: ResponseMap): Unit = {
-    for ((source, rs) <- rmap.toList) {
-      for (r <- rs) {
-        if (getUnit(source).isEmpty)
-          r raise new NoSuchUnitError(source.file)
-        if (r.isComplete)
-          rmap(source) -= r
-      }
-      if (rmap(source).isEmpty)
-        rmap -= source
-    }
-  }
-
-  private def cleanAllResponses() {
-    cleanResponses(waitLoadedTypeResponses)
-    cleanResponses(getParsedEnteredResponses)
-  }
-
-  private def checkNoOutstanding(rmap: ResponseMap): Unit =
-    for ((_, rs) <- rmap.toList; r <- rs) {
-      debugLog("ERROR: missing response, request will be discarded")
-      r raise new MissingResponse
-    }
-
-  def checkNoResponsesOutstanding() {
-    checkNoOutstanding(waitLoadedTypeResponses)
-    checkNoOutstanding(getParsedEnteredResponses)
-  }
-
-  /** The compilation unit corresponding to a source file
-   *  if it does not yet exist create a new one atomically
-   *  Note: We want to remove this.
-   */
-  protected[interactive] def getOrCreateUnitOf(source: SourceFile): RichCompilationUnit =
-    unitOfFile.getOrElse(source.file, { println("precondition violated: "+source+" is not loaded"); new Exception().printStackTrace(); new RichCompilationUnit(source) })
-
-  /** Work through toBeRemoved list to remove any units.
-   *  Then return optionally unit associated with given source.
-   */
-  protected[interactive] def getUnit(s: SourceFile): Option[RichCompilationUnit] = {
-    toBeRemoved.synchronized {
-      for (f <- toBeRemoved) {
-        informIDE("removed: "+s)
-        unitOfFile -= f
-        allSources = allSources filter (_.file != f)
-      }
-      toBeRemoved.clear()
-    }
-    unitOfFile get s.file
-  }
-
-  /** A list giving all files to be typechecked in the order they should be checked.
-   */
-  protected var allSources: List[SourceFile] = List()
-
-  private var lastException: Option[Throwable] = None
-
-  /** A list of files that crashed the compiler. They will be ignored during background
-   *  compilation until they are removed from this list.
-   */
-  private var ignoredFiles: Set[AbstractFile] = Set()
-
-  /** Flush the buffer of sources that are ignored during background compilation. */
-  def clearIgnoredFiles() {
-    ignoredFiles = Set()
-  }
-
-  /** Remove a crashed file from the ignore buffer. Background compilation will take it into account
-   *  and errors will be reported against it. */
-  def enableIgnoredFile(file: AbstractFile) {
-    ignoredFiles -= file
-    debugLog("Removed crashed file %s. Still in the ignored buffer: %s".format(file, ignoredFiles))
-  }
-
-  /** The currently active typer run */
-  private var currentTyperRun: TyperRun = _
-  newTyperRun()
-
-  /** Is a background compiler run needed?
-   *  Note: outOfDate is true as long as there is a background compile scheduled or going on.
-   */
-  private var outOfDate = false
-
-  def isOutOfDate: Boolean = outOfDate
-
-  def demandNewCompilerRun() = {
-    if (outOfDate) throw new FreshRunReq // cancel background compile
-    else outOfDate = true            // proceed normally and enable new background compile
-  }
-
-  protected[interactive] var minRunId = 1
-
-  private[interactive] var interruptsEnabled = true
-
-  private val NoResponse: Response[_] = new Response[Any]
-
-  /** The response that is currently pending, i.e. the compiler
-   *  is working on providing an answer for it.
-   */
-  private var pendingResponse: Response[_] = NoResponse
-
-  // ----------- Overriding hooks in nsc.Global -----------------------
-
-  /** Called from parser, which signals hereby that a method definition has been parsed.
-   */
-  override def signalParseProgress(pos: Position) {
-    // We only want to be interruptible when running on the PC thread.
-    if(onCompilerThread) {
-      checkForMoreWork(pos)
-    }
-  }
-
-  /** Called from typechecker, which signals hereby that a node has been completely typechecked.
-   *  If the node includes unit.targetPos, abandons run and returns newly attributed tree.
-   *  Otherwise, if there's some higher priority work to be done, also abandons run with a FreshRunReq.
-   *  @param  context  The context that typechecked the node
-   *  @param  old      The original node
-   *  @param  result   The transformed node
-   */
-  override def signalDone(context: Context, old: Tree, result: Tree) {
-    val canObserveTree = (
-         interruptsEnabled
-      && analyzer.lockedCount == 0
-      && !context.bufferErrors // SI-7558 look away during exploratory typing in "silent mode"
-    )
-    if (canObserveTree) {
-      if (context.unit.exists &&
-          result.pos.isOpaqueRange &&
-          (result.pos includes context.unit.targetPos)) {
-        var located = new TypedLocator(context.unit.targetPos) locateIn result
-        if (located == EmptyTree) {
-          println("something's wrong: no "+context.unit+" in "+result+result.pos)
-          located = result
-        }
-        throw new TyperResult(located)
-      }
-      else {
-        try {
-          checkForMoreWork(old.pos)
-        } catch {
-          case ex: ValidateException => // Ignore, this will have been reported elsewhere
-            debugLog("validate exception caught: "+ex)
-          case ex: Throwable =>
-            log.flush()
-            throw ex
-        }
-      }
-    }
-  }
-
-  /** Called from typechecker every time a context is created.
-   *  Registers the context in a context tree
-   */
-  override def registerContext(c: Context) = c.unit match {
-    case u: RichCompilationUnit => addContext(u.contexts, c)
-    case _ =>
-  }
-
-  /** The top level classes and objects currently seen in the presentation compiler
-   */
-  private val currentTopLevelSyms = new mutable.LinkedHashSet[Symbol]
-
-  /** The top level classes and objects no longer seen in the presentation compiler
-   */
-  val deletedTopLevelSyms = new mutable.LinkedHashSet[Symbol] with mutable.SynchronizedSet[Symbol]
-
-  /** Called from typechecker every time a top-level class or object is entered.
-   */
-  override def registerTopLevelSym(sym: Symbol) { currentTopLevelSyms += sym }
-
-  /** Symbol loaders in the IDE parse all source files loaded from a package for
-   *  top-level idents. Therefore, we can detect top-level symbols that have a name
-   *  different from their source file
-   */
-  override lazy val loaders = new BrowsingLoaders {
-    val global: Global.this.type = Global.this
-  }
-
-  // ----------------- Polling ---------------------------------------
-
-  case class WorkEvent(atNode: Int, atMillis: Long)
-
-  private var moreWorkAtNode: Int = -1
-  private var nodesSeen = 0
-  private var lastWasReload = false
-
-  /** The number of pollForWorks after which the presentation compiler yields.
-   *  Yielding improves responsiveness on systems with few cores because it
-   *  gives the UI thread a chance to get new tasks and interrupt the presentation
-   *  compiler with them.
-   */
-  private final val yieldPeriod = 10
-
-  /** Called from runner thread and signalDone:
-   *  Poll for interrupts and execute them immediately.
-   *  Then, poll for exceptions and execute them.
-   *  Then, poll for work reload/typedTreeAt/doFirst commands during background checking.
-   *  @param pos   The position of the tree if polling while typechecking, NoPosition otherwise
-   *
-   */
-  private[interactive] def pollForWork(pos: Position) {
-    if (!interruptsEnabled) return
-    if (pos == NoPosition || nodesSeen % yieldPeriod == 0)
-      Thread.`yield`()
-
-    def nodeWithWork(): Option[WorkEvent] =
-      if (scheduler.moreWork || pendingResponse.isCancelled) Some(new WorkEvent(nodesSeen, System.currentTimeMillis))
-      else None
-
-    nodesSeen += 1
-    logreplay("atnode", nodeWithWork()) match {
-      case Some(WorkEvent(id, _)) =>
-        debugLog("some work at node "+id+" current = "+nodesSeen)
-//        assert(id >= nodesSeen)
-        moreWorkAtNode = id
-      case None =>
-    }
-
-    if (nodesSeen >= moreWorkAtNode) {
-
-      logreplay("asked", scheduler.pollInterrupt()) match {
-        case Some(ir) =>
-          try {
-            interruptsEnabled = false
-            debugLog("ask started"+timeStep)
-            ir.execute()
-          } finally {
-            debugLog("ask finished"+timeStep)
-            interruptsEnabled = true
-          }
-          pollForWork(pos)
-        case _ =>
-      }
-
-      if (logreplay("cancelled", pendingResponse.isCancelled)) {
-        throw CancelException
-      }
-
-      logreplay("exception thrown", scheduler.pollThrowable()) match {
-        case Some(ex: FreshRunReq) =>
-          newTyperRun()
-          minRunId = currentRunId
-          demandNewCompilerRun()
-
-        case Some(ShutdownReq) =>
-          scheduler.synchronized { // lock the work queue so no more items are posted while we clean it up
-            val units = scheduler.dequeueAll {
-              case item: WorkItem => Some(item.raiseMissing())
-              case _ => Some(())
-            }
-
-            // don't forget to service interrupt requests
-            val iqs = scheduler.dequeueAllInterrupts(_.execute())
-
-            debugLog("ShutdownReq: cleaning work queue (%d items)".format(units.size))
-            debugLog("Cleanup up responses (%d loadedType pending, %d parsedEntered pending)"
-                .format(waitLoadedTypeResponses.size, getParsedEnteredResponses.size))
-            checkNoResponsesOutstanding()
-
-            log.flush();
-            scheduler = new NoWorkScheduler
-            throw ShutdownReq
-          }
-
-        case Some(ex: Throwable) => log.flush(); throw ex
-        case _ =>
-      }
-
-      lastWasReload = false
-
-      logreplay("workitem", scheduler.nextWorkItem()) match {
-        case Some(action) =>
-          try {
-            debugLog("picked up work item at "+pos+": "+action+timeStep)
-            action()
-            debugLog("done with work item: "+action)
-          } finally {
-            debugLog("quitting work item: "+action+timeStep)
-          }
-        case None =>
-      }
-    }
-  }
-
-  protected def checkForMoreWork(pos: Position) {
-    val typerRun = currentTyperRun
-    pollForWork(pos)
-    if (typerRun != currentTyperRun) demandNewCompilerRun()
-  }
-
-  def debugInfo(source : SourceFile, start : Int, length : Int): String = {
-    println("DEBUG INFO "+source+"/"+start+"/"+length)
-    val end = start+length
-    val pos = rangePos(source, start, start, end)
-
-    val tree = locateTree(pos)
-    val sw = new StringWriter
-    val pw = new PrintWriter(sw)
-    newTreePrinter(pw).print(tree)
-    pw.flush
-
-    val typed = new Response[Tree]
-    askTypeAt(pos, typed)
-    val typ = typed.get.left.toOption match {
-      case Some(tree) =>
-        val sw = new StringWriter
-        val pw = new PrintWriter(sw)
-        newTreePrinter(pw).print(tree)
-        pw.flush
-        sw.toString
-      case None => "<None>"
-    }
-
-    val completionResponse = new Response[List[Member]]
-    askTypeCompletion(pos, completionResponse)
-    val completion = completionResponse.get.left.toOption match {
-      case Some(members) =>
-        members mkString "\n"
-      case None => "<None>"
-    }
-
-    source.content.view.drop(start).take(length).mkString+" : "+source.path+" ("+start+", "+end+
-    ")\n\nlocateTree:\n"+sw.toString+"\n\naskTypeAt:\n"+typ+"\n\ncompletion:\n"+completion
-  }
-
-  // ----------------- The Background Runner Thread -----------------------
-
-  private var threadId = 0
-
-  /** The current presentation compiler runner */
-  @volatile private[interactive] var compileRunner: Thread = newRunnerThread()
-
-  /** Check that the currently executing thread is the presentation compiler thread.
-   *
-   *  Compiler initialization may happen on a different thread (signalled by globalPhase being NoPhase)
-   */
-  @elidable(elidable.WARNING)
-  override def assertCorrectThread() {
-    assert(initializing || onCompilerThread,
-        "Race condition detected: You are running a presentation compiler method outside the PC thread.[phase: %s]".format(globalPhase) +
-        " Please file a ticket with the current stack trace at https://www.assembla.com/spaces/scala-ide/support/tickets")
-  }
-
-  /** Create a new presentation compiler runner.
-   */
-  private def newRunnerThread(): Thread = {
-    threadId += 1
-    compileRunner = new PresentationCompilerThread(this, projectName)
-    compileRunner.setDaemon(true)
-    compileRunner.start()
-    compileRunner
-  }
-
-  private def ensureUpToDate(unit: RichCompilationUnit) =
-    if (!unit.isUpToDate && unit.status != JustParsed) reset(unit) // reparse previously typechecked units.
-
-  /** Compile all loaded source files in the order given by `allSources`.
-   */
-  private[interactive] final def backgroundCompile() {
-    informIDE("Starting new presentation compiler type checking pass")
-    reporter.reset()
-
-    // remove any files that are no longer maintained by the presentation compiler (i.e. closed)
-    allSources = allSources filter (s => unitOfFile contains (s.file))
-
-    // ensure all loaded units are parsed
-    for (s <- allSources; unit <- getUnit(s)) {
-      // checkForMoreWork(NoPosition)  // disabled, as any work done here would be in an inconsistent state
-      ensureUpToDate(unit)
-      parseAndEnter(unit)
-      serviceParsedEntered()
-    }
-
-    // sleep window
-    if (afterTypeDelay > 0 && lastWasReload) {
-      val limit = System.currentTimeMillis() + afterTypeDelay
-      while (System.currentTimeMillis() < limit) {
-        Thread.sleep(SleepTime)
-        checkForMoreWork(NoPosition)
-      }
-    }
-
-    // ensure all loaded units are typechecked
-    for (s <- allSources; if !ignoredFiles(s.file); unit <- getUnit(s)) {
-      try {
-        if (!unit.isUpToDate)
-          if (unit.problems.isEmpty || !settings.YpresentationStrict.value)
-            typeCheck(unit)
-          else debugLog("%s has syntax errors. Skipped typechecking".format(unit))
-        else debugLog("already up to date: "+unit)
-        for (r <- waitLoadedTypeResponses(unit.source))
-          r set unit.body
-        serviceParsedEntered()
-      } catch {
-        case ex: FreshRunReq => throw ex           // propagate a new run request
-        case ShutdownReq     => throw ShutdownReq  // propagate a shutdown request
-        case ex: ControlThrowable => throw ex
-        case ex: Throwable =>
-          println("[%s]: exception during background compile: ".format(unit.source) + ex)
-          ex.printStackTrace()
-          for (r <- waitLoadedTypeResponses(unit.source)) {
-            r.raise(ex)
-          }
-          serviceParsedEntered()
-
-          lastException = Some(ex)
-          ignoredFiles += unit.source.file
-          println("[%s] marking unit as crashed (crashedFiles: %s)".format(unit, ignoredFiles))
-
-          reporter.error(unit.body.pos, "Presentation compiler crashed while type checking this file: %s".format(ex.toString()))
-      }
-    }
-
-    // move units removable after this run to the "to-be-removed" buffer
-    toBeRemoved ++= toBeRemovedAfterRun
-
-    // clean out stale waiting responses
-    cleanAllResponses()
-
-    // wind down
-    if (waitLoadedTypeResponses.nonEmpty || getParsedEnteredResponses.nonEmpty) {
-      // need another cycle to treat those
-      newTyperRun()
-      backgroundCompile()
-    } else {
-      outOfDate = false
-      informIDE("Everything is now up to date")
-    }
-  }
-
-  /** Service all pending getParsedEntered requests
-   */
-  private def serviceParsedEntered() {
-    var atOldRun = true
-    for ((source, rs) <- getParsedEnteredResponses; r <- rs) {
-      if (atOldRun) { newTyperRun(); atOldRun = false }
-      getParsedEnteredNow(source, r)
-    }
-    getParsedEnteredResponses.clear()
-  }
-
-  /** Reset unit to unloaded state */
-  private def reset(unit: RichCompilationUnit): Unit = {
-    unit.depends.clear()
-    unit.defined.clear()
-    unit.synthetics.clear()
-    unit.toCheck.clear()
-    unit.checkedFeatures = Set()
-    unit.targetPos = NoPosition
-    unit.contexts.clear()
-    unit.problems.clear()
-    unit.body = EmptyTree
-    unit.status = NotLoaded
-  }
-
-  /** Parse unit and create a name index, unless this has already been done before */
-  private def parseAndEnter(unit: RichCompilationUnit): Unit =
-    if (unit.status == NotLoaded) {
-      debugLog("parsing: "+unit)
-      currentTyperRun.compileLate(unit)
-      if (debugIDE && !reporter.hasErrors) validatePositions(unit.body)
-      if (!unit.isJava) syncTopLevelSyms(unit)
-      unit.status = JustParsed
-    }
-
-  /** Make sure unit is typechecked
-   */
-  private def typeCheck(unit: RichCompilationUnit) {
-    debugLog("type checking: "+unit)
-    parseAndEnter(unit)
-    unit.status = PartiallyChecked
-    currentTyperRun.typeCheck(unit)
-    unit.lastBody = unit.body
-    unit.status = currentRunId
-  }
-
-  /** Update deleted and current top-level symbols sets */
-  def syncTopLevelSyms(unit: RichCompilationUnit) {
-    val deleted = currentTopLevelSyms filter { sym =>
-      /** We sync after the namer phase, which resets to NoPeriod all
-       *  top-level symbols that survive the new parsing round.
-       */
-      sym.sourceFile == unit.source.file &&
-      sym.validTo != NoPeriod &&
-      runId(sym.validTo) < currentRunId
-    }
-    for (d <- deleted) {
-      d.owner.info.decls unlink d
-      deletedTopLevelSyms += d
-      currentTopLevelSyms -= d
-    }
-  }
-
-  /** Move list of files to front of allSources */
-  def moveToFront(fs: List[SourceFile]) {
-    allSources = fs ::: (allSources diff fs)
-  }
-
-  // ----------------- Implementations of client commands -----------------------
-
-  def respond[T](result: Response[T])(op: => T): Unit =
-    respondGradually(result)(Stream(op))
-
-  def respondGradually[T](response: Response[T])(op: => Stream[T]): Unit = {
-    val prevResponse = pendingResponse
-    try {
-      pendingResponse = response
-      if (!response.isCancelled) {
-        var results = op
-        while (!response.isCancelled && results.nonEmpty) {
-          val result = results.head
-          results = results.tail
-          if (results.isEmpty) {
-            response set result
-            debugLog("responded"+timeStep)
-          } else response setProvisionally result
-        }
-      }
-    } catch {
-      case CancelException =>
-        debugLog("cancelled")
-      case ex: FreshRunReq =>
-        if (debugIDE) {
-          println("FreshRunReq thrown during response")
-          ex.printStackTrace()
-        }
-        response raise ex
-        throw ex
-
-      case ex @ ShutdownReq =>
-        if (debugIDE) {
-          println("ShutdownReq thrown during response")
-          ex.printStackTrace()
-        }
-        response raise ex
-        throw ex
-
-      case ex: Throwable =>
-        if (debugIDE) {
-          println("exception thrown during response: "+ex)
-          ex.printStackTrace()
-        }
-        response raise ex
-    } finally {
-      pendingResponse = prevResponse
-    }
-  }
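respondGradually publishes each intermediate element of the stream with setProvisionally and only the final one with set, so a client can surface partial results early. A minimal sketch of the calling pattern (computeQuickMembers and computeAllMembers are hypothetical helpers, not part of this file):

  respondGradually(response) {
    computeQuickMembers() #:: Stream(computeAllMembers())  // provisional result first, final result last
  }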
-
-  private def reloadSource(source: SourceFile) {
-    val unit = new RichCompilationUnit(source)
-    unitOfFile(source.file) = unit
-    toBeRemoved -= source.file
-    toBeRemovedAfterRun -= source.file
-    reset(unit)
-    //parseAndEnter(unit)
-  }
-
-  /** Make sure a set of compilation units is loaded and parsed */
-  private def reloadSources(sources: List[SourceFile]) {
-    newTyperRun()
-    minRunId = currentRunId
-    sources foreach reloadSource
-    moveToFront(sources)
-  }
-
-  /** Make sure a set of compilation units is loaded and parsed */
-  private[interactive] def reload(sources: List[SourceFile], response: Response[Unit]) {
-    informIDE("reload: " + sources)
-    lastWasReload = true
-    respond(response)(reloadSources(sources))
-    demandNewCompilerRun()
-  }
-
-  private[interactive] def filesDeleted(sources: List[SourceFile], response: Response[Unit]) {
-    informIDE("files deleted: " + sources)
-    val deletedFiles = sources.map(_.file).toSet
-    val deletedSyms = currentTopLevelSyms filter {sym => deletedFiles contains sym.sourceFile}
-    for (d <- deletedSyms) {
-      d.owner.info.decls unlink d
-      deletedTopLevelSyms += d
-      currentTopLevelSyms -= d
-    }
-    sources foreach (removeUnitOf(_))
-    minRunId = currentRunId
-    respond(response)(())
-    demandNewCompilerRun()
-  }
-
-  /** Arrange for unit to be removed after run, to give a chance to typecheck the unit fully.
-   *  If we do just removeUnit, some problems with default parameters can ensue.
-   *  Calls to this method could probably be replaced by removeUnit once default parameters are handled more robustly.
-   */
-  private def afterRunRemoveUnitsOf(sources: List[SourceFile]) {
-    toBeRemovedAfterRun ++= sources map (_.file)
-  }
-
-  /** A fully attributed tree located at position `pos` */
-  private def typedTreeAt(pos: Position): Tree = getUnit(pos.source) match {
-    case None =>
-      reloadSources(List(pos.source))
-      try typedTreeAt(pos)
-      finally afterRunRemoveUnitsOf(List(pos.source))
-    case Some(unit) =>
-      informIDE("typedTreeAt " + pos)
-      parseAndEnter(unit)
-      val tree = locateTree(pos)
-      debugLog("at pos "+pos+" was found: "+tree.getClass+" "+tree.pos.show)
-      tree match {
-        case Import(expr, _) =>
-          debugLog("import found"+expr.tpe+(if (expr.tpe == null) "" else " "+expr.tpe.members))
-        case _ =>
-      }
-      if (stabilizedType(tree) ne null) {
-        debugLog("already attributed: "+tree.symbol+" "+tree.tpe)
-        tree
-      } else {
-        unit.targetPos = pos
-        try {
-          debugLog("starting targeted type check")
-          typeCheck(unit)
-//          println("tree not found at "+pos)
-          EmptyTree
-        } catch {
-          case ex: TyperResult => new Locator(pos) locateIn ex.tree
-        } finally {
-          unit.targetPos = NoPosition
-        }
-      }
-  }
-
-  /** A fully attributed tree corresponding to the entire compilation unit  */
-  private[interactive] def typedTree(source: SourceFile, forceReload: Boolean): Tree = {
-    informIDE("typedTree " + source + " forceReload: " + forceReload)
-    val unit = getOrCreateUnitOf(source)
-    if (forceReload) reset(unit)
-    parseAndEnter(unit)
-    if (unit.status <= PartiallyChecked) typeCheck(unit)
-    unit.body
-  }
-
-  /** Set sync var `response` to a fully attributed tree located at position `pos`  */
-  private[interactive] def getTypedTreeAt(pos: Position, response: Response[Tree]) {
-    respond(response)(typedTreeAt(pos))
-  }
-
-  /** Set sync var `response` to a fully attributed tree corresponding to the
-   *  entire compilation unit  */
-  private[interactive] def getTypedTree(source: SourceFile, forceReload: Boolean, response: Response[Tree]) {
-    respond(response)(typedTree(source, forceReload))
-  }
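The methods above back CompilerControl's askReload/askTypeAt protocol. A self-contained client sketch, assuming -usejavacp so the embedded snippet can resolve the standard library; the source text and offsets are illustrative:

  import scala.tools.nsc.Settings
  import scala.tools.nsc.interactive.{Global, Response}
  import scala.tools.nsc.reporters.ConsoleReporter
  import scala.reflect.internal.util.BatchSourceFile

  object AskTypeAtDemo {
    def main(args: Array[String]): Unit = {
      val settings = new Settings
      settings.usejavacp.value = true
      val compiler = new Global(settings, new ConsoleReporter(settings))
      val source   = new BatchSourceFile("Demo.scala", "object Demo { val xs = List(1, 2, 3) }")

      val reloaded = new Response[Unit]
      compiler.askReload(List(source), reloaded)   // parse and enter the unit on the presentation-compiler thread
      reloaded.get

      val typedAt = new Response[compiler.Tree]
      compiler.askTypeAt(compiler.rangePos(source, 18, 18, 20), typedAt)  // range of `xs`
      typedAt.get match {
        case Left(tree) => println(tree.tpe)
        case Right(ex)  => ex.printStackTrace()
      }
      compiler.askShutdown()
    }
  }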
-
-  private def withTempUnits[T](sources: List[SourceFile])(f: (SourceFile => RichCompilationUnit) => T): T = {
-    val unitOfSrc: SourceFile => RichCompilationUnit = src => unitOfFile(src.file)
-    sources filterNot (getUnit(_).isDefined) match {
-      case Nil =>
-        f(unitOfSrc)
-      case unknown =>
-        reloadSources(unknown)
-        try {
-          f(unitOfSrc)
-        } finally
-          afterRunRemoveUnitsOf(unknown)
-    }
-  }
-
-  private def withTempUnit[T](source: SourceFile)(f: RichCompilationUnit => T): T =
-    withTempUnits(List(source)){ srcToUnit =>
-      f(srcToUnit(source))
-    }
-
-  /** Find a 'mirror' of symbol `sym` in unit `unit`. Pre: `unit` is loaded. */
-  private def findMirrorSymbol(sym: Symbol, unit: RichCompilationUnit): Symbol = {
-    val originalTypeParams = sym.owner.typeParams
-    ensureUpToDate(unit)
-    parseAndEnter(unit)
-    val pre = adaptToNewRunMap(ThisType(sym.owner))
-    val rawsym = pre.typeSymbol.info.decl(sym.name)
-    val newsym = rawsym filter { alt =>
-      sym.isType || {
-        try {
-          val tp1 = pre.memberType(alt) onTypeError NoType
-          val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams)
-          matchesType(tp1, tp2, false) || {
-            debugLog(s"findMirrorSymbol matchesType($tp1, $tp2) failed")
-            val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams)
-            matchesType(tp1, tp3, false) || {
-              debugLog(s"findMirrorSymbol fallback matchesType($tp1, $tp3) failed")
-              false
-            }
-          }
-        }
-        catch {
-          case ex: ControlThrowable => throw ex
-          case ex: Throwable =>
-            debugLog("error in findMirrorSymbol: " + ex)
-            ex.printStackTrace()
-            false
-        }
-      }
-    }
-    if (newsym == NoSymbol) {
-      if (rawsym.exists && !rawsym.isOverloaded) rawsym
-      else {
-        debugLog("mirror not found " + sym + " " + unit.source + " " + pre)
-        NoSymbol
-      }
-    } else if (newsym.isOverloaded) {
-      settings.uniqid.value = true
-      debugLog("mirror ambiguous " + sym + " " + unit.source + " " + pre + " " + newsym.alternatives)
-      NoSymbol
-    } else {
-      debugLog("mirror found for " + newsym + ": " + newsym.pos)
-      newsym
-    }
-  }
-
-  /** Implements CompilerControl.askLinkPos */
-  private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) {
-    informIDE("getLinkPos "+sym+" "+source)
-    respond(response) {
-      if (sym.owner.isClass) {
-        withTempUnit(source){ u =>
-          findMirrorSymbol(sym, u).pos
-        }
-      } else {
-        debugLog("link not in class "+sym+" "+source+" "+sym.owner)
-        NoPosition
-      }
-    }
-  }
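A hedged usage sketch for the askLinkPos entry point, assuming a `compiler` instance as in the demo above and a `sym`/`source` pair obtained from earlier asks:

  val linkPos = new Response[compiler.Position]
  compiler.askLinkPos(sym, source, linkPos)
  linkPos.get.left.foreach(pos => println(pos.show))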
-
-  private def forceDocComment(sym: Symbol, unit: RichCompilationUnit) {
-    unit.body foreachPartial {
-      case DocDef(comment, defn) if defn.symbol == sym =>
-        fillDocComment(defn.symbol, comment)
-        EmptyTree
-      case _: ValOrDefDef =>
-        EmptyTree
-    }
-  }
-
-  /** Implements CompilerControl.askDocComment */
-  private[interactive] def getDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)],
-                                         response: Response[(String, String, Position)]) {
-    informIDE(s"getDocComment $sym at $source, site $site")
-    respond(response) {
-      withTempUnits(fragments.unzip._2){ units =>
-        for((sym, src) <- fragments) {
-          val mirror = findMirrorSymbol(sym, units(src))
-          if (mirror ne NoSymbol) forceDocComment(mirror, units(src))
-        }
-        val mirror = findMirrorSymbol(sym, units(source))
-        if (mirror eq NoSymbol)
-          ("", "", NoPosition)
-        else {
-          (expandedDocComment(mirror, site), rawDocComment(mirror), docCommentPos(mirror))
-        }
-      }
-    }
-    // New typer run to remove temp units and drop per-run caches that might refer to symbols entered from temp units.
-    newTyperRun()
-  }
-
-  def stabilizedType(tree: Tree): Type = tree match {
-    case Ident(_) if tree.symbol.isStable =>
-      singleType(NoPrefix, tree.symbol)
-    case Select(qual, _) if qual.tpe != null && tree.symbol.isStable =>
-      singleType(qual.tpe, tree.symbol)
-    case Import(expr, selectors) =>
-      tree.symbol.info match {
-        case analyzer.ImportType(expr) => expr match {
-          case s@Select(qual, name) => singleType(qual.tpe, s.symbol)
-          case i : Ident => i.tpe
-          case _ => tree.tpe
-        }
-        case _ => tree.tpe
-      }
-
-    case _ => tree.tpe
-  }
-
-  import analyzer.{SearchResult, ImplicitSearch}
-
-  private[interactive] def getScopeCompletion(pos: Position, response: Response[List[Member]]) {
-    informIDE("getScopeCompletion " + pos)
-    respond(response) { scopeMembers(pos) }
-  }
-
-  private val Dollar = newTermName("$")
-
-  private class Members[M <: Member] extends LinkedHashMap[Name, Set[M]] {
-    override def default(key: Name) = Set()
-
-    private def matching(sym: Symbol, symtpe: Type, ms: Set[M]): Option[M] = ms.find { m =>
-      (m.sym.name == sym.name) && (m.sym.isType || (m.tpe matches symtpe))
-    }
-
-    private def keepSecond(m: M, sym: Symbol, implicitlyAdded: Boolean): Boolean =
-      m.sym.hasFlag(ACCESSOR | PARAMACCESSOR) &&
-      !sym.hasFlag(ACCESSOR | PARAMACCESSOR) &&
-      (!implicitlyAdded || m.implicitlyAdded)
-
-    def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) {
-      if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) {
-        add(sym.accessed, pre, implicitlyAdded)(toMember)
-      } else if (!sym.name.decodedName.containsName(Dollar) && !sym.isSynthetic && sym.hasRawInfo) {
-        val symtpe = pre.memberType(sym) onTypeError ErrorType
-        matching(sym, symtpe, this(sym.name)) match {
-          case Some(m) =>
-            if (keepSecond(m, sym, implicitlyAdded)) {
-              //print(" -+ "+sym.name)
-              this(sym.name) = this(sym.name) - m + toMember(sym, symtpe)
-            }
-          case None =>
-            //print(" + "+sym.name)
-            this(sym.name) = this(sym.name) + toMember(sym, symtpe)
-        }
-      }
-    }
-
-    def addNonShadowed(other: Members[M]) = {
-      for ((name, ms) <- other)
-        if (ms.nonEmpty && this(name).isEmpty) this(name) = ms
-    }
-
-    def allMembers: List[M] = values.toList.flatten
-  }
-
-  /** Return all members visible without prefix in context enclosing `pos`. */
-  private def scopeMembers(pos: Position): List[ScopeMember] = {
-    typedTreeAt(pos) // to make sure context is entered
-    val context = doLocateContext(pos)
-    val locals = new Members[ScopeMember]
-    val enclosing = new Members[ScopeMember]
-    def addScopeMember(sym: Symbol, pre: Type, viaImport: Tree) =
-      locals.add(sym, pre, false) { (s, st) =>
-        // imported val and var are always marked as inaccessible, but they could be accessed through their getters. SI-7995
-        if (s.hasGetter) 
-          new ScopeMember(s, st, context.isAccessible(s.getter, pre, superAccess = false), viaImport)
-        else
-          new ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport)
-      }
-    def localsToEnclosing() = {
-      enclosing.addNonShadowed(locals)
-      locals.clear()
-    }
-    //print("add scope members")
-    var cx = context
-    while (cx != NoContext) {
-      for (sym <- cx.scope)
-        addScopeMember(sym, NoPrefix, EmptyTree)
-      localsToEnclosing()
-      if (cx == cx.enclClass) {
-        val pre = cx.prefix
-        for (sym <- pre.members)
-          addScopeMember(sym, pre, EmptyTree)
-        localsToEnclosing()
-      }
-      cx = cx.outer
-    }
-    //print("\nadd imported members")
-    for (imp <- context.imports) {
-      val pre = imp.qual.tpe
-      for (sym <- imp.allImportedSymbols)
-        addScopeMember(sym, pre, imp.qual)
-      localsToEnclosing()
-    }
-    // println()
-    val result = enclosing.allMembers
-//    if (debugIDE) for (m <- result) println(m)
-    result
-  }
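Clients reach scopeMembers through askScopeCompletion; a small sketch, reusing the `compiler` from the demo above (`pos` is assumed to be a valid position in a loaded source):

  val scopeDone = new Response[List[compiler.Member]]
  compiler.askScopeCompletion(pos, scopeDone)
  scopeDone.get.left.foreach(_.foreach(m => println(m.sym.nameString + ": " + m.tpe)))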
-
-  private[interactive] def getTypeCompletion(pos: Position, response: Response[List[Member]]) {
-    informIDE("getTypeCompletion " + pos)
-    respondGradually(response) { typeMembers(pos) }
-    //if (debugIDE) typeMembers(pos)
-  }
-
-  private def typeMembers(pos: Position): Stream[List[TypeMember]] = {
-    var tree = typedTreeAt(pos)
-
-    // if tree consists of just x. or x.fo where fo is not yet a full member name
-    // ignore the selection and look in just x.
-    tree match {
-      case Select(qual, name) if tree.tpe == ErrorType => tree = qual
-      case _ =>
-    }
-
-    val context = doLocateContext(pos)
-
-    val shouldTypeQualifier = tree.tpe match {
-      case null           => true
-      case mt: MethodType => mt.isImplicit
-      case _              => false
-    }
-
-    if (shouldTypeQualifier)
-      // TODO: guard with try/catch to deal with ill-typed qualifiers.
-      tree = analyzer.newTyper(context).typedQualifier(tree)
-
-    debugLog("typeMembers at "+tree+" "+tree.tpe)
-
-    val superAccess = tree.isInstanceOf[Super]
-    val members = new Members[TypeMember]
-
-    def addTypeMember(sym: Symbol, pre: Type, inherited: Boolean, viaView: Symbol) = {
-      val implicitlyAdded = viaView != NoSymbol
-      members.add(sym, pre, implicitlyAdded) { (s, st) =>
-        new TypeMember(s, st,
-          context.isAccessible(if (s.hasGetter) s.getter(s.owner) else s, pre, superAccess && !implicitlyAdded),
-          inherited,
-          viaView)
-      }
-    }
-
-    /** Create a function application of a given view function to `tree` and typecheck it.
-     */
-    def viewApply(view: SearchResult): Tree = {
-      assert(view.tree != EmptyTree)
-      analyzer.newTyper(context.makeImplicit(reportAmbiguousErrors = false))
-        .typed(Apply(view.tree, List(tree)) setPos tree.pos)
-        .onTypeError(EmptyTree)
-    }
-
-    val pre = stabilizedType(tree)
-
-    val ownerTpe = tree.tpe match {
-      case analyzer.ImportType(expr) => expr.tpe
-      case null => pre
-      case MethodType(List(), rtpe) => rtpe
-      case _ => tree.tpe
-    }
-
-    //print("add members")
-    for (sym <- ownerTpe.members)
-      addTypeMember(sym, pre, sym.owner != ownerTpe.typeSymbol, NoSymbol)
-    members.allMembers #:: {
-      //print("\nadd pimped")
-      val applicableViews: List[SearchResult] =
-        if (ownerTpe.isErroneous) List()
-        else new ImplicitSearch(
-          tree, functionType(List(ownerTpe), AnyClass.tpe), isView = true,
-          context0 = context.makeImplicit(reportAmbiguousErrors = false)).allImplicits
-      for (view <- applicableViews) {
-        val vtree = viewApply(view)
-        val vpre = stabilizedType(vtree)
-        for (sym <- vtree.tpe.members) {
-          addTypeMember(sym, vpre, false, view.tree.symbol)
-        }
-      }
-      //println()
-      Stream(members.allMembers)
-    }
-  }
-
-  /** Implements CompilerControl.askLoadedTyped */
-  private[interactive] def waitLoadedTyped(source: SourceFile, response: Response[Tree], keepLoaded: Boolean = false, onSameThread: Boolean = true) {
-    getUnit(source) match {
-      case Some(unit) =>
-        if (unit.isUpToDate) {
-          debugLog("already typed");
-          response set unit.body
-        } else if (ignoredFiles(source.file)) {
-          response.raise(lastException.getOrElse(CancelException))
-        } else if (onSameThread) {
-          getTypedTree(source, forceReload = false, response)
-        } else {
-          debugLog("wait for later")
-          outOfDate = true
-          waitLoadedTypeResponses(source) += response
-        }
-      case None =>
-        debugLog("load unit and type")
-        try reloadSources(List(source))
-        finally {
-          waitLoadedTyped(source, response, keepLoaded, onSameThread)
-          if (!keepLoaded) removeUnitOf(source)
-        }
-    }
-  }
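The usual client call is askLoadedTyped, also exercised by the REPL further down in this patch; a minimal sketch (`source` as in the demo above):

  val fullyTyped = new Response[compiler.Tree]
  compiler.askLoadedTyped(source, keepLoaded = false, fullyTyped)
  fullyTyped.get.left.foreach(tree => println(tree))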
-
-  /** Implements CompilerControl.askParsedEntered */
-  private[interactive] def getParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree], onSameThread: Boolean = true) {
-    getUnit(source) match {
-      case Some(unit) =>
-        getParsedEnteredNow(source, response)
-      case None =>
-        try {
-          if (keepLoaded || outOfDate && onSameThread)
-            reloadSources(List(source))
-        } finally {
-          if (keepLoaded || !outOfDate || onSameThread)
-            getParsedEnteredNow(source, response)
-          else
-            getParsedEnteredResponses(source) += response
-        }
-    }
-  }
-
-  /** Parses and enters the given source file, storing the parse tree in `response`. */
-  private def getParsedEnteredNow(source: SourceFile, response: Response[Tree]) {
-    respond(response) {
-      onUnitOf(source) { unit =>
-        parseAndEnter(unit)
-        unit.body
-      }
-    }
-  }
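Its public counterpart is askParsedEntered (used by the REPL's `structure` command below); a short sketch:

  val parsed = new Response[compiler.Tree]
  compiler.askParsedEntered(source, keepLoaded = false, parsed)
  parsed.get.left.foreach(tree => println(tree))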
-
-  @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
-  def getInstrumented(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) =
-    try {
-      interruptsEnabled = false
-      respond(response) {
-        instrument(source, line)
-      }
-    } finally {
-      interruptsEnabled = true
-    }
-
-  // ---------------- Helper classes ---------------------------
-
-  /** A transformer that replaces tree `from` with tree `to` in a given tree */
-  class TreeReplacer(from: Tree, to: Tree) extends Transformer {
-    override def transform(t: Tree): Tree = {
-      if (t == from) to
-      else if ((t.pos includes from.pos) || t.pos.isTransparent) super.transform(t)
-      else t
-    }
-  }
-
-  /** The typer run */
-  class TyperRun extends Run {
-    // units is always empty
-
-    /** canRedefine is used to detect double declarations of classes and objects
-     *  in multiple source files.
-     *  Since the IDE rechecks units several times in the same run, these tests
-     *  are disabled by always returning true here.
-     */
-    override def canRedefine(sym: Symbol) = true
-
-    def typeCheck(unit: CompilationUnit): Unit = {
-      applyPhase(typerPhase, unit)
-    }
-
-    /** Apply a phase to a compilation unit. */
-    private def applyPhase(phase: Phase, unit: CompilationUnit) {
-      atPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit }
-    }
-  }
-
-  def newTyperRun() {
-    currentTyperRun = new TyperRun
-  }
-
-  class TyperResult(val tree: Tree) extends ControlThrowable
-
-  assert(globalPhase.id == 0)
-
-  implicit def addOnTypeError[T](x: => T): OnTypeError[T] = new OnTypeError(x)
-
-  // OnTypeError should still catch TypeError because of cyclic references,
-  // but DivergentImplicit shouldn't leak anymore here
-  class OnTypeError[T](op: => T) {
-    def onTypeError(alt: => T) = try {
-      op
-    } catch {
-      case ex: TypeError =>
-        debugLog("type error caught: "+ex)
-        alt
-      case ex: DivergentImplicit =>
-        if (settings.Xdivergence211.value) {
-          debugLog("this shouldn't happen. DivergentImplicit exception has been thrown with -Xdivergence211 turned on: "+ex)
-          alt
-        } else {
-          debugLog("divergent implicit caught: "+ex)
-          alt
-        }
-    }
-  }
-
-  /** The compiler has been initialized. Constructors are evaluated in textual order,
-   *  so this is set to true only after all super constructors and the primary constructor
-   *  have been executed.
-   */
-  initializing = false
-}
-
-object CancelException extends Exception
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
deleted file mode 100644
index 64e050e..0000000
--- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala
+++ /dev/null
@@ -1,191 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import util.InterruptReq
-import scala.reflect.internal.util.{SourceFile, BatchSourceFile}
-import io.{AbstractFile, PlainFile}
-
-import util.EmptyAction
-import scala.reflect.internal.util.{Position, RangePosition, NoPosition, OffsetPosition, TransparentPosition}
-import io.{Pickler, CondPickler}
-import io.Pickler._
-import scala.collection.mutable
-import mutable.ListBuffer
-
-trait Picklers { self: Global =>
-
-  lazy val freshRunReq =
-    unitPickler
-      .wrapped { _ => new FreshRunReq } { x => () }
-      .labelled ("FreshRunReq")
-      .cond (_.isInstanceOf[FreshRunReq])
-
-  lazy val shutdownReq = singletonPickler(ShutdownReq)
-
-  def defaultThrowable[T <: Throwable]: CondPickler[T] = javaInstancePickler[T] cond { _ => true }
-
-  implicit lazy val throwable: Pickler[Throwable] =
-    freshRunReq | shutdownReq | defaultThrowable
-
-  implicit def abstractFile: Pickler[AbstractFile] =
-    pkl[String]
-      .wrapped[AbstractFile] { new PlainFile(_) } { _.path }
-      .asClass (classOf[PlainFile])
-
-  private val sourceFilesSeen = new mutable.HashMap[AbstractFile, Array[Char]] {
-    override def default(key: AbstractFile) = Array()
-  }
-
-  type Diff = (Int /*start*/, Int /*end*/, String /*replacement*/)
-
-  def delta(f: AbstractFile, cs: Array[Char]): Diff = {
-    val bs = sourceFilesSeen(f)
-    var start = 0
-    while (start < bs.length && start < cs.length && bs(start) == cs(start)) start += 1
-    var end = bs.length
-    var end2 = cs.length
-    while (end > start && end2 > start && bs(end - 1) == cs(end2 - 1)) { end -= 1; end2 -= 1 }
-    sourceFilesSeen(f) = cs
-    (start, end, cs.slice(start, end2).mkString(""))
-  }
-
-  def patch(f: AbstractFile, d: Diff): Array[Char] = {
-    val (start, end, replacement) = d
-    val patched = sourceFilesSeen(f).patch(start, replacement, end - start)
-    sourceFilesSeen(f) = patched
-    patched
-  }
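A worked example of the delta/patch encoding above (the file handle `f` is hypothetical; the second call compares against the content remembered by the first):

  // delta(f, "abcdef".toCharArray) == (0, 0, "abcdef")   // nothing seen yet: the whole content is the replacement
  // delta(f, "abXYef".toCharArray) == (2, 4, "XY")       // replace old chars [2, 4) with "XY"
  // patch(f, (2, 4, "XY")) rebuilds "abXYef" from a previously patched "abcdef"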
-
-  implicit lazy val sourceFile: Pickler[SourceFile] =
-    (pkl[AbstractFile] ~ pkl[Diff]).wrapped[SourceFile] {
-      case f ~ d => new BatchSourceFile(f, patch(f, d))
-    } {
-      f => f.file ~ delta(f.file, f.content)
-    }.asClass (classOf[BatchSourceFile])
-
-  lazy val offsetPosition: CondPickler[OffsetPosition] =
-    (pkl[SourceFile] ~ pkl[Int])
-      .wrapped { case x ~ y => new OffsetPosition(x, y) } { p => p.source ~ p.point }
-      .asClass (classOf[OffsetPosition])
-
-  lazy val rangePosition: CondPickler[RangePosition] =
-    (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int])
-      .wrapped { case source ~ start ~ point ~ end => new RangePosition(source, start, point, end) } { p => p.source ~ p.start ~ p.point ~ p.end }
-      .asClass (classOf[RangePosition])
-
-  lazy val transparentPosition: CondPickler[TransparentPosition] =
-    (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int])
-      .wrapped { case source ~ start ~ point ~ end => new TransparentPosition(source, start, point, end) } { p => p.source ~ p.start ~ p.point ~ p.end }
-      .asClass (classOf[TransparentPosition])
-
-  lazy val noPosition = singletonPickler(NoPosition)
-
-  implicit lazy val position: Pickler[Position] = transparentPosition | rangePosition | offsetPosition | noPosition
-
-  implicit lazy val namePickler: Pickler[Name] =
-    pkl[String] .wrapped[Name] {
-      str => if ((str.length > 1) && (str endsWith "!")) newTypeName(str.init) else newTermName(str)
-    } {
-      name => if (name.isTypeName) name.toString+"!" else name.toString
-    }
-
-  implicit lazy val symPickler: Pickler[Symbol] = {
-    def ownerNames(sym: Symbol, buf: ListBuffer[Name]): ListBuffer[Name] = {
-      if (!sym.isRoot) {
-        ownerNames(sym.owner, buf)
-        buf += (if (sym.isModuleClass) sym.sourceModule else sym).name
-        if (!sym.isType && !sym.isStable) {
-          val sym1 = sym.owner.info.decl(sym.name)
-          if (sym1.isOverloaded) {
-            val index = sym1.alternatives.indexOf(sym)
-            assert(index >= 0, sym1+" not found in alternatives "+sym1.alternatives)
-            buf += newTermName(index.toString)
-          }
-        }
-      }
-      buf
-    }
-    def makeSymbol(root: Symbol, names: List[Name]): Symbol = names match {
-      case List() =>
-        root
-      case name :: rest =>
-        val sym = root.info.decl(name)
-        if (sym.isOverloaded) makeSymbol(sym.alternatives(rest.head.toString.toInt), rest.tail)
-        else makeSymbol(sym, rest)
-    }
-    pkl[List[Name]] .wrapped { makeSymbol(rootMirror.RootClass, _) } { ownerNames(_, new ListBuffer).toList }
-  }
-
-  implicit def workEvent: Pickler[WorkEvent] = {
-    (pkl[Int] ~ pkl[Long])
-      .wrapped { case id ~ ms => WorkEvent(id, ms) } { w => w.atNode ~ w.atMillis }
-  }
-
-  implicit def interruptReq: Pickler[InterruptReq] = {
-    val emptyIR: InterruptReq = new InterruptReq { type R = Unit; val todo = () => () }
-    pkl[Unit] .wrapped { _ =>  emptyIR } { _ => () }
-  }
-
-  implicit def reloadItem: CondPickler[ReloadItem] =
-    pkl[List[SourceFile]]
-      .wrapped { ReloadItem(_, new Response) } { _.sources }
-      .asClass (classOf[ReloadItem])
-
-  implicit def askTypeAtItem: CondPickler[AskTypeAtItem] =
-    pkl[Position]
-      .wrapped { new AskTypeAtItem(_, new Response) } { _.pos }
-      .asClass (classOf[AskTypeAtItem])
-
-  implicit def askTypeItem: CondPickler[AskTypeItem] =
-    (pkl[SourceFile] ~ pkl[Boolean])
-      .wrapped { case source ~ forceReload => new AskTypeItem(source, forceReload, new Response) } { w => w.source ~ w.forceReload }
-      .asClass (classOf[AskTypeItem])
-
-  implicit def askTypeCompletionItem: CondPickler[AskTypeCompletionItem] =
-    pkl[Position]
-      .wrapped { new AskTypeCompletionItem(_, new Response) } { _.pos }
-      .asClass (classOf[AskTypeCompletionItem])
-
-  implicit def askScopeCompletionItem: CondPickler[AskScopeCompletionItem] =
-    pkl[Position]
-      .wrapped { new AskScopeCompletionItem(_, new Response) } { _.pos }
-      .asClass (classOf[AskScopeCompletionItem])
-
-  implicit def askToDoFirstItem: CondPickler[AskToDoFirstItem] =
-    pkl[SourceFile]
-      .wrapped { new AskToDoFirstItem(_) } { _.source }
-      .asClass (classOf[AskToDoFirstItem])
-
-  implicit def askLinkPosItem: CondPickler[AskLinkPosItem] =
-    (pkl[Symbol] ~ pkl[SourceFile])
-      .wrapped { case sym ~ source => new AskLinkPosItem(sym, source, new Response) } { item => item.sym ~ item.source }
-      .asClass (classOf[AskLinkPosItem])
-
-  implicit def askDocCommentItem: CondPickler[AskDocCommentItem] =
-    (pkl[Symbol] ~ pkl[SourceFile] ~ pkl[Symbol] ~ pkl[List[(Symbol,SourceFile)]])
-      .wrapped { case sym ~ source ~ site ~ fragments => new AskDocCommentItem(sym, source, site, fragments, new Response) } { item => item.sym ~ item.source ~ item.site ~ item.fragments }
-      .asClass (classOf[AskDocCommentItem])
-
-  implicit def askLoadedTypedItem: CondPickler[AskLoadedTypedItem] =
-    pkl[SourceFile]
-      .wrapped { source => new AskLoadedTypedItem(source, false, new Response) } { _.source }
-      .asClass (classOf[AskLoadedTypedItem])
-
-  implicit def askParsedEnteredItem: CondPickler[AskParsedEnteredItem] =
-    (pkl[SourceFile] ~ pkl[Boolean])
-      .wrapped { case source ~ keepLoaded => new AskParsedEnteredItem(source, keepLoaded, new Response) } { w => w.source ~ w.keepLoaded }
-      .asClass (classOf[AskParsedEnteredItem])
-
-  implicit def emptyAction: CondPickler[EmptyAction] =
-    pkl[Unit]
-      .wrapped { _ => new EmptyAction } { _ => () }
-      .asClass (classOf[EmptyAction])
-
-  implicit def action: Pickler[() => Unit] =
-    reloadItem | askTypeAtItem | askTypeItem | askTypeCompletionItem | askScopeCompletionItem |
-    askToDoFirstItem | askLinkPosItem | askDocCommentItem | askLoadedTypedItem | askParsedEnteredItem | emptyAction
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala
deleted file mode 100644
index 4b64313..0000000
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ /dev/null
@@ -1,222 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import scala.concurrent.SyncVar
-import scala.reflect.internal.util._
-import scala.tools.nsc.symtab._
-import scala.tools.nsc.ast._
-import scala.tools.nsc.reporters._
-import scala.tools.nsc.io._
-import scala.tools.nsc.scratchpad.SourceInserter
-import scala.tools.nsc.interpreter.AbstractFileClassLoader
-import java.io.{File, FileWriter}
-
-/** Interface of interactive compiler to a client such as an IDE
- */
-object REPL {
-
-  val versionMsg = "Scala compiler " +
-    Properties.versionString + " -- " +
-    Properties.copyrightString
-
-  val prompt = "> "
-
-  var reporter: ConsoleReporter = _
-
-  private def replError(msg: String) {
-    reporter.error(/*new Position */FakePos("scalac"),
-                   msg + "\n  scalac -help  gives more information")
-  }
-
-  def process(args: Array[String]) {
-    val settings = new Settings(replError)
-    reporter = new ConsoleReporter(settings)
-    val command = new CompilerCommand(args.toList, settings)
-    if (command.settings.version.value)
-      reporter.echo(versionMsg)
-    else {
-      try {
-        object compiler extends Global(command.settings, reporter) {
-//          printTypings = true
-        }
-        if (reporter.hasErrors) {
-          reporter.flush()
-          return
-        }
-        if (command.shouldStopWithInfo) {
-          reporter.echo(command.getInfoMessage(compiler))
-        } else {
-          run(compiler)
-        }
-      } catch {
-        case ex @ FatalError(msg) =>
-          if (true || command.settings.debug.value) // !!!
-            ex.printStackTrace();
-        reporter.error(null, "fatal error: " + msg)
-      }
-    }
-  }
-
-  def main(args: Array[String]) {
-    process(args)
-    /*sys.*/exit(if (reporter.hasErrors) 1 else 0)// Don't use sys yet as this has to run on 2.8.2 also.
-  }
-
-  def loop(action: (String) => Unit) {
-    Console.print(prompt)
-    try {
-      val line = Console.readLine
-      if (line.length() > 0) {
-        action(line)
-      }
-      loop(action)
-    }
-    catch {
-      case _: java.io.EOFException => //nop
-    }
-  }
-
-  /** Commands:
-   *
-   *  reload file1 ... fileN
-   *  typeat file off1 off2?
-   *  complete file off1 off2?
-   */
-  def run(comp: Global) {
-    val reloadResult = new Response[Unit]
-    val typeatResult = new Response[comp.Tree]
-    val completeResult = new Response[List[comp.Member]]
-    val typedResult = new Response[comp.Tree]
-    val structureResult = new Response[comp.Tree]
-    @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
-    val instrumentedResult = new Response[(String, Array[Char])]
-
-    def makePos(file: String, off1: String, off2: String) = {
-      val source = toSourceFile(file)
-      comp.rangePos(source, off1.toInt, off1.toInt, off2.toInt)
-    }
-
-    def doTypeAt(pos: Position) {
-      comp.askTypeAt(pos, typeatResult)
-      show(typeatResult)
-    }
-
-    def doComplete(pos: Position) {
-      comp.askTypeCompletion(pos, completeResult)
-      show(completeResult)
-    }
-
-    def doStructure(file: String) {
-      comp.askParsedEntered(toSourceFile(file), false, structureResult)
-      show(structureResult)
-    }
-
-    /** Write instrumented source file to disk.
-     *  @param iFullName  The full name of the first top-level object in source
-     *  @param iContents  An Array[Char] containing the instrumented source
-     *  @return The name of the instrumented source file
-     */
-    @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
-    def writeInstrumented(iFullName: String, suffix: String, iContents: Array[Char]): String = {
-      val iSimpleName = iFullName drop ((iFullName lastIndexOf '.') + 1)
-      val iSourceName = iSimpleName + suffix
-      val ifile = new FileWriter(iSourceName)
-      ifile.write(iContents)
-      ifile.close()
-      iSourceName
-    }
-
-    /** The method for implementing worksheet functionality.
-     *  @param arguments  a file name, followed by optional command line arguments that are passed
-     *                    to the compiler that processes the instrumented source.
-     *  @param line       A line number that controls up to which line results should be produced.
-     *                    If line = -1, results are produced for all expressions in the worksheet.
-     *  @return           The generated file content containing original source in the left column
-     *                    and outputs in the right column, or None if the presentation compiler
-     *                    does not respond to askInstrumented.
-     */
-    @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
-    def instrument(arguments: List[String], line: Int): Option[(String, String)] = {
-      val source = toSourceFile(arguments.head)
-      // strip right hand side comment column and any trailing spaces from all lines
-      val strippedContents = SourceInserter.stripRight(source.content)
-      val strippedSource = new BatchSourceFile(source.file, strippedContents)
-      println("stripped source = "+strippedSource+":"+strippedContents.mkString)
-      comp.askReload(List(strippedSource), reloadResult)
-      comp.askInstrumented(strippedSource, line, instrumentedResult)
-      using(instrumentedResult) {
-        case (iFullName, iContents) =>
-          println(s"instrumented source $iFullName = ${iContents.mkString}")
-          val iSourceName = writeInstrumented(iFullName, "$instrumented.scala", iContents)
-          val sSourceName = writeInstrumented(iFullName, "$stripped.scala", strippedContents)
-          (iSourceName, sSourceName)
-/*
- *           val vdirOpt = compileInstrumented(iSourceName, arguments.tail)
-          runInstrumented(vdirOpt, iFullName, strippedSource.content)
- */
-      }
-    }
-
-    loop { line =>
-      (line split " ").toList match {
-        case "reload" :: args =>
-          comp.askReload(args map toSourceFile, reloadResult)
-          show(reloadResult)
-        case "reloadAndAskType" :: file :: millis :: Nil =>
-          comp.askReload(List(toSourceFile(file)), reloadResult)
-          Thread.sleep(millis.toInt)
-          println("ask type now")
-          comp.askLoadedTyped(toSourceFile(file), keepLoaded = true, typedResult)
-          typedResult.get
-        case List("typeat", file, off1, off2) =>
-          doTypeAt(makePos(file, off1, off2))
-        case List("typeat", file, off1) =>
-          doTypeAt(makePos(file, off1, off1))
-        case List("complete", file, off1, off2) =>
-          doComplete(makePos(file, off1, off2))
-        case List("complete", file, off1) =>
-          doComplete(makePos(file, off1, off1))
-        case "instrument" :: arguments =>
-          println(instrument(arguments, -1))
-        case "instrumentTo" :: line :: arguments =>
-          println(instrument(arguments, line.toInt))
-        case List("quit") =>
-          comp.askShutdown()
-          exit(1) // Don't use sys yet as this has to run on 2.8.2 also.
-        case List("structure", file) =>
-          doStructure(file)
-        case _ =>
-          print("""Available commands:
-                  | reload <file_1> ... <file_n>
-                  | reloadAndAskType <file> <sleep-ms>
-                  | typed <file>
-                  | typeat <file> <start-pos> <end-pos>
-                  | typeat <file> <pos>
-                  | complete <file> <start-pos> <end-pos>
-                  | compile <file> <pos>
-                  | instrument <file> <arg>*
-                  | instrumentTo <line-num> <file> <arg>*
-                  | structure <file>
-                  | quit
-                  |""".stripMargin)
-      }
-    }
-  }
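A hypothetical session with this front end (the REPL object lives in the compiler jar, so the invocation assumes scala-compiler.jar is on the classpath; file name and offsets are illustrative):

  $ scala -cp scala-compiler.jar scala.tools.nsc.interactive.REPL -usejavacp
  > reload Demo.scala
  > typeat Demo.scala 18 20
  > complete Demo.scala 20
  > quit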
-
-  def toSourceFile(name: String) = new BatchSourceFile(new PlainFile(new java.io.File(name)))
-
-  def using[T, U](svar: Response[T])(op: T => U): Option[U] = {
-    val res = svar.get match {
-      case Left(result) => Some(op(result))
-      case Right(exc) => exc.printStackTrace; println("ERROR: "+exc); None
-    }
-    svar.clear()
-    res
-  }
-
-  def show[T](svar: Response[T]) = using(svar)(res => println("==> "+res))
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
deleted file mode 100644
index b95f1fa..0000000
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ /dev/null
@@ -1,285 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import ast.Trees
-import ast.Positions
-import scala.reflect.internal.util.{SourceFile, Position, RangePosition, NoPosition}
-import scala.tools.nsc.util.WorkScheduler
-import scala.collection.mutable.ListBuffer
-
-/** Handling range positions
- *  atPos, the main method in this trait, will add positions to a tree,
- *  and will ensure the following properties:
- *
- *    1. All nodes between the root of the tree and nodes that already have positions
- *       will be assigned positions.
- *    2. No node which already has a position will be assigned a different range; however
- *       a RangePosition might become a TransparentPosition.
- *    3. The position of each assigned node includes the positions of each of its children.
- *    4. The positions of all solid descendants of children of an assigned node
- *       are mutually non-overlapping.
- *
- * Here, the solid descendants of a node are:
- *
- *   If the node has a TransparentPosition, the solid descendants of all its children
- *   Otherwise, the singleton consisting of the node itself.
- */
-trait RangePositions extends Trees with Positions {
-self: scala.tools.nsc.Global =>
-
-  case class Range(pos: Position, tree: Tree) {
-    def isFree = tree == EmptyTree
-  }
-
-  override def rangePos(source: SourceFile, start: Int, point: Int, end: Int) =
-    new RangePosition(source, start, point, end)
-
-  /** A position that wraps a set of trees.
-   *  The point of the wrapping position is the point of the default position.
-   *  If some of the trees are ranges, returns a range position enclosing all ranges
-   *  Otherwise returns default position that is either focused or not.
-   */
-  override def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = {
-    val ranged = trees filter (_.pos.isRange)
-    if (ranged.isEmpty) if (focus) default.focus else default
-    else new RangePosition(default.source, (ranged map (_.pos.start)).min, default.point, (ranged map (_.pos.end)).max)
-  }
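A worked illustration (offsets made up): with default = RangePosition(src, 10, 12, 14) and ranged children spanning [5, 9) and [20, 30), the result is RangePosition(src, 5, 12, 30); with no ranged children it is default.focus when focus is set, and default otherwise.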
-
-  /** A position that wraps a non-empty set of trees.
- *  The point of the wrapping position is the point of the first tree's position.
-   *  If some of the trees are ranges, returns a range position enclosing all ranges
-   *  Otherwise returns first tree's position.
-   */
-  override def wrappingPos(trees: List[Tree]): Position = {
-    val headpos = trees.head.pos
-    if (headpos.isDefined) wrappingPos(headpos, trees) else headpos
-  }
-
-  // -------------- ensuring no overlaps -------------------------------
-  
-  /** Ensure that given tree has no positions that overlap with
-   *  any of the positions of `others`. This is done by
-   *  shortening the range, assigning TransparentPositions
-   *  to some of the nodes in `tree` or focusing on the position.
-   */
-  override def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {
-    def isOverlapping(pos: Position) =
-      pos.isRange && (others exists (pos overlaps _.pos))
-    if (isOverlapping(tree.pos)) {
-      val children = tree.children
-      children foreach (ensureNonOverlapping(_, others, focus))
-      if (tree.pos.isOpaqueRange) {
-        val wpos = wrappingPos(tree.pos, children, focus)
-        tree setPos (if (isOverlapping(wpos)) tree.pos.makeTransparent else wpos)
-      }
-    }
-  }
-
-  def solidDescendants(tree: Tree): List[Tree] =
-    if (tree.pos.isTransparent) tree.children flatMap solidDescendants
-    else List(tree)
-
-  /** A free range from `lo` to `hi` */
-  private def free(lo: Int, hi: Int): Range =
-    Range(new RangePosition(null, lo, lo, hi), EmptyTree)
-
-  /** The maximal free range */
-  private lazy val maxFree: Range = free(0, Int.MaxValue)
-
-  /** A singleton list of a non-empty range from `lo` to `hi`, or else the empty List */
-  private def maybeFree(lo: Int, hi: Int) =
-    if (lo < hi) List(free(lo, hi))
-    else List()
-
-  /** Insert the position of tree `t` into ranges `rs` if possible;
-   *  otherwise add conflicting trees to `conflicting`.
-   */
-  private def insert(rs: List[Range], t: Tree, conflicting: ListBuffer[Tree]): List[Range] = rs match {
-    case List() =>
-      assert(conflicting.nonEmpty)
-      rs
-    case r :: rs1 =>
-      assert(!t.pos.isTransparent)
-      if (r.isFree && (r.pos includes t.pos)) {
-//      println("subdividing "+r+"/"+t.pos)
-        maybeFree(t.pos.end, r.pos.end) ::: List(Range(t.pos, t)) ::: maybeFree(r.pos.start, t.pos.start) ::: rs1
-      } else {
-        if (!r.isFree && (r.pos overlaps t.pos)) conflicting += r.tree
-        r :: insert(rs1, t, conflicting)
-      }
-  }
-
-  /** Replace elem `t` of `ts` by `replacement` list. */
-  private def replace(ts: List[Tree], t: Tree, replacement: List[Tree]): List[Tree] =
-    if (ts.head == t) replacement ::: ts.tail
-    else ts.head :: replace(ts.tail, t, replacement)
-
-  /** Does given list of trees have mutually non-overlapping positions?
-   *  pre: None of the trees is transparent
-   */
-  def findOverlapping(cts: List[Tree]): List[(Tree, Tree)] = {
-    var ranges = List(maxFree)
-    for (ct <- cts) {
-      if (ct.pos.isOpaqueRange) {
-        val conflicting = new ListBuffer[Tree]
-        ranges = insert(ranges, ct, conflicting)
-        if (conflicting.nonEmpty) return conflicting.toList map (t => (t, ct))
-      }
-    }
-    List()
-  }
-
-  // -------------- setting positions -------------------------------
-
-  /** Set position of all children of a node
-   *  @param  pos   A target position.
-   *                Uses the point of the position as the point of all positions it assigns.
-   *                Uses the start of this position as an Offset position for unpositioned trees
-   *                without children.
-   *  @param  trees  The children to position. All children must be positionable.
-   */
-  private def setChildrenPos(pos: Position, trees: List[Tree]): Unit = try {
-    for (tree <- trees) {
-      if (!tree.isEmpty && tree.pos == NoPosition) {
-        val children = tree.children
-        if (children.isEmpty) {
-          tree setPos pos.focus
-        } else {
-          setChildrenPos(pos, children)
-          tree setPos wrappingPos(pos, children)
-        }
-      }
-    }
-  } catch {
-    case ex: Exception =>
-      println("error while set children pos "+pos+" of "+trees)
-      throw ex
-  }
-
-  /** Position a tree.
-   *  This means: Set position of a node and position all its unpositioned children.
-   */
-  override def atPos[T <: Tree](pos: Position)(tree: T): T = {
-    if (pos.isOpaqueRange) {
-      if (!tree.isEmpty && tree.pos == NoPosition) {
-        tree.setPos(pos)
-        val children = tree.children
-        if (children.nonEmpty) {
-          if (children.tail.isEmpty) atPos(pos)(children.head)
-          else setChildrenPos(pos, children)
-        }
-      }
-      tree
-    } else {
-      super.atPos(pos)(tree)
-    }
-  }
-
-  // ---------------- Validating positions ----------------------------------
-
-  override def validatePositions(tree: Tree) {
-    def reportTree(prefix : String, tree : Tree) {
-      val source = if (tree.pos.isDefined) tree.pos.source else ""
-      inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source)
-      inform("")
-      inform(treeStatus(tree))
-      inform("")
-    }
-
-    def positionError(msg: String)(body : => Unit) {
-      inform("======= Position error\n" + msg)
-      body
-      inform("\nWhile validating #" + tree.id)
-      inform(treeStatus(tree))
-      inform("\nChildren:")
-      tree.children map (t => "  " + treeStatus(t, tree)) foreach inform
-      inform("=======")
-      throw new ValidateException(msg)
-    }
-
-    def validate(tree: Tree, encltree: Tree): Unit = {
-
-      if (!tree.isEmpty) {
-        if (settings.Yposdebug.value && (settings.verbose.value || settings.Yrangepos.value))
-          println("[%10s] %s".format("validate", treeStatus(tree, encltree)))
-
-        if (!tree.pos.isDefined)
-          positionError("Unpositioned tree #"+tree.id) {
-            inform("%15s %s".format("unpositioned", treeStatus(tree, encltree)))
-            inform("%15s %s".format("enclosing", treeStatus(encltree)))
-            encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree))))
-          }
-        if (tree.pos.isRange) {
-          if (!encltree.pos.isRange)
-            positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") {
-            reportTree("Enclosing", encltree)
-            reportTree("Enclosed", tree)
-            }
-          if (!(encltree.pos includes tree.pos))
-            positionError("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") {
-              reportTree("Enclosing", encltree)
-              reportTree("Enclosed", tree)
-            }
-
-          findOverlapping(tree.children flatMap solidDescendants) match {
-            case List() => ;
-            case xs => {
-              positionError("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) {
-                reportTree("Ancestor", tree)
-                for((x, y) <- xs) {
-                  reportTree("First overlapping", x)
-                  reportTree("Second overlapping", y)
-                }
-              }
-            }
-          }
-        }
-        for (ct <- tree.children flatMap solidDescendants) validate(ct, tree)
-      }
-    }
-
-    if (phase.id <= currentRun.typerPhase.id)
-      validate(tree, tree)
-  }
-
-  class ValidateException(msg : String) extends Exception(msg)
-
-  // ---------------- Locating trees ----------------------------------
-
-  /** A locator for trees with given positions.
-   *  Given a position `pos`, locator.apply returns
-   *  the smallest tree that encloses `pos`.
-   */
-  class Locator(pos: Position) extends Traverser {
-    var last: Tree = _
-    def locateIn(root: Tree): Tree = {
-      this.last = EmptyTree
-      traverse(root)
-      this.last
-    }
-    protected def isEligible(t: Tree) = !t.pos.isTransparent
-    override def traverse(t: Tree) {
-      t match {
-        case tt : TypeTree if tt.original != null && (tt.pos includes tt.original.pos) =>
-          traverse(tt.original)
-        case _ =>
-          if (t.pos includes pos) {
-            if (isEligible(t)) last = t
-            super.traverse(t)
-          } else t match {
-            case mdef: MemberDef =>
-              traverseTrees(mdef.mods.annotations)
-            case _ =>
-          }
-      }
-    }
-  }
-
-  class TypedLocator(pos: Position) extends Locator(pos) {
-    override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
deleted file mode 100644
index b2ef45a..0000000
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ /dev/null
@@ -1,355 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Iulian Dragos
- * @author Hubert Plocinicak
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection._
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.util.control.Breaks._
-import scala.tools.nsc.symtab.Flags
-
-import dependencies._
-import scala.reflect.internal.util.FakePos
-import util.ClassPath
-import io.AbstractFile
-import scala.tools.util.PathResolver
-
-/** A more defined build manager, based on change sets. For each
- *  updated source file, it computes the set of changes to its
- *  definitions, then checks all dependent units to see if the
- *  changes require a compilation. It repeats this process until
- *  a fixpoint is reached.
- */
-@deprecated("Use sbt incremental compilation mechanism", "2.10.0")
-class RefinedBuildManager(val settings: Settings) extends Changes with BuildManager {
-
-  class BuilderGlobal(settings: Settings, reporter : Reporter) extends scala.tools.nsc.Global(settings, reporter)  {
-
-    def this(settings: Settings) =
-      this(settings, new ConsoleReporter(settings))
-
-    override def computeInternalPhases() {
-      super.computeInternalPhases
-      phasesSet += dependencyAnalysis
-    }
-    lazy val _classpath = new NoSourcePathPathResolver(settings).result
-    override def classPath = _classpath.asInstanceOf[ClassPath[platform.BinaryRepr]]
-       // See discussion in JavaPlatform for why we need a cast here.
-
-    def newRun() = new Run()
-  }
-
-  class NoSourcePathPathResolver(settings: Settings) extends PathResolver(settings) {
-    override def containers = Calculated.basis.dropRight(1).flatten.distinct
-  }
-
-  protected def newCompiler(settings: Settings) = new BuilderGlobal(settings)
-
-  val compiler = newCompiler(settings)
-  import compiler.{ Symbol, Type, beforeErasure }
-  import compiler.dependencyAnalysis.Inherited
-
-  private case class SymWithHistory(sym: Symbol, befErasure: Type)
-
-  /** Managed source files. */
-  private val sources: mutable.Set[AbstractFile] = new mutable.HashSet[AbstractFile]
-
-  private val definitions: mutable.Map[AbstractFile, List[SymWithHistory]] =
-    new mutable.HashMap[AbstractFile, List[SymWithHistory]] {
-      override def default(key: AbstractFile) = Nil
-    }
-
-  /** External references used by source file. */
-  private var references: mutable.Map[AbstractFile, immutable.Set[String]] = _
-
-  /** External references for inherited members */
-  private var inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] = _
-
-  /** Reverse of definitions, used for caching */
-  private var classes: mutable.Map[String, AbstractFile] =
-    new mutable.HashMap[String, AbstractFile] {
-      override def default(key: String) = null
-  }
-
-  /** Add the given source files to the managed build process. */
-  def addSourceFiles(files: Set[AbstractFile]) {
-    sources ++= files
-    update(files)
-  }
-
-  /** Remove the given files from the managed build process. */
-  def removeFiles(files: Set[AbstractFile]) {
-    sources --= files
-    deleteClassfiles(files)
-    update(invalidatedByRemove(files))
-  }
-
-  /** Return the set of invalidated files caused by removing the given files.
-   */
-  private def invalidatedByRemove(files: Set[AbstractFile]): Set[AbstractFile] = {
-    val changes = new mutable.HashMap[Symbol, List[Change]]
-    for (f <- files; SymWithHistory(sym, _) <- definitions(f))
-      changes += sym -> List(Removed(Class(sym.fullName)))
-    invalidated(files, changes)
-  }
-
-  def update(added: Set[AbstractFile], removed: Set[AbstractFile]) {
-    sources --= removed
-    deleteClassfiles(removed)
-    update(added ++ invalidatedByRemove(removed))
-  }
-
-  /** The given files have been modified by the user. Recompile
-   *  them and all files that depend on them. Only files that
-   *  have been previously added as source files are recompiled.
-   *  Files that were already compiled are taken out from the result
-   *  of the dependency analysis.
-   */
-  private def update(files: Set[AbstractFile]) = {
-    val coll: mutable.Map[AbstractFile, immutable.Set[AbstractFile]] =
-        mutable.HashMap[AbstractFile, immutable.Set[AbstractFile]]()
-    compiler.reporter.reset()
-
-    // See if we really have corresponding symbols, not just those
-    // which share the name
-    def isCorrespondingSym(from: Symbol, to: Symbol): Boolean =
-      (from.hasFlag(Flags.TRAIT) == to.hasFlag(Flags.TRAIT)) && // has to run in 2.8, so no hasTraitFlag
-      (from.hasFlag(Flags.MODULE) == to.hasFlag(Flags.MODULE))
-
-    // For testing purposes only, order irrelevant for compilation
-    def toStringSet(set: Set[AbstractFile]): String =
-      set.toList sortBy (_.name) mkString("Set(", ", ", ")")
-
-    def update0(files: Set[AbstractFile]): Unit = if (!files.isEmpty) {
-      deleteClassfiles(files)
-      val run = compiler.newRun()
-      if (settings.Ybuildmanagerdebug.value)
-        compiler.inform("compiling " + toStringSet(files))
-      buildingFiles(files)
-
-      run.compileFiles(files.toList)
-      if (compiler.reporter.hasErrors) {
-        return
-      }
-
-      // Deterministic behaviour required by partest
-      val changesOf = new mutable.HashMap[Symbol, List[Change]] {
-          override def toString: String = {
-            val changesOrdered =
-              toList.map(e => {
-                e._1.toString + " -> " +
-                e._2.sortBy(_.toString).mkString("List(", ", ", ")")
-              })
-            changesOrdered.sorted.mkString("Map(", ", ", ")")
-          }
-      }
-      val additionalDefs: mutable.HashSet[AbstractFile] = mutable.HashSet.empty
-
-      val defs = compiler.dependencyAnalysis.definitions
-      for (src <- files) {
-        if (definitions(src).isEmpty)
-          additionalDefs ++= compiler.dependencyAnalysis.
-                             dependencies.dependentFiles(1, mutable.Set(src))
-        else {
-          val syms = defs(src)
-          for (sym <- syms) {
-            definitions(src).find(
-               s => (s.sym.fullName == sym.fullName) &&
-                    isCorrespondingSym(s.sym, sym)) match {
-              case Some(SymWithHistory(oldSym, info)) =>
-                val changes = changeSet(oldSym.info, sym)
-                val changesErasure = beforeErasure(changeSet(info, sym))
-
-                changesOf(oldSym) = (changes ++ changesErasure).distinct
-              case _ =>
-                // a new top level definition
-                changesOf(sym) = sym.parentSymbols filter (_.isSealed) map (p =>
-                    changeChangeSet(p, sym+" extends a sealed "+p))
-            }
-          }
-          // Create a change for the top level classes that were removed
-          val removed = definitions(src) filterNot ((s:SymWithHistory) =>
-            syms.find(_.fullName == (s.sym.fullName)) != None)
-          for (s <- removed) {
-            changesOf(s.sym) = List(removeChangeSet(s.sym))
-          }
-        }
-      }
-      if (settings.Ybuildmanagerdebug.value)
-        compiler.inform("Changes: " + changesOf)
-      updateDefinitions(files)
-      val invalid = invalidated(files, changesOf, additionalDefs)
-      update0(checkCycles(invalid, files, coll))
-    }
-
-    update0(files)
-    // remove the current run in order to save some memory
-    compiler.dropRun()
-  }
-
-  // Attempt to break cyclic reference dependencies as soon as possible and reduce
-  // the number of compilations to a minimum, without making the rules too coarse-grained
-  private def checkCycles(files: Set[AbstractFile], initial: Set[AbstractFile],
-                          collect: mutable.Map[AbstractFile, immutable.Set[AbstractFile]]):
-    Set[AbstractFile] = {
-      def followChain(set: Set[AbstractFile], rest: immutable.Set[AbstractFile]):
-        immutable.Set[AbstractFile] = {
-        val deps:Set[AbstractFile] = set.flatMap(
-              s => collect.get(s) match {
-                     case Some(x) => x
-                     case _ => Set[AbstractFile]()
-              })
-          val newDeps = deps -- rest
-          if (newDeps.isEmpty) rest else followChain(newDeps, rest ++ newDeps)
-      }
-      var res:Set[AbstractFile] = mutable.Set()
-      files.foreach( f =>
-        if (collect contains f) {
-          val chain = followChain(Set(f), immutable.Set()) ++ files
-          chain.foreach((fc: AbstractFile) => collect += fc -> chain)
-          res ++= chain
-        } else
-          res += f
-       )
-
-      initial.foreach((f: AbstractFile) => collect += (f -> (collect.getOrElse(f, immutable.Set()) ++ res)))
-      if (res.subsetOf(initial)) Set() else res
-  }
-
-  /** Return the set of source files that are invalidated by the given changes. */
-  def invalidated(files: Set[AbstractFile], changesOf: scala.collection.Map[Symbol, List[Change]],
-                  processed: Set[AbstractFile] = Set.empty):
-    Set[AbstractFile] = {
-    val buf = new mutable.HashSet[AbstractFile]
-    val newChangesOf = new mutable.HashMap[Symbol, List[Change]]
-    var directDeps =
-      compiler.dependencyAnalysis.dependencies.dependentFiles(1, files)
-
-    def invalidate(file: AbstractFile, reason: String, change: Change) = {
-      if (settings.Ybuildmanagerdebug.value)
-        compiler.inform("invalidate " + file + " because " + reason + " [" + change + "]")
-      buf += file
-      directDeps -= file
-      for (syms <- definitions(file))     // fixes #2557
-        newChangesOf(syms.sym) = List(change, parentChangeSet(syms.sym))
-      break
-    }
-
-    for ((oldSym, changes) <- changesOf; change <- changes) {
-      def checkParents(cls: Symbol, file: AbstractFile) {
-        val parentChange = cls.parentSymbols exists (_.fullName == oldSym.fullName)
-          // if (settings.buildmanagerdebug.value)
-          //   compiler.inform("checkParents " + cls + " oldSym: " + oldSym + " parentChange: " + parentChange + " " + cls.info.parents)
-        change match {
-          case Changed(Class(_)) if parentChange =>
-            invalidate(file, "parents have changed", change)
-
-          case Changed(Definition(_)) if parentChange =>
-            invalidate(file, "inherited method changed", change)
-
-          case Added(Definition(_)) if parentChange =>
-            invalidate(file, "inherited new method", change)
-
-          case Removed(Definition(_)) if parentChange =>
-            invalidate(file, "inherited method removed", change)
-
-          case _ => ()
-        }
-      }
-
-      def checkInterface(cls: Symbol, file: AbstractFile) {
-        change match {
-          case Added(Definition(name)) =>
-            if (cls.info.decls.iterator.exists(_.fullName == name))
-              invalidate(file, "of new method with existing name", change)
-          case Changed(Class(name)) =>
-            if (cls.info.typeSymbol.fullName == name)
-              invalidate(file, "self type changed", change)
-          case _ =>
-            ()
-        }
-      }
-
-      def checkReferences(file: AbstractFile) {
-        //if (settings.buildmanagerdebug.value)
-        //  compiler.inform(file + ":" + references(file))
-        val refs = references(file)
-        if (refs.isEmpty)
-          invalidate(file, "it is a direct dependency and we don't yet have finer-grained dependency information", change)
-        else {
-          change match {
-            case Removed(Definition(name)) if refs(name) =>
-              invalidate(file, "it references deleted definition", change)
-            case Removed(Class(name)) if (refs(name)) =>
-              invalidate(file, "it references deleted class", change)
-            case Changed(Class(name)) if (refs(name)) =>
-              invalidate(file, "it references changed class", change)
-            case Changed(Definition(name)) if (refs(name)) =>
-              invalidate(file, "it references changed definition", change)
-            case Added(Definition(name)) if (refs(name)) =>
-              invalidate(file, "it references added definition", change)
-            case _ => ()
-          }
-        }
-      }
-
-      def checkInheritedReferences(file: AbstractFile) {
-        val refs = inherited(file)
-        if (!refs.isEmpty)
-          change match {
-            case ParentChanged(Class(name)) =>
-              for (Inherited(q, member) <- refs.find(p => (p != null && p.qualifier == name));
-                   classFile <- classes.get(q);
-                   defs <- definitions.get(classFile);
-                   s <- defs.find(p => p.sym.fullName == q)
-                     if ((s.sym).tpe.nonPrivateMember(member) == compiler.NoSymbol))
-                invalidate(file, "it references invalid (no longer inherited) definition", change)
-              ()
-            case _ => ()
-        }
-      }
-
-        for (file <- directDeps) {
-          breakable {
-            for (cls <- definitions(file)) checkParents(cls.sym, file)
-            for (cls <- definitions(file)) checkInterface(cls.sym, file)
-            checkReferences(file)
-            checkInheritedReferences(file)
-          }
-        }
-    }
-    if (buf.isEmpty)
-      processed
-    else
-      invalidated(buf.clone() --= processed, newChangesOf, processed ++ buf)
-  }
-
-  /** Update the map of definitions per source file */
-  private def updateDefinitions(files: Set[AbstractFile]) {
-    for (src <- files; localDefs = compiler.dependencyAnalysis.definitions(src)) {
-      definitions(src) = (localDefs map (s => {
-        this.classes += s.fullName -> src
-        SymWithHistory(s.cloneSymbol, beforeErasure(s.info.cloneInfo(s)))
-      }))
-    }
-    this.references = compiler.dependencyAnalysis.references
-    this.inherited = compiler.dependencyAnalysis.inherited
-  }
-
-  /** Load saved dependency information. */
-  def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean = {
-    val success = compiler.dependencyAnalysis.loadFrom(file, toFile)
-    if (success)
-      sources ++= compiler.dependencyAnalysis.managedFiles
-    success
-  }
-
-  /** Save dependency information to `file`. */
-  def saveTo(file: AbstractFile, fromFile: AbstractFile => String) {
-    compiler.dependencyAnalysis.dependenciesFile = file
-    compiler.dependencyAnalysis.saveDependencies(fromFile)
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/Response.scala b/src/compiler/scala/tools/nsc/interactive/Response.scala
deleted file mode 100644
index f36f769..0000000
--- a/src/compiler/scala/tools/nsc/interactive/Response.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-/** Typical interaction, given a predicate <user-input>, a function <display>,
- *  and an exception handler <handle>:
- *
- *  val TIMEOUT = 100 // (milliseconds) or something like that
- *  val r = new Response()
- *  while (!r.isComplete && !r.isCancelled) {
- *    if (<user-input>) r.cancel()
- *    else r.get(TIMEOUT) match {
- *      case Some(Left(data)) => <display>(data)
- *      case Some(Right(exc)) => <handle>(exc)
- *      case None =>
- *    }
- *  }
- */
-class Response[T] {
-
-  private var data: Option[Either[T, Throwable]] = None
-  private var complete = false
-  private var cancelled = false
-
-  /** Set provisional data, more to come
-   */
-  def setProvisionally(x: T) = synchronized {
-    data = Some(Left(x))
-  }
-
-  /** Set final data, and mark response as complete.
-   */
-  def set(x: T) = synchronized {
-    data = Some(Left(x))
-    complete = true
-    notifyAll()
-  }
-
-  /** Store raised exception in data, and mark response as complete.
-   */
-  def raise(exc: Throwable) = synchronized {
-    data = Some(Right(exc))
-    complete = true
-    notifyAll()
-  }
-
-  /** Get final data, wait as long as necessary.
-   *  When interrupted, returns Right(InterruptedException).
-   */
-  def get: Either[T, Throwable] = synchronized {
-    while (!complete) {
-      try {
-        wait()
-      } catch {
-        case exc: InterruptedException => raise(exc)
-      }
-    }
-    data.get
-  }
-
-  /** Optionally get data within `timeout` milliseconds.
-   *  When interrupted, returns Some(Right(InterruptedException)).
-   *  When the timeout ends, returns the last stored provisional result,
-   *  or None if no provisional result was stored.
-   */
-  def get(timeout: Long): Option[Either[T, Throwable]] = synchronized {
-    val start = System.currentTimeMillis
-    var current = start
-    while (!complete && start + timeout > current) {
-      try {
-        wait(timeout - (current - start))
-      } catch {
-        case exc: InterruptedException => raise(exc)
-      }
-      current = System.currentTimeMillis
-    }
-    data
-  }
-
-  /** Whether the final data has been set.
-   */
-  def isComplete = synchronized { complete }
-
-  /** Cancel action computing this response (Only the
-   *  party that calls get on a response may cancel).
-   */
-  def cancel() = synchronized { cancelled = true }
-
-  /** A cancel request for this response has been issued
-   */
-  def isCancelled = synchronized { cancelled }
-
-  def clear() = synchronized {
-    data = None
-    complete = false
-    cancelled = false
-  }
-}
-
-
-
-
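The class comment above sketches the polling loop with placeholders; below is a minimal
concrete version of that sketch, assuming a Response[String] and a caller-supplied
cancellation check (both names are illustrative):

    // Poll a response, displaying data and handling exceptions as the comment above describes.
    def poll(r: Response[String], userRequestedCancel: () => Boolean): Unit = {
      val TIMEOUT = 100L // milliseconds
      while (!r.isComplete && !r.isCancelled) {
        if (userRequestedCancel()) r.cancel()
        else r.get(TIMEOUT) match {
          case Some(Left(data)) => println(data)          // <display>
          case Some(Right(exc)) => exc.printStackTrace()  // <handle>
          case None             => // timed out, keep polling
        }
      }
    }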
diff --git a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
deleted file mode 100644
index 7f0265b..0000000
--- a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
+++ /dev/null
@@ -1,200 +0,0 @@
-package scala.tools.nsc
-package interactive
-
-import scala.reflect.internal.util.{SourceFile, BatchSourceFile, RangePosition}
-import scala.collection.mutable.ArrayBuffer
-import scala.reflect.internal.Chars.{isLineBreakChar, isWhitespace}
-import ast.parser.Tokens._
-
- at deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
-trait ScratchPadMaker { self: Global =>
-
-  import definitions._
-
-  private case class Patch(offset: Int, text: String)
-
-  private class Patcher(contents: Array[Char], lex: LexicalStructure, endOffset: Int) extends Traverser {
-    var objectName: String = ""
-
-    private val patches = new ArrayBuffer[Patch]
-    private val toPrint = new ArrayBuffer[String]
-    private var skipped = 0
-    private var resNum: Int = -1
-
-    private def nextRes(): String = {
-      resNum += 1
-      "res$"+resNum
-    }
-
-    private def nameType(name: String, tpe: Type): String = {
-      // if name ends in symbol character, add a space to separate it from the following ':'
-      val pad = if (Character.isLetter(name.last) || Character.isDigit(name.last)) "" else " "
-      name+pad+": "+tpe
-    }
-
-    private def nameType(sym: Symbol): String = nameType(sym.name.decoded, sym.tpe)
-
-    private def literal(str: String) = "\"\"\""+str+"\"\"\""
-
-    private val prologue = ";import scala.runtime.WorksheetSupport._; def main(args: Array[String])=$execute{"
-
-    private val epilogue = "}"
-
-    private def applyPendingPatches(offset: Int) = {
-      if (skipped == 0) patches += Patch(offset, prologue)
-      for (msg <- toPrint) patches += Patch(offset, ";System.out.println("+msg+")")
-      toPrint.clear()
-    }
-
-    /** The position at which to insert an instrumentation statement in front of a given statement.
-     *  This is at the latest `stat.pos.start`, but in order not to mess with column numbers
-     *  in positions we try to insert it at the end of the previous token instead.
-     *  Furthermore, `(` tokens have to be skipped because they do not show up
-     *  in statement range positions.
-     */
-    private def instrumentPos(start: Int): Int = {
-      val (prevToken, prevStart, prevEnd) = lex.locate(start - 1)
-      if (prevStart >= start) start
-      else if (prevToken == LPAREN) instrumentPos(prevStart)
-      else prevEnd
-    }
-
-    private def addSkip(stat: Tree): Unit = {
-      val ipos = instrumentPos(stat.pos.start)
-      if (stat.pos.start > skipped) applyPendingPatches(ipos)
-      if (stat.pos.start >= endOffset)
-        patches += Patch(ipos, ";$stop()")
-      var end = stat.pos.end
-      if (end > skipped) {
-        while (end < contents.length && !isLineBreakChar(contents(end))) end += 1
-        patches += Patch(ipos, ";$skip("+(end-skipped)+"); ")
-        skipped = end
-      }
-    }
-
-    private def addSandbox(expr: Tree) = {}
-//      patches += (Patch(expr.pos.start, "sandbox("), Patch(expr.pos.end, ")"))
-
-    private def resultString(prefix: String, expr: String) =
-      literal(prefix + " = ") + " + $show(" + expr + ")"
-
-    private def traverseStat(stat: Tree) =
-      if (stat.pos.isInstanceOf[RangePosition]) {
-        stat match {
-          case ValDef(_, _, _, rhs) =>
-            addSkip(stat)
-            if (stat.symbol.isLazy)
-              toPrint += literal(nameType(stat.symbol) + " = <lazy>")
-            else if (!stat.symbol.isSynthetic) {
-              addSandbox(rhs)
-              toPrint += resultString(nameType(stat.symbol), stat.symbol.name.toString)
-            }
-          case DefDef(_, _, _, _, _, _) =>
-            addSkip(stat)
-            toPrint += literal(nameType(stat.symbol))
-          case Annotated(_, arg) =>
-            traverse(arg)
-          case DocDef(_, defn) =>
-            traverse(defn)
-          case _ =>
-            if (stat.isTerm) {
-              addSkip(stat)
-              if (stat.tpe.typeSymbol == UnitClass) {
-                addSandbox(stat)
-              } else {
-                val resName = nextRes()
-                val dispResName = resName filter ('$' != _)
-                val offset = instrumentPos(stat.pos.start)
-                patches += Patch(offset, "val " + resName + " = ")
-                addSandbox(stat)
-                toPrint += resultString(nameType(dispResName, stat.tpe), resName)
-              }
-            }
-        }
-      }
-
-    override def traverse(tree: Tree): Unit = tree match {
-      case PackageDef(_, _) =>
-        super.traverse(tree)
-      case ModuleDef(_, name, Template(_, _, body)) =>
-        val topLevel = objectName.isEmpty
-        if (topLevel) {
-          objectName = tree.symbol.fullName
-          body foreach traverseStat
-          if (skipped != 0) { // don't issue prologue and epilogue if there are no instrumented statements
-            applyPendingPatches(skipped)
-            patches += Patch(skipped, epilogue)
-          }
-        }
-      case _ =>
-    }
-
-    /** The patched text.
-     *  @note Requires that `traverse` has been run first.
-     */
-    def result: Array[Char] = {
-      val reslen = contents.length + (patches map (_.text.length)).sum
-      val res = Array.ofDim[Char](reslen)
-      var lastOffset = 0
-      var from = 0
-      var to = 0
-      for (Patch(offset, text) <- patches) {
-        val delta = offset - lastOffset
-        assert(delta >= 0)
-        Array.copy(contents, from, res, to, delta)
-        from += delta
-        to += delta
-        lastOffset = offset
-        text.copyToArray(res, to)
-        to += text.length
-      }
-      assert(contents.length - from == reslen - to)
-      Array.copy(contents, from, res, to, contents.length - from)
-      res
-    }
-  }
-
-  class LexicalStructure(source: SourceFile) {
-    val token = new ArrayBuffer[Int]
-    val startOffset = new ArrayBuffer[Int]
-    val endOffset = new ArrayBuffer[Int]
-    private val scanner = new syntaxAnalyzer.UnitScanner(new CompilationUnit(source))
-    scanner.init()
-    while (scanner.token != EOF) {
-      startOffset += scanner.offset
-      token += scanner.token
-      scanner.nextToken
-      endOffset += scanner.lastOffset
-    }
-
-    /** @return token that starts before or at offset, its startOffset, its endOffset
-     */
-    def locate(offset: Int): (Int, Int, Int) = {
-      var lo = 0
-      var hi = token.length - 1
-      while (lo < hi) {
-        val mid = (lo + hi + 1) / 2
-        if (startOffset(mid) <= offset) lo = mid
-        else hi = mid - 1
-      }
-      (token(lo), startOffset(lo), endOffset(lo))
-    }
-  }
-
-  /** Compute an instrumented version of a sourcefile.
-   *  @param source  The given sourcefile.
-   *  @param line    The line up to which results should be printed, -1 = whole document.
-   *  @return        A pair consisting of
-   *                  - the fully qualified name of the first top-level object definition in the file,
-   *                    or "" if there are no object definitions.
-   *                  - the text of the instrumented program which, when run,
-   *                    prints its output and all defined values in a comment column.
-   */
-  protected def instrument(source: SourceFile, line: Int): (String, Array[Char]) = {
-    val tree = typedTree(source, true)
-    val endOffset = if (line < 0) source.length else source.lineToOffset(line + 1)
-    val patcher = new Patcher(source.content, new LexicalStructure(source), endOffset)
-    patcher.traverse(tree)
-    (patcher.objectName, patcher.result)
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
deleted file mode 100644
index 465dcaa..0000000
--- a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
+++ /dev/null
@@ -1,103 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection._
-
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import dependencies._
-
-import scala.reflect.internal.util.FakePos
-import io.AbstractFile
-
-/** A simple build manager, using the default scalac dependency tracker.
- *  The transitive closure of all dependent files on a modified file
- *  is recompiled at once.
- *
- *  It is equivalent to using a resident compiler mode with the
- *  '-make:transitive' option.
- */
-class SimpleBuildManager(val settings: Settings) extends BuildManager {
-
-  class BuilderGlobal(settings: Settings, reporter : Reporter) extends scala.tools.nsc.Global(settings, reporter)  {
-
-    def this(settings: Settings) =
-      this(settings, new ConsoleReporter(settings))
-
-    def newRun() = new Run()
-  }
-
-  protected def newCompiler(settings: Settings) = new BuilderGlobal(settings)
-
-  val compiler = newCompiler(settings)
-
-  /** Managed source files. */
-  private val sources: mutable.Set[AbstractFile] = new mutable.HashSet[AbstractFile]
-
-  /** Add the given source files to the managed build process. */
-  def addSourceFiles(files: Set[AbstractFile]) {
-    sources ++= files
-    update(files)
-  }
-
-  /** Remove the given files from the managed build process. */
-  def removeFiles(files: Set[AbstractFile]) {
-    sources --= files
-    deleteClassfiles(files)
-    update(invalidatedByRemove(files))
-  }
-
-
-  /** Return the set of files invalidated by removing the given files. */
-  private def invalidatedByRemove(files: Set[AbstractFile]): Set[AbstractFile] = {
-    val deps = compiler.dependencyAnalysis.dependencies
-    deps.dependentFiles(Int.MaxValue, files)
-  }
-
-  def update(added: Set[AbstractFile], removed: Set[AbstractFile]) {
-    sources --= removed
-    deleteClassfiles(removed)
-    update(added ++ invalidatedByRemove(removed))
-  }
-
-  /** The given files have been modified by the user. Recompile
-   *  them and all files that depend on them. Only files that
-   *  have been previously added as source files are recompiled.
-   */
-  def update(files: Set[AbstractFile]) {
-    deleteClassfiles(files)
-
-    val deps = compiler.dependencyAnalysis.dependencies
-    val run = compiler.newRun()
-    compiler.inform("compiling " + files)
-
-    val toCompile =
-      (files ++ deps.dependentFiles(Int.MaxValue, files)) intersect sources
-
-
-    compiler.inform("Recompiling " +
-                    (if(settings.debug.value) toCompile.mkString(", ")
-                     else toCompile.size + " files"))
-
-    buildingFiles(toCompile)
-
-    run.compileFiles(toCompile.toList)
-  }
-
-  /** Load saved dependency information. */
-  def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean = {
-    val success = compiler.dependencyAnalysis.loadFrom(file, toFile)
-    if (success)
-      sources ++= compiler.dependencyAnalysis.managedFiles
-    success
-  }
-
-  /** Save dependency information to `file`. */
-  def saveTo(file: AbstractFile, fromFile: AbstractFile => String) {
-    compiler.dependencyAnalysis.dependenciesFile = file
-    compiler.dependencyAnalysis.saveDependencies(fromFile)
-  }
-}
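A minimal sketch of driving the build manager above; the source paths are illustrative
and are assumed to exist on disk:

    import scala.tools.nsc.Settings
    import scala.tools.nsc.io.AbstractFile
    import scala.tools.nsc.interactive.SimpleBuildManager

    val settings = new Settings()
    settings.usejavacp.value = true               // reuse the JVM's classpath for the build
    val bm = new SimpleBuildManager(settings)

    val a = AbstractFile.getFile("src/A.scala")   // illustrative paths
    val b = AbstractFile.getFile("src/B.scala")
    bm.addSourceFiles(Set(a, b))                  // initial build of the managed sources
    bm.update(Set(a), Set.empty[AbstractFile])    // A was edited, nothing removed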
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
deleted file mode 100644
index 1c722ea..0000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-package tests
-
-import core._
-
-import java.io.File.pathSeparatorChar
-import java.io.File.separatorChar
-
-import scala.annotation.migration
-import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.SourceFile
-
-import scala.collection.mutable.ListBuffer
-
-/** A base class for writing interactive compiler tests.
- *
- *  This class tries to cover common functionality needed when testing the presentation
- *  compiler: instantiating source files, reloading, creating positions, instantiating
- *  the presentation compiler, random stress testing.
- *
- *  By default, this class loads all scala and java classes found under `src/`, going
- *  recursively into subfolders. Loaded sources are available in `sourceFiles` (see trait `TestResources`).
- *  The presentation compiler is available through `compiler`.
- *
- *  It is easy to test member completion, type queries and hyperlinking at a given position. Source
- *  files are searched for `TextMarkers`. By default, the completion marker is `/*!*/`, the
- *  typedAt marker is `/*?*/` and the hyperlinking marker is `/*#*/`. Place these markers in
- *  your source files, and the test framework will automatically pick them up and test the
- *  corresponding actions. Sources are reloaded by `askReload(sourceFiles)` (blocking
- *  call). All ask operations are placed on the work queue without waiting for each one to
- *  complete before asking the next. After all asks, it waits for each response in turn and
- *  prints the result. The default timeout is 1 second per operation.
- *
- *  To define a custom operation you have to:
- *
- *  	(1) Define a new marker by extending `TestMarker`
- *  	(2) Provide an implementation for the operation you want to check by extending `PresentationCompilerTestDef`
- *  	(3) Add the class defined in (2) to the set of executed test actions by calling `++` on `InteractiveTest`.
- *
- *  Then you can simply use the new defined `marker` in your test sources and the testing
- *  framework will automatically pick it up.
- *
- *  @see   Check existing tests under test/files/presentation
- *
- *  @author Iulian Dragos
- *  @author Mirco Dotta
- */
-abstract class InteractiveTest
-  extends AskParse
-  with AskShutdown
-  with AskReload
-  with AskLoadedTyped
-  with PresentationCompilerInstance
-  with CoreTestDefs
-  with InteractiveTestSettings { self =>
-
-  protected val runRandomTests = false
-
-  /** Should askAllSources wait for each ask to finish before issuing the next? */
-  override protected val synchronousRequests = true
-
-  /** The core set of test actions that are executed during each test run are
-   *  `CompletionAction`, `TypeAction` and `HyperlinkAction`.
-   *  Override this member if you need to change the default set of executed test actions.
-   */
-  protected lazy val testActions: ListBuffer[PresentationCompilerTestDef] = {
-    ListBuffer(new TypeCompletionAction(compiler), new ScopeCompletionAction(compiler), new TypeAction(compiler), new HyperlinkAction(compiler))
-  }
-
-  /** Add new presentation compiler actions to test. Presentation compiler tests
-   *  need to extend trait `PresentationCompilerTestDef`.
-   */
-  protected def ++(tests: PresentationCompilerTestDef*) {
-    testActions ++= tests
-  }
-
-  /** Test's entry point */
-  def main(args: Array[String]) {
-    try execute()
-    finally shutdown()
-  }
-
-  protected def execute(): Unit = {
-    loadSources()
-    runDefaultTests()
-  }
-
-  /** Load all sources before executing the test. */
-  protected def loadSources() {
-    // ask the presentation compiler to track all sources. We do
-    // not wait for the file to be entirely typed because we do want
-    // to exercise the presentation compiler on scoped type requests.
-    askReload(sourceFiles)
-    // make sure all sources are parsed before running the test. This
-    // is because tests may depend on the sources having been parsed at
-    // least once
-    askParse(sourceFiles)
-  }
-
-  /** Run all defined `PresentationCompilerTestDef` */
-  protected def runDefaultTests() {
-    //TODO: integrate random tests!, i.e.: if (runRandomTests) randomTests(20, sourceFiles)
-    testActions.foreach(_.runTest())
-  }
-
-  /** Perform n random tests with random changes. */
-  private def randomTests(n: Int, files: Array[SourceFile]) {
-    val tester = new Tester(n, files, settings) {
-      override val compiler = self.compiler
-      override val reporter = new reporters.StoreReporter
-    }
-    tester.run()
-  }
-
-  /** shutdown the presentation compiler. */
-  protected def shutdown() {
-    askShutdown()
-
-    // this is actually needed to force exit on test completion.
-    // Note: may be a bug in either the testing framework or (less likely)
-    // the presentation compiler.
-    sys.exit(0)
-  }
-}
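A sketch of the three steps described in the class comment, with invented names; it is
assumed to live under scala.tools.nsc.interactive.tests (like the built-in actions in
CoreTestDefs), since `runTest` is package-private there:

    // (1) a fresh marker, distinct from the built-in ones
    object ExampleMarker extends TestMarker("/*^*/")

    // (2) a test action executed wherever the marker occurs (body left as a stub;
    //     a real action would issue asks against `compiler` and print the results)
    class ExampleAction extends PresentationCompilerTestDef {
      override def runTest(): Unit = ()
    }

    // (3) register it from within a concrete InteractiveTest subclass:
    //     this ++ (new ExampleAction)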
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
deleted file mode 100644
index 4d85ab9..0000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-package scala.tools.nsc
-package interactive
-package tests
-
-import java.io.File.pathSeparatorChar
-import java.io.File.separatorChar
-import scala.tools.nsc.interactive.tests.core.PresentationCompilerInstance
-import scala.tools.nsc.io.{File,Path}
-import core.Reporter
-import core.TestSettings
-
-trait InteractiveTestSettings extends TestSettings with PresentationCompilerInstance {
-  /** Character delimiter for comments in the .flags file */
-  private final val CommentStartDelimiter = "#"
-
-  private final val TestOptionsFileExtension = "flags"
-
-  /** Prepare the settings object. Load the .flags file and adjust all paths from the
-   *  Unix-like syntax to the platform specific syntax. This is necessary so that a
-   *  single .opts file can be used on all platforms.
-   *
-   *  @note Bootclasspath is treated specially. If there is a -bootclasspath option in
-   *        the file, the 'usejavacp' setting is set to false. This ensures that the
-   *        bootclasspath takes precedence over the scala-library used to run the current
-   *        test.
-   */
-  override protected def prepareSettings(settings: Settings) {
-    import java.io.File._
-    def adjustPaths(paths: settings.PathSetting*) {
-      for (p <- paths if argsString.contains(p.name)) p.value = p.value.map {
-        case '/' => separatorChar
-        case ':' => pathSeparatorChar
-        case c   => c
-      }
-    }
-
-    // needed so that the classpath comes from what partest provides
-    // instead of from scala.home
-    settings.usejavacp.value = !argsString.contains("-bootclasspath")
-
-    // pass any options coming from outside
-    settings.processArgumentString(argsString) match {
-      case (false, rest) =>
-        println("error processing arguments (unprocessed: %s)".format(rest))
-      case _ => ()
-    }
-
-    // Make the --sourcepath path provided in the .flags file (if any) relative to the test's base directory 
-    if(settings.sourcepath.isSetByUser)
-      settings.sourcepath.value = (baseDir / Path(settings.sourcepath.value)).path
-    
-    adjustPaths(settings.bootclasspath, settings.classpath, settings.javabootclasspath, settings.sourcepath)
-  }
-
-  /** If there's a file ending in .flags, read it and parse it for cmd line arguments. */
-  protected val argsString = {
-    val optsFile = outDir / "%s.%s".format(System.getProperty("partest.testname"), TestOptionsFileExtension)
-    val str = try File(optsFile).slurp() catch {
-      case e: java.io.IOException => ""
-    }
-    str.lines.filter(!_.startsWith(CommentStartDelimiter)).mkString(" ")
-  }
-
-  override protected def printClassPath(implicit reporter: Reporter) {
-    reporter.println("\toutDir: %s".format(outDir.path))
-    reporter.println("\tbaseDir: %s".format(baseDir.path))
-    reporter.println("\targsString: %s".format(argsString))
-    super.printClassPath(reporter)
-  }
-}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala b/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
deleted file mode 100644
index 26aabbd..0000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
+++ /dev/null
@@ -1,208 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-package tests
-
-import scala.reflect.internal.util._
-import reporters._
-import io.AbstractFile
-import scala.collection.mutable.ArrayBuffer
-
-class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
-
-  val reporter = new StoreReporter
-  val compiler = new Global(settings, reporter)
-
-  def askAndListen[T, U](msg: String,  arg: T, op: (T, Response[U]) => Unit) {
-    if (settings.verbose.value) print(msg+" "+arg+": ")
-    val TIMEOUT = 10 // ms
-    val limit = System.currentTimeMillis() + randomDelayMillis
-    val res = new Response[U]
-    op(arg, res)
-    while (!res.isComplete && !res.isCancelled) {
-      if (System.currentTimeMillis() > limit) {
-        print("c"); res.cancel()
-      } else res.get(TIMEOUT) match {
-        case Some(Left(t)) =>
-          /**/
-          if (settings.verbose.value) println(t)
-        case Some(Right(ex)) =>
-          ex.printStackTrace()
-          println(ex)
-        case None =>
-      }
-    }
-  }
-
-  def askReload(sfs: SourceFile*) = askAndListen("reload", sfs.toList, compiler.askReload)
-  def askTypeAt(pos: Position) = askAndListen("type at", pos, compiler.askTypeAt)
-  def askTypeCompletion(pos: Position) = askAndListen("type at", pos, compiler.askTypeCompletion)
-  def askScopeCompletion(pos: Position) = askAndListen("type at", pos, compiler.askScopeCompletion)
-
-  val rand = new java.util.Random()
-
-  private def randomInverse(n: Int) = n / (rand.nextInt(n) + 1)
-
-  private def randomDecreasing(n: Int) = {
-    var r = rand.nextInt((1 to n).sum)
-    var limit = n
-    var result = 0
-    while (r > limit) {
-      result += 1
-      r -= limit
-      limit -= 1
-    }
-    result
-  }
-
-  def randomSourceFileIdx() = rand.nextInt(inputs.length)
-
-  def randomBatchesPerSourceFile(): Int = randomDecreasing(100)
-
-  def randomChangesPerBatch(): Int = randomInverse(50)
-
-  def randomPositionIn(sf: SourceFile) = rand.nextInt(sf.content.length)
-
-  def randomNumChars() = randomInverse(100)
-
-  def randomDelayMillis = randomInverse(10000)
-
-  class Change(sfidx: Int, start: Int, nchars: Int, toLeft: Boolean) {
-
-    private var pos = start
-    private var deleted: List[Char] = List()
-
-    override def toString =
-      "In "+inputs(sfidx)+" at "+start+" take "+nchars+" to "+
-      (if (toLeft) "left" else "right")
-
-    def deleteOne() {
-      val sf = inputs(sfidx)
-      deleted = sf.content(pos) :: deleted
-      val sf1 = new BatchSourceFile(sf.file, sf.content.take(pos) ++ sf.content.drop(pos + 1))
-      inputs(sfidx) = sf1
-      askReload(sf1)
-    }
-
-    def deleteAll() {
-      print("/"+nchars)
-      for (i <- 0 until nchars) {
-        if (toLeft) {
-          if (pos > 0 && pos <= inputs(sfidx).length) {
-            pos -= 1
-            deleteOne()
-          }
-        } else {
-          if (pos  < inputs(sfidx).length) {
-            deleteOne()
-          }
-        }
-      }
-    }
-
-    def insertAll() {
-      for (chr <- if (toLeft) deleted else deleted.reverse) {
-        val sf = inputs(sfidx)
-        val (pre, post) = sf./**/content splitAt pos
-        pos += 1
-        val sf1 = new BatchSourceFile(sf.file, pre ++ (chr +: post))
-        inputs(sfidx) = sf1
-        askReload(sf1)
-      }
-    }
-  }
-
-  val testComment = "/**/"
-
-  def testFileChanges(sfidx: Int) = {
-    lazy val testPositions: Seq[Int] = {
-      val sf = inputs(sfidx)
-      val buf = new ArrayBuffer[Int]
-      var pos = sf.content.indexOfSlice(testComment)
-      while (pos > 0) {
-        buf += pos
-        pos = sf.content.indexOfSlice(testComment, pos + 1)
-      }
-      buf
-    }
-    def otherTest() {
-      if (testPositions.nonEmpty) {
-        val pos = new OffsetPosition(inputs(sfidx), testPositions(rand.nextInt(testPositions.length)))
-        rand.nextInt(3) match {
-          case 0 => askTypeAt(pos)
-          case 1 => askTypeCompletion(pos)
-          case 2 => askScopeCompletion(pos)
-        }
-      }
-    }
-    for (i <- 0 until randomBatchesPerSourceFile()) {
-      val changes = Vector.fill(/**/randomChangesPerBatch()) {
-        /**/
-        new Change(sfidx, randomPositionIn(inputs(sfidx)), randomNumChars(), rand.nextBoolean())
-      }
-      doTest(sfidx, changes, testPositions, otherTest) match {
-        case Some(errortrace) =>
-          println(errortrace)
-          minimize(errortrace)
-        case None =>
-      }
-    }
-  }
-
-  def doTest(sfidx: Int, changes: Seq[Change], testPositions: Seq[Int], otherTest: () => Unit): Option[ErrorTrace] = {
-    print("new round with "+changes.length+" changes:")
-    changes foreach (_.deleteAll())
-    otherTest()
-    def errorCount() = compiler.ask(() => reporter.ERROR.count)
-//    println("\nhalf test round: "+errorCount())
-    changes.view.reverse foreach (_.insertAll())
-    otherTest()
-    println("done test round: "+errorCount())
-    if (errorCount() != 0)
-      Some(ErrorTrace(sfidx, changes, reporter.infos, inputs(sfidx).content))
-    else
-      None
-  }
-
-  case class ErrorTrace(
-    sfidx: Int, changes: Seq[Change], infos: scala.collection.Set[reporter.Info], content: Array[Char]) {
-    override def toString =
-      "Sourcefile: "+inputs(sfidx)+
-      "\nChanges:\n  "+changes.mkString("\n  ")+
-      "\nErrors:\n  "+infos.mkString("\n  ")+
-      "\nContents:\n"+content.mkString
-  }
-
-  def minimize(etrace: ErrorTrace) {}
-
-  /**/
-  def run() {
-    askReload(inputs: _*)
-    for (i <- 0 until ntests)
-      testFileChanges(randomSourceFileIdx())
-  }
-}
-
-/* A program to do presentation compiler stress tests.
- * Usage:
- *
- *  scala scala.tools.nsc.interactive.tests.Tester <n> <files>
- *
- * where <n> is the number of tests to be run and <files> is the set of files to test.
- * This will do random deletions and re-insertions in any of the files.
- * At places where an empty comment /**/ appears it will in addition randomly
- * do ask-types, type-completions, or scope-completions.
- */
-object Tester {
-  def main(args: Array[String]) {
-    val settings = new Settings()
-    val (_, filenames) = settings.processArguments(args.toList.tail, true)
-    println("filenames = "+filenames)
-    val files = filenames.toArray map (str => new BatchSourceFile(AbstractFile.getFile(str)): SourceFile)
-    new Tester(args(0).toInt, files, settings).run()
-    sys.exit(0)
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
deleted file mode 100644
index 214f7a4..0000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
+++ /dev/null
@@ -1,133 +0,0 @@
-package scala.tools.nsc
-package interactive
-package tests.core
-
-import scala.reflect.internal.util.Position
-import scala.tools.nsc.interactive.tests.core._
-
-/** Set of core test definitions that are executed for each test run. */
-private[tests] trait CoreTestDefs
-	extends PresentationCompilerRequestsWorkingMode {
-
-  import scala.tools.nsc.interactive.Global
-
-  /** Ask the presentation compiler for completion at all locations
-   * (in all sources) where the defined `marker` is found. */
-  class TypeCompletionAction(override val compiler: Global)
-    extends PresentationCompilerTestDef
-    with AskTypeCompletionAt {
-
-    def memberPrinter(member: compiler.Member): String =
-        "[accessible: %5s] ".format(member.accessible) + "`" + (member.sym.toString() + member.tpe.toString()).trim() + "`"
-
-    override def runTest() {
-      askAllSources(TypeCompletionMarker) { pos =>
-        askTypeCompletionAt(pos)
-      } { (pos, members) =>
-        withResponseDelimiter {
-          reporter.println("[response] askTypeCompletion at " + format(pos))
-          // we skip getClass because it changed signature between 1.5 and 1.6, so there is no
-          // universal check file that we can provide for this to work
-          reporter.println("retrieved %d members".format(members.size))
-          compiler ask { () =>
-            val filtered = members.filterNot(member => member.sym.name.toString == "getClass" || member.sym.isConstructor)
-            reporter.println(filtered.map(memberPrinter).sortBy(_.toString()).mkString("\n"))
-          }
-        }
-      }
-    }
-  }
-
-  /** Ask the presentation compiler for completion at all locations
-   * (in all sources) where the defined `marker` is found. */
-  class ScopeCompletionAction(override val compiler: Global)
-    extends PresentationCompilerTestDef
-    with AskScopeCompletionAt {
-
-    def memberPrinter(member: compiler.Member): String =
-        "[accessible: %5s] ".format(member.accessible) + "`" + (member.sym.toString() + member.tpe.toString()).trim() + "`"
-
-    override def runTest() {
-      askAllSources(ScopeCompletionMarker) { pos =>
-        askScopeCompletionAt(pos)
-      } { (pos, members) =>
-        withResponseDelimiter {
-          reporter.println("[response] askScopeCompletion at " + format(pos))
-          try {
-            // exclude members not from source (they don't have a position), for more focused and self-contained tests.
-            def eligible(sym: compiler.Symbol) = sym.pos != compiler.NoPosition
-            val filtered = members.filter(member => eligible(member.sym))
-            reporter.println("retrieved %d members".format(filtered.size))
-            compiler ask { () =>
-              reporter.println(filtered.map(memberPrinter).sortBy(_.toString()).mkString("\n"))
-            }
-          } catch {
-            case t: Throwable =>
-              t.printStackTrace()
-          }
-
-        }
-      }
-    }
-  }
-
-  /** Ask the presentation compiler for type info at all locations
-   * (in all sources) where the defined `marker` is found. */
-  class TypeAction(override val compiler: Global)
-    extends PresentationCompilerTestDef
-    with AskTypeAt {
-
-    override def runTest() {
-      askAllSources(TypeMarker) { pos =>
-        askTypeAt(pos)
-      } { (pos, tree) =>
-        withResponseDelimiter {
-          reporter.println("[response] askTypeAt at " + format(pos))
-          compiler.ask(() => reporter.println(tree))
-        }
-      }
-    }
-  }
-
-  /** Ask the presentation compiler for hyperlink at all locations
-   * (in all sources) where the defined `marker` is found. */
-  class HyperlinkAction(override val compiler: Global)
-    extends PresentationCompilerTestDef
-    with AskTypeAt
-    with AskTypeCompletionAt {
-
-    override def runTest() {
-      askAllSources(HyperlinkMarker) { pos =>
-        askTypeAt(pos)(NullReporter)
-      } { (pos, tree) =>
-        if(tree.symbol == compiler.NoSymbol) {
-          reporter.println("\nNo symbol is associated with tree: "+tree)
-        }
-        else {
-          reporter.println("\naskHyperlinkPos for `" + tree.symbol.name + "` at " + format(pos) + " " + pos.source.file.name)
-          val r = new Response[Position]
-          // `tree.symbol.sourceFile` was discovered to be null when testing using virtpatmat on the akka presentation test, where a position had shifted to point to `Int`
-          // askHyperlinkPos for `Int` at (73,19) pi.scala --> class Int in package scala has null sourceFile!
-          val treePath = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.path else null
-          val treeName = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.name else null
-          val sourceFile = sourceFiles.find(_.path == treePath) match {
-            case Some(source) =>
-              compiler.askLinkPos(tree.symbol, source, r)
-              r.get match {
-                case Left(pos) =>
-                  val resolvedPos = if (tree.symbol.pos.isDefined) tree.symbol.pos else pos
-                  withResponseDelimiter {
-                    reporter.println("[response] found askHyperlinkPos for `" + tree.symbol.name + "` at " + format(resolvedPos) + " " + tree.symbol.sourceFile.name)
-                  }
-                case Right(ex) =>
-                  ex.printStackTrace()
-              }
-            case None =>
-              reporter.println("[error] could not locate sourcefile `" + treeName + "`." +
-                " Hint: Does the looked-up definition come from a binary?")
-          }
-        }
-      }
-    }
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
deleted file mode 100644
index f304eda..0000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-package scala.tools.nsc
-package interactive
-package tests.core
-
-import reporters.{Reporter => CompilerReporter}
-import scala.reflect.internal.util.Position
-
-/** Trait encapsulating the creation of a presentation compiler's instance.*/
-private[tests] trait PresentationCompilerInstance extends TestSettings {
-  protected val settings = new Settings
-  protected val withDocComments = false
-
-  protected val compilerReporter: CompilerReporter = new InteractiveReporter {
-    override def compiler = PresentationCompilerInstance.this.compiler
-  }
-
-  protected lazy val compiler: Global = {
-    prepareSettings(settings)
-    new Global(settings, compilerReporter) {
-      override def forScaladoc = withDocComments
-    }
-  }
-
-  /**
-   * Called before instantiating the presentation compiler's instance.
-   * You should provide an implementation of this method if you need
-   * to customize the `settings` used to instantiate the presentation compiler.
-   * */
-  protected def prepareSettings(settings: Settings) {}
-
-  protected def printClassPath(implicit reporter: Reporter) {
-    reporter.println("\tbootClassPath: %s".format(settings.bootclasspath.value))
-    reporter.println("\tverbose: %b".format(settings.verbose.value))
-  }
-}
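A sketch of the customization hook described in the comment above; the flags are only an
example, and the trait is assumed to live in the same tests.core package since
PresentationCompilerInstance is private[tests]:

    // Add extra scalac options before the presentation compiler is instantiated.
    trait VerboseCompilerInstance extends PresentationCompilerInstance {
      override protected def prepareSettings(settings: Settings): Unit = {
        settings.processArgumentString("-deprecation -unchecked")  // any scalac options work here
      }
    }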
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
deleted file mode 100644
index 9cf2aa4..0000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-package scala.tools.nsc.interactive.tests.core
-
-import scala.tools.nsc.interactive.Global
-import scala.reflect.internal.util.Position
-
-trait PresentationCompilerTestDef {
-
-  private[tests] def runTest(): Unit
-
-  protected def withResponseDelimiter(block: => Unit)(implicit reporter: Reporter) {
-    def printDelimiter() = reporter.println("=" * 80)
-    printDelimiter()
-    block
-    printDelimiter()
-  }
-
-  protected def format(pos: Position): String =
-    (if(pos.isDefined) "(%d,%d)".format(pos.line, pos.column) else "<no position>")
-}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
deleted file mode 100644
index e80b741..0000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package scala.tools.nsc.interactive.tests.core
-
-import scala.reflect.internal.util.{SourceFile,BatchSourceFile}
-import scala.tools.nsc.io.{AbstractFile,Path}
-
-private[tests] object SourcesCollector {
-  import Path._
-  type SourceFilter =  Path => Boolean
-
-  /**
-   * All files below the `base` directory that pass the `filter`.
-   * With the default `filter` only .scala and .java files are collected.
-   */
-  def apply(base: Path, filter: SourceFilter): Array[SourceFile] = {
-    assert(base.isDirectory)
-    base.walk.filter(filter).map(source).toList.toArray.sortBy(_.file.name)
-  }
-
-  private def source(file: Path): SourceFile = source(AbstractFile.getFile(file.toFile))
-  private def source(filename: String): SourceFile = source(AbstractFile.getFile(filename))
-  private def source(file: AbstractFile): SourceFile = new BatchSourceFile(file)
-}
\ No newline at end of file
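A minimal usage sketch; the directory name and the filter are illustrative, apply()
asserts that the base path is a directory, and the object is private[tests]:

    import scala.tools.nsc.io.Path
    // collect every .scala file below src/
    val scalaSources = SourcesCollector(Path("src"), p => p.path.endsWith(".scala"))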
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
deleted file mode 100644
index 8698ada..0000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-package scala.tools.nsc.interactive.tests.core
-
-case class DuplicateTestMarker(msg: String) extends Exception(msg)
-
-object TestMarker {
-  import scala.collection.mutable.Map
-  private val markers: Map[String, TestMarker] = Map.empty
-
-  private def checkForDuplicate(marker: TestMarker) {
-    markers.get(marker.marker) match {
-      case None => markers(marker.marker) = marker
-      case Some(otherMarker) =>
-        val msg = "Marker `%s` is already used by %s. Please choose a different marker for %s".format(marker.marker, marker, otherMarker)
-        throw new DuplicateTestMarker(msg)
-    }
-  }
-}
-
-abstract case class TestMarker(val marker: String) {
-  TestMarker.checkForDuplicate(this)
-}
-
-object TypeCompletionMarker extends TestMarker("/*!*/")
-
-object ScopeCompletionMarker extends TestMarker("/*_*/")
-
-object TypeMarker extends TestMarker("/*?*/")
-
-object HyperlinkMarker extends TestMarker("/*#*/")
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
deleted file mode 100644
index 59508fa..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.tools.nsc.io.{ File, AbstractFile }
-import util.ScalaClassLoader
-import java.net.{ URL, URLConnection, URLStreamHandler }
-import scala.collection.{ mutable, immutable }
-
-/**
- * A class loader that loads files from a {@link scala.tools.nsc.io.AbstractFile}.
- *
- * @author Lex Spoon
- */
-class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
-    extends ClassLoader(parent)
-    with ScalaClassLoader
-{
-  protected def classNameToPath(name: String): String =
-    if (name endsWith ".class") name
-    else name.replace('.', '/') + ".class"
-
-  protected def findAbstractFile(name: String): AbstractFile = {
-    var file: AbstractFile = root
-    val pathParts          = classNameToPath(name) split '/'
-
-    for (dirPart <- pathParts.init) {
-      file = file.lookupName(dirPart, true)
-      if (file == null)
-        return null
-    }
-
-    file.lookupName(pathParts.last, false) match {
-      case null   => null
-      case file   => file
-    }
-  }
-
-  protected def dirNameToPath(name: String): String =
-    name.replace('.', '/')
-
-  protected def findAbstractDir(name: String): AbstractFile = {
-    var file: AbstractFile = root
-    val pathParts          = dirNameToPath(name) split '/'
-
-    for (dirPart <- pathParts) {
-      file = file.lookupName(dirPart, true)
-      if (file == null)
-        return null
-    }
-
-    return file
-  }
-
-  // parent delegation in JCL uses getResource; so either add parent.getResAsStream
-  // or implement findResource, which we do here as a study in scarlet (my complexion
-  // after looking at CLs and URLs)
-  override def findResource(name: String): URL = findAbstractFile(name) match {
-    case null => null
-    case file => new URL(null, "repldir:" + file.path, new URLStreamHandler {
-      override def openConnection(url: URL): URLConnection = new URLConnection(url) {
-        override def connect() { }
-        override def getInputStream = file.input
-      }
-    })
-  }
-  // this inverts delegation order: super.getResAsStr calls parent.getRes if we fail
-  override def getResourceAsStream(name: String) = findAbstractFile(name) match {
-    case null => super.getResourceAsStream(name)
-    case file => file.input
-  }
-  // ScalaClassLoader.classBytes uses getResAsStream, so we'll try again before delegating 
-  override def classBytes(name: String): Array[Byte] = findAbstractFile(name) match {
-    case null => super.classBytes(name)
-    case file => file.toByteArray
-  }
-  override def findClass(name: String): JClass = {
-    val bytes = classBytes(name)
-    if (bytes.length == 0)
-      throw new ClassNotFoundException(name)
-    else
-      defineClass(name, bytes, 0, bytes.length)
-  }
-
-  private val packages = mutable.Map[String, Package]()
-
-  override def definePackage(name: String, specTitle: String, specVersion: String, specVendor: String, implTitle: String, implVersion: String, implVendor: String, sealBase: URL): Package = {
-    throw new UnsupportedOperationException()
-  }
-
-  override def getPackage(name: String): Package = {
-    findAbstractDir(name) match {
-      case null => super.getPackage(name)
-      case file => packages.getOrElseUpdate(name, {
-        val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader])
-        ctor.setAccessible(true)
-        ctor.newInstance(name, null, null, null, null, null, null, null, this)
-      })
-    }
-  }
-
-  override def getPackages(): Array[Package] =
-    root.iterator.filter(_.isDirectory).map(dir => getPackage(dir.name)).toArray
-}
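A typical usage sketch: load a class from an in-memory directory, assuming its bytecode
was previously written below `dir` (for instance by a compiler run targeting that
directory); the class name is illustrative:

    import scala.tools.nsc.io.VirtualDirectory
    import scala.tools.nsc.interpreter.AbstractFileClassLoader

    val dir    = new VirtualDirectory("(memory)", None)
    val loader = new AbstractFileClassLoader(dir, getClass.getClassLoader)
    val clazz  = loader.loadClass("com.example.Foo")  // ClassNotFoundException if the bytes are missing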
diff --git a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
deleted file mode 100644
index 40e9d3d..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import java.lang.reflect
-import java.util.concurrent.ConcurrentHashMap
-import util.ScalaClassLoader
-import ScalaClassLoader.appLoader
-import scala.reflect.NameTransformer._
-
-object ByteCode {
-  /** Until I figure out why I can't get scalap onto the classpath such
-   *  that the compiler will bootstrap, we have to use reflection.
-   */
-  private lazy val DECODER: Option[AnyRef] =
-    for (clazz <- appLoader.tryToLoadClass[AnyRef]("scala.tools.scalap.Decode$")) yield
-      clazz.getField(MODULE_INSTANCE_NAME).get(null)
-
-  private def decoderMethod(name: String, args: JClass*): Option[reflect.Method] = {
-    for (decoder <- DECODER ; m <- Option(decoder.getClass.getMethod(name, args: _*))) yield m
-  }
-
-  private lazy val aliasMap = {
-    for (module <- DECODER ; method <- decoderMethod("typeAliases", classOf[String])) yield
-      method.invoke(module, _: String).asInstanceOf[Option[Map[String, String]]]
-  }
-
-  /** Scala sig bytes.
-   */
-  def scalaSigBytesForPath(path: String) =
-    for {
-      module <- DECODER
-      method <- decoderMethod("scalaSigAnnotationBytes", classOf[String])
-      names <- method.invoke(module, path).asInstanceOf[Option[Array[Byte]]]
-    }
-    yield names
-
-  /** Attempts to retrieve case parameter names for given class name.
-   */
-  def caseParamNamesForPath(path: String) =
-    for {
-      module <- DECODER
-      method <- decoderMethod("caseParamNames", classOf[String])
-      names <- method.invoke(module, path).asInstanceOf[Option[List[String]]]
-    }
-    yield names
-
-  def aliasesForPackage(pkg: String) = aliasMap flatMap (_(pkg))
-
-  /** Attempts to find type aliases in package objects.
-   */
-  def aliasForType(path: String): Option[String] = {
-    val (pkg, name) = (path lastIndexOf '.') match {
-      case -1   => return None
-      case idx  => (path take idx, path drop (idx + 1))
-    }
-    aliasesForPackage(pkg) flatMap (_ get name)
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala
deleted file mode 100644
index 1741a82..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import CodeHandlers.NoSuccess
-import scala.util.control.ControlThrowable
-
-/**
- *  The start of a simpler interface for utilizing the compiler with piecemeal
- *  code strings.  The "T" here could potentially be a Tree, a Type, a Symbol,
- *  a Boolean, or something even more exotic.
- */
-trait CodeHandlers[T] {
-  self =>
-
-  // Expressions are composed of operators and operands.
-  def expr(code: String): T
-
-  // Statements occur as parts of blocks and templates.
-  // A statement can be an import, a definition or an expression, or it can be empty.
-  // Statements used in the template of a class definition can also be declarations.
-  def stmt(code: String): T
-  def stmts(code: String): Seq[T]
-
-  object opt extends CodeHandlers[Option[T]] {
-    val handler: PartialFunction[Throwable, Option[T]] = {
-      case _: NoSuccess => None
-    }
-    val handlerSeq: PartialFunction[Throwable, Seq[Option[T]]] = {
-      case _: NoSuccess => Nil
-    }
-
-    def expr(code: String)   = try Some(self.expr(code)) catch handler
-    def stmt(code: String)   = try Some(self.stmt(code)) catch handler
-    def stmts(code: String)  = try (self.stmts(code) map (x => Some(x))) catch handlerSeq
-  }
-}
-
-object CodeHandlers {
-  def incomplete() = throw CodeIncomplete
-  def fail(msg: String) = throw new CodeException(msg)
-
-  trait NoSuccess extends ControlThrowable
-  class CodeException(msg: String) extends RuntimeException(msg) with NoSuccess { }
-  object CodeIncomplete extends CodeException("CodeIncomplete")
-}
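
The opt wrapper above converts the control-flow failures (NoSuccess, a ControlThrowable) back into Option/Nil results via a reusable partial-function handler. A self-contained sketch of that idiom, with a toy parse standing in for the real handlers:

    import scala.util.control.ControlThrowable

    object OptHandlerSketch {
      trait NoSuccess extends ControlThrowable
      object Incomplete extends RuntimeException("incomplete") with NoSuccess

      // Toy stand-in for a CodeHandlers method: throws the control exception
      // instead of returning a value when the input is unusable.
      def parse(code: String): Int =
        if (code.trim.isEmpty) throw Incomplete else code.trim.length

      val handler: PartialFunction[Throwable, Option[Int]] = { case _: NoSuccess => None }

      def parseOpt(code: String): Option[Int] = try Some(parse(code)) catch handler

      def main(args: Array[String]): Unit = {
        println(parseOpt("val x = 1"))   // Some(9)
        println(parseOpt("   "))         // None
      }
    }
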
diff --git a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala b/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala
deleted file mode 100644
index 8042f0a..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Lex Spoon
- */
-
-package scala.tools.nsc
-package interpreter
-
-/** A command line for the interpreter.
- */
-class CommandLine(arguments: List[String], error: String => Unit) extends CompilerCommand(arguments, error) {
-  override def cmdName = "scala"
-  override lazy val fileEndings = List(".scalaint")
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
deleted file mode 100644
index 1dfccbf..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import Completion._
-
-/** An implementation-agnostic completion interface which makes no
- *  reference to the jline classes.
- */
-trait Completion {
-  type ExecResult
-  def resetVerbosity(): Unit
-  def completer(): ScalaCompleter
-}
-object NoCompletion extends Completion {
-  type ExecResult = Nothing
-  def resetVerbosity() = ()
-  def completer() = NullCompleter
-}
-
-object Completion {
-  def empty: Completion = NoCompletion
-
-  case class Candidates(cursor: Int, candidates: List[String]) { }
-  val NoCandidates = Candidates(-1, Nil)
-
-  object NullCompleter extends ScalaCompleter {
-    def complete(buffer: String, cursor: Int): Candidates = NoCandidates
-  }
-  trait ScalaCompleter {
-    def complete(buffer: String, cursor: Int): Candidates
-  }
-
-  def looksLikeInvocation(code: String) = (
-        (code != null)
-    &&  (code startsWith ".")
-    && !(code == ".")
-    && !(code startsWith "./")
-    && !(code startsWith "..")
-  )
-  object Forwarder {
-    def apply(forwardTo: () => Option[CompletionAware]): CompletionAware = new CompletionAware {
-      def completions(verbosity: Int) = forwardTo() map (_ completions verbosity) getOrElse Nil
-      override def follow(s: String) = forwardTo() flatMap (_ follow s)
-    }
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
deleted file mode 100644
index ab96f41..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.reflect.NameTransformer
-
-/** An interface for objects which are aware of tab completion and
- *  will supply their own candidates and resolve their own paths.
- */
-trait CompletionAware {
-  /** The complete list of unqualified Strings to which this
-   *  object will complete.
-   */
-  def completions(verbosity: Int): List[String]
-
-  /** The next completer in the chain.
-   */
-  def follow(id: String): Option[CompletionAware] = None
-
-  /** A list of useful information regarding a specific uniquely
-   *  identified completion.  This is specifically written for the
-   *  following situation, but should be useful elsewhere too:
-   *
-   *    x.y.z.methodName<tab>
-   *
-   *  If "methodName" is among z's completions, and verbosity > 0
-   *  indicating tab has been pressed twice consecutively, then we
-   *  call alternativesFor and show a list of overloaded method
-   *  signatures.
-   */
-  def alternativesFor(id: String): List[String] = Nil
-
-  /** Given string 'buf', return a list of all the strings
-   *  to which it can complete.  This may involve delegating
-   *  to other CompletionAware objects.
-   */
-  def completionsFor(parsed: Parsed): List[String] = {
-    import parsed.{ buffer, verbosity }
-    val comps = completions(verbosity) filter (_ startsWith buffer)
-    val exact = comps contains buffer
-
-    val results =
-      if (parsed.isEmpty) comps
-      else if (parsed.isUnqualified && !parsed.isLastDelimiter)
-        if (verbosity > 0 && exact) alternativesFor(buffer)
-        else comps
-      else follow(parsed.bufferHead) map (_ completionsFor parsed.bufferTail) getOrElse Nil
-
-    results.sorted
-  }
-}
-
-object CompletionAware {
-  val Empty = new CompletionAware { def completions(verbosity: Int) = Nil }
-
-  def unapply(that: Any): Option[CompletionAware] = that match {
-    case x: CompletionAware => Some((x))
-    case _                  => None
-  }
-
-  /** Create a CompletionAware object from the given functions.
-   *  The first should generate the list of completions whenever queried,
-   *  and the second should return Some(CompletionAware) object if
-   *  subcompletions are possible.
-   */
-  def apply(terms: () => List[String], followFunction: String => Option[CompletionAware]): CompletionAware =
-    new CompletionAware {
-      def completions = terms()
-      def completions(verbosity: Int) = completions
-      override def follow(id: String) = followFunction(id)
-    }
-
-  /** Convenience factories.
-   */
-  def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None)
-  def apply(map: scala.collection.Map[String, CompletionAware]): CompletionAware =
-    apply(() => map.keys.toList, map.get _)
-}
-
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
deleted file mode 100644
index d14b5c7..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-/** This has a lot of duplication with other methods in Symbols and Types,
- *  but the repl completion utility is very sensitive to precise output.  The
- *  best thing would be to abstract an interface for how such things are printed,
- *  as is also in progress with error messages.
- */
-trait CompletionOutput {
-  val global: Global
-
-  import global._
-  import definitions.{ isTupleType, isFunctionType, isRepeatedParamType }
-
-  /** Reducing fully qualified noise for some common packages.
-   */
-  val typeTransforms = List(
-    "java.lang." -> "",
-    "scala.collection.immutable." -> "immutable.",
-    "scala.collection.mutable." -> "mutable.",
-    "scala.collection.generic." -> "generic."
-  )
-
-  def quietString(tp: String): String =
-    typeTransforms.foldLeft(tp) {
-      case (str, (prefix, replacement)) =>
-        if (str startsWith prefix) replacement + (str stripPrefix prefix)
-        else str
-    }
-
-  class MethodSymbolOutput(method: Symbol) {
-    val pkg       = method.ownerChain find (_.isPackageClass) map (_.fullName) getOrElse ""
-
-    def relativize(str: String): String = quietString(str stripPrefix (pkg + "."))
-    def relativize(tp: Type): String    = relativize(tp.dealiasWiden.toString)
-    def relativize(sym: Symbol): String = relativize(sym.info)    
-
-    def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]")
-    def parenList(params: List[Any])  = params.mkString("(", ", ", ")")
-
-    def methodTypeToString(mt: MethodType) =
-      (mt.paramss map paramsString mkString "") + ": " + relativize(mt.finalResultType)
-
-    def typeToString(tp: Type): String = relativize(
-      tp match {
-        case x if isFunctionType(x)      => functionString(x)
-        case x if isTupleType(x)         => tupleString(x)
-        case x if isRepeatedParamType(x) => typeToString(x.typeArgs.head) + "*"
-        case mt @ MethodType(_, _)       => methodTypeToString(mt)
-        case x                           => x.toString
-      }
-    )
-
-    def tupleString(tp: Type) = parenList(tp.dealiasWiden.typeArgs map relativize)
-    def functionString(tp: Type) = tp.dealiasWiden.typeArgs match {
-      case List(t, r) => t + " => " + r
-      case xs         => parenList(xs.init) + " => " + xs.last
-    }
-
-    def tparamsString(tparams: List[Symbol])  = braceList(tparams map (_.defString))
-    def paramsString(params: List[Symbol])    = {
-      def paramNameString(sym: Symbol)  = if (sym.isSynthetic) "" else sym.nameString + ": "
-      def paramString(sym: Symbol)      = paramNameString(sym) + typeToString(sym.info.dealiasWiden)
-
-      val isImplicit = params.nonEmpty && params.head.isImplicit
-      val strs = (params map paramString) match {
-        case x :: xs if isImplicit  => ("implicit " + x) :: xs
-        case xs                     => xs
-      }
-      parenList(strs)
-    }
-
-    def methodString() =
-      method.keyString + " " + method.nameString + (method.info.normalize match {
-        case NullaryMethodType(resType)         => ": " + typeToString(resType)
-        case PolyType(tparams, resType)         => tparamsString(tparams) + typeToString(resType)
-        case mt @ MethodType(_, _)              => methodTypeToString(mt)
-        case x                                  => x.toString
-      })
-  }
-}
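
The typeTransforms/quietString pair above is plain prefix rewriting; restated as a runnable sketch to show its effect on a couple of common types:

    object QuietStringSketch {
      val typeTransforms = List(
        "java.lang."                  -> "",
        "scala.collection.immutable." -> "immutable.",
        "scala.collection.mutable."   -> "mutable.",
        "scala.collection.generic."   -> "generic."
      )

      def quietString(tp: String): String =
        typeTransforms.foldLeft(tp) { case (str, (prefix, replacement)) =>
          if (str startsWith prefix) replacement + str.stripPrefix(prefix) else str
        }

      def main(args: Array[String]): Unit = {
        println(quietString("java.lang.String"))                      // String
        println(quietString("scala.collection.immutable.List[Int]"))  // immutable.List[Int]
      }
    }
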
diff --git a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
deleted file mode 100644
index 07e36f4..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.tools.jline.console.{ ConsoleReader, CursorBuffer }
-import scala.tools.jline.console.completer.CompletionHandler
-import Completion._
-
-trait ConsoleReaderHelper extends ConsoleReader {
-  def currentLine = "" + getCursorBuffer.buffer
-  def currentPos  = getCursorBuffer.cursor
-  def terminal    = getTerminal()
-  def width       = terminal.getWidth()
-  def height      = terminal.getHeight()
-  def paginate    = isPaginationEnabled()
-  def paginate_=(value: Boolean) = setPaginationEnabled(value)
-
-  def goBack(num: Int): Unit
-  def readOneKey(prompt: String): Int
-  def eraseLine(): Unit
-
-  private val marginSize = 3
-  private def morePrompt = "--More--"
-  private def emulateMore(): Int = {
-    val key = readOneKey(morePrompt)
-    try key match {
-      case '\r' | '\n'  => 1
-      case 'q'          => -1
-      case _            => height - 1
-    }
-    finally {
-      eraseLine()
-      // TODO: still not quite managing to erase --More-- and get
-      // back to a scala prompt without another keypress.
-      if (key == 'q') {
-        putString(getPrompt())
-        redrawLine()
-        flush()
-      }
-    }
-  }
-
-  override def printColumns(items: JCollection[_ <: CharSequence]): Unit =
-    printColumns(items: List[String])
-
-  def printColumns(items: List[String]): Unit = {
-    if (items forall (_ == ""))
-      return
-
-    val longest    = items map (_.length) max
-    var linesLeft  = if (isPaginationEnabled()) height - 1 else Int.MaxValue
-    val columnSize = longest + marginSize
-    val padded     = items map ("%-" + columnSize + "s" format _)
-    val groupSize  = 1 max (width / columnSize)   // make sure it doesn't divide to 0
-
-    padded grouped groupSize foreach { xs =>
-      println(xs.mkString)
-      linesLeft -= 1
-      if (linesLeft <= 0) {
-        linesLeft = emulateMore()
-        if (linesLeft < 0)
-          return
-      }
-    }
-  }
-}
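
The column layout in printColumns pads every item to the longest width plus a margin and then fits as many columns as the terminal width allows, never fewer than one. The arithmetic in isolation, with a hypothetical 40-column terminal:

    object ColumnLayoutSketch {
      def layout(items: List[String], width: Int, marginSize: Int = 3): List[String] = {
        val longest    = items.map(_.length).max
        val columnSize = longest + marginSize
        val padded     = items.map(("%-" + columnSize + "s").format(_))
        val groupSize  = 1 max (width / columnSize)   // make sure it doesn't divide to 0
        padded.grouped(groupSize).map(_.mkString).toList
      }

      def main(args: Array[String]): Unit =
        // 7 ("epsilon") + 3 margin = 10 per column, so 4 columns fit in width 40
        layout(List("alpha", "beta", "gamma", "delta", "epsilon"), width = 40) foreach println
    }
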
diff --git a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala
deleted file mode 100644
index 80debfa..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.tools.jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList }
-
-class JLineDelimiter extends ArgumentDelimiter {
-  def toJLine(args: List[String], cursor: Int) = args match {
-    case Nil    => new ArgumentList(new Array[String](0), 0, 0, cursor)
-    case xs     => new ArgumentList(xs.toArray, xs.size - 1, xs.last.length, cursor)
-  }
-
-  def delimit(buffer: CharSequence, cursor: Int) = {
-    val p = Parsed(buffer.toString, cursor)
-    toJLine(p.args, cursor)
-  }
-  def isDelimiter(buffer: CharSequence, cursor: Int) = Parsed(buffer.toString, cursor).isDelimiter
-}
-
-trait Delimited {
-  self: Parsed =>
-
-  def delimited: Char => Boolean
-  def escapeChars: List[Char] = List('\\')
-  def quoteChars: List[(Char, Char)] = List(('\'', '\''), ('"', '"'))
-
-  /** Break String into args based on delimiting function.
-   */
-  protected def toArgs(s: String): List[String] =
-    if (s == "") Nil
-    else (s indexWhere isDelimiterChar) match {
-      case -1   => List(s)
-      case idx  => (s take idx) :: toArgs(s drop (idx + 1))
-    }
-
-  def isDelimiterChar(ch: Char) = delimited(ch)
-  def isEscapeChar(ch: Char): Boolean = escapeChars contains ch
-  def isQuoteStart(ch: Char): Boolean = quoteChars map (_._1) contains ch
-  def isQuoteEnd(ch: Char): Boolean = quoteChars map (_._2) contains ch
-}
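
toArgs above splits on a caller-supplied delimiter predicate by repeatedly taking up to the next delimiter character. With whitespace as the delimiter (one plausible choice for illustration, not the only one the repl uses), it behaves like this:

    object ToArgsSketch {
      def isDelimiterChar(ch: Char): Boolean = ch.isWhitespace

      def toArgs(s: String): List[String] =
        if (s == "") Nil
        else s.indexWhere(isDelimiterChar) match {
          case -1  => List(s)
          case idx => s.take(idx) :: toArgs(s.drop(idx + 1))
        }

      def main(args: Array[String]): Unit =
        println(toArgs(":load foo.scala"))   // List(:load, foo.scala)
    }
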
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
deleted file mode 100644
index 827ebe1..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.reflect.internal.util.BatchSourceFile
-import scala.tools.nsc.ast.parser.Tokens.EOF
-
-trait ExprTyper {
-  val repl: IMain
-
-  import repl._
-  import global.{ reporter => _, Import => _, _ }
-  import definitions._
-  import syntaxAnalyzer.{ UnitParser, UnitScanner, token2name }
-  import naming.freshInternalVarName
-
-  object codeParser extends { val global: repl.global.type = repl.global } with CodeHandlers[Tree] {
-    def applyRule[T](code: String, rule: UnitParser => T): T = {
-      reporter.reset()
-      val scanner = newUnitParser(code)
-      val result  = rule(scanner)
-
-      if (!reporter.hasErrors)
-        scanner.accept(EOF)
-
-      result
-    }
-
-    def defns(code: String) = stmts(code) collect { case x: DefTree => x }
-    def expr(code: String)  = applyRule(code, _.expr())
-    def stmts(code: String) = applyRule(code, _.templateStats())
-    def stmt(code: String)  = stmts(code).last  // guaranteed nonempty
-  }
-
-  /** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
-  def parse(line: String): Option[List[Tree]] = debugging(s"""parse("$line")""")  {
-    var isIncomplete = false
-    reporter.withIncompleteHandler((_, _) => isIncomplete = true) {
-      val trees = codeParser.stmts(line)
-      if (reporter.hasErrors) Some(Nil)
-      else if (isIncomplete) None
-      else Some(trees)
-    }
-  }
-  // def parsesAsExpr(line: String) = {
-  //   import codeParser._
-  //   (opt expr line).isDefined
-  // }
-
-  def symbolOfLine(code: String): Symbol = {
-    def asExpr(): Symbol = {
-      val name  = freshInternalVarName()
-      // Typing it with a lazy val would give us the right type, but runs
-      // into compiler bugs with things like existentials, so we compile it
-      // behind a def and strip the NullaryMethodType which wraps the expr.
-      val line = "def " + name + " = " + code
-
-      interpretSynthetic(line) match {
-        case IR.Success =>
-          val sym0 = symbolOfTerm(name)
-          // drop NullaryMethodType
-          sym0.cloneSymbol setInfo afterTyper(sym0.info.finalResultType)
-        case _          => NoSymbol
-      }
-    }
-    def asDefn(): Symbol = {
-      val old = repl.definedSymbolList.toSet
-
-      interpretSynthetic(code) match {
-        case IR.Success =>
-          repl.definedSymbolList filterNot old match {
-            case Nil        => NoSymbol
-            case sym :: Nil => sym
-            case syms       => NoSymbol.newOverloaded(NoPrefix, syms)
-          }
-        case _ => NoSymbol
-      }
-    }
-    def asError(): Symbol = {
-      interpretSynthetic(code)
-      NoSymbol
-    }
-    beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError()
-  }
-
-  private var typeOfExpressionDepth = 0
-  def typeOfExpression(expr: String, silent: Boolean = true): Type = {
-    if (typeOfExpressionDepth > 2) {
-      repldbg("Terminating typeOfExpression recursion for expression: " + expr)
-      return NoType
-    }
-    typeOfExpressionDepth += 1
-    // Don't presently have a good way to suppress undesirable success output
-    // while letting errors through, so we first try it silently: if there
-    // is an error, and errors are desired, we re-evaluate non-silently
-    // to induce the error message.
-    try beSilentDuring(symbolOfLine(expr).tpe) match {
-      case NoType if !silent => symbolOfLine(expr).tpe // generate error
-      case tpe               => tpe
-    }
-    finally typeOfExpressionDepth -= 1
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
deleted file mode 100644
index ee45dc5..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
+++ /dev/null
@@ -1,966 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Alexander Spoon
- */
-
-package scala.tools.nsc
-package interpreter
-
-import Predef.{ println => _, _ }
-import java.io.{ BufferedReader, FileReader }
-import java.util.concurrent.locks.ReentrantLock
-import scala.sys.process.Process
-import session._
-import scala.util.Properties.{ envOrNone, javaHome, jdkHome, javaVersion }
-import scala.tools.util.{ Javap }
-import scala.annotation.tailrec
-import scala.collection.mutable.ListBuffer
-import scala.concurrent.ops
-import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
-import interpreter._
-import io.{ File, Directory, Path }
-import scala.reflect.NameTransformer._
-import util.ScalaClassLoader
-import ScalaClassLoader._
-import scala.tools.util._
-import scala.language.{implicitConversions, existentials}
-import scala.reflect.{ClassTag, classTag}
-import scala.tools.reflect.StdRuntimeTags._
-
-/** The Scala interactive shell.  It provides a read-eval-print loop
- *  around the Interpreter class.
- *  After instantiation, clients should call the main() method.
- *
- *  If no in0 is specified, then input will come from the console, and
- *  the class will attempt to provide input editing features such as
- *  input history.
- *
- *  @author Moez A. Abdel-Gawad
- *  @author  Lex Spoon
- *  @version 1.2
- */
-class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
-                extends AnyRef
-                   with LoopCommands
-                   with ILoopInit
-{
-  def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out)
-  def this() = this(None, new JPrintWriter(Console.out, true))
-
-  var in: InteractiveReader = _   // the input stream from which commands come
-  var settings: Settings = _
-  var intp: IMain = _
-
-  @deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp
-  @deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i
-
-  /** Having inherited the difficult "var-ness" of the repl instance,
-   *  I'm trying to work around it by moving operations into a class from
-   *  which it will appear as a stable prefix.
-   */
-  private def onIntp[T](f: IMain => T): T = f(intp)
-
-  class IMainOps[T <: IMain](val intp: T) {
-    import intp._
-    import global._
-
-    def printAfterTyper(msg: => String) =
-      intp.reporter printUntruncatedMessage afterTyper(msg)
-
-    /** Strip NullaryMethodType artifacts. */
-    private def replInfo(sym: Symbol) = {
-      sym.info match {
-        case NullaryMethodType(restpe) if sym.isAccessor  => restpe
-        case info                                         => info
-      }
-    }
-    def echoTypeStructure(sym: Symbol) =
-      printAfterTyper("" + deconstruct.show(replInfo(sym)))
-
-    def echoTypeSignature(sym: Symbol, verbose: Boolean) = {
-      if (verbose) ILoop.this.echo("// Type signature")
-      printAfterTyper("" + replInfo(sym))
-
-      if (verbose) {
-        ILoop.this.echo("\n// Internal Type structure")
-        echoTypeStructure(sym)
-      }
-    }
-  }
-  implicit def stabilizeIMain(intp: IMain) = new IMainOps[intp.type](intp)
-
-  /** TODO -
-   *  -n normalize
-   *  -l label with case class parameter names
-   *  -c complete - leave nothing out
-   */
-  private def typeCommandInternal(expr: String, verbose: Boolean): Result = {
-    onIntp { intp =>
-      val sym = intp.symbolOfLine(expr)
-      if (sym.exists) intp.echoTypeSignature(sym, verbose)
-      else ""
-    }
-  }
-
-  override def echoCommandMessage(msg: String) {
-    intp.reporter printUntruncatedMessage msg
-  }
-
-  def isAsync = !settings.Yreplsync.value
-  lazy val power = new Power(intp, new StdReplVals(this))(tagOfStdReplVals, classTag[StdReplVals])
-  def history = in.history
-
-  /** The context class loader at the time this object was created */
-  protected val originalClassLoader = Thread.currentThread.getContextClassLoader
-
-  // classpath entries added via :cp
-  var addedClasspath: String = ""
-
-  /** A reverse list of commands to replay if the user requests a :replay */
-  var replayCommandStack: List[String] = Nil
-
-  /** A list of commands to replay if the user requests a :replay */
-  def replayCommands = replayCommandStack.reverse
-
-  /** Record a command for replay should the user request a :replay */
-  def addReplay(cmd: String) = replayCommandStack ::= cmd
-
-  def savingReplayStack[T](body: => T): T = {
-    val saved = replayCommandStack
-    try body
-    finally replayCommandStack = saved
-  }
-  def savingReader[T](body: => T): T = {
-    val saved = in
-    try body
-    finally in = saved
-  }
-
-  /** Close the interpreter and set the var to null. */
-  def closeInterpreter() {
-    if (intp ne null) {
-      intp.close()
-      intp = null
-    }
-  }
-
-  class ILoopInterpreter extends IMain(settings, out) {
-    outer =>
-
-    override lazy val formatting = new Formatting {
-      def prompt = ILoop.this.prompt
-    }
-    override protected def parentClassLoader =
-      settings.explicitParentLoader.getOrElse( classOf[ILoop].getClassLoader )
-  }
-
-  /** Create a new interpreter. */
-  def createInterpreter() {
-    if (addedClasspath != "")
-      settings.classpath append addedClasspath
-
-    intp = new ILoopInterpreter
-  }
-
-  /** print a friendly help message */
-  def helpCommand(line: String): Result = {
-    if (line == "") helpSummary()
-    else uniqueCommand(line) match {
-      case Some(lc) => echo("\n" + lc.longHelp)
-      case _        => ambiguousError(line)
-    }
-  }
-  private def helpSummary() = {
-    val usageWidth  = commands map (_.usageMsg.length) max
-    val formatStr   = "%-" + usageWidth + "s %s %s"
-
-    echo("All commands can be abbreviated, e.g. :he instead of :help.")
-    echo("Those marked with a * have more detailed help, e.g. :help imports.\n")
-
-    commands foreach { cmd =>
-      val star = if (cmd.hasLongHelp) "*" else " "
-      echo(formatStr.format(cmd.usageMsg, star, cmd.help))
-    }
-  }
-  private def ambiguousError(cmd: String): Result = {
-    matchingCommands(cmd) match {
-      case Nil  => echo(cmd + ": no such command.  Type :help for help.")
-      case xs   => echo(cmd + " is ambiguous: did you mean " + xs.map(":" + _.name).mkString(" or ") + "?")
-    }
-    Result(true, None)
-  }
-  private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd)
-  private def uniqueCommand(cmd: String): Option[LoopCommand] = {
-    // this lets us add commands willy-nilly and only requires enough of the command to disambiguate
-    matchingCommands(cmd) match {
-      case List(x)  => Some(x)
-      // exact match OK even if otherwise appears ambiguous
-      case xs       => xs find (_.name == cmd)
-    }
-  }
-
-  /** Show the history */
-  lazy val historyCommand = new LoopCommand("history", "show the history (optional num is commands to show)") {
-    override def usage = "[num]"
-    def defaultLines = 20
-
-    def apply(line: String): Result = {
-      if (history eq NoHistory)
-        return "No history available."
-
-      val xs      = words(line)
-      val current = history.index
-      val count   = try xs.head.toInt catch { case _: Exception => defaultLines }
-      val lines   = history.asStrings takeRight count
-      val offset  = current - lines.size + 1
-
-      for ((line, index) <- lines.zipWithIndex)
-        echo("%3d  %s".format(index + offset, line))
-    }
-  }
-
-  // Used when you know you are most likely breaking into the middle
-  // of a line being typed.  This softens the blow.
-  protected def echoAndRefresh(msg: String) = {
-    echo("\n" + msg)
-    in.redrawLine()
-  }
-  protected def echo(msg: String) = {
-    out println msg
-    out.flush()
-  }
-  protected def echoNoNL(msg: String) = {
-    out print msg
-    out.flush()
-  }
-
-  /** Search the history */
-  def searchHistory(_cmdline: String) {
-    val cmdline = _cmdline.toLowerCase
-    val offset  = history.index - history.size + 1
-
-    for ((line, index) <- history.asStrings.zipWithIndex ; if line.toLowerCase contains cmdline)
-      echo("%d %s".format(index + offset, line))
-  }
-
-  private var currentPrompt = Properties.shellPromptString
-  def setPrompt(prompt: String) = currentPrompt = prompt
-  /** Prompt to print when awaiting input */
-  def prompt = currentPrompt
-
-  import LoopCommand.{ cmd, nullary }
-
-  /** Standard commands **/
-  lazy val standardCommands = List(
-    cmd("cp", "<path>", "add a jar or directory to the classpath", addClasspath),
-    cmd("help", "[command]", "print this summary or command-specific help", helpCommand),
-    historyCommand,
-    cmd("h?", "<string>", "search the history", searchHistory),
-    cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand),
-    cmd("implicits", "[-v]", "show the implicits in scope", implicitsCommand),
-    cmd("javap", "<path|class>", "disassemble a file or class name", javapCommand),
-    cmd("load", "<path>", "load and interpret a Scala file", loadCommand),
-    nullary("paste", "enter paste mode: all input up to ctrl-D compiled together", pasteCommand),
-    nullary("power", "enable power user mode", powerCmd),
-    nullary("quit", "exit the interpreter", () => Result(false, None)),
-    nullary("replay", "reset execution and replay all previous commands", replay),
-    nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand),
-    shCommand,
-    nullary("silent", "disable/enable automatic printing of results", verbosity),
-    cmd("type", "[-v] <expr>", "display the type of an expression without evaluating it", typeCommand),
-    nullary("warnings", "show the suppressed warnings from the most recent line which had any", warningsCommand)
-  )
-
-  /** Power user commands */
-  lazy val powerCommands: List[LoopCommand] = List(
-    cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand)
-  )
-
-  private def dumpCommand(): Result = {
-    echo("" + power)
-    history.asStrings takeRight 30 foreach echo
-    in.redrawLine()
-  }
-  private def valsCommand(): Result = power.valsDescription
-
-  private val typeTransforms = List(
-    "scala.collection.immutable." -> "immutable.",
-    "scala.collection.mutable."   -> "mutable.",
-    "scala.collection.generic."   -> "generic.",
-    "java.lang."                  -> "jl.",
-    "scala.runtime."              -> "runtime."
-  )
-
-  private def importsCommand(line: String): Result = {
-    val tokens    = words(line)
-    val handlers  = intp.languageWildcardHandlers ++ intp.importHandlers
-    val isVerbose = tokens contains "-v"
-
-    handlers.filterNot(_.importedSymbols.isEmpty).zipWithIndex foreach {
-      case (handler, idx) =>
-        val (types, terms) = handler.importedSymbols partition (_.name.isTypeName)
-        val imps           = handler.implicitSymbols
-        val found          = tokens filter (handler importsSymbolNamed _)
-        val typeMsg        = if (types.isEmpty) "" else types.size + " types"
-        val termMsg        = if (terms.isEmpty) "" else terms.size + " terms"
-        val implicitMsg    = if (imps.isEmpty) "" else imps.size + " are implicit"
-        val foundMsg       = if (found.isEmpty) "" else found.mkString(" // imports: ", ", ", "")
-        val statsMsg       = List(typeMsg, termMsg, implicitMsg) filterNot (_ == "") mkString ("(", ", ", ")")
-
-        intp.reporter.printMessage("%2d) %-30s %s%s".format(
-          idx + 1,
-          handler.importString,
-          statsMsg,
-          foundMsg
-        ))
-    }
-  }
-
-  private def implicitsCommand(line: String): Result = onIntp { intp =>
-    import intp._
-    import global._
-
-    def p(x: Any) = intp.reporter.printMessage("" + x)
-
-    // If an argument is given, only show a source with that
-    // in its name somewhere.
-    val args     = line split "\\s+"
-    val filtered = intp.implicitSymbolsBySource filter {
-      case (source, syms) =>
-        (args contains "-v") || {
-          if (line == "") (source.fullName.toString != "scala.Predef")
-          else (args exists (source.name.toString contains _))
-        }
-    }
-
-    if (filtered.isEmpty)
-      return "No implicits have been imported other than those in Predef."
-
-    filtered foreach {
-      case (source, syms) =>
-        p("/* " + syms.size + " implicit members imported from " + source.fullName + " */")
-
-        // This groups the members by where the symbol is defined
-        val byOwner = syms groupBy (_.owner)
-        val sortedOwners = byOwner.toList sortBy { case (owner, _) => afterTyper(source.info.baseClasses indexOf owner) }
-
-        sortedOwners foreach {
-          case (owner, members) =>
-            // Within each owner, we cluster results based on the final result type
-            // if there are more than a couple, and sort each cluster based on name.
-            // This is really just trying to make the 100 or so implicits imported
-            // by default into something readable.
-            val memberGroups: List[List[Symbol]] = {
-              val groups = members groupBy (_.tpe.finalResultType) toList
-              val (big, small) = groups partition (_._2.size > 3)
-              val xss = (
-                (big sortBy (_._1.toString) map (_._2)) :+
-                (small flatMap (_._2))
-              )
-
-              xss map (xs => xs sortBy (_.name.toString))
-            }
-
-            val ownerMessage = if (owner == source) " defined in " else " inherited from "
-            p("  /* " + members.size + ownerMessage + owner.fullName + " */")
-
-            memberGroups foreach { group =>
-              group foreach (s => p("  " + intp.symbolDefString(s)))
-              p("")
-            }
-        }
-        p("")
-    }
-  }
-
-  private[this] lazy val platformTools: Option[File] = {
-    val jarName = "tools.jar"
-    def jarPath(path: Path) = (path / "lib" / jarName).toFile
-    def jarAt(path: Path) = {
-      val f = jarPath(path)
-      if (f.isFile) Some(f) else None
-    }
-    val jdkDir = {
-      val d = Directory(jdkHome)
-      if (d.isDirectory) Some(d) else None
-    }
-    def deeply(dir: Directory) = dir.deepFiles find (_.name == jarName)
-
-    val home    = envOrNone("JDK_HOME") orElse envOrNone("JAVA_HOME") map (p => Path(p))
-    val install = Some(Path(javaHome))
- 
-    (home flatMap jarAt)                   orElse
-    (install flatMap jarAt)                orElse
-    (install map (_.parent) flatMap jarAt) orElse
-    (jdkDir flatMap deeply)
-  } 
-  private def addToolsJarToLoader() = (
-    if (Javap isAvailable intp.classLoader) {
-      repldbg(":javap available on interpreter class path.")
-      intp.classLoader
-    } else {
-      val cl = platformTools match {
-        case Some(tools) => ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader)
-        case _           => intp.classLoader
-      }
-      if (Javap isAvailable cl) {
-        repldbg(":javap available on extended class path.")
-        cl
-      } else {
-        repldbg(s":javap unavailable: no tools.jar at $jdkHome")
-        intp.classLoader
-      }
-    }
-  )
-
-  protected def newJavap() = new JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp)) {
-    override def tryClass(path: String): Array[Byte] = {
-      val hd :: rest = path split '.' toList;
-      // If there are dots in the name, the first segment is the
-      // key to finding it.
-      if (rest.nonEmpty) {
-        intp optFlatName hd match {
-          case Some(flat) =>
-            val clazz = flat :: rest mkString NAME_JOIN_STRING
-            val bytes = super.tryClass(clazz)
-            if (bytes.nonEmpty) bytes
-            else super.tryClass(clazz + MODULE_SUFFIX_STRING)
-          case _          => super.tryClass(path)
-        }
-      }
-      else {
-        // Look for Foo first, then Foo$, but if Foo$ is given explicitly,
-        // we have to drop the $ to find object Foo, then tack it back onto
-        // the end of the flattened name.
-        def className  = intp flatName path
-        def moduleName = (intp flatName path.stripSuffix(MODULE_SUFFIX_STRING)) + MODULE_SUFFIX_STRING
-
-        val bytes = super.tryClass(className)
-        if (bytes.nonEmpty) bytes
-        else super.tryClass(moduleName)
-      }
-    }
-  }
-  private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap())
-
-  // Still todo: modules.
-  private def typeCommand(line0: String): Result = {
-    line0.trim match {
-      case ""                      => ":type [-v] <expression>"
-      case s if s startsWith "-v " => typeCommandInternal(s stripPrefix "-v " trim, true)
-      case s                       => typeCommandInternal(s, false)
-    }
-  }
-
-  private def warningsCommand(): Result = {
-    if (intp.lastWarnings.isEmpty)
-      "Can't find any cached warnings."
-    else
-      intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) }
-  }
-
-  private def javapCommand(line: String): Result = {
-    if (javap == null)
-      ":javap unavailable, no tools.jar at %s.  Set JDK_HOME.".format(jdkHome)
-    else if (javaVersion startsWith "1.7")
-      ":javap not yet working with java 1.7"
-    else if (line == "")
-      ":javap [-lcsvp] [path1 path2 ...]"
-    else
-      javap(words(line)) foreach { res =>
-        if (res.isError) return "Failed: " + res.value
-        else res.show()
-      }
-  }
-
-  private def wrapCommand(line: String): Result = {
-    def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T"
-    onIntp { intp =>
-      import intp._
-      import global._
-
-      words(line) match {
-        case Nil            =>
-          intp.executionWrapper match {
-            case ""   => "No execution wrapper is set."
-            case s    => "Current execution wrapper: " + s
-          }
-        case "clear" :: Nil =>
-          intp.executionWrapper match {
-            case ""   => "No execution wrapper is set."
-            case s    => intp.clearExecutionWrapper() ; "Cleared execution wrapper."
-          }
-        case wrapper :: Nil =>
-          intp.typeOfExpression(wrapper) match {
-            case PolyType(List(targ), MethodType(List(arg), restpe)) =>
-              intp setExecutionWrapper intp.pathToTerm(wrapper)
-              "Set wrapper to '" + wrapper + "'"
-            case tp =>
-              failMsg + "\nFound: <unknown>"
-          }
-        case _ => failMsg
-      }
-    }
-  }
-
-  private def pathToPhaseWrapper = intp.pathToTerm("$r") + ".phased.atCurrent"
-  private def phaseCommand(name: String): Result = {
-    val phased: Phased = power.phased
-    import phased.NoPhaseName
-
-    if (name == "clear") {
-      phased.set(NoPhaseName)
-      intp.clearExecutionWrapper()
-      "Cleared active phase."
-    }
-    else if (name == "") phased.get match {
-      case NoPhaseName => "Usage: :phase <expr> (e.g. typer, erasure.next, erasure+3)"
-      case ph          => "Active phase is '%s'.  (To clear, :phase clear)".format(phased.get)
-    }
-    else {
-      val what = phased.parse(name)
-      if (what.isEmpty || !phased.set(what))
-        "'" + name + "' does not appear to represent a valid phase."
-      else {
-        intp.setExecutionWrapper(pathToPhaseWrapper)
-        val activeMessage =
-          if (what.toString.length == name.length) "" + what
-          else "%s (%s)".format(what, name)
-
-        "Active phase is now: " + activeMessage
-      }
-    }
-  }
-
-  /** Available commands */
-  def commands: List[LoopCommand] = standardCommands ++ (
-    if (isReplPower) powerCommands else Nil
-  )
-
-  val replayQuestionMessage =
-    """|That entry seems to have slain the compiler.  Shall I replay
-       |your session? I can re-run each line except the last one.
-       |[y/n]
-    """.trim.stripMargin
-
-  private val crashRecovery: PartialFunction[Throwable, Boolean] = {
-    case ex: Throwable =>
-      echo(intp.global.throwableAsString(ex))
-
-      ex match {
-        case _: NoSuchMethodError | _: NoClassDefFoundError =>
-          echo("\nUnrecoverable error.")
-          throw ex
-        case _  =>
-          def fn(): Boolean =
-            try in.readYesOrNo(replayQuestionMessage, { echo("\nYou must enter y or n.") ; fn() })
-            catch { case _: RuntimeException => false }
-
-          if (fn()) replay()
-          else echo("\nAbandoning crashed session.")
-      }
-      true
-  }
-
-  /** The main read-eval-print loop for the repl.  It calls
-   *  command() for each line of input, and stops when
-   *  command() returns false.
-   */
-  def loop() {
-    def readOneLine() = {
-      out.flush()
-      in readLine prompt
-    }
-    // return false if repl should exit
-    def processLine(line: String): Boolean = {
-      if (isAsync) {
-        if (!awaitInitialized()) return false
-        runThunks()
-      }
-      if (line eq null) false               // assume null means EOF
-      else command(line) match {
-        case Result(false, _)           => false
-        case Result(_, Some(finalLine)) => addReplay(finalLine) ; true
-        case _                          => true
-      }
-    }
-    def innerLoop() {
-      if ( try processLine(readOneLine()) catch crashRecovery )
-        innerLoop()
-    }
-    innerLoop()
-  }
-
-  /** interpret all lines from a specified file */
-  def interpretAllFrom(file: File) {
-    savingReader {
-      savingReplayStack {
-        file applyReader { reader =>
-          in = SimpleReader(reader, out, false)
-          echo("Loading " + file + "...")
-          loop()
-        }
-      }
-    }
-  }
-
-  /** create a new interpreter and replay the given commands */
-  def replay() {
-    reset()
-    if (replayCommandStack.isEmpty)
-      echo("Nothing to replay.")
-    else for (cmd <- replayCommands) {
-      echo("Replaying: " + cmd)  // flush because maybe cmd will have its own output
-      command(cmd)
-      echo("")
-    }
-  }
-  def resetCommand() {
-    echo("Resetting interpreter state.")
-    if (replayCommandStack.nonEmpty) {
-      echo("Forgetting this session history:\n")
-      replayCommands foreach echo
-      echo("")
-      replayCommandStack = Nil
-    }
-    if (intp.namedDefinedTerms.nonEmpty)
-      echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", "))
-    if (intp.definedTypes.nonEmpty)
-      echo("Forgetting defined types: " + intp.definedTypes.mkString(", "))
-
-    reset()
-  }
-  def reset() {
-    intp.reset()
-    unleashAndSetPhase()
-  }
-
-  /** fork a shell and run a command */
-  lazy val shCommand = new LoopCommand("sh", "run a shell command (result is implicitly => List[String])") {
-    override def usage = "<command line>"
-    def apply(line: String): Result = line match {
-      case ""   => showUsage()
-      case _    =>
-        val toRun = classOf[ProcessResult].getName + "(" + string2codeQuoted(line) + ")"
-        intp interpret toRun
-        ()
-    }
-  }
-
-  def withFile(filename: String)(action: File => Unit) {
-    val f = File(filename)
-
-    if (f.exists) action(f)
-    else echo("That file does not exist")
-  }
-
-  def loadCommand(arg: String) = {
-    var shouldReplay: Option[String] = None
-    withFile(arg)(f => {
-      interpretAllFrom(f)
-      shouldReplay = Some(":load " + arg)
-    })
-    Result(true, shouldReplay)
-  }
-
-  def addClasspath(arg: String): Unit = {
-    val f = File(arg).normalize
-    if (f.exists) {
-      addedClasspath = ClassPath.join(addedClasspath, f.path)
-      val totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath)
-      echo("Added '%s'.  Your new classpath is:\n\"%s\"".format(f.path, totalClasspath))
-      replay()
-    }
-    else echo("The path '" + f + "' doesn't seem to exist.")
-  }
-
-  def powerCmd(): Result = {
-    if (isReplPower) "Already in power mode."
-    else enablePowerMode(false)
-  }
-  def enablePowerMode(isDuringInit: Boolean) = {
-    replProps.power setValue true
-    unleashAndSetPhase()
-    asyncEcho(isDuringInit, power.banner)
-  }
-  private def unleashAndSetPhase() {
-    if (isReplPower) {
-      power.unleash()
-      // Set the phase to "typer"
-      intp beSilentDuring phaseCommand("typer")
-    }
-  }
-
-  def asyncEcho(async: Boolean, msg: => String) {
-    if (async) asyncMessage(msg)
-    else echo(msg)
-  }
-
-  def verbosity() = {
-    val old = intp.printResults
-    intp.printResults = !old
-    echo("Switched " + (if (old) "off" else "on") + " result printing.")
-  }
-
-  /** Run one command submitted by the user.  Two values are returned:
-    * (1) whether to keep running, (2) the line to record for replay,
-    * if any. */
-  def command(line: String): Result = {
-    if (line startsWith ":") {
-      val cmd = line.tail takeWhile (x => !x.isWhitespace)
-      uniqueCommand(cmd) match {
-        case Some(lc) => lc(line.tail stripPrefix cmd dropWhile (_.isWhitespace))
-        case _        => ambiguousError(cmd)
-      }
-    }
-    else if (intp.global == null) Result(false, None)  // Notice failure to create compiler
-    else Result(true, interpretStartingWith(line))
-  }
-
-  private def readWhile(cond: String => Boolean) = {
-    Iterator continually in.readLine("") takeWhile (x => x != null && cond(x))
-  }
-
-  def pasteCommand(): Result = {
-    echo("// Entering paste mode (ctrl-D to finish)\n")
-    val code = readWhile(_ => true) mkString "\n"
-    echo("\n// Exiting paste mode, now interpreting.\n")
-    intp interpret code
-    ()
-  }
-
-  private object paste extends Pasted {
-    val ContinueString = "     | "
-    val PromptString   = "scala> "
-
-    def interpret(line: String): Unit = {
-      echo(line.trim)
-      intp interpret line
-      echo("")
-    }
-
-    def transcript(start: String) = {
-      echo("\n// Detected repl transcript paste: ctrl-D to finish.\n")
-      apply(Iterator(start) ++ readWhile(_.trim != PromptString.trim))
-    }
-  }
-  import paste.{ ContinueString, PromptString }
-
-  /** Interpret expressions starting with the first line.
-    * Read lines until a complete compilation unit is available
-    * or until a syntax error has been seen.  If a full unit is
-    * read, go ahead and interpret it.  Return the full string
-    * to be recorded for replay, if any.
-    */
-  def interpretStartingWith(code: String): Option[String] = {
-    // signal to the completion machinery that non-completion input has been received
-    in.completion.resetVerbosity()
-
-    def reallyInterpret = {
-      val reallyResult = intp.interpret(code)
-      (reallyResult, reallyResult match {
-        case IR.Error       => None
-        case IR.Success     => Some(code)
-        case IR.Incomplete  =>
-          if (in.interactive && code.endsWith("\n\n")) {
-            echo("You typed two blank lines.  Starting a new command.")
-            None
-          }
-          else in.readLine(ContinueString) match {
-            case null =>
-              // we know compilation is going to fail since we're at EOF and the
-              // parser thinks the input is still incomplete, but since this is
-              // a file being read non-interactively we want to fail.  So we send
-              // it straight to the compiler for the nice error message.
-              intp.compileString(code)
-              None
-
-            case line => interpretStartingWith(code + "\n" + line)
-          }
-      })
-    }
-
-    /** Here we place ourselves between the user and the interpreter and examine
-     *  the input they are ostensibly submitting.  We intervene in several cases:
-     *
-     *  1) If the line starts with "scala> " it is assumed to be an interpreter paste.
-     *  2) If the line starts with "." (but not ".." or "./") it is treated as an invocation
-     *     on the previous result.
-     *  3) If the Completion object's execute returns Some(_), we inject that value
-     *     and avoid the interpreter, as it's likely not valid scala code.
-     */
-    if (code == "") None
-    else if (!paste.running && code.trim.startsWith(PromptString)) {
-      paste.transcript(code)
-      None
-    }
-    else if (Completion.looksLikeInvocation(code) && intp.mostRecentVar != "") {
-      interpretStartingWith(intp.mostRecentVar + code)
-    }
-    else if (code.trim startsWith "//") {
-      // line comment, do nothing
-      None
-    }
-    else
-      reallyInterpret._2
-  }
-
-  // runs :load `file` on any files passed via -i
-  def loadFiles(settings: Settings) = settings match {
-    case settings: GenericRunnerSettings =>
-      for (filename <- settings.loadfiles.value) {
-        val cmd = ":load " + filename
-        command(cmd)
-        addReplay(cmd)
-        echo("")
-      }
-    case _ =>
-  }
-
-  /** Tries to create a JLineReader, falling back to SimpleReader:
-   *  unless settings or properties are such that it should start
-   *  with SimpleReader.
-   */
-  def chooseReader(settings: Settings): InteractiveReader = {
-    if (settings.Xnojline.value || Properties.isEmacsShell)
-      SimpleReader()
-    else try new JLineReader(
-      if (settings.noCompletion.value) NoCompletion
-      else new JLineCompletion(intp)
-    )
-    catch {
-      case ex @ (_: Exception | _: NoClassDefFoundError) =>
-        echo("Failed to created JLineReader: " + ex + "\nFalling back to SimpleReader.")
-        SimpleReader()
-    }
-  }
-  def process(settings: Settings): Boolean = savingContextLoader {
-    this.settings = settings
-    createInterpreter()
-
-    // sets in to some kind of reader depending on environmental cues
-    in = in0 match {
-      case Some(reader) => SimpleReader(reader, out, true)
-      case None         =>
-        // some post-initialization
-        chooseReader(settings) match {
-          case x: JLineReader => addThunk(x.consoleReader.postInit) ; x
-          case x              => x
-        }
-    }
-    // Bind intp somewhere out of the regular namespace where
-    // we can get at it in generated code.
-    addThunk(intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain])))
-    addThunk({
-      import scala.tools.nsc.io._
-      import Properties.userHome
-      import scala.compat.Platform.EOL
-      val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
-      if (autorun.isDefined) intp.quietRun(autorun.get)
-    })
-
-    loadFiles(settings)
-    // if the interpreter is broken on startup, go ahead and exit
-    if (intp.reporter.hasErrors)
-      return false
-
-    // This is about the illusion of snappiness.  We call initialize()
-    // which spins off a separate thread, then print the prompt and try
-    // our best to look ready.  The interlocking lazy vals tend to
-    // inter-deadlock, so we break the cycle with a single asynchronous
-    // message to an actor.
-    if (isAsync) {
-      intp initialize initializedCallback()
-      createAsyncListener() // listens for signal to run postInitialization
-    }
-    else {
-      intp.initializeSynchronous()
-      postInitialization()
-    }
-    printWelcome()
-
-    try loop()
-    catch AbstractOrMissingHandler()
-    finally closeInterpreter()
-
-    true
-  }
-
-  /** process command-line arguments and do as they request */
-  def process(args: Array[String]): Boolean = {
-    val command = new CommandLine(args.toList, echo)
-    def neededHelp(): String =
-      (if (command.settings.help.value) command.usageMsg + "\n" else "") +
-      (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "")
-
-    // if they asked for no help and command is valid, we call the real main
-    neededHelp() match {
-      case ""     => command.ok && process(command.settings)
-      case help   => echoNoNL(help) ; true
-    }
-  }
-
-  @deprecated("Use `process` instead", "2.9.0")
-  def main(settings: Settings): Unit = process(settings)
-}
-
-object ILoop {
-  implicit def loopToInterpreter(repl: ILoop): IMain = repl.intp
-  private def echo(msg: String) = Console println msg
-
-  // Designed primarily for use by test code: takes a String with a
-  // bunch of code and prints out a transcript of what it would look
-  // like if you'd just typed it into the repl.
-  def runForTranscript(code: String, settings: Settings): String = {
-    import java.io.{ BufferedReader, StringReader, OutputStreamWriter }
-
-    stringFromStream { ostream =>
-      Console.withOut(ostream) {
-        val output = new JPrintWriter(new OutputStreamWriter(ostream), true) {
-          override def write(str: String) = {
-            // completely skip continuation lines
-            if (str forall (ch => ch.isWhitespace || ch == '|')) ()
-            // print a newline on empty scala prompts
-            else if ((str contains '\n') && (str.trim == "scala> ")) super.write("\n")
-            else super.write(str)
-          }
-        }
-        val input = new BufferedReader(new StringReader(code)) {
-          override def readLine(): String = {
-            val s = super.readLine()
-            // helping out by printing the line being interpreted.
-            if (s != null)
-              output.println(s)
-            s
-          }
-        }
-        val repl = new ILoop(input, output)
-        if (settings.classpath.isDefault)
-          settings.classpath.value = sys.props("java.class.path")
-
-        repl process settings
-      }
-    }
-  }
-
-  /** Creates an interpreter loop with default settings and feeds
-   *  the given code to it as input.
-   */
-  def run(code: String, sets: Settings = new Settings): String = {
-    import java.io.{ BufferedReader, StringReader, OutputStreamWriter }
-
-    stringFromStream { ostream =>
-      Console.withOut(ostream) {
-        val input    = new BufferedReader(new StringReader(code))
-        val output   = new JPrintWriter(new OutputStreamWriter(ostream), true)
-        val repl     = new ILoop(input, output)
-
-        if (sets.classpath.isDefault)
-          sets.classpath.value = sys.props("java.class.path")
-
-        repl process sets
-      }
-    }
-  }
-  def run(lines: List[String]): String = run(lines map (_ + "\n") mkString)
-}
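
The companion's run helper above makes the whole loop scriptable. A minimal sketch of driving it programmatically; usejavacp is a real compiler option, though whether it is needed depends on how the embedding application is launched:

    import scala.tools.nsc.Settings
    import scala.tools.nsc.interpreter.ILoop

    object RunSketch {
      def main(args: Array[String]): Unit = {
        val settings = new Settings
        settings.usejavacp.value = true   // reuse the JVM classpath inside the embedded repl
        // Feeds the code to a fresh ILoop and returns the repl-style transcript.
        print(ILoop.run("val x = 1 + 1\nprintln(x)\n", settings))
      }
    }
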
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
deleted file mode 100644
index e3c0494..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.reflect.internal.util.Position
-import scala.util.control.Exception.ignoring
-import scala.tools.nsc.util.stackTraceString
-
-/**
- *  Machinery for the asynchronous initialization of the repl.
- */
-trait ILoopInit {
-  self: ILoop =>
-
-  /** Print a welcome message */
-  def printWelcome() {
-    import Properties._
-    val welcomeMsg =
-     """|Welcome to Scala %s (%s, Java %s).
-        |Type in expressions to have them evaluated.
-        |Type :help for more information.""" .
-    stripMargin.format(versionString, javaVmName, javaVersion)
-    echo(welcomeMsg)
-    replinfo("[info] started at " + new java.util.Date)
-  }
-
-  protected def asyncMessage(msg: String) {
-    if (isReplInfo || isReplPower)
-      echoAndRefresh(msg)
-  }
-
-  private val initLock = new java.util.concurrent.locks.ReentrantLock()
-  private val initCompilerCondition = initLock.newCondition() // signal the compiler is initialized
-  private val initLoopCondition = initLock.newCondition()     // signal the whole repl is initialized
-  private val initStart = System.nanoTime
-
-  private def withLock[T](body: => T): T = {
-    initLock.lock()
-    try body
-    finally initLock.unlock()
-  }
-  // a condition used to ensure serial access to the compiler.
-  @volatile private var initIsComplete = false
-  @volatile private var initError: String = null
-  private def elapsed() = "%.3f".format((System.nanoTime - initStart).toDouble / 1000000000L)
-
-  // the method to be called when the interpreter is initialized.
-  // Very important this method does nothing synchronous (i.e. do
-  // not try to use the interpreter) because until it returns, the
-  // repl's lazy val `global` is still locked.
-  protected def initializedCallback() = withLock(initCompilerCondition.signal())
-
-  // Spins off a thread which awaits a single message once the interpreter
-  // has been initialized.
-  protected def createAsyncListener() = {
-    io.spawn {
-      withLock(initCompilerCondition.await())
-      asyncMessage("[info] compiler init time: " + elapsed() + " s.")
-      postInitialization()
-    }
-  }
-
-  // called from main repl loop
-  protected def awaitInitialized(): Boolean = {
-    if (!initIsComplete)
-      withLock { while (!initIsComplete) initLoopCondition.await() }
-    if (initError != null) {
-      println("""
-        |Failed to initialize the REPL due to an unexpected error.
-        |This is a bug; please report it along with the error diagnostics printed below.
-        |%s.""".stripMargin.format(initError)
-      )
-      false
-    } else true
-  }
-  // private def warningsThunks = List(
-  //   () => intp.bind("lastWarnings", "" + typeTag[List[(Position, String)]], intp.lastWarnings _),
-  // )
-
-  protected def postInitThunks = List[Option[() => Unit]](
-    Some(intp.setContextClassLoader _),
-    if (isReplPower) Some(() => enablePowerMode(true)) else None
-  ).flatten
-  // ++ (
-  //   warningsThunks
-  // )
-  // called once after init condition is signalled
-  protected def postInitialization() {
-    try {
-      postInitThunks foreach (f => addThunk(f()))
-      runThunks()
-    } catch {
-      case ex: Throwable =>
-        initError = stackTraceString(ex)
-        throw ex
-    } finally {
-      initIsComplete = true
-
-      if (isAsync) {
-        asyncMessage("[info] total init time: " + elapsed() + " s.")
-        withLock(initLoopCondition.signal())
-      }
-    }
-  }
-  // code to be executed only after the interpreter is initialized
-  // and the lazy val `global` can be accessed without risk of deadlock.
-  private var pendingThunks: List[() => Unit] = Nil
-  protected def addThunk(body: => Unit) = synchronized {
-    pendingThunks :+= (() => body)
-  }
-  protected def runThunks(): Unit = synchronized {
-    if (pendingThunks.nonEmpty)
-      repldbg("Clearing " + pendingThunks.size + " thunks.")
-
-    while (pendingThunks.nonEmpty) {
-      val thunk = pendingThunks.head
-      pendingThunks = pendingThunks.tail
-      thunk()
-    }
-  }
-}
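
The machinery above is, at bottom, a standard lock/condition handshake: a background
thread does the slow compiler initialization, signals a Condition when it finishes, and
the main loop blocks in await() until a volatile flag flips. A minimal, self-contained
sketch of that pattern (illustrative names only, not the repl code itself):

    import java.util.concurrent.locks.ReentrantLock

    object InitHandshake {
      private val lock  = new ReentrantLock()
      private val ready = lock.newCondition()
      @volatile private var done = false

      private def withLock[T](body: => T): T = {
        lock.lock()
        try body finally lock.unlock()
      }

      // Run the slow work off the calling thread, then signal completion.
      def startInit(doWork: () => Unit): Unit = {
        val t = new Thread(new Runnable {
          def run(): Unit = {
            doWork()
            withLock { done = true; ready.signalAll() }
          }
        })
        t.setDaemon(true)
        t.start()
      }

      // Block until the background thread has signalled; the loop guards
      // against spurious wakeups.
      def awaitInit(): Unit = withLock { while (!done) ready.await() }
    }
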
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
deleted file mode 100644
index bed8570..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ /dev/null
@@ -1,1235 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Martin Odersky
- */
-
-package scala.tools.nsc
-package interpreter
-
-import Predef.{ println => _, _ }
-import util.stringFromWriter
-import scala.reflect.internal.util._
-import java.net.URL
-import scala.sys.BooleanProp
-import io.VirtualDirectory
-import scala.tools.nsc.io.AbstractFile
-import reporters._
-import symtab.Flags
-import scala.reflect.internal.Names
-import scala.tools.util.PathResolver
-import scala.tools.nsc.util.ScalaClassLoader
-import ScalaClassLoader.URLClassLoader
-import scala.tools.nsc.util.Exceptional.unwrap
-import scala.collection.{ mutable, immutable }
-import scala.util.control.Exception.{ ultimately }
-import IMain._
-import java.util.concurrent.Future
-import typechecker.Analyzer
-import scala.language.implicitConversions
-import scala.reflect.runtime.{ universe => ru }
-import scala.reflect.{ ClassTag, classTag }
-import scala.tools.reflect.StdRuntimeTags._
-
-/** directory to save .class files to */
-private class ReplVirtualDirectory(out: JPrintWriter) extends VirtualDirectory("(memory)", None) {
-  private def pp(root: AbstractFile, indentLevel: Int) {
-    val spaces = "    " * indentLevel
-    out.println(spaces + root.name)
-    if (root.isDirectory)
-      root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1))
-  }
-  // print the contents hierarchically
-  def show() = pp(this, 0)
-}
-
-/** An interpreter for Scala code.
- *
- *  The main public entry points are compile(), interpret(), and bind().
- *  The compile() method loads a complete Scala file.  The interpret() method
- *  executes one line of Scala code at the request of the user.  The bind()
- *  method binds an object to a variable that can then be used by later
- *  interpreted code.
- *
- *  The overall approach is based on compiling the requested code and then
- *  using a Java classloader and Java reflection to run the code
- *  and access its results.
- *
- *  In more detail, a single compiler instance is used
- *  to accumulate all successfully compiled or interpreted Scala code.  To
- *  "interpret" a line of code, the compiler generates a fresh object that
- *  includes the line of code and which has public member(s) to export
- *  all variables defined by that code.  To extract the result of an
- *  interpreted line to show the user, a second "result object" is created
- *  which imports the variables exported by the above object and then
- *  exports members called "$eval" and "$print". To accommodate user expressions
- *  that read from variables or methods defined in previous statements, "import"
- *  statements are used.
- *
- *  This interpreter shares the strengths and weaknesses of using the
- *  full compiler-to-Java.  The main strength is that interpreted code
- *  behaves exactly as does compiled code, including running at full speed.
- *  The main weakness is that redefining classes and methods is not handled
- *  properly, because rebinding at the Java level is technically difficult.
- *
- *  @author Moez A. Abdel-Gawad
- *  @author Lex Spoon
- */
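
A minimal embedding sketch based on the entry points described above. The use of
usejavacp is an assumption about how the host application wants to assemble the
interpreter classpath; any settings that yield a usable classpath will do.

    import scala.tools.nsc.Settings
    import scala.tools.nsc.interpreter.IMain

    object EmbedReplSketch extends App {
      val settings = new Settings()
      settings.usejavacp.value = true          // assumption: reuse the host JVM's classpath
      val intp = new IMain(settings)           // reports to Console by default

      intp.interpret("val x = 21 * 2")         // defines x; echoes "x: Int = 42"
      intp.bind("answer", "Int", 42)           // expose a host value to interpreted code
      intp.interpret("println(answer + x)")    // prints 84

      intp.close()
    }
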
-class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports {
-  imain =>
-
-  /** Leading with the eagerly evaluated.
-   */
-  val virtualDirectory: VirtualDirectory            = new ReplVirtualDirectory(out) // "directory" for classfiles
-  private var currentSettings: Settings             = initialSettings
-  private[nsc] var printResults                     = true      // whether to print result lines
-  private[nsc] var totalSilence                     = false     // whether to print anything
-  private var _initializeComplete                   = false     // compiler is initialized
-  private var _isInitialized: Future[Boolean]       = null      // set up initialization future
-  private var bindExceptions                        = true      // whether to bind the lastException variable
-  private var _executionWrapper                     = ""        // code to be wrapped around all lines
-
-  /** We're going to go to some trouble to initialize the compiler asynchronously.
-   *  It's critical that nothing call into it until it's been initialized or we will
-   *  run into unrecoverable issues, but the perceived repl startup time goes
-   *  through the roof if we wait for it.  So we initialize it with a future and
-   *  use a lazy val to ensure that any attempt to use the compiler object waits
-   *  on the future.
-   */
-  private var _classLoader: AbstractFileClassLoader = null                              // active classloader
-  private val _compiler: Global                     = newCompiler(settings, reporter)   // our private compiler
-
-  private val nextReqId = {
-    var counter = 0
-    () => { counter += 1 ; counter }
-  }
-
-  def compilerClasspath: Seq[URL] = (
-    if (isInitializeComplete) global.classPath.asURLs
-    else new PathResolver(settings).result.asURLs  // the compiler's classpath
-  )
-  def settings = currentSettings
-  def mostRecentLine = prevRequestList match {
-    case Nil      => ""
-    case req :: _ => req.originalLine
-  }
-  // Run the code body with the given boolean settings flipped to true.
-  def withoutWarnings[T](body: => T): T = beQuietDuring {
-    val saved = settings.nowarn.value
-    if (!saved)
-      settings.nowarn.value = true
-
-    try body
-    finally if (!saved) settings.nowarn.value = false
-  }
-
-  /** construct an interpreter that reports to Console */
-  def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
-  def this() = this(new Settings())
-
-  lazy val repllog: Logger = new Logger {
-    val out: JPrintWriter = imain.out
-    val isInfo: Boolean  = BooleanProp keyExists "scala.repl.info"
-    val isDebug: Boolean = BooleanProp keyExists "scala.repl.debug"
-    val isTrace: Boolean = BooleanProp keyExists "scala.repl.trace"
-  }
-  lazy val formatting: Formatting = new Formatting {
-    val prompt = Properties.shellPromptString
-  }
-  lazy val reporter: ReplReporter = new ReplReporter(this)
-
-  import formatting._
-  import reporter.{ printMessage, withoutTruncating }
-
-  // This exists mostly because using the reporter too early leads to deadlock.
-  private def echo(msg: String) { Console println msg }
-  private def _initSources = List(new BatchSourceFile("<init>", "class $repl_$init { }"))
-  private def _initialize() = {
-    try {
-      // todo. if this crashes, REPL will hang
-      new _compiler.Run() compileSources _initSources
-      _initializeComplete = true
-      true
-    }
-    catch AbstractOrMissingHandler()
-  }
-  private def tquoted(s: String) = "\"\"\"" + s + "\"\"\""
-
-  // argument is a thunk to execute after init is done
-  def initialize(postInitSignal: => Unit) {
-    synchronized {
-      if (_isInitialized == null) {
-        _isInitialized = io.spawn {
-          try _initialize()
-          finally postInitSignal
-        }
-      }
-    }
-  }
-  def initializeSynchronous(): Unit = {
-    if (!isInitializeComplete) {
-      _initialize()
-      assert(global != null, global)
-    }
-  }
-  def isInitializeComplete = _initializeComplete
-
-  /** The public compiler accessor; it goes through the initialization future. */
-  lazy val global: Global = {
-    if (isInitializeComplete) _compiler
-    else {
-      // If init hasn't been called yet you're on your own.
-      if (_isInitialized == null) {
-        repldbg("Warning: compiler accessed before init set up.  Assuming no postInit code.")
-        initialize(())
-      }
-      // blocks until initialization completes; false means catastrophic failure
-      if (_isInitialized.get()) _compiler
-      else null
-    }
-  }
-  @deprecated("Use `global` for access to the compiler instance.", "2.9.0")
-  lazy val compiler: global.type = global
-
-  import global._
-  import definitions.{ScalaPackage, JavaLangPackage, termMember, typeMember}
-  import rootMirror.{RootClass, getClassIfDefined, getModuleIfDefined, getRequiredModule, getRequiredClass}
-
-  implicit class ReplTypeOps(tp: Type) {
-    def orElse(other: => Type): Type    = if (tp ne NoType) tp else other
-    def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp)
-  }
-
-  // TODO: If we try to make naming a lazy val, we run into big time
-  // scalac unhappiness with what look like cycles.  It has not been easy to
-  // reduce, but name resolution clearly takes different paths.
-  object naming extends {
-    val global: imain.global.type = imain.global
-  } with Naming {
-    // make sure we don't overwrite their unwisely named res3 etc.
-    def freshUserTermName(): TermName = {
-      val name = newTermName(freshUserVarName())
-      if (definedNameMap contains name) freshUserTermName()
-      else name
-    }
-    def isUserTermName(name: Name) = isUserVarName("" + name)
-    def isInternalTermName(name: Name) = isInternalVarName("" + name)
-  }
-  import naming._
-
-  object deconstruct extends {
-    val global: imain.global.type = imain.global
-  } with StructuredTypeStrings
-
-  lazy val memberHandlers = new {
-    val intp: imain.type = imain
-  } with MemberHandlers
-  import memberHandlers._
-
-  /** Temporarily be quiet */
-  def beQuietDuring[T](body: => T): T = {
-    val saved = printResults
-    printResults = false
-    try body
-    finally printResults = saved
-  }
-  def beSilentDuring[T](operation: => T): T = {
-    val saved = totalSilence
-    totalSilence = true
-    try operation
-    finally totalSilence = saved
-  }
-
-  def quietRun[T](code: String) = beQuietDuring(interpret(code))
-
-  /** takes AnyRef because it may be binding a Throwable or an Exceptional */
-  private def withLastExceptionLock[T](body: => T, alt: => T): T = {
-    assert(bindExceptions, "withLastExceptionLock called incorrectly.")
-    bindExceptions = false
-
-    try     beQuietDuring(body)
-    catch   logAndDiscard("withLastExceptionLock", alt)
-    finally bindExceptions = true
-  }
-
-  def executionWrapper = _executionWrapper
-  def setExecutionWrapper(code: String) = _executionWrapper = code
-  def clearExecutionWrapper() = _executionWrapper = ""
-
-  /** interpreter settings */
-  lazy val isettings = new ISettings(this)
-
-  /** Instantiate a compiler.  Overridable. */
-  protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = {
-    settings.outputDirs setSingleOutput virtualDirectory
-    settings.exposeEmptyPackage.value = true
-    new Global(settings, reporter) with ReplGlobal {
-      override def toString: String = "<global>"
-    }
-  }
-
-  /** Parent classloader.  Overridable. */
-  protected def parentClassLoader: ClassLoader =
-    settings.explicitParentLoader.getOrElse( this.getClass.getClassLoader() )
-
-  /* A single class loader is used for all commands interpreted by this Interpreter.
-     It would also be possible to create a new class loader for each command
-     to interpret.  The advantages of the current approach are:
-
-       - Expressions are only evaluated one time.  This is especially
-         significant for I/O, e.g. "val x = Console.readLine"
-
-     The main disadvantage is:
-
-       - Objects, classes, and methods cannot be rebound.  Instead, definitions
-         shadow the old ones, and old code objects refer to the old
-         definitions.
-  */
-  def resetClassLoader() = {
-    repldbg("Setting new classloader: was " + _classLoader)
-    _classLoader = null
-    ensureClassLoader()
-  }
-  final def ensureClassLoader() {
-    if (_classLoader == null)
-      _classLoader = makeClassLoader()
-  }
-  def classLoader: AbstractFileClassLoader = {
-    ensureClassLoader()
-    _classLoader
-  }
-  private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(virtualDirectory, parent) {
-    /** Overridden here to try translating a simple name to the generated
-     *  class name if the original attempt fails.  This method is used by
-     *  getResourceAsStream as well as findClass.
-     */
-    override protected def findAbstractFile(name: String): AbstractFile = {
-      super.findAbstractFile(name) match {
-        // deadlocks on startup if we try to translate names too early
-        case null if isInitializeComplete =>
-          generatedName(name) map (x => super.findAbstractFile(x)) orNull
-        case file                         =>
-          file
-      }
-    }
-  }
-  private def makeClassLoader(): AbstractFileClassLoader =
-    new TranslatingClassLoader(parentClassLoader match {
-      case null   => ScalaClassLoader fromURLs compilerClasspath
-      case p      => new URLClassLoader(compilerClasspath, p)
-    })
-
-  def getInterpreterClassLoader() = classLoader
-
-  // Set the current Java "context" class loader to this interpreter's class loader
-  def setContextClassLoader() = classLoader.setAsContext()
-
-  /** Given a simple repl-defined name, returns the real name of
-   *  the class representing it, e.g. for "Bippy" it may return
-   *  {{{
-   *    $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$Bippy
-   *  }}}
-   */
-  def generatedName(simpleName: String): Option[String] = {
-    if (simpleName endsWith nme.MODULE_SUFFIX_STRING) optFlatName(simpleName.init) map (_ + nme.MODULE_SUFFIX_STRING)
-    else optFlatName(simpleName)
-  }
-  def flatName(id: String)    = optFlatName(id) getOrElse id
-  def optFlatName(id: String) = requestForIdent(id) map (_ fullFlatName id)
-
-  def allDefinedNames = definedNameMap.keys.toList.sorted
-  def pathToType(id: String): String = pathToName(newTypeName(id))
-  def pathToTerm(id: String): String = pathToName(newTermName(id))
-  def pathToName(name: Name): String = {
-    if (definedNameMap contains name)
-      definedNameMap(name) fullPath name
-    else name.toString
-  }
-
-  /** Most recent tree handled which wasn't wholly synthetic. */
-  private def mostRecentlyHandledTree: Option[Tree] = {
-    prevRequests.reverse foreach { req =>
-      req.handlers.reverse foreach {
-        case x: MemberDefHandler if x.definesValue && !isInternalTermName(x.name) => return Some(x.member)
-        case _ => ()
-      }
-    }
-    None
-  }
-
-  /** Stubs for work in progress. */
-  def handleTypeRedefinition(name: TypeName, old: Request, req: Request) = {
-    for (t1 <- old.simpleNameOfType(name) ; t2 <- req.simpleNameOfType(name)) {
-      repldbg("Redefining type '%s'\n  %s -> %s".format(name, t1, t2))
-    }
-  }
-
-  def handleTermRedefinition(name: TermName, old: Request, req: Request) = {
-    for (t1 <- old.compilerTypeOf get name ; t2 <- req.compilerTypeOf get name) {
-      // Printing the types here has a tendency to cause assertion errors, like
-      //   assertion failed: fatal: <refinement> has owner value x, but a class owner is required
-      // so DBG is by-name now to keep it in the family.  (It also traps the assertion error,
-      // but we don't want to unnecessarily risk hosing the compiler's internal state.)
-      repldbg("Redefining term '%s'\n  %s -> %s".format(name, t1, t2))
-    }
-  }
-
-  def recordRequest(req: Request) {
-    if (req == null || referencedNameMap == null)
-      return
-
-    prevRequests += req
-    req.referencedNames foreach (x => referencedNameMap(x) = req)
-
-    // warning about serially defining companions.  It'd be easy
-    // enough to just redefine them together but that may not always
-    // be what people want so I'm waiting until I can do it better.
-    for {
-      name   <- req.definedNames filterNot (x => req.definedNames contains x.companionName)
-      oldReq <- definedNameMap get name.companionName
-      newSym <- req.definedSymbols get name
-      oldSym <- oldReq.definedSymbols get name.companionName
-      if Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule }
-    } {
-      afterTyper(replwarn(s"warning: previously defined $oldSym is not a companion to $newSym."))
-      replwarn("Companions must be defined together; you may wish to use :paste mode for this.")
-    }
-
-    // Updating the defined name map
-    req.definedNames foreach { name =>
-      if (definedNameMap contains name) {
-        if (name.isTypeName) handleTypeRedefinition(name.toTypeName, definedNameMap(name), req)
-        else handleTermRedefinition(name.toTermName, definedNameMap(name), req)
-      }
-      definedNameMap(name) = req
-    }
-  }
-
-  private[nsc] def replwarn(msg: => String) {
-    if (!settings.nowarnings.value)
-      printMessage(msg)
-  }
-
-  def isParseable(line: String): Boolean = {
-    beSilentDuring {
-      try parse(line) match {
-        case Some(xs) => xs.nonEmpty  // parses as-is
-        case None     => true         // incomplete
-      }
-      catch { case x: Exception =>    // crashed the compiler
-        replwarn("Exception in isParseable(\"" + line + "\"): " + x)
-        false
-      }
-    }
-  }
-
-  def compileSourcesKeepingRun(sources: SourceFile*) = {
-    val run = new Run()
-    reporter.reset()
-    run compileSources sources.toList
-    (!reporter.hasErrors, run)
-  }
-
-  /** Compile an nsc SourceFile.  Returns true if there are
-   *  no compilation errors, or false otherwise.
-   */
-  def compileSources(sources: SourceFile*): Boolean =
-    compileSourcesKeepingRun(sources: _*)._1
-
-  /** Compile a string.  Returns true if there are no
-   *  compilation errors, or false otherwise.
-   */
-  def compileString(code: String): Boolean =
-    compileSources(new BatchSourceFile("<script>", code))
-
-  /** Build a request from the user. `trees` is `line` after being parsed.
-   */
-  private def buildRequest(line: String, trees: List[Tree]): Request = {
-    executingRequest = new Request(line, trees)
-    executingRequest
-  }
-
-  // rewriting "5 // foo" to "val x = { 5 // foo }" creates broken code because
-  // the close brace is commented out.  Strip single-line comments.
-  // ... but for error message output reasons this is not used, and rather than
-  // enclosing in braces it is constructed like "val x =\n5 // foo".
-  private def removeComments(line: String): String = {
-    showCodeIfDebugging(line) // as we're about to lose our // show
-    line.lines map (s => s indexOf "//" match {
-      case -1   => s
-      case idx  => s take idx
-    }) mkString "\n"
-  }
-
-  private def safePos(t: Tree, alt: Int): Int =
-    try t.pos.startOrPoint
-    catch { case _: UnsupportedOperationException => alt }
-
-  // Given an expression like 10 * 10 * 10 we receive the parent tree positioned
-  // at a '*'.  So look at each subtree and find the earliest of all positions.
-  private def earliestPosition(tree: Tree): Int = {
-    var pos = Int.MaxValue
-    tree foreach { t =>
-      pos = math.min(pos, safePos(t, Int.MaxValue))
-    }
-    pos
-  }
-
-  private def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
-    val content = indentCode(line)
-    val trees = parse(content) match {
-      case None         => return Left(IR.Incomplete)
-      case Some(Nil)    => return Left(IR.Error) // parse error or empty input
-      case Some(trees)  => trees
-    }
-    repltrace(
-      trees map (t => {
-        // [Eugene to Paul] previously it just said `t map ...`
-        // because there was an implicit conversion from Tree to a list of Trees
-        // however Martin and I have removed the conversion
-        // (it was conflicting with the new reflection API),
-        // so I had to rewrite this a bit
-        val subs = t collect { case sub => sub }
-        subs map (t0 =>
-          "  " + safePos(t0, -1) + ": " + t0.shortClass + "\n"
-        ) mkString ""
-      }) mkString "\n"
-    )
-    // If the last tree is a bare expression, pinpoint where it begins using the
-    // AST node position and snap the line off there.  Rewrite the code embodied
-    // by the last tree as a ValDef instead, so we can access the value.
-    trees.last match {
-      case _:Assign                        => // we don't want to include assignments
-      case _:TermTree | _:Ident | _:Select => // ... but do want other unnamed terms.
-        val varName  = if (synthetic) freshInternalVarName() else freshUserVarName()
-        val rewrittenLine = (
-          // In theory this would come out the same without the 1-specific test, but
-          // it's a cushion against any more sneaky parse-tree position vs. code mismatches:
-          // this way such issues will only arise on multiple-statement repl input lines,
-          // which most people don't use.
-          if (trees.size == 1) "val " + varName + " =\n" + content
-          else {
-            // The position of the last tree
-            val lastpos0 = earliestPosition(trees.last)
-            // Oh boy, the parser throws away parens so "(2+2)" is mispositioned,
-            // with increasingly hard to decipher positions as we move on to "() => 5",
-            // (x: Int) => x + 1, and more.  So I abandon attempts to finesse and just
-            // look for semicolons and newlines, which I'm sure is also buggy.
-            val (raw1, raw2) = content splitAt lastpos0
-            repldbg("[raw] " + raw1 + "   <--->   " + raw2)
-
-            val adjustment = (raw1.reverse takeWhile (ch => (ch != ';') && (ch != '\n'))).size
-            val lastpos = lastpos0 - adjustment
-
-            // the source code split at the laboriously determined position.
-            val (l1, l2) = content splitAt lastpos
-            repldbg("[adj] " + l1 + "   <--->   " + l2)
-
-            val prefix   = if (l1.trim == "") "" else l1 + ";\n"
-            // Note to self: val source needs to have this precise structure so that
-            // error messages print the user-submitted part without the "val res0 = " part.
-            val combined   = prefix + "val " + varName + " =\n" + l2
-
-            repldbg(List(
-              "    line" -> line,
-              " content" -> content,
-              "     was" -> l2,
-              "combined" -> combined) map {
-                case (label, s) => label + ": '" + s + "'"
-              } mkString "\n"
-            )
-            combined
-          }
-        )
-        // Rewriting    "foo ; bar ; 123"
-        // to           "foo ; bar ; val resXX = 123"
-        requestFromLine(rewrittenLine, synthetic) match {
-          case Right(req) => return Right(req withOriginalLine line)
-          case x          => return x
-        }
-      case _ =>
-    }
-    Right(buildRequest(line, trees))
-  }
-
-  // normalize non-public types so we don't see protected aliases like Self
-  def normalizeNonPublic(tp: Type) = tp match {
-    case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.dealias
-    case _                                                      => tp
-  }
-
-  /**
- *  Interpret one line of input. All feedback, including parse errors
- *  and evaluation results, is printed via the supplied compiler's
- *  reporter. Values defined are available for future interpreted strings.
- *
- *  The return value indicates whether the line was interpreted successfully,
- *  e.g. that there were no parse errors.
-   */
-  def interpret(line: String): IR.Result = interpret(line, false)
-  def interpretSynthetic(line: String): IR.Result = interpret(line, true)
-  def interpret(line: String, synthetic: Boolean): IR.Result = {
-    def loadAndRunReq(req: Request) = {
-      classLoader.setAsContext()
-      val (result, succeeded) = req.loadAndRun
-
-      /** To our displeasure, ConsoleReporter offers only printMessage,
-       *  which tacks a newline on the end.  Since that breaks all the
-       *  output checking, we have to take one off to balance.
-       */
-      if (succeeded) {
-        if (printResults && result != "")
-          printMessage(result stripSuffix "\n")
-        else if (isReplDebug) // show quiet-mode activity
-          printMessage(result.trim.lines map ("[quiet] " + _) mkString "\n")
-
-        // Book-keeping.  Have to record synthetic requests too,
-        // as they may have been issued for information, e.g. :type
-        recordRequest(req)
-        IR.Success
-      }
-      else {
-        // don't truncate stack traces
-        withoutTruncating(printMessage(result))
-        IR.Error
-      }
-    }
-
-    if (global == null) IR.Error
-    else requestFromLine(line, synthetic) match {
-      case Left(result) => result
-      case Right(req)   =>
-        // null indicates a disallowed statement type; otherwise compile and
-        // fail if false (implying e.g. a type error)
-        if (req == null || !req.compile) IR.Error
-        else loadAndRunReq(req)
-    }
-  }
-
-  /** Bind a specified name to a specified value.  The name may
-   *  later be used by expressions passed to interpret.
-   *
-   *  @param name      the variable name to bind
-   *  @param boundType the type of the variable, as a string
-   *  @param value     the object value to bind to it
-   *  @return          an indication of whether the binding succeeded
-   */
-  def bind(name: String, boundType: String, value: Any, modifiers: List[String] = Nil): IR.Result = {
-    val bindRep = new ReadEvalPrint()
-    val run = bindRep.compile("""
-        |object %s {
-        |  var value: %s = _
-        |  def set(x: Any) = value = x.asInstanceOf[%s]
-        |}
-      """.stripMargin.format(bindRep.evalName, boundType, boundType)
-      )
-    bindRep.callEither("set", value) match {
-      case Left(ex) =>
-        repldbg("Set failed in bind(%s, %s, %s)".format(name, boundType, value))
-        repldbg(util.stackTraceString(ex))
-        IR.Error
-
-      case Right(_) =>
-        val line = "%sval %s = %s.value".format(modifiers map (_ + " ") mkString, name, bindRep.evalPath)
-        repldbg("Interpreting: " + line)
-        interpret(line)
-    }
-  }
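
As a reading aid, this is roughly what the call bind("answer", "Int", 42) assembles
behind the scenes; the $eval and $line5 names are schematic stand-ins for whatever
the session's naming machinery actually produces.

    // compiled into the session's virtual directory:
    object $eval {
      var value: Int = _
      def set(x: Any) = value = x.asInstanceOf[Int]
    }
    // set(42) is then invoked reflectively, and this line is interpreted:
    //   val answer = $line5.$eval.value
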
-  def directBind(name: String, boundType: String, value: Any): IR.Result = {
-    val result = bind(name, boundType, value)
-    if (result == IR.Success)
-      directlyBoundNames += newTermName(name)
-    result
-  }
-  def directBind(p: NamedParam): IR.Result                                    = directBind(p.name, p.tpe, p.value)
-  def directBind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = directBind((name, value))
-
-  def rebind(p: NamedParam): IR.Result = {
-    val name     = p.name
-    val oldType  = typeOfTerm(name) orElse { return IR.Error }
-    val newType  = p.tpe
-    val tempName = freshInternalVarName()
-
-    quietRun("val %s = %s".format(tempName, name))
-    quietRun("val %s = %s.asInstanceOf[%s]".format(name, tempName, newType))
-  }
-  def quietImport(ids: String*): IR.Result = beQuietDuring(addImports(ids: _*))
-  def addImports(ids: String*): IR.Result =
-    if (ids.isEmpty) IR.Success
-    else interpret("import " + ids.mkString(", "))
-
-  def quietBind(p: NamedParam): IR.Result                               = beQuietDuring(bind(p))
-  def bind(p: NamedParam): IR.Result                                    = bind(p.name, p.tpe, p.value)
-  def bind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = bind((name, value))
-  def bindSyntheticValue(x: Any): IR.Result                             = bindValue(freshInternalVarName(), x)
-  def bindValue(x: Any): IR.Result                                      = bindValue(freshUserVarName(), x)
-  def bindValue(name: String, x: Any): IR.Result                        = bind(name, TypeStrings.fromValue(x), x)
-
-  /** Reset this interpreter, forgetting all user-specified requests. */
-  def reset() {
-    clearExecutionWrapper()
-    resetClassLoader()
-    resetAllCreators()
-    prevRequests.clear()
-    referencedNameMap.clear()
-    definedNameMap.clear()
-    virtualDirectory.clear()
-  }
-
-  /** This instance is no longer needed, so release any resources
-   *  it is using.  The reporter's output gets flushed.
-   */
-  def close() {
-    reporter.flush()
-  }
-
-  /** Here is where we:
-   *
-   *  1) Read some source code, and put it in the "read" object.
-   *  2) Evaluate the read object, and put the result in the "eval" object.
-   *  3) Create a String for human consumption, and put it in the "print" object.
-   *
-   *  Read! Eval! Print! Some of that not yet centralized here.
-   */
-  class ReadEvalPrint(lineId: Int) {
-    def this() = this(freshLineId())
-
-    private var lastRun: Run = _
-    private var evalCaught: Option[Throwable] = None
-    private var conditionalWarnings: List[ConditionalWarning] = Nil
-
-    val packageName = sessionNames.line + lineId
-    val readName    = sessionNames.read
-    val evalName    = sessionNames.eval
-    val printName   = sessionNames.print
-    val resultName  = sessionNames.result
-
-    def bindError(t: Throwable) = {
-      if (!bindExceptions) // avoid looping if already binding
-        throw t
-
-      val unwrapped = unwrap(t)
-      withLastExceptionLock[String]({
-        directBind[Throwable]("lastException", unwrapped)(tagOfThrowable, classTag[Throwable])
-        util.stackTraceString(unwrapped)
-      }, util.stackTraceString(unwrapped))
-    }
-
-    // TODO: split it out into a package object and a regular
-    // object and we can do that much less wrapping.
-    def packageDecl = "package " + packageName
-
-    def pathTo(name: String)   = packageName + "." + name
-    def packaged(code: String) = packageDecl + "\n\n" + code
-
-    def readPath  = pathTo(readName)
-    def evalPath  = pathTo(evalName)
-    def printPath = pathTo(printName)
-
-    def call(name: String, args: Any*): AnyRef = {
-      val m = evalMethod(name)
-      repldbg("Invoking: " + m)
-      if (args.nonEmpty)
-        repldbg("  with args: " + args.mkString(", "))
-
-      m.invoke(evalClass, args.map(_.asInstanceOf[AnyRef]): _*)
-    }
-
-    def callEither(name: String, args: Any*): Either[Throwable, AnyRef] =
-      try Right(call(name, args: _*))
-      catch { case ex: Throwable => Left(ex) }
-
-    def callOpt(name: String, args: Any*): Option[AnyRef] =
-      try Some(call(name, args: _*))
-      catch { case ex: Throwable => bindError(ex) ; None }
-
-    class EvalException(msg: String, cause: Throwable) extends RuntimeException(msg, cause) { }
-
-    private def evalError(path: String, ex: Throwable) =
-      throw new EvalException("Failed to load '" + path + "': " + ex.getMessage, ex)
-
-    private def load(path: String): Class[_] = {
-      try Class.forName(path, true, classLoader)
-      catch { case ex: Throwable => evalError(path, unwrap(ex)) }
-    }
-
-    lazy val evalClass = load(evalPath)
-    lazy val evalValue = callEither(resultName) match {
-      case Left(ex)      => evalCaught = Some(ex) ; None
-      case Right(result) => Some(result)
-    }
-
-    def compile(source: String): Boolean = compileAndSaveRun("<console>", source)
-
-    /** The innermost object inside the wrapper, found by
-      * following accessPath into the outer one.
-      */
-    def resolvePathToSymbol(accessPath: String): Symbol = {
-      val readRoot  = getRequiredModule(readPath)   // the outermost wrapper
-      (accessPath split '.').foldLeft(readRoot: Symbol) {
-        case (sym, "")    => sym
-        case (sym, name)  => afterTyper(termMember(sym, name))
-      }
-    }
-    /** We get a bunch of repeated warnings for reasons I haven't
-     *  entirely figured out yet.  For now, squash.
-     */
-    private def updateRecentWarnings(run: Run) {
-      def loop(xs: List[(Position, String)]): List[(Position, String)] = xs match {
-        case Nil                  => Nil
-        case ((pos, msg)) :: rest =>
-          val filtered = rest filter { case (pos0, msg0) =>
-            (msg != msg0) || (pos.lineContent.trim != pos0.lineContent.trim) || {
-              // same message and same line content after whitespace removal,
-              // but we still want to let through multiple warnings on the same
-              // line from the same run; in that case the untrimmed line content
-              // will be identical, since the whitespace indentation is unchanged.
-              (pos.lineContent == pos0.lineContent)
-            }
-          }
-          ((pos, msg)) :: loop(filtered)
-      }
-      val warnings = loop(run.allConditionalWarnings flatMap (_.warnings))
-      if (warnings.nonEmpty)
-        mostRecentWarnings = warnings
-    }
-    private def evalMethod(name: String) = evalClass.getMethods filter (_.getName == name) match {
-      case Array(method) => method
-      case xs            => sys.error("Internal error: eval object " + evalClass + ", " + xs.mkString("\n", "\n", ""))
-    }
-    private def compileAndSaveRun(label: String, code: String) = {
-      showCodeIfDebugging(code)
-      val (success, run) = compileSourcesKeepingRun(new BatchSourceFile(label, packaged(code)))
-      updateRecentWarnings(run)
-      lastRun = run
-      success
-    }
-  }
-
-  /** One line of code submitted by the user for interpretation */
-  // private
-  class Request(val line: String, val trees: List[Tree]) {
-    val reqId = nextReqId()
-    val lineRep = new ReadEvalPrint()
-
-    private var _originalLine: String = null
-    def withOriginalLine(s: String): this.type = { _originalLine = s ; this }
-    def originalLine = if (_originalLine == null) line else _originalLine
-
-    /** handlers for each tree in this request */
-    val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _)
-    def defHandlers = handlers collect { case x: MemberDefHandler => x }
-
-    /** all (public) names defined by these statements */
-    val definedNames = handlers flatMap (_.definedNames)
-
-    /** list of names used by this expression */
-    val referencedNames: List[Name] = handlers flatMap (_.referencedNames)
-
-    /** def and val names */
-    def termNames = handlers flatMap (_.definesTerm)
-    def typeNames = handlers flatMap (_.definesType)
-    def definedOrImported = handlers flatMap (_.definedOrImported)
-    def definedSymbolList = defHandlers flatMap (_.definedSymbols)
-
-    def definedTypeSymbol(name: String) = definedSymbols(newTypeName(name))
-    def definedTermSymbol(name: String) = definedSymbols(newTermName(name))
-
-    /** Code to import bound names from previous lines - accessPath is code to
-      * append to objectName to access anything bound by request.
-      */
-    val ComputedImports(importsPreamble, importsTrailer, accessPath) =
-      importsCode(referencedNames.toSet)
-
-    /** Code to access a variable with the specified name */
-    def fullPath(vname: String) = (
-      lineRep.readPath + accessPath + ".`%s`".format(vname)
-    )
-    /** Same as fullPath, but after it has been flattened, so:
-     *  $line5.$iw.$iw.$iw.Bippy      // fullPath
-     *  $line5.$iw$$iw$$iw$Bippy      // fullFlatName
-     */
-    def fullFlatName(name: String) =
-      lineRep.readPath + accessPath.replace('.', '$') + nme.NAME_JOIN_STRING + name
-
-    /** The unmangled symbol name, but supplemented with line info. */
-    def disambiguated(name: Name): String = name + " (in " + lineRep + ")"
-
-    /** Code to access a variable with the specified name */
-    def fullPath(vname: Name): String = fullPath(vname.toString)
-
-    /** the line of code to compute */
-    def toCompute = line
-
-    /** generate the source code for the object that computes this request */
-    private object ObjectSourceCode extends CodeAssembler[MemberHandler] {
-      def path = pathToTerm("$intp")
-      def envLines = {
-        if (!isReplPower) Nil // power mode only for now
-        // $intp is not bound; punt, but include the line.
-        else if (path == "$intp") List(
-          "def $line = " + tquoted(originalLine),
-          "def $trees = Nil"
-        )
-        else List(
-          "def $line  = " + tquoted(originalLine),
-          "def $req = %s.requestForReqId(%s).orNull".format(path, reqId),
-          "def $trees = if ($req eq null) Nil else $req.trees".format(lineRep.readName, path, reqId)
-        )
-      }
-
-      val preamble = """
-        |object %s {
-        |%s%s%s
-      """.stripMargin.format(lineRep.readName, envLines.map("  " + _ + ";\n").mkString, importsPreamble, indentCode(toCompute))
-      val postamble = importsTrailer + "\n}"
-      val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
-    }
-
-    private object ResultObjectSourceCode extends CodeAssembler[MemberHandler] {
-      /** We only want to generate this code when the result
-       *  is a value which can be referred to as-is.
-       */
-      val evalResult =
-        if (!handlers.last.definesValue) ""
-        else handlers.last.definesTerm match {
-          case Some(vname) if typeOf contains vname =>
-            "lazy val %s = %s".format(lineRep.resultName, fullPath(vname))
-          case _  => ""
-        }
-      // first line evaluates the object to make sure its constructor is run
-      // initial "" so the later concatenation code can uniformly start with +
-      val preamble = """
-      |object %s {
-      |  %s
-      |  val %s: String = %s {
-      |    %s
-      |    (""
-      """.stripMargin.format(
-        lineRep.evalName, evalResult, lineRep.printName,
-        executionWrapper, lineRep.readName + accessPath
-      )
-
-      val postamble = """
-      |    )
-      |  }
-      |}
-      """.stripMargin
-      val generate = (m: MemberHandler) => m resultExtractionCode Request.this
-    }
-
-    // get it
-    def getEvalTyped[T] : Option[T] = getEval map (_.asInstanceOf[T])
-    def getEval: Option[AnyRef] = {
-      // ensure it has been compiled
-      compile
-      // try to load it and call the value method
-      lineRep.evalValue filterNot (_ == null)
-    }
-
-    /** Compile the object file.  Returns whether the compilation succeeded.
-     *  If all goes well, the "types" map is computed. */
-    lazy val compile: Boolean = {
-      // error counting is wrong, hence interpreter may overlook failure - so we reset
-      reporter.reset()
-
-      // compile the object containing the user's code
-      lineRep.compile(ObjectSourceCode(handlers)) && {
-        // extract and remember types
-        typeOf
-        typesOfDefinedTerms
-
-        // Assign symbols to the original trees
-        // TODO - just use the new trees.
-        defHandlers foreach { dh =>
-          val name = dh.member.name
-          definedSymbols get name foreach { sym =>
-            dh.member setSymbol sym
-            repldbg("Set symbol of " + name + " to " + sym.defString)
-          }
-        }
-
-        // compile the result-extraction object
-        withoutWarnings(lineRep compile ResultObjectSourceCode(handlers))
-      }
-    }
-
-    lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath)
-    def applyToResultMember[T](name: Name, f: Symbol => T) = afterTyper(f(resultSymbol.info.nonPrivateDecl(name)))
-
-    /* typeOf lookup with encoding */
-    def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString)))
-    def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbol.simpleName)
-
-    private def typeMap[T](f: Type => T) =
-      mapFrom[Name, Name, T](termNames ++ typeNames)(x => f(cleanMemberDecl(resultSymbol, x)))
-
-    /** Types of variables defined by this request. */
-    lazy val compilerTypeOf = typeMap[Type](x => x) withDefaultValue NoType
-    /** String representations of same. */
-    lazy val typeOf         = typeMap[String](tp => afterTyper(tp.toString))
-
-    // lazy val definedTypes: Map[Name, Type] = {
-    //   typeNames map (x => x -> afterTyper(resultSymbol.info.nonPrivateDecl(x).tpe)) toMap
-    // }
-    lazy val definedSymbols = (
-      termNames.map(x => x -> applyToResultMember(x, x => x)) ++
-      typeNames.map(x => x -> compilerTypeOf(x).typeSymbolDirect)
-    ).toMap[Name, Symbol] withDefaultValue NoSymbol
-
-    lazy val typesOfDefinedTerms = mapFrom[Name, Name, Type](termNames)(x => applyToResultMember(x, _.tpe))
-
-    /** load and run the code using reflection */
-    def loadAndRun: (String, Boolean) = {
-      try   { ("" + (lineRep call sessionNames.print), true) }
-      catch { case ex: Throwable => (lineRep.bindError(ex), false) }
-    }
-
-    override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
-  }
-
-  /** Returns the name of the most recent interpreter result.
-   *  Mostly this exists so you can conveniently invoke methods on
-   *  the previous result.
-   */
-  def mostRecentVar: String =
-    if (mostRecentlyHandledTree.isEmpty) ""
-    else "" + (mostRecentlyHandledTree.get match {
-      case x: ValOrDefDef           => x.name
-      case Assign(Ident(name), _)   => name
-      case ModuleDef(_, name, _)    => name
-      case _                        => naming.mostRecentVar
-    })
-
-  private var mostRecentWarnings: List[(global.Position, String)] = Nil
-  def lastWarnings = mostRecentWarnings
-
-  def treesForRequestId(id: Int): List[Tree] =
-    requestForReqId(id).toList flatMap (_.trees)
-
-  def requestForReqId(id: Int): Option[Request] =
-    if (executingRequest != null && executingRequest.reqId == id) Some(executingRequest)
-    else prevRequests find (_.reqId == id)
-
-  def requestForName(name: Name): Option[Request] = {
-    assert(definedNameMap != null, "definedNameMap is null")
-    definedNameMap get name
-  }
-
-  def requestForIdent(line: String): Option[Request] =
-    requestForName(newTermName(line)) orElse requestForName(newTypeName(line))
-
-  def requestHistoryForName(name: Name): List[Request] =
-    prevRequests.toList.reverse filter (_.definedNames contains name)
-
-  def definitionForName(name: Name): Option[MemberHandler] =
-    requestForName(name) flatMap { req =>
-      req.handlers find (_.definedNames contains name)
-    }
-
-  def valueOfTerm(id: String): Option[AnyRef] =
-    requestForName(newTermName(id)) flatMap (_.getEval)
-
-  def classOfTerm(id: String): Option[JClass] =
-    valueOfTerm(id) map (_.getClass)
-
-  def typeOfTerm(id: String): Type = newTermName(id) match {
-    case nme.ROOTPKG  => RootClass.tpe
-    case name         => requestForName(name).fold(NoType: Type)(_ compilerTypeOf name)
-  }
-
-  def symbolOfType(id: String): Symbol =
-    requestForName(newTypeName(id)).fold(NoSymbol: Symbol)(_ definedTypeSymbol id)
-
-  def symbolOfTerm(id: String): Symbol =
-    requestForIdent(newTermName(id)).fold(NoSymbol: Symbol)(_ definedTermSymbol id)
-
-  def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = {
-    classOfTerm(id) flatMap { clazz =>
-      clazz.supers find (!_.isScalaAnonymous) map { nonAnon =>
-        (nonAnon, runtimeTypeOfTerm(id))
-      }
-    }
-  }
-
-  def runtimeTypeOfTerm(id: String): Type = {
-    typeOfTerm(id) andAlso { tpe =>
-      val clazz      = classOfTerm(id) getOrElse { return NoType }
-      val staticSym  = tpe.typeSymbol
-      val runtimeSym = getClassIfDefined(clazz.getName)
-
-      if ((runtimeSym != NoSymbol) && (runtimeSym != staticSym) && (runtimeSym isSubClass staticSym))
-        runtimeSym.info
-      else NoType
-    }
-  }
-  def cleanMemberDecl(owner: Symbol, member: Name): Type = afterTyper {
-    normalizeNonPublic {
-      owner.info.nonPrivateDecl(member).tpe match {
-        case NullaryMethodType(tp) => tp
-        case tp                    => tp
-      }
-    }
-  }
-
-  object exprTyper extends {
-    val repl: IMain.this.type = imain
-  } with ExprTyper { }
-
-  def parse(line: String): Option[List[Tree]] = exprTyper.parse(line)
-
-  def symbolOfLine(code: String): Symbol =
-    exprTyper.symbolOfLine(code)
-
-  def typeOfExpression(expr: String, silent: Boolean = true): Type =
-    exprTyper.typeOfExpression(expr, silent)
-
-  protected def onlyTerms(xs: List[Name]) = xs collect { case x: TermName => x }
-  protected def onlyTypes(xs: List[Name]) = xs collect { case x: TypeName => x }
-
-  def definedTerms      = onlyTerms(allDefinedNames) filterNot isInternalTermName
-  def definedTypes      = onlyTypes(allDefinedNames)
-  def definedSymbols    = prevRequestList.flatMap(_.definedSymbols.values).toSet[Symbol]
-  def definedSymbolList = prevRequestList flatMap (_.definedSymbolList) filterNot (s => isInternalTermName(s.name))
-
-  // Terms with user-given names (i.e. not res0 and not synthetic)
-  def namedDefinedTerms = definedTerms filterNot (x => isUserVarName("" + x) || directlyBoundNames(x))
-
-  private def findName(name: Name) = definedSymbols find (_.name == name) getOrElse NoSymbol
-
-  /** Translate a repl-defined identifier into a Symbol.
-   */
-  def apply(name: String): Symbol =
-    types(name) orElse terms(name)
-
-  def types(name: String): Symbol = {
-    val tpname = newTypeName(name)
-    findName(tpname) orElse getClassIfDefined(tpname)
-  }
-  def terms(name: String): Symbol = {
-    val termname = newTermName(name)
-    findName(termname) orElse getModuleIfDefined(termname)
-  }
-  // [Eugene to Paul] possibly you could make use of TypeTags here
-  def types[T: ClassTag] : Symbol = types(classTag[T].runtimeClass.getName)
-  def terms[T: ClassTag] : Symbol = terms(classTag[T].runtimeClass.getName)
-  def apply[T: ClassTag] : Symbol = apply(classTag[T].runtimeClass.getName)
-
-  def classSymbols  = allDefSymbols collect { case x: ClassSymbol => x }
-  def methodSymbols = allDefSymbols collect { case x: MethodSymbol => x }
-
-  /** the previous requests this interpreter has processed */
-  private var executingRequest: Request = _
-  private val prevRequests       = mutable.ListBuffer[Request]()
-  private val referencedNameMap  = mutable.Map[Name, Request]()
-  private val definedNameMap     = mutable.Map[Name, Request]()
-  private val directlyBoundNames = mutable.Set[Name]()
-
-  def allHandlers    = prevRequestList flatMap (_.handlers)
-  def allDefHandlers = allHandlers collect { case x: MemberDefHandler => x }
-  def allDefSymbols  = allDefHandlers map (_.symbol) filter (_ ne NoSymbol)
-
-  def lastRequest         = if (prevRequests.isEmpty) null else prevRequests.last
-  def prevRequestList     = prevRequests.toList
-  def allSeenTypes        = prevRequestList flatMap (_.typeOf.values.toList) distinct
-  def allImplicits        = allHandlers filter (_.definesImplicit) flatMap (_.definedNames)
-  def importHandlers      = allHandlers collect { case x: ImportHandler => x }
-
-  def visibleTermNames: List[Name] = definedTerms ++ importedTerms distinct
-
-  /** Another entry point for tab-completion, ids in scope */
-  def unqualifiedIds = visibleTermNames map (_.toString) filterNot (_ contains "$") sorted
-
-  /** Parse the ScalaSig to find type aliases */
-  def aliasForType(path: String) = ByteCode.aliasForType(path)
-
-  def withoutUnwrapping(op: => Unit): Unit = {
-    val saved = isettings.unwrapStrings
-    isettings.unwrapStrings = false
-    try op
-    finally isettings.unwrapStrings = saved
-  }
-
-  def symbolDefString(sym: Symbol) = {
-    TypeStrings.quieter(
-      afterTyper(sym.defString),
-      sym.owner.name + ".this.",
-      sym.owner.fullName + "."
-    )
-  }
-
-  def showCodeIfDebugging(code: String) {
-    /** Secret bookcase entrance for repl debuggers: end the line
-     *  with "// show" and see what's going on.
-     */
-    def isShow    = code.lines exists (_.trim endsWith "// show")
-    def isShowRaw = code.lines exists (_.trim endsWith "// raw")
-
-    // old style
-    beSilentDuring(parse(code)) foreach { ts =>
-      ts foreach { t =>
-        withoutUnwrapping(repldbg(asCompactString(t)))
-      }
-    }
-  }
-
-  // debugging
-  def debugging[T](msg: String)(res: T) = {
-    repldbg(msg + " " + res)
-    res
-  }
-}
-
-/** Utility methods for the Interpreter. */
-object IMain {
-  // The two name forms this is catching are the two sides of this assignment:
-  //
-  // $line3.$read.$iw.$iw.Bippy =
-  //   $line3.$read$$iw$$iw$Bippy@4a6a00ca
-  private def removeLineWrapper(s: String) = s.replaceAll("""\$line\d+[./]\$(read|eval|print)[$.]""", "")
-  private def removeIWPackages(s: String)  = s.replaceAll("""\$(iw|read|eval|print)[$.]""", "")
-  def stripString(s: String)               = removeIWPackages(removeLineWrapper(s))
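
A worked example of the two rewrites above, using the same regexes; the input string
is hypothetical but has the shape shown in the comment.

    val mangled  = "$line3.$read$$iw$$iw$Bippy@4a6a00ca"
    val stripped = mangled
      .replaceAll("""\$line\d+[./]\$(read|eval|print)[$.]""", "")  // drops "$line3.$read$"
      .replaceAll("""\$(iw|read|eval|print)[$.]""", "")            // drops both "$iw$" runs
    // stripped == "Bippy@4a6a00ca", the same result as IMain.stripString(mangled)
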
-
-  trait CodeAssembler[T] {
-    def preamble: String
-    def generate: T => String
-    def postamble: String
-
-    def apply(contributors: List[T]): String = stringFromWriter { code =>
-      code println preamble
-      contributors map generate foreach (code println _)
-      code println postamble
-    }
-  }
-
-  trait StrippingWriter {
-    def isStripping: Boolean
-    def stripImpl(str: String): String
-    def strip(str: String): String = if (isStripping) stripImpl(str) else str
-  }
-  trait TruncatingWriter {
-    def maxStringLength: Int
-    def isTruncating: Boolean
-    def truncate(str: String): String = {
-      if (isTruncating && (maxStringLength != 0 && str.length > maxStringLength))
-        (str take maxStringLength - 3) + "..."
-      else str
-    }
-  }
-  abstract class StrippingTruncatingWriter(out: JPrintWriter)
-          extends JPrintWriter(out)
-             with StrippingWriter
-             with TruncatingWriter {
-    self =>
-
-    def clean(str: String): String = truncate(strip(str))
-    override def write(str: String) = super.write(clean(str))
-  }
-  class ReplStrippingWriter(intp: IMain) extends StrippingTruncatingWriter(intp.out) {
-    import intp._
-    def maxStringLength    = isettings.maxPrintString
-    def isStripping        = isettings.unwrapStrings
-    def isTruncating       = reporter.truncationOK
-
-    def stripImpl(str: String): String = naming.unmangle(str)
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala
deleted file mode 100644
index a8f77af..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Alexander Spoon
- */
-
-package scala.tools.nsc
-package interpreter
-
-/** Settings for the interpreter
- *
- * @version 1.0
- * @author Lex Spoon, 2007/3/24
- **/
-class ISettings(intp: IMain) {
-  /** A list of paths where :load should look */
-  var loadPath = List(".")
-
-  /** Set this to true to see repl machinery under -Yrich-exceptions.
-   */
-  var showInternalStackTraces = false
-
-  /** The maximum length of toString to use when printing the result
-   *  of an evaluation.  0 means no maximum.  If a printout requires
-   *  more than this number of characters, then the printout is
-   *  truncated.
-   */
-  var maxPrintString = replProps.maxPrintString.option.getOrElse(800)
-
-  /** The maximum number of completion candidates to print for tab
-   *  completion without requiring confirmation.
-   */
-  var maxAutoprintCompletion = 250
-
-  /** String unwrapping can be disabled if it is causing issues.
- *  Setting this to false means you will see Strings like "$iw.$iw.".
-   */
-  var unwrapStrings = true
-
-  def deprecation_=(x: Boolean) = {
-    val old = intp.settings.deprecation.value
-    intp.settings.deprecation.value = x
-    if (!old && x) println("Enabled -deprecation output.")
-    else if (old && !x) println("Disabled -deprecation output.")
-  }
-  def deprecation: Boolean = intp.settings.deprecation.value
-
-  def allSettings = Map(
-    "maxPrintString" -> maxPrintString,
-    "maxAutoprintCompletion" -> maxAutoprintCompletion,
-    "unwrapStrings" -> unwrapStrings,
-    "deprecation" -> deprecation
-  )
-
-  private def allSettingsString =
-    allSettings.toList sortBy (_._1) map { case (k, v) => "  " + k + " = " + v + "\n" } mkString
-
-  override def toString = """
-    | ISettings {
-    | %s
-    | }""".stripMargin.format(allSettingsString)
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/compiler/scala/tools/nsc/interpreter/Imports.scala
deleted file mode 100644
index 73d962b..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala
+++ /dev/null
@@ -1,195 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.collection.{ mutable, immutable }
-
-trait Imports {
-  self: IMain =>
-
-  import global._
-  import definitions.{ ScalaPackage, JavaLangPackage, PredefModule }
-  import memberHandlers._
-
-  def isNoImports = settings.noimports.value
-  def isNoPredef  = settings.nopredef.value
-
-  /** Synthetic import handlers for the language defined imports. */
-  private def makeWildcardImportHandler(sym: Symbol): ImportHandler = {
-    val hd :: tl = sym.fullName.split('.').toList map newTermName
-    val tree = Import(
-      tl.foldLeft(Ident(hd): Tree)((x, y) => Select(x, y)),
-      ImportSelector.wildList
-    )
-    tree setSymbol sym
-    new ImportHandler(tree)
-  }
-
-  /** Symbols whose contents are language-defined to be imported. */
-  def languageWildcardSyms: List[Symbol] = List(JavaLangPackage, ScalaPackage, PredefModule)
-  def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe)
-  def languageWildcardHandlers = languageWildcardSyms map makeWildcardImportHandler
-
-  def allImportedNames = importHandlers flatMap (_.importedNames)
-  def importedTerms    = onlyTerms(allImportedNames)
-  def importedTypes    = onlyTypes(allImportedNames)
-
-  /** Types which have been wildcard imported, such as:
-   *    val x = "abc" ; import x._  // type java.lang.String
-   *    import java.lang.String._   // object java.lang.String
-   *
-   *  Used by tab completion.
-   *
-   *  XXX right now this gets import x._ and import java.lang.String._,
-   *  but doesn't figure out import String._.  There's a lot of ad hoc
-   *  scope twiddling which should be swept away in favor of digging
-   *  into the compiler scopes.
-   */
-  def sessionWildcards: List[Type] = {
-    importHandlers filter (_.importsWildcard) map (_.targetType) distinct
-  }
-  def wildcardTypes = languageWildcards ++ sessionWildcards
-
-  def languageSymbols        = languageWildcardSyms flatMap membersAtPickler
-  def sessionImportedSymbols = importHandlers flatMap (_.importedSymbols)
-  def importedSymbols        = languageSymbols ++ sessionImportedSymbols
-  def importedTermSymbols    = importedSymbols collect { case x: TermSymbol => x }
-  def importedTypeSymbols    = importedSymbols collect { case x: TypeSymbol => x }
-  def implicitSymbols        = importedSymbols filter (_.isImplicit)
-
-  def importedTermNamed(name: String): Symbol =
-    importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol
-
-  /** Tuples of (source, imported symbols) in the order they were imported.
-   */
-  def importedSymbolsBySource: List[(Symbol, List[Symbol])] = {
-    val lang    = languageWildcardSyms map (sym => (sym, membersAtPickler(sym)))
-    val session = importHandlers filter (_.targetType != NoType) map { mh =>
-      (mh.targetType.typeSymbol, mh.importedSymbols)
-    }
-
-    lang ++ session
-  }
-  def implicitSymbolsBySource: List[(Symbol, List[Symbol])] = {
-    importedSymbolsBySource map {
-      case (k, vs) => (k, vs filter (_.isImplicit))
-    } filterNot (_._2.isEmpty)
-  }
-
-  /** Compute imports that allow definitions from previous
-   *  requests to be visible in a new request.  Returns
-   *  three pieces of related code:
-   *
-   *  1. An initial code fragment that should go before
-   *  the code of the new request.
-   *
-   *  2. A code fragment that should go after the code
-   *  of the new request.
-   *
-   *  3. An access path which can be traversed to access
- *  any bindings inside code wrapped by #1 and #2.
-   *
-   * The argument is a set of Names that need to be imported.
-   *
-   * Limitations: This method is not as precise as it could be.
-   * (1) It does not process wildcard imports to see what exactly
-   * they import.
-   * (2) If it imports any names from a request, it imports all
-   * of them, which is not really necessary.
-   * (3) It imports multiple same-named implicits, but only the
-   * last one imported is actually usable.
-   */
-  case class ComputedImports(prepend: String, append: String, access: String)
-  protected def importsCode(wanted: Set[Name]): ComputedImports = {
-    /** Narrow down the list of requests from which imports
-     *  should be taken.  Removes requests which cannot contribute
-     *  useful imports for the specified set of wanted names.
-     */
-    case class ReqAndHandler(req: Request, handler: MemberHandler) { }
-
-    def reqsToUse: List[ReqAndHandler] = {
-      /** Loop through a list of MemberHandlers and select which ones to keep.
-       *  'wanted' is the set of names that need to be imported.
-       */
-      def select(reqs: List[ReqAndHandler], wanted: Set[Name]): List[ReqAndHandler] = {
-        // Single symbol imports might be implicits! See bug #1752.  Rather than
-        // try to finesse this, we will mimic all imports for now.
-        def keepHandler(handler: MemberHandler) = handler match {
-          case _: ImportHandler => true
-          case x                => x.definesImplicit || (x.definedNames exists wanted)
-        }
-
-        reqs match {
-          case Nil                                    => Nil
-          case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted)
-          case rh :: rest                             =>
-            import rh.handler._
-            val newWanted = wanted ++ referencedNames -- definedNames -- importedNames
-            rh :: select(rest, newWanted)
-        }
-      }
-
-      /** Flatten the handlers out and pair each with the original request */
-      select(allReqAndHandlers reverseMap { case (r, h) => ReqAndHandler(r, h) }, wanted).reverse
-    }
-
-    val code, trailingBraces, accessPath = new StringBuilder
-    val currentImps = mutable.HashSet[Name]()
-
-    // add code for a new object to hold some imports
-    def addWrapper() {
-      val impname = nme.INTERPRETER_IMPORT_WRAPPER
-      code append "object %s {\n".format(impname)
-      trailingBraces append "}\n"
-      accessPath append ("." + impname)
-
-      currentImps.clear
-    }
-
-    addWrapper()
-
-    // loop through previous requests, adding imports for each one
-    for (ReqAndHandler(req, handler) <- reqsToUse) {
-      handler match {
-        // If the user entered an import, then just use it; add an import wrapping
-        // level if the import might conflict with some other import
-        case x: ImportHandler =>
-          if (x.importsWildcard || currentImps.exists(x.importedNames contains _))
-            addWrapper()
-
-          code append (x.member + "\n")
-
-          // give wildcard imports an import wrapper all their own
-          if (x.importsWildcard) addWrapper()
-          else currentImps ++= x.importedNames
-
-        // For other requests, import each defined name.
-        // Import them explicitly instead of with _, so that
-        // ambiguity errors will not be generated. Also, quote
-        // the name of the variable, so that we don't need to
-        // handle quoting keywords separately.
-        case x =>
-          for (imv <- x.definedNames) {
-            if (currentImps contains imv) addWrapper()
-
-            code append ("import " + (req fullPath imv) + "\n")
-            currentImps += imv
-          }
-      }
-    }
-    // add one extra wrapper, to prevent warnings in the common case of
-    // redefining the value bound in the last interpreter request.
-    addWrapper()
-    ComputedImports(code.toString, trailingBraces.toString, accessPath.toString)
-  }
-
-  private def allReqAndHandlers =
-    prevRequestList flatMap (req => req.handlers map (req -> _))
-
-  private def membersAtPickler(sym: Symbol): List[Symbol] =
-    beforePickler(sym.info.nonPrivateMembers.toList)
-}
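For illustration, a minimal sketch of the nesting that importsCode's ComputedImports describes: each addWrapper() opens another wrapper object, the trailing braces close them, and the access path strings the wrapper names together. The import paths shown are hypothetical.

    // prepend                               append   access
    object $iw {                          // "}"      ".$iw"
      import java.lang.String._           // wildcard import: gets a level of its own
      object $iw {                        // "}"      ".$iw.$iw"
        import $line1.$read.$iw.$iw.res0  // explicit import of a previously defined name
        // ...the code of the new request is spliced between prepend and append...
      }
    }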
diff --git a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
deleted file mode 100644
index 8331fdd..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stepan Koltsov
- */
-
-package scala.tools.nsc
-package interpreter
-
-import java.io.IOException
-import java.nio.channels.ClosedByInterruptException
-import scala.util.control.Exception._
-import session.History
-import InteractiveReader._
-import Properties.isMac
-
-/** Reads lines from an input stream */
-trait InteractiveReader {
-  val interactive: Boolean
-
-  def init(): Unit
-  def reset(): Unit
-
-  def history: History
-  def completion: Completion
-  def eraseLine(): Unit
-  def redrawLine(): Unit
-  def currentLine: String
-
-  def readYesOrNo(prompt: String, alt: => Boolean): Boolean = readOneKey(prompt) match {
-    case 'y'  => true
-    case 'n'  => false
-    case _    => alt
-  }
-  def readAssumingNo(prompt: String)  = readYesOrNo(prompt, false)
-  def readAssumingYes(prompt: String) = readYesOrNo(prompt, true)
-
-  protected def readOneLine(prompt: String): String
-  protected def readOneKey(prompt: String): Int
-
-  def readLine(prompt: String): String =
-    // hack necessary for OSX jvm suspension because read calls are not restarted after SIGTSTP
-    if (isMac) restartSysCalls(readOneLine(prompt), reset())
-    else readOneLine(prompt)
-}
-
-object InteractiveReader {
-  val msgEINTR = "Interrupted system call"
-  def restartSysCalls[R](body: => R, reset: => Unit): R =
-    try body catch {
-      case e: IOException if e.getMessage == msgEINTR => reset ; body
-    }
-
-  def apply(): InteractiveReader = SimpleReader()
-  @deprecated("Use `apply` instead.", "2.9.0")
-  def createDefault(): InteractiveReader = apply()
-}
-
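A small usage sketch of the EINTR workaround above; the reader construction and prompt are illustrative. restartSysCalls runs the body, and if the read is interrupted it runs the reset and retries once, which is what readLine relies on when isMac is true.

    import scala.tools.nsc.interpreter.InteractiveReader
    import InteractiveReader.restartSysCalls

    val reader = InteractiveReader()    // SimpleReader by default
    // retry a read once if it fails with "Interrupted system call"
    val line = restartSysCalls(reader.readLine("scala> "), reader.reset())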
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
deleted file mode 100644
index 219cb35..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ /dev/null
@@ -1,372 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.tools.jline._
-import scala.tools.jline.console.completer._
-import Completion._
-import scala.collection.mutable.ListBuffer
-
-// REPL completer - queries the supplied interpreter for valid
-// completions based on current contents of buffer.
-class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput {
-  val global: intp.global.type = intp.global
-  import global._
-  import definitions.{ PredefModule, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage }
-  import rootMirror.{ RootClass, getModuleIfDefined }
-  type ExecResult = Any
-  import intp.{ debugging }
-
-  // verbosity goes up with consecutive tabs
-  private var verbosity: Int = 0
-  def resetVerbosity() = verbosity = 0
-
-  def getSymbol(name: String, isModule: Boolean) = (
-    if (isModule) getModuleIfDefined(name)
-    else getModuleIfDefined(name)
-  )
-  def getType(name: String, isModule: Boolean) = getSymbol(name, isModule).tpe
-  def typeOf(name: String)                     = getType(name, false)
-  def moduleOf(name: String)                   = getType(name, true)
-
-  trait CompilerCompletion {
-    def tp: Type
-    def effectiveTp = tp match {
-      case MethodType(Nil, resType)   => resType
-      case NullaryMethodType(resType) => resType
-      case _                          => tp
-    }
-
-    // for some reason Any's members don't show up in subclasses, which
-    // we need so 5.<tab> offers asInstanceOf etc.
-    private def anyMembers = AnyClass.tpe.nonPrivateMembers
-    def anyRefMethodsToShow = Set("isInstanceOf", "asInstanceOf", "toString")
-
-    def tos(sym: Symbol): String = sym.decodedName
-    def memberNamed(s: String) = afterTyper(effectiveTp member newTermName(s))
-    def hasMethod(s: String) = memberNamed(s).isMethod
-
-    // XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the
-    // compiler to crash for reasons not yet known.
-    def members     = afterTyper((effectiveTp.nonPrivateMembers.toList ++ anyMembers) filter (_.isPublic))
-    def methods     = members.toList filter (_.isMethod)
-    def packages    = members.toList filter (_.isPackage)
-    def aliases     = members.toList filter (_.isAliasType)
-
-    def memberNames   = members map tos
-    def methodNames   = methods map tos
-    def packageNames  = packages map tos
-    def aliasNames    = aliases map tos
-  }
-
-  object NoTypeCompletion extends TypeMemberCompletion(NoType) {
-    override def memberNamed(s: String) = NoSymbol
-    override def members = Nil
-    override def follow(s: String) = None
-    override def alternativesFor(id: String) = Nil
-  }
-
-  object TypeMemberCompletion {
-    def apply(tp: Type, runtimeType: Type, param: NamedParam): TypeMemberCompletion = {
-      new TypeMemberCompletion(tp) {
-        var upgraded = false
-        lazy val upgrade = {
-          intp rebind param
-          intp.reporter.printMessage("\nRebinding stable value %s from %s to %s".format(param.name, tp, param.tpe))
-          upgraded = true
-          new TypeMemberCompletion(runtimeType)
-        }
-        override def completions(verbosity: Int) = {
-          super.completions(verbosity) ++ (
-            if (verbosity == 0) Nil
-            else upgrade.completions(verbosity)
-          )
-        }
-        override def follow(s: String) = super.follow(s) orElse {
-          if (upgraded) upgrade.follow(s)
-          else None
-        }
-        override def alternativesFor(id: String) = super.alternativesFor(id) ++ (
-          if (upgraded) upgrade.alternativesFor(id)
-          else Nil
-        ) distinct
-      }
-    }
-    def apply(tp: Type): TypeMemberCompletion = {
-      if (tp eq NoType) NoTypeCompletion
-      else if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
-      else new TypeMemberCompletion(tp)
-    }
-    def imported(tp: Type) = new ImportCompletion(tp)
-  }
-
-  class TypeMemberCompletion(val tp: Type) extends CompletionAware
-                                              with CompilerCompletion {
-    def excludeEndsWith: List[String] = Nil
-    def excludeStartsWith: List[String] = List("<") // <byname>, <repeated>, etc.
-    def excludeNames: List[String] = (anyref.methodNames filterNot anyRefMethodsToShow) :+ "_root_"
-
-    def methodSignatureString(sym: Symbol) = {
-      IMain stripString afterTyper(new MethodSymbolOutput(sym).methodString())
-    }
-
-    def exclude(name: String): Boolean = (
-      (name contains "$") ||
-      (excludeNames contains name) ||
-      (excludeEndsWith exists (name endsWith _)) ||
-      (excludeStartsWith exists (name startsWith _))
-    )
-    def filtered(xs: List[String]) = xs filterNot exclude distinct
-
-    def completions(verbosity: Int) =
-      debugging(tp + " completions ==> ")(filtered(memberNames))
-
-    override def follow(s: String): Option[CompletionAware] =
-      debugging(tp + " -> '" + s + "' ==> ")(Some(TypeMemberCompletion(memberNamed(s).tpe)) filterNot (_ eq NoTypeCompletion))
-
-    override def alternativesFor(id: String): List[String] =
-      debugging(id + " alternatives ==> ") {
-        val alts = members filter (x => x.isMethod && tos(x) == id) map methodSignatureString
-
-        if (alts.nonEmpty) "" :: alts else Nil
-      }
-
-    override def toString = "%s (%d members)".format(tp, members.size)
-  }
-
-  class PackageCompletion(tp: Type) extends TypeMemberCompletion(tp) {
-    override def excludeNames = anyref.methodNames
-  }
-
-  class LiteralCompletion(lit: Literal) extends TypeMemberCompletion(lit.value.tpe) {
-    override def completions(verbosity: Int) = verbosity match {
-      case 0    => filtered(memberNames)
-      case _    => memberNames
-    }
-  }
-
-  class ImportCompletion(tp: Type) extends TypeMemberCompletion(tp) {
-    override def completions(verbosity: Int) = verbosity match {
-      case 0    => filtered(members filterNot (_.isSetter) map tos)
-      case _    => super.completions(verbosity)
-    }
-  }
-
-  // not for completion but for excluding
-  object anyref extends TypeMemberCompletion(AnyRefClass.tpe) { }
-
-  // the unqualified vals/defs/etc visible in the repl
-  object ids extends CompletionAware {
-    override def completions(verbosity: Int) = intp.unqualifiedIds ++ List("classOf") //, "_root_")
-    // now we use the compiler for everything.
-    override def follow(id: String): Option[CompletionAware] = {
-      if (!completions(0).contains(id))
-        return None
-
-      val tpe = intp typeOfExpression id
-      if (tpe == NoType)
-        return None
-
-      def default = Some(TypeMemberCompletion(tpe))
-
-      // only rebinding vals in power mode for now.
-      if (!isReplPower) default
-      else intp runtimeClassAndTypeOfTerm id match {
-        case Some((clazz, runtimeType)) =>
-          val sym = intp.symbolOfTerm(id)
-          if (sym.isStable) {
-            val param = new NamedParam.Untyped(id, intp valueOfTerm id getOrElse null)
-            Some(TypeMemberCompletion(tpe, runtimeType, param))
-          }
-          else default
-        case _        =>
-          default
-      }
-    }
-    override def toString = "<repl ids> (%s)".format(completions(0).size)
-  }
-
-  // user-issued wildcard imports like "import global._" or "import String._"
-  private def imported = intp.sessionWildcards map TypeMemberCompletion.imported
-
-  // literal Ints, Strings, etc.
-  object literals extends CompletionAware {
-    def simpleParse(code: String): Tree = newUnitParser(code).templateStats().last
-    def completions(verbosity: Int) = Nil
-
-    override def follow(id: String) = simpleParse(id) match {
-      case x: Literal   => Some(new LiteralCompletion(x))
-      case _            => None
-    }
-  }
-
-  // top level packages
-  object rootClass extends TypeMemberCompletion(RootClass.tpe) {
-    override def completions(verbosity: Int) = super.completions(verbosity) :+ "_root_"
-    override def follow(id: String) = id match {
-      case "_root_" => Some(this)
-      case _        => super.follow(id)
-    }
-  }
-  // members of Predef
-  object predef extends TypeMemberCompletion(PredefModule.tpe) {
-    override def excludeEndsWith    = super.excludeEndsWith ++ List("Wrapper", "ArrayOps")
-    override def excludeStartsWith  = super.excludeStartsWith ++ List("wrap")
-    override def excludeNames       = anyref.methodNames
-
-    override def exclude(name: String) = super.exclude(name) || (
-      (name contains "2")
-    )
-
-    override def completions(verbosity: Int) = verbosity match {
-      case 0    => Nil
-      case _    => super.completions(verbosity)
-    }
-  }
-  // members of scala.*
-  object scalalang extends PackageCompletion(ScalaPackage.tpe) {
-    def arityClasses = List("Product", "Tuple", "Function")
-    def skipArity(name: String) = arityClasses exists (x => name != x && (name startsWith x))
-    override def exclude(name: String) = super.exclude(name) || (
-      skipArity(name)
-    )
-
-    override def completions(verbosity: Int) = verbosity match {
-      case 0    => filtered(packageNames ++ aliasNames)
-      case _    => super.completions(verbosity)
-    }
-  }
-  // members of java.lang.*
-  object javalang extends PackageCompletion(JavaLangPackage.tpe) {
-    override lazy val excludeEndsWith   = super.excludeEndsWith ++ List("Exception", "Error")
-    override lazy val excludeStartsWith = super.excludeStartsWith ++ List("CharacterData")
-
-    override def completions(verbosity: Int) = verbosity match {
-      case 0    => filtered(packageNames)
-      case _    => super.completions(verbosity)
-    }
-  }
-
-  // the list of completion-aware objects which should be consulted
-  // for top-level unqualified identifiers; it's too noisy to let much in.
-  lazy val topLevelBase: List[CompletionAware] = List(ids, rootClass, predef, scalalang, javalang, literals)
-  def topLevel = topLevelBase ++ imported
-  def topLevelThreshold = 50
-
-  // the first tier of top level objects (doesn't include file completion)
-  def topLevelFor(parsed: Parsed): List[String] = {
-    val buf = new ListBuffer[String]
-    topLevel foreach { ca =>
-      buf ++= (ca completionsFor parsed)
-
-      if (buf.size > topLevelThreshold)
-        return buf.toList.sorted
-    }
-    buf.toList
-  }
-
-  // the most recent result
-  def lastResult = Forwarder(() => ids follow intp.mostRecentVar)
-
-  def lastResultFor(parsed: Parsed) = {
-    /** The logic is a little tortured right now because normally '.' is
-     *  ignored as a delimiter, but on .<tab> it needs to be propagated.
-     */
-    val xs = lastResult completionsFor parsed
-    if (parsed.isEmpty) xs map ("." + _) else xs
-  }
-
-  // generic interface for querying (e.g. interpreter loop, testing)
-  def completions(buf: String): List[String] =
-    topLevelFor(Parsed.dotted(buf + ".", buf.length + 1))
-
-  def completer(): ScalaCompleter = new JLineTabCompletion
-
-  /** This gets a little bit hairy.  It's no small feat delegating everything
-   *  and also keeping track of exactly where the cursor is and where it's supposed
-   *  to end up.  The alternatives mechanism is a little hacky: if there is an empty
-   *  string in the list of completions, that means we are expanding a unique
-   *  completion, so don't update the "last" buffer because it'll be wrong.
-   */
-  class JLineTabCompletion extends ScalaCompleter {
-    // For recording the buffer on the last tab hit
-    private var lastBuf: String = ""
-    private var lastCursor: Int = -1
-
-    // Does this represent two consecutive tabs?
-    def isConsecutiveTabs(buf: String, cursor: Int) =
-      cursor == lastCursor && buf == lastBuf
-
-    // Longest common prefix
-    def commonPrefix(xs: List[String]): String = {
-      if (xs.isEmpty || xs.contains("")) ""
-      else xs.head.head match {
-        case ch =>
-          if (xs.tail forall (_.head == ch)) "" + ch + commonPrefix(xs map (_.tail))
-          else ""
-      }
-    }
-
-    // This is jline's entry point for completion.
-    override def complete(buf: String, cursor: Int): Candidates = {
-      verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
-      repldbg("\ncomplete(%s, %d) last = (%s, %d), verbosity: %s".format(buf, cursor, lastBuf, lastCursor, verbosity))
-
-      // we don't try lower priority completions unless higher ones return no results.
-      def tryCompletion(p: Parsed, completionFunction: Parsed => List[String]): Option[Candidates] = {
-        val winners = completionFunction(p)
-        if (winners.isEmpty)
-          return None
-        val newCursor =
-          if (winners contains "") p.cursor
-          else {
-            val advance = commonPrefix(winners)
-            lastCursor = p.position + advance.length
-            lastBuf = (buf take p.position) + advance
-            repldbg("tryCompletion(%s, _) lastBuf = %s, lastCursor = %s, p.position = %s".format(
-              p, lastBuf, lastCursor, p.position))
-            p.position
-          }
-
-        Some(Candidates(newCursor, winners))
-      }
-
-      def mkDotted      = Parsed.dotted(buf, cursor) withVerbosity verbosity
-      def mkUndelimited = Parsed.undelimited(buf, cursor) withVerbosity verbosity
-
-      // a single dot is special cased to completion on the previous result
-      def lastResultCompletion =
-        if (!looksLikeInvocation(buf)) None
-        else tryCompletion(Parsed.dotted(buf drop 1, cursor), lastResultFor)
-
-      def tryAll = (
-                  lastResultCompletion
-           orElse tryCompletion(mkDotted, topLevelFor)
-        getOrElse Candidates(cursor, Nil)
-      )
-
-      /**
-       *  This is the kickoff point for all manner of theoretically
-       *  possible compiler unhappiness. The fault may be here or
-       *  elsewhere, but we don't want to crash the repl regardless.
-       *  The compiler makes it impossible to avoid catching Throwable
-       *  with its unfortunate tendency to throw java.lang.Errors and
-       *  AssertionErrors as the hats drop. We take two swings at it
-       *  because there are some spots which like to throw an assertion
-       *  once, then work after that. Yeah, what can I say.
-       */
-      try tryAll
-      catch { case ex: Throwable =>
-        repldbg("Error: complete(%s, %s) provoked".format(buf, cursor) + ex)
-        Candidates(cursor,
-          if (isReplDebug) List("<error:" + ex + ">")
-          else Nil
-        )
-      }
-    }
-  }
-}
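The commonPrefix helper above is what advances the cursor when several candidates share a prefix; a standalone sketch of the same logic, with a hypothetical candidate list:

    def commonPrefix(xs: List[String]): String =
      if (xs.isEmpty || xs.contains("")) ""
      else if (xs.tail forall (_.head == xs.head.head))
        "" + xs.head.head + commonPrefix(xs map (_.tail))
      else ""

    commonPrefix(List("reverse", "reduce", "replaceAll"))   // "re"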
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
deleted file mode 100644
index 5fd5b41..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
+++ /dev/null
@@ -1,76 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stepan Koltsov
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.tools.jline.console.ConsoleReader
-import scala.tools.jline.console.completer._
-import session._
-import scala.collection.JavaConverters._
-import Completion._
-import io.Streamable.slurp
-
-/**
- *  Reads from the console using JLine.
- */
-class JLineReader(_completion: => Completion) extends InteractiveReader {
-  val interactive = true
-  val consoleReader = new JLineConsoleReader()
-
-  lazy val completion = _completion
-  lazy val history: JLineHistory = JLineHistory()
-
-  private def term = consoleReader.getTerminal()
-  def reset() = term.reset()
-  def init()  = term.init()
-
-  def scalaToJline(tc: ScalaCompleter): Completer = new Completer {
-    def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = {
-      val buf   = if (_buf == null) "" else _buf
-      val Candidates(newCursor, newCandidates) = tc.complete(buf, cursor)
-      newCandidates foreach (candidates add _)
-      newCursor
-    }
-  }
-
-  class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper {
-    if ((history: History) ne NoHistory)
-      this setHistory history
-
-    // working around protected/trait/java insufficiencies.
-    def goBack(num: Int): Unit = back(num)
-    def readOneKey(prompt: String) = {
-      this.print(prompt)
-      this.flush()
-      this.readVirtualKey()
-    }
-    def eraseLine() = consoleReader.resetPromptLine("", "", 0)
-    def redrawLineAndFlush(): Unit = { flush() ; drawLine() ; flush() }
-    // override def readLine(prompt: String): String
-
-    // A hook for running code after the repl is done initializing.
-    lazy val postInit: Unit = {
-      this setBellEnabled false
-
-      if (completion ne NoCompletion) {
-        val argCompletor: ArgumentCompleter =
-          new ArgumentCompleter(new JLineDelimiter, scalaToJline(completion.completer()))
-        argCompletor setStrict false
-
-        this addCompleter argCompletor
-        this setAutoprintThreshold 400 // max completion candidates without warning
-      }
-    }
-  }
-
-  def currentLine = consoleReader.getCursorBuffer.buffer.toString
-  def redrawLine() = consoleReader.redrawLineAndFlush()
-  def eraseLine() = consoleReader.eraseLine()
-  // Alternate implementation, not sure if/when I need this.
-  // def eraseLine() = while (consoleReader.delete()) { }
-  def readOneLine(prompt: String) = consoleReader readLine prompt
-  def readOneKey(prompt: String)  = consoleReader readOneKey prompt
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Logger.scala b/src/compiler/scala/tools/nsc/interpreter/Logger.scala
deleted file mode 100644
index aeb25fc..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/Logger.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-trait Logger {
-  def isInfo: Boolean
-  def isDebug: Boolean
-  def isTrace: Boolean
-  def out: JPrintWriter
-
-  def info(msg: => Any): Unit  = if (isInfo) out println msg
-  def debug(msg: => Any): Unit = if (isDebug) out println msg
-  def trace(msg: => Any): Unit = if (isTrace) out println msg
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
deleted file mode 100644
index 60325ec..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.collection.{ mutable, immutable }
-import mutable.ListBuffer
-import scala.language.implicitConversions
-
-class ProcessResult(val line: String) {
-  import scala.sys.process._
-  private val buffer = new ListBuffer[String]
-
-  val builder  = Process(line)
-  val logger   = ProcessLogger(buffer += _)
-  val exitCode = builder ! logger
-  def lines    = buffer.toList
-
-  def show() = lines foreach println
-  override def toString = "`%s` (%d lines, exit %d)".format(line, buffer.size, exitCode)
-}
-object ProcessResult {
-  implicit def processResultToOutputLines(pr: ProcessResult): List[String] = pr.lines
-  def apply(line: String): ProcessResult = new ProcessResult(line)
-}
-
-trait LoopCommands {
-  protected def out: JPrintWriter
-
-  // So outputs can be suppressed.
-  def echoCommandMessage(msg: String): Unit = out println msg
-
-  // a single interpreter command
-  abstract class LoopCommand(val name: String, val help: String) extends (String => Result) {
-    private var _longHelp: String = null
-    final def defaultHelp = usageMsg + " (no extended help available.)"
-    def hasLongHelp = _longHelp != null || longHelp != defaultHelp
-    def withLongHelp(text: String): this.type = { _longHelp = text ; this }
-    def longHelp = _longHelp match {
-      case null   => defaultHelp
-      case text   => text
-    }
-    def usage: String = ""
-    def usageMsg: String = ":" + name + (
-      if (usage == "") "" else " " + usage
-    )
-    def apply(line: String): Result
-
-    // called if no args are given
-    def showUsage(): Result = {
-      "usage is " + usageMsg
-      Result(true, None)
-    }
-
-    def onError(msg: String) = {
-      out.println("error: " + msg)
-      showUsage()
-    }
-  }
-  object LoopCommand {
-    def nullary(name: String, help: String, f: () => Result): LoopCommand =
-      new NullaryCmd(name, help, _ => f())
-
-    def cmd(name: String, usage: String, help: String, f: String => Result): LoopCommand =
-      if (usage == "") new NullaryCmd(name, help, f)
-      else new LineCmd(name, usage, help, f)
-
-    def varargs(name: String, usage: String, help: String, f: List[String] => Result): LoopCommand =
-      new VarArgsCmd(name, usage, help, f)
-  }
-
-  class NullaryCmd(name: String, help: String, f: String => Result) extends LoopCommand(name, help) {
-    def apply(line: String): Result = f(line)
-  }
-
-  class LineCmd(name: String, argWord: String, help: String, f: String => Result) extends LoopCommand(name, help) {
-    override def usage = argWord
-    def apply(line: String): Result = f(line)
-  }
-
-  class VarArgsCmd(name: String, argWord: String, help: String, f: List[String] => Result)
-            extends LoopCommand(name, help) {
-    override def usage = argWord
-    def apply(line: String): Result = apply(words(line))
-    def apply(args: List[String]) = f(args)
-  }
-
-  // the result of a single command
-  case class Result(val keepRunning: Boolean, val lineToRecord: Option[String])
-
-  object Result {
-    // the default result means "keep running, and don't record that line"
-    val default = Result(true, None)
-
-    // most commands do not want to micromanage the Result, but they might want
-    // to print something to the console, so we accommodate Unit and String returns.
-    implicit def resultFromUnit(x: Unit): Result = default
-    implicit def resultFromString(msg: String): Result = {
-      echoCommandMessage(msg)
-      default
-    }
-  }
-}
-
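A sketch of how a client mixing in LoopCommands might define a command with the factory above; the command itself is hypothetical, and the String return relies on the resultFromString implicit.

    // inside something that mixes in LoopCommands:
    val greet = LoopCommand.cmd("greet", "<name>", "print a greeting",
      name => "hello, " + name)   // the String is echoed and converted to a Result

    greet("world")                // prints "hello, world", keeps the loop running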
diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
deleted file mode 100644
index 67519cf..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ /dev/null
@@ -1,228 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Martin Odersky
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.collection.{ mutable, immutable }
-import scala.PartialFunction.cond
-import scala.reflect.internal.Chars
-import scala.reflect.internal.Flags._
-import scala.language.implicitConversions
-
-trait MemberHandlers {
-  val intp: IMain
-
-  import intp.{ Request, global, naming }
-  import global._
-  import naming._
-
-  private def codegenln(leadingPlus: Boolean, xs: String*): String = codegen(leadingPlus, (xs ++ Array("\n")): _*)
-  private def codegenln(xs: String*): String = codegenln(true, xs: _*)
-
-  private def codegen(xs: String*): String = codegen(true, xs: _*)
-  private def codegen(leadingPlus: Boolean, xs: String*): String = {
-    val front = if (leadingPlus) "+ " else ""
-    front + (xs map string2codeQuoted mkString " + ")
-  }
-  private implicit def name2string(name: Name) = name.toString
-
-  /** A traverser that finds all mentioned identifiers, i.e. things
-   *  that need to be imported.  It might return extra names.
-   */
-  private class ImportVarsTraverser extends Traverser {
-    val importVars = new mutable.HashSet[Name]()
-
-    override def traverse(ast: Tree) = ast match {
-      case Ident(name) =>
-        // XXX this is obviously inadequate but it's going to require some effort
-        // to get right.
-        if (name.toString startsWith "x$") ()
-        else importVars += name
-      case _        => super.traverse(ast)
-    }
-  }
-  private object ImportVarsTraverser {
-    def apply(member: Tree) = {
-      val ivt = new ImportVarsTraverser()
-      ivt traverse member
-      ivt.importVars.toList
-    }
-  }
-
-  def chooseHandler(member: Tree): MemberHandler = member match {
-    case member: DefDef        => new DefHandler(member)
-    case member: ValDef        => new ValHandler(member)
-    case member: Assign        => new AssignHandler(member)
-    case member: ModuleDef     => new ModuleHandler(member)
-    case member: ClassDef      => new ClassHandler(member)
-    case member: TypeDef       => new TypeAliasHandler(member)
-    case member: Import        => new ImportHandler(member)
-    case DocDef(_, documented) => chooseHandler(documented)
-    case member                => new GenericHandler(member)
-  }
-
-  sealed abstract class MemberDefHandler(override val member: MemberDef) extends MemberHandler(member) {
-    def symbol          = if (member.symbol eq null) NoSymbol else member.symbol
-    def name: Name      = member.name
-    def mods: Modifiers = member.mods
-    def keyword         = member.keyword
-    def prettyName      = name.decode
-
-    override def definesImplicit = member.mods.isImplicit
-    override def definesTerm: Option[TermName] = Some(name.toTermName) filter (_ => name.isTermName)
-    override def definesType: Option[TypeName] = Some(name.toTypeName) filter (_ => name.isTypeName)
-    override def definedSymbols = if (symbol eq NoSymbol) Nil else List(symbol)
-  }
-
-  /** Class to handle one member among all the members included
-   *  in a single interpreter request.
-   */
-  sealed abstract class MemberHandler(val member: Tree) {
-    def definesImplicit = false
-    def definesValue    = false
-    def isLegalTopLevel = false
-
-    def definesTerm     = Option.empty[TermName]
-    def definesType     = Option.empty[TypeName]
-
-    lazy val referencedNames = ImportVarsTraverser(member)
-    def importedNames        = List[Name]()
-    def definedNames         = definesTerm.toList ++ definesType.toList
-    def definedOrImported    = definedNames ++ importedNames
-    def definedSymbols       = List[Symbol]()
-
-    def extraCodeToEvaluate(req: Request): String = ""
-    def resultExtractionCode(req: Request): String = ""
-
-    private def shortName = this.getClass.toString split '.' last
-    override def toString = shortName + referencedNames.mkString(" (refs: ", ", ", ")")
-  }
-
-  class GenericHandler(member: Tree) extends MemberHandler(member)
-
-  class ValHandler(member: ValDef) extends MemberDefHandler(member) {
-    val maxStringElements = 1000  // no need to mkString billions of elements
-    override def definesValue = true
-
-    override def resultExtractionCode(req: Request): String = {
-      val isInternal = isUserVarName(name) && req.lookupTypeOf(name) == "Unit"
-      if (!mods.isPublic || isInternal) ""
-      else {
-        // if this is a lazy val we avoid evaluating it here
-        val resultString =
-          if (mods.isLazy) codegenln(false, "<lazy>")
-          else any2stringOf(req fullPath name, maxStringElements)
-
-        val vidString =
-          if (replProps.vids) """" + " @ " + "%%8x".format(System.identityHashCode(%s)) + " """.trim.format(req fullPath name)
-          else ""
-
-        """ + "%s%s: %s = " + %s""".format(string2code(prettyName), vidString, string2code(req typeOf name), resultString)
-      }
-    }
-  }
-
-  class DefHandler(member: DefDef) extends MemberDefHandler(member) {
-    private def vparamss = member.vparamss
-    private def isMacro = member.symbol hasFlag MACRO
-    // true if not a macro and 0-arity
-    override def definesValue = !isMacro && flattensToEmpty(vparamss)
-    override def resultExtractionCode(req: Request) =
-      if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
-  }
-
-  class AssignHandler(member: Assign) extends MemberHandler(member) {
-    val Assign(lhs, rhs) = member
-    val name = newTermName(freshInternalVarName())
-
-    override def definesTerm = Some(name)
-    override def definesValue = true
-    override def extraCodeToEvaluate(req: Request) =
-      """val %s = %s""".format(name, lhs)
-
-    /** Print out lhs instead of the generated varName */
-    override def resultExtractionCode(req: Request) = {
-      val lhsType = string2code(req lookupTypeOf name)
-      val res     = string2code(req fullPath name)
-      """ + "%s: %s = " + %s + "\n" """.format(string2code(lhs.toString), lhsType, res) + "\n"
-    }
-  }
-
-  class ModuleHandler(module: ModuleDef) extends MemberDefHandler(module) {
-    override def definesTerm = Some(name)
-    override def definesValue = true
-    override def isLegalTopLevel = true
-
-    override def resultExtractionCode(req: Request) = codegenln("defined module ", name)
-  }
-
-  class ClassHandler(member: ClassDef) extends MemberDefHandler(member) {
-    override def definesType = Some(name.toTypeName)
-    override def definesTerm = Some(name.toTermName) filter (_ => mods.isCase)
-    override def isLegalTopLevel = true
-
-    override def resultExtractionCode(req: Request) =
-      codegenln("defined %s %s".format(keyword, name))
-  }
-
-  class TypeAliasHandler(member: TypeDef) extends MemberDefHandler(member) {
-    private def isAlias = mods.isPublic && treeInfo.isAliasTypeDef(member)
-    override def definesType = Some(name.toTypeName) filter (_ => isAlias)
-
-    override def resultExtractionCode(req: Request) =
-      codegenln("defined type alias ", name) + "\n"
-  }
-
-  class ImportHandler(imp: Import) extends MemberHandler(imp) {
-    val Import(expr, selectors) = imp
-    def targetType: Type = intp.typeOfExpression("" + expr)
-    override def isLegalTopLevel = true
-
-    def createImportForName(name: Name): String = {
-      selectors foreach {
-        case sel @ ImportSelector(old, _, `name`, _)  => return "import %s.{ %s }".format(expr, sel)
-        case _ => ()
-      }
-      "import %s.%s".format(expr, name)
-    }
-    // TODO: Need to track these specially to honor Predef masking attempts,
-    // because they must be the leading imports in the code generated for each
-    // line.  We can use the same machinery as Contexts now, anyway.
-    def isPredefImport = isReferenceToPredef(expr)
-
-    // wildcard imports, e.g. import foo._
-    private def selectorWild    = selectors filter (_.name == nme.USCOREkw)
-    // renamed imports, e.g. import foo.{ bar => baz }
-    private def selectorRenames = selectors map (_.rename) filterNot (_ == null)
-
-    /** Whether this import includes a wildcard import */
-    val importsWildcard = selectorWild.nonEmpty
-
-    /** Whether anything imported is implicit. */
-    def importsImplicit = implicitSymbols.nonEmpty
-
-    def implicitSymbols = importedSymbols filter (_.isImplicit)
-    def importedSymbols = individualSymbols ++ wildcardSymbols
-
-    lazy val individualSymbols: List[Symbol] =
-      beforePickler(individualNames map (targetType nonPrivateMember _))
-
-    lazy val wildcardSymbols: List[Symbol] =
-      if (importsWildcard) beforePickler(targetType.nonPrivateMembers.toList)
-      else Nil
-
-    /** Complete list of names imported by a wildcard */
-    lazy val wildcardNames: List[Name]   = wildcardSymbols map (_.name)
-    lazy val individualNames: List[Name] = selectorRenames filterNot (_ == nme.USCOREkw) flatMap (_.bothNames)
-
-    /** The names imported by this statement */
-    override lazy val importedNames: List[Name] = wildcardNames ++ individualNames
-    lazy val importsSymbolNamed: Set[String] = importedNames map (_.toString) toSet
-
-    def importString = imp.toString
-    override def resultExtractionCode(req: Request) = codegenln(importString) + "\n"
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
deleted file mode 100644
index eff0ef5..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import NamedParam._
-import scala.language.implicitConversions
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.{ClassTag, classTag}
-
-trait NamedParamCreator {
-  protected def freshName: () => String
-
-  def apply(name: String, tpe: String, value: Any): NamedParam = NamedParamClass(name, tpe, value)
-  def apply[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = new Typed[T](name, x)
-  def apply[T: ru.TypeTag : ClassTag](x: T): NamedParam = apply(freshName(), x)
-
-  def clazz(name: String, x: Any): NamedParam = new Untyped(name, x)
-  def clazz(x: Any): NamedParam = clazz(freshName(), x)
-
-  implicit def namedValue[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = apply(name, x)
-  implicit def tuple[T: ru.TypeTag : ClassTag](pair: (String, T)): NamedParam       = apply(pair._1, pair._2)
-}
-
-object NamedParam extends NamedParamCreator {
-  class Typed[T: ru.TypeTag : ClassTag](val name: String, val value: T) extends NamedParam {
-    val tpe = TypeStrings.fromTag[T]
-  }
-  class Untyped(val name: String, val value: Any) extends NamedParam {
-    val tpe = TypeStrings.fromValue(value)
-  }
-
-  protected val freshName = {
-    var counter = 0
-    () => { counter += 1; "p" + counter }
-  }
-}
-
-case class NamedParamClass(name: String, tpe: String, value: Any) extends NamedParam { }
-
-trait NamedParam {
-  def name: String
-  def tpe: String
-  def value: Any
-  override def toString = name + ": " + tpe
-}
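A usage sketch of NamedParam; the rendered type strings are approximate, since they are whatever TypeStrings produces.

    import scala.tools.nsc.interpreter.NamedParam

    val p = NamedParam("xs", List(1, 2, 3))     // tpe derived from the TypeTag
    val q = NamedParam.clazz("o", new Object)   // tpe derived from the runtime class
    p.toString                                  // roughly "xs: List[Int]"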
diff --git a/src/compiler/scala/tools/nsc/interpreter/Naming.scala b/src/compiler/scala/tools/nsc/interpreter/Naming.scala
deleted file mode 100644
index 0d03a86..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/Naming.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-/** This is for name logic which is independent of the compiler (notice there's no Global).
- *  That includes at least generating, metaquoting, mangling, and unmangling.
- */
-trait Naming {
-  def unmangle(str: String): String = {
-    val ESC = '\u001b'
-    val cleaned = removeIWPackages(removeLineWrapper(str))
-    // Looking to exclude binary data which hoses the terminal, but
-    // let through the subset of it we need, like whitespace and also
-    // <ESC> for ansi codes.
-    val binaryChars = cleaned count (ch => ch < 32 && !ch.isWhitespace && ch != ESC)
-    // Lots of binary chars - translate all supposed whitespace into spaces
-    if (binaryChars > 5)
-      cleaned map (ch => if (ch.isWhitespace) ' ' else if (ch < 32) '?' else ch)
-    // Not lots - preserve whitespace and ESC
-    else
-      cleaned map (ch => if (ch.isWhitespace || ch == ESC) ch else if (ch < 32) '?' else ch)
-  }
-
-  // The two name forms this is catching are the two sides of this assignment:
-  //
-  // $line3.$read.$iw.$iw.Bippy =
-  //   $line3.$read$$iw$$iw$Bippy at 4a6a00ca
-
-  private def noMeta(s: String) = "\\Q" + s + "\\E"
-  private lazy val lineRegex = {
-    val sn = sessionNames
-    val members = List(sn.read, sn.eval, sn.print) map noMeta mkString ("(?:", "|", ")")
-    debugging("lineRegex")(noMeta(sn.line) + """\d+[./]""" + members + """[$.]""")
-  }
-
-  private def removeLineWrapper(s: String) = s.replaceAll(lineRegex, "")
-  private def removeIWPackages(s: String)  = s.replaceAll("""\$iw[$.]""", "")
-
-  trait SessionNames {
-    // All values are configurable by passing e.g. -Dscala.repl.name.read=XXX
-    final def propOr(name: String): String = propOr(name, "$" + name)
-    final def propOr(name: String, default: String): String =
-      sys.props.getOrElse("scala.repl.name." + name, default)
-
-    // Prefixes used in repl machinery.  Default to $line, $read, etc.
-    def line   = propOr("line")
-    def read   = propOr("read")
-    def eval   = propOr("eval")
-    def print  = propOr("print")
-    def result = propOr("result")
-
-    // The prefix for unnamed results: by default res0, res1, etc.
-    def res   = propOr("res", "res")  // INTERPRETER_VAR_PREFIX
-    // Internal ones
-    def ires  = propOr("ires")
-  }
-  lazy val sessionNames: SessionNames = new SessionNames { }
-
-  /** Generates names pre0, pre1, etc. via calls to apply method */
-  class NameCreator(pre: String) {
-    private var x = -1
-    var mostRecent: String = ""
-
-    def apply(): String = {
-      x += 1
-      mostRecent = pre + x
-      mostRecent
-    }
-    def reset(): Unit = x = -1
-    def didGenerate(name: String) =
-      (name startsWith pre) && ((name drop pre.length) forall (_.isDigit))
-  }
-
-  private lazy val userVar     = new NameCreator(sessionNames.res)  // var name, like res0
-  private lazy val internalVar = new NameCreator(sessionNames.ires) // internal var name, like $ires0
-
-  def isLineName(name: String)        = (name startsWith sessionNames.line) && (name stripPrefix sessionNames.line forall (_.isDigit))
-  def isUserVarName(name: String)     = userVar didGenerate name
-  def isInternalVarName(name: String) = internalVar didGenerate name
-
-  val freshLineId            = {
-    var x = 0
-    () => { x += 1 ; x }
-  }
-  def freshUserVarName() = userVar()
-  def freshInternalVarName() = internalVar()
-
-  def resetAllCreators() {
-    userVar.reset()
-    internalVar.reset()
-  }
-
-  def mostRecentVar = userVar.mostRecent
-}
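A REPL-style sketch of the NameCreator behaviour described above, usable from anything mixing in Naming:

    val userVar = new NameCreator("res")   // same prefix as sessionNames.res
    userVar()                              // "res0"
    userVar()                              // "res1"
    userVar.mostRecent                     // "res1"
    userVar didGenerate "res1"             // true
    userVar didGenerate "result"           // false: "ult" is not all digits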
diff --git a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala
deleted file mode 100644
index b0be956..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.tools.jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList }
-import util.returning
-
-/** One instance of a command buffer.
- */
-class Parsed private (
-  val buffer: String,
-  val cursor: Int,
-  val delimited: Char => Boolean
-) extends Delimited {
-  def isEmpty       = args.isEmpty
-  def isUnqualified = args.size == 1
-  def isQualified   = args.size > 1
-  def isAtStart     = cursor <= 0
-
-  private var _verbosity = 0
-
-  def verbosity = _verbosity
-  def withVerbosity(v: Int): this.type = returning[this.type](this)(_ => _verbosity = v)
-
-  def args = toArgs(buffer take cursor).toList
-  def bufferHead = args.head
-  def headLength = bufferHead.length + 1
-  def bufferTail = new Parsed(buffer drop headLength, cursor - headLength, delimited) withVerbosity verbosity
-
-  def prev = new Parsed(buffer, cursor - 1, delimited) withVerbosity verbosity
-  def next = new Parsed(buffer, cursor + 1, delimited) withVerbosity verbosity
-  def currentChar = buffer(cursor)
-  def currentArg = args.last
-  def position =
-    if (isEmpty) 0
-    else if (isLastDelimiter) cursor
-    else cursor - currentArg.length
-
-  def isFirstDelimiter  = !isEmpty && isDelimiterChar(buffer.head)
-  def isLastDelimiter   = !isEmpty && isDelimiterChar(buffer.last)
-  def firstIfDelimiter  = if (isFirstDelimiter) buffer.head.toString else ""
-  def lastIfDelimiter   = if (isLastDelimiter) buffer.last.toString else ""
-
-  def isQuoted = false // TODO
-  def isEscaped = !isAtStart && isEscapeChar(currentChar) && !isEscapeChar(prev.currentChar)
-  def isDelimiter = !isQuoted && !isEscaped && isDelimiterChar(currentChar)
-
-  override def toString = "Parsed(%s / %d)".format(buffer, cursor)
-}
-
-object Parsed {
-  val DefaultDelimiters = "[]{},`; \t".toSet
-
-  private def onull(s: String) = if (s == null) "" else s
-
-  def apply(s: String): Parsed = apply(onull(s), onull(s).length)
-  def apply(s: String, cursor: Int): Parsed = apply(onull(s), cursor, DefaultDelimiters)
-  def apply(s: String, cursor: Int, delimited: Char => Boolean): Parsed =
-    new Parsed(onull(s), cursor, delimited)
-
-  def dotted(s: String): Parsed = dotted(onull(s), onull(s).length)
-  def dotted(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ == '.')
-
-  def undelimited(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ => false)
-}
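A rough sketch of how the completion code slices a buffer with Parsed.dotted; the values in the comments are roughly what the definitions above compute for this input.

    val p = Parsed.dotted("scala.collection.mut", 20)
    p.args          // List("scala", "collection", "mut")
    p.currentArg    // "mut"
    p.position      // 17: the cursor minus the length of the fragment being completed
    p.isQualified   // true, since there is more than one arg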
diff --git a/src/compiler/scala/tools/nsc/interpreter/Phased.scala b/src/compiler/scala/tools/nsc/interpreter/Phased.scala
deleted file mode 100644
index 6389447..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/Phased.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.collection.{ mutable, immutable }
-import scala.language.implicitConversions
-
-/** Mix this into an object and use it as a phasing
- *  swiss army knife.
- */
-trait Phased {
-  val global: Global
-  import global._
-
-  private var active: PhaseName = NoPhaseName
-  private var multi: Seq[PhaseName] = Nil
-
-  def get = active
-  def set(phase: PhaseName): Boolean = phase match {
-    case NoPhaseName  => false
-    case name         => active = name ; true
-  }
-  def getMulti = multi
-  def setMulti(phases: Seq[PhaseName]): Boolean = {
-    if (phases contains NoPhaseName) false
-    else {
-      multi = phases
-      true
-    }
-  }
-
-  private def parsePhaseChange(str: String): Option[Int] = {
-    if (str == "") Some(0)
-    else if (str startsWith ".prev") parsePhaseChange(str drop 5) map (_ - 1)
-    else if (str startsWith ".next") parsePhaseChange(str drop 5) map (_ + 1)
-    else str.head match {
-      case '+' | '-' =>
-        val (num, rest) = str.tail.span(_.isDigit)
-        val diff = if (str.head == '+') num.toInt else -num.toInt
-        parsePhaseChange(rest) map (_ + diff)
-      case _ =>
-        None
-    }
-  }
-
-  /** Takes a string like 4, typer+2, typer.next, etc.
-   *  and turns it into a PhaseName instance.
-   */
-  private def parseInternal(str: String): PhaseName = {
-    if (str == "") NoPhaseName
-    else if (str forall (_.isDigit)) PhaseName(str.toInt)
-    else {
-      val (name, rest) = str.toLowerCase span (_.isLetter)
-      val start        = PhaseName(name)
-      val change       = parsePhaseChange(rest)
-
-      if (start.isEmpty || change.isEmpty) NoPhaseName
-      else PhaseName(start.id + change.get)
-    }
-  }
-  def parse(str: String): PhaseName =
-    try parseInternal(str)
-    catch { case _: Exception => NoPhaseName }
-
-  def apply[T](body: => T) = immutable.SortedMap[PhaseName, T](atMap(PhaseName.all)(body): _*)
-
-  def atCurrent[T](body: => T): T = atPhase(get)(body)
-  def multi[T](body: => T): Seq[T] = multi map (ph => at(ph)(body))
-  def all[T](body: => T): Seq[T] = atMulti(PhaseName.all)(body)
-  def show[T](body: => T): Seq[T] = {
-    val pairs = atMap(PhaseName.all)(body)
-    pairs foreach { case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240)) }
-    pairs map (_._2)
-  }
-
-  def at[T](ph: PhaseName)(body: => T): T = {
-    val saved = get
-    set(ph)
-    try atCurrent(body)
-    finally set(saved)
-  }
-  def atMulti[T](phs: Seq[PhaseName])(body: => T): Seq[T] = {
-    val saved = multi
-    setMulti(phs)
-    try multi(body)
-    finally setMulti(saved)
-  }
-
-  def showAt[T](phs: Seq[PhaseName])(body: => T): Unit =
-    atMap[T](phs)(body) foreach {
-      case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240))
-    }
-
-  def atMap[T](phs: Seq[PhaseName])(body: => T): Seq[(PhaseName, T)] =
-    phs zip atMulti(phs)(body)
-
-  object PhaseName {
-    implicit lazy val phaseNameOrdering: Ordering[PhaseName] = Ordering[Int] on (_.id)
-
-    lazy val all = List(
-      Parser, Namer, Packageobjects, Typer, Superaccessors, Pickler, Refchecks,
-      Selectiveanf, Liftcode, Selectivecps, Uncurry, Tailcalls, Specialize,
-      Explicitouter, Erasure, Lazyvals, Lambdalift, Constructors, Flatten, Mixin,
-      Cleanup, Icode, Inliner, Closelim, Dce, Jvm, Terminal
-    )
-    lazy val nameMap = all.map(x => x.name -> x).toMap withDefaultValue NoPhaseName
-    multi = all
-
-    def apply(id: Int): PhaseName = all find (_.id == id) getOrElse NoPhaseName
-    implicit def apply(s: String): PhaseName = nameMap(s)
-    implicit def defaultPhaseName: PhaseName = active
-  }
-  sealed abstract class PhaseName {
-    lazy val id   = phase.id
-    lazy val name = toString.toLowerCase
-    def phase     = currentRun.phaseNamed(name)
-    def isEmpty   = this eq NoPhaseName
-
-    // Execute some code during this phase.
-    def apply[T](body: => T): T = atPhase(phase)(body)
-  }
-
-  case object Parser extends PhaseName
-  case object Namer extends PhaseName
-  case object Packageobjects extends PhaseName
-  case object Typer extends PhaseName
-  case object Superaccessors extends PhaseName
-  case object Pickler extends PhaseName
-  case object Refchecks extends PhaseName
-  case object Selectiveanf extends PhaseName
-  case object Liftcode extends PhaseName
-  case object Selectivecps extends PhaseName
-  case object Uncurry extends PhaseName
-  case object Tailcalls extends PhaseName
-  case object Specialize extends PhaseName
-  case object Explicitouter extends PhaseName
-  case object Erasure extends PhaseName
-  case object Lazyvals extends PhaseName
-  case object Lambdalift extends PhaseName
-  case object Constructors extends PhaseName
-  case object Flatten extends PhaseName
-  case object Mixin extends PhaseName
-  case object Cleanup extends PhaseName
-  case object Icode extends PhaseName
-  case object Inliner extends PhaseName
-  case object Closelim extends PhaseName
-  case object Dce extends PhaseName
-  case object Jvm extends PhaseName
-  case object Terminal extends PhaseName
-  case object NoPhaseName extends PhaseName {
-    override lazy val id   = -1
-    override lazy val name = phase.name
-    override def phase     = NoPhase
-  }
-
-  implicit def phaseEnumToPhase(name: PhaseName): Phase = name.phase
-  implicit def phaseNameToPhase(name: String): Phase = currentRun.phaseNamed(name)
-}
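An illustrative sketch of the phase-string syntax that parse accepts and of running code at a phase; phased stands for some instance of the Phased trait.

    phased.parse("typer")          // Typer
    phased.parse("typer+2")        // two phases after typer
    phased.parse("erasure.prev")   // the phase before erasure
    phased.parse("bogus")          // NoPhaseName

    phased.at(phased.parse("typer")) {
      // evaluate something with the active phase set to typer
    }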
diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala
deleted file mode 100644
index 5e6bf88..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/Power.scala
+++ /dev/null
@@ -1,430 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.collection.{ mutable, immutable }
-import scala.util.matching.Regex
-import scala.reflect.internal.util.{ BatchSourceFile }
-import session.{ History }
-import scala.io.Codec
-import java.net.{ URL, MalformedURLException }
-import io.{ Path }
-import scala.language.implicitConversions
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.{ClassTag, classTag}
-
-/** Collecting some power mode examples.
-
-scala> trait F[@specialized(Int) T] { def f: T = ??? }
-defined trait F
-
-scala> trait G[@specialized(Long, Int) T] extends F[T] { override def f: T = super.f }
-defined trait G
-
-scala> changesAfterEachPhase(intp("G").info.members filter (_.name.toString contains "super")) >
-Gained after  1/parser {
-  method super$f
-}
-
-Gained after 12/specialize {
-  method super$f$mcJ$sp
-  method super$f$mcI$sp
-}
-
-Lost after 18/flatten {
-  method super$f$mcJ$sp
-  method super$f$mcI$sp
-  method super$f
-}
-*/
-
-/** A class for methods to be injected into the intp in power mode.
- */
-class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, replVals: ReplValsImpl) {
-  import intp.{ beQuietDuring, typeOfExpression, interpret, parse }
-  import intp.global._
-  import definitions.{ compilerTypeFromTag, compilerSymbolFromTag}
-  import rootMirror.{ getClassIfDefined, getModuleIfDefined }
-
-  abstract class SymSlurper {
-    def isKeep(sym: Symbol): Boolean
-    def isIgnore(sym: Symbol): Boolean
-    def isRecur(sym: Symbol): Boolean
-    def isFinished(): Boolean
-
-    val keep = mutable.HashSet[Symbol]()
-    val seen = mutable.HashSet[Symbol]()
-    def processed = keep.size + seen.size
-    def discarded = seen.size - keep.size
-
-    def members(x: Symbol): List[Symbol] =
-      if (x.rawInfo.isComplete) x.info.members.toList
-      else Nil
-
-    var lastCount = -1
-    var pass = 0
-    val unseenHistory = new mutable.ListBuffer[Int]
-
-    def loop(todo: Set[Symbol]): Set[Symbol] = {
-      pass += 1
-      val (repeats, unseen) = todo partition seen
-      unseenHistory += unseen.size
-      if (opt.verbose) {
-        println("%3d  %s accumulated, %s discarded.  This pass: %s unseen, %s repeats".format(
-          pass, keep.size, discarded, unseen.size, repeats.size))
-      }
-      if (lastCount == processed || unseen.isEmpty || isFinished())
-        return keep.toSet
-
-      lastCount = processed
-      keep ++= (unseen filter isKeep filterNot isIgnore)
-      seen ++= unseen
-      loop(unseen filter isRecur flatMap members)
-    }
-
-    def apply(sym: Symbol): Set[Symbol] = {
-      keep.clear()
-      seen.clear()
-      loop(Set(sym))
-    }
-  }
-
-  class PackageSlurper(packageClass: Symbol) extends SymSlurper {
-    /** Looking for dwindling returns */
-    def droppedEnough() = unseenHistory.size >= 4 && {
-      unseenHistory takeRight 4 sliding 2 forall { it =>
-        val List(a, b) = it.toList
-        a > b
-      }
-    }
-
-    def isRecur(sym: Symbol)  = true
-    def isIgnore(sym: Symbol) = sym.isAnonOrRefinementClass || (sym.name.toString contains "$mc")
-    def isKeep(sym: Symbol)   = sym.hasTransOwner(packageClass)
-    def isFinished()          = droppedEnough()
-    def slurp()               = {
-      if (packageClass.isPackageClass)
-        apply(packageClass)
-      else {
-        repldbg("Not a package class! " + packageClass)
-        Set()
-      }
-    }
-  }
-
-  private def customBanner = replProps.powerBanner.option flatMap (f => io.File(f).safeSlurp())
-  private def customInit   = replProps.powerInitCode.option flatMap (f => io.File(f).safeSlurp())
-
-  def banner = customBanner getOrElse """
-    |** Power User mode enabled - BEEP WHIR GYVE **
-    |** :phase has been set to 'typer'.          **
-    |** scala.tools.nsc._ has been imported      **
-    |** global._, definitions._ also imported    **
-    |** Try  :help, :vals, power.<tab>           **
-  """.stripMargin.trim
-
-  private def initImports = List(
-    "scala.tools.nsc._",
-    "scala.collection.JavaConverters._",
-    "intp.global.{ error => _, _ }",
-    "definitions.{ getClass => _, _ }",
-    "power.rutil._",
-    "replImplicits._",
-    "treedsl.CODE._"
-  )
-
-  def init = customInit match {
-    case Some(x)  => x
-    case _        => initImports.mkString("import ", ", ", "")
-  }
-
-  /** Starts up power mode and runs whatever is in init.
-   */
-  def unleash(): Unit = beQuietDuring {
-    // First we create the ReplVals instance and bind it to $r
-    intp.bind("$r", replVals)
-    // Then we import everything from $r.
-    intp interpret ("import " + intp.pathToTerm("$r") + "._")
-    // And whatever else there is to do.
-    init.lines foreach (intp interpret _)
-  }
-  def valsDescription: String = {
-    def to_str(m: Symbol) = "%12s %s".format(
-      m.decodedName, "" + elimRefinement(m.accessedOrSelf.tpe) stripPrefix "scala.tools.nsc.")
-
-    ( rutil.info[ReplValsImpl].membersDeclared
-        filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor)
-        sortBy (_.decodedName)
-           map to_str
-      mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "")
-    )
-  }
-
-  trait LowPriorityInternalInfo {
-    implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfo[T] = new InternalInfo[T](None)
-  }
-  object InternalInfo extends LowPriorityInternalInfo { }
-
-  /** Addresses the problem of accidentally calling a method on Type
-   *  when you're holding a Symbol and having the Symbol converted to the
-   *  type of Symbol rather than the type of the thing the symbol
-   *  represents: only one method, "?", is installed implicitly, and the
-   *  rest of the conveniences live on that wrapper.
-   */
-  trait LowPriorityInternalInfoWrapper {
-    implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None)
-  }
-  object InternalInfoWrapper extends LowPriorityInternalInfoWrapper {
-
-  }
-  class InternalInfoWrapper[T: ru.TypeTag : ClassTag](value: Option[T] = None) {
-    def ? : InternalInfo[T] = new InternalInfo[T](value)
-  }
-
-  /** Todos...
-   *    translate tag type arguments into applied types
-   *    customizable symbol filter (had to hardcode no-spec to reduce noise)
-   */
-  class InternalInfo[T](value: Option[T] = None)(implicit typeEvidence: ru.TypeTag[T], runtimeClassEvidence: ClassTag[T]) {
-    private def newInfo[U: ru.TypeTag : ClassTag](value: U): InternalInfo[U] = new InternalInfo[U](Some(value))
-    private def isSpecialized(s: Symbol) = s.name.toString contains "$mc"
-    private def isImplClass(s: Symbol)   = s.name.toString endsWith "$class"
-
-    /** Standard noise reduction filter. */
-    def excludeMember(s: Symbol) = (
-         isSpecialized(s)
-      || isImplClass(s)
-      || s.isAnonOrRefinementClass
-      || s.isAnonymousFunction
-    )
-    def symbol      = compilerSymbolFromTag(tag)
-    def tpe         = compilerTypeFromTag(tag)
-    def name        = symbol.name
-    def companion   = symbol.companionSymbol
-    def info        = symbol.info
-    def moduleClass = symbol.moduleClass
-    def owner       = symbol.owner
-    def owners      = symbol.ownerChain drop 1
-    def signature   = symbol.defString
-
-    def decls         = info.decls
-    def declsOverride = membersDeclared filter (_.isOverride)
-    def declsOriginal = membersDeclared filterNot (_.isOverride)
-
-    def members           = membersUnabridged filterNot excludeMember
-    def membersUnabridged = tpe.members.toList
-    def membersDeclared   = members filterNot excludeMember
-    def membersInherited  = members filterNot (membersDeclared contains _)
-    def memberTypes       = members filter (_.name.isTypeName)
-    def memberMethods     = members filter (_.isMethod)
-
-    def pkg             = symbol.enclosingPackage
-    def pkgName         = pkg.fullName
-    def pkgClass        = symbol.enclosingPackageClass
-    def pkgMembers      = pkg.info.members filterNot excludeMember
-    def pkgClasses      = pkgMembers filter (s => s.isClass && s.isDefinedInPackage)
-    def pkgSymbols      = new PackageSlurper(pkgClass).slurp() filterNot excludeMember
-
-    def tag            = typeEvidence
-    def runtimeClass   = runtimeClassEvidence.runtimeClass
-    def shortClass     = runtimeClass.getName split "[$.]" last
-
-    def baseClasses                    = tpe.baseClasses
-    def baseClassDecls                 = mapFrom(baseClasses)(_.info.decls.toList.sortBy(_.name))
-    def ancestors                      = baseClasses drop 1
-    def ancestorDeclares(name: String) = ancestors filter (_.info member newTermName(name) ne NoSymbol)
-    def baseTypes                      = tpe.baseTypeSeq.toList
-
-    def <:<[U: ru.TypeTag : ClassTag](other: U) = tpe <:< newInfo(other).tpe
-    def lub[U: ru.TypeTag : ClassTag](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe))
-    def glb[U: ru.TypeTag : ClassTag](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe))
-
-    override def toString = value match {
-      case Some(x)  => "%s (%s)".format(x, shortClass)
-      case _        => runtimeClass.getName
-    }
-  }
-
-  trait LowPriorityPrettifier {
-    implicit object AnyPrettifier extends Prettifier[Any] {
-      def show(x: Any): Unit = prettify(x) foreach println
-      def prettify(x: Any): TraversableOnce[String] = x match {
-        case x: Name                => List(x.decode)
-        case Tuple2(k, v)           => List(prettify(k).toIterator ++ Iterator("->") ++ prettify(v) mkString " ")
-        case xs: Array[_]           => xs.iterator flatMap prettify
-        case xs: TraversableOnce[_] => xs flatMap prettify
-        case x                      => List(Prettifier.stringOf(x))
-      }
-    }
-  }
-  object StringPrettifier extends Prettifier[String] {
-    def show(x: String) = println(x)
-    def prettify(x: String) = List(Prettifier stringOf x)
-  }
-  object Prettifier extends LowPriorityPrettifier {
-    def stringOf(x: Any): String = scala.runtime.ScalaRunTime.stringOf(x)
-    def prettify[T](value: T): TraversableOnce[String] = default[T] prettify value
-    def default[T] = new Prettifier[T] {
-      def prettify(x: T): TraversableOnce[String] = AnyPrettifier prettify x
-      def show(x: T): Unit = AnyPrettifier show x
-    }
-  }
-  trait Prettifier[T] {
-    def show(x: T): Unit
-    def prettify(x: T): TraversableOnce[String]
-
-    def show(xs: TraversableOnce[T]): Unit = prettify(xs) foreach println
-    def prettify(xs: TraversableOnce[T]): TraversableOnce[String] = xs flatMap (x => prettify(x))
-  }
-
-  abstract class PrettifierClass[T: Prettifier]() {
-    val pretty = implicitly[Prettifier[T]]
-    import pretty._
-
-    def value: Seq[T]
-
-    def pp(f: Seq[T] => Seq[T]): Unit =
-      pretty prettify f(value) foreach (StringPrettifier show _)
-
-    def freq[U](p: T => U) = (value.toSeq groupBy p mapValues (_.size)).toList sortBy (-_._2) map (_.swap)
-    def ppfreq[U](p: T => U): Unit = freq(p) foreach { case (count, key) => println("%5d %s".format(count, key)) }
-
-    def |[U](f: Seq[T] => Seq[U]): Seq[U]        = f(value)
-    def ^^[U](f: T => U): Seq[U]                 = value map f
-    def ^?[U](pf: PartialFunction[T, U]): Seq[U] = value collect pf
-
-    def >>!(implicit ord: Ordering[T]): Unit     = pp(_.sorted.distinct)
-    def >>(implicit ord: Ordering[T]): Unit      = pp(_.sorted)
-    def >!(): Unit                               = pp(_.distinct)
-    def >(): Unit                                = pp(identity)
-
-    def >#(): Unit                               = this ># (identity[T] _)
-    def >#[U](p: T => U): Unit                   = this ppfreq p
-
-    def >?(p: T => Boolean): Unit                = pp(_ filter p)
-    def >?(s: String): Unit                      = pp(_ filter (_.toString contains s))
-    def >?(r: Regex): Unit                       = pp(_ filter (_.toString matches fixRegex(r)))
-
-    private def fixRegex(r: scala.util.matching.Regex): String = {
-      val s = r.pattern.toString
-      val prefix = if (s startsWith "^") "" else """^.*?"""
-      val suffix = if (s endsWith "$") "" else """.*$"""
-
-      prefix + s + suffix
-    }
-  }
-
-  class MultiPrettifierClass[T: Prettifier](val value: Seq[T]) extends PrettifierClass[T]() { }
-  class SinglePrettifierClass[T: Prettifier](single: T) extends PrettifierClass[T]() {
-    val value = List(single)
-  }
-
-  class RichReplString(s: String) {
-    // make a URL out of the string
-    def u: URL = (
-      if (s contains ":") new URL(s)
-      else if (new JFile(s) exists) new JFile(s).toURI.toURL
-      else new URL("http://" + s)
-    )
-  }
-  class RichInputStream(in: InputStream)(implicit codec: Codec) {
-    def bytes(): Array[Byte]  = io.Streamable.bytes(in)
-    def slurp(): String       = io.Streamable.slurp(in)
-    def <<(): String          = slurp()
-  }
-  class RichReplURL(url: URL)(implicit codec: Codec) {
-    def slurp(): String = io.Streamable.slurp(url)
-  }
-  class RichSymbolList(syms: List[Symbol]) {
-    def sigs  = syms map (_.defString)
-    def infos = syms map (_.info)
-  }
-
-  trait Implicits1 {
-    // fallback
-    implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) =
-      new SinglePrettifierClass[T](x)
-
-    implicit def liftToTypeName(s: String): TypeName = newTypeName(s)
-  }
-  trait Implicits2 extends Implicits1 {
-    class RichSymbol(sym: Symbol) {
-      // convenient type application
-      def apply(targs: Type*): Type = typeRef(NoPrefix, sym, targs.toList)
-    }
-    object symbolSubtypeOrdering extends Ordering[Symbol] {
-      def compare(s1: Symbol, s2: Symbol) =
-        if (s1 eq s2) 0
-        else if (s1 isLess s2) -1
-        else 1
-    }
-    implicit lazy val powerSymbolOrdering: Ordering[Symbol] = Ordering[Name] on (_.name)
-    implicit lazy val powerTypeOrdering: Ordering[Type]     = Ordering[Symbol] on (_.typeSymbol)
-
-    implicit def replInternalInfo[T: ru.TypeTag : ClassTag](x: T): InternalInfoWrapper[T] = new InternalInfoWrapper[T](Some(x))
-    implicit def replEnhancedStrings(s: String): RichReplString = new RichReplString(s)
-    implicit def replMultiPrinting[T: Prettifier](xs: TraversableOnce[T]): MultiPrettifierClass[T] =
-      new MultiPrettifierClass[T](xs.toSeq)
-    implicit def replPrettifier[T] : Prettifier[T] = Prettifier.default[T]
-    implicit def replTypeApplication(sym: Symbol): RichSymbol = new RichSymbol(sym)
-
-    implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in)
-    implicit def replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec)
-
-    implicit def liftToTermName(s: String): TermName = newTermName(s)
-    implicit def replListOfSymbols(xs: List[Symbol]) = new RichSymbolList(xs)
-  }
-
-  trait ReplUtilities {
-    // [Eugene to Paul] needs review!
-    // def module[T: Manifest] = getModuleIfDefined(manifest[T].erasure.getName stripSuffix nme.MODULE_SUFFIX_STRING)
-    // def clazz[T: Manifest] = getClassIfDefined(manifest[T].erasure.getName)
-    def module[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isPackage)
-    def clazz[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isClass)
-    def info[T: ru.TypeTag : ClassTag] = InternalInfo[T]
-    def ?[T: ru.TypeTag : ClassTag] = InternalInfo[T]
-    def url(s: String) = {
-      try new URL(s)
-      catch { case _: MalformedURLException =>
-        if (Path(s).exists) Path(s).toURL
-        else new URL("http://" + s)
-      }
-    }
-    def sanitize(s: String): String = sanitize(s.getBytes())
-    def sanitize(s: Array[Byte]): String = (s map {
-      case x if x.toChar.isControl  => '?'
-      case x                        => x.toChar
-    }).mkString
-
-    def strings(s: Seq[Byte]): List[String] = {
-      if (s.length == 0) Nil
-      else s dropWhile (_.toChar.isControl) span (x => !x.toChar.isControl) match {
-        case (next, rest) => next.map(_.toChar).mkString :: strings(rest)
-      }
-    }
-  }
-
-  lazy val rutil: ReplUtilities = new ReplUtilities { }
-  lazy val phased: Phased       = new { val global: intp.global.type = intp.global } with Phased { }
-
-  def context(code: String)    = analyzer.rootContext(unit(code))
-  def source(code: String)     = newSourceFile(code)
-  def unit(code: String)       = newCompilationUnit(code)
-  def trees(code: String)      = parse(code) getOrElse Nil
-  def typeOf(id: String)       = intp.typeOfExpression(id)
-
-  override def toString = """
-    |** Power mode status **
-    |Default phase: %s
-    |Names: %s
-    |Identifiers: %s
-  """.stripMargin.format(
-      phased.get,
-      intp.allDefinedNames mkString " ",
-      intp.unqualifiedIds mkString " "
-    )
-}
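
For orientation while reviewing the deletion above: the freq/ppfreq helpers in PrettifierClass are
just a group-count-sort pipeline. A minimal, self-contained sketch of that counting logic follows;
the object name and sample data are illustrative and not part of the patch.

    // Hedged sketch of the frequency helper (`freq`/`ppfreq`) from the removed
    // PrettifierClass: group a sequence by a key, count each group, and list
    // (count, key) pairs from most to least frequent.
    object FreqSketch {
      def freq[T, U](xs: Seq[T])(p: T => U): List[(Int, U)] =
        xs.groupBy(p).mapValues(_.size).toList.sortBy(-_._2).map(_.swap)

      def main(args: Array[String]): Unit = {
        val words = Seq("a", "b", "a", "c", "a", "b")
        freq(words)(identity) foreach { case (count, key) => println("%5d %s".format(count, key)) }
        // prints 3 a, then 2 b, then 1 c
      }
    }
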
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala b/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
deleted file mode 100644
index 7cd0f43..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.util.control.ControlThrowable
-import util.Exceptional.unwrap
-import util.stackTraceString
-
-trait ReplConfig {
-  lazy val replProps = new ReplProps
-
-  class TapMaker[T](x: T) {
-    def tapInfo(msg: => String): T  = tap(x => replinfo(parens(x)))
-    def tapDebug(msg: => String): T = tap(x => repldbg(parens(x)))
-    def tapTrace(msg: => String): T = tap(x => repltrace(parens(x)))
-    def tap[U](f: T => U): T = {
-      f(x)
-      x
-    }
-  }
-
-  private def parens(x: Any) = "(" + x + ")"
-  private def echo(msg: => String) =
-    try Console println msg
-    catch { case x: AssertionError => Console.println("Assertion error printing debugging output: " + x) }
-
-  private[nsc] def repldbgex(ex: Throwable): Unit = {
-    if (isReplDebug) {
-      echo("Caught/suppressing: " + ex)
-      ex.printStackTrace
-    }
-  }
-  private[nsc] def repldbg(msg: => String)    = if (isReplDebug) echo(msg)
-  private[nsc] def repltrace(msg: => String)  = if (isReplTrace) echo(msg)
-  private[nsc] def replinfo(msg: => String)   = if (isReplInfo)  echo(msg)
-
-  private[nsc] def logAndDiscard[T](label: String, alt: => T): PartialFunction[Throwable, T] = {
-    case t: ControlThrowable => throw t
-    case t: Throwable        =>
-      repldbg(label + ": " + unwrap(t))
-      repltrace(stackTraceString(unwrap(t)))
-      alt
-  }
-  private[nsc] def substituteAndLog[T](alt: => T)(body: => T): T =
-    substituteAndLog("" + alt, alt)(body)
-  private[nsc] def substituteAndLog[T](label: String, alt: => T)(body: => T): T = {
-    try body
-    catch logAndDiscard(label, alt)
-  }
-  private[nsc] def squashAndLog(label: String)(body: => Unit): Unit =
-    substituteAndLog(label, ())(body)
-
-  def isReplTrace: Boolean = replProps.trace
-  def isReplDebug: Boolean = replProps.debug || isReplTrace
-  def isReplInfo: Boolean  = replProps.info || isReplDebug
-  def isReplPower: Boolean = replProps.power
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala
deleted file mode 100644
index 7c698a2..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import reporters._
-import typechecker.Analyzer
-
-/** A layer on top of Global so I can guarantee some extra
- *  functionality for the repl.  It doesn't do much yet.
- */
-trait ReplGlobal extends Global {
-  // This exists mostly because using the reporter too early leads to deadlock.
-  private def echo(msg: String) { Console println msg }
-
-  override def abort(msg: String): Nothing = {
-    echo("ReplGlobal.abort: " + msg)
-    super.abort(msg)
-  }
-
-  override lazy val analyzer = new {
-    val global: ReplGlobal.this.type = ReplGlobal.this
-  } with Analyzer {
-    override def newTyper(context: Context): Typer = new Typer(context) {
-      override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
-        val res = super.typed(tree, mode, pt)
-        tree match {
-          case Ident(name) if !tree.symbol.hasPackageFlag && !name.toString.startsWith("$") =>
-            repldbg("typed %s: %s".format(name, res.tpe))
-          case _ =>
-        }
-        res
-      }
-    }
-  }
-
-  object replPhase extends SubComponent {
-    val global: ReplGlobal.this.type = ReplGlobal.this
-    val phaseName = "repl"
-    val runsAfter = List[String]("typer")
-    val runsRightAfter = None
-    def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) {
-      def apply(unit: CompilationUnit) {
-        repldbg("Running replPhase on " + unit.body)
-        // newNamer(rootContext(unit)).enterSym(unit.body)
-      }
-    }
-  }
-
-  override protected def computePhaseDescriptors: List[SubComponent] = {
-    addToPhasesSet(replPhase, "repl")
-    super.computePhaseDescriptors
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala b/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala
deleted file mode 100644
index bc3e7a1..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.sys._
-import Prop._
-
-class ReplProps {
-  private def bool(name: String) = BooleanProp.keyExists(name)
-  private def int(name: String) = IntProp(name)
-
-  val jlineDebug = bool("scala.tools.jline.internal.Log.debug")
-  val jlineTrace = bool("scala.tools.jline.internal.Log.trace")
-
-  val info  = bool("scala.repl.info")
-  val debug = bool("scala.repl.debug")
-  val trace = bool("scala.repl.trace")
-  val power = bool("scala.repl.power")
-
-  val replInitCode    = Prop[JFile]("scala.repl.initcode")
-  val replAutorunCode = Prop[JFile]("scala.repl.autoruncode")
-  val powerInitCode   = Prop[JFile]("scala.repl.power.initcode")
-  val powerBanner     = Prop[JFile]("scala.repl.power.banner")
-
-  val vids = bool("scala.repl.vids")
-  val maxPrintString = int("scala.repl.maxprintstring")
-}
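
The property names in the removed ReplProps above are ordinary JVM system properties, so they can be
set with -D flags or programmatically before the interpreter starts. A hedged sketch; only the
property keys come from the file, the surrounding object is illustrative.

    // Hedged sketch: flipping the repl switches defined in the removed ReplProps.
    // Equivalent to passing -Dscala.repl.power=true -Dscala.repl.maxprintstring=1000.
    object ReplPropsSketch {
      def main(args: Array[String]): Unit = {
        sys.props("scala.repl.power")          = "true"
        sys.props("scala.repl.maxprintstring") = "1000"
        println(sys.props.get("scala.repl.power"))   // Some(true)
      }
    }
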
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala b/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala
deleted file mode 100644
index f8ecc6c..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.collection.{ mutable, immutable }
-import scala.PartialFunction.cond
-import scala.reflect.internal.Chars
-
-trait ReplStrings {
-  /** Convert a string into code that can recreate the string.
-   *  This requires replacing all special characters by escape
-   *  codes. It does not add the surrounding " marks.  */
-  def string2code(str: String): String = {
-    val res = new StringBuilder
-    for (c <- str) c match {
-      case '"' | '\'' | '\\'  => res += '\\' ; res += c
-      case _ if c.isControl   => res ++= Chars.char2uescape(c)
-      case _                  => res += c
-    }
-    res.toString
-  }
-
-  def string2codeQuoted(str: String) =
-    "\"" + string2code(str) + "\""
-
-  def any2stringOf(x: Any, maxlen: Int) =
-    "scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen)
-
-  def words(s: String) = s.trim split "\\s+" filterNot (_ == "") toList
-  def isQuoted(s: String) = (s.length >= 2) && (s.head == s.last) && ("\"'" contains s.head)
-}
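
string2code above exists so a string value can be re-embedded in generated source. Below is a
hedged, self-contained sketch of the same escaping rule for quotes and backslashes; it omits the
control-character escaping that the removed code delegates to Chars.char2uescape.

    // Hedged sketch of ReplStrings.string2code / string2codeQuoted: escape quote
    // and backslash characters so the result can be pasted back into source.
    object ReplStringsSketch {
      def string2code(str: String): String =
        str flatMap {
          case c @ ('"' | '\'' | '\\') => "\\" + c
          case c                       => c.toString
        }
      def string2codeQuoted(str: String) = "\"" + string2code(str) + "\""

      def main(args: Array[String]): Unit =
        println(string2codeQuoted("say \"hi\""))   // prints "say \"hi\""
    }
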
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
deleted file mode 100644
index 53478bd..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.language.implicitConversions
-import scala.reflect.api.{Universe => ApiUniverse}
-import scala.reflect.runtime.{universe => ru}
-
-/** A class which the repl utilizes to expose predefined objects.
- *  The base implementation is empty; the standard repl implementation
- *  is StdReplVals.
- */
-abstract class ReplVals { }
-
-class StdReplVals(final val r: ILoop) extends ReplVals {
-  final lazy val repl                     = r
-  final lazy val intp                     = r.intp
-  final lazy val power                    = r.power
-  final lazy val reader                   = r.in
-  final lazy val vals                     = this
-  final lazy val global: intp.global.type = intp.global
-  final lazy val isettings                = intp.isettings
-  final lazy val completion               = reader.completion
-  final lazy val history                  = reader.history
-  final lazy val phased                   = power.phased
-  final lazy val analyzer                 = global.analyzer
-
-  object treedsl extends { val global: intp.global.type = intp.global } with ast.TreeDSL { }
-
-  final lazy val typer = analyzer.newTyper(
-    analyzer.rootContext(
-      power.unit("").asInstanceOf[analyzer.global.CompilationUnit]
-    )
-  )
-  def lastRequest = intp.lastRequest
-
-  class ReplImplicits extends power.Implicits2 {
-    import intp.global._
-
-    private val tagFn = ReplVals.mkCompilerTypeFromTag[intp.global.type](global)
-    implicit def mkCompilerTypeFromTag(sym: Symbol) = tagFn(sym)
-  }
-
-  final lazy val replImplicits = new ReplImplicits
-
-  def typed[T <: analyzer.global.Tree](tree: T): T = typer.typed(tree).asInstanceOf[T]
-}
-
-object ReplVals {
-  /** Latest attempt to work around the challenge of foo.global.Type
-   *  not being seen as the same type as bar.global.Type even though
-   *  the globals are the same.  Dependent method types to the rescue.
-   */
-  def mkCompilerTypeFromTag[T <: Global](global: T) = {
-    import global._
-    import definitions._
-
-    /** We can't use definitions.compilerTypeFromTag directly because we're passing
-     *  it to map and the compiler refuses to perform eta expansion on a method
-     *  with a dependent return type.  (Can this be relaxed?) To get around this
-     *  I have this forwarder which widens the type and then casts the result back
-     *  to the dependent type.
-     */
-    def compilerTypeFromTag(t: ApiUniverse # WeakTypeTag[_]): Global#Type =
-      definitions.compilerTypeFromTag(t)
-
-    class AppliedTypeFromTags(sym: Symbol) {
-      def apply[M](implicit m1: ru.TypeTag[M]): Type =
-        if (sym eq NoSymbol) NoType
-        else appliedType(sym, compilerTypeFromTag(m1).asInstanceOf[Type])
-
-      def apply[M1, M2](implicit m1: ru.TypeTag[M1], m2: ru.TypeTag[M2]): Type =
-        if (sym eq NoSymbol) NoType
-        else appliedType(sym, compilerTypeFromTag(m1).asInstanceOf[Type], compilerTypeFromTag(m2).asInstanceOf[Type])
-    }
-
-    (sym: Symbol) => new AppliedTypeFromTags(sym)
-  }
-}
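
The comment in mkCompilerTypeFromTag above concerns eta expansion and dependent result types. The
following is a hedged, self-contained illustration of the same widen-then-cast workaround; the
Container/Inner classes are invented for the example and are not taken from the compiler.

    // Hedged illustration of the widen-then-cast trick described above: a method
    // whose result type mentions a specific prefix is awkward to pass to `map`,
    // so a forwarder returns the widened type and callers cast back.
    object DependentTypeSketch {
      class Container { class Inner; def make(): Inner = new Inner }
      val c = new Container

      // widened forwarder: the result type no longer depends on `c`
      def makeWidened(n: Int): Container#Inner = c.make()

      def main(args: Array[String]): Unit = {
        val made: List[c.Inner] = (1 to 3).toList map makeWidened map (_.asInstanceOf[c.Inner])
        println(made.size)   // 3
      }
    }
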
diff --git a/src/compiler/scala/tools/nsc/interpreter/Results.scala b/src/compiler/scala/tools/nsc/interpreter/Results.scala
deleted file mode 100644
index e400906..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/Results.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Martin Odersky
- */
-
-package scala.tools.nsc
-package interpreter
-
-object Results {
-  /** A result from the Interpreter interpreting one line of input. */
-  abstract sealed class Result
-
-  /** The line was interpreted successfully. */
-  case object Success extends Result
-
-  /** The line was erroneous in some way. */
-  case object Error extends Result
-
-  /** The input was incomplete.  The caller should request more input.
-   */
-  case object Incomplete extends Result
-}
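
Callers of the interpreter pattern match on this small sealed hierarchy. A hedged usage sketch
follows; only the three Result values come from the file above, the surrounding object and the way
`intp` is obtained are assumptions, and scala-compiler must be on the classpath.

    // Hedged sketch: reacting to the three Result values defined above.
    import scala.tools.nsc.interpreter.{ IMain, Results }

    object ResultsSketch {
      def run(intp: IMain, line: String): Unit =
        intp.interpret(line) match {
          case Results.Success    => ()                               // line compiled and ran
          case Results.Error      => Console.err.println("error in: " + line)
          case Results.Incomplete => println("need more input for: " + line)
        }
    }
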
diff --git a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala b/src/compiler/scala/tools/nsc/interpreter/RichClass.scala
deleted file mode 100644
index 4371f7f..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.reflect.{ ClassTag, classTag }
-
-class RichClass[T](val clazz: Class[T]) {
-  def toTag: ClassTag[T] = ClassTag[T](clazz)
-  def toTypeString: String = TypeStrings.fromClazz(clazz)
-
-  // Sadly isAnonymousClass does not return true for scala anonymous
-  // classes because our naming scheme is not doing well against the
-  // jvm's many assumptions.
-  def isScalaAnonymous = (
-    try clazz.isAnonymousClass || (clazz.getName contains "$anon$")
-    catch { case _: java.lang.InternalError => false }  // good ol' "Malformed class name"
-  )
-
-  /** It's not easy... to be... me... */
-  def supermans: List[ClassTag[_]] = supers map (_.toTag)
-  def superNames: List[String]    = supers map (_.getName)
-  def interfaces: List[JClass]    = supers filter (_.isInterface)
-
-  def hasAncestorName(f: String => Boolean) = superNames exists f
-  def hasAncestor(f: JClass => Boolean) = supers exists f
-  def hasAncestorInPackage(pkg: String) = hasAncestorName(_ startsWith (pkg + "."))
-
-  def supers: List[JClass] = {
-    def loop(x: JClass): List[JClass] = x.getSuperclass match {
-      case null   => List(x)
-      case sc     => x :: (x.getInterfaces.toList flatMap loop) ++ loop(sc)
-    }
-    loop(clazz).distinct
-  }
-}
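
The interesting part of the removed RichClass is `supers`, which walks superclasses and interfaces
into one de-duplicated list. The same traversal as a standalone sketch; the object name and the
sample class are illustrative.

    // Hedged sketch of RichClass.supers: a class, its interfaces (recursively),
    // and its superclasses, flattened and de-duplicated.
    object SupersSketch {
      def supers(clazz: Class[_]): List[Class[_]] = {
        def loop(x: Class[_]): List[Class[_]] = x.getSuperclass match {
          case null => List(x)
          case sc   => x :: (x.getInterfaces.toList flatMap loop) ++ loop(sc)
        }
        loop(clazz).distinct
      }
      def main(args: Array[String]): Unit =
        supers(classOf[java.util.ArrayList[_]]) map (_.getName) foreach println
    }
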
diff --git a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
deleted file mode 100644
index bccd815..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stepan Koltsov
- */
-
-package scala.tools.nsc
-package interpreter
-
-import java.io.{ BufferedReader }
-import session.NoHistory
-
-/** Reads using standard JDK API */
-class SimpleReader(
-  in: BufferedReader,
-  out: JPrintWriter,
-  val interactive: Boolean)
-extends InteractiveReader
-{
-  val history = NoHistory
-  val completion = NoCompletion
-
-  def init() = ()
-  def reset() = ()
-  def eraseLine() = ()
-  def redrawLine() = ()
-  def currentLine = ""
-  def readOneLine(prompt: String): String = {
-    if (interactive) {
-      out.print(prompt)
-      out.flush()
-    }
-    in.readLine()
-  }
-  def readOneKey(prompt: String)  = sys.error("No char-based input in SimpleReader")
-}
-
-object SimpleReader {
-  def defaultIn  = Console.in
-  def defaultOut = new JPrintWriter(Console.out)
-
-  def apply(in: BufferedReader = defaultIn, out: JPrintWriter = defaultOut, interactive: Boolean = true): SimpleReader =
-    new SimpleReader(in, out, interactive)
-}
\ No newline at end of file
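
SimpleReader is the no-frills fallback: print the prompt if interactive, then read a line. The same
behaviour as a self-contained sketch over plain java.io; the names are illustrative.

    // Hedged sketch of what the removed SimpleReader.readOneLine does:
    // optionally print a prompt, flush, then block on a BufferedReader.
    import java.io.{ BufferedReader, InputStreamReader, PrintWriter }

    object SimpleReaderSketch {
      def readOneLine(in: BufferedReader, out: PrintWriter, interactive: Boolean, prompt: String): String = {
        if (interactive) { out.print(prompt); out.flush() }
        in.readLine()
      }
      def main(args: Array[String]): Unit = {
        val in  = new BufferedReader(new InputStreamReader(System.in))
        val out = new PrintWriter(System.out)
        println("you typed: " + readOneLine(in, out, interactive = true, "echo> "))
      }
    }
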
diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
deleted file mode 100644
index 60399f5..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
+++ /dev/null
@@ -1,263 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import java.lang.{ reflect => r }
-import r.TypeVariable
-import scala.reflect.NameTransformer
-import NameTransformer._
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.{ClassTag, classTag}
-import typechecker.DestructureTypes
-import scala.reflect.internal.util.StringOps.ojoin
-import scala.language.implicitConversions
-
-/** A more principled system for turning types into strings.
- */
-trait StructuredTypeStrings extends DestructureTypes {
-  val global: Global
-  import global._
-  import definitions._
-
-  case class LabelAndType(label: String, typeName: String) { }
-  object LabelAndType {
-    val empty = LabelAndType("", "")
-  }
-  case class Grouping(ldelim: String, mdelim: String, rdelim: String, labels: Boolean) {
-    def join(elems: String*): String = (
-      if (elems.isEmpty) ""
-      else elems.mkString(ldelim, mdelim, rdelim)
-    )
-  }
-  val NoGrouping      = Grouping("", "", "", false)
-  val ListGrouping    = Grouping("(", ", ", ")", false)
-  val ProductGrouping = Grouping("(", ", ", ")", true)
-  val ParamGrouping   = Grouping("(", ", ", ")", true)
-  val BlockGrouping   = Grouping(" { ", "; ", "}", false)
-
-  private implicit def lowerName(n: Name): String = "" + n
-  private def str(level: Int)(body: => String): String = "  " * level + body
-  private def block(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = {
-    val l1 = str(level)(name + grouping.ldelim)
-    val l2 = nodes.map(_ show level + 1)
-    val l3 = str(level)(grouping.rdelim)
-
-    l1 +: l2 :+ l3 mkString "\n"
-  }
-  private def maybeBlock(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = {
-    import grouping._
-    val threshold = 70
-
-    val try1 = str(level)(name + grouping.join(nodes map (_.show(0, grouping.labels)): _*))
-    if (try1.length < threshold) try1
-    else block(level, grouping)(name, nodes)
-  }
-  private def shortClass(x: Any) = {
-    if (opt.debug) {
-      val name   = (x.getClass.getName split '.').last
-      val isAnon = name.reverse takeWhile (_ != '$') forall (_.isDigit)
-      val str    = if (isAnon) name else (name split '$').last
-
-      " // " + str
-    }
-    else ""
-  }
-
-  sealed abstract class TypeNode {
-    def grouping: Grouping
-    def nodes: List[TypeNode]
-
-    def show(indent: Int, showLabel: Boolean): String = maybeBlock(indent, grouping)(mkPrefix(showLabel), nodes)
-    def show(indent: Int): String = show(indent, true)
-    def show(): String = show(0)
-
-    def withLabel(l: String): this.type = modifyNameInfo(_.copy(label = l))
-    def withType(t: String): this.type  = modifyNameInfo(_.copy(typeName = t))
-
-    def label       = nameInfo.label
-    def typeName    = nameInfo.typeName
-
-    protected def mkPrefix(showLabel: Boolean) = {
-      val pre = if (showLabel && label != "") label + " = " else ""
-      pre + typeName
-    }
-    override def toString = show() // + "(toString)"
-    private var nameInfo: LabelAndType = LabelAndType.empty
-    private def modifyNameInfo(f: LabelAndType => LabelAndType): this.type = {
-      nameInfo = f(nameInfo)
-      this
-    }
-  }
-  case class TypeAtom[T](atom: T) extends TypeNode {
-    def grouping = NoGrouping
-    def nodes = Nil
-    override protected def mkPrefix(showLabel: Boolean) =
-      super.mkPrefix(showLabel) + atom + shortClass(atom)
-  }
-  case class TypeProduct(nodes: List[TypeNode]) extends TypeNode {
-    def grouping: Grouping = ProductGrouping
-    def emptyTypeName = ""
-    override def typeName = if (nodes.isEmpty) emptyTypeName else super.typeName
-  }
-
-  /** For a NullaryMethod, in = TypeEmpty; for MethodType(Nil, _) in = TypeNil */
-  class NullaryFunction(out: TypeNode) extends TypeProduct(List(out)) {
-    override def typeName = "NullaryMethodType"
-  }
-  class MonoFunction(in: TypeNode, out: TypeNode) extends TypeProduct(List(in, out)) {
-    override def typeName = "MethodType"
-  }
-  class PolyFunction(in: TypeNode, out: TypeNode) extends TypeProduct(List(in, out)) {
-    override def typeName = "PolyType"
-  }
-
-  class TypeList(nodes: List[TypeNode]) extends TypeProduct(nodes) {
-    override def grouping = ListGrouping
-    override def emptyTypeName = "Nil"
-    override def typeName = "List"
-  }
-  class TypeScope(nodes: List[TypeNode]) extends TypeProduct(nodes) {
-    override def grouping = BlockGrouping
-    override def typeName = "Scope"
-    override def emptyTypeName = "EmptyScope"
-  }
-
-  object TypeEmpty extends TypeNode {
-    override def grouping = NoGrouping
-    override def nodes = Nil
-    override def label = ""
-    override def typeName = ""
-    override def show(indent: Int, showLabel: Boolean) = ""
-  }
-
-  object intoNodes extends DestructureType[TypeNode] {
-    def withLabel(node: TypeNode, label: String): TypeNode   = node withLabel label
-    def withType(node: TypeNode, typeName: String): TypeNode = node withType typeName
-
-    def wrapEmpty                             = TypeEmpty
-    def wrapSequence(nodes: List[TypeNode])   = new TypeList(nodes)
-    def wrapProduct(nodes: List[TypeNode])    = new TypeProduct(nodes)
-    def wrapPoly(in: TypeNode, out: TypeNode) = new PolyFunction(in, out)
-    def wrapMono(in: TypeNode, out: TypeNode) = if (in == wrapEmpty) new NullaryFunction(out) else new MonoFunction(in, out)
-    def wrapAtom[U](value: U)                 = new TypeAtom(value)
-  }
-
-  def show(tp: Type): String = intoNodes(tp).show
-}
-
-
-/** Logic for turning a type into a String.  The goal is to be
- *  able to take some arbitrary object 'x' and obtain the most precise
- *  String for which an injection of x.asInstanceOf[String] will
- *  be valid from both the JVM's and scala's perspectives.
- *
- *  "definition" is when you want strings like
- */
-trait TypeStrings {
-  private val ObjectClass = classOf[java.lang.Object]
-  private val primitives = Set[String]("byte", "char", "short", "int", "long", "float", "double", "boolean", "void")
-  private val primitiveMap = primitives.toList map { x =>
-    val key = x match {
-      case "void" => "Void"
-      case "int"  => "Integer"
-      case "char" => "Character"
-      case s      => s.capitalize
-    }
-    val value = x match {
-      case "void" => "Unit"
-      case s      => s.capitalize
-    }
-
-    ("java.lang." + key) -> ("scala." + value)
-  } toMap
-
-  def scalaName(s: String): String = {
-    if (s endsWith MODULE_SUFFIX_STRING) s.init + ".type"
-    else if (s == "void") "scala.Unit"
-    else if (primitives(s)) "scala." + s.capitalize
-    else primitiveMap.getOrElse(s, NameTransformer.decode(s))
-  }
-  // Trying to put humpty dumpty back together again.
-  def scalaName(clazz: JClass): String = {
-    val name      = clazz.getName
-    val isAnon    = clazz.isScalaAnonymous
-    val enclClass = clazz.getEnclosingClass
-    def enclPre   = enclClass.getName + MODULE_SUFFIX_STRING
-    def enclMatch = name startsWith enclPre
-
-    scalaName(
-      if (enclClass == null || isAnon || !enclMatch) name
-      else enclClass.getName + "." + (name stripPrefix enclPre)
-    )
-  }
-  def scalaName(ct: ClassTag[_]): String = scalaName(ct.runtimeClass)
-  def anyClass(x: Any): JClass          = if (x == null) null else x.getClass
-
-  private def brackets(tps: String*): String =
-    if (tps.isEmpty) ""
-    else tps.mkString("[", ", ", "]")
-
-  private def tvarString(tvar: TypeVariable[_]): String = tvarString(tvar.getBounds.toList)
-  private def tvarString(bounds: List[AnyRef]): String = {
-    val xs = bounds filterNot (_ == ObjectClass) collect { case x: JClass => x }
-    if (xs.isEmpty) "_"
-    else scalaName(xs.head)
-  }
-  private def tparamString(clazz: JClass): String = {
-    brackets(clazz.getTypeParameters map tvarString: _*)
-  }
-
-  private def tparamString[T: ru.TypeTag] : String = {
-    def typeArguments: List[ru.Type] = {
-      import ru.TypeRefTag // otherwise the pattern match will be unchecked
-                           // because TypeRef is an abstract type
-      ru.typeOf[T] match { case ru.TypeRef(_, _, args) => args; case _ => Nil }
-    }
-    // [Eugene to Paul] need to use not the `rootMirror`, but a mirror with the REPL's classloader
-    // how do I get to it? acquiring context classloader seems unreliable because of multithreading
-    def typeVariables: List[java.lang.Class[_]] = typeArguments map (targ => ru.rootMirror.runtimeClass(targ))
-    brackets(typeArguments map (jc => tvarString(List(jc))): _*)
-  }
-
-  /** Going for an overabundance of caution right now.  Later these types
-   *  can be a lot more precise, but right now the tags have a habit of
-   *  introducing material which is not syntactically valid as scala source.
-   *  When this happens it breaks the repl.  It would be nice if we mandated
-   *  that tag toString methods (or some other method, since it's bad
-   *  practice to rely on toString for correctness) generated the VALID string
-   *  representation of the type.
-   */
-  def fromTypedValue[T: ru.TypeTag : ClassTag](x: T): String = fromTag[T]
-  def fromValue(value: Any): String                          = if (value == null) "Null" else fromClazz(anyClass(value))
-  def fromClazz(clazz: JClass): String                       = scalaName(clazz) + tparamString(clazz)
-  def fromTag[T: ru.TypeTag : ClassTag] : String             = scalaName(classTag[T].runtimeClass) + tparamString[T]
-
-  /** Reducing fully qualified noise for some common packages.
-   */
-  def quieter(tpe: String, alsoStrip: String*): String = {
-    val transforms = List(
-      "scala.collection.immutable." -> "immutable.",
-      "scala.collection.mutable." -> "mutable.",
-      "scala.collection.generic." -> "generic.",
-      "java.lang." -> "jl.",
-      "scala.runtime." -> "runtime."
-    ) ++ (alsoStrip map (_ -> ""))
-
-    transforms.foldLeft(tpe) {
-      case (res, (k, v)) => res.replaceAll(k, v)
-    }
-  }
-
-  val typeTransforms = List(
-    "java.lang." -> "",
-    "scala.collection.immutable." -> "immutable.",
-    "scala.collection.mutable." -> "mutable.",
-    "scala.collection.generic." -> "generic."
-  )
-}
-
-object TypeStrings extends TypeStrings { }
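
quieter above is a fold of prefix rewrites over an already-rendered type string. A hedged standalone
sketch of the same fold follows; it uses literal replace rather than the replaceAll in the removed
code and only a subset of the transform pairs.

    // Hedged sketch of TypeStrings.quieter: shorten common package prefixes
    // in a rendered type string by folding (from -> to) rewrites over it.
    object QuieterSketch {
      private val transforms = List(
        "scala.collection.immutable." -> "immutable.",
        "scala.collection.mutable."   -> "mutable.",
        "java.lang."                  -> "jl."
      )
      def quieter(tpe: String): String =
        transforms.foldLeft(tpe) { case (res, (k, v)) => res.replace(k, v) }

      def main(args: Array[String]): Unit = {
        val in = "scala.collection.immutable.Map[java.lang.String, scala.collection.mutable.Buffer[Int]]"
        println(quieter(in))   // immutable.Map[jl.String, mutable.Buffer[Int]]
      }
    }
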
diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala
deleted file mode 100644
index e3440c9..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/package.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-
-import scala.language.implicitConversions
-
-/** The main REPL related classes and values are as follows.
- *  In addition to standard compiler classes Global and Settings, there are:
- *
- *  History: an interface for session history.
- *  Completion: an interface for tab completion.
- *  ILoop (formerly InterpreterLoop): The umbrella class for a session.
- *  IMain (formerly Interpreter): Handles the evolving state of the session
- *    and handles submitting code to the compiler and handling the output.
- *  InteractiveReader: how ILoop obtains input.
- *  History: an interface for session history.
- *  Completion: an interface for tab completion.
- *  Power: a repository for more advanced/experimental features.
- *
- *  ILoop contains { in: InteractiveReader, intp: IMain, settings: Settings, power: Power }
- *  InteractiveReader contains { history: History, completion: Completion }
- *  IMain contains { global: Global }
- */
-package object interpreter extends ReplConfig with ReplStrings {
-  type JFile          = java.io.File
-  type JClass         = java.lang.Class[_]
-  type JList[T]       = java.util.List[T]
-  type JCollection[T] = java.util.Collection[T]
-  type JPrintWriter   = java.io.PrintWriter
-  type InputStream    = java.io.InputStream
-  type OutputStream   = java.io.OutputStream
-
-  val IR = Results
-
-  implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
-
-  private[interpreter] implicit def javaCharSeqCollectionToScala(xs: JCollection[_ <: CharSequence]): List[String] = {
-    import scala.collection.JavaConverters._
-    xs.asScala.toList map ("" + _)
-  }
-
-  private[nsc] implicit def enrichClass[T](clazz: Class[T]) = new RichClass[T](clazz)
-  private[nsc] implicit def enrichAnyRefWithTap[T](x: T) = new TapMaker(x)
-  private[nsc] def tracing[T](msg: String)(x: T): T = x.tapTrace(msg)
-  private[nsc] def debugging[T](msg: String)(x: T) = x.tapDebug(msg)
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/History.scala b/src/compiler/scala/tools/nsc/interpreter/session/History.scala
deleted file mode 100644
index daa05b8..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/session/History.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-package session
-
-/** An implementation-agnostic history interface which makes no
- *  reference to the jline classes.  Very sparse right now.
- */
-trait History {
-  def asStrings: List[String]
-  def index: Int
-  def size: Int
-  def grep(s: String): List[String]
-}
-object NoHistory extends History {
-  def asStrings       = Nil
-  def grep(s: String) = Nil
-  def index           = 0
-  def size            = 0
-}
-
-object History {
-  def empty: History = NoHistory
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala
deleted file mode 100644
index 9f4e2b9..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-package session
-
-import scala.collection.mutable.{ Buffer, ListBuffer }
-import scala.collection.JavaConverters._
-
-class SimpleHistory extends JLineHistory {
-  private var _index: Int = 0
-  private val buf: Buffer[String] = new ListBuffer[String]
-  private def toEntries(): Seq[JEntry] = buf.zipWithIndex map { case (x, i) => Entry(i, x) }
-  private def setTo(num: Int)          = { _index = num ; true }
-  private def minusOne                 = { _index -= 1 ; true }
-  private def plusOne                  = { _index += 1 ; true }
-  private def lastIndex                = size - 1
-  private def fail(msg: String): String = {
-    repldbg("Internal error in history(size %d, index %d): %s".format(
-      size, index, msg)
-    )
-    ""
-  }
-
-  case class Entry(index: Int, value: CharSequence) extends JEntry {
-    override def toString = value
-  }
-
-  def maxSize: Int = 2500
-  def last = if (isEmpty) fail("last") else buf.last
-
-  def size = buf.size
-  def index = _index
-  def isEmpty = buf.isEmpty
-  def clear() = buf.clear()
-  def get(idx: Int): CharSequence = buf(idx)
-  def add(item: CharSequence): Unit = buf += item
-  def replace(item: CharSequence): Unit = {
-    buf trimEnd 1
-    add(item)
-  }
-  def entries(idx: Int): JListIterator[JEntry] = toEntries().asJava.listIterator(idx)
-  def entries(): JListIterator[JEntry]         = toEntries().asJava.listIterator()
-  def iterator: JIterator[JEntry]              = toEntries().iterator.asJava
-
-  def current()         = if (index >= 0 && index < buf.size) buf(index) else fail("current()")
-  def previous()        = (index > 0) && minusOne
-  def next()            = (index <= lastIndex) && plusOne
-  def moveToFirst()     = (size > 0) && (index != 0) && setTo(0)
-  def moveToLast()      = (size > 0) && (index < lastIndex) && setTo(lastIndex)
-  def moveTo(idx: Int)  = (idx > 0) && (idx <= lastIndex) && setTo(idx)
-  def moveToEnd(): Unit = setTo(size)
-
-  // scala legacy interface
-  def asList: List[JEntry] = toEntries().toList
-  def asJavaList           = entries()
-  def asStrings            = buf.toList
-  def grep(s: String)      = buf.toList filter (_ contains s)
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/package.scala b/src/compiler/scala/tools/nsc/interpreter/session/package.scala
deleted file mode 100644
index c62cf21..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/session/package.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-import scala.language.implicitConversions
-
-/** Files having to do with the state of a repl session:
- *  lines of text entered, types and terms defined, etc.
- */
-package object session {
-  type JIterator[T]       = java.util.Iterator[T]
-  type JListIterator[T]   = java.util.ListIterator[T]
-
-  type JEntry             = scala.tools.jline.console.history.History.Entry
-  type JHistory           = scala.tools.jline.console.history.History
-  type JMemoryHistory     = scala.tools.jline.console.history.MemoryHistory
-  type JPersistentHistory = scala.tools.jline.console.history.PersistentHistory
-
-  private[interpreter] implicit def charSequenceFix(x: CharSequence): String = x.toString
-}
diff --git a/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala b/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala
deleted file mode 100644
index 98c3d27..0000000
--- a/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.util.concurrent._
-
-class DaemonThreadFactory extends ThreadFactory {
-  def newThread(r: Runnable): Thread = {
-    val thread = new Thread(r)
-    thread setDaemon true
-    thread
-  }
-}
-
-object DaemonThreadFactory {
-  def newPool() = Executors.newCachedThreadPool(new DaemonThreadFactory)
-}
\ No newline at end of file
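
The removed factory only flips the daemon bit so that a cached pool never keeps the JVM alive. A
hedged, self-contained equivalent; the object name is illustrative.

    // Hedged sketch of DaemonThreadFactory/newPool: every pooled thread is a
    // daemon, so an idle pool cannot prevent JVM shutdown.
    import java.util.concurrent.{ Executors, ThreadFactory }

    object DaemonPoolSketch {
      private val daemonFactory = new ThreadFactory {
        def newThread(r: Runnable): Thread = {
          val thread = new Thread(r)
          thread.setDaemon(true)
          thread
        }
      }
      def main(args: Array[String]): Unit = {
        val pool = Executors.newCachedThreadPool(daemonFactory)
        pool.submit(new Runnable { def run(): Unit = println("background work") })
        Thread.sleep(100)   // give the daemon thread a moment before main exits
      }
    }
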
diff --git a/src/compiler/scala/tools/nsc/io/Fileish.scala b/src/compiler/scala/tools/nsc/io/Fileish.scala
deleted file mode 100644
index 7b4e385..0000000
--- a/src/compiler/scala/tools/nsc/io/Fileish.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.io.{ InputStream }
-import java.util.jar.JarEntry
-
-/** A common interface for File-based things and Stream-based things.
- *  (In particular, io.File and JarEntry.)
- */
-class Fileish(val path: Path, val input: () => InputStream) extends Streamable.Chars {
-  def inputStream() = input()
-
-  def parent       = path.parent
-  def name         = path.name
-  def isSourceFile = path.hasExtension("java", "scala")
-
-  private lazy val pkgLines = lines() collect { case x if x startsWith "package " => x stripPrefix "package" trim }
-  lazy val pkgFromPath      = parent.path.replaceAll("""[/\\]""", ".")
-  lazy val pkgFromSource    = pkgLines map (_ stripSuffix ";") mkString "."
-
-  override def toString = path.path
-}
-
-object Fileish {
-  def apply(f: File): Fileish = new Fileish(f, () => f.inputStream())
-  def apply(f: JarEntry, in: () => InputStream): Fileish  = new Fileish(Path(f.getName), in)
-  def apply(path: String, in: () => InputStream): Fileish = new Fileish(Path(path), in)
-}
diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala
index e919621..2967f67 100644
--- a/src/compiler/scala/tools/nsc/io/Jar.scala
+++ b/src/compiler/scala/tools/nsc/io/Jar.scala
@@ -10,8 +10,7 @@ import java.io.{ InputStream, OutputStream, IOException, FileNotFoundException,
 import java.util.jar._
 import scala.collection.JavaConverters._
 import Attributes.Name
-import util.ClassPath
-import scala.language.implicitConversions
+import scala.language.{ implicitConversions, postfixOps }
 
 // Attributes.Name instances:
 //
@@ -37,9 +36,6 @@ class Jar(file: File) extends Iterable[JarEntry] {
   def this(jfile: JFile) = this(File(jfile))
   def this(path: String) = this(File(path))
 
-  protected def errorFn(msg: String): Unit = Console println msg
-
-  lazy val jarFile  = new JarFile(file.jfile)
   lazy val manifest = withJarInput(s => Option(s.getManifest))
 
   def mainClass     = manifest map (f => f(Name.MAIN_CLASS))
@@ -51,6 +47,20 @@ class Jar(file: File) extends Iterable[JarEntry] {
     case _        => Nil
   }
 
+  /** Invoke f with input for named jar entry (or None). */
+  def withEntryStream[A](name: String)(f: Option[InputStream] => A) = {
+    val jarFile = new JarFile(file.jfile)
+    def apply() =
+      jarFile getEntry name match {
+        case null   => f(None)
+        case entry  =>
+          val in = Some(jarFile getInputStream entry)
+          try f(in)
+          finally in map (_.close())
+      }
+    try apply() finally jarFile.close()
+  }
+
   def withJarInput[T](f: JarInputStream => T): T = {
     val in = new JarInputStream(file.inputStream())
     try f(in)
@@ -64,12 +74,6 @@ class Jar(file: File) extends Iterable[JarEntry] {
     Iterator continually in.getNextJarEntry() takeWhile (_ != null) foreach f
   }
   override def iterator: Iterator[JarEntry] = this.toList.iterator
-  def fileishIterator: Iterator[Fileish] = jarFile.entries.asScala map (x => Fileish(x, () => getEntryStream(x)))
-
-  private def getEntryStream(entry: JarEntry) = jarFile getInputStream entry match {
-    case null   => errorFn("No such entry: " + entry) ; null
-    case x      => x
-  }
   override def toString = "" + file
 }
 
@@ -111,9 +115,9 @@ class JarWriter(val file: File, val manifest: Manifest) {
     val buf = new Array[Byte](10240)
     def loop(): Unit = in.read(buf, 0, buf.length) match {
       case -1 => in.close()
-      case n  => out.write(buf, 0, n) ; loop
+      case n  => out.write(buf, 0, n) ; loop()
     }
-    loop
+    loop()
   }
 
   def close() = out.close()
@@ -131,7 +135,6 @@ object Jar {
       m
     }
     def apply(manifest: JManifest): WManifest = new WManifest(manifest)
-    implicit def unenrichManifest(x: WManifest): JManifest = x.underlying
   }
   class WManifest(manifest: JManifest) {
     for ((k, v) <- initialMainAttrs)
@@ -148,12 +151,7 @@ object Jar {
     }
 
     def apply(name: Attributes.Name): String        = attrs(name)
-    def apply(name: String): String                 = apply(new Attributes.Name(name))
     def update(key: Attributes.Name, value: String) = attrs.put(key, value)
-    def update(key: String, value: String)          = attrs.put(new Attributes.Name(key), value)
-
-    def mainClass: String = apply(Name.MAIN_CLASS)
-    def mainClass_=(value: String) = update(Name.MAIN_CLASS, value)
   }
 
   // See http://download.java.net/jdk7/docs/api/java/nio/file/Path.html
@@ -161,7 +159,7 @@ object Jar {
   private val ZipMagicNumber = List[Byte](80, 75, 3, 4)
   private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber)
 
-  def isJarOrZip(f: Path): Boolean = isJarOrZip(f, true)
+  def isJarOrZip(f: Path): Boolean = isJarOrZip(f, examineFile = true)
   def isJarOrZip(f: Path, examineFile: Boolean): Boolean =
     f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f))
 
diff --git a/src/compiler/scala/tools/nsc/io/Lexer.scala b/src/compiler/scala/tools/nsc/io/Lexer.scala
deleted file mode 100644
index 5ffb5b4..0000000
--- a/src/compiler/scala/tools/nsc/io/Lexer.scala
+++ /dev/null
@@ -1,301 +0,0 @@
-package scala.tools.nsc.io
-
-import java.io.{Reader, Writer, StringReader, StringWriter}
-import scala.collection.mutable.{Buffer, ArrayBuffer}
-import scala.math.BigInt
-
-/** Companion object of class `Lexer` which defines tokens and some utility concepts
- *  used for tokens and lexers
- */
-object Lexer {
-
-  /** An exception raised if input does not correspond to what's expected
-   *  @param   rdr   the lexer from which the bad input is read
-   *  @param   msg   the error message
-   */
-  class MalformedInput(val rdr: Lexer, val msg: String) extends Exception("Malformed JSON input at "+rdr.tokenPos+": "+msg)
-
-  /** The class of tokens, i.e. descriptions of input words (or: lexemes).
-   *  @param str    the characters making up this token
-   */
-  class Token(val str: String) {
-    override def toString = str
-  }
-
-  /** A subclass of `Token` representing single-character delimiters
-   *  @param char the delimiter character making up this token
-   */
-  case class Delim(char: Char) extends Token("'"+char.toString+"'")
-
-  /** A subclass of token representing integer literals */
-  case class IntLit(override val str: String) extends Token(str)
-
-  /** A subclass of token representing floating point literals */
-  case class FloatLit(override val str: String) extends Token(str)
-
-  /** A subclass of token representing string literals */
-  case class StringLit(override val str: String) extends Token(str) {
-    override def toString = quoted(str)
-  }
-
-  /** The `true` token */
-  val TrueLit = new Token("true")
-
-  /** The `false` token */
-  val FalseLit = new Token("false")
-
-  /** The `null` token */
-  val NullLit = new Token("null")
-
-  /** The '`(`' token */
-  val LParen = new Delim('(')
-
-  /** The '`)`' token */
-  val RParen = new Delim(')')
-
-  /** The '`{`' token */
-  val LBrace = new Delim('{')
-
-  /** The '`}`' token */
-  val RBrace = new Delim('}')
-
-  /** The '`[`' token */
-  val LBracket = new Delim('[')
-
-  /** The '`]`' token */
-  val RBracket = new Delim(']')
-
-  /** The '`,`' token */
-  val Comma = new Delim(',')
-
-  /** The '`:`' token */
-  val Colon = new Delim(':')
-
-  /** The token representing end of input */
-  val EOF = new Token("<end of input>")
-
-  private def toUDigit(ch: Int): Char = {
-    val d = ch & 0xF
-    (if (d < 10) d + '0' else d - 10 + 'A').toChar
-  }
-
-  private def addToStr(buf: StringBuilder, ch: Char) {
-    ch match {
-      case '"' => buf ++= "\\\""
-      case '\b' => buf ++= "\\b"
-      case '\f' => buf ++= "\\f"
-      case '\n' => buf ++= "\\n"
-      case '\r' => buf ++= "\\r"
-      case '\t' => buf ++= "\\t"
-      case '\\' => buf ++= "\\\\"
-      case _ =>
-        if (' ' <= ch && ch < 128) buf += ch
-        else buf ++= "\\u" += toUDigit(ch >>> 12) += toUDigit(ch >>> 8) += toUDigit(ch >>> 4) += toUDigit(ch)
-    }
-  }
-
-  /** Returns given string enclosed in `"`-quotes with all string characters escaped
-   *  so that they correspond to the JSON standard.
-   *  Characters that are escaped are:  `"`, `\b`, `\f`, `\n`, `\r`, `\t`, `\`.
-   *  Furthermore, every other character which is not in the ASCII range 32-127 is
-   *  escaped as a four hex-digit unicode character of the form `\ u x x x x`.
-   *  @param   str   the string to be quoted
-   */
-  def quoted(str: String): String = {
-    val buf = new StringBuilder += '\"'
-    str foreach (addToStr(buf, _))
-    buf += '\"'
-    buf.toString
-  }
-
-  private val BUF_SIZE = 2 << 16
-}
-
-import Lexer._
-
-/** A simple lexer for tokens as they are used in JSON, plus parens `(`, `)`
- *  Tokens understood are:
- *
- *  `(`, `)`, `[`, `]`, `{`, `}`, `:`, `,`, `true`, `false`, `null`,
- *  strings (syntax as in JSON),
- *  integer numbers (syntax as in JSON: -?(0|\d+)),
- *  floating point numbers (syntax as in JSON: -?(0|\d+)(\.\d+)?((e|E)(+|-)?\d+)?)
- *  The end of input is represented as its own token, EOF.
- *  Lexers can keep one token lookahead
- *
- * @param rd   the reader from which characters are read.
- */
-class Lexer(rd: Reader) {
-
-  /** The last-read character */
-  var ch: Char = 0
-
-  /** The number of characters read so far */
-  var pos: Long = 0
-
-  /** The last-read token */
-  var token: Token = _
-
-  /** The number of characters read before the start of the last-read token */
-  var tokenPos: Long = 0
-
-  private var atEOF: Boolean = false
-  private val buf = new Array[Char](BUF_SIZE)
-  private var nread: Int = 0
-  private var bp = 0
-
-  /** Reads next character into `ch` */
-  def nextChar() {
-    assert(!atEOF)
-    if (bp == nread) {
-      nread = rd.read(buf)
-      bp = 0
-      if (nread <= 0) { ch = 0; atEOF = true; return }
-    }
-    ch = buf(bp)
-    bp += 1
-    pos += 1
-  }
-
-  /** If last-read character equals given character, reads next character,
-   *  otherwise raises an error
-   *  @param  c   the given character to compare with last-read character
-   *  @throws  MalformedInput if character does not match
-   */
-  def acceptChar(c: Char) = if (ch == c) nextChar() else error("'"+c+"' expected")
-
-  private val sb = new StringBuilder
-
-  private def putChar() {
-    sb += ch; nextChar()
-  }
-
-  private def putAcceptString(str: String) {
-    str foreach acceptChar
-    sb ++= str
-  }
-
-  /** Skips whitespace and reads next lexeme into `token`
-   *  @throws  MalformedInput if lexeme not recognized as a valid token
-   */
-  def nextToken() {
-    sb.clear()
-    while (!atEOF && ch <= ' ') nextChar()
-    tokenPos = pos - 1
-    if (atEOF) token = EOF
-    else ch match {
-      case '(' => putChar(); token = LParen
-      case ')' => putChar(); token = RParen
-      case '{' => putChar(); token = LBrace
-      case '}' => putChar(); token = RBrace
-      case '[' => putChar(); token = LBracket
-      case ']' => putChar(); token = RBracket
-      case ',' => putChar(); token = Comma
-      case ':' => putChar(); token = Colon
-      case 't' => putAcceptString("true"); token = TrueLit
-      case 'f' => putAcceptString("false"); token = FalseLit
-      case 'n' => putAcceptString("null"); token = NullLit
-      case '"' => getString()
-      case '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => getNumber()
-      case _ => error("unrecoginezed start of token: '"+ch+"'")
-    }
-    //println("["+token+"]")
-  }
-
-  /** Reads a string literal, and forms a `StringLit` token from it.
-   *  Last-read input character `ch` must be opening `"`-quote.
-   *  @throws  MalformedInput if lexeme not recognized as a string literal.
-   */
-  def getString() {
-    def udigit() = {
-      nextChar()
-      if ('0' <= ch && ch <= '9') ch - '9'
-      else if ('A' <= ch && ch <= 'F') ch - 'A' + 10
-      else if ('a' <= ch && ch <= 'f') ch - 'a' + 10
-      else error("illegal unicode escape character: '"+ch+"'")
-    }
-    val delim = ch
-    nextChar()
-    while (ch != delim && ch >= ' ') {
-      if (ch == '\\') {
-        nextChar()
-        ch match {
-          case '\'' => sb += '\''
-          case '"' => sb += '"'
-          case '\\' => sb += '\\'
-          case '/' => sb += '/'
-          case 'b' => sb += '\b'
-          case 'f' => sb += '\f'
-          case 'n' => sb += '\n'
-          case 'r' => sb += '\r'
-          case 't' => sb += '\t'
-          case 'u' => sb += (udigit() << 12 | udigit() << 8 | udigit() << 4 | udigit()).toChar
-          case _ => error("illegal escape character: '"+ch+"'")
-        }
-        nextChar()
-      } else {
-        putChar()
-      }
-    }
-    acceptChar(delim)
-    token = StringLit(sb.toString)
-  }
-
-  /** Reads a numeric literal, and forms an `IntLit` or `FloatLit` token from it.
-   *  Last-read input character `ch` must be either `-` or a digit.
-   *  @throws  MalformedInput if lexeme not recognized as a numeric literal.
-   */
-  def getNumber() {
-    def digit() =
-      if ('0' <= ch && ch <= '9') putChar()
-      else error("<digit> expected")
-    def digits() =
-      do { digit() } while ('0' <= ch && ch <= '9')
-    var isFloating = false
-    if (ch == '-') putChar()
-    if (ch == '0') digit()
-    else digits()
-    if (ch == '.') {
-      isFloating = true
-      putChar()
-      digits()
-    }
-    if (ch == 'e' || ch == 'E') {
-      isFloating = true
-      putChar()
-      if (ch == '+' || ch == '-') putChar()
-      digits()
-    }
-    token = if (isFloating) FloatLit(sb.toString) else IntLit(sb.toString)
-  }
-
-  /** If current token equals given token, reads next token, otherwise raises an error.
-   *  @param  t   the given token to compare current token with
-   *  @throws MalformedInput  if the two tokens do not match.
-   */
-  def accept(t: Token) {
-    if (token == t) nextToken()
-    else error(t+" expected, but "+token+" found")
-  }
-
-  /** If the current token is a delimiter consisting of the given character, reads the next token,
-   *  otherwise raises an error.
-   *  @param  c   the given delimiter character to compare current token with
-   *  @throws MalformedInput  if the current token `token` is not a delimiter, or
-   *                          consists of a character different from `c`.
-   */
-  def accept(ch: Char) {
-    token match {
-      case Delim(`ch`) => nextToken()
-      case _ => accept(Delim(ch))
-    }
-  }
-
-  /** Always throws a `MalformedInput` exception with given error message.
-   *  @param msg  the error message
-   */
-  def error(msg: String) = throw new MalformedInput(this, msg)
-
-  nextChar()
-  nextToken()
-}
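
For context, a minimal sketch of how the `Lexer` deleted above was typically driven: it tokenizes a small JSON fragment and walks the one-token-lookahead stream until `EOF`. This targets the pre-2.11 `scala.tools.nsc.io.Lexer` API removed in this commit; the demo object and the sample input are hypothetical.

    import java.io.StringReader
    import scala.tools.nsc.io.Lexer
    import scala.tools.nsc.io.Lexer._

    object LexerDemo extends App {
      // The constructor already reads the first character and first token.
      val lx = new Lexer(new StringReader("""{"xs": [1, 2.5, true]}"""))
      while (lx.token != EOF) {
        // Print each token's start offset and the token itself.
        println(s"${lx.tokenPos}: ${lx.token}")
        lx.nextToken()
      }
    }

Each iteration prints tokens such as LBrace, StringLit, IntLit and FloatLit, i.e. the JSON-plus-parens token set described in the class comment.
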
diff --git a/src/compiler/scala/tools/nsc/io/MsilFile.scala b/src/compiler/scala/tools/nsc/io/MsilFile.scala
deleted file mode 100644
index 2f0a71f..0000000
--- a/src/compiler/scala/tools/nsc/io/MsilFile.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import ch.epfl.lamp.compiler.msil.{ Type => MsilType, _ }
-
-/** This class wraps an MsilType.  It exists only so
- *  ClassPath can treat all of JVM/MSIL/bin/src files
- *  uniformly, as AbstractFiles.
- */
-class MsilFile(val msilType: MsilType) extends VirtualFile(msilType.FullName, msilType.Namespace) {
-}
-
-object NoMsilFile extends MsilFile(null) { }
diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala
deleted file mode 100644
index b03a921..0000000
--- a/src/compiler/scala/tools/nsc/io/Pickler.scala
+++ /dev/null
@@ -1,454 +0,0 @@
-package scala.tools.nsc.io
-
-import scala.annotation.unchecked
-import Lexer._
-import java.io.Writer
-import scala.language.implicitConversions
-import scala.reflect.ClassTag
-
-/** An abstract class for writing and reading Scala objects to and
- *  from a legible representation. The representation follows this grammar:
- *  {{{
- *  Pickled = `true` | `false` | `null` | NumericLit | StringLit |
- *            Labelled | Pickled `,` Pickled
- *  Labelled = StringLit `(` Pickled? `)`
- *  }}}
- *
- *  All ...Lit classes are as in JSON. @see scala.tools.nsc.io.Lexer
- *
- *  Subclasses of `Pickler` each can write and read individual classes
- *  of values.
- *
- *  @param  T   the type of values handled by this pickler.
- *
- *  These Picklers build on the work of Andrew Kennedy. They are most closely inspired by
- *  Iulian Dragos' picklers for Scala to XML. See:
- *
- *  <a href="http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide">
- *  http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide
- *  </a>
- */
-abstract class Pickler[T] {
-
-  import Pickler._
-
-  /** Writes value in pickled form
-   *  @param  wr   the writer to which pickled form is written
-   *  @param  x    the value to write
-   */
-  def pickle(wr: Writer, x: T)
-
-  /** Reads value from pickled form.
-   *
-   *  @param  rd   the lexer from which lexemes are read
-   *  @return An `UnpickleSuccess` value if the current input corresponds to the
-   *          kind of value that is unpickled by the current subclass of `Pickler`,
-   *          an `UnpickleFailure` value otherwise.
-   *  @throws  `Lexer.MalformedInput` if input is invalid, or if
-   *          an unrecoverable `UnpickleFailure` is encountered.
-   */
-  def unpickle(rd: Lexer): Unpickled[T]
-
-  /** A pickler representing a `~`-pair of values as two consecutive pickled
-   *  strings, separated by a comma.
-   *  @param  that   the second pickler which together with the current pickler makes
-   *                 up the pair `this ~ that` to be pickled.
-   */
-  def ~ [U] (that: => Pickler[U]): Pickler[T ~ U] = seqPickler(this, that)
-
-  /** A pickler that adds a label to the current pickler, using the representation
-   *   `label ( <current pickler> )`
-   *
-   *  @param  label  the string to be added as a label.
-   */
-  def labelled(label: String): Pickler[T] = labelledPickler(label, this)
-
-  /** A pickler obtained from the current pickler by a pair of transformer functions
-   *  @param   in   the function that maps values handled by the current pickler to
-   *                values handled by the wrapped pickler.
-   *  @param   out  the function that maps values handled by the wrapped pickler to
-   *                values handled by the current pickler.
-   */
-  def wrapped [U] (in: T => U)(out: U => T): Pickler[U] = wrappedPickler(this)(in)(out)
-
-  /** A pickler obtained from the current pickler by also admitting `null` as
-   *  a handled value, represented as the token `null`.
-   *
-   *  @param  fromNull    an implicit evidence parameter ensuring that the type of values
-   *                      handled by this pickler contains `null`.
-   */
-  def orNull(implicit fromNull: Null <:< T): Pickler[T] = nullablePickler(this)
-
-  /** A conditional pickler obtained from the current pickler.
-   *  @param   p   the condition to test to find out whether the pickler can handle
-   *                  some Scala value.
-   */
-  def cond(p: Any => Boolean): CondPickler[T] = conditionalPickler(this, p)
-
-  /** A conditional pickler handling values of some Scala class. It adds the
-   *  class name as a label to the representation of the current pickler.
-   *  @param    c     the class of values handled by this pickler.
-   */
-  def asClass[U <: T](c: Class[U]): CondPickler[T] = this.labelled(c.getName).cond(c isInstance _)
-}
-
-object Pickler {
-
-  var picklerDebugMode = false
-
-  /** A base class representing unpickler result. It has two subclasses:
-   *  `UnpickleSuccess` for successful unpicklings and `UnpickleFailure` for failures,
-   *  where a value of the given type `T` could not be unpickled from input.
-   *  @param  T the type of unpickled values in case of success.
-   */
-  abstract class Unpickled[+T] {
-    /** Transforms success values to success values using given function,
-     *  leaves failures alone
-     *  @param   f the function to apply.
-     */
-    def map[U](f: T => U): Unpickled[U] = this match {
-      case UnpickleSuccess(x) => UnpickleSuccess(f(x))
-      case f: UnpickleFailure => f
-    }
-    /** Transforms success values to successes or failures using given function,
-     *  leaves failures alone.
-     *  @param   f the function to apply.
-     */
-    def flatMap[U](f: T => Unpickled[U]): Unpickled[U] = this match {
-      case UnpickleSuccess(x) => f(x)
-      case f: UnpickleFailure => f
-    }
-    /** Tries alternate expression if current result is a failure
-     *  @param alt  the alternate expression to be tried in case of failure
-     */
-    def orElse[U >: T](alt: => Unpickled[U]): Unpickled[U] = this match {
-      case UnpickleSuccess(x) => this
-      case f: UnpickleFailure => alt
-    }
-
-    /** Transforms failures into thrown `MalformedInput` exceptions.
-     *  @throws  MalformedInput   if current result is a failure
-     */
-    def requireSuccess: UnpickleSuccess[T] = this match {
-      case s @ UnpickleSuccess(x) => s
-      case f: UnpickleFailure =>
-        throw new MalformedInput(f.rd, "Unrecoverable unpickle failure:\n"+f.errMsg)
-    }
-  }
-
-  /** A class representing successful unpicklings
-   *  @param T        the type of the unpickled value
-   *  @param result   the unpickled value
-   */
-  case class UnpickleSuccess[+T](result: T) extends Unpickled[T]
-
-  /** A class representing unpickle failures
-   *  @param msg      an error message describing what failed.
-   *  @param rd       the lexer unpickled values were read from (can be used to get
-   *                  error position, for instance).
-   */
-  class UnpickleFailure(msg: => String, val rd: Lexer) extends Unpickled[Nothing] {
-    def errMsg = msg
-    override def toString = "Failure at "+rd.tokenPos+":\n"+msg
-  }
-
-  private def errorExpected(rd: Lexer, msg: => String) =
-    new UnpickleFailure("expected: "+msg+"\n" +
-                        "found   : "+rd.token,
-                        rd)
-
-  private def nextSuccess[T](rd: Lexer, result: T) = {
-    rd.nextToken()
-    UnpickleSuccess(result)
-  }
-
-  /** The implicit `Pickler` value for type `T`. Equivalent to `implicitly[Pickler[T]]`.
-   */
-  def pkl[T: Pickler] = implicitly[Pickler[T]]
-
-  /** A class representing `~`-pairs */
-  case class ~[+S, +T](fst: S, snd: T)
-
-  /** A wrapper class to be able to use `~` as an infix method */
-  implicit class TildeDecorator[S](x: S) {
-    /** Infix method that forms a `~`-pair. */
-    def ~ [T](y: T): S ~ T = new ~ (x, y)
-  }
-
-  /** A converter from binary functions to functions over `~`-pairs
-   */
-  implicit def fromTilde[T1, T2, R](f: (T1, T2) => R): T1 ~ T2 => R = { case x1 ~ x2 => f(x1, x2) }
-
-  /** A converter from functions returning Options over pairs to functions returning `~`-pairs.
-   *  The converted function will raise a `MatchError` where the original function returned
-   *  a `None`. This converter is useful for turning `unapply` methods of case classes
-   *  into wrapper methods that can be passed as second argument to `wrap`.
-   */
-  implicit def toTilde[T1, T2, S](f: S => Option[(T1, T2)]): S => T1 ~ T2 = { x => (f(x): @unchecked) match { case Some((x1, x2)) => x1 ~ x2 } }
-
-  /** Same as `p.labelled(label)`.
-   */
-  def labelledPickler[T](label: String, p: Pickler[T]): Pickler[T] = new Pickler[T] {
-    def pickle(wr: Writer, x: T) = {
-      wr.write(quoted(label));
-      wr.write("(")
-      p.pickle(wr, x)
-      wr.write(")")
-    }
-    def unpickle(rd: Lexer): Unpickled[T] =
-      rd.token match {
-        case StringLit(`label`) =>
-          rd.nextToken()
-          rd.accept('(')
-          val result = p.unpickle(rd).requireSuccess
-          rd.accept(')')
-          result
-        case _ =>
-          errorExpected(rd, quoted(label)+"(...)")
-      }
-  }
-
-  /** Same as `p.wrapped(in)(out)`
-   */
-  def wrappedPickler[S, T](p: Pickler[S])(in: S => T)(out: T => S) = new Pickler[T] {
-    def pickle(wr: Writer, x: T) = p.pickle(wr, out(x))
-    def unpickle(rd: Lexer) = p.unpickle(rd) map in
-  }
-
-  /** Same as `p.cond(condition)`
-   */
-  def conditionalPickler[T](p: Pickler[T], condition: Any => Boolean) = new CondPickler[T](condition) {
-    def pickle(wr: Writer, x: T) = p.pickle(wr, x)
-    def unpickle(rd: Lexer) = p.unpickle(rd)
-  }
-
-  /** Same as `p ~ q`
-   */
-  def seqPickler[T, U](p: Pickler[T], q: => Pickler[U]) = new Pickler[T ~ U] {
-    lazy val qq = q
-    def pickle(wr: Writer, x: T ~ U) = {
-      p.pickle(wr, x.fst)
-      wr.write(',')
-      q.pickle(wr, x.snd)
-    }
-    def unpickle(rd: Lexer) =
-      for (x <- p.unpickle(rd); y <- { rd.accept(','); qq.unpickle(rd).requireSuccess })
-      yield x ~ y
-  }
-
-  /** Same as `p | q`
-   */
-  def eitherPickler[T, U <: T, V <: T](p: CondPickler[U], q: => CondPickler[V]) =
-    new CondPickler[T](x => p.canPickle(x) || q.canPickle(x)) {
-      lazy val qq = q
-      override def tryPickle(wr: Writer, x: Any): Boolean =
-        p.tryPickle(wr, x) || qq.tryPickle(wr, x)
-      def pickle(wr: Writer, x: T) =
-        require(tryPickle(wr, x),
-                "no pickler found for "+x+" of class "+x.getClass.getName)
-      def unpickle(rd: Lexer) = p.unpickle(rd) orElse qq.unpickle(rd)
-    }
-
-  /** Same as `p.orNull`
-   */
-  def nullablePickler[T](p: Pickler[T])(implicit fromNull: Null <:< T): Pickler[T] = new Pickler[T] {
-    def pickle(wr: Writer, x: T) =
-      if (x == null) wr.write("null") else p.pickle(wr, x)
-    def unpickle(rd: Lexer): Unpickled[T] =
-      if (rd.token == NullLit) nextSuccess(rd, fromNull(null))
-      else p.unpickle(rd)
-  }
-
-  /** A conditional pickler for singleton objects. It represents these
-   *  with the object's underlying class as a label.
-   *  Example: Object scala.None would be represented as `scala.None$()`.
-   */
-  def singletonPickler[T <: AnyRef](x: T): CondPickler[T] =
-    unitPickler
-      .wrapped { _ => x } { x => () }
-      .labelled (x.getClass.getName)
-      .cond (x eq _.asInstanceOf[AnyRef])
-
-  /** A pickler that handles instances of classes that have an empty constructor.
-   *  It represents them as `$new ( <name of class> )`.
-   *  When unpickling, a new instance of the class is created using the empty
-   *  constructor of the class via `Class.forName(<name of class>).newInstance()`.
-   */
-  def javaInstancePickler[T <: AnyRef]: Pickler[T] =
-    (stringPickler labelled "$new")
-      .wrapped { name => Class.forName(name).newInstance().asInstanceOf[T] } { _.getClass.getName }
-
-  /** A pickler that handles iterators. It pickles all values
-   *  returned by an iterator separated by commas.
-   *  When unpickling, it always returns an `UnpickleSuccess` containing an iterator.
-   *  This iterator returns 0 or more values that are obtained by unpickling
-   *  until a closing parenthesis, bracket or brace or the end of input is encountered.
-   *
-   *  This means that iterator picklers should not be directly followed by `~`
-   *  because the pickler would also read any values belonging to the second
-   *  part of the `~`-pair.
-   *
-   *  What's usually done instead is that the iterator pickler is wrapped and labelled
-   *  to handle other kinds of sequences.
-   */
-  implicit def iterPickler[T: Pickler]: Pickler[Iterator[T]] = new Pickler[Iterator[T]] {
-    lazy val p = pkl[T]
-    def pickle(wr: Writer, xs: Iterator[T]) {
-      var first = true
-      for (x <- xs) {
-        if (first) first = false else wr.write(',')
-        p.pickle(wr, x)
-      }
-    }
-    def unpickle(rd: Lexer): Unpickled[Iterator[T]] = UnpickleSuccess(new Iterator[T] {
-      var first = true
-      def hasNext = {
-        val t = rd.token
-        t != EOF && t != RParen && t != RBrace && t != RBracket
-      }
-      def next(): T = {
-        if (first) first = false else rd.accept(',')
-        p.unpickle(rd).requireSuccess.result
-      }
-    })
-  }
-
-  /** A pickler that handles values that can be represented as a single token.
-   *  @param   kind   the kind of token representing the value, used in error messages
-   *                  for unpickling.
-   *  @param  matcher A partial function from tokens to handled values. Unpickling
-   *                  succeeds if the matcher function is defined on the current token.
-   */
-  private def tokenPickler[T](kind: String)(matcher: PartialFunction[Token, T]) = new Pickler[T] {
-    def pickle(wr: Writer, x: T) = wr.write(x.toString)
-    def unpickle(rd: Lexer) =
-      if (matcher isDefinedAt rd.token) nextSuccess(rd, matcher(rd.token))
-      else errorExpected(rd, kind)
-  }
-
-  /** A pickler for values of type `Long`, represented as integer literals */
-  implicit val longPickler: Pickler[Long] =
-    tokenPickler("integer literal") { case IntLit(s) => s.toLong }
-
-  /** A pickler for values of type `Double`, represented as floating point literals */
-  implicit val doublePickler: Pickler[Double] =
-    tokenPickler("floating point literal") { case FloatLit(s) => s.toDouble }
-
-  /** A pickler for values of type `Byte`, represented as integer literals */
-  implicit val bytePickler: Pickler[Byte] = longPickler.wrapped { _.toByte } { _.toLong }
-
-  /** A pickler for values of type `Short`, represented as integer literals */
-  implicit val shortPickler: Pickler[Short] = longPickler.wrapped { _.toShort } { _.toLong }
-
-  /** A pickler for values of type `Int`, represented as integer literals */
-  implicit val intPickler: Pickler[Int] = longPickler.wrapped { _.toInt } { _.toLong }
-
-  /** A pickler for values of type `Float`, represented as floating point literals */
-  implicit val floatPickler: Pickler[Float] = doublePickler.wrapped { _.toFloat } { _.toDouble }
-
-  /** A conditional pickler for the boolean value `true` */
-  private val truePickler =
-    tokenPickler("boolean literal") { case TrueLit => true } cond { _ == true }
-
-  /** A conditional pickler for the boolean value `false` */
-  private val falsePickler =
-    tokenPickler("boolean literal") { case FalseLit => false } cond { _ == false }
-
-  /** A pickler for values of type `Boolean`, represented as the literals `true` or `false`. */
-  implicit def booleanPickler: Pickler[Boolean] = truePickler | falsePickler
-
-  /** A pickler for values of type `Unit`, represented by the empty character string */
-  implicit val unitPickler: Pickler[Unit] = new Pickler[Unit] {
-    def pickle(wr: Writer, x: Unit) {}
-    def unpickle(rd: Lexer): Unpickled[Unit] = UnpickleSuccess(())
-  }
-
-  /** A pickler for values of type `String`, represented as string literals */
-  implicit val stringPickler: Pickler[String] = new Pickler[String] {
-    def pickle(wr: Writer, x: String) = wr.write(if (x == null) "null" else quoted(x))
-    def unpickle(rd: Lexer) = rd.token match {
-      case StringLit(s) => nextSuccess(rd, s)
-      case NullLit => nextSuccess(rd, null)
-      case _ => errorExpected(rd, "string literal")
-    }
-  }
-
-  /** A pickler for values of type `Char`, represented as string literals of length 1 */
-  implicit val charPickler: Pickler[Char] =
-    stringPickler
-      .wrapped { s => require(s.length == 1, "single character string literal expected, but "+quoted(s)+" found"); s(0) } { _.toString }
-
-  /** A pickler for pairs, represented as `~`-pairs */
-  implicit def tuple2Pickler[T1: Pickler, T2: Pickler]: Pickler[(T1, T2)] =
-    (pkl[T1] ~ pkl[T2])
-      .wrapped { case x1 ~ x2 => (x1, x2) } { case (x1, x2) => x1 ~ x2 }
-      .labelled ("tuple2")
-
-  /** A pickler for 3-tuples, represented as `~`-tuples */
-  implicit def tuple3Pickler[T1, T2, T3](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3]): Pickler[(T1, T2, T3)] =
-    (p1 ~ p2 ~ p3)
-      .wrapped { case x1 ~ x2 ~ x3 => (x1, x2, x3) } { case (x1, x2, x3) => x1 ~ x2 ~ x3 }
-      .labelled ("tuple3")
-
-  /** A pickler for 4-tuples, represented as `~`-tuples */
-  implicit def tuple4Pickler[T1, T2, T3, T4](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3], p4: Pickler[T4]): Pickler[(T1, T2, T3, T4)] =
-    (p1 ~ p2 ~ p3 ~ p4)
-      .wrapped { case x1 ~ x2 ~ x3 ~ x4 => (x1, x2, x3, x4) } { case (x1, x2, x3, x4) => x1 ~ x2 ~ x3 ~ x4 }
-      .labelled ("tuple4")
-
-  /** A conditional pickler for the `scala.None` object */
-  implicit val nonePickler = singletonPickler(None)
-
-  /** A conditional pickler for instances of class `scala.Some` */
-  implicit def somePickler[T: Pickler]: CondPickler[Some[T]] =
-    pkl[T]
-      .wrapped { Some(_) } { _.get }
-      .asClass (classOf[Some[T]])
-
-  /** A pickler for optional values */
-  implicit def optionPickler[T: Pickler]: Pickler[Option[T]] = nonePickler | somePickler[T]
-
-  /** A pickler for list values */
-  implicit def listPickler[T: Pickler]: Pickler[List[T]] =
-    iterPickler[T] .wrapped { _.toList } { _.iterator } .labelled ("scala.List")
-
-  /** A pickler for vector values */
-  implicit def vectorPickler[T: Pickler]: Pickler[Vector[T]] =
-    iterPickler[T] .wrapped { Vector() ++ _ } { _.iterator } .labelled ("scala.Vector")
-
-  /** A pickler for array values */
-  implicit def array[T : ClassTag : Pickler]: Pickler[Array[T]] =
-    iterPickler[T] .wrapped { _.toArray} { _.iterator } .labelled ("scala.Array")
-}
-
-/** A subclass of `Pickler` that can indicate whether a particular value can be pickled by instances
- *  of this class.
- *  @param canPickle   The predicate that indicates whether a given value
- *                     can be pickled by instances of this class.
- */
-abstract class CondPickler[T](val canPickle: Any => Boolean) extends Pickler[T] {
-  import Pickler._
-
-  /** Pickles given value `x` if possible, as indicated by `canPickle(x)`.
-   */
-  def tryPickle(wr: Writer, x: Any): Boolean = {
-    val result = canPickle(x)
-    if (result) pickle(wr, x.asInstanceOf[T])
-    result
-  }
-
-  /** A pickler obtained from this pickler and an alternative pickler.
-   *  To pickle a value, this pickler is tried first. If it cannot handle
-   *  the object (as indicated by its `canPickle` test), then the
-   *  alternative pickler is tried.
-   *  To unpickle a value, this unpickler is tried first. If it cannot read
-   *  the input (as indicated by an `UnpickleFailure` result), then the
-   *  alternative pickler is tried.
-   *  @param V    The handled type of the returned pickler.
-   *  @param U    The handled type of the alternative pickler.
-   *  @param that The alternative pickler.
-   */
-  def | [V >: T, U <: V] (that: => CondPickler[U]): CondPickler[V] =
-    eitherPickler[V, T, U](this, that)
-}
-
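
To make the combinator style of the removed `Pickler` concrete, here is a small round-trip sketch. It assumes the pre-2.11 `scala.tools.nsc.io.Pickler` and `Lexer` deleted in this commit; `PicklerDemo` and the sample value are invented for illustration.

    import java.io.{ StringReader, StringWriter }
    import scala.tools.nsc.io.{ Lexer, Pickler }
    import scala.tools.nsc.io.Pickler._

    object PicklerDemo extends App {
      // Derive a pickler for (Int, List[String]) from the built-in instances.
      val p: Pickler[(Int, List[String])] = pkl[(Int, List[String])]

      val wr = new StringWriter
      p.pickle(wr, (42, List("a", "b")))
      println(wr)               // "tuple2"(42,"scala.List"("a","b"))

      val rd = new Lexer(new StringReader(wr.toString))
      println(p.unpickle(rd))   // UnpickleSuccess((42,List(a, b)))
    }

The labelled, comma-separated output follows the grammar in the class comment, and unpickling yields an `UnpickleSuccess` wrapping the reconstructed pair.
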
diff --git a/src/compiler/scala/tools/nsc/io/PrettyWriter.scala b/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
deleted file mode 100644
index acd4847..0000000
--- a/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-package scala.tools.nsc.io
-
-import java.io.Writer
-
-class PrettyWriter(wr: Writer) extends Writer {
-  protected val indentStep = "  "
-  private var indent = 0
-  private def newLine() {
-    wr.write('\n')
-    wr.write(indentStep * indent)
-  }
-  def close() = wr.close()
-  def flush() = wr.flush()
-  def write(str: Array[Char], off: Int, len: Int): Unit = {
-    if (off < str.length && off < len) {
-      str(off) match {
-        case '{' | '[' | '(' =>
-          indent += 1
-          wr.write(str(off))
-          newLine()
-          wr.write(str, off + 1, len - 1)
-        case '}' | ']' | ')' =>
-          wr.write(str, off, len)
-          indent -= 1
-        case ',' =>
-          wr.write(',')
-          newLine()
-          wr.write(str, off + 1, len - 1)
-        case ':' =>
-          wr.write(':')
-          wr.write(' ')
-          wr.write(str, off + 1, len - 1)
-        case _ =>
-          wr.write(str, off, len)
-      }
-    } else {
-      wr.write(str, off, len)
-    }
-  }
-  override def toString = wr.toString
-}
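
The removed `PrettyWriter` only re-indents whatever a pickler writes into it: `(`, `[` and `{` increase the indent, `,` breaks the line. A hedged sketch of combining it with the picklers above (names are illustrative, not from the original sources):

    import java.io.StringWriter
    import scala.tools.nsc.io.PrettyWriter
    import scala.tools.nsc.io.Pickler._

    object PrettyDemo extends App {
      val out = new StringWriter
      val pretty = new PrettyWriter(out)   // wrap the target writer
      pkl[List[Int]].pickle(pretty, List(1, 2, 3))
      pretty.flush()
      println(out)   // "scala.List"( ... ) spread over indented lines
    }
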
diff --git a/src/compiler/scala/tools/nsc/io/Replayer.scala b/src/compiler/scala/tools/nsc/io/Replayer.scala
deleted file mode 100644
index 5cb61b6..0000000
--- a/src/compiler/scala/tools/nsc/io/Replayer.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-package scala.tools.nsc.io
-
-import java.io.{Reader, Writer}
-
-import Pickler._
-import Lexer.{Token, EOF}
-
-abstract class LogReplay {
-  def logreplay(event: String, x: => Boolean): Boolean
-  def logreplay[T: Pickler](event: String, x: => Option[T]): Option[T]
-  def close()
-  def flush()
-}
-
-class Logger(wr0: Writer) extends LogReplay {
-  val wr = new PrettyWriter(wr0)
-  private var first = true
-  private def insertComma() = if (first) first = false else wr.write(",")
-
-  def logreplay(event: String, x: => Boolean) = {
-    val xx = x
-    if (xx) { insertComma(); pkl[Unit].labelled(event).pickle(wr, ()) }
-    xx
-  }
-  def logreplay[T: Pickler](event: String, x: => Option[T]) = {
-    val xx = x
-    xx match {
-      case Some(y) => insertComma(); pkl[T].labelled(event).pickle(wr, y)
-      case None =>
-    }
-    xx
-  }
-  def close() { wr.close() }
-  def flush() { wr.flush() }
-}
-
-object NullLogger extends LogReplay {
-  def logreplay(event: String, x: => Boolean) = x
-  def logreplay[T: Pickler](event: String, x: => Option[T]) = x
-  def close() {}
-  def flush() {}
-}
-
-class Replayer(raw: Reader) extends LogReplay {
-  private val rd = new Lexer(raw)
-  private var nextComma = false
-
-  private def eatComma() =
-    if (nextComma) { rd.accept(','); nextComma = false }
-
-  def logreplay(event: String, x: => Boolean) =
-    if (rd.token == EOF) NullLogger.logreplay(event, x)
-    else {
-      eatComma()
-      pkl[Unit].labelled(event).unpickle(rd) match {
-        case UnpickleSuccess(_) => nextComma = true; true
-        case _ => false
-      }
-    }
-
-  def logreplay[T: Pickler](event: String, x: => Option[T]) =
-    if (rd.token == EOF) NullLogger.logreplay(event, x)
-    else {
-      eatComma()
-      pkl[T].labelled(event).unpickle(rd) match {
-        case UnpickleSuccess(y) => nextComma = true; Some(y)
-        case _ => None
-      }
-    }
-
-  def close() { raw.close() }
-  def flush() {}
-}
-
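
`Logger` and `Replayer`, also dropped here, are the two ends of the same log format: one records events as labelled pickles, the other replays them. A small sketch under the same assumptions as the examples above; the object name, event labels and values are invented.

    import java.io.{ StringReader, StringWriter }
    import scala.tools.nsc.io.{ Logger, Replayer }
    import scala.tools.nsc.io.Pickler._

    object ReplayDemo extends App {
      // Record two events.
      val buf = new StringWriter
      val log = new Logger(buf)
      log.logreplay("flag", true)
      log.logreplay("answer", Some(42))
      log.flush()

      // Feed the log back in; recorded values win over the defaults passed here.
      val replay = new Replayer(new StringReader(buf.toString))
      println(replay.logreplay("flag", false))      // true
      println(replay.logreplay("answer", Some(0)))  // Some(42)
    }
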
diff --git a/src/compiler/scala/tools/nsc/io/Socket.scala b/src/compiler/scala/tools/nsc/io/Socket.scala
index e766c1b..a803e41 100644
--- a/src/compiler/scala/tools/nsc/io/Socket.scala
+++ b/src/compiler/scala/tools/nsc/io/Socket.scala
@@ -9,18 +9,11 @@ package io
 import java.io.{ IOException, InputStreamReader, BufferedReader, PrintWriter, Closeable }
 import java.io.{ BufferedOutputStream, BufferedReader }
 import java.net.{ ServerSocket, SocketException, SocketTimeoutException, InetAddress, Socket => JSocket }
-import scala.sys.SystemProperties._
 import scala.io.Codec
 
 /** A skeletal only-as-much-as-I-need Socket wrapper.
  */
 object Socket {
-  def preferringIPv4[T](body: => T): T = exclusively {
-    val saved = preferIPv4Stack.value
-    try   { preferIPv4Stack.enable() ; body }
-    finally preferIPv4Stack setValue saved
-  }
-
   class Box[+T](f: () => T) {
     private def handlerFn[U](f: Throwable => U): PartialFunction[Throwable, U] = {
       case x @ (_: IOException | _: SecurityException)  => f(x)
@@ -28,13 +21,10 @@ object Socket {
     private val optHandler = handlerFn[Option[T]](_ => None)
     private val eitherHandler = handlerFn[Either[Throwable, T]](x => Left(x))
 
-    def getOrElse[T1 >: T](alt: T1): T1 = opt getOrElse alt
     def either: Either[Throwable, T]    = try Right(f()) catch eitherHandler
     def opt: Option[T]                  = try Some(f()) catch optHandler
   }
 
-  def newIPv4Server(port: Int = 0)        = new Box(() => preferringIPv4(new ServerSocket(0)))
-  def newServer(port: Int = 0)            = new Box(() => new ServerSocket(0))
   def localhost(port: Int)                = apply(InetAddress.getLocalHost(), port)
   def apply(host: InetAddress, port: Int) = new Box(() => new Socket(new JSocket(host, port)))
   def apply(host: String, port: Int)      = new Box(() => new Socket(new JSocket(host, port)))
@@ -62,4 +52,4 @@ class Socket(jsocket: JSocket) extends Streamable.Bytes with Closeable {
       out.close()
     }
   }
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala
index 569270f..3220c2e 100644
--- a/src/compiler/scala/tools/nsc/io/SourceReader.scala
+++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala
@@ -9,7 +9,7 @@ package io
 
 import java.io.{ FileInputStream, InputStream, IOException }
 import java.nio.{ByteBuffer, CharBuffer}
-import java.nio.channels.{FileChannel, ReadableByteChannel, Channels}
+import java.nio.channels.{ ReadableByteChannel, Channels }
 import java.nio.charset.{CharsetDecoder, CoderResult}
 import scala.tools.nsc.reporters._
 
@@ -33,9 +33,6 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) {
                    "Please try specifying another one using the -encoding option")
   }
 
-  /** Reads the file with the specified name. */
-  def read(filename: String): Array[Char]= read(new JFile(filename))
-
   /** Reads the specified file. */
   def read(file: JFile): Array[Char] = {
     val c = new FileInputStream(file).getChannel
@@ -77,7 +74,7 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) {
   protected def read(bytes: ByteBuffer): Array[Char] = {
     val decoder: CharsetDecoder = this.decoder.reset()
     val chars: CharBuffer = this.chars; chars.clear()
-    terminate(flush(decoder, decode(decoder, bytes, chars, true)))
+    terminate(flush(decoder, decode(decoder, bytes, chars, endOfInput = true)))
   }
 
   //########################################################################
diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala
index 711696b..5f2f90c 100644
--- a/src/compiler/scala/tools/nsc/io/package.scala
+++ b/src/compiler/scala/tools/nsc/io/package.scala
@@ -5,9 +5,6 @@
 
 package scala.tools.nsc
 
-import java.util.concurrent.{ Future, Callable }
-import java.util.{ Timer, TimerTask }
-import java.util.jar.{ Attributes }
 import scala.language.implicitConversions
 
 package object io {
@@ -21,41 +18,13 @@ package object io {
   type Path = scala.reflect.io.Path
   val Path = scala.reflect.io.Path
   type PlainFile = scala.reflect.io.PlainFile
-  val PlainFile = scala.reflect.io.PlainFile
   val Streamable = scala.reflect.io.Streamable
   type VirtualDirectory = scala.reflect.io.VirtualDirectory
   type VirtualFile = scala.reflect.io.VirtualFile
-  val ZipArchive = scala.reflect.io.ZipArchive
   type ZipArchive = scala.reflect.io.ZipArchive
-  
-  implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
 
   type JManifest = java.util.jar.Manifest
   type JFile = java.io.File
 
   implicit def enrichManifest(m: JManifest): Jar.WManifest = Jar.WManifest(m)
-  private lazy val daemonThreadPool = DaemonThreadFactory.newPool()
-
-  def runnable(body: => Unit): Runnable       = new Runnable { override def run() = body }
-  def callable[T](body: => T): Callable[T]    = new Callable[T] { override def call() = body }
-  def spawn[T](body: => T): Future[T]         = daemonThreadPool submit callable(body)
-  def submit(runnable: Runnable)              = daemonThreadPool submit runnable
-
-  // Create, start, and return a daemon thread
-  def daemonize(body: => Unit): Thread = newThread(_ setDaemon true)(body)
-  def newThread(f: Thread => Unit)(body: => Unit): Thread = {
-    val thread = new Thread(runnable(body))
-    f(thread)
-    thread.start
-    thread
-  }
-
-  // Set a timer to execute the given code.
-  def timer(seconds: Int)(body: => Unit): Timer = {
-    val alarm = new Timer(true) // daemon
-    val tt    = new TimerTask { def run() = body }
-
-    alarm.schedule(tt, seconds * 1000)
-    alarm
-  }
 }
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 0779e64..a61ad39 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -8,11 +8,11 @@
 package scala.tools.nsc
 package javac
 
-import scala.reflect.internal.util.OffsetPosition
 import scala.collection.mutable.ListBuffer
 import symtab.Flags
 import JavaTokens._
 import scala.language.implicitConversions
+import scala.reflect.internal.util.Position
 
 trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
   val global : Global
@@ -27,7 +27,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
     def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
     def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
     def deprecationWarning(off: Int, msg: String) = unit.deprecationWarning(off, msg)
-    implicit def i2p(offset : Int) : Position = new OffsetPosition(unit.source, offset)
+    implicit def i2p(offset : Int) : Position = Position.offset(unit.source, offset)
     def warning(pos : Int, msg : String) : Unit = unit.warning(pos, msg)
     def syntaxError(pos: Int, msg: String) : Unit = unit.error(pos, msg)
   }
@@ -35,7 +35,6 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
   abstract class JavaParser extends ParserCommon {
     val in: JavaScanner
 
-    protected def posToReport: Int = in.currentPos
     def freshName(prefix : String): Name
     protected implicit def i2p(offset : Int) : Position
     private implicit def p2i(pos : Position): Int = if (pos.isDefined) pos.point else -1
@@ -75,7 +74,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
             nbraces += 1
           case _ =>
         }
-        in.nextToken
+        in.nextToken()
       }
     }
 
@@ -94,11 +93,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
       if (skipIt)
         skip()
     }
-    def warning(msg: String) : Unit = warning(in.currentPos, msg)
-
     def errorTypeTree = TypeTree().setType(ErrorType) setPos in.currentPos
-    def errorTermTree = Literal(Constant(null)) setPos in.currentPos
-    def errorPatternTree = blankExpr setPos in.currentPos
 
     // --------- tree building -----------------------------
 
@@ -123,14 +118,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
     def makeTemplate(parents: List[Tree], stats: List[Tree]) =
       Template(
         parents,
-        emptyValDef,
+        noSelfType,
         if (treeInfo.firstConstructor(stats) == EmptyTree) makeConstructor(List()) :: stats
         else stats)
 
     def makeSyntheticParam(count: Int, tpt: Tree): ValDef =
       makeParam(nme.syntheticParamName(count), tpt)
     def makeParam(name: String, tpt: Tree): ValDef =
-      makeParam(newTypeName(name), tpt)
+      makeParam(name: TermName, tpt)
     def makeParam(name: TermName, tpt: Tree): ValDef =
       ValDef(Modifiers(Flags.JAVA | Flags.PARAM), name, tpt, EmptyTree)
 
@@ -153,7 +148,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
             nbraces += 1
           case _ =>
         }
-        in.nextToken
+        in.nextToken()
         in.token match {
           case RPAREN =>
             nparens -= 1
@@ -168,7 +163,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
       while (!(tokens contains in.token) && in.token != EOF) {
         if (in.token == LBRACE) { skipAhead(); accept(RBRACE) }
         else if (in.token == LPAREN) { skipAhead(); accept(RPAREN) }
-        else in.nextToken
+        else in.nextToken()
       }
     }
 
@@ -178,18 +173,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
     def accept(token: Int): Int = {
       val pos = in.currentPos
       if (in.token != token) {
-        val posToReport =
-          //if (in.currentPos.line(unit.source).get(0) > in.lastPos.line(unit.source).get(0))
-          //  in.lastPos
-          //else
-            in.currentPos
+        val posToReport = in.currentPos
         val msg =
           JavaScannerConfiguration.token2string(token) + " expected but " +
             JavaScannerConfiguration.token2string(in.token) + " found."
 
-        syntaxError(posToReport, msg, true)
+        syntaxError(posToReport, msg, skipIt = true)
       }
-      if (in.token == token) in.nextToken
+      if (in.token == token) in.nextToken()
       pos
     }
 
@@ -199,7 +190,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
         case GTGTGT   => GTGT
         case GTGTEQ   => GTEQ
         case GTGT     => GT
-        case GTEQ     => ASSIGN
+        case GTEQ     => EQUALS
       }
       if (closers isDefinedAt in.token) in.token = closers(in.token)
       else accept(GT)
@@ -209,7 +200,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
     def ident(): Name =
       if (in.token == IDENTIFIER) {
         val name = in.name
-        in.nextToken
+        in.nextToken()
         name
       } else {
         accept(IDENTIFIER)
@@ -219,7 +210,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
     def repsep[T <: Tree](p: () => T, sep: Int): List[T] = {
       val buf = ListBuffer[T](p())
       while (in.token == sep) {
-        in.nextToken
+        in.nextToken()
         buf += p()
       }
       buf.toList
@@ -233,7 +224,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
         case AppliedTypeTree(_, _) | ExistentialTypeTree(_, _) | SelectFromTypeTree(_, _) =>
           tree
         case _ =>
-          syntaxError(tree.pos, "identifier expected", false)
+          syntaxError(tree.pos, "identifier expected", skipIt = false)
           errorTypeTree
       }
     }
@@ -243,7 +234,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
     def qualId(): RefTree = {
       var t: RefTree = atPos(in.currentPos) { Ident(ident()) }
       while (in.token == DOT) {
-        in.nextToken
+        in.nextToken()
         t = atPos(in.currentPos) { Select(t, ident()) }
       }
       t
@@ -252,7 +243,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
     def optArrayBrackets(tpt: Tree): Tree =
       if (in.token == LBRACKET) {
         val tpt1 = atPos(in.pos) { arrayOf(tpt) }
-        in.nextToken
+        in.nextToken()
         accept(RBRACKET)
         optArrayBrackets(tpt1)
       } else tpt
@@ -260,21 +251,21 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
     def basicType(): Tree =
       atPos(in.pos) {
         in.token match {
-          case BYTE => in.nextToken; TypeTree(ByteClass.tpe)
-          case SHORT => in.nextToken; TypeTree(ShortClass.tpe)
-          case CHAR => in.nextToken; TypeTree(CharClass.tpe)
-          case INT => in.nextToken; TypeTree(IntClass.tpe)
-          case LONG => in.nextToken; TypeTree(LongClass.tpe)
-          case FLOAT => in.nextToken; TypeTree(FloatClass.tpe)
-          case DOUBLE => in.nextToken; TypeTree(DoubleClass.tpe)
-          case BOOLEAN => in.nextToken; TypeTree(BooleanClass.tpe)
-          case _ => syntaxError("illegal start of type", true); errorTypeTree
+          case BYTE    => in.nextToken(); TypeTree(ByteTpe)
+          case SHORT   => in.nextToken(); TypeTree(ShortTpe)
+          case CHAR    => in.nextToken(); TypeTree(CharTpe)
+          case INT     => in.nextToken(); TypeTree(IntTpe)
+          case LONG    => in.nextToken(); TypeTree(LongTpe)
+          case FLOAT   => in.nextToken(); TypeTree(FloatTpe)
+          case DOUBLE  => in.nextToken(); TypeTree(DoubleTpe)
+          case BOOLEAN => in.nextToken(); TypeTree(BooleanTpe)
+          case _       => syntaxError("illegal start of type", skipIt = true); errorTypeTree
         }
       }
 
     def typ(): Tree =
       optArrayBrackets {
-        if (in.token == FINAL) in.nextToken
+        if (in.token == FINAL) in.nextToken()
         if (in.token == IDENTIFIER) {
           var t = typeArgs(atPos(in.currentPos)(Ident(ident())))
           // typeSelect generates Select nodes if the lhs is an Ident or Select,
@@ -287,7 +278,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
             case _ => SelectFromTypeTree(t, name.toTypeName)
           }
           while (in.token == DOT) {
-            in.nextToken
+            in.nextToken()
             t = typeArgs(atPos(in.currentPos)(typeSelect(t, ident())))
           }
           convertToTypeId(t)
@@ -301,16 +292,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
       def typeArg(): Tree =
         if (in.token == QMARK) {
           val pos = in.currentPos
-          in.nextToken
-          var lo: Tree = TypeTree(NothingClass.tpe)
-          var hi: Tree = TypeTree(AnyClass.tpe)
-          if (in.token == EXTENDS) {
-            in.nextToken
-            hi = typ()
-          } else if (in.token == SUPER) {
-            in.nextToken
-            lo = typ()
-          }
+          in.nextToken()
+          val hi = if (in.token == EXTENDS) { in.nextToken() ; typ() } else EmptyTree
+          val lo = if (in.token == SUPER)   { in.nextToken() ; typ() } else EmptyTree
           val tdef = atPos(pos) {
             TypeDef(
               Modifiers(Flags.JAVA | Flags.DEFERRED),
@@ -324,7 +308,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
           typ()
         }
       if (in.token == LT) {
-        in.nextToken
+        in.nextToken()
         val t1 = convertToTypeId(t)
         val args = repsep(typeArg, COMMA)
         acceptClosingAngle()
@@ -339,7 +323,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
     def annotations(): List[Tree] = {
       //var annots = new ListBuffer[Tree]
       while (in.token == AT) {
-        in.nextToken
+        in.nextToken()
         annotation()
       }
       List() // don't pass on annotations for now
@@ -348,46 +332,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
     /** Annotation ::= TypeName [`(` AnnotationArgument {`,` AnnotationArgument} `)`]
      */
     def annotation() {
-      val pos = in.currentPos
-      var t = qualId()
+      qualId()
       if (in.token == LPAREN) { skipAhead(); accept(RPAREN) }
       else if (in.token == LBRACE) { skipAhead(); accept(RBRACE) }
     }
-/*
-    def annotationArg() = {
-      val pos = in.token
-      if (in.token == IDENTIFIER && in.lookaheadToken == ASSIGN) {
-        val name = ident()
-        accept(ASSIGN)
-        atPos(pos) {
-          ValDef(Modifiers(Flags.JAVA), name, TypeTree(), elementValue())
-        }
-      } else {
-        elementValue()
-      }
-    }
-
-    def elementValue(): Tree =
-      if (in.token == AT) annotation()
-      else if (in.token == LBRACE) elementValueArrayInitializer()
-      else expression1()
-
-    def elementValueArrayInitializer() = {
-      accept(LBRACE)
-      val buf = new ListBuffer[Tree]
-      def loop() =
-        if (in.token != RBRACE) {
-          buf += elementValue()
-          if (in.token == COMMA) {
-            in.nextToken
-            loop()
-          }
-        }
-      loop()
-      accept(RBRACE)
-      buf.toList
-    }
- */
 
     def modifiers(inInterface: Boolean): Modifiers = {
       var flags: Long = Flags.JAVA
@@ -399,41 +347,41 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
       while (true) {
         in.token match {
           case AT if (in.lookaheadToken != INTERFACE) =>
-            in.nextToken
+            in.nextToken()
             annotation()
           case PUBLIC =>
             isPackageAccess = false
-            in.nextToken
+            in.nextToken()
           case PROTECTED =>
             flags |= Flags.PROTECTED
-            in.nextToken
+            in.nextToken()
           case PRIVATE =>
             isPackageAccess = false
             flags |= Flags.PRIVATE
-            in.nextToken
+            in.nextToken()
           case STATIC =>
             flags |= Flags.STATIC
-            in.nextToken
+            in.nextToken()
           case ABSTRACT =>
             flags |= Flags.ABSTRACT
-            in.nextToken
+            in.nextToken()
           case FINAL =>
             flags |= Flags.FINAL
-            in.nextToken
+            in.nextToken()
           case DEFAULT =>
             flags |= Flags.DEFAULTMETHOD
             in.nextToken()
           case NATIVE =>
             addAnnot(NativeAttr)
-            in.nextToken
+            in.nextToken()
           case TRANSIENT =>
             addAnnot(TransientAttr)
-            in.nextToken
+            in.nextToken()
           case VOLATILE =>
             addAnnot(VolatileAttr)
-            in.nextToken
+            in.nextToken()
           case SYNCHRONIZED | STRICTFP =>
-            in.nextToken
+            in.nextToken()
           case _ =>
             val privateWithin: TypeName =
               if (isPackageAccess && !inInterface) thisPackageName
@@ -447,7 +395,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
 
     def typeParams(): List[TypeDef] =
       if (in.token == LT) {
-        in.nextToken
+        in.nextToken()
         val tparams = repsep(typeParam, COMMA)
         acceptClosingAngle()
         tparams
@@ -456,27 +404,20 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
     def typeParam(): TypeDef =
       atPos(in.currentPos) {
         val name = identForType()
-        val hi =
-          if (in.token == EXTENDS) {
-            in.nextToken
-            bound()
-          } else {
-            scalaDot(tpnme.Any)
-          }
-        TypeDef(Modifiers(Flags.JAVA | Flags.DEFERRED | Flags.PARAM), name, List(),
-                TypeBoundsTree(scalaDot(tpnme.Nothing), hi))
+        val hi = if (in.token == EXTENDS) { in.nextToken() ; bound() } else EmptyTree
+        TypeDef(Modifiers(Flags.JAVA | Flags.DEFERRED | Flags.PARAM), name, Nil, TypeBoundsTree(EmptyTree, hi))
       }
 
     def bound(): Tree =
       atPos(in.currentPos) {
         val buf = ListBuffer[Tree](typ())
         while (in.token == AMP) {
-          in.nextToken
+          in.nextToken()
           buf += typ()
         }
         val ts = buf.toList
         if (ts.tail.isEmpty) ts.head
-        else CompoundTypeTree(Template(ts, emptyValDef, List()))
+        else CompoundTypeTree(Template(ts, noSelfType, List()))
       }
 
     def formalParams(): List[ValDef] = {
@@ -487,21 +428,21 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
     }
 
     def formalParam(): ValDef = {
-      if (in.token == FINAL) in.nextToken
+      if (in.token == FINAL) in.nextToken()
       annotations()
       var t = typ()
       if (in.token == DOTDOTDOT) {
-        in.nextToken
+        in.nextToken()
         t = atPos(t.pos) {
           AppliedTypeTree(scalaDot(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME), List(t))
         }
       }
-     varDecl(in.currentPos, Modifiers(Flags.JAVA | Flags.PARAM), t, ident())
+     varDecl(in.currentPos, Modifiers(Flags.JAVA | Flags.PARAM), t, ident().toTermName)
     }
 
     def optThrows() {
       if (in.token == THROWS) {
-        in.nextToken
+        in.nextToken()
         repsep(typ, COMMA)
       }
     }
@@ -520,8 +461,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
       val isVoid = in.token == VOID
       var rtpt =
         if (isVoid) {
-          in.nextToken
-          TypeTree(UnitClass.tpe) setPos in.pos
+          in.nextToken()
+          TypeTree(UnitTpe) setPos in.pos
         } else typ()
       var pos = in.currentPos
       val rtptName = rtpt match {
@@ -555,9 +496,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
               if (parentToken == AT && in.token == DEFAULT) {
                 val annot =
                   atPos(pos) {
-                    New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), ListOfNil)
+                    New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), Nil)
                   }
-                mods1 = mods1 withAnnotations List(annot)
+                mods1 = mods1 withAnnotations annot :: Nil
                 skipTo(SEMI)
                 accept(SEMI)
                 blankExpr
@@ -569,7 +510,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
           if (inInterface) mods1 |= Flags.DEFERRED
           List {
             atPos(pos) {
-              DefDef(mods1, name, tparams, List(vparams), rtpt, body)
+              DefDef(mods1, name.toTermName, tparams, List(vparams), rtpt, body)
             }
           }
         } else {
@@ -591,18 +532,18 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
      *  these potential definitions are real or not.
      */
     def fieldDecls(pos: Position, mods: Modifiers, tpt: Tree, name: Name): List[Tree] = {
-      val buf = ListBuffer[Tree](varDecl(pos, mods, tpt, name))
+      val buf = ListBuffer[Tree](varDecl(pos, mods, tpt, name.toTermName))
       val maybe = new ListBuffer[Tree] // potential variable definitions.
       while (in.token == COMMA) {
-        in.nextToken
+        in.nextToken()
         if (in.token == IDENTIFIER) { // if there's an ident after the comma ...
           val name = ident()
-          if (in.token == ASSIGN || in.token == SEMI) { // ... followed by a `=` or `;`, we know it's a real variable definition
+          if (in.token == EQUALS || in.token == SEMI) { // ... followed by a `=` or `;`, we know it's a real variable definition
             buf ++= maybe
-            buf += varDecl(in.currentPos, mods, tpt.duplicate, name)
+            buf += varDecl(in.currentPos, mods, tpt.duplicate, name.toTermName)
             maybe.clear()
           } else if (in.token == COMMA) { // ... if there's a comma after the ident, it could be a real vardef or not.
-            maybe += varDecl(in.currentPos, mods, tpt.duplicate, name)
+            maybe += varDecl(in.currentPos, mods, tpt.duplicate, name.toTermName)
           } else { // ... if there's something else we were still in the initializer of the
                    // previous var def; skip to next comma or semicolon.
             skipTo(COMMA, SEMI)
@@ -622,7 +563,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
 
     def varDecl(pos: Position, mods: Modifiers, tpt: Tree, name: TermName): ValDef = {
       val tpt1 = optArrayBrackets(tpt)
-      if (in.token == ASSIGN && !mods.isParameter) skipTo(COMMA, SEMI)
+      if (in.token == EQUALS && !mods.isParameter) skipTo(COMMA, SEMI)
       val mods1 = if (mods.isFinal) mods &~ Flags.FINAL else mods | Flags.MUTABLE
       atPos(pos) {
         ValDef(mods1, name, tpt1, blankExpr)
@@ -675,25 +616,25 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
       def collectIdents() : Int = {
         if (in.token == ASTERISK) {
           val starOffset = in.pos
-          in.nextToken
+          in.nextToken()
           buf += nme.WILDCARD
           starOffset
         } else {
           val nameOffset = in.pos
           buf += ident()
           if (in.token == DOT) {
-            in.nextToken
+            in.nextToken()
             collectIdents()
           } else nameOffset
         }
       }
-      if (in.token == STATIC) in.nextToken
+      if (in.token == STATIC) in.nextToken()
       else buf += nme.ROOTPKG
       val lastnameOffset = collectIdents()
       accept(SEMI)
       val names = buf.toList
       if (names.length < 2) {
-        syntaxError(pos, "illegal import", false)
+        syntaxError(pos, "illegal import", skipIt = false)
         List()
       } else {
         val qual = ((Ident(names.head): Tree) /: names.tail.init) (Select(_, _))
@@ -708,7 +649,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
 
     def interfacesOpt() =
       if (in.token == IMPLEMENTS) {
-        in.nextToken
+        in.nextToken()
         repsep(typ, COMMA)
       } else {
         List()
@@ -721,7 +662,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
       val tparams = typeParams()
       val superclass =
         if (in.token == EXTENDS) {
-          in.nextToken
+          in.nextToken()
           typ()
         } else {
           javaLangObject()
@@ -740,10 +681,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
       val tparams = typeParams()
       val parents =
         if (in.token == EXTENDS) {
-          in.nextToken
+          in.nextToken()
           repsep(typ, COMMA)
         } else {
-          List(javaLangObject)
+          List(javaLangObject())
         }
       val (statics, body) = typeBody(INTERFACE, name)
       addCompanionObject(statics, atPos(pos) {
@@ -770,7 +711,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
           skipAhead() // skip init block, we just assume we have seen only static
           accept(RBRACE)
         } else if (in.token == SEMI) {
-          in.nextToken
+          in.nextToken()
         } else {
           if (in.token == ENUM || definesInterface(in.token)) mods |= Flags.STATIC
           val decls = memberDecl(mods, parentToken)
@@ -822,7 +763,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
         if (in.token != RBRACE && in.token != SEMI && in.token != EOF) {
           buf += enumConst(enumType)
           if (in.token == COMMA) {
-            in.nextToken
+            in.nextToken()
             parseEnumConsts()
           }
         }
@@ -831,7 +772,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
       val consts = buf.toList
       val (statics, body) =
         if (in.token == SEMI) {
-          in.nextToken
+          in.nextToken()
           typeBodyDecls(ENUM, name)
         } else {
           (List(), List())
@@ -844,14 +785,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
           blankExpr),
         DefDef(
           Modifiers(Flags.JAVA | Flags.STATIC), nme.valueOf, List(),
-          List(List(makeParam("x", TypeTree(StringClass.tpe)))),
+          List(List(makeParam("x", TypeTree(StringTpe)))),
           enumType,
           blankExpr))
       accept(RBRACE)
       val superclazz =
         AppliedTypeTree(javaLangDot(tpnme.Enum), List(enumType))
       addCompanionObject(consts ::: statics ::: predefs, atPos(pos) {
-        ClassDef(mods, name, List(),
+        ClassDef(mods | Flags.ENUM, name, List(),
                  makeTemplate(superclazz :: interfaces, body))
       })
     }
@@ -870,10 +811,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
           skipAhead()
           accept(RBRACE)
         }
-        // The STABLE flag is to signal to namer that this was read from a
-        // java enum, and so should be given a Constant type (thereby making
-        // it usable in annotations.)
-        ValDef(Modifiers(Flags.STABLE | Flags.JAVA | Flags.STATIC), name, enumType, blankExpr)
+        ValDef(Modifiers(Flags.ENUM | Flags.STABLE | Flags.JAVA | Flags.STATIC), name.toTermName, enumType, blankExpr)
       }
     }
 
@@ -882,13 +820,13 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
       case INTERFACE => interfaceDecl(mods)
       case AT        => annotationDecl(mods)
       case CLASS     => classDecl(mods)
-      case _         => in.nextToken; syntaxError("illegal start of type declaration", true); List(errorTypeTree)
+      case _         => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree)
     }
 
     /** CompilationUnit ::= [package QualId semi] TopStatSeq
      */
     def compilationUnit(): Tree = {
-      var pos = in.currentPos;
+      var pos = in.currentPos
       val pkg: RefTree =
         if (in.token == AT || in.token == PACKAGE) {
           annotations()
@@ -908,9 +846,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
       while (in.token == IMPORT)
         buf ++= importDecl()
       while (in.token != EOF && in.token != RBRACE) {
-        while (in.token == SEMI) in.nextToken
+        while (in.token == SEMI) in.nextToken()
         if (in.token != EOF)
-          buf ++= typeDecl(modifiers(false))
+          buf ++= typeDecl(modifiers(inInterface = false))
       }
       accept(EOF)
       atPos(pos) {
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index e230585..c540121 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -10,7 +10,7 @@ import scala.tools.nsc.util.JavaCharArrayReader
 import scala.reflect.internal.util._
 import scala.reflect.internal.Chars._
 import JavaTokens._
-import scala.annotation.switch
+import scala.annotation.{ switch, tailrec }
 import scala.language.implicitConversions
 
 // Todo merge these better with Scanners
@@ -57,23 +57,14 @@ trait JavaScanners extends ast.parser.ScannersCommon {
   /** ...
    */
   abstract class AbstractJavaScanner extends AbstractJavaTokenData {
-    implicit def p2g(pos: Position): ScanPosition
     implicit def g2p(pos: ScanPosition): Position
 
-    /** the last error position
-     */
-    var errpos: ScanPosition
-    var lastPos: ScanPosition
-    def skipToken: ScanPosition
     def nextToken(): Unit
     def next: AbstractJavaTokenData
     def intVal(negated: Boolean): Long
     def floatVal(negated: Boolean): Double
-    def intVal: Long = intVal(false)
-    def floatVal: Double = floatVal(false)
-    //def token2string(token : Int) : String = configuration.token2string(token)
-    /** return recent scala doc, if any */
-    def flushDoc: DocComment
+    def intVal: Long = intVal(negated = false)
+    def floatVal: Double = floatVal(negated = false)
     def currentPos: Position
   }
 
@@ -164,7 +155,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
       case AMP        => "`&'"
       case AMPAMP     => "`&&'"
       case AMPEQ      => "`&='"
-      case ASSIGN     => "`='"
       case ASTERISK   => "`*'"
       case ASTERISKEQ => "`*='"
       case AT         => "`@'"
@@ -178,6 +168,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
       case DOT        => "`.'"
       case DOTDOTDOT  => "`...'"
       case EQEQ       => "`=='"
+      case EQUALS     => "`='"
       case GT         => "`>'"
       case GTEQ       => "`>='"
       case GTGT       => "`>>'"
@@ -227,17 +218,9 @@ trait JavaScanners extends ast.parser.ScannersCommon {
   abstract class JavaScanner extends AbstractJavaScanner with JavaTokenData with Cloneable with ScannerCommon {
     override def intVal = super.intVal// todo: needed?
     override def floatVal = super.floatVal
-    override var errpos: Int = NoPos
     def currentPos: Position = g2p(pos - 1)
-
     var in: JavaCharArrayReader = _
 
-    def dup: JavaScanner = {
-      val dup = clone().asInstanceOf[JavaScanner]
-      dup.in = in.dup
-      dup
-    }
-
     /** character buffer for literals
      */
     val cbuf = new StringBuilder()
@@ -252,22 +235,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
       cbuf.setLength(0)
     }
 
-    /** buffer for the documentation comment
-     */
-    var docBuffer: StringBuilder = null
-
-    def flushDoc: DocComment = {
-      val ret = if (docBuffer != null) DocComment(docBuffer.toString, NoPosition) else null
-      docBuffer = null
-      ret
-    }
-
-    /** add the given character to the documentation buffer
-     */
-    protected def putDocChar(c: Char) {
-      if (docBuffer ne null) docBuffer.append(c)
-    }
-
     private class JavaTokenData0 extends JavaTokenData
 
     /** we need one token lookahead
@@ -277,13 +244,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
 
 // Get next token ------------------------------------------------------------
 
-    /** read next token and return last position
-     */
-    def skipToken: Int = {
-      val p = pos; nextToken
-      p - 1
-    }
-
     def nextToken() {
       if (next.token == EMPTY) {
         fetchToken()
@@ -296,7 +256,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
 
     def lookaheadToken: Int = {
       prev copyFrom this
-      nextToken
+      nextToken()
       val t = token
       next copyFrom this
       this copyFrom prev
@@ -308,11 +268,10 @@ trait JavaScanners extends ast.parser.ScannersCommon {
     private def fetchToken() {
       if (token == EOF) return
       lastPos = in.cpos - 1
-      //var index = bp
       while (true) {
         in.ch match {
           case ' ' | '\t' | CR | LF | FF =>
-            in.next
+            in.next()
           case _ =>
             pos = in.cpos
             (in.ch: @switch) match {
@@ -329,47 +288,47 @@ trait JavaScanners extends ast.parser.ScannersCommon {
                    'u' | 'v' | 'w' | 'x' | 'y' |
                    'z' =>
                 putChar(in.ch)
-                in.next
-                getIdentRest
+                in.next()
+                getIdentRest()
                 return
 
               case '0' =>
                 putChar(in.ch)
-                in.next
+                in.next()
                 if (in.ch == 'x' || in.ch == 'X') {
-                  in.next
+                  in.next()
                   base = 16
                 } else {
                   base = 8
                 }
-                getNumber
+                getNumber()
                 return
 
               case '1' | '2' | '3' | '4' |
                    '5' | '6' | '7' | '8' | '9' =>
                 base = 10
-                getNumber
+                getNumber()
                 return
 
               case '\"' =>
-                in.next
+                in.next()
                 while (in.ch != '\"' && (in.isUnicode || in.ch != CR && in.ch != LF && in.ch != SU)) {
                   getlitch()
                 }
                 if (in.ch == '\"') {
                   token = STRINGLIT
                   setName()
-                  in.next
+                  in.next()
                 } else {
                   syntaxError("unclosed string literal")
                 }
                 return
 
               case '\'' =>
-                in.next
+                in.next()
                 getlitch()
                 if (in.ch == '\'') {
-                  in.next
+                  in.next()
                   token = CHARLIT
                   setName()
                 } else {
@@ -378,32 +337,32 @@ trait JavaScanners extends ast.parser.ScannersCommon {
                 return
 
               case '=' =>
-                token = ASSIGN
-                in.next
+                token = EQUALS
+                in.next()
                 if (in.ch == '=') {
                   token = EQEQ
-                  in.next
+                  in.next()
                 }
                 return
 
               case '>' =>
                 token = GT
-                in.next
+                in.next()
                 if (in.ch == '=') {
                   token = GTEQ
-                  in.next
+                  in.next()
                 } else if (in.ch == '>') {
                   token = GTGT
-                  in.next
+                  in.next()
                   if (in.ch == '=') {
                     token = GTGTEQ
-                    in.next
+                    in.next()
                   } else if (in.ch == '>') {
                     token = GTGTGT
-                    in.next
+                    in.next()
                     if (in.ch == '=') {
                       token = GTGTGTEQ
-                      in.next
+                      in.next()
                     }
                   }
                 }
@@ -411,145 +370,145 @@ trait JavaScanners extends ast.parser.ScannersCommon {
 
               case '<' =>
                 token = LT
-                in.next
+                in.next()
                 if (in.ch == '=') {
                   token = LTEQ
-                  in.next
+                  in.next()
                 } else if (in.ch == '<') {
                   token = LTLT
-                  in.next
+                  in.next()
                   if (in.ch == '=') {
                     token = LTLTEQ
-                    in.next
+                    in.next()
                   }
                 }
                 return
 
               case '!' =>
                 token = BANG
-                in.next
+                in.next()
                 if (in.ch == '=') {
                   token = BANGEQ
-                  in.next
+                  in.next()
                 }
                 return
 
               case '~' =>
                 token = TILDE
-                in.next
+                in.next()
                 return
 
               case '?' =>
                 token = QMARK
-                in.next
+                in.next()
                 return
 
               case ':' =>
                 token = COLON
-                in.next
+                in.next()
                 return
 
               case '@' =>
                 token = AT
-                in.next
+                in.next()
                 return
 
               case '&' =>
                 token = AMP
-                in.next
+                in.next()
                 if (in.ch == '&') {
                   token = AMPAMP
-                  in.next
+                  in.next()
                 } else if (in.ch == '=') {
                   token = AMPEQ
-                  in.next
+                  in.next()
                 }
                 return
 
               case '|' =>
                 token = BAR
-                in.next
+                in.next()
                 if (in.ch == '|') {
                   token = BARBAR
-                  in.next
+                  in.next()
                 } else if (in.ch == '=') {
                   token = BAREQ
-                  in.next
+                  in.next()
                 }
                 return
 
               case '+' =>
                 token = PLUS
-                in.next
+                in.next()
                 if (in.ch == '+') {
                   token = PLUSPLUS
-                  in.next
+                  in.next()
                 } else if (in.ch == '=') {
                   token = PLUSEQ
-                  in.next
+                  in.next()
                 }
                 return
 
               case '-' =>
                 token = MINUS
-                in.next
+                in.next()
                 if (in.ch == '-') {
                   token = MINUSMINUS
-                  in.next
+                  in.next()
                 } else if (in.ch == '=') {
                   token = MINUSEQ
-                  in.next
+                  in.next()
                 }
                 return
 
               case '*' =>
                 token = ASTERISK
-                in.next
+                in.next()
                 if (in.ch == '=') {
                   token = ASTERISKEQ
-                  in.next
+                  in.next()
                 }
                 return
 
               case '/' =>
-                in.next
+                in.next()
                 if (!skipComment()) {
                   token = SLASH
-                  in.next
+                  in.next()
                   if (in.ch == '=') {
                     token = SLASHEQ
-                    in.next
+                    in.next()
                   }
                   return
                 }
 
               case '^' =>
                 token = HAT
-                in.next
+                in.next()
                 if (in.ch == '=') {
                   token = HATEQ
-                  in.next
+                  in.next()
                 }
                 return
 
               case '%' =>
                 token = PERCENT
-                in.next
+                in.next()
                 if (in.ch == '=') {
                   token = PERCENTEQ
-                  in.next
+                  in.next()
                 }
                 return
 
               case '.' =>
                 token = DOT
-                in.next
+                in.next()
                 if ('0' <= in.ch && in.ch <= '9') {
-                  putChar('.'); getFraction
+                  putChar('.'); getFraction()
                 } else if (in.ch == '.') {
-                  in.next
+                  in.next()
                   if (in.ch == '.') {
-                    in.next
+                    in.next()
                     token = DOTDOTDOT
                   } else syntaxError("`.' character expected")
                 }
@@ -557,60 +516,60 @@ trait JavaScanners extends ast.parser.ScannersCommon {
 
               case ';' =>
                 token = SEMI
-                in.next
+                in.next()
                 return
 
               case ',' =>
                 token = COMMA
-                in.next
+                in.next()
                 return
 
               case '(' =>
                 token = LPAREN
-                in.next
+                in.next()
                 return
 
               case '{' =>
                 token = LBRACE
-                in.next
+                in.next()
                 return
 
               case ')' =>
                 token = RPAREN
-                in.next
+                in.next()
                 return
 
               case '}' =>
                 token = RBRACE
-                in.next
+                in.next()
                 return
 
               case '[' =>
                 token = LBRACKET
-                in.next
+                in.next()
                 return
 
               case ']' =>
                 token = RBRACKET
-                in.next
+                in.next()
                 return
 
               case SU =>
                 if (!in.hasNext) token = EOF
                 else {
                   syntaxError("illegal character")
-                  in.next
+                  in.next()
                 }
                 return
 
               case _ =>
                 if (Character.isUnicodeIdentifierStart(in.ch)) {
                   putChar(in.ch)
-                  in.next
-                  getIdentRest
+                  in.next()
+                  getIdentRest()
                 } else {
                   syntaxError("illegal character: "+in.ch.toInt)
-                  in.next
+                  in.next()
                 }
                 return
             }
@@ -618,33 +577,20 @@ trait JavaScanners extends ast.parser.ScannersCommon {
       }
     }
 
-    private def skipComment(): Boolean = {
-      if (in.ch == '/') {
-        do {
-          in.next
-        } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU))
-        true
-      } else if (in.ch == '*') {
-        docBuffer = null
-        in.next
-        val scalaDoc = ("/**", "*/")
-        if (in.ch == '*' && forScaladoc)
-          docBuffer = new StringBuilder(scalaDoc._1)
-        do {
-          do {
-            if (in.ch != '*' && in.ch != SU) {
-              in.next; putDocChar(in.ch)
-            }
-          } while (in.ch != '*' && in.ch != SU)
-          while (in.ch == '*') {
-            in.next; putDocChar(in.ch)
-          }
-        } while (in.ch != '/' && in.ch != SU)
-        if (in.ch == '/') in.next
-        else incompleteInputError("unclosed comment")
-        true
-      } else {
-        false
+    protected def skipComment(): Boolean = {
+      @tailrec def skipLineComment(): Unit = in.ch match {
+        case CR | LF | SU =>
+        case _            => in.next; skipLineComment()
+      }
+      @tailrec def skipJavaComment(): Unit = in.ch match {
+        case SU  => incompleteInputError("unclosed comment")
+        case '*' => in.next; if (in.ch == '/') in.next else skipJavaComment()
+        case _   => in.next; skipJavaComment()
+      }
+      in.ch match {
+        case '/' => in.next ; skipLineComment() ; true
+        case '*' => in.next ; skipJavaComment() ; true
+        case _   => false
       }
     }
 
@@ -668,12 +614,12 @@ trait JavaScanners extends ast.parser.ScannersCommon {
                '0' | '1' | '2' | '3' | '4' |
                '5' | '6' | '7' | '8' | '9' =>
             putChar(in.ch)
-            in.next
+            in.next()
 
           case '_' =>
             putChar(in.ch)
-            in.next
-            getIdentRest
+            in.next()
+            getIdentRest()
             return
           case SU =>
             setName()
@@ -682,7 +628,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
           case _ =>
             if (Character.isUnicodeIdentifierPart(in.ch)) {
               putChar(in.ch)
-              in.next
+              in.next()
             } else {
               setName()
               token = JavaScannerConfiguration.name2token(name)
@@ -698,17 +644,17 @@ trait JavaScanners extends ast.parser.ScannersCommon {
     */
     protected def getlitch() =
       if (in.ch == '\\') {
-        in.next
+        in.next()
         if ('0' <= in.ch && in.ch <= '7') {
           val leadch: Char = in.ch
           var oct: Int = digit2int(in.ch, 8)
-          in.next
+          in.next()
           if ('0' <= in.ch && in.ch <= '7') {
             oct = oct * 8 + digit2int(in.ch, 8)
-            in.next
+            in.next()
             if (leadch <= '3' && '0' <= in.ch && in.ch <= '7') {
               oct = oct * 8 + digit2int(in.ch, 8)
-              in.next
+              in.next()
             }
           }
           putChar(oct.asInstanceOf[Char])
@@ -726,11 +672,11 @@ trait JavaScanners extends ast.parser.ScannersCommon {
               syntaxError(in.cpos - 1, "invalid escape character")
               putChar(in.ch)
           }
-          in.next
+          in.next()
         }
       } else  {
         putChar(in.ch)
-        in.next
+        in.next()
       }
 
     /** read fractional part and exponent of floating point number
@@ -740,35 +686,35 @@ trait JavaScanners extends ast.parser.ScannersCommon {
       token = DOUBLELIT
       while ('0' <= in.ch && in.ch <= '9') {
         putChar(in.ch)
-        in.next
+        in.next()
       }
       if (in.ch == 'e' || in.ch == 'E') {
         val lookahead = in.copy
-        lookahead.next
+        lookahead.next()
         if (lookahead.ch == '+' || lookahead.ch == '-') {
-          lookahead.next
+          lookahead.next()
         }
         if ('0' <= lookahead.ch && lookahead.ch <= '9') {
           putChar(in.ch)
-          in.next
+          in.next()
           if (in.ch == '+' || in.ch == '-') {
             putChar(in.ch)
-            in.next
+            in.next()
           }
           while ('0' <= in.ch && in.ch <= '9') {
             putChar(in.ch)
-            in.next
+            in.next()
           }
         }
         token = DOUBLELIT
       }
       if (in.ch == 'd' || in.ch == 'D') {
         putChar(in.ch)
-        in.next
+        in.next()
         token = DOUBLELIT
       } else if (in.ch == 'f' || in.ch == 'F') {
         putChar(in.ch)
-        in.next
+        in.next()
         token = FLOATLIT
       }
       setName()
@@ -778,7 +724,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
      */
     def intVal(negated: Boolean): Long = {
       if (token == CHARLIT && !negated) {
-        if (name.length > 0) name.charAt(0) else 0
+        if (name.length > 0) name.charAt(0).toLong else 0
       } else {
         var value: Long = 0
         val divider = if (base == 10) 1 else 2
@@ -828,23 +774,23 @@ trait JavaScanners extends ast.parser.ScannersCommon {
     protected def getNumber() {
       while (digit2int(in.ch, if (base < 10) 10 else base) >= 0) {
         putChar(in.ch)
-        in.next
+        in.next()
       }
       token = INTLIT
       if (base <= 10 && in.ch == '.') {
         val lookahead = in.copy
-        lookahead.next
+        lookahead.next()
         lookahead.ch match {
           case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' |
                '8' | '9' | 'd' | 'D' | 'e' | 'E' | 'f' | 'F' =>
             putChar(in.ch)
-            in.next
-            return getFraction
+            in.next()
+            return getFraction()
           case _ =>
             if (!isIdentifierStart(lookahead.ch)) {
               putChar(in.ch)
-              in.next
-              return getFraction
+              in.next()
+              return getFraction()
             }
         }
       }
@@ -852,11 +798,11 @@ trait JavaScanners extends ast.parser.ScannersCommon {
           (in.ch == 'e' || in.ch == 'E' ||
            in.ch == 'f' || in.ch == 'F' ||
            in.ch == 'd' || in.ch == 'D')) {
-        return getFraction
+        return getFraction()
       }
       setName()
       if (in.ch == 'l' || in.ch == 'L') {
-        in.next
+        in.next()
         token = LONGLIT
       }
     }
@@ -868,7 +814,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
     def syntaxError(pos: Int, msg: String) {
       error(pos, msg)
       token = ERROR
-      errpos = pos
     }
 
     /** generate an error at the current token position
@@ -879,7 +824,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
     def incompleteInputError(msg: String) {
       incompleteInputError(pos, msg)
       token = EOF
-      errpos = pos
     }
 
     override def toString() = token match {
@@ -908,21 +852,17 @@ trait JavaScanners extends ast.parser.ScannersCommon {
     /** INIT: read lookahead character and token.
      */
     def init() {
-      in.next
-      nextToken
+      in.next()
+      nextToken()
     }
   }
 
-  /** ...
-   */
   class JavaUnitScanner(unit: CompilationUnit) extends JavaScanner {
     in = new JavaCharArrayReader(unit.source.content, !settings.nouescape.value, syntaxError)
-    init
-    def warning(pos: Int, msg: String) = unit.warning(pos, msg)
+    init()
     def error  (pos: Int, msg: String) = unit.  error(pos, msg)
     def incompleteInputError(pos: Int, msg: String) = unit.incompleteInputError(pos, msg)
     def deprecationWarning(pos: Int, msg: String) = unit.deprecationWarning(pos, msg)
-    implicit def p2g(pos: Position): Int = if (pos.isDefined) pos.point else -1
-    implicit def g2p(pos: Int): Position = new OffsetPosition(unit.source, pos)
+    implicit def g2p(pos: Int): Position = Position.offset(unit.source, pos)
   }
 }
diff --git a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
index a562de2..9b31e6e 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
@@ -6,123 +6,89 @@
 package scala.tools.nsc
 package javac
 
-object JavaTokens extends ast.parser.Tokens {
+object JavaTokens extends ast.parser.CommonTokens {
 
-  def isLiteral(code : Int) =
+  def isLiteral(code: Int) =
     code >= CHARLIT && code <= STRINGLIT
 
   /** identifiers */
   final val IDENTIFIER = 10
-  def isIdentifier(code : Int) =
+  def isIdentifier(code: Int) =
     code == IDENTIFIER
 
   /** keywords */
-  final val ABSTRACT = 20
-  final val ASSERT = 21
-  final val BOOLEAN = 22
-  final val BREAK = 23
-  final val BYTE = 24
-  final val CASE = 25
-  final val CATCH = 26
-  final val CHAR = 27
-  final val CLASS = 28
-  final val CONST = 29
-  final val CONTINUE = 30
-  final val DEFAULT = 31
-  final val DO = 32
-  final val DOUBLE = 33
-  final val ELSE = 34
-  final val ENUM = 35
-  final val EXTENDS = 36
-  final val FINAL = 37
-  final val FINALLY = 38
-  final val FLOAT = 39
-  final val FOR = 40
-  final val IF = 41
-  final val GOTO = 42
-  final val IMPLEMENTS = 43
-  final val IMPORT = 44
-  final val INSTANCEOF = 45
-  final val INT = 46
-  final val INTERFACE = 47
-  final val LONG = 48
-  final val NATIVE = 49
-  final val NEW = 50
-  final val PACKAGE = 51
-  final val PRIVATE = 52
-  final val PROTECTED = 53
-  final val PUBLIC = 54
-  final val RETURN = 55
-  final val SHORT = 56
-  final val STATIC = 57
-  final val STRICTFP = 58
-  final val SUPER = 59
-  final val SWITCH = 60
-  final val SYNCHRONIZED = 61
-  final val THIS = 62
-  final val THROW = 63
-  final val THROWS = 64
-  final val TRANSIENT = 65
-  final val TRY = 66
-  final val VOID = 67
-  final val VOLATILE = 68
-  final val WHILE = 69
+  final val INSTANCEOF = 27
+  final val CONST = 28
 
-  def isKeyword(code : Int) =
-    code >= ABSTRACT && code <= WHILE
+  /** modifiers */
+  final val PUBLIC = 42
+  final val DEFAULT = 47
+  final val STATIC = 48
+  final val TRANSIENT = 50
+  final val VOLATILE = 51
+  final val SYNCHRONIZED = 52
+  final val NATIVE = 53
+  final val STRICTFP = 54
+  final val THROWS = 56
 
-  /** special symbols */
-  final val COMMA = 70
-  final val SEMI = 71
-  final val DOT = 72
-  final val AT = 73
-  final val COLON = 74
-  final val ASSIGN = 75
-  final val EQEQ = 76
-  final val BANGEQ = 77
-  final val LT = 78
-  final val GT = 79
-  final val LTEQ = 80
-  final val GTEQ = 81
-  final val BANG = 82
-  final val QMARK = 83
-  final val AMP = 84
-  final val BAR = 85
-  final val PLUS = 86
-  final val MINUS = 87
-  final val ASTERISK = 88
-  final val SLASH = 89
-  final val PERCENT = 90
-  final val HAT = 91
-  final val LTLT = 92
-  final val GTGT = 93
-  final val GTGTGT = 94
-  final val AMPAMP = 95
-  final val BARBAR = 96
-  final val PLUSPLUS = 97
-  final val MINUSMINUS = 98
-  final val TILDE = 99
-  final val DOTDOTDOT = 100
-  final val AMPEQ = 104
-  final val BAREQ = 105
-  final val PLUSEQ = 106
-  final val MINUSEQ = 107
-  final val ASTERISKEQ = 1010
-  final val SLASHEQ = 109
-  final val PERCENTEQ = 110
-  final val HATEQ = 111
-  final val LTLTEQ = 112
-  final val GTGTEQ = 113
-  final val GTGTGTEQ = 114
+  /** templates */
+  final val INTERFACE = 66
+  final val ENUM = 67
+  final val IMPLEMENTS = 69
+
+  /** control structures */
+  final val BREAK = 87
+  final val CONTINUE = 88
+  final val GOTO = 89
+  final val SWITCH = 94
+  final val ASSERT = 98
 
-  def isSymbol(code : Int) =
-    code >= COMMA && code <= GTGTGTEQ
+  /** special symbols */
+  final val EQEQ = 140
+  final val BANGEQ = 141
+  final val LT = 142
+  final val GT = 143
+  final val LTEQ = 144
+  final val GTEQ = 145
+  final val BANG = 146
+  final val QMARK = 147
+  final val AMP = 148
+  final val BAR = 149
+  final val PLUS = 150
+  final val MINUS = 151
+  final val ASTERISK = 152
+  final val SLASH = 153
+  final val PERCENT = 154
+  final val HAT = 155
+  final val LTLT = 156
+  final val GTGT = 157
+  final val GTGTGT = 158
+  final val AMPAMP = 159
+  final val BARBAR = 160
+  final val PLUSPLUS = 161
+  final val MINUSMINUS = 162
+  final val TILDE = 163
+  final val DOTDOTDOT = 164
+  final val AMPEQ = 165
+  final val BAREQ = 166
+  final val PLUSEQ = 167
+  final val MINUSEQ = 168
+  final val ASTERISKEQ = 169
+  final val SLASHEQ = 170
+  final val PERCENTEQ = 171
+  final val HATEQ = 172
+  final val LTLTEQ = 173
+  final val GTGTEQ = 174
+  final val GTGTGTEQ = 175
 
-  /** parenthesis */
-  final val LPAREN = 115
-  final val RPAREN = 116
-  final val LBRACKET = 117
-  final val RBRACKET = 118
-  final val LBRACE = 119
-  final val RBRACE = 120
+  /** primitive types */
+  final val VOID = 180
+  final val BOOLEAN = 181
+  final val BYTE = 182
+  final val SHORT = 183
+  final val CHAR = 184
+  final val INT = 185
+  final val LONG = 186
+  final val FLOAT = 187
+  final val DOUBLE = 188
 }
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
deleted file mode 100644
index 5ca9fd5..0000000
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ /dev/null
@@ -1,138 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import ast.{ Printers, Trees }
-import java.io.{ StringWriter, PrintWriter }
-import scala.annotation.elidable
-import scala.language.postfixOps
-
-/** Ancillary bits of ParallelMatching which are better off
- *  out of the way.
- */
-trait MatchSupport extends ast.TreeDSL { self: ParallelMatching =>
-
-  import global.{ typer => _, _ }
-  import CODE._
-
-  /** Debugging support: enable with -Ypmat-debug **/
-  private final def trace = settings.Ypmatdebug.value
-
-  def impossible:           Nothing = abort("this never happens")
-
-  def treeCollect[T](tree: Tree, pf: PartialFunction[Tree, T]): List[T] =
-    tree filter (pf isDefinedAt _) map (x => pf(x))
-
-  object Types {
-    import definitions._
-
-    val subrangeTypes = Set[Symbol](ByteClass, ShortClass, CharClass, IntClass)
-
-    implicit class RichType(undecodedTpe: Type) {
-      def tpe = decodedEqualsType(undecodedTpe)
-      def isAnyRef = tpe <:< AnyRefClass.tpe
-
-      // These tests for final classes can inspect the typeSymbol
-      private def is(s: Symbol) = tpe.typeSymbol eq s
-      def      isByte = is(ByteClass)
-      def     isShort = is(ShortClass)
-      def       isInt = is(IntClass)
-      def      isChar = is(CharClass)
-      def   isBoolean = is(BooleanClass)
-      def   isNothing = is(NothingClass)
-      def     isArray = is(ArrayClass)
-    }
-  }
-
-  object Debug {
-    def typeToString(t: Type): String = t match {
-      case NoType => "x"
-      case x      => x.toString
-    }
-    def symbolToString(s: Symbol): String = s match {
-      case x  => x.toString
-    }
-    def treeToString(t: Tree): String = treeInfo.unbind(t) match {
-      case EmptyTree            => "?"
-      case WILD()               => "_"
-      case Literal(Constant(x)) => "LIT(%s)".format(x)
-      case Apply(fn, args)      => "%s(%s)".format(treeToString(fn), args map treeToString mkString ",")
-      case Typed(expr, tpt)     => "%s: %s".format(treeToString(expr), treeToString(tpt))
-      case x                    =>  x.toString + " (" + x.getClass + ")"
-    }
-
-    // Formatting for some error messages
-    private val NPAD = 15
-    def pad(s: String): String = "%%%ds" format (NPAD-1) format s
-    def pad(s: Any): String = pad(s match {
-      case x: Tree    => treeToString(x)
-      case x          => x.toString
-    })
-
-    // pretty print for debugging
-    def pp(x: Any): String = pp(x, false)
-    def pp(x: Any, newlines: Boolean): String = {
-      val stripStrings = List("""java\.lang\.""", """\$iw\.""")
-
-      def clean(s: String): String =
-        stripStrings.foldLeft(s)((s, x) => s.replaceAll(x, ""))
-
-      def pplist(xs: List[Any]): String =
-        if (newlines) (xs map ("    " + _ + "\n")).mkString("\n", "", "")
-        else xs.mkString("(", ", ", ")")
-
-      pp(x match {
-        case s: String      => return clean(s)
-        case x: Tree        => asCompactString(x)
-        case xs: List[_]    => pplist(xs map pp)
-        case x: Tuple2[_,_] => "%s -> %s".format(pp(x._1), pp(x._2))
-        case x              => x.toString
-      })
-    }
-
-    @elidable(elidable.FINE) def TRACE(f: String, xs: Any*): Unit = {
-      if (trace) {
-        val msg = if (xs.isEmpty) f else f.format(xs map pp: _*)
-        println(msg)
-      }
-    }
-    @elidable(elidable.FINE) def traceCategory(cat: String, f: String, xs: Any*) = {
-      if (trace)
-        TRACE("[" + """%10s""".format(cat) + "]  " + f, xs: _*)
-    }
-    def tracing[T](s: String)(x: T): T = {
-      if (trace)
-        println(("[" + """%10s""".format(s) + "]  %s") format pp(x))
-
-      x
-    }
-    private[nsc] def printing[T](fmt: String, xs: Any*)(x: T): T = {
-      println(fmt.format(xs: _*) + " == " + x)
-      x
-    }
-    private[nsc] def debugging[T](fmt: String, xs: Any*)(x: T): T = {
-      if (settings.debug.value) printing(fmt, xs: _*)(x)
-      else x
-    }
-
-    def indent(s: Any) = s.toString() split "\n" map ("  " + _) mkString "\n"
-    def indentAll(s: Seq[Any]) = s map ("  " + _.toString() + "\n") mkString
-  }
-
-  /** Drops the 'i'th element of a list.
-   */
-  def dropIndex[T](xs: List[T], n: Int) = {
-    val (l1, l2) = xs splitAt n
-    l1 ::: (l2 drop 1)
-  }
-
-  /** Extract the nth element of a list and return it and the remainder.
-   */
-  def extractIndex[T](xs: List[T], n: Int): (T, List[T]) =
-    (xs(n), dropIndex(xs, n))
-}
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
deleted file mode 100644
index daefe4c..0000000
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ /dev/null
@@ -1,259 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import symtab.Flags
-import scala.collection.mutable
-import scala.language.implicitConversions
-
-trait Matrix extends MatrixAdditions {
-  self: ExplicitOuter with ParallelMatching =>
-
-  import global.{ typer => _, _ }
-  import analyzer.Typer
-  import CODE._
-  import Debug._
-  import Flags.{ SYNTHETIC, MUTABLE }
-
-  private[matching] val NO_EXHAUSTIVE = Flags.TRANS_FLAG
-
-  /** Translation of match expressions.
-   *
-   *  `p`:  pattern
-   *  `g`:  guard
-   *  `bx`: body index
-   *
-   *   internal representation is (tvars:List[Symbol], rows:List[Row])
-   *
-   *         tmp1      tmp_n
-   *    Row( p_11  ...  p_1n   g_1  b_1 ) + subst
-   *
-   *    Row( p_m1  ...  p_mn   g_m  b_m ) + subst
-   *
-   * Implementation based on the algorithm described in
-   *
-   *   "A Term Pattern-Match Compiler Inspired by Finite Automata Theory"
-   *   Mikael Pettersson
-   *   ftp://ftp.ida.liu.se/pub/labs/pelab/papers/cc92pmc.ps.gz
-   *
-   *  @author Burak Emir
-   */
-
-  /** "The Mixture Rule"
-
-        {v=pat1, pats1 .. } {q1}
-  match {..               } {..}
-        {v=patn, patsn .. } {qn}
-
-  This is the real work-horse of the algorithm. There is some column whose top-most pattern is a
-  constructor. (For simplicity, it is depicted above as the left-most column, but any column will do.)
-  The goal is to build a test state with the variable v and some outgoing arcs (one for each
-  constructor and possibly a default arc). For each constructor in the selected column, its arc is
-  defined as follows:
-
-  Let {i1,...,ij} be the row indices of the patterns in the column that match c. Since the
-  patterns are viewed as regular expressions, this will be the indices of the patterns that either
-  have the same constructor c, or are wildcards.
-
-  Let {pat1,...,patj} be the patterns in the column corresponding to the indices computed
-  above, and let n be the arity of the constructor c, i.e. the number of sub-patterns it has. For
-  each pati, its n sub-patterns are extracted; if pati is a wildcard, n wildcards are produced
-  instead, each tagged with the right path variable. This results in a pattern matrix with n
-  columns and j rows. This matrix is then appended to the result of selecting, from each
-  column in the rest of the original matrix, those rows whose indices are in {i1,...,ij}. Finally
-  the indices are used to select the corresponding final states that go with these rows. Note
-  that the order of the indices is significant; selected rows do not change their relative orders.
-  The arc for the constructor c is now defined as (c’, state), where c’ is c with any
-  immediate sub-patterns replaced by their path variables (thus c’ is a simple pattern), and
-  state is the result of recursively applying match to the new matrix and the new sequence
-  of final states.
-
-  Finally, the possibility of matching failure is considered. If the set of constructors is exhaustive,
-  then no more arcs are computed. Otherwise, a default arc (_, state) is the last arc. If there are
-  any wildcard patterns in the selected column, then their rows are selected from the rest of the
-  matrix and the final states, and the state is the result of applying match to the new matrix and
-  states. Otherwise, the error state is used after its reference count has been incremented.
-  **/
-
-  /** Handles all translation of pattern matching.
-   */
-  def handlePattern(
-    selector: Tree,         // tree being matched upon (called scrutinee after this)
-    cases: List[CaseDef],   // list of cases in the match
-    isChecked: Boolean,     // whether exhaustiveness checking is enabled (disabled with @unchecked)
-    context: MatrixContext): Tree =
-  {
-    import context._
-    TRACE("handlePattern", "(%s: %s) match { %s cases }", selector, selector.tpe, cases.size)
-
-    val matrixInit: MatrixInit = {
-      val v = copyVar(selector, isChecked, selector.tpe, "temp")
-      MatrixInit(List(v), cases, atPos(selector.pos)(MATCHERROR(v.ident)))
-    }
-    val matrix = new MatchMatrix(context) { lazy val data = matrixInit }
-    val mch     = typer typed matrix.expansion.toTree
-    val dfatree = typer typed Block(matrix.data.valDefs, mch)
-
-    // redundancy check
-    matrix.targets filter (_.unreached) foreach (cs => cunit.error(cs.body.pos, "unreachable code"))
-    // optimize performs squeezing and resets any remaining NO_EXHAUSTIVE
-    tracing("handlePattern")(matrix optimize dfatree)
-  }
-
-  case class MatrixContext(
-    cunit: CompilationUnit,       // current unit
-    handleOuter: Tree => Tree,    // for outer pointer
-    typer: Typer,                 // a local typer
-    owner: Symbol,                // the current owner
-    matchResultType: Type)        // the expected result type of the whole match
-      extends Squeezer
-  {
-    private def ifNull[T](x: T, alt: T) = if (x == null) alt else x
-
-    // NO_EXHAUSTIVE communicates there should be no exhaustiveness checking
-    private def flags(checked: Boolean) = if (checked) Nil else List(NO_EXHAUSTIVE)
-
-    // Recording the symbols of the synthetics we create so we don't go clearing
-    // anyone else's mutable flags.
-    private val _syntheticSyms = mutable.HashSet[Symbol]()
-    def clearSyntheticSyms() = {
-      _syntheticSyms foreach (_ resetFlag (NO_EXHAUSTIVE|MUTABLE))
-      debuglog("Cleared NO_EXHAUSTIVE/MUTABLE on " + _syntheticSyms.size + " synthetic symbols.")
-      _syntheticSyms.clear()
-    }
-    def recordSyntheticSym(sym: Symbol): Symbol = {
-      _syntheticSyms += sym
-      if (_syntheticSyms.size > 25000) {
-        cunit.error(owner.pos, "Sanity check failed: over 25000 symbols created for pattern match.")
-        abort("This is a bug in the pattern matcher.")
-      }
-      sym
-    }
-
-    case class MatrixInit(
-      roots: List[PatternVar],
-      cases: List[CaseDef],
-      default: Tree
-    ) {
-      def tvars = roots map (_.lhs)
-      def valDefs = roots map (_.valDef)
-      override def toString() = "MatrixInit(roots = %s, %d cases)".format(pp(roots), cases.size)
-    }
-
-    implicit def pvlist2pvgroup(xs: List[PatternVar]): PatternVarGroup =
-      PatternVarGroup(xs)
-
-    object PatternVarGroup {
-      def apply(xs: PatternVar*) = new PatternVarGroup(xs.toList)
-      def apply(xs: List[PatternVar]) = new PatternVarGroup(xs)
-
-      // XXX - transitional
-      def fromBindings(vlist: List[Binding], freeVars: List[Symbol] = Nil) = {
-        def vmap(v: Symbol): Option[Binding] = vlist find (_.pvar eq v)
-        val info =
-          if (freeVars.isEmpty) vlist
-          else (freeVars map vmap).flatten
-
-        val xs =
-          for (Binding(lhs, rhs) <- info) yield
-            new PatternVar(lhs, Ident(rhs) setType lhs.tpe, !(rhs hasFlag NO_EXHAUSTIVE))
-
-        new PatternVarGroup(xs)
-      }
-    }
-
-    val emptyPatternVarGroup = PatternVarGroup()
-    class PatternVarGroup(val pvs: List[PatternVar]) {
-      def syms    = pvs map (_.sym)
-      def valDefs = pvs map (_.valDef)
-      def idents  = pvs map (_.ident)
-
-      def extractIndex(index: Int): (PatternVar, PatternVarGroup) = {
-        val (t, ts) = self.extractIndex(pvs, index)
-        (t, PatternVarGroup(ts))
-      }
-
-      def isEmpty = pvs.isEmpty
-      def size = pvs.size
-      def head = pvs.head
-      def ::(t: PatternVar) = PatternVarGroup(t :: pvs)
-      def :::(ts: List[PatternVar]) = PatternVarGroup(ts ::: pvs)
-      def ++(other: PatternVarGroup) = PatternVarGroup(pvs ::: other.pvs)
-
-      def apply(i: Int) = pvs(i)
-      def zipWithIndex = pvs.zipWithIndex
-      def indices = pvs.indices
-      def map[T](f: PatternVar => T) = pvs map f
-      def filter(p: PatternVar => Boolean) = PatternVarGroup(pvs filter p)
-
-      override def toString() = pp(pvs)
-    }
-
-    /** Every temporary variable allocated is put in a PatternVar.
-     */
-    class PatternVar(val lhs: Symbol, val rhs: Tree, val checked: Boolean) {
-      def sym = lhs
-      def tpe = lhs.tpe
-      if (checked)
-        lhs resetFlag NO_EXHAUSTIVE
-      else
-        lhs setFlag NO_EXHAUSTIVE
-
-      // See #1427 for an example of a crash which occurs unless we retype:
-      // in that instance there is an existential in the pattern.
-      lazy val ident  = typer typed Ident(lhs)
-      lazy val valDef = typer typedValDef ValDef(lhs, rhs)
-
-      override def toString() = "%s: %s = %s".format(lhs, tpe, rhs)
-    }
-
-    /** Given a tree, creates a new synthetic variable of the same type
-     *  and assigns the tree to it.
-     */
-    def copyVar(
-      root: Tree,
-      checked: Boolean,
-      _tpe: Type = null,
-      label: String = "temp"): PatternVar =
-    {
-      val tpe   = ifNull(_tpe, root.tpe)
-      val name  = cunit.freshTermName(label)
-      val sym   = newVar(root.pos, tpe, flags(checked), name)
-
-      tracing("copy")(new PatternVar(sym, root, checked))
-    }
-
-    /** Creates a new synthetic variable of the specified type and
-     *  assigns the result of f(symbol) to it.
-     */
-    def createVar(tpe: Type, f: Symbol => Tree, checked: Boolean) = {
-      val lhs = newVar(owner.pos, tpe, flags(checked))
-      val rhs = f(lhs)
-
-      tracing("create")(new PatternVar(lhs, rhs, checked))
-    }
-    def createLazy(tpe: Type, f: Symbol => Tree, checked: Boolean) = {
-      val lhs = newVar(owner.pos, tpe, Flags.LAZY :: flags(checked))
-      val rhs = f(lhs)
-
-      tracing("createLazy")(new PatternVar(lhs, rhs, checked))
-    }
-
-    private def newVar(
-      pos: Position,
-      tpe: Type,
-      flags: List[Long] = Nil,
-      name: TermName = null): Symbol =
-    {
-      val n = if (name == null) cunit.freshTermName("temp") else name
-      // careful: pos has special meaning
-      val flagsLong = (SYNTHETIC.toLong /: flags)(_|_)
-      recordSyntheticSym(owner.newVariable(n, pos, flagsLong) setInfo tpe)
-    }
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
deleted file mode 100644
index 7220253..0000000
--- a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
+++ /dev/null
@@ -1,193 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import PartialFunction._
-
-/** Traits which are mixed into MatchMatrix, but separated out as
- *  (somewhat) independent components to keep them on the sidelines.
- */
-trait MatrixAdditions extends ast.TreeDSL {
-  self: ExplicitOuter with ParallelMatching =>
-
-  import global.{ typer => _, _ }
-  import symtab.Flags
-  import CODE._
-  import Debug._
-  import treeInfo._
-  import definitions.{ isPrimitiveValueClass }
-
-  /** The Squeezer, responsible for all the squeezing.
-   */
-  private[matching] trait Squeezer {
-    self: MatrixContext =>
-
-    private val settings_squeeze = !settings.Ynosqueeze.value
-
-    class RefTraverser(vd: ValDef) extends Traverser {
-      private val targetSymbol = vd.symbol
-      private var safeRefs     = 0
-      private var isSafe       = true
-
-      def canDrop   = isSafe && safeRefs == 0
-      def canInline = isSafe && safeRefs == 1
-
-      override def traverse(tree: Tree): Unit = tree match {
-        case t: Ident if t.symbol eq targetSymbol =>
-          // target symbol's owner should match currentOwner
-          if (targetSymbol.owner == currentOwner) safeRefs += 1
-          else isSafe = false
-
-        case LabelDef(_, params, rhs) =>
-          if (params exists (_.symbol eq targetSymbol))  // cannot substitute this one
-            isSafe = false
-
-          traverse(rhs)
-        case _ if safeRefs > 1 => ()
-        case _ =>
-          super.traverse(tree)
-      }
-    }
-
-    /** Compresses multiple Blocks. */
-    private def combineBlocks(stats: List[Tree], expr: Tree): Tree = expr match {
-      case Block(stats1, expr1) if stats.isEmpty => combineBlocks(stats1, expr1)
-      case _                                     => Block(stats, expr)
-    }
-    def squeezedBlock(vds: List[Tree], exp: Tree): Tree =
-      if (settings_squeeze) combineBlocks(Nil, squeezedBlock1(vds, exp))
-      else                  combineBlocks(vds, exp)
-
-    private def squeezedBlock1(vds: List[Tree], exp: Tree): Tree = {
-      lazy val squeezedTail = squeezedBlock(vds.tail, exp)
-      def default = squeezedTail match {
-        case Block(vds2, exp2) => Block(vds.head :: vds2, exp2)
-        case exp2              => Block(vds.head :: Nil,  exp2)
-      }
-
-      if (vds.isEmpty) exp
-      else vds.head match {
-        case vd: ValDef =>
-          val rt = new RefTraverser(vd)
-          rt.atOwner(owner)(rt traverse squeezedTail)
-
-          if (rt.canDrop)
-            squeezedTail
-          else if (isConstantType(vd.symbol.tpe) || rt.canInline)
-            new TreeSubstituter(List(vd.symbol), List(vd.rhs)) transform squeezedTail
-          else
-            default
-        case _ => default
-      }
-    }
-  }
-
-  /** The Optimizer, responsible for some of the optimizing.
-   */
-  private[matching] trait MatchMatrixOptimizer {
-    self: MatchMatrix =>
-
-    import self.context._
-
-    final def optimize(tree: Tree): Tree = {
-      // Uses treeInfo extractors rather than looking at trees directly
-      // because the many Blocks obscure our vision.
-      object lxtt extends Transformer {
-        override def transform(tree: Tree): Tree = tree match {
-          case Block(stats, ld @ LabelDef(_, _, body)) if targets exists (_ shouldInline ld.symbol) =>
-            squeezedBlock(transformStats(stats, currentOwner), body)
-          case IsIf(cond, IsTrue(), IsFalse()) =>
-            transform(cond)
-          case IsIf(cond1, IsIf(cond2, thenp, elsep1), elsep2) if elsep1 equalsStructure elsep2 =>
-            transform(typer typed If(gen.mkAnd(cond1, cond2), thenp, elsep2))
-          case If(cond1, IsIf(cond2, thenp, Apply(jmp, Nil)), ld: LabelDef) if jmp.symbol eq ld.symbol =>
-            transform(typer typed If(gen.mkAnd(cond1, cond2), thenp, ld))
-          case _ =>
-            super.transform(tree)
-        }
-      }
-      try lxtt transform tree
-      finally clearSyntheticSyms()
-    }
-  }
-
-  /** The Exhauster.
-   */
-  private[matching] trait MatrixExhaustiveness {
-    self: MatchMatrix =>
-
-    import self.context._
-
-    /** Exhaustiveness checking requires looking for sealed classes
-     *  and if found, making sure all children are covered by a pattern.
-     */
-    class ExhaustivenessChecker(rep: Rep, matchPos: Position) {
-      val Rep(tvars, rows) = rep
-
-      import Flags.{ MUTABLE, ABSTRACT, SEALED }
-
-      private case class Combo(index: Int, sym: Symbol) { }
-
-      /* True if the patterns in 'row' cover the given type symbol combination, and has no guard. */
-      private def rowCoversCombo(row: Row, combos: List[Combo]) =
-        row.guard.isEmpty && combos.forall(c => row.pats(c.index) covers c.sym)
-
-      private def requiresExhaustive(sym: Symbol) = {
-         (sym.isMutable) &&                 // indicates that have not yet checked exhaustivity
-        !(sym hasFlag NO_EXHAUSTIVE) &&     // indicates @unchecked
-         (sym.tpe.typeSymbol.isSealed) &&
-        !isPrimitiveValueClass(sym.tpe.typeSymbol)   // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
-      }
-
-      private lazy val inexhaustives: List[List[Combo]] = {
-        // let's please not get too clever side-effecting the mutable flag.
-        val toCollect = tvars.zipWithIndex filter { case (pv, i) => requiresExhaustive(pv.sym) }
-        val collected = toCollect map { case (pv, i) =>
-          // okay, now reset the flag
-          pv.sym resetFlag MUTABLE
-
-          i -> (
-            pv.tpe.typeSymbol.sealedDescendants.toList sortBy (_.sealedSortName)
-            // symbols which are both sealed and abstract need not be covered themselves, because
-            // all of their children must be and they cannot otherwise be created.
-            filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
-            // have to filter out children which cannot match: see ticket #3683 for an example
-            filter (_.tpe matchesPattern pv.tpe)
-          )
-        }
-
-        val folded =
-          collected.foldRight(List[List[Combo]]())((c, xs) => {
-            val (i, syms) = c match { case (i, set) => (i, set.toList) }
-            xs match {
-              case Nil  => syms map (s => List(Combo(i, s)))
-              case _    => for (s <- syms ; rest <- xs) yield Combo(i, s) :: rest
-            }
-          })
-
-        folded filterNot (combo => rows exists (r => rowCoversCombo(r, combo)))
-      }
-
-      private def mkPad(xs: List[Combo], i: Int): String = xs match {
-        case Nil                    => pad("*")
-        case Combo(j, sym) :: rest  => if (j == i) pad(sym.name.toString) else mkPad(rest, i)
-      }
-      private def mkMissingStr(open: List[Combo]) =
-        "missing combination %s\n" format tvars.indices.map(mkPad(open, _)).mkString
-
-      /** The only public method. */
-      def check = {
-        def errMsg = (inexhaustives map mkMissingStr).mkString
-        if (inexhaustives.nonEmpty)
-          cunit.warning(matchPos, "match is not exhaustive!\n" + errMsg)
-
-        rep
-      }
-    }
-  }
-}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
deleted file mode 100644
index dbb9b7a..0000000
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ /dev/null
@@ -1,870 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Copyright 2007 Google Inc. All Rights Reserved.
- * Author: bqe at google.com (Burak Emir)
- */
-
-package scala.tools.nsc
-package matching
-
-import PartialFunction._
-import scala.collection.{ mutable }
-import scala.reflect.internal.util.Position
-import transform.ExplicitOuter
-import symtab.Flags
-import mutable.ListBuffer
-import scala.annotation.elidable
-import scala.language.postfixOps
-import scala.tools.nsc.settings.ScalaVersion
-
-trait ParallelMatching extends ast.TreeDSL
-      with MatchSupport
-      with Matrix
-      with Patterns
-      with PatternBindings
-{
-  self: ExplicitOuter =>
-
-  import global.{ typer => _, _ }
-  import definitions.{
-    AnyRefClass, IntClass, BooleanClass, SomeClass, OptionClass,
-    getProductArgs, productProj, Object_eq, Any_asInstanceOf
-  }
-  import CODE._
-  import Types._
-  import Debug._
-
-  /** Transition **/
-  def toPats(xs: List[Tree]): List[Pattern] = xs map Pattern.apply
-
-  /** The umbrella matrix class. **/
-  abstract class MatchMatrix(val context: MatrixContext) extends MatchMatrixOptimizer with MatrixExhaustiveness {
-    import context._
-
-    def data: MatrixContext#MatrixInit
-
-    lazy val MatrixInit(roots, cases, failTree) = data
-    lazy val (rows, targets)                    = expand(roots, cases).unzip
-    lazy val expansion: Rep                     = make(roots, rows)
-
-    private val shortCuts = perRunCaches.newMap[Int, Symbol]()
-
-    final def createShortCut(theLabel: Symbol): Int = {
-      val key = shortCuts.size + 1
-      shortCuts(key) = theLabel
-      -key
-    }
-    def createLabelDef(namePrefix: String, body: Tree, params: List[Symbol] = Nil, restpe: Type = matchResultType) = {
-      val labelName = cunit.freshTermName(namePrefix)
-      val labelSym  = owner.newLabel(labelName, owner.pos)
-      val labelInfo = MethodType(params, restpe)
-
-      LabelDef(labelSym setInfo labelInfo, params, body setType restpe)
-    }
-
-    /** This is the recursive focal point for translating the current
-     *  list of pattern variables and a list of pattern match rows into
-     *  a tree suitable for entering erasure.
-     *
-     *  The first time it is called, the variables are (copies of) the
-     *  original pattern matcher roots, and the rows correspond to the
-     *  original casedefs.
-     */
-    final def make(roots1: PatternVarGroup, rows1: List[Row]): Rep = {
-      traceCategory("New Match", "%sx%s (%s)", roots1.size, rows1.size, roots1.syms.mkString(", "))
-      def classifyPat(opat: Pattern, j: Int): Pattern = opat simplify roots1(j)
-
-      val newRows = rows1 flatMap (_ expandAlternatives classifyPat)
-      if (rows1.length != newRows.length) make(roots1, newRows)  // recursive call if any change
-      else {
-        val rep = Rep(roots1, newRows)
-        new ExhaustivenessChecker(rep, roots.head.sym.pos).check
-        rep
-      }
-    }
-
-    override def toString() = "MatchMatrix(%s) { %s }".format(matchResultType, indentAll(targets))
-
-    /**
-     * Encapsulates a symbol being matched on.  It is created from a
-     * PatternVar, which encapsulates the symbol's creation and assignment.
-     *
-     * We never match on trees directly - a temporary variable is created
-     * (in a PatternVar) for any expression being matched on.
-     */
-    class Scrutinee(val pv: PatternVar) {
-      import definitions._
-
-      // presenting a face of our symbol
-      def sym   = pv.sym
-      def tpe   = sym.tpe
-      def pos   = sym.pos
-      def id    = ID(sym) setPos pos  // attributed ident
-
-      def accessors     = if (isCaseClass) sym.caseFieldAccessors else Nil
-      def accessorTypes = accessors map (x => (tpe memberType x).resultType)
-
-      lazy val accessorPatternVars  = PatternVarGroup(
-        for ((accessor, tpe) <- accessors zip accessorTypes) yield
-          createVar(tpe, _ => fn(id, accessor))
-      )
-
-      private def extraValDefs = if (pv.rhs.isEmpty) Nil else List(pv.valDef)
-      def allValDefs = extraValDefs ::: accessorPatternVars.valDefs
-
-      // tests
-      def isDefined      = sym ne NoSymbol
-      def isSubrangeType = subrangeTypes(tpe.typeSymbol)
-      def isCaseClass    = tpe.typeSymbol.isCase
-
-      // sequences
-      def seqType         = tpe.widen baseType SeqClass
-      def elemType        = tpe typeArgs 0
-
-      private def elemAt(i: Int)  = (id DOT (tpe member nme.apply))(LIT(i))
-      private def createElemVar(i: Int)   = createVar(elemType, _ => elemAt(i))
-      private def createSeqVar(drop: Int) = createVar(seqType, _ => id DROP drop)
-
-      def createSequenceVars(count: Int): List[PatternVar] =
-        (0 to count).toList map (i => if (i < count) createElemVar(i) else createSeqVar(i))
-
-      // for propagating "unchecked" to synthetic vars
-      def isChecked = !(sym hasFlag NO_EXHAUSTIVE)
-      def flags: List[Long] = List(NO_EXHAUSTIVE) filter (sym hasFlag _)
-
-      // this is probably where this actually belongs
-      def createVar(tpe: Type, f: Symbol => Tree) = context.createVar(tpe, f, isChecked)
-
-      def castedTo(headType: Type) =
-        if (tpe =:= headType) this
-        else new Scrutinee(createVar(headType, lhs => gen.mkAsInstanceOf(id, lhs.tpe)))
-
-      override def toString() = "(%s: %s)".format(id, tpe)
-    }
-
-    def isPatternSwitch(scrut: Scrutinee, ps: List[Pattern]): Option[PatternSwitch] = {
-      def isSwitchableConst(x: Pattern) = cond(x) { case x: LiteralPattern if x.isSwitchable => true }
-      def isSwitchableDefault(x: Pattern) = isSwitchableConst(x) || x.isDefault
-
-      // TODO - scala> (5: Any) match { case 5 => 5 ; case 6 => 7 }
-      // ... should compile to a switch.  It doesn't because the scrut isn't Int/Char, but
-      // that could be handled in an if/else since every pattern requires an Int.
-      // More immediately, Byte and Short scruts should also work.
-      if (!scrut.isSubrangeType) None
-      else {
-        val (_lits, others) = ps span isSwitchableConst
-        val lits = _lits collect { case x: LiteralPattern => x }
-
-        condOpt(others) {
-          case Nil                                => new PatternSwitch(scrut, lits, None)
-          // TODO: This needs to also allow the case that the last is a compatible type pattern.
-          case List(x) if isSwitchableDefault(x)  => new PatternSwitch(scrut, lits, Some(x))
-        }
-      }
-    }
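-    // Illustrative example (added comment, not in the original source): a match like
-    //   (c: Char) match { case 'a' => 0; case 'b' => 1; case _ => -1 }
-    // has a subrange-typed scrutinee and switchable literal cases, so isPatternSwitch
-    // yields a PatternSwitch and MixLiteralInts below can emit a single switch.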
-
-    class PatternSwitch(
-      scrut: Scrutinee,
-      override val ps: List[LiteralPattern],
-      val defaultPattern: Option[Pattern]
-    ) extends PatternMatch(scrut, ps) {
-      require(scrut.isSubrangeType && (ps forall (_.isSwitchable)))
-    }
-
-    case class PatternMatch(scrut: Scrutinee, ps: List[Pattern]) {
-      def head = ps.head
-      def tail = ps.tail
-      def size = ps.length
-
-      def headType = head.necessaryType
-      private val dummyCount = if (head.isCaseClass) headType.typeSymbol.caseFieldAccessors.length else 0
-      def dummies = emptyPatterns(dummyCount)
-
-      def apply(i: Int): Pattern = ps(i)
-      def pzip() = ps.zipWithIndex
-      def pzip[T](others: List[T]) = {
-        assert(ps.size == others.size, "Internal error: ps = %s, others = %s".format(ps, others))
-        ps zip others
-      }
-
-      // Any unapply - returns Some(true) if a type test is needed before the unapply can
-      // be called (e.g. def unapply(x: Foo) = { ... } but our scrutinee is of type Any).
-      object AnyUnapply {
-        def unapply(x: Pattern): Option[Boolean] = condOpt(x.tree) {
-          case UnapplyParamType(tpe) => !(scrut.tpe <:< tpe)
-        }
-      }
-
-      def mkRule(rest: Rep): RuleApplication = {
-        tracing("Rule")(head match {
-          case x if isEquals(x.tree.tpe)        => new MixEquals(this, rest)
-          case x: SequencePattern               => new MixSequence(this, rest, x)
-          case AnyUnapply(false)                => new MixUnapply(this, rest)
-          case _ =>
-            isPatternSwitch(scrut, ps) match {
-              case Some(x)  => new MixLiteralInts(x, rest)
-              case _        => new MixTypes(this, rest)
-            }
-        })
-      }
-      override def toString() = "%s match {%s}".format(scrut, indentAll(ps))
-    } // PatternMatch
-
-    /***** Rule Applications *****/
-
-    sealed abstract class RuleApplication {
-      def pmatch: PatternMatch
-      def rest: Rep
-      def cond: Tree
-      def success: Tree
-      def failure: Tree
-
-      lazy val PatternMatch(scrut, patterns) = pmatch
-      lazy val head = pmatch.head
-      lazy val codegen: Tree = IF (cond) THEN (success) ELSE (failure)
-
-      def mkFail(xs: List[Row]): Tree =
-        if (xs.isEmpty) failTree
-        else remake(xs).toTree
-
-      def remake(
-        rows: List[Row],
-        pvgroup: PatternVarGroup = emptyPatternVarGroup,
-        includeScrut: Boolean = true): Rep =
-      {
-        val scrutpvs = if (includeScrut) List(scrut.pv) else Nil
-        make(pvgroup.pvs ::: scrutpvs ::: rest.tvars, rows)
-      }
-
-      /** Translates the outcome of the rule application into code (possibly involving recursive application of rewriting). */
-      def tree(): Tree
-
-      override def toString =
-        "Rule/%s (%s =^= %s)".format(getClass.getSimpleName, scrut, head)
-    }
-
-    /** {case ... if guard => bx} else {guardedRest} */
-    /** VariableRule: The top-most row has only variable (non-constructor) patterns. */
-    case class VariableRule(subst: Bindings, guard: Tree, guardedRest: Rep, bx: Int) extends RuleApplication {
-      def pmatch: PatternMatch = impossible
-      def rest: Rep = guardedRest
-
-      private lazy val (valDefs, successTree) = targets(bx) applyBindings subst.toMap
-      lazy val cond    = guard
-      lazy val success = successTree
-      lazy val failure = guardedRest.toTree
-
-      final def tree(): Tree =
-        if (bx < 0) REF(shortCuts(-bx))
-        else squeezedBlock(
-          valDefs,
-          if (cond.isEmpty) success else codegen
-        )
-
-      override def toString = "(case %d) {\n  Bindings: %s\n\n  if (%s) { %s }\n  else { %s }\n}".format(
-        bx, subst, guard, success, guardedRest
-      )
-    }
-
-    class MixLiteralInts(val pmatch: PatternSwitch, val rest: Rep) extends RuleApplication {
-      val literals = pmatch.ps
-      val defaultPattern = pmatch.defaultPattern
-
-      private lazy val casted: Tree =
-        if (!scrut.tpe.isInt) scrut.id DOT nme.toInt else scrut.id
-
-      // creates a row transformer for injecting the default case bindings at a given index
-      private def addDefaultVars(index: Int): Row => Row =
-        if (defaultVars.isEmpty) identity
-        else rebindAll(_, pmatch(index).boundVariables, scrut.sym)
-
-      // add bindings for all the given vs to the given tvar
-      private def rebindAll(r: Row, vs: Iterable[Symbol], tvar: Symbol) =
-        r rebind r.subst.add(vs, tvar)
-
-      private def bindVars(Tag: Int, orig: Bindings): Bindings = {
-        def myBindVars(rest: List[(Int, List[Symbol])], bnd: Bindings): Bindings = rest match {
-          case Nil => bnd
-          case (Tag,vs)::xs => myBindVars(xs, bnd.add(vs, scrut.sym))
-          case (_,  vs)::xs => myBindVars(xs, bnd)
-        }
-        myBindVars(varMap, orig)
-      }
-
-      // bound vars and rows for default pattern (only one row, but a list is easier to use later)
-      lazy val (defaultVars, defaultRows) = defaultPattern match {
-        case None    => (Nil, Nil)
-        case Some(p) => (p.boundVariables, List(rebindAll(rest rows literals.size, p.boundVariables, scrut.sym)))
-      }
-
-      // literalMap is a map from each literal to a list of row indices.
-      // varMap pairs each literal with the list of vars it defines.
-      lazy val (litPairs, varMap) = (
-        literals.zipWithIndex map {
-          case (lit, index) =>
-            val tag  = lit.intValue
-            (tag -> index, tag -> lit.boundVariables)
-        } unzip
-      )
-      def literalMap = litPairs groupBy (_._1) map {
-        case (k, vs) => (k, vs map (_._2))
-      }
-
-      lazy val cases =
-        for ((tag, indices) <- literalMap.toList.sortBy(_._1)) yield {
-          val newRows = indices map (i => addDefaultVars(i)(rest rows i))
-          val r       = remake(newRows ++ defaultRows, includeScrut = false)
-          val r2      = make(r.tvars, r.rows map (x => x rebind bindVars(tag, x.subst)))
-
-          CASE(Literal(Constant(tag))) ==> r2.toTree
-        }
-
-      lazy val defaultTree = remake(defaultRows, includeScrut = false).toTree
-      def defaultCase = CASE(WILD(IntClass.tpe)) ==> defaultTree
-
-      // cond/success/failure only used if there is exactly one case.
-      lazy val cond    = scrut.id MEMBER_== cases.head.pat
-      lazy val success = cases.head.body
-      lazy val failure = defaultTree
-
-      // a single case becomes an if/else; more than one becomes a match
-      def tree() =
-        if (cases.size == 1) codegen
-        else casted MATCH (cases :+ defaultCase: _*)
-    }
-
-    /** mixture rule for unapply pattern
-     */
-    class MixUnapply(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
-      val Pattern(UnApply(unMethod, unArgs)) = head
-      val Apply(unTarget, _ :: trailing) = unMethod
-
-      object SameUnapplyCall {
-        def isSame(t: Tree) = isEquivalentTree(unTarget, t)
-        def unapply(x: Pattern) = /*tracing("SameUnapplyCall (%s vs. %s)".format(unTarget, x))*/(x match {
-          case Pattern(UnApply(Apply(fn, _), args)) if isSame(fn) => Some(args)
-          case _                                                  => None
-        })
-      }
-      object SameUnapplyPattern {
-        def isSame(t: Tree)   = isEquivalentTree(unMethod, t)
-        def apply(x: Pattern) = unapply(x).isDefined
-        def unapply(x: Pattern) = /*tracing("SameUnapplyPattern (%s vs. %s)".format(unMethod, x))*/(x match {
-          case Pattern(UnApply(t, _)) if isSame(t) => Some(unArgs)
-          case _                                   => None
-        })
-      }
-
-      private lazy val zipped      = pmatch pzip rest.rows
-
-      lazy val unapplyResult: PatternVar =
-        scrut.createVar(unMethod.tpe, Apply(unTarget, scrut.id :: trailing) setType _.tpe)
-
-      lazy val cond: Tree = unapplyResult.tpe.normalize match {
-        case TypeRef(_, BooleanClass, _)  => unapplyResult.ident
-        case TypeRef(_, SomeClass, _)     => TRUE
-        case _                            => NOT(unapplyResult.ident DOT nme.isEmpty)
-      }
-
-      lazy val failure =
-        mkFail(zipped.tail filterNot (x => SameUnapplyPattern(x._1)) map { case (pat, r) => r insert pat })
-
-      private def doSuccess: (List[PatternVar], List[PatternVar], List[Row]) = {
-        // pattern variable for the unapply result of Some(x).get
-        def unMethodTypeArg = unMethod.tpe.baseType(OptionClass).typeArgs match {
-          case Nil      => log("No type argument for unapply result! " + unMethod.tpe) ; NoType
-          case arg :: _ => arg
-        }
-        lazy val pv = scrut.createVar(unMethodTypeArg, _ => fn(ID(unapplyResult.lhs), nme.get))
-        def tuple = pv.lhs
-
-        // at this point it's Some[T1,T2...]
-        lazy val tpes  = getProductArgs(tuple.tpe)
-
-        // one pattern variable per tuple element
-        lazy val tuplePVs =
-          for ((tpe, i) <- tpes.zipWithIndex) yield
-            scrut.createVar(tpe, _ => fn(ID(tuple), productProj(tuple, i + 1)))
-
-        // the filter prevents infinite unapply recursion
-        def mkNewRows(sameFilter: (List[Tree]) => List[Tree]) = {
-          val dum = if (unArgs.length <= 1) unArgs.length else tpes.size
-          for ((pat, r) <- zipped) yield pat match {
-            case SameUnapplyCall(xs)  => r.insert2(toPats(sameFilter(xs)) :+ NoPattern, pat.boundVariables, scrut.sym)
-            case _                    => r insert (emptyPatterns(dum) :+ pat)
-          }
-        }
-
-        // 0 is Boolean, 1 is Option[T], 2+ is Option[(T1,T2,...)]
-        unArgs.length match {
-          case 0  => (Nil, Nil, mkNewRows((xs) => Nil))
-          case 1  => (List(pv), List(pv), mkNewRows(xs => List(xs.head)))
-          case _  => (pv :: tuplePVs, tuplePVs, mkNewRows(identity))
-        }
-      }
-
-      lazy val success = {
-        val (squeezePVs, pvs, rows) = doSuccess
-        val srep = remake(rows, pvs).toTree
-
-        squeezedBlock(squeezePVs map (_.valDef), srep)
-      }
-
-      final def tree() =
-        squeezedBlock(List(handleOuter(unapplyResult.valDef)), codegen)
-    }
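-    // Illustrative sketch (added comment, not in the original source): for an extractor
-    //   object Positive { def unapply(n: Int): Option[Int] = if (n > 0) Some(n) else None }
-    // a case like `case Positive(p)` lands here: unapplyResult holds the result of the
-    // generated Positive.unapply(temp) call, cond reduces to !unapplyResult.isEmpty
-    // (or the result itself for Boolean extractors), and success binds p via .get.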
-
-    /** Handle Sequence patterns (including Star patterns.)
-     *  Note: pivot == head, just better typed.
-     */
-    sealed class MixSequence(val pmatch: PatternMatch, val rest: Rep, pivot: SequencePattern) extends RuleApplication {
-      require(scrut.tpe <:< head.tpe)
-
-      def hasStar = pivot.hasStar
-      private def pivotLen    = pivot.nonStarLength
-      private def seqDummies  = emptyPatterns(pivot.elems.length + 1)
-
-      // Should the given pattern join the expanded pivot in the success matrix? If so,
-      // this partial function will be defined for the pattern, and the result of the apply
-      // is the expanded sequence of new patterns.
-      lazy val successMatrixFn = new PartialFunction[Pattern, List[Pattern]] {
-        private def seqIsDefinedAt(x: SequenceLikePattern) = (hasStar, x.hasStar) match {
-          case (true, true)   => true
-          case (true, false)  => pivotLen <= x.nonStarLength
-          case (false, true)  => pivotLen >= x.nonStarLength
-          case (false, false) => pivotLen == x.nonStarLength
-        }
-
-        def isDefinedAt(pat: Pattern) = pat match {
-          case x: SequenceLikePattern => seqIsDefinedAt(x)
-          case WildcardPattern()      => true
-          case _                      => false
-        }
-
-        def apply(pat: Pattern): List[Pattern] = pat match {
-          case x: SequenceLikePattern =>
-            def isSameLength  = pivotLen == x.nonStarLength
-            def rebound       = x.nonStarPatterns :+ (x.elemPatterns.last rebindTo WILD(scrut.seqType))
-
-            (pivot.hasStar, x.hasStar, isSameLength) match {
-              case (true, true, true)   => rebound :+ NoPattern
-              case (true, true, false)  => (seqDummies drop 1) :+ x
-              case (true, false, true)  => x.elemPatterns ++ List(NilPattern, NoPattern)
-              case (false, true, true)  => rebound
-              case (false, false, true) => x.elemPatterns :+ NoPattern
-              case _                    => seqDummies
-            }
-
-          case _  => seqDummies
-        }
-      }
-
-      // Should the given pattern be in the fail matrix? This is true of any sequence
-      // as long as the result of the length test on the pivot doesn't make it impossible:
-      // for instance if neither sequence is right ignoring and they are of different
-      // lengths, the latter one cannot match since its length must be wrong.
-      def failureMatrixFn(c: Pattern) = (pivot ne c) && (c match {
-        case x: SequenceLikePattern =>
-          (hasStar, x.hasStar) match {
-            case (_, true)      => true
-            case (true, false)  => pivotLen > x.nonStarLength
-            case (false, false) => pivotLen != x.nonStarLength
-          }
-        case WildcardPattern()      => true
-        case _                      => false
-      })
-
-      // divide the remaining rows into success/failure branches, expanding subsequences of patterns
-      val successRows = pmatch pzip rest.rows collect {
-        case (c, row) if successMatrixFn isDefinedAt c => row insert successMatrixFn(c)
-      }
-      val failRows = pmatch pzip rest.rows collect {
-        case (c, row) if failureMatrixFn(c) => row insert c
-      }
-
-      // The discrimination test for sequences is a call to lengthCompare.  Note that
-      // this logic must be fully consistent with successMatrixFn and failureMatrixFn above:
-      // any inconsistency will manifest (and frequently has manifested) as pattern matcher crashes.
-      lazy val cond = {
-        // the method call symbol
-        val methodOp: Symbol                = head.tpe member nme.lengthCompare
-
-        // the comparison to perform.  If the pivot is right ignoring, then a scrutinee sequence
-        // of >= pivot length could match it; otherwise it must be exactly equal.
-        val compareOp: (Tree, Tree) => Tree = if (hasStar) _ INT_>= _ else _ INT_== _
-
-        // scrutinee.lengthCompare(pivotLength) [== | >=] 0
-        val compareFn: Tree => Tree         = (t: Tree) => compareOp((t DOT methodOp)(LIT(pivotLen)), ZERO)
-
-        // wrapping in a null check on the scrutinee
-        // XXX this needs to use the logic in "def condition"
-        nullSafe(compareFn, FALSE)(scrut.id)
-        // condition(head.tpe, scrut.id, head.boundVariables.nonEmpty)
-      }
-      lazy val success = {
-        // one pattern var per sequence element up to elemCount, and one more for the rest of the sequence
-        lazy val pvs = scrut createSequenceVars pivotLen
-
-        squeezedBlock(pvs map (_.valDef), remake(successRows, pvs, hasStar).toTree)
-      }
-      lazy val failure  = remake(failRows).toTree
-
-      final def tree(): Tree = codegen
-    }
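-    // Illustrative sketch (added comment, not in the original source): for
-    //   xs match { case Seq(a, b, rest @ _*) => ...; case _ => ... }
-    // the pivot has two non-star elements, so cond becomes roughly
-    //   xs != null && xs.lengthCompare(2) >= 0
-    // (>= because the pattern is right ignoring; == if it had no star pattern).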
-
-    class MixEquals(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
-      private lazy val rhs =
-        decodedEqualsType(head.tpe) match {
-          case SingleType(pre, sym) => REF(pre, sym)
-          case PseudoType(o)        => o
-        }
-      private lazy val labelDef =
-        createLabelDef("fail%", remake((rest.rows.tail, pmatch.tail).zipped map (_ insert _)).toTree)
-
-      lazy val cond       = handleOuter(rhs MEMBER_== scrut.id)
-      lazy val successOne = rest.rows.head.insert2(List(NoPattern), head.boundVariables, scrut.sym)
-      lazy val successTwo = Row(emptyPatterns(1 + rest.tvars.size), NoBinding, EmptyTree, createShortCut(labelDef.symbol))
-      lazy val success    = remake(List(successOne, successTwo)).toTree
-      lazy val failure    = labelDef
-
-      final def tree() = codegen
-      override def toString() = "MixEquals(%s == %s)".format(scrut, head)
-    }
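-    // Illustrative note (added comment, not in the original source): this rule covers
-    // stable identifier patterns, e.g. with `val limit = 10` a case written as
-    //   case `limit` => ...
-    // is tested as `limit == scrutinee` (cond above), jumping to the fail label otherwise.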
-
-    /** Mixture rule for type tests.
-     *  moreSpecific: more specific patterns
-     *      subsumed: more general patterns (subsuming current), rows index and subpatterns
-     *     remaining: remaining, rows index and pattern
-     */
-    class MixTypes(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
-      case class Yes(bx: Int, moreSpecific: Pattern, subsumed: List[Pattern])
-      case class No(bx: Int, remaining: Pattern)
-
-      val (yeses, noes) = {
-        val _ys = new ListBuffer[Yes]
-        val _ns = new ListBuffer[No]
-
-        for ((pattern, j) <- pmatch.pzip()) {
-          // scrutinee, head of pattern group
-          val (s, p) = (pattern.tpe, head.necessaryType)
-
-          def isEquivalent  = head.necessaryType =:= pattern.tpe
-          def isObjectTest  = pattern.isObject && (p =:= pattern.necessaryType)
-
-          def sMatchesP = matches(s, p)
-          def pMatchesS = matches(p, s)
-
-          def ifEquiv(yes: Pattern): Pattern = if (isEquivalent) yes else pattern
-
-          def passl(p: Pattern = NoPattern, ps: List[Pattern] = pmatch.dummies) = Some(Yes(j, p, ps))
-          def passr()                                                           = Some( No(j, pattern))
-
-          def typed(pp: Tree) = passl(ifEquiv(Pattern(pp)))
-          def subs()          = passl(ifEquiv(NoPattern), pattern subpatterns pmatch)
-
-          val (oneY, oneN) = pattern match {
-            case Pattern(LIT(null)) if !(p =:= s)       => (None, passr)      // (1)
-            case x if isObjectTest                      => (passl(), None)    // (2)
-            case Pattern(Typed(pp, _))     if sMatchesP => (typed(pp), None)  // (4)
-            // The next line used to be this which "fixed" 1697 but introduced
-            // numerous regressions including #3136.
-            // case Pattern(_: UnApply, _)              => (passl(), passr)
-            case Pattern(_: UnApply)                    => (None, passr)
-            case x if !x.isDefault && sMatchesP         => (subs(), None)
-            case x if  x.isDefault || pMatchesS         => (passl(), passr)
-            case _                                      => (None, passr)
-          }
-          oneY map (_ys +=)
-          oneN map (_ns +=)
-        }
-        (_ys.toList, _ns.toList)
-      }
-
-      val moreSpecific = yeses map (_.moreSpecific)
-      val subsumed = yeses map (x => (x.bx, x.subsumed))
-      val remaining = noes map (x => (x.bx, x.remaining))
-
-      private def mkZipped =
-        for (Yes(j, moreSpecific, subsumed) <- yeses) yield
-          j -> (moreSpecific :: subsumed)
-
-      lazy val casted = scrut castedTo pmatch.headType
-      lazy val cond   = condition(casted.tpe, scrut, head.boundVariables.nonEmpty)
-
-      private def isAnyMoreSpecific = yeses exists (x => !x.moreSpecific.isEmpty)
-      lazy val (subtests, subtestVars) =
-        if (isAnyMoreSpecific)  (mkZipped, List(casted.pv))
-        else                    (subsumed, Nil)
-
-      lazy val newRows =
-        for ((j, ps) <- subtests) yield
-          (rest rows j).insert2(ps, pmatch(j).boundVariables, casted.sym)
-
-      lazy val success = {
-        val srep = remake(newRows, subtestVars ::: casted.accessorPatternVars, includeScrut = false)
-        squeezedBlock(casted.allValDefs, srep.toTree)
-      }
-
-      lazy val failure =
-        mkFail(remaining map { case (p1, p2) => rest rows p1 insert p2 })
-
-      final def tree(): Tree = codegen
-    }
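-    // Illustrative sketch (added comment, not in the original source): for
-    //   (x: Any) match { case s: String => ...; case n: Int => ... }
-    // MixTypes tests the head type first (roughly x.isInstanceOf[String]), casts the
-    // scrutinee for the success rows, and routes the remaining rows (here `n: Int`)
-    // to the failure branch via `remaining`.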
-
-    /*** States, Rows, Etc. ***/
-
-    case class Row(pats: List[Pattern], subst: Bindings, guard: Tree, bx: Int) {
-      private def nobindings = subst.get().isEmpty
-      private def bindstr = if (nobindings) "" else pp(subst)
-
-      /** Extracts the 'i'th pattern. */
-      def extractColumn(i: Int) = {
-        val (x, xs) = extractIndex(pats, i)
-        (x, copy(pats = xs))
-      }
-
-      /** Replaces the 'i'th pattern with the argument. */
-      def replaceAt(i: Int, p: Pattern) = {
-        val newps = (pats take i) ::: p :: (pats drop (i + 1))
-        copy(pats = newps)
-      }
-
-      def insert(h: Pattern)              = copy(pats = h :: pats)
-      def insert(hs: List[Pattern])       = copy(pats = hs ::: pats)  // prepends supplied pattern
-      def rebind(b: Bindings)             = copy(subst = b)           // substitutes for bindings
-
-      def insert2(hs: List[Pattern], vs: Iterable[Symbol], tvar: Symbol) =
-        tracing("insert2")(copy(pats = hs ::: pats, subst = subst.add(vs, tvar)))
-
-      // returns this row with alternatives expanded
-      def expandAlternatives(classifyPat: (Pattern, Int) => Pattern): List[Row] = {
-        def isNotAlternative(p: Pattern) = !cond(p.tree) { case _: Alternative => true }
-
-        // classify all the top level patterns - alternatives come back unaltered
-        val newPats: List[Pattern] = pats.zipWithIndex map classifyPat.tupled
-        // see if any alternatives were in there
-        val (ps, others) = newPats span isNotAlternative
-        // make a new row for each alternative, with it spliced into the original position
-        if (others.isEmpty) List(copy(pats = ps))
-        else extractBindings(others.head) map (x => replaceAt(ps.size, x))
-      }
-      override def toString() = {
-        val bs = if (nobindings) "" else "\n" + bindstr
-        "Row(%d)(%s%s)".format(bx, pp(pats), bs)
-      }
-    }
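-
-    // Illustrative note (added comment, not in the original source): expandAlternatives
-    // turns one row whose pattern is an alternative such as `1 | 2 | 3` into three rows,
-    // one per alternative, each spliced back into the original column position.
-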
-    abstract class State {
-      def bx: Int                   // index into the list of rows
-      def params: List[Symbol]      // bound names to be supplied as arguments to labeldef
-      def body: Tree                // body to execute upon match
-      def label: Option[LabelDef]   // label definition for this state
-
-      // Called with a bindings map when a match is achieved.
-      // Returns a list of variable declarations based on the labeldef parameters
-      // and the given substitution, and the body to execute.
-      protected def applyBindingsImpl(subst: Map[Symbol, Symbol]): (List[ValDef], Tree)
-
-      final def applyBindings(subst: Map[Symbol, Symbol]): (List[ValDef], Tree) = {
-        _referenceCount += 1
-        applyBindingsImpl(subst)
-      }
-
-      private var _referenceCount   = 0
-      def referenceCount            = _referenceCount
-      def unreached                 = referenceCount == 0
-      def shouldInline(sym: Symbol) = referenceCount == 1 && label.exists(_.symbol == sym)
-
-      // Creates a simple Ident if the symbol's type conforms to
-      // the val definition's type, or a casted Ident if not.
-      private def newValIdent(lhs: Symbol, rhs: Symbol) =
-        if (rhs.tpe <:< lhs.tpe) Ident(rhs)
-        else gen.mkTypeApply(Ident(rhs), Any_asInstanceOf, List(lhs.tpe))
-
-      protected def newValDefinition(lhs: Symbol, rhs: Symbol) =
-        typer typedValDef ValDef(lhs, newValIdent(lhs, rhs))
-
-      protected def newValReference(lhs: Symbol, rhs: Symbol) =
-        typer typed newValIdent(lhs, rhs)
-
-      protected def valDefsFor(subst: Map[Symbol, Symbol]) = mapSubst(subst)(newValDefinition)
-      protected def identsFor(subst: Map[Symbol, Symbol])  = mapSubst(subst)(newValReference)
-
-      protected def mapSubst[T](subst: Map[Symbol, Symbol])(f: (Symbol, Symbol) => T): List[T] =
-        params flatMap { lhs =>
-          subst get lhs map (rhs => f(lhs, rhs)) orElse {
-            // This should not happen; the code should be structured so it is
-            // impossible, but that still lies ahead.
-            cunit.warning(lhs.pos, "No binding")
-            None
-          }
-        }
-
-      // typer is not able to digest a body of type Nothing being assigned result type Unit
-      protected def caseResultType =
-        if (body.tpe.isNothing) body.tpe else matchResultType
-    }
-
-    case class LiteralState(bx: Int, params: List[Symbol], body: Tree) extends State {
-      def label = None
-
-      protected def applyBindingsImpl(subst: Map[Symbol, Symbol]) =
-        (valDefsFor(subst), body.duplicate setType caseResultType)
-    }
-
-    case class FinalState(bx: Int, params: List[Symbol], body: Tree) extends State {
-      traceCategory("Final State", "(%s) => %s", paramsString, body)
-      def label = Some(labelDef)
-
-      private lazy val labelDef = createLabelDef("body%" + bx, body, params, caseResultType)
-
-      protected def applyBindingsImpl(subst: Map[Symbol, Symbol]) = {
-        val tree =
-          if (referenceCount > 1) ID(labelDef.symbol) APPLY identsFor(subst)
-          else labelDef
-
-        (valDefsFor(subst), tree)
-      }
-
-      private def paramsString = params map (s => s.name + ": " + s.tpe) mkString ", "
-      override def toString() = pp("(%s) => %s".format(pp(params), body))
-    }
-
-    case class Rep(val tvars: PatternVarGroup, val rows: List[Row]) {
-      lazy val Row(pats, subst, guard, index) = rows.head
-      lazy val guardedRest        = if (guard.isEmpty) Rep(Nil, Nil) else make(tvars, rows.tail)
-      lazy val (defaults, others) = pats span (_.isDefault)
-
-      /** Cut out the column containing the non-default pattern. */
-      class Cut(index: Int) {
-        /** The first two vals separate out the 'i'th pattern in each row from the remainder. */
-        private val (_column, _rows) = rows map (_ extractColumn index) unzip
-
-        /** Now the 'i'th tvar is separated out and used as a new Scrutinee. */
-        private val (_pv, _tvars) = tvars extractIndex index
-
-        /** The non-default pattern (others.head) replaces the column head. */
-        private val (_ncol, _nrep) =
-          (others.head :: _column.tail, make(_tvars, _rows))
-
-        def mix() = {
-          val newScrut = new Scrutinee(new PatternVar(_pv.sym, EmptyTree, _pv.checked))
-          PatternMatch(newScrut, _ncol) mkRule _nrep
-        }
-      }
-
-      /** Converts this to a tree - recursively acquires subreps. */
-      final def toTree(): Tree = tracing("toTree")(typer typed applyRule())
-
-      /** The VariableRule. */
-      private def variable() = {
-        val binding = (defaults map (_.boundVariables) zip tvars.pvs) .
-          foldLeft(subst)((b, pair) => b.add(pair._1, pair._2.lhs))
-
-        VariableRule(binding, guard, guardedRest, index)
-      }
-      /** The MixtureRule: picks a rewrite rule to apply. */
-      private def mixture() = new Cut(defaults.size) mix()
-
-      /** Applying the rule will result in one of:
-        *
-        *   VariableRule - if all patterns are default patterns
-        *    MixtureRule - if one or more patterns are not default patterns
-        *          Error - no rows remaining
-        */
-      final def applyRule(): Tree =
-        if (rows.isEmpty) failTree
-        else if (others.isEmpty) variable.tree()
-        else mixture.tree()
-
-      def ppn(x: Any) = pp(x, newlines = true)
-      override def toString() =
-        if (tvars.isEmpty) "Rep(%d) = %s".format(rows.size, ppn(rows))
-        else "Rep(%dx%d)%s%s".format(tvars.size, rows.size, ppn(tvars), ppn(rows))
-    }
-
-    /** Expands the patterns recursively. */
-    final def expand(roots: List[PatternVar], cases: List[CaseDef]) = tracing("expand") {
-      for ((CaseDef(pat, guard, body), bx) <- cases.zipWithIndex) yield {
-        val subtrees = pat match {
-          case x if roots.length <= 1 => List(x)
-          case Apply(_, args)         => args
-          case WILD()                 => emptyTrees(roots.length)
-        }
-        val params = pat filter (_.isInstanceOf[Bind]) map (_.symbol) distinct
-        val row    = Row(toPats(subtrees), NoBinding, guard, bx)
-        val state  = body match {
-          case x: Literal => LiteralState(bx, params, body)
-          case _          => FinalState(bx, params, body)
-        }
-
-        row -> state
-      }
-    }
-
-    /** returns the condition in "if (cond) k1 else k2"
-     */
-    final def condition(tpe: Type, scrut: Scrutinee, isBound: Boolean): Tree = {
-      assert(scrut.isDefined)
-      val cond = handleOuter(condition(tpe, scrut.id, isBound))
-
-      if (!needsOuterTest(tpe, scrut.tpe, owner)) cond
-      else addOuterCondition(cond, tpe, scrut.id)
-    }
-
-    final def condition(tpe: Type, scrutTree: Tree, isBound: Boolean): Tree = {
-      assert((tpe ne NoType) && (scrutTree.tpe ne NoType))
-      def isMatchUnlessNull = scrutTree.tpe <:< tpe && tpe.isAnyRef
-      def isRef             = scrutTree.tpe.isAnyRef
-
-      // See ticket #1503 for the motivation behind checking for a binding.
-      // The upshot is that it is unsound to assume equality means the right
-      // type, but if the value doesn't appear on the right hand side of the
-      // match that's unimportant; so we add an instance check only if there
-      // is a binding.
-      def bindingWarning() = {
-        if (isBound && settings.Xmigration.value < ScalaVersion.twoDotEight) {
-          cunit.warning(scrutTree.pos,
-            "A bound pattern such as 'x @ Pattern' now matches fewer cases than the same pattern with no binding.")
-        }
-      }
-
-      def genEquals(sym: Symbol): Tree = {
-        val t1: Tree = REF(sym) MEMBER_== scrutTree
-
-        if (isBound) {
-          bindingWarning()
-          t1 AND (scrutTree IS tpe.widen)
-        }
-        else t1
-      }
-
-      typer typed {
-        tpe match {
-          case ConstantType(Constant(null)) if isRef  => scrutTree OBJ_EQ NULL
-          case ConstantType(const)                    => scrutTree MEMBER_== Literal(const)
-          case SingleType(NoPrefix, sym)              => genEquals(sym)
-          case SingleType(pre, sym) if sym.isStable   => genEquals(sym)
-          case ThisType(sym) if sym.isModule          => genEquals(sym)
-          case _ if isMatchUnlessNull                 => scrutTree OBJ_NE NULL
-          case _                                      => scrutTree IS tpe
-        }
-      }
-    }
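-    // Illustrative sketch (added comment, not in the original source) of conditions the
-    // match above produces:
-    //   case "foo"     =>   // scrutinee == "foo"             (ConstantType)
-    //   case Nil       =>   // Nil == scrutinee               (stable SingleType via genEquals)
-    //   case s: String =>   // scrutinee ne null               if statically already a String,
-    //                       // scrutinee.isInstanceOf[String]  otherwise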
-
-    /** adds a test comparing the dynamic outer to the static outer */
-    final def addOuterCondition(cond: Tree, tpe2test: Type, scrut: Tree) = {
-      val TypeRef(prefix, _, _) = tpe2test
-      val theRef = handleOuter(prefix match {
-        case NoPrefix         => abort("assertion failed: NoPrefix")
-        case ThisType(clazz)  => THIS(clazz)
-        case pre              => REF(pre.prefix, pre.termSymbol)
-      })
-      outerAccessor(tpe2test.typeSymbol) match {
-        case NoSymbol => ifDebug(cunit.warning(scrut.pos, "no outer acc for " + tpe2test.typeSymbol)) ; cond
-        case outerAcc =>
-          val casted = gen.mkAsInstanceOf(scrut, tpe2test, any = true, wrapInApply = true)
-          cond AND ((casted DOT outerAcc)() OBJ_EQ theRef)
-      }
-    }
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
deleted file mode 100644
index 7b2fcf0..0000000
--- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
+++ /dev/null
@@ -1,137 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import PartialFunction._
-import scala.language.postfixOps
-
-trait PatternBindings extends ast.TreeDSL
-{
-  self: ExplicitOuter with ParallelMatching =>
-
-  import global.{ typer => _, _ }
-  import definitions.{ EqualsPatternClass }
-  import CODE._
-  import Debug._
-
-  /** EqualsPattern **/
-  def isEquals(tpe: Type)             = tpe.typeSymbol == EqualsPatternClass
-  def mkEqualsRef(tpe: Type)          = typeRef(NoPrefix, EqualsPatternClass, List(tpe))
-  def decodedEqualsType(tpe: Type)    =
-    if (tpe.typeSymbol == EqualsPatternClass) tpe.typeArgs.head else tpe
-
-  // A subtype test which creates fresh existentials for type
-  // parameters on the right hand side.
-  def matches(arg1: Type, arg2: Type) = decodedEqualsType(arg1) matchesPattern decodedEqualsType(arg2)
-
-  // For spotting duplicate unapplies
-  def isEquivalentTree(t1: Tree, t2: Tree) = (t1.symbol == t2.symbol) && (t1 equalsStructure t2)
-
-  // Reproduce the Bind trees wrapping oldTree around newTree
-  def moveBindings(oldTree: Tree, newTree: Tree): Tree = oldTree match {
-    case b @ Bind(x, body)  => Bind(b.symbol, moveBindings(body, newTree))
-    case _                  => newTree
-  }
-
-  // used as argument to `EqualsPatternClass`
-  case class PseudoType(o: Tree) extends SimpleTypeProxy {
-    override def underlying: Type = o.tpe
-    override def safeToString: String = "PseudoType("+o+")"
-  }
-
-  // If the given pattern contains alternatives, return it as a list of patterns.
-  // Makes typed copies of any bindings found so all alternatives point to the final state.
-  def extractBindings(p: Pattern): List[Pattern] =
-    toPats(_extractBindings(p.boundTree, identity))
-
-  private def _extractBindings(p: Tree, prevBindings: Tree => Tree): List[Tree] = {
-    def newPrev(b: Bind) = (x: Tree) => treeCopy.Bind(b, b.name, x) setType x.tpe
-
-    p match {
-      case b @ Bind(_, body)  => _extractBindings(body, newPrev(b))
-      case Alternative(ps)    => ps map prevBindings
-    }
-  }
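-  // Illustrative note (added comment, not in the original source): for a bound alternative
-  // such as `x @ (Foo() | Bar())`, extractBindings yields the two patterns `x @ Foo()` and
-  // `x @ Bar()`, re-wrapping each alternative in a typed copy of the enclosing Bind.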
-
-  trait PatternBindingLogic {
-    self: Pattern =>
-
-    // This is for traversing the pattern tree - pattern types which might have
-    // bound variables beneath them return a list of said patterns for flatMapping.
-    def subpatternsForVars: List[Pattern] = Nil
-
-    // The outermost Bind(x1, Bind(x2, ...)) surrounding the tree.
-    private var _boundTree: Tree = tree
-    def boundTree = _boundTree
-    def setBound(x: Bind): Pattern = {
-      _boundTree = x
-      this
-    }
-    def boundVariables = strip(boundTree)
-
-    // If a tree has bindings, boundTree looks something like
-    //   Bind(v3, Bind(v2, Bind(v1, tree)))
-    // This takes the given tree and creates a new pattern
-    //   using the same bindings.
-    def rebindTo(t: Tree): Pattern = Pattern(moveBindings(boundTree, t))
-
-    // Wrap this pattern's bindings around (_: Type)
-    def rebindToType(tpe: Type, ascription: Type = null): Pattern = {
-      val aType = if (ascription == null) tpe else ascription
-      rebindTo(Typed(WILD(tpe), TypeTree(aType)) setType tpe)
-    }
-
-    // Wrap them around _
-    def rebindToEmpty(tpe: Type): Pattern =
-      rebindTo(Typed(EmptyTree, TypeTree(tpe)) setType tpe)
-
-    // Wrap them around a singleton type for an EqualsPattern check.
-    def rebindToEqualsCheck(): Pattern =
-      rebindToType(equalsCheck)
-
-    // Like rebindToEqualsCheck, but subtly different.  Not trying to be
-    // mysterious -- I haven't sorted it all out yet.
-    def rebindToObjectCheck(): Pattern =
-      rebindToType(mkEqualsRef(sufficientType), sufficientType)
-
-    /** Helpers **/
-    private def wrapBindings(vs: List[Symbol], pat: Tree): Tree = vs match {
-      case Nil      => pat
-      case x :: xs  => Bind(x, wrapBindings(xs, pat)) setType pat.tpe
-    }
-    private def strip(t: Tree): List[Symbol] = t match {
-      case b @ Bind(_, pat) => b.symbol :: strip(pat)
-      case _                => Nil
-    }
-    private def deepstrip(t: Tree): List[Symbol] =
-      treeCollect(t, { case x: Bind => x.symbol })
-  }
-
-  case class Binding(pvar: Symbol, tvar: Symbol) {
-    override def toString() = pvar.name + " -> " + tvar.name
-  }
-
-  class Bindings(private val vlist: List[Binding]) {
-    // if (!vlist.isEmpty)
-    //   traceCategory("Bindings", this.toString)
-
-    def get() = vlist
-    def toMap = vlist map (x => (x.pvar, x.tvar)) toMap
-
-    def add(vs: Iterable[Symbol], tvar: Symbol): Bindings = {
-      val newBindings = vs.toList map (v => Binding(v, tvar))
-      new Bindings(newBindings ++ vlist)
-    }
-
-    override def toString() =
-      if (vlist.isEmpty) "<none>"
-      else vlist.mkString(", ")
-  }
-
-  val NoBinding: Bindings = new Bindings(Nil)
-}
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
deleted file mode 100644
index ef41246..0000000
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ /dev/null
@@ -1,499 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import symtab.Flags
-import PartialFunction._
-
-/** Patterns are wrappers for Trees with enhanced semantics.
- *
- *  @author Paul Phillips
- */
-
-trait Patterns extends ast.TreeDSL {
-  self: transform.ExplicitOuter =>
-
-  import global.{ typer => _, _ }
-  import definitions._
-  import CODE._
-  import Debug._
-  import treeInfo.{ unbind, isStar, isVarPattern }
-
-  type PatternMatch       = MatchMatrix#PatternMatch
-  private type PatternVar = MatrixContext#PatternVar
-
-  // Fresh patterns
-  def emptyPatterns(i: Int): List[Pattern] = List.fill(i)(NoPattern)
-  def emptyTrees(i: Int): List[Tree] = List.fill(i)(EmptyTree)
-
-  // An empty pattern
-  def NoPattern = WildcardPattern()
-
-  // The constant null pattern
-  def NullPattern = LiteralPattern(NULL)
-
-  // The Nil pattern
-  def NilPattern = Pattern(gen.mkNil)
-
-  // 8.1.1
-  case class VariablePattern(tree: Ident) extends NamePattern {
-    lazy val Ident(name) = tree
-    require(isVarPattern(tree) && name != nme.WILDCARD)
-    override def covers(sym: Symbol) = true
-    override def description = "%s".format(name)
-  }
-
-  // 8.1.1 (b)
-  case class WildcardPattern() extends Pattern {
-    def tree = EmptyTree
-    override def covers(sym: Symbol) = true
-    override def isDefault = true
-    override def description = "_"
-  }
-
-  // 8.1.2
-  case class TypedPattern(tree: Typed) extends Pattern {
-    lazy val Typed(expr, tpt) = tree
-
-    override def covers(sym: Symbol) = newMatchesPattern(sym, tpt.tpe)
-    override def sufficientType = tpt.tpe
-    override def subpatternsForVars: List[Pattern] = List(Pattern(expr))
-    override def simplify(pv: PatternVar) = Pattern(expr) match {
-      case ExtractorPattern(ua) if pv.sym.tpe <:< tpt.tpe => this rebindTo expr
-      case _                                              => this
-    }
-    override def description = "%s: %s".format(Pattern(expr), tpt)
-  }
-
-  // 8.1.3
-  case class LiteralPattern(tree: Literal) extends Pattern {
-    lazy val Literal(const @ Constant(value)) = tree
-
-    def isSwitchable = cond(const.tag) { case ByteTag | ShortTag | IntTag | CharTag => true }
-    def intValue = const.intValue
-    override def description = {
-      val s = if (value == null) "null" else value.toString
-      "Lit(%s)".format(s)
-    }
-  }
-
-  // 8.1.4 (a)
-  case class ApplyIdentPattern(tree: Apply) extends ApplyPattern with NamePattern {
-    // XXX - see bug 3411 for code which violates this assumption
-    // require (!isVarPattern(fn) && args.isEmpty)
-    lazy val ident @ Ident(name) = fn
-
-    override def sufficientType = Pattern(ident).equalsCheck
-    override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
-    override def description = "Id(%s)".format(name)
-  }
-  // 8.1.4 (b)
-  case class ApplySelectPattern(tree: Apply) extends ApplyPattern with SelectPattern {
-    require (args.isEmpty)
-    lazy val Apply(select: Select, _) = tree
-
-    override lazy val sufficientType = qualifier.tpe match {
-      case t: ThisType  => singleType(t, sym)   // this.X
-      case _            =>
-        qualifier match {
-          case _: Apply => PseudoType(tree)
-          case _        => singleType(Pattern(qualifier).necessaryType, sym)
-        }
-    }
-
-    override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType)
-    override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
-    override def description = backticked match {
-      case Some(s)  => "this." + s
-      case _        => "Sel(%s.%s)".format(Pattern(qualifier), name)
-    }
-
-  }
-  // 8.1.4 (c)
-  case class StableIdPattern(tree: Select) extends SelectPattern {
-    def select = tree
-    override def description = "St(%s)".format(printableSegments.mkString(" . "))
-    private def printableSegments =
-      pathSegments filter (x => !x.isEmpty && (x.toString != "$iw"))
-  }
-  // 8.1.4 (d)
-  case class ObjectPattern(tree: Apply) extends ApplyPattern {  // NamePattern?
-    require(!fn.isType && isModule)
-
-    override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType)
-    override def sufficientType = tpe.narrow
-    override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
-    override def description = "Obj(%s)".format(fn)
-  }
-  // 8.1.4 (e)
-  case class SimpleIdPattern(tree: Ident) extends NamePattern {
-    val Ident(name) = tree
-    override def covers(sym: Symbol) = newMatchesPattern(sym, tpe.narrow)
-    override def description = "Id(%s)".format(name)
-  }
-
-  // 8.1.5
-  case class ConstructorPattern(tree: Apply) extends ApplyPattern with NamePattern {
-    require(fn.isType && this.isCaseClass, "tree: " + tree + " fn: " + fn)
-    def name = tpe.typeSymbol.name
-    def cleanName = tpe.typeSymbol.decodedName
-    def hasPrefix = tpe.prefix.prefixString != ""
-    def prefixedName =
-      if (hasPrefix) "%s.%s".format(tpe.prefix.prefixString, cleanName)
-      else cleanName
-
-    private def isColonColon = cleanName == "::"
-
-    override def subpatterns(pm: MatchMatrix#PatternMatch) =
-      if (pm.head.isCaseClass) toPats(args)
-      else super.subpatterns(pm)
-
-    override def simplify(pv: PatternVar) =
-      if (args.isEmpty) this rebindToEmpty tree.tpe
-      else this
-
-    override def covers(sym: Symbol) = {
-      debugging("[constructor] Does " + this + " cover " + sym + " ? ") {
-        sym.tpe.typeSymbol == this.tpe.typeSymbol
-      }
-    }
-    override def description = {
-      if (isColonColon) "%s :: %s".format(Pattern(args(0)), Pattern(args(1)))
-      else "%s(%s)".format(name, toPats(args).mkString(", "))
-    }
-  }
-  // 8.1.6
-  case class TuplePattern(tree: Apply) extends ApplyPattern {
-    override def description = "((%s))".format(args.size, toPats(args).mkString(", "))
-  }
-
-  // 8.1.7 / 8.1.8 (unapply and unapplySeq calls)
-  case class ExtractorPattern(tree: UnApply) extends UnapplyPattern {
-    private def uaTyped = Typed(tree, TypeTree(arg.tpe)) setType arg.tpe
-
-    override def simplify(pv: PatternVar) = {
-      if (pv.tpe <:< arg.tpe) this
-      else this rebindTo uaTyped
-    }
-    override def description = "Unapply(%s => %s)".format(necessaryType, resTypesString)
-  }
-
-  // Special List handling.  It was like that when I got here.
-  case class ListExtractorPattern(tree: UnApply, tpt: Tree, elems: List[Tree]) extends UnapplyPattern with SequenceLikePattern {
-    // As yet I can't testify this is doing any good relative to using
-    // tpt.tpe, but it doesn't seem to hurt either.
-    private lazy val packedType = global.typer.computeType(tpt, tpt.tpe)
-    private lazy val consRef    = appliedType(ConsClass, packedType)
-    private lazy val listRef    = appliedType(ListClass, packedType)
-    private lazy val seqRef     = appliedType(SeqClass, packedType)
-
-    private def thisSeqRef = {
-      val tc = (tree.tpe baseType SeqClass).typeConstructor
-      if (tc.typeParams.size == 1) appliedType(tc, List(packedType))
-      else seqRef
-    }
-
-    // Fold a list into a well-typed x :: y :: etc :: tree.
-    private def listFolder(hd: Tree, tl: Tree): Tree = unbind(hd) match {
-      case t @ Star(_) => moveBindings(hd, WILD(t.tpe))
-      case _           =>
-        val dummyMethod = NoSymbol.newTermSymbol(newTermName("matching$dummy"))
-        val consType    = MethodType(dummyMethod newSyntheticValueParams List(packedType, listRef), consRef)
-
-        Apply(TypeTree(consType), List(hd, tl)) setType consRef
-    }
-    private def foldedPatterns = elems.foldRight(gen.mkNil)((x, y) => listFolder(x, y))
-    override def necessaryType = if (nonStarPatterns.nonEmpty) consRef else listRef
-
-    override def simplify(pv: PatternVar) = {
-      if (pv.tpe <:< necessaryType)
-        Pattern(foldedPatterns)
-      else
-        this rebindTo (Typed(tree, TypeTree(necessaryType)) setType necessaryType)
-    }
-    override def description = "List(%s => %s)".format(packedType, resTypesString)
-  }
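-  // Illustrative note (added comment, not in the original source): simplify above rewrites
-  // a pattern such as `List(a, b)` into the equivalent cons form `a :: b :: Nil`
-  // (via listFolder/foldedPatterns) once the scrutinee conforms to necessaryType.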
-
-  trait SequenceLikePattern extends Pattern {
-    def elems: List[Tree]
-    override def hasStar = elems.nonEmpty && isStar(elems.last)
-
-    def elemPatterns    = toPats(elems)
-    def nonStarElems    = if (hasStar) elems.init else elems
-    def nonStarPatterns = toPats(nonStarElems)
-    def nonStarLength   = nonStarElems.length
-  }
-
-  // 8.1.8 (b) (literal ArrayValues)
-  case class SequencePattern(tree: ArrayValue) extends Pattern with SequenceLikePattern {
-    lazy val ArrayValue(elemtpt, elems) = tree
-
-    override def subpatternsForVars: List[Pattern] = elemPatterns
-    override def description = "Seq(%s)".format(elemPatterns mkString ", ")
-  }
-
-  // 8.1.8 (c)
-  case class StarPattern(tree: Star) extends Pattern {
-    lazy val Star(elem) = tree
-    override def description = "_*"
-  }
-  // XXX temporary?
-  case class ThisPattern(tree: This) extends NamePattern {
-    lazy val This(name) = tree
-    override def description = "this"
-  }
-
-  // 8.1.9
-  // InfixPattern ... subsumed by Constructor/Extractor Patterns
-
-  // 8.1.10
-  case class AlternativePattern(tree: Alternative) extends Pattern {
-    private lazy val Alternative(subtrees) = tree
-    private def alts = toPats(subtrees)
-    override def description = "Alt(%s)".format(alts mkString " | ")
-  }
-
-  // 8.1.11
-  // XMLPattern ... for now, subsumed by SequencePattern, but if we want
-  //   to make it work right, it probably needs special handling.
-
-  private def abortUnknownTree(tree: Tree) =
-    abort("Unknown Tree reached pattern matcher: %s/%s".format(tree, tree.getClass))
-
-  object Pattern {
-    // a small tree -> pattern cache
-    private val cache = perRunCaches.newMap[Tree, Pattern]()
-
-    def apply(tree: Tree): Pattern = {
-      if (cache contains tree)
-        return cache(tree)
-
-      val p = tree match {
-        case x: Bind              => apply(unbind(tree)) setBound x
-        case EmptyTree            => WildcardPattern()
-        case Ident(nme.WILDCARD)  => WildcardPattern()
-        case x @ Alternative(ps)  => AlternativePattern(x)
-        case x: Apply             => ApplyPattern(x)
-        case x: Typed             => TypedPattern(x)
-        case x: Literal           => LiteralPattern(x)
-        case x: UnApply           => UnapplyPattern(x)
-        case x: Ident             => if (isVarPattern(x)) VariablePattern(x) else SimpleIdPattern(x)
-        case x: ArrayValue        => SequencePattern(x)
-        case x: Select            => StableIdPattern(x)
-        case x: Star              => StarPattern(x)
-        case x: This              => ThisPattern(x) // XXX ?
-        case _                    => abortUnknownTree(tree)
-      }
-      cache(tree) = p
-
-      // limiting the trace output
-      p match {
-        case WildcardPattern()  => p
-        case _: LiteralPattern  => p
-        case _                  => tracing("Pattern")(p)
-      }
-    }
-    // matching on Pattern(...) always skips the bindings.
-    def unapply(other: Any): Option[Tree] = other match {
-      case x: Tree    => unapply(Pattern(x))
-      case x: Pattern => Some(x.tree)
-      case _          => None
-    }
-  }
-
-  object UnapplyPattern {
-    private object UnapplySeq {
-      def unapply(x: UnApply) = x match {
-        case UnApply(
-        Apply(TypeApply(Select(qual, nme.unapplySeq), List(tpt)), _),
-        List(ArrayValue(_, elems))) =>
-          Some((qual.symbol, tpt, elems))
-        case _ =>
-          None
-       }
-    }
-
-    def apply(x: UnApply): Pattern = x match {
-      case UnapplySeq(ListModule, tpt, elems) =>
-        ListExtractorPattern(x, tpt, elems)
-      case _ =>
-        ExtractorPattern(x)
-    }
-  }
-
-  // right now a tree like x @ Apply(fn, Nil) where !fn.isType
-  // is handled by creating a singleton type:
-  //
-  //    val stype = Types.singleType(x.tpe.prefix, x.symbol)
-  //
-  // and then passing that as a type argument to EqualsPatternClass:
-  //
-  //    val tpe = typeRef(NoPrefix, EqualsPatternClass, List(stype))
-  //
-  // then creating a Typed pattern and rebinding.
-  //
-  //    val newpat = Typed(EmptyTree, TypeTree(tpe)) setType tpe
-  //
-  // This is also how Select(qual, name) is handled.
-  object ApplyPattern {
-    def apply(x: Apply): Pattern = {
-      val Apply(fn, args) = x
-      def isModule  = x.symbol.isModule || x.tpe.termSymbol.isModule
-
-      if (fn.isType) {
-        if (isTupleType(fn.tpe)) TuplePattern(x)
-        else ConstructorPattern(x)
-      }
-      else if (args.isEmpty) {
-        if (isModule) ObjectPattern(x)
-        else fn match {
-          case _: Ident   => ApplyIdentPattern(x)
-          case _: Select  => ApplySelectPattern(x)
-        }
-      }
-      else abortUnknownTree(x)
-    }
-  }
-
-  /** Some intermediate pattern classes with shared structure **/
-
-  sealed trait SelectPattern extends NamePattern {
-    def select: Select
-    lazy val Select(qualifier, name) = select
-    def pathSegments = getPathSegments(tree)
-    def backticked: Option[String] = qualifier match {
-      case _: This if nme.isVariableName(name)  => Some("`%s`".format(name))
-      case _                                => None
-    }
-    override def covers(sym: Symbol) = newMatchesPattern(sym, tree.tpe)
-    protected def getPathSegments(t: Tree): List[Name] = t match {
-      case Select(q, name)  => name :: getPathSegments(q)
-      case Apply(f, Nil)    => getPathSegments(f)
-      case _                => Nil
-    }
-  }
-
-  sealed trait NamePattern extends Pattern {
-    def name: Name
-    override def sufficientType = tpe.narrow
-    override def simplify(pv: PatternVar) = this.rebindToEqualsCheck()
-    override def description = name.toString
-  }
-
-  sealed trait UnapplyPattern extends Pattern {
-    lazy val UnApply(unfn, args) = tree
-    lazy val Apply(fn, _) = unfn
-    lazy val MethodType(List(arg, _*), _) = fn.tpe
-
-    // Covers if the symbol matches the unapply method's argument type,
-    // and the return type of the unapply is Some.
-    override def covers(sym: Symbol) = newMatchesPattern(sym, arg.tpe)
-
-    // TODO: for alwaysCovers:
-    //   fn.tpe.finalResultType.typeSymbol == SomeClass
-
-    override def necessaryType = arg.tpe
-    override def subpatternsForVars = args match {
-      case List(ArrayValue(elemtpe, elems)) => toPats(elems)
-      case _                                => toPats(args)
-    }
-
-    def resTypes = analyzer.unapplyTypeList(unfn.pos, unfn.symbol, unfn.tpe, args)
-    def resTypesString = resTypes match {
-      case Nil  => "Boolean"
-      case xs   => xs.mkString(", ")
-    }
-  }
-
-  sealed trait ApplyPattern extends Pattern {
-    lazy val Apply(fn, args) = tree
-    override def subpatternsForVars: List[Pattern] = toPats(args)
-
-    override def dummies =
-      if (!this.isCaseClass) Nil
-      else emptyPatterns(sufficientType.typeSymbol.caseFieldAccessors.size)
-
-    def isConstructorPattern = fn.isType
-    override def covers(sym: Symbol) = newMatchesPattern(sym, fn.tpe)
-  }
-
-  sealed abstract class Pattern extends PatternBindingLogic {
-    def tree: Tree
-
-    // returns either a simplification of this pattern or identity.
-    def simplify(pv: PatternVar): Pattern = this
-
-    // the right number of dummies for this pattern
-    def dummies: List[Pattern] = Nil
-
-    // Is this a default pattern (untyped "_" or an EmptyTree inserted by the matcher)
-    def isDefault = false
-
-    // what type must a scrutinee have to have any chance of matching this pattern?
-    def necessaryType = tpe
-
-    // what type could a scrutinee have which would automatically indicate a match?
-    // (nullness and guards will still be checked.)
-    def sufficientType = tpe
-
-    // the subpatterns for this pattern (at the moment, that means constructor arguments)
-    def subpatterns(pm: MatchMatrix#PatternMatch): List[Pattern] = pm.dummies
-
-    // if this pattern should be considered to cover the given symbol
-    def covers(sym: Symbol): Boolean = newMatchesPattern(sym, sufficientType)
-    def newMatchesPattern(sym: Symbol, pattp: Type) = {
-      debugging("[" + kindString + "] Does " + pattp + " cover " + sym + " ? ") {
-        (sym.isModuleClass && (sym.tpe.typeSymbol eq pattp.typeSymbol)) ||
-        (sym.tpe.baseTypeSeq exists (_ matchesPattern pattp))
-      }
-    }
-
-    def    sym  = tree.symbol
-    def    tpe  = tree.tpe
-    def isEmpty = tree.isEmpty
-
-    def isModule    = sym.isModule || tpe.termSymbol.isModule
-    def isCaseClass = tpe.typeSymbol.isCase
-    def isObject    = (sym != null) && (sym != NoSymbol) && tpe.prefix.isStable  // XXX not entire logic
-
-    def hasStar = false
-
-    def setType(tpe: Type): this.type = {
-      tree setType tpe
-      this
-    }
-
-    def equalsCheck =
-      tracing("equalsCheck")(
-        if (sym.isValue) singleType(NoPrefix, sym)
-        else tpe.narrow
-      )
-
-    /** Standard methods **/
-    override def equals(other: Any) = other match {
-      case x: Pattern => this.boundTree == x.boundTree
-      case _          => super.equals(other)
-    }
-    override def hashCode() = boundTree.hashCode()
-    def description = super.toString
-
-    final override def toString = description
-
-    def toTypeString() = "%s <: x <: %s".format(necessaryType, sufficientType)
-    def kindString = ""
-  }
-
-  /*** Extractors ***/
-
-  object UnapplyParamType {
-    def unapply(x: Tree): Option[Type] = condOpt(unbind(x)) {
-      case UnApply(Apply(fn, _), _) => fn.tpe match {
-        case m: MethodType => m.paramTypes.head
-      }
-    }
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/package.scala b/src/compiler/scala/tools/nsc/package.scala
index 00a9f3b..817a4a5 100644
--- a/src/compiler/scala/tools/nsc/package.scala
+++ b/src/compiler/scala/tools/nsc/package.scala
@@ -6,14 +6,23 @@
 package scala.tools
 
 package object nsc {
+  type Mode = scala.reflect.internal.Mode
+  val Mode = scala.reflect.internal.Mode
+
+  def EXPRmode = Mode.EXPRmode
+
   type Phase = scala.reflect.internal.Phase
   val NoPhase = scala.reflect.internal.NoPhase
 
+  type Variance = scala.reflect.internal.Variance
+  val Variance = scala.reflect.internal.Variance
+
   type FatalError = scala.reflect.internal.FatalError
   val FatalError = scala.reflect.internal.FatalError
 
   type MissingRequirementError = scala.reflect.internal.MissingRequirementError
   val MissingRequirementError = scala.reflect.internal.MissingRequirementError
 
-  val ListOfNil = List(Nil)
+  @deprecated("Use scala.reflect.internal.util.ListOfNil", "2.11.0")
+  lazy val ListOfNil = scala.reflect.internal.util.ListOfNil
 }
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index 2050ce7..7837f9a 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -6,14 +6,15 @@
 package scala.tools.nsc
 package plugins
 
-import io.{ File, Path, Jar }
-import java.net.URLClassLoader
-import java.util.jar.JarFile
+import scala.tools.nsc.io.{ Jar }
+import scala.tools.nsc.util.ScalaClassLoader
+import scala.reflect.io.{ Directory, File, Path }
+import java.io.InputStream
 import java.util.zip.ZipException
 
 import scala.collection.mutable
 import mutable.ListBuffer
-import scala.xml.XML
+import scala.util.{ Try, Success, Failure }
 
 /** Information about a plugin loaded from a jar file.
  *
@@ -37,14 +38,35 @@ abstract class Plugin {
   val description: String
 
   /** The compiler that this plugin uses.  This is normally equated
-   *  to a constructor parameter in the concrete subclass. */
+   *  to a constructor parameter in the concrete subclass.
+   */
   val global: Global
 
-  /** Handle any plugin-specific options.  The `-P:plugname:` part
-   *  will not be present. */
-  def processOptions(options: List[String], error: String => Unit) {
-    if (!options.isEmpty)
-      error("Error: " + name + " has no options")
+  def options: List[String] = {
+    // Process plugin options of form plugin:option
+    def namec = name + ":"
+    global.settings.pluginOptions.value filter (_ startsWith namec) map (_ stripPrefix namec)
+  }
+
+  /** Handle any plugin-specific options.
+   *  The user writes `-P:plugname:opt1,opt2`,
+   *  but the plugin sees `List(opt1, opt2)`.
+   *  The plugin can opt out of further processing
+   *  by returning false.  For example, if the plugin
+   *  has an "enable" flag, now would be a good time
+   *  to sit on the bench.
+   *  @param options plugin arguments
+   *  @param error error function
+   *  @return true to continue, or false to opt out
+   */
+  def init(options: List[String], error: String => Unit): Boolean = {
+    processOptions(options, error)
+    true
+  }
+
+  @deprecated("use Plugin#init instead", since="2.11")
+  def processOptions(options: List[String], error: String => Unit): Unit = {
+    if (!options.isEmpty) error(s"Error: $name takes no options")
   }
 
   /** A description of this plugin's options, suitable as a response
@@ -63,90 +85,116 @@ object Plugin {
 
   private val PluginXML = "scalac-plugin.xml"
 
-  /** Create a class loader with the specified file plus
+  /** Create a class loader with the specified locations plus
    *  the loader that loaded the Scala compiler.
    */
-  private def loaderFor(jarfiles: Seq[Path]): ClassLoader = {
+  private def loaderFor(locations: Seq[Path]): ScalaClassLoader = {
     val compilerLoader = classOf[Plugin].getClassLoader
-    val jarurls = jarfiles map (_.toURL)
+    val urls = locations map (_.toURL)
 
-    new URLClassLoader(jarurls.toArray, compilerLoader)
+    ScalaClassLoader fromURLs (urls, compilerLoader)
   }
 
-  /** Try to load a plugin description from the specified
-   *  file, returning <code>None</code> if it does not work.
+  /** Try to load a plugin description from the specified location.
    */
-  private def loadDescription(jarfile: Path): Option[PluginDescription] =
-    // XXX Return to this once we have some ARM support
-    if (!jarfile.exists) None
-    else try {
-      val jar = new JarFile(jarfile.jfile)
-
-      try {
-        jar getEntry PluginXML match {
-          case null  => None
-          case entry =>
-            val in = jar getInputStream entry
-            val packXML = XML load in
-            in.close()
-
-            PluginDescription fromXML packXML
-        }
-      }
-      finally jar.close()
-    }
-    catch {
-      case _: ZipException => None
+  private def loadDescriptionFromJar(jarp: Path): Try[PluginDescription] = {
+    // XXX Return to this once we have more ARM support
+    def read(is: Option[InputStream]) = is match {
+      case None     => throw new PluginLoadException(jarp.path, s"Missing $PluginXML in $jarp")
+      case Some(is) => PluginDescription.fromXML(is)
     }
+    Try(new Jar(jarp.jfile).withEntryStream(PluginXML)(read))
+  }
+
+  private def loadDescriptionFromFile(f: Path): Try[PluginDescription] =
+    Try(PluginDescription.fromXML(new java.io.FileInputStream(f.jfile)))
 
   type AnyClass = Class[_]
 
-  /** Loads a plugin class from the named jar file.
-   *
-   *  @return `None` if the jar file has no plugin in it or
-   *                 if the plugin is badly formed.
+  /** Use a class loader to load the plugin class.
    */
-  def loadFrom(jarfile: Path, loader: ClassLoader): Option[AnyClass] =
-    loadDescription(jarfile) match {
-      case None =>
-        println("Warning: could not load descriptor for plugin %s".format(jarfile))
-        None
-      case Some(pdesc) =>
-        try Some(loader loadClass pdesc.classname) catch {
-        case _: Exception =>
-          println("Warning: class not found for plugin in %s (%s)".format(jarfile, pdesc.classname))
-          None
-      }
+  def load(classname: String, loader: ClassLoader): Try[AnyClass] = {
+    import scala.util.control.NonFatal
+    try {
+      Success[AnyClass](loader loadClass classname)
+    } catch {
+      case NonFatal(e) =>
+        Failure(new PluginLoadException(classname, s"Error: unable to load class: $classname"))
+      case e: NoClassDefFoundError =>
+        Failure(new PluginLoadException(classname, s"Error: class not found: ${e.getMessage} required by $classname"))
     }
+  }
 
-  /** Load all plugins found in the argument list, both in the
-   *  jar files explicitly listed, and in the jar files in the
-   *  directories specified. Skips all plugins in `ignoring`.
-   *  A single classloader is created and used to load all of them.
+  /** Load all plugins specified by the arguments.
+   *  Each location of `paths` must be a valid plugin archive or exploded archive.
+   *  Each of `paths` must define one plugin.
+   *  Each of `dirs` may be a directory containing arbitrary plugin archives.
+   *  Skips all plugins named in `ignoring`.
+   *  A classloader is created to load each plugin.
    */
   def loadAllFrom(
-    jars: List[Path],
+    paths: List[List[Path]],
     dirs: List[Path],
-    ignoring: List[String]): List[AnyClass] =
+    ignoring: List[String]): List[Try[AnyClass]] =
   {
-    val alljars = (jars ::: (for {
-      dir <- dirs if dir.isDirectory
-      entry <- dir.toDirectory.files.toList sortBy (_.name)
-// was:      if Path.isJarOrZip(entry)
-      if Jar.isJarOrZip(entry)
-      pdesc <- loadDescription(entry)
-      if !(ignoring contains pdesc.name)
-    } yield entry)).distinct
-
-    val loader = loaderFor(alljars)
-    (alljars map (loadFrom(_, loader))).flatten
+    // List[(jar, Try(descriptor))] in dir
+    def scan(d: Directory) =
+      d.files.toList sortBy (_.name) filter (Jar isJarOrZip _) map (j => (j, loadDescriptionFromJar(j)))
+
+    type PDResults = List[Try[(PluginDescription, ScalaClassLoader)]]
+
+    // scan plugin dirs for jars containing plugins, ignoring dirs with none and other jars
+    val fromDirs: PDResults = dirs filter (_.isDirectory) flatMap { d =>
+      scan(d.toDirectory) collect {
+        case (j, Success(pd)) => Success((pd, loaderFor(Seq(j))))
+      }
+    }
+
+    // scan jar paths for plugins, taking the first plugin you find.
+    // a path element can be either a plugin.jar or an exploded dir.
+    def findDescriptor(ps: List[Path]) = {
+      def loop(qs: List[Path]): Try[PluginDescription] = qs match {
+        case Nil       => Failure(new MissingPluginException(ps))
+        case p :: rest =>
+          if (p.isDirectory) loadDescriptionFromFile(p.toDirectory / PluginXML)
+          else if (p.isFile) loadDescriptionFromJar(p.toFile)
+          else loop(rest)
+      }
+      loop(ps)
+    }
+    val fromPaths: PDResults = paths map (p => (p, findDescriptor(p))) map {
+      case (p, Success(pd)) => Success((pd, loaderFor(p)))
+      case (_, Failure(e))  => Failure(e)
+    }
+
+    val seen = mutable.HashSet[String]()
+    val enabled = (fromPaths ::: fromDirs) map {
+      case Success((pd, loader)) if seen(pd.classname)        =>
+        // a nod to SI-7494, take the plugin classes distinctly
+        Failure(new PluginLoadException(pd.name, s"Ignoring duplicate plugin ${pd.name} (${pd.classname})"))
+      case Success((pd, loader)) if ignoring contains pd.name =>
+        Failure(new PluginLoadException(pd.name, s"Disabling plugin ${pd.name}"))
+      case Success((pd, loader)) =>
+        seen += pd.classname
+        Plugin.load(pd.classname, loader)
+      case Failure(e)            =>
+        Failure(e)
+    }
+    enabled   // distinct and not disabled
   }
 
   /** Instantiate a plugin class, given the class and
    *  the compiler it is to be used in.
    */
   def instantiate(clazz: AnyClass, global: Global): Plugin = {
-    val constructor = clazz getConstructor classOf[Global]
-    (constructor newInstance global).asInstanceOf[Plugin]
+    (clazz getConstructor classOf[Global] newInstance global).asInstanceOf[Plugin]
   }
 }
+
+class PluginLoadException(val path: String, message: String, cause: Exception) extends Exception(message, cause) {
+  def this(path: String, message: String) = this(path, message, null)
+}
+
+class MissingPluginException(path: String) extends PluginLoadException(path, s"No plugin in path $path") {
+  def this(paths: List[Path]) = this(paths mkString File.pathSeparator)
+}
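
    For readers following the new plugin contract above: the sketch below shows roughly what
    a plugin written against the 2.11 `init`/`options` API might look like. It is illustrative
    only; `DemoPlugin`, the plugin name "demo" and its single "enable" option are invented for
    the example, while Plugin, PluginComponent, Global and the init signature come from the
    patch. Returning false from init lets a disabled plugin drop out before its components are
    added to the compiler's phases.

        import scala.tools.nsc.Global
        import scala.tools.nsc.plugins.{ Plugin, PluginComponent }

        class DemoPlugin(val global: Global) extends Plugin {
          val name = "demo"
          val description = "demonstrates the 2.11 plugin init contract"
          val components: List[PluginComponent] = Nil  // a real plugin would list its phases here

          // The user writes -P:demo:enable; the plugin sees List("enable").
          // Returning false tells the compiler to drop this plugin from further processing.
          override def init(options: List[String], error: String => Unit): Boolean = {
            options filterNot (_ == "enable") foreach (o => error(s"demo: unknown option $o"))
            options contains "enable"
          }
        }
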
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
index 4d98b25..a6df08c 100644
--- a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
+++ b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
@@ -15,11 +15,13 @@ package plugins
  */
 abstract class PluginComponent extends SubComponent {
 
-  /** Internal flag to tell external from internal phases */
+  /** By definition, plugin phases are externally provided. */
   final override val internal = false
 
-  /** Phases supplied by plugins should not have give the runsRightAfter constraint,
-   * but can override it */
+  /** Only plugins are granted a reprieve from specifying whether they follow. */
   val runsRightAfter: Option[String] = None
 
+  /** Useful for -Xshow-phases. */
+  def description: String = ""
+
 }
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
index bd56740..bf78c93 100644
--- a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
+++ b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
@@ -6,70 +6,50 @@
 package scala.tools.nsc
 package plugins
 
-import scala.xml.{Node,NodeSeq}
+import scala.reflect.internal.util.StringContextStripMarginOps
 
 /** A description of a compiler plugin, suitable for serialization
  *  to XML for inclusion in the plugin's .jar file.
  *
  * @author Lex Spoon
  * @version 1.0, 2007-5-21
+ * @author Adriaan Moors
+ * @version 2.0, 2013
+ * @param name A short name of the plugin, used to identify it in
+ *   various contexts. The phase defined by the plugin
+ *   should have the same name.
+ * @param classname The name of the main Plugin class.
  */
-abstract class PluginDescription {
-
-  /** A short name of the compiler, used to identify it in
-   *  various contexts. The phase defined by the plugin
-   *  should have the same name.
-   */
-  val name: String
-
-  /** The name of the main class for the plugin */
-  val classname: String
-
-  /** An XML representation of this description.  It can be
-   *  read back using <code>PluginDescription.fromXML</code>.
+case class PluginDescription(name: String, classname: String) {
+  /** An XML representation of this description.
    *  It should be stored inside the jar archive file.
    */
-  def toXML: Node = {
-    <plugin>
-      <name>{name}</name>
-      <classname>{classname}</classname>
-    </plugin>
-  }
+  def toXML: String =
+    sm"""<plugin>
+         | <name>${name}</name>
+         | <classname>${classname}</classname>
+         |</plugin>"""
 }
 
 /** Utilities for the PluginDescription class.
  *
- *  @author Lex Spoon
- *  @version 1.0, 2007-5-21
+ * @author Lex Spoon
+ * @version 1.0, 2007-5-21
+ * @author Adriaan Moors
+ * @version 2.0, 2013
  */
 object PluginDescription {
-
-  def fromXML(xml: Node): Option[PluginDescription] = {
-    // check the top-level tag
-    xml match {
-      case <plugin>{_*}</plugin>  => ()
-      case _ => return None
-    }
-    // extract one field
-    def getField(field: String): Option[String] = {
-      val text = (xml \\ field).text.trim
-      if (text == "") None else Some(text)
-    }
-
-    // extract the required fields
-    val name1 = getField("name") match {
-      case None => return None
-      case Some(str) => str
-    }
-    val classname1 = getField("classname") match {
-      case None => return None
-      case Some(str) => str
-    }
-
-    Some(new PluginDescription {
-      val name = name1
-      val classname = classname1
-    })
+  private def text(ns: org.w3c.dom.NodeList): String =
+    if (ns.getLength == 1) ns.item(0).getTextContent.trim
+    else throw new RuntimeException("Bad plugin descriptor.")
+
+  def fromXML(xml: java.io.InputStream): PluginDescription = {
+    import javax.xml.parsers.DocumentBuilderFactory
+    val root = DocumentBuilderFactory.newInstance.newDocumentBuilder.parse(xml).getDocumentElement
+    root.normalize()
+    if (root.getNodeName != "plugin")
+      throw new RuntimeException("Plugin descriptor root element must be <plugin>.")
+
+    PluginDescription(text(root.getElementsByTagName("name")), text(root.getElementsByTagName("classname")))
   }
-
 }
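
    A rough usage sketch of the rewritten descriptor handling: fromXML now takes an InputStream
    and returns a PluginDescription case class (throwing on a malformed descriptor) instead of
    returning an Option. The descriptor text and the "demo"/"demo.DemoPlugin" values below are
    made up; the element names match the toXML output shown in the patch.

        import java.io.ByteArrayInputStream
        import scala.tools.nsc.plugins.PluginDescription

        object DescriptorDemo {
          def main(args: Array[String]): Unit = {
            val xml =
              """<plugin>
                |  <name>demo</name>
                |  <classname>demo.DemoPlugin</classname>
                |</plugin>""".stripMargin
            val pd = PluginDescription.fromXML(new ByteArrayInputStream(xml.getBytes("UTF-8")))
            // Case class equality now works for descriptors.
            assert(pd == PluginDescription("demo", "demo.DemoPlugin"))
          }
        }
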
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala b/src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala
deleted file mode 100644
index c5da249..0000000
--- a/src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Lex Spoon
- */
-
-package scala.tools.nsc
-package plugins
-
-/** ...
- *
- * @author Lex Spoon
- * @version 1.0, 2007-5-21
- */
-class PluginLoadException(filename: String, cause: Exception)
-extends Exception(cause)
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index 736bd82..12f9aeb 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -7,7 +7,9 @@
 package scala.tools.nsc
 package plugins
 
-import io.{ File, Path }
+import scala.reflect.io.{ File, Path }
+import scala.tools.nsc.util.ClassPath
+import scala.tools.util.PathResolver.Defaults
 
 /** Support for run-time loading of compiler plugins.
  *
@@ -15,8 +17,7 @@ import io.{ File, Path }
  *  @version 1.1, 2009/1/2
  *  Updated 2009/1/2 by Anders Bach Nielsen: Added features to implement SIP 00002
  */
-trait Plugins {
-  self: Global =>
+trait Plugins { global: Global =>
 
   /** Load a rough list of the plugins.  For speed, it
    *  does not instantiate a compiler run.  Therefore it cannot
@@ -24,9 +25,21 @@ trait Plugins {
    *  filtered from the final list of plugins.
    */
   protected def loadRoughPluginsList(): List[Plugin] = {
-    val jars = settings.plugin.value map Path.apply
-    val dirs = (settings.pluginsDir.value split File.pathSeparator).toList map Path.apply
-    val classes = Plugin.loadAllFrom(jars, dirs, settings.disable.value)
+    def asPath(p: String) = ClassPath split p
+    val paths  = settings.plugin.value filter (_ != "") map (s => asPath(s) map Path.apply)
+    val dirs   = {
+      def injectDefault(s: String) = if (s.isEmpty) Defaults.scalaPluginPath else s
+      asPath(settings.pluginsDir.value) map injectDefault map Path.apply
+    }
+    val maybes = Plugin.loadAllFrom(paths, dirs, settings.disable.value)
+    val (goods, errors) = maybes partition (_.isSuccess)
+    // Explicit parameterization of recover to suppress -Xlint warning about inferred Any
+    errors foreach (_.recover[Any] {
+      // legacy behavior ignores altogether, so at least warn devs
+      case e: MissingPluginException => if (global.isDeveloper) warning(e.getMessage)
+      case e: Exception              => inform(e.getMessage)
+    })
+    val classes = goods map (_.get)  // flatten
 
     // Each plugin must only be instantiated once. A common pattern
     // is to register annotation checkers during object construction, so
@@ -34,7 +47,7 @@ trait Plugins {
     classes map (Plugin.instantiate(_, this))
   }
 
-  protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList
+  protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList()
 
   /** Load all available plugins.  Skips plugins that
    *  either have the same name as another one, or which
@@ -55,7 +68,7 @@ trait Plugins {
       def withPlug          = plug :: pick(tail, plugNames + plug.name, phaseNames ++ plugPhaseNames)
       lazy val commonPhases = phaseNames intersect plugPhaseNames
 
-      def note(msg: String): Unit = if (settings.verbose.value) inform(msg format plug.name)
+      def note(msg: String): Unit = if (settings.verbose) inform(msg format plug.name)
       def fail(msg: String)       = { note(msg) ; withoutPlug }
 
       if (plugNames contains plug.name)
@@ -72,30 +85,21 @@ trait Plugins {
 
     val plugs = pick(roughPluginsList, Set(), (phasesSet map (_.phaseName)).toSet)
 
-    /** Verify requirements are present. */
+    // Verify required plugins are present.
     for (req <- settings.require.value ; if !(plugs exists (_.name == req)))
       globalError("Missing required plugin: " + req)
 
-    /** Process plugin options. */
-    def namec(plug: Plugin) = plug.name + ":"
-    def optList(xs: List[String], p: Plugin) = xs filter (_ startsWith namec(p))
-    def doOpts(p: Plugin): List[String] =
-      optList(settings.pluginOptions.value, p) map (_ stripPrefix namec(p))
-
-    for (p <- plugs) {
-      val opts = doOpts(p)
-      if (!opts.isEmpty)
-        p.processOptions(opts, globalError)
-    }
-
-    /** Verify no non-existent plugin given with -P */
-    for (opt <- settings.pluginOptions.value ; if plugs forall (p => optList(List(opt), p).isEmpty))
-      globalError("bad option: -P:" + opt)
+    // Verify no non-existent plugin given with -P
+    for {
+      opt <- settings.pluginOptions.value
+      if !(plugs exists (opt startsWith _.name + ":"))
+    } globalError("bad option: -P:" + opt)
 
-    plugs
+    // Plugins may opt out, unless we just want to show info
+    plugs filter (p => p.init(p.options, globalError) || (settings.debug && settings.isInfo))
   }
 
-  lazy val plugins: List[Plugin] = loadPlugins
+  lazy val plugins: List[Plugin] = loadPlugins()
 
   /** A description of all the plugins that are loaded */
   def pluginDescriptions: String =
@@ -106,7 +110,7 @@ trait Plugins {
    * @see phasesSet
    */
   protected def computePluginPhases(): Unit =
-    phasesSet ++= (plugins flatMap (_.components))
+    for (p <- plugins; c <- p.components) addToPhasesSet(c, c.description)
 
   /** Summary of the options for all loaded plugins */
   def pluginOptionsHelp: String =
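
    The -P validation above now matches each option against the "name:" prefix of a surviving
    plugin. A simplified standalone restatement of that check, with an invented plugin name,
    purely for clarity:

        object PluginOptionCheck {
          // "bad option: -P:..." is reported when no loaded plugin's "name:" prefixes the option.
          def badOptions(pluginNames: List[String], pluginOptions: List[String]): List[String] =
            pluginOptions filterNot (opt => pluginNames exists (n => opt startsWith n + ":"))

          def main(args: Array[String]): Unit = {
            // -P:demo:enable arrives in settings.pluginOptions as "demo:enable"
            println(badOptions(List("demo"), List("demo:enable", "typo:flag")))  // List(typo:flag)
          }
        }
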
diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
index c7ee11d..16d4324 100644
--- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
@@ -18,22 +18,20 @@ abstract class AbstractReporter extends Reporter {
   def display(pos: Position, msg: String, severity: Severity): Unit
   def displayPrompt(): Unit
 
-  private val positions = new mutable.HashMap[Position, Severity]
+  private val positions = mutable.Map[Position, Severity]() withDefaultValue INFO
+  private val messages  = mutable.Map[Position, List[String]]() withDefaultValue Nil
 
   override def reset() {
-    super.reset
-    positions.clear
+    super.reset()
+    positions.clear()
+    messages.clear()
   }
 
   private def isVerbose   = settings.verbose.value
   private def noWarnings  = settings.nowarnings.value
   private def isPromptSet = settings.prompt.value
 
-  protected def info0(pos: Position, msg: String, _severity: Severity, force: Boolean) {
-    val severity =
-      if (settings.fatalWarnings.value && _severity == WARNING) ERROR
-      else _severity
-
+  protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean) {
     if (severity == INFO) {
       if (isVerbose || force) {
         severity.count += 1
@@ -41,19 +39,20 @@ abstract class AbstractReporter extends Reporter {
       }
     }
     else {
-      val hidden = testAndLog(pos, severity)
+      val hidden = testAndLog(pos, severity, msg)
       if (severity == WARNING && noWarnings) ()
       else {
         if (!hidden || isPromptSet) {
           severity.count += 1
           display(pos, msg, severity)
-        } else if (settings.debug.value) {
+        }
+        else if (settings.debug) {
           severity.count += 1
           display(pos, "[ suppressed ] " + msg, severity)
         }
 
         if (isPromptSet)
-          displayPrompt
+          displayPrompt()
       }
     }
   }
@@ -61,12 +60,20 @@ abstract class AbstractReporter extends Reporter {
   /** Logs a position and returns true if it was already logged.
    *  @note  Two positions are considered identical for logging if they have the same point.
    */
-  private def testAndLog(pos: Position, severity: Severity): Boolean =
+  private def testAndLog(pos: Position, severity: Severity, msg: String): Boolean =
     pos != null && pos.isDefined && {
       val fpos = pos.focus
-      (positions get fpos) match {
-        case Some(level) if level >= severity => true
-        case _                                => positions += (fpos -> severity) ; false
+      val suppress = positions(fpos) match {
+        case ERROR                         => true  // already error at position
+        case highest if highest > severity => true  // already message higher than present severity
+        case `severity`                    => messages(fpos) contains msg // already issued this exact message
+        case _                             => false // good to go
+      }
+
+      suppress || {
+        positions(fpos) = severity
+        messages(fpos) ::= msg
+        false
       }
     }
 }
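
    The new testAndLog suppresses a diagnostic when the position already carries an error, a
    more severe message, or the identical message at the same severity. A simplified model of
    that rule, using plain Int severities rather than the compiler's Severity type:

        object SuppressionSketch {
          val INFO = 0; val WARNING = 1; val ERROR = 2

          // seen: highest severity already reported at this position (INFO if none yet);
          // seenMsgs: messages already reported there.
          def suppress(seen: Int, seenMsgs: List[String], severity: Int, msg: String): Boolean =
            seen == ERROR ||
            seen > severity ||
            (seen == severity && (seenMsgs contains msg))

          def main(args: Array[String]): Unit = {
            println(suppress(WARNING, List("unused import"), WARNING, "unused import")) // true: exact repeat
            println(suppress(WARNING, List("unused import"), ERROR, "type mismatch"))   // false: higher severity
          }
        }
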
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index e847fb5..3f210a5 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -3,11 +3,13 @@
  * @author Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package reporters
 
 import java.io.{ BufferedReader, IOException, PrintWriter }
 import scala.reflect.internal.util._
+import StringOps._
 
 /**
  * This class implements a Reporter that displays messages on a text
@@ -34,15 +36,15 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
   }
 
   /** Returns the number of errors issued totally as a string.
-   *
-   *  @param severity ...
-   *  @return         ...
    */
   private def getCountString(severity: Severity): String =
     StringOps.countElementsAsString((severity).count, label(severity))
 
   /** Prints the message. */
-  def printMessage(msg: String) { writer.print(msg + "\n"); writer.flush() }
+  def printMessage(msg: String) {
+    writer print trimAllTrailingSpace(msg) + "\n"
+    writer.flush()
+  }
 
   /** Prints the message with the given position indication. */
   def printMessage(posIn: Position, msg: String) {
@@ -52,17 +54,7 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
     printMessage(pos, clabel(severity) + msg)
   }
 
-  /**
-   *  @param pos ...
-   */
-  def printSourceLine(pos: Position) {
-    printMessage(pos.lineContent.stripLineEnd)
-    printColumnMarker(pos)
-  }
-
   /** Prints the column marker of the given position.
-   *
-   *  @param pos ...
    */
   def printColumnMarker(pos: Position) =
     if (pos.isDefined) { printMessage(" " * (pos.column - 1) + "^") }
@@ -94,6 +86,5 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
     }
   }
 
-  private def abort(msg: String) = throw new Error(msg)
   override def flush() { writer.flush() }
 }
diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
index 8871ae6..68362c0 100644
--- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
 package reporters
 
 import scala.reflect.internal.util._
-import scala.reflect.internal.util.StringOps._
 
 /**
  * This interface provides methods to issue information, warning and
@@ -59,15 +58,15 @@ abstract class Reporter {
   /** For sending a message which should not be labeled as a warning/error,
    *  but also shouldn't require -verbose to be visible.
    */
-  def echo(msg: String): Unit                                = info(NoPosition, msg, true)
-  def echo(pos: Position, msg: String): Unit                 = info(pos, msg, true)
+  def echo(msg: String): Unit                                = info(NoPosition, msg, force = true)
+  def echo(pos: Position, msg: String): Unit                 = info(pos, msg, force = true)
 
   /** Informational messages, suppressed unless -verbose or force=true. */
   def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force)
 
   /** Warnings and errors. */
-  def warning(pos: Position, msg: String): Unit              = withoutTruncating(info0(pos, msg, WARNING, false))
-  def error(pos: Position, msg: String): Unit                = withoutTruncating(info0(pos, msg, ERROR, false))
+  def warning(pos: Position, msg: String): Unit              = withoutTruncating(info0(pos, msg, WARNING, force = false))
+  def error(pos: Position, msg: String): Unit                = withoutTruncating(info0(pos, msg, ERROR, force = false))
   def incompleteInputError(pos: Position, msg: String): Unit = {
     if (incompleteHandled) incompleteHandler(pos, msg)
     else error(pos, msg)
@@ -81,10 +80,4 @@ abstract class Reporter {
     WARNING.count     = 0
     cancelled         = false
   }
-
-  // sbt compat
-  @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
-  def countElementsAsString(n: Int, elements: String): String = StringOps.countElementsAsString(n, elements)
-  @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
-  def countAsString(n: Int): String = StringOps.countAsString(n)
 }
diff --git a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
index 34e2a8a..04c5bdf 100644
--- a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
@@ -14,7 +14,7 @@ import scala.reflect.internal.util.Position
  * console.
  */
 class StoreReporter extends Reporter {
-  class Info(val pos: Position, val msg: String, val severity: Severity) {
+  case class Info(pos: Position, msg: String, severity: Severity) {
     override def toString() = "pos: " + pos + " " + msg + " " + severity
   }
   val infos = new mutable.LinkedHashSet[Info]
diff --git a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala b/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
deleted file mode 100644
index 10e9982..0000000
--- a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
+++ /dev/null
@@ -1,102 +0,0 @@
-package scala.tools.nsc.scratchpad
-
-import java.io.{FileInputStream, InputStreamReader, IOException}
-
-import scala.runtime.ScalaRunTime.stringOf
-import java.lang.reflect.InvocationTargetException
-import scala.reflect.runtime.ReflectionUtils._
-import scala.collection.mutable.ArrayBuffer
-
-@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
-class Mixer {
-
-  protected val stdSeparator = "//> "
-  protected val ctdSeparator = "//| "
-  protected val sepColumn = 50
-  protected val tabInc = 8
-
-  type Comments = Seq[(Int, Array[Char])]
-
-  def parseComments(comments: Array[Char]): Iterator[(Int, Array[Char])] = new Iterator[(Int, Array[Char])] {
-    var idx = 0
-    def hasNext = idx < comments.length
-    def next() = {
-      val nextSpace = comments indexOf (' ', idx)
-      var nextNL = comments indexOf ('\n', nextSpace + 1)
-      if (nextNL < 0) nextNL = comments.length
-      val result =
-        (new String(comments.slice(idx, nextSpace)).toInt, comments.slice(nextSpace + 1, nextNL))
-      idx = nextNL + 1
-      result
-    }
-  }
-
-  def mix(source: Array[Char], comments: Array[Char]): Array[Char] = {
-    val mixed = new ArrayBuffer[Char]
-    var written = 0
-    def align() = {
-      var idx = mixed.lastIndexOf('\n') + 1
-      var col = 0
-      while (idx < mixed.length) {
-        col =
-          if (mixed(idx) == '\t') (col / tabInc) * tabInc + tabInc
-          else col + 1
-        idx += 1
-      }
-      if (col > sepColumn) {
-        mixed += '\n'
-        col = 0
-      }
-      while (col < sepColumn) {
-        mixed += ' '
-        col += 1
-      }
-    }
-    for ((offset, cs) <- parseComments(comments)) {
-      val sep =
-        if (written < offset) {
-          for (i <- written until offset) mixed += source(i)
-          written = offset
-          stdSeparator
-        } else {
-          mixed += '\n'
-          ctdSeparator
-        }
-      align()
-      mixed ++= sep ++= cs
-    }
-    mixed ++= source.view(written, source.length)
-    mixed.toArray
-  }
-
-}
-
-object Mixer extends Mixer {
-
-  def contents(name: String): Array[Char] = {
-    val page = new Array[Char](2 << 14)
-    val buf = new ArrayBuffer[Char]
-    val in = new FileInputStream(name)
-    val rdr = new InputStreamReader(in)
-    var nread = 0
-    do {
-      nread = rdr.read(page, 0, page.length)
-      buf ++= (if (nread == page.length) page else page.take(nread))
-    } while (nread >= 0)
-    buf.toArray
-  }
-
-  def main(args: Array[String]) {
-    val mixer = new Mixer
-    try {
-      require(args.length == 2, "required arguments: file1 file2")
-      val source = contents(args(0))
-      val comments = contents(args(1))
-      val mixed = mixer.mix(source, comments)
-      println(mixed.mkString)
-    } catch {
-      case ex: IOException =>
-        println("error: "+ ex.getMessage)
-    }
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala b/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
deleted file mode 100644
index 01dccd7..0000000
--- a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package scala.tools.nsc
-package scratchpad
-
-import java.io.Writer
-import scala.reflect.internal.util.SourceFile
-import scala.reflect.internal.Chars._
-
-@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
-object SourceInserter {
-  def stripRight(cs: Array[Char]): Array[Char] = {
-    val lines =
-      new String(cs) split "\n"
-    def leftPart(str: String) =
-      (str split """//>|//\|""").head
-    def isContinuation(str: String) =
-      ((str contains "//>") || (str contains "//|")) && (leftPart(str) forall isWhitespace)
-    def stripTrailingWS(str: String) =
-      str take (str lastIndexWhere (!isWhitespace(_))) + 1
-    val prefixes =
-      lines filterNot isContinuation map leftPart map stripTrailingWS
-    (prefixes mkString "\n").toArray
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
index 783e249..8b897b8 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
@@ -3,7 +3,8 @@
  * @author  Paul Phillips
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package settings
 
 trait AbsScalaSettings {
@@ -32,11 +33,4 @@ trait AbsScalaSettings {
   def PhasesSetting(name: String, descr: String, default: String): PhasesSetting
   def StringSetting(name: String, helpArg: String, descr: String, default: String): StringSetting
   def PrefixSetting(name: String, prefix: String, descr: String): PrefixSetting
-
-  /** **/
-  abstract class SettingGroup(val prefix: String) extends AbsSetting {
-    def name = prefix
-    def helpDescription: String = sys.error("todo")
-    def unparse: List[String] = List(name)
-  }
 }
diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
index adabeb0..4727e6d 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
@@ -47,8 +47,6 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
         }
     })
 
-  implicit lazy val SettingOrdering: Ordering[Setting] = Ordering.ordered
-
   trait AbsSetting extends Ordered[Setting] with AbsSettingValue {
     def name: String
     def helpDescription: String
@@ -83,14 +81,6 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
       this
     }
 
-    /** If the appearance of the setting should halt argument processing. */
-    private var isTerminatorSetting = false
-    def shouldStopProcessing = isTerminatorSetting
-    def stopProcessing(): this.type = {
-      isTerminatorSetting = true
-      this
-    }
-
     /** Issue error and return */
     def errorAndValue[T](msg: String, x: T): T = { errorFn(msg) ; x }
 
@@ -110,6 +100,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
 
     /** Attempt to set from a properties file style property value.
      *  Currently used by Eclipse SDT only.
+     *  !!! Needs test.
      */
     def tryToSetFromPropertyValue(s: String): Unit = tryToSet(s :: Nil)
 
@@ -133,7 +124,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
       case _                          => false
     }
     override def hashCode() = name.hashCode + value.hashCode
-    override def toString() = name + " = " + value
+    override def toString() = name + " = " + (if (value == "") "\"\"" else value)
   }
 
   trait InternalSetting extends AbsSetting {
diff --git a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
deleted file mode 100644
index 0bec113..0000000
--- a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package settings
-
-trait AdvancedScalaSettings {
-  self: AbsScalaSettings =>
-
-  abstract class X extends SettingGroup("-X") {
-    val assemextdirs: StringSetting
-    val assemname: StringSetting
-    val assempath: StringSetting
-    val checkinit: BooleanSetting
-    val disableassertions: BooleanSetting
-    val elidebelow: IntSetting
-    val experimental: BooleanSetting
-    val future: BooleanSetting
-    val generatephasegraph: StringSetting
-    val logimplicits: BooleanSetting
-    val mainClass: StringSetting
-    val migration: BooleanSetting
-    val noforwarders: BooleanSetting
-    val nojline: BooleanSetting
-    val nouescape: BooleanSetting
-    val plugin: MultiStringSetting
-    val plugindisable: MultiStringSetting
-    val pluginlist: BooleanSetting
-    val pluginrequire: MultiStringSetting
-    val pluginsdir: StringSetting
-    val print: PhasesSetting
-    val printicode: BooleanSetting
-    val printpos: BooleanSetting
-    val printtypes: BooleanSetting
-    val prompt: BooleanSetting
-    val resident: BooleanSetting
-    val script: StringSetting
-    val showclass: StringSetting
-    val showobject: StringSetting
-    val showphases: BooleanSetting
-    val sourcedir: StringSetting
-    val sourcereader: StringSetting
-  }
-  // def Xexperimental = X.experimental
-  // def Xmigration28 = X.migration
-  // def Xnojline = X.nojline
-  // def Xprint = X.print
-  // def Xprintpos = X.printpos
-  // def Xshowcls = X.showclass
-  // def Xshowobj = X.showobject
-  // def assemextdirs = X.assemextdirs
-  // def assemname = X.assemname
-  // def assemrefs = X.assempath
-  // def checkInit = X.checkinit
-  // def disable = X.plugindisable
-  // def elideLevel = X.elidelevel
-  // def future = X.future
-  // def genPhaseGraph = X.generatephasegraph
-  // def logimplicits = X.logimplicits
-  // def noForwarders = X.noforwarders
-  // def noassertions = X.disableassertions
-  // def nouescape = X.nouescape
-  // def plugin = X.plugin
-  // def pluginsDir = X.pluginsdir
-  // def printtypes = X.printtypes
-  // def prompt = X.prompt
-  // def require = X.require
-  // def resident = X.resident
-  // def script = X.script
-  // def showPhases = X.showphases
-  // def showPlugins = X.pluginlist
-  // def sourceReader = X.sourcereader
-  // def sourcedir = X.sourcedir
-  // def writeICode = X.printicode
-}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala b/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
deleted file mode 100644
index da2c89d..0000000
--- a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package settings
-
-/** Taking flag checking to a somewhat higher level. */
-trait AestheticSettings {
-  def settings: Settings
-
-  // Some(value) if setting has been set by user, None otherwise.
-  def optSetting[T](s: Settings#Setting): Option[T] =
-    if (s.isDefault) None else Some(s.value.asInstanceOf[T])
-
-  def script       = optSetting[String](settings.script)
-  def encoding     = optSetting[String](settings.encoding)
-  def sourceReader = optSetting[String](settings.sourceReader)
-
-  def debug           = settings.debug.value
-  def declsOnly       = false
-  def deprecation     = settings.deprecation.value
-  def experimental    = settings.Xexperimental.value
-  def fatalWarnings   = settings.fatalWarnings.value
-  def feature         = settings.feature.value
-  def future          = settings.future.value
-  def logClasspath    = settings.Ylogcp.value
-  def printStats      = settings.Ystatistics.value
-  def target          = settings.target.value
-  def unchecked       = settings.unchecked.value
-  def verbose         = settings.verbose.value
-  def virtPatmat      = !settings.XoldPatmat.value
-
-  /** Derived values */
-  def jvm           = target startsWith "jvm"
-  def msil          = target == "msil"
-  def verboseDebug  = debug && verbose
-}
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index 5c852ae..8c2b510 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -8,7 +8,7 @@ package nsc
 package settings
 
 import util.ClassPath
-import io.{ Directory, Path, AbstractFile }
+import io.{ Path, AbstractFile }
 
 class FscSettings(error: String => Unit) extends Settings(error) {
   outer =>
@@ -38,13 +38,13 @@ class FscSettings(error: String => Unit) extends Settings(error) {
   private def holdsPath = Set[Settings#Setting](
     d, dependencyfile, pluginsDir, Ygenjavap
   )
-  
+
   override def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = {
     val (r, args) = super.processArguments(arguments, processAll)
     // we need to ensure the files specified with relative locations are absolutized based on the currentDir
     (r, args map {a => absolutizePath(a)})
   }
-  
+
   /**
    * Take an individual path and if it's not absolute turns it into an absolute path based on currentDir.
    * If it's already absolute then it's left alone.
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index e4f9947..3590254 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -9,9 +9,9 @@ package nsc
 package settings
 
 import io.{ AbstractFile, Jar, Path, PlainFile, VirtualDirectory }
-import scala.reflect.internal.util.StringOps
-import scala.collection.mutable.ListBuffer
+import scala.collection.generic.Clearable
 import scala.io.Source
+import scala.reflect.internal.util.StringOps
 import scala.reflect.{ ClassTag, classTag }
 
 /** A mutable Settings object.
@@ -63,40 +63,33 @@ class MutableSettings(val errorFn: String => Unit)
         (checkDependencies, residualArgs)
       case "--" :: xs =>
         (checkDependencies, xs)
+      // discard empties, sometimes they appear because of ant or etc.
+      // but discard carefully, because an empty string is valid as an argument
+      // to an option, e.g. -cp "" .  So we discard them only when they appear
+      // where an option should be, not where an argument to an option should be.
+      case "" :: xs =>
+        loop(xs, residualArgs)
       case x :: xs  =>
-        val isOpt = x startsWith "-"
-        if (isOpt) {
-          val newArgs = parseParams(args)
-          if (args eq newArgs) {
-            errorFn(s"bad option: '$x'")
-            (false, args)
-          }
-          // discard empties, sometimes they appear because of ant or etc.
-          // but discard carefully, because an empty string is valid as an argument
-          // to an option, e.g. -cp "" .  So we discard them only when they appear
-          // in option position.
-          else if (x == "") {
-            loop(xs, residualArgs)
+        if (x startsWith "-") {
+          parseParams(args) match {
+            case newArgs if newArgs eq args => errorFn(s"bad option: '$x'") ; (false, args)
+            case newArgs                    => loop(newArgs, residualArgs)
           }
-          else lookupSetting(x) match {
-            case Some(s) if s.shouldStopProcessing  => (checkDependencies, newArgs)
-            case _                                  => loop(newArgs, residualArgs)
-          }
-        }
-        else {
-          if (processAll) loop(xs, residualArgs :+ x)
-          else (checkDependencies, args)
         }
+        else if (processAll)
+          loop(xs, residualArgs :+ x)
+        else
+          (checkDependencies, args)
     }
     loop(arguments, Nil)
   }
-  def processArgumentString(params: String) = processArguments(splitParams(params), true)
+  def processArgumentString(params: String) = processArguments(splitParams(params), processAll = true)
 
   /** Create a new Settings object, copying all user-set values.
    */
   def copy(): Settings = {
     val s = new Settings()
-    s.processArguments(recreateArgs, true)
+    s.processArguments(recreateArgs, processAll = true)
     s
   }
 
@@ -115,7 +108,7 @@ class MutableSettings(val errorFn: String => Unit)
 
   /** Split the given line into parameters.
    */
-  def splitParams(line: String) = cmd.Parser.tokenize(line, errorFn)
+  def splitParams(line: String) = cmd.CommandLineParser.tokenize(line, errorFn)
 
   /** Returns any unprocessed arguments.
    */
@@ -134,7 +127,7 @@ class MutableSettings(val errorFn: String => Unit)
 
     // if arg is of form -Xfoo:bar,baz,quux
     def parseColonArg(s: String): Option[List[String]] = {
-      val (p, args) = StringOps.splitWhere(s, _ == ':', true) getOrElse (return None)
+      val (p, args) = StringOps.splitWhere(s, _ == ':', doDropIndex = true) getOrElse (return None)
 
       // any non-Nil return value means failure and we return s unmodified
       tryToSetIfExists(p, (args split ",").toList, (s: Setting) => s.tryToSetColon _)
@@ -184,7 +177,7 @@ class MutableSettings(val errorFn: String => Unit)
   * The class loader defining `T` should provide resources `app.class.path`
   * and `boot.class.path`.  These resources should contain the application
   * and boot classpaths in the same form as would be passed on the command line.*/
-  def embeddedDefaults[T: ClassTag]: Unit =
+  def embeddedDefaults[T: ClassTag]: Unit = // called from sbt and repl
     embeddedDefaults(classTag[T].runtimeClass.getClassLoader)
 
   /** Initializes these settings for embedded use by a class from the given class loader.
@@ -248,7 +241,7 @@ class MutableSettings(val errorFn: String => Unit)
     /** Add a destination directory for sources found under srcdir.
      *  Both directories should exits.
      */
-    def add(srcDir: String, outDir: String): Unit =
+    def add(srcDir: String, outDir: String): Unit = // used in ide?
       add(checkDir(AbstractFile.getDirectory(srcDir), srcDir),
           checkDir(AbstractFile.getDirectory(outDir), outDir))
 
@@ -256,8 +249,7 @@ class MutableSettings(val errorFn: String => Unit)
     private def checkDir(dir: AbstractFile, name: String, allowJar: Boolean = false): AbstractFile = (
       if (dir != null && dir.isDirectory)
         dir
-// was:      else if (allowJar && dir == null && Path.isJarOrZip(name, false))
-      else if (allowJar && dir == null && Jar.isJarOrZip(name, false))
+      else if (allowJar && dir == null && Jar.isJarOrZip(name, examineFile = false))
         new PlainFile(Path(name))
       else
         throw new FatalError(name + " does not exist or is not a directory")
@@ -268,7 +260,7 @@ class MutableSettings(val errorFn: String => Unit)
      */
     def setSingleOutput(outDir: String) {
       val dst = AbstractFile.getDirectory(outDir)
-      setSingleOutput(checkDir(dst, outDir, true))
+      setSingleOutput(checkDir(dst, outDir, allowJar = true))
     }
 
     def getSingleOutput: Option[AbstractFile] = singleOutDir
@@ -331,12 +323,12 @@ class MutableSettings(val errorFn: String => Unit)
         case Some(d) =>
           d match {
               case _: VirtualDirectory | _: io.ZipArchive => Nil
-              case _                   => List(d.lookupPathUnchecked(srcPath, false))
+              case _                   => List(d.lookupPathUnchecked(srcPath, directory = false))
           }
         case None =>
           (outputs filter (isBelow _).tupled) match {
             case Nil => Nil
-            case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false))
+            case matches => matches.map(_._1.lookupPathUnchecked(srcPath, directory = false))
           }
       }
     }
@@ -390,7 +382,7 @@ class MutableSettings(val errorFn: String => Unit)
     def max = range map (_._2) getOrElse IntMax
 
     override def value_=(s: Int) =
-      if (isInputValid(s)) super.value_=(s) else errorMsg
+      if (isInputValid(s)) super.value_=(s) else errorMsg()
 
     // Validate that min and max are consistent
     assert(min <= max)
@@ -422,7 +414,7 @@ class MutableSettings(val errorFn: String => Unit)
       if (args.isEmpty) errorAndValue("missing argument", None)
       else parseArgument(args.head) match {
         case Some(i)  => value = i ; Some(args.tail)
-        case None     => errorMsg ; None
+        case None     => errorMsg() ; None
       }
 
     def unparse: List[String] =
@@ -443,9 +435,20 @@ class MutableSettings(val errorFn: String => Unit)
 
     def tryToSet(args: List[String]) = { value = true ; Some(args) }
     def unparse: List[String] = if (value) List(name) else Nil
-    override def tryToSetFromPropertyValue(s : String) {
+    override def tryToSetFromPropertyValue(s : String) { // used from ide
       value = s.equalsIgnoreCase("true")
     }
+    override def tryToSetColon(args: List[String]) = args match {
+      case Nil => tryToSet(Nil)
+      case List(x) =>
+        if (x.equalsIgnoreCase("true")) {
+          value = true
+          Some(Nil)
+        } else if (x.equalsIgnoreCase("false")) {
+          value = false
+          Some(Nil)
+        } else errorAndValue("'" + x + "' is not a valid choice for '" + name + "'", None)
+    }
   }
 
   /** A special setting for accumulating arguments like -Dfoo=bar. */
@@ -494,8 +497,6 @@ class MutableSettings(val errorFn: String => Unit)
     descr: String,
     default: ScalaVersion)
   extends Setting(name, descr) {
-    import ScalaVersion._
-    
     type T = ScalaVersion
     protected var v: T = NoScalaVersion
 
@@ -503,14 +504,14 @@ class MutableSettings(val errorFn: String => Unit)
       value = default
       Some(args)
     }
-    
+
     override def tryToSetColon(args: List[String]) = args match {
       case Nil      => value = default; Some(Nil)
       case x :: xs  => value = ScalaVersion(x, errorFn) ; Some(xs)
     }
-    
+
     override def tryToSetFromPropertyValue(s: String) = tryToSet(List(s))
-    
+
     def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}")
 
     withHelpSyntax(s"${name}:<${arg}>")
@@ -553,7 +554,7 @@ class MutableSettings(val errorFn: String => Unit)
     name: String,
     val arg: String,
     descr: String)
-  extends Setting(name, descr) {
+  extends Setting(name, descr) with Clearable {
     type T = List[String]
     protected var v: T = Nil
     def appendToValue(str: String) { value ++= List(str) }
@@ -565,7 +566,8 @@ class MutableSettings(val errorFn: String => Unit)
       Some(rest)
     }
     override def tryToSetColon(args: List[String]) = tryToSet(args)
-    override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList)
+    override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) // used from ide
+    def clear(): Unit = (v = Nil)
     def unparse: List[String] = value map (name + ":" + _)
 
     withHelpSyntax(name + ":<" + arg + ">")
@@ -599,7 +601,7 @@ class MutableSettings(val errorFn: String => Unit)
     }
     def unparse: List[String] =
       if (value == default) Nil else List(name + ":" + value)
-    override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil)
+    override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil) // used from ide
 
     withHelpSyntax(name + ":<" + helpArg + ">")
   }
@@ -619,44 +621,49 @@ class MutableSettings(val errorFn: String => Unit)
     name: String,
     descr: String,
     default: String
-  ) extends Setting(name, mkPhasesHelp(descr, default)) {
+  ) extends Setting(name, mkPhasesHelp(descr, default)) with Clearable {
     private[nsc] def this(name: String, descr: String) = this(name, descr, "")
 
     type T = List[String]
-    protected var v: T = Nil
-    override def value = if (v contains "all") List("all") else super.value
-    private lazy val (numericValues, stringValues) =
-      value filterNot (_ == "" ) partition (_ forall (ch => ch.isDigit || ch == '-'))
-
-    /** A little ad-hoc parsing.  If a string is not the name of a phase, it can also be:
-     *    a phase id: 5
-     *    a phase id range: 5-10 (inclusive of both ends)
-     *    a range with no start: -5 means up to and including 5
-     *    a range with no end: 10- means 10 until completion.
-     */
-    private def stringToPhaseIdTest(s: String): Int => Boolean = (s indexOf '-') match {
-      case -1  => (_ == s.toInt)
-      case 0   => (_ <= s.tail.toInt)
-      case idx =>
-        if (s.last == '-') (_ >= s.init.toInt)
-        else (s splitAt idx) match {
-          case (s1, s2) => (id => id >= s1.toInt && id <= s2.tail.toInt)
-        }
-    }
-    private lazy val phaseIdTest: Int => Boolean =
-      (numericValues map stringToPhaseIdTest) match {
-        case Nil    => _ => false
-        case fns    => fns.reduceLeft((f1, f2) => id => f1(id) || f2(id))
+    private[this] var _v: T = Nil
+    private[this] var _numbs: List[(Int,Int)] = Nil
+    private[this] var _names: T = Nil
+    //protected var v: T = Nil
+    protected def v: T = _v
+    protected def v_=(t: T): Unit = {
+      // throws NumberFormat on bad range (like -5-6)
+      def asRange(s: String): (Int,Int) = (s indexOf '-') match {
+        case -1 => (s.toInt, s.toInt)
+        case 0  => (-1, s.tail.toInt)
+        case i if s.last == '-' => (s.init.toInt, Int.MaxValue)
+        case i  => (s.take(i).toInt, s.drop(i+1).toInt)
       }
+      val numsAndStrs = t filter (_.nonEmpty) partition (_ forall (ch => ch.isDigit || ch == '-'))
+      _numbs = numsAndStrs._1 map asRange
+      _names = numsAndStrs._2
+      _v     = t
+    }
+    override def value = if (v contains "all") List("all") else super.value // i.e., v
+    private def numericValues = _numbs
+    private def stringValues  = _names
+    private def phaseIdTest(i: Int): Boolean = numericValues exists (_ match {
+      case (min, max) => min <= i && i <= max
+    })
 
     def tryToSet(args: List[String]) =
       if (default == "") errorAndValue("missing phase", None)
-      else { tryToSetColon(List(default)) ; Some(args) }
+      else tryToSetColon(List(default)) map (_ => args)
+
+    override def tryToSetColon(args: List[String]) = try {
+      args match {
+        case Nil  => if (default == "") errorAndValue("missing phase", None)
+                     else tryToSetColon(List(default))
+        case xs   => value = (value ++ xs).distinct.sorted ; Some(Nil)
+      }
+    } catch { case _: NumberFormatException => None }
+
+    def clear(): Unit = (v = Nil)
 
-    override def tryToSetColon(args: List[String]) = args match {
-      case Nil  => if (default == "") errorAndValue("missing phase", None) else tryToSetColon(List(default))
-      case xs   => value = (value ++ xs).distinct.sorted ; Some(Nil)
-    }
     // we slightly abuse the usual meaning of "contains" here by returning
     // true if our phase list contains "all", regardless of the incoming argument
     def contains(phName: String)     = doAllPhases || containsName(phName)
@@ -672,4 +679,14 @@ class MutableSettings(val errorFn: String => Unit)
       else name + "[:phases]"
     )
   }
+
+  /** Internal use - syntax enhancements. */
+  protected class EnableSettings[T <: BooleanSetting](val s: T) {
+    def enablingIfNotSetByUser(toEnable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toEnable foreach (sett => if (!sett.isSetByUser) sett.value = s.value))
+    def enabling(toEnable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toEnable foreach (_.value = s.value))
+    def disabling(toDisable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toDisable foreach (_.value = !s.value))
+    def andThen(f: s.T => Unit): s.type = s withPostSetHook (setting => f(setting.value))
+  }
+  import scala.language.implicitConversions
+  protected implicit def installEnableSettings[T <: BooleanSetting](s: T): EnableSettings[T] = new EnableSettings(s)
 }
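
    The removed comment explained the phase-id syntax ("5", "-5", "10-", "5-10"); the new code
    folds that parsing into asRange. A standalone sketch of the same parsing and the resulting
    id test, simplified from the patch:

        object PhaseRangeSketch {
          // "5" -> (5,5), "-5" -> (-1,5), "10-" -> (10,MaxValue), "5-10" -> (5,10);
          // malformed input (e.g. "-5-6") throws NumberFormatException, as in the patch.
          def asRange(s: String): (Int, Int) = (s indexOf '-') match {
            case -1                 => (s.toInt, s.toInt)
            case 0                  => (-1, s.tail.toInt)
            case _ if s.last == '-' => (s.init.toInt, Int.MaxValue)
            case i                  => (s.take(i).toInt, s.drop(i + 1).toInt)
          }

          def phaseIdTest(ranges: List[(Int, Int)], id: Int): Boolean =
            ranges exists { case (min, max) => min <= id && id <= max }

          def main(args: Array[String]): Unit = {
            val ranges = List("5", "-3", "10-") map asRange
            println(ranges)                  // List((5,5), (-1,3), (10,2147483647))
            println(phaseIdTest(ranges, 7))  // false
            println(phaseIdTest(ranges, 12)) // true
          }
        }
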
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index dbfaa2c..a643a08 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -4,7 +4,8 @@
  */
 // $Id$
 
-package scala.tools
+package scala
+package tools
 package nsc
 package settings
 
@@ -19,7 +20,7 @@ trait ScalaSettings extends AbsScalaSettings
   self: MutableSettings =>
 
   /** Set of settings */
-  protected lazy val allSettings = mutable.HashSet[Setting]()
+  protected[scala] lazy val allSettings = mutable.HashSet[Setting]()
 
   /** Against my better judgment, giving in to martin here and allowing
    *  CLASSPATH to be used automatically.  So for the user-specified part
@@ -38,84 +39,85 @@ trait ScalaSettings extends AbsScalaSettings
   protected def futureSettings = List[BooleanSetting]()
 
   /** Enabled under -optimise. */
-  protected def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce)
+  def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce, YconstOptimization)
 
-  /** Internal use - syntax enhancements. */
-  private class EnableSettings[T <: BooleanSetting](val s: T) {
-    def enabling(toEnable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toEnable foreach (_.value = s.value))
-    def andThen(f: s.T => Unit): s.type                  = s withPostSetHook (setting => f(setting.value))
-  }
-  private implicit def installEnableSettings[T <: BooleanSetting](s: T) = new EnableSettings(s)
+  /** If any of these settings is enabled, the compiler should print a message and exit.  */
+  def infoSettings = List[Setting](help, Xhelp, Yhelp, showPlugins, showPhases, genPhaseGraph)
+
+  /** Is an info setting set? */
+  def isInfo = infoSettings exists (_.isSetByUser)
 
   /** Disable a setting */
   def disable(s: Setting) = allSettings -= s
 
   val jvmargs  = PrefixSetting("-J<flag>", "-J", "Pass <flag> directly to the runtime system.")
   val defines  = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.")
-  val toolcp   = PathSetting("-toolcp", "Add to the runner classpath.", "")
+  /*val toolcp =*/ PathSetting("-toolcp", "Add to the runner classpath.", "")
   val nobootcp = BooleanSetting("-nobootcp", "Do not use the boot classpath for the scala jars.")
 
   /**
    *  Standard settings
    */
   // argfiles is only for the help message
-  val argfiles      = BooleanSetting    ("@<file>", "A text file containing compiler arguments (options and source files)")
+  /*val argfiles = */ BooleanSetting    ("@<file>", "A text file containing compiler arguments (options and source files)")
   val classpath     = PathSetting       ("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp"
   val d             = OutputSetting     (outputDirs, ".")
   val nospecialization = BooleanSetting    ("-no-specialization", "Ignore @specialize annotations.")
   val language      = MultiStringSetting("-language", "feature", "Enable one or more language features.")
 
+  /*
+   * The "-Xsource" option is intended to be used mainly
+   * through this helper.
+   */
+  lazy val isScala211: Boolean = (source.value >= ScalaVersion("2.11.0"))
+
   /**
    * -X "Advanced" settings
    */
-  val Xhelp         = BooleanSetting    ("-X", "Print a synopsis of advanced options.")
-  val assemname     = StringSetting     ("-Xassem-name", "file", "(Requires -target:msil) Name of the output assembly.", "").dependsOn(target, "msil")
-  val assemrefs     = StringSetting     ("-Xassem-path", "path", "(Requires -target:msil) List of assemblies referenced by the program.", ".").dependsOn(target, "msil")
-  val assemextdirs  = StringSetting     ("-Xassem-extdirs", "dirs", "(Requires -target:msil) List of directories containing assemblies.  default:lib", Defaults.scalaLibDir.path).dependsOn(target, "msil")
-  val sourcedir     = StringSetting     ("-Xsourcedir", "directory", "(Requires -target:msil) Mirror source folder structure in output directory.", ".").dependsOn(target, "msil")
-  val checkInit     = BooleanSetting    ("-Xcheckinit", "Wrap field accessors to throw an exception on uninitialized access.")
-  val noassertions  = BooleanSetting    ("-Xdisable-assertions", "Generate no assertions or assumptions.")
-  val elidebelow    = IntSetting        ("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument",
+  val Xhelp              = BooleanSetting      ("-X", "Print a synopsis of advanced options.")
+  val checkInit          = BooleanSetting      ("-Xcheckinit", "Wrap field accessors to throw an exception on uninitialized access.")
+  val developer          = BooleanSetting      ("-Xdev", "Indicates user is a developer - issue warnings about anything which seems amiss")
+  val noassertions       = BooleanSetting      ("-Xdisable-assertions", "Generate no assertions or assumptions.")
+  val elidebelow         = IntSetting          ("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument",
                                                 elidable.MINIMUM, None, elidable.byName get _)
-  val noForwarders  = BooleanSetting    ("-Xno-forwarders", "Do not generate static forwarders in mirror classes.")
-  val genPhaseGraph = StringSetting     ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot.", "")
-  val XlogImplicits = BooleanSetting    ("-Xlog-implicits", "Show more detail on why some implicits are not applicable.")
-  val logImplicitConv = BooleanSetting  ("-Xlog-implicit-conversions", "Print a message whenever an implicit conversion is inserted.")
-  val logReflectiveCalls = BooleanSetting("-Xlog-reflective-calls", "Print a message when a reflective method call is generated")
-  val logFreeTerms  = BooleanSetting    ("-Xlog-free-terms", "Print a message when reification creates a free term.")
-  val logFreeTypes  = BooleanSetting    ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
-  val maxClassfileName = IntSetting     ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
-  val Xmigration    = ScalaVersionSetting("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", AnyScalaVersion)
-  val nouescape     = BooleanSetting    ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
-  val Xnojline      = BooleanSetting    ("-Xnojline", "Do not use JLine for editing.")
-  val Xverify       = BooleanSetting    ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
-  val plugin        = MultiStringSetting("-Xplugin", "file", "Load one or more plugins from files.")
-  val disable       = MultiStringSetting("-Xplugin-disable", "plugin", "Disable the given plugin(s).")
-  val showPlugins   = BooleanSetting    ("-Xplugin-list", "Print a synopsis of loaded plugins.")
-  val require       = MultiStringSetting("-Xplugin-require", "plugin", "Abort unless the given plugin(s) are available.")
-  val pluginsDir    = StringSetting     ("-Xpluginsdir", "path", "Path to search compiler plugins.", Defaults.scalaPluginPath)
-  val Xprint        = PhasesSetting     ("-Xprint", "Print out program after")
-  val writeICode    = PhasesSetting     ("-Xprint-icode", "Log internal icode to *.icode files after", "icode")
-  val Xprintpos     = BooleanSetting    ("-Xprint-pos", "Print tree positions, as offsets.")
-  val printtypes    = BooleanSetting    ("-Xprint-types", "Print tree types (debugging option).")
-  val prompt        = BooleanSetting    ("-Xprompt", "Display a prompt after each error (debugging option).")
-  val resident      = BooleanSetting    ("-Xresident", "Compiler stays resident: read source filenames from standard input.")
-  val script        = StringSetting     ("-Xscript", "object", "Treat the source file as a script and wrap it in a main method.", "")
-  val mainClass     = StringSetting     ("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d <jar>)", "")
-  val Xshowcls      = StringSetting     ("-Xshow-class", "class", "Show internal representation of class.", "")
-  val Xshowobj      = StringSetting     ("-Xshow-object", "object", "Show internal representation of object.", "")
-  val showPhases    = BooleanSetting    ("-Xshow-phases", "Print a synopsis of compiler phases.")
-  val sourceReader  = StringSetting     ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
-
-  val XoldPatmat    = BooleanSetting    ("-Xoldpatmat", "Use the pre-2.10 pattern matcher. Otherwise, the 'virtualizing' pattern matcher is used in 2.10.")
+  val noForwarders       = BooleanSetting      ("-Xno-forwarders", "Do not generate static forwarders in mirror classes.")
+  val genPhaseGraph      = StringSetting       ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot.", "")
+  val XlogImplicits      = BooleanSetting      ("-Xlog-implicits", "Show more detail on why some implicits are not applicable.")
+  val logImplicitConv    = BooleanSetting      ("-Xlog-implicit-conversions", "Print a message whenever an implicit conversion is inserted.")
+  val logReflectiveCalls = BooleanSetting      ("-Xlog-reflective-calls", "Print a message when a reflective method call is generated")
+  val logFreeTerms       = BooleanSetting      ("-Xlog-free-terms", "Print a message when reification creates a free term.")
+  val logFreeTypes       = BooleanSetting      ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
+  val maxClassfileName   = IntSetting          ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
+  val Xmigration         = ScalaVersionSetting ("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", AnyScalaVersion)
+  val nouescape          = BooleanSetting      ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
+  val Xnojline           = BooleanSetting      ("-Xnojline", "Do not use JLine for editing.")
+  val Xverify            = BooleanSetting      ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
+  val plugin             = MultiStringSetting  ("-Xplugin", "paths", "Load a plugin from each classpath.")
+  val disable            = MultiStringSetting  ("-Xplugin-disable", "plugin", "Disable plugins by name.")
+  val showPlugins        = BooleanSetting      ("-Xplugin-list", "Print a synopsis of loaded plugins.")
+  val require            = MultiStringSetting  ("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.")
+  val pluginsDir         = StringSetting       ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath)
+  val Xprint             = PhasesSetting       ("-Xprint", "Print out program after")
+  val writeICode         = PhasesSetting       ("-Xprint-icode", "Log internal icode to *.icode files after", "icode")
+  val Xprintpos          = BooleanSetting      ("-Xprint-pos", "Print tree positions, as offsets.")
+  val printtypes         = BooleanSetting      ("-Xprint-types", "Print tree types (debugging option).")
+  val prompt             = BooleanSetting      ("-Xprompt", "Display a prompt after each error (debugging option).")
+  val resident           = BooleanSetting      ("-Xresident", "Compiler stays resident: read source filenames from standard input.")
+  val script             = StringSetting       ("-Xscript", "object", "Treat the source file as a script and wrap it in a main method.", "")
+  val mainClass          = StringSetting       ("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d <jar>)", "")
+  val Xshowcls           = StringSetting       ("-Xshow-class", "class", "Show internal representation of class.", "")
+  val Xshowobj           = StringSetting       ("-Xshow-object", "object", "Show internal representation of object.", "")
+  val showPhases         = BooleanSetting      ("-Xshow-phases", "Print a synopsis of compiler phases.")
+  val sourceReader       = StringSetting       ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
+  val strictInference    = BooleanSetting      ("-Xstrict-inference", "Don't infer known-unsound types")
+  val source             = ScalaVersionSetting ("-Xsource", "version", "Treat compiler input as Scala source for the specified version, see SI-8126.", ScalaVersion("2.11")) withPostSetHook ( _ => isScala211)
+
   val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.")
-  val XfullLubs     = BooleanSetting    ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
-  val Xdivergence211 = BooleanSetting   ("-Xdivergence211", "Turn on the 2.11 behavior of implicit divergence not terminating recursive implicit searches (SI-7291).")
+  val XfullLubs         = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
 
   /** Compatibility stubs for options whose value name did
    *  not previously match the option name.
    */
-  def XO = optimise
   def debuginfo = g
   def dependenciesFile = dependencyfile
   def nowarnings = nowarn
@@ -128,10 +130,12 @@ trait ScalaSettings extends AbsScalaSettings
   val overrideObjects = BooleanSetting    ("-Yoverride-objects", "Allow member objects to be overridden.")
   val overrideVars    = BooleanSetting    ("-Yoverride-vars", "Allow vars to be overridden.")
   val Yhelp           = BooleanSetting    ("-Y", "Print a synopsis of private options.")
+  val breakCycles     = BooleanSetting    ("-Ybreak-cycles", "Attempt to break cycles encountered during typing")
   val browse          = PhasesSetting     ("-Ybrowse", "Browse the abstract syntax tree after")
   val check           = PhasesSetting     ("-Ycheck", "Check the tree at the end of")
   val Yshow           = PhasesSetting     ("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after")
   val Xcloselim       = BooleanSetting    ("-Yclosure-elim", "Perform closure elimination.")
+  val YconstOptimization  = BooleanSetting    ("-Yconst-opt", "Perform optimization with constant values.")
   val Ycompacttrees   = BooleanSetting    ("-Ycompact-trees", "Use compact tree printer when displaying trees.")
   val noCompletion    = BooleanSetting    ("-Yno-completion", "Disable tab-completion in the REPL.")
   val Xdce            = BooleanSetting    ("-Ydead-code", "Perform dead code elimination.")
@@ -149,7 +153,6 @@ trait ScalaSettings extends AbsScalaSettings
   val nopredef        = BooleanSetting    ("-Yno-predef", "Compile without importing Predef.")
   val noAdaptedArgs   = BooleanSetting    ("-Yno-adapted-args", "Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver.")
   val Yrecursion      = IntSetting        ("-Yrecursion", "Set recursion depth used when locking symbols.", 0, Some((0, Int.MaxValue)), (_: String) => None)
-  val selfInAnnots    = BooleanSetting    ("-Yself-in-annots", "Include a \"self\" identifier inside of annotations.")
   val Xshowtrees      = BooleanSetting    ("-Yshow-trees", "(Requires -Xprint:) Print detailed ASTs in formatted form.")
   val XshowtreesCompact
                       = BooleanSetting    ("-Yshow-trees-compact", "(Requires -Xprint:) Print detailed ASTs in compact form.")
@@ -157,54 +160,65 @@ trait ScalaSettings extends AbsScalaSettings
                       = BooleanSetting    ("-Yshow-trees-stringified", "(Requires -Xprint:) Print stringifications along with detailed ASTs.")
   val Yshowsyms       = BooleanSetting    ("-Yshow-syms", "Print the AST symbol hierarchy after each phase.")
   val Yshowsymkinds   = BooleanSetting    ("-Yshow-symkinds", "Print abbreviated symbol kinds next to symbol names.")
+  val Yshowsymowners  = BooleanSetting    ("-Yshow-symowners", "Print owner identifiers next to symbol names.")
   val skip            = PhasesSetting     ("-Yskip", "Skip")
   val Ygenjavap       = StringSetting     ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
+  val Ygenasmp        = StringSetting     ("-Ygen-asmp",  "dir", "Generate a parallel output directory of .asmp files (ie ASM Textifier output).", "")
   val Ydumpclasses    = StringSetting     ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "")
-  val Ynosqueeze      = BooleanSetting    ("-Yno-squeeze", "Disable creation of compact code in matching.")
   val Ystatistics     = BooleanSetting    ("-Ystatistics", "Print compiler statistics.") andThen (scala.reflect.internal.util.Statistics.enabled = _)
   val stopAfter       = PhasesSetting     ("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat
   val stopBefore      = PhasesSetting     ("-Ystop-before", "Stop before")
-  val refinementMethodDispatch
-                      = ChoiceSetting     ("-Ystruct-dispatch", "policy", "structural method dispatch policy", List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache")
   val Yrangepos       = BooleanSetting    ("-Yrangepos", "Use range positions for syntax trees.")
-  val Ybuilderdebug   = ChoiceSetting     ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
+  val Ymemberpos      = StringSetting     ("-Yshow-member-pos", "output style", "Show start and end positions of members", "") withPostSetHook (_ => Yrangepos.value = true)
   val Yreifycopypaste = BooleanSetting    ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
-  val Ymacronoexpand  = BooleanSetting    ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.")
+  val Ymacroexpand    = ChoiceSetting     ("-Ymacro-expand", "policy", "Control expansion of macros, useful for scaladoc and presentation compiler", List(MacroExpand.Normal, MacroExpand.None, MacroExpand.Discard), MacroExpand.Normal)
+  val Ymacronoexpand  = BooleanSetting    ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.") withDeprecationMessage(s"Use ${Ymacroexpand.name}:${MacroExpand.None}") withPostSetHook(_ => Ymacroexpand.value = MacroExpand.None)
   val Yreplsync       = BooleanSetting    ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
-  val Ynotnull        = BooleanSetting    ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.")
+  val Yreplclassbased = BooleanSetting    ("-Yrepl-class-based", "Use classes to wrap REPL snippets instead of objects")
+  val Yreploutdir     = StringSetting     ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "")
   val YmethodInfer    = BooleanSetting    ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.")
-  val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T].  This is a temporary option to ease transition.")
+  val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T].  This is a temporary option to ease transition.").withDeprecationMessage(removalIn212)
+  val inferByName     = BooleanSetting    ("-Yinfer-by-name", "Allow inference of by-name types. This is a temporary option to ease transition. See SI-7899.").withDeprecationMessage(removalIn212)
   val Yinvalidate     = StringSetting     ("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "")
-  val noSelfCheck     = BooleanSetting    ("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.")
   val YvirtClasses    = false // too embryonic to even expose as a -Y //BooleanSetting    ("-Yvirtual-classes", "Support virtual classes")
+  val YdisableUnreachablePrevention = BooleanSetting("-Ydisable-unreachable-prevention", "Disable the prevention of unreachable blocks in code generation.")
+  val YnoLoadImplClass = BooleanSetting   ("-Yno-load-impl-class", "Do not load $class.class files.")
 
   val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
+  // the current standard is "inline" but we are moving towards "method"
+  val Ydelambdafy        = ChoiceSetting     ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "inline")
 
-  def stop = stopAfter
+  private def removalIn212 = "This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug."
 
   /** Area-specific debug output.
    */
-  val Ybuildmanagerdebug      = BooleanSetting("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.")
-  val Ycompletion             = BooleanSetting("-Ycompletion-debug", "Trace all tab completion activity.")
   val Ydocdebug               = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.")
   val Yidedebug               = BooleanSetting("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
-  val Yinferdebug             = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.")
   val Yissuedebug             = BooleanSetting("-Yissue-debug", "Print stack traces when a context issues an error.")
   val YmacrodebugLite         = BooleanSetting("-Ymacro-debug-lite", "Trace essential macro-related activities.")
   val YmacrodebugVerbose      = BooleanSetting("-Ymacro-debug-verbose", "Trace all macro-related activities: compilation, generation of synthetics, classloading, expansion, exceptions.")
-  val Ypmatdebug              = BooleanSetting("-Ypmat-debug", "Trace all pattern matcher activity.")
   val Yposdebug               = BooleanSetting("-Ypos-debug", "Trace position validation.")
   val Yreifydebug             = BooleanSetting("-Yreify-debug", "Trace reification.")
-  val Yrepldebug              = BooleanSetting("-Yrepl-debug", "Trace all repl activity.") andThen (interpreter.replProps.debug setValue _)
   val Ytyperdebug             = BooleanSetting("-Ytyper-debug", "Trace all type assignments.")
   val Ypatmatdebug            = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.")
+  val Yquasiquotedebug        = BooleanSetting("-Yquasiquote-debug", "Trace quasiquote-related activities.")
+
+  // TODO 2.12 Remove
+  val Yinferdebug             = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.") withDeprecationMessage("Use -Ytyper-debug") enabling(List(Ytyperdebug))
 
   /** Groups of Settings.
    */
-  val future        = BooleanSetting("-Xfuture", "Turn on future language features.") enabling futureSettings
-  val optimise      = BooleanSetting("-optimise", "Generates faster bytecode by applying optimisations to the program") withAbbreviation "-optimize" enabling optimiseSettings
-  val Xexperimental = BooleanSetting("-Xexperimental", "Enable experimental extensions.") enabling experimentalSettings
+  val future        = BooleanSetting("-Xfuture", "Turn on future language features.") enablingIfNotSetByUser futureSettings
+  val optimise      = BooleanSetting("-optimise", "Generates faster bytecode by applying optimisations to the program") withAbbreviation "-optimize" enablingIfNotSetByUser optimiseSettings
+  val nooptimise    = BooleanSetting("-Ynooptimise", "Clears all the flags set by -optimise. Useful for testing optimizations in isolation.") withAbbreviation "-Ynooptimize" disabling optimise::optimiseSettings
+  val Xexperimental = BooleanSetting("-Xexperimental", "Enable experimental extensions.") enablingIfNotSetByUser experimentalSettings
 
+  /**
+   * Settings motivated by GenBCode
+   */
+  val Ybackend = ChoiceSetting ("-Ybackend", "choice of bytecode emitter", "Choice of bytecode emitter.",
+                                List("GenASM", "GenBCode"),
+                                "GenASM")
   // Feature extensions
   val XmacroSettings          = MultiStringSetting("-Xmacro-settings", "option", "Custom settings for macros.")
 
@@ -227,4 +241,17 @@ trait ScalaSettings extends AbsScalaSettings
 
   /** Test whether this is scaladoc we're looking at */
   def isScaladoc = false
+
+  /**
+   * Helper utilities for use by checkConflictingSettings()
+   */
+  def isBCodeActive   = !isICodeAskedFor
+  def isBCodeAskedFor = (Ybackend.value != "GenASM")
+  def isICodeAskedFor = ((Ybackend.value == "GenASM") || optimiseSettings.exists(_.value) || writeICode.isSetByUser)
+
+  object MacroExpand {
+    val None = "none"
+    val Normal = "normal"
+    val Discard = "discard"
+  }
 }
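
For reference, the checkConflictingSettings helpers near the end of the file above decide whether the ICode (GenASM) or the GenBCode pipeline is in effect. A standalone model of that boolean logic; the object and its parameters are illustrative, only the predicates mirror the patch:

object BackendChoice {
  def isICodeAskedFor(backend: String, optimising: Boolean, printingICode: Boolean) =
    backend == "GenASM" || optimising || printingICode
  def isBCodeAskedFor(backend: String) = backend != "GenASM"
  def isBCodeActive(backend: String, optimising: Boolean, printingICode: Boolean) =
    !isICodeAskedFor(backend, optimising, printingICode)

  def main(args: Array[String]): Unit = {
    // -Ybackend:GenBCode alone activates the new emitter...
    println(isBCodeActive("GenBCode", optimising = false, printingICode = false)) // true
    // ...but any optimisation flag still routes through the ICode pipeline.
    println(isBCodeActive("GenBCode", optimising = true, printingICode = false))  // false
  }
}
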
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
index d6a0149..4f45043 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -4,13 +4,14 @@
  */
 // $Id$
 
-package scala.tools.nsc.settings
+package scala
+package tools.nsc.settings
 
 /**
  * Represents a single Scala version in a manner that
  * supports easy comparison and sorting.
  */
-abstract class ScalaVersion extends Ordered[ScalaVersion] {
+sealed abstract class ScalaVersion extends Ordered[ScalaVersion] {
   def unparse: String
 }
 
@@ -19,7 +20,7 @@ abstract class ScalaVersion extends Ordered[ScalaVersion] {
  */
 case object NoScalaVersion extends ScalaVersion {
   def unparse = "none"
-      
+
   def compare(that: ScalaVersion): Int = that match {
     case NoScalaVersion => 0
     case _ => 1
@@ -33,7 +34,7 @@ case object NoScalaVersion extends ScalaVersion {
  * to segregate builds
  */
 case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion {
-  def unparse = s"${major}.${minor}.${rev}.${build.unparse}"  
+  def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
 
   def compare(that: ScalaVersion): Int =  that match {
     case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) =>
@@ -48,7 +49,7 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu
       else build compare thatBuild
     case AnyScalaVersion => 1
     case NoScalaVersion => -1
-  }  
+  }
 }
 
 /**
@@ -56,7 +57,7 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu
  */
 case object AnyScalaVersion extends ScalaVersion {
   def unparse = "any"
-      
+
   def compare(that: ScalaVersion): Int = that match {
     case AnyScalaVersion => 0
     case _ => -1
@@ -70,7 +71,7 @@ object ScalaVersion {
   private val dot = "\\."
   private val dash = "\\-"
   private def not(s:String) = s"[^${s}]"
-  private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r  
+  private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
 
   def apply(versionString : String, errorHandler: String => Unit): ScalaVersion = {
     def errorAndValue() = {
@@ -82,41 +83,41 @@ object ScalaVersion {
         )
         AnyScalaVersion
     }
-    
+
     def toInt(s: String) = s match {
       case null | "" => 0
       case _ => s.toInt
     }
-    
+
     def isInt(s: String) = util.Try(toInt(s)).isSuccess
-    
+
     def toBuild(s: String) = s match {
       case null | "FINAL" => Final
       case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2)))
       case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1)))
       case _ => Development(s)
     }
-    
+
     try versionString match {
       case "none" => NoScalaVersion
       case "any" => AnyScalaVersion
-      case R(_, majorS, _, minorS, _, revS, _, buildS) => 
+      case R(_, majorS, _, minorS, _, revS, _, buildS) =>
         SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))
-      case _ => 
+      case _ =>
         errorAndValue()
     } catch {
       case e: NumberFormatException => errorAndValue()
     }
   }
-  
-  def apply(versionString: String): ScalaVersion = 
+
+  def apply(versionString: String): ScalaVersion =
       apply(versionString, msg => throw new NumberFormatException(msg))
-  
+
   /**
    * The version of the compiler running now
    */
   val current = apply(util.Properties.versionNumberString)
-  
+
   /**
    * The 2.8.0 version.
    */
@@ -126,7 +127,7 @@ object ScalaVersion {
 /**
  * Represents the data after the dash in major.minor.rev-build
  */
-abstract class ScalaBuild extends Ordered[ScalaBuild] {  
+abstract class ScalaBuild extends Ordered[ScalaBuild] {
   /**
    * Return a version of this build information that can be parsed back into the
    * same ScalaBuild
@@ -138,7 +139,7 @@ abstract class ScalaBuild extends Ordered[ScalaBuild] {
  */
 case class Development(id: String) extends ScalaBuild {
   def unparse = s"-${id}"
-      
+
   def compare(that: ScalaBuild) = that match {
     // sorting two development builds based on id is reasonably valid for two versions created with the same schema
     // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions
@@ -154,7 +155,7 @@ case class Development(id: String) extends ScalaBuild {
  */
 case object Final extends ScalaBuild {
   def unparse = ""
-      
+
   def compare(that: ScalaBuild) = that match {
     case Final => 0
     // a final is newer than anything other than a development build or another final
@@ -168,14 +169,14 @@ case object Final extends ScalaBuild {
  */
 case class RC(n: Int) extends ScalaBuild {
   def unparse = s"-RC${n}"
-      
+
   def compare(that: ScalaBuild) = that match {
     // compare two rcs based on their RC numbers
     case RC(thatN) => n - thatN
     // an rc is older than anything other than a milestone or another rc
     case Milestone(_) => 1
-    case _ => -1    
-  }  
+    case _ => -1
+  }
 }
 
 /**
@@ -183,12 +184,12 @@ case class RC(n: Int) extends ScalaBuild {
  */
 case class Milestone(n: Int) extends ScalaBuild {
   def unparse = s"-M${n}"
-      
+
   def compare(that: ScalaBuild) = that match {
     // compare two milestones based on their milestone numbers
     case Milestone(thatN) => n - thatN
     // a milestone is older than anything other than another milestone
     case _ => -1
-    
-  }  
+
+  }
 }
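
A brief usage sketch for the parser and ordering defined above, assuming scala-compiler.jar is on the classpath; the demo object itself is illustrative:

import scala.tools.nsc.settings._

object ScalaVersionDemo {
  def main(args: Array[String]): Unit = {
    val rc1 = ScalaVersion("2.11.0-RC1")  // SpecificScalaVersion(2, 11, 0, RC(1))
    val fin = ScalaVersion("2.11.0")      // a missing build part defaults to Final
    println(rc1 < fin)                                // true: an RC sorts before the final release
    println(ScalaVersion("any") == AnyScalaVersion)   // true
    println(ScalaVersion("none") == NoScalaVersion)   // true
  }
}
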
diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
index e866ad6..37dfafb 100644
--- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
@@ -35,22 +35,15 @@ trait StandardScalaSettings {
   val feature =        BooleanSetting ("-feature", "Emit warning and location for usages of features that should be imported explicitly.")
   val g =               ChoiceSetting ("-g", "level", "Set level of generated debugging info.", List("none", "source", "line", "vars", "notailcalls"), "vars")
   val help =           BooleanSetting ("-help", "Print a synopsis of standard options")
-  val make =            ChoiceSetting ("-make", "policy", "Recompilation detection policy", List("all", "changed", "immediate", "transitive", "transitivenocp"), "all")
-                        . withDeprecationMessage ("this option is unmaintained.  Use sbt or an IDE for selective recompilation.")
   val nowarn =         BooleanSetting ("-nowarn", "Generate no warnings.")
   val optimise:        BooleanSetting // depends on post hook which mutates other settings
   val print =          BooleanSetting ("-print", "Print program with Scala-specific features removed.")
   val target =          ChoiceSetting ("-target", "target", "Target platform for object files. All JVM 1.5 targets are deprecated.",
-                                       List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil"),
-                                       "jvm-1.6")
+                          List("jvm-1.5", "jvm-1.6", "jvm-1.7"), "jvm-1.6")
   val unchecked =      BooleanSetting ("-unchecked", "Enable additional warnings where generated code depends on assumptions.")
   val uniqid =         BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.")
   val usejavacp =      BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.")
+  val usemanifestcp =  BooleanSetting ("-usemanifestcp", "Utilize the manifest in classpath resolution.")
   val verbose =        BooleanSetting ("-verbose", "Output messages about what the compiler is doing.")
   val version =        BooleanSetting ("-version", "Print product version and exit.")
-
-  /** These are @<file> and -Dkey=val style settings, which don't
-   *  nicely map to identifiers.
-   */
-  val argfiles: BooleanSetting  // exists only to echo help message, should be done differently
 }
diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala
index 9f98792..1509ad1 100644
--- a/src/compiler/scala/tools/nsc/settings/Warnings.scala
+++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala
@@ -7,6 +7,8 @@ package scala.tools
 package nsc
 package settings
 
+import language.existentials
+
 /** Settings influencing the printing of warnings.
  */
 trait Warnings {
@@ -19,43 +21,40 @@ trait Warnings {
   // present form, but have the potential to offer useful info.
   protected def allWarnings = lintWarnings ++ List(
     warnDeadCode,
-    warnSelectNullable,
     warnValueDiscard,
     warnNumericWiden
   )
   // These warnings should be pretty quiet unless you're doing
   // something inadvisable.
   protected def lintWarnings = List(
-    // warnDeadCode,
     warnInaccessible,
     warnNullaryOverride,
     warnNullaryUnit,
-    warnAdaptedArgs
+    warnAdaptedArgs,
+    warnInferAny
+    // warnUnused       SI-7712, SI-7707 warnUnused not quite ready for prime-time
+    // warnUnusedImport currently considered too noisy for general use
   )
 
-  // Warning groups.
-  val lint = (
-    BooleanSetting("-Xlint", "Enable recommended additional warnings.")
-    withPostSetHook (_ => lintWarnings foreach (_.value = true))
-  )
-  val warnEverything = (
-    BooleanSetting("-Ywarn-all", "Enable all -Y warnings.")
-    withPostSetHook (_ => lintWarnings foreach (_.value = true))
-  )
+  private lazy val warnSelectNullable = BooleanSetting("-Xcheck-null", "This option is obsolete and does nothing.")
 
   // Individual warnings.
-  val warnSelectNullable   = BooleanSetting   ("-Xcheck-null", "Warn upon selection of nullable reference.")
   val warnAdaptedArgs      = BooleanSetting   ("-Ywarn-adapted-args", "Warn if an argument list is modified to match the receiver.")
   val warnDeadCode         = BooleanSetting   ("-Ywarn-dead-code", "Warn when dead code is identified.")
   val warnValueDiscard     = BooleanSetting   ("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.")
   val warnNumericWiden     = BooleanSetting   ("-Ywarn-numeric-widen", "Warn when numerics are widened.")
   val warnNullaryUnit      = BooleanSetting   ("-Ywarn-nullary-unit", "Warn when nullary methods return Unit.")
   val warnInaccessible     = BooleanSetting   ("-Ywarn-inaccessible", "Warn about inaccessible types in method signatures.")
-  val warnNullaryOverride  = BooleanSetting   ("-Ywarn-nullary-override",
-    "Warn when non-nullary overrides nullary, e.g. `def foo()` over `def foo`.")
+  val warnNullaryOverride  = BooleanSetting   ("-Ywarn-nullary-override", "Warn when non-nullary overrides nullary, e.g. `def foo()` over `def foo`.")
+  val warnInferAny         = BooleanSetting   ("-Ywarn-infer-any", "Warn when a type argument is inferred to be `Any`.")
+  val warnUnused           = BooleanSetting   ("-Ywarn-unused", "Warn when local and private vals, vars, defs, and types are unused.")
+  val warnUnusedImport     = BooleanSetting   ("-Ywarn-unused-import", "Warn when imports are unused.")
+
+  // Warning groups.
+  val lint = BooleanSetting("-Xlint", "Enable recommended additional warnings.") enablingIfNotSetByUser lintWarnings
 
   // Backward compatibility.
-  def Xwarnfatal    = fatalWarnings
-  def Xchecknull    = warnSelectNullable
-  def Ywarndeadcode = warnDeadCode
+  @deprecated("Use fatalWarnings", "2.11.0") def Xwarnfatal            = fatalWarnings      // used by sbt
+  @deprecated("This option is being removed", "2.11.0") def Xchecknull = warnSelectNullable // used by ide
+  @deprecated("Use warnDeadCode", "2.11.0") def Ywarndeadcode          = warnDeadCode       // used by ide
 }
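
With the lint group now defined through enablingIfNotSetByUser, a bare -Xlint turns on every member of lintWarnings that the user has not set explicitly. A small sketch, again assuming the 2.11 compiler on the classpath; the demo object and error handler are illustrative:

import scala.tools.nsc.Settings

object LintDemo {
  def main(args: Array[String]): Unit = {
    val settings = new Settings(msg => sys.error(msg))
    settings.processArguments(List("-Xlint"), processAll = true)
    println(settings.warnInaccessible.value)  // true: part of the lintWarnings group
    println(settings.warnDeadCode.value)      // false: -Ywarn-dead-code is not in the group
  }
}
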
diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
index c7bd678..c2d0f5c 100644
--- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
@@ -6,16 +6,16 @@
 package scala.tools.nsc
 package symtab
 
-import scala.reflect.internal.util.BatchSourceFile
 import scala.tools.nsc.io.AbstractFile
 
 /** A subclass of SymbolLoaders that implements browsing behavior.
  *  This class should be used whenever file dependencies and recompile sets
  *  are managed automatically.
  */
-abstract class BrowsingLoaders extends SymbolLoaders {
-  import global._
+abstract class BrowsingLoaders extends GlobalSymbolLoaders {
+  val global: Global
 
+  import global._
   import syntaxAnalyzer.{OutlineParser, MalformedInput}
 
   /** In browse mode, it can happen that an encountered symbol is already
@@ -28,7 +28,7 @@ abstract class BrowsingLoaders extends SymbolLoaders {
   override protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
     completer.sourcefile match {
       case Some(src) =>
-        (if (member.isModule) member.moduleClass else member).sourceFile = src
+        (if (member.isModule) member.moduleClass else member).associatedFile = src
       case _ =>
     }
     val decls = owner.info.decls
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 9e5186b..8b73995 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -10,10 +10,9 @@ import java.io.IOException
 import scala.compat.Platform.currentTime
 import scala.tools.nsc.util.{ ClassPath }
 import classfile.ClassfileParser
-import scala.reflect.internal.Flags._
 import scala.reflect.internal.MissingRequirementError
 import scala.reflect.internal.util.Statistics
-import scala.tools.nsc.io.{ AbstractFile, MsilFile }
+import scala.reflect.io.{ AbstractFile, NoAbstractFile }
 
 /** This class ...
  *
@@ -21,8 +20,23 @@ import scala.tools.nsc.io.{ AbstractFile, MsilFile }
  *  @version 1.0
  */
 abstract class SymbolLoaders {
-  val global: Global
-  import global._
+  val symbolTable: symtab.SymbolTable {
+    def settings: Settings
+  }
+  val platform: backend.Platform {
+    val symbolTable: SymbolLoaders.this.symbolTable.type
+  }
+  import symbolTable._
+  /**
+   * Required by ClassfileParser. See the documentation in that class for details.
+   */
+  def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol
+  /**
+   * Should forward to `Run.compileLate`. The more principled fix would be to
+   * determine why this functionality is needed and extract it into a separate
+   * interface.
+   */
+  protected def compileLate(srcfile: AbstractFile): Unit
   import SymbolLoadersStats._
 
   protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
@@ -31,6 +45,14 @@ abstract class SymbolLoaders {
     member
   }
 
+  protected def signalError(root: Symbol, ex: Throwable) {
+    if (settings.debug) ex.printStackTrace()
+    globalError(ex.getMessage() match {
+      case null => "i/o error while loading " + root.name
+      case msg  => "error while loading " + root.name + ", " + msg
+    })
+  }
+
   /** Enter class with given `name` into scope of `root`
    *  and give them `completer` as type.
    */
@@ -68,14 +90,14 @@ abstract class SymbolLoaders {
           name+"\none of them needs to be removed from classpath"
         )
       else if (settings.termConflict.value == "package") {
-        global.warning(
+        warning(
           "Resolving package/object name conflict in favor of package " +
           preExisting.fullName + ".  The object will be inaccessible."
         )
         root.info.decls.unlink(preExisting)
       }
       else {
-        global.warning(
+        warning(
           "Resolving package/object name conflict in favor of object " +
           preExisting.fullName + ".  The package will be inaccessible."
         )
@@ -132,20 +154,27 @@ abstract class SymbolLoaders {
 
   /** Initialize toplevel class and module symbols in `owner` from class path representation `classRep`
    */
-  def initializeFromClassPath(owner: Symbol, classRep: ClassPath[platform.BinaryRepr]#ClassRep) {
+  def initializeFromClassPath(owner: Symbol, classRep: ClassPath[AbstractFile]#ClassRep) {
     ((classRep.binary, classRep.source) : @unchecked) match {
       case (Some(bin), Some(src))
       if platform.needCompile(bin, src) && !binaryOnly(owner, classRep.name) =>
-        if (settings.verbose.value) inform("[symloader] picked up newer source file for " + src.path)
-        global.loaders.enterToplevelsFromSource(owner, classRep.name, src)
+        if (settings.verbose) inform("[symloader] picked up newer source file for " + src.path)
+        enterToplevelsFromSource(owner, classRep.name, src)
       case (None, Some(src)) =>
-        if (settings.verbose.value) inform("[symloader] no class, picked up source file for " + src.path)
-        global.loaders.enterToplevelsFromSource(owner, classRep.name, src)
+        if (settings.verbose) inform("[symloader] no class, picked up source file for " + src.path)
+        enterToplevelsFromSource(owner, classRep.name, src)
       case (Some(bin), _) =>
-        global.loaders.enterClassAndModule(owner, classRep.name, platform.newClassLoader(bin))
+        enterClassAndModule(owner, classRep.name, newClassLoader(bin))
     }
   }
 
+  /** Create a new loader from a binary classfile.
+   *  This is intended as a hook for supporting the loading of symbols from
+   *  files other than .class files.
+   */
+  protected def newClassLoader(bin: AbstractFile): SymbolLoader =
+    new ClassfileLoader(bin)
+
   /**
    * A lazy type that completes itself by calling parameter doComplete.
    * Any linked modules/classes or module classes are also initialized.
@@ -160,7 +189,7 @@ abstract class SymbolLoaders {
     def sourcefile: Option[AbstractFile] = None
 
     /**
-     * Description of the resource (ClassPath, AbstractFile, MsilFile)
+     * Description of the resource (ClassPath, AbstractFile)
      * being processed by this loader
      */
     protected def description: String
@@ -169,25 +198,13 @@ abstract class SymbolLoaders {
 
     private def setSource(sym: Symbol) {
       sourcefile foreach (sf => sym match {
-        case cls: ClassSymbol => cls.sourceFile = sf
-        case mod: ModuleSymbol => mod.moduleClass.sourceFile = sf
+        case cls: ClassSymbol => cls.associatedFile = sf
+        case mod: ModuleSymbol => mod.moduleClass.associatedFile = sf
         case _ => ()
       })
     }
 
     override def complete(root: Symbol) {
-      def signalError(ex: Exception) {
-        ok = false
-        if (settings.debug.value) ex.printStackTrace()
-        val msg = ex.getMessage()
-        // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
-        // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
-        // that are not in their correct place (see bug for details)
-        if (!settings.isScaladoc)
-          globalError(
-            if (msg eq null) "i/o error while loading " + root.name
-            else "error while loading " + root.name + ", " + msg);
-      }
       try {
         val start = currentTime
         val currentphase = phase
@@ -197,11 +214,11 @@ abstract class SymbolLoaders {
         ok = true
         setSource(root)
         setSource(root.companionSymbol) // module -> class, class -> module
-      } catch {
-        case ex: IOException =>
-          signalError(ex)
-        case ex: MissingRequirementError =>
-          signalError(ex)
+      }
+      catch {
+        case ex @ (_: IOException | _: MissingRequirementError) =>
+          ok = false
+          signalError(root, ex)
       }
       initRoot(root)
       if (!root.isPackageClass) initRoot(root.companionSymbol)
@@ -226,14 +243,13 @@ abstract class SymbolLoaders {
   /**
    * Load contents of a package
    */
-  class PackageLoader(classpath: ClassPath[platform.BinaryRepr]) extends SymbolLoader with FlagAgnosticCompleter {
+  class PackageLoader(classpath: ClassPath[AbstractFile]) extends SymbolLoader with FlagAgnosticCompleter {
     protected def description = "package loader "+ classpath.name
 
     protected def doComplete(root: Symbol) {
       assert(root.isPackageClass, root)
       root.setInfo(new PackageClassInfoType(newScope, root))
 
-      val sourcepaths = classpath.sourcepaths
       if (!root.isRoot) {
         for (classRep <- classpath.classes if platform.doLoad(classRep)) {
           initializeFromClassPath(root, classRep)
@@ -250,8 +266,24 @@ abstract class SymbolLoaders {
   }
 
   class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
-    private object classfileParser extends ClassfileParser {
-      val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
+    private object classfileParser extends {
+      val symbolTable: SymbolLoaders.this.symbolTable.type = SymbolLoaders.this.symbolTable
+    } with ClassfileParser {
+      override protected type ThisConstantPool = ConstantPool
+      override protected def newConstantPool: ThisConstantPool = new ConstantPool
+      override protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol =
+        SymbolLoaders.this.lookupMemberAtTyperPhaseIfPossible(sym, name)
+      /*
+       * The type alias and the cast (where the alias is used) are needed due to the problem described
+       * in SI-7585. In this particular case, the problem is that we need to make sure that the symbol
+       * table used by symbol loaders is exactly the same as the one used by classfileParser.
+       * If you look at the path-dependent types we have here, everything should work out, but
+       * due to the issue described in SI-7585 the type-checker cannot tie the knot here.
+       *
+       */
+      private type SymbolLoadersRefined = SymbolLoaders { val symbolTable: classfileParser.symbolTable.type }
+      val loaders = SymbolLoaders.this.asInstanceOf[SymbolLoadersRefined]
+      val classPath = platform.classPath
     }
 
     protected def description = "class file "+ classfile.toString
@@ -259,7 +291,7 @@ abstract class SymbolLoaders {
     protected def doComplete(root: Symbol) {
       val start = if (Statistics.canEnable) Statistics.startTimer(classReadNanos) else null
       classfileParser.parse(classfile, root)
-      if (root.associatedFile eq null) {
+      if (root.associatedFile eq NoAbstractFile) {
         root match {
           // In fact, the ModuleSymbol forwards its setter to the module class
           case _: ClassSymbol | _: ModuleSymbol =>
@@ -274,21 +306,11 @@ abstract class SymbolLoaders {
     override def sourcefile: Option[AbstractFile] = classfileParser.srcfile
   }
 
-  class MsilFileLoader(msilFile: MsilFile) extends SymbolLoader with FlagAssigningCompleter {
-    private def typ = msilFile.msilType
-    private object typeParser extends clr.TypeParser {
-      val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
-    }
-
-    protected def description = "MsilFile "+ typ.FullName + ", assembly "+ typ.Assembly.FullName
-    protected def doComplete(root: Symbol) { typeParser.parse(typ, root) }
-  }
-
   class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
     protected def description = "source file "+ srcfile.toString
     override def fromSource = true
     override def sourcefile = Some(srcfile)
-    protected def doComplete(root: Symbol): Unit = global.currentRun.compileLate(srcfile)
+    protected def doComplete(root: Symbol): Unit = compileLate(srcfile)
   }
 
   object moduleClassLoader extends SymbolLoader with FlagAssigningCompleter {
@@ -296,11 +318,6 @@ abstract class SymbolLoaders {
     protected def doComplete(root: Symbol) { root.sourceModule.initialize }
   }
 
-  object clrTypes extends clr.CLRTypes {
-    val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
-    if (global.forMSIL) init()
-  }
-
   /** used from classfile parser to avoid cycles */
   var parentsLevel = 0
   var pendingLoadActions: List[() => Unit] = Nil
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
index 7a84441..daaa625 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
@@ -6,7 +6,6 @@
 package scala.tools.nsc
 package symtab
 
-import scala.collection.{ mutable, immutable }
 import scala.language.implicitConversions
 import scala.language.postfixOps
 
@@ -17,9 +16,6 @@ trait SymbolTrackers {
   val global: Global
   import global._
 
-  private implicit lazy val TreeOrdering: Ordering[Tree] =
-    Ordering by (x => (x.shortClass, x.symbol))
-
   private implicit lazy val SymbolOrdering: Ordering[Symbol] =
     Ordering by (x => (x.kindString, x.name.toString))
 
@@ -76,7 +72,6 @@ trait SymbolTrackers {
     private def isFlagsChange(sym: Symbol) = changed.flags contains sym
 
     private implicit def NodeOrdering: Ordering[Node] = Ordering by (_.root)
-    private def ownersString(sym: Symbol, num: Int) = sym.ownerChain drop 1 take num mkString " -> "
 
     object Node {
       def nodes(syms: Set[Symbol]): List[Node] = {
@@ -114,7 +109,6 @@ trait SymbolTrackers {
         case Some(oldFlags) =>
           val added   = masked & ~oldFlags
           val removed = oldFlags & ~masked
-          val steady  = masked & ~(added | removed)
           val all     = masked | oldFlags
           val strs    = 0 to 63 map { bit =>
             val flag = 1L << bit
@@ -133,7 +127,7 @@ trait SymbolTrackers {
           else " (" + Flags.flagsToString(masked) + ")"
       }
       def symString(sym: Symbol) = (
-        if (settings.debug.value && sym.hasCompleteInfo) {
+        if (settings.debug && sym.hasCompleteInfo) {
           val s = sym.defString take 240
           if (s.length == 240) s + "..." else s
         }
@@ -181,7 +175,7 @@ trait SymbolTrackers {
     }
     def show(label: String): String = {
       val hierarchy = Node(current)
-      val Change(added, removed, symMap, owners, flags) = history.head
+      val Change(_, removed, symMap, _, _) = history.head
       def detailString(sym: Symbol) = {
         val ownerString = sym.ownerChain splitAt 3 match {
           case (front, back) =>
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
index 427b5bf..17e3b08 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
@@ -29,11 +29,6 @@ class AbstractFileReader(val file: AbstractFile) {
    */
   var bp: Int = 0
 
-  /** return byte at offset 'pos'
-   */
-  @throws(classOf[IndexOutOfBoundsException])
-  def byteAt(pos: Int): Byte = buf(pos)
-
   /** read a byte
    */
   @throws(classOf[IndexOutOfBoundsException])
@@ -45,7 +40,7 @@ class AbstractFileReader(val file: AbstractFile) {
 
   /** read some bytes
    */
-  def nextBytes(len: Int): Array[Byte] = {
+  def nextBytes(len: Int): Array[Byte] = { // used in ide
     bp += len
     buf.slice(bp - len, bp)
   }
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 2955986..ea600bc 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package symtab
 package classfile
 
@@ -12,48 +13,94 @@ import java.lang.Integer.toHexString
 import scala.collection.{ mutable, immutable }
 import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
 import scala.annotation.switch
+import scala.reflect.internal.{ JavaAccFlags }
 import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs}
 import scala.tools.nsc.io.AbstractFile
 
+import util.ClassPath
+
 /** This abstract class implements a class file parser.
  *
  *  @author Martin Odersky
  *  @version 1.0
  */
 abstract class ClassfileParser {
-  val global: Global
-  import global._
-  import definitions.{ AnnotationClass, ClassfileAnnotationClass }
+  val symbolTable: SymbolTable {
+    def settings: Settings
+  }
+  val loaders: SymbolLoaders {
+    val symbolTable: ClassfileParser.this.symbolTable.type
+  }
+
+  import symbolTable._
+  /**
+   * If the typer phase is defined, then perform member lookup of a symbol
+   * `sym` at that phase. This method results from refactoring. The
+   * original author of the logic that uses the typer phase didn't explain
+   * why we need to force infos at that phase specifically; it was only mentioned
+   * that ClassfileParser can be called late (e.g. at the flatten phase) and
+   * we need to make sure we handle such a situation properly.
+   */
+  protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol
+
+  /** The compiler classpath. */
+  def classPath: ClassPath[AbstractFile]
+
+  import definitions._
   import scala.reflect.internal.ClassfileConstants._
   import Flags._
 
+  protected type ThisConstantPool <: ConstantPool
+  protected def newConstantPool: ThisConstantPool
+
   protected var in: AbstractFileReader = _  // the class file reader
   protected var clazz: Symbol = _           // the class symbol containing dynamic members
   protected var staticModule: Symbol = _    // the module symbol containing static members
-  protected var instanceScope: Scope = _     // the scope of all instance definitions
-  protected var staticScope: Scope = _       // the scope of all static definitions
-  protected var pool: ConstantPool = _      // the classfile's constant pool
+  protected var instanceScope: Scope = _    // the scope of all instance definitions
+  protected var staticScope: Scope = _      // the scope of all static definitions
+  protected var pool: ThisConstantPool = _  // the classfile's constant pool
   protected var isScala: Boolean = _        // does class file describe a scala class?
   protected var isScalaAnnot: Boolean = _   // does class file describe a scala class with its pickled info in an annotation?
   protected var isScalaRaw: Boolean = _     // this class file is a scala class with no pickled info
-  protected var busy: Option[Symbol] = None // lock to detect recursive reads
+  protected var busy: Symbol = _            // lock to detect recursive reads
   protected var currentClass: Name = _      // JVM name of the current class
   protected var classTParams = Map[Name,Symbol]()
   protected var srcfile0 : Option[AbstractFile] = None
   protected def moduleClass: Symbol = staticModule.moduleClass
+  private var sawPrivateConstructor = false
+
+  private def ownerForFlags(jflags: JavaAccFlags) = if (jflags.isStatic) moduleClass else clazz
 
   def srcfile = srcfile0
 
+  private def optimized         = settings.optimise.value
+
+  // u1, u2, and u4 are what these data types are called in the JVM spec.
+  // They are an unsigned byte, unsigned char, and unsigned int respectively.
+  // We bitmask u1 into an Int to make sure it's 0-255 (and u1 isn't used
+  // for much beyond tags) but leave u2 alone as it's already unsigned.
+  protected final def u1(): Int = in.nextByte & 0xFF
+  protected final def u2(): Int = in.nextChar.toInt
+  protected final def u4(): Int = in.nextInt
+
+  private def readInnerClassFlags() = readClassFlags()
+  private def readClassFlags()      = JavaAccFlags classFlags u2
+  private def readMethodFlags()     = JavaAccFlags methodFlags u2
+  private def readFieldFlags()      = JavaAccFlags fieldFlags u2
+  private def readTypeName()        = readName().toTypeName
+  private def readName()            = pool getName u2
+  private def readType()            = pool getType u2
+
   private object unpickler extends scala.reflect.internal.pickling.UnPickler {
-    val global: ClassfileParser.this.global.type = ClassfileParser.this.global
+    val symbolTable: ClassfileParser.this.symbolTable.type = ClassfileParser.this.symbolTable
   }
 
   private def handleMissing(e: MissingRequirementError) = {
-    if (settings.debug.value) e.printStackTrace
+    if (settings.debug) e.printStackTrace
     throw new IOException(s"Missing dependency '${e.req}', required by ${in.file}")
   }
   private def handleError(e: Exception) = {
-    if (settings.debug.value) e.printStackTrace()
+    if (settings.debug) e.printStackTrace()
     throw new IOException(s"class file '${in.file}' is broken\n(${e.getClass}/${e.getMessage})")
   }
   private def mismatchError(c: Symbol) = {
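
The u1/u2/u4 readers introduced in the hunk above follow the JVM spec's unsigned types; only u1 needs a mask because a JVM Byte is signed, while Char is already a 16-bit unsigned type. A standalone illustration (the demo object is not part of the patch):

object UnsignedReadDemo {
  def main(args: Array[String]): Unit = {
    val tag: Byte = 0xCA.toByte   // first byte of the 0xCAFEBABE magic number
    println(tag.toInt)            // -54: sign-extended, unusable as a tag value
    println(tag & 0xFF)           // 202: the unsigned value that u1() yields
    val major: Char = 50          // classfile major version for Java 6
    println(major.toInt)          // 50: no mask needed for u2()
  }
}
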
@@ -65,16 +112,15 @@ abstract class ClassfileParser {
     case e: RuntimeException        => handleError(e)
   }
   @inline private def pushBusy[T](sym: Symbol)(body: => T): T = {
-    busy match {
-      case Some(`sym`)  => throw new IOException(s"unsatisfiable cyclic dependency in '$sym'")
-      case Some(sym1)   => throw new IOException(s"illegal class file dependency between '$sym' and '$sym1'")
-      case _            => ()
-    }
+    if (busy eq sym)
+      throw new IOException(s"unsatisfiable cyclic dependency in '$sym'")
+    else if ((busy ne null) && (busy ne NoSymbol))
+      throw new IOException(s"illegal class file dependency between '$sym' and '$busy'")
 
-    busy = Some(sym)
+    busy = sym
     try body
     catch parseErrorHandler
-    finally busy = None
+    finally busy = NoSymbol
   }
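The patch replaces the Option[Symbol] lock with a nullable Symbol; a minimal stand-alone sketch of the same guard pattern (hypothetical names, not compiler code) shows why the two error cases are kept distinct:

    // `busy` remembers what is currently being read, so a re-entrant read of the
    // same key (a cycle) and a nested read of a different key (an illegal
    // dependency) can be reported with different messages.
    class BusyGuard {
      private var busy: String = null
      def run[T](key: String)(body: => T): T = {
        if (busy == key) sys.error(s"unsatisfiable cyclic dependency in '$key'")
        else if (busy != null) sys.error(s"illegal dependency between '$key' and '$busy'")
        busy = key
        try body finally busy = null
      }
    }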
   @inline private def raiseLoaderLevel[T](body: => T): T = {
     loaders.parentsLevel += 1
@@ -94,75 +140,70 @@ abstract class ClassfileParser {
       this.staticModule = if (root.isModule) root else root.companionModule
       this.isScala      = false
 
-      parseHeader
-      this.pool = new ConstantPool
+      parseHeader()
+      this.pool = newConstantPool
       parseClass()
     }
   }
 
   private def parseHeader() {
-    val magic = in.nextInt
+    val magic = u4
     if (magic != JAVA_MAGIC)
-      throw new IOException("class file '" + in.file + "' "
-                            + "has wrong magic number 0x" + toHexString(magic)
-                            + ", should be 0x" + toHexString(JAVA_MAGIC))
-    val minorVersion = in.nextChar.toInt
-    val majorVersion = in.nextChar.toInt
-    if ((majorVersion < JAVA_MAJOR_VERSION) ||
-        ((majorVersion == JAVA_MAJOR_VERSION) &&
-         (minorVersion < JAVA_MINOR_VERSION)))
-      throw new IOException("class file '" + in.file + "' "
-                            + "has unknown version "
-                            + majorVersion + "." + minorVersion
-                            + ", should be at least "
-                            + JAVA_MAJOR_VERSION + "." + JAVA_MINOR_VERSION)
+      abort(s"class file ${in.file} has wrong magic number 0x${toHexString(magic)}")
+
+    val minor, major = u2
+    if (major < JAVA_MAJOR_VERSION || major == JAVA_MAJOR_VERSION && minor < JAVA_MINOR_VERSION)
+      abort(s"class file ${in.file} has unknown version $major.$minor, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION")
   }
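For orientation, the header that parseHeader validates is the fixed-size prefix of every class file: the u4 magic word 0xCAFEBABE followed by the u2 minor and u2 major version (major 49 corresponds to Java 5, 50 to Java 6, 51 to Java 7). A small stand-alone check using only java.io rather than the compiler's input buffer:

    import java.io.{ DataInputStream, FileInputStream }

    // Reads the first four bytes (big-endian, like u4 above) and compares them
    // against the JVM magic number.
    def looksLikeClassFile(path: String): Boolean = {
      val in = new DataInputStream(new FileInputStream(path))
      try in.readInt() == 0xCAFEBABE
      finally in.close()
    }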
 
-  class ConstantPool {
-    private val len = in.nextChar
-    private val starts = new Array[Int](len)
-    private val values = new Array[AnyRef](len)
-    private val internalized = new Array[Name](len)
+  /**
+   * Constructor of this class should not be called directly, use `newConstantPool` instead.
+   */
+  protected class ConstantPool {
+    protected val len          = u2
+    protected val starts       = new Array[Int](len)
+    protected val values       = new Array[AnyRef](len)
+    protected val internalized = new Array[Name](len)
 
     { var i = 1
       while (i < starts.length) {
         starts(i) = in.bp
         i += 1
-        (in.nextByte.toInt: @switch) match {
-          case CONSTANT_UTF8 | CONSTANT_UNICODE =>
-            in.skip(in.nextChar)
-          case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE=>
-            in.skip(2)
-          case CONSTANT_METHODHANDLE =>
-            in.skip(3)
-          case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF
-             | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT
-             | CONSTANT_INVOKEDYNAMIC =>
-            in.skip(4)
-          case CONSTANT_LONG | CONSTANT_DOUBLE =>
-            in.skip(8)
-            i += 1
-          case _ =>
-            errorBadTag(in.bp - 1)
+        (u1: @switch) match {
+          case CONSTANT_UTF8 | CONSTANT_UNICODE                                => in skip u2
+          case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE          => in skip 2
+          case CONSTANT_METHODHANDLE                                           => in skip 3
+          case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF => in skip 4
+          case CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT        => in skip 4
+          case CONSTANT_INVOKEDYNAMIC                                          => in skip 4
+          case CONSTANT_LONG | CONSTANT_DOUBLE                                 => in skip 8 ; i += 1
+          case _                                                               => errorBadTag(in.bp - 1)
         }
       }
     }
 
-    /** Return the name found at given index. */
-    def getName(index: Int): Name = {
-      if (index <= 0 || len <= index)
-        errorBadIndex(index)
+    def recordAtIndex[T <: AnyRef](value: T, idx: Int): T = {
+      values(idx) = value
+      value
+    }
 
-      values(index) match {
+    def firstExpecting(index: Int, expected: Int): Int = {
+      val start = starts(index)
+      val first = in.buf(start).toInt
+      if (first == expected) start + 1
+      else this errorBadTag start
+    }
+
+    /** Return the name found at given index. */
+    def getName(index: Int): Name = (
+      if (index <= 0 || len <= index) errorBadIndex(index)
+      else values(index) match {
         case name: Name => name
-        case null   =>
-          val start = starts(index)
-          if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
-          val name = newTermName(in.buf, start + 3, in.getChar(start + 1))
-          values(index) = name
-          name
+        case _          =>
+          val start = firstExpecting(index, CONSTANT_UTF8)
+          recordAtIndex(newTermName(in.buf, start + 2, in.getChar(start).toInt), index)
       }
-    }
+    )
 
     /** Return the name found at given index in the constant pool, with '/' replaced by '.'. */
     def getExternalName(index: Int): Name = {
@@ -177,91 +218,23 @@ abstract class ClassfileParser {
 
     def getClassSymbol(index: Int): Symbol = {
       if (index <= 0 || len <= index) errorBadIndex(index)
-      var c = values(index).asInstanceOf[Symbol]
-      if (c eq null) {
-        val start = starts(index)
-        if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
-        val name = getExternalName(in.getChar(start + 1))
-        if (nme.isModuleName(name))
-          c = rootMirror.getModule(nme.stripModuleSuffix(name))
-        else
-          c = classNameToSymbol(name)
-
-        values(index) = c
+      values(index) match {
+        case sym: Symbol => sym
+        case _           =>
+          val result = getClassName(index) match {
+            case name if nme.isModuleName(name) => rootMirror getModuleByName name.dropModule
+            case name                           => classNameToSymbol(name)
+          }
+          recordAtIndex(result, index)
       }
-      c
     }
 
     /** Return the external name of the class info structure found at 'index'.
      *  Use 'getClassSymbol' if the class is sure to be a top-level class.
      */
     def getClassName(index: Int): Name = {
-      val start = starts(index)
-      if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
-      getExternalName(in.getChar(start + 1))
-    }
-
-    /** Return the symbol of the class member at `index`.
-     *  The following special cases exist:
-     *   - If the member refers to special `MODULE$` static field, return
-     *  the symbol of the corresponding module.
-     *   - If the member is a field, and is not found with the given name,
-     *     another try is made by appending `nme.LOCAL_SUFFIX_STRING`
-     *   - If no symbol is found in the right tpe, a new try is made in the
-     *     companion class, in case the owner is an implementation class.
-     */
-    def getMemberSymbol(index: Int, static: Boolean): Symbol = {
-      if (index <= 0 || len <= index) errorBadIndex(index)
-      var f = values(index).asInstanceOf[Symbol]
-      if (f eq null) {
-        val start = starts(index)
-        val first = in.buf(start).toInt
-        if (first != CONSTANT_FIELDREF &&
-            first != CONSTANT_METHODREF &&
-            first != CONSTANT_INTFMETHODREF) errorBadTag(start)
-        val ownerTpe = getClassOrArrayType(in.getChar(start + 1))
-        debuglog("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.originalName)
-        val (name0, tpe0) = getNameAndType(in.getChar(start + 3), ownerTpe)
-        debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0)
-
-        forceMangledName(tpe0.typeSymbol.name, false)
-        val (name, tpe) = getNameAndType(in.getChar(start + 3), ownerTpe)
-        if (name == nme.MODULE_INSTANCE_FIELD) {
-          val index = in.getChar(start + 1)
-          val name = getExternalName(in.getChar(starts(index) + 1))
-          //assert(name.endsWith("$"), "Not a module class: " + name)
-          f = forceMangledName(name dropRight 1, true)
-          if (f == NoSymbol)
-            f = rootMirror.getModule(name dropRight 1)
-        } else {
-          val origName = nme.originalName(name)
-          val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol
-          f = owner.info.findMember(origName, 0, 0, false).suchThat(_.tpe.widen =:= tpe)
-          if (f == NoSymbol)
-            f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
-          if (f == NoSymbol) {
-            // if it's an impl class, try to find it's static member inside the class
-            if (ownerTpe.typeSymbol.isImplClass) {
-              f = ownerTpe.findMember(origName, 0, 0, false).suchThat(_.tpe =:= tpe)
-            } else {
-              log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
-              f = tpe match {
-                case MethodType(_, _) => owner.newMethod(name, owner.pos)
-                case _                => owner.newVariable(name, owner.pos)
-              }
-              f setInfo tpe
-              log("created fake member " + f.fullName)
-            }
-          }
-        }
-        assert(f != NoSymbol,
-          s"could not find $name: $tpe in $ownerTpe" + (
-            if (settings.debug.value) ownerTpe.members.mkString(", members are:\n  ", "\n  ", "") else ""
-          )
-        )
-        values(index) = f
-      }
-      f
+      val start = firstExpecting(index, CONSTANT_CLASS)
+      getExternalName((in getChar start).toInt)
     }
 
     /** Return a name and a type at the given index. If the type is a method
@@ -270,96 +243,69 @@ abstract class ClassfileParser {
      *  if a symbol of the given name already exists, and has a different
      *  type.
      */
-    private def getNameAndType(index: Int, ownerTpe: Type): (Name, Type) = {
+    protected def getNameAndType(index: Int, ownerTpe: Type): (Name, Type) = {
       if (index <= 0 || len <= index) errorBadIndex(index)
-      var p = values(index).asInstanceOf[(Name, Type)]
-      if (p eq null) {
-        val start = starts(index)
-        if (in.buf(start).toInt != CONSTANT_NAMEANDTYPE) errorBadTag(start)
-        val name = getName(in.getChar(start + 1).toInt)
-        // create a dummy symbol for method types
-        val dummySym = ownerTpe.typeSymbol.newMethod(name, ownerTpe.typeSymbol.pos)
-        var tpe  = getType(dummySym, in.getChar(start + 3).toInt)
-
-        // fix the return type, which is blindly set to the class currently parsed
-        if (name == nme.CONSTRUCTOR)
-          tpe match {
-            case MethodType(formals, restpe) =>
-              tpe = MethodType(formals, ownerTpe)
+      values(index) match {
+        case p: ((Name @unchecked, Type @unchecked)) => p
+        case _                                       =>
+          val start = firstExpecting(index, CONSTANT_NAMEANDTYPE)
+          val name = getName(in.getChar(start).toInt)
+          // create a dummy symbol for method types
+          val dummy = ownerTpe.typeSymbol.newMethod(name.toTermName, ownerTpe.typeSymbol.pos)
+          val tpe   = getType(dummy, in.getChar(start + 2).toInt)
+          // fix the return type, which is blindly set to the class currently parsed
+          val restpe = tpe match {
+            case MethodType(formals, _) if name == nme.CONSTRUCTOR => MethodType(formals, ownerTpe)
+            case _                                                 => tpe
           }
-
-        p = (name, tpe)
+          ((name, restpe))
       }
-      p
     }
 
     /** Return the type of a class constant entry. Since
      *  arrays are considered to be class types, they might
      *  appear as entries in 'newarray' or 'cast' opcodes.
      */
-    def getClassOrArrayType(index: Int): Type = {
+    def getClassOrArrayType(index: Int): Type = (
       if (index <= 0 || len <= index) errorBadIndex(index)
-      val value = values(index)
-      var c: Type = null
-      if (value eq null) {
-        val start = starts(index)
-        if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
-        val name = getExternalName(in.getChar(start + 1))
-        if (name.charAt(0) == ARRAY_TAG) {
-          c = sigToType(null, name)
-          values(index) = c
-        } else {
-          val sym = classNameToSymbol(name)
-                  /*if (name.endsWith("$")) definitions.getModule(name.subName(0, name.length - 1))
-                    else if (name.endsWith("$class")) definitions.getModule(name)
-                    else definitions.getClass(name)*/
-          values(index) = sym
-          c = sym.tpe
-        }
-      } else c = value match {
-          case tp: Type => tp
-          case cls: Symbol => cls.tpe
+      else values(index) match {
+        case tp: Type    => tp
+        case cls: Symbol => cls.tpe_*
+        case _           =>
+          val name = getClassName(index)
+          name charAt 0 match {
+            case ARRAY_TAG => recordAtIndex(sigToType(null, name), index)
+            case _         => recordAtIndex(classNameToSymbol(name), index).tpe_*
+          }
       }
-      c
-    }
-
-    def getType(index: Int): Type = getType(null, index)
-
-    def getType(sym: Symbol, index: Int): Type =
-      sigToType(sym, getExternalName(index))
+    )
 
-    def getSuperClass(index: Int): Symbol =
-      if (index == 0) definitions.AnyClass else getClassSymbol(index)
+    def getType(index: Int): Type              = getType(null, index)
+    def getType(sym: Symbol, index: Int): Type = sigToType(sym, getExternalName(index))
+    def getSuperClass(index: Int): Symbol      = if (index == 0) AnyClass else getClassSymbol(index)
 
-    def getConstant(index: Int): Constant = {
+    private def createConstant(index: Int): Constant = {
+      val start = starts(index)
+      Constant((in.buf(start).toInt: @switch) match {
+        case CONSTANT_STRING  => getName(in.getChar(start + 1).toInt).toString
+        case CONSTANT_INTEGER => in.getInt(start + 1)
+        case CONSTANT_FLOAT   => in.getFloat(start + 1)
+        case CONSTANT_LONG    => in.getLong(start + 1)
+        case CONSTANT_DOUBLE  => in.getDouble(start + 1)
+        case CONSTANT_CLASS   => getClassOrArrayType(index).typeSymbol.tpe_* // !!! Is this necessary or desirable?
+        case _                => errorBadTag(start)
+      })
+    }
+    def getConstant(index: Char): Constant = getConstant(index.toInt)
+    def getConstant(index: Int): Constant = (
       if (index <= 0 || len <= index) errorBadIndex(index)
-      var value = values(index)
-      if (value eq null) {
-        val start = starts(index)
-        value = (in.buf(start).toInt: @switch) match {
-          case CONSTANT_STRING =>
-            Constant(getName(in.getChar(start + 1).toInt).toString)
-          case CONSTANT_INTEGER =>
-            Constant(in.getInt(start + 1))
-          case CONSTANT_FLOAT =>
-            Constant(in.getFloat(start + 1))
-          case CONSTANT_LONG =>
-            Constant(in.getLong(start + 1))
-          case CONSTANT_DOUBLE =>
-            Constant(in.getDouble(start + 1))
-          case CONSTANT_CLASS =>
-            getClassOrArrayType(index).typeSymbol
-          case _ =>
-            errorBadTag(start)
-        }
-        values(index) = value
+      else values(index) match {
+        case  const: Constant => const
+        case sym: Symbol      => Constant(sym.tpe_*)
+        case tpe: Type        => Constant(tpe)
+        case _                => recordAtIndex(createConstant(index), index)
       }
-      value match {
-        case  ct: Constant => ct
-        case cls: Symbol   => Constant(cls.tpe)
-        case arr: Type     => Constant(arr)
-      }
-    }
+    )
 
     private def getSubArray(bytes: Array[Byte]): Array[Byte] = {
       val decodedLength = ByteCodecs.decode(bytes)
@@ -368,71 +314,45 @@ abstract class ClassfileParser {
       arr
     }
 
-    def getBytes(index: Int): Array[Byte] = {
+    def getBytes(index: Int): Array[Byte] = (
       if (index <= 0 || len <= index) errorBadIndex(index)
-      var value = values(index).asInstanceOf[Array[Byte]]
-      if (value eq null) {
-        val start = starts(index)
-        if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
-        val len   = in.getChar(start + 1)
-        val bytes = new Array[Byte](len)
-        System.arraycopy(in.buf, start + 3, bytes, 0, len)
-        value = getSubArray(bytes)
-        values(index) = value
+      else values(index) match {
+        case xs: Array[Byte] => xs
+        case _               =>
+          val start = firstExpecting(index, CONSTANT_UTF8)
+          val len   = (in getChar start).toInt
+          val bytes = new Array[Byte](len)
+          System.arraycopy(in.buf, start + 2, bytes, 0, len)
+          recordAtIndex(getSubArray(bytes), index)
       }
-      value
-    }
+    )
 
     def getBytes(indices: List[Int]): Array[Byte] = {
-      assert(!indices.isEmpty, indices)
-      var value = values(indices.head).asInstanceOf[Array[Byte]]
-      if (value eq null) {
-        val bytesBuffer = ArrayBuffer.empty[Byte]
-        for (index <- indices) {
-          if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index)
-          val start = starts(index)
-          if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
-          val len = in.getChar(start + 1)
-          bytesBuffer ++= in.buf.view(start + 3, start + 3 + len)
-        }
-        value = getSubArray(bytesBuffer.toArray)
-        values(indices.head) = value
+      val head = indices.head
+      values(head) match {
+        case xs: Array[Byte] => xs
+        case _               =>
+          val arr: Array[Byte] = indices.toArray flatMap { index =>
+            if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index)
+            val start = firstExpecting(index, CONSTANT_UTF8)
+            val len   = (in getChar start).toInt
+            in.buf drop start + 2 take len
+          }
+          recordAtIndex(getSubArray(arr), head)
       }
-      value
     }
 
     /** Throws an exception signaling a bad constant index. */
-    private def errorBadIndex(index: Int) =
-      throw new RuntimeException("bad constant pool index: " + index + " at pos: " + in.bp)
+    protected def errorBadIndex(index: Int) =
+      abort(s"bad constant pool index: $index at pos: ${in.bp}")
 
     /** Throws an exception signaling a bad tag at given address. */
-    private def errorBadTag(start: Int) =
-      throw new RuntimeException("bad constant pool tag " + in.buf(start) + " at byte " + start)
-  }
-
-  /** Try to force the chain of enclosing classes for the given name. Otherwise
-   *  flatten would not lift classes that were not referenced in the source code.
-   */
-  def forceMangledName(name: Name, module: Boolean): Symbol = {
-    val parts = name.decode.toString.split(Array('.', '$'))
-    var sym: Symbol = rootMirror.RootClass
-
-    // was "at flatten.prev"
-    beforeFlatten {
-      for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) {
-        val sym1 = beforeIcode {
-          sym.linkedClassOfClass.info
-          sym.info.decl(part.encode)
-        }//.suchThat(module == _.isModule)
-
-        sym = sym1 orElse sym.info.decl(part.encode.toTypeName)
-      }
-    }
-    sym
+    protected def errorBadTag(start: Int) =
+      abort(s"bad constant pool tag ${in.buf(start)} at byte $start")
   }
 
   private def loadClassSymbol(name: Name): Symbol = {
-    val file = global.classPath findSourceFile ("" +name) getOrElse {
+    val file = classPath findSourceFile ("" +name) getOrElse {
       // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
       // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
       // that are not in their correct place (see bug for details)
@@ -440,7 +360,7 @@ abstract class ClassfileParser {
         warning(s"Class $name not found - continuing with a stub.")
       return NoSymbol.newClass(name.toTypeName)
     }
-    val completer     = new global.loaders.ClassfileLoader(file)
+    val completer     = new loaders.ClassfileLoader(file)
     var owner: Symbol = rootMirror.RootClass
     var sym: Symbol   = NoSymbol
     var ss: Name      = null
@@ -451,7 +371,7 @@ abstract class ClassfileParser {
       ss = name.subName(start, end)
       sym = owner.info.decls lookup ss
       if (sym == NoSymbol) {
-        sym = owner.newPackage(ss) setInfo completer
+        sym = owner.newPackage(ss.toTermName) setInfo completer
         sym.moduleClass setInfo completer
         owner.info.decls enter sym
       }
@@ -466,6 +386,7 @@ abstract class ClassfileParser {
       sym
     }
   }
+
   /** FIXME - we shouldn't be doing ad hoc lookups in the empty package.
    *  The method called "getClassByName" should either return the class or not.
    */
@@ -485,30 +406,27 @@ abstract class ClassfileParser {
       catch { case _: FatalError => loadClassSymbol(name) }
   }
 
-  var sawPrivateConstructor = false
-
   def parseClass() {
-    val jflags       = in.nextChar
-    val isAnnotation = hasAnnotation(jflags)
-    var sflags       = toScalaClassFlags(jflags)
-    var nameIdx      = in.nextChar
-    currentClass     = pool.getClassName(nameIdx)
-
-    /** Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled.
-     *  Updates the read pointer of 'in'. */
+    val jflags   = readClassFlags()
+    val sflags   = jflags.toScalaFlags
+    val nameIdx  = u2
+    currentClass = pool.getClassName(nameIdx)
+
+    /* Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled.
+     * Updates the read pointer of 'in'. */
     def parseParents: List[Type] = {
       if (isScala) {
-        in.nextChar              // skip superclass
-        val ifaces = in.nextChar
-        in.bp += ifaces * 2     // .. and iface count interfaces
-        List(definitions.AnyRefClass.tpe) // dummy superclass, will be replaced by pickled information
+        u2                    // skip superclass
+        val ifaces = u2
+        in.bp += ifaces * 2   // .. and skip that many interface entries
+        List(AnyRefTpe) // dummy superclass, will be replaced by pickled information
       }
       else raiseLoaderLevel {
-        val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe }
-                        else pool.getSuperClass(in.nextChar).tpe
-        val ifaceCount = in.nextChar
-        var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe
-        if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces
+        val superType = if (jflags.isAnnotation) { u2; AnnotationClass.tpe }
+                        else pool.getSuperClass(u2).tpe_*
+        val ifaceCount = u2
+        var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(u2).tpe_*
+        if (jflags.isAnnotation) ifaces ::= ClassfileAnnotationClass.tpe
         superType :: ifaces
       }
     }
@@ -533,31 +451,30 @@ abstract class ClassfileParser {
     val staticInfo = ClassInfoType(List(), staticScope, moduleClass)
 
     if (!isScala && !isScalaRaw)
-      enterOwnInnerClasses
+      enterOwnInnerClasses()
 
     val curbp = in.bp
     skipMembers() // fields
     skipMembers() // methods
     if (!isScala) {
       clazz setFlag sflags
-      importPrivateWithinFromJavaFlags(clazz, jflags)
-      importPrivateWithinFromJavaFlags(staticModule, jflags)
-      clazz.setInfo(classInfo)
+      propagatePackageBoundary(jflags, clazz, staticModule, staticModule.moduleClass)
+      clazz setInfo classInfo
       moduleClass setInfo staticInfo
-      staticModule.setInfo(moduleClass.tpe)
-      staticModule.setFlag(JAVA)
-      staticModule.moduleClass.setFlag(JAVA)
+      staticModule setInfo moduleClass.tpe
+      staticModule setFlag JAVA
+      staticModule.moduleClass setFlag JAVA
       // attributes now depend on having infos set already
       parseAttributes(clazz, classInfo)
 
       def queueLoad() {
         in.bp = curbp
-        0 until in.nextChar foreach (_ => parseField())
+        0 until u2 foreach (_ => parseField())
         sawPrivateConstructor = false
-        0 until in.nextChar foreach (_ => parseMethod())
+        0 until u2 foreach (_ => parseMethod())
         val needsConstructor = (
              !sawPrivateConstructor
-          && instanceScope.lookup(nme.CONSTRUCTOR) == NoSymbol
+          && !(instanceScope containsName nme.CONSTRUCTOR)
           && (sflags & INTERFACE) == 0
         )
         if (needsConstructor)
@@ -588,70 +505,76 @@ abstract class ClassfileParser {
   }
 
   def parseField() {
-    val jflags = in.nextChar
-    var sflags = toScalaFieldFlags(jflags)
-    if ((sflags & PRIVATE) != 0L && !global.settings.optimise.value) {
+    val jflags = readFieldFlags()
+    val sflags = jflags.toScalaFlags
+
+    if ((sflags & PRIVATE) != 0L && !optimized) {
       in.skip(4); skipAttributes()
     } else {
-      val name    = pool.getName(in.nextChar)
-      val info    = pool.getType(in.nextChar)
-      val sym     = getOwner(jflags).newValue(name, NoPosition, sflags)
-      val isEnum  = (jflags & JAVA_ACC_ENUM) != 0
+      val name    = readName()
+      val info    = readType()
+      val sym     = ownerForFlags(jflags).newValue(name.toTermName, NoPosition, sflags)
 
+      // Note: the info may be overwritten later with a generic signature
+      // parsed from SignatureATTR
       sym setInfo {
-        if (isEnum) ConstantType(Constant(sym))
+        if (jflags.isEnum) ConstantType(Constant(sym))
         else info
       }
-      importPrivateWithinFromJavaFlags(sym, jflags)
+      propagatePackageBoundary(jflags, sym)
       parseAttributes(sym, info)
-      getScope(jflags).enter(sym)
+      getScope(jflags) enter sym
 
       // sealed java enums
-      if (isEnum) {
+      if (jflags.isEnum) {
         val enumClass = sym.owner.linkedClassOfClass
-        if (!enumClass.isSealed)
-          enumClass setFlag (SEALED | ABSTRACT)
-
-        enumClass addChild sym
+        enumClass match {
+          case NoSymbol =>
+            devWarning(s"no linked class for java enum $sym in ${sym.owner}. A referencing class file might be missing an InnerClasses entry.")
+          case linked =>
+            if (!linked.isSealed)
+              linked setFlag (SEALED | ABSTRACT)
+            linked addChild sym
+        }
       }
     }
   }
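To make the enum handling above concrete (an illustrative sketch; RoundingMode is just a convenient JDK enum, not something this patch touches): because the linked enum class is flagged SEALED | ABSTRACT and each constant is registered as a child, the pattern matcher can treat a match over a Java enum much like a match over a sealed Scala hierarchy.

    import java.math.RoundingMode

    def describe(m: RoundingMode): String = m match {
      case RoundingMode.UP | RoundingMode.CEILING => "away from zero, or up"
      case RoundingMode.DOWN | RoundingMode.FLOOR => "toward zero, or down"
      case RoundingMode.HALF_UP | RoundingMode.HALF_DOWN | RoundingMode.HALF_EVEN => "to nearest"
      case RoundingMode.UNNECESSARY => "no rounding allowed"
    }
    // Dropping one of the constants should draw a non-exhaustive-match warning,
    // since the enum class is now seen as sealed with known children.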
 
   def parseMethod() {
-    val jflags = in.nextChar.toInt
-    var sflags = toScalaMethodFlags(jflags)
-    if (isPrivate(jflags) && !global.settings.optimise.value) {
-      val name = pool.getName(in.nextChar)
+    val jflags = readMethodFlags()
+    val sflags = jflags.toScalaFlags
+    if (jflags.isPrivate && !optimized) {
+      val name = readName()
       if (name == nme.CONSTRUCTOR)
         sawPrivateConstructor = true
       in.skip(2); skipAttributes()
     } else {
-      if ((sflags & PRIVATE) != 0L && global.settings.optimise.value) { // TODO this should be !optimize, no? See c4181f656d.
+      if ((sflags & PRIVATE) != 0L && optimized) { // TODO this should be !optimized, no? See c4181f656d.
         in.skip(4); skipAttributes()
       } else {
-        val name = pool.getName(in.nextChar)
-        val sym = getOwner(jflags).newMethod(name, NoPosition, sflags)
-        var info = pool.getType(sym, (in.nextChar))
+        val name = readName()
+        val sym = ownerForFlags(jflags).newMethod(name.toTermName, NoPosition, sflags)
+        var info = pool.getType(sym, u2)
         if (name == nme.CONSTRUCTOR)
           info match {
             case MethodType(params, restpe) =>
               // if this is a non-static inner class, remove the explicit outer parameter
               val paramsNoOuter = innerClasses getEntry currentClass match {
-                case Some(entry) if !isScalaRaw && !isStatic(entry.jflags) =>
-                  /* About `clazz.owner.isPackage` below: SI-5957
+                case Some(entry) if !isScalaRaw && !entry.jflags.isStatic =>
+                  /* About `clazz.owner.hasPackageFlag` below: SI-5957
                    * For every nested java class A$B, there are two symbols in the scala compiler.
                    *  1. created by SymbolLoader, because of the existence of the A$B.class file, owner: package
                    *  2. created by ClassfileParser of A when reading the inner classes, owner: A
                    * If symbol 1 gets completed (e.g. because the compiled source mentions `A$B`, not `A#B`), the
                    * ClassfileParser for 1 executes, and clazz.owner is the package.
                    */
-                  assert(params.head.tpe.typeSymbol == clazz.owner || clazz.owner.isPackage, params.head.tpe.typeSymbol + ": " + clazz.owner)
+                  assert(params.head.tpe.typeSymbol == clazz.owner || clazz.owner.hasPackageFlag, params.head.tpe.typeSymbol + ": " + clazz.owner)
                   params.tail
                 case _ =>
                   params
               }
               val newParams = paramsNoOuter match {
-                case (init :+ tail) if (jflags & JAVA_ACC_SYNTHETIC) != 0L =>
+                case (init :+ tail) if jflags.isSynthetic =>
                   // SI-7455 strip trailing dummy argument ("access constructor tag") from synthetic constructors which
                   // are added when an inner class needs to access a private constructor.
                   init
@@ -661,13 +584,15 @@ abstract class ClassfileParser {
 
               info = MethodType(newParams, clazz.tpe)
           }
-        sym.setInfo(info)
-        importPrivateWithinFromJavaFlags(sym, jflags)
+        // Note: the info may be overwritten later with a generic signature
+        // parsed from SignatureATTR
+        sym setInfo info
+        propagatePackageBoundary(jflags, sym)
         parseAttributes(sym, info)
-        if ((jflags & JAVA_ACC_VARARGS) != 0) {
-          sym.setInfo(arrayToRepeated(sym.info))
-        }
-        getScope(jflags).enter(sym)
+        if (jflags.isVarargs)
+          sym modifyInfo arrayToRepeated
+
+        getScope(jflags) enter sym
       }
     }
   }
@@ -687,15 +612,15 @@ abstract class ClassfileParser {
     def sig2type(tparams: immutable.Map[Name,Symbol], skiptvs: Boolean): Type = {
       val tag = sig.charAt(index); index += 1
       tag match {
-        case BYTE_TAG   => definitions.ByteClass.tpe
-        case CHAR_TAG   => definitions.CharClass.tpe
-        case DOUBLE_TAG => definitions.DoubleClass.tpe
-        case FLOAT_TAG  => definitions.FloatClass.tpe
-        case INT_TAG    => definitions.IntClass.tpe
-        case LONG_TAG   => definitions.LongClass.tpe
-        case SHORT_TAG  => definitions.ShortClass.tpe
-        case VOID_TAG   => definitions.UnitClass.tpe
-        case BOOL_TAG   => definitions.BooleanClass.tpe
+        case BYTE_TAG   => ByteTpe
+        case CHAR_TAG   => CharTpe
+        case DOUBLE_TAG => DoubleTpe
+        case FLOAT_TAG  => FloatTpe
+        case INT_TAG    => IntTpe
+        case LONG_TAG   => LongTpe
+        case SHORT_TAG  => ShortTpe
+        case VOID_TAG   => UnitTpe
+        case BOOL_TAG   => BooleanTpe
         case 'L' =>
           def processInner(tp: Type): Type = tp match {
             case TypeRef(pre, sym, args) if (!sym.isStatic) =>
@@ -720,7 +645,7 @@ abstract class ClassfileParser {
                           val tp = sig2type(tparams, skiptvs)
                           // sig2type seems to return AnyClass regardless of the situation:
                           // we don't want Any as a LOWER bound.
-                          if (tp.typeSymbol == definitions.AnyClass) TypeBounds.empty
+                          if (tp.typeSymbol == AnyClass) TypeBounds.empty
                           else TypeBounds.lower(tp)
                         case '*' => TypeBounds.empty
                       }
@@ -734,14 +659,16 @@ abstract class ClassfileParser {
                 }
                 accept('>')
                 assert(xs.length > 0, tp)
-                newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList))
-              } else if (classSym.isMonomorphicType) {
-                tp
-              } else {
+                debuglogResult("new existential")(newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList)))
+              }
+              // isMonomorphicType is false if the info is incomplete, as it usually is here
+              // so have to check unsafeTypeParams.isEmpty before worrying about raw type case below,
+              // or we'll create a boatload of needless existentials.
+              else if (classSym.isMonomorphicType || classSym.unsafeTypeParams.isEmpty) tp
+              else debuglogResult(s"raw type from $classSym"){
                 // raw type - existentially quantify all type parameters
                 val eparams = typeParamsToExistentials(classSym, classSym.unsafeTypeParams)
-                val t = typeRef(pre, classSym, eparams.map(_.tpeHK))
-                newExistentialType(eparams, t)
+                newExistentialType(eparams, typeRef(pre, classSym, eparams.map(_.tpeHK)))
               }
             case tp =>
               assert(sig.charAt(index) != '<', s"sig=$sig, index=$index, tp=$tp")
@@ -750,12 +677,14 @@ abstract class ClassfileParser {
 
           val classSym = classNameToSymbol(subName(c => c == ';' || c == '<'))
           assert(!classSym.isOverloaded, classSym.alternatives)
-          var tpe = processClassType(processInner(classSym.tpe))
+          var tpe = processClassType(processInner(classSym.tpe_*))
           while (sig.charAt(index) == '.') {
             accept('.')
             val name = subName(c => c == ';' || c == '<' || c == '.').toTypeName
             val clazz = tpe.member(name)
-            tpe = processClassType(processInner(clazz.tpe))
+            val dummyArgs = Nil // the actual arguments are added in processClassType
+            val inner = typeRef(pre = tpe, sym = clazz, args = dummyArgs)
+            tpe = processClassType(inner)
           }
           accept(';')
           tpe
@@ -768,11 +697,11 @@ abstract class ClassfileParser {
           // NOTE that the comparison to Object only works for abstract types bounded by classes that are strict subclasses of Object
           // if the bound is exactly Object, it will have been converted to Any, and the comparison will fail
           // see also RestrictJavaArraysMap (when compiling java sources directly)
-          if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe)) {
-            elemtp = intersectionType(List(elemtp, definitions.ObjectClass.tpe))
+          if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectTpe)) {
+            elemtp = intersectionType(List(elemtp, ObjectTpe))
           }
 
-          definitions.arrayType(elemtp)
+          arrayType(elemtp)
         case '(' =>
           // we need a method symbol. given in line 486 by calling getType(methodSym, ..)
           assert(sym ne null, sig)
@@ -783,14 +712,14 @@ abstract class ClassfileParser {
           index += 1
           val restype = if (sym != null && sym.isClassConstructor) {
             accept('V')
-            clazz.tpe
+            clazz.tpe_*
           } else
             sig2type(tparams, skiptvs)
           JavaMethodType(sym.newSyntheticValueParams(paramtypes.toList), restype)
         case 'T' =>
           val n = subName(';'.==).toTypeName
           index += 1
-          if (skiptvs) definitions.AnyClass.tpe
+          if (skiptvs) AnyTpe
           else tparams(n).typeConstructor
       }
     } // sig2type(tparams, skiptvs)
@@ -815,14 +744,14 @@ abstract class ClassfileParser {
         val tpname = subName(':'.==).toTypeName
         val s = sym.newTypeParameter(tpname)
         tparams = tparams + (tpname -> s)
-        sig2typeBounds(tparams, true)
+        sig2typeBounds(tparams, skiptvs = true)
         newTParams += s
       }
       index = start
       while (sig.charAt(index) != '>') {
         val tpname = subName(':'.==).toTypeName
         val s = tparams(tpname)
-        s.setInfo(sig2typeBounds(tparams, false))
+        s.setInfo(sig2typeBounds(tparams, skiptvs = false))
       }
       accept('>')
     }
@@ -831,36 +760,32 @@ abstract class ClassfileParser {
       sym.setInfo(new TypeParamsType(ownTypeParams))
     val tpe =
       if ((sym eq null) || !sym.isClass)
-        sig2type(tparams, false)
+        sig2type(tparams, skiptvs = false)
       else {
         classTParams = tparams
         val parents = new ListBuffer[Type]()
         while (index < end) {
-          parents += sig2type(tparams, false)  // here the variance doesnt'matter
+          parents += sig2type(tparams, skiptvs = false)  // here the variance doesn't matter
         }
         ClassInfoType(parents.toList, instanceScope, sym)
       }
     GenPolyType(ownTypeParams, tpe)
   } // sigToType
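Since sig2type above walks the JVM generic-signature grammar, a few sample inputs may help orient the reader. These pairs are illustrative only; the right-hand strings describe the Scala types the parser is expected to produce, they are not compiler output.

    object SignatureSamples {
      val samples = List(
        "I"                                    -> "Int",                     // primitive tag
        "[Ljava/lang/String;"                  -> "Array[String]",           // ARRAY_TAG + class type
        "Ljava/util/List<Ljava/lang/String;>;" -> "java.util.List[String]",  // parameterized class type
        "Ljava/util/Map<TK;TV;>;"              -> "java.util.Map[K, V]",     // type variables (the 'T' case)
        "(ILjava/lang/String;)V"               -> "(Int, String): Unit"      // method descriptor (the '(' case)
      )
      def main(args: Array[String]): Unit =
        samples foreach { case (sig, tpe) => println(f"$sig%-40s => $tpe") }
    }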
 
-  class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
-    override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
-  }
-
   def parseAttributes(sym: Symbol, symtype: Type) {
     def convertTo(c: Constant, pt: Type): Constant = {
-      if (pt.typeSymbol == definitions.BooleanClass && c.tag == IntTag)
+      if (pt.typeSymbol == BooleanClass && c.tag == IntTag)
         Constant(c.value != 0)
       else
         c convertTo pt
     }
     def parseAttribute() {
-      val attrName = pool.getName(in.nextChar).toTypeName
-      val attrLen = in.nextInt
+      val attrName = readTypeName()
+      val attrLen  = u4
       attrName match {
         case tpnme.SignatureATTR =>
           if (!isScala && !isScalaRaw) {
-            val sig = pool.getExternalName(in.nextChar)
+            val sig = pool.getExternalName(u2)
             val newType = sigToType(sym, sig)
             sym.setInfo(newType)
           }
@@ -869,14 +794,14 @@ abstract class ClassfileParser {
           sym.setFlag(SYNTHETIC | ARTIFACT)
           in.skip(attrLen)
         case tpnme.BridgeATTR =>
-          sym.setFlag(BRIDGE)
+          sym.setFlag(BRIDGE | ARTIFACT)
           in.skip(attrLen)
         case tpnme.DeprecatedATTR =>
           val arg = Literal(Constant("see corresponding Javadoc for more information."))
-          sym.addAnnotation(definitions.DeprecatedAttr, arg, Literal(Constant("")))
+          sym.addAnnotation(DeprecatedAttr, arg, Literal(Constant("")))
           in.skip(attrLen)
         case tpnme.ConstantValueATTR =>
-          val c = pool.getConstant(in.nextChar)
+          val c = pool.getConstant(u2)
           val c1 = convertTo(c, symtype)
           if (c1 ne null) sym.setInfo(ConstantType(c1))
           else debugwarn(s"failure to convert $c to $symtype")
@@ -890,7 +815,7 @@ abstract class ClassfileParser {
           isScalaRaw = true
          // Attribute on methods of java annotation classes when that method has a default
         case tpnme.AnnotationDefaultATTR =>
-          sym.addAnnotation(definitions.AnnotationDefaultAttr)
+          sym.addAnnotation(AnnotationDefaultAttr)
           in.skip(attrLen)
         // Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME
         case tpnme.RuntimeAnnotationATTR =>
@@ -920,7 +845,7 @@ abstract class ClassfileParser {
           parseExceptions(attrLen)
 
         case tpnme.SourceFileATTR =>
-          val srcfileLeaf = pool.getName(in.nextChar).toString.trim
+          val srcfileLeaf = readName().toString.trim
           val srcpath = sym.enclosingPackage match {
             case NoSymbol => srcfileLeaf
             case rootMirror.EmptyPackage => srcfileLeaf
@@ -939,8 +864,8 @@ abstract class ClassfileParser {
     }
 
     def parseAnnotArg: Option[ClassfileAnnotArg] = {
-      val tag = in.nextByte.toChar
-      val index = in.nextChar
+      val tag = u1
+      val index = u2
       tag match {
         case STRING_TAG =>
           Some(LiteralAnnotArg(Constant(pool.getName(index).toString)))
@@ -951,7 +876,7 @@ abstract class ClassfileParser {
           Some(LiteralAnnotArg(Constant(pool.getType(index))))
         case ENUM_TAG   =>
           val t = pool.getType(index)
-          val n = pool.getName(in.nextChar)
+          val n = readName()
           val module = t.typeSymbol.companionModule
           val s = module.info.decls.lookup(n)
           if (s != NoSymbol) Some(LiteralAnnotArg(Constant(s)))
@@ -976,43 +901,43 @@ abstract class ClassfileParser {
     }
 
     def parseScalaSigBytes: Option[ScalaSigBytes] = {
-      val tag = in.nextByte.toChar
+      val tag = u1
       assert(tag == STRING_TAG, tag)
-      Some(ScalaSigBytes(pool getBytes in.nextChar))
+      Some(ScalaSigBytes(pool getBytes u2))
     }
 
     def parseScalaLongSigBytes: Option[ScalaSigBytes] = {
-      val tag = in.nextByte.toChar
+      val tag = u1
       assert(tag == ARRAY_TAG, tag)
-      val stringCount = in.nextChar
+      val stringCount = u2
       val entries =
         for (i <- 0 until stringCount) yield {
-          val stag = in.nextByte.toChar
+          val stag = u1
           assert(stag == STRING_TAG, stag)
-          in.nextChar.toInt
+          u2
         }
       Some(ScalaSigBytes(pool.getBytes(entries.toList)))
     }
 
-    /** Parse and return a single annotation.  If it is malformed,
-     *  return None.
+    /* Parse and return a single annotation.  If it is malformed,
+     * return None.
      */
-    def parseAnnotation(attrNameIndex: Char): Option[AnnotationInfo] = try {
+    def parseAnnotation(attrNameIndex: Int): Option[AnnotationInfo] = try {
       val attrType = pool.getType(attrNameIndex)
-      val nargs = in.nextChar
+      val nargs = u2
       val nvpairs = new ListBuffer[(Name, ClassfileAnnotArg)]
       var hasError = false
       for (i <- 0 until nargs) {
-        val name = pool.getName(in.nextChar)
+        val name = readName()
         // The "bytes: String" argument of the ScalaSignature attribute is parsed specially so that it is
         // available as an array of bytes (the pickled Scala signature) instead of as a string. The pickled signature
         // is encoded as a string because of limitations in the Java class file format.
-        if ((attrType == definitions.ScalaSignatureAnnotation.tpe) && (name == nme.bytes))
+        if ((attrType == ScalaSignatureAnnotation.tpe) && (name == nme.bytes))
           parseScalaSigBytes match {
             case Some(c) => nvpairs += ((name, c))
             case None => hasError = true
           }
-        else if ((attrType == definitions.ScalaLongSignatureAnnotation.tpe) && (name == nme.bytes))
+        else if ((attrType == ScalaLongSignatureAnnotation.tpe) && (name == nme.bytes))
           parseScalaLongSigBytes match {
             case Some(c) => nvpairs += ((name, c))
             case None => hasError = true
@@ -1037,20 +962,20 @@ abstract class ClassfileParser {
         // with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example),
         // and that should never be swallowed silently.
         warning(s"Caught: $ex while parsing annotations in ${in.file}")
-        if (settings.debug.value) ex.printStackTrace()
+        if (settings.debug) ex.printStackTrace()
 
         None // ignore malformed annotations
     }
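The special treatment of the `bytes` argument exists because the pickled Scala signature can only travel through the class file as annotation string data. A small reflective peek at that encoding (illustrative only; it assumes a compiled Scala class such as scala.Some on the classpath whose pickle is short enough to use ScalaSignature rather than ScalaLongSignature):

    import scala.reflect.ScalaSignature

    // The pickle is the String-typed `bytes` argument of a runtime-retained
    // @ScalaSignature annotation on the top-level class; parseScalaSigBytes
    // turns that string back into a byte array.
    val sig = Option(classOf[Some[_]].getAnnotation(classOf[ScalaSignature]))
    sig foreach (ann => println(s"pickled signature: ${ann.bytes.length} chars"))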
 
-    /**
+    /*
      * Parse the "Exceptions" attribute which denotes the exceptions
      * thrown by a method.
      */
     def parseExceptions(len: Int) {
-      val nClasses = in.nextChar
+      val nClasses = u2
       for (n <- 0 until nClasses) {
         // FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (SI-7065)
-        val cls = pool.getClassSymbol(in.nextChar.toInt)
+        val cls = pool.getClassSymbol(u2)
         // we call initialize due to the fact that we call Symbol.isMonomorphicType in addThrowsAnnotation
         // and that method requires Symbol to be forced to give the right answers, see SI-7107 for details
         cls.initialize
@@ -1058,16 +983,16 @@ abstract class ClassfileParser {
       }
     }
 
-    /** Parse a sequence of annotations and attaches them to the
-     *  current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. */
+    /* Parse a sequence of annotations and attach them to the
+     * current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. */
     def parseAnnotations(len: Int): Option[AnnotationInfo] =  {
-      val nAttr = in.nextChar
+      val nAttr = u2
       var scalaSigAnnot: Option[AnnotationInfo] = None
       for (n <- 0 until nAttr)
-        parseAnnotation(in.nextChar) match {
-          case Some(scalaSig) if (scalaSig.atp == definitions.ScalaSignatureAnnotation.tpe) =>
+        parseAnnotation(u2) match {
+          case Some(scalaSig) if (scalaSig.atp == ScalaSignatureAnnotation.tpe) =>
             scalaSigAnnot = Some(scalaSig)
-          case Some(scalaSig) if (scalaSig.atp == definitions.ScalaLongSignatureAnnotation.tpe) =>
+          case Some(scalaSig) if (scalaSig.atp == ScalaLongSignatureAnnotation.tpe) =>
             scalaSigAnnot = Some(scalaSig)
           case Some(annot) =>
             sym.addAnnotation(annot)
@@ -1077,7 +1002,7 @@ abstract class ClassfileParser {
     }
 
     // begin parseAttributes
-    for (i <- 0 until in.nextChar) parseAttribute()
+    for (i <- 0 until u2) parseAttribute()
   }
 
   /** Enter own inner classes in the right scope. It needs the scopes to be set up,
@@ -1087,16 +1012,17 @@ abstract class ClassfileParser {
     def className(name: Name): Name =
       name.subName(name.lastPos('.') + 1, name.length)
 
-    def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) {
-      val completer   = new global.loaders.ClassfileLoader(file)
+    def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile) {
+      def jflags      = entry.jflags
+      val completer   = new loaders.ClassfileLoader(file)
       val name        = entry.originalName
-      var sflags      = toScalaClassFlags(jflags)
-      val owner       = getOwner(jflags)
+      val sflags      = jflags.toScalaFlags
+      val owner       = ownerForFlags(jflags)
       val scope       = getScope(jflags)
       val innerClass  = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
       val innerModule = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
 
-      innerModule.moduleClass setInfo global.loaders.moduleClassLoader
+      innerModule.moduleClass setInfo loaders.moduleClassLoader
       List(innerClass, innerModule.moduleClass) foreach (_.associatedFile = file)
 
       scope enter innerClass
@@ -1117,10 +1043,10 @@ abstract class ClassfileParser {
     for (entry <- innerClasses.entries) {
       // create a new class member for immediate inner classes
       if (entry.outerName == currentClass) {
-        val file = global.classPath.findSourceFile(entry.externalName.toString) getOrElse {
+        val file = classPath.findSourceFile(entry.externalName.toString) getOrElse {
           throw new AssertionError(entry.externalName)
         }
-        enterClassAndModule(entry, file, entry.jflags)
+        enterClassAndModule(entry, file)
       }
     }
   }
@@ -1133,26 +1059,27 @@ abstract class ClassfileParser {
     skipSuperclasses()
     skipMembers() // fields
     skipMembers() // methods
-    val attrs = in.nextChar
+    val attrs = u2
     for (i <- 0 until attrs) {
-      val attrName = pool.getName(in.nextChar).toTypeName
-      val attrLen = in.nextInt
+      val attrName = readTypeName()
+      val attrLen = u4
       attrName match {
         case tpnme.SignatureATTR =>
           in.skip(attrLen)
         case tpnme.ScalaSignatureATTR =>
           isScala = true
           val pbuf = new PickleBuffer(in.buf, in.bp, in.bp + attrLen)
-          pbuf.readNat; pbuf.readNat;
+          pbuf.readNat(); pbuf.readNat()
           if (pbuf.readNat == 0) // a scala signature attribute with no entries means that the actual scala signature
             isScalaAnnot = true    // is in a ScalaSignature annotation.
           in.skip(attrLen)
         case tpnme.ScalaATTR =>
           isScalaRaw = true
         case tpnme.InnerClassesATTR if !isScala =>
-          val entries = in.nextChar.toInt
+          val entries = u2
           for (i <- 0 until entries) {
-            val innerIndex, outerIndex, nameIndex, jflags = in.nextChar.toInt
+            val innerIndex, outerIndex, nameIndex = u2
+            val jflags = readInnerClassFlags()
             if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0)
               innerClasses add InnerClassEntry(innerIndex, outerIndex, nameIndex, jflags)
           }
@@ -1164,31 +1091,19 @@ abstract class ClassfileParser {
   }
 
   /** An entry in the InnerClasses attribute of this class file. */
-  case class InnerClassEntry(external: Int, outer: Int, name: Int, jflags: Int) {
+  case class InnerClassEntry(external: Int, outer: Int, name: Int, jflags: JavaAccFlags) {
     def externalName = pool getClassName external
     def outerName    = pool getClassName outer
     def originalName = pool getName name
-    def isStatic     = ClassfileParser.this.isStatic(jflags)
     def isModule     = originalName.isTermName
-    def scope        = if (isStatic) staticScope else instanceScope
-    def enclosing    = if (isStatic) enclModule else enclClass
+    def scope        = if (jflags.isStatic) staticScope else instanceScope
+    def enclosing    = if (jflags.isStatic) enclModule else enclClass
 
     // The name of the outer class, without its trailing $ if it has one.
     private def strippedOuter = nme stripModuleSuffix outerName
     private def isInner       = innerClasses contains strippedOuter
     private def enclClass     = if (isInner) innerClasses innerSymbol strippedOuter else classNameToSymbol(strippedOuter)
     private def enclModule    = enclClass.companionModule
-
-    private def staticWord = if (isStatic) "static " else ""
-    override def toString = s"$staticWord$originalName in $outerName ($externalName)"
-  }
-
-  /** Return the Symbol of the top level class enclosing `name`,
-   *  or the symbol of `name` itself if no enclosing classes are found.
-   */
-  def topLevelClass(name: Name): Symbol = innerClasses getEntry name match {
-    case Some(entry) => topLevelClass(entry.outerName)
-    case _           => classNameToSymbol(name)
   }
 
   /** Return the class symbol for the given name. It looks it up in its outer class.
@@ -1213,20 +1128,16 @@ abstract class ClassfileParser {
       case Some(entry) => innerSymbol(entry)
       case _           => NoSymbol
     }
-    // if loading during initialization of `definitions` typerPhase is not yet set.
-    // in that case we simply load the member at the current phase
-    @inline private def enteringTyperIfPossible(body: => Symbol): Symbol =
-      if (currentRun.typerPhase eq null) body else beforeTyper(body)
 
     private def innerSymbol(entry: InnerClassEntry): Symbol = {
       val name      = entry.originalName.toTypeName
       val enclosing = entry.enclosing
       def getMember = (
         if (enclosing == clazz) entry.scope lookup name
-        else enclosing.info member name
+        else lookupMemberAtTyperPhaseIfPossible(enclosing, name)
       )
-      enteringTyperIfPossible(getMember)
-      /** There used to be an assertion that this result is not NoSymbol; changing it to an error
+      getMember
+      /*  There used to be an assertion that this result is not NoSymbol; changing it to an error
        *  revealed it had been going off all the time, but has been swallowed by a catch t: Throwable
        *  in Repository.scala. Since it has been accomplishing nothing except misleading anyone who
        *  thought it wasn't triggering, I removed it entirely.
@@ -1234,6 +1145,9 @@ abstract class ClassfileParser {
     }
   }
 
+  class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
+    override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
+  }
   class LazyAliasType(alias: Symbol) extends LazyType with FlagAgnosticCompleter {
     override def complete(sym: Symbol) {
       sym setInfo createFromClonedSymbols(alias.initialize.typeParams, alias.tpe)(typeFun)
@@ -1241,32 +1155,29 @@ abstract class ClassfileParser {
   }
 
   def skipAttributes() {
-    val attrCount = in.nextChar
-    for (i <- 0 until attrCount) {
-      in.skip(2); in.skip(in.nextInt)
+    var attrCount: Int = u2
+    while (attrCount > 0) {
+      in skip 2
+      in skip u4
+      attrCount -= 1
     }
   }
 
   def skipMembers() {
-    val memberCount = in.nextChar
-    for (i <- 0 until memberCount) {
-      in.skip(6); skipAttributes()
+    var memberCount: Int = u2
+    while (memberCount > 0) {
+      in skip 6
+      skipAttributes()
+      memberCount -= 1
     }
   }
 
   def skipSuperclasses() {
     in.skip(2) // superclass
-    val ifaces = in.nextChar
+    val ifaces = u2
     in.skip(2 * ifaces)
   }
 
-  protected def getOwner(flags: Int): Symbol =
-    if (isStatic(flags)) moduleClass else clazz
-
-  protected def getScope(flags: Int): Scope =
-    if (isStatic(flags)) staticScope else instanceScope
-
-  private def isPrivate(flags: Int)     = (flags & JAVA_ACC_PRIVATE) != 0
-  private def isStatic(flags: Int)      = (flags & JAVA_ACC_STATIC) != 0
-  private def hasAnnotation(flags: Int) = (flags & JAVA_ACC_ANNOTATION) != 0
+  protected def getScope(flags: JavaAccFlags): Scope =
+    if (flags.isStatic) staticScope else instanceScope
 }
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index d0c540a..6ca2205 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -3,15 +3,15 @@
  * @author Iulian Dragos
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package symtab
 package classfile
 
 import scala.collection.{ mutable, immutable }
 import mutable.ListBuffer
-import backend.icode._
 import ClassfileConstants._
-import scala.reflect.internal.Flags._
+import scala.reflect.internal.JavaAccFlags
 
 /** ICode reader from Java bytecode.
  *
@@ -20,6 +20,8 @@ import scala.reflect.internal.Flags._
  */
 abstract class ICodeReader extends ClassfileParser {
   val global: Global
+  val symbolTable: global.type
+  val loaders: global.loaders.type
   import global._
   import icodes._
 
@@ -28,12 +30,100 @@ abstract class ICodeReader extends ClassfileParser {
   var method: IMethod = NoIMethod          // the current IMethod
   var isScalaModule = false
 
+  override protected type ThisConstantPool = ICodeConstantPool
+  override protected def newConstantPool = new ICodeConstantPool
+
+  /** Try to force the chain of enclosing classes for the given name. Otherwise
+   *  flatten would not lift classes that were not referenced in the source code.
+   */
+  def forceMangledName(name: Name, module: Boolean): Symbol = {
+    val parts = name.decode.toString.split(Array('.', '$'))
+    var sym: Symbol = rootMirror.RootClass
+
+    // was "at flatten.prev"
+    enteringFlatten {
+      for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) {
+        val sym1 = enteringIcode {
+          sym.linkedClassOfClass.info
+          sym.info.decl(part.encode)
+        }//.suchThat(module == _.isModule)
+
+        sym = sym1 orElse sym.info.decl(part.encode.toTypeName)
+      }
+    }
+    sym
+  }
+
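A tiny illustration of the decomposition step in forceMangledName (the name below is only an example): splitting on '.' and '$' yields the owner chain that the loop walks downward from RootClass.

    // "foo.Bar$Baz$" -> Array("foo", "Bar", "Baz"): package foo, then class Bar,
    // then nested Baz; empty segments (e.g. from a trailing '$') are skipped,
    // which is what the `if !(part0 == "")` guard is for.
    val parts = "foo.Bar$Baz$".split(Array('.', '$')).filter(_.nonEmpty)
    assert(parts.sameElements(Array("foo", "Bar", "Baz")))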
+  protected class ICodeConstantPool extends ConstantPool {
+    /** Return the symbol of the class member at `index`.
+     *  The following special cases exist:
+     *   - If the member refers to the special `MODULE$` static field, return
+     *     the symbol of the corresponding module.
+     *   - If the member is a field and is not found under the given name,
+     *     another try is made by appending `nme.LOCAL_SUFFIX_STRING`.
+     *   - If no symbol is found with the right tpe, a new try is made in the
+     *     companion class, in case the owner is an implementation class.
+     */
+    def getMemberSymbol(index: Int, static: Boolean): Symbol = {
+      if (index <= 0 || len <= index) errorBadIndex(index)
+      var f = values(index).asInstanceOf[Symbol]
+      if (f eq null) {
+        val start = starts(index)
+        val first = in.buf(start).toInt
+        if (first != CONSTANT_FIELDREF &&
+            first != CONSTANT_METHODREF &&
+            first != CONSTANT_INTFMETHODREF) errorBadTag(start)
+        val ownerTpe = getClassOrArrayType(in.getChar(start + 1).toInt)
+        debuglog("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.originalName)
+        val (name0, tpe0) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
+        debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0)
+
+        forceMangledName(tpe0.typeSymbol.name, module = false)
+        val (name, tpe) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
+        if (name == nme.MODULE_INSTANCE_FIELD) {
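+          // MODULE$ is the module instance field; strip the trailing '$' from the owner's name and resolve the module symbol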
+          val index = in.getChar(start + 1).toInt
+          val name = getExternalName(in.getChar(starts(index).toInt + 1).toInt)
+          //assert(name.endsWith("$"), "Not a module class: " + name)
+          f = forceMangledName(name dropRight 1, module = true)
+          if (f == NoSymbol)
+            f = rootMirror.getModuleByName(name dropRight 1)
+        } else {
+          val origName = nme.unexpandedName(name)
+          val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol
+          f = owner.info.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe.widen =:= tpe)
+          if (f == NoSymbol)
+            f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
+          if (f == NoSymbol) {
+            // if it's an impl class, try to find its static member inside the class
+            if (ownerTpe.typeSymbol.isImplClass) {
+              f = ownerTpe.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
+            } else {
+              log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
+              f = tpe match {
+                case MethodType(_, _) => owner.newMethod(name.toTermName, owner.pos)
+                case _                => owner.newVariable(name.toTermName, owner.pos)
+              }
+              f setInfo tpe
+              log("created fake member " + f.fullName)
+            }
+          }
+        }
+        assert(f != NoSymbol,
+          s"could not find $name: $tpe in $ownerTpe" + (
+            if (settings.debug.value) ownerTpe.members.mkString(", members are:\n  ", "\n  ", "") else ""
+          )
+        )
+        values(index) = f
+      }
+      f
+    }
+  }
+
   /** Read back bytecode for the given class symbol. It returns
    *  two IClass objects, one for static members and one
    *  for non-static members.
    */
   def readClass(cls: Symbol): (IClass, IClass) = {
-    var classFile: io.AbstractFile = null;
     cls.info // ensure accurate type information
 
     isScalaModule = cls.isModule && !cls.isJavaDefined
@@ -48,58 +138,55 @@ abstract class ICodeReader extends ClassfileParser {
     (staticCode, instanceCode)
   }
 
-  /** If we're parsing a scala module, the owner of members is always
-   *  the module symbol.
-   */
-  override def getOwner(jflags: Int): Symbol =
-    if (isScalaModule) this.staticModule
-    else super.getOwner(jflags)
-
   override def parseClass() {
     this.instanceCode = new IClass(clazz)
     this.staticCode   = new IClass(staticModule)
-    val jflags = in.nextChar
-    val isAttribute = (jflags & JAVA_ACC_ANNOTATION) != 0
-    val sflags = toScalaClassFlags(jflags)  // what, this is never used??
-    val c = pool getClassSymbol in.nextChar
 
+    u2
+    pool getClassSymbol u2
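+    // the two u2 reads above fetch the class access flags (discarded here) and the this-class constant pool index;
+    // u2/u4 are the unsigned 2- and 4-byte readers inherited from ClassfileParser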
     parseInnerClasses()
 
     in.skip(2)               // super class
-    in.skip(2 * in.nextChar) // interfaces
-    val fieldCount = in.nextChar
+    in.skip(2 * u2) // interfaces
+    val fieldCount = u2
     for (i <- 0 until fieldCount) parseField()
-    val methodCount = in.nextChar
-    for (i <- 0 until methodCount) parseMethod();
+    val methodCount = u2
+    for (i <- 0 until methodCount) parseMethod()
     instanceCode.methods = instanceCode.methods.reverse
     staticCode.methods = staticCode.methods.reverse
   }
 
   override def parseField() {
-    val (jflags, sym) = parseMember(true)
+    val (jflags, sym) = parseMember(field = true)
     getCode(jflags) addField new IField(sym)
     skipAttributes()
   }
 
-  private def parseMember(field: Boolean): (Int, Symbol) = {
-    val jflags   = in.nextChar
-    val name     = pool getName in.nextChar
-    val owner    = getOwner(jflags)
-    val dummySym = owner.newMethod(name, owner.pos, toScalaMethodFlags(jflags))
+  private def parseMember(field: Boolean): (JavaAccFlags, Symbol) = {
+    val jflags   = JavaAccFlags(u2)
+    val name     = pool getName u2
+    /*  If we're parsing a scala module, the owner of members is always
+     *  the module symbol.
+     */
+    val owner = (
+      if (isScalaModule) staticModule
+      else if (jflags.isStatic) moduleClass
+      else clazz
+    )
+    val dummySym = owner.newMethod(name.toTermName, owner.pos, jflags.toScalaFlags)
 
     try {
-      val ch  = in.nextChar
+      val ch  = u2
       val tpe = pool.getType(dummySym, ch)
 
       if ("<clinit>" == name.toString)
         (jflags, NoSymbol)
       else {
-        val owner = getOwner(jflags)
-        var sym = owner.info.findMember(name, 0, 0, false).suchThat(old => sameType(old.tpe, tpe))
+        var sym = owner.info.findMember(name, 0, 0, stableOnly = false).suchThat(old => sameType(old.tpe, tpe))
         if (sym == NoSymbol)
-          sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
+          sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
         if (sym == NoSymbol) {
-          sym = if (field) owner.newValue(name, owner.pos, toScalaFieldFlags(jflags)) else dummySym
+          sym = if (field) owner.newValue(name.toTermName, owner.pos, jflags.toScalaFlags) else dummySym
           sym setInfoAndEnter tpe
           log(s"ICodeReader could not locate ${name.decode} in $owner.  Created ${sym.defString}.")
         }
@@ -124,31 +211,31 @@ abstract class ICodeReader extends ClassfileParser {
   }
 
   override def parseMethod() {
-    val (jflags, sym) = parseMember(false)
-    var beginning = in.bp
+    val (jflags, sym) = parseMember(field = false)
+    val beginning = in.bp
     try {
       if (sym != NoSymbol) {
         this.method = new IMethod(sym)
         this.method.returnType = toTypeKind(sym.tpe.resultType)
         getCode(jflags).addMethod(this.method)
-        if ((jflags & JAVA_ACC_NATIVE) != 0)
+        if (jflags.isNative)
           this.method.native = true
-        val attributeCount = in.nextChar
+        val attributeCount = u2
         for (i <- 0 until attributeCount) parseAttribute()
       } else {
-        debuglog("Skipping non-existent method.");
-        skipAttributes();
+        debuglog("Skipping non-existent method.")
+        skipAttributes()
       }
     } catch {
       case e: MissingRequirementError =>
-        in.bp = beginning; skipAttributes
-        debuglog("Skipping non-existent method. " + e.msg);
+        in.bp = beginning; skipAttributes()
+        debuglog("Skipping non-existent method. " + e.msg)
     }
   }
 
   def parseAttribute() {
-    val attrName = pool.getName(in.nextChar).toTypeName
-    val attrLen = in.nextInt
+    val attrName = pool.getName(u2).toTypeName
+    val attrLen = u4
     attrName match {
       case tpnme.CodeATTR =>
         parseByteCode()
@@ -169,12 +256,12 @@ abstract class ICodeReader extends ClassfileParser {
       rootMirror.getClassByName(name)
     }
     else if (nme.isModuleName(name)) {
-      val strippedName = nme.stripModuleSuffix(name)
-      forceMangledName(newTermName(strippedName.decode), true) orElse rootMirror.getModule(strippedName)
+      val strippedName = name.dropModule
+      forceMangledName(newTermName(strippedName.decode), module = true) orElse rootMirror.getModuleByName(strippedName)
     }
     else {
-      forceMangledName(name, false)
-      afterFlatten(rootMirror.getClassByName(name.toTypeName))
+      forceMangledName(name, module = false)
+      exitingFlatten(rootMirror.getClassByName(name.toTypeName))
     }
     if (sym.isModule)
       sym.moduleClass
@@ -192,9 +279,9 @@ abstract class ICodeReader extends ClassfileParser {
 
   /** Parse java bytecode into ICode */
   def parseByteCode() {
-    maxStack = in.nextChar
-    maxLocals = in.nextChar
-    val codeLength = in.nextInt
+    maxStack = u2
+    maxLocals = u2
+    val codeLength = u4
     val code = new LinearCode
 
     def parseInstruction() {
@@ -202,27 +289,26 @@ abstract class ICodeReader extends ClassfileParser {
       import code._
       var size = 1 // instruction size
 
-      /** Parse 16 bit jump target. */
+      /* Parse 16 bit jump target. */
       def parseJumpTarget = {
         size += 2
-        val offset = in.nextChar.toShort
+        val offset = u2.toShort
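+        // u2 yields an unsigned Char; .toShort reinterprets it as a signed 16-bit branch offset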
         val target = pc + offset
         assert(target >= 0 && target < codeLength, "Illegal jump target: " + target)
         target
       }
 
-      /** Parse 32 bit jump target. */
+      /* Parse 32 bit jump target. */
       def parseJumpTargetW: Int = {
         size += 4
-        val offset = in.nextInt
+        val offset = u4
         val target = pc + offset
         assert(target >= 0 && target < codeLength, "Illegal jump target: " + target + " pc: " + pc + " offset: " + offset)
         target
       }
 
-      val instr = toUnsignedByte(in.nextByte)
-      instr match {
-        case JVM.nop => parseInstruction
+      u1 match {
+        case JVM.nop => parseInstruction()
         case JVM.aconst_null => code emit CONSTANT(Constant(null))
         case JVM.iconst_m1   => code emit CONSTANT(Constant(-1))
         case JVM.iconst_0    => code emit CONSTANT(Constant(0))
@@ -240,21 +326,21 @@ abstract class ICodeReader extends ClassfileParser {
         case JVM.dconst_0    => code emit CONSTANT(Constant(0.0))
         case JVM.dconst_1    => code emit CONSTANT(Constant(1.0))
 
-        case JVM.bipush      => code.emit(CONSTANT(Constant(in.nextByte))); size += 1
-        case JVM.sipush      => code.emit(CONSTANT(Constant(in.nextChar))); size += 2
-        case JVM.ldc         => code.emit(CONSTANT(pool.getConstant(toUnsignedByte(in.nextByte)))); size += 1
-        case JVM.ldc_w       => code.emit(CONSTANT(pool.getConstant(in.nextChar))); size += 2
-        case JVM.ldc2_w      => code.emit(CONSTANT(pool.getConstant(in.nextChar))); size += 2
-        case JVM.iload       => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, INT)));    size += 1
-        case JVM.lload       => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, LONG)));   size += 1
-        case JVM.fload       => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, FLOAT)));  size += 1
-        case JVM.dload       => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, DOUBLE))); size += 1
+        case JVM.bipush      => code.emit(CONSTANT(Constant(u1))); size += 1
+        case JVM.sipush      => code.emit(CONSTANT(Constant(u2))); size += 2
+        case JVM.ldc         => code.emit(CONSTANT(pool.getConstant(u1))); size += 1
+        case JVM.ldc_w       => code.emit(CONSTANT(pool.getConstant(u2))); size += 2
+        case JVM.ldc2_w      => code.emit(CONSTANT(pool.getConstant(u2))); size += 2
+        case JVM.iload       => code.emit(LOAD_LOCAL(code.getLocal(u1, INT)));    size += 1
+        case JVM.lload       => code.emit(LOAD_LOCAL(code.getLocal(u1, LONG)));   size += 1
+        case JVM.fload       => code.emit(LOAD_LOCAL(code.getLocal(u1, FLOAT)));  size += 1
+        case JVM.dload       => code.emit(LOAD_LOCAL(code.getLocal(u1, DOUBLE))); size += 1
         case JVM.aload       =>
-          val local = in.nextByte.toInt; size += 1
+          val local = u1.toInt; size += 1
           if (local == 0 && !method.isStatic)
-            code.emit(THIS(method.symbol.owner));
+            code.emit(THIS(method.symbol.owner))
           else
-            code.emit(LOAD_LOCAL(code.getLocal(local, ObjectReference)));
+            code.emit(LOAD_LOCAL(code.getLocal(local, ObjectReference)))
 
         case JVM.iload_0     => code.emit(LOAD_LOCAL(code.getLocal(0, INT)))
         case JVM.iload_1     => code.emit(LOAD_LOCAL(code.getLocal(1, INT)))
@@ -274,9 +360,9 @@ abstract class ICodeReader extends ClassfileParser {
         case JVM.dload_3     => code.emit(LOAD_LOCAL(code.getLocal(3, DOUBLE)))
         case JVM.aload_0     =>
           if (!method.isStatic)
-            code.emit(THIS(method.symbol.owner));
+            code.emit(THIS(method.symbol.owner))
           else
-            code.emit(LOAD_LOCAL(code.getLocal(0, ObjectReference)));
+            code.emit(LOAD_LOCAL(code.getLocal(0, ObjectReference)))
         case JVM.aload_1     => code.emit(LOAD_LOCAL(code.getLocal(1, ObjectReference)))
         case JVM.aload_2     => code.emit(LOAD_LOCAL(code.getLocal(2, ObjectReference)))
         case JVM.aload_3     => code.emit(LOAD_LOCAL(code.getLocal(3, ObjectReference)))
@@ -290,11 +376,11 @@ abstract class ICodeReader extends ClassfileParser {
         case JVM.caload      => code.emit(LOAD_ARRAY_ITEM(CHAR))
         case JVM.saload      => code.emit(LOAD_ARRAY_ITEM(SHORT))
 
-        case JVM.istore      => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, INT)));    size += 1
-        case JVM.lstore      => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, LONG)));   size += 1
-        case JVM.fstore      => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, FLOAT)));  size += 1
-        case JVM.dstore      => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, DOUBLE))); size += 1
-        case JVM.astore      => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, ObjectReference))); size += 1
+        case JVM.istore      => code.emit(STORE_LOCAL(code.getLocal(u1, INT)));    size += 1
+        case JVM.lstore      => code.emit(STORE_LOCAL(code.getLocal(u1, LONG)));   size += 1
+        case JVM.fstore      => code.emit(STORE_LOCAL(code.getLocal(u1, FLOAT)));  size += 1
+        case JVM.dstore      => code.emit(STORE_LOCAL(code.getLocal(u1, DOUBLE))); size += 1
+        case JVM.astore      => code.emit(STORE_LOCAL(code.getLocal(u1, ObjectReference))); size += 1
         case JVM.istore_0    => code.emit(STORE_LOCAL(code.getLocal(0, INT)))
         case JVM.istore_1    => code.emit(STORE_LOCAL(code.getLocal(1, INT)))
         case JVM.istore_2    => code.emit(STORE_LOCAL(code.getLocal(2, INT)))
@@ -378,9 +464,9 @@ abstract class ICodeReader extends ClassfileParser {
         case JVM.lxor        => code.emit(CALL_PRIMITIVE(Logical(XOR, LONG)))
         case JVM.iinc        =>
           size += 2
-          val local = code.getLocal(in.nextByte, INT)
+          val local = code.getLocal(u1, INT)
           code.emit(LOAD_LOCAL(local))
-          code.emit(CONSTANT(Constant(in.nextByte)))
+          code.emit(CONSTANT(Constant(u1)))
           code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT)))
           code.emit(STORE_LOCAL(local))
 
@@ -430,14 +516,14 @@ abstract class ICodeReader extends ClassfileParser {
           size += padding
           in.bp += padding
           assert((pc + size % 4) != 0, pc)
-/*          var byte1 = in.nextByte; size += 1;
-          while (byte1 == 0) { byte1 = in.nextByte; size += 1; }
-          val default = byte1 << 24 | in.nextByte << 16 | in.nextByte << 8 | in.nextByte;
+/*          var byte1 = u1; size += 1;
+          while (byte1 == 0) { byte1 = u1; size += 1; }
+          val default = byte1 << 24 | u1 << 16 | u1 << 8 | u1;
           size = size + 3
        */
-          val default = pc + in.nextInt; size += 4
-          val low  = in.nextInt
-          val high = in.nextInt
+          val default = pc + u4; size += 4
+          val low  = u4
+          val high = u4
           size += 8
           assert(low <= high, "Value low not <= high for tableswitch.")
 
@@ -450,13 +536,13 @@ abstract class ICodeReader extends ClassfileParser {
           size += padding
           in.bp += padding
           assert((pc + size % 4) != 0, pc)
-          val default = pc + in.nextInt; size += 4
-          val npairs = in.nextInt; size += 4
+          val default = pc + u4; size += 4
+          val npairs = u4; size += 4
           var tags: List[List[Int]] = Nil
           var targets: List[Int] = Nil
           var i = 0
           while (i < npairs) {
-            tags = List(in.nextInt) :: tags; size += 4
+            tags = List(u4) :: tags; size += 4
             targets = parseJumpTargetW :: targets; // parseJumpTargetW updates 'size' itself
             i += 1
           }
@@ -471,59 +557,59 @@ abstract class ICodeReader extends ClassfileParser {
         case JVM.return_     => code.emit(RETURN(UNIT))
 
         case JVM.getstatic    =>
-          val field = pool.getMemberSymbol(in.nextChar, true); size += 2
+          val field = pool.getMemberSymbol(u2, static = true); size += 2
           if (field.hasModuleFlag)
             code emit LOAD_MODULE(field)
           else
-            code emit LOAD_FIELD(field, true)
+            code emit LOAD_FIELD(field, isStatic = true)
         case JVM.putstatic   =>
-          val field = pool.getMemberSymbol(in.nextChar, true); size += 2
-          code.emit(STORE_FIELD(field, true))
+          val field = pool.getMemberSymbol(u2, static = true); size += 2
+          code.emit(STORE_FIELD(field, isStatic = true))
         case JVM.getfield    =>
-          val field = pool.getMemberSymbol(in.nextChar, false); size += 2
-          code.emit(LOAD_FIELD(field, false))
+          val field = pool.getMemberSymbol(u2, static = false); size += 2
+          code.emit(LOAD_FIELD(field, isStatic = false))
         case JVM.putfield    =>
-          val field = pool.getMemberSymbol(in.nextChar, false); size += 2
-          code.emit(STORE_FIELD(field, false))
+          val field = pool.getMemberSymbol(u2, static = false); size += 2
+          code.emit(STORE_FIELD(field, isStatic = false))
 
         case JVM.invokevirtual =>
-          val m = pool.getMemberSymbol(in.nextChar, false); size += 2
+          val m = pool.getMemberSymbol(u2, static = false); size += 2
           code.emit(CALL_METHOD(m, Dynamic))
           method.updateRecursive(m)
         case JVM.invokeinterface  =>
-          val m = pool.getMemberSymbol(in.nextChar, false); size += 4
+          val m = pool.getMemberSymbol(u2, static = false); size += 4
           in.skip(2)
           code.emit(CALL_METHOD(m, Dynamic))
           // invokeinterface can't be recursive
         case JVM.invokespecial   =>
-          val m = pool.getMemberSymbol(in.nextChar, false); size += 2
-          val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(true)
-                      else SuperCall(m.owner.name);
+          val m = pool.getMemberSymbol(u2, static = false); size += 2
+          val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(onInstance = true)
+                      else SuperCall(m.owner.name)
           code.emit(CALL_METHOD(m, style))
           method.updateRecursive(m)
         case JVM.invokestatic    =>
-          val m = pool.getMemberSymbol(in.nextChar, true); size += 2
+          val m = pool.getMemberSymbol(u2, static = true); size += 2
           if (isBox(m))
             code.emit(BOX(toTypeKind(m.info.paramTypes.head)))
           else if (isUnbox(m))
             code.emit(UNBOX(toTypeKind(m.info.resultType)))
           else {
-            code.emit(CALL_METHOD(m, Static(false)))
+            code.emit(CALL_METHOD(m, Static(onInstance = false)))
             method.updateRecursive(m)
           }
         case JVM.invokedynamic  =>
+          // TODO: this is just a placeholder. A real implementation must parse the class constant entry
           debuglog("Found JVM invokedynamic instruction, inserting placeholder ICode INVOKE_DYNAMIC.")
           containsInvokeDynamic = true
-          val poolEntry = in.nextChar
+          val poolEntry = in.nextChar.toInt
           in.skip(2)
           code.emit(INVOKE_DYNAMIC(poolEntry))
 
         case JVM.new_          =>
-          code.emit(NEW(REFERENCE(pool.getClassSymbol(in.nextChar))))
+          code.emit(NEW(REFERENCE(pool.getClassSymbol(u2))))
           size += 2
         case JVM.newarray      =>
-          val kind = in.nextByte match {
+          val kind = u1 match {
             case T_BOOLEAN => BOOL
             case T_CHAR    => CHAR
             case T_FLOAT   => FLOAT
@@ -537,35 +623,35 @@ abstract class ICodeReader extends ClassfileParser {
           code.emit(CREATE_ARRAY(kind, 1))
 
         case JVM.anewarray     =>
-          val tpe = pool.getClassOrArrayType(in.nextChar); size += 2
+          val tpe = pool.getClassOrArrayType(u2); size += 2
           code.emit(CREATE_ARRAY(toTypeKind(tpe), 1))
 
         case JVM.arraylength   => code.emit(CALL_PRIMITIVE(ArrayLength(ObjectReference))); // the kind does not matter
         case JVM.athrow        => code.emit(THROW(definitions.ThrowableClass))
         case JVM.checkcast     =>
-          code.emit(CHECK_CAST(toTypeKind(pool.getClassOrArrayType(in.nextChar)))); size += 2
+          code.emit(CHECK_CAST(toTypeKind(pool.getClassOrArrayType(u2)))); size += 2
         case JVM.instanceof    =>
-          code.emit(IS_INSTANCE(toTypeKind(pool.getClassOrArrayType(in.nextChar)))); size += 2
+          code.emit(IS_INSTANCE(toTypeKind(pool.getClassOrArrayType(u2)))); size += 2
         case JVM.monitorenter  => code.emit(MONITOR_ENTER())
         case JVM.monitorexit   => code.emit(MONITOR_EXIT())
         case JVM.wide          =>
           size += 1
-          toUnsignedByte(in.nextByte) match {
-            case JVM.iload  => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, INT)));    size += 2
-            case JVM.lload  => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, LONG)));   size += 2
-            case JVM.fload  => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, FLOAT)));  size += 2
-            case JVM.dload  => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, DOUBLE))); size += 2
-            case JVM.aload  => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, ObjectReference))); size += 2
-            case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, INT)));    size += 2
-            case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, LONG)));   size += 2
-            case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, FLOAT)));  size += 2
-            case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, DOUBLE))); size += 2
-            case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, ObjectReference))); size += 2
+          u1 match {
+            case JVM.iload  => code.emit(LOAD_LOCAL(code.getLocal(u2, INT)));    size += 2
+            case JVM.lload  => code.emit(LOAD_LOCAL(code.getLocal(u2, LONG)));   size += 2
+            case JVM.fload  => code.emit(LOAD_LOCAL(code.getLocal(u2, FLOAT)));  size += 2
+            case JVM.dload  => code.emit(LOAD_LOCAL(code.getLocal(u2, DOUBLE))); size += 2
+            case JVM.aload  => code.emit(LOAD_LOCAL(code.getLocal(u2, ObjectReference))); size += 2
+            case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(u2, INT)));    size += 2
+            case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(u2, LONG)));   size += 2
+            case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(u2, FLOAT)));  size += 2
+            case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(u2, DOUBLE))); size += 2
+            case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(u2, ObjectReference))); size += 2
             case JVM.ret => sys.error("Cannot handle jsr/ret")
             case JVM.iinc =>
               size += 4
-              val local = code.getLocal(in.nextChar, INT)
-              code.emit(CONSTANT(Constant(in.nextChar)))
+              val local = code.getLocal(u2, INT)
+              code.emit(CONSTANT(Constant(u2)))
               code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT)))
               code.emit(STORE_LOCAL(local))
             case _ => sys.error("Invalid 'wide' operand")
@@ -573,8 +659,8 @@ abstract class ICodeReader extends ClassfileParser {
 
         case JVM.multianewarray =>
           size += 3
-          val tpe = toTypeKind(pool getClassOrArrayType in.nextChar)
-          val dim = in.nextByte
+          val tpe = toTypeKind(pool getClassOrArrayType u2)
+          val dim = u1
 //          assert(dim == 1, "Cannot handle multidimensional arrays yet.")
           code emit CREATE_ARRAY(tpe, dim)
 
@@ -598,16 +684,16 @@ abstract class ICodeReader extends ClassfileParser {
     }
 
     pc = 0
-    while (pc < codeLength) parseInstruction
+    while (pc < codeLength) parseInstruction()
 
-    val exceptionEntries = in.nextChar.toInt
+    val exceptionEntries = u2.toInt
     code.containsEHs = (exceptionEntries != 0)
     var i = 0
     while (i < exceptionEntries) {
       // skip start end PC
       in.skip(4)
       // read the handler PC
-      code.jmpTargets += in.nextChar
+      code.jmpTargets += u2
       // skip the exception type
       in.skip(2)
       i += 1
@@ -643,15 +729,13 @@ abstract class ICodeReader extends ClassfileParser {
   /** Return the icode class that should include members with the given flags.
    *  There are two possible classes, the static part and the instance part.
    */
-  def getCode(flags: Int): IClass =
-    if (isScalaModule) staticCode
-    else if ((flags & JAVA_ACC_STATIC) != 0) staticCode
-    else instanceCode
+  def getCode(flags: JavaAccFlags): IClass =
+    if (isScalaModule || flags.isStatic) staticCode else instanceCode
 
   class LinearCode {
-    var instrs: ListBuffer[(Int, Instruction)] = new ListBuffer
-    var jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]()
-    var locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap()
+    val instrs: ListBuffer[(Int, Instruction)] = new ListBuffer
+    val jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]()
+    val locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap()
 
     var containsDUPX = false
     var containsNEW  = false
@@ -683,7 +767,6 @@ abstract class ICodeReader extends ClassfileParser {
 
       val blocks = makeBasicBlocks
       var otherBlock: BasicBlock = NoBasicBlock
-      var disableJmpTarget = false
 
       for ((pc, instr) <- instrs.iterator) {
 //        Console.println("> " + pc + ": " + instr);
@@ -691,7 +774,7 @@ abstract class ICodeReader extends ClassfileParser {
           otherBlock = blocks(pc)
           if (!bb.closed && otherBlock != bb) {
             bb.emit(JUMP(otherBlock))
-            bb.close
+            bb.close()
 //            Console.println("\t> closing bb: " + bb)
           }
           bb = otherBlock
@@ -734,46 +817,44 @@ abstract class ICodeReader extends ClassfileParser {
 
       val tfa = new analysis.MethodTFA() {
         import analysis._
-        import analysis.typeFlowLattice.IState
 
         /** Abstract interpretation for one instruction. */
         override def mutatingInterpret(out: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
-          val bindings = out.vars
           val stack = out.stack
           import stack.push
           i match {
             case DUP_X1 =>
               val (one, two) = stack.pop2
-              push(one); push(two); push(one);
+              push(one); push(two); push(one)
 
             case DUP_X2 =>
               val (one, two, three) = stack.pop3
-              push(one); push(three); push(two); push(one);
+              push(one); push(three); push(two); push(one)
 
             case DUP2_X1 =>
               val (one, two) = stack.pop2
               if (one.isWideType) {
-                push(one); push(two); push(one);
+                push(one); push(two); push(one)
               } else {
                 val three = stack.pop
-                push(two); push(one); push(three); push(two); push(one);
+                push(two); push(one); push(three); push(two); push(one)
               }
 
             case DUP2_X2 =>
               val (one, two) = stack.pop2
               if (one.isWideType && two.isWideType) {
-                push(one); push(two); push(one);
+                push(one); push(two); push(one)
               } else if (one.isWideType) {
                 val three = stack.pop
                 assert(!three.isWideType, "Impossible")
-                push(one); push(three); push(two); push(one);
+                push(one); push(three); push(two); push(one)
               } else {
                 val three = stack.pop
                 if (three.isWideType) {
-                  push(two); push(one); push(one); push(three); push(two); push(one);
+                  push(two); push(one); push(one); push(three); push(two); push(one)
                 } else {
                   val four = stack.pop
-                  push(two); push(one); push(four); push(one); push(three); push(two); push(one);
+                  push(two); push(one); push(four); push(one); push(three); push(two); push(one)
                 }
               }
 
@@ -786,7 +867,7 @@ abstract class ICodeReader extends ClassfileParser {
 
 //      method.dump
       tfa.init(method)
-      tfa.run
+      tfa.run()
       for (bb <- linearizer.linearize(method)) {
         var info = tfa.in(bb)
         for (i <- bb.toList) {
@@ -801,7 +882,7 @@ abstract class ICodeReader extends ClassfileParser {
                   STORE_LOCAL(tmp2),
                   LOAD_LOCAL(tmp1),
                   LOAD_LOCAL(tmp2),
-                  LOAD_LOCAL(tmp1)));
+                  LOAD_LOCAL(tmp1)))
 
             case DUP_X2 =>
               val one = info.stack.types(0)
@@ -814,30 +895,30 @@ abstract class ICodeReader extends ClassfileParser {
                   STORE_LOCAL(tmp2),
                   LOAD_LOCAL(tmp1),
                   LOAD_LOCAL(tmp2),
-                  LOAD_LOCAL(tmp1)));
+                  LOAD_LOCAL(tmp1)))
               else {
-                val tmp3 = freshLocal(info.stack.types(2));
+                val tmp3 = freshLocal(info.stack.types(2))
                 bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
                   STORE_LOCAL(tmp2),
                   STORE_LOCAL(tmp3),
                   LOAD_LOCAL(tmp1),
                   LOAD_LOCAL(tmp3),
                   LOAD_LOCAL(tmp2),
-                  LOAD_LOCAL(tmp1)));
+                  LOAD_LOCAL(tmp1)))
               }
 
             case DUP2_X1 =>
               val one = info.stack.types(0)
               val two = info.stack.types(1)
-              val tmp1 = freshLocal(one);
-              val tmp2 = freshLocal(two);
+              val tmp1 = freshLocal(one)
+              val tmp2 = freshLocal(two)
               if (one.isWideType) {
                 assert(!two.isWideType, "Impossible")
                 bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
                   STORE_LOCAL(tmp2),
                   LOAD_LOCAL(tmp1),
                   LOAD_LOCAL(tmp2),
-                  LOAD_LOCAL(tmp1)));
+                  LOAD_LOCAL(tmp1)))
               } else {
                 val tmp3 = freshLocal(info.stack.types(2))
                 bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
@@ -846,7 +927,7 @@ abstract class ICodeReader extends ClassfileParser {
                   LOAD_LOCAL(tmp1),
                   LOAD_LOCAL(tmp3),
                   LOAD_LOCAL(tmp2),
-                  LOAD_LOCAL(tmp1)));
+                  LOAD_LOCAL(tmp1)))
               }
 
             case DUP2_X2 =>
@@ -859,21 +940,21 @@ abstract class ICodeReader extends ClassfileParser {
                   STORE_LOCAL(tmp2),
                   LOAD_LOCAL(tmp1),
                   LOAD_LOCAL(tmp2),
-                  LOAD_LOCAL(tmp1)));
+                  LOAD_LOCAL(tmp1)))
               } else if (one.isWideType) {
                 val three = info.stack.types(2)
                 assert(!two.isWideType && !three.isWideType, "Impossible")
-                val tmp3 = freshLocal(three);
+                val tmp3 = freshLocal(three)
                 bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
                   STORE_LOCAL(tmp2),
                   STORE_LOCAL(tmp3),
                   LOAD_LOCAL(tmp1),
                   LOAD_LOCAL(tmp3),
                   LOAD_LOCAL(tmp2),
-                  LOAD_LOCAL(tmp1)));
+                  LOAD_LOCAL(tmp1)))
               } else {
                 val three = info.stack.types(2)
-                val tmp3 = freshLocal(three);
+                val tmp3 = freshLocal(three)
                 if (three.isWideType) {
                   bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
                       STORE_LOCAL(tmp2),
@@ -882,10 +963,10 @@ abstract class ICodeReader extends ClassfileParser {
                       LOAD_LOCAL(tmp1),
                       LOAD_LOCAL(tmp3),
                       LOAD_LOCAL(tmp2),
-                      LOAD_LOCAL(tmp1)));
+                      LOAD_LOCAL(tmp1)))
                 } else {
                   val four = info.stack.types(3)
-                  val tmp4 = freshLocal(three);
+                  val tmp4 = freshLocal(three)
                   assert(!four.isWideType, "Impossible")
                   bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
                       STORE_LOCAL(tmp2),
@@ -896,7 +977,7 @@ abstract class ICodeReader extends ClassfileParser {
                       LOAD_LOCAL(tmp4),
                       LOAD_LOCAL(tmp3),
                       LOAD_LOCAL(tmp2),
-                      LOAD_LOCAL(tmp1)));
+                      LOAD_LOCAL(tmp1)))
                 }
               }
             case _ =>
@@ -911,11 +992,11 @@ abstract class ICodeReader extends ClassfileParser {
       import opcodes._
       val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis
       rdef.init(method)
-      rdef.run
+      rdef.run()
 
       for (bb <- method.code.blocks ; (i, idx) <- bb.toList.zipWithIndex) i match {
         case cm @ CALL_METHOD(m, Static(true)) if m.isClassConstructor =>
-          def loop(bb0: BasicBlock, idx0: Int, depth: Int = 0): Unit = {
+          def loop(bb0: BasicBlock, idx0: Int, depth: Int): Unit = {
             rdef.findDefs(bb0, idx0, 1, depth) match {
               case ((bb1, idx1)) :: _ =>
                 bb1(idx1) match {
@@ -934,6 +1015,7 @@ abstract class ICodeReader extends ClassfileParser {
     }
 
     /** Return the local at given index, with the given type. */
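+    // the Char overload accepts indices read via u2 (which yields a Char) and delegates to the Int version below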
+    def getLocal(idx: Char, kind: TypeKind): Local = getLocal(idx.toInt, kind)
     def getLocal(idx: Int, kind: TypeKind): Local = {
       assert(idx < maxLocals, "Index too large for local variable.")
 
@@ -952,7 +1034,7 @@ abstract class ICodeReader extends ClassfileParser {
 
       locals.get(idx) match {
         case Some(ls) =>
-          val l = ls find { loc => loc._2 <:< kind }
+          val l = ls find { loc => loc._2 isAssignabledTo kind }
           l match {
             case Some((loc, _)) => loc
             case None =>
@@ -963,8 +1045,8 @@ abstract class ICodeReader extends ClassfileParser {
               l
           }
         case None =>
-          checkValidIndex
-          val l = freshLocal(idx, kind, false)
+          checkValidIndex()
+          val l = freshLocal(idx, kind, isArg = false)
           debuglog("Added new local for idx " + idx + ": " + kind)
           locals += (idx -> List((l, kind)))
           l
@@ -976,7 +1058,7 @@ abstract class ICodeReader extends ClassfileParser {
     /** Return a fresh Local variable for the given index.
      */
     private def freshLocal(idx: Int, kind: TypeKind, isArg: Boolean) = {
-      val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType);
+      val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType)
       val l = new Local(sym, kind, isArg)
       method.addLocal(l)
       l
@@ -988,7 +1070,7 @@ abstract class ICodeReader extends ClassfileParser {
      *  the original method. */
     def freshLocal(kind: TypeKind): Local = {
       count += 1
-      freshLocal(maxLocals + count, kind, false)
+      freshLocal(maxLocals + count, kind, isArg = false)
     }
 
     /** add a method param with the given index. */
@@ -1006,7 +1088,8 @@ abstract class ICodeReader extends ClassfileParser {
       jmpTargets += pc
     }
 
-    case class LJUMP(pc: Int) extends LazyJump(pc);
+    case class LJUMP(pc: Int) extends LazyJump(pc)
+
     case class LCJUMP(success: Int, failure: Int, cond: TestOp, kind: TypeKind)
       extends LazyJump(success) {
       override def toString(): String = "LCJUMP (" + kind + ") " + success + " : " + failure
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index ed7eb6d..592c549 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -11,6 +11,7 @@ import java.lang.Float.floatToIntBits
 import java.lang.Double.doubleToLongBits
 import scala.io.Codec
 import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
+import scala.reflect.internal.util.shortClassOfInstance
 import scala.collection.mutable.LinkedHashMap
 import PickleFormat._
 import Flags._
@@ -26,12 +27,8 @@ import Flags._
 abstract class Pickler extends SubComponent {
   import global._
 
-  private final val showSig = false
-
   val phaseName = "pickler"
 
-  currentRun
-
   def newPhase(prev: Phase): StdPhase = new PicklePhase(prev)
 
   class PicklePhase(prev: Phase) extends StdPhase(prev) {
@@ -58,23 +55,26 @@ abstract class Pickler extends SubComponent {
           case _ =>
         }
       }
-      // If there are any erroneous types in the tree, then we will crash
-      // when we pickle it: so let's report an error instead.  We know next
-      // to nothing about what happened, but our supposition is a lot better
-      // than "bad type: <error>" in terms of explanatory power.
-      for (t <- unit.body) {
-        if (t.isErroneous) {
-          unit.error(t.pos, "erroneous or inaccessible type")
-          return
-        }
 
-        if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) {
-          unit.error(t.pos, "macro has not been expanded")
-          return
-        }
+      try {
+        pickle(unit.body)
+      } catch {
+        case e: FatalError =>
+          for (t <- unit.body) {
+            // If there are any erroneous types in the tree, then we will crash
+            // when we pickle it: so let's report an error instead.  We know next
+            // to nothing about what happened, but our supposition is a lot better
+            // than "bad type: <error>" in terms of explanatory power.
+            //
+            // OPT: do this only as a recovery after fatal error. Checking in advance was expensive.
+            if (t.isErroneous) {
+              if (settings.debug) e.printStackTrace()
+              unit.error(t.pos, "erroneous or inaccessible type")
+              return
+            }
+          }
+          throw e
       }
-
-      pickle(unit.body)
     }
   }
 
@@ -84,7 +84,7 @@ abstract class Pickler extends SubComponent {
     private var entries   = new Array[AnyRef](256)
     private var ep        = 0
     private val index     = new LinkedHashMap[AnyRef, Int]
-    private lazy val nonClassRoot = findOrElse(root.ownersIterator)(!_.isClass)(NoSymbol)
+    private lazy val nonClassRoot = findSymbol(root.ownersIterator)(!_.isClass)
 
     private def isRootSym(sym: Symbol) =
       sym.name.toTermName == rootName && sym.owner == rootOwner
@@ -92,12 +92,17 @@ abstract class Pickler extends SubComponent {
     /** Usually returns the symbol's owner, but picks the classfile root instead
      *  for existentially bound variables that have a non-local owner.
      *  Question: Should this be done for refinement class symbols as well?
+     *
+     *  Note: tree pickling also finds its way here; e.g. in SI-7501 the pickling
+     *  of trees in annotation arguments considers the parameter symbol of a method
+     *  called in such a tree as "local". The condition `sym.isValueParameter` was
+     *  added to fix that bug, but there may be a better way.
      */
     private def localizedOwner(sym: Symbol) =
-      if (isLocal(sym) && !isRootSym(sym) && !isLocal(sym.owner))
+      if (isLocalToPickle(sym) && !isRootSym(sym) && !isLocalToPickle(sym.owner))
         // don't use a class as the localized owner for type parameters that are not owned by a class: those are not instantiated by asSeenFrom
         // however, they would suddenly be considered by asSeenFrom if their localized owner became a class (causing the crashes of #4079, #2741)
-        (if(sym.isTypeParameter && !sym.owner.isClass) nonClassRoot
+        (if ((sym.isTypeParameter || sym.isValueParameter) && !sym.owner.isClass) nonClassRoot
          else root)
       else sym.owner
 
@@ -105,13 +110,14 @@ abstract class Pickler extends SubComponent {
      *  anyway? This is the case if symbol is a refinement class,
      *  an existentially bound variable, or a higher-order type parameter.
      */
-    private def isLocal(sym: Symbol): Boolean =
-      !sym.isPackageClass && sym != NoSymbol &&
-      (isRootSym(sym) ||
-       sym.isRefinementClass ||
-       sym.isAbstractType && sym.hasFlag(EXISTENTIAL) || // existential param
-       sym.isParameter ||
-       isLocal(sym.owner))
+    private def isLocalToPickle(sym: Symbol): Boolean = (sym != NoSymbol) && !sym.isPackageClass && (
+         isRootSym(sym)
+      || sym.isRefinementClass
+      || sym.isAbstractType && sym.hasFlag(EXISTENTIAL) // existential param
+      || sym.isParameter
+      || isLocalToPickle(sym.owner)
+    )
+    private def isExternalSymbol(sym: Symbol): Boolean = (sym != NoSymbol) && !isLocalToPickle(sym)
 
     // Phase 1 methods: Populate entries/index ------------------------------------
 
@@ -134,19 +140,47 @@ abstract class Pickler extends SubComponent {
         true
     }
 
+    private def deskolemizeTypeSymbols(ref: AnyRef): AnyRef = ref match {
+      case sym: Symbol => deskolemize(sym)
+      case _           => ref
+    }
+
+    /** If the symbol is a type skolem, deskolemize and log it.
+     *  If we fail to deskolemize, in a method like
+     *    trait Trait[+A] { def f[CC[X]] : CC[A] }
+     *  the applied type CC[A] will hold a different CC symbol
+     *  than the type-constructor type-parameter CC.
+     */
+    private def deskolemize(sym: Symbol): Symbol = {
+      if (sym.isTypeSkolem) {
+        val sym1 = sym.deSkolemize
+        log({
+          val what0 = sym.defString
+          val what = sym1.defString match {
+            case `what0` => what0
+            case other   => what0 + "->" + other
+          }
+          val where = sym.enclMethod.fullLocationString
+          s"deskolemizing $what in $where"
+        })
+        sym1
+      }
+      else sym
+    }
+
     /** Store symbol in index. If symbol is local, also store everything it references.
-     *
-     *  @param sym ...
      */
-    def putSymbol(sym: Symbol) {
+    def putSymbol(sym0: Symbol) {
+      val sym = deskolemize(sym0)
+
       if (putEntry(sym)) {
-        if (isLocal(sym)) {
+        if (isLocalToPickle(sym)) {
           putEntry(sym.name)
           putSymbol(sym.owner)
           putSymbol(sym.privateWithin)
           putType(sym.info)
-          if (sym.thisSym.tpeHK != sym.tpeHK)
-            putType(sym.typeOfThis);
+          if (sym.hasSelfType)
+            putType(sym.typeOfThis)
           putSymbol(sym.alias)
           if (!sym.children.isEmpty) {
             val (locals, globals) = sym.children partition (_.isLocalClass)
@@ -173,257 +207,69 @@ abstract class Pickler extends SubComponent {
      */
     private def putType(tp: Type): Unit = if (putEntry(tp)) {
       tp match {
-        case NoType | NoPrefix /*| DeBruijnIndex(_, _) */ =>
+        case NoType | NoPrefix =>
           ;
         case ThisType(sym) =>
           putSymbol(sym)
         case SingleType(pre, sym) =>
-          putType(pre); putSymbol(sym)
+          putType(pre)
+          putSymbol(sym)
         case SuperType(thistpe, supertpe) =>
           putType(thistpe)
           putType(supertpe)
         case ConstantType(value) =>
           putConstant(value)
         case TypeRef(pre, sym, args) =>
-//          if (sym.isAbstractType && (sym hasFlag EXISTENTIAL))
-//            if (!(boundSyms contains sym))
-//              println("unbound existential: "+sym+sym.locationString)
-          putType(pre); putSymbol(sym); putTypes(args)
+          putType(pre)
+          putSymbol(sym)
+          putTypes(args)
         case TypeBounds(lo, hi) =>
-          putType(lo); putType(hi)
-        case RefinedType(parents, decls) =>
-          val rclazz = tp.typeSymbol
-          for (m <- decls.iterator)
-            if (m.owner != rclazz) abort("bad refinement member "+m+" of "+tp+", owner = "+m.owner)
-          putSymbol(rclazz); putTypes(parents); putSymbols(decls.toList)
-        case ClassInfoType(parents, decls, clazz) =>
-          putSymbol(clazz); putTypes(parents); putSymbols(decls.toList)
+          putType(lo)
+          putType(hi)
+        case tp: CompoundType =>
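+          // CompoundType covers both RefinedType and ClassInfoType, previously pickled by separate cases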
+          putSymbol(tp.typeSymbol)
+          putTypes(tp.parents)
+          putSymbols(tp.decls.toList)
         case MethodType(params, restpe) =>
-          putType(restpe); putSymbols(params)
+          putType(restpe)
+          putSymbols(params)
         case NullaryMethodType(restpe) =>
           putType(restpe)
         case PolyType(tparams, restpe) =>
-          /** no longer needed since all params are now local
-          tparams foreach { tparam =>
-            if (!isLocal(tparam)) locals += tparam // similar to existential types, these tparams are local
-          }
-          */
-          putType(restpe); putSymbols(tparams)
+          putType(restpe)
+          putSymbols(tparams)
         case ExistentialType(tparams, restpe) =>
-//          val savedBoundSyms = boundSyms // boundSyms are known to be local based on the EXISTENTIAL flag  (see isLocal)
-//          boundSyms = tparams ::: boundSyms
-//          try {
-            putType(restpe);
-//          } finally {
-//            boundSyms = savedBoundSyms
-//          }
+          putType(restpe)
           putSymbols(tparams)
-        case AnnotatedType(annotations, underlying, selfsym) =>
+        case AnnotatedType(_, underlying) =>
           putType(underlying)
-          if (settings.selfInAnnots.value) putSymbol(selfsym)
-          putAnnotations(annotations filter (_.isStatic))
+          tp.staticAnnotations foreach putAnnotation
         case _ =>
           throw new FatalError("bad type: " + tp + "(" + tp.getClass + ")")
       }
     }
     private def putTypes(tps: List[Type]) { tps foreach putType }
 
-    private def putTree(tree: Tree): Unit = if (putEntry(tree)) {
-      if (tree != EmptyTree)
-        putType(tree.tpe)
-      if (tree.hasSymbol)
-        putSymbol(tree.symbol)
-
-      tree match {
-        case EmptyTree =>
-
-        case tree @ PackageDef(pid, stats) =>
-          putTree(pid)
-          putTrees(stats)
-
-        case ClassDef(mods, name, tparams, impl) =>
-          putMods(mods)
-          putEntry(name)
-          putTree(impl)
-          putTrees(tparams)
-
-        case ModuleDef(mods, name, impl) =>
-          putMods(mods)
-          putEntry(name)
-          putTree(impl)
-
-        case ValDef(mods, name, tpt, rhs) =>
-          putMods(mods)
-          putEntry(name)
-          putTree(tpt)
-          putTree(rhs)
-
-        case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
-          putMods(mods)
-          putEntry(name)
-          putTrees(tparams)
-          putTreess(vparamss)
-          putTree(tpt)
-          putTree(rhs)
-
-        case TypeDef(mods, name, tparams, rhs) =>
-          putMods(mods)
-          putEntry(name)
-          putTree(rhs)
-          putTrees(tparams)
-
-        case LabelDef(name, params, rhs) =>
-          putEntry(name)
-          putTree(rhs)
-          putTrees(params)
-
-        case Import(expr, selectors) =>
-          putTree(expr)
-          for (ImportSelector(from, _, to, _) <- selectors) {
-            putEntry(from)
-            putEntry(to)
-          }
-/*
-        case DocDef(comment, definition) =>  should not be needed
-          putConstant(Constant(comment))
-          putTree(definition)
-*/
-        case Template(parents, self, body) =>
-          putTrees(parents)
-          putTree(self)
-          putTrees(body)
-
-        case Block(stats, expr) =>
-          putTree(expr)
-          putTrees(stats)
-
-        case CaseDef(pat, guard, body) =>
-          putTree(pat)
-          putTree(guard)
-          putTree(body)
-
-        case Alternative(trees) =>
-          putTrees(trees)
-
-        case Star(elem) =>
-          putTree(elem)
-
-        case Bind(name, body) =>
-          putEntry(name)
-          putTree(body)
-
-        case UnApply(fun: Tree, args) =>
-          putTree(fun)
-          putTrees(args)
-
-        case ArrayValue(elemtpt, trees) =>
-          putTree(elemtpt)
-          putTrees(trees)
-
-
-        case Function(vparams, body) =>
-          putTree(body)
-          putTrees(vparams)
-
-        case Assign(lhs, rhs) =>
-          putTree(lhs)
-          putTree(rhs)
-
-        case If(cond, thenp, elsep) =>
-          putTree(cond)
-          putTree(thenp)
-          putTree(elsep)
-
-        case Match(selector, cases) =>
-          putTree(selector)
-          putTrees(cases)
-
-        case Return(expr) =>
-          putTree(expr)
-
-        case Try(block, catches, finalizer) =>
-          putTree(block)
-          putTree(finalizer)
-          putTrees(catches)
-
-        case Throw(expr) =>
-          putTree(expr)
-
-        case New(tpt) =>
-          putTree(tpt)
-
-        case Typed(expr, tpt) =>
-          putTree(expr)
-          putTree(tpt)
-
-        case TypeApply(fun, args) =>
-          putTree(fun)
-          putTrees(args)
-
-        case Apply(fun, args) =>
-          putTree(fun)
-          putTrees(args)
-
-        case ApplyDynamic(qual, args) =>
-          putTree(qual)
-          putTrees(args)
-
-        case Super(qual, mix) =>
-          putTree(qual)
-          putEntry(mix:Name)
-
-        case This(qual) =>
-          putEntry(qual)
-
-        case Select(qualifier, selector) =>
-          putTree(qualifier)
-          putEntry(selector)
-
-        case Ident(name) =>
-          putEntry(name)
-
-        case Literal(value) =>
-          putEntry(value)
-
-        case TypeTree() =>
-
-        case Annotated(annot, arg) =>
-          putTree(annot)
-          putTree(arg)
-
-        case SingletonTypeTree(ref) =>
-          putTree(ref)
-
-        case SelectFromTypeTree(qualifier, selector) =>
-          putTree(qualifier)
-          putEntry(selector)
-
-        case CompoundTypeTree(templ: Template) =>
-          putTree(templ)
-
-        case AppliedTypeTree(tpt, args) =>
-          putTree(tpt)
-          putTrees(args)
-
-        case TypeBoundsTree(lo, hi) =>
-          putTree(lo)
-          putTree(hi)
-
-        case ExistentialTypeTree(tpt, whereClauses) =>
-          putTree(tpt)
-          putTrees(whereClauses)
+    private object putTreeTraverser extends Traverser {
+      // Only used when pickling trees, i.e. in an argument of some Annotation
+      // annotations in Modifiers are removed by the typechecker
+      override def traverseModifiers(mods: Modifiers): Unit = if (putEntry(mods)) putEntry(mods.privateWithin)
+      override def traverseName(name: Name): Unit           = putEntry(name)
+      override def traverseConstant(const: Constant): Unit  = putEntry(const)
+      override def traverse(tree: Tree): Unit               = putTree(tree)
+
+      def put(tree: Tree): Unit = {
+        if (tree.canHaveAttrs)
+          putType(tree.tpe)
+        if (tree.hasSymbolField)
+          putSymbol(tree.symbol)
+
+        super.traverse(tree)
       }
     }
-
-    private def putTrees(trees: List[Tree]) = trees foreach putTree
-    private def putTreess(treess: List[List[Tree]]) = treess foreach putTrees
-
-    /** only used when pickling trees, i.e. in an
-     *  argument of some Annotation */
-    private def putMods(mods: Modifiers) = if (putEntry(mods)) {
-      // annotations in Modifiers are removed by the typechecker
-      val Modifiers(flags, privateWithin, Nil) = mods
-      putEntry(privateWithin)
+    private def putTree(tree: Tree) {
+      if (putEntry(tree))
+        putTreeTraverser put tree
     }
 
     /** Store a constant in map index, along with anything it references.
@@ -437,7 +283,7 @@ abstract class Pickler extends SubComponent {
     }
 
     private def putChildren(sym: Symbol, children: List[Symbol]) {
-      assert(putEntry((sym, children)))
+      putEntry(sym -> children)
       children foreach putSymbol
     }
 
@@ -445,14 +291,10 @@ abstract class Pickler extends SubComponent {
     private def putAnnotation(sym: Symbol, annot: AnnotationInfo) {
       // if an annotation with the same arguments is applied to the
       // same symbol multiple times, it's only pickled once.
-      if (putEntry((sym, annot)))
+      if (putEntry(sym -> annot))
         putAnnotationBody(annot)
     }
 
-    /** used in AnnotatedType only, i.e. annotations on types */
-    private def putAnnotations(annots: List[AnnotationInfo]) {
-      annots foreach putAnnotation
-    }
     private def putAnnotation(annot: AnnotationInfo) {
       if (putEntry(annot))
         putAnnotationBody(annot)
@@ -467,14 +309,10 @@ abstract class Pickler extends SubComponent {
         }
       }
       def putClassfileAnnotArg(carg: ClassfileAnnotArg) {
-        carg match {
-          case LiteralAnnotArg(const) =>
-            putConstant(const)
-          case ArrayAnnotArg(args) =>
-            if (putEntry(carg))
-              args foreach putClassfileAnnotArg
-          case NestedAnnotArg(annInfo) =>
-            putAnnotation(annInfo)
+        (carg: @unchecked) match {
+          case LiteralAnnotArg(const)  => putConstant(const)
+          case ArrayAnnotArg(args)     => if (putEntry(carg)) args foreach putClassfileAnnotArg
+          case NestedAnnotArg(annInfo) => putAnnotation(annInfo)
         }
       }
       val AnnotationInfo(tpe, args, assocs) = annot
@@ -490,8 +328,11 @@ abstract class Pickler extends SubComponent {
 
     /** Write a reference to object, i.e., the object's number in the map index.
      */
-    private def writeRef(ref: AnyRef) { writeNat(index(ref)) }
-    private def writeRefs(refs: List[AnyRef]) { refs foreach writeRef }
+    private def writeRef(ref: AnyRef) {
+      writeNat(index(deskolemizeTypeSymbols(ref)))
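+      // type skolems are entered under their deskolemized symbol (see putSymbol), so deskolemize before the index lookup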
+    }
+    private def writeRefs(refs: List[AnyRef]): Unit = refs foreach writeRef
+
     private def writeRefsWithLength(refs: List[AnyRef]) {
       writeNat(refs.length)
       writeRefs(refs)
@@ -502,7 +343,7 @@ abstract class Pickler extends SubComponent {
     private def writeSymInfo(sym: Symbol) {
       writeRef(sym.name)
       writeRef(localizedOwner(sym))
-      writeLongNat((rawToPickledFlags(sym.flags & PickledFlags)))
+      writeLongNat((rawToPickledFlags(sym.rawflags & PickledFlags)))
       if (sym.hasAccessBoundary) writeRef(sym.privateWithin)
       writeRef(sym.info)
     }
@@ -534,567 +375,144 @@ abstract class Pickler extends SubComponent {
 
     /** Write a ClassfileAnnotArg (argument to classfile annotation) */
     def writeClassfileAnnotArg(carg: ClassfileAnnotArg) {
-      carg match {
-        case LiteralAnnotArg(const) =>
-          writeRef(const)
-        case ArrayAnnotArg(args) =>
-          writeRef(carg)
-        case NestedAnnotArg(annInfo) =>
-          writeRef(annInfo)
+      (carg: @unchecked) match {
+        case LiteralAnnotArg(const)  => writeRef(const)
+        case ArrayAnnotArg(args)     => writeRef(carg)
+        case NestedAnnotArg(annInfo) => writeRef(annInfo)
       }
     }
 
-    /** Write an entry */
-    private def writeEntry(entry: AnyRef) {
-      def writeBody(entry: AnyRef): Int = entry match {
-        case name: Name =>
-          writeName(name)
-          if (name.isTermName) TERMname else TYPEname
-        case NoSymbol =>
-          NONEsym
-        case sym: Symbol if !isLocal(sym) =>
-          val tag =
-            if (sym.isModuleClass) {
-              writeRef(sym.name.toTermName); EXTMODCLASSref
-            } else {
-              writeRef(sym.name); EXTref
-            }
-          if (!sym.owner.isRoot) writeRef(sym.owner)
-          tag
-        case sym: ClassSymbol =>
-          writeSymInfo(sym)
-          if (sym.thisSym.tpe != sym.tpe) writeRef(sym.typeOfThis)
-          CLASSsym
-        case sym: TypeSymbol =>
-          writeSymInfo(sym)
-          if (sym.isAbstractType) TYPEsym else ALIASsym
-        case sym: TermSymbol =>
-          writeSymInfo(sym)
-          if (sym.alias != NoSymbol) writeRef(sym.alias)
-          if (sym.isModule) MODULEsym else VALsym
-        case NoType =>
-          NOtpe
-        case NoPrefix =>
-          NOPREFIXtpe
-        case ThisType(sym) =>
-          writeRef(sym); THIStpe
-        case SingleType(pre, sym) =>
-          writeRef(pre); writeRef(sym); SINGLEtpe
-        case SuperType(thistpe, supertpe) =>
-          writeRef(thistpe); writeRef(supertpe); SUPERtpe
-        case ConstantType(value) =>
-          writeRef(value); CONSTANTtpe
-        case TypeRef(pre, sym, args) =>
-          writeRef(pre); writeRef(sym); writeRefs(args); TYPEREFtpe
-        case TypeBounds(lo, hi) =>
-          writeRef(lo); writeRef(hi); TYPEBOUNDStpe
-        case tp @ RefinedType(parents, decls) =>
-          writeRef(tp.typeSymbol); writeRefs(parents); REFINEDtpe
-        case ClassInfoType(parents, decls, clazz) =>
-          writeRef(clazz); writeRefs(parents); CLASSINFOtpe
-        case mt @ MethodType(formals, restpe) =>
-          writeRef(restpe); writeRefs(formals) ; METHODtpe
-        case mt @ NullaryMethodType(restpe) =>
-          // reuse POLYtpe since those can never have an empty list of tparams.
-          // TODO: is there any way this can come back and bite us in the bottom?
-          // ugliness and thrift aside, this should make this somewhat more backward compatible
-          // (I'm not sure how old scalac's would deal with nested PolyTypes, as these used to be folded into one)
-          writeRef(restpe); writeRefs(Nil); POLYtpe
-        case PolyType(tparams, restpe) => // invar: tparams nonEmpty
-          writeRef(restpe); writeRefs(tparams); POLYtpe
-        case ExistentialType(tparams, restpe) =>
-          writeRef(restpe); writeRefs(tparams); EXISTENTIALtpe
-        // case DeBruijnIndex(l, i) =>
-        //   writeNat(l); writeNat(i); DEBRUIJNINDEXtpe
-        case c @ Constant(_) =>
-          if (c.tag == BooleanTag) writeLong(if (c.booleanValue) 1 else 0)
-          else if (ByteTag <= c.tag && c.tag <= LongTag) writeLong(c.longValue)
-          else if (c.tag == FloatTag) writeLong(floatToIntBits(c.floatValue))
-          else if (c.tag == DoubleTag) writeLong(doubleToLongBits(c.doubleValue))
-          else if (c.tag == StringTag) writeRef(newTermName(c.stringValue))
-          else if (c.tag == ClazzTag) writeRef(c.typeValue)
-          else if (c.tag == EnumTag) writeRef(c.symbolValue)
-          LITERAL + c.tag // also treats UnitTag, NullTag; no value required
-        case AnnotatedType(annotations, tp, selfsym) =>
-          annotations filter (_.isStatic) match {
-            case Nil          => writeBody(tp) // write the underlying type if there are no annotations
-            case staticAnnots =>
-              if (settings.selfInAnnots.value && selfsym != NoSymbol)
-                writeRef(selfsym)
-              writeRef(tp)
-              writeRefs(staticAnnots)
-              ANNOTATEDtpe
-          }
-        // annotations attached to a symbol (i.e. annots on terms)
-        case (target: Symbol, annot@AnnotationInfo(_, _, _)) =>
-          writeRef(target)
-          writeAnnotation(annot)
-          SYMANNOT
-
-        case ArrayAnnotArg(args) =>
-          args foreach writeClassfileAnnotArg
-          ANNOTARGARRAY
-
-        case (target: Symbol, children: List[_]) =>
-          writeRef(target)
-          writeRefs(children.asInstanceOf[List[Symbol]])
-          CHILDREN
-
-        case EmptyTree =>
-          writeNat(EMPTYtree)
-          TREE
-
-        case tree@PackageDef(pid, stats) =>
-          writeNat(PACKAGEtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(tree.mods)
-          writeRef(pid)
-          writeRefs(stats)
-          TREE
-
-        case tree@ClassDef(mods, name, tparams, impl) =>
-          writeNat(CLASStree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(mods)
-          writeRef(name)
-          writeRef(impl)
-          writeRefs(tparams)
-          TREE
-
-        case tree@ModuleDef(mods, name, impl) =>
-          writeNat(MODULEtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(mods)
-          writeRef(name)
-          writeRef(impl)
-          TREE
-
-        case tree@ValDef(mods, name, tpt, rhs) =>
-          writeNat(VALDEFtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(mods)
-          writeRef(name)
-          writeRef(tpt)
-          writeRef(rhs)
-          TREE
-
-        case tree@DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
-          writeNat(DEFDEFtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(mods)
-          writeRef(name)
-          writeRefsWithLength(tparams)
-          writeNat(vparamss.length)
-          vparamss foreach writeRefsWithLength
-          writeRef(tpt)
-          writeRef(rhs)
-          TREE
-
-        case tree@TypeDef(mods, name, tparams, rhs) =>
-          writeNat(TYPEDEFtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(mods)
-          writeRef(name)
-          writeRef(rhs)
-          writeRefs(tparams)
-          TREE
-
-        case tree@LabelDef(name, params, rhs) =>
-          writeNat(LABELtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(name)
-          writeRef(rhs)
-          writeRefs(params)
-          TREE
-
-        case tree@Import(expr, selectors) =>
-          writeNat(IMPORTtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(expr)
-          for (ImportSelector(from, _, to, _) <- selectors) {
-            writeRef(from)
-            writeRef(to)
-          }
-          TREE
-
-        case tree@DocDef(comment, definition) =>
-          writeNat(DOCDEFtree)
-          writeRef(tree.tpe)
-          writeRef(Constant(comment))
-          writeRef(definition)
-          TREE
-
-        case tree@Template(parents, self, body) =>
-          writeNat(TEMPLATEtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRefsWithLength(parents)
-          writeRef(self)
-          writeRefs(body)
-          TREE
-
-        case tree@Block(stats, expr) =>
-          writeNat(BLOCKtree)
-          writeRef(tree.tpe)
-          writeRef(expr)
-          writeRefs(stats)
-          TREE
-
-        case tree@CaseDef(pat, guard, body) =>
-          writeNat(CASEtree)
-          writeRef(tree.tpe)
-          writeRef(pat)
-          writeRef(guard)
-          writeRef(body)
-          TREE
-
-        case tree@Alternative(trees) =>
-          writeNat(ALTERNATIVEtree)
-          writeRef(tree.tpe)
-          writeRefs(trees)
-          TREE
-
-        case tree@Star(elem) =>
-          writeNat(STARtree)
-          writeRef(tree.tpe)
-          writeRef(elem)
-          TREE
-
-        case tree@Bind(name, body) =>
-          writeNat(BINDtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(name)
-          writeRef(body)
-          TREE
-
-        case tree@UnApply(fun: Tree, args) =>
-          writeNat(UNAPPLYtree)
-          writeRef(tree.tpe)
-          writeRef(fun)
-          writeRefs(args)
-          TREE
-
-        case tree@ArrayValue(elemtpt, trees) =>
-          writeNat(ARRAYVALUEtree)
-          writeRef(tree.tpe)
-          writeRef(elemtpt)
-          writeRefs(trees)
-          TREE
-
-        case tree@Function(vparams, body) =>
-          writeNat(FUNCTIONtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(body)
-          writeRefs(vparams)
-          TREE
-
-        case tree@Assign(lhs, rhs) =>
-          writeNat(ASSIGNtree)
-          writeRef(tree.tpe)
-          writeRef(lhs)
-          writeRef(rhs)
-          TREE
-
-        case tree@If(cond, thenp, elsep) =>
-          writeNat(IFtree)
-          writeRef(tree.tpe)
-          writeRef(cond)
-          writeRef(thenp)
-          writeRef(elsep)
-          TREE
-
-        case tree@Match(selector, cases) =>
-          writeNat(MATCHtree)
-          writeRef(tree.tpe)
-          writeRef(selector)
-          writeRefs(cases)
-          TREE
-
-        case tree@Return(expr) =>
-          writeNat(RETURNtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(expr)
-          TREE
-
-        case tree@Try(block, catches, finalizer) =>
-          writeNat(TREtree)
-          writeRef(tree.tpe)
-          writeRef(block)
-          writeRef(finalizer)
-          writeRefs(catches)
-          TREE
-
-        case tree@Throw(expr) =>
-          writeNat(THROWtree)
-          writeRef(tree.tpe)
-          writeRef(expr)
-          TREE
-
-        case tree@New(tpt) =>
-          writeNat(NEWtree)
-          writeRef(tree.tpe)
-          writeRef(tpt)
-          TREE
-
-        case tree@Typed(expr, tpt) =>
-          writeNat(TYPEDtree)
-          writeRef(tree.tpe)
-          writeRef(expr)
-          writeRef(tpt)
-          TREE
-
-        case tree@TypeApply(fun, args) =>
-          writeNat(TYPEAPPLYtree)
-          writeRef(tree.tpe)
-          writeRef(fun)
-          writeRefs(args)
-          TREE
-
-        case tree@Apply(fun, args) =>
-          writeNat(APPLYtree)
-          writeRef(tree.tpe)
-          writeRef(fun)
-          writeRefs(args)
-          TREE
-
-        case tree@ApplyDynamic(qual, args) =>
-          writeNat(APPLYDYNAMICtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(qual)
-          writeRefs(args)
-          TREE
-
-        case tree@Super(qual, mix) =>
-          writeNat(SUPERtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(qual)
-          writeRef(mix)
-          TREE
-
-        case tree@This(qual) =>
-          writeNat(THIStree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(qual)
-          TREE
-
-        case tree@Select(qualifier, selector) =>
-          writeNat(SELECTtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(qualifier)
-          writeRef(selector)
-          TREE
-
-        case tree@Ident(name) =>
-          writeNat(IDENTtree)
-          writeRef(tree.tpe)
-          writeRef(tree.symbol)
-          writeRef(name)
-          TREE
-
-        case tree@Literal(value) =>
-          writeNat(LITERALtree)
-          writeRef(tree.tpe)
-          writeRef(value)
-          TREE
-
-        case tree@TypeTree() =>
-          writeNat(TYPEtree)
-          writeRef(tree.tpe)
-          TREE
-
-        case tree@Annotated(annot, arg) =>
-          writeNat(ANNOTATEDtree)
-          writeRef(tree.tpe)
-          writeRef(annot)
-          writeRef(arg)
-          TREE
-
-        case tree@SingletonTypeTree(ref) =>
-          writeNat(SINGLETONTYPEtree)
+    private object writeTreeBodyTraverser extends Traverser {
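+      // Writes the body of a tree entry: the type (and symbol, if any) of the tree
+      // itself are written inline, then the traverser switches to "refs" mode so that
+      // every sub-tree, name, constant, and modifier set is emitted as a reference
+      // into the pickle index.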
+      private var refs = false
+      @inline private def asRefs[T](body: => T): T = {
+        val saved = refs
+        refs = true
+        try body finally refs = saved
+      }
+      override def traverseModifiers(mods: Modifiers): Unit          = if (refs) writeRef(mods) else super.traverseModifiers(mods)
+      override def traverseName(name: Name): Unit                    = writeRef(name)
+      override def traverseConstant(const: Constant): Unit           = writeRef(const)
+      override def traverseParams(params: List[Tree]): Unit          = writeRefsWithLength(params)
+      override def traverseParamss(vparamss: List[List[Tree]]): Unit = {
+        writeNat(vparamss.length)
+        super.traverseParamss(vparamss)
+      }
+      override def traverse(tree: Tree): Unit = {
+        if (refs)
+          writeRef(tree)
+        else {
           writeRef(tree.tpe)
-          writeRef(ref)
-          TREE
+          if (tree.hasSymbolField)
+            writeRef(tree.symbol)
 
-        case tree@SelectFromTypeTree(qualifier, selector) =>
-          writeNat(SELECTFROMTYPEtree)
-          writeRef(tree.tpe)
-          writeRef(qualifier)
-          writeRef(selector)
-          TREE
+          asRefs(super.traverse(tree))
+        }
+      }
+    }
 
-        case tree@CompoundTypeTree(templ: Template) =>
-          writeNat(COMPOUNDTYPEtree)
-          writeRef(tree.tpe)
-          writeRef(templ)
-          TREE
+    /** Write an entry */
+    private def writeEntry(entry: AnyRef) {
+      def writeLocalSymbolBody(sym: Symbol) {
+        writeSymInfo(sym)
+        sym match {
+          case _: ClassSymbol if sym.hasSelfType => writeRef(sym.typeOfThis)
+          case _: TermSymbol if sym.alias.exists => writeRef(sym.alias)
+          case _                                 =>
+        }
+      }
+      def writeExtSymbolBody(sym: Symbol) {
+        val name = if (sym.isModuleClass) sym.name.toTermName else sym.name
+        writeRef(name)
+        if (!sym.owner.isRoot)
+          writeRef(sym.owner)
+      }
+      def writeSymbolBody(sym: Symbol) {
+        if (sym ne NoSymbol) {
+          if (isLocalToPickle(sym))
+            writeLocalSymbolBody(sym)
+          else
+            writeExtSymbolBody(sym)
+        }
+      }
 
-        case tree@AppliedTypeTree(tpt, args) =>
-          writeNat(APPLIEDTYPEtree)
-          writeRef(tree.tpe)
-          writeRef(tpt)
-          writeRefs(args)
-          TREE
+      // NullaryMethodType reuses POLYtpe since those can never have an empty list of tparams.
+      // TODO: is there any way this can come back and bite us in the bottom?
+      // ugliness and thrift aside, this should make this somewhat more backward compatible
+      // (I'm not sure how older scalacs would deal with nested PolyTypes, as these used to be folded into one)
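+      //   e.g. a nullary "def f: Int" pickles its result type with an empty tparams list
+      //   (POLYtpe with Nil), whereas "def f[T]: Int" pickles POLYtpe with tparams List(T).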
+      def writeTypeBody(tpe: Type): Unit = tpe match {
+        case NoType | NoPrefix                   =>
+        case ThisType(sym)                       => writeRef(sym)
+        case SingleType(pre, sym)                => writeRef(pre) ; writeRef(sym)
+        case SuperType(thistpe, supertpe)        => writeRef(thistpe) ; writeRef(supertpe)
+        case ConstantType(value)                 => writeRef(value)
+        case TypeBounds(lo, hi)                  => writeRef(lo) ; writeRef(hi)
+        case TypeRef(pre, sym, args)             => writeRef(pre) ; writeRef(sym); writeRefs(args)
+        case MethodType(formals, restpe)         => writeRef(restpe) ; writeRefs(formals)
+        case NullaryMethodType(restpe)           => writeRef(restpe); writeRefs(Nil)
+        case PolyType(tparams, restpe)           => writeRef(restpe); writeRefs(tparams)
+        case ExistentialType(tparams, restpe)    => writeRef(restpe); writeRefs(tparams)
+        case StaticallyAnnotatedType(annots, tp) => writeRef(tp) ; writeRefs(annots)
+        case AnnotatedType(_, tp)                => writeTypeBody(tp) // write the underlying type if there are no static annotations
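+        // CompoundType covers both RefinedType (REFINEDtpe) and ClassInfoType (CLASSINFOtpe)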
+        case CompoundType(parents, _, clazz)     => writeRef(clazz); writeRefs(parents)
+      }
 
-        case tree@TypeBoundsTree(lo, hi) =>
-          writeNat(TYPEBOUNDStree)
-          writeRef(tree.tpe)
-          writeRef(lo)
-          writeRef(hi)
-          TREE
+      def writeTreeBody(tree: Tree) {
+        writeNat(picklerSubTag(tree))
+        if (!tree.isEmpty)
+          writeTreeBodyTraverser traverse tree
+      }
 
-        case tree@ExistentialTypeTree(tpt, whereClauses) =>
-          writeNat(EXISTENTIALTYPEtree)
-          writeRef(tree.tpe)
-          writeRef(tpt)
-          writeRefs(whereClauses)
-          TREE
+      def writeConstant(c: Constant): Unit = c.tag match {
+        case BooleanTag => writeLong(if (c.booleanValue) 1 else 0)
+        case FloatTag   => writeLong(floatToIntBits(c.floatValue).toLong)
+        case DoubleTag  => writeLong(doubleToLongBits(c.doubleValue))
+        case StringTag  => writeRef(newTermName(c.stringValue))
+        case ClazzTag   => writeRef(c.typeValue)
+        case EnumTag    => writeRef(c.symbolValue)
+        case tag        => if (ByteTag <= tag && tag <= LongTag) writeLong(c.longValue)
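+        // UnitTag and NullTag carry no value bytes; the entry tag alone identifies them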
+      }
 
-        case Modifiers(flags, privateWithin, _) =>
-          val pflags = rawToPickledFlags(flags)
-          writeNat((pflags >> 32).toInt)
-          writeNat((pflags & 0xFFFFFFFF).toInt)
-          writeRef(privateWithin)
-          MODIFIERS
+      def writeModifiers(mods: Modifiers) {
+        val pflags = rawToPickledFlags(mods.flags)
+        writeNat((pflags >> 32).toInt)
+        writeNat((pflags & 0xFFFFFFFF).toInt)
+        writeRef(mods.privateWithin)
+      }
 
-        // annotations on types (not linked to a symbol)
-        case annot@AnnotationInfo(_, _, _) =>
-          writeAnnotation(annot)
-          ANNOTINFO
+      def writeSymbolTuple(target: Symbol, other: Any) {
+        writeRef(target)
+        other match {
+          case annot: AnnotationInfo             => writeAnnotation(annot)
+          case children: List[Symbol @unchecked] => writeRefs(children)
+          case _                                 =>
+        }
+      }
 
-        case _ =>
-          throw new FatalError("bad entry: " + entry + " " + entry.getClass)
+      def writeBody(entry: AnyRef): Unit = entry match {
+        case tree: Tree              => writeTreeBody(tree)
+        case sym: Symbol             => writeSymbolBody(sym)
+        case tpe: Type               => writeTypeBody(tpe)
+        case name: Name              => writeName(name)
+        case const: Constant         => writeConstant(const)
+        case mods: Modifiers         => writeModifiers(mods)
+        case annot: AnnotationInfo   => writeAnnotation(annot)
+        case (target: Symbol, other) => writeSymbolTuple(target, other)
+        case ArrayAnnotArg(args)     => args foreach writeClassfileAnnotArg
+        case _                       => devWarning(s"Unexpected entry to pickler ${shortClassOfInstance(entry)} $entry")
       }
 
       // begin writeEntry
-      val startpos = writeIndex
-      // reserve some space so that the patchNat's most likely won't need to shift
-      writeByte(0); writeByte(0)
-      patchNat(startpos, writeBody(entry))
-      patchNat(startpos + 1, writeIndex - (startpos + 2))
-    }
-
-    /** Print entry for diagnostics */
-    def printEntryAtIndex(idx: Int) = printEntry(entries(idx))
-    def printEntry(entry: AnyRef) {
-      def printRef(ref: AnyRef) {
-        print(index(ref)+
-              (if (ref.isInstanceOf[Name]) "("+ref+") " else " "))
-      }
-      def printRefs(refs: List[AnyRef]) { refs foreach printRef }
-      def printSymInfo(sym: Symbol) {
-        var posOffset = 0
-        printRef(sym.name)
-        printRef(localizedOwner(sym))
-        print(flagsToString(sym.flags & PickledFlags)+" ")
-        if (sym.hasAccessBoundary) printRef(sym.privateWithin)
-        printRef(sym.info)
-      }
-      def printBody(entry: AnyRef) = entry match {
-        case name: Name =>
-          print((if (name.isTermName) "TERMname " else "TYPEname ")+name)
-        case NoSymbol =>
-          print("NONEsym")
-        case sym: Symbol if !isLocal(sym) =>
-          if (sym.isModuleClass) {
-            print("EXTMODCLASSref "); printRef(sym.name.toTermName)
-          } else {
-            print("EXTref "); printRef(sym.name)
-          }
-          if (!sym.owner.isRoot) printRef(sym.owner)
-        case sym: ClassSymbol =>
-          print("CLASSsym ")
-          printSymInfo(sym)
-          if (sym.thisSym.tpe != sym.tpe) printRef(sym.typeOfThis)
-        case sym: TypeSymbol =>
-          print(if (sym.isAbstractType) "TYPEsym " else "ALIASsym ")
-          printSymInfo(sym)
-        case sym: TermSymbol =>
-          print(if (sym.isModule) "MODULEsym " else "VALsym ")
-          printSymInfo(sym)
-          if (sym.alias != NoSymbol) printRef(sym.alias)
-        case NoType =>
-          print("NOtpe")
-        case NoPrefix =>
-          print("NOPREFIXtpe")
-        case ThisType(sym) =>
-          print("THIStpe "); printRef(sym)
-        case SingleType(pre, sym) =>
-          print("SINGLEtpe "); printRef(pre); printRef(sym);
-        case ConstantType(value) =>
-          print("CONSTANTtpe "); printRef(value);
-        case TypeRef(pre, sym, args) =>
-          print("TYPEREFtpe "); printRef(pre); printRef(sym); printRefs(args);
-        case TypeBounds(lo, hi) =>
-          print("TYPEBOUNDStpe "); printRef(lo); printRef(hi);
-        case tp @ RefinedType(parents, decls) =>
-          print("REFINEDtpe "); printRef(tp.typeSymbol); printRefs(parents);
-        case ClassInfoType(parents, decls, clazz) =>
-          print("CLASSINFOtpe "); printRef(clazz); printRefs(parents);
-        case mt @ MethodType(formals, restpe) =>
-          print("METHODtpe"); printRef(restpe); printRefs(formals)
-        case PolyType(tparams, restpe) =>
-          print("POLYtpe "); printRef(restpe); printRefs(tparams);
-        case ExistentialType(tparams, restpe) =>
-          print("EXISTENTIALtpe "); printRef(restpe); printRefs(tparams);
-          print("||| "+entry)
-        // case DeBruijnIndex(l, i) =>
-        //   print("DEBRUIJNINDEXtpe "); print(l+" "+i)
-        case c @ Constant(_) =>
-          print("LITERAL ")
-          if (c.tag == BooleanTag) print("Boolean "+(if (c.booleanValue) 1 else 0))
-          else if (c.tag == ByteTag) print("Byte "+c.longValue)
-          else if (c.tag == ShortTag) print("Short "+c.longValue)
-          else if (c.tag == CharTag) print("Char "+c.longValue)
-          else if (c.tag == IntTag) print("Int "+c.longValue)
-          else if (c.tag == LongTag) print("Long "+c.longValue)
-          else if (c.tag == FloatTag) print("Float "+c.floatValue)
-          else if (c.tag == DoubleTag) print("Double "+c.doubleValue)
-          else if (c.tag == StringTag) { print("String "); printRef(newTermName(c.stringValue)) }
-          else if (c.tag == ClazzTag) { print("Class "); printRef(c.typeValue) }
-          else if (c.tag == EnumTag) { print("Enum "); printRef(c.symbolValue) }
-        case AnnotatedType(annots, tp, selfsym) =>
-          if (settings.selfInAnnots.value) {
-            print("ANNOTATEDWSELFtpe ")
-            printRef(tp)
-            printRef(selfsym)
-            printRefs(annots)
-          } else {
-            print("ANNOTATEDtpe ")
-            printRef(tp)
-            printRefs(annots)
-          }
-        case (target: Symbol, AnnotationInfo(atp, args, Nil)) =>
-          print("SYMANNOT ")
-          printRef(target)
-          printRef(atp)
-          for (c <- args) printRef(c)
-        case (target: Symbol, children: List[_]) =>
-          print("CHILDREN ")
-          printRef(target)
-          for (c <- children) printRef(c.asInstanceOf[Symbol])
-        case AnnotationInfo(atp, args, Nil) =>
-          print("ANNOTINFO")
-          printRef(atp)
-          for (c <- args) printRef(c)
-        case _ =>
-          throw new FatalError("bad entry: " + entry + " " + entry.getClass)
+      // The picklerTag method can't determine if it's an external symbol reference
+      val tag = entry match {
+        case sym: Symbol if isExternalSymbol(sym) => if (sym.isModuleClass) EXTMODCLASSref else EXTref
+        case _                                    => picklerTag(entry)
       }
-      printBody(entry); println()
+      writeNat(tag)
+      writeByte(0) // reserve a place to record the number of bytes written
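+      // resulting entry layout: tag (Nat), body length in bytes (Nat), body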
+      val start = writeIndex
+      writeBody(entry)
+      val length = writeIndex - start
+      patchNat(start - 1, length) // patch bytes written over the placeholder
     }
 
     /** Write byte array */
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala b/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala
deleted file mode 100644
index 40189b9..0000000
--- a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala
+++ /dev/null
@@ -1,137 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2004-2013 LAMP/EPFL
- */
-
-
-package scala.tools.nsc
-package symtab
-package clr
-
-import java.io.File
-import java.util.{Comparator, StringTokenizer}
-import scala.util.Sorting
-import ch.epfl.lamp.compiler.msil._
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.util.{Position, NoPosition}
-
-/**
- * Collects all types from all reference assemblies.
- */
-abstract class CLRTypes {
-
-  val global: Global
-  import global.Symbol
-  import global.definitions
-
-  //##########################################################################
-
-  var BYTE: Type = _
-  var UBYTE: Type = _
-  var SHORT: Type = _
-  var USHORT: Type = _
-  var CHAR: Type = _
-  var INT: Type = _
-  var UINT: Type = _
-  var LONG: Type = _
-  var ULONG: Type = _
-  var FLOAT: Type = _
-  var DOUBLE: Type = _
-  var BOOLEAN: Type = _
-  var VOID: Type = _
-  var ENUM: Type = _
-  var DELEGATE: Type = _
-
-  var OBJECT: Type = _
-  var STRING: Type = _
-  var STRING_ARRAY: Type = _
-
-  var VALUE_TYPE: Type = _
-
-  var SCALA_SYMTAB_ATTR: Type = _
-  var SYMTAB_CONSTR: ConstructorInfo = _
-  var SYMTAB_DEFAULT_CONSTR: ConstructorInfo = _
-
-  var DELEGATE_COMBINE: MethodInfo = _
-  var DELEGATE_REMOVE: MethodInfo = _
-
-  val types: mutable.Map[Symbol,Type] = new mutable.HashMap
-  val constructors: mutable.Map[Symbol,ConstructorInfo] = new mutable.HashMap
-  val methods: mutable.Map[Symbol,MethodInfo] = new mutable.HashMap
-  val fields: mutable.Map[Symbol, FieldInfo] = new mutable.HashMap
-  val sym2type: mutable.Map[Type,Symbol] = new mutable.HashMap
-  val addressOfViews = new mutable.HashSet[Symbol]
-  val mdgptrcls4clssym: mutable.Map[ /*cls*/ Symbol, /*cls*/ Symbol] = new mutable.HashMap
-
-  def isAddressOf(msym : Symbol) = addressOfViews.contains(msym)
-
-  def isNonEnumValuetype(cls: Symbol) = {
-    val msilTOpt = types.get(cls)
-    val res = msilTOpt.isDefined && {
-      val msilT = msilTOpt.get
-      msilT.IsValueType && !msilT.IsEnum
-    }
-    res
-  }
-
-  def isValueType(cls: Symbol): Boolean = {
-    val opt = types.get(cls)
-    opt.isDefined && opt.get.IsValueType
-  }
-
-  def init() = try { // initialize
-    // the MsilClasspath (nsc/util/Classpath.scala) initializes the msil-library by calling
-    // Assembly.LoadFrom("mscorlib.dll"), so this type should be found
-    Type.initMSCORLIB(getTypeSafe("System.String").Assembly)
-
-    BYTE     = getTypeSafe("System.SByte")
-    UBYTE    = getTypeSafe("System.Byte")
-    CHAR     = getTypeSafe("System.Char")
-    SHORT    = getTypeSafe("System.Int16")
-    USHORT   = getTypeSafe("System.UInt16")
-    INT      = getTypeSafe("System.Int32")
-    UINT     = getTypeSafe("System.UInt32")
-    LONG     = getTypeSafe("System.Int64")
-    ULONG    = getTypeSafe("System.UInt64")
-    FLOAT    = getTypeSafe("System.Single")
-    DOUBLE   = getTypeSafe("System.Double")
-    BOOLEAN  = getTypeSafe("System.Boolean")
-    VOID     = getTypeSafe("System.Void")
-    ENUM     = getTypeSafe("System.Enum")
-    DELEGATE = getTypeSafe("System.MulticastDelegate")
-
-    OBJECT = getTypeSafe("System.Object")
-    STRING = getTypeSafe("System.String")
-    STRING_ARRAY = getTypeSafe("System.String[]")
-    VALUE_TYPE = getTypeSafe("System.ValueType")
-
-    SCALA_SYMTAB_ATTR = getTypeSafe("scala.runtime.SymtabAttribute")
-    val bytearray: Array[Type] = Array(Type.GetType("System.Byte[]"))
-    SYMTAB_CONSTR = SCALA_SYMTAB_ATTR.GetConstructor(bytearray)
-    SYMTAB_DEFAULT_CONSTR = SCALA_SYMTAB_ATTR.GetConstructor(Type.EmptyTypes)
-
-    val delegate: Type = getTypeSafe("System.Delegate")
-    val dargs: Array[Type] = Array(delegate, delegate)
-    DELEGATE_COMBINE = delegate.GetMethod("Combine", dargs)
-    DELEGATE_REMOVE = delegate.GetMethod("Remove", dargs)
-  }
-  catch {
-    case e: RuntimeException =>
-      Console.println(e.getMessage)
-      throw e
-  }
-
-  //##########################################################################
-  // type mapping and lookup
-
-  def getType(name: String): Type = Type.GetType(name)
-
-  def getTypeSafe(name: String): Type = {
-    val t = Type.GetType(name)
-    assert(t != null, name)
-    t
-  }
-
-  def mkArrayType(elemType: Type): Type = getType(elemType.FullName + "[]")
-
-  def isDelegateType(t: Type): Boolean = { t.BaseType() == DELEGATE }
-}  // CLRTypes
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
deleted file mode 100644
index 5a0253c..0000000
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ /dev/null
@@ -1,850 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2004-2013 LAMP/EPFL
- */
-
-package scala.tools.nsc
-package symtab
-package clr
-
-import java.io.IOException
-import io.MsilFile
-import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute, _}
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.pickling.UnPickler
-import ch.epfl.lamp.compiler.msil.Type.TMVarUsage
-import scala.language.implicitConversions
-
-/**
- *  @author Nikolay Mihaylov
- */
-abstract class TypeParser {
-
-  val global: Global
-
-  import global._
-  import loaders.clrTypes
-
-  //##########################################################################
-
-  private var clazz: Symbol = _
-  private var instanceDefs: Scope = _   // was members
-  private var staticModule: Symbol = _  // was staticsClass
-  private var staticDefs: Scope = _     // was statics
-
-  protected def statics: Symbol = staticModule.moduleClass
-
-  protected var busy: Boolean = false       // lock to detect recursive reads
-
-  private object unpickler extends UnPickler {
-    val global: TypeParser.this.global.type = TypeParser.this.global
-  }
-
-  def parse(typ: MSILType, root: Symbol) {
-
-    def handleError(e: Throwable) = {
-      if (settings.debug.value) e.printStackTrace()  //debug
-      throw new IOException("type '" + typ.FullName + "' is broken\n(" + e.getMessage() + ")")
-    }
-    assert(!busy)
-    busy = true
-
-    if (root.isModule) {
-      this.clazz = root.companionClass
-      this.staticModule = root
-    } else {
-      this.clazz = root
-      this.staticModule = root.companionModule
-    }
-    try {
-      parseClass(typ)
-    } catch {
-      case e: FatalError => handleError(e)
-      case e: RuntimeException => handleError(e)
-    }
-    busy = false
-  }
-
-  class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
-    override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
-  }
-
-  /* the names `classTParams` and `newTParams` stem from the forJVM version (ClassfileParser.sigToType())
-  *  but there are differences that should be kept in mind.
-  *  forMSIL, a nested class knows nothing about any type-params in the nesting class,
-  *  therefore newTParams is redundant (other than for recording lexical order),
-  *  it always contains the same elements as classTParams.value */
-  val classTParams = scala.collection.mutable.Map[Int,Symbol]() // TODO should this be a stack? (i.e., is it possible for more than one invocation of getCLRType on the same TypeParser instance to be active?)
-  val newTParams = new scala.collection.mutable.ListBuffer[Symbol]()
-  val methodTParams = scala.collection.mutable.Map[Int,Symbol]()
-
-  private def sig2typeBounds(tvarCILDef: GenericParamAndConstraints): Type = {
-    val ts = new scala.collection.mutable.ListBuffer[Type]
-    for (cnstrnt <- tvarCILDef.Constraints) {
-      ts += getCLRType(cnstrnt) // TODO we're definitely not at or after erasure, no need to call objToAny, right?
-    }
-    TypeBounds.upper(intersectionType(ts.toList, clazz))
-    // TODO variance???
-  }
-
-  private def createViewFromTo(viewSuffix : String, fromTpe : Type, toTpe : Type,
-                               addToboxMethodMap : Boolean, isAddressOf : Boolean) : Symbol = {
-    val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? shouldn't be final instead?
-    val viewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(List(fromTpe)), toTpe)
-    val vmsym = createMethod(nme.view_ + viewSuffix, flags, viewMethodType, null, true);
-    // !!! this used to mutate a mutable map in definitions, but that map became
-    // immutable and this kept "working" with a no-op.  So now it's commented out
-    // since I retired the deprecated code which allowed for that bug.
-    //
-    // if (addToboxMethodMap) definitions.boxMethod(clazz) = vmsym
-
-    if (isAddressOf) clrTypes.addressOfViews += vmsym
-    vmsym
-  }
-
-  private def createDefaultConstructor(typ: MSILType) {
-    val attrs = MethodAttributes.Public | MethodAttributes.RTSpecialName | MethodAttributes.SpecialName // TODO instance
-    val declType= typ
-    val method = new ConstructorInfo(declType, attrs, Array[MSILType]())
-    val flags = Flags.JAVA
-    val owner = clazz
-    val methodSym = owner.newMethod(nme.CONSTRUCTOR, NoPosition, flags)
-    val rettype = clazz.tpe
-    val mtype = methodType(Array[MSILType](), rettype);
-    val mInfo = mtype(methodSym)
-    methodSym.setInfo(mInfo)
-    instanceDefs.enter(methodSym);
-    clrTypes.constructors(methodSym) = method
-  }
-
-  private def parseClass(typ: MSILType) {
-
-    {
-      val t4c = clrTypes.types.get(clazz)
-      assert(t4c == None || t4c == Some(typ))
-    }
-    clrTypes.types(clazz) = typ
-
-    {
-      val c4t = clrTypes.sym2type.get(typ)
-      assert(c4t == None || c4t == Some(clazz))
-    }
-    clrTypes.sym2type(typ) = clazz
-
-    if (typ.IsDefined(clrTypes.SCALA_SYMTAB_ATTR, false)) {
-      val attrs = typ.GetCustomAttributes(clrTypes.SCALA_SYMTAB_ATTR, false);
-      assert (attrs.length == 1, attrs.length);
-      val a = attrs(0).asInstanceOf[MSILAttribute];
-      assert (a.getConstructor() == clrTypes.SYMTAB_CONSTR);
-      val symtab = a.getConstructorArguments()(0).asInstanceOf[Array[Byte]]
-      unpickler.unpickle(symtab, 0, clazz, staticModule, typ.FullName);
-      val mClass = clrTypes.getType(typ.FullName + "$");
-      if (mClass != null) {
-        clrTypes.types(statics) = mClass;
-        val moduleInstance = mClass.GetField("MODULE$");
-        assert (moduleInstance != null, mClass);
-        clrTypes.fields(statics) = moduleInstance;
-      }
-      return
-    }
-    val flags = translateAttributes(typ)
-
-    var clazzBoxed : Symbol = NoSymbol
-    var clazzMgdPtr : Symbol = NoSymbol
-
-    val canBeTakenAddressOf = (typ.IsValueType || typ.IsEnum) && (typ.FullName != "System.Enum")
-
-    if(canBeTakenAddressOf) {
-      clazzBoxed = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("Boxed"))
-      clazzMgdPtr = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("MgdPtr"))
-      clrTypes.mdgptrcls4clssym(clazz) =  clazzMgdPtr
-      /* adding typMgdPtr to clrTypes.sym2type should happen early (before metadata for supertypes is parsed,
-         before metadata for members is parsed) so that clazzMgdPtr can be found by getCLRType. */
-      val typMgdPtr = MSILType.mkByRef(typ)
-      clrTypes.types(clazzMgdPtr) = typMgdPtr
-      clrTypes.sym2type(typMgdPtr) = clazzMgdPtr
-      /* clazzMgdPtr but not clazzBoxed is mapped by clrTypes.types into an msil.Type instance,
-         because there's no metadata-level representation for a "boxed valuetype" */
-      val instanceDefsMgdPtr = newScope
-      val classInfoMgdPtr = ClassInfoType(definitions.anyvalparam, instanceDefsMgdPtr, clazzMgdPtr)
-      clazzMgdPtr.setFlag(flags)
-      clazzMgdPtr.setInfo(classInfoMgdPtr)
-    }
-
-/* START CLR generics (snippet 1) */
-    // first pass
-    for (tvarCILDef <- typ.getSortedTVars() ) {
-      val tpname = newTypeName(tvarCILDef.Name.replaceAll("!", "")) // TODO are really all type-params named in all assemblies out there? (NO)
-      val tpsym = clazz.newTypeParameter(tpname)
-      classTParams.put(tvarCILDef.Number, tpsym)
-      newTParams += tpsym
-      // TODO wouldn't the following also be needed later, i.e. during getCLRType
-      tpsym.setInfo(definitions.AnyClass.tpe)
-    }
-    // second pass
-    for (tvarCILDef <- typ.getSortedTVars() ) {
-      val tpsym = classTParams(tvarCILDef.Number)
-      tpsym.setInfo(sig2typeBounds(tvarCILDef)) // we never skip bounds unlike in forJVM
-    }
-/* END CLR generics (snippet 1) */
-    val ownTypeParams = newTParams.toList
-/* START CLR generics (snippet 2) */
-    if (!ownTypeParams.isEmpty) {
-      clazz.setInfo(new TypeParamsType(ownTypeParams))
-      if(typ.IsValueType && !typ.IsEnum) {
-        clazzBoxed.setInfo(new TypeParamsType(ownTypeParams))
-      }
-    }
-/* END CLR generics (snippet 2) */
-    instanceDefs = newScope
-    staticDefs = newScope
-
-    val classInfoAsInMetadata = {
-        val ifaces: Array[MSILType] = typ.getInterfaces()
-        val superType = if (typ.BaseType() != null) getCLRType(typ.BaseType())
-                        else if (typ.IsInterface()) definitions.ObjectClass.tpe
-                        else definitions.AnyClass.tpe; // this branch activates for System.Object only.
-        // parents (i.e., base type and interfaces)
-        val parents = new scala.collection.mutable.ListBuffer[Type]()
-        parents += superType
-        for (iface <- ifaces) {
-          parents += getCLRType(iface)  // here the variance doesn't matter
-        }
-        // methods, properties, events, fields are entered in a moment
-        if (canBeTakenAddressOf) {
-          val instanceDefsBoxed = newScope
-          ClassInfoType(parents.toList, instanceDefsBoxed, clazzBoxed)
-        } else
-          ClassInfoType(parents.toList, instanceDefs, clazz)
-      }
-
-    val staticInfo = ClassInfoType(List(), staticDefs, statics)
-
-    clazz.setFlag(flags)
-
-    if (canBeTakenAddressOf) {
-      clazzBoxed.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata
-                          else genPolyType(ownTypeParams, classInfoAsInMetadata) )
-      clazzBoxed.setFlag(flags)
-      val rawValueInfoType = ClassInfoType(definitions.anyvalparam, instanceDefs, clazz)
-      clazz.setInfo( if (ownTypeParams.isEmpty) rawValueInfoType
-                     else genPolyType(ownTypeParams, rawValueInfoType) )
-    } else {
-      clazz.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata
-                     else genPolyType(ownTypeParams, classInfoAsInMetadata) )
-    }
-
-    // TODO I don't remember if statics.setInfo and staticModule.setInfo should also know about type params
-    statics.setFlag(Flags.JAVA)
-    statics.setInfo(staticInfo)
-    staticModule.setFlag(Flags.JAVA)
-    staticModule.setInfo(statics.tpe)
-
-
-    if (canBeTakenAddressOf) {
-      //  implicit conversions are owned by staticModule.moduleClass
-      createViewFromTo("2Boxed", clazz.tpe, clazzBoxed.tpe, addToboxMethodMap = true, isAddressOf = false)
-      // createViewFromTo("2Object", clazz.tpe, definitions.ObjectClass.tpe, addToboxMethodMap = true, isAddressOf = false)
-      createViewFromTo("2MgdPtr", clazz.tpe, clazzMgdPtr.tpe, addToboxMethodMap = false, isAddressOf = true)
-      // a return can't have type managed-pointer, thus a dereference-conversion is not needed
-      // similarly, a method can't declare as return type "boxed valuetype"
-      if (!typ.IsEnum) {
-        // a synthetic default constructor for raw-type allows `new X' syntax
-        createDefaultConstructor(typ)
-      }
-    }
-
-    // import nested types
-    for (ntype <- typ.getNestedTypes() if !(ntype.IsNestedPrivate || ntype.IsNestedAssembly || ntype.IsNestedFamANDAssem)
-				                                 || ntype.IsInterface /* TODO why shouldn't nested ifaces be type-parsed too? */ )
-      {
-        val loader = new loaders.MsilFileLoader(new MsilFile(ntype))
-              val nclazz = statics.newClass(ntype.Name)
-        val nmodule = statics.newModule(ntype.Name)
-	nclazz.setInfo(loader)
-	nmodule.setInfo(loader)
-	staticDefs.enter(nclazz)
-	staticDefs.enter(nmodule)
-
-	assert(nclazz.companionModule == nmodule, nmodule)
-	assert(nmodule.companionClass == nclazz, nclazz)
-      }
-
-    val fields = typ.getFields()
-    for (field <- fields
-         if !(field.IsPrivate() || field.IsAssembly() || field.IsFamilyAndAssembly)
-         if (getCLRType(field.FieldType) != null)
-         ) {
-      assert (!field.FieldType.IsPointer && !field.FieldType.IsByRef, "CLR requirement")
-      val flags = translateAttributes(field);
-      val name = newTermName(field.Name);
-      val fieldType =
-        if (field.IsLiteral && !field.FieldType.IsEnum && isDefinedAtgetConstant(getCLRType(field.FieldType)))
-	      ConstantType(getConstant(getCLRType(field.FieldType), field.getValue))
-	    else
-	      getCLRType(field.FieldType)
-      val owner = if (field.IsStatic()) statics else clazz;
-      val sym = owner.newValue(name, NoPosition, flags).setInfo(fieldType);
-        // TODO: set private within!!! -> look at typechecker/Namers.scala
-        (if (field.IsStatic()) staticDefs else instanceDefs).enter(sym);
-      clrTypes.fields(sym) = field;
-    }
-
-    for (constr <- typ.getConstructors() if !constr.IsStatic() && !constr.IsPrivate() &&
-         !constr.IsAssembly() && !constr.IsFamilyAndAssembly() && !constr.HasPtrParamOrRetType())
-      createMethod(constr);
-
-    // initially also contains getters and setters of properties.
-    val methodsSet = new mutable.HashSet[MethodInfo]();
-    methodsSet ++= typ.getMethods();
-
-    for (prop <- typ.getProperties) {
-      val propType: Type = getCLSType(prop.PropertyType);
-      if (propType != null) {
-	val getter: MethodInfo = prop.GetGetMethod(true);
-	val setter: MethodInfo = prop.GetSetMethod(true);
-	var gparamsLength: Int = -1;
-	if (!(getter == null || getter.IsPrivate || getter.IsAssembly
-              || getter.IsFamilyAndAssembly || getter.HasPtrParamOrRetType))
-	  {
-	    assert(prop.PropertyType == getter.ReturnType);
-	    val gparams: Array[ParameterInfo] = getter.GetParameters();
-	    gparamsLength = gparams.length;
-            val name: TermName = if (gparamsLength == 0) prop.Name else nme.apply;
-	    val flags = translateAttributes(getter);
-	    val owner: Symbol = if (getter.IsStatic) statics else clazz;
-            val methodSym = owner.newMethod(name, NoPosition, flags)
-      val mtype: Type = if (gparamsLength == 0) NullaryMethodType(propType) // .NET properties can't be polymorphic
-                        else methodType(getter, getter.ReturnType)(methodSym)
-        methodSym.setInfo(mtype);
-	    methodSym.setFlag(Flags.ACCESSOR);
-	    (if (getter.IsStatic) staticDefs else instanceDefs).enter(methodSym)
-	    clrTypes.methods(methodSym) = getter;
-	    methodsSet -= getter;
-	  }
-	if (!(setter == null || setter.IsPrivate || setter.IsAssembly
-             || setter.IsFamilyAndAssembly || setter.HasPtrParamOrRetType))
-	  {
-	    val sparams: Array[ParameterInfo] = setter.GetParameters()
-	    if(getter != null)
-	      assert(getter.IsStatic == setter.IsStatic);
-	    assert(setter.ReturnType == clrTypes.VOID);
-	    if(getter != null)
-	      assert(sparams.length == gparamsLength + 1, "" + getter + "; " + setter);
-
-            val name: TermName = if (gparamsLength == 0) nme.getterToSetter(prop.Name)
-			     else nme.update;
-	    val flags = translateAttributes(setter);
-	    val mtype = methodType(setter, definitions.UnitClass.tpe);
-	    val owner: Symbol = if (setter.IsStatic) statics else clazz;
-            val methodSym = owner.newMethod(name, NoPosition, flags)
-        methodSym.setInfo(mtype(methodSym))
-	    methodSym.setFlag(Flags.ACCESSOR);
-	    (if (setter.IsStatic) staticDefs else instanceDefs).enter(methodSym);
-	    clrTypes.methods(methodSym) = setter;
-	    methodsSet -= setter;
-	  }
-      }
-    }
-
-/*    for (event <- typ.GetEvents) {
-      // adding += and -= methods to add delegates to an event.
-      // raising the event is not possible from outside the class (this is so
-      // generally in .net world)
-      val adder: MethodInfo = event.GetAddMethod();
-      val remover: MethodInfo = event.GetRemoveMethod();
-      if (!(adder == null || adder.IsPrivate || adder.IsAssembly
-	    || adder.IsFamilyAndAssembly))
-	{
-	  assert(adder.ReturnType == clrTypes.VOID);
-	  assert(adder.GetParameters().map(_.ParameterType).toList == List(event.EventHandlerType));
-	  val name = encode("+=");
-	  val flags = translateAttributes(adder);
-	  val mtype: Type = methodType(adder, adder.ReturnType);
-	  createMethod(name, flags, mtype, adder, adder.IsStatic)
-	  methodsSet -= adder;
-	}
-      if (!(remover == null || remover.IsPrivate || remover.IsAssembly
-	    || remover.IsFamilyAndAssembly))
-	{
-	  assert(remover.ReturnType == clrTypes.VOID);
-	  assert(remover.GetParameters().map(_.ParameterType).toList == List(event.EventHandlerType));
-	  val name = encode("-=");
-	  val flags = translateAttributes(remover);
-	  val mtype: Type = methodType(remover, remover.ReturnType);
-	  createMethod(name, flags, mtype, remover, remover.IsStatic)
-	  methodsSet -= remover;
-	}
-    } */
-
-/* Adds view amounting to syntax sugar for a CLR implicit overload.
-   The long-form syntax can also be supported if "methodsSet -= method" (last statement) is removed.
-
-    /* remember, there's typ.getMethods and type.GetMethods  */
-    for (method <- typ.getMethods)
-      if(!method.HasPtrParamOrRetType &&
-              method.IsPublic && method.IsStatic && method.IsSpecialName &&
-              method.Name == "op_Implicit") {
-        // create a view: typ => method's return type
-        val viewRetType: Type = getCLRType(method.ReturnType)
-        val viewParamTypes: List[Type] = method.GetParameters().map(_.ParameterType).map(getCLSType).toList;
-        /* The spec says "The operator method shall be defined as a static method on either the operand or return type."
-         *  We don't consider the declaring type for the purposes of definitions.functionType,
-         * instead we regard op_Implicit's argument type and return type as defining the view's signature.
-         */
-        if (viewRetType != null && !viewParamTypes.contains(null)) {
-          /* The check above applies e.g. to System.Decimal that has a conversion from UInt16, a non-CLS type, whose CLS-mapping returns null */
-          val funType: Type = definitions.functionType(viewParamTypes, viewRetType);
-          val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? shouldn't be final instead?
-          val viewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(viewParamTypes), funType)
-          val vmsym = createMethod(nme.view_, flags, viewMethodType, method, true);
-          methodsSet -= method;
-        }
-      }
-*/
-
-    for (method <- methodsSet.iterator)
-      if (!method.IsPrivate() && !method.IsAssembly() && !method.IsFamilyAndAssembly()
-           && !method.HasPtrParamOrRetType)
-        createMethod(method);
-
-    // Create methods and views for delegate support
-    if (clrTypes.isDelegateType(typ)) {
-      createDelegateView(typ)
-      createDelegateChainers(typ)
-    }
-
-    // for enumerations introduce comparison and bitwise logical operations;
-    // the backend will recognize them and replace them with comparison or
-    // bitwise logical operations on the primitive underlying type
-
-    if (typ.IsEnum) {
-      val ENUM_CMP_NAMES = List(nme.EQ, nme.NE, nme.LT, nme.LE, nme.GT, nme.GE);
-      val ENUM_BIT_LOG_NAMES = List(nme.OR, nme.AND, nme.XOR);
-
-      val flags = Flags.JAVA | Flags.FINAL
-      for (cmpName <- ENUM_CMP_NAMES) {
-        val enumCmp = clazz.newMethod(cmpName)
-        val enumCmpType = JavaMethodType(enumCmp.newSyntheticValueParams(List(clazz.tpe)), definitions.BooleanClass.tpe)
-        enumCmp.setFlag(flags).setInfo(enumCmpType)
-        instanceDefs.enter(enumCmp)
-      }
-
-      for (bitLogName <- ENUM_BIT_LOG_NAMES) {
-        val enumBitLog = clazz.newMethod(bitLogName)
-        val enumBitLogType = JavaMethodType(enumBitLog.newSyntheticValueParams(List(clazz.tpe)), clazz.tpe /* was classInfo, infinite typer */)
-        enumBitLog.setFlag(flags).setInfo(enumBitLogType)
-        instanceDefs.enter(enumBitLog)
-      }
-    }
-
-  } // parseClass
-
-  private def populateMethodTParams(method: MethodBase, methodSym: MethodSymbol) : List[Symbol] = {
-    if(!method.IsGeneric) Nil
-    else {
-      methodTParams.clear
-      val newMethodTParams = new scala.collection.mutable.ListBuffer[Symbol]()
-
-      // first pass
-      for (mvarCILDef <- method.getSortedMVars() ) {
-        val mtpname = newTypeName(mvarCILDef.Name.replaceAll("!", "")) // TODO are really all method-level-type-params named in all assemblies out there? (NO)
-        val mtpsym = methodSym.newTypeParameter(mtpname)
-        methodTParams.put(mvarCILDef.Number, mtpsym)
-        newMethodTParams += mtpsym
-        // TODO wouldn't the following also be needed later, i.e. during getCLRType
-        mtpsym.setInfo(definitions.AnyClass.tpe)
-      }
-      // second pass
-      for (mvarCILDef <- method.getSortedMVars() ) {
-        val mtpsym = methodTParams(mvarCILDef.Number)
-        mtpsym.setInfo(sig2typeBounds(mvarCILDef)) // we never skip bounds unlike in forJVM
-      }
-
-      newMethodTParams.toList
-    }
-  }
-
-  private def createMethod(method: MethodBase) {
-
-    val flags = translateAttributes(method);
-    val owner = if (method.IsStatic()) statics else clazz;
-    val methodSym = owner.newMethod(getName(method), NoPosition, flags)
-    /* START CLR generics (snippet 3) */
-    val newMethodTParams = populateMethodTParams(method, methodSym)
-    /* END CLR generics (snippet 3) */
-
-    val rettype = if (method.IsConstructor()) clazz.tpe
-                  else getCLSType(method.asInstanceOf[MethodInfo].ReturnType);
-    if (rettype == null) return;
-    val mtype = methodType(method, rettype);
-    if (mtype == null) return;
-/* START CLR generics (snippet 4) */
-    val mInfo = if (method.IsGeneric) genPolyType(newMethodTParams, mtype(methodSym))
-                else mtype(methodSym)
-/* END CLR generics (snippet 4) */
-/* START CLR non-generics (snippet 4)
-    val mInfo = mtype(methodSym)
-   END CLR non-generics (snippet 4) */
-    methodSym.setInfo(mInfo)
-    (if (method.IsStatic()) staticDefs else instanceDefs).enter(methodSym);
-    if (method.IsConstructor())
-      clrTypes.constructors(methodSym) = method.asInstanceOf[ConstructorInfo]
-    else clrTypes.methods(methodSym) = method.asInstanceOf[MethodInfo];
-  }
-
-  private def createMethod(name: TermName, flags: Long, args: Array[MSILType], retType: MSILType, method: MethodInfo, statik: Boolean): Symbol = {
-    val mtype = methodType(args, getCLSType(retType))
-    assert(mtype != null)
-    createMethod(name, flags, mtype, method, statik)
-  }
-
-  private def createMethod(name: TermName, flags: Long, mtype: Symbol => Type, method: MethodInfo, statik: Boolean): Symbol = {
-    val methodSym: Symbol = (if (statik)  statics else clazz).newMethod(name)
-    methodSym.setFlag(flags).setInfo(mtype(methodSym))
-    (if (statik) staticDefs else instanceDefs).enter(methodSym)
-    if (method != null)
-      clrTypes.methods(methodSym)  = method
-    methodSym
-  }
-
-  private def createDelegateView(typ: MSILType) = {
-    val invoke: MethodInfo = typ.GetMember("Invoke")(0).asInstanceOf[MethodInfo];
-    val invokeRetType: Type = getCLRType(invoke.ReturnType);
-    val invokeParamTypes: List[Type] =invoke.GetParameters().map(_.ParameterType).map(getCLSType).toList;
-    val funType: Type = definitions.functionType(invokeParamTypes, invokeRetType);
-
-    val typClrType: Type = getCLRType(typ);
-    val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? think not needed
-
-    // create the forward view: delegate => function
-    val delegateParamTypes: List[Type] = List(typClrType);
-    // not ImplicitMethodType, this is for methods with implicit parameters (not implicit methods)
-    val forwardViewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(delegateParamTypes), funType)
-    val fmsym = createMethod(nme.view_, flags, forwardViewMethodType, null, true);
-
-    // create the backward view: function => delegate
-    val functionParamTypes: List[Type] = List(funType);
-    val backwardViewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(functionParamTypes), typClrType)
-    val bmsym = createMethod(nme.view_, flags, backwardViewMethodType, null, true);
-  }
-
-  private def createDelegateChainers(typ: MSILType) = {
-    val flags: Long = Flags.JAVA | Flags.FINAL
-    val args: Array[MSILType] = Array(typ)
-
-    var s = createMethod(encode("+="), flags, args, clrTypes.VOID, clrTypes.DELEGATE_COMBINE, false);
-    s = createMethod(encode("-="), flags, args, clrTypes.VOID, clrTypes.DELEGATE_REMOVE, false);
-
-    s = createMethod(nme.PLUS, flags, args, typ, clrTypes.DELEGATE_COMBINE, false);
-    s = createMethod(nme.MINUS, flags, args, typ, clrTypes.DELEGATE_REMOVE, false);
-  }
-
-  private def getName(method: MethodBase): TermName = {
-
-    def operatorOverload(name : String, paramsArity : Int) : Option[Name] = paramsArity match {
-      case 1 => name match {
-        // PartitionI.10.3.1
-        case "op_Decrement" => Some(encode("--"))
-        case "op_Increment" => Some(encode("++"))
-        case "op_UnaryNegation" => Some(nme.UNARY_-)
-        case "op_UnaryPlus" => Some(nme.UNARY_+)
-        case "op_LogicalNot" => Some(nme.UNARY_!)
-        case "op_OnesComplement" => Some(nme.UNARY_~)
-        /* op_True and op_False have no operator symbol assigned,
-           Other methods that will have to be written in full are:
-           op_AddressOf & (unary)
-           op_PointerDereference * (unary) */
-        case _ => None
-      }
-      case 2 => name match {
-        // PartitionI.10.3.2
-        case "op_Addition" => Some(nme.ADD)
-        case "op_Subtraction" => Some(nme.SUB)
-        case "op_Multiply" => Some(nme.MUL)
-        case "op_Division" => Some(nme.DIV)
-        case "op_Modulus" => Some(nme.MOD)
-        case "op_ExclusiveOr" => Some(nme.XOR)
-        case "op_BitwiseAnd" => Some(nme.AND)
-        case "op_BitwiseOr" => Some(nme.OR)
-        case "op_LogicalAnd" => Some(nme.ZAND)
-        case "op_LogicalOr" => Some(nme.ZOR)
-        case "op_LeftShift" => Some(nme.LSL)
-        case "op_RightShift" => Some(nme.ASR)
-        case "op_Equality" => Some(nme.EQ)
-        case "op_GreaterThan" => Some(nme.GT)
-        case "op_LessThan" => Some(nme.LT)
-        case "op_Inequality" => Some(nme.NE)
-        case "op_GreaterThanOrEqual" => Some(nme.GE)
-        case "op_LessThanOrEqual" => Some(nme.LE)
-
-        /* op_MemberSelection is reserved in Scala  */
-
-        /* The standard does not assign operator symbols to op_Assign , op_SignedRightShift , op_UnsignedRightShift ,
-         *   and op_UnsignedRightShiftAssignment so those names will be used instead to invoke those methods. */
-
-        /*
-          The remaining binary operators are not overloaded in C# and are therefore not in widespread use. They have to be written in full.
-
-          op_RightShiftAssignment      >>=
-          op_MultiplicationAssignment  *=
-          op_PointerToMemberSelection  ->*
-          op_SubtractionAssignment     -=
-          op_ExclusiveOrAssignment     ^=
-          op_LeftShiftAssignment       <<=
-          op_ModulusAssignment         %=
-          op_AdditionAssignment        +=
-          op_BitwiseAndAssignment      &=
-          op_BitwiseOrAssignment       |=
-          op_Comma                     ,
-          op_DivisionAssignment        /=
-        */
-        case _ => None
-      }
-      case _ => None
-    }
-
-    if (method.IsConstructor()) return nme.CONSTRUCTOR;
-    val name = method.Name;
-    if (method.IsStatic()) {
-      if(method.IsSpecialName) {
-        val paramsArity = method.GetParameters().size
-        // handle operator overload, otherwise handle as any static method
-        val operName = operatorOverload(name, paramsArity)
-        if (operName.isDefined) { return operName.get; }
-      }
-      return newTermName(name);
-    }
-    val params = method.GetParameters();
-    name match {
-      case "GetHashCode" if (params.length == 0) => nme.hashCode_;
-      case "ToString" if (params.length == 0) => nme.toString_;
-      case "Finalize" if (params.length == 0) => nme.finalize_;
-      case "Equals" if (params.length == 1 && params(0).ParameterType == clrTypes.OBJECT) =>
-        nme.equals_;
-      case "Invoke" if (clrTypes.isDelegateType(method.DeclaringType)) => nme.apply;
-      case _ => newTermName(name);
-    }
-  }
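
    A self-contained distillation of the binary mapping above, reduced to plain strings so it
    can be tried outside the compiler (the function name and the Option shape are illustrative;
    the real code returns interned compiler Names such as nme.ADD):

        def binaryOperatorName(clrName: String): Option[String] = clrName match {
          case "op_Addition"    => Some("+")
          case "op_Subtraction" => Some("-")
          case "op_Multiply"    => Some("*")
          case "op_Division"    => Some("/")
          case _                => None   // e.g. op_Comma: no operator symbol assigned
        }

        // binaryOperatorName("op_Multiply") == Some("*")
        // binaryOperatorName("op_Assign")   == None
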
-
-  //##########################################################################
-
-  private def methodType(method: MethodBase, rettype: MSILType): Symbol => Type = {
-    val rtype = getCLSType(rettype);
-    if (rtype == null) null else methodType(method, rtype);
-  }
-
-  /** Return a method type for the given method. */
-  private def methodType(method: MethodBase, rettype: Type): Symbol => Type =
-    methodType(method.GetParameters().map(_.ParameterType), rettype);
-
-  /** Return a method type for the provided argument types and return type. */
-  private def methodType(argtypes: Array[MSILType], rettype: Type): Symbol => Type = {
-    def paramType(typ: MSILType): Type =
-      if (typ eq clrTypes.OBJECT) definitions.AnyClass.tpe // TODO a hack to compile scalalib, should be definitions.AnyRefClass.tpe
-      else getCLSType(typ);
-    val ptypes = argtypes.map(paramType).toList;
-    if (ptypes.contains(null)) null
-    else method => JavaMethodType(method.newSyntheticValueParams(ptypes), rettype);
-  }
-
-    //##########################################################################
-
-  private def getClassType(typ: MSILType): Type = {
-    assert(typ != null);
-    val res = rootMirror.getClassByName(typ.FullName.replace('+', '.') : TypeName).tpe;
-    //if (res.isError())
-    //  global.reporter.error("unknown class reference " + type.FullName);
-    res
-  }
-
-  private def getCLSType(typ: MSILType): Type = { // getCLS returns non-null for types GenMSIL can handle, be they CLS-compliant or not
-    if (typ.IsTMVarUsage())
-    /* START CLR generics (snippet 5) */
-      getCLRType(typ)
-    /* END CLR generics (snippet 5) */
-    /* START CLR non-generics (snippet 5)
-      null
-       END CLR non-generics (snippet 5) */
-    else if ( /* TODO hack: if UBYTE is uncommented, "ambiguous reference to overloaded definition" ensues, for example for System.Math.Max(x, y) */
-              typ == clrTypes.USHORT || typ == clrTypes.UINT || typ == clrTypes.ULONG
-      /*  || typ == clrTypes.UBYTE    */
-          ||  typ.IsNotPublic()      || typ.IsNestedPrivate()
-          ||  typ.IsNestedAssembly() || typ.IsNestedFamANDAssem()
-          ||  typ.IsPointer()
-          || (typ.IsArray() && getCLRType(typ.GetElementType()) == null)  /* TODO hack: getCLR instead of getCLS */
-          || (typ.IsByRef() && !typ.GetElementType().CanBeTakenAddressOf()))
-      null
-    else
-      getCLRType(typ)
-  }
-
-  private def getCLRTypeIfPrimitiveNullOtherwise(typ: MSILType): Type =
-    if (typ == clrTypes.OBJECT)
-      definitions.ObjectClass.tpe;
-    else if (typ == clrTypes.VALUE_TYPE)
-      definitions.AnyValClass.tpe
-    else if (typ == clrTypes.STRING)
-      definitions.StringClass.tpe;
-    else if (typ == clrTypes.VOID)
-      definitions.UnitClass.tpe
-    else if (typ == clrTypes.BOOLEAN)
-      definitions.BooleanClass.tpe
-    else if (typ == clrTypes.CHAR)
-      definitions.CharClass.tpe
-    else if ((typ == clrTypes.BYTE)  || (typ == clrTypes.UBYTE)) // TODO U... is a hack to compile scalalib
-      definitions.ByteClass.tpe
-    else if ((typ == clrTypes.SHORT) || (typ == clrTypes.USHORT)) // TODO U... is a hack to compile scalalib
-      definitions.ShortClass.tpe
-    else if ((typ == clrTypes.INT)   || (typ == clrTypes.UINT))  // TODO U... is a hack to compile scalalib
-      definitions.IntClass.tpe
-    else if ((typ == clrTypes.LONG)  || (typ == clrTypes.ULONG))  // TODO U... is a hack to compile scalalib
-      definitions.LongClass.tpe
-    else if (typ == clrTypes.FLOAT)
-      definitions.FloatClass.tpe
-    else if (typ == clrTypes.DOUBLE)
-      definitions.DoubleClass.tpe
-    else null
-
-
-  private def getCLRType(tMSIL: MSILType): Type = {
-     var res = getCLRTypeIfPrimitiveNullOtherwise(tMSIL)
-     if (res != null) res
-     else if (tMSIL.isInstanceOf[ConstructedType]) {
-       val ct = tMSIL.asInstanceOf[ConstructedType]
-       /* START CLR generics (snippet 6) */
-             val cttpArgs = ct.typeArgs.map(tmsil => getCLRType(tmsil)).toList
-             appliedType(getCLRType(ct.instantiatedType), cttpArgs)
-       /* END CLR generics (snippet 6) */
-       /* START CLR non-generics (snippet 6)
-       getCLRType(ct.instantiatedType)
-          END CLR non-generics (snippet 6) */
-     } else if (tMSIL.isInstanceOf[TMVarUsage]) {
-        /* START CLR generics (snippet 7) */
-             val tVarUsage = tMSIL.asInstanceOf[TMVarUsage]
-             val tVarNumber = tVarUsage.Number
-             if (tVarUsage.isTVar) classTParams(tVarNumber).typeConstructor // shouldn't fail, just return definitions.AnyClass.tpe at worst
-             else methodTParams(tVarNumber).typeConstructor // shouldn't fail, just return definitions.AnyClass.tpe at worst
-        /* END CLR generics (snippet 7) */
-       /* START CLR non-generics (snippet 7)
-        null // definitions.ObjectClass.tpe
-          END CLR non-generics (snippet 7) */
-     } else if (tMSIL.IsArray()) {
-        var elemtp = getCLRType(tMSIL.GetElementType())
-        // cut&pasted from ClassfileParser
-        // make unbounded Array[T] where T is a type variable into Array[T with Object]
-        // (this is necessary because such arrays have a representation which is incompatible
-        // with arrays of primitive types).
-        // TODO does that incompatibility also apply to .NET?
-        if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe))
-          elemtp = intersectionType(List(elemtp, definitions.ObjectClass.tpe))
-        appliedType(definitions.ArrayClass.tpe, List(elemtp))
-     } else {
-       res = clrTypes.sym2type.get(tMSIL) match {
-         case Some(sym) => sym.tpe
-         case None => if (tMSIL.IsByRef && tMSIL.GetElementType.IsValueType) {
-                        val addressed = getCLRType(tMSIL.GetElementType)
-                        val clasym = addressed.typeSymbolDirect // TODO should be .typeSymbol?
-                        clasym.info.load(clasym)
-                        val secondAttempt = clrTypes.sym2type.get(tMSIL)
-                        secondAttempt match { case Some(sym) => sym.tpe
-                                              case None => null
-                                            }
-                      } else getClassType(tMSIL)
-       }
-       if (res == null)
-         null // TODO new RuntimeException()
-       else res
-     }
-  }
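
    The intersection trick in the array branch above follows standard Scala erasure behaviour;
    a minimal, hypothetical sketch (not taken from this codebase) of why the reference bound matters:

        // For an unbounded T, Array[T] may be backed by a primitive array at
        // run time (e.g. an int[]), so generic element access needs runtime dispatch.
        def head[T](xs: Array[T]): T = xs(0)

        // Array[T with Object] always erases to a reference array (Object[]),
        // which is the representation the comment above relies on.
        def headRef[T](xs: Array[T with Object]): T = xs(0)
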
-
-  // the values arrive as Java box classes (e.g. Integer, Boolean, Character);
-  // java.lang.Number is used to extract numeric values (not for boolean or character).
-  // see ch.epfl.lamp.compiler.msil.util.PEStream.java
-  def getConstant(constType: Type, value: Object): Constant = {
-    val typeClass = constType.typeSymbol
-    if (typeClass == definitions.BooleanClass)
-      Constant(value.asInstanceOf[java.lang.Boolean].booleanValue)
-    else if (typeClass == definitions.ByteClass)
-      Constant(value.asInstanceOf[java.lang.Number].byteValue)
-    else if (typeClass == definitions.ShortClass)
-      Constant(value.asInstanceOf[java.lang.Number].shortValue)
-    else if (typeClass == definitions.CharClass)
-      Constant(value.asInstanceOf[java.lang.Character].charValue)
-    else if (typeClass == definitions.IntClass)
-      Constant(value.asInstanceOf[java.lang.Number].intValue)
-    else if (typeClass == definitions.LongClass)
-      Constant(value.asInstanceOf[java.lang.Number].longValue)
-    else if (typeClass == definitions.FloatClass)
-      Constant(value.asInstanceOf[java.lang.Number].floatValue)
-    else if (typeClass == definitions.DoubleClass)
-      Constant(value.asInstanceOf[java.lang.Number].doubleValue)
-    else if (typeClass == definitions.StringClass)
-      Constant(value.asInstanceOf[java.lang.String])
-    else
-      abort("illegal value: " + value + ", class-symbol: " + typeClass)
-  }
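
    A drastically simplified, hypothetical distillation of the unboxing convention described in
    the comment above (the real method selects byteValue/shortValue/... from the expected
    constant type rather than always taking intValue):

        def unboxConstant(value: AnyRef): Any = value match {
          case b: java.lang.Boolean   => b.booleanValue
          case c: java.lang.Character => c.charValue
          case n: java.lang.Number    => n.intValue   // simplification; see note above
          case s: String              => s
        }

        // unboxConstant(java.lang.Integer.valueOf(42)) == 42
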
-
-  def isDefinedAtgetConstant(constType: Type): Boolean = {
-    val typeClass = constType.typeSymbol
-    if (    (typeClass == definitions.BooleanClass)
-         || (typeClass == definitions.ByteClass)
-         || (typeClass == definitions.ShortClass)
-         || (typeClass == definitions.CharClass)
-         || (typeClass == definitions.IntClass)
-         || (typeClass == definitions.LongClass)
-         || (typeClass == definitions.FloatClass)
-         || (typeClass == definitions.DoubleClass)
-         || (typeClass == definitions.StringClass)
-       )
-      true
-    else
-      false
-  }
-
-  private def translateAttributes(typ: MSILType): Long = {
-    var flags: Long = Flags.JAVA;
-    if (typ.IsNotPublic() || typ.IsNestedPrivate()
-	|| typ.IsNestedAssembly() || typ.IsNestedFamANDAssem())
-      flags = flags | Flags.PRIVATE;
-    else if (typ.IsNestedFamily() || typ.IsNestedFamORAssem())
-      flags = flags | Flags.PROTECTED;
-    if (typ.IsAbstract())
-      flags = flags | Flags.ABSTRACT;
-    if (typ.IsSealed())
-      flags = flags | Flags.FINAL;
-    if (typ.IsInterface())
-      flags = flags | Flags.INTERFACE | Flags.TRAIT | Flags.ABSTRACT;
-
-    flags
-  }
-
-  private def translateAttributes(field: FieldInfo): Long = {
-    var flags: Long = Flags.JAVA;
-    if (field.IsPrivate() || field.IsAssembly() || field.IsFamilyAndAssembly())
-      flags = flags | Flags.PRIVATE;
-    else if (field.IsFamily() || field.IsFamilyOrAssembly())
-      flags = flags | Flags.PROTECTED;
-    if (field.IsInitOnly() || field.IsLiteral())
-      flags = flags | Flags.FINAL;
-    else
-      flags = flags | Flags.MUTABLE;
-    if (field.IsStatic)
-      flags = flags | Flags.STATIC
-
-    flags
-  }
-
-  private def translateAttributes(method: MethodBase): Long = {
-    var flags: Long = Flags.JAVA;
-    if (method.IsPrivate() || method.IsAssembly() || method.IsFamilyAndAssembly())
-      flags = flags | Flags.PRIVATE;
-    else if (method.IsFamily() || method.IsFamilyOrAssembly())
-      flags = flags | Flags.PROTECTED;
-    if (method.IsAbstract())
-      flags = flags | Flags.DEFERRED;
-    if (method.IsStatic)
-      flags = flags | Flags.STATIC
-
-    flags
-  }
-}
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index bacd8c3..2b7c6cc 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -8,8 +8,6 @@ package transform
 
 import symtab._
 import Flags._
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
 
 abstract class AddInterfaces extends InfoTransform { self: Erasure =>
   import global._                  // the global environment
@@ -94,7 +92,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
         impl.typeOfThis = iface.typeOfThis
         impl.thisSym setName iface.thisSym.name
       }
-      impl.sourceFile = iface.sourceFile
+      impl.associatedFile = iface.sourceFile
       if (inClass)
         iface.owner.info.decls enter impl
 
@@ -111,7 +109,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
   def implClass(iface: Symbol): Symbol = {
     iface.info
 
-    implClassMap.getOrElse(iface, atPhase(implClassPhase) {
+    implClassMap.getOrElse(iface, enteringPhase(implClassPhase) {
       if (iface.implClass eq NoSymbol)
         debuglog(s"${iface.fullLocationString} has no implClass yet, creating it now.")
       else
@@ -145,7 +143,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
 
         decls enter (
           implClass.newMethod(nme.MIXIN_CONSTRUCTOR, implClass.pos)
-            setInfo MethodType(Nil, UnitClass.tpe)
+            setInfo MethodType(Nil, UnitTpe)
         )
       }
 
@@ -176,8 +174,8 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
     override def complete(implSym: Symbol) {
       debuglog("LazyImplClassType completing " + implSym)
 
-      /** If `tp` refers to a non-interface trait, return a
-       *  reference to its implementation class. Otherwise return `tp`.
+      /* If `tp` refers to a non-interface trait, return a
+       * reference to its implementation class. Otherwise return `tp`.
        */
       def mixinToImplClass(tp: Type): Type = AddInterfaces.this.erasure(implSym) {
         tp match { //@MATN: no normalize needed (comes after erasure)
@@ -191,12 +189,12 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
         case ClassInfoType(parents, decls, _) =>
           assert(phase == implClassPhase, tp)
           // Impl class parents: Object first, matching interface last.
-          val implParents = ObjectClass.tpe +: (parents.tail map mixinToImplClass filter (_.typeSymbol != ObjectClass)) :+ iface.tpe
+          val implParents = ObjectTpe +: (parents.tail map mixinToImplClass filter (_.typeSymbol != ObjectClass)) :+ iface.tpe
           ClassInfoType(implParents, implDecls(implSym, decls), implSym)
         case PolyType(_, restpe) =>
           implType(restpe)
       }
-      implSym setInfo implType(beforeErasure(iface.info))
+      implSym setInfo implType(enteringErasure(iface.info))
     }
 
     override def load(clazz: Symbol) { complete(clazz) }
@@ -211,7 +209,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
         case Nil      => Nil
         case hd :: tl =>
           assert(!hd.typeSymbol.isTrait, clazz)
-          if (clazz.isTrait) erasedTypeRef(ObjectClass) :: tl
+          if (clazz.isTrait) ObjectTpe :: tl
           else parents
       }
       val decls1 = scopeTransform(clazz)(
@@ -251,7 +249,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
   private def ifaceMemberDef(tree: Tree): Tree = createMemberDef(tree, true)(t => DefDef(t.symbol, EmptyTree))
 
   private def ifaceTemplate(templ: Template): Template =
-    treeCopy.Template(templ, templ.parents, emptyValDef, templ.body map ifaceMemberDef)
+    treeCopy.Template(templ, templ.parents, noSelfType, templ.body map ifaceMemberDef)
 
   /** Transforms the member tree containing the implementation
    *  into a member of the impl class.
@@ -278,11 +276,11 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
    */
   private def addMixinConstructorDef(clazz: Symbol, stats: List[Tree]): List[Tree] =
     if (treeInfo.firstConstructor(stats) != EmptyTree) stats
-    else DefDef(clazz.primaryConstructor, Block(List(), Literal(Constant()))) :: stats
+    else DefDef(clazz.primaryConstructor, Block(List(), Literal(Constant(())))) :: stats
 
   private def implTemplate(clazz: Symbol, templ: Template): Template = atPos(templ.pos) {
     val templ1 = (
-      Template(templ.parents, emptyValDef, addMixinConstructorDef(clazz, templ.body map implMemberDef))
+      Template(templ.parents, noSelfType, addMixinConstructorDef(clazz, templ.body map implMemberDef))
         setSymbol clazz.newLocalDummy(templ.pos)
     )
     templ1.changeOwner(templ.symbol.owner -> clazz, templ.symbol -> templ1.symbol)
@@ -317,10 +315,10 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
         // body until now, because the typer knows that Any has no
         // constructor and won't accept a call to super.init.
         assert((clazz isSubClass AnyValClass) || clazz.info.parents.isEmpty, clazz)
-        Block(List(Apply(gen.mkSuperSelect, Nil)), expr)
+        Block(List(Apply(gen.mkSuperInitCall, Nil)), expr)
 
       case Block(stats, expr) =>
-        // needs `hasSymbol` check because `supercall` could be a block (named / default args)
+        // needs `hasSymbolField` check because `supercall` could be a block (named / default args)
         val (presuper, supercall :: rest) = stats span (t => t.hasSymbolWhich(_ hasFlag PRESUPER))
         treeCopy.Block(tree, presuper ::: (supercall :: mixinConstructorCalls ::: rest), expr)
     }
@@ -340,7 +338,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
           deriveDefDef(tree)(addMixinConstructorCalls(_, sym.owner)) // (3)
         case Template(parents, self, body) =>
           val parents1 = sym.owner.info.parents map (t => TypeTree(t) setPos tree.pos)
-          treeCopy.Template(tree, parents1, emptyValDef, body)
+          treeCopy.Template(tree, parents1, noSelfType, body)
         case This(_) if sym.needsImplClass =>
           val impl = implClass(sym)
           var owner = currentOwner
@@ -352,7 +350,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
           val mix1 = mix
             if (mix == tpnme.EMPTY) mix
             else {
-              val ps = beforeErasure {
+              val ps = enteringErasure {
                 sym.info.parents dropWhile (p => p.symbol.name != mix)
               }
               assert(!ps.isEmpty, tree);
@@ -369,29 +367,3 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
     }
   }
 }
-/*
-    val ensureNoEscapes = new TypeTraverser {
-      def ensureNoEscape(sym: Symbol) {
-        if (sym.hasFlag(PRIVATE)) {
-          var o = currentOwner;
-          while (o != NoSymbol && o != sym.owner && !o.isLocal && !o.hasFlag(PRIVATE))
-          o = o.owner
-          if (o == sym.owner) sym.makeNotPrivate(base);
-        }
-      }
-      def traverse(t: Type): TypeTraverser = {
-        t match {
-          case TypeRef(qual, sym, args) =>
-            ensureNoEscape(sym)
-            mapOver(t)
-          case ClassInfoType(parents, decls, clazz) =>
-            parents foreach { p => traverse; () }
-            traverse(t.typeOfThis)
-          case _ =>
-            mapOver(t)
-        }
-        this
-      }
-    }
-
-*/
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 7a0b034..f14fce5 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -11,7 +11,7 @@ import Flags._
 import scala.collection._
 import scala.language.postfixOps
 
-abstract class CleanUp extends Transform with ast.TreeDSL {
+abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
   import global._
   import definitions._
   import CODE._
@@ -20,10 +20,22 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
   /** the following two members override abstract members in Transform */
   val phaseName: String = "cleanup"
 
+  /* used in GenBCode: collects ClassDef symbols owning a main(Array[String]) method */
+  private var entryPoints: List[Symbol] = null
+  def getEntryPoints: List[Symbol] = {
+    assert(settings.isBCodeActive, "Candidate Java entry points are collected here only when GenBCode in use.")
+    entryPoints sortBy ("" + _.fullName) // For predictably ordered error messages.
+  }
+
+  override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = {
+    entryPoints = if (settings.isBCodeActive) Nil else null;
+    super.newPhase(prev)
+  }
+
   protected def newTransformer(unit: CompilationUnit): Transformer =
     new CleanUpTransformer(unit)
 
-  class CleanUpTransformer(unit: CompilationUnit) extends Transformer {
+  class CleanUpTransformer(unit: CompilationUnit) extends StaticsTransformer {
     private val newStaticMembers      = mutable.Buffer.empty[Tree]
     private val newStaticInits        = mutable.Buffer.empty[Tree]
     private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol]
@@ -32,25 +44,12 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
       newStaticInits.clear()
       symbolsStoredAsStatic.clear()
     }
-    private def savingStatics[T](body: => T): T = {
-      val savedNewStaticMembers : mutable.Buffer[Tree] = newStaticMembers.clone()
-      val savedNewStaticInits   : mutable.Buffer[Tree] = newStaticInits.clone()
-      val savedSymbolsStoredAsStatic : mutable.Map[String, Symbol] = symbolsStoredAsStatic.clone()
-      val result = body
-
-      clearStatics()
-      newStaticMembers      ++= savedNewStaticMembers
-      newStaticInits        ++= savedNewStaticInits
-      symbolsStoredAsStatic ++= savedSymbolsStoredAsStatic
-
-      result
-    }
     private def transformTemplate(tree: Tree) = {
-      val Template(parents, self, body) = tree
+      val Template(_, _, body) = tree
       clearStatics()
       val newBody = transformTrees(body)
       val templ   = deriveTemplate(tree)(_ => transformTrees(newStaticMembers.toList) ::: newBody)
-      try addStaticInits(templ) // postprocess to include static ctors
+      try addStaticInits(templ, newStaticInits, localTyper) // postprocess to include static ctors
       finally clearStatics()
     }
     private def mkTerm(prefix: String): TermName = unit.freshTermName(prefix)
@@ -60,21 +59,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
 
     private var localTyper: analyzer.Typer = null
 
-    private object MethodDispatchType extends scala.Enumeration {
-      val NO_CACHE, MONO_CACHE, POLY_CACHE = Value
-    }
-    import MethodDispatchType.{ NO_CACHE, MONO_CACHE, POLY_CACHE }
-    private def dispatchType() = settings.refinementMethodDispatch.value match {
-      case "no-cache"   => NO_CACHE
-      case "mono-cache" => MONO_CACHE
-      case "poly-cache" => POLY_CACHE
-    }
-
-    def shouldRewriteTry(tree: Try) = {
-      val sym = tree.tpe.typeSymbol
-      forMSIL && (sym != UnitClass) && (sym != NothingClass)
-    }
-
     private def typedWithPos(pos: Position)(tree: Tree) =
       localTyper.typedPos(pos)(tree)
 
@@ -91,7 +75,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
     def transformApplyDynamic(ad: ApplyDynamic) = {
       val qual0 = ad.qual
       val params = ad.args
-        if (settings.logReflectiveCalls.value)
+        if (settings.logReflectiveCalls)
           unit.echo(ad.pos, "method invocation uses reflection")
 
         val typedPos = typedWithPos(ad.pos) _
@@ -106,11 +90,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
             if (isFinal) FINAL else 0
           )
 
-          val varSym = currentClass.newVariable(mkTerm("" + forName), ad.pos, flags) setInfoAndEnter forType
+          val varSym = currentClass.newVariable(mkTerm("" + forName), ad.pos, flags.toLong) setInfoAndEnter forType
           if (!isFinal)
             varSym.addAnnotation(VolatileAttr)
 
-          val varDef = typedPos( VAL(varSym) === forInit )
+          val varDef = typedPos(ValDef(varSym, forInit))
           newStaticMembers append transform(varDef)
 
           val varInit = typedPos( REF(varSym) === forInit )
@@ -120,7 +104,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
         }
 
         def addStaticMethodToClass(forBody: (Symbol, Symbol) => Tree): Symbol = {
-          val methSym = currentClass.newMethod(mkTerm(nme.reflMethodName), ad.pos, STATIC | SYNTHETIC)
+          val methSym = currentClass.newMethod(mkTerm(nme.reflMethodName.toString), ad.pos, STATIC | SYNTHETIC)
           val params  = methSym.newSyntheticValueParams(List(ClassClass.tpe))
           methSym setInfoAndEnter MethodType(params, MethodClass.tpe)
 
@@ -132,147 +116,79 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
         def fromTypesToClassArrayLiteral(paramTypes: List[Type]): Tree =
           ArrayValue(TypeTree(ClassClass.tpe), paramTypes map LIT)
 
-        /* ... */
-        def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = dispatchType match {
-          case NO_CACHE =>
-
-              /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)":
-
-                var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
-
-                def reflMethod$Method(forReceiver: JClass[_]): JMethod =
-                  forReceiver.getMethod("xyz", reflParams$Cache)
-
-              */
-
-              val reflParamsCacheSym: Symbol =
-                addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
-
-              addStaticMethodToClass((_, forReceiverSym) =>
-                gen.mkMethodCall(REF(forReceiverSym), Class_getMethod, Nil, List(LIT(method), REF(reflParamsCacheSym)))
-              )
+        def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = {
+          /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
+             (SoftReference so that it does not interfere with classloader garbage collection,
+             see ticket #2365 for details):
 
-            case MONO_CACHE =>
+            var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
 
-              /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
-                 (but with a SoftReference wrapping reflClass$Cache, similarly in the poly Cache) :
+            var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
 
-                var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
-
-                var reflMethod$Cache: JMethod = null
-
-                var reflClass$Cache: JClass[_] = null
-
-                def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
-                  if (reflClass$Cache != forReceiver) {
-                    reflMethod$Cache = forReceiver.getMethod("xyz", reflParams$Cache)
-                    reflClass$Cache = forReceiver
-                  }
-                  reflMethod$Cache
-                }
-
-              */
-
-              val reflParamsCacheSym: Symbol =
-                addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
-
-              val reflMethodCacheSym: Symbol =
-                addStaticVariableToClass(nme.reflMethodCacheName, MethodClass.tpe, NULL, false)
-
-              val reflClassCacheSym: Symbol =
-                addStaticVariableToClass(nme.reflClassCacheName, SoftReferenceClass.tpe, NULL, false)
-
-              def isCacheEmpty(receiver: Symbol): Tree =
-                reflClassCacheSym.IS_NULL() OR (reflClassCacheSym.GET() OBJ_NE REF(receiver))
-
-              addStaticMethodToClass((_, forReceiverSym) =>
-                BLOCK(
-                  IF (isCacheEmpty(forReceiverSym)) THEN BLOCK(
-                    REF(reflMethodCacheSym) === ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym))) ,
-                    REF(reflClassCacheSym) === gen.mkSoftRef(REF(forReceiverSym)),
-                    UNIT
-                  ) ENDIF,
-                  REF(reflMethodCacheSym)
-                )
-              )
-
-            case POLY_CACHE =>
-
-              /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
-                 (SoftReference so that it does not interfere with classloader garbage collection, see ticket
-                 #2365 for details):
-
-                var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
-
-                var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
+            def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
+              var methodCache: MethodCache = reflPoly$Cache.find(forReceiver)
+              if (methodCache eq null) {
+                methodCache = new EmptyMethodCache
+                reflPoly$Cache = new SoftReference(methodCache)
+              }
+              var method: JMethod = methodCache.find(forReceiver)
+              if (method ne null)
+                return method
+              else {
+                method = ScalaRunTime.ensureAccessible(forReceiver.getMethod("xyz", reflParams$Cache))
+                reflPoly$Cache = new SoftReference(methodCache.add(forReceiver, method))
+                return method
+              }
+            }
+          */
 
-                def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
-                  var methodCache: MethodCache = reflPoly$Cache.find(forReceiver)
-                  if (methodCache eq null) {
-                    methodCache = new EmptyMethodCache
-                    reflPoly$Cache = new SoftReference(methodCache)
-                  }
-                  var method: JMethod = methodCache.find(forReceiver)
-                  if (method ne null)
-                    return method
-                  else {
-                    method = ScalaRunTime.ensureAccessible(forReceiver.getMethod("xyz", reflParams$Cache))
-                    reflPoly$Cache = new SoftReference(methodCache.add(forReceiver, method))
-                    return method
-                  }
-                }
+          val reflParamsCacheSym: Symbol =
+            addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
 
-              */
+          def mkNewPolyCache = gen.mkSoftRef(NEW(TypeTree(EmptyMethodCacheClass.tpe)))
+          val reflPolyCacheSym: Symbol = addStaticVariableToClass(nme.reflPolyCacheName, SoftReferenceClass.tpe, mkNewPolyCache, false)
 
-              val reflParamsCacheSym: Symbol =
-                addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
+          def getPolyCache = gen.mkCast(fn(REF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
 
-              def mkNewPolyCache = gen.mkSoftRef(NEW(TypeTree(EmptyMethodCacheClass.tpe)))
-              val reflPolyCacheSym: Symbol = (
-                addStaticVariableToClass(nme.reflPolyCacheName, SoftReferenceClass.tpe, mkNewPolyCache, false)
-              )
-              def getPolyCache = gen.mkCast(fn(REF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
+          addStaticMethodToClass((reflMethodSym, forReceiverSym) => {
+            val methodCache = reflMethodSym.newVariable(mkTerm("methodCache"), ad.pos) setInfo MethodCacheClass.tpe
+            val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
 
-              addStaticMethodToClass((reflMethodSym, forReceiverSym) => {
-                val methodCache = reflMethodSym.newVariable(mkTerm("methodCache"), ad.pos) setInfo MethodCacheClass.tpe
-                val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
+            BLOCK(
+              ValDef(methodCache, getPolyCache),
+              IF (REF(methodCache) OBJ_EQ NULL) THEN BLOCK(
+                REF(methodCache) === NEW(TypeTree(EmptyMethodCacheClass.tpe)),
+                REF(reflPolyCacheSym) === gen.mkSoftRef(REF(methodCache))
+              ) ENDIF,
 
+              ValDef(methodSym, (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym))),
+              IF (REF(methodSym) OBJ_NE NULL) .
+                THEN (Return(REF(methodSym)))
+              ELSE {
+                def methodSymRHS  = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym)))
+                def cacheRHS      = ((REF(methodCache) DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
                 BLOCK(
-                  VAR(methodCache) === getPolyCache,
-                  IF (REF(methodCache) OBJ_EQ NULL) THEN BLOCK(
-                    REF(methodCache) === NEW(TypeTree(EmptyMethodCacheClass.tpe)),
-                    REF(reflPolyCacheSym) === gen.mkSoftRef(REF(methodCache))
-                  ) ENDIF,
-
-                  VAR(methodSym) === (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym)),
-                  IF (REF(methodSym) OBJ_NE NULL) .
-                    THEN (Return(REF(methodSym)))
-                  ELSE {
-                    def methodSymRHS  = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym)))
-                    def cacheRHS      = ((REF(methodCache) DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
-                    BLOCK(
-                      REF(methodSym)        === (REF(ensureAccessibleMethod) APPLY (methodSymRHS)),
-                      REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
-                      Return(REF(methodSym))
-                    )
-                  }
+                  REF(methodSym)        === (REF(currentRun.runDefinitions.ensureAccessibleMethod) APPLY (methodSymRHS)),
+                  REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
+                  Return(REF(methodSym))
                 )
-              })
-
+              }
+            )
+          })
         }
 
         /* ### HANDLING METHODS NORMALLY COMPILED TO OPERATORS ### */
 
         def testForName(name: Name): Tree => Tree = t => (
           if (nme.CommonOpNames(name))
-            gen.mkMethodCall(definitions.Boxes_isNumberOrBool, t :: Nil)
+            gen.mkMethodCall(currentRun.runDefinitions.Boxes_isNumberOrBool, t :: Nil)
           else if (nme.BooleanOpNames(name))
             t IS_OBJ BoxedBooleanClass.tpe
           else
-            gen.mkMethodCall(definitions.Boxes_isNumber, t :: Nil)
+            gen.mkMethodCall(currentRun.runDefinitions.Boxes_isNumber, t :: Nil)
         )
 
-        /** The Tree => Tree function in the return is necessary to prevent the original qual
+        /*  The Tree => Tree function in the return is necessary to prevent the original qual
          *  from being duplicated in the resulting code.  It may be a side-effecting expression,
          *  so all the test logic is routed through gen.evalOnce, which creates a block like
          *    { val x$1 = qual; if (x$1.foo || x$1.bar) f1(x$1) else f2(x$1) }
@@ -284,7 +200,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
             else if (params.tail.isEmpty) nme.primitiveInfixMethodName(name)
             else nme.NO_NAME
           )
-          definitions.getDeclIfDefined(BoxesRunTimeClass, methodName) match {
+          getDeclIfDefined(BoxesRunTimeClass, methodName) match {
             case NoSymbol => None
             case sym      => assert(!sym.isOverloaded, sym) ; Some((sym, testForName(name)))
           }
@@ -303,6 +219,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
 
         /* ### CALLING THE APPLY ### */
         def callAsReflective(paramTypes: List[Type], resType: Type): Tree = {
+          val runDefinitions = currentRun.runDefinitions
+          import runDefinitions._
+
           gen.evalOnce(qual, currentOwner, unit) { qual1 =>
             /* Some info about the type of the method being called. */
             val methSym       = ad.symbol
@@ -322,11 +241,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
             // If there's any chance this signature could be met by an Array.
             val isArrayMethodSignature = {
               def typesMatchApply = paramTypes match {
-                case List(tp) => tp <:< IntClass.tpe
+                case List(tp) => tp <:< IntTpe
                 case _        => false
               }
               def typesMatchUpdate = paramTypes match {
-                case List(tp1, tp2) => (tp1 <:< IntClass.tpe) && isMaybeUnit
+                case List(tp1, tp2) => (tp1 <:< IntTpe) && isMaybeUnit
                 case _              => false
               }
 
@@ -357,13 +276,13 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
               else if (resultSym == ObjectClass) tree                                     // no cast necessary
               else gen.mkCast(tree, boxedResType)                                         // cast to expected type
 
-            /** Normal non-Array call */
+            /* Normal non-Array call */
             def genDefaultCall = {
               // reflective method call machinery
               val invokeName  = MethodClass.tpe member nme.invoke_                                  // scala.reflect.Method.invoke(...)
               def cache       = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes))     // cache Symbol
-              def lookup      = Apply(cache, List(qual1() GETCLASS))                                // get Method object from cache
-              def invokeArgs  = ArrayValue(TypeTree(ObjectClass.tpe), params)                       // args for invocation
+              def lookup      = Apply(cache, List(qual1() GETCLASS()))                                // get Method object from cache
+              def invokeArgs  = ArrayValue(TypeTree(ObjectTpe), params)                       // args for invocation
               def invocation  = (lookup DOT invokeName)(qual1(), invokeArgs)                        // .invoke(qual1, ...)
 
               // exception catching machinery
@@ -375,7 +294,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
               fixResult(TRY (invocation) CATCH { CASE (catchVar) ==> catchBody } ENDTRY)
             }
 
-            /** A possible primitive method call, represented by methods in BoxesRunTime. */
+            /* A possible primitive method call, represented by methods in BoxesRunTime. */
             def genValueCall(operator: Symbol) = fixResult(REF(operator) APPLY args)
             def genValueCallWithTest = {
               getPrimitiveReplacementForStructuralCall(methSym.name) match {
@@ -386,7 +305,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
               }
             }
 
-            /** A native Array call. */
+            /* A native Array call. */
             def genArrayCall = fixResult(
               methSym.name match {
                 case nme.length => REF(boxMethod(IntClass)) APPLY (REF(arrayLengthMethod) APPLY args)
@@ -397,9 +316,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
               mustBeUnit = methSym.name == nme.update
             )
 
-            /** A conditional Array call, when we can't determine statically if the argument is
-             *  an Array, but the structural type method signature is consistent with an Array method
-             *  so we have to generate both kinds of code.
+            /* A conditional Array call, when we can't determine statically if the argument is
+             * an Array, but the structural type method signature is consistent with an Array method
+             * so we have to generate both kinds of code.
              */
             def genArrayCallWithTest =
               IF ((qual1() GETCLASS()) DOT nme.isArray) THEN genArrayCall ELSE genDefaultCall
@@ -413,103 +332,88 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
           }
         }
 
-        if (settings.refinementMethodDispatch.value == "invoke-dynamic") {
-/*          val guardCallSite: Tree = {
-            val cachedClass = addStaticVariableToClass("cachedClass", definitions.ClassClass.tpe, EmptyTree)
-            val tmpVar = currentOwner.newVariable(ad.pos, unit.freshTermName(ad.pos, "x")).setInfo(definitions.AnyRefClass.tpe)
-            atPos(ad.pos)(Block(List(
-              ValDef(tmpVar, transform(qual))),
-              If(Apply(Select(gen.mkAttributedRef(cachedClass), nme.EQ), List(getClass(Ident(tmpVar)))),
-                 Block(List(Assign(gen.mkAttributedRef(cachedClass), getClass(Ident(tmpVar)))),
-                       treeCopy.ApplyDynamic(ad, Ident(tmpVar), transformTrees(params))),
-                 EmptyTree)))
-          }
-          //println(guardCallSite)
-*/
-          localTyper.typed(treeCopy.ApplyDynamic(ad, transform(qual), transformTrees(params)))
-        }
-        else {
-
-          /* ### BODY OF THE TRANSFORMATION -> remember we're in case ad at ApplyDynamic(qual, params) ### */
-
-          /* This creates the tree that does the reflective call (see general comment
-           * on the apply-dynamic tree for its format). This tree is simply composed
-           * of three successive calls, first to getClass on the callee, then to
-           * getMethod on the class, then to invoke on the method.
-           * - getMethod needs an array of classes for choosing one amongst many
-           *   overloaded versions of the method. This is provided by paramTypeClasses
-           *   and must be done on the static type as Scala's dispatching is static on
-           *   the parameters.
-           * - invoke needs an array of AnyRefs that are the method's arguments. The
-           *   erasure phase guarantees that any parameter passed to a dynamic apply
-           *   is compatible (through boxing). Boxed ints et al. is what invoke expects
-           *   when the applied method expects ints, hence no change needed there.
-           * - in the end, the result of invoke must be fixed, again to deal with arrays.
-           *   This is provided by fixResult. fixResult will cast the invocation's result
-           *   to the method's return type, which is generally ok, except when this type
-           *   is a value type (int et al.) in which case it must cast to the boxed version
-           *   because invoke only returns object and erasure made sure the result is
-           *   expected to be an AnyRef. */
-          val t: Tree = {
-            val (mparams, resType) = ad.symbol.tpe match {
-              case MethodType(mparams, resType) =>
-                assert(params.length == mparams.length, ((params, mparams)))
-                (mparams, resType)
-              case tpe @ OverloadedType(pre, alts) =>
-                unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n     Symbol: ${ad.symbol}\n  Overloads: $tpe\n  Arguments: " + ad.args.map(_.tpe))
-                alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match {
-                  case mt @ MethodType(mparams, resType) :: Nil =>
-                    unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
-                    (mparams, resType)
-                  case _ =>
-                    unit.error(ad.pos, "Cannot resolve overload.")
-                    (Nil, NoType)
-                }
-            }
-            typedPos {
-              val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
-              qual = REF(sym)
-
-              BLOCK(
-                VAL(sym) === qual0,
-                callAsReflective(mparams map (_.tpe), resType)
-              )
-            }
+        {
+
+        /* ### BODY OF THE TRANSFORMATION -> remember we're in case ad at ApplyDynamic(qual, params) ### */
+
+        /* This creates the tree that does the reflective call (see general comment
+         * on the apply-dynamic tree for its format). This tree is simply composed
+         * of three successive calls, first to getClass on the callee, then to
+         * getMethod on the class, then to invoke on the method.
+         * - getMethod needs an array of classes for choosing one amongst many
+         *   overloaded versions of the method. This is provided by paramTypeClasses
+         *   and must be done on the static type as Scala's dispatching is static on
+         *   the parameters.
+         * - invoke needs an array of AnyRefs that are the method's arguments. The
+         *   erasure phase guarantees that any parameter passed to a dynamic apply
+         *   is compatible (through boxing). Boxed ints et al. is what invoke expects
+         *   when the applied method expects ints, hence no change needed there.
+         * - in the end, the result of invoke must be fixed, again to deal with arrays.
+         *   This is provided by fixResult. fixResult will cast the invocation's result
+         *   to the method's return type, which is generally ok, except when this type
+         *   is a value type (int et al.) in which case it must cast to the boxed version
+         *   because invoke only returns object and erasure made sure the result is
+         *   expected to be an AnyRef. */
+        val t: Tree = {
+          val (mparams, resType) = ad.symbol.tpe match {
+            case MethodType(mparams, resType) =>
+              assert(params.length == mparams.length, ((params, mparams)))
+              (mparams, resType)
+            case tpe @ OverloadedType(pre, alts) =>
+              unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n     Symbol: ${ad.symbol}\n  Overloads: $tpe\n  Arguments: " + ad.args.map(_.tpe))
+              alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match {
+                case mt @ MethodType(mparams, resType) :: Nil =>
+                  unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
+                  (mparams, resType)
+                case _ =>
+                  unit.error(ad.pos, "Cannot resolve overload.")
+                  (Nil, NoType)
+              }
           }
+          typedPos {
+            val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
+            qual = REF(sym)
 
-          /* For testing purposes, the dynamic application's condition
-           * can be printed-out in great detail. Remove? */
-          if (settings.debug.value) {
-            def paramsToString(xs: Any*) = xs map (_.toString) mkString ", "
-            val mstr = ad.symbol.tpe match {
-              case MethodType(mparams, resType) =>
-                """|  with
-                   |  - declared parameter types: '%s'
-                   |  - passed argument types:    '%s'
-                   |  - result type:              '%s'""" .
-                  stripMargin.format(
-                     paramsToString(mparams),
-                     paramsToString(params),
-                     resType.toString
-                  )
-              case _ => ""
-            }
-            log(
-              """Dynamically application '%s.%s(%s)' %s - resulting code: '%s'""".format(
-                qual, ad.symbol.name, paramsToString(params), mstr, t
-              )
+            BLOCK(
+              ValDef(sym, qual0),
+              callAsReflective(mparams map (_.tpe), resType)
             )
           }
+        }
 
-          /* We return the dynamic call tree, after making sure no other
-           * clean-up transformation are to be applied on it. */
-          transform(t)
+        /* For testing purposes, the dynamic application's condition
+         * can be printed-out in great detail. Remove? */
+        if (settings.debug) {
+          def paramsToString(xs: Any*) = xs map (_.toString) mkString ", "
+          val mstr = ad.symbol.tpe match {
+            case MethodType(mparams, resType) =>
+              sm"""|  with
+                   |  - declared parameter types: '${paramsToString(mparams)}'
+                   |  - passed argument types:    '${paramsToString(params)}'
+                   |  - result type:              '${resType.toString}'"""
+            case _ => ""
+          }
+          log(s"""Dynamic application '$qual.${ad.symbol.name}(${paramsToString(params)})' $mstr - resulting code: '$t'""")
         }
-        /* ### END OF DYNAMIC APPLY TRANSFORM ### */
+
+        /* We return the dynamic call tree, after making sure no other
+         * clean-up transformation are to be applied on it. */
+        transform(t)
+      /* ### END OF DYNAMIC APPLY TRANSFORM ### */
+      }
     }
 
     override def transform(tree: Tree): Tree = tree match {
 
+      case _: ClassDef
+      if (entryPoints != null) &&
+         genBCode.isJavaEntryPoint(tree.symbol, currentUnit)
+      =>
+        // collecting symbols for entry points here (as opposed to GenBCode where they are used)
+        // has the advantage of saving an additional pass over all ClassDefs.
+        entryPoints ::= tree.symbol
+        super.transform(tree)
+
       /* Transforms dynamic calls (i.e. calls to methods that are undefined
        * in the erased type space) to -- dynamically -- unsafe calls using
        * reflection. This is used for structural sub-typing of refinement
@@ -555,10 +459,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
        * constructor. */
       case Template(parents, self, body) =>
         localTyper = typer.atOwner(tree, currentClass)
-        if (forMSIL) savingStatics( transformTemplate(tree) )
-        else transformTemplate(tree)
+        transformTemplate(tree)
 
-      case Literal(c) if (c.tag == ClazzTag) && !forMSIL=>
+      case Literal(c) if c.tag == ClazzTag =>
         val tpe = c.typeValue
         typedWithPos(tree.pos) {
           if (isPrimitiveValueClass(tpe.typeSymbol)) {
@@ -571,24 +474,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
           else tree
         }
 
-      /* MSIL requires that the stack is empty at the end of a try-block.
-       * Hence, we here rewrite all try blocks with a result != {Unit, All} such that they
-       * store their result in a local variable. The catch blocks are adjusted as well.
-       * The try tree is substituted by a block whose result expression is a read of that variable. */
-      case theTry @ Try(block, catches, finalizer) if shouldRewriteTry(theTry) =>
-        def transformTry = {
-        val tpe = theTry.tpe.widen
-        val tempVar = currentOwner.newVariable(mkTerm(nme.EXCEPTION_RESULT_PREFIX), theTry.pos).setInfo(tpe)
-        def assignBlock(rhs: Tree) = super.transform(BLOCK(Ident(tempVar) === transform(rhs)))
-
-        val newBlock    = assignBlock(block)
-        val newCatches  = for (CaseDef(pattern, guard, body) <- catches) yield
-          (CASE(super.transform(pattern)) IF (super.transform(guard))) ==> assignBlock(body)
-        val newTry      = Try(newBlock, newCatches, super.transform(finalizer))
-
-        typedWithPos(theTry.pos)(BLOCK(VAL(tempVar) === EmptyTree, newTry, Ident(tempVar)))
-        }
-        transformTry
      /*
       * This transformation should identify Scala symbol invocations in the tree and replace them
       * with references to a static member. Also, whenever a class has at least a single symbol invocation
@@ -596,18 +481,33 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
       * For instance, say we have a Scala class:
       *
       * class Cls {
-      *   // ...
-      *   def someSymbol = `symbolic
-      *   // ...
+      *   def someSymbol1 = 'Symbolic1
+      *   def someSymbol2 = 'Symbolic2
+      *   def sameSymbol1 = 'Symbolic1
+      *   val someSymbol3 = 'Symbolic3
       * }
       *
       * After transformation, this class looks like this:
       *
       * class Cls {
-      *   private "static" val <some_name>$symbolic = Symbol("symbolic")
-      *   // ...
-      *   def someSymbol = <some_name>$symbolic
-      *   // ...
+      *   private <static> var symbol$1: scala.Symbol
+      *   private <static> var symbol$2: scala.Symbol
+      *   private <static> var symbol$3: scala.Symbol
+      *   private          val someSymbol3: scala.Symbol
+      *
+      *   private <static> def <clinit> = {
+      *     symbol$1 = Symbol.apply("Symbolic1")
+      *     symbol$2 = Symbol.apply("Symbolic2")
+      *   }
+      *
+      *   private def <init> = {
+      *     someSymbol3 = symbol$3
+      *   }
+      *
+      *   def someSymbol1 = symbol$1
+      *   def someSymbol2 = symbol$2
+      *   def sameSymbol1 = symbol$1
+      *   val someSymbol3 = someSymbol3
       * }
       *
       * The reasoning behind this transformation is the following. Symbols get interned - they are stored
@@ -617,17 +517,17 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
       * is accessed only once during class loading, and after that, the unique symbol is in the static
       * member. Hence, it is cheap to both reach the unique symbol and do equality checks on it.
       *
-      * And, finally, be advised - scala symbol literal and the Symbol class of the compiler
+      * And, finally, be advised - Scala's Symbol literal (scala.Symbol) and the Symbol class of the compiler
       * have little in common.
       */
       case Apply(fn, (arg @ Literal(Constant(symname: String))) :: Nil) if fn.symbol == Symbol_apply =>
         def transformApply = {
-        // add the symbol name to a map if it's not there already
-        val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil)
-        val staticFieldSym = getSymbolStaticField(tree.pos, symname, rhs, tree)
-        // create a reference to a static field
-        val ntree = typedWithPos(tree.pos)(REF(staticFieldSym))
-        super.transform(ntree)
+          // add the symbol name to a map if it's not there already
+          val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil)
+          val staticFieldSym = getSymbolStaticField(tree.pos, symname, rhs, tree)
+          // create a reference to a static field
+          val ntree = typedWithPos(tree.pos)(REF(staticFieldSym))
+          super.transform(ntree)
         }
         transformApply
 
@@ -636,7 +536,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
       //
       // See SI-6611; we must *only* do this for literal vararg arrays.
       case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(arg @ StripCast(ArrayValue(_, _)))), _))
-      if wrapRefArrayMeth.symbol == Predef_wrapRefArray && appMeth.symbol == ArrayModule_genericApply =>
+      if wrapRefArrayMeth.symbol == currentRun.runDefinitions.Predef_wrapRefArray && appMeth.symbol == ArrayModule_genericApply =>
         super.transform(arg)
       case Apply(appMeth, List(elem0, Apply(wrapArrayMeth, List(rest @ ArrayValue(elemtpt, _)))))
       if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && appMeth.symbol == ArrayModule_apply(elemtpt.tpe) =>
@@ -657,12 +557,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
         // create a symbol for the static field
         val stfieldSym = (
           currentClass.newVariable(mkTerm("symbol$"), pos, PRIVATE | STATIC | SYNTHETIC | FINAL)
-            setInfo SymbolClass.tpe
+            setInfoAndEnter SymbolClass.tpe
         )
-        currentClass.info.decls enter stfieldSym
 
         // create field definition and initialization
-        val stfieldDef  = theTyper.typedPos(pos)(VAL(stfieldSym) === rhs)
+        val stfieldDef  = theTyper.typedPos(pos)(ValDef(stfieldSym, rhs))
         val stfieldInit = theTyper.typedPos(pos)(REF(stfieldSym) === rhs)
 
         // add field definition to new defs
@@ -673,44 +572,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
       })
     }
 
-    /* finds the static ctor DefDef tree within the template if it exists. */
-    private def findStaticCtor(template: Template): Option[Tree] =
-      template.body find {
-        case defdef @ DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => defdef.symbol.hasStaticFlag
-        case _ => false
-      }
-
-    /* changes the template for the class so that it contains a static constructor with symbol fields inits,
-     * augments an existing static ctor if one already existed.
-     */
-    private def addStaticInits(template: Template): Template = {
-      if (newStaticInits.isEmpty)
-        template
-      else {
-        val newCtor = findStaticCtor(template) match {
-          // in case there already were static ctors - augment existing ones
-          // currently, however, static ctors aren't being generated anywhere else
-          case Some(ctor @ DefDef(_,_,_,_,_,_)) =>
-            // modify existing static ctor
-            deriveDefDef(ctor) {
-              case block @ Block(stats, expr) =>
-                // need to add inits to existing block
-                treeCopy.Block(block, newStaticInits.toList ::: stats, expr)
-              case term: TermTree =>
-                // need to create a new block with inits and the old term
-                treeCopy.Block(term, newStaticInits.toList, term)
-            }
-          case _ =>
-            // create new static ctor
-            val staticCtorSym  = currentClass.newStaticConstructor(template.pos)
-            val rhs            = Block(newStaticInits.toList, Literal(Constant(())))
-
-            localTyper.typedPos(template.pos)(DefDef(staticCtorSym, rhs))
-        }
-        deriveTemplate(template)(newCtor :: _)
-      }
-    }
-
   } // CleanUpTransformer
 
 }
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 1a1137f..391bce5 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -9,12 +9,11 @@ package transform
 import scala.collection.{ mutable, immutable }
 import scala.collection.mutable.ListBuffer
 import symtab.Flags._
-import util.TreeSet
 
 /** This phase converts classes with parameters into Java-like classes with
  *  fields, which are assigned to from constructors.
  */
-abstract class Constructors extends Transform with ast.TreeDSL {
+abstract class Constructors extends Statics with Transform with ast.TreeDSL {
   import global._
   import definitions._
 
@@ -24,557 +23,709 @@ abstract class Constructors extends Transform with ast.TreeDSL {
   protected def newTransformer(unit: CompilationUnit): Transformer =
     new ConstructorTransformer(unit)
 
-  private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = perRunCaches.newMap[Symbol, List[Tree]]
-  private val ctorParams: mutable.Map[Symbol, List[Symbol]] = perRunCaches.newMap[Symbol, List[Symbol]]
+  private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = perRunCaches.newMap[Symbol, List[Tree]]()
+  private val ctorParams: mutable.Map[Symbol, List[Symbol]] = perRunCaches.newMap[Symbol, List[Symbol]]()
 
   class ConstructorTransformer(unit: CompilationUnit) extends Transformer {
 
-    def transformClassTemplate(impl: Template): Template = {
-      val clazz = impl.symbol.owner  // the transformed class
-      val stats = impl.body          // the transformed template body
-      val localTyper = typer.atOwner(impl, clazz)
-
-      val specializedFlag: Symbol = clazz.info.decl(nme.SPECIALIZED_INSTANCE)
-      val shouldGuard = (specializedFlag != NoSymbol) && !clazz.hasFlag(SPECIALIZED)
-
-      case class ConstrInfo(
-        constr: DefDef,               // The primary constructor
-        constrParams: List[Symbol],   // ... and its parameters
-        constrBody: Block             // ... and its body
+    /*
+     * Inspect for obvious out-of-order initialization; concrete, eager vals or vars, declared in this class,
+     * for which a reference to the member precedes its definition.
+     */
+    private def checkUninitializedReads(cd: ClassDef) {
+      val stats = cd.impl.body
+      val clazz = cd.symbol
+
+      def checkableForInit(sym: Symbol) = (
+           (sym ne null)
+        && (sym.isVal || sym.isVar)
+        && !(sym hasFlag LAZY | DEFERRED | SYNTHETIC)
       )
-      // decompose primary constructor into the three entities above.
-      val constrInfo: ConstrInfo = {
-        stats find (_.symbol.isPrimaryConstructor) match {
-          case Some(ddef @ DefDef(_, _, _, List(vparams), _, rhs @ Block(_, _))) =>
-        ConstrInfo(ddef, vparams map (_.symbol), rhs)
-          case x =>
-            // AnyVal constructor is OK
-            assert(clazz eq AnyValClass, "no constructor in template: impl = " + impl)
-            return impl
-        }
-      }
-      import constrInfo._
-
-      // The parameter accessor fields which are members of the class
-      val paramAccessors = clazz.constrParamAccessors
-
-      // The constructor parameter corresponding to an accessor
-      def parameter(acc: Symbol): Symbol =
-        parameterNamed(nme.getterName(acc.originalName))
-
-      // The constructor parameter with given name. This means the parameter
-      // has given name, or starts with given name, and continues with a `$` afterwards.
-      def parameterNamed(name: Name): Symbol = {
-        def matchesName(param: Symbol) = param.name == name || param.name.startsWith(name + nme.NAME_JOIN_STRING)
-
-        (constrParams filter matchesName) match {
-          case Nil    => abort(name + " not in " + constrParams)
-          case p :: _ => p
-        }
-      }
-
-      var usesSpecializedField: Boolean = false
-
-      // A transformer for expressions that go into the constructor
-      val intoConstructorTransformer = new Transformer {
-        def isParamRef(sym: Symbol) =
-          sym.isParamAccessor &&
-          sym.owner == clazz &&
-          !(clazz isSubClass DelayedInitClass) &&
-          !(sym.isGetter && sym.accessed.isVariable) &&
-          !sym.isSetter
-        private def possiblySpecialized(s: Symbol) = specializeTypes.specializedTypeVars(s).nonEmpty
-        override def transform(tree: Tree): Tree = tree match {
-          case Apply(Select(This(_), _), List()) =>
-            // references to parameter accessor methods of own class become references to parameters
-            // outer accessors become references to $outer parameter
-            if (isParamRef(tree.symbol) && !possiblySpecialized(tree.symbol))
-              gen.mkAttributedIdent(parameter(tree.symbol.accessed)) setPos tree.pos
-            else if (tree.symbol.outerSource == clazz && !clazz.isImplClass)
-              gen.mkAttributedIdent(parameterNamed(nme.OUTER)) setPos tree.pos
-            else
-              super.transform(tree)
-          case Select(This(_), _) if (isParamRef(tree.symbol) && !possiblySpecialized(tree.symbol)) =>
-            // references to parameter accessor field of own class become references to parameters
-            gen.mkAttributedIdent(parameter(tree.symbol)) setPos tree.pos
-          case Select(_, _) =>
-            if (specializeTypes.specializedTypeVars(tree.symbol).nonEmpty)
-              usesSpecializedField = true
-            super.transform(tree)
-          case _ =>
-            super.transform(tree)
-        }
-      }
-
-      // Move tree into constructor, take care of changing owner from `oldowner` to constructor symbol
-      def intoConstructor(oldowner: Symbol, tree: Tree) =
-        intoConstructorTransformer transform tree.changeOwner(oldowner -> constr.symbol)
-
-      // Should tree be moved in front of super constructor call?
-      def canBeMoved(tree: Tree) = tree match {
-        case ValDef(mods, _, _, _) => (mods hasFlag PRESUPER | PARAMACCESSOR)
-        case _                     => false
-      }
-
-      // Create an assignment to class field `to` with rhs `from`
-      def mkAssign(to: Symbol, from: Tree): Tree =
-        localTyper.typedPos(to.pos) { Assign(Select(This(clazz), to), from) }
-
-      // Create code to copy parameter to parameter accessor field.
-      // If parameter is $outer, check that it is not null so that we NPE
-      // here instead of at some unknown future $outer access.
-      def copyParam(to: Symbol, from: Symbol): Tree = {
-        import CODE._
-        val result = mkAssign(to, Ident(from))
-
-        if (from.name != nme.OUTER ||
-            from.tpe.typeSymbol.isPrimitiveValueClass) result
-        else localTyper.typedPos(to.pos) {
-          IF (from OBJ_EQ NULL) THEN Throw(NullPointerExceptionClass.tpe) ELSE result
+      val uninitializedVals = mutable.Set[Symbol](
+        stats collect { case vd: ValDef if checkableForInit(vd.symbol) => vd.symbol.accessedOrSelf }: _*
+      )
+      if (uninitializedVals.size > 1)
+        log("Checking constructor for init order issues among: " + uninitializedVals.toList.map(_.name.toString.trim).distinct.sorted.mkString(", "))
+
+      for (stat <- stats) {
+        // Checking the qualifier symbol is necessary to prevent a selection on
+        // another instance of the same class from potentially appearing to be a forward
+        // reference on the member in the current class.
+        def check(tree: Tree) = {
+          for (t <- tree) t match {
+            case t: RefTree if uninitializedVals(t.symbol.accessedOrSelf) && t.qualifier.symbol == clazz =>
+              unit.warning(t.pos, s"Reference to uninitialized ${t.symbol.accessedOrSelf}")
+            case _ =>
+          }
         }
-      }
-
-      // The list of definitions that go into class
-      val defBuf = new ListBuffer[Tree]
-
-      // The auxiliary constructors, separate from the defBuf since they should
-      // follow the primary constructor
-      val auxConstructorBuf = new ListBuffer[Tree]
-
-      // The list of statements that go into constructor after and including the superclass constructor call
-      val constrStatBuf = new ListBuffer[Tree]
-
-      // The list of early initializer statements that go into constructor before the superclass constructor call
-      val constrPrefixBuf = new ListBuffer[Tree]
-
-      // The early initialized field definitions of the class (these are the class members)
-      val presupers = treeInfo.preSuperFields(stats)
-
-      // generate code to copy pre-initialized fields
-      for (stat <- constrBody.stats) {
-        constrStatBuf += stat
         stat match {
-          case ValDef(mods, name, _, _) if (mods hasFlag PRESUPER) =>
-            // stat is the constructor-local definition of the field value
-            val fields = presupers filter (
-              vdef => nme.localToGetter(vdef.name) == name)
-            assert(fields.length == 1)
-            val to = fields.head.symbol
-            if (!to.tpe.isInstanceOf[ConstantType])
-              constrStatBuf += mkAssign(to, Ident(stat.symbol))
-          case _ =>
+          case vd: ValDef      =>
+            // doing this first allows self-referential vals, which to be a conservative
+            // warner we will do because it's possible though difficult for it to be useful.
+            uninitializedVals -= vd.symbol.accessedOrSelf
+            if (!vd.symbol.isLazy)
+              check(vd.rhs)
+          case _: MemberDef    => // skip other member defs
+          case t               => check(t) // constructor body statement
         }
       }
 
-      // Triage all template definitions to go into defBuf/auxConstructorBuf, constrStatBuf, or constrPrefixBuf.
-      for (stat <- stats) stat match {
-        case DefDef(_,_,_,_,_,rhs) =>
-          // methods with constant result type get literals as their body
-          // all methods except the primary constructor go into template
-          stat.symbol.tpe match {
-            case MethodType(List(), tp @ ConstantType(c)) =>
-              defBuf += deriveDefDef(stat)(Literal(c) setPos _.pos setType tp)
-            case _ =>
-              if (stat.symbol.isPrimaryConstructor) ()
-              else if (stat.symbol.isConstructor) auxConstructorBuf += stat
-              else defBuf += stat
+    } // end of checkUninitializedReads()
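(A hypothetical example, not from the patch, of the out-of-order initialization this check warns about:)

    class Example {
      val doubled = base * 2   // forward reference: warns "Reference to uninitialized value base"
      val base    = 21         // vals initialize in declaration order, so doubled ends up 0, not 42
    }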
+
+    override def transform(tree: Tree): Tree = {
+      tree match {
+        case cd @ ClassDef(mods0, name0, tparams0, impl0) if !cd.symbol.isInterface && !isPrimitiveValueClass(cd.symbol) =>
+          if(cd.symbol eq AnyValClass) {
+            cd
           }
-        case ValDef(_, _, _, rhs) =>
-          // val defs with constant right-hand sides are eliminated.
-          // for all other val defs, an empty valdef goes into the template and
-          // the initializer goes as an assignment into the constructor
-          // if the val def is an early initialized or a parameter accessor, it goes
-          // before the superclass constructor call, otherwise it goes after.
-          // Lazy vals don't get the assignment in the constructor.
-          if (!stat.symbol.tpe.isInstanceOf[ConstantType]) {
-            if (rhs != EmptyTree && !stat.symbol.isLazy) {
-              val rhs1 = intoConstructor(stat.symbol, rhs);
-              (if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
-                stat.symbol, rhs1)
-            }
-            defBuf += deriveValDef(stat)(_ => EmptyTree)
+          else {
+            checkUninitializedReads(cd)
+            val tplTransformer = new TemplateTransformer(unit, impl0)
+            treeCopy.ClassDef(cd, mods0, name0, tparams0, tplTransformer.transformed)
           }
-        case ClassDef(_, _, _, _) =>
-          // classes are treated recursively, and left in the template
-          defBuf += new ConstructorTransformer(unit).transform(stat)
         case _ =>
-          // all other statements go into the constructor
-          constrStatBuf += intoConstructor(impl.symbol, stat)
+          super.transform(tree)
       }
+    }
 
-      // ----------- avoid making fields for symbols that are not accessed --------------
+  } // ConstructorTransformer
 
-      // A sorted set of symbols that are known to be accessed outside the primary constructor.
-      val accessedSyms = new TreeSet[Symbol]((x, y) => x isLess y)
+  /*
+   * Summary
+   * -------
+   *
+   * The following get elided unless they're actually needed:
+   *   (a) parameter-accessor fields for non-val, non-var, constructor-param-symbols, as well as
+   *   (b) outer accessors of a final class which don't override anything.
+   *
+   *
+   * Gory details
+   * ------------
+   *
+   * The constructors phase elides
+   *
+   *  (a) parameter-accessor fields for non-val, non-var, constructor-param-symbols
+   *      provided they're only accessed within the primary constructor;
+   *
+   * as well as
+   *
+   *  (b) outer accessors directly owned by the class of interest,
+   *      provided that class is final, they don't override anything, and moreover they aren't accessed anywhere.
+   *      An outer accessor is backed by a param-accessor field.
+   *      If an outer-accessor can be elided then its supporting field can be elided as well.
+   *
+   * Once the potential candidates for elision are known (as described above) it remains to visit
+   * those program locations where they might be accessed, and only those.
+   *
+   * What trees can be visited at this point?
+   * To recap, by the time the constructors phase runs, local definitions have been hoisted out of their original owner.
+   * Moreover, by the time elision is about to happen, the `intoConstructors` rewriting
+   * of template-level statements has taken place (the resulting trees can be found in `constrStatBuf`).
+   *
+   * That means:
+   *
+   *   - nested classes are to be found in `defBuf`
+   *
+   *   - value and method definitions are also in `defBuf` and none of them contains local methods or classes.
+   *
+   *   - auxiliary constructors are to be found in `auxConstructorBuf`
+   *
+   * Coming back to the question which trees may contain accesses:
+   *
+   *   (c) regarding parameter-accessor fields, all candidates in (a) are necessarily private-local,
+   *       and thus may only be accessed from value or method definitions owned by the current class
+   *       (ie there's no point drilling down into nested classes).
+   *
+   *   (d) regarding candidates in (b), they are accessible from all places listed in (c) and in addition
+   *       from nested classes (nested at any number of levels).
+   *
+   * In all cases, we're done with traversing as soon as all candidates have been ruled out.
+   *
+   * Finally, the whole affair of eliding is avoided for DelayedInit subclasses,
+   * given that for them usually nothing gets elided anyway.
+   * That's a consequence of re-locating the post-super-call statements from their original location
+   * (the primary constructor) into a dedicated synthetic method that an anon-closure may invoke, as required by DelayedInit.
+   *
+   */
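(To make (a) concrete, a hypothetical example, not part of the patch: the private parameter-accessor field backing `x` below is an elision candidate because `x` is neither a val nor a var and is only read inside the primary constructor; reading `x` from a method instead, e.g. `def twice = x * 2`, would force the field to be kept.)

    final class Squared(x: Int) {
      val result = x * x     // field initializer; it runs inside the primary constructor
    }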
+  private trait OmittablesHelper { self: TemplateTransformer =>
+
+    /*
+     * Initially populated with all elision candidates.
+     * Trees are traversed, and those candidates are removed which are actually needed.
+     * After that, `omittables` doesn't shrink anymore: each symbol it contains can be unlinked from clazz.info.decls.
+     */
+    val omittables = mutable.Set.empty[Symbol]
+
+    def populateOmittables() {
+
+      omittables.clear()
+
+      if(isDelayedInitSubclass) {
+        return
+      }
 
-      // a list of outer accessor symbols and their bodies
-      var outerAccessors: List[(Symbol, Tree)] = List()
+      def isParamCandidateForElision(sym: Symbol) = (sym.isParamAccessor && sym.isPrivateLocal)
+      def isOuterCandidateForElision(sym: Symbol) = (sym.isOuterAccessor && sym.owner.isEffectivelyFinal && !sym.isOverridingSymbol)
 
-      // Could symbol's definition be omitted, provided it is not accessed?
-      // This is the case if the symbol is defined in the current class, and
-      // ( the symbol is an object private parameter accessor field, or
-      //   the symbol is an outer accessor of a final class which does not override another outer accessor. )
-      def maybeOmittable(sym: Symbol) = sym.owner == clazz && (
-        sym.isParamAccessor && sym.isPrivateLocal ||
-        sym.isOuterAccessor && sym.owner.isEffectivelyFinal && !sym.isOverridingSymbol &&
-        !(clazz isSubClass DelayedInitClass)
-      )
+      val paramCandidatesForElision: Set[ /*Field*/  Symbol] = (clazz.info.decls.toSet filter isParamCandidateForElision)
+      val outerCandidatesForElision: Set[ /*Method*/ Symbol] = (clazz.info.decls.toSet filter isOuterCandidateForElision)
+
+      omittables ++= paramCandidatesForElision
+      omittables ++= outerCandidatesForElision
 
-      // Is symbol known to be accessed outside of the primary constructor,
-      // or is it a symbol whose definition cannot be omitted anyway?
-      def mustbeKept(sym: Symbol) = !maybeOmittable(sym) || (accessedSyms contains sym)
+      val bodyOfOuterAccessor: Map[Symbol, DefDef] =
+        defBuf.collect { case dd: DefDef if outerCandidatesForElision(dd.symbol) => dd.symbol -> dd }.toMap
 
-      // A traverser to set accessedSyms and outerAccessors
-      val accessTraverser = new Traverser {
-        override def traverse(tree: Tree) = {
+      // no point traversing further once omittables is empty, all candidates ruled out already.
+      object detectUsages extends Traverser {
+        private def markUsage(sym: Symbol) {
+          omittables -= debuglogResult("omittables -= ")(sym)
+          // recursive call to mark as needed the field supporting the outer-accessor-method.
+          bodyOfOuterAccessor get sym foreach (this traverse _.rhs)
+        }
+        override def traverse(tree: Tree): Unit = if (omittables.nonEmpty) {
+          def sym = tree.symbol
           tree match {
-            case DefDef(_, _, _, _, _, body)
-            if (tree.symbol.isOuterAccessor && tree.symbol.owner == clazz && clazz.isEffectivelyFinal) =>
-              debuglog("outerAccessors += " + tree.symbol.fullName)
-              outerAccessors ::= ((tree.symbol, body))
-            case Select(_, _) =>
-              if (!mustbeKept(tree.symbol)) {
-                debuglog("accessedSyms += " + tree.symbol.fullName)
-                accessedSyms addEntry tree.symbol
-              }
-              super.traverse(tree)
-            case _ =>
-              super.traverse(tree)
+            // don't mark as "needed" the field supporting this outer-accessor, ie not just yet.
+            case _: DefDef if outerCandidatesForElision(sym) => ()
+            case _: Select if omittables(sym)                => markUsage(sym) ; super.traverse(tree)
+            case _                                           => super.traverse(tree)
           }
         }
+        def walk(xs: Seq[Tree]) = xs.iterator foreach traverse
+      }
+      if (omittables.nonEmpty) {
+        detectUsages walk defBuf
+        detectUsages walk auxConstructorBuf
       }
+    }
+    def mustBeKept(sym: Symbol) = !omittables(sym)
+
+  } // OmittablesHelper
+
+  /*
+   *  TemplateTransformer rewrites DelayedInit subclasses.
+   *  The list of statements that will end up in the primary constructor can be split into:
+   *
+   *    (a) up to and including the super-constructor call.
+   *        These statements can occur only in the (bytecode-level) primary constructor.
+   *
+   *    (b) remaining statements
+   *
+   *  The purpose of DelayedInit is leaving (b) out of the primary constructor and have their execution "delayed".
+   *
+   *  The rewriting to achieve "delayed initialization" involves:
+   *    (c) an additional, synthetic, public method encapsulating (b)
+   *    (d) an additional, synthetic closure whose argless apply() just invokes (c)
+   *    (e) after executing the statements in (a),
+   *        the primary constructor instantiates (d) and passes it as argument
+   *        to a `delayedInit()` invocation on the current instance.
+   *        In turn, `delayedInit()` is a method defined as abstract in the `DelayedInit` trait
+   *        so that it can be overridden (for an example see `scala.App`)
+   *
+   *  The following helper methods prepare Trees as part of this rewriting:
+   *
+   *    (f) `delayedEndpointDef()` prepares (c).
+   *        A transformer, `constrStatTransformer`, is used to re-locate statements (b) from template-level
+   *        to become statements in method (c). The main task here is re-formulating accesses to params
+   *        of the primary constructors (to recap, (c) has zero-params) in terms of param-accessor fields.
+     *        In a DelayedInit subclass, each constructor parameter keeps its param-accessor field because `mustBeKept()` forces it.
+   *
+   *    (g) `delayedInitClosure()` prepares (d)
+   *
+   *    (h) `delayedInitCall()`    prepares the `delayedInit()` invocation referred to in (e)
+   *
+   *  Both (c) and (d) are added to the Template returned by `transformClassTemplate()`
+   *
+   *  A note of historic interest: Previously the rewriting for DelayedInit would include in the closure body
+   *  all of the delayed initialization sequence, which in turn required:
+   *    - reformulating "accesses-on-this" into "accesses-on-outer", and
+   *    - adding public getters and setters.
+   *
+   *  @param stats the statements in (b) above
+   *
+   *  @return the DefDef for (c) above
+   *
+   * */
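(A hypothetical sketch, not from the patch, of the user-visible effect via scala.App, which mixes in DelayedInit:)

    object Hello extends App {            // App extends DelayedInit
      println("runs via delayedInit")     // a statement in region (b): moved into the synthetic
    }                                     // method (c), wrapped in the closure (d), and finally
                                          // executed through the delayedInit() call in (e)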
+  private trait DelayedInitHelper { self: TemplateTransformer =>
+
+    private def delayedEndpointDef(stats: List[Tree]): DefDef = {
+
+      val methodName = currentUnit.freshTermName("delayedEndpoint$" + clazz.fullNameAsName('$').toString + "$")
+      val methodSym  = clazz.newMethod(methodName, impl.pos, SYNTHETIC | FINAL)
+      methodSym setInfoAndEnter MethodType(Nil, UnitTpe)
+
+      // changeOwner needed because the `stats` contained in the DefDef were owned by the template, not long ago.
+      val blk       = Block(stats, gen.mkZero(UnitTpe)).changeOwner(impl.symbol -> methodSym)
+      val delayedDD = localTyper typed { DefDef(methodSym, Nil, blk) }
+
+      delayedDD.asInstanceOf[DefDef]
+    }
+
+    private def delayedInitClosure(delayedEndPointSym: MethodSymbol): ClassDef = {
+      val satelliteClass = localTyper.typed {
+        atPos(impl.pos) {
+          val closureClass   = clazz.newClass(nme.delayedInitArg.toTypeName, impl.pos, SYNTHETIC | FINAL)
+          val closureParents = List(AbstractFunctionClass(0).tpe)
+
+          closureClass setInfoAndEnter new ClassInfoType(closureParents, newScope, closureClass)
+
+          val outerField: TermSymbol = (
+            closureClass
+              newValue(nme.OUTER, impl.pos, PrivateLocal | PARAMACCESSOR)
+              setInfoAndEnter clazz.tpe
+          )
+          val applyMethod: MethodSymbol = (
+            closureClass
+              newMethod(nme.apply, impl.pos, FINAL)
+              setInfoAndEnter MethodType(Nil, ObjectTpe)
+          )
+          val outerFieldDef     = ValDef(outerField)
+          val closureClassTyper = localTyper.atOwner(closureClass)
+          val applyMethodTyper  = closureClassTyper.atOwner(applyMethod)
+
+          def applyMethodStat =
+            applyMethodTyper.typed {
+              atPos(impl.pos) {
+                val receiver = Select(This(closureClass), outerField)
+                Apply(Select(receiver, delayedEndPointSym), Nil)
+              }
+            }
 
-      // first traverse all definitions except outeraccesors
-      // (outeraccessors are avoided in accessTraverser)
-      for (stat <- defBuf.iterator ++ auxConstructorBuf.iterator)
-        accessTraverser.traverse(stat)
-
-      // then traverse all bodies of outeraccessors which are accessed themselves
-      // note: this relies on the fact that an outer accessor never calls another
-      // outer accessor in the same class.
-      for ((accSym, accBody) <- outerAccessors)
-        if (mustbeKept(accSym)) accessTraverser.traverse(accBody)
-
-      // Initialize all parameters fields that must be kept.
-      val paramInits = paramAccessors filter mustbeKept map { acc =>
-        // Check for conflicting symbol amongst parents: see bug #1960.
-        // It would be better to mangle the constructor parameter name since
-        // it can only be used internally, but I think we need more robust name
-        // mangling before we introduce more of it.
-        val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => s.isGetter && !s.isOuterField && s.enclClass.isTrait)
-        if (conflict ne NoSymbol)
-          unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString))
-
-        copyParam(acc, parameter(acc))
+          val applyMethodDef = DefDef(
+            sym = applyMethod,
+            vparamss = ListOfNil,
+            rhs = Block(applyMethodStat, gen.mkAttributedRef(BoxedUnit_UNIT)))
+
+          ClassDef(
+            sym = closureClass,
+            constrMods = Modifiers(0),
+            vparamss = List(List(outerFieldDef)),
+            body = applyMethodDef :: Nil,
+            superPos = impl.pos)
+        }
       }
 
-      /** Return a single list of statements, merging the generic class constructor with the
-       *  specialized stats. The original statements are retyped in the current class, and
-       *  assignments to generic fields that have a corresponding specialized assignment in
-       *  `specializedStats` are replaced by the specialized assignment.
-       */
-      def mergeConstructors(genericClazz: Symbol, originalStats: List[Tree], specializedStats: List[Tree]): List[Tree] = {
-        val specBuf = new ListBuffer[Tree]
-        specBuf ++= specializedStats
-
-        def specializedAssignFor(sym: Symbol): Option[Tree] =
-          specializedStats find {
-            case Assign(sel @ Select(This(_), _), rhs) =>
-              (    (sel.symbol hasFlag SPECIALIZED)
-                && (nme.unspecializedName(nme.localToGetter(sel.symbol.name)) == nme.localToGetter(sym.name))
-              )
-            case _ => false
-          }
+      satelliteClass.asInstanceOf[ClassDef]
+    }
 
-        /** Rewrite calls to ScalaRunTime.array_update to the proper apply method in scala.Array.
-         *  Erasure transforms Array.update to ScalaRunTime.update when the element type is a type
-         *  variable, but after specialization this is a concrete primitive type, so it would
-         *  be an error to pass it to array_update(.., .., Object).
-         */
-        def rewriteArrayUpdate(tree: Tree): Tree = {
-          val adapter = new Transformer {
-            override def transform(t: Tree): Tree = t match {
-              case Apply(fun @ Select(receiver, method), List(xs, idx, v)) if fun.symbol == arrayUpdateMethod =>
-                localTyper.typed(Apply(gen.mkAttributedSelect(xs, arrayUpdateMethod), List(idx, v)))
-              case _ => super.transform(t)
-            }
-          }
-          adapter.transform(tree)
-        }
+    private def delayedInitCall(closure: Tree) = localTyper.typedPos(impl.pos) {
+      gen.mkMethodCall(This(clazz), delayedInitMethod, Nil, List(New(closure.symbol.tpe, This(clazz))))
+    }
 
-        log("merging: " + originalStats.mkString("\n") + "\nwith\n" + specializedStats.mkString("\n"))
-        val res = for (s <- originalStats; stat = s.duplicate) yield {
-          log("merge: looking at " + stat)
-          val stat1 = stat match {
-            case Assign(sel @ Select(This(_), field), _) =>
-              specializedAssignFor(sel.symbol).getOrElse(stat)
-            case _ => stat
-          }
-          if (stat1 ne stat) {
-            log("replaced " + stat + " with " + stat1)
-            specBuf -= stat1
-          }
+    def rewriteDelayedInit() {
+      /* XXX This is not correct: remainingConstrStats.nonEmpty excludes too much,
+       * but excluding it includes too much.  The constructor sequence being mimicked
+       * needs to be reproduced with total fidelity.
+       *
+       * See test case files/run/bug4680.scala, the output of which is wrong in many
+       * particulars.
+       */
+      val needsDelayedInit = (isDelayedInitSubclass && remainingConstrStats.nonEmpty)
 
-          if (stat1 eq stat) {
-            assert(ctorParams(genericClazz).length == constrParams.length)
-            // this is just to make private fields public
-            (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), constrParams, null, true))(stat1)
-
-            val stat2 = rewriteArrayUpdate(stat1)
-            // statements coming from the original class need retyping in the current context
-            debuglog("retyping " + stat2)
-
-            val d = new specializeTypes.Duplicator(Map[Symbol, Type]())
-            d.retyped(localTyper.context1.asInstanceOf[d.Context],
-                      stat2,
-                      genericClazz,
-                      clazz,
-                      Map.empty)
-          } else
-            stat1
+      if (needsDelayedInit) {
+        val delayedHook: DefDef = delayedEndpointDef(remainingConstrStats)
+        defBuf += delayedHook
+        val hookCallerClass = {
+          // transform to make the closure-class' default constructor assign the outer instance to its param-accessor field.
+          val drillDown = new ConstructorTransformer(unit)
+          drillDown transform delayedInitClosure(delayedHook.symbol.asInstanceOf[MethodSymbol])
         }
-        if (specBuf.nonEmpty)
-          println("residual specialized constructor statements: " + specBuf)
-        res
+        defBuf += hookCallerClass
+        remainingConstrStats = delayedInitCall(hookCallerClass) :: Nil
       }
+    }
+
+  } // DelayedInitHelper
+
+  private trait GuardianOfCtorStmts { self: TemplateTransformer =>
+
+    /* Return a single list of statements, merging the generic class constructor with the
+     * specialized stats. The original statements are retyped in the current class, and
+     * assignments to generic fields that have a corresponding specialized assignment in
+     * `specializedStats` are replaced by the specialized assignment.
+     */
+    private def mergeConstructors(genericClazz: Symbol, originalStats: List[Tree], specializedStats: List[Tree]): List[Tree] = {
+      val specBuf = new ListBuffer[Tree]
+      specBuf ++= specializedStats
+
+      def specializedAssignFor(sym: Symbol): Option[Tree] =
+        specializedStats find {
+          case Assign(sel @ Select(This(_), _), _) =>
+            sel.symbol.isSpecialized && (nme.unspecializedName(sel.symbol.getterName) == sym.getterName)
+          case _ => false
+        }
 
-      /** Add an 'if' around the statements coming after the super constructor. This
-       *  guard is necessary if the code uses specialized fields. A specialized field is
-       *  initialized in the subclass constructor, but the accessors are (already) overridden
-       *  and pointing to the (empty) fields. To fix this, a class with specialized fields
-       *  will not run its constructor statements if the instance is specialized. The specialized
-       *  subclass includes a copy of those constructor statements, and runs them. To flag that a class
-       *  has specialized fields, and their initialization should be deferred to the subclass, method
-       *  'specInstance$' is added in phase specialize.
+      /* Rewrite calls to ScalaRunTime.array_update to the proper apply method in scala.Array.
+       * Erasure transforms Array.update to ScalaRunTime.update when the element type is a type
+       * variable, but after specialization this is a concrete primitive type, so it would
+       * be an error to pass it to array_update(.., .., Object).
        */
-      def guardSpecializedInitializer(stats: List[Tree]): List[Tree] = if (settings.nospecialization.value) stats else {
-        // split the statements in presuper and postsuper
-    //    var (prefix, postfix) = stats0.span(tree => !((tree.symbol ne null) && tree.symbol.isConstructor))
-      //  if (postfix.nonEmpty) {
-        //  prefix = prefix :+ postfix.head
-          //postfix = postfix.tail
-        //}
-
-        if (usesSpecializedField && shouldGuard && stats.nonEmpty) {
-          // save them for duplication in the specialized subclass
-          guardedCtorStats(clazz) = stats
-          ctorParams(clazz) = constrParams
-
-          val tree =
-            If(
-              Apply(
-                CODE.NOT (
-                 Apply(gen.mkAttributedRef(specializedFlag), List())),
-                List()),
-              Block(stats, Literal(Constant())),
-              EmptyTree)
-
-          List(localTyper.typed(tree))
-        }
-        else if (clazz.hasFlag(SPECIALIZED)) {
-          // add initialization from its generic class constructor
-          val genericName  = nme.unspecializedName(clazz.name)
-          val genericClazz = clazz.owner.info.decl(genericName.toTypeName)
-          assert(genericClazz != NoSymbol, clazz)
-
-          guardedCtorStats.get(genericClazz) match {
-            case Some(stats1) => mergeConstructors(genericClazz, stats1, stats)
-            case None => stats
+      def rewriteArrayUpdate(tree: Tree): Tree = {
+        val arrayUpdateMethod = currentRun.runDefinitions.arrayUpdateMethod
+        val adapter = new Transformer {
+          override def transform(t: Tree): Tree = t match {
+            case Apply(fun @ Select(receiver, method), List(xs, idx, v)) if fun.symbol == arrayUpdateMethod =>
+              localTyper.typed(Apply(gen.mkAttributedSelect(xs, arrayUpdateMethod), List(idx, v)))
+            case _ => super.transform(t)
           }
-        } else stats
-      }
-/*
-      def isInitDef(stat: Tree) = stat match {
-        case dd: DefDef => dd.symbol == delayedInitMethod
-        case _ => false
+        }
+        adapter.transform(tree)
       }
-*/
 
-      /** Create a getter or a setter and enter into `clazz` scope
-       */
-      def addAccessor(sym: Symbol, name: TermName, flags: Long) = {
-        val m = clazz.newMethod(name, sym.pos, flags & ~(LOCAL | PRIVATE)) setPrivateWithin clazz
-        clazz.info.decls enter m
-      }
+      log("merging: " + originalStats.mkString("\n") + "\nwith\n" + specializedStats.mkString("\n"))
+      val res = for (s <- originalStats; stat = s.duplicate) yield {
+        log("merge: looking at " + stat)
+        val stat1 = stat match {
+          case Assign(sel @ Select(This(_), field), _) =>
+            specializedAssignFor(sel.symbol).getOrElse(stat)
+          case _ => stat
+        }
+        if (stat1 ne stat) {
+          log("replaced " + stat + " with " + stat1)
+          specBuf -= stat1
+        }
 
-      def addGetter(sym: Symbol): Symbol = {
-        val getr = addAccessor(
-          sym, nme.getterName(sym.name), getterFlags(sym.flags))
-        getr setInfo MethodType(List(), sym.tpe)
-        defBuf += localTyper.typedPos(sym.pos)(DefDef(getr, Select(This(clazz), sym)))
-        getr
+        if (stat1 eq stat) {
+          assert(ctorParams(genericClazz).length == constrInfo.constrParams.length)
+          // this is just to make private fields public
+          (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), constrInfo.constrParams, null, true))(stat1)
+
+          val stat2 = rewriteArrayUpdate(stat1)
+          // statements coming from the original class need retyping in the current context
+          debuglog("retyping " + stat2)
+
+          val d = new specializeTypes.Duplicator(Map[Symbol, Type]())
+          d.retyped(localTyper.context1.asInstanceOf[d.Context],
+                    stat2,
+                    genericClazz,
+                    clazz,
+                    Map.empty)
+        } else
+          stat1
       }
-
-      def addSetter(sym: Symbol): Symbol = {
-        sym setFlag MUTABLE
-        val setr = addAccessor(
-          sym, nme.getterToSetter(nme.getterName(sym.name)), setterFlags(sym.flags))
-        setr setInfo MethodType(setr.newSyntheticValueParams(List(sym.tpe)), UnitClass.tpe)
-        defBuf += localTyper.typed {
-          //util.trace("adding setter def for "+setr) {
-          atPos(sym.pos) {
-            DefDef(setr, paramss =>
-              Assign(Select(This(clazz), sym), Ident(paramss.head.head)))
-          }//}
+      if (specBuf.nonEmpty)
+        println("residual specialized constructor statements: " + specBuf)
+      res
+    }
+
+    /* Add an 'if' around the statements coming after the super constructor. This
+     * guard is necessary if the code uses specialized fields. A specialized field is
+     * initialized in the subclass constructor, but the accessors are (already) overridden
+     * and pointing to the (empty) fields. To fix this, a class with specialized fields
+     * will not run its constructor statements if the instance is specialized. The specialized
+     * subclass includes a copy of those constructor statements, and runs them. To flag that a class
+     * has specialized fields, and their initialization should be deferred to the subclass, method
+     * 'specInstance$' is added in phase specialize.
+     */
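(A hypothetical example, not part of the patch, of a class whose constructor statements get this guard: `value` is a field of the @specialized type parameter, so the generic Vec constructor wraps its statements in `if (!specInstance$()) { ... }`, and a specialized subclass, roughly Vec$mcD$sp, runs its own copy of them against the specialized field.)

    class Vec[@specialized(Double) T](t: T) {
      val value: T = t
    }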
+    def guardSpecializedInitializer(stats: List[Tree]): List[Tree] = if (settings.nospecialization.value) stats else {
+      // // split the statements in presuper and postsuper
+      // var (prefix, postfix) = stats0.span(tree => !((tree.symbol ne null) && tree.symbol.isConstructor))
+      // if (postfix.nonEmpty) {
+      //   prefix = prefix :+ postfix.head
+      //   postfix = postfix.tail
+      // }
+
+      if (shouldGuard && usesSpecializedField && stats.nonEmpty) {
+        // save them for duplication in the specialized subclass
+        guardedCtorStats(clazz) = stats
+        ctorParams(clazz) = constrInfo.constrParams
+
+        val tree =
+          If(
+            Apply(
+              CODE.NOT (
+               Apply(gen.mkAttributedRef(specializedFlag), List())),
+              List()),
+            Block(stats, Literal(Constant(()))),
+            EmptyTree)
+
+        List(localTyper.typed(tree))
+      }
+      else if (clazz.hasFlag(SPECIALIZED)) {
+        // add initialization from its generic class constructor
+        val genericName  = nme.unspecializedName(clazz.name)
+        val genericClazz = clazz.owner.info.decl(genericName.toTypeName)
+        assert(genericClazz != NoSymbol, clazz)
+
+        guardedCtorStats.get(genericClazz) match {
+          case Some(stats1) => mergeConstructors(genericClazz, stats1, stats)
+          case None => stats
         }
-        setr
+      } else stats
+    }
+
+  } // GuardianOfCtorStmts
+
+  private class TemplateTransformer(val unit: CompilationUnit, val impl: Template)
+    extends StaticsTransformer
+    with    DelayedInitHelper
+    with    OmittablesHelper
+    with    GuardianOfCtorStmts {
+
+    val clazz = impl.symbol.owner  // the transformed class
+    val stats = impl.body          // the transformed template body
+    val localTyper = typer.atOwner(impl, clazz)
+
+    val specializedFlag: Symbol = clazz.info.decl(nme.SPECIALIZED_INSTANCE)
+    val shouldGuard = (specializedFlag != NoSymbol) && !clazz.hasFlag(SPECIALIZED)
+
+    val isDelayedInitSubclass = (clazz isSubClass DelayedInitClass)
+
+    case class ConstrInfo(
+      constr: DefDef,               // The primary constructor
+      constrParams: List[Symbol],   // ... and its parameters
+      constrBody: Block             // ... and its body
+    )
+    // decompose primary constructor into the three entities above.
+    val constrInfo: ConstrInfo = {
+      val ddef = (stats find (_.symbol.isPrimaryConstructor))
+      ddef match {
+        case Some(ddef @ DefDef(_, _, _, List(vparams), _, rhs @ Block(_, _))) =>
+          ConstrInfo(ddef, vparams map (_.symbol), rhs)
+        case x =>
+          abort("no constructor in template: impl = " + impl)
       }
+    }
+    import constrInfo._
 
-      def ensureAccessor(sym: Symbol)(acc: => Symbol) =
-        if (sym.owner == clazz && !sym.isMethod && sym.isPrivate) { // there's an access to a naked field of the enclosing class
-          var getr = acc
-          getr makeNotPrivate clazz
-          getr
-        } else {
-          if (sym.owner == clazz) sym makeNotPrivate clazz
-          NoSymbol
-        }
+    // The parameter accessor fields which are members of the class
+    val paramAccessors = clazz.constrParamAccessors
 
-      def ensureGetter(sym: Symbol): Symbol = ensureAccessor(sym) {
-        val getr = sym.getter(clazz)
-        if (getr != NoSymbol) getr else addGetter(sym)
-      }
+    // The constructor parameter corresponding to an accessor
+    def parameter(acc: Symbol): Symbol = parameterNamed(acc.unexpandedName.getterName)
 
-      def ensureSetter(sym: Symbol): Symbol = ensureAccessor(sym) {
-        var setr = sym.setter(clazz, hasExpandedName = false)
-        if (setr == NoSymbol) setr = sym.setter(clazz, hasExpandedName = true)
-        if (setr == NoSymbol) setr = addSetter(sym)
-        setr
+    // The constructor parameter with given name. This means the parameter
+    // has given name, or starts with given name, and continues with a `$` afterwards.
+    def parameterNamed(name: Name): Symbol = {
+      def matchesName(param: Symbol) = param.name == name || param.name.startsWith(name + nme.NAME_JOIN_STRING)
+
+      (constrParams filter matchesName) match {
+        case Nil    => abort(name + " not in " + constrParams)
+        case p :: _ => p
       }
+    }
+
+    /*
+     * `usesSpecializedField` makes a difference in deciding whether constructor-statements
+     * should be guarded in a `shouldGuard` class, ie in a class that's the generic super-class of
+     * one or more specialized sub-classes.
+     *
+     * Given that `usesSpecializedField` isn't read for any other purpose than the one described above,
+     * we skip setting `usesSpecializedField` in case the current class isn't `shouldGuard` to start with.
+     * That way, trips to a map in `specializeTypes` are saved.
+     */
+    var usesSpecializedField: Boolean = false
+
+    // A transformer for expressions that go into the constructor
+    private class IntoCtorTransformer extends Transformer {
+
+      private def isParamRef(sym: Symbol) = (sym.isParamAccessor && sym.owner == clazz)
+
+      // Terminology: a stationary location is never written after being read.
+      private def isStationaryParamRef(sym: Symbol) = (
+        isParamRef(sym) &&
+        !(sym.isGetter && sym.accessed.isVariable) &&
+        !sym.isSetter
+      )
 
-      def delayedInitClosure(stats: List[Tree]) =
-        localTyper.typed {
-          atPos(impl.pos) {
-            val closureClass   = clazz.newClass(nme.delayedInitArg.toTypeName, impl.pos, SYNTHETIC | FINAL)
-            val closureParents = List(AbstractFunctionClass(0).tpe)
-
-            closureClass setInfoAndEnter new ClassInfoType(closureParents, newScope, closureClass)
-
-            val outerField = (
-              closureClass
-                newValue(nme.OUTER, impl.pos, PrivateLocal | PARAMACCESSOR)
-                setInfoAndEnter clazz.tpe
-            )
-            val applyMethod = (
-              closureClass
-                newMethod(nme.apply, impl.pos, FINAL)
-                setInfoAndEnter MethodType(Nil, ObjectClass.tpe)
-            )
-            val outerFieldDef     = ValDef(outerField)
-            val closureClassTyper = localTyper.atOwner(closureClass)
-            val applyMethodTyper  = closureClassTyper.atOwner(applyMethod)
-
-            val constrStatTransformer = new Transformer {
-              override def transform(tree: Tree): Tree = tree match {
-                case This(_) if tree.symbol == clazz =>
-                  applyMethodTyper.typed {
-                    atPos(tree.pos) {
-                      Select(This(closureClass), outerField)
-                    }
-                  }
-                case _ =>
-                  super.transform {
-                    tree match {
-                      case Select(qual, _) =>
-                        val getter = ensureGetter(tree.symbol)
-                        if (getter != NoSymbol)
-                          applyMethodTyper.typed {
-                            atPos(tree.pos) {
-                              Apply(Select(qual, getter), List())
-                            }
-                          }
-                        else tree
-                      case Assign(lhs @ Select(qual, _), rhs) =>
-                        val setter = ensureSetter(lhs.symbol)
-                        if (setter != NoSymbol)
-                          applyMethodTyper.typed {
-                            atPos(tree.pos) {
-                              Apply(Select(qual, setter), List(rhs))
-                            }
-                          }
-                        else tree
-                      case _ =>
-                        tree.changeOwner(impl.symbol -> applyMethod)
-                    }
-                  }
-              }
-            }
+      private def possiblySpecialized(s: Symbol) = specializeTypes.specializedTypeVars(s).nonEmpty
 
-            def applyMethodStats = constrStatTransformer.transformTrees(stats)
+      /*
+       * whether `sym` denotes a param-accessor (ie a field) that fulfills all of:
+       *   (a) has stationary value, ie the same value provided via the corresponding ctor-arg; and
+       *   (b) isn't subject to specialization. We might be processing statements for:
+       *         (b.1) the constructor in the generic   (super-)class; or
+       *         (b.2) the constructor in the specialized (sub-)class.
+       *   (c) isn't part of a DelayedInit subclass.
+       */
+      private def canBeSupplanted(sym: Symbol) = (!isDelayedInitSubclass && isStationaryParamRef(sym) && !possiblySpecialized(sym))
+
+      override def transform(tree: Tree): Tree = tree match {
+
+        case Apply(Select(This(_), _), List()) =>
+          // references to parameter accessor methods of own class become references to parameters
+          // outer accessors become references to $outer parameter
+          if (canBeSupplanted(tree.symbol))
+            gen.mkAttributedIdent(parameter(tree.symbol.accessed)) setPos tree.pos
+          else if (tree.symbol.outerSource == clazz && !clazz.isImplClass)
+            gen.mkAttributedIdent(parameterNamed(nme.OUTER)) setPos tree.pos
+          else
+            super.transform(tree)
 
-            val applyMethodDef = DefDef(
-              sym = applyMethod,
-              vparamss = ListOfNil,
-              rhs = Block(applyMethodStats, gen.mkAttributedRef(BoxedUnit_UNIT)))
+        case Select(This(_), _) if canBeSupplanted(tree.symbol) =>
+          // references to parameter accessor field of own class become references to parameters
+          gen.mkAttributedIdent(parameter(tree.symbol)) setPos tree.pos
 
-            ClassDef(
-              sym = closureClass,
-              constrMods = Modifiers(0),
-              vparamss = List(List(outerFieldDef)),
-              argss = ListOfNil,
-              body = List(applyMethodDef),
-              superPos = impl.pos)
+        case Select(_, _) if shouldGuard => // reasoning behind this guard in the docu of `usesSpecializedField`
+          if (possiblySpecialized(tree.symbol)) {
+            usesSpecializedField = true
           }
-        }
+          super.transform(tree)
 
-      def delayedInitCall(closure: Tree) = localTyper.typedPos(impl.pos) {
-        gen.mkMethodCall(This(clazz), delayedInitMethod, Nil, List(New(closure.symbol.tpe, This(clazz))))
+        case _ =>
+          super.transform(tree)
       }
 
-      /** Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */
-      def splitAtSuper(stats: List[Tree]) = {
-        def isConstr(tree: Tree): Boolean = tree match {
-          case Block(_, expr) => isConstr(expr)  // SI-6481 account for named argument blocks
-          case _              => (tree.symbol ne null) && tree.symbol.isConstructor
+    }
+
+    private val intoConstructorTransformer = new IntoCtorTransformer
+
+    // Move tree into constructor, take care of changing owner from `oldowner` to constructor symbol
+    def intoConstructor(oldowner: Symbol, tree: Tree) =
+      intoConstructorTransformer transform tree.changeOwner(oldowner -> constr.symbol)
+
+    // Should tree be moved in front of super constructor call?
+    def canBeMoved(tree: Tree) = tree match {
+      case ValDef(mods, _, _, _) => (mods hasFlag PRESUPER | PARAMACCESSOR)
+      case _                     => false
+    }
+
+    // Create an assignment to class field `to` with rhs `from`
+    def mkAssign(to: Symbol, from: Tree): Tree =
+      localTyper.typedPos(to.pos) { Assign(Select(This(clazz), to), from) }
+
+    // Create code to copy parameter to parameter accessor field.
+    // If parameter is $outer, check that it is not null so that we NPE
+    // here instead of at some unknown future $outer access.
+    def copyParam(to: Symbol, from: Symbol): Tree = {
+      import CODE._
+      val result = mkAssign(to, Ident(from))
+
+      if (from.name != nme.OUTER ||
+          from.tpe.typeSymbol.isPrimitiveValueClass) result
+      else localTyper.typedPos(to.pos) {
+        // `throw null` has the same effect as `throw new NullPointerException`, see JVM spec on instruction `athrow`
+        IF (from OBJ_EQ NULL) THEN Throw(gen.mkZero(ThrowableTpe)) ELSE result
+      }
+    }
+
+    // The list of definitions that go into class
+    val defBuf = new ListBuffer[Tree]
+
+    // The auxiliary constructors, separate from the defBuf since they should
+    // follow the primary constructor
+    val auxConstructorBuf = new ListBuffer[Tree]
+
+    // The list of statements that go into the constructor after and including the superclass constructor call
+    val constrStatBuf = new ListBuffer[Tree]
+
+    // The list of early initializer statements that go into constructor before the superclass constructor call
+    val constrPrefixBuf = new ListBuffer[Tree]
+
+    // The early initialized field definitions of the class (these are the class members)
+    val presupers = treeInfo.preSuperFields(stats)
+
+    // The list of statements that go into the class initializer
+    val classInitStatBuf = new ListBuffer[Tree]
+
+    // generate code to copy pre-initialized fields
+    for (stat <- constrBody.stats) {
+      constrStatBuf += stat
+      stat match {
+        case ValDef(mods, name, _, _) if (mods hasFlag PRESUPER) =>
+          // stat is the constructor-local definition of the field value
+          val fields = presupers filter (_.getterName == name)
+          assert(fields.length == 1)
+          val to = fields.head.symbol
+          if (!to.tpe.isInstanceOf[ConstantType])
+            constrStatBuf += mkAssign(to, Ident(stat.symbol))
+        case _ =>
+      }
+    }
+
+    // Triage all template definitions to go into defBuf/auxConstructorBuf, constrStatBuf, or constrPrefixBuf.
+    for (stat <- stats) stat match {
+      case DefDef(_,_,_,_,_,rhs) =>
+        // methods with constant result type get literals as their body
+        // all methods except the primary constructor go into template
+        stat.symbol.tpe match {
+          case MethodType(List(), tp @ ConstantType(c)) =>
+            defBuf += deriveDefDef(stat)(Literal(c) setPos _.pos setType tp)
+          case _ =>
+            if (stat.symbol.isPrimaryConstructor) ()
+            else if (stat.symbol.isConstructor) auxConstructorBuf += stat
+            else defBuf += stat
+        }
+      case ValDef(mods, _, _, rhs) if !mods.hasStaticFlag =>
+        // val defs with constant right-hand sides are eliminated.
+        // for all other val defs, an empty valdef goes into the template and
+        // the initializer goes as an assignment into the constructor
+        // if the val def is an early initialized or a parameter accessor, it goes
+        // before the superclass constructor call, otherwise it goes after.
+        // Lazy vals don't get the assignment in the constructor.
+        if (!stat.symbol.tpe.isInstanceOf[ConstantType]) {
+          if (rhs != EmptyTree && !stat.symbol.isLazy) {
+            val rhs1 = intoConstructor(stat.symbol, rhs)
+            (if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
+              stat.symbol, rhs1)
+          }
+          defBuf += deriveValDef(stat)(_ => EmptyTree)
         }
-        val (pre, rest0) = stats span (!isConstr(_))
-        val (supercalls, rest) = rest0 span (isConstr(_))
-        (pre ::: supercalls, rest)
+      case ValDef(_, _, _, rhs) =>
+        // Add static initializer statements to classInitStatBuf and remove the rhs from the val def.
+        classInitStatBuf += mkAssign(stat.symbol, rhs)
+        defBuf += deriveValDef(stat)(_ => EmptyTree)
+
+      case ClassDef(_, _, _, _) =>
+        // classes are treated recursively, and left in the template
+        defBuf += new ConstructorTransformer(unit).transform(stat)
+      case _ =>
+        // all other statements go into the constructor
+        constrStatBuf += intoConstructor(impl.symbol, stat)
+    }
+
+    populateOmittables()
+
+    // Initialize all parameters fields that must be kept.
+    val paramInits = paramAccessors filter mustBeKept map { acc =>
+      // Check for conflicting symbol amongst parents: see bug #1960.
+      // It would be better to mangle the constructor parameter name since
+      // it can only be used internally, but I think we need more robust name
+      // mangling before we introduce more of it.
+      val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => s.isGetter && !s.isOuterField && s.enclClass.isTrait)
+      if (conflict ne NoSymbol)
+        unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString))
+
+      copyParam(acc, parameter(acc))
+    }
+
+    /* Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */
+    def splitAtSuper(stats: List[Tree]) = {
+      def isConstr(tree: Tree): Boolean = tree match {
+        case Block(_, expr) => isConstr(expr)  // SI-6481 account for named argument blocks
+        case _              => (tree.symbol ne null) && tree.symbol.isConstructor
       }
+      val (pre, rest0) = stats span (!isConstr(_))
+      val (supercalls, rest) = rest0 span (isConstr(_))
+      (pre ::: supercalls, rest)
+    }
 
-      var (uptoSuperStats, remainingConstrStats) = splitAtSuper(constrStatBuf.toList)
+    val (uptoSuperStats, remainingConstrStats0) = splitAtSuper(constrStatBuf.toList)
+    var remainingConstrStats = remainingConstrStats0
 
-      /** XXX This is not corect: remainingConstrStats.nonEmpty excludes too much,
-       *  but excluding it includes too much.  The constructor sequence being mimicked
-       *  needs to be reproduced with total fidelity.
-       *
-       *  See test case files/run/bug4680.scala, the output of which is wrong in many
-       *  particulars.
-       */
-      val needsDelayedInit =
-        (clazz isSubClass DelayedInitClass) /*&& !(defBuf exists isInitDef)*/ && remainingConstrStats.nonEmpty
+    rewriteDelayedInit()
 
-      if (needsDelayedInit) {
-        val dicl = new ConstructorTransformer(unit) transform delayedInitClosure(remainingConstrStats)
-        defBuf += dicl
-        remainingConstrStats = List(delayedInitCall(dicl))
-      }
+    // Assemble final constructor
+    defBuf += deriveDefDef(constr)(_ =>
+      treeCopy.Block(
+        constrBody,
+        paramInits ::: constrPrefixBuf.toList ::: uptoSuperStats :::
+          guardSpecializedInitializer(remainingConstrStats),
+        constrBody.expr))
 
-      // Assemble final constructor
-      defBuf += deriveDefDef(constr)(_ =>
-        treeCopy.Block(
-          constrBody,
-          paramInits ::: constrPrefixBuf.toList ::: uptoSuperStats :::
-            guardSpecializedInitializer(remainingConstrStats),
-          constrBody.expr))
+    // Followed by any auxiliary constructors
+    defBuf ++= auxConstructorBuf
 
-      // Followed by any auxiliary constructors
-      defBuf ++= auxConstructorBuf
+    // Unlink all fields that can be dropped from class scope
+    for (sym <- clazz.info.decls ; if !mustBeKept(sym))
+      clazz.info.decls unlink sym
 
-      // Unlink all fields that can be dropped from class scope
-      for (sym <- clazz.info.decls ; if !mustbeKept(sym))
-        clazz.info.decls unlink sym
+    // Eliminate all field definitions that can be dropped from template
+    val templateWithoutOmittables: Template = deriveTemplate(impl)(_ => defBuf.toList filter (stat => mustBeKept(stat.symbol)))
+    //  Add the static initializers
+    val transformed: Template = addStaticInits(templateWithoutOmittables, classInitStatBuf, localTyper)
 
-      // Eliminate all field definitions that can be dropped from template
-      deriveTemplate(impl)(_ => defBuf.toList filter (stat => mustbeKept(stat.symbol)))
-    } // transformClassTemplate
+  } // TemplateTransformer
 
-    override def transform(tree: Tree): Tree =
-      tree match {
-        case ClassDef(_,_,_,_) if !tree.symbol.isInterface && !isPrimitiveValueClass(tree.symbol) =>
-          deriveClassDef(tree)(transformClassTemplate)
-        case _ =>
-          super.transform(tree)
-      }
-  } // ConstructorTransformer
 }
diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
new file mode 100644
index 0000000..1468680
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
@@ -0,0 +1,464 @@
+package scala.tools.nsc
+package transform
+
+import symtab._
+import Flags._
+import scala.collection._
+import scala.language.postfixOps
+import scala.reflect.internal.Symbols
+import scala.collection.mutable.LinkedHashMap
+
+/**
+ * This transformer is responsible for turning lambdas into anonymous classes.
+ * The main assumption it makes is that a lambda {args => body} has been turned into
+ * {args => liftedBody()} where lifted body is a top level method that implements the body of the lambda.
+ * Currently Uncurry is responsible for that transformation.
+ *
+ * From a lambda, Delambdafy will create
+ * 1) a static forwarder at the top level of the class that contained the lambda
+ * 2) a new top level class that
+ *    a) has fields and a constructor taking the captured environment (including possibly the "this"
+ *       reference)
+ *    b) an apply method that calls the static forwarder
+ *    c) if needed a bridge method for the apply method
+ *  3) an instantiation of the newly created class which replaces the lambda
+ *
+ *  TODO the main work left to be done is to plug into specialization. Primarily that means choosing a
+ * specialized FunctionN trait instead of the generic FunctionN trait as a parent and creating the
+ * appropriately named applysp method
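+ *
+ * As a rough, purely illustrative sketch (the names below are made up, not the exact symbols the
+ * compiler emits), a lambda written as {(x: Int) => Foo.this.anonfun$1(x)} inside class Foo becomes
+ * something like
+ *
+ *   final class Foo$lambda$1(proxy: Foo) extends AbstractFunction1[Int, Int] {
+ *     def apply(x: Int): Int = accessor$1(proxy, x)  // forwards to the accessor added to Foo
+ *   }
+ *
+ * and the original Function tree is replaced by new Foo$lambda$1(Foo.this).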
+ */
+abstract class Delambdafy extends Transform with TypingTransformers with ast.TreeDSL with TypeAdaptingTransformer {
+  import global._
+  import definitions._
+  import CODE._
+
+  val analyzer: global.analyzer.type = global.analyzer
+
+  /** the following two members override abstract members in Transform */
+  val phaseName: String = "delambdafy"
+
+  protected def newTransformer(unit: CompilationUnit): Transformer =
+    new DelambdafyTransformer(unit)
+
+  class DelambdafyTransformer(unit: CompilationUnit) extends TypingTransformer(unit) with TypeAdapter {
+    private val lambdaClassDefs = new mutable.LinkedHashMap[Symbol, List[Tree]] withDefaultValue Nil
+
+
+    val typer = localTyper
+
+    // we need to know which methods refer to the 'this' reference so that we can determine
+    // which lambdas need access to it
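+    // Illustrative example (hypothetical names): if the lifted body anonfun$2 contains a nested
+    // lambda whose lifted body anonfun$1 reads a field through 'this', the transitive closure below
+    // marks both methods as 'this'-referring, so lambdas targeting either one will capture 'this'.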
+    val thisReferringMethods: Set[Symbol] = {
+      val thisReferringMethodsTraverser = new ThisReferringMethodsTraverser()
+      thisReferringMethodsTraverser traverse unit.body
+      val methodReferringMap = thisReferringMethodsTraverser.liftedMethodReferences
+      val referrers = thisReferringMethodsTraverser.thisReferringMethods
+      // recursively find methods that refer to 'this' directly or indirectly via references to other methods
+      // for each method found add it to the referrers set
+      def refersToThis(symbol: Symbol): Boolean = {
+        if (referrers contains symbol) true
+        else if (methodReferringMap(symbol) exists refersToThis) {
+          // add it early to memoize
+          debuglog(s"$symbol indirectly refers to 'this'")
+          referrers += symbol
+          true
+        } else false
+      }
+      methodReferringMap.keys foreach refersToThis
+      referrers
+    }
+
+    val accessorMethods = mutable.ArrayBuffer[Tree]()
+
+    // the result of the transformFunction method. A class definition for the lambda, an expression
+    // instantiating the lambda class, and an accessor method for the lambda class to be able to
+    // call the implementation
+    case class TransformedFunction(lambdaClassDef: ClassDef, newExpr: Tree, accessorMethod: Tree)
+
+    // here's the main entry point of the transform
+    override def transform(tree: Tree): Tree = tree match {
+      // the main thing we care about is lambdas
+      case fun @ Function(_, _) =>
+        // a lambda becomes a new class, an instantiation expression, and an
+        // accessor method
+        val TransformedFunction(lambdaClassDef, newExpr, accessorMethod) = transformFunction(fun)
+        // we'll add accessor methods to the current template later
+        accessorMethods += accessorMethod
+        val pkg = lambdaClassDef.symbol.owner
+
+        // we'll add the lambda class to the package later
+        lambdaClassDefs(pkg) = lambdaClassDef :: lambdaClassDefs(pkg)
+
+        super.transform(newExpr)
+      // when we encounter a template (basically the thing that holds the body of a class/trait)
+      // we need to update it to include the newly created accessor methods after transforming it
+      case Template(_, _, _) =>
+        try {
+          // during this call accessorMethods will be populated from the Function case
+          val Template(parents, self, body) = super.transform(tree)
+          Template(parents, self, body ++ accessorMethods)
+        } finally accessorMethods.clear()
+      case _ => super.transform(tree)
+    }
+
+    // this entry point is aimed at the statements in the compilation unit.
+    // after working on the entire compilation unit we'll have a set of
+    // new class definitions to add to the top level
+    override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
+      super.transformStats(stats, exprOwner) ++ lambdaClassDefs(exprOwner)
+    }
+
+    private def optionSymbol(sym: Symbol): Option[Symbol] = if (sym.exists) Some(sym) else None
+
+    // turns a lambda into a new class def, a New expression instantiating that class, and an
+    // accessor method for the body of the lambda
+    private def transformFunction(originalFunction: Function): TransformedFunction = {
+      val functionTpe = originalFunction.tpe
+      val targs = functionTpe.typeArgs
+      val formals :+ restpe = targs
+      val oldClass = originalFunction.symbol.enclClass
+
+      // find which variables are free in the lambda because those are captures that need to be
+      // passed into the constructor of the anonymous function class
+      val captures = FreeVarTraverser.freeVarsOf(originalFunction)
+
+      /**
+       * Creates the accessor method that the apply method of the anonymous subclass of FunctionN forwards to
+       */
+      def createAccessorMethod(thisProxy: Symbol, fun: Function): DefDef = {
+        val target = targetMethod(fun)
+        if (!thisProxy.exists) {
+          target setFlag STATIC
+        }
+        val params = ((optionSymbol(thisProxy) map {proxy:Symbol => ValDef(proxy)}) ++ (target.paramss.flatten map ValDef.apply)).toList
+
+        val methSym = oldClass.newMethod(unit.freshTermName(nme.accessor.toString()), target.pos, FINAL | BRIDGE | SYNTHETIC | PROTECTED | STATIC)
+
+        val paramSyms = params map {param => methSym.newSyntheticValueParam(param.symbol.tpe, param.name) }
+
+        params zip paramSyms foreach { case (valdef, sym) => valdef.symbol = sym }
+        params foreach (_.symbol.owner = methSym)
+
+        val methodType = MethodType(paramSyms, restpe)
+        methSym setInfo methodType
+
+        oldClass.info.decls enter methSym
+
+        val body = localTyper.typed {
+          val newTarget = Select(if (thisProxy.exists) gen.mkAttributedRef(paramSyms(0)) else gen.mkAttributedThis(oldClass), target)
+          val newParams = paramSyms drop (if (thisProxy.exists) 1 else 0) map Ident
+          Apply(newTarget, newParams)
+        } setPos fun.pos
+        val methDef = DefDef(methSym, List(params), body)
+
+        // Have to repack the type to avoid mismatches when existentials
+        // appear in the result - see SI-4869.
+        // TODO probably don't need packedType
+        methDef.tpt setType localTyper.packedType(body, methSym)
+        methDef
+      }
+
+      /**
+       * Creates the apply method for the anonymous subclass of FunctionN
+       */
+      def createApplyMethod(newClass: Symbol, fun: Function, accessor: DefDef, thisProxy: Symbol): DefDef = {
+        val methSym = newClass.newMethod(nme.apply, fun.pos, FINAL | SYNTHETIC)
+        val params = fun.vparams map (_.duplicate)
+
+        val paramSyms = map2(formals, params) {
+          (tp, vparam) => methSym.newSyntheticValueParam(tp, vparam.name)
+        }
+        params zip paramSyms foreach { case (valdef, sym) => valdef.symbol = sym }
+        params foreach (_.symbol.owner = methSym)
+
+        val methodType = MethodType(paramSyms, restpe)
+        methSym setInfo methodType
+
+        newClass.info.decls enter methSym
+
+        val Apply(_, oldParams) = fun.body
+
+        val body = localTyper typed Apply(Select(gen.mkAttributedThis(oldClass), accessor.symbol), (optionSymbol(thisProxy) map {tp => Select(gen.mkAttributedThis(newClass), tp)}).toList ++ oldParams)
+        body.substituteSymbols(fun.vparams map (_.symbol), params map (_.symbol))
+        body changeOwner (fun.symbol -> methSym)
+
+        val methDef = DefDef(methSym, List(params), body)
+
+        // Have to repack the type to avoid mismatches when existentials
+        // appear in the result - see SI-4869.
+        // TODO probably don't need packedType
+        methDef.tpt setType localTyper.packedType(body, methSym)
+        methDef
+      }
+
+      /**
+       * Creates the constructor on the newly created class. It will handle
+       * initialization of members that represent the captured environment
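+       *
+       * Roughly (names are illustrative), for members $this and x$1 the generated constructor is
+       *   def <init>($this, x$1) = { super.<init>(); this.$this = $this; this.x$1 = x$1; () }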
+       */
+      def createConstructor(newClass: Symbol, members: List[ValDef]): DefDef = {
+        val constrSym = newClass.newConstructor(originalFunction.pos, SYNTHETIC)
+
+        val (paramSymbols, params, assigns) = (members map {member =>
+          val paramSymbol = newClass.newVariable(member.symbol.name.toTermName, newClass.pos, 0)
+          paramSymbol.setInfo(member.symbol.info)
+          val paramVal = ValDef(paramSymbol)
+          val paramIdent = Ident(paramSymbol)
+          val assign = Assign(Select(gen.mkAttributedThis(newClass), member.symbol), paramIdent)
+
+          (paramSymbol, paramVal, assign)
+        }).unzip3
+
+        val constrType = MethodType(paramSymbols, newClass.thisType)
+        constrSym setInfoAndEnter constrType
+
+        val body =
+          Block(
+            List(
+              Apply(Select(Super(gen.mkAttributedThis(newClass), tpnme.EMPTY) setPos newClass.pos, nme.CONSTRUCTOR) setPos newClass.pos, Nil) setPos newClass.pos
+            ) ++ assigns,
+            Literal(Constant(())): Tree
+          ) setPos newClass.pos
+
+        (localTyper typed DefDef(constrSym, List(params), body) setPos newClass.pos).asInstanceOf[DefDef]
+      }
+
+      val pkg = oldClass.owner
+
+      // Parent for anonymous class def
+      val abstractFunctionErasedType = AbstractFunctionClass(formals.length).tpe
+
+      // anonymous subclass of FunctionN with an apply method
+      def makeAnonymousClass = {
+        val parents = addSerializable(abstractFunctionErasedType)
+        val funOwner = originalFunction.symbol.owner
+
+        // TODO harmonize the naming of delambdafy anon-fun classes with those spun up by Uncurry
+        //      - make `anonClass.isAnonymousClass` true.
+        //      - use `newAnonymousClassSymbol` or push the required variations into a similar factory method
+        //      - reinstate the assertion in `Erasure.resolveAnonymousBridgeClash`
+        val suffix = "$lambda$" + (
+          if (funOwner.isPrimaryConstructor) ""
+          else "$" + funOwner.name
+        )
+        val name = unit.freshTypeName(s"${oldClass.name.decode}$suffix")
+
+        val anonClass = pkg newClassSymbol(name, originalFunction.pos, FINAL | SYNTHETIC) addAnnotation SerialVersionUIDAnnotation
+        anonClass setInfo ClassInfoType(parents, newScope, anonClass)
+
+        val captureProxies2 = new LinkedHashMap[Symbol, TermSymbol]
+        captures foreach {capture =>
+          val sym = anonClass.newVariable(capture.name.toTermName, capture.pos, SYNTHETIC)
+          sym setInfo capture.info
+          captureProxies2 += ((capture, sym))
+        }
+
+      // the Optional proxy that will hold a reference to the 'this'
+      // object used by the lambda, if any. NoSymbol if there is no this proxy
+      val thisProxy = {
+        val target = targetMethod(originalFunction)
+        if (thisReferringMethods contains target) {
+          val sym = anonClass.newVariable(nme.FAKE_LOCAL_THIS, originalFunction.pos, SYNTHETIC)
+          sym.info = oldClass.tpe
+          sym
+        } else NoSymbol
+      }
+
+      val decapturify = new DeCapturifyTransformer(captureProxies2, unit, oldClass, anonClass, originalFunction.symbol.pos, thisProxy)
+
+      val accessorMethod = createAccessorMethod(thisProxy, originalFunction)
+
+      val decapturedFunction = decapturify.transform(originalFunction).asInstanceOf[Function]
+
+      val members = (optionSymbol(thisProxy).toList ++ (captureProxies2 map (_._2))) map {member =>
+        anonClass.info.decls enter member
+        ValDef(member, gen.mkZero(member.tpe)) setPos decapturedFunction.pos
+      }
+
+      // constructor
+      val constr = createConstructor(anonClass, members)
+
+      // apply method with same arguments and return type as original lambda.
+      val applyMethodDef = createApplyMethod(anonClass, decapturedFunction, accessorMethod, thisProxy)
+
+      val bridgeMethod = createBridgeMethod(anonClass, originalFunction, applyMethodDef)
+
+      def fulldef(sym: Symbol) =
+        if (sym == NoSymbol) sym.toString
+        else s"$sym: ${sym.tpe} in ${sym.owner}"
+
+        bridgeMethod foreach (bm =>
+          // TODO SI-6260 maybe just create the apply method with the signature (Object => Object) in all cases
+          //      rather than the method+bridge pair.
+          if (bm.symbol.tpe =:= applyMethodDef.symbol.tpe)
+            erasure.resolveAnonymousBridgeClash(applyMethodDef.symbol, bm.symbol)
+        )
+
+        val body = members ++ List(constr, applyMethodDef) ++ bridgeMethod
+
+        // TODO if member fields are private this complains that they're not accessible
+        (localTyper.typedPos(decapturedFunction.pos)(ClassDef(anonClass, body)).asInstanceOf[ClassDef], thisProxy, accessorMethod)
+      }
+
+      val (anonymousClassDef, thisProxy, accessorMethod) = makeAnonymousClass
+
+      pkg.info.decls enter anonymousClassDef.symbol
+
+      val thisArg = optionSymbol(thisProxy) map (_ => gen.mkAttributedThis(oldClass) setPos originalFunction.pos)
+      val captureArgs = captures map (capture => Ident(capture) setPos originalFunction.pos)
+
+      val newStat =
+          Typed(New(anonymousClassDef.symbol, (thisArg.toList ++ captureArgs): _*), TypeTree(abstractFunctionErasedType))
+
+      val typedNewStat = localTyper.typedPos(originalFunction.pos)(newStat)
+
+      TransformedFunction(anonymousClassDef, typedNewStat, accessorMethod)
+    }
+
+    /**
+     * Creates a bridge method if needed. The bridge method forwards from apply(x1: Object, x2: Object...xn: Object): Object to
+     * apply(x1: T1, x2: T2...xn: Tn): T0 using type adaptation on each input and output. The only time a bridge isn't needed
+     * is when the original lambda is already erased to type Object, Object, Object... => Object
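+     *
+     * A rough sketch of the shape of a generated bridge (illustrative only, not the literal tree):
+     *   def apply(x1: Object): Object = box(this.apply(unbox(x1)))
+     * where box/unbox stand for the adaptations performed by adaptToType.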
+     */
+    def createBridgeMethod(newClass:Symbol, originalFunction: Function, applyMethod: DefDef): Option[DefDef] = {
+      val bridgeMethSym = newClass.newMethod(nme.apply, applyMethod.pos, FINAL | SYNTHETIC | BRIDGE)
+      val originalParams = applyMethod.vparamss(0)
+      val bridgeParams = originalParams map { originalParam =>
+        val bridgeSym = bridgeMethSym.newSyntheticValueParam(ObjectTpe, originalParam.name)
+        ValDef(bridgeSym)
+      }
+
+      val bridgeSyms = bridgeParams map (_.symbol)
+
+      val methodType = MethodType(bridgeSyms, ObjectTpe)
+      bridgeMethSym setInfo methodType
+
+      def adapt(tree: Tree, expectedTpe: Type): (Boolean, Tree) = {
+        if (tree.tpe =:= expectedTpe) (false, tree)
+        else (true, adaptToType(tree, expectedTpe))
+      }
+
+      def adaptAndPostErase(tree: Tree, pt: Type): (Boolean, Tree) = {
+        val (needsAdapt, adaptedTree) = adapt(tree, pt)
+        val trans = postErasure.newTransformer(unit)
+        val postErasedTree = trans.atOwner(currentOwner)(trans.transform(adaptedTree)) // SI-8017 eliminates ErasedValueTypes
+        (needsAdapt, postErasedTree)
+      }
+
+      enteringPhase(currentRun.posterasurePhase) {
+        // e.g., in:
+        //   class C(val a: Int) extends AnyVal; (x: Int) => new C(x)
+        //
+        // This type is:
+        //    (x: Int)ErasedValueType(class C, Int)
+        val liftedBodyDefTpe: MethodType = {
+          val liftedBodySymbol = {
+            val Apply(method, _) = originalFunction.body
+            method.symbol
+          }
+          liftedBodySymbol.info.asInstanceOf[MethodType]
+        }
+        val (paramNeedsAdaptation, adaptedParams) = (bridgeSyms zip liftedBodyDefTpe.params map {case (bridgeSym, param) => adapt(Ident(bridgeSym) setType bridgeSym.tpe, param.tpe)}).unzip
+        // SI-8017 Before, this code used `applyMethod.symbol.info.resultType`.
+        //         But that symbol doesn't have a type history that goes back before `delambdafy`,
+        //         so we just see a plain `Int`, rather than `ErasedValueType(C, Int)`.
+        //         This triggered primitive boxing, rather than value class boxing.
+        val resTp = liftedBodyDefTpe.finalResultType
+        val body = Apply(gen.mkAttributedSelect(gen.mkAttributedThis(newClass), applyMethod.symbol), adaptedParams) setType resTp
+        val (needsReturnAdaptation, adaptedBody) = adaptAndPostErase(body, ObjectTpe)
+
+        val needsBridge = (paramNeedsAdaptation contains true) || needsReturnAdaptation
+        if (needsBridge) {
+          val methDef = DefDef(bridgeMethSym, List(bridgeParams), adaptedBody)
+          newClass.info.decls enter bridgeMethSym
+          Some((localTyper typed methDef).asInstanceOf[DefDef])
+        } else None
+      }
+    }
+  } // DelambdafyTransformer
+
+  // A traverser that finds symbols used but not defined in the given Tree
+  // TODO freeVarTraverser in LambdaLift does a very similar task. With some
+  // analysis this could probably be unified with it
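+  // Illustrative example: traversing {(x: Int) => x + y}, where y is a local val of the enclosing
+  // method, records x as declared and reports y as the single free variable.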
+  class FreeVarTraverser extends Traverser {
+    val freeVars = mutable.LinkedHashSet[Symbol]()
+    val declared = mutable.LinkedHashSet[Symbol]()
+
+    override def traverse(tree: Tree) = {
+      tree match {
+        case Function(args, _) =>
+          args foreach {arg => declared += arg.symbol}
+        case ValDef(_, _, _, _) =>
+          declared += tree.symbol
+        case _: Bind =>
+          declared += tree.symbol
+        case Ident(_) =>
+          val sym = tree.symbol
+          if ((sym != NoSymbol) && sym.isLocalToBlock && sym.isTerm && !sym.isMethod && !declared.contains(sym)) freeVars += sym
+        case _ =>
+      }
+      super.traverse(tree)
+    }
+  }
+
+  object FreeVarTraverser {
+    def freeVarsOf(function: Function) = {
+      val freeVarsTraverser = new FreeVarTraverser
+      freeVarsTraverser.traverse(function)
+      freeVarsTraverser.freeVars
+    }
+  }
+
+  // A transformer that converts specified captured symbols into other symbols
+  // TODO this transform could look more like ThisSubstituter and TreeSymSubstituter. It's not clear that it needs that level of sophistication since the types
+  // at this point are always very simple flattened/erased types, but it would probably be more robust if it tried to take more complicated types into account
+  class DeCapturifyTransformer(captureProxies: Map[Symbol, TermSymbol], unit: CompilationUnit, oldClass: Symbol, newClass:Symbol, pos: Position, thisProxy: Symbol) extends TypingTransformer(unit) {
+    override def transform(tree: Tree) = tree match {
+      case tree @ This(encl) if tree.symbol == oldClass && thisProxy.exists =>
+        gen mkAttributedSelect (gen mkAttributedThis newClass, thisProxy)
+      case Ident(name) if (captureProxies contains tree.symbol) =>
+        gen mkAttributedSelect (gen mkAttributedThis newClass, captureProxies(tree.symbol))
+      case _ => super.transform(tree)
+    }
+  }
+
+  /**
+   * Get the symbol of the target lifted lambda body method from a function. I.e. if
+   * the function is {args => anonfun(args)} then this method returns anonfun's symbol
+   */
+  private def targetMethod(fun: Function): Symbol = fun match {
+    case Function(_, Apply(target, _)) =>
+      target.symbol
+    case _ =>
+      // any other shape of Function is unexpected at this point
+      abort(s"could not understand function with tree $fun")
+  }
+
+  // finds all methods that reference 'this'
+  class ThisReferringMethodsTraverser() extends Traverser {
+    private var currentMethod: Symbol = NoSymbol
+    // the set of methods that refer to this
+    val thisReferringMethods = mutable.Set[Symbol]()
+    // the set of lifted lambda body methods that each method refers to
+    val liftedMethodReferences = mutable.Map[Symbol, Set[Symbol]]().withDefault(_ => mutable.Set())
+    override def traverse(tree: Tree) = tree match {
+      case DefDef(_, _, _, _, _, _) =>
+        // we don't expect defs within defs. At this phase trees should be very flat
+        if (currentMethod.exists) devWarning("Found a def within a def at a phase where defs are expected to be flattened out.")
+        currentMethod = tree.symbol
+        super.traverse(tree)
+        currentMethod = NoSymbol
+      case fun @ Function(_, _) =>
+        // we don't drill into functions because at the beginning of this phase they will always refer to 'this'.
+        // They'll be of the form {(args...) => this.anonfun(args...)}
+        // but we do need to make note of the lifted body method in case it refers to 'this'
+        if (currentMethod.exists) liftedMethodReferences(currentMethod) += targetMethod(fun)
+      case This(_) =>
+        if (currentMethod.exists && tree.symbol == currentMethod.enclClass) {
+          debuglog(s"$currentMethod directly refers to 'this'")
+          thisReferringMethods add currentMethod
+        }
+      case _ =>
+        super.traverse(tree)
+    }
+  }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index df220b7..e036035 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -10,17 +10,22 @@ import scala.reflect.internal.ClassfileConstants._
 import scala.collection.{ mutable, immutable }
 import symtab._
 import Flags._
+import scala.reflect.internal.Mode._
 
 abstract class Erasure extends AddInterfaces
                           with scala.reflect.internal.transform.Erasure
                           with typechecker.Analyzer
                           with TypingTransformers
                           with ast.TreeDSL
+                          with TypeAdaptingTransformer
 {
   import global._
   import definitions._
   import CODE._
 
+  val analyzer: typechecker.Analyzer { val global: Erasure.this.global.type } =
+    this.asInstanceOf[typechecker.Analyzer { val global: Erasure.this.global.type }]
+
   val phaseName: String = "erasure"
 
   def newTransformer(unit: CompilationUnit): Transformer =
@@ -49,14 +54,14 @@ abstract class Erasure extends AddInterfaces
             if (sym == ArrayClass) args foreach traverse
             else if (sym.isTypeParameterOrSkolem || sym.isExistentiallyBound || !args.isEmpty) result = true
             else if (sym.isClass) traverse(rebindInnerClass(pre, sym)) // #2585
-            else if (!sym.owner.isPackageClass) traverse(pre)
+            else if (!sym.isTopLevel) traverse(pre)
           case PolyType(_, _) | ExistentialType(_, _) =>
             result = true
           case RefinedType(parents, _) =>
             parents foreach traverse
           case ClassInfoType(parents, _, _) =>
             parents foreach traverse
-          case AnnotatedType(_, atp, _) =>
+          case AnnotatedType(_, atp) =>
             traverse(atp)
           case _ =>
             mapOver(tp)
@@ -65,8 +70,8 @@ abstract class Erasure extends AddInterfaces
     }
   }
 
-  override protected def verifyJavaErasure = settings.Xverify.value || settings.debug.value
-  def needsJavaSig(tp: Type) = !settings.Ynogenericsig.value && NeedsSigCollector.collect(tp)
+  override protected def verifyJavaErasure = settings.Xverify || settings.debug
+  def needsJavaSig(tp: Type) = !settings.Ynogenericsig && NeedsSigCollector.collect(tp)
 
   // only refer to type params that will actually make it into the sig, this excludes:
   // * higher-order type parameters
@@ -87,11 +92,22 @@ abstract class Erasure extends AddInterfaces
   // more rigorous way up front rather than catching it after the fact,
   // but that will be more involved.
   private def dotCleanup(sig: String): String = {
-    var last: Char = '\0'
-    sig map {
-      case '.' if last != '>' => last = '.' ; '$'
-      case ch                 => last = ch ; ch
+    // OPT 50% of time in generic signatures (~1% of compile time) was in this method, hence the imperative rewrite.
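+    // Hypothetical illustration: a '.' as in "Foo.Bar" becomes '$' ("Foo$Bar"), whereas the '.' in
+    // "Foo<TA;>.Bar" follows '>' and is preserved, matching the generic signature grammar.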
+    var last: Char = '\u0000'
+    var i = 0
+    val len = sig.length
+    val copy: Array[Char] = sig.toCharArray
+    var changed = false
+    while (i < sig.length) {
+      val ch = copy(i)
+      if (ch == '.' && last != '>') {
+         copy(i) = '$'
+         changed = true
+      }
+      last = ch
+      i += 1
     }
+    if (changed) new String(copy) else sig
   }
 
   /** This object is only used for sanity testing when -check:genjvm is set.
@@ -100,7 +116,7 @@ abstract class Erasure extends AddInterfaces
    *  unboxing some primitive types and further simplifications as they are done in jsig.
    */
   val prepareSigMap = new TypeMap {
-    def squashBoxed(tp: Type): Type = tp.normalize match {
+    def squashBoxed(tp: Type): Type = tp.dealiasWiden match {
       case t @ RefinedType(parents, decls) =>
         val parents1 = parents mapConserve squashBoxed
         if (parents1 eq parents) tp
@@ -110,10 +126,10 @@ abstract class Erasure extends AddInterfaces
         if (tpe1 eq tpe) t
         else ExistentialType(tparams, tpe1)
       case t =>
-        if (boxedClass contains t.typeSymbol) ObjectClass.tpe
+        if (boxedClass contains t.typeSymbol) ObjectTpe
         else tp
     }
-    def apply(tp: Type): Type = tp.normalize match {
+    def apply(tp: Type): Type = tp.dealiasWiden match {
       case tp1 @ TypeBounds(lo, hi) =>
         val lo1 = squashBoxed(apply(lo))
         val hi1 = squashBoxed(apply(hi))
@@ -122,16 +138,16 @@ abstract class Erasure extends AddInterfaces
       case tp1 @ TypeRef(pre, sym, args) =>
         def argApply(tp: Type) = {
           val tp1 = apply(tp)
-          if (tp1.typeSymbol == UnitClass) ObjectClass.tpe
+          if (tp1.typeSymbol == UnitClass) ObjectTpe
           else squashBoxed(tp1)
         }
         if (sym == ArrayClass && args.nonEmpty)
-          if (unboundedGenericArrayLevel(tp1) == 1) ObjectClass.tpe
+          if (unboundedGenericArrayLevel(tp1) == 1) ObjectTpe
           else mapOver(tp1)
         else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
-          ObjectClass.tpe
+          ObjectTpe
         else if (sym == UnitClass)
-          BoxedUnitClass.tpe
+          BoxedUnitTpe
         else if (sym == NothingClass)
           RuntimeNothingClass.tpe
         else if (sym == NullClass)
@@ -144,7 +160,7 @@ abstract class Erasure extends AddInterfaces
         }
       case tp1 @ MethodType(params, restpe) =>
         val params1 = mapOver(params)
-        val restpe1 = if (restpe.normalize.typeSymbol == UnitClass) UnitClass.tpe else apply(restpe)
+        val restpe1 = if (restpe.typeSymbol == UnitClass) UnitTpe else apply(restpe)
         if ((params1 eq params) && (restpe1 eq restpe)) tp1
         else MethodType(params1, restpe1)
       case tp1 @ RefinedType(parents, decls) =>
@@ -162,8 +178,8 @@ abstract class Erasure extends AddInterfaces
     }
   }
 
-  private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.normalize match {
-    case RefinedType(parents, _) => parents map (_.normalize)
+  private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.dealiasWiden match {
+    case RefinedType(parents, _) => parents map (_.dealiasWiden)
     case tp                      => tp :: Nil
   }
 
@@ -172,7 +188,7 @@ abstract class Erasure extends AddInterfaces
   /** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return
    *  type for constructors.
    */
-  def javaSig(sym0: Symbol, info: Type): Option[String] = beforeErasure {
+  def javaSig(sym0: Symbol, info: Type): Option[String] = enteringErasure {
     val isTraitSignature = sym0.enclClass.isTrait
 
     def superSig(parents: List[Type]) = {
@@ -181,7 +197,7 @@ abstract class Erasure extends AddInterfaces
           // java is unthrilled about seeing interfaces inherit from classes
           val ok = parents filter (p => p.typeSymbol.isTrait || p.typeSymbol.isInterface)
           // traits should always list Object.
-          if (ok.isEmpty || ok.head.typeSymbol != ObjectClass) ObjectClass.tpe :: ok
+          if (ok.isEmpty || ok.head.typeSymbol != ObjectClass) ObjectTpe :: ok
           else ok
         }
         else parents
@@ -192,7 +208,7 @@ abstract class Erasure extends AddInterfaces
     def boundsSig(bounds: List[Type]) = {
       val (isTrait, isClass) = bounds partition (_.typeSymbol.isTrait)
       val classPart = isClass match {
-        case Nil    => ":" // + boxedSig(ObjectClass.tpe)
+        case Nil    => ":" // + boxedSig(ObjectTpe)
         case x :: _ => ":" + boxedSig(x)
       }
       classPart :: (isTrait map boxedSig) mkString ":"
@@ -206,7 +222,7 @@ abstract class Erasure extends AddInterfaces
     // Anything which could conceivably be a module (i.e. isn't known to be
     // a type parameter or similar) must go through here or the signature is
     // likely to end up with Foo<T>.Empty where it needs Foo<T>.Empty$.
-    def fullNameInSig(sym: Symbol) = "L" + beforeIcode(sym.javaBinaryName)
+    def fullNameInSig(sym: Symbol) = "L" + enteringIcode(sym.javaBinaryName)
 
     def jsig(tp0: Type, existentiallyBound: List[Symbol] = Nil, toplevel: Boolean = false, primitiveOK: Boolean = true): String = {
       val tp = tp0.dealias
@@ -219,11 +235,14 @@ abstract class Erasure extends AddInterfaces
           def argSig(tp: Type) =
             if (existentiallyBound contains tp.typeSymbol) {
               val bounds = tp.typeSymbol.info.bounds
-              if (!(AnyRefClass.tpe <:< bounds.hi)) "+" + boxedSig(bounds.hi)
-              else if (!(bounds.lo <:< NullClass.tpe)) "-" + boxedSig(bounds.lo)
+              if (!(AnyRefTpe <:< bounds.hi)) "+" + boxedSig(bounds.hi)
+              else if (!(bounds.lo <:< NullTpe)) "-" + boxedSig(bounds.lo)
               else "*"
-            } else {
-              boxedSig(tp)
+            } else tp match {
+              case PolyType(_, res) =>
+                "*" // SI-7932
+              case _ =>
+                boxedSig(tp)
             }
           def classSig = {
             val preRebound = pre.baseType(sym.owner) // #2585
@@ -246,7 +265,7 @@ abstract class Erasure extends AddInterfaces
 
           // If args isEmpty, Array is being used as a type constructor
           if (sym == ArrayClass && args.nonEmpty) {
-            if (unboundedGenericArrayLevel(tp) == 1) jsig(ObjectClass.tpe)
+            if (unboundedGenericArrayLevel(tp) == 1) jsig(ObjectTpe)
             else ARRAY_TAG.toString+(args map (jsig(_))).mkString
           }
           else if (isTypeParameterInSig(sym, sym0)) {
@@ -254,20 +273,20 @@ abstract class Erasure extends AddInterfaces
             "" + TVAR_TAG + sym.name + ";"
           }
           else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
-            jsig(ObjectClass.tpe)
+            jsig(ObjectTpe)
           else if (sym == UnitClass)
-            jsig(BoxedUnitClass.tpe)
+            jsig(BoxedUnitTpe)
           else if (sym == NothingClass)
             jsig(RuntimeNothingClass.tpe)
           else if (sym == NullClass)
             jsig(RuntimeNullClass.tpe)
           else if (isPrimitiveValueClass(sym)) {
-            if (!primitiveOK) jsig(ObjectClass.tpe)
-            else if (sym == UnitClass) jsig(BoxedUnitClass.tpe)
+            if (!primitiveOK) jsig(ObjectTpe)
+            else if (sym == UnitClass) jsig(BoxedUnitTpe)
             else abbrvTag(sym).toString
           }
           else if (sym.isDerivedValueClass) {
-            val unboxed     = sym.derivedValueClassUnbox.info.finalResultType
+            val unboxed     = sym.derivedValueClassUnbox.tpe_*.finalResultType
             val unboxedSeen = (tp memberType sym.derivedValueClassUnbox).finalResultType
             def unboxedMsg  = if (unboxed == unboxedSeen) "" else s", seen within ${sym.simpleName} as $unboxedSeen"
             logResult(s"Erasure of value class $sym (underlying type $unboxed$unboxedMsg) is") {
@@ -297,7 +316,7 @@ abstract class Erasure extends AddInterfaces
           boxedSig(parent)
         case ClassInfoType(parents, _, _) =>
           superSig(parents)
-        case AnnotatedType(_, atp, _) =>
+        case AnnotatedType(_, atp) =>
           jsig(atp, existentiallyBound, toplevel, primitiveOK)
         case BoundedWildcardType(bounds) =>
           println("something's wrong: "+sym0+":"+sym0.tpe+" has a bounded wildcard type")
@@ -340,10 +359,9 @@ abstract class Erasure extends AddInterfaces
       case _                               => tp.deconst
     }
   }
-  
+
   // ## requires a little translation
   private lazy val poundPoundMethods = Set[Symbol](Any_##, Object_##)
-  
   // Methods on Any/Object which we rewrite here while we still know what
   // is a primitive and what arrived boxed.
   private lazy val interceptedMethods = poundPoundMethods ++ primitiveGetClassMethods
@@ -352,44 +370,6 @@ abstract class Erasure extends AddInterfaces
 
   override def newTyper(context: Context) = new Eraser(context)
 
-  private def safeToRemoveUnbox(cls: Symbol): Boolean =
-    (cls == definitions.NullClass) || isBoxedValueClass(cls)
-
-  /** An extractor object for unboxed expressions (maybe subsumed by posterasure?) */
-  object Unboxed {
-    def unapply(tree: Tree): Option[Tree] = tree match {
-      case Apply(fn, List(arg)) if isUnbox(fn.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) =>
-        Some(arg)
-      case Apply(
-        TypeApply(
-          cast @ Select(
-            Apply(
-              sel @ Select(arg, acc),
-              List()),
-            asinstanceof),
-          List(tpt)),
-        List())
-      if cast.symbol == Object_asInstanceOf &&
-        tpt.tpe.typeSymbol.isDerivedValueClass &&
-        sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox =>
-        Some(arg)
-      case _ =>
-        None
-    }
-  }
-
-  /** An extractor object for boxed expressions (maybe subsumed by posterasure?) */
-  object Boxed {
-    def unapply(tree: Tree): Option[Tree] = tree match {
-      case Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)) if (tpt.tpe.typeSymbol.isDerivedValueClass) =>
-        Some(arg)
-      case LabelDef(name, params, Boxed(rhs)) =>
-        Some(treeCopy.LabelDef(tree, name, params, rhs) setType rhs.tpe)
-      case _ =>
-        None
-    }
-  }
-
   class ComputeBridges(unit: CompilationUnit, root: Symbol) {
     assert(phase == currentRun.erasurePhase, phase)
 
@@ -399,22 +379,19 @@ abstract class Erasure extends AddInterfaces
     val bridgeTarget = mutable.HashMap[Symbol, Symbol]()
     var bridges      = List[Tree]()
 
-    val opc = beforeExplicitOuter {
+    val opc = enteringExplicitOuter {
       new overridingPairs.Cursor(root) {
         override def parents              = List(root.info.firstParent)
-        override def exclude(sym: Symbol) = !sym.isMethod || sym.isPrivate || super.exclude(sym)
+        override def exclude(sym: Symbol) = !sym.isMethod || super.exclude(sym)
       }
     }
 
     def compute(): (List[Tree], immutable.Set[Symbol]) = {
       while (opc.hasNext) {
-        val member = opc.overriding
-        val other  = opc.overridden
-        //println("bridge? " + member + ":" + member.tpe + member.locationString + " to " + other + ":" + other.tpe + other.locationString)//DEBUG
-        if (beforeExplicitOuter(!member.isDeferred))
-          checkPair(member, other)
+        if (enteringExplicitOuter(!opc.low.isDeferred))
+          checkPair(opc.currentPair)
 
-        opc.next
+        opc.next()
       }
       (bridges, toBeRemoved)
     }
@@ -429,23 +406,23 @@ abstract class Erasure extends AddInterfaces
      *  @param  other    The overidden symbol for which the bridge was generated
      *  @param  bridge   The bridge
      */
-    def checkBridgeOverrides(member: Symbol, other: Symbol, bridge: Symbol): Boolean = {
+    def checkBridgeOverrides(member: Symbol, other: Symbol, bridge: Symbol): Seq[(Position, String)] = {
       def fulldef(sym: Symbol) =
         if (sym == NoSymbol) sym.toString
         else s"$sym: ${sym.tpe} in ${sym.owner}"
       var noclash = true
+      val clashErrors = mutable.Buffer[(Position, String)]()
       def clashError(what: String) = {
-        noclash = false
-        unit.error(
-          if (member.owner == root) member.pos else root.pos,
-          sm"""bridge generated for member ${fulldef(member)}
-              |which overrides ${fulldef(other)}
-              |clashes with definition of $what;
-              |both have erased type ${afterPostErasure(bridge.tpe)}""")
+        val pos = if (member.owner == root) member.pos else root.pos
+        val msg = sm"""bridge generated for member ${fulldef(member)}
+                      |which overrides ${fulldef(other)}
+                      |clashes with definition of $what;
+                      |both have erased type ${exitingPostErasure(bridge.tpe)}"""
+        clashErrors += Tuple2(pos, msg)
       }
       for (bc <- root.baseClasses) {
-        if (settings.debug.value)
-          afterPostErasure(println(
+        if (settings.debug)
+          exitingPostErasure(println(
             sm"""check bridge overrides in $bc
                 |${bc.info.nonPrivateDecl(bridge.name)}
                 |${site.memberType(bridge)}
@@ -454,24 +431,31 @@ abstract class Erasure extends AddInterfaces
 
         def overriddenBy(sym: Symbol) =
           sym.matchingSymbol(bc, site).alternatives filter (sym => !sym.isBridge)
-        for (overBridge <- afterPostErasure(overriddenBy(bridge))) {
+        for (overBridge <- exitingPostErasure(overriddenBy(bridge))) {
           if (overBridge == member) {
             clashError("the member itself")
           } else {
             val overMembers = overriddenBy(member)
             if (!overMembers.exists(overMember =>
-              afterPostErasure(overMember.tpe =:= overBridge.tpe))) {
+              exitingPostErasure(overMember.tpe =:= overBridge.tpe))) {
               clashError(fulldef(overBridge))
             }
           }
         }
       }
-      noclash
+      clashErrors
     }
 
-    def checkPair(member: Symbol, other: Symbol) {
-      val otpe = specialErasure(root)(other.tpe)
-      val bridgeNeeded = afterErasure (
+    /** TODO - work through this logic with a fine-toothed comb, incorporating
+     *  into SymbolPairs where appropriate.
+     */
+    def checkPair(pair: SymbolPair) {
+      import pair._
+      val member = low
+      val other  = high
+      val otpe   = highErased
+
+      val bridgeNeeded = exitingErasure (
         !member.isMacro &&
         !(other.tpe =:= member.tpe) &&
         !(deconstMap(other.tpe) =:= deconstMap(member.tpe)) &&
@@ -484,7 +468,7 @@ abstract class Erasure extends AddInterfaces
       if (!bridgeNeeded)
         return
 
-      val newFlags = (member.flags | BRIDGE) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
+      val newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
       val bridge   = other.cloneSymbolImpl(root, newFlags) setPos root.pos
 
       debuglog("generating bridge from %s (%s): %s to %s: %s".format(
@@ -497,11 +481,21 @@ abstract class Erasure extends AddInterfaces
       bridge setInfo (otpe cloneInfo bridge)
       bridgeTarget(bridge) = member
 
-      if (!(member.tpe exists (_.typeSymbol.isDerivedValueClass)) ||
-          checkBridgeOverrides(member, other, bridge)) {
-        afterErasure(root.info.decls enter bridge)
+      def sigContainsValueClass = (member.tpe exists (_.typeSymbol.isDerivedValueClass))
+
+      val shouldAdd = (
+            !sigContainsValueClass
+        ||  (checkBridgeOverrides(member, other, bridge) match {
+              case Nil => true
+              case es if member.owner.isAnonymousClass => resolveAnonymousBridgeClash(member, bridge); true
+              case es => for ((pos, msg) <- es) unit.error(pos, msg); false
+            })
+      )
+
+      if (shouldAdd) {
+        exitingErasure(root.info.decls enter bridge)
         if (other.owner == root) {
-          afterErasure(root.info.decls.unlink(other))
+          exitingErasure(root.info.decls.unlink(other))
           toBeRemoved += other
         }
 
@@ -510,7 +504,7 @@ abstract class Erasure extends AddInterfaces
       }
     }
 
-    def makeBridgeDefDef(bridge: Symbol, member: Symbol, other: Symbol) = afterErasure {
+    def makeBridgeDefDef(bridge: Symbol, member: Symbol, other: Symbol) = exitingErasure {
       // type checking ensures we can safely call `other`, but unless `member.tpe <:< other.tpe`,
       // calling `member` is not guaranteed to succeed in general, there's
       // nothing we can do about this, except for an unapply: when this subtype test fails,
@@ -521,10 +515,10 @@ abstract class Erasure extends AddInterfaces
       def maybeWrap(bridgingCall: Tree): Tree = {
         val guardExtractor = ( // can't statically know which member is going to be selected, so don't let this depend on member.isSynthetic
              (member.name == nme.unapply || member.name == nme.unapplySeq)
-          && !afterErasure((member.tpe <:< other.tpe))) // no static guarantees (TODO: is the subtype test ever true?)
+          && !exitingErasure((member.tpe <:< other.tpe))) // no static guarantees (TODO: is the subtype test ever true?)
 
         import CODE._
-        val _false    = FALSE_typed
+        val _false    = FALSE
         val pt        = member.tpe.resultType
         lazy val zero =
           if      (_false.tpe <:< pt)    _false
@@ -544,168 +538,15 @@ abstract class Erasure extends AddInterfaces
 
           maybeWrap(bridgingCall)
       }
-      atPos(bridge.pos)(DefDef(bridge, rhs))
+      DefDef(bridge, rhs)
     }
   }
 
   /** The modifier typer which retypes with erased types. */
-  class Eraser(_context: Context) extends Typer(_context) {
-
-    private def isPrimitiveValueType(tpe: Type) = isPrimitiveValueClass(tpe.typeSymbol)
-
-    private def isDifferentErasedValueType(tpe: Type, other: Type) =
-      isErasedValueType(tpe) && (tpe ne other)
-
-    private def isPrimitiveValueMember(sym: Symbol) =
-      sym != NoSymbol && isPrimitiveValueClass(sym.owner)
-
-    @inline private def box(tree: Tree, target: => String): Tree = {
-      val result = box1(tree)
-      log(s"boxing ${tree.summaryString}: ${tree.tpe} into $target: ${result.tpe}")
-      result
-    }
-
-    /** Box `tree` of unboxed type */
-    private def box1(tree: Tree): Tree = tree match {
-      case LabelDef(_, _, _) =>
-        val ldef = deriveLabelDef(tree)(box1)
-        ldef setType ldef.rhs.tpe
-      case _ =>
-        val tree1 = tree.tpe match {
-          case ErasedValueType(tref) =>
-            val clazz = tref.sym
-            tree match {
-              case Unboxed(arg) if arg.tpe.typeSymbol == clazz =>
-                log("shortcircuiting unbox -> box "+arg); arg
-              case _ =>
-                New(clazz, cast(tree, underlyingOfValueClass(clazz)))
-            }
-          case _ =>
-            tree.tpe.typeSymbol match {
-          case UnitClass  =>
-            if (treeInfo isExprSafeToInline tree) REF(BoxedUnit_UNIT)
-            else BLOCK(tree, REF(BoxedUnit_UNIT))
-          case NothingClass => tree // a non-terminating expression doesn't need boxing
-          case x          =>
-            assert(x != ArrayClass)
-            tree match {
-              /** Can't always remove a Box(Unbox(x)) combination because the process of boxing x
-               *  may lead to throwing an exception.
-               *
-               *  This is important for specialization: calls to the super constructor should not box/unbox specialized
-               *  fields (see TupleX). (ID)
-               */
-              case Apply(boxFun, List(arg)) if isUnbox(tree.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) =>
-                log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}")
-                arg
-              case _ =>
-                (REF(boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectClass.tpe
-            }
-            }
-        }
-        typedPos(tree.pos)(tree1)
-    }
-
-    private def unbox(tree: Tree, pt: Type): Tree = {
-      val result = unbox1(tree, pt)
-      log(s"unboxing ${tree.summaryString}: ${tree.tpe} with pt=$pt as type ${result.tpe}")
-      result
-    }
-
-    /** Unbox `tree` of boxed type to expected type `pt`.
-     *
-     *  @param tree the given tree
-     *  @param pt   the expected type.
-     *  @return     the unboxed tree
-     */
-    private def unbox1(tree: Tree, pt: Type): Tree = tree match {
-/*
-      case Boxed(unboxed) =>
-        println("unbox shorten: "+tree) // this never seems to kick in during build and test; therefore disabled.
-        adaptToType(unboxed, pt)
- */
-      case LabelDef(_, _, _) =>
-        val ldef = deriveLabelDef(tree)(unbox(_, pt))
-        ldef setType ldef.rhs.tpe
-      case _ =>
-        val tree1 = pt match {
-          case ErasedValueType(tref) =>
-            tree match {
-              case Boxed(arg) if arg.tpe.isInstanceOf[ErasedValueType] =>
-                log("shortcircuiting box -> unbox "+arg)
-                arg
-              case _ =>
-                val clazz = tref.sym
-                log("not boxed: "+tree)
-                lazy val underlying = underlyingOfValueClass(clazz)
-                val tree0 =
-                  if (tree.tpe.typeSymbol == NullClass &&
-                      isPrimitiveValueClass(underlying.typeSymbol)) {
-                    // convert `null` directly to underlying type, as going
-                    // via the unboxed type would yield a NPE (see SI-5866)
-                    unbox1(tree, underlying)
-                  } else
-                    Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List())
-                cast(tree0, pt)
-            }
-          case _ =>
-            pt.typeSymbol match {
-              case UnitClass  =>
-                if (treeInfo isExprSafeToInline tree) UNIT
-                else BLOCK(tree, UNIT)
-              case x          =>
-                assert(x != ArrayClass)
-                // don't `setType pt` the Apply tree, as the Apply's fun won't be typechecked if the Apply tree already has a type
-                Apply(unboxMethod(pt.typeSymbol), tree)
-            }
-        }
-        typedPos(tree.pos)(tree1)
-    }
-
-    /** Generate a synthetic cast operation from tree.tpe to pt.
-     *  @pre pt eq pt.normalize
-     */
-    private def cast(tree: Tree, pt: Type): Tree = logResult(s"cast($tree, $pt)") {
-      if (pt.typeSymbol == UnitClass) {
-        // See SI-4731 for one example of how this occurs.
-        log("Attempted to cast to Unit: " + tree)
-        tree.duplicate setType pt
-      } else if (tree.tpe != null && tree.tpe.typeSymbol == ArrayClass && pt.typeSymbol == ArrayClass) {
-        // See SI-2386 for one example of when this might be necessary.
-        val needsExtraCast = isPrimitiveValueType(tree.tpe.typeArgs.head) && !isPrimitiveValueType(pt.typeArgs.head)
-        val tree1 = if (needsExtraCast) gen.mkRuntimeCall(nme.toObjectArray, List(tree)) else tree
-        gen.mkAttributedCast(tree1, pt)
-      } else gen.mkAttributedCast(tree, pt)
-    }
+  class Eraser(_context: Context) extends Typer(_context) with TypeAdapter {
+    val typer = this.asInstanceOf[analyzer.Typer]
 
-    /** Adapt `tree` to expected type `pt`.
-     *
-     *  @param tree the given tree
-     *  @param pt   the expected type
-     *  @return     the adapted tree
-     */
-    private def adaptToType(tree: Tree, pt: Type): Tree = {
-      if (settings.debug.value && pt != WildcardType)
-        log("adapting " + tree + ":" + tree.tpe + " : " +  tree.tpe.parents + " to " + pt)//debug
-      if (tree.tpe <:< pt)
-        tree
-      else if (isDifferentErasedValueType(tree.tpe, pt))
-        adaptToType(box(tree, pt.toString), pt)
-      else if (isDifferentErasedValueType(pt, tree.tpe))
-        adaptToType(unbox(tree, pt), pt)
-      else if (isPrimitiveValueType(tree.tpe) && !isPrimitiveValueType(pt)) {
-        adaptToType(box(tree, pt.toString), pt)
-      } else if (isMethodTypeWithEmptyParams(tree.tpe)) {
-        // [H] this assert fails when trying to typecheck tree !(SomeClass.this.bitmap) for single lazy val
-        //assert(tree.symbol.isStable, "adapt "+tree+":"+tree.tpe+" to "+pt)
-        adaptToType(Apply(tree, List()) setPos tree.pos setType tree.tpe.resultType, pt)
-//      } else if (pt <:< tree.tpe)
-//        cast(tree, pt)
-      } else if (isPrimitiveValueType(pt) && !isPrimitiveValueType(tree.tpe))
-        adaptToType(unbox(tree, pt), pt)
-      else
-        cast(tree, pt)
-    }
+    override protected def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = tree
 
     /**  Replace member references as follows:
      *
@@ -723,45 +564,35 @@ abstract class Erasure extends AddInterfaces
     private def adaptMember(tree: Tree): Tree = {
       //Console.println("adaptMember: " + tree);
       tree match {
-        case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
+        case Apply(ta @ TypeApply(sel @ Select(qual, name), List(targ)), List())
         if tree.symbol == Any_asInstanceOf =>
-          val qual1 = typedQualifier(qual, NOmode, ObjectClass.tpe) // need to have an expected type, see #3037
-          val qualClass = qual1.tpe.typeSymbol
-/*
-          val targClass = targ.tpe.typeSymbol
-
-          if (isNumericValueClass(qualClass) && isNumericValueClass(targClass))
-            // convert numeric type casts
-            atPos(tree.pos)(Apply(Select(qual1, "to" + targClass.name), List()))
-          else
-*/
+          val qual1 = typedQualifier(qual, NOmode, ObjectTpe) // need to have an expected type, see #3037
+          // !!! Make pending/run/t5866b.scala work. The fix might be here and/or in unbox1.
           if (isPrimitiveValueType(targ.tpe) || isErasedValueType(targ.tpe)) {
             val noNullCheckNeeded = targ.tpe match {
-              case ErasedValueType(tref) =>
-                atPhase(currentRun.erasurePhase) {
-                  isPrimitiveValueClass(erasedValueClassArg(tref).typeSymbol)
-                }
+              case ErasedValueType(_, underlying) =>
+                isPrimitiveValueClass(underlying.typeSymbol)
               case _ =>
                 true
             }
             if (noNullCheckNeeded) unbox(qual1, targ.tpe)
             else {
-              def nullConst = Literal(Constant(null)) setType NullClass.tpe
               val untyped =
 //                util.trace("new asinstanceof test") {
                   gen.evalOnce(qual1, context.owner, context.unit) { qual =>
-                    If(Apply(Select(qual(), nme.eq), List(Literal(Constant(null)) setType NullClass.tpe)),
+                    If(Apply(Select(qual(), nme.eq), List(Literal(Constant(null)) setType NullTpe)),
                        Literal(Constant(null)) setType targ.tpe,
                        unbox(qual(), targ.tpe))
                   }
 //                }
               typed(untyped)
             }
-          } else tree
+          } else treeCopy.Apply(tree, treeCopy.TypeApply(ta, treeCopy.Select(sel, qual1, name), List(targ)), List())
+
         case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
         if tree.symbol == Any_isInstanceOf =>
           targ.tpe match {
-            case ErasedValueType(tref) => targ.setType(tref.sym.tpe)
+            case ErasedValueType(clazz, _) => targ.setType(clazz.tpe)
             case _ =>
           }
             tree
@@ -791,7 +622,7 @@ abstract class Erasure extends AddInterfaces
               tree.symbol = NoSymbol
               selectFrom(qual1)
             } else if (isMethodTypeWithEmptyParams(qual1.tpe)) {
-              assert(qual1.symbol.isStable, qual1.symbol);
+              assert(qual1.symbol.isStable, qual1.symbol)
               val applied = Apply(qual1, List()) setPos qual1.pos setType qual1.tpe.resultType
               adaptMember(selectFrom(applied))
             } else if (!(qual1.isInstanceOf[Super] || (qual1.tpe.typeSymbol isSubClass tree.symbol.owner))) {
@@ -812,23 +643,23 @@ abstract class Erasure extends AddInterfaces
 
     /** A replacement for the standard typer's adapt method.
      */
-    override protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree =
+    override protected def adapt(tree: Tree, mode: Mode, pt: Type, original: Tree = EmptyTree): Tree =
       adaptToType(tree, pt)
 
     /** A replacement for the standard typer's `typed1` method.
      */
-    override def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
+    override def typed1(tree: Tree, mode: Mode, pt: Type): Tree = {
       val tree1 = try {
         tree match {
           case InjectDerivedValue(arg) =>
             (tree.attachments.get[TypeRefAttachment]: @unchecked) match {
               case Some(itype) =>
                 val tref = itype.tpe
-                val argPt = atPhase(currentRun.erasurePhase)(erasedValueClassArg(tref))
+                val argPt = enteringErasure(erasedValueClassArg(tref))
                 log(s"transforming inject $arg -> $tref/$argPt")
                 val result = typed(arg, mode, argPt)
                 log(s"transformed inject $arg -> $tref/$argPt = $result:${result.tpe}")
-                return result setType ErasedValueType(tref)
+                return result setType ErasedValueType(tref.sym, result.tpe)
 
             }
           case _ =>
@@ -852,7 +683,7 @@ abstract class Erasure extends AddInterfaces
         newCdef setType newCdef.body.tpe
       }
       def adaptBranch(branch: Tree): Tree =
-        if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe);
+        if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe)
 
       tree1 match {
         case If(cond, thenp, elsep) =>
@@ -868,8 +699,7 @@ abstract class Erasure extends AddInterfaces
               alt => alt == first || !(first.tpe looselyMatches alt.tpe)
             }
             if (tree.symbol ne sym1) {
-              tree1.symbol = sym1
-              tree1.tpe = sym1.tpe
+              tree1 setSymbol sym1 setType sym1.tpe
             }
           }
           tree1
@@ -877,47 +707,39 @@ abstract class Erasure extends AddInterfaces
           tree1
       }
     }
-
-    private def isMethodTypeWithEmptyParams(tpe: Type) = tpe match {
-      case MethodType(Nil, _) => true
-      case _                  => false
-    }
   }
 
   /** The erasure transformer */
   class ErasureTransformer(unit: CompilationUnit) extends Transformer {
-    /** Emit an error if there is a double definition. This can happen if:
-     *
-     *  - A template defines two members with the same name and erased type.
-     *  - A template defines and inherits two members `m` with different types,
-     *    but their erased types are the same.
-     *  - A template inherits two members `m` with different types,
-     *    but their erased types are the same.
-     */
-    private def checkNoDoubleDefs(root: Symbol) {
-      def sameTypeAfterErasure(sym1: Symbol, sym2: Symbol) =
-        afterPostErasure(sym1.info =:= sym2.info) && !sym1.isMacro && !sym2.isMacro
-
-      def doubleDefError(sym1: Symbol, sym2: Symbol) {
-        // the .toString must also be computed at the earlier phase
-        val tpe1 = afterRefchecks(root.thisType.memberType(sym1))
-        val tpe2 = afterRefchecks(root.thisType.memberType(sym2))
-        if (!tpe1.isErroneous && !tpe2.isErroneous)
-          unit.error(
-          if (sym1.owner == root) sym1.pos else root.pos,
-          (if (sym1.owner == sym2.owner) "double definition:\n"
-           else if (sym1.owner == root) "name clash between defined and inherited member:\n"
-           else "name clash between inherited members:\n") +
-          sym1 + ":" + afterRefchecks(tpe1.toString) +
-            (if (sym1.owner == root) "" else sym1.locationString) + " and\n" +
-          sym2 + ":" + afterRefchecks(tpe2.toString) +
-            (if (sym2.owner == root) " at line " + (sym2.pos).line else sym2.locationString) +
-          "\nhave same type" +
-          (if (afterRefchecks(tpe1 =:= tpe2)) "" else " after erasure: " + afterPostErasure(sym1.tpe)))
-        sym1.setInfo(ErrorType)
+    import overridingPairs.Cursor
+
+    private def doubleDefError(pair: SymbolPair) {
+      import pair._
+
+      if (!pair.isErroneous) {
+        val what = (
+          if (low.owner == high.owner) "double definition"
+          else if (low.owner == base) "name clash between defined and inherited member"
+          else "name clash between inherited members"
+        )
+        val when = if (exitingRefchecks(lowType matches highType)) "" else " after erasure: " + exitingPostErasure(highType)
+
+        unit.error(pos,
+          s"""|$what:
+              |${exitingRefchecks(highString)} and
+              |${exitingRefchecks(lowString)}
+              |have same type$when""".trim.stripMargin
+        )
       }
+      low setInfo ErrorType
+    }
+
+    private def sameTypeAfterErasure(sym1: Symbol, sym2: Symbol) =
+      exitingPostErasure(sym1.info =:= sym2.info) && !sym1.isMacro && !sym2.isMacro
 
-      val decls = root.info.decls
+    /** TODO - adapt SymbolPairs so it can be used here. */
+    private def checkNoDeclaredDoubleDefs(base: Symbol) {
+      val decls = base.info.decls
 
       // SI-8010 force infos, otherwise makeNotPrivate in ExplicitOuter info transformer can trigger
       //         a scope rehash while we're iterating and we can see the same entry twice!
@@ -928,62 +750,52 @@ abstract class Erasure extends AddInterfaces
       //         Why not just create a temporary scope here? We need to force the name changes in any case before
       //         we do these checks, so that we're comparing same-named methods based on the expanded names that actually
       //         end up in the bytecode.
-      afterPostErasure(decls.foreach(_.info))
+      exitingPostErasure(decls.foreach(_.info))
 
       var e = decls.elems
       while (e ne null) {
         if (e.sym.isTerm) {
-          var e1 = decls.lookupNextEntry(e)
+          var e1 = decls lookupNextEntry e
           while (e1 ne null) {
             assert(e.sym ne e1.sym, s"Internal error: encountered ${e.sym.debugLocationString} twice during scope traversal. This might be related to SI-8010.")
-            if (sameTypeAfterErasure(e1.sym, e.sym)) doubleDefError(e.sym, e1.sym)
-            e1 = decls.lookupNextEntry(e1)
+            if (sameTypeAfterErasure(e.sym, e1.sym))
+              doubleDefError(new SymbolPair(base, e.sym, e1.sym))
+
+            e1 = decls lookupNextEntry e1
           }
         }
         e = e.next
       }
+    }
 
-      val opc = new overridingPairs.Cursor(root) {
-        override def exclude(sym: Symbol): Boolean =
-          (!sym.isTerm || sym.isPrivate || super.exclude(sym)
-           // specialized members have no type history before 'specialize', causing double def errors for curried defs
-           || !sym.hasTypeAt(currentRun.refchecksPhase.id))
-
-        override def matches(sym1: Symbol, sym2: Symbol): Boolean =
-          afterPostErasure(sym1.tpe =:= sym2.tpe)
+    /** Emit an error if there is a double definition. This can happen if:
+     *
+     *  - A template defines two members with the same name and erased type.
+     *  - A template defines and inherits two members `m` with different types,
+     *    but their erased types are the same.
+     *  - A template inherits two members `m` with different types,
+     *    but their erased types are the same.
+     */
+    private def checkNoDoubleDefs(root: Symbol) {
+      checkNoDeclaredDoubleDefs(root)
+      object opc extends Cursor(root) {
+        // specialized members have no type history before 'specialize', causing double def errors for curried defs
+        override def exclude(sym: Symbol): Boolean = (
+             sym.isType
+          || sym.isPrivate
+          || super.exclude(sym)
+          || !sym.hasTypeAt(currentRun.refchecksPhase.id)
+        )
+        override def matches(lo: Symbol, high: Symbol) = true
       }
-      while (opc.hasNext) {
-        if (!afterRefchecks(
-              root.thisType.memberType(opc.overriding) matches
-              root.thisType.memberType(opc.overridden)) &&
-            sameTypeAfterErasure(opc.overriding, opc.overridden)) {
-          debuglog("" + opc.overriding.locationString + " " +
-                     opc.overriding.infosString +
-                     opc.overridden.locationString + " " +
-                     opc.overridden.infosString)
-          doubleDefError(opc.overriding, opc.overridden)
-        }
-        opc.next
+      def isErasureDoubleDef(pair: SymbolPair) = {
+        import pair._
+        log(s"Considering for erasure clash:\n$pair")
+        !exitingRefchecks(lowType matches highType) && sameTypeAfterErasure(low, high)
       }
+      opc.iterator filter isErasureDoubleDef foreach doubleDefError
     }
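
The check above fires when two members that are distinct before erasure collapse onto the same signature afterwards. A minimal sketch (not part of this patch) of the kind of definition it rejects:

    // Both methods erase to f(xs: List): Int, so the compiler reports
    // "double definition: ... have same type after erasure" and rejects the class.
    class C {
      def f(xs: List[Int]): Int    = xs.sum
      def f(xs: List[String]): Int = xs.length
    }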
 
-/*
-      for (bc <- root.info.baseClasses.tail; other <- bc.info.decls.toList) {
-        if (other.isTerm && !other.isConstructor && !(other hasFlag (PRIVATE | BRIDGE))) {
-          for (member <- root.info.nonPrivateMember(other.name).alternatives) {
-            if (member != other &&
-                !(member hasFlag BRIDGE) &&
-                afterErasure(member.tpe =:= other.tpe) &&
-                !afterRefchecks(
-                  root.thisType.memberType(member) matches root.thisType.memberType(other))) {
-              debuglog("" + member.locationString + " " + member.infosString + other.locationString + " " + other.infosString);
-              doubleDefError(member, other)
-            }
-          }
-        }
-      }
-*/
-
     /**  Add bridge definitions to a template. This means:
      *
      *   If there is a concrete member `m` which overrides a member in a base
@@ -998,7 +810,6 @@ abstract class Erasure extends AddInterfaces
      */
     private def bridgeDefs(owner: Symbol): (List[Tree], immutable.Set[Symbol]) = {
       assert(phase == currentRun.erasurePhase, phase)
-      debuglog("computing bridges for " + owner)
       new ComputeBridges(unit, owner) compute()
     }
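
bridgeDefs computes the synthetic forwarders that keep overriding intact once generic signatures are erased. A hedged sketch (illustrative, not taken from this patch) of a pair that needs a bridge:

    // After erasure A.get returns Object while B.get returns String, so a
    // synthetic bridge method returning Object is emitted in B to forward
    // to the String-returning implementation.
    abstract class A[T] { def get: T }
    class B extends A[String] { def get: String = "bridged" }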
 
@@ -1037,7 +848,6 @@ abstract class Erasure extends AddInterfaces
           case Select(qual, _) => qual
           case TypeApply(Select(qual, _), _) => qual
         }
-
         def preEraseAsInstanceOf = {
           (fn: @unchecked) match {
             case TypeApply(Select(qual, _), List(targ)) =>
@@ -1054,7 +864,7 @@ abstract class Erasure extends AddInterfaces
         def preEraseIsInstanceOf = {
           fn match {
             case TypeApply(sel @ Select(qual, name), List(targ)) =>
-              if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefClass.tpe)
+              if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefTpe)
                 unit.error(sel.pos, "isInstanceOf cannot test if value types are references.")
 
               def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
@@ -1065,7 +875,7 @@ abstract class Erasure extends AddInterfaces
                   List()) setPos tree.pos
               targ.tpe match {
                 case SingleType(_, _) | ThisType(_) | SuperType(_, _) =>
-                  val cmpOp = if (targ.tpe <:< AnyValClass.tpe) Any_equals else Object_eq
+                  val cmpOp = if (targ.tpe <:< AnyValTpe) Any_equals else Object_eq
                   atPos(tree.pos) {
                     Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
                   }
@@ -1095,7 +905,9 @@ abstract class Erasure extends AddInterfaces
           preEraseAsInstanceOf
         } else if (fn.symbol == Any_isInstanceOf) {
           preEraseIsInstanceOf
-        } else if (fn.symbol.owner.isRefinementClass && !fn.symbol.isOverridingSymbol) {
+        } else if (fn.symbol.isOnlyRefinementMember) {
+          // !!! Another spot where we produce overloaded types (see test pos/t6301)
+          log(s"${fn.symbol.fullLocationString} originates in refinement class - call will be implemented via reflection.")
           ApplyDynamic(qualifier, args) setSymbol fn.symbol setPos tree.pos
         } else if (fn.symbol.isMethodWithExtension && !fn.symbol.tpe.isErroneous) {
           Apply(gen.mkAttributedRef(extensionMethods.extensionMethod(fn.symbol)), qualifier :: args)
@@ -1154,7 +966,8 @@ abstract class Erasure extends AddInterfaces
                   SelectFromArray(qual, name, erasure(tree.symbol)(qual.tpe)).copyAttrs(fn),
                   args)
               }
-            } else if (args.isEmpty && interceptedMethods(fn.symbol)) {
+            }
+            else if (args.isEmpty && interceptedMethods(fn.symbol)) {
               if (poundPoundMethods.contains(fn.symbol)) {
                 // This is unattractive, but without it we crash here on ().## because after
                 // erasure the ScalaRunTime.hash overload goes from Unit => Int to BoxedUnit => Int.
@@ -1166,13 +979,24 @@ abstract class Erasure extends AddInterfaces
                   case s @ (ShortClass | ByteClass | CharClass) => numericConversion(qual, s)
                   case BooleanClass                             => If(qual, LIT(true.##), LIT(false.##))
                   case _                                        =>
-                    global.typer.typed(gen.mkRuntimeCall(nme.hash_, List(qual)))
+                    // Since we are past typer, we need to avoid creating trees carrying
+                    // overloaded types.  This logic is custom (and technically incomplete,
+                    // although serviceable) for def hash.  What is really needed is for
+                    // the overloading logic presently hidden away in a few different
+                    // places to be properly exposed so we can just call "resolveOverload"
+                    // after typer.  Until then:
+                    val alts    = ScalaRunTimeModule.info.member(nme.hash_).alternatives
+                    def alt1    = alts find (_.info.paramTypes.head =:= qual.tpe)
+                    def alt2    = ScalaRunTimeModule.info.member(nme.hash_) suchThat (_.info.paramTypes.head.typeSymbol == AnyClass)
+                    val newTree = gen.mkRuntimeCall(nme.hash_, qual :: Nil) setSymbol (alt1 getOrElse alt2)
+
+                    global.typer.typed(newTree)
                 }
               } else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) {
                 // Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
                 global.typer.typed(gen.mkRuntimeCall(nme.anyValClass, List(qual, typer.resolveClassTag(tree.pos, qual.tpe.widen))))
               } else if (primitiveGetClassMethods.contains(fn.symbol)) {
-                // if we got here then we're trying to send a primitive getClass method to either 
+                // if we got here then we're trying to send a primitive getClass method to either
                 // a) an Any, in which case Object_getClass works because Any erases to Object. Or
                 //
                 // b) a non-primitive, e.g. because the qualifier's type is a refinement type where one parent
@@ -1211,12 +1035,19 @@ abstract class Erasure extends AddInterfaces
           preErase(fun)
 
         case Select(qual, name) =>
-          val owner = tree.symbol.owner
-          // println("preXform: "+ (tree, tree.symbol, tree.symbol.owner, tree.symbol.owner.isRefinementClass))
+          val sym = tree.symbol
+          val owner = sym.owner
           if (owner.isRefinementClass) {
-            val overridden = tree.symbol.nextOverriddenSymbol
-            assert(overridden != NoSymbol, tree.symbol)
-            tree.symbol = overridden
+            sym.allOverriddenSymbols filterNot (_.owner.isRefinementClass) match {
+              case overridden :: _ =>
+                log(s"${sym.fullLocationString} originates in refinement class - replacing with ${overridden.fullLocationString}.")
+                tree.symbol = overridden
+              case Nil =>
+                // Ideally this should not be reached or reachable; anything which would
+                // get here should have been caught in the surrounding Apply.
+                devWarning(s"Failed to rewrite reflective apply - now don't know what to do with " + tree)
+                return treeCopy.Select(tree, gen.mkAttributedCast(qual, qual.tpe.widen), name)
+            }
           }
 
           def isAccessible(sym: Symbol) = localTyper.context.isAccessible(sym, sym.owner.thisType)
@@ -1243,7 +1074,7 @@ abstract class Erasure extends AddInterfaces
           assert(!currentOwner.isImplClass)
           //Console.println("checking no dble defs " + tree)//DEBUG
           checkNoDoubleDefs(tree.symbol.owner)
-          treeCopy.Template(tree, parents, emptyValDef, addBridges(body, currentOwner))
+          treeCopy.Template(tree, parents, noSelfType, addBridges(body, currentOwner))
 
         case Match(selector, cases) =>
           Match(Typed(selector, TypeTree(selector.tpe)), cases)
@@ -1251,7 +1082,7 @@ abstract class Erasure extends AddInterfaces
         case Literal(ct) if ct.tag == ClazzTag
                          && ct.typeValue.typeSymbol != definitions.UnitClass =>
           val erased = ct.typeValue match {
-            case TypeRef(pre, clazz, args) if clazz.isDerivedValueClass => scalaErasure.eraseNormalClassRef(pre, clazz)
+            case tr @ TypeRef(_, clazz, _) if clazz.isDerivedValueClass => scalaErasure.eraseNormalClassRef(tr)
             case tpe => specialScalaErasure(tpe)
           }
           treeCopy.Literal(tree, Constant(erased))
@@ -1283,13 +1114,12 @@ abstract class Erasure extends AddInterfaces
               tree1 setType specialScalaErasure(tree1.tpe)
             case ArrayValue(elemtpt, trees) =>
               treeCopy.ArrayValue(
-                tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform) setType null
+                tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform).clearType()
             case DefDef(_, _, _, _, tpt, _) =>
-              val result = super.transform(tree1) setType null
-              tpt.tpe = specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType
-              result
+              try super.transform(tree1).clearType()
+              finally tpt setType specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType
             case _ =>
-              super.transform(tree1) setType null
+              super.transform(tree1).clearType()
           }
         }
       }
@@ -1301,14 +1131,22 @@ abstract class Erasure extends AddInterfaces
     override def transform(tree: Tree): Tree = {
       val tree1 = preTransformer.transform(tree)
       // log("tree after pretransform: "+tree1)
-      afterErasure {
+      exitingErasure {
         val tree2 = mixinTransformer.transform(tree1)
         // debuglog("tree after addinterfaces: \n" + tree2)
 
-        newTyper(rootContext(unit, tree, true)).typed(tree2)
+        newTyper(rootContext(unit, tree, erasedTypes = true)).typed(tree2)
       }
     }
   }
 
+  final def resolveAnonymousBridgeClash(sym: Symbol, bridge: Symbol) {
+    // TODO reinstate this after Delambdafy generates anonymous classes that meet this requirement.
+    // require(sym.owner.isAnonymousClass, sym.owner)
+    log(s"Expanding name of ${sym.debugLocationString} as it clashes with bridge. Renaming deemed safe because the owner is anonymous.")
+    sym.expandName(sym.owner)
+    bridge.resetFlag(BRIDGE)
+  }
+
   private class TypeRefAttachment(val tpe: TypeRef)
 }
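
Much of the Erasure rework above threads an ErasedValueType that now records both the value class and its underlying type. A minimal sketch (names are illustrative) of the kind of class this machinery erases:

    // A value class: after erasure, `plus` effectively takes and returns the
    // underlying Double, with boxing into the wrapper only where a reference
    // type is required (e.g. when the value is used as an Any or a type argument).
    class Meters(val value: Double) extends AnyVal {
      def plus(other: Meters): Meters = new Meters(value + other.value)
    }
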
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 970519a..0447e23 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -3,14 +3,14 @@
  * @author Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package transform
 
 import symtab._
 import Flags.{ CASE => _, _ }
 import scala.collection.mutable
 import scala.collection.mutable.ListBuffer
-import matching.{ Patterns, ParallelMatching }
 import scala.tools.nsc.settings.ScalaVersion
 
 /** This class ...
@@ -19,15 +19,12 @@ import scala.tools.nsc.settings.ScalaVersion
  *  @version 1.0
  */
 abstract class ExplicitOuter extends InfoTransform
-      with Patterns
-      with ParallelMatching
       with TypingTransformers
       with ast.TreeDSL
 {
   import global._
   import definitions._
   import CODE._
-  import Debug.TRACE
 
   /** The following flags may be set by this phase: */
   override def phaseNewFlags: Long = notPROTECTED
@@ -76,28 +73,20 @@ abstract class ExplicitOuter extends InfoTransform
 
   class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends Transformer {
     override def transform(tree: Tree) = tree match {
-      case Bind(_, body) if toRemove(tree.symbol) =>
-        TRACE("Dropping unused binding: " + tree.symbol)
-        super.transform(body)
+      case Bind(_, body) if toRemove(tree.symbol) => super.transform(body)
       case _                                      => super.transform(tree)
     }
   }
 
-  /** Issue a migration warning for instance checks which might be on an Array and
-   *  for which the type parameter conforms to Seq, because these answers changed in 2.8.
-   */
-  def isArraySeqTest(lhs: Type, rhs: Type) =
-    (ArrayClass.tpe <:< lhs.widen) && (rhs.widen matchesPattern SeqClass.tpe)
-
   def outerAccessor(clazz: Symbol): Symbol = {
     val firstTry = clazz.info.decl(nme.expandedName(nme.OUTER, clazz))
     if (firstTry != NoSymbol && firstTry.outerSource == clazz) firstTry
     else findOrElse(clazz.info.decls)(_.outerSource == clazz)(NoSymbol)
   }
   def newOuterAccessor(clazz: Symbol) = {
-    val accFlags = SYNTHETIC | ARTIFACT | METHOD | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
+    val accFlags = SYNTHETIC | ARTIFACT | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
     val sym      = clazz.newMethod(nme.OUTER, clazz.pos, accFlags)
-    val restpe   = if (clazz.isTrait) clazz.outerClass.tpe else clazz.outerClass.thisType
+    val restpe   = if (clazz.isTrait) clazz.outerClass.tpe_* else clazz.outerClass.thisType
 
     sym expandName clazz
     sym.referenced = clazz
@@ -139,7 +128,7 @@ abstract class ExplicitOuter extends InfoTransform
    *  <ol>
    *    <li>
    *      Add an outer parameter to the formal parameters of a constructor
-   *      in a inner non-trait class;
+   *      in an inner non-trait class;
    *    </li>
    *    <li>
    *      Add a protected $outer field to an inner class which is
@@ -187,16 +176,13 @@ abstract class ExplicitOuter extends InfoTransform
       var decls1 = decls
       if (isInner(clazz) && !clazz.isInterface) {
         decls1 = decls.cloneScope
-        val outerAcc = clazz.newMethod(nme.OUTER, clazz.pos) // 3
-        outerAcc expandName clazz
-
-        decls1 enter newOuterAccessor(clazz)
+        decls1 enter newOuterAccessor(clazz) // 3
         if (hasOuterField(clazz)) //2
           decls1 enter newOuterField(clazz)
       }
       if (!clazz.isTrait && !parents.isEmpty) {
         for (mc <- clazz.mixinClasses) {
-          val mixinOuterAcc: Symbol = afterExplicitOuter(outerAccessor(mc))
+          val mixinOuterAcc: Symbol = exitingExplicitOuter(outerAccessor(mc))
           if (mixinOuterAcc != NoSymbol) {
             if (skipMixinOuterAccessor(clazz, mc))
               debuglog(s"Reusing outer accessor symbol of $clazz for the mixin outer accessor of $mc")
@@ -220,7 +206,7 @@ abstract class ExplicitOuter extends InfoTransform
       // On the other hand, mixing in the trait into a separately compiled
       // class needs to have a common naming scheme, independently of whether
       // the field was accessed from an inner class or not. See #2946
-      if (sym.owner.isTrait && sym.hasLocalFlag &&
+      if (sym.owner.isTrait && sym.isLocalToThis &&
               (sym.getter(sym.owner.toInterface) == NoSymbol))
         sym.makeNotPrivate(sym.owner)
       tp
@@ -230,7 +216,7 @@ abstract class ExplicitOuter extends InfoTransform
    *  values for outer parameters of constructors.
    *  The class provides methods for referencing via outer.
    */
-  abstract class OuterPathTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+  abstract class OuterPathTransformer(unit: CompilationUnit) extends TypingTransformer(unit) with UnderConstructionTransformer {
     /** The directly enclosing outer parameter, if we are in a constructor */
     protected var outerParam: Symbol = NoSymbol
 
@@ -239,9 +225,10 @@ abstract class ExplicitOuter extends InfoTransform
      *
      * Will return `EmptyTree` if there is no outer accessor because of a premature self reference.
      */
-    protected def outerValue: Tree =
-      if (outerParam != NoSymbol) ID(outerParam)
-      else outerSelect(THIS(currentClass))
+    protected def outerValue: Tree = outerParam match {
+      case NoSymbol   => outerSelect(gen.mkAttributedThis(currentClass))
+      case outerParam => gen.mkAttributedIdent(outerParam)
+    }
 
     /** Select and apply outer accessor from 'base'
      *  The result is typed but not positioned.
@@ -281,11 +268,6 @@ abstract class ExplicitOuter extends InfoTransform
      *  <blockquote><pre>`base'.$outer$$C1 ... .$outer$$Cn</pre></blockquote>
      *  which refers to the outer instance of class to of
      *  value base. The result is typed but not positioned.
-     *
-     *  @param base ...
-     *  @param from ...
-     *  @param to   ...
-     *  @return     ...
      */
     protected def outerPath(base: Tree, from: Symbol, to: Symbol): Tree = {
       //Console.println("outerPath from "+from+" to "+to+" at "+base+":"+base.tpe)
@@ -294,34 +276,19 @@ abstract class ExplicitOuter extends InfoTransform
       else outerPath(outerSelect(base), from.outerClass, to)
     }
 
-
-    /** The stack of class symbols in which a call to this() or to the super
-      * constructor, or early definition is active
-      */
-    protected def isUnderConstruction(clazz: Symbol) = selfOrSuperCalls contains clazz
-    protected val selfOrSuperCalls = mutable.Stack[Symbol]()
-    @inline protected def inSelfOrSuperCall[A](sym: Symbol)(a: => A) = {
-      selfOrSuperCalls push sym
-      try a finally selfOrSuperCalls.pop()
-    }
-
     override def transform(tree: Tree): Tree = {
+      def sym = tree.symbol
       val savedOuterParam = outerParam
       try {
         tree match {
           case Template(_, _, _) =>
             outerParam = NoSymbol
-          case DefDef(_, _, _, vparamss, _, _) =>
-            if (tree.symbol.isClassConstructor && isInner(tree.symbol.owner)) {
-              outerParam = vparamss.head.head.symbol
-              assert(outerParam.name startsWith nme.OUTER, outerParam.name)
-            }
+          case DefDef(_, _, _, (param :: _) :: _, _, _) if sym.isClassConstructor && isInner(sym.owner) =>
+            outerParam = param.symbol
+            assert(outerParam.name startsWith nme.OUTER, outerParam.name)
           case _ =>
         }
-        if ((treeInfo isSelfOrSuperConstrCall tree) || (treeInfo isEarlyDef tree))
-          inSelfOrSuperCall(currentOwner.owner)(super.transform(tree))
-        else
-          super.transform(tree)
+        super.transform(tree)
       }
       finally outerParam = savedOuterParam
     }
@@ -387,22 +354,14 @@ abstract class ExplicitOuter extends InfoTransform
 
     /** The definition tree of the outer accessor of current class
      */
-    def outerFieldDef: Tree =
-      VAL(outerField(currentClass)) === EmptyTree
+    def outerFieldDef: Tree = ValDef(outerField(currentClass))
 
     /** The definition tree of the outer accessor of current class
      */
-    def outerAccessorDef: Tree = {
-      val outerAcc = outerAccessor(currentClass)
-      var rhs: Tree =
-        if (outerAcc.isDeferred) EmptyTree
-        else This(currentClass) DOT outerField(currentClass)
-
-      /** If we don't re-type the tree, we see self-type related crashes like #266.
-       */
-      localTyper typed {
-        (DEF(outerAcc) withPos currentClass.pos withType null) === rhs
-      }
+    def outerAccessorDef: Tree = localTyper typed {
+      val acc = outerAccessor(currentClass)
+      val rhs = if (acc.isDeferred) EmptyTree else Select(This(currentClass), outerField(currentClass))
+      DefDef(acc, rhs)
     }
 
     /** The definition tree of the outer accessor for class mixinClass.
@@ -423,80 +382,8 @@ abstract class ExplicitOuter extends InfoTransform
         else if (mixinPrefix.typeArgs.nonEmpty) gen.mkAttributedThis(mixinPrefix.typeSymbol)
         else gen.mkAttributedQualifier(mixinPrefix)
       )
-      localTyper typed {
-        (DEF(outerAcc) withPos currentClass.pos) === {
-          // Need to cast for nested outer refs in presence of self-types. See ticket #3274.
-          gen.mkCast(transformer.transform(path), outerAcc.info.resultType)
-        }
-      }
-    }
-
-    // requires settings.XoldPatmat.value
-    def matchTranslation(tree: Match) = {
-      val Match(selector, cases) = tree
-      var nselector = transform(selector)
-
-      def makeGuardDef(vs: List[Symbol], guard: Tree) = {
-        val gdname = unit.freshTermName("gd")
-        val method = currentOwner.newMethod(gdname, tree.pos, SYNTHETIC)
-        val params = method newSyntheticValueParams vs.map(_.tpe)
-        method setInfo new MethodType(params, BooleanClass.tpe)
-
-        localTyper typed {
-          DEF(method) === guard.changeOwner(currentOwner -> method).substituteSymbols(vs, params)
-        }
-      }
-
-      val nguard = new ListBuffer[Tree]
-      val ncases =
-        for (CaseDef(pat, guard, body) <- cases) yield {
-          // Strip out any unused pattern bindings up front
-          val patternIdents = for (b @ Bind(_, _) <- pat) yield b.symbol
-          val references: Set[Symbol] = Set(guard, body) flatMap { t => for (id @ Ident(name) <- t) yield id.symbol }
-          val (used, unused) = patternIdents partition references
-          val strippedPat = if (unused.isEmpty) pat else new RemoveBindingsTransformer(unused.toSet) transform pat
-
-          val gdcall =
-            if (guard == EmptyTree) EmptyTree
-            else {
-              val guardDef = makeGuardDef(used, guard)
-              nguard += transform(guardDef) // building up list of guards
-
-              localTyper typed (Ident(guardDef.symbol) APPLY (used map Ident))
-            }
-
-          (CASE(transform(strippedPat)) IF gdcall) ==> transform(body)
-        }
-
-      val (checkExhaustive, requireSwitch) = nselector match {
-        case Typed(nselector1, tpt) =>
-          val unchecked = tpt.tpe hasAnnotation UncheckedClass
-          if (unchecked)
-            nselector = nselector1
-
-          // Don't require a tableswitch if there are 1-2 casedefs
-          // since the matcher intentionally emits an if-then-else.
-          (!unchecked, treeInfo.isSwitchAnnotation(tpt.tpe) && ncases.size > 2)
-        case _  =>
-          (true, false)
-      }
-
-      val t = atPos(tree.pos) {
-        val context     = MatrixContext(currentUnit, transform, localTyper, currentOwner, tree.tpe)
-        val t_untyped   = handlePattern(nselector, ncases, checkExhaustive, context)
-
-        /* if @switch annotation is present, verify the resulting tree is a Match */
-        if (requireSwitch) t_untyped match {
-          case Block(_, Match(_, _))  => // ok
-          case _                      =>
-            unit.error(tree.pos, "could not emit switch for @switch annotated match")
-        }
-
-        localTyper.typed(t_untyped, context.matchResultType)
-      }
-
-      if (nguard.isEmpty) t
-      else Block(nguard.toList, t) setType t.tpe
+      // Need to cast for nested outer refs in presence of self-types. See ticket #3274.
+      localTyper typed DefDef(outerAcc, gen.mkCast(transformer.transform(path), outerAcc.info.resultType))
     }
 
     /** The main transformation method */
@@ -583,14 +470,10 @@ abstract class ExplicitOuter extends InfoTransform
           })
           super.transform(treeCopy.Apply(tree, sel, outerVal :: args))
 
-        // entry point for pattern matcher translation
-        case m: Match if settings.XoldPatmat.value => // the new pattern matcher runs in its own phase right after typer
-          matchTranslation(m)
-
         // for the new pattern matcher
         // base.<outer>.eq(o) --> base.$outer().eq(o) if there's an accessor, else the whole tree becomes TRUE
         // TODO remove the synthetic `<outer>` method from outerFor??
-        case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) if !settings.XoldPatmat.value =>
+        case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) =>
           val outerFor = sel.symbol.owner.toInterface // TODO: toInterface necessary?
           val acc = outerAccessor(outerFor)
 
@@ -599,24 +482,17 @@ abstract class ExplicitOuter extends InfoTransform
               // at least don't crash... this duplicates maybeOmittable from constructors
               (acc.owner.isEffectivelyFinal && !acc.isOverridingSymbol)) {
             unit.uncheckedWarning(tree.pos, "The outer reference in this type test cannot be checked at run time.")
-            return transform(TRUE) // urgh... drop condition if there's no accessor (or if it may disappear after constructors)
+            transform(TRUE) // urgh... drop condition if there's no accessor (or if it may disappear after constructors)
           } else {
             // println("(base, acc)= "+(base, acc))
             val outerSelect = localTyper typed Apply(Select(base, acc), Nil)
             // achieves the same as: localTyper typed atPos(tree.pos)(outerPath(base, base.tpe.typeSymbol, outerFor.outerClass))
             // println("(b, tpsym, outerForI, outerFor, outerClass)= "+ (base, base.tpe.typeSymbol, outerFor, sel.symbol.owner, outerFor.outerClass))
             // println("outerSelect = "+ outerSelect)
-            return transform(treeCopy.Apply(tree, treeCopy.Select(eqsel, outerSelect, eq), args))
+            transform(treeCopy.Apply(tree, treeCopy.Select(eqsel, outerSelect, eq), args))
           }
 
         case _ =>
-          if (settings.Xmigration.value < ScalaVersion.twoDotEight) tree match {
-            case TypeApply(fn @ Select(qual, _), args) if fn.symbol == Object_isInstanceOf || fn.symbol == Any_isInstanceOf =>
-              if (isArraySeqTest(qual.tpe, args.head.tpe))
-                unit.warning(tree.pos, "An Array will no longer match as Seq[_].")
-            case _ => ()
-          }
-
           val x = super.transform(tree)
           if (x.tpe eq null) x
           else x setType transformInfo(currentOwner, x.tpe)
@@ -625,7 +501,7 @@ abstract class ExplicitOuter extends InfoTransform
 
     /** The transformation method for whole compilation units */
     override def transformUnit(unit: CompilationUnit) {
-      afterExplicitOuter(super.transformUnit(unit))
+      exitingExplicitOuter(super.transformUnit(unit))
     }
   }
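
ExplicitOuter makes the link from an inner class to its enclosing instance explicit through generated outer fields and accessors. A hedged source-level sketch of the shape it operates on (the $outer member itself is compiler-generated, not written by hand):

    class Outer(val label: String) {
      class Inner {
        // `label` resolves through Outer.this; after this phase that access
        // goes through Inner's generated outer accessor.
        def describe: String = label + ": inner"
      }
    }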
 
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index e0c0cd0..2235a93 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -8,9 +8,6 @@ package transform
 import symtab._
 import Flags._
 import scala.collection.{ mutable, immutable }
-import scala.collection.mutable
-import scala.tools.nsc.util.FreshNameCreator
-import scala.runtime.ScalaRunTime.{ isAnyVal, isTuple }
 
 /**
  * Perform Step 1 in the inline classes SIP: Creates extension methods for all
@@ -23,7 +20,6 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
 
   import global._ // the global environment
   import definitions._ // standard classes and methods
-  import typer.{ typed, atOwner } // methods to type trees
 
   /** the following two members override abstract members in Transform */
   val phaseName: String = "extmethods"
@@ -70,7 +66,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
   }
 
   /** Return the extension method that corresponds to given instance method `meth`. */
-  def extensionMethod(imeth: Symbol): Symbol = atPhase(currentRun.refchecksPhase) {
+  def extensionMethod(imeth: Symbol): Symbol = enteringPhase(currentRun.refchecksPhase) {
     val companionInfo = companionModuleForce(imeth.owner).info
     val candidates = extensionNames(imeth) map (companionInfo.decl(_)) filter (_.exists)
     val matching = candidates filter (alt => normalize(alt.tpe, imeth.owner) matches imeth.tpe)
@@ -87,7 +83,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
            |
            | ${candidates.map(c => c.name+":"+normalize(c.tpe, imeth.owner)).mkString("\n")}
            |
-           | Eligible Names: ${extensionNames(imeth).mkString(",")}"""")
+           | Eligible Names: ${extensionNames(imeth).mkString(",")}" """)
     matching.head
   }
 
@@ -133,7 +129,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
       if (seen contains clazz)
         unit.error(pos, "value class may not unbox to itself")
       else {
-        val unboxed = erasure.underlyingOfValueClass(clazz).typeSymbol
+        val unboxed = definitions.underlyingOfValueClass(clazz).typeSymbol
         if (unboxed.isDerivedValueClass) checkNonCyclic(pos, seen + clazz, unboxed)
       }
 
@@ -185,6 +181,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
       //  bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154]
       // good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151]
     }
+
     override def transform(tree: Tree): Tree = {
       tree match {
         case Template(_, _, _) =>
@@ -194,6 +191,9 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
             checkNonCyclic(currentOwner.pos, Set(), currentOwner) */
             extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree]
             currentOwner.primaryConstructor.makeNotPrivate(NoSymbol)
+            // SI-7859 make param accessors accessible so the erasure can generate unbox operations.
+            val paramAccessors = currentOwner.info.decls.filter(sym => sym.isParamAccessor && sym.isMethod)
+            paramAccessors.foreach(_.makeNotPrivate(currentOwner))
             super.transform(tree)
           } else if (currentOwner.isStaticOwner) {
             super.transform(tree)
@@ -206,11 +206,12 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
           val companion     = origThis.companionModule
 
           def makeExtensionMethodSymbol = {
-            val extensionName = extensionNames(origMeth).head
+            val extensionName = extensionNames(origMeth).head.toTermName
             val extensionMeth = (
-              companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
+              companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
                 setAnnotations origMeth.annotations
             )
+            origMeth.removeAnnotation(TailrecClass) // it's on the extension method, now.
             companion.info.decls.enter(extensionMeth)
           }
 
@@ -224,21 +225,22 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
           val extensionParams = allParameters(extensionMono)
           val extensionThis   = gen.mkAttributedStableRef(thiz setPos extensionMeth.pos)
 
-          val extensionBody = (
-            rhs
+          val extensionBody: Tree = {
+            val tree = rhs
               .substituteSymbols(origTpeParams, extensionTpeParams)
               .substituteSymbols(origParams, extensionParams)
               .substituteThis(origThis, extensionThis)
               .changeOwner(origMeth -> extensionMeth)
-          )
+            new SubstututeRecursion(origMeth, extensionMeth, unit).transform(tree)
+          }
           val castBody =
             if (extensionBody.tpe <:< extensionMono.finalResultType)
               extensionBody
             else
               gen.mkCastPreservingAnnotations(extensionBody, extensionMono.finalResultType) // SI-7818 e.g. mismatched existential skolems
 
-          // Record the extension method ( FIXME: because... ? )
-          extensionDefs(companion) += atPos(tree.pos)(DefDef(extensionMeth, castBody))
+          // Record the extension method. Later, in `Extender#transformStats`, these will be added to the companion object.
+          extensionDefs(companion) += DefDef(extensionMeth, castBody)
 
           // These three lines are assembling Foo.bar$extension[T1, T2, ...]($this)
           // which leaves the actual argument application for extensionCall.
@@ -261,14 +263,43 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
 
     override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] =
       super.transformStats(stats, exprOwner) map {
-        case md @ ModuleDef(_, _, _) if extensionDefs contains md.symbol =>
-          val defns = extensionDefs(md.symbol).toList map (member =>
-            atOwner(md.symbol)(localTyper.typedPos(md.pos.focus)(member))
-          )
-          extensionDefs -= md.symbol
-          deriveModuleDef(md)(tmpl => deriveTemplate(tmpl)(_ ++ defns))
+        case md @ ModuleDef(_, _, _) =>
+          val extraStats = extensionDefs remove md.symbol match {
+            case Some(defns) => defns.toList map (defn => atOwner(md.symbol)(localTyper.typedPos(md.pos.focus)(defn.duplicate)))
+            case _           => Nil
+          }
+          if (extraStats.isEmpty) md
+          else deriveModuleDef(md)(tmpl => deriveTemplate(tmpl)(_ ++ extraStats))
         case stat =>
           stat
       }
   }
+
+  final class SubstututeRecursion(origMeth: Symbol, extensionMeth: Symbol,
+                            unit: CompilationUnit) extends TypingTransformer(unit) {
+    override def transform(tree: Tree): Tree = tree match {
+      // SI-6574 Rewrite recursive calls against the extension method so they can
+      //         be tail call optimized later. The tailcalls phases comes before
+      //         erasure, which performs this translation more generally at all call
+      //         sites.
+      //
+      //         // Source
+      //         class C[C] { def meth[M](a: A) = { { <expr>: C[C'] }.meth[M'] } }
+      //
+      //         // Translation
+      //         class C[C] { def meth[M](a: A) = { { <expr>: C[C'] }.meth[M'](a1) } }
+      //         object C   { def meth$extension[M, C](this$: C[C], a: A)
+      //                        = { meth$extension[M', C']({ <expr>: C[C'] })(a1) } }
+      case treeInfo.Applied(sel @ Select(qual, _), targs, argss) if sel.symbol == origMeth =>
+        localTyper.typedPos(tree.pos) {
+          val allArgss = List(qual) :: argss
+          val origThis = extensionMeth.owner.companionClass
+          val baseType = qual.tpe.baseType(origThis)
+          val allTargs = targs.map(_.tpe) ::: baseType.typeArgs
+          val fun = gen.mkAttributedTypeApply(gen.mkAttributedThis(extensionMeth.owner), extensionMeth, allTargs)
+          allArgss.foldLeft(fun)(Apply(_, _))
+        }
+      case _ => super.transform(tree)
+    }
+  }
 }
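
The extension-methods phase above mirrors each method of a value class as a `name$extension` method on the companion object, passing the receiver explicitly; the new SubstututeRecursion transformer additionally rewrites self-recursive calls to target that extension method so they stay tail-call-optimizable (SI-6574). A minimal illustrative sketch:

    class Wrapper(val underlying: Int) extends AnyVal {
      def incremented: Wrapper = new Wrapper(underlying + 1)
    }
    // Conceptually added to the companion (shown only as a sketch, not the
    // compiler's exact output):
    //   object Wrapper { def incremented$extension($this: Wrapper): Wrapper = ... }
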
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index cd26f95..c3fbfae 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -8,29 +8,28 @@ package transform
 
 import symtab._
 import Flags._
-import scala.collection.{ mutable, immutable }
 import scala.collection.mutable.ListBuffer
 
 abstract class Flatten extends InfoTransform {
   import global._
-  import definitions._
+  import treeInfo.isQualifierSafeToElide
 
   /** the following two members override abstract members in Transform */
   val phaseName: String = "flatten"
 
-  /** Updates the owning scope with the given symbol; returns the old symbol.
+  /** Updates the owning scope with the given symbol, unlinking any others.
    */
-  private def replaceSymbolInCurrentScope(sym: Symbol): Symbol = afterFlatten {
-    val scope = sym.owner.info.decls
-    val old   = scope lookup sym.name andAlso scope.unlink
-    scope enter sym
-
-    if (old eq NoSymbol)
-      log(s"lifted ${sym.fullLocationString}")
-    else
-      log(s"lifted ${sym.fullLocationString} after unlinking existing $old from scope.")
+  private def replaceSymbolInCurrentScope(sym: Symbol): Unit = exitingFlatten {
+    removeSymbolInCurrentScope(sym)
+    sym.owner.info.decls enter sym
+  }
 
-    old
+  private def removeSymbolInCurrentScope(sym: Symbol): Unit = exitingFlatten {
+    val scope = sym.owner.info.decls
+    val old   = (scope lookupUnshadowedEntries sym.name).toList
+    old foreach (scope unlink _)
+    def old_s = old map (_.sym) mkString ", "
+    if (old.nonEmpty) debuglog(s"In scope of ${sym.owner}, unlinked $old_s")
   }
 
   private def liftClass(sym: Symbol) {
@@ -53,7 +52,7 @@ abstract class Flatten extends InfoTransform {
     clazz.isClass && !clazz.isPackageClass && {
       // Cannot flatten here: class A[T] { object B }
       // was "at erasurePhase.prev"
-      beforeErasure(clazz.typeParams.isEmpty)
+      enteringErasure(clazz.typeParams.isEmpty)
     }
   }
 
@@ -67,11 +66,11 @@ abstract class Flatten extends InfoTransform {
         val decls1 = scopeTransform(clazz) {
           val decls1 = newScope
           if (clazz.isPackageClass) {
-            afterFlatten { decls foreach (decls1 enter _) }
+            exitingFlatten { decls foreach (decls1 enter _) }
           }
           else {
             val oldowner = clazz.owner
-            afterFlatten { oldowner.info }
+            exitingFlatten { oldowner.info }
             parents1 = parents mapConserve (this)
 
             for (sym <- decls) {
@@ -90,7 +89,7 @@ abstract class Flatten extends InfoTransform {
         val restp1 = apply(restp)
         if (restp1 eq restp) tp else copyMethodType(tp, params, restp1)
       case PolyType(tparams, restp) =>
-        val restp1 = apply(restp);
+        val restp1 = apply(restp)
         if (restp1 eq restp) tp else PolyType(tparams, restp1)
       case _ =>
         mapOver(tp)
@@ -105,25 +104,46 @@ abstract class Flatten extends InfoTransform {
     /** Buffers for lifted out classes */
     private val liftedDefs = perRunCaches.newMap[Symbol, ListBuffer[Tree]]()
 
-    override def transform(tree: Tree): Tree = {
+    override def transform(tree: Tree): Tree = postTransform {
       tree match {
         case PackageDef(_, _) =>
           liftedDefs(tree.symbol.moduleClass) = new ListBuffer
+          super.transform(tree)
         case Template(_, _, _) if tree.symbol.isDefinedInPackage =>
           liftedDefs(tree.symbol.owner) = new ListBuffer
+          super.transform(tree)
+        case ClassDef(_, _, _, _) if tree.symbol.isNestedClass =>
+          // SI-5508 Ordering important. In `object O { trait A { trait B } }`, we want `B` to appear after `A` in
+          //         the sequence of lifted trees in the enclosing package. Why does this matter? Currently, mixin
+          //         needs to transform `A` first to a chance to create accessors for private[this] trait fields
+          //         *before* it transforms inner classes that refer to them. This also fixes SI-6231.
+          //
+          //         Alternative solutions
+          //            - create the private[this] accessors eagerly in Namer (but would this cover private[this] fields
+          //              added later phases in compilation?)
+          //            - move the accessor creation to the Mixin info transformer
+          val liftedBuffer = liftedDefs(tree.symbol.enclosingTopLevelClass.owner)
+          val index = liftedBuffer.length
+          liftedBuffer.insert(index, super.transform(tree))
+          if (tree.symbol.sourceModule.isStaticModule)
+            removeSymbolInCurrentScope(tree.symbol.sourceModule)
+          EmptyTree
         case _ =>
+          super.transform(tree)
       }
-      postTransform(super.transform(tree))
     }
 
     private def postTransform(tree: Tree): Tree = {
       val sym = tree.symbol
       val tree1 = tree match {
-        case ClassDef(_, _, _, _) if sym.isNestedClass =>
-          liftedDefs(sym.enclosingTopLevelClass.owner) += tree
-          EmptyTree
-        case Select(qual, name) if (sym.isStaticModule && !sym.owner.isPackageClass) =>
-          afterFlatten(atPos(tree.pos)(gen.mkAttributedRef(sym)))
+        case Select(qual, name) if sym.isStaticModule && !sym.isTopLevel =>
+          exitingFlatten {
+            atPos(tree.pos) {
+              val ref = gen.mkAttributedRef(sym)
+              if (isQualifierSafeToElide(qual)) ref
+              else Block(List(qual), ref).setType(tree.tpe) // need to execute the qualifier but refer directly to the lifted module.
+            }
+          }
         case _ =>
           tree
       }
@@ -133,7 +153,10 @@ abstract class Flatten extends InfoTransform {
     /** Transform statements and add lifted definitions to them. */
     override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
       val stats1 = super.transformStats(stats, exprOwner)
-      if (currentOwner.isPackageClass) stats1 ::: liftedDefs(currentOwner).toList
+      if (currentOwner.isPackageClass) {
+        val lifted = liftedDefs(currentOwner).toList
+        stats1 ::: lifted
+      }
       else stats1
     }
   }
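
Flatten lifts nested classes and objects out to the enclosing package, and the SI-5508 change above makes sure enclosing definitions are transformed before the classes nested inside them. A small sketch of the shape involved:

    // After flatten, A and B conceptually live at the package level under
    // mangled names (roughly O$A and O$A$B).
    object O {
      class A {
        class B
      }
    }
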
diff --git a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
index b6dbaca..dc321e2 100644
--- a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
@@ -10,11 +10,11 @@ package transform
  * An InfoTransform contains a compiler phase that transforms trees and symbol infos -- making sure they stay consistent.
  * The symbol info is transformed assuming it is consistent right before this phase.
  * The info transformation is triggered by Symbol::rawInfo, which caches the results in the symbol's type history.
- * This way sym.info (during an atPhase(p)) can look up what the symbol's info should look like at the beginning of phase p.
+ * This way sym.info (during an enteringPhase(p)) can look up what the symbol's info should look like at the beginning of phase p.
  * (If the transformed info had not been stored yet, rawInfo will compute the info by composing the info-transformers
  *  of the most recent phase before p, up to the transformer of the phase right before p.)
  *
- * Concretely, atPhase(p) { sym.info } yields the info *before* phase p has transformed it. Imagine you're a phase and it all makes sense.
+ * Concretely, enteringPhase(p) { sym.info } yields the info *before* phase p has transformed it. Imagine you're a phase and it all makes sense.
  */
 trait InfoTransform extends Transform {
   import global.{Symbol, Type, InfoTransformer, infoTransformers}
diff --git a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
index 0af3cf7..1bbe1b8 100644
--- a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
@@ -1,9 +1,11 @@
 package scala.tools.nsc
 package transform
 
-trait InlineErasure { self: Erasure =>
-  
+trait InlineErasure {
+  self: Erasure =>
+
+/*
   import global._
   import definitions._
-
-}
\ No newline at end of file
+ */
+}
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 631468d..e38c034 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -8,9 +8,8 @@ package transform
 
 import symtab._
 import Flags._
-import util.TreeSet
 import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.{ LinkedHashMap, LinkedHashSet }
+import scala.collection.mutable.{ LinkedHashMap, LinkedHashSet, TreeSet }
 
 abstract class LambdaLift extends InfoTransform {
   import global._
@@ -32,6 +31,21 @@ abstract class LambdaLift extends InfoTransform {
     }
   }
 
+  /** scala.runtime.*Ref classes */
+  private lazy val allRefClasses: Set[Symbol] = {
+    refClass.values.toSet ++ volatileRefClass.values.toSet ++ Set(VolatileObjectRefClass, ObjectRefClass)
+  }
+
+  /** Each scala.runtime.*Ref class has a static method `create(value)` that simply instantiates the Ref to carry that value. */
+  private lazy val refCreateMethod: Map[Symbol, Symbol] = {
+    mapFrom(allRefClasses.toList)(x => getMemberMethod(x.companionModule, nme.create))
+  }
+
+  /** Quite frequently a *Ref is initialized with its zero (e.g., null, 0.toByte, etc.). The `zero()` method of the *Ref class encapsulates that pattern. */
+  private lazy val refZeroMethod: Map[Symbol, Symbol] = {
+    mapFrom(allRefClasses.toList)(x => getMemberMethod(x.companionModule, nme.zero))
+  }
+
   def transformInfo(sym: Symbol, tp: Type): Type =
     if (sym.isCapturedVariable) capturedVariableType(sym, tpe = lifted(tp), erasedTypes = true)
     else lifted(tp)
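
The refCreateMethod and refZeroMethod maps added above cache the `create` and `zero` factory methods on the scala.runtime.*Ref companions, which back captured local variables. A hedged sketch of the user code that triggers this boxing:

    // `counter` is a local var captured by the returned closure, so lambdalift
    // rewrites it into a scala.runtime.IntRef cell shared between the method
    // body and the closure.
    object CaptureDemo {
      def makeCounter(): () => Int = {
        var counter = 0
        () => { counter += 1; counter }
      }
    }
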
@@ -41,6 +55,8 @@ abstract class LambdaLift extends InfoTransform {
 
   class LambdaLifter(unit: CompilationUnit) extends explicitOuter.OuterPathTransformer(unit) {
 
+    private type SymSet = TreeSet[Symbol]
+
     /** A map storing free variables of functions and classes */
     private val free = new LinkedHashMap[Symbol, SymSet]
 
@@ -53,6 +69,12 @@ abstract class LambdaLift extends InfoTransform {
     /** Symbols that are called from an inner class. */
     private val calledFromInner = new LinkedHashSet[Symbol]
 
+    private val ord = Ordering.fromLessThan[Symbol](_ isLess _)
+    private def newSymSet = TreeSet.empty[Symbol](ord)
+
+    private def symSet(f: LinkedHashMap[Symbol, SymSet], sym: Symbol): SymSet =
+      f.getOrElseUpdate(sym, newSymSet)
+
     /** The set of symbols that need to be renamed. */
     private val renamable = newSymSet
 
@@ -92,13 +114,6 @@ abstract class LambdaLift extends InfoTransform {
     /** Buffers for lifted out classes and methods */
     private val liftedDefs = new LinkedHashMap[Symbol, List[Tree]]
 
-    private type SymSet = TreeSet[Symbol]
-
-    private def newSymSet = new TreeSet[Symbol](_ isLess _)
-
-    private def symSet(f: LinkedHashMap[Symbol, SymSet], sym: Symbol): SymSet =
-      f.getOrElseUpdate(sym, newSymSet)
-
     private def isSameOwnerEnclosure(sym: Symbol) =
       sym.owner.logicallyEnclosingMember == currentOwner.logicallyEnclosingMember
 
@@ -107,7 +122,7 @@ abstract class LambdaLift extends InfoTransform {
      *  and the owner of `sym`.
      *  Return `true` if there is no class between `enclosure` and
      *  the owner of sym.
-     *  pre: sym.isLocal, (enclosure.isMethod || enclosure.isClass)
+     *  pre: sym.isLocalToBlock, (enclosure.isMethod || enclosure.isClass)
      *
      *  The idea of `markFree` is illustrated with an example:
      *
@@ -140,10 +155,10 @@ abstract class LambdaLift extends InfoTransform {
         else {
           val ss = symSet(free, enclosure)
           if (!ss(sym)) {
-            ss addEntry sym
-            renamable addEntry sym
+            ss += sym
+            renamable += sym
             changedFreeVars = true
-            debuglog("" + sym + " is free in " + enclosure);
+            debuglog("" + sym + " is free in " + enclosure)
             if (sym.isVariable) sym setFlag CAPTURED
           }
           !enclosure.isClass
@@ -153,7 +168,7 @@ abstract class LambdaLift extends InfoTransform {
 
     private def markCalled(sym: Symbol, owner: Symbol) {
       debuglog("mark called: " + sym + " of " + sym.owner + " is called by " + owner)
-      symSet(called, owner) addEntry sym
+      symSet(called, owner) += sym
       if (sym.enclClass != owner.enclClass) calledFromInner += sym
     }
 
@@ -161,11 +176,11 @@ abstract class LambdaLift extends InfoTransform {
     private val freeVarTraverser = new Traverser {
       override def traverse(tree: Tree) {
        try { //debug
-        val sym = tree.symbol;
+        val sym = tree.symbol
         tree match {
           case ClassDef(_, _, _, _) =>
             liftedDefs(tree.symbol) = Nil
-            if (sym.isLocal) {
+            if (sym.isLocalToBlock) {
               // Don't rename implementation classes independently of their interfaces. If
               // the interface is to be renamed, then we will rename the implementation
               // class at that time. You'd think we could call ".implClass" on the trait
@@ -180,36 +195,36 @@ abstract class LambdaLift extends InfoTransform {
               if (sym.isImplClass)
                 localImplClasses((sym.owner, tpnme.interfaceName(sym.name))) = sym
               else {
-                renamable addEntry sym
+                renamable += sym
                 if (sym.isTrait)
                   localTraits((sym, sym.name)) = sym.owner
               }
             }
           case DefDef(_, _, _, _, _, _) =>
-            if (sym.isLocal) {
-              renamable addEntry sym
+            if (sym.isLocalToBlock) {
+              renamable += sym
               sym setFlag (PrivateLocal | FINAL)
             } else if (sym.isPrimaryConstructor) {
-              symSet(called, sym) addEntry sym.owner
+              symSet(called, sym) += sym.owner
             }
           case Ident(name) =>
             if (sym == NoSymbol) {
               assert(name == nme.WILDCARD)
-            } else if (sym.isLocal) {
+            } else if (sym.isLocalToBlock) {
               val owner = currentOwner.logicallyEnclosingMember
               if (sym.isTerm && !sym.isMethod) markFree(sym, owner)
               else if (sym.isMethod) markCalled(sym, owner)
-                //symSet(called, owner) addEntry sym
+                //symSet(called, owner) += sym
             }
           case Select(_, _) =>
-            if (sym.isConstructor && sym.owner.isLocal)
+            if (sym.isConstructor && sym.owner.isLocalToBlock)
               markCalled(sym, currentOwner.logicallyEnclosingMember)
           case _ =>
         }
         super.traverse(tree)
        } catch {//debug
          case ex: Throwable =>
-           Console.println("exception when traversing " + tree)
+           Console.println(s"$ex while traversing $tree")
            throw ex
        }
       }
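
    The sets maintained above (renamable, the free map filled by markFree, and called filled by
    markCalled) drive the later rewriting: a local function that gets lifted receives each of its
    free variables as an extra proxy parameter. A rough sketch of the effect, with illustrative
    names (inner$1 and y$1 are not the exact fresh names the compiler generates):

        def outer(): Int = {
          val y = 10
          def inner(z: Int) = z + y   // `y` is free in `inner`, so markFree records it
          inner(1)
        }

        // After lambdalift, approximately:
        //   def inner$1(z: Int, y$1: Int): Int = z + y$1
        //   def outer(): Int = { val y = 10; inner$1(1, y) }
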
@@ -245,16 +260,15 @@ abstract class LambdaLift extends InfoTransform {
           freshen(sym.name + nme.NAME_JOIN_STRING + sym.owner.name + nme.NAME_JOIN_STRING)
         } else {
           // SI-5652 If the lifted symbol is accessed from an inner class, it will be made public. (where?)
-          //         Generating a a unique name, mangled with the enclosing class name, avoids a VerifyError
+          //         Generating a unique name, mangled with the enclosing class name, avoids a VerifyError
           //         in the case that a sub-class happens to lift out a method with the *same* name.
-          val name = freshen(sym.name + nme.NAME_JOIN_STRING)
-          if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym)) nme.expandedName(name, sym.enclClass)
+          val name = freshen("" + sym.name + nme.NAME_JOIN_STRING)
+          if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym)) nme.expandedName(name.toTermName, sym.enclClass)
           else name
         }
       }
 
-      /** Rename a trait's interface and implementation class in coordinated fashion.
-       */
+      /* Rename a trait's interface and implementation class in coordinated fashion. */
       def renameTrait(traitSym: Symbol, implSym: Symbol) {
         val originalImplName = implSym.name
         renameSym(traitSym)
@@ -290,7 +304,7 @@ abstract class LambdaLift extends InfoTransform {
           proxies(owner) =
             for (fv <- freeValues.toList) yield {
               val proxyName = proxyNames.getOrElse(fv, fv.name)
-              val proxy = owner.newValue(proxyName, owner.pos, newFlags) setInfo fv.info
+              val proxy = owner.newValue(proxyName.toTermName, owner.pos, newFlags.toLong) setInfo fv.info
               if (owner.isClass) owner.info.decls enter proxy
               proxy
             }
@@ -348,7 +362,7 @@ abstract class LambdaLift extends InfoTransform {
 
     private def proxyRef(sym: Symbol) = {
       val psym = proxy(sym)
-      if (psym.isLocal) gen.mkAttributedIdent(psym) else memberRef(psym)
+      if (psym.isLocalToBlock) gen.mkAttributedIdent(psym) else memberRef(psym)
     }
 
     private def addFreeArgs(pos: Position, sym: Symbol, args: List[Tree]) = {
@@ -422,8 +436,15 @@ abstract class LambdaLift extends InfoTransform {
     private def liftDef(tree: Tree): Tree = {
       val sym = tree.symbol
       val oldOwner = sym.owner
-      if (sym.owner.isAuxiliaryConstructor && sym.isMethod)  // # bug 1909
-    	  sym setFlag STATIC
+      if (sym.isMethod && isUnderConstruction(sym.owner.owner)) { // # bug 1909
+         if (sym.isModule) { // Yes, it can be a module and a method, see comments on `isModuleNotMethod`!
+           // TODO promote to an implementation restriction if we can reason that this *always* leads to VerifyError.
+           // See neg/t1909-object.scala
+           def msg = s"SI-1909 Unable to STATICally lift $sym, which is defined in the self- or super-constructor call of ${sym.owner.owner}. A VerifyError is likely."
+           devWarning(tree.pos, msg)
+          } else sym setFlag STATIC
+      }
+
       sym.owner = sym.owner.enclClass
       if (sym.isClass) sym.owner = sym.owner.toInterface
       if (sym.isMethod) sym setFlag LIFTED
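
    For context on the SI-1909 guard above: the problematic shape is a definition nested in the
    argument of a self- or super-constructor call. A lifted method there can be made STATIC, but a
    nested module cannot, hence the devWarning. A hypothetical illustration of the shape (not the
    actual neg/t1909-object.scala test case):

        class Outer(f: () => Int) {
          // The argument below is evaluated before `this` is fully constructed,
          // so anything defined inside it must be lifted out of the expression.
          def this() = this({
            object Local { val x = 1 }   // a module defined in a self-constructor call
            () => Local.x
          })
        }
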
@@ -438,63 +459,28 @@ abstract class LambdaLift extends InfoTransform {
       tree match {
         case ClassDef(_, _, _, _) =>
           val tree1 = addFreeParams(tree, sym)
-          if (sym.isLocal) liftDef(tree1) else tree1
+          if (sym.isLocalToBlock) liftDef(tree1) else tree1
         case DefDef(_, _, _, _, _, _) =>
           val tree1 = addFreeParams(tree, sym)
-          if (sym.isLocal) liftDef(tree1) else tree1
+          if (sym.isLocalToBlock) liftDef(tree1) else tree1
         case ValDef(mods, name, tpt, rhs) =>
           if (sym.isCapturedVariable) {
             val tpt1 = TypeTree(sym.tpe) setPos tpt.pos
-            /* Creating a constructor argument if one isn't present. */
-            val constructorArg = rhs match {
-              case EmptyTree =>
-                sym.tpe.typeSymbol.primaryConstructor.info.paramTypes match {
-                  case List(tp) => gen.mkZero(tp)
-                  case _        =>
-                    debugwarn("Couldn't determine how to properly construct " + sym)
-                    rhs
-                }
-              case arg => arg
+
+            val refTypeSym = sym.tpe.typeSymbol
+
+            val factoryCall = typer.typedPos(rhs.pos) {
+              rhs match {
+                case EmptyTree =>
+                  val zeroMSym   = refZeroMethod(refTypeSym)
+                  gen.mkMethodCall(zeroMSym, Nil)
+                case arg =>
+                  val createMSym = refCreateMethod(refTypeSym)
+                  gen.mkMethodCall(createMSym, arg :: Nil)
+              }
             }
-            
-            /** Wrap expr argument in new *Ref(..) constructor. But try/catch
-             * is a problem because a throw will clear the stack and post catch
-             * we would expect the partially-constructed object to be on the stack
-             * for the call to init. So we recursively
-             * search for "leaf" result expressions where we know its safe
-             * to put the new *Ref(..) constructor or, if all else fails, transform
-             * an expr to { val temp=expr; new *Ref(temp) }.
-             * The reason we narrowly look for try/catch in captured var definitions 
-             * is because other try/catch expression have already been lifted
-             * see SI-6863
-             */
-            def refConstr(expr: Tree): Tree = typer.typedPos(expr.pos) {expr match {
-              // very simple expressions can be wrapped in a new *Ref(expr) because they can't have
-              // a try/catch in final expression position.
-              case Ident(_) | Apply(_, _) | Literal(_) | New(_) | Select(_, _) | Throw(_) | Assign(_, _) | ValDef(_, _, _, _) | Return(_) | EmptyTree =>
-                New(sym.tpe, expr)
-              case Try(block, catches, finalizer) =>
-                Try(refConstr(block), catches map refConstrCase, finalizer)
-              case Block(stats, expr) =>
-                Block(stats, refConstr(expr))
-              case If(cond, trueBranch, falseBranch) =>
-                If(cond, refConstr(trueBranch), refConstr(falseBranch))
-              case Match(selector, cases) =>
-                Match(selector, cases map refConstrCase)
-              // if we can't figure out what else to do, turn expr into {val temp1 = expr; new *Ref(temp1)} to avoid
-              // any possibility of try/catch in the *Ref constructor. This should be a safe tranformation as a default
-              // though it potentially wastes a variable slot. In particular this case handles LabelDefs.
-              case _ =>
-                debuglog("assigning expr to temp: " + (expr.pos))
-                val tempSym = currentOwner.newValue(unit.freshTermName("temp"), expr.pos) setInfo expr.tpe
-                val tempDef = ValDef(tempSym, expr) setPos expr.pos
-                val tempRef = Ident(tempSym) setPos expr.pos 
-                Block(tempDef, New(sym.tpe, tempRef))
-            }}
-            def refConstrCase(cdef: CaseDef): CaseDef =
-              CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
-
-            treeCopy.ValDef(tree, mods, name, tpt1, refConstr(constructorArg))
+
+            treeCopy.ValDef(tree, mods, name, tpt1, factoryCall)
           } else tree
         case Return(Block(stats, value)) =>
           Block(stats, treeCopy.Return(tree, value)) setType tree.tpe setPos tree.pos
@@ -510,10 +496,10 @@ abstract class LambdaLift extends InfoTransform {
           treeCopy.Assign(tree, qual, rhs)
         case Ident(name) =>
           val tree1 =
-            if (sym != NoSymbol && sym.isTerm && !sym.isLabel)
+            if (sym.isTerm && !sym.isLabel)
               if (sym.isMethod)
                 atPos(tree.pos)(memberRef(sym))
-              else if (sym.isLocal && !isSameOwnerEnclosure(sym))
+              else if (sym.isLocalToBlock && !isSameOwnerEnclosure(sym))
                 atPos(tree.pos)(proxyRef(sym))
               else tree
             else tree
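
    The rewrite above drops the hand-rolled `new *Ref(expr)` wrapping, with its special-casing of
    try/catch in tail position, in favour of calls to factory methods on the runtime Ref classes,
    so no partially-constructed object is ever left on the stack. A minimal sketch of what a
    captured mutable local amounts to at the source level, assuming the factories resolved by
    refCreateMethod/refZeroMethod are scala.runtime.IntRef.create and IntRef.zero:

        import scala.runtime.IntRef

        object CapturedVarSketch extends App {
          // For `var i = 0` captured and mutated by a nested function, the compiler
          // boxes the variable in a Ref cell; conceptually:
          val i: IntRef = IntRef.create(0)   // initializer present -> refCreateMethod
          val j: IntRef = IntRef.zero()      // no initializer      -> refZeroMethod
          val inc = () => i.elem += 1
          inc(); inc()
          assert(i.elem == 2 && j.elem == 0)
        }
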
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 21213cf..b71d14a 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -68,7 +68,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
       curTree = tree
 
       tree match {
-        
+
         case Block(_, _) =>
           val block1 = super.transform(tree)
           val Block(stats, expr) = block1
@@ -79,7 +79,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
               List(stat)
           })
           treeCopy.Block(block1, stats1, expr)
-          
+
         case DefDef(_, _, _, _, _, rhs) => atOwner(tree.symbol) {
           val (res, slowPathDef) = if (!sym.owner.isClass && sym.isLazy) {
             val enclosingClassOrDummyOrMethod = {
@@ -100,9 +100,9 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
             val (rhs1, sDef) = mkLazyDef(enclosingClassOrDummyOrMethod, transform(rhs), idx, sym)
             sym.resetFlag((if (lazyUnit(sym)) 0 else LAZY) | ACCESSOR)
             (rhs1, sDef)
-          } else            
+          } else
             (transform(rhs), EmptyTree)
-            
+
           val ddef1 = deriveDefDef(tree)(_ => if (LocalLazyValFinder.find(res)) typed(addBitmapDefs(sym, res)) else res)
           if (slowPathDef != EmptyTree) Block(slowPathDef, ddef1) else ddef1
         }
@@ -183,30 +183,31 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
 
       if (bmps.isEmpty) rhs else rhs match {
         case Block(assign, l @ LabelDef(name, params, _))
-          if name.toString == ("_" + methSym.name) && isMatch(params) =>
+          if (name string_== "_" + methSym.name) && isMatch(params) =>
             Block(assign, deriveLabelDef(l)(rhs => typed(prependStats(bmps, rhs))))
 
         case _ => prependStats(bmps, rhs)
       }
     }
-    
+
     def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
                       stats: List[Tree], retVal: Tree): Tree = {
-      val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, STABLE | PRIVATE)
+      val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, STABLE | PRIVATE)
       defSym setInfo MethodType(List(), lzyVal.tpe.resultType)
       defSym.owner = lzyVal.owner
       debuglog(s"crete slow compute path $defSym with owner ${defSym.owner} for lazy val $lzyVal")
       if (bitmaps.contains(lzyVal))
         bitmaps(lzyVal).map(_.owner = defSym)
       val rhs: Tree = (gen.mkSynchronizedCheck(clazz, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
-      DEF(defSym).mkTree(addBitmapDefs(lzyVal, BLOCK(rhs, retVal))) setSymbol defSym
+
+      DefDef(defSym, addBitmapDefs(lzyVal, BLOCK(rhs, retVal)))
     }
-  
-  
+
+
     def mkFastPathBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
                        stats: List[Tree], retVal: Tree): (Tree, Tree) = {
       val slowPathDef: Tree = mkSlowPathDef(clazz, lzyVal, cond, syncBody, stats, retVal)
-      (If(cond, Apply(ID(slowPathDef.symbol), List()), retVal), slowPathDef)
+      (If(cond, Apply(Ident(slowPathDef.symbol), Nil), retVal), slowPathDef)
     }
 
     /** return a 'lazified' version of rhs. Rhs should conform to the
@@ -221,7 +222,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
      *  Similarly as for normal lazy val members (see Mixin), the result will be a tree of the form
      *  { if ((bitmap&n & MASK) == 0) this.l$compute()
      *    else l$
-     *    
+     *
      *    def l$compute() = { synchronized(enclosing_class_or_dummy) {
      *      if ((bitmap$n & MASK) == 0) {
      *       l$ = <rhs>
@@ -277,8 +278,8 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
       if (bmps.length > n)
         bmps(n)
       else {
-        val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(ByteClass.tpe)
-        beforeTyper {
+        val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(ByteTpe)
+        enteringTyper {
           sym addAnnotation VolatileAttr
         }
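
    To make the lazified shape described in the comments above concrete, here is a hand-written
    approximation of what mkLazyDef and mkFastPathBody produce for a local `lazy val x = 42`; the
    names bitmap$0 and x$compute are illustrative, not the exact compiler-generated identifiers:

        object LocalLazySketch {
          def demo(): Int = {
            var x$value = 0
            var bitmap$0: Byte = 0
            def x$compute(): Int = this.synchronized {      // the generated slow path
              if ((bitmap$0 & 1) == 0) { x$value = 42; bitmap$0 = (bitmap$0 | 1).toByte }
              x$value
            }
            def x: Int = if ((bitmap$0 & 1) == 0) x$compute() else x$value   // fast path
            x + x   // the initializer runs once; the second read takes the fast path
          }
        }
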
 
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index e92450c..673bc04 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -27,14 +27,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
   private val treatedClassInfos = perRunCaches.newMap[Symbol, Type]() withDefaultValue NoType
 
   /** Map a lazy, mixedin field accessor to its trait member accessor */
-  private val initializer = perRunCaches.newMap[Symbol, Symbol]
+  private val initializer = perRunCaches.newMap[Symbol, Symbol]()
 
 // --------- helper functions -----------------------------------------------
 
   /** A member of a trait is implemented statically if its implementation after the
    *  mixin transform is in the static implementation module. To be statically
    *  implemented, a member must be a method that belonged to the trait's implementation class
-   *  before (e.g. it is not abstract). Not statically implemented are
+   *  before (i.e. it is not abstract). Not statically implemented are
    *   - non-private modules: these are implemented directly in the mixin composition class
    *     (private modules, on the other hand, are implemented statically, but their
    *      module variable is not. all such private modules are lifted, because
@@ -68,7 +68,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
    *  maps all other types to themselves.
    */
   private def toInterface(tp: Type): Type =
-    beforeMixin(tp.typeSymbol.toInterface).tpe
+    enteringMixin(tp.typeSymbol.toInterface).tpe
 
   private def isFieldWithBitmap(field: Symbol) = {
     field.info // ensure that nested objects are transformed
@@ -86,9 +86,10 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
    *  Note: The `checkinit` option does not check if transient fields are initialized.
    */
   private def needsInitFlag(sym: Symbol) = (
-        settings.checkInit.value
+        settings.checkInit
      && sym.isGetter
      && !sym.isInitializedToDefault
+     && !isConstantType(sym.info.finalResultType) // SI-4742
      && !sym.hasFlag(PARAMACCESSOR | SPECIALIZED | LAZY)
      && !sym.accessed.hasFlag(PRESUPER)
      && !sym.isOuterAccessor
@@ -102,7 +103,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
   private val toInterfaceMap = new TypeMap {
     def apply(tp: Type): Type = mapOver( tp match {
       case TypeRef(pre, sym, args) if sym.isImplClass =>
-        typeRef(pre, beforeMixin(sym.toInterface), args)
+        typeRef(pre, enteringMixin(sym.toInterface), args)
       case _ => tp
     })
   }
@@ -119,14 +120,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
    *  @param mixinClass The mixin class that produced the superaccessor
    */
   private def rebindSuper(base: Symbol, member: Symbol, mixinClass: Symbol): Symbol =
-    afterSpecialize {
+    exitingSpecialize {
       var bcs = base.info.baseClasses.dropWhile(mixinClass != _).tail
       var sym: Symbol = NoSymbol
       debuglog("starting rebindsuper " + base + " " + member + ":" + member.tpe +
             " " + mixinClass + " " + base.info.baseClasses + "/" + bcs)
       while (!bcs.isEmpty && sym == NoSymbol) {
-        if (settings.debug.value) {
-          val other = bcs.head.info.nonPrivateDecl(member.name);
+        if (settings.debug) {
+          val other = bcs.head.info.nonPrivateDecl(member.name)
           debuglog("rebindsuper " + bcs.head + " " + other + " " + other.tpe +
               " " + other.isDeferred)
         }
@@ -148,7 +149,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
         sym =>
           isConcreteAccessor(sym) &&
           !sym.hasFlag(MIXEDIN) &&
-          matchesType(sym.tpe, member.tpe, true))
+          matchesType(sym.tpe, member.tpe, alwaysMatchSimple = true))
     }
     (    bcs.head != member.owner
       && (hasOverridingAccessor(bcs.head) || isOverriddenAccessor(member, bcs.tail))
@@ -165,31 +166,31 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
     addMember(clazz, cloneBeforeErasure(mixinClass, mixinMember, clazz))
 
   def cloneBeforeErasure(mixinClass: Symbol, mixinMember: Symbol, clazz: Symbol): Symbol = {
-    val newSym = beforeErasure {
+    val newSym = enteringErasure {
       // since we used `mixinMember` from the interface that represents the trait that's
       // being mixed in, have to instantiate the interface type params (that may occur in mixinMember's
       // info) as they are seen from the class.  We can't use the member that we get from the
       // implementation class, as it's a clone that was made after erasure, and thus it does not
       // know its info at the beginning of erasure anymore.
-      //   Optimize: no need if mixinClass has no typeparams.
-      mixinMember cloneSymbol clazz modifyInfo (info =>
-        if (mixinClass.typeParams.isEmpty) info
-        else (clazz.thisType baseType mixinClass) memberInfo mixinMember
-      )
+      val sym = mixinMember cloneSymbol clazz
+
+      val erasureMap = erasure.erasure(mixinMember)
+      val erasedInterfaceInfo: Type = erasureMap(mixinMember.info)
+      val specificForwardInfo       = (clazz.thisType baseType mixinClass) memberInfo mixinMember
+      val forwarderInfo =
+        if (erasureMap(specificForwardInfo) =:= erasedInterfaceInfo)
+          specificForwardInfo
+        else {
+          erasedInterfaceInfo
+        }
+      // Optimize: no need if mixinClass has no typeparams.
+      // !!! JZ Really? What about the effect of abstract types, prefix?
+      if (mixinClass.typeParams.isEmpty) sym
+      else sym modifyInfo (_ => forwarderInfo)
     }
-    // clone before erasure got rid of type info we'll need to generate a javaSig
-    // now we'll have the type info at (the beginning of) erasure in our history,
-    // and now newSym has the info that's been transformed to fit this period
-    // (no need for asSeenFrom as phase.erasedTypes)
-    // TODO: verify we need the updateInfo and document why
-    newSym updateInfo (mixinMember.info cloneInfo newSym)
+    newSym
   }
 
-  def needsExpandedSetterName(field: Symbol) = !field.isLazy && (
-    if (field.isMethod) field.hasStableFlag
-    else !field.isMutable
-  )
-
   /** Add getters and setters for all non-module fields of an implementation
    *  class to its interface unless they are already present. This is done
    *  only once per class. The mixedin flag is used to remember whether late
@@ -197,32 +198,29 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
    *    - lazy fields don't get a setter.
    */
   def addLateInterfaceMembers(clazz: Symbol) {
-    def makeConcrete(member: Symbol) =
-      member setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED)
-
     if (treatedClassInfos(clazz) != clazz.info) {
       treatedClassInfos(clazz) = clazz.info
       assert(phase == currentRun.mixinPhase, phase)
 
-      /** Create a new getter. Getters are never private or local. They are
+      /* Create a new getter. Getters are never private or local. They are
        *  always accessors and deferred. */
       def newGetter(field: Symbol): Symbol = {
         // println("creating new getter for "+ field +" : "+ field.info +" at "+ field.locationString+(field hasFlag MUTABLE))
         val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED | ( if (field.isMutable) 0 else STABLE )
         // TODO preserve pre-erasure info?
-        clazz.newMethod(nme.getterName(field.name), field.pos, newFlags) setInfo MethodType(Nil, field.info)
+        clazz.newMethod(field.getterName, field.pos, newFlags) setInfo MethodType(Nil, field.info)
       }
 
-      /** Create a new setter. Setters are never private or local. They are
-       *  always accessors and deferred. */
+      /* Create a new setter. Setters are never private or local. They are
+       * always accessors and deferred. */
       def newSetter(field: Symbol): Symbol = {
         //println("creating new setter for "+field+field.locationString+(field hasFlag MUTABLE))
-        val setterName = nme.getterToSetter(nme.getterName(field.name))
+        val setterName = field.setterName
         val newFlags   = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED
         val setter     = clazz.newMethod(setterName, field.pos, newFlags)
         // TODO preserve pre-erasure info?
-        setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitClass.tpe)
-        if (needsExpandedSetterName(field))
+        setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitTpe)
+        if (field.needsExpandedSetterName)
           setter.name = nme.expandedSetterName(setter.name, clazz)
 
         setter
@@ -240,12 +238,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
           val getter = member.getter(clazz)
           if (getter == NoSymbol) addMember(clazz, newGetter(member))
           if (!member.tpe.isInstanceOf[ConstantType] && !member.isLazy) {
-            val setter = member.setter(clazz, needsExpandedSetterName(member))
+            val setter = member.setter(clazz)
             if (setter == NoSymbol) addMember(clazz, newSetter(member))
           }
         }
       }
-      debuglog("new defs of " + clazz + " = " + clazz.info.decls);
+      debuglog("new defs of " + clazz + " = " + clazz.info.decls)
     }
   }
 
@@ -267,7 +265,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
         resetFlag DEFERRED | lateDEFERRED
     )
 
-    /** Mix in members of implementation class mixinClass into class clazz */
+    /* Mix in members of implementation class mixinClass into class clazz */
     def mixinImplClassMembers(mixinClass: Symbol, mixinInterface: Symbol) {
       if (!mixinClass.isImplClass) debugwarn ("Impl class flag is not set " +
         ((mixinClass.debugLocationString, mixinInterface.debugLocationString)))
@@ -276,23 +274,23 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
         val imember = member overriddenSymbol mixinInterface
         imember overridingSymbol clazz match {
           case NoSymbol =>
-            if (clazz.info.findMember(member.name, 0, lateDEFERRED, false).alternatives contains imember)
+            if (clazz.info.findMember(member.name, 0, lateDEFERRED, stableOnly = false).alternatives contains imember)
               cloneAndAddMixinMember(mixinInterface, imember).asInstanceOf[TermSymbol] setAlias member
           case _        =>
         }
       }
     }
 
-    /** Mix in members of trait mixinClass into class clazz. Also,
-     *  for each lazy field in mixinClass, add a link from its mixed in member to its
-     *  initializer method inside the implclass.
+    /* Mix in members of trait mixinClass into class clazz. Also,
+     * for each lazy field in mixinClass, add a link from its mixed in member to its
+     * initializer method inside the implclass.
      */
     def mixinTraitMembers(mixinClass: Symbol) {
       // For all members of a trait's interface do:
       for (mixinMember <- mixinClass.info.decls) {
         if (isConcreteAccessor(mixinMember)) {
           if (isOverriddenAccessor(mixinMember, clazz.info.baseClasses))
-            debugwarn("!!! is overridden val: "+mixinMember.fullLocationString)
+            devWarning(s"Overridden concrete accessor: ${mixinMember.fullLocationString}")
           else {
             // mixin field accessors
             val mixedInAccessor = cloneAndAddMixinMember(mixinClass, mixinMember)
@@ -311,14 +309,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
                   // mixinMember is a value of type unit. No field needed
                   ;
                 case _ => // otherwise mixin a field as well
-                  // atPhase: the private field is moved to the implementation class by erasure,
+                  // enteringPhase: the private field is moved to the implementation class by erasure,
                   // so it can no longer be found in the mixinMember's owner (the trait)
-                  val accessed = beforePickler(mixinMember.accessed)
+                  val accessed = enteringPickler(mixinMember.accessed)
                   // #3857, need to retain info before erasure when cloning (since cloning only
                   // carries over the current entry in the type history)
-                  val sym = beforeErasure {
+                  val sym = enteringErasure {
                     // so we have a type history entry before erasure
-                    clazz.newValue(nme.getterToLocal(mixinMember.name), mixinMember.pos).setInfo(mixinMember.tpe.resultType)
+                    clazz.newValue(mixinMember.localName, mixinMember.pos).setInfo(mixinMember.tpe.resultType)
                   }
                   sym updateInfo mixinMember.tpe.resultType // info at current phase
 
@@ -379,35 +377,34 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
       var parents1 = parents
       var decls1 = decls
       if (!clazz.isPackageClass) {
-        afterMixin(clazz.owner.info)
+        exitingMixin(clazz.owner.info)
         if (clazz.isImplClass) {
           clazz setFlag lateMODULE
           var sourceModule = clazz.owner.info.decls.lookup(sym.name.toTermName)
-          if (sourceModule != NoSymbol) {
-            sourceModule setPos sym.pos
-            if (sourceModule.flags != MODULE) {
-              log("!!! Directly setting sourceModule flags from %s to MODULE".format(flagsToString(sourceModule.flags)))
-              sourceModule.flags = MODULE
-            }
-          }
-          else {
+          if (sourceModule == NoSymbol) {
             sourceModule = (
               clazz.owner.newModuleSymbol(sym.name.toTermName, sym.pos, MODULE)
                 setModuleClass sym.asInstanceOf[ClassSymbol]
             )
             clazz.owner.info.decls enter sourceModule
           }
+          else {
+            sourceModule setPos sym.pos
+            if (sourceModule.flags != MODULE) {
+              log("!!! Directly setting sourceModule flags from %s to MODULE".format(sourceModule.flagString))
+              sourceModule.flags = MODULE
+            }
+          }
           sourceModule setInfo sym.tpe
           // Companion module isn't visible for anonymous class at this point anyway
-          assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass,
-            clazz + " has no sourceModule: sym = " + sym + " sym.tpe = " + sym.tpe)
+          assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass,  s"$clazz has no sourceModule: $sym ${sym.tpe}")
           parents1 = List()
           decls1 = newScopeWith(decls.toList filter isImplementedStatically: _*)
         } else if (!parents.isEmpty) {
           parents1 = parents.head :: (parents.tail map toInterface)
         }
       }
-      //decls1 = atPhase(phase.next)(newScopeWith(decls1.toList: _*))//debug
+      //decls1 = enteringPhase(phase.next)(newScopeWith(decls1.toList: _*))//debug
       if ((parents1 eq parents) && (decls1 eq decls)) tp
       else ClassInfoType(parents1, decls1, clazz)
 
@@ -437,7 +434,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
         tree match {
           case Assign(lhs, rhs) => traverse(rhs) // assignments don't count
           case _ =>
-            if (tree.hasSymbol && tree.symbol != NoSymbol) {
+            if (tree.hasSymbolField && tree.symbol != NoSymbol) {
               val sym = tree.symbol
               if ((sym.hasAccessorFlag || (sym.isTerm && !sym.isMethod))
                   && sym.isPrivate
@@ -481,7 +478,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
     /** The typer */
     private var localTyper: erasure.Typer = _
     private def typedPos(pos: Position)(tree: Tree): Tree = localTyper.typedPos(pos)(tree)
-    private def localTyped(pos: Position, tree: Tree, pt: Type) = localTyper.typed(atPos(pos)(tree), pt)
 
     /** Map lazy values to the fields they should null after initialization. */
     private var lazyValNullables: Map[Symbol, Set[Symbol]] = _
@@ -515,7 +511,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
      *       - create a new method definition that also has a `self` parameter
      *         (which comes first) Iuli: this position is assumed by tail call elimination
      *         on a different receiver. Storing a new 'this' assumes it is located at
-     *         index 0 in the local variable table. See 'STORE_THIS' and GenJVM/GenMSIL.
+     *         index 0 in the local variable table. See 'STORE_THIS' and GenASM.
      *   - Map implementation class types in type-apply's to their interfaces
      *   - Remove all fields in implementation classes
      */
@@ -524,7 +520,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
       tree match {
         case Template(parents, self, body) =>
           localTyper = erasure.newTyper(rootContext.make(tree, currentOwner))
-          afterMixin(currentOwner.owner.info)//todo: needed?
+          exitingMixin(currentOwner.owner.info)//todo: needed?
 
           if (!currentOwner.isTrait && !isPrimitiveValueClass(currentOwner))
             addMixedinMembers(currentOwner, unit)
@@ -543,17 +539,23 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
             else EmptyTree
           }
           else {
-            if (currentOwner.isTrait && sym.isSetter && !beforePickler(sym.isDeferred)) {
+            if (currentOwner.isTrait && sym.isSetter && !enteringPickler(sym.isDeferred)) {
               sym.addAnnotation(TraitSetterAnnotationClass)
             }
             tree
           }
+        // !!! What is this doing, and why is it only looking for exactly
+        // one type parameter? It would seem to be
+        //   "Map implementation class types in type-apply's to their interfaces"
+        // from the comment on preTransform, but is there some way we should know
+        // that impl class types in type applies can only appear in single
+        // type parameter type constructors?
         case Apply(tapp @ TypeApply(fn, List(arg)), List()) =>
           if (arg.tpe.typeSymbol.isImplClass) {
             val ifacetpe = toInterface(arg.tpe)
-            arg.tpe = ifacetpe
-            tapp.tpe = MethodType(List(), ifacetpe)
-            tree.tpe = ifacetpe
+            arg setType ifacetpe
+            tapp setType MethodType(Nil, ifacetpe)
+            tree setType ifacetpe
           }
           tree
         case ValDef(_, _, _, _) if currentOwner.isImplClass =>
@@ -590,18 +592,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
         tree
     }
 
-    /** Create a static reference to given symbol <code>sym</code> of the
-     *  form <code>M.sym</code> where M is the symbol's implementation module.
+    /** Create a static reference to given symbol `sym` of the
+     *  form `M.sym` where M is the symbol's implementation module.
      */
     private def staticRef(sym: Symbol): Tree = {
       sym.owner.info        //todo: needed?
       sym.owner.owner.info  //todo: needed?
 
-      assert(
-        sym.owner.sourceModule ne NoSymbol,
-        "" + sym.fullLocationString + " in " + sym.owner.owner + " " + sym.owner.owner.info.decls
-      )
-      REF(sym.owner.sourceModule) DOT sym
+      if (sym.owner.sourceModule eq NoSymbol)
+        abort(s"Cannot create static reference to $sym because ${sym.safeOwner} has no source module")
+      else
+        REF(sym.owner.sourceModule) DOT sym
     }
 
     def needsInitAndHasOffset(sym: Symbol) =
@@ -647,34 +648,34 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
     private def addNewDefs(clazz: Symbol, stats: List[Tree]): List[Tree] = {
       val newDefs = mutable.ListBuffer[Tree]()
 
-      /** Attribute given tree and anchor at given position */
+      /* Attribute given tree and anchor at given position */
       def attributedDef(pos: Position, tree: Tree): Tree = {
         debuglog("add new def to " + clazz + ": " + tree)
         typedPos(pos)(tree)
       }
 
-      /** The position of given symbol, or, if this is undefined,
-       *  the position of the current class.
+      /* The position of given symbol, or, if this is undefined,
+       * the position of the current class.
        */
       def position(sym: Symbol) =
         if (sym.pos == NoPosition) clazz.pos else sym.pos
 
-      /** Add tree at given position as new definition */
+      /* Add tree at given position as new definition */
       def addDef(pos: Position, tree: Tree) {
         newDefs += attributedDef(pos, tree)
       }
 
-      /** Add new method definition.
+      /* Add new method definition.
        *
-       *  @param sym   The method symbol.
-       *  @param rhs   The method body.
+       * @param sym   The method symbol.
+       * @param rhs   The method body.
        */
       def addDefDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(position(sym), DefDef(sym, rhs))
       def addValDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(position(sym), ValDef(sym, rhs))
 
-      /** Add `newdefs` to `stats`, removing any abstract method definitions
-       *  in <code>stats</code> that are matched by some symbol defined in
-       *  <code>newDefs</code>.
+      /* Add `newdefs` to `stats`, removing any abstract method definitions
+       * in `stats` that are matched by some symbol defined in
+       * `newDefs`.
        */
       def add(stats: List[Tree], newDefs: List[Tree]) = {
         val newSyms = newDefs map (_.symbol)
@@ -690,30 +691,30 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
         else newDefs ::: (stats filter isNotDuplicate)
       }
 
-      /** If `stat` is a superaccessor, complete it by adding a right-hand side.
-       *  Note: superaccessors are always abstract until this point.
-       *   The method to call in a superaccessor is stored in the accessor symbol's alias field.
-       *  The rhs is:
-       *    super.A(xs)  where A is the super accessor's alias and xs are its formal parameters.
-       *  This rhs is typed and then mixin transformed.
+      /* If `stat` is a superaccessor, complete it by adding a right-hand side.
+       * Note: superaccessors are always abstract until this point.
+       *  The method to call in a superaccessor is stored in the accessor symbol's alias field.
+       * The rhs is:
+       *   super.A(xs)  where A is the super accessor's alias and xs are its formal parameters.
+       * This rhs is typed and then mixin transformed.
        */
       def completeSuperAccessor(stat: Tree) = stat match {
         case DefDef(_, _, _, vparams :: Nil, _, EmptyTree) if stat.symbol.isSuperAccessor =>
-          val rhs0 = (Super(clazz, tpnme.EMPTY) DOT stat.symbol.alias)(vparams map (v => Ident(v.symbol)): _*)
-          val rhs1 = localTyped(stat.pos, rhs0, stat.symbol.tpe.resultType)
+          val body = atPos(stat.pos)(Apply(Select(Super(clazz, tpnme.EMPTY), stat.symbol.alias), vparams map (v => Ident(v.symbol))))
+          val pt   = stat.symbol.tpe.resultType
 
-          deriveDefDef(stat)(_ => beforeMixin(transform(rhs1)))
+          copyDefDef(stat)(rhs = enteringMixin(transform(localTyper.typed(body, pt))))
         case _ =>
           stat
       }
 
-      /**
+      /*
        *  Return the bitmap field for 'offset'. Depending on the hierarchy it is possible to reuse
        *  the bitmap of its parents. If that does not exist yet we create one.
        */
       def bitmapFor(clazz0: Symbol, offset: Int, field: Symbol): Symbol = {
         val category   = bitmapCategory(field)
-        val bitmapName = nme.newBitmapName(category, offset / flagsPerBitmap(field))
+        val bitmapName = nme.newBitmapName(category, offset / flagsPerBitmap(field)).toTermName
         val sym        = clazz0.info.decl(bitmapName)
 
         assert(!sym.isOverloaded, sym)
@@ -721,15 +722,15 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
         def createBitmap: Symbol = {
           val bitmapKind =  bitmapKindForCategory(category)
           val sym = clazz0.newVariable(bitmapName, clazz0.pos) setInfo bitmapKind.tpe
-          beforeTyper(sym addAnnotation VolatileAttr)
+          enteringTyper(sym addAnnotation VolatileAttr)
 
           category match {
             case nme.BITMAP_TRANSIENT | nme.BITMAP_CHECKINIT_TRANSIENT => sym addAnnotation TransientAttr
             case _                                                     =>
           }
           val init = bitmapKind match {
-            case BooleanClass => VAL(sym) === FALSE
-            case _            => VAL(sym) === ZERO
+            case BooleanClass => ValDef(sym, FALSE)
+            case _            => ValDef(sym, ZERO)
           }
 
           sym setFlag PrivateLocal
@@ -738,10 +739,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
           sym
         }
 
-        if (sym ne NoSymbol)
-          sym
-        else
-          createBitmap
+        sym orElse createBitmap
       }
 
       def maskForOffset(offset: Int, sym: Symbol, kind: ClassSymbol): Tree = {
@@ -749,7 +747,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
         if (kind == LongClass ) LIT(1L << realOffset) else LIT(1 << realOffset)
       }
 
-      /** Return an (untyped) tree of the form 'Clazz.this.bmp = Clazz.this.bmp | mask'. */
+      /* Return an (untyped) tree of the form 'Clazz.this.bmp = Clazz.this.bmp | mask'. */
       def mkSetFlag(clazz: Symbol, offset: Int, valSym: Symbol, kind: ClassSymbol): Tree = {
         val bmp      = bitmapFor(clazz, offset, valSym)
         def mask     = maskForOffset(offset, valSym, kind)
@@ -759,8 +757,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
         x === newValue
       }
 
-      /** Return an (untyped) tree of the form 'clazz.this.bitmapSym & mask (==|!=) 0', the
-       *  precise comparison operator depending on the value of 'equalToZero'.
+      /* Return an (untyped) tree of the form 'clazz.this.bitmapSym & mask (==|!=) 0', the
+       * precise comparison operator depending on the value of 'equalToZero'.
        */
       def mkTest(clazz: Symbol, mask: Tree, bitmapSym: Symbol, equalToZero: Boolean, kind: ClassSymbol): Tree = {
         val bitmapTree  = (This(clazz) DOT bitmapSym)
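
    The helpers above reduce to ordinary bit twiddling on per-class bitmap fields. A stand-alone
    sketch of the arithmetic with a plain Int bitmap (the real code selects Boolean/Byte/Int/Long
    bitmaps via bitmapKind and uses 1L << realOffset for the Long case):

        object BitmapSketch {
          private var bitmap$0 = 0                        // stands in for Clazz.this.bmp
          private def mask(offset: Int) = 1 << offset     // maskForOffset
          def setFlag(offset: Int): Unit =                // mkSetFlag: bmp = bmp | mask
            bitmap$0 |= mask(offset)
          def isSet(offset: Int): Boolean =               // mkTest with equalToZero = false
            (bitmap$0 & mask(offset)) != 0
        }
        // e.g. BitmapSketch.setFlag(3); BitmapSketch.isSet(3) is then true, isSet(4) false
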
@@ -777,12 +775,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
 
       def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
                         stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Symbol = {
-        val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, PRIVATE)
+        val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, PRIVATE)
         val params = defSym newSyntheticValueParams args.map(_.symbol.tpe)
         defSym setInfoAndEnter MethodType(params, lzyVal.tpe.resultType)
         val rhs: Tree = (gen.mkSynchronizedCheck(attrThis, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
         val strictSubst = new TreeSymSubstituterWithCopying(args.map(_.symbol), params)
-        addDef(position(defSym), DEF(defSym).mkTree(strictSubst(BLOCK(rhs, retVal))) setSymbol defSym)
+        addDef(position(defSym), DefDef(defSym, strictSubst(BLOCK(rhs, retVal))))
         defSym
       }
 
@@ -798,19 +796,19 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
       }
 
 
-	  /** Always copy the tree if we are going to perform sym substitution,
-	   *  otherwise we will side-effect on the tree that is used in the fast path
-	   */
-	  class TreeSymSubstituterWithCopying(from: List[Symbol], to: List[Symbol]) extends TreeSymSubstituter(from, to) {
-	    override def transform(tree: Tree): Tree =
-	      if (tree.hasSymbol && from.contains(tree.symbol))
-	        super.transform(tree.duplicate)
-	      else super.transform(tree.duplicate)
+      /* Always copy the tree if we are going to perform sym substitution,
+       * otherwise we will side-effect on the tree that is used in the fast path
+       */
+      class TreeSymSubstituterWithCopying(from: List[Symbol], to: List[Symbol]) extends TreeSymSubstituter(from, to) {
+        override def transform(tree: Tree): Tree =
+          if (tree.hasSymbolField && from.contains(tree.symbol))
+            super.transform(tree.duplicate)
+          else super.transform(tree.duplicate)
 
-	    override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree)
-	  }
+        override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree)
+      }
 
-      /** return a 'lazified' version of rhs. It uses double-checked locking to ensure
+      /*  return a 'lazified' version of rhs. It uses double-checked locking to ensure
        *  initialization is performed at most once. For performance reasons the double-checked
        *  locking is split into two parts, the first (fast) path checks the bitmap without
        *  synchronizing, and if that fails it initializes the lazy val within the
@@ -819,8 +817,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
        *  Private fields used only in this initializer are subsequently set to null.
        *
        *  @param clazz The class symbol
+       *  @param lzyVal The symbol of this lazy field
        *  @param init The tree which initializes the field ( f = <rhs> )
-       *  @param fieldSym The symbol of this lazy field
        *  @param offset The offset of this field in the flags bitmap
        *
        *  The result will be a tree of the form
@@ -853,7 +851,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
         val bitmapSym = bitmapFor(clazz, offset, lzyVal)
         val kind      = bitmapKind(lzyVal)
         val mask      = maskForOffset(offset, lzyVal, kind)
-        def cond      = mkTest(clazz, mask, bitmapSym, true, kind)
+        def cond      = mkTest(clazz, mask, bitmapSym, equalToZero = true, kind)
         val nulls     = lazyValNullables(lzyVal).toList sortBy (_.id) map nullify
         def syncBody  = init ::: List(mkSetFlag(clazz, offset, lzyVal, kind), UNIT)
 
@@ -870,7 +868,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
             val cond                    = Apply(Select(moduleVarRef, Object_eq), List(NULL))
             mkFastPathBody(clazz, moduleSym, cond, List(assign), List(NULL), returnTree, attrThis, args)
           case _ =>
-            abort("Invalid getter " + rhs + " for module in class " + clazz)
+            abort(s"Invalid getter $rhs for module in $clazz")
         }
 
       def mkCheckedAccessor(clazz: Symbol, retVal: Tree, offset: Int, pos: Position, fieldSym: Symbol): Tree = {
@@ -878,20 +876,20 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
         val bitmapSym = bitmapFor(clazz, offset, sym)
         val kind      = bitmapKind(sym)
         val mask      = maskForOffset(offset, sym, kind)
-        val msg       = "Uninitialized field: " + unit.source + ": " + pos.line
+        val msg       = s"Uninitialized field: ${unit.source}: ${pos.line}"
         val result    =
-          IF (mkTest(clazz, mask, bitmapSym, false, kind)) .
+          IF (mkTest(clazz, mask, bitmapSym, equalToZero = false, kind)) .
             THEN (retVal) .
-            ELSE (THROW(UninitializedErrorClass, LIT(msg)))
+            ELSE (Throw(NewFromConstructor(UninitializedFieldConstructor, LIT(msg))))
 
         typedPos(pos)(BLOCK(result, retVal))
       }
 
-      /** Complete lazy field accessors. Applies only to classes,
-       *  for it's own (non inherited) lazy fields. If 'checkinit'
-       *  is enabled, getters that check for the initialized bit are
-       *  generated, and the class constructor is changed to set the
-       *  initialized bits.
+      /* Complete lazy field accessors. Applies only to classes,
+       * for its own (non-inherited) lazy fields. If 'checkinit'
+       * is enabled, getters that check for the initialized bit are
+       * generated, and the class constructor is changed to set the
+       * initialized bits.
        */
       def addCheckedGetters(clazz: Symbol, stats: List[Tree]): List[Tree] = {
         def dd(stat: DefDef) = {
@@ -922,7 +920,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
           else if (sym.isConstructor) {
             deriveDefDef(stat)(addInitBits(clazz, _))
           }
-          else if (settings.checkInit.value && !clazz.isTrait && sym.isSetter) {
+          else if (settings.checkInit && !clazz.isTrait && sym.isSetter) {
             val getter = sym.getter(clazz)
             if (needsInitFlag(getter) && fieldOffset.isDefinedAt(getter))
               deriveDefDef(stat)(rhs => Block(List(rhs, localTyper.typed(mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter)))), UNIT))
@@ -972,23 +970,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
         }
       }
 
-      /** Adds statements to set the 'init' bit for each field initialized
-       *  in the body of a constructor.
+      /* Adds statements to set the 'init' bit for each field initialized
+       * in the body of a constructor.
        */
       def addInitBits(clazz: Symbol, rhs: Tree): Tree =
         new AddInitBitsTransformer(clazz) transform rhs
 
-      def isCheckInitField(field: Symbol) =
-        needsInitFlag(field) && !field.isDeferred
-
-      def superClassesToCheck(clazz: Symbol) =
-        clazz.ancestors filterNot (_ hasFlag TRAIT | JAVA)
-
       // begin addNewDefs
 
-      /** Fill the map from fields to offset numbers.
-       *  Instead of field symbols, the map keeps their getter symbols. This makes
-       *  code generation easier later.
+      /* Fill the map from fields to offset numbers.
+       * Instead of field symbols, the map keeps their getter symbols. This makes
+       * code generation easier later.
        */
       def buildBitmapOffsets() {
         def fold(fields: List[Symbol], category: Name) = {
@@ -1044,16 +1036,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
         }
         // if class is not a trait add accessor definitions
         else if (!clazz.isTrait) {
-          if (sym.hasAccessorFlag && (!sym.isDeferred || sym.hasFlag(lateDEFERRED))) {
+          // This needs to be a def to avoid sharing trees
+          def accessedRef = accessedReference(sym)
+          if (isConcreteAccessor(sym)) {
             // add accessor definitions
             addDefDef(sym, {
-              val accessedRef = accessedReference(sym)
               if (sym.isSetter) {
                 if (isOverriddenSetter(sym)) UNIT
                 else accessedRef match {
-                  case Literal(_) => accessedRef
-                  case _ =>
-                    val init   = Assign(accessedRef, Ident(sym.firstParam))
+                  case ref @ Literal(_) => ref
+                  case ref =>
+                    val init   = Assign(ref, Ident(sym.firstParam))
                     val getter = sym.getter(clazz)
 
                     if (!needsInitFlag(getter)) init
@@ -1063,16 +1056,18 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
               else if (needsInitFlag(sym))
                 mkCheckedAccessor(clazz, accessedRef, fieldOffset(sym), sym.pos, sym)
               else
-                gen.mkCheckInit(accessedRef)
+                accessedRef
             })
           }
           else if (sym.isModule && !(sym hasFlag LIFTED | BRIDGE)) {
             // add modules
-            val vdef = gen.mkModuleVarDef(sym)
-            addDef(position(sym), vdef)
+            val vsym = sym.owner.newModuleVarSymbol(sym)
+            addDef(position(sym), ValDef(vsym))
 
-            val rhs          = gen.newModule(sym, vdef.symbol.tpe)
-            val assignAndRet = gen.mkAssignAndReturn(vdef.symbol, rhs)
+            // !!! TODO - unravel the enormous duplication between this code and
+            // eliminateModuleDefs in RefChecks.
+            val rhs          = gen.newModule(sym, vsym.tpe)
+            val assignAndRet = gen.mkAssignAndReturn(vsym, rhs)
             val attrThis     = gen.mkAttributedThis(clazz)
             val rhs1         = mkInnerClassAccessorDoubleChecked(attrThis, assignAndRet, sym, List())
 
@@ -1090,7 +1085,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
             // add forwarders
             assert(sym.alias != NoSymbol, sym)
             // debuglog("New forwarder: " + sym.defString + " => " + sym.alias.defString)
-            if (!sym.isTermMacro) addDefDef(sym, Apply(staticRef(sym.alias), gen.mkAttributedThis(clazz) :: sym.paramss.head.map(Ident)))
+            if (!sym.isMacro) addDefDef(sym, Apply(staticRef(sym.alias), gen.mkAttributedThis(clazz) :: sym.paramss.head.map(Ident)))
           }
         }
       }
@@ -1135,7 +1130,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
       // change every node type that refers to an implementation class to its
       // corresponding interface, unless the node's symbol is an implementation class.
       if (tree.tpe.typeSymbol.isImplClass && ((sym eq null) || !sym.isImplClass))
-        tree.tpe = toInterface(tree.tpe)
+        tree modifyType toInterface
 
       tree match {
         case templ @ Template(parents, self, body) =>
@@ -1151,9 +1146,9 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
           qual
 
         case Apply(Select(qual, _), args) =>
-          /** Changes <code>qual.m(args)</code> where m refers to an implementation
+          /*  Changes `qual.m(args)` where m refers to an implementation
            *  class method to Q.m(S, args) where Q is the implementation module of
-           *  <code>m</code> and S is the self parameter for the call, which
+           *  `m` and S is the self parameter for the call, which
            *  is determined as follows:
            *     - if qual != super, qual itself
            *     - if qual == super, and we are in an implementation class,
@@ -1164,7 +1159,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
             def implSym = implClass(sym.owner).info.member(sym.name)
             assert(target ne NoSymbol,
               List(sym + ":", sym.tpe, sym.owner, implClass(sym.owner), implSym,
-                  beforePrevPhase(implSym.tpe), phase) mkString " "
+                  enteringPrevPhase(implSym.tpe), phase) mkString " "
             )
             typedPos(tree.pos)(Apply(staticRef(target), transformSuper(qual) :: args))
           }
@@ -1193,7 +1188,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
                   typedPos(tree.pos)((transformSuper(qual) DOT sym1)())
                 }
                 else {
-                  staticCall(beforePrevPhase(sym.overridingSymbol(implClass(sym.owner))))
+                  staticCall(enteringPrevPhase(sym.overridingSymbol(implClass(sym.owner))))
                 }
               }
               else {
@@ -1211,36 +1206,19 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
           tree
 
         case Select(qual, name) if sym.owner.isImplClass && !isStaticOnly(sym) =>
-          assert(!sym.isMethod, "no method allowed here: %s%s %s".format(sym, sym.isImplOnly, flagsToString(sym.flags)))
+          assert(!sym.isMethod, "no method allowed here: %s%s %s".format(sym, sym.isImplOnly, sym.flagString))
           // refer to fields in some implementation class via an abstract
           // getter in the interface.
           val iface  = toInterface(sym.owner.tpe).typeSymbol
           val ifaceGetter = sym getter iface
 
-          def si6231Restriction() {
-            // See SI-6231 comments in LamdaLift for ideas on how to lift the restriction.
-            val msg = sm"""Implementation restriction: local ${iface.fullLocationString} is unable to automatically capture the
-                |free variable ${sym} on behalf of ${currentClass}. You can manually assign it to a val inside the trait,
-                |and refer that that val in ${currentClass}. For more details, see SI-6231."""
-            reporter.error(tree.pos, msg)
-          }
-
-          if (ifaceGetter == NoSymbol) {
-            if (sym.isParamAccessor) {
-              si6231Restriction()
-              EmptyTree
-            }
-            else abort("No getter for " + sym + " in " + iface)
-          }
+          if (ifaceGetter == NoSymbol) abort("No getter for " + sym + " in " + iface)
           else typedPos(tree.pos)((qual DOT ifaceGetter)())
 
         case Assign(Apply(lhs @ Select(qual, _), List()), rhs) =>
           // assign to fields in some implementation class via an abstract
           // setter in the interface.
-          def setter = lhs.symbol.setter(
-            toInterface(lhs.symbol.owner.tpe).typeSymbol,
-            needsExpandedSetterName(lhs.symbol)
-          ) setPos lhs.pos
+          def setter = lhs.symbol.setter(toInterface(lhs.symbol.owner.tpe).typeSymbol) setPos lhs.pos
 
           typedPos(tree.pos)((qual DOT setter)(rhs))
 
@@ -1258,7 +1236,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
       val tree1 = super.transform(preTransform(tree))
       // localTyper needed when not flattening inner classes. parts after an
       // inner class will otherwise be typechecked with a wrong scope
-      try afterMixin(postTransform(tree1))
+      try exitingMixin(postTransform(tree1))
       finally localTyper = saved
     }
   }
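
    As a big-picture aid to the transformer above: under the 2.11 trait encoding, a trait such as
    `trait T { val x = 1; def f = x + 1 }` with `class C extends T` is split into an interface
    carrying abstract accessors and an implementation class carrying the bodies, and mixin copies
    fields, accessors and forwarders into C. A rough source-level approximation, with illustrative
    names (T$impl, x$field) standing in for the generated implementation class and field:

        trait T { def x: Int; def f: Int }         // the interface: only abstract members remain
        object T$impl {                            // stands in for the implementation class
          def f($this: T): Int = $this.x + 1       // the body takes the self reference explicitly
        }
        class C extends T {
          private[this] var x$field: Int = 1       // field mixed into the class
          def x: Int = x$field                     // mixed-in getter
          def f: Int = T$impl.f(this)              // forwarder: Q.m(S, args), as described above
        }
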
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index 67be81b..bbd11ef 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -6,221 +6,39 @@
 package scala.tools.nsc
 package transform
 
-import scala.collection.mutable
 import symtab.Flags._
-import util.HashSet
-import scala.annotation.tailrec
+import scala.reflect.internal.SymbolPairs
 
 /** A class that yields a kind of iterator (`Cursor`),
- *  which yields all pairs of overriding/overridden symbols
- *  that are visible in some baseclass, unless there's a parent class
- *  that already contains the same pairs.
- *  @author Martin Odersky
- *  @version 1.0
+ *  which yields pairs of corresponding symbols visible in some base class,
+ *  unless there's a parent class that already contains the same pairs.
+ *  Most of the logic is in SymbolPairs, which contains generic
+ *  pair-oriented traversal logic.
  */
-abstract class OverridingPairs {
-
-  val global: Global
+abstract class OverridingPairs extends SymbolPairs {
   import global._
 
-  /** The cursor class
-   *  @param base   the base class that contains the overriding pairs
-   */
-  class Cursor(base: Symbol) {
-
-    private val self = base.thisType
-
-    /** Symbols to exclude: Here these are constructors, private locals,
-     *  and bridges. But it may be refined in subclasses.
-     *
-     */
-    protected def exclude(sym: Symbol): Boolean =
-      sym.isConstructor || sym.isPrivateLocal || sym.hasFlag(BRIDGE)
-
-    /** The parents of base (may also be refined).
-     */
-    protected def parents: List[Type] = base.info.parents
-
-    /** Does `sym1` match `sym2` so that it qualifies as overriding.
-     *  Types always match. Term symbols match if their membertypes
-     *  relative to <base>.this do
-     */
-    protected def matches(sym1: Symbol, sym2: Symbol): Boolean = {
-      def tp_s(s: Symbol) = self.memberType(s) + "/" + self.memberType(s).getClass
-      val result = sym1.isType || (self.memberType(sym1) matches self.memberType(sym2))
-      debuglog("overriding-pairs? %s matches %s (%s vs. %s) == %s".format(
-        sym1.fullLocationString, sym2.fullLocationString, tp_s(sym1), tp_s(sym2), result))
-
-      result
-    }
+  class Cursor(base: Symbol) extends super.Cursor(base) {
+    lazy val relatively = new RelativeTo(base.thisType)
 
-    /** An implementation of BitSets as arrays (maybe consider collection.BitSet
-     *  for that?) The main purpose of this is to implement
-     *  intersectionContainsElement efficiently.
+    /** Symbols to exclude: Here these are constructors and private/artifact symbols,
+     *  including bridges. But it may be refined in subclasses.
      */
-    private type BitSet = Array[Int]
-
-    private def include(bs: BitSet, n: Int) {
-      val nshifted = n >> 5
-      val nmask = 1 << (n & 31)
-      bs(nshifted) = bs(nshifted) | nmask
-    }
-
-    /** Implements `bs1 * bs2 * {0..n} != 0.
-     *  Used in hasCommonParentAsSubclass */
-    private def intersectionContainsElementLeq(bs1: BitSet, bs2: BitSet, n: Int): Boolean = {
-      val nshifted = n >> 5
-      val nmask = 1 << (n & 31)
-      var i = 0
-      while (i < nshifted) {
-        if ((bs1(i) & bs2(i)) != 0) return true
-        i += 1
-      }
-      (bs1(nshifted) & bs2(nshifted) & (nmask | nmask - 1)) != 0
-    }
-
-    /** The symbols that can take part in an overriding pair */
-    private val decls = newScope
+    override protected def exclude(sym: Symbol) = (
+         sym.isPrivateLocal
+      || sym.isArtifact
+      || sym.isConstructor
+      || (sym.isPrivate && sym.owner != base) // Privates aren't inherited. Needed for pos/t7475a.scala
+    )
 
-    // fill `decls` with overriding shadowing overridden */
-    { def fillDecls(bcs: List[Symbol], deferredflag: Int) {
-        if (!bcs.isEmpty) {
-          fillDecls(bcs.tail, deferredflag)
-          var e = bcs.head.info.decls.elems;
-          while (e ne null) {
-            if (e.sym.getFlag(DEFERRED) == deferredflag.toLong && !exclude(e.sym))
-              decls enter e.sym;
-            e = e.next
-          }
-        }
-      }
-      // first, deferred (this wil need to change if we change lookup rules!
-      fillDecls(base.info.baseClasses, DEFERRED)
-      // then, concrete.
-      fillDecls(base.info.baseClasses, 0)
-    }
-
-    private val size = base.info.baseClasses.length
-
-    /** A map from baseclasses of <base> to ints, with smaller ints meaning lower in
-     *  linearization order.
-     *  symbols that are not baseclasses map to -1.
+    /** Types always match. Term symbols match if their member types
+     *  relative to `self` match.
      */
-    private val index = new mutable.HashMap[Symbol, Int] {
-      override def default(key: Symbol) = -1
-    }
-
-    // Note: overridingPairs can be called at odd instances by the Eclipse plugin
-    // Soemtimes symbols are not yet defined and we get missing keys.
-    // The implementation here is hardened so that it does not crash on a missing key.
-
-    { var i = 0
-      for (bc <- base.info.baseClasses) {
-        index(bc) = i
-        i += 1
-      }
-    }
-
-    /** A mapping from all base class indices to a bitset
-     *  which indicates whether parents are subclasses.
-     *
-     *   i \in subParents(j)   iff
-     *   exists p \in parents, b \in baseClasses:
-     *     i = index(p)
-     *     j = index(b)
-     *     p isSubClass b
-     *     p.baseType(b) == self.baseType(b)
-     */
-    private val subParents = new Array[BitSet](size)
-
-    { for (i <- List.range(0, size))
-        subParents(i) = new BitSet(size);
-      for (p <- parents) {
-        val pIndex = index(p.typeSymbol)
-        if (pIndex >= 0)
-          for (bc <- p.baseClasses)
-            if (p.baseType(bc) =:= self.baseType(bc)) {
-              val bcIndex = index(bc)
-              if (bcIndex >= 0)
-                include(subParents(bcIndex), pIndex)
-            }
-      }
-   }
-
-    /** Do `sym1` and `sym2` have a common subclass in `parents`?
-     *  In that case we do not follow their overriding pairs
-     */
-    private def hasCommonParentAsSubclass(sym1: Symbol, sym2: Symbol) = {
-      val index1 = index(sym1.owner)
-      (index1 >= 0) && {
-        val index2 = index(sym2.owner)
-        (index2 >= 0) && {
-          intersectionContainsElementLeq(
-            subParents(index1), subParents(index2), index1 min index2)
-        }
-      }
-    }
-
-    /** The scope entries that have already been visited as overridden
-     *  (maybe excluded because of hasCommonParentAsSubclass).
-     *  These will not appear as overriding
-     */
-    private val visited = HashSet[ScopeEntry]("visited", 64)
-
-    /** The current entry candidate for overriding
-     */
-    private var curEntry = decls.elems
-
-    /** The current entry candidate for overridden */
-    private var nextEntry = curEntry
-
-    /** The current candidate symbol for overriding */
-    var overriding: Symbol = _
-
-    /** If not null: The symbol overridden by overriding */
-    var overridden: Symbol = _
-
-    //@M: note that next is called once during object initialization
-    def hasNext: Boolean = curEntry ne null
-
-    @tailrec
-    final def next() {
-      if (curEntry ne null) {
-        overriding = curEntry.sym
-        if (nextEntry ne null) {
-          do {
-            do {
-              nextEntry = decls.lookupNextEntry(nextEntry);
-              /* DEBUG
-              if ((nextEntry ne null) &&
-                  !(nextEntry.sym hasFlag PRIVATE) &&
-                  !(overriding.owner == nextEntry.sym.owner) &&
-                  !matches(overriding, nextEntry.sym))
-                println("skipping "+overriding+":"+self.memberType(overriding)+overriding.locationString+" to "+nextEntry.sym+":"+self.memberType(nextEntry.sym)+nextEntry.sym.locationString)
-              */
-              } while ((nextEntry ne null) &&
-                     ((nextEntry.sym hasFlag PRIVATE) ||
-                      (overriding.owner == nextEntry.sym.owner) ||
-                      (!matches(overriding, nextEntry.sym)) ||
-                      (exclude(overriding))))
-            if (nextEntry ne null) visited addEntry nextEntry
-            // skip nextEntry if a class in `parents` is a subclass of the owners of both
-            // overriding and nextEntry.sym
-          } while ((nextEntry ne null) && (hasCommonParentAsSubclass(overriding, nextEntry.sym)))
-          if (nextEntry ne null) {
-            overridden = nextEntry.sym;
-            //Console.println("yield: " + overriding + overriding.locationString + " / " + overridden + overridden.locationString);//DEBUG
-          } else {
-            do {
-              curEntry = curEntry.next
-            } while ((curEntry ne null) && (visited contains curEntry));
-            nextEntry = curEntry
-            next
-          }
-        }
-      }
-    }
-
-    next
+    override protected def matches(lo: Symbol, high: Symbol) = lo.isType || (
+         (lo.owner != high.owner)     // don't try to form pairs from overloaded members
+      && !high.isPrivate              // private or private[this] members are never overridden
+      && !exclude(lo)                 // this admits private, as one can't have a private member that matches a less-private member.
+      && relatively.matches(lo, high)
+    ) // TODO we don't call exclude(high), should we?
   }
 }
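
For orientation, the deleted code shows how the old Cursor was driven: hasNext/next() advance it while overriding and overridden expose the current pair. A rough sketch using only names visible in the removed code (the surrounding setup is hypothetical):

    // opc: an OverridingPairs instance with `global` bound; clazz: the base class
    val cursor = new opc.Cursor(clazz)
    while (cursor.hasNext) {
      val low  = cursor.overriding    // the overriding member
      val high = cursor.overridden    // the member it overrides
      // ... inspect the pair (low, high) ...
      cursor.next()
    }

The replacement keeps the Cursor name but delegates the traversal machinery to scala.reflect.internal.SymbolPairs, as the new header comment notes.
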
diff --git a/src/compiler/scala/tools/nsc/transform/PostErasure.scala b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
index 3ef32ca..32987fe 100644
--- a/src/compiler/scala/tools/nsc/transform/PostErasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
@@ -8,64 +8,36 @@ package transform
 /** This phase maps ErasedValueTypes to the underlying unboxed representation and
  *  performs peephole optimizations.
  */
-trait PostErasure extends InfoTransform with TypingTransformers {
-
+trait PostErasure extends InfoTransform with TypingTransformers with scala.reflect.internal.transform.PostErasure {
   val global: Global
+
   import global._
-  import definitions._
+  import treeInfo._
 
   val phaseName: String = "posterasure"
 
   def newTransformer(unit: CompilationUnit): Transformer = new PostErasureTransformer(unit)
   override def changesBaseClasses = false
 
-  object elimErasedValueType extends TypeMap {
-    def apply(tp: Type) = tp match {
-      case ConstantType(Constant(tp: Type)) =>
-        ConstantType(Constant(apply(tp)))
-      case ErasedValueType(tref) =>
-        atPhase(currentRun.erasurePhase)(erasure.erasedValueClassArg(tref))
-      case _ => mapOver(tp)
-    }
-  }
-
-  def transformInfo(sym: Symbol, tp: Type) = elimErasedValueType(tp)
-
   class PostErasureTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+    override def transform(tree: Tree) = {
+      def finish(res: Tree) = logResult(s"Posterasure reduction\n  Old: $tree\n  New")(res)
+
+      /* We use the name of the operation being performed and not the symbol
+       * itself because the symbol hails from the boxed class, and this transformation
+       * exists to operate directly on the values. So we are for instance looking
+       * up == on an lhs of type Int, whereas the symbol which has been passed in
+       * is from java.lang.Integer.
+       */
+      def binop(lhs: Tree, op: Symbol, rhs: Tree) =
+        finish(localTyper typed (Apply(Select(lhs, op.name) setPos tree.pos, rhs :: Nil) setPos tree.pos))
 
-    override def transform(tree: Tree) =
       super.transform(tree) setType elimErasedValueType(tree.tpe) match {
-        case // new C(arg).underlying  ==>  arg
-          Apply(sel @ Select(
-            Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)),
-            acc), List())
-        if atPhase(currentRun.erasurePhase) {
-          tpt.tpe.typeSymbol.isDerivedValueClass &&
-          sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox
-        } =>
-          if (settings.debug.value) log("Removing "+tree+" -> "+arg)
-          arg
-        case // new C(arg1) == new C(arg2)  ==>  arg1 == arg2
-          Apply(sel @ Select(
-            Apply(Select(New(tpt1), nme.CONSTRUCTOR), List(arg1)),
-            cmp),
-            List(Apply(Select(New(tpt2), nme.CONSTRUCTOR), List(arg2))))
-        if atPhase(currentRun.erasurePhase) {
-          tpt1.tpe.typeSymbol.isDerivedValueClass &&
-          (sel.symbol == Object_== || sel.symbol == Object_!=) &&
-          tpt2.tpe.typeSymbol == tpt1.tpe.typeSymbol
-        } =>
-          val result = Apply(Select(arg1, cmp) setPos sel.pos, List(arg2)) setPos tree.pos
-          log("shortcircuiting equality "+tree+" -> "+result)
-          localTyper.typed(result)
-
-        case // arg.asInstanceOf[T]  ==>  arg      if arg.tpe == T
-          Apply(TypeApply(cast @ Select(arg, asinstanceof), List(tpt)), List())
-        if cast.symbol == Object_asInstanceOf && arg.tpe =:= tpt.tpe => // !!! <:< ?
-          if (settings.debug.value) log("Shortening "+tree+" -> "+arg)
-          arg
-        case tree1 =>
-          tree1
+        case AsInstanceOf(v, tpe) if v.tpe <:< tpe => finish(v)          // x.asInstanceOf[X]       ==> x
+        case ValueClass.BoxAndUnbox(v)             => finish(v)          // (new B(v)).unbox        ==> v
+        case ValueClass.BoxAndCompare(v1, op, v2)  => binop(v1, op, v2)  // new B(v1) == new B(v2)  ==> v1 == v2
+        case tree                                  => tree
       }
+    }
   }
 }
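
The three extractor cases in the new transformer restate the reductions spelled out in the removed comments: new C(arg).underlying ==> arg, new C(arg1) == new C(arg2) ==> arg1 == arg2, and dropping a cast to a type the tree already has. A hedged source-level illustration with a hypothetical value class:

    final class Meter(val underlying: Double) extends AnyVal

    object PostErasureDemo {
      def demo(a: Double, b: Double, d: Double): (Double, Boolean, Double) = (
        new Meter(a).underlying,       // box-then-unbox, reduced to a
        new Meter(a) == new Meter(b),  // box-then-compare, reduced to a == b
        d.asInstanceOf[Double]         // cast to the type d already has, reduced to d
      )
    }
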
diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
index 44d8860..cffb483 100644
--- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
@@ -11,9 +11,8 @@ package transform
 abstract class SampleTransform extends Transform {
   // inherits abstract value `global` and class `Phase` from Transform
 
-  import global._                  // the global environment
-  import definitions._             // standard classes and methods
-  import typer.{typed, atOwner}    // methods to type trees
+  import global._       // the global environment
+  import typer.typed    // method to type trees
 
   /** the following two members override abstract members in Transform */
   val phaseName: String = "sample-phase"
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 7e85647..02e5524 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -3,13 +3,15 @@
  * @author Iulian Dragos
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package transform
 
 import scala.tools.nsc.symtab.Flags
 import scala.collection.{ mutable, immutable }
 import scala.language.postfixOps
 import scala.language.existentials
+import scala.annotation.tailrec
 
 /** Specialize code on types.
  *
@@ -50,7 +52,11 @@ import scala.language.existentials
  */
 abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
   import global._
+  import definitions._
   import Flags._
+
+  private val inlineFunctionExpansion = settings.Ydelambdafy.value == "inline"
+
   /** the name of the phase: */
   val phaseName: String = "specialize"
 
@@ -66,13 +72,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
 
   private implicit val typeOrdering: Ordering[Type] = Ordering[String] on ("" + _.typeSymbol.name)
 
-  import definitions.{
-    BooleanClass, UnitClass, ArrayClass,
-    ScalaValueClasses, isPrimitiveValueClass, isPrimitiveValueType,
-    SpecializedClass, UnspecializedClass, AnyRefClass, ObjectClass,
-    GroupOfSpecializable, uncheckedVarianceClass, ScalaInlineClass
-  }
-  import rootMirror.RootClass
 
   /** TODO - this is a lot of maps.
    */
@@ -101,8 +100,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
   /** Concrete methods that use a specialized type, or override such methods. */
   private val concreteSpecMethods = perRunCaches.newWeakSet[Symbol]()
 
-  private def specializedTypes(tps: List[Symbol]) = tps filter (_.isSpecialized)
   private def specializedOn(sym: Symbol): List[Symbol] = {
+    val GroupOfSpecializable = currentRun.runDefinitions.GroupOfSpecializable
     sym getAnnotation SpecializedClass match {
       case Some(AnnotationInfo(_, Nil, _)) => specializableTypes.map(_.typeSymbol)
       case Some(ann @ AnnotationInfo(_, args, _)) => {
@@ -119,14 +118,30 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
     }
   }
 
-  // If we replace `isBoundedGeneric` with (tp <:< AnyRefClass.tpe),
+  @annotation.tailrec private def findSymbol[T](candidates: List[T], f: T => Symbol): Symbol = {
+    if (candidates.isEmpty) NoSymbol
+    else f(candidates.head) match {
+      case NoSymbol => findSymbol(candidates.tail, f)
+      case sym      => sym
+    }
+  }
+  private def hasNewParents(tree: Tree) = {
+    val parents = tree.symbol.info.parents
+    val prev    = enteringPrevPhase(tree.symbol.info.parents)
+    (parents != prev) && {
+      debuglog(s"$tree parents changed from: $prev to: $parents")
+      true
+    }
+  }
+
+  // If we replace `isBoundedGeneric` with (tp <:< AnyRefTpe),
   // then pos/spec-List.scala fails - why? Does this kind of check fail
   // for similar reasons? Does `sym.isAbstractType` make a difference?
   private def isSpecializedAnyRefSubtype(tp: Type, sym: Symbol) = {
     specializedOn(sym).exists(s => !isPrimitiveValueClass(s)) &&
     !isPrimitiveValueClass(tp.typeSymbol) &&
     isBoundedGeneric(tp)
-    //(tp <:< AnyRefClass.tpe)
+    //(tp <:< AnyRefTpe)
   }
 
   object TypeEnv {
@@ -147,7 +162,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
     def includes(t1: TypeEnv, t2: TypeEnv) = t1 forall {
       case (sym, tpe) =>
         t2 get sym exists { t2tp =>
-          (tpe == t2tp) || !(isPrimitiveValueType(tpe) || isPrimitiveValueType(t2tp)) // u.t.b. (t2tp <:< AnyRefClass.tpe)
+          (tpe == t2tp) || !(isPrimitiveValueType(tpe) || isPrimitiveValueType(t2tp)) // u.t.b. (t2tp <:< AnyRefTpe)
         }
     }
 
@@ -163,22 +178,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
       env forall { case (tvar, tpe) =>
         tvar.isSpecialized && (concreteTypes(tvar) contains tpe) && {
           (sym.typeParams contains tvar) ||
-          (sym.owner != RootClass && (sym.owner.typeParams contains tvar))
+          (sym.owner != rootMirror.RootClass && (sym.owner.typeParams contains tvar))
         }
       }
     }
   }
 
-  /** Returns the generic class that was specialized to 'sClass', or
-   *  'sClass' itself if sClass is not a specialized subclass.
-   */
-  def genericClass(sClass: Symbol): Symbol =
-    if (sClass.isSpecialized) sClass.superClass
-    else sClass
-
   case class Overload(sym: Symbol, env: TypeEnv) {
     override def toString = "specialized overload " + sym + " in " + env
-    def matchesSym(other: Symbol) = sym.tpe =:= other.tpe
+    def matchesSym(sym1: Symbol)  = sym.info =:= sym1.info
     def matchesEnv(env1: TypeEnv) = TypeEnv.includes(env, env1)
   }
   private def newOverload(method: Symbol, specializedMethod: Symbol, env: TypeEnv) = {
@@ -207,8 +215,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
      *  type bounds of other @specialized type parameters (and not in its result type).
      */
     def degenerate = false
-
-    def isAccessor = false
   }
 
   /** Symbol is a special overloaded method of 'original', in the environment env. */
@@ -226,11 +232,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
     def target = t
   }
 
-  /** Symbol is a specialized accessor for the `target` field. */
-  case class SpecializedAccessor(target: Symbol) extends SpecializedInfo {
-    override def isAccessor = true
+  /** Symbol is a special overload of the super accessor. */
+  case class SpecialSuperAccessor(t: Symbol) extends SpecializedInfo {
+    def target = t
   }
 
+  /** Symbol is a specialized accessor for the `target` field. */
+  case class SpecializedAccessor(target: Symbol) extends SpecializedInfo { }
+
   /** Symbol is a specialized method whose body should be the target's method body. */
   case class Implementation(target: Symbol) extends SpecializedInfo
 
@@ -268,9 +277,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
   def specializedParams(sym: Symbol): List[Symbol] =
     sym.info.typeParams filter (_.isSpecialized)
 
-  def splitParams(tps: List[Symbol]) =
-    tps partition (_.isSpecialized)
-
   /** Given an original class symbol and a list of types its type parameters are instantiated at
    *  returns a list of type parameters that should remain in the TypeRef when instantiating a
    *  specialized type.
@@ -286,7 +292,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
         // when searching for a specialized class, take care to map all
         // type parameters that are subtypes of AnyRef to AnyRef
         val args1 = map2(args, sym.info.typeParams)((tp, orig) =>
-          if (isSpecializedAnyRefSubtype(tp, orig)) AnyRefClass.tpe
+          if (isSpecializedAnyRefSubtype(tp, orig)) AnyRefTpe
           else tp
         )
         specializedClass.get((sym, TypeEnv.fromSpecialization(sym, args1))) match {
@@ -300,6 +306,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
   /** Return the specialized name of 'sym' in the given environment. It
    *  guarantees the same result regardless of the map order by sorting
    *  type variables alphabetically.
+   *
+   *  !!! Is this safe in the face of the following?
+   *    scala> trait T { def foo[A] = 0}; object O extends T { override def foo[B] = 0 }
    */
   private def specializedName(sym: Symbol, env: TypeEnv): TermName = {
     val tvars = (
@@ -315,20 +324,20 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
   /** Specialize name for the two list of types. The first one denotes
    *  specialization on method type parameters, the second on outer environment.
    */
-  private def specializedName(name: Name, types1: List[Type], types2: List[Type]): TermName = {
-    if (nme.INITIALIZER == name || (types1.isEmpty && types2.isEmpty))
-      name
+  private def specializedName(name: Name, types1: List[Type], types2: List[Type]): TermName = (
+    if (name == nme.CONSTRUCTOR || (types1.isEmpty && types2.isEmpty))
+      name.toTermName
     else if (nme.isSetterName(name))
-      nme.getterToSetter(specializedName(nme.setterToGetter(name), types1, types2))
+      specializedName(name.getterName, types1, types2).setterName
     else if (nme.isLocalName(name))
-      nme.getterToLocal(specializedName(nme.localToGetter(name), types1, types2))
+      specializedName(name.getterName, types1, types2).localName
     else {
       val (base, cs, ms) = nme.splitSpecializedName(name)
       newTermName(base.toString + "$"
-                  + "m" + ms + types1.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "")
-                  + "c" + cs + types2.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "$sp"))
+                  + "m" + ms + types1.map(t => abbrvTag(t.typeSymbol)).mkString("", "", "")
+                  + "c" + cs + types2.map(t => abbrvTag(t.typeSymbol)).mkString("", "", "$sp"))
     }
-  }
+  )
 
   lazy val specializableTypes = ScalaValueClasses map (_.tpe) sorted
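
The name assembled above is base + "$m" + tags of the specialized method type parameters + "c" + tags of the specialized class type parameters + "$sp", where abbrvTag maps Int to I, Double to D, and so on. A hedged illustration on a hypothetical class:

    class Box[@specialized(Int) A](a: A) {
      def get: A = a
      // the Int copy of get has no method type parameters: get$mcI$sp
      def map[@specialized(Int) B](f: A => B): B = f(a)
      // specializing B (method) and A (class) on Int yields map$mIcI$sp
    }
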
 
@@ -352,7 +361,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
       specializedOn(sym) map (s => specializesClass(s).tpe) sorted
 
     if (isBoundedGeneric(sym.tpe) && (types contains AnyRefClass))
-      reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".")
+      reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefTpe + ".")
 
     types
   }
@@ -372,7 +381,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
     // zip the keys with each permutation to create a TypeEnv.
     // If we don't exclude the "all AnyRef" specialization, we will
     // incur duplicate members and crash during mixin.
-    loop(keys map concreteTypes) filterNot (_ forall (_ <:< AnyRefClass.tpe)) map (xss => Map(keys zip xss: _*))
+    loop(keys map concreteTypes) filterNot (_ forall (_ <:< AnyRefTpe)) map (xss => Map(keys zip xss: _*))
   }
 
   /** Does the given 'sym' need to be specialized in the environment 'env'?
@@ -385,23 +394,31 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
    *      enclosing member with the annotation.
    */
   private def needsSpecialization(env: TypeEnv, sym: Symbol): Boolean = (
-    !sym.ownerChain.exists(_ hasAnnotation UnspecializedClass) && (
+    !hasUnspecializableAnnotation(sym) && (
          specializedTypeVars(sym).intersect(env.keySet).diff(wasSpecializedForTypeVars(sym)).nonEmpty
       || sym.isClassConstructor && (sym.enclClass.typeParams exists (_.isSpecialized))
       || isNormalizedMember(sym) && info(sym).typeBoundsIn(env)
     )
   )
 
+  private def hasUnspecializableAnnotation(sym: Symbol): Boolean =
+    sym.ownerChain.exists(_ hasAnnotation UnspecializedClass)
+
   def isNormalizedMember(m: Symbol) = m.isSpecialized && (info get m exists {
     case NormalizedMember(_)  => true
     case _                    => false
   })
   def specializedTypeVars(tpes: List[Type]): immutable.Set[Symbol] = {
-    val buf = Set.newBuilder[Symbol]
-    tpes foreach (tp => buf ++= specializedTypeVars(tp))
-    buf.result
+    @tailrec def loop(result: immutable.Set[Symbol], xs: List[Type]): immutable.Set[Symbol] = {
+      if (xs.isEmpty) result
+      else loop(result ++ specializedTypeVars(xs.head), xs.tail)
+    }
+    loop(immutable.Set.empty, tpes)
   }
-  def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = beforeTyper(specializedTypeVars(sym.info))
+  def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = (
+    if (neverHasTypeParameters(sym)) immutable.Set.empty
+    else enteringTyper(specializedTypeVars(sym.info))
+  )
 
   /** Return the set of @specialized type variables mentioned by the given type.
    *  It only counts type variables that appear:
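
hasUnspecializableAnnotation, factored out of needsSpecialization above, walks the owner chain for scala.annotation.unspecialized; a later hunk also consults it before creating specialized overloads for a method. A hedged sketch of what it opts out:

    import scala.annotation.unspecialized

    class Vec[@specialized(Int, Double) A](a: A) {
      def head: A = a                  // still gets specialized variants
      @unspecialized def debug: A = a  // excluded: the annotation is found on its own symbol
    }
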
@@ -412,7 +429,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
   def specializedTypeVars(tpe: Type): immutable.Set[Symbol] = tpe match {
     case TypeRef(pre, sym, args) =>
       if (sym.isAliasType)
-        specializedTypeVars(tpe.normalize)
+        specializedTypeVars(tpe.dealiasWiden)
       else if (sym.isTypeParameter && sym.isSpecialized || (sym.isTypeSkolem && sym.deSkolemize.isSpecialized))
         Set(sym)
       else if (sym == ArrayClass)
@@ -422,15 +439,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
       else
         specializedTypeVars(sym.typeParams zip args collect { case (tp, arg) if tp.isSpecialized => arg })
 
-    case PolyType(tparams, resTpe)   => specializedTypeVars(resTpe :: tparams.map(_.info))
+    case PolyType(tparams, resTpe)   => specializedTypeVars(resTpe :: mapList(tparams)(symInfo)) // OPT
     // since this method may be run at phase typer (before uncurry, where NMTs are eliminated)
     case NullaryMethodType(resTpe)   => specializedTypeVars(resTpe)
-    case MethodType(argSyms, resTpe) => specializedTypeVars(resTpe :: argSyms.map(_.tpe))
+    case MethodType(argSyms, resTpe) => specializedTypeVars(resTpe :: mapList(argSyms)(symTpe))  // OPT
     case ExistentialType(_, res)     => specializedTypeVars(res)
-    case AnnotatedType(_, tp, _)     => specializedTypeVars(tp)
+    case AnnotatedType(_, tp)        => specializedTypeVars(tp)
     case TypeBounds(lo, hi)          => specializedTypeVars(lo :: hi :: Nil)
     case RefinedType(parents, _)     => parents flatMap specializedTypeVars toSet
-    case _                           => Set()
+    case _                           => immutable.Set.empty
   }
 
   /** Returns the type parameter in the specialized class `sClass` that corresponds to type parameter
@@ -441,7 +458,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
 
     sClassMap.getOrElseUpdate(tparam,
       tparam.cloneSymbol(sClass, tparam.flags, tparam.name append tpnme.SPECIALIZED_SUFFIX)
-        modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
+        modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefTpe))
     ).tpe
   }
 
@@ -475,7 +492,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
     foreach2(syms, cloned) { (orig, cln) =>
       cln.removeAnnotation(SpecializedClass)
       if (env.contains(orig))
-        cln modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
+        cln modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefTpe))
     }
     cloned map (_ substInfo (syms, cloned))
   }
@@ -484,7 +501,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
    *  the specialized symbol (class (specialization) or member (normalization)), leaves everything else as-is.
    */
   private def mapAnyRefsInSpecSym(env: TypeEnv, origsym: Symbol, specsym: Symbol): TypeEnv = env map {
-    case (sym, tp) if tp == AnyRefClass.tpe && sym.owner == origsym => (sym, typeParamSubAnyRef(sym, specsym))
+    case (sym, AnyRefTpe) if sym.owner == origsym => (sym, typeParamSubAnyRef(sym, specsym))
     case x => x
   }
 
@@ -492,8 +509,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
    *  the original class, leaves everything else as-is.
    */
   private def mapAnyRefsInOrigCls(env: TypeEnv, origcls: Symbol): TypeEnv = env map {
-    case (sym, tp) if (tp == AnyRefClass.tpe) && sym.owner == origcls => (sym, sym.tpe)
-    case x => x
+    case (sym, AnyRefTpe) if sym.owner == origcls => (sym, sym.tpe)
+    case x                                        => x
   }
 
   /** Specialize 'clazz', in the environment `outerEnv`. The outer
@@ -506,9 +523,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
    */
   def specializeClass(clazz: Symbol, outerEnv: TypeEnv): List[Symbol] = {
     def specializedClass(env0: TypeEnv, normMembers: List[Symbol]): Symbol = {
-      /** It gets hard to follow all the clazz and cls, and specializedClass
-       *  was both already used for a map and mucho long.  So "sClass" is the
-       *  specialized subclass of "clazz" throughout this file.
+      /* It gets hard to follow all the clazz and cls, and specializedClass
+       * was both already used for a map and mucho long.  So "sClass" is the
+       * specialized subclass of "clazz" throughout this file.
        */
 
       // SI-5545: Eliminate classes with the same name loaded from the bytecode already present - all we need to do is
@@ -525,7 +542,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
       def cloneInSpecializedClass(member: Symbol, flagFn: Long => Long, newName: Name = null) =
         member.cloneSymbol(sClass, flagFn(member.flags | SPECIALIZED), newName)
 
-      sClass.sourceFile = clazz.sourceFile
+      sClass.associatedFile = clazz.sourceFile
       currentRun.symSource(sClass) = clazz.sourceFile // needed later on by mixin
 
       val env = mapAnyRefsInSpecSym(env0, clazz, sClass)
@@ -537,7 +554,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
       var newClassTParams: List[Symbol] = Nil       // unspecialized type parameters of 'specializedClass' (cloned)
 
       // has to be a val in order to be computed early. It is later called
-      // within 'atPhase(next)', which would lead to an infinite cycle otherwise
+      // within 'enteringPhase(next)', which would lead to an infinite cycle otherwise
       val specializedInfoType: Type = {
         oldClassTParams = survivingParams(clazz.info.typeParams, env)
         newClassTParams = produceTypeParameters(oldClassTParams, sClass, env) map subst(env)
@@ -546,18 +563,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
         def applyContext(tpe: Type) =
           subst(env, tpe).instantiateTypeParams(oldClassTParams, newClassTParams map (_.tpe))
 
-        /** Return a list of specialized parents to be re-mixed in a specialized subclass.
-         *  Assuming env = [T -> Int] and
-         *    class Integral[@specialized T] extends Numeric[T]
-         *  and Numeric[U] is specialized on U, this produces List(Numeric$mcI).
+        /* Return a list of specialized parents to be re-mixed in a specialized subclass.
+         * Assuming env = [T -> Int] and
+         *   class Integral[@specialized T] extends Numeric[T]
+         * and Numeric[U] is specialized on U, this produces List(Numeric$mcI).
          *
-         *  so that class Integral$mci extends Integral[Int] with Numeric$mcI.
+         * so that class Integral$mci extends Integral[Int] with Numeric$mcI.
          */
         def specializedParents(parents: List[Type]): List[Type] = {
           var res: List[Type] = Nil
           // log(specializedClass + ": seeking specialized parents of class with parents: " + parents.map(_.typeSymbol))
           for (p <- parents) {
-            val stp = afterSpecialize(specializedType(p))
+            val stp = exitingSpecialize(specializedType(p))
             if (stp != p)
               if (p.typeSymbol.isTrait) res ::= stp
               else if (currentRun.compiles(clazz))
@@ -567,7 +584,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
           res
         }
 
-        var parents = List(applyContext(beforeTyper(clazz.tpe)))
+        var parents = List(applyContext(enteringTyper(clazz.tpe_*)))
         // log("!!! Parents: " + parents + ", sym: " + parents.map(_.typeSymbol))
         if (parents.head.typeSymbol.isTrait)
           parents = parents.head.parents.head :: parents
@@ -589,13 +606,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
         GenPolyType(newClassTParams, ClassInfoType(parents ::: extraSpecializedMixins, decls1, sClass))
       }
 
-      afterSpecialize(sClass setInfo specializedInfoType)
+      exitingSpecialize(sClass setInfo specializedInfoType)
       val fullEnv = outerEnv ++ env
 
-      /** Enter 'sym' in the scope of the current specialized class. It's type is
-       *  mapped through the active environment, binding type variables to concrete
-       *  types. The existing typeEnv for `sym` is composed with the current active
-       *  environment
+      /* Enter 'sym' in the scope of the current specialized class. Its type is
+       * mapped through the active environment, binding type variables to concrete
+       * types. The existing typeEnv for `sym` is composed with the current active
+       * environment
        */
       def enterMember(sym: Symbol): Symbol = {
         typeEnv(sym) = fullEnv ++ typeEnv(sym) // append the full environment
@@ -608,18 +625,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
         decls1 enter subst(fullEnv)(sym)
       }
 
-      /** Create and enter in scope an overridden symbol m1 for `m` that forwards
-       *  to `om`. `om` is a fresh, special overload of m1 that is an implementation
-       *  of `m`. For example, for a
+      /* Create and enter in scope an overridden symbol m1 for `m` that forwards
+       * to `om`. `om` is a fresh, special overload of m1 that is an implementation
+       * of `m`. For example, for a
        *
-       *  class Foo[@specialized A] {
-       *    def m(x: A) = <body> // m
-       *  }
-       *  , for class Foo$I extends Foo[Int], this method enters two new symbols in
-       *  the scope of Foo$I:
+       * class Foo[@specialized A] {
+       *   def m(x: A) = <body> // m
+       * }
+       * , for class Foo$I extends Foo[Int], this method enters two new symbols in
+       * the scope of Foo$I:
        *
-       *    def m(x: Int) = m$I(x) // m1
-       *    def m$I(x: Int) = <body>/adapted to env {A -> Int} // om
+       *   def m(x: Int) = m$I(x) // m1
+       *   def m$I(x: Int) = <body>/adapted to env {A -> Int} // om
        */
       def forwardToOverload(m: Symbol): Symbol = {
         val specMember = enterMember(cloneInSpecializedClass(m, f => (f | OVERRIDE) & ~(DEFERRED | CASEACCESSOR)))
@@ -683,7 +700,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
           def mkAccessor(field: Symbol, name: Name) = {
             val newFlags = (SPECIALIZED | m.getter(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR)
             // we rely on the super class to initialize param accessors
-            val sym = sClass.newMethod(name, field.pos, newFlags)
+            val sym = sClass.newMethod(name.toTermName, field.pos, newFlags)
             info(sym) = SpecializedAccessor(field)
             sym
           }
@@ -699,10 +716,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
 
           enterMember(specVal)
           // create accessors
-          // debuglog("m: " + m + " isLocal: " + nme.isLocalName(m.name) + " specVal: " + specVal.name + " isLocal: " + nme.isLocalName(specVal.name))
 
           if (nme.isLocalName(m.name)) {
-            val specGetter = mkAccessor(specVal, nme.localToGetter(specVal.name)) setInfo MethodType(Nil, specVal.info)
+            val specGetter = mkAccessor(specVal, specVal.getterName) setInfo MethodType(Nil, specVal.info)
             val origGetter = overrideIn(sClass, m.getter(clazz))
             info(origGetter) = Forward(specGetter)
             enterMember(specGetter)
@@ -717,10 +733,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
             }
 
             if (specVal.isVariable && m.setter(clazz) != NoSymbol) {
-              val specSetter = mkAccessor(specVal, nme.getterToSetter(specGetter.name))
+              val specSetter = mkAccessor(specVal, specGetter.setterName)
                 .resetFlag(STABLE)
               specSetter.setInfo(MethodType(specSetter.newSyntheticValueParams(List(specVal.info)),
-                                            UnitClass.tpe))
+                                            UnitTpe))
               val origSetter = overrideIn(sClass, m.setter(clazz))
               info(origSetter) = Forward(specSetter)
               enterMember(specSetter)
@@ -777,7 +793,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
       if (existing != NoSymbol)
         clazz.owner.info.decls.unlink(existing)
 
-      afterSpecialize(clazz.owner.info.decls enter spc) //!!! assumes fully specialized classes
+      exitingSpecialize(clazz.owner.info.decls enter spc) //!!! assumes fully specialized classes
     }
     if (subclasses.nonEmpty) clazz.resetFlag(FINAL)
     cleanAnyRefSpecCache(clazz, decls1)
@@ -795,7 +811,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
    */
   private def normalizeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv): List[Symbol] = {
     sym :: (
-      if (!sym.isMethod || beforeTyper(sym.typeParams.isEmpty)) Nil
+      if (!sym.isMethod || enteringTyper(sym.typeParams.isEmpty)) Nil
       else if (sym.hasDefault) {
         /* Specializing default getters is useless, also see SI-7329 . */
         sym.resetFlag(SPECIALIZED)
@@ -882,6 +898,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
       }
 
       val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec))
+      owner.info.decls.enter(specMember)
       typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec
       wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s }
 
@@ -895,25 +912,30 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
     }
 
     if (sym.isMethod) {
-      val stvars = specializedTypeVars(sym)
-      if (stvars.nonEmpty)
-        debuglog("specialized %s on %s".format(sym.fullLocationString, stvars.map(_.name).mkString(", ")))
+      if (hasUnspecializableAnnotation(sym)) {
+        List()
+      } else {
+        val stvars = specializedTypeVars(sym)
+        if (stvars.nonEmpty)
+          debuglog("specialized %s on %s".format(sym.fullLocationString, stvars.map(_.name).mkString(", ")))
 
-      val tps1 = if (sym.isConstructor) tps filter (sym.info.paramTypes contains _) else tps
-      val tps2 = tps1 filter stvars
-      if (!sym.isDeferred)
-        addConcreteSpecMethod(sym)
+        val tps1 = if (sym.isConstructor) tps filter (sym.info.paramTypes contains _) else tps
+        val tps2 = tps1 filter stvars
+        if (!sym.isDeferred)
+          addConcreteSpecMethod(sym)
 
-      specializeOn(tps2)
+        specializeOn(tps2)
+      }
     }
     else Nil
   }
 
   /** Return the specialized overload of `m`, in the given environment. */
-  private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv): Symbol = {
-    val newFlags = (sym.flags | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR)
+  private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv, nameSymbol: Symbol = NoSymbol): Symbol = {
+    val newFlags = (sym.flags | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR | LAZY)
     // this method properly duplicates the symbol's info
-    ( sym.cloneSymbol(owner, newFlags, newName = specializedName(sym, env))
+    val specname = specializedName(nameSymbol orElse sym, env)
+    ( sym.cloneSymbol(owner, newFlags, newName = specname)
         modifyInfo (info => subst(env, info.asSeenFrom(owner.thisType, sym.owner)))
     )
   }
@@ -929,13 +951,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
    *  this method will return List('apply$mcII$sp')
    */
   private def specialOverrides(clazz: Symbol) = logResultIf[List[Symbol]]("specialized overrides in " + clazz, _.nonEmpty) {
-    /** Return the overridden symbol in syms that needs a specialized overriding symbol,
-     *  together with its specialization environment. The overridden symbol may not be
-     *  the closest to 'overriding', in a given hierarchy.
+    /* Return the overridden symbol in syms that needs a specialized overriding symbol,
+     * together with its specialization environment. The overridden symbol may not be
+     * the closest to 'overriding', in a given hierarchy.
      *
-     *  An method m needs a special override if
-     *    * m overrides a method whose type contains specialized type variables
-     *    * there is a valid specialization environment that maps the overridden method type to m's type.
+     * A method m needs a special override if
+     *   * m overrides a method whose type contains specialized type variables
+     *   * there is a valid specialization environment that maps the overridden method type to m's type.
      */
     def needsSpecialOverride(overriding: Symbol): (Symbol, TypeEnv) = {
       def checkOverriddenTParams(overridden: Symbol) {
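
The doc fragment above ends with List('apply$mcII$sp'), the classic Function1 case. A hedged sketch of the user code this concerns (the forwarding shape is assumed, not taken from this patch):

    class Succ extends (Int => Int) {
      def apply(x: Int): Int = x + 1
      // specialization adds an apply$mcII$sp override carrying this body and
      // rewires the generic apply to forward to it (see the Forward entries
      // below), so Int arguments and results need not be boxed
    }
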
@@ -960,7 +982,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
               checkOverriddenTParams(overridden)
 
             val env    = unify(overridden.info, overriding.info, emptyEnv, false, true)
-            def atNext = afterSpecialize(overridden.owner.info.decl(specializedName(overridden, env)))
+            def atNext = exitingSpecialize(overridden.owner.info.decl(specializedName(overridden, env)))
 
             if (TypeEnv.restrict(env, stvars).nonEmpty && TypeEnv.isValid(env, overridden) && atNext != NoSymbol) {
               debuglog("  " + pp(env) + " found " + atNext)
@@ -973,18 +995,36 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
     }
     (clazz.info.decls flatMap { overriding =>
       needsSpecialOverride(overriding) match {
-        case (NoSymbol, _)     => None
+        case (NoSymbol, _)     =>
+          if (overriding.isSuperAccessor) {
+            val alias = overriding.alias
+            debuglog("checking special overload for super accessor: %s, alias for %s".format(overriding.fullName, alias.fullName))
+            needsSpecialOverride(alias) match {
+              case nope @ (NoSymbol, _) => None
+              case (overridden, env) =>
+                val om = specializedOverload(clazz, overriding, env, overridden)
+                om.setName(nme.superName(om.name))
+                om.asInstanceOf[TermSymbol].setAlias(info(alias).target)
+                om.owner.info.decls.enter(om)
+                info(om) = SpecialSuperAccessor(om)
+                om.makeNotPrivate(om.owner)
+                newOverload(overriding, om, env)
+                Some(om)
+            }
+          } else None
         case (overridden, env) =>
           val om = specializedOverload(clazz, overridden, env)
+          clazz.info.decls.enter(om)
           foreachWithIndex(om.paramss) { (params, i) =>
             foreachWithIndex(params) { (param, j) =>
               param.name = overriding.paramss(i)(j).name // SI-6555 Retain the parameter names from the subclass.
             }
           }
           debuglog("specialized overload %s for %s in %s: %s".format(om, overriding.name.decode, pp(env), om.info))
+          if (overriding.isAbstractOverride) om.setFlag(ABSOVERRIDE)
           typeEnv(om) = env
           addConcreteSpecMethod(overriding)
-          if (overriding.isDeferred) {    // abstract override
+          if (overriding.isDeferred) { // abstract override
             debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName)
             info(om) = Forward(overriding)
           }
@@ -1002,8 +1042,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
             }
             info(overriding) = Forward(om setPos overriding.pos)
           }
+
           newOverload(overriding, om, env)
-          ifDebug(afterSpecialize(assert(
+          ifDebug(exitingSpecialize(assert(
             overridden.owner.info.decl(om.name) != NoSymbol,
             "Could not find " + om.name + " in " + overridden.owner.info.decls))
           )
@@ -1032,7 +1073,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
       if (isPrimitiveValueClass(tp2.typeSymbol) || isSpecializedAnyRefSubtype(tp2, sym1))
         env + ((sym1, tp2))
       else if (isSpecializedAnyRefSubtype(tp2, sym1))
-        env + ((sym1, tp2)) // env + ((sym1, AnyRefClass.tpe))
+        env + ((sym1, tp2))
       else if (strict)
         unifyError(tp1, tp2)
       else
@@ -1066,7 +1107,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
     case (ThisType(_), _)                             => unify(tp1.widen, tp2, env, strict)
     case (_, ThisType(_))                             => unify(tp1, tp2.widen, env, strict)
     case (RefinedType(_, _), RefinedType(_, _))       => env
-    case (AnnotatedType(_, tp1, _), tp2)              => unify(tp2, tp1, env, strict)
+    case (AnnotatedType(_, tp1), tp2)                 => unify(tp2, tp1, env, strict)
     case (ExistentialType(_, res1), _)                => unify(tp2, res1, env, strict)
     case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) => unify(List(lo1, hi1), List(lo2, hi2), env, strict)
     case _ =>
@@ -1089,10 +1130,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
     }
   }
 
-  /** Apply type bindings in the given environment `env` to all declarations.  */
-  private def subst(env: TypeEnv, decls: List[Symbol]): List[Symbol] =
-    decls map subst(env)
-
   /** Apply the type environment 'env' to the given type. All type
    *  bindings are supposed to be to primitive types. A type variable
    *  that is annotated with 'uncheckedVariance' is mapped to the corresponding
@@ -1119,35 +1156,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
 
   private def subst(env: TypeEnv)(decl: Symbol): Symbol =
     decl modifyInfo (info =>
-      if (decl.isConstructor) MethodType(subst(env, info).params, decl.owner.tpe)
+      if (decl.isConstructor) MethodType(subst(env, info).params, decl.owner.tpe_*)
       else subst(env, info)
     )
 
-  /** Checks if the type parameter symbol is not specialized
-   *  and is used as type parameters when extending a class with a specialized
-   *  type parameter.
-   *  At some point we may remove this restriction.
-   *
-   *  Example:
-   *
-   *    class Base[@specialized T]
-   *    class Derived[T] extends Base[T] // a non-specialized T is
-   *                                     // used as a type param for Base
-   *                                     // -> returning true
-   */
-  private def notSpecializedIn(tsym: Symbol, supertpe: Type) = supertpe match {
-    case TypeRef(_, supersym, supertargs) =>
-      val tspec = specializedOn(tsym).toSet
-      for (supt <- supersym.typeParams) {
-        val supspec = specializedOn(supt).toSet
-        if (tspec != supspec && tspec.subsetOf(supspec))
-          reporter.error(tsym.pos, "Type parameter has to be specialized at least for the same types as in the superclass. Missing types: " + (supspec.diff(tspec)).mkString(", "))
-      }
-    case _ => //log("nope")
-  }
-
   private def unspecializableClass(tp: Type) = (
-       definitions.isRepeatedParamType(tp)  // ???
+       isRepeatedParamType(tp)  // ???
     || tp.typeSymbol.isJavaDefined
     || tp.typeSymbol.isPackageClass
   )
@@ -1156,12 +1170,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
    *  If it is a 'no-specialization' run, it is applied only to loaded symbols.
    */
   override def transformInfo(sym: Symbol, tpe: Type): Type = {
-    if (settings.nospecialization.value && currentRun.compiles(sym)) tpe
+    if (settings.nospecialization && currentRun.compiles(sym)) tpe
     else tpe.resultType match {
       case cinfo @ ClassInfoType(parents, decls, clazz) if !unspecializableClass(cinfo) =>
         val tparams  = tpe.typeParams
         if (tparams.isEmpty)
-          afterSpecialize(parents map (_.typeSymbol.info))
+          exitingSpecialize(parents map (_.typeSymbol.info))
 
         val parents1 = parents mapConserve specializedType
         if (parents ne parents1) {
@@ -1182,7 +1196,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
    *
    *  A conflicting type environment could still be satisfiable.
    */
-  def conflicting(env: TypeEnv) = !nonConflicting(env)
   def nonConflicting(env: TypeEnv) = env forall { case (tvar, tpe) =>
     (subst(env, tvar.info.bounds.lo) <:< tpe) && (tpe <:< subst(env, tvar.info.bounds.hi))
   }
@@ -1252,9 +1265,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
 
     class BodyDuplicator(_context: Context) extends super.BodyDuplicator(_context) {
       override def castType(tree: Tree, pt: Type): Tree = {
-        // log(" expected type: " + pt)
-        // log(" tree type: " + tree.tpe)
-        tree.tpe = if (tree.tpe != null) fixType(tree.tpe) else null
+        tree modifyType fixType
         // log(" tree type: " + tree.tpe)
         val ntree = if (tree.tpe != null && !(tree.tpe <:< pt)) {
           val casttpe = CastMap(tree.tpe)
@@ -1262,8 +1273,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
           else if (casttpe <:< CastMap(pt)) gen.mkCast(tree, pt)
           else tree
         } else tree
-        ntree.tpe = null
-        ntree
+
+        ntree.clearType()
       }
     }
 
@@ -1296,7 +1307,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
    */
   class SpecializationDuplicator(casts: Map[Symbol, Type]) extends Duplicator(casts) {
     override def retyped(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol, env: scala.collection.Map[Symbol, Type]): Tree =
-      beforeSpecialize(super.retyped(context, tree, oldThis, newThis, env))
+      enteringSpecialize(super.retyped(context, tree, oldThis, newThis, env))
   }
 
   /** A tree symbol substituter that substitutes on type skolems.
@@ -1319,7 +1330,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
     }
 
     private def isAccessible(sym: Symbol): Boolean =
-      (currentClass == sym.owner.enclClass) && (currentClass != targetClass)
+      if (currentOwner.isAnonymousFunction) {
+        if (inlineFunctionExpansion) devWarning("anonymous function made it to specialization even though inline expansion is set.")
+        false
+      }
+      else (currentClass == sym.owner.enclClass) && (currentClass != targetClass)
 
     private def shouldMakePublic(sym: Symbol): Boolean =
       sym.hasFlag(PRIVATE | PROTECTED) && (addressFields || !nme.isLocalName(sym.name))
@@ -1333,7 +1348,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
         if (sym.isPrivate) debuglog(
           "seeing private member %s, currentClass: %s, owner: %s, isAccessible: %b, isLocalName: %b".format(
             sym, currentClass, sym.owner.enclClass, isAccessible(sym), nme.isLocalName(sym.name))
-        )
+          )
         if (shouldMakePublic(sym) && !isAccessible(sym)) {
           debuglog("changing private flag of " + sym)
           sym.makeNotPrivate(sym.owner)
@@ -1418,28 +1433,64 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
 
     def transform1(tree: Tree) = {
       val symbol = tree.symbol
-
-      /** The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
-      def specSym(qual: Tree): Option[Symbol] = {
+      /* The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
+      def specSym(qual: Tree): Symbol = {
         val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
-        debuglog("[specSym] checking for rerouting: %s with \n\tsym.tpe: %s, \n\ttree.tpe: %s \n\tenv: %s \n\tname: %s"
-                .format(tree, symbol.tpe, tree.tpe, env, specializedName(symbol, env)))
-        if (env.nonEmpty) {  // a method?
-          val specCandidates = qual.tpe.member(specializedName(symbol, env))
-          val specMember = specCandidates suchThat { s =>
-            doesConform(symbol, tree.tpe, qual.tpe.memberType(s), env)
+        def isMatch(member: Symbol) = {
+          val memberType = qual.tpe memberType member
+
+          val residualTreeType = tree match {
+            case TypeApply(fun, targs) if fun.symbol == symbol =>
+              // SI-6308 Handle methods with only some type parameters specialized.
+              //         drop the specialized type parameters from the PolyType, and
+              //         substitute in the type environment.
+              val GenPolyType(tparams, tpe) = fun.tpe
+              val (from, to) = env.toList.unzip
+              val residualTParams = tparams.filterNot(env.contains)
+              GenPolyType(residualTParams, tpe).substituteTypes(from, to)
+            case _ => tree.tpe
           }
 
-          debuglog("[specSym] found: " + specCandidates.tpe + ", instantiated as: " + tree.tpe)
-          debuglog("[specSym] found specMember: " + specMember)
-          if (specMember ne NoSymbol)
-            if (TypeEnv.includes(typeEnv(specMember), env)) Some(specMember)
-            else {
-              debuglog("wrong environments for specialized member: \n\ttypeEnv(%s) = %s\n\tenv = %s".format(specMember, typeEnv(specMember), env))
-              None
-            }
-          else None
-        } else None
+          (
+               doesConform(symbol, residualTreeType, memberType, env)
+            && TypeEnv.includes(typeEnv(member), env)
+          )
+        }
+        if (env.isEmpty) NoSymbol
+        else qual.tpe member specializedName(symbol, env) suchThat isMatch
+      }
+
+      def matchingSymbolInPrefix(pre: Type, member: Symbol, env: TypeEnv): Symbol = {
+        pre member specializedName(member, env) suchThat (_.tpe matches subst(env, member.tpe))
+      }
+
+      def transformSelect(sel: Select) = {
+        val Select(qual, name) = sel
+        debuglog(s"specializing Select(sym=${symbol.defString}, tree.tpe=${tree.tpe})")
+
+        val qual1                     = transform(qual)
+        def copySelect                = treeCopy.Select(tree, qual1, name)
+        def newSelect(member: Symbol) = atPos(tree.pos)(Select(qual1, member))
+        def typedOp(member: Symbol)   = localTyper typedOperator newSelect(member)
+        def typedTree(member: Symbol) = localTyper typed newSelect(member)
+
+        val ignoreEnv = specializedTypeVars(symbol.info).isEmpty || name == nme.CONSTRUCTOR
+        if (ignoreEnv) overloads(symbol) find (_ matchesSym symbol) match {
+          case Some(Overload(member, _)) => typedOp(member)
+          case _                         => copySelect
+        }
+        else {
+          val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
+          overloads(symbol) find (_ matchesEnv env) match {
+            case Some(Overload(member, _)) => typedOp(member)
+            case _ =>
+              matchingSymbolInPrefix(qual1.tpe, symbol, env) match {
+                case NoSymbol                  => copySelect
+                case member if member.isMethod => typedOp(member)
+                case member                    => typedTree(member)
+              }
+          }
+        }
       }
 
       /** Computes residual type parameters after rewiring, like "String" in the following example:
@@ -1448,25 +1499,23 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
        *    specMe[Int, String](1, "2") => specMe$mIc$sp[String](1, "2")
        *  ```
        */
-      def computeResidualTypeVars(baseTree: Tree, specTree: Tree, baseTargs: List[Tree], env: TypeEnv) = {
-        val baseSym: Symbol = baseTree.symbol
-        val specSym: Symbol = specTree.symbol
-        val residualTargs = baseSym.info.typeParams zip baseTargs collect {
+      def computeResidualTypeVars(baseTree: Tree, specMember: Symbol, specTree: Tree, baseTargs: List[Tree], env: TypeEnv): Tree = {
+        val residualTargs = symbol.info.typeParams zip baseTargs collect {
           case (tvar, targ) if !env.contains(tvar) || !isPrimitiveValueClass(env(tvar).typeSymbol) => targ
         }
-
-        if (specSym.info.typeParams.isEmpty && residualTargs.nonEmpty) {
-          log("!!! Type args to be applied, but symbol says no parameters: " + ((specSym.defString, residualTargs)))
+        // See SI-5583.  Don't know why it happens now if it didn't before.
+        if (specMember.info.typeParams.isEmpty && residualTargs.nonEmpty) {
+          devWarning("Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs)))
           baseTree
         }
         else {
-          ifDebug(assert(residualTargs.length == specSym.info.typeParams.length,
-            "residual: %s, tparams: %s, env: %s".format(residualTargs, specSym.info.typeParams, env))
+          ifDebug(assert(residualTargs.length == specMember.info.typeParams.length,
+            "residual: %s, tparams: %s, env: %s".format(residualTargs, specMember.info.typeParams, env))
           )
 
           val tree1 = gen.mkTypeApply(specTree, residualTargs)
           debuglog("rewrote " + tree + " to " + tree1)
-          localTyper.typedOperator(atPos(tree.pos)(tree1))
+          localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
         }
       }
 
@@ -1474,31 +1523,24 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
       tree match {
         case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
           def transformNew = {
-          debuglog("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", ")))
-          val found = findSpec(tpt.tpe)
-          if (found.typeSymbol ne tpt.tpe.typeSymbol) {
-            // the ctor can be specialized
-            debuglog("** instantiated specialized type: " + found)
-            reportError {
-              localTyper.typedPos(tree.pos)(New(found, transformTrees(args): _*))
-            } {
-              _ => super.transform(tree)
+            debuglog("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", ")))
+            val found = specializedType(tpt.tpe)
+            if (found.typeSymbol ne tpt.tpe.typeSymbol) { // the ctor can be specialized
+              val inst = New(found, transformTrees(args): _*)
+              reportError(localTyper.typedPos(tree.pos)(inst))(_ => super.transform(tree))
             }
-          } else super.transform(tree)
+            else
+              super.transform(tree)
           }
           transformNew
 
-        case Apply(sel @ Select(sup @ Super(qual, name), name1), args)
-          if (sup.symbol.info.parents != beforePrevPhase(sup.symbol.info.parents)) =>
+        case Apply(sel @ Select(sup @ Super(qual, name), name1), args) if hasNewParents(sup) =>
           def transformSuperApply = {
-
-          def parents = sup.symbol.info.parents
-          debuglog(tree + " parents changed from: " + beforePrevPhase(parents) + " to: " + parents)
-
-          val res = localTyper.typed(
-            Apply(Select(Super(qual, name) setPos sup.pos, name1) setPos sel.pos, transformTrees(args)) setPos tree.pos)
-          debuglog("retyping call to super, from: " + symbol + " to " + res.symbol)
-          res
+            val sup1  = Super(qual, name) setPos sup.pos
+            val tree1 = Apply(Select(sup1, name1) setPos sel.pos, transformTrees(args))
+            val res   = localTyper.typedPos(tree.pos)(tree1)
+            debuglog(s"retyping call to super, from: $symbol to ${res.symbol}")
+            res
           }
           transformSuperApply
 
@@ -1513,17 +1555,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
           val qual1 = transform(qual)
           log(">>> TypeApply: " + tree + ", qual1: " + qual1)
           specSym(qual1) match {
-            case Some(specMember) =>
+            case NoSymbol =>
+              // See pos/exponential-spec.scala - can't call transform on the whole tree again.
+              treeCopy.TypeApply(tree, treeCopy.Select(sel, qual1, name), transformTrees(targs))
+            case specMember =>
               debuglog("found " + specMember.fullName)
               ifDebug(assert(symbol.info.typeParams.length == targs.length, symbol.info.typeParams + " / " + targs))
 
               val env = typeEnv(specMember)
-              computeResidualTypeVars(tree, gen.mkAttributedSelect(qual1, specMember), targs, env)
-
-            case None =>
-              treeCopy.TypeApply(tree, treeCopy.Select(sel, qual1, name), super.transformTrees(targs))
-              // See pos/exponential-spec.scala - can't call transform on the whole tree again.
-              // super.transform(tree)
+              computeResidualTypeVars(tree, specMember, gen.mkAttributedSelect(qual1, specMember), targs, env)
           }
 
         // This rewires calls to specialized methods defined in the local scope. For example:
@@ -1536,7 +1576,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
           if (env.isEmpty) super.transform(tree)
           else {
             overloads(symbol) find (_ matchesEnv env) match {
-              case Some(Overload(specMember, _)) => computeResidualTypeVars(tree, Ident(specMember), targs, env)
+              case Some(Overload(specMember, _)) => computeResidualTypeVars(tree, specMember, Ident(specMember), targs, env)
               case _ => super.transform(tree)
             }
           }
@@ -1546,36 +1586,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
           debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.\n" + pos.lineContent)
           tree
 
-        case Select(qual, name) if name != nme.CONSTRUCTOR && specializedTypeVars(symbol.info).nonEmpty =>
-          debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe))
-          val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
-          if (env.isEmpty) super.transform(tree)
-          else {
-            val qual1 = transform(qual)
-            def reselect(member: Symbol) = {
-              val newSelect = atPos(tree.pos)(Select(qual1, member))
-              if (member.isMethod) localTyper typedOperator newSelect
-              else localTyper typed newSelect
-            }
-            overloads(symbol) find (_ matchesEnv env) match {
-              case Some(Overload(member, _)) => reselect(member)
-              case _                         =>
-                val specMember = qual1.tpe.member(specializedName(symbol, env)).suchThat(_.tpe matches subst(env, symbol.tpe))
-                if (specMember ne NoSymbol)
-                  reselect(specMember)
-                else
-                  treeCopy.Select(tree, qual1, name)
-            }
-          }
-        case Select(qual, _) =>
-          overloads(symbol) find (_ matchesSym symbol) match {
-            case Some(Overload(member, _)) =>
-              val newTree = Select(transform(qual), member)
-              debuglog(s"** routing $tree to ${member.fullName} tree: $newTree")
-              localTyper.typedOperator(atPos(tree.pos)(newTree))
-            case None =>
-              super.transform(tree)
-          }
+        case sel @ Select(_, _) =>
+          transformSelect(sel)
 
         case PackageDef(pid, stats) =>
           tree.symbol.info // make sure specializations have been performed
@@ -1600,47 +1612,37 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
           transformTemplate
 
         case ddef @ DefDef(_, _, _, vparamss, _, _) if info.isDefinedAt(symbol) =>
-          def transformDefDef = {
-          // log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol))
-          def reportTypeError(body: =>Tree) = reportError(body)(_ => ddef)
-
+        def transformDefDef = {
           if (symbol.isConstructor) {
-
-            val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperSelect, vparamss, symbol.owner))
-
+            val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperInitCall, vparamss, symbol.owner))
             if (symbol.isPrimaryConstructor)
-              localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant()))))
+              localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant(())))))
             else // duplicate the original constructor
-              reportTypeError(duplicateBody(ddef, info(symbol).target))
+              reportError(duplicateBody(ddef, info(symbol).target))(_ => ddef)
           }
           else info(symbol) match {
             case Implementation(target) =>
               assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
               // we have an rhs, specialize it
-              val tree1 = reportTypeError {
-                duplicateBody(ddef, target)
-              }
+              val tree1 = reportError(duplicateBody(ddef, target))(_ => ddef)
               debuglog("implementation: " + tree1)
               deriveDefDef(tree1)(transform)
 
             case NormalizedMember(target) =>
-              val constraints = satisfiabilityConstraints(typeEnv(symbol))
-              log("constraints: " + constraints)
-              if (target.isDeferred || constraints == None) {
-                deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called."))
-              } else {
-                // we have an rhs, specialize it
-                val tree1 = reportTypeError {
-                  duplicateBody(ddef, target, constraints.get)
-                }
-                debuglog("implementation: " + tree1)
-                deriveDefDef(tree1)(transform)
+              logResult("constraints")(satisfiabilityConstraints(typeEnv(symbol))) match {
+                case Some(constraint) if !target.isDeferred =>
+                  // we have an rhs, specialize it
+                  val tree1 = reportError(duplicateBody(ddef, target, constraint))(_ => ddef)
+                  debuglog("implementation: " + tree1)
+                  deriveDefDef(tree1)(transform)
+                case _ =>
+                  deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called."))
               }
 
             case SpecialOverride(target) =>
               assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
               //debuglog("moving implementation, body of target " + target + ": " + body(target))
-              debuglog("%s is param accessor? %b".format(ddef.symbol, ddef.symbol.isParamAccessor))
+              log("%s is param accessor? %b".format(ddef.symbol, ddef.symbol.isParamAccessor))
               // we have an rhs, specialize it
               val tree1 = addBody(ddef, target)
               (new ChangeOwnerTraverser(target, tree1.symbol))(tree1.rhs)
@@ -1650,7 +1652,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
             case SpecialOverload(original, env) =>
               debuglog("completing specialized " + symbol.fullName + " calling " + original)
               debuglog("special overload " + original + " -> " + env)
-              val t = DefDef(symbol, { vparamss =>
+              val t = DefDef(symbol, { vparamss: List[List[Symbol]] =>
                 val fun = Apply(Select(This(symbol.owner), original),
                                 makeArguments(original, vparamss.head))
 
@@ -1688,6 +1690,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
             case Abstract(targ) =>
               debuglog("abstract: " + targ)
               localTyper.typed(deriveDefDef(tree)(rhs => rhs))
+
+            case SpecialSuperAccessor(targ) =>
+              debuglog("special super accessor: " + targ + " for " + tree)
+              localTyper.typed(deriveDefDef(tree)(rhs => rhs))
           }
           }
           expandInnerNormalizedMembers(transformDefDef)
@@ -1713,7 +1719,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
           deriveValDef(newValDef)(transform)
           }
           transformValDef
-
         case _ =>
           super.transform(tree)
       }
@@ -1745,7 +1750,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
           // flag. nobody has to see this anyway :)
           sym.setFlag(SPECIALIZED)
           // create empty bodies for specializations
-          localTyper.typed(Block(norm.tail.map(sym => DefDef(sym, { vparamss => EmptyTree })), ddef))
+          localTyper.typed(Block(norm.tail.map(sym => DefDef(sym, { vparamss: List[List[Symbol]] => EmptyTree })), ddef))
         } else
           tree
       case _ =>
@@ -1783,7 +1788,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
       val symbol = tree.symbol
       debuglog("specializing body of" + symbol.defString)
       val DefDef(_, _, tparams, vparams :: Nil, tpt, _) = tree
-//      val (_, origtparams) = splitParams(source.typeParams)
       val env = typeEnv(symbol)
       val boundTvars = env.keySet
       val origtparams = source.typeParams.filter(tparam => !boundTvars(tparam) || !isPrimitiveValueType(env(tparam)))
@@ -1810,9 +1814,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
         false) // don't make private fields public
 
       val newBody = symSubstituter(body(source).duplicate)
-      tpt.tpe = tpt.tpe.substSym(oldtparams, newtparams)
-
-      copyDefDef(tree)(vparamss = List(newSyms map ValDef), rhs = newBody)
+      tpt modifyType (_.substSym(oldtparams, newtparams))
+      copyDefDef(tree)(vparamss = List(newSyms map ValDef.apply), rhs = newBody)
     }
 
     /** Create trees for specialized members of 'sClass', based on the
@@ -1849,26 +1852,24 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
             }
 
             // ctor
-            mbrs += atPos(m.pos)(DefDef(m, Modifiers(m.flags), mmap(List(vparams))(ValDef), EmptyTree))
+            mbrs += DefDef(m, Modifiers(m.flags), mmap(List(vparams))(ValDef.apply), EmptyTree)
           } else {
-            mbrs += atPos(m.pos)(DefDef(m, { paramss => EmptyTree }))
+            mbrs += DefDef(m, { paramss: List[List[Symbol]] => EmptyTree })
           }
         } else if (m.isValue) {
-          mbrs += ValDef(m, EmptyTree).setType(NoType).setPos(m.pos)
+          mbrs += ValDef(m).setType(NoType)
         } else if (m.isClass) {
 //           mbrs  +=
-//              ClassDef(m, Template(m.info.parents map TypeTree, emptyValDef, List())
+//              ClassDef(m, Template(m.info.parents map TypeTree, noSelfType, List())
 //                         .setSymbol(m.newLocalDummy(m.pos)))
 //            log("created synthetic class: " + m.fullName)
         }
       }
       if (hasSpecializedFields) {
         val isSpecializedInstance = sClass :: sClass.parentSymbols exists (_ hasFlag SPECIALIZED)
-        val sym = sClass.newMethod(nme.SPECIALIZED_INSTANCE, sClass.pos) setInfoAndEnter MethodType(Nil, BooleanClass.tpe)
+        val sym = sClass.newMethod(nme.SPECIALIZED_INSTANCE, sClass.pos) setInfoAndEnter MethodType(Nil, BooleanTpe)
 
-        mbrs += atPos(sym.pos) {
-          DefDef(sym, Literal(Constant(isSpecializedInstance)).setType(BooleanClass.tpe)).setType(NoType)
-        }
+        mbrs += DefDef(sym, Literal(Constant(isSpecializedInstance)).setType(BooleanTpe)).setType(NoType)
       }
       mbrs.toList
     }
@@ -1881,7 +1882,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
           for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) yield {
             debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env))
             val parents = specCls.info.parents.map(TypeTree)
-            ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List()))
+            ClassDef(specCls, atPos(impl.pos)(Template(parents, noSelfType, List()))
               .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos
           }
         case _ => Nil
@@ -1914,16 +1915,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
    *  }}
    */
   private def forwardCtorCall(pos: scala.reflect.internal.util.Position, receiver: Tree, paramss: List[List[ValDef]], clazz: Symbol): Tree = {
+    log(s"forwardCtorCall($pos, $receiver, $paramss, $clazz)")
 
-    /** A constructor parameter `f` initializes a specialized field
-     *  iff:
-     *    - it is specialized itself
-     *    - there is a getter for the original (non-specialized) field in the same class
-     *    - there is a getter for the specialized field in the same class
+    /* A constructor parameter `f` initializes a specialized field
+     * iff:
+     *   - it is specialized itself
+     *   - there is a getter for the original (non-specialized) field in the same class
+     *   - there is a getter for the specialized field in the same class
      */
     def initializesSpecializedField(f: Symbol) = (
          (f.name endsWith nme.SPECIALIZED_SUFFIX)
-      && clazz.info.member(nme.originalName(f.name)).isPublic
+      && clazz.info.member(f.unexpandedName).isPublic
       && clazz.info.decl(f.name).suchThat(_.isGetter) != NoSymbol
     )
 
@@ -1950,16 +1952,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
     //! TODO: make sure the param types are seen from the right prefix
     map2(fun.info.paramTypes, vparams)((tp, arg) => gen.maybeMkAsInstanceOf(Ident(arg), tp, arg.tpe))
   )
-  private def findSpec(tp: Type): Type = tp match {
-    case TypeRef(pre, sym, _ :: _) => specializedType(tp)
-    case _                         => tp
-  }
 
   class SpecializationTransformer(unit: CompilationUnit) extends Transformer {
     informProgress("specializing " + unit)
     override def transform(tree: Tree) = {
-      val resultTree = if (settings.nospecialization.value) tree
-      else afterSpecialize(specializeCalls(unit).transform(tree))
+      val resultTree = if (settings.nospecialization) tree
+      else exitingSpecialize(specializeCalls(unit).transform(tree))
 
       // Remove the final modifier and @inline annotation from anything in the
      // original class (since it's being overridden in at least one subclass).
@@ -1977,13 +1975,5 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
       }
 
       resultTree
-    }
-  }
-
-  def printSpecStats() {
-    println("    concreteSpecMembers: %7d".format(concreteSpecMethods.size))
-    println("    overloads:           %7d".format(overloads.size))
-    println("    typeEnv:             %7d".format(typeEnv.size))
-    println("    info:                %7d".format(info.size))
-  }
+    }
+  }
 }
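
As a rough illustration of the call-site rewiring implemented above (the method and argument names below are made up; only the $mIc$sp suffix follows the example given in the comment on computeResidualTypeVars):

    // Illustrative sketch, not part of this patch.
    object SpecExample {
      // T is specialized for Int; U stays generic and becomes a "residual" type parameter.
      def specMe[@specialized(Int) T, U](t: T, u: U): String = s"$t/$u"

      def main(args: Array[String]): Unit = {
        // Written as specMe[Int, String](1, "2"); after the specialization phase the call is
        // rewired to the synthetic variant specMe$mIc$sp[String](1, "2"), keeping only the
        // residual type argument String.
        println(specMe(1, "2"))
      }
    }
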
diff --git a/src/compiler/scala/tools/nsc/transform/Statics.scala b/src/compiler/scala/tools/nsc/transform/Statics.scala
new file mode 100644
index 0000000..e2508b8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/Statics.scala
@@ -0,0 +1,52 @@
+package scala.tools.nsc
+package transform
+
+import symtab._
+import Flags._
+
+import collection.mutable.Buffer
+
+abstract class Statics extends Transform with ast.TreeDSL {
+  import global._
+
+  class StaticsTransformer extends Transformer {
+
+    /** finds the static ctor DefDef tree within the template if it exists. */
+    def findStaticCtor(template: Template): Option[Tree] =
+      template.body find {
+        case defdef @ DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => defdef.symbol.hasStaticFlag
+        case _ => false
+      }
+
+    /** changes the template for the class so that it contains a static constructor with symbol fields inits,
+      * augments an existing static ctor if one already existed.
+      */
+    def addStaticInits(template: Template, newStaticInits: Buffer[Tree], localTyper: analyzer.Typer): Template = {
+      if (newStaticInits.isEmpty)
+        template
+      else {
+        val newCtor = findStaticCtor(template) match {
+          // in case there already were static ctors - augment existing ones
+          // currently, however, static ctors aren't being generated anywhere else
+          case Some(ctor @ DefDef(_,_,_,_,_,_)) =>
+            // modify existing static ctor
+            deriveDefDef(ctor) {
+              case block @ Block(stats, expr) =>
+                // need to add inits to existing block
+                treeCopy.Block(block, newStaticInits.toList ::: stats, expr)
+              case term: TermTree =>
+                // need to create a new block with inits and the old term
+                treeCopy.Block(term, newStaticInits.toList, term)
+            }
+          case _ =>
+            // create new static ctor
+            val staticCtorSym  = currentClass.newStaticConstructor(template.pos)
+            val rhs            = Block(newStaticInits.toList, Literal(Constant(())))
+
+            localTyper.typedPos(template.pos)(DefDef(staticCtorSym, rhs))
+        }
+        deriveTemplate(template)(newCtor :: _)
+      }
+    }
+  }
+}
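
A self-contained sketch of the augment-or-create decision in addStaticInits above, with plain strings standing in for compiler trees (purely illustrative):

    object StaticInitsModel {
      final case class StaticCtor(body: List[String])

      // If a static constructor already exists, prepend the new inits to its body;
      // otherwise create a fresh one whose body ends in a unit literal, mirroring
      // Block(newStaticInits.toList, Literal(Constant(()))).
      def addStaticInits(existing: Option[StaticCtor], newInits: List[String]): Option[StaticCtor] =
        if (newInits.isEmpty) existing
        else Some(existing match {
          case Some(ctor) => ctor.copy(body = newInits ::: ctor.body) // augment existing static ctor
          case None       => StaticCtor(newInits :+ "()")             // new ctor, unit result
        })

      def main(args: Array[String]): Unit = {
        println(addStaticInits(None, List("init x")))
        println(addStaticInits(Some(StaticCtor(List("old init"))), List("init x")))
      }
    }
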
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 9384992..714f189 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -3,7 +3,8 @@
  * @author Iulian Dragos
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package transform
 
 import symtab.Flags
@@ -17,7 +18,7 @@ import Flags.SYNTHETIC
 abstract class TailCalls extends Transform {
   import global._                     // the global environment
   import definitions._                // standard classes and methods
-  import typer.{ typed, typedPos }    // methods to type trees
+  import typer.typedPos               // methods to type trees
 
   val phaseName: String = "tailcalls"
 
@@ -31,7 +32,7 @@ abstract class TailCalls extends Transform {
   class Phase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) {
     def apply(unit: global.CompilationUnit) {
       if (!(settings.debuginfo.value == "notailcalls")) {
-        newTransformer(unit).transformUnit(unit);
+        newTransformer(unit).transformUnit(unit)
       }
     }
   }
@@ -82,104 +83,130 @@ abstract class TailCalls extends Transform {
    *   that label.
    * </p>
    * <p>
-   *   Assumes: <code>Uncurry</code> has been run already, and no multiple
+   *   Assumes: `Uncurry` has been run already, and no multiple
   *            parameter lists exist.
    * </p>
    */
   class TailCallElimination(unit: CompilationUnit) extends Transformer {
-    private val defaultReason = "it contains a recursive call not in tail position"
+    private def defaultReason = "it contains a recursive call not in tail position"
+    private val failPositions = perRunCaches.newMap[TailContext, Position]() withDefault (_.methodPos)
+    private val failReasons   = perRunCaches.newMap[TailContext, String]() withDefaultValue defaultReason
+    private def tailrecFailure(ctx: TailContext) {
+      val method      = ctx.method
+      val failReason  = failReasons(ctx)
+      val failPos     = failPositions(ctx)
+
+      unit.error(failPos, s"could not optimize @tailrec annotated $method: $failReason")
+    }
 
     /** Has the label been accessed? Then its symbol is in this set. */
-    private val accessed = new scala.collection.mutable.HashSet[Symbol]()
+    private val accessed = perRunCaches.newSet[Symbol]()
     // `accessed` was stored as boolean in the current context -- this is no longer tenable
     // with jumps to labels in tailpositions now considered in tailposition,
     // a downstream context may access the label, and the upstream one will be none the wiser
     // this is necessary because tail-calls may occur in places where syntactically they seem impossible
     // (since we now consider jumps to labels that are in tailposition, such as matchEnd(x) {x})
 
+    sealed trait TailContext {
+      def method: Symbol          // current method
+      def tparams: List[Symbol]   // type parameters
+      def methodPos: Position     // default position for failure reporting
+      def tailPos: Boolean        // context is in tail position
+      def label: Symbol           // new label, tail call target
+      def tailLabels: Set[Symbol]
+
+      def enclosingType = method.enclClass.typeOfThis
+      def isEligible    = method.isEffectivelyFinalOrNotOverridden
+      def isMandatory   = method.hasAnnotation(TailrecClass)
+      def isTransformed = isEligible && accessed(label)
+
+      def newThis(pos: Position) = {
+        def msg = "Creating new `this` during tailcalls\n  method: %s\n  current class: %s".format(
+          method.ownerChain.mkString(" -> "),
+          currentClass.ownerChain.mkString(" -> ")
+        )
+        logResult(msg)(method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis)
+      }
+      override def toString = s"${method.name} tparams=$tparams tailPos=$tailPos label=$label label info=${label.info}"
 
-    class Context() {
-      /** The current method */
-      var method: Symbol = NoSymbol
-
-      // symbols of label defs in this method that are in tail position
-      var tailLabels: Set[Symbol] = Set()
-
-      /** The current tail-call label */
-      var label: Symbol = NoSymbol
-
-      /** The expected type arguments of self-recursive calls */
-      var tparams: List[Symbol] = Nil
-
-      /** Tells whether we are in a (possible) tail position */
-      var tailPos = false
+    }
 
-      /** The reason this method could not be optimized. */
-      var failReason = defaultReason
-      var failPos    = method.pos
+    object EmptyTailContext extends TailContext {
+      def method     = NoSymbol
+      def tparams    = Nil
+      def methodPos  = NoPosition
+      def tailPos    = false
+      def label      = NoSymbol
+      def tailLabels = Set.empty[Symbol]
+    }
 
-      def this(that: Context) = {
-        this()
-        this.method   = that.method
-        this.tparams  = that.tparams
-        this.tailPos  = that.tailPos
-        this.failPos  = that.failPos
-        this.label    = that.label
-        this.tailLabels = that.tailLabels
+    class DefDefTailContext(dd: DefDef) extends TailContext {
+      def method    = dd.symbol
+      def tparams   = dd.tparams map (_.symbol)
+      def methodPos = dd.pos
+      def tailPos   = true
+
+      lazy val label      = mkLabel()
+      lazy val tailLabels = {
+        // labels are local to a method, so only traverse the rhs of a defdef
+        val collector = new TailPosLabelsTraverser
+        collector traverse dd.rhs
+        collector.tailLabels.toSet
       }
-      def this(dd: DefDef) {
-        this()
-        this.method   = dd.symbol
-        this.tparams  = dd.tparams map (_.symbol)
-        this.tailPos  = true
-        this.failPos  = dd.pos
-
-        /** Create a new method symbol for the current method and store it in
-          * the label field.
-          */
-        this.label    = {
-          val label     = method.newLabel(newTermName("_" + method.name), method.pos)
-          val thisParam = method.newSyntheticValueParam(currentClass.typeOfThis)
-          label setInfo MethodType(thisParam :: method.tpe.params, method.tpe.finalResultType)
-        }
+
+      private def mkLabel() = {
+        val label     = method.newLabel(newTermName("_" + method.name), method.pos)
+        val thisParam = method.newSyntheticValueParam(currentClass.typeOfThis)
+        label setInfo MethodType(thisParam :: method.tpe.params, method.tpe_*.finalResultType)
         if (isEligible)
           label substInfo (method.tpe.typeParams, tparams)
-      }
 
-      def enclosingType    = method.enclClass.typeOfThis
-      def methodTypeParams = method.tpe.typeParams
-      def isEligible       = method.isEffectivelyFinal
-      // @tailrec annotation indicates mandatory transformation
-      def isMandatory      = method.hasAnnotation(TailrecClass) && !forMSIL
-      def isTransformed    = isEligible && accessed(label)
-      def tailrecFailure() = unit.error(failPos, "could not optimize @tailrec annotated " + method + ": " + failReason)
-
-      def newThis(pos: Position) = logResult("Creating new `this` during tailcalls\n  method: %s\n  current class: %s".format(
-        method.ownerChain.mkString(" -> "), currentClass.ownerChain.mkString(" -> "))) {
-          method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis
+        label
       }
-
-      override def toString(): String = (
-        "" + method.name + " tparams: " + tparams + " tailPos: " + tailPos +
-        " Label: " + label + " Label type: " + label.info
-      )
+      private def isRecursiveCall(t: Tree) = {
+        val receiver = t.symbol
+
+        (    (receiver != null)
+          && receiver.isMethod
+          && (method.name == receiver.name)
+          && (method.enclClass isSubClass receiver.enclClass)
+        )
+      }
+      def containsRecursiveCall(t: Tree) = t exists isRecursiveCall
+    }
+    class ClonedTailContext(that: TailContext, override val tailPos: Boolean) extends TailContext {
+      def method     = that.method
+      def tparams    = that.tparams
+      def methodPos  = that.methodPos
+      def tailLabels = that.tailLabels
+      def label      = that.label
     }
 
-    private var ctx: Context = new Context()
-    private def noTailContext() = {
-      val t = new Context(ctx)
-      t.tailPos = false
-      t
+    private var ctx: TailContext = EmptyTailContext
+    private def noTailContext()  = new ClonedTailContext(ctx, tailPos = false)
+    private def yesTailContext() = new ClonedTailContext(ctx, tailPos = true)
+
+
+    override def transformUnit(unit: CompilationUnit): Unit = {
+      try {
+        super.transformUnit(unit)
+      } finally {
+        // OPT clear these after each compilation unit
+        failPositions.clear()
+        failReasons.clear()
+        accessed.clear()
+      }
     }
 
     /** Rewrite this tree to contain no tail recursive calls */
-    def transform(tree: Tree, nctx: Context): Tree = {
+    def transform(tree: Tree, nctx: TailContext): Tree = {
       val saved = ctx
       ctx = nctx
       try transform(tree)
       finally this.ctx = saved
     }
 
+    def yesTailTransform(tree: Tree): Tree = transform(tree, yesTailContext())
     def noTailTransform(tree: Tree): Tree = transform(tree, noTailContext())
     def noTailTransforms(trees: List[Tree]) = {
       val nctx = noTailContext()
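
The noTailContext/yesTailContext cloning above tracks which sub-trees count as tail positions. A user-level sketch of what that distinction means for @tailrec (method names are hypothetical):

    import scala.annotation.tailrec

    object TailPositionSketch {
      // Accepted: the recursive call is the last thing evaluated on its branch.
      @tailrec def sum(xs: List[Int], acc: Int = 0): Int = xs match {
        case Nil       => acc
        case x :: rest => sum(rest, acc + x)
      }

      // Would be rejected with "it contains a recursive call not in tail position":
      // the addition still runs after sumBad returns, so the recursive call sits in an
      // argument position and is visited under a no-tail context (cf. noTailTransforms).
      // @tailrec
      def sumBad(xs: List[Int]): Int = xs match {
        case Nil       => 0
        case x :: rest => x + sumBad(rest)
      }
    }
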
@@ -187,38 +214,33 @@ abstract class TailCalls extends Transform {
     }
 
     override def transform(tree: Tree): Tree = {
-      /** A possibly polymorphic apply to be considered for tail call transformation.
-       */
+      /* A possibly polymorphic apply to be considered for tail call transformation. */
       def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree]) = {
         val receiver: Tree = fun match {
           case Select(qual, _)  => qual
           case _                => EmptyTree
         }
-
         def receiverIsSame    = ctx.enclosingType.widen =:= receiver.tpe.widen
         def receiverIsSuper   = ctx.enclosingType.widen <:< receiver.tpe.widen
         def isRecursiveCall   = (ctx.method eq fun.symbol) && ctx.tailPos
         def transformArgs     = noTailTransforms(args)
         def matchesTypeArgs   = ctx.tparams sameElements (targs map (_.tpe.typeSymbol))
 
-        /** Records failure reason in Context for reporting.
-         *  Position is unchanged (by default, the method definition.)
+        /* Records failure reason in Context for reporting.
+         * Position is unchanged (by default, the method definition.)
          */
         def fail(reason: String) = {
           debuglog("Cannot rewrite recursive call at: " + fun.pos + " because: " + reason)
-
-          ctx.failReason = reason
+          if (ctx.isMandatory) failReasons(ctx) = reason
           treeCopy.Apply(tree, noTailTransform(target), transformArgs)
         }
-        /** Position of failure is that of the tree being considered.
-         */
+        /* Position of failure is that of the tree being considered. */
         def failHere(reason: String) = {
-          ctx.failPos = fun.pos
+          if (ctx.isMandatory) failPositions(ctx) = fun.pos
           fail(reason)
         }
         def rewriteTailCall(recv: Tree): Tree = {
           debuglog("Rewriting tail recursive call:  " + fun.pos.lineContent.trim)
-
           accessed += ctx.label
           typedPos(fun.pos) {
             val args = mapWithIndex(transformArgs)((arg, i) => mkAttributedCastHack(arg, ctx.label.info.params(i + 1).tpe))
@@ -228,15 +250,20 @@ abstract class TailCalls extends Transform {
 
         if (!ctx.isEligible)            fail("it is neither private nor final so can be overridden")
         else if (!isRecursiveCall) {
-          if (receiverIsSuper)          failHere("it contains a recursive call targeting supertype " + receiver.tpe)
+          if (ctx.isMandatory && receiverIsSuper) // OPT expensive check, avoid unless we will actually report the error
+                                        failHere("it contains a recursive call targeting a supertype")
           else                          failHere(defaultReason)
         }
         else if (!matchesTypeArgs)      failHere("it is called recursively with different type arguments")
         else if (receiver == EmptyTree) rewriteTailCall(This(currentClass))
-        else if (forMSIL)               fail("it cannot be optimized on MSIL")
         else if (!receiverIsSame)       failHere("it changes type of 'this' on a polymorphic recursive call")
         else                            rewriteTailCall(receiver)
       }
+      
+      def isEligible(tree: DefDef) = {
+        val sym = tree.symbol
+        !(sym.hasAccessorFlag || sym.isConstructor)
+      }
 
       tree match {
         case ValDef(_, _, _, _) =>
@@ -245,37 +272,23 @@ abstract class TailCalls extends Transform {
 
           super.transform(tree)
 
-        case dd @ DefDef(_, _, _, vparamss0, _, rhs0) if !dd.symbol.hasAccessorFlag =>
-          val newCtx = new Context(dd)
-          def isRecursiveCall(t: Tree) = {
-            val sym = t.symbol
-            (sym != null) && {
-              sym.isMethod && (dd.symbol.name == sym.name) && (dd.symbol.enclClass isSubClass sym.enclClass)
-            }
-          }
-          if (newCtx.isMandatory) {
-            if (!rhs0.exists(isRecursiveCall)) {
-              unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
-            }
-          }
-
-          // labels are local to a method, so only traverse the rhs of a defdef
-          val collectTailPosLabels = new TailPosLabelsTraverser
-          collectTailPosLabels traverse rhs0
-          newCtx.tailLabels = collectTailPosLabels.tailLabels.toSet
+        case dd @ DefDef(_, name, _, vparamss0, _, rhs0) if isEligible(dd) =>
+          val newCtx = new DefDefTailContext(dd)
+          if (newCtx.isMandatory && !(newCtx containsRecursiveCall rhs0))
+            unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
 
-          debuglog("Considering " + dd.name + " for tailcalls, with labels in tailpos: "+ newCtx.tailLabels)
+          debuglog(s"Considering $name for tailcalls, with labels in tailpos: ${newCtx.tailLabels}")
           val newRHS = transform(rhs0, newCtx)
 
-          deriveDefDef(tree){rhs =>
+          deriveDefDef(tree) { rhs =>
             if (newCtx.isTransformed) {
-              /** We have rewritten the tree, but there may be nested recursive calls remaining.
-               *  If @tailrec is given we need to fail those now.
+              /* We have rewritten the tree, but there may be nested recursive calls remaining.
+               * If @tailrec is given we need to fail those now.
                */
               if (newCtx.isMandatory) {
                 for (t @ Apply(fn, _) <- newRHS ; if fn.symbol == newCtx.method) {
-                  newCtx.failPos = t.pos
-                  newCtx.tailrecFailure()
+                  failPositions(newCtx) = t.pos
+                  tailrecFailure(newCtx)
                 }
               }
               val newThis = newCtx.newThis(tree.pos)
@@ -287,8 +300,8 @@ abstract class TailCalls extends Transform {
               ))
             }
             else {
-              if (newCtx.isMandatory && newRHS.exists(isRecursiveCall))
-                newCtx.tailrecFailure()
+              if (newCtx.isMandatory && (newCtx containsRecursiveCall newRHS))
+                tailrecFailure(newCtx)
 
               newRHS
             }
@@ -349,27 +362,25 @@ abstract class TailCalls extends Transform {
         case Apply(tapply @ TypeApply(fun, targs), vargs) =>
           rewriteApply(tapply, fun, targs, vargs)
 
-        case Apply(fun, args) =>
-          if (fun.symbol == Boolean_or || fun.symbol == Boolean_and)
-            treeCopy.Apply(tree, fun, transformTrees(args))
-          else if (fun.symbol.isLabel && args.nonEmpty && args.tail.isEmpty && ctx.tailLabels(fun.symbol)) {
-            // this is to detect tailcalls in translated matches
-            // it's a one-argument call to a label that is in a tailposition and that looks like label(x) {x}
-            // thus, the argument to the call is in tailposition
-            val saved = ctx.tailPos
-            ctx.tailPos = true
-            debuglog("in tailpos label: "+ args.head)
-            val res = transform(args.head)
-            ctx.tailPos = saved
-            if (res ne args.head) {
-              // we tail-called -- TODO: shield from false-positives where we rewrite but don't tail-call
-              // must leave the jump to the original tailpos-label (fun)!
-              // there might be *a* tailcall *in* res, but it doesn't mean res *always* tailcalls
-              treeCopy.Apply(tree, fun, List(res))
-            }
-            else rewriteApply(fun, fun, Nil, args)
-          } else rewriteApply(fun, fun, Nil, args)
+        case Apply(fun, args) if fun.symbol == Boolean_or || fun.symbol == Boolean_and =>
+          treeCopy.Apply(tree, fun, transformTrees(args))
+
+        // this is to detect tailcalls in translated matches
+        // it's a one-argument call to a label that is in a tailposition and that looks like label(x) {x}
+        // thus, the argument to the call is in tailposition
+        case Apply(fun, args @ (arg :: Nil)) if fun.symbol.isLabel && ctx.tailLabels(fun.symbol) =>
+          debuglog(s"in tailpos label: $arg")
+          val res = yesTailTransform(arg)
+          // we tail-called -- TODO: shield from false-positives where we rewrite but don't tail-call
+          // must leave the jump to the original tailpos-label (fun)!
+          // there might be *a* tailcall *in* res, but it doesn't mean res *always* tailcalls
+          if (res ne arg)
+            treeCopy.Apply(tree, fun, res :: Nil)
+          else
+            rewriteApply(fun, fun, Nil, args)
 
+        case Apply(fun, args) =>
+          rewriteApply(fun, fun, Nil, args)
         case Alternative(_) | Star(_) | Bind(_, _) =>
           sys.error("We should've never gotten inside a pattern")
         case Select(qual, name) =>
@@ -404,7 +415,7 @@ abstract class TailCalls extends Transform {
       finally maybeTail = saved
     }
 
-    def traverseNoTail(tree: Tree) = traverse(tree, false)
+    def traverseNoTail(tree: Tree) = traverse(tree, maybeTailNew = false)
     def traverseTreesNoTail(trees: List[Tree]) = trees foreach traverseNoTail
 
     override def traverse(tree: Tree) = tree match {
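
The eligibility check above (isEligible / isEffectivelyFinalOrNotOverridden) at the source level; class and method names here are hypothetical:

    import scala.annotation.tailrec

    class Counter {
      // Typically rejected with "it is neither private nor final so can be overridden":
      // a subclass could override countDown, so the self-call cannot become a jump.
      // @tailrec
      def countDown(n: Int): Int = if (n <= 0) 0 else countDown(n - 1)

      // Fine once the method is effectively final.
      @tailrec final def countDownFinal(n: Int): Int = if (n <= 0) 0 else countDownFinal(n - 1)
    }
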
diff --git a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
new file mode 100644
index 0000000..f83b6f8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
@@ -0,0 +1,187 @@
+package scala.tools.nsc
+package transform
+
+import scala.reflect.internal._
+import scala.tools.nsc.ast.TreeDSL
+import scala.tools.nsc.Global
+
+/**
+ * A trait usable by transforms that need to adapt trees of one type to another type
+ */
+trait TypeAdaptingTransformer {
+  self: TreeDSL =>
+
+  val analyzer: typechecker.Analyzer { val global: self.global.type }
+
+  trait TypeAdapter {
+    val typer: analyzer.Typer
+    import global._
+    import definitions._
+    import CODE._
+
+    def isMethodTypeWithEmptyParams(tpe: Type) = tpe match {
+      case MethodType(Nil, _) => true
+      case _                  => false
+    }
+
+    private def isSafelyRemovableUnbox(fn: Tree, arg: Tree): Boolean = {
+     currentRun.runDefinitions.isUnbox(fn.symbol) && {
+      val cls = arg.tpe.typeSymbol
+      (cls == definitions.NullClass) || isBoxedValueClass(cls)
+     }
+    }
+
+    private def isPrimitiveValueType(tpe: Type) = isPrimitiveValueClass(tpe.typeSymbol)
+
+    private def isErasedValueType(tpe: Type) = tpe.isInstanceOf[ErasedValueType]
+
+    private def isDifferentErasedValueType(tpe: Type, other: Type) =
+      isErasedValueType(tpe) && (tpe ne other)
+
+    def isPrimitiveValueMember(sym: Symbol) = isPrimitiveValueClass(sym.owner)
+
+    @inline def box(tree: Tree, target: => String): Tree = {
+      val result = box1(tree)
+      if (tree.tpe =:= UnitTpe) ()
+      else log(s"boxing ${tree.summaryString}: ${tree.tpe} into $target: ${result.tpe}")
+      result
+    }
+
+    /** Box `tree` of unboxed type */
+    private def box1(tree: Tree): Tree = tree match {
+      case LabelDef(_, _, _) =>
+        val ldef = deriveLabelDef(tree)(box1)
+        ldef setType ldef.rhs.tpe
+      case _ =>
+        val tree1 = tree.tpe match {
+          case ErasedValueType(clazz, _) =>
+            New(clazz, cast(tree, underlyingOfValueClass(clazz)))
+          case _ =>
+            tree.tpe.typeSymbol match {
+              case UnitClass  =>
+                if (treeInfo isExprSafeToInline tree) REF(BoxedUnit_UNIT)
+                else BLOCK(tree, REF(BoxedUnit_UNIT))
+              case NothingClass => tree // a non-terminating expression doesn't need boxing
+              case x          =>
+                assert(x != ArrayClass)
+                tree match {
+                  /* Can't always remove a Box(Unbox(x)) combination because the process of boxing x
+                   * may lead to throwing an exception.
+                   *
+                   * This is important for specialization: calls to the super constructor should not box/unbox specialized
+                   * fields (see TupleX). (ID)
+                   */
+                  case Apply(boxFun, List(arg)) if isSafelyRemovableUnbox(tree, arg) =>
+                    log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}")
+                    arg
+                  case _ =>
+                    (REF(currentRun.runDefinitions.boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectTpe
+                }
+            }
+        }
+        typer.typedPos(tree.pos)(tree1)
+    }
+
+    def unbox(tree: Tree, pt: Type): Tree = {
+      val result = unbox1(tree, pt)
+      log(s"unboxing ${tree.shortClass}: ${tree.tpe} as a ${result.tpe}")
+      result
+    }
+
+    /** Unbox `tree` of boxed type to expected type `pt`.
+     *
+     *  @param tree the given tree
+     *  @param pt   the expected type.
+     *  @return     the unboxed tree
+     */
+    private def unbox1(tree: Tree, pt: Type): Tree = tree match {
+/*
+      case Boxed(unboxed) =>
+        println("unbox shorten: "+tree) // this never seems to kick in during build and test; therefore disabled.
+        adaptToType(unboxed, pt)
+ */
+      case LabelDef(_, _, _) =>
+        val ldef = deriveLabelDef(tree)(unbox(_, pt))
+        ldef setType ldef.rhs.tpe
+      case _ =>
+        val tree1 = pt match {
+          case ErasedValueType(clazz, underlying) =>
+            val tree0 =
+              if (tree.tpe.typeSymbol == NullClass &&
+                  isPrimitiveValueClass(underlying.typeSymbol)) {
+                // convert `null` directly to underlying type, as going
+                // via the unboxed type would yield a NPE (see SI-5866)
+                unbox1(tree, underlying)
+              } else
+                Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List())
+            cast(tree0, pt)
+          case _ =>
+            pt.typeSymbol match {
+              case UnitClass  =>
+                if (treeInfo isExprSafeToInline tree) UNIT
+                else BLOCK(tree, UNIT)
+              case x          =>
+                assert(x != ArrayClass)
+                // don't `setType pt` the Apply tree, as the Apply's fun won't be typechecked if the Apply tree already has a type
+                Apply(currentRun.runDefinitions.unboxMethod(pt.typeSymbol), tree)
+            }
+        }
+        typer.typedPos(tree.pos)(tree1)
+    }
+
+    /** Generate a synthetic cast operation from tree.tpe to pt.
+     *  @pre pt eq pt.normalize
+     */
+    def cast(tree: Tree, pt: Type): Tree = {
+      if ((tree.tpe ne null) && !(tree.tpe =:= ObjectTpe)) {
+        def word = (
+          if (tree.tpe <:< pt) "upcast"
+          else if (pt <:< tree.tpe) "downcast"
+          else if (pt weak_<:< tree.tpe) "coerce"
+          else if (tree.tpe weak_<:< pt) "widen"
+          else "cast"
+        )
+        log(s"erasure ${word}s from ${tree.tpe} to $pt")
+      }
+      if (pt =:= UnitTpe) {
+        // See SI-4731 for one example of how this occurs.
+        log("Attempted to cast to Unit: " + tree)
+        tree.duplicate setType pt
+      } else if (tree.tpe != null && tree.tpe.typeSymbol == ArrayClass && pt.typeSymbol == ArrayClass) {
+        // See SI-2386 for one example of when this might be necessary.
+        val needsExtraCast = isPrimitiveValueType(tree.tpe.typeArgs.head) && !isPrimitiveValueType(pt.typeArgs.head)
+        val tree1 = if (needsExtraCast) gen.mkRuntimeCall(nme.toObjectArray, List(tree)) else tree
+        gen.mkAttributedCast(tree1, pt)
+      } else gen.mkAttributedCast(tree, pt)
+    }
+
+    /** Adapt `tree` to expected type `pt`.
+     *
+     *  @param tree the given tree
+     *  @param pt   the expected type
+     *  @return     the adapted tree
+     */
+    def adaptToType(tree: Tree, pt: Type): Tree = {
+      if (settings.debug && pt != WildcardType)
+        log("adapting " + tree + ":" + tree.tpe + " : " +  tree.tpe.parents + " to " + pt)//debug
+      if (tree.tpe <:< pt)
+        tree
+      else if (isDifferentErasedValueType(tree.tpe, pt))
+        adaptToType(box(tree, pt.toString), pt)
+      else if (isDifferentErasedValueType(pt, tree.tpe))
+        adaptToType(unbox(tree, pt), pt)
+      else if (isPrimitiveValueType(tree.tpe) && !isPrimitiveValueType(pt)) {
+        adaptToType(box(tree, pt.toString), pt)
+      } else if (isMethodTypeWithEmptyParams(tree.tpe)) {
+        // [H] this assert fails when trying to typecheck tree !(SomeClass.this.bitmap) for single lazy val
+        //assert(tree.symbol.isStable, "adapt "+tree+":"+tree.tpe+" to "+pt)
+        adaptToType(Apply(tree, List()) setPos tree.pos setType tree.tpe.resultType, pt)
+//      } else if (pt <:< tree.tpe)
+//        cast(tree, pt)
+      } else if (isPrimitiveValueType(pt) && !isPrimitiveValueType(tree.tpe))
+        adaptToType(unbox(tree, pt), pt)
+      else
+        cast(tree, pt)
+    }
+  }
+}
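
At the source level, the adaptations performed by box1/unbox1/adaptToType correspond roughly to the following (illustrative sketch; Meters and asObject are made-up names):

    object AdaptSketch {
      // A value class: after erasure its values flow around as the underlying Int, and
      // box1 re-wraps them (New(clazz, ...) above) only where a reference type is required.
      final class Meters(val value: Int) extends AnyVal

      def asObject(x: Any): Any = x

      def main(args: Array[String]): Unit = {
        val m = new Meters(42)
        // Passing an Int where Any is expected: the primitive is boxed (boxMethod in box1);
        // reading it back as an Int goes through unboxMethod in unbox1.
        val boxed: Any = asObject(1 + 1)
        println(boxed.asInstanceOf[Int] + m.value)
      }
    }
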
diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
index c7bc16f..3feadcd 100644
--- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
+++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
@@ -6,8 +6,6 @@
 package scala.tools.nsc
 package transform
 
-import scala.collection.{ mutable, immutable }
-
 /** A base class for transforms.
  *  A transform contains a compiler phase which applies a tree transformer.
  */
@@ -19,17 +17,15 @@ trait TypingTransformers {
   abstract class TypingTransformer(unit: CompilationUnit) extends Transformer {
     var localTyper: analyzer.Typer =
       if (phase.erasedTypes)
-        erasure.newTyper(erasure.rootContext(unit, EmptyTree, true)).asInstanceOf[analyzer.Typer]
+        erasure.newTyper(erasure.rootContext(unit, EmptyTree, erasedTypes = true)).asInstanceOf[analyzer.Typer]
       else
         analyzer.newTyper(analyzer.rootContext(unit, EmptyTree, true))
     protected var curTree: Tree = _
-    protected def typedPos(pos: Position)(tree: Tree) = localTyper typed { atPos(pos)(tree) }
 
     override final def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans)
 
     def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = {
       val savedLocalTyper = localTyper
-//      println("transformer atOwner: " + owner + " isPackage? " + owner.isPackage)
       localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner)
       val result = super.atOwner(owner)(trans)
       localTyper = savedLocalTyper
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index e61b35a..d77c6b5 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -3,7 +3,8 @@
  * @author
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package transform
 
 import symtab.Flags._
@@ -61,27 +62,9 @@ abstract class UnCurry extends InfoTransform
 
 // uncurry and uncurryType expand type aliases
 
-  /** Traverse tree omitting local method definitions.
-   *  If a `return` is encountered, set `returnFound` to true.
-   *  Used for MSIL only.
-   */
-  private object lookForReturns extends Traverser {
-    var returnFound = false
-    override def traverse(tree: Tree): Unit =  tree match {
-      case Return(_) => returnFound = true
-      case DefDef(_, _, _, _, _, _) => ;
-      case _ => super.traverse(tree)
-    }
-    def found(tree: Tree) = {
-      returnFound = false
-      traverse(tree)
-      returnFound
-    }
-  }
-
   class UnCurryTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+    private val inlineFunctionExpansion = settings.Ydelambdafy.value == "inline"
     private var needTryLift       = false
-    private var inPattern         = false
     private var inConstructorFlag = 0L
     private val byNameArgs        = mutable.HashSet[Tree]()
     private val noApply           = mutable.HashSet[Tree]()
@@ -96,12 +79,6 @@ abstract class UnCurry extends InfoTransform
     @inline private def useNewMembers[T](owner: Symbol)(f: List[Tree] => T): T =
       f(newMembers.remove(owner).getOrElse(Nil).toList)
 
-    @inline private def withInPattern[T](value: Boolean)(body: => T): T = {
-      inPattern = value
-      try body
-      finally inPattern = !value
-    }
-
     private def newFunction0(body: Tree): Tree = {
       val result = localTyper.typedPos(body.pos)(Function(Nil, body)).asInstanceOf[Function]
       log("Change owner from %s to %s in %s".format(currentOwner, result.symbol, result.body))
@@ -109,11 +86,6 @@ abstract class UnCurry extends InfoTransform
       transformFunction(result)
     }
 
-    private lazy val serialVersionUIDAnnotation =
-      AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List())
-
-    private var nprinted = 0
-
     // I don't have a clue why I'm catching TypeErrors here, but it's better
     // than spewing stack traces at end users for internal errors. Examples
     // which hit at this point should not be hard to come by, but the immediate
@@ -133,21 +105,11 @@ abstract class UnCurry extends InfoTransform
      */
     def isByNameRef(tree: Tree) = (
          tree.isTerm
+      && (tree.symbol ne null)
+      && (isByName(tree.symbol))
       && !byNameArgs(tree)
-      && tree.hasSymbolWhich(s => isByNameParamType(s.tpe))
     )
 
-    /** Uncurry a type of a tree node.
-     *  This function is sensitive to whether or not we are in a pattern -- when in a pattern
-     *  additional parameter sections of a case class are skipped.
-     */
-    def uncurryTreeType(tp: Type): Type = tp match {
-      case MethodType(params, MethodType(params1, restpe)) if inPattern =>
-        uncurryTreeType(MethodType(params, restpe))
-      case _ =>
-        uncurry(tp)
-    }
-
 // ------- Handling non-local returns -------------------------------------------------
 
     /** The type of a non-local return expression with given argument type */
@@ -160,7 +122,7 @@ abstract class UnCurry extends InfoTransform
     /** Return non-local return key for given method */
     private def nonLocalReturnKey(meth: Symbol) =
       nonLocalReturnKeys.getOrElseUpdate(meth,
-        meth.newValue(unit.freshTermName("nonLocalReturnKey"), meth.pos, SYNTHETIC) setInfo ObjectClass.tpe
+        meth.newValue(unit.freshTermName("nonLocalReturnKey"), meth.pos, SYNTHETIC) setInfo ObjectTpe
       )
 
     /** Generate a non-local return throw with given return expression from given method.
@@ -193,18 +155,28 @@ abstract class UnCurry extends InfoTransform
      */
     private def nonLocalReturnTry(body: Tree, key: Symbol, meth: Symbol) = {
       localTyper typed {
-        val extpe   = nonLocalReturnExceptionType(meth.tpe.finalResultType)
+        val restpe  = meth.tpe_*.finalResultType
+        val extpe   = nonLocalReturnExceptionType(restpe)
         val ex      = meth.newValue(nme.ex, body.pos) setInfo extpe
-        val argType = meth.tpe.finalResultType withAnnotation (AnnotationInfo marker UncheckedClass.tpe)
+        val argType = restpe withAnnotation (AnnotationInfo marker UncheckedClass.tpe)
         val pat     = gen.mkBindForCase(ex, NonLocalReturnControlClass, List(argType))
-        val rhs = (
+        val rhs     = (
           IF   ((ex DOT nme.key)() OBJ_EQ Ident(key))
           THEN ((ex DOT nme.value)())
           ELSE (Throw(Ident(ex)))
         )
-        val keyDef   = ValDef(key, New(ObjectClass.tpe))
+        val keyDef   = ValDef(key, New(ObjectTpe))
         val tryCatch = Try(body, pat -> rhs)
 
+        import treeInfo.{catchesThrowable, isSyntheticCase}
+        for {
+          Try(t, catches, _) <- body
+          cdef <- catches
+          if catchesThrowable(cdef) && !isSyntheticCase(cdef)
+        } {
+          unit.warning(body.pos, "catch block may intercept non-local return from " + meth)
+        }
+
         Block(List(keyDef), tryCatch)
       }
     }
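
A source-level sketch of the situation the new warning targets (find and its arguments are hypothetical): the `return` inside the closure is compiled into a NonLocalReturnControl throw that the synthesized try/catch at the end of the enclosing method intercepts, and a user-written catch of Throwable in between can swallow it.

    object NonLocalReturnSketch {
      def find(xs: List[Int], p: Int => Boolean): Option[Int] = {
        try {
          xs.foreach { x => if (p(x)) return Some(x) }   // non-local return from find
        } catch {
          // Triggers "catch block may intercept non-local return from method find"
          // and would indeed swallow the control-flow exception at runtime.
          case _: Throwable => println("swallowed")
        }
        None
      }

      def main(args: Array[String]): Unit =
        println(find(List(1, 2, 3), _ > 1))
    }
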
@@ -228,8 +200,6 @@ abstract class UnCurry extends InfoTransform
      *    }
      *    new $anon()
      *
-     * If `settings.XoldPatmat.value`, also synthesized AbstractPartialFunction subclasses (see synthPartialFunction).
-     *
      */
     def transformFunction(fun: Function): Tree = {
       fun.tpe match {
@@ -245,169 +215,41 @@ abstract class UnCurry extends InfoTransform
       deEta(fun) match {
         // nullary or parameterless
         case fun1 if fun1 ne fun => fun1
-        case _ if fun.tpe.typeSymbol == PartialFunctionClass =>
-          // only get here when running under -Xoldpatmat
-          synthPartialFunction(fun)
         case _ =>
-          val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
-          val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
-          anonClass setInfo ClassInfoType(parents, newScope, anonClass)
-
-          val targs     = fun.tpe.typeArgs
-          val (formals, restpe) = (targs.init, targs.last)
-
-          val applyMethodDef = {
-            val methSym = anonClass.newMethod(nme.apply, fun.pos, FINAL)
-            val paramSyms = map2(formals, fun.vparams) {
-              (tp, param) => methSym.newSyntheticValueParam(tp, param.name)
-            }
-            methSym setInfoAndEnter MethodType(paramSyms, restpe)
+          def typedFunPos(t: Tree) = localTyper.typedPos(fun.pos)(t)
+          val funParams = fun.vparams map (_.symbol)
+          def mkMethod(owner: Symbol, name: TermName, additionalFlags: FlagSet = NoFlags): DefDef =
+            gen.mkMethodFromFunction(localTyper)(fun, owner, name, additionalFlags)
 
-            fun.vparams foreach  (_.symbol.owner =  methSym)
-            fun.body changeOwner (fun.symbol     -> methSym)
+          val canUseDelamdafyMethod = (inConstructorFlag == 0) // Avoiding synthesizing code prone to SI-6666, SI-8363 by using old-style lambda translation
 
-            val body    = localTyper.typedPos(fun.pos)(fun.body)
-            val methDef = DefDef(methSym, List(fun.vparams), body)
+          if (inlineFunctionExpansion || !canUseDelamdafyMethod) {
+            val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
+            val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation SerialVersionUIDAnnotation
+            anonClass setInfo ClassInfoType(parents, newScope, anonClass)
 
-            // Have to repack the type to avoid mismatches when existentials
-            // appear in the result - see SI-4869.
-            methDef.tpt setType localTyper.packedType(body, methSym)
-            methDef
-          }
-
-          localTyper.typedPos(fun.pos) {
-            Block(
-              List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyMethodDef), fun.pos)),
-              Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
-          }
-
-      }
-    }
-
-    /** Transform a function node (x => body) of type PartialFunction[T, R] where
-     *    body = expr match { case P_i if G_i => E_i }_i=1..n
-     *  to (assuming none of the cases is a default case):
-     *
-     *    class $anon() extends AbstractPartialFunction[T, R] with Serializable {
-     *      def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = (expr: @unchecked) match {
-     *        case P_1 if G_1 => E_1
-     *        ...
-     *        case P_n if G_n => E_n
-     *        case _ => default(expr)
-     *      }
-     *      def isDefinedAt(x: T): boolean = (x: @unchecked) match {
-     *        case P_1 if G_1 => true
-     *        ...
-     *        case P_n if G_n => true
-     *        case _ => false
-     *      }
-     *    }
-     *    new $anon()
-     *
-     *  If there's a default case, the original match is used for applyOrElse, and isDefinedAt returns `true`
-     */
-    def synthPartialFunction(fun: Function) = {
-      if (!settings.XoldPatmat.value) debugwarn("Under the new pattern matching scheme, PartialFunction should have been synthesized during typers.")
-
-      val targs             = fun.tpe.typeArgs
-      val (formals, restpe) = (targs.init, targs.last)
-
-      val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
-      val parents   = addSerializable(appliedType(AbstractPartialFunctionClass, targs: _*))
-      anonClass setInfo ClassInfoType(parents, newScope, anonClass)
-
-      // duplicate before applyOrElseMethodDef is run so that it does not mess up our trees and label symbols (we have a fresh set)
-      // otherwise `TreeSymSubstituter(fun.vparams map (_.symbol), params)` won't work as the subst has been run already
-      val bodyForIDA = {
-        val duped   = fun.body.duplicate
-        val oldParams = new mutable.ListBuffer[Symbol]()
-        val newParams = new mutable.ListBuffer[Symbol]()
-
-        val oldSyms0 =
-          duped filter {
-          case l@LabelDef(_, params, _) =>
-              params foreach {p =>
-                val oldSym = p.symbol
-                p.symbol = oldSym.cloneSymbol
-                oldParams += oldSym
-                newParams += p.symbol
-              }
-              true
-            case _ => false
-          } map (_.symbol)
-        val oldSyms = oldParams.toList ++ oldSyms0
-        val newSyms = newParams.toList ++ (oldSyms0 map (_.cloneSymbol))
-        // println("duping "+ oldSyms +" --> "+ (newSyms map (_.ownerChain)))
-
-        val substLabels = new TreeSymSubstituter(oldSyms, newSyms)
-
-        substLabels(duped)
-      }
-
-      // def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
-      val applyOrElseMethodDef = {
-        val methSym = anonClass.newMethod(fun.pos, nme.applyOrElse) setFlag (FINAL | OVERRIDE)
-
-        val List(argtpe)            = formals
-        val A1                      = methSym newTypeParameter(newTypeName("A1")) setInfo TypeBounds.upper(argtpe)
-        val B1                      = methSym newTypeParameter(newTypeName("B1")) setInfo TypeBounds.lower(restpe)
-        val methFormals             = List(A1.tpe, functionType(List(A1.tpe), B1.tpe))
-        val params@List(x, default) = methSym newSyntheticValueParams methFormals
-        methSym setInfoAndEnter polyType(List(A1, B1), MethodType(params, B1.tpe))
-
-        val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), List(x))
-        val body = localTyper.typedPos(fun.pos) { import CODE._
-          def defaultAction(scrut: Tree) = REF(default) APPLY (REF(x))
-
-          substParam(fun.body) match {
-            case orig@Match(selector, cases) =>
-              if (cases exists treeInfo.isDefaultCase) orig
-              else {
-                val defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, defaultAction(selector.duplicate))
-                Match(/*gen.mkUnchecked*/(selector), cases :+ defaultCase)
-              }
+            val applyMethodDef = mkMethod(anonClass, nme.apply)
+            anonClass.info.decls enter applyMethodDef.symbol
 
+            typedFunPos {
+              Block(
+                ClassDef(anonClass, NoMods, ListOfNil, List(applyMethodDef), fun.pos),
+                Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
+            }
+          } else {
+            // method definition with the same arguments, return type, and body as the original lambda
+            val liftedMethod = mkMethod(fun.symbol.owner, nme.ANON_FUN_NAME, additionalFlags = ARTIFACT)
+
+            // new function whose body is just a call to the lifted method
+            val newFun = deriveFunction(fun)(_ => typedFunPos(
+              gen.mkForwarder(gen.mkAttributedRef(liftedMethod.symbol), funParams :: Nil)
+            ))
+            typedFunPos(Block(liftedMethod, super.transform(newFun)))
           }
         }
-        body.changeOwner(fun.symbol -> methSym)
-
-        val methDef = DefDef(methSym, body)
-
-        // Have to repack the type to avoid mismatches when existentials
-        // appear in the result - see SI-4869.
-        methDef.tpt setType localTyper.packedType(body, methSym)
-        methDef
-      }
-
-      val isDefinedAtMethodDef = {
-        val methSym = anonClass.newMethod(nme.isDefinedAt, fun.pos, FINAL | SYNTHETIC)
-        val params  = methSym newSyntheticValueParams formals
-        methSym setInfoAndEnter MethodType(params, BooleanClass.tpe)
-
-        val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), params)
-        def doSubst(x: Tree) = substParam(resetLocalAttrsKeepLabels(x)) // see pos/t1761 for why `resetLocalAttrs`, but must keep label symbols around
-
-        val body = bodyForIDA match {
-          case Match(selector, cases) =>
-            if (cases exists treeInfo.isDefaultCase) TRUE_typed
-            else
-              doSubst(Match(/*gen.mkUnchecked*/(selector),
-                        (cases map (c => deriveCaseDef(c)(x => TRUE_typed))) :+ (
-                        DEFAULT ==> FALSE_typed)))
-
-        }
-        body.changeOwner(fun.symbol -> methSym)
-
-        DefDef(methSym, body)
-      }
-
-      localTyper.typedPos(fun.pos) {
-        Block(
-          List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyOrElseMethodDef, isDefinedAtMethodDef), fun.pos)),
-          Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
-      }
     }
 
+
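
transformFunction above now chooses between two expansions: the classic inline expansion into an anonymous AbstractFunctionN subclass (used under `inlineFunctionExpansion`, or inside constructors where SI-6666/SI-8363 make the lifted form unsafe), and a delambdafy-friendly form that merely lifts the lambda body into a synthetic ARTIFACT method and keeps a small `Function` node forwarding to it. Roughly, for `(x: Int) => x + 1` the two shapes look like the sketch below (an illustrative approximation, not compiler output):

    // (a) inline expansion: an anonymous AbstractFunction1 subclass.
    val inc1: Int => Int = {
      class anon extends scala.runtime.AbstractFunction1[Int, Int] with Serializable {
        final def apply(x: Int): Int = x + 1
      }
      new anon
    }

    // (b) delambdafy-friendly shape: the body is lifted into a plain method
    //     and the surviving Function node just forwards to it (the shape that
    //     isLiftedLambdaBody recognizes later in the transformer).
    def anonfun(x: Int): Int = x + 1
    val inc2: Int => Int = x => anonfun(x)
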
     def transformArgs(pos: Position, fun: Symbol, args: List[Tree], formals: List[Type]) = {
       val isJava = fun.isJavaDefined
       def transformVarargs(varargsElemType: Type) = {
@@ -416,7 +258,7 @@ abstract class UnCurry extends InfoTransform
 
         // when calling into scala varargs, make sure it's a sequence.
         def arrayToSequence(tree: Tree, elemtp: Type) = {
-          afterUncurry {
+          exitingUncurry {
             localTyper.typedPos(pos) {
               val pt = arrayType(elemtp)
               val adaptedTree = // might need to cast to Array[elemtp], as arrays are not covariant
@@ -435,7 +277,7 @@ abstract class UnCurry extends InfoTransform
           def getClassTag(tp: Type): Tree = {
             val tag = localTyper.resolveClassTag(tree.pos, tp)
             // Don't want bottom types getting any further than this (SI-4024)
-            if (tp.typeSymbol.isBottomClass) getClassTag(AnyClass.tpe)
+            if (tp.typeSymbol.isBottomClass) getClassTag(AnyTpe)
             else if (!tag.isEmpty) tag
             else if (tp.bounds.hi ne tp) getClassTag(tp.bounds.hi)
             else localTyper.TyperErrorGen.MissingClassTagError(tree, tp)
@@ -446,7 +288,7 @@ abstract class UnCurry extends InfoTransform
               case _          => EmptyTree
             }
           }
-          afterUncurry {
+          exitingUncurry {
             localTyper.typedPos(pos) {
               gen.mkMethodCall(tree, toArraySym, Nil, List(traversableClassTag(tree.tpe)))
             }
@@ -465,12 +307,12 @@ abstract class UnCurry extends InfoTransform
           }
           else {
             def mkArray = mkArrayValue(args drop (formals.length - 1), varargsElemType)
-            if (isJava || inPattern) mkArray
+            if (isJava) mkArray
             else if (args.isEmpty) gen.mkNil  // avoid needlessly double-wrapping an empty argument list
             else arrayToSequence(mkArray, varargsElemType)
           }
 
-        afterUncurry {
+        exitingUncurry {
           if (isJava && !isReferenceArray(suffix.tpe) && isArrayOfSymbol(fun.tpe.params.last.tpe, ObjectClass)) {
             // The array isn't statically known to be a reference array, so call ScalaRuntime.toObjectArray.
             suffix = localTyper.typedPos(pos) {
@@ -491,7 +333,7 @@ abstract class UnCurry extends InfoTransform
           arg setType functionType(Nil, arg.tpe)
         }
         else {
-          log(s"Argument '$arg' at line ${arg.pos.safeLine} is $formal from ${fun.fullName}")
+          log(s"Argument '$arg' at line ${arg.pos.line} is $formal from ${fun.fullName}")
           def canUseDirectly(recv: Tree) = (
                recv.tpe.typeSymbol.isSubClass(FunctionClass(0))
             && treeInfo.isExprSafeToInline(recv)
@@ -538,7 +380,7 @@ abstract class UnCurry extends InfoTransform
         deriveDefDef(dd)(_ => body)
       case _ => tree
     }
-    def isNonLocalReturn(ret: Return) = ret.symbol != currentOwner.enclMethod || currentOwner.isLazy
+    def isNonLocalReturn(ret: Return) = ret.symbol != currentOwner.enclMethod || currentOwner.isLazy || currentOwner.isAnonymousFunction
 
 // ------ The tree transformers --------------------------------------------------------
 
@@ -550,15 +392,7 @@ abstract class UnCurry extends InfoTransform
         finally needTryLift = saved
       }
 
-      /** A try or synchronized needs to be lifted anyway for MSIL if it contains
-       *  return statements. These are disallowed in the CLR. By lifting
-       *  such returns will be converted to throws.
-       */
-      def shouldBeLiftedAnyway(tree: Tree) = false && // buggy, see #1981
-        forMSIL && lookForReturns.found(tree)
-
-      /** Transform tree `t` to { def f = t; f } where `f` is a fresh name
-       */
+      /* Transform tree `t` to { def f = t; f } where `f` is a fresh name */
       def liftTree(tree: Tree) = {
         debuglog("lifting tree at: " + (tree.pos))
         val sym = currentOwner.newMethod(unit.freshTermName("liftedTree"), tree.pos)
@@ -578,10 +412,14 @@ abstract class UnCurry extends InfoTransform
       }
 
       val sym = tree.symbol
+
+      // true if the target is a lambda body that's been lifted into a method
+      def isLiftedLambdaBody(target: Tree) = target.symbol.isLocalToBlock && target.symbol.isArtifact && target.symbol.name.containsName(nme.ANON_FUN_NAME)
+
       val result = (
         // TODO - settings.noassertions.value temporarily retained to avoid
         // breakage until a reasonable interface is settled upon.
-        if ((sym ne null) && (sym.elisionLevel.exists (_ < settings.elidebelow.value || settings.noassertions.value)))
+        if ((sym ne null) && (sym.elisionLevel.exists (_ < settings.elidebelow.value || settings.noassertions)))
           replaceElidableTree(tree)
         else translateSynchronized(tree) match {
           case dd @ DefDef(mods, name, tparams, _, tpt, rhs) =>
@@ -592,7 +430,7 @@ abstract class UnCurry extends InfoTransform
 
             if (dd.symbol hasAnnotation VarargsClass) saveRepeatedParams(dd)
 
-            withNeedLift(false) {
+            withNeedLift(needLift = false) {
               if (dd.symbol.isClassConstructor) {
                 atOwner(sym) {
                   val rhs1 = (rhs: @unchecked) match {
@@ -616,37 +454,32 @@ abstract class UnCurry extends InfoTransform
           case ValDef(_, _, _, rhs) =>
             if (sym eq NoSymbol) throw new IllegalStateException("Encountered ValDef without symbol: "+ tree + " in "+ unit)
             if (!sym.owner.isSourceMethod)
-              withNeedLift(true) { super.transform(tree) }
+              withNeedLift(needLift = true) { super.transform(tree) }
             else
               super.transform(tree)
           case UnApply(fn, args) =>
-            val fn1 = withInPattern(false)(transform(fn))
-            val args1 = transformTrees(fn.symbol.name match {
-              case nme.unapply    => args
-              case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args))
-              case _              => sys.error("internal error: UnApply node has wrong symbol")
-            })
+            val fn1   = transform(fn)
+            val args1 = fn.symbol.name match {
+              case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, patmat.alignPatterns(tree).expectedTypes)
+              case _              => args
+            }
             treeCopy.UnApply(tree, fn1, args1)
 
           case Apply(fn, args) =>
-            if (fn.symbol == Object_synchronized && shouldBeLiftedAnyway(args.head))
-              transform(treeCopy.Apply(tree, fn, List(liftTree(args.head))))
-            else {
-              val needLift = needTryLift || !fn.symbol.isLabel // SI-6749, no need to lift in args to label jumps.
-              withNeedLift(needLift) {
-                val formals = fn.tpe.paramTypes
-                treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
-              }
+            val needLift = needTryLift || !fn.symbol.isLabel // SI-6749, no need to lift in args to label jumps.
+            withNeedLift(needLift) {
+              val formals = fn.tpe.paramTypes
+              treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
             }
 
           case Assign(_: RefTree, _) =>
-            withNeedLift(true) { super.transform(tree) }
+            withNeedLift(needLift = true) { super.transform(tree) }
 
           case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) =>
-            withNeedLift(true) { super.transform(tree) }
+            withNeedLift(needLift = true) { super.transform(tree) }
 
           case ret @ Return(_) if (isNonLocalReturn(ret)) =>
-            withNeedLift(true) { super.transform(ret) }
+            withNeedLift(needLift = true) { super.transform(ret) }
 
           case Try(_, Nil, _) =>
             // try-finally does not need lifting: lifting is needed only for try-catch
@@ -656,13 +489,17 @@ abstract class UnCurry extends InfoTransform
             super.transform(tree)
 
           case Try(block, catches, finalizer) =>
-            if (needTryLift || shouldBeLiftedAnyway(tree)) transform(liftTree(tree))
+            if (needTryLift) transform(liftTree(tree))
             else super.transform(tree)
 
           case CaseDef(pat, guard, body) =>
-            val pat1 = withInPattern(true)(transform(pat))
+            val pat1 = transform(pat)
             treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
 
+          // if a lambda is already the right shape we don't need to transform it again
+          case fun @ Function(_, Apply(target, _)) if (!inlineFunctionExpansion) && isLiftedLambdaBody(target) =>
+            super.transform(fun)
+
           case fun @ Function(_, _) =>
             mainTransform(transformFunction(fun))
 
@@ -681,11 +518,11 @@ abstract class UnCurry extends InfoTransform
             tree1
         }
       )
-      assert(result.tpe != null, result + " tpe is null")
-      result setType uncurryTreeType(result.tpe)
+      assert(result.tpe != null, result.shortClass + " tpe is null:\n" + result)
+      result modifyType uncurry
     }
 
-    def postTransform(tree: Tree): Tree = afterUncurry {
+    def postTransform(tree: Tree): Tree = exitingUncurry {
       def applyUnary(): Tree = {
         // TODO_NMT: verify that the inner tree of a type-apply also gets parens if the
         // whole tree is a polymorphic nullary method application
@@ -703,44 +540,13 @@ abstract class UnCurry extends InfoTransform
 
       def isThrowable(pat: Tree): Boolean = pat match {
         case Typed(Ident(nme.WILDCARD), tpt) =>
-          tpt.tpe =:= ThrowableClass.tpe
+          tpt.tpe =:= ThrowableTpe
         case Bind(_, pat) =>
           isThrowable(pat)
         case _ =>
           false
       }
 
-      def isDefaultCatch(cdef: CaseDef) = isThrowable(cdef.pat) && cdef.guard.isEmpty
-
-      def postTransformTry(tree: Try) = {
-        val body = tree.block
-        val catches = tree.catches
-        val finalizer = tree.finalizer
-        if (opt.virtPatmat) {
-          if (catches exists (cd => !treeInfo.isCatchCase(cd)))
-            debugwarn("VPM BUG! illegal try/catch " + catches)
-          tree
-        } else if (catches forall treeInfo.isCatchCase) {
-          tree
-        } else {
-          val exname = unit.freshTermName("ex$")
-          val cases =
-            if ((catches exists treeInfo.isDefaultCase) || isDefaultCatch(catches.last)) catches
-            else catches :+ CaseDef(Ident(nme.WILDCARD), EmptyTree, Throw(Ident(exname)))
-          val catchall =
-            atPos(tree.pos) {
-              CaseDef(
-                Bind(exname, Ident(nme.WILDCARD)),
-                EmptyTree,
-                Match(Ident(exname), cases))
-            }
-          debuglog("rewrote try: " + catches + " ==> " + catchall);
-          val catches1 = localTyper.typedCases(
-            List(catchall), ThrowableClass.tpe, WildcardType)
-          treeCopy.Try(tree, body, catches1, finalizer)
-        }
-      }
-
       tree match {
         /* Some uncurry post transformations add members to templates.
          *
@@ -779,7 +585,9 @@ abstract class UnCurry extends InfoTransform
           addJavaVarargsForwarders(dd, flatdd)
 
         case tree: Try =>
-          postTransformTry(tree)
+          if (tree.catches exists (cd => !treeInfo.isCatchCase(cd)))
+            devWarning("VPM BUG - illegal try/catch " + tree.catches)
+          tree
 
         case Apply(Apply(fn, args), args1) =>
           treeCopy.Apply(tree, fn, args ::: args1)
@@ -837,7 +645,7 @@ abstract class UnCurry extends InfoTransform
       final case class Packed(param: ValDef, tempVal: ValDef) extends ParamTransform
 
       def isDependent(dd: DefDef): Boolean =
-        beforeUncurry {
+        enteringUncurry {
           val methType = dd.symbol.info
           methType.isDependentMethodType && mexists(methType.paramss)(_.info exists (_.isImmediatelyDependent))
         }
@@ -914,10 +722,6 @@ abstract class UnCurry extends InfoTransform
       if (!dd.symbol.hasAnnotation(VarargsClass) || !repeatedParams.contains(dd.symbol))
         return flatdd
 
-      def toSeqType(tp: Type): Type = {
-        val arg = elementType(ArrayClass, tp)
-        seqType(arg)
-      }
       def toArrayType(tp: Type): Type = {
         val arg = elementType(SeqClass, tp)
         // to prevent generation of an `Object` parameter from `Array[T]` parameter later
@@ -926,7 +730,7 @@ abstract class UnCurry extends InfoTransform
         //   becomes     def foo[T](a: Int, b: Array[Object])
         //   instead of  def foo[T](a: Int, b: Array[T]) ===> def foo[T](a: Int, b: Object)
         arrayType(
-          if (arg.typeSymbol.isTypeParameterOrSkolem) ObjectClass.tpe
+          if (arg.typeSymbol.isTypeParameterOrSkolem) ObjectTpe
           else arg
         )
       }
@@ -941,7 +745,7 @@ abstract class UnCurry extends InfoTransform
         case p if rpsymbols(p.symbol) => toArrayType(p.symbol.tpe)
         case p                        => p.symbol.tpe
       }
-      val forwresult = dd.symbol.tpe.finalResultType
+      val forwresult = dd.symbol.tpe_*.finalResultType
       val forwformsyms = map2(forwformals, flatparams)((tp, oldparam) =>
         currentClass.newValueParameter(oldparam.name, oldparam.symbol.pos).setInfo(tp)
       )
@@ -952,11 +756,12 @@ abstract class UnCurry extends InfoTransform
       }
 
       // create the symbol
-      val forwsym = currentClass.newMethod(dd.name, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) setInfo forwtype
+      val forwsym = currentClass.newMethod(dd.name.toTermName, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) setInfo forwtype
+      def forwParams = forwsym.info.paramss.flatten
 
       // create the tree
       val forwtree = theTyper.typedPos(dd.pos) {
-        val locals = map2(forwsym ARGS, flatparams) {
+        val locals = map2(forwParams, flatparams) {
           case (_, fp) if !rpsymbols(fp.symbol) => null
           case (argsym, fp)                     =>
             Block(Nil,
@@ -966,15 +771,13 @@ abstract class UnCurry extends InfoTransform
               )
             )
         }
-        val seqargs = map2(locals, forwsym ARGS) {
+        val seqargs = map2(locals, forwParams) {
           case (null, argsym) => Ident(argsym)
           case (l, _)         => l
         }
         val end = if (forwsym.isConstructor) List(UNIT) else Nil
 
-        DEF(forwsym) === BLOCK(
-          Apply(gen.mkAttributedRef(flatdd.symbol), seqargs) :: end : _*
-        )
+        DefDef(forwsym, BLOCK(Apply(gen.mkAttributedRef(flatdd.symbol), seqargs) :: end : _*))
       }
 
       // check if the method with that name and those arguments already exists in the template
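
The forwarder machinery above serves `@varargs`: for an annotated method, uncurry synthesizes an additional Java-callable overload whose repeated parameter is an Array, delegating to the Seq-based original. A small illustration (the class is invented, and the sketched forwarder signature only approximates the generated symbol):

    import scala.annotation.varargs

    class Greeter {
      @varargs def greet(names: String*): Unit = names foreach println
      // The synthesized forwarder is roughly equivalent to:
      //   def greet(names: Array[String]): Unit = greet(names.toSeq: _*)
      // so Java callers can invoke greeter.greet("a", "b") directly.
    }
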
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index dbe0831..e0bc478 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -4,16 +4,15 @@
  * @author Adriaan Moors
  */
 
-package scala.tools.nsc.transform.patmat
+package scala
+package tools.nsc.transform.patmat
 
-import scala.tools.nsc.symtab._
 import scala.language.postfixOps
 import scala.collection.mutable
 import scala.reflect.internal.util.Statistics
 import scala.reflect.internal.util.Position
 import scala.reflect.internal.util.HashSet
 
-
 trait Logic extends Debugging  {
   import PatternMatchingStats._
 
@@ -113,8 +112,8 @@ trait Logic extends Debugging  {
     case object False extends Prop
 
     // symbols are propositions
-    abstract case class Sym(val variable: Var, val const: Const) extends Prop {
-      private[this] val id = Sym.nextSymId
+    abstract case class Sym(variable: Var, const: Const) extends Prop {
+      private val id: Int = Sym.nextSymId
 
       override def toString = variable +"="+ const +"#"+ id
     }
@@ -126,6 +125,7 @@ trait Logic extends Debugging  {
         (uniques findEntryOrUpdate newSym)
       }
       private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
+      implicit val SymOrdering: Ordering[Sym] = Ordering.by(_.id)
     }
 
     def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
@@ -162,13 +162,17 @@ trait Logic extends Debugging  {
 
     // to govern how much time we spend analyzing matches for unreachability/exhaustivity
     object AnalysisBudget {
-      import scala.tools.cmd.FromString.IntFromString
-      val max = sys.props.get("scalac.patmat.analysisBudget").collect(IntFromString.orElse{case "off" => Integer.MAX_VALUE}).getOrElse(256)
+      private val budgetProp = scala.sys.Prop[Int]("scalac.patmat.analysisBudget")
+      private val budgetOff = "off"
+      val max: Int = {
+        val DefaultBudget = 256
+        budgetProp.option.getOrElse(if (budgetProp.get.equalsIgnoreCase("off")) Integer.MAX_VALUE else DefaultBudget)
+      }
 
       abstract class Exception(val advice: String) extends RuntimeException("CNF budget exceeded")
 
       object exceeded extends Exception(
-          s"(The analysis required more space than allowed. Please try with scalac -Dscalac.patmat.analysisBudget=${AnalysisBudget.max*2} or -Dscalac.patmat.analysisBudget=off.)")
+          s"(The analysis required more space than allowed. Please try with scalac -D${budgetProp.key}=${AnalysisBudget.max*2} or -D${budgetProp.key}=${budgetOff}.)")
 
     }
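
The rewritten `max` above keeps the same intent: the `scalac.patmat.analysisBudget` system property bounds how much work the exhaustivity/reachability analysis may do before giving up with the `exceeded` message. A minimal paraphrase of the resolution rule, with illustrative values (not code from the patch):

    // Illustrative paraphrase of AnalysisBudget.max.
    def resolveBudget(raw: Option[String]): Int = raw.map(_.trim) match {
      case Some(s) if s.nonEmpty && s.forall(_.isDigit) => s.toInt            // -Dscalac.patmat.analysisBudget=512
      case Some(s) if s.equalsIgnoreCase("off")         => Integer.MAX_VALUE  // -Dscalac.patmat.analysisBudget=off
      case _                                            => 256                // DefaultBudget
    }
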
 
@@ -212,7 +216,7 @@ trait Logic extends Debugging  {
       }
 
       props foreach gatherEqualities.apply
-      if (modelNull) vars foreach (_.registerNull)
+      if (modelNull) vars foreach (_.registerNull())
 
       val pure = props map (p => eqFreePropToSolvable(rewriteEqualsToProp(p)))
 
@@ -280,7 +284,7 @@ trait Logic extends Debugging  {
     def eqFreePropToSolvable(p: Prop): Formula
     def cnfString(f: Formula): String
 
-    type Model = Map[Sym, Boolean]
+    type Model = collection.immutable.SortedMap[Sym, Boolean]
     val EmptyModel: Model
     val NoModel: Model
 
@@ -293,6 +297,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
   trait TreesAndTypesDomain extends PropositionalLogic with CheckableTreeAndTypeAnalysis {
     type Type = global.Type
     type Tree = global.Tree
+    import global.definitions.ConstantNull
 
     // resets hash consing -- only supposed to be called by TreeMakersToProps
     def prepareNewAnalysis(): Unit = { Var.resetUniques(); Const.resetUniques() }
@@ -321,7 +326,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
       val staticTpCheckable: Type = checkableType(staticTp)
 
       private[this] var _mayBeNull = false
-      def registerNull(): Unit = { ensureCanModify; if (NullTp <:< staticTpCheckable) _mayBeNull = true }
+      def registerNull(): Unit = { ensureCanModify(); if (ConstantNull <:< staticTpCheckable) _mayBeNull = true }
       def mayBeNull: Boolean = _mayBeNull
 
       // case None => domain is unknown,
@@ -345,16 +350,16 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
           } else
             subConsts
 
-        observed; allConsts
+        observed(); allConsts
       }
 
       // populate equalitySyms
       // don't care about the result, but want only one fresh symbol per distinct constant c
-      def registerEquality(c: Const): Unit = {ensureCanModify; symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
+      def registerEquality(c: Const): Unit = {ensureCanModify(); symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
 
       // return the symbol that represents this variable being equal to the constant `c`, if it exists, otherwise False (for robustness)
       // (registerEquality(c) must have been called prior, either when constructing the domain or from outside)
-      def propForEqualsTo(c: Const): Prop = {observed; symForEqualsTo.getOrElse(c, False)}
+      def propForEqualsTo(c: Const): Prop = {observed(); symForEqualsTo.getOrElse(c, False)}
 
       // [implementation NOTE: don't access until all potential equalities have been registered using registerEquality]
       /** the information needed to construct the boolean proposition that encodes the equality proposition (V = C)
@@ -366,7 +371,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
        * and thus in this variable's equality symbols), but reachability also requires us to model things like V = 1 precluding V = "1"
        */
       lazy val implications = {
-        /** when we know V = C, which other equalities must hold
+        /* when we know V = C, which other equalities must hold
          *
          * in general, equality to some type implies equality to its supertypes
          * (this multi-valued kind of equality is necessary for unreachability)
@@ -479,7 +484,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
       lazy val symForStaticTp: Option[Sym]  = symForEqualsTo.get(TypeConst(staticTpCheckable))
 
       // don't access until all potential equalities have been registered using registerEquality
-      private lazy val equalitySyms = {observed; symForEqualsTo.values.toList}
+      private lazy val equalitySyms = {observed(); symForEqualsTo.values.toList}
 
       // don't call until all equalities have been registered and registerNull has been called (if needed)
       def describe = {
@@ -494,7 +499,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
 
 
     import global.{ConstantType, Constant, SingletonType, Literal, Ident, singleType}
-    import global.definitions.{AnyClass, UnitClass}
+    import global.definitions._
 
 
     // all our variables range over types
@@ -514,11 +519,11 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
         uniques.get(tp).getOrElse(
           uniques.find {case (oldTp, oldC) => oldTp =:= tp} match {
             case Some((_, c)) =>
-              debug.patmat("unique const: "+ (tp, c))
+              debug.patmat("unique const: "+ ((tp, c)))
               c
             case _ =>
               val fresh = mkFresh
-              debug.patmat("uniqued const: "+ (tp, fresh))
+              debug.patmat("uniqued const: "+ ((tp, fresh)))
               uniques(tp) = fresh
               fresh
           })
@@ -534,12 +539,12 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
         if (!t.symbol.isStable) t.tpe.narrow
         else trees find (a => a.correspondsStructure(t)(sameValue)) match {
           case Some(orig) =>
-            debug.patmat("unique tp for tree: "+ (orig, orig.tpe))
+            debug.patmat("unique tp for tree: "+ ((orig, orig.tpe)))
             orig.tpe
           case _ =>
             // duplicate, don't mutate old tree (TODO: use a map tree -> type instead?)
             val treeWithNarrowedType = t.duplicate setType t.tpe.narrow
-            debug.patmat("uniqued: "+ (t, t.tpe, treeWithNarrowedType.tpe))
+            debug.patmat("uniqued: "+ ((t, t.tpe, treeWithNarrowedType.tpe)))
             trees += treeWithNarrowedType
             treeWithNarrowedType.tpe
         }
@@ -549,7 +554,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
       def tp: Type
       def wideTp: Type
 
-      def isAny = wideTp.typeSymbol == AnyClass
+      def isAny = wideTp =:= AnyTpe
       def isValue: Boolean //= tp.isStable
 
       // note: use reference equality on Const since they're hash-consed (doing type equality all the time is too expensive)
@@ -564,11 +569,12 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
     // (At least conceptually: `true` is an instance of class `Boolean`)
     private def widenToClass(tp: Type): Type =
       if (tp.typeSymbol.isClass) tp
+      else if (tp.baseClasses.isEmpty) sys.error("Bad type: " + tp)
       else tp.baseType(tp.baseClasses.head)
 
     object TypeConst extends TypeConstExtractor {
       def apply(tp: Type) = {
-        if (tp =:= NullTp) NullConst
+        if (tp =:= ConstantNull) NullConst
         else if (tp.isInstanceOf[SingletonType]) ValueConst.fromType(tp)
         else Const.unique(tp, new TypeConst(tp))
       }
@@ -577,7 +583,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
 
     // corresponds to a type test that does not imply any value-equality (well, except for outer checks, which we don't model yet)
     sealed class TypeConst(val tp: Type) extends Const {
-      assert(!(tp =:= NullTp))
+      assert(!(tp =:= ConstantNull))
       /*private[this] val id: Int = */ Const.nextTypeId
 
       val wideTp = widenToClass(tp)
@@ -598,7 +604,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
       }
       def apply(p: Tree) = {
         val tp = p.tpe.normalize
-        if (tp =:= NullTp) NullConst
+        if (tp =:= ConstantNull) NullConst
         else {
           val wideTp = widenToClass(tp)
 
@@ -606,7 +612,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
             if (tp.isInstanceOf[SingletonType]) tp
             else p match {
               case Literal(c) =>
-                if (c.tpe.typeSymbol == UnitClass) c.tpe
+                if (c.tpe =:= UnitTpe) c.tpe
                 else ConstantType(c)
               case Ident(_) if p.symbol.isStable =>
                 // for Idents, can encode uniqueness of symbol as uniqueness of the corresponding singleton type
@@ -626,16 +632,14 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
     }
     sealed class ValueConst(val tp: Type, val wideTp: Type, override val toString: String) extends Const {
       // debug.patmat("VC"+(tp, wideTp, toString))
-      assert(!(tp =:= NullTp)) // TODO: assert(!tp.isStable)
+      assert(!(tp =:= ConstantNull)) // TODO: assert(!tp.isStable)
       /*private[this] val id: Int = */Const.nextValueId
       def isValue = true
     }
 
-
-    lazy val NullTp = ConstantType(Constant(null))
     case object NullConst extends Const {
-      def tp     = NullTp
-      def wideTp = NullTp
+      def tp     = ConstantNull
+      def wideTp = ConstantNull
 
       def isValue = true
       override def toString = "null"
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
index 9558542..2893cbd 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -12,27 +12,73 @@ import scala.reflect.internal.util.Statistics
 import scala.reflect.internal.util.Position
 
 trait TreeAndTypeAnalysis extends Debugging {
-  import global.{Tree, Type, Symbol, definitions, analyzer,
-    ConstantType, Literal, Constant,  appliedType, WildcardType, TypeRef, ModuleClassSymbol,
-    nestedMemberType, TypeMap, Ident}
-
+  import global._
   import definitions._
   import analyzer.Typer
 
+  /** Compute the type T implied for a value `v` matched by a pattern `pat` (with expected type `pt`).
+   *
+   * Usually, this is the pattern's type because pattern matching implies instance-of checks.
+   *
+   * However, Stable Identifier and Literal patterns are matched using `==`,
+   * which does not imply a type for the binder that binds the matched value.
+   *
+   * See SI-1503, SI-5024: don't cast binders to types we're not sure they have
+   *
+   * TODO: update spec as follows (deviation between `**`):
+   *
+   *   A pattern binder x@p consists of a pattern variable x and a pattern p.
+   *   The type of the variable x is the static type T **IMPLIED BY** the pattern p.
+   *   This pattern matches any value v matched by the pattern p
+   *     **Deleted: , provided the run-time type of v is also an instance of T, **
+   *   and it binds the variable name to that value.
+   *
+   *   Addition:
+   *     A pattern `p` _implies_ a type `T` if the pattern matches only values of the type `T`.
+   */
+  def binderTypeImpliedByPattern(pat: Tree, pt: Type, binder: Symbol): Type =
+    pat match {
+      // because `==` decides whether these patterns match, stable identifier patterns (ident or selection)
+      // do not contribute any type information (beyond the pattern's expected type)
+      // e.g., in case x@Nil => x --> all we know about `x` is that it satisfies Nil == x, which could be anything
+      case Ident(_) | Select(_, _) =>
+        if (settings.future) pt
+        else {
+          // TODO: don't warn unless this unsound assumption is actually used in a cast
+          // I tried annotating the type returned here with an internal annotation (`pat.tpe withAnnotation UnsoundAssumptionAnnotation`),
+          // and catching it in the patmat backend when used in a cast (because that would signal the unsound assumption was used),
+          // but the annotation didn't bubble up...
+          // This is a pretty poor approximation.
+          def unsoundAssumptionUsed = binder.name != nme.WILDCARD && !(pt <:< pat.tpe)
+          if (settings.lint && unsoundAssumptionUsed)
+            global.currentUnit.warning(pat.pos,
+              sm"""The value matched by $pat is bound to ${binder.name}, which may be used under the
+                  |unsound assumption that it has type ${pat.tpe}, whereas we can only safely
+                  |count on it having type $pt, as the pattern is matched using `==` (see SI-1503).""")
+
+          pat.tpe
+        }
+
+
+      // the other patterns imply type tests, so we can safely assume the binder has the pattern's type when the pattern matches
+      // concretely, a literal, type pattern, a case class (the constructor's result type) or extractor (the unapply's argument type) all imply type tests
+      // (and, inductively, an alternative)
+      case _ => pat.tpe
+    }
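
The unsoundness described above comes from the fact that stable-identifier and literal patterns are matched with `==`, which says nothing about the matched value's type. A self-contained illustration (the `Chatty` object is invented for the example):

    // Illustrative only: Chatty == anything, yet the scrutinee is an Int.
    object Chatty { override def equals(other: Any): Boolean = true }

    (42: Any) match {
      // The case matches because Chatty == 42; under the old rule the binder
      // `x` was still assumed to have type Chatty.type, which is unsound
      // (SI-1503). Under -Xfuture the binder keeps the expected type instead,
      // and -Xlint now warns when the unsound assumption could matter.
      case x @ Chatty => println(x)
      case _          => println("no match")
    }
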
 
   // we use subtyping as a model for implication between instanceof tests
   // i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
   // unfortunately this is not true in general:
-  // SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefClass.tpe)
+  // SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefTpe)
   def instanceOfTpImplies(tp: Type, tpImplied: Type) = {
-    val tpValue    = tp.typeSymbol.isPrimitiveValueClass
+    val tpValue = isPrimitiveValueType(tp)
 
     // pretend we're comparing to Any when we're actually comparing to AnyVal or AnyRef
     // (and the subtype is respectively a value type or not a value type)
     // this allows us to reuse subtyping as a model for implication between instanceOf tests
     // the latter don't see a difference between AnyRef, Object or Any when comparing non-value types -- SI-6022
     val tpImpliedNormalizedToAny =
-      if (tpImplied =:= (if (tpValue) AnyValClass.tpe else AnyRefClass.tpe)) AnyClass.tpe
+      if (tpImplied =:= (if (tpValue) AnyValTpe else AnyRefTpe)) AnyTpe
       else tpImplied
 
     tp <:< tpImpliedNormalizedToAny
@@ -52,28 +98,31 @@ trait TreeAndTypeAnalysis extends Debugging {
       tp.typeSymbol match {
         // TODO case _ if tp.isTupleType => // recurse into component types?
         case UnitClass =>
-          Some(List(UnitClass.tpe))
+          Some(List(UnitTpe))
         case BooleanClass =>
-          Some((List(ConstantType(Constant(true)), ConstantType(Constant(false)))))
+          Some(ConstantTrue :: ConstantFalse :: Nil)
         // TODO case _ if tp.isTupleType => // recurse into component types
         case modSym: ModuleClassSymbol =>
           Some(List(tp))
         // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
         case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
-          debug.patmat("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
+          debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym))))
           None
         case sym =>
-          val subclasses = (
-            sym.sealedDescendants.toList sortBy (_.sealedSortName)
+          val subclasses = debug.patmatResult(s"enum $sym sealed, subclasses")(
             // symbols which are both sealed and abstract need not be covered themselves, because
             // all of their children must be and they cannot otherwise be created.
-            filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
-          debug.patmat("enum sealed -- subclasses: "+ (sym, subclasses))
+            sym.sealedDescendants.toList
+              sortBy (_.sealedSortName)
+              filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
+          )
 
           val tpApprox = typer.infer.approximateAbstracts(tp)
           val pre = tpApprox.prefix
+
+          Some(debug.patmatResult(s"enum sealed tp=$tp, tpApprox=$tpApprox as") {
           // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
-          val validSubTypes = (subclasses flatMap {sym =>
+            subclasses flatMap { sym =>
               // have to filter out children which cannot match: see ticket #3683 for an example
               // compare to the fully known type `tp` (modulo abstract types),
               // so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
@@ -85,9 +134,8 @@ trait TreeAndTypeAnalysis extends Debugging {
               // debug.patmat("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
               if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
               else None
-            })
-          debug.patmat("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes)
-          Some(validSubTypes)
+            }
+          })
       }
 
     // approximate a type to the static type that is fully checkable at run time,
@@ -108,10 +156,7 @@ trait TreeAndTypeAnalysis extends Debugging {
             mapOver(tp)
         }
       }
-
-      val res = typeArgsToWildcardsExceptArray(tp)
-      debug.patmat("checkable "+(tp, res))
-      res
+      debug.patmatResult(s"checkableType($tp)")(typeArgsToWildcardsExceptArray(tp))
     }
 
     // a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
@@ -128,8 +173,8 @@ trait TreeAndTypeAnalysis extends Debugging {
 }
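
For context, the enumeration of sealed descendants above is what lets the matcher warn on missing cases. A small example of the kind of input it handles (hierarchy and warning text are illustrative):

    sealed trait Shape
    final case class Circle(r: Double)          extends Shape
    final case class Rect(w: Double, h: Double) extends Shape

    def area(s: Shape): Double = s match {
      case Circle(r) => math.Pi * r * r
      // Rect is an uncovered sealed descendant, so the checker reports
      // something like: "match may not be exhaustive.
      //                  It would fail on the following input: Rect(_, _)"
    }
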
 
 trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchTreeMaking {
-  import global.{Tree, Type, NoType, Symbol, NoSymbol, ConstantType, Literal, Constant, Ident, UniqueType, RefinedType, EmptyScope}
-  import global.definitions.{ListClass, NilModule}
+  import global._
+  import global.definitions._
 
   /**
    * Represent a match as a formula in propositional logic that encodes whether the match matches (abstractly: we only consider types)
@@ -140,20 +185,17 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
       var currId = 0
     }
     case class Test(prop: Prop, treeMaker: TreeMaker) {
-      // private val reusedBy = new scala.collection.mutable.HashSet[Test]
+      // private val reusedBy = new mutable.HashSet[Test]
       var reuses: Option[Test] = None
       def registerReuseBy(later: Test): Unit = {
         assert(later.reuses.isEmpty, later.reuses)
         // reusedBy += later
         later.reuses = Some(this)
       }
-
       val id = { Test.currId += 1; Test.currId}
-      override def toString =
-        "T"+ id + "C("+ prop +")"  //+ (reuses map ("== T"+_.id) getOrElse (if(reusedBy.isEmpty) treeMaker else reusedBy mkString (treeMaker+ " -->(", ", ",")")))
+      override def toString = s"T${id}C($prop)"
     }
 
-
     class TreeMakersToPropsIgnoreNullChecks(root: Symbol) extends TreeMakersToProps(root) {
       override def uniqueNonNullProp(p: Tree): Prop = True
     }
@@ -162,9 +204,9 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
     class TreeMakersToProps(val root: Symbol) {
       prepareNewAnalysis() // reset hash consing for Var and Const
 
-      private[this] val uniqueEqualityProps = new scala.collection.mutable.HashMap[(Tree, Tree), Eq]
-      private[this] val uniqueNonNullProps = new scala.collection.mutable.HashMap[Tree, Not]
-      private[this] val uniqueTypeProps = new scala.collection.mutable.HashMap[(Tree, Type), Eq]
+      private[this] val uniqueEqualityProps = new mutable.HashMap[(Tree, Tree), Eq]
+      private[this] val uniqueNonNullProps  = new mutable.HashMap[Tree, Not]
+      private[this] val uniqueTypeProps     = new mutable.HashMap[(Tree, Type), Eq]
 
       def uniqueEqualityProp(testedPath: Tree, rhs: Tree): Prop =
         uniqueEqualityProps getOrElseUpdate((testedPath, rhs), Eq(Var(testedPath), ValueConst(rhs)))
@@ -226,7 +268,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
           // so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal
           val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {
             case (f, t) =>
-              t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f)
+              t.isInstanceOf[Ident] && t.symbol.exists && pointsToBound(f)
           }
           val (boundFrom, boundTo) = boundSubst.unzip
           val (unboundFrom, unboundTo) = unboundSubst.unzip
@@ -269,7 +311,8 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
                 }
                 def nonNullTest(testedBinder: Symbol)                 = uniqueNonNullProp(binderToUniqueTree(testedBinder))
                 def equalsTest(pat: Tree, testedBinder: Symbol)       = uniqueEqualityProp(binderToUniqueTree(testedBinder), unique(pat))
-                def eqTest(pat: Tree, testedBinder: Symbol)           = uniqueEqualityProp(binderToUniqueTree(testedBinder), unique(pat)) // TODO: eq, not ==
+                // rewrite eq test to type test against the singleton type `pat.tpe`; unrelated to == (uniqueEqualityProp), could be null
+                def eqTest(pat: Tree, testedBinder: Symbol)           = uniqueTypeProp(binderToUniqueTree(testedBinder), uniqueTp(pat.tpe))
                 def tru                                               = True
               }
               ttm.renderCondition(condStrategy)
@@ -279,9 +322,9 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
             case SubstOnlyTreeMaker(_, _)                             => True
             case GuardTreeMaker(guard) =>
               guard.tpe match {
-                case ConstantType(Constant(true))  => True
-                case ConstantType(Constant(false)) => False
-                case _                             => handleUnknown(tm)
+                case ConstantTrue  => True
+                case ConstantFalse => False
+                case _             => handleUnknown(tm)
               }
             case ExtractorTreeMaker(_, _, _) |
                  ProductExtractorTreeMaker(_, _) |
@@ -335,19 +378,13 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
       debug.patmat("treeMakers:")
       debug.patmat(alignAcrossRows(cases, ">>"))
     }
-
-    def showTests(testss: List[List[Test]]) = {
-      debug.patmat("tests: ")
-      debug.patmat(alignAcrossRows(testss, "&"))
-    }
   }
-
 }
 
 trait MatchAnalysis extends MatchApproximation {
   import PatternMatchingStats._
-  import global.{Tree, Type, Symbol, NoSymbol, Ident, Select}
-  import global.definitions.{isPrimitiveValueClass, ConsClass, isTupleSymbol}
+  import global._
+  import global.definitions._
 
   trait MatchAnalyzer extends MatchApproximator  {
     def uncheckedWarning(pos: Position, msg: String) = global.currentUnit.uncheckedWarning(pos, msg)
@@ -492,8 +529,13 @@ trait MatchAnalysis extends MatchApproximation {
 
     object CounterExample {
       def prune(examples: List[CounterExample]): List[CounterExample] = {
-        val distinct = examples.filterNot(_ == NoExample).toSet
-        distinct.filterNot(ce => distinct.exists(other => (ce ne other) && ce.coveredBy(other))).toList
+        // SI-7669 Warning: we no longer use examples.distinct here, as
+        //         we can have A != B && A.coveredBy(B) && B.coveredBy(A)
+        //         with Nil and List().
+        val result = mutable.Buffer[CounterExample]()
+        for (example <- examples if (!result.exists(example coveredBy _)))
+          result += example
+        result.toList
       }
     }
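
A standalone sketch of the keep-first pruning just introduced (generic names, not part of the patch): because two distinct counter-examples can each cover the other -- the SI-7669 case with Nil and List() -- simply removing every example covered by some other one would discard both, whereas the greedy pass keeps the first of any mutually covering group.

    // Illustrative generic version of CounterExample.prune above.
    def prune[A](examples: List[A])(coveredBy: (A, A) => Boolean): List[A] = {
      val kept = scala.collection.mutable.Buffer[A]()
      for (example <- examples if !kept.exists(k => coveredBy(example, k)))
        kept += example
      kept.toList
    }

    // With two mutually covering examples, only the first survives:
    //   prune(List("Nil", "List()"))((_, _) => true)  // == List("Nil")
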
 
@@ -595,7 +637,7 @@ trait MatchAnalysis extends MatchApproximation {
         private def unique(variable: Var): VariableAssignment =
           uniques.getOrElseUpdate(variable, {
             val (eqTo, neqTo) = varAssignment.getOrElse(variable, (Nil, Nil)) // TODO
-            VariableAssignment(variable, eqTo.toList, neqTo.toList, mutable.HashMap.empty)
+            VariableAssignment(variable, eqTo.toList, neqTo.toList)
           })
 
         def apply(variable: Var): VariableAssignment = {
@@ -609,7 +651,7 @@ trait MatchAnalysis extends MatchApproximation {
           else {
             findVar(pre) foreach { preVar =>
               val outerCtor = this(preVar)
-              outerCtor.fields(field) = newCtor
+              outerCtor.addField(field, newCtor)
             }
             newCtor
           }
@@ -617,15 +659,21 @@ trait MatchAnalysis extends MatchApproximation {
       }
 
       // node in the tree that describes how to construct a counter-example
-      case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: scala.collection.mutable.Map[Symbol, VariableAssignment]) {
+      case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const]) {
+        private val fields: mutable.Map[Symbol, VariableAssignment] = mutable.HashMap.empty
         // need to prune since the model now incorporates all super types of a constant (needed for reachability)
         private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp)))
         private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
         private lazy val ctor       = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
-        private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
-        private lazy val cls        = if (ctor == NoSymbol) NoSymbol else ctor.owner
-        private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors
-
+        private lazy val ctorParams = if (ctor.paramss.isEmpty) Nil else ctor.paramss.head
+        private lazy val cls        = ctor.safeOwner
+        private lazy val caseFieldAccs = cls.caseFieldAccessors
+
+        def addField(symbol: Symbol, assign: VariableAssignment) {
+          // SI-7669 Only register this field if this class contains it.
+          val shouldConstrainField = !symbol.isCaseAccessor || caseFieldAccs.contains(symbol)
+          if (shouldConstrainField) fields(symbol) = assign
+        }
 
         def allFieldAssignmentsLegal: Boolean =
           (fields.keySet subsetOf caseFieldAccs.toSet) && fields.values.forall(_.allFieldAssignmentsLegal)
@@ -638,7 +686,7 @@ trait MatchAnalysis extends MatchApproximation {
         def toCounterExample(beBrief: Boolean = false): CounterExample =
           if (!allFieldAssignmentsLegal) NoExample
           else {
-            debug.patmat("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))
+            debug.patmat("describing "+ ((variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal)))
             val res = prunedEqualTo match {
               // a definite assignment to a value
               case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq)
@@ -659,7 +707,7 @@ trait MatchAnalysis extends MatchApproximation {
 
                 cls match {
                   case ConsClass               => ListExample(args())
-                  case _ if isTupleSymbol(cls) => TupleExample(args(true))
+                  case _ if isTupleSymbol(cls) => TupleExample(args(brevity = true))
                   case _ => ConstructorExample(cls, args())
                 }
 
@@ -679,8 +727,7 @@ trait MatchAnalysis extends MatchApproximation {
               // TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive
               case _ => NoExample
             }
-            debug.patmat("described as: "+ res)
-            res
+            debug.patmatResult("described as")(res)
           }
 
         override def toString = toCounterExample().toString
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
index 57fab4e..06b39b0 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
@@ -10,7 +10,6 @@ import scala.tools.nsc.symtab.Flags.SYNTHETIC
 import scala.language.postfixOps
 import scala.reflect.internal.util.Statistics
 import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.NoPosition
 
 /** Factory methods used by TreeMakers to make the actual trees.
  *
@@ -18,10 +17,7 @@ import scala.reflect.internal.util.NoPosition
  * and pure (aka "virtualized": match is parametric in its monad).
  */
 trait MatchCodeGen extends Interface {
-  import PatternMatchingStats._
-  import global.{nme, treeInfo, definitions, gen, Tree, Type, Symbol, NoSymbol,
-    appliedType, NoType, MethodType, newTermName, Name,
-    Block, Literal, Constant, EmptyTree, Function, Typed, ValDef, LabelDef}
+  import global._
   import definitions._
 
   ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -66,45 +62,44 @@ trait MatchCodeGen extends Interface {
     def codegen: AbsCodegen
 
     abstract class CommonCodegen extends AbsCodegen { import CODE._
-      def fun(arg: Symbol, body: Tree): Tree           = Function(List(ValDef(arg)), body)
-      def tupleSel(binder: Symbol)(i: Int): Tree       = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
-      def index(tgt: Tree)(i: Int): Tree               = tgt APPLY (LIT(i))
-      def drop(tgt: Tree)(n: Int): Tree                = (tgt DOT vpmName.drop) (LIT(n))
-      def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder)          // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+      def fun(arg: Symbol, body: Tree): Tree     = Function(List(ValDef(arg)), body)
+      def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
+      def index(tgt: Tree)(i: Int): Tree         = tgt APPLY (LIT(i))
+
+      // Right now this blindly calls drop on the result of the unapplySeq
+      // unless it verifiably has no drop method (this is the case in particular
+      // with Array.) You should not actually have to write a method called drop
+      // for name-based matching, but this was an expedient route for the basics.
+      def drop(tgt: Tree)(n: Int): Tree = {
+        def callDirect   = fn(tgt, nme.drop, LIT(n))
+        def callRuntime  = Apply(REF(currentRun.runDefinitions.traversableDropMethod), tgt :: LIT(n) :: Nil)
+        def needsRuntime = (tgt.tpe ne null) && (typeOfMemberNamedDrop(tgt.tpe) == NoType)
+
+        if (needsRuntime) callRuntime else callDirect
+      }
+
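
The comment above explains the fallback: when the sequence produced for a vararg pattern offers no `drop` member (Array in particular), the generated prefix-dropping call is routed through the runtime helper instead of a direct member call. A sketch of the kind of pattern whose compilation needs that drop (the method is illustrative):

    // Illustrative only.
    def tailOf(a: Array[Int]): Seq[Int] = a match {
      // Binding `rest` means dropping the two already-matched elements from
      // the extracted sequence; that is where the drop(...) generated above
      // comes in.
      case Array(_, _, rest @ _*) => rest
      case _                      => Nil
    }
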
+      // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+      def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder)
 
       // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
       def _asInstanceOf(b: Symbol, tp: Type): Tree = if (b.info <:< tp) REF(b) else gen.mkCastPreservingAnnotations(REF(b), tp)
-      def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false)
-
-      // duplicated out of frustration with cast generation
-      def mkZero(tp: Type): Tree = {
-        tp.typeSymbol match {
-          case UnitClass    => Literal(Constant())
-          case BooleanClass => Literal(Constant(false))
-          case FloatClass   => Literal(Constant(0.0f))
-          case DoubleClass  => Literal(Constant(0.0d))
-          case ByteClass    => Literal(Constant(0.toByte))
-          case ShortClass   => Literal(Constant(0.toShort))
-          case IntClass     => Literal(Constant(0))
-          case LongClass    => Literal(Constant(0L))
-          case CharClass    => Literal(Constant(0.toChar))
-          case _            => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
-        }
+      def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, any = true, wrapInApply = false)
+
+      def mkZero(tp: Type): Tree = gen.mkConstantZero(tp) match {
+        case Constant(null) => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
+        case const          => Literal(const)
       }
     }
   }
 
   trait PureMatchMonadInterface extends MatchMonadInterface {
     val matchStrategy: Tree
-
-    def inMatchMonad(tp: Type): Type = appliedType(oneSig, List(tp)).finalResultType
-    def pureType(tp: Type): Type     = appliedType(oneSig, List(tp)).paramTypes.headOption getOrElse NoType // fail gracefully (otherwise we get crashes)
-    protected def matchMonadSym      = oneSig.finalResultType.typeSymbol
-
     import CODE._
     def _match(n: Name): SelectStart = matchStrategy DOT n
 
-    private lazy val oneSig: Type = typer.typedOperator(_match(vpmName.one)).tpe  // TODO: error message
+    // TODO: error message
+    private lazy val oneType              = typer.typedOperator(_match(vpmName.one)).tpe
+    override def pureType(tp: Type): Type = firstParamType(appliedType(oneType, tp :: Nil))
   }
 
   trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
@@ -132,17 +127,11 @@ trait MatchCodeGen extends Interface {
       //  __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`)
       def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = flatMap(guard(cond, res), nextBinder, next)
       //  __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`)
-      def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), next)
+      def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitTpe)), next)
     }
   }
 
-  trait OptimizedMatchMonadInterface extends MatchMonadInterface {
-    override def inMatchMonad(tp: Type): Type = optionType(tp)
-    override def pureType(tp: Type): Type     = tp
-    override protected def matchMonadSym      = OptionClass
-  }
-
-  trait OptimizedCodegen extends CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface {
+  trait OptimizedCodegen extends CodegenCore with TypedSubstitution with MatchMonadInterface {
     override def codegen: AbsCodegen = optimizedCodegen
 
      // when we know we're targeting Option, do some inlining the optimizer won't do
@@ -158,9 +147,8 @@ trait MatchCodeGen extends Interface {
        * if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
        */
       def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
-        val matchEnd = newSynthCaseLabel("matchEnd")
         val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, newFlags = SYNTHETIC) setInfo restpe.withoutAnnotations
-        matchEnd setInfo MethodType(List(matchRes), restpe)
+        val matchEnd = newSynthCaseLabel("matchEnd") setInfo MethodType(List(matchRes), restpe)
 
         def newCaseSym = newSynthCaseLabel("case") setInfo MethodType(Nil, restpe)
         var _currCase = newCaseSym
@@ -172,23 +160,22 @@ trait MatchCodeGen extends Interface {
 
           LabelDef(currCase, Nil, mkCase(new OptimizedCasegen(matchEnd, nextCase)))
         }
-
         // must compute catchAll after caseLabels (side-effects nextCase)
         // catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
         // if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
         val catchAllDef = matchFailGen map { matchFailGen =>
-          val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
+          val scrutRef = scrutSym.fold(EmptyTree: Tree)(REF) // for alternatives
 
           LabelDef(_currCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
         } toList // at most 1 element
 
         // scrutSym == NoSymbol when generating an alternatives matcher
-        val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym)  === scrut) else Nil // for alternatives
+        val scrutDef = scrutSym.fold(List[Tree]())(ValDef(_, scrut) :: Nil) // for alternatives
 
         // the generated block is taken apart in TailCalls under the following assumptions
-          // the assumption is once we encounter a case, the remainder of the block will consist of cases
-          // the prologue may be empty, usually it is the valdef that stores the scrut
-          // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+        // the assumption is once we encounter a case, the remainder of the block will consist of cases
+        // the prologue may be empty, usually it is the valdef that stores the scrut
+        // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
         Block(
           scrutDef ++ caseDefs ++ catchAllDef,
           LabelDef(matchEnd, List(matchRes), REF(matchRes))
@@ -210,15 +197,14 @@ trait MatchCodeGen extends Interface {
         // next: MatchMonad[U]
         // returns MatchMonad[U]
         def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
-          val tp      = inMatchMonad(b.tpe)
-          val prevSym = freshSym(prev.pos, tp, "o")
-          val isEmpty = tp member vpmName.isEmpty
-          val get     = tp member vpmName.get
-
+          val prevSym = freshSym(prev.pos, prev.tpe, "o")
           BLOCK(
-            VAL(prevSym) === prev,
+            ValDef(prevSym, prev),
             // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
-            ifThenElseZero(NOT(prevSym DOT isEmpty), Substitution(b, prevSym DOT get)(next))
+            ifThenElseZero(
+              NOT(prevSym DOT vpmName.isEmpty),
+              Substitution(b, prevSym DOT vpmName.get)(next)
+            )
           )
         }
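
A sketch (plain Scala over Option, not compiler trees) of what the inlined
flatMap above amounts to at source level: bind the extractor result once, then
test isEmpty and read get directly rather than calling flatMap, so no closure
is allocated on the match path.

    def inlinedFlatMap[A, B](prev: Option[A])(next: A => Option[B]): Option[B] = {
      val o = prev                  // ValDef(prevSym, prev)
      if (!o.isEmpty) next(o.get)   // must be isEmpty/get: prev may come from
      else None                     // a user-defined extractor
    }
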
 
@@ -228,14 +214,12 @@ trait MatchCodeGen extends Interface {
         // next == MatchMonad[U]
         // returns MatchMonad[U]
         def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = {
-          val rest =
+          val rest = (
             // only emit a local val for `nextBinder` if it's actually referenced in `next`
             if (next.exists(_.symbol eq nextBinder))
-              BLOCK(
-                VAL(nextBinder) === res,
-                next
-              )
+              BLOCK(ValDef(nextBinder, res), next)
             else next
+          )
           ifThenElseZero(cond, rest)
         }
 
@@ -255,4 +239,4 @@ trait MatchCodeGen extends Interface {
 
     }
   }
-}
\ No newline at end of file
+}
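
A sketch of the "zero value per type" idea behind the mkZero change above,
written over plain type names for illustration (gen.mkConstantZero itself works
on compiler Types and Constants, not strings):

    def zeroValueFor(typeName: String): Any = typeName match {
      case "Unit"    => ()
      case "Boolean" => false
      case "Byte"    => 0.toByte
      case "Short"   => 0.toShort
      case "Int"     => 0
      case "Long"    => 0L
      case "Char"    => 0.toChar
      case "Float"   => 0.0f
      case "Double"  => 0.0d
      case _         => null   // reference types fall back to null
    }
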
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala
new file mode 100644
index 0000000..0d08120
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala
@@ -0,0 +1,37 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+/** Segregating this super hacky CPS code. */
+trait MatchCps {
+  self: PatternMatching =>
+
+  import global._
+
+  // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
+  private object CpsSymbols {
+    private def cpsSymbol(name: String) = rootMirror.getClassIfDefined(s"scala.util.continuations.$name")
+
+    val MarkerCPSAdaptPlus  = cpsSymbol("cpsPlus")
+    val MarkerCPSAdaptMinus = cpsSymbol("cpsMinus")
+    val MarkerCPSSynth      = cpsSymbol("cpsSynth")
+    val MarkerCPSTypes      = cpsSymbol("cpsParam")
+    val stripTriggerCPSAnns = Set[Symbol](MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
+    val strippedCPSAnns     = stripTriggerCPSAnns + MarkerCPSTypes
+
+    // when one of the internal cps-type-state annotations is present, strip all CPS annotations
+    // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
+    // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
+    def removeCPSFromPt(pt: Type): Type = (
+      if (MarkerCPSAdaptPlus.exists && (stripTriggerCPSAnns exists pt.hasAnnotation))
+        pt filterAnnotations (ann => !(strippedCPSAnns exists ann.matches))
+      else
+        pt
+    )
+  }
+  def removeCPSFromPt(pt: Type): Type = CpsSymbols removeCPSFromPt pt
+}
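
The removeCPSFromPt logic above reduces to a "guard, then filter" step; a
plain-collections sketch of the same shape (names here are hypothetical, the
real code operates on Symbols and annotated Types):

    def stripIfTriggered(annotations: List[String],
                         triggers: Set[String],
                         toStrip: Set[String]): List[String] =
      if (triggers.nonEmpty && annotations.exists(triggers))
        annotations.filterNot(toStrip)
      else
        annotations
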
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
index c570dd8..8ff7824 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
@@ -11,7 +11,6 @@ import scala.language.postfixOps
 import scala.collection.mutable
 import scala.reflect.internal.util.Statistics
 import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.NoPosition
 
 /** Optimize and analyze matches based on their TreeMaker-representation.
  *
@@ -20,15 +19,9 @@ import scala.reflect.internal.util.NoPosition
  */
 // TODO: split out match analysis
 trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
-  import PatternMatchingStats._
-  import global.{Tree, Type, Symbol, NoSymbol, CaseDef, atPos,
-    ConstantType, Literal, Constant, gen, EmptyTree, distinctBy,
-    Typed, treeInfo, nme, Ident,
-    Apply, If, Bind, lub, Alternative, deriveCaseDef, Match, MethodType, LabelDef, TypeTree, Throw}
-
+  import global._
   import global.definitions._
 
-
   ////
   trait CommonSubconditionElimination extends OptimizedCodegen with MatchApproximator {
     /** a flow-sensitive, generalised, common sub-expression elimination
@@ -148,19 +141,19 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
     object ReusedCondTreeMaker {
       def apply(orig: CondTreeMaker) = new ReusedCondTreeMaker(orig.prevBinder, orig.nextBinder, orig.cond, orig.res, orig.pos)
     }
-    class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, val pos: Position) extends TreeMaker { import CODE._
+    class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, val pos: Position) extends TreeMaker {
       lazy val localSubstitution        = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
-      lazy val storedCond               = freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE
+      lazy val storedCond               = freshSym(pos, BooleanTpe, "rc") setFlag MUTABLE
       lazy val treesToHoist: List[Tree] = {
         nextBinder setFlag MUTABLE
-        List(storedCond, nextBinder) map { b => VAL(b) === codegen.mkZero(b.info) }
+        List(storedCond, nextBinder) map (b => ValDef(b, codegen.mkZero(b.info)))
       }
 
       // TODO: finer-grained duplication
       def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen)
         atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
 
-      override def toString = "Memo"+(nextBinder.name, storedCond.name, cond, res, substitution)
+      override def toString = "Memo"+((nextBinder.name, storedCond.name, cond, res, substitution))
     }
 
     case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._
@@ -199,7 +192,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
         // and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
         casegen.ifThenElseZero(REF(lastReusedTreeMaker.storedCond), substitution(next).duplicate)
       }
-      override def toString = "R"+(lastReusedTreeMaker.storedCond.name, substitution)
+      override def toString = "R"+((lastReusedTreeMaker.storedCond.name, substitution))
     }
   }
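
A source-level sketch of what ReusedCondTreeMaker arranges (illustrative, not
the generated trees): the shared condition is evaluated once into mutable
locals that start at their zero values, and later branches read the stored flag
instead of repeating the test.

    def classify(x: Any): Int = {
      var rc = false                   // storedCond, zero-initialized
      var s: String = null             // nextBinder, zero-initialized
      if (x.isInstanceOf[String]) {    // the shared condition, tested once
        rc = true
        s = x.asInstanceOf[String]
      }
      if (rc && s.length > 3) 1        // reuse rc instead of re-testing
      else if (rc) 2
      else 3
    }
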
 
@@ -217,7 +210,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
 //  }
 
   //// SWITCHES -- TODO: operate on Tests rather than TreeMakers
-  trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface {
+  trait SwitchEmission extends TreeMakers with MatchMonadInterface {
     import treeInfo.isGuardedCase
 
     abstract class SwitchMaker {
@@ -240,9 +233,6 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
       def defaultBody: Tree
       def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef
 
-      private def sequence[T](xs: List[Option[T]]): Option[List[T]] =
-        if (xs exists (_.isEmpty)) None else Some(xs.flatten)
-
       object GuardAndBodyTreeMakers {
           def unapply(tms: List[TreeMaker]): Option[(Tree, Tree)] = {
             tms match {
@@ -409,23 +399,15 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
       private def noGuards(cs: List[CaseDef]): Boolean = !cs.exists(isGuardedCase)
 
       // must do this before removing guards from cases and collapsing (SI-6011, SI-6048)
-      private def unreachableCase(cs: List[CaseDef]): Option[CaseDef] = {
-        var cases = cs
-        var unreachable: Option[CaseDef] = None
-
-        while (cases.nonEmpty && unreachable.isEmpty) {
-          val currCase = cases.head
-          if (isDefault(currCase) && cases.tail.nonEmpty) // subsumed by the `else if` that follows, but faster
-            unreachable = Some(cases.tail.head)
-          else if (!isGuardedCase(currCase) || currCase.guard.tpe =:= ConstantType(Constant(true)))
-            unreachable = cases.tail.find(caseImplies(currCase))
-          else if (currCase.guard.tpe =:= ConstantType(Constant(false)))
-            unreachable = Some(currCase)
-
-          cases = cases.tail
+      private def unreachableCase(cases: List[CaseDef]): Option[CaseDef] = {
+        def loop(cases: List[CaseDef]): Option[CaseDef] = cases match {
+          case head :: next :: _ if isDefault(head)                                    => Some(next) // subsumed by the next case, but faster
+          case head :: rest if !isGuardedCase(head) || head.guard.tpe =:= ConstantTrue => rest find caseImplies(head) orElse loop(rest)
+          case head :: _ if head.guard.tpe =:= ConstantFalse                           => Some(head)
+          case _ :: rest                                                               => loop(rest)
+          case _                                                                       => None
         }
-
-        unreachable
+        loop(cases)
       }
 
       // empty list ==> failure
@@ -510,7 +492,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
     }
 
     class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree], val unchecked: Boolean) extends SwitchMaker {
-      val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
+      val switchableTpe = Set(ByteTpe, ShortTpe, IntTpe, CharTpe)
       val alternativesSupported = true
       val canJump = true
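
The point of restricting switchableTpe to Byte/Short/Int/Char is that all of
them widen to Int, so the match can be emitted as a switch over integer
literals; a rough sketch of the shape being aimed for (illustrative only):

    def describe(c: Char): String = {
      val x = c          // bind the scrutinee once
      x.toInt match {    // convert to Int unless it already is one
        case 97 => "a"
        case 98 => "b"
        case _  => "other"
      }
    }
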
 
@@ -535,7 +517,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
       }
 
       def defaultSym: Symbol = scrutSym
-      def defaultBody: Tree  = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) }
+      def defaultBody: Tree  = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse Throw(MatchErrorClass.tpe, REF(scrutSym)) }
       def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
         (DEFAULT IF guard) ==> body
       }}
@@ -550,10 +532,10 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
         else {
           // match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
           val scrutToInt: Tree =
-            if (scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
+            if (scrutSym.tpe =:= IntTpe) REF(scrutSym)
             else (REF(scrutSym) DOT (nme.toInt))
           Some(BLOCK(
-            VAL(scrutSym) === scrut,
+            ValDef(scrutSym, scrut),
             Match(scrutToInt, caseDefsWithDefault) // a switch
           ))
         }
@@ -578,16 +560,16 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
       }
 
       def isDefault(x: CaseDef): Boolean = x match {
-        case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe)          => true
-        case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+        case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableTpe)          => true
+        case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableTpe) => true
         case CaseDef(Ident(nme.WILDCARD), EmptyTree, _)                                                          => true
         case _ => false
       }
 
-      lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
+      lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableTpe)
       def defaultBody: Tree       = Throw(CODE.REF(defaultSym))
       def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
-        (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) IF guard) ==> body
+        (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableTpe)))) IF guard) ==> body
       }}
     }
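
The unreachableCase rewrite above folds the old while-loop into a recursive
scan; a simplified model of just its default-case rule (a type parameter in
place of CaseDef, ignoring the guard-related branches):

    def firstUnreachable[A](cases: List[A])(isDefault: A => Boolean): Option[A] =
      cases match {
        case head :: next :: _ if isDefault(head) => Some(next)   // a default shadows what follows
        case _ :: rest                            => firstUnreachable(rest)(isDefault)
        case Nil                                  => None
      }
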
 
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index 90c52e3..4cf8980 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -12,92 +12,183 @@ import scala.reflect.internal.util.Statistics
 
 /** Translate typed Trees that represent pattern matches into the patternmatching IR, defined by TreeMakers.
  */
-trait MatchTranslation { self: PatternMatching  =>
+trait MatchTranslation {
+  self: PatternMatching =>
+
   import PatternMatchingStats._
-  import global.{phase, currentRun, Symbol,
-    Apply, Bind, CaseDef, ClassInfoType, Ident, Literal, Match,
-    Alternative, Constant, EmptyTree, Select, Star, This, Throw, Typed, UnApply,
-    Type, MethodType, WildcardType, PolyType, ErrorType, NoType, TypeRef, typeRef,
-    Name, NoSymbol, Position, Tree, atPos, glb, rootMirror, treeInfo, nme, Transformer,
-    elimAnonymousClass, asCompactDebugString, hasLength}
-  import global.definitions.{ThrowableClass, SeqClass, ScalaPackageClass, BooleanClass, UnitClass, RepeatedParamClass,
-    repeatedToSeq, isRepeatedParamType, getProductArgs}
+  import global._
+  import definitions._
   import global.analyzer.{ErrorUtils, formalTypes}
+  import treeInfo.{ WildcardStarArg, Unapplied, isStar, unbind }
+  import CODE._
+
+  // Always map repeated params to sequences
+  private def setVarInfo(sym: Symbol, info: Type) =
+    sym setInfo debug.patmatResult(s"changing ${sym.defString} to")(repeatedToSeq(info))
+
+  private def hasSym(t: Tree) = t.symbol != null && t.symbol != NoSymbol
 
-  trait MatchTranslator extends TreeMakers {
+  trait MatchTranslator extends TreeMakers with TreeMakerWarnings {
     import typer.context
 
-    // Why is it so difficult to say "here's a name and a context, give me any
-    // matching symbol in scope" ? I am sure this code is wrong, but attempts to
-    // use the scopes of the contexts in the enclosing context chain discover
-    // nothing. How to associate a name with a symbol would be a wonderful
-    // linkage for which to establish a canonical acquisition mechanism.
-    def matchingSymbolInScope(pat: Tree): Symbol = {
-      def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
-        case PolyType(tparams, restpe)  => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
-        case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
-        case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
-        case _                          => NoSymbol
+    /** A conservative approximation of which patterns do not discern anything.
+     * They are discarded during the translation.
+     */
+    object WildcardPattern {
+      def unapply(pat: Tree): Boolean = pat match {
+        case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
+        case Star(WildcardPattern())               => true
+        case x: Ident                              => treeInfo.isVarPattern(x)
+        case Alternative(ps)                       => ps forall unapply
+        case EmptyTree                             => true
+        case _                                     => false
       }
-      pat match {
-        case Bind(name, _) =>
-          context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
-            res orElse declarationOfName(ctx.owner.rawInfo, name))
-        case _ => NoSymbol
+    }
+
+    object PatternBoundToUnderscore {
+      def unapply(pat: Tree): Boolean = pat match {
+        case Bind(nme.WILDCARD, _)                => true // don't skip when binding an interesting symbol!
+        case Ident(nme.WILDCARD)                  => true
+        case Alternative(ps)                      => ps forall unapply
+        case Typed(PatternBoundToUnderscore(), _) => true
+        case _                                    => false
       }
     }
 
-    // Issue better warnings than "unreachable code" when people mis-use
-    // variable patterns thinking they bind to existing identifiers.
-    //
-    // Possible TODO: more deeply nested variable patterns, like
-    //   case (a, b) => 1 ; case (c, d) => 2
-    // However this is a pain (at least the way I'm going about it)
-    // and I have to think these detailed errors are primarily useful
-    // for beginners, not people writing nested pattern matches.
-    def checkMatchVariablePatterns(cases: List[CaseDef]) {
-      // A string describing the first variable pattern
-      var vpat: String = null
-      // Using an iterator so we can recognize the last case
-      val it = cases.iterator
-
-      def addendum(pat: Tree) = {
-        matchingSymbolInScope(pat) match {
-          case NoSymbol   => ""
-          case sym        =>
-            val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
-            s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
+    object SymbolBound {
+      def unapply(tree: Tree): Option[(Symbol, Tree)] = tree match {
+        case Bind(_, expr) if hasSym(tree) => Some(tree.symbol -> expr)
+        case _                             => None
+      }
+    }
+
+    def newBoundTree(tree: Tree, pt: Type): BoundTree = tree match {
+      case SymbolBound(sym, expr) => BoundTree(setVarInfo(sym, pt), expr)
+      case _                      => BoundTree(setVarInfo(freshSym(tree.pos, prefix = "p"), pt), tree)
+    }
+
+    final case class BoundTree(binder: Symbol, tree: Tree) {
+      private lazy val extractor = ExtractorCall(tree)
+
+      def pos     = tree.pos
+      def tpe     = binder.info.dealiasWiden  // the type of the variable bound to the pattern
+      def pt      = unbound match {
+        case Star(tpt)      => this glbWith seqType(tpt.tpe)
+        case TypeBound(tpe) => tpe
+        case tree           => tree.tpe
+      }
+      def glbWith(other: Type) = glb(tpe :: other :: Nil).normalize
+
+      object SymbolAndTypeBound {
+        def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
+          case SymbolBound(sym, TypeBound(tpe)) => Some(sym -> tpe)
+          case TypeBound(tpe)                   => Some(binder -> tpe)
+          case _                                => None
         }
       }
 
-      while (it.hasNext) {
-        val cdef = it.next
-        // If a default case has been seen, then every succeeding case is unreachable.
-        if (vpat != null)
-          context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
-        // If this is a default case and more cases follow, warn about this one so
-        // we have a reason to mention its pattern variable name and any corresponding
-        // symbol in scope.  Errors will follow from the remaining cases, at least
-        // once we make the above warning an error.
-        else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
-          val vpatName = cdef.pat match {
-            case Bind(name, _)   => s" '$name'"
-            case _               => ""
-          }
-          vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
-          context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+      object TypeBound {
+        def unapply(tree: Tree): Option[Type] = tree match {
+          case Typed(Ident(_), _) if tree.tpe != null => Some(tree.tpe)
+          case _                                      => None
         }
       }
+
+      private def rebindTo(pattern: Tree) = BoundTree(binder, pattern)
+      private def step(treeMakers: TreeMaker*)(subpatterns: BoundTree*): TranslationStep = TranslationStep(treeMakers.toList, subpatterns.toList)
+
+      private def bindingStep(sub: Symbol, subpattern: Tree) = step(SubstOnlyTreeMaker(sub, binder))(rebindTo(subpattern))
+      private def equalityTestStep()                         = step(EqualityTestTreeMaker(binder, tree, pos))()
+      private def typeTestStep(sub: Symbol, subPt: Type)     = step(TypeTestTreeMaker(sub, binder, subPt, glbWith(subPt))(pos))()
+      private def alternativesStep(alts: List[Tree])         = step(AlternativesTreeMaker(binder, translatedAlts(alts), alts.head.pos))()
+      private def translatedAlts(alts: List[Tree])           = alts map (alt => rebindTo(alt).translate())
+      private def noStep()                                   = step()()
+
+      private def unsupportedPatternMsg = sm"""
+        |unsupported pattern: ${tree.shortClass} / $this (this is a scalac bug.)
+        |""".trim
+
+      // example check: List[Int] <:< ::[Int]
+      private def extractorStep(): TranslationStep = {
+        def paramType = extractor.aligner.wholeType
+        import extractor.treeMaker
+        // chain a type-testing extractor before the actual extractor call
+        // it tests the type, checks the outer pointer and casts to the expected type
+        // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+        // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+        lazy val typeTest = TypeTestTreeMaker(binder, binder, paramType, paramType)(pos, extractorArgTypeTest = true)
+        // check whether typetest implies binder is not null,
+        // even though the eventual null check will be on typeTest.nextBinder
+        // it'll be equal to binder casted to paramType anyway (and the type test is on binder)
+        def extraction: TreeMaker = treeMaker(typeTest.nextBinder, typeTest impliesBinderNonNull binder, pos)
+
+        // paramType = the type expected by the unapply
+        // TODO: paramType may contain unbound type params (run/t2800, run/t3530)
+        val makers = (
+          // Statically conforms to paramType
+          if (this ensureConformsTo paramType) treeMaker(binder, false, pos) :: Nil
+          else typeTest :: extraction :: Nil
+        )
+        step(makers: _*)(extractor.subBoundTrees: _*)
+      }
+
+      // Summary of translation cases. I moved the excerpts from the specification further below so all
+      // the logic can be seen at once.
+      //
+      // [1] skip wildcard trees -- no point in checking them
+      // [2] extractor and constructor patterns
+      // [3] replace subpatBinder by patBinder, as if the Bind was not there.
+      //     It must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type,
+      //     this is not guaranteed until we cast
+      // [4] typed patterns - a typed pattern never has any subtrees
+      //     must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
+      // [5] literal and stable id patterns
+      // [6] pattern alternatives
+      // [7] symbol-less bind patterns - this happens in certain ill-formed programs, there'll be an error later
+      //     don't fail here though (or should we?)
+      def nextStep(): TranslationStep = tree match {
+        case WildcardPattern()                                        => noStep()
+        case _: UnApply | _: Apply                                    => extractorStep()
+        case SymbolAndTypeBound(sym, tpe)                             => typeTestStep(sym, tpe)
+        case TypeBound(tpe)                                           => typeTestStep(binder, tpe)
+        case SymbolBound(sym, expr)                                   => bindingStep(sym, expr)
+        case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) => equalityTestStep()
+        case Alternative(alts)                                        => alternativesStep(alts)
+        case _                                                        => context.unit.error(pos, unsupportedPatternMsg) ; noStep()
+      }
+      def translate(): List[TreeMaker] = nextStep() merge (_.translate())
+
+      private def setInfo(paramType: Type): Boolean = {
+        devWarning(s"resetting info of $this to $paramType")
+        setVarInfo(binder, paramType)
+        true
+      }
+      // If <:< but not =:=, no type test needed, but the tree maker relies on the binder having
+      // exactly paramType (and not just some type compatible with it.) SI-6624 shows this is necessary
+      // because apparently patBinder may have an unfortunate type (.decls don't have the case field
+      // accessors) TODO: get to the bottom of this -- I assume it happens when type checking
+      // infers a weird type for an unapply call. By going back to the parameterType for the
+      // extractor call we get a saner type, so let's just do that for now.
+      def ensureConformsTo(paramType: Type): Boolean = (
+           (tpe =:= paramType)
+        || (tpe <:< paramType) && setInfo(paramType)
+      )
+
+      private def concreteType = tpe.bounds.hi
+      private def unbound = unbind(tree)
+      private def tpe_s = if (pt <:< concreteType) "" + pt else s"$pt (binder: $tpe)"
+      private def at_s = unbound match {
+        case WildcardPattern() => ""
+        case pat               => s" @ $pat"
+      }
+      override def toString = s"${binder.name}: $tpe_s$at_s"
     }
 
-    // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
-    private lazy val MarkerCPSAdaptPlus  = rootMirror.getClassIfDefined("scala.util.continuations.cpsPlus")
-    private lazy val MarkerCPSAdaptMinus = rootMirror.getClassIfDefined("scala.util.continuations.cpsMinus")
-    private lazy val MarkerCPSSynth      = rootMirror.getClassIfDefined("scala.util.continuations.cpsSynth")
-    private lazy val stripTriggerCPSAnns = List(MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
-    private lazy val MarkerCPSTypes      = rootMirror.getClassIfDefined("scala.util.continuations.cpsParam")
-    private lazy val strippedCPSAnns     = MarkerCPSTypes :: stripTriggerCPSAnns
-    private def removeCPSAdaptAnnotations(tp: Type) = tp filterAnnotations (ann => !(strippedCPSAnns exists (ann matches _)))
+    // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
+    final case class TranslationStep(makers: List[TreeMaker], subpatterns: List[BoundTree]) {
+      def merge(f: BoundTree => List[TreeMaker]): List[TreeMaker] = makers ::: (subpatterns flatMap f)
+      override def toString = if (subpatterns.isEmpty) "" else subpatterns.mkString("(", ", ", ")")
+    }
 
     /** Implement a pattern match by turning its cases (including the implicit failure case)
       * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
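
A toy model of the TranslationStep/merge scheme introduced above (real steps
carry TreeMakers and BoundTrees; Strings stand in for both here): each step
contributes its own makers and then recursively translates its subpatterns,
flattening everything into a single list.

    final case class Step(makers: List[String], subpatterns: List[Step]) {
      def merge(f: Step => List[String]): List[String] =
        makers ::: subpatterns.flatMap(f)
      def translate(): List[String] = merge(_.translate())
    }

    // Step(List("typeTest"), List(Step(List("eqTest"), Nil))).translate()
    //   == List("typeTest", "eqTest")
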
@@ -113,10 +204,8 @@ trait MatchTranslation { self: PatternMatching  =>
       val Match(selector, cases) = match_
 
       val (nonSyntheticCases, defaultOverride) = cases match {
-        case init :+ last if treeInfo isSyntheticDefaultCase last =>
-          (init, Some(((scrut: Tree) => last.body)))
-        case _ =>
-          (cases, None)
+        case init :+ last if treeInfo isSyntheticDefaultCase last => (init, Some(((scrut: Tree) => last.body)))
+        case _                                                    => (cases, None)
       }
 
       checkMatchVariablePatterns(nonSyntheticCases)
@@ -133,18 +222,11 @@ trait MatchTranslation { self: PatternMatching  =>
 
       val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
 
-      val origPt  = match_.tpe
       // when one of the internal cps-type-state annotations is present, strip all CPS annotations
-      // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
-      // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
-      val ptUnCPS =
-        if (MarkerCPSAdaptPlus != NoSymbol && (stripTriggerCPSAnns exists origPt.hasAnnotation))
-          removeCPSAdaptAnnotations(origPt)
-        else origPt
-
+      val origPt  = removeCPSFromPt(match_.tpe)
       // relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
       // pt is the skolemized version
-      val pt = repeatedToSeq(ptUnCPS)
+      val pt = repeatedToSeq(origPt)
 
       // val packedPt = repeatedToSeq(typer.packedType(match_, context.owner))
       val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS
@@ -169,36 +251,34 @@ trait MatchTranslation { self: PatternMatching  =>
           val bindersAndCases = caseDefs map { caseDef =>
             // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
             // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
-            val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+            val caseScrutSym = freshSym(pos, pureType(ThrowableTpe))
             (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
           }
 
-          for(cases <- emitTypeSwitch(bindersAndCases, pt).toList;
+          for(cases <- emitTypeSwitch(bindersAndCases, pt).toList
               if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end
               cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef]
         }
 
         val catches = if (swatches.nonEmpty) swatches else {
-          val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+          val scrutSym = freshSym(pos, pureType(ThrowableTpe))
           val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))}
 
-          val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
+          val exSym = freshSym(pos, pureType(ThrowableTpe), "ex")
 
           List(
               atPos(pos) {
                 CaseDef(
                   Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping?
                   EmptyTree,
-                  combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym))))
+                  combineCasesNoSubstOnly(REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(REF(exSym))))
                 )
               })
         }
 
-        typer.typedCases(catches, ThrowableClass.tpe, WildcardType)
+        typer.typedCases(catches, ThrowableTpe, WildcardType)
       }
 
-
-
     /**  The translation of `pat if guard => body` has two aspects:
       *     1) the substitution due to the variables bound by patterns
       *     2) the combination of the extractor calls using `flatMap`.
@@ -227,166 +307,12 @@ trait MatchTranslation { self: PatternMatching  =>
       *    a function that will take care of binding and substitution of the next ast (to the right).
       *
       */
-    def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) =>
-      translatePattern(scrutSym, pattern) ++ translateGuard(guard) :+ translateBody(body, pt)
+    def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = {
+      val CaseDef(pattern, guard, body) = caseDef
+      translatePattern(BoundTree(scrutSym, pattern)) ++ translateGuard(guard) :+ translateBody(body, pt)
     }
 
-    def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = {
-      // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
-      type TranslationStep = (List[TreeMaker], List[(Symbol, Tree)])
-      def withSubPats(treeMakers: List[TreeMaker], subpats: (Symbol, Tree)*): TranslationStep = (treeMakers, subpats.toList)
-      def noFurtherSubPats(treeMakers: TreeMaker*): TranslationStep = (treeMakers.toList, Nil)
-
-      val pos = patTree.pos
-
-      def translateExtractorPattern(extractor: ExtractorCall): TranslationStep = {
-        if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context)
-        // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
-
-        debug.patmat("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
-
-        // must use type `tp`, which is provided by extractor's result, not the type expected by binder,
-        // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
-        // (it will later result in a type test when `tp` is not a subtype of `b.info`)
-        // TODO: can we simplify this, together with the Bound case?
-        (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) =>
-          debug.patmat("changing "+ b +" : "+ b.info +" -> "+ tp)
-          b setInfo tp
-        }
-
-        // example check: List[Int] <:< ::[Int]
-        // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
-        // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
-        val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
-          if (patBinder.info.widen <:< extractor.paramType) {
-            // no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
-            // SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
-            // TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
-            // by going back to the parameterType for the extractor call we get a saner type, so let's just do that for now
-            /* TODO: uncomment when `settings.developer` and `devWarning` become available
-              if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
-                devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
-            */
-            (Nil, patBinder setInfo extractor.paramType, false)
-          } else {
-            // chain a type-testing extractor before the actual extractor call
-            // it tests the type, checks the outer pointer and casts to the expected type
-            // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
-            // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
-            val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
-
-            // check whether typetest implies patBinder is not null,
-            // even though the eventual null check will be on patBinderOrCasted
-            // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder)
-            (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
-          }
-
-        withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
-      }
-
-
-      object MaybeBoundTyped {
-        /** Decompose the pattern in `tree`, of shape C(p_1, ..., p_N), into a list of N symbols, and a list of its N sub-trees
-          * The list of N symbols contains symbols for every bound name as well as the un-named sub-patterns (fresh symbols are generated here for these).
-          * The returned type is the one inferred by inferTypedPattern (`owntype`)
-          *
-          * @arg patBinder  symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that patterns result)
-        */
-        def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
-          // the Ident subpattern can be ignored, subpatBinder or patBinder tell us all we need to know about it
-          case Bound(subpatBinder, typed @ Typed(Ident(_), tpt)) if typed.tpe ne null => Some((subpatBinder, typed.tpe))
-          case Bind(_, typed @ Typed(Ident(_), tpt))             if typed.tpe ne null => Some((patBinder, typed.tpe))
-          case Typed(Ident(_), tpt)                            if tree.tpe ne null  => Some((patBinder, tree.tpe))
-          case _  => None
-        }
-      }
-
-      val (treeMakers, subpats) = patTree match {
-        // skip wildcard trees -- no point in checking them
-        case WildcardPattern() => noFurtherSubPats()
-        case UnApply(unfun, args) =>
-          // TODO: check unargs == args
-          // debug.patmat("unfun: "+ (unfun.tpe, unfun.symbol.ownerChain, unfun.symbol.info, patBinder.info))
-          translateExtractorPattern(ExtractorCall(unfun, args))
-
-        /** A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
-          It consists of a stable identifier c, followed by element patterns p1, ..., pn.
-          The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
-
-          If the case class is monomorphic, then it must conform to the expected type of the pattern,
-          and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected types of the element patterns p1, ..., pn.
-
-          If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of c conforms to the expected type of the pattern.
-          The instantiated formal parameter types of c’s primary constructor are then taken as the expected types of the component patterns p1, ..., pn.
-
-          The pattern matches all objects created from constructor invocations c(v1, ..., vn) where each element pattern pi matches the corresponding value vi .
-          A special case arises when c’s formal parameter types end in a repeated parameter. This is further discussed in (§8.1.9).
-        **/
-        case Apply(fun, args)     =>
-          ExtractorCall.fromCaseClass(fun, args) map translateExtractorPattern getOrElse {
-            ErrorUtils.issueNormalTypeError(patTree, "Could not find unapply member for "+ fun +" with args "+ args)(context)
-            noFurtherSubPats()
-          }
-
-        /** A typed pattern x : T consists of a pattern variable x and a type pattern T.
-            The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
-            This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
-        **/
-        // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
-        case MaybeBoundTyped(subPatBinder, pt) =>
-          val next = glb(List(dealiasWiden(patBinder.info), pt)).normalize
-          // a typed pattern never has any subtrees
-          noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, next)(pos))
-
-        /** A pattern binder x @ p consists of a pattern variable x and a pattern p.
-            The type of the variable x is the static type T of the pattern p.
-            This pattern matches any value v matched by the pattern p,
-            provided the run-time type of v is also an instance of T,  <-- TODO! https://issues.scala-lang.org/browse/SI-1503
-            and it binds the variable name to that value.
-        **/
-        case Bound(subpatBinder, p)          =>
-          // replace subpatBinder by patBinder (as if the Bind was not there)
-          withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)),
-            // must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type, this is not guaranteed until we cast
-            (patBinder, p)
-          )
-
-        /** 8.1.4 Literal Patterns
-              A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
-              The type of L must conform to the expected type of the pattern.
-
-            8.1.5 Stable Identifier Patterns  (a stable identifier r (see §3.1))
-              The pattern matches any value v such that r == v (§12.1).
-              The type of r must conform to the expected type of the pattern.
-        **/
-        case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) =>
-          noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos))
-
-        case Alternative(alts)    =>
-          noFurtherSubPats(AlternativesTreeMaker(patBinder, alts map (translatePattern(patBinder, _)), alts.head.pos))
-
-      /* TODO: Paul says about future version: I think this should work, and always intended to implement if I can get away with it.
-          case class Foo(x: Int, y: String)
-          case class Bar(z: Int)
-
-          def f(x: Any) = x match { case Foo(x, _) | Bar(x) => x } // x is lub of course.
-      */
-
-        case Bind(n, p) => // this happens in certain ill-formed programs, there'll be an error later
-          debug.patmat("WARNING: Bind tree with unbound symbol "+ patTree)
-          noFurtherSubPats() // there's no symbol -- something's wrong... don't fail here though (or should we?)
-
-        // case Star(_) | ArrayValue  => error("stone age pattern relics encountered!")
-
-        case _                       =>
-          typer.context.unit.error(patTree.pos, s"unsupported pattern: $patTree (a ${patTree.getClass}).\n This is a scalac bug. Tree diagnostics: ${asCompactDebugString(patTree)}.")
-          noFurtherSubPats()
-      }
-
-      treeMakers ++ subpats.flatMap { case (binder, pat) =>
-        translatePattern(binder, pat) // recurse on subpatterns
-      }
-    }
+    def translatePattern(bound: BoundTree): List[TreeMaker] = bound.translate()
 
     def translateGuard(guard: Tree): List[TreeMaker] =
       if (guard == EmptyTree) Nil
@@ -401,27 +327,70 @@ trait MatchTranslation { self: PatternMatching  =>
     def translateBody(body: Tree, matchPt: Type): TreeMaker =
       BodyTreeMaker(body, matchPt)
 
+    // Some notes from the specification
+
+    /*A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
+      It consists of a stable identifier c, followed by element patterns p1, ..., pn.
+      The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
+
+      If the case class is monomorphic, then it must conform to the expected type of the pattern,
+      and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected
+      types of the element patterns p1, ..., pn.
+
+      If the case class is polymorphic, then its type parameters are instantiated so that the
+      instantiation of c conforms to the expected type of the pattern.
+      The instantiated formal parameter types of c’s primary constructor are then taken as the
+      expected types of the component patterns p1, ..., pn.
+
+      The pattern matches all objects created from constructor invocations c(v1, ..., vn)
+      where each element pattern pi matches the corresponding value vi .
+      A special case arises when c’s formal parameter types end in a repeated parameter.
+      This is further discussed in (§8.1.9).
+    **/
+
+    /* A typed pattern x : T consists of a pattern variable x and a type pattern T.
+       The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
+       This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
+    */
+
+    /* A pattern binder x @ p consists of a pattern variable x and a pattern p.
+       The type of the variable x is the static type T of the pattern p.
+       This pattern matches any value v matched by the pattern p,
+       provided the run-time type of v is also an instance of T,  <-- TODO! https://issues.scala-lang.org/browse/SI-1503
+       and it binds the variable name to that value.
+    */
+
+    /* 8.1.4 Literal Patterns
+         A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
+         The type of L must conform to the expected type of the pattern.
+
+       8.1.5 Stable Identifier Patterns  (a stable identifier r (see §3.1))
+         The pattern matches any value v such that r == v (§12.1).
+         The type of r must conform to the expected type of the pattern.
+    */
+
 
 ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
 // helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation
 ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
 
     object ExtractorCall {
-      def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args)
-      def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] =  Some(new ExtractorCallProd(fun, args))
+      // TODO: check unargs == args
+      def apply(tree: Tree): ExtractorCall = tree match {
+        case UnApply(unfun, args) => new ExtractorCallRegular(alignPatterns(tree), unfun, args) // extractor
+        case Apply(fun, args)     => new ExtractorCallProd(alignPatterns(tree), fun, args)      // case class
+      }
     }
 
-    abstract class ExtractorCall(val args: List[Tree]) {
-      val nbSubPats = args.length
+    abstract class ExtractorCall(val aligner: PatternAligned) {
+      import aligner._
+      def fun: Tree
+      def args: List[Tree]
 
-      // everything okay, captain?
-      def isTyped    : Boolean
-
-      def isSeq: Boolean
-      lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last)
-
-      // to which type should the previous binder be casted?
-      def paramType  : Type
+      // don't go looking for selectors if we only expect one pattern
+      def rawSubPatTypes = aligner.extractedTypes
+      def resultInMonad  = if (isBool) UnitTpe else typeOfMemberNamedGet(resultType)
+      def resultType     = fun.tpe.finalResultType
 
       /** Create the TreeMaker that embodies this extractor call
        *
@@ -433,79 +402,82 @@ trait MatchTranslation { self: PatternMatching  =>
 
       // `subPatBinders` are the variables bound by this pattern in the following patterns
       // subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
-      lazy val subPatBinders = args map {
-        case Bound(b, p) => b
-        case p => freshSym(p.pos, prefix = "p")
-      }
-
-      lazy val subBindersAndPatterns: List[(Symbol, Tree)] = (subPatBinders zip args) map {
-        case (b, Bound(_, p)) => (b, p)
-        case bp => bp
-      }
+      // must set infos to `subPatTypes`, which are provided by extractor's result,
+      // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
+      // (it will later result in a type test when `tp` is not a subtype of `b.info`)
+      // TODO: can we simplify this, together with the Bound case?
+      def subPatBinders = subBoundTrees map (_.binder)
+      lazy val subBoundTrees = (args, subPatTypes).zipped map newBoundTree
 
       // never store these in local variables (for PreserveSubPatBinders)
-      lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
-        case (b, PatternBoundToUnderscore()) => b
-      }.toSet
-
-      def subPatTypes: List[Type] =
-        if(isSeq) {
-          val TypeRef(pre, SeqClass, args) = seqTp
-          // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
-          val formalsWithRepeated = rawSubPatTypes.init :+ typeRef(pre, RepeatedParamClass, args)
-
-          if (lastIsStar) formalTypes(formalsWithRepeated, nbSubPats - 1) :+ seqTp
-          else formalTypes(formalsWithRepeated, nbSubPats)
-        } else rawSubPatTypes
-
-      protected def rawSubPatTypes: List[Type]
-
-      protected def seqTp = rawSubPatTypes.last baseType SeqClass
-      protected def seqLenCmp                = rawSubPatTypes.last member nme.lengthCompare
-      protected lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
-      protected lazy val lastIndexingBinder  = if(lastIsStar) nbSubPats-2 else nbSubPats-1
-      protected lazy val expectedLength      = lastIndexingBinder - firstIndexingBinder + 1
-      protected lazy val minLenToCheck       = if(lastIsStar) 1 else 0
-      protected def seqTree(binder: Symbol)  = tupleSel(binder)(firstIndexingBinder+1)
+      lazy val ignoredSubPatBinders: Set[Symbol] = subPatBinders zip args collect { case (b, PatternBoundToUnderscore()) => b } toSet
+
+      // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
+      private def nonStarSubPatTypes = aligner.typedNonStarPatterns map (_.tpe)
+
+      def subPatTypes: List[Type] = typedPatterns map (_.tpe)
+
+      // there are `productArity` non-seq elements in the tuple.
+      protected def firstIndexingBinder = productArity
+      protected def expectedLength      = elementArity
+      protected def lastIndexingBinder  = totalArity - starArity - 1
+
+      private def productElemsToN(binder: Symbol, n: Int): List[Tree] = 1 to n map tupleSel(binder) toList
+      private def genTake(binder: Symbol, n: Int): List[Tree]         = (0 until n).toList map (codegen index seqTree(binder))
+      private def genDrop(binder: Symbol, n: Int): List[Tree]         = codegen.drop(seqTree(binder))(expectedLength) :: Nil
+
+      // codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+      protected def seqTree(binder: Symbol)                = tupleSel(binder)(firstIndexingBinder + 1)
       protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i)
 
-      // the trees that select the subpatterns on the extractor's result, referenced by `binder`
-      // require isSeq
+      // the trees that select the subpatterns on the extractor's result,
+      // referenced by `binder`
       protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
-        val indexingIndices   = (0 to (lastIndexingBinder-firstIndexingBinder))
-        val nbIndexingIndices = indexingIndices.length
-
+        def lastTrees: List[Tree] = (
+          if (!aligner.isStar) Nil
+          else if (expectedLength == 0) seqTree(binder) :: Nil
+          else genDrop(binder, expectedLength)
+        )
         // this error-condition has already been checked by checkStarPatOK:
-        //   if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
-        // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
-        (((1 to firstIndexingBinder) map tupleSel(binder)) ++
-        // then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
-        (indexingIndices map codegen.index(seqTree(binder))) ++
-        // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
-        (if(!lastIsStar) Nil else List(
-          if(nbIndexingIndices == 0) seqTree(binder)
-          else codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+        //   if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == totalArity, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
+
+        // [1] there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
+        // [2] then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
+        // [3] the last one -- if the last subpattern is a sequence wildcard:
+        //       drop the prefix (indexed by the refs on the preceding line), return the remainder
+        (    productElemsToN(binder, firstIndexingBinder)
+          ++ genTake(binder, expectedLength)
+          ++ lastTrees
+        ).toList
       }
 
       // the trees that select the subpatterns on the extractor's result, referenced by `binder`
       // require (nbSubPats > 0 && (!lastIsStar || isSeq))
-      protected def subPatRefs(binder: Symbol): List[Tree] =
-        if (nbSubPats == 0) Nil
-        else if (isSeq) subPatRefsSeq(binder)
-        else ((1 to nbSubPats) map tupleSel(binder)).toList
+      protected def subPatRefs(binder: Symbol): List[Tree] = (
+        if (totalArity > 0 && isSeq) subPatRefsSeq(binder)
+        else productElemsToN(binder, totalArity)
+      )
+
+      private def compareInts(t1: Tree, t2: Tree) =
+        gen.mkMethodCall(termMember(ScalaPackage, "math"), TermName("signum"), Nil, (t1 INT_- t2) :: Nil)
 
       protected def lengthGuard(binder: Symbol): Option[Tree] =
         // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
-        checkedLength map { expectedLength => import CODE._
+        checkedLength map { expectedLength =>
           // `binder.lengthCompare(expectedLength)`
-          def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength))
+          // ...if binder has a lengthCompare method, otherwise
+          // `scala.math.signum(binder.length - expectedLength)`
+          def checkExpectedLength = sequenceType member nme.lengthCompare match {
+            case NoSymbol => compareInts(Select(seqTree(binder), nme.length), LIT(expectedLength))
+            case lencmp   => (seqTree(binder) DOT lencmp)(LIT(expectedLength))
+          }
 
           // the comparison to perform
           // when the last subpattern is a wildcard-star the expectedLength is but a lower bound
           // (otherwise equality is required)
           def compareOp: (Tree, Tree) => Tree =
-            if (lastIsStar)  _ INT_>= _
-            else             _ INT_== _
+            if (aligner.isStar) _ INT_>= _
+            else                _ INT_== _
 
           // `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
           (seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO)
@@ -513,35 +485,14 @@ trait MatchTranslation { self: PatternMatching  =>
 
       def checkedLength: Option[Int] =
         // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
-        if (!isSeq || (expectedLength < minLenToCheck)) None
+        if (!isSeq || expectedLength < starArity) None
         else Some(expectedLength)
-
     }
 
     // TODO: to be called when there's a def unapplyProd(x: T): U
     // U must have N members _1,..., _N -- the _i are type checked, call their type Ti,
-    //
     // for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
-    class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) {
-      // TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here:
-      /*override def equals(x$1: Any): Boolean = ...
-             val o5: Option[com.mosol.sl.Span[Any]] =  // Span[Any] --> Any is not a legal type argument for Span!
-      */
-      // private val orig            = fun match {case tpt: TypeTree => tpt.original case _ => fun}
-      // private val origExtractorTp = unapplyMember(orig.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe).tpe
-      // private val extractorTp     = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)
-      // debug.patmat("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)))
-      // debug.patmat("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe)))
-      private def constructorTp = fun.tpe
-
-      def isTyped    = fun.isTyped
-
-      // to which type should the previous binder be casted?
-      def paramType  = constructorTp.finalResultType
-
-      def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
-      protected def rawSubPatTypes = constructorTp.paramTypes
-
+    class ExtractorCallProd(aligner: PatternAligned, val fun: Tree, val args: List[Tree]) extends ExtractorCall(aligner) {
       /** Create the TreeMaker that embodies this extractor call
        *
        * `binder` has been casted to `paramType` if necessary
@@ -553,34 +504,27 @@ trait MatchTranslation { self: PatternMatching  =>
         // binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
         // make an exception for classes under the scala package as they should be well-behaved,
         // to optimize matching on List
-        val mutableBinders =
+        val mutableBinders = (
           if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
               (paramAccessors exists (_.isMutable)))
             subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
           else Nil
+        )
 
         // checks binder ne null before chaining to the next extractor
         ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
       }
 
       // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
-      override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
+      override protected def tupleSel(binder: Symbol)(i: Int): Tree = {
         val accessors = binder.caseFieldAccessors
         if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
         else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
       }
-
-      override def toString(): String = "case class "+ (if (constructorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
     }
 
-    class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) {
-      private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false }
-
-      def tpe        = extractorCall.tpe
-      def isTyped    = (tpe ne NoType) && extractorCall.isTyped && (resultInMonad ne ErrorType)
-      def paramType  = tpe.paramTypes.head
-      def resultType = tpe.finalResultType
-      def isSeq      = extractorCall.symbol.name == nme.unapplySeq
+    class ExtractorCallRegular(aligner: PatternAligned, extractorCallIncludingDummy: Tree, val args: List[Tree]) extends ExtractorCall(aligner) {
+      val Unapplied(fun) = extractorCallIncludingDummy
 
       /** Create the TreeMaker that embodies this extractor call
        *
@@ -593,82 +537,53 @@ trait MatchTranslation { self: PatternMatching  =>
        *    Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
        */
       def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
-        // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
+        // the extractor call (applied to the binder bound by the flatMap corresponding
+        // to the previous (i.e., enclosing/outer) pattern)
         val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
-        val binder         = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
-        ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
+        // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely
+        // wrong when isSeq, and resultInMonad should always be correct since it comes
+        // directly from the extractor's result type
+        val binder         = freshSym(pos, pureType(resultInMonad))
+
+        ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(
+          subPatBinders,
+          subPatRefs(binder),
+          aligner.isBool,
+          checkedLength,
+          patBinderOrCasted,
+          ignoredSubPatBinders
+        )
       }
 
       override protected def seqTree(binder: Symbol): Tree =
-        if (firstIndexingBinder == 0) CODE.REF(binder)
+        if (firstIndexingBinder == 0) REF(binder)
         else super.seqTree(binder)
 
       // the trees that select the subpatterns on the extractor's result, referenced by `binder`
-      // require (nbSubPats > 0 && (!lastIsStar || isSeq))
+      // require (totalArity > 0 && (!lastIsStar || isSeq))
       override protected def subPatRefs(binder: Symbol): List[Tree] =
-        if (!isSeq && nbSubPats == 1) List(CODE.REF(binder)) // special case for extractors
+        if (aligner.isSingle) REF(binder) :: Nil // special case for extractors
         else super.subPatRefs(binder)
 
       protected def spliceApply(binder: Symbol): Tree = {
         object splice extends Transformer {
+          def binderRef(pos: Position): Tree =
+            REF(binder) setPos pos
           override def transform(t: Tree) = t match {
+            // duplicated with the extractor Unapplied
             case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) =>
-              treeCopy.Apply(t, x, List(CODE.REF(binder).setPos(i.pos)))
-            case _ => super.transform(t)
+              treeCopy.Apply(t, x, binderRef(i.pos) :: Nil)
+            // SI-7868 Account for numeric widening, e.g. <unapplySelector>.toInt
+            case Apply(x, List(i @ (sel @ Select(Ident(nme.SELECTOR_DUMMY), name)))) =>
+              treeCopy.Apply(t, x, treeCopy.Select(sel, binderRef(i.pos), name) :: Nil)
+            case _ =>
+              super.transform(t)
           }
         }
-        splice.transform(extractorCallIncludingDummy)
-      }
-
-      // what's the extractor's result type in the monad?
-      // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
-      protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
-        if (resultType.typeSymbol == BooleanClass) UnitClass.tpe
-        else matchMonadResult(resultType)
-      }
-
-      protected lazy val rawSubPatTypes =
-        if (resultInMonad.typeSymbol eq UnitClass) Nil
-        else if(!isSeq && nbSubPats == 1)          List(resultInMonad)
-        else getProductArgs(resultInMonad) match {
-          case Nil => List(resultInMonad)
-          case x   => x
-        }
-
-      override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")."
-    }
-
-    /** A conservative approximation of which patterns do not discern anything.
-     * They are discarded during the translation.
-     */
-    object WildcardPattern {
-      def unapply(pat: Tree): Boolean = pat match {
-        case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
-        case Ident(nme.WILDCARD)                   => true
-        case Star(WildcardPattern())               => true
-        case x: Ident                              => treeInfo.isVarPattern(x)
-        case Alternative(ps)                       => ps forall (WildcardPattern.unapply(_))
-        case EmptyTree                             => true
-        case _                                     => false
-      }
-    }
-
-    object PatternBoundToUnderscore {
-      def unapply(pat: Tree): Boolean = pat match {
-        case Bind(nme.WILDCARD, _)                => true // don't skip when binding an interesting symbol!
-        case Ident(nme.WILDCARD)                  => true
-        case Alternative(ps)                      => ps forall (PatternBoundToUnderscore.unapply(_))
-        case Typed(PatternBoundToUnderscore(), _) => true
-        case _                                    => false
+        splice transform extractorCallIncludingDummy
       }
-    }
 
-    object Bound {
-      def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
-        case t at Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
-          Some((t.symbol, p))
-        case _ => None
-      }
+      override def rawSubPatTypes = aligner.extractor.varargsTypes
     }
   }
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index 202f344..5d8a9fe 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -11,7 +11,6 @@ import scala.language.postfixOps
 import scala.collection.mutable
 import scala.reflect.internal.util.Statistics
 import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.NoPosition
 
 /** Translate our IR (TreeMakers) into actual Scala Trees using the factory methods in MatchCodeGen.
  *
@@ -19,13 +18,8 @@ import scala.reflect.internal.util.NoPosition
  * mostly agnostic to whether we're in optimized/pure (virtualized) mode.
  */
 trait MatchTreeMaking extends MatchCodeGen with Debugging {
-  import PatternMatchingStats._
-  import global.{Tree, Type, Symbol, CaseDef, atPos, settings,
-    Select, Block, ThisType, SingleType, NoPrefix, NoType, needsOuterTest,
-    ConstantType, Literal, Constant, gen, This, EmptyTree, map2, NoSymbol, Traverser,
-    Function, Typed, treeInfo, TypeRef, DefTree, Ident, nme}
-
-  import global.definitions.{SomeClass, AnyRefClass, UncheckedClass, BooleanClass}
+  import global._
+  import definitions._
 
   final case class Suppression(exhaustive: Boolean, unreachable: Boolean)
   object Suppression {
@@ -60,7 +54,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
 
       private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
         if (currSub ne null) {
-          debug.patmat("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
+          debug.patmat("BUG: incorporateOuterSubstitution called more than once for "+ ((this, currSub, outerSubst)))
           Thread.dumpStack()
         }
         else currSub = outerSubst >> substitution
@@ -85,7 +79,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
       def chainBefore(next: Tree)(casegen: Casegen): Tree
     }
 
-    trait NoNewBinders extends TreeMaker {
+    sealed trait NoNewBinders extends TreeMaker {
       protected val localSubstitution: Substitution = EmptySubstitution
     }
 
@@ -100,7 +94,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
 
       def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
         atPos(body.pos)(casegen.one(substitution(body))) // since SubstOnly treemakers are dropped, need to do it here
-      override def toString = "B"+(body, matchPt)
+      override def toString = "B"+((body, matchPt))
     }
 
     case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
@@ -111,12 +105,12 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
       override def toString = "S"+ localSubstitution
     }
 
-    abstract class FunTreeMaker extends TreeMaker {
+    sealed abstract class FunTreeMaker extends TreeMaker {
       val nextBinder: Symbol
       def pos = nextBinder.pos
     }
 
-    abstract class CondTreeMaker extends FunTreeMaker {
+    sealed abstract class CondTreeMaker extends FunTreeMaker {
       val prevBinder: Symbol
       val nextBinderTp: Type
       val cond: Tree
@@ -132,7 +126,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
     // unless we're optimizing, emit local variable bindings for all subpatterns of extractor/case class patterns
     protected val debugInfoEmitVars = !settings.optimise.value
 
-    trait PreserveSubPatBinders extends TreeMaker {
+    sealed trait PreserveSubPatBinders extends TreeMaker {
       val subPatBinders: List[Symbol]
       val subPatRefs: List[Tree]
       val ignoredSubPatBinders: Set[Symbol]
@@ -165,7 +159,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
       override def subPatternsAsSubstitution =
         Substitution(subPatBinders, subPatRefs) >> super.subPatternsAsSubstitution
 
-      import CODE._
       def bindSubPats(in: Tree): Tree =
         if (!emitVars) in
         else {
@@ -180,7 +173,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
           else {
             // only store binders actually used
             val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip
-            Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+            Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(ValDef(_, _)), in)
           }
         }
     }
@@ -207,6 +200,16 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
 
       def extraStoredBinders: Set[Symbol] = Set()
 
+      debug.patmat(s"""
+        |ExtractorTreeMaker($extractor, $extraCond, $nextBinder) {
+        |  $subPatBinders
+        |  $subPatRefs
+        |  $extractorReturnsBoolean
+        |  $checkedLength
+        |  $prevBinder
+        |  $ignoredSubPatBinders
+        |}""".stripMargin)
+
       def chainBefore(next: Tree)(casegen: Casegen): Tree = {
         val condAndNext = extraCond match {
           case Some(cond) =>
@@ -220,7 +223,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
         )
       }
 
-      override def toString = "X"+(extractor, nextBinder.name)
+      override def toString = "X"+((extractor, nextBinder.name))
     }
 
     /**
@@ -274,7 +277,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
         }
       }
 
-      override def toString = "P"+(prevBinder.name,  extraCond getOrElse "", localSubstitution)
+      override def toString = "P"+((prevBinder.name,  extraCond getOrElse "", localSubstitution))
     }
 
     object IrrefutableExtractorTreeMaker {
@@ -284,8 +287,8 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
       def irrefutableExtractorType(tp: Type): Boolean = tp.resultType.dealias match {
         case TypeRef(_, SomeClass, _) => true
         // probably not useful since this type won't be inferred nor can it be written down (yet)
-        case ConstantType(Constant(true)) => true
-        case _ => false
+        case ConstantTrue => true
+        case _            => false
       }
 
       def unapply(xtm: ExtractorTreeMaker): Option[(Tree, Symbol)] = xtm match {
@@ -324,9 +327,9 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
 
         def outerTest(testedBinder: Symbol, expectedTp: Type): Tree = {
           val expectedOuter = expectedTp.prefix match {
-            case ThisType(clazz)      => THIS(clazz)
-            case pre if pre != NoType => REF(pre.prefix, pre.termSymbol)
-            case _ => mkTRUE // fallback for SI-6183
+            case ThisType(clazz) => This(clazz)
+            case NoType          => mkTRUE // fallback for SI-6183
+            case pre             => REF(pre.prefix, pre.termSymbol)
           }
 
           // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
@@ -389,11 +392,13 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
     **/
     case class TypeTestTreeMaker(prevBinder: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(override val pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker {
       import TypeTestTreeMaker._
-      debug.patmat("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))
+      debug.patmat("TTTM"+((prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp)))
 
       lazy val outerTestNeeded = (
-          !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
-        && needsOuterTest(expectedTp, testedBinder.info, matchOwner))
+           (expectedTp.prefix ne NoPrefix)
+        && !expectedTp.prefix.typeSymbol.isPackageClass
+        && needsOuterTest(expectedTp, testedBinder.info, matchOwner)
+      )
 
       // the logic to generate the run-time test that follows from the fact that
       // a `prevBinder` is expected to have type `expectedTp`
@@ -403,44 +408,51 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
       def renderCondition(cs: TypeTestCondStrategy): cs.Result = {
         import cs._
 
-        def default =
-          // do type test first to ensure we won't select outer on null
-          if (outerTestNeeded) and(typeTest(testedBinder, expectedTp), outerTest(testedBinder, expectedTp))
-          else typeTest(testedBinder, expectedTp)
-
         // propagate expected type
         def expTp(t: Tree): t.type = t setType expectedTp
 
+        def testedWide              = testedBinder.info.widen
+        def expectedWide            = expectedTp.widen
+        def isAnyRef                = testedWide <:< AnyRefTpe
+        def isAsExpected            = testedWide <:< expectedTp
+        def isExpectedPrimitiveType = isAsExpected && isPrimitiveValueType(expectedTp)
+        def isExpectedReferenceType = isAsExpected && (expectedTp <:< AnyRefTpe)
+        def mkNullTest              = nonNullTest(testedBinder)
+        def mkOuterTest             = outerTest(testedBinder, expectedTp)
+        def mkTypeTest              = typeTest(testedBinder, expectedWide)
+
+        def mkEqualsTest(lhs: Tree): cs.Result      = equalsTest(lhs, testedBinder)
+        def mkEqTest(lhs: Tree): cs.Result          = eqTest(lhs, testedBinder)
+        def addOuterTest(res: cs.Result): cs.Result = if (outerTestNeeded) and(res, mkOuterTest) else res
+
+        // If we conform to expected primitive type:
+        //   it cannot be null and cannot have an outer pointer. No further checking.
+        // If we conform to expected reference type:
+        //   have to test outer and non-null
+        // If we do not conform to expected type:
+        //   have to test type and outer (non-null is implied by successful type test)
+        def mkDefault = (
+          if (isExpectedPrimitiveType) tru
+          else addOuterTest(
+            if (isExpectedReferenceType) mkNullTest
+            else mkTypeTest
+          )
+        )
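+
+        // For illustration (hypothetical cases): `case s: String =>` against a scrutinee
+        // statically typed Any falls into mkTypeTest (plus the outer test when needed);
+        // against a scrutinee already typed String only mkNullTest remains; and
+        // `case i: Int =>` against an Int scrutinee needs no test at all (tru).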
+
         // true when called to type-test the argument to an extractor
         // don't do any fancy equality checking, just test the type
-        if (extractorArgTypeTest) default
+        // TODO: verify that we don't need to special-case Array
+        // I think it's okay:
+        //  - the isInstanceOf test includes a test for the element type
+        //  - Scala's arrays are invariant (so we don't drop type tests unsoundly)
+        if (extractorArgTypeTest) mkDefault
         else expectedTp match {
-          // TODO: [SPEC] the spec requires `eq` instead of `==` for singleton types
-          // this implies sym.isStable
-          case SingleType(_, sym)                       => and(equalsTest(gen.mkAttributedQualifier(expectedTp), testedBinder), typeTest(testedBinder, expectedTp.widen))
-          // must use == to support e.g. List() == Nil
-          case ThisType(sym) if sym.isModule            => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen))
-          case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefClass.tpe
-                                                        => eqTest(expTp(CODE.NULL), testedBinder)
-          case ConstantType(const)                      => equalsTest(expTp(Literal(const)), testedBinder)
-          case ThisType(sym)                            => eqTest(expTp(This(sym)), testedBinder)
-
-          // TODO: verify that we don't need to special-case Array
-          // I think it's okay:
-          //  - the isInstanceOf test includes a test for the element type
-          //  - Scala's arrays are invariant (so we don't drop type tests unsoundly)
-          case _ if testedBinder.info.widen <:< expectedTp =>
-            // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
-            // since the types conform, no further checking is required
-            if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
-            // have to test outer and non-null only when it's a reference type
-            else if (expectedTp <:< AnyRefClass.tpe) {
-              // do non-null check first to ensure we won't select outer on null
-              if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
-              else nonNullTest(testedBinder)
-            } else default
-
-          case _ => default
+          case SingleType(_, sym)                       => mkEqTest(gen.mkAttributedQualifier(expectedTp)) // SI-4577, SI-4897
+          case ThisType(sym) if sym.isModule            => and(mkEqualsTest(CODE.REF(sym)), mkTypeTest) // must use == to support e.g. List() == Nil
+          case ConstantType(Constant(null)) if isAnyRef => mkEqTest(expTp(CODE.NULL))
+          case ConstantType(const)                      => mkEqualsTest(expTp(Literal(const)))
+          case ThisType(sym)                            => mkEqTest(expTp(This(sym)))
+          case _                                        => mkDefault
         }
       }
 
@@ -452,7 +464,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
 
       def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder))
 
-      override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
+      override def toString = "TT"+((expectedTp, testedBinder.name, nextBinderTp))
     }
 
     // need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp)
@@ -463,7 +475,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
       // equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
       val cond = codegen._equals(patTree, prevBinder)
       val res  = CODE.REF(prevBinder)
-      override def toString = "ET"+(prevBinder.name, patTree)
+      override def toString = "ET"+((prevBinder.name, patTree))
     }
 
     case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
@@ -474,7 +486,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
         altss = altss map (alts => propagateSubstitution(alts, substitution))
       }
 
-      def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = { import CODE._
+      def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = {
         atPos(pos){
           // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
           // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
@@ -482,7 +494,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
             ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(mkTRUE)))(casegen))
           )
 
-          val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => mkFALSE))
+          val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanTpe)(combinedAlts, Some(x => mkFALSE))
           codegenAlt.ifThenElseZero(findAltMatcher, substitution(next))
         }
       }
@@ -523,12 +535,13 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
 
     // pt is the fully defined type of the cases (either pt or the lub of the types of the cases)
     def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree =
-      fixerUpper(owner, scrut.pos){
-        def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
+      fixerUpper(owner, scrut.pos) {
+        def matchFailGen = matchFailGenOverride orElse Some(Throw(MatchErrorClass.tpe, _: Tree))
+
         debug.patmat("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
 
         val (suppression, requireSwitch): (Suppression, Boolean) =
-          if (settings.XnoPatmatAnalysis.value) (Suppression.NoSuppression, false)
+          if (settings.XnoPatmatAnalysis) (Suppression.NoSuppression, false)
           else scrut match {
             case Typed(tree, tpt) =>
               val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass
@@ -587,18 +600,17 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
         t match {
           case Function(_, _) if t.symbol == NoSymbol =>
             t.symbol = currentOwner.newAnonymousFunctionValue(t.pos)
-            debug.patmat("new symbol for "+ (t, t.symbol.ownerChain))
+            debug.patmat("new symbol for "+ ((t, t.symbol.ownerChain)))
           case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
-            debug.patmat("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
+            debug.patmat("fundef: "+ ((t, t.symbol.ownerChain, currentOwner.ownerChain)))
             t.symbol.owner = currentOwner
           case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
-            debug.patmat("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
-            if(d.symbol.moduleClass ne NoSymbol)
-              d.symbol.moduleClass.owner = currentOwner
+            debug.patmat("def: "+ ((d, d.symbol.ownerChain, currentOwner.ownerChain)))
 
+            d.symbol.moduleClass andAlso (_.owner = currentOwner)
             d.symbol.owner = currentOwner
           // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
-          debug.patmat("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
+          debug.patmat("untouched "+ ((t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain)))
           case _ =>
         }
         super.traverse(t)
@@ -611,4 +623,4 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
       // currentRun.trackerFactory.snapshot()
     }
   }
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
new file mode 100644
index 0000000..a7d7680
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
@@ -0,0 +1,86 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+
+trait MatchWarnings {
+  self: PatternMatching =>
+
+  import global._
+
+  trait TreeMakerWarnings {
+    self: MatchTranslator =>
+
+    import typer.context
+
+    // Why is it so difficult to say "here's a name and a context, give me any
+    // matching symbol in scope"? I am sure this code is wrong, but attempts to
+    // use the scopes of the contexts in the enclosing context chain discover
+    // nothing. A canonical mechanism for associating a name with its symbol
+    // would be a welcome addition.
+    private def matchingSymbolInScope(pat: Tree): Symbol = {
+      def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
+        case PolyType(tparams, restpe)  => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
+        case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
+        case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
+        case _                          => NoSymbol
+      }
+      pat match {
+        case Bind(name, _) =>
+          context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
+            res orElse declarationOfName(ctx.owner.rawInfo, name))
+        case _ => NoSymbol
+      }
+    }
+
+    // Issue better warnings than "unreachable code" when people mis-use
+    // variable patterns thinking they bind to existing identifiers.
+    //
+    // Possible TODO: more deeply nested variable patterns, like
+    //   case (a, b) => 1 ; case (c, d) => 2
+    // However this is a pain (at least the way I'm going about it)
+    // and I have to think these detailed errors are primarily useful
+    // for beginners, not people writing nested pattern matches.
+    def checkMatchVariablePatterns(cases: List[CaseDef]) {
+      // A string describing the first variable pattern
+      var vpat: String = null
+      // Using an iterator so we can recognize the last case
+      val it = cases.iterator
+
+      def addendum(pat: Tree) = {
+        matchingSymbolInScope(pat) match {
+          case NoSymbol   => ""
+          case sym        =>
+            val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
+            s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
+        }
+      }
+
+      while (it.hasNext) {
+        val cdef = it.next()
+        // If a default case has been seen, then every succeeding case is unreachable.
+        if (vpat != null)
+          context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
+        // If this is a default case and more cases follow, warn about this one so
+        // we have a reason to mention its pattern variable name and any corresponding
+        // symbol in scope.  Errors will follow from the remaining cases, at least
+        // once we make the above warning an error.
+        else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
+          val vpatName = cdef.pat match {
+            case Bind(name, _)   => s" '$name'"
+            case _               => ""
+          }
+          vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
+          context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+        }
+      }
+    }
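+
+    // For illustration (hypothetical user code):
+    //   def f(mode: String, x: String) = x match {
+    //     case mode   => 1   // binds a fresh `mode`, matching anything
+    //     case "slow" => 2
+    //   }
+    // The first case gets "patterns after a variable pattern cannot match (SLS 8.1.1)"
+    // plus a hint to use backticks (case `mode` =>) if the existing parameter was
+    // intended, and the following case is reported as unreachable.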
+  }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala
new file mode 100644
index 0000000..e84ccbf
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala
@@ -0,0 +1,155 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala
+package tools
+package nsc
+package transform
+package patmat
+
+/** An extractor returns: F1, F2, ..., Fi, opt[Seq[E] or E*]
+ *        A case matches: P1, P2, ..., Pj, opt[Seq[E]]
+ *          Put together: P1/F1, P2/F2, ... Pi/Fi, Pi+1/E, Pi+2/E, ... Pj/E, opt[Seq[E]]
+ *
+ *  Here Pi/Fi is the last pattern to match the fixed arity section.
+ *
+ *    productArity: the value of i, i.e. the number of non-sequence types in the extractor
+ *    nonStarArity: the value of j, i.e. the number of non-star patterns in the case definition
+ *    elementArity: j - i, i.e. the number of non-star patterns which must match sequence elements
+ *       starArity: 1 or 0 based on whether there is a star (sequence-absorbing) pattern
+ *      totalArity: nonStarArity + starArity, i.e. the number of patterns in the case definition
+ *
+ *  Note that productArity is a function only of the extractor, and
+ *  nonStar/star/totalArity are all functions of the patterns. The key
+ *  value for aligning and typing the patterns is elementArity, as it
+ *  is derived from both sets of information.
+ */
+trait PatternExpander[Pattern, Type] {
+  /** You'll note we're not inside the cake. "Pattern" and "Type" are
+   *  arbitrary types here, and NoPattern and NoType arbitrary values.
+   */
+  def NoPattern: Pattern
+  def NoType: Type
+
+  /** It's not optimal that we're carrying both sequence and repeated
+   *  type here, but the implementation requires more unraveling before
+   *  it can be avoided.
+   *
+   *  sequenceType is Seq[T], elementType is T, repeatedType is T*.
+   */
+  sealed case class Repeated(sequenceType: Type, elementType: Type, repeatedType: Type) {
+    def exists = elementType != NoType
+
+    def elementList  = if (exists) elementType :: Nil else Nil
+    def sequenceList = if (exists) sequenceType :: Nil else Nil
+    def repeatedList = if (exists) repeatedType :: Nil else Nil
+
+    override def toString = s"${elementType}*"
+  }
+  object NoRepeated extends Repeated(NoType, NoType, NoType) {
+    override def toString = "<none>"
+  }
+
+  final case class Patterns(fixed: List[Pattern], star: Pattern) {
+    def hasStar      = star != NoPattern
+    def starArity    = if (hasStar) 1 else 0
+    def nonStarArity = fixed.length
+    def totalArity   = nonStarArity + starArity
+    def starPatterns = if (hasStar) star :: Nil else Nil
+    def all          = fixed ::: starPatterns
+
+    override def toString = all mkString ", "
+  }
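+
+  // For illustration (hypothetical subpatterns): the patterns of `case Foo(a, b, rest @ _*)`
+  // are represented as Patterns(fixed = List(a, b), star = rest @ _*), giving
+  // nonStarArity = 2, starArity = 1, totalArity = 3; without the trailing star,
+  // Patterns(List(a, b), NoPattern) has starArity = 0 and totalArity = 2.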
+
+  /** An 'extractor' can be a case class or an unapply or unapplySeq method.
+   *  Decoding what it is that they extract takes place before we arrive here,
+   *  so that this class can concentrate only on the relationship between
+   *  patterns and types.
+   *
+   *  In a case class, the class is the unextracted type and the fixed and
+   *  repeated types are derived from its constructor parameters.
+   *
+   *  In an unapply, this is reversed: the parameter to the unapply is the
+   *  unextracted type, and the other types are derived based on the return
+   *  type of the unapply method.
+   *
+   *  In other words, this case class and unapply are encoded the same:
+   *
+   *    case class Foo(x: Int, y: Int, zs: Char*)
+   *    def unapplySeq(x: Foo): Option[(Int, Int, Seq[Char])]
+   *
+   *  Both are Extractor(Foo, Int :: Int :: Nil, Repeated(Seq[Char], Char, Char*))
+   *
+   *  @param  whole     The type in its unextracted form
+   *  @param  fixed     The non-sequence types which are extracted
+   *  @param  repeated  The sequence type which is extracted
+   */
+  final case class Extractor(whole: Type, fixed: List[Type], repeated: Repeated) {
+    require(whole != NoType, s"expandTypes($whole, $fixed, $repeated)")
+
+    def productArity = fixed.length
+    def hasSeq       = repeated.exists
+    def elementType  = repeated.elementType
+    def sequenceType = repeated.sequenceType
+    def allTypes     = fixed ::: repeated.sequenceList
+    def varargsTypes = fixed ::: repeated.repeatedList
+    def isErroneous  = allTypes contains NoType
+
+    private def typeStrings = fixed.map("" + _) ::: ( if (hasSeq) List("" + repeated) else Nil )
+
+    def offeringString = if (isErroneous) "<error>" else typeStrings match {
+      case Nil       => "Boolean"
+      case tp :: Nil => tp
+      case tps       => tps.mkString("(", ", ", ")")
+    }
+    override def toString = "%s => %s".format(whole, offeringString)
+  }
+
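+  // For illustration, reusing the example from the comment above:
+  //   case class Foo(x: Int, y: Int, zs: Char*)
+  // is Extractor(Foo, Int :: Int :: Nil, Repeated(Seq[Char], Char, Char*)), so
+  // productArity = 2, hasSeq = true, allTypes = Int :: Int :: Seq[Char] :: Nil,
+  // varargsTypes = Int :: Int :: Char* :: Nil, and offeringString = "(Int, Int, Char*)".
+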
+  final case class TypedPat(pat: Pattern, tpe: Type) {
+    override def toString = s"$pat: $tpe"
+  }
+
+  /** If elementArity is...
+   *    0: A perfect match between extractor and the fixed patterns.
+   *       If there is a star pattern it will match any sequence.
+   *  > 0: There are more patterns than products. There will have to be a
+   *       sequence which can populate at least <elementArity> patterns.
+   *  < 0: There are more products than patterns: compile time error.
+   */
+  final case class Aligned(patterns: Patterns, extractor: Extractor) {
+    def elementArity = patterns.nonStarArity - productArity
+    def productArity = extractor.productArity
+    def starArity    = patterns.starArity
+    def totalArity   = patterns.totalArity
+
+    def wholeType            = extractor.whole
+    def sequenceType         = extractor.sequenceType
+    def productTypes         = extractor.fixed
+    def extractedTypes       = extractor.allTypes
+    def typedNonStarPatterns = products ::: elements
+    def typedPatterns        = typedNonStarPatterns ::: stars
+
+    def isBool   = !isSeq && productArity == 0
+    def isSingle = !isSeq && totalArity == 1
+    def isStar   = patterns.hasStar
+    def isSeq    = extractor.hasSeq
+
+    private def typedAsElement(pat: Pattern)  = TypedPat(pat, extractor.elementType)
+    private def typedAsSequence(pat: Pattern) = TypedPat(pat, extractor.sequenceType)
+    private def productPats = patterns.fixed take productArity
+    private def elementPats = patterns.fixed drop productArity
+    private def products    = (productPats, productTypes).zipped map TypedPat
+    private def elements    = elementPats map typedAsElement
+    private def stars       = patterns.starPatterns map typedAsSequence
+
+    override def toString = s"""
+      |Aligned {
+      |   patterns  $patterns
+      |  extractor  $extractor
+      |    arities  $productArity/$elementArity/$starArity  // product/element/star
+      |      typed  ${typedPatterns mkString ", "}
+      |}""".stripMargin.trim
+  }
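+
+  // For illustration: aligning the Foo extractor above with the patterns of
+  // `case Foo(a, b, c, rest @ _*)` gives productArity = 2, nonStarArity = 3,
+  // elementArity = 1 (the pattern `c` must match a Char element of `zs`),
+  // starArity = 1 and totalArity = 4; typedPatterns types `a` and `b` as Int,
+  // `c` as Char, and `rest` as Seq[Char].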
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
index df4e699..f6c960d 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -34,23 +34,25 @@ import scala.reflect.internal.util.Position
   *  - recover GADT typing by locally inserting implicit witnesses to type equalities derived from the current case, and considering these witnesses during subtyping (?)
   *  - recover exhaustivity/unreachability of user-defined extractors by partitioning the types they match on using an HList or similar type-level structure
   */
-trait PatternMatching extends Transform with TypingTransformers
+trait PatternMatching extends Transform
+                      with TypingTransformers
                       with Debugging
                       with Interface
                       with MatchTranslation
                       with MatchTreeMaking
                       with MatchCodeGen
+                      with MatchCps
                       with ScalaLogic
                       with Solving
                       with MatchAnalysis
-                      with MatchOptimization {
+                      with MatchOptimization
+                      with MatchWarnings
+                      with ScalacPatternExpanders {
   import global._
 
   val phaseName: String = "patmat"
 
-  def newTransformer(unit: CompilationUnit): Transformer =
-    if (opt.virtPatmat) new MatchTransformer(unit)
-    else noopTransformer
+  def newTransformer(unit: CompilationUnit): Transformer = new MatchTransformer(unit)
 
   class MatchTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
     override def transform(tree: Tree): Tree = tree match {
@@ -96,24 +98,26 @@ trait Debugging {
   // TODO: the inliner fails to inline the closures to debug.patmat unless the method is nested in an object
   object debug {
     val printPatmat = global.settings.Ypatmatdebug.value
-    @inline final def patmat(s: => String) = if (printPatmat) println(s)
+    @inline final def patmat(s: => String) = if (printPatmat) Console.err.println(s)
+    @inline final def patmatResult[T](s: => String)(result: T): T = {
+      if (printPatmat) Console.err.println(s + ": " + result)
+      result
+    }
   }
 }
 
 trait Interface extends ast.TreeDSL {
-  import global.{newTermName, analyzer, Type, ErrorType, Symbol, Tree}
+  import global._
   import analyzer.Typer
 
   // 2.10/2.11 compatibility
-  protected final def dealiasWiden(tp: Type)   = tp.dealias                       // 2.11: dealiasWiden
-  protected final def mkTRUE                   = CODE.TRUE_typed                  // 2.11: CODE.TRUE
-  protected final def mkFALSE                  = CODE.FALSE_typed                 // 2.11: CODE.FALSE
-  protected final def hasStableSymbol(p: Tree) = p.hasSymbol && p.symbol.isStable // 2.11: p.hasSymbolField && p.symbol.isStable
-  protected final def devWarning(str: String)  = global.debugwarn(str)            // 2.11: omit
+  protected final def dealiasWiden(tp: Type)   = tp.dealiasWiden
+  protected final def mkTRUE                   = CODE.TRUE
+  protected final def mkFALSE                  = CODE.FALSE
+  protected final def hasStableSymbol(p: Tree) = p.hasSymbolField && p.symbol.isStable
 
   object vpmName {
     val one       = newTermName("one")
-    val drop      = newTermName("drop")
     val flatMap   = newTermName("flatMap")
     val get       = newTermName("get")
     val guard     = newTermName("guard")
@@ -132,8 +136,9 @@ trait Interface extends ast.TreeDSL {
 ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
 
   /** Interface with user-defined match monad?
-   * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
+   * if there's a <code>__match</code> in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
 
+       {{{
        type Matcher[P[_], M[+_], A] = {
          def flatMap[B](f: P[A] => M[B]): M[B]
          def orElse[B >: A](alternative: => M[B]): M[B]
@@ -147,12 +152,14 @@ trait Interface extends ast.TreeDSL {
          def one[T](x: P[T]): M[T]
          def guard[T](cond: P[Boolean], then: => P[T]): M[T]
        }
+       }}}
 
    * P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`)
 
 
-   * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly)
+   * if no <code>__match</code> is found, we assume the following implementation (and generate optimized code accordingly)
 
+       {{{
        object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] {
          def zero = None
          def one[T](x: T) = Some(x)
@@ -160,11 +167,13 @@ trait Interface extends ast.TreeDSL {
          def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
          def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
        }
+       }}}
 
    */
   trait MatchMonadInterface {
     val typer: Typer
     val matchOwner = typer.context.owner
+    def pureType(tp: Type): Type = tp
 
     def reportUnreachable(pos: Position) = typer.context.unit.warning(pos, "unreachable code")
     def reportMissingCases(pos: Position, counterExamples: List[String]) = {
@@ -174,16 +183,6 @@ trait Interface extends ast.TreeDSL {
 
       typer.context.unit.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
     }
-
-    def inMatchMonad(tp: Type): Type
-    def pureType(tp: Type): Type
-    final def matchMonadResult(tp: Type): Type =
-      tp.baseType(matchMonadSym).typeArgs match {
-        case arg :: Nil => arg
-        case _ => ErrorType
-      }
-
-    protected def matchMonadSym: Symbol
   }
 
 
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
new file mode 100644
index 0000000..d10eff1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
@@ -0,0 +1,154 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala
+package tools
+package nsc
+package transform
+package patmat
+
+/** This is scalac-specific logic layered on top of the scalac-agnostic
+ *  "matching products to patterns" logic defined in PatternExpander.
+ */
+trait ScalacPatternExpanders {
+  val global: Global
+
+  import global._
+  import definitions._
+  import treeInfo._
+
+  type PatternAligned = ScalacPatternExpander#Aligned
+
+  implicit class AlignedOps(val aligned: PatternAligned) {
+    import aligned._
+    def expectedTypes     = typedPatterns map (_.tpe)
+    def unexpandedFormals = extractor.varargsTypes
+  }
+  trait ScalacPatternExpander extends PatternExpander[Tree, Type] {
+    def NoPattern = EmptyTree
+    def NoType    = global.NoType
+
+    def newPatterns(patterns: List[Tree]): Patterns = patterns match {
+      case init :+ last if isStar(last) => Patterns(init, last)
+      case _                            => Patterns(patterns, NoPattern)
+    }
+    def elementTypeOf(tpe: Type) = {
+      val seq = repeatedToSeq(tpe)
+
+      ( typeOfMemberNamedHead(seq)
+          orElse typeOfMemberNamedApply(seq)
+          orElse definitions.elementType(ArrayClass, seq)
+      )
+    }
+    def newExtractor(whole: Type, fixed: List[Type], repeated: Repeated): Extractor =
+      logResult(s"newExtractor($whole, $fixed, $repeated)")(Extractor(whole, fixed, repeated))
+
+    // Turn Seq[A] into Repeated(Seq[A], A, A*)
+    def repeatedFromSeq(seqType: Type): Repeated = {
+      val elem     = elementTypeOf(seqType)
+      val repeated = scalaRepeatedType(elem)
+
+      Repeated(seqType, elem, repeated)
+    }
+    // Turn A* into Repeated(Seq[A], A, A*)
+    def repeatedFromVarargs(repeated: Type): Repeated =
+      Repeated(repeatedToSeq(repeated), repeatedToSingle(repeated), repeated)
+
+    /** In this case we are basing the pattern expansion on a case class constructor.
+     *  The argument is the MethodType carried by the primary constructor.
+     */
+    def applyMethodTypes(method: Type): Extractor = {
+      val whole = method.finalResultType
+
+      method.paramTypes match {
+        case init :+ last if isScalaRepeatedParamType(last) => newExtractor(whole, init, repeatedFromVarargs(last))
+        case tps                                            => newExtractor(whole, tps, NoRepeated)
+      }
+    }
+
+    /** In this case, expansion is based on an unapply or unapplySeq method.
+     *  Unfortunately the MethodType does not carry the information of whether
+     *  it was unapplySeq, so we have to funnel that information in separately.
+     */
+    def unapplyMethodTypes(method: Type, isSeq: Boolean): Extractor = {
+      val whole    = firstParamType(method)
+      val result   = method.finalResultType
+      val expanded = (
+        if (result =:= BooleanTpe) Nil
+        else typeOfMemberNamedGet(result) match {
+          case rawGet if !hasSelectors(rawGet) => rawGet :: Nil
+          case rawGet                          => typesOfSelectors(rawGet)
+        }
+      )
+      expanded match {
+        case init :+ last if isSeq => newExtractor(whole, init, repeatedFromSeq(last))
+        case tps                   => newExtractor(whole, tps, NoRepeated)
+      }
+    }
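+
+    // For illustration (hypothetical signature): for
+    //   def unapplySeq(x: String): Option[(Int, Seq[Char])]
+    // whole = String, the Option's payload expands to Int :: Seq[Char] :: Nil, and
+    // with isSeq = true the trailing Seq[Char] becomes Repeated(Seq[Char], Char, Char*),
+    // i.e. Extractor(String, Int :: Nil, Repeated(Seq[Char], Char, Char*)).
+    // For a Boolean-returning unapply, fixed is Nil and repeated is NoRepeated.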
+  }
+  object alignPatterns extends ScalacPatternExpander {
+    /** Converts a T => (A, B, C) extractor to a T => ((A, B, C)) extractor.
+     */
+    def tupleExtractor(extractor: Extractor): Extractor =
+      extractor.copy(fixed = tupleType(extractor.fixed) :: Nil)
+
+    private def validateAligned(tree: Tree, aligned: Aligned): Aligned = {
+      import aligned._
+
+      def owner         = tree.symbol.owner
+      def offering      = extractor.offeringString
+      def symString     = tree.symbol.fullLocationString
+      def offerString   = if (extractor.isErroneous) "" else s" offering $offering"
+      def arityExpected = ( if (extractor.hasSeq) "at least " else "" ) + productArity
+
+      def err(msg: String)         = currentUnit.error(tree.pos, msg)
+      def warn(msg: String)        = currentUnit.warning(tree.pos, msg)
+      def arityError(what: String) = err(s"$what patterns for $owner$offerString: expected $arityExpected, found $totalArity")
+
+      if (isStar && !isSeq)
+        err("Star pattern must correspond with varargs or unapplySeq")
+      else if (elementArity < 0)
+        arityError("not enough")
+      else if (elementArity > 0 && !extractor.hasSeq)
+        arityError("too many")
+
+      aligned
+    }
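+
+    // For illustration (hypothetical case class) of the errors reported here:
+    // matching `case class Bar(x: Int, y: Int, z: Int)` with `case Bar(a, b)` gives
+    // elementArity = -1, hence "not enough patterns ... expected 3, found 2", while
+    // `case Bar(a, b, rest @ _*)` reports "Star pattern must correspond with varargs
+    // or unapplySeq" since Bar has no repeated parameter.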
+
+    def apply(sel: Tree, args: List[Tree]): Aligned = {
+      val fn = sel match {
+        case Unapplied(fn) => fn
+        case _             => sel
+      }
+      val patterns  = newPatterns(args)
+      val isSeq     = sel.symbol.name == nme.unapplySeq
+      val isUnapply = sel.symbol.name == nme.unapply
+      val extractor = sel.symbol.name match {
+        case nme.unapply    => unapplyMethodTypes(fn.tpe, isSeq = false)
+        case nme.unapplySeq => unapplyMethodTypes(fn.tpe, isSeq = true)
+        case _              => applyMethodTypes(fn.tpe)
+      }
+
+      /** Rather than let the error that is SI-6675 pollute the entire matching
+       *  process, we will tuple the extractor before creating the Aligned so that
+       *  it contains known good values.
+       */
+      def productArity    = extractor.productArity
+      def acceptMessage   = if (extractor.isErroneous) "" else s" to hold ${extractor.offeringString}"
+      val requiresTupling = isUnapply && patterns.totalArity == 1 && productArity > 1
+
+      if (requiresTupling && effectivePatternArity(args) == 1)
+        currentUnit.deprecationWarning(sel.pos, s"${sel.symbol.owner} expects $productArity patterns$acceptMessage but crushing into $productArity-tuple to fit single pattern (SI-6675)")
+
+      val normalizedExtractor = if (requiresTupling) tupleExtractor(extractor) else extractor
+      validateAligned(fn, Aligned(patterns, normalizedExtractor))
+    }
+
+    def apply(tree: Tree): Aligned = tree match {
+      case Apply(fn, args)   => apply(fn, args)
+      case UnApply(fn, args) => apply(fn, args)
+    }
+  }
+}
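
Aside on the alignment logic above: a minimal standalone sketch of the arity bookkeeping that validateAligned performs, using only the standard library. The names Shape and align are invented for illustration and stand in for the compiler's Extractor/Aligned types; only the accounting mirrors the code in the patch.

    object AlignSketch {
      final case class Shape(productArity: Int, hasSeq: Boolean)

      /** Returns an error message, or None when the pattern count is acceptable. */
      def align(shape: Shape, patternCount: Int, hasStar: Boolean): Option[String] = {
        val nonStarArity = if (hasStar) patternCount - 1 else patternCount
        val elementArity = nonStarArity - shape.productArity
        if (hasStar && !shape.hasSeq)
          Some("Star pattern must correspond with varargs or unapplySeq")
        else if (elementArity < 0)
          Some(s"not enough patterns: expected ${shape.productArity}, found $patternCount")
        else if (elementArity > 0 && !shape.hasSeq)
          Some(s"too many patterns: expected ${shape.productArity}, found $patternCount")
        else
          None
      }

      def main(args: Array[String]): Unit = {
        // unapply returning Option[(Int, String)]: two fixed values, no sequence
        println(align(Shape(productArity = 2, hasSeq = false), patternCount = 3, hasStar = false))
        // unapplySeq returning Option[Seq[Int]]: no fixed values, a sequence absorbs the rest
        println(align(Shape(productArity = 0, hasSeq = true), patternCount = 3, hasStar = true))
      }
    }
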
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
index ec66bf6..1902606 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -8,18 +8,13 @@ package scala.tools.nsc.transform.patmat
 
 import scala.collection.mutable
 import scala.reflect.internal.util.Statistics
+import scala.language.postfixOps
+import scala.reflect.internal.util.Collections._
 
 // naive CNF translation and simple DPLL solver
 trait Solving extends Logic {
   import PatternMatchingStats._
   trait CNF extends PropositionalLogic {
-
-    /** Override Array creation for efficiency (to not go through reflection). */
-    private implicit val clauseTag: scala.reflect.ClassTag[Clause] = new scala.reflect.ClassTag[Clause] {
-      def runtimeClass: java.lang.Class[Clause] = classOf[Clause]
-      final override def newArray(len: Int): Array[Clause] = new Array[Clause](len)
-    }
-
     import scala.collection.mutable.ArrayBuffer
     type FormulaBuilder = ArrayBuffer[Clause]
     def formulaBuilder  = ArrayBuffer[Clause]()
@@ -31,9 +26,12 @@ trait Solving extends Logic {
     type Formula = FormulaBuilder
     def formula(c: Clause*): Formula = ArrayBuffer(c: _*)
 
-    type Clause  = Set[Lit]
+    type Clause  = collection.Set[Lit]
     // a clause is a disjunction of distinct literals
-    def clause(l: Lit*): Clause = l.toSet
+    def clause(l: Lit*): Clause = (
+      // neg/t7020.scala changes output 1% of the time; the non-determinism is quelled with this linked set
+      mutable.LinkedHashSet(l: _*)
+    )
 
     type Lit
     def Lit(sym: Sym, pos: Boolean = true): Lit
@@ -71,7 +69,7 @@ trait Solving extends Logic {
       val TrueF          = formula()
       val FalseF         = formula(clause())
       def lit(s: Sym)    = formula(clause(Lit(s)))
-      def negLit(s: Sym) = formula(clause(Lit(s, false)))
+      def negLit(s: Sym) = formula(clause(Lit(s, pos = false)))
 
       def conjunctiveNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Formula = {
         def distribute(a: Formula, b: Formula, budget: Int): Formula =
@@ -139,7 +137,7 @@ trait Solving extends Logic {
     def cnfString(f: Formula) = alignAcrossRows(f map (_.toList) toList, "\\/", " /\\\n")
 
     // adapted from http://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat)
-    val EmptyModel = Map.empty[Sym, Boolean]
+    val EmptyModel = collection.immutable.SortedMap.empty[Sym, Boolean]
     val NoModel: Model = null
 
     // returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??)
@@ -164,7 +162,7 @@ trait Solving extends Logic {
               else Nil
             }
             val forced = unassigned flatMap { s =>
-              force(Lit(s, true)) ++ force(Lit(s, false))
+              force(Lit(s, pos = true)) ++ force(Lit(s, pos = false))
             }
             debug.patmat("forced "+ forced)
             val negated = negateModel(model)
@@ -211,9 +209,8 @@ trait Solving extends Logic {
             // SI-7020 Linked- for deterministic counter examples.
             val pos = new mutable.LinkedHashSet[Sym]()
             val neg = new mutable.LinkedHashSet[Sym]()
-            f.foreach{_.foreach{ lit =>
-              if (lit.pos) pos += lit.sym else neg += lit.sym
-            }}
+            mforeach(f)(lit => if (lit.pos) pos += lit.sym else neg += lit.sym)
+
             // appearing in both positive and negative
             val impures: mutable.LinkedHashSet[Sym] = pos intersect neg
             // appearing only in either positive/negative positions
@@ -235,9 +232,8 @@ trait Solving extends Logic {
             }
         }
 
-        if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start)
-
-        satisfiableWithModel
+      if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start)
+      satisfiableWithModel
     }
   }
 }
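
Aside on the clause change above: the move from l.toSet to mutable.LinkedHashSet is purely about iteration order, which in turn makes printed clauses and the counter-examples derived from them deterministic. A small standalone illustration, assuming nothing beyond the standard library:

    import scala.collection.mutable

    object ClauseOrderSketch {
      def main(args: Array[String]): Unit = {
        val lits = Seq("p", "q", "r", "s", "t")
        // LinkedHashSet iterates in insertion order, so a clause always prints the same way.
        val linked = mutable.LinkedHashSet(lits: _*)
        println(linked.mkString(" \\/ "))
        // A plain immutable Set makes no ordering promise, so its output may vary.
        println(lits.toSet.mkString(" \\/ "))
      }
    }
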
diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
index 62c584e..1e544e5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
@@ -24,6 +24,8 @@ trait Adaptations {
   trait Adaptation {
     self: Typer =>
 
+    import runDefinitions._
+
     def checkValidAdaptation(t: Tree, args: List[Tree]): Boolean = {
       def applyArg = t match {
         case Apply(_, arg :: Nil) => arg
@@ -41,11 +43,11 @@ trait Adaptations {
       def givenString = if (args.isEmpty) "<none>" else args.mkString(", ")
       def adaptedArgs = if (args.isEmpty) "(): Unit" else args.mkString("(", ", ", "): " + applyArg.tpe)
 
-      def adaptWarning(msg: String) = context.warning(t.pos, msg +
+      def adaptWarningMessage(msg: String, showAdaptation: Boolean = true) = msg +
         "\n        signature: " + sigString +
         "\n  given arguments: " + givenString +
-        "\n after adaptation: " + callString + "(" + adaptedArgs + ")"
-      )
+        (if (showAdaptation) "\n after adaptation: " + callString + "(" + adaptedArgs + ")" else "")
+
       // A one-argument method accepting Object (which may look like "Any"
       // at this point if the class is java defined) is a "leaky target" for
       // which we should be especially reluctant to insert () or auto-tuple.
@@ -66,18 +68,21 @@ trait Adaptations {
         )
       }
 
-      if (settings.noAdaptedArgs.value)
-        adaptWarning("No automatic adaptation here: use explicit parentheses.")
-      else if (settings.warnAdaptedArgs.value)
-        adaptWarning(
-          if (args.isEmpty) "Adapting argument list by inserting (): " + (
-            if (isLeakyTarget) "leaky (Object-receiving) target makes this especially dangerous."
-            else "this is unlikely to be what you want."
-          )
-          else "Adapting argument list by creating a " + args.size + "-tuple: this may not be what you want."
-        )
+      if (settings.noAdaptedArgs)
+        context.warning(t.pos, adaptWarningMessage("No automatic adaptation here: use explicit parentheses."))
+      else if (args.isEmpty) {
+        if (settings.future)
+          context.error(t.pos, adaptWarningMessage("Adaptation of argument list by inserting () has been removed.", showAdaptation = false))
+        else {
+          val msg = "Adaptation of argument list by inserting () has been deprecated: " + (
+          if (isLeakyTarget) "leaky (Object-receiving) target makes this especially dangerous."
+          else "this is unlikely to be what you want.")
+          context.unit.deprecationWarning(t.pos, adaptWarningMessage(msg))
+        }
+      } else if (settings.warnAdaptedArgs)
+        context.warning(t.pos, adaptWarningMessage(s"Adapting argument list by creating a ${args.size}-tuple: this may not be what you want."))
 
-      !settings.noAdaptedArgs.value
+      !settings.noAdaptedArgs || !(args.isEmpty && settings.future)
     }
   }
 }
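
Aside on the adaptation warnings above: a sketch of the call sites they fire on, with invented names (greet, Box). The comments assume the usual flags: -deprecation surfaces the deprecation warning for () insertion, -Ywarn-adapted-args the auto-tupling warning, and per the change above -Xfuture turns the () insertion into an error.

    object AdaptationSketch {
      def greet(x: Any): Unit = println(s"got $x")
      final case class Box(contents: Any)

      def main(args: Array[String]): Unit = {
        // Deprecated adaptation: the empty argument list is adapted by
        // inserting (), i.e. this compiles as greet(()).
        greet()
        // Auto-tupling: the two arguments are adapted into a 2-tuple,
        // i.e. this compiles as Box((1, 2)).
        println(Box(1, 2))
      }
    }
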
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index b504863..323fe1c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -16,7 +16,6 @@ trait Analyzer extends AnyRef
             with Typers
             with Infer
             with Implicits
-            with Variances
             with EtaExpansion
             with SyntheticMethods
             with Unapplies
@@ -30,8 +29,9 @@ trait Analyzer extends AnyRef
   val global : Global
   import global._
 
-  object namerFactory extends SubComponent {
+  object namerFactory extends {
     val global: Analyzer.this.global.type = Analyzer.this.global
+  } with SubComponent {
     val phaseName = "namer"
     val runsAfter = List[String]("parser")
     val runsRightAfter = None
@@ -45,8 +45,9 @@ trait Analyzer extends AnyRef
     }
   }
 
-  object packageObjects extends SubComponent {
+  object packageObjects extends {
     val global: Analyzer.this.global.type = Analyzer.this.global
+  } with SubComponent {
     val phaseName = "packageobjects"
     val runsAfter = List[String]()
     val runsRightAfter= Some("namer")
@@ -72,9 +73,10 @@ trait Analyzer extends AnyRef
     }
   }
 
-  object typerFactory extends SubComponent {
-    import scala.reflect.internal.TypesStats.typerNanos
+  object typerFactory extends {
     val global: Analyzer.this.global.type = Analyzer.this.global
+  } with SubComponent {
+    import scala.reflect.internal.TypesStats.typerNanos
     val phaseName = "typer"
     val runsAfter = List[String]()
     val runsRightAfter = Some("packageobjects")
@@ -88,22 +90,27 @@ trait Analyzer extends AnyRef
       override def run() {
         val start = if (Statistics.canEnable) Statistics.startTimer(typerNanos) else null
         global.echoPhaseSummary(this)
-        currentRun.units foreach applyPhase
-        undoLog.clear()
-        // need to clear it after as well or 10K+ accumulated entries are
-        // uncollectable the rest of the way.
+        for (unit <- currentRun.units) {
+          applyPhase(unit)
+          undoLog.clear()
+        }
         if (Statistics.canEnable) Statistics.stopTimer(typerNanos, start)
       }
       def apply(unit: CompilationUnit) {
         try {
-          unit.body = newTyper(rootContext(unit)).typed(unit.body)
-          if (global.settings.Yrangepos.value && !global.reporter.hasErrors) global.validatePositions(unit.body)
+          val typer = newTyper(rootContext(unit))
+          unit.body = typer.typed(unit.body)
+          if (global.settings.Yrangepos && !global.reporter.hasErrors) global.validatePositions(unit.body)
           for (workItem <- unit.toCheck) workItem()
-        } finally {
+          if (settings.warnUnusedImport)
+            warnUnusedImports(unit)
+          if (settings.warnUnused)
+            typer checkUnused unit
+        }
+        finally {
           unit.toCheck.clear()
         }
       }
     }
   }
 }
-
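
Aside on the `extends { val global = ... } with SubComponent` shape introduced above: it is Scala's early-definition syntax, which assigns the val before the parent's constructor runs. A standalone illustration with invented names (Component, banner), not compiler code:

    object EarlyInitSketch {
      abstract class Component {
        val name: String
        val banner = s"component: $name"   // computed while Component's constructor runs
      }

      // Regular initialization order: Component's body runs first, so `name`
      // is still null when `banner` is computed.
      object Late extends Component { val name = "namer" }

      // Early definition: `name` is assigned before Component's constructor runs.
      object Early extends { val name = "namer" } with Component

      def main(args: Array[String]): Unit = {
        println(Late.banner)    // component: null
        println(Early.banner)   // component: namer
      }
    }
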
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
index 28f620d..fa6e539 100644
--- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -13,7 +13,6 @@ package typechecker
 trait AnalyzerPlugins { self: Analyzer =>
   import global._
 
-
   trait AnalyzerPlugin {
     /**
      * Selectively activate this analyzer plugin, e.g. according to the compiler phase.
@@ -33,7 +32,7 @@ trait AnalyzerPlugins { self: Analyzer =>
     /**
      * Let analyzer plugins change the expected type before type checking a tree.
      */
-    def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = pt
+    def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Mode): Type = pt
 
     /**
      * Let analyzer plugins modify the type that has been computed for a tree.
@@ -44,7 +43,7 @@ trait AnalyzerPlugins { self: Analyzer =>
      * @param mode  Mode that was used for typing `tree`
      * @param pt    Expected type that was used for typing `tree`
      */
-    def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = tpe
+    def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = tpe
 
     /**
      * Let analyzer plugins change the types assigned to definitions. For definitions that have
@@ -133,7 +132,7 @@ trait AnalyzerPlugins { self: Analyzer =>
      * Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
      * given type tp, taking into account the given mode (see method adapt in trait Typers).
      */
-    def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = false
+    def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = false
 
     /**
      * Adapt a tree that has an annotated type to the given type tp, taking into account the given
@@ -142,11 +141,11 @@ trait AnalyzerPlugins { self: Analyzer =>
      * An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
      * class cannot do the adapting, it should return the tree unchanged.
      */
-    def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = tree
+    def adaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Tree = tree
 
     /**
      * Modify the type of a return expression. By default, return expressions have type
-     * NothingClass.tpe.
+     * NothingTpe.
      *
      * @param tpe   The type of the return expression
      * @param typer The typer that was used for typing the return tree
@@ -156,6 +155,117 @@ trait AnalyzerPlugins { self: Analyzer =>
     def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = tpe
   }
 
+  /**
+   * @define nonCumulativeReturnValueDoc Returns `None` if the plugin doesn't want to customize the default behavior
+   * or something else if the plugin knows better than the implementation provided in scala-compiler.jar.
+   * If multiple plugins return a non-empty result, it's going to be a compilation error.
+   */
+  trait MacroPlugin {
+    /**
+     * Selectively activate this macro plugin, e.g. according to the compiler phase.
+     *
+     * Note that the current phase can differ from the global compiler phase (look for `enteringPhase`
+     * invocations in the compiler). For instance, lazy types created by the UnPickler are completed
+     * at the phase in which their symbol is created. Observations show that this can even be the
+     * parser phase. Since symbol completion can trigger subtyping, typing etc, your plugin might
+     * need to be active also in phases other than namer and typer.
+     *
+     * Typically, this method can be implemented as
+     *
+     *   global.phase.id < global.currentRun.picklerPhase.id
+     */
+    def isActive(): Boolean = true
+
+    /**
+     * Typechecks the right-hand side of a macro definition (which typically features
+     * a mere reference to a macro implementation).
+     *
+     * Default implementation provided in `self.standardTypedMacroBody` makes sure that the rhs
+     * resolves to a reference to a method in either a static object or a macro bundle,
+     * verifies that the referred method is compatible with the macro def and upon success
+     * attaches a macro impl binding to the macro def's symbol.
+     *
+     * $nonCumulativeReturnValueDoc.
+     */
+    def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Option[Tree] = None
+
+    /**
+     * Expands an application of a def macro (i.e. of a symbol that has the MACRO flag set),
+     * possibly using the current typer mode and the provided prototype.
+     *
+     * Default implementation provided in `self.standardMacroExpand` figures out whether the `expandee`
+     * needs to be expanded right away or its expansion has to be delayed until all undetermined
+     * parameters are inferred, then loads the macro implementation using `self.pluginsMacroRuntime`,
+     * prepares the invocation arguments for the macro implementation using `self.pluginsMacroArgs`,
+     * and finally calls into the macro implementation. After the call returns, it typechecks
+     * the expansion and performs some bookkeeping.
+     *
+     * This method is typically implemented if your plugin requires significant changes to the macro engine.
+     * If you only need to customize the macro context, consider implementing `pluginsMacroArgs`.
+     * If you only need to customize how macro implementations are invoked, consider going for `pluginsMacroRuntime`.
+     *
+     * $nonCumulativeReturnValueDoc.
+     */
+    def pluginsMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Option[Tree] = None
+
+    /**
+     * Computes the arguments that need to be passed to the macro impl corresponding to a particular expandee.
+     *
+     * Default implementation provided in `self.standardMacroArgs` instantiates a `scala.reflect.macros.contexts.Context`,
+     * gathers type and value arguments of the macro application and throws them together into `MacroArgs`.
+     *
+     * $nonCumulativeReturnValueDoc.
+     */
+    def pluginsMacroArgs(typer: Typer, expandee: Tree): Option[MacroArgs] = None
+
+    /**
+     * Summons a function that encapsulates macro implementation invocations for a particular expandee.
+     *
+     * Default implementation provided in `self.standardMacroRuntime` returns a function that
+     * loads the macro implementation binding from the macro definition symbol,
+     * then uses either Java or Scala reflection to acquire the method that corresponds to the impl,
+     * and then reflectively calls into that method.
+     *
+     * $nonCumulativeReturnValueDoc.
+     */
+    def pluginsMacroRuntime(expandee: Tree): Option[MacroRuntime] = None
+
+    /**
+     * Creates a symbol for the given tree in lexical context encapsulated by the given namer.
+     *
+     * Default implementation provided in `namer.standardEnterSym` handles MemberDef's and Imports,
+     * doing nothing for other trees (DocDef's are seen through and rewrapped). Typical implementation
+     * of `enterSym` for a particular tree flavor creates a corresponding symbol, assigns it to the tree,
+     * enters the symbol into scope and then might even perform some code generation.
+     *
+     * $nonCumulativeReturnValueDoc.
+     */
+    def pluginsEnterSym(namer: Namer, tree: Tree): Boolean = false
+
+    /**
+     * Makes sure that for the given class definition, there exists a companion object definition.
+     *
+     * Default implementation provided in `namer.standardEnsureCompanionObject` looks up a companion symbol for the class definition
+     * and then checks whether the resulting symbol exists or not. If it exists, then nothing else is done.
+     * If not, a synthetic object definition is created using the provided factory, which is then entered into namer's scope.
+     *
+     * $nonCumulativeReturnValueDoc.
+     */
+    def pluginsEnsureCompanionObject(namer: Namer, cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Option[Symbol] = None
+
+    /**
+     * Prepares a list of statements for being typechecked by performing domain-specific type-agnostic code synthesis.
+     *
+     * Trees passed into this method are going to be named, but not typed.
+     * In particular, you can rely on the compiler having called `enterSym` on every stat prior to calling this method.
+     *
+     * Default implementation does nothing. Current approaches to code syntheses (generation of underlying fields
+     * for getters/setters, creation of companion objects for case classes, etc) are too disparate and ad-hoc
+     * to be treated uniformly, so I'm leaving this for future work.
+     */
+    def pluginsEnterStats(typer: Typer, stats: List[Tree]): List[Tree] = stats
+  }
+
 
 
   /** A list of registered analyzer plugins */
@@ -167,59 +277,158 @@ trait AnalyzerPlugins { self: Analyzer =>
       analyzerPlugins = plugin :: analyzerPlugins
   }
 
+  private abstract class CumulativeOp[T] {
+    def default: T
+    def accumulate: (T, AnalyzerPlugin) => T
+  }
+
+  private def invoke[T](op: CumulativeOp[T]): T = {
+    if (analyzerPlugins.isEmpty) op.default
+    else analyzerPlugins.foldLeft(op.default)((current, plugin) =>
+      if (!plugin.isActive()) current else op.accumulate(current, plugin))
+  }
 
   /** @see AnalyzerPlugin.pluginsPt */
-  def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type =
+  def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Mode): Type =
+    // performance opt
     if (analyzerPlugins.isEmpty) pt
-    else analyzerPlugins.foldLeft(pt)((pt, plugin) =>
-      if (!plugin.isActive()) pt else plugin.pluginsPt(pt, typer, tree, mode))
+    else invoke(new CumulativeOp[Type] {
+      def default = pt
+      def accumulate = (pt, p) => p.pluginsPt(pt, typer, tree, mode)
+    })
 
   /** @see AnalyzerPlugin.pluginsTyped */
-  def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
-    // support deprecated methods in annotation checkers
-    val annotCheckersTpe = addAnnotations(tree, tpe)
-    if (analyzerPlugins.isEmpty) annotCheckersTpe
-    else analyzerPlugins.foldLeft(annotCheckersTpe)((tpe, plugin) =>
-      if (!plugin.isActive()) tpe else plugin.pluginsTyped(tpe, typer, tree, mode, pt))
-  }
+  def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type =
+    // performance opt
+    if (analyzerPlugins.isEmpty) addAnnotations(tree, tpe)
+    else invoke(new CumulativeOp[Type] {
+      // support deprecated methods in annotation checkers
+      def default = addAnnotations(tree, tpe)
+      def accumulate = (tpe, p) => p.pluginsTyped(tpe, typer, tree, mode, pt)
+    })
 
   /** @see AnalyzerPlugin.pluginsTypeSig */
-  def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type =
-    if (analyzerPlugins.isEmpty) tpe
-    else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
-      if (!plugin.isActive()) tpe else plugin.pluginsTypeSig(tpe, typer, defTree, pt))
+  def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = invoke(new CumulativeOp[Type] {
+    def default = tpe
+    def accumulate = (tpe, p) => p.pluginsTypeSig(tpe, typer, defTree, pt)
+  })
 
   /** @see AnalyzerPlugin.pluginsTypeSigAccessor */
-  def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type =
-    if (analyzerPlugins.isEmpty) tpe
-    else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
-      if (!plugin.isActive()) tpe else plugin.pluginsTypeSigAccessor(tpe, typer, tree, sym))
+  def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type = invoke(new CumulativeOp[Type] {
+    def default = tpe
+    def accumulate = (tpe, p) => p.pluginsTypeSigAccessor(tpe, typer, tree, sym)
+  })
 
   /** @see AnalyzerPlugin.canAdaptAnnotations */
-  def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+  def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = invoke(new CumulativeOp[Boolean] {
     // support deprecated methods in annotation checkers
-    val annotCheckersExists = global.canAdaptAnnotations(tree, mode, pt)
-    annotCheckersExists || {
-      if (analyzerPlugins.isEmpty) false
-      else analyzerPlugins.exists(plugin =>
-        plugin.isActive() && plugin.canAdaptAnnotations(tree, typer, mode, pt))
-    }
-  }
+    def default = global.canAdaptAnnotations(tree, mode, pt)
+    def accumulate = (curr, p) => curr || p.canAdaptAnnotations(tree, typer, mode, pt)
+  })
 
   /** @see AnalyzerPlugin.adaptAnnotations */
-  def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
+  def adaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Tree = invoke(new CumulativeOp[Tree] {
     // support deprecated methods in annotation checkers
-    val annotCheckersTree = global.adaptAnnotations(tree, mode, pt)
-    if (analyzerPlugins.isEmpty) annotCheckersTree
-    else analyzerPlugins.foldLeft(annotCheckersTree)((tree, plugin) =>
-      if (!plugin.isActive()) tree else plugin.adaptAnnotations(tree, typer, mode, pt))
-  }
+    def default = global.adaptAnnotations(tree, mode, pt)
+    def accumulate = (tree, p) => p.adaptAnnotations(tree, typer, mode, pt)
+  })
 
   /** @see AnalyzerPlugin.pluginsTypedReturn */
-  def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = {
-    val annotCheckersType = adaptTypeOfReturn(tree.expr, pt, tpe)
-    if (analyzerPlugins.isEmpty) annotCheckersType
-    else analyzerPlugins.foldLeft(annotCheckersType)((tpe, plugin) =>
-      if (!plugin.isActive()) tpe else plugin.pluginsTypedReturn(tpe, typer, tree, pt))
+  def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = invoke(new CumulativeOp[Type] {
+    def default = adaptTypeOfReturn(tree.expr, pt, tpe)
+    def accumulate = (tpe, p) => p.pluginsTypedReturn(tpe, typer, tree, pt)
+  })
+
+  /** A list of registered macro plugins */
+  private var macroPlugins: List[MacroPlugin] = Nil
+
+  /** Registers a new macro plugin */
+  def addMacroPlugin(plugin: MacroPlugin) {
+    if (!macroPlugins.contains(plugin))
+      macroPlugins = plugin :: macroPlugins
+  }
+
+  private abstract class NonCumulativeOp[T] {
+    def position: Position
+    def description: String
+    def default: T
+    def custom(plugin: MacroPlugin): Option[T]
+  }
+
+  private def invoke[T](op: NonCumulativeOp[T]): T = {
+    if (macroPlugins.isEmpty) op.default
+    else {
+      val results = macroPlugins.filter(_.isActive()).map(plugin => (plugin, op.custom(plugin)))
+      results.flatMap { case (p, Some(result)) => Some((p, result)); case _ => None } match {
+        case (p1, _) :: (p2, _) :: _ => typer.context.error(op.position, s"both $p1 and $p2 want to ${op.description}"); op.default
+        case (_, custom) :: Nil => custom
+        case Nil => op.default
+      }
+    }
+  }
+
+  /** @see MacroPlugin.pluginsTypedMacroBody */
+  def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Tree = invoke(new NonCumulativeOp[Tree] {
+    def position = ddef.pos
+    def description = "typecheck this macro definition"
+    def default = standardTypedMacroBody(typer, ddef)
+    def custom(plugin: MacroPlugin) = plugin.pluginsTypedMacroBody(typer, ddef)
+  })
+
+  /** @see MacroPlugin.pluginsMacroExpand */
+  def pluginsMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = invoke(new NonCumulativeOp[Tree] {
+    def position = expandee.pos
+    def description = "expand this macro application"
+    def default = standardMacroExpand(typer, expandee, mode, pt)
+    def custom(plugin: MacroPlugin) = plugin.pluginsMacroExpand(typer, expandee, mode, pt)
+  })
+
+  /** @see MacroPlugin.pluginsMacroArgs */
+  def pluginsMacroArgs(typer: Typer, expandee: Tree): MacroArgs = invoke(new NonCumulativeOp[MacroArgs] {
+    def position = expandee.pos
+    def description = "compute macro arguments for this macro application"
+    def default = standardMacroArgs(typer, expandee)
+    def custom(plugin: MacroPlugin) = plugin.pluginsMacroArgs(typer, expandee)
+  })
+
+  /** @see MacroPlugin.pluginsMacroRuntime */
+  def pluginsMacroRuntime(expandee: Tree): MacroRuntime = invoke(new NonCumulativeOp[MacroRuntime] {
+    def position = expandee.pos
+    def description = "compute macro runtime for this macro application"
+    def default = standardMacroRuntime(expandee)
+    def custom(plugin: MacroPlugin) = plugin.pluginsMacroRuntime(expandee)
+  })
+
+  /** @see MacroPlugin.pluginsEnterSym */
+  def pluginsEnterSym(namer: Namer, tree: Tree): Context =
+    if (macroPlugins.isEmpty) namer.standardEnterSym(tree)
+    else invoke(new NonCumulativeOp[Context] {
+      def position = tree.pos
+      def description = "enter a symbol for this tree"
+      def default = namer.standardEnterSym(tree)
+      def custom(plugin: MacroPlugin) = {
+        val hasExistingSym = tree.symbol != NoSymbol
+        val result = plugin.pluginsEnterSym(namer, tree)
+        if (result && hasExistingSym) Some(namer.context)
+        else if (result && tree.isInstanceOf[Import]) Some(namer.context.make(tree))
+        else if (result) Some(namer.context)
+        else None
+      }
+    })
+
+  /** @see MacroPlugin.pluginsEnsureCompanionObject */
+  def pluginsEnsureCompanionObject(namer: Namer, cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = invoke(new NonCumulativeOp[Symbol] {
+    def position = cdef.pos
+    def description = "enter a companion symbol for this tree"
+    def default = namer.standardEnsureCompanionObject(cdef, creator)
+    def custom(plugin: MacroPlugin) = plugin.pluginsEnsureCompanionObject(namer, cdef, creator)
+  })
+
+  /** @see MacroPlugin.pluginsEnterStats */
+  def pluginsEnterStats(typer: Typer, stats: List[Tree]): List[Tree] = {
+    // performance opt
+    if (macroPlugins.isEmpty) stats
+    else macroPlugins.foldLeft(stats)((current, plugin) =>
+      if (!plugin.isActive()) current else plugin.pluginsEnterStats(typer, stats))
   }
 }
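
Aside on the plugin dispatch added above: a reduced standalone model of the two patterns, cumulative (every active plugin folds over the value) and non-cumulative (at most one plugin may answer, otherwise the default wins or an error is reported). The names below are invented for illustration; this is not the compiler API itself.

    object PluginDispatchSketch {
      trait Plugin {
        def isActive: Boolean = true
        def rewrite(s: String): String = s            // cumulative hook: every active plugin contributes
        def expand(s: String): Option[String] = None  // non-cumulative hook: at most one plugin answers
      }

      def cumulative(plugins: List[Plugin], initial: String): String =
        plugins.foldLeft(initial)((cur, p) => if (p.isActive) p.rewrite(cur) else cur)

      def nonCumulative(plugins: List[Plugin], default: String, input: String): String =
        plugins.filter(_.isActive).flatMap(_.expand(input)) match {
          case Nil           => default
          case custom :: Nil => custom
          case _             => sys.error("two plugins want to expand this")  // mirrors the error reported above
        }

      def main(args: Array[String]): Unit = {
        val plugins = List(
          new Plugin { override def rewrite(s: String) = s + "!" },
          new Plugin { override def expand(s: String) = Some(s.toUpperCase) }
        )
        println(cumulative(plugins, "tree"))            // tree!
        println(nonCumulative(plugins, "tree", "tree")) // TREE
      }
    }
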
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index d30b5c2..1388440 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -6,12 +6,8 @@
 package scala.tools.nsc
 package typechecker
 
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
-import scala.util.control.ControlThrowable
-import symtab.Flags._
-import scala.annotation.tailrec
 import Checkability._
+import scala.language.postfixOps
 
 /** On pattern matcher checkability:
  *
@@ -66,6 +62,9 @@ trait Checkable {
     bases foreach { bc =>
       val tps1 = (from baseType bc).typeArgs
       val tps2 = (tvarType baseType bc).typeArgs
+      if (tps1.size != tps2.size)
+        devWarning(s"Unequally sized type arg lists in propagateKnownTypes($from, $to): ($tps1, $tps2)")
+
       (tps1, tps2).zipped foreach (_ =:= _)
       // Alternate, variance respecting formulation causes
       // neg/unchecked3.scala to fail (abstract types).  TODO -
@@ -82,7 +81,7 @@ trait Checkable {
 
     val resArgs = tparams zip tvars map {
       case (_, tvar) if tvar.instValid => tvar.constr.inst
-      case (tparam, _)                 => tparam.tpe
+      case (tparam, _)                 => tparam.tpeHK
     }
     appliedType(to, resArgs: _*)
   }
@@ -112,7 +111,7 @@ trait Checkable {
   private class CheckabilityChecker(val X: Type, val P: Type) {
     def Xsym = X.typeSymbol
     def Psym = P.typeSymbol
-    def XR   = propagateKnownTypes(X, Psym)
+    def XR   = if (Xsym == AnyClass) classExistentialType(Psym) else propagateKnownTypes(X, Psym)
     // sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean]
     def P1   = X matchesPattern P
     def P2   = !Psym.isPrimitiveValueClass && isNeverSubType(X, P)
@@ -134,7 +133,7 @@ trait Checkable {
       else if (P3) RuntimeCheckable
       else if (uncheckableType == NoType) {
         // Avoid warning (except ourselves) if we can't pinpoint the uncheckable type
-        debugwarn("Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n" + summaryString)
+        debuglog("Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n" + summaryString)
         CheckabilityError
       }
       else Uncheckable
@@ -154,6 +153,7 @@ trait Checkable {
     def neverSubClass = isNeverSubClass(Xsym, Psym)
     def neverMatches  = result == StaticallyFalse
     def isUncheckable = result == Uncheckable
+    def isCheckable   = !isUncheckable
     def uncheckableMessage = uncheckableType match {
       case NoType                                   => "something"
       case tp @ RefinedType(_, _)                   => "refinement " + tp
@@ -186,7 +186,7 @@ trait Checkable {
      *  additional conditions holds:
      *   - either A or B is effectively final
      *   - neither A nor B is a trait (i.e. both are actual classes, not eligible for mixin)
-     *   - both A and B are sealed, and every possible pairing of their children is irreconcilable
+     *   - both A and B are sealed/final, and every possible pairing of their children is irreconcilable
      *
      *  TODO: the last two conditions of the last possibility (that the symbols are not of
      *  classes being compiled in the current run) are because this currently runs too early,
@@ -195,19 +195,28 @@ trait Checkable {
      *  so I will consult with moors about the optimal time to be doing this.
      */
     def areIrreconcilableAsParents(sym1: Symbol, sym2: Symbol): Boolean = areUnrelatedClasses(sym1, sym2) && (
-         sym1.initialize.isEffectivelyFinal // initialization important
-      || sym2.initialize.isEffectivelyFinal
+         isEffectivelyFinal(sym1) // initialization important
+      || isEffectivelyFinal(sym2)
       || !sym1.isTrait && !sym2.isTrait
-      || sym1.isSealed && sym2.isSealed && allChildrenAreIrreconcilable(sym1, sym2) && !currentRun.compiles(sym1) && !currentRun.compiles(sym2)
+      || isSealedOrFinal(sym1) && isSealedOrFinal(sym2) && allChildrenAreIrreconcilable(sym1, sym2) && !currentRun.compiles(sym1) && !currentRun.compiles(sym2)
+    )
+    private def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal
+    private def isEffectivelyFinal(sym: Symbol): Boolean = (
+      // initialization important
+      sym.initialize.isEffectivelyFinalOrNotOverridden || (
+        settings.future && isTupleSymbol(sym) // SI-7294 step into the future and treat TupleN as final.
+      )
     )
+
     def isNeverSubClass(sym1: Symbol, sym2: Symbol) = areIrreconcilableAsParents(sym1, sym2)
 
     private def isNeverSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol]): Boolean = /*logResult(s"isNeverSubArgs($tps1, $tps2, $tparams)")*/ {
-      def isNeverSubArg(t1: Type, t2: Type, variance: Int) = {
-        if (variance > 0) isNeverSubType(t2, t1)
-        else if (variance < 0) isNeverSubType(t1, t2)
-        else isNeverSameType(t1, t2)
-      }
+      def isNeverSubArg(t1: Type, t2: Type, variance: Variance) = (
+        if (variance.isInvariant) isNeverSameType(t1, t2)
+        else if (variance.isCovariant) isNeverSubType(t2, t1)
+        else if (variance.isContravariant) isNeverSubType(t1, t2)
+        else false
+      )
       exists3(tps1, tps2, tparams map (_.variance))(isNeverSubArg)
     }
     private def isNeverSameType(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
@@ -232,6 +241,17 @@ trait Checkable {
   trait InferCheckable {
     self: Inferencer =>
 
+    def isUncheckable(P0: Type) = !isCheckable(P0)
+
+    def isCheckable(P0: Type): Boolean = (
+      uncheckedOk(P0) || (P0.widen match {
+        case TypeRef(_, NothingClass | NullClass | AnyValClass, _) => false
+        case RefinedType(_, decls) if !decls.isEmpty               => false
+        case RefinedType(parents, _)                               => parents forall isCheckable
+        case p                                                     => new CheckabilityChecker(AnyTpe, p) isCheckable
+      })
+    )
+
     /** TODO: much better error positions.
      *  Kind of stuck right now because they just pass us the one tree.
      *  TODO: Eliminate inPattern, canRemedy, which have no place here.
@@ -240,10 +260,12 @@ trait Checkable {
       if (uncheckedOk(P0)) return
       def where = if (inPattern) "pattern " else ""
 
-      // singleton types not considered here
-      val P = P0.widen
+      // singleton types not considered here, dealias the pattern for SI-XXXX
+      val P = P0.dealiasWiden
       val X = X0.widen
 
+      def PString = if (P eq P0) P.toString else s"$P (the underlying of $P0)"
+
       P match {
         // Prohibit top-level type tests for these, but they are ok nested (e.g. case Foldable[Nothing] => ... )
         case TypeRef(_, NothingClass | NullClass | AnyValClass, _) =>
@@ -254,17 +276,21 @@ trait Checkable {
         // Matching on types like case _: AnyRef { def bippy: Int } => doesn't work -- yet.
         case RefinedType(_, decls) if !decls.isEmpty =>
           getContext.unit.warning(tree.pos, s"a pattern match on a refinement type is unchecked")
+        case RefinedType(parents, _) =>
+          parents foreach (p => checkCheckable(tree, p, X, inPattern, canRemedy))
         case _ =>
           val checker = new CheckabilityChecker(X, P)
-          log(checker.summaryString)
+          if (checker.result == RuntimeCheckable)
+            log(checker.summaryString)
+
           if (checker.neverMatches) {
             val addendum = if (checker.neverSubClass) "" else " (but still might match its erasure)"
-            getContext.unit.warning(tree.pos, s"fruitless type test: a value of type $X cannot also be a $P$addendum")
+            getContext.unit.warning(tree.pos, s"fruitless type test: a value of type $X cannot also be a $PString$addendum")
           }
           else if (checker.isUncheckable) {
             val msg = (
-              if (checker.uncheckableType =:= P) s"abstract type $where$P"
-              else s"${checker.uncheckableMessage} in type $where$P"
+              if (checker.uncheckableType =:= P) s"abstract type $where$PString"
+              else s"${checker.uncheckableMessage} in type $where$PString"
             )
             getContext.unit.warning(tree.pos, s"$msg is unchecked since it is eliminated by erasure")
           }
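
Aside on the checkability warnings above: a standalone example of the kind of type test the checker flags as unchecked, because the type argument is eliminated by erasure and cannot be verified at runtime.

    object ErasureSketch {
      def describe(x: Any): String = x match {
        case _: List[String] => "a list of strings?"   // the String argument is unchecked at runtime
        case _               => "something else"
      }

      def main(args: Array[String]): Unit =
        println(describe(List(1, 2, 3)))   // prints "a list of strings?" despite the Ints
    }
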
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index 89e2ee4..56ed0ee 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -3,10 +3,10 @@
  * @author  Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package typechecker
 
-
 import java.lang.ArithmeticException
 
 /** This class ...
@@ -18,7 +18,6 @@ abstract class ConstantFolder {
 
   val global: Global
   import global._
-  import definitions._
 
   /** If tree is a constant operation, replace with result. */
   def apply(tree: Tree): Tree = fold(tree, tree match {
@@ -29,9 +28,6 @@ abstract class ConstantFolder {
 
   /** If tree is a constant value that can be converted to type `pt`, perform
    *  the conversion.
-   *
-   *  @param tree ...
-   *  @param pt ...
    */
   def apply(tree: Tree, pt: Type): Tree = fold(apply(tree), tree.tpe match {
     case ConstantType(x) => x convertTo pt
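
Aside on the folder above: its effect is observable from ordinary code, since constant operations on literals and final vals collapse at compile time to single literals with constant types. A small standalone example:

    object ConstantFoldSketch {
      final val Bits     = 8 * 4               // folded to the literal 32, typed Int(32)
      final val Greeting = "con" + "stant"     // folded to the literal "constant"

      def main(args: Array[String]): Unit = {
        // The constants below are baked into the bytecode; no arithmetic or
        // concatenation happens at runtime.
        println(Bits)
        println(Greeting)
      }
    }
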
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index a7b0e47..9715fda 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -6,55 +6,54 @@
 package scala.tools.nsc
 package typechecker
 
-import scala.collection.{ mutable, immutable }
 import scala.reflect.internal.util.StringOps.{ countElementsAsString, countAsString }
-import symtab.Flags.{ PRIVATE, PROTECTED, IS_ERROR }
+import symtab.Flags.IS_ERROR
 import scala.compat.Platform.EOL
 import scala.reflect.runtime.ReflectionUtils
 import scala.reflect.macros.runtime.AbortMacroException
 import scala.util.control.NonFatal
 import scala.tools.nsc.util.stackTraceString
+import scala.reflect.io.NoAbstractFile
 
 trait ContextErrors {
   self: Analyzer =>
 
   import global._
   import definitions._
-  import treeInfo._
 
-  object ErrorKinds extends Enumeration {
-    type ErrorKind = Value
-    val Normal, Access, Ambiguous, Divergent = Value
-  }
-
-  import ErrorKinds.ErrorKind
-
-  trait AbsTypeError extends Throwable {
+  sealed abstract class AbsTypeError extends Throwable {
     def errPos: Position
     def errMsg: String
-    def kind: ErrorKind
+    override def toString() = "[Type error at:" + errPos + "] " + errMsg
   }
 
-  case class NormalTypeError(underlyingTree: Tree, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
-    extends AbsTypeError {
-
-    def errPos:Position = underlyingTree.pos
-    override def toString() = "[Type error at:" + underlyingTree.pos + "] " + errMsg
+  sealed abstract class TreeTypeError extends AbsTypeError {
+    def underlyingTree: Tree
+    def errPos = underlyingTree.pos
   }
 
-  case class SymbolTypeError(underlyingSym: Symbol, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
+  case class NormalTypeError(underlyingTree: Tree, errMsg: String)
+    extends TreeTypeError
+
+  case class AccessTypeError(underlyingTree: Tree, errMsg: String)
+    extends TreeTypeError
+
+  case class AmbiguousTypeError(errPos: Position, errMsg: String)
+    extends AbsTypeError
+
+  case class SymbolTypeError(underlyingSym: Symbol, errMsg: String)
     extends AbsTypeError {
 
     def errPos = underlyingSym.pos
   }
 
-  case class TypeErrorWrapper(ex: TypeError, kind: ErrorKind = ErrorKinds.Normal)
+  case class TypeErrorWrapper(ex: TypeError)
     extends AbsTypeError {
     def errMsg = ex.msg
     def errPos = ex.pos
   }
 
-  case class TypeErrorWithUnderlyingTree(tree: Tree, ex: TypeError, kind: ErrorKind = ErrorKinds.Normal)
+  case class TypeErrorWithUnderlyingTree(tree: Tree, ex: TypeError)
     extends AbsTypeError {
     def errMsg = ex.msg
     def errPos = tree.pos
@@ -68,19 +67,19 @@ trait ContextErrors {
   //    (pt at the point of divergence gives less information to the user)
   // Note: it is safe to delay error message generation in this case
   // because we don't modify implicits' infos.
-  // only issued when -Xdivergence211 is turned on
-  case class DivergentImplicitTypeError(tree: Tree, pt0: Type, sym: Symbol) extends AbsTypeError {
-    def errPos: Position = tree.pos
+  case class DivergentImplicitTypeError(underlyingTree: Tree, pt0: Type, sym: Symbol)
+    extends TreeTypeError {
     def errMsg: String   = errMsgForPt(pt0)
-    def kind = ErrorKinds.Divergent
-    def withPt(pt: Type): AbsTypeError = NormalTypeError(tree, errMsgForPt(pt), kind)
+    def withPt(pt: Type): AbsTypeError = this.copy(pt0 = pt)
     private def errMsgForPt(pt: Type) =
       s"diverging implicit expansion for type ${pt}\nstarting with ${sym.fullLocationString}"
   }
 
-  case class AmbiguousTypeError(underlyingTree: Tree, errPos: Position, errMsg: String, kind: ErrorKind = ErrorKinds.Ambiguous) extends AbsTypeError
+  case class AmbiguousImplicitTypeError(underlyingTree: Tree, errMsg: String)
+    extends TreeTypeError
 
-  case class PosAndMsgTypeError(errPos: Position, errMsg: String, kind: ErrorKind = ErrorKinds.Normal) extends AbsTypeError
+  case class PosAndMsgTypeError(errPos: Position, errMsg: String)
+    extends AbsTypeError
 
   object ErrorUtils {
     def issueNormalTypeError(tree: Tree, msg: String)(implicit context: Context) {
@@ -91,22 +90,13 @@ trait ContextErrors {
       issueTypeError(SymbolTypeError(sym, msg))
     }
 
-    // only called when -Xdivergence211 is turned off
-    def issueDivergentImplicitsError(tree: Tree, msg: String)(implicit context: Context) {
-      issueTypeError(NormalTypeError(tree, msg, ErrorKinds.Divergent))
-    }
-
     def issueAmbiguousTypeError(pre: Type, sym1: Symbol, sym2: Symbol, err: AmbiguousTypeError)(implicit context: Context) {
       context.issueAmbiguousError(pre, sym1, sym2, err)
     }
 
     def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) }
 
-    def typeErrorMsg(found: Type, req: Type, possiblyMissingArgs: Boolean) = {
-      def missingArgsMsg = if (possiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else ""
-
-      "type mismatch" + foundReqMsg(found, req) + missingArgsMsg
-    }
+    def typeErrorMsg(found: Type, req: Type) = "type mismatch" + foundReqMsg(found, req)
   }
 
   def notAnyRefMessage(found: Type): String = {
@@ -147,7 +137,7 @@ trait ContextErrors {
         }
         issueNormalTypeError(tree,
           "stable identifier required, but "+tree+" found." + (
-          if (isStableExceptVolatile(tree)) addendum else ""))
+          if (treeInfo.hasVolatileType(tree)) addendum else ""))
         setError(tree)
       }
 
@@ -155,28 +145,40 @@ trait ContextErrors {
         def errMsg = {
           val paramName = param.name
           val paramTp   = param.tpe
+          def evOrParam = (
+            if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX)
+              "evidence parameter of type"
+            else
+              s"parameter $paramName:"
+          )
           paramTp.typeSymbolDirect match {
-              case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp)
-              case _ =>
-                "could not find implicit value for "+
-                   (if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX) "evidence parameter of type "
-                    else "parameter "+paramName+": ")+paramTp
+            case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp)
+            case _ => s"could not find implicit value for $evOrParam $paramTp"
           }
         }
         issueNormalTypeError(tree, errMsg)
       }
 
       def AdaptTypeError(tree: Tree, found: Type, req: Type) = {
+        // SI-3971 unwrapping to the outermost Apply helps prevent confusion with the
+        // error message point.
+        def callee = {
+          def unwrap(t: Tree): Tree = t match {
+            case Apply(app: Apply, _) => unwrap(app)
+            case _                    => t
+          }
+          unwrap(tree)
+        }
+
         // If the expected type is a refinement type, and the found type is a refinement or an anon
         // class, we can greatly improve the error message by retyping the tree to recover the actual
         // members present, then display along with the expected members. This is done here because
         // this is the last point where we still have access to the original tree, rather than just
         // the found/req types.
-        val foundType: Type = req.normalize match {
+        val foundType: Type = req.dealiasWiden match {
           case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass =>
-            val retyped    = typed (tree.duplicate setType null)
+            val retyped    = typed (tree.duplicate.clearType())
             val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic)
-
             if (foundDecls.isEmpty || (found.typeSymbol eq NoSymbol)) found
             else {
               // The members arrive marked private, presumably because there was no
@@ -190,11 +192,10 @@ trait ContextErrors {
           case _ =>
             found
         }
-        assert(!found.isErroneous && !req.isErroneous, (found, req))
+        assert(!foundType.isErroneous && !req.isErroneous, (foundType, req))
 
-        issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req))) )
-        if (settings.explaintypes.value)
-          explainTypes(found, req)
+        issueNormalTypeError(callee, withAddendum(callee.pos)(typeErrorMsg(foundType, req)))
+        infer.explainTypes(foundType, req)
       }
 
       def WithFilterError(tree: Tree, ex: AbsTypeError) = {
@@ -203,14 +204,18 @@ trait ContextErrors {
       }
 
       def ParentTypesError(templ: Template, ex: TypeError) = {
-         templ.tpe = null
-         issueNormalTypeError(templ, ex.getMessage())
+        templ.clearType()
+        issueNormalTypeError(templ, ex.getMessage())
+        setError(templ)
       }
 
       // additional parentTypes errors
-      def ConstrArgsInTraitParentTpeError(arg: Tree, parent: Symbol) =
+      def ConstrArgsInParentWhichIsTraitError(arg: Tree, parent: Symbol) =
         issueNormalTypeError(arg, parent + " is a trait; does not take constructor arguments")
 
+      def ConstrArgsInParentOfTraitError(arg: Tree, parent: Symbol) =
+        issueNormalTypeError(arg, "parents of traits may not have parameters")
+
       def MissingTypeArgumentsParentTpeError(supertpt: Tree) =
         issueNormalTypeError(supertpt, "missing type arguments")
 
@@ -318,7 +323,7 @@ trait ContextErrors {
           val target           = qual.tpe.widen
           def targetKindString = if (owner.isTypeParameterOrSkolem) "type parameter " else ""
           def nameString       = decodeWithKind(name, owner)
-          /** Illuminating some common situations and errors a bit further. */
+          /* Illuminating some common situations and errors a bit further. */
           def addendum         = {
             val companion = {
               if (name.isTermName && owner.isPackageClass) {
@@ -354,6 +359,14 @@ trait ContextErrors {
         //setError(sel)
       }
 
+      def SelectWithUnderlyingError(sel: Tree, err: AbsTypeError) = {
+        // if there's no position, this is likely the result of a MissingRequirementError
+        // use the position of the selection we failed to type check to report the original message
+        if (err.errPos == NoPosition) issueNormalTypeError(sel, err.errMsg)
+        else issueTypeError(err)
+        setError(sel)
+      }
+
       //typedNew
       def IsAbstractError(tree: Tree, sym: Symbol) = {
         issueNormalTypeError(tree, sym + " is abstract; cannot be instantiated")
@@ -403,11 +416,28 @@ trait ContextErrors {
         setError(tree)
       }
 
-      def MissingParameterTypeError(fun: Tree, vparam: ValDef, pt: Type) =
+      def MissingParameterTypeError(fun: Tree, vparam: ValDef, pt: Type, withTupleAddendum: Boolean) = {
+        def issue(what: String) = {
+          val addendum: String = fun match {
+            case Function(params, _) if withTupleAddendum =>
+              val funArity = params.length
+              val example = analyzer.exampleTuplePattern(params map (_.name))
+              (pt baseType FunctionClass(1)) match {
+                case TypeRef(_, _, arg :: _) if arg.typeSymbol == TupleClass(funArity) && funArity > 1 =>
+                  sm"""|
+                       |Note: The expected type requires a one-argument function accepting a $funArity-Tuple.
+                       |      Consider a pattern matching anonymous function, `{ case $example =>  ... }`"""
+                case _ => ""
+              }
+            case _ => ""
+          }
+          issueNormalTypeError(vparam, what + addendum)
+        }
         if (vparam.mods.isSynthetic) fun match {
           case Function(_, Match(_, _)) => MissingParameterTypeAnonMatchError(vparam, pt)
-          case _                        => issueNormalTypeError(vparam, "missing parameter type for expanded function " + fun)
-        } else issueNormalTypeError(vparam, "missing parameter type")
+          case _                        => issue("missing parameter type for expanded function " + fun)
+        } else issue("missing parameter type")
+      }
 
       def MissingParameterTypeAnonMatchError(vparam: Tree, pt: Type) =
         issueNormalTypeError(vparam, "missing parameter type for expanded function\n"+
@@ -437,9 +467,6 @@ trait ContextErrors {
       def ArrayConstantsTypeMismatchError(tree: Tree, pt: Type) =
         NormalTypeError(tree, "found array constant, expected argument of type " + pt)
 
-      def UnexpectedTreeAnnotation(tree: Tree) =
-        NormalTypeError(tree, "unexpected tree in annotation: "+ tree)
-
       def AnnotationTypeMismatchError(tree: Tree, expected: Type, found: Type) =
         NormalTypeError(tree, "expected annotation of type " + expected + ", found " + found)
 
@@ -468,7 +495,7 @@ trait ContextErrors {
       def AbstractionFromVolatileTypeError(vd: ValDef) =
         issueNormalTypeError(vd, "illegal abstraction from value with volatile type "+vd.symbol.tpe)
 
-      private[ContextErrors] def TypedApplyWrongNumberOfTpeParametersErrorMessage(fun: Tree) =
+      private[scala] def TypedApplyWrongNumberOfTpeParametersErrorMessage(fun: Tree) =
         "wrong number of type parameters for "+treeSymTypeMsg(fun)
 
       def TypedApplyWrongNumberOfTpeParametersError(tree: Tree, fun: Tree) = {
@@ -484,7 +511,7 @@ trait ContextErrors {
       // doTypeApply
       //tryNamesDefaults
       def NamedAndDefaultArgumentsNotSupportedForMacros(tree: Tree, fun: Tree) =
-        NormalTypeError(tree, "macros application do not support named and/or default arguments")
+        NormalTypeError(tree, "macro applications do not support named and/or default arguments")
 
       def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree) =
         NormalTypeError(tree, "too many arguments for "+treeSymTypeMsg(fun))
@@ -525,6 +552,12 @@ trait ContextErrors {
       def TooManyArgsPatternError(fun: Tree) =
         NormalTypeError(fun, "too many arguments for unapply pattern, maximum = "+definitions.MaxTupleArity)
 
+      def BlackboxExtractorExpansion(fun: Tree) =
+        NormalTypeError(fun, "extractor macros can only be whitebox")
+
+      def WrongShapeExtractorExpansion(fun: Tree) =
+        NormalTypeError(fun, "extractor macros can only expand into extractor calls")
+
       def WrongNumberOfArgsError(tree: Tree, fun: Tree) =
         NormalTypeError(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun))
 
@@ -532,7 +565,7 @@ trait ContextErrors {
         NormalTypeError(tree, fun.tpe+" does not take parameters")
 
       // Dynamic
-      def DynamicVarArgUnsupported(tree: Tree, name: String) =
+      def DynamicVarArgUnsupported(tree: Tree, name: Name) =
         issueNormalTypeError(tree, name+ " does not support passing a vararg parameter")
 
       def DynamicRewriteError(tree: Tree, err: AbsTypeError) = {
@@ -578,11 +611,13 @@ trait ContextErrors {
 
       //adapt
       def MissingArgsForMethodTpeError(tree: Tree, meth: Symbol) = {
-        issueNormalTypeError(tree,
-          "missing arguments for " + meth.fullLocationString + (
+        val message =
+          if (meth.isMacro) MacroTooFewArgumentListsMessage
+          else "missing arguments for " + meth.fullLocationString + (
             if (meth.isConstructor) ""
             else ";\nfollow this method with `_' if you want to treat it as a partially applied function"
-          ))
+          )
+        issueNormalTypeError(tree, message)
         setError(tree)
       }
 
@@ -598,8 +633,12 @@ trait ContextErrors {
         setError(tree)
       }
 
-      def CaseClassConstructorError(tree: Tree) = {
-        issueNormalTypeError(tree, tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method")
+      def CaseClassConstructorError(tree: Tree, baseMessage: String) = {
+        val addendum = directUnapplyMember(tree.symbol.info) match {
+          case sym if hasMultipleNonImplicitParamLists(sym) => s"\nNote: ${sym.defString} exists in ${tree.symbol}, but it cannot be used as an extractor due to its second non-implicit parameter list"
+          case _                                            => ""
+        }
+        issueNormalTypeError(tree, baseMessage + addendum)
         setError(tree)
       }
 
@@ -663,7 +702,7 @@ trait ContextErrors {
         val addendums = List(
           if (sym0.associatedFile eq sym1.associatedFile)
             Some("conflicting symbols both originated in file '%s'".format(sym0.associatedFile.canonicalPath))
-          else if ((sym0.associatedFile ne null) && (sym1.associatedFile ne null))
+          else if ((sym0.associatedFile ne NoAbstractFile) && (sym1.associatedFile ne NoAbstractFile))
             Some("conflicting symbols originated in files '%s' and '%s'".format(sym0.associatedFile.canonicalPath, sym1.associatedFile.canonicalPath))
           else None ,
           if (isBug) Some("Note: this may be due to a bug in the compiler involving wildcards in package objects") else None
@@ -680,8 +719,8 @@ trait ContextErrors {
       def CyclicAliasingOrSubtypingError(errPos: Position, sym0: Symbol) =
         issueTypeError(PosAndMsgTypeError(errPos, "cyclic aliasing or subtyping involving "+sym0))
 
-      def CyclicReferenceError(errPos: Position, lockedSym: Symbol) =
-        issueTypeError(PosAndMsgTypeError(errPos, "illegal cyclic reference involving " + lockedSym))
+      def CyclicReferenceError(errPos: Position, tp: Type, lockedSym: Symbol) =
+        issueTypeError(PosAndMsgTypeError(errPos, s"illegal cyclic reference involving $tp and $lockedSym"))
 
       // macro-related errors (also see MacroErrors below)
 
@@ -690,26 +729,50 @@ trait ContextErrors {
         setError(tree)
       }
 
-      // same reason as for MacroBodyTypecheckException
+      def MacroTooManyArgumentListsError(expandee: Tree, fun: Symbol) = {
+        NormalTypeError(expandee, "too many argument lists for " + fun)
+      }
+
+      private def MacroIncompatibleEngineError(friendlyMessage: String, internalMessage: String) = {
+        def debugDiagnostic = s"(internal diagnostic: $internalMessage)"
+        val message = if (macroDebugLite || macroDebugVerbose) s"$friendlyMessage $debugDiagnostic" else friendlyMessage
+        issueNormalTypeError(lastTreeToTyper, message)
+      }
+
+      def MacroCantExpand210xMacrosError(internalMessage: String) =
+        MacroIncompatibleEngineError("can't expand macros compiled by previous versions of Scala", internalMessage)
+
+      def MacroCantExpandIncompatibleMacrosError(internalMessage: String) =
+        MacroIncompatibleEngineError("macro cannot be expanded, because it was compiled by an incompatible macro engine", internalMessage)
+
       case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable
 
-      private def macroExpansionError(expandee: Tree, msg: String = null, pos: Position = NoPosition) = {
+      protected def macroExpansionError(expandee: Tree, msg: String, pos: Position = NoPosition) = {
         def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) msg.split(EOL).drop(1).headOption.getOrElse("?") else msg
         macroLogLite("macro expansion has failed: %s".format(msgForLog))
-        val errorPos = if (pos != NoPosition) pos else (if (expandee.pos != NoPosition) expandee.pos else enclosingMacroPosition)
-        if (msg != null) context.error(errorPos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
+        if (msg != null) context.error(if (pos.isDefined) pos else expandee.pos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
         setError(expandee)
         throw MacroExpansionException
       }
 
-      def MacroPartialApplicationError(expandee: Tree) = {
+      private def macroExpansionError2(expandee: Tree, msg: String) = {
         // macroExpansionError won't work => swallows positions, hence needed to do issueTypeError
         // kinda contradictory to the comment in `macroExpansionError`, but this is how it works
-        issueNormalTypeError(expandee, "macros cannot be partially applied")
+        issueNormalTypeError(expandee, msg)
         setError(expandee)
         throw MacroExpansionException
       }
 
+      private def MacroTooFewArgumentListsMessage = "too few argument lists for macro invocation"
+      def MacroTooFewArgumentListsError(expandee: Tree) = macroExpansionError2(expandee, MacroTooFewArgumentListsMessage)
+
+      private def MacroTooManyArgumentListsMessage = "too many argument lists for macro invocation"
+      def MacroTooManyArgumentListsError(expandee: Tree) = macroExpansionError2(expandee, MacroTooManyArgumentListsMessage)
+
+      def MacroTooFewArgumentsError(expandee: Tree) = macroExpansionError2(expandee, "too few arguments for macro invocation")
+
+      def MacroTooManyArgumentsError(expandee: Tree) = macroExpansionError2(expandee, "too many arguments for macro invocation")
+
       def MacroGeneratedAbort(expandee: Tree, ex: AbortMacroException) = {
         // errors have been reported by the macro itself, so we do nothing here
         macroLogVerbose("macro expansion has been aborted")
@@ -731,7 +794,7 @@ trait ContextErrors {
           try {
             // [Eugene] is there a better way?
             // [Paul] See Exceptional.scala and Origins.scala.
-            val relevancyThreshold = realex.getStackTrace().indexWhere(_.getMethodName endsWith "macroExpand1")
+            val relevancyThreshold = realex.getStackTrace().indexWhere(_.getMethodName endsWith "macroExpandWithRuntime")
             if (relevancyThreshold == -1) None
             else {
               var relevantElements = realex.getStackTrace().take(relevancyThreshold + 1)
@@ -771,23 +834,29 @@ trait ContextErrors {
         macroExpansionError(expandee, template(sym.name.nameKind).format(sym.name + " " + sym.origin, forgotten))
       }
 
-      def MacroExpansionIsNotExprError(expandee: Tree, expanded: Any) =
+      def MacroExpansionHasInvalidTypeError(expandee: Tree, expanded: Any) = {
+        def isUnaffiliatedExpr = expanded.isInstanceOf[scala.reflect.api.Exprs#Expr[_]]
+        def isUnaffiliatedTree = expanded.isInstanceOf[scala.reflect.api.Trees#TreeApi]
+        val expected = "expr or tree"
+        val actual = if (isUnaffiliatedExpr) "an expr" else if (isUnaffiliatedTree) "a tree" else "unexpected"
+        val isPathMismatch = expanded != null && (isUnaffiliatedExpr || isUnaffiliatedTree)
         macroExpansionError(expandee,
-          "macro must return a compiler-specific expr; returned value is " + (
+          s"macro must return a compiler-specific $expected; returned value is " + (
             if (expanded == null) "null"
-            else if (expanded.isInstanceOf[Expr[_]]) " Expr, but it doesn't belong to this compiler's universe"
-            else " of " + expanded.getClass
+            else if (isPathMismatch) s"$actual, but it doesn't belong to this compiler's universe"
+            else "of " + expanded.getClass
         ))
-
-      def MacroImplementationNotFoundError(expandee: Tree) = {
-        val message =
-          "macro implementation not found: " + expandee.symbol.name + " " +
-          "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)" +
-          (if (forScaladoc) ". When generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
-           else "")
-        macroExpansionError(expandee, message)
       }
+
+      def MacroImplementationNotFoundError(expandee: Tree) =
+        macroExpansionError(expandee, macroImplementationNotFoundMessage(expandee.symbol.name))
     }
+
+    /** This file will be the death of me. */
+    protected def macroImplementationNotFoundMessage(name: Name): String = (
+      s"""|macro implementation not found: $name
+          |(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)""".stripMargin
+    )
   }
 
   trait InferencerContextErrors {
@@ -829,14 +898,17 @@ trait ContextErrors {
           )
         }
 
-      def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String) = {
+      def AccessError(tree: Tree, sym: Symbol, ctx: Context, explanation: String): AbsTypeError =
+        AccessError(tree, sym, ctx.enclClass.owner.thisType, ctx.enclClass.owner, explanation)
+
+      def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String): AbsTypeError = {
         def errMsg = {
           val location = if (sym.isClassConstructor) owner0 else pre.widen.directObjectString
 
           underlyingSymbol(sym).fullLocationString + " cannot be accessed in " +
           location + explanation
         }
-        NormalTypeError(tree, errMsg, ErrorKinds.Access)
+        AccessTypeError(tree, errMsg)
       }
 
       def NoMethodInstanceError(fn: Tree, args: List[Tree], msg: String) =
@@ -881,7 +953,7 @@ trait ContextErrors {
             "argument types " + argtpes.mkString("(", ",", ")") +
            (if (pt == WildcardType) "" else " and expected result type " + pt)
           val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, msg0)
-          issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+          issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(pos, msg))
           setErrorOnLastTry(lastTry, tree)
         } else setError(tree) // do not even try further attempts because they should all fail
                               // even if this is not the last attempt (because of the SO's possibility on the horizon)
@@ -889,13 +961,13 @@ trait ContextErrors {
       }
 
       def NoBestExprAlternativeError(tree: Tree, pt: Type, lastTry: Boolean) = {
-        issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(tree.symbol.tpe, pt, isPossiblyMissingArgs(tree.symbol.tpe, pt))))
+        issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(tree.symbol.tpe, pt)))
         setErrorOnLastTry(lastTry, tree)
       }
 
       def AmbiguousExprAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, pt: Type, lastTry: Boolean) = {
         val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, "expected type " + pt)
-        issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+        issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(pos, msg))
         setErrorOnLastTry(lastTry, tree)
       }
 
@@ -909,7 +981,7 @@ trait ContextErrors {
           kindErrors.toList.mkString("\n", ", ", ""))
       }
 
-      private[ContextErrors] def NotWithinBoundsErrorMessage(prefix: String, targs: List[Type], tparams: List[Symbol], explaintypes: Boolean) = {
+      private[scala] def NotWithinBoundsErrorMessage(prefix: String, targs: List[Type], tparams: List[Symbol], explaintypes: Boolean) = {
         if (explaintypes) {
           val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds)
           (targs, bounds).zipped foreach ((targ, bound) => explainTypes(bound.lo, targ))
@@ -925,7 +997,7 @@ trait ContextErrors {
       def NotWithinBounds(tree: Tree, prefix: String, targs: List[Type],
                           tparams: List[Symbol], kindErrors: List[String]) =
         issueNormalTypeError(tree,
-          NotWithinBoundsErrorMessage(prefix, targs, tparams, settings.explaintypes.value))
+          NotWithinBoundsErrorMessage(prefix, targs, tparams, settings.explaintypes))
 
       //substExpr
       def PolymorphicExpressionInstantiationError(tree: Tree, undetparams: List[Symbol], pt: Type) =
@@ -1033,20 +1105,14 @@ trait ContextErrors {
         val s1 = if (prevSym.isModule) "case class companion " else ""
         val s2 = if (prevSym.isSynthetic) "(compiler-generated) " + s1 else ""
         val s3 = if (prevSym.isCase) "case class " + prevSym.name else "" + prevSym
-        val where = if (currentSym.owner.isPackageClass != prevSym.owner.isPackageClass) {
-                      val inOrOut = if (prevSym.owner.isPackageClass) "outside of" else "in"
+        val where = if (currentSym.isTopLevel != prevSym.isTopLevel) {
+                      val inOrOut = if (prevSym.isTopLevel) "outside of" else "in"
                       " %s package object %s".format(inOrOut, ""+prevSym.effectiveOwner.name)
                     } else ""
 
         issueSymbolTypeError(currentSym, prevSym.name + " is already defined as " + s2 + s3 + where)
       }
 
-      def MaxParametersCaseClassError(tree: Tree) =
-        issueNormalTypeError(tree, "Implementation restriction: case classes cannot have more than " + definitions.MaxFunctionArity + " parameters.")
-
-      def InheritsItselfError(tree: Tree) =
-        issueNormalTypeError(tree, tree.tpe.typeSymbol+" inherits itself")
-
       def MissingParameterOrValTypeError(vparam: Tree) =
         issueNormalTypeError(vparam, "missing parameter type")
 
@@ -1097,11 +1163,11 @@ trait ContextErrors {
 
 
       def AbstractMemberWithModiferError(sym: Symbol, flag: Int) =
-        issueSymbolTypeError(sym, "abstract member may not have " + Flags.flagsToString(flag) + " modifier")
+        issueSymbolTypeError(sym, "abstract member may not have " + Flags.flagsToString(flag.toLong) + " modifier")
 
       def IllegalModifierCombination(sym: Symbol, flag1: Int, flag2: Int) =
         issueSymbolTypeError(sym, "illegal combination of modifiers: %s and %s for: %s".format(
-            Flags.flagsToString(flag1), Flags.flagsToString(flag2), sym))
+            Flags.flagsToString(flag1.toLong), Flags.flagsToString(flag2.toLong), sym))
 
       def IllegalDependentMethTpeError(sym: Symbol)(context: Context) = {
         val errorAddendum =
@@ -1143,7 +1209,7 @@ trait ContextErrors {
             // failures which have nothing to do with implicit conversions
             // per se, but which manifest as implicit conversion conflicts
             // involving Any, are further explained from foundReqMsg.
-            if (AnyRefClass.tpe <:< req) (
+            if (AnyRefTpe <:< req) (
               if (sym == AnyClass || sym == UnitClass) (
                  sm"""|Note: ${sym.name} is not implicitly converted to AnyRef.  You can safely
                       |pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so."""
@@ -1159,11 +1225,11 @@ trait ContextErrors {
                sm"""|Note that implicit conversions are not applicable because they are ambiguous:
                     |${coreMsg}are possible conversion functions from $found to $req"""
           }
-          typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req)) + (
+          typeErrorMsg(found, req) + (
             if (explanation == "") "" else "\n" + explanation
           )
         }
-        context.issueAmbiguousError(AmbiguousTypeError(tree, tree.pos,
+        context.issueAmbiguousError(AmbiguousImplicitTypeError(tree,
           if (isView) viewMsg
           else s"ambiguous implicit values:\n${coreMsg}match expected type $pt")
         )
@@ -1171,13 +1237,7 @@ trait ContextErrors {
     }
 
     def DivergingImplicitExpansionError(tree: Tree, pt: Type, sym: Symbol)(implicit context0: Context) =
-      if (settings.Xdivergence211.value) {
-        issueTypeError(DivergentImplicitTypeError(tree, pt, sym))
-      } else {
-        issueDivergentImplicitsError(tree,
-            "diverging implicit expansion for type "+pt+"\nstarting with "+
-            sym.fullLocationString)
-      }
+      issueTypeError(DivergentImplicitTypeError(tree, pt, sym))
   }
 
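The simplified `DivergingImplicitExpansionError` above is what ultimately reports the familiar "diverging implicit expansion" message. For reference, here is a minimal user-level snippet that provokes that error when the marked line is uncommented; it is not part of this patch, and all names in it are invented for illustration:

    object DivergenceDemo {
      case class Wrap[A](a: A)

      // Each search step demands an Ordering for a strictly larger type, so the
      // implicit search can never terminate and is cut off as divergent instead.
      implicit def wrapOrdering[A](implicit inner: Ordering[Wrap[Wrap[A]]]): Ordering[Wrap[A]] =
        Ordering.by[Wrap[A], String](_.a.toString)

      // Uncommenting the next line should trigger a report along the lines of:
      //   diverging implicit expansion for type Ordering[Wrap[Int]]
      // implicitly[Ordering[Wrap[Int]]]
    }
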
   object NamesDefaultsErrorsGen {
@@ -1216,7 +1276,7 @@ trait ContextErrors {
 
     def DoubleParamNamesDefaultError(arg: Tree, name: Name, pos: Int, otherName: Option[Name])(implicit context: Context) = {
       val annex = otherName match {
-        case Some(oName) => "\nNote that that '"+ oName +"' is not a parameter name of the invoked method."
+        case Some(oName) => "\nNote that '"+ oName +"' is not a parameter name of the invoked method."
         case None => ""
       }
       issueNormalTypeError(arg, "parameter '"+ name +"' is already specified at parameter position "+ pos + annex)
@@ -1228,145 +1288,4 @@ trait ContextErrors {
       setError(arg)
     }
   }
-
-  // using an exception here is actually a good idea
-  // because the lifespan of this exception is extremely small and controlled
-  // moreover exceptions let us avoid an avalanche of "if (!hasError) do stuff" checks
-  case object MacroBodyTypecheckException extends Exception with scala.util.control.ControlThrowable
-
-  trait MacroErrors {
-    self: MacroTyper =>
-
-    private implicit val context0 = typer.context
-    val context = typer.context
-
-    // helpers
-
-    private def lengthMsg(flavor: String, violation: String, extra: Symbol) = {
-      val noun = if (flavor == "value") "parameter" else "type parameter"
-      val message = noun + " lists have different length, " + violation + " extra " + noun
-      val suffix = if (extra ne NoSymbol) " " + extra.defString else ""
-      message + suffix
-    }
-
-    private def abbreviateCoreAliases(s: String): String = List("WeakTypeTag", "Expr").foldLeft(s)((res, x) => res.replace("c.universe." + x, "c." + x))
-
-    private def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean) = {
-      var argsPart = (pss map (ps => ps map (_.defString) mkString ("(", ", ", ")"))).mkString
-      if (abbreviate) argsPart = abbreviateCoreAliases(argsPart)
-      var retPart = restpe.toString
-      if (abbreviate || macroDdef.tpt.tpe == null) retPart = abbreviateCoreAliases(retPart)
-      argsPart + ": " + retPart
-    }
-
-    // not exactly an error generator, but very related
-    // and I dearly wanted to push it away from Macros.scala
-    private def checkSubType(slot: String, rtpe: Type, atpe: Type) = {
-      val ok = if (macroDebugVerbose || settings.explaintypes.value) {
-        if (rtpe eq atpe) println(rtpe + " <: " + atpe + "?" + EOL + "true")
-        withTypesExplained(rtpe <:< atpe)
-      } else rtpe <:< atpe
-      if (!ok) {
-        compatibilityError("type mismatch for %s: %s does not conform to %s".format(slot, abbreviateCoreAliases(rtpe.toString), abbreviateCoreAliases(atpe.toString)))
-      }
-    }
-
-    // errors
-
-    private def fail() = {
-      // need to set the IS_ERROR flag to prohibit spurious expansions
-      if (macroDef != null) macroDef setFlag IS_ERROR
-      // not setting ErrorSymbol as in `infer.setError`, because we still need to know that it's a macro
-      // otherwise assignTypeToTree in Namers might fail if macroDdef.tpt == EmptyTree
-      macroDdef setType ErrorType
-      throw MacroBodyTypecheckException
-    }
-
-    private def genericError(tree: Tree, message: String) = {
-      issueNormalTypeError(tree, message)
-      fail()
-    }
-
-    private def implRefError(message: String) = {
-      val treeInfo.Applied(implRef, _, _) = macroDdef.rhs
-      genericError(implRef, message)
-    }
-
-    private def compatibilityError(message: String) =
-      implRefError(
-        "macro implementation has incompatible shape:"+
-        "\n required: " + showMeth(rparamss, rret, abbreviate = true) +
-        "\n found   : " + showMeth(aparamss, aret, abbreviate = false) +
-        "\n" + message)
-
-    // Phase I: sanity checks
-
-    def MacroDefIsFastTrack() = {
-      macroLogVerbose("typecheck terminated unexpectedly: macro is fast track")
-      assert(!macroDdef.tpt.isEmpty, "fast track macros must provide result type")
-      throw MacroBodyTypecheckException // don't call fail, because we don't need IS_ERROR
-    }
-
-    def MacroDefIsQmarkQmarkQmark() = {
-      macroLogVerbose("typecheck terminated unexpectedly: macro is ???")
-      throw MacroBodyTypecheckException
-    }
-
-    def MacroFeatureNotEnabled() = {
-      macroLogVerbose("typecheck terminated unexpectedly: language.experimental.macros feature is not enabled")
-      fail()
-    }
-
-    // Phase II: typecheck the right-hand side of the macro def
-
-    // do nothing, just fail. relevant typecheck errors have already been reported
-    def MacroDefUntypeableBodyError() = fail()
-
-    def MacroDefInvalidBodyError() = genericError(macroDdef, "macro body has wrong shape:\n required: macro [<implementation object>].<method name>[[<type args>]]")
-
-    def MacroImplNotPublicError() = implRefError("macro implementation must be public")
-
-    def MacroImplOverloadedError() = implRefError("macro implementation cannot be overloaded")
-
-    def MacroImplWrongNumberOfTypeArgumentsError(macroImplRef: Tree) = {
-      val MacroImplReference(owner, meth, targs) = macroImplRef
-      val diagnostic = if (meth.typeParams.length > targs.length) "has too few type arguments" else "has too many arguments"
-      implRefError(s"macro implementation reference $diagnostic for " + treeSymTypeMsg(macroImplRef))
-    }
-
-    def MacroImplNotStaticError() = implRefError("macro implementation must be in statically accessible object")
-
-    // Phase III: check compatibility between the macro def and its macro impl
-    // aXXX (e.g. aparams) => characteristics of the macro impl ("a" stands for "actual")
-    // rXXX (e.g. rparams) => characteristics of a reference macro impl signature synthesized from the macro def ("r" stands for "reference")
-
-    def MacroImplNonTagImplicitParameters(params: List[Symbol]) = implRefError("macro implementations cannot have implicit parameters other than WeakTypeTag evidences")
-
-    def MacroImplParamssMismatchError() = compatibilityError("number of parameter sections differ")
-
-    def MacroImplExtraParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(lengthMsg("value", "found", aparams(rparams.length)))
-
-    def MacroImplMissingParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(abbreviateCoreAliases(lengthMsg("value", "required", rparams(aparams.length))))
-
-    def checkMacroImplParamTypeMismatch(atpe: Type, rparam: Symbol) = checkSubType("parameter " + rparam.name, rparam.tpe, atpe)
-
-    def checkMacroImplResultTypeMismatch(atpe: Type, rret: Type) = checkSubType("return type", atpe, rret)
-
-    def MacroImplParamNameMismatchError(aparam: Symbol, rparam: Symbol) = compatibilityError("parameter names differ: " + rparam.name + " != " + aparam.name)
-
-    def MacroImplVarargMismatchError(aparam: Symbol, rparam: Symbol) = {
-      if (isRepeated(rparam) && !isRepeated(aparam))
-        compatibilityError("types incompatible for parameter " + rparam.name + ": corresponding is not a vararg parameter")
-      if (!isRepeated(rparam) && isRepeated(aparam))
-        compatibilityError("types incompatible for parameter " + aparam.name + ": corresponding is not a vararg parameter")
-    }
-
-    def MacroImplTargMismatchError(atargs: List[Type], atparams: List[Symbol]) =
-      compatibilityError(typer.infer.InferErrorGen.NotWithinBoundsErrorMessage("", atargs, atparams, macroDebugVerbose || settings.explaintypes.value))
-
-    def MacroImplTparamInstantiationError(atparams: List[Symbol], ex: NoInstance) =
-      compatibilityError(
-        "type parameters "+(atparams map (_.defString) mkString ", ")+" cannot be instantiated\n"+
-        ex.getMessage)
-  }
 }
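
The new `macroImplementationNotFoundMessage` above uses the interpolated `stripMargin` idiom to keep a multi-line diagnostic readable in source. A self-contained sketch of that idiom follows; the object and method names are invented for illustration, and only the message text is taken from the patch:

    object StripMarginDemo {
      def notFound(name: String): String =
        s"""|macro implementation not found: $name
            |(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)""".stripMargin

      def main(args: Array[String]): Unit =
        // stripMargin removes everything up to and including each leading '|',
        // so this prints the diagnostic as two lines.
        println(notFound("impl"))
    }
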
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 3fe98ed..8e1ceff 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -6,9 +6,9 @@
 package scala.tools.nsc
 package typechecker
 
-import symtab.Flags._
-import scala.collection.mutable.{LinkedHashSet, Set}
+import scala.collection.{ immutable, mutable }
 import scala.annotation.tailrec
+import scala.reflect.internal.util.shortClassOfInstance
 
 /**
  *  @author  Martin Odersky
@@ -16,32 +16,63 @@ import scala.annotation.tailrec
  */
 trait Contexts { self: Analyzer =>
   import global._
+  import definitions.{ JavaLangPackage, ScalaPackage, PredefModule, ScalaXmlTopScope, ScalaXmlPackage }
+  import ContextMode._
 
-  object NoContext extends Context {
-    outer      = this
+  protected def onTreeCheckerError(pos: Position, msg: String): Unit = ()
+
+  object NoContext
+    extends Context(EmptyTree, NoSymbol, EmptyScope, NoCompilationUnit,
+                    null) { // We can't pass the uninitialized `this`. Instead, we treat null specially in `Context#outer`
     enclClass  = this
     enclMethod = this
 
+    override val depth = 0
     override def nextEnclosing(p: Context => Boolean): Context = this
     override def enclosingContextChain: List[Context] = Nil
     override def implicitss: List[List[ImplicitInfo]] = Nil
+    override def imports: List[ImportInfo] = Nil
+    override def firstImport: Option[ImportInfo] = None
     override def toString = "NoContext"
   }
   private object RootImports {
-    import definitions._
     // Possible lists of root imports
     val javaList         = JavaLangPackage :: Nil
     val javaAndScalaList = JavaLangPackage :: ScalaPackage :: Nil
     val completeList     = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil
   }
 
+  def ambiguousImports(imp1: ImportInfo, imp2: ImportInfo) =
+    LookupAmbiguous(s"it is imported twice in the same scope by\n$imp1\nand $imp2")
+  def ambiguousDefnAndImport(owner: Symbol, imp: ImportInfo) =
+    LookupAmbiguous(s"it is both defined in $owner and imported subsequently by \n$imp")
+
   private lazy val startContext = {
     NoContext.make(
-    Template(List(), emptyValDef, List()) setSymbol global.NoSymbol setType global.NoType,
+    Template(List(), noSelfType, List()) setSymbol global.NoSymbol setType global.NoType,
     rootMirror.RootClass,
     rootMirror.RootClass.info.decls)
   }
 
+  private lazy val allUsedSelectors =
+    mutable.Map[ImportInfo, Set[ImportSelector]]() withDefaultValue Set()
+  private lazy val allImportInfos =
+    mutable.Map[CompilationUnit, List[ImportInfo]]() withDefaultValue Nil
+
+  def warnUnusedImports(unit: CompilationUnit) = {
+    for (imps <- allImportInfos.remove(unit)) {
+      for (imp <- imps.reverse.distinct) {
+        val used = allUsedSelectors(imp)
+        def isMask(s: ImportSelector) = s.name != nme.WILDCARD && s.rename == nme.WILDCARD
+
+        imp.tree.selectors filterNot (s => isMask(s) || used(s)) foreach { sel =>
+          unit.warning(imp posOf sel, "Unused import")
+        }
+      }
+      allUsedSelectors --= imps
+    }
+  }
+
   var lastAccessCheckDetails: String = ""
 
   /** List of symbols to import from in a root context.  Typically that
@@ -55,292 +86,413 @@ trait Contexts { self: Analyzer =>
   protected def rootImports(unit: CompilationUnit): List[Symbol] = {
     assert(definitions.isDefinitionsInitialized, "definitions uninitialized")
 
-    if (settings.noimports.value) Nil
+    if (settings.noimports) Nil
     else if (unit.isJava) RootImports.javaList
-    else if (settings.nopredef.value || treeInfo.noPredefImportForUnit(unit.body)) RootImports.javaAndScalaList
+    else if (settings.nopredef || treeInfo.noPredefImportForUnit(unit.body)) {
+      // SI-8258 Needed for the presentation compiler using -sourcepath, otherwise cycles can occur. See the commit
+      //         message for this ticket for an example.
+      debuglog("Omitted import of Predef._ for " + unit)
+      RootImports.javaAndScalaList
+    }
     else RootImports.completeList
   }
 
-  def rootContext(unit: CompilationUnit): Context             = rootContext(unit, EmptyTree, false)
-  def rootContext(unit: CompilationUnit, tree: Tree): Context = rootContext(unit, tree, false)
-  def rootContext(unit: CompilationUnit, tree: Tree, erasedTypes: Boolean): Context = {
-    import definitions._
-    var sc = startContext
-    for (sym <- rootImports(unit)) {
-      sc = sc.makeNewImport(sym)
-      sc.depth += 1
-    }
-    val c = sc.make(unit, tree, sc.owner, sc.scope, sc.imports)
+
+  def rootContext(unit: CompilationUnit, tree: Tree = EmptyTree, erasedTypes: Boolean = false): Context = {
+    val rootImportsContext = (startContext /: rootImports(unit))((c, sym) => c.make(gen.mkWildcardImport(sym)))
+
+    // there must be a scala.xml package when xml literals were parsed in this unit
+    if (unit.hasXml && ScalaXmlPackage == NoSymbol)
+      unit.error(unit.firstXmlPos, "To compile XML syntax, the scala.xml package must be on the classpath.\nPlease see http://docs.scala-lang.org/overviews/core/scala-2.11.html#scala-xml.")
+
+    // scala-xml needs `scala.xml.TopScope` to be in scope globally as `$scope`
+    // We detect `scala-xml` by looking for `scala.xml.TopScope` and
+    // inject the equivalent of `import scala.xml.{TopScope => $scope}`
+    val contextWithXML =
+      if (!unit.hasXml || ScalaXmlTopScope == NoSymbol) rootImportsContext
+      else rootImportsContext.make(gen.mkImport(ScalaXmlPackage, nme.TopScope, nme.dollarScope))
+
+    val c = contextWithXML.make(tree, unit = unit)
     if (erasedTypes) c.setThrowErrors() else c.setReportErrors()
-    c.implicitsEnabled = !erasedTypes
-    c.enrichmentEnabled = c.implicitsEnabled
+    c(EnrichmentEnabled | ImplicitsEnabled) = !erasedTypes
     c
   }
 
   def resetContexts() {
-    var sc = startContext
-    while (sc != NoContext) {
-      sc.tree match {
-        case Import(qual, _) => qual.tpe = singleType(qual.symbol.owner.thisType, qual.symbol)
-        case _ =>
+    startContext.enclosingContextChain foreach { context =>
+      context.tree match {
+        case Import(qual, _) => qual setType singleType(qual.symbol.owner.thisType, qual.symbol)
+        case _               =>
       }
-      sc.flushAndReturnBuffer()
-      sc.flushAndReturnWarningsBuffer()
-      sc = sc.outer
+      context.reportBuffer.clearAll()
     }
   }
 
-  private object Errors {
-    final val ReportErrors     = 1 << 0
-    final val BufferErrors     = 1 << 1
-    final val AmbiguousErrors  = 1 << 2
-    final val notThrowMask     = ReportErrors | BufferErrors
-    final val AllMask          = ReportErrors | BufferErrors | AmbiguousErrors
-  }
+  /**
+   * A motley collection of the state and loosely associated behaviour of the type checker.
+   * Each `Typer` has an associated context, and as it descends into the tree new `(Typer, Context)`
+   * pairs are spawned.
+   *
+   * Meet the crew; first the state:
+   *
+   *   - A tree, symbol, and scope representing the focus of the typechecker
+   *   - An enclosing context, `outer`.
+   *   - The current compilation unit.
+   *   - A variety of bits that track the current error reporting policy (more on this later);
+   *     whether or not implicits/macros are enabled, whether we are in a self or super call or
+   *     in a constructor suffix. These are represented as bits in the mask `contextMode`.
+   *   - Some odds and ends: undetermined type parameters of the current line of type inference;
+   *     contextual augmentation for error messages, tracking of the nesting depth.
+   *
+   * And behaviour:
+   *
+   *   - The central point for issuing errors and warnings from the typechecker, with a means
+   *     to buffer these for use in 'silent' type checking, when some recovery might be possible.
+   *   - `Context` is something of a Zipper for the tree we are typechecking: its `enclosingContextChain`
+   *     is the path back to the root. This is exactly what we need to resolve names (`lookupSymbol`)
+   *     and to collect in-scope implicit definitions (`implicitss`).
+   *     Supporting these are `imports`, which represents all `Import` trees in the enclosing context chain.
+   *   - In a similar vein, we can assess accessibility (`isAccessible`).
+   *
+   * More on error buffering:
+   *     When are type errors recoverable? In quite a few places, it turns out. Some examples:
+   *     trying to type an application with/without the expected type, or with/without implicit views
+   *     enabled. This is usually mediated by `Typer.silent`, `Inferencer#tryTwice`.
+   *
+   *     Initially, starting from the `typer` phase, the contexts either buffer or report errors;
+   *     afterwards errors are thrown. This is configured in `rootContext`. Additionally, more
+   *     fine-grained control is needed based on the kind of error; ambiguity errors are often
+   *     suppressed during exploratory typing, such as determining whether `a == b` in an argument
+   *     position is an assignment or a named argument, when `Inferencer#isApplicableSafe` type checks
+   *     applications with and without an expected type, or when `Typer#tryTypedApply` tries to fit arguments to
+   *     a function type with/without implicit views.
+   *
+   *     When the error policy entails error/warning buffering, the mutable [[ReportBuffer]] records
+   *     everything that is issued. It is important to note that child Contexts created with `make`
+   *     "inherit" the very same `ReportBuffer` instance, whereas children spawned through `makeSilent`
+   *     receive a separate, fresh buffer.
+   *
+   * @param tree  Tree associated with this context
+   * @param owner The current owner
+   * @param scope The current scope
+   * @param _outer The next outer context.
+   */
+  class Context private[typechecker](val tree: Tree, val owner: Symbol, val scope: Scope,
+                                     val unit: CompilationUnit, _outer: Context) {
+    private def outerIsNoContext = _outer eq null
+    final def outer: Context = if (outerIsNoContext) NoContext else _outer
 
-  class Context private[typechecker] {
-    import Errors._
-
-    var unit: CompilationUnit = NoCompilationUnit
-    var tree: Tree = _                      // Tree associated with this context
-    var owner: Symbol = NoSymbol            // The current owner
-    var scope: Scope = _                    // The current scope
-    var outer: Context = _                  // The next outer context
-    var enclClass: Context = _              // The next outer context whose tree is a
-                                            // template or package definition
-    @inline final def savingEnclClass[A](c: Context)(a: => A): A = {
+    /** The next outer context whose tree is a template or package definition */
+    var enclClass: Context = _
+
+    @inline private def savingEnclClass[A](c: Context)(a: => A): A = {
       val saved = enclClass
       enclClass = c
       try a finally enclClass = saved
     }
 
-    var enclMethod: Context = _             // The next outer context whose tree is a method
-    var variance: Int = _                   // Variance relative to enclosing class
-    private var _undetparams: List[Symbol] = List() // Undetermined type parameters,
-                                                    // not inherited to child contexts
-    var depth: Int = 0
-    var imports: List[ImportInfo] = List()   // currently visible imports
-    var openImplicits: List[OpenImplicit] = List() // types for which implicit arguments
-                                             // are currently searched
-    // for a named application block (Tree) the corresponding NamedApplyInfo
-    var namedApplyBlockInfo: Option[(Tree, NamedApplyInfo)] = None
-    var prefix: Type = NoPrefix
-    var inConstructorSuffix = false         // are we in a secondary constructor
-                                            // after the this constructor call?
-    var returnsSeen = false                 // for method context: were returns encountered?
-    var inSelfSuperCall = false             // is this context (enclosed in) a constructor call?
-    // (the call to the super or self constructor in the first line of a constructor)
-    // in this context the object's fields should not be in scope
+    /** A bitmask containing all the boolean flags in a context, e.g. are implicit views enabled */
+    var contextMode: ContextMode = ContextMode.DefaultMode
 
-    var diagnostic: List[String] = Nil      // these messages are printed when issuing an error
-    var implicitsEnabled = false
-    var macrosEnabled = true
-    var enrichmentEnabled = false // to selectively allow enrichment in patterns, where other kinds of implicit conversions are not allowed
-    var checking = false
-    var retyping = false
+    /** Update all modes in `mask` to `value` */
+    def update(mask: ContextMode, value: Boolean) {
+      contextMode = contextMode.set(value, mask)
+    }
+
+    /** Set all modes in the mask `enable` to true, and all in `disable` to false. */
+    def set(enable: ContextMode = NOmode, disable: ContextMode = NOmode): this.type = {
+      contextMode = contextMode.set(true, enable).set(false, disable)
+      this
+    }
+
+    /** Is this context in all modes in the given `mask`? */
+    def apply(mask: ContextMode): Boolean = contextMode.inAll(mask)
+
+    /** The next outer context whose tree is a method */
+    var enclMethod: Context = _
+
+    /** Variance relative to enclosing class */
+    var variance: Variance = Variance.Invariant
+
+    private var _undetparams: List[Symbol] = List()
 
-    var savedTypeBounds: List[(Symbol, Type)] = List() // saved type bounds
-       // for type parameters which are narrowed in a GADT
+    protected def outerDepth = if (outerIsNoContext) 0 else outer.depth
 
-    var typingIndentLevel: Int = 0
-    def typingIndent = "  " * typingIndentLevel
+    val depth: Int = {
+      val increasesDepth = isRootImport || outerIsNoContext || (outer.scope != scope)
+      ( if (increasesDepth) 1 else 0 ) + outerDepth
+    }
+
+    /** The currently visible imports */
+    def imports: List[ImportInfo] = outer.imports
+    /** Equivalent to `imports.headOption`, but more efficient */
+    def firstImport: Option[ImportInfo] = outer.firstImport
+    def isRootImport: Boolean = false
 
-    var buffer: Set[AbsTypeError] = _
-    var warningsBuffer: Set[(Position, String)] = _
+    /** Types for which implicit arguments are currently searched */
+    var openImplicits: List[OpenImplicit] = List()
 
+    /* For a named application block (`Tree`) the corresponding `NamedApplyInfo`. */
+    var namedApplyBlockInfo: Option[(Tree, NamedApplyInfo)] = None
+    var prefix: Type = NoPrefix
+
+    def inSuperInit_=(value: Boolean)         = this(SuperInit) = value
+    def inSuperInit                           = this(SuperInit)
+    def inConstructorSuffix_=(value: Boolean) = this(ConstructorSuffix) = value
+    def inConstructorSuffix                   = this(ConstructorSuffix)
+    def inPatAlternative_=(value: Boolean)    = this(PatternAlternative) = value
+    def inPatAlternative                      = this(PatternAlternative)
+    def starPatterns_=(value: Boolean)        = this(StarPatterns) = value
+    def starPatterns                          = this(StarPatterns)
+    def returnsSeen_=(value: Boolean)         = this(ReturnsSeen) = value
+    def returnsSeen                           = this(ReturnsSeen)
+    def inSelfSuperCall_=(value: Boolean)     = this(SelfSuperCall) = value
+    def inSelfSuperCall                       = this(SelfSuperCall)
+    def implicitsEnabled_=(value: Boolean)    = this(ImplicitsEnabled) = value
+    def implicitsEnabled                      = this(ImplicitsEnabled)
+    def macrosEnabled_=(value: Boolean)       = this(MacrosEnabled) = value
+    def macrosEnabled                         = this(MacrosEnabled)
+    def enrichmentEnabled_=(value: Boolean)   = this(EnrichmentEnabled) = value
+    def enrichmentEnabled                     = this(EnrichmentEnabled)
+    def checking_=(value: Boolean)            = this(Checking) = value
+    def checking                              = this(Checking)
+    def retyping_=(value: Boolean)            = this(ReTyping) = value
+    def retyping                              = this(ReTyping)
+    def inSecondTry                           = this(SecondTry)
+    def inSecondTry_=(value: Boolean)         = this(SecondTry) = value
+    def inReturnExpr                          = this(ReturnExpr)
+    def inTypeConstructorAllowed              = this(TypeConstructorAllowed)
+
+    def defaultModeForTyped: Mode = if (inTypeConstructorAllowed) Mode.NOmode else Mode.EXPRmode
+
+    /** These messages are printed when issuing an error */
+    var diagnostic: List[String] = Nil
+
+    /** Saved type bounds for type parameters which are narrowed in a GADT. */
+    var savedTypeBounds: List[(Symbol, Type)] = List()
+
+    /** The next enclosing context (potentially `this`) that is owned by a class or method */
     def enclClassOrMethod: Context =
-      if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this
+      if (!owner.exists || owner.isClass || owner.isMethod) this
       else outer.enclClassOrMethod
 
+    /** The next enclosing context (potentially `this`) that has a `CaseDef` as a tree */
+    def enclosingCaseDef = nextEnclosing(_.tree.isInstanceOf[CaseDef])
+
+    /** ...or an Apply. */
+    def enclosingApply = nextEnclosing(_.tree.isInstanceOf[Apply])
+
+    def siteString = {
+      def what_s  = if (owner.isConstructor) "" else owner.kindString
+      def where_s = if (owner.isClass) "" else "in " + enclClass.owner.decodedName
+      List(what_s, owner.decodedName, where_s) filterNot (_ == "") mkString " "
+    }
+    //
+    // Tracking undetermined type parameters for type argument inference.
+    //
     def undetparamsString =
       if (undetparams.isEmpty) ""
       else undetparams.mkString("undetparams=", ", ", "")
-    def undetparams = _undetparams
+    /** Undetermined type parameters. See `Infer#{inferExprInstance, adjustTypeArgs}`. Not inherited to child contexts */
+    def undetparams: List[Symbol] = _undetparams
     def undetparams_=(ps: List[Symbol]) = { _undetparams = ps }
 
-    def extractUndetparams() = {
+    /** Return and clear the undetermined type parameters */
+    def extractUndetparams(): List[Symbol] = {
       val tparams = undetparams
       undetparams = List()
       tparams
     }
 
-    private[this] var mode = 0
-
-    def errBuffer = buffer
-    def hasErrors = buffer.nonEmpty
-    def hasWarnings = warningsBuffer.nonEmpty
-
-    def state: Int = mode
-    def restoreState(state0: Int) = mode = state0
-
-    def reportErrors    = (state & ReportErrors)     != 0
-    def bufferErrors    = (state & BufferErrors)     != 0
-    def ambiguousErrors = (state & AmbiguousErrors)  != 0
-    def throwErrors     = (state & notThrowMask)     == 0
-
-    def setReportErrors()    = mode = (ReportErrors | AmbiguousErrors)
-    def setBufferErrors()    = {
-      //assert(bufferErrors || !hasErrors, "When entering the buffer state, context has to be clean. Current buffer: " + buffer)
-      mode = BufferErrors
+    /** Run `body` with this context with no undetermined type parameters, restoring the
+     *  original list afterwards.
+     *  @param reportAmbiguous Should ambiguous errors be reported during evaluation of `body`?
+     */
+    def savingUndeterminedTypeParams[A](reportAmbiguous: Boolean = ambiguousErrors)(body: => A): A = {
+      withMode() {
+        this(AmbiguousErrors) = reportAmbiguous
+        val saved = extractUndetparams()
+        try body
+        finally undetparams = saved
+      }
     }
-    def setThrowErrors()     = mode &= (~AllMask)
-    def setAmbiguousErrors(report: Boolean) = if (report) mode |= AmbiguousErrors else mode &= notThrowMask
 
-    def updateBuffer(errors: Set[AbsTypeError]) = buffer ++= errors
-    def condBufferFlush(removeP: AbsTypeError => Boolean) {
-      val elems = buffer.filter(removeP)
-      buffer --= elems
-    }
-    def flushBuffer() { buffer.clear() }
-    def flushAndReturnBuffer(): Set[AbsTypeError] = {
-      val current = buffer.clone()
-      buffer.clear()
+    //
+    // Error reporting policies and buffer.
+    //
+
+    private var _reportBuffer: ReportBuffer = new ReportBuffer
+    /** A buffer for errors and warnings, used with `this.bufferErrors == true` */
+    def reportBuffer = _reportBuffer
+    /** Discard the current report buffer, and replace with an empty one */
+    def useFreshReportBuffer() = _reportBuffer = new ReportBuffer
+    /** Discard the current report buffer, and replace with `other` */
+    def restoreReportBuffer(other: ReportBuffer) = _reportBuffer = other
+
+    /** The first error, if any, in the report buffer */
+    def firstError: Option[AbsTypeError] = reportBuffer.firstError
+    def errors: Seq[AbsTypeError] = reportBuffer.errors
+    /** Does the report buffer contain any errors? */
+    def hasErrors = reportBuffer.hasErrors
+
+    def reportErrors    = this(ReportErrors)
+    def bufferErrors    = this(BufferErrors)
+    def ambiguousErrors = this(AmbiguousErrors)
+    def throwErrors     = contextMode.inNone(ReportErrors | BufferErrors)
+
+    def setReportErrors(): Unit                   = set(enable = ReportErrors | AmbiguousErrors, disable = BufferErrors)
+    def setBufferErrors(): Unit                   = set(enable = BufferErrors, disable = ReportErrors | AmbiguousErrors)
+    def setThrowErrors(): Unit                    = this(ReportErrors | AmbiguousErrors | BufferErrors) = false
+    def setAmbiguousErrors(report: Boolean): Unit = this(AmbiguousErrors) = report
+
+    /** Append the given errors to the report buffer */
+    def updateBuffer(errors: Traversable[AbsTypeError]) = reportBuffer ++= errors
+    /** Clear all errors from the report buffer */
+    def flushBuffer() { reportBuffer.clearAllErrors() }
+    /** Return and clear all errors from the report buffer */
+    def flushAndReturnBuffer(): immutable.Seq[AbsTypeError] = {
+      val current = reportBuffer.errors
+      reportBuffer.clearAllErrors()
       current
     }
-    def flushAndReturnWarningsBuffer(): Set[(Position, String)] = {
-      val current = warningsBuffer.clone()
-      warningsBuffer.clear()
-      current
+
+    /** Issue and clear all warnings from the report buffer */
+    def flushAndIssueWarnings() {
+      reportBuffer.warnings foreach {
+        case (pos, msg) => unit.warning(pos, msg)
+      }
+      reportBuffer.clearAllWarnings()
     }
 
-    def logError(err: AbsTypeError) = buffer += err
+    //
+    // Temporary mode adjustment
+    //
 
-    def withImplicitsEnabled[T](op: => T): T = {
-      val saved = implicitsEnabled
-      implicitsEnabled = true
+    @inline def withMode[T](enabled: ContextMode = NOmode, disabled: ContextMode = NOmode)(op: => T): T = {
+      val saved = contextMode
+      set(enabled, disabled)
       try op
-      finally implicitsEnabled = saved
+      finally contextMode = saved
     }
 
-    def withImplicitsDisabled[T](op: => T): T = {
-      val saved = implicitsEnabled
-      implicitsEnabled = false
-      val savedP = enrichmentEnabled
-      enrichmentEnabled = false
-      try op
-      finally {
-        implicitsEnabled = saved
-        enrichmentEnabled = savedP
-      }
+    @inline final def withImplicitsEnabled[T](op: => T): T                 = withMode(enabled = ImplicitsEnabled)(op)
+    @inline final def withImplicitsDisabled[T](op: => T): T                = withMode(disabled = ImplicitsEnabled | EnrichmentEnabled)(op)
+    @inline final def withImplicitsDisabledAllowEnrichment[T](op: => T): T = withMode(enabled = EnrichmentEnabled, disabled = ImplicitsEnabled)(op)
+    @inline final def withMacrosEnabled[T](op: => T): T                    = withMode(enabled = MacrosEnabled)(op)
+    @inline final def withMacrosDisabled[T](op: => T): T                   = withMode(disabled = MacrosEnabled)(op)
+    @inline final def withinStarPatterns[T](op: => T): T                   = withMode(enabled = StarPatterns)(op)
+    @inline final def withinSuperInit[T](op: => T): T                      = withMode(enabled = SuperInit)(op)
+    @inline final def withinSecondTry[T](op: => T): T                      = withMode(enabled = SecondTry)(op)
+    @inline final def withinPatAlternative[T](op: => T): T                 = withMode(enabled = PatternAlternative)(op)
+
+    /** TypeConstructorAllowed is enabled when we are typing a higher-kinded type.
+     *  adapt should then check kind-arity based on the prototypical type's kind
+     *  arity. Type arguments should not be inferred.
+     */
+    @inline final def withinTypeConstructorAllowed[T](op: => T): T = withMode(enabled = TypeConstructorAllowed)(op)
+
+    /* TODO - consolidate returnsSeen (which seems only to be used by checkDead)
+     * and ReturnExpr.
+     */
+    @inline final def withinReturnExpr[T](op: => T): T = {
+      enclMethod.returnsSeen = true
+      withMode(enabled = ReturnExpr)(op)
     }
 
-    def withImplicitsDisabledAllowEnrichment[T](op: => T): T = {
-      val saved = implicitsEnabled
-      implicitsEnabled = false
-      val savedP = enrichmentEnabled
-      enrichmentEnabled = true
-      try op
-      finally {
-        implicitsEnabled = saved
-        enrichmentEnabled = savedP
+    // See comment on FormerNonStickyModes.
+    @inline final def withOnlyStickyModes[T](op: => T): T = withMode(disabled = FormerNonStickyModes)(op)
+
+    /** @return true if the `expr` evaluates to true within a silent Context that incurs no errors */
+    @inline final def inSilentMode(expr: => Boolean): Boolean = {
+      withMode() { // withMode with no arguments to restore the mode mutated by `setBufferErrors`.
+        setBufferErrors()
+        try expr && !hasErrors
+        finally reportBuffer.clearAll()
       }
     }
 
-    def withMacrosEnabled[T](op: => T): T = {
-      val saved = macrosEnabled
-      macrosEnabled = true
-      try op
-      finally macrosEnabled = saved
-    }
+    //
+    // Child Context Creation
+    //
 
-    def withMacrosDisabled[T](op: => T): T = {
-      val saved = macrosEnabled
-      macrosEnabled = false
-      try op
-      finally macrosEnabled = saved
-    }
-
-    def make(unit: CompilationUnit, tree: Tree, owner: Symbol,
-             scope: Scope, imports: List[ImportInfo]): Context = {
-      val c   = new Context
-      c.unit  = unit
-      c.tree  = tree
-      c.owner = owner
-      c.scope = scope
-      c.outer = this
-
-      tree match {
-        case Template(_, _, _) | PackageDef(_, _) =>
-          c.enclClass = c
-          c.prefix = c.owner.thisType
-          c.inConstructorSuffix = false
-        case _ =>
-          c.enclClass = this.enclClass
-          c.prefix =
-            if (c.owner != this.owner && c.owner.isTerm) NoPrefix
-            else this.prefix
-          c.inConstructorSuffix = this.inConstructorSuffix
+    /**
+     * Construct a child context. The parent and child will share the report buffer.
+     * Compare with `makeSilent`, in which the child has a fresh report buffer.
+     *
+     * If `tree` is an `Import`, that import will be available at the head of
+     * `Context#imports`.
+     */
+    def make(tree: Tree = tree, owner: Symbol = owner,
+             scope: Scope = scope, unit: CompilationUnit = unit): Context = {
+      val isTemplateOrPackage = tree match {
+        case _: Template | _: PackageDef => true
+        case _                           => false
+      }
+      val isDefDef = tree match {
+        case _: DefDef => true
+        case _         => false
       }
-      tree match {
-        case DefDef(_, _, _, _, _, _) =>
-          c.enclMethod = c
-        case _ =>
-          c.enclMethod = this.enclMethod
+      val isImport = tree match {
+        // The guard is for SI-8403. It prevents adding imports again in the context created by
+        // `Namer#createInnerNamer`
+        case _: Import if tree != this.tree => true
+        case _                              => false
       }
-      c.variance = this.variance
-      c.depth = if (scope == this.scope) this.depth else this.depth + 1
-      c.imports = imports
-      c.inSelfSuperCall = inSelfSuperCall
-      c.restoreState(this.state)
-      c.diagnostic = this.diagnostic
-      c.typingIndentLevel = typingIndentLevel
-      c.implicitsEnabled = this.implicitsEnabled
-      c.macrosEnabled = this.macrosEnabled
-      c.enrichmentEnabled = this.enrichmentEnabled
-      c.checking = this.checking
-      c.retyping = this.retyping
-      c.openImplicits = this.openImplicits
-      c.buffer = if (this.buffer == null) LinkedHashSet[AbsTypeError]() else this.buffer // need to initialize
-      c.warningsBuffer = if (this.warningsBuffer == null) LinkedHashSet[(Position, String)]() else this.warningsBuffer
+      val sameOwner = owner == this.owner
+      val prefixInChild =
+        if (isTemplateOrPackage) owner.thisType
+        else if (!sameOwner && owner.isTerm) NoPrefix
+        else prefix
+
+      // The blank canvas
+      val c = if (isImport)
+        new Context(tree, owner, scope, unit, this) with ImportContext
+      else
+        new Context(tree, owner, scope, unit, this)
+
+      // Fields that are directly propagated
+      c.variance           = variance
+      c.diagnostic         = diagnostic
+      c.openImplicits      = openImplicits
+      c.contextMode        = contextMode // note: ConstructorSuffix, a bit within `mode`, is conditionally overwritten below.
+      c._reportBuffer      = reportBuffer
+
+      // Fields that may take on a different value in the child
+      c.prefix             = prefixInChild
+      c.enclClass          = if (isTemplateOrPackage) c else enclClass
+      c(ConstructorSuffix) = !isTemplateOrPackage && c(ConstructorSuffix)
+
+      // SI-8245 The `isLazy` check skips lazy getters to ensure `return` binds to the right place
+      c.enclMethod         = if (isDefDef && !owner.isLazy) c else enclMethod
+
       registerContext(c.asInstanceOf[analyzer.Context])
       debuglog("[context] ++ " + c.unit + " / " + tree.summaryString)
       c
     }
 
-    // TODO: remove? Doesn't seem to be used
-    def make(unit: CompilationUnit): Context = {
-      val c = make(unit, EmptyTree, owner, scope, imports)
-      c.setReportErrors()
-      c.implicitsEnabled = true
-      c.macrosEnabled = true
-      c
-    }
-
-    def makeNewImport(sym: Symbol): Context =
-      makeNewImport(gen.mkWildcardImport(sym))
-
-    def makeNewImport(imp: Import): Context =
-      make(unit, imp, owner, scope, new ImportInfo(imp, depth) :: imports)
-
     def make(tree: Tree, owner: Symbol, scope: Scope): Context =
+      // TODO SI-7345 Moving this optimization into the main overload of `make` causes all tests to fail,
+      //              even if it is extended to check that `unit == this.unit`. Why is this?
       if (tree == this.tree && owner == this.owner && scope == this.scope) this
-      else make0(tree, owner, scope)
-
-    private def make0(tree: Tree, owner: Symbol, scope: Scope): Context =
-      make(unit, tree, owner, scope, imports)
+      else make(tree, owner, scope, unit)
 
+    /** Make a child context that represents a new nested scope */
     def makeNewScope(tree: Tree, owner: Symbol): Context =
       make(tree, owner, newNestedScope(scope))
-    // IDE stuff: distinguish between scopes created for typing and scopes created for naming.
-
-    def make(tree: Tree, owner: Symbol): Context =
-      make0(tree, owner, scope)
 
-    def make(tree: Tree): Context =
-      make(tree, owner)
-
-    def makeSilent(reportAmbiguousErrors: Boolean, newtree: Tree = tree): Context = {
+    /** Make a child context that buffers errors and warnings into a fresh report buffer. */
+    def makeSilent(reportAmbiguousErrors: Boolean = ambiguousErrors, newtree: Tree = tree): Context = {
       val c = make(newtree)
       c.setBufferErrors()
       c.setAmbiguousErrors(reportAmbiguousErrors)
-      c.buffer = new LinkedHashSet[AbsTypeError]()
+      c._reportBuffer = new ReportBuffer // A fresh buffer so as not to leak errors/warnings into `this`.
       c
     }
 
+    /** Make a silent child context that does not allow implicits. Used to prevent chaining of implicit views. */
     def makeImplicit(reportAmbiguousErrors: Boolean) = {
       val c = makeSilent(reportAmbiguousErrors)
-      c.implicitsEnabled = false
-      c.enrichmentEnabled = false
+      c(ImplicitsEnabled | EnrichmentEnabled) = false
       c
     }
 
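Much of the hunk above replaces the old boolean fields with a single `contextMode` bitmask that is queried and updated through mask operations (`set`, `inAll`, `inNone`). A rough standalone sketch of that pattern, with invented names and signatures rather than the real `ContextMode` API:

    object ContextModeSketch {
      // Toy mode bits, analogous to ImplicitsEnabled, MacrosEnabled and BufferErrors above.
      final val ImplicitsEnabled = 1 << 0
      final val MacrosEnabled    = 1 << 1
      final val BufferErrors     = 1 << 2

      def set(mode: Int, mask: Int, value: Boolean): Int =
        if (value) mode | mask else mode & ~mask

      def inAll(mode: Int, mask: Int): Boolean  = (mode & mask) == mask
      def inNone(mode: Int, mask: Int): Boolean = (mode & mask) == 0

      def main(args: Array[String]): Unit = {
        var mode = set(0, ImplicitsEnabled | MacrosEnabled, value = true)
        println(inAll(mode, ImplicitsEnabled | MacrosEnabled)) // true
        println(inNone(mode, BufferErrors))                    // true, cf. the throwErrors check
        mode = set(mode, MacrosEnabled, value = false)         // cf. withMacrosDisabled
        println(inAll(mode, MacrosEnabled))                    // false
      }
    }
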
@@ -355,12 +507,10 @@ trait Contexts { self: Analyzer =>
      * accessible.
      */
     def makeConstructorContext = {
-      var baseContext = enclClass.outer
-      while (baseContext.tree.isInstanceOf[Template])
-        baseContext = baseContext.outer
+      val baseContext = enclClass.outer.nextEnclosing(!_.tree.isInstanceOf[Template])
       val argContext = baseContext.makeNewScope(tree, owner)
+      argContext.contextMode = contextMode
       argContext.inSelfSuperCall = true
-      argContext.restoreState(this.state)
       def enterElems(c: Context) {
         def enterLocalElems(e: ScopeEntry) {
           if (e != null && e.owner == c.scope) {
@@ -379,6 +529,10 @@ trait Contexts { self: Analyzer =>
       argContext
     }
 
+    //
+    // Error and warning issuance
+    //
+
     private def addDiagString(msg: String) = {
       val ds =
         if (diagnostic.isEmpty) ""
@@ -386,23 +540,29 @@ trait Contexts { self: Analyzer =>
       if (msg endsWith ds) msg else msg + ds
     }
 
-    private def unitError(pos: Position, msg: String) =
-      unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
+    private def unitError(pos: Position, msg: String): Unit =
+      if (checking) onTreeCheckerError(pos, msg) else unit.error(pos, msg)
 
     @inline private def issueCommon(err: AbsTypeError)(pf: PartialFunction[AbsTypeError, Unit]) {
-      debugwarn("issue error: " + err.errMsg)
-      if (settings.Yissuedebug.value) (new Exception).printStackTrace()
+      // TODO: are errors allowed to have pos == NoPosition??
+      // if not, Jason suggests doing: val pos = err.errPos.orElse( { devWarning("Que?"); context.tree.pos })
+      if (settings.Yissuedebug) {
+        log("issue error: " + err.errMsg)
+        (new Exception).printStackTrace()
+      }
       if (pf isDefinedAt err) pf(err)
-      else if (bufferErrors) { buffer += err }
+      else if (bufferErrors) { reportBuffer += err }
       else throw new TypeError(err.errPos, err.errMsg)
     }
 
+    /** Issue/buffer/throw the given type error according to the current mode for error reporting. */
     def issue(err: AbsTypeError) {
       issueCommon(err) { case _ if reportErrors =>
         unitError(err.errPos, addDiagString(err.errMsg))
       }
     }
 
+    /** Issue/buffer/throw the given implicit ambiguity error according to the current mode for error reporting. */
     def issueAmbiguousError(pre: Type, sym1: Symbol, sym2: Symbol, err: AbsTypeError) {
       issueCommon(err) { case _ if ambiguousErrors =>
         if (!pre.isErroneous && !sym1.isErroneous && !sym2.isErroneous)
@@ -410,45 +570,29 @@ trait Contexts { self: Analyzer =>
       }
     }
 
+    /** Issue/buffer/throw the given implicit ambiguity error according to the current mode for error reporting. */
     def issueAmbiguousError(err: AbsTypeError) {
       issueCommon(err) { case _ if ambiguousErrors => unitError(err.errPos, addDiagString(err.errMsg)) }
     }
 
-    // TODO remove
+    /** Issue/throw the given `err` according to the current mode for error reporting. */
     def error(pos: Position, err: Throwable) =
       if (reportErrors) unitError(pos, addDiagString(err.getMessage()))
       else throw err
 
+    /** Issue/throw the given error message according to the current mode for error reporting. */
     def error(pos: Position, msg: String) = {
       val msg1 = addDiagString(msg)
       if (reportErrors) unitError(pos, msg1)
       else throw new TypeError(pos, msg1)
     }
 
-    def warning(pos: Position, msg: String): Unit = warning(pos, msg, false)
-    def warning(pos: Position, msg: String, force: Boolean) {
+    /** Issue or buffer the given warning message according to the current mode for error reporting. */
+    def warning(pos: Position, msg: String, force: Boolean = false) {
       if (reportErrors || force) unit.warning(pos, msg)
-      else if (bufferErrors) warningsBuffer += ((pos, msg))
+      else if (bufferErrors) reportBuffer += (pos -> msg)
     }
 
-    def isLocal(): Boolean = tree match {
-      case Block(_,_)       => true
-      case PackageDef(_, _) => false
-      case EmptyTree        => false
-      case _                => outer.isLocal()
-    }
-
-    /** Fast path for some slow checks (ambiguous assignment in Refchecks, and
-     *  existence of __match for MatchTranslation in virtpatmat.) This logic probably
-     *  needs improvement.
-     */
-    def isNameInScope(name: Name) = (
-      enclosingContextChain exists (ctx =>
-           (ctx.scope.lookupEntry(name) != null)
-        || (ctx.owner.rawInfo.member(name) != NoSymbol)
-      )
-    )
-
     // nextOuter determines which context is searched next for implicits
     // (after `this`, which contributes `newImplicits` below.) In
     // most cases, it is simply the outer context: if we're owned by
@@ -473,26 +617,35 @@ trait Contexts { self: Analyzer =>
 
     def enclosingContextChain: List[Context] = this :: outer.enclosingContextChain
 
-    override def toString = "Context(%s@%s unit=%s scope=%s errors=%b, reportErrors=%b, throwErrors=%b)".format(
-      owner.fullName, tree.shortClass, unit, scope.##, hasErrors, reportErrors, throwErrors
-    )
-    /** Is `sub` a subclass of `base` or a companion object of such a subclass?
-     */
-    def isSubClassOrCompanion(sub: Symbol, base: Symbol) =
-      sub.isNonBottomSubClass(base) ||
-      sub.isModuleClass && sub.linkedClassOfClass.isNonBottomSubClass(base)
-
-    /** Return closest enclosing context that defines a superclass of `clazz`, or a
-     *  companion module of a superclass of `clazz`, or NoContext if none exists */
-    def enclosingSuperClassContext(clazz: Symbol): Context = {
-      var c = this.enclClass
-      while (c != NoContext &&
-             !clazz.isNonBottomSubClass(c.owner) &&
-             !(c.owner.isModuleClass && clazz.isNonBottomSubClass(c.owner.companionClass)))
-        c = c.outer.enclClass
-      c
+    private def treeTruncated       = tree.toString.replaceAll("\\s+", " ").lines.mkString("\\n").take(70)
+    private def treeIdString        = if (settings.uniqid.value) "#" + System.identityHashCode(tree).toString.takeRight(3) else ""
+    private def treeString          = tree match {
+      case x: Import => "" + x
+      case Template(parents, `noSelfType`, body) =>
+        val pstr = if ((parents eq null) || parents.isEmpty) "Nil" else parents mkString " "
+        val bstr = if (body eq null) "" else body.length + " stats"
+        s"""Template($pstr, _, $bstr)"""
+      case x => s"${tree.shortClass}${treeIdString}:${treeTruncated}"
     }
 
+    override def toString =
+      sm"""|Context($unit) {
+           |   owner       = $owner
+           |   tree        = $treeString
+           |   scope       = ${scope.size} decls
+           |   contextMode = $contextMode
+           |   outer.owner = ${outer.owner}
+           |}"""
+
+    //
+    // Accessibility checking
+    //
+
+    /** Is `sub` a subclass of `base` or a companion object of such a subclass? */
+    private def isSubClassOrCompanion(sub: Symbol, base: Symbol) =
+      sub.isNonBottomSubClass(base) ||
+      sub.isModuleClass && sub.linkedClassOfClass.isNonBottomSubClass(base)
+
     /** Return the closest enclosing context that defines a subclass of `clazz`
      *  or a companion object thereof, or `NoContext` if no such context exists.
      */
@@ -503,22 +656,25 @@ trait Contexts { self: Analyzer =>
       c
     }
 
-    /** Is `sym` accessible as a member of tree `site` with type
-     *  `pre` in current context?
-     */
+    def enclosingNonImportContext: Context = {
+      var c = this
+      while (c != NoContext && c.tree.isInstanceOf[Import])
+        c = c.outer
+      c
+    }
+
+    /** Is `sym` accessible as a member of `pre` in current context? */
     def isAccessible(sym: Symbol, pre: Type, superAccess: Boolean = false): Boolean = {
       lastAccessCheckDetails = ""
       // Console.println("isAccessible(%s, %s, %s)".format(sym, pre, superAccess))
 
+      // don't have access if there is no linked class (so exclude linkedClass=NoSymbol)
       def accessWithinLinked(ab: Symbol) = {
-        val linked = ab.linkedClassOfClass
-        // don't have access if there is no linked class
-        // (before adding the `ne NoSymbol` check, this was a no-op when linked eq NoSymbol,
-        //  since `accessWithin(NoSymbol) == true` whatever the symbol)
-        (linked ne NoSymbol) && accessWithin(linked)
+        val linked = linkedClassOfClassOf(ab, this)
+        linked.fold(false)(accessWithin)
       }
 
-      /** Are we inside definition of `ab`? */
+      /* Are we inside definition of `ab`? */
       def accessWithin(ab: Symbol) = {
         // #3663: we must disregard package nesting if sym isJavaDefined
         if (sym.isJavaDefined) {
@@ -530,26 +686,12 @@ trait Contexts { self: Analyzer =>
         } else (owner hasTransOwner ab)
       }
 
-/*
-        var c = this
-        while (c != NoContext && c.owner != owner) {
-          if (c.outer eq null) abort("accessWithin(" + owner + ") " + c);//debug
-          if (c.outer.enclClass eq null) abort("accessWithin(" + owner + ") " + c);//debug
-          c = c.outer.enclClass
-        }
-        c != NoContext
-      }
-*/
-      /** Is `clazz` a subclass of an enclosing class? */
-      def isSubClassOfEnclosing(clazz: Symbol): Boolean =
-        enclosingSuperClassContext(clazz) != NoContext
-
       def isSubThisType(pre: Type, clazz: Symbol): Boolean = pre match {
         case ThisType(pclazz) => pclazz isNonBottomSubClass clazz
         case _ => false
       }
 
-      /** Is protected access to target symbol permitted */
+      /* Is protected access to target symbol permitted */
       def isProtectedAccessOK(target: Symbol) = {
         val c = enclosingSubClassContext(sym.owner)
         if (c == NoContext)
@@ -580,7 +722,7 @@ trait Contexts { self: Analyzer =>
 
         (  (ab.isTerm || ab == rootMirror.RootClass)
         || (accessWithin(ab) || accessWithinLinked(ab)) &&
-             (  !sym.hasLocalFlag
+             (  !sym.isLocalToThis
              || sym.owner.isImplClass // allow private local accesses to impl classes
              || sym.isProtected && isSubThisType(pre, sym.owner)
              || pre =:= sym.owner.thisType
@@ -589,8 +731,7 @@ trait Contexts { self: Analyzer =>
              (  superAccess
              || pre.isInstanceOf[ThisType]
              || phase.erasedTypes
-             || isProtectedAccessOK(sym)
-             || (sym.allOverriddenSymbols exists isProtectedAccessOK)
+             || (sym.overrideChain exists isProtectedAccessOK)
                 // that last condition makes protected access via self types work.
              )
         )
@@ -600,27 +741,51 @@ trait Contexts { self: Analyzer =>
       }
     }
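      // Editorial illustration (hypothetical classes) of the protected-access rule enforced above:
      // access requires an enclosing subclass context, and for term members the prefix must
      // conform to that accessing subclass.
      //   class Base { protected def p = 0 }
      //   class Sub extends Base {
      //     def ok           = p     // allowed: the enclosing class is a subclass of Base
      //     def bad(b: Base) = b.p   // rejected: prefix type Base does not conform to Sub
      //   }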
 
+    //
+    // Type bound management
+    //
+
     def pushTypeBounds(sym: Symbol) {
+      sym.info match {
+        case tb: TypeBounds => if (!tb.isEmptyBounds) log(s"Saving $sym info=$tb")
+        case info           => devWarning(s"Something other than a TypeBounds seen in pushTypeBounds: $info is a ${shortClassOfInstance(info)}")
+      }
       savedTypeBounds ::= ((sym, sym.info))
     }
 
     def restoreTypeBounds(tp: Type): Type = {
-      var current = tp
-      for ((sym, info) <- savedTypeBounds) {
-        debuglog("resetting " + sym + " to " + info);
-        sym.info match {
-          case TypeBounds(lo, hi) if (hi <:< lo && lo <:< hi) =>
-            current = current.instantiateTypeParams(List(sym), List(lo))
-//@M TODO: when higher-kinded types are inferred, probably need a case PolyType(_, TypeBounds(...)) if ... =>
-          case _ =>
-        }
-        sym.setInfo(info)
+      def restore(): Type = savedTypeBounds.foldLeft(tp) { case (current, (sym, savedInfo)) =>
+        def bounds_s(tb: TypeBounds) = if (tb.isEmptyBounds) "<empty bounds>" else s"TypeBounds(lo=${tb.lo}, hi=${tb.hi})"
+        //@M TODO: when higher-kinded types are inferred, probably need a case PolyType(_, TypeBounds(...)) if ... =>
+        val TypeBounds(lo, hi) = sym.info.bounds
+        val isUnique           = lo <:< hi && hi <:< lo
+        val isPresent          = current contains sym
+        def saved_s            = bounds_s(savedInfo.bounds)
+        def current_s          = bounds_s(sym.info.bounds)
+
+        if (isUnique && isPresent)
+          devWarningResult(s"Preserving inference: ${sym.nameString}=$hi in $current (based on $current_s) before restoring $sym to saved $saved_s")(
+            current.instantiateTypeParams(List(sym), List(hi))
+          )
+        else if (isPresent)
+          devWarningResult(s"Discarding inferred $current_s because it does not uniquely determine $sym in")(current)
+        else
+          logResult(s"Discarding inferred $current_s because $sym does not appear in")(current)
+      }
+      try restore()
+      finally {
+        for ((sym, savedInfo) <- savedTypeBounds)
+          sym setInfo debuglogResult(s"Discarding inferred $sym=${sym.info}, restoring saved info")(savedInfo)
+
+        savedTypeBounds = Nil
       }
-      savedTypeBounds = List()
-      current
     }
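      // Editorial note (illustrative only): the "uniquely determined" case above is a type
      // parameter whose inferred bounds have collapsed to a single type, e.g. `T >: Int <: Int`;
      // it is then instantiated to that type in the result before the saved info is restored.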
 
-    private var implicitsCache: List[List[ImplicitInfo]] = null
+    //
+    // Implicit collection
+    //
+
+    private var implicitsCache: List[ImplicitInfo] = null
     private var implicitsRunId = NoRunId
 
     def resetCache() {
@@ -662,7 +827,7 @@ trait Contexts { self: Analyzer =>
         case ImportSelector(from, _, to, _) :: sels1 =>
           var impls = collect(sels1) filter (info => info.name != from)
           if (to != nme.WILDCARD) {
-            for (sym <- imp.importedSymbol(to).alternatives)
+            for (sym <- importedAccessibleSymbol(imp, to).alternatives)
               if (isQualifyingImplicit(to, sym, pre, imported = true))
                 impls = new ImplicitInfo(to, pre, sym) :: impls
           }
@@ -679,33 +844,351 @@ trait Contexts { self: Analyzer =>
      * filtered out later by `eligibleInfos` (SI-4270 / 9129cfe9), as they don't type-check.
      */
     def implicitss: List[List[ImplicitInfo]] = {
-      if (implicitsRunId != currentRunId) {
-        implicitsRunId = currentRunId
-        implicitsCache = List()
-        val newImplicits: List[ImplicitInfo] =
-          if (owner != nextOuter.owner && owner.isClass && !owner.isPackageClass && !inSelfSuperCall) {
-            if (!owner.isInitialized) return nextOuter.implicitss
-            // debuglog("collect member implicits " + owner + ", implicit members = " + owner.thisType.implicitMembers)//DEBUG
-            savingEnclClass(this) {
-              // !!! In the body of `class C(implicit a: A) { }`, `implicitss` returns `List(List(a), List(a), List(<predef..)))`
-              //     it handled correctly by implicit search, which considers the second `a` to be shadowed, but should be
-              //     remedied nonetheless.
-              collectImplicits(owner.thisType.implicitMembers, owner.thisType)
-            }
-          } else if (scope != nextOuter.scope && !owner.isPackageClass) {
-            debuglog("collect local implicits " + scope.toList)//DEBUG
-            collectImplicits(scope, NoPrefix)
-          } else if (imports != nextOuter.imports) {
-            assert(imports.tail == nextOuter.imports, (imports, nextOuter.imports))
-            collectImplicitImports(imports.head)
-          } else if (owner.isPackageClass) {
-            // the corresponding package object may contain implicit members.
-            collectImplicits(owner.tpe.implicitMembers, owner.tpe)
-          } else List()
-        implicitsCache = if (newImplicits.isEmpty) nextOuter.implicitss
-                         else newImplicits :: nextOuter.implicitss
+      val nextOuter = this.nextOuter
+      def withOuter(is: List[ImplicitInfo]): List[List[ImplicitInfo]] =
+        is match {
+          case Nil => nextOuter.implicitss
+          case _   => is :: nextOuter.implicitss
+        }
+
+      val CycleMarker = NoRunId - 1
+      if (implicitsRunId == CycleMarker) {
+        debuglog(s"cycle while collecting implicits at owner ${owner}, probably due to an implicit without an explicit return type. Continuing with implicits from enclosing contexts.")
+        withOuter(Nil)
+      } else if (implicitsRunId != currentRunId) {
+        implicitsRunId = CycleMarker
+        implicits(nextOuter) match {
+          case None =>
+            implicitsRunId = NoRunId
+            withOuter(Nil)
+          case Some(is) =>
+            implicitsRunId = currentRunId
+            implicitsCache = is
+            withOuter(is)
+        }
       }
-      implicitsCache
+      else withOuter(implicitsCache)
+    }
+
+    /** @return None if a cycle is detected, or Some(infos) containing the in-scope implicits at this context */
+    private def implicits(nextOuter: Context): Option[List[ImplicitInfo]] = {
+      val imports = this.imports
+      if (owner != nextOuter.owner && owner.isClass && !owner.isPackageClass && !inSelfSuperCall) {
+        if (!owner.isInitialized) None
+        else savingEnclClass(this) {
+          // !!! In the body of `class C(implicit a: A) { }`, `implicitss` returns `List(List(a), List(a), List(<predef..)))`
+          //     it is handled correctly by implicit search, which considers the second `a` to be shadowed, but should be
+          //     remedied nonetheless.
+          Some(collectImplicits(owner.thisType.implicitMembers, owner.thisType))
+        }
+      } else if (scope != nextOuter.scope && !owner.isPackageClass) {
+        debuglog("collect local implicits " + scope.toList)//DEBUG
+        Some(collectImplicits(scope, NoPrefix))
+      } else if (firstImport != nextOuter.firstImport) {
+        assert(imports.tail.headOption == nextOuter.firstImport, (imports, nextOuter.imports))
+        Some(collectImplicitImports(imports.head))
+      } else if (owner.isPackageClass) {
+        // the corresponding package object may contain implicit members.
+        Some(collectImplicits(owner.tpe.implicitMembers, owner.tpe))
+      } else Some(Nil)
+    }
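      // Editorial illustration (hypothetical code) of the cycle guarded by CycleMarker in
      // `implicitss` above: collecting the implicit members of `owner` may need the result type
      // of an implicit that is still being inferred, which re-enters this context.
      //   class Box(val i: Int)
      //   object Boxes { implicit def box(i: Int) = new Box(i) }   // no explicit result type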
+
+    //
+    // Imports and symbol lookup
+    //
+
+    /** It's possible that seemingly conflicting identifiers are
+     *  identifiably the same after type normalization.  In such cases,
+     *  allow compilation to proceed.  A typical example is:
+     *    package object foo { type InputStream = java.io.InputStream }
+     *    import foo._, java.io._
+     */
+    private def resolveAmbiguousImport(name: Name, imp1: ImportInfo, imp2: ImportInfo): Option[ImportInfo] = {
+      val imp1Explicit = imp1 isExplicitImport name
+      val imp2Explicit = imp2 isExplicitImport name
+      val ambiguous    = if (imp1.depth == imp2.depth) imp1Explicit == imp2Explicit else !imp1Explicit && imp2Explicit
+      val imp1Symbol   = (imp1 importedSymbol name).initialize filter (s => isAccessible(s, imp1.qual.tpe, superAccess = false))
+      val imp2Symbol   = (imp2 importedSymbol name).initialize filter (s => isAccessible(s, imp2.qual.tpe, superAccess = false))
+
+      // The types of the qualifiers from which the ambiguous imports come.
+      // If the ambiguous name is a value, these must be the same.
+      def t1 = imp1.qual.tpe
+      def t2 = imp2.qual.tpe
+      // The types of the ambiguous symbols, seen as members of their qualifiers.
+      // If the ambiguous name is a monomorphic type, we can relax this far.
+      def mt1 = t1 memberType imp1Symbol
+      def mt2 = t2 memberType imp2Symbol
+
+      def characterize = List(
+        s"types:  $t1 =:= $t2  ${t1 =:= t2}  members: ${mt1 =:= mt2}",
+        s"member type 1: $mt1",
+        s"member type 2: $mt2"
+      ).mkString("\n  ")
+
+      if (!ambiguous || !imp2Symbol.exists) Some(imp1)
+      else if (!imp1Symbol.exists) Some(imp2)
+      else (
+        // The symbol names are checked rather than the symbols themselves because
+        // each time an overloaded member is looked up it receives a new symbol.
+        // So foo.member("x") != foo.member("x") if x is overloaded.  This seems
+        // likely to be the cause of other bugs too...
+        if (t1 =:= t2 && imp1Symbol.name == imp2Symbol.name) {
+          log(s"Suppressing ambiguous import: $t1 =:= $t2 && $imp1Symbol == $imp2Symbol")
+          Some(imp1)
+        }
+        // Monomorphism restriction on types is in part because type aliases could have the
+        // same target type but attach different variance to the parameters. Maybe it can be
+        // relaxed, but doesn't seem worth it at present.
+        else if (mt1 =:= mt2 && name.isTypeName && imp1Symbol.isMonomorphicType && imp2Symbol.isMonomorphicType) {
+          log(s"Suppressing ambiguous import: $mt1 =:= $mt2 && $imp1Symbol and $imp2Symbol are equivalent")
+          Some(imp1)
+        }
+        else {
+          log(s"Import is genuinely ambiguous:\n  " + characterize)
+          None
+        }
+      )
+    }
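      // Editorial illustration, expanding the scaladoc example above: the two imports make
      // `InputStream` available twice, but both bindings normalize to the same type, so the
      // apparent ambiguity is suppressed and compilation proceeds.
      //   package object foo { type InputStream = java.io.InputStream }
      //   object Use {
      //     import foo._, java.io._
      //     def read(in: InputStream) = ()   // resolves without an ambiguity error
      //   }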
+
+    /** The symbol with name `name` imported via the import in `imp`,
+     *  if any such symbol is accessible from this context.
+     */
+    def importedAccessibleSymbol(imp: ImportInfo, name: Name): Symbol =
+      importedAccessibleSymbol(imp, name, requireExplicit = false)
+
+    private def importedAccessibleSymbol(imp: ImportInfo, name: Name, requireExplicit: Boolean): Symbol =
+      imp.importedSymbol(name, requireExplicit) filter (s => isAccessible(s, imp.qual.tpe, superAccess = false))
+
+    /** Is `sym` defined in package object of package `pkg`?
+     *  Since sym may be defined in some parent of the package object,
+     *  we cannot inspect its owner only; we have to go through the
+     *  info of the package object.  However, to avoid cycles we try the
+     *  cheaper owner-based checks before resorting to that.
+     */
+    def isInPackageObject(sym: Symbol, pkg: Symbol): Boolean = {
+      def uninitialized(what: String) = {
+        log(s"Cannot look for $sym in package object of $pkg; $what is not initialized.")
+        false
+      }
+      def pkgClass = if (pkg.isTerm) pkg.moduleClass else pkg
+      def matchesInfo = (
+        // need to be careful here to not get a cyclic reference during bootstrap
+        if (pkg.isInitialized) {
+          val module = pkg.info member nme.PACKAGEkw
+          if (module.isInitialized)
+            module.info.member(sym.name).alternatives contains sym
+          else
+            uninitialized("" + module)
+        }
+        else uninitialized("" + pkg)
+      )
+      def inPackageObject(sym: Symbol) = (
+        // To be in the package object, one of these must be true:
+        //   1) sym.owner is a package object class, and sym.owner.owner is the package class for `pkg`
+        //   2) sym.owner is inherited by the correct package object class
+        // We try to establish 1) by inspecting the owners directly, and then we try
+        // to rule out 2), and only if both those fail do we resort to looking in the info.
+        !sym.hasPackageFlag && sym.owner.exists && (
+          if (sym.owner.isPackageObjectClass)
+            sym.owner.owner == pkgClass
+          else
+            !sym.owner.isPackageClass && matchesInfo
+        )
+      )
+
+      // An overloaded symbol might not have the expected owner!
+      // The alternatives must be inspected directly.
+      pkgClass.isPackageClass && (
+        if (sym.isOverloaded)
+          sym.alternatives forall (isInPackageObject(_, pkg))
+        else
+          inPackageObject(sym)
+      )
+    }
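    // Editorial illustration (hypothetical names): `pi` below is owned by the package object
    // class of `foo`, so case 1) in `inPackageObject` applies directly and the package-object
    // info never has to be forced.
    //   package object foo { val pi = 3.14 }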
+
+    def isNameInScope(name: Name) = lookupSymbol(name, _ => true).isSuccess
+
+    /** Find the symbol of a simple name starting from this context.
+     *  All names are filtered through the "qualifies" predicate,
+     *  the search continuing as long as no qualifying name is found.
+     */
+    def lookupSymbol(name: Name, qualifies: Symbol => Boolean): NameLookup = {
+      var lookupError: NameLookup  = null       // set to non-null if a definite error is encountered
+      var inaccessible: NameLookup = null       // records inaccessible symbol for error reporting in case none is found
+      var defSym: Symbol           = NoSymbol   // the directly found symbol
+      var pre: Type                = NoPrefix   // the prefix type of defSym, if a class member
+      var cx: Context              = this       // the context under consideration
+      var symbolDepth: Int         = -1         // the depth of the directly found symbol
+
+      def finish(qual: Tree, sym: Symbol): NameLookup = (
+        if (lookupError ne null) lookupError
+        else sym match {
+          case NoSymbol if inaccessible ne null => inaccessible
+          case NoSymbol                         => LookupNotFound
+          case _                                => LookupSucceeded(qual, sym)
+        }
+      )
+      def finishDefSym(sym: Symbol, pre0: Type): NameLookup =
+        if (requiresQualifier(sym))
+          finish(gen.mkAttributedQualifier(pre0), sym)
+        else
+          finish(EmptyTree, sym)
+
+      def isPackageOwnedInDifferentUnit(s: Symbol) = (
+        s.isDefinedInPackage && (
+             !currentRun.compiles(s)
+          || unit.exists && s.sourceFile != unit.source.file
+        )
+      )
+      def requiresQualifier(s: Symbol) = (
+           s.owner.isClass
+        && !s.owner.isPackageClass
+        && !s.isTypeParameterOrSkolem
+      )
+      def lookupInPrefix(name: Name)    = pre member name filter qualifies
+      def accessibleInPrefix(s: Symbol) = isAccessible(s, pre, superAccess = false)
+
+      def searchPrefix = {
+        cx = cx.enclClass
+        val found0 = lookupInPrefix(name)
+        val found1 = found0 filter accessibleInPrefix
+        if (found0.exists && !found1.exists && inaccessible == null)
+          inaccessible = LookupInaccessible(found0, analyzer.lastAccessCheckDetails)
+
+        found1
+      }
+
+      def lookupInScope(scope: Scope) =
+        (scope lookupUnshadowedEntries name filter (e => qualifies(e.sym))).toList
+
+      def newOverloaded(owner: Symbol, pre: Type, entries: List[ScopeEntry]) =
+        logResult(s"overloaded symbol in $pre")(owner.newOverloaded(pre, entries map (_.sym)))
+
+      // Constructor lookup should only look in the decls of the enclosing class
+      // not in the self-type, nor in the enclosing context, nor in imports (SI-4460, SI-6745)
+      if (name == nme.CONSTRUCTOR) return {
+        val enclClassSym = cx.enclClass.owner
+        val scope = cx.enclClass.prefix.baseType(enclClassSym).decls
+        val constructorSym = lookupInScope(scope) match {
+          case Nil       => NoSymbol
+          case hd :: Nil => hd.sym
+          case entries   => newOverloaded(enclClassSym, cx.enclClass.prefix, entries)
+        }
+        finishDefSym(constructorSym, cx.enclClass.prefix)
+      }
+
+      // cx.scope eq null arises during FixInvalidSyms in Duplicators
+      while (defSym == NoSymbol && (cx ne NoContext) && (cx.scope ne null)) {
+        pre    = cx.enclClass.prefix
+        defSym = lookupInScope(cx.scope) match {
+          case Nil                  => searchPrefix
+          case entries @ (hd :: tl) =>
+            // we have a winner: record the symbol depth
+            symbolDepth = (cx.depth - cx.scope.nestingLevel) + hd.depth
+            if (tl.isEmpty) hd.sym
+            else newOverloaded(cx.owner, pre, entries)
+        }
+        if (!defSym.exists)
+          cx = cx.outer // push further outward
+      }
+      if (symbolDepth < 0)
+        symbolDepth = cx.depth
+
+      var impSym: Symbol = NoSymbol
+      var imports        = Context.this.imports
+      def imp1           = imports.head
+      def imp2           = imports.tail.head
+      def sameDepth      = imp1.depth == imp2.depth
+      def imp1Explicit   = imp1 isExplicitImport name
+      def imp2Explicit   = imp2 isExplicitImport name
+
+      def lookupImport(imp: ImportInfo, requireExplicit: Boolean) =
+        importedAccessibleSymbol(imp, name, requireExplicit) filter qualifies
+
+      // Java: A single-type-import declaration d in a compilation unit c of package p
+      // that imports a type named n shadows, throughout c, the declarations of:
+      //
+      //  1) any top level type named n declared in another compilation unit of p
+      //
+      // A type-import-on-demand declaration never causes any other declaration to be shadowed.
+      //
+      // Scala: Bindings of different kinds have a precedence defined on them:
+      //
+      //  1) Definitions and declarations that are local, inherited, or made available by a
+      //     package clause in the same compilation unit where the definition occurs have
+      //     highest precedence.
+      //  2) Explicit imports have next highest precedence.
+      def depthOk(imp: ImportInfo) = (
+           imp.depth > symbolDepth
+        || (unit.isJava && imp.isExplicitImport(name) && imp.depth == symbolDepth)
+      )
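      // Editorial illustration (hypothetical objects) of the precedence summarized above:
      // a definition in the same compilation unit shadows a wildcard import.
      //   object Lib { val x = 1 }
      //   object Use {
      //     import Lib._
      //     val x = 2
      //     def demo = x   // the local `x = 2` wins over the wildcard-imported one
      //   }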
+
+      while (!impSym.exists && imports.nonEmpty && depthOk(imports.head)) {
+        impSym = lookupImport(imp1, requireExplicit = false)
+        if (!impSym.exists)
+          imports = imports.tail
+      }
+
+      if (defSym.exists && impSym.exists) {
+        // imported symbols take precedence over package-owned symbols in different compilation units.
+        if (isPackageOwnedInDifferentUnit(defSym))
+          defSym = NoSymbol
+        // Defined symbols take precedence over erroneous imports.
+        else if (impSym.isError || impSym.name == nme.CONSTRUCTOR)
+          impSym = NoSymbol
+        // Otherwise they are irreconcilably ambiguous
+        else
+          return ambiguousDefnAndImport(defSym.alternatives.head.owner, imp1)
+      }
+
+      // At this point only one or the other of defSym and impSym might be set.
+      if (defSym.exists)
+        finishDefSym(defSym, pre)
+      else if (impSym.exists) {
+        // We continue walking down the imports as long as the tail is non-empty, which gives us:
+        //   imports  ==  imp1 :: imp2 :: _
+        // And at least one of the following is true:
+        //   - imp1 and imp2 are at the same depth
+        //   - imp1 is a wildcard import, so all explicit imports from outer scopes must be checked
+        def keepLooking = (
+             lookupError == null
+          && imports.tail.nonEmpty
+          && (sameDepth || !imp1Explicit)
+        )
+        // If we find a competitor imp2 which imports the same name, possible outcomes are:
+        //
+        //  - same depth, imp1 wild, imp2 explicit:        imp2 wins, drop imp1
+        //  - same depth, imp1 wild, imp2 wild:            ambiguity check
+        //  - same depth, imp1 explicit, imp2 explicit:    ambiguity check
+        //  - differing depth, imp1 wild, imp2 explicit:   ambiguity check
+        //  - all others:                                  imp1 wins, drop imp2
+        //
+        // The ambiguity check is: if we can verify that both imports refer to the same
+        // symbol (e.g. import foo.X followed by import foo._) then we discard imp2
+        // and proceed. If we cannot, issue an ambiguity error.
+        while (keepLooking) {
+          // If not at the same depth, limit the lookup to explicit imports.
+          // This is desirable from a performance standpoint (compare to
+          // filtering after the fact) but also necessary to keep the unused
+          // import check from being misled by symbol lookups which are not
+          // actually used.
+          val other = lookupImport(imp2, requireExplicit = !sameDepth)
+          def imp1wins() = { imports = imp1 :: imports.tail.tail }
+          def imp2wins() = { impSym = other ; imports = imports.tail }
+
+          if (!other.exists) // imp1 wins; drop imp2 and continue.
+            imp1wins()
+          else if (sameDepth && !imp1Explicit && imp2Explicit) // imp2 wins; drop imp1 and continue.
+            imp2wins()
+          else resolveAmbiguousImport(name, imp1, imp2) match {
+            case Some(imp) => if (imp eq imp1) imp1wins() else imp2wins()
+            case _         => lookupError = ambiguousImports(imp1, imp2)
+          }
+        }
+        // optimization: don't write out package prefixes
+        finish(resetPos(imp1.qual.duplicate), impSym)
+      }
+      else finish(EmptyTree, NoSymbol)
     }
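    // Editorial illustration (hypothetical objects) of the import competition handled in the
    // loop above: at the same depth an explicit import shadows a wildcard one.
    //   object A { val v = "a" }
    //   object B { val v = "b" }
    //   object Use {
    //     import A._
    //     import B.v
    //     def demo = v   // resolves to B.v
    //   }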
 
     /**
@@ -731,12 +1214,84 @@ trait Contexts { self: Analyzer =>
     }
   } //class Context
 
+  /** A `Context` focussed on an `Import` tree */
+  trait ImportContext extends Context {
+    private val impInfo: ImportInfo = {
+      val info = new ImportInfo(tree.asInstanceOf[Import], outerDepth)
+      if (settings.warnUnusedImport && !isRootImport) // excludes java.lang/scala/Predef imports
+        allImportInfos(unit) ::= info
+      info
+    }
+    override final def imports      = impInfo :: super.imports
+    override final def firstImport  = Some(impInfo)
+    override final def isRootImport = !tree.pos.isDefined
+    override final def toString     = super.toString + " with " + s"ImportContext { $impInfo; outer.owner = ${outer.owner} }"
+  }
+
+  /** A buffer for warnings and errors that are accumulated during speculative type checking. */
+  final class ReportBuffer {
+    type Error = AbsTypeError
+    type Warning = (Position, String)
+
+    private def newBuffer[A] = mutable.LinkedHashSet.empty[A] // Important to use LinkedHS for stable results.
+
+    // [JZ] Contexts, pre- the SI-7345 refactor, avoided allocating the buffers until needed. This
+    // is replicated here out of conservatism.
+    private var _errorBuffer: mutable.LinkedHashSet[Error] = _
+    private def errorBuffer = {if (_errorBuffer == null) _errorBuffer = newBuffer; _errorBuffer}
+    def errors: immutable.Seq[Error] = errorBuffer.toVector
+
+    private var _warningBuffer: mutable.LinkedHashSet[Warning] = _
+    private def warningBuffer = {if (_warningBuffer == null) _warningBuffer = newBuffer; _warningBuffer}
+    def warnings: immutable.Seq[Warning] = warningBuffer.toVector
+
+    def +=(error: AbsTypeError): this.type = {
+      errorBuffer += error
+      this
+    }
+    def ++=(errors: Traversable[AbsTypeError]): this.type = {
+      errorBuffer ++= errors
+      this
+    }
+    def +=(warning: Warning): this.type = {
+      warningBuffer += warning
+      this
+    }
+
+    def clearAll(): this.type = {
+      clearAllErrors(); clearAllWarnings();
+    }
+
+    def clearAllErrors(): this.type = {
+      errorBuffer.clear()
+      this
+    }
+    def clearErrors(removeF: PartialFunction[AbsTypeError, Boolean]): this.type = {
+      errorBuffer.retain(!PartialFunction.cond(_)(removeF))
+      this
+    }
+    def retainErrors(leaveF: PartialFunction[AbsTypeError, Boolean]): this.type = {
+      errorBuffer.retain(PartialFunction.cond(_)(leaveF))
+      this
+    }
+    def clearAllWarnings(): this.type = {
+      warningBuffer.clear()
+      this
+    }
+
+    def hasErrors     = errorBuffer.nonEmpty
+    def firstError    = errorBuffer.headOption
+  }
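  // Editorial sketch (internal compiler API; method names as used elsewhere in this patch) of how
  // a ReportBuffer participates in speculative typing: a silent child context accumulates errors
  // here, and the caller decides whether to surface or discard them.
  //   val silent = context.makeSilent(reportAmbiguousErrors = false)
  //   val result = newTyper(silent).typed(tree)
  //   if (silent.reportBuffer.hasErrors) { /* fall back */ } else { /* use result */ }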
+
   class ImportInfo(val tree: Import, val depth: Int) {
+    def pos = tree.pos
+    def posOf(sel: ImportSelector) = tree.pos withPoint sel.namePos
+
     /** The prefix expression */
     def qual: Tree = tree.symbol.info match {
       case ImportType(expr) => expr
-      case ErrorType => tree setType NoType // fix for #2870
-      case _ => throw new FatalError("symbol " + tree.symbol + " has bad type: " + tree.symbol.info) //debug
+      case ErrorType        => tree setType NoType // fix for #2870
+      case _                => throw new FatalError("symbol " + tree.symbol + " has bad type: " + tree.symbol.info) //debug
     }
 
     /** Is name imported explicitly, not via wildcard? */
@@ -745,25 +1300,53 @@ trait Contexts { self: Analyzer =>
 
     /** The symbol with name `name` imported from import clause `tree`.
      */
-    def importedSymbol(name: Name): Symbol = {
+    def importedSymbol(name: Name): Symbol = importedSymbol(name, requireExplicit = false)
+
+    private def recordUsage(sel: ImportSelector, result: Symbol) {
+      def posstr = pos.source.file.name + ":" + posOf(sel).line
+      def resstr = if (tree.symbol.hasCompleteInfo) s"(qual=$qual, $result)" else s"(expr=${tree.expr}, ${result.fullLocationString})"
+      debuglog(s"In $this at $posstr, selector '${selectorString(sel)}' resolved to $resstr")
+      allUsedSelectors(this) += sel
+    }
+
+    /** If requireExplicit is true, wildcard imports are not considered. */
+    def importedSymbol(name: Name, requireExplicit: Boolean): Symbol = {
       var result: Symbol = NoSymbol
       var renamed = false
       var selectors = tree.selectors
-      while (selectors != Nil && result == NoSymbol) {
-        if (selectors.head.rename == name.toTermName)
+      def current = selectors.head
+      while ((selectors ne Nil) && result == NoSymbol) {
+        if (current.rename == name.toTermName)
           result = qual.tpe.nonLocalMember( // new to address #2733: consider only non-local members for imports
-            if (name.isTypeName) selectors.head.name.toTypeName else selectors.head.name)
-        else if (selectors.head.name == name.toTermName)
+            if (name.isTypeName) current.name.toTypeName else current.name)
+        else if (current.name == name.toTermName)
           renamed = true
-        else if (selectors.head.name == nme.WILDCARD && !renamed)
+        else if (current.name == nme.WILDCARD && !renamed && !requireExplicit)
           result = qual.tpe.nonLocalMember(name)
-        selectors = selectors.tail
+
+        if (result == NoSymbol)
+          selectors = selectors.tail
       }
-      result
+      if (settings.warnUnusedImport && selectors.nonEmpty && result != NoSymbol && pos != NoPosition)
+        recordUsage(current, result)
+
+      // Harden against the fallout from bugs like SI-6745
+      //
+      // [JZ] I considered issuing a devWarning and moving the
+      //      check inside the above loop, as I believe that
+      //      this always represents a mistake on the part of
+      //      the caller.
+      if (definitions isImportable result) result
+      else NoSymbol
+    }
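    // Editorial illustration (hypothetical clause `import p.{A, B => C, _}`) of the selector
    // loop above:
    //   importedSymbol(TypeName("A"))                          // explicit selector: resolved
    //   importedSymbol(TermName("B"))                          // renamed away: NoSymbol (only `C` is importable)
    //   importedSymbol(TermName("D"))                          // resolved via the wildcard
    //   importedSymbol(TermName("D"), requireExplicit = true)  // NoSymbol: wildcard not considered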
+    private def selectorString(s: ImportSelector): String = {
+      if (s.name == nme.WILDCARD && s.rename == null) "_"
+      else if (s.name == s.rename) "" + s.name
+      else s.name + " => " + s.rename
     }
 
     def allImportedSymbols: Iterable[Symbol] =
-      qual.tpe.members flatMap (transformImport(tree.selectors, _))
+      importableMembers(qual.tpe) flatMap (transformImport(tree.selectors, _))
 
     private def transformImport(selectors: List[ImportSelector], sym: Symbol): List[Symbol] = selectors match {
       case List() => List()
@@ -774,10 +1357,123 @@ trait Contexts { self: Analyzer =>
       case _ :: rest => transformImport(rest, sym)
     }
 
-    override def toString() = tree.toString()
+    override def hashCode = tree.##
+    override def equals(other: Any) = other match {
+      case that: ImportInfo => (tree == that.tree)
+      case _                => false
+    }
+    override def toString = tree.toString
   }
 
-  case class ImportType(expr: Tree) extends Type {
-    override def safeToString = "ImportType("+expr+")"
-  }
+  type ImportType = global.ImportType
+  val ImportType = global.ImportType
+}
+
+object ContextMode {
+  import scala.language.implicitConversions
+  private implicit def liftIntBitsToContextState(bits: Int): ContextMode = apply(bits)
+  def apply(bits: Int): ContextMode = new ContextMode(bits)
+  final val NOmode: ContextMode                   = 0
+
+  final val ReportErrors: ContextMode             = 1 << 0
+  final val BufferErrors: ContextMode             = 1 << 1
+  final val AmbiguousErrors: ContextMode          = 1 << 2
+
+  /** Are we in a secondary constructor after the this constructor call? */
+  final val ConstructorSuffix: ContextMode        = 1 << 3
+
+  /** For method context: were returns encountered? */
+  final val ReturnsSeen: ContextMode              = 1 << 4
+
+  /** Is this context (enclosed in) a constructor call?
+    * (the call to the super or self constructor in the first line of a constructor.)
+    * In such a context, the object's fields should not be in scope
+    */
+  final val SelfSuperCall: ContextMode            = 1 << 5
+
+  // TODO harvest documentation for this
+  final val ImplicitsEnabled: ContextMode         = 1 << 6
+
+  final val MacrosEnabled: ContextMode            = 1 << 7
+
+  /** To selectively allow enrichment in patterns, where other kinds of implicit conversions are not allowed */
+  final val EnrichmentEnabled: ContextMode        = 1 << 8
+
+  /** Are we in a run of [[scala.tools.nsc.typechecker.TreeCheckers]]? */
+  final val Checking: ContextMode                 = 1 << 9
+
+  /** Are we retypechecking arguments independently from the function applied to them? See `Typer.tryTypedApply`
+   *  TODO - iron out distinction/overlap with SecondTry.
+   */
+  final val ReTyping: ContextMode                 = 1 << 10
+
+  /** Are we typechecking pattern alternatives? Formerly ALTmode. */
+  final val PatternAlternative: ContextMode       = 1 << 11
+
+  /** Are star patterns allowed? Formerly STARmode. */
+  final val StarPatterns: ContextMode             = 1 << 12
+
+  /** Are we typing the "super" in a superclass constructor call super.<init>. Formerly SUPERCONSTRmode. */
+  final val SuperInit: ContextMode                = 1 << 13
+
+  /*  Is this the second attempt to type this tree? In that case functions
+   *  may no longer be coerced with implicit views. Formerly SNDTRYmode.
+   */
+  final val SecondTry: ContextMode                = 1 << 14
+
+  /** Are we in return position? Formerly RETmode. */
+  final val ReturnExpr: ContextMode               = 1 << 15
+
+  /** Are unapplied type constructors allowed here? Formerly HKmode. */
+  final val TypeConstructorAllowed: ContextMode   = 1 << 16
+
+  /** TODO: The "sticky modes" are EXPRmode, PATTERNmode, TYPEmode.
+   *  To mimic the sticky mode behavior, when captain stickyfingers
+   *  comes around we need to propagate those modes but forget the other
+   *  context modes which were once mode bits; those being so far the
+   *  ones listed here.
+   */
+  final val FormerNonStickyModes: ContextMode = (
+    PatternAlternative | StarPatterns | SuperInit | SecondTry | ReturnExpr | TypeConstructorAllowed
+  )
+
+  final val DefaultMode: ContextMode      = MacrosEnabled
+
+  private val contextModeNameMap = Map(
+    ReportErrors           -> "ReportErrors",
+    BufferErrors           -> "BufferErrors",
+    AmbiguousErrors        -> "AmbiguousErrors",
+    ConstructorSuffix      -> "ConstructorSuffix",
+    SelfSuperCall          -> "SelfSuperCall",
+    ImplicitsEnabled       -> "ImplicitsEnabled",
+    MacrosEnabled          -> "MacrosEnabled",
+    Checking               -> "Checking",
+    ReTyping               -> "ReTyping",
+    PatternAlternative     -> "PatternAlternative",
+    StarPatterns           -> "StarPatterns",
+    SuperInit              -> "SuperInit",
+    SecondTry              -> "SecondTry",
+    TypeConstructorAllowed -> "TypeConstructorAllowed"
+  )
+}
+
+/**
+ * A value class to carry the boolean flags of a context, such as whether errors should
+ * be buffered or reported.
+ */
+final class ContextMode private (val bits: Int) extends AnyVal {
+  import ContextMode._
+
+  def &(other: ContextMode): ContextMode  = new ContextMode(bits & other.bits)
+  def |(other: ContextMode): ContextMode  = new ContextMode(bits | other.bits)
+  def &~(other: ContextMode): ContextMode = new ContextMode(bits & ~(other.bits))
+  def set(value: Boolean, mask: ContextMode) = if (value) |(mask) else &~(mask)
+
+  def inAll(required: ContextMode)        = (this & required) == required
+  def inAny(required: ContextMode)        = (this & required) != NOmode
+  def inNone(prohibited: ContextMode)     = (this & prohibited) == NOmode
+
+  override def toString =
+    if (bits == 0) "NOmode"
+    else (contextModeNameMap filterKeys inAll).values.toList.sorted mkString " "
 }
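// Editorial sketch (values and methods as defined above) of how the ContextMode bit set composes:
//   val mode = ContextMode.DefaultMode | ContextMode.ImplicitsEnabled | ContextMode.ReportErrors
//   mode.inAll(ContextMode.MacrosEnabled)                                      // true: DefaultMode includes it
//   mode.inNone(ContextMode.BufferErrors)                                      // true: never set above
//   mode.set(false, ContextMode.ReportErrors).inAll(ContextMode.ReportErrors)  // false: cleared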
diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
index 3e249e5..1f1ccbe 100644
--- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
@@ -6,8 +6,6 @@
 package scala.tools.nsc
 package typechecker
 
-import scala.language.implicitConversions
-
 /** A generic means of breaking down types into their subcomponents.
  *  Types are decomposed top down, and recognizable substructure is
  *  dispatched via self-apparently named methods.  Those methods can
@@ -37,8 +35,6 @@ trait DestructureTypes {
     def wrapSequence(nodes: List[Node]): Node
     def wrapAtom[U](value: U): Node
 
-    private implicit def liftToTerm(name: String): TermName = newTermName(name)
-
     private val openSymbols = scala.collection.mutable.Set[Symbol]()
 
     private def nodeList[T](elems: List[T], mkNode: T => Node): Node =
@@ -68,15 +64,6 @@ trait DestructureTypes {
       },
       tree.productPrefix
     )
-    def wrapSymbol(label: String, sym: Symbol): Node = {
-      if (sym eq NoSymbol) wrapEmpty
-      else atom(label, sym)
-    }
-    def wrapInfo(sym: Symbol) = sym.info match {
-      case TypeBounds(lo, hi)        => typeBounds(lo, hi)
-      case PolyType(tparams, restpe) => polyFunction(tparams, restpe)
-      case _                         => wrapEmpty
-    }
     def wrapSymbolInfo(sym: Symbol): Node = {
       if ((sym eq NoSymbol) || openSymbols(sym)) wrapEmpty
       else {
@@ -99,7 +86,6 @@ trait DestructureTypes {
     def constant(label: String, const: Constant): Node = atom(label, const)
 
     def scope(decls: Scope): Node          = node("decls", scopeMemberList(decls.toList))
-    def const[T](named: (String, T)): Node = constant(named._1, Constant(named._2))
 
     def resultType(restpe: Type): Node          = this("resultType", restpe)
     def typeParams(tps: List[Symbol]): Node     = node("typeParams", symbolList(tps))
@@ -188,14 +174,13 @@ trait DestructureTypes {
       case AntiPolyType(pre, targs)                  => product(tp, prefix(pre), typeArgs(targs))
       case ClassInfoType(parents, decls, clazz)      => product(tp, parentList(parents), scope(decls), wrapAtom(clazz))
       case ConstantType(const)                       => product(tp, constant("value", const))
-      case DeBruijnIndex(level, index, args)         => product(tp, const("level" -> level), const("index" -> index), typeArgs(args))
       case OverloadedType(pre, alts)                 => product(tp, prefix(pre), node("alts", typeList(alts map pre.memberType)))
       case RefinedType(parents, decls)               => product(tp, parentList(parents), scope(decls))
       case SingleType(pre, sym)                      => product(tp, prefix(pre), wrapAtom(sym))
       case SuperType(thistp, supertp)                => product(tp, this("this", thistp), this("super", supertp))
       case ThisType(clazz)                           => product(tp, wrapAtom(clazz))
       case TypeVar(inst, constr)                     => product(tp, this("inst", inst), typeConstraint(constr))
-      case AnnotatedType(annotations, underlying, _) => annotatedType(annotations, underlying)
+      case AnnotatedType(annotations, underlying)    => annotatedType(annotations, underlying)
       case ExistentialType(tparams, underlying)      => polyFunction(tparams, underlying)
       case PolyType(tparams, restpe)                 => polyFunction(tparams, restpe)
       case MethodType(params, restpe)                => monoFunction(params, restpe)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 25a1228..69ae6ec 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -17,12 +17,7 @@ import scala.collection.{ mutable, immutable }
  */
 abstract class Duplicators extends Analyzer {
   import global._
-  import definitions.{ AnyRefClass, AnyValClass }
-
-  def retyped(context: Context, tree: Tree): Tree = {
-    resetClassOwners
-    (newBodyDuplicator(context)).typed(tree)
-  }
+  import definitions._
 
   /** Retype the given tree in the given context. Use this method when retyping
    *  a method in a different class. The typer will replace references to the this of
@@ -33,7 +28,7 @@ abstract class Duplicators extends Analyzer {
     if (oldThis ne newThis) {
       oldClassOwner = oldThis
       newClassOwner = newThis
-    } else resetClassOwners
+    } else resetClassOwners()
 
     envSubstitution = new SubstSkolemsTypeMap(env.keysIterator.toList, env.valuesIterator.toList)
     debuglog("retyped with env: " + env)
@@ -79,22 +74,19 @@ abstract class Duplicators extends Analyzer {
 
       override def mapOver(tpe: Type): Type = tpe match {
         case TypeRef(NoPrefix, sym, args) if sym.isTypeParameterOrSkolem =>
-          var sym1 = context.scope.lookup(sym.name)
-          if (sym1 eq NoSymbol) {
-            // try harder (look in outer scopes)
-            // with virtpatmat, this can happen when the sym is referenced in the scope of a LabelDef but is defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen)
-            BodyDuplicator.super.silent(_.typedType(Ident(sym.name))) match {
-              case SilentResultValue(t) =>
-                sym1 = t.symbol
-                debuglog("fixed by trying harder: "+(sym, sym1, context))
-              case _ =>
-            }
-          }
-//          assert(sym1 ne NoSymbol, tpe)
-          if ((sym1 ne NoSymbol) && (sym1 ne sym)) {
-            debuglog("fixing " + sym + " -> " + sym1)
+          val sym1 = (
+            context.scope lookup sym.name orElse {
+              // try harder (look in outer scopes)
+              // with virtpatmat, this can happen when the sym is referenced in the scope of a LabelDef but
+              // is defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen)
+              BodyDuplicator.super.silent(_ typedType Ident(sym.name)).fold(NoSymbol: Symbol)(_.symbol)
+            } filter (_ ne sym)
+          )
+          if (sym1.exists) {
+            debuglog(s"fixing $sym -> $sym1")
             typeRef(NoPrefix, sym1, mapOverArgs(args, sym1.typeParams))
-          } else super.mapOver(tpe)
+          }
+          else super.mapOver(tpe)
 
         case TypeRef(pre, sym, args) =>
           val newsym = updateSym(sym)
@@ -144,8 +136,8 @@ abstract class Duplicators extends Analyzer {
         sym
 
     private def invalidate(tree: Tree, owner: Symbol = NoSymbol) {
-      debuglog("attempting to invalidate " + tree.symbol)
-      if (tree.isDef && tree.symbol != NoSymbol) {
+      debuglog(s"attempting to invalidate symbol = ${tree.symbol}")
+      if ((tree.isDef || tree.isInstanceOf[Function]) && tree.symbol != NoSymbol) {
         debuglog("invalid " + tree.symbol)
         invalidSyms(tree.symbol) = tree
 
@@ -162,7 +154,7 @@ abstract class Duplicators extends Analyzer {
           case vdef @ ValDef(mods, name, _, rhs) if mods.hasFlag(Flags.LAZY) =>
             debuglog("ValDef " + name + " sym.info: " + vdef.symbol.info)
             invalidSyms(vdef.symbol) = vdef
-            val newowner = if (owner != NoSymbol) owner else context.owner
+            val newowner = owner orElse context.owner
             val newsym = vdef.symbol.cloneSymbol(newowner)
             newsym.setInfo(fixType(vdef.symbol.info))
             vdef.symbol = newsym
@@ -174,6 +166,11 @@ abstract class Duplicators extends Analyzer {
             invalidateAll(tparams ::: vparamss.flatten)
             tree.symbol = NoSymbol
 
+          case Function(vparams, _) =>
+            // invalidate parameters
+            invalidateAll(vparams)
+            tree.symbol = NoSymbol
+
           case _ =>
             tree.symbol = NoSymbol
         }
@@ -184,17 +181,6 @@ abstract class Duplicators extends Analyzer {
       stats.foreach(invalidate(_, owner))
     }
 
-    private def inspectTpe(tpe: Type) = {
-      tpe match {
-        case MethodType(_, res) =>
-          res + ", " + res.bounds.hi + ", " + (res.bounds.hi match {
-            case TypeRef(_, _, args) if (args.length > 0) => args(0) + ", " + args(0).bounds.hi
-            case _ => "non-tref: " + res.bounds.hi.getClass
-          })
-        case _ =>
-      }
-    }
-
     /** Optionally cast this tree into some other type, if required.
      *  Unless overridden, just returns the tree.
      */
@@ -214,10 +200,10 @@ abstract class Duplicators extends Analyzer {
      *  their symbols are recreated ad-hoc and their types are fixed inline, instead of letting the
      *  namer/typer handle them, or Idents that refer to them.
      */
-    override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+    override def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
       debuglog("typing " + tree + ": " + tree.tpe + ", " + tree.getClass)
       val origtreesym = tree.symbol
-      if (tree.hasSymbol && tree.symbol != NoSymbol
+      if (tree.hasSymbolField && tree.symbol != NoSymbol
           && !tree.symbol.isLabel  // labels cannot be retyped by the type checker as LabelDef has no ValDef/return type trees
           && invalidSyms.isDefinedAt(tree.symbol)) {
         debuglog("removed symbol " + tree.symbol)
@@ -227,40 +213,39 @@ abstract class Duplicators extends Analyzer {
       tree match {
         case ttree @ TypeTree() =>
           // log("fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol)
-          ttree.tpe = fixType(ttree.tpe)
-          ttree
+          ttree modifyType fixType
 
         case Block(stats, res) =>
           debuglog("invalidating block")
           invalidateAll(stats)
           invalidate(res)
-          tree.tpe = null
-          super.typed(tree, mode, pt)
+          super.typed(tree.clearType(), mode, pt)
 
         case ClassDef(_, _, _, tmpl @ Template(parents, _, stats)) =>
           // log("invalidating classdef " + tree)
           tmpl.symbol = tree.symbol.newLocalDummy(tree.pos)
           invalidateAll(stats, tree.symbol)
-          tree.tpe = null
-          super.typed(tree, mode, pt)
+          super.typed(tree.clearType(), mode, pt)
 
         case ddef @ DefDef(_, _, _, _, tpt, rhs) =>
-          ddef.tpt.tpe = fixType(ddef.tpt.tpe)
-          ddef.tpe = null
-          super.typed(ddef, mode, pt)
+          ddef.tpt modifyType fixType
+          super.typed(ddef.clearType(), mode, pt)
+
+        case fun: Function =>
+          debuglog("Clearing the type and retyping Function: " + fun)
+          super.typed(fun.clearType, mode, pt)
 
         case vdef @ ValDef(mods, name, tpt, rhs) =>
           // log("vdef fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol + " and " + invalidSyms)
           //if (mods.hasFlag(Flags.LAZY)) vdef.symbol.resetFlag(Flags.MUTABLE) // Martin to Iulian: lazy vars can now appear because they are no longer boxed; Please check that deleting this statement is OK.
-          vdef.tpt.tpe = fixType(vdef.tpt.tpe)
-          vdef.tpe = null
-          super.typed(vdef, mode, pt)
+          vdef.tpt modifyType fixType
+          super.typed(vdef.clearType(), mode, pt)
 
         case ldef @ LabelDef(name, params, rhs) =>
           // log("label def: " + ldef)
           // in case the rhs contains any definitions -- TODO: is this necessary?
           invalidate(rhs)
-          ldef.tpe = null
+          ldef.clearType()
 
           // is this LabelDef generated by tailcalls?
           val isTailLabel = (ldef.params.length >= 1) && (ldef.params.head.name == nme.THIS)
@@ -278,27 +263,23 @@ abstract class Duplicators extends Analyzer {
 
           val params1 = params map newParam
           val rhs1 = (new TreeSubstituter(params map (_.symbol), params1) transform rhs) // TODO: duplicate?
-          rhs1.tpe = null
 
-          super.typed(treeCopy.LabelDef(tree, name, params1, rhs1), mode, pt)
+          super.typed(treeCopy.LabelDef(tree, name, params1, rhs1.clearType()), mode, pt)
 
         case Bind(name, _) =>
           // log("bind: " + tree)
           invalidate(tree)
-          tree.tpe = null
-          super.typed(tree, mode, pt)
+          super.typed(tree.clearType(), mode, pt)
 
         case Ident(_) if tree.symbol.isLabel =>
           debuglog("Ident to labeldef " + tree + " switched to ")
           tree.symbol = updateSym(tree.symbol)
-          tree.tpe = null
-          super.typed(tree, mode, pt)
+          super.typed(tree.clearType(), mode, pt)
 
         case Ident(_) if (origtreesym ne null) && origtreesym.isLazy =>
           debuglog("Ident to a lazy val " + tree + ", " + tree.symbol + " updated to " + origtreesym)
           tree.symbol = updateSym(origtreesym)
-          tree.tpe = null
-          super.typed(tree, mode, pt)
+          super.typed(tree.clearType(), mode, pt)
 
         case Select(th @ This(_), sel) if (oldClassOwner ne null) && (th.symbol == oldClassOwner) =>
           // We use the symbol name instead of the tree name because the symbol
@@ -320,9 +301,15 @@ abstract class Duplicators extends Analyzer {
                 case ((alt, tpe)) :: Nil =>
                   log(s"Arrested overloaded type in Duplicators, narrowing to ${alt.defStringSeenAs(tpe)}\n  Overload was: $memberString")
                   Select(This(newClassOwner), alt)
-                case _ =>
-                  log(s"Could not disambiguate $memberString in Duplicators. Attempting name-based selection, but this may not end well...")
-                  nameSelection
+                case xs =>
+                  alts filter (alt => (alt.paramss corresponds tree.symbol.paramss)(_.size == _.size)) match {
+                    case alt :: Nil =>
+                      log(s"Resorted to parameter list arity to disambiguate to $alt\n  Overload was: $memberString")
+                      Select(This(newClassOwner), alt)
+                    case _ =>
+                      log(s"Could not disambiguate $memberTypes. Attempting name-based selection, but we may crash later.")
+                      nameSelection
+                  }
               }
             }
             else nameSelection
@@ -351,7 +338,7 @@ abstract class Duplicators extends Analyzer {
           super.typed(atPos(tree.pos)(tree1))
 */
         case Match(scrut, cases) =>
-          val scrut1   = typed(scrut, EXPRmode | BYVALmode, WildcardType)
+          val scrut1   = typedByValueExpr(scrut)
           val scrutTpe = scrut1.tpe.widen
           val cases1 = {
             if (scrutTpe.isFinalType) cases filter {
@@ -366,8 +353,8 @@ abstract class Duplicators extends Analyzer {
             // Without this, AnyRef specializations crash on patterns like
             //   case _: Boolean => ...
             // Not at all sure this is safe.
-            else if (scrutTpe <:< AnyRefClass.tpe)
-              cases filterNot (_.pat.tpe <:< AnyValClass.tpe)
+            else if (scrutTpe <:< AnyRefTpe)
+              cases filterNot (_.pat.tpe <:< AnyValTpe)
             else
               cases
           }
@@ -381,12 +368,11 @@ abstract class Duplicators extends Analyzer {
         case _ =>
           debuglog("Duplicators default case: " + tree.summaryString)
           debuglog(" ---> " + tree)
-          if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
+          if (tree.hasSymbolField && tree.symbol.safeOwner == AnyClass)
             tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
-          }
+
           val ntree = castType(tree, pt)
-          val res = super.typed(ntree, mode, pt)
-          res
+          super.typed(ntree, mode, pt)
       }
     }
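The Duplicators hunks above consistently swap the old two-step idiom, assign `tree.tpe = null` and then retype, for the fluent `clearType()` helper, which drops the cached type and hands the same tree straight to `super.typed`. Below is a minimal, self-contained sketch of that pattern; `ClearTypeSketch`, `Node` and `retype` are invented names for illustration only and are not the compiler's `Tree` API.

object ClearTypeSketch {
  // Hypothetical stand-in for a typed AST node; not the compiler's Tree.
  final class Node(var tpe: String) {
    // Drop the cached type and return this node, so the caller can chain
    // straight into a retyping call -- the shape the diff above adopts.
    def clearType(): this.type = { tpe = null; this }
  }

  // Hypothetical stand-in for super.typed: recomputes the node's type.
  def retype(n: Node): Node = { n.tpe = "retyped"; n }

  def main(args: Array[String]): Unit = {
    val n = new Node("stale")
    // Old shape:  n.tpe = null; retype(n)
    // New shape:  retype(n.clearType())
    println(retype(n.clearType()).tpe)   // prints "retyped"
  }
}

The fluent form keeps the invalidation and the retyping in a single expression, which is why most of the hunks above shrink by a line.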
 
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index 57b9dfe..7092f00 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -33,7 +33,7 @@ trait EtaExpansion { self: Analyzer =>
   }
 
   /** <p>
-   *    Expand partial function applications of type <code>type</code>.
+   *    Expand partial function applications of type `type`.
    *  </p><pre>
    *  p.f(es_1)...(es_n)
    *     ==>  {
@@ -56,11 +56,8 @@ trait EtaExpansion { self: Analyzer =>
     }
     val defs = new ListBuffer[Tree]
 
-    /** Append to <code>defs</code> value definitions for all non-stable
-     *  subexpressions of the function application <code>tree</code>.
-     *
-     *  @param tree ...
-     *  @return     ...
+    /* Append to `defs` value definitions for all non-stable
+     * subexpressions of the function application `tree`.
      */
     def liftoutPrefix(tree: Tree): Tree = {
       def liftout(tree: Tree, byName: Boolean): Tree =
@@ -97,12 +94,12 @@ trait EtaExpansion { self: Analyzer =>
             // with repeated params, there might be more or fewer args than params
             liftout(arg, byName(i).getOrElse(false))
           }
-          treeCopy.Apply(tree, liftoutPrefix(fn), newArgs) setType null
+          treeCopy.Apply(tree, liftoutPrefix(fn), newArgs).clearType()
         case TypeApply(fn, args) =>
-          treeCopy.TypeApply(tree, liftoutPrefix(fn), args) setType null
+          treeCopy.TypeApply(tree, liftoutPrefix(fn), args).clearType()
         case Select(qual, name) =>
           val name = tree.symbol.name // account for renamed imports, SI-7233
-          treeCopy.Select(tree, liftout(qual, false), name) setSymbol NoSymbol setType null
+          treeCopy.Select(tree, liftout(qual, byName = false), name).clearType() setSymbol NoSymbol
         case Ident(name) =>
           tree
       }
@@ -110,8 +107,7 @@ trait EtaExpansion { self: Analyzer =>
       tree1
     }
 
-    /** Eta-expand lifted tree.
-     */
+    /* Eta-expand lifted tree. */
     def expand(tree: Tree, tpe: Type): Tree = tpe match {
       case mt @ MethodType(paramSyms, restpe) if !mt.isImplicit =>
         val params: List[(ValDef, Boolean)] = paramSyms.map {
@@ -119,7 +115,7 @@ trait EtaExpansion { self: Analyzer =>
             val origTpe = sym.tpe
             val isRepeated = definitions.isRepeatedParamType(origTpe)
             // SI-4176 Don't leak A* in eta-expanded function types. See t4176b.scala
-            val droppedStarTpe = if (settings.etaExpandKeepsStar.value) origTpe else dropRepeatedParamType(origTpe)
+            val droppedStarTpe = if (settings.etaExpandKeepsStar) origTpe else dropIllegalStarTypes(origTpe)
             val valDef = ValDef(Modifiers(SYNTHETIC | PARAM), sym.name.toTermName, TypeTree(droppedStarTpe), EmptyTree)
             (valDef, isRepeated)
         }
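The EtaExpansion hunk is mostly mechanical (doc-comment cleanup, `clearType()`, a named boolean argument), but the SI-4176 line swaps `dropRepeatedParamType` for `dropIllegalStarTypes` so a repeated parameter `A*` does not leak into the eta-expanded function type. A user-level sketch of the effect follows, assuming an ordinary 2.11-era scalac; `EtaExpansionSketch`, `add` and `sum` are illustrative names only.

object EtaExpansionSketch {
  def add(x: Int, y: Int): Int = x + y
  def sum(xs: Int*): Int = xs.sum

  def main(args: Array[String]): Unit = {
    // Eta-expansion lifts a method into a function value,
    // conceptually (x, y) => add(x, y).
    val f: (Int, Int) => Int = add _
    // With the SI-4176 behaviour, the repeated parameter Int* is exposed
    // as Seq[Int] in the expanded function type rather than as Int*.
    val g: Seq[Int] => Int = sum _
    println(f(1, 2))          // 3
    println(g(Seq(1, 2, 3)))  // 6
  }
}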
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 35a4461..d87090f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -8,7 +8,8 @@
 //todo: disallow C#D in superclass
 //todo: treat :::= correctly
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package typechecker
 
 import scala.annotation.tailrec
@@ -16,7 +17,7 @@ import scala.collection.{ mutable, immutable }
 import mutable.{ LinkedHashMap, ListBuffer }
 import scala.util.matching.Regex
 import symtab.Flags._
-import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.{TriState, Statistics}
 import scala.language.implicitConversions
 
 /** This trait provides methods to find various kinds of implicits.
@@ -30,11 +31,11 @@ trait Implicits {
   import global._
   import definitions._
   import ImplicitsStats._
-  import typeDebug.{ ptTree, ptBlock, ptLine }
-  import global.typer.{ printTyping, deindentTyping, indentTyping, printInference }
+  import typingStack.{ printTyping }
+  import typeDebug._
 
   def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult =
-    inferImplicit(tree, pt, reportAmbiguous, isView, context, true, tree.pos)
+    inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent = true, tree.pos)
 
   def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean): SearchResult =
     inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent, tree.pos)
@@ -59,40 +60,35 @@ trait Implicits {
    *  @return                        A search result
    */
   def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean, pos: Position): SearchResult = {
-    printInference("[infer %s] %s with pt=%s in %s".format(
-      if (isView) "view" else "implicit",
-      tree, pt, context.owner.enclClass)
-    )
-    printTyping(
-      ptBlock("infer implicit" + (if (isView) " view" else ""),
-        "tree"        -> tree,
-        "pt"          -> pt,
-        "undetparams" -> context.outer.undetparams
-      )
-    )
-    indentTyping()
-
+    // Note that the isInvalidConversionTarget seems to make a lot more sense right here, before all the
+    // work is performed, than at the point where it presently exists.
+    val shouldPrint     = printTypings && !context.undetparams.isEmpty
     val rawTypeStart    = if (Statistics.canEnable) Statistics.startCounter(rawTypeImpl) else null
     val findMemberStart = if (Statistics.canEnable) Statistics.startCounter(findMemberImpl) else null
     val subtypeStart    = if (Statistics.canEnable) Statistics.startCounter(subtypeImpl) else null
     val start           = if (Statistics.canEnable) Statistics.startTimer(implicitNanos) else null
-    if (printInfers && !tree.isEmpty && !context.undetparams.isEmpty)
-      printTyping("typing implicit: %s %s".format(tree, context.undetparamsString))
+    if (shouldPrint)
+      typingStack.printTyping(tree, "typing implicit: %s %s".format(tree, context.undetparamsString))
     val implicitSearchContext = context.makeImplicit(reportAmbiguous)
     val result = new ImplicitSearch(tree, pt, isView, implicitSearchContext, pos).bestImplicit
-    if ((result.isFailure || !settings.Xdivergence211.value) && saveAmbiguousDivergent && implicitSearchContext.hasErrors) {
-      context.updateBuffer(implicitSearchContext.errBuffer.filter(err => err.kind == ErrorKinds.Ambiguous || err.kind == ErrorKinds.Divergent))
-      debugwarn("update buffer: " + implicitSearchContext.errBuffer)
+    if (result.isFailure && saveAmbiguousDivergent && implicitSearchContext.hasErrors) {
+      context.updateBuffer(implicitSearchContext.reportBuffer.errors.collect {
+        case dte: DivergentImplicitTypeError => dte
+        case ate: AmbiguousImplicitTypeError => ate
+      })
+      debuglog("update buffer: " + implicitSearchContext.reportBuffer.errors)
     }
-    printInference("[infer implicit] inferred " + result)
-    context.undetparams = context.undetparams filterNot result.subst.from.contains
+    // SI-7944 undetermined type parameters that result from inference within typedImplicit land in
+    //         `implicitSearchContext.undetparams`, *not* in `context.undetparams`
+    //         Here, we copy them up to parent context (analogously to the way the errors are copied above),
+    //         and then filter out any which *were* inferred and are part of the substitutor in the implicit search result.
+    context.undetparams = ((context.undetparams ++ result.undetparams) filterNot result.subst.from.contains).distinct
 
     if (Statistics.canEnable) Statistics.stopTimer(implicitNanos, start)
     if (Statistics.canEnable) Statistics.stopCounter(rawTypeImpl, rawTypeStart)
     if (Statistics.canEnable) Statistics.stopCounter(findMemberImpl, findMemberStart)
     if (Statistics.canEnable) Statistics.stopCounter(subtypeImpl, subtypeStart)
-    deindentTyping()
-    printTyping("Implicit search yielded: "+ result)
+
     result
   }
 
@@ -101,24 +97,14 @@ trait Implicits {
   def inferImplicit(tree: Tree, pt: Type, isView: Boolean, context: Context, silent: Boolean, withMacrosDisabled: Boolean, pos: Position, onError: (Position, String) => Unit): Tree = {
     val wrapper1 = if (!withMacrosDisabled) (context.withMacrosEnabled[SearchResult] _) else (context.withMacrosDisabled[SearchResult] _)
     def wrapper(inference: => SearchResult) = wrapper1(inference)
-    def fail(reason: Option[String]) = {
-      if (!silent) {
-        if (context.hasErrors) onError(context.errBuffer.head.errPos, context.errBuffer.head.errMsg)
-        else onError(pos, reason getOrElse "implicit search has failed. to find out the reason, turn on -Xlog-implicits")
-      }
-      EmptyTree
-    }
-    try {
-      wrapper(inferImplicit(tree, pt, reportAmbiguous = true, isView = isView, context = context, saveAmbiguousDivergent = !silent, pos = pos)) match {
-        case failure if failure.tree.isEmpty => fail(None)
-        case success => success.tree
-      }
-    } catch {
-      case ex: DivergentImplicit =>
-        if (settings.Xdivergence211.value)
-          debugwarn("this shouldn't happen. DivergentImplicit exception has been thrown with -Xdivergence211 turned on: "+ex)
-        fail(Some("divergent implicit expansion"))
+    val result = wrapper(inferImplicit(tree, pt, reportAmbiguous = true, isView = isView, context = context, saveAmbiguousDivergent = !silent, pos = pos))
+    if (result.isFailure && !silent) {
+      val err = context.firstError
+      val errPos = err.map(_.errPos).getOrElse(pos)
+      val errMsg = err.map(_.errMsg).getOrElse("implicit search has failed. to find out the reason, turn on -Xlog-implicits")
+      onError(errPos, errMsg)
     }
+    result.tree
   }
 
   /** Find all views from type `tp` (in which `tpars` are free)
@@ -137,7 +123,7 @@ trait Implicits {
     val tvars = tpars map (TypeVar untouchable _)
     val tpSubsted = tp.subst(tpars, tvars)
 
-    val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyClass.tpe), true, context.makeImplicit(false))
+    val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyTpe), true, context.makeImplicit(reportAmbiguousErrors = false))
 
     search.allImplicitsPoly(tvars)
   }
@@ -149,6 +135,12 @@ trait Implicits {
   private val implicitsCache = new LinkedHashMap[Type, Infoss]
   private val infoMapCache = new LinkedHashMap[Symbol, InfoMap]
   private val improvesCache = perRunCaches.newMap[(ImplicitInfo, ImplicitInfo), Boolean]()
+  private val implicitSearchId = { var id = 1 ; () => try id finally id += 1 }
+
+  private def isInvalidConversionSource(tpe: Type): Boolean = tpe match {
+    case Function1(in, _) => in <:< NullClass.tpe
+    case _                => false
+  }
 
   def resetImplicits() {
     implicitsCache.clear()
@@ -157,7 +149,7 @@ trait Implicits {
   }
 
   /* Map a polytype to one in which all type parameters and argument-dependent types are replaced by wildcards.
-   * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate DebruijnIndex types
+   * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate de Bruijn index types
    * when checking whether `b` is a valid implicit, as we haven't even searched a value for the implicit arg `x`,
    * so we have to approximate (otherwise it is excluded a priori).
    */
@@ -170,29 +162,28 @@ trait Implicits {
    *  @param  tree    The tree representing the implicit
    *  @param  subst   A substituter that represents the undetermined type parameters
    *                  that were instantiated by the winning implicit.
+   *  @param undetparams undetermined type parameters
    */
-  class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter) {
+  class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter, val undetparams: List[Symbol]) {
     override def toString = "SearchResult(%s, %s)".format(tree,
       if (subst.isEmpty) "" else subst)
 
     def isFailure          = false
     def isAmbiguousFailure = false
-    // only used when -Xdivergence211 is turned on
     def isDivergent        = false
     final def isSuccess    = !isFailure
   }
 
-  lazy val SearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
+  lazy val SearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter, Nil) {
     override def isFailure = true
   }
 
-  // only used when -Xdivergence211 is turned on
-  lazy val DivergentSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
+  lazy val DivergentSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter, Nil) {
     override def isFailure   = true
     override def isDivergent = true
   }
 
-  lazy val AmbiguousSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
+  lazy val AmbiguousSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter, Nil) {
     override def isFailure          = true
     override def isAmbiguousFailure = true
   }
@@ -204,6 +195,7 @@ trait Implicits {
    */
   class ImplicitInfo(val name: Name, val pre: Type, val sym: Symbol) {
     private var tpeCache: Type = null
+    private var isCyclicOrErroneousCache: TriState = TriState.Unknown
 
     /** Computes member type of implicit from prefix `pre` (cached). */
     def tpe: Type = {
@@ -211,7 +203,12 @@ trait Implicits {
       tpeCache
     }
 
-    def isCyclicOrErroneous =
+    def isCyclicOrErroneous: Boolean = {
+      if (!isCyclicOrErroneousCache.isKnown) isCyclicOrErroneousCache = computeIsCyclicOrErroneous
+      isCyclicOrErroneousCache.booleanValue
+    }
+
+    private[this] final def computeIsCyclicOrErroneous =
       try sym.hasFlag(LOCKED) || containsError(tpe)
       catch { case _: CyclicReference => true }
 
@@ -226,20 +223,13 @@ trait Implicits {
       case NullaryMethodType(restpe) =>
         containsError(restpe)
       case mt @ MethodType(_, restpe) =>
-        (mt.paramTypes exists typeIsError) || containsError(restpe)
+        // OPT avoiding calling `mt.paramTypes` which creates a new list.
+        (mt.params exists symTypeIsError) || containsError(restpe)
       case _ =>
         tp.isError
     }
 
-    /** Todo reconcile with definition of stability given in Types.scala */
-    private def isStable(tp: Type): Boolean = tp match {
-     case TypeRef(pre, sym, _) =>
-       sym.isPackageClass ||
-       sym.isModuleClass && isStable(pre) /*||
-       sym.isAliasType && isStable(tp.normalize)*/
-     case _ => tp.isStable
-    }
-    def isStablePrefix = isStable(pre)
+    def isStablePrefix = pre.isStable
 
     override def equals(other: Any) = other match {
       case that: ImplicitInfo =>
@@ -249,7 +239,10 @@ trait Implicits {
       case _ => false
     }
     override def hashCode = name.## + pre.## + sym.##
-    override def toString = name + ": " + tpe
+    override def toString = (
+      if (tpeCache eq null) name + ": ?"
+      else name + ": " + tpe
+    )
   }
 
   /** A class which is used to track pending implicits to prevent infinite implicit searches.
@@ -281,16 +274,13 @@ trait Implicits {
   object HasMember {
     private val hasMemberCache = perRunCaches.newMap[Name, Type]()
     def apply(name: Name): Type = hasMemberCache.getOrElseUpdate(name, memberWildcardType(name, WildcardType))
-    def unapply(pt: Type): Option[Name] = pt match {
-      case RefinedType(List(WildcardType), Scope(sym)) if sym.tpe == WildcardType => Some(sym.name)
-      case _ => None
     }
-  }
 
   /** An extractor for types of the form ? { name: (? >: argtpe <: Any*)restp }
    */
   object HasMethodMatching {
-    val dummyMethod = NoSymbol.newTermSymbol(newTermName("typer$dummy"))
+    val dummyMethod = NoSymbol.newTermSymbol("typer$dummy") setInfo NullaryMethodType(AnyTpe)
+
     def templateArgType(argtpe: Type) = new BoundedWildcardType(TypeBounds.lower(argtpe))
 
     def apply(name: Name, argtpes: List[Type], restpe: Type): Type = {
@@ -317,6 +307,9 @@ trait Implicits {
    */
   object Function1 {
     val Sym = FunctionClass(1)
+    // It is tempting to think that this should be inspecting "tp baseType Sym"
+    // rather than tp. See test case run/t8280 and the commit message which
+    // accompanies it for explanation why that isn't done.
     def unapply(tp: Type) = tp match {
       case TypeRef(_, Sym, arg1 :: arg2 :: _) => Some((arg1, arg2))
       case _                                  => None
@@ -332,27 +325,33 @@ trait Implicits {
    *                          (useful when we infer synthetic stuff and pass EmptyTree in the `tree` argument)
    *                          If it's set to NoPosition, then position-based services will use `tree.pos`
    */
-  class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context, pos0: Position = NoPosition)
-    extends Typer(context0) with ImplicitsContextErrors {
-      printTyping(
-        ptBlock("new ImplicitSearch",
-          "tree"        -> tree,
-          "pt"          -> pt,
-          "isView"      -> isView,
-          "context0"    -> context0,
-          "undetparams" -> context.outer.undetparams
-        )
-      )
-//    assert(tree.isEmpty || tree.pos.isDefined, tree)
+  class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context, pos0: Position = NoPosition) extends Typer(context0) with ImplicitsContextErrors {
+    val searchId = implicitSearchId()
+    private def typingLog(what: String, msg: => String) =
+      typingStack.printTyping(tree, f"[search #$searchId] $what $msg")
+
+    import infer._
+    if (Statistics.canEnable) Statistics.incCounter(implicitSearchCount)
+
+    /** The type parameters to instantiate */
+    val undetParams = if (isView) Nil else context.outer.undetparams
+    val wildPt = approximate(pt)
+
+    private val runDefinitions = currentRun.runDefinitions
+    import runDefinitions._
+
+    def undet_s = if (undetParams.isEmpty) "" else undetParams.mkString(" inferring ", ", ", "")
+    def tree_s = typeDebug ptTree tree
+    def ctx_s = fullSiteString(context)
+    typingLog("start", s"`$tree_s`$undet_s, searching for adaptation to pt=$pt $ctx_s")
+
     def pos = if (pos0 != NoPosition) pos0 else tree.pos
 
     def failure(what: Any, reason: String, pos: Position = this.pos): SearchResult = {
-      if (settings.XlogImplicits.value)
+      if (settings.XlogImplicits)
         reporter.echo(pos, what+" is not a valid implicit value for "+pt+" because:\n"+reason)
       SearchFailure
     }
-
-    import infer._
     /** Is implicit info `info1` better than implicit info `info2`?
      */
     def improves(info1: ImplicitInfo, info2: ImplicitInfo) = {
@@ -360,7 +359,7 @@ trait Implicits {
       (info2 == NoImplicitInfo) ||
       (info1 != NoImplicitInfo) && {
         if (info1.sym.isStatic && info2.sym.isStatic) {
-          improvesCache get (info1, info2) match {
+          improvesCache get ((info1, info2)) match {
             case Some(b) => if (Statistics.canEnable) Statistics.incCounter(improvesCachedCount); b
             case None =>
               val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym)
@@ -388,12 +387,12 @@ trait Implicits {
      *  if one or both are intersection types with a pair of overlapping parent types.
      */
     private def dominates(dtor: Type, dted: Type): Boolean = {
-      def core(tp: Type): Type = tp.normalize match {
-        case RefinedType(parents, defs) => intersectionType(parents map core, tp.typeSymbol.owner)
-        case AnnotatedType(annots, tp, selfsym) => core(tp)
-        case ExistentialType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.bounds.hi)))
-        case PolyType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.bounds.hi)))
-        case _ => tp
+      def core(tp: Type): Type = tp.dealiasWiden match {
+        case RefinedType(parents, defs)         => intersectionType(parents map core, tp.typeSymbol.owner)
+        case AnnotatedType(annots, tp)          => core(tp)
+        case ExistentialType(tparams, result)   => core(result).subst(tparams, tparams map (t => core(t.info.bounds.hi)))
+        case PolyType(tparams, result)          => core(result).subst(tparams, tparams map (t => core(t.info.bounds.hi)))
+        case _                                  => tp
       }
       def stripped(tp: Type): Type = {
         // `t.typeSymbol` returns the symbol of the normalized type. If that normalized type
@@ -402,47 +401,36 @@ trait Implicits {
         val syms = for (t <- tp; if t.typeSymbol.isTypeParameter) yield t.typeSymbol
         deriveTypeWithWildcards(syms.distinct)(tp)
       }
-      def sum(xs: List[Int]) = (0 /: xs)(_ + _)
-      def complexity(tp: Type): Int = tp.normalize match {
-        case NoPrefix =>
-          0
-        case SingleType(pre, sym) =>
-          if (sym.isPackage) 0 else complexity(tp.normalize.widen)
-        case TypeRef(pre, sym, args) =>
-          complexity(pre) + sum(args map complexity) + 1
-        case RefinedType(parents, _) =>
-          sum(parents map complexity) + 1
-        case _ =>
-          1
+      def complexity(tp: Type): Int = tp.dealias match {
+        case NoPrefix                => 0
+        case SingleType(pre, sym)    => if (sym.hasPackageFlag) 0 else complexity(tp.dealiasWiden)
+        case ThisType(sym)           => if (sym.hasPackageFlag) 0 else 1
+        case TypeRef(pre, sym, args) => complexity(pre) + (args map complexity).sum + 1
+        case RefinedType(parents, _) => (parents map complexity).sum + 1
+        case _                       => 1
       }
       def overlaps(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
         case (RefinedType(parents, _), _) => parents exists (overlaps(_, tp2))
         case (_, RefinedType(parents, _)) => parents exists (overlaps(tp1, _))
-        case _ => tp1.typeSymbol == tp2.typeSymbol
+        case _                            => tp1.typeSymbol == tp2.typeSymbol
       }
       val dtor1 = stripped(core(dtor))
       val dted1 = stripped(core(dted))
       overlaps(dtor1, dted1) && (dtor1 =:= dted1 || complexity(dtor1) > complexity(dted1))
     }
 
-    if (Statistics.canEnable) Statistics.incCounter(implicitSearchCount)
-
-    /** The type parameters to instantiate */
-    val undetParams = if (isView) List() else context.outer.undetparams
-
     /** The expected type with all undetermined type parameters replaced with wildcards. */
     def approximate(tp: Type) = deriveTypeWithWildcards(undetParams)(tp)
-    val wildPt = approximate(pt)
 
     /** Try to construct a typed tree from given implicit info with given
      *  expected type.
      *  Detect infinite search trees for implicits.
      *
-     *  @param info    The given implicit info describing the implicit definition
-     *  @param isLocal Is the implicit in the local scope of the call site?
-     *  @pre           `info.tpe` does not contain an error
+     *  @param info              The given implicit info describing the implicit definition
+     *  @param isLocalToCallsite Is the implicit in the local scope of the call site?
+     *  @pre `info.tpe` does not contain an error
      */
-    private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean, isLocal: Boolean): SearchResult = {
+    private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean, isLocalToCallsite: Boolean): SearchResult = {
       // SI-7167 let implicit macros decide what amounts for a divergent implicit search
       // imagine a macro writer which wants to synthesize a complex implicit Complex[T] by making recursive calls to Complex[U] for its parts
       // e.g. we have `class Foo(val bar: Bar)` and `class Bar(val x: Int)`
@@ -458,45 +446,21 @@ trait Implicits {
       (context.openImplicits find { case OpenImplicit(info, tp, tree1) => !info.sym.isMacro && tree1.symbol == tree.symbol && dominates(pt, tp)}) match {
          case Some(pending) =>
            //println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
-           if (settings.Xdivergence211.value) DivergentSearchFailure
-           else throw DivergentImplicit
+           DivergentSearchFailure
          case None =>
-           def pre211DivergenceLogic() = {
            try {
              context.openImplicits = OpenImplicit(info, pt, tree) :: context.openImplicits
              // println("  "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG
-             typedImplicit0(info, ptChecked, isLocal)
-           } catch {
-             case ex: DivergentImplicit =>
+             val result = typedImplicit0(info, ptChecked, isLocalToCallsite)
+             if (result.isDivergent) {
                //println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
-               if (context.openImplicits.tail.isEmpty) {
-                 if (!pt.isErroneous && !info.sym.isMacro)
-                   DivergingImplicitExpansionError(tree, pt, info.sym)(context)
-                 SearchFailure
-               } else {
-                 throw DivergentImplicit
-               }
+               if (context.openImplicits.tail.isEmpty && !pt.isErroneous)
+                 DivergingImplicitExpansionError(tree, pt, info.sym)(context)
+             }
+             result
            } finally {
              context.openImplicits = context.openImplicits.tail
            }
-           }
-           def post211DivergenceLogic() = {
-             try {
-               context.openImplicits = OpenImplicit(info, pt, tree) :: context.openImplicits
-               // println("  "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG
-               val result = typedImplicit0(info, ptChecked, isLocal)
-               if (result.isDivergent) {
-                 //println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
-                 if (context.openImplicits.tail.isEmpty && !pt.isErroneous)
-                   DivergingImplicitExpansionError(tree, pt, info.sym)(context)
-               }
-               result
-             } finally {
-               context.openImplicits = context.openImplicits.tail
-             }
-           }
-           if (settings.Xdivergence211.value) post211DivergenceLogic()
-           else pre211DivergenceLogic()
        }
     }
 
@@ -512,10 +476,8 @@ trait Implicits {
       val start = if (Statistics.canEnable) Statistics.startTimer(matchesPtNanos) else null
       val result = normSubType(tp, pt) || isView && {
         pt match {
-          case TypeRef(_, Function1.Sym, arg1 :: arg2 :: Nil) =>
-            matchesPtView(tp, arg1, arg2, undet)
-          case _ =>
-            false
+          case Function1(arg1, arg2) => matchesPtView(tp, arg1, arg2, undet)
+          case _                     => false
         }
       }
       if (Statistics.canEnable) Statistics.stopTimer(matchesPtNanos, start)
@@ -606,41 +568,31 @@ trait Implicits {
       // side is a class, else we may not know enough.
       case tr1 @ TypeRef(_, sym1, _) if sym1.isClass =>
         tp2.dealiasWiden match {
-          case TypeRef(_, sym2, _)         => sym2.isClass && !(sym1 isWeakSubClass sym2)
+          case TypeRef(_, sym2, _)         => ((sym1 eq ByNameParamClass) != (sym2 eq ByNameParamClass)) || (sym2.isClass && !(sym1 isWeakSubClass sym2))
           case RefinedType(parents, decls) => decls.nonEmpty && tr1.member(decls.head.name) == NoSymbol
           case _                           => false
         }
       case _ => false
     }
 
-    private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocal: Boolean): SearchResult = {
+    private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocalToCallsite: Boolean): SearchResult = {
       if (Statistics.canEnable) Statistics.incCounter(plausiblyCompatibleImplicits)
-      printTyping (
-        ptBlock("typedImplicit0",
-          "info.name" -> info.name,
-          "ptChecked" -> ptChecked,
-          "pt"        -> wildPt,
-          "orig"      -> ptBlock("info",
-            "undetParams"           -> undetParams,
-            "info.pre"              -> info.pre
-          ).replaceAll("\\n", "\n  ")
-        )
-      )
-
-      if (ptChecked || matchesPt(info))
-        typedImplicit1(info, isLocal)
-      else
-        SearchFailure
+      val ok = ptChecked || matchesPt(info) && {
+        def word = if (isLocalToCallsite) "local " else ""
+        typingLog("match", s"$word$info")
+        true
+      }
+      if (ok) typedImplicit1(info, isLocalToCallsite) else SearchFailure
     }
 
-    private def typedImplicit1(info: ImplicitInfo, isLocal: Boolean): SearchResult = {
+    private def typedImplicit1(info: ImplicitInfo, isLocalToCallsite: Boolean): SearchResult = {
       if (Statistics.canEnable) Statistics.incCounter(matchingImplicits)
 
-      val itree = atPos(pos.focus) {
-        // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints
-        val isScalaDoc = context.tree == EmptyTree
+      // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints
+      val isScalaDoc = context.tree == EmptyTree
 
-        if (isLocal && !isScalaDoc) {
+      val itree0 = atPos(pos.focus) {
+        if (isLocalToCallsite && !isScalaDoc) {
           // SI-4270 SI-5376 Always use an unattributed Ident for implicits in the local scope,
           // rather than an attributed Select, to detect shadowing.
           Ident(info.name)
@@ -651,36 +603,51 @@ trait Implicits {
           Select(gen.mkAttributedQualifier(info.pre), implicitMemberName)
         }
       }
-      printTyping("typedImplicit1 %s, pt=%s, from implicit %s:%s".format(
-        typeDebug.ptTree(itree), wildPt, info.name, info.tpe)
-      )
+      val itree1 = if (isBlackbox(info.sym)) suppressMacroExpansion(itree0) else itree0
+      typingLog("considering", typeDebug.ptTree(itree1))
 
-      def fail(reason: String): SearchResult = failure(itree, reason)
+      def fail(reason: String): SearchResult = failure(itree0, reason)
+      def fallback = typed1(itree1, EXPRmode, wildPt)
       try {
-        val itree1 =
-          if (isView) {
-            val arg1 :: arg2 :: _ = pt.typeArgs
+        val itree2 = if (!isView) fallback else pt match {
+          case Function1(arg1, arg2) =>
             typed1(
-              atPos(itree.pos)(Apply(itree, List(Ident("<argument>") setType approximate(arg1)))),
+              atPos(itree0.pos)(Apply(itree1, List(Ident("<argument>") setType approximate(arg1)))),
               EXPRmode,
               approximate(arg2)
-            )
-          }
-          else
-            typed1(itree, EXPRmode, wildPt)
-
-        if (context.hasErrors)
-          return fail(context.errBuffer.head.errMsg)
+            ) match {
+              // try to infer implicit parameters immediately in order to:
+              //   1) guide type inference for implicit views
+              //   2) discard ineligible views right away instead of risking spurious ambiguous implicits
+              //
+              // this is an improvement of the state of the art that brings consistency to implicit resolution rules
+              // (and also helps fundep materialization to be applicable to implicit views)
+              //
+              // there's one caveat though. we need to turn this behavior off for scaladoc
+              // because scaladoc usually doesn't know the entire story
+              // and is just interested in views that are potentially applicable
+              // for instance, if we have `class C[T]` and `implicit def conv[T: Numeric](c: C[T]) = ???`
+              // then Scaladoc will give us something of type `C[T]`, and it would like to know
+              // that `conv` is potentially available under such and such conditions
+              case tree if isImplicitMethodType(tree.tpe) && !isScalaDoc =>
+                applyImplicitArgs(tree)
+              case tree => tree
+            }
+          case _ => fallback
+        }
+        context.firstError match { // using match rather than foreach to avoid non local return.
+          case Some(err) =>
+            log("implicit adapt failed: " + err.errMsg)
+            return fail(err.errMsg)
+          case None      =>
+        }
 
         if (Statistics.canEnable) Statistics.incCounter(typedImplicits)
 
-        printTyping("typed implicit %s:%s, pt=%s".format(itree1, itree1.tpe, wildPt))
-        val itree2 = if (isView) (itree1: @unchecked) match { case Apply(fun, _) => fun }
-                     else adapt(itree1, EXPRmode, wildPt)
+        val itree3 = if (isView) treeInfo.dissectApplied(itree2).callee
+                     else adapt(itree2, EXPRmode, wildPt)
 
-        printTyping("adapted implicit %s:%s to %s".format(
-          itree1.symbol, itree2.tpe, wildPt)
-        )
+        typingStack.showAdapt(itree0, itree3, pt, context)
 
         def hasMatchingSymbol(tree: Tree): Boolean = (tree.symbol == info.sym) || {
           tree match {
@@ -692,31 +659,29 @@ trait Implicits {
         }
 
         if (context.hasErrors)
-          fail("hasMatchingSymbol reported error: " + context.errBuffer.head.errMsg)
-        else if (isLocal && !hasMatchingSymbol(itree1))
+          fail("hasMatchingSymbol reported error: " + context.firstError.get.errMsg)
+        else if (itree3.isErroneous)
+          fail("error typechecking implicit candidate")
+        else if (isLocalToCallsite && !hasMatchingSymbol(itree2))
           fail("candidate implicit %s is shadowed by %s".format(
-            info.sym.fullLocationString, itree1.symbol.fullLocationString))
+            info.sym.fullLocationString, itree2.symbol.fullLocationString))
         else {
           val tvars = undetParams map freshVar
           def ptInstantiated = pt.instantiateTypeParams(undetParams, tvars)
 
-          printInference("[search] considering %s (pt contains %s) trying %s against pt=%s".format(
-            if (undetParams.isEmpty) "no tparams" else undetParams.map(_.name).mkString(", "),
-            typeVarsInType(ptInstantiated) filterNot (_.isGround) match { case Nil => "no tvars" ; case tvs => tvs.mkString(", ") },
-            itree2.tpe, pt
-          ))
-
-          if (matchesPt(itree2.tpe, ptInstantiated, undetParams)) {
+          if (matchesPt(itree3.tpe, ptInstantiated, undetParams)) {
             if (tvars.nonEmpty)
-              printTyping(ptLine("" + info.sym, "tvars" -> tvars, "tvars.constr" -> tvars.map(_.constr)))
+              typingLog("solve", ptLine("tvars" -> tvars, "tvars.constr" -> tvars.map(_.constr)))
 
-            val targs = solvedTypes(tvars, undetParams, undetParams map varianceInType(pt),
-                                    false, lubDepth(List(itree2.tpe, pt)))
+            val targs = solvedTypes(tvars, undetParams, undetParams map varianceInType(pt), upper = false, lubDepth(itree3.tpe :: pt :: Nil))
 
             // #2421: check that we correctly instantiated type parameters outside of the implicit tree:
-            checkBounds(itree2, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
-            if (context.hasErrors)
-              return fail("type parameters weren't correctly instantiated outside of the implicit tree: " + context.errBuffer.head.errMsg)
+            checkBounds(itree3, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
+            context.firstError match {
+              case Some(err) =>
+                return fail("type parameters weren't correctly instantiated outside of the implicit tree: " + err.errMsg)
+              case None      =>
+            }
 
             // filter out failures from type inference, don't want to remove them from undetParams!
             // we must be conservative in leaving type params in undetparams
@@ -727,7 +692,7 @@ trait Implicits {
               if (okParams.isEmpty) EmptyTreeTypeSubstituter
               else {
                 val subst = new TreeTypeSubstituter(okParams, okArgs)
-                subst traverse itree2
+                subst traverse itree3
                 notifyUndetparamsInferred(okParams, okArgs)
                 subst
               }
@@ -741,26 +706,27 @@ trait Implicits {
             // duplicating the code here, but this is probably a
             // hotspot (and you can't just call typed, need to force
             // re-typecheck)
-            // TODO: the return tree is ignored.  This seems to make
-            // no difference, but it's bad practice regardless.
-
-
-            val checked = itree2 match {
-              case TypeApply(fun, args)           => typedTypeApply(itree2, EXPRmode, fun, args)
-              case Apply(TypeApply(fun, args), _) => typedTypeApply(itree2, EXPRmode, fun, args) // t2421c
+            //
+            // This is just called for the side effect of error detection,
+            // see SI-6966 to see what goes wrong if we use the result of this
+            // as the SearchResult.
+            itree3 match {
+              case TypeApply(fun, args)           => typedTypeApply(itree3, EXPRmode, fun, args)
+              case Apply(TypeApply(fun, args), _) => typedTypeApply(itree3, EXPRmode, fun, args) // t2421c
               case t                              => t
             }
 
-            if (context.hasErrors)
-              fail("typing TypeApply reported errors for the implicit tree: " + context.errBuffer.head.errMsg)
-            else {
-              val result = new SearchResult(itree2, subst)
-              if (Statistics.canEnable) Statistics.incCounter(foundImplicits)
-              printInference("[success] found %s for pt %s".format(result, ptInstantiated))
-              result
+            context.firstError match {
+              case Some(err) =>
+                fail("typing TypeApply reported errors for the implicit tree: " + err.errMsg)
+              case None      =>
+                val result = new SearchResult(unsuppressMacroExpansion(itree3), subst, context.undetparams)
+                if (Statistics.canEnable) Statistics.incCounter(foundImplicits)
+                typingLog("success", s"inferred value of type $ptInstantiated is $result")
+                result
             }
           }
-          else fail("incompatible: %s does not match expected type %s".format(itree2.tpe, ptInstantiated))
+          else fail("incompatible: %s does not match expected type %s".format(itree3.tpe, ptInstantiated))
         }
       }
       catch {
@@ -811,12 +777,12 @@ trait Implicits {
 
     /** Prune ImplicitInfos down to either all the eligible ones or the best one.
      *
-     *  @param  iss       list of list of infos
-     *  @param  isLocal   if true, `iss` represents in-scope implicits, which must respect the normal rules of
-     *                    shadowing. The head of the list `iss` must represent implicits from the closest
-     *                    enclosing scope, and so on.
+     *  @param  iss                list of list of infos
+     *  @param  isLocalToCallsite  if true, `iss` represents in-scope implicits, which must respect the normal rules of
+     *                             shadowing. The head of the list `iss` must represent implicits from the closest
+     *                             enclosing scope, and so on.
      */
-    class ImplicitComputation(iss: Infoss, isLocal: Boolean) {
+    class ImplicitComputation(iss: Infoss, isLocalToCallsite: Boolean) {
       abstract class Shadower {
         def addInfos(infos: Infos)
         def isShadowed(name: Name): Boolean
@@ -826,7 +792,7 @@ trait Implicits {
         final class LocalShadower extends Shadower {
           val shadowed = util.HashSet[Name](512)
           def addInfos(infos: Infos) {
-            shadowed addEntries infos.map(_.name)
+            infos.foreach(i => shadowed.addEntry(i.name))
           }
           def isShadowed(name: Name) = shadowed(name)
         }
@@ -835,15 +801,14 @@ trait Implicits {
           def addInfos(infos: Infos) {}
           def isShadowed(name: Name) = false
         }
-        if (isLocal) new LocalShadower else NoShadower
+        if (isLocalToCallsite) new LocalShadower else NoShadower
       }
 
       private var best: SearchResult = SearchFailure
 
       private def isIneligible(info: ImplicitInfo) = (
            info.isCyclicOrErroneous
-        || isView && isPredefMemberNamed(info.sym, nme.conforms)
-        || shadower.isShadowed(info.name)
+        || isView && (info.sym eq Predef_conforms) // as an implicit conversion, Predef.$conforms is a no-op, so exclude it
         || (!context.macrosEnabled && info.sym.isTermMacro)
       )
 
@@ -852,6 +817,7 @@ trait Implicits {
       def survives(info: ImplicitInfo) = (
            !isIneligible(info)                      // cyclic, erroneous, shadowed, or specially excluded
         && isPlausiblyCompatible(info.tpe, wildPt)  // optimization to avoid matchesPt
+        && !shadower.isShadowed(info.name)          // OPT rare, only check for plausible candidates
         && matchesPt(info)                          // stable and matches expected type
       )
       /** The implicits that are not valid because they come later in the source and
@@ -865,41 +831,43 @@ trait Implicits {
 
       /** Preventing a divergent implicit from terminating implicit search,
        *  so that if there is a best candidate it can still be selected.
-       *
-       *  The old way of handling divergence.
-       *  Only enabled when -Xdivergence211 is turned off.
        */
-      private var divergence = false
-      private val divergenceHandler: PartialFunction[Throwable, SearchResult] = {
-        var remaining = 1;
-        { case x: DivergentImplicit if remaining > 0 =>
-            remaining -= 1
-            divergence = true
-            log("discarding divergent implicit during implicit search")
-            SearchFailure
+      object DivergentImplicitRecovery {
+        private var divergentError: Option[DivergentImplicitTypeError] = None
+
+        private def saveDivergent(err: DivergentImplicitTypeError) {
+          if (divergentError.isEmpty) divergentError = Some(err)
         }
-      }
 
-      /** Preventing a divergent implicit from terminating implicit search,
-       *  so that if there is a best candidate it can still be selected.
-       *
-       *  The new way of handling divergence.
-       *  Only enabled when -Xdivergence211 is turned on.
-       */
-      object DivergentImplicitRecovery {
-        // symbol of the implicit that caused the divergence.
-        // Initially null, will be saved on first diverging expansion.
-        private var implicitSym: Symbol    = _
-        private var countdown: Int = 1
-
-        def sym: Symbol = implicitSym
-        def apply(search: SearchResult, i: ImplicitInfo): SearchResult =
-          if (search.isDivergent && countdown > 0) {
-            countdown -= 1
-            implicitSym = i.sym
-            log("discarding divergent implicit ${implicitSym} during implicit search")
+        def issueSavedDivergentError() {
+          divergentError foreach (err => context.issue(err))
+        }
+
+        def apply(search: SearchResult, i: ImplicitInfo, errors: Seq[AbsTypeError]): SearchResult = {
+          // A divergent error from a nested implicit search will be found in `errors`. Stash that
+          // aside to be re-issued if this implicit search fails.
+          errors.collectFirst { case err: DivergentImplicitTypeError => err } foreach saveDivergent
+
+          if (search.isDivergent && divergentError.isEmpty) {
+            // Divergence triggered by `i` at this level of the implicit search. We haven't
+            // seen divergence so far, we won't issue this error just yet, and instead temporarily
+            // treat `i` as a failed candidate.
+            saveDivergent(DivergentImplicitTypeError(tree, pt, i.sym))
+            log(s"discarding divergent implicit ${i.sym} during implicit search")
             SearchFailure
-          } else search
+          } else {
+            if (search.isFailure) {
+              // We don't want errors that occur during checking implicit info
+              // to influence the check of further infos, but we should retain divergent implicit errors
+              // (except for the one we already squirreled away)
+              val saved = divergentError.getOrElse(null)
+              context.reportBuffer.retainErrors {
+                case err: DivergentImplicitTypeError => err ne saved
+              }
+            }
+            search
+          }
+        }
       }
 
       /** Sorted list of eligible implicits.
@@ -915,10 +883,7 @@ trait Implicits {
         matches sortBy (x => if (isView) -x.useCountView else -x.useCountArg)
       }
       if (eligible.nonEmpty)
-        printInference("[search%s] %s with pt=%s in %s, eligible:\n  %s".format(
-          if (isView) " view" else "",
-          tree, pt, context.owner.enclClass, eligible.mkString("\n  "))
-        )
+        printTyping(tree, eligible.size + s" eligible for pt=$pt at ${fullSiteString(context)}")
 
       /** Faster implicit search.  Overall idea:
        *   - prune aggressively
@@ -926,49 +891,39 @@ trait Implicits {
        *   - if it matches, forget about all others it improves upon
        */
       @tailrec private def rankImplicits(pending: Infos, acc: Infos): Infos = pending match {
-        case Nil      => acc
-        case i :: is  =>
-          def pre211tryImplicitInfo(i: ImplicitInfo) =
-            try typedImplicit(i, ptChecked = true, isLocal)
-            catch divergenceHandler
-
-          def post211tryImplicitInfo(i: ImplicitInfo) =
-            DivergentImplicitRecovery(typedImplicit(i, ptChecked = true, isLocal), i)
-
-          {
-            if (settings.Xdivergence211.value) post211tryImplicitInfo(i)
-            else pre211tryImplicitInfo(i)
-          } match {
-            // only used if -Xdivergence211 is turned on
-            case sr if sr.isDivergent =>
-              Nil
-            case sr if sr.isFailure =>
-              // We don't want errors that occur during checking implicit info
-              // to influence the check of further infos.
-              context.condBufferFlush(_.kind != ErrorKinds.Divergent)
-              rankImplicits(is, acc)
-            case newBest        =>
-              best = newBest
-              val newPending = undoLog undo {
-                is filterNot (alt => alt == i || {
-                  try improves(i, alt)
-                  catch {
-                    case e: CyclicReference =>
-                      if (printInfers) {
-                        println(i+" discarded because cyclic reference occurred")
-                        e.printStackTrace()
-                      }
-                      true
-                  }
-                })
+        case Nil                          => acc
+        case firstPending :: otherPending =>
+          def firstPendingImproves(alt: ImplicitInfo) =
+            firstPending == alt || (
+              try improves(firstPending, alt)
+              catch {
+                case e: CyclicReference =>
+                  debugwarn(s"Discarding $firstPending during implicit search due to cyclic reference.")
+                  true
+              }
+            )
+
+          val typedFirstPending = typedImplicit(firstPending, ptChecked = true, isLocalToCallsite)
+
+          // Pass the errors to `DivergentImplicitRecovery` so that it can note
+          // the first `DivergentImplicitTypeError` that is being propagated
+          // from a nested implicit search; this one will be
+          // re-issued if this level of the search fails.
+          DivergentImplicitRecovery(typedFirstPending, firstPending, context.errors) match {
+            case sr if sr.isDivergent => Nil
+            case sr if sr.isFailure   => rankImplicits(otherPending, acc)
+            case newBest              =>
+              best = newBest // firstPending is our new best, since we already pruned last time around:
+              val pendingImprovingBest = undoLog undo {
+                otherPending filterNot firstPendingImproves
               }
-              rankImplicits(newPending, i :: acc)
+              rankImplicits(pendingImprovingBest, firstPending :: acc)
           }
       }
 
       /** Returns all eligible ImplicitInfos and their SearchResults in a map.
        */
-      def findAll() = mapFrom(eligible)(typedImplicit(_, ptChecked = false, isLocal))
+      def findAll() = mapFrom(eligible)(typedImplicit(_, ptChecked = false, isLocalToCallsite))
 
       /** Returns the SearchResult of the best match.
        */
@@ -990,13 +945,9 @@ trait Implicits {
         }
 
         if (best.isFailure) {
-          /** If there is no winner, and we witnessed and caught divergence,
-           *  now we can throw it for the error message.
-           */
-          if (divergence || DivergentImplicitRecovery.sym != null) {
-            if (settings.Xdivergence211.value) DivergingImplicitExpansionError(tree, pt, DivergentImplicitRecovery.sym)(context)
-            else throw DivergentImplicit
-          }
+          // If there is no winner, and we witnessed and recorded a divergence error,
+          // our recovery attempt has failed, so we must now issue it.
+          DivergentImplicitRecovery.issueSavedDivergentError()
 
           if (invalidImplicits.nonEmpty)
             setAddendum(pos, () =>
@@ -1011,15 +962,15 @@ trait Implicits {
     /** Computes from a list of lists of implicit infos a map which takes
      *  infos which are applicable for given expected type `pt` to their attributed trees.
      *
-     *  @param iss            The given list of lists of implicit infos
-     *  @param isLocal        Is implicit definition visible without prefix?
-     *                        If this is the case then symbols in preceding lists shadow
-     *                        symbols of the same name in succeeding lists.
-     *  @return               map from infos to search results
+     *  @param iss               The given list of lists of implicit infos
+     *  @param isLocalToCallsite Is implicit definition visible without prefix?
+     *                           If this is the case then symbols in preceding lists shadow
+     *                           symbols of the same name in succeeding lists.
+     *  @return                  map from infos to search results
      */
-    def applicableInfos(iss: Infoss, isLocal: Boolean): Map[ImplicitInfo, SearchResult] = {
+    def applicableInfos(iss: Infoss, isLocalToCallsite: Boolean): Map[ImplicitInfo, SearchResult] = {
       val start       = if (Statistics.canEnable) Statistics.startCounter(subtypeAppInfos) else null
-      val computation = new ImplicitComputation(iss, isLocal) { }
+      val computation = new ImplicitComputation(iss, isLocalToCallsite) { }
       val applicable  = computation.findAll()
 
       if (Statistics.canEnable) Statistics.stopCounter(subtypeAppInfos, start)
@@ -1030,14 +981,14 @@ trait Implicits {
      *  If found return a search result with a tree from found implicit info
      *  which is typed with expected type `pt`. Otherwise return SearchFailure.
      *
-     *  @param implicitInfoss The given list of lists of implicit infos
-     *  @param isLocal        Is implicit definition visible without prefix?
-     *                        If this is the case then symbols in preceding lists shadow
-     *                        symbols of the same name in succeeding lists.
+     *  @param implicitInfoss    The given list of lists of implicit infos
+     *  @param isLocalToCallsite Is implicit definition visible without prefix?
+     *                           If this is the case then symbols in preceding lists shadow
+     *                           symbols of the same name in succeeding lists.
      */
-    def searchImplicit(implicitInfoss: Infoss, isLocal: Boolean): SearchResult =
+    def searchImplicit(implicitInfoss: Infoss, isLocalToCallsite: Boolean): SearchResult =
       if (implicitInfoss.forall(_.isEmpty)) SearchFailure
-      else new ImplicitComputation(implicitInfoss, isLocal) findBest()
+      else new ImplicitComputation(implicitInfoss, isLocalToCallsite) findBest()
 
     /** Produce an implicit info map, i.e. a map from the class symbols C of all parts of this type to
      *  the implicit infos in the companion objects of these class symbols C.
@@ -1053,8 +1004,8 @@ trait Implicits {
      */
     private def companionImplicitMap(tp: Type): InfoMap = {
 
-      /** Populate implicit info map by traversing all parts of type `tp`.
-       *  Parameters as for `getParts`.
+      /* Populate implicit info map by traversing all parts of type `tp`.
+       * Parameters as for `getParts`.
        */
       def getClassParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.Set[Type], pending: Set[Symbol]) = tp match {
         case TypeRef(pre, sym, args) =>
@@ -1086,13 +1037,13 @@ trait Implicits {
             }
       }
 
-      /** Populate implicit info map by traversing all parts of type `tp`.
-       *  This method is performance critical.
-       *  @param tp   The type for which we want to traverse parts
-       *  @param infoMap  The infoMap in which implicit infos corresponding to parts are stored
-       *  @param seen     The types that were already visited previously when collecting parts for the given infoMap
-       *  @param pending  The set of static symbols for which we are currently trying to collect their parts
-       *                  in order to cache them in infoMapCache
+      /* Populate implicit info map by traversing all parts of type `tp`.
+       * This method is performance critical.
+       * @param tp   The type for which we want to traverse parts
+       * @param infoMap  The infoMap in which implicit infos corresponding to parts are stored
+       * @param seen     The types that were already visited previously when collecting parts for the given infoMap
+       * @param pending  The set of static symbols for which we are currently trying to collect their parts
+       *                 in order to cache them in infoMapCache
        */
       def getParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.Set[Type], pending: Set[Symbol]) {
         if (seen(tp))
@@ -1101,23 +1052,21 @@ trait Implicits {
         tp match {
           case TypeRef(pre, sym, args) =>
             if (sym.isClass) {
-              if (!((sym.name == tpnme.REFINE_CLASS_NAME) ||
-                    (sym.name startsWith tpnme.ANON_CLASS_NAME) ||
-                    (sym.name == tpnme.ROOT))) {
+              if (!sym.isAnonOrRefinementClass && !sym.isRoot) {
                 if (sym.isStatic && !(pending contains sym))
                   infoMap ++= {
                     infoMapCache get sym match {
                       case Some(imap) => imap
                       case None =>
                         val result = new InfoMap
-                        getClassParts(sym.tpe)(result, new mutable.HashSet(), pending + sym)
+                        getClassParts(sym.tpeHK)(result, new mutable.HashSet(), pending + sym)
                         infoMapCache(sym) = result
                         result
                     }
                   }
                 else
                   getClassParts(tp)
-                args foreach (getParts(_))
+                args foreach getParts
               }
             } else if (sym.isAliasType) {
               getParts(tp.normalize) // SI-7180 Normalize needed to expand HK type refs
@@ -1133,7 +1082,7 @@ trait Implicits {
             getParts(restpe)
           case RefinedType(ps, _) =>
             for (p <- ps) getParts(p)
-          case AnnotatedType(_, t, _) =>
+          case AnnotatedType(_, t) =>
             getParts(t)
           case ExistentialType(_, t) =>
             getParts(t)
@@ -1145,9 +1094,9 @@ trait Implicits {
 
       val infoMap = new InfoMap
       getParts(tp)(infoMap, new mutable.HashSet(), Set())
-      printInference(
-        ptBlock("companionImplicitMap " + tp, infoMap.toSeq.map({ case (k, v) => ("" + k, v.mkString(", ")) }): _*)
-      )
+      if (infoMap.nonEmpty)
+        printTyping(tree, infoMap.size + " implicits in companion scope")
+
       infoMap
     }
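
For orientation, a small sketch (hypothetical names) of the implicit scope that `companionImplicitMap`/`getParts` collect: implicits declared in the companion objects of a type's parts are candidates with no import at the use site.

    case class Meters(value: Double)
    object Meters {
      // part of the implicit scope of Meters: found with no import at the use site
      implicit val byValue: Ordering[Meters] = Ordering.by(_.value)
    }
    // List(Meters(2), Meters(1)).sorted resolves Ordering[Meters] from the companion
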
 
@@ -1179,13 +1128,6 @@ trait Implicits {
       }
     }
 
-    private def TagSymbols =  TagMaterializers.keySet
-    private val TagMaterializers = Map[Symbol, Symbol](
-      ClassTagClass    -> materializeClassTag,
-      WeakTypeTagClass -> materializeWeakTypeTag,
-      TypeTagClass     -> materializeTypeTag
-    )
-
     /** Creates a tree will produce a tag of the requested flavor.
       * An EmptyTree is returned if materialization fails.
       */
@@ -1204,8 +1146,10 @@ trait Implicits {
 
         try {
           val tree1 = typedPos(pos.focus)(arg)
-          if (context.hasErrors) processMacroExpansionError(context.errBuffer.head.errPos, context.errBuffer.head.errMsg)
-          else new SearchResult(tree1, EmptyTreeTypeSubstituter)
+          context.firstError match {
+            case Some(err) => processMacroExpansionError(err.errPos, err.errMsg)
+            case None      => new SearchResult(tree1, EmptyTreeTypeSubstituter, Nil)
+          }
         } catch {
           case ex: TypeError =>
             processMacroExpansionError(ex.pos, ex.msg)
@@ -1222,8 +1166,8 @@ trait Implicits {
           case ThisType(thisSym) =>
             gen.mkAttributedThis(thisSym)
           case _ =>
-            // if ``pre'' is not a PDT, e.g. if someone wrote
-            //   implicitly[scala.reflect.macros.Context#TypeTag[Int]]
+            // if `pre` is not a PDT, e.g. if someone wrote
+            //   implicitly[scala.reflect.macros.blackbox.Context#TypeTag[Int]]
             // then we need to fail, because we don't know the prefix to use during type reification
             // upd. we also need to fail silently, because this is a very common situation
             // e.g. quite often we're searching for BaseUniverse#TypeTag, e.g. for a type tag in any universe
@@ -1236,8 +1180,8 @@ trait Implicits {
         }
       )
       // todo. migrate hardcoded materialization in Implicits to corresponding implicit macros
-      var materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List()))
-      if (settings.XlogImplicits.value) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer))
+      val materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List()))
+      if (settings.XlogImplicits) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer))
       if (context.macrosEnabled) success(materializer)
       // don't call `failure` here. if macros are disabled, we just fail silently
       // otherwise -Xlog-implicits will spam the log with zillions of "macros are disabled"
@@ -1245,8 +1189,6 @@ trait Implicits {
       else SearchFailure
     }
 
-    private val ManifestSymbols = Set[Symbol](PartialManifestClass, FullManifestClass, OptManifestClass)
-
     /** Creates a tree that calls the relevant factory method in object
       * scala.reflect.Manifest for type 'tp'. An EmptyTree is returned if
       * no manifest is found. todo: make this instantiate take type params as well?
@@ -1255,28 +1197,28 @@ trait Implicits {
       val full = flavor == FullManifestClass
       val opt = flavor == OptManifestClass
 
-      /** Creates a tree that calls the factory method called constructor in object scala.reflect.Manifest */
+      /* Creates a tree that calls the factory method called constructor in object scala.reflect.Manifest */
       def manifestFactoryCall(constructor: String, tparg: Type, args: Tree*): Tree =
         if (args contains EmptyTree) EmptyTree
         else typedPos(tree.pos.focus) {
           val mani = gen.mkManifestFactoryCall(full, constructor, tparg, args.toList)
-          if (settings.debug.value) println("generated manifest: "+mani) // DEBUG
+          if (settings.debug) println("generated manifest: "+mani) // DEBUG
           mani
         }
 
-      /** Creates a tree representing one of the singleton manifests.*/
+      /* Creates a tree representing one of the singleton manifests.*/
       def findSingletonManifest(name: String) = typedPos(tree.pos.focus) {
         Select(gen.mkAttributedRef(FullManifestModule), name)
       }
 
-      /** Re-wraps a type in a manifest before calling inferImplicit on the result */
+      /* Re-wraps a type in a manifest before calling inferImplicit on the result */
       def findManifest(tp: Type, manifestClass: Symbol = if (full) FullManifestClass else PartialManifestClass) =
-        inferImplicit(tree, appliedType(manifestClass, tp), true, false, context).tree
+        inferImplicit(tree, appliedType(manifestClass, tp), reportAmbiguous = true, isView = false, context).tree
 
       def findSubManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else OptManifestClass)
       def mot(tp0: Type, from: List[Symbol], to: List[Type]): SearchResult = {
         implicit def wrapResult(tree: Tree): SearchResult =
-          if (tree == EmptyTree) SearchFailure else new SearchResult(tree, if (from.isEmpty) EmptyTreeTypeSubstituter else new TreeTypeSubstituter(from, to))
+          if (tree == EmptyTree) SearchFailure else new SearchResult(tree, if (from.isEmpty) EmptyTreeTypeSubstituter else new TreeTypeSubstituter(from, to), Nil)
 
         val tp1 = tp0.dealias
         tp1 match {
@@ -1313,8 +1255,8 @@ trait Implicits {
             // looking for a manifest of a type parameter that hasn't been inferred by now,
             // can't do much, but let's not fail
             else if (undetParams contains sym) {
-              // #3859: need to include the mapping from sym -> NothingClass.tpe in the SearchResult
-              mot(NothingClass.tpe, sym :: from, NothingClass.tpe :: to)
+              // #3859: need to include the mapping from sym -> NothingTpe in the SearchResult
+              mot(NothingTpe, sym :: from, NothingTpe :: to)
             } else {
               // a manifest should have been found by normal searchImplicit
               EmptyTree
@@ -1351,7 +1293,8 @@ trait Implicits {
             return SearchFailure
           }
           val cm = typed(Ident(ReflectRuntimeCurrentMirror))
-          val interop = gen.mkMethodCall(ReflectRuntimeUniverse, nme.typeTagToManifest, List(tp), List(cm, tagInScope))
+          val internal = gen.mkAttributedSelect(gen.mkAttributedRef(ReflectRuntimeUniverse), UniverseInternal)
+          val interop = gen.mkMethodCall(Select(internal, nme.typeTagToManifest), List(tp), List(cm, tagInScope))
           wrapResult(interop)
         }
       } else {
@@ -1363,7 +1306,7 @@ trait Implicits {
     }
 
     def wrapResult(tree: Tree): SearchResult =
-      if (tree == EmptyTree) SearchFailure else new SearchResult(tree, EmptyTreeTypeSubstituter)
+      if (tree == EmptyTree) SearchFailure else new SearchResult(tree, EmptyTreeTypeSubstituter, Nil)
 
     /** Materializes implicits of predefined types (currently, manifests and tags).
      *  Will be replaced by implicit macros once we fix them.
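
A brief sketch (assumed example, not from the patch) of the predefined implicits mentioned here: class tags, type tags and manifests are not found by the ordinary search but are materialized by the code in this file.

    import scala.reflect.{ClassTag, classTag}
    object TagSketch {
      def newArray[A: ClassTag](n: Int) = new Array[A](n) // ClassTag materialized at each call site
      val tag  = classTag[Array[Int]]
      val mani = manifest[List[String]]                   // generated as a Manifest factory call
    }
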
@@ -1406,7 +1349,7 @@ trait Implicits {
       val failstart = if (Statistics.canEnable) Statistics.startTimer(inscopeFailNanos) else null
       val succstart = if (Statistics.canEnable) Statistics.startTimer(inscopeSucceedNanos) else null
 
-      var result = searchImplicit(context.implicitss, true)
+      var result = searchImplicit(context.implicitss, isLocalToCallsite = true)
 
       if (result.isFailure) {
         if (Statistics.canEnable) Statistics.stopTimer(inscopeFailNanos, failstart)
@@ -1421,34 +1364,53 @@ trait Implicits {
 
         val wasAmbigious = result.isAmbiguousFailure // SI-6667, never search companions after an ambiguous error in in-scope implicits
         result = materializeImplicit(pt)
-
         // `materializeImplicit` does some preprocessing for `pt`
         // is it only meant for manifests/tags or we need to do the same for `implicitsOfExpectedType`?
-        if (result.isFailure) result = searchImplicit(implicitsOfExpectedType, false)
+        if (result.isFailure && !wasAmbigious)
+          result = searchImplicit(implicitsOfExpectedType, isLocalToCallsite = false)
 
         if (result.isFailure) {
           context.updateBuffer(previousErrs)
           if (Statistics.canEnable) Statistics.stopTimer(oftypeFailNanos, failstart)
         } else {
-          if (wasAmbigious && settings.lint.value)
-            reporter.warning(tree.pos,
-              "Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667. \n" +
-                previousErrs.map(_.errMsg).mkString("\n"))
-
           if (Statistics.canEnable) Statistics.stopTimer(oftypeSucceedNanos, succstart)
           if (Statistics.canEnable) Statistics.incCounter(oftypeImplicitHits)
         }
       }
-
-      if (result.isFailure && settings.debug.value)
-        log("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType)
+      if (result.isSuccess && isView) {
+        def maybeInvalidConversionError(msg: String) {
+          // We have to check context.ambiguousErrors even though we are calling "issueAmbiguousError"
+          // which ostensibly does exactly that before issuing the error. Why? I have no idea. Test is pos/t7690.
+          if (context.ambiguousErrors)
+            context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, msg))
+        }
+        pt match {
+          case Function1(_, out) =>
+            def prohibit(sym: Symbol) = if (sym.tpe <:< out) {
+               maybeInvalidConversionError(s"the result type of an implicit conversion must be more specific than ${sym.name}")
+              result = SearchFailure
+            }
+            prohibit(AnyRefClass)
+            if (settings.isScala211) prohibit(AnyValClass)
+          case _                 => false
+        }
+        if (settings.isScala211 && isInvalidConversionSource(pt)) {
+          maybeInvalidConversionError("an expression of type Null is ineligible for implicit conversion")
+          result = SearchFailure
+        }
+      }
+      if (result.isFailure)
+        debuglog("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType)
 
       result
     }
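
Roughly the shape of code the new `isView` checks above reject, as an illustrative sketch rather than a test from the patch:

    object ConversionSketch {
      // a view with this result type is never selected: the search above fails with
      // "the result type of an implicit conversion must be more specific than AnyRef"
      implicit def widenToRef(s: String): AnyRef = s
      // under the 2.11 source level the same applies to AnyVal results, and an
      // expression of static type Null is ineligible as a conversion source
    }
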
 
     def allImplicits: List[SearchResult] = {
-      def search(iss: Infoss, isLocal: Boolean) = applicableInfos(iss, isLocal).values
-      (search(context.implicitss, true) ++ search(implicitsOfExpectedType, false)).toList.filter(_.tree ne EmptyTree)
+      def search(iss: Infoss, isLocalToCallsite: Boolean) = applicableInfos(iss, isLocalToCallsite).values
+      (
+        search(context.implicitss, isLocalToCallsite = true) ++
+        search(implicitsOfExpectedType, isLocalToCallsite = false)
+      ).toList.filter(_.tree ne EmptyTree)
     }
 
     // find all implicits for some type that contains type variables
@@ -1456,8 +1418,8 @@ trait Implicits {
     def allImplicitsPoly(tvars: List[TypeVar]): List[(SearchResult, List[TypeConstraint])] = {
       def resetTVars() = tvars foreach { _.constr = new TypeConstraint }
 
-      def eligibleInfos(iss: Infoss, isLocal: Boolean) = {
-        val eligible = new ImplicitComputation(iss, isLocal).eligible
+      def eligibleInfos(iss: Infoss, isLocalToCallsite: Boolean) = {
+        val eligible = new ImplicitComputation(iss, isLocalToCallsite).eligible
         eligible.toList.flatMap {
           (ii: ImplicitInfo) =>
         // each ImplicitInfo contributes a distinct set of constraints (generated indirectly by typedImplicit)
@@ -1466,12 +1428,13 @@ trait Implicits {
         // any previous errors should not affect us now
         context.flushBuffer()
 
-            val res = typedImplicit(ii, ptChecked = false, isLocal)
+            val res = typedImplicit(ii, ptChecked = false, isLocalToCallsite)
         if (res.tree ne EmptyTree) List((res, tvars map (_.constr)))
         else Nil
       }
     }
-      eligibleInfos(context.implicitss, isLocal = true) ++ eligibleInfos(implicitsOfExpectedType, isLocal = false)
+      eligibleInfos(context.implicitss, isLocalToCallsite = true) ++
+      eligibleInfos(implicitsOfExpectedType, isLocalToCallsite = false)
   }
   }
 
@@ -1494,13 +1457,15 @@ trait Implicits {
         case None => Some("Missing argument `msg` on implicitNotFound annotation.")
       })
 
+    // http://dcsobral.blogspot.com/2010/01/string-interpolation-in-scala-with.html
+    private val Intersobralator = """\$\{\s*([^}\s]+)\s*\}""".r
 
     class Message(sym: Symbol, msg: String) {
-      // http://dcsobral.blogspot.com/2010/01/string-interpolation-in-scala-with.html
-      private def interpolate(text: String, vars: Map[String, String]) = {
-        """\$\{([^}]+)\}""".r.replaceAllIn(text, (_: Regex.Match) match {
-          case Regex.Groups(v) => java.util.regex.Matcher.quoteReplacement(vars.getOrElse(v, "")) // #3915: need to quote replacement string since it may include $'s (such as the interpreter's $iw)
-        })}
+      private def interpolate(text: String, vars: Map[String, String]) =
+        Intersobralator.replaceAllIn(text, (_: Regex.Match) match {
+          case Regex.Groups(v) => Regex quoteReplacement vars.getOrElse(v, "")
+          // #3915: need to quote replacement string since it may include $'s (such as the interpreter's $iw)
+        })
 
       private lazy val typeParamNames: List[String] = sym.typeParams.map(_.decodedName)
 
@@ -1509,18 +1474,16 @@ trait Implicits {
         interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc?
 
       def validate: Option[String] = {
-        import scala.util.matching.Regex; import scala.collection.breakOut
-        // is there a shorter way to avoid the intermediate toList?
-        val refs = """\$\{([^}]+)\}""".r.findAllIn(msg).matchData.map(_ group 1).toSet
+        val refs  = Intersobralator.findAllMatchIn(msg).map(_ group 1).toSet
         val decls = typeParamNames.toSet
 
         (refs &~ decls) match {
           case s if s.isEmpty => None
-          case unboundNames =>
+          case unboundNames   =>
             val singular = unboundNames.size == 1
-            Some("The type parameter"+( if(singular) " " else "s " )+ unboundNames.mkString(", ")  +
-                  " referenced in the message of the @implicitNotFound annotation "+( if(singular) "is" else "are" )+
-                  " not defined by "+ sym +".")
+            val ess      = if (singular) "" else "s"
+            val bee      = if (singular) "is" else "are"
+            Some(s"The type parameter$ess ${unboundNames mkString ", "} referenced in the message of the @implicitNotFound annotation $bee not defined by $sym.")
         }
       }
     }
@@ -1535,9 +1498,7 @@ object ImplicitsStats {
   val subtypeImpl         = Statistics.newSubCounter("  of which in implicit", subtypeCount)
   val findMemberImpl      = Statistics.newSubCounter("  of which in implicit", findMemberCount)
   val subtypeAppInfos     = Statistics.newSubCounter("  of which in app impl", subtypeCount)
-  val subtypeImprovCount  = Statistics.newSubCounter("  of which in improves", subtypeCount)
   val implicitSearchCount = Statistics.newCounter   ("#implicit searches", "typer")
-  val triedImplicits      = Statistics.newSubCounter("  #tried", implicitSearchCount)
   val plausiblyCompatibleImplicits
                                   = Statistics.newSubCounter("  #plausibly compatible", implicitSearchCount)
   val matchingImplicits   = Statistics.newSubCounter("  #matching", implicitSearchCount)
@@ -1557,7 +1518,3 @@ object ImplicitsStats {
   val implicitCacheAccs   = Statistics.newCounter   ("implicit cache accesses", "typer")
   val implicitCacheHits   = Statistics.newSubCounter("implicit cache hits", implicitCacheAccs)
 }
-
-// only used when -Xdivergence211 is turned off
-class DivergentImplicit extends Exception
-object DivergentImplicit extends DivergentImplicit
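
A user-level sketch (hypothetical trait name) of the `@implicitNotFound` interpolation driven by the `Intersobralator` regex and `Message.format` above:

    import scala.annotation.implicitNotFound

    @implicitNotFound("No JsonWriter available for ${T}") // ${T} is interpolated with the inferred argument
    trait JsonWriter[T]
    // implicitly[JsonWriter[Int]] reports: "No JsonWriter available for Int"
    // referencing an undeclared name such as ${U} would be caught by `validate` above
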
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 55e0a95..fc0e2c7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -7,12 +7,11 @@ package scala.tools.nsc
 package typechecker
 
 import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
 import scala.util.control.ControlThrowable
 import symtab.Flags._
-import scala.annotation.tailrec
+import scala.reflect.internal.Depth
 
-/** This trait ...
+/** This trait contains methods related to type parameter inference.
  *
  *  @author Martin Odersky
  *  @version 1.0
@@ -22,138 +21,64 @@ trait Infer extends Checkable {
 
   import global._
   import definitions._
-  import typer.printInference
   import typeDebug.ptBlock
-
-/* -- Type parameter inference utility functions --------------------------- */
-
-  private def assertNonCyclic(tvar: TypeVar) =
-    assert(tvar.constr.inst != tvar, tvar.origin)
-
-  /** The formal parameter types corresponding to <code>formals</code>.
-   *  If <code>formals</code> has a repeated last parameter, a list of
-   *  (nargs - params.length + 1) copies of its type is returned.
-   *  By-name types are replaced with their underlying type.
+  import typeDebug.str.parentheses
+  import typingStack.{ printTyping }
+
+  /** The formal parameter types corresponding to `formals`.
+   *  If `formals` has a repeated last parameter, a list of
+   *  (numArgs - numFormals + 1) copies of its type is appended
+   *  to the other formals. By-name types are replaced with their
+   *  underlying type.
    *
    *  @param removeByName allows keeping ByName parameters. Used in NamesDefaults.
    *  @param removeRepeated allows keeping repeated parameter (if there's one argument). Used in NamesDefaults.
    */
-  def formalTypes(formals: List[Type], nargs: Int, removeByName: Boolean = true, removeRepeated: Boolean = true): List[Type] = {
-    val formals1 = if (removeByName) formals mapConserve {
-      case TypeRef(_, ByNameParamClass, List(arg)) => arg
-      case formal => formal
-    } else formals
-    if (isVarArgTypes(formals1) && (removeRepeated || formals.length != nargs)) {
-      val ft = formals1.last.dealiasWiden.typeArgs.head
-      formals1.init ::: (for (i <- List.range(formals1.length - 1, nargs)) yield ft)
-    } else formals1
+  def formalTypes(formals: List[Type], numArgs: Int, removeByName: Boolean = true, removeRepeated: Boolean = true): List[Type] = {
+    val numFormals = formals.length
+    val formals1   = if (removeByName) formals mapConserve dropByName else formals
+    val expandLast = (
+         (removeRepeated || numFormals != numArgs)
+      && isVarArgTypes(formals1)
+    )
+    def lastType = formals1.last.dealiasWiden.typeArgs.head
+    def expanded(n: Int) = (1 to n).toList map (_ => lastType)
+
+    if (expandLast)
+      formals1.init ::: expanded(numArgs - numFormals + 1)
+    else
+      formals1
   }
 
-  /** Returns `(formals, formalsExpanded)` where `formalsExpanded` are the expected types
-   * for the `nbSubPats` sub-patterns of an extractor pattern, of which the corresponding
-   * unapply[Seq] call is assumed to have result type `resTp`.
-   *
-   * `formals` are the formal types before expanding a potential repeated parameter (must come last in `formals`, if at all)
-   *
-   * @param nbSubPats          The number of arguments to the extractor pattern
-   * @param effectiveNbSubPats `nbSubPats`, unless there is one sub-pattern which, after unwrapping
-   *                           bind patterns, is a Tuple pattern, in which case it is the number of
-   *                           elements. Used to issue warnings about binding a `TupleN` to a single value.
-   * @throws TypeError when the unapply[Seq] definition is ill-typed
-   * @returns (null, null) when the expected number of sub-patterns cannot be satisfied by the given extractor
-   *
-   * This is the spec currently implemented -- TODO: update it.
-   *
-   *   8.1.8 ExtractorPatterns
-   *
-   *   An extractor pattern x(p1, ..., pn) where n ≥ 0 is of the same syntactic form as a constructor pattern.
-   *   However, instead of a case class, the stable identifier x denotes an object which has a member method named unapply or unapplySeq that matches the pattern.
-   *
-   *   An `unapply` method with result type `R` in an object `x` matches the
-   *   pattern `x(p_1, ..., p_n)` if it takes exactly one argument and, either:
-   *     - `n = 0` and `R =:= Boolean`, or
-   *     - `n = 1` and `R <:< Option[T]`, for some type `T`.
-   *        The argument pattern `p1` is typed in turn with expected type `T`.
-   *     - Or, `n > 1` and `R <:< Option[Product_n[T_1, ..., T_n]]`, for some
-   *       types `T_1, ..., T_n`. The argument patterns `p_1, ..., p_n` are
-   *       typed with expected types `T_1, ..., T_n`.
-   *
-   *   An `unapplySeq` method in an object `x` matches the pattern `x(p_1, ..., p_n)`
-   *   if it takes exactly one argument and its result type is of the form `Option[S]`,
-   *   where either:
-   *     - `S` is a subtype of `Seq[U]` for some element type `U`, (set `m = 0`)
-   *     - or `S` is a `ProductX[T_1, ..., T_m]` and `T_m <: Seq[U]` (`m <= n`).
-   *
-   *   The argument patterns `p_1, ..., p_n` are typed with expected types
-   *   `T_1, ..., T_m, U, ..., U`. Here, `U` is repeated `n-m` times.
-   *
+  /** Sorts the alternatives according to the given comparison function.
+   *  Returns a list containing the best alternative as well as any which
+   *  the best fails to improve upon.
    */
-  def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int,
-                           unappSym: Symbol, effectiveNbSubPats: Int): (List[Type], List[Type]) = {
-    val isUnapplySeq     = unappSym.name == nme.unapplySeq
-    val booleanExtractor = resTp.typeSymbolDirect == BooleanClass
-
-    def seqToRepeatedChecked(tp: Type) = {
-      val toRepeated = seqToRepeated(tp)
-      if (tp eq toRepeated) throw new TypeError("(the last tuple-component of) the result type of an unapplySeq must be a Seq[_]")
-      else toRepeated
-    }
-
-    // empty list --> error, otherwise length == 1
-    lazy val optionArgs = resTp.baseType(OptionClass).typeArgs
-    // empty list --> not a ProductN, otherwise product element types
-    def productArgs = getProductArgs(optionArgs.head)
-
-    val formals =
-      // convert Seq[T] to the special repeated argument type
-      // so below we can use formalTypes to expand formals to correspond to the number of actuals
-      if (isUnapplySeq) {
-        if (optionArgs.nonEmpty)
-          productArgs match {
-            case Nil => List(seqToRepeatedChecked(optionArgs.head))
-            case normalTps :+ seqTp => normalTps :+ seqToRepeatedChecked(seqTp)
-          }
-        else throw new TypeError(s"result type $resTp of unapplySeq defined in ${unappSym.fullLocationString} does not conform to Option[_]")
-      } else {
-        if (booleanExtractor && nbSubPats == 0) Nil
-        else if (optionArgs.nonEmpty)
-          if (nbSubPats == 1) {
-            val productArity = productArgs.size
-            if (settings.lint.value && productArity > 1 && productArity != effectiveNbSubPats)
-              global.currentUnit.warning(pos,
-                s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
-            optionArgs
-          }
-          // TODO: update spec to reflect we allow any ProductN, not just TupleN
-          else productArgs
-        else
-          throw new TypeError(s"result type $resTp of unapply defined in ${unappSym.fullLocationString} does not conform to Option[_] or Boolean")
-      }
-
-    // for unapplySeq, replace last vararg by as many instances as required by nbSubPats
-    val formalsExpanded =
-      if (isUnapplySeq && formals.nonEmpty) formalTypes(formals, nbSubPats)
-      else formals
+  private def bestAlternatives(alternatives: List[Symbol])(isBetter: (Symbol, Symbol) => Boolean): List[Symbol] = {
+    def improves(sym1: Symbol, sym2: Symbol) = (
+         (sym2 eq NoSymbol)
+      || sym2.isError
+      || (sym2 hasAnnotation BridgeClass)
+      || isBetter(sym1, sym2)
+    )
 
-    if (formalsExpanded.lengthCompare(nbSubPats) != 0) (null, null)
-    else (formals, formalsExpanded)
+    alternatives sortWith improves match {
+      case best :: rest if rest.nonEmpty => best :: rest.filterNot(alt => improves(best, alt))
+      case bests                         => bests
+    }
   }
 
-  def actualTypes(actuals: List[Type], nformals: Int): List[Type] =
-    if (nformals == 1 && !hasLength(actuals, 1))
-      List(if (actuals.isEmpty) UnitClass.tpe else tupleType(actuals))
-    else actuals
-
-  def actualArgs(pos: Position, actuals: List[Tree], nformals: Int): List[Tree] = {
-    val inRange = nformals == 1 && !hasLength(actuals, 1) && actuals.lengthCompare(MaxTupleArity) <= 0
-    if (inRange && !phase.erasedTypes) List(atPos(pos)(gen.mkTuple(actuals)))
-    else actuals
+  // we must not allow CyclicReference to be thrown when sym.info is called
+  // in checkAccessible, because that would mark the symbol erroneous, which it
+  // is not. But if it's a true CyclicReference then macro def will report it.
+  // See comments to TypeSigError for an explanation of this special case.
+  // [Eugene] is there a better way?
+  private object CheckAccessibleMacroCycle extends TypeCompleter {
+    val tree = EmptyTree
+    override def complete(sym: Symbol) = ()
   }
 
   /** A fresh type variable with given type parameter as origin.
-   *
-   *  @param tparam ...
-   *  @return       ...
    */
   def freshVar(tparam: Symbol): TypeVar = TypeVar(tparam)
 
@@ -170,50 +95,34 @@ trait Infer extends Checkable {
    */
   object instantiate extends TypeMap {
     private var excludedVars = immutable.Set[TypeVar]()
+    private def applyTypeVar(tv: TypeVar): Type = tv match {
+      case TypeVar(origin, constr) if !constr.instValid => throw new DeferredNoInstance(() => s"no unique instantiation of type variable $origin could be found")
+      case _ if excludedVars(tv)                        => throw new NoInstance("cyclic instantiation")
+      case TypeVar(_, constr)                           =>
+        excludedVars += tv
+        try apply(constr.inst)
+        finally excludedVars -= tv
+    }
     def apply(tp: Type): Type = tp match {
-      case WildcardType | BoundedWildcardType(_) | NoType =>
-        throw new NoInstance("undetermined type")
-      case tv @ TypeVar(origin, constr) if !tv.untouchable =>
-        if (constr.inst == NoType) {
-          throw new DeferredNoInstance(() =>
-            "no unique instantiation of type variable " + origin + " could be found")
-        } else if (excludedVars(tv)) {
-          throw new NoInstance("cyclic instantiation")
-        } else {
-          excludedVars += tv
-          val res = apply(constr.inst)
-          excludedVars -= tv
-          res
-        }
-      case _ =>
-        mapOver(tp)
+      case WildcardType | BoundedWildcardType(_) | NoType => throw new NoInstance("undetermined type")
+      case tv: TypeVar if !tv.untouchable                 => applyTypeVar(tv)
+      case _                                              => mapOver(tp)
     }
   }
 
+  @inline final def falseIfNoInstance(body: => Boolean): Boolean =
+    try body catch { case _: NoInstance => false }
+
   /** Is type fully defined, i.e. no embedded anytypes or wildcards in it?
-   *
-   *  @param tp ...
-   *  @return   ...
    */
   private[typechecker] def isFullyDefined(tp: Type): Boolean = tp match {
-    case WildcardType | BoundedWildcardType(_) | NoType =>
-      false
-    case NoPrefix | ThisType(_) | ConstantType(_) =>
-      true
-    case TypeRef(pre, sym, args) =>
-      isFullyDefined(pre) && (args forall isFullyDefined)
-    case SingleType(pre, sym) =>
-      isFullyDefined(pre)
-    case RefinedType(ts, decls) =>
-      ts forall isFullyDefined
-    case TypeVar(origin, constr) if (constr.inst == NoType) =>
-      false
-    case _ =>
-      try {
-        instantiate(tp); true
-      } catch {
-        case ex: NoInstance => false
-      }
+    case WildcardType | BoundedWildcardType(_) | NoType => false
+    case NoPrefix | ThisType(_) | ConstantType(_)       => true
+    case TypeRef(pre, _, args)                          => isFullyDefined(pre) && (args forall isFullyDefined)
+    case SingleType(pre, _)                             => isFullyDefined(pre)
+    case RefinedType(ts, _)                             => ts forall isFullyDefined
+    case TypeVar(_, constr) if constr.inst == NoType    => false
+    case _                                              => falseIfNoInstance({ instantiate(tp) ; true })
   }
 
   /** Solve constraint collected in types `tvars`.
@@ -225,32 +134,17 @@ trait Infer extends Checkable {
    *  @param upper      When `true` search for max solution else min.
    *  @throws NoInstance
    */
-  def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol],
-                  variances: List[Int], upper: Boolean, depth: Int): List[Type] = {
-
-    if (tvars.nonEmpty)
-      printInference("[solve types] solving for " + tparams.map(_.name).mkString(", ") + " in " + tvars.mkString(", "))
-
-    if (!solve(tvars, tparams, variances, upper, depth)) {
-      // no panic, it's good enough to just guess a solution, we'll find out
-      // later whether it works.  *ZAP* @M danger, Will Robinson! this means
-      // that you should never trust inferred type arguments!
-      //
-      // Need to call checkBounds on the args/typars or type1 on the tree
-      // for the expression that results from type inference see e.g., #2421:
-      // implicit search had been ignoring this caveat
-      // throw new DeferredNoInstance(() =>
-      //   "no solution exists for constraints"+(tvars map boundsString))
+  def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol], variances: List[Variance], upper: Boolean, depth: Depth): List[Type] = {
+    if (tvars.isEmpty) Nil else {
+      printTyping("solving for " + parentheses((tparams, tvars).zipped map ((p, tv) => s"${p.name}: $tv")))
+      // !!! What should be done with the return value of "solve", which is at present ignored?
+      // The historical commentary says "no panic, it's good enough to just guess a solution,
+      // we'll find out later whether it works", meaning don't issue an error here when types
+      // don't conform to bounds. That means you can never trust the results of implicit search.
+      // For an example where this was not being heeded, SI-2421.
+      solve(tvars, tparams, variances, upper, depth)
+      tvars map instantiate
     }
-    for (tvar <- tvars ; if tvar.constr.inst == tvar) {
-      if (tvar.origin.typeSymbol.info eq ErrorType)
-        // this can happen if during solving a cyclic type parameter
-        // such as T <: T gets completed. See #360
-        tvar.constr.inst = ErrorType
-      else
-        abort(tvar.origin+" at "+tvar.origin.typeSymbol.owner)
-    }
-    tvars map instantiate
   }
 
   def skipImplicit(tp: Type) = tp match {
@@ -265,33 +159,29 @@ trait Infer extends Checkable {
    *  This method seems to be performance critical.
    */
   def normalize(tp: Type): Type = tp match {
-    case mt @ MethodType(params, restpe) if mt.isImplicit =>
-      normalize(restpe)
-    case mt @ MethodType(_, restpe) if !mt.isDependentMethodType =>
-      functionType(mt.paramTypes, normalize(restpe))
-    case NullaryMethodType(restpe) =>
-      normalize(restpe)
-    case ExistentialType(tparams, qtpe) =>
-      newExistentialType(tparams, normalize(qtpe))
-    case tp1 =>
-      tp1 // @MAT aliases already handled by subtyping
+    case PolyType(_, restpe) =>
+      logResult(sm"""|Normalizing PolyType in infer:
+                     |  was: $restpe
+                     |  now""")(normalize(restpe))
+    case mt @ MethodType(_, restpe) if mt.isImplicit             => normalize(restpe)
+    case mt @ MethodType(_, restpe) if !mt.isDependentMethodType => functionType(mt.paramTypes, normalize(restpe))
+    case NullaryMethodType(restpe)                               => normalize(restpe)
+    case ExistentialType(tparams, qtpe)                          => newExistentialType(tparams, normalize(qtpe))
+    case _                                                       => tp // @MAT aliases already handled by subtyping
   }
 
   private lazy val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
   private lazy val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR)
 
   /** The context-dependent inferencer part */
-  class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
+  abstract class Inferencer extends InferencerContextErrors with InferCheckable {
+    def context: Context
     import InferErrorGen._
 
     /* -- Error Messages --------------------------------------------------- */
     def setError[T <: Tree](tree: T): T = {
-      debuglog("set error: "+ tree)
-      // this breaks -Ydebug pretty radically
-      // if (settings.debug.value) { // DEBUG
-      //   println("set error: "+tree);
-      //   throw new Error()
-      // }
+      // SI-7388, one can incur a cycle calling sym.toString
+      // (but it'd be nicer if that weren't so)
       def name = {
         val sym = tree.symbol
         val nameStr = try sym.toString catch { case _: CyclicReference => sym.nameString }
@@ -301,7 +191,7 @@ trait Infer extends Checkable {
       def errorValue  = if (context.reportErrors) context.owner.newErrorValue(name) else stdErrorValue
       def errorSym    = if (tree.isType) errorClass else errorValue
 
-      if (tree.hasSymbol)
+      if (tree.hasSymbolField)
         tree setSymbol errorSym
 
       tree setType ErrorType
@@ -311,102 +201,87 @@ trait Infer extends Checkable {
 
     def issue(err: AbsTypeError): Unit = context.issue(err)
 
-    def isPossiblyMissingArgs(found: Type, req: Type) = (
-      false
-      /** However it is that this condition is expected to imply
-       *  "is possibly missing args", it is too weak.  It is
-       *  better to say nothing than to offer misleading guesses.
+    def explainTypes(tp1: Type, tp2: Type) = {
+      if (context.reportErrors)
+        withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2))
+    }
 
-         (found.resultApprox ne found)
-      && isWeaklyCompatible(found.resultApprox, req)
-      */
-    )
+    // When filtering sym down to the accessible alternatives leaves us empty handed.
+    private def checkAccessibleError(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree = {
+      if (settings.debug) {
+        Console.println(context)
+        Console.println(tree)
+        Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType))
+      }
+      ErrorUtils.issueTypeError(AccessError(tree, sym, pre, context.enclClass.owner,
+        if (settings.check.isDefault)
+          analyzer.lastAccessCheckDetails
+        else
+          ptBlock("because of an internal error (no accessible symbol)",
+            "sym.ownerChain"                -> sym.ownerChain,
+            "underlyingSymbol(sym)"         -> underlyingSymbol(sym),
+            "pre"                           -> pre,
+            "site"                          -> site,
+            "tree"                          -> tree,
+            "sym.accessBoundary(sym.owner)" -> sym.accessBoundary(sym.owner),
+            "context.owner"                 -> context.owner,
+            "context.outer.enclClass.owner" -> context.outer.enclClass.owner
+          )
+      ))(context)
 
-    def explainTypes(tp1: Type, tp2: Type) =
-      withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2))
+      setError(tree)
+    }
 
     /* -- Tests & Checks---------------------------------------------------- */
 
-    /** Check that <code>sym</code> is defined and accessible as a member of
-     *  tree <code>site</code> with type <code>pre</code> in current context.
+    /** Check that `sym` is defined and accessible as a member of
+     *  tree `site` with type `pre` in current context.
+     *  @PP: In case it's not abundantly obvious to anyone who might read
+     *  this, the method does a lot more than "check" these things, as does
+     *  nearly every method in the compiler, so don't act all shocked.
+     *  This particular example "checks" its way to assigning both the
+     *  symbol and type of the incoming tree, in addition to forcing lots
+     *  of symbol infos on its way to transforming java raw types (but
+     *  only of terms - why?)
      *
      * Note: pre is not refchecked -- moreover, refchecking the resulting tree may not refcheck pre,
      *       since pre may not occur in its type (callers should wrap the result in a TypeTreeWithDeferredRefCheck)
      */
-    def checkAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree =
-      if (sym.isError) {
-        tree setSymbol sym setType ErrorType
-      } else {
-        val topClass = context.owner.enclosingTopLevelClass
-        if (context.unit.exists)
-          context.unit.depends += sym.enclosingTopLevelClass
-
-        var sym1 = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super]))
-        // Console.println("check acc " + (sym, sym1) + ":" + (sym.tpe, sym1.tpe) + " from " + pre);//DEBUG
-        if (sym1 == NoSymbol && sym.isJavaDefined && context.unit.isJava) // don't try to second guess Java; see #4402
-          sym1 = sym
-
-        if (sym1 == NoSymbol) {
-          if (settings.debug.value) {
-            Console.println(context)
-            Console.println(tree)
-            Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType))
-          }
-          ErrorUtils.issueTypeError(AccessError(tree, sym, pre, context.enclClass.owner,
-            if (settings.check.isDefault)
-              analyzer.lastAccessCheckDetails
-            else
-              ptBlock("because of an internal error (no accessible symbol)",
-                "sym.ownerChain"                -> sym.ownerChain,
-                "underlyingSymbol(sym)"         -> underlyingSymbol(sym),
-                "pre"                           -> pre,
-                "site"                          -> site,
-                "tree"                          -> tree,
-                "sym.accessBoundary(sym.owner)" -> sym.accessBoundary(sym.owner),
-                "context.owner"                 -> context.owner,
-                "context.outer.enclClass.owner" -> context.outer.enclClass.owner
-              )
-          ))(context)
-          setError(tree)
-        }
-        else {
-          if (context.owner.isTermMacro && (sym1 hasFlag LOCKED)) {
-            // we must not let CyclicReference to be thrown from sym1.info
-            // because that would mark sym1 erroneous, which it is not
-            // but if it's a true CyclicReference then macro def will report it
-            // see comments to TypeSigError for an explanation of this special case
-            // [Eugene] is there a better way?
-            val dummy = new TypeCompleter { val tree = EmptyTree; override def complete(sym: Symbol) {} }
-            throw CyclicReference(sym1, dummy)
-          }
+    def checkAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree = {
+      def malformed(ex: MalformedType, instance: Type): Type = {
+        val what    = if (ex.msg contains "malformed type") "is malformed" else s"contains a ${ex.msg}"
+        val message = s"\n because its instance type $instance $what"
+        val error   = AccessError(tree, sym, pre, context.enclClass.owner, message)
+        ErrorUtils.issueTypeError(error)(context)
+        ErrorType
+      }
+      def accessible = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super])) match {
+        case NoSymbol if sym.isJavaDefined && context.unit.isJava => sym  // don't try to second guess Java; see #4402
+        case sym1                                                 => sym1
+      }
+      // XXX So... what's this for exactly?
+      if (context.unit.exists)
+        context.unit.depends += sym.enclosingTopLevelClass
 
-          if (sym1.isTerm)
-            sym1.cookJavaRawInfo() // xform java rawtypes into existentials
-
-          val owntype = {
-            try pre.memberType(sym1)
-            catch {
-              case ex: MalformedType =>
-                if (settings.debug.value) ex.printStackTrace
-                val sym2 = underlyingSymbol(sym1)
-                val itype = pre.memberType(sym2)
-                ErrorUtils.issueTypeError(
-                  AccessError(tree, sym, pre, context.enclClass.owner,
-                          "\n because its instance type "+itype+
-                          (if ("malformed type: "+itype.toString==ex.msg) " is malformed"
-                           else " contains a "+ex.msg)))(context)
-                ErrorType
-            }
-          }
-          tree setSymbol sym1 setType {
+      if (sym.isError)
+        tree setSymbol sym setType ErrorType
+      else accessible match {
+        case NoSymbol                                                 => checkAccessibleError(tree, sym, pre, site)
+        case sym if context.owner.isTermMacro && (sym hasFlag LOCKED) => throw CyclicReference(sym, CheckAccessibleMacroCycle)
+        case sym                                                      =>
+          val sym1 = if (sym.isTerm) sym.cookJavaRawInfo() else sym // xform java rawtypes into existentials
+          val owntype = (
+            try pre memberType sym1
+            catch { case ex: MalformedType => malformed(ex, pre memberType underlyingSymbol(sym)) }
+          )
+          tree setSymbol sym1 setType (
             pre match {
               case _: SuperType => owntype map (tp => if (tp eq pre) site.symbol.thisType else tp)
               case _            => owntype
             }
-          }
-        }
+          )
       }
-
+    }
 
     /** "Compatible" means conforming after conversions.
      *  "Raising to a thunk" is not implicit; therefore, for purposes of applicability and
@@ -417,45 +292,38 @@ trait Infer extends Checkable {
      *  since that induces a tie between m(=>A) and m(=>A,B*) [SI-3761]
      */
     private def isCompatible(tp: Type, pt: Type): Boolean = {
-      def isCompatibleByName(tp: Type, pt: Type): Boolean = pt match {
-        case TypeRef(_, ByNameParamClass, List(res)) if !isByNameParamType(tp) => isCompatible(tp, res)
-        case _ => false
-      }
+      def isCompatibleByName(tp: Type, pt: Type): Boolean = (
+           isByNameParamType(pt)
+        && !isByNameParamType(tp)
+        && isCompatible(tp, dropByName(pt))
+      )
       val tp1 = normalize(tp)
-      (tp1 weak_<:< pt) || isCoercible(tp1, pt) || isCompatibleByName(tp, pt)
+
+      (    (tp1 weak_<:< pt)
+        || isCoercible(tp1, pt)
+        || isCompatibleByName(tp, pt)
+      )
     }
-    def isCompatibleArgs(tps: List[Type], pts: List[Type]) =
-      (tps corresponds pts)(isCompatible)
+    def isCompatibleArgs(tps: List[Type], pts: List[Type]) = (tps corresponds pts)(isCompatible)
 
-    def isWeaklyCompatible(tp: Type, pt: Type): Boolean =
-      pt.typeSymbol == UnitClass || // can perform unit coercion
-      isCompatible(tp, pt) ||
-      tp.isInstanceOf[MethodType] && // can perform implicit () instantiation
-      tp.params.isEmpty && isCompatible(tp.resultType, pt)
+    def isWeaklyCompatible(tp: Type, pt: Type): Boolean = {
+      def isCompatibleNoParamsMethod = tp match {
+        case MethodType(Nil, restpe) => isCompatible(restpe, pt)
+        case _                       => false
+      }
+      (    pt.typeSymbol == UnitClass // can perform unit coercion
+        || isCompatible(tp, pt)
+        || isCompatibleNoParamsMethod // can perform implicit () instantiation
+      )
+    }
 
-    /** Like weakly compatible but don't apply any implicit conversions yet.
+    /*  Like weakly compatible but don't apply any implicit conversions yet.
      *  Used when comparing the result type of a method with its prototype.
-     *
-     *  [Martin] I think Infer is also created by Erasure, with the default
-     *  implementation of isCoercible
-     *  [Paulp] (Assuming the above must refer to my comment on isCoercible)
-     *  Nope, I examined every occurrence of Inferencer in trunk.  It
-     *  appears twice as a self-type, once at its definition, and once
-     *  where it is instantiated in Typers.  There are no others.
-     *
-         % ack -A0 -B0 --no-filename '\bInferencer\b' src
-             self: Inferencer =>
-             self: Inferencer =>
-           class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
-             val infer = new Inferencer(context0) {
      */
     def isConservativelyCompatible(tp: Type, pt: Type): Boolean =
       context.withImplicitsDisabled(isWeaklyCompatible(tp, pt))
 
-    /** This is overridden in the Typer.infer with some logic, but since
-     *  that's the only place in the compiler an Inferencer is ever created,
-     *  I suggest this should either be abstract or have the implementation.
-     */
+    // Overridden at the point of instantiation, where inferView is visible.
     def isCoercible(tp: Type, pt: Type): Boolean = false
 
     /* -- Type instantiation------------------------------------------------ */
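
For reference, a sketch of the two extra compatibilities `isWeaklyCompatible` admits above beyond ordinary conformance, with invented names:

    object CompatSketch {
      def g(): Int = 1
      val u: Unit = 42 // any type is weakly compatible with Unit ("unit coercion")
      val i: Int  = g  // empty-parameter method: () is inserted, MethodType(Nil, Int) vs Int
    }
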
@@ -464,112 +332,99 @@ trait Infer extends Checkable {
      *  by existentially bound variables.
      */
     def makeFullyDefined(tp: Type): Type = {
-      val tparams = new ListBuffer[Symbol]
+      var tparams: List[Symbol] = Nil
       def addTypeParam(bounds: TypeBounds): Type = {
         val tparam = context.owner.newExistential(newTypeName("_"+tparams.size), context.tree.pos.focus) setInfo bounds
-        tparams += tparam
+        tparams ::= tparam
         tparam.tpe
       }
       val tp1 = tp map {
-        case WildcardType =>
-          addTypeParam(TypeBounds.empty)
-        case BoundedWildcardType(bounds) =>
-          addTypeParam(bounds)
-        case t => t
+        case WildcardType                => addTypeParam(TypeBounds.empty)
+        case BoundedWildcardType(bounds) => addTypeParam(bounds)
+        case t                           => t
       }
-      existentialAbstraction(tparams.toList, tp1)
+      if (tp eq tp1) tp
+      else existentialAbstraction(tparams.reverse, tp1)
     }
+    def ensureFullyDefined(tp: Type): Type = if (isFullyDefined(tp)) tp else makeFullyDefined(tp)
 
     /** Return inferred type arguments of polymorphic expression, given
-     *  its type parameters and result type and a prototype <code>pt</code>.
-     *  If no minimal type variables exist that make the
-     *  instantiated type a subtype of <code>pt</code>, return null.
-     *
-     *  @param tparams ...
-     *  @param restpe  ...
-     *  @param pt      ...
-     *  @return        ...
+     *  type vars, its type parameters and result type and a prototype `pt`.
+     *  If the type variables cannot be instantiated such that the type
+     *  conforms to `pt`, return null.
      */
-    private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean = false): (List[Type], List[TypeVar]) = {
-      val tvars = tparams map freshVar
-      val instResTp = restpe.instantiateTypeParams(tparams, tvars)
-      if ( if (useWeaklyCompatible) isWeaklyCompatible(instResTp, pt) else isCompatible(instResTp, pt) ) {
-        try {
-          // If the restpe is an implicit method, and the expected type is fully defined
-          // optimize type variables wrt to the implicit formals only; ignore the result type.
-          // See test pos/jesper.scala
-          val varianceType = restpe match {
-            case mt: MethodType if mt.isImplicit && isFullyDefined(pt) =>
-              MethodType(mt.params, AnyClass.tpe)
-            case _ =>
-              restpe
-          }
-          //println("try to solve "+tvars+" "+tparams)
-          (solvedTypes(tvars, tparams, tparams map varianceInType(varianceType),
-                      false, lubDepth(List(restpe, pt))), tvars)
-        } catch {
-          case ex: NoInstance => (null, null)
-        }
-      } else (null, null)
+    private def exprTypeArgs(tvars: List[TypeVar], tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean): List[Type] = {
+      def restpeInst = restpe.instantiateTypeParams(tparams, tvars)
+      def conforms   = if (useWeaklyCompatible) isWeaklyCompatible(restpeInst, pt) else isCompatible(restpeInst, pt)
+      // If the restpe is an implicit method, and the expected type is fully defined
+      // optimize type variables wrt to the implicit formals only; ignore the result type.
+      // See test pos/jesper.scala
+      def variance = restpe match {
+        case mt: MethodType if mt.isImplicit && isFullyDefined(pt) => MethodType(mt.params, AnyTpe)
+        case _                                                     => restpe
+      }
+      def solve() = solvedTypes(tvars, tparams, tparams map varianceInType(variance), upper = false, lubDepth(restpe :: pt :: Nil))
+
+      if (conforms)
+        try solve() catch { case _: NoInstance => null }
+      else
+        null
     }
+    /** Overload which allocates fresh type vars.
+     *  The other one exists because apparently inferExprInstance needs access to the typevars
+     *  after the call, and its wasteful to return a tuple and throw it away almost every time.
+     */
+    private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean): List[Type] =
+      exprTypeArgs(tparams map freshVar, tparams, restpe, pt, useWeaklyCompatible)
 
     /** Return inferred proto-type arguments of function, given
     *  its type and value parameters and result type, and a
-    *  prototype <code>pt</code> for the function result.
+    *  prototype `pt` for the function result.
     *  Type arguments need to be either determined precisely by
     *  the prototype, or they are maximized, if they occur only covariantly
     *  in the value parameter list.
     *  If instantiation of a type parameter fails,
     *  take WildcardType for the proto-type argument.
-    *
-    *  @param tparams ...
-    *  @param formals ...
-    *  @param restype ...
-    *  @param pt      ...
-    *  @return        ...
     */
-    def protoTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type,
-                      pt: Type): List[Type] = {
-      /** Map type variable to its instance, or, if `variance` is covariant/contravariant,
-       *  to its upper/lower bound */
-      def instantiateToBound(tvar: TypeVar, variance: Int): Type = try {
+    def protoTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type, pt: Type): List[Type] = {
+      // Map type variable to its instance, or, if `variance` is variant,
+      // to its upper or lower bound
+      def instantiateToBound(tvar: TypeVar, variance: Variance): Type = {
         lazy val hiBounds = tvar.constr.hiBounds
         lazy val loBounds = tvar.constr.loBounds
-        lazy val upper = glb(hiBounds)
-        lazy val lower = lub(loBounds)
+        lazy val upper    = glb(hiBounds)
+        lazy val lower    = lub(loBounds)
         def setInst(tp: Type): Type = {
           tvar setInst tp
-          assertNonCyclic(tvar)//debug
+          assert(tvar.constr.inst != tvar, tvar.origin)
           instantiate(tvar.constr.inst)
         }
-        //Console.println("instantiate "+tvar+tvar.constr+" variance = "+variance);//DEBUG
-        if (tvar.constr.inst != NoType)
+        if (tvar.constr.instValid)
           instantiate(tvar.constr.inst)
-        else if ((variance & COVARIANT) != 0 && hiBounds.nonEmpty)
-          setInst(upper)
-        else if ((variance & CONTRAVARIANT) != 0 && loBounds.nonEmpty)
+        else if (loBounds.nonEmpty && variance.isContravariant)
           setInst(lower)
-        else if (hiBounds.nonEmpty && loBounds.nonEmpty && upper <:< lower)
+        else if (hiBounds.nonEmpty && (variance.isPositive || loBounds.nonEmpty && upper <:< lower))
           setInst(upper)
         else
           WildcardType
-      } catch {
-        case ex: NoInstance => WildcardType
       }
+
       val tvars = tparams map freshVar
       if (isConservativelyCompatible(restpe.instantiateTypeParams(tparams, tvars), pt))
         map2(tparams, tvars)((tparam, tvar) =>
-          instantiateToBound(tvar, varianceInTypes(formals)(tparam)))
+          try instantiateToBound(tvar, varianceInTypes(formals)(tparam))
+          catch { case ex: NoInstance => WildcardType }
+        )
       else
-        tvars map (tvar => WildcardType)
+        tvars map (_ => WildcardType)
     }
 
     /** [Martin] Can someone comment this please? I have no idea what it's for
      *  and the code is not exactly readable.
      */
     object AdjustedTypeArgs {
-      val Result = scala.collection.mutable.LinkedHashMap
-      type Result = scala.collection.mutable.LinkedHashMap[Symbol, Option[Type]]
+      val Result  = mutable.LinkedHashMap
+      type Result = mutable.LinkedHashMap[Symbol, Option[Type]]
 
       def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists(
         (m collect {case (p, Some(a)) => (p, a)}).unzip  ))
@@ -586,7 +441,7 @@ trait Infer extends Checkable {
         def unapply(m: Result): Some[(List[Symbol], List[Type], List[Type], List[Symbol])] = Some(toLists{
           val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null)
           val (okArgs, okTparams) = ok.unzip
-          (okArgs, okTparams, m.values.map(_.getOrElse(NothingClass.tpe)), nok.keys)
+          (okArgs, okTparams, m.values.map(_.getOrElse(NothingTpe)), nok.keys)
         })
       }
 
@@ -608,7 +463,7 @@ trait Infer extends Checkable {
      *
      * Rewrite for repeated param types:  Map T* entries to Seq[T].
      *  @return map from tparams to inferred arg, if inference was successful, tparams that map to None are considered left undetermined
-     *    type parameters that are inferred as `scala.Nothing` and that are not covariant in <code>restpe</code> are taken to be undetermined
+     *    type parameters that are inferred as `scala.Nothing` and that are not covariant in `restpe` are taken to be undetermined
      */
     def adjustTypeArgs(tparams: List[Symbol], tvars: List[TypeVar], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result  = {
       val buf = AdjustedTypeArgs.Result.newBuilder[Symbol, Option[Type]]
@@ -616,33 +471,32 @@ trait Infer extends Checkable {
       foreach3(tparams, tvars, targs) { (tparam, tvar, targ) =>
         val retract = (
               targ.typeSymbol == NothingClass                                         // only retract Nothings
-          && (restpe.isWildcard || (varianceInType(restpe)(tparam) & COVARIANT) == 0) // don't retract covariant occurrences
+          && (restpe.isWildcard || !varianceInType(restpe)(tparam).isPositive)  // don't retract covariant occurrences
         )
 
-        // checks opt.virtPatmat directly so one need not run under -Xexperimental to use virtpatmat
         buf += ((tparam,
           if (retract) None
           else Some(
             if (targ.typeSymbol == RepeatedParamClass)     targ.baseType(SeqClass)
             else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass)
             // this infers Foo.type instead of "object Foo" (see also widenIfNecessary)
-            else if (targ.typeSymbol.isModuleClass || ((opt.experimental || opt.virtPatmat) && tvar.constr.avoidWiden)) targ
+            else if (targ.typeSymbol.isModuleClass || tvar.constr.avoidWiden) targ
             else targ.widen
           )
         ))
       }
-      buf.result
+      buf.result()
     }
 
     /** Return inferred type arguments, given type parameters, formal parameters,
     *  argument types, result type and expected result type.
-    *  If this is not possible, throw a <code>NoInstance</code> exception.
-    *  Undetermined type arguments are represented by `definitions.NothingClass.tpe`.
+    *  If this is not possible, throw a `NoInstance` exception.
+    *  Undetermined type arguments are represented by `definitions.NothingTpe`.
     *  No check that inferred parameters conform to their bounds is made here.
     *
     *  @param   tparams         the type parameters of the method
     *  @param   formals         the value parameter types of the method
-    *  @param   restp           the result type of the method
+    *  @param   restpe          the result type of the method
     *  @param   argtpes         the argument types of the application
     *  @param   pt              the expected return type of the application
     *  @return  @see adjustTypeArgs
@@ -689,35 +543,79 @@ trait Infer extends Checkable {
             "argument expression's type is not compatible with formal parameter type" + foundReqMsg(tp1, pt1))
         }
       }
-      val targs = solvedTypes(
-        tvars, tparams, tparams map varianceInTypes(formals),
-        false, lubDepth(formals) max lubDepth(argtpes)
-      )
+      val targs = solvedTypes(tvars, tparams, tparams map varianceInTypes(formals), upper = false, lubDepth(formals) max lubDepth(argtpes))
+      // Can warn about inferring Any/AnyVal as long as they don't appear
+      // explicitly anywhere amongst the formal, argument, result, or expected type.
+      def canWarnAboutAny = !(pt :: restpe :: formals ::: argtpes exists (t => (t contains AnyClass) || (t contains AnyValClass)))
+      def argumentPosition(idx: Int): Position = context.tree match {
+        case x: ValOrDefDef => x.rhs match {
+          case Apply(fn, args) if idx < args.size => args(idx).pos
+          case _                                  => context.tree.pos
+        }
+        case _ => context.tree.pos
+      }
+      if (settings.warnInferAny.value && context.reportErrors && canWarnAboutAny) {
+        foreachWithIndex(targs) ((targ, idx) =>
+          targ.typeSymbol match {
+            case sym @ (AnyClass | AnyValClass) =>
+              context.unit.warning(argumentPosition(idx), s"a type was inferred to be `${sym.name}`; this may indicate a programming error.")
+            case _ =>
+          }
+        )
+      }
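+      // Worked example (hypothetical code, added for illustration): a call such as
+      //   List(1, "two")
+      // forces the element type parameter to be inferred as Any, which is exactly the
+      // kind of inference the warning above is meant to surface when the warnInferAny
+      // setting is enabled and Any/AnyVal do not already appear in the types involved.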
       adjustTypeArgs(tparams, tvars, targs, restpe)
     }
 
+    /** One must step carefully when assessing applicability due to
+     *  complications from varargs, tuple conversion, and named arguments.
+     *  This method is used to filter out inapplicable methods,
+     *  its behavior being slightly configurable based on what stage of
+     *  overloading resolution we're at.
+     *
+     *  This method has boolean parameters, which is usually suboptimal
+     *  but I didn't work out a better way.  They don't have defaults,
+     *  and the method's scope is limited.
+     */
+    private[typechecker] def isApplicableBasedOnArity(tpe: Type, argsCount: Int, varargsStar: Boolean, tuplingAllowed: Boolean): Boolean = followApply(tpe) match {
+      case OverloadedType(pre, alts) =>
+        // followApply may return an OverloadedType (tpe is a value type with multiple `apply` methods)
+        alts exists (alt => isApplicableBasedOnArity(pre memberType alt, argsCount, varargsStar, tuplingAllowed))
+      case _ =>
+        val paramsCount   = tpe.params.length
+        // simpleMatch implies we're not using defaults
+        val simpleMatch   = paramsCount == argsCount
+        val varargsTarget = isVarArgsList(tpe.params)
+
+        // varargsMatch implies we're not using defaults, as varargs and defaults are mutually exclusive
+        def varargsMatch  = varargsTarget && (paramsCount - 1) <= argsCount
+        // another reason why auto-tupling is a bad idea: it can hide the use of defaults, so must rule those out explicitly
+        def tuplingMatch  = tuplingAllowed && eligibleForTupleConversion(paramsCount, argsCount, varargsTarget)
+        // varargs and defaults are mutually exclusive, so not using defaults if `varargsTarget`
+        // we're not using defaults if there are (at least as many) arguments as parameters (not using exact match to allow for tupling)
+        def notUsingDefaults = varargsTarget || paramsCount <= argsCount
+
+        // A varargs star call, e.g. (x, y:_*) can only match a varargs method
+        // with the same number of parameters.  See SI-5859 for an example of what
+        // would fail were this not enforced before we arrived at isApplicable.
+        if (varargsStar)
+          varargsTarget && simpleMatch
+        else
+          simpleMatch || varargsMatch || (tuplingMatch && notUsingDefaults)
+    }
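+    // Worked example (hypothetical signatures, added for illustration): given
+    //   def f(x: Int, y: Int) = x + y    // paramsCount = 2
+    //   def g(xs: Int*)       = xs.sum   // varargs target, paramsCount = 1
+    // a two-argument call to `f` is a simpleMatch; `g(1, 2, 3)` is a varargsMatch;
+    // and a varargs-star call like `g(zs: _*)` only matches a varargs method whose
+    // parameter count equals the argument count.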
+
     private[typechecker] def followApply(tp: Type): Type = tp match {
+      case _ if tp.isError => tp // SI-8228, `ErrorType nonPrivateMember nme.apply` returns a member with an erroneous type!
       case NullaryMethodType(restp) =>
         val restp1 = followApply(restp)
         if (restp1 eq restp) tp else restp1
       case _ =>
-        val appmeth = {
-          //OPT cut down on #closures by special casing non-overloaded case
-          // was: tp.nonPrivateMember(nme.apply) filter (_.isPublic)
-          val result = tp.nonPrivateMember(nme.apply)
-          if ((result eq NoSymbol) || !result.isOverloaded && result.isPublic) result
-          else result filter (_.isPublic)
+        //OPT cut down on #closures by special casing non-overloaded case
+        // was: tp.nonPrivateMember(nme.apply) filter (_.isPublic)
+        tp nonPrivateMember nme.apply match {
+          case NoSymbol                                 => tp
+          case sym if !sym.isOverloaded && sym.isPublic => OverloadedType(tp, sym.alternatives)
+          case sym                                      => OverloadedType(tp, sym.filter(_.isPublic).alternatives)
         }
-        if (appmeth == NoSymbol) tp
-        else OverloadedType(tp, appmeth.alternatives)
-    }
-
-    def hasExactlyNumParams(tp: Type, n: Int): Boolean = tp match {
-      case OverloadedType(pre, alts) =>
-        alts exists (alt => hasExactlyNumParams(pre.memberType(alt), n))
-      case _ =>
-        val len = tp.params.length
-        len == n || isVarArgsList(tp.params) && len <= n + 1
     }
 
     /**
@@ -731,7 +629,7 @@ trait Infer extends Checkable {
      *    to the corresponding position in params
      *  - namesOK is false when there's an invalid use of named arguments
      */
-    private def checkNames(argtpes: List[Type], params: List[Symbol]) = {
+    private def checkNames(argtpes: List[Type], params: List[Symbol]): (List[Type], Array[Int], Boolean) = {
       val argPos = Array.fill(argtpes.length)(-1)
       var positionalAllowed, namesOK = true
       var index = 0
@@ -743,7 +641,7 @@ trait Infer extends Checkable {
           if (pos == -1) {
             if (positionalAllowed) { // treat assignment as positional argument
               argPos(index) = index
-              res = UnitClass.tpe
+              res = UnitTpe // TODO: this is a bit optimistic, the name may not refer to a mutable variable...
             } else                   // unknown parameter name
               namesOK = false
           } else if (argPos.contains(pos)) { // parameter specified twice
@@ -765,207 +663,190 @@ trait Infer extends Checkable {
       (argtpes1, argPos, namesOK)
     }
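+    // Worked example (hypothetical signature, added for illustration): for
+    //   def m(a: Int, b: String)
+    // the call `m(b = "x", a = 1)` yields argPos = Array(1, 0); reorderArgs then
+    // restores parameter order before type compatibility is checked.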
 
-    /** don't do a () to (()) conversion for methods whose second parameter
-     * is a varargs. This is a fairly kludgey way to address #3224.
-     * We'll probably find a better way to do this by identifying
-     * tupled and n-ary methods, but thiws is something for a future major revision.
+    /** True if the given parameter list can accept a tupled argument list,
+     *  and the argument list can be tupled (based on its length.)
      */
-    def isUnitForVarArgs(args: List[AnyRef], params: List[Symbol]): Boolean =
-      args.isEmpty && hasLength(params, 2) && isVarArgsList(params)
+    def eligibleForTupleConversion(paramsCount: Int, argsCount: Int, varargsTarget: Boolean): Boolean = {
+      def canSendTuple = argsCount match {
+        case 0 => !varargsTarget        // avoid () to (()) conversion - SI-3224
+        case 1 => false                 // can't tuple a single argument
+        case n => n <= MaxTupleArity    // <= 22 arguments
+      }
+      def canReceiveTuple = paramsCount match {
+        case 1 => true
+        case 2 => varargsTarget
+        case _ => false
+      }
+      canSendTuple && canReceiveTuple
+    }
+    def eligibleForTupleConversion(formals: List[Type], argsCount: Int): Boolean = formals match {
+      case p :: Nil                                     => eligibleForTupleConversion(1, argsCount, varargsTarget = isScalaRepeatedParamType(p))
+      case _ :: p :: Nil if isScalaRepeatedParamType(p) => eligibleForTupleConversion(2, argsCount, varargsTarget = true)
+      case _                                            => false
+    }
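+    // Worked example (hypothetical signature, added for illustration): for
+    //   def h(p: (Int, String)) = p._1
+    // the call `h(1, "a")` has paramsCount = 1 and argsCount = 2, so it is eligible
+    // for tuple conversion and is treated as `h((1, "a"))`; a single argument is never
+    // tupled, and `()` is not converted to `(())` for a varargs target (SI-3224).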
+
+    /** The type of an argument list after being coerced to a tuple.
+     *  @pre: the argument list is eligible for tuple conversion.
+     */
+    private def typeAfterTupleConversion(argtpes: List[Type]): Type = (
+      if (argtpes.isEmpty) UnitTpe                 // aka "Tuple0"
+      else tupleType(argtpes map {
+        case NamedType(name, tp) => UnitTpe  // not a named arg - only assignments here
+        case RepeatedType(tp)    => tp       // but probably shouldn't be tupling a call containing :_*
+        case tp                  => tp
+      })
+    )
 
-    /** Is there an instantiation of free type variables <code>undetparams</code>
-     *  such that function type <code>ftpe</code> is applicable to
-     *  <code>argtpes</code> and its result conform to <code>pt</code>?
+    /** If the argument list needs to be tupled for the parameter list,
+     *  a list containing the type of the tuple.  Otherwise, the original
+     *  argument list.
+     */
+    def tupleIfNecessary(formals: List[Type], argtpes: List[Type]): List[Type] = {
+      if (eligibleForTupleConversion(formals, argtpes.size))
+        typeAfterTupleConversion(argtpes) :: Nil
+      else
+        argtpes
+    }
+
+    private def isApplicableToMethod(undetparams: List[Symbol], mt: MethodType, argtpes0: List[Type], pt: Type): Boolean = {
+      val formals          = formalTypes(mt.paramTypes, argtpes0.length, removeByName = false)
+      def missingArgs      = missingParams[Type](argtpes0, mt.params, x => Some(x) collect { case NamedType(n, _) => n })
+      def argsTupled       = tupleIfNecessary(mt.paramTypes, argtpes0)
+      def argsPlusDefaults = missingArgs match {
+        case (args, _) if args forall (_.hasDefault) => argtpes0 ::: makeNamedTypes(args)
+        case _                                       => argsTupled
+      }
+      // If args eq the incoming arg types, fail; otherwise recurse with these args.
+      def tryWithArgs(args: List[Type]) = (
+           (args ne argtpes0)
+        && isApplicable(undetparams, mt, args, pt)
+      )
+      def tryInstantiating(args: List[Type]) = falseIfNoInstance {
+        val restpe = mt resultType args
+        val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, args, pt)
+        val restpeInst = restpe.instantiateTypeParams(okparams, okargs)
+        // #2665: must use weak conformance, not regular one (follow the monomorphic case above)
+        exprTypeArgs(leftUndet, restpeInst, pt, useWeaklyCompatible = true) match {
+          case null => false
+          case _    => isWithinBounds(NoPrefix, NoSymbol, okparams, okargs)
+        }
+      }
+      def typesCompatible(args: List[Type]) = undetparams match {
+        case Nil => isCompatibleArgs(args, formals) && isWeaklyCompatible(mt resultType args, pt)
+        case _   => tryInstantiating(args)
+      }
+
+      // when using named application, the vararg param has to be specified exactly once
+      def reorderedTypesCompatible = checkNames(argtpes0, mt.params) match {
+        case (_, _, false)                                                                => false // names are not ok
+        case (_, pos, _) if !allArgsArePositional(pos) && !sameLength(formals, mt.params) => false // different length lists and all args not positional
+        case (args, pos, _)                                                               => typesCompatible(reorderArgs(args, pos))
+      }
+      compareLengths(argtpes0, formals) match {
+        case 0 if containsNamedType(argtpes0) => reorderedTypesCompatible      // right number of args, wrong order
+        case 0                                => typesCompatible(argtpes0)     // fast track if no named arguments are used
+        case x if x > 0                       => tryWithArgs(argsTupled)       // too many args, try tupling
+        case _                                => tryWithArgs(argsPlusDefaults) // too few args, try adding defaults or tupling
+      }
+    }
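+    // Worked example (hypothetical signature, added for illustration): for
+    //   def m(a: Int, b: Int = 0)
+    // the call `m(1)` has too few arguments, so the missing `b` (which has a default)
+    // is appended as a named argument and applicability is re-checked; if some missing
+    // parameter lacked a default, the tupled argument list would be tried instead.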
+
+    /** Is there an instantiation of free type variables `undetparams` such that
+     *  function type `ftpe` is applicable to `argtpes0` and its result conforms to `pt`?
      *
-     *  @param undetparams ...
      *  @param ftpe        the type of the function (often a MethodType)
-     *  @param argtpes     the argument types; a NamedType(name, tp) for named
+     *  @param argtpes0    the argument types; a NamedType(name, tp) for named
      *    arguments. For each NamedType, if `name` does not exist in `ftpe`, that
      *    type is set to `Unit`, i.e. the corresponding argument is treated as
      *    an assignment expression (@see checkNames).
-     *  @param pt          ...
-     *  @return            ...
      */
-    private def isApplicable(undetparams: List[Symbol], ftpe: Type,
-                             argtpes0: List[Type], pt: Type): Boolean =
+    private def isApplicable(undetparams: List[Symbol], ftpe: Type, argtpes0: List[Type], pt: Type): Boolean = (
       ftpe match {
-        case OverloadedType(pre, alts) =>
-          alts exists (alt => isApplicable(undetparams, pre.memberType(alt), argtpes0, pt))
-        case ExistentialType(tparams, qtpe) =>
-          isApplicable(undetparams, qtpe, argtpes0, pt)
-        case mt @ MethodType(params, _) =>
-          val formals = formalTypes(mt.paramTypes, argtpes0.length, removeByName = false)
-
-          def tryTupleApply: Boolean = {
-            // if 1 formal, 1 argtpe (a tuple), otherwise unmodified argtpes0
-            val tupleArgTpes = actualTypes(argtpes0 map {
-                // no assignment is treated as named argument here
-              case NamedType(name, tp) => UnitClass.tpe
-              case tp => tp
-              }, formals.length)
-
-            !sameLength(argtpes0, tupleArgTpes) &&
-            !isUnitForVarArgs(argtpes0, params) &&
-            isApplicable(undetparams, ftpe, tupleArgTpes, pt)
-          }
-          def typesCompatible(argtpes: List[Type]) = {
-            val restpe = ftpe.resultType(argtpes)
-            if (undetparams.isEmpty) {
-              isCompatibleArgs(argtpes, formals) && isWeaklyCompatible(restpe, pt)
-            } else {
-              try {
-                val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
-                // #2665: must use weak conformance, not regular one (follow the monomorphic case above)
-                (exprTypeArgs(leftUndet, restpe.instantiateTypeParams(okparams, okargs), pt, useWeaklyCompatible = true)._1 ne null) &&
-                isWithinBounds(NoPrefix, NoSymbol, okparams, okargs)
-              } catch {
-                case ex: NoInstance => false
-              }
-            }
-          }
-
-          // very similar logic to doTypedApply in typechecker
-          val lencmp = compareLengths(argtpes0, formals)
-          if (lencmp > 0) tryTupleApply
-          else if (lencmp == 0) {
-            if (!argtpes0.exists(_.isInstanceOf[NamedType])) {
-              // fast track if no named arguments are used
-              typesCompatible(argtpes0)
-            }
-            else {
-              // named arguments are used
-              val (argtpes1, argPos, namesOK) = checkNames(argtpes0, params)
-              // when using named application, the vararg param has to be specified exactly once
-              ( namesOK && (isIdentity(argPos) || sameLength(formals, params)) &&
-              // nb. arguments and names are OK, check if types are compatible
-                typesCompatible(reorderArgs(argtpes1, argPos))
-              )
-            }
-          }
-          else {
-            // not enough arguments, check if applicable using defaults
-            val missing = missingParams[Type](argtpes0, params, {
-              case NamedType(name, _) => Some(name)
-              case _ => None
-            })._1
-            if (missing forall (_.hasDefault)) {
-              // add defaults as named arguments
-              val argtpes1 = argtpes0 ::: (missing map (p => NamedType(p.name, p.tpe)))
-              isApplicable(undetparams, ftpe, argtpes1, pt)
-            }
-            else tryTupleApply
-          }
-
-        case NullaryMethodType(restpe) => // strip nullary method type, which used to be done by the polytype case below
-          isApplicable(undetparams, restpe, argtpes0, pt)
-        case PolyType(tparams, restpe) =>
-          createFromClonedSymbols(tparams, restpe)((tps1, restpe1) => isApplicable(tps1 ::: undetparams, restpe1, argtpes0, pt))
-        case ErrorType =>
-          true
-        case _ =>
-          false
+        case OverloadedType(pre, alts) => alts exists (alt => isApplicable(undetparams, pre memberType alt, argtpes0, pt))
+        case ExistentialType(_, qtpe)  => isApplicable(undetparams, qtpe, argtpes0, pt)
+        case mt @ MethodType(_, _)     => isApplicableToMethod(undetparams, mt, argtpes0, pt)
+        case NullaryMethodType(restpe) => isApplicable(undetparams, restpe, argtpes0, pt)
+        case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, restpe)((tps1, res1) => isApplicable(tps1 ::: undetparams, res1, argtpes0, pt))
+        case ErrorType                 => true
+        case _                         => false
       }
+    )
 
     /**
-     * Todo: Try to make isApplicable always safe (i.e. not cause TypeErrors).
-     * The chance of TypeErrors should be reduced through context errors
+     * Are arguments of the given types applicable to `ftpe`? Type argument inference
+     * is tried twice: firstly with the given expected type, and secondly with `WildcardType`.
      */
-    private[typechecker] def isApplicableSafe(undetparams: List[Symbol], ftpe: Type,
-                                              argtpes0: List[Type], pt: Type): Boolean = {
-      val silentContext = context.makeSilent(false)
-      val typer0 = newTyper(silentContext)
-      val res1 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, pt)
-      if (pt != WildcardType && silentContext.hasErrors) {
-        silentContext.flushBuffer()
-        val res2 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, WildcardType)
-        if (silentContext.hasErrors) false else res2
-      } else res1
+    // Todo: Try to make isApplicable always safe (i.e. not cause TypeErrors).
+    // The chance of TypeErrors should be reduced through context errors
+    private[typechecker] def isApplicableSafe(undetparams: List[Symbol], ftpe: Type, argtpes0: List[Type], pt: Type): Boolean = {
+      def applicableExpectingPt(pt: Type): Boolean = {
+        val silent = context.makeSilent(reportAmbiguousErrors = false)
+        val result = newTyper(silent).infer.isApplicable(undetparams, ftpe, argtpes0, pt)
+        if (silent.hasErrors && !pt.isWildcard)
+          applicableExpectingPt(WildcardType) // second try
+        else
+          result
+      }
+      applicableExpectingPt(pt)
     }
 
-    /** Is type <code>ftpe1</code> strictly more specific than type <code>ftpe2</code>
+    /** Is type `ftpe1` strictly more specific than type `ftpe2`
      *  when both are alternatives in an overloaded function?
      *  @see SLS (sec:overloading-resolution)
-     *
-     *  @param ftpe1 ...
-     *  @param ftpe2 ...
-     *  @return      ...
      */
-    def isAsSpecific(ftpe1: Type, ftpe2: Type): Boolean = ftpe1 match {
-      case OverloadedType(pre, alts) =>
-        alts exists (alt => isAsSpecific(pre.memberType(alt), ftpe2))
-      case et: ExistentialType =>
-        isAsSpecific(ftpe1.skolemizeExistential, ftpe2)
-        //et.withTypeVars(isAsSpecific(_, ftpe2))
-      case NullaryMethodType(res) =>
-        isAsSpecific(res, ftpe2)
-      case mt: MethodType if mt.isImplicit =>
-        isAsSpecific(ftpe1.resultType, ftpe2)
-      case mt @ MethodType(params, _) if params.nonEmpty =>
-        var argtpes = mt.paramTypes
-        if (isVarArgsList(params) && isVarArgsList(ftpe2.params))
-          argtpes = argtpes map (argtpe =>
-            if (isRepeatedParamType(argtpe)) argtpe.typeArgs.head else argtpe)
-        isApplicable(List(), ftpe2, argtpes, WildcardType)
-      case PolyType(tparams, NullaryMethodType(res)) =>
-        isAsSpecific(PolyType(tparams, res), ftpe2)
-      case PolyType(tparams, mt: MethodType) if mt.isImplicit =>
-        isAsSpecific(PolyType(tparams, mt.resultType), ftpe2)
-      case PolyType(_, (mt @ MethodType(params, _))) if params.nonEmpty =>
-        isApplicable(List(), ftpe2, mt.paramTypes, WildcardType)
-      // case NullaryMethodType(res) =>
-      //   isAsSpecific(res, ftpe2)
-      case ErrorType =>
-        true
-      case _ =>
-        ftpe2 match {
-          case OverloadedType(pre, alts) =>
-            alts forall (alt => isAsSpecific(ftpe1, pre.memberType(alt)))
-          case et: ExistentialType =>
-            et.withTypeVars(isAsSpecific(ftpe1, _))
-          case mt: MethodType =>
-            !mt.isImplicit || isAsSpecific(ftpe1, mt.resultType)
-          case NullaryMethodType(res) =>
-            isAsSpecific(ftpe1, res)
-          case PolyType(tparams, NullaryMethodType(res)) =>
-            isAsSpecific(ftpe1, PolyType(tparams, res))
-          case PolyType(tparams, mt: MethodType) =>
-            !mt.isImplicit || isAsSpecific(ftpe1, PolyType(tparams, mt.resultType))
-          case _ =>
-            isAsSpecificValueType(ftpe1, ftpe2, List(), List())
-        }
+    def isAsSpecific(ftpe1: Type, ftpe2: Type): Boolean = {
+      def checkIsApplicable(argtpes: List[Type]) = isApplicable(Nil, ftpe2, argtpes, WildcardType)
+      def bothAreVarargs                         = isVarArgsList(ftpe1.params) && isVarArgsList(ftpe2.params)
+      def onRight = ftpe2 match {
+        case OverloadedType(pre, alts)                     => alts forall (alt => isAsSpecific(ftpe1, pre memberType alt))
+        case et: ExistentialType                           => et.withTypeVars(isAsSpecific(ftpe1, _))
+        case mt @ MethodType(_, restpe)                    => !mt.isImplicit || isAsSpecific(ftpe1, restpe)
+        case NullaryMethodType(res)                        => isAsSpecific(ftpe1, res)
+        case PolyType(tparams, NullaryMethodType(restpe))  => isAsSpecific(ftpe1, PolyType(tparams, restpe))
+        case PolyType(tparams, mt @ MethodType(_, restpe)) => !mt.isImplicit || isAsSpecific(ftpe1, PolyType(tparams, restpe))
+        case _                                             => isAsSpecificValueType(ftpe1, ftpe2, Nil, Nil)
+      }
+      ftpe1 match {
+        case OverloadedType(pre, alts)                                      => alts exists (alt => isAsSpecific(pre memberType alt, ftpe2))
+        case et: ExistentialType                                            => isAsSpecific(et.skolemizeExistential, ftpe2)
+        case NullaryMethodType(restpe)                                      => isAsSpecific(restpe, ftpe2)
+        case mt @ MethodType(_, restpe) if mt.isImplicit                    => isAsSpecific(restpe, ftpe2)
+        case mt @ MethodType(_, _) if bothAreVarargs                        => checkIsApplicable(mt.paramTypes mapConserve repeatedToSingle)
+        case mt @ MethodType(params, _) if params.nonEmpty                  => checkIsApplicable(mt.paramTypes)
+        case PolyType(tparams, NullaryMethodType(restpe))                   => isAsSpecific(PolyType(tparams, restpe), ftpe2)
+        case PolyType(tparams, mt @ MethodType(_, restpe)) if mt.isImplicit => isAsSpecific(PolyType(tparams, restpe), ftpe2)
+        case PolyType(_, mt @ MethodType(params, _)) if params.nonEmpty     => checkIsApplicable(mt.paramTypes)
+        case ErrorType                                                      => true
+        case _                                                              => onRight
+      }
     }
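+    // Worked example (hypothetical overloads, added for illustration): with
+    //   def p(x: Any): Unit
+    //   def p(x: Int): Unit
+    // the Int alternative is as specific as the Any one (an Int argument is applicable
+    // to the Any overload), but not vice versa, so `p(1)` resolves to the Int overload.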
-    private def isAsSpecificValueType(tpe1: Type, tpe2: Type, undef1: List[Symbol], undef2: List[Symbol]): Boolean = (tpe1, tpe2) match {
-      case (PolyType(tparams1, rtpe1), _) =>
+    private def isAsSpecificValueType(tpe1: Type, tpe2: Type, undef1: List[Symbol], undef2: List[Symbol]): Boolean = tpe1 match {
+      case PolyType(tparams1, rtpe1) =>
         isAsSpecificValueType(rtpe1, tpe2, undef1 ::: tparams1, undef2)
-      case (_, PolyType(tparams2, rtpe2)) =>
-        isAsSpecificValueType(tpe1, rtpe2, undef1, undef2 ::: tparams2)
-      case _ =>
-        existentialAbstraction(undef1, tpe1) <:< existentialAbstraction(undef2, tpe2)
+      case _                         =>
+        tpe2 match {
+          case PolyType(tparams2, rtpe2) => isAsSpecificValueType(tpe1, rtpe2, undef1, undef2 ::: tparams2)
+          case _                         => existentialAbstraction(undef1, tpe1) <:< existentialAbstraction(undef2, tpe2)
+        }
     }
 
-
-/*
-    def isStrictlyMoreSpecific(ftpe1: Type, ftpe2: Type): Boolean =
-      ftpe1.isError || isAsSpecific(ftpe1, ftpe2) &&
-      (!isAsSpecific(ftpe2, ftpe1) ||
-       !ftpe1.isInstanceOf[OverloadedType] && ftpe2.isInstanceOf[OverloadedType] ||
-       phase.erasedTypes && covariantReturnOverride(ftpe1, ftpe2))
-*/
     /** Is sym1 (or its companion class in case it is a module) a subclass of
      *  sym2 (or its companion class in case it is a module)?
      */
     def isProperSubClassOrObject(sym1: Symbol, sym2: Symbol): Boolean = (
-      (sym1 != sym2) && (sym1 != NoSymbol) && (
-           (sym1 isSubClass sym2)
-        || (sym1.isModuleClass && isProperSubClassOrObject(sym1.linkedClassOfClass, sym2))
-        || (sym2.isModuleClass && isProperSubClassOrObject(sym1, sym2.linkedClassOfClass))
-      )
+         (sym1 ne sym2)
+      && (sym1 ne NoSymbol)
+      && (    (sym1 isSubClass sym2)
+           || (sym1.isModuleClass && isProperSubClassOrObject(sym1.linkedClassOfClass, sym2))
+           || (sym2.isModuleClass && isProperSubClassOrObject(sym1, sym2.linkedClassOfClass))
+         )
     )
 
     /** is symbol `sym1` defined in a proper subclass of symbol `sym2`?
      */
-    def isInProperSubClassOrObject(sym1: Symbol, sym2: Symbol) =
-      sym2 == NoSymbol || isProperSubClassOrObject(sym1.owner, sym2.owner)
+    def isInProperSubClassOrObject(sym1: Symbol, sym2: Symbol) = (
+         (sym2 eq NoSymbol)
+      || isProperSubClassOrObject(sym1.safeOwner, sym2.owner)
+    )
 
     def isStrictlyMoreSpecific(ftpe1: Type, ftpe2: Type, sym1: Symbol, sym2: Symbol): Boolean = {
       // ftpe1 / ftpe2 are OverloadedTypes (possibly with one single alternative) if they
@@ -978,92 +859,36 @@ trait Infer extends Checkable {
                                  (!phase.erasedTypes || covariantReturnOverride(ftpe1, ftpe2))) 1 else 0)
         val subClassCount = (if (isInProperSubClassOrObject(sym1, sym2)) 1 else 0) -
                             (if (isInProperSubClassOrObject(sym2, sym1)) 1 else 0)
-//        println("is more specific? "+sym1+":"+ftpe1+sym1.locationString+"/"+sym2+":"+ftpe2+sym2.locationString+":"+
-//                specificCount+"/"+subClassCount)
         specificCount + subClassCount > 0
       }
     }
-/*
-      ftpe1.isError || {
-        if (isAsSpecific(ftpe1, ftpe2))
-          (!isAsSpecific(ftpe2, ftpe1) ||
-           isProperSubClassOrObject(sym1.owner, sym2.owner) ||
-           !ftpe1.isInstanceOf[OverloadedType] && ftpe2.isInstanceOf[OverloadedType] ||
-           phase.erasedTypes && covariantReturnOverride(ftpe1, ftpe2))
-        else
-          !isAsSpecific(ftpe2, ftpe1) &&
-          isProperSubClassOrObject(sym1.owner, sym2.owner)
-      }
-*/
-    private def covariantReturnOverride(ftpe1: Type, ftpe2: Type): Boolean = (ftpe1, ftpe2) match {
-      case (MethodType(_, rtpe1), MethodType(_, rtpe2)) =>
-        rtpe1 <:< rtpe2 || rtpe2.typeSymbol == ObjectClass
-      case _ =>
-        false
-    }
-/*
-    /** Is type `tpe1` a strictly better expression alternative than type `tpe2`?
-     */
-    def isStrictlyBetterExpr(tpe1: Type, tpe2: Type) = {
-      isMethod(tpe2) && !isMethod(tpe1) ||
-      isNullary(tpe1) && !isNullary(tpe2) ||
-      isStrictlyBetter(tpe1, tpe2)
-    }
-
-    /** Is type `tpe1` a strictly better alternative than type `tpe2`?
-     *  non-methods are always strictly better than methods
-     *  nullary methods are always strictly better than non-nullary
-     *  if both are non-nullary methods, then tpe1 is strictly better than tpe2 if
-     *   - tpe1 specializes tpe2 and tpe2 does not specialize tpe1
-     *   - tpe1 and tpe2 specialize each other and tpe1 has a strictly better resulttype than
-     *     tpe2
-     */
-    def isStrictlyBetter(tpe1: Type, tpe2: Type) = {
-      def isNullary(tpe: Type): Boolean = tpe match {
-        case tp: RewrappingTypeProxy => isNullary(tp.underlying)
-        case _ => tpe.paramSectionCount == 0 || tpe.params.isEmpty
-      }
-      def isMethod(tpe: Type): Boolean = tpe match {
-        case tp: RewrappingTypeProxy => isMethod(tp.underlying)
-        case MethodType(_, _) | PolyType(_, _) => true
-        case _ => false
-      }
-      def hasStrictlyBetterResult =
-        resultIsBetter(tpe1, tpe2, List(), List()) && !resultIsBetter(tpe2, tpe1, List(), List())
-      if (!isMethod(tpe1))
-        isMethod(tpe2) || hasStrictlyBetterResult
-
-      isNullary(tpe1) && !isNullary(tpe2) ||
-      is
 
-      else if (isNullary(tpe1))
-        isMethod(tpe2) && (!isNullary(tpe2) || hasStrictlyBetterResult)
-      else
-        specializes(tpe1, tpe2) && (!specializes(tpe2, tpe1) || hasStrictlyBetterResult)
+    private def covariantReturnOverride(ftpe1: Type, ftpe2: Type): Boolean = ftpe1 match {
+      case MethodType(_, rtpe1) =>
+        ftpe2 match {
+          case MethodType(_, rtpe2) => rtpe1 <:< rtpe2 || rtpe2.typeSymbol == ObjectClass
+          case _                    => false
+        }
+      case _ => false
     }
 
-*/
     /** error if arguments not within bounds. */
-    def checkBounds(tree: Tree, pre: Type, owner: Symbol,
-                    tparams: List[Symbol], targs: List[Type], prefix: String): Boolean =
-      if ((targs exists (_.isErroneous)) || (tparams exists (_.isErroneous))) true
-      else {
-        //@M validate variances & bounds of targs wrt variances & bounds of tparams
-        //@M TODO: better place to check this?
-        //@M TODO: errors for getters & setters are reported separately
-        val kindErrors = checkKindBounds(tparams, targs, pre, owner)
-        kindErrors match {
-          case Nil =>
-            def notWithinBounds() = NotWithinBounds(tree, prefix, targs, tparams, Nil)
-            isWithinBounds(pre, owner, tparams, targs) || {notWithinBounds(); false}
-          case errors =>
-            def kindBoundErrors() = KindBoundErrors(tree, prefix, targs, tparams, errors)
-            (targs contains WildcardType) || {kindBoundErrors(); false}
-        }
+    def checkBounds(tree: Tree, pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type], prefix: String): Boolean = {
+      def issueBoundsError()                       = { NotWithinBounds(tree, prefix, targs, tparams, Nil) ; false }
+      def issueKindBoundErrors(errs: List[String]) = { KindBoundErrors(tree, prefix, targs, tparams, errs) ; false }
+      //@M validate variances & bounds of targs wrt variances & bounds of tparams
+      //@M TODO: better place to check this?
+      //@M TODO: errors for getters & setters are reported separately
+      def check() = checkKindBounds(tparams, targs, pre, owner) match {
+        case Nil  => isWithinBounds(pre, owner, tparams, targs) || issueBoundsError()
+        case errs => (targs contains WildcardType) || issueKindBoundErrors(errs)
       }
 
+      targs.exists(_.isErroneous) || tparams.exists(_.isErroneous) || check()
+    }
+
     def checkKindBounds(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): List[String] = {
-      checkKindBounds0(tparams, targs, pre, owner, true) map {
+      checkKindBounds0(tparams, targs, pre, owner, explainErrors = true) map {
         case (targ, tparam, kindErrors) =>
           kindErrors.errorMessage(targ, tparam)
       }
@@ -1078,21 +903,13 @@ trait Infer extends Checkable {
      *  attempts fail, an error is produced.
      */
     def inferArgumentInstance(tree: Tree, undetparams: List[Symbol], strictPt: Type, lenientPt: Type) {
-      printInference(
-        ptBlock("inferArgumentInstance",
-          "tree"        -> tree,
-          "tree.tpe"    -> tree.tpe,
-          "undetparams" -> undetparams,
-          "strictPt"    -> strictPt,
-          "lenientPt"   -> lenientPt
-        )
-      )
-      var targs = exprTypeArgs(undetparams, tree.tpe, strictPt)._1
+      printTyping(tree, s"inferring arg instance based on pt0=$strictPt, pt1=$lenientPt")
+      var targs = exprTypeArgs(undetparams, tree.tpe, strictPt, useWeaklyCompatible = false)
       if ((targs eq null) || !(tree.tpe.subst(undetparams, targs) <:< strictPt))
-        targs = exprTypeArgs(undetparams, tree.tpe, lenientPt)._1
+        targs = exprTypeArgs(undetparams, tree.tpe, lenientPt, useWeaklyCompatible = false)
 
       substExpr(tree, undetparams, targs, lenientPt)
-      printInference("[inferArgumentInstance] finished, targs = " + targs)
+      printTyping(tree, s"infer arg instance from pt0=$strictPt, pt1=$lenientPt; targs=$targs")
     }
 
     /** Infer type arguments `targs` for `tparams` of polymorphic expression in `tree`, given prototype `pt`.
@@ -1101,31 +918,28 @@ trait Infer extends Checkable {
      * If passed, infers against specified type `treeTp` instead of `tree.tpe`.
      */
     def inferExprInstance(tree: Tree, tparams: List[Symbol], pt: Type = WildcardType, treeTp0: Type = null, keepNothings: Boolean = true, useWeaklyCompatible: Boolean = false): List[Symbol] = {
-      val treeTp = if(treeTp0 eq null) tree.tpe else treeTp0 // can't refer to tree in default for treeTp0
-      val (targs, tvars) = exprTypeArgs(tparams, treeTp, pt, useWeaklyCompatible)
-      printInference(
-        ptBlock("inferExprInstance",
-          "tree"    -> tree,
-          "tree.tpe"-> tree.tpe,
-          "tparams" -> tparams,
-          "pt"      -> pt,
-          "targs"   -> targs,
-          "tvars"   -> tvars
-        )
-      )
+      val treeTp = if (treeTp0 eq null) tree.tpe else treeTp0 // can't refer to tree in default for treeTp0
+      val tvars  = tparams map freshVar
+      val targs  = exprTypeArgs(tvars, tparams, treeTp, pt, useWeaklyCompatible)
+      def infer_s = map3(tparams, tvars, targs)((tparam, tvar, targ) => s"$tparam=$tvar/$targ") mkString ","
+      printTyping(tree, s"infer expr instance from pt=$pt, $infer_s")
+
+      // SI-7899 inferring by-name types is unsound. The correct behaviour is conditional because the hole is
+      //         exploited in Scalaz (Free.scala), as seen in: run/t7899-regression.
+      def dropByNameIfStrict(tp: Type): Type = if (settings.inferByName) tp else dropByName(tp)
+      def targsStrict = if (targs eq null) null else targs mapConserve dropByNameIfStrict
 
       if (keepNothings || (targs eq null)) { //@M: adjustTypeArgs fails if targs==null, neg/t0226
-        substExpr(tree, tparams, targs, pt)
+        substExpr(tree, tparams, targsStrict, pt)
         List()
       } else {
-        val AdjustedTypeArgs.Undets(okParams, okArgs, leftUndet) = adjustTypeArgs(tparams, tvars, targs)
-        printInference(
-          ptBlock("inferExprInstance/AdjustedTypeArgs",
-            "okParams" -> okParams,
-            "okArgs" -> okArgs,
-            "leftUndet" -> leftUndet
-          )
-        )
+        val AdjustedTypeArgs.Undets(okParams, okArgs, leftUndet) = adjustTypeArgs(tparams, tvars, targsStrict)
+        def solved_s = map2(okParams, okArgs)((p, a) => s"$p=$a") mkString ","
+        def undet_s = leftUndet match {
+          case Nil => ""
+          case ps  => ps.mkString(", undet=", ",", "")
+        }
+        printTyping(tree, s"infer solved $solved_s$undet_s")
         substExpr(tree, okParams, okArgs, pt)
         leftUndet
       }
@@ -1133,30 +947,25 @@ trait Infer extends Checkable {
 
     /** Substitute free type variables `undetparams` of polymorphic argument
      *  expression `tree` to `targs`. Error if `targs` is null.
-     *
-     *  @param tree ...
-     *  @param undetparams ...
-     *  @param targs ...
-     *  @param pt ...
      */
-    private def substExpr(tree: Tree, undetparams: List[Symbol],
-                          targs: List[Type], pt: Type) {
+    private def substExpr(tree: Tree, undetparams: List[Symbol], targs: List[Type], pt: Type) {
       if (targs eq null) {
         if (!tree.tpe.isErroneous && !pt.isErroneous)
           PolymorphicExpressionInstantiationError(tree, undetparams, pt)
-      } else {
+      }
+      else {
         new TreeTypeSubstituter(undetparams, targs).traverse(tree)
         notifyUndetparamsInferred(undetparams, targs)
       }
     }
 
-    /** Substitute free type variables <code>undetparams</code> of application
-     *  <code>fn(args)</code>, given prototype <code>pt</code>.
+    /** Substitute free type variables `undetparams` of application
+     *  `fn(args)`, given prototype `pt`.
      *
      *  @param fn          fn: the function that needs to be instantiated.
      *  @param undetparams the parameters that need to be determined
      *  @param args        the actual arguments supplied in the call.
-     *  @param pt          the expected type of the function application
+     *  @param pt0         the expected type of the function application
      *  @return            The type parameters that remain uninstantiated,
      *                     and that thus have not been substituted.
      */
@@ -1166,20 +975,12 @@ trait Infer extends Checkable {
         try {
           val pt      = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
           val formals = formalTypes(mt.paramTypes, args.length)
-          val argtpes = actualTypes(args map (x => elimAnonymousClass(x.tpe.deconst)), formals.length)
+          val argtpes = tupleIfNecessary(formals, args map (x => elimAnonymousClass(x.tpe.deconst)))
           val restpe  = fn.tpe.resultType(argtpes)
 
           val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) =
             methTypeArgs(undetparams, formals, restpe, argtpes, pt)
 
-          printInference("[infer method] solving for %s in %s based on (%s)%s (%s)".format(
-            undetparams.map(_.name).mkString(", "),
-            fn.tpe,
-            argtpes.mkString(", "),
-            restpe,
-            (okparams map (_.name), okargs).zipped.map(_ + "=" + _).mkString("solved: ", ", ", "")
-          ))
-
           if (checkBounds(fn, NoPrefix, NoSymbol, undetparams, allargs, "inferred ")) {
             val treeSubst = new TreeTypeSubstituter(okparams, okargs)
             treeSubst traverseTrees fn :: args
@@ -1202,25 +1003,22 @@ trait Infer extends Checkable {
         }
     }
 
-    def widen(tp: Type): Type = abstractTypesToBounds(tp)
-
-    /** Substitute free type variables <code>undetparams</code> of type constructor
-     *  <code>tree</code> in pattern, given prototype <code>pt</code>.
+    /** Substitute free type variables `undetparams` of type constructor
+     *  `tree` in pattern, given prototype `pt`.
      *
      *  @param tree        the constructor that needs to be instantiated
      *  @param undetparams the undetermined type parameters
-     *  @param pt          the expected result type of the instance
+     *  @param pt0         the expected result type of the instance
      */
     def inferConstructorInstance(tree: Tree, undetparams: List[Symbol], pt0: Type) {
-      val pt       = widen(pt0)
+      val pt       = abstractTypesToBounds(pt0)
       val ptparams = freeTypeParamsOfTerms(pt)
       val ctorTp   = tree.tpe
       val resTp    = ctorTp.finalResultType
 
       debuglog("infer constr inst "+ tree +"/"+ undetparams +"/ pt= "+ pt +" pt0= "+ pt0 +" resTp: "+ resTp)
 
-      /** Compute type arguments for undetermined params
-       */
+      /* Compute type arguments for undetermined params */
       def inferFor(pt: Type): Option[List[Type]] = {
         val tvars   = undetparams map freshVar
         val resTpV  = resTp.instantiateTypeParams(undetparams, tvars)
@@ -1232,13 +1030,16 @@ trait Infer extends Checkable {
             val variances  =
               if (ctorTp.paramTypes.isEmpty) undetparams map varianceInType(ctorTp)
               else undetparams map varianceInTypes(ctorTp.paramTypes)
-            val targs      = solvedTypes(tvars, undetparams, variances, true, lubDepth(List(resTp, pt)))
+
+            // Note: this is the only place where solvedTypes (or, indirectly, solve) is called
+            // with upper = true.
+            val targs = solvedTypes(tvars, undetparams, variances, upper = true, lubDepth(resTp :: pt :: Nil))
             // checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
             // no checkBounds here. If we enable it, test bug602 fails.
             // TODO: reinstate checkBounds, return params that fail to meet their bounds to undetparams
             Some(targs)
           } catch ifNoInstance { msg =>
-            debuglog("NO INST "+ (tvars, tvars map (_.constr)))
+            debuglog("NO INST "+ ((tvars, tvars map (_.constr))))
             NoConstructorInstanceError(tree, resTp, pt, msg)
             None
           }
@@ -1272,109 +1073,68 @@ trait Infer extends Checkable {
           }
         } else None
 
-      (inferFor(pt) orElse inferForApproxPt) map { targs =>
-        new TreeTypeSubstituter(undetparams, targs).traverse(tree)
-        notifyUndetparamsInferred(undetparams, targs)
-      } getOrElse {
-        debugwarn("failed inferConstructorInstance for "+ tree  +" : "+ tree.tpe +" under "+ undetparams +" pt = "+ pt +(if(isFullyDefined(pt)) " (fully defined)" else " (not fully defined)"))
-        // if (settings.explaintypes.value) explainTypes(resTp.instantiateTypeParams(undetparams, tvars), pt)
-        ConstrInstantiationError(tree, resTp, pt)
+      inferFor(pt) orElse inferForApproxPt match {
+        case Some(targs) =>
+          new TreeTypeSubstituter(undetparams, targs).traverse(tree)
+          notifyUndetparamsInferred(undetparams, targs)
+        case _ =>
+          def not = if (isFullyDefined(pt)) "" else "not "
+          devWarning(s"failed inferConstructorInstance for $tree: ${tree.tpe} undet=$undetparams, pt=$pt (${not}fully defined)")
+          ConstrInstantiationError(tree, resTp, pt)
       }
     }
 
-
-    def instBounds(tvar: TypeVar): (Type, Type) = {
-      val tparam = tvar.origin.typeSymbol
-      val instType = toOrigin(tvar.constr.inst)
+    def instBounds(tvar: TypeVar): TypeBounds = {
+      val tparam               = tvar.origin.typeSymbol
+      val instType             = toOrigin(tvar.constr.inst)
+      val TypeBounds(lo, hi)   = tparam.info.bounds
       val (loBounds, hiBounds) =
-        if (instType != NoType && isFullyDefined(instType)) (List(instType), List(instType))
+        if (isFullyDefined(instType)) (List(instType), List(instType))
         else (tvar.constr.loBounds, tvar.constr.hiBounds)
-      val lo = lub(tparam.info.bounds.lo :: loBounds map toOrigin)
-      val hi = glb(tparam.info.bounds.hi :: hiBounds map toOrigin)
-      (lo, hi)
+
+      TypeBounds(
+        lub(lo :: loBounds map toOrigin),
+        glb(hi :: hiBounds map toOrigin)
+      )
     }
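+    // Worked example (hypothetical bounds, added for illustration): for a type parameter
+    // declared `T <: AnyRef` whose type variable has no fully-defined instance, an
+    // accumulated upper constraint of String, and no lower constraints, instBounds
+    // yields TypeBounds(lub(Nothing :: Nil), glb(AnyRef :: String :: Nil)),
+    // i.e. roughly >: Nothing <: String.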
 
     def isInstantiatable(tvars: List[TypeVar]) = {
       val tvars1 = tvars map (_.cloneInternal)
       // Note: right now it's not clear that solving is complete, or how it can be made complete!
       // So we should come back to this and investigate.
-      solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (x => COVARIANT), false)
+      solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (_ => Variance.Covariant), upper = false, Depth.AnyDepth)
     }
 
-    // this is quite nasty: it destructively changes the info of the syms of e.g., method type params (see #3692, where the type param T's bounds were set to >: T <: T, so that parts looped)
+    // this is quite nasty: it destructively changes the info of the syms of e.g., method type params
+    // (see #3692, where the type param T's bounds were set to >: T <: T, so that parts looped)
     // the changes are rolled back by restoreTypeBounds, but might be unintentionally observed in the meantime
     def instantiateTypeVar(tvar: TypeVar) {
-      val tparam = tvar.origin.typeSymbol
-      if (false &&
-          tvar.constr.inst != NoType &&
-          isFullyDefined(tvar.constr.inst) &&
-          (tparam.info.bounds containsType tvar.constr.inst)) {
-        context.nextEnclosing(_.tree.isInstanceOf[CaseDef]).pushTypeBounds(tparam)
-        tparam setInfo tvar.constr.inst
-        tparam resetFlag DEFERRED
-        debuglog("new alias of " + tparam + " = " + tparam.info)
-      } else {
-        val (lo, hi) = instBounds(tvar)
-        if (lo <:< hi) {
-          if (!((lo <:< tparam.info.bounds.lo) && (tparam.info.bounds.hi <:< hi)) // bounds were improved
-             && tparam != lo.typeSymbolDirect && tparam != hi.typeSymbolDirect) { // don't create illegal cycles
-            context.nextEnclosing(_.tree.isInstanceOf[CaseDef]).pushTypeBounds(tparam)
-            tparam setInfo TypeBounds(lo, hi)
-            debuglog("new bounds of " + tparam + " = " + tparam.info)
-          } else {
-            debuglog("redundant: "+tparam+" "+tparam.info+"/"+lo+" "+hi)
-          }
-        } else {
-          debuglog("inconsistent: "+tparam+" "+lo+" "+hi)
-        }
-      }
-    }
-
-    /** Does `tp` contain any types that cannot be checked at run-time (i.e., after erasure, will isInstanceOf[erased(tp)] imply conceptualIsInstanceOf[tp]?)
-     * we should find a way to ask erasure: hey, is `tp` going to make it through you with all of its isInstanceOf resolving powers intact?
-     * TODO: at the very least, reduce duplication wrt checkCheckable
-     */
-    def containsUnchecked(tp: Type): Boolean = {
-      def check(tp: Type, bound: List[Symbol]): Boolean = {
-        def isSurroundingTypeParam(sym: Symbol) = {
-          val e = context.scope.lookupEntry(sym.name)
-            (    (e ne null)
-              && (e.sym == sym )
-              && !e.sym.isTypeParameterOrSkolem
-              && (e.owner == context.scope)
-            )
-        }
-        def isLocalBinding(sym: Symbol) = (
-          sym.isAbstractType && (
-               (bound contains sym)
-            || (sym.name == tpnme.WILDCARD)
-            || isSurroundingTypeParam(sym)
-          )
-        )
-        tp.normalize match {
-          case SingleType(pre, _) =>
-            check(pre, bound)
-          case TypeRef(_, ArrayClass, arg :: _) =>
-            check(arg, bound)
-          case tp @ TypeRef(pre, sym, args) =>
-            (  (sym.isAbstractType && !isLocalBinding(sym))
-            || (args exists (x => !isLocalBinding(x.typeSymbol)))
-            || check(pre, bound)
-            )
-          // case RefinedType(_, decls) if decls.nonEmpty =>
-          //   patternWarning(tp, "refinement ")
-          case RefinedType(parents, _) =>
-            parents exists (p => check(p, bound))
-          case ExistentialType(quantified, tp1) =>
-            check(tp1, bound ::: quantified)
-          case _ =>
-            false
+      val tparam                    = tvar.origin.typeSymbol
+      val TypeBounds(lo0, hi0)      = tparam.info.bounds
+      val tb @ TypeBounds(lo1, hi1) = instBounds(tvar)
+      val enclCase                  = context.enclosingCaseDef
+      def enclCase_s                = enclCase.toString.replaceAll("\\n", " ").take(60)
+
+      if (enclCase.savedTypeBounds.nonEmpty) log(
+        sm"""|instantiateTypeVar with nonEmpty saved type bounds {
+             |  enclosing  $enclCase_s
+             |      saved  ${enclCase.savedTypeBounds}
+             |     tparam  ${tparam.shortSymbolClass} ${tparam.defString}
+             |}""")
+
+      if (lo1 <:< hi1) {
+        if (lo1 <:< lo0 && hi0 <:< hi1) // bounds unimproved
+          log(s"redundant bounds: discarding TypeBounds($lo1, $hi1) for $tparam, no improvement on TypeBounds($lo0, $hi0)")
+        else if (tparam == lo1.typeSymbolDirect || tparam == hi1.typeSymbolDirect)
+          log(s"cyclical bounds: discarding TypeBounds($lo1, $hi1) for $tparam because $tparam appears as bounds")
+        else {
+          enclCase pushTypeBounds tparam
+          tparam setInfo logResult(s"updated bounds: $tparam from ${tparam.info} to")(tb)
         }
       }
-      check(tp, Nil)
+      else log(s"inconsistent bounds: discarding TypeBounds($lo1, $hi1)")
     }
 
-
     /** Type intersection of simple type tp1 with general type tp2.
      *  The result eliminates some redundancies.
      */
@@ -1393,16 +1153,16 @@ trait Infer extends Checkable {
     }
 
     def inferTypedPattern(tree0: Tree, pattp: Type, pt0: Type, canRemedy: Boolean): Type = {
-      val pt        = widen(pt0)
+      val pt        = abstractTypesToBounds(pt0)
       val ptparams  = freeTypeParamsOfTerms(pt)
       val tpparams  = freeTypeParamsOfTerms(pattp)
 
       def ptMatchesPattp = pt matchesPattern pattp.widen
       def pattpMatchesPt = pattp matchesPattern pt
 
-      /** If we can absolutely rule out a match we can fail early.
-       *  This is the case if the scrutinee has no unresolved type arguments
-       *  and is a "final type", meaning final + invariant in all type parameters.
+      /* If we can absolutely rule out a match we can fail early.
+       * This is the case if the scrutinee has no unresolved type arguments
+       * and is a "final type", meaning final + invariant in all type parameters.
        */
       if (pt.isFinalType && ptparams.isEmpty && !ptMatchesPattp) {
         IncompatibleScrutineeTypeError(tree0, pattp, pt)
@@ -1438,9 +1198,9 @@ trait Infer extends Checkable {
         }
         tvars foreach instantiateTypeVar
       }
-      /** If the scrutinee has free type parameters but the pattern does not,
-       *  we have to flip the arguments so the expected type is treated as more
-       *  general when calculating the intersection.  See run/bug2755.scala.
+      /* If the scrutinee has free type parameters but the pattern does not,
+       * we have to flip the arguments so the expected type is treated as more
+       * general when calculating the intersection.  See run/bug2755.scala.
        */
       if (tpparams.isEmpty && ptparams.nonEmpty) intersect(pattp, pt)
       else intersect(pt, pattp)
@@ -1500,193 +1260,152 @@ trait Infer extends Checkable {
 
     /* -- Overload Resolution ---------------------------------------------- */
 
-/*
-    def checkNotShadowed(pos: Position, pre: Type, best: Symbol, eligible: List[Symbol]) =
-      if (!phase.erasedTypes)
-        for (alt <- eligible) {
-          if (isProperSubClassOrObject(alt.owner, best.owner))
-            error(pos,
-                  "erroneous reference to overloaded definition,\n"+
-                  "most specific definition is: "+best+best.locationString+" of type "+pre.memberType(best)+
-                  ",\nyet alternative definition   "+alt+alt.locationString+" of type "+pre.memberType(alt)+
-                  "\nis defined in a subclass")
-        }
-*/
-
-    /** Assign <code>tree</code> the symbol and type of the alternative which
-     *  matches prototype <code>pt</code>, if it exists.
+    /** Assign `tree` the symbol and type of the alternative which
+     *  matches prototype `pt`, if it exists.
      *  If several alternatives match `pt`, take the parameterless one.
      *  If no alternative matches `pt`, take the parameterless one anyway.
      */
-    def inferExprAlternative(tree: Tree, pt: Type) = tree.tpe match {
-      case OverloadedType(pre, alts) => tryTwice { isSecondTry =>
-        val alts0          = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt))
-        val noAlternatives = alts0.isEmpty
-        val alts1          = if (noAlternatives) alts else alts0
-
-        //println("trying "+alts1+(alts1 map (_.tpe))+(alts1 map (_.locationString))+" for "+pt)
-        def improves(sym1: Symbol, sym2: Symbol): Boolean =
-          sym2 == NoSymbol || sym2.hasAnnotation(BridgeClass) ||
-          { val tp1 = pre.memberType(sym1)
-            val tp2 = pre.memberType(sym2)
-            (tp2 == ErrorType ||
-             !global.typer.infer.isWeaklyCompatible(tp2, pt) && global.typer.infer.isWeaklyCompatible(tp1, pt) ||
-             isStrictlyMoreSpecific(tp1, tp2, sym1, sym2)) }
-
-        val best = ((NoSymbol: Symbol) /: alts1) ((best, alt) =>
-          if (improves(alt, best)) alt else best)
-
-        val competing = alts1 dropWhile (alt => best == alt || improves(best, alt))
-
-        if (best == NoSymbol) {
-          if (settings.debug.value) {
-            tree match {
-              case Select(qual, _) =>
-                Console.println("qual: " + qual + ":" + qual.tpe +
-                                   " with decls " + qual.tpe.decls +
-                                   " with members " + qual.tpe.members +
-                                   " with members " + qual.tpe.member(newTermName("$minus")))
-              case _ =>
-            }
-          }
-          // todo: missing test case
-          NoBestExprAlternativeError(tree, pt, isSecondTry)
-        } else if (!competing.isEmpty) {
-          if (noAlternatives) NoBestExprAlternativeError(tree, pt, isSecondTry)
-          else if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt, isSecondTry)
-          else {
+    def inferExprAlternative(tree: Tree, pt: Type): Tree = {
+      def tryOurBests(pre: Type, alts: List[Symbol], isSecondTry: Boolean): Unit = {
+        val alts0 = alts filter (alt => isWeaklyCompatible(pre memberType alt, pt))
+        val alts1 = if (alts0.isEmpty) alts else alts0
+        val bests = bestAlternatives(alts1) { (sym1, sym2) =>
+          val tp1 = pre memberType sym1
+          val tp2 = pre memberType sym2
+
+          (    (tp2 eq ErrorType)
+            || isWeaklyCompatible(tp1, pt) && !isWeaklyCompatible(tp2, pt)
+            || isStrictlyMoreSpecific(tp1, tp2, sym1, sym2)
+          )
+        }
+        // todo: missing test case for bests.isEmpty
+        bests match {
+          case best :: Nil                              => tree setSymbol best setType (pre memberType best)
+          case best :: competing :: _ if alts0.nonEmpty =>
             // SI-6912 Don't give up and leave an OverloadedType on the tree.
             //         Originally I wrote this as `if (secondTry) ... `, but `tryTwice` won't attempt the second try
             //         unless an error is issued. We're not issuing an error, in the assumption that it would be
             //         spurious in light of the erroneous expected type
-            setError(tree)
-          }
-        } else {
-//          val applicable = alts1 filter (alt =>
-//            global.typer.infer.isWeaklyCompatible(pre.memberType(alt), pt))
-//          checkNotShadowed(tree.pos, pre, best, applicable)
-          tree.setSymbol(best).setType(pre.memberType(best))
+            if (pt.isErroneous) setError(tree)
+            else AmbiguousExprAlternativeError(tree, pre, best, competing, pt, isSecondTry)
+          case _                                        => if (bests.isEmpty || alts0.isEmpty) NoBestExprAlternativeError(tree, pt, isSecondTry)
         }
       }
-    }
-
-    @inline private def inSilentMode(context: Context)(expr: => Boolean): Boolean = {
-      val oldState = context.state
-      context.setBufferErrors()
-      val res = expr
-      val contextWithErrors = context.hasErrors
-      context.flushBuffer()
-      context.restoreState(oldState)
-      res && !contextWithErrors
+      tree.tpe match {
+        case OverloadedType(pre, alts) => tryTwice(tryOurBests(pre, alts, _)) ; tree
+        case _                         => tree
+      }
     }
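
For illustration, a minimal user-level sketch of the selection this performs (invented names, assuming standard Scala 2.11 overload resolution): the expected type plays the role of the prototype `pt` and picks among the overloaded alternatives.

    object ExprAltDemo {
      def f(x: Int): Int = x + 1
      def f(x: String): String = x.reverse
      // The expected function type acts as `pt` and selects the alternative.
      val g: Int => Int = f
      val h: String => String = f
      def main(args: Array[String]): Unit = println((g(1), h("ab")))
    }
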
 
     // Checks against the name of the parameter and also any @deprecatedName.
     private def paramMatchesName(param: Symbol, name: Name) =
       param.name == name || param.deprecatedParamName.exists(_ == name)
 
-    // Check the first parameter list the same way.
-    private def methodMatchesName(method: Symbol, name: Name) = method.paramss match {
-      case ps :: _  => ps exists (p => paramMatchesName(p, name))
-      case _        => false
+    private def containsNamedType(argtpes: List[Type]): Boolean = argtpes match {
+      case Nil                  => false
+      case NamedType(_, _) :: _ => true
+      case _ :: rest            => containsNamedType(rest)
     }
-
-    private def resolveOverloadedMethod(argtpes: List[Type], eligible: List[Symbol]) = {
+    private def namesOfNamedArguments(argtpes: List[Type]) =
+      argtpes collect { case NamedType(name, _) => name }
+
+    /** Given a list of argument types and eligible method overloads, whittle the
+     *  list down to the methods which should be considered for specificity
+     *  testing, taking into account here:
+     *   - named arguments at the call site (keep only methods with name-matching parameters)
+     *   - if multiple methods are eligible, drop any methods which take default arguments
+     *   - drop any where arity cannot match under any conditions (allowing for
+     *     overloaded applies, varargs, and tupling conversions)
+     *  This method is conservative; it can tolerate some varieties of false positive,
+     *  but no false negatives.
+     *
+     *  @param  eligible     the overloaded method symbols
+     *  @param  argtpes      the argument types at the call site
+     *  @param  varargsStar  true if the call site has a `: _*` attached to the last argument
+     */
+    private def overloadsToConsiderBySpecificity(eligible: List[Symbol], argtpes: List[Type], varargsStar: Boolean): List[Symbol] = {
+      // TODO spec: this namesMatch business is not spec'ed, and is the wrong fix for SI-4592
+      // we should instead clarify what the spec means by "typing each argument with an undefined expected type".
+      // What does typing a named argument entail when we don't know what the valid parameter names are?
+      // (Since we're doing overload resolution, there are multiple alternatives that can define different names.)
+      // Luckily, the next step checks applicability to the individual alternatives, so it knows whether an assignment is:
+      // 1) a valid named argument
+      // 2) a well-typed assignment
+      // 3) an error (e.g., rhs does not refer to a variable)
+      //
+      // For now, the logic is:
       // If there are any foo=bar style arguments, and any of the overloaded
-      // methods has a parameter named `foo`, then only those methods are considered.
-      val namesOfArgs = argtpes collect { case NamedType(name, _) => name }
-      val namesMatch = (
-        if (namesOfArgs.isEmpty) Nil
-        else eligible filter { m =>
-          namesOfArgs forall { name =>
-            methodMatchesName(m, name)
-          }
-        }
-      )
-
-      if (namesMatch.nonEmpty) namesMatch
-      else if (eligible.isEmpty || eligible.tail.isEmpty) eligible
-      else eligible filter { alt =>
-        // for functional values, the `apply` method might be overloaded
-        val mtypes = followApply(alt.tpe) match {
-          case OverloadedType(_, alts) => alts map (_.tpe)
-          case t                       => t :: Nil
-        }
-        // Drop those that use a default; keep those that use vararg/tupling conversion.
-        mtypes exists (t =>
-          !t.typeSymbol.hasDefaultFlag && (
-               compareLengths(t.params, argtpes) < 0  // tupling (*)
-            || hasExactlyNumParams(t, argtpes.length) // same nb or vararg
-          )
-        )
-        // (*) more arguments than parameters, but still applicable: tupling conversion works.
-        //     todo: should not return "false" when paramTypes = (Unit) no argument is given
-        //     (tupling would work)
+      // methods has a parameter named `foo`, then only those methods are considered when we must disambiguate.
+      def namesMatch = namesOfNamedArguments(argtpes) match {
+        case Nil   => Nil
+        case names => eligible filter (m => names forall (name => m.info.params exists (p => paramMatchesName(p, name))))
       }
+      if (eligible.isEmpty || eligible.tail.isEmpty) eligible
+      else
+        namesMatch match {
+          case namesMatch if namesMatch.nonEmpty => namesMatch // TODO: this has no basis in the spec, remove!
+          case _ =>
+            // If there are multiple applicable alternatives, drop those using default arguments.
+            // This is done indirectly by checking applicability based on arity in `isApplicableBasedOnArity`.
+            // If defaults are required in the application, the arities won't match up exactly.
+            // TODO: should we really allow tupling here?? (If we don't, this is the only call-site with `tuplingAllowed = true`)
+            eligible filter (alt => isApplicableBasedOnArity(alt.tpe, argtpes.length, varargsStar, tuplingAllowed = true))
+        }
     }
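
A user-level sketch of the name-based filtering described above (illustrative names, assuming the usual Scala 2.11 named-argument rules): only alternatives whose parameters match the names used at the call site stay in play.

    object NamedArgDemo {
      def render(width: Int, height: Int): String = s"${width}x${height}"
      def render(label: String): String = label
      def main(args: Array[String]): Unit = {
        // Only the first alternative declares parameters named `width` and `height`,
        // so it is the one kept for the subsequent specificity test.
        println(render(width = 2, height = 3))
        println(render("wide"))
      }
    }
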
 
-    /** Assign <code>tree</code> the type of an alternative which is applicable
-     *  to <code>argtpes</code>, and whose result type is compatible with `pt`.
+    /** Assign `tree` the type of an alternative which is applicable
+     *  to `argtpes`, and whose result type is compatible with `pt`.
      *  If several applicable alternatives exist, drop the alternatives which use
      *  default arguments, then select the most specialized one.
      *  If no applicable alternative exists, and pt != WildcardType, try again
      *  with pt = WildcardType.
      *  Otherwise, if there is no best alternative, error.
      *
-     *  @param argtpes contains the argument types. If an argument is named, as
+     *  @param argtpes0 contains the argument types. If an argument is named, as
      *    "a = 3", the corresponding type is `NamedType("a", Int)'. If the name
      *    of some NamedType does not exist in an alternative's parameter names,
      *    the type is replaced by `Unit`, i.e. the argument is treated as an
      *    assignment expression.
+     *
+     *  @pre  tree.tpe is an OverloadedType.
      */
-    def inferMethodAlternative(tree: Tree, undetparams: List[Symbol],
-                               argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false, lastInferAttempt: Boolean = true): Unit = tree.tpe match {
-      case OverloadedType(pre, alts) =>
-        val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
-        tryTwice { isSecondTry =>
-          debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
-
-          def varargsApplicableCheck(alt: Symbol) = !varArgsOnly || (
-               isVarArgsList(alt.tpe.params)
-            && (argtpes.size >= alt.tpe.params.size) // must be checked now due to SI-5859
-          )
-          val applicable = resolveOverloadedMethod(argtpes,
-            alts filter (alt =>
-                 varargsApplicableCheck(alt)
-              && inSilentMode(context)(isApplicable(undetparams, followApply(pre memberType alt), argtpes, pt))
-            )
-          )
-
-          def improves(sym1: Symbol, sym2: Symbol) = {
-            // util.trace("improve "+sym1+sym1.locationString+" on "+sym2+sym2.locationString)
-            sym2 == NoSymbol || sym2.isError || sym2.hasAnnotation(BridgeClass) ||
-            isStrictlyMoreSpecific(followApply(pre.memberType(sym1)),
-                                   followApply(pre.memberType(sym2)), sym1, sym2)
-          }
-
-          val best = ((NoSymbol: Symbol) /: applicable) ((best, alt) =>
-            if (improves(alt, best)) alt else best)
-          val competing = applicable.dropWhile(alt => best == alt || improves(best, alt))
-          if (best == NoSymbol) {
-            if (pt == WildcardType) NoBestMethodAlternativeError(tree, argtpes, pt, isSecondTry && lastInferAttempt)
-            else inferMethodAlternative(tree, undetparams, argtpes, WildcardType, lastInferAttempt = isSecondTry)
-          } else if (!competing.isEmpty) {
-            AmbiguousMethodAlternativeError(tree, pre, best, competing.head, argtpes, pt, isSecondTry && lastInferAttempt)
-          } else {
-//            checkNotShadowed(tree.pos, pre, best, applicable)
-            tree.setSymbol(best).setType(pre.memberType(best))
-          }
+    def inferMethodAlternative(tree: Tree, undetparams: List[Symbol], argtpes0: List[Type], pt0: Type): Unit = {
+      val OverloadedType(pre, alts) = tree.tpe
+      var varargsStar = false
+      val argtpes = argtpes0 mapConserve {
+        case RepeatedType(tp) => varargsStar = true ; tp
+        case tp               => tp
+      }
+      def followType(sym: Symbol) = followApply(pre memberType sym)
+      def bestForExpectedType(pt: Type, isLastTry: Boolean): Unit = {
+        val applicable0 = alts filter (alt => context inSilentMode isApplicable(undetparams, followType(alt), argtpes, pt))
+        val applicable  = overloadsToConsiderBySpecificity(applicable0, argtpes, varargsStar)
+        val ranked      = bestAlternatives(applicable)((sym1, sym2) =>
+          isStrictlyMoreSpecific(followType(sym1), followType(sym2), sym1, sym2)
+        )
+        ranked match {
+          case best :: competing :: _ => AmbiguousMethodAlternativeError(tree, pre, best, competing, argtpes, pt, isLastTry) // ambiguous
+          case best :: Nil            => tree setSymbol best setType (pre memberType best)           // success
+          case Nil if pt.isWildcard   => NoBestMethodAlternativeError(tree, argtpes, pt, isLastTry)  // failed
+          case Nil                    => bestForExpectedType(WildcardType, isLastTry)                // failed, but retry with WildcardType
         }
-      case _ =>
+      }
+      // This potentially makes up to four attempts: tryTwice may execute
+      // with and without views enabled, and bestForExpectedType will try again
+      // with pt = WildcardType if it fails with pt != WildcardType.
+      tryTwice { isLastTry =>
+        val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
+        debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
+        bestForExpectedType(pt, isLastTry)
+      }
     }
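
A small sketch of the two call-site shapes handled here (invented names): a `: _*` argument, which is the `varargsStar` case tracked above, and a plain application resolved by specificity.

    object VarargsDemo {
      def sum(xs: Int*): Int = xs.sum
      def sum(x: Int, y: Int): Int = x + y
      def main(args: Array[String]): Unit = {
        val xs = List(3, 4, 5)
        println(sum(xs: _*)) // sequence argument: only the Int* alternative applies
        println(sum(1, 2))   // both apply; the fixed-arity alternative is more specific
      }
    }
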
 
     /** Try inference twice, once without views and once with views,
      *  unless views are already disabled.
-     *
-     *  @param infer ...
      */
     def tryTwice(infer: Boolean => Unit): Unit = {
       if (context.implicitsEnabled) {
-        val saved = context.state
+        val savedContextMode = context.contextMode
         var fallback = false
         context.setBufferErrors()
         // We cache the current buffer because it is impossible to
@@ -1700,65 +1419,59 @@ trait Infer extends Checkable {
           context.withImplicitsDisabled(infer(false))
           if (context.hasErrors) {
             fallback = true
-            context.restoreState(saved)
+            context.contextMode = savedContextMode
             context.flushBuffer()
             infer(true)
           }
         } catch {
           case ex: CyclicReference  => throw ex
           case ex: TypeError        => // recoverable cyclic references
-            context.restoreState(saved)
+            context.contextMode = savedContextMode
             if (!fallback) infer(true) else ()
         } finally {
-          context.restoreState(saved)
+          context.contextMode = savedContextMode
           context.updateBuffer(errorsToRestore)
         }
       }
       else infer(true)
     }
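
A hedged user-level sketch of why the second attempt matters (invented names; assuming the usual Scala 2.11 treatment of views during overload resolution):

    object ViewDemo {
      import scala.language.implicitConversions
      implicit def intToLabel(i: Int): String = "#" + i
      def show(label: String): String = "label " + label
      def show(flag: Boolean): String = "flag " + flag
      def main(args: Array[String]): Unit =
        // No alternative takes an Int, so the first attempt (views disabled) fails;
        // the retry with views enabled can apply the Int => String conversion.
        println(show(42))
    }
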
 
-    /** Assign <code>tree</code> the type of all polymorphic alternatives
-     *  with <code>nparams</code> as the number of type parameters, if it exists.
+    /** Assign `tree` the type of all polymorphic alternatives
+     *  which have the same number of type parameters as there are `argtypes`,
+     *  provided all argtypes are within the corresponding type parameter bounds.
      *  If no such polymorphic alternative exists, error.
-     *
-     *  @param tree ...
-     *  @param nparams ...
      */
     def inferPolyAlternatives(tree: Tree, argtypes: List[Type]): Unit = {
       val OverloadedType(pre, alts) = tree.tpe
-      val sym0 = tree.symbol filter (alt => sameLength(alt.typeParams, argtypes))
-      def fail(kind: PolyAlternativeErrorKind.ErrorType) =
-        PolyAlternativeError(tree, argtypes, sym0, kind)
-
-      if (sym0 == NoSymbol) return (
-        if (alts exists (_.typeParams.nonEmpty))
-          fail(PolyAlternativeErrorKind.WrongNumber)
-        else fail(PolyAlternativeErrorKind.NoParams))
-
-      val (resSym, resTpe) = {
-        if (!sym0.isOverloaded)
-          (sym0, pre.memberType(sym0))
-        else {
-          val sym = sym0 filter (alt => isWithinBounds(pre, alt.owner, alt.typeParams, argtypes))
-          if (sym == NoSymbol) {
-            if (argtypes forall (x => !x.isErroneous))
-              fail(PolyAlternativeErrorKind.ArgsDoNotConform)
-            return
-          }
-          else if (sym.isOverloaded) {
-            val xs      = sym.alternatives
-            val tparams = new AsSeenFromMap(pre, xs.head.owner) mapOver xs.head.typeParams
-            val bounds  = tparams map (_.tpeHK) // see e.g., #1236
-            val tpe     = PolyType(tparams, OverloadedType(AntiPolyType(pre, bounds), xs))
-
-            (sym setInfo tpe, tpe)
-          }
-          else (sym, pre.memberType(sym))
-        }
+      // Alternatives with a matching length type parameter list
+      val matchingLength   = tree.symbol filter (alt => sameLength(alt.typeParams, argtypes))
+      def allMonoAlts      = alts forall (_.typeParams.isEmpty)
+      def errorKind        = matchingLength match {
+        case NoSymbol if allMonoAlts => PolyAlternativeErrorKind.NoParams          // no polymorphic method alternative
+        case NoSymbol                => PolyAlternativeErrorKind.WrongNumber       // wrong number of tparams
+        case _                       => PolyAlternativeErrorKind.ArgsDoNotConform  // didn't conform to bounds
+      }
+      def fail() = PolyAlternativeError(tree, argtypes, matchingLength, errorKind)
+      def finish(sym: Symbol, tpe: Type) = tree setSymbol sym setType tpe
+      // Alternatives which conform to bounds
+      def checkWithinBounds(sym: Symbol) = sym.alternatives match {
+        case Nil if argtypes.exists(_.isErroneous) =>
+        case Nil                                   => fail()
+        case alt :: Nil                            => finish(alt, pre memberType alt)
+        case alts @ (hd :: _)                      =>
+          log(s"Attaching AntiPolyType-carrying overloaded type to $sym")
+          // Multiple alternatives which are within bounds; spin up an
+          // overloaded type which carries an "AntiPolyType" as a prefix.
+          val tparams = newAsSeenFromMap(pre, hd.owner) mapOver hd.typeParams
+          val bounds  = tparams map (_.tpeHK) // see e.g., #1236
+          val tpe     = PolyType(tparams, OverloadedType(AntiPolyType(pre, bounds), alts))
+          finish(sym setInfo tpe, tpe)
+      }
+      matchingLength.alternatives match {
+        case Nil        => fail()
+        case alt :: Nil => finish(alt, pre memberType alt)
+        case _          => checkWithinBounds(matchingLength filter (alt => isWithinBounds(pre, alt.owner, alt.typeParams, argtypes)))
       }
-      // Side effects tree with symbol and type
-      tree setSymbol resSym setType resTpe
     }
   }
 }
-
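
To illustrate what inferPolyAlternatives above selects on (a sketch with invented names): explicit type argument lists are matched against the number of type parameters before the value arguments are considered.

    object PolyAltDemo {
      def pick[A](a: A): A = a
      def pick[A, B](a: A, b: B): (A, B) = (a, b)
      def main(args: Array[String]): Unit = {
        println(pick[Int](1))                // one type argument: the single-tparam alternative
        println(pick[Int, String](1, "one")) // two type arguments: the two-tparam alternative
      }
    }
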
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index d6ec5f2..9cf92ca 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -1,19 +1,21 @@
 package scala.tools.nsc
 package typechecker
 
+import java.lang.Math.min
 import symtab.Flags._
 import scala.tools.nsc.util._
-import scala.tools.nsc.util.ClassPath._
 import scala.reflect.runtime.ReflectionUtils
 import scala.collection.mutable.ListBuffer
-import scala.compat.Platform.EOL
+import scala.reflect.ClassTag
 import scala.reflect.internal.util.Statistics
 import scala.reflect.macros.util._
-import java.lang.{Class => jClass}
-import java.lang.reflect.{Array => jArray, Method => jMethod}
-import scala.reflect.internal.util.Collections._
 import scala.util.control.ControlThrowable
-import scala.reflect.macros.runtime.AbortMacroException
+import scala.reflect.macros.runtime.{AbortMacroException, MacroRuntimes}
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.macros.compiler.DefaultMacroCompiler
+import scala.tools.reflect.FastTrack
+import scala.runtime.ScalaRunTime
+import Fingerprint._
 
 /**
  *  Code to deal with macros, namely with:
@@ -27,7 +29,7 @@ import scala.reflect.macros.runtime.AbortMacroException
  *  Then fooBar needs to point to a static method of the following form:
  *
  *    def fooBar[T: c.WeakTypeTag] // type tag annotation is optional
- *           (c: scala.reflect.macros.Context)
+ *           (c: scala.reflect.macros.blackbox.Context)
  *           (xs: c.Expr[List[T]])
  *           : c.Expr[T] = {
  *      ...
@@ -40,15 +42,22 @@ import scala.reflect.macros.runtime.AbortMacroException
  *    (Expr(elems))
  *    (TypeTag(Int))
  */
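
For reference, a minimal def/impl pair of the shape sketched above (illustrative names; requires scala-reflect on the classpath, and the impl must live in a statically accessible object compiled before its callers):

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object Impls {
      // macro implementation: static, public, not overloaded
      def fooBar[T: c.WeakTypeTag](c: Context)(xs: c.Expr[List[T]]): c.Expr[T] = {
        import c.universe._
        c.Expr[T](q"${xs.tree}.head")
      }
    }

    object Macros {
      // macro definition that clients call; its right-hand side just names the impl
      def foo[T](xs: List[T]): T = macro Impls.fooBar[T]
    }
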
-trait Macros extends scala.tools.reflect.FastTrack with Traces {
+trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
   self: Analyzer =>
 
   import global._
   import definitions._
   import treeInfo.{isRepeatedParamType => _, _}
   import MacrosStats._
+
   def globalSettings = global.settings
 
+  protected def findMacroClassLoader(): ClassLoader = {
+    val classpath = global.classPath.asURLs
+    macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath))
+    ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
+  }
+
   /** `MacroImplBinding` and its companion module are responsible for
    *  serialization/deserialization of macro def -> impl bindings.
    *
@@ -58,7 +67,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
    *
    *  This solution is very simple, but unfortunately it's also lacking. If we use it, then
    *  signatures of macro defs become transitively dependent on scala-reflect.jar
-   *  (because they refer to macro impls, and macro impls refer to scala.reflect.macros.Context defined in scala-reflect.jar).
+   *  (because they refer to macro impls, and macro impls refer to *box.Context defined in scala-reflect.jar).
    *  More details can be found in comments to https://issues.scala-lang.org/browse/SI-5940.
    *
    *  Therefore we have to avoid putting macro impls into binding pickles and come up with our own serialization format.
@@ -71,64 +80,85 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
    *  Includes a path to load the implementation via Java reflection,
    *  and various accounting information necessary when composing an argument list for the reflective invocation.
    */
-  private case class MacroImplBinding(
-    // Java class name of the class that contains the macro implementation
-    // is used to load the corresponding object with Java reflection
-    val className: String,
-    // method name of the macro implementation
-    // `className` and `methName` are all we need to reflectively invoke a macro implementation
-    // because macro implementations cannot be overloaded
-    val methName: String,
-    // flattens the macro impl's parameter lists having symbols replaced with metadata
-    // currently metadata is an index of the type parameter corresponding to that type tag (if applicable)
-    // f.ex. for: def impl[T: WeakTypeTag, U: WeakTypeTag, V](c: Context)(x: c.Expr[T]): (U, V) = ???
-    // `signature` will be equal to List(-1, -1, 0, 1)
-    val signature: List[Int],
-    // type arguments part of a macro impl ref (the right-hand side of a macro definition)
-    // these trees don't refer to a macro impl, so we can pickle them as is
-    val targs: List[Tree])
+  case class MacroImplBinding(
+      // Is this macro impl a bundle (a trait extending *box.Macro) or a vanilla def?
+      val isBundle: Boolean,
+      // Is this macro impl blackbox (i.e. having blackbox.Context in its signature)?
+      val isBlackbox: Boolean,
+      // Java class name of the class that contains the macro implementation
+      // is used to load the corresponding object with Java reflection
+      className: String,
+      // method name of the macro implementation
+      // `className` and `methName` are all we need to reflectively invoke a macro implementation
+      // because macro implementations cannot be overloaded
+      methName: String,
+      // flattens the macro impl's parameter lists having symbols replaced with their fingerprints
+      // currently fingerprints are calculated solely from types of the symbols:
+      //   * c.Expr[T] => LiftedTyped
+      //   * c.Tree => LiftedUntyped
+      //   * c.WeakTypeTag[T] => Tagged(index of the type parameter corresponding to that type tag)
+      //   * everything else (e.g. *box.Context) => Other
+      // f.ex. for: def impl[T: WeakTypeTag, U, V: WeakTypeTag](c: blackbox.Context)(x: c.Expr[T], y: c.Tree): (U, V) = ???
+      // `signature` will be equal to List(List(Other), List(LiftedTyped, LiftedUntyped), List(Tagged(0), Tagged(2)))
+      signature: List[List[Fingerprint]],
+      // type arguments part of a macro impl ref (the right-hand side of a macro definition)
+      // these trees don't refer to a macro impl, so we can pickle them as is
+      targs: List[Tree]) {
+    // Was this binding derived from a `def ... = macro ???` definition?
+    def is_??? = {
+      val Predef_??? = currentRun.runDefinitions.Predef_???
+      className == Predef_???.owner.javaClassName && methName == Predef_???.name.encoded
+    }
+    def isWhitebox = !isBlackbox
+  }
 
   /** Macro def -> macro impl bindings are serialized into a `macroImpl` annotation
    *  with synthetic content that carries the payload described in `MacroImplBinding`.
    *
    *  For example, for a pair of macro definition and macro implementation:
-   *    def impl(c: scala.reflect.macros.Context): c.Expr[Unit] = c.literalUnit;
+   *    def impl(c: scala.reflect.macros.blackbox.Context): c.Expr[Unit] = ???
    *    def foo: Unit = macro impl
    *
    *  We will have the following annotation added on the macro definition `foo`:
    *
    *    @scala.reflect.macros.internal.macroImpl(
    *      `macro`(
-   *        "signature" = List(-1),
+   *        "macroEngine" = <current macro engine>,
+   *        "isBundle" = false,
+   *        "isBlackbox" = true,
+   *        "signature" = List(Other),
    *        "methodName" = "impl",
-   *        "versionFormat" = 1,
    *        "className" = "Macros$"))
    */
-  private object MacroImplBinding {
-    val versionFormat = 1
-
+  def macroEngine = "v7.0 (implemented in Scala 2.11.0-M8)"
+  object MacroImplBinding {
     def pickleAtom(obj: Any): Tree =
       obj match {
         case list: List[_] => Apply(Ident(ListModule), list map pickleAtom)
         case s: String => Literal(Constant(s))
-        case i: Int => Literal(Constant(i))
+        case d: Double => Literal(Constant(d))
+        case b: Boolean => Literal(Constant(b))
+        case f: Fingerprint => Literal(Constant(f.value))
       }
 
     def unpickleAtom(tree: Tree): Any =
       tree match {
         case Apply(list @ Ident(_), args) if list.symbol == ListModule => args map unpickleAtom
         case Literal(Constant(s: String)) => s
-        case Literal(Constant(i: Int)) => i
+        case Literal(Constant(d: Double)) => d
+        case Literal(Constant(b: Boolean)) => b
+        case Literal(Constant(i: Int)) => Fingerprint(i)
       }
 
     def pickle(macroImplRef: Tree): Tree = {
-      val MacroImplReference(owner, macroImpl, targs) = macroImplRef
-      val paramss = macroImpl.paramss
+      val runDefinitions = currentRun.runDefinitions
+      import runDefinitions._
+      val MacroImplReference(isBundle, isBlackbox, owner, macroImpl, targs) = macroImplRef
 
       // todo. refactor when fixing SI-5498
       def className: String = {
         def loop(sym: Symbol): String = sym match {
-          case sym if sym.owner.isPackageClass =>
+          case sym if sym.isTopLevel =>
             val suffix = if (sym.isModuleClass) "$" else ""
             sym.fullName + suffix
           case sym =>
@@ -139,16 +169,25 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
         loop(owner)
       }
 
-      def signature: List[Int] = {
-        val transformed = transformTypeTagEvidenceParams(paramss, (param, tparam) => tparam)
-        transformed.flatten map (p => if (p.isTerm) -1 else p.paramPos)
+      def signature: List[List[Fingerprint]] = {
+        def fingerprint(tpe: Type): Fingerprint = tpe.dealiasWiden match {
+          case TypeRef(_, RepeatedParamClass, underlying :: Nil) => fingerprint(underlying)
+          case ExprClassOf(_) => LiftedTyped
+          case TreeType() => LiftedUntyped
+          case _ => Other
+        }
+
+        val transformed = transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => tparam)
+        mmap(transformed)(p => if (p.isTerm) fingerprint(p.info) else Tagged(p.paramPos))
       }
 
       val payload = List[(String, Any)](
-        "versionFormat" -> versionFormat,
-        "className"     -> className,
-        "methodName"    -> macroImpl.name.toString,
-        "signature"     -> signature
+        "macroEngine" -> macroEngine,
+        "isBundle"    -> isBundle,
+        "isBlackbox"  -> isBlackbox,
+        "className"   -> className,
+        "methodName"  -> macroImpl.name.toString,
+        "signature"   -> signature
       )
 
       // the shape of the nucleus is chosen arbitrarily. it doesn't carry any payload.
@@ -185,498 +224,296 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
       val Apply(_, pickledPayload) = wrapped
       val payload = pickledPayload.map{ case Assign(k, v) => (unpickleAtom(k), unpickleAtom(v)) }.toMap
 
-      val pickleVersionFormat = payload("versionFormat").asInstanceOf[Int]
-      if (versionFormat != pickleVersionFormat) throw new Error("macro impl binding format mismatch: expected $versionFormat, actual $pickleVersionFormat")
+      import typer.TyperErrorGen._
+      def fail(msg: String) = MacroCantExpandIncompatibleMacrosError(msg)
+      def unpickle[T](field: String, clazz: Class[T]): T = {
+        def failField(msg: String) = fail(s"$field $msg")
+        if (!payload.contains(field)) failField("is supposed to be there")
+        val raw: Any = payload(field)
+        if (raw == null) failField(s"is not supposed to be null")
+        val expected = ScalaRunTime.box(clazz)
+        val actual = raw.getClass
+        if (!expected.isAssignableFrom(actual)) failField(s"has wrong type: expected $expected, actual $actual")
+        raw.asInstanceOf[T]
+      }
+
+      if (!payload.contains("macroEngine")) MacroCantExpand210xMacrosError("macroEngine field not found")
+      val macroEngine = unpickle("macroEngine", classOf[String])
+      if (self.macroEngine != macroEngine) MacroCantExpandIncompatibleMacrosError(s"expected = ${self.macroEngine}, actual = $macroEngine")
 
-      val className = payload("className").asInstanceOf[String]
-      val methodName = payload("methodName").asInstanceOf[String]
-      val signature = payload("signature").asInstanceOf[List[Int]]
-      MacroImplBinding(className, methodName, signature, targs)
+      val isBundle = unpickle("isBundle", classOf[Boolean])
+      val isBlackbox = unpickle("isBlackbox", classOf[Boolean])
+      val className = unpickle("className", classOf[String])
+      val methodName = unpickle("methodName", classOf[String])
+      val signature = unpickle("signature", classOf[List[List[Fingerprint]]])
+      MacroImplBinding(isBundle, isBlackbox, className, methodName, signature, targs)
     }
   }
 
-  private def bindMacroImpl(macroDef: Symbol, macroImplRef: Tree): Unit = {
+  def bindMacroImpl(macroDef: Symbol, macroImplRef: Tree): Unit = {
     val pickle = MacroImplBinding.pickle(macroImplRef)
     macroDef withAnnotation AnnotationInfo(MacroImplAnnotation.tpe, List(pickle), Nil)
   }
 
-  private def loadMacroImplBinding(macroDef: Symbol): MacroImplBinding = {
-    val Some(AnnotationInfo(_, List(pickle), _)) = macroDef.getAnnotation(MacroImplAnnotation)
-    MacroImplBinding.unpickle(pickle)
-  }
-
-  /** Transforms parameters lists of a macro impl.
-   *  The `transform` function is invoked only for WeakTypeTag evidence parameters.
-   *
-   *  The transformer takes two arguments: a value parameter from the parameter list
-   *  and a type parameter that is witnesses by the value parameter.
-   *
-   *  If the transformer returns a NoSymbol, the value parameter is not included from the result.
-   *  If the transformer returns something else, this something else is included in the result instead of the value parameter.
-   *
-   *  Despite of being highly esoteric, this function significantly simplifies signature analysis.
-   *  For example, it can be used to strip macroImpl.paramss from the evidences (necessary when checking def <-> impl correspondence)
-   *  or to streamline creation of the list of macro arguments.
-   */
-  private def transformTypeTagEvidenceParams(paramss: List[List[Symbol]], transform: (Symbol, Symbol) => Symbol): List[List[Symbol]] = {
-    if (paramss.isEmpty || paramss.last.isEmpty) return paramss // no implicit parameters in the signature => nothing to do
-    if (paramss.head.isEmpty || !(paramss.head.head.tpe <:< MacroContextClass.tpe)) return paramss // no context parameter in the signature => nothing to do
-    def transformTag(param: Symbol): Symbol = param.tpe.dealias match {
-      case TypeRef(SingleType(SingleType(NoPrefix, c), universe), WeakTypeTagClass, targ :: Nil)
-      if c == paramss.head.head && universe == MacroContextUniverse =>
-        transform(param, targ.typeSymbol)
-      case _ =>
-        param
+  def loadMacroImplBinding(macroDef: Symbol): Option[MacroImplBinding] =
+    macroDef.getAnnotation(MacroImplAnnotation) collect {
+      case AnnotationInfo(_, List(pickle), _) => MacroImplBinding.unpickle(pickle)
     }
-    val transformed = paramss.last map transformTag filter (_ ne NoSymbol)
-    if (transformed.isEmpty) paramss.init else paramss.init :+ transformed
-  }
 
-  def computeMacroDefTypeFromMacroImpl(macroDdef: DefDef, macroImpl: Symbol): Type = {
-    // Step I. Transform c.Expr[T] to T
-    var runtimeType = macroImpl.tpe.finalResultType.dealias match {
-      case TypeRef(_, ExprClass, runtimeType :: Nil) => runtimeType
-      case _ => AnyTpe // so that macro impls with rhs = ??? don't screw up our inference
-    }
+  def isBlackbox(expandee: Tree): Boolean = isBlackbox(dissectApplied(expandee).core.symbol)
+  def isBlackbox(macroDef: Symbol): Boolean = {
+    val fastTrackBoxity = fastTrack.get(macroDef).map(_.isBlackbox)
+    val bindingBoxity = loadMacroImplBinding(macroDef).map(_.isBlackbox)
+    fastTrackBoxity orElse bindingBoxity getOrElse false
+  }
 
-    // Step II. Transform type parameters of a macro implementation into type arguments in a macro definition's body
-    runtimeType = runtimeType.substituteTypes(macroImpl.typeParams, loadMacroImplBinding(macroDdef.symbol).targs.map(_.tpe))
-
-    // Step III. Transform c.prefix.value.XXX to this.XXX and implParam.value.YYY to defParam.YYY
-    def unsigma(tpe: Type): Type =
-      transformTypeTagEvidenceParams(macroImpl.paramss, (param, tparam) => NoSymbol) match {
-        case (implCtxParam :: Nil) :: implParamss =>
-          val implToDef = flatMap2(implParamss, macroDdef.vparamss)(map2(_, _)((_, _))).toMap
-          object UnsigmaTypeMap extends TypeMap {
-            def apply(tp: Type): Type = tp match {
-              case TypeRef(pre, sym, args) =>
-                val pre1 = pre match {
-                  case SingleType(SingleType(SingleType(NoPrefix, c), prefix), value) if c == implCtxParam && prefix == MacroContextPrefix && value == ExprValue =>
-                    ThisType(macroDdef.symbol.owner)
-                  case SingleType(SingleType(NoPrefix, implParam), value) if value == ExprValue =>
-                    implToDef get implParam map (defParam => SingleType(NoPrefix, defParam.symbol)) getOrElse pre
+  def computeMacroDefTypeFromMacroImplRef(macroDdef: DefDef, macroImplRef: Tree): Type = {
+    macroImplRef match {
+      case MacroImplReference(_, _, _, macroImpl, targs) =>
+        // Step I. Transform c.Expr[T] to T and everything else to Any
+        var runtimeType = decreaseMetalevel(macroImpl.info.finalResultType)
+
+        // Step II. Transform type parameters of a macro implementation into type arguments in a macro definition's body
+        runtimeType = runtimeType.substituteTypes(macroImpl.typeParams, targs map (_.tpe))
+
+        // Step III. Transform c.prefix.value.XXX to this.XXX and implParam.value.YYY to defParam.YYY
+        def unsigma(tpe: Type): Type =
+          transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => NoSymbol) match {
+            case (implCtxParam :: Nil) :: implParamss =>
+              val implToDef = flatMap2(implParamss, macroDdef.vparamss)(map2(_, _)((_, _))).toMap
+              object UnsigmaTypeMap extends TypeMap {
+                def apply(tp: Type): Type = tp match {
+                  case TypeRef(pre, sym, args) =>
+                    val pre1 = pre match {
+                      case SingleType(SingleType(SingleType(NoPrefix, c), prefix), value) if c == implCtxParam && prefix == MacroContextPrefix && value == ExprValue =>
+                        ThisType(macroDdef.symbol.owner)
+                      case SingleType(SingleType(NoPrefix, implParam), value) if value == ExprValue =>
+                        implToDef get implParam map (defParam => SingleType(NoPrefix, defParam.symbol)) getOrElse pre
+                      case _ =>
+                        pre
+                    }
+                    val args1 = args map mapOver
+                    TypeRef(pre1, sym, args1)
                   case _ =>
-                    pre
+                    mapOver(tp)
                 }
-                val args1 = args map mapOver
-                TypeRef(pre1, sym, args1)
-              case _ =>
-                mapOver(tp)
-            }
-          }
-
-          UnsigmaTypeMap(tpe)
-        case _ =>
-          tpe
-      }
-
-    unsigma(runtimeType)
-  }
-
-  /** A reference macro implementation signature compatible with a given macro definition.
-   *
-   *  In the example above for the following macro def:
-   *    def foo[T](xs: List[T]): T = macro fooBar
-   *
-   *  This function will return:
-   *    (c: scala.reflect.macros.Context)(xs: c.Expr[List[T]]): c.Expr[T]
-   *
-   *  Note that type tag evidence parameters are not included into the result.
-   *  Type tag context bounds for macro impl tparams are optional.
-   *  Therefore compatibility checks ignore such parameters, and we don't need to bother about them here.
-   *
-   *  @param macroDef The macro definition symbol
-   *  @param tparams  The type parameters of the macro definition
-   *  @param vparamss The value parameters of the macro definition
-   *  @param retTpe   The return type of the macro definition
-   */
-  private def macroImplSig(macroDef: Symbol, tparams: List[TypeDef], vparamss: List[List[ValDef]], retTpe: Type): (List[List[Symbol]], Type) = {
-    // had to move method's body to an object because of the recursive dependencies between sigma and param
-    object SigGenerator {
-      def sigma(tpe: Type): Type = {
-        class SigmaTypeMap extends TypeMap {
-          def apply(tp: Type): Type = tp match {
-            case TypeRef(pre, sym, args) =>
-              val pre1 = pre match {
-                case ThisType(sym) if sym == macroDef.owner =>
-                  SingleType(SingleType(SingleType(NoPrefix, ctxParam), MacroContextPrefix), ExprValue)
-                case SingleType(NoPrefix, sym) =>
-                  mfind(vparamss)(_.symbol == sym) match {
-                    case Some(macroDefParam) => SingleType(SingleType(NoPrefix, param(macroDefParam)), ExprValue)
-                    case _ => pre
-                  }
-                case _ =>
-                  pre
               }
-              TypeRef(pre1, sym, args map mapOver)
+
+              UnsigmaTypeMap(tpe)
             case _ =>
-              mapOver(tp)
+              tpe
           }
-        }
 
-        new SigmaTypeMap() apply tpe
-      }
-
-      def makeParam(name: Name, pos: Position, tpe: Type, flags: Long = 0L) =
-        macroDef.newValueParameter(name, pos, flags) setInfo tpe
-      val ctxParam = makeParam(nme.macroContext, macroDef.pos, MacroContextClass.tpe, SYNTHETIC)
-      def implType(isType: Boolean, origTpe: Type): Type =
-        if (isRepeatedParamType(origTpe))
-          appliedType(
-            RepeatedParamClass.typeConstructor,
-            List(implType(isType, sigma(origTpe.typeArgs.head))))
-        else {
-          val tsym = getMember(MacroContextClass, if (isType) tpnme.WeakTypeTag else tpnme.Expr)
-          typeRef(singleType(NoPrefix, ctxParam), tsym, List(sigma(origTpe)))
-        }
-      val paramCache = scala.collection.mutable.Map[Symbol, Symbol]()
-      def param(tree: Tree): Symbol =
-        paramCache.getOrElseUpdate(tree.symbol, {
-          val sym = tree.symbol
-          makeParam(sym.name, sym.pos, implType(sym.isType, sym.tpe), sym.flags)
-        })
-
-      val paramss = List(ctxParam) :: mmap(vparamss)(param)
-      val implRetTpe = typeRef(singleType(NoPrefix, ctxParam), getMember(MacroContextClass, tpnme.Expr), List(sigma(retTpe)))
+        unsigma(runtimeType)
+      case _ =>
+        ErrorType
     }
-
-    import SigGenerator._
-    macroLogVerbose(sm"""
-      |generating macroImplSigs for: $macroDef
-      |tparams are: $tparams
-      |vparamss are: $vparamss
-      |retTpe is: $retTpe
-      |macroImplSig is: $paramss, $implRetTpe
-    """.trim)
-    (paramss, implRetTpe)
   }
 
-  /** Verifies that the body of a macro def typechecks to a reference to a static public non-overloaded method,
+  /** Verifies that the body of a macro def typechecks to a reference to a static public non-overloaded method or a top-level macro bundle,
    *  and that that method is signature-wise compatible with the given macro definition.
    *
-   *  @return Typechecked rhs of the given macro definition if everything is okay.
+   *  @return Macro impl reference for the given macro definition if everything is okay.
    *          EmptyTree if an error occurs.
    */
-  def typedMacroBody(typer: Typer, macroDdef: DefDef): Tree =
-    try new MacroTyper(typer, macroDdef).typed
-    catch { case MacroBodyTypecheckException => EmptyTree }
-
-  class MacroTyper(val typer: Typer, val macroDdef: DefDef) extends MacroErrors {
-    // Phase I: sanity checks
-    val macroDef = macroDdef.symbol
-    macroLogVerbose("typechecking macro def %s at %s".format(macroDef, macroDdef.pos))
-    assert(macroDef.isTermMacro, macroDdef)
-    if (fastTrack contains macroDef) MacroDefIsFastTrack()
-    if (!typer.checkFeature(macroDdef.pos, MacrosFeature, immediate = true)) MacroFeatureNotEnabled()
-
-    // we use typed1 instead of typed, because otherwise adapt is going to mess us up
-    // if adapt sees <qualifier>.<method>, it will want to perform eta-expansion and will fail
-    // unfortunately, this means that we have to manually trigger macro expansion
-    // because it's adapt which is responsible for automatic expansion during typechecking
-    def typecheckRhs(rhs: Tree): Tree = {
-      try {
-        // interestingly enough, just checking isErroneous doesn't cut it
-        // e.g. a "type arguments [U] do not conform to method foo's type parameter bounds" error
-        // doesn't manifest itself as an error in the resulting tree
-        val prevNumErrors = reporter.ERROR.count
-        var rhs1 = typer.typed1(rhs, EXPRmode, WildcardType)
-        def rhsNeedsMacroExpansion = rhs1.symbol != null && rhs1.symbol.isTermMacro && !rhs1.symbol.isErroneous
-        while (rhsNeedsMacroExpansion) {
-          rhs1 = macroExpand1(typer, rhs1) match {
-            case Success(expanded) =>
-              try {
-                val typechecked = typer.typed1(expanded, EXPRmode, WildcardType)
-                macroLogVerbose("typechecked1:%n%s%n%s".format(typechecked, showRaw(typechecked)))
-                typechecked
-              } finally {
-                popMacroContext()
-              }
-            case Delay(delayed) =>
-              typer.instantiate(delayed, EXPRmode, WildcardType)
-            case Fallback(fallback) =>
-              typer.typed1(fallback, EXPRmode, WildcardType)
-            case Other(result) =>
-              result
-          }
-        }
-        val typecheckedWithErrors = (rhs1 exists (_.isErroneous)) || reporter.ERROR.count != prevNumErrors
-        if (typecheckedWithErrors) MacroDefUntypeableBodyError()
-        rhs1
-      } catch {
-        case ex: TypeError =>
-          typer.reportTypeError(context, rhs.pos, ex)
-          MacroDefUntypeableBodyError()
-      }
-    }
-
-    // Phase II: typecheck the right-hand side of the macro def
-    val typed = typecheckRhs(macroDdef.rhs)
-    typed match {
-      case MacroImplReference(_, meth, _) if meth == Predef_??? =>
-        bindMacroImpl(macroDef, typed)
-        MacroDefIsQmarkQmarkQmark()
-      case MacroImplReference(owner, meth, targs) =>
-        if (!meth.isMethod) MacroDefInvalidBodyError()
-        if (!meth.isPublic) MacroImplNotPublicError()
-        if (meth.isOverloaded) MacroImplOverloadedError()
-        if (!owner.isStaticOwner && !owner.moduleClass.isStaticOwner) MacroImplNotStaticError()
-        if (meth.typeParams.length != targs.length) MacroImplWrongNumberOfTypeArgumentsError(typed)
-        bindMacroImpl(macroDef, typed)
-      case _ =>
-        MacroDefInvalidBodyError()
-    }
-
-    // Phase III: check compatibility between the macro def and its macro impl
-    // this check ignores type tag evidence parameters, because type tag context bounds are optional
-    // aXXX (e.g. aparamss) => characteristics of the macro impl ("a" stands for "actual")
-    // rXXX (e.g. rparamss) => characteristics of a reference macro impl signature synthesized from the macro def ("r" stands for "reference")
-    val macroImpl = typed.symbol
-    val aparamss = transformTypeTagEvidenceParams(macroImpl.paramss, (param, tparam) => NoSymbol)
-    val aret = macroImpl.tpe.finalResultType
-    val macroDefRet =
-      if (!macroDdef.tpt.isEmpty) typer.typedType(macroDdef.tpt).tpe
-      else computeMacroDefTypeFromMacroImpl(macroDdef, macroImpl)
-    val (rparamss, rret) = macroImplSig(macroDef, macroDdef.tparams, macroDdef.vparamss, macroDefRet)
-
-    val implicitParams = aparamss.flatten filter (_.isImplicit)
-    if (implicitParams.nonEmpty) MacroImplNonTagImplicitParameters(implicitParams)
-    if (aparamss.length != rparamss.length) MacroImplParamssMismatchError()
-
-    val atparams = macroImpl.typeParams
-    val atvars = atparams map freshVar
-    def atpeToRtpe(atpe: Type) = atpe.substSym(aparamss.flatten, rparamss.flatten).instantiateTypeParams(atparams, atvars)
-
-    try {
-      map2(aparamss, rparamss)((aparams, rparams) => {
-        if (aparams.length < rparams.length) MacroImplMissingParamsError(aparams, rparams)
-        if (rparams.length < aparams.length) MacroImplExtraParamsError(aparams, rparams)
-      })
-
-      // cannot fuse these loops because if aparamss.flatten != rparamss.flatten
-      // then `atpeToRtpe` is going to fail with an unsound substitution
-      map2(aparamss.flatten, rparamss.flatten)((aparam, rparam) => {
-        if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam)
-        if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam)
-        val aparamtpe = aparam.tpe.dealias match {
-          case RefinedType(List(tpe), Scope(sym)) if tpe =:= MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
-          case tpe => tpe
-        }
-        checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam)
-      })
-
-      checkMacroImplResultTypeMismatch(atpeToRtpe(aret), rret)
-
-      val maxLubDepth = lubDepth(aparamss.flatten map (_.tpe)) max lubDepth(rparamss.flatten map (_.tpe))
-      val atargs = solvedTypes(atvars, atparams, atparams map varianceInType(aret), upper = false, depth = maxLubDepth)
-      val boundsOk = typer.silent(_.infer.checkBounds(macroDdef, NoPrefix, NoSymbol, atparams, atargs, ""))
-      boundsOk match {
-        case SilentResultValue(true) => // do nothing, success
-        case SilentResultValue(false) | SilentTypeError(_) => MacroImplTargMismatchError(atargs, atparams)
-      }
-    } catch {
-      case ex: NoInstance => MacroImplTparamInstantiationError(atparams, ex)
-    }
-  }
+  def typedMacroBody(typer: Typer, macroDdef: DefDef): Tree = pluginsTypedMacroBody(typer, macroDdef)
 
-  /** Macro classloader that is used to resolve and run macro implementations.
-   *  Loads classes from from -cp (aka the library classpath).
-   *  Is also capable of detecting REPL and reusing its classloader.
+  /** Default implementation of `typedMacroBody`.
+   *  Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsTypedMacroBody for more details)
    */
-  lazy val macroClassloader: ClassLoader = {
-    if (global.forMSIL)
-      throw new UnsupportedOperationException("Scala reflection not available on this platform")
-
-    val classpath = global.classPath.asURLs
-    macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath))
-    val loader = ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
-
-    // a heuristic to detect the REPL
-    if (global.settings.exposeEmptyPackage.value) {
-      macroLogVerbose("macro classloader: initializing from a REPL classloader".format(global.classPath.asURLs))
-      import scala.tools.nsc.interpreter._
-      val virtualDirectory = global.settings.outputDirs.getSingleOutput.get
-      new AbstractFileClassLoader(virtualDirectory, loader) {}
-    } else {
-      loader
-    }
-  }
+  def standardTypedMacroBody(typer: Typer, macroDdef: DefDef): Tree = {
+    val macroDef = macroDdef.symbol
+    assert(macroDef.isMacro, macroDdef)
 
-  /** Produces a function that can be used to invoke macro implementation for a given macro definition:
-   *    1) Looks up macro implementation symbol in this universe.
-   *    2) Loads its enclosing class from the macro classloader.
-   *    3) Loads the companion of that enclosing class from the macro classloader.
-   *    4) Resolves macro implementation within the loaded companion.
-   *
-   *  @return Requested runtime if macro implementation can be loaded successfully from either of the mirrors,
-   *          `null` otherwise.
-   */
-  type MacroRuntime = MacroArgs => Any
-  private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]
-  private def macroRuntime(macroDef: Symbol): MacroRuntime = {
-    macroLogVerbose(s"looking for macro implementation: $macroDef")
+    macroLogVerbose("typechecking macro def %s at %s".format(macroDef, macroDdef.pos))
     if (fastTrack contains macroDef) {
-      macroLogVerbose("macro expansion is serviced by a fast track")
-      fastTrack(macroDef)
+      macroLogVerbose("typecheck terminated unexpectedly: macro is fast track")
+      assert(!macroDdef.tpt.isEmpty, "fast track macros must provide result type")
+      EmptyTree
     } else {
-      macroRuntimesCache.getOrElseUpdate(macroDef, {
-        val binding = loadMacroImplBinding(macroDef)
-        val className = binding.className
-        val methName = binding.methName
-        macroLogVerbose(s"resolved implementation as $className.$methName")
-
-        if (binding.className == Predef_???.owner.fullName.toString && binding.methName == Predef_???.name.encoded) {
-          args => throw new AbortMacroException(args.c.enclosingPosition, "macro implementation is missing")
-        } else {
-          // I don't use Scala reflection here, because it seems to interfere with JIT magic
-          // whenever you instantiate a mirror (and not do anything with in, just instantiate), performance drops by 15-20%
-          // I'm not sure what's the reason - for me it's pure voodoo
-          // upd. my latest experiments show that everything's okay
-          // it seems that in 2.10.1 we can easily switch to Scala reflection
-          try {
-            macroLogVerbose(s"loading implementation class: $className")
-            macroLogVerbose(s"classloader is: ${ReflectionUtils.show(macroClassloader)}")
-            val implObj = ReflectionUtils.staticSingletonInstance(macroClassloader, className)
-            // relies on the fact that macro impls cannot be overloaded
-            // so every methName can resolve to at maximum one method
-            val implMeths = implObj.getClass.getDeclaredMethods.find(_.getName == methName)
-            val implMeth = implMeths getOrElse { throw new NoSuchMethodException(s"$className.$methName") }
-            macroLogVerbose(s"successfully loaded macro impl as ($implObj, $implMeth)")
-            args => implMeth.invoke(implObj, ((args.c +: args.others) map (_.asInstanceOf[AnyRef])): _*)
-          } catch {
-            case ex: Exception =>
-              macroLogVerbose(s"macro runtime failed to load: ${ex.toString}")
-              macroDef setFlag IS_ERROR
-              null
-          }
-        }
-      })
+      def fail() = { if (macroDef != null) macroDef setFlag IS_ERROR; macroDdef setType ErrorType; EmptyTree }
+      def success(macroImplRef: Tree) = { bindMacroImpl(macroDef, macroImplRef); macroImplRef }
+
+      if (!typer.checkFeature(macroDdef.pos, currentRun.runDefinitions.MacrosFeature, immediate = true)) {
+        macroLogVerbose("typecheck terminated unexpectedly: language.experimental.macros feature is not enabled")
+        fail()
+      } else {
+        val macroDdef1: macroDdef.type = macroDdef
+        val typer1: typer.type = typer
+        val macroCompiler = new {
+          val global: self.global.type = self.global
+          val typer: self.global.analyzer.Typer = typer1.asInstanceOf[self.global.analyzer.Typer]
+          val macroDdef: self.global.DefDef = macroDdef1
+        } with DefaultMacroCompiler
+        val macroImplRef = macroCompiler.resolveMacroImpl
+        if (macroImplRef.isEmpty) fail() else success(macroImplRef)
+      }
     }
   }
 
-  private def macroContext(typer: Typer, prefixTree: Tree, expandeeTree: Tree): MacroContext =
+  def macroContext(typer: Typer, prefixTree: Tree, expandeeTree: Tree): MacroContext = {
     new {
       val universe: self.global.type = self.global
       val callsiteTyper: universe.analyzer.Typer = typer.asInstanceOf[global.analyzer.Typer]
-      val expandee = expandeeTree
+      val expandee = universe.analyzer.macroExpanderAttachment(expandeeTree).original orElse duplicateAndKeepPositions(expandeeTree)
     } with UnaffiliatedMacroContext {
       val prefix = Expr[Nothing](prefixTree)(TypeTag.Nothing)
       override def toString = "MacroContext(%s@%s +%d)".format(expandee.symbol.name, expandee.pos, enclosingMacros.length - 1 /* exclude myself */)
     }
+  }
 
   /** Calculate the arguments to pass to a macro implementation when expanding the provided tree.
    */
   case class MacroArgs(c: MacroContext, others: List[Any])
-  private def macroArgs(typer: Typer, expandee: Tree): MacroArgs = {
-    val macroDef   = expandee.symbol
-    val prefixTree = expandee.collect{ case Select(qual, name) => qual }.headOption.getOrElse(EmptyTree)
-    val context    = expandee.attachments.get[MacroRuntimeAttachment].flatMap(_.macroContext).getOrElse(macroContext(typer, prefixTree, expandee))
-    var typeArgs   = List[Tree]()
-    val exprArgs   = ListBuffer[List[Expr[_]]]()
-    def collectMacroArgs(tree: Tree): Unit = tree match {
-      case Apply(fn, args) =>
-        // todo. infer precise typetag for this Expr, namely the declared type of the corresponding macro impl argument
-        exprArgs.prepend(args map (arg => context.Expr[Nothing](arg)(TypeTag.Nothing)))
-        collectMacroArgs(fn)
-      case TypeApply(fn, args) =>
-        typeArgs = args
-        collectMacroArgs(fn)
-      case _ =>
-    }
-    collectMacroArgs(expandee)
+  def macroArgs(typer: Typer, expandee: Tree): MacroArgs = pluginsMacroArgs(typer, expandee)
 
-    val argcDoesntMatch = macroDef.paramss.length != exprArgs.length
-    val nullaryArgsEmptyParams = exprArgs.isEmpty && macroDef.paramss == ListOfNil
-    if (argcDoesntMatch && !nullaryArgsEmptyParams) { typer.TyperErrorGen.MacroPartialApplicationError(expandee) }
+  /** Default implementation of `macroArgs`.
+   *  Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroArgs for more details)
+   */
+  def standardMacroArgs(typer: Typer, expandee: Tree): MacroArgs = {
+    val macroDef = expandee.symbol
+    val paramss = macroDef.paramss
+    val treeInfo.Applied(core, targs, argss) = expandee
+    val prefix = core match { case Select(qual, _) => qual; case _ => EmptyTree }
+    val context = expandee.attachments.get[MacroRuntimeAttachment].flatMap(_.macroContext).getOrElse(macroContext(typer, prefix, expandee))
+
+    macroLogVerbose(sm"""
+      |context: $context
+      |prefix: $prefix
+      |targs: $targs
+      |argss: $argss
+      |paramss: $paramss
+    """.trim)
 
-    val argss: List[List[Any]] = exprArgs.toList
-    macroLogVerbose(s"context: $context")
-    macroLogVerbose(s"argss: $argss")
+    import typer.TyperErrorGen._
+    val isNullaryArgsEmptyParams = argss.isEmpty && paramss == ListOfNil
+    if (paramss.length < argss.length) MacroTooManyArgumentListsError(expandee)
+    if (paramss.length > argss.length && !isNullaryArgsEmptyParams) MacroTooFewArgumentListsError(expandee)
 
-    val preparedArgss: List[List[Any]] =
+    val macroImplArgs: List[Any] =
       if (fastTrack contains macroDef) {
-        if (fastTrack(macroDef) validate context) argss
-        else typer.TyperErrorGen.MacroPartialApplicationError(expandee)
-      } else {
-        // if paramss have typetag context bounds, add an arglist to argss if necessary and instantiate the corresponding evidences
-        // consider the following example:
-        //
-        //   class D[T] {
-        //     class C[U] {
-        //       def foo[V] = macro Impls.foo[T, U, V]
-        //     }
-        //   }
-        //
-        //   val outer1 = new D[Int]
-        //   val outer2 = new outer1.C[String]
-        //   outer2.foo[Boolean]
-        //
-        // then T and U need to be inferred from the lexical scope of the call using `asSeenFrom`
-        // whereas V won't be resolved by asSeenFrom and need to be loaded directly from `expandee` which needs to contain a TypeApply node
-        // also, macro implementation reference may contain a regular type as a type argument, then we pass it verbatim
-        val binding = loadMacroImplBinding(macroDef)
-        macroLogVerbose(s"binding: $binding")
-        val tags = binding.signature filter (_ != -1) map (paramPos => {
-          val targ = binding.targs(paramPos).tpe.typeSymbol
-          val tpe = if (targ.isTypeParameterOrSkolem) {
-            if (targ.owner == macroDef) {
-              // doesn't work when macro def is compiled separately from its usages
-              // then targ is not a skolem and isn't equal to any of macroDef.typeParams
-              // val argPos = targ.deSkolemize.paramPos
-              val argPos = macroDef.typeParams.indexWhere(_.name == targ.name)
-              typeArgs(argPos).tpe
+        // Take a dry run of the fast track implementation
+        if (fastTrack(macroDef) validate expandee) argss.flatten
+        else MacroTooFewArgumentListsError(expandee)
+      }
+      else {
+        def calculateMacroArgs(binding: MacroImplBinding) = {
+          val signature = if (binding.isBundle) binding.signature else binding.signature.tail
+          macroLogVerbose(s"binding: $binding")
+
+          // STEP I: prepare value arguments of the macro expansion
+          // wrap argss in c.Expr if necessary (i.e. if corresponding macro impl param is of type c.Expr[T])
+          // expand varargs (nb! varargs can apply to any parameter section, not necessarily to the last one)
+          val trees = map3(argss, paramss, signature)((args, defParams, implParams) => {
+            val isVarargs = isVarArgsList(defParams)
+            if (isVarargs) {
+              if (defParams.length > args.length + 1) MacroTooFewArgumentsError(expandee)
+            } else {
+              if (defParams.length < args.length) MacroTooManyArgumentsError(expandee)
+              if (defParams.length > args.length) MacroTooFewArgumentsError(expandee)
+            }
+
+            val wrappedArgs = mapWithIndex(args)((arg, j) => {
+              val fingerprint = implParams(min(j, implParams.length - 1))
+              fingerprint match {
+                case LiftedTyped => context.Expr[Nothing](arg.duplicate)(TypeTag.Nothing) // TODO: SI-5752
+                case LiftedUntyped => arg.duplicate
+                case _ => abort(s"unexpected fingerprint $fingerprint in $binding with paramss being $paramss " +
+                                s"corresponding to arg $arg in $argss")
+              }
+            })
+
+            if (isVarargs) {
+              val (normal, varargs) = wrappedArgs splitAt (defParams.length - 1)
+              normal :+ varargs // pack all varargs into a single Seq argument (varargs Scala style)
+            } else wrappedArgs
+          })
+          macroLogVerbose(s"trees: $trees")
+
+          // STEP II: prepare type arguments of the macro expansion
+          // if paramss have typetag context bounds, add an arglist to argss if necessary and instantiate the corresponding evidences
+          // consider the following example:
+          //
+          //   class D[T] {
+          //     class C[U] {
+          //       def foo[V] = macro Impls.foo[T, U, V]
+          //     }
+          //   }
+          //
+          //   val outer1 = new D[Int]
+          //   val outer2 = new outer1.C[String]
+          //   outer2.foo[Boolean]
+          //
+          // then T and U need to be inferred from the lexical scope of the call using `asSeenFrom`
+          // whereas V won't be resolved by asSeenFrom and needs to be loaded directly from `expandee`, which needs to contain a TypeApply node
+          // also, a macro implementation reference may contain a regular type as a type argument, in which case we pass it verbatim
+          val tags = signature.flatten collect { case f if f.isTag => f.paramPos } map (paramPos => {
+            val targ = binding.targs(paramPos).tpe.typeSymbol
+            val tpe = if (targ.isTypeParameterOrSkolem) {
+              if (targ.owner == macroDef) {
+                // doesn't work when macro def is compiled separately from its usages
+                // then targ is not a skolem and isn't equal to any of macroDef.typeParams
+                // val argPos = targ.deSkolemize.paramPos
+                val argPos = macroDef.typeParams.indexWhere(_.name == targ.name)
+                targs(argPos).tpe
+              } else
+                targ.tpe.asSeenFrom(
+                  if (prefix == EmptyTree) macroDef.owner.tpe else prefix.tpe,
+                  macroDef.owner)
             } else
-              targ.tpe.asSeenFrom(
-                if (prefixTree == EmptyTree) macroDef.owner.tpe else prefixTree.tpe,
-                macroDef.owner)
-          } else
-            targ.tpe
-          context.WeakTypeTag(tpe)
-        })
-        macroLogVerbose(s"tags: $tags")
-
-        // transforms argss taking into account varargness of paramss
-        // note that typetag context bounds are only declared on macroImpls
-        // so this optional arglist might not match macroDef's paramlist
-        // nb! varargs can apply to any parameter section, not necessarily to the last one
-        mapWithIndex(argss :+ tags)((as, i) => {
-          val mapsToParamss = macroDef.paramss.indices contains i
-          if (mapsToParamss) {
-            val ps = macroDef.paramss(i)
-            if (isVarArgsList(ps)) {
-              val (normal, varargs) = as splitAt (ps.length - 1)
-              normal :+ varargs // pack all varargs into a single List argument
-            } else as
-          } else as
-        })
+              targ.tpe
+            context.WeakTypeTag(tpe)
+          })
+          macroLogVerbose(s"tags: $tags")
+
+          // if present, tags always come in a separate parameter/argument list
+          // that's because macro impls can't have implicit parameters other than c.WeakTypeTag[T]
+          (trees :+ tags).flatten
+        }
+
+        val binding = loadMacroImplBinding(macroDef).get
+        if (binding.is_???) Nil
+        else calculateMacroArgs(binding)
       }
-    macroLogVerbose(s"preparedArgss: $preparedArgss")
-    MacroArgs(context, preparedArgss.flatten)
+    macroLogVerbose(s"macroImplArgs: $macroImplArgs")
+    MacroArgs(context, macroImplArgs)
   }
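
To make the wrapping above concrete, a minimal hypothetical def/impl pair (ArgsSketch, foo and fooImpl are invented names): the value argument reaches the implementation as a c.Expr, and the WeakTypeTag evidence instantiated by standardMacroArgs arrives in a separate implicit parameter list:

import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context

object ArgsSketch {
  def foo[T](x: Int): Int = macro fooImpl[T]

  // One value argument wrapped as c.Expr[Int], plus the WeakTypeTag evidence
  // that the expander synthesizes for T in its own implicit parameter list.
  def fooImpl[T](c: Context)(x: c.Expr[Int])
                (implicit tt: c.WeakTypeTag[T]): c.Expr[Int] = {
    import c.universe._
    c.info(c.enclosingPosition, s"expanding with T = ${weakTypeOf[T]}", force = false)
    x
  }
}
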
 
   /** Keeps track of macros in-flight.
-   *  See more informations in comments to `openMacros` in `scala.reflect.macros.Context`.
+   *  See more information in the comments to `openMacros` in `scala.reflect.macros.whitebox.Context`.
    */
-  private var _openMacros = List[MacroContext]()
+  var _openMacros = List[MacroContext]()
   def openMacros = _openMacros
-  private def pushMacroContext(c: MacroContext) = _openMacros ::= c
-  private def popMacroContext() = _openMacros = _openMacros.tail
+  def pushMacroContext(c: MacroContext) = _openMacros ::= c
+  def popMacroContext() = _openMacros = _openMacros.tail
   def enclosingMacroPosition = openMacros map (_.macroApplication.pos) find (_ ne NoPosition) getOrElse NoPosition
 
-  private sealed abstract class MacroExpansionResult
-  private case class Success(expanded: Tree) extends MacroExpansionResult
-  private case class Delay(delayed: Tree) extends MacroExpansionResult
-  private case class Fallback(fallback: Tree) extends MacroExpansionResult { currentRun.seenMacroExpansionsFallingBack = true }
-  private case class Other(result: Tree) extends MacroExpansionResult
-  private def Skip(expanded: Tree) = Other(expanded)
-  private def Cancel(expandee: Tree) = Other(expandee)
-  private def Failure(expandee: Tree) = Other(expandee)
-
   /** Performs macro expansion:
-   *    1) Checks whether the expansion needs to be delayed (see `mustDelayMacroExpansion`)
-   *    2) Loads macro implementation using `macroMirror`
-   *    3) Synthesizes invocation arguments for the macro implementation
-   *    4) Checks that the result is a tree bound to this universe
-   *    5) Typechecks the result against the return type of the macro definition
+   *
+   *  ========= Expandable trees =========
+   *
+   *  A term of one of the following shapes:
+   *
+   *    Ident(<term macro>)
+   *    Select(<any qualifier>, <term macro>)
+   *    TypeApply(<any of the above>, <targs>)
+   *    Apply(...Apply(<any of the above>, <args1>)...<argsN>)
+   *
+   *  ========= Macro expansion =========
+   *
+   *  First of all `macroExpandXXX`:
+   *    1) If necessary desugars the `expandee` to fit into the default expansion scheme
+   *       that is understood by `macroExpandWithRuntime` / `macroExpandWithoutRuntime`
+   *
+   *  Then `macroExpandWithRuntime`:
+   *    2) Checks whether the expansion needs to be delayed
+   *    3) Loads macro implementation using `macroMirror`
+   *    4) Synthesizes invocation arguments for the macro implementation
+   *    5) Checks that the result is a tree or an expr bound to this universe
+   *
+   *  Finally `macroExpandXXX`:
+   *    6) Validates the expansion against the white list of supported tree shapes
+   *    7) Typechecks the result as required by the circumstances of the macro application
    *
    *  If -Ymacro-debug-lite is enabled, you will get basic notifications about macro expansion
    *  along with macro expansions logged in the form that can be copy/pasted verbatim into REPL.
@@ -687,123 +524,199 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
    *
    *  @return
    *    the expansion result                    if the expansion has been successful,
-   *    the fallback method invocation          if the expansion has been unsuccessful, but there is a fallback,
+   *    the fallback tree                       if the expansion has been unsuccessful, but there is a fallback,
    *    the expandee unchanged                  if the expansion has been delayed,
    *    the expandee fully expanded             if the expansion has been delayed before and has been expanded now,
    *    the expandee with an error marker set   if the expansion has been cancelled due malformed arguments or implementation
    *    the expandee with an error marker set   if there has been an error
    */
-  def macroExpand(typer: Typer, expandee: Tree, mode: Int = EXPRmode, pt: Type = WildcardType): Tree = {
-    if (settings.Ymacronoexpand.value) return expandee // SI-6812
-    val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
-    if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
-    try {
-      macroExpand1(typer, expandee) match {
-        case Success(expanded) =>
-          try {
-            def typecheck(phase: String, tree: Tree, pt: Type): Tree = {
-              if (tree.isErroneous) return tree
-              macroLogVerbose(s"typechecking against $phase $pt: $expanded")
-              val numErrors    = reporter.ERROR.count
-              def hasNewErrors = reporter.ERROR.count > numErrors
-              val result = typer.context.withImplicitsEnabled(typer.typed(tree, EXPRmode, pt))
-              macroLogVerbose(s"""${if (hasNewErrors) "failed to typecheck" else "successfully typechecked"} against $phase $pt:\n$result""")
-              result
-            }
+  abstract class MacroExpander(val typer: Typer, val expandee: Tree) {
+    def onSuccess(expanded: Tree): Tree
+    def onFallback(expanded: Tree): Tree
+    def onSuppressed(expandee: Tree): Tree = expandee
+    def onDelayed(expanded: Tree): Tree = expanded
+    def onSkipped(expanded: Tree): Tree = expanded
+    def onFailure(expanded: Tree): Tree = { typer.infer.setError(expandee); expandee }
+
+    def apply(desugared: Tree): Tree = {
+      if (isMacroExpansionSuppressed(desugared)) onSuppressed(expandee)
+      else expand(desugared)
+    }
 
-            var expectedTpe = expandee.tpe
-            if (isNullaryInvocation(expandee)) expectedTpe = expectedTpe.finalResultType
-            // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc
-            val expanded0 = duplicateAndKeepPositions(expanded)
-            val expanded1 = typecheck("macro def return type", expanded0, expectedTpe)
-            val expanded2 = typecheck("expected type", expanded1, pt)
-            expanded2
-          } finally {
-            popMacroContext()
+    protected def expand(desugared: Tree): Tree = {
+      def showDetailed(tree: Tree) = showRaw(tree, printIds = true, printTypes = true)
+      def summary() = s"expander = $this, expandee = ${showDetailed(expandee)}, desugared = ${if (expandee == desugared) () else showDetailed(desugared)}"
+      if (macroDebugVerbose) println(s"macroExpand: ${summary()}")
+      linkExpandeeAndDesugared(expandee, desugared)
+
+      val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
+      if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
+      try {
+        withInfoLevel(nodePrinters.InfoLevel.Quiet) { // verbose printing might cause recursive macro expansions
+          if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) {
+            val reason = if (expandee.symbol.isErroneous) "not found or incompatible macro implementation" else "erroneous arguments"
+            macroLogVerbose(s"cancelled macro expansion because of $reason: $expandee")
+            onFailure(typer.infer.setError(expandee))
+          } else try {
+            val expanded = {
+              val runtime = macroRuntime(expandee)
+              if (runtime != null) macroExpandWithRuntime(typer, expandee, runtime)
+              else macroExpandWithoutRuntime(typer, expandee)
+            }
+            expanded match {
+              case Success(expanded) =>
+                // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc
+                val expanded1 = try onSuccess(duplicateAndKeepPositions(expanded)) finally popMacroContext()
+                if (!hasMacroExpansionAttachment(expanded1)) linkExpandeeAndExpanded(expandee, expanded1)
+                if (settings.Ymacroexpand.value == settings.MacroExpand.Discard) expandee.setType(expanded1.tpe)
+                else expanded1
+              case Fallback(fallback) => onFallback(fallback)
+              case Delayed(delayed) => onDelayed(delayed)
+              case Skipped(skipped) => onSkipped(skipped)
+              case Failure(failure) => onFailure(failure)
+            }
+          } catch {
+            case typer.TyperErrorGen.MacroExpansionException => onFailure(expandee)
           }
-        case Delay(delayed) =>
-          // =========== THE SITUATION ===========
-          //
-          // If we've been delayed (i.e. bailed out of the expansion because of undetermined type params present in the expandee),
-          // then there are two possible situations we're in:
-          //
-          // 1) We're in POLYmode, when the typer tests the waters wrt type inference
-          // (e.g. as in typedArgToPoly in doTypedApply).
-          //
-          // 2) We're out of POLYmode, which means that the typer is out of tricks to infer our type
-          // (e.g. if we're an argument to a function call, then this means that no previous argument lists
-          // can determine our type variables for us).
-          //
-          // Situation #1 is okay for us, since there's no pressure. In POLYmode we're just verifying that
-          // there's nothing outrageously wrong with our undetermined type params (from what I understand!).
-          //
-          // Situation #2 requires measures to be taken. If we're in it, then noone's going to help us infer
-          // the undetermined type params. Therefore we need to do something ourselves or otherwise this
-          // expandee will forever remaing not expanded (see SI-5692).
-          //
-          // A traditional way out of this conundrum is to call `instantiate` and let the inferencer
-          // try to find the way out. It works for simple cases, but sometimes, if the inferencer lacks
-          // information, it will be forced to approximate.
-          //
-          // =========== THE PROBLEM ===========
-          //
-          // Consider the following example (thanks, Miles!):
-          //
-          //   // Iso represents an isomorphism between two datatypes:
-          //   // 1) An arbitrary one (e.g. a random case class)
-          //   // 2) A uniform representation for all datatypes (e.g. an HList)
-          //   trait Iso[T, U] {
-          //     def to(t : T) : U
-          //     def from(u : U) : T
-          //   }
-          //   implicit def materializeIso[T, U]: Iso[T, U] = macro ???
-          //
-          //   case class Foo(i: Int, s: String, b: Boolean)
-          //   def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
-          //   foo(Foo(23, "foo", true))
-          //
-          // In the snippet above, even though we know that there's a fundep going from T to U
-          // (in a sense that a datatype's uniform representation is unambiguously determined by the datatype,
-          // e.g. for Foo it will be Int :: String :: Boolean :: HNil), there's no way to convey this information
-          // to the typechecker. Therefore the typechecker will infer Nothing for L, which is hardly what we want.
-          val shouldInstantiate = typer.context.undetparams.nonEmpty && !inPolyMode(mode)
-          if (shouldInstantiate) typer.instantiatePossiblyExpectingUnit(delayed, mode, pt)
-          else delayed
-        case Fallback(fallback) =>
-          typer.context.withImplicitsEnabled(typer.typed(fallback, EXPRmode, pt))
-        case Other(result) =>
-          result
+        }
+      } finally {
+        if (Statistics.canEnable) Statistics.stopTimer(macroExpandNanos, start)
       }
-    } finally {
-      if (Statistics.canEnable) Statistics.stopTimer(macroExpandNanos, start)
     }
   }
 
-  /** Does the same as `macroExpand`, but without typechecking the expansion
-   *  Meant for internal use within the macro infrastructure, don't use it elsewhere.
+  /** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`.
+   *  @param outerPt Expected type that comes from enclosing context (something that's traditionally called `pt`).
+   *  @param innerPt Expected type that comes from the signature of a macro def, possibly wildcarded to help type inference.
    */
-  private def macroExpand1(typer: Typer, expandee: Tree): MacroExpansionResult =
-    // verbose printing might cause recursive macro expansions, so I'm shutting it down here
-    withInfoLevel(nodePrinters.InfoLevel.Quiet) {
-      if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) {
-        val reason = if (expandee.symbol.isErroneous) "not found or incompatible macro implementation" else "erroneous arguments"
-        macroLogVerbose(s"cancelled macro expansion because of $reason: $expandee")
-        return Cancel(typer.infer.setError(expandee))
+  class DefMacroExpander(typer: Typer, expandee: Tree, mode: Mode, outerPt: Type)
+  extends MacroExpander(typer, expandee) {
+    lazy val innerPt = {
+      val tp = if (isNullaryInvocation(expandee)) expandee.tpe.finalResultType else expandee.tpe
+      if (isBlackbox(expandee)) tp
+      else {
+        // approximation is necessary for whitebox macros to guide type inference
+        // read more in the comments for onDelayed below
+        val undetparams = tp collect { case tp if tp.typeSymbol.isTypeParameter => tp.typeSymbol }
+        deriveTypeWithWildcards(undetparams)(tp)
       }
+    }
+    override def onSuccess(expanded0: Tree) = {
+      // prematurely annotate the tree with a macro expansion attachment
+      // so that adapt called indirectly by typer.typed knows that it needs to apply the existential fixup
+      linkExpandeeAndExpanded(expandee, expanded0)
 
-      try {
-        val runtime = macroRuntime(expandee.symbol)
-        if (runtime != null) macroExpandWithRuntime(typer, expandee, runtime)
-        else macroExpandWithoutRuntime(typer, expandee)
-      } catch {
-        case typer.TyperErrorGen.MacroExpansionException => Failure(expandee)
+      def typecheck(label: String, tree: Tree, pt: Type): Tree = {
+        if (tree.isErrorTyped) tree
+        else {
+          if (macroDebugVerbose) println(s"$label (against pt = $pt): $tree")
+          // `macroExpandApply` is called from `adapt`, where implicit conversions are disabled
+          // therefore we need to re-enable the conversions back temporarily
+          val result = typer.context.withImplicitsEnabled(typer.typed(tree, mode, pt))
+          if (result.isErrorTyped && macroDebugVerbose) println(s"$label has failed: ${typer.context.reportBuffer.errors}")
+          result
+        }
+      }
+
+      if (isBlackbox(expandee)) {
+        val expanded1 = atPos(enclosingMacroPosition.makeTransparent)(Typed(expanded0, TypeTree(innerPt)))
+        typecheck("blackbox typecheck", expanded1, outerPt)
+      } else {
+        // whitebox expansions need to be typechecked against WildcardType first in order to avoid SI-6992 and SI-8048
+        // then we typecheck against innerPt, not against outerPt in order to prevent SI-8209
+        val expanded1 = typecheck("whitebox typecheck #0", expanded0, WildcardType)
+        val expanded2 = typecheck("whitebox typecheck #1", expanded1, innerPt)
+        typecheck("whitebox typecheck #2", expanded2, outerPt)
       }
     }
+    override def onDelayed(delayed: Tree) = {
+      // =========== THE SITUATION ===========
+      //
+      // If we've been delayed (i.e. bailed out of the expansion because of undetermined type params present in the expandee),
+      // then there are two possible situations we're in:
+      // 1) We're in POLYmode, when the typer tests the waters wrt type inference
+      // (e.g. as in typedArgToPoly in doTypedApply).
+      // 2) We're out of POLYmode, which means that the typer is out of tricks to infer our type
+      // (e.g. if we're an argument to a function call, then this means that no previous argument lists
+      // can determine our type variables for us).
+      //
+      // Situation #1 is okay for us, since there's no pressure. In POLYmode we're just verifying that
+      // there's nothing outrageously wrong with our undetermined type params (from what I understand!).
+      //
+      // Situation #2 requires measures to be taken. If we're in it, then no one's going to help us infer
+      // the undetermined type params. Therefore we need to do something ourselves or otherwise this
+      // expandee will forever remain unexpanded (see SI-5692). A traditional way out of this conundrum
+      // is to call `instantiate` and let the inferencer try to find the way out. It works for simple cases,
+      // but sometimes, if the inferencer lacks information, it will be forced to approximate.
+      //
+      // =========== THE PROBLEM ===========
+      //
+      // Consider the following example (thanks, Miles!):
+      //
+      // Iso represents an isomorphism between two datatypes:
+      // 1) An arbitrary one (e.g. a random case class)
+      // 2) A uniform representation for all datatypes (e.g. an HList)
+      //
+      //   trait Iso[T, U] {
+      //     def to(t : T) : U
+      //     def from(u : U) : T
+      //   }
+      //   implicit def materializeIso[T, U]: Iso[T, U] = macro ???
+      //
+      //   case class Foo(i: Int, s: String, b: Boolean)
+      //   def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
+      //   foo(Foo(23, "foo", true))
+      //
+      // In the snippet above, even though we know that there's a fundep going from T to U
+      // (in a sense that a datatype's uniform representation is unambiguously determined by the datatype,
+      // e.g. for Foo it will be Int :: String :: Boolean :: HNil), there's no way to convey this information
+      // to the typechecker. Therefore the typechecker will infer Nothing for L, which is hardly what we want.
+      //
+      // =========== THE SOLUTION (ENABLED ONLY FOR WHITEBOX MACROS) ===========
+      //
+      // To give materializers a chance to say their word before vanilla inference kicks in,
+      // we infer as much as possible (e.g. in the example above even though L is hopeless, C still can be inferred to Foo)
+      // and then trigger macro expansion with the undetermined type parameters still there.
+      // Thanks to that the materializer can take a look at what's going on and react accordingly.
+      val shouldInstantiate = typer.context.undetparams.nonEmpty && !mode.inPolyMode
+      if (shouldInstantiate) {
+        if (isBlackbox(expandee)) typer.instantiatePossiblyExpectingUnit(delayed, mode, outerPt)
+        else {
+          forced += delayed
+          typer.infer.inferExprInstance(delayed, typer.context.extractUndetparams(), outerPt, keepNothings = false)
+          macroExpand(typer, delayed, mode, outerPt)
+        }
+      } else delayed
+    }
+    override def onFallback(fallback: Tree) = typer.typed(fallback, mode, outerPt)
+  }
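
For flavour, a hypothetical whitebox materializer in the spirit of the Iso example from onDelayed above; IsoMacros and the choice of Unit for U are purely illustrative. Because the expansion is retried with undetermined type parameters still in place, the macro can compute U from T rather than have Nothing inferred:

import scala.language.experimental.macros
import scala.reflect.macros.whitebox.Context

trait Iso[T, U] {
  def to(t: T): U
  def from(u: U): T
}

object Iso {
  implicit def materializeIso[T, U]: Iso[T, U] = macro IsoMacros.materialize[T, U]
}

object IsoMacros {
  def materialize[T, U](c: Context)
                       (implicit tt: c.WeakTypeTag[T], ut: c.WeakTypeTag[U]): c.Tree = {
    import c.universe._
    val t = weakTypeOf[T]
    // A real materializer would derive U from the shape of T (e.g. an HList);
    // Unit is just a stand-in to keep the sketch self-contained.
    q"""new Iso[$t, Unit] {
          def to(t: $t): Unit = ()
          def from(u: Unit): $t = ???
        }"""
  }
}
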
+
+  /** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`.
+   *  @see DefMacroExpander
+   */
+  def macroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = pluginsMacroExpand(typer, expandee, mode, pt)
+
+  /** Default implementation of `macroExpand`.
+   *  Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroExpand for more details)
+   */
+  def standardMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = {
+    val expander = new DefMacroExpander(typer, expandee, mode, pt)
+    expander(expandee)
+  }
+
+  sealed abstract class MacroStatus(val result: Tree)
+  case class Success(expanded: Tree) extends MacroStatus(expanded)
+  case class Fallback(fallback: Tree) extends MacroStatus(fallback) { currentRun.seenMacroExpansionsFallingBack = true }
+  case class Delayed(delayed: Tree) extends MacroStatus(delayed)
+  case class Skipped(skipped: Tree) extends MacroStatus(skipped)
+  case class Failure(failure: Tree) extends MacroStatus(failure)
+  def Delay(expanded: Tree) = Delayed(expanded)
+  def Skip(expanded: Tree) = Skipped(expanded)
 
   /** Expands a macro when a runtime (i.e. the macro implementation) can be successfully loaded
    *  Meant for internal use within the macro infrastructure, don't use it elsewhere.
    */
-  private def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroExpansionResult = {
+  def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroStatus = {
     val wasDelayed  = isDelayed(expandee)
     val undetparams = calculateUndetparams(expandee)
     val nowDelayed  = !typer.context.macrosEnabled || undetparams.nonEmpty
@@ -829,15 +742,41 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
           def hasNewErrors = reporter.ERROR.count > numErrors
           val expanded = { pushMacroContext(args.c); runtime(args) }
           if (hasNewErrors) MacroGeneratedTypeError(expandee)
+          def validateResultingTree(expanded: Tree) = {
+            macroLogVerbose("original:")
+            macroLogLite("" + expanded + "\n" + showRaw(expanded))
+            val freeSyms = expanded.freeTerms ++ expanded.freeTypes
+            freeSyms foreach (sym => MacroFreeSymbolError(expandee, sym))
+            // Macros might have spliced arguments with range positions into non-compliant
+            // locations, notably, under a tree without a range position. Or, they might
+            // splice a tree that `resetAttrs` has assigned NoPosition.
+            //
+            // Here, we just convert all positions in the tree to offset positions, and
+            // convert NoPositions to something sensible.
+            //
+            // Given that the IDE now sees the expandee (by using -Ymacro-expand:discard),
+            // this loss of position fidelity shouldn't cause any real problems.
+            //
+            // Alternatively, we could pursue a way to exclude macro expansions from position
+            // invariant checking, or find a way not to touch expansions that happen to validate.
+            //
+            // This would be useful for cases like:
+            //
+            //    macro1 { macro2 { "foo" } }
+            //
+            // to allow `macro1` to see the range position of the "foo".
+            val expandedPos = enclosingMacroPosition.focus
+            def fixPosition(pos: Position) =
+              if (pos == NoPosition) expandedPos else pos.focus
+            expanded.foreach(t => t.pos = fixPosition(t.pos))
+
+            val result = atPos(enclosingMacroPosition.focus)(expanded)
+            Success(result)
+          }
           expanded match {
-            case expanded: Expr[_] =>
-              macroLogVerbose("original:")
-              macroLogLite("" + expanded.tree + "\n" + showRaw(expanded.tree))
-              val freeSyms = expanded.tree.freeTerms ++ expanded.tree.freeTypes
-              freeSyms foreach (sym => MacroFreeSymbolError(expandee, sym))
-              Success(atPos(enclosingMacroPosition.focus)(expanded.tree updateAttachment MacroExpansionAttachment(expandee)))
-            case _ =>
-              MacroExpansionIsNotExprError(expandee, expanded)
+            case expanded: Expr[_] if expandee.symbol.isTermMacro => validateResultingTree(expanded.tree)
+            case expanded: Tree if expandee.symbol.isTermMacro => validateResultingTree(expanded)
+            case _ => MacroExpansionHasInvalidTypeError(expandee, expanded)
           }
         } catch {
           case ex: Throwable =>
@@ -858,7 +797,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
   /** Expands a macro when a runtime (i.e. the macro implementation) cannot be loaded
    *  Meant for internal use within the macro infrastructure, don't use it elsewhere.
    */
-  private def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroExpansionResult = {
+  def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroStatus = {
     import typer.TyperErrorGen._
     val fallbackSym = expandee.symbol.nextOverriddenSymbol orElse MacroImplementationNotFoundError(expandee)
     macroLogLite(s"falling back to: $fallbackSym")
@@ -886,10 +825,12 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
    *    2) undetparams (sym.isTypeParameter && !sym.isSkolem)
    */
   var hasPendingMacroExpansions = false
-  private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]
+  private val forced = perRunCaches.newWeakSet[Tree]
+  private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]()
   private def isDelayed(expandee: Tree) = delayed contains expandee
   private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Int] =
-    delayed.get(expandee).getOrElse {
+    if (forced(expandee)) scala.collection.mutable.Set[Int]()
+    else delayed.getOrElse(expandee, {
       val calculated = scala.collection.mutable.Set[Symbol]()
       expandee foreach (sub => {
         def traverse(sym: Symbol) = if (sym != null && (undetparams contains sym.id)) calculated += sym
@@ -898,8 +839,8 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
       })
       macroLogVerbose("calculateUndetparams: %s".format(calculated))
       calculated map (_.id)
-    }
-  private val undetparams = perRunCaches.newSet[Int]
+    })
+  private val undetparams = perRunCaches.newSet[Int]()
   def notifyUndetparamsAdded(newUndets: List[Symbol]): Unit = {
     undetparams ++= newUndets map (_.id)
     if (macroDebugVerbose) newUndets foreach (sym => println("undetParam added: %s".format(sym)))
@@ -928,7 +869,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
     new Transformer {
       override def transform(tree: Tree) = super.transform(tree match {
         // todo. expansion should work from the inside out
-        case tree if (delayed contains tree) && calculateUndetparams(tree).isEmpty =>
+        case tree if (delayed contains tree) && calculateUndetparams(tree).isEmpty && !tree.isErroneous =>
           val context = tree.attachments.get[MacroRuntimeAttachment].get.typerContext
           delayed -= tree
           context.implicitsEnabled = typer.context.implicitsEnabled
@@ -946,3 +887,22 @@ object MacrosStats {
   val macroExpandCount    = Statistics.newCounter ("#macro expansions", "typer")
   val macroExpandNanos    = Statistics.newSubTimer("time spent in macroExpand", typerNanos)
 }
+
+class Fingerprint private[Fingerprint](val value: Int) extends AnyVal {
+  def paramPos = { assert(isTag, this); value }
+  def isTag = value >= 0
+  override def toString = this match {
+    case Other => "Other"
+    case LiftedTyped => "Expr"
+    case LiftedUntyped => "Tree"
+    case _ => s"Tag($value)"
+  }
+}
+
+object Fingerprint {
+  def apply(value: Int) = new Fingerprint(value)
+  def Tagged(tparamPos: Int) = new Fingerprint(tparamPos)
+  val Other = new Fingerprint(-1)
+  val LiftedTyped = new Fingerprint(-2)
+  val LiftedUntyped = new Fingerprint(-3)
+}
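
For reference, how the sentinel values above read in practice; FingerprintSketch is a hypothetical snippet that assumes scala-compiler is on the classpath (Fingerprint lives in scala.tools.nsc.typechecker):

import scala.tools.nsc.typechecker.Fingerprint
import Fingerprint._

object FingerprintSketch extends App {
  // Non-negative values point at the type parameter a WeakTypeTag evidences;
  // the negative sentinels mark c.Expr params, bare Tree params and everything else.
  val signature = List(LiftedTyped, LiftedUntyped, Other, Tagged(1))
  println(signature.map(_.toString))                  // List(Expr, Tree, Other, Tag(1))
  println(signature.filter(_.isTag).map(_.paramPos))  // List(1)
}
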
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index 99557d1..ba183fe 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -6,7 +6,6 @@ package scala.tools.nsc
 package typechecker
 
 import symtab.Flags._
-import scala.collection.{ mutable, immutable }
 import scala.reflect.internal.util.StringOps.{ ojoin }
 import scala.reflect.ClassTag
 import scala.reflect.runtime.{ universe => ru }
@@ -22,74 +21,23 @@ trait MethodSynthesis {
   import definitions._
   import CODE._
 
-  object synthesisUtil {
-    type TT[T]  = ru.TypeTag[T]
-    type CT[T] = ClassTag[T]
-
-    def ValOrDefDef(sym: Symbol, body: Tree) =
-      if (sym.isLazy) ValDef(sym, body)
-      else DefDef(sym, body)
-
-    def applyTypeInternal(tags: List[TT[_]]): Type = {
-      val symbols = tags map compilerSymbolFromTag
-      val container :: args = symbols
-      val tparams = container.typeConstructor.typeParams
-
-      // Conservative at present - if manifests were more usable this could do a lot more.
-      // [Eugene to Paul] all right, they are now. what do you have in mind?
-      require(symbols forall (_ ne NoSymbol), "Must find all tags: " + symbols)
-      require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container)
-      require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args)
-
-      appliedType(container, args map (_.tpe): _*)
-    }
-
-    def companionType[T](implicit ct: CT[T]) =
-      rootMirror.getRequiredModule(ct.runtimeClass.getName).tpe
-
-    // Use these like `applyType[List, Int]` or `applyType[Map, Int, String]`
-    def applyType[CC](implicit t1: TT[CC]): Type =
-      applyTypeInternal(List(t1))
-
-    def applyType[CC[X1], X1](implicit t1: TT[CC[_]], t2: TT[X1]): Type =
-      applyTypeInternal(List(t1, t2))
-
-    def applyType[CC[X1, X2], X1, X2](implicit t1: TT[CC[_,_]], t2: TT[X1], t3: TT[X2]): Type =
-      applyTypeInternal(List(t1, t2, t3))
-
-    def applyType[CC[X1, X2, X3], X1, X2, X3](implicit t1: TT[CC[_,_,_]], t2: TT[X1], t3: TT[X2], t4: TT[X3]): Type =
-      applyTypeInternal(List(t1, t2, t3, t4))
-
-    def newMethodType[F](owner: Symbol)(implicit t: TT[F]): Type = {
-      val fnSymbol = compilerSymbolFromTag(t)
-      val formals = compilerTypeFromTag(t).typeArguments
-      assert(fnSymbol isSubClass FunctionClass(formals.size - 1), (owner, t))
-      val params  = owner newSyntheticValueParams formals
-      MethodType(params, formals.last)
+  /** The annotations amongst those found on the original symbol which
+   *  should be propagated to this kind of accessor.
+   */
+  def deriveAnnotations(initial: List[AnnotationInfo], category: Symbol, keepClean: Boolean): List[AnnotationInfo] = {
+    def annotationFilter(ann: AnnotationInfo) = ann.metaAnnotations match {
+      case Nil if ann.defaultTargets.isEmpty => keepClean                             // no meta-annotations or default targets
+      case Nil                               => ann.defaultTargets contains category  // default targets exist for ann
+      case metas                             => metas exists (_ matches category)     // meta-annotations attached to ann
     }
-
-      /** The annotations amongst those found on the original symbol which
-       *  should be propagated to this kind of accessor.
-       */
-      def deriveAnnotations(initial: List[AnnotationInfo], category: Symbol, keepClean: Boolean): List[AnnotationInfo] = {
-        initial filter { ann =>
-          // There are no meta-annotation arguments attached to `ann`
-          if (ann.metaAnnotations.isEmpty) {
-            // A meta-annotation matching `annotKind` exists on `ann`'s definition.
-            (ann.defaultTargets contains category) ||
-            // `ann`'s definition has no meta-annotations, and `keepClean` is true.
-            (ann.defaultTargets.isEmpty && keepClean)
-          }
-          // There are meta-annotation arguments, and one of them matches `annotKind`
-          else ann.metaAnnotations exists (_ matches category)
-        }
-      }
-   }
-  import synthesisUtil._
+    initial filter annotationFilter
+  }
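
At the user level this filter corresponds to Scala's meta-annotation mechanism; a brief illustration (the annotation class Ann is made up):

import scala.annotation.StaticAnnotation
import scala.annotation.meta.{field, getter}

class Ann extends StaticAnnotation

class C {
  @(Ann @getter) val x = 1  // metas exist: Ann is carried to the getter only
  @(Ann @field)  val y = 2  // metas exist: Ann stays on the underlying field
  @Ann           val z = 3  // no metas: Ann's default targets / keepClean decide
}
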
 
   class ClassMethodSynthesis(val clazz: Symbol, localTyper: Typer) {
     def mkThis = This(clazz) setPos clazz.pos.focus
-    def mkThisSelect(sym: Symbol) = atPos(clazz.pos.focus)(Select(mkThis, sym))
+    def mkThisSelect(sym: Symbol) = atPos(clazz.pos.focus)(
+      if (clazz.isClass) Select(This(clazz), sym) else Ident(sym)
+    )
 
     private def isOverride(name: TermName) =
       clazzMember(name).alternatives exists (sym => !sym.isDeferred && (sym.owner != clazz))
@@ -99,19 +47,24 @@ trait MethodSynthesis {
       overrideFlag | SYNTHETIC
     }
     def newMethodFlags(method: Symbol) = {
-      val overrideFlag = if (isOverride(method.name)) OVERRIDE else 0L
+      val overrideFlag = if (isOverride(method.name.toTermName)) OVERRIDE else 0L
       (method.flags | overrideFlag | SYNTHETIC) & ~DEFERRED
     }
 
     private def finishMethod(method: Symbol, f: Symbol => Tree): Tree =
-      localTyper typed ValOrDefDef(method, f(method))
+      localTyper typed (
+        if (method.isLazy) ValDef(method, f(method))
+        else DefDef(method, f(method))
+      )
 
     private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = {
-      val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name))
+      val name1 = name.toTermName
+      val m = clazz.newMethod(name1, clazz.pos.focus, newMethodFlags(name1))
       finishMethod(m setInfoAndEnter info, f)
     }
     private def createInternal(name: Name, f: Symbol => Tree, infoFn: Symbol => Type): Tree = {
-      val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name))
+      val name1 = name.toTermName
+      val m = clazz.newMethod(name1, clazz.pos.focus, newMethodFlags(name1))
       finishMethod(m setInfoAndEnter infoFn(m), f)
     }
     private def cloneInternal(original: Symbol, f: Symbol => Tree, name: Name): Tree = {
@@ -119,22 +72,9 @@ trait MethodSynthesis {
       finishMethod(clazz.info.decls enter m, f)
     }
 
-    private def cloneInternal(original: Symbol, f: Symbol => Tree): Tree =
-      cloneInternal(original, f, original.name)
-
     def clazzMember(name: Name)  = clazz.info nonPrivateMember name
     def typeInClazz(sym: Symbol) = clazz.thisType memberType sym
 
-    /** Function argument takes the newly created method symbol of
-     *  the same type as `name` in clazz, and returns the tree to be
-     *  added to the template.
-     */
-    def overrideMethod(name: Name)(f: Symbol => Tree): Tree =
-      overrideMethod(clazzMember(name))(f)
-
-    def overrideMethod(original: Symbol)(f: Symbol => Tree): Tree =
-      cloneInternal(original, sym => f(sym setFlag OVERRIDE))
-
     def deriveMethod(original: Symbol, nameFn: Name => Name)(f: Symbol => Tree): Tree =
       cloneInternal(original, f, nameFn(original.name))
 
@@ -151,9 +91,9 @@ trait MethodSynthesis {
       createMethod(original)(m => gen.mkMethodCall(newMethod, transformArgs(m.paramss.head map Ident)))
 
     def createSwitchMethod(name: Name, range: Seq[Int], returnType: Type)(f: Int => Tree) = {
-      createMethod(name, List(IntClass.tpe), returnType) { m =>
+      createMethod(name, List(IntTpe), returnType) { m =>
         val arg0    = Ident(m.firstParam)
-        val default = DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg0)
+        val default = DEFAULT ==> Throw(IndexOutOfBoundsExceptionClass.tpe_*, fn(arg0, nme.toString_))
         val cases   = range.map(num => CASE(LIT(num)) ==> f(num)).toList :+ default
 
         Match(arg0, cases)
@@ -174,7 +114,7 @@ trait MethodSynthesis {
 
   /** There are two key methods in here.
    *
-   *   1) Enter methods such as enterGetterSetterare called
+   *   1) Enter methods such as enterGetterSetter are called
    *   from Namer with a tree which may generate further trees such as accessors or
    *   implicit wrappers. Some setup is performed.  In general this creates symbols
    *   and enters them into the scope of the owner.
@@ -219,14 +159,46 @@ trait MethodSynthesis {
       enterBeans(tree)
     }
 
+    /** This is called for those ValDefs which addDerivedTrees ignores, but
+     *  which might have a warnable annotation situation.
+     */
+    private def warnForDroppedAnnotations(tree: Tree) {
+      val annotations   = tree.symbol.initialize.annotations
+      val targetClass   = defaultAnnotationTarget(tree)
+      val retained      = deriveAnnotations(annotations, targetClass, keepClean = true)
+
+      annotations filterNot (retained contains _) foreach (ann => issueAnnotationWarning(tree, ann, targetClass))
+    }
+    private def issueAnnotationWarning(tree: Tree, ann: AnnotationInfo, defaultTarget: Symbol) {
+      global.reporter.warning(ann.pos,
+        s"no valid targets for annotation on ${tree.symbol} - it is discarded unused. " +
+        s"You may specify targets with meta-annotations, e.g. @($ann @${defaultTarget.name})")
+    }
+
     def addDerivedTrees(typer: Typer, stat: Tree): List[Tree] = stat match {
       case vd @ ValDef(mods, name, tpt, rhs) if !noFinishGetterSetter(vd) =>
         // If we don't save the annotations, they seem to wander off.
         val annotations = stat.symbol.initialize.annotations
-        ( allValDefDerived(vd)
+        val trees = (
+          allValDefDerived(vd)
                 map (acc => atPos(vd.pos.focus)(acc derive annotations))
           filterNot (_ eq EmptyTree)
         )
+        // Verify each annotation landed safely somewhere, else warn.
+        // Filtering when isParamAccessor is a necessary simplification
+        // because there's a bunch of unwritten annotation code involving
+        // the propagation of annotations - constructor parameter annotations
+        // may need to make their way to parameters of the constructor as
+        // well as fields of the class, etc.
+        if (!mods.isParamAccessor) annotations foreach (ann =>
+          if (!trees.exists(_.symbol hasAnnotation ann.symbol))
+            issueAnnotationWarning(vd, ann, GetterTargetClass)
+        )
+
+        trees
+      case vd: ValDef =>
+        warnForDroppedAnnotations(vd)
+        vd :: Nil
       case cd @ ClassDef(mods, _, _, _) if mods.isImplicit =>
         val annotations = stat.symbol.initialize.annotations
         // TODO: need to shuffle annotations between wrapper and class.
@@ -253,8 +225,7 @@ trait MethodSynthesis {
     )
     def beanAccessors(vd: ValDef): List[DerivedFromValDef] = {
       val setter = if (vd.mods.isMutable) List(BeanSetter(vd)) else Nil
-      if (forMSIL) Nil
-      else if (vd.symbol hasAnnotation BeanPropertyAttr)
+      if (vd.symbol hasAnnotation BeanPropertyAttr)
         BeanGetter(vd) :: setter
       else if (vd.symbol hasAnnotation BooleanBeanPropertyAttr)
         BooleanBeanGetter(vd) :: setter
@@ -276,7 +247,7 @@ trait MethodSynthesis {
      *  So it's important that creating an instance of Derived does not have a side effect,
      *  or if it has a side effect, control that it is done only once.
      */
-    trait Derived {
+    sealed trait Derived {
 
       /** The tree from which we are deriving a synthetic member. Typically, that's
        *  given as an argument of the instance. */
@@ -305,22 +276,21 @@ trait MethodSynthesis {
       def derivedTree: Tree
     }
 
-    trait DerivedFromMemberDef extends Derived {
+    sealed trait DerivedFromMemberDef extends Derived {
       def tree: MemberDef
       def enclClass: Symbol
 
       // Final methods to make the rest easier to reason about.
       final def mods               = tree.mods
       final def basisSym           = tree.symbol
-      final def derivedFlags: Long = basisSym.flags & flagsMask | flagsExtra
     }
 
-    trait DerivedFromClassDef extends DerivedFromMemberDef {
+    sealed trait DerivedFromClassDef extends DerivedFromMemberDef {
       def tree: ClassDef
       final def enclClass = basisSym.owner.enclClass
     }
 
-    trait DerivedFromValDef extends DerivedFromMemberDef {
+    sealed trait DerivedFromValDef extends DerivedFromMemberDef {
       def tree: ValDef
       final def enclClass = basisSym.enclClass
 
@@ -359,10 +329,10 @@ trait MethodSynthesis {
         logDerived(derivedTree)
       }
     }
-    trait DerivedGetter extends DerivedFromValDef {
+    sealed trait DerivedGetter extends DerivedFromValDef {
       // TODO
     }
-    trait DerivedSetter extends DerivedFromValDef {
+    sealed trait DerivedSetter extends DerivedFromValDef {
       override def isSetter = true
       private def setterParam = derivedSym.paramss match {
         case (p :: Nil) :: _  => p
@@ -396,11 +366,11 @@ trait MethodSynthesis {
       def name: TermName               = tree.name.toTermName
     }
 
-    abstract class BaseGetter(tree: ValDef) extends DerivedGetter {
+    sealed abstract class BaseGetter(tree: ValDef) extends DerivedGetter {
       def name       = tree.name
       def category   = GetterTargetClass
       def flagsMask  = GetterFlags
-      def flagsExtra = ACCESSOR | ( if (tree.mods.isMutable) 0 else STABLE )
+      def flagsExtra = ACCESSOR.toLong | ( if (tree.mods.isMutable) 0 else STABLE )
 
       override def validate() {
         assert(derivedSym != NoSymbol, tree)
@@ -411,12 +381,9 @@ trait MethodSynthesis {
       }
     }
     case class Getter(tree: ValDef) extends BaseGetter(tree) {
-      override def derivedSym = (
-        if (mods.isDeferred) basisSym
-        else basisSym.getter(enclClass)
-      )
-
-      override def derivedTree: DefDef = {
+      override def derivedSym = if (mods.isDeferred) basisSym else basisSym.getter(enclClass)
+      private def derivedRhs  = if (mods.isDeferred) EmptyTree else fieldSelection
+      private def derivedTpt = {
         // For existentials, don't specify a type for the getter, even one derived
         // from the symbol! This leads to incompatible existentials for the field and
         // the getter. Let the typer do all the work. You might think "why only for
@@ -425,29 +392,16 @@ trait MethodSynthesis {
         // starts compiling (instead of failing like it's supposed to) because the typer
         // expects to be able to identify escaping locals in typedDefDef, and fails to
         // spot that brand of them. In other words it's an artifact of the implementation.
-        val tpt = derivedSym.tpe.finalResultType match {
-          case ExistentialType(_, _)  => TypeTree()
-          case _ if mods.isDeferred   => TypeTree()
+        val tpt = derivedSym.tpe_*.finalResultType.widen match {
+          // Range position errors ensue if we don't duplicate this in some
+          // circumstances (at least: concrete vals with existential types.)
+          case ExistentialType(_, _)  => TypeTree() setOriginal (tree.tpt.duplicate setPos tree.tpt.pos.focus)
+          case _ if mods.isDeferred   => TypeTree() setOriginal tree.tpt // keep type tree of original abstract field
           case tp                     => TypeTree(tp)
         }
-        tpt setPos derivedSym.pos.focus
-        // keep type tree of original abstract field
-        if (mods.isDeferred)
-          tpt setOriginal tree.tpt
-
-        // TODO - reconcile this with the DefDef creator in Trees (which
-        //   at this writing presented no way to pass a tree in for tpt.)
-        atPos(derivedSym.pos) {
-          DefDef(
-            Modifiers(derivedSym.flags),
-            derivedSym.name.toTermName,
-            Nil,
-            Nil,
-            tpt,
-            if (mods.isDeferred) EmptyTree else gen.mkCheckInit(fieldSelection)
-          ) setSymbol derivedSym
-        }
+        tpt setPos tree.tpt.pos.focus
       }
+      override def derivedTree: DefDef = newDefDef(derivedSym, derivedRhs)(tpt = derivedTpt)
     }
     /** Implements lazy value accessors:
      *    - for lazy values of type Unit and all lazy fields inside traits,
@@ -458,7 +412,7 @@ trait MethodSynthesis {
     case class LazyValGetter(tree: ValDef) extends BaseGetter(tree) {
       class ChangeOwnerAndModuleClassTraverser(oldowner: Symbol, newowner: Symbol)
         extends ChangeOwnerTraverser(oldowner, newowner) {
-        
+
         override def traverse(tree: Tree) {
           tree match {
             case _: DefTree => change(tree.symbol.moduleClass)
@@ -473,13 +427,13 @@ trait MethodSynthesis {
       override def derivedSym = basisSym.lazyAccessor
       override def derivedTree: DefDef = {
         val ValDef(_, _, tpt0, rhs0) = tree
-        val rhs1 = transformed.getOrElse(rhs0, rhs0)
+        val rhs1 = context.unit.transformed.getOrElse(rhs0, rhs0)
         val body = (
           if (tree.symbol.owner.isTrait || hasUnitType(basisSym)) rhs1
           else gen.mkAssignAndReturn(basisSym, rhs1)
         )
-        derivedSym.setPos(tree.pos) // cannot set it at createAndEnterSymbol because basisSym can possible stil have NoPosition
-        val ddefRes = atPos(tree.pos)(DefDef(derivedSym, new ChangeOwnerAndModuleClassTraverser(basisSym, derivedSym)(body)))
+        derivedSym setPos tree.pos // cannot set it at createAndEnterSymbol because basisSym can possibly still have NoPosition
+        val ddefRes = DefDef(derivedSym, new ChangeOwnerAndModuleClassTraverser(basisSym, derivedSym)(body))
         // ValDef will have its position focused whereas DefDef will have original correct rangepos
         // ideally positions would be correct at the creation time but lazy vals are really a special case
         // here so for the sake of keeping api clean we fix positions manually in LazyValGetter
@@ -489,7 +443,7 @@ trait MethodSynthesis {
       }
     }
     case class Setter(tree: ValDef) extends DerivedSetter {
-      def name       = nme.getterToSetter(tree.name)
+      def name       = tree.setterName
       def category   = SetterTargetClass
       def flagsMask  = SetterFlags
       def flagsExtra = ACCESSOR
@@ -497,7 +451,7 @@ trait MethodSynthesis {
       override def derivedSym = basisSym.setter(enclClass)
     }
     case class Field(tree: ValDef) extends DerivedFromValDef {
-      def name       = nme.getterToLocal(tree.name)
+      def name       = tree.localName
       def category   = FieldTargetClass
       def flagsMask  = FieldFlags
       def flagsExtra = PrivateLocal
@@ -528,7 +482,7 @@ trait MethodSynthesis {
       def flagsExtra = 0
       override def derivedSym = enclClass.info decl name
     }
-    trait AnyBeanGetter extends BeanAccessor with DerivedGetter {
+    sealed trait AnyBeanGetter extends BeanAccessor with DerivedGetter {
       def category = BeanGetterTargetClass
       override def validate() {
         if (derivedSym == NoSymbol) {
@@ -558,7 +512,7 @@ trait MethodSynthesis {
 
     // No Symbols available.
     private def beanAccessorsFromNames(tree: ValDef) = {
-      val ValDef(mods, name, tpt, _) = tree
+      val ValDef(mods, _, _, _) = tree
       val hasBP     = mods hasAnnotationNamed tpnme.BeanPropertyAnnot
       val hasBoolBP = mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot
 
@@ -575,9 +529,6 @@ trait MethodSynthesis {
     }
 
     protected def enterBeans(tree: ValDef) {
-      if (forMSIL)
-        return
-
       val ValDef(mods, name, _, _) = tree
       val beans = beanAccessorsFromNames(tree)
       if (beans.nonEmpty) {
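
For context, what the bean-accessor derivation handled here produces at the user level, sketched with the standard-library annotations:

import scala.beans.{BeanProperty, BooleanBeanProperty}

class Person {
  @BeanProperty var name: String = ""        // synthesizes getName() / setName(String)
  @BooleanBeanProperty var active = false    // synthesizes isActive() / setActive(Boolean)
}
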
diff --git a/src/compiler/scala/tools/nsc/typechecker/Modes.scala b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
deleted file mode 100644
index d650762..0000000
--- a/src/compiler/scala/tools/nsc/typechecker/Modes.scala
+++ /dev/null
@@ -1,140 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Martin Odersky
- */
-
-package scala.tools.nsc
-package typechecker
-
-/** Mode constants.
- */
-trait Modes {
-  /** NOmode, EXPRmode and PATTERNmode are mutually exclusive.
-   */
-  final val NOmode        = 0x000
-  final val EXPRmode      = 0x001
-  final val PATTERNmode   = 0x002
-
-  /** TYPEmode needs a comment. <-- XXX.
-   */
-  final val TYPEmode      = 0x004
-
-  /** SCCmode is orthogonal to above. When set we are
-   *  in the this or super constructor call of a constructor.
-   */
-  final val SCCmode       = 0x008
-
-  /** FUNmode is orthogonal to above.
-   *  When set we are looking for a method or constructor.
-   */
-  final val FUNmode       = 0x010
-
-  /** POLYmode is orthogonal to above.
-   *  When set expression types can be polymorphic.
-   */
-  final val POLYmode      = 0x020
-
-  /** QUALmode is orthogonal to above. When set
-   *  expressions may be packages and Java statics modules.
-   */
-  final val QUALmode      = 0x040
-
-  /** TAPPmode is set for the function/type constructor
-   *  part of a type application. When set we do not decompose PolyTypes.
-   */
-  final val TAPPmode      = 0x080
-
-  /** SUPERCONSTRmode is set for the super
-   *  in a superclass constructor call super.<init>.
-   */
-  final val SUPERCONSTRmode = 0x100
-
-  /** SNDTRYmode indicates that an application is typed for the 2nd time.
-   *  In that case functions may no longer be coerced with implicit views.
-   */
-  final val SNDTRYmode    = 0x200
-
-  /** LHSmode is set for the left-hand side of an assignment.
-   */
-  final val LHSmode       = 0x400
-
-  /** STARmode is set when star patterns are allowed.
-   *  (This was formerly called REGPATmode.)
-   */
-  final val STARmode      = 0x1000
-
-  /** ALTmode is set when we are under a pattern alternative.
-   */
-  final val ALTmode       = 0x2000
-
-  /** HKmode is set when we are typing a higher-kinded type.
-   *  adapt should then check kind-arity based on the prototypical type's
-   *  kind arity.  Type arguments should not be inferred.
-   */
-  final val HKmode        = 0x4000 // @M: could also use POLYmode | TAPPmode
-
-  /** BYVALmode is set when we are typing an expression
-   *  that occurs in a by-value position. An expression e1 is in by-value
-   *  position within expression e2 iff it will be reduced to a value at that
-   *  position during the evaluation of e2.  Examples are by-value function
-   *  arguments or the conditional of an if-then-else clause.
-   *  This mode has been added to support continuations.
-   */
-  final val BYVALmode     = 0x8000
-
-  /** TYPEPATmode is set when we are typing a type in a pattern.
-   */
-  final val TYPEPATmode   = 0x10000
-
-  /** RETmode is set when we are typing a return expression.
-   */
-  final val RETmode       = 0x20000
-
-  final private val StickyModes   = EXPRmode | PATTERNmode | TYPEmode | ALTmode
-
-  final def onlyStickyModes(mode: Int) =
-    mode & StickyModes
-
-  final def forFunMode(mode: Int) =
-    mode & (StickyModes | SCCmode) | FUNmode | POLYmode | BYVALmode
-
-  final def forTypeMode(mode: Int) =
-    if (inAnyMode(mode, PATTERNmode | TYPEPATmode)) TYPEmode | TYPEPATmode
-    else TYPEmode
-
-  final def inAllModes(mode: Int, required: Int)  = (mode & required) == required
-  final def inAnyMode(mode: Int, required: Int)   = (mode & required) != 0
-  final def inNoModes(mode: Int, prohibited: Int) = (mode & prohibited) == 0
-  final def inHKMode(mode: Int)                   = (mode & HKmode) != 0
-  final def inFunMode(mode: Int)                  = (mode & FUNmode) != 0
-  final def inPolyMode(mode: Int)                 = (mode & POLYmode) != 0
-  final def inPatternMode(mode: Int)              = (mode & PATTERNmode) != 0
-  final def inExprModeOr(mode: Int, others: Int)  = (mode & (EXPRmode | others)) != 0
-  final def inExprModeButNot(mode: Int, prohibited: Int) =
-    (mode & (EXPRmode | prohibited)) == EXPRmode
-
-  /** Translates a mask of mode flags into something readable.
-   */
-  private val modeNameMap = Map[Int, String](
-    (1 << 0)  -> "EXPRmode",
-    (1 << 1)  -> "PATTERNmode",
-    (1 << 2)  -> "TYPEmode",
-    (1 << 3)  -> "SCCmode",
-    (1 << 4)  -> "FUNmode",
-    (1 << 5)  -> "POLYmode",
-    (1 << 6)  -> "QUALmode",
-    (1 << 7)  -> "TAPPmode",
-    (1 << 8)  -> "SUPERCONSTRmode",
-    (1 << 9)  -> "SNDTRYmode",
-    (1 << 10) -> "LHSmode",
-    (1 << 11) -> "<DOES NOT EXIST mode>",
-    (1 << 12) -> "STARmode",
-    (1 << 13) -> "ALTmode",
-    (1 << 14) -> "HKmode",
-    (1 << 15) -> "BYVALmode",
-    (1 << 16) -> "TYPEPATmode"
-  )
-  def modeString(mode: Int): String =
-    if (mode == 0) "NOmode"
-    else (modeNameMap filterKeys (bit => inAllModes(mode, bit))).values mkString " "
-}
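
The deleted trait above encoded typer modes as plain Int bit flags; elsewhere in this patch they are replaced by a dedicated `Mode` type (see the changed signature of `transformNamedApplication` further down). As a reference for readers of the removed code, here is a minimal, self-contained sketch of the bit-flag arithmetic it relied on; the constants and helpers are copied from the trait above, while the wrapping object and `main` are purely illustrative:

    object ModeFlagsSketch {
      // constants taken from the removed Modes trait
      final val EXPRmode = 0x001
      final val FUNmode  = 0x010
      final val POLYmode = 0x020

      // helpers as defined above: "all bits set" vs. "any bit set"
      final def inAllModes(mode: Int, required: Int) = (mode & required) == required
      final def inAnyMode(mode: Int, required: Int)  = (mode & required) != 0

      def main(args: Array[String]): Unit = {
        val mode = EXPRmode | FUNmode                  // 0x011
        println(inAllModes(mode, EXPRmode | FUNmode))  // true
        println(inAnyMode(mode, POLYmode))             // false
      }
    }
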
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index bb93807..23dc57d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -8,9 +8,8 @@ package typechecker
 
 import scala.collection.mutable
 import scala.annotation.tailrec
-import scala.ref.WeakReference
 import symtab.Flags._
-import scala.tools.nsc.io.AbstractFile
+import scala.language.postfixOps
 
 /** This trait declares methods to create symbols and to enter them into scopes.
  *
@@ -23,7 +22,7 @@ trait Namers extends MethodSynthesis {
   import global._
   import definitions._
 
-  private var _lockedCount = 0
+  var _lockedCount = 0
   def lockedCount = this._lockedCount
 
   /** Replaces any Idents for which cond is true with fresh TypeTrees().
@@ -36,7 +35,8 @@ trait Namers extends MethodSynthesis {
     }
     def apply(tree: Tree) = {
       val r = transform(tree)
-      if (r.exists(_.isEmpty)) TypeTree()
+      if (r exists { case tt: TypeTree => tt.isEmpty case _ => false })
+        TypeTree()
       else r
     }
   }
@@ -49,10 +49,10 @@ trait Namers extends MethodSynthesis {
 
   private class NormalNamer(context: Context) extends Namer(context)
   def newNamer(context: Context): Namer = new NormalNamer(context)
-  def newNamerFor(context: Context, tree: Tree): Namer =
-    newNamer(context.makeNewScope(tree, tree.symbol))
 
   abstract class Namer(val context: Context) extends MethodSynth with NamerContextErrors { thisNamer =>
+    // overridden by the presentation compiler
+    def saveDefaultGetter(meth: Symbol, default: Symbol) { }
 
     import NamerErrorGen._
     val typer = newTyper(context)
@@ -107,8 +107,8 @@ trait Namers extends MethodSynthesis {
     }
 
     protected def owner       = context.owner
-    private def contextFile = context.unit.source.file
-    private def typeErrorHandler[T](tree: Tree, alt: T): PartialFunction[Throwable, T] = {
+    def contextFile = context.unit.source.file
+    def typeErrorHandler[T](tree: Tree, alt: T): PartialFunction[Throwable, T] = {
       case ex: TypeError =>
         // H@ need to ensure that we handle only cyclic references
         TypeSigError(tree, ex)
@@ -122,10 +122,31 @@ trait Namers extends MethodSynthesis {
       || (vd.mods.isPrivateLocal && !vd.mods.isCaseAccessor)
       || (vd.name startsWith nme.OUTER)
       || (context.unit.isJava)
+      || isEnumConstant(vd)
     )
+
     def noFinishGetterSetter(vd: ValDef) = (
          (vd.mods.isPrivateLocal && !vd.mods.isLazy) // all lazy vals need accessors, even private[this]
-      || vd.symbol.isModuleVar)
+      || vd.symbol.isModuleVar
+      || isEnumConstant(vd))
+
+    /** Determines whether this field holds an enum constant.
+      * To qualify, the following conditions must be met:
+      *  - The field's class has the ENUM flag set
+      *  - The field's class extends java.lang.Enum
+      *  - The field has the ENUM flag set
+      *  - The field is static
+      *  - The field is stable
+      */
+    def isEnumConstant(vd: ValDef) = {
+      val ownerHasEnumFlag =
+        // Necessary to check because scalac puts Java's static members into the companion object
+        // while Scala's enum constants live directly in the class.
+        // We don't check for clazz.superClass == JavaEnumClass, because this causes an illegal
+        // cyclic reference error. See the commit message for details.
+        if (context.unit.isJava) owner.companionClass.hasEnumFlag else owner.hasEnumFlag
+      vd.mods.hasAllFlags(ENUM | STABLE | STATIC) && ownerHasEnumFlag
+    }
 
     def setPrivateWithin[T <: Symbol](tree: Tree, sym: T, mods: Modifiers): T =
       if (sym.isPrivateLocal || !mods.hasAccessBoundary) sym
@@ -135,7 +156,8 @@ trait Namers extends MethodSynthesis {
       setPrivateWithin(tree, sym, tree.mods)
 
     def inConstructorFlag: Long = {
-      val termOwnedContexts: List[Context] = context.enclosingContextChain.takeWhile(_.owner.isTerm)
+      val termOwnedContexts: List[Context] =
+        context.enclosingContextChain.takeWhile(c => c.owner.isTerm && !c.owner.isAnonymousFunction)
       val constructorNonSuffix = termOwnedContexts exists (c => c.owner.isConstructor && !c.inConstructorSuffix)
       val earlyInit            = termOwnedContexts exists (_.owner.isEarlyInitialized)
       if (constructorNonSuffix || earlyInit) INCONSTRUCTOR else 0L
@@ -147,10 +169,17 @@ trait Namers extends MethodSynthesis {
     def updatePosFlags(sym: Symbol, pos: Position, flags: Long): Symbol = {
       debuglog("[overwrite] " + sym)
       val newFlags = (sym.flags & LOCKED) | flags
+      sym.rawInfo match {
+        case tr: TypeRef =>
+          // !!! needed for: pos/t5954d; the uniques type cache will happily serve up the same TypeRef
+          // over this mutated symbol, and we witness a stale cache for `parents`.
+          tr.invalidateCaches()
+        case _ =>
+      }
       sym reset NoType setFlag newFlags setPos pos
       sym.moduleClass andAlso (updatePosFlags(_, pos, moduleClassFlags(flags)))
 
-      if (sym.owner.isPackageClass) {
+      if (sym.isTopLevel) {
         companionSymbolOf(sym, context) andAlso { companion =>
           val assignNoType = companion.rawInfo match {
             case _: SymLoader => true
@@ -173,21 +202,24 @@ trait Namers extends MethodSynthesis {
       else innerNamer
     }
 
+    // FIXME - this logic needs to be thoroughly explained
+    // and justified.  I know it's wrong with respect to package
+    // objects, but I think it's also wrong in other ways.
     protected def conflict(newS: Symbol, oldS: Symbol) = (
        (   !oldS.isSourceMethod
         || nme.isSetterName(newS.name)
-        || newS.owner.isPackageClass
+        || newS.isTopLevel
        ) &&
       !(   // @M: allow repeated use of `_` for higher-order type params
            (newS.owner.isTypeParameter || newS.owner.isAbstractType)
            // FIXME: name comparisons not successful, are these underscores
            // sometimes nme.WILDCARD and sometimes tpnme.WILDCARD?
-        && (newS.name.toString == nme.WILDCARD.toString)
+        && (newS.name string_== nme.WILDCARD)
        )
     )
 
     private def allowsOverload(sym: Symbol) = (
-      sym.isSourceMethod && sym.owner.isClass && !sym.owner.isPackageClass
+      sym.isSourceMethod && sym.owner.isClass && !sym.isTopLevel
     )
 
     private def inCurrentScope(m: Symbol): Boolean = {
@@ -200,6 +232,19 @@ trait Namers extends MethodSynthesis {
 
     /** Enter symbol into given scope and return symbol itself */
     def enterInScope(sym: Symbol, scope: Scope): Symbol = {
+      // FIXME - this is broken in a number of ways.
+      //
+      // 1) If "sym" allows overloading, that is not itself sufficient to skip
+      // the check, because "prev.sym" also must allow overloading.
+      //
+      // 2) There is nothing which reconciles a package's scope with
+      // the package object's scope.  This is the source of many bugs
+      // with e.g. defining a case class in a package object.  When
+      // compiling against classes, the class symbol is created in the
+      // package and in the package object, and the conflict is undetected.
+      // There is also a non-deterministic outcome for situations like
+      // an object with the same name as a method in the package object.
+
       // allow for overloaded methods
       if (!allowsOverload(sym)) {
         val prev = scope.lookupEntry(sym.name)
@@ -226,7 +271,12 @@ trait Namers extends MethodSynthesis {
         validate(sym2.companionClass)
     }
 
-    def enterSym(tree: Tree): Context = {
+    def enterSym(tree: Tree): Context = pluginsEnterSym(this, tree)
+
+    /** Default implementation of `enterSym`.
+     *  Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsEnterSym for more details)
+     */
+    def standardEnterSym(tree: Tree): Context = {
       def dispatch() = {
         var returnContext = this.context
         tree match {
@@ -239,7 +289,7 @@ trait Namers extends MethodSynthesis {
           case DocDef(_, defn)                               => enterSym(defn)
           case tree @ Import(_, _)                           =>
             assignSymbol(tree)
-            returnContext = context.makeNewImport(tree)
+            returnContext = context.make(tree)
           case _ =>
         }
         returnContext
@@ -275,10 +325,13 @@ trait Namers extends MethodSynthesis {
     }
 
     private def logAssignSymbol(tree: Tree, sym: Symbol): Symbol = {
-      sym.name.toTermName match {
+      if (isPastTyper) sym.name.toTermName match {
         case nme.IMPORT | nme.OUTER | nme.ANON_CLASS_NAME | nme.ANON_FUN_NAME | nme.CONSTRUCTOR => ()
         case _                                                                                  =>
-          log("[+symbol] " + sym.debugLocationString)
+          tree match {
+            case md: DefDef => log("[+symbol] " + sym.debugLocationString)
+            case _          =>
+          }
       }
       tree.symbol = sym
       sym
@@ -289,7 +342,7 @@ trait Namers extends MethodSynthesis {
      *  be transferred to the symbol as they are, supply a mask containing
      *  the flags to keep.
      */
-    private def createMemberSymbol(tree: MemberDef, name: Name, mask: Long): Symbol = {
+    def createMemberSymbol(tree: MemberDef, name: Name, mask: Long): Symbol = {
       val pos         = tree.pos
       val isParameter = tree.mods.isParameter
       val flags       = tree.mods.flags & mask
@@ -300,28 +353,28 @@ trait Namers extends MethodSynthesis {
         case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => owner.newConstructor(pos, flags)
         case DefDef(_, _, _, _, _, _)               => owner.newMethod(name.toTermName, pos, flags)
         case ClassDef(_, _, _, _)                   => owner.newClassSymbol(name.toTypeName, pos, flags)
-        case ModuleDef(_, _, _)                     => owner.newModule(name, pos, flags)
+        case ModuleDef(_, _, _)                     => owner.newModule(name.toTermName, pos, flags)
         case PackageDef(pid, _)                     => createPackageSymbol(pos, pid)
         case ValDef(_, _, _, _)                     =>
-          if (isParameter) owner.newValueParameter(name, pos, flags)
-          else owner.newValue(name, pos, flags)
+          if (isParameter) owner.newValueParameter(name.toTermName, pos, flags)
+          else owner.newValue(name.toTermName, pos, flags)
       }
     }
-    private def createFieldSymbol(tree: ValDef): TermSymbol =
-      owner.newValue(nme.getterToLocal(tree.name), tree.pos, tree.mods.flags & FieldFlags | PrivateLocal)
+    def createFieldSymbol(tree: ValDef): TermSymbol =
+      owner.newValue(tree.localName, tree.pos, tree.mods.flags & FieldFlags | PrivateLocal)
 
-    private def createImportSymbol(tree: Tree) =
+    def createImportSymbol(tree: Tree) =
       NoSymbol.newImport(tree.pos) setInfo completerOf(tree)
 
     /** All PackageClassInfoTypes come from here. */
-    private def createPackageSymbol(pos: Position, pid: RefTree): Symbol = {
+    def createPackageSymbol(pos: Position, pid: RefTree): Symbol = {
       val pkgOwner = pid match {
         case Ident(_)                 => if (owner.isEmptyPackageClass) rootMirror.RootClass else owner
         case Select(qual: RefTree, _) => createPackageSymbol(pos, qual).moduleClass
       }
       val existing = pkgOwner.info.decls.lookup(pid.name)
 
-      if (existing.isPackage && pkgOwner == existing.owner)
+      if (existing.hasPackageFlag && pkgOwner == existing.owner)
         existing
       else {
         val pkg          = pkgOwner.newPackage(pid.name.toTermName, pos)
@@ -335,11 +388,10 @@ trait Namers extends MethodSynthesis {
     }
 
     private def enterClassSymbol(tree: ClassDef, clazz: ClassSymbol): Symbol = {
-      val file = contextFile
       if (clazz.sourceFile != null && clazz.sourceFile != contextFile)
-        debugwarn("!!! Source mismatch in " + clazz + ": " + clazz.sourceFile + " vs. " + contextFile)
+        devWarning(s"Source file mismatch in $clazz: ${clazz.sourceFile} vs. $contextFile")
 
-      clazz.sourceFile = contextFile
+      clazz.associatedFile = contextFile
       if (clazz.sourceFile != null) {
         assert(currentRun.canRedefine(clazz) || clazz.sourceFile == currentRun.symSource(clazz), clazz.sourceFile)
         currentRun.symSource(clazz) = clazz.sourceFile
@@ -353,7 +405,7 @@ trait Namers extends MethodSynthesis {
       val existing = context.scope.lookup(tree.name)
       val isRedefinition = (
            existing.isType
-        && existing.owner.isPackageClass
+        && existing.isTopLevel
         && context.scope == existing.owner.info.decls
         && currentRun.canRedefine(existing)
       )
@@ -366,21 +418,19 @@ trait Namers extends MethodSynthesis {
         else assignAndEnterSymbol(tree) setFlag inConstructorFlag
       }
       clazz match {
-        case csym: ClassSymbol if csym.owner.isPackageClass => enterClassSymbol(tree, csym)
-        case _                                              => clazz
+        case csym: ClassSymbol if csym.isTopLevel => enterClassSymbol(tree, csym)
+        case _                                    => clazz
       }
     }
 
     /** Given a ClassDef or ModuleDef, verifies there isn't a companion which
      *  has been defined in a separate file.
      */
-    private def validateCompanionDefs(tree: ImplDef) {
-      val sym = tree.symbol
-      if (sym eq NoSymbol) return
-
+    def validateCompanionDefs(tree: ImplDef) {
+      val sym    = tree.symbol orElse { return }
       val ctx    = if (context.owner.isPackageObjectClass) context.outer else context
-      val module = if (sym.isModule) sym else ctx.scope lookup tree.name.toTermName
-      val clazz  = if (sym.isClass) sym else ctx.scope lookup tree.name.toTypeName
+      val module = if (sym.isModule) sym else ctx.scope lookupModule tree.name
+      val clazz  = if (sym.isClass) sym else ctx.scope lookupClass tree.name
       val fails  = (
            module.isModule
         && clazz.isClass
@@ -408,13 +458,23 @@ trait Namers extends MethodSynthesis {
       sym
     }
 
-    /** Enter a module symbol. The tree parameter can be either
-     *  a module definition or a class definition.
+    /** Enter a module symbol.
      */
     def enterModuleSymbol(tree : ModuleDef): Symbol = {
-      var m: Symbol = context.scope lookupAll tree.name find (_.isModule) getOrElse NoSymbol
+      var m: Symbol = context.scope lookupModule tree.name
       val moduleFlags = tree.mods.flags | MODULE
       if (m.isModule && !m.isPackage && inCurrentScope(m) && (currentRun.canRedefine(m) || m.isSynthetic)) {
+        // This code accounts for the way the package objects found in the classpath are opened up
+        // early by the completer of the package itself. If the `packageobjects` phase then finds
+        // the same package object in sources, we have to clean the slate and remove package object
+        // members from the package class.
+        //
+        // TODO SI-4695 Pursue the approach in https://github.com/scala/scala/pull/2789 that avoids
+        //      opening up the package object on the classpath at all if one exists in source.
+        if (m.isPackageObject) {
+          val packageScope = m.enclosingPackageClass.rawInfo.decls
+          packageScope.filter(_.owner != m.enclosingPackageClass).toList.foreach(packageScope unlink _)
+        }
         updatePosFlags(m, tree.pos, moduleFlags)
         setPrivateWithin(tree, m)
         m.moduleClass andAlso (setPrivateWithin(tree, _))
@@ -426,8 +486,8 @@ trait Namers extends MethodSynthesis {
         m.moduleClass setFlag moduleClassFlags(moduleFlags)
         setPrivateWithin(tree, m.moduleClass)
       }
-      if (m.owner.isPackageClass && !m.isPackage) {
-        m.moduleClass.sourceFile = contextFile
+      if (m.isTopLevel && !m.isPackage) {
+        m.moduleClass.associatedFile = contextFile
         currentRun.symSource(m) = m.moduleClass.sourceFile
         registerTopLevelSym(m)
       }
@@ -450,7 +510,13 @@ trait Namers extends MethodSynthesis {
      *  class definition tree.
      *  @return the companion object symbol.
      */
-    def ensureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = {
+    def ensureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol =
+      pluginsEnsureCompanionObject(this, cdef, creator)
+
+    /** Default implementation of `ensureCompanionObject`.
+     *  Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsEnsureCompanionObject for more details)
+     */
+    def standardEnsureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = {
       val m = companionSymbolOf(cdef.symbol, context)
       // @luc: not sure why "currentRun.compiles(m)" is needed, things break
       // otherwise. documentation welcome.
@@ -489,7 +555,7 @@ trait Namers extends MethodSynthesis {
             typer.permanentlyHiddenWarning(pos, to0, e.sym)
           else if (context ne context.enclClass) {
             val defSym = context.prefix.member(to) filter (
-              sym => sym.exists && context.isAccessible(sym, context.prefix, false))
+              sym => sym.exists && context.isAccessible(sym, context.prefix, superAccess = false))
 
             defSym andAlso (typer.permanentlyHiddenWarning(pos, to0, _))
           }
@@ -509,7 +575,7 @@ trait Namers extends MethodSynthesis {
         if (from != nme.WILDCARD && base != ErrorType) {
           if (isValid(from)) {
             // for Java code importing Scala objects
-            if (!nme.isModuleName(from) || isValid(nme.stripModuleSuffix(from))) {
+            if (!nme.isModuleName(from) || isValid(from.dropModule)) {
               typer.TyperErrorGen.NotAMemberError(tree, expr, from)
             }
           }
@@ -546,8 +612,8 @@ trait Namers extends MethodSynthesis {
       val sym      = copyDef.symbol
       val lazyType = completerOf(copyDef)
 
-      /** Assign the types of the class parameters to the parameters of the
-       *  copy method. See comment in `Unapplies.caseClassCopyMeth` */
+      /* Assign the types of the class parameters to the parameters of the
+       * copy method. See comment in `Unapplies.caseClassCopyMeth` */
       def assignParamTypes() {
         val clazz = sym.owner
         val constructorType = clazz.primaryConstructor.tpe
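
The `assignParamTypes` logic above gives the synthetic case-class `copy` method parameters with the types of the class parameters (see `Unapplies.caseClassCopyMeth`). A small source-level sketch of the member being synthesized, assuming an illustrative `Point` class; the commented-out signature is only a conceptual desugaring, not compiler output:

    object CopyMethodSketch {
      case class Point(x: Int, y: Int)
      // conceptually the synthesized member behaves like:
      //   def copy(x: Int = this.x, y: Int = this.y): Point = new Point(x, y)
      def main(args: Array[String]): Unit =
        println(Point(1, 2).copy(y = 5))   // Point(1,5)
    }
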
@@ -587,28 +653,13 @@ trait Namers extends MethodSynthesis {
       }
     }
 
-    def enterIfNotThere(sym: Symbol) {
-      val scope = context.scope
-      @tailrec def search(e: ScopeEntry) {
-        if ((e eq null) || (e.owner ne scope))
-          scope enter sym
-        else if (e.sym ne sym)  // otherwise, aborts since we found sym
-          search(e.tail)
-      }
-      search(scope lookupEntry sym.name)
-    }
-
     def enterValDef(tree: ValDef) {
       if (noEnterGetterSetter(tree))
         assignAndEnterFinishedSymbol(tree)
       else
         enterGetterSetter(tree)
 
-      // When java enums are read from bytecode, they are known to have
-      // constant types by the jvm flag and assigned accordingly.  When
-      // they are read from source, the java parser marks them with the
-      // STABLE flag, and now we receive that signal.
-      if (tree.symbol hasAllFlags STABLE | JAVA)
+      if (isEnumConstant(tree))
         tree.symbol setInfo ConstantType(Constant(tree.symbol))
     }
 
@@ -620,7 +671,7 @@ trait Namers extends MethodSynthesis {
       // via "x$lzy" as can be seen in test #3927.
       val sym = (
         if (owner.isClass) createFieldSymbol(tree)
-        else owner.newValue(tree.name append nme.LAZY_LOCAL, tree.pos, tree.mods.flags & ~IMPLICIT)
+        else owner.newValue(tree.name append nme.LAZY_LOCAL, tree.pos, (tree.mods.flags | ARTIFACT) & ~IMPLICIT)
       )
       enterValSymbol(tree, sym setFlag MUTABLE setLazyAccessor lazyAccessor)
     }
@@ -641,7 +692,7 @@ trait Namers extends MethodSynthesis {
       case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) =>
         assignAndEnterFinishedSymbol(tree)
       case DefDef(mods, name, tparams, _, _, _) =>
-        val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE else 0
+        val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE | ARTIFACT else 0
         val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag
 
         if (name == nme.copy && sym.isSynthetic)
@@ -651,15 +702,12 @@ trait Namers extends MethodSynthesis {
     }
 
     def enterClassDef(tree: ClassDef) {
-      val ClassDef(mods, name, tparams, impl) = tree
+      val ClassDef(mods, _, _, impl) = tree
       val primaryConstructorArity = treeInfo.firstConstructorArgs(impl.body).size
       tree.symbol = enterClassSymbol(tree)
       tree.symbol setInfo completerOf(tree)
 
       if (mods.isCase) {
-        if (primaryConstructorArity > MaxFunctionArity)
-          MaxParametersCaseClassError(tree)
-
         val m = ensureCompanionObject(tree, caseModuleDef)
         m.moduleClass.updateAttachment(new ClassForCaseCompanionAttachment(tree))
       }
@@ -672,7 +720,7 @@ trait Namers extends MethodSynthesis {
         m.updateAttachment(new ConstructorDefaultsAttachment(tree, null))
       }
       val owner = tree.symbol.owner
-      if (settings.lint.value && owner.isPackageObjectClass && !mods.isImplicit) {
+      if (settings.lint && owner.isPackageObjectClass && !mods.isImplicit) {
         context.unit.warning(tree.pos,
           "it is not recommended to define classes/objects inside of package objects.\n" +
           "If possible, define " + tree.symbol + " in " + owner.skipPackageObject + " instead."
@@ -690,22 +738,9 @@ trait Namers extends MethodSynthesis {
       validateCompanionDefs(tree)
     }
 
-    // this logic is needed in case typer was interrupted half
-    // way through and then comes back to do the tree again. In
-    // that case the definitions that were already attributed as
-    // well as any default parameters of such methods need to be
-    // re-entered in the current scope.
-    protected def enterExistingSym(sym: Symbol): Context = {
-      if (forInteractive && sym != null && sym.owner.isTerm) {
-        enterIfNotThere(sym)
-        if (sym.isLazy)
-          sym.lazyAccessor andAlso enterIfNotThere
-
-        for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
-          defAtt.defaultGetters foreach enterIfNotThere
-      }
-      this.context
-    }
+    // Hooks which are overridden in the presentation compiler
+    def enterExistingSym(sym: Symbol): Context = this.context
+    def enterIfNotThere(sym: Symbol) { }
 
     def enterSyntheticSym(tree: Tree): Symbol = {
       enterSym(tree)
@@ -715,41 +750,55 @@ trait Namers extends MethodSynthesis {
 
 // --- Lazy Type Assignment --------------------------------------------------
 
-    def initializeLowerBounds(tp: Type): Type = {
+    def findCyclicalLowerBound(tp: Type): Symbol = {
       tp match {
         case TypeBounds(lo, _) =>
           // check that lower bound is not an F-bound
-          for (TypeRef(_, sym, _) <- lo)
-            sym.initialize
+          // but carefully: class Foo[T <: Bar[_ >: T]] should be allowed
+          for (tp1 @ TypeRef(_, sym, _) <- lo) {
+            if (settings.breakCycles) {
+              if (!sym.maybeInitialize) {
+                log(s"Cycle inspecting $lo for possible f-bounds: ${sym.fullLocationString}")
+                return sym
+              }
+            }
+            else sym.initialize
+          }
         case _ =>
       }
-      tp
+      NoSymbol
     }
 
     def monoTypeCompleter(tree: Tree) = mkTypeCompleter(tree) { sym =>
+      // this early test is there to avoid infinite baseTypes when
+      // adding setters and getters --> bug798
+      // It is a def in an attempt to provide some insulation against
+      // uninitialized symbols misleading us. It is not a certainty
+      // this accomplishes anything, but performance is a non-consideration
+      // on these flag checks so it can't hurt.
+      def needsCycleCheck = sym.isNonClassType && !sym.isParameter && !sym.isExistential
       logAndValidate(sym) {
-        val tp = initializeLowerBounds(typeSig(tree))
+        val tp = typeSig(tree)
+
+        findCyclicalLowerBound(tp) andAlso { sym =>
+          if (needsCycleCheck) {
+            // neg/t1224:  trait C[T] ; trait A { type T >: C[T] <: C[C[T]] }
+            // To avoid an infinite loop on the above, we cannot break all cycles
+            log(s"Reinitializing info of $sym to catch any genuine cycles")
+            sym reset sym.info
+            sym.initialize
+          }
+        }
         sym setInfo {
           if (sym.isJavaDefined) RestrictJavaArraysMap(tp)
           else tp
         }
-        // this early test is there to avoid infinite baseTypes when
-        // adding setters and getters --> bug798
-        val needsCycleCheck = (sym.isAliasType || sym.isAbstractType) && !sym.isParameter
-        if (needsCycleCheck && !typer.checkNonCyclic(tree.pos, tp))
-          sym setInfo ErrorType
+        if (needsCycleCheck) {
+          log(s"Needs cycle check: ${sym.debugLocationString}")
+          if (!typer.checkNonCyclic(tree.pos, tp))
+            sym setInfo ErrorType
+        }
       }
-      // tree match {
-      //   case ClassDef(_, _, _, impl) =>
-      //     val parentsOK = (
-      //          treeInfo.isInterface(sym, impl.body)
-      //       || (sym eq ArrayClass)
-      //       || (sym isSubClass AnyValClass)
-      //     )
-      //     if (!parentsOK)
-      //       ensureParent(sym, AnyRefClass)
-      //   case _ => ()
-      // }
     }
 
     def moduleClassTypeCompleter(tree: ModuleDef) = {
@@ -764,7 +813,7 @@ trait Namers extends MethodSynthesis {
     def accessorTypeCompleter(tree: ValDef, isSetter: Boolean) = mkTypeCompleter(tree) { sym =>
       logAndValidate(sym) {
         sym setInfo {
-          val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
+          val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitTpe)
                    else NullaryMethodType(typeSig(tree))
           pluginsTypeSigAccessor(tp, typer, tree, sym)
         }
@@ -807,31 +856,28 @@ trait Namers extends MethodSynthesis {
         case _ =>
           false
       }
-
-      val tpe1 = dropRepeatedParamType(tpe.deconst)
-      val tpe2 = tpe1.widen
-
-      // This infers Foo.type instead of "object Foo"
-      // See Infer#adjustTypeArgs for the polymorphic case.
-      if (tpe.typeSymbolDirect.isModuleClass) tpe1
-      else if (sym.isVariable || sym.isMethod && !sym.hasAccessorFlag)
-        if (tpe2 <:< pt) tpe2 else tpe1
-      else if (isHidden(tpe)) tpe2
-      // In an attempt to make pattern matches involving method local vals
-      // compilable into switches, for a time I had a more generous condition:
-      //    `if (sym.isFinal || sym.isLocal) tpe else tpe1`
-      // This led to issues with expressions like classOf[List[_]] which apparently
-      // depend on being deconst-ed here, so this is again the original:
-      else if (!sym.isFinal) tpe1
-      else tpe
+      val shouldWiden = (
+           !tpe.typeSymbolDirect.isModuleClass // Infer Foo.type instead of "object Foo"
+        && (tpe.widen <:< pt)                  // Don't widen our way out of conforming to pt
+        && (   sym.isVariable
+            || sym.isMethod && !sym.hasAccessorFlag
+            || isHidden(tpe)
+           )
+      )
+      dropIllegalStarTypes(
+        if (shouldWiden) tpe.widen
+        else if (sym.isFinal) tpe    // "final val" allowed to retain constant type
+        else tpe.deconst
+      )
     }
     /** Computes the type of the body in a ValDef or DefDef, and
      *  assigns the type to the tpt's node.  Returns the type.
      */
     private def assignTypeToTree(tree: ValOrDefDef, defnTyper: Typer, pt: Type): Type = {
-      val rhsTpe =
-        if (tree.symbol.isTermMacro) defnTyper.computeMacroDefType(tree, pt)
-        else defnTyper.computeType(tree.rhs, pt)
+      val rhsTpe = tree match {
+        case ddef: DefDef if tree.symbol.isTermMacro => defnTyper.computeMacroDefType(ddef, pt)
+        case _ => defnTyper.computeType(tree.rhs, pt)
+      }
 
       val defnTpe = widenIfNecessary(tree.symbol, rhsTpe, pt)
       tree.tpt defineType defnTpe setPos tree.pos.focus
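
The rewritten `widenIfNecessary` above decides whether the inferred type of a definition is widened, merely deconst-ed, or kept as a constant type. A source-level sketch of the observable rules, with made-up names (`Cfg`, `Tag`, and so on) used only for illustration:

    object WideningSketch {
      object Cfg
      val ref      = Cfg          // module reference: inferred as Cfg.type, not widened
      final val Tag = 1           // "final val" retains the constant type Int(1)
      val size     = 2            // plain val: deconst-ed to Int
      var count    = 3            // var: widened to Int
      def next     = count + 1    // non-accessor def: result type widened to Int

      def main(args: Array[String]): Unit =
        println(Tag + size + count + next)   // 10
    }
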
@@ -841,7 +887,7 @@ trait Namers extends MethodSynthesis {
     // owner is the class with the self type
     def enterSelf(self: ValDef) {
       val ValDef(_, name, tpt, _) = self
-      if (self eq emptyValDef)
+      if (self eq noSelfType)
         return
 
       val hasName = name != nme.WILDCARD
@@ -851,7 +897,7 @@ trait Namers extends MethodSynthesis {
 
       val sym = (
         if (hasType || hasName) {
-          owner.typeOfThis = if (hasType) selfTypeCompleter(tpt) else owner.tpe
+          owner.typeOfThis = if (hasType) selfTypeCompleter(tpt) else owner.tpe_*
           val selfSym = owner.thisSym setPos self.pos
           if (hasName) selfSym setName name else selfSym
         }
@@ -866,16 +912,11 @@ trait Namers extends MethodSynthesis {
     private def templateSig(templ: Template): Type = {
       val clazz = context.owner
       def checkParent(tpt: Tree): Type = {
-        val tp = tpt.tpe
-        val inheritsSelf = tp.typeSymbol == owner
-        if (inheritsSelf)
-          InheritsItselfError(tpt)
-
-        if (inheritsSelf || tp.isError) AnyRefClass.tpe
-        else tp
+        if (tpt.tpe.isError) AnyRefTpe
+        else tpt.tpe
       }
 
-      val parents = typer.parentTypes(templ) map checkParent
+      val parents = typer.typedParentTypes(templ) map checkParent
 
       enterSelf(templ.self)
 
@@ -901,11 +942,10 @@ trait Namers extends MethodSynthesis {
         val modClass = companionSymbolOf(clazz, context).moduleClass
         modClass.attachments.get[ClassForCaseCompanionAttachment] foreach { cma =>
           val cdef = cma.caseClass
-          def hasCopy(decls: Scope) = (decls lookup nme.copy) != NoSymbol
+          def hasCopy = (decls containsName nme.copy) || parents.exists(_ member nme.copy exists)
+
           // SI-5956 needs (cdef.symbol == clazz): there can be multiple class symbols with the same name
-          if (cdef.symbol == clazz && !hasCopy(decls) &&
-                  !parents.exists(p => hasCopy(p.typeSymbol.info.decls)) &&
-                  !parents.flatMap(_.baseClasses).distinct.exists(bc => hasCopy(bc.info.decls)))
+          if (cdef.symbol == clazz && !hasCopy)
             addCopyMethod(cdef, templateNamer)
         }
       }
@@ -951,9 +991,9 @@ trait Namers extends MethodSynthesis {
       // Assign the moduleClass info (templateSig returns a ClassInfoType)
       val clazz = moduleSym.moduleClass
       clazz setInfo pluginsTp
-      // clazz.tpe returns a `ModuleTypeRef(clazz)`, a typeRef that links to the module class `clazz`
+      // clazz.tpe_* returns a `ModuleTypeRef(clazz)`, a typeRef that links to the module class `clazz`
       // (clazz.info would be the ClassInfoType, which is not what should be assigned to the module symbol)
-      clazz.tpe
+      clazz.tpe_*
     }
 
     /**
@@ -997,7 +1037,7 @@ trait Namers extends MethodSynthesis {
       var vparamSymss = enterValueParams(vparamss)
 
 
-      /**
+      /*
        * Creates a method type using tparamSyms and vparamsSymss as argument symbols and `respte` as result type.
        * All typeRefs to type skolems are replaced by references to the corresponding non-skolem type parameter,
        * so the resulting type is a valid external method type, it does not contain (references to) skolems.
@@ -1031,7 +1071,7 @@ trait Namers extends MethodSynthesis {
         res.substSym(tparamSkolems, tparamSyms)
       }
 
-      /**
+      /*
        * Creates a schematic method type which has WildcardTypes for non specified
        * return or parameter types. For instance, in `def f[T](a: T, b) = ...`, the
        * type schema is
@@ -1047,15 +1087,16 @@ trait Namers extends MethodSynthesis {
       }
 
       def overriddenSymbol(resTp: Type) = {
+        lazy val schema: Type = methodTypeSchema(resTp) // OPT create once. Must be lazy to avoid cycles in neg/t5093.scala
         intersectionType(methOwner.info.parents).nonPrivateMember(meth.name).filter { sym =>
-          sym != NoSymbol && (site.memberType(sym) matches methodTypeSchema(resTp))
+          sym != NoSymbol && (site.memberType(sym) matches schema)
         }
       }
       // TODO: see whether this or something similar would work instead:
       // def overriddenSymbol = meth.nextOverriddenSymbol
 
 
-      /**
+      /*
        * If `meth` doesn't have an explicit return type, extracts the return type from the method
       * overridden by `meth` (if there's a unique one). This type is later used as the expected
        * type for computing the type of the rhs. The resulting type references type skolems for
@@ -1093,6 +1134,9 @@ trait Namers extends MethodSynthesis {
             overriddenTp = overriddenTp.resultType
           }
 
+          // SI-7668 Substitute parameters from the parent method with those of the overriding method.
+          overriddenTp = overriddenTp.substSym(overridden.paramss.flatten, vparamss.flatten.map(_.symbol))
+
           overriddenTp match {
             case NullaryMethodType(rtpe) => overriddenTp = rtpe
             case MethodType(List(), rtpe) => overriddenTp = rtpe
@@ -1111,7 +1155,7 @@ trait Namers extends MethodSynthesis {
       }
 
       if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
-        tpt defineType context.enclClass.owner.tpe
+        tpt defineType context.enclClass.owner.tpe_*
         tpt setPos meth.pos.focus
       }
 
@@ -1136,7 +1180,7 @@ trait Namers extends MethodSynthesis {
         }
       }
 
-      addDefaultGetters(meth, vparamss, tparams, overriddenSymbol(methResTp))
+      addDefaultGetters(meth, ddef, vparamss, tparams, overriddenSymbol(methResTp))
 
       // fast track macros, i.e. macros defined inside the compiler, are hardcoded
       // hence we make use of that and let them have whatever right-hand side they need
@@ -1147,7 +1191,7 @@ trait Namers extends MethodSynthesis {
       // because @macroImpl annotation only gets assigned during typechecking
       // otherwise macro defs wouldn't be able to robustly coexist with their clients
       // because a client could be typechecked before a macro def that it uses
-      if (meth.isTermMacro) {
+      if (meth.isMacro) {
         typer.computeMacroDefType(ddef, resTpFromOverride)
       }
 
@@ -1178,7 +1222,12 @@ trait Namers extends MethodSynthesis {
      * typechecked, the corresponding param would not yet have the "defaultparam"
      * flag.
      */
-    private def addDefaultGetters(meth: Symbol, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
+    private def addDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
+      val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetAttrs(ddef.duplicate)
+      // having defs here is important to make sure that there's no sneaky tree sharing
+      // in methods with multiple default parameters
+      def rtparams = rtparams0.map(_.duplicate)
+      def rvparamss = rvparamss0.map(_.map(_.duplicate))
       val methOwner  = meth.owner
       val isConstr   = meth.isConstructor
       val overridden = if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol
@@ -1186,8 +1235,8 @@ trait Namers extends MethodSynthesis {
       // value parameters of the base class (whose defaults might be overridden)
       var baseParamss = (vparamss, overridden.tpe.paramss) match {
         // match empty and missing parameter list
-        case (Nil, List(Nil)) => Nil
-        case (List(Nil), Nil) => ListOfNil
+        case (Nil, ListOfNil) => Nil
+        case (ListOfNil, Nil) => ListOfNil
         case (_, paramss)     => paramss
       }
       assert(
@@ -1210,23 +1259,36 @@ trait Namers extends MethodSynthesis {
       //
       vparamss.foldLeft(Nil: List[List[ValDef]]) { (previous, vparams) =>
         assert(!overrides || vparams.length == baseParamss.head.length, ""+ meth.fullName + ", "+ overridden.fullName)
+        val rvparams = rvparamss(previous.length)
         var baseParams = if (overrides) baseParamss.head else Nil
-        for (vparam <- vparams) {
+        map2(vparams, rvparams)((vparam, rvparam) => {
           val sym = vparam.symbol
           // true if the corresponding parameter of the base class has a default argument
           val baseHasDefault = overrides && baseParams.head.hasDefault
           if (sym.hasDefault) {
-            // generate a default getter for that argument
+            // Create a "default getter", i.e. a DefDef that will calculate vparam.rhs
+            // for those who are going to call meth without providing an argument corresponding to vparam.
+            // After the getter is created, a corresponding synthetic symbol is created and entered into the parent namer.
+            //
+            // In the ideal world, this DefDef would be a simple one-liner that just returns vparam.rhs,
+            // but in scalac things are complicated in two different ways.
+            //
+            // 1) Because the underlying language is quite sophisticated, we must allow for those sophistications in our getter.
+            //    Namely: a) our getter has to copy type parameters from the associated method (or the associated class
+            //    if meth is a constructor), because vparam.rhs might refer to one of them, b) our getter has to copy
+            //    preceding value parameter lists from the associated method, because again vparam.rhs might refer to one of them.
+            //
+            // 2) Because we have already assigned symbols to type and value parameters that we have to copy, we must jump through
+            //    hoops in order to destroy them and allow subsequent naming to create new symbols for our getter. Previously this
+            //    was done in an overly brutal way akin to resetAllAttrs, but now we utilize a resetLocalAttrs-based approach.
+            //    Still far from ideal, but at least enables things like run/macro-default-params that were previously impossible.
+
             val oflag = if (baseHasDefault) OVERRIDE else 0
             val name = nme.defaultGetterName(meth.name, posCounter)
 
-            // Create trees for the defaultGetter. Uses tools from Unapplies.scala
-            var deftParams = tparams map copyUntyped[TypeDef]
-            val defvParamss = mmap(previous) { p =>
-              // in the default getter, remove the default parameter
-              val p1 = atPos(p.pos.focus) { ValDef(p.mods &~ DEFAULTPARAM, p.name, p.tpt.duplicate, EmptyTree) }
-              UnTyper.traverse(p1)
-              p1
+            var defTparams = rtparams
+            val defVparamss = mmap(rvparamss.take(previous.length)){ rvp =>
+              copyValDef(rvp)(mods = rvp.mods &~ DEFAULTPARAM, rhs = EmptyTree)
             }
 
             val parentNamer = if (isConstr) {
@@ -1248,7 +1310,8 @@ trait Namers extends MethodSynthesis {
                     return // fix #3649 (prevent crash in erroneous source code)
                 }
               }
-              deftParams = cdef.tparams map copyUntypedInvariant
+              val ClassDef(_, _, rtparams, _) = resetAttrs(cdef.duplicate)
+              defTparams = rtparams.map(rt => copyTypeDef(rt)(mods = rt.mods &~ (COVARIANT | CONTRAVARIANT)))
               nmr
             }
             else ownerNamer getOrElse {
@@ -1259,47 +1322,45 @@ trait Namers extends MethodSynthesis {
               nmr
             }
 
-            // If the parameter type mentions any type parameter of the method, let the compiler infer the
-            // return type of the default getter => allow "def foo[T](x: T = 1)" to compile.
-            // This is better than always using Wildcard for inferring the result type, for example in
-            //    def f(i: Int, m: Int => Int = identity _) = m(i)
-            // if we use Wildcard as expected, we get "Nothing => Nothing", and the default is not usable.
-            val names = deftParams map { case TypeDef(_, name, _, _) => name }
-            val subst = new TypeTreeSubstituter(names contains _)
-
-            val defTpt = subst(copyUntyped(vparam.tpt match {
-              // default getter for by-name params
-              case AppliedTypeTree(_, List(arg)) if sym.hasFlag(BYNAMEPARAM) => arg
-              case t => t
-            }))
-            val defRhs = copyUntyped(vparam.rhs)
+            val defTpt =
+              // don't mess with tpt's of case copy default getters, because assigning something other than TypeTree()
+              // will break the carefully orchestrated naming/typing logic that involves enterCopyMethod and caseClassCopyMeth
+              if (meth.isCaseCopy) TypeTree()
+              else {
+                // If the parameter type mentions any type parameter of the method, let the compiler infer the
+                // return type of the default getter => allow "def foo[T](x: T = 1)" to compile.
+                // This is better than always using Wildcard for inferring the result type, for example in
+                //    def f(i: Int, m: Int => Int = identity _) = m(i)
+                // if we use Wildcard as expected, we get "Nothing => Nothing", and the default is not usable.
+                // TODO: this is a very brittle approach; I sincerely hope that Denys's research into hygiene
+                //       will open the doors to a much better way of doing this kind of stuff
+                val tparamNames = defTparams map { case TypeDef(_, name, _, _) => name }
+                val eraseAllMentionsOfTparams = new TypeTreeSubstituter(tparamNames contains _)
+                eraseAllMentionsOfTparams(rvparam.tpt match {
+                  // default getter for by-name params
+                  case AppliedTypeTree(_, List(arg)) if sym.hasFlag(BYNAMEPARAM) => arg
+                  case t => t
+                })
+              }
+            val defRhs = rvparam.rhs
 
             val defaultTree = atPos(vparam.pos.focus) {
-              DefDef(
-                Modifiers(meth.flags & DefaultGetterFlags) | SYNTHETIC | DEFAULTPARAM | oflag,
-                name, deftParams, defvParamss, defTpt, defRhs)
+              DefDef(Modifiers(paramFlagsToDefaultGetter(meth.flags)) | oflag, name, defTparams, defVparamss, defTpt, defRhs)
             }
             if (!isConstr)
               methOwner.resetFlag(INTERFACE) // there's a concrete member now
             val default = parentNamer.enterSyntheticSym(defaultTree)
-            if (forInteractive && default.owner.isTerm) {
-              // save the default getters as attachments in the method symbol. if compiling the
-              // same local block several times (which can happen in interactive mode) we might
-              // otherwise not find the default symbol, because the second time it the method
-              // symbol will be re-entered in the scope but the default parameter will not.
-              val att = meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
-                case Some(att) => att.defaultGetters += default
-                case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default))
-              }
-            }
-          } else if (baseHasDefault) {
+            if (default.owner.isTerm)
+              saveDefaultGetter(meth, default)
+          }
+          else if (baseHasDefault) {
             // the parameter does not have a default itself, but the
             // corresponding parameter in the base class does.
             sym.setFlag(DEFAULTPARAM)
           }
           posCounter += 1
           if (overrides) baseParams = baseParams.tail
-        }
+        })
         if (overrides) baseParamss = baseParamss.tail
         previous :+ vparams
       }
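
The hunks above reorganize how default getters are synthesized: the getter duplicates the method's type parameters and any preceding value parameter lists (so the default expression can still refer to them), and for a type-parameter-dependent default such as `def foo[T](x: T = 1)` its result type is left to be inferred. A minimal source-level sketch of the user-visible behaviour; the hand-written `fDefault2` only stands in for the compiler's synthesized `f$default$2`-style getter:

    object DefaultGetterSketch {
      // the default of `b` refers to the preceding parameter list
      def f(a: Int)(b: Int = a + 1): Int = a * b

      // roughly what the namer synthesizes for it (illustrative name and shape)
      def fDefault2(a: Int): Int = a + 1

      // the case quoted in the comment above: the getter's result type is inferred
      def g[T](x: T = 1) = x

      def main(args: Array[String]): Unit = {
        println(f(3)())              // default used: 3 * 4 = 12
        println(f(3)(fDefault2(3)))  // equivalent explicit call
        println(g())                 // 1
      }
    }
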
@@ -1358,20 +1419,28 @@ trait Namers extends MethodSynthesis {
     private def importSig(imp: Import) = {
       val Import(expr, selectors) = imp
       val expr1 = typer.typedQualifier(expr)
-      typer checkStable expr1
+
       if (expr1.symbol != null && expr1.symbol.isRootPackage)
         RootImportError(imp)
 
       if (expr1.isErrorTyped)
         ErrorType
       else {
+        expr1 match {
+          case This(_) =>
+            // SI-8207 okay, typedIdent expands Ident(self) to C.this which doesn't satisfy the next case
+            // TODO should we change `typedIdent` not to expand to the `Ident` to a `This`?
+          case _ if treeInfo.isStableIdentifierPattern(expr1) =>
+          case _ =>
+            typer.TyperErrorGen.UnstableTreeError(expr1)
+        }
+
         val newImport = treeCopy.Import(imp, expr1, selectors).asInstanceOf[Import]
         checkSelectors(newImport)
-        transformed(imp) = newImport
+        context.unit.transformed(imp) = newImport
         // copy symbol and type attributes back into old expression
         // so that the structure builder will find it.
-        expr.symbol = expr1.symbol
-        expr.tpe = expr1.tpe
+        expr setSymbol expr1.symbol setType expr1.tpe
         ImportType(expr1)
       }
     }
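
`importSig` now replaces the old `checkStable` call with an explicit check that the import qualifier is a stable identifier pattern, with a carve-out for `C.this` (SI-8207). A small sketch of what that stability requirement means at the source level; `Config` is an illustrative name:

    object StableImportSketch {
      object Config { val name = "scala" }

      def main(args: Array[String]): Unit = {
        import Config.name        // fine: Config is a stable identifier
        println(name)

        // var cfg = Config
        // import cfg.name        // rejected: "stable identifier required"
      }
    }
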
@@ -1393,7 +1462,9 @@ trait Namers extends MethodSynthesis {
       if (!cdef.symbol.hasAbstractFlag)
         namer.enterSyntheticSym(caseModuleApplyMeth(cdef))
 
-      namer.enterSyntheticSym(caseModuleUnapplyMeth(cdef))
+      val primaryConstructorArity = treeInfo.firstConstructorArgs(cdef.impl.body).size
+      if (primaryConstructorArity <= MaxTupleArity)
+        namer.enterSyntheticSym(caseModuleUnapplyMeth(cdef))
     }
 
     def addCopyMethod(cdef: ClassDef, namer: Namer) {
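
`addApplyUnapply` above now synthesizes the companion `unapply` only when the primary constructor has at most `MaxTupleArity` parameters (2.11 permits case classes beyond 22 fields, but no tuple-returning `unapply` can be generated for them). A source-level sketch of the two companion members in the ordinary case, with an illustrative `User` class:

    object CaseCompanionSketch {
      case class User(name: String, age: Int)

      def main(args: Array[String]): Unit = {
        println(User.apply("ada", 36))           // same as User("ada", 36)
        println(User.unapply(User("ada", 36)))   // Some((ada,36))
      }
    }
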
@@ -1407,12 +1478,12 @@ trait Namers extends MethodSynthesis {
      */
     def typeSig(tree: Tree): Type = {
       // log("typeSig " + tree)
-      /** For definitions, transform Annotation trees to AnnotationInfos, assign
-       *  them to the sym's annotations. Type annotations: see Typer.typedAnnotated
-       *  We have to parse definition annotations here (not in the typer when traversing
-       *  the MemberDef tree): the typer looks at annotations of certain symbols; if
-       *  they were added only in typer, depending on the compilation order, they may
-       *  or may not be visible.
+      /* For definitions, transform Annotation trees to AnnotationInfos, assign
+       * them to the sym's annotations. Type annotations: see Typer.typedAnnotated
+       * We have to parse definition annotations here (not in the typer when traversing
+       * the MemberDef tree): the typer looks at annotations of certain symbols; if
+       * they were added only in typer, depending on the compilation order, they may
+       * or may not be visible.
        */
       def annotate(annotated: Symbol) = {
         // typeSig might be called multiple times, e.g. on a ValDef: val, getter, setter
@@ -1425,7 +1496,7 @@ trait Namers extends MethodSynthesis {
               annCtx.setReportErrors()
               // need to be lazy, #1782. beforeTyper to allow inferView in annotation args, SI-5892.
               AnnotationInfo lazily {
-                beforeTyper(newTyper(annCtx) typedAnnotation ann)
+                enteringTyper(newTyper(annCtx) typedAnnotation ann)
               }
             }
             if (ainfos.nonEmpty) {
@@ -1477,12 +1548,6 @@ trait Namers extends MethodSynthesis {
         tpe
     }
 
-    def ensureParent(clazz: Symbol, parent: Symbol) = {
-      val info0 = clazz.info
-      val info1 = includeParent(info0, parent)
-      if (info0 ne info1) clazz setInfo info1
-    }
-
     class LogTransitions[S](onEnter: S => String, onExit: S => String) {
       val enabled = settings.debug.value
       @inline final def apply[T](entity: S)(body: => T): T = {
@@ -1512,8 +1577,8 @@ trait Namers extends MethodSynthesis {
     private object RestrictJavaArraysMap extends TypeMap {
       def apply(tp: Type): Type = tp match {
         case TypeRef(pre, ArrayClass, List(elemtp))
-        if elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectClass.tpe) =>
-          TypeRef(pre, ArrayClass, List(intersectionType(List(elemtp, ObjectClass.tpe))))
+        if elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectTpe) =>
+          TypeRef(pre, ArrayClass, List(intersectionType(List(elemtp, ObjectTpe))))
         case _ =>
           mapOver(tp)
       }
@@ -1535,7 +1600,7 @@ trait Namers extends MethodSynthesis {
           AbstractMemberWithModiferError(sym, flag)
       }
       def checkNoConflict(flag1: Int, flag2: Int) {
-        if (sym hasAllFlags flag1 | flag2)
+        if (sym hasAllFlags flag1.toLong | flag2)
           IllegalModifierCombination(sym, flag1, flag2)
       }
       if (sym.isImplicit) {
@@ -1543,7 +1608,7 @@ trait Namers extends MethodSynthesis {
           fail(ImplicitConstr)
         if (!(sym.isTerm || (sym.isClass && !sym.isTrait)))
           fail(ImplicitNotTermOrClass)
-        if (sym.owner.isPackageClass)
+        if (sym.isTopLevel)
           fail(ImplicitAtToplevel)
       }
       if (sym.isClass) {
@@ -1609,7 +1674,7 @@ trait Namers extends MethodSynthesis {
     val tree: Tree
   }
 
-  def mkTypeCompleter(t: Tree)(c: Symbol => Unit) = new LockingTypeCompleter {
+  def mkTypeCompleter(t: Tree)(c: Symbol => Unit) = new LockingTypeCompleter with FlagAgnosticCompleter {
     val tree = t
     def completeImpl(sym: Symbol) = c(sym)
   }
@@ -1651,7 +1716,7 @@ trait Namers extends MethodSynthesis {
       // @M an abstract type's type parameters are entered.
       // TODO: change to isTypeMember ?
       if (defnSym.isAbstractType)
-        newNamerFor(ctx, tree) enterSyms tparams //@M
+        newNamer(ctx.makeNewScope(tree, tree.symbol)) enterSyms tparams //@M
       restp complete sym
     }
   }
@@ -1689,13 +1754,6 @@ trait Namers extends MethodSynthesis {
     }
   }
 
-  @deprecated("Use underlyingSymbol instead", "2.10.0")
-  def underlying(member: Symbol): Symbol = underlyingSymbol(member)
-  @deprecated("Use `companionSymbolOf` instead", "2.10.0")
-  def companionClassOf(module: Symbol, ctx: Context): Symbol = companionSymbolOf(module, ctx)
-  @deprecated("Use `companionSymbolOf` instead", "2.10.0")
-  def companionModuleOf(clazz: Symbol, ctx: Context): Symbol = companionSymbolOf(clazz, ctx)
-
   /** The companion class or companion module of `original`.
    *  Calling .companionModule does not work for classes defined inside methods.
    *
@@ -1705,11 +1763,23 @@ trait Namers extends MethodSynthesis {
    *  call this method?
    */
   def companionSymbolOf(original: Symbol, ctx: Context): Symbol = {
+    val owner = original.owner
+    // SI-7264 Force the info of owners from previous compilation runs.
+    //         Doing this generally would trigger cycles; that's why we also
+    //         use the lower-level scan through the current Context as a fallback.
+    if (!currentRun.compiles(owner)) owner.initialize
     original.companionSymbol orElse {
-      ctx.lookup(original.name.companionName, original.owner).suchThat(sym =>
+      ctx.lookup(original.name.companionName, owner).suchThat(sym =>
         (original.isTerm || sym.hasModuleFlag) &&
         (sym isCoDefinedWith original)
       )
     }
   }
+
+  /** A version of `Symbol#linkedClassOfClass` that works with local companions, ala `companionSymbolOf`. */
+  final def linkedClassOfClassOf(original: Symbol, ctx: Context): Symbol =
+    if (original.isModuleClass)
+      companionSymbolOf(original.sourceModule, ctx)
+    else
+      companionSymbolOf(original, ctx).moduleClass
 }
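
The `companionSymbolOf` fallback above exists because, as its comment notes, `Symbol#companionModule` does not work for companions defined inside a method, so the pair has to be found through the current `Context`. A small source-level sketch of that shape; all names are illustrative:

    object LocalCompanionSketch {
      def demo(): Int = {
        case class Point(x: Int, y: Int)
        object Point { val origin = new Point(0, 0) }   // companion defined in a method body
        Point.origin.x
      }
      def main(args: Array[String]): Unit = println(demo())   // 0
    }
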
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 70f2f41..dceb0a4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -8,7 +8,6 @@ package typechecker
 
 import symtab.Flags._
 import scala.collection.mutable
-import scala.ref.WeakReference
 import scala.reflect.ClassTag
 
 /**
@@ -20,6 +19,7 @@ trait NamesDefaults { self: Analyzer =>
   import global._
   import definitions._
   import NamesDefaultsErrorsGen._
+  import treeInfo.WildcardStarArg
 
   // Default getters of constructors are added to the companion object in the
   // typeCompleter of the constructor (methodSig). To compute the signature,
@@ -42,13 +42,11 @@ trait NamesDefaults { self: Analyzer =>
     blockTyper: Typer
   ) { }
 
-  val noApplyInfo = NamedApplyInfo(None, Nil, Nil, null)
-
-  def nameOf(arg: Tree) = arg match {
-    case AssignOrNamedArg(Ident(name), rhs) => Some(name)
-    case _ => None
+  private def nameOfNamedArg(arg: Tree) = Some(arg) collect { case AssignOrNamedArg(Ident(name), _) => name }
+  def isNamedArg(arg: Tree) = arg match {
+    case AssignOrNamedArg(Ident(_), _) => true
+    case _                             => false
   }
-  def isNamed(arg: Tree) = nameOf(arg).isDefined
 
   /** @param pos maps indices from old to new */
   def reorderArgs[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
@@ -58,13 +56,13 @@ trait NamesDefaults { self: Analyzer =>
   }
 
   /** @param pos maps indices from new to old (!) */
-  def reorderArgsInv[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
+  private def reorderArgsInv[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
     val argsArray = args.toArray
     (argsArray.indices map (i => argsArray(pos(i)))).toList
   }
 
   /** returns `true` if every element is equal to its index */
-  def isIdentity(a: Array[Int]) = (0 until a.length).forall(i => a(i) == i)
+  def allArgsArePositional(a: Array[Int]) = (0 until a.length).forall(i => a(i) == i)
 
   /**
    * Transform a function application into a Block, and assigns typer.context
@@ -107,14 +105,14 @@ trait NamesDefaults { self: Analyzer =>
    *  @return the transformed application (a Block) together with the NamedApplyInfo.
    *     if isNamedApplyBlock(tree), returns the existing context.namedApplyBlockInfo
    */
-  def transformNamedApplication(typer: Typer, mode: Int, pt: Type)
+  def transformNamedApplication(typer: Typer, mode: Mode, pt: Type)
                                (tree: Tree, argPos: Int => Int): Tree = {
     import typer._
     import typer.infer._
     val context = typer.context
     import context.unit
 
-    /**
+    /*
      * Transform a function into a block, passing context.namedApplyBlockInfo to
      * the new block as a side effect.
      *
@@ -164,18 +162,18 @@ trait NamesDefaults { self: Analyzer =>
 
       // never used for constructor calls, they always have a stable qualifier
       def blockWithQualifier(qual: Tree, selected: Name) = {
-        val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos) setInfo uncheckedBounds(qual.tpe) setPos (qual.pos.makeTransparent)
+        val sym = blockTyper.context.owner.newValue(unit.freshTermName(nme.QUAL_PREFIX), newFlags = ARTIFACT) setInfo uncheckedBounds(qual.tpe) setPos (qual.pos.makeTransparent)
         blockTyper.context.scope enter sym
         val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType)
         // it stays in Vegas: SI-5720, SI-5727
         qual changeOwner (blockTyper.context.owner -> sym)
 
         val newQual = atPos(qual.pos.focus)(blockTyper.typedQualifier(Ident(sym.name)))
-        var baseFunTransformed = atPos(baseFun.pos.makeTransparent) {
+        val baseFunTransformed = atPos(baseFun.pos.makeTransparent) {
           // setSymbol below is important because the 'selected' function might be overloaded. by
           // assigning the correct method symbol, typedSelect will just assign the type. the reason
           // to still call 'typed' is to correctly infer singleton types, SI-5259.
-          val selectPos = 
+          val selectPos =
             if(qual.pos.isRange && baseFun.pos.isRange) qual.pos.union(baseFun.pos).withStart(Math.min(qual.pos.end, baseFun.pos.end))
             else baseFun.pos
           val f = blockTyper.typedOperator(Select(newQual, selected).setSymbol(baseFun1.symbol).setPos(selectPos))
@@ -207,7 +205,7 @@ trait NamesDefaults { self: Analyzer =>
           if (module == NoSymbol) None
           else {
             val ref = atPos(pos.focus)(gen.mkAttributedRef(pre, module))
-            if (module.isStable && pre.isStable)    // fixes #4524. the type checker does the same for
+            if (treeInfo.admitsTypeSelection(ref))  // fixes #4524. the type checker does the same for
               ref.setType(singleType(pre, module))  // typedSelect, it calls "stabilize" on the result.
             Some(ref)
           }
@@ -262,7 +260,7 @@ trait NamesDefaults { self: Analyzer =>
       }
     }
 
-    /**
+    /*
      * For each argument (arg: T), create a local value
      *  x$n: T = arg
      *
@@ -284,17 +282,17 @@ trait NamesDefaults { self: Analyzer =>
           val repeated = isScalaRepeatedParamType(paramTpe)
           val argTpe = (
             if (repeated) arg match {
-              case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
-              case _                                       => seqType(arg.tpe)
+              case WildcardStarArg(expr) => expr.tpe
+              case _                     => seqType(arg.tpe)
             }
             else {
               // TODO In 83c9c764b, we tried to use a stable type here to fix SI-7234. But the resulting TypeTree over a
-              //      singleton type without an original TypeTree fails to retypecheck after a resetLocalAttrs (SI-7516),
+              //      singleton type without an original TypeTree fails to retypecheck after a resetAttrs (SI-7516),
               //      which is important for (at least) macros.
               arg.tpe
             }
           ).widen // have to widen or types inferred from literal defaults will be singletons
-          val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo {
+          val s = context.owner.newValue(unit.freshTermName(nme.NAMEDARG_PREFIX), arg.pos, newFlags = ARTIFACT) setInfo {
             val tp = if (byName) functionType(Nil, argTpe) else argTpe
             uncheckedBounds(tp)
           }
@@ -311,11 +309,8 @@ trait NamesDefaults { self: Analyzer =>
             } else {
               new ChangeOwnerTraverser(context.owner, sym) traverse arg // fixes #4502
               if (repeated) arg match {
-                case Typed(expr, Ident(tpnme.WILDCARD_STAR)) =>
-                  expr
-                case _ =>
-                  val factory = Select(gen.mkAttributedRef(SeqModule), nme.apply)
-                  blockTyper.typed(Apply(factory, List(resetLocalAttrs(arg))))
+                case WildcardStarArg(expr) => expr
+                case _                     => blockTyper typed gen.mkSeqApply(resetAttrs(arg))
               } else arg
             }
           Some(atPos(body.pos)(ValDef(sym, body).setType(NoType)))
@@ -334,7 +329,7 @@ trait NamesDefaults { self: Analyzer =>
           assert(isNamedApplyBlock(transformedFun), transformedFun)
           val NamedApplyInfo(qual, targs, vargss, blockTyper) =
             context.namedApplyBlockInfo.get._2
-          val existingBlock @ Block(stats, funOnly) = transformedFun
+          val Block(stats, funOnly) = transformedFun
 
           // type the application without names; put the arguments in definition-site order
           val typedApp = doTypedApply(tree, funOnly, reorderArgs(namelessArgs, argPos), mode, pt)
@@ -382,7 +377,9 @@ trait NamesDefaults { self: Analyzer =>
     }
   }
 
-  def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name] = nameOf _): (List[Symbol], Boolean) = {
+  def makeNamedTypes(syms: List[Symbol]) = syms map (sym => NamedType(sym.name, sym.tpe))
+
+  def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name] = nameOfNamedArg _): (List[Symbol], Boolean) = {
     val namedArgs = args.dropWhile(arg => {
       val n = argName(arg)
       n.isEmpty || params.forall(p => p.name != n.get)
@@ -417,7 +414,7 @@ trait NamesDefaults { self: Analyzer =>
           // TODO #3649 can create spurious errors when companion object is gone (because it becomes unlinked from scope)
           if (defGetter == NoSymbol) None // prevent crash in erroneous trees, #3649
           else {
-            var default1 = qual match {
+            var default1: Tree = qual match {
               case Some(q) => gen.mkAttributedSelect(q.duplicate, defGetter)
               case None    => gen.mkAttributedRef(defGetter)
 
@@ -463,20 +460,6 @@ trait NamesDefaults { self: Analyzer =>
     } else NoSymbol
   }
 
-  private def savingUndeterminedTParams[T](context: Context)(fn: List[Symbol] => T): T = {
-    val savedParams    = context.extractUndetparams()
-    val savedReporting = context.ambiguousErrors
-
-    context.setAmbiguousErrors(false)
-    try fn(savedParams)
-    finally {
-      context.setAmbiguousErrors(savedReporting)
-      //@M note that we don't get here when an ambiguity was detected (during the computation of res),
-      // as errorTree throws an exception
-      context.undetparams = savedParams
-    }
-  }
-
   /** A full type check is very expensive; let's make sure there's a name
    *  somewhere which could potentially be ambiguous before we go that route.
    */
@@ -491,12 +474,10 @@ trait NamesDefaults { self: Analyzer =>
       //   def f[T](x: T) = x
       //   var x = 0
       //   f(x = 1)   <<  "x = 1" typechecks with expected type WildcardType
-      savingUndeterminedTParams(context) { udp =>
+      val udp = context.undetparams
+      context.savingUndeterminedTypeParams(reportAmbiguous = false) {
         val subst = new SubstTypeMap(udp, udp map (_ => WildcardType)) {
-          override def apply(tp: Type): Type = super.apply(tp match {
-            case TypeRef(_, ByNameParamClass, x :: Nil) => x
-            case _                                      => tp
-          })
+          override def apply(tp: Type): Type = super.apply(dropByName(tp))
         }
         // This throws an exception which is caught in `tryTypedApply` (as it
         // uses `silent`) - unfortunately, tryTypedApply recovers from the
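// Editorial sketch (not part of the patch): transformNamedApplication above
// rewrites a named/default application into a block of synthetic vals (the
// x$ prefix, now nme.NAMEDARG_PREFIX) so arguments are evaluated in call-site
// order and then passed positionally. Roughly:
object NamedArgsSketch {
  def resize(width: Int, height: Int = 10) = (width, height)

  // what the compiler generates for `resize(height = 5, width = 2)`,
  // with illustrative names standing in for the synthetic x$n temporaries:
  def desugared = {
    val arg1 = 5  // height, evaluated first because it comes first at the call site
    val arg2 = 2  // width
    resize(arg2, arg1)
  }
}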
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
new file mode 100644
index 0000000..cf3f265
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
@@ -0,0 +1,376 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala
+package tools
+package nsc
+package typechecker
+
+import scala.collection.mutable
+import symtab.Flags
+import Mode._
+
+ /**
+ *
+ *  A pattern match such as
+ *
+ *    x match { case Foo(a, b) => ...}
+ *
+ *  Might match an instance of any of the following definitions of Foo.
+ *  Note the analogous treatment between case classes and unapplies.
+ *
+ *    case class Foo(xs: Int*)
+ *    case class Foo(a: Int, xs: Int*)
+ *    case class Foo(a: Int, b: Int)
+ *    case class Foo(a: Int, b: Int, xs: Int*)
+ *
+ *    object Foo { def unapplySeq(x: Any): Option[Seq[Int]] }
+ *    object Foo { def unapplySeq(x: Any): Option[(Int, Seq[Int])] }
+ *    object Foo { def unapply(x: Any): Option[(Int, Int)] }
+ *    object Foo { def unapplySeq(x: Any): Option[(Int, Int, Seq[Int])] }
+ */
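// Editorial sketch (not part of the original file): the "analogous treatment"
// noted above means the same pattern shape matches whether Foo is a case class
// with a repeated parameter or an object with a hand-written unapplySeq.
object RepeatedExtractorSketch {
  case class Bar(a: Int, xs: Int*)
  object Baz { def unapplySeq(b: Bar): Option[(Int, Seq[Int])] = Some((b.a, b.xs)) }

  def demo(b: Bar): Int = b match {
    case Baz(a, x, y)      => a + x + y     // one fixed and two repeated elements
    case Bar(a, rest @ _*) => a + rest.sum  // case-class form with a vararg tail
  }
}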
+
+trait PatternTypers {
+  self: Analyzer =>
+
+  import global._
+  import definitions._
+
+  private object FixedAndRepeatedTypes {
+    def unapply(types: List[Type]) = types match {
+      case init :+ last if isRepeatedParamType(last) => Some((init, dropRepeated(last)))
+      case _                                         => Some((types, NoType))
+    }
+  }
+
+  trait PatternTyper {
+    self: Typer =>
+
+    import TyperErrorGen._
+    import infer._
+
+    private def unit = context.unit
+
+    // If the tree's symbol's type does not define an extractor, maybe the tree's type does.
+    // This is the case when we encounter an arbitrary tree as the target of an unapply call
+    // (rather than something that looks like a constructor call.) (For now, this only happens
+    // due to wrapClassTagUnapply, but when we support parameterized extractors, it will become
+    // more commonplace.)
+    private def hasUnapplyMember(tpe: Type): Boolean   = reallyExists(unapplyMember(tpe))
+    private def hasUnapplyMember(sym: Symbol): Boolean = hasUnapplyMember(sym.tpe_*)
+    private def hasUnapplyMember(fun: Tree): Boolean   = hasUnapplyMember(fun.symbol) || hasUnapplyMember(fun.tpe)
+
+    // ad-hoc overloading resolution to deal with unapplies and case class constructors
+    // If some but not all alternatives survive filtering the tree's symbol with `p`,
+    // then update the tree's symbol and type to exclude the filtered out alternatives.
+    private def inPlaceAdHocOverloadingResolution(fun: Tree)(p: Symbol => Boolean): Tree = fun.symbol filter p match {
+      case sym if sym.exists && (sym ne fun.symbol) => fun setSymbol sym modifyType (tp => filterOverloadedAlts(tp)(p))
+      case _                                        => fun
+    }
+    private def filterOverloadedAlts(tpe: Type)(p: Symbol => Boolean): Type = tpe match {
+      case OverloadedType(pre, alts) => overloadedType(pre, alts filter p)
+      case tp                        => tp
+    }
+
+    def typedConstructorPattern(fun0: Tree, pt: Type): Tree = {
+      // Do some ad-hoc overloading resolution and update the tree's symbol and type
+      // do not update the symbol if the tree's symbol's type does not define an unapply member
+      // (e.g. since it's some method that returns an object with an unapply member)
+      val fun         = inPlaceAdHocOverloadingResolution(fun0)(hasUnapplyMember)
+      val caseClass   = fun.tpe.typeSymbol.linkedClassOfClass
+      val member      = unapplyMember(fun.tpe)
+      def resultType  = (fun.tpe memberType member).finalResultType
+      def isEmptyType = resultOfMatchingMethod(resultType, nme.isEmpty)()
+      def isOkay      = (
+           resultType.isErroneous
+        || (resultType <:< BooleanTpe)
+        || (isEmptyType <:< BooleanTpe)
+        || member.isMacro
+        || member.isOverloaded // the whole overloading situation is off the rails
+      )
+
+      // Dueling test cases: pos/overloaded-unapply.scala, run/case-class-23.scala, pos/t5022.scala
+      // A case class with 23+ params has no unapply method.
+      // A case class constructor may be overloaded with unapply methods in the companion.
+      if (caseClass.isCase && !member.isOverloaded)
+        logResult(s"convertToCaseConstructor($fun, $caseClass, pt=$pt)")(convertToCaseConstructor(fun, caseClass, pt))
+      else if (!reallyExists(member))
+        CaseClassConstructorError(fun, s"${fun.symbol} is not a case class, nor does it have an unapply/unapplySeq member")
+      else if (isOkay)
+        fun
+      else if (isEmptyType == NoType)
+        CaseClassConstructorError(fun, s"an unapply result must have a member `def isEmpty: Boolean`")
+      else
+        CaseClassConstructorError(fun, s"an unapply result must have a member `def isEmpty: Boolean` (found: def isEmpty: $isEmptyType)")
+    }
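// Editorial sketch (not part of the original file): the isEmpty check above is
// what admits 2.11-style "name-based" extractors, whose unapply result type is
// neither Boolean nor Option but exposes `isEmpty` and `get`:
object NonNegative {
  final class Result(val get: Int) { def isEmpty: Boolean = get < 0 }
  def unapply(x: Int): Result = new Result(x)
}
// usage: 5 match { case NonNegative(n) => n; case _ => -1 }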
+
+    def typedArgsForFormals(args: List[Tree], formals: List[Type], mode: Mode): List[Tree] = {
+      def typedArgWithFormal(arg: Tree, pt: Type) = {
+        val newMode = if (isByNameParamType(pt)) mode.onlySticky else mode.onlySticky | BYVALmode
+        typedArg(arg, mode, newMode, dropByName(pt))
+      }
+      val FixedAndRepeatedTypes(fixed, elem) = formals
+      val front = (args, fixed).zipped map typedArgWithFormal
+      def rest  = context withinStarPatterns (args drop front.length map (typedArgWithFormal(_, elem)))
+
+      elem match {
+        case NoType => front
+        case _      => front ::: rest
+      }
+    }
+
+    private def boundedArrayType(bound: Type): Type = {
+      val tparam = context.owner freshExistential "" setInfo (TypeBounds upper bound)
+      newExistentialType(tparam :: Nil, arrayType(tparam.tpe_*))
+    }
+
+    protected def typedStarInPattern(tree: Tree, mode: Mode, pt: Type) = {
+      val Typed(expr, tpt) = tree
+      val exprTyped = typed(expr, mode)
+      val baseClass = exprTyped.tpe.typeSymbol match {
+        case ArrayClass => ArrayClass
+        case _          => SeqClass
+      }
+      val starType = baseClass match {
+        case ArrayClass if isPrimitiveValueType(pt) || !isFullyDefined(pt) => arrayType(pt)
+        case ArrayClass                                                    => boundedArrayType(pt)
+        case _                                                             => seqType(pt)
+      }
+      val exprAdapted = adapt(exprTyped, mode, starType)
+      exprAdapted.tpe baseType baseClass match {
+        case TypeRef(_, _, elemtp :: Nil) => treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp
+        case _                            => setError(tree)
+      }
+    }
+
+    protected def typedInPattern(tree: Typed, mode: Mode, pt: Type) = {
+      val Typed(expr, tpt) = tree
+      val tptTyped  = typedType(tpt, mode)
+      val tpe       = tptTyped.tpe
+      val exprTyped = typed(expr, mode, tpe.deconst)
+      val extractor = extractorForUncheckedType(tpt.pos, tpe)
+
+      val canRemedy = tpe match {
+        case RefinedType(_, decls) if !decls.isEmpty                 => false
+        case RefinedType(parents, _) if parents exists isUncheckable => false
+        case _                                                       => extractor.nonEmpty
+      }
+
+      val ownType   = inferTypedPattern(tptTyped, tpe, pt, canRemedy)
+      val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped) setType ownType
+
+      extractor match {
+        case EmptyTree => treeTyped
+        case _         => wrapClassTagUnapply(treeTyped, extractor, tpe)
+      }
+    }
+    private class VariantToSkolemMap extends TypeMap(trackVariance = true) {
+      private val skolemBuffer = mutable.ListBuffer[TypeSymbol]()
+
+      // !!! FIXME - skipping this when variance.isInvariant allows unsoundness, see SI-5189
+      // Test case which presently requires the exclusion is run/gadts.scala.
+      def eligible(tparam: Symbol) = (
+           tparam.isTypeParameterOrSkolem
+        && tparam.owner.isTerm
+        && (settings.strictInference || !variance.isInvariant)
+      )
+
+      def skolems = try skolemBuffer.toList finally skolemBuffer.clear()
+      def apply(tp: Type): Type = mapOver(tp) match {
+        case tp @ TypeRef(NoPrefix, tpSym, Nil) if eligible(tpSym) =>
+          val bounds = (
+            if (variance.isInvariant) tpSym.tpeHK.bounds
+            else if (variance.isPositive) TypeBounds.upper(tpSym.tpeHK)
+            else TypeBounds.lower(tpSym.tpeHK)
+          )
+          // origin must be the type param so we can deskolemize
+          val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?" + tpSym.name), tpSym, bounds)
+          skolemBuffer += skolem
+          logResult(s"Created gadt skolem $skolem: ${skolem.tpe_*} to stand in for $tpSym")(skolem.tpe_*)
+        case tp1 => tp1
+      }
+    }
+
+    /*
+     * To deal with the type slack between actual (run-time) types and statically known types, for each abstract type T,
+     * reflect its variance as a skolem that is upper-bounded by T (covariant position), or lower-bounded by T (contravariant).
+     *
+     * Consider the following example:
+     *
+     *  class AbsWrapperCov[+A]
+     *  case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
+     *
+     *  def unwrap[T](x: AbsWrapperCov[T]): Wrapped[T] = x match {
+     *    case Wrapper(wrapped) => // Wrapper's type parameter must not be assumed to be equal to T, it's *upper-bounded* by it
+     *      wrapped // : Wrapped[_ <: T]
+     *  }
+     *
+     * this method should type check if and only if Wrapped is covariant in its type parameter
+     *
+     * when inferring Wrapper's type parameter B from x's type AbsWrapperCov[T],
+     * we must take into account that x's actual type is AbsWrapperCov[Tactual] forSome {type Tactual <: T}
+     * as AbsWrapperCov is covariant in A -- in other words, we must not assume we know T exactly, all we know is its upper bound
+     *
+     * since method application is the only way to generate this slack between run-time and compile-time types (TODO: right!?),
+     * we can simply replace skolems that represent method type parameters as seen from the method's body
+     * by other skolems that are (upper/lower)-bounded by that type-parameter skolem
+     * (depending on the variance position of the skolem in the statically assumed type of the scrutinee, pt)
+     *
+     * see test/files/../t5189*.scala
+     */
+    private def convertToCaseConstructor(tree: Tree, caseClass: Symbol, ptIn: Type): Tree = {
+      // TODO SI-7886 / SI-5900 This is well intentioned but doesn't quite hit the nail on the head.
+      //      For now, I've put it completely behind -Xstrict-inference.
+      val untrustworthyPt = settings.strictInference && (
+           ptIn =:= AnyTpe
+        || ptIn =:= NothingTpe
+        || ptIn.typeSymbol != caseClass
+      )
+      val variantToSkolem     = new VariantToSkolemMap
+      val caseClassType       = tree.tpe.prefix memberType caseClass
+      val caseConstructorType = caseClassType memberType caseClass.primaryConstructor
+      val tree1               = TypeTree(caseConstructorType) setOriginal tree
+      val pt                  = if (untrustworthyPt) caseClassType else ptIn
+
+      // have to open up the existential and put the skolems in scope
+      // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance)
+      val ptSafe   = logResult(s"case constructor from (${tree.summaryString}, $caseClassType, $pt)")(variantToSkolem(pt))
+      val freeVars = variantToSkolem.skolems
+
+      // use "tree" for the context, not context.tree: don't make another CaseDef context,
+      // as instantiateTypeVar's bounds would end up there
+      val ctorContext = context.makeNewScope(tree, context.owner)
+      freeVars foreach ctorContext.scope.enter
+      newTyper(ctorContext).infer.inferConstructorInstance(tree1, caseClass.typeParams, ptSafe)
+
+      // simplify types without losing safety,
+      // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems
+      val extrapolator = new ExistentialExtrapolation(freeVars)
+      def extrapolate(tp: Type) = extrapolator extrapolate tp
+
+      // once the containing CaseDef has been type checked (see typedCase),
+      // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
+      tree1 modifyType {
+        case MethodType(ctorArgs, restpe) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
+          copyMethodType(tree1.tpe, ctorArgs map (_ modifyInfo extrapolate), extrapolate(restpe)) // no need to clone ctorArgs, this is OUR method type
+        case tp => tp
+      }
+    }
+
+    def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
+      def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
+      def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
+
+      if (args.length > MaxTupleArity)
+        return duplErrorTree(TooManyArgsPatternError(fun))
+
+      def freshArgType(tp: Type): Type = tp match {
+        case MethodType(param :: _, _) => param.tpe
+        case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, freshArgType(restpe))(polyType)
+        case OverloadedType(_, _)      => OverloadedUnapplyError(fun) ; ErrorType
+        case _                         => UnapplyWithSingleArgError(fun) ; ErrorType
+      }
+      val unapplyMethod    = unapplyMember(fun.tpe)
+      val unapplyType      = fun.tpe memberType unapplyMethod
+      val unapplyParamType = firstParamType(unapplyType)
+      def isSeq            = unapplyMethod.name == nme.unapplySeq
+
+      def extractor     = extractorForUncheckedType(fun.pos, unapplyParamType)
+      def canRemedy     = unapplyParamType match {
+        case RefinedType(_, decls) if !decls.isEmpty                 => false
+        case RefinedType(parents, _) if parents exists isUncheckable => false
+        case _                                                       => extractor.nonEmpty
+      }
+
+      def freshUnapplyArgType(): Type = {
+        val GenPolyType(freeVars, unappFormal) = freshArgType(unapplyType.skolemizeExistential(context.owner, tree))
+        val unapplyContext = context.makeNewScope(context.tree, context.owner)
+        freeVars foreach unapplyContext.scope.enter
+        val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy)
+        // turn any unresolved type variables in freevars into existential skolems
+        val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
+        pattp.substSym(freeVars, skolems)
+      }
+
+      val unapplyArg = (
+        context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, Flags.SYNTHETIC) setInfo (
+          if (isApplicableSafe(Nil, unapplyType, pt :: Nil, WildcardType)) pt
+          else freshUnapplyArgType()
+        )
+      )
+      val unapplyArgTree = Ident(unapplyArg) updateAttachment SubpatternsAttachment(args)
+
+      // clearing the type is necessary so that ref will be stabilized; see bug 881
+      val fun1 = typedPos(fun.pos)(Apply(Select(fun.clearType(), unapplyMethod), unapplyArgTree :: Nil))
+
+      def makeTypedUnApply() = {
+        // the union of the expected type and the inferred type of the argument to unapply
+        val glbType        = glb(ensureFullyDefined(pt) :: unapplyArg.tpe_* :: Nil)
+        val wrapInTypeTest = canRemedy && !(fun1.symbol.owner isNonBottomSubClass ClassTagClass)
+        val formals        = patmat.alignPatterns(fun1, args).unexpandedFormals
+        val args1          = typedArgsForFormals(args, formals, mode)
+        val result         = UnApply(fun1, args1) setPos tree.pos setType glbType
+
+        if (wrapInTypeTest)
+          wrapClassTagUnapply(result, extractor, glbType)
+        else
+          result
+      }
+
+      if (fun1.tpe.isErroneous)
+        duplErrTree
+      else if (unapplyMethod.isMacro && !fun1.isInstanceOf[Apply]) {
+        if (isBlackbox(unapplyMethod)) duplErrorTree(BlackboxExtractorExpansion(tree))
+        else duplErrorTree(WrongShapeExtractorExpansion(tree))
+      } else
+        makeTypedUnApply()
+    }
+
+    def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = {
+      // TODO: disable when in unchecked match
+      // we don't create a new Context for a Match, so find the CaseDef,
+      // then go out one level and navigate back to the match that has this case
+      val args = List(uncheckedPattern)
+      val app  = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args))
+      // must call doTypedUnapply directly, as otherwise we get undesirable rewrites
+      // and re-typechecks of the target of the unapply call in PATTERNmode,
+      // this breaks down when the classTagExtractor (which defines the unapply member) is not a simple reference to an object,
+      // but an arbitrary tree as is the case here
+      val res = doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt)
+
+      log(sm"""
+        |wrapClassTagUnapply {
+        |  pattern: $uncheckedPattern
+        |  extract: $classTagExtractor
+        |       pt: $pt
+        |      res: $res
+        |}""".trim)
+
+      res
+    }
+
+    // if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test
+    // return the corresponding extractor (an instance of ClassTag[`pt`])
+    def extractorForUncheckedType(pos: Position, pt: Type): Tree = {
+      if (isPastTyper || (pt eq NoType)) EmptyTree else {
+        pt match {
+          case RefinedType(parents, decls) if !decls.isEmpty || (parents exists isUncheckable) => return EmptyTree
+          case _                                                                               =>
+        }
+        // only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
+        // but at least make a proper type before passing it elsewhere
+        val pt1 = pt.dealiasWiden match {
+          case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies
+          case pt1                                           => pt1
+        }
+        if (isCheckable(pt1)) EmptyTree
+        else resolveClassTag(pos, pt1) match {
+          case tree if unapplyMember(tree.tpe).exists => tree
+          case _                                      => devWarning(s"Cannot create runtime type test for $pt1") ; EmptyTree
+        }
+      }
+    }
+  }
+}
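// Editorial sketch (not part of the patch): extractorForUncheckedType and
// wrapClassTagUnapply above are the machinery that lets a ClassTag in scope
// turn an otherwise-unchecked type pattern into a runtime-checked one:
import scala.reflect.ClassTag

object ClassTagPatternSketch {
  def firstOfType[T: ClassTag](xs: List[Any]): Option[T] =
    xs collectFirst { case t: T => t }  // checked at runtime via the implicit ClassTag[T]
}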
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index fea234d..b166bf9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -59,7 +59,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
   override def changesBaseClasses = false
 
   override def transformInfo(sym: Symbol, tp: Type): Type = {
-    if (sym.isModule && !sym.isStatic) sym setFlag (lateMETHOD | STABLE)
+    // !!! This is a sketchy way to do things.
+    // It would be better to replace the module symbol with a method symbol
+    // rather than creating this module/method hybrid which must be special
+    // cased all over the place. Look for the call sites which use(d) some
+    // variation of "isMethod && !isModule", which to an observer looks like
+    // a nonsensical condition. (It is now "isModuleNotMethod".)
+    if (sym.isModule && !sym.isStatic) {
+      sym setFlag lateMETHOD | STABLE
+      // Note that this as far as we can see it works equally well
+      // to set the METHOD flag here and dump lateMETHOD, but it does
+      // mean that under separate compilation the typer will see
+      // modules as methods (albeit stable ones with singleton types.)
+      // So for now lateMETHOD lives while we try to convince ourselves
+      // we can live without it or deliver that info some other way.
+      log(s"Stabilizing module method for ${sym.fullLocationString}")
+    }
     super.transformInfo(sym, tp)
   }
 
@@ -71,7 +86,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
     if (sym.hasAccessBoundary) "" + sym.privateWithin.name else ""
   )
 
-  def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.normalize, tp2.normalize) match {
+  def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.dealiasWiden, tp2.dealiasWiden) match {
     case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) =>
       rtp1 <:< rtp2
     case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) =>
@@ -95,28 +110,29 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
 
   class RefCheckTransformer(unit: CompilationUnit) extends Transformer {
 
-    var localTyper: analyzer.Typer = typer;
+    var localTyper: analyzer.Typer = typer
     var currentApplication: Tree = EmptyTree
     var inPattern: Boolean = false
+    @inline final def savingInPattern[A](body: => A): A = {
+      val saved = inPattern
+      try body finally inPattern = saved
+    }
+
     var checkedCombinations = Set[List[Type]]()
 
     // only one overloaded alternative is allowed to define default arguments
-    private def checkOverloadedRestrictions(clazz: Symbol): Unit = {
+    private def checkOverloadedRestrictions(clazz: Symbol, defaultClass: Symbol): Unit = {
       // Using the default getters (such as methodName$default$1) as a cheap way of
       // finding methods with default parameters. This way, we can limit the members to
       // those with the DEFAULTPARAM flag, and infer the methods. Looking for the methods
       // directly requires inspecting the parameter list of every one. That modification
       // shaved 95% off the time spent in this method.
-      val defaultGetters     = clazz.info.findMembers(0L, DEFAULTPARAM)
+      val defaultGetters     = defaultClass.info.findMembers(excludedFlags = PARAM, requiredFlags = DEFAULTPARAM)
       val defaultMethodNames = defaultGetters map (sym => nme.defaultGetterToMethod(sym.name))
 
       defaultMethodNames.toList.distinct foreach { name =>
-        val methods      = clazz.info.findMember(name, 0L, METHOD, false).alternatives
-        def hasDefaultParam(tpe: Type): Boolean = tpe match {
-          case MethodType(params, restpe) => (params exists (_.hasDefault)) || hasDefaultParam(restpe)
-          case _ => false
-        }
-        val haveDefaults = methods filter (sym => hasDefaultParam(sym.info) && !nme.isProtectedAccessorName(sym.name))
+        val methods      = clazz.info.findMember(name, 0L, requiredFlags = METHOD, stableOnly = false).alternatives
+        val haveDefaults = methods filter (sym => mexists(sym.info.paramss)(_.hasDefault) && !nme.isProtectedAccessorName(sym.name))
 
         if (haveDefaults.lengthCompare(1) > 0) {
           val owners = haveDefaults map (_.owner)
@@ -133,7 +149,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
           }
         }
       }
-      if (settings.lint.value) {
+
+      // Check for doomed attempt to overload applyDynamic
+      if (clazz isSubClass DynamicClass) {
+        for ((_, m1 :: m2 :: _) <- (clazz.info member nme.applyDynamic).alternatives groupBy (_.typeParams.length)) {
+          unit.error(m1.pos, "implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)")
+        }
+      }
+
+      // This has become noisy with implicit classes.
+      if (settings.lint && settings.developer) {
         clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym =>
           // implicit classes leave both a module symbol and a method symbol as residue
           val alts = clazz.info.decl(sym.name).alternatives filterNot (_.isModule)
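// Editorial sketch (not part of the patch): checkOverloadedRestrictions above
// enforces that at most one overloaded alternative of a method may declare
// default arguments:
class OverloadedDefaultsSketch {
  def f(x: Int = 1): Int = x
  // def f(s: String = "a"): String = s  // rejected: a second alternative with a default argument
}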
@@ -187,7 +212,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
             val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE)
 
             // Delaying calling memberType as long as possible
-            if (inherited ne NoSymbol) {
+            if (inherited.exists) {
               val jtpe = toJavaRepeatedParam(self memberType member)
               // this is a bit tortuous: we look for non-private members or bridges
               // if we find a bridge everything is OK. If we find another member,
@@ -241,7 +266,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
 
       case class MixinOverrideError(member: Symbol, msg: String)
 
-      var mixinOverrideErrors = new ListBuffer[MixinOverrideError]()
+      val mixinOverrideErrors = new ListBuffer[MixinOverrideError]()
 
       def printMixinOverrideErrors() {
         mixinOverrideErrors.toList match {
@@ -273,21 +298,26 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
          else "")
       }
 
-      /** Check that all conditions for overriding `other` by `member`
-       *  of class `clazz` are met.
+      /* Check that all conditions for overriding `other` by `member`
+       * of class `clazz` are met.
        */
-      def checkOverride(member: Symbol, other: Symbol) {
+      def checkOverride(pair: SymbolPair) {
+        import pair._
+        val member   = low
+        val other    = high
+        def memberTp = lowType
+        def otherTp  = highType
+
         debuglog("Checking validity of %s overriding %s".format(member.fullLocationString, other.fullLocationString))
 
-        def memberTp = self.memberType(member)
-        def otherTp  = self.memberType(other)
-        def noErrorType = other.tpe != ErrorType && member.tpe != ErrorType
+        def noErrorType = !pair.isErroneous
         def isRootOrNone(sym: Symbol) = sym != null && sym.isRoot || sym == NoSymbol
-        def isNeitherInClass = (member.owner != clazz) && (other.owner != clazz)
+        def isNeitherInClass = member.owner != pair.base && other.owner != pair.base
+
         def objectOverrideErrorMsg = (
-          "overriding " + other.fullLocationString + " with " + member.fullLocationString + ":\n" +
+          "overriding " + high.fullLocationString + " with " + low.fullLocationString + ":\n" +
           "an overriding object must conform to the overridden object's class bound" +
-          analyzer.foundReqMsg(classBoundAsSeen(member.tpe), classBoundAsSeen(other.tpe))
+          analyzer.foundReqMsg(pair.lowClassBound, pair.highClassBound)
         )
 
         def overrideErrorMsg(msg: String): String = {
@@ -299,7 +329,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
                 infoStringWithLocation(other),
                 infoStringWithLocation(member)
               )
-            else if (settings.debug.value)
+            else if (settings.debug)
               analyzer.foundReqMsg(member.tpe, other.tpe)
             else ""
 
@@ -353,8 +383,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
           }
         }
 
-        /** Is the intersection between given two lists of overridden symbols empty?
-         */
+        /* Is the intersection between the given two lists of overridden symbols empty? */
         def intersectionIsEmpty(syms1: List[Symbol], syms2: List[Symbol]) =
           !(syms1 exists (syms2 contains _))
 
@@ -378,12 +407,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
           if (!isOverrideAccessOK) {
             overrideAccessError()
           } else if (other.isClass) {
-            overrideError("cannot be used here - class definitions cannot be overridden");
+            overrideError("cannot be used here - class definitions cannot be overridden")
           } else if (!other.isDeferred && member.isClass) {
-            overrideError("cannot be used here - classes can only override abstract types");
+            overrideError("cannot be used here - classes can only override abstract types")
           } else if (other.isEffectivelyFinal) { // (1.2)
-            overrideError("cannot override final member");
-          } else if (!other.isDeferredOrDefault && !member.isAnyOverride && !member.isSynthetic) { // (*)
+            overrideError("cannot override final member")
+          } else if (!other.isDeferredOrDefault && !other.hasFlag(DEFAULTMETHOD) && !member.isAnyOverride && !member.isSynthetic) { // (*)
             // (*) Synthetic exclusion for (at least) default getters, fixes SI-5178. We cannot assign the OVERRIDE flag to
             // the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket.
               if (isNeitherInClass && !(other.owner isSubClass member.owner))
@@ -400,7 +429,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
           else if (member.isAnyOverride && (other hasFlag ACCESSOR) && other.accessed.isVariable && !other.accessed.isLazy) {
             // !?! this is not covered by the spec. We need to resolve this either by changing the spec or removing the test here.
             // !!! is there a !?! convention? I'm !!!ing this to make sure it turns up on my searches.
-            if (!settings.overrideVars.value)
+            if (!settings.overrideVars)
               overrideError("cannot override a mutable variable")
           }
           else if (member.isAnyOverride &&
@@ -418,13 +447,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
                      member.isValue && !member.isLazy) {
             overrideError("must be declared lazy to override a concrete lazy value")
           } else if (other.isDeferred && member.isTermMacro && member.extendedOverriddenSymbols.forall(_.isDeferred)) { // (1.9)
-            overrideError("cannot override an abstract method")
+            overrideError("cannot be used here - term macros cannot override abstract methods")
           } else if (other.isTermMacro && !member.isTermMacro) { // (1.10)
-            overrideError("cannot override a macro")
+            overrideError("cannot be used here - only term macros can override term macros")
           } else {
             checkOverrideTypes()
             checkOverrideDeprecated()
-            if (settings.warnNullaryOverride.value) {
+            if (settings.warnNullaryOverride) {
               if (other.paramss.isEmpty && !member.paramss.isEmpty) {
                 unit.warning(member.pos, "non-nullary method overrides nullary method")
               }
@@ -432,76 +461,77 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
           }
         }
 
-        def checkOverrideTypes() {
-          if (other.isAliasType) {
-            //if (!member.typeParams.isEmpty) (1.5)  @MAT
-            //  overrideError("may not be parameterized");
-            //if (!other.typeParams.isEmpty)  (1.5)   @MAT
-            //  overrideError("may not override parameterized type");
-            // @M: substSym
-
-            if( !(sameLength(member.typeParams, other.typeParams) && (memberTp.substSym(member.typeParams, other.typeParams) =:= otherTp)) ) // (1.6)
-              overrideTypeError();
+        //if (!member.typeParams.isEmpty) (1.5)  @MAT
+        //  overrideError("may not be parameterized");
+        //if (!other.typeParams.isEmpty)  (1.5)   @MAT
+        //  overrideError("may not override parameterized type");
+        // @M: substSym
+        def checkOverrideAlias() {
+          // Important: first check the pair has the same kind, since the substitution
+          // carries high's type parameter's bounds over to low, so that
+          // type equality doesn't consider potentially different bounds on low/high's type params.
+          // In b781e25afe this went from using memberInfo to memberType (now lowType/highType), tested by neg/override.scala.
+          // TODO: was that the right fix? it seems type alias's RHS should be checked by looking at the symbol's info
+          if (pair.sameKind && lowType.substSym(low.typeParams, high.typeParams) =:= highType) ()
+          else overrideTypeError() // (1.6)
+        }
+        //if (!member.typeParams.isEmpty) // (1.7)  @MAT
+        //  overrideError("may not be parameterized");
+        def checkOverrideAbstract() {
+          if (!(highInfo.bounds containsType lowType)) { // (1.7.1)
+            overrideTypeError(); // todo: do an explaintypes with bounds here
+            explainTypes(_.bounds containsType _, highInfo, lowType)
           }
-          else if (other.isAbstractType) {
-            //if (!member.typeParams.isEmpty) // (1.7)  @MAT
-            //  overrideError("may not be parameterized");
-            val otherTp = self.memberInfo(other)
-
-            if (!(otherTp.bounds containsType memberTp)) { // (1.7.1)
-              overrideTypeError(); // todo: do an explaintypes with bounds here
-              explainTypes(_.bounds containsType _, otherTp, memberTp)
-            }
-
-            // check overriding (abstract type --> abstract type or abstract type --> concrete type member (a type alias))
-            // making an abstract type member concrete is like passing a type argument
-            val kindErrors = typer.infer.checkKindBounds(List(other), List(memberTp), self, member.owner) // (1.7.2)
-
-            if(!kindErrors.isEmpty)
+          // check overriding (abstract type --> abstract type or abstract type --> concrete type member (a type alias))
+          // making an abstract type member concrete is like passing a type argument
+          typer.infer.checkKindBounds(high :: Nil, lowType :: Nil, rootType, low.owner) match { // (1.7.2)
+            case Nil        =>
+            case kindErrors =>
               unit.error(member.pos,
                 "The kind of "+member.keyString+" "+member.varianceString + member.nameString+
                 " does not conform to the expected kind of " + other.defString + other.locationString + "." +
                 kindErrors.toList.mkString("\n", ", ", ""))
-
-            // check a type alias's RHS corresponds to its declaration
-            // this overlaps somewhat with validateVariance
-            if(member.isAliasType) {
-              // println("checkKindBounds" + ((List(member), List(memberTp.normalize), self, member.owner)))
-              val kindErrors = typer.infer.checkKindBounds(List(member), List(memberTp.normalize), self, member.owner)
-
-              if(!kindErrors.isEmpty)
+          }
+          // check a type alias's RHS corresponds to its declaration
+          // this overlaps somewhat with validateVariance
+          if (low.isAliasType) {
+            typer.infer.checkKindBounds(low :: Nil, lowType.normalize :: Nil, rootType, low.owner) match {
+              case Nil        =>
+              case kindErrors =>
                 unit.error(member.pos,
-                  "The kind of the right-hand side "+memberTp.normalize+" of "+member.keyString+" "+
-                  member.varianceString + member.nameString+ " does not conform to its expected kind."+
+                  "The kind of the right-hand side "+lowType.normalize+" of "+low.keyString+" "+
+                  low.varianceString + low.nameString+ " does not conform to its expected kind."+
                   kindErrors.toList.mkString("\n", ", ", ""))
-            } else if (member.isAbstractType) {
-              if (memberTp.isVolatile && !otherTp.bounds.hi.isVolatile)
-                overrideError("is a volatile type; cannot override a type with non-volatile upper bound")
-            }
-          } else if (other.isTerm) {
-            other.cookJavaRawInfo() // #2454
-            val memberTp = self.memberType(member)
-            val otherTp = self.memberType(other)
-            if (!overridesTypeInPrefix(memberTp, otherTp, self)) { // 8
-              overrideTypeError()
-              explainTypes(memberTp, otherTp)
             }
-
-            if (member.isStable && !otherTp.isVolatile) {
-	            if (memberTp.isVolatile)
-                overrideError("has a volatile type; cannot override a member with non-volatile type")
-              else memberTp.normalize.resultType match {
-                case rt: RefinedType if !(rt =:= otherTp) && !(checkedCombinations contains rt.parents) =>
-                  // might mask some inconsistencies -- check overrides
-                  checkedCombinations += rt.parents
-                  val tsym = rt.typeSymbol;
-                  if (tsym.pos == NoPosition) tsym setPos member.pos
-                  checkAllOverrides(tsym, typesOnly = true)
-                case _ =>
-              }
+          }
+          else if (low.isAbstractType && lowType.isVolatile && !highInfo.bounds.hi.isVolatile)
+            overrideError("is a volatile type; cannot override a type with non-volatile upper bound")
+        }
+        def checkOverrideTerm() {
+          other.cookJavaRawInfo() // #2454
+          if (!overridesTypeInPrefix(lowType, highType, rootType)) { // 8
+            overrideTypeError()
+            explainTypes(lowType, highType)
+          }
+          if (low.isStable && !highType.isVolatile) {
+            if (lowType.isVolatile)
+              overrideError("has a volatile type; cannot override a member with non-volatile type")
+            else lowType.normalize.resultType match {
+              case rt: RefinedType if !(rt =:= highType) && !(checkedCombinations contains rt.parents) =>
+                // might mask some inconsistencies -- check overrides
+                checkedCombinations += rt.parents
+                val tsym = rt.typeSymbol
+                if (tsym.pos == NoPosition) tsym setPos member.pos
+                checkAllOverrides(tsym, typesOnly = true)
+              case _ =>
             }
           }
         }
+        def checkOverrideTypes() {
+          if (high.isAliasType)         checkOverrideAlias()
+          else if (high.isAbstractType) checkOverrideAbstract()
+          else if (high.isTerm)         checkOverrideTerm()
+        }
 
         def checkOverrideDeprecated() {
           if (other.hasDeprecatedOverridingAnnotation) {
@@ -514,10 +544,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
 
       val opc = new overridingPairs.Cursor(clazz)
       while (opc.hasNext) {
-        //Console.println(opc.overriding/* + ":" + opc.overriding.tpe*/ + " in "+opc.overriding.fullName + " overrides " + opc.overridden/* + ":" + opc.overridden.tpe*/ + " in "+opc.overridden.fullName + "/"+ opc.overridden.hasFlag(DEFERRED));//debug
-        if (!opc.overridden.isClass) checkOverride(opc.overriding, opc.overridden);
+        if (!opc.high.isClass)
+          checkOverride(opc.currentPair)
 
-        opc.next
+        opc.next()
       }
       printMixinOverrideErrors()
 
@@ -549,15 +579,15 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
               def uncurryAndErase(tp: Type) = erasure.erasure(sym)(uncurry.transformInfo(sym, tp))
               val tp1 = uncurryAndErase(clazz.thisType.memberType(sym))
               val tp2 = uncurryAndErase(clazz.thisType.memberType(other))
-              afterErasure(tp1 matches tp2)
+              exitingErasure(tp1 matches tp2)
             })
 
         def ignoreDeferred(member: Symbol) = (
           (member.isAbstractType && !member.isFBounded) || (
-            member.isJavaDefined &&
-            // the test requires afterErasure so shouldn't be
+            // the test requires exitingErasure so shouldn't be
             // done if the compiler has no erasure phase available
-            (currentRun.erasurePhase == NoPhase || javaErasedOverridingSym(member) != NoSymbol)
+               member.isJavaDefined
+            && (currentRun.erasurePhase == NoPhase || javaErasedOverridingSym(member) != NoSymbol)
           )
         )
 
@@ -578,8 +608,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
           def stubImplementations: List[String] = {
             // Grouping missing methods by the declaring class
             val regrouped = missingMethods.groupBy(_.owner).toList
-            def membersStrings(members: List[Symbol]) =
-              members.sortBy("" + _.name) map (m => m.defStringSeenAs(clazz.tpe memberType m) + " = ???")
+            def membersStrings(members: List[Symbol]) = {
+              members foreach fullyInitializeSymbol
+              members.sortBy(_.name) map (m => m.defStringSeenAs(clazz.tpe_* memberType m) + " = ???")
+            }
 
             if (regrouped.tail.isEmpty)
               membersStrings(regrouped.head._2)
@@ -718,16 +750,19 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
       else if (clazz.isTrait && !(clazz isSubClass AnyValClass)) {
         // For non-AnyVal classes, prevent abstract methods in interfaces that override
         // final members in Object; see #4431
-        for (decl <- clazz.info.decls.iterator) {
-          val overridden = decl.overriddenSymbol(ObjectClass)
+        for (decl <- clazz.info.decls) {
+          // Have to use matchingSymbol, not a method involving overridden symbols,
+          // because the scala type system understands that an abstract method here does not
+          // override a concrete method in Object. The jvm, however, does not.
+          val overridden = decl.matchingSymbol(ObjectClass, ObjectTpe)
           if (overridden.isFinal)
             unit.error(decl.pos, "trait cannot redefine final method from class AnyRef")
         }
       }
 
-      /** Returns whether there is a symbol declared in class `inclazz`
-       *  (which must be different from `clazz`) whose name and type
-       *  seen as a member of `class.thisType` matches `member`'s.
+      /* Returns whether there is a symbol declared in class `inclazz`
+       * (which must be different from `clazz`) whose name and type
+       * seen as a member of `clazz.thisType` matches `member`'s.
        */
       def hasMatchingSym(inclazz: Symbol, member: Symbol): Boolean = {
         val isVarargs = hasRepeatedParam(member.tpe)
@@ -739,22 +774,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
 
           matches(member.tpe) || (isVarargs && matches(varargsType))
         }
-        /** The rules for accessing members which have an access boundary are more
-         *  restrictive in java than scala.  Since java has no concept of package nesting,
-         *  a member with "default" (package-level) access can only be accessed by members
-         *  in the exact same package.  Example:
+        /* The rules for accessing members which have an access boundary are more
+         * restrictive in java than scala.  Since java has no concept of package nesting,
+         * a member with "default" (package-level) access can only be accessed by members
+         * in the exact same package.  Example:
          *
-         *    package a.b;
-         *    public class JavaClass { void foo() { } }
+         *   package a.b;
+         *   public class JavaClass { void foo() { } }
          *
-         *  The member foo() can be accessed only from members of package a.b, and not
-         *  nested packages like a.b.c.  In the analogous scala class:
+         * The member foo() can be accessed only from members of package a.b, and not
+         * nested packages like a.b.c.  In the analogous scala class:
          *
-         *    package a.b
-         *    class ScalaClass { private[b] def foo() = () }
+         *   package a.b
+         *   class ScalaClass { private[b] def foo() = () }
          *
-         *  The member IS accessible to classes in package a.b.c.  The javaAccessCheck logic
-         *  is restricting the set of matching signatures according to the above semantics.
+         * The member IS accessible to classes in package a.b.c.  The javaAccessCheck logic
+         * is restricting the set of matching signatures according to the above semantics.
          */
         def javaAccessCheck(sym: Symbol) = (
              !inclazz.isJavaDefined                             // not a java defined member
@@ -774,7 +809,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
           // for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG
 
           val nonMatching: List[Symbol] = clazz.info.member(member.name).alternatives.filterNot(_.owner == clazz).filterNot(_.isFinal)
-          def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix);
+          def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix)
           nonMatching match {
             case Nil =>
               issueError("")
@@ -801,7 +836,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
       for (i <- 0 until seenTypes.length)
         seenTypes(i) = Nil
 
-      /** validate all base types of a class in reverse linear order. */
+      /* validate all base types of a class in reverse linear order. */
       def register(tp: Type): Unit = {
 //        if (clazz.fullName.endsWith("Collection.Projection"))
 //            println("validate base type "+tp)
@@ -823,13 +858,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
         val baseClass = clazz.info.baseTypeSeq(i).typeSymbol
         seenTypes(i) match {
           case Nil =>
-            println("??? base "+baseClass+" not found in basetypes of "+clazz)
+            devWarning(s"base $baseClass not found in basetypes of $clazz. This might indicate incorrect caching of TypeRef#parents.")
           case _ :: Nil =>
             ;// OK
           case tp1 :: tp2 :: _ =>
             unit.error(clazz.pos, "illegal inheritance;\n " + clazz +
                        " inherits different type instances of " + baseClass +
-                       ":\n" + tp1 + " and " + tp2);
+                       ":\n" + tp1 + " and " + tp2)
             explainTypes(tp1, tp2)
             explainTypes(tp2, tp1)
         }
@@ -838,163 +873,14 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
 
   // Variance Checking --------------------------------------------------------
 
-    private val ContraVariance = -1
-    private val NoVariance = 0
-    private val CoVariance = 1
-    private val AnyVariance = 2
-
-    private val escapedPrivateLocals = new mutable.HashSet[Symbol]
-
-    val varianceValidator = new Traverser {
-
-      /** Validate variance of info of symbol `base` */
-      private def validateVariance(base: Symbol) {
-        // A flag for when we're in a refinement, meaning method parameter types
-        // need to be checked.
-        var inRefinement = false
-
-        def varianceString(variance: Int): String =
-          if (variance == 1) "covariant"
-          else if (variance == -1) "contravariant"
-          else "invariant";
-
-        /** The variance of a symbol occurrence of `tvar`
-         *  seen at the level of the definition of `base`.
-         *  The search proceeds from `base` to the owner of `tvar`.
-         *  Initially the state is covariant, but it might change along the search.
-         */
-        def relativeVariance(tvar: Symbol): Int = {
-          val clazz = tvar.owner
-          var sym = base
-          var state = CoVariance
-          while (sym != clazz && state != AnyVariance) {
-            //Console.println("flip: " + sym + " " + sym.isParameter());//DEBUG
-            // Flip occurrences of type parameters and parameters, unless
-            //  - it's a constructor, or case class factory or extractor
-            //  - it's a type parameter of tvar's owner.
-            if (sym.isParameter && !sym.owner.isConstructor && !sym.owner.isCaseApplyOrUnapply &&
-                !(tvar.isTypeParameterOrSkolem && sym.isTypeParameterOrSkolem &&
-                  tvar.owner == sym.owner)) state = -state;
-            else if (!sym.owner.isClass ||
-                     sym.isTerm && ((sym.isPrivateLocal || sym.isProtectedLocal || sym.isSuperAccessor /* super accessors are implicitly local #4345*/) && !(escapedPrivateLocals contains sym))) {
-              // return AnyVariance if `sym` is local to a term
-              // or is private[this] or protected[this]
-              state = AnyVariance
-            } else if (sym.isAliasType) {
-              // return AnyVariance if `sym` is an alias type
-              // that does not override anything. This is OK, because we always
-              // expand aliases for variance checking.
-              // However, if `sym` does override a type in a base class
-              // we have to assume NoVariance, as there might then be
-              // references to the type parameter that are not variance checked.
-              state = if (sym.isOverridingSymbol) NoVariance else AnyVariance
-            }
-            sym = sym.owner
-          }
-          state
-        }
-
-        /** Validate that the type `tp` is variance-correct, assuming
-         *  the type occurs itself at variance position given by `variance`
-         */
-        def validateVariance(tp: Type, variance: Int): Unit = tp match {
-          case ErrorType =>
-          case WildcardType =>
-          case BoundedWildcardType(bounds) =>
-            validateVariance(bounds, variance)
-          case NoType =>
-          case NoPrefix =>
-          case ThisType(_) =>
-          case ConstantType(_) =>
-          // case DeBruijnIndex(_, _) =>
-          case SingleType(pre, sym) =>
-            validateVariance(pre, variance)
-          case TypeRef(_, sym, _) if sym.isAliasType =>
-            // okay to ignore pre/args here. In 2.10.3 we used to check them in addition to checking
-            // the normalized type, which led to exponential time type checking, see pos/t8152-performance.scala
-            validateVariance(tp.normalize, variance)
-          case TypeRef(pre, sym, args) =>
-//            println("validate "+sym+" at "+relativeVariance(sym))
-            if (sym.variance != NoVariance) {
-              val v = relativeVariance(sym)
-              if (v != AnyVariance && sym.variance != v * variance) {
-                //Console.println("relativeVariance(" + base + "," + sym + ") = " + v);//DEBUG
-                def tpString(tp: Type) = tp match {
-                  case ClassInfoType(parents, _, clazz) => "supertype "+intersectionType(parents, clazz.owner)
-                  case _ => "type "+tp
-                }
-                unit.error(base.pos,
-                           varianceString(sym.variance) + " " + sym +
-                           " occurs in " + varianceString(v * variance) +
-                           " position in " + tpString(base.info) + " of " + base);
-              }
-            }
-            validateVariance(pre, variance)
-            // @M for higher-kinded typeref, args.isEmpty
-            // However, these args respect variances by construction anyway
-            // -- the interesting case is in type application, see checkKindBounds in Infer
-            if (args.nonEmpty)
-              validateVarianceArgs(args, variance, sym.typeParams)
-          case ClassInfoType(parents, decls, symbol) =>
-            validateVariances(parents, variance)
-          case RefinedType(parents, decls) =>
-            validateVariances(parents, variance)
-            val saved = inRefinement
-            inRefinement = true
-            for (sym <- decls)
-              validateVariance(sym.info, if (sym.isAliasType) NoVariance else variance)
-            inRefinement = saved
-          case TypeBounds(lo, hi) =>
-            validateVariance(lo, -variance)
-            validateVariance(hi, variance)
-          case mt @ MethodType(formals, result) =>
-            if (inRefinement)
-              validateVariances(mt.paramTypes, -variance)
-            validateVariance(result, variance)
-          case NullaryMethodType(result) =>
-            validateVariance(result, variance)
-          case PolyType(tparams, result) =>
-            // type parameters will be validated separately, because they are defined explicitly.
-            validateVariance(result, variance)
-          case ExistentialType(tparams, result) =>
-            validateVariances(tparams map (_.info), variance)
-            validateVariance(result, variance)
-          case AnnotatedType(annots, tp, selfsym) =>
-            if (!annots.exists(_ matches uncheckedVarianceClass))
-              validateVariance(tp, variance)
-        }
-
-        def validateVariances(tps: List[Type], variance: Int) {
-          tps foreach (tp => validateVariance(tp, variance))
-        }
-
-        def validateVarianceArgs(tps: List[Type], variance: Int, tparams: List[Symbol]) {
-          foreach2(tps, tparams)((tp, tparam) => validateVariance(tp, variance * tparam.variance))
-        }
-
-        validateVariance(base.info, CoVariance)
+    object varianceValidator extends VarianceValidator {
+      private def tpString(tp: Type) = tp match {
+        case ClassInfoType(parents, _, clazz) => "supertype "+intersectionType(parents, clazz.owner)
+        case _                                => "type "+tp
       }
-
-      override def traverse(tree: Tree) {
-        tree match {
-          case ClassDef(_, _, _, _) | TypeDef(_, _, _, _) =>
-            validateVariance(tree.symbol)
-            super.traverse(tree)
-          // ModuleDefs need not be considered because they have been eliminated already
-          case ValDef(_, _, _, _) =>
-            if (!tree.symbol.hasLocalFlag)
-              validateVariance(tree.symbol)
-          case DefDef(_, _, tparams, vparamss, _, _) =>
-            // No variance check for object-private/protected methods/values.
-            if (!tree.symbol.hasLocalFlag) {
-              validateVariance(tree.symbol)
-              traverseTrees(tparams)
-              traverseTreess(vparamss)
-            }
-          case Template(_, _, _) =>
-            super.traverse(tree)
-          case _ =>
-        }
+      override def issueVarianceError(base: Symbol, sym: Symbol, required: Variance) {
+        currentRun.currentUnit.error(base.pos,
+          s"${sym.variance} $sym occurs in $required position in ${tpString(base.info)} of $base")
       }
     }
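
The rewritten validator delegates the actual traversal to the shared VarianceValidator and only customizes the error message. A classic violation it reports, shown as hypothetical user code:

    class Box[+A] {
      def set(a: A): Unit = ()
      // error: covariant type A occurs in contravariant position in type A of value a
    }
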
 
@@ -1022,7 +908,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
       var index = -1
       for (stat <- stats) {
         index = index + 1
-        def enterSym(sym: Symbol) = if (sym.isLocal) {
+        def enterSym(sym: Symbol) = if (sym.isLocalToBlock) {
           currentLevel.scope.enter(sym)
           symIndex(sym) = index
         }
@@ -1039,11 +925,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
     }
 
     private def enterReference(pos: Position, sym: Symbol) {
-      if (sym.isLocal) {
+      if (sym.isLocalToBlock) {
         val e = currentLevel.scope.lookupEntry(sym.name)
         if ((e ne null) && sym == e.sym) {
           var l = currentLevel
-          while (l.scope != e.owner) l = l.outer;
+          while (l.scope != e.owner) l = l.outer
           val symindex = symIndex(sym)
           if (l.maxindex < symindex) {
             l.refpos = pos
@@ -1059,8 +945,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
       def apply(tp: Type) = mapOver(tp).normalize
     }
 
-    def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.lint.value) (fn, args) match {
-      case (tap@TypeApply(fun, targs), List(view: ApplyImplicitView)) if fun.symbol == Option_apply =>
+    def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.lint) (fn, args) match {
+      case (tap@TypeApply(fun, targs), List(view: ApplyImplicitView)) if fun.symbol == currentRun.runDefinitions.Option_apply =>
         unit.warning(pos, s"Suspicious application of an implicit view (${view.fun}) in the argument to Option.apply.") // SI-6567
       case _ =>
     }
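
The SI-6567 lint fires (under -Xlint) when an implicit view is applied inside the argument of Option.apply, because the conversion runs before the null check that Option.apply performs. A hypothetical sketch of code that triggers it:

    object OptionApplyDemo {
      implicit def asLength(s: String): Int = s.length
      def len(s: String): Option[Int] = Option(s)
      // warning: Suspicious application of an implicit view (asLength) in the argument to Option.apply.
      // len(null) throws a NullPointerException instead of yielding None.
    }
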
@@ -1069,164 +955,166 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
       case Object_eq | Object_ne | Object_== | Object_!= | Any_== | Any_!= => true
       case _                                                               => false
     }
-    def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match {
-      case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) =>
-        def isReferenceOp = fn.symbol == Object_eq || fn.symbol == Object_ne
-        def isNew(tree: Tree) = tree match {
-          case Function(_, _)
-             | Apply(Select(New(_), nme.CONSTRUCTOR), _) => true
-          case _ => false
-        }
-        def underlyingClass(tp: Type): Symbol = {
-          val sym = tp.widen.typeSymbol
-          if (sym.isAbstractType) underlyingClass(sym.info.bounds.hi)
-          else sym
-        }
-        val actual   = underlyingClass(args.head.tpe)
-        val receiver = underlyingClass(qual.tpe)
-        def onTrees[T](f: List[Tree] => T) = f(List(qual, args.head))
-        def onSyms[T](f: List[Symbol] => T) = f(List(receiver, actual))
-
-        // @MAT normalize for consistency in error message, otherwise only part is normalized due to use of `typeSymbol`
-        def typesString = normalizeAll(qual.tpe.widen)+" and "+normalizeAll(args.head.tpe.widen)
-
-        /** Symbols which limit the warnings we can issue since they may be value types */
-        val isMaybeValue = Set[Symbol](AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, JavaSerializableClass)
-
-        // Whether def equals(other: Any) has known behavior: it is the default
-        // inherited from java.lang.Object, or it is a synthetically generated
-        // case equals.  TODO - more cases are warnable if the target is a synthetic
-        // equals.
-        def isUsingWarnableEquals = {
-          val m = receiver.info.member(nme.equals_)
-          ((m == Object_equals) || (m == Any_equals) || isMethodCaseEquals(m))
-        }
-        def isMethodCaseEquals(m: Symbol) = m.isSynthetic && m.owner.isCase
-        def isCaseEquals = isMethodCaseEquals(receiver.info.member(nme.equals_))
-        // Whether this == or != is one of those defined in Any/AnyRef or an overload from elsewhere.
-        def isUsingDefaultScalaOp = {
-          val s = fn.symbol
-          (s == Object_==) || (s == Object_!=) || (s == Any_==) || (s == Any_!=)
-        }
-        def haveSubclassRelationship = (actual isSubClass receiver) || (receiver isSubClass actual)
-
-        // Whether the operands+operator represent a warnable combo (assuming anyrefs)
-        // Looking for comparisons performed with ==/!= in combination with either an
-        // equals method inherited from Object or a case class synthetic equals (for
-        // which we know the logic.)
-        def isWarnable           = isReferenceOp || (isUsingDefaultScalaOp && isUsingWarnableEquals)
-        def isEitherNullable     = (NullClass.tpe <:< receiver.info) || (NullClass.tpe <:< actual.info)
-        def isEitherValueClass   = actual.isDerivedValueClass || receiver.isDerivedValueClass
-        def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass
-        def isUnit(s: Symbol)    = unboxedValueClass(s) == UnitClass
-        def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || isAnyNumber(s)
-        def isScalaNumber(s: Symbol) = s isSubClass ScalaNumberClass
-        // test is behind a platform guard
-        def isJavaNumber(s: Symbol) = !forMSIL && (s isSubClass JavaNumberClass)
-        // includes java.lang.Number if appropriate [SI-5779]
-        def isAnyNumber(s: Symbol) = isScalaNumber(s) || isJavaNumber(s)
-        def isMaybeAnyValue(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || isMaybeValue(s)
-        // used to short-circuit unrelatedTypes check if both sides are special
-        def isSpecial(s: Symbol) = isMaybeAnyValue(s) || isAnyNumber(s)
-        // unused
-        def possibleNumericCount = onSyms(_ filter (x => isNumeric(x) || isMaybeValue(x)) size)
-        val nullCount            = onSyms(_ filter (_ == NullClass) size)
-        def isNonsenseValueClassCompare = (
-             !haveSubclassRelationship
-          && isUsingDefaultScalaOp
-          && isEitherValueClass
-          && !isCaseEquals
-        )
+    /** Check the sensibility of using the given `equals` to compare `qual` and `other`. */
+    private def checkSensibleEquals(pos: Position, qual: Tree, name: Name, sym: Symbol, other: Tree) = {
+      def isReferenceOp = sym == Object_eq || sym == Object_ne
+      def isNew(tree: Tree) = tree match {
+        case Function(_, _) | Apply(Select(New(_), nme.CONSTRUCTOR), _) => true
+        case _ => false
+      }
+      def underlyingClass(tp: Type): Symbol = {
+        val sym = tp.widen.typeSymbol
+        if (sym.isAbstractType) underlyingClass(sym.info.bounds.hi)
+        else sym
+      }
+      val actual   = underlyingClass(other.tpe)
+      val receiver = underlyingClass(qual.tpe)
+      def onTrees[T](f: List[Tree] => T) = f(List(qual, other))
+      def onSyms[T](f: List[Symbol] => T) = f(List(receiver, actual))
+
+      // @MAT normalize for consistency in error message, otherwise only part is normalized due to use of `typeSymbol`
+      def typesString = normalizeAll(qual.tpe.widen)+" and "+normalizeAll(other.tpe.widen)
+
+      /* Symbols which limit the warnings we can issue since they may be value types */
+      val isMaybeValue = Set[Symbol](AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, JavaSerializableClass)
+
+      // Whether def equals(other: Any) has known behavior: it is the default
+      // inherited from java.lang.Object, or it is a synthetically generated
+      // case equals.  TODO - more cases are warnable if the target is a synthetic
+      // equals.
+      def isUsingWarnableEquals = {
+        val m = receiver.info.member(nme.equals_)
+        ((m == Object_equals) || (m == Any_equals) || isMethodCaseEquals(m))
+      }
+      def isMethodCaseEquals(m: Symbol) = m.isSynthetic && m.owner.isCase
+      def isCaseEquals = isMethodCaseEquals(receiver.info.member(nme.equals_))
+      // Whether this == or != is one of those defined in Any/AnyRef or an overload from elsewhere.
+      def isUsingDefaultScalaOp = sym == Object_== || sym == Object_!= || sym == Any_== || sym == Any_!=
+      def haveSubclassRelationship = (actual isSubClass receiver) || (receiver isSubClass actual)
+
+      // Whether the operands+operator represent a warnable combo (assuming anyrefs)
+      // Looking for comparisons performed with ==/!= in combination with either an
+      // equals method inherited from Object or a case class synthetic equals (for
+      // which we know the logic.)
+      def isWarnable           = isReferenceOp || (isUsingDefaultScalaOp && isUsingWarnableEquals)
+      def isEitherNullable     = (NullTpe <:< receiver.info) || (NullTpe <:< actual.info)
+      def isEitherValueClass   = actual.isDerivedValueClass || receiver.isDerivedValueClass
+      def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass
+      def isUnit(s: Symbol)    = unboxedValueClass(s) == UnitClass
+      def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || isAnyNumber(s)
+      def isScalaNumber(s: Symbol) = s isSubClass ScalaNumberClass
+      def isJavaNumber(s: Symbol)  = s isSubClass JavaNumberClass
+      // includes java.lang.Number if appropriate [SI-5779]
+      def isAnyNumber(s: Symbol)     = isScalaNumber(s) || isJavaNumber(s)
+      def isMaybeAnyValue(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || isMaybeValue(s)
+      // used to short-circuit unrelatedTypes check if both sides are special
+      def isSpecial(s: Symbol) = isMaybeAnyValue(s) || isAnyNumber(s)
+      val nullCount            = onSyms(_ filter (_ == NullClass) size)
+      def isNonsenseValueClassCompare = (
+           !haveSubclassRelationship
+        && isUsingDefaultScalaOp
+        && isEitherValueClass
+        && !isCaseEquals
+      )
 
-        def nonSensibleWarning(what: String, alwaysEqual: Boolean) = {
-          val msg = alwaysEqual == (name == nme.EQ || name == nme.eq)
-          unit.warning(pos, "comparing "+what+" using `"+name.decode+"' will always yield " + msg)
-        }
-        def nonSensible(pre: String, alwaysEqual: Boolean) =
-          nonSensibleWarning(pre+"values of types "+typesString, alwaysEqual)
-        def nonSensiblyEq() = nonSensible("", true)
-        def nonSensiblyNeq() = nonSensible("", false)
-        def nonSensiblyNew() = nonSensibleWarning("a fresh object", false)
-
-        def unrelatedMsg = name match {
-          case nme.EQ | nme.eq => "never compare equal"
-          case _               => "always compare unequal"
-        }
-        def unrelatedTypes() = {
-          val weaselWord = if (isEitherValueClass) "" else " most likely"
-          unit.warning(pos, s"$typesString are unrelated: they will$weaselWord $unrelatedMsg")
-        }
+      // Have we already determined that the comparison is non-sensible? I mean, non-sensical?
+      var isNonSensible = false
+
+      def nonSensibleWarning(what: String, alwaysEqual: Boolean) = {
+        val msg = alwaysEqual == (name == nme.EQ || name == nme.eq)
+        unit.warning(pos, s"comparing $what using `${name.decode}' will always yield $msg")
+        isNonSensible = true
+      }
+      def nonSensible(pre: String, alwaysEqual: Boolean) =
+        nonSensibleWarning(s"${pre}values of types $typesString", alwaysEqual)
+      def nonSensiblyEq() = nonSensible("", alwaysEqual = true)
+      def nonSensiblyNeq() = nonSensible("", alwaysEqual = false)
+      def nonSensiblyNew() = nonSensibleWarning("a fresh object", alwaysEqual = false)
+
+      def unrelatedMsg = name match {
+        case nme.EQ | nme.eq => "never compare equal"
+        case _               => "always compare unequal"
+      }
+      def unrelatedTypes() = if (!isNonSensible) {
+        val weaselWord = if (isEitherValueClass) "" else " most likely"
+        unit.warning(pos, s"$typesString are unrelated: they will$weaselWord $unrelatedMsg")
+      }
 
-        if (nullCount == 2) // null == null
+      if (nullCount == 2) // null == null
+        nonSensiblyEq()
+      else if (nullCount == 1) {
+        if (onSyms(_ exists isPrimitiveValueClass)) // null == 5
+          nonSensiblyNeq()
+        else if (onTrees( _ exists isNew)) // null == new AnyRef
+          nonSensiblyNew()
+      }
+      else if (isBoolean(receiver)) {
+        if (!isBoolean(actual) && !isMaybeValue(actual))    // true == 5
+          nonSensiblyNeq()
+      }
+      else if (isUnit(receiver)) {
+        if (isUnit(actual)) // () == ()
           nonSensiblyEq()
-        else if (nullCount == 1) {
-          if (onSyms(_ exists isPrimitiveValueClass)) // null == 5
-            nonSensiblyNeq()
-          else if (onTrees( _ exists isNew)) // null == new AnyRef
-            nonSensiblyNew()
-        }
-        else if (isBoolean(receiver)) {
-          if (!isBoolean(actual) && !isMaybeValue(actual))    // true == 5
+        else if (!isUnit(actual) && !isMaybeValue(actual))  // () == "abc"
+          nonSensiblyNeq()
+      }
+      else if (isNumeric(receiver)) {
+        if (!isNumeric(actual))
+          if (isUnit(actual) || isBoolean(actual) || !isMaybeValue(actual))   // 5 == "abc"
             nonSensiblyNeq()
-        }
-        else if (isUnit(receiver)) {
-          if (isUnit(actual)) // () == ()
-            nonSensiblyEq()
-          else if (!isUnit(actual) && !isMaybeValue(actual))  // () == "abc"
+      }
+      else if (isWarnable && !isCaseEquals) {
+        if (isNew(qual)) // new X == y
+          nonSensiblyNew()
+        else if (isNew(other) && (receiver.isEffectivelyFinal || isReferenceOp))   // object X ; X == new Y
+          nonSensiblyNew()
+        else if (receiver.isEffectivelyFinal && !(receiver isSubClass actual) && !actual.isRefinementClass) {  // object X, Y; X == Y
+          if (isEitherNullable)
+            nonSensible("non-null ", false)
+          else
             nonSensiblyNeq()
         }
-        else if (isNumeric(receiver)) {
-          if (!isNumeric(actual) && !forMSIL)
-            if (isUnit(actual) || isBoolean(actual) || !isMaybeValue(actual))   // 5 == "abc"
-              nonSensiblyNeq()
+      }
+
+      // warn if one but not the other is a derived value class
+      // this is especially important to enable transitioning from
+      // regular to value classes without silent failures.
+      if (isNonsenseValueClassCompare)
+        unrelatedTypes()
+      // possibleNumericCount is insufficient or this will warn on e.g. Boolean == j.l.Boolean
+      else if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
+        // better to have lubbed and lost
+        def warnIfLubless(): Unit = {
+          val common = global.lub(List(actual.tpe, receiver.tpe))
+          if (ObjectTpe <:< common)
+            unrelatedTypes()
         }
-        else if (isWarnable && !isCaseEquals) {
-          if (isNew(qual)) // new X == y
-            nonSensiblyNew()
-          else if (isNew(args.head) && (receiver.isEffectivelyFinal || isReferenceOp))   // object X ; X == new Y
-            nonSensiblyNew()
-          else if (receiver.isEffectivelyFinal && !(receiver isSubClass actual) && !actual.isRefinementClass) {  // object X, Y; X == Y
-            if (isEitherNullable)
-              nonSensible("non-null ", false)
-            else
-              nonSensiblyNeq()
+        // warn if actual has a case parent that is not same as receiver's;
+        // if actual is not a case, then warn if no common supertype, as below
+        if (isCaseEquals) {
+          def thisCase = receiver.info.member(nme.equals_).owner
+          actual.info.baseClasses.find(_.isCase) match {
+            case Some(p) if p != thisCase => nonSensible("case class ", false)
+            case None =>
+              // stronger message on (Some(1) == None)
+              //if (receiver.isCase && receiver.isEffectivelyFinal && !(receiver isSubClass actual)) nonSensiblyNeq()
+              //else
+              // if a class, it must be super to thisCase (and receiver) since not <: thisCase
+              if (!actual.isTrait && !(receiver isSubClass actual)) nonSensiblyNeq()
+              else if (!haveSubclassRelationship) warnIfLubless()
+            case _ =>
           }
         }
-
-        // warn if one but not the other is a derived value class
-        // this is especially important to enable transitioning from
-        // regular to value classes without silent failures.
-        if (isNonsenseValueClassCompare)
-          unrelatedTypes()
-        // possibleNumericCount is insufficient or this will warn on e.g. Boolean == j.l.Boolean
-        else if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
-          // better to have lubbed and lost
-          def warnIfLubless(): Unit = {
-            val common = global.lub(List(actual.tpe, receiver.tpe))
-            if (ObjectClass.tpe <:< common)
-              unrelatedTypes()
-          }
-          // warn if actual has a case parent that is not same as receiver's;
-          // if actual is not a case, then warn if no common supertype, as below
-          if (isCaseEquals) {
-            def thisCase = receiver.info.member(nme.equals_).owner
-            actual.info.baseClasses.find(_.isCase) match {
-              case Some(p) if p != thisCase => nonSensible("case class ", false)
-              case None =>
-                // stronger message on (Some(1) == None)
-                //if (receiver.isCase && receiver.isEffectivelyFinal && !(receiver isSubClass actual)) nonSensiblyNeq()
-                //else
-                // if a class, it must be super to thisCase (and receiver) since not <: thisCase
-                if (!actual.isTrait && !(receiver isSubClass actual)) nonSensiblyNeq()
-                else if (!haveSubclassRelationship) warnIfLubless()
-              case _ =>
-            }
-          }
-          // warn only if they have no common supertype below Object
-          else if (!haveSubclassRelationship) {
-            warnIfLubless()
-          }
+        // warn only if they have no common supertype below Object
+        else if (!haveSubclassRelationship) {
+          warnIfLubless()
         }
+      }
+    }
+    /** Sensibility check examines flavors of equals. */
+    def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match {
+      case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) =>
+        checkSensibleEquals(pos, qual, name, fn.symbol, args.head)
       case _ =>
     }
 
@@ -1251,8 +1139,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
 
     /* Convert a reference to a case factory of type `tpe` to a new of the class it produces. */
     def toConstructor(pos: Position, tpe: Type): Tree = {
-      var rtpe = tpe.finalResultType
-      assert(rtpe.typeSymbol hasFlag CASE, tpe);
+      val rtpe = tpe.finalResultType
+      assert(rtpe.typeSymbol hasFlag CASE, tpe)
       localTyper.typedOperator {
         atPos(pos) {
           Select(New(TypeTree(rtpe)), rtpe.typeSymbol.primaryConstructor)
@@ -1270,57 +1158,61 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
       finally popLevel()
     }
 
-    /** Eliminate ModuleDefs.
-     *   - A top level object is replaced with their module class.
-     *   - An inner object is transformed into a module var, created on first access.
+    /** Eliminate ModuleDefs. In all cases the ModuleDef (carrying a module symbol) is
+     *  replaced with a ClassDef (carrying the corresponding module class symbol) with additional
+     *  trees created as follows:
      *
-     *  In both cases, this transformation returns the list of replacement trees:
-     *   - Top level: the module class accessor definition
-     *   - Inner: a class definition, declaration of module var, and module var accessor
+     *  1) A statically reachable object (either top-level or nested only in objects) receives
+     *     no additional trees.
+     *  2) An inner object which matches an existing member (e.g. implements an interface)
+     *     receives an accessor DefDef to implement the interface.
+     *  3) An inner object otherwise receives a private ValDef which declares a module var
+     *     (the field which holds the module class - it has a name like Foo$module) and an
+     *     accessor for that field. The instance is created lazily, on first access.
      */
-    private def eliminateModuleDefs(tree: Tree): List[Tree] = {
-      val ModuleDef(mods, name, impl) = tree
-      val sym      = tree.symbol
-      val classSym = sym.moduleClass
-      val cdef     = ClassDef(mods | MODULE, name.toTypeName, Nil, impl) setSymbol classSym setType NoType
-
-      def findOrCreateModuleVar() = localTyper.typedPos(tree.pos) {
-        // See SI-5012, SI-6712.
+    private def eliminateModuleDefs(moduleDef: Tree): List[Tree] = exitingRefchecks {
+      val ModuleDef(_, _, impl) = moduleDef
+      val module        = moduleDef.symbol
+      val site          = module.owner
+      val moduleName    = module.name.toTermName
+      // The typer doesn't take kindly to seeing this ClassDef; we have to
+      // set NoType so it will be ignored.
+      val cdef          = ClassDef(module.moduleClass, impl) setType NoType
+
+      // Create the module var unless the immediate owner is a class and
+      // the module var already exists there. See SI-5012, SI-6712.
+      def findOrCreateModuleVar() = {
         val vsym = (
-          if (sym.owner.isTerm) NoSymbol
-          else sym.enclClass.info.decl(nme.moduleVarName(sym.name.toTermName))
+          if (site.isTerm) NoSymbol
+          else site.info decl nme.moduleVarName(moduleName)
         )
-        // In case we are dealing with local symbol then we already have
-        // to correct error with forward reference
-        if (vsym == NoSymbol) gen.mkModuleVarDef(sym)
-        else ValDef(vsym)
+        vsym orElse (site newModuleVarSymbol module)
       }
-      def createStaticModuleAccessor() = afterRefchecks {
-        val method = (
-          sym.owner.newMethod(sym.name.toTermName, sym.pos, (sym.flags | STABLE) & ~MODULE)
-            setInfoAndEnter NullaryMethodType(sym.moduleClass.tpe)
-        )
-        localTyper.typedPos(tree.pos)(gen.mkModuleAccessDef(method, sym))
+      def newInnerObject() = {
+        // Create the module var unless it is already in the module owner's scope.
+        // The lookup is on module.enclClass and not module.owner lest there be a
+        // nullary method between us and the class; see SI-5012.
+        val moduleVar = findOrCreateModuleVar()
+        val rhs       = gen.newModule(module, moduleVar.tpe)
+        val body      = if (site.isTrait) rhs else gen.mkAssignAndReturn(moduleVar, rhs)
+        val accessor  = DefDef(module, body.changeOwner(moduleVar -> module))
+
+        ValDef(moduleVar) :: accessor :: Nil
       }
-      def createInnerModuleAccessor(vdef: Tree) = List(
-        vdef,
-        localTyper.typedPos(tree.pos) {
-          val vsym = vdef.symbol
-          afterRefchecks {
-            val rhs  = gen.newModule(sym, vsym.tpe)
-            val body = if (sym.owner.isTrait) rhs else gen.mkAssignAndReturn(vsym, rhs)
-            DefDef(sym, body.changeOwner(vsym -> sym))
-          }
-        }
-      )
-      transformTrees(cdef :: {
-        if (!sym.isStatic)
-          createInnerModuleAccessor(findOrCreateModuleVar)
-        else if (sym.isOverridingSymbol)
-          List(createStaticModuleAccessor())
+      def matchingInnerObject() = {
+        val newFlags = (module.flags | STABLE) & ~MODULE
+        val newInfo  = NullaryMethodType(module.moduleClass.tpe)
+        val accessor = site.newMethod(moduleName, module.pos, newFlags) setInfoAndEnter newInfo
+
+        DefDef(accessor, Select(This(site), module)) :: Nil
+      }
+      val newTrees = cdef :: (
+        if (module.isStatic)
+          if (module.isOverridingSymbol) matchingInnerObject() else Nil
         else
-          Nil
-      })
+          newInnerObject()
+      )
+      transformTrees(newTrees map localTyper.typedPos(moduleDef.pos))
     }
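
A rough, hand-written approximation of what the rewrite above produces for case 3, an ordinary inner object (names such as Inner$module are illustrative; the lazy null-check shown here is actually supplied by later phases):

    // User code:
    class Outer {
      object Inner { def x = 1 }
    }

    // is lowered to roughly:
    class OuterLowered {
      final class Inner$ { def x = 1 }                 // the ClassDef carrying the module class
      private[this] var Inner$module: Inner$ = null    // the module var
      def Inner: Inner$ = {                            // the accessor; instance created on first access
        if (Inner$module eq null) Inner$module = new Inner$
        Inner$module
      }
    }
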
 
     def transformStat(tree: Tree, index: Int): List[Tree] = tree match {
@@ -1334,11 +1226,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
         }
       case ModuleDef(_, _, _) => eliminateModuleDefs(tree)
       case ValDef(_, _, _, _) =>
-        val tree1 @ ValDef(_, _, _, rhs) = transform(tree) // important to do before forward reference check
+        val tree1 = transform(tree) // important to do before forward reference check
         if (tree1.symbol.isLazy) tree1 :: Nil
         else {
           val lazySym = tree.symbol.lazyAccessorOrSelf
-          if (lazySym.isLocal && index <= currentLevel.maxindex) {
+          if (lazySym.isLocalToBlock && index <= currentLevel.maxindex) {
             debuglog("refsym = " + currentLevel.refsym)
             unit.error(currentLevel.refpos, "forward reference extends over definition of " + lazySym)
           }
@@ -1355,7 +1247,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
       catch {
         case ex: TypeError =>
           unit.error(tree0.pos, ex.getMessage())
-          if (settings.explaintypes.value) {
+          if (settings.explaintypes) {
             val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, argtps).bounds)
             (argtps, bounds).zipped map ((targ, bound) => explainTypes(bound.lo, targ))
             (argtps, bounds).zipped map ((targ, bound) => explainTypes(targ, bound.hi))
@@ -1378,22 +1270,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
         false
     }
 
-    /** If symbol is deprecated, and the point of reference is not enclosed
-     *  in either a deprecated member or a scala bridge method, issue a warning.
-     */
-    private def checkDeprecated(sym: Symbol, pos: Position) {
+    // Note: if a symbol has both @deprecated and @migration annotations and both
+    // warnings are enabled, only the first one checked here will be emitted.
+    // I assume that's a consequence of some code trying to avoid noise by suppressing
+    // warnings after the first, but I think it'd be better if we didn't have to
+    // arbitrarily choose one as more important than the other.
+    private def checkUndesiredProperties(sym: Symbol, pos: Position) {
+      // If symbol is deprecated, and the point of reference is not enclosed
+      // in either a deprecated member or a scala bridge method, issue a warning.
       if (sym.isDeprecated && !currentOwner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) {
         unit.deprecationWarning(pos, "%s%s is deprecated%s".format(
           sym, sym.locationString, sym.deprecationMessage map (": " + _) getOrElse "")
         )
       }
-    }
-
-    /** Similar to deprecation: check if the symbol is marked with @migration
-     *  indicating it has changed semantics between versions.
-     */
-    private def checkMigration(sym: Symbol, pos: Position) = {
-      if (sym.hasMigrationAnnotation) {
+      // Similar to deprecation: check if the symbol is marked with @migration
+      // indicating it has changed semantics between versions.
+      if (sym.hasMigrationAnnotation && settings.Xmigration.value != NoScalaVersion) {
         val changed = try
           settings.Xmigration.value < ScalaVersion(sym.migrationVersion.get)
         catch {
@@ -1405,9 +1297,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
         if (changed)
           unit.warning(pos, s"${sym.fullLocationString} has changed semantics in version ${sym.migrationVersion.get}:\n${sym.migrationMessage.get}")
       }
-    }
-
-    private def checkCompileTimeOnly(sym: Symbol, pos: Position) = {
+      // See an explanation of compileTimeOnly in its scaladoc at scala.annotation.compileTimeOnly.
       if (sym.isCompileTimeOnly) {
         def defaultMsg =
           sm"""Reference to ${sym.fullLocationString} should not have survived past type checking,
@@ -1476,7 +1366,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
         // if the unnormalized type is accessible, that's good enough
         if (inaccessible.isEmpty) ()
         // or if the normalized type is, that's good too
-        else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.normalize, member).isEmpty) ()
+        else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.dealiasWiden, member).isEmpty) ()
         // otherwise warn about the inaccessible syms in the unnormalized type
         else inaccessible foreach (sym => warnLessAccessible(sym, member))
       }
@@ -1487,6 +1377,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
       member.typeParams.map(_.info.bounds.hi.widen) foreach checkAccessibilityOfType
     }
 
+    private def checkByNameRightAssociativeDef(tree: DefDef) {
+      tree match {
+        case DefDef(_, name, _, params :: _, _, _) =>
+          if (settings.lint && !treeInfo.isLeftAssoc(name.decodedName) && params.exists(p => isByName(p.symbol)))
+            unit.warning(tree.pos,
+              "by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see SI-1980.")
+        case _ =>
+      }
+    }
+
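
The SI-1980 lint above warns because right-associative calls are desugared through a temporary value, so a by-name argument on the left-hand side is forced eagerly. A small hypothetical illustration:

    class Sink {
      // -Xlint: by-name parameters will be evaluated eagerly when called as a right-associative infix operator
      def +=:(msg: => String): Unit = ()
    }

    object EagerDemo {
      def expensive(): String = { println("evaluated!"); "msg" }
      def run(): Unit = {
        val sink = new Sink
        expensive() +=: sink  // desugars to { val x = expensive(); sink.+=:(x) }, forcing the argument
      }
    }
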
     /** Check that a deprecated val or def does not override a
       * concrete, non-deprecated method.  If it does, then
       * deprecation is meaningless.
@@ -1519,8 +1419,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
       case TypeRef(pre, sym, args) =>
         tree match {
           case tt: TypeTree if tt.original == null => // SI-7783 don't warn about inferred types
-          case _ =>
-            checkDeprecated(sym, tree.pos)
+                                                      // FIXME: reconcile this check with one in resetAttrs
+          case _ => checkUndesiredProperties(sym, tree.pos)
         }
         if(sym.isJavaDefined)
           sym.typeParams foreach (_.cookJavaRawInfo())
@@ -1532,7 +1432,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
     private def checkTypeRefBounds(tp: Type, tree: Tree) = {
       var skipBounds = false
       tp match {
-        case AnnotatedType(ann :: Nil, underlying, selfSym) if ann.symbol == UncheckedBoundsClass =>
+        case AnnotatedType(ann :: Nil, underlying) if ann.symbol == UncheckedBoundsClass =>
           skipBounds = true
           underlying
         case TypeRef(pre, sym, args) =>
@@ -1562,7 +1462,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
           applyChecks(sym.annotations)
           // validate implicitNotFoundMessage
           analyzer.ImplicitNotFoundMsg.check(sym) foreach { warn =>
-            unit.warning(tree.pos, "Invalid implicitNotFound message for %s%s:\n%s".format(sym, sym.locationString, warn))
+            unit.warning(tree.pos, f"Invalid implicitNotFound message for ${sym}%s${sym.locationString}%s:%n$warn")
           }
 
         case tpt@TypeTree() =>
@@ -1575,7 +1475,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
           }
 
           doTypeTraversal(tree) {
-            case tp @ AnnotatedType(annots, _, _)  =>
+            case tp @ AnnotatedType(annots, _)  =>
               applyChecks(annots)
             case tp =>
           }
@@ -1590,9 +1490,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
         case TypeApply(fun, targs) =>
           isClassTypeAccessible(fun)
         case Select(module, apply) =>
-          // Fixes SI-5626. Classes in refinement types cannot be constructed with `new`. In this case,
-          // the companion class is actually not a ClassSymbol, but a reference to an abstract type.
-          module.symbol.companionClass.isClass
+          ( // SI-4859 `CaseClass1().InnerCaseClass2()` must not be rewritten to `new InnerCaseClass2()`;
+            //          {expr; Outer}.Inner() must not be rewritten to `new Outer.Inner()`.
+            treeInfo.isQualifierSafeToElide(module) &&
+            // SI-5626 Classes in refinement types cannot be constructed with `new`. In this case,
+            // the companion class is actually not a ClassSymbol, but a reference to an abstract type.
+            module.symbol.companionClass.isClass
+          )
       }
 
       val doTransform =
@@ -1627,7 +1531,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
           transform(qual)
 
       case Apply(fn, args) =>
-        // sensicality should be subsumed by the unreachability/exhaustivity/irrefutability analyses in the pattern matcher
+        // sensicality should be subsumed by the unreachability/exhaustivity/irrefutability
+        // analyses in the pattern matcher
         if (!inPattern) {
           checkImplicitViewOptionApply(tree.pos, fn, args)
           checkSensible(tree.pos, fn, args)
@@ -1636,33 +1541,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
         tree
     }
     private def transformSelect(tree: Select): Tree = {
-      val Select(qual, name) = tree
+      val Select(qual, _) = tree
       val sym = tree.symbol
 
-      /** Note: if a symbol has both @deprecated and @migration annotations and both
-       *  warnings are enabled, only the first one checked here will be emitted.
-       *  I assume that's a consequence of some code trying to avoid noise by suppressing
-       *  warnings after the first, but I think it'd be better if we didn't have to
-       *  arbitrarily choose one as more important than the other.
-       */
-      checkDeprecated(sym, tree.pos)
-      if(settings.Xmigration.value != NoScalaVersion)
-        checkMigration(sym, tree.pos)
-      checkCompileTimeOnly(sym, tree.pos)
+      checkUndesiredProperties(sym, tree.pos)
       checkDelayedInitSelect(qual, sym, tree.pos)
 
-      if (sym eq NoSymbol) {
-        unit.warning(tree.pos, "Select node has NoSymbol! " + tree + " / " + tree.tpe)
-      }
-      else if (currentClass != sym.owner && sym.hasLocalFlag) {
-        var o = currentClass
-        var hidden = false
-        while (!hidden && o != sym.owner && o != sym.owner.moduleClass && !o.isPackage) {
-          hidden = o.isTerm || o.isPrivateLocal
-          o = o.owner
-        }
-        if (!hidden) escapedPrivateLocals += sym
-      }
+      if (!sym.exists)
+        devWarning("Select node has NoSymbol! " + tree + " / " + tree.tpe)
+      else if (sym.isLocalToThis)
+        varianceValidator.checkForEscape(sym, currentClass)
 
       def checkSuper(mix: Name) =
         // term should have been eliminated by super accessors
@@ -1678,7 +1566,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
     private def transformIf(tree: If): Tree = {
       val If(cond, thenpart, elsepart) = tree
       def unitIfEmpty(t: Tree): Tree =
-        if (t == EmptyTree) Literal(Constant()).setPos(tree.pos).setType(UnitClass.tpe) else t
+        if (t == EmptyTree) Literal(Constant(())).setPos(tree.pos).setType(UnitTpe) else t
 
       cond.tpe match {
         case ConstantType(value) =>
@@ -1695,8 +1583,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
         // on Unit, in which case we had better let it slide.
         val isOk = (
              sym.isGetter
-          || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))
           || (sym.name containsName nme.DEFAULT_GETTER_STRING)
+          || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))
         )
         if (!isOk)
           unit.warning(sym.pos, s"side-effecting nullary methods are discouraged: suggest defining as `def ${sym.name.decode}()` instead")
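
A minimal, hypothetical illustration of the nullary-Unit warning issued just above (emitted when the corresponding warning flag is enabled):

    class Cache {
      def clear: Unit = println("cleared")  // warning: side-effecting nullary methods are discouraged: suggest defining as `def clear()` instead
      def size: Int = 0                     // fine: not Unit-returning, no warning
    }
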
@@ -1705,14 +1593,18 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
 
     // Verify classes extending AnyVal meet the requirements
     private def checkAnyValSubclass(clazz: Symbol) = {
-      if ((clazz isSubClass AnyValClass) && !isPrimitiveValueClass(clazz)) {
+      if (clazz.isDerivedValueClass) {
         if (clazz.isTrait)
           unit.error(clazz.pos, "Only classes (not traits) are allowed to extend AnyVal")
-        else if ((clazz != AnyValClass) && clazz.hasFlag(ABSTRACT))
+        else if (clazz.hasAbstractFlag)
           unit.error(clazz.pos, "`abstract' modifier cannot be used with value classes")
       }
     }
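
As seen from user code, the value-class restrictions checked here produce errors like the following (illustrative):

    class Meters(val n: Double) extends AnyVal        // ok: a derived value class
    abstract class Bad(val n: Double) extends AnyVal  // error: `abstract' modifier cannot be used with value classes
    trait AlsoBad extends AnyVal                      // error: Only classes (not traits) are allowed to extend AnyVal
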
 
+    private def checkUnexpandedMacro(t: Tree) =
+      if (!t.isDef && t.hasSymbolField && t.symbol.isTermMacro)
+        unit.error(t.pos, "macro has not been expanded")
+
     override def transform(tree: Tree): Tree = {
       val savedLocalTyper = localTyper
       val savedCurrentApplication = currentApplication
@@ -1731,21 +1623,29 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
           case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) =>
             checkDeprecatedOvers(tree)
             checkInfiniteLoop(tree.asInstanceOf[ValOrDefDef])
-            if (settings.warnNullaryUnit.value)
+            if (settings.warnNullaryUnit)
               checkNullaryMethodReturnType(sym)
-            if (settings.warnInaccessible.value) {
+            if (settings.warnInaccessible) {
               if (!sym.isConstructor && !sym.isEffectivelyFinal && !sym.isSynthetic)
                 checkAccessibilityOfReferencedTypes(tree)
             }
+            tree match {
+              case dd: DefDef => checkByNameRightAssociativeDef(dd)
+              case _          =>
+            }
             tree
 
           case Template(parents, self, body) =>
             localTyper = localTyper.atOwner(tree, currentOwner)
             validateBaseTypes(currentOwner)
-            checkOverloadedRestrictions(currentOwner)
+            checkOverloadedRestrictions(currentOwner, currentOwner)
+            // SI-7870 default getters for constructors live in the companion module
+            checkOverloadedRestrictions(currentOwner, currentOwner.companionModule)
             val bridges = addVarargBridges(currentOwner)
             checkAllOverrides(currentOwner)
             checkAnyValSubclass(currentOwner)
+            if (currentOwner.isDerivedValueClass)
+              currentOwner.primaryConstructor makeNotPrivate NoSymbol // SI-6601, must be done *after* pickler!
             if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree
 
           case dc@TypeTreeWithDeferredRefCheck() => abort("adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc")
@@ -1797,12 +1697,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
             enterReference(tree.pos, tpt.tpe.typeSymbol)
             tree
 
-          case Typed(_, Ident(tpnme.WILDCARD_STAR)) if !isRepeatedParamArg(tree) =>
+          case treeInfo.WildcardStarArg(_) if !isRepeatedParamArg(tree) =>
             unit.error(tree.pos, "no `: _*' annotation allowed here\n"+
               "(such annotations are only allowed in arguments to *-parameters)")
             tree
 
           case Ident(name) =>
+            checkUndesiredProperties(sym, tree.pos)
             transformCaseApply(tree,
               if (name != nme.WILDCARD && name != tpnme.WILDCARD_STAR) {
                 assert(sym != NoSymbol, "transformCaseApply: name = " + name.debugString + " tree = " + tree + " / " + tree.getClass) //debug
@@ -1822,19 +1723,33 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
 
           case _ => tree
         }
+
         // skip refchecks in patterns....
         result = result match {
           case CaseDef(pat, guard, body) =>
-            inPattern = true
-            val pat1 = transform(pat)
-            inPattern = false
+            val pat1 = savingInPattern {
+              inPattern = true
+              transform(pat)
+            }
             treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
           case LabelDef(_, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
-            val old = inPattern
-            inPattern = true
-            val res = deriveLabelDef(result)(transform) // TODO SI-7756 Too broad! The code from the original case body should be fully refchecked!
-            inPattern = old
-            res
+            savingInPattern {
+              inPattern = true
+              deriveLabelDef(result)(transform)
+            }
+          case Apply(fun, args) if fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol) =>
+            savingInPattern {
+              // SI-7756 If we were in a translated pattern, we can now switch out of pattern mode, as the label apply signals
+              //         that we are in the user-supplied code in the case body.
+              //
+              //         Relies on the translation of:
+              //            (null: Any) match { case x: List[_] => x; x.reverse; case _ => }'
+              //         to:
+              //            <synthetic> val x2: List[_] = (x1.asInstanceOf[List[_]]: List[_]);
+              //                  matchEnd4({ x2; x2.reverse}) // case body is an argument to a label apply.
+              inPattern = false
+              super.transform(result)
+            }
           case ValDef(_, _, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
             deriveValDef(result)(transform) // SI-7716 Don't refcheck the tpt of the synthetic val that holds the selector.
           case _ =>
@@ -1843,14 +1758,19 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
         result match {
           case ClassDef(_, _, _, _)
              | TypeDef(_, _, _, _) =>
-            if (result.symbol.isLocal || result.symbol.owner.isPackageClass)
+            if (result.symbol.isLocalToBlock || result.symbol.isTopLevel)
               varianceValidator.traverse(result)
+          case tt @ TypeTree() if tt.original != null =>
+            varianceValidator.traverse(tt.original) // See SI-7872
           case _ =>
         }
+
+        checkUnexpandedMacro(result)
+
         result
       } catch {
         case ex: TypeError =>
-          if (settings.debug.value) ex.printStackTrace()
+          if (settings.debug) ex.printStackTrace()
           unit.error(tree.pos, ex.getMessage())
           tree
       } finally {
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index 64c5b41..57f27a0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -4,7 +4,165 @@ package typechecker
 trait StdAttachments {
   self: Analyzer =>
 
-  type UnaffiliatedMacroContext = scala.reflect.macros.runtime.Context
+  import global._
+
+  /** Carries information necessary to expand the host tree.
+   *  At times we need to store this info, because macro expansion can be delayed until its targs are inferred.
+   *  After a macro application has been successfully expanded, this attachment is destroyed.
+   */
+  type UnaffiliatedMacroContext = scala.reflect.macros.contexts.Context
   type MacroContext = UnaffiliatedMacroContext { val universe: self.global.type }
   case class MacroRuntimeAttachment(delayed: Boolean, typerContext: Context, macroContext: Option[MacroContext])
-}
\ No newline at end of file
+
+  /** Scratchpad for the macro expander, which is used to store all intermediate data except the details about the runtime.
+   */
+  case class MacroExpanderAttachment(original: Tree, desugared: Tree)
+
+  /** Loads underlying MacroExpanderAttachment from a macro expandee or returns a default value for that attachment.
+   */
+  def macroExpanderAttachment(tree: Tree): MacroExpanderAttachment =
+    tree.attachments.get[MacroExpanderAttachment] getOrElse {
+      tree match {
+        case Apply(fn, _) if tree.isInstanceOf[ApplyToImplicitArgs] => macroExpanderAttachment(fn)
+        case _ => MacroExpanderAttachment(tree, EmptyTree)
+      }
+    }
+
+  /** After macro expansion is completed, links the expandee and the expansion result
+   *  by annotating them both with a `MacroExpansionAttachment`.
+   */
+  def linkExpandeeAndDesugared(expandee: Tree, desugared: Tree): Unit = {
+    val metadata = MacroExpanderAttachment(expandee, desugared)
+    expandee updateAttachment metadata
+    desugared updateAttachment metadata
+  }
+
+  /** Is added by the macro engine to originals and results of macro expansions.
+   *  Stores the original expandee as it entered the `macroExpand` function.
+   */
+  case class MacroExpansionAttachment(expandee: Tree, expanded: Any)
+
+  /** Determines whether the target is either an original or a result of a macro expansion.
+   *  The parameter is of type `Any`, because macros can expand both into trees and into annotations.
+   */
+  def hasMacroExpansionAttachment(any: Any): Boolean = any match {
+    case tree: Tree => tree.hasAttachment[MacroExpansionAttachment]
+    case _ => false
+  }
+
+  /** Returns the original tree of the macro expansion if the argument is a macro expansion or EmptyTree otherwise.
+   */
+  def macroExpandee(tree: Tree): Tree = tree.attachments.get[MacroExpansionAttachment].map(_.expandee).getOrElse(EmptyTree)
+
+  /** After macro expansion is completed, links the expandee and the expansion result by annotating them both with a `MacroExpansionAttachment`.
+   *  The `expanded` parameter is of type `Any`, because macros can expand both into trees and into annotations.
+   */
+  def linkExpandeeAndExpanded(expandee: Tree, expanded: Any): Unit = {
+    val metadata = MacroExpansionAttachment(expandee, expanded)
+    expandee updateAttachment metadata
+    expanded match {
+      case expanded: Tree => expanded updateAttachment metadata
+      case _ => // do nothing
+    }
+  }
+
+  /** When present, suppresses macro expansion for the host.
+   *  This is occasionally necessary, e.g. to prohibit eta-expansion of macros.
+   *
+   *  Does not affect expandability of child nodes, there's context.withMacrosDisabled for that
+   *  (but think thrice before using that API - see the discussion at https://github.com/scala/scala/pull/1639).
+   */
+  case object SuppressMacroExpansionAttachment
+
+  /** Suppresses macro expansion of the tree by putting SuppressMacroExpansionAttachment on it.
+   */
+  def suppressMacroExpansion(tree: Tree) = tree.updateAttachment(SuppressMacroExpansionAttachment)
+
+  /** Unsuppresses macro expansion of the tree by removing SuppressMacroExpansionAttachment from it and its children.
+   */
+  def unsuppressMacroExpansion(tree: Tree): Tree = {
+    tree.removeAttachment[SuppressMacroExpansionAttachment.type]
+    tree match {
+      // see the comment to `isMacroExpansionSuppressed` to learn why we need
+      // a special traversal strategy here
+      case Apply(fn, _) => unsuppressMacroExpansion(fn)
+      case TypeApply(fn, _) => unsuppressMacroExpansion(fn)
+      case _ => // do nothing
+    }
+    tree
+  }
+
+  /** Determines whether a tree should not be expanded, because someone has put SuppressMacroExpansionAttachment on it or one of its children.
+   */
+  def isMacroExpansionSuppressed(tree: Tree): Boolean =
+    (  settings.Ymacroexpand.value == settings.MacroExpand.None // SI-6812
+    || tree.hasAttachment[SuppressMacroExpansionAttachment.type]
+    || (tree match {
+        // we have to account for the fact that during typechecking an expandee might become wrapped,
+        // i.e. surrounded by an inferred implicit argument application or by an inferred type argument application.
+        // in that case the expandee itself will no longer be suppressed and we need to look at the core
+        case Apply(fn, _)     => isMacroExpansionSuppressed(fn)
+        case TypeApply(fn, _) => isMacroExpansionSuppressed(fn)
+        case _                => false
+      })
+    )
+
+  /** After being synthesized by the parser, primary constructors aren't fully baked yet.
+   *  A call to super in such constructors is just a fill-me-in-later dummy resolved later
+   *  by `parentTypes`. This attachment coordinates `parentTypes` and `typedTemplate` and
+   *  allows them to complete the synthesis.
+   */
+  case class SuperArgsAttachment(argss: List[List[Tree]])
+
+  /** Convenience method for `SuperArgsAttachment`.
+   *  Compared with `MacroRuntimeAttachment` this attachment has a different usage pattern,
+   *  so it really benefits from a dedicated extractor.
+   */
+  def superArgs(tree: Tree): Option[List[List[Tree]]] =
+    tree.attachments.get[SuperArgsAttachment] collect { case SuperArgsAttachment(argss) => argss }
+
+  /** Determines whether the given tree has an associated SuperArgsAttachment.
+   */
+  def hasSuperArgs(tree: Tree): Boolean = superArgs(tree).nonEmpty
+
+  /** @see markMacroImplRef
+   */
+  case object MacroImplRefAttachment
+
+  /** Marks the tree as a macro impl reference, which is a naked reference to a method.
+   *
+   *  This is necessary for typechecking macro impl references (see `DefaultMacroCompiler.defaultResolveMacroImpl`),
+   *  because otherwise typing a naked reference will result in the "follow this method with `_' if you want to
+   *  treat it as a partially applied function" errors.
+   *
+   *  This mark suppresses adapt except for when the annottee is a macro application.
+   */
+  def markMacroImplRef(tree: Tree): Tree = tree.updateAttachment(MacroImplRefAttachment)
+
+  /** Unmarks the tree as a macro impl reference (see `markMacroImplRef` for more information).
+   *
+   *  This is necessary when a tree that was previously deemed to be a macro impl reference,
+   *  typechecks to be a macro application. Then we need to unmark it, expand it and try to treat
+   *  its expansion as a macro impl reference.
+   */
+  def unmarkMacroImplRef(tree: Tree): Tree = tree.removeAttachment[MacroImplRefAttachment.type]
+
+  /** Determines whether a tree should or should not be adapted,
+   *  because someone has put MacroImplRefAttachment on it.
+   */
+  def isMacroImplRef(tree: Tree): Boolean = tree.hasAttachment[MacroImplRefAttachment.type]
+
+  /** Since mkInvoke, the applyDynamic/selectDynamic/etc desugarer, is disconnected
+   *  from typedNamedApply, the applyDynamicNamed argument rewriter, the latter
+   *  doesn’t know whether it needs to apply the rewriting because the application
+   *  has just been desugared, or whether it should hold off because it has already
+   *  performed a desugaring on this tree. This has led to SI-8006.
+   *
+   *  This attachment solves the problem by providing a means of communication
+   *  between the two Dynamic desugarers.
+   */
+  case object DynamicRewriteAttachment
+  def markDynamicRewrite(tree: Tree): Tree = tree.updateAttachment(DynamicRewriteAttachment)
+  def unmarkDynamicRewrite(tree: Tree): Tree = tree.removeAttachment[DynamicRewriteAttachment.type]
+  def isDynamicRewrite(tree: Tree): Boolean = tree.attachments.get[DynamicRewriteAttachment.type].isDefined
+}
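
The attachment comments above describe looking through inferred Apply/TypeApply
wrappers to reach the marked core of an expandee. A standalone sketch of that
core-lookup traversal, using runtime-reflection trees and a plain set in place
of the attachment (CoreLookupDemo, macroImpl and ev are invented names, not
part of this patch):

    import scala.reflect.runtime.universe._

    object CoreLookupDemo extends App {
      // stands in for SuppressMacroExpansionAttachment: trees marked by identity
      val suppressed = scala.collection.mutable.Set.empty[Tree]

      // same shape as isMacroExpansionSuppressed: check the tree, then its core
      def isSuppressed(tree: Tree): Boolean = suppressed(tree) || (tree match {
        case Apply(fn, _)     => isSuppressed(fn)
        case TypeApply(fn, _) => isSuppressed(fn)
        case _                => false
      })

      val core    = q"macroImpl"        // the tree we mark
      val wrapped = q"$core[Int](ev)"   // later wrapped by inferred type/implicit applications

      suppressed += core
      println(isSuppressed(core))     // true
      println(isSuppressed(wrapped))  // true: found by looking through the wrappers
    }
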
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index bad4938..87da565 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -1,9 +1,11 @@
+
 /* NSC -- new Scala compiler
  * Copyright 2005-2013 LAMP/EPFL
  * @author Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package typechecker
 
 import scala.collection.{ mutable, immutable }
@@ -28,7 +30,7 @@ import symtab.Flags._
  */
 abstract class SuperAccessors extends transform.Transform with transform.TypingTransformers {
   import global._
-  import definitions.{ UnitClass, ObjectClass, isRepeatedParamType, isByNameParamType, Any_asInstanceOf }
+  import definitions._
   import analyzer.{ restrictionError }
 
   /** the following two members override abstract members in Transform */
@@ -60,16 +62,16 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
       val clazz              = qual.symbol
       val supername          = nme.superName(name)
       val superAcc = clazz.info.decl(supername).suchThat(_.alias == sym) orElse {
-        debuglog("add super acc " + sym + sym.locationString + " to `" + clazz);//debug
-        val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE) setAlias sym
+        debuglog(s"add super acc ${sym.fullLocationString} to $clazz")
+        val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE | ARTIFACT) setAlias sym
         val tpe = clazz.thisType memberType sym match {
-          case t if sym.isModule && !sym.isMethod => NullaryMethodType(t)
-          case t                                  => t
+          case t if sym.isModuleNotMethod => NullaryMethodType(t)
+          case t                          => t
         }
         acc setInfoAndEnter (tpe cloneInfo acc)
         // Diagnostic for SI-7091
         if (!accDefs.contains(clazz))
-          reporter.error(sel.pos, s"Internal error: unable to store accessor definition in ${clazz}. clazz.isPackage=${clazz.isPackage}. Accessor required for ${sel} (${showRaw(sel)})")
+          reporter.error(sel.pos, s"Internal error: unable to store accessor definition in ${clazz}. clazz.hasPackageFlag=${clazz.hasPackageFlag}. Accessor required for ${sel} (${showRaw(sel)})")
         else storeAccessorDefinition(clazz, DefDef(acc, EmptyTree))
         acc
       }
@@ -108,11 +110,11 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
       val clazz = sup.symbol
 
       if (sym.isDeferred) {
-        val member = sym.overridingSymbol(clazz);
+        val member = sym.overridingSymbol(clazz)
         if (mix != tpnme.EMPTY || member == NoSymbol ||
             !(member.isAbstractOverride && member.isIncompleteIn(clazz)))
           unit.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+
-                               "unless it is overridden by a member declared `abstract' and `override'");
+                               "unless it is overridden by a member declared `abstract' and `override'")
       } else if (mix == tpnme.EMPTY && !sym.owner.isTrait){
         // SI-4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract.
         val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)
@@ -165,18 +167,6 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
               log("Expanded '%s' to '%s' in %s".format(savedName, s.name, sym))
             }
           }
-          if (settings.verbose.value && forScaladoc && !sym.isAnonymousClass) {
-            println("========== scaladoc of "+sym+" =============================")
-            println(toJavaDoc(expandedDocComment(sym)))
-            for (member <- sym.info.members) {
-              println(member+":"+sym.thisType.memberInfo(member)+"\n"+
-                      toJavaDoc(expandedDocComment(member, sym)))
-              for ((useCase, comment, pos) <- useCases(member, sym)) {
-                println("usecase "+useCase+":"+useCase.info)
-                println(toJavaDoc(comment))
-              }
-            }
-          }
           super.transform(tree)
           }
           transformClassDef
@@ -203,7 +193,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
           transformTemplate
 
         case TypeApply(sel @ Select(This(_), name), args) =>
-          mayNeedProtectedAccessor(sel, args, false)
+          mayNeedProtectedAccessor(sel, args, goToSuper = false)
 
         // set a flag for all type parameters with `@specialized` annotation so it can be pickled
         case typeDef: TypeDef if typeDef.symbol.deSkolemize.hasAnnotation(definitions.SpecializedClass) =>
@@ -231,10 +221,10 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
               // also exists in a superclass, because they may be surprised
               // to find out that a constructor parameter will shadow a
               // field. See SI-4762.
-              if (settings.lint.value) {
+              if (settings.lint) {
                 if (sym.isPrivateLocal && sym.paramss.isEmpty) {
                   qual.symbol.ancestors foreach { parent =>
-                    parent.info.decls filterNot (x => x.isPrivate || x.hasLocalFlag) foreach { m2 =>
+                    parent.info.decls filterNot (x => x.isPrivate || x.isLocalToThis) foreach { m2 =>
                       if (sym.name == m2.name && m2.isGetter && m2.accessed.isMutable) {
                         unit.warning(sel.pos,
                           sym.accessString + " " + sym.fullLocationString + " shadows mutable " + m2.name
@@ -260,9 +250,9 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
                   Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias)
                 }).asInstanceOf[Select]
                 debuglog("alias replacement: " + tree + " ==> " + result); //debug
-                localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, true))
+                localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, beforeRefChecks = true))
               } else {
-                /**
+                /*
                  * A trait which extends a class and accesses a protected member
                  *  of that class cannot implement the necessary accessor method
                  *  because its implementation is in an implementation class (e.g.
@@ -279,20 +269,21 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
                   && sym.enclClass != currentClass
                   && !sym.owner.isPackageClass // SI-7091 no accessor needed package owned (ie, top level) symbols
                   && !sym.owner.isTrait
-                  && (sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass)
-                  && (qual.symbol.info.member(sym.name) ne NoSymbol)
-                  && !needsProtectedAccessor(sym, tree.pos))
+                  && sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass
+                  && qual.symbol.info.member(sym.name).exists
+                  && !needsProtectedAccessor(sym, tree.pos)
+                )
                 if (shouldEnsureAccessor) {
                   log("Ensuring accessor for call to protected " + sym.fullLocationString + " from " + currentClass)
                   ensureAccessor(sel)
                 }
                 else
-                  mayNeedProtectedAccessor(sel, EmptyTree.asList, false)
+                  mayNeedProtectedAccessor(sel, EmptyTree.asList, goToSuper = false)
               }
 
             case Super(_, mix) =>
               if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) {
-                if (!settings.overrideVars.value)
+                if (!settings.overrideVars)
                   unit.error(tree.pos, "super may be not be used on " + sym.accessedOrSelf)
               } else if (isDisallowed(sym)) {
                 unit.error(tree.pos, "super not allowed here: use this." + name.decode + " instead")
@@ -300,16 +291,16 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
               transformSuperSelect(sel)
 
             case _ =>
-              mayNeedProtectedAccessor(sel, EmptyTree.asList, true)
+              mayNeedProtectedAccessor(sel, EmptyTree.asList, goToSuper = true)
           }
           }
           transformSelect
 
-        case DefDef(mods, name, tparams, vparamss, tpt, rhs) if tree.symbol.isMethodWithExtension =>
-          treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, withInvalidOwner(transform(rhs)))
+        case DefDef(_, _, _, _, _, _) if tree.symbol.isMethodWithExtension =>
+          deriveDefDef(tree)(rhs => withInvalidOwner(transform(rhs)))
 
         case TypeApply(sel @ Select(qual, name), args) =>
-          mayNeedProtectedAccessor(sel, args, true)
+          mayNeedProtectedAccessor(sel, args, goToSuper = true)
 
         case Assign(lhs @ Select(qual, name), rhs) =>
           def transformAssign = {
@@ -317,8 +308,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
               lhs.symbol.isJavaDefined &&
               needsProtectedAccessor(lhs.symbol, tree.pos)) {
             debuglog("Adding protected setter for " + tree)
-            val setter = makeSetter(lhs);
-            debuglog("Replaced " + tree + " with " + setter);
+            val setter = makeSetter(lhs)
+            debuglog("Replaced " + tree + " with " + setter)
             transform(localTyper.typed(Apply(setter, List(qual, rhs))))
           } else
             super.transform(tree)
@@ -377,14 +368,14 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
      *  typed.
      */
     private def makeAccessor(tree: Select, targs: List[Tree]): Tree = {
-      val Select(qual, name) = tree
+      val Select(qual, _) = tree
       val sym = tree.symbol
       val clazz = hostForAccessorOf(sym, currentClass)
 
       assert(clazz != NoSymbol, sym)
       debuglog("Decided for host class: " + clazz)
 
-      val accName    = nme.protName(sym.originalName)
+      val accName    = nme.protName(sym.unexpandedName)
       val hasArgs    = sym.tpe.paramSectionCount > 0
       val memberType = refChecks.toScalaRepeatedParam(sym.tpe) // fix for #2413
 
@@ -402,7 +393,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
       }
 
       val protAcc = clazz.info.decl(accName).suchThat(s => s == NoSymbol || s.tpe =:= accType(s)) orElse {
-        val newAcc = clazz.newMethod(nme.protName(sym.originalName), tree.pos)
+        val newAcc = clazz.newMethod(nme.protName(sym.unexpandedName), tree.pos, newFlags = ARTIFACT)
         newAcc setInfoAndEnter accType(newAcc)
 
         val code = DefDef(newAcc, {
@@ -413,7 +404,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
           args.foldLeft(base)(Apply(_, _))
         })
 
-        debuglog("" + code)
+        debuglog("created protected accessor: " + code)
         storeAccessorDefinition(clazz, code)
         newAcc
       }
@@ -425,7 +416,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
           case _          => mkApply(TypeApply(selection, targs))
         }
       }
-      debuglog("Replaced " + tree + " with " + res)
+      debuglog(s"Replaced $tree with $res")
       if (hasArgs) localTyper.typedOperator(res) else localTyper.typed(res)
     }
 
@@ -462,12 +453,12 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
       assert(clazz != NoSymbol, field)
       debuglog("Decided for host class: " + clazz)
 
-      val accName = nme.protSetterName(field.originalName)
+      val accName = nme.protSetterName(field.unexpandedName)
       val protectedAccessor = clazz.info decl accName orElse {
-        val protAcc      = clazz.newMethod(accName, field.pos)
+        val protAcc      = clazz.newMethod(accName, field.pos, newFlags = ARTIFACT)
         val paramTypes   = List(clazz.typeOfThis, field.tpe)
         val params       = protAcc newSyntheticValueParams paramTypes
-        val accessorType = MethodType(params, UnitClass.tpe)
+        val accessorType = MethodType(params, UnitTpe)
 
         protAcc setInfoAndEnter accessorType
         val obj :: value :: Nil = params
@@ -496,9 +487,6 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
       def accessibleThroughSubclassing =
         validCurrentOwner && clazz.thisSym.isSubClass(sym.owner) && !clazz.isTrait
 
-      def packageAccessBoundry(sym: Symbol) =
-        sym.accessBoundary(sym.enclosingPackageClass)
-
       val isCandidate = (
            sym.isProtected
         && sym.isJavaDefined
@@ -554,7 +542,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
       case TypeRef(pre, _, _)      => isThisType(pre)
       case SingleType(pre, _)      => isThisType(pre)
       case RefinedType(parents, _) => parents exists isThisType
-      case AnnotatedType(_, tp, _) => isThisType(tp)
+      case AnnotatedType(_, tp)    => isThisType(tp)
       case _                       => false
     }
   }
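
For context on what transformSuperSelect and ensureAccessor arrange: a super
call from a trait to a concrete member of a parent class is routed through a
synthetic SUPERACCESSOR method generated in the class that mixes the trait in.
A minimal source-level illustration (Shape, Labelled and Point are invented
names; the accessor mentioned in the comment is schematic, not actual compiler
output):

    // The trait cannot emit an invokespecial for Shape.describe itself, so the
    // compiler adds a synthetic super accessor (roughly Labelled$$super$describe)
    // to Point and routes the trait's super call through it.
    class Shape { def describe: String = "a shape" }

    trait Labelled extends Shape {
      override def describe: String = "labelled " + super.describe
    }

    class Point extends Shape with Labelled

    object SuperAccessorDemo extends App {
      println((new Point).describe)  // prints: labelled a shape
    }
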
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index 242eb9c..9516f94 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -6,10 +6,10 @@
 package scala.tools.nsc
 package typechecker
 
-import symtab.Flags
+import scala.collection.{ mutable, immutable }
 import symtab.Flags._
-import scala.collection.mutable
 import scala.collection.mutable.ListBuffer
+import scala.language.postfixOps
 
 /** Synthetic method implementations for case classes and case objects.
  *
@@ -94,13 +94,13 @@ trait SyntheticMethods extends ast.TreeDSL {
     // like Tags and Arrays which are not robust and infer things
     // which they shouldn't.
     val accessorLub  = (
-      if (opt.experimental) {
-        global.weakLub(accessors map (_.tpe.finalResultType))._1 match {
+      if (settings.Xexperimental) {
+        global.weakLub(accessors map (_.tpe.finalResultType)) match {
           case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
           case tp                                            => tp
         }
       }
-      else AnyClass.tpe
+      else AnyTpe
     )
 
     def forwardToRuntime(method: Symbol): Tree =
@@ -121,70 +121,60 @@ trait SyntheticMethods extends ast.TreeDSL {
         (m0 ne meth) && !m0.isDeferred && !m0.isSynthetic && (m0.owner != AnyValClass) && (typeInClazz(m0) matches typeInClazz(meth))
       }
     }
-    def readConstantValue[T](name: String, default: T = null.asInstanceOf[T]): T = {
-      clazzMember(newTermName(name)).info match {
-        case NullaryMethodType(ConstantType(Constant(value))) => value.asInstanceOf[T]
-        case _                                                => default
-      }
-    }
     def productIteratorMethod = {
       createMethod(nme.productIterator, iteratorOfType(accessorLub))(_ =>
         gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(accessorLub), List(mkThis))
       )
     }
-    def projectionMethod(accessor: Symbol, num: Int) = {
-      createMethod(nme.productAccessorName(num), accessor.tpe.resultType)(_ => REF(accessor))
-    }
 
-    /** Common code for productElement and (currently disabled) productElementName
-     */
+    /* Common code for productElement and (currently disabled) productElementName */
     def perElementMethod(name: Name, returnType: Type)(caseFn: Symbol => Tree): Tree =
       createSwitchMethod(name, accessors.indices, returnType)(idx => caseFn(accessors(idx)))
 
-    // def productElementNameMethod = perElementMethod(nme.productElementName, StringClass.tpe)(x => LIT(x.name.toString))
+    // def productElementNameMethod = perElementMethod(nme.productElementName, StringTpe)(x => LIT(x.name.toString))
 
     var syntheticCanEqual = false
 
-    /** The canEqual method for case classes.
-     *    def canEqual(that: Any) = that.isInstanceOf[This]
+    /* The canEqual method for case classes.
+     *   def canEqual(that: Any) = that.isInstanceOf[This]
      */
     def canEqualMethod: Tree = {
       syntheticCanEqual = true
-      createMethod(nme.canEqual_, List(AnyClass.tpe), BooleanClass.tpe)(m =>
+      createMethod(nme.canEqual_, List(AnyTpe), BooleanTpe)(m =>
         Ident(m.firstParam) IS_OBJ classExistentialType(clazz))
     }
 
-    /** that match { case _: this.C => true ; case _ => false }
-     *  where `that` is the given method's first parameter.
+    /* that match { case _: this.C => true ; case _ => false }
+     * where `that` is the given method's first parameter.
      *
-     *  An isInstanceOf test is insufficient because it has weaker
-     *  requirements than a pattern match. Given an inner class Foo and
-     *  two different instantiations of the container, an x.Foo and and a y.Foo
-     *  are both .isInstanceOf[Foo], but the one does not match as the other.
+     * An isInstanceOf test is insufficient because it has weaker
+     * requirements than a pattern match. Given an inner class Foo and
+     * two different instantiations of the container, an x.Foo and a y.Foo
+     * are both .isInstanceOf[Foo], but one does not match as the other.
      */
     def thatTest(eqmeth: Symbol): Tree = {
       Match(
         Ident(eqmeth.firstParam),
         List(
           CaseDef(Typed(Ident(nme.WILDCARD), TypeTree(clazz.tpe)), EmptyTree, TRUE),
-          CaseDef(WILD.empty, EmptyTree, FALSE)
+          CaseDef(Ident(nme.WILDCARD), EmptyTree, FALSE)
         )
       )
     }
 
-    /** (that.asInstanceOf[this.C])
-     *  where that is the given methods first parameter.
+    /* (that.asInstanceOf[this.C])
+     * where that is the given methods first parameter.
      */
     def thatCast(eqmeth: Symbol): Tree =
       gen.mkCast(Ident(eqmeth.firstParam), clazz.tpe)
 
-    /** The equality method core for case classes and inline clases.
-     *  1+ args:
-     *    (that.isInstanceOf[this.C]) && {
-     *        val x$1 = that.asInstanceOf[this.C]
-     *        (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this)
-     *       }
-     *  Drop canBuildFrom part if class is final and canBuildFrom is synthesized
+    /* The equality method core for case classes and inline classes.
+     * 1+ args:
+     *   (that.isInstanceOf[this.C]) && {
+     *       val x$1 = that.asInstanceOf[this.C]
+     *       (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this)
+     *      }
+     * Drop canEqual part if class is final and canEqual is synthesized
      */
     def equalsCore(eqmeth: Symbol, accessors: List[Symbol]) = {
       val otherName = context.unit.freshTermName(clazz.name + "$")
@@ -199,18 +189,18 @@ trait SyntheticMethods extends ast.TreeDSL {
       )
     }
 
-    /** The equality method for case classes.
-     *  0 args:
-     *    def equals(that: Any) = that.isInstanceOf[this.C] && that.asInstanceOf[this.C].canEqual(this)
-     *  1+ args:
-     *    def equals(that: Any) = (this eq that.asInstanceOf[AnyRef]) || {
-     *      (that.isInstanceOf[this.C]) && {
-     *        val x$1 = that.asInstanceOf[this.C]
-     *        (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this)
-     *       }
-     *    }
+    /* The equality method for case classes.
+     * 0 args:
+     *   def equals(that: Any) = that.isInstanceOf[this.C] && that.asInstanceOf[this.C].canEqual(this)
+     * 1+ args:
+     *   def equals(that: Any) = (this eq that.asInstanceOf[AnyRef]) || {
+     *     (that.isInstanceOf[this.C]) && {
+     *       val x$1 = that.asInstanceOf[this.C]
+     *       (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this)
+     *      }
+     *   }
      */
-    def equalsCaseClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m =>
+    def equalsCaseClassMethod: Tree = createMethod(nme.equals_, List(AnyTpe), BooleanTpe) { m =>
       if (accessors.isEmpty)
         if (clazz.isFinal) thatTest(m)
         else thatTest(m) AND ((thatCast(m) DOT nme.canEqual_)(mkThis))
@@ -218,30 +208,35 @@ trait SyntheticMethods extends ast.TreeDSL {
         (mkThis ANY_EQ Ident(m.firstParam)) OR equalsCore(m, accessors)
     }
 
-    /** The equality method for value classes
-     *  def equals(that: Any) = (this.asInstanceOf[AnyRef]) eq that.asInstanceOf[AnyRef]) || {
-     *    (that.isInstanceOf[this.C]) && {
-     *     val x$1 = that.asInstanceOf[this.C]
-     *     (this.underlying == that.underlying
+    /* The equality method for value classes
+     * def equals(that: Any) = (this.asInstanceOf[AnyRef] eq that.asInstanceOf[AnyRef]) || {
+     *   (that.isInstanceOf[this.C]) && {
+     *    val x$1 = that.asInstanceOf[this.C]
+     *    (this.underlying == that.underlying
      */
-    def equalsDerivedValueClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m =>
+    def equalsDerivedValueClassMethod: Tree = createMethod(nme.equals_, List(AnyTpe), BooleanTpe) { m =>
       equalsCore(m, List(clazz.derivedValueClassUnbox))
     }
 
-    /** The hashcode method for value classes
+    /* The hashcode method for value classes
      * def hashCode(): Int = this.underlying.hashCode
      */
-    def hashCodeDerivedValueClassMethod: Tree = createMethod(nme.hashCode_, Nil, IntClass.tpe) { m =>
+    def hashCodeDerivedValueClassMethod: Tree = createMethod(nme.hashCode_, Nil, IntTpe) { m =>
       Select(mkThisSelect(clazz.derivedValueClassUnbox), nme.hashCode_)
     }
 
-    /** The _1, _2, etc. methods to implement ProductN, disabled
-     *  until we figure out how to introduce ProductN without cycles.
+    /* The _1, _2, etc. methods to implement ProductN, disabled
+     * until we figure out how to introduce ProductN without cycles.
      */
-     def productNMethods = {
+    /****
+    def productNMethods = {
       val accs = accessors.toIndexedSeq
       1 to arity map (num => productProj(arity, num) -> (() => projectionMethod(accs(num - 1), num)))
     }
+    def projectionMethod(accessor: Symbol, num: Int) = {
+      createMethod(nme.productAccessorName(num), accessor.tpe.resultType)(_ => REF(accessor))
+    }
+    ****/
 
     // methods for both classes and objects
     def productMethods = {
@@ -259,19 +254,20 @@ trait SyntheticMethods extends ast.TreeDSL {
 
     def hashcodeImplementation(sym: Symbol): Tree = {
       sym.tpe.finalResultType.typeSymbol match {
-        case UnitClass | NullClass                         => Literal(Constant(0))
-        case BooleanClass                                  => If(Ident(sym), Literal(Constant(1231)), Literal(Constant(1237)))
-        case IntClass | ShortClass | ByteClass | CharClass => Ident(sym)
-        case LongClass                                     => callStaticsMethod("longHash")(Ident(sym))
-        case DoubleClass                                   => callStaticsMethod("doubleHash")(Ident(sym))
-        case FloatClass                                    => callStaticsMethod("floatHash")(Ident(sym))
-        case _                                             => callStaticsMethod("anyHash")(Ident(sym))
+        case UnitClass | NullClass              => Literal(Constant(0))
+        case BooleanClass                       => If(Ident(sym), Literal(Constant(1231)), Literal(Constant(1237)))
+        case IntClass                           => Ident(sym)
+        case ShortClass | ByteClass | CharClass => Select(Ident(sym), nme.toInt)
+        case LongClass                          => callStaticsMethod("longHash")(Ident(sym))
+        case DoubleClass                        => callStaticsMethod("doubleHash")(Ident(sym))
+        case FloatClass                         => callStaticsMethod("floatHash")(Ident(sym))
+        case _                                  => callStaticsMethod("anyHash")(Ident(sym))
       }
     }
 
     def specializedHashcode = {
-      createMethod(nme.hashCode_, Nil, IntClass.tpe) { m =>
-        val accumulator = m.newVariable(newTermName("acc"), m.pos, SYNTHETIC) setInfo IntClass.tpe
+      createMethod(nme.hashCode_, Nil, IntTpe) { m =>
+        val accumulator = m.newVariable(newTermName("acc"), m.pos, SYNTHETIC) setInfo IntTpe
         val valdef      = ValDef(accumulator, Literal(Constant(0xcafebabe)))
         val mixes       = accessors map (acc =>
           Assign(
@@ -313,11 +309,11 @@ trait SyntheticMethods extends ast.TreeDSL {
       // Object_equals   -> (() => createMethod(Object_equals)(m => This(clazz) ANY_EQ Ident(m.firstParam)))
     )
 
-    /** If you serialize a singleton and then deserialize it twice,
-     *  you will have two instances of your singleton unless you implement
-     *  readResolve.  Here it is implemented for all objects which have
-     *  no implementation and which are marked serializable (which is true
-     *  for all case objects.)
+    /* If you serialize a singleton and then deserialize it twice,
+     * you will have two instances of your singleton unless you implement
+     * readResolve.  Here it is implemented for all objects which have
+     * no implementation and which are marked serializable (which is true
+     * for all case objects.)
      */
     def needsReadResolve = (
          clazz.isModuleClass
@@ -335,18 +331,20 @@ trait SyntheticMethods extends ast.TreeDSL {
         else Nil
       )
 
-      /** Always generate overrides for equals and hashCode in value classes,
-       *  so they can appear in universal traits without breaking value semantics.
+      /* Always generate overrides for equals and hashCode in value classes,
+       * so they can appear in universal traits without breaking value semantics.
        */
       def impls = {
         def shouldGenerate(m: Symbol) = {
           !hasOverridingImplementation(m) || {
             clazz.isDerivedValueClass && (m == Any_hashCode || m == Any_equals) && {
-              if (settings.lint.value) {
-                (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m =>
-                  currentUnit.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics")
-                }
-              }
+              // Without a means to suppress this warning, I've thought better of it.
+              //
+              // if (settings.lint) {
+              //   (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m =>
+              //     currentUnit.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics")
+              //   }
+              // }
               true
             }
           }
@@ -359,7 +357,7 @@ trait SyntheticMethods extends ast.TreeDSL {
           // This method should be generated as private, but apparently if it is, then
           // it is name mangled afterward.  (Wonder why that is.) So it's only protected.
           // For sure special methods like "readResolve" should not be mangled.
-          List(createMethod(nme.readResolve, Nil, ObjectClass.tpe)(m => { m setFlag PRIVATE ; REF(clazz.sourceModule) }))
+          List(createMethod(nme.readResolve, Nil, ObjectTpe)(m => { m setFlag PRIVATE ; REF(clazz.sourceModule) }))
         }
         else Nil
       )
@@ -368,11 +366,11 @@ trait SyntheticMethods extends ast.TreeDSL {
       catch { case _: TypeError if reporter.hasErrors => Nil }
     }
 
-    /** If this case class has any less than public accessors,
-     *  adds new accessors at the correct locations to preserve ordering.
-     *  Note that this must be done before the other method synthesis
-     *  because synthesized methods need refer to the new symbols.
-     *  Care must also be taken to preserve the case accessor order.
+    /* If this case class has any less than public accessors,
+     * adds new accessors at the correct locations to preserve ordering.
+     * Note that this must be done before the other method synthesis
+     * because synthesized methods need to refer to the new symbols.
+     * Care must also be taken to preserve the case accessor order.
      */
     def caseTemplateBody(): List[Tree] = {
       val lb = ListBuffer[Tree]()
@@ -382,7 +380,7 @@ trait SyntheticMethods extends ast.TreeDSL {
         val original = ddef.symbol
         val newAcc = deriveMethod(ddef.symbol, name => context.unit.freshTermName(name + "$")) { newAcc =>
           newAcc.makePublic
-          newAcc resetFlag (ACCESSOR | PARAMACCESSOR)
+          newAcc resetFlag (ACCESSOR | PARAMACCESSOR | OVERRIDE)
           ddef.rhs.duplicate
         }
         // TODO: shouldn't the next line be: `original resetFlag CASEACCESSOR`?
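
The comments rewritten above spell out the shape of the synthesized canEqual,
equals and specialized hashCode members. A hand-written approximation for a
hypothetical two-field case class (Pt is an invented name, and the hashCode
body only sketches the 0xcafebabe mixing scheme rather than reproducing the
compiler's exact output):

    class Pt(val x: Int, val y: Int) {
      def canEqual(that: Any): Boolean = that.isInstanceOf[Pt]

      override def equals(that: Any): Boolean =
        (this eq that.asInstanceOf[AnyRef]) || (that match {
          case other: Pt => x == other.x && y == other.y && (other canEqual this)
          case _         => false
        })

      // accumulate field hashes starting from the 0xcafebabe seed
      override def hashCode: Int = {
        import scala.runtime.Statics
        var acc = 0xcafebabe
        acc = Statics.mix(acc, x)
        acc = Statics.mix(acc, y)
        Statics.finalizeHash(acc, 2)
      }
    }
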
diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
index d82fbd7..90ec3a8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
@@ -10,16 +10,19 @@ trait Tags {
   trait Tag {
     self: Typer =>
 
-    private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = beforeTyper {
+    private val runDefinitions = currentRun.runDefinitions
+    import runDefinitions._
+
+    private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = enteringTyper {
       def wrapper (tree: => Tree): Tree = if (allowMaterialization) (context.withMacrosEnabled[Tree](tree)) else (context.withMacrosDisabled[Tree](tree))
       wrapper(inferImplicit(
         EmptyTree,
         taggedTp,
-        /*reportAmbiguous =*/ true,
-        /*isView =*/ false,
-        /*context =*/ context,
-        /*saveAmbiguousDivergent =*/ true,
-        /*pos =*/ pos
+        reportAmbiguous = true,
+        isView = false,
+        context,
+        saveAmbiguousDivergent = true,
+        pos
       ).tree)
     }
 
@@ -30,7 +33,7 @@ trait Tags {
      *  However we found out that we don't really need this concept, so it got removed.
      *
      *  @param   pos                    Position for error reporting. Please, provide meaningful value.
-     *  @param   tp                     Type we're looking a ClassTag for, e.g. resolveClassTag(pos, IntClass.tpe) will look for ClassTag[Int].
+     *  @param   tp                     Type we're looking a ClassTag for, e.g. resolveClassTag(pos, IntTpe) will look for ClassTag[Int].
      *  @param   allowMaterialization   If true (default) then the resolver is allowed to launch materialization macros when there's no class tag in scope.
      *                                  If false then materialization macros are prohibited from running.
      *
@@ -49,7 +52,7 @@ trait Tags {
      *  @param   pre                    Prefix that represents a universe this type tag will be bound to.
      *                                  If `pre` is set to `NoType`, then any type tag in scope will do, regardless of its affiliation.
      *                                  If `pre` is set to `NoType`, and tag resolution involves materialization, then `mkRuntimeUniverseRef` will be used.
-     *  @param   tp                     Type we're looking a TypeTag for, e.g. resolveTypeTag(pos, mkRuntimeUniverseRef, IntClass.tpe, false) will look for scala.reflect.runtime.universe.TypeTag[Int].
+     *  @param   tp                     Type we're looking a TypeTag for, e.g. resolveTypeTag(pos, mkRuntimeUniverseRef, IntTpe, false) will look for scala.reflect.runtime.universe.TypeTag[Int].
      *  @param   concrete               If true then the result must not contain unresolved (i.e. not spliced) type parameters and abstract type members.
      *                                  If false then the function will always succeed (abstract types will be reified as free types).
      *  @param   allowMaterialization   If true (default) then the resolver is allowed to launch materialization macros when there's no type tag in scope.
@@ -69,4 +72,4 @@ trait Tags {
         resolveTag(pos, taggedTp, allowMaterialization)
       }
   }
-}
\ No newline at end of file
+}
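
resolveClassTag and resolveTypeTag look up the same implicits that user code
requests directly, materializing them when none is already in scope. A small
sketch of that user-facing side (TagDemo and its methods are invented):

    import scala.reflect.ClassTag
    import scala.reflect.runtime.universe.TypeTag

    object TagDemo extends App {
      // needs an implicit ClassTag[T], the value resolveClassTag searches for
      def elemClass[T: ClassTag](xs: Array[T]): Class[_] = implicitly[ClassTag[T]].runtimeClass

      // needs an implicit TypeTag[T], the value resolveTypeTag searches for
      def describe[T: TypeTag]: String = implicitly[TypeTag[T]].tpe.toString

      println(elemClass(Array(1, 2, 3)))    // int
      println(describe[List[Option[Int]]])  // List[Option[Int]]
    }
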
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 88d10f1..a2f52e1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -6,19 +6,72 @@
 package scala.tools.nsc
 package typechecker
 
-import scala.tools.nsc.symtab.Flags._
 import scala.collection.mutable
 import mutable.ListBuffer
 import util.returning
+import scala.reflect.internal.util.shortClassOfInstance
+import scala.reflect.internal.util.StringOps._
 
 abstract class TreeCheckers extends Analyzer {
   import global._
 
-  private def classstr(x: AnyRef) = (x.getClass.getName split """\\.|\\$""").last
+  override protected def onTreeCheckerError(pos: Position, msg: String) {
+    if (settings.fatalWarnings)
+      currentUnit.warning(pos, "\n** Error during internal checking:\n" + msg)
+  }
+
+  case class DiffResult[T](lost: List[T], gained: List[T]) {
+    def isEmpty  = lost.isEmpty && gained.isEmpty
+    def lost_s   = if (lost.isEmpty) "" else lost.mkString("lost: ", ", ", "")
+    def gained_s = if (gained.isEmpty) "" else gained.mkString("gained: ", ", ", "")
+    override def toString = ojoin(lost_s, gained_s)
+  }
+
+  def diffList[T](xs: List[T], ys: List[T]): DiffResult[T] =
+    DiffResult(xs filterNot ys.contains, ys filterNot xs.contains)
+
+  def diffTrees(t1: Tree, t2: Tree): DiffResult[Tree] =
+    diffList(t1 filter (_ ne t1), t2 filter (_ ne t2))
+
+  def diffTemplates(t1: Template, t2: Template): String = {
+    val parents = diffList(t1.parents, t2.parents).toString match { case "" => "" case s => "parents " + s }
+    val stats   = diffList(t1.body, t2.body).toString match { case ""      => "" case s => "stats " + s }
+    oempty(parents, stats) mkString ", "
+  }
+
+  def diff(t1: Tree, t2: Tree): String = (t1, t2) match {
+    case (_: Literal, _: Literal)     => ""
+    case (t1: ImplDef, t2: ImplDef)   => diff(t1.impl, t2.impl)
+    case (t1: Template, t2: Template) => diffTemplates(t1, t2)
+    case _                            => diffTrees(t1, t2).toString // "<error: different tree classes>"
+  }
+
+  private def clean_s(s: String) = s.replaceAllLiterally("scala.collection.", "s.c.")
   private def typestr(x: Type)    = " (tpe = " + x + ")"
-  private def treestr(t: Tree)    = t + " [" + classstr(t) + "]" + typestr(t.tpe)
+  private def treestr(t: Tree)    = t + " [" + classString(t) + "]" + typestr(t.tpe)
   private def ownerstr(s: Symbol) = "'" + s + "'" + s.locationString
   private def wholetreestr(t: Tree) = nodeToString(t) + "\n"
+  private def truncate(str: String, len: Int): String = (
+    if (str.length <= len) str
+    else (str takeWhile (_ != '\n') take len - 3) + "..."
+  )
+  private def signature(sym: Symbol) = clean_s(sym match {
+    case null           => "null"
+    case _: ClassSymbol => sym.name + ": " + sym.tpe_*
+    case _              => sym.defString
+  })
+  private def classString(x: Any) = x match {
+    case null      => ""
+    case t: Tree   => t.shortClass
+    case s: Symbol => s.shortSymbolClass
+    case x: AnyRef => shortClassOfInstance(x)
+  }
+  private def nonPackageOwners(s: Symbol) = s.ownerChain drop 1 takeWhile (!_.hasPackageFlag)
+  private def nonPackageOwnersPlusOne(s: Symbol) = nonPackageOwners(s) ::: (s.ownerChain dropWhile (!_.hasPackageFlag) take 1)
+  private def ownersString(s: Symbol) = nonPackageOwnersPlusOne(s) match {
+    case Nil => "NoSymbol"
+    case xs  => xs mkString " -> "
+  }
 
   private def beststr(t: Tree) = "<" + {
     if (t.symbol != null && t.symbol != NoSymbol) "sym=" + ownerstr(t.symbol)
@@ -26,51 +79,55 @@ abstract class TreeCheckers extends Analyzer {
     else t match {
       case x: DefTree => "name=" + x.name
       case x: RefTree => "reference=" + x.name
-      case _          => "clazz=" + classstr(t)
+      case _          => "clazz=" + classString(t)
     }
   } + ">"
 
   /** This is a work in progress, don't take it too seriously.
    */
   object SymbolTracker extends Traverser {
-    type PhaseMap = mutable.HashMap[Symbol, List[Tree]]
+    type PhaseMap = mutable.Map[Symbol, List[Tree]]
+    def symbolTreeMap[T <: Tree]() = mutable.Map[Symbol, List[T]]() withDefaultValue Nil
 
-    val maps          = ListBuffer[(Phase, PhaseMap)]()
-    def prev          = maps.init.last._2
-    def latest        = maps.last._2
-    val defSyms       = mutable.HashMap[Symbol, List[DefTree]]()
+    var maps: List[(Phase, PhaseMap)] = ((NoPhase, null)) :: Nil
+    def prev          = maps.tail.head._2
+    def latest        = maps.head._2
+    val defSyms       = symbolTreeMap[DefTree]()
     val newSyms       = mutable.HashSet[Symbol]()
     val movedMsgs     = new ListBuffer[String]
     def sortedNewSyms = newSyms.toList.distinct sortBy (_.name.toString)
 
-    def inPrev(sym: Symbol) = {
-      (maps.size >= 2) && (prev contains sym)
-    }
-    def record(sym: Symbol, tree: Tree) = {
-      if (latest contains sym) latest(sym) = latest(sym) :+ tree
-      else latest(sym) = List(tree)
+    def record(tree: Tree) {
+      val sym = tree.symbol
+      if ((sym eq null) || (sym eq NoSymbol)) return
 
-      if (inPrev(sym)) {
-        val prevTrees = prev(sym)
+      val prevMap   = maps.tail.head._2
+      val prevTrees = if (prevMap eq null) Nil else prevMap(sym)
 
-        if (prevTrees exists (t => (t eq tree) || (t.symbol == sym))) ()
-        else if (prevTrees exists (_.symbol.owner == sym.owner.implClass)) {
-          errorFn("Noticed " + ownerstr(sym) + " moving to implementation class.")
-        }
-        else {
-          val s1 = (prevTrees map wholetreestr).sorted.distinct
-          val s2 = wholetreestr(tree)
-          if (s1 contains s2) ()
-          else movedMsgs += ("\n** %s moved:\n** Previously:\n%s\n** Currently:\n%s".format(ownerstr(sym), s1 mkString ", ", s2))
-        }
+      tree match {
+        case t: DefTree => defSyms(sym) ::= t
+        case _          =>
+      }
+
+      if (prevTrees.isEmpty)
+        newSyms += sym
+      else if (prevTrees exists (t => (t eq tree) || (t.symbol == sym)))
+        ()
+      else if (prevTrees exists (_.symbol.owner == sym.owner.implClass))
+        errorFn("Noticed " + ownerstr(sym) + " moving to implementation class.")
+      else {
+        val s1 = (prevTrees map wholetreestr).sorted.distinct
+        val s2 = wholetreestr(tree)
+        if (s1 contains s2) ()
+        else movedMsgs += ("\n** %s moved:\n** Previously:\n%s\n** Currently:\n%s".format(ownerstr(sym), s1 mkString ", ", s2))
       }
-      else newSyms += sym
     }
+
     def reportChanges(): Unit = {
       // new symbols
       if (newSyms.nonEmpty) {
         informFn(newSyms.size + " new symbols.")
-        val toPrint = if (settings.debug.value) sortedNewSyms mkString " " else ""
+        val toPrint = if (settings.debug) sortedNewSyms mkString " " else ""
 
         newSyms.clear()
         if (toPrint != "")
@@ -89,74 +146,63 @@ abstract class TreeCheckers extends Analyzer {
     }
 
     def check(ph: Phase, unit: CompilationUnit): Unit = {
-      if (maps.isEmpty || maps.last._1 != ph)
-        maps += ((ph, new PhaseMap))
-
+      maps match {
+        case ((`ph`, _)) :: _ =>
+        case _                => maps ::= ((ph, symbolTreeMap[Tree]()))
+      }
       traverse(unit.body)
       reportChanges()
     }
-    override def traverse(tree: Tree): Unit = {
-      val sym    = tree.symbol
-      if (sym != null && sym != NoSymbol) {
-        record(sym, tree)
-        tree match {
-          case x: DefTree =>
-            if (defSyms contains sym) defSyms(sym) = defSyms(sym) :+ x
-            else defSyms(sym) = List(x)
-          case _ => ()
-        }
-      }
-
+    override def traverse(tree: Tree) {
+      record(tree)
       super.traverse(tree)
     }
   }
 
   lazy val tpeOfTree = mutable.HashMap[Tree, Type]()
+  private lazy val reportedAlready = mutable.HashSet[(Tree, Symbol)]()
+
+  def posstr(p: Position): String = (
+    if (p eq null) "" else {
+      try p.source.path + ":" + p.line
+      catch { case _: UnsupportedOperationException => p.toString }
+    }
+  )
+
 
-  def posstr(p: Position) =
-    try p.source.path + ":" + p.line
-    catch { case _: UnsupportedOperationException => p.toString }
+  def errorFn(pos: Position, msg: Any): Unit = currentUnit.warning(pos, "[check: %s] %s".format(phase.prev, msg))
+  def errorFn(msg: Any): Unit                = errorFn(NoPosition, msg)
 
-  private var hasError: Boolean = false
-  def errorFn(msg: Any): Unit                = {hasError = true; println("[check: %s] %s".format(phase.prev, msg))}
-  def errorFn(pos: Position, msg: Any): Unit = errorFn(posstr(pos) + ": " + msg)
   def informFn(msg: Any) {
-    if (settings.verbose.value || settings.debug.value)
+    if (settings.verbose || settings.debug)
       println("[check: %s] %s".format(phase.prev, msg))
   }
 
   def assertFn(cond: Boolean, msg: => Any) =
     if (!cond) errorFn(msg)
 
-  private def wrap[T](msg: => Any)(body: => Unit) {
+  private def wrap[T](msg: => Any)(body: => T): T = {
     try body
     catch { case x: Throwable =>
       Console.println("Caught " + x)
       Console.println(msg)
       x.printStackTrace
+      null.asInstanceOf[T]
     }
   }
 
   def checkTrees() {
-    if (settings.verbose.value)
+    if (settings.verbose)
       Console.println("[consistency check at the beginning of phase " + phase + "]")
 
     currentRun.units foreach (x => wrap(x)(check(x)))
   }
 
-  def printingTypings[T](body: => T): T = {
-    val saved = global.printTypings
-    global.printTypings = true
-    val result = body
-    global.printTypings = saved
-    result
-  }
   def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = {
-    hasError = false
     val unit0 = currentUnit
     currentRun.currentUnit = unit
     body
-    currentRun.advanceUnit
+    currentRun.advanceUnit()
     assertFn(currentUnit == unit, "currentUnit is " + currentUnit + ", but unit is " + unit)
     currentRun.currentUnit = unit0
   }
@@ -164,35 +210,37 @@ abstract class TreeCheckers extends Analyzer {
     informProgress("checking "+unit)
     val context = rootContext(unit)
     context.checking = true
-    tpeOfTree.clear
+    tpeOfTree.clear()
     SymbolTracker.check(phase, unit)
     val checker = new TreeChecker(context)
     runWithUnit(unit) {
       checker.precheck.traverse(unit.body)
       checker.typed(unit.body)
       checker.postcheck.traverse(unit.body)
-      if (hasError) unit.warning(NoPosition, "TreeCheckers detected non-compliant trees in " + unit)
     }
   }
 
   override def newTyper(context: Context): Typer = new TreeChecker(context)
 
   class TreeChecker(context0: Context) extends Typer(context0) {
-    override protected def finishMethodSynthesis(templ: Template, clazz: Symbol, context: Context): Template = {
-      // If we don't intercept this all the synthetics get added at every phase,
-      // with predictably unfortunate results.
-      templ
-    }
+    // If we don't intercept this all the synthetics get added at every phase,
+    // with predictably unfortunate results.
+    override protected def finishMethodSynthesis(templ: Template, clazz: Symbol, context: Context): Template = templ
 
     // XXX check for tree.original on TypeTrees.
-    private def treesDiffer(t1: Tree, t2: Tree) =
-      errorFn(t1.pos, "trees differ\n old: " + treestr(t1) + "\n new: " + treestr(t2))
+    private def treesDiffer(t1: Tree, t2: Tree): Unit = {
+      def len1 = t1.toString.length
+      def len2 = t2.toString.length
+      def name = t1 match {
+        case t: NameTree => t.name
+        case _           => t1.summaryString
+      }
+      def summary = s"${t1.shortClass} $name differs, bytes $len1 -> $len2, "
+      errorFn(t1.pos, summary + diff(t1, t2))
+    }
+
     private def typesDiffer(tree: Tree, tp1: Type, tp2: Type) =
       errorFn(tree.pos, "types differ\n old: " + tp1 + "\n new: " + tp2 + "\n tree: " + tree)
-    private def ownersDiffer(tree: Tree, shouldBe: Symbol) = {
-      val sym = tree.symbol
-      errorFn(tree.pos, sym + " has wrong owner: " + ownerstr(sym.owner) + ", should be: " + ownerstr(shouldBe))
-    }
 
     /** XXX Disabled reporting of position errors until there is less noise. */
     private def noPos(t: Tree) =
@@ -204,30 +252,46 @@ abstract class TreeCheckers extends Analyzer {
       if (t.symbol == NoSymbol)
         errorFn(t.pos, "no symbol: " + treestr(t))
 
-    override def typed(tree: Tree, mode: Int, pt: Type): Tree = returning(tree) {
-      case EmptyTree | TypeTree() => ()
-      case _ if tree.tpe != null  =>
-        tpeOfTree.getOrElseUpdate(tree, {
-          val saved = tree.tpe
-          tree.tpe = null
-          saved
-        })
-        wrap(tree)(super.typed(tree, mode, pt) match {
-          case _: Literal     => ()
-          case x if x ne tree => treesDiffer(tree, x)
-          case _              => ()
-        })
-      case _ => ()
+    private def passThrough(tree: Tree) = tree match {
+      case EmptyTree | TypeTree() => true
+      case _                      => tree.tpe eq null
+    }
+    override def typed(tree: Tree, mode: Mode, pt: Type): Tree = (
+      if (passThrough(tree))
+        super.typed(tree, mode, pt)
+      else
+        checkedTyped(tree, mode, pt)
+    )
+    private def checkedTyped(tree: Tree, mode: Mode, pt: Type): Tree = {
+      val typed = wrap(tree)(super.typed(tree, mode, pt))
+
+      if (tree ne typed)
+        treesDiffer(tree, typed)
+
+      tree
     }
 
     object precheck extends TreeStackTraverser {
-      override def traverse(tree: Tree) {
-        checkSymbolRefsRespectScope(tree)
+      private var enclosingMemberDefs: List[MemberDef] = Nil
+      private def pushMemberDef[T](md: MemberDef)(body: => T): T = {
+        enclosingMemberDefs ::= md
+        try body finally enclosingMemberDefs = enclosingMemberDefs.tail
+      }
+      override def traverse(tree: Tree): Unit = tree match {
+        case md: MemberDef => pushMemberDef(md)(traverseInternal(tree))
+        case _             => traverseInternal(tree)
+      }
+
+      private def traverseInternal(tree: Tree) {
+        if (!tree.canHaveAttrs)
+          return
+
+        checkSymbolRefsRespectScope(enclosingMemberDefs takeWhile (md => !md.symbol.hasPackageFlag), tree)
         checkReturnReferencesDirectlyEnclosingDef(tree)
 
         val sym = tree.symbol
         def accessed = sym.accessed
-        def fail(msg: String) = errorFn(tree.pos, msg + classstr(tree) + " / " + tree)
+        def fail(msg: String) = errorFn(tree.pos, msg + tree.shortClass + " / " + tree)
 
         tree match {
           case DefDef(_, _, _, _, _, _) =>
@@ -236,7 +300,7 @@ abstract class TreeCheckers extends Analyzer {
                 case _: ConstantType  => ()
                 case _                =>
                   checkSym(tree)
-                  /** XXX: lots of syms show up here with accessed == NoSymbol. */
+                  /* XXX: lots of syms show up here with accessed == NoSymbol. */
                   if (accessed != NoSymbol) {
                     val agetter = accessed.getter(sym.owner)
                     val asetter = accessed.setter(sym.owner)
@@ -263,15 +327,14 @@ abstract class TreeCheckers extends Analyzer {
             else if (currentOwner.ownerChain takeWhile (_ != sym) exists (_ == NoSymbol))
               return fail("tree symbol "+sym+" does not point to enclosing class; tree = ")
 
-          /** XXX: temporary while Import nodes are arriving untyped. */
+          /* XXX: temporary while Import nodes are arriving untyped. */
           case Import(_, _) =>
             return
           case _ =>
         }
-
-        if (tree.pos == NoPosition && tree != EmptyTree)
+        if (tree.pos == NoPosition)
           noPos(tree)
-        else if (tree.tpe == null && phase.id > currentRun.typerPhase.id)
+        else if (tree.tpe == null && isPastTyper)
           noType(tree)
         else if (tree.isDef) {
           checkSym(tree)
@@ -284,7 +347,7 @@ abstract class TreeCheckers extends Analyzer {
               def cond(s: Symbol) = !s.isTerm || s.isMethod || s == sym.owner
 
               if (sym.owner != currentOwner) {
-                val expected = currentOwner.ownerChain find (x => cond(x)) getOrElse fail("DefTree can't find owner: ")
+                val expected = currentOwner.ownerChain find (x => cond(x)) getOrElse { fail("DefTree can't find owner: ") ; NoSymbol }
                 if (sym.owner != expected)
                   fail(sm"""|
                             | currentOwner chain: ${currentOwner.ownerChain take 3 mkString " -> "}
@@ -296,58 +359,87 @@ abstract class TreeCheckers extends Analyzer {
         super.traverse(tree)
       }
 
-      private def checkSymbolRefsRespectScope(tree: Tree) {
-        def symbolOf(t: Tree): Symbol = Option(tree.symbol).getOrElse(NoSymbol)
-        def definedSymbolOf(t: Tree): Symbol = if (t.isDef) symbolOf(t) else NoSymbol
-        val info = Option(symbolOf(tree).info).getOrElse(NoType)
-        val referencedSymbols: List[Symbol] = {
-          val directRef = tree match {
-            case _: RefTree => symbolOf(tree).toOption
-            case _          => None
+      private def checkSymbolRefsRespectScope(enclosingMemberDefs: List[MemberDef], tree: Tree) {
+        def symbolOf(t: Tree): Symbol  = if (t.symbol eq null) NoSymbol else t.symbol
+        def typeOf(t: Tree): Type      = if (t.tpe eq null) NoType else t.tpe
+        def infoOf(t: Tree): Type      = symbolOf(t).info
+        def referencesInType(tp: Type) = tp collect { case TypeRef(_, sym, _) => sym }
+        // Accessors are known to steal the type of the underlying field without cloning existential symbols at the new owner.
+        // This happens in Namer#accessorTypeCompleter. We just look the other way here.
+        if (symbolOf(tree).isAccessor)
+          return
+
+        val treeSym  = symbolOf(tree)
+        val treeInfo = infoOf(tree)
+        val treeTpe  = typeOf(tree)
+
+        def isOk(sym: Symbol) = treeSym hasTransOwner sym.enclosingSuchThat(x => !x.isTypeParameterOrSkolem) // account for higher order type params
+        def isEligible(sym: Symbol) = (sym ne NoSymbol) && (
+             sym.isTypeParameter
+          || sym.isLocalToBlock
+        )
+        val referencedSymbols = (treeSym :: referencesInType(treeInfo)).distinct filter (sym => isEligible(sym) && !isOk(sym))
+        def mk[T](what: String, x: T, str: T => String = (x: T) => "" + x): ((Any, String)) =
+          x -> s"%10s  %-20s %s".format(what, classString(x), truncate(str(x), 80).trim)
+
+        def encls = enclosingMemberDefs.filterNot(_.symbol == treeSym).zipWithIndex map { case (md, i) => mk(s"encl(${i+1})", md.symbol, signature) }
+
+        def mkErrorMsg(outOfScope: Symbol): String = {
+
+          def front = List(
+            mk[Tree]("tree", tree),
+            mk[Position]("position", tree.pos, posstr),
+            mk("with sym", treeSym, signature)
+          )
+          def tpes = treeTpe match {
+            case NoType => Nil
+            case _      => mk[Type]("and tpe", treeTpe) :: Nil
+          }
+          def ref = mk[Symbol]("ref to", outOfScope, (s: Symbol) => s.nameString + " (" + s.debugFlagString + ")")
+
+          val pairs = front ++ tpes ++ encls ++ (ref :: Nil)
+          val width = pairs.map(_._2.length).max
+          val fmt = "%-" + width + "s"
+          val lines = pairs map {
+            case (s: Symbol, msg) => fmt.format(msg) + "  in  " + ownersString(s)
+            case (x, msg)         => fmt.format(msg)
           }
-          def referencedSyms(tp: Type) = (tp collect {
-            case TypeRef(_, sym, _) => sym
-          }).toList
-          val indirectRefs = referencedSyms(info)
-          (indirectRefs ++ directRef).distinct
+          lines.mkString("Out of scope symbol reference {\n", "\n", "\n}")
         }
-        for {
-          sym <- referencedSymbols
-          // Accessors are known to steal the type of the underlying field without cloning existential symbols at the new owner.
-          // This happens in Namer#accessorTypeCompleter. We just look the other way here.
-          if !tree.symbol.isAccessor
-          if (sym.isTypeParameter || sym.isLocal) && !(tree.symbol hasTransOwner sym.owner)
-        } errorFn(s"The symbol, tpe or info of tree `(${tree}) : ${info}` refers to a out-of-scope symbol, ${sym.fullLocationString}. tree.symbol.ownerChain: ${tree.symbol.ownerChain.mkString(", ")}")
+
+        referencedSymbols foreach (sym =>
+          if (!reportedAlready((tree, sym))) {
+            errorFn("\n" + mkErrorMsg(sym))
+            reportedAlready += ((tree, sym))
+          }
+        )
       }
 
-      private def checkReturnReferencesDirectlyEnclosingDef(tree: Tree) {
-        tree match {
-          case _: Return =>
-            path.collectFirst {
-              case dd: DefDef => dd
-            } match {
-              case None => errorFn(s"Return node ($tree) must be enclosed in a DefDef")
-              case Some(dd) =>
-                if (tree.symbol != dd.symbol) errorFn(s"Return symbol (${tree.symbol}} does not reference directly enclosing DefDef (${dd.symbol})")
-            }
-          case _ =>
-        }
+      private def checkReturnReferencesDirectlyEnclosingDef(tree: Tree): Unit = tree match {
+        case _: Return =>
+          path collectFirst { case dd: DefDef => dd } match {
+            case None                                 => errorFn(s"Return node ($tree) must be enclosed in a DefDef")
+            case Some(dd) if tree.symbol != dd.symbol => errorFn(s"Return symbol (${tree.symbol}) does not reference directly enclosing DefDef (${dd.symbol})")
+            case _                                    =>
+          }
+        case _ =>
       }
     }
 
     object postcheck extends Traverser {
-      override def traverse(tree: Tree) {
-        tree match {
-          case EmptyTree | TypeTree() => ()
-          case _ =>
-            tpeOfTree get tree foreach { oldtpe =>
-              if (oldtpe =:= tree.tpe) ()
-              else typesDiffer(tree, oldtpe, tree.tpe)
-
-              tree.tpe = oldtpe
-              super.traverse(tree)
-            }
-        }
+      override def traverse(tree: Tree): Unit = tree match {
+        case EmptyTree | TypeTree() => ()
+        case _ =>
+          tpeOfTree get tree foreach { oldtpe =>
+            if (tree.tpe eq null)
+              errorFn(s"tree.tpe=null for " + tree.shortClass + " (symbol: " + classString(tree.symbol) + " " + signature(tree.symbol) + "), last seen tpe was " + oldtpe)
+            else if (oldtpe =:= tree.tpe)
+              ()
+            else
+              typesDiffer(tree, oldtpe, tree.tpe)
+
+            super.traverse(tree setType oldtpe)
+          }
       }
     }
   }
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 2270e81..60346e7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -8,7 +8,6 @@ package typechecker
 
 import scala.collection.mutable
 import scala.collection.mutable.ListBuffer
-import scala.util.control.ControlThrowable
 import scala.util.control.Exception.ultimately
 import symtab.Flags._
 import PartialFunction._
@@ -37,15 +36,6 @@ trait TypeDiagnostics {
 
   import global._
   import definitions._
-  import global.typer.{ infer, context }
-
-  /** The common situation of making sure nothing is erroneous could be
-   *  nicer if Symbols, Types, and Trees all implemented some common interface
-   *  in which isErroneous and similar would be placed.
-   */
-  def noErroneousTypes(tps: Type*)    = tps forall (x => !x.isErroneous)
-  def noErroneousSyms(syms: Symbol*)  = syms forall (x => !x.isErroneous)
-  def noErroneousTrees(trees: Tree*)  = trees forall (x => !x.isErroneous)
 
   /** For errors which are artifacts of the implementation: such messages
    *  indicate that the restriction may be lifted in the future.
@@ -58,7 +48,7 @@ trait TypeDiagnostics {
   /** A map of Positions to addendums - if an error involves a position in
    *  the map, the addendum should also be printed.
    */
-  private var addendums = perRunCaches.newMap[Position, () => String]()
+  private val addendums = perRunCaches.newMap[Position, () => String]()
   private var isTyperInPattern = false
 
   /** Devising new ways of communicating error info out of
@@ -119,6 +109,22 @@ trait TypeDiagnostics {
     case x                                    => x.toString
   }
 
+  /**
+   * [a, b, c] => "(a, b, c)"
+   * [a, B]    => "(param1, param2)"
+   * [a, B, c] => "(param1, ..., param3)"
+   */
+  final def exampleTuplePattern(names: List[Name]): String = {
+    val arity = names.length
+    val varPatterNames: Option[List[String]] = sequence(names map {
+      case name if nme.isVariableName(name) => Some(name.decode)
+      case _                                => None
+    })
+    def parenthesize(a: String) = s"($a)"
+    def genericParams = (Seq("param1") ++ (if (arity > 2) Seq("...") else Nil) ++ Seq(s"param$arity"))
+    parenthesize(varPatterNames.getOrElse(genericParams).mkString(", "))
+  }
+
   def alternatives(tree: Tree): List[Type] = tree.tpe match {
     case OverloadedType(pre, alternatives)  => alternatives map pre.memberType
     case _                                  => Nil
@@ -136,7 +142,7 @@ trait TypeDiagnostics {
     else if (!member.isDeferred) member.accessed
     else {
       val getter = if (member.isSetter) member.getter(member.owner) else member
-      val flags  = if (getter.setter(member.owner) != NoSymbol) DEFERRED | MUTABLE else DEFERRED
+      val flags  = if (getter.setter(member.owner) != NoSymbol) DEFERRED.toLong | MUTABLE else DEFERRED
 
       getter.owner.newValue(getter.name.toTermName, getter.pos, flags) setInfo getter.tpe.resultType
     }
@@ -153,7 +159,7 @@ trait TypeDiagnostics {
     def defaultMessage    = moduleMessage + preResultString + tree.tpe
     def applyMessage      = defaultMessage + tree.symbol.locationString
 
-    if ((sym eq null) || (sym eq NoSymbol)) {
+    if (!tree.hasExistingSymbol) {
       if (isTyperInPattern) patternMessage
       else exprMessage
     }
@@ -174,18 +180,13 @@ trait TypeDiagnostics {
     case xs   => " where " + (disambiguate(xs map (_.existentialToString)) mkString ", ")
   }
 
-  def varianceWord(sym: Symbol): String =
-    if (sym.variance == 1) "covariant"
-    else if (sym.variance == -1) "contravariant"
-    else "invariant"
-
   def explainAlias(tp: Type) = {
     // Don't automatically normalize standard aliases; they still will be
     // expanded if necessary to disambiguate simple identifiers.
-    if ((tp eq tp.normalize) || tp.typeSymbolDirect.isInDefaultNamespace) ""
-    else {
+    val deepDealias = DealiasedType(tp)
+    if (tp eq deepDealias) "" else {
       // A sanity check against expansion being identical to original.
-      val s = "" + DealiasedType(tp)
+      val s = "" + deepDealias
       if (s == "" + tp) ""
       else "\n    (which expands to)  " + s
     }
@@ -223,12 +224,12 @@ trait TypeDiagnostics {
                   // force measures than comparing normalized Strings were producing error messages
                   // like "and java.util.ArrayList[String] <: java.util.ArrayList[String]" but there
                   // should be a cleaner way to do this.
-                  if (found.normalize.toString == tp.normalize.toString) ""
+                  if (found.dealiasWiden.toString == tp.dealiasWiden.toString) ""
                   else " (and %s <: %s)".format(found, tp)
                 )
                 val explainDef = {
                   val prepend = if (isJava) "Java-defined " else ""
-                  "%s%s is %s in %s.".format(prepend, reqsym, varianceWord(param), param)
+                  "%s%s is %s in %s.".format(prepend, reqsym, param.variance, param)
                 }
                 // Don't suggest they change the class declaration if it's somewhere
                 // under scala.* or defined in a java class, because attempting either
@@ -248,11 +249,11 @@ trait TypeDiagnostics {
                 || ((arg <:< reqArg) && param.isCovariant)
                 || ((reqArg <:< arg) && param.isContravariant)
               )
-              val invariant = param.variance == 0
+              val invariant = param.variance.isInvariant
 
               if (conforms)                             Some("")
-              else if ((arg <:< reqArg) && invariant)   mkMsg(true)   // covariant relationship
-              else if ((reqArg <:< arg) && invariant)   mkMsg(false)  // contravariant relationship
+              else if ((arg <:< reqArg) && invariant)   mkMsg(isSubtype = true)   // covariant relationship
+              else if ((reqArg <:< arg) && invariant)   mkMsg(isSubtype = false)  // contravariant relationship
               else None // we assume in other cases our ham-fisted advice will merely serve to confuse
           }
           val messages = relationships.flatten
@@ -268,7 +269,7 @@ trait TypeDiagnostics {
   // For found/required errors where AnyRef would have sufficed:
   // explain in greater detail.
   def explainAnyVsAnyRef(found: Type, req: Type): String = {
-    if (AnyRefClass.tpe <:< req) notAnyRefMessage(found) else ""
+    if (AnyRefTpe <:< req) notAnyRefMessage(found) else ""
   }
 
   // TODO - figure out how to avoid doing any work at all
@@ -300,8 +301,8 @@ trait TypeDiagnostics {
           case xs   => xs map (_ => "_") mkString (clazz.name + "[", ",", "]")
         })+ "`"
 
-    "\nNote: if you intended to match against the class, try "+ caseString
-
+    if (!clazz.exists) ""
+    else "\nNote: if you intended to match against the class, try "+ caseString
   }
 
   case class TypeDiag(tp: Type, sym: Symbol) extends Ordered[TypeDiag] {
@@ -309,7 +310,6 @@ trait TypeDiagnostics {
     // distinguished from the other types in the same error message
     private val savedName = sym.name
     def restoreName()     = sym.name = savedName
-    def isAltered         = sym.name != savedName
     def modifyName(f: String => String) = sym setName newTypeName(f(sym.name.toString))
 
     /** Prepend java.lang, scala., or Predef. if this type originated
@@ -442,6 +442,119 @@ trait TypeDiagnostics {
     def permanentlyHiddenWarning(pos: Position, hidden: Name, defn: Symbol) =
       contextWarning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString))
 
+    object checkUnused {
+      val ignoreNames = Set[TermName]("readResolve", "readObject", "writeObject", "writeReplace")
+
+      class UnusedPrivates extends Traverser {
+        val defnTrees = ListBuffer[MemberDef]()
+        val targets   = mutable.Set[Symbol]()
+        val setVars   = mutable.Set[Symbol]()
+        val treeTypes = mutable.Set[Type]()
+
+        def defnSymbols = defnTrees.toList map (_.symbol)
+        def localVars   = defnSymbols filter (t => t.isLocalToBlock && t.isVar)
+
+        def qualifiesTerm(sym: Symbol) = (
+             (sym.isModule || sym.isMethod || sym.isPrivateLocal || sym.isLocalToBlock)
+          && !nme.isLocalName(sym.name)
+          && !sym.isParameter
+          && !sym.isParamAccessor       // could improve this, but it's a pain
+          && !sym.isEarlyInitialized    // lots of false positives in the way these are encoded
+          && !(sym.isGetter && sym.accessed.isEarlyInitialized)
+        )
+        def qualifiesType(sym: Symbol) = !sym.isDefinedInPackage
+        def qualifies(sym: Symbol) = (
+             (sym ne null)
+          && (sym.isTerm && qualifiesTerm(sym) || sym.isType && qualifiesType(sym))
+        )
+
+        override def traverse(t: Tree): Unit = {
+          t match {
+            case t: MemberDef if qualifies(t.symbol)   => defnTrees += t
+            case t: RefTree if t.symbol ne null        => targets += t.symbol
+            case Assign(lhs, _) if lhs.symbol != null  => setVars += lhs.symbol
+            case _                                     =>
+          }
+          // Only record type references which don't originate within the
+          // definition of the class being referenced.
+          if (t.tpe ne null) {
+            for (tp <- t.tpe ; if !treeTypes(tp) && !currentOwner.ownerChain.contains(tp.typeSymbol)) {
+              tp match {
+                case NoType | NoPrefix    =>
+                case NullaryMethodType(_) =>
+                case MethodType(_, _)     =>
+                case _                    =>
+                  log(s"$tp referenced from $currentOwner")
+                  treeTypes += tp
+              }
+            }
+            // e.g. val a = new Foo ; new a.Bar ; don't let a be reported as unused.
+            t.tpe.prefix foreach {
+              case SingleType(_, sym) => targets += sym
+              case _                 =>
+            }
+          }
+          super.traverse(t)
+        }
+        def isUnusedType(m: Symbol): Boolean = (
+              m.isType
+          && !m.isTypeParameterOrSkolem // would be nice to improve this
+          && (m.isPrivate || m.isLocalToBlock)
+          && !(treeTypes.exists(tp => tp exists (t => t.typeSymbolDirect == m)))
+        )
+        def isUnusedTerm(m: Symbol): Boolean = (
+             (m.isTerm)
+          && (m.isPrivate || m.isLocalToBlock)
+          && !targets(m)
+          && !(m.name == nme.WILDCARD)              // e.g. val _ = foo
+          && !ignoreNames(m.name.toTermName)        // serialization methods
+          && !isConstantType(m.info.resultType)     // subject to constant inlining
+          && !treeTypes.exists(_ contains m)        // e.g. val a = new Foo ; new a.Bar
+        )
+        def unusedTypes = defnTrees.toList filter (t => isUnusedType(t.symbol))
+        def unusedTerms = defnTrees.toList filter (v => isUnusedTerm(v.symbol))
+        // local vars which are never set, except those already returned in unused
+        def unsetVars = localVars filter (v => !setVars(v) && !isUnusedTerm(v))
+      }
+
+      def apply(unit: CompilationUnit) = {
+        val p = new UnusedPrivates
+        p traverse unit.body
+        val unused = p.unusedTerms
+        unused foreach { defn: DefTree =>
+          val sym             = defn.symbol
+          val pos = (
+            if (defn.pos.isDefined) defn.pos
+            else if (sym.pos.isDefined) sym.pos
+            else sym match {
+              case sym: TermSymbol => sym.referenced.pos
+              case _               => NoPosition
+            }
+          )
+          val why = if (sym.isPrivate) "private" else "local"
+          val what = (
+            if (sym.isDefaultGetter) "default argument"
+            else if (sym.isConstructor) "constructor"
+            else if (sym.isVar || sym.isGetter && sym.accessed.isVar) "var"
+            else if (sym.isVal || sym.isGetter && sym.accessed.isVal) "val"
+            else if (sym.isSetter) "setter"
+            else if (sym.isMethod) "method"
+            else if (sym.isModule) "object"
+            else "term"
+          )
+          unit.warning(pos, s"$why $what in ${sym.owner} is never used")
+        }
+        p.unsetVars foreach { v =>
+          unit.warning(v.pos, s"local var ${v.name} in ${v.owner} is never set - it could be a val")
+        }
+        p.unusedTypes foreach { t =>
+          val sym = t.symbol
+          val why = if (sym.isPrivate) "private" else "local"
+          unit.warning(t.pos, s"$why ${sym.fullLocationString} is never used")
+        }
+      }
+    }
+
     object checkDead {
       private val exprStack: mutable.Stack[Symbol] = mutable.Stack(NoSymbol)
       // The method being applied to `tree` when `apply` is called.
@@ -466,17 +579,13 @@ trait TypeDiagnostics {
         // Error suppression will squash some of these warnings unless we circumvent it.
         // It is presumed if you are using a -Y option you would really like to hear
         // the warnings you've requested.
-        if (settings.warnDeadCode.value && context.unit.exists && treeOK(tree) && exprOK)
-          context.warning(tree.pos, "dead code following this construct", true)
+        if (settings.warnDeadCode && context.unit.exists && treeOK(tree) && exprOK)
+          context.warning(tree.pos, "dead code following this construct", force = true)
         tree
       }
 
       // The checkDead call from typedArg is more selective.
-      def inMode(mode: Int, tree: Tree): Tree = {
-        val modeOK = (mode & (EXPRmode | BYVALmode | POLYmode)) == (EXPRmode | BYVALmode)
-        if (modeOK) apply(tree)
-        else tree
-      }
+      def inMode(mode: Mode, tree: Tree): Tree = if (mode.typingMonoExprByValue) apply(tree) else tree
     }
 
     private def symWasOverloaded(sym: Symbol) = sym.owner.isClass && sym.owner.info.member(sym.name).isOverloaded
@@ -497,7 +606,7 @@ trait TypeDiagnostics {
 
     /** Report a type error.
      *
-     *  @param pos0   The position where to report the error
+     *  @param pos    The position where to report the error
      *  @param ex     The exception that caused the error
      */
     def reportTypeError(context0: Context, pos: Position, ex: TypeError) {
@@ -506,7 +615,7 @@ trait TypeDiagnostics {
       // but it seems that throwErrors excludes some of the errors that should actually be
       // buffered, causing TypeErrors to fly around again. This needs some more investigation.
       if (!context0.reportErrors) throw ex
-      if (settings.debug.value) ex.printStackTrace()
+      if (settings.debug) ex.printStackTrace()
 
       ex match {
         case CyclicReference(sym, info: TypeCompleter) =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala
new file mode 100644
index 0000000..cb1f1f4
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala
@@ -0,0 +1,239 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala.tools.nsc
+package typechecker
+
+import java.lang.{ reflect => r }
+import r.TypeVariable
+import scala.reflect.NameTransformer
+import NameTransformer._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.{ClassTag, classTag}
+
+/** A more principled system for turning types into strings.
+ */
+trait StructuredTypeStrings extends DestructureTypes {
+  val global: Global
+  import global._
+
+  case class LabelAndType(label: String, typeName: String) { }
+  object LabelAndType {
+    val empty = LabelAndType("", "")
+  }
+  case class Grouping(ldelim: String, mdelim: String, rdelim: String, labels: Boolean) {
+    def join(elems: String*): String = (
+      if (elems.isEmpty) ""
+      else elems.mkString(ldelim, mdelim, rdelim)
+    )
+  }
+  val NoGrouping      = Grouping("", "", "", labels = false)
+  val ListGrouping    = Grouping("(", ", ", ")", labels = false)
+  val ProductGrouping = Grouping("(", ", ", ")", labels = true)
+  val BlockGrouping   = Grouping(" { ", "; ", "}", labels = false)
+
+  private def str(level: Int)(body: => String): String = "  " * level + body
+  private def block(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = {
+    val l1 = str(level)(name + grouping.ldelim)
+    val l2 = nodes.map(_ show level + 1)
+    val l3 = str(level)(grouping.rdelim)
+
+    l1 +: l2 :+ l3 mkString "\n"
+  }
+  private def maybeBlock(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = {
+    val threshold = 70
+
+    val try1 = str(level)(name + grouping.join(nodes map (_.show(0, grouping.labels)): _*))
+    if (try1.length < threshold) try1
+    else block(level, grouping)(name, nodes)
+  }
+  private def shortClass(x: Any) = {
+    if (settings.debug) {
+      val name   = (x.getClass.getName split '.').last
+      val str    = if (TypeStrings.isAnonClass(x.getClass)) name else (name split '$').last
+
+      " // " + str
+    }
+    else ""
+  }
+
+  sealed abstract class TypeNode {
+    def grouping: Grouping
+    def nodes: List[TypeNode]
+
+    def show(indent: Int, showLabel: Boolean): String = maybeBlock(indent, grouping)(mkPrefix(showLabel), nodes)
+    def show(indent: Int): String = show(indent, showLabel = true)
+    def show(): String = show(0)
+
+    def withLabel(l: String): this.type = modifyNameInfo(_.copy(label = l))
+    def withType(t: String): this.type  = modifyNameInfo(_.copy(typeName = t))
+
+    def label       = nameInfo.label
+    def typeName    = nameInfo.typeName
+
+    protected def mkPrefix(showLabel: Boolean) = {
+      val pre = if (showLabel && label != "") label + " = " else ""
+      pre + typeName
+    }
+    override def toString = show() // + "(toString)"
+    private var nameInfo: LabelAndType = LabelAndType.empty
+    private def modifyNameInfo(f: LabelAndType => LabelAndType): this.type = {
+      nameInfo = f(nameInfo)
+      this
+    }
+  }
+  case class TypeAtom[T](atom: T) extends TypeNode {
+    def grouping = NoGrouping
+    def nodes = Nil
+    override protected def mkPrefix(showLabel: Boolean) =
+      super.mkPrefix(showLabel) + atom + shortClass(atom)
+  }
+  case class TypeProduct(nodes: List[TypeNode]) extends TypeNode {
+    def grouping: Grouping = ProductGrouping
+    def emptyTypeName = ""
+    override def typeName = if (nodes.isEmpty) emptyTypeName else super.typeName
+  }
+
+  /** For a NullaryMethod, in = TypeEmpty; for MethodType(Nil, _) in = TypeNil */
+  class NullaryFunction(out: TypeNode) extends TypeProduct(List(out)) {
+    override def typeName = "NullaryMethodType"
+  }
+  class MonoFunction(in: TypeNode, out: TypeNode) extends TypeProduct(List(in, out)) {
+    override def typeName = "MethodType"
+  }
+  class PolyFunction(in: TypeNode, out: TypeNode) extends TypeProduct(List(in, out)) {
+    override def typeName = "PolyType"
+  }
+
+  class TypeList(nodes: List[TypeNode]) extends TypeProduct(nodes) {
+    override def grouping = ListGrouping
+    override def emptyTypeName = "Nil"
+    override def typeName = "List"
+  }
+
+  object TypeEmpty extends TypeNode {
+    override def grouping = NoGrouping
+    override def nodes = Nil
+    override def label = ""
+    override def typeName = ""
+    override def show(indent: Int, showLabel: Boolean) = ""
+  }
+
+  object intoNodes extends DestructureType[TypeNode] {
+    def withLabel(node: TypeNode, label: String): TypeNode   = node withLabel label
+    def withType(node: TypeNode, typeName: String): TypeNode = node withType typeName
+
+    def wrapEmpty                             = TypeEmpty
+    def wrapSequence(nodes: List[TypeNode])   = new TypeList(nodes)
+    def wrapProduct(nodes: List[TypeNode])    = new TypeProduct(nodes)
+    def wrapPoly(in: TypeNode, out: TypeNode) = new PolyFunction(in, out)
+    def wrapMono(in: TypeNode, out: TypeNode) = if (in == wrapEmpty) new NullaryFunction(out) else new MonoFunction(in, out)
+    def wrapAtom[U](value: U)                 = new TypeAtom(value)
+  }
+
+  def show(tp: Type): String = intoNodes(tp).show()
+}
+
+
+/** Logic for turning a type into a String.  The goal is to be
+ *  able to take some arbitrary object 'x' and obtain the most precise
+ *  String for which an injection of x.asInstanceOf[String] will
+ *  be valid from both the JVM's and scala's perspectives.
+ *
+ *  "definition" is when you want strings like
+ */
+trait TypeStrings {
+  private type JClass = java.lang.Class[_]
+  private val ObjectClass = classOf[java.lang.Object]
+  private val primitives = Set[String]("byte", "char", "short", "int", "long", "float", "double", "boolean", "void")
+  private val primitiveMap = (primitives.toList map { x =>
+    val key = x match {
+      case "int"  => "Integer"
+      case "char" => "Character"
+      case s      => s.capitalize
+    }
+    val value = x match {
+      case "void" => "Unit"
+      case s      => s.capitalize
+    }
+
+    ("java.lang." + key) -> ("scala." + value)
+  }).toMap
+
+  def isAnonClass(cl: Class[_]) = {
+    val xs = cl.getName.reverse takeWhile (_ != '$')
+    xs.nonEmpty && xs.forall(_.isDigit)
+  }
+
+  def scalaName(s: String): String = {
+    if (s endsWith MODULE_SUFFIX_STRING) s.init + ".type"
+    else if (s == "void") "scala.Unit"
+    else if (primitives(s)) "scala." + s.capitalize
+    else primitiveMap.getOrElse(s, NameTransformer.decode(s))
+  }
+  // Trying to put humpty dumpty back together again.
+  def scalaName(clazz: JClass): String = {
+    val name      = clazz.getName
+    val enclClass = clazz.getEnclosingClass
+    def enclPre   = enclClass.getName + MODULE_SUFFIX_STRING
+    def enclMatch = name startsWith enclPre
+
+    scalaName(
+      if (enclClass == null || isAnonClass(clazz) || !enclMatch) name
+      else enclClass.getName + "." + (name stripPrefix enclPre)
+    )
+  }
+  def anyClass(x: Any): JClass = if (x == null) null else x.getClass
+
+  private def brackets(tps: String*): String =
+    if (tps.isEmpty) ""
+    else tps.mkString("[", ", ", "]")
+
+  private def tvarString(tvar: TypeVariable[_]): String = tvarString(tvar.getBounds.toList)
+  private def tvarString(bounds: List[AnyRef]): String = {
+    val xs = bounds filterNot (_ == ObjectClass) collect { case x: JClass => x }
+    if (xs.isEmpty) "_"
+    else scalaName(xs.head)
+  }
+  private def tparamString(clazz: JClass): String = {
+    brackets(clazz.getTypeParameters map tvarString: _*)
+  }
+
+  private def tparamString[T: ru.TypeTag] : String = {
+    import ru._ // get TypeRefTag in scope so that pattern match works (TypeRef is an abstract type)
+    def typeArguments: List[ru.Type] = ru.typeOf[T] match { case ru.TypeRef(_, _, args) => args; case _ => Nil }
+    brackets(typeArguments map (jc => tvarString(List(jc))): _*)
+  }
+
+  /** Going for an overabundance of caution right now.  Later these types
+   *  can be a lot more precise, but right now the tags have a habit of
+   *  introducing material which is not syntactically valid as scala source.
+   *  When this happens it breaks the repl.  It would be nice if we mandated
+   *  that tag toString methods (or some other method, since it's bad
+   *  practice to rely on toString for correctness) generated the VALID string
+   *  representation of the type.
+   */
+  def fromValue(value: Any): String                          = if (value == null) "Null" else fromClazz(anyClass(value))
+  def fromClazz(clazz: JClass): String                       = scalaName(clazz) + tparamString(clazz)
+  def fromTag[T: ru.TypeTag : ClassTag] : String             = scalaName(classTag[T].runtimeClass) + tparamString[T]
+
+  /** Reducing fully qualified noise for some common packages.
+   */
+  def quieter(tpe: String, alsoStrip: String*): String = {
+    val transforms = List(
+      "scala.collection.immutable." -> "immutable.",
+      "scala.collection.mutable." -> "mutable.",
+      "scala.collection.generic." -> "generic.",
+      "java.lang." -> "jl.",
+      "scala.runtime." -> "runtime."
+    ) ++ (alsoStrip map (_ -> ""))
+
+    transforms.foldLeft(tpe) {
+      case (res, (k, v)) => res.replaceAll(k, v)
+    }
+  }
+}
+
+object TypeStrings extends TypeStrings { }
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index e09a509..9f557f4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -9,13 +9,15 @@
 // Added: Thu Apr 12 18:23:58 2007
 //todo: disallow C#D in superclass
 //todo: treat :::= correctly
-package scala.tools.nsc
+package scala
+package tools.nsc
 package typechecker
 
-import scala.collection.mutable
-import scala.reflect.internal.util.{ BatchSourceFile, Statistics }
+import scala.collection.{mutable, immutable}
+import scala.reflect.internal.util.{ BatchSourceFile, Statistics, shortClassOfInstance }
 import mutable.ListBuffer
 import symtab.Flags._
+import Mode._
 
 // Suggestion check whether we can do without priming scopes with symbols of outer scopes,
 // like the IDE does.
@@ -24,88 +26,107 @@ import symtab.Flags._
  *  @author  Martin Odersky
  *  @version 1.0
  */
-trait Typers extends Modes with Adaptations with Tags {
+trait Typers extends Adaptations with Tags with TypersTracking with PatternTypers {
   self: Analyzer =>
 
   import global._
   import definitions._
   import TypersStats._
 
-  final def forArgMode(fun: Tree, mode: Int) =
-    if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode
-    else mode
+  final def forArgMode(fun: Tree, mode: Mode) =
+    if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode else mode
 
   // namer calls typer.computeType(rhs) on DefDef / ValDef when tpt is empty. the result
   // is cached here and re-used in typedDefDef / typedValDef
   // Also used to cache imports type-checked by namer.
-  val transformed = new mutable.HashMap[Tree, Tree]
+  val transformed = new mutable.AnyRefMap[Tree, Tree]
 
   final val shortenImports = false
 
+  // allows override of the behavior of the resetTyper method w.r.t comments
+  def resetDocComments() = {
+    clearDocComments()
+  }
+
   def resetTyper() {
     //println("resetTyper called")
     resetContexts()
     resetImplicits()
-    transformed.clear()
-    clearDocComments()
+    resetDocComments()
   }
 
-  object UnTyper extends Traverser {
-    override def traverse(tree: Tree) = {
-      if (tree != EmptyTree) tree.tpe = null
-      if (tree.hasSymbol) tree.symbol = NoSymbol
-      super.traverse(tree)
+  sealed abstract class SilentResult[+T] {
+    def isEmpty: Boolean
+    def nonEmpty = !isEmpty
+
+    @inline final def fold[U](none: => U)(f: T => U): U = this match {
+      case SilentResultValue(value) => f(value)
+      case _                        => none
+    }
+    @inline final def map[U](f: T => U): SilentResult[U] = this match {
+      case SilentResultValue(value) => SilentResultValue(f(value))
+      case x: SilentTypeError       => x
     }
+    @inline final def filter(p: T => Boolean): SilentResult[T] = this match {
+      case SilentResultValue(value) if !p(value) => SilentTypeError(TypeErrorWrapper(new TypeError(NoPosition, "!p")))
+      case _                                     => this
   }
-/* needed for experimental version where early types can be type arguments
-  class EarlyMap(clazz: Symbol) extends TypeMap {
-    def apply(tp: Type): Type = tp match {
-      case TypeRef(NoPrefix, sym, List()) if (sym hasFlag PRESUPER) =>
-        TypeRef(ThisType(clazz), sym, List())
-      case _ =>
-        mapOver(tp)
+    @inline final def orElse[T1 >: T](f: Seq[AbsTypeError] => T1): T1 = this match {
+      case SilentResultValue(value) => value
+      case s : SilentTypeError      => f(s.reportableErrors)
+    }
+  }
+  class SilentTypeError private(val errors: List[AbsTypeError]) extends SilentResult[Nothing] {
+    override def isEmpty = true
+    def err: AbsTypeError = errors.head
+    def reportableErrors = errors match {
+      case (e1: AmbiguousImplicitTypeError) +: _ =>
+        List(e1) // DRYer error reporting for neg/t6436b.scala
+      case all =>
+        all
     }
   }
-*/
+  object SilentTypeError {
+    def apply(errors: AbsTypeError*): SilentTypeError = new SilentTypeError(errors.toList)
+    def unapply(error: SilentTypeError): Option[AbsTypeError] = error.errors.headOption
+  }
 
-  sealed abstract class SilentResult[+T]
-  case class SilentTypeError(err: AbsTypeError) extends SilentResult[Nothing] { }
-  case class SilentResultValue[+T](value: T) extends SilentResult[T] { }
+  case class SilentResultValue[+T](value: T) extends SilentResult[T] { override def isEmpty = false }
 
   def newTyper(context: Context): Typer = new NormalTyper(context)
+
   private class NormalTyper(context : Context) extends Typer(context)
 
   // A transient flag to mark members of anonymous classes
   // that are turned private by typedBlock
   private final val SYNTHETIC_PRIVATE = TRANS_FLAG
 
-  private def isPastTyper = phase.id > currentRun.typerPhase.id
-
-  // To enable decent error messages when the typer crashes.
-  // TODO - this only catches trees which go through def typed,
-  // but there are all kinds of back ways - typedClassDef, etc. etc.
-  // Funnel everything through one doorway.
-  var lastTreeToTyper: Tree = EmptyTree
+  private final val InterpolatorCodeRegex  = """\$\{.*?\}""".r
+  private final val InterpolatorIdentRegex = """\$[$\w]+""".r // note that \w doesn't include $
 
-  // when true:
-  //  - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
-  //  - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
-  // this is disabled by: -Xoldpatmat or interactive compilation (we run it for scaladoc due to SI-5933)
-  private def newPatternMatching = opt.virtPatmat && !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id)
-
-  abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors {
+  abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with PatternTyper with TyperContextErrors {
     import context0.unit
-    import typeDebug.{ ptTree, ptBlock, ptLine }
+    import typeDebug.{ ptTree, ptBlock, ptLine, inGreen, inRed }
     import TyperErrorGen._
+    val runDefinitions = currentRun.runDefinitions
+    import runDefinitions._
 
-    val infer = new Inferencer(context0) {
-      override def isCoercible(tp: Type, pt: Type): Boolean = undoLog undo { // #3281
-        tp.isError || pt.isError ||
-        context0.implicitsEnabled && // this condition prevents chains of views
-        inferView(EmptyTree, tp, pt, false) != EmptyTree
-      }
+    private val transformed: mutable.Map[Tree, Tree] = unit.transformed
+
+    val infer = new Inferencer {
+      def context = Typer.this.context
+      // See SI-3281 re undoLog
+      override def isCoercible(tp: Type, pt: Type) = undoLog undo viewExists(tp, pt)
     }
 
+    /** Overridden to false in scaladoc and/or interactive. */
+    def canAdaptConstantTypeToLiteral = true
+    def canTranslateEmptyListToNil    = true
+    def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree
+
+    def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree =
+      typed(docDef.definition, mode, pt)
+
     /** Find implicit arguments and pass them to given tree.
      */
     def applyImplicitArgs(fun: Tree): Tree = fun.tpe match {
@@ -115,10 +136,7 @@ trait Typers extends Modes with Adaptations with Tags {
         // paramFailed cannot be initialized with params.exists(_.tpe.isError) because that would
         // hide some valid errors for params preceding the erroneous one.
         var paramFailed = false
-
-        def mkPositionalArg(argTree: Tree, paramName: Name) = argTree
-        def mkNamedArg(argTree: Tree, paramName: Name) = atPos(argTree.pos)(new AssignOrNamedArg(Ident(paramName), (argTree)))
-        var mkArg: (Tree, Name) => Tree = mkPositionalArg
+        var mkArg: (Name, Tree) => Tree = (_, tree) => tree
 
         // DEPMETTODO: instantiate type vars that depend on earlier implicit args (see adapt (4.1))
         //
@@ -129,28 +147,27 @@ trait Typers extends Modes with Adaptations with Tags {
           for(ar <- argResultsBuff)
             paramTp = paramTp.subst(ar.subst.from, ar.subst.to)
 
-          val res = if (paramFailed || (paramTp.isError && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, false, context)
+          val res = if (paramFailed || (paramTp.isError && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, isView = false, context)
           argResultsBuff += res
 
           if (res.isSuccess) {
-            argBuff += mkArg(res.tree, param.name)
+            argBuff += mkArg(param.name, res.tree)
           } else {
-            mkArg = mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
+            mkArg = gen.mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
             if (!param.hasDefault && !paramFailed) {
-              context.errBuffer.find(_.kind == ErrorKinds.Divergent) match {
-                case Some(divergentImplicit) if !settings.Xdivergence211.value =>
+              context.reportBuffer.errors.collectFirst {
+                case dte: DivergentImplicitTypeError => dte
+              } match {
+                case Some(divergent) =>
                   // DivergentImplicit error has higher priority than "no implicit found"
                   // no need to issue the problem again if we are still in silent mode
                   if (context.reportErrors) {
-                    context.issue(divergentImplicit)
-                    context.condBufferFlush(_.kind  == ErrorKinds.Divergent)
-                  }
-                case Some(divergentImplicit: DivergentImplicitTypeError) if settings.Xdivergence211.value =>
-                  if (context.reportErrors) {
-                    context.issue(divergentImplicit.withPt(paramTp))
-                    context.condBufferFlush(_.kind  == ErrorKinds.Divergent)
+                    context.issue(divergent.withPt(paramTp))
+                    context.reportBuffer.clearErrors {
+                      case dte: DivergentImplicitTypeError => true
+                    }
                   }
-                case None =>
+                case _ =>
                   NoImplicitFoundError(fun, param)
               }
               paramFailed = true
@@ -176,10 +193,17 @@ trait Typers extends Modes with Adaptations with Tags {
         fun
     }
 
+    def viewExists(from: Type, to: Type): Boolean = (
+         !from.isError
+      && !to.isError
+      && context.implicitsEnabled
+      && (inferView(EmptyTree, from, to, reportAmbiguous = false) != EmptyTree)
+    )
+
     def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree =
-      inferView(tree, from, to, reportAmbiguous, true)
+      inferView(tree, from, to, reportAmbiguous, saveErrors = true)
 
-    /** Infer an implicit conversion (``view'') between two types.
+    /** Infer an implicit conversion (`view`) between two types.
      *  @param tree             The tree which needs to be converted.
      *  @param from             The source type of the conversion
      *  @param to               The target type of the conversion
@@ -194,12 +218,12 @@ trait Typers extends Modes with Adaptations with Tags {
       debuglog("infer view from "+from+" to "+to)//debug
       if (isPastTyper) EmptyTree
       else from match {
-        case MethodType(_, _) => EmptyTree
+        case MethodType(_, _)     => EmptyTree
         case OverloadedType(_, _) => EmptyTree
-        case PolyType(_, _) => EmptyTree
-        case _ =>
+        case PolyType(_, _)       => EmptyTree
+        case _                    =>
           def wrapImplicit(from: Type): Tree = {
-            val result = inferImplicit(tree, functionType(from :: Nil, to), reportAmbiguous, true, context, saveErrors)
+            val result = inferImplicit(tree, functionType(from.withoutAnnotations :: Nil, to), reportAmbiguous, isView = true, context, saveAmbiguousDivergent = saveErrors)
             if (result.subst != EmptyTreeTypeSubstituter) {
               result.subst traverse tree
               notifyUndetparamsInferred(result.subst.from, result.subst.to)
@@ -237,32 +261,6 @@ trait Typers extends Modes with Adaptations with Tags {
       case _ => tp
     }
 
-    /** Check that <code>tree</code> is a stable expression.
-     *
-     *  @param tree ...
-     *  @return     ...
-     */
-    def checkStable(tree: Tree): Tree = (
-      if (treeInfo.isExprSafeToInline(tree)) tree
-      else if (tree.isErrorTyped) tree
-      else UnstableTreeError(tree)
-    )
-
-    /** Would tree be a stable (i.e. a pure expression) if the type
-     *  of its symbol was not volatile?
-     */
-    protected def isStableExceptVolatile(tree: Tree) = {
-      tree.hasSymbol && tree.symbol != NoSymbol && tree.tpe.isVolatile &&
-      { val savedTpe = tree.symbol.info
-        val savedSTABLE = tree.symbol getFlag STABLE
-        tree.symbol setInfo AnyRefClass.tpe
-        tree.symbol setFlag STABLE
-        val result = treeInfo.isExprSafeToInline(tree)
-        tree.symbol setInfo savedTpe
-        tree.symbol setFlag savedSTABLE
-        result
-      }
-    }
     private def errorNotClass(tpt: Tree, found: Type)  = { ClassTypeRequiredError(tpt, found); false }
     private def errorNotStable(tpt: Tree, found: Type) = { TypeNotAStablePrefixError(tpt, found); false }
 
@@ -294,16 +292,11 @@ trait Typers extends Modes with Adaptations with Tags {
       )
     }
 
-    /** Check that type <code>tp</code> is not a subtype of itself.
-     *
-     *  @param pos ...
-     *  @param tp  ...
-     *  @return    <code>true</code> if <code>tp</code> is not a subtype of itself.
+    /** Check that type `tp` is not a subtype of itself.
      */
     def checkNonCyclic(pos: Position, tp: Type): Boolean = {
       def checkNotLocked(sym: Symbol) = {
-        sym.initialize
-        sym.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false }
+        sym.initialize.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false }
       }
       tp match {
         case TypeRef(pre, sym, args) =>
@@ -314,12 +307,6 @@ trait Typers extends Modes with Adaptations with Tags {
 
         case SingleType(pre, sym) =>
           checkNotLocked(sym)
-/*
-        case TypeBounds(lo, hi) =>
-          var ok = true
-          for (t <- lo) ok = ok & checkNonCyclic(pos, t)
-          ok
-*/
         case st: SubType =>
           checkNonCyclic(pos, st.supertype)
         case ct: CompoundType =>
@@ -330,19 +317,19 @@ trait Typers extends Modes with Adaptations with Tags {
     }
 
     def checkNonCyclic(pos: Position, tp: Type, lockedSym: Symbol): Boolean = try {
-      if (!lockedSym.lock(CyclicReferenceError(pos, lockedSym))) false
+      if (!lockedSym.lock(CyclicReferenceError(pos, tp, lockedSym))) false
       else checkNonCyclic(pos, tp)
     } finally {
       lockedSym.unlock()
     }
 
     def checkNonCyclic(sym: Symbol) {
-      if (!checkNonCyclic(sym.pos, sym.tpe)) sym.setInfo(ErrorType)
+      if (!checkNonCyclic(sym.pos, sym.tpe_*)) sym.setInfo(ErrorType)
     }
 
     def checkNonCyclic(defn: Tree, tpt: Tree) {
       if (!checkNonCyclic(defn.pos, tpt.tpe, defn.symbol)) {
-        tpt.tpe = ErrorType
+        tpt setType ErrorType
         defn.symbol.setInfo(ErrorType)
       }
     }
@@ -373,28 +360,13 @@ trait Typers extends Modes with Adaptations with Tags {
       private var scope: Scope = _
       private var hiddenSymbols: List[Symbol] = _
 
-      /** Check that type <code>tree</code> does not refer to private
+      /** Check that type `tree` does not refer to private
        *  components unless itself is wrapped in something private
-       *  (<code>owner</code> tells where the type occurs).
-       *
-       *  @param owner ...
-       *  @param tree  ...
-       *  @return      ...
+       *  (`owner` tells where the type occurs).
        */
       def privates[T <: Tree](owner: Symbol, tree: T): T =
         check(owner, EmptyScope, WildcardType, tree)
 
-      /** Check that type <code>tree</code> does not refer to entities
-       *  defined in scope <code>scope</code>.
-       *
-       *  @param scope ...
-       *  @param pt    ...
-       *  @param tree  ...
-       *  @return      ...
-       */
-      def locals[T <: Tree](scope: Scope, pt: Type, tree: T): T =
-        check(NoSymbol, scope, pt, tree)
-
       private def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = {
         this.owner = owner
         this.scope = scope
@@ -407,7 +379,7 @@ trait Typers extends Modes with Adaptations with Tags {
           check(owner, scope, pt, tree setType tp1.typeSymbol.classBound)
         else if (owner == NoSymbol)
           tree setType packSymbols(hiddenSymbols.reverse, tp1)
-        else if (!phase.erasedTypes) { // privates
+        else if (!isPastTyper) { // privates
           val badSymbol = hiddenSymbols.head
           SymbolEscapesScopeError(tree, badSymbol)
         } else tree
@@ -421,7 +393,7 @@ trait Typers extends Modes with Adaptations with Tags {
           if (sym.isPrivate && !sym.hasFlag(SYNTHETIC_PRIVATE)) {
             var o = owner
             while (o != NoSymbol && o != sym.owner && o != sym.owner.linkedClassOfClass &&
-                   !o.isLocal && !o.isPrivate &&
+                   !o.isLocalToBlock && !o.isPrivate &&
                    !o.privateWithin.hasTransOwner(sym.owner))
               o = o.owner
             if (o == sym.owner || o == sym.owner.linkedClassOfClass)
@@ -470,7 +442,7 @@ trait Typers extends Modes with Adaptations with Tags {
       }
 
     /** The qualifying class
-     *  of a this or super with prefix <code>qual</code>.
+     *  of a this or super with prefix `qual`.
      *  packageOk is equal false when qualifying class symbol
      */
     def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean) =
@@ -511,11 +483,6 @@ trait Typers extends Modes with Adaptations with Tags {
     }
 
     @inline
-    final def typerReportAnyContextErrors[T](c: Context)(f: Typer => T): T = {
-      f(newTyper(c))
-    }
-
-    @inline
     final def withSavedContext[T](c: Context)(f: => T) = {
       val savedErrors = c.flushAndReturnBuffer()
       val res = f
@@ -533,8 +500,6 @@ trait Typers extends Modes with Adaptations with Tags {
         typer1
       } else this
 
-    final val xtypes = false
-
     /** Is symbol defined and not stale?
      */
     def reallyExists(sym: Symbol) = {
@@ -553,15 +518,21 @@ trait Typers extends Modes with Adaptations with Tags {
       }
     }
 
-    /** Does the context of tree <code>tree</code> require a stable type?
+    /** Does the context of tree `tree` require a stable type?
      */
-    private def isStableContext(tree: Tree, mode: Int, pt: Type) =
-      isNarrowable(tree.tpe) && ((mode & (EXPRmode | LHSmode)) == EXPRmode) &&
-      (xtypes ||
-      (pt.isStable ||
-       (mode & QUALmode) != 0 && !tree.symbol.isConstant ||
-       pt.typeSymbol.isAbstractType && pt.bounds.lo.isStable && !(tree.tpe <:< pt)) ||
-       pt.typeSymbol.isRefinementClass && !(tree.tpe <:< pt))
+    private def isStableContext(tree: Tree, mode: Mode, pt: Type) = {
+      def ptSym = pt.typeSymbol
+      def expectsStable = (
+           pt.isStable
+        || mode.inQualMode && !tree.symbol.isConstant
+        || !(tree.tpe <:< pt) && (ptSym.isAbstractType && pt.bounds.lo.isStable || ptSym.isRefinementClass)
+      )
+
+      (    isNarrowable(tree.tpe)
+        && mode.typingExprNotLhs
+        && expectsStable
+      )
+    }
 
     /** Make symbol accessible. This means:
      *  If symbol refers to package object, insert `.package` as second to last selector.
@@ -572,11 +543,13 @@ trait Typers extends Modes with Adaptations with Tags {
      *  @return modified tree and new prefix type
      */
     private def makeAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): (Tree, Type) =
-      if (isInPackageObject(sym, pre.typeSymbol)) {
+      if (context.isInPackageObject(sym, pre.typeSymbol)) {
         if (pre.typeSymbol == ScalaPackageClass && sym.isTerm) {
           // short cut some aliases. It seems pattern matching needs this
           // to notice exhaustiveness and to generate good code when
           // List extractors are mixed with :: patterns. See Test5 in lists.scala.
+          //
+          // TODO SI-6609 Eliminate this special case once the old pattern matcher is removed.
           def dealias(sym: Symbol) =
             (atPos(tree.pos.makeTransparent) {gen.mkAttributedRef(sym)} setPos tree.pos, sym.owner.thisType)
           sym.name match {
@@ -588,7 +561,7 @@ trait Typers extends Modes with Adaptations with Tags {
         }
         val qual = typedQualifier { atPos(tree.pos.makeTransparent) {
           tree match {
-            case Ident(_) => Ident(nme.PACKAGEkw)
+            case Ident(_) => Ident(rootMirror.getPackageObjectWithMember(pre, sym))
             case Select(qual, _) => Select(qual, nme.PACKAGEkw)
             case SelectFromTypeTree(qual, _) => Select(qual, nme.PACKAGEkw)
           }
@@ -605,66 +578,60 @@ trait Typers extends Modes with Adaptations with Tags {
         (checkAccessible(tree, sym, pre, site), pre)
       }
 
-    /** Is `sym` defined in package object of package `pkg`?
-     */
-    private def isInPackageObject(sym: Symbol, pkg: Symbol) = {
-      def isInPkgObj(sym: Symbol) =
-        !sym.owner.isPackage && {
-          sym.owner.isPackageObjectClass &&
-            sym.owner.owner == pkg ||
-            pkg.isInitialized && {
-              // need to be careful here to not get a cyclic reference during bootstrap
-              val pkgobj = pkg.info.member(nme.PACKAGEkw)
-              pkgobj.isInitialized &&
-                (pkgobj.info.member(sym.name).alternatives contains sym)
-            }
-        }
-      pkg.isPackageClass && {
-        if (sym.isOverloaded) sym.alternatives forall isInPkgObj
-        else isInPkgObj(sym)
-      }
-    }
-
     /** Post-process an identifier or selection node, performing the following:
-     *  1. Check that non-function pattern expressions are stable
+     *  1. Check that non-function pattern expressions are stable (ignoring volatility concerns -- SI-6815)
+     *       (and narrow the type of modules: a module reference in a pattern has type Foo.type, not "object Foo")
      *  2. Check that packages and static modules are not used as values
      *  3. Turn tree type into stable type if possible and required by context.
      *  4. Give getClass calls a more precise type based on the type of the target of the call.
      */
-    private def stabilize(tree: Tree, pre: Type, mode: Int, pt: Type): Tree = {
-      if (tree.symbol.isOverloaded && !inFunMode(mode))
+    protected def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = {
+
+      // Side effect time! Don't be an idiot like me and think you
+      // can move "val sym = tree.symbol" before this line, because
+      // inferExprAlternative side-effects the tree's symbol.
+      if (tree.symbol.isOverloaded && !mode.inFunMode)
         inferExprAlternative(tree, pt)
 
       val sym = tree.symbol
-      def fail() = NotAValueError(tree, sym)
+      val isStableIdPattern = mode.typingPatternNotConstructor && tree.isTerm
 
-      if (tree.isErrorTyped) tree
-      else if ((mode & (PATTERNmode | FUNmode)) == PATTERNmode && tree.isTerm) { // (1)
-        if (sym.isValue) {
-          val tree1 = checkStable(tree)
-          // A module reference in a pattern has type Foo.type, not "object Foo"
-          if (sym.isModule && !sym.isMethod) tree1 setType singleType(pre, sym)
-          else tree1
-        }
-        else fail()
-      } else if ((mode & (EXPRmode | QUALmode)) == EXPRmode && !sym.isValue && !phase.erasedTypes) { // (2)
-        fail()
-      } else {
-        if (sym.isStable && pre.isStable && !isByNameParamType(tree.tpe) &&
-            (isStableContext(tree, mode, pt) || sym.isModule && !sym.isMethod))
-          tree.setType(singleType(pre, sym))
-        // To fully benefit from special casing the return type of
-        // getClass, we have to catch it immediately so expressions
-        // like x.getClass().newInstance() are typed with the type of x.
-        else if (  isGetClass(tree.symbol)
-                // TODO: If the type of the qualifier is inaccessible, we can cause private types
-                // to escape scope here, e.g. pos/t1107.  I'm not sure how to properly handle this
-                // so for now it requires the type symbol be public.
-                && pre.typeSymbol.isPublic)
-          tree setType MethodType(Nil, getClassReturnType(pre))
-        else
-          tree
-      }
+      def isModuleTypedExpr = (
+           treeInfo.admitsTypeSelection(tree)
+        && (isStableContext(tree, mode, pt) || sym.isModuleNotMethod)
+      )
+      def isStableValueRequired = (
+           isStableIdPattern
+        || mode.in(all = EXPRmode, none = QUALmode) && !phase.erasedTypes
+      )
+      // To fully benefit from special casing the return type of
+      // getClass, we have to catch it immediately so expressions like
+      // x.getClass().newInstance() are typed with the type of x. TODO: If the
+      // type of the qualifier is inaccessible, we can cause private types to
+      // escape scope here, e.g. pos/t1107. I'm not sure how to properly handle
+      // this so for now it requires the type symbol be public.
+      def isGetClassCall = isGetClass(sym) && pre.typeSymbol.isPublic
+
+      def narrowIf(tree: Tree, condition: Boolean) =
+        if (condition) tree setType singleType(pre, sym) else tree
+
+      def checkStable(tree: Tree): Tree =
+        if (treeInfo.isStableIdentifierPattern(tree)) tree
+        else UnstableTreeError(tree)
+
+      if (tree.isErrorTyped)
+        tree
+      else if (!sym.isValue && isStableValueRequired) // (2)
+        NotAValueError(tree, sym)
+      else if (isStableIdPattern)                     // (1)
+        // A module reference in a pattern has type Foo.type, not "object Foo"
+        narrowIf(checkStable(tree), sym.isModuleNotMethod)
+      else if (isModuleTypedExpr)                     // (3)
+        narrowIf(tree, true)
+      else if (isGetClassCall)                        // (4)
+        tree setType MethodType(Nil, getClassReturnType(pre))
+      else
+        tree
     }
 
     private def isNarrowable(tpe: Type): Boolean = unwrapWrapperTypes(tpe) match {
@@ -672,22 +639,21 @@ trait Typers extends Modes with Adaptations with Tags {
       case _                                    => !phase.erasedTypes
     }
 
-    /**
-     *  @param tree ...
-     *  @param mode ...
-     *  @param pt   ...
-     *  @return     ...
-     */
-    def stabilizeFun(tree: Tree, mode: Int, pt: Type): Tree = {
+    def stabilizeFun(tree: Tree, mode: Mode, pt: Type): Tree = {
       val sym = tree.symbol
       val pre = tree match {
         case Select(qual, _) => qual.tpe
-        case _ => NoPrefix
+        case _               => NoPrefix
+      }
+      def stabilizable = (
+           pre.isStable
+        && sym.tpe.params.isEmpty
+        && (isStableContext(tree, mode, pt) || sym.isModule)
+      )
+      tree.tpe match {
+        case MethodType(_, _) if stabilizable => tree setType MethodType(Nil, singleType(pre, sym)) // TODO: should this be a NullaryMethodType?
+        case _                                => tree
       }
-      if (tree.tpe.isInstanceOf[MethodType] && pre.isStable && sym.tpe.params.isEmpty &&
-          (isStableContext(tree, mode, pt) || sym.isModule))
-        tree.setType(MethodType(List(), singleType(pre, sym))) // TODO: should this be a NullaryMethodType?
-      else tree
     }
 
     /** The member with given name of given qualifier tree */
@@ -730,14 +696,10 @@ trait Typers extends Modes with Adaptations with Tags {
           context.namedApplyBlockInfo = context1.namedApplyBlockInfo
           if (context1.hasErrors) {
             stopStats()
-            SilentTypeError(context1.errBuffer.head)
+            SilentTypeError(context1.errors: _*)
           } else {
             // If we have a successful result, emit any warnings it created.
-            if (context1.hasWarnings) {
-              context1.flushAndReturnWarningsBuffer() foreach {
-                case (pos, msg) => unit.warning(pos, msg)
-              }
-            }
+            context1.flushAndIssueWarnings()
             SilentResultValue(result)
           }
         } else {
@@ -775,7 +737,7 @@ trait Typers extends Modes with Adaptations with Tags {
           featureTrait.owner.ownerChain.takeWhile(_ != languageFeatureModule.moduleClass).reverse
         val featureName = (nestedOwners map (_.name + ".")).mkString + featureTrait.name
         def action(): Boolean = {
-          def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, true, false, context).isSuccess
+          def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, reportAmbiguous = true, isView = false, context).isSuccess
           def hasOption = settings.language.value exists (s => s == featureName || s == "_")
           val OK = hasImport || hasOption
           if (!OK) {
@@ -849,10 +811,12 @@ trait Typers extends Modes with Adaptations with Tags {
      *  (14) When in mode EXPRmode, apply a view
      *  If all this fails, error
      */
-    protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree = {
+    protected def adapt(tree: Tree, mode: Mode, pt: Type, original: Tree = EmptyTree): Tree = {
+      def hasUndets           = context.undetparams.nonEmpty
+      def hasUndetsInMonoMode = hasUndets && !mode.inPolyMode
 
       def adaptToImplicitMethod(mt: MethodType): Tree = {
-        if (context.undetparams.nonEmpty) { // (9) -- should revisit dropped condition `(mode & POLYmode) == 0`
+        if (hasUndets) { // (9) -- should revisit dropped condition `hasUndetsInMonoMode`
           // dropped so that type args of implicit method are inferred even if polymorphic expressions are allowed
           // needed for implicits in 2.8 collection library -- maybe once #3346 is fixed, we can reinstate the condition?
             context.undetparams = inferExprInstance(tree, context.extractUndetparams(), pt,
@@ -864,29 +828,28 @@ trait Typers extends Modes with Adaptations with Tags {
 
         // avoid throwing spurious DivergentImplicit errors
         if (context.hasErrors)
-          return setError(tree)
-
-        withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree)){ typer1 =>
-          if (original != EmptyTree && pt != WildcardType)
-            typer1.silent(tpr => {
-              val withImplicitArgs = tpr.applyImplicitArgs(tree)
-              if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway
-              else tpr.typed(withImplicitArgs, mode, pt)
-            }) match {
-              case SilentResultValue(result) =>
-                result
-              case _ =>
-                val resetTree = resetLocalAttrs(original)
+          setError(tree)
+        else
+          withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree))(typer1 =>
+            if (original != EmptyTree && pt != WildcardType) (
+              typer1 silent { tpr =>
+                val withImplicitArgs = tpr.applyImplicitArgs(tree)
+                if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway
+                else tpr.typed(withImplicitArgs, mode, pt)
+              }
+              orElse { _ =>
+                val resetTree = resetAttrs(original)
                 debuglog(s"fallback on implicits: ${tree}/$resetTree")
-                val tree1 = typed(resetTree, mode, WildcardType)
+                val tree1 = typed(resetTree, mode)
                 // Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
                 // we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin.
-                tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, pt)
+                tree1 setType pluginsTyped(tree1.tpe, this, tree1, mode, pt)
                 if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
-            }
-          else
-            typer1.typed(typer1.applyImplicitArgs(tree), mode, pt)
-        }
+              }
+            )
+            else
+              typer1.typed(typer1.applyImplicitArgs(tree), mode, pt)
+          )
       }
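    adaptToImplicitMethod covers case (4.1): a reference whose type is an implicit MethodType has its implicit argument list supplied (applyImplicitArgs), with a fallback that re-types the original tree when that fails. A minimal sketch of the situation it handles, not taken from the patch:

        // assumes the standard library Ordering instances are in implicit scope
        def order[T](implicit ord: Ordering[T]): Ordering[T] = ord
        val intOrd: Ordering[Int] = order   // adapted to order[Int](Ordering.Int) by supplying the implicit args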
 
       def instantiateToMethodType(mt: MethodType): Tree = {
@@ -895,174 +858,78 @@ trait Typers extends Modes with Adaptations with Tags {
           case Block(_, tree1) => tree1.symbol
           case _               => tree.symbol
         }
-        if (!meth.isConstructor && !meth.isTermMacro && isFunctionType(pt)) { // (4.2)
-          debuglog("eta-expanding " + tree + ":" + tree.tpe + " to " + pt)
+        if (!meth.isConstructor && isFunctionType(pt)) { // (4.2)
+          debuglog(s"eta-expanding $tree: ${tree.tpe} to $pt")
           checkParamsConvertible(tree, tree.tpe)
           val tree0 = etaExpand(context.unit, tree, this)
-          // println("eta "+tree+" ---> "+tree0+":"+tree0.tpe+" undet: "+context.undetparams+ " mode: "+Integer.toHexString(mode))
-
-          if (context.undetparams.nonEmpty) {
-            // #2624: need to infer type arguments for eta expansion of a polymorphic method
-            // context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand)
-            // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null
-            // can't type with the expected type, as we can't recreate the setup in (3) without calling typed
-            // (note that (3) does not call typed to do the polymorphic type instantiation --
-            //  it is called after the tree has been typed with a polymorphic expected result type)
-            instantiate(typed(tree0, mode, WildcardType), mode, pt)
-          } else
+
+          // #2624: need to infer type arguments for eta expansion of a polymorphic method
+          // context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand)
+          // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null
+          // can't type with the expected type, as we can't recreate the setup in (3) without calling typed
+          // (note that (3) does not call typed to do the polymorphic type instantiation --
+          //  it is called after the tree has been typed with a polymorphic expected result type)
+          if (hasUndets)
+            instantiate(typed(tree0, mode), mode, pt)
+          else
             typed(tree0, mode, pt)
-        } else if (!meth.isConstructor && mt.params.isEmpty) { // (4.3)
-          adapt(typed(Apply(tree, List()) setPos tree.pos), mode, pt, original)
-        } else if (context.implicitsEnabled) {
+        }
+        else if (!meth.isConstructor && mt.params.isEmpty) // (4.3)
+          adapt(typed(Apply(tree, Nil) setPos tree.pos), mode, pt, original)
+        else if (context.implicitsEnabled)
           MissingArgsForMethodTpeError(tree, meth)
-        } else {
+        else
           setError(tree)
-        }
       }
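    instantiateToMethodType implements cases (4.2) and (4.3): eta-expansion when a function type is expected, and insertion of an empty argument list for nullary methods. A minimal illustrative sketch:

        def add(x: Int, y: Int): Int = x + y
        val f: (Int, Int) => Int = add          // (4.2) eta-expanded to (x, y) => add(x, y)
        def stamp(): Long = System.currentTimeMillis
        val t: Long = stamp                     // (4.3) adapted to stamp()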
 
       def adaptType(): Tree = {
-        if (inFunMode(mode)) {
-          // todo. the commented line below makes sense for typechecking, say, TypeApply(Ident(`some abstract type symbol`), List(...))
-          // because otherwise Ident will have its tpe set to a TypeRef, not to a PolyType, and `typedTypeApply` will fail
-          // but this needs additional investigation, because it crashes t5228, gadts1 and maybe something else
-          // tree setType tree.tpe.normalize
+        // @M When not typing a type constructor (!context.inTypeConstructorAllowed)
+        // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
+        // and thus parameterized types must be applied to their type arguments
+        // @M TODO: why do kind-* trees have symbols, while higher-kinded ones don't?
+        def properTypeRequired = (
+             tree.hasSymbolField
+          && !context.inTypeConstructorAllowed
+          && !(tree.symbol.isJavaDefined && context.unit.isJava)
+        )
+        // @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
+        // (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
+        // @M: removed check for tree.hasSymbolField and replaced tree.symbol by tree.tpe.symbol
+        // (TypeTree's must also be checked here, and they don't directly have a symbol)
+        def kindArityMismatch = (
+             context.inTypeConstructorAllowed
+          && !sameLength(tree.tpe.typeParams, pt.typeParams)
+        )
+        // Note that we treat Any and Nothing as kind-polymorphic.
+        // We can't perform this check when typing type arguments to an overloaded method before the overload is resolved
+        // (or in the case of an error type) -- this is indicated by pt == WildcardType (see case TypeApply in typed1).
+        def kindArityMismatchOk = tree.tpe.typeSymbol match {
+          case NothingClass | AnyClass => true
+          case _                       => pt == WildcardType
+        }
+
+        // todo. It would make sense when mode.inFunMode to instead use
+        //    tree setType tree.tpe.normalize
+        // when typechecking, say, TypeApply(Ident(`some abstract type symbol`), List(...))
+        // because otherwise Ident will have its tpe set to a TypeRef, not to a PolyType, and `typedTypeApply` will fail
+        // but this needs additional investigation, because it crashes t5228, gadts1 and maybe something else
+        if (mode.inFunMode)
           tree
-        } else if (tree.hasSymbol && !tree.symbol.typeParams.isEmpty && !inHKMode(mode) &&
-          !(tree.symbol.isJavaDefined && context.unit.isJava)) { // (7)
-          // @M When not typing a higher-kinded type ((mode & HKmode) == 0)
-          // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
-          // and thus parameterized types must be applied to their type arguments
-          // @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
+        else if (properTypeRequired && tree.symbol.typeParams.nonEmpty)  // (7)
           MissingTypeParametersError(tree)
-        } else if ( // (7.1) @M: check kind-arity
-        // @M: removed check for tree.hasSymbol and replace tree.symbol by tree.tpe.symbol (TypeTree's must also be checked here, and they don't directly have a symbol)
-        (inHKMode(mode)) &&
-          // @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
-          // (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
-          !sameLength(tree.tpe.typeParams, pt.typeParams) &&
-          !(tree.tpe.typeSymbol == AnyClass ||
-            tree.tpe.typeSymbol == NothingClass ||
-            pt == WildcardType)) {
-          // Check that the actual kind arity (tree.symbol.typeParams.length) conforms to the expected
-          // kind-arity (pt.typeParams.length). Full checks are done in checkKindBounds in Infer.
-          // Note that we treat Any and Nothing as kind-polymorphic.
-          // We can't perform this check when typing type arguments to an overloaded method before the overload is resolved
-          // (or in the case of an error type) -- this is indicated by pt == WildcardType (see case TypeApply in typed1).
+        else if (kindArityMismatch && !kindArityMismatchOk)  // (7.1) @M: check kind-arity
           KindArityMismatchError(tree, pt)
-        } else tree match { // (6)
+        else tree match { // (6)
           case TypeTree() => tree
           case _          => TypeTree(tree.tpe) setOriginal tree
         }
       }
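    The checks labelled (7) and (7.1) reject a parameterized type used without type arguments where a proper type is required, and a kind-arity mismatch where a type constructor is expected. A minimal sketch of code each check rejects (illustrative only):

        class Box[A]
        // val b: Box = null                    // (7): Box takes type parameters
        def run[F[_]](x: F[Int]): F[Int] = x
        // run[Int](List(1))                    // (7.1): Int takes no type parameters, one expected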
 
-      /**
-       * To deal with the type slack between actual (run-time) types and statically known types, for each abstract type T,
-       * reflect its variance as a skolem that is upper-bounded by T (covariant position), or lower-bounded by T (contravariant).
-       *
-       * Consider the following example:
-       *
-       *  class AbsWrapperCov[+A]
-       *  case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
-       *
-       *  def unwrap[T](x: AbsWrapperCov[T]): Wrapped[T] = x match {
-       *    case Wrapper(wrapped) => // Wrapper's type parameter must not be assumed to be equal to T, it's *upper-bounded* by it
-       *      wrapped // : Wrapped[_ <: T]
-       *  }
-       *
-       * this method should type check if and only if Wrapped is covariant in its type parameter
-       *
-       * when inferring Wrapper's type parameter B from x's type AbsWrapperCov[T],
-       * we must take into account that x's actual type is AbsWrapperCov[Tactual] forSome {type Tactual <: T}
-       * as AbsWrapperCov is covariant in A -- in other words, we must not assume we know T exactly, all we know is its upper bound
-       *
-       * since method application is the only way to generate this slack between run-time and compile-time types (TODO: right!?),
-       * we can simply replace skolems that represent method type parameters as seen from the method's body
-       * by other skolems that are (upper/lower)-bounded by that type-parameter skolem
-       * (depending on the variance position of the skolem in the statically assumed type of the scrutinee, pt)
-       *
-       * see test/files/../t5189*.scala
-       */
-      def adaptConstrPattern(): Tree = { // (5)
-        def hasUnapplyMember(tp: Type) = reallyExists(unapplyMember(tp))
-        val overloadedExtractorOfObject = tree.symbol filter (sym => hasUnapplyMember(sym.tpe))
-        // if the tree's symbol's type does not define an extractor, maybe the tree's type does
-        // this is the case when we encounter an arbitrary tree as the target of an unapply call (rather than something that looks like a constructor call)
-        // (for now, this only happens due to wrapClassTagUnapply, but when we support parameterized extractors, it will become more common place)
-        val extractor = overloadedExtractorOfObject orElse unapplyMember(tree.tpe)
-        if (extractor != NoSymbol) {
-          // if we did some ad-hoc overloading resolution, update the tree's symbol
-          // do not update the symbol if the tree's symbol's type does not define an unapply member
-          // (e.g. since it's some method that returns an object with an unapply member)
-          if (overloadedExtractorOfObject != NoSymbol)
-            tree setSymbol overloadedExtractorOfObject
-
-          tree.tpe match {
-            case OverloadedType(pre, alts) => tree.tpe = overloadedType(pre, alts filter (alt => hasUnapplyMember(alt.tpe)))
-            case _ =>
-          }
-          val unapply = unapplyMember(extractor.tpe)
-          val clazz = unapplyParameterType(unapply)
-
-          if (unapply.isCase && clazz.isCase && !(clazz.ancestors exists (_.isCase))) {
-            // convert synthetic unapply of case class to case class constructor
-            val prefix = tree.tpe.prefix
-            val tree1 = TypeTree(clazz.primaryConstructor.tpe.asSeenFrom(prefix, clazz.owner))
-              .setOriginal(tree)
-
-            val skolems = new mutable.ListBuffer[TypeSymbol]
-            object variantToSkolem extends VariantTypeMap {
-              def apply(tp: Type) = mapOver(tp) match {
-                case TypeRef(NoPrefix, tpSym, Nil) if variance != 0 && tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
-                  // must initialize or tpSym.tpe might see random type params!!
-                  // without this, we'll get very weird types inferred in test/scaladoc/run/SI-5933.scala
-                  // TODO: why is that??
-                  tpSym.initialize
-                  val bounds = if (variance == 1) TypeBounds.upper(tpSym.tpe) else TypeBounds.lower(tpSym.tpe)
-                  // origin must be the type param so we can deskolemize
-                  val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
-                  // println("mapping "+ tpSym +" to "+ skolem + " : "+ bounds +" -- pt= "+ pt +" in "+ context.owner +" at "+ context.tree )
-                  skolems += skolem
-                  skolem.tpe
-                case tp1 => tp1
-              }
-            }
-
-            // have to open up the existential and put the skolems in scope
-            // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance)
-            val ptSafe   = variantToSkolem(pt) // TODO: pt.skolemizeExistential(context.owner, tree) ?
-            val freeVars = skolems.toList
-
-            // use "tree" for the context, not context.tree: don't make another CaseDef context,
-            // as instantiateTypeVar's bounds would end up there
-            val ctorContext = context.makeNewScope(tree, context.owner)
-            freeVars foreach ctorContext.scope.enter
-            newTyper(ctorContext).infer.inferConstructorInstance(tree1, clazz.typeParams, ptSafe)
-
-            // simplify types without losing safety,
-            // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems
-            val extrapolate = new ExistentialExtrapolation(freeVars) extrapolate (_: Type)
-            val extrapolated = tree1.tpe match {
-              case MethodType(ctorArgs, res) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
-                ctorArgs foreach (p => p.info = extrapolate(p.info)) // no need to clone, this is OUR method type
-                copyMethodType(tree1.tpe, ctorArgs, extrapolate(res))
-              case tp => tp
-            }
-
-            // once the containing CaseDef has been type checked (see typedCase),
-            // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
-            tree1 setType extrapolated
-          } else {
-            tree
-          }
-        } else {
-          CaseClassConstructorError(tree)
-        }
-      }
-
       def insertApply(): Tree = {
-        assert(!inHKMode(mode), modeString(mode)) //@M
+        assert(!context.inTypeConstructorAllowed, mode) //@M
         val adapted = adaptToName(tree, nme.apply)
-        def stabilize0(pre: Type): Tree = stabilize(adapted, pre, EXPRmode | QUALmode, WildcardType)
+        def stabilize0(pre: Type): Tree = stabilize(adapted, pre, MonoQualifierModes, WildcardType)
+
         // TODO reconcile the overlap between Typers#stabilize and TreeGen.stabilize
         val qual = adapted match {
           case This(_) =>
@@ -1083,31 +950,199 @@ trait Typers extends Modes with Adaptations with Tags {
           Select(qual setPos tree.pos.makeTransparent, nme.apply)
         }
       }
+      def adaptConstant(value: Constant): Tree = {
+        val sym = tree.symbol
+        if (sym != null && sym.isDeprecated) {
+          val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("")
+          unit.deprecationWarning(tree.pos, msg)
+        }
+        treeCopy.Literal(tree, value)
+      }
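    adaptConstant handles case (0): a reference with a compatible ConstantType is replaced by the literal itself, and a deprecation warning is issued if the folded symbol is deprecated. A minimal sketch, not part of the upstream sources:

        object Limits {
          @deprecated("use NewLimit instead", "2.11.0")
          final val OldLimit = 100              // has ConstantType(100)
        }
        val cap = Limits.OldLimit               // folded to Literal(100); the deprecation warning fires at this use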
+
+      // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
+      // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
+      // Here's my hypothesis of why this happens. The pattern matcher defines a variable of type
+      //
+      //   val x: T = expr
+      //
+      // where T is the type of expr, but T contains existential skolems ts.
+      // In that case, this value definition does not typecheck.
+      // The value definition
+      //
+      //   val x: T forSome { ts } = expr
+      //
+      // would typecheck. Or one can simply leave out the type of the `val`:
+      //
+      //   val x = expr
+      //
+      // SI-6029 shows another case where we also fail (in uncurry), but this time the expected
+      // type is an existential type.
+      //
+      // The reasons for both failures have to do with the way we (don't) transform
+      // skolem types along with the trees that contain them. We'd need a
+      // radically different approach to do it. But before investing a lot of time
+      // to do this (I have already sunk 3 full days into ultimately futile attempts
+      // to consistently transform skolems and fix 6029), I'd like to
+      // investigate ways to avoid skolems completely.
+      //
+      // upd. The same problem happens when we try to typecheck the result of macro expansion against its expected type
+      // (which is the return type of the macro definition instantiated in the context of expandee):
+      //
+      //   Test.scala:2: error: type mismatch;
+      //     found   : $u.Expr[Class[_ <: Object]]
+      //     required: reflect.runtime.universe.Expr[Class[?0(in value <local Test>)]] where type ?0(in value <local Test>) <: Object
+      //     scala.reflect.runtime.universe.reify(new Object().getClass)
+      //                                         ^
+      // Therefore, following Martin's advice, I use this logic to recover from skolem errors after macro expansions
+      // (by adding the ` || tree.attachments.get[MacroExpansionAttachment].isDefined` clause to the conditional above).
+      //
+      def adaptMismatchedSkolems() = {
+        def canIgnoreMismatch = (
+             !context.reportErrors && isPastTyper
+          || tree.hasAttachment[MacroExpansionAttachment]
+        )
+        def bound = pt match {
+          case ExistentialType(qs, _) => qs
+          case _                      => Nil
+        }
+        def msg = sm"""
+          |Recovering from existential or skolem type error in
+          |  $tree
+          |with type: ${tree.tpe}
+          |       pt: $pt
+          |  context: ${context.tree}
+          |  adapted
+          """.trim
+
+        val boundOrSkolems = if (canIgnoreMismatch) bound ++ pt.skolemsExceptMethodTypeParams else Nil
+        boundOrSkolems match {
+          case Nil => AdaptTypeError(tree, tree.tpe, pt) ; setError(tree)
+          case _   => logResult(msg)(adapt(tree, mode, deriveTypeWithWildcards(boundOrSkolems)(pt)))
+        }
+      }
+
+      def fallbackAfterVanillaAdapt(): Tree = {
+        def isPopulatedPattern = {
+          if ((tree.symbol ne null) && tree.symbol.isModule)
+            inferModulePattern(tree, pt)
+
+          isPopulated(tree.tpe, approximateAbstracts(pt))
+        }
+        if (mode.inPatternMode && isPopulatedPattern)
+          return tree
+
+        val tree1 = constfold(tree, pt) // (10) (11)
+        if (tree1.tpe <:< pt)
+          return adapt(tree1, mode, pt, original)
+
+        if (mode.typingExprNotFun) {
+          // The <: Any requirement inhibits attempts to adapt continuation types
+          // to non-continuation types.
+          if (tree.tpe <:< AnyTpe) pt.dealias match {
+            case TypeRef(_, UnitClass, _) => // (12)
+              if (settings.warnValueDiscard)
+                context.unit.warning(tree.pos, "discarded non-Unit value")
+              return typedPos(tree.pos, mode, pt)(Block(List(tree), Literal(Constant(()))))
+            case TypeRef(_, sym, _) if isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt) =>
+              if (settings.warnNumericWiden)
+                context.unit.warning(tree.pos, "implicit numeric widening")
+              return typedPos(tree.pos, mode, pt)(Select(tree, "to" + sym.name))
+            case _ =>
+          }
+          if (pt.dealias.annotations.nonEmpty && canAdaptAnnotations(tree, this, mode, pt)) // (13)
+            return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
+
+          if (hasUndets)
+            return instantiate(tree, mode, pt)
+
+          if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) {
+            // (14); the condition prevents chains of views
+            debuglog("inferring view from " + tree.tpe + " to " + pt)
+            inferView(tree, tree.tpe, pt, reportAmbiguous = true) match {
+              case EmptyTree =>
+              case coercion  =>
+                def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
+                if (settings.logImplicitConv)
+                  unit.echo(tree.pos, msg)
+
+                debuglog(msg)
+                val silentContext = context.makeImplicit(context.ambiguousErrors)
+                val res = newTyper(silentContext).typed(
+                  new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
+                silentContext.firstError match {
+                  case Some(err) => context.issue(err)
+                  case None      => return res
+                }
+            }
+          }
+        }
+
+        debuglog("error tree = " + tree)
+        if (settings.debug && settings.explaintypes)
+          explainTypes(tree.tpe, pt)
+
+        if (tree.tpe.isErroneous || pt.isErroneous)
+          setError(tree)
+        else
+          adaptMismatchedSkolems()
+      }
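    fallbackAfterVanillaAdapt implements steps (10)-(14): constant folding, value discarding, numeric widening, annotation adaptation and finally view inference. A minimal sketch of the value-discard, widening and view cases, assuming the usual warning flags (illustrative only):

        def sideEffect(): Unit = { 42 }         // (12) discarded non-Unit value; warns under -Ywarn-value-discard
        val d: Double = 3                       //      implicit numeric widening; warns under -Ywarn-numeric-widen
        import scala.language.implicitConversions
        implicit def strLen(s: String): Int = s.length
        val n: Int = "four"                     // (14) the view strLen is inferred and applied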
+
+      def vanillaAdapt(tree: Tree) = {
+        def applyPossible = {
+          def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
+          def hasPolymorphicApply = applyMeth.alternatives exists (_.tpe.typeParams.nonEmpty)
+          def hasMonomorphicApply = applyMeth.alternatives exists (_.tpe.paramSectionCount > 0)
+
+          dyna.acceptsApplyDynamic(tree.tpe) || (
+            if (mode.inTappMode)
+              tree.tpe.typeParams.isEmpty && hasPolymorphicApply
+            else
+              hasMonomorphicApply
+          )
+        }
+        def shouldInsertApply(tree: Tree) = mode.typingExprFun && {
+          tree.tpe match {
+            case _: MethodType | _: OverloadedType | _: PolyType => false
+            case _                                               => applyPossible
+          }
+        }
+        if (tree.isType)
+          adaptType()
+        else if (mode.typingExprNotFun && treeInfo.isMacroApplication(tree) && !isMacroExpansionSuppressed(tree))
+          macroExpand(this, tree, mode, pt)
+        else if (mode.typingConstructorPattern)
+          typedConstructorPattern(tree, pt)
+        else if (shouldInsertApply(tree))
+          insertApply()
+        else if (hasUndetsInMonoMode) { // (9)
+          assert(!context.inTypeConstructorAllowed, context) //@M
+          instantiatePossiblyExpectingUnit(tree, mode, pt)
+        }
+        else if (tree.tpe <:< pt)
+          tree
+        else
+          fallbackAfterVanillaAdapt()
+      }
 
       // begin adapt
-      tree.tpe match {
-        case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1)
+      if (isMacroImplRef(tree)) {
+        if (treeInfo.isMacroApplication(tree)) adapt(unmarkMacroImplRef(tree), mode, pt, original)
+        else tree
+      } else tree.tpe match {
+        case atp @ AnnotatedType(_, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1)
           adaptAnnotations(tree, this, mode, pt)
-        case ct @ ConstantType(value) if inNoModes(mode, TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0)
-          val sym = tree.symbol
-          if (sym != null && sym.isDeprecated) {
-            val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("")
-            unit.deprecationWarning(tree.pos, msg)
-          }
-          treeCopy.Literal(tree, value)
-        case OverloadedType(pre, alts) if !inFunMode(mode) => // (1)
+        case ct @ ConstantType(value) if mode.inNone(TYPEmode | FUNmode) && (ct <:< pt) && canAdaptConstantTypeToLiteral => // (0)
+          adaptConstant(value)
+        case OverloadedType(pre, alts) if !mode.inFunMode => // (1)
           inferExprAlternative(tree, pt)
           adapt(tree, mode, pt, original)
         case NullaryMethodType(restpe) => // (2)
           adapt(tree setType restpe, mode, pt, original)
-        case TypeRef(_, ByNameParamClass, List(arg)) if ((mode & EXPRmode) != 0) => // (2)
+        case TypeRef(_, ByNameParamClass, arg :: Nil) if mode.inExprMode => // (2)
           adapt(tree setType arg, mode, pt, original)
-        case tr @ TypeRef(_, sym, _) if sym.isAliasType && tr.dealias.isInstanceOf[ExistentialType] &&
-          ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
-          adapt(tree setType tr.dealias.skolemizeExistential(context.owner, tree), mode, pt, original)
-        case et @ ExistentialType(_, _) if ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
-          adapt(tree setType et.skolemizeExistential(context.owner, tree), mode, pt, original)
-        case PolyType(tparams, restpe) if inNoModes(mode, TAPPmode | PATTERNmode | HKmode) => // (3)
+        case tp if mode.typingExprNotLhs && isExistentialType(tp) =>
+          adapt(tree setType tp.dealias.skolemizeExistential(context.owner, tree), mode, pt, original)
+        case PolyType(tparams, restpe) if mode.inNone(TAPPmode | PATTERNmode) && !context.inTypeConstructorAllowed => // (3)
           // assert((mode & HKmode) == 0) //@M a PolyType in HKmode represents an anonymous type function,
           // we're in HKmode since a higher-kinded type is expected --> hence, don't implicitly apply it to type params!
           // ticket #2197 triggered turning the assert into a guard
@@ -1116,176 +1151,24 @@ trait Typers extends Modes with Adaptations with Tags {
           // -- are we sure we want to expand aliases this early?
           // -- what caused this change in behaviour??
           val tparams1 = cloneSymbols(tparams)
-          val tree1 = if (tree.isType) tree
-          else TypeApply(tree, tparams1 map (tparam =>
-            TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos
+          val tree1 = (
+            if (tree.isType) tree
+            else TypeApply(tree, tparams1 map (tparam => TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos
+          )
           context.undetparams ++= tparams1
           notifyUndetparamsAdded(tparams1)
           adapt(tree1 setType restpe.substSym(tparams, tparams1), mode, pt, original)
-        case mt: MethodType if mt.isImplicit && ((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) => // (4.1)
-          adaptToImplicitMethod(mt)
 
-        case mt: MethodType if (((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) &&
-          (context.undetparams.isEmpty || inPolyMode(mode))) && !(tree.symbol != null && tree.symbol.isTermMacro) =>
+        case mt: MethodType if mode.typingExprNotFunNotLhs && mt.isImplicit => // (4.1)
+          adaptToImplicitMethod(mt)
+        case mt: MethodType if mode.typingExprNotFunNotLhs && !hasUndetsInMonoMode && !treeInfo.isMacroApplicationOrBlock(tree) =>
           instantiateToMethodType(mt)
-
         case _ =>
-          def shouldInsertApply(tree: Tree) = inAllModes(mode, EXPRmode | FUNmode) && (tree.tpe match {
-            case _: MethodType | _: OverloadedType | _: PolyType => false
-            case _                                               => applyPossible
-          })
-          def applyPossible = {
-            def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
-            dyna.acceptsApplyDynamic(tree.tpe) || (
-              if ((mode & TAPPmode) != 0)
-                tree.tpe.typeParams.isEmpty && applyMeth.filter(!_.tpe.typeParams.isEmpty) != NoSymbol
-              else
-                applyMeth.filter(_.tpe.paramSectionCount > 0) != NoSymbol
-            )
-          }
-          if (tree.isType)
-            adaptType()
-          else if (
-              inExprModeButNot(mode, FUNmode) && !tree.isDef &&   // typechecking application
-              tree.symbol != null && tree.symbol.isTermMacro &&   // of a macro
-              !isMacroExpansionSuppressed(tree))
-            macroExpand(this, tree, mode, pt)
-          else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode))
-            adaptConstrPattern()
-          else if (shouldInsertApply(tree))
-            insertApply()
-          else if (context.undetparams.nonEmpty && !inPolyMode(mode)) { // (9)
-            assert(!inHKMode(mode), modeString(mode)) //@M
-            instantiatePossiblyExpectingUnit(tree, mode, pt)
-          } else if (tree.tpe <:< pt) {
-            tree
-          } else {
-            def fallBack: Tree = {
-              if (inPatternMode(mode)) {
-                if ((tree.symbol ne null) && tree.symbol.isModule)
-                  inferModulePattern(tree, pt)
-                if (isPopulated(tree.tpe, approximateAbstracts(pt)))
-                  return tree
-              }
-              val tree1 = constfold(tree, pt) // (10) (11)
-              if (tree1.tpe <:< pt) adapt(tree1, mode, pt, original)
-              else {
-                if (inExprModeButNot(mode, FUNmode)) {
-                  pt.dealias match {
-                    case TypeRef(_, sym, _) =>
-                      // note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially
-                      // infinite expansion if pt is constant type ()
-                      if (sym == UnitClass && tree.tpe <:< AnyClass.tpe) { // (12)
-                        if (settings.warnValueDiscard.value)
-                          context.unit.warning(tree.pos, "discarded non-Unit value")
-                        return typedPos(tree.pos, mode, pt) {
-                          Block(List(tree), Literal(Constant()))
-                        }
-                      } else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) {
-                        if (settings.warnNumericWiden.value)
-                          context.unit.warning(tree.pos, "implicit numeric widening")
-                        return typedPos(tree.pos, mode, pt) {
-                          Select(tree, "to" + sym.name)
-                        }
-                      }
-                    case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (13)
-                      return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
-                    case _ =>
-                  }
-                  if (!context.undetparams.isEmpty) {
-                    return instantiate(tree, mode, pt)
-                  }
-                  if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) {
-                    // (14); the condition prevents chains of views
-                    debuglog("inferring view from " + tree.tpe + " to " + pt)
-                    val coercion = inferView(tree, tree.tpe, pt, true)
-                    // convert forward views of delegate types into closures wrapped around
-                    // the delegate's apply method (the "Invoke" method, which was translated into apply)
-                    if (forMSIL && coercion != null && isCorrespondingDelegate(tree.tpe, pt)) {
-                      val meth: Symbol = tree.tpe.member(nme.apply)
-                      debuglog("replacing forward delegate view with: " + meth + ":" + meth.tpe)
-                      return typed(Select(tree, meth), mode, pt)
-                    }
-                    if (coercion != EmptyTree) {
-                      def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
-                      if (settings.logImplicitConv.value)
-                        unit.echo(tree.pos, msg)
-
-                      debuglog(msg)
-                      val silentContext = context.makeImplicit(context.ambiguousErrors)
-                      val res = newTyper(silentContext).typed(
-                        new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
-                      if (silentContext.hasErrors) context.issue(silentContext.errBuffer.head) else return res
-                    }
-                  }
-                }
-                if (settings.debug.value) {
-                  log("error tree = " + tree)
-                  if (settings.explaintypes.value) explainTypes(tree.tpe, pt)
-                }
-
-                val found = tree.tpe
-                if (!found.isErroneous && !pt.isErroneous) {
-                  if ((!context.reportErrors && isPastTyper) || tree.attachments.get[MacroExpansionAttachment].isDefined) {
-                    val (bound, req) = pt match {
-                      case ExistentialType(qs, tpe) => (qs, tpe)
-                      case _ => (Nil, pt)
-                    }
-                    val boundOrSkolems = bound ++ pt.skolemsExceptMethodTypeParams
-                    if (boundOrSkolems.nonEmpty) {
-                      // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
-                      // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
-                      // Here's my hypothsis why this happens. The pattern matcher defines a variable of type
-                      //
-                      //   val x: T = expr
-                      //
-                      // where T is the type of expr, but T contains existential skolems ts.
-                      // In that case, this value definition does not typecheck.
-                      // The value definition
-                      //
-                      //   val x: T forSome { ts } = expr
-                      //
-                      // would typecheck. Or one can simply leave out the type of the `val`:
-                      //
-                      //   val x = expr
-                      //
-                      // SI-6029 shows another case where we also fail (in uncurry), but this time the expected
-                      // type is an existential type.
-                      //
-                      // The reason for both failures have to do with the way we (don't) transform
-                      // skolem types along with the trees that contain them. We'd need a
-                      // radically different approach to do it. But before investing a lot of time to
-                      // to do this (I have already sunk 3 full days with in the end futile attempts
-                      // to consistently transform skolems and fix 6029), I'd like to
-                      // investigate ways to avoid skolems completely.
-                      //
-                      // upd. The same problem happens when we try to typecheck the result of macro expansion against its expected type
-                      // (which is the return type of the macro definition instantiated in the context of expandee):
-                      //
-                      //   Test.scala:2: error: type mismatch;
-                      //     found   : $u.Expr[Class[_ <: Object]]
-                      //     required: reflect.runtime.universe.Expr[Class[?0(in value <local Test>)]] where type ?0(in value <local Test>) <: Object
-                      //     scala.reflect.runtime.universe.reify(new Object().getClass)
-                      //                                         ^
-                      // Therefore following Martin's advice I use this logic to recover from skolem errors after macro expansions
-                      // (by adding the ` || tree.attachments.get[MacroExpansionAttachment].isDefined` clause to the conditional above).
-                      //
-                      log("recovering from existential or skolem type error in tree \n" + tree + "\nwith type " + tree.tpe + "\n expected type = " + pt + "\n context = " + context.tree)
-                      return adapt(tree, mode, deriveTypeWithWildcards(boundOrSkolems)(pt))
-                    }
-                  }
-                  // create an actual error
-                  AdaptTypeError(tree, found, pt)
-                }
-                setError(tree)
-              }
-            }
-            fallBack
-          }
+          vanillaAdapt(tree)
       }
     }
 
-    def instantiate(tree: Tree, mode: Int, pt: Type): Tree = {
+    def instantiate(tree: Tree, mode: Mode, pt: Type): Tree = {
       inferExprInstance(tree, context.extractUndetparams(), pt)
       adapt(tree, mode, pt)
     }
@@ -1293,19 +1176,17 @@ trait Typers extends Modes with Adaptations with Tags {
      *  with expected type Unit, but if that fails, try again with pt = WildcardType
      *  and discard the expression.
      */
-    def instantiateExpectingUnit(tree: Tree, mode: Int): Tree = {
+    def instantiateExpectingUnit(tree: Tree, mode: Mode): Tree = {
       val savedUndetparams = context.undetparams
-      silent(_.instantiate(tree, mode, UnitClass.tpe)) match {
-        case SilentResultValue(t) => t
-        case _ =>
-          context.undetparams = savedUndetparams
-          val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant())))
-          typed(valueDiscard, mode, UnitClass.tpe)
+      silent(_.instantiate(tree, mode, UnitTpe)) orElse { _ =>
+        context.undetparams = savedUndetparams
+        val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant(()))))
+        typed(valueDiscard, mode, UnitTpe)
       }
     }
 
-    def instantiatePossiblyExpectingUnit(tree: Tree, mode: Int, pt: Type): Tree = {
-      if (inExprModeButNot(mode, FUNmode) && pt.typeSymbol == UnitClass)
+    def instantiatePossiblyExpectingUnit(tree: Tree, mode: Mode, pt: Type): Tree = {
+      if (mode.typingExprNotFun && pt.typeSymbol == UnitClass)
         instantiateExpectingUnit(tree, mode)
       else
         instantiate(tree, mode, pt)
@@ -1341,7 +1222,7 @@ trait Typers extends Modes with Adaptations with Tags {
         inferView(qual, qual.tpe, searchTemplate, reportAmbiguous, saveErrors) match {
           case EmptyTree  => qual
           case coercion   =>
-            if (settings.logImplicitConv.value)
+            if (settings.logImplicitConv)
               unit.echo(qual.pos,
                 "applied implicit conversion from %s to %s = %s".format(
                   qual.tpe, searchTemplate, coercion.symbol.defString))
@@ -1364,43 +1245,36 @@ trait Typers extends Modes with Adaptations with Tags {
       def doAdapt(restpe: Type) =
         //util.trace("adaptToArgs "+qual+", name = "+name+", argtpes = "+(args map (_.tpe))+", pt = "+pt+" = ")
         adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), restpe), reportAmbiguous, saveErrors)
-      if (pt != WildcardType) {
-        silent(_ => doAdapt(pt)) match {
-          case SilentResultValue(result) if result != qual =>
-            result
-          case _ =>
-            debuglog("fallback on implicits in adaptToArguments: "+qual+" . "+name)
-            doAdapt(WildcardType)
-        }
-      } else
+
+      if (pt == WildcardType)
         doAdapt(pt)
+      else silent(_ => doAdapt(pt)) filter (_ != qual) orElse (_ =>
+        logResult(s"fallback on implicits in adaptToArguments: $qual.$name")(doAdapt(WildcardType))
+      )
     }
 
     /** Try to apply an implicit conversion to `qual` so that it contains
      *  a method `name`. If that's ambiguous try taking arguments into
      *  account using `adaptToArguments`.
      */
-    def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Int, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
-      def onError(reportError: => Tree): Tree = {
-        context.tree match {
-          case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
-            silent(_.typedArgs(args.map(_.duplicate), mode)) match {
-              case SilentResultValue(args) =>
-                if (args exists (_.isErrorTyped))
-                  reportError
-                else
-                  adaptToArguments(qual, name, args, WildcardType, reportAmbiguous, saveErrors)
-              case _            =>
-                reportError
-            }
-          case _ =>
-            reportError
-        }
-      }
-      silent(_.adaptToMember(qual, HasMember(name), false)) match {
-          case SilentResultValue(res) => res
-          case SilentTypeError(err) => onError({if (reportAmbiguous) { context.issue(err) }; setError(tree)})
+    def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Mode, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
+      def onError(reportError: => Tree): Tree = context.tree match {
+        case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
+          ( silent   (_.typedArgs(args.map(_.duplicate), mode))
+              filter (xs => !(xs exists (_.isErrorTyped)))
+                 map (xs => adaptToArguments(qual, name, xs, WildcardType, reportAmbiguous, saveErrors))
+              orElse ( _ => reportError)
+          )
+        case _            =>
+          reportError
       }
+
+      silent(_.adaptToMember(qual, HasMember(name), reportAmbiguous = false)) orElse (errs =>
+        onError {
+          if (reportAmbiguous) errs foreach (context issue _)
+          setError(tree)
+        }
+      )
     }
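    adaptToMember and adaptToMemberWithArgs try an implicit view on the qualifier so that it acquires the selected member, retrying with the duplicated, re-typed arguments when the plain HasMember search fails or is ambiguous. A minimal sketch of the basic case, not from the patch:

        import scala.language.implicitConversions
        class Rich(val i: Int) { def squared: Int = i * i }
        implicit def enrich(i: Int): Rich = new Rich(i)
        val nine = 3.squared                    // adapted to enrich(3).squared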
 
     /** Try to apply an implicit conversion to `qual` to that it contains a
@@ -1411,13 +1285,6 @@ trait Typers extends Modes with Adaptations with Tags {
       if (member(qual, name) != NoSymbol) qual
       else adaptToMember(qual, HasMember(name))
 
-    private def typePrimaryConstrBody(clazz : Symbol, cbody: Tree, tparams: List[Symbol], enclTparams: List[Symbol], vparamss: List[List[ValDef]]): Tree = {
-      // XXX: see about using the class's symbol....
-      enclTparams foreach (sym => context.scope.enter(sym))
-      namer.enterValueParams(vparamss)
-      typed(cbody)
-    }
-
     private def validateNoCaseAncestor(clazz: Symbol) = {
       if (!phase.erasedTypes) {
         for (ancestor <- clazz.ancestors find (_.isCase)) {
@@ -1500,17 +1367,25 @@ trait Typers extends Modes with Adaptations with Tags {
         unit.error(clazz.pos, "value class may not be a "+
           (if (clazz.owner.isTerm) "local class" else "member of another class"))
       if (!clazz.isPrimitiveValueClass) {
-        clazz.info.decls.toList.filter(acc => acc.isMethod && acc.isParamAccessor) match {
-          case List(acc) =>
-            def isUnderlyingAcc(sym: Symbol) =
-              sym == acc || acc.hasAccessorFlag && sym == acc.accessed
-            if (acc.accessBoundary(clazz) != rootMirror.RootClass)
-              unit.error(acc.pos, "value class needs to have a publicly accessible val parameter")
-            else if (acc.tpe.typeSymbol.isDerivedValueClass)
-              unit.error(acc.pos, "value class may not wrap another user-defined value class")
-            checkEphemeral(clazz, body filterNot (stat => isUnderlyingAcc(stat.symbol)))
-          case x =>
-            unit.error(clazz.pos, "value class needs to have exactly one public val parameter")
+        clazz.primaryConstructor.paramss match {
+          case List(List(param)) =>
+            val decls = clazz.info.decls
+            val paramAccessor = clazz.constrParamAccessors.head
+            if (paramAccessor.isMutable)
+              unit.error(paramAccessor.pos, "value class parameter must not be a var")
+            val accessor = decls.toList.find(x => x.isMethod && x.accessedOrSelf == paramAccessor)
+            accessor match {
+              case None =>
+                unit.error(paramAccessor.pos, "value class parameter must be a val and not be private[this]")
+              case Some(acc) if acc.isProtectedLocal =>
+                unit.error(paramAccessor.pos, "value class parameter must not be protected[this]")
+              case Some(acc) =>
+                if (acc.tpe.typeSymbol.isDerivedValueClass)
+                  unit.error(acc.pos, "value class may not wrap another user-defined value class")
+                checkEphemeral(clazz, body filterNot (stat => stat.symbol != null && stat.symbol.accessedOrSelf == paramAccessor))
+            }
+          case _ =>
+            unit.error(clazz.pos, "value class needs to have exactly one val parameter")
         }
       }
 
@@ -1519,126 +1394,255 @@ trait Typers extends Modes with Adaptations with Tags {
           unit.error(tparam.pos, "type parameter of value class may not be specialized")
     }
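    The value class validation above constrains the shape of the single constructor parameter. A minimal illustrative sketch of definitions it accepts and rejects, matching the error messages emitted:

        class Meters(val value: Double) extends AnyVal            // ok: exactly one public val parameter
        // class M1(var value: Double) extends AnyVal              // error: parameter must not be a var
        // class M2(private[this] val v: Double) extends AnyVal    // error: must be a val and not be private[this]
        // class M3(val m: Meters) extends AnyVal                  // error: may not wrap another user-defined value class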
 
-    def parentTypes(templ: Template): List[Tree] =
-      if (templ.parents.isEmpty) List(atPos(templ.pos)(TypeTree(AnyRefClass.tpe)))
-      else try {
-        val clazz = context.owner
-        // Normalize supertype and mixins so that supertype is always a class, not a trait.
-        var supertpt = typedTypeConstructor(templ.parents.head)
-        val firstParent = supertpt.tpe.typeSymbol
-        var mixins = templ.parents.tail map typedType
-        // If first parent is a trait, make it first mixin and add its superclass as first parent
-        while ((supertpt.tpe.typeSymbol ne null) && supertpt.tpe.typeSymbol.initialize.isTrait) {
-          val supertpt1 = typedType(supertpt)
-          if (!supertpt1.isErrorTyped) {
-            mixins = supertpt1 :: mixins
-            supertpt = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
+    /** Typechecks a parent type reference.
+     *
+     *  This typecheck is harder than it might look, because it should honor early
+     *  definitions and also perform type argument inference with the help of super call
+     *  arguments provided in `encodedtpt`.
+     *
+     *  The method is called in batches (batch = 1 time per each parent type referenced),
+     *  two batches per definition: once from namer, when entering a ClassDef or a ModuleDef
+     *  and once from typer, when typechecking the definition.
+     *
+     *  ***Arguments***
+     *
+     *  `encodedtpt` represents the parent type reference wrapped in an `Apply` node
+     *  which indicates value arguments (i.e. type macro arguments or super constructor call arguments).
+     *  If no value arguments are provided by the user, the `Apply` node is still
+     *  there, but its `args` will be set to `Nil`.
+     *  This argument is synthesized by `tools.nsc.ast.Parsers.templateParents`.
+     *
+     *  `templ` is an enclosing template, which contains a primary constructor synthesized by the parser.
+     *  Such a constructor is a DefDef which contains early initializers and maybe a super constructor call
+     *  (I wrote "maybe" because trait constructors don't call super constructors).
+     *  This argument is synthesized by `tools.nsc.ast.Trees.Template`.
+     *
+     *  `inMixinPosition` indicates whether the reference appears after the first position in the
+     *  list of parents (and therefore cannot be a class).
+     *
+     *  ***Return value and side effects***
+     *
+     *  Returns a `TypeTree` representing a resolved parent type.
+     *  If the typechecked parent reference implies non-nullary and non-empty argument list,
+     *  this argument list is attached to the returned value in SuperArgsAttachment.
+     *  The attachment is necessary for the subsequent typecheck to fix up a super constructor call
+     *  in the body of the primary constructor (see `typedTemplate` for details).
+     *
+     *  This method might invoke `typedPrimaryConstrBody`, hence it might cause the side effects
+     *  described in the docs of that method. It might also attribute the Super(_, _) reference
+     *  (if present) inside the primary constructor of `templ`.
+     *
+     *  ***Example***
+     *
+     *  For the following definition:
+     *
+     *    class D extends {
+     *      val x = 2
+     *      val y = 4
+     *    } with B(x)(3) with C(y) with T
+     *
+     *  this method will be called six times:
+     *
+     *    (3 times from the namer)
+     *    typedParentType(Apply(Apply(Ident(B), List(Ident(x))), List(3)), templ, inMixinPosition = false)
+     *    typedParentType(Apply(Ident(C), List(Ident(y))), templ, inMixinPosition = true)
+     *    typedParentType(Apply(Ident(T), List()), templ, inMixinPosition = true)
+     *
+     *    (3 times from the typer)
+     *    <the same three calls>
+     */
+    private def typedParentType(encodedtpt: Tree, templ: Template, inMixinPosition: Boolean): Tree = {
+      val app = treeInfo.dissectApplied(encodedtpt)
+      val (treeInfo.Applied(core, _, argss), decodedtpt) = ((app, app.callee))
+      val argssAreTrivial = argss == Nil || argss == ListOfNil
+
+      // we cannot avoid cyclic references with `initialize` here, because when type macros arrive,
+      // we'll have to check the probe for isTypeMacro anyway.
+      // Therefore I think it's reasonable to trade a more specific "inherits itself" error
+      // for a generic, yet understandable "cyclic reference" error
+      var probe = typedTypeConstructor(core.duplicate).tpe.typeSymbol
+      if (probe == null) probe = NoSymbol
+      probe.initialize
+
+      if (probe.isTrait || inMixinPosition) {
+        if (!argssAreTrivial) {
+          if (probe.isTrait) ConstrArgsInParentWhichIsTraitError(encodedtpt, probe)
+          else () // a class in a mixin position - this warrants an error in `validateParentClasses`
+                  // therefore here we do nothing, e.g. don't check that the # of ctor arguments
+                  // matches the # of ctor parameters or stuff like that
+        }
+        typedType(decodedtpt)
+      } else {
+        val supertpt = typedTypeConstructor(decodedtpt)
+        val supertparams = if (supertpt.hasSymbolField) supertpt.symbol.typeParams else Nil
+        def inferParentTypeArgs: Tree = {
+          typedPrimaryConstrBody(templ) {
+            val supertpe = PolyType(supertparams, appliedType(supertpt.tpe, supertparams map (_.tpeHK)))
+            val supercall = New(supertpe, mmap(argss)(_.duplicate))
+            val treeInfo.Applied(Select(ctor, nme.CONSTRUCTOR), _, _) = supercall
+            ctor setType supertpe // this is an essential hack, otherwise it will occasionally fail to typecheck
+            atPos(supertpt.pos.focus)(supercall)
+          } match {
+            case EmptyTree => MissingTypeArgumentsParentTpeError(supertpt); supertpt
+            case tpt       => TypeTree(tpt.tpe) setPos supertpt.pos  // SI-7224: don't .focus positions of the TypeTree of a parent that exists in source
           }
         }
-        if (supertpt.tpe.typeSymbol == AnyClass && firstParent.isTrait)
-          supertpt.tpe = AnyRefClass.tpe
 
-        // Determine
-        //  - supertparams: Missing type parameters from supertype
-        //  - supertpe: Given supertype, polymorphic in supertparams
-        val supertparams = if (supertpt.hasSymbol) supertpt.symbol.typeParams else List()
-        var supertpe = supertpt.tpe
-        if (!supertparams.isEmpty)
-          supertpe = PolyType(supertparams, appliedType(supertpe, supertparams map (_.tpeHK)))
+        val supertptWithTargs = if (supertparams.isEmpty || context.unit.isJava) supertpt else inferParentTypeArgs
 
-        // A method to replace a super reference by a New in a supercall
-        def transformSuperCall(scall: Tree): Tree = (scall: @unchecked) match {
-          case Apply(fn, args) =>
-            treeCopy.Apply(scall, transformSuperCall(fn), args map (_.duplicate))
-          case Select(Super(_, _), nme.CONSTRUCTOR) =>
-            treeCopy.Select(
-              scall,
-              atPos(supertpt.pos.focus)(New(TypeTree(supertpe)) setType supertpe),
-              nme.CONSTRUCTOR)
-        }
+        // this is the place where we tell the typer what argss should be used for the super call
+        // if argss are nullary or empty, then (see the docs for `typedPrimaryConstrBody`)
+        // the super call dummy is already good enough, so we don't need to do anything
+        if (argssAreTrivial) supertptWithTargs else supertptWithTargs updateAttachment SuperArgsAttachment(argss)
+      }
+    }
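    When the first parent has type parameters, typedParentType infers them from the super constructor arguments by typechecking a synthetic super call inside the primary constructor (see typedPrimaryConstrBody below). A minimal sketch of the inference this enables, illustrative only:

        class B[T](x: T)
        class C extends B(42)                   // T is inferred as Int from the super constructor argument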
 
+    /** Typechecks the mishmash of trees that happen to be stuffed into the primary constructor of a given template.
+     *  Before commencing the typecheck, replaces the `pendingSuperCall` dummy with the result of `actualSuperCall`.
+     *  `actualSuperCall` can return `EmptyTree`, in which case the dummy is replaced with a literal unit.
+     *
+     *  ***Return value and side effects***
+     *
+     *  If a super call is present in the primary constructor and is not erased by the transform, returns it typechecked.
+     *  Otherwise (e.g. if the primary constructor is missing or the super call isn't there) returns `EmptyTree`.
+     *
+     *  As a side effect, this method attributes the underlying fields of early vals.
+     *  Early vals aren't typechecked anywhere else, so it's essential to call `typedPrimaryConstrBody`
+     *  at least once per definition. It'd be great to disentangle this logic at some point.
+     *
+     *  ***Example***
+     *
+     *  For the following definition:
+     *
+     *    class D extends {
+     *      val x = 2
+     *      val y = 4
+     *    } with B(x)(3) with C(y) with T
+     *
+     *  the primary constructor of `templ` will be:
+     *
+     *    Block(List(
+     *      ValDef(NoMods, x, TypeTree(), 2)
+     *      ValDef(NoMods, y, TypeTree(), 4)
+     *      global.pendingSuperCall,
+     *      Literal(Constant(())))
+     *
+     *  Note the `pendingSuperCall` part. This is the representation of a fill-me-in-later supercall dummy,
+     *  which encodes the fact that supercall argss are unknown during parsing and need to be transplanted
+     *  from one of the parent types. Read more about why the argss are unknown in `tools.nsc.ast.Trees.Template`.
+     */
+    private def typedPrimaryConstrBody(templ: Template)(actualSuperCall: => Tree): Tree =
         treeInfo.firstConstructor(templ.body) match {
-          case constr @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
-            // Convert constructor body to block in environment and typecheck it
+        case ctor @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
             val (preSuperStats, superCall) = {
               val (stats, rest) = cstats span (x => !treeInfo.isSuperConstrCall(x))
               (stats map (_.duplicate), if (rest.isEmpty) EmptyTree else rest.head.duplicate)
             }
-            val cstats1 = if (superCall == EmptyTree) preSuperStats else preSuperStats :+ superCall
-            val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall match {
-              case Apply(_, _) if supertparams.nonEmpty => transformSuperCall(superCall)
-              case _                                    => cunit.duplicate
-            })
-            val outercontext = context.outer
-
+          val superCall1 = (superCall match {
+            case global.pendingSuperCall => actualSuperCall
+            case EmptyTree => EmptyTree
+          }) orElse cunit
+          val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall1)
+          val clazz = context.owner
             assert(clazz != NoSymbol, templ)
-            val cscope = outercontext.makeNewScope(constr, outercontext.owner)
-            val cbody2 = newTyper(cscope) // called both during completion AND typing.
-                .typePrimaryConstrBody(clazz,
-                  cbody1, supertparams, clazz.unsafeTypeParams, vparamss map (_.map(_.duplicate)))
-
-            superCall match {
-              case Apply(_, _) =>
-                val treeInfo.Applied(_, _, argss) = superCall
-                val sarg = argss.flatten.headOption.getOrElse(EmptyTree)
-                if (sarg != EmptyTree && supertpe.typeSymbol != firstParent)
-                  ConstrArgsInTraitParentTpeError(sarg, firstParent)
-                if (!supertparams.isEmpty)
-                  supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos
-              case _ =>
-                if (!supertparams.isEmpty)
-                  MissingTypeArgumentsParentTpeError(supertpt)
+          val cscope = context.outer.makeNewScope(ctor, context.outer.owner)
+          val cbody2 = { // called both during completion AND typing.
+            val typer1 = newTyper(cscope)
+            // XXX: see about using the class's symbol....
+            clazz.unsafeTypeParams foreach (sym => typer1.context.scope.enter(sym))
+            typer1.namer.enterValueParams(vparamss map (_.map(_.duplicate)))
+            typer1.typed(cbody1)
             }
 
             val preSuperVals = treeInfo.preSuperFields(templ.body)
             if (preSuperVals.isEmpty && preSuperStats.nonEmpty)
-              debugwarn("Wanted to zip empty presuper val list with " + preSuperStats)
+            devWarning("Wanted to zip empty presuper val list with " + preSuperStats)
             else
-              map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe)
+            map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt setType ldef.symbol.tpe)
 
+          if (superCall1 == cunit) EmptyTree
+          else cbody2 match {
+            case Block(_, expr) => expr
+            case tree => tree
+          }
           case _ =>
-            if (!supertparams.isEmpty)
-              MissingTypeArgumentsParentTpeError(supertpt)
-        }
-/* experimental: early types as type arguments
-        val hasEarlyTypes = templ.body exists (treeInfo.isEarlyTypeDef)
-        val earlyMap = new EarlyMap(clazz)
-        List.mapConserve(supertpt :: mixins){ tpt =>
-          val tpt1 = checkNoEscaping.privates(clazz, tpt)
-          if (hasEarlyTypes) tpt1 else tpt1 setType earlyMap(tpt1.tpe)
+          EmptyTree
         }
-*/
 
-        //Console.println("parents("+clazz") = "+supertpt :: mixins);//DEBUG
+    /** Makes sure that the first type tree in the list of parent types is always a class.
+     *  If the first parent is a trait, prepend its supertype to the list until it's a class.
+     */
+    private def normalizeFirstParent(parents: List[Tree]): List[Tree] = {
+      @annotation.tailrec
+      def explode0(parents: List[Tree]): List[Tree] = {
+        val supertpt :: rest = parents // parents is always non-empty here - it only grows
+        if (supertpt.tpe.typeSymbol == AnyClass) {
+          supertpt setType AnyRefTpe
+          parents
+        } else if (treeInfo isTraitRef supertpt) {
+          val supertpt1  = typedType(supertpt)
+          def supersuper = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
+          if (supertpt1.isErrorTyped) rest
+          else explode0(supersuper :: supertpt1 :: rest)
+        } else parents
+      }
+
+      def explode(parents: List[Tree]) =
+        if (treeInfo isTraitRef parents.head) explode0(parents)
+        else parents
+
+      if (parents.isEmpty) Nil else explode(parents)
+    }
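
For illustration, a minimal user-level sketch (not taken from this patch) of the case `normalizeFirstParent` handles: when the written first parent is a trait, the typer prepends that trait's own superclass so the parents list starts with a class. The trait and class names below are hypothetical.

    trait Greeter {
      def greet(name: String): String = s"hello, $name"
    }

    // The only written parent is a trait, so the effective parents become
    // roughly: AnyRef (the trait's superclass) followed by Greeter.
    class EnglishGreeter extends Greeter

    object GreeterDemo extends App {
      println(new EnglishGreeter().greet("scala"))  // prints "hello, scala"
    }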
 
-        // Certain parents are added in the parser before it is known whether
-        // that class also declared them as parents.  For instance, this is an
-        // error unless we take corrective action here:
-        //
-        //   case class Foo() extends Serializable
-        //
-        // So we strip the duplicates before typer.
-        def fixDuplicates(remaining: List[Tree]): List[Tree] = remaining match {
-          case Nil      => Nil
-          case x :: xs  =>
-            val sym = x.symbol
-            x :: fixDuplicates(
-              if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym)
-              else xs
-            )
-        }
+    /** Certain parents are added in the parser before it is known whether
+     *  that class also declared them as parents. For instance, this is an
+     *  error unless we take corrective action here:
+     *
+     *    case class Foo() extends Serializable
+     *
+     *  So we strip the duplicates before typer.
+     */
+    private def fixDuplicateSyntheticParents(parents: List[Tree]): List[Tree] = parents match {
+      case Nil      => Nil
+      case x :: xs  =>
+        val sym = x.symbol
+        x :: fixDuplicateSyntheticParents(
+          if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym)
+          else xs
+        )
+    }
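
As a hedged user-level illustration of why `fixDuplicateSyntheticParents` matters: per the comment above, parents such as Serializable are added synthetically for case classes, so an explicit mention by the user would otherwise show up twice. The class name is hypothetical.

    // Compiles because the synthetic duplicate of Serializable is stripped before typing.
    case class Point(x: Int, y: Int) extends Serializable

    object DedupDemo extends App {
      println(Point(1, 2))  // prints "Point(1,2)"
    }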
 
-        fixDuplicates(supertpt :: mixins) mapConserve (tpt => checkNoEscaping.privates(clazz, tpt))
-      }
-      catch {
-        case ex: TypeError =>
-          // fallback in case of cyclic errors
-          // @H none of the tests enter here but I couldn't rule it out
-          log("Type error calculating parents in template " + templ)
-          log("Error: " + ex)
-          ParentTypesError(templ, ex)
-          List(TypeTree(AnyRefClass.tpe))
-      }
+    def typedParentTypes(templ: Template): List[Tree] = templ.parents match {
+      case Nil => List(atPos(templ.pos)(TypeTree(AnyRefTpe)))
+      case first :: rest =>
+        try {
+          val supertpts = fixDuplicateSyntheticParents(normalizeFirstParent(
+            typedParentType(first, templ, inMixinPosition = false) +:
+            (rest map (typedParentType(_, templ, inMixinPosition = true)))))
+
+          // if inferring the targs of a super call is required,
+          // typedParentType calls typedPrimaryConstrBody to do the inferring typecheck.
+          // as a side effect, that typecheck also assigns types to the fields underlying early vals;
+          // however, if inference is not required, the typecheck doesn't happen,
+          // and early fields are left with their type trees unassigned.
+          // here we detect this situation and take preventive measures.
+          if (treeInfo.hasUntypedPreSuperFields(templ.body))
+            typedPrimaryConstrBody(templ)(EmptyTree)
+
+          supertpts mapConserve (tpt => checkNoEscaping.privates(context.owner, tpt))
+        }
+        catch {
+          case ex: TypeError =>
+            // fallback in case of cyclic errors
+            // @H none of the tests enter here but I couldn't rule it out
+            // upd. @E when a definition inherits itself, we end up here
+            // because `typedParentType` triggers `initialize` for parent types symbols
+            log("Type error calculating parents in template " + templ)
+            log("Error: " + ex)
+            ParentTypesError(templ, ex)
+            List(TypeTree(AnyRefTpe))
+        }
+    }
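
To make the early-fields remark above concrete, here is a hedged, user-level sketch (hypothetical names): the parent has no type arguments to infer, so per the comment the primary constructor body is only typechecked via the extra `typedPrimaryConstrBody(templ)(EmptyTree)` call, which is what types the field underlying the early val `prefix`.

    trait Logger {
      val prefix: String
      def log(msg: String): Unit = println(prefix + msg)
    }

    // An early definition: `prefix` is initialized before Logger's initializer runs.
    class ConsoleLogger extends { val prefix = "app: " } with Logger

    object LoggerDemo extends App {
      new ConsoleLogger().log("started")  // prints "app: started"
    }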
 
     /** <p>Check that</p>
      *  <ul>
@@ -1678,30 +1682,29 @@ trait Typers extends Modes with Adaptations with Tags {
           if (psym.isFinal)
             pending += ParentFinalInheritanceError(parent, psym)
 
-          if (psym.hasDeprecatedInheritanceAnnotation) {
+          val sameSourceFile = context.unit.source.file == psym.sourceFile
+
+          if (psym.hasDeprecatedInheritanceAnnotation && !sameSourceFile) {
             val suffix = psym.deprecatedInheritanceMessage map (": " + _) getOrElse ""
             val msg = s"inheritance from ${psym.fullLocationString} is deprecated$suffix"
             unit.deprecationWarning(parent.pos, msg)
           }
 
           if (psym.isSealed && !phase.erasedTypes)
-            if (context.unit.source.file == psym.sourceFile)
+            if (sameSourceFile)
               psym addChild context.owner
             else
               pending += ParentSealedInheritanceError(parent, psym)
+          val parentTypeOfThis = parent.tpe.dealias.typeOfThis
 
-          if (!(selfType <:< parent.tpe.typeOfThis) &&
+          if (!(selfType <:< parentTypeOfThis) &&
               !phase.erasedTypes &&
               !context.owner.isSynthetic &&   // don't check synthetic concrete classes for virtuals (part of DEVIRTUALIZE)
-              !settings.noSelfCheck.value &&  // setting to suppress this very check
               !selfType.isErroneous &&
               !parent.tpe.isErroneous)
           {
-            //Console.println(context.owner);//DEBUG
-            //Console.println(context.owner.unsafeTypeParams);//DEBUG
-            //Console.println(List.fromArray(context.owner.info.closure));//DEBUG
             pending += ParentSelfTypeConformanceError(parent, selfType)
-            if (settings.explaintypes.value) explainTypes(selfType, parent.tpe.typeOfThis)
+            if (settings.explaintypes) explainTypes(selfType, parentTypeOfThis)
           }
 
           if (parents exists (p => p != parent && p.tpe.typeSymbol == psym && !psym.isError))
@@ -1715,13 +1718,6 @@ trait Typers extends Modes with Adaptations with Tags {
         for (p <- parents) validateParentClass(p, superclazz)
       }
 
-/*
-      if (settings.Xshowcls.value != "" &&
-          settings.Xshowcls.value == context.owner.fullName)
-        println("INFO "+context.owner+
-                ", baseclasses = "+(context.owner.info.baseClasses map (_.fullName))+
-                ", lin = "+(context.owner.info.baseClasses map (context.owner.thisType.baseType)))
-*/
       pending.foreach(ErrorUtils.issueTypeError)
     }
 
@@ -1731,7 +1727,7 @@ trait Typers extends Modes with Adaptations with Tags {
       for (tparam <- clazz.typeParams) {
         if (classinfo.expansiveRefs(tparam) contains tparam) {
           val newinfo = ClassInfoType(
-            classinfo.parents map (_.instantiateTypeParams(List(tparam), List(AnyRefClass.tpe))),
+            classinfo.parents map (_.instantiateTypeParams(List(tparam), List(AnyRefTpe))),
             classinfo.decls,
             clazz)
           clazz.setInfo {
@@ -1745,27 +1741,26 @@ trait Typers extends Modes with Adaptations with Tags {
       }
     }
 
-    /**
-     *  @param cdef ...
-     *  @return     ...
-     */
     def typedClassDef(cdef: ClassDef): Tree = {
-//      attributes(cdef)
       val clazz = cdef.symbol
       val typedMods = typedModifiers(cdef.mods)
       assert(clazz != NoSymbol, cdef)
       reenterTypeParams(cdef.tparams)
       val tparams1 = cdef.tparams mapConserve (typedTypeDef)
-      val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, parentTypes(cdef.impl))
+      val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, typedParentTypes(cdef.impl))
       val impl2 = finishMethodSynthesis(impl1, clazz, context)
       if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass)
         checkEphemeral(clazz, impl2.body)
-      if ((clazz != ClassfileAnnotationClass) &&
-          (clazz isNonBottomSubClass ClassfileAnnotationClass))
-        restrictionWarning(cdef.pos, unit,
-          "subclassing Classfile does not\n"+
-          "make your annotation visible at runtime.  If that is what\n"+
-          "you want, you must write the annotation class in Java.")
+
+      if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) {
+        if (!clazz.owner.isPackageClass)
+          unit.error(clazz.pos, "inner classes cannot be classfile annotations")
+        else restrictionWarning(cdef.pos, unit,
+          """|subclassing Classfile does not
+             |make your annotation visible at runtime.  If that is what
+             |you want, you must write the annotation class in Java.""".stripMargin)
+      }
+
       if (!isPastTyper) {
         for (ann <- clazz.getAnnotation(DeprecatedAttr)) {
           val m = companionSymbolOf(clazz, context)
@@ -1777,10 +1772,6 @@ trait Typers extends Modes with Adaptations with Tags {
         .setType(NoType)
     }
 
-    /**
-     *  @param mdef ...
-     *  @return     ...
-     */
     def typedModuleDef(mdef: ModuleDef): Tree = {
       // initialize all constructors of the linked class: the type completer (Namer.methodSig)
       // might add default getters to this object. example: "object T; class T(x: Int = 1)"
@@ -1798,47 +1789,28 @@ trait Typers extends Modes with Adaptations with Tags {
         || clazz.isSerializable
       )
       val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, {
-        parentTypes(mdef.impl) ++ (
+        typedParentTypes(mdef.impl) ++ (
           if (noSerializable) Nil
           else {
             clazz.makeSerializable()
-            List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus)
+            List(TypeTree(SerializableTpe) setPos clazz.pos.focus)
           }
         )
       })
 
       val impl2  = finishMethodSynthesis(impl1, clazz, context)
 
-      // SI-5954. On second compile of a companion class contained in a package object we end up
-      // with some confusion of names which leads to having two symbols with the same name in the
-      // same owner. Until that can be straightened out we will warn on companion objects in package
-      // objects. But this code also tries to be friendly by distinguishing between case classes and
-      // user written companion pairs
-      def warnPackageObjectMembers(mdef : ModuleDef) = for (m <- mdef.symbol.info.members) {
-        // ignore synthetic objects, because the "companion" object to a case class is synthetic and
-        // we only want one error per case class
-        if (!m.isSynthetic) {
-          // can't handle case classes in package objects
-          if (m.isCaseClass) pkgObjectWarning(m, mdef, "case")
-          // can't handle companion class/object pairs in package objects
-          else if ((m.isClass && m.companionModule != NoSymbol && !m.companionModule.isSynthetic) ||
-                   (m.isModule && m.companionClass != NoSymbol && !m.companionClass.isSynthetic))
-                     pkgObjectWarning(m, mdef, "companion")
-        }
-
-        def pkgObjectWarning(m : Symbol, mdef : ModuleDef, restricted : String) = {
-          val pkgName = mdef.symbol.ownerChain find (_.isPackage) map (_.decodedName) getOrElse mdef.symbol.toString
-          val pos = if (m.pos.isDefined) m.pos else mdef.pos
-          debugwarn(s"${m}  should be placed directly in package ${pkgName} instead of package object ${pkgName}. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.")
-          debugwarn(pos.lineContent + (if (pos.isDefined) " " * (pos.column - 1) + "^" else ""))
-        }
-      }
-
-      if (mdef.symbol.isPackageObject)
-        warnPackageObjectMembers(mdef)
+      if (settings.isScala211  && mdef.symbol == PredefModule)
+        ensurePredefParentsAreInSameSourceFile(impl2)
 
       treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType
     }
+
+    private def ensurePredefParentsAreInSameSourceFile(template: Template) = {
+      val parentSyms = template.parents map (_.symbol) filterNot (_ == AnyRefClass)
+      if (parentSyms exists (_.associatedFile != PredefModule.associatedFile))
+        unit.error(template.pos, s"All parents of Predef must be defined in ${PredefModule.associatedFile}.")
+    }
     /** In order to override this in the TreeCheckers Typer so synthetics aren't re-added
     *  all the time, it is exposed here; the module/class typing methods go through it.
      *  ...but it turns out it's also the ideal spot for namer/typer coordination for
@@ -1862,20 +1834,17 @@ trait Typers extends Modes with Adaptations with Tags {
     }
 
     protected def enterSym(txt: Context, tree: Tree): Context =
-      if (txt eq context) namer.enterSym(tree)
-      else newNamer(txt).enterSym(tree)
+      if (txt eq context) namer enterSym tree
+      else newNamer(txt) enterSym tree
 
-    /**
-     *  @param templ    ...
-     *  @param parents1 ...
-     *    <li> <!-- 2 -->
-     *      Check that inner classes do not inherit from Annotation
-     *    </li>
-     *  @return         ...
+    /** <!-- 2 --> Check that inner classes do not inherit from Annotation
      */
-    def typedTemplate(templ: Template, parents1: List[Tree]): Template = {
+    def typedTemplate(templ0: Template, parents1: List[Tree]): Template = {
+      val templ = templ0
+      // please FIXME: uncommenting this line breaks everything
+      // val templ = treeCopy.Template(templ0, templ0.body, templ0.self, templ0.parents)
       val clazz = context.owner
-      clazz.annotations.map(_.completeInfo)
+      clazz.annotations.map(_.completeInfo())
       if (templ.symbol == NoSymbol)
         templ setSymbol clazz.newLocalDummy(templ.pos)
       val self1 = templ.self match {
@@ -1901,25 +1870,41 @@ trait Typers extends Modes with Adaptations with Tags {
       )
       // the following is necessary for templates generated later
       assert(clazz.info.decls != EmptyScope, clazz)
-      enterSyms(context.outer.make(templ, clazz, clazz.info.decls), templ.body)
+      val body1 = pluginsEnterStats(this, templ.body)
+      enterSyms(context.outer.make(templ, clazz, clazz.info.decls), body1)
+      if (!templ.isErrorTyped) // if `parentTypes` has invalidated the template, don't validate it anymore
       validateParentClasses(parents1, selfType)
       if (clazz.isCase)
         validateNoCaseAncestor(clazz)
+      if (clazz.isTrait && hasSuperArgs(parents1.head))
+        ConstrArgsInParentOfTraitError(parents1.head, clazz)
 
-      if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.owner.isPackageClass)
+      if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.isTopLevel)
         unit.error(clazz.pos, "inner classes cannot be classfile annotations")
 
       if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members
         checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType])
 
-      val body =
-        if (isPastTyper || reporter.hasErrors) templ.body
-        else templ.body flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _))
+      val body2 = {
+        val body2 =
+          if (isPastTyper || reporter.hasErrors) body1
+          else body1 flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _))
+        val primaryCtor = treeInfo.firstConstructor(body2)
+        val primaryCtor1 = primaryCtor match {
+          case DefDef(_, _, _, _, _, Block(earlyVals :+ global.pendingSuperCall, unit)) =>
+            val argss = superArgs(parents1.head) getOrElse Nil
+            val pos = wrappingPos(parents1.head.pos, primaryCtor :: argss.flatten).makeTransparent
+            val superCall = atPos(pos)(PrimarySuperCall(argss))
+            deriveDefDef(primaryCtor)(block => Block(earlyVals :+ superCall, unit) setPos pos) setPos pos
+          case _ => primaryCtor
+        }
+        body2 mapConserve { case `primaryCtor` => primaryCtor1; case stat => stat }
+      }
 
-      val body1 = typedStats(body, templ.symbol)
+      val body3 = typedStats(body2, templ.symbol)
 
       if (clazz.info.firstParent.typeSymbol == AnyValClass)
-        validateDerivedValueClass(clazz, body1)
+        validateDerivedValueClass(clazz, body3)
 
       if (clazz.isTrait) {
         for (decl <- clazz.info.decls if decl.isTerm && decl.isEarlyInitialized) {
@@ -1927,28 +1912,24 @@ trait Typers extends Modes with Adaptations with Tags {
         }
       }
 
-      treeCopy.Template(templ, parents1, self1, body1) setType clazz.tpe
+      treeCopy.Template(templ, parents1, self1, body3) setType clazz.tpe_*
     }
 
     /** Remove definition annotations from modifiers (they have been saved
-     *  into the symbol's ``annotations'' in the type completer / namer)
+     *  into the symbol's `annotations` in the type completer / namer)
      *
      *  However reification does need annotation definitions to proceed.
      *  Unfortunately, AnnotationInfo doesn't provide enough info to reify it in general case.
      *  The biggest problem is with the "atp: Type" field, which cannot be reified in some situations
      *  that involve locally defined annotations. See more about that in Reifiers.scala.
      *
-     *  That's why the original tree gets saved into ``original'' field of AnnotationInfo (happens elsewhere).
+     *  That's why the original tree gets saved into `original` field of AnnotationInfo (happens elsewhere).
      *  The field doesn't get pickled/unpickled and exists only during a single compilation run.
      *  This simultaneously allows us to reify annotations and to preserve backward compatibility.
      */
     def typedModifiers(mods: Modifiers): Modifiers =
       mods.copy(annotations = Nil) setPositions mods.positions
 
-    /**
-     *  @param vdef ...
-     *  @return     ...
-     */
     def typedValDef(vdef: ValDef): ValDef = {
       val sym = vdef.symbol
       val valDefTyper = {
@@ -1965,7 +1946,7 @@ trait Typers extends Modes with Adaptations with Tags {
       val sym = vdef.symbol.initialize
       val typedMods = typedModifiers(vdef.mods)
 
-      sym.annotations.map(_.completeInfo)
+      sym.annotations.map(_.completeInfo())
       val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt))
       checkNonCyclic(vdef, tpt1)
 
@@ -2000,10 +1981,6 @@ trait Typers extends Modes with Adaptations with Tags {
     }
 
     /** Enter all aliases of local parameter accessors.
-     *
-     *  @param clazz    ...
-     *  @param vparamss ...
-     *  @param rhs      ...
      */
     def computeParamAliases(clazz: Symbol, vparamss: List[List[ValDef]], rhs: Tree) {
       debuglog(s"computing param aliases for $clazz:${clazz.primaryConstructor.tpe}:$rhs")
@@ -2053,7 +2030,7 @@ trait Typers extends Modes with Adaptations with Tags {
                 orElse (superAcc getter superAcc.owner)
                 filter (alias => superClazz.info.nonPrivateMember(alias.name) == alias)
             )
-            if (alias.exists && !alias.accessed.isVariable) {
+            if (alias.exists && !alias.accessed.isVariable && !isRepeatedParamType(alias.accessed.info)) {
               val ownAcc = clazz.info decl name suchThat (_.isParamAccessor) match {
                 case acc if !acc.isDeferred && acc.hasAccessorFlag => acc.accessed
                 case acc                                           => acc
@@ -2122,14 +2099,14 @@ trait Typers extends Modes with Adaptations with Tags {
         unit.error(pos, msg)
         false
       }
-      /** Have to examine all parameters in all lists.
+      /* Have to examine all parameters in all lists.
        */
       def paramssTypes(tp: Type): List[List[Type]] = tp match {
         case mt @ MethodType(_, restpe) => mt.paramTypes :: paramssTypes(restpe)
         case PolyType(_, restpe)        => paramssTypes(restpe)
         case _                          => Nil
       }
-      def resultType = meth.tpe.finalResultType
+      def resultType = meth.tpe_*.finalResultType
       def nthParamPos(n1: Int, n2: Int) =
         try ddef.vparamss(n1)(n2).pos catch { case _: IndexOutOfBoundsException => meth.pos }
 
@@ -2141,10 +2118,10 @@ trait Typers extends Modes with Adaptations with Tags {
           val sym = paramType.typeSymbol
           def paramPos = nthParamPos(listIdx, paramIdx)
 
-          /** Not enough to look for abstract types; have to recursively check the bounds
-           *  of each abstract type for more abstract types. Almost certainly there are other
-           *  exploitable type soundness bugs which can be seen by bounding a type parameter
-           *  by an abstract type which itself is bounded by an abstract type.
+          /* Not enough to look for abstract types; have to recursively check the bounds
+           * of each abstract type for more abstract types. Almost certainly there are other
+           * exploitable type soundness bugs which can be seen by bounding a type parameter
+           * by an abstract type which itself is bounded by an abstract type.
            */
           def checkAbstract(tp0: Type, what: String): Boolean = {
             def check(sym: Symbol): Boolean = !sym.isAbstractType || {
@@ -2168,51 +2145,6 @@ trait Typers extends Modes with Adaptations with Tags {
         failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result type")
     }
 
-    def typedUseCase(useCase: UseCase) {
-      def stringParser(str: String): syntaxAnalyzer.Parser = {
-        val file = new BatchSourceFile(context.unit.source.file, str) {
-          override def positionInUltimateSource(pos: Position) = {
-            pos.withSource(context.unit.source, useCase.pos.start)
-          }
-        }
-        val unit = new CompilationUnit(file)
-        new syntaxAnalyzer.UnitParser(unit)
-      }
-      val trees = stringParser(useCase.body+";").nonLocalDefOrDcl
-      val enclClass = context.enclClass.owner
-      def defineAlias(name: Name) =
-        if (context.scope.lookup(name) == NoSymbol) {
-          lookupVariable(name.toString.substring(1), enclClass) match {
-            case Some(repl) =>
-              silent(_.typedTypeConstructor(stringParser(repl).typ())) match {
-                case SilentResultValue(tpt) =>
-                  val alias = enclClass.newAliasType(name.toTypeName, useCase.pos)
-                  val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias)
-                  val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe)))
-                  alias setInfo newInfo
-                  context.scope.enter(alias)
-                case _ =>
-              }
-            case _ =>
-          }
-        }
-      for (tree <- trees; t <- tree)
-        t match {
-          case Ident(name) if name startsWith '$' => defineAlias(name)
-          case _ =>
-        }
-      useCase.aliases = context.scope.toList
-      namer.enterSyms(trees)
-      typedStats(trees, NoSymbol)
-      useCase.defined = context.scope.toList filterNot (useCase.aliases contains _)
-      if (settings.debug.value)
-        useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe)))
-    }
-
-    /**
-     *  @param ddef ...
-     *  @return     ...
-     */
     def typedDefDef(ddef: DefDef): DefDef = {
       val meth = ddef.symbol.initialize
 
@@ -2231,13 +2163,13 @@ trait Typers extends Modes with Adaptations with Tags {
       val tparams1 = ddef.tparams mapConserve typedTypeDef
       val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef)
 
-      meth.annotations.map(_.completeInfo)
+      meth.annotations.map(_.completeInfo())
 
       for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1)
         if (isRepeatedParamType(vparam1.symbol.tpe))
           StarParamNotLastError(vparam1)
 
-      var tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt))
+      val tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt))
       checkNonCyclic(ddef, tpt1)
       ddef.tpt.setType(tpt1.tpe)
       val typedMods = typedModifiers(ddef.mods)
@@ -2249,7 +2181,7 @@ trait Typers extends Modes with Adaptations with Tags {
                meth.owner.isAnonOrRefinementClass))
             InvalidConstructorDefError(ddef)
           typed(ddef.rhs)
-        } else if (meth.isTermMacro) {
+        } else if (meth.isMacro) {
           // typechecking macro bodies is sort of unconventional
           // that's why we employ our custom typing scheme orchestrated outside of the typer
           transformedOr(ddef.rhs, typedMacroBody(this, ddef))
@@ -2281,14 +2213,14 @@ trait Typers extends Modes with Adaptations with Tags {
               DeprecatedParamNameError(p, n)
           }
         }
-      }
-      if (meth.isStructuralRefinementMember)
-        checkMethodStructuralCompatible(ddef)
+        if (meth.isStructuralRefinementMember)
+          checkMethodStructuralCompatible(ddef)
 
-      if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match {
-        case List(param) :: _ if !param.isImplicit =>
-          checkFeature(ddef.pos, ImplicitConversionsFeature, meth.toString)
-        case _ =>
+        if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match {
+          case List(param) :: _ if !param.isImplicit =>
+            checkFeature(ddef.pos, ImplicitConversionsFeature, meth.toString)
+          case _ =>
+        }
       }
 
       treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType
@@ -2305,10 +2237,10 @@ trait Typers extends Modes with Adaptations with Tags {
       reenterTypeParams(tdef.tparams)
       val tparams1 = tdef.tparams mapConserve typedTypeDef
       val typedMods = typedModifiers(tdef.mods)
-      tdef.symbol.annotations.map(_.completeInfo)
+      tdef.symbol.annotations.map(_.completeInfo())
 
       // @specialized should not be pickled when compiling with -no-specialize
-      if (settings.nospecialization.value && currentRun.compiles(tdef.symbol)) {
+      if (settings.nospecialization && currentRun.compiles(tdef.symbol)) {
         tdef.symbol.removeAnnotation(definitions.SpecializedClass)
         tdef.symbol.deSkolemize.removeAnnotation(definitions.SpecializedClass)
       }
@@ -2332,7 +2264,7 @@ trait Typers extends Modes with Adaptations with Tags {
         case ldef @ LabelDef(_, _, _) =>
           if (ldef.symbol == NoSymbol)
             ldef.symbol = namer.enterInScope(
-              context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), UnitClass.tpe))
+              context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), UnitTpe))
         case _ =>
       }
     }
@@ -2341,7 +2273,7 @@ trait Typers extends Modes with Adaptations with Tags {
       if (!nme.isLoopHeaderLabel(ldef.symbol.name) || isPastTyper) {
         val restpe = ldef.symbol.tpe.resultType
         val rhs1 = typed(ldef.rhs, restpe)
-        ldef.params foreach (param => param.tpe = param.symbol.tpe)
+        ldef.params foreach (param => param setType param.symbol.tpe)
         deriveLabelDef(ldef)(_ => rhs1) setType restpe
       }
       else {
@@ -2349,29 +2281,25 @@ trait Typers extends Modes with Adaptations with Tags {
         val rhs1 = typed(ldef.rhs)
         val restpe = rhs1.tpe
         if (restpe == initpe) { // stable result, no need to check again
-          ldef.params foreach (param => param.tpe = param.symbol.tpe)
+          ldef.params foreach (param => param setType param.symbol.tpe)
           treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe
         } else {
           context.scope.unlink(ldef.symbol)
           val sym2 = namer.enterInScope(
             context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), restpe))
-          val rhs2 = typed(resetAllAttrs(ldef.rhs), restpe)
-          ldef.params foreach (param => param.tpe = param.symbol.tpe)
+          val LabelDef(_, _, rhs1) = resetAttrs(ldef)
+          val rhs2 = typed(brutallyResetAttrs(rhs1), restpe)
+          ldef.params foreach (param => param setType param.symbol.tpe)
           deriveLabelDef(ldef)(_ => rhs2) setSymbol sym2 setType restpe
         }
       }
     }
 
-    /**
-     *  @param block ...
-     *  @param mode  ...
-     *  @param pt    ...
-     *  @return      ...
-     */
-    def typedBlock(block: Block, mode: Int, pt: Type): Block = {
+    def typedBlock(block0: Block, mode: Mode, pt: Type): Block = {
       val syntheticPrivates = new ListBuffer[Symbol]
       try {
-        namer.enterSyms(block.stats)
+        namer.enterSyms(block0.stats)
+        val block = treeCopy.Block(block0, pluginsEnterStats(this, block0.stats), block0.expr)
         for (stat <- block.stats) enterLabelDef(stat)
 
         if (phaseId(currentPeriod) <= currentRun.typerPhase.id) {
@@ -2430,7 +2358,7 @@ trait Typers extends Modes with Adaptations with Tags {
             case _ => stat::Nil
             })
         val stats2 = typedStats(stats1, context.owner)
-        val expr1 = typed(block.expr, mode & ~(FUNmode | QUALmode), pt)
+        val expr1 = typed(block.expr, mode &~ (FUNmode | QUALmode), pt)
         treeCopy.Block(block, stats2, expr1)
           .setType(if (treeInfo.isExprSafeToInline(block)) expr1.tpe else expr1.tpe.deconst)
       } finally {
@@ -2440,12 +2368,6 @@ trait Typers extends Modes with Adaptations with Tags {
       }
     }
 
-    /**
-     *  @param cdef   ...
-     *  @param pattpe ...
-     *  @param pt     ...
-     *  @return       ...
-     */
     def typedCase(cdef: CaseDef, pattpe: Type, pt: Type): CaseDef = {
       // verify no _* except in last position
       for (Apply(_, xs) <- cdef.pat ; x <- xs dropRight 1 ; if treeInfo isStar x)
@@ -2460,85 +2382,74 @@ trait Typers extends Modes with Adaptations with Tags {
       // list, so substitute the final result type of the method, i.e. the type
       // of the case class.
       if (pat1.tpe.paramSectionCount > 0)
-        pat1 setType pat1.tpe.finalResultType
-
-      if (forInteractive) {
-        for (bind @ Bind(name, _) <- cdef.pat)
-          if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol)
-            namer.enterIfNotThere(bind.symbol)
+        pat1 modifyType (_.finalResultType)
+
+      for (bind @ Bind(name, _) <- cdef.pat) {
+        val sym = bind.symbol
+        if (name.toTermName != nme.WILDCARD && sym != null) {
+          if (sym == NoSymbol) {
+            if (context.scope.lookup(name) == NoSymbol)
+              namer.enterInScope(context.owner.newErrorSymbol(name))
+          } else
+            namer.enterIfNotThere(sym)
+        }
       }
 
       val guard1: Tree = if (cdef.guard == EmptyTree) EmptyTree
-                         else typed(cdef.guard, BooleanClass.tpe)
+                         else typed(cdef.guard, BooleanTpe)
       var body1: Tree = typed(cdef.body, pt)
 
-      val contextWithTypeBounds = context.nextEnclosing(_.tree.isInstanceOf[CaseDef])
-      if (contextWithTypeBounds.savedTypeBounds.nonEmpty) {
-        body1.tpe = contextWithTypeBounds restoreTypeBounds body1.tpe
-
+      if (context.enclosingCaseDef.savedTypeBounds.nonEmpty) {
+        body1 modifyType context.enclosingCaseDef.restoreTypeBounds
         // insert a cast if something typechecked under the GADT constraints,
        // but not in real life (i.e., now that we've reset the method's type skolems'
         //   infos back to their pre-GADT-constraint state)
-        if (isFullyDefined(pt) && !(body1.tpe <:< pt))
-          body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.normalize))
-
+        if (isFullyDefined(pt) && !(body1.tpe <:< pt)) {
+          log(s"Adding cast to pattern because ${body1.tpe} does not conform to expected type $pt")
+          body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.dealiasWiden))
+        }
       }
 
 //    body1 = checkNoEscaping.locals(context.scope, pt, body1)
       treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe
     }
 
-    // undo adaptConstrPattern's evil deeds, as they confuse the old pattern matcher
-    // the flags are used to avoid accidentally deskolemizing unrelated skolems of skolems
-    object deskolemizeGADTSkolems extends TypeMap {
-      def apply(tp: Type): Type = mapOver(tp) match {
-        case TypeRef(pre, sym, args) if sym.isGADTSkolem =>
-          typeRef(NoPrefix, sym.deSkolemize, args)
-        case tp1 => tp1
-      }
-    }
-
     def typedCases(cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] =
       cases mapConserve { cdef =>
         newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt)
       }
 
-    def adaptCase(cdef: CaseDef, mode: Int, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe))
+    def adaptCase(cdef: CaseDef, mode: Mode, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe))
 
-    def ptOrLub(tps: List[Type], pt: Type  )       = if (isFullyDefined(pt)) (pt, false) else weakLub(tps map (_.deconst))
-    def ptOrLubPacked(trees: List[Tree], pt: Type) = if (isFullyDefined(pt)) (pt, false) else weakLub(trees map (c => packedType(c, context.owner).deconst))
+    def packedTypes(trees: List[Tree]): List[Type] = trees map (c => packedType(c, context.owner).deconst)
 
     // takes untyped sub-trees of a match and type checks them
-    def typedMatch(selector: Tree, cases: List[CaseDef], mode: Int, pt: Type, tree: Tree = EmptyTree): Match = {
-      val selector1  = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
+    def typedMatch(selector: Tree, cases: List[CaseDef], mode: Mode, pt: Type, tree: Tree = EmptyTree): Match = {
+      val selector1  = checkDead(typedByValueExpr(selector))
       val selectorTp = packCaptured(selector1.tpe.widen).skolemizeExistential(context.owner, selector)
       val casesTyped = typedCases(cases, selectorTp, pt)
 
-      val (resTp, needAdapt) =
-        if (opt.virtPatmat) ptOrLubPacked(casesTyped, pt)
-        else ptOrLub(casesTyped map (_.tpe), pt)
-
-      val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, resTp))
+      def finish(cases: List[CaseDef], matchType: Type) =
+        treeCopy.Match(tree, selector1, cases) setType matchType
 
-      val matchTyped = treeCopy.Match(tree, selector1, casesAdapted) setType resTp
-      if (!newPatternMatching) // TODO: remove this in 2.11 -- only needed for old pattern matcher
-        new TypeMapTreeSubstituter(deskolemizeGADTSkolems).traverse(matchTyped)
-      matchTyped
+      if (isFullyDefined(pt))
+        finish(casesTyped, pt)
+      else packedTypes(casesTyped) match {
+        case packed if sameWeakLubAsLub(packed) => finish(casesTyped, lub(packed))
+        case packed                             =>
+          val lub = weakLub(packed)
+          finish(casesTyped map (adaptCase(_, mode, lub)), lub)
+      }
     }
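
A small hedged example of the lub selection above: when no fully defined expected type is available, the match takes the lub (or weak lub, with the cases adapted) of the packed case types.

    object WeakLubDemo extends App {
      // No expected type is given, so the result type comes from the cases:
      // Int and Double have weak lub Double, and the Int-typed case is adapted.
      val r = (0: Any) match {
        case i: Int => i      // Int
        case _      => 0.5    // Double
      }
      println(r)              // r is inferred as Double; prints 0.0
    }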
 
-    // match has been typed -- virtualize it if we're feeling experimental
-    // (virtualized matches are expanded during type checking so they have the full context available)
-    // otherwise, do nothing: matches are translated during phase `patmat` (unless -Xoldpatmat)
-    def virtualizedMatch(match_ : Match, mode: Int, pt: Type) = {
-      import patmat.{vpmName, PureMatchTranslator, OptimizingMatchTranslator}
+    // match has been typed -- virtualize it during type checking so the full context is available
+    def virtualizedMatch(match_ : Match, mode: Mode, pt: Type) = {
+      import patmat.{ vpmName, PureMatchTranslator }
 
       // TODO: add fallback __match sentinel to predef
       val matchStrategy: Tree =
-        if (!(newPatternMatching && opt.experimental && context.isNameInScope(vpmName._match))) null    // fast path, avoiding the next line if there's no __match to be seen
-        else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
-          case SilentResultValue(ms) => ms
-          case _                     => null
-        }
+        if (!(settings.Xexperimental && context.isNameInScope(vpmName._match))) null    // fast path, avoiding the next line if there's no __match to be seen
+        else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match)), reportAmbiguousErrors = false) orElse (_ => null)
 
       if (matchStrategy ne null) // virtualize
         typed((new PureMatchTranslator(this.asInstanceOf[patmat.global.analyzer.Typer] /*TODO*/, matchStrategy)).translateMatch(match_), mode, pt)
@@ -2568,13 +2479,11 @@ trait Typers extends Modes with Adaptations with Tags {
      * an alternative TODO: add partial function AST node or equivalent and get rid of this synthesis --> do everything in uncurry (or later)
      * however, note that pattern matching codegen is designed to run *before* uncurry
      */
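
For illustration, a hedged user-level example of the synthesis performed below: a pattern-matching anonymous function typed against PartialFunction is expanded into an anonymous class providing applyOrElse and isDefinedAt.

    object PartialFunctionDemo extends App {
      val asText: PartialFunction[Int, String] = {
        case 1 => "one"
        case 2 => "two"
      }
      println(asText.isDefinedAt(3))                       // false
      println(asText.applyOrElse(1, (_: Int) => "many"))   // one
    }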
-    def synthesizePartialFunction(paramName: TermName, paramPos: Position, tree: Tree, mode: Int, pt0: Type): Tree = {
-      assert(pt0.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt0.")
-
-      val pt    = deskolemizeGADTSkolems(pt0)
-      val targs = pt.normalize.typeArgs
+    def synthesizePartialFunction(paramName: TermName, paramPos: Position, tree: Tree, mode: Mode, pt: Type): Tree = {
+      assert(pt.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt.")
+      val targs = pt.dealiasWiden.typeArgs
 
-      // if targs.head isn't fully defined, we can translate --> error
+      // if targs.head isn't fully defined, we can't translate --> error
       targs match {
         case argTp :: _ if isFullyDefined(argTp) => // ok
         case _ => // uh-oh
@@ -2586,18 +2495,16 @@ trait Typers extends Modes with Adaptations with Tags {
       val argTp :: resTp :: Nil = targs
 
       // targs must conform to Any for us to synthesize an applyOrElse (fallback to apply otherwise -- typically for @cps annotated targs)
-      val targsValidParams = targs forall (_ <:< AnyClass.tpe)
+      val targsValidParams = targs forall (_ <:< AnyTpe)
 
-      val anonClass = (context.owner
-        newAnonymousFunctionClass tree.pos
-        addAnnotation AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List()))
+      val anonClass = context.owner newAnonymousFunctionClass tree.pos addAnnotation SerialVersionUIDAnnotation
 
       import CODE._
 
       val Match(sel, cases) = tree
 
       // need to duplicate the cases before typing them to generate the apply method, or the symbols will be all messed up
-      val casesTrue = cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE_typed)).duplicate.asInstanceOf[CaseDef])
+      val casesTrue = cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE)).duplicate.asInstanceOf[CaseDef])
 
       // must generate a new tree every time
       def selector: Tree = gen.mkUnchecked(
@@ -2663,7 +2570,7 @@ trait Typers extends Modes with Adaptations with Tags {
         //
         //         Well behaved trees satisfy the property:
         //
-        //         typed(tree) == typed(resetLocalAttrs(typed(tree))
+        //         typed(tree) == typed(resetAttrs(typed(tree)))
         //
         //         Trees constructed without low-level symbol manipulation get this for free;
         //         references to local symbols are cleared by `ResetAttrs`, but bind to the
@@ -2701,8 +2608,15 @@ trait Typers extends Modes with Adaptations with Tags {
             default -> gen.scalaFunctionConstr(List(A1Tpt), B1Tpt)
           )
         }
-        val rhs = methodBodyTyper.virtualizedMatch(match_, mode, B1.tpe)
-        val defdef = DefDef(methodSym, Modifiers(methodSym.flags), originals, rhs)
+        def newParam(param: Symbol): ValDef = {
+          val vd              = ValDef(param, EmptyTree)
+          val tt @ TypeTree() = vd.tpt
+          tt setOriginal (originals(param) setPos param.pos.focus)
+          vd
+        }
+
+        val rhs    = methodBodyTyper.virtualizedMatch(match_, mode, B1.tpe)
+        val defdef = newDefDef(methodSym, rhs)(vparamss = mapParamss(methodSym)(newParam), tpt = TypeTree(B1.tpe))
 
         (defdef, matchResTp)
       }
@@ -2714,12 +2628,12 @@ trait Typers extends Modes with Adaptations with Tags {
 
         val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
         methodBodyTyper.context.scope enter paramSym
-        methodSym setInfo MethodType(List(paramSym), BooleanClass.tpe)
+        methodSym setInfo MethodType(List(paramSym), BooleanTpe)
 
-        val defaultCase = mkDefaultCase(FALSE_typed)
-        val match_ = methodBodyTyper.typedMatch(selector, casesTrue :+ defaultCase, mode, BooleanClass.tpe)
+        val defaultCase = mkDefaultCase(FALSE)
+        val match_ = methodBodyTyper.typedMatch(selector, casesTrue :+ defaultCase, mode, BooleanTpe)
 
-        DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanClass.tpe))
+        DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanTpe))
       }
 
       // only used for @cps annotated partial functions
@@ -2728,7 +2642,7 @@ trait Typers extends Modes with Adaptations with Tags {
         val methodSym = anonClass.newMethod(nme.apply, tree.pos, FINAL | OVERRIDE)
         val paramSym = mkParam(methodSym)
 
-        methodSym setInfo MethodType(List(paramSym), AnyClass.tpe)
+        methodSym setInfo MethodType(List(paramSym), AnyTpe)
 
         val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym))
         // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
@@ -2764,7 +2678,7 @@ trait Typers extends Modes with Adaptations with Tags {
       members foreach (m => anonClass.info.decls enter m.symbol)
 
       val typedBlock = typedPos(tree.pos, mode, pt) {
-        Block(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(
+        Block(ClassDef(anonClass, NoMods, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(
           Apply(Select(New(Ident(anonClass.name).setSymbol(anonClass)), nme.CONSTRUCTOR), List())
         ))
       }
@@ -2776,31 +2690,200 @@ trait Typers extends Modes with Adaptations with Tags {
         }
     }
 
+    /** Synthesize and type check the implementation of a type with a Single Abstract Method
+     *
+     *  `{ (p1: T1, ..., pN: TN) => body } : S`
+     *
+     *  expands to (where `S` is the expected type that defines a single abstract method named `apply`)
+     *
+     *  `{
+     *    def apply$body(p1: T1, ..., pN: TN): T = body
+     *    new S {
+     *     def apply(p1: T1, ..., pN: TN): T = apply$body(p1,..., pN)
+     *    }
+     *  }`
+     *
+     * If `T` is not fully defined, it is inferred by type checking
+     * `apply$body` without a result type before type checking the block.
+     * The method's inferred result type is then used instead of `T`. [See test/files/pos/sammy_poly.scala]
+     *
+     * The `apply` method is identified by the argument `sam`; `S` corresponds to the argument `samClassTp`,
+     * and `resPt` is derived from `samClassTp` -- it may be fully defined, or not...
+     *
+     * The function's body is put in a method outside of the class definition to enforce scoping.
+     * S's members should not be in scope in `body`.
+     *
+     * The restriction on implicit arguments (neither S's constructor nor sam may take an implicit argument list)
+     * is largely to keep the implementation of type inference (the computation of `samClassTpFullyDefined`) simple.
+     *
+     * NOTE: it would be nicer to not have to type check `apply$body` separately when `T` is not fully defined.
+     * However T must be fully defined before we type the instantiation, as it'll end up as a parent type,
+     * which must be fully defined. Would be nice to have some kind of mechanism to insert type vars in a block of code,
+     * and have the instantiation of the first occurrence propagate to the rest of the block.
+     */
+    def synthesizeSAMFunction(sam: Symbol, fun: Function, resPt: Type, samClassTp: Type, mode: Mode): Tree = {
+      // assert(fun.vparams forall (vp => isFullyDefined(vp.tpt.tpe))) -- by construction, as we take them from sam's info
+      val sampos = fun.pos
+
+      // if the expected sam type is fully defined, use it for the method's result type
+      // otherwise, NoType, so that type inference will determine the method's result type
+      // resPt is syntactically contained in samClassTp, so if the latter is fully defined, so is the former
+      // ultimately, we want to fully define samClassTp as it is used as the superclass of our anonymous class
+      val samDefTp = if (isFullyDefined(resPt)) resPt else NoType
+      val bodyName = newTermName(sam.name + "$body")
+
+      // `def '${sam.name}\$body'($p1: $T1, ..., $pN: $TN): $resPt = $body`
+      val samBodyDef =
+        DefDef(NoMods,
+          bodyName,
+          Nil,
+          List(fun.vparams.map(_.duplicate)), // must duplicate as we're also using them for `samDef`
+          TypeTree(samDefTp) setPos sampos.focus,
+          fun.body)
+
+      // If we need to enter the sym for the body def before type checking the block,
+      // we'll create a nested context, as explained below.
+      var nestedTyper = this
+
+      // Type check body def before classdef to fully determine samClassTp (if necessary).
+      // As `samClassTp` determines a parent type for the class,
+      // we can't type check `block` in one go unless `samClassTp` is fully defined.
+      val samClassTpFullyDefined =
+        if (isFullyDefined(samClassTp)) samClassTp
+        else try {
+          // This creates a symbol for samBodyDef with a type completer that'll be triggered immediately below.
+          // The symbol is entered in the same scope used for the block below, and won't thus be reentered later.
+          // It has to be a new scope, though, or we'll get "ambiguous reference to overloaded definition" [pos/sammy_twice.scala]
+          // makeSilent: [pos/nonlocal-unchecked.scala -- when translating all functions to sams]
+          val nestedCtx = enterSym(context.makeNewScope(context.tree, context.owner).makeSilent(), samBodyDef)
+          nestedTyper = newTyper(nestedCtx)
+
+          // NOTE: this `samBodyDef.symbol.info` runs the type completer set up by the enterSym above
+          val actualSamType = samBodyDef.symbol.info
+
+          // we're trying to fully define the type arguments for this type constructor
+          val samTyCon  = samClassTp.typeSymbol.typeConstructor
+
+          // the unknowns
+          val tparams   = samClassTp.typeSymbol.typeParams
+          // ... as typevars
+          val tvars     = tparams map freshVar
+
+          // 1. Recover partial information:
+          //   - derive a type from samClassTp that has the corresponding tparams for type arguments that aren't fully defined
+          //   - constrain typevars to be equal to type args that are fully defined
+          val samClassTpMoreDefined = appliedType(samTyCon,
+            (samClassTp.typeArgs, tparams, tvars).zipped map {
+              case (a, _, tv) if isFullyDefined(a) => tv =:= a; a
+              case (_, p, _)                       => p.typeConstructor
+            })
 
-    /**
-     *  @param fun  ...
-     *  @param mode ...
-     *  @param pt   ...
-     *  @return     ...
+          // the method type we're expecting the synthesized sam to have, based on the expected sam type,
+          // where fully defined type args to samClassTp have been preserved,
+          // with the unknown args replaced by their corresponding type param
+          val expectedSamType = samClassTpMoreDefined.memberInfo(sam)
+
+          // 2. make sure the body def's actual type (formals and result) conforms to
+          //    sam's expected type (in terms of the typevars that represent the sam's class's type params)
+          actualSamType <:< expectedSamType.substituteTypes(tparams, tvars)
+
+          // solve constraints tracked by tvars
+          val targs = solvedTypes(tvars, tparams, tparams map varianceInType(sam.info), upper = false, lubDepth(sam.info :: Nil))
+
+          debuglog(s"sam infer: $samClassTp --> ${appliedType(samTyCon, targs)} by $actualSamType <:< $expectedSamType --> $targs for $tparams")
+
+          // a fully defined samClassTp
+          appliedType(samTyCon, targs)
+        } catch {
+          case _: NoInstance | _: TypeError =>
+            devWarning(sampos, s"Could not define type $samClassTp using ${samBodyDef.symbol.rawInfo} <:< ${samClassTp memberInfo sam} (for $sam)")
+            samClassTp
+        }
+
+      // `final override def ${sam.name}($p1: $T1, ..., $pN: $TN): $resPt = ${sam.name}\$body'($p1, ..., $pN)`
+      val samDef =
+        DefDef(Modifiers(FINAL | OVERRIDE | SYNTHETIC),
+          sam.name.toTermName,
+          Nil,
+          List(fun.vparams),
+          TypeTree(samBodyDef.tpt.tpe) setPos sampos.focus,
+          Apply(Ident(bodyName), fun.vparams map (p => Ident(p.name)))
+        )
+
+      val serializableParentAddendum =
+        if (typeIsSubTypeOfSerializable(samClassTp)) Nil
+        else List(TypeTree(SerializableTpe))
+
+      val classDef =
+        ClassDef(Modifiers(FINAL), tpnme.ANON_FUN_NAME, tparams = Nil,
+          gen.mkTemplate(
+            parents    = TypeTree(samClassTpFullyDefined) :: serializableParentAddendum,
+            self       = emptyValDef,
+            constrMods = NoMods,
+            vparamss   = ListOfNil,
+            body       = List(samDef),
+            superPos   = sampos.focus
+          )
+        )
+
+      // type checking the whole block, so that everything is packaged together nicely
+      // and we don't have to create any symbols by hand
+      val block =
+        nestedTyper.typedPos(sampos, mode, samClassTpFullyDefined) {
+          Block(
+            samBodyDef,
+            classDef,
+            Apply(Select(New(Ident(tpnme.ANON_FUN_NAME)), nme.CONSTRUCTOR), Nil)
+          )
+        }
+
+      classDef.symbol addAnnotation SerialVersionUIDAnnotation
+      block
+    }
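
A hedged user-level sketch of the expansion documented above (SAM conversion requires -Xexperimental in this release); the trait name is hypothetical.

    trait Adder {
      def apply(x: Int, y: Int): Int
    }

    object SamDemo extends App {
      // Under -Xexperimental the literal is expanded roughly into
      //   { def apply$body(x: Int, y: Int): Int = x + y
      //     new Adder { def apply(x: Int, y: Int): Int = apply$body(x, y) } }
      val add: Adder = (x: Int, y: Int) => x + y
      println(add(1, 2))  // prints 3
    }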
+
+    /** Type check a function literal.
+     *
+     * Based on the expected type pt, potentially synthesize an instance of
+     *   - PartialFunction,
+     *   - a type with a Single Abstract Method (under -Xexperimental for now).
      */
-    private def typedFunction(fun: Function, mode: Int, pt: Type): Tree = {
+    private def typedFunction(fun: Function, mode: Mode, pt: Type): Tree = {
       val numVparams = fun.vparams.length
-      if (numVparams > definitions.MaxFunctionArity)
-        return MaxFunctionArityError(fun)
-
-      def decompose(pt: Type): (Symbol, List[Type], Type) =
-        if ((isFunctionType(pt) || (pt.typeSymbol == PartialFunctionClass && numVparams == 1 && fun.body.isInstanceOf[Match])) && // see bug901 for a reason why next conditions are needed
-            (  pt.normalize.typeArgs.length - 1 == numVparams
-            || fun.vparams.exists(_.tpt.isEmpty)
-            ))
-          (pt.typeSymbol, pt.normalize.typeArgs.init, pt.normalize.typeArgs.last)
-        else
-          (FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType)
+      val FunctionSymbol =
+        if (numVparams > definitions.MaxFunctionArity) NoSymbol
+        else FunctionClass(numVparams)
 
-      val (clazz, argpts, respt) = decompose(pt)
-      if (argpts.lengthCompare(numVparams) != 0)
+      /* The Single Abstract Member of pt, unless pt is the built-in function type of the expected arity,
+       * as `(a => a): Int => Int` should not (yet) get the sam treatment.
+       */
+      val sam =
+        if (!settings.Xexperimental || pt.typeSymbol == FunctionSymbol) NoSymbol
+        else samOf(pt)
+
+      /* The SAM case comes first so that this works:
+       *   abstract class MyFun extends (Int => Int)
+       *   (a => a): MyFun
+       *
+       * Note that the arity of the sam must correspond to the arity of the function.
+       */
+      val samViable = sam.exists && sameLength(sam.info.params, fun.vparams)
+      val (argpts, respt) =
+        if (samViable) {
+          val samInfo = pt memberInfo sam
+          (samInfo.paramTypes, samInfo.resultType)
+        } else {
+          pt baseType FunctionSymbol match {
+            case TypeRef(_, FunctionSymbol, args :+ res) => (args, res)
+            case _                                       => (fun.vparams map (_ => if (pt == ErrorType) ErrorType else NoType), WildcardType)
+          }
+        }
+
+      if (!FunctionSymbol.exists)
+        MaxFunctionArityError(fun)
+      else if (argpts.lengthCompare(numVparams) != 0)
         WrongNumberOfParametersError(fun, argpts)
       else {
+        var issuedMissingParameterTypeError = false
         foreach2(fun.vparams, argpts) { (vparam, argpt) =>
           if (vparam.tpt.isEmpty) {
             vparam.tpt.tpe =
@@ -2808,19 +2891,18 @@ trait Typers extends Modes with Adaptations with Tags {
               else {
                 fun match {
                   case etaExpansion(vparams, fn, args) =>
-                    silent(_.typed(fn, forFunMode(mode), pt)) match {
-                      case SilentResultValue(fn1) if context.undetparams.isEmpty =>
-                        // if context,undetparams is not empty, the function was polymorphic,
+                    silent(_.typed(fn, mode.forFunMode, pt)) filter (_ => context.undetparams.isEmpty) map { fn1 =>
+                        // if context.undetparams is not empty, the function was polymorphic,
                         // so we need the missing arguments to infer its type. See #871
                         //println("typing eta "+fun+":"+fn1.tpe+"/"+context.undetparams)
                         val ftpe = normalize(fn1.tpe) baseType FunctionClass(numVparams)
                         if (isFunctionType(ftpe) && isFullyDefined(ftpe))
                           return typedFunction(fun, mode, ftpe)
-                      case _ =>
                     }
                   case _ =>
                 }
-                MissingParameterTypeError(fun, vparam, pt)
+                MissingParameterTypeError(fun, vparam, pt, withTupleAddendum = !issuedMissingParameterTypeError)
+                issuedMissingParameterTypeError = true
                 ErrorType
               }
             if (!vparam.tpt.pos.isDefined) vparam.tpt setPos vparam.pos.focus
@@ -2830,7 +2912,7 @@ trait Typers extends Modes with Adaptations with Tags {
         fun.body match {
           // translate `x => x match { <cases> }` : PartialFunction to
           // `new PartialFunction { def applyOrElse(x, default) = x match { <cases> } def isDefinedAt(x) = ... }`
-          case Match(sel, cases) if (sel ne EmptyTree) && newPatternMatching && (pt.typeSymbol == PartialFunctionClass) =>
+          case Match(sel, cases) if (sel ne EmptyTree) && (pt.typeSymbol == PartialFunctionClass) =>
             // go to outer context -- must discard the context that was created for the Function since we're discarding the function
             // thus, its symbol, which serves as the current context.owner, is not the right owner
             // you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner)
@@ -2839,22 +2921,26 @@ trait Typers extends Modes with Adaptations with Tags {
             if (p.tpt.tpe == null) p.tpt setType outerTyper.typedType(p.tpt).tpe
 
             outerTyper.synthesizePartialFunction(p.name, p.pos, fun.body, mode, pt)
+
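
The translation described in the comment above, seen from the call site (a rough sketch of the synthesized class):

    val pf: PartialFunction[Int, String] = x => x match { case 1 => "one" }
    // synthesized roughly as:
    //   new PartialFunction[Int, String] {
    //     def applyOrElse[A <: Int, B >: String](x: A, default: A => B): B =
    //       x match { case 1 => "one"; case _ => default(x) }
    //     def isDefinedAt(x: Int): Boolean = x match { case 1 => true; case _ => false }
    //   }
    pf.isDefinedAt(2)                       // false
    pf.applyOrElse(2, (_: Int) => "other")  // "other"
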
+          // Use synthesizeSAMFunction to expand `(p1: T1, ..., pN: TN) => body`
+          // to an instance of the corresponding anonymous subclass of `pt`.
+          case _ if samViable =>
+            newTyper(context.outer).synthesizeSAMFunction(sam, fun, respt, pt, mode)
+
+          // regular Function
           case _ =>
             val vparamSyms = fun.vparams map { vparam =>
               enterSym(context, vparam)
               if (context.retyping) context.scope enter vparam.symbol
               vparam.symbol
             }
-            val vparams = fun.vparams mapConserve (typedValDef)
-    //        for (vparam <- vparams) {
-    //          checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); ()
-    //        }
+            val vparams = fun.vparams mapConserve typedValDef
             val formals = vparamSyms map (_.tpe)
             val body1 = typed(fun.body, respt)
             val restpe = packedType(body1, fun.symbol).deconst.resultType
-            val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe)
-    //        body = checkNoEscaping.locals(context.scope, restpe, body)
-            treeCopy.Function(fun, vparams, body1).setType(funtpe)
+            val funtpe  = appliedType(FunctionSymbol, formals :+ restpe: _*)
+
+            treeCopy.Function(fun, vparams, body1) setType funtpe
         }
       }
     }
@@ -2872,13 +2958,8 @@ trait Typers extends Modes with Adaptations with Tags {
         val att = templ.attachments.get[CompoundTypeTreeOriginalAttachment].getOrElse(CompoundTypeTreeOriginalAttachment(Nil, Nil))
         templ.removeAttachment[CompoundTypeTreeOriginalAttachment]
         templ updateAttachment att.copy(stats = stats1)
-        for (stat <- stats1 if stat.isDef) {
-          val member = stat.symbol
-          if (!(context.owner.ancestors forall
-                (bc => member.matchingSymbol(bc, context.owner.thisType) == NoSymbol))) {
-                  member setFlag OVERRIDE
-                }
-        }
+        for (stat <- stats1 if stat.isDef && stat.symbol.isOverridingSymbol)
+          stat.symbol setFlag OVERRIDE
       }
     }
 
@@ -2886,17 +2967,6 @@ trait Typers extends Modes with Adaptations with Tags {
       case Some(imp1: Import) => imp1
       case _                  => log("unhandled import: "+imp+" in "+unit); imp
     }
-    private def isWarnablePureExpression(tree: Tree) = tree match {
-      case EmptyTree | Literal(Constant(())) => false
-      case _                                 =>
-        !tree.isErrorTyped && (treeInfo isExprSafeToInline tree) && {
-          val sym = tree.symbol
-          (sym == null) || !(sym.isModule || sym.isLazy) || {
-            debuglog("'Pure' but side-effecting expression in statement position: " + tree)
-            false
-          }
-        }
-    }
 
     def typedStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
       val inBlock = exprOwner == context.owner
@@ -2911,7 +2981,7 @@ trait Typers extends Modes with Adaptations with Tags {
             case imp @ Import(_, _) =>
               imp.symbol.initialize
               if (!imp.symbol.isError) {
-                context = context.makeNewImport(imp)
+                context = context.make(imp)
                 typedImport(imp)
               } else EmptyTree
             case _ =>
@@ -2925,7 +2995,7 @@ trait Typers extends Modes with Adaptations with Tags {
                 } else newTyper(context.make(stat, exprOwner))
                 // XXX this creates a spurious dead code warning if an exception is thrown
                 // in a constructor, even if it is the only thing in the constructor.
-                val result = checkDead(localTyper.typed(stat, EXPRmode | BYVALmode, WildcardType))
+                val result = checkDead(localTyper.typedByValueExpr(stat))
 
                 if (treeInfo.isSelfOrSuperConstrCall(result)) {
                   context.inConstructorSuffix = true
@@ -2933,7 +3003,7 @@ trait Typers extends Modes with Adaptations with Tags {
                     ConstructorsOrderError(stat)
                 }
 
-                if (isWarnablePureExpression(result)) context.warning(stat.pos,
+                if (treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos,
                   "a pure expression does nothing in statement position; " +
                   "you may be omitting necessary parentheses"
                 )
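
A minimal statement that trips the warning issued above:

    def twice(x: Int): Int = {
      42      // warning: a pure expression does nothing in statement position;
              // you may be omitting necessary parentheses
      x * 2
    }
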
@@ -2942,10 +3012,10 @@ trait Typers extends Modes with Adaptations with Tags {
           }
       }
 
-      /** 'accessor' and 'accessed' are so similar it becomes very difficult to
-       *  follow the logic, so I renamed one to something distinct.
+      /* 'accessor' and 'accessed' are so similar it becomes very difficult to
+       * follow the logic, so I renamed one to something distinct.
        */
-      def accesses(looker: Symbol, accessed: Symbol) = accessed.hasLocalFlag && (
+      def accesses(looker: Symbol, accessed: Symbol) = accessed.isLocalToThis && (
            (accessed.isParamAccessor)
         || (looker.hasAccessorFlag && !accessed.hasAccessorFlag && accessed.isPrivate)
       )
@@ -2960,7 +3030,7 @@ trait Typers extends Modes with Adaptations with Tags {
                 (e.sym.isType || inBlock || (e.sym.tpe matches e1.sym.tpe)))
               // default getters are defined twice when multiple overloads have defaults. an
               // error for this is issued in RefChecks.checkDefaultsInOverloaded
-              if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasDefaultFlag &&
+              if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasDefault &&
                   !e.sym.hasAnnotation(BridgeClass) && !e1.sym.hasAnnotation(BridgeClass)) {
                 log("Double definition detected:\n  " +
                     ((e.sym.getClass, e.sym.info, e.sym.ownerChain)) + "\n  " +
@@ -2984,7 +3054,7 @@ trait Typers extends Modes with Adaptations with Tags {
           // SI-5877 The decls of a package include decls of the package object. But we don't want to add
           //         the corresponding synthetics to the package class, only to the package object class.
           def shouldAdd(sym: Symbol) =
-            inBlock || !isInPackageObject(sym, context.owner)
+            inBlock || !context.isInPackageObject(sym, context.owner)
           for (sym <- scope if shouldAdd(sym))
             for (tree <- context.unit.synthetics get sym) {
               newStats += typedStat(tree) // might add even more synthetics to the scope
@@ -3004,7 +3074,7 @@ trait Typers extends Modes with Adaptations with Tags {
           def matches(stat: Tree, synt: Tree) = (stat, synt) match {
             // synt is default arg for stat
             case (DefDef(_, statName, _, _, _, _), DefDef(mods, syntName, _, _, _, _)) =>
-              mods.hasDefaultFlag && syntName.toString.startsWith(statName.toString)
+              mods.hasDefault && syntName.toString.startsWith(statName.toString)
 
             // synt is companion module
             case (ClassDef(_, className, _, _), ModuleDef(_, moduleName, _)) =>
@@ -3037,42 +3107,14 @@ trait Typers extends Modes with Adaptations with Tags {
       }
     }
 
-    def typedArg(arg: Tree, mode: Int, newmode: Int, pt: Type): Tree = {
-      val typedMode = onlyStickyModes(mode) | newmode
-      val t = withCondConstrTyper((mode & SCCmode) != 0)(_.typed(arg, typedMode, pt))
+    def typedArg(arg: Tree, mode: Mode, newmode: Mode, pt: Type): Tree = {
+      val typedMode = mode.onlySticky | newmode
+      val t = withCondConstrTyper(mode.inSccMode)(_.typed(arg, typedMode, pt))
       checkDead.inMode(typedMode, t)
     }
 
-    def typedArgs(args: List[Tree], mode: Int) =
-      args mapConserve (arg => typedArg(arg, mode, 0, WildcardType))
-
-    /** Type trees in `args0` against corresponding expected type in `adapted0`.
-     *
-     * The mode in which each argument is typed is derived from `mode` and
-     * whether the arg was originally by-name or var-arg (need `formals0` for that)
-     * the default is by-val, of course.
-     *
-     * (docs reverse-engineered -- AM)
-     */
-    def typedArgs(args0: List[Tree], mode: Int, formals0: List[Type], adapted0: List[Type]): List[Tree] = {
-      val sticky = onlyStickyModes(mode)
-      def loop(args: List[Tree], formals: List[Type], adapted: List[Type]): List[Tree] = {
-        if (args.isEmpty || adapted.isEmpty) Nil
-        else {
-          // No formals left or * indicates varargs.
-          val isVarArgs = formals.isEmpty || formals.tail.isEmpty && isRepeatedParamType(formals.head)
-          val typedMode = sticky | (
-            if (isVarArgs) STARmode | BYVALmode
-            else if (isByNameParamType(formals.head)) 0
-            else BYVALmode
-          )
-          val tree = typedArg(args.head, mode, typedMode, adapted.head)
-          // formals may be empty, so don't call tail
-          tree :: loop(args.tail, formals drop 1, adapted.tail)
-        }
-      }
-      loop(args0, formals0, adapted0)
-    }
+    def typedArgs(args: List[Tree], mode: Mode) =
+      args mapConserve (arg => typedArg(arg, mode, NOmode, WildcardType))
 
     /** Does function need to be instantiated, because a missing parameter
      *  in an argument closure overlaps with an uninstantiated formal?
@@ -3114,26 +3156,25 @@ trait Typers extends Modes with Adaptations with Tags {
       }
     }
 
-    def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
+    def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
       // TODO_NMT: check the assumption that args nonEmpty
       def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
       def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
 
       def preSelectOverloaded(fun: Tree): Tree = {
-        if (fun.hasSymbol && fun.symbol.isOverloaded) {
+        if (fun.hasSymbolField && fun.symbol.isOverloaded) {
           // remove alternatives with wrong number of parameters without looking at types.
-          // less expensive than including them in inferMethodAlternatvie (see below).
+          // less expensive than including them in inferMethodAlternative (see below).
           def shapeType(arg: Tree): Type = arg match {
             case Function(vparams, body) =>
-              functionType(vparams map (vparam => AnyClass.tpe), shapeType(body))
+              functionType(vparams map (_ => AnyTpe), shapeType(body))
             case AssignOrNamedArg(Ident(name), rhs) =>
               NamedType(name, shapeType(rhs))
             case _ =>
-              NothingClass.tpe
+              NothingTpe
           }
           val argtypes = args map shapeType
           val pre = fun.symbol.tpe.prefix
-
           var sym = fun.symbol filter { alt =>
             // must use pt as expected type, not WildcardType (a tempting quick fix to #2665)
             // now fixed by using isWeaklyCompatible in exprTypeArgs
@@ -3145,20 +3186,19 @@ trait Typers extends Modes with Adaptations with Tags {
             // Types: "refs = Array(Map(), Map())".  I determined that inference fails if there are at
             // least two invariant type parameters. See the test case I checked in to help backstop:
             // pos/isApplicableSafe.scala.
-            isApplicableSafe(context.undetparams, followApply(pre.memberType(alt)), argtypes, pt)
+            isApplicableSafe(context.undetparams, followApply(pre memberType alt), argtypes, pt)
           }
           if (sym.isOverloaded) {
-            val sym1 = sym filter (alt => {
               // eliminate functions that would result from tupling transforms
               // keeps alternatives with repeated params
-              hasExactlyNumParams(followApply(alt.tpe), argtypes.length) ||
-                // also keep alts which define at least one default
-                alt.tpe.paramss.exists(_.exists(_.hasDefault))
-            })
+            val sym1 = sym filter (alt =>
+                 isApplicableBasedOnArity(pre memberType alt, argtypes.length, varargsStar = false, tuplingAllowed = false)
+              || alt.tpe.params.exists(_.hasDefault)
+            )
             if (sym1 != NoSymbol) sym = sym1
           }
           if (sym == NoSymbol) fun
-          else adapt(fun setSymbol sym setType pre.memberType(sym), forFunMode(mode), WildcardType)
+          else adapt(fun setSymbol sym setType pre.memberType(sym), mode.forFunMode, WildcardType)
         } else fun
       }
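
Roughly what the shape-based pre-selection above buys at the source level (illustrative names): alternatives are pruned by the shape and arity of the arguments before inferMethodAlternative runs, which is what lets the parameter types of a function-literal argument be inferred once a single alternative remains.

    object Runner {
      def run(f: Int => Int): Int        = f(1)
      def run(f: (Int, Int) => Int): Int = f(1, 2)
    }
    // shapeType sees the literal as (Any, Any) => Nothing, so only the binary
    // alternative survives and x, y take their types from it.
    Runner.run((x, y) => x + y)   // 3
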
 
@@ -3167,28 +3207,43 @@ trait Typers extends Modes with Adaptations with Tags {
       fun.tpe match {
         case OverloadedType(pre, alts) =>
           def handleOverloaded = {
-            val undetparams = context.extractUndetparams()
-
-            val argtpes = new ListBuffer[Type]
-            val amode = forArgMode(fun, mode)
-            val args1 = args map {
-              case arg @ AssignOrNamedArg(Ident(name), rhs) =>
-                // named args: only type the righthand sides ("unknown identifier" errors otherwise)
-                val rhs1 = typedArg(rhs, amode, BYVALmode, WildcardType)
-                argtpes += NamedType(name, rhs1.tpe.deconst)
-                // the assign is untyped; that's ok because we call doTypedApply
-                atPos(arg.pos) { new AssignOrNamedArg(arg.lhs, rhs1) }
-              case arg =>
-                val arg1 = typedArg(arg, amode, BYVALmode, WildcardType)
-                argtpes += arg1.tpe.deconst
-                arg1
+            val undetparams = context.undetparams
+            val (args1, argTpes) = context.savingUndeterminedTypeParams() {
+              val amode = forArgMode(fun, mode)
+              def typedArg0(tree: Tree) = typedArg(tree, amode, BYVALmode, WildcardType)
+              args.map {
+                case arg @ AssignOrNamedArg(Ident(name), rhs) =>
+                  // named args: only type the righthand sides ("unknown identifier" errors otherwise)
+                  // the assign is untyped; that's ok because we call doTypedApply
+                  val typedRhs        = typedArg0(rhs)
+                  val argWithTypedRhs = treeCopy.AssignOrNamedArg(arg, arg.lhs, typedRhs)
+
+                  // TODO: SI-8197/SI-4592: check whether this named argument could be interpreted as an assign
+                  // infer.checkNames must not use UnitType: it may not be a valid assignment, or the setter may return another type from Unit
+                  //
+                  // var typedAsAssign = true
+                  // val argTyped = silent(_.typedArg(argWithTypedRhs, amode, BYVALmode, WildcardType)) orElse { errors =>
+                  //   typedAsAssign = false
+                  //   argWithTypedRhs
+                  // }
+                  //
+                  // TODO: add an assignmentType field to NamedType, equal to:
+                  // assignmentType = if (typedAsAssign) argTyped.tpe else NoType
+
+                  (argWithTypedRhs, NamedType(name, typedRhs.tpe.deconst))
+                case arg @ treeInfo.WildcardStarArg(repeated) =>
+                  val arg1 = typedArg0(arg)
+                  (arg1, RepeatedType(arg1.tpe.deconst))
+                case arg =>
+                  val arg1 = typedArg0(arg)
+                  (arg1, arg1.tpe.deconst)
+              }.unzip
             }
-            context.undetparams = undetparams
             if (context.hasErrors)
               setError(tree)
             else {
-              inferMethodAlternative(fun, undetparams, argtpes.toList, pt, varArgsOnly = treeInfo.isWildcardStarArgList(args))
-              doTypedApply(tree, adapt(fun, forFunMode(mode), WildcardType), args1, mode, pt)
+              inferMethodAlternative(fun, undetparams, argTpes, pt)
+              doTypedApply(tree, adapt(fun, mode.forFunMode, WildcardType), args1, mode, pt)
             }
           }
           handleOverloaded
@@ -3196,65 +3251,62 @@ trait Typers extends Modes with Adaptations with Tags {
         case mt @ MethodType(params, _) =>
           val paramTypes = mt.paramTypes
           // repeat vararg as often as needed, remove by-name
-          val formals = formalTypes(paramTypes, args.length)
+          val argslen = args.length
+          val formals = formalTypes(paramTypes, argslen)
 
-          /** Try packing all arguments into a Tuple and apply `fun`
-           *  to that. This is the last thing which is tried (after
-           *  default arguments)
+          /* Try packing all arguments into a Tuple and apply `fun`
+           * to that. This is the last thing which is tried (after
+           * default arguments)
            */
-          def tryTupleApply: Option[Tree] = {
-            // if 1 formal, 1 arg (a tuple), otherwise unmodified args
-            val tupleArgs = actualArgs(tree.pos.makeTransparent, args, formals.length)
-
-            if (!sameLength(tupleArgs, args) && !isUnitForVarArgs(args, params)) {
+          def tryTupleApply: Tree = (
+            if (eligibleForTupleConversion(paramTypes, argslen) && !phase.erasedTypes) {
+              val tupleArgs = List(atPos(tree.pos.makeTransparent)(gen.mkTuple(args)))
               // expected one argument, but got 0 or >1 ==>  try applying to tuple
               // the inner "doTypedApply" does "extractUndetparams" => restore when it fails
               val savedUndetparams = context.undetparams
-              silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) match {
-                case SilentResultValue(t) =>
+              silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) map { t =>
                   // Depending on user options, may warn or error here if
                   // a Unit or tuple was inserted.
-                  Some(t) filter (tupledTree =>
-                       !inExprModeButNot(mode, FUNmode)
-                    || tupledTree.symbol == null
-                    || checkValidAdaptation(tupledTree, args)
+                  val keepTree = (
+                       !mode.typingExprNotFun
+                    || t.symbol == null
+                    || checkValidAdaptation(t, args)
                   )
-                case _ =>
-                  context.undetparams = savedUndetparams
-                  None
-              }
-            } else None
-          }
+                  if (keepTree) t else EmptyTree
+              } orElse { _ => context.undetparams = savedUndetparams ; EmptyTree }
+            }
+            else EmptyTree
+          )
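
What tryTupleApply above does for a call site, sketched below; depending on -Yno-adapted-args / -Ywarn-adapted-args the adaptation is an error, a warning, or silent.

    def dist(p: (Int, Int)): Int = math.abs(p._1 - p._2)
    dist(3, 7)   // one Tuple2 parameter, two arguments: adapted to dist((3, 7))
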
 
-          /** Treats an application which uses named or default arguments.
-           *  Also works if names + a vararg used: when names are used, the vararg
-           *  parameter has to be specified exactly once. Note that combining varargs
-           *  and defaults is ruled out by typedDefDef.
+          /* Treats an application which uses named or default arguments.
+           * Also works if names + a vararg are used: when names are used, the vararg
+           * parameter has to be specified exactly once. Note that combining varargs
+           * and defaults is ruled out by typedDefDef.
            */
           def tryNamesDefaults: Tree = {
             val lencmp = compareLengths(args, formals)
 
             def checkNotMacro() = {
-              if (fun.symbol != null && fun.symbol.filter(sym => sym != null && sym.isTermMacro && !sym.isErroneous) != NoSymbol)
-                tryTupleApply getOrElse duplErrorTree(NamedAndDefaultArgumentsNotSupportedForMacros(tree, fun))
+              if (treeInfo.isMacroApplication(fun))
+                tryTupleApply orElse duplErrorTree(NamedAndDefaultArgumentsNotSupportedForMacros(tree, fun))
             }
 
             if (mt.isErroneous) duplErrTree
-            else if (inPatternMode(mode)) {
+            else if (mode.inPatternMode) {
               // #2064
               duplErrorTree(WrongNumberOfArgsError(tree, fun))
             } else if (lencmp > 0) {
-              tryTupleApply getOrElse duplErrorTree(TooManyArgsNamesDefaultsError(tree, fun))
+              tryTupleApply orElse duplErrorTree(TooManyArgsNamesDefaultsError(tree, fun))
             } else if (lencmp == 0) {
               // we don't need defaults. names were used, so this application is transformed
               // into a block (@see transformNamedApplication in NamesDefaults)
               val (namelessArgs, argPos) = removeNames(Typer.this)(args, params)
               if (namelessArgs exists (_.isErroneous)) {
                 duplErrTree
-              } else if (!isIdentity(argPos) && !sameLength(formals, params))
-                // !isIdentity indicates that named arguments are used to re-order arguments
+              } else if (!allArgsArePositional(argPos) && !sameLength(formals, params))
+                // !allArgsArePositional indicates that named arguments are used to re-order arguments
                 duplErrorTree(MultipleVarargError(tree))
-              else if (isIdentity(argPos) && !isNamedApplyBlock(fun)) {
+              else if (allArgsArePositional(argPos) && !isNamedApplyBlock(fun)) {
                 // if there's no re-ordering, and fun is not transformed, no need to transform
                 // more than an optimization, e.g. important in "synchronized { x = update-x }"
                 checkNotMacro()
@@ -3320,69 +3372,47 @@ trait Typers extends Modes with Adaptations with Tags {
                   doTypedApply(tree, if (blockIsEmpty) fun else fun1, allArgs, mode, pt)
                 } else {
                   rollbackNamesDefaultsOwnerChanges()
-                  tryTupleApply getOrElse duplErrorTree(NotEnoughArgsError(tree, fun, missing))
+                  tryTupleApply orElse duplErrorTree(NotEnoughArgsError(tree, fun, missing))
                 }
               }
             }
           }
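
The named/default handling above at the use site (illustrative):

    def box(width: Int = 80, height: Int = 24, title: String) = s"$title: ${width}x$height"
    box(title = "log", height = 40)   // names reorder the arguments; width falls back to its default
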
 
           if (!sameLength(formals, args) ||   // wrong nb of arguments
-              (args exists isNamed) ||        // uses a named argument
+              (args exists isNamedArg) ||     // uses a named argument
               isNamedApplyBlock(fun)) {       // fun was transformed to a named apply block =>
                                               // integrate this application into the block
-            if (dyna.isApplyDynamicNamed(fun)) dyna.typedNamedApply(tree, fun, args, mode, pt)
+            if (dyna.isApplyDynamicNamed(fun) && isDynamicRewrite(fun)) dyna.typedNamedApply(tree, fun, args, mode, pt)
             else tryNamesDefaults
           } else {
             val tparams = context.extractUndetparams()
             if (tparams.isEmpty) { // all type params are defined
               def handleMonomorphicCall: Tree = {
-                // In order for checkDead not to be misled by the unfortunate special
-                // case of AnyRef#synchronized (which is implemented with signature T => T
-                // but behaves as if it were (=> T) => T) we need to know what is the actual
-                // target of a call.  Since this information is no longer available from
-                // typedArg, it is recorded here.
-                val args1 =
-                  // no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
-                  // ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
-                  // I've exhausted all other semi-clean approaches I could think of in balancing GADT magic, SI-6145, CPS type-driven transforms and other existential trickiness
-                  // (the right thing to do -- packing existential types -- runs into limitations in subtyping existential types,
-                  //  casting breaks SI-6145,
-                  //  not casting breaks GADT typing as it requires sneaking ill-typed trees past typer)
-                  if (!phase.erasedTypes && fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol))
+                // no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
+                // ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
+                // I've exhausted all other semi-clean approaches I could think of in balancing GADT magic, SI-6145, CPS type-driven transforms and other existential trickiness
+                // (the right thing to do -- packing existential types -- runs into limitations in subtyping existential types,
+                //  casting breaks SI-6145,
+                //  not casting breaks GADT typing as it requires sneaking ill-typed trees past typer)
+                def noExpectedType = !phase.erasedTypes && fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol)
+
+                val args1 = (
+                  if (noExpectedType)
                     typedArgs(args, forArgMode(fun, mode))
                   else
-                    typedArgs(args, forArgMode(fun, mode), paramTypes, formals)
+                    typedArgsForFormals(args, paramTypes, forArgMode(fun, mode))
+                )
 
                 // instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
                 // val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
                 // precise(foo) : foo.type => foo.type
-                val restpe = mt.resultType(args1 map (arg => gen.stableTypeFor(arg) getOrElse arg.tpe))
+                val restpe = mt.resultType(mapList(args1)(arg => gen stableTypeFor arg orElse arg.tpe))
                 def ifPatternSkipFormals(tp: Type) = tp match {
-                  case MethodType(_, rtp) if (inPatternMode(mode)) => rtp
+                  case MethodType(_, rtp) if (mode.inPatternMode) => rtp
                   case _ => tp
                 }
 
-                // Replace the Delegate-Chainer methods += and -= with corresponding
-                // + and - calls, which are translated in the code generator into
-                // Combine and Remove
-                if (forMSIL) {
-                  fun match {
-                    case Select(qual, name) =>
-                      if (isSubType(qual.tpe, DelegateClass.tpe)
-                        && (name == encode("+=") || name == encode("-="))) {
-                        val n = if (name == encode("+=")) nme.PLUS else nme.MINUS
-                        val f = Select(qual, n)
-                        // the compiler thinks, the PLUS method takes only one argument,
-                        // but he thinks it's an instance method -> still two ref's on the stack
-                        //  -> translated by backend
-                        val rhs = treeCopy.Apply(tree, f, args)
-                        return typed(Assign(qual, rhs))
-                      }
-                    case _ => ()
-                  }
-                }
-
-                /**
+                /*
                  * This is translating uses of List() into Nil.  This is less
                  *  than ideal from a consistency standpoint, but it shouldn't be
                  *  altered without due caution.
@@ -3390,7 +3420,7 @@ trait Typers extends Modes with Adaptations with Tags {
                  *  forced during kind-arity checking, so it is guarded by additional
                  *  tests to ensure we're sufficiently far along.
                  */
-                if (args.isEmpty && !forInteractive && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply))
+                if (args.isEmpty && canTranslateEmptyListToNil && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply))
                   atPos(tree.pos)(gen.mkNil setType restpe)
                 else
                   constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe))
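
The List() rewrite guarded above, as it looks to users; the inferred element type is kept on the resulting Nil tree:

    val xs = List()              // typed as Nil; xs: List[Nothing]
    val ys: List[Int] = List()   // also Nil, but with the expected type preserved: List[Int]
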
@@ -3404,7 +3434,7 @@ trait Typers extends Modes with Adaptations with Tags {
               doTypedApply(tree, fun, args, mode, pt)
             } else {
               def handlePolymorphicCall = {
-                assert(!inPatternMode(mode), modeString(mode)) // this case cannot arise for patterns
+                assert(!mode.inPatternMode, mode) // this case cannot arise for patterns
                 val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
                 val strictTargs = map2(lenientTargs, tparams)((targ, tparam) =>
                   if (targ == WildcardType) tparam.tpeHK else targ)
@@ -3430,9 +3460,8 @@ trait Typers extends Modes with Adaptations with Tags {
                   // define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun"
                   // returns those undetparams which have not been instantiated.
                   val undetparams = inferMethodInstance(fun, tparams, args1, pt)
-                  val result = doTypedApply(tree, fun, args1, mode, pt)
-                  context.undetparams = undetparams
-                  result
+                  try doTypedApply(tree, fun, args1, mode, pt)
+                  finally context.undetparams = undetparams
                 }
               }
               handlePolymorphicCall
@@ -3446,157 +3475,45 @@ trait Typers extends Modes with Adaptations with Tags {
           if (!tree.isErrorTyped) setError(tree) else tree
           // @H change to setError(treeCopy.Apply(tree, fun, args))
 
-        case otpe if inPatternMode(mode) && unapplyMember(otpe).exists =>
+        // SI-7877 `isTerm` needed to exclude `class T[A] { def unapply(..) }; ... case T[X] =>`
+        case HasUnapply(unapply) if mode.inPatternMode && fun.isTerm =>
           doTypedUnapply(tree, fun0, fun, args, mode, pt)
 
         case _ =>
-          duplErrorTree(ApplyWithoutArgsError(tree, fun))
-      }
-    }
-
-    def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
-      def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
-      def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
-
-      val otpe = fun.tpe
-
-      if (args.length > MaxTupleArity)
-        return duplErrorTree(TooManyArgsPatternError(fun))
-
-      //
-      def freshArgType(tp: Type): (List[Symbol], Type) = tp match {
-        case MethodType(param :: _, _) =>
-          (Nil, param.tpe)
-        case PolyType(tparams, restpe) =>
-          createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t)))
-        // No longer used, see test case neg/t960.scala (#960 has nothing to do with it)
-        case OverloadedType(_, _) =>
-          OverloadedUnapplyError(fun)
-          (Nil, ErrorType)
-        case _ =>
-          UnapplyWithSingleArgError(fun)
-          (Nil, ErrorType)
-      }
-
-      val unapp     = unapplyMember(otpe)
-      val unappType = otpe.memberType(unapp)
-      val argDummy  = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt
-      val arg       = Ident(argDummy) setType pt
-
-      val uncheckedTypeExtractor =
-        if (unappType.paramTypes.nonEmpty)
-          extractorForUncheckedType(tree.pos, unappType.paramTypes.head)
-        else None
-
-      if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
-        //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType)
-        val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
-        val unapplyContext = context.makeNewScope(context.tree, context.owner)
-        freeVars foreach unapplyContext.scope.enter
-
-        val typer1 = newTyper(unapplyContext)
-        val pattp = typer1.infer.inferTypedPattern(tree, unappFormal, arg.tpe, canRemedy = uncheckedTypeExtractor.nonEmpty)
-
-        // turn any unresolved type variables in freevars into existential skolems
-        val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
-        arg.tpe = pattp.substSym(freeVars, skolems)
-        argDummy setInfo arg.tpe
-      }
-
-      // setType null is necessary so that ref will be stabilized; see bug 881
-      val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg)))
-
-      if (fun1.tpe.isErroneous) duplErrTree
-      else {
-        val resTp     = fun1.tpe.finalResultType.normalize
-        val nbSubPats = args.length
-        val (formals, formalsExpanded) =
-          extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol, treeInfo.effectivePatternArity(args))
-        if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun))
-        else {
-          val args1 = typedArgs(args, mode, formals, formalsExpanded)
-          // This used to be the following (failing) assert:
-          //   assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt)
-          // I modified as follows.  See SI-1048.
-          val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
-
-          val itype = glb(List(pt1, arg.tpe))
-          arg.tpe = pt1    // restore type (arg is a dummy tree, just needs to pass typechecking)
-          val unapply = UnApply(fun1, args1) setPos tree.pos setType itype
-
-          // if the type that the unapply method expects for its argument is uncheckable, wrap in classtag extractor
-          // skip if the unapply's type is not a method type with (at least, but really it should be exactly) one argument
-          // also skip if we already wrapped a classtag extractor (so we don't keep doing that forever)
-          if (uncheckedTypeExtractor.isEmpty || fun1.symbol.owner.isNonBottomSubClass(ClassTagClass)) unapply
-          else wrapClassTagUnapply(unapply, uncheckedTypeExtractor.get, unappType.paramTypes.head)
-        }
+          if (treeInfo.isMacroApplication(tree)) duplErrorTree(MacroTooManyArgumentListsError(tree, fun.symbol))
+          else duplErrorTree(ApplyWithoutArgsError(tree, fun))
       }
     }
 
-    def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = {
-      // TODO: disable when in unchecked match
-      // we don't create a new Context for a Match, so find the CaseDef, then go out one level and navigate back to the match that has this case
-      // val thisCase = context.nextEnclosing(_.tree.isInstanceOf[CaseDef])
-      // val unchecked = thisCase.outer.tree.collect{case Match(selector, cases) if cases contains thisCase => selector} match {
-      //   case List(Typed(_, tpt)) if tpt.tpe hasAnnotation UncheckedClass => true
-      //   case t => println("outer tree: "+ (t, thisCase, thisCase.outer.tree)); false
-      // }
-      // println("wrapClassTagUnapply"+ (!isPastTyper && infer.containsUnchecked(pt), pt, uncheckedPattern))
-      // println("wrapClassTagUnapply: "+ extractor)
-      // println(util.Position.formatMessage(uncheckedPattern.pos, "made unchecked type test into a checked one", true))
-
-      val args = List(uncheckedPattern)
-      val app  = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args))
-      // must call doTypedUnapply directly, as otherwise we get undesirable rewrites
-      // and re-typechecks of the target of the unapply call in PATTERNmode,
-      // this breaks down when the classTagExtractor (which defineds the unapply member) is not a simple reference to an object,
-      // but an arbitrary tree as is the case here
-      doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt)
-    }
-
-    // if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test
-    // return the corresponding extractor (an instance of ClassTag[`pt`])
-    def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (!opt.virtPatmat || isPastTyper) None else {
-      // only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
-      pt.normalize.typeConstructor match {
-        // if at least one of the types in an intersection is checkable, use the checkable ones
-        // this avoids problems as in run/matchonseq.scala, where the expected type is `Coll with scala.collection.SeqLike`
-        // Coll is an abstract type, but SeqLike of course is not
-        case RefinedType(parents, _)  if (parents.length >= 2) && (parents.exists(tp => !infer.containsUnchecked(tp))) =>
-          None
-
-        case ptCheckable if infer.containsUnchecked(ptCheckable) =>
-          val classTagExtractor = resolveClassTag(pos, ptCheckable)
-
-          if (classTagExtractor != EmptyTree && unapplyMember(classTagExtractor.tpe) != NoSymbol)
-            Some(classTagExtractor)
-          else None
-
-        case _ => None
-    }
-    }
-
     /**
      * Convert an annotation constructor call into an AnnotationInfo.
-     *
-     * @param annClass the expected annotation class
      */
-    def typedAnnotation(ann: Tree, mode: Int = EXPRmode, selfsym: Symbol = NoSymbol, annClass: Symbol = AnnotationClass, requireJava: Boolean = false): AnnotationInfo = {
-      lazy val annotationError = AnnotationInfo(ErrorType, Nil, Nil)
+    def typedAnnotation(ann: Tree, mode: Mode = EXPRmode): AnnotationInfo = {
       var hasError: Boolean = false
       val pending = ListBuffer[AbsTypeError]()
 
+      def finish(res: AnnotationInfo): AnnotationInfo = {
+        if (hasError) {
+          pending.foreach(ErrorUtils.issueTypeError)
+          ErroneousAnnotation
+        }
+        else res
+      }
+
       def reportAnnotationError(err: AbsTypeError) = {
         pending += err
         hasError = true
-        annotationError
+        ErroneousAnnotation
       }
 
-      /** Calling constfold right here is necessary because some trees (negated
-       *  floats and literals in particular) are not yet folded.
+      /* Calling constfold right here is necessary because some trees (negated
+       * floats and literals in particular) are not yet folded.
        */
       def tryConst(tr: Tree, pt: Type): Option[LiteralAnnotArg] = {
-        val const: Constant = typed(constfold(tr), EXPRmode, pt) match {
+        // The typed tree may be relevantly different than the tree `tr`,
+        // e.g. it may have encountered an implicit conversion.
+        val ttree = typed(constfold(tr), pt)
+        val const: Constant = ttree match {
           case l @ Literal(c) if !l.isErroneous => c
           case tree => tree.tpe match {
             case ConstantType(c)  => c
@@ -3605,29 +3522,36 @@ trait Typers extends Modes with Adaptations with Tags {
         }
 
         if (const == null) {
-          reportAnnotationError(AnnotationNotAConstantError(tr)); None
+          reportAnnotationError(AnnotationNotAConstantError(ttree)); None
         } else if (const.value == null) {
           reportAnnotationError(AnnotationArgNullError(tr)); None
         } else
           Some(LiteralAnnotArg(const))
       }
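
Why the constant folding above matters, using a standard classfile annotation that requires a constant argument:

    // -1L parses as unary minus applied to a literal; constfold reduces it to a
    // Constant so it can become a LiteralAnnotArg.
    @SerialVersionUID(-1L)
    class Payload extends Serializable
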
 
-      /** Converts an untyped tree to a ClassfileAnnotArg. If the conversion fails,
-       *  an error message is reported and None is returned.
+      /* Converts an untyped tree to a ClassfileAnnotArg. If the conversion fails,
+       * an error message is reported and None is returned.
        */
       def tree2ConstArg(tree: Tree, pt: Type): Option[ClassfileAnnotArg] = tree match {
         case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) if (pt.typeSymbol == ArrayClass) =>
           reportAnnotationError(ArrayConstantsError(tree)); None
 
         case ann @ Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
-          val annInfo = typedAnnotation(ann, mode, NoSymbol, pt.typeSymbol, true)
+          val annInfo = typedAnnotation(ann, mode)
+          val annType = annInfo.tpe
+
+          if (!annType.typeSymbol.isSubClass(pt.typeSymbol))
+            reportAnnotationError(AnnotationTypeMismatchError(tpt, annType, annType))
+          else if (!annType.typeSymbol.isSubClass(ClassfileAnnotationClass))
+            reportAnnotationError(NestedAnnotationError(ann, annType))
+
           if (annInfo.atp.isErroneous) { hasError = true; None }
           else Some(NestedAnnotArg(annInfo))
 
         // use of Array.apply[T: ClassTag](xs: T*): Array[T]
         // and    Array.apply(x: Int, xs: Int*): Array[Int]       (and similar)
         case Apply(fun, args) =>
-          val typedFun = typed(fun, forFunMode(mode), WildcardType)
+          val typedFun = typed(fun, mode.forFunMode)
           if (typedFun.symbol.owner == ArrayModule.moduleClass && typedFun.symbol.name == nme.apply)
             pt match {
               case TypeRef(_, ArrayClass, targ :: _) =>
@@ -3655,50 +3579,48 @@ trait Typers extends Modes with Adaptations with Tags {
       }
 
       // begin typedAnnotation
-      val (fun, argss) = {
-        def extract(fun: Tree, outerArgss: List[List[Tree]]):
-          (Tree, List[List[Tree]]) = fun match {
-            case Apply(f, args) =>
-              extract(f, args :: outerArgss)
-            case Select(New(tpt), nme.CONSTRUCTOR) =>
-              (fun, outerArgss)
-            case _ =>
-              reportAnnotationError(UnexpectedTreeAnnotation(fun))
-              (setError(fun), outerArgss)
-          }
-        extract(ann, List())
-      }
-
-      val res = if (fun.isErroneous) annotationError
-      else {
-        val typedFun @ Select(New(tpt), _) = typed(fun, forFunMode(mode), WildcardType)
-        val annType = tpt.tpe
+      val treeInfo.Applied(fun0, targs, argss) = ann
+      if (fun0.isErroneous)
+        return finish(ErroneousAnnotation)
+      val typedFun0 = typed(fun0, mode.forFunMode)
+      val typedFunPart = (
+        // If there are dummy type arguments in typeFun part, it suggests we
+        // must type the actual constructor call, not only the select. The value
+        // arguments are how the type arguments will be inferred.
+        if (targs.isEmpty && typedFun0.exists(t => t.tpe != null && isDummyAppliedType(t.tpe)))
+          logResult(s"Retyped $typedFun0 to find type args")(typed(argss.foldLeft(fun0)(Apply(_, _))))
+        else
+          typedFun0
+      )
+      val treeInfo.Applied(typedFun @ Select(New(annTpt), _), _, _) = typedFunPart
+      val annType = annTpt.tpe
 
-        if (typedFun.isErroneous) annotationError
+      finish(
+        if (typedFun.isErroneous)
+          ErroneousAnnotation
         else if (annType.typeSymbol isNonBottomSubClass ClassfileAnnotationClass) {
           // annotation to be saved as java classfile annotation
           val isJava = typedFun.symbol.owner.isJavaDefined
-          if (!annType.typeSymbol.isNonBottomSubClass(annClass)) {
-            reportAnnotationError(AnnotationTypeMismatchError(tpt, annClass.tpe, annType))
-          } else if (argss.length > 1) {
+          if (argss.length > 1) {
             reportAnnotationError(MultipleArgumentListForAnnotationError(ann))
-          } else {
+          }
+          else {
             val annScope = annType.decls
                 .filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined)
-            val names = new scala.collection.mutable.HashSet[Symbol]
-            def hasValue = names exists (_.name == nme.value)
+            val names = mutable.Set[Symbol]()
             names ++= (if (isJava) annScope.iterator
                        else typedFun.tpe.params.iterator)
+
+            def hasValue = names exists (_.name == nme.value)
             val args = argss match {
-              case List(List(arg)) if !isNamed(arg) && hasValue =>
-                List(new AssignOrNamedArg(Ident(nme.value), arg))
-              case as :: _ => as
+              case (arg :: Nil) :: Nil if !isNamedArg(arg) && hasValue => gen.mkNamedArg(nme.value, arg) :: Nil
+              case args :: Nil                                         => args
             }
 
             val nvPairs = args map {
               case arg @ AssignOrNamedArg(Ident(name), rhs) =>
                 val sym = if (isJava) annScope.lookup(name)
-                          else typedFun.tpe.params.find(p => p.name == name).getOrElse(NoSymbol)
+                          else findSymbol(typedFun.tpe.params)(_.name == name)
                 if (sym == NoSymbol) {
                   reportAnnotationError(UnknownAnnotationNameError(arg, name))
                   (nme.ERROR, None)
@@ -3722,47 +3644,16 @@ trait Typers extends Modes with Adaptations with Tags {
                 reportAnnotationError(AnnotationMissingArgError(ann, annType, sym))
             }
 
-            if (hasError) annotationError
+            if (hasError) ErroneousAnnotation
             else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setOriginal(Apply(typedFun, args).setPos(ann.pos))
           }
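
The `value` shorthand handled above (see hasValue), for a hypothetical Java annotation; the annotation name is illustrative only:

    // Given a Java annotation  public @interface Level { int value(); }
    // a single unnamed argument is rewritten to the named form:
    @Level(3) def shutdown(): Unit = ()   // typed as @Level(value = 3)
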
-        } else if (requireJava) {
-          reportAnnotationError(NestedAnnotationError(ann, annType))
-        } else {
-          val typedAnn = if (selfsym == NoSymbol) {
+        }
+        else {
+          val typedAnn: Tree = {
             // local dummy fixes SI-5544
             val localTyper = newTyper(context.make(ann, context.owner.newLocalDummy(ann.pos)))
-            localTyper.typed(ann, mode, annClass.tpe)
-          } else {
-            // Since a selfsym is supplied, the annotation should have
-            // an extra "self" identifier in scope for type checking.
-            // This is implemented by wrapping the rhs
-            // in a function like "self => rhs" during type checking,
-            // and then stripping the "self =>" and substituting
-            // in the supplied selfsym.
-            val funcparm = ValDef(NoMods, nme.self, TypeTree(selfsym.info), EmptyTree)
-            val func = Function(List(funcparm), ann.duplicate)
-                                         // The .duplicate of annot.constr
-                                         // deals with problems that
-                                         // accur if this annotation is
-                                         // later typed again, which
-                                         // the compiler sometimes does.
-                                         // The problem is that "self"
-                                         // ident's within annot.constr
-                                         // will retain the old symbol
-                                         // from the previous typing.
-            val fun1clazz = FunctionClass(1)
-            val funcType = typeRef(fun1clazz.tpe.prefix,
-                                   fun1clazz,
-                                   List(selfsym.info, annClass.tpe))
-
-            (typed(func, mode, funcType): @unchecked) match {
-              case t @ Function(List(arg), rhs) =>
-                val subs =
-                  new TreeSymSubstituter(List(arg.symbol),List(selfsym))
-                subs(rhs)
-            }
+            localTyper.typed(ann, mode, annType)
           }
-
           def annInfo(t: Tree): AnnotationInfo = t match {
             case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
               AnnotationInfo(annType, args, List()).setOriginal(typedAnn).setPos(t.pos)
@@ -3786,36 +3677,33 @@ trait Typers extends Modes with Adaptations with Tags {
           if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2)
             unit.deprecationWarning(ann.pos, "@deprecated now takes two arguments; see the scaladoc.")
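
The two-argument form the check above asks for:

    @deprecated("use renderAll instead", "2.11.0")
    def render(): Unit = ()
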
 
-          if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) annotationError
+          if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation
           else annInfo(typedAnn)
         }
-      }
-
-      if (hasError) {
-        pending.foreach(ErrorUtils.issueTypeError)
-        annotationError
-      } else res
+      )
     }
 
     /** Compute an existential type from raw hidden symbols `syms` and type `tp`
      */
-    def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, Some(context0.owner))
-
-    def isReferencedFrom(ctx: Context, sym: Symbol): Boolean =
-      ctx.owner.isTerm &&
-      (ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) ||
-      {
-        var ctx1 = ctx.outer
-        while ((ctx1 != NoContext) && (ctx1.scope eq ctx.scope)) ctx1 = ctx1.outer
-        (ctx1 != NoContext) && isReferencedFrom(ctx1, sym)
-      }
-
-    def isCapturedExistential(sym: Symbol) =
-      (sym hasAllFlags (EXISTENTIAL | CAPTURED)) && {
-      val start = if (Statistics.canEnable) Statistics.startTimer(isReferencedNanos) else null
-      try !isReferencedFrom(context, sym)
-      finally if (Statistics.canEnable) Statistics.stopTimer(isReferencedNanos, start)
-    }
+    def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, context0.owner)
+
+    def isReferencedFrom(ctx: Context, sym: Symbol): Boolean = (
+       ctx.owner.isTerm && (ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) || {
+          var ctx1 = ctx.outer
+          while ((ctx1 != NoContext) && (ctx1.scope eq ctx.scope))
+            ctx1 = ctx1.outer
+
+          (ctx1 != NoContext) && isReferencedFrom(ctx1, sym)
+       }
+    )
+
+    def isCapturedExistential(sym: Symbol) = (
+      (sym hasAllFlags EXISTENTIAL | CAPTURED) && {
+        val start = if (Statistics.canEnable) Statistics.startTimer(isReferencedNanos) else null
+        try !isReferencedFrom(context, sym)
+        finally if (Statistics.canEnable) Statistics.stopTimer(isReferencedNanos, start)
+      }
+    )
 
     def packCaptured(tpe: Type): Type = {
       val captured = mutable.Set[Symbol]()
@@ -3827,26 +3715,31 @@ trait Typers extends Modes with Adaptations with Tags {
 
     /** convert local symbols and skolems to existentials */
     def packedType(tree: Tree, owner: Symbol): Type = {
-      def defines(tree: Tree, sym: Symbol) =
-        sym.isExistentialSkolem && sym.unpackLocation == tree ||
-        tree.isDef && tree.symbol == sym
-      def isVisibleParameter(sym: Symbol) =
-        sym.isParameter && (sym.owner == owner) && (sym.isType || !owner.isAnonymousFunction)
+      def defines(tree: Tree, sym: Symbol) = (
+           sym.isExistentialSkolem && sym.unpackLocation == tree
+        || tree.isDef && tree.symbol == sym
+      )
+      def isVisibleParameter(sym: Symbol) = (
+           sym.isParameter
+        && (sym.owner == owner)
+        && (sym.isType || !owner.isAnonymousFunction)
+      )
       def containsDef(owner: Symbol, sym: Symbol): Boolean =
         (!sym.hasPackageFlag) && {
           var o = sym.owner
           while (o != owner && o != NoSymbol && !o.hasPackageFlag) o = o.owner
           o == owner && !isVisibleParameter(sym)
         }
-      var localSyms = scala.collection.immutable.Set[Symbol]()
-      var boundSyms = scala.collection.immutable.Set[Symbol]()
+      var localSyms = immutable.Set[Symbol]()
+      var boundSyms = immutable.Set[Symbol]()
       def isLocal(sym: Symbol): Boolean =
         if (sym == NoSymbol || sym.isRefinementClass || sym.isLocalDummy) false
         else if (owner == NoSymbol) tree exists (defines(_, sym))
         else containsDef(owner, sym) || isRawParameter(sym) || isCapturedExistential(sym)
       def containsLocal(tp: Type): Boolean =
         tp exists (t => isLocal(t.typeSymbol) || isLocal(t.termSymbol))
-      val normalizeLocals = new TypeMap {
+
+      val dealiasLocals = new TypeMap {
         def apply(tp: Type): Type = tp match {
           case TypeRef(pre, sym, args) =>
             if (sym.isAliasType && containsLocal(tp) && (tp.dealias ne tp)) apply(tp.dealias)
@@ -3878,7 +3771,7 @@ trait Typers extends Modes with Adaptations with Tags {
           t match {
             case ExistentialType(tparams, _) =>
               boundSyms ++= tparams
-            case AnnotatedType(annots, _, _) =>
+            case AnnotatedType(annots, _) =>
               for (annot <- annots; arg <- annot.args) {
                 arg match {
                   case Ident(_) =>
@@ -3899,25 +3792,25 @@ trait Typers extends Modes with Adaptations with Tags {
         for (sym <- remainingSyms) addLocals(sym.existentialBound)
       }
 
-      val normalizedTpe = normalizeLocals(tree.tpe)
-      addLocals(normalizedTpe)
-      packSymbols(localSyms.toList, normalizedTpe)
+      val dealiasedType = dealiasLocals(tree.tpe)
+      addLocals(dealiasedType)
+      packSymbols(localSyms.toList, dealiasedType)
     }
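
A rough user-visible consequence of the packing above (an illustration, not the exact mechanism): a type mentioning a symbol local to the expression, such as a pattern skolem, is re-expressed through its existential bound.

    def firstElem(x: Any) = x match {
      case xs: List[_] => xs.head   // the skolem for _ may not escape; it is packed to its bound
      case _           => null
    }
    // inferred result type: Any
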
 
     def typedClassOf(tree: Tree, tpt: Tree, noGen: Boolean = false) =
       if (!checkClassType(tpt) && noGen) tpt
       else atPos(tree.pos)(gen.mkClassOf(tpt.tpe))
 
-    protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Int): Tree = {
+    protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Mode): Tree = {
       for (wc <- tree.whereClauses)
-        if (wc.symbol == NoSymbol) { namer.enterSym(wc); wc.symbol setFlag EXISTENTIAL }
+        if (wc.symbol == NoSymbol) { namer enterSym wc; wc.symbol setFlag EXISTENTIAL }
         else context.scope enter wc.symbol
       val whereClauses1 = typedStats(tree.whereClauses, context.owner)
-      for (vd @ ValDef(_, _, _, _) <- tree.whereClauses)
+      for (vd @ ValDef(_, _, _, _) <- whereClauses1)
         if (vd.symbol.tpe.isVolatile)
           AbstractionFromVolatileTypeError(vd)
       val tpt1 = typedType(tree.tpt, mode)
-      existentialTransform(tree.whereClauses map (_.symbol), tpt1.tpe)((tparams, tp) => {
+      existentialTransform(whereClauses1 map (_.symbol), tpt1.tpe)((tparams, tp) => {
         val original = tpt1 match {
           case tpt : TypeTree => atPos(tree.pos)(ExistentialTypeTree(tpt.original, tree.whereClauses))
           case _ => {
@@ -3931,16 +3824,16 @@ trait Typers extends Modes with Adaptations with Tags {
     }
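
The whereClauses typed above come from forSome syntax; a small self-contained example of both clause forms and of the volatile-type restriction enforced here:

    object Existentials {
      trait Module { type Tag }
      type Boxed  = Array[T] forSome { type T <: AnyRef }   // type where-clause
      type Tagged = m.Tag forSome { val m: Module }         // value where-clause; m's type must not be volatile
    }
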
 
     // lifted out of typed1 because it's needed in typedImplicit0
-    protected def typedTypeApply(tree: Tree, mode: Int, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
+    protected def typedTypeApply(tree: Tree, mode: Mode, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
       case OverloadedType(pre, alts) =>
-        inferPolyAlternatives(fun, args map (_.tpe))
+        inferPolyAlternatives(fun, mapList(args)(treeTpe))
         val tparams = fun.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree)
         val args1 = if (sameLength(args, tparams)) {
           //@M: in case TypeApply we can't check the kind-arities of the type arguments,
           // as we don't know which alternative to choose... here we do
           map2Conserve(args, tparams) {
             //@M! the polytype denotes the expected kind
-            (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
+            (arg, tparam) => typedHigherKindedType(arg, mode, Kind.FromParams(tparam.typeParams))
           }
         } else // @M: there's probably something wrong when args.length != tparams.length... (triggered by bug #320)
          // Martin, I'm using fake trees, because, if you use args or arg.map(typedType),
@@ -3953,15 +3846,15 @@ trait Typers extends Modes with Adaptations with Tags {
         typedTypeApply(tree, mode, fun setType fun.tpe.widen, args)
       case PolyType(tparams, restpe) if tparams.nonEmpty =>
         if (sameLength(tparams, args)) {
-          val targs = args map (_.tpe)
+          val targs = mapList(args)(treeTpe)
           checkBounds(tree, NoPrefix, NoSymbol, tparams, targs, "")
-          if (fun.symbol == Predef_classOf)
-            typedClassOf(tree, args.head, true)
+          if (isPredefClassOf(fun.symbol))
+            typedClassOf(tree, args.head, noGen = true)
           else {
             if (!isPastTyper && fun.symbol == Any_isInstanceOf && targs.nonEmpty) {
               val scrutineeType = fun match {
                 case Select(qual, _) => qual.tpe
-                case _               => AnyClass.tpe
+                case _               => AnyTpe
               }
               checkCheckable(tree, targs.head, scrutineeType, inPattern = false)
             }
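
    Two of the cases above are easiest to see from the surface language; a minimal sketch (names are illustrative): classOf[T] applications become class literals via typedClassOf, and isInstanceOf with a parameterized type argument goes through checkCheckable, which is where the familiar unchecked warnings come from.

        object TypeApplySketch {
          val cls: Class[String] = classOf[String]   // typedClassOf: rewritten to a class literal

          def looksLikeStrings(x: Any): Boolean =
            x.isInstanceOf[List[String]]             // checkCheckable: the type argument is erased,
                                                     // so this application draws an unchecked warning
        }
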
@@ -4020,17 +3913,17 @@ trait Typers extends Modes with Adaptations with Tags {
           // else false
       }
 
-      def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
+      def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
         def argToBinding(arg: Tree): Tree = arg match {
           case AssignOrNamedArg(i @ Ident(name), rhs) =>
-            atPos(i.pos.withEnd(rhs.pos.endOrPoint)) {
+            atPos(i.pos.withEnd(rhs.pos.end)) {
               gen.mkTuple(List(atPos(i.pos)(CODE.LIT(name.toString)), rhs))
             }
           case _ =>
             gen.mkTuple(List(CODE.LIT(""), arg))
         }
 
-        val t = treeCopy.Apply(orig, fun, args map argToBinding)
+        val t = treeCopy.Apply(orig, unmarkDynamicRewrite(fun), args map argToBinding)
         wrapErrors(t, _.typed(t, mode, pt))
       }
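
    typedNamedApply packs each named argument of a call on a Dynamic receiver into a ("name", value) tuple (positional arguments get an empty name) before re-typing the call. A minimal user-level sketch, with illustrative names:

        import scala.language.dynamics

        class Record extends Dynamic {
          def applyDynamicNamed(method: String)(args: (String, Any)*): String =
            method + args.map { case (k, v) => s" $k=$v" }.mkString
        }

        object NamedApplySketch {
          val r = new Record
          // rewritten to r.applyDynamicNamed("create")(("user", "bob"), ("age", 42))
          val s = r.create(user = "bob", age = 42)
        }
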
 
@@ -4053,12 +3946,16 @@ trait Typers extends Modes with Adaptations with Tags {
        *  - simplest solution: have two method calls
        *
        */
-      def mkInvoke(cxTree: Tree, tree: Tree, qual: Tree, name: Name): Option[Tree] = {
-        log(s"dyna.mkInvoke($cxTree, $tree, $qual, $name)")
+      def mkInvoke(context: Context, tree: Tree, qual: Tree, name: Name): Option[Tree] = {
+        val cxTree = context.enclosingNonImportContext.tree // SI-8364
+        debuglog(s"dyna.mkInvoke($cxTree, $tree, $qual, $name)")
         val treeInfo.Applied(treeSelection, _, _) = tree
-        def isDesugaredApply = treeSelection match {
-          case Select(`qual`, nme.apply) => true
-          case _                         => false
+        def isDesugaredApply = {
+          val protoQual = macroExpandee(qual) orElse qual
+          treeSelection match {
+            case Select(`protoQual`, nme.apply) => true
+            case _                              => false
+          }
         }
         acceptsApplyDynamicWithType(qual, name) map { tp =>
           // If tp == NoType, pass only explicit type arguments to applyXXX.  Not used at all
@@ -4070,20 +3967,20 @@ trait Typers extends Modes with Adaptations with Tags {
           def applyOp(args: List[Tree]) = if (hasNamed(args)) nme.applyDynamicNamed else nme.applyDynamic
           def matches(t: Tree)          = isDesugaredApply || treeInfo.dissectApplied(t).core == treeSelection
 
-          /** Note that the trees which arrive here are potentially some distance from
-           *  the trees of direct interest. `cxTree` is some enclosing expression which
-           *  may apparently be arbitrarily larger than `tree`; and `tree` itself is
-           *  too small, having at least in some cases lost its explicit type parameters.
-           *  This logic is designed to use `tree` to pinpoint the immediately surrounding
-           *  Apply/TypeApply/Select node, and only then creates the dynamic call.
-           *  See SI-6731 among others.
+          /* Note that the trees which arrive here are potentially some distance from
+           * the trees of direct interest. `cxTree` is some enclosing expression which
+           * may apparently be arbitrarily larger than `tree`; and `tree` itself is
+           * too small, having at least in some cases lost its explicit type parameters.
+           * This logic is designed to use `tree` to pinpoint the immediately surrounding
+           * Apply/TypeApply/Select node, and only then creates the dynamic call.
+           * See SI-6731 among others.
            */
           def findSelection(t: Tree): Option[(TermName, Tree)] = t match {
             case Apply(fn, args) if hasStar(args) => DynamicVarArgUnsupported(tree, applyOp(args)) ; None
             case Apply(fn, args) if matches(fn)   => Some((applyOp(args), fn))
             case Assign(lhs, _) if matches(lhs)   => Some((nme.updateDynamic, lhs))
             case _ if matches(t)                  => Some((nme.selectDynamic, t))
-            case _                                => t.children flatMap findSelection headOption
+            case _                                => (t.children flatMap findSelection).headOption
           }
           findSelection(cxTree) match {
             case Some((opName, treeInfo.Applied(_, targs, _))) =>
@@ -4092,48 +3989,38 @@ trait Typers extends Modes with Adaptations with Tags {
               val nameStringLit = atPos(treeSelection.pos.withStart(treeSelection.pos.point).makeTransparent) {
                 Literal(Constant(name.decode))
               }
-              atPos(qual.pos)(Apply(fun, List(nameStringLit)))
+              markDynamicRewrite(atPos(qual.pos)(Apply(fun, List(nameStringLit))))
             case _ =>
               setError(tree)
           }
         }
       }
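
    For reference, a sketch of the surface forms mkInvoke targets (the Bag class below is illustrative): selections, applications and assignments on a Dynamic receiver are rewritten to selectDynamic, applyDynamic and updateDynamic, with the selected name passed as a string literal, once findSelection has located the surrounding Select/Apply/Assign node.

        import scala.language.dynamics

        class Bag extends Dynamic {
          private val m = scala.collection.mutable.Map[String, Any]()
          def selectDynamic(name: String): Any              = m(name)
          def applyDynamic(name: String)(args: Any*): Any   = (name, args)
          def updateDynamic(name: String)(value: Any): Unit = m(name) = value
        }

        object DynamicSketch {
          val b = new Bag
          b.colour = "red"      // b.updateDynamic("colour")("red")
          val c = b.colour      // b.selectDynamic("colour")
          val r = b.mix(1, 2)   // b.applyDynamic("mix")(1, 2)
        }
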
-
-      def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = {
-        silent(typeTree) match {
-          case SilentResultValue(r) => r
-          case SilentTypeError(err) => DynamicRewriteError(tree, err)
-        }
-      }
+      def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = silent(typeTree) orElse (err => DynamicRewriteError(tree, err.head))
     }
 
-    final def deindentTyping() = context.typingIndentLevel -= 2
-    final def indentTyping() = context.typingIndentLevel += 2
-    @inline final def printTyping(s: => String) = {
-      if (printTypings)
-        println(context.typingIndent + s.replaceAll("\n", "\n" + context.typingIndent))
-    }
-    @inline final def printInference(s: => String) = {
-      if (printInfers)
-        println(s)
-    }
+    def typed1(tree: Tree, mode: Mode, pt: Type): Tree = {
+      // Lookup in the given class using the root mirror.
+      def lookupInOwner(owner: Symbol, name: Name): Symbol =
+        if (mode.inQualMode) rootMirror.missingHook(owner, name) else NoSymbol
 
-    def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
-      def isPatternMode = inPatternMode(mode)
+      // Lookup in the given qualifier.  Used in last-ditch efforts by typedIdent and typedSelect.
+      def lookupInRoot(name: Name): Symbol  = lookupInOwner(rootMirror.RootClass, name)
+      def lookupInEmpty(name: Name): Symbol = rootMirror.EmptyPackageClass.info member name
 
-      //Console.println("typed1("+tree.getClass()+","+Integer.toHexString(mode)+","+pt+")")
-      //@M! get the type of the qualifier in a Select tree, otherwise: NoType
-      def prefixType(fun: Tree): Type = fun match {
-        case Select(qualifier, _) => qualifier.tpe
-//        case Ident(name) => ??
-        case _ => NoType
-      }
+      def lookupInQualifier(qual: Tree, name: Name): Symbol = (
+        if (name == nme.ERROR || qual.tpe.widen.isErroneous)
+          NoSymbol
+        else lookupInOwner(qual.tpe.typeSymbol, name) orElse {
+          NotAMemberError(tree, qual, name)
+          NoSymbol
+        }
+      )
 
       def typedAnnotated(atd: Annotated): Tree = {
         val ann = atd.annot
         val arg1 = typed(atd.arg, mode, pt)
-        /** mode for typing the annotation itself */
-        val annotMode = mode & ~TYPEmode | EXPRmode
+        /* mode for typing the annotation itself */
+        val annotMode = (mode &~ TYPEmode) | EXPRmode
 
         def resultingTypeTree(tpe: Type) = {
           // we need symbol-ful originals for reification
@@ -4150,45 +4037,14 @@ trait Typers extends Modes with Adaptations with Tags {
         if (arg1.isType) {
           // make sure the annotation is only typechecked once
           if (ann.tpe == null) {
-            // an annotated type
-            val selfsym =
-              if (!settings.selfInAnnots.value)
-                NoSymbol
-              else
-                arg1.tpe.selfsym orElse {
-                  /* Implementation limitation: Currently this
-                   * can cause cyclical reference errors even
-                   * when the self symbol is not referenced at all.
-                   * Surely at least some of these cases can be
-                   * fixed by proper use of LazyType's.  Lex tinkered
-                   * on this but did not succeed, so is leaving
-                   * it alone for now. Example code with the problem:
-                   *  class peer extends Annotation
-                   *  class NPE[T <: NPE[T] @peer]
-                   *
-                   * (Note: -Yself-in-annots must be on to see the problem)
-                   * */
-                  ( context.owner
-                      newLocalDummy (ann.pos)
-                      newValue (nme.self, ann.pos)
-                      setInfo (arg1.tpe.withoutAnnotations)
-                  )
-                }
-
-            val ainfo = typedAnnotation(ann, annotMode, selfsym)
-            val atype0 = arg1.tpe.withAnnotation(ainfo)
-            val atype =
-              if ((selfsym != NoSymbol) && (ainfo.refsSymbol(selfsym)))
-                atype0.withSelfsym(selfsym)
-              else
-                atype0 // do not record selfsym if
-                       // this annotation did not need it
+            val ainfo = typedAnnotation(ann, annotMode)
+            val atype = arg1.tpe.withAnnotation(ainfo)
 
             if (ainfo.isErroneous)
               // Erroneous annotations were already reported in typedAnnotation
               arg1  // simply drop erroneous annotations
             else {
-              ann.tpe = atype
+              ann setType atype
               resultingTypeTree(atype)
             }
           } else {
@@ -4199,7 +4055,7 @@ trait Typers extends Modes with Adaptations with Tags {
         else {
           if (ann.tpe == null) {
             val annotInfo = typedAnnotation(ann, annotMode)
-            ann.tpe = arg1.tpe.withAnnotation(annotInfo)
+            ann setType arg1.tpe.withAnnotation(annotInfo)
           }
           val atype = ann.tpe
           Typed(arg1, resultingTypeTree(atype)) setPos tree.pos setType atype
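
    typedAnnotated distinguishes an annotated type (the argument is a type) from an annotated expression (the argument is a term); in both cases the annotation itself is typed in expression mode. A minimal sketch, with an illustrative annotation class:

        import scala.annotation.StaticAnnotation

        class tagged(label: String) extends StaticAnnotation   // illustrative annotation

        object AnnotatedSketch {
          type Key = String @tagged("primary")                  // annotated type

          def head(xs: List[Int]): Int =
            (xs: @unchecked) match {                            // annotated expression
              case y :: _ => y
            }
        }
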
@@ -4223,7 +4079,7 @@ trait Typers extends Modes with Adaptations with Tags {
             if (name != tpnme.WILDCARD) namer.enterInScope(sym)
             else context.scope.enter(sym)
 
-            tree setSymbol sym setType sym.tpe
+            tree setSymbol sym setType sym.tpeHK
 
           case name: TermName  =>
             val sym =
@@ -4231,14 +4087,17 @@ trait Typers extends Modes with Adaptations with Tags {
               else context.owner.newValue(name, tree.pos)
 
             if (name != nme.WILDCARD) {
-              if ((mode & ALTmode) != 0) VariableInPatternAlternativeError(tree)
+              if (context.inPatAlternative)
+                VariableInPatternAlternativeError(tree)
+
               namer.enterInScope(sym)
             }
 
             val body1 = typed(body, mode, pt)
+            val impliedType = patmat.binderTypeImpliedByPattern(body1, pt, sym) // SI-1503, SI-5204
             val symTp =
-              if (treeInfo.isSequenceValued(body)) seqType(body1.tpe)
-              else body1.tpe
+              if (treeInfo.isSequenceValued(body)) seqType(impliedType)
+              else impliedType
             sym setInfo symTp
 
             // have to imperatively set the symbol for this bind to keep it in sync with the symbols used in the body of a case
@@ -4256,16 +4115,16 @@ trait Typers extends Modes with Adaptations with Tags {
 
       def typedArrayValue(tree: ArrayValue) = {
         val elemtpt1 = typedType(tree.elemtpt, mode)
-        val elems1 = tree.elems mapConserve (elem => typed(elem, mode, elemtpt1.tpe))
-        treeCopy.ArrayValue(tree, elemtpt1, elems1)
-          .setType(
-            (if (isFullyDefined(pt) && !phase.erasedTypes) pt
-             else arrayType(elemtpt1.tpe)).notNull)
+        val elems1   = tree.elems mapConserve (elem => typed(elem, mode, elemtpt1.tpe))
+        // see run/t6126 for an example where `pt` does not suffice (tagged types)
+        val tpe1     = if (isFullyDefined(pt) && !phase.erasedTypes) pt else arrayType(elemtpt1.tpe)
+
+        treeCopy.ArrayValue(tree, elemtpt1, elems1) setType tpe1
       }
 
       def typedAssign(lhs: Tree, rhs: Tree): Tree = {
         // see SI-7617 for an explanation of why macro expansion is suppressed
-        def typedLhs(lhs: Tree) = typed(lhs, EXPRmode | LHSmode, WildcardType)
+        def typedLhs(lhs: Tree) = typed(lhs, EXPRmode | LHSmode)
         val lhs1    = unsuppressMacroExpansion(typedLhs(suppressMacroExpansion(lhs)))
         val varsym  = lhs1.symbol
 
@@ -4280,7 +4139,7 @@ trait Typers extends Modes with Adaptations with Tags {
         if (treeInfo.mayBeVarGetter(varsym)) {
           lhs1 match {
             case treeInfo.Applied(Select(qual, name), _, _) =>
-              val sel = Select(qual, nme.getterToSetter(name.toTermName)) setPos lhs.pos
+              val sel = Select(qual, name.setterName) setPos lhs.pos
               val app = Apply(sel, List(rhs)) setPos tree.pos
               return typed(app, mode, pt)
 
@@ -4291,12 +4150,12 @@ trait Typers extends Modes with Adaptations with Tags {
 //        // setter-rewrite has been done above, so rule out methods here, but, wait a minute, why are we assigning to non-variables after erasure?!
 //        (phase.erasedTypes && varsym.isValue && !varsym.isMethod)) {
         if (varsym.isVariable || varsym.isValue && phase.erasedTypes) {
-          val rhs1 = typed(rhs, EXPRmode | BYVALmode, lhs1.tpe)
-          treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitClass.tpe
+          val rhs1 = typedByValueExpr(rhs, lhs1.tpe)
+          treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitTpe
         }
         else if(dyna.isDynamicallyUpdatable(lhs1)) {
-          val rhs1 = typed(rhs, EXPRmode | BYVALmode, WildcardType)
-          val t = atPos(lhs1.pos.withEnd(rhs1.pos.endOrPoint)) {
+          val rhs1 = typedByValueExpr(rhs)
+          val t = atPos(lhs1.pos.withEnd(rhs1.pos.end)) {
             Apply(lhs1, List(rhs1))
           }
           dyna.wrapErrors(t, _.typed1(t, mode, pt))
@@ -4304,51 +4163,53 @@ trait Typers extends Modes with Adaptations with Tags {
         else fail()
       }
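
    The three successful outcomes of typedAssign correspond at the source level to (sketch, classes are illustrative): plain assignment to a variable, assignment through a getter/setter pair rewritten to the setter, and assignment on a Dynamic receiver rewritten to updateDynamic.

        import scala.language.dynamics

        class Cell {
          private var v = 0
          def value: Int = v
          def value_=(n: Int): Unit = v = n
        }

        class Store extends Dynamic {
          def updateDynamic(name: String)(x: Any): Unit = println(s"$name := $x")
        }

        object AssignSketch {
          var w = 1
          w = 2                 // ordinary assignment to a variable

          val c = new Cell
          c.value = 5           // rewritten to c.value_=(5)

          val s = new Store
          s.threshold = 10      // rewritten to s.updateDynamic("threshold")(10)
        }
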
 
-      def typedIf(tree: If) = {
-        val cond1 = checkDead(typed(tree.cond, EXPRmode | BYVALmode, BooleanClass.tpe))
-        val thenp = tree.thenp
-        val elsep = tree.elsep
-        if (elsep.isEmpty) { // in the future, should be unnecessary
-          val thenp1 = typed(thenp, UnitClass.tpe)
-          treeCopy.If(tree, cond1, thenp1, elsep) setType thenp1.tpe
-        } else {
-          var thenp1 = typed(thenp, pt)
-          var elsep1 = typed(elsep, pt)
-          def thenTp = packedType(thenp1, context.owner)
-          def elseTp = packedType(elsep1, context.owner)
-
-          // println("typedIf: "+(thenp1.tpe, elsep1.tpe, ptOrLub(List(thenp1.tpe, elsep1.tpe)),"\n", thenTp, elseTp, thenTp =:= elseTp))
-          val (owntype, needAdapt) =
-            // in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway
-            // in the special (though common) case where the types are equal, it pays to pack before comparing
-            // especially virtpatmat needs more aggressive unification of skolemized types
-            // this breaks src/library/scala/collection/immutable/TrieIterator.scala
-            if ( opt.virtPatmat && !isPastTyper
-              && thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, continations break if you by pass them like this)
-              && thenTp =:= elseTp
-               ) (thenp1.tpe.deconst, false) // use unpacked type. Important to deconst, as is done in ptOrLub, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331)
-            // TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
-            else ptOrLub(thenp1.tpe :: elsep1.tpe :: Nil, pt)
-
-          if (needAdapt) { //isNumericValueType(owntype)) {
-            thenp1 = adapt(thenp1, mode, owntype)
-            elsep1 = adapt(elsep1, mode, owntype)
-          }
-          treeCopy.If(tree, cond1, thenp1, elsep1) setType owntype
-        }
-      }
-
-      // under -Xexperimental (and not -Xoldpatmat), and when there's a suitable __match in scope, virtualize the pattern match
+      def typedIf(tree: If): If = {
+        val cond1 = checkDead(typedByValueExpr(tree.cond, BooleanTpe))
+        // One-legged ifs don't need a lot of analysis
+        if (tree.elsep.isEmpty)
+          return treeCopy.If(tree, cond1, typed(tree.thenp, UnitTpe), tree.elsep) setType UnitTpe
+
+        val thenp1 = typed(tree.thenp, pt)
+        val elsep1 = typed(tree.elsep, pt)
+
+        // in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway
+        // in the special (though common) case where the types are equal, it pays to pack before comparing
+        // especially virtpatmat needs more aggressive unification of skolemized types
+        // this breaks src/library/scala/collection/immutable/TrieIterator.scala
+        // annotated types need to be lubbed regardless (at least, continuations break if you bypass them like this)
+        def samePackedTypes = (
+             !isPastTyper
+          && thenp1.tpe.annotations.isEmpty
+          && elsep1.tpe.annotations.isEmpty
+          && packedType(thenp1, context.owner) =:= packedType(elsep1, context.owner)
+        )
+        def finish(ownType: Type) = treeCopy.If(tree, cond1, thenp1, elsep1) setType ownType
+        // TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
+        // @PP: This was doing the samePackedTypes check BEFORE the isFullyDefined check,
+        // which based on everything I see everywhere else was a bug. I reordered it.
+        if (isFullyDefined(pt))
+          finish(pt)
+        // Important to deconst, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331)
+        else thenp1.tpe.deconst :: elsep1.tpe.deconst :: Nil match {
+          case tp :: _ if samePackedTypes     => finish(tp)
+          case tpes if sameWeakLubAsLub(tpes) => finish(lub(tpes))
+          case tpes                           =>
+            val lub = weakLub(tpes)
+            treeCopy.If(tree, cond1, adapt(thenp1, mode, lub), adapt(elsep1, mode, lub)) setType lub
+        }
+      }
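
    The branch-type computation above plays out like this at the source level (sketch): a fully defined expected type wins outright; otherwise the branch types are deconst-ed and lubbed, with a weak lub widening numeric branches.

        object IfSketch {
          def pick(cond: Boolean): AnyVal =
            if (cond) 1 else 'a'                              // pt (AnyVal) is fully defined and used as-is

          val n = if (util.Random.nextBoolean()) 0 else 0     // Int, not the constant type 0 (SI-6331)
          val d = if (util.Random.nextBoolean()) 1 else 2.0   // weak lub of Int and Double: Double
        }
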
+
+      // When there's a suitable __match in scope, virtualize the pattern match
       // otherwise, type the Match and leave it until phase `patmat` (immediately after typer)
       // empty-selector matches are transformed into synthetic PartialFunction implementations when the expected type demands it
       def typedVirtualizedMatch(tree: Match): Tree = {
         val selector = tree.selector
         val cases = tree.cases
         if (selector == EmptyTree) {
-          if (newPatternMatching && (pt.typeSymbol == PartialFunctionClass))
+          if (pt.typeSymbol == PartialFunctionClass)
             synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, tree, mode, pt)
           else {
-            val arity = if (isFunctionType(pt)) pt.normalize.typeArgs.length - 1 else 1
+            val arity = if (isFunctionType(pt)) pt.dealiasWiden.typeArgs.length - 1 else 1
             val params = for (i <- List.range(0, arity)) yield
               atPos(tree.pos.focusStart) {
                 ValDef(Modifiers(PARAM | SYNTHETIC),
@@ -4356,7 +4217,11 @@ trait Typers extends Modes with Adaptations with Tags {
               }
             val ids = for (p <- params) yield Ident(p.name)
             val selector1 = atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
-            val body = treeCopy.Match(tree, selector1, cases)
+            // SI-8120 If we don't duplicate the cases, the original Match node will share trees with ones that
+            //         receive symbols owned by this function. However if, after a silent mode session, we discard
+            //         this Function and try a different approach (e.g. applying a view to the receiver), we end up
+            //         with orphaned symbols, which blow up far down the pipeline (or can be detected with -Ycheck:typer).
+            val body = treeCopy.Match(tree, selector1, (cases map duplicateAndKeepPositions).asInstanceOf[List[CaseDef]])
             typed1(atPos(tree.pos) { Function(params, body) }, mode, pt)
           }
         } else
@@ -4375,9 +4240,9 @@ trait Typers extends Modes with Adaptations with Tags {
           val DefDef(_, name, _, _, restpt, _) = enclMethod.tree
           if (restpt.tpe eq null) {
             ReturnWithoutTypeError(tree, enclMethod.owner)
-          } else {
-            context.enclMethod.returnsSeen = true
-            val expr1: Tree = typed(expr, EXPRmode | BYVALmode | RETmode, restpt.tpe)
+          }
+          else {
+            val expr1 = context withinReturnExpr typedByValueExpr(expr, restpt.tpe)
             // Warn about returning a value if no value can be returned.
             if (restpt.tpe.typeSymbol == UnitClass) {
               // The typing in expr1 says expr is Unit (it has already been coerced if
@@ -4387,7 +4252,7 @@ trait Typers extends Modes with Adaptations with Tags {
                 unit.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded")
             }
             val res = treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
-            val tp = pluginsTypedReturn(NothingClass.tpe, this, res, restpt.tpe)
+            val tp = pluginsTypedReturn(NothingTpe, this, res, restpt.tpe)
             res.setType(tp)
           }
         }
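
    The warning wired in above fires when a method declared to return Unit contains a return with a value; the value is discarded. Sketch:

        object ReturnSketch {
          def log(msg: String): Unit = {
            if (msg.isEmpty)
              return 42   // warning: enclosing method log has result type Unit: return value discarded
            println(msg)
          }
        }
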
@@ -4404,7 +4269,7 @@ trait Typers extends Modes with Adaptations with Tags {
           // given a dealiased type.
           val tpt0 = typedTypeConstructor(tpt) modifyType (_.dealias)
           if (checkStablePrefixClassType(tpt0))
-            if (tpt0.hasSymbol && !tpt0.symbol.typeParams.isEmpty) {
+            if (tpt0.hasSymbolField && !tpt0.symbol.typeParams.isEmpty) {
               context.undetparams = cloneSymbols(tpt0.symbol.typeParams)
               notifyUndetparamsAdded(context.undetparams)
               TypeTree().setOriginal(tpt0)
@@ -4413,8 +4278,8 @@ trait Typers extends Modes with Adaptations with Tags {
           else tpt0
         }
 
-        /** If current tree <tree> appears in <val x(: T)? = <tree>>
-         *  return `tp with x.type' else return `tp`.
+        /* If current tree <tree> appears in <val x(: T)? = <tree>>
+         * return `tp with x.type' else return `tp`.
          */
         def narrowRhs(tp: Type) = { val sym = context.tree.symbol
           context.tree match {
@@ -4434,7 +4299,7 @@ trait Typers extends Modes with Adaptations with Tags {
           NotAMemberError(tpt, TypeTree(tp), nme.CONSTRUCTOR)
           setError(tpt)
         }
-        else if (!(  tp == sym.thisSym.tpe // when there's no explicit self type -- with (#3612) or without self variable
+        else if (!(  tp == sym.typeOfThis // when there's no explicit self type -- with (#3612) or without self variable
                      // sym.thisSym.tpe == tp.typeOfThis (except for objects)
                   || narrowRhs(tp) <:< tp.typeOfThis
                   || phase.erasedTypes
@@ -4464,36 +4329,15 @@ trait Typers extends Modes with Adaptations with Tags {
           else adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
         case MethodType(formals, _) =>
           if (isFunctionType(pt)) expr1
-          else expr1 match {
-            case Select(qual, name) if (forMSIL &&
-                                        pt != WildcardType &&
-                                        pt != ErrorType &&
-                                        isSubType(pt, DelegateClass.tpe)) =>
-              val scalaCaller = newScalaCaller(pt)
-              addScalaCallerInfo(scalaCaller, expr1.symbol)
-              val n: Name = scalaCaller.name
-              val del = Ident(DelegateClass) setType DelegateClass.tpe
-              val f = Select(del, n)
-              //val f1 = TypeApply(f, List(Ident(pt.symbol) setType pt))
-              val args: List[Tree] = if(expr1.symbol.isStatic) List(Literal(Constant(null)))
-                                     else List(qual) // where the scala-method is located
-              val rhs = Apply(f, args)
-              typed(rhs)
-            case _ =>
-              adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
-          }
+          else adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
         case ErrorType =>
           expr1
         case _ =>
           UnderscoreEtaError(expr1)
       }
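
    typedEta is what types the `m _` method-value syntax: a reference to a method followed by `_` is expanded to a function value, and `_` applied to anything that is not a method falls into the UnderscoreEtaError case. Sketch:

        object EtaSketch {
          def add(x: Int, y: Int): Int = x + y

          val f = add _                                    // eta-expansion: f is (Int, Int) => Int
          val g: (Double, Double) => Double = math.max _   // the expected type picks the overload
          // val bad = "hi" _                              // not a method: UnderscoreEtaError
        }
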
 
-      /**
-       *  @param args ...
-       *  @return     ...
-       */
-      def tryTypedArgs(args: List[Tree], mode: Int): Option[List[Tree]] = {
-        val c = context.makeSilent(false)
+      def tryTypedArgs(args: List[Tree], mode: Mode): Option[List[Tree]] = {
+        val c = context.makeSilent(reportAmbiguousErrors = false)
         c.retyping = true
         try {
           val res = newTyper(c).typedArgs(args, mode)
@@ -4502,184 +4346,156 @@ trait Typers extends Modes with Adaptations with Tags {
           case ex: CyclicReference =>
             throw ex
           case te: TypeError =>
-            // @H some of typer erros can still leak,
+            // @H some of typer errors can still leak,
             // for instance in continuations
             None
-        } finally {
-          c.flushBuffer()
         }
       }
 
-      /** Try to apply function to arguments; if it does not work, try to convert Java raw to existentials, or try to
-       *  insert an implicit conversion.
+      /* Try to apply function to arguments; if it does not work, try to convert Java raw to existentials, or try to
+       * insert an implicit conversion.
        */
       def tryTypedApply(fun: Tree, args: List[Tree]): Tree = {
         val start = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
 
-        def onError(typeError: AbsTypeError): Tree = {
-            if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, start)
+        def onError(typeErrors: Seq[AbsTypeError]): Tree = {
+          if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, start)
 
-            // If the problem is with raw types, copnvert to existentials and try again.
-            // See #4712 for a case where this situation arises,
-            if ((fun.symbol ne null) && fun.symbol.isJavaDefined) {
-              val newtpe = rawToExistential(fun.tpe)
-              if (fun.tpe ne newtpe) {
-                // println("late cooking: "+fun+":"+fun.tpe) // DEBUG
-                return tryTypedApply(fun setType newtpe, args)
-              }
+          // If the problem is with raw types, convert to existentials and try again.
+          // See #4712 for a case where this situation arises.
+          if ((fun.symbol ne null) && fun.symbol.isJavaDefined) {
+            val newtpe = rawToExistential(fun.tpe)
+            if (fun.tpe ne newtpe) {
+              // println("late cooking: "+fun+":"+fun.tpe) // DEBUG
+              return tryTypedApply(fun setType newtpe, args)
             }
+          }
+          def treesInResult(tree: Tree): List[Tree] = tree :: (tree match {
+            case Block(_, r)                        => treesInResult(r)
+            case Match(_, cases)                    => cases
+            case CaseDef(_, _, r)                   => treesInResult(r)
+            case Annotated(_, r)                    => treesInResult(r)
+            case If(_, t, e)                        => treesInResult(t) ++ treesInResult(e)
+            case Try(b, catches, _)                 => treesInResult(b) ++ catches
+            case Typed(r, Function(Nil, EmptyTree)) => treesInResult(r)
+            case Select(qual, name)                 => treesInResult(qual)
+            case Apply(fun, args)                   => treesInResult(fun) ++ args.flatMap(treesInResult)
+            case TypeApply(fun, args)               => treesInResult(fun) ++ args.flatMap(treesInResult)
+            case _                                  => Nil
+          })
+          def errorInResult(tree: Tree) = treesInResult(tree) exists (err => typeErrors.exists(_.errPos == err.pos))
 
-            def treesInResult(tree: Tree): List[Tree] = tree :: (tree match {
-              case Block(_, r)                        => treesInResult(r)
-              case Match(_, cases)                    => cases
-              case CaseDef(_, _, r)                   => treesInResult(r)
-              case Annotated(_, r)                    => treesInResult(r)
-              case If(_, t, e)                        => treesInResult(t) ++ treesInResult(e)
-              case Try(b, catches, _)                 => treesInResult(b) ++ catches
-              case Typed(r, Function(Nil, EmptyTree)) => treesInResult(r)
-              case _                                  => Nil
-            })
-            def errorInResult(tree: Tree) = treesInResult(tree) exists (_.pos == typeError.errPos)
-
-            val retry = (typeError.errPos != null) && (fun :: tree :: args exists errorInResult)
-            printTyping {
-              val funStr = ptTree(fun) + " and " + (args map ptTree mkString ", ")
-              if (retry) "second try: " + funStr
-              else "no second try: " + funStr + " because error not in result: " + typeError.errPos+"!="+tree.pos
-            }
-            if (retry) {
-              val Select(qual, name) = fun
-              tryTypedArgs(args, forArgMode(fun, mode)) match {
-                case Some(args1) =>
-                  val qual1 =
-                    if (!pt.isError) adaptToArguments(qual, name, args1, pt, true, true)
-                    else qual
-                  if (qual1 ne qual) {
-                    val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
-                    return typed1(tree1, mode | SNDTRYmode, pt)
-                  }
-                case _ => ()
-              }
+          val retry = (typeErrors.forall(_.errPos != null)) && (fun :: tree :: args exists errorInResult)
+          typingStack.printTyping({
+            val funStr = ptTree(fun) + " and " + (args map ptTree mkString ", ")
+            if (retry) "second try: " + funStr
+            else "no second try: " + funStr + " because error not in result: " + typeErrors.head.errPos+"!="+tree.pos
+          })
+          if (retry) {
+            val Select(qual, name) = fun
+            tryTypedArgs(args, forArgMode(fun, mode)) match {
+              case Some(args1) =>
+                val qual1 =
+                  if (!pt.isError) adaptToArguments(qual, name, args1, pt, reportAmbiguous = true, saveErrors = true)
+                  else qual
+                if (qual1 ne qual) {
+                  val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
+                  return context withinSecondTry typed1(tree1, mode, pt)
+                }
+              case _ => ()
             }
-            issue(typeError)
-            setError(treeCopy.Apply(tree, fun, args))
+          }
+          typeErrors foreach issue
+          setError(treeCopy.Apply(tree, fun, args))
         }
 
-        silent(_.doTypedApply(tree, fun, args, mode, pt)) match {
-          case SilentResultValue(t) =>
-            t
-          case SilentTypeError(err) =>
-            onError(err)
-        }
+        silent(_.doTypedApply(tree, fun, args, mode, pt)) orElse onError
       }
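
    A sketch of the second try above (class and conversion names are illustrative): when qual.name(args) fails and the errors sit inside the result, the arguments are re-typed on their own and the qualifier is adapted to them through an implicit view before the failure is reported.

        import scala.language.implicitConversions

        object SecondTrySketch {
          class Meters(val n: Int) { def plus(other: Meters): Meters = new Meters(n + other.n) }
          class Feet(val n: Int)   { def plus(other: Feet): Feet     = new Feet(n + other.n) }

          implicit def metersToFeet(m: Meters): Feet = new Feet(m.n * 3)

          val m = new Meters(2)
          // Meters#plus does not accept Feet, so after the failed first try the typer
          // adapts the qualifier to the arguments: metersToFeet(m).plus(new Feet(1))
          val total: Feet = m.plus(new Feet(1))
        }
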
 
       def normalTypedApply(tree: Tree, fun: Tree, args: List[Tree]) = {
+        // TODO: replace `fun.symbol.isStable` by `treeInfo.isStableIdentifierPattern(fun)`
         val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable
-        if (args.isEmpty && stableApplication && isPatternMode) {
-          // treat stable function applications f() as expressions.
-          //
-          // [JZ] According to Martin, this is related to the old pattern matcher, which
-          //      needs to typecheck after a the translation of `x.f` to `x.f()` in a prior
-          //      compilation phase. As part of SI-7377, this has been tightened with `args.isEmpty`,
-          //      but we should remove it altogether in Scala 2.11.
-          typed1(tree, mode & ~PATTERNmode | EXPRmode, pt)
-        } else {
-          val funpt = if (isPatternMode) pt else WildcardType
-          val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
-          val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null
-
-          def onError(reportError: => Tree): Tree = {
-              fun match {
-                case Select(qual, name)
-                if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
-                  val qual1 = typedQualifier(qual)
-                  if (treeInfo.isVariableOrGetter(qual1)) {
-                    if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
-                    convertToAssignment(fun, qual1, name, args)
-                  } else {
-                    if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
-                      reportError
-                  }
-                case _ =>
-                  if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
-                  reportError
-              }
-          }
-          silent(_.typed(fun, forFunMode(mode), funpt),
-                 if ((mode & EXPRmode) != 0) false else context.ambiguousErrors,
-                 if ((mode & EXPRmode) != 0) tree else context.tree) match {
-            case SilentResultValue(fun1) =>
-              val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
-              if (Statistics.canEnable) Statistics.incCounter(typedApplyCount)
-              def isImplicitMethod(tpe: Type) = tpe match {
-                case mt: MethodType => mt.isImplicit
-                case _ => false
-              }
-              val useTry = (
-                   !isPastTyper
-                && fun2.isInstanceOf[Select]
-                && !isImplicitMethod(fun2.tpe)
-                && ((fun2.symbol eq null) || !fun2.symbol.isConstructor)
-                && (mode & (EXPRmode | SNDTRYmode)) == EXPRmode
-              )
-              val res =
-                if (useTry) tryTypedApply(fun2, args)
-                else doTypedApply(tree, fun2, args, mode, pt)
-
-            /*
-              if (fun2.hasSymbol && fun2.symbol.isConstructor && (mode & EXPRmode) != 0) {
-                res.tpe = res.tpe.notNull
-              }
-              */
-              // TODO: In theory we should be able to call:
-              //if (fun2.hasSymbol && fun2.symbol.name == nme.apply && fun2.symbol.owner == ArrayClass) {
-              // But this causes cyclic reference for Array class in Cleanup. It is easy to overcome this
-              // by calling ArrayClass.info here (or some other place before specialize).
-              if (fun2.symbol == Array_apply && !res.isErrorTyped) {
-                val checked = gen.mkCheckInit(res)
-                // this check is needed to avoid infinite recursion in Duplicators
-                // (calling typed1 more than once for the same tree)
-                if (checked ne res) typed { atPos(tree.pos)(checked) }
-                else res
-              } else
-                res
-            case SilentTypeError(err) =>
-              onError({issue(err); setError(tree)})
-          }
+        val funpt = if (mode.inPatternMode) pt else WildcardType
+        val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
+        val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null
+
+        def onError(reportError: => Tree): Tree = fun match {
+          case Select(qual, name) if !mode.inPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
+            val qual1 = typedQualifier(qual)
+            if (treeInfo.isVariableOrGetter(qual1)) {
+              if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
+              convertToAssignment(fun, qual1, name, args)
+            }
+            else {
+              if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
+                reportError
+            }
+          case _ =>
+            if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
+            reportError
+        }
+        val silentResult = silent(
+          op                    = _.typed(fun, mode.forFunMode, funpt),
+          reportAmbiguousErrors = !mode.inExprMode && context.ambiguousErrors,
+          newtree               = if (mode.inExprMode) tree else context.tree
+        )
+        silentResult match {
+          case SilentResultValue(fun1) =>
+            val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
+            if (Statistics.canEnable) Statistics.incCounter(typedApplyCount)
+            val noSecondTry = (
+                 isPastTyper
+              || context.inSecondTry
+              || (fun2.symbol ne null) && fun2.symbol.isConstructor
+              || isImplicitMethodType(fun2.tpe)
+            )
+            val isFirstTry = fun2 match {
+              case Select(_, _) => !noSecondTry && mode.inExprMode
+              case _            => false
+            }
+            if (isFirstTry)
+              tryTypedApply(fun2, args)
+            else
+              doTypedApply(tree, fun2, args, mode, pt)
+          case err: SilentTypeError =>
+            onError({
+              err.reportableErrors foreach issue
+              args foreach (arg => typed(arg, mode, ErrorType))
+              setError(tree)
+            })
         }
       }
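
    The onError branch above is the op-assignment fallback: if qual.op=(args) has no applicable member and qual is a variable or a getter, the call is converted to an assignment. Sketch with an illustrative class:

        object OpAssignSketch {
          class Counter(val n: Int) { def +(i: Int): Counter = new Counter(n + i) }

          var c = new Counter(0)
          // Counter has no `+=` member, so after the failed application the typer
          // rewrites this to: c = c + 5
          c += 5
        }
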
 
-      def typedApply(tree: Apply) = {
-        val fun = tree.fun
-        val args = tree.args
-        fun match {
-          case Block(stats, expr) =>
-            typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
-          case _ =>
-            normalTypedApply(tree, fun, args) match {
-              case Apply(Select(New(tpt), name), args)
-              if (tpt.tpe != null &&
-                tpt.tpe.typeSymbol == ArrayClass &&
-                args.length == 1 &&
-                erasure.GenericArray.unapply(tpt.tpe).isDefined) => // !!! todo simplify by using extractor
-                // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
-                // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len), where Array HK gets applied (N-1) times
-                // [Eugene] no more MaxArrayDims. ClassTags are flexible enough to allow creation of arrays of arbitrary dimensionality (w.r.t JVM restrictions)
-                val Some((level, componentType)) = erasure.GenericArray.unapply(tpt.tpe)
-                val tagType = List.iterate(componentType, level)(tpe => appliedType(ArrayClass.toTypeConstructor, List(tpe))).last
-                atPos(tree.pos) {
-                  val tag = resolveClassTag(tree.pos, tagType)
-                  if (tag.isEmpty) MissingClassTagError(tree, tagType)
-                  else typed(new ApplyToImplicitArgs(Select(tag, nme.newArray), args))
+      // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
+      // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len)
+      // where Array HK gets applied (N-1) times
+      object ArrayInstantiation {
+        def unapply(tree: Apply) = tree match {
+          case Apply(Select(New(tpt), name), arg :: Nil) if tpt.tpe != null && tpt.tpe.typeSymbol == ArrayClass =>
+            Some(tpt.tpe) collect {
+              case erasure.GenericArray(level, componentType) =>
+                val tagType = (1 until level).foldLeft(componentType)((res, _) => arrayType(res))
+
+                resolveClassTag(tree.pos, tagType) match {
+                  case EmptyTree => MissingClassTagError(tree, tagType)
+                  case tag       => atPos(tree.pos)(new ApplyToImplicitArgs(Select(tag, nme.newArray), arg :: Nil))
                 }
-              case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => //SI-5696
-                TooManyArgumentListsForConstructor(tree)
-              case tree1 =>
-                tree1
             }
+          case _ => None
         }
       }
 
+      def typedApply(tree: Apply) = tree match {
+        case Apply(Block(stats, expr), args) =>
+          typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
+        case Apply(fun, args) =>
+          normalTypedApply(tree, fun, args) match {
+            case ArrayInstantiation(tree1)                                           => typed(tree1, mode, pt)
+            case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => TooManyArgumentListsForConstructor(tree) //SI-5696
+            case tree1                                                               => tree1
+          }
+      }
+
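
    The ArrayInstantiation extractor is what lets generic array creation go through ClassTags: new Array[T](n) in a context that can summon a ClassTag[T] is rewritten into a newArray call on that tag. Sketch:

        import scala.reflect.ClassTag

        object ArraySketch {
          // rewritten roughly to: implicitly[ClassTag[T]].newArray(n)
          def zeroes[T: ClassTag](n: Int): Array[T] = new Array[T](n)

          val xs: Array[String] = zeroes[String](3)
        }
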
       def convertToAssignment(fun: Tree, qual: Tree, name: Name, args: List[Tree]): Tree = {
         val prefix = name.toTermName stripSuffix nme.EQL
         def mkAssign(vble: Tree): Tree =
@@ -4733,8 +4549,6 @@ trait Typers extends Modes with Adaptations with Tags {
           case This(_) => qual1.symbol
           case _ => qual1.tpe.typeSymbol
         }
-        //println(clazz+"/"+qual1.tpe.typeSymbol+"/"+qual1)
-
         def findMixinSuper(site: Type): Type = {
           var ps = site.parents filter (_.typeSymbol.name == mix)
           if (ps.isEmpty)
@@ -4742,11 +4556,6 @@ trait Typers extends Modes with Adaptations with Tags {
           if (ps.isEmpty) {
             debuglog("Fatal: couldn't find site " + site + " in " + site.parents.map(_.typeSymbol.name))
             if (phase.erasedTypes && context.enclClass.owner.isImplClass) {
-              // println(qual1)
-              // println(clazz)
-              // println(site)
-              // println(site.parents)
-              // println(mix)
               // the reference to super class got lost during erasure
               restrictionError(tree.pos, unit, "traits may not select fields or methods from super[C] where C is a class")
               ErrorType
@@ -4764,7 +4573,7 @@ trait Typers extends Modes with Adaptations with Tags {
 
         val owntype = (
           if (!mix.isEmpty) findMixinSuper(clazz.tpe)
-          else if ((mode & SUPERCONSTRmode) != 0) clazz.info.firstParent
+          else if (context.inSuperInit) clazz.info.firstParent
           else intersectionType(clazz.info.parents)
         )
         treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype)
@@ -4778,15 +4587,29 @@ trait Typers extends Modes with Adaptations with Tags {
             if (isStableContext(tree, mode, pt)) tree setType clazz.thisType else tree
         }
 
-      /** Attribute a selection where <code>tree</code> is <code>qual.name</code>.
-       *  <code>qual</code> is already attributed.
-       *
-       *  @param qual ...
-       *  @param name ...
-       *  @return     ...
+      /* Attribute a selection where `tree` is `qual.name`.
+       * `qual` is already attributed.
        */
       def typedSelect(tree: Tree, qual: Tree, name: Name): Tree = {
-        def asDynamicCall = dyna.mkInvoke(context.tree, tree, qual, name) map { t =>
+        val t = typedSelectInternal(tree, qual, name)
+        // Checking for OverloadedTypes being handed out after overloading
+        // resolution has already happened.
+        if (isPastTyper) t.tpe match {
+          case OverloadedType(pre, alts) =>
+            if (alts forall (s => (s.owner == ObjectClass) || (s.owner == AnyClass) || isPrimitiveValueClass(s.owner))) ()
+            else if (settings.debug) printCaller(
+              s"""|Select received overloaded type during $phase, but typer is over.
+                  |If this type reaches the backend, we are likely doomed to crash.
+                  |$t has these overloads:
+                  |${alts map (s => "  " + s.defStringSeenAs(pre memberType s)) mkString "\n"}
+                  |""".stripMargin
+            )("")
+          case _ =>
+        }
+        t
+      }
+      def typedSelectInternal(tree: Tree, qual: Tree, name: Name): Tree = {
+        def asDynamicCall = dyna.mkInvoke(context, tree, qual, name) map { t =>
           dyna.wrapErrors(t, (_.typed1(t, mode, pt)))
         }
 
@@ -4794,82 +4617,65 @@ trait Typers extends Modes with Adaptations with Tags {
           // symbol not found? --> try to convert implicitly to a type that does have the required
           // member.  Added `| PATTERNmode` to allow enrichment in patterns (so we can add e.g., an
           // xml member to StringContext, which in turn has an unapply[Seq] method)
-          if (name != nme.CONSTRUCTOR && inExprModeOr(mode, PATTERNmode)) {
-            val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, true, true)
+          if (name != nme.CONSTRUCTOR && mode.inAny(EXPRmode | PATTERNmode)) {
+            val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, reportAmbiguous = true, saveErrors = true)
             if ((qual1 ne qual) && !qual1.isErrorTyped)
               return typed(treeCopy.Select(tree, qual1, name), mode, pt)
           }
           NoSymbol
         }
         if (phase.erasedTypes && qual.isInstanceOf[Super] && tree.symbol != NoSymbol)
-          qual.tpe = tree.symbol.owner.tpe
+          qual setType tree.symbol.owner.tpe
 
         if (!reallyExists(sym)) {
           def handleMissing: Tree = {
-            if (context.unit.isJava && name.isTypeName) {
-              // SI-3120 Java uses the same syntax, A.B, to express selection from the
-              // value A and from the type A. We have to try both.
-              val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
-              if (tree1 != EmptyTree) return typed1(tree1, mode, pt)
-            }
-
-            // try to expand according to Dynamic rules.
-            asDynamicCall foreach (x => return x)
-
-            debuglog(
-              "qual = " + qual + ":" + qual.tpe +
-                "\nSymbol=" + qual.tpe.termSymbol + "\nsymbol-info = " + qual.tpe.termSymbol.info +
-                "\nscope-id = " + qual.tpe.termSymbol.info.decls.hashCode() + "\nmembers = " + qual.tpe.members +
-                "\nname = " + name + "\nfound = " + sym + "\nowner = " + context.enclClass.owner)
-
-            def makeInteractiveErrorTree = {
-              val tree1 = tree match {
-                case Select(_, _) => treeCopy.Select(tree, qual, name)
-                case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
-              }
-              setError(tree1)
-            }
-
-            if (name == nme.ERROR && forInteractive)
-              return makeInteractiveErrorTree
-
-            if (!qual.tpe.widen.isErroneous) {
-              if ((mode & QUALmode) != 0) {
-                val lastTry = rootMirror.missingHook(qual.tpe.typeSymbol, name)
-                if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
+            def errorTree = missingSelectErrorTree(tree, qual, name)
+            def asTypeSelection = (
+              if (context.unit.isJava && name.isTypeName) {
+                // SI-3120 Java uses the same syntax, A.B, to express selection from the
+                // value A and from the type A. We have to try both.
+                atPos(tree.pos)(gen.convertToSelectFromType(qual, name)) match {
+                  case EmptyTree => None
+                  case tree1     => Some(typed1(tree1, mode, pt))
+                }
               }
-              NotAMemberError(tree, qual, name)
-            }
-
-            if (forInteractive) makeInteractiveErrorTree else setError(tree)
+              else None
+            )
+            debuglog(s"""
+              |qual=$qual:${qual.tpe}
+              |symbol=${qual.tpe.termSymbol.defString}
+              |scope-id=${qual.tpe.termSymbol.info.decls.hashCode}
+              |members=${qual.tpe.members mkString ", "}
+              |name=$name
+              |found=$sym
+              |owner=${context.enclClass.owner}
+              """.stripMargin)
+
+            // 1) Try converting a term selection on a java class into a type selection.
+            // 2) Try expanding according to Dynamic rules.
+            // 3) Try looking up the name in the qualifier.
+            asTypeSelection orElse asDynamicCall getOrElse (lookupInQualifier(qual, name) match {
+              case NoSymbol => setError(errorTree)
+              case found    => typed1(tree setSymbol found, mode, pt)
+            })
           }
           handleMissing
-        } else {
+        }
+        else {
           val tree1 = tree match {
             case Select(_, _) => treeCopy.Select(tree, qual, name)
             case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
           }
           val (result, accessibleError) = silent(_.makeAccessible(tree1, sym, qual.tpe, qual)) match {
+            case SilentTypeError(err: AccessTypeError) =>
+              (tree1, Some(err))
             case SilentTypeError(err) =>
-              if (err.kind != ErrorKinds.Access) {
-                context issue err
-                return setError(tree)
-              }
-              else (tree1, Some(err))
+              SelectWithUnderlyingError(tree, err)
+              return tree
             case SilentResultValue(treeAndPre) =>
               (stabilize(treeAndPre._1, treeAndPre._2, mode, pt), None)
           }
 
-          def isPotentialNullDeference() = {
-            !isPastTyper &&
-            !sym.isConstructor &&
-            !(qual.tpe <:< NotNullClass.tpe) && !qual.tpe.isNotNull &&
-            !(List(Any_isInstanceOf, Any_asInstanceOf) contains result.symbol)  // null.is/as is not a dereference
-          }
-          // unit is null here sometimes; how are we to know when unit might be null? (See bug #2467.)
-          if (settings.warnSelectNullable.value && isPotentialNullDeference && unit != null)
-            unit.warning(tree.pos, "potential null pointer dereference: "+tree)
-
           result match {
             // could checkAccessible (called by makeAccessible) potentially have skipped checking a type application in qual?
             case SelectFromTypeTree(qual @ TypeTree(), name) if qual.tpe.typeArgs.nonEmpty => // TODO: somehow the new qual is not checked in refchecks
@@ -4884,7 +4690,7 @@ trait Typers extends Modes with Adaptations with Tags {
             case _ if accessibleError.isDefined =>
               // don't adapt constructor, SI-6074
               val qual1 = if (name == nme.CONSTRUCTOR) qual
-                          else adaptToMemberWithArgs(tree, qual, name, mode, false, false)
+                          else adaptToMemberWithArgs(tree, qual, name, mode, reportAmbiguous = false, saveErrors = false)
               if (!qual1.isErrorTyped && (qual1 ne qual))
                 typed(Select(qual1, name) setPos tree.pos, mode, pt)
               else
@@ -4899,320 +4705,115 @@ trait Typers extends Modes with Adaptations with Tags {
         }
       }
 
-      def typedSelectOrSuperCall(tree: Select) = {
-        val qual = tree.qualifier
-        val name = tree.name
-        qual match {
-          case _: Super if name == nme.CONSTRUCTOR =>
-            val qual1 =
-              typed(qual, EXPRmode | QUALmode | POLYmode | SUPERCONSTRmode, WildcardType)
-              // the qualifier type of a supercall constructor is its first parent class
-            typedSelect(tree, qual1, nme.CONSTRUCTOR)
-          case _ =>
-            if (Statistics.canEnable) Statistics.incCounter(typedSelectCount)
-            var qual1 = checkDead(typedQualifier(qual, mode))
-            if (name.isTypeName) qual1 = checkStable(qual1)
-
-            val tree1 = // temporarily use `filter` and an alternative for `withFilter`
-              if (name == nme.withFilter)
-                silent(_ => typedSelect(tree, qual1, name)) match {
-                  case SilentResultValue(result) =>
-                    result
-                  case _ =>
-                    silent(_ => typed1(Select(qual1, nme.filter) setPos tree.pos, mode, pt)) match {
-                      case SilentResultValue(result2) =>
-                        unit.deprecationWarning(
-                          tree.pos, "`withFilter' method does not yet exist on " + qual1.tpe.widen +
-                            ", using `filter' method instead")
-                        result2
-                      case SilentTypeError(err) =>
-                        WithFilterError(tree, err)
-                    }
-                }
-              else
-                typedSelect(tree, qual1, name)
-
-            if (tree.isInstanceOf[PostfixSelect])
-              checkFeature(tree.pos, PostfixOpsFeature, name.decode)
-            if (tree1.symbol != null && tree1.symbol.isOnlyRefinementMember)
-              checkFeature(tree1.pos, ReflectiveCallsFeature, tree1.symbol.toString)
-
-            if (qual1.hasSymbolWhich(_.isRootPackage)) treeCopy.Ident(tree1, name)
-            else tree1
+      // temporarily use `filter` as an alternative for `withFilter`
+      def tryWithFilterAndFilter(tree: Select, qual: Tree): Tree = {
+        def warn() = unit.deprecationWarning(tree.pos, s"`withFilter' method does not yet exist on ${qual.tpe.widen}, using `filter' method instead")
+        silent(_ => typedSelect(tree, qual, nme.withFilter)) orElse { _ =>
+          silent(_ => typed1(Select(qual, nme.filter) setPos tree.pos, mode, pt)) match {
+            case SilentResultValue(res) => warn() ; res
+            case SilentTypeError(err)   => WithFilterError(tree, err)
+          }
         }
       }
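
    tryWithFilterAndFilter keeps for-comprehension guards working on types that predate withFilter: the guard desugars to a withFilter call, and if only filter exists it is used instead, with a deprecation warning (and not at all under -Xfuture). Sketch with an illustrative collection-like class:

        object WithFilterSketch {
          class Naturals(limit: Int) {
            // no withFilter here, only filter
            def filter(p: Int => Boolean): List[Int] = (0 until limit).toList.filter(p)
          }

          // desugars to new Naturals(10).withFilter(n => n % 2 == 0).map(n => n * 2);
          // withFilter is missing, so the typer falls back to filter (with a deprecation
          // warning) and map is then found on the resulting List[Int]
          val evens = for (n <- new Naturals(10) if n % 2 == 0) yield n * 2
        }
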
+      def typedSelectOrSuperCall(tree: Select) = tree match {
+        case Select(qual @ Super(_, _), nme.CONSTRUCTOR) =>
+          // the qualifier type of a supercall constructor is its first parent class
+          typedSelect(tree, typedSelectOrSuperQualifier(qual), nme.CONSTRUCTOR)
+        case Select(qual, name) =>
+          if (Statistics.canEnable) Statistics.incCounter(typedSelectCount)
+          val qualTyped = checkDead(typedQualifier(qual, mode))
+          val qualStableOrError = (
+            if (qualTyped.isErrorTyped || !name.isTypeName || treeInfo.admitsTypeSelection(qualTyped))
+              qualTyped
+            else
+              UnstableTreeError(qualTyped)
+          )
+          val tree1 = name match {
+            case nme.withFilter if !settings.future => tryWithFilterAndFilter(tree, qualStableOrError)
+            case _              => typedSelect(tree, qualStableOrError, name)
+          }
+          def sym = tree1.symbol
+          if (tree.isInstanceOf[PostfixSelect])
+            checkFeature(tree.pos, PostfixOpsFeature, name.decode)
+          if (sym != null && sym.isOnlyRefinementMember && !sym.isMacro)
+            checkFeature(tree1.pos, ReflectiveCallsFeature, sym.toString)
+
+          qualStableOrError.symbol match {
+            case s: Symbol if s.isRootPackage => treeCopy.Ident(tree1, name)
+            case _                            => tree1
+          }
+      }
+
+      /* A symbol qualifies if:
+       *  - it exists
+       *  - it is not stale (stale symbols are made to disappear here)
+       *  - if we are in a constructor pattern, method definitions do not qualify
+       *    unless they are stable.  Otherwise, 'case x :: xs' would find the :: method.
+       */
+      def qualifies(sym: Symbol) = (
+           sym.hasRawInfo
+        && reallyExists(sym)
+        && !(mode.typingConstructorPattern && sym.isMethod && !sym.isStable)
+      )
 
-      /** Attribute an identifier consisting of a simple name or an outer reference.
+      /* Attribute an identifier consisting of a simple name or an outer reference.
        *
-       *  @param tree      The tree representing the identifier.
-       *  @param name      The name of the identifier.
-       *  Transformations: (1) Prefix class members with this.
-       *                   (2) Change imported symbols to selections
+       * @param tree      The tree representing the identifier.
+       * @param name      The name of the identifier.
+       * Transformations: (1) Prefix class members with this.
+       *                  (2) Change imported symbols to selections
        */
       def typedIdent(tree: Tree, name: Name): Tree = {
-        var errorContainer: AbsTypeError = null
-        def ambiguousError(msg: String) = {
-          assert(errorContainer == null, "Cannot set ambiguous error twice for identifier")
-          errorContainer = AmbiguousIdentError(tree, name, msg)
-        }
-        def identError(tree: AbsTypeError) = {
-          assert(errorContainer == null, "Cannot set ambiguous error twice for identifier")
-          errorContainer = tree
-        }
-
-        var defSym: Symbol = tree.symbol  // the directly found symbol
-        var pre: Type = NoPrefix          // the prefix type of defSym, if a class member
-        var qual: Tree = EmptyTree        // the qualifier tree if transformed tree is a select
-        var inaccessibleSym: Symbol = NoSymbol // the first symbol that was found but that was discarded
-                                          // for being inaccessible; used for error reporting
-        var inaccessibleExplanation: String = ""
-
-        // If a special setting is given, the empty package will be checked as a
-        // last ditch effort before failing.  This method sets defSym and returns
-        // true if a member of the given name exists.
-        def checkEmptyPackage(): Boolean = {
-          defSym = rootMirror.EmptyPackageClass.tpe.nonPrivateMember(name)
-          defSym != NoSymbol
-        }
-        def startingIdentContext = (
-          // ignore current variable scope in patterns to enforce linearity
-          if ((mode & (PATTERNmode | TYPEPATmode)) == 0) context
-          else context.outer
-        )
-        // A symbol qualifies if it exists and is not stale. Stale symbols
-        // are made to disappear here. In addition,
-        // if we are in a constructor of a pattern, we ignore all definitions
-        // which are methods (note: if we don't do that
-        // case x :: xs in class List would return the :: method)
-        // unless they are stable or are accessors (the latter exception is for better error messages).
-        def qualifies(sym: Symbol): Boolean = {
-          sym.hasRawInfo &&       // this condition avoids crashing on self-referential pattern variables
-          reallyExists(sym) &&
-          ((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod || sym.hasFlag(ACCESSOR))
-        }
-
-        if (defSym == NoSymbol) {
-          var defEntry: ScopeEntry = null // the scope entry of defSym, if defined in a local scope
-
-          var cx = startingIdentContext
-          while (defSym == NoSymbol && cx != NoContext && (cx.scope ne null)) { // cx.scope eq null arises during FixInvalidSyms in Duplicators
-            pre = cx.enclClass.prefix
-            defEntry = cx.scope.lookupEntry(name)
-            if ((defEntry ne null) && qualifies(defEntry.sym)) {
-              // Right here is where SI-1987, overloading in package objects, can be
-              // seen to go wrong. There is an overloaded symbol, but when referring
-              // to the unqualified identifier from elsewhere in the package, only
-              // the last definition is visible. So overloading mis-resolves and is
-              // definition-order dependent, bad things. See run/t1987.scala.
-              //
-              // I assume the actual problem involves how/where these symbols are entered
-              // into the scope. But since I didn't figure out how to fix it that way, I
-              // catch it here by looking up package-object-defined symbols in the prefix.
-              if (isInPackageObject(defEntry.sym, pre.typeSymbol)) {
-                defSym = pre.member(defEntry.sym.name)
-                if (defSym ne defEntry.sym) {
-                  qual = gen.mkAttributedQualifier(pre)
-                  log(sm"""
-                    |  !!! Overloaded package object member resolved incorrectly.
-                    |        prefix: $pre
-                    |     Discarded: ${defEntry.sym.defString}
-                    |         Using: ${defSym.defString}
-                    """)
-                }
-              }
-              else
-                defSym = defEntry.sym
-            }
-            else {
-              cx = cx.enclClass
-              val foundSym = pre.member(name) filter qualifies
-              defSym = foundSym filter (context.isAccessible(_, pre, false))
-              if (defSym == NoSymbol) {
-                if ((foundSym ne NoSymbol) && (inaccessibleSym eq NoSymbol)) {
-                  inaccessibleSym = foundSym
-                  inaccessibleExplanation = analyzer.lastAccessCheckDetails
-                }
-                cx = cx.outer
-              }
-            }
-          }
+        // setting to enable unqualified idents in empty package (used by the repl)
+        def inEmptyPackage = if (settings.exposeEmptyPackage) lookupInEmpty(name) else NoSymbol
 
-          val symDepth = if (defEntry eq null) cx.depth
-                         else cx.depth - (cx.scope.nestingLevel - defEntry.owner.nestingLevel)
-          var impSym: Symbol = NoSymbol      // the imported symbol
-          var imports = context.imports      // impSym != NoSymbol => it is imported from imports.head
+        def issue(err: AbsTypeError) = {
+          // Avoiding some spurious error messages: see SI-2388.
+          val suppress = reporter.hasErrors && (name startsWith tpnme.ANON_CLASS_NAME)
+          if (!suppress)
+            ErrorUtils.issueTypeError(err)
 
-          // Java: A single-type-import declaration d in a compilation unit c of package p
-          // that imports a type named n shadows, throughout c, the declarations of:
-          //
-          //  1) any top level type named n declared in another compilation unit of p
-          //
-          // A type-import-on-demand declaration never causes any other declaration to be shadowed.
-          //
-          // Scala: Bindings of different kinds have a precedence defined on them:
-          //
-          //  1) Definitions and declarations that are local, inherited, or made available by a
-          //     package clause in the same compilation unit where the definition occurs have
-          //     highest precedence.
-          //  2) Explicit imports have next highest precedence.
-          def depthOk(imp: ImportInfo) = (
-               imp.depth > symDepth
-            || (unit.isJava && imp.isExplicitImport(name) && imp.depth == symDepth)
-          )
-          while (!reallyExists(impSym) && !imports.isEmpty && depthOk(imports.head)) {
-            impSym = imports.head.importedSymbol(name)
-            if (!impSym.exists) imports = imports.tail
-          }
-
-          // detect ambiguous definition/import,
-          // update `defSym` to be the final resolved symbol,
-          // update `pre` to be `sym`s prefix type in case it is an imported member,
-          // and compute value of:
-
-          if (defSym.exists && impSym.exists) {
-            // imported symbols take precedence over package-owned symbols in different
-            // compilation units. Defined symbols take precedence over erroneous imports.
-            if (defSym.isDefinedInPackage &&
-                (!currentRun.compiles(defSym) ||
-                 context.unit.exists && defSym.sourceFile != context.unit.source.file))
-              defSym = NoSymbol
-            else if (impSym.isError || impSym.name == nme.CONSTRUCTOR)
-              impSym = NoSymbol
-          }
-          if (defSym.exists) {
-            if (impSym.exists)
-              ambiguousError(
-                "it is both defined in "+defSym.owner +
-                " and imported subsequently by \n"+imports.head)
-            else if (!defSym.owner.isClass || defSym.owner.isPackageClass || defSym.isTypeParameterOrSkolem)
-              pre = NoPrefix
-            else
-              qual = atPos(tree.pos.focusStart)(gen.mkAttributedQualifier(pre))
-          } else {
-            if (impSym.exists) {
-              var impSym1: Symbol = NoSymbol
-              var imports1 = imports.tail
-
-              /** It's possible that seemingly conflicting identifiers are
-               *  identifiably the same after type normalization.  In such cases,
-               *  allow compilation to proceed.  A typical example is:
-               *    package object foo { type InputStream = java.io.InputStream }
-               *    import foo._, java.io._
-               */
-              def ambiguousImport() = {
-                // The types of the qualifiers from which the ambiguous imports come.
-                // If the ambiguous name is a value, these must be the same.
-                def t1  = imports.head.qual.tpe
-                def t2  = imports1.head.qual.tpe
-                // The types of the ambiguous symbols, seen as members of their qualifiers.
-                // If the ambiguous name is a monomorphic type, we can relax this far.
-                def mt1 = t1 memberType impSym
-                def mt2 = t2 memberType impSym1
-                def characterize = List(
-                  s"types:  $t1 =:= $t2  ${t1 =:= t2}  members: ${mt1 =:= mt2}",
-                  s"member type 1: $mt1",
-                  s"member type 2: $mt2",
-                  s"$impSym == $impSym1  ${impSym == impSym1}",
-                  s"${impSym.debugLocationString} ${impSym.getClass}",
-                  s"${impSym1.debugLocationString} ${impSym1.getClass}"
-                ).mkString("\n  ")
-
-                // The symbol names are checked rather than the symbols themselves because
-                // each time an overloaded member is looked up it receives a new symbol.
-                // So foo.member("x") != foo.member("x") if x is overloaded.  This seems
-                // likely to be the cause of other bugs too...
-                if (t1 =:= t2 && impSym.name == impSym1.name)
-                  log(s"Suppressing ambiguous import: $t1 =:= $t2 && $impSym == $impSym1")
-                // Monomorphism restriction on types is in part because type aliases could have the
-                // same target type but attach different variance to the parameters. Maybe it can be
-                // relaxed, but doesn't seem worth it at present.
-                else if (mt1 =:= mt2 && name.isTypeName && impSym.isMonomorphicType && impSym1.isMonomorphicType)
-                  log(s"Suppressing ambiguous import: $mt1 =:= $mt2 && $impSym and $impSym1 are equivalent")
-                else {
-                  log(s"Import is genuinely ambiguous:\n  " + characterize)
-                  ambiguousError(s"it is imported twice in the same scope by\n${imports.head}\nand ${imports1.head}")
-                }
-              }
-              while (errorContainer == null && !imports1.isEmpty &&
-                     (!imports.head.isExplicitImport(name) ||
-                      imports1.head.depth == imports.head.depth)) {
-                impSym1 = imports1.head.importedSymbol(name)
-                if (reallyExists(impSym1)) {
-                  if (imports1.head.isExplicitImport(name)) {
-                    if (imports.head.isExplicitImport(name) ||
-                        imports1.head.depth != imports.head.depth) ambiguousImport()
-                    impSym = impSym1
-                    imports = imports1
-                  } else if (!imports.head.isExplicitImport(name) &&
-                             imports1.head.depth == imports.head.depth) ambiguousImport()
-                }
-                imports1 = imports1.tail
-              }
-              defSym = impSym
-              val qual0 = imports.head.qual
-              if (!(shortenImports && qual0.symbol.isPackage)) // optimization: don't write out package prefixes
-                qual = atPos(tree.pos.focusStart)(resetPos(qual0.duplicate))
-              pre = qual.tpe
-            }
-            else if (settings.exposeEmptyPackage.value && checkEmptyPackage())
-              log("Allowing empty package member " + name + " due to settings.")
-            else {
-              if ((mode & QUALmode) != 0) {
-                val lastTry = rootMirror.missingHook(rootMirror.RootClass, name)
-                if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
-              }
-              if (settings.debug.value) {
-                log(context.imports)//debug
-              }
-              if (inaccessibleSym eq NoSymbol) {
-                // Avoiding some spurious error messages: see SI-2388.
-                if (reporter.hasErrors && (name startsWith tpnme.ANON_CLASS_NAME)) ()
-                else identError(SymbolNotFoundError(tree, name, context.owner, startingIdentContext))
-              } else
-                identError(InferErrorGen.AccessError(
-                  tree, inaccessibleSym, context.enclClass.owner.thisType, context.enclClass.owner,
-                  inaccessibleExplanation
-                ))
-              defSym = context.owner.newErrorSymbol(name)
-            }
-          }
-        }
-        if (errorContainer != null) {
-          ErrorUtils.issueTypeError(errorContainer)
           setError(tree)
-        } else {
-          if (defSym.owner.isPackageClass)
-            pre = defSym.owner.thisType
-
-          // Inferring classOf type parameter from expected type.
-          if (defSym.isThisSym) {
-            typed1(This(defSym.owner) setPos tree.pos, mode, pt)
-          }
+        }
+        // ignore current variable scope in patterns to enforce linearity
+        val startContext = if (mode.typingPatternOrTypePat) context.outer else context
+        val nameLookup   = tree.symbol match {
+          case NoSymbol   => startContext.lookupSymbol(name, qualifies)
+          case sym        => LookupSucceeded(EmptyTree, sym)
+        }
+        import InferErrorGen._
+        nameLookup match {
+          case LookupAmbiguous(msg)         => issue(AmbiguousIdentError(tree, name, msg))
+          case LookupInaccessible(sym, msg) => issue(AccessError(tree, sym, context, msg))
+          case LookupNotFound               =>
+            inEmptyPackage orElse lookupInRoot(name) match {
+              case NoSymbol => issue(SymbolNotFoundError(tree, name, context.owner, startContext))
+              case sym      => typed1(tree setSymbol sym, mode, pt)
+            }
+          case LookupSucceeded(qual, sym)   =>
+            (// this -> Foo.this
+            if (sym.isThisSym)
+              typed1(This(sym.owner) setPos tree.pos, mode, pt)
           // Inferring classOf type parameter from expected type.  Otherwise an
           // actual call to the stubbed classOf method is generated, returning null.
-          else if (isPredefMemberNamed(defSym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty)
+            else if (isPredefClassOf(sym) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty)
             typedClassOf(tree, TypeTree(pt.typeArgs.head))
           else {
-            val tree1 = (
-              if (qual == EmptyTree) tree
-              // atPos necessary because qualifier might come from startContext
-              else atPos(tree.pos)(Select(qual, name) setAttachments tree.attachments)
-            )
-            val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual)
-            // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right?
-            val tree3 = stabilize(tree2, pre2, mode, pt)
+              val pre1  = if (sym.isTopLevel) sym.owner.thisType else if (qual == EmptyTree) NoPrefix else qual.tpe
+              val tree1 = if (qual == EmptyTree) tree else atPos(tree.pos)(Select(atPos(tree.pos.focusStart)(qual), name))
+              val (tree2, pre2) = makeAccessible(tree1, sym, pre1, qual)
             // SI-5967 Important to replace param type A* with Seq[A] when seen from from a reference, to avoid
             //         inference errors in pattern matching.
-            tree3 setType dropRepeatedParamType(tree3.tpe)
+              stabilize(tree2, pre2, mode, pt) modifyType dropIllegalStarTypes
+            }) setAttachments tree.attachments
           }
         }
-      }
 
       def typedIdentOrWildcard(tree: Ident) = {
         val name = tree.name
         if (Statistics.canEnable) Statistics.incCounter(typedIdentCount)
-        if ((name == nme.WILDCARD && (mode & (PATTERNmode | FUNmode)) == PATTERNmode) ||
-            (name == tpnme.WILDCARD && (mode & TYPEmode) != 0))
+        if ((name == nme.WILDCARD && mode.typingPatternNotConstructor) ||
+            (name == tpnme.WILDCARD && mode.inTypeMode))
           tree setType makeFullyDefined(pt)
         else
           typedIdent(tree, name)
@@ -5239,40 +4840,63 @@ trait Typers extends Modes with Adaptations with Tags {
       }
 
       def typedAppliedTypeTree(tree: AppliedTypeTree) = {
-        val tpt = tree.tpt
-        val args = tree.args
-        val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType)
+        val tpt        = tree.tpt
+        val args       = tree.args
+        val tpt1       = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType)
+        def isPoly     = tpt1.tpe.isInstanceOf[PolyType]
+        def isComplete = tpt1.symbol.rawInfo.isComplete
+
         if (tpt1.isErrorTyped) {
           tpt1
-        } else if (!tpt1.hasSymbol) {
+        } else if (!tpt1.hasSymbolField) {
           AppliedTypeNoParametersError(tree, tpt1.tpe)
         } else {
           val tparams = tpt1.symbol.typeParams
+
           if (sameLength(tparams, args)) {
             // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
-            val args1 =
-              if (!tpt1.symbol.rawInfo.isComplete)
-                args mapConserve (typedHigherKindedType(_, mode))
-                // if symbol hasn't been fully loaded, can't check kind-arity
-              else map2Conserve(args, tparams) { (arg, tparam) =>
-                //@M! the polytype denotes the expected kind
-                typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
+            val args1 = map2Conserve(args, tparams) { (arg, tparam) =>
+              def ptParams = Kind.FromParams(tparam.typeParams)
+
+              // if symbol hasn't been fully loaded, can't check kind-arity except when we're in a pattern,
+              // where we can (we can't take part in F-Bounds) and must (SI-8023)
+              val pt = if (mode.typingPatternOrTypePat) {
+                tparam.initialize; ptParams
               }
-            val argtypes = args1 map (_.tpe)
-
-            foreach2(args, tparams)((arg, tparam) => arg match {
-              // note: can't use args1 in selector, because Bind's got replaced
-              case Bind(_, _) =>
-                if (arg.symbol.isAbstractType)
-                  arg.symbol setInfo // XXX, feedback. don't trackSymInfo here!
-                    TypeBounds(
-                      lub(List(arg.symbol.info.bounds.lo, tparam.info.bounds.lo.subst(tparams, argtypes))),
-                      glb(List(arg.symbol.info.bounds.hi, tparam.info.bounds.hi.subst(tparams, argtypes))))
-              case _ =>
-            })
+              else if (isComplete) ptParams
+              else Kind.Wildcard
+
+              typedHigherKindedType(arg, mode, pt)
+            }
+            val argtypes = mapList(args1)(treeTpe)
+
+            foreach2(args, tparams) { (arg, tparam) =>
+              // note: can't use args1 in selector, because Binds got replaced
+              val asym = arg.symbol
+              def abounds = asym.info.bounds
+              def tbounds = tparam.info.bounds
+              def enhanceBounds(): Unit = {
+                val TypeBounds(lo0, hi0) = abounds
+                val TypeBounds(lo1, hi1) = tbounds.subst(tparams, argtypes)
+                val lo = lub(List(lo0, lo1))
+                val hi = glb(List(hi0, hi1))
+                if (!(lo =:= lo0 && hi =:= hi0))
+                  asym setInfo logResult(s"Updating bounds of ${asym.fullLocationString} in $tree from '$abounds' to")(TypeBounds(lo, hi))
+              }
+              if (asym != null && asym.isAbstractType) {
+                arg match {
+                  // I removed the Ident() case that partially fixed SI-1786, because the
+                  // stricter bounds being inferred broke e.g. Slick; worse, the fix was
+                  // compilation order-dependent. sharpenQuantifierBounds (used in
+                  // skolemizeExistential) has an alternative, less invasive fix (SI-6169).
+                  case Bind(_, _) => enhanceBounds()
+                  case _          =>
+                }
+              }
+            }
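+            // Illustrative note (not part of the compiler sources): bound enhancement applies to
+            // type-variable binders in type patterns. A sketch, assuming a hypothetical
+            // `trait Coll[A <: AnyRef]`:
+            //   case c: Coll[t] => ...   // the bound type variable `t` picks up the upper bound AnyRef
+            // so uses of `t` in the case body respect the parameter's declared bounds.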
             val original = treeCopy.AppliedTypeTree(tree, tpt1, args1)
             val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal original
-            if(tpt1.tpe.isInstanceOf[PolyType]) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
+            if (isPoly) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
               TypeTreeWithDeferredRefCheck(){ () =>
                 // wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap
                 // we can't simply use original in refchecks because it does not contains types
@@ -5285,7 +4909,7 @@ trait Typers extends Modes with Adaptations with Tags {
             AppliedTypeNoParametersError(tree, tpt1.tpe)
           } else {
             //Console.println("\{tpt1}:\{tpt1.symbol}:\{tpt1.symbol.info}")
-            if (settings.debug.value) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info)//debug
+            if (settings.debug) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info)//debug
             AppliedTypeWrongNumberOfArgsError(tree, tpt1, tparams)
           }
         }
@@ -5294,7 +4918,8 @@ trait Typers extends Modes with Adaptations with Tags {
       val sym: Symbol = tree.symbol
       if ((sym ne null) && (sym ne NoSymbol)) sym.initialize
 
-      def typedPackageDef(pdef: PackageDef) = {
+      def typedPackageDef(pdef0: PackageDef) = {
+        val pdef = treeCopy.PackageDef(pdef0, pdef0.pid, pluginsEnterStats(this, pdef0.stats))
         val pid1 = typedQualifier(pdef.pid).asInstanceOf[RefTree]
         assert(sym.moduleClass ne NoSymbol, sym)
         val stats1 = newTyper(context.make(tree, sym.moduleClass, sym.info.decls))
@@ -5302,142 +4927,100 @@ trait Typers extends Modes with Adaptations with Tags {
         treeCopy.PackageDef(tree, pid1, stats1) setType NoType
       }
 
-      def typedDocDef(docdef: DocDef) = {
-        if (forScaladoc && (sym ne null) && (sym ne NoSymbol)) {
-          val comment = docdef.comment
-          fillDocComment(sym, comment)
-          val typer1 = newTyper(context.makeNewScope(tree, context.owner))
-          for (useCase <- comment.useCases) {
-            typer1.silent(_.typedUseCase(useCase)) match {
-              case SilentTypeError(err) =>
-                unit.warning(useCase.pos, err.errMsg)
-              case _ =>
-            }
-            for (useCaseSym <- useCase.defined) {
-              if (sym.name != useCaseSym.name)
-                unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
-            }
-          }
-        }
-        typed(docdef.definition, mode, pt)
-      }
-
-      /**
+      /*
        * The typer with the correct context for a method definition. If the method is a default getter for
        * a constructor default, the resulting typer has a constructor context (fixes SI-5543).
        */
       def defDefTyper(ddef: DefDef) = {
-        val isConstrDefaultGetter = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
+        val isConstrDefaultGetter = ddef.mods.hasDefault && sym.owner.isModuleClass &&
             nme.defaultGetterToMethod(sym.name) == nme.CONSTRUCTOR
         newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(isConstrDefaultGetter)
       }
 
       def typedAlternative(alt: Alternative) = {
-        val alts1 = alt.trees mapConserve (alt => typed(alt, mode | ALTmode, pt))
-        treeCopy.Alternative(tree, alts1) setType pt
+        context withinPatAlternative (
+          treeCopy.Alternative(tree, alt.trees mapConserve (alt => typed(alt, mode, pt))) setType pt
+        )
       }
-
       def typedStar(tree: Star) = {
-        if ((mode & STARmode) == 0 && !isPastTyper)
+        if (!context.starPatterns && !isPastTyper)
           StarPatternWithVarargParametersError(tree)
-        treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt)
-      }
 
-      def typedUnApply(tree: UnApply) = {
-        val fun1 = typed(tree.fun)
-        val tpes = formalTypes(unapplyTypeList(tree.fun.pos, tree.fun.symbol, fun1.tpe, tree.args), tree.args.length)
-        val args1 = map2(tree.args, tpes)(typedPattern)
-        treeCopy.UnApply(tree, fun1, args1) setType pt
+        treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt)
       }
-
-      def typedTry(tree: Try) = {
-        var block1 = typed(tree.block, pt)
-        var catches1 = typedCases(tree.catches, ThrowableClass.tpe, pt)
-
-        for (cdef <- catches1 if !isPastTyper && cdef.guard.isEmpty) {
-          def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning.")
+      def issueTryWarnings(tree: Try): Try = {
+        def checkForCatchAll(cdef: CaseDef) {
           def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol
-          cdef.pat match {
+          def warn(name: Name) = {
+            val msg = s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning."
+            context.warning(cdef.pat.pos, msg)
+          }
+          if (cdef.guard.isEmpty) cdef.pat match {
             case Bind(name, i @ Ident(_)) if unbound(i) => warn(name)
-            case i @ Ident(name) if unbound(i) => warn(name)
-            case _ =>
+            case i @ Ident(name) if unbound(i)          => warn(name)
+            case _                                      =>
           }
         }
-
-        val finalizer1 =
-          if (tree.finalizer.isEmpty) tree.finalizer
-          else typed(tree.finalizer, UnitClass.tpe)
-        val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)), pt)
-        if (needAdapt) {
-          block1 = adapt(block1, mode, owntype)
-          catches1 = catches1 map (adaptCase(_, mode, owntype))
+        if (!isPastTyper) tree match {
+          case Try(_, Nil, fin) =>
+            if (fin eq EmptyTree)
+              context.warning(tree.pos, "A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.")
+          case Try(_, catches, _) =>
+            catches foreach checkForCatchAll
         }
+        tree
+      }
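+      // Illustrative note (not part of the compiler sources): sketches of code that trips the two
+      // warnings above, assuming a hypothetical `foo()` and `handle(e)`:
+      //   try foo()                                // no catch or finally: body might as well be a block
+      //   try foo() catch { case e => handle(e) }  // unbound, unguarded pattern: catches all Throwables
+      // The second warning is cleared by an explicit type, e.g. `case e: Throwable => handle(e)`.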
 
-        treeCopy.Try(tree, block1, catches1, finalizer1) setType owntype
+      def typedTry(tree: Try) = {
+        val Try(block, catches, fin) = tree
+        val block1   = typed(block, pt)
+        val catches1 = typedCases(catches, ThrowableTpe, pt)
+        val fin1     = if (fin.isEmpty) fin else typed(fin, UnitTpe)
+
+        def finish(ownType: Type) = treeCopy.Try(tree, block1, catches1, fin1) setType ownType
+
+        issueTryWarnings(
+          if (isFullyDefined(pt))
+            finish(pt)
+          else block1 :: catches1 map (_.tpe.deconst) match {
+            case tpes if sameWeakLubAsLub(tpes) => finish(lub(tpes))
+            case tpes                           =>
+              val lub      = weakLub(tpes)
+              val block2   = adapt(block1, mode, lub)
+              val catches2 = catches1 map (adaptCase(_, mode, lub))
+              treeCopy.Try(tree, block2, catches2, fin1) setType lub
+          }
+        )
       }
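+      // Illustrative note (not part of the compiler sources): when the expected type is not fully
+      // defined, the result type of a try is computed from its branches; for instance
+      //   try 1 catch { case _: Throwable => 2.0 }
+      // gets the weak lub Double, and the Int branch is adapted to it; branches whose ordinary lub
+      // already agrees with the weak lub skip the extra adaptation.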
 
       def typedThrow(tree: Throw) = {
-        val expr1 = typed(tree.expr, EXPRmode | BYVALmode, ThrowableClass.tpe)
-        treeCopy.Throw(tree, expr1) setType NothingClass.tpe
+        val expr1 = typedByValueExpr(tree.expr, ThrowableTpe)
+        treeCopy.Throw(tree, expr1) setType NothingTpe
       }
 
       def typedTyped(tree: Typed) = {
-        val expr = tree.expr
-        val tpt = tree.tpt
-        tpt match {
-          case Function(List(), EmptyTree) =>
-            // find out whether the programmer is trying to eta-expand a macro def
-            // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee)
-            // that typecheck must not trigger macro expansions, so we explicitly prohibit them
-            // however we cannot do `context.withMacrosDisabled`
-            // because `expr` might contain nested macro calls (see SI-6673)
-            val exprTyped = typed1(suppressMacroExpansion(expr), mode, pt)
-            exprTyped match {
-              case macroDef if macroDef.symbol != null && macroDef.symbol.isTermMacro && !macroDef.symbol.isErroneous =>
-                MacroEtaError(exprTyped)
-              case _ =>
-                typedEta(checkDead(exprTyped))
-            }
-
-          case Ident(tpnme.WILDCARD_STAR) =>
-            val exprTyped = typed(expr, onlyStickyModes(mode), WildcardType)
-            def subArrayType(pt: Type) =
-              if (isPrimitiveValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt)
-              else {
-                val tparam = context.owner freshExistential "" setInfo TypeBounds.upper(pt)
-                newExistentialType(List(tparam), arrayType(tparam.tpe))
-              }
-
-            val (exprAdapted, baseClass) = exprTyped.tpe.typeSymbol match {
-              case ArrayClass => (adapt(exprTyped, onlyStickyModes(mode), subArrayType(pt)), ArrayClass)
-              case _ => (adapt(exprTyped, onlyStickyModes(mode), seqType(pt)), SeqClass)
-            }
-            exprAdapted.tpe.baseType(baseClass) match {
-              case TypeRef(_, _, List(elemtp)) =>
-                treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp
-              case _ =>
-                setError(tree)
+        if (treeInfo isWildcardStarType tree.tpt)
+          typedStarInPattern(tree, mode.onlySticky, pt)
+        else if (mode.inPatternMode)
+          typedInPattern(tree, mode.onlySticky, pt)
+        else tree match {
+          // find out whether the programmer is trying to eta-expand a macro def
+          // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee)
+          // that typecheck must not trigger macro expansions, so we explicitly prohibit them
+          // however we cannot do `context.withMacrosDisabled`
+          // because `expr` might contain nested macro calls (see SI-6673)
+          //
+          // Note: apparently `Function(Nil, EmptyTree)` is the secret parser marker
+          // which means trailing underscore.
+          case Typed(expr, Function(Nil, EmptyTree)) =>
+            typed1(suppressMacroExpansion(expr), mode, pt) match {
+              case macroDef if treeInfo.isMacroApplication(macroDef) => MacroEtaError(macroDef)
+              case exprTyped                                         => typedEta(checkDead(exprTyped))
             }
-
-          case _ =>
-            val tptTyped = typedType(tpt, mode)
-            val exprTyped = typed(expr, onlyStickyModes(mode), tptTyped.tpe.deconst)
-            val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped)
-
-            if (isPatternMode) {
-              val uncheckedTypeExtractor = extractorForUncheckedType(tpt.pos, tptTyped.tpe)
-
-              // make fully defined to avoid bounded wildcard types that may be in pt from calling dropExistential (SI-2038)
-              val ptDefined = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
-              val ownType = inferTypedPattern(tptTyped, tptTyped.tpe, ptDefined, canRemedy = uncheckedTypeExtractor.nonEmpty)
-              treeTyped setType ownType
-
-              uncheckedTypeExtractor match {
-                case None => treeTyped
-                case Some(extractor) => wrapClassTagUnapply(treeTyped, extractor, tptTyped.tpe)
-              }
-            } else
-              treeTyped setType tptTyped.tpe
+          case Typed(expr, tpt) =>
+            val tpt1  = typedType(tpt, mode)                           // type the ascribed type first
+            val expr1 = typed(expr, mode.onlySticky, tpt1.tpe.deconst) // then type the expression with tpt1 as the expected type
+            treeCopy.Typed(tree, expr1, tpt1) setType tpt1.tpe
         }
       }
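+      // Illustrative note (not part of the compiler sources): sketches of the three shapes
+      // dispatched above, with `twice`, `f`, `xs` and `x` as hypothetical names:
+      //   val g = twice _   // Typed(expr, Function(Nil, EmptyTree)): the trailing-underscore marker
+      //   f(xs: _*)         // wildcard-star ascription, splicing a sequence into a vararg parameter
+      //   (x: Any)          // ordinary ascription: the type is typed first, then the expression against it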
 
@@ -5453,8 +5036,8 @@ trait Typers extends Modes with Adaptations with Tags {
         //val undets = context.undetparams
 
         // @M: fun is typed in TAPPmode because it is being applied to its actual type parameters
-        val fun1 = typed(fun, forFunMode(mode) | TAPPmode, WildcardType)
-        val tparams = fun1.symbol.typeParams
+        val fun1 = typed(fun, mode.forFunMode | TAPPmode)
+        val tparams = if (fun1.symbol == null) Nil else fun1.symbol.typeParams
 
         //@M TODO: val undets_fun = context.undetparams  ?
         // "do args first" (by restoring the context.undetparams) in order to maintain context.undetparams on the function side.
@@ -5464,8 +5047,7 @@ trait Typers extends Modes with Adaptations with Tags {
 
         // @M maybe the well-kindedness check should be done when checking the type arguments conform to the type parameters' bounds?
         val args1 = if (sameLength(args, tparams)) map2Conserve(args, tparams) {
-          //@M! the polytype denotes the expected kind
-          (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
+          (arg, tparam) => typedHigherKindedType(arg, mode, Kind.FromParams(tparam.typeParams))
         }
         else {
           //@M  this branch is correctly hit for an overloaded polymorphic type. It also has to handle erroneous cases.
@@ -5483,10 +5065,9 @@ trait Typers extends Modes with Adaptations with Tags {
 
       def typedApplyDynamic(tree: ApplyDynamic) = {
         assert(phase.erasedTypes)
-        val reflectiveCalls = !(settings.refinementMethodDispatch.value == "invoke-dynamic")
-        val qual1 = typed(tree.qual, AnyRefClass.tpe)
-        val args1 = tree.args mapConserve (arg => if (reflectiveCalls) typed(arg, AnyRefClass.tpe) else typed(arg))
-        treeCopy.ApplyDynamic(tree, qual1, args1) setType (if (reflectiveCalls) AnyRefClass.tpe else tree.symbol.info.resultType)
+        val qual1 = typed(tree.qual, AnyRefTpe)
+        val args1 = tree.args mapConserve (arg => typed(arg, AnyRefTpe))
+        treeCopy.ApplyDynamic(tree, qual1, args1) setType AnyRefTpe
       }
 
       def typedReferenceToBoxed(tree: ReferenceToBoxed) = {
@@ -5498,20 +5079,72 @@ trait Typers extends Modes with Adaptations with Tags {
         treeCopy.ReferenceToBoxed(tree, id1) setType tpe
       }
 
+      // Warn about likely interpolated strings which are missing their interpolators
+      def warnMissingInterpolator(lit: Literal): Unit = if (!isPastTyper) {
+        // attempt to avoid warning about trees munged by macros
+        def isMacroExpansion = {
+          // context.tree is not the expandee; it is plain new SC(ps).m(args)
+          //context.tree exists (t => (t.pos includes lit.pos) && hasMacroExpansionAttachment(t))
+          // testing pos works and may suffice
+          //openMacros exists (_.macroApplication.pos includes lit.pos)
+          // tests whether the lit belongs to the expandee of an open macro
+          openMacros exists (_.macroApplication.attachments.get[MacroExpansionAttachment] match {
+            case Some(MacroExpansionAttachment(_, t: Tree)) => t exists (_ == lit)
+            case _                                          => false
+          })
+        }
+        // attempt to avoid warning about the special interpolated message string
+        // for implicitNotFound or any standard interpolation (with embedded $$).
+        def isRecognizablyNotForInterpolation = context.enclosingApply.tree match {
+          case Apply(Select(Apply(RefTree(_, nme.StringContext), _), _), _) => true
+          case Apply(Select(New(RefTree(_, tpnme.implicitNotFound)), _), _) => true
+          case _                                                            => isMacroExpansion
+        }
+        def requiresNoArgs(tp: Type): Boolean = tp match {
+          case PolyType(_, restpe)     => requiresNoArgs(restpe)
+          case MethodType(Nil, restpe) => requiresNoArgs(restpe)  // may be a curried method - can't tell yet
+          case MethodType(p :: _, _)   => p.isImplicit            // implicit method requires no args
+          case _                       => true                    // catches all others including NullaryMethodType
+        }
+        def isPlausible(m: Symbol) = m.alternatives exists (m => requiresNoArgs(m.info))
+
+        def maybeWarn(s: String): Unit = {
+          def warn(message: String)         = context.unit.warning(lit.pos, s"$message Did you forget the interpolator?")
+          def suspiciousSym(name: TermName) = context.lookupSymbol(name, _ => true).symbol
+          def suspiciousExpr                = InterpolatorCodeRegex findFirstIn s
+          def suspiciousIdents              = InterpolatorIdentRegex findAllIn s map (s => suspiciousSym(s drop 1))
+
+          // heuristics - no warning on e.g. a string with only "$asInstanceOf"
+          if (s contains ' ') (
+            if (suspiciousExpr.nonEmpty)
+              warn("That looks like an interpolated expression!") // "${...}"
+            else
+              suspiciousIdents find isPlausible foreach (sym => warn(s"`$$${sym.name}` looks like an interpolated identifier!")) // "$id"
+          )
+        }
+        lit match {
+          case Literal(Constant(s: String)) if !isRecognizablyNotForInterpolation => maybeWarn(s)
+          case _                                                                  =>
+        }
+      }
+
       def typedLiteral(tree: Literal) = {
-        val value = tree.value
-        tree setType (
-          if (value.tag == UnitTag) UnitClass.tpe
-          else ConstantType(value))
+        if (settings.lint) warnMissingInterpolator(tree)
+
+        tree setType (if (tree.value.tag == UnitTag) UnitTpe else ConstantType(tree.value))
       }
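+      // Illustrative note (not part of the compiler sources): under -Xlint, a literal such as
+      //   val greeting = "Hello, $name"
+      // warns "Did you forget the interpolator?" when a plausible no-argument `name` is in scope,
+      // the string contains a space, and the literal is not already part of a StringContext or
+      // @implicitNotFound expansion, as filtered above.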
 
       def typedSingletonTypeTree(tree: SingletonTypeTree) = {
-        val ref1 = checkStable(
-          context.withImplicitsDisabled(
-            typed(tree.ref, EXPRmode | QUALmode | (mode & TYPEPATmode), AnyRefClass.tpe)
-          )
-        )
-        tree setType ref1.tpe.resultType
+        val refTyped =
+          context.withImplicitsDisabled {
+            typed(tree.ref, MonoQualifierModes | mode.onlyTypePat, AnyRefTpe)
+          }
+
+        if (!refTyped.isErrorTyped)
+          tree setType refTyped.tpe.resultType
+
+        if (treeInfo.admitsTypeSelection(refTyped)) tree
+        else UnstableTreeError(refTyped)
       }
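+      // Illustrative note (not part of the compiler sources): a singleton type now requires a
+      // stable reference; sketched with hypothetical values:
+      //   val a = "x"; def f(y: a.type) = y   // ok: `a` admits type selection
+      //   var b = "x"; def g(y: b.type) = y   // rejected with an unstable-tree error: `b` is a var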
 
       def typedSelectFromTypeTree(tree: SelectFromTypeTree) = {
@@ -5521,8 +5154,8 @@ trait Typers extends Modes with Adaptations with Tags {
       }
 
       def typedTypeBoundsTree(tree: TypeBoundsTree) = {
-        val lo1 = typedType(tree.lo, mode)
-        val hi1 = typedType(tree.hi, mode)
+        val lo1 = if (tree.lo.isEmpty) TypeTree(NothingTpe) else typedType(tree.lo, mode)
+        val hi1 = if (tree.hi.isEmpty) TypeTree(AnyTpe) else typedType(tree.hi, mode)
         treeCopy.TypeBoundsTree(tree, lo1, hi1) setType TypeBounds(lo1.tpe, hi1.tpe)
       }
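+      // Illustrative note (not part of the compiler sources): empty bounds are now filled in
+      // explicitly, so a declaration such as `type T` is typed as if written
+      // `type T >: Nothing <: Any`.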
 
@@ -5543,11 +5176,13 @@ trait Typers extends Modes with Adaptations with Tags {
             case _ => tree
           }
         }
-        else
+        else {
           // we should get here only when something before failed
           // and we try again (@see tryTypedApply). In that case we can assign
           // whatever type to tree; we just have to survive until a real error message is issued.
-          tree setType AnyClass.tpe
+          devWarning(tree.pos, s"Assigning Any type to TypeTree because tree.original is null: tree is $tree/${System.identityHashCode(tree)}, sym=${tree.symbol}, tpe=${tree.tpe}")
+          tree setType AnyTpe
+        }
       }
       def typedFunction(fun: Function) = {
         if (fun.symbol == NoSymbol)
@@ -5556,104 +5191,126 @@ trait Typers extends Modes with Adaptations with Tags {
         typerWithLocalContext(context.makeNewScope(fun, fun.symbol))(_.typedFunction(fun, mode, pt))
       }
 
-      // begin typed1
-      //if (settings.debug.value && tree.isDef) log("typing definition of "+sym);//DEBUG
-      tree match {
-        case tree: Ident                        => typedIdentOrWildcard(tree)
-        case tree: Select                       => typedSelectOrSuperCall(tree)
-        case tree: Apply                        => typedApply(tree)
+      // Trees only allowed during pattern mode.
+      def typedInPatternMode(tree: Tree): Tree = tree match {
+        case tree: Alternative => typedAlternative(tree)
+        case tree: Star        => typedStar(tree)
+        case _                 => abort(s"unexpected tree in pattern mode: ${tree.getClass}\n$tree")
+      }
+
+      def typedTypTree(tree: TypTree): Tree = tree match {
         case tree: TypeTree                     => typedTypeTree(tree)
-        case tree: Literal                      => typedLiteral(tree)
-        case tree: This                         => typedThis(tree)
-        case tree: ValDef                       => typedValDef(tree)
-        case tree: DefDef                       => defDefTyper(tree).typedDefDef(tree)
-        case tree: Block                        => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt))
-        case tree: If                           => typedIf(tree)
-        case tree: TypeApply                    => typedTypeApply(tree)
         case tree: AppliedTypeTree              => typedAppliedTypeTree(tree)
-        case tree: Bind                         => typedBind(tree)
-        case tree: Function                     => typedFunction(tree)
-        case tree: Match                        => typedVirtualizedMatch(tree)
-        case tree: New                          => typedNew(tree)
-        case tree: Assign                       => typedAssign(tree.lhs, tree.rhs)
-        case tree: AssignOrNamedArg             => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck
-        case tree: Super                        => typedSuper(tree)
         case tree: TypeBoundsTree               => typedTypeBoundsTree(tree)
-        case tree: Typed                        => typedTyped(tree)
-        case tree: ClassDef                     => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
-        case tree: ModuleDef                    => newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
-        case tree: TypeDef                      => typedTypeDef(tree)
-        case tree: LabelDef                     => labelTyper(tree).typedLabelDef(tree)
-        case tree: PackageDef                   => typedPackageDef(tree)
-        case tree: DocDef                       => typedDocDef(tree)
-        case tree: Annotated                    => typedAnnotated(tree)
         case tree: SingletonTypeTree            => typedSingletonTypeTree(tree)
         case tree: SelectFromTypeTree           => typedSelectFromTypeTree(tree)
         case tree: CompoundTypeTree             => typedCompoundTypeTree(tree)
         case tree: ExistentialTypeTree          => typedExistentialTypeTree(tree)
-        case tree: Return                       => typedReturn(tree)
-        case tree: Try                          => typedTry(tree)
-        case tree: Throw                        => typedThrow(tree)
-        case tree: Alternative                  => typedAlternative(tree)
-        case tree: Star                         => typedStar(tree)
-        case tree: UnApply                      => typedUnApply(tree)
-        case tree: ArrayValue                   => typedArrayValue(tree)
-        case tree: ApplyDynamic                 => typedApplyDynamic(tree)
-        case tree: ReferenceToBoxed             => typedReferenceToBoxed(tree)
         case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? TTWDRC would have to change to hold the wrapped tree (not a closure)
-        case tree: Import                       => assert(forInteractive, "!forInteractive") ; tree setType tree.symbol.tpe // should not happen in normal circumstances.
-        case _                                  => abort(s"unexpected tree: ${tree.getClass}\n$tree")
+        case _                                  => abort(s"unexpected type-representing tree: ${tree.getClass}\n$tree")
+      }
+
+      def typedMemberDef(tree: MemberDef): Tree = tree match {
+        case tree: ValDef     => typedValDef(tree)
+        case tree: DefDef     => defDefTyper(tree).typedDefDef(tree)
+        case tree: ClassDef   => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
+        case tree: ModuleDef  => newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
+        case tree: TypeDef    => typedTypeDef(tree)
+        case tree: PackageDef => typedPackageDef(tree)
+        case _                => abort(s"unexpected member def: ${tree.getClass}\n$tree")
+      }
+
+      // Trees not allowed during pattern mode.
+      def typedOutsidePatternMode(tree: Tree): Tree = tree match {
+        case tree: Block            => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt))
+        case tree: If               => typedIf(tree)
+        case tree: TypeApply        => typedTypeApply(tree)
+        case tree: Function         => typedFunction(tree)
+        case tree: Match            => typedVirtualizedMatch(tree)
+        case tree: New              => typedNew(tree)
+        case tree: Assign           => typedAssign(tree.lhs, tree.rhs)
+        case tree: AssignOrNamedArg => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck
+        case tree: Super            => typedSuper(tree)
+        case tree: Annotated        => typedAnnotated(tree)
+        case tree: Return           => typedReturn(tree)
+        case tree: Try              => typedTry(tree)
+        case tree: Throw            => typedThrow(tree)
+        case tree: ArrayValue       => typedArrayValue(tree)
+        case tree: ApplyDynamic     => typedApplyDynamic(tree)
+        case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
+        case tree: LabelDef         => labelTyper(tree).typedLabelDef(tree)
+        case tree: DocDef           => typedDocDef(tree, mode, pt)
+        case _                      => abort(s"unexpected tree: ${tree.getClass}\n$tree")
+      }
+
+      // Trees allowed in or out of pattern mode.
+      def typedInAnyMode(tree: Tree): Tree = tree match {
+        case tree: Ident   => typedIdentOrWildcard(tree)
+        case tree: Bind    => typedBind(tree)
+        case tree: Apply   => typedApply(tree)
+        case tree: Select  => typedSelectOrSuperCall(tree)
+        case tree: Literal => typedLiteral(tree)
+        case tree: Typed   => typedTyped(tree)
+        case tree: This    => typedThis(tree)  // SI-6104
+        case tree: UnApply => abort(s"unexpected UnApply $tree") // turns out UnApply never reaches here
+        case _             =>
+          if (mode.inPatternMode)
+            typedInPatternMode(tree)
+          else
+            typedOutsidePatternMode(tree)
+      }
+
+      // begin typed1
+      tree match {
+        case tree: TypTree   => typedTypTree(tree)
+        case tree: MemberDef => typedMemberDef(tree)
+        case _               => typedInAnyMode(tree)
       }
     }
 
-    /**
-     *  @param tree ...
-     *  @param mode ...
-     *  @param pt   ...
-     *  @return     ...
-     */
-    def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+    def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
       lastTreeToTyper = tree
-      indentTyping()
-
-      val ptPlugins = pluginsPt(pt, this, tree, mode)
-
+      def body = (
+        if (printTypings && !phase.erasedTypes && !noPrintTyping(tree))
+          typingStack.nextTyped(tree, mode, pt, context)(typedInternal(tree, mode, pt))
+        else
+          typedInternal(tree, mode, pt)
+      )
       val startByType = if (Statistics.canEnable) Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null
       if (Statistics.canEnable) Statistics.incCounter(visitsByType, tree.getClass)
-      try {
-        if (context.retyping &&
-            (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins))) {
+      try body
+      finally if (Statistics.canEnable) Statistics.popTimer(byTypeStack, startByType)
+    }
+
+    private def typedInternal(tree: Tree, mode: Mode, pt: Type): Tree = {
+      val ptPlugins = pluginsPt(pt, this, tree, mode)
+      def retypingOk = (
+            context.retyping
+        && (tree.tpe ne null)
+        && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins))
+      )
+      def runTyper(): Tree = {
+        if (retypingOk) {
           tree.tpe = null
           if (tree.hasSymbol) tree.symbol = NoSymbol
         }
-
         val alreadyTyped = tree.tpe ne null
-        var tree1: Tree = if (alreadyTyped) tree else {
-          printTyping(
-            ptLine("typing %s: pt = %s".format(ptTree(tree), ptPlugins),
-              "undetparams"      -> context.undetparams,
-              "implicitsEnabled" -> context.implicitsEnabled,
-              "enrichmentEnabled"   -> context.enrichmentEnabled,
-              "mode"             -> modeString(mode),
-              "silent"           -> context.bufferErrors,
-              "context.owner"    -> context.owner
-            )
-          )
-          typed1(tree, mode, dropExistential(ptPlugins))
-        }
+        val shouldPrint = !alreadyTyped && !phase.erasedTypes
+        val ptWild = if (mode.inPatternMode)
+          ptPlugins // SI-5022 don't widen pt for patterns as types flow from it to the case body.
+        else
+          dropExistential(ptPlugins) // FIXME: document why this is done.
+        val tree1: Tree = if (alreadyTyped) tree else typed1(tree, mode, ptWild)
+        if (shouldPrint)
+          typingStack.showTyped(tree1)
+
         // Can happen during erroneous compilation - error(s) have been
         // reported, but we need to avoid causing an NPE with this tree
         if (tree1.tpe eq null)
           return setError(tree)
 
-        if (!alreadyTyped) {
-          printTyping("typed %s: %s%s".format(
-            ptTree(tree1), tree1.tpe,
-            if (isSingleType(tree1.tpe)) " with underlying "+tree1.tpe.widen else "")
-          )
-        }
+        tree1 modifyType (pluginsTyped(_, this, tree1, mode, ptPlugins))
 
-        tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, ptPlugins)
         val result =
           if (tree1.isEmpty) tree1
           else {
@@ -5661,84 +5318,82 @@ trait Typers extends Modes with Adaptations with Tags {
             if (hasPendingMacroExpansions) macroExpandAll(this, result) else result
           }
 
-        if (!alreadyTyped) {
-          printTyping("adapted %s: %s to %s, %s".format(
-            tree1, tree1.tpe.widen, ptPlugins, context.undetparamsString)
-          ) //DEBUG
-        }
-        if (!isPastTyper) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
+        if (shouldPrint)
+          typingStack.showAdapt(tree1, result, ptPlugins, context)
+
+        if (!isPastTyper)
+          signalDone(context.asInstanceOf[analyzer.Context], tree, result)
+
         result
-      } catch {
+      }
+
+      try runTyper() catch {
         case ex: TypeError =>
-          tree.tpe = null
+          tree.clearType()
           // The only problematic case are (recoverable) cyclic reference errors which can pop up almost anywhere.
-          printTyping("caught %s: while typing %s".format(ex, tree)) //DEBUG
-
+          typingStack.printTyping(tree, "caught %s: while typing %s".format(ex, tree)) //DEBUG
           reportTypeError(context, tree.pos, ex)
           setError(tree)
         case ex: Exception =>
-          if (settings.debug.value) // @M causes cyclic reference error
-            Console.println("exception when typing "+tree+", pt = "+ptPlugins)
+          // @M causes cyclic reference error
+          devWarning(s"exception when typing $tree, pt=$ptPlugins")
           if (context != null && context.unit.exists && tree != null)
-            logError("AT: " + (tree.pos).dbgString, ex)
+            logError("AT: " + tree.pos, ex)
           throw ex
       }
-      finally {
-        deindentTyping()
-        if (Statistics.canEnable) Statistics.popTimer(byTypeStack, startByType)
-      }
     }
 
     def atOwner(owner: Symbol): Typer =
-      newTyper(context.make(context.tree, owner))
+      newTyper(context.make(owner = owner))
 
     def atOwner(tree: Tree, owner: Symbol): Typer =
       newTyper(context.make(tree, owner))
 
-    /** Types expression or definition <code>tree</code>.
-     *
-     *  @param tree ...
-     *  @return     ...
+    /** Types expression or definition `tree`.
      */
     def typed(tree: Tree): Tree = {
-      val ret = typed(tree, EXPRmode, WildcardType)
+      val ret = typed(tree, context.defaultModeForTyped, WildcardType)
       ret
     }
 
-    def typedPos(pos: Position, mode: Int, pt: Type)(tree: Tree) = typed(atPos(pos)(tree), mode, pt)
+    def typedByValueExpr(tree: Tree, pt: Type = WildcardType): Tree = typed(tree, EXPRmode | BYVALmode, pt)
+
+    def typedPos(pos: Position, mode: Mode, pt: Type)(tree: Tree) = typed(atPos(pos)(tree), mode, pt)
     def typedPos(pos: Position)(tree: Tree) = typed(atPos(pos)(tree))
     // TODO: see if this formulation would impose any penalty, since
     // it makes for a lot less casting.
     // def typedPos[T <: Tree](pos: Position)(tree: T): T = typed(atPos(pos)(tree)).asInstanceOf[T]
 
-    /** Types expression <code>tree</code> with given prototype <code>pt</code>.
-     *
-     *  @param tree ...
-     *  @param pt   ...
-     *  @return     ...
+    /** Types expression `tree` with given prototype `pt`.
      */
     def typed(tree: Tree, pt: Type): Tree =
-      typed(tree, EXPRmode, pt)
+      typed(tree, context.defaultModeForTyped, pt)
+
+    def typed(tree: Tree, mode: Mode): Tree =
+      typed(tree, mode, WildcardType)
 
-    /** Types qualifier <code>tree</code> of a select node.
-     *  E.g. is tree occurs in a context like <code>tree.m</code>.
+    /** Types qualifier `tree` of a select node.
+     *  E.g. if tree occurs in a context like `tree.m`.
      */
-    def typedQualifier(tree: Tree, mode: Int, pt: Type): Tree =
-      typed(tree, EXPRmode | QUALmode | POLYmode | mode & TYPEPATmode, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit
+    def typedQualifier(tree: Tree, mode: Mode, pt: Type): Tree =
+      typed(tree, PolyQualifierModes | mode.onlyTypePat, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit
 
-    /** Types qualifier <code>tree</code> of a select node.
-     *  E.g. is tree occurs in a context like <code>tree.m</code>.
+    /** Types qualifier `tree` of a select node.
+     *  E.g. if tree occurs in a context like `tree.m`.
      */
-    def typedQualifier(tree: Tree, mode: Int): Tree =
+    def typedQualifier(tree: Tree, mode: Mode): Tree =
       typedQualifier(tree, mode, WildcardType)
 
     def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode, WildcardType)
 
     /** Types function part of an application */
-    def typedOperator(tree: Tree): Tree =
-      typed(tree, EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType)
+    def typedOperator(tree: Tree): Tree = typed(tree, OperatorModes)
 
-    /** Types a pattern with prototype <code>pt</code> */
+    // the qualifier type of a supercall constructor is its first parent class
+    private def typedSelectOrSuperQualifier(qual: Tree) =
+      context withinSuperInit typed(qual, PolyQualifierModes)
+
+    /** Types a pattern with prototype `pt` */
     def typedPattern(tree: Tree, pt: Type): Tree = {
       // We disable implicits because otherwise some constructs will
       // type check which should not.  The pattern matcher does not
@@ -5760,30 +5415,28 @@ trait Typers extends Modes with Adaptations with Tags {
       // TODO: can we achieve the pattern matching bit of the string interpolation SIP without this?
       typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt))) match {
         case tpt if tpt.isType => PatternMustBeValue(tpt, pt); tpt
-        case pat => pat
+        case pat               => pat
       }
     }
 
     /** Types a (fully parameterized) type tree */
-    def typedType(tree: Tree, mode: Int): Tree =
-      typed(tree, forTypeMode(mode), WildcardType)
+    def typedType(tree: Tree, mode: Mode): Tree =
+      typed(tree, mode.forTypeMode, WildcardType)
 
     /** Types a (fully parameterized) type tree */
     def typedType(tree: Tree): Tree = typedType(tree, NOmode)
 
-    /** Types a higher-kinded type tree -- pt denotes the expected kind*/
-    def typedHigherKindedType(tree: Tree, mode: Int, pt: Type): Tree =
-      if (pt.typeParams.isEmpty) typedType(tree, mode) // kind is known and it's *
-      else typed(tree, HKmode, pt)
-
-    def typedHigherKindedType(tree: Tree, mode: Int): Tree =
-      typed(tree, HKmode, WildcardType)
+    /** Types a higher-kinded type tree -- pt denotes the expected kind and must be either `Kind.Wildcard` or `Kind.FromParams` */
+    def typedHigherKindedType(tree: Tree, mode: Mode, pt: Type): Tree =
+      if (pt != Kind.Wildcard && pt.typeParams.isEmpty) typedType(tree, mode) // kind is known and it's *
+      else context withinTypeConstructorAllowed typed(tree, NOmode, pt)
 
-    def typedHigherKindedType(tree: Tree): Tree = typedHigherKindedType(tree, NOmode)
+    def typedHigherKindedType(tree: Tree, mode: Mode): Tree =
+      context withinTypeConstructorAllowed typed(tree)
 
     /** Types a type constructor tree used in a new or supertype */
-    def typedTypeConstructor(tree: Tree, mode: Int): Tree = {
-      val result = typed(tree, forTypeMode(mode) | FUNmode, WildcardType)
+    def typedTypeConstructor(tree: Tree, mode: Mode): Tree = {
+      val result = typed(tree, mode.forTypeMode | FUNmode, WildcardType)
 
       // get rid of type aliases for the following check (#1241)
       result.tpe.dealias match {
@@ -5804,7 +5457,7 @@ trait Typers extends Modes with Adaptations with Tags {
 
     def computeType(tree: Tree, pt: Type): Type = {
       // macros employ different logic of `computeType`
-      assert(!context.owner.isTermMacro, context.owner)
+      assert(!context.owner.isMacro, context.owner)
       val tree1 = typed(tree, pt)
       transformed(tree) = tree1
       val tpe = packedType(tree1, context.owner)
@@ -5812,60 +5465,63 @@ trait Typers extends Modes with Adaptations with Tags {
       tpe
     }
 
-    def computeMacroDefType(tree: Tree, pt: Type): Type = {
-      assert(context.owner.isTermMacro, context.owner)
-      assert(tree.symbol.isTermMacro, tree.symbol)
-      assert(tree.isInstanceOf[DefDef], tree.getClass)
-      val ddef = tree.asInstanceOf[DefDef]
+    def computeMacroDefType(ddef: DefDef, pt: Type): Type = {
+      assert(context.owner.isMacro, context.owner)
+      assert(ddef.symbol.isMacro, ddef.symbol)
 
-      val tree1 =
+      val rhs1 =
         if (transformed contains ddef.rhs) {
           // macro defs are typechecked in `methodSig` (by calling this method) in order to establish their link to macro implementation asap
           // if a macro def doesn't have explicitly specified return type, this method will be called again by `assignTypeToTree`
           // here we guard against this case
           transformed(ddef.rhs)
         } else {
-          val tree1 = typedMacroBody(this, ddef)
-          transformed(ddef.rhs) = tree1
-          tree1
+          val rhs1 = typedMacroBody(this, ddef)
+          transformed(ddef.rhs) = rhs1
+          rhs1
         }
 
-      val isMacroBodyOkay = !tree.symbol.isErroneous && !(tree1 exists (_.isErroneous)) && tree1 != EmptyTree
+      val isMacroBodyOkay = !ddef.symbol.isErroneous && !(rhs1 exists (_.isErroneous)) && rhs1 != EmptyTree
       val shouldInheritMacroImplReturnType = ddef.tpt.isEmpty
-      if (isMacroBodyOkay && shouldInheritMacroImplReturnType) computeMacroDefTypeFromMacroImpl(ddef, tree1.symbol) else AnyClass.tpe
-    }
-
-    def transformedOr(tree: Tree, op: => Tree): Tree = transformed.get(tree) match {
-      case Some(tree1) => transformed -= tree; tree1
-      case None => op
+      if (isMacroBodyOkay && shouldInheritMacroImplReturnType) {
+        val commonMessage = "macro defs must have explicitly specified return types"
+        def reportFailure() = {
+          ddef.symbol.setFlag(IS_ERROR)
+          unit.error(ddef.pos, commonMessage)
+        }
+        def reportWarning(inferredType: Type) = {
+          val explanation = s"inference of $inferredType from macro impl's c.Expr[$inferredType] is deprecated and is going to stop working in 2.12"
+          unit.deprecationWarning(ddef.pos, s"$commonMessage ($explanation)")
+        }
+        computeMacroDefTypeFromMacroImplRef(ddef, rhs1) match {
+          case ErrorType => ErrorType
+          case NothingTpe => NothingTpe
+          case NoType => reportFailure(); AnyTpe
+          case tpe => reportWarning(tpe); tpe
+        }
+      } else AnyTpe
     }
 
-    def transformedOrTyped(tree: Tree, mode: Int, pt: Type): Tree = transformed.get(tree) match {
-      case Some(tree1) => transformed -= tree; tree1
-      case None => typed(tree, mode, pt)
+    def transformedOr(tree: Tree, op: => Tree): Tree = transformed remove tree match {
+      case Some(tree1) => tree1
+      case _           => op
     }
 
-/*
-    def convertToTypeTree(tree: Tree): Tree = tree match {
-      case TypeTree() => tree
-      case _ => TypeTree(tree.tpe)
+    def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = transformed remove tree match {
+      case Some(tree1) => tree1
+      case _           => typed(tree, mode, pt)
     }
-*/
   }
 }
 
 object TypersStats {
   import scala.reflect.internal.TypesStats._
-  import scala.reflect.internal.BaseTypeSeqsStats._
   val typedIdentCount     = Statistics.newCounter("#typechecked identifiers")
   val typedSelectCount    = Statistics.newCounter("#typechecked selections")
   val typedApplyCount     = Statistics.newCounter("#typechecked applications")
   val rawTypeFailed       = Statistics.newSubCounter ("  of which in failed", rawTypeCount)
   val subtypeFailed       = Statistics.newSubCounter("  of which in failed", subtypeCount)
   val findMemberFailed    = Statistics.newSubCounter("  of which in failed", findMemberCount)
-  val compoundBaseTypeSeqCount = Statistics.newSubCounter("  of which for compound types", baseTypeSeqCount)
-  val typerefBaseTypeSeqCount = Statistics.newSubCounter("  of which for typerefs", baseTypeSeqCount)
-  val singletonBaseTypeSeqCount = Statistics.newSubCounter("  of which for singletons", baseTypeSeqCount)
   val failedSilentNanos   = Statistics.newSubTimer("time spent in failed", typerNanos)
   val failedApplyNanos    = Statistics.newSubTimer("  failed apply", typerNanos)
   val failedOpEqNanos     = Statistics.newSubTimer("  failed op=", typerNanos)
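
An aside on the `transformedOr`/`transformedOrTyped` change in the hunk above: the cached tree in `transformed` is now consumed with a single `remove` instead of a `get` followed by `-=`. A minimal, standalone sketch of that consume-once caching pattern (names and types here are illustrative, not compiler API):

    import scala.collection.mutable

    // A precomputed result is used at most once and then dropped; any
    // later request for the same key falls through to the computation.
    object ConsumeOnceCacheDemo {
      private val cache = mutable.Map.empty[String, Int]

      def getOr(key: String)(op: => Int): Int = cache.remove(key) match {
        case Some(v) => v   // consume the memoized value
        case None    => op  // otherwise compute it
      }

      def main(args: Array[String]): Unit = {
        cache("a") = 42
        println(getOr("a")(0)) // 42, taken from the cache and removed
        println(getOr("a")(0)) // 0, the entry is gone on the second call
      }
    }
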
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala
new file mode 100644
index 0000000..550fd4e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala
@@ -0,0 +1,168 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala.tools.nsc
+package typechecker
+
+import scala.collection.mutable
+import scala.reflect.internal.util.{ BatchSourceFile, Statistics }
+import mutable.ListBuffer
+import Mode._
+
+trait TypersTracking {
+  self: Analyzer =>
+
+  import global._
+  import typeDebug._
+
+  // To enable decent error messages when the typer crashes.
+  // TODO - this only catches trees which go through def typed,
+  // but there are all kinds of back ways - typedClassDef, etc. etc.
+  // Funnel everything through one doorway.
+  var lastTreeToTyper: Tree = EmptyTree
+
+  def fullSiteString(context: Context): String = {
+    def owner_long_s = (
+      if (settings.debug.value) {
+        def flags_s = context.owner.debugFlagString match {
+          case "" => ""
+          case s  => " with flags " + inLightMagenta(s)
+        }
+        s", a ${context.owner.shortSymbolClass}$flags_s"
+      }
+      else ""
+    )
+    def marker = if (context.bufferErrors) "silent" else "site"
+    def undet_s = context.undetparams match {
+      case Nil => ""
+      case ps  => ps.mkString(" solving: ", ",", "")
+    }
+    def implicits_s = (
+      if (context.enrichmentEnabled)
+        if (context.implicitsEnabled) ""
+        else inLightRed("enrichment only")
+      else inLightRed("implicits disabled")
+    )
+
+    s"($marker$undet_s: ${context.siteString}$owner_long_s) $implicits_s"
+  }
+
+  object typingStack {
+    val out = new java.io.PrintWriter(System.err, true)
+
+    // TODO - account for colors so the color of a multiline string
+    // doesn't infect the connector lines
+    private def currentIndent = "|    " * depth
+
+    private var trees: List[Frame] = Nil
+    private var depth = 0
+    private def atLowerIndent[T](body: => T): T = {
+      depth -= 1
+      try body finally depth += 1
+    }
+    private def resetIfEmpty(s: String) = if (trees.isEmpty) resetColor(s) else s
+
+    private def truncAndOneLine(s: String): String = {
+      val s1 = s.replaceAll("\\s+", " ")
+      if (s1.length < 60 || settings.debug.value) s1 else s1.take(57) + "..."
+    }
+
+    private class Frame(val tree: Tree) { }
+    private def greenType(tp: Type): String = tpe_s(tp, inGreen)
+    private def greenType(tree: Tree): String = tree match {
+      case null                              => "[exception]"
+      case md: MemberDef if md.tpe == NoType => inBlue(s"[${md.keyword} ${md.name}]") + " " + greenType(md.symbol.tpe)
+      case _ if tree.tpe.isComplete          => greenType(tree.tpe)
+      case _                                 => "<?>"
+    }
+    def indented(s: String): String =
+      if (s == "") "" else currentIndent + s.replaceAll("\n", "\n" + currentIndent)
+
+    @inline final def runWith[T](t: Tree)(body: => T): T = {
+      push(t)
+      try body finally pop(t)
+    }
+    def push(t: Tree): Unit = {
+      trees ::= new Frame(t)
+      depth += 1
+    }
+    def pop(t: Tree): Unit = {
+      val frame = trees.head
+      assert(frame.tree eq t, ((frame.tree, t)))
+      trees = trees.tail
+      depth -= 1
+    }
+    def show(s: String)     { if (s != "") out.println(s) }
+
+    def showPush(tree: Tree, context: Context) {
+      showPush(tree, NOmode, WildcardType, context)
+    }
+    def showPush(tree: Tree, mode: Mode, pt: Type, context: Context) {
+      def tree_s = truncAndOneLine(ptTree(tree))
+      def pt_s = if (pt.isWildcard || context.inTypeConstructorAllowed) "" else s": pt=$pt"
+      def all_s = List(tree_s, pt_s, mode, fullSiteString(context)) filterNot (_ == "") mkString " "
+
+      atLowerIndent(show(indented("""|-- """ + all_s)))
+    }
+    def showPop(typedTree: Tree): Tree = {
+      val s = greenType(typedTree)
+      show(resetIfEmpty(indented("""\-> """ + s)))
+      typedTree
+    }
+    def showAdapt(original: Tree, adapted: Tree, pt: Type, context: Context) {
+      if (!noPrintAdapt(original, adapted)) {
+        def tree_s1 = inLightCyan(truncAndOneLine(ptTree(original)))
+        def pt_s = if (pt.isWildcard) "" else s" based on pt $pt"
+        def tree_s2 = adapted match {
+          case tt: TypeTree => "is now a TypeTree(" + tpe_s(tt.tpe, inCyan) + ")"
+          case _            => "adapted to " + inCyan(truncAndOneLine(ptTree(adapted))) + pt_s
+        }
+        show(indented(s"[adapt] $tree_s1 $tree_s2"))
+      }
+    }
+    def showTyped(tree: Tree) {
+      def class_s = tree match {
+        case _: RefTree => ""
+        case _          => " " + tree.shortClass
+      }
+      if (!noPrintTyping(tree))
+        show(indented(s"[typed$class_s] " + truncAndOneLine(ptTree(tree))))
+    }
+
+    def nextTyped(tree: Tree, mode: Mode, pt: Type, context: Context)(body: => Tree): Tree =
+      nextTypedInternal(tree, showPush(tree, mode, pt, context))(body)
+
+    def nextTypedInternal(tree: Tree, pushFn: => Unit)(body: => Tree): Tree = (
+      if (noPrintTyping(tree))
+        body
+      else
+        runWith(tree) { pushFn ; showPop(body) }
+    )
+
+    @inline final def printTyping(tree: Tree, s: => String) = {
+      if (printTypings && !noPrintTyping(tree))
+        show(indented(s))
+    }
+    @inline final def printTyping(s: => String) = {
+      if (printTypings)
+        show(indented(s))
+    }
+  }
+  def tpe_s(tp: Type, colorize: String => String): String = tp match {
+    case OverloadedType(pre, alts) => alts map (alt => tpe_s(pre memberType alt, colorize)) mkString " <and> "
+    case _                         => colorize(tp.toLongString)
+  }
+  // def sym_s(s: Symbol) = if (s eq null) "" + s else s.getClass.getName split '.' last;
+
+  // Some trees which are typed with mind-numbing frequency and
+  // which add nothing by being printed. Did () type to Unit? Let's
+  // gamble on yes.
+  private def printingOk(t: Tree) = printTypings && (settings.debug.value || !noPrint(t))
+  def noPrintTyping(t: Tree) = (t.tpe ne null) || !printingOk(t)
+  def noPrintAdapt(tree1: Tree, tree2: Tree) = !printingOk(tree1) || (
+       (tree1.tpe == tree2.tpe)
+    && (tree1.symbol == tree2.symbol)
+  )
+}
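
The new `typingStack` above drives the typer's tracing output: each `typed` call pushes a frame, every message is indented by the current depth, and the result type is printed on pop. A rough, self-contained sketch of that indentation-stack idea (not the compiler's code; all names below are made up):

    // Push on entry, pop on exit, indent by depth, so nested typechecking
    // reads as a tree of "|--" (enter) and "\->" (result) lines.
    object TraceStackDemo {
      private var depth = 0
      private def indented(s: String) = ("|    " * depth) + s

      def traced[T](label: String)(body: => T): T = {
        println(indented("|-- " + label))
        depth += 1
        try body finally {
          depth -= 1
          println(indented("""\-> done """ + label))
        }
      }

      def main(args: Array[String]): Unit =
        traced("typed Apply") {
          traced("typed Select") { () }
          traced("typed Ident") { () }
        }
    }
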
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 31c5a61..cc2d914 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -12,8 +12,7 @@ import symtab.Flags._
  *  @author  Martin Odersky
  *  @version 1.0
  */
-trait Unapplies extends ast.TreeDSL
-{
+trait Unapplies extends ast.TreeDSL {
   self: Analyzer =>
 
   import global._
@@ -21,8 +20,8 @@ trait Unapplies extends ast.TreeDSL
   import CODE.{ CASE => _, _ }
   import treeInfo.{ isRepeatedParamType, isByNameParamType }
 
-  private val unapplyParamName = nme.x_0
-
+  private def unapplyParamName = nme.x_0
+  private def caseMods         = Modifiers(SYNTHETIC | CASE)
 
   // In the typeCompleter (templateSig) of a case class (resp. its module),
   // synthetic `copy` (resp. `apply`, `unapply`) methods are added. To compute
@@ -31,51 +30,17 @@ trait Unapplies extends ast.TreeDSL
   // moduleClass symbol of the companion module.
   class ClassForCaseCompanionAttachment(val caseClass: ClassDef)
 
-  /** returns type list for return type of the extraction
-   * @see extractorFormalTypes
+  /** Returns unapply or unapplySeq if available, without further checks.
    */
-  def unapplyTypeList(pos: Position, ufn: Symbol, ufntpe: Type, args: List[Tree]) = {
-    assert(ufn.isMethod, ufn)
-    val nbSubPats = args.length
-    //Console.println("utl "+ufntpe+" "+ufntpe.typeSymbol)
-    ufn.name match {
-      case nme.unapply | nme.unapplySeq =>
-        val (formals, _) = extractorFormalTypes(pos, unapplyUnwrap(ufntpe), nbSubPats, ufn, treeInfo.effectivePatternArity(args))
-        if (formals == null) throw new TypeError(s"$ufn of type $ufntpe cannot extract $nbSubPats sub-patterns")
-        else formals
-      case _ => throw new TypeError(ufn+" is not an unapply or unapplySeq")
-    }
-  }
+  def directUnapplyMember(tp: Type): Symbol = (tp member nme.unapply) orElse (tp member nme.unapplySeq)
 
-  /** returns type of the unapply method returning T_0...T_n
-   *  for n == 0, boolean
-   *  for n == 1, Some[T0]
-   *  else Some[Product[Ti]]
+  /** Filters out unapplies with multiple (non-implicit) parameter lists,
+   *  as they cannot be used as extractors
    */
-  def unapplyReturnTypeExpected(argsLength: Int) = argsLength match {
-    case 0 => BooleanClass.tpe
-    case 1 => optionType(WildcardType)
-    case n => optionType(productType((List fill n)(WildcardType)))
-  }
+  def unapplyMember(tp: Type): Symbol = directUnapplyMember(tp) filter (sym => !hasMultipleNonImplicitParamLists(sym))
 
-  /** returns unapply or unapplySeq if available */
-  def unapplyMember(tp: Type): Symbol = (tp member nme.unapply) match {
-    case NoSymbol => tp member nme.unapplySeq
-    case unapp    => unapp
-  }
-  /** returns unapply member's parameter type. */
-  def unapplyParameterType(extractor: Symbol) = extractor.tpe.params match {
-    case p :: Nil => p.tpe.typeSymbol
-    case _        => NoSymbol
-  }
-
-  def copyUntyped[T <: Tree](tree: T): T =
-    returning[T](tree.duplicate)(UnTyper traverse _)
-
-  def copyUntypedInvariant(td: TypeDef): TypeDef = {
-    val copy = treeCopy.TypeDef(td, td.mods &~ (COVARIANT | CONTRAVARIANT), td.name, td.tparams, td.rhs)
-
-    returning[TypeDef](copy.duplicate)(UnTyper traverse _)
+  object HasUnapply {
+    def unapply(tp: Type): Option[Symbol] = unapplyMember(tp).toOption
   }
 
   private def toIdent(x: DefTree) = Ident(x.name) setPos x.pos.focus
@@ -87,8 +52,15 @@ trait Unapplies extends ast.TreeDSL
   }
 
   private def constrParamss(cdef: ClassDef): List[List[ValDef]] = {
-    val DefDef(_, _, _, vparamss, _, _) = treeInfo firstConstructor cdef.impl.body
-    mmap(vparamss)(copyUntyped[ValDef])
+    val ClassDef(_, _, _, Template(_, _, body)) = resetAttrs(cdef.duplicate)
+    val DefDef(_, _, _, vparamss, _, _) = treeInfo firstConstructor body
+    vparamss
+  }
+
+  private def constrTparamsInvariant(cdef: ClassDef): List[TypeDef] = {
+    val ClassDef(_, _, tparams, _) = resetAttrs(cdef.duplicate)
+    val tparamsInvariant = tparams.map(tparam => copyTypeDef(tparam)(mods = tparam.mods &~ (COVARIANT | CONTRAVARIANT)))
+    tparamsInvariant
   }
 
   /** The return value of an unapply method of a case class C[Ts]
@@ -97,25 +69,19 @@ trait Unapplies extends ast.TreeDSL
    */
   private def caseClassUnapplyReturnValue(param: Name, caseclazz: ClassDef) = {
     def caseFieldAccessorValue(selector: ValDef): Tree = {
-      val accessorName = selector.name
-      val privateLocalParamAccessor = caseclazz.impl.body.collectFirst {
-        case dd: ValOrDefDef if dd.name == accessorName && dd.mods.isPrivateLocal => dd.symbol
-      }
-      privateLocalParamAccessor match {
-        case None =>
-          // Selecting by name seems to be the most straight forward way here to
-          // avoid forcing the symbol of the case class in order to list the accessors.
-          val maybeRenamedAccessorName = caseAccessorName(caseclazz.symbol, accessorName)
-          Ident(param) DOT maybeRenamedAccessorName
-        case Some(sym) =>
-          // But, that gives a misleading error message in neg/t1422.scala, where a case
-          // class has an illegal private[this] parameter. We can detect this by checking
-          // the modifiers on the param accessors.
-          //
-          // We just generate a call to that param accessor here, which gives us an inaccessible
-          // symbol error, as before.
-          Ident(param) DOT sym
+      // Selecting by name seems to be the most straightforward way here to
+      // avoid forcing the symbol of the case class in order to list the accessors.
+      def selectByName = Ident(param) DOT caseAccessorName(caseclazz.symbol, selector.name)
+      // But, that gives a misleading error message in neg/t1422.scala, where a case
+      // class has an illegal private[this] parameter. We can detect this by checking
+      // the modifiers on the param accessors.
+      // We just generate a call to that param accessor here, which gives us an inaccessible
+      // symbol error, as before.
+      def localAccessor = caseclazz.impl.body find {
+        case t @ ValOrDefDef(mods, selector.name, _, _) => mods.isPrivateLocal
+        case _                                          => false
       }
+      localAccessor.fold(selectByName)(Ident(param) DOT _.symbol)
     }
 
     // Working with trees, rather than symbols, to avoid cycles like SI-5082
@@ -128,11 +94,16 @@ trait Unapplies extends ast.TreeDSL
   /** The module corresponding to a case class; overrides toString to show the module's name
    */
   def caseModuleDef(cdef: ClassDef): ModuleDef = {
-    // > MaxFunctionArity is caught in Namers, but for nice error reporting instead of
-    // an abrupt crash we trim the list here.
-    def primaries      = constrParamss(cdef).head take MaxFunctionArity map (_.tpt)
-    def inheritFromFun = !cdef.mods.hasAbstractFlag && cdef.tparams.isEmpty && constrParamss(cdef).length == 1
-    def createFun      = gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
+    val params = constrParamss(cdef)
+    def inheritFromFun = !cdef.mods.hasAbstractFlag && cdef.tparams.isEmpty && (params match {
+      case List(ps) if ps.length <= MaxFunctionArity => true
+      case _ => false
+    })
+    def createFun = {
+      def primaries = params.head map (_.tpt)
+      gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
+    }
+
     def parents        = if (inheritFromFun) List(createFun) else Nil
     def toString       = DefDef(
       Modifiers(OVERRIDE | FINAL | SYNTHETIC),
@@ -149,15 +120,13 @@ trait Unapplies extends ast.TreeDSL
     ModuleDef(
       Modifiers(cdef.mods.flags & AccessFlags | SYNTHETIC, cdef.mods.privateWithin),
       cdef.name.toTermName,
-      Template(parents, emptyValDef, NoMods, Nil, ListOfNil, body, cdef.impl.pos.focus))
+      gen.mkTemplate(parents, noSelfType, NoMods, Nil, body, cdef.impl.pos.focus))
   }
 
-  private val caseMods = Modifiers(SYNTHETIC | CASE)
-
   /** The apply method corresponding to a case class
    */
   def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef): DefDef = {
-    val tparams   = cdef.tparams map copyUntypedInvariant
+    val tparams   = constrTparamsInvariant(cdef)
     val cparamss  = constrParamss(cdef)
     def classtpe = classType(cdef, tparams)
     atPos(cdef.pos.focus)(
@@ -173,7 +142,7 @@ trait Unapplies extends ast.TreeDSL
   /** The unapply method corresponding to a case class
    */
   def caseModuleUnapplyMeth(cdef: ClassDef): DefDef = {
-    val tparams   = cdef.tparams map copyUntypedInvariant
+    val tparams   = constrTparamsInvariant(cdef)
     val method    = constrParamss(cdef) match {
       case xs :: _ if xs.nonEmpty && isRepeatedParamType(xs.last.tpt) => nme.unapplySeq
       case _                                                          => nme.unapply
@@ -228,7 +197,7 @@ trait Unapplies extends ast.TreeDSL
         treeCopy.ValDef(vd, Modifiers(flags), vd.name, tpt, rhs)
       }
 
-      val tparams = cdef.tparams map copyUntypedInvariant
+      val tparams = constrTparamsInvariant(cdef)
       val paramss = classParamss match {
         case Nil => Nil
         case ps :: pss =>
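
For orientation, the companion members this file synthesizes for a case class correspond to what one would write by hand for a plain class. A hand-written sketch of that correspondence (class and field names are chosen for the example only):

    // What `case class Point(x: Int, y: Int)` gets for free: an `apply`
    // factory and an `unapply` extractor returning the constructor fields.
    class Point(val x: Int, val y: Int)

    object Point {
      def apply(x: Int, y: Int): Point = new Point(x, y)
      def unapply(p: Point): Option[(Int, Int)] = Some((p.x, p.y))
    }

    object UnapplyDemo {
      def main(args: Array[String]): Unit =
        Point(1, 2) match {
          case Point(x, y) => println(s"x=$x, y=$y") // x=1, y=2
        }
    }
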
diff --git a/src/compiler/scala/tools/nsc/typechecker/Variances.scala b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
deleted file mode 100644
index ea436a7..0000000
--- a/src/compiler/scala/tools/nsc/typechecker/Variances.scala
+++ /dev/null
@@ -1,94 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Martin Odersky
- */
-
-package scala.tools.nsc
-package typechecker
-
-import symtab.Flags.{ VarianceFlags => VARIANCES, _ }
-
-/** Variances form a lattice, 0 <= COVARIANT <= Variances, 0 <= CONTRAVARIANT <= VARIANCES
- */
-trait Variances {
-
-  val global: Global
-  import global._
-
-  /** Flip between covariant and contravariant */
-  private def flip(v: Int): Int = {
-    if (v == COVARIANT) CONTRAVARIANT
-    else if (v == CONTRAVARIANT) COVARIANT
-    else v
-  }
-
-  /** Map everything below VARIANCES to 0 */
-  private def cut(v: Int): Int =
-    if (v == VARIANCES) v else 0
-
-  /** Compute variance of type parameter `tparam` in types of all symbols `sym`. */
-  def varianceInSyms(syms: List[Symbol])(tparam: Symbol): Int =
-    (VARIANCES /: syms) ((v, sym) => v & varianceInSym(sym)(tparam))
-
-  /** Compute variance of type parameter `tparam` in type of symbol `sym`. */
-  def varianceInSym(sym: Symbol)(tparam: Symbol): Int =
-    if (sym.isAliasType) cut(varianceInType(sym.info)(tparam))
-    else varianceInType(sym.info)(tparam)
-
-  /** Compute variance of type parameter `tparam` in all types `tps`. */
-  def varianceInTypes(tps: List[Type])(tparam: Symbol): Int =
-    (VARIANCES /: tps) ((v, tp) => v & varianceInType(tp)(tparam))
-
-  /** Compute variance of type parameter `tparam` in all type arguments
-   *  <code>tps</code> which correspond to formal type parameters `tparams1`.
-   */
-  def varianceInArgs(tps: List[Type], tparams1: List[Symbol])(tparam: Symbol): Int = {
-    var v: Int = VARIANCES;
-    for ((tp, tparam1) <- tps zip tparams1) {
-      val v1 = varianceInType(tp)(tparam)
-      v = v & (if (tparam1.isCovariant) v1
-	       else if (tparam1.isContravariant) flip(v1)
-	       else cut(v1))
-    }
-    v
-  }
-
-  /** Compute variance of type parameter `tparam` in all type annotations `annots`. */
-  def varianceInAttribs(annots: List[AnnotationInfo])(tparam: Symbol): Int = {
-    (VARIANCES /: annots) ((v, annot) => v & varianceInAttrib(annot)(tparam))
-  }
-
-  /** Compute variance of type parameter `tparam` in type annotation `annot`. */
-  def varianceInAttrib(annot: AnnotationInfo)(tparam: Symbol): Int = {
-    varianceInType(annot.atp)(tparam)
-  }
-
-  /** Compute variance of type parameter <code>tparam</code> in type <code>tp</code>. */
-  def varianceInType(tp: Type)(tparam: Symbol): Int = tp match {
-    case ErrorType | WildcardType | NoType | NoPrefix | ThisType(_) | ConstantType(_) =>
-      VARIANCES
-    case BoundedWildcardType(bounds) =>
-      varianceInType(bounds)(tparam)
-    case SingleType(pre, sym) =>
-      varianceInType(pre)(tparam)
-    case TypeRef(pre, sym, args) =>
-      if (sym == tparam) COVARIANT
-      // tparam cannot occur in tp's args if tp is a type constructor (those don't have args)
-      else if (tp.isHigherKinded) varianceInType(pre)(tparam)
-      else varianceInType(pre)(tparam) & varianceInArgs(args, sym.typeParams)(tparam)
-    case TypeBounds(lo, hi) =>
-      flip(varianceInType(lo)(tparam)) & varianceInType(hi)(tparam)
-    case RefinedType(parents, defs) =>
-      varianceInTypes(parents)(tparam) & varianceInSyms(defs.toList)(tparam)
-    case MethodType(params, restpe) =>
-      flip(varianceInSyms(params)(tparam)) & varianceInType(restpe)(tparam)
-    case NullaryMethodType(restpe) =>
-      varianceInType(restpe)(tparam)
-    case PolyType(tparams, restpe) =>
-      flip(varianceInSyms(tparams)(tparam)) & varianceInType(restpe)(tparam)
-    case ExistentialType(tparams, restpe) =>
-      varianceInSyms(tparams)(tparam) & varianceInType(restpe)(tparam)
-    case AnnotatedType(annots, tp, _) =>
-      varianceInAttribs(annots)(tparam) & varianceInType(tp)(tparam)
-  }
-}
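
The removed trait computed, for each type parameter, whether it is used covariantly, contravariantly, both, or neither: the variance is flipped in contravariant positions (method parameters, lower bounds, contravariant type arguments) and the results from all positions are met with `&`. In 2.11 this bookkeeping appears to live in scala-reflect's variance support instead. A toy sketch of the lattice arithmetic (the flag values are assumed here purely for illustration):

    object VarianceDemo {
      val Bivariant     = 3 // both flag bits set: unconstrained so far
      val Covariant     = 1
      val Contravariant = 2
      val Invariant     = 0

      // flip between covariant and contravariant, leave the rest alone
      def flip(v: Int): Int =
        if (v == Covariant) Contravariant
        else if (v == Contravariant) Covariant
        else v

      def main(args: Array[String]): Unit = {
        println(flip(Covariant))             // 2: a covariant use under a flip
        println(Covariant & flip(Covariant)) // 0: opposite uses force invariance
      }
    }
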
diff --git a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
index 5c6f525..e6f95eb 100644
--- a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
@@ -8,15 +8,7 @@ package util
 
 import scala.reflect.internal.Chars._
 
-abstract class CharArrayReader { self =>
-
-  val buf: Array[Char]
-
-  def decodeUni: Boolean = true
-
-  /** An error routine to call on bad unicode escapes \\uxxxx. */
-  protected def error(offset: Int, msg: String): Unit
-
+trait CharArrayReaderData {
   /** the last read character */
   var ch: Char = _
 
@@ -29,13 +21,32 @@ abstract class CharArrayReader { self =>
   /** The start offset of the line before the current one */
   var lastLineStartOffset: Int = 0
 
-  private var lastUnicodeOffset = -1
+  protected var lastUnicodeOffset = -1
+
+  def copyFrom(cd: CharArrayReaderData): this.type = {
+    this.ch = cd.ch
+    this.charOffset = cd.charOffset
+    this.lineStartOffset = cd.lineStartOffset
+    this.lastLineStartOffset = cd.lastLineStartOffset
+    this.lastUnicodeOffset = cd.lastUnicodeOffset
+    this
+  }
+}
+
+abstract class CharArrayReader extends CharArrayReaderData { self =>
+
+  val buf: Array[Char]
+
+  def decodeUni: Boolean = true
+
+  /** An error routine to call on bad unicode escapes \\uxxxx. */
+  protected def error(offset: Int, msg: String): Unit
 
   /** Is last character a unicode escape \\uxxxx? */
   def isUnicodeEscape = charOffset == lastUnicodeOffset
 
   /** Advance one character; reducing CR;LF pairs to just LF */
-  final def nextChar() {
+  final def nextChar(): Unit = {
     if (charOffset >= buf.length) {
       ch = SU
     } else {
@@ -43,7 +54,10 @@ abstract class CharArrayReader { self =>
       ch = c
       charOffset += 1
       if (c == '\\') potentialUnicode()
-      else if (c < ' ') { skipCR(); potentialLineEnd() }
+      if (ch < ' ') {
+        skipCR()
+        potentialLineEnd()
+      }
     }
   }
 
@@ -63,7 +77,7 @@ abstract class CharArrayReader { self =>
   }
 
   /** Interpret \\uxxxx escapes */
-  private def potentialUnicode() {
+  private def potentialUnicode() = {
     def evenSlashPrefix: Boolean = {
       var p = charOffset - 2
       while (p >= 0 && buf(p) == '\\') p -= 1
@@ -94,13 +108,17 @@ abstract class CharArrayReader { self =>
   }
 
   /** replace CR;LF by LF */
-  private def skipCR() {
-    if (ch == CR)
-      if (charOffset < buf.length && buf(charOffset) == LF) {
-        charOffset += 1
-        ch = LF
+  private def skipCR() =
+    if (ch == CR && charOffset < buf.length)
+      buf(charOffset) match {
+        case LF =>
+          charOffset += 1
+          ch = LF
+        case '\\' =>
+          if (lookaheadReader.getu == LF)
+            potentialUnicode()
+        case _ =>
       }
-  }
 
   /** Handle line ends */
   private def potentialLineEnd() {
@@ -121,5 +139,6 @@ abstract class CharArrayReader { self =>
     def error(offset: Int, msg: String) = self.error(offset, msg)
     /** A mystery why CharArrayReader.nextChar() returns Unit */
     def getc() = { nextChar() ; ch }
+    def getu() = { require(buf(charOffset) == '\\') ; ch = '\\' ; charOffset += 1 ; potentialUnicode() ; ch }
   }
 }
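
The `skipCR` change above also has to handle a CR that is followed by a unicode-escaped LF, which is what the new `getu` lookahead is for. Ignoring that corner case, the basic CR;LF normalisation it performs looks roughly like this (a sketch, not the compiler's reader):

    // Collapse every CR that is immediately followed by LF into a single LF
    // (a lone CR is left untouched in this sketch).
    object SkipCrDemo {
      def normalize(buf: Array[Char]): String = {
        val sb = new StringBuilder
        var i = 0
        while (i < buf.length) {
          if (buf(i) == '\r' && i + 1 < buf.length && buf(i + 1) == '\n') i += 1 // drop the CR
          sb += buf(i)
          i += 1
        }
        sb.toString
      }

      def main(args: Array[String]): Unit = {
        val normalized = normalize("a\r\nb\nc".toCharArray)
        println(normalized.count(_ == '\n')) // 2: the CR;LF pair became one LF
      }
    }
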
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index a62c87e..d2ba61c 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -11,9 +11,9 @@ import java.net.URL
 import scala.collection.{ mutable, immutable }
 import io.{ File, Directory, Path, Jar, AbstractFile }
 import scala.reflect.internal.util.StringOps.splitWhere
-import scala.reflect.ClassTag
 import Jar.isJarOrZip
 import File.pathSeparator
+import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator
 import java.net.MalformedURLException
 import java.util.regex.PatternSyntaxException
 import scala.reflect.runtime.ReflectionUtils
@@ -26,18 +26,16 @@ import scala.reflect.runtime.ReflectionUtils
  *  @author Stepan Koltsov
  */
 object ClassPath {
+  import scala.language.postfixOps
+
   /** Expand single path entry */
   private def expandS(pattern: String): List[String] = {
     val wildSuffix = File.separator + "*"
 
-    /** Get all subdirectories, jars, zips out of a directory. */
+    /* Get all subdirectories, jars, zips out of a directory. */
     def lsDir(dir: Directory, filt: String => Boolean = _ => true) =
       dir.list filter (x => filt(x.name) && (x.isDirectory || isJarOrZip(x))) map (_.path) toList
 
-    def basedir(s: String) =
-      if (s contains File.separator) s.substring(0, s.lastIndexOf(File.separator))
-      else "."
-
     if (pattern == "*") lsDir(Directory("."))
     else if (pattern endsWith wildSuffix) lsDir(Directory(pattern dropRight 2))
     else if (pattern contains '*') {
@@ -59,22 +57,6 @@ object ClassPath {
   /** Split the classpath, apply a transformation function, and reassemble it. */
   def map(cp: String, f: String => String): String = join(split(cp) map f: _*)
 
-  /** Split the classpath, filter according to predicate, and reassemble. */
-  def filter(cp: String, p: String => Boolean): String = join(split(cp) filter p: _*)
-
-  /** Split the classpath and map them into Paths */
-  def toPaths(cp: String): List[Path] = split(cp) map (x => Path(x).toAbsolute)
-
-  /** Make all classpath components absolute. */
-  def makeAbsolute(cp: String): String = fromPaths(toPaths(cp): _*)
-
-  /** Join the paths as a classpath */
-  def fromPaths(paths: Path*): String = join(paths map (_.path): _*)
-  def fromURLs(urls: URL*): String = fromPaths(urls map (x => Path(x.getPath)) : _*)
-
-  /** Split the classpath and map them into URLs */
-  def toURLs(cp: String): List[URL] = toPaths(cp) map (_.toURL)
-
   /** Expand path and possibly expanding stars */
   def expandPath(path: String, expandStar: Boolean = true): List[String] =
     if (expandStar) split(path) flatMap expandS
@@ -100,9 +82,6 @@ object ClassPath {
     )
   }
 
-  /** A useful name filter. */
-  def isTraitImplementation(name: String) = ReflectionUtils.isTraitImplementation(name)
-
   def specToURL(spec: String): Option[URL] =
     try Some(new URL(spec))
     catch { case _: MalformedURLException => None }
@@ -116,6 +95,12 @@ object ClassPath {
      */
     def isValidName(name: String): Boolean = true
 
+    /** Filters for assessing validity of various entities.
+     */
+    def validClassFile(name: String)  = endsClass(name) && isValidName(name)
+    def validPackage(name: String)    = (name != "META-INF") && (name != "") && (name.charAt(0) != '.')
+    def validSourceFile(name: String) = endsScala(name) || endsJava(name)
+
     /** From the representation to its identifier.
      */
     def toBinaryName(rep: T): String
@@ -127,31 +112,29 @@ object ClassPath {
     /** Creators for sub classpaths which preserve this context.
      */
     def sourcesInPath(path: String): List[ClassPath[T]] =
-      for (file <- expandPath(path, false) ; dir <- Option(AbstractFile getDirectory file)) yield
+      for (file <- expandPath(path, expandStar = false) ; dir <- Option(AbstractFile getDirectory file)) yield
         new SourcePath[T](dir, this)
 
     def contentsOfDirsInPath(path: String): List[ClassPath[T]] =
-      for (dir <- expandPath(path, false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
+      for (dir <- expandPath(path, expandStar = false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
         newClassPath(entry)
 
-    def classesAtAllURLS(path: String): List[ClassPath[T]] =
-      (path split " ").toList flatMap classesAtURL
-
-    def classesAtURL(spec: String) =
-      for (url <- specToURL(spec).toList ; location <- Option(AbstractFile getURL url)) yield
-        newClassPath(location)
-
     def classesInExpandedPath(path: String): IndexedSeq[ClassPath[T]] =
-      classesInPathImpl(path, true).toIndexedSeq
+      classesInPathImpl(path, expand = true).toIndexedSeq
 
-    def classesInPath(path: String) = classesInPathImpl(path, false)
+    def classesInPath(path: String) = classesInPathImpl(path, expand = false)
 
     // Internal
     private def classesInPathImpl(path: String, expand: Boolean) =
       for (file <- expandPath(path, expand) ; dir <- Option(AbstractFile getDirectory file)) yield
         newClassPath(dir)
+
+    def classesInManifest(used: Boolean) =
+      if (used) for (url <- manifests) yield newClassPath(AbstractFile getResources url) else Nil
   }
 
+  def manifests = Thread.currentThread().getContextClassLoader().getResources("META-INF/MANIFEST.MF").filter(_.getProtocol() == "jar").toList
+
   class JavaContext extends ClassPathContext[AbstractFile] {
     def toBinaryName(rep: AbstractFile) = {
       val name = rep.name
@@ -161,9 +144,7 @@ object ClassPath {
     def newClassPath(dir: AbstractFile) = new DirectoryClassPath(dir, this)
   }
 
-  object DefaultJavaContext extends JavaContext {
-    override def isValidName(name: String) = !ReflectionUtils.scalacShouldntLoadClassfile(name)
-  }
+  object DefaultJavaContext extends JavaContext
 
   private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
   private def endsScala(s: String) = s.length > 6 && s.substring(s.length - 6) == ".scala"
@@ -217,8 +198,7 @@ abstract class ClassPath[T] {
   def sourcepaths: IndexedSeq[AbstractFile]
 
   /**
-   * Represents classes which can be loaded with a ClassfileLoader/MsilFileLoader
-   * and / or a SourcefileLoader.
+   * Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
    */
   case class ClassRep(binary: Option[T], source: Option[AbstractFile]) {
     def name: String = binary match {
@@ -231,16 +211,16 @@ abstract class ClassPath[T] {
 
   /** Filters for assessing validity of various entities.
    */
-  def validClassFile(name: String)  = endsClass(name) && context.isValidName(name)
-  def validPackage(name: String)    = (name != "META-INF") && (name != "") && (name.charAt(0) != '.')
-  def validSourceFile(name: String) = endsScala(name) || endsJava(name)
+  def validClassFile(name: String)  = context.validClassFile(name)
+  def validPackage(name: String)    = context.validPackage(name)
+  def validSourceFile(name: String) = context.validSourceFile(name)
 
   /**
    * Find a ClassRep given a class name of the form "package.subpackage.ClassName".
    * Does not support nested classes on .NET
    */
   def findClass(name: String): Option[AnyClassRep] =
-    splitWhere(name, _ == '.', true) match {
+    splitWhere(name, _ == '.', doDropIndex = true) match {
       case Some((pkg, rest)) =>
         val rep = packages find (_.name == pkg) flatMap (_ findClass rest)
         rep map {
@@ -284,7 +264,7 @@ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends
       else if (f.isDirectory && validPackage(f.name))
         packageBuf += new SourcePath[T](f, context)
     }
-    (packageBuf.result, classBuf.result)
+    (packageBuf.result(), classBuf.result())
   }
 
   lazy val (packages, classes) = traverse()
@@ -297,7 +277,7 @@ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends
 class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[AbstractFile]) extends ClassPath[AbstractFile] {
   def name = dir.name
   override def origin = dir.underlyingSource map (_.path)
-  def asURLs = if (dir.file == null) Nil else List(dir.toURL)
+  def asURLs = if (dir.file == null) List(new URL(name)) else List(dir.toURL)
   def asClasspathString = dir.path
   val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
 
@@ -305,13 +285,26 @@ class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[Ab
   private def traverse() = {
     val classBuf   = immutable.Vector.newBuilder[ClassRep]
     val packageBuf = immutable.Vector.newBuilder[DirectoryClassPath]
-    dir foreach { f =>
-      if (!f.isDirectory && validClassFile(f.name))
-        classBuf += ClassRep(Some(f), None)
-      else if (f.isDirectory && validPackage(f.name))
-        packageBuf += new DirectoryClassPath(f, context)
+    dir foreach {
+      f =>
+        // Optimization: We assume the file was not changed since `dir` called
+        // `Path.apply` and categorized existent files as `Directory`
+        // or `File`.
+        val isDirectory = f match {
+          case pf: io.PlainFile => pf.givenPath match {
+            case _: io.Directory => true
+            case _: io.File      => false
+            case _               => f.isDirectory
+          }
+          case _ =>
+            f.isDirectory
+        }
+        if (!isDirectory && validClassFile(f.name))
+          classBuf += ClassRep(Some(f), None)
+        else if (isDirectory && validPackage(f.name))
+          packageBuf += new DirectoryClassPath(f, context)
     }
-    (packageBuf.result, classBuf.result)
+    (packageBuf.result(), classBuf.result())
   }
 
   lazy val (packages, classes) = traverse()
@@ -409,15 +402,3 @@ class JavaClassPath(
   containers: IndexedSeq[ClassPath[AbstractFile]],
   context: JavaContext)
 extends MergedClassPath[AbstractFile](containers, context) { }
-
-object JavaClassPath {
-  def fromURLs(urls: Seq[URL], context: JavaContext): JavaClassPath = {
-    val containers = {
-      for (url <- urls ; f = AbstractFile getURL url ; if f != null) yield
-        new DirectoryClassPath(f, context)
-    }
-    new JavaClassPath(containers.toIndexedSeq, context)
-  }
-  def fromURLs(urls: Seq[URL]): JavaClassPath =
-    fromURLs(urls, ClassPath.DefaultJavaContext)
-}
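
The retained `expandS` helper is what lets a classpath entry such as `lib/*` stand for every jar, zip, or directory found inside `lib`. A simplified sketch of that expansion (helper names are illustrative, and the real method also handles embedded `*` patterns via a regex):

    import java.io.File

    object ExpandClasspathDemo {
      // Expand a single entry: a trailing separator-"*" is replaced by the
      // jars, zips and directories in that directory; anything else is kept.
      def expandEntry(pattern: String): List[String] = {
        val wildSuffix = File.separator + "*"
        def isJarOrZip(f: File) = f.getName.endsWith(".jar") || f.getName.endsWith(".zip")
        def lsDir(dir: File): List[String] =
          Option(dir.listFiles).toList.flatten
            .filter(f => f.isDirectory || isJarOrZip(f))
            .map(_.getPath)

        if (pattern endsWith wildSuffix) lsDir(new File(pattern dropRight 2))
        else List(pattern)
      }

      def main(args: Array[String]): Unit =
        expandEntry("lib" + File.separator + "*") foreach println
    }
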
diff --git a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
deleted file mode 100644
index 9cf2c53..0000000
--- a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
+++ /dev/null
@@ -1,144 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package util
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.input.{ Reader }
-import scala.util.parsing.input.CharArrayReader.EofCh
-import scala.collection.mutable.ListBuffer
-
-/** A simple command line parser to replace the several different
- *  simple ones spread around trunk.
- *
- *  XXX Note this has been completely obsolesced by scala.tools.cmd.
- *  I checked it back in as part of rolling partest back a month
- *  rather than go down the rabbit hole of unravelling dependencies.
- */
-
-trait ParserUtil extends Parsers {
-  protected implicit class ParserPlus[+T](underlying: Parser[T]) {
-    def !~>[U](p: => Parser[U]): Parser[U] = (underlying ~! p) ^^ { case a~b  => b }
-    def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! p) ^^ { case a~b  => a }
-  }
-}
-
-case class CommandLine(
-  args: List[String],
-  unaryArguments: List[String],
-  binaryArguments: List[String]
-) {
-  def this(args: List[String]) = this(args, Nil, Nil)
-  def this(args: Array[String]) = this(args.toList, Nil, Nil)
-  def this(line: String) = this(CommandLineParser tokenize line, Nil, Nil)
-
-  def withUnaryArgs(xs: List[String]) = copy(unaryArguments = xs)
-  def withBinaryArgs(xs: List[String]) = copy(binaryArguments = xs)
-
-  def originalArgs = args
-  def assumeBinary = true
-  def enforceArity = true
-  def onlyKnownOptions = false
-
-  val Terminator = "--"
-  val ValueForUnaryOption = "true"  // so if --opt is given, x(--opt) = true
-
-  def mapForUnary(opt: String) = Map(opt -> ValueForUnaryOption)
-  def errorFn(msg: String) = println(msg)
-
-  /** argMap is option -> argument (or "" if it is a unary argument)
-   *  residualArgs are what is left after removing the options and their args.
-   */
-  lazy val (argMap, residualArgs) = {
-    val residualBuffer = new ListBuffer[String]
-
-    def stripQuotes(s: String) = {
-      def isQuotedBy(c: Char) = s.length > 0 && s.head == c && s.last == c
-      if (List('"', '\'') exists isQuotedBy) s.tail.init else s
-    }
-
-    def isValidOption(s: String) = !onlyKnownOptions || (unaryArguments contains s) || (binaryArguments contains s)
-    def isOption(s: String) = (s startsWith "-") && (isValidOption(s) || { unknownOption(s) ; false })
-    def isUnary(s: String) = isOption(s) && (unaryArguments contains s)
-    def isBinary(s: String) = isOption(s) && !isUnary(s) && (assumeBinary || (binaryArguments contains s))
-
-    def unknownOption(opt: String) =
-      errorFn("Option '%s' not recognized.".format(opt))
-    def missingArg(opt: String, what: String) =
-      errorFn("Option '%s' requires argument, found %s instead.".format(opt, what))
-
-    def loop(args: List[String]): Map[String, String] = {
-      def residual(xs: List[String]) = { residualBuffer ++= xs ; Map[String, String]() }
-      if (args.isEmpty) return Map()
-      val hd :: rest = args
-      if (rest.isEmpty) {
-        if (isBinary(hd) && enforceArity)
-          missingArg(hd, "EOF")
-
-        if (isOption(hd)) mapForUnary(hd) else residual(args)
-      }
-      else
-        if (hd == Terminator) residual(rest)
-      else {
-        val hd1 :: hd2 :: rest = args
-
-        if (hd2 == Terminator) mapForUnary(hd1) ++ residual(rest)
-        else if (isUnary(hd1)) mapForUnary(hd1) ++ loop(hd2 :: rest)
-        else if (isBinary(hd1)) {
-          // Disabling this check so
-          //  --scalacopts "-verbose" works.  We can't tell if it's quoted,
-          // the shell does us in.
-          //
-          // if (isOption(hd2) && enforceArity)
-          //   missingArg(hd1, hd2)
-
-          Map(hd1 -> hd2) ++ loop(rest)
-        }
-        else { residual(List(hd1)) ++ loop(hd2 :: rest) }
-      }
-    }
-
-    (loop(args), residualBuffer map stripQuotes toList)
-  }
-
-  def isSet(arg: String) = args contains arg
-  def get(arg: String) = argMap get arg
-  def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse
-  def apply(arg: String) = argMap(arg)
-
-  override def toString() = "CommandLine(\n%s)\n" format (args map ("  " + _ + "\n") mkString)
-}
-
-object CommandLineParser extends RegexParsers with ParserUtil {
-  override def skipWhitespace = false
-
-  def elemExcept(xs: Elem*): Parser[Elem] = elem("elemExcept", x => x != EofCh && !(xs contains x))
-  def elemOf(xs: Elem*): Parser[Elem]     = elem("elemOf", xs contains _)
-  def escaped(ch: Char): Parser[String] = "\\" + ch
-  def mkQuoted(ch: Char): Parser[String] = (
-      elem(ch) !~> rep(escaped(ch) | elemExcept(ch)) <~ ch ^^ (_.mkString)
-    | failure("Unmatched %s in input." format ch)
-  )
-
-  /** Apparently windows can't deal with the quotes sticking around. */
-  lazy val squoted: Parser[String] = mkQuoted('\'')   // ^^ (x => "'%s'" format x)
-  lazy val dquoted: Parser[String] = mkQuoted('"')    // ^^ (x => "\"" + x + "\"")
-  lazy val token: Parser[String]   = """\S+""".r
-
-  lazy val argument: Parser[String] = squoted | dquoted | token
-  lazy val commandLine: Parser[List[String]] = phrase(repsep(argument, whiteSpace))
-
-  class ParseException(msg: String) extends RuntimeException(msg)
-
-  def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x))
-  def tokenize(line: String, errorFn: String => Unit): List[String] = {
-    parse(commandLine, line.trim) match {
-      case Success(args, _)     => args
-      case NoSuccess(msg, rest) => errorFn(msg) ; Nil
-    }
-  }
-  def apply(line: String) = new CommandLine(tokenize(line))
-}
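
The removed parser's main job was `tokenize`: splitting a command line into arguments while keeping quoted arguments together (its own header already marked it as obsolesced by scala.tools.cmd). A much smaller sketch of the same splitting, using a regex instead of parser combinators:

    object TokenizeDemo {
      // Match a double-quoted chunk, a single-quoted chunk, or a plain run
      // of non-whitespace; then strip the surrounding quotes, if any.
      def tokenize(line: String): List[String] = {
        val token = """"[^"]*"|'[^']*'|\S+""".r
        token.findAllIn(line.trim).toList.map { t =>
          if (t.length >= 2 && (t.head == '"' || t.head == '\'') && t.last == t.head) t.tail.init
          else t
        }
      }

      def main(args: Array[String]): Unit = {
        // prints: List(--scalacopts, -verbose -deprecation, run)
        println(tokenize("""--scalacopts "-verbose -deprecation" run"""))
      }
    }
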
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
index dde53dc..ba44126 100755
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -74,7 +74,7 @@ object DocStrings {
     else idx :: findAll(str, idx)(p)
   }
 
-  /** Produces a string index, which is a list of ``sections'', i.e
+  /** Produces a string index, which is a list of `sections`, i.e.
    *  pairs of start/end positions of all tagged sections in the string.
    *  Every section starts with an at sign and extends to the next at sign,
    *  or to the end of the comment string, but excluding the final two
diff --git a/src/compiler/scala/tools/nsc/util/Exceptional.scala b/src/compiler/scala/tools/nsc/util/Exceptional.scala
index 3434426..1608ffa 100644
--- a/src/compiler/scala/tools/nsc/util/Exceptional.scala
+++ b/src/compiler/scala/tools/nsc/util/Exceptional.scala
@@ -3,8 +3,6 @@ package util
 
 import java.util.concurrent.ExecutionException
 import java.lang.reflect.{ InvocationTargetException, UndeclaredThrowableException }
-import scala.reflect.internal.util.StringOps._
-import scala.language.implicitConversions
 
 object Exceptional {
   def unwrap(x: Throwable): Throwable = x match {
diff --git a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
deleted file mode 100644
index 5421843..0000000
--- a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-import scala.collection.mutable
-
-trait FreshNameCreator {
-  /** Do not call before after type checking ends.
-   *  PP: I think that directive needs to lose a word somewhere.
-   */
-  def newName(): String
-  def newName(prefix: String): String
-
-  @deprecated("use newName(prefix)", "2.9.0")
-  def newName(pos: scala.reflect.internal.util.Position, prefix: String): String = newName(prefix)
-  @deprecated("use newName()", "2.9.0")
-  def newName(pos: scala.reflect.internal.util.Position): String = newName()
-}
-
-object FreshNameCreator {
-  class Default extends FreshNameCreator {
-    protected var counter = 0
-    protected val counters = mutable.HashMap[String, Int]() withDefaultValue 0
-
-    /**
-     * Create a fresh name with the given prefix. It is guaranteed
-     * that the returned name has never been returned by a previous
-     * call to this function (provided the prefix does not end in a digit).
-     */
-    def newName(prefix: String): String = {
-      val safePrefix = prefix.replaceAll("""[<>]""", """\$""")
-      counters(safePrefix) += 1
-
-      safePrefix + counters(safePrefix)
-    }
-    def newName(): String = {
-      counter += 1
-      "$" + counter + "$"
-    }
-  }
-}
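
The removed creator's scheme is one counter per prefix, so generated names stay unique as long as the prefix does not itself end in a digit; equivalent functionality appears to live in scala-reflect in 2.11. A sketch of that scheme:

    import scala.collection.mutable

    object FreshNamesDemo {
      // One counter per prefix; the counter value is appended to the prefix.
      private val counters = mutable.Map.empty[String, Int] withDefaultValue 0

      def newName(prefix: String): String = {
        counters(prefix) += 1
        prefix + counters(prefix)
      }

      def main(args: Array[String]): Unit = {
        println(newName("x$"))  // x$1
        println(newName("x$"))  // x$2
        println(newName("ev$")) // ev$1
      }
    }
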
diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
index b7ed790..58a5442 100644
--- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.tools.nsc
+package scala
+package tools.nsc
 package util
 
 import scala.reflect.internal.Chars._
@@ -14,74 +15,32 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int,
   def this(buf: IndexedSeq[Char], decodeUni: Boolean, error: String => Unit) =
     this(buf, 0, /* 1, 1, */ decodeUni, error)
 
-  /** produce a duplicate of this char array reader which starts reading
-    *  at current position, independent of what happens to original reader
-	*/
-  def dup: JavaCharArrayReader = clone().asInstanceOf[JavaCharArrayReader]
-
-  /** layout constant
-   */
-  val tabinc = 8
-
   /** the line and column position of the current character
   */
   var ch: Char = _
   var bp = start
-  var oldBp = -1
-  var oldCh: Char = _
-
-  //private var cline: Int = _
-  //private var ccol: Int = _
   def cpos = bp
   var isUnicode: Boolean = _
-  var lastLineStartPos: Int = 0
-  var lineStartPos: Int = 0
-  var lastBlankLinePos: Int = 0
-
-  private var onlyBlankChars = false
-  //private var nextline = startline
-  //private var nextcol = startcol
-
-  private def markNewLine() {
-    lastLineStartPos = lineStartPos
-    if (onlyBlankChars) lastBlankLinePos = lineStartPos
-    lineStartPos = bp
-    onlyBlankChars = true
-    //nextline += 1
-    //nextcol = 1
-  }
-
-  def hasNext: Boolean = if (bp < buf.length) true
-  else {
-    false
-  }
 
-  def last: Char = if (bp > start + 2) buf(bp - 2) else ' ' // XML literals
+  def hasNext = bp < buf.length
 
   def next(): Char = {
-    //cline = nextline
-    //ccol = nextcol
     val buf = this.buf.asInstanceOf[collection.mutable.WrappedArray[Char]].array
     if(!hasNext) {
       ch = SU
       return SU  // there is an endless stream of SU's at the end
     }
-    oldBp = bp
-    oldCh = ch
     ch = buf(bp)
     isUnicode = false
     bp = bp + 1
     ch match {
       case '\t' =>
-        // nextcol = ((nextcol - 1) / tabinc * tabinc) + tabinc + 1;
       case CR =>
-        if (bp < buf.size && buf(bp) == LF) {
+        if (bp < buf.length && buf(bp) == LF) {
           ch = LF
           bp += 1
         }
-        markNewLine()
       case LF | FF =>
-        markNewLine()
       case '\\' =>
         def evenSlashPrefix: Boolean = {
           var p = bp - 2
@@ -90,34 +49,23 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int,
         }
         def udigit: Int = {
           val d = digit2int(buf(bp), 16)
-          if (d >= 0) { bp += 1; /* nextcol = nextcol + 1 */ }
-          else error("error in unicode escape");
+          if (d >= 0) bp += 1
+          else error("error in unicode escape")
           d
         }
-        // nextcol += 1
         if (buf(bp) == 'u' && decodeUni && evenSlashPrefix) {
           do {
             bp += 1 //; nextcol += 1
-          } while (buf(bp) == 'u');
+          } while (buf(bp) == 'u')
           val code = udigit << 12 | udigit << 8 | udigit << 4 | udigit
           ch = code.asInstanceOf[Char]
           isUnicode = true
         }
       case _ =>
-        if (ch > ' ') onlyBlankChars = false
-        // nextcol += 1
     }
     ch
   }
 
-  def rewind() {
-    if (oldBp == -1) throw new IllegalArgumentException
-    bp = oldBp
-    ch = oldCh
-    oldBp = -1
-    oldCh = 'x'
-  }
-
   def copy: JavaCharArrayReader =
     new JavaCharArrayReader(buf, bp, /* nextcol, nextline, */ decodeUni, error)
 }
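
Both this reader and CharArrayReader fold \uXXXX escapes into the character they denote, provided the escape is preceded by an even number of backslashes (and possibly several 'u's). A stripped-down sketch of decoding a single such escape, skipping the slash counting and the repeated-'u' case:

    object UnicodeEscapeDemo {
      // Decode a six-character sequence: backslash, 'u', four hex digits.
      def decodeOnce(s: String): Char = {
        require(s.length >= 6 && s.charAt(0) == '\\' && s.charAt(1) == 'u')
        Integer.parseInt(s.substring(2, 6), 16).toChar
      }

      def main(args: Array[String]): Unit = {
        val escaped = "\\" + "u0041" // the six characters  \ u 0 0 4 1
        println(decodeOnce(escaped)) // prints: A
      }
    }
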
diff --git a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
deleted file mode 100644
index 77a19d3..0000000
--- a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
+++ /dev/null
@@ -1,170 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2006-2013 LAMP/EPFL
- * @author  Martin Odersky
- */
-
-// $Id$
-
-package scala.tools.nsc
-package util
-
-import java.io.File
-import java.net.URL
-import java.util.StringTokenizer
-import scala.util.Sorting
-import scala.collection.mutable
-import scala.tools.nsc.io.{ AbstractFile, MsilFile }
-import ch.epfl.lamp.compiler.msil.{ Type => MSILType, Assembly }
-import ClassPath.ClassPathContext
-import scala.reflect.runtime.ReflectionUtils.isTraitImplementation
-
-/** Keeping the MSIL classpath code in its own file is important to make sure
- *  we don't accidentally introduce a dependency on msil.jar in the jvm.
- */
-
-object MsilClassPath {
-  def collectTypes(assemFile: AbstractFile) = {
-    var res: Array[MSILType] = MSILType.EmptyTypes
-    val assem = Assembly.LoadFrom(assemFile.path)
-    if (assem != null) {
-      // DeclaringType == null: true for non-inner classes
-      res = assem.GetTypes() filter (_.DeclaringType == null)
-      Sorting.stableSort(res, (t1: MSILType, t2: MSILType) => (t1.FullName compareTo t2.FullName) < 0)
-    }
-    res
-  }
-
-  /** On the java side this logic is in PathResolver, but as I'm not really
-   *  up to folding MSIL into that, I am encapsulating it here.
-   */
-  def fromSettings(settings: Settings): MsilClassPath = {
-    val context =
-      if (settings.inline.value) new MsilContext
-      else new MsilContext { override def isValidName(name: String) = !isTraitImplementation(name) }
-
-    import settings._
-    new MsilClassPath(assemextdirs.value, assemrefs.value, sourcepath.value, context)
-  }
-
-  class MsilContext extends ClassPathContext[MsilFile] {
-    def toBinaryName(rep: MsilFile) = rep.msilType.Name
-    def newClassPath(assemFile: AbstractFile) = new AssemblyClassPath(MsilClassPath collectTypes assemFile, "", this)
-  }
-
-  private def assembleEntries(ext: String, user: String, source: String, context: MsilContext): List[ClassPath[MsilFile]] = {
-    import ClassPath._
-    val etr = new mutable.ListBuffer[ClassPath[MsilFile]]
-    val names = new mutable.HashSet[String]
-
-    // 1. Assemblies from -Xassem-extdirs
-    for (dirName <- expandPath(ext, expandStar = false)) {
-      val dir = AbstractFile.getDirectory(dirName)
-      if (dir ne null) {
-        for (file <- dir) {
-          val name = file.name.toLowerCase
-          if (name.endsWith(".dll") || name.endsWith(".exe")) {
-            names += name
-            etr += context.newClassPath(file)
-          }
-        }
-      }
-    }
-
-    // 2. Assemblies from -Xassem-path
-    for (fileName <- expandPath(user, expandStar = false)) {
-      val file = AbstractFile.getFile(fileName)
-      if (file ne null) {
-        val name = file.name.toLowerCase
-        if (name.endsWith(".dll") || name.endsWith(".exe")) {
-          names += name
-          etr += context.newClassPath(file)
-        }
-      }
-    }
-
-    def check(n: String) {
-      if (!names.contains(n))
-      throw new AssertionError("Cannot find assembly "+ n +
-         ". Use -Xassem-extdirs or -Xassem-path to specify its location")
-    }
-    check("mscorlib.dll")
-    check("scalaruntime.dll")
-
-    // 3. Source path
-    for (dirName <- expandPath(source, expandStar = false)) {
-      val file = AbstractFile.getDirectory(dirName)
-      if (file ne null) etr += new SourcePath[MsilFile](file, context)
-    }
-
-    etr.toList
-  }
-}
-import MsilClassPath._
-
-/**
- * An assembly file (dll / exe) containing classes and namespaces
- */
-class AssemblyClassPath(types: Array[MSILType], namespace: String, val context: MsilContext) extends ClassPath[MsilFile] {
-  def name = {
-    val i = namespace.lastIndexOf('.')
-    if (i < 0) namespace
-    else namespace drop (i + 1)
-  }
-  def asURLs = List(new java.net.URL(name))
-  def asClasspathString = sys.error("Unknown")  // I don't know what if anything makes sense here?
-
-  private lazy val first: Int = {
-    var m = 0
-    var n = types.length - 1
-    while (m < n) {
-      val l = (m + n) / 2
-      val res = types(l).FullName.compareTo(namespace)
-      if (res < 0) m = l + 1
-      else n = l
-    }
-    if (types(m).FullName.startsWith(namespace)) m else types.length
-  }
-
-  lazy val classes = {
-    val cls = new mutable.ListBuffer[ClassRep]
-    var i = first
-    while (i < types.length && types(i).Namespace.startsWith(namespace)) {
-      // CLRTypes used to exclude java.lang.Object and java.lang.String (no idea why..)
-      if (types(i).Namespace == namespace)
-        cls += ClassRep(Some(new MsilFile(types(i))), None)
-      i += 1
-    }
-    cls.toIndexedSeq
-  }
-
-  lazy val packages = {
-    val nsSet = new mutable.HashSet[String]
-    var i = first
-    while (i < types.length && types(i).Namespace.startsWith(namespace)) {
-      val subns = types(i).Namespace
-      if (subns.length > namespace.length) {
-        // example: namespace = "System", subns = "System.Reflection.Emit"
-        //   => find second "." and "System.Reflection" to nsSet.
-        val end = subns.indexOf('.', namespace.length + 1)
-        nsSet += (if (end < 0) subns
-                  else subns.substring(0, end))
-      }
-      i += 1
-    }
-    val xs = for (ns <- nsSet.toList)
-      yield new AssemblyClassPath(types, ns, context)
-
-    xs.toIndexedSeq
-  }
-
-  val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
-
-  override def toString() = "assembly classpath "+ namespace
-}
-
-/**
- * The classpath when compiling with target:msil. Binary files are represented as
- * MSILType values.
- */
-class MsilClassPath(ext: String, user: String, source: String, context: MsilContext)
-extends MergedClassPath[MsilFile](MsilClassPath.assembleEntries(ext, user, source, context), context) { }
\ No newline at end of file
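
The `first` value in the removed AssemblyClassPath is a lower-bound binary search over the FullName-sorted type array: it finds the first entry at or after the namespace prefix. A standalone sketch of the same search over plain strings (helper name and sample data are made up):

    // Index of the first element >= prefix in a sorted array, or length if the
    // element found there does not start with the prefix (mirrors `first` above).
    def lowerBound(names: Array[String], prefix: String): Int = {
      var m = 0
      var n = names.length - 1
      while (m < n) {
        val l = (m + n) / 2
        if (names(l).compareTo(prefix) < 0) m = l + 1 else n = l
      }
      if (names.nonEmpty && names(m).startsWith(prefix)) m else names.length
    }

    lowerBound(Array("System", "System.IO", "System.Reflection"), "System.IO")   // 1
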
diff --git a/src/compiler/scala/tools/nsc/util/MultiHashMap.scala b/src/compiler/scala/tools/nsc/util/MultiHashMap.scala
deleted file mode 100644
index 67987c6..0000000
--- a/src/compiler/scala/tools/nsc/util/MultiHashMap.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package scala.tools.nsc.util
-
-import scala.collection.{ mutable, immutable }
-
-/** A hashmap with set-valued values, and an empty set as default value
- */
-class MultiHashMap[K, V] extends mutable.HashMap[K, immutable.Set[V]] {
-  override def default(key: K): immutable.Set[V] = Set()
-}
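
The removed MultiHashMap only overrides `default`, so reading a missing key yields the empty set instead of throwing. The same idiom without the subclass, as a short usage sketch (keys and values are made up):

    import scala.collection.{ mutable, immutable }

    // Reads of missing keys yield Set() rather than NoSuchElementException.
    val deps = new mutable.HashMap[String, immutable.Set[String]] {
      override def default(key: String): immutable.Set[String] = Set()
    }
    deps("a")                       // Set() -- nothing stored yet
    deps("a") = deps("a") + "b"     // read the default, add, store
    deps("a")                       // Set("b")
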
diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
deleted file mode 100644
index 1f6fa68..0000000
--- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
+++ /dev/null
@@ -1,168 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Paul Phillips
- */
-
-package scala.tools.nsc
-package util
-
-import java.lang.{ ClassLoader => JClassLoader }
-import java.lang.reflect.{ Constructor, Modifier, Method }
-import java.io.{ File => JFile }
-import java.net.{ URLClassLoader => JURLClassLoader }
-import java.net.URL
-import scala.reflect.runtime.ReflectionUtils.unwrapHandler
-import ScalaClassLoader._
-import scala.util.control.Exception.{ catching }
-import scala.language.implicitConversions
-import scala.reflect.{ ClassTag, classTag }
-
-trait HasClassPath {
-  def classPathURLs: Seq[URL]
-}
-
-/** A wrapper around java.lang.ClassLoader to lower the annoyance
- *  of java reflection.
- */
-trait ScalaClassLoader extends JClassLoader {
-  /** Executing an action with this classloader as context classloader */
-  def asContext[T](action: => T): T = {
-    val saved = contextLoader
-    try { setContext(this) ; action }
-    finally setContext(saved)
-  }
-  def setAsContext() { setContext(this) }
-
-  /** Load and link a class with this classloader */
-  def tryToLoadClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, false)
-  /** Load, link and initialize a class with this classloader */
-  def tryToInitializeClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, true)
-
-  private def tryClass[T <: AnyRef](path: String, initialize: Boolean): Option[Class[T]] =
-    catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt
-      Class.forName(path, initialize, this).asInstanceOf[Class[T]]
-
-  /** Create an instance of a class with this classloader */
-  def create(path: String): AnyRef =
-    tryToInitializeClass[AnyRef](path) map (_.newInstance()) orNull
-
-  def constructorsOf[T <: AnyRef : ClassTag]: List[Constructor[T]] =
-    classTag[T].runtimeClass.getConstructors.toList map (_.asInstanceOf[Constructor[T]])
-
-  /** The actual bytes for a class file, or an empty array if it can't be found. */
-  def classBytes(className: String): Array[Byte] = classAsStream(className) match {
-    case null   => Array()
-    case stream => io.Streamable.bytes(stream)
-  }
-
-  /** An InputStream representing the given class name, or null if not found. */
-  def classAsStream(className: String) =
-    getResourceAsStream(className.replaceAll("""\.""", "/") + ".class")
-
-  /** Run the main method of a class to be loaded by this classloader */
-  def run(objectName: String, arguments: Seq[String]) {
-    val clsToRun = tryToInitializeClass(objectName) getOrElse (
-      throw new ClassNotFoundException(objectName)
-    )
-    val method = clsToRun.getMethod("main", classOf[Array[String]])
-    if (!Modifier.isStatic(method.getModifiers))
-      throw new NoSuchMethodException(objectName + ".main is not static")
-
-    try asContext(method.invoke(null, Array(arguments.toArray: AnyRef): _*)) // !!! : AnyRef shouldn't be necessary
-    catch unwrapHandler({ case ex => throw ex })
-  }
-
-  /** A list comprised of this classloader followed by all its
-   *  (non-null) parent classloaders, if any.
-   */
-  def loaderChain: List[ScalaClassLoader] = this :: (getParent match {
-    case null => Nil
-    case p    => p.loaderChain
-  })
-}
-
-/** Methods for obtaining various classloaders.
- *      appLoader: the application classloader.  (Also called the java system classloader.)
- *      extLoader: the extension classloader.
- *     bootLoader: the boot classloader.
- *  contextLoader: the context classloader.
- */
-object ScalaClassLoader {
-  /** Returns loaders which are already ScalaClassLoaders unaltered,
-   *  and translates java.net.URLClassLoaders into scala URLClassLoaders.
-   *  Otherwise creates a new wrapper.
-   */
-  implicit def apply(cl: JClassLoader): ScalaClassLoader = cl match {
-    case cl: ScalaClassLoader => cl
-    case cl: JURLClassLoader  => new URLClassLoader(cl.getURLs.toSeq, cl.getParent)
-    case _                    => new JClassLoader(cl) with ScalaClassLoader
-  }
-  def contextLoader = apply(Thread.currentThread.getContextClassLoader)
-  def appLoader     = apply(JClassLoader.getSystemClassLoader)
-  def extLoader     = apply(appLoader.getParent)
-  def bootLoader    = apply(null)
-  def contextChain  = loaderChain(contextLoader)
-
-  def pathToErasure[T: ClassTag]   = pathToClass(classTag[T].runtimeClass)
-  def pathToClass(clazz: Class[_]) = clazz.getName.replace('.', JFile.separatorChar) + ".class"
-  def locate[T: ClassTag]          = contextLoader getResource pathToErasure[T]
-
-  /** Tries to guess the classpath by type matching the context classloader
-   *  and its parents, looking for any classloaders which will reveal their
- *  classpath elements as urls.  If it can't find any, creates a classpath
-   *  from the supplied string.
-   */
-  def guessClassPathString(default: String = ""): String = {
-    val classpathURLs = contextChain flatMap {
-      case x: HasClassPath    => x.classPathURLs
-      case x: JURLClassLoader => x.getURLs.toSeq
-      case _                  => Nil
-    }
-    if (classpathURLs.isEmpty) default
-    else JavaClassPath.fromURLs(classpathURLs).asClasspathString
-  }
-
-  def loaderChain(head: JClassLoader) = {
-    def loop(cl: JClassLoader): List[JClassLoader] =
-      if (cl == null) Nil else cl :: loop(cl.getParent)
-
-    loop(head)
-  }
-  def setContext(cl: JClassLoader) =
-    Thread.currentThread.setContextClassLoader(cl)
-  def savingContextLoader[T](body: => T): T = {
-    val saved = contextLoader
-    try body
-    finally setContext(saved)
-  }
-
-  class URLClassLoader(urls: Seq[URL], parent: JClassLoader)
-      extends JURLClassLoader(urls.toArray, parent)
-         with ScalaClassLoader
-         with HasClassPath {
-
-    private var classloaderURLs: Seq[URL] = urls
-    private def classpathString = ClassPath.fromURLs(urls: _*)
-    def classPathURLs: Seq[URL] = classloaderURLs
-    def classPath: ClassPath[_] = JavaClassPath fromURLs classPathURLs
-
-    /** Override to widen to public */
-    override def addURL(url: URL) = {
-      classloaderURLs :+= url
-      super.addURL(url)
-    }
-    def toLongString = urls.mkString("URLClassLoader(\n  ", "\n  ", "\n)\n")
-  }
-
-  def fromURLs(urls: Seq[URL], parent: ClassLoader = null): URLClassLoader =
-    new URLClassLoader(urls, parent)
-
-  /** True if supplied class exists in supplied path */
-  def classExists(urls: Seq[URL], name: String): Boolean =
-    fromURLs(urls) tryToLoadClass name isDefined
-
-  /** Finding what jar a clazz or instance came from */
-  def origin(x: Any): Option[URL] = originOfClass(x.getClass)
-  def originOfClass(x: Class[_]): Option[URL] =
-    Option(x.getProtectionDomain.getCodeSource) flatMap (x => Option(x.getLocation))
-}
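
asContext and savingContextLoader in the removed file (now living in scala.reflect.internal.util.ScalaClassLoader) are both instances of a save/restore pattern around the thread's context classloader. A minimal sketch of that pattern using only the plain Java API, with a hypothetical helper name:

    // Run `action` with `cl` as the context classloader, restoring the previous one afterwards.
    def withContextLoader[T](cl: ClassLoader)(action: => T): T = {
      val thread = Thread.currentThread
      val saved  = thread.getContextClassLoader
      thread.setContextClassLoader(cl)
      try action
      finally thread.setContextClassLoader(saved)
    }
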
diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
index 2b87280..b804bfb 100644
--- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala
+++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
@@ -3,17 +3,17 @@
  * @author  Martin Odersky
  */
 
-package scala.tools
+package scala
+package tools
 package nsc
 package util
 
-import java.io.{File, FileInputStream, PrintStream}
+import java.io.PrintStream
 import java.lang.Long.toHexString
 import java.lang.Float.intBitsToFloat
 import java.lang.Double.longBitsToDouble
 import scala.reflect.internal.{Flags, Names}
 import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
-import interpreter.ByteCode.scalaSigBytesForPath
 
 object ShowPickled extends Names {
   import PickleFormat._
@@ -94,7 +94,6 @@ object ShowPickled extends Names {
     case ANNOTATEDtpe   => "ANNOTATEDtpe"
     case ANNOTINFO      => "ANNOTINFO"
     case ANNOTARGARRAY  => "ANNOTARGARRAY"
-    // case DEBRUIJNINDEXtpe => "DEBRUIJNINDEXtpe"
     case EXISTENTIALtpe => "EXISTENTIALtpe"
     case TREE           => "TREE"
     case MODIFIERS      => "MODIFIERS"
@@ -109,7 +108,7 @@ object ShowPickled extends Names {
     var result = 0L
     var b = 0L
     do {
-      b = data(idx)
+      b = data(idx).toLong
       idx += 1
       result = (result << 7) + (b & 0x7f)
     } while((b & 0x80) != 0L)
@@ -165,7 +164,7 @@ object ShowPickled extends Names {
         out.print(" %s[%s]".format(toHexString(pflags), flagString))
       }
 
-      /** Might be info or privateWithin */
+      /* Might be info or privateWithin */
       val x = buf.readNat()
       if (buf.readIndex == end) {
         printFlags(None)
@@ -177,9 +176,9 @@ object ShowPickled extends Names {
       }
     }
 
-    /** Note: the entries which require some semantic analysis to be correctly
-     *  interpreted are for the most part going to tell you the wrong thing.
-     *  It's not so easy to duplicate the logic applied in the UnPickler.
+    /* Note: the entries which require some semantic analysis to be correctly
+     * interpreted are for the most part going to tell you the wrong thing.
+     * It's not so easy to duplicate the logic applied in the UnPickler.
      */
     def printEntry(i: Int) {
       buf.readIndex = index(i)
@@ -251,7 +250,7 @@ object ShowPickled extends Names {
         case SYMANNOT       =>
           printSymbolRef(); printTypeRef(); buf.until(end, printAnnotArgRef)
         case ANNOTATEDtpe   =>
-          printTypeRef(); buf.until(end, printAnnotInfoRef);
+          printTypeRef(); buf.until(end, printAnnotInfoRef)
         case ANNOTINFO      =>
           printTypeRef(); buf.until(end, printAnnotArgRef)
         case ANNOTARGARRAY  =>
@@ -272,8 +271,7 @@ object ShowPickled extends Names {
     for (i <- 0 until index.length) printEntry(i)
   }
 
-  def fromFile(path: String) = fromBytes(io.File(path).toByteArray)
-  def fromName(name: String) = fromBytes(scalaSigBytesForPath(name) getOrElse Array())
+  def fromFile(path: String) = fromBytes(io.File(path).toByteArray())
   def fromBytes(data: => Array[Byte]): Option[PickleBuffer] =
     try Some(new PickleBuffer(data, 0, data.length))
     catch { case _: Exception => None }
@@ -288,7 +286,7 @@ object ShowPickled extends Names {
 
   def main(args: Array[String]) {
     args foreach { arg =>
-      (fromFile(arg) orElse fromName(arg)) match {
+      fromFile(arg) match {
         case Some(pb) => show(arg + ":", pb)
         case _        => Console.println("Cannot read " + arg)
       }
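
The nat-reading loop patched above (b = data(idx).toLong) decodes the pickle format's variable-length naturals: big-endian 7-bit groups, with the high bit of each byte marking continuation (the explicit .toLong only makes the byte-to-Long widening visible; the masks already handle the sign). A standalone sketch of the same decoding, with a hypothetical helper and sample bytes:

    // Decode one Nat starting at `start`; returns the value and the next unread index.
    def readNat(data: Array[Byte], start: Int): (Long, Int) = {
      var idx    = start
      var result = 0L
      var b      = 0L
      do {
        b = data(idx).toLong
        idx += 1
        result = (result << 7) + (b & 0x7f)
      } while ((b & 0x80) != 0L)
      (result, idx)
    }

    readNat(Array(0x81.toByte, 0x05.toByte), 0)   // (133, 2): 1 * 128 + 5
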
diff --git a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
index 2601798..4e1cf02 100644
--- a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
+++ b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
@@ -6,7 +6,7 @@ package util
 import java.io.PrintStream
 
 /** A simple tracer
- *  @param out: The print stream where trace info shoul be sent
+ *  @param out: The print stream where trace info should be sent
  *  @param enabled: A condition that must be true for trace info to be produced.
  */
 class SimpleTracer(out: PrintStream, enabled: Boolean = true) {
@@ -14,6 +14,5 @@ class SimpleTracer(out: PrintStream, enabled: Boolean = true) {
     if (enabled) out.println(msg+value)
     value
   }
-  def withOutput(out: PrintStream) = new SimpleTracer(out, enabled)
   def when(enabled: Boolean): SimpleTracer = new SimpleTracer(out, enabled)
 }
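
A usage sketch for SimpleTracer, assuming the curried apply(msg)(value) shape implied by the body shown above (the values are made up):

    val tracer = new SimpleTracer(System.out)
    val n = tracer("computed: ")(21 * 2)            // prints "computed: 42" and returns 42
    tracer.when(enabled = false)("silent: ")(n)     // derived tracer: returns n without printing
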
diff --git a/src/compiler/scala/tools/nsc/util/StackTracing.scala b/src/compiler/scala/tools/nsc/util/StackTracing.scala
new file mode 100644
index 0000000..fa4fe29
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/StackTracing.scala
@@ -0,0 +1,76 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ */
+
+package scala.tools.nsc.util
+
+private[util] trait StackTracing extends Any {
+
+  /** Format a stack trace, returning the prefix consisting of frames that satisfy
+   *  a given predicate.
+   *  The format is similar to the typical case described in the JavaDoc
+   *  for [[java.lang.Throwable#printStackTrace]].
+   *  If a stack trace is truncated, it will be followed by a line of the form
+   *  `... 3 elided`, by analogy to the lines `... 3 more` which indicate
+   *  shared stack trace segments.
+   *  @param e the exception
+   *  @param p the predicate to select the prefix
+   */
+  def stackTracePrefixString(e: Throwable)(p: StackTraceElement => Boolean): String = {
+    import collection.mutable.{ ArrayBuffer, ListBuffer }
+    import compat.Platform.EOL
+    import util.Properties.isJavaAtLeast
+
+    val sb = ListBuffer.empty[String]
+
+    type TraceRelation = String
+    val Self       = new TraceRelation("")
+    val CausedBy   = new TraceRelation("Caused by: ")
+    val Suppressed = new TraceRelation("Suppressed: ")
+
+    val suppressable = isJavaAtLeast("1.7")
+
+    def clazz(e: Throwable)           = e.getClass.getName
+    def because(e: Throwable): String = e.getCause match { case null => null ; case c => header(c) }
+    def msg(e: Throwable): String     = e.getMessage match { case null => because(e) ; case s => s }
+    def txt(e: Throwable): String     = msg(e) match { case null => "" ; case s => s": $s" }
+    def header(e: Throwable): String  = s"${clazz(e)}${txt(e)}"
+
+    val indent = "\u0020\u0020"
+
+    val seen = new ArrayBuffer[Throwable](16)
+    def unseen(t: Throwable) = {
+      def inSeen = seen exists (_ eq t)
+      val interesting = (t != null) && !inSeen
+      if (interesting) seen += t
+      interesting
+    }
+
+    def print(e: Throwable, r: TraceRelation, share: Array[StackTraceElement], indents: Int): Unit = if (unseen(e)) {
+      val trace  = e.getStackTrace
+      val frames = (
+        if (share.nonEmpty) {
+          val spare  = share.reverseIterator
+          val trimmed = trace.reverse dropWhile (spare.hasNext && spare.next == _)
+          trimmed.reverse
+        } else trace
+      )
+      val prefix   = frames takeWhile p
+      val margin   = indent * indents
+      val indented = margin + indent
+      sb append s"${margin}${r}${header(e)}"
+      prefix foreach (f => sb append s"${indented}at $f")
+      if (frames.size < trace.size) sb append s"$indented... ${trace.size - frames.size} more"
+      if (r == Self && prefix.size < frames.size) sb append s"$indented... ${frames.size - prefix.size} elided"
+      print(e.getCause, CausedBy, trace, indents)
+      if (suppressable) {
+        import scala.language.reflectiveCalls
+        type Suppressing = { def getSuppressed(): Array[Throwable] }
+        for (s <- e.asInstanceOf[Suppressing].getSuppressed) print(s, Suppressed, frames, indents + 1)
+      }
+    }
+    print(e, Self, share = Array.empty, indents = 0)
+
+    sb mkString EOL
+  }
+}
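
A usage sketch of the new helper through the StackTraceOps implicit that this patch adds to the scala.tools.nsc.util package object further below (the predicate is only an example):

    import scala.tools.nsc.util._

    val e = new RuntimeException("boom")
    // Keep only frames from compiler packages; the rest becomes a "... N elided" line.
    println(e.stackTracePrefixString(_.getClassName startsWith "scala.tools"))
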
diff --git a/src/compiler/scala/tools/nsc/util/TreeSet.scala b/src/compiler/scala/tools/nsc/util/TreeSet.scala
deleted file mode 100644
index d2e9238..0000000
--- a/src/compiler/scala/tools/nsc/util/TreeSet.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author  Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-/** Sets implemented as binary trees.
- *
- *  @author Martin Odersky
- *  @version 1.0
- */
-class TreeSet[T >: Null <: AnyRef](less: (T, T) => Boolean) extends Set[T] {
-
-  private class Tree(val elem: T) {
-    var l: Tree = null
-    var r: Tree = null
-  }
-
-  private var tree: Tree = null
-
-  def findEntry(x: T): T = {
-    def find(t: Tree): T = {
-      if (t eq null) null
-      else if (less(x, t.elem)) find(t.l)
-      else if (less(t.elem, x)) find(t.r)
-      else t.elem
-    }
-    find(tree)
-  }
-
-  def addEntry(x: T) {
-    def add(t: Tree): Tree = {
-      if (t eq null) new Tree(x)
-      else if (less(x, t.elem)) { t.l = add(t.l); t }
-      else if (less(t.elem, x)) { t.r = add(t.r); t }
-      else t
-    }
-    tree = add(tree)
-  }
-
-  def iterator = toList.iterator
-
-  override def foreach[U](f: T => U) {
-    def loop(t: Tree) {
-      if (t ne null) {
-        loop(t.l)
-        f(t.elem)
-        loop(t.r)
-      }
-    }
-    loop(tree)
-  }
-  override def toList = {
-    val xs = scala.collection.mutable.ListBuffer[T]()
-    foreach(xs += _)
-    xs.toList
-  }
-
-  override def toString(): String = {
-    if (tree eq null) "<empty>" else "(..." + tree.elem + "...)"
-  }
-}
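
A short usage sketch of the removed TreeSet; the ordering comes entirely from the `less` function supplied at construction (sample elements are made up):

    val ts = new TreeSet[String]((a, b) => a < b)
    ts addEntry "b"
    ts addEntry "a"
    ts.toList             // List("a", "b") -- in-order traversal of the tree
    ts findEntry "a"      // "a"
    ts findEntry "z"      // null: not present
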
diff --git a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
index b1f4696..4f7a9ff 100644
--- a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
+++ b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
@@ -7,9 +7,9 @@ class WorkScheduler {
 
   type Action = () => Unit
 
-  private var todo = new mutable.Queue[Action]
-  private var throwables = new mutable.Queue[Throwable]
-  private var interruptReqs = new mutable.Queue[InterruptReq]
+  private val todo = new mutable.Queue[Action]
+  private val throwables = new mutable.Queue[Throwable]
+  private val interruptReqs = new mutable.Queue[InterruptReq]
 
   /** Called from server: block until one of todo list, throwables or interruptReqs is nonempty */
   def waitForMoreWork() = synchronized {
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index d34d4ee..bd95fdb 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -3,14 +3,13 @@
  * @author Paul Phillips
  */
 
-package scala.tools.nsc
+package scala
+package tools
+package nsc
 
-import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, StringWriter }
+import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, StringWriter, Reader }
 
 package object util {
-
-  implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
-
   // forwarder for old code that builds against 2.9 and 2.10
   val Chars = scala.reflect.internal.Chars
 
@@ -18,16 +17,9 @@ package object util {
   type HashSet[T >: Null <: AnyRef] = scala.reflect.internal.util.HashSet[T]
   val HashSet = scala.reflect.internal.util.HashSet
 
-  def onull[T](value: T, orElse: => T): T = if (value == null) orElse else value
-
   /** Apply a function and return the passed value */
   def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
 
-  /** Frequency counter */
-  def freq[T](xs: Traversable[T]): Map[T, Int] = xs groupBy identity mapValues (_.size)
-
-  def freqrank[T](xs: Traversable[(T, Int)]): List[(Int, T)] = xs.toList map (_.swap) sortBy (-_._1)
-
   /** Execute code and then wait for all non-daemon Threads
    *  created and begun during its execution to complete.
    */
@@ -54,16 +46,15 @@ package object util {
     (result, ts2 filterNot (ts1 contains _))
   }
 
-  /** Given a function and a block of code, evaluates code block,
-   *  calls function with milliseconds elapsed, and returns block result.
-   */
-  def millisElapsedTo[T](f: Long => Unit)(body: => T): T = {
-    val start = System.currentTimeMillis
-    val result = body
-    val end = System.currentTimeMillis
-
-    f(end - start)
-    result
+  def stringFromReader(reader: Reader) = {
+    val writer = new StringWriter()
+    var c = reader.read()
+    while(c != -1) {
+      writer.write(c)
+      c = reader.read()
+    }
+    reader.close()
+    writer.toString()
   }
 
   /** Generate a string using a routine that wants to write on a stream. */
@@ -83,60 +74,58 @@ package object util {
   }
   def stackTraceString(ex: Throwable): String = stringFromWriter(ex printStackTrace _)
 
-  lazy val trace = new SimpleTracer(System.out)
-  lazy val errtrace = new SimpleTracer(System.err)
-
-  @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
-  val StringOps = scala.reflect.internal.util.StringOps
-
-  @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
-  type StringOps = scala.reflect.internal.util.StringOps
-
-  @deprecated("Moved to scala.reflect.internal.util.TableDef", "2.10.0")
-  val TableDef = scala.reflect.internal.util.TableDef
+  /** A one line string which contains the class of the exception, the
+   *  message if any, and the first non-Predef location in the stack trace
+   *  (to exclude assert, require, etc.)
+   */
+  def stackTraceHeadString(ex: Throwable): String = {
+    val frame = ex.getStackTrace.dropWhile(_.getClassName contains "Predef") take 1 mkString ""
+    val msg   = ex.getMessage match { case null | "" => "" ; case s => s"""("$s")""" }
+    val clazz = ex.getClass.getName.split('.').last
 
-  @deprecated("Moved to scala.reflect.internal.util.TableDef", "2.10.0")
-  type TableDef[T] = scala.reflect.internal.util.TableDef[T]
+    s"$clazz$msg @ $frame"
+  }
 
-  @deprecated("scala.reflect.internal.util.WeakHashSet", "2.10.0")
-  type WeakHashSet[T <: AnyRef] = scala.reflect.internal.util.WeakHashSet[T]
+  implicit class StackTraceOps(private val e: Throwable) extends AnyVal with StackTracing {
+    /** Format the stack trace, returning the prefix consisting of frames that satisfy
+     *  a given predicate.
+     *  The format is similar to the typical case described in the JavaDoc
+     *  for [[java.lang.Throwable#printStackTrace]].
+     *  If a stack trace is truncated, it will be followed by a line of the form
+     *  `... 3 elided`, by analogy to the lines `... 3 more` which indicate
+     *  shared stack trace segments.
+     *  @param p the predicate to select the prefix
+     */
+    def stackTracePrefixString(p: StackTraceElement => Boolean): String = stackTracePrefixString(e)(p)
+  }
 
-  @deprecated("Moved to scala.reflect.internal.util.Position", "2.10.0")
-  val Position = scala.reflect.internal.util.Position
+  lazy val trace = new SimpleTracer(System.out)
 
+  // These four deprecated since 2.10.0 are still used in (at least)
+  // the sbt 0.12.4 compiler interface.
   @deprecated("Moved to scala.reflect.internal.util.Position", "2.10.0")
   type Position = scala.reflect.internal.util.Position
-
   @deprecated("Moved to scala.reflect.internal.util.NoPosition", "2.10.0")
   val NoPosition = scala.reflect.internal.util.NoPosition
-
   @deprecated("Moved to scala.reflect.internal.util.FakePos", "2.10.0")
   val FakePos = scala.reflect.internal.util.FakePos
-
   @deprecated("Moved to scala.reflect.internal.util.FakePos", "2.10.0")
   type FakePos = scala.reflect.internal.util.FakePos
 
-  @deprecated("Moved to scala.reflect.internal.util.OffsetPosition", "2.10.0")
-  type OffsetPosition = scala.reflect.internal.util.OffsetPosition
-
+  // These three were still used in scala-refactoring.
   @deprecated("Moved to scala.reflect.internal.util.RangePosition", "2.10.0")
   type RangePosition = scala.reflect.internal.util.RangePosition
-
   @deprecated("Moved to scala.reflect.internal.util.SourceFile", "2.10.0")
   type SourceFile = scala.reflect.internal.util.SourceFile
+  @deprecated("Moved to scala.reflect.internal.util.BatchSourceFile", "2.10.0")
+  type BatchSourceFile = scala.reflect.internal.util.BatchSourceFile
 
-  @deprecated("Moved to scala.reflect.internal.util.NoSourceFile", "2.10.0")
-  val NoSourceFile = scala.reflect.internal.util.NoSourceFile
-
-  @deprecated("Moved to scala.reflect.internal.util.NoFile", "2.10.0")
-  val NoFile = scala.reflect.internal.util.NoFile
-
-  @deprecated("Moved to scala.reflect.internal.util.ScriptSourceFile", "2.10.0")
-  val ScriptSourceFile = scala.reflect.internal.util.ScriptSourceFile
+  @deprecated("Moved to scala.reflect.internal.util.AbstractFileClassLoader", "2.11.0")
+  type AbstractFileClassLoader = scala.reflect.internal.util.AbstractFileClassLoader
 
-  @deprecated("Moved to scala.reflect.internal.util.ScriptSourceFile", "2.10.0")
-  type ScriptSourceFile = scala.reflect.internal.util.ScriptSourceFile
+  @deprecated("Moved to scala.reflect.internal.util.ScalaClassLoader", "2.11.0")
+  val ScalaClassLoader = scala.reflect.internal.util.ScalaClassLoader
 
-  @deprecated("Moved to scala.reflect.internal.util.BatchSourceFile", "2.10.0")
-  type BatchSourceFile = scala.reflect.internal.util.BatchSourceFile
+  @deprecated("Moved to scala.reflect.internal.util.ScalaClassLoader", "2.11.0")
+  type ScalaClassLoader = scala.reflect.internal.util.ScalaClassLoader
 }
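
The new stringFromReader helper drains a Reader one character at a time and closes it. A small usage sketch:

    import java.io.StringReader
    import scala.tools.nsc.util._

    stringFromReader(new StringReader("hello"))   // "hello"
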
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
index d35ac43..8630ecf 100644
--- a/src/compiler/scala/tools/reflect/FastTrack.scala
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -2,7 +2,10 @@ package scala.tools
 package reflect
 
 import scala.reflect.reify.Taggers
-import scala.tools.nsc.typechecker.{Analyzer, Macros}
+import scala.tools.nsc.typechecker.{ Analyzer, Macros }
+import scala.reflect.runtime.Macros.currentMirror
+import scala.reflect.api.Universe
+import scala.tools.reflect.quasiquotes.{ Quasiquotes => QuasiquoteImpls }
 
 /** Optimizes system macro expansions by hardwiring them directly to their implementations
  *  bypassing standard reflective load and invoke to avoid the overhead of Java/Scala reflection.
@@ -12,30 +15,43 @@ trait FastTrack {
 
   import global._
   import definitions._
-
   import scala.language.implicitConversions
-  private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } = new { val c: c0.type = c0 } with Taggers
-  private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } = new { val c: c0.type = c0 } with MacroImplementations
+  import treeInfo.Applied
+
+  private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } =
+    new { val c: c0.type = c0 } with Taggers
+  private implicit def context2macroimplementations(c0: MacroContext): FormatInterpolator { val c: c0.type } =
+    new { val c: c0.type = c0 } with FormatInterpolator
+  private implicit def context2quasiquote(c0: MacroContext): QuasiquoteImpls { val c: c0.type } =
+    new { val c: c0.type = c0 } with QuasiquoteImpls
+  private def makeBlackbox(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) =
+    sym -> new FastTrackEntry(pf, isBlackbox = true)
+  private def makeWhitebox(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) =
+    sym -> new FastTrackEntry(pf, isBlackbox = false)
 
-  implicit def fastTrackEntry2MacroRuntime(entry: FastTrackEntry): MacroRuntime = args => entry.run(args.c)
-  type FastTrackExpander = PartialFunction[(MacroContext, Tree), Tree]
-  case class FastTrackEntry(sym: Symbol, expander: FastTrackExpander) {
-    def validate(c: MacroContext): Boolean = expander.isDefinedAt((c, c.expandee))
-    def run(c: MacroContext): Any = {
-      val result = expander((c, c.expandee))
-      c.Expr[Nothing](result)(c.WeakTypeTag.Nothing)
+  final class FastTrackEntry(pf: PartialFunction[Applied, MacroContext => Tree], val isBlackbox: Boolean) extends (MacroArgs => Any) {
+    def validate(tree: Tree) = pf isDefinedAt Applied(tree)
+    def apply(margs: MacroArgs): margs.c.Expr[Nothing] = {
+      val MacroArgs(c, _) = margs
+      // Macros validated that the pf is defined here - and there's not much we could do if it weren't.
+      c.Expr[Nothing](pf(Applied(c.expandee))(c))(c.WeakTypeTag.Nothing)
     }
   }
 
-  lazy val fastTrack: Map[Symbol, FastTrackEntry] = {
-    var registry = Map[Symbol, FastTrackEntry]()
-    implicit class BindTo(sym: Symbol) { def bindTo(expander: FastTrackExpander): Unit = if (sym != NoSymbol) registry += sym -> FastTrackEntry(sym, expander) }
-    materializeClassTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List())) => c.materializeClassTag(tt.tpe) }
-    materializeWeakTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = false) }
-    materializeTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = true) }
-    ApiUniverseReify bindTo { case (c, Apply(TypeApply(_, List(tt)), List(expr))) => c.materializeExpr(c.prefix.tree, EmptyTree, expr) }
-    ReflectRuntimeCurrentMirror bindTo { case (c, _) => scala.reflect.runtime.Macros.currentMirror(c).tree }
-    StringContext_f bindTo { case (c, app at Apply(Select(Apply(_, parts), _), args)) => c.macro_StringInterpolation_f(parts, args, app.pos) }
-    registry
+  /** A map from a set of pre-established macro symbols to their implementations. */
+  def fastTrack: Map[Symbol, FastTrackEntry] = fastTrackCache()
+  private val fastTrackCache = perRunCaches.newGeneric[Map[Symbol, FastTrackEntry]] {
+    val runDefinitions = currentRun.runDefinitions
+    import runDefinitions._
+    Map[Symbol, FastTrackEntry](
+      makeBlackbox(        materializeClassTag) { case Applied(_, ttag :: Nil, _)                 => _.materializeClassTag(ttag.tpe) },
+      makeBlackbox(     materializeWeakTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _)     => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = false) },
+      makeBlackbox(         materializeTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _)     => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = true) },
+      makeBlackbox(           ApiUniverseReify) { case Applied(_, ttag :: Nil, (expr :: _) :: _)  => c => c.materializeExpr(c.prefix.tree, EmptyTree, expr) },
+      makeBlackbox(            StringContext_f) { case _                                          => _.interpolate },
+      makeBlackbox(ReflectRuntimeCurrentMirror) { case _                                          => c => currentMirror(c).tree },
+      makeWhitebox(  QuasiquoteClass_api_apply) { case _                                          => _.expandQuasiquote },
+      makeWhitebox(QuasiquoteClass_api_unapply) { case _                                          => _.expandQuasiquote }
+    )
   }
 }
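
Each fast-track entry pairs a macro symbol with a partial function over the shape of the macro application; validate asks isDefinedAt before the expansion runs. A deliberately simplified, self-contained sketch of that guarded-dispatch shape (plain type parameters stand in for the compiler's Tree and MacroContext):

    // A guarded dispatch entry: only applicable when its pattern matches.
    final class Entry[App, Ctx, Out](pf: PartialFunction[App, Ctx => Out]) {
      def validate(app: App): Boolean    = pf isDefinedAt app
      def apply(app: App, ctx: Ctx): Out = pf(app)(ctx)
    }

    val entry = new Entry[Int, String, String]({ case 1 => prefix => prefix + "one" })
    entry.validate(1)     // true
    entry(1, "got: ")     // "got: one"
    entry.validate(2)     // false -- a caller must check before applying
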
diff --git a/src/compiler/scala/tools/reflect/FormatInterpolator.scala b/src/compiler/scala/tools/reflect/FormatInterpolator.scala
new file mode 100644
index 0000000..e0f9bb6
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/FormatInterpolator.scala
@@ -0,0 +1,379 @@
+package scala.tools.reflect
+
+import scala.reflect.macros.runtime.Context
+import scala.collection.mutable.{ ListBuffer, Stack }
+import scala.reflect.internal.util.Position
+import scala.PartialFunction.cond
+import scala.util.matching.Regex.Match
+
+import java.util.{ Formatter, Formattable, IllegalFormatException }
+
+abstract class FormatInterpolator {
+  val c: Context
+  val global: c.universe.type = c.universe
+
+  import c.universe.{ Match => _, _ }
+  import definitions._
+  import treeInfo.Applied
+
+  @inline private def truly(body: => Unit): Boolean = { body ; true }
+  @inline private def falsely(body: => Unit): Boolean = { body ; false }
+
+  private def fail(msg: String) = c.abort(c.enclosingPosition, msg)
+  private def bail(msg: String) = global.abort(msg)
+
+  def interpolate: Tree = c.macroApplication match {
+    //case q"$_(..$parts).f(..$args)" =>
+    case Applied(Select(Apply(_, parts), _), _, argss) =>
+      val args = argss.flatten
+      def badlyInvoked = (parts.length != args.length + 1) && truly {
+        def because(s: String) = s"too $s arguments for interpolated string"
+        val (p, msg) =
+          if (parts.length == 0) (c.prefix.tree.pos, "there are no parts")
+          else if (args.length + 1 < parts.length)
+            (if (args.isEmpty) c.enclosingPosition else args.last.pos, because("few"))
+          else (args(parts.length-1).pos, because("many"))
+        c.abort(p, msg)
+      }
+      if (badlyInvoked) c.macroApplication else interpolated(parts, args)
+    case other =>
+      bail(s"Unexpected application ${showRaw(other)}")
+      other
+  }
+
+  /** Every part except the first must begin with a conversion for
+   *  the arg that preceded it. If the conversion is missing, "%s"
+   *  is inserted.
+   *
+   *  In any other position, the only permissible conversions are
+   *  the literals (%% and %n) or an index reference (%1$ or %<).
+   *
+   *  A conversion specifier has the form:
+   *
+   *  [index$][flags][width][.precision]conversion
+   *
+   *  1) "...${smth}" => okay, equivalent to "...${smth}%s"
+   *  2) "...${smth}blahblah" => okay, equivalent to "...${smth}%sblahblah"
+   *  3) "...${smth}%" => error
+   *  4) "...${smth}%n" => okay, equivalent to "...${smth}%s%n"
+   *  5) "...${smth}%%" => okay, equivalent to "...${smth}%s%%"
+   *  6) "...${smth}[%legalJavaConversion]" => okay*
+   *  7) "...${smth}[%illegalJavaConversion]" => error
+   *  *Legal according to [[http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html]]
+   */
+  def interpolated(parts: List[Tree], args: List[Tree]) = {
+    val fstring  = new StringBuilder
+    val evals    = ListBuffer[ValDef]()
+    val ids      = ListBuffer[Ident]()
+    val argStack = Stack(args: _*)
+
+    // create a tmp val and add it to the ids passed to format
+    def defval(value: Tree, tpe: Type): Unit = {
+      val freshName = TermName(c.freshName("arg$"))
+      evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos
+      ids += Ident(freshName)
+    }
+    // Append the nth part to the string builder, possibly prepending an omitted %s first.
+    // Sanity-check the % fields in this part.
+    def copyPart(part: Tree, n: Int): Unit = {
+      import SpecifierGroups.{ Spec, Index }
+      val s0 = part match {
+        case Literal(Constant(x: String)) => x
+        case _ => throw new IllegalArgumentException("internal error: argument parts must be a list of string literals")
+      }
+      def escapeHatch: PartialFunction[Throwable, String] = {
+        // trailing backslash, octal escape, or other
+        case e: StringContext.InvalidEscapeException =>
+          def errPoint = part.pos withPoint (part.pos.point + e.index)
+          def octalOf(c: Char) = Character.digit(c, 8)
+          def alt = {
+            def altOf(i: Int) = i match {
+              case '\b' => "\\b"
+              case '\t' => "\\t"
+              case '\n' => "\\n"
+              case '\f' => "\\f"
+              case '\r' => "\\r"
+              case '\"' => "${'\"'}" /* avoid lint warn */ +
+                " or a triple-quoted literal \"\"\"with embedded \" or \\u0022\"\"\""  // $" in future
+              case '\'' => "'"
+              case '\\' => """\\"""
+              case x    => "\\u%04x" format x
+            }
+            val suggest = {
+              val r = "([0-7]{1,3}).*".r
+              (s0 drop e.index + 1) match {
+                case r(n) => altOf { (0 /: n) { case (a, o) => (8 * a) + (o - '0') } }
+                case _    => ""
+              }
+            }
+            val txt =
+              if ("" == suggest) ""
+              else s", use $suggest instead"
+            txt
+          }
+          def badOctal = {
+            def msg(what: String) = s"Octal escape literals are $what$alt."
+            if (settings.future) {
+              c.error(errPoint, msg("unsupported"))
+              s0
+            } else {
+              c.enclosingUnit.deprecationWarning(errPoint, msg("deprecated"))
+              try StringContext.treatEscapes(s0) catch escapeHatch
+            }
+          }
+          if (e.index == s0.length - 1) {
+            c.error(errPoint, """Trailing '\' escapes nothing.""")
+            s0
+          } else if (octalOf(s0(e.index + 1)) >= 0) {
+            badOctal
+          } else {
+            c.error(errPoint, e.getMessage)
+            s0
+          }
+      }
+      val s  = try StringContext.processEscapes(s0) catch escapeHatch
+      val ms = fpat findAllMatchIn s
+
+      def errorLeading(op: Conversion) = op.errorAt(Spec, s"conversions must follow a splice; ${Conversion.literalHelp}")
+
+      def first = n == 0
+      // a conversion for the arg is required
+      if (!first) {
+        val arg = argStack.pop()
+        def s_%() = {
+          fstring append "%s"
+          defval(arg, AnyTpe)
+        }
+        def accept(op: Conversion) = {
+          if (!op.isLeading) errorLeading(op)
+          op.accepts(arg) match {
+            case Some(tpe) => defval(arg, tpe)
+            case None      =>
+          }
+        }
+        if (ms.hasNext) {
+          Conversion(ms.next, part.pos, args.size) match {
+            case Some(op) if op.isLiteral => s_%()
+            case Some(op) if op.indexed =>
+              if (op.index map (_ == n) getOrElse true) accept(op)
+              else {
+                // either some other arg num, or '<'
+                c.warning(op.groupPos(Index), "Index is not this arg")
+                s_%()
+              }
+            case Some(op) => accept(op)
+            case None     =>
+          }
+        } else s_%()
+      }
+      // any remaining conversions must be either literals or indexed
+      while (ms.hasNext) {
+        Conversion(ms.next, part.pos, args.size) match {
+          case Some(op) if first && op.hasFlag('<')   => op.badFlag('<', "No last arg")
+          case Some(op) if op.isLiteral || op.indexed => // OK
+          case Some(op) => errorLeading(op)
+          case None     =>
+        }
+      }
+      fstring append s
+    }
+
+    parts.zipWithIndex foreach {
+      case (part, n) => copyPart(part, n)
+    }
+
+    //q"{..$evals; ${fstring.toString}.format(..$ids)}"
+    locally {
+      val expr =
+        Apply(
+          Select(
+            Literal(Constant(fstring.toString)),
+            newTermName("format")),
+          ids.toList
+        )
+      val p = c.macroApplication.pos
+      Block(evals.toList, atPos(p.focus)(expr)) setPos p.makeTransparent
+    }
+  }
+
+  val fpat = """%(?:(\d+)\$)?([-#+ 0,(\<]+)?(\d+)?(\.\d+)?([tT]?[%a-zA-Z])?""".r
+  object SpecifierGroups extends Enumeration { val Spec, Index, Flags, Width, Precision, CC = Value }
+
+  val stdContextTags = new { val tc: c.type = c } with StdContextTags
+  import stdContextTags._
+  val tagOfFormattable = typeTag[Formattable]
+
+  /** A conversion specifier matched by `m` in the string part at `pos`,
+   *  with `argc` arguments to interpolate.
+   */
+  sealed trait Conversion {
+    def m: Match
+    def pos: Position
+    def argc: Int
+
+    import SpecifierGroups.{ Value => SpecGroup, _ }
+    private def maybeStr(g: SpecGroup) = Option(m group g.id)
+    private def maybeInt(g: SpecGroup) = maybeStr(g) map (_.toInt)
+    val index: Option[Int]     = maybeInt(Index)
+    val flags: Option[String]  = maybeStr(Flags)
+    val width: Option[Int]     = maybeInt(Width)
+    val precision: Option[Int] = maybeStr(Precision) map (_.drop(1).toInt)
+    val op: String             = maybeStr(CC) getOrElse ""
+
+    def cc: Char = if ("tT" contains op(0)) op(1) else op(0)
+
+    def indexed:   Boolean = index.nonEmpty || hasFlag('<')
+    def isLiteral: Boolean = false
+    def isLeading: Boolean = m.start(0) == 0
+    def verify:    Boolean = goodFlags && goodIndex
+    def accepts(arg: Tree): Option[Type]
+
+    val allFlags = "-#+ 0,(<"
+    def hasFlag(f: Char) = (flags getOrElse "") contains f
+    def hasAnyFlag(fs: String) = fs exists (hasFlag)
+
+    def badFlag(f: Char, msg: String) = {
+      val i = flags map (_.indexOf(f)) filter (_ >= 0) getOrElse 0
+      errorAtOffset(Flags, i, msg)
+    }
+    def groupPos(g: SpecGroup) = groupPosAt(g, 0)
+    def groupPosAt(g: SpecGroup, i: Int) = pos withPoint (pos.point + m.start(g.id) + i)
+    def errorAt(g: SpecGroup, msg: String) = c.error(groupPos(g), msg)
+    def errorAtOffset(g: SpecGroup, i: Int, msg: String) = c.error(groupPosAt(g, i), msg)
+
+    def noFlags = flags.isEmpty || falsely { errorAt(Flags, "flags not allowed") }
+    def noWidth = width.isEmpty || falsely { errorAt(Width, "width not allowed") }
+    def noPrecision = precision.isEmpty || falsely { errorAt(Precision, "precision not allowed") }
+    def only_-(msg: String) = {
+      val badFlags = (flags getOrElse "") filterNot { case '-' | '<' => true case _ => false }
+      badFlags.isEmpty || falsely { badFlag(badFlags(0), s"Only '-' allowed for $msg") }
+    }
+    protected def okFlags: String = allFlags
+    def goodFlags = {
+      val badFlags = flags map (_ filterNot (okFlags contains _))
+      for (bf <- badFlags; f <- bf) badFlag(f, s"Illegal flag '$f'")
+      badFlags.getOrElse("").isEmpty 
+    }
+    def goodIndex = {
+      if (index.nonEmpty && hasFlag('<'))
+        c.warning(groupPos(Index), "Argument index ignored if '<' flag is present")
+      val okRange = index map (i => i > 0 && i <= argc) getOrElse true
+      okRange || hasFlag('<') || falsely { errorAt(Index, "Argument index out of range") }
+    }
+    /** Pick the type of an arg to format from among the variants
+     *  supported by a conversion.  This is the type of the temporary,
+     *  so failure results in an erroneous assignment to the first variant.
+     *  A more complete message would be nice.
+     */
+    def pickAcceptable(arg: Tree, variants: Type*): Option[Type] =
+      variants find (arg.tpe <:< _) orElse (
+        variants find (c.inferImplicitView(arg, arg.tpe, _) != EmptyTree)
+      ) orElse Some(variants(0))
+  }
+  object Conversion {
+    import SpecifierGroups.{ Spec, CC, Width }
+    def apply(m: Match, p: Position, n: Int): Option[Conversion] = {
+      def badCC(msg: String) = {
+        val dk = new ErrorXn(m, p)
+        val at = if (dk.op.isEmpty) Spec else CC
+        dk.errorAt(at, msg)
+      }
+      def cv(cc: Char) = cc match {
+        case 'b' | 'B' | 'h' | 'H' | 's' | 'S' =>
+          new GeneralXn(m, p, n)
+        case 'c' | 'C' =>
+          new CharacterXn(m, p, n)
+        case 'd' | 'o' | 'x' | 'X' =>
+          new IntegralXn(m, p, n)
+        case 'e' | 'E' | 'f' | 'g' | 'G' | 'a' | 'A' =>
+          new FloatingPointXn(m, p, n)
+        case 't' | 'T' =>
+          new DateTimeXn(m, p, n)
+        case '%' | 'n' =>
+          new LiteralXn(m, p, n)
+        case _ =>
+          badCC(s"illegal conversion character '$cc'")
+          null
+      }
+      Option(m group CC.id) map (cc => cv(cc(0))) match {
+        case Some(x) => Option(x) filter (_.verify)
+        case None    =>
+          badCC(s"Missing conversion operator in '${m.matched}'; $literalHelp")
+          None
+      }
+    }
+    val literalHelp = "use %% for literal %, %n for newline"
+  }
+  class GeneralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+    def accepts(arg: Tree) = cc match {
+      case 's' | 'S' if hasFlag('#') => pickAcceptable(arg, tagOfFormattable.tpe)
+      case 'b' | 'B' => if (arg.tpe <:< NullTpe) Some(NullTpe) else Some(BooleanTpe)
+      case _         => Some(AnyTpe)
+    }
+    override protected def okFlags = cc match {
+      case 's' | 'S' => "-#<"
+      case _         => "-<"
+    }
+  }
+  class LiteralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+    import SpecifierGroups.Width
+    override val isLiteral = true
+    override def verify = op match {
+      case "%" => super.verify && noPrecision && truly(width foreach (_ => c.warning(groupPos(Width), "width ignored on literal")))
+      case "n" => noFlags && noWidth && noPrecision
+    }
+    override protected val okFlags = "-"
+    def accepts(arg: Tree) = None
+  }
+  class CharacterXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+    override def verify = super.verify && noPrecision && only_-("c conversion")
+    def accepts(arg: Tree) = pickAcceptable(arg, CharTpe, ByteTpe, ShortTpe, IntTpe)
+  }
+  class IntegralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+    override def verify = {
+      def d_# = (cc == 'd' && hasFlag('#') &&
+        truly { badFlag('#', "# not allowed for d conversion") }
+      )
+      def x_comma = (cc != 'd' && hasFlag(',') &&
+        truly { badFlag(',', "',' only allowed for d conversion of integral types") }
+      )
+      super.verify && noPrecision && !d_# && !x_comma
+    }
+    override def accepts(arg: Tree) = {
+      def isBigInt = arg.tpe <:< tagOfBigInt.tpe
+      val maybeOK = "+ ("
+      def bad_+ = cond(cc) {
+        case 'o' | 'x' | 'X' if hasAnyFlag(maybeOK) && !isBigInt =>
+          maybeOK filter hasFlag foreach (badf =>
+            badFlag(badf, s"only use '$badf' for BigInt conversions to o, x, X"))
+          true
+      }
+      if (bad_+) None else pickAcceptable(arg, IntTpe, LongTpe, ByteTpe, ShortTpe, tagOfBigInt.tpe)
+    }
+  }
+  class FloatingPointXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+    override def verify = super.verify && (cc match {
+      case 'a' | 'A' =>
+        val badFlags = ",(" filter hasFlag
+        noPrecision && badFlags.isEmpty || falsely {
+          badFlags foreach (badf => badFlag(badf, s"'$badf' not allowed for a, A"))
+        }
+      case _ => true
+    })
+    def accepts(arg: Tree) = pickAcceptable(arg, DoubleTpe, FloatTpe, tagOfBigDecimal.tpe)
+  }
+  class DateTimeXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+    import SpecifierGroups.CC
+    def hasCC = (op.length == 2 ||
+      falsely { errorAt(CC, "Date/time conversion must have two characters") })
+    def goodCC = ("HIklMSLNpzZsQBbhAaCYyjmdeRTrDFc" contains cc) ||
+      falsely { errorAtOffset(CC, 1, s"'$cc' doesn't seem to be a date or time conversion") }
+    override def verify = super.verify && hasCC && goodCC && noPrecision && only_-("date/time conversions")
+    def accepts(arg: Tree) = pickAcceptable(arg, LongTpe, tagOfCalendar.tpe, tagOfDate.tpe)
+  }
+  class ErrorXn(val m: Match, val pos: Position) extends Conversion {
+    val argc = 0
+    override def verify = false
+    def accepts(arg: Tree) = None
+  }
+}
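
The rules in the header comment above translate into compile-time rewrites of f"" strings into format calls; the '~>' comments below describe the generated call, by way of illustration:

    val smth = 42
    f"value: $smth"       // ~> "value: %s".format(42)   (missing conversion becomes %s)
    f"value: $smth%d"     // ~> "value: %d".format(42)
    f"rate: $smth%%"      // ~> "rate: %s%%".format(42)  (%% is a literal percent)
    // f"value: $smth%y"  // rejected at compile time: illegal conversion character 'y'
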
diff --git a/src/compiler/scala/tools/reflect/FrontEnd.scala b/src/compiler/scala/tools/reflect/FrontEnd.scala
index f0d3d59..e3341a4 100644
--- a/src/compiler/scala/tools/reflect/FrontEnd.scala
+++ b/src/compiler/scala/tools/reflect/FrontEnd.scala
@@ -21,7 +21,7 @@ trait FrontEnd {
   def hasErrors   = ERROR.count > 0
   def hasWarnings = WARNING.count > 0
 
-  case class Info(val pos: Position, val msg: String, val severity: Severity)
+  case class Info(pos: Position, msg: String, severity: Severity)
   val infos = new scala.collection.mutable.LinkedHashSet[Info]
 
   /** Handles incoming info */
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
deleted file mode 100644
index f4f385f..0000000
--- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+++ /dev/null
@@ -1,171 +0,0 @@
-package scala.tools.reflect
-
-import scala.reflect.macros.{ReificationException, UnexpectedReificationException}
-import scala.reflect.macros.runtime.Context
-import scala.collection.mutable.ListBuffer
-import scala.collection.mutable.Stack
-import scala.reflect.internal.util.OffsetPosition
-
-abstract class MacroImplementations {
-  val c: Context
-
-  import c.universe._
-  import definitions._
-
-  def macro_StringInterpolation_f(parts: List[Tree], args: List[Tree], origApplyPos: c.universe.Position): Tree = {
-    // the parts all have the same position information (as the expression is generated by the compiler)
-    // the args have correct position information
-
-    // the following conditions can only be violated if invoked directly
-    if (parts.length != args.length + 1) {
-      if(parts.length == 0)
-        c.abort(c.prefix.tree.pos, "too few parts")
-      else if(args.length + 1 < parts.length)
-        c.abort(if(args.length==0) c.enclosingPosition else args.last.pos,
-            "too few arguments for interpolated string")
-      else
-        c.abort(args(parts.length-1).pos,
-            "too many arguments for interpolated string")
-    }
-
-    val pi = parts.iterator
-    val bldr = new java.lang.StringBuilder
-    val evals = ListBuffer[ValDef]()
-    val ids = ListBuffer[Ident]()
-    val argStack = Stack(args : _*)
-
-    def defval(value: Tree, tpe: Type): Unit = {
-      val freshName = newTermName(c.fresh("arg$"))
-      evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos
-      ids += Ident(freshName)
-    }
-
-    def isFlag(ch: Char): Boolean = {
-      ch match {
-        case '-' | '#' | '+' | ' ' | '0' | ',' | '(' => true
-        case _ => false
-      }
-    }
-
-    def checkType(arg: Tree, variants: Type*): Option[Type] = {
-      variants.find(arg.tpe <:< _).orElse(
-        variants.find(c.inferImplicitView(arg, arg.tpe, _) != EmptyTree).orElse(
-            Some(variants(0))
-        )
-      )
-    }
-
-    val stdContextTags = new { val tc: c.type = c } with StdContextTags
-    import stdContextTags._
-
-    def conversionType(ch: Char, arg: Tree): Option[Type] = {
-      ch match {
-        case 'b' | 'B' =>
-          if(arg.tpe <:< NullTpe) Some(NullTpe) else Some(BooleanTpe)
-        case 'h' | 'H' =>
-          Some(AnyTpe)
-        case 's' | 'S' =>
-          Some(AnyTpe)
-        case 'c' | 'C' =>
-          checkType(arg, CharTpe, ByteTpe, ShortTpe, IntTpe)
-        case 'd' | 'o' | 'x' | 'X' =>
-          checkType(arg, IntTpe, LongTpe, ByteTpe, ShortTpe, tagOfBigInt.tpe)
-        case 'e' | 'E' | 'g' | 'G' | 'f' | 'a' | 'A'  =>
-          checkType(arg, DoubleTpe, FloatTpe, tagOfBigDecimal.tpe)
-        case 't' | 'T' =>
-          checkType(arg, LongTpe, tagOfCalendar.tpe, tagOfDate.tpe)
-        case _ => None
-      }
-    }
-
-    def copyString(first: Boolean): Unit = {
-      val strTree = pi.next()
-      val rawStr = strTree match {
-        case Literal(Constant(str: String)) => str
-        case _ => throw new IllegalArgumentException("internal error: argument parts must be a list of string literals")
-      }
-      val str = StringContext.treatEscapes(rawStr)
-      val strLen = str.length
-      val strIsEmpty = strLen == 0
-      def charAtIndexIs(idx: Int, ch: Char) = idx < strLen && str(idx) == ch
-      def isPercent(idx: Int) = charAtIndexIs(idx, '%')
-      def isConversion(idx: Int) = isPercent(idx) && !charAtIndexIs(idx + 1, 'n') && !charAtIndexIs(idx + 1, '%')
-      var idx = 0
-
-      def errorAtIndex(idx: Int, msg: String) = c.error(new OffsetPosition(strTree.pos.source, strTree.pos.point + idx), msg)
-      def wrongConversionString(idx: Int) = errorAtIndex(idx, "wrong conversion string")
-      def illegalConversionCharacter(idx: Int) = errorAtIndex(idx, "illegal conversion character")
-      def nonEscapedPercent(idx: Int) = errorAtIndex(idx, "percent signs not directly following splicees must be escaped")
-
-      // STEP 1: handle argument conversion
-      // 1) "...${smth}" => okay, equivalent to "...${smth}%s"
-      // 2) "...${smth}blahblah" => okay, equivalent to "...${smth}%sblahblah"
-      // 3) "...${smth}%" => error
-      // 4) "...${smth}%n" => okay, equivalent to "...${smth}%s%n"
-      // 5) "...${smth}%%" => okay, equivalent to "...${smth}%s%%"
-      // 6) "...${smth}[%legalJavaConversion]" => okay, according to http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html
-      // 7) "...${smth}[%illegalJavaConversion]" => error
-      if (!first) {
-        val arg = argStack.pop
-        if (isConversion(0)) {
-          // PRE str is not empty and str(0) == '%'
-          // argument index parameter is not allowed, thus parse
-          //    [flags][width][.precision]conversion
-          var pos = 1
-          while (pos < strLen && isFlag(str charAt pos)) pos += 1
-          while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1
-          if (pos < strLen && str.charAt(pos) == '.') {
-            pos += 1
-            while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1
-          }
-          if (pos < strLen) {
-            conversionType(str charAt pos, arg) match {
-              case Some(tpe) => defval(arg, tpe)
-              case None => illegalConversionCharacter(pos)
-            }
-          } else {
-            wrongConversionString(pos - 1)
-          }
-          idx = 1
-        } else {
-          bldr append "%s"
-          defval(arg, AnyTpe)
-        }
-      }
-
-      // STEP 2: handle the rest of the text
-      // 1) %n tokens are left as is
-      // 2) %% tokens are left as is
-      // 3) other usages of percents are reported as errors
-      if (!strIsEmpty) {
-        while (idx < strLen) {
-          if (isPercent(idx)) {
-            if (isConversion(idx)) nonEscapedPercent(idx)
-            else idx += 1 // skip n and % in %n and %%
-          }
-          idx += 1
-        }
-        bldr append (str take idx)
-      }
-    }
-
-    copyString(first = true)
-    while (pi.hasNext) {
-      copyString(first = false)
-    }
-
-    val fstring = bldr.toString
-//  val expr = c.reify(fstring.format((ids.map(id => Expr(id).eval)) : _*))
-//  https://issues.scala-lang.org/browse/SI-5824, therefore
-    val expr =
-      Apply(
-        Select(
-          Literal(Constant(fstring)),
-          newTermName("format")),
-        List(ids: _* )
-      );
-
-    Block(evals.toList, atPos(origApplyPos.focus)(expr)) setPos origApplyPos.makeTransparent
-  }
-
-}
\ No newline at end of file
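
The block removed above is the old implementation of the f"..." string interpolator macro: it assigns a conversion to each splice (defaulting to %s) and rewrites the whole call into a plain String.format invocation. A minimal sketch of the observable behaviour, with made-up values:

    val count = 3
    val label = "files"
    // The macro assembles a format string from the literal parts and the
    // per-splice conversions, then calls format on it with the spliced values.
    val viaInterpolator = f"$count%d $label%s"
    val viaFormat       = "%d %s".format(count, label)
    assert(viaInterpolator == viaFormat) // both are "3 files"
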
diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala
index f8ded56..ac63232 100644
--- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala
+++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala
@@ -12,9 +12,10 @@ class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val
   extends Global(currentSettings, reporter) with scala.tools.reflect.ReflectSetup with scala.reflect.runtime.SymbolTable {
 
   override def transformedType(sym: Symbol) =
-    erasure.transformInfo(sym,
-      uncurry.transformInfo(sym,
-        refChecks.transformInfo(sym, sym.info)))
+    postErasure.transformInfo(sym,
+      erasure.transformInfo(sym,
+        uncurry.transformInfo(sym,
+          refChecks.transformInfo(sym, sym.info))))
 
   override def isCompilerUniverse = true
 
@@ -36,5 +37,13 @@ class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val
   // (each mirror has its own set package symbols, because of the peculiarities of symbol loading in scala),
   // that `Predef` symbol only has a single owner, and this messes up visibility, which is calculated based on owners, not scopes.
   override def runtimeMirror(cl: ClassLoader): Mirror = rootMirror
+
+  // Mirror and RuntimeClass come from both Global and reflect.runtime.SymbolTable
+  // so here the compiler needs an extra push to help decide between those (in favor of the latter)
+  import scala.reflect.ClassTag
+  override type Mirror = JavaMirror
+  override implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[Mirror])
+  override type RuntimeClass = java.lang.Class[_]
+  override implicit val RuntimeClassTag: ClassTag[RuntimeClass] = ClassTag[RuntimeClass](classOf[RuntimeClass])
 }
 
diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala
index 116ae24..3ae21b6 100644
--- a/src/compiler/scala/tools/reflect/ReflectMain.scala
+++ b/src/compiler/scala/tools/reflect/ReflectMain.scala
@@ -4,7 +4,6 @@ package reflect
 import scala.tools.nsc.Driver
 import scala.tools.nsc.Global
 import scala.tools.nsc.Settings
-import scala.tools.nsc.util.ClassPath.DefaultJavaContext
 import scala.tools.nsc.util.ScalaClassLoader
 import scala.tools.util.PathResolver
 
@@ -16,4 +15,4 @@ object ReflectMain extends Driver {
   }
 
   override def newCompiler(): Global = new ReflectGlobal(settings, reporter, classloaderFromSettings(settings))
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/reflect/StdTags.scala b/src/compiler/scala/tools/reflect/StdTags.scala
index a3bc9b9..ee352c5 100644
--- a/src/compiler/scala/tools/reflect/StdTags.scala
+++ b/src/compiler/scala/tools/reflect/StdTags.scala
@@ -1,7 +1,6 @@
 package scala.tools
 package reflect
 
-import java.lang.{Class => jClass}
 import scala.reflect.{ClassTag, classTag}
 import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
 
@@ -19,12 +18,11 @@ trait StdTags {
       new TypeCreator {
         def apply[U <: ApiUniverse with Singleton](m: Mirror[U]): U # Type = {
           val u = m.universe
-          val pre = u.ThisType(m.staticPackage("scala.collection.immutable").moduleClass.asInstanceOf[u.Symbol])
-          u.TypeRef(pre, u.definitions.ListClass, List(u.definitions.StringClass.toTypeConstructor))
+          u.appliedType(u.definitions.ListClass.toType, List(u.definitions.StringClass.toType))
         }
       })
 
-  private def tagOfStaticClass[T: ClassTag]: u.TypeTag[T] =
+  protected def tagOfStaticClass[T: ClassTag]: u.TypeTag[T] =
     u.TypeTag[T](
       m,
       new TypeCreator {
@@ -35,8 +33,6 @@ trait StdTags {
   lazy val tagOfString = tagOfStaticClass[String]
   lazy val tagOfFile = tagOfStaticClass[scala.tools.nsc.io.File]
   lazy val tagOfDirectory = tagOfStaticClass[scala.tools.nsc.io.Directory]
-  lazy val tagOfStdReplVals = tagOfStaticClass[scala.tools.nsc.interpreter.StdReplVals]
-  lazy val tagOfIMain = tagOfStaticClass[scala.tools.nsc.interpreter.IMain]
   lazy val tagOfThrowable = tagOfStaticClass[java.lang.Throwable]
   lazy val tagOfClassLoader = tagOfStaticClass[java.lang.ClassLoader]
   lazy val tagOfBigInt = tagOfStaticClass[BigInt]
@@ -52,7 +48,7 @@ object StdRuntimeTags extends StdTags {
 }
 
 abstract class StdContextTags extends StdTags {
-  val tc: scala.reflect.macros.Context
+  val tc: scala.reflect.macros.contexts.Context
   val u: tc.universe.type = tc.universe
   val m = tc.mirror
 }
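
The tag creator above now builds the List[String] type with appliedType instead of spelling out a TypeRef by hand; the same construction expressed against the public runtime reflection API (a sketch, not code taken from the patch):

    import scala.reflect.runtime.universe._

    // Apply the List type constructor to String, yielding List[String].
    val listOfString = appliedType(typeOf[List[_]].typeConstructor, typeOf[String] :: Nil)
    assert(listOfString =:= typeOf[List[String]])
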
diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala
index ab814b6..dfe53be 100644
--- a/src/compiler/scala/tools/reflect/ToolBox.scala
+++ b/src/compiler/scala/tools/reflect/ToolBox.scala
@@ -21,13 +21,39 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
    */
   def frontEnd: FrontEnd
 
-  /** Typechecks a tree using this ToolBox.
+  /** Represents the mode of operation of the typechecker underlying `tb.typecheck` calls.
+   *  It is necessary because the shape of the tree alone is not enough to tell how it should be typechecked.
+   *  Can be TERMmode (typecheck as a term), TYPEmode (typecheck as a type) or PATTERNmode (typecheck as a pattern).
+   */
+  type TypecheckMode
+
+  /** Indicates that an argument to `c.typecheck` should be typechecked as a term.
+   *  This is the default typechecking mode in Scala 2.11 and the only one supported in Scala 2.10.
+   */
+  val TERMmode: TypecheckMode
+
+  /** Indicates that an argument to `c.typecheck` should be typechecked as a type.
+   */
+  val TYPEmode: TypecheckMode
+
+  /** Indicates that an argument to `c.typecheck` should be typechecked as a pattern.
+   */
+  val PATTERNmode: TypecheckMode
+
+  /** @see `Typers.typecheck`
+   */
+  @deprecated("Use `tb.typecheck` instead", "2.11.0")
+  def typeCheck(tree: u.Tree, pt: u.Type = u.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree =
+    typecheck(tree, TERMmode, pt, silent, withImplicitViewsDisabled, withMacrosDisabled)
+
+  /** Typechecks a tree against the expected type `pt`
+   *  under the typechecking mode specified in `mode`, with [[TERMmode]] being the default.
    *  This populates symbols and types of the tree and possibly transforms it to reflect certain desugarings.
    *
    *  If the tree has unresolved type variables (represented as instances of `FreeTypeSymbol` symbols),
    *  then they all have to be resolved first using `Tree.substituteTypes`, or an error occurs.
    *
-   *  If `silent` is false, `TypeError` will be thrown in case of a typecheck error.
+   *  If `silent` is false, `ToolBoxError` will be thrown in case of a typecheck error.
    *  If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs.
    *  Such errors don't vanish and can be inspected by turning on -Ydebug.
    *
@@ -35,7 +61,7 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
    *    `withImplicitViewsDisabled` recursively prohibits implicit views (though, implicit vals will still be looked up and filled in), default value is false
    *    `withMacrosDisabled` recursively prohibits macro expansions and macro-based implicits, default value is false
    */
-  def typeCheck(tree: u.Tree, pt: u.Type = u.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree
+  def typecheck(tree: u.Tree, mode: TypecheckMode = TERMmode, pt: u.Type = u.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree
 
   /** Infers an implicit value of the expected type `pt` in top-level context.
    *  Optional `pos` parameter provides a position that will be associated with the implicit search.
@@ -44,10 +70,10 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
    *  this API won't take into account the lexical context of the callsite, because
    *  currently it's impossible to reify it.
    *
-   *  If `silent` is false, `TypeError` will be thrown in case of an inference error.
+   *  If `silent` is false, `ToolBoxError` will be thrown in case of an inference error.
    *  If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs.
    *  Such errors don't vanish and can be inspected by turning on -Xlog-implicits.
-   *  Unlike in `typeCheck`, `silent` is true by default.
+   *  Unlike in `typecheck`, `silent` is true by default.
    */
   def inferImplicitValue(pt: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree
 
@@ -58,27 +84,24 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
    *  this API won't take into account the lexical context of the callsite, because
    *  currently it's impossible to reify it.
    *
-   *  If `silent` is false, `TypeError` will be thrown in case of an inference error.
+   *  If `silent` is false, `ToolBoxError` will be thrown in case of an inference error.
    *  If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs.
    *  Such errors don't vanish and can be inspected by turning on -Xlog-implicits.
-   *  Unlike in `typeCheck`, `silent` is true by default.
+   *  Unlike in `typecheck`, `silent` is true by default.
    */
   def inferImplicitView(tree: u.Tree, from: u.Type, to: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree
 
-  /** Recursively resets symbols and types in a given tree.
-   *
-   *  Note that this does not revert the tree to its pre-typer shape.
-   *  For more info, read up https://issues.scala-lang.org/browse/SI-5464.
-   */
-  def resetAllAttrs(tree: u.Tree): u.Tree
-
   /** Recursively resets locally defined symbols and types in a given tree.
-   *
-   *  Note that this does not revert the tree to its pre-typer shape.
-   *  For more info, read up https://issues.scala-lang.org/browse/SI-5464.
+   *  WARNING: Don't use this API, go for [[untypecheck]] instead.
    */
+  @deprecated("Use `tb.untypecheck` instead", "2.11.0")
   def resetLocalAttrs(tree: u.Tree): u.Tree
 
+  /**
+   *  @see [[scala.reflect.macros.Typers.untypecheck]]
+   */
+  def untypecheck(tree: u.Tree): u.Tree
+
   /** .. */
   def parse(code: String): u.Tree
 
@@ -93,6 +116,15 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
    */
   def compile(tree: u.Tree): () => Any
 
+  /** Defines a top-level class, trait or module in this ToolBox,
+   *  putting it into a uniquely-named package and returning a symbol that references the defined entity.
+   *  For a ClassDef, a ClassSymbol is returned, and for a ModuleDef, a ModuleSymbol is returned (not a module class, but a module itself).
+   *
+   *  This method can be used to generate definitions that will later be re-used by subsequent calls to
+   *  `compile`, `define` or `eval`. To refer to the generated definition in a tree, use q"$sym".
+   */
+  def define(tree: u.ImplDef): u.Symbol
+
   /** Compiles and runs a tree using this ToolBox.
    *  Is equivalent to `compile(tree)()`.
    */
@@ -101,4 +133,4 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
 
 /** Represents an error during toolboxing
  */
-case class ToolBoxError(val message: String, val cause: Throwable = null) extends Throwable(message, cause)
+case class ToolBoxError(message: String, cause: Throwable = null) extends Throwable(message, cause)
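
Taken together, the ToolBox changes above rename typeCheck to typecheck, add an explicit typechecking mode, and introduce untypecheck and define. A minimal usage sketch against the 2.11 runtime toolbox (the Util object and twice method below are made up for illustration):

    import scala.reflect.runtime.{currentMirror => cm}
    import scala.reflect.runtime.universe._
    import scala.tools.reflect.ToolBox

    val tb = cm.mkToolBox()

    // typecheck defaults to TERMmode; TYPEmode typechecks the tree as a type.
    val term = tb.typecheck(q"List(1, 2, 3).map(_ + 1)")
    val tpe  = tb.typecheck(tq"List[Int]", tb.TYPEmode)

    // untypecheck supersedes the deprecated resetLocalAttrs.
    val raw = tb.untypecheck(term)

    // define compiles a top-level definition and returns a symbol that
    // later trees can refer to via q"$sym" (cast only to satisfy ImplDef).
    val mdef = q"object Util { def twice(x: Int) = x * 2 }".asInstanceOf[ImplDef]
    val sym  = tb.define(mdef)
    println(tb.eval(q"$sym.twice(21)")) // 42
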
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 8803980..3b12086 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -1,14 +1,13 @@
-package scala.tools
+package scala
+package tools
 package reflect
 
+import scala.tools.cmd.CommandLineParser
+import scala.tools.nsc.Global
 import scala.tools.nsc.reporters._
 import scala.tools.nsc.CompilerCommand
-import scala.tools.nsc.Global
-import scala.tools.nsc.typechecker.Modes
-import scala.tools.nsc.io.VirtualDirectory
-import scala.tools.nsc.interpreter.AbstractFileClassLoader
-import scala.tools.nsc.util.FreshNameCreator
-import scala.tools.nsc.ast.parser.Tokens.EOF
+import scala.tools.nsc.io.{AbstractFile, VirtualDirectory}
+import scala.tools.nsc.util.AbstractFileClassLoader
 import scala.reflect.internal.Flags._
 import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, NoFile}
 import java.lang.{Class => jClass}
@@ -16,7 +15,7 @@ import scala.compat.Platform.EOL
 import scala.reflect.NameTransformer
 import scala.reflect.api.JavaUniverse
 import scala.reflect.io.NoAbstractFile
-import scala.tools.nsc.interactive.RangePositions
+import scala.reflect.internal.FatalError
 
 abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
 
@@ -32,8 +31,15 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
     lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, factorySelf.mirror.classLoader)
     lazy val mirror: u.Mirror = u.runtimeMirror(classLoader)
 
-    class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter: Reporter)
-    extends ReflectGlobal(settings, reporter, toolBoxSelf.classLoader) {
+    lazy val arguments = CommandLineParser.tokenize(options)
+    lazy val virtualDirectory =
+      arguments.iterator.sliding(2).collectFirst{ case Seq("-d", dir) => dir } match {
+        case Some(outDir) => AbstractFile.getDirectory(outDir)
+        case None => new VirtualDirectory("(memory)", None)
+      }
+
+    class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter0: Reporter)
+    extends ReflectGlobal(settings, reporter0, toolBoxSelf.classLoader) {
       import definitions._
 
       private val trace = scala.tools.nsc.util.trace when settings.debug.value
@@ -50,7 +56,6 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
       }
 
       // should be called after every use of ToolBoxGlobal in order to prevent leaks
-      // there's the `withCleanupCaches` method defined below, which provides a convenient interface for that
       def cleanupCaches(): Unit = {
         perRunCaches.clearAll()
         undoLog.clear()
@@ -59,11 +64,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
         lastSeenContext = null
       }
 
-      def withCleanupCaches[T](body: => T): T =
-        try body
-        finally cleanupCaches()
-
-      def verify(expr: Tree): Unit = {
+      def verify(expr: Tree): Tree = {
         // Previously toolboxes used to typecheck their inputs before compiling.
         // Actually, the initial demo by Martin first typechecked the reified tree,
         // then ran it, which typechecked it again, and only then launched the
@@ -76,22 +77,17 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
         val typed = expr filter (t => t.tpe != null && t.tpe != NoType && !t.isInstanceOf[TypeTree])
         if (!typed.isEmpty) throw ToolBoxError("reflective toolbox has failed: cannot operate on trees that are already typed")
 
-        val freeTypes = expr.freeTypes
-        if (freeTypes.length > 0) {
-          var msg = "reflective toolbox has failed:" + EOL
-          msg += "unresolved free type variables (namely: " + (freeTypes map (ft => "%s %s".format(ft.name, ft.origin)) mkString ", ") + "). "
-          msg += "have you forgot to use TypeTag annotations for type parameters external to a reifee? "
-          msg += "if you have troubles tracking free type variables, consider using -Xlog-free-types"
-          throw ToolBoxError(msg)
+        if (expr.freeTypes.nonEmpty) {
+          val ft_s = expr.freeTypes map (ft => s"  ${ft.name} ${ft.origin}") mkString "\n  "
+          throw ToolBoxError(s"""
+            |reflective toolbox failed due to unresolved free type variables:
+            |$ft_s
+            |have you forgotten to use TypeTag annotations for type parameters external to a reifee?
+            |if you have troubles tracking free type variables, consider using -Xlog-free-types
+            """.stripMargin.trim)
         }
-      }
-
-      def wrapIntoTerm(tree: Tree): Tree =
-        if (!tree.isTerm) Block(List(tree), Literal(Constant(()))) else tree
 
-      def unwrapFromTerm(tree: Tree): Tree = tree match {
-        case Block(List(tree), Literal(Constant(()))) => tree
-        case tree => tree
+        expr
       }
 
       def extractFreeTerms(expr0: Tree, wrapFreeTermRefs: Boolean): (Tree, scala.collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]) = {
@@ -103,9 +99,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
           if (namesakes.length > 0) name += ("$" + (namesakes.length + 1))
           freeTermNames += (ft -> newTermName(name + nme.REIFY_FREE_VALUE_SUFFIX))
         })
-        var expr = new Transformer {
+        val expr = new Transformer {
           override def transform(tree: Tree): Tree =
-            if (tree.hasSymbol && tree.symbol.isFreeTerm) {
+            if (tree.hasSymbolField && tree.symbol.isFreeTerm) {
               tree match {
                 case Ident(_) =>
                   val freeTermRef = Ident(freeTermNames(tree.symbol.asFreeTerm))
@@ -120,58 +116,59 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
         (expr, freeTermNames)
       }
 
-      def transformDuringTyper(expr0: Tree, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean)(transform: (analyzer.Typer, Tree) => Tree): Tree = {
-        verify(expr0)
-
-        // need to wrap the expr, because otherwise you won't be able to typecheck macros against something that contains free vars
-        var (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = false)
-        val dummies = freeTerms.map{ case (freeTerm, name) => ValDef(NoMods, name, TypeTree(freeTerm.info), Select(Ident(PredefModule), newTermName("$qmark$qmark$qmark"))) }.toList
-        expr = Block(dummies, wrapIntoTerm(expr))
-
-        // [Eugene] how can we implement that?
-        // !!! Why is this is in the empty package? If it's only to make
-        // it inaccessible then please put it somewhere designed for that
-        // rather than polluting the empty package with synthetics.
-        val ownerClass    = rootMirror.EmptyPackageClass.newClassSymbol(newTypeName("<expression-owner>"))
-        build.setTypeSignature(ownerClass, ClassInfoType(List(ObjectClass.tpe), newScope, ownerClass))
-        val owner         = ownerClass.newLocalDummy(expr.pos)
-        var currentTyper  = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(expr, owner))
-        val wrapper1      = if (!withImplicitViewsDisabled) (currentTyper.context.withImplicitsEnabled[Tree] _) else (currentTyper.context.withImplicitsDisabled[Tree] _)
-        val wrapper2      = if (!withMacrosDisabled) (currentTyper.context.withMacrosEnabled[Tree] _) else (currentTyper.context.withMacrosDisabled[Tree] _)
-        def wrapper       (tree: => Tree) = wrapper1(wrapper2(tree))
-
-        val run = new Run
-        run.symSource(ownerClass) = NoAbstractFile // need to set file to something different from null, so that currentRun.defines works
-        phase = run.typerPhase // need to set a phase to something <= typerPhase, otherwise implicits in typedSelect will be disabled
-        currentTyper.context.setReportErrors() // need to manually set context mode, otherwise typer.silent will throw exceptions
-        reporter.reset()
-
-        val expr1 = wrapper(transform(currentTyper, expr))
-        var (dummies1, unwrapped) = expr1 match {
-          case Block(dummies, unwrapped) => (dummies, unwrapped)
-          case unwrapped => (Nil, unwrapped)
-        }
-        var invertedIndex = freeTerms map (_.swap)
-        // todo. also fixup singleton types
-        unwrapped = new Transformer {
-          override def transform(tree: Tree): Tree =
-            tree match {
-              case Ident(name) if invertedIndex contains name =>
-                Ident(invertedIndex(name)) setType tree.tpe
-              case _ =>
-                super.transform(tree)
-            }
-        }.transform(unwrapped)
-        new TreeTypeSubstituter(dummies1 map (_.symbol), dummies1 map (dummy => SingleType(NoPrefix, invertedIndex(dummy.symbol.name)))).traverse(unwrapped)
-        unwrapped = if (expr0.isTerm) unwrapped else unwrapFromTerm(unwrapped)
-        unwrapped
+      def transformDuringTyper(expr: Tree, mode: scala.reflect.internal.Mode, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean)(transform: (analyzer.Typer, Tree) => Tree): Tree = {
+        def withWrapping(tree: Tree)(op: Tree => Tree) = if (mode == TERMmode) wrappingIntoTerm(tree)(op) else op(tree)
+        withWrapping(verify(expr))(expr1 => {
+          // need to extract free terms, because otherwise you won't be able to typecheck macros against something that contains them
+          val exprAndFreeTerms = extractFreeTerms(expr1, wrapFreeTermRefs = false)
+          var expr2 = exprAndFreeTerms._1
+          val freeTerms = exprAndFreeTerms._2
+          val dummies = freeTerms.map{ case (freeTerm, name) => ValDef(NoMods, name, TypeTree(freeTerm.info), Select(Ident(PredefModule), newTermName("$qmark$qmark$qmark"))) }.toList
+          expr2 = Block(dummies, expr2)
+
+          // !!! Why is this is in the empty package? If it's only to make
+          // it inaccessible then please put it somewhere designed for that
+          // rather than polluting the empty package with synthetics.
+          // [Eugene] how can we implement that?
+          val ownerClass       = rootMirror.EmptyPackageClass.newClassSymbol(newTypeName("<expression-owner>"))
+          build.setInfo(ownerClass, ClassInfoType(List(ObjectTpe), newScope, ownerClass))
+          val owner            = ownerClass.newLocalDummy(expr2.pos)
+          val currentTyper     = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(expr2, owner))
+          val withImplicitFlag = if (!withImplicitViewsDisabled) (currentTyper.context.withImplicitsEnabled[Tree] _) else (currentTyper.context.withImplicitsDisabled[Tree] _)
+          val withMacroFlag    = if (!withMacrosDisabled) (currentTyper.context.withMacrosEnabled[Tree] _) else (currentTyper.context.withMacrosDisabled[Tree] _)
+          def withContext      (tree: => Tree) = withImplicitFlag(withMacroFlag(tree))
+
+          val run = new Run
+          run.symSource(ownerClass) = NoAbstractFile // need to set file to something different from null, so that currentRun.defines works
+          phase = run.typerPhase // need to set a phase to something <= typerPhase, otherwise implicits in typedSelect will be disabled
+          currentTyper.context.setReportErrors() // need to manually set context mode, otherwise typer.silent will throw exceptions
+          reporter.reset()
+
+          val expr3 = withContext(transform(currentTyper, expr2))
+          var (dummies1, result) = expr3 match {
+            case Block(dummies, result) => ((dummies, result))
+            case result                 => ((Nil, result))
+          }
+          val invertedIndex = freeTerms map (_.swap)
+          result = new Transformer {
+            override def transform(tree: Tree): Tree =
+              tree match {
+                case Ident(name: TermName) if invertedIndex contains name =>
+                  Ident(invertedIndex(name)) setType tree.tpe
+                case _ =>
+                  super.transform(tree)
+              }
+          }.transform(result)
+          new TreeTypeSubstituter(dummies1 map (_.symbol), dummies1 map (dummy => SingleType(NoPrefix, invertedIndex(dummy.symbol.name.toTermName)))).traverse(result)
+          result
+        })
       }
 
-      def typeCheck(expr: Tree, pt: Type, silent: Boolean, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean): Tree =
-        transformDuringTyper(expr, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)(
+      def typecheck(expr: Tree, pt: Type, mode: scala.reflect.internal.Mode, silent: Boolean, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean): Tree =
+        transformDuringTyper(expr, mode, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)(
           (currentTyper, expr) => {
-            trace("typing (implicit views = %s, macros = %s): ".format(!withImplicitViewsDisabled, !withMacrosDisabled))(showAttributed(expr, true, true, settings.Yshowsymkinds.value))
-            currentTyper.silent(_.typed(expr, analyzer.EXPRmode, pt), reportAmbiguousErrors = false) match {
+            trace("typing (implicit views = %s, macros = %s): ".format(!withImplicitViewsDisabled, !withMacrosDisabled))(showAttributed(expr, true, true, settings.Yshowsymowners.value, settings.Yshowsymkinds.value))
+            currentTyper.silent(_.typed(expr, mode, pt), reportAmbiguousErrors = false) match {
               case analyzer.SilentResultValue(result) =>
                 trace("success: ")(showAttributed(result, true, true, settings.Yshowsymkinds.value))
                 result
@@ -183,26 +180,39 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
           })
 
       def inferImplicit(tree: Tree, pt: Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: Position): Tree =
-        transformDuringTyper(tree, withImplicitViewsDisabled = false, withMacrosDisabled = withMacrosDisabled)(
+        transformDuringTyper(tree, TERMmode, withImplicitViewsDisabled = false, withMacrosDisabled = withMacrosDisabled)(
           (currentTyper, tree) => {
-            trace("inferring implicit %s (macros = %s): ".format(if (isView) "view" else "value", !withMacrosDisabled))(showAttributed(pt, true, true, settings.Yshowsymkinds.value))
+            trace("inferring implicit %s (macros = %s): ".format(if (isView) "view" else "value", !withMacrosDisabled))(showAttributed(pt, true, true, settings.Yshowsymowners.value, settings.Yshowsymkinds.value))
             analyzer.inferImplicit(tree, pt, isView, currentTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw ToolBoxError(msg))
           })
 
+      private def wrapInPackageAndCompile(packageName: TermName, tree: ImplDef): Symbol = {
+        val pdef = PackageDef(Ident(packageName), List(tree))
+        val unit = new CompilationUnit(NoSourceFile)
+        unit.body = pdef
+
+        val run = new Run
+        reporter.reset()
+        run.compileUnits(List(unit), run.namerPhase)
+        throwIfErrors()
+
+        tree.symbol
+      }
+
       def compile(expr0: Tree): () => Any = {
-        val expr = wrapIntoTerm(expr0)
+        val expr = build.SyntacticBlock(expr0 :: Nil)
 
         val freeTerms = expr.freeTerms // need to calculate them here, because later on they will be erased
         val thunks = freeTerms map (fte => () => fte.value) // need to be lazy in order not to distort evaluation order
         verify(expr)
 
-        def wrap(expr0: Tree): ModuleDef = {
+        def wrapInModule(expr0: Tree): ModuleDef = {
           val (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = true)
 
-          val (obj, mclazz) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol(
-            nextWrapperModuleName())
+          val (obj, _) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol(
+            nextWrapperModuleName(), NoPosition, NoFlags)
 
-          val minfo = ClassInfoType(List(ObjectClass.tpe), newScope, obj.moduleClass)
+          val minfo = ClassInfoType(List(ObjectTpe), newScope, obj.moduleClass)
           obj.moduleClass setInfo minfo
           obj setInfo obj.moduleClass.tpe
 
@@ -212,7 +222,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
             val (fv, name) = schema
             meth.newValueParameter(name, newFlags = if (fv.hasStableFlag) STABLE else 0) setInfo appliedType(definitions.FunctionClass(0).tpe, List(fv.tpe.resultType))
           }
-          meth setInfo MethodType(freeTerms.map(makeParam).toList, AnyClass.tpe)
+          meth setInfo MethodType(freeTerms.map(makeParam).toList, AnyTpe)
           minfo.decls enter meth
           def defOwner(tree: Tree): Symbol = tree find (_.isDef) map (_.symbol) match {
             case Some(sym) if sym != null && sym != NoSymbol => sym.owner
@@ -223,33 +233,25 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
 
           val moduledef = ModuleDef(
               obj,
-              Template(
-                  List(TypeTree(ObjectClass.tpe)),
-                  emptyValDef,
+              gen.mkTemplate(
+                  List(TypeTree(ObjectTpe)),
+                  noSelfType,
                   NoMods,
                   List(),
-                  List(List()),
                   List(methdef),
                   NoPosition))
-          trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymkinds.value))
+          trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymowners.value, settings.Yshowsymkinds.value))
 
-          var cleanedUp = resetLocalAttrs(moduledef)
-          trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value))
+          val cleanedUp = resetAttrs(moduledef)
+          trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymowners.value, settings.Yshowsymkinds.value))
           cleanedUp.asInstanceOf[ModuleDef]
         }
 
-        val mdef = wrap(expr)
-        val pdef = PackageDef(Ident(mdef.name), List(mdef))
-        val unit = new CompilationUnit(NoSourceFile)
-        unit.body = pdef
+        val mdef = wrapInModule(expr)
+        val msym = wrapInPackageAndCompile(mdef.name, mdef)
 
-        val run = new Run
-        reporter.reset()
-        run.compileUnits(List(unit), run.namerPhase)
-        throwIfErrors()
-
-        val className = mdef.symbol.fullName
-        if (settings.debug.value) println("generated: "+className)
+        val className = msym.fullName
+        if (settings.debug) println("generated: "+className)
         def moduleFileName(className: String) = className + "$"
         val jclazz = jClass.forName(moduleFileName(className), true, classLoader)
         val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get
@@ -275,90 +277,101 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
         }
       }
 
+      def define(tree: ImplDef): Symbol = {
+        val freeTerms = tree.freeTerms
+        if (freeTerms.nonEmpty) throw ToolBoxError(s"reflective toolbox has failed: cannot have free terms in a top-level definition")
+        verify(tree)
+        wrapInPackageAndCompile(nextWrapperModuleName(), tree)
+      }
+
       def parse(code: String): Tree = {
-        val run = new Run
         reporter.reset()
-        val file = new BatchSourceFile("<toolbox>", code)
-        val unit = new CompilationUnit(file)
-        phase = run.parserPhase
-        val parser = new syntaxAnalyzer.UnitParser(unit)
-        val parsed = parser.templateStats()
-        parser.accept(EOF)
+        val tree = gen.mkTreeOrBlock(newUnitParser(code, "<toolbox>").parseStatsOrPackages())
         throwIfErrors()
-        parsed match {
-          case expr :: Nil => expr
-          case stats :+ expr => Block(stats, expr)
-        }
+        tree
       }
 
-      def showAttributed(artifact: Any, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = {
+      def showAttributed(artifact: Any, printTypes: Boolean = true, printIds: Boolean = true, printOwners: Boolean = false, printKinds: Boolean = false): String = {
         val saved1 = settings.printtypes.value
         val saved2 = settings.uniqid.value
-        val saved3 = settings.Yshowsymkinds.value
+        val saved3 = settings.Yshowsymowners.value
+        val saved4 = settings.Yshowsymkinds.value
         try {
           settings.printtypes.value = printTypes
           settings.uniqid.value = printIds
+          settings.Yshowsymowners.value = printOwners
           settings.Yshowsymkinds.value = printKinds
           artifact.toString
         } finally {
           settings.printtypes.value = saved1
           settings.uniqid.value = saved2
-          settings.Yshowsymkinds.value = saved3
+          settings.Yshowsymowners.value = saved3
+          settings.Yshowsymkinds.value = saved4
         }
       }
 
       // reporter doesn't accumulate errors, but the front-end does
       def throwIfErrors() = {
-        if (frontEnd.hasErrors) {
-          var msg = "reflective compilation has failed: " + EOL + EOL
-          msg += frontEnd.infos map (_.msg) mkString EOL
-          throw ToolBoxError(msg)
-        }
+        if (frontEnd.hasErrors) throw ToolBoxError(
+          "reflective compilation has failed:" + EOL + EOL + (frontEnd.infos map (_.msg) mkString EOL)
+        )
       }
     }
 
-    // todo. is not going to work with quoted arguments with embedded whitespaces
-    lazy val arguments = options.split(" ")
+    trait CompilerApi {
+      val compiler: ToolBoxGlobal
+      val importer: compiler.Importer { val from: u.type }
+      val exporter: u.Importer { val from: compiler.type }
+    }
 
-    lazy val virtualDirectory =
-      (arguments zip arguments.tail).collect{ case ("-d", dir) => dir }.lastOption match {
-        case Some(outDir) => scala.tools.nsc.io.AbstractFile.getDirectory(outDir)
-        case None => new VirtualDirectory("(memory)", None)
+    object withCompilerApi {
+      private object api extends CompilerApi {
+        lazy val compiler: ToolBoxGlobal = {
+          try {
+            val errorFn: String => Unit = msg => frontEnd.log(scala.reflect.internal.util.NoPosition, msg, frontEnd.ERROR)
+            val command = new CompilerCommand(arguments.toList, errorFn)
+            command.settings.outputDirs setSingleOutput virtualDirectory
+            val instance = new ToolBoxGlobal(command.settings, frontEndToReporter(frontEnd, command.settings))
+            if (frontEnd.hasErrors) {
+              throw ToolBoxError(
+                "reflective compilation has failed: cannot initialize the compiler:" + EOL + EOL +
+                (frontEnd.infos map (_.msg) mkString EOL)
+              )
+            }
+            instance
+          } catch {
+            case ex: Throwable =>
+              throw ToolBoxError(s"reflective compilation has failed: cannot initialize the compiler due to $ex", ex)
+          }
+        }
+
+        lazy val importer = compiler.mkImporter(u)
+        lazy val exporter = importer.reverse
       }
 
-    lazy val compiler: ToolBoxGlobal = {
-      try {
-        val errorFn: String => Unit = msg => frontEnd.log(scala.reflect.internal.util.NoPosition, msg, frontEnd.ERROR)
-        val command = new CompilerCommand(arguments.toList, errorFn)
-        val settings = command.settings
-        settings.outputDirs setSingleOutput virtualDirectory
-        val reporter = frontEndToReporter(frontEnd, command.settings)
-        val instance =
-          if (settings.Yrangepos.value) new ToolBoxGlobal(settings, reporter) with RangePositions
-          else new ToolBoxGlobal(settings, reporter)
-        if (frontEnd.hasErrors) {
-          var msg = "reflective compilation has failed: cannot initialize the compiler: " + EOL + EOL
-          msg += frontEnd.infos map (_.msg) mkString EOL
-          throw ToolBoxError(msg)
-        }
-        instance
-      } catch {
-        case ex: Throwable =>
-          var msg = "reflective compilation has failed: cannot initialize the compiler due to %s".format(ex.toString)
-          throw ToolBoxError(msg, ex)
+      private val toolBoxLock = new Object
+      def apply[T](f: CompilerApi => T): T = toolBoxLock.synchronized {
+        try f(api)
+        catch { case ex: FatalError => throw ToolBoxError(s"fatal compiler error", ex) }
+        finally api.compiler.cleanupCaches()
       }
     }
 
-    lazy val importer = compiler.mkImporter(u)
-    lazy val exporter = importer.reverse
+    type TypecheckMode = scala.reflect.internal.Mode
+    val TypecheckMode = scala.reflect.internal.Mode
+    val TERMmode = TypecheckMode.EXPRmode
+    val TYPEmode = TypecheckMode.TYPEmode | TypecheckMode.FUNmode
+    val PATTERNmode = TypecheckMode.PATTERNmode
 
-    def typeCheck(tree: u.Tree, expectedType: u.Type, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree = compiler.withCleanupCaches {
-      if (compiler.settings.verbose.value) println("importing "+tree+", expectedType = "+expectedType)
-      var ctree: compiler.Tree = importer.importTree(tree)
-      var cexpectedType: compiler.Type = importer.importType(expectedType)
+    def typecheck(tree: u.Tree, mode: TypecheckMode = TERMmode, expectedType: u.Type, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree = withCompilerApi { compilerApi =>
+      import compilerApi._
+
+      if (compiler.settings.verbose) println("importing "+tree+", expectedType = "+expectedType)
+      val ctree: compiler.Tree = importer.importTree(tree)
+      val cexpectedType: compiler.Type = importer.importType(expectedType)
 
-      if (compiler.settings.verbose.value) println("typing "+ctree+", expectedType = "+expectedType)
-      val ttree: compiler.Tree = compiler.typeCheck(ctree, cexpectedType, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)
+      if (compiler.settings.verbose) println("typing "+ctree+", expectedType = "+expectedType)
+      val ttree: compiler.Tree = compiler.typecheck(ctree, cexpectedType, mode, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)
       val uttree = exporter.importTree(ttree)
       uttree
     }
@@ -368,54 +381,65 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
     }
 
     def inferImplicitView(tree: u.Tree, from: u.Type, to: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree = {
-      val viewTpe = u.appliedType(u.definitions.FunctionClass(1).toTypeConstructor, List(from, to))
+      val functionTypeCtor = u.definitions.FunctionClass(1).asClass.toTypeConstructor
+      val viewTpe = u.appliedType(functionTypeCtor, List(from, to))
       inferImplicit(tree, viewTpe, isView = true, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = pos)
     }
 
-    private def inferImplicit(tree: u.Tree, pt: u.Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: u.Position): u.Tree = compiler.withCleanupCaches {
-      if (compiler.settings.verbose.value) println("importing "+pt, ", tree = "+tree+", pos = "+pos)
-      var ctree: compiler.Tree = importer.importTree(tree)
-      var cpt: compiler.Type = importer.importType(pt)
-      var cpos: compiler.Position = importer.importPosition(pos)
+    private def inferImplicit(tree: u.Tree, pt: u.Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: u.Position): u.Tree = withCompilerApi { compilerApi =>
+      import compilerApi._
 
-      if (compiler.settings.verbose.value) println("inferring implicit %s of type %s, macros = %s".format(if (isView) "view" else "value", pt, !withMacrosDisabled))
+      if (compiler.settings.verbose) println(s"importing pt=$pt, tree=$tree, pos=$pos")
+      val ctree: compiler.Tree = importer.importTree(tree)
+      val cpt: compiler.Type = importer.importType(pt)
+      val cpos: compiler.Position = importer.importPosition(pos)
+
+      if (compiler.settings.verbose) println("inferring implicit %s of type %s, macros = %s".format(if (isView) "view" else "value", pt, !withMacrosDisabled))
       val itree: compiler.Tree = compiler.inferImplicit(ctree, cpt, isView = isView, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = cpos)
       val uitree = exporter.importTree(itree)
       uitree
     }
 
-    def resetAllAttrs(tree: u.Tree): u.Tree = {
-      val ctree: compiler.Tree = importer.importTree(tree)
-      val ttree: compiler.Tree = compiler.resetAllAttrs(ctree)
-      val uttree = exporter.importTree(ttree)
-      uttree
-    }
-
-    def resetLocalAttrs(tree: u.Tree): u.Tree = {
+    def resetLocalAttrs(tree: u.Tree): u.Tree = withCompilerApi { compilerApi =>
+      import compilerApi._
       val ctree: compiler.Tree = importer.importTree(tree)
-      val ttree: compiler.Tree = compiler.resetLocalAttrs(ctree)
+      val ttree: compiler.Tree = compiler.resetAttrs(ctree)
       val uttree = exporter.importTree(ttree)
       uttree
     }
 
-    def showAttributed(tree: u.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String =
-      compiler.showAttributed(importer.importTree(tree), printTypes, printIds, printKinds)
+    def untypecheck(tree: u.Tree): u.Tree = resetLocalAttrs(tree)
 
-    def parse(code: String): u.Tree = {
-      if (compiler.settings.verbose.value) println("parsing "+code)
+    def parse(code: String): u.Tree = withCompilerApi { compilerApi =>
+      import compilerApi._
+      if (compiler.settings.verbose) println("parsing "+code)
       val ctree: compiler.Tree = compiler.parse(code)
       val utree = exporter.importTree(ctree)
       utree
     }
 
-    def compile(tree: u.Tree): () => Any = {
-      if (compiler.settings.verbose.value) println("importing "+tree)
+    def compile(tree: u.Tree): () => Any = withCompilerApi { compilerApi =>
+      import compilerApi._
+
+      if (compiler.settings.verbose) println("importing "+tree)
       val ctree: compiler.Tree = importer.importTree(tree)
 
-      if (compiler.settings.verbose.value) println("compiling "+ctree)
+      if (compiler.settings.verbose) println("compiling "+ctree)
       compiler.compile(ctree)
     }
 
+    def define(tree: u.ImplDef): u.Symbol = withCompilerApi { compilerApi =>
+      import compilerApi._
+
+      if (compiler.settings.verbose) println("importing "+tree)
+      val ctree: compiler.ImplDef = importer.importTree(tree).asInstanceOf[compiler.ImplDef]
+
+      if (compiler.settings.verbose) println("defining "+ctree)
+      val csym: compiler.Symbol = compiler.define(ctree)
+      val usym = exporter.importSymbol(csym)
+      usym
+    }
+
     def eval(tree: u.Tree): Any = compile(tree)()
   }
 }
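
One user-visible consequence of the factory rework above: toolbox options are now tokenized with CommandLineParser, and a -d argument selects an on-disk output directory instead of the in-memory one. A sketch (the path is illustrative and must already exist):

    import scala.reflect.runtime.{currentMirror => cm}
    import scala.reflect.runtime.universe._
    import scala.tools.reflect.ToolBox

    // Default: generated wrapper classes live in an in-memory VirtualDirectory.
    val inMemory = cm.mkToolBox()

    // With -d, class files for compiled trees are written to the given directory.
    val onDisk = cm.mkToolBox(options = "-d /tmp/toolbox-classes")
    onDisk.eval(q"40 + 2") // compiles the wrapper to /tmp/toolbox-classes and runs it
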
diff --git a/src/compiler/scala/tools/reflect/WrappedProperties.scala b/src/compiler/scala/tools/reflect/WrappedProperties.scala
index 7ce0171..523287f 100644
--- a/src/compiler/scala/tools/reflect/WrappedProperties.scala
+++ b/src/compiler/scala/tools/reflect/WrappedProperties.scala
@@ -25,6 +25,7 @@ trait WrappedProperties extends PropertiesTrait {
   override def clearProp(name: String)               = wrap(super.clearProp(name)).orNull
   override def envOrElse(name: String, alt: String)  = wrap(super.envOrElse(name, alt)) getOrElse alt
   override def envOrNone(name: String)               = wrap(super.envOrNone(name)).flatten
+  override def envOrSome(name: String, alt: Option[String]) = wrap(super.envOrNone(name)).flatten orElse alt
 
   def systemProperties: List[(String, String)] = {
     import scala.collection.JavaConverters._
diff --git a/src/compiler/scala/tools/reflect/package.scala b/src/compiler/scala/tools/reflect/package.scala
index 3f880bf..1055894 100644
--- a/src/compiler/scala/tools/reflect/package.scala
+++ b/src/compiler/scala/tools/reflect/package.scala
@@ -32,7 +32,7 @@ package object reflect {
 
   /** Creates a reporter that prints messages to the console according to the settings.
    *
-   *  ``minSeverity'' determines minimum severity of the messages to be printed.
+   *  `minSeverity` determines minimum severity of the messages to be printed.
    *  0 stands for INFO, 1 stands for WARNING and 2 stands for ERROR.
    */
   // todo. untangle warningsAsErrors from Reporters. I don't feel like moving this flag here!
@@ -52,7 +52,7 @@ package object reflect {
     override def hasWarnings = reporter.hasWarnings
 
     def display(info: Info): Unit = info.severity match {
-      case API_INFO => reporter.info(info.pos, info.msg, false)
+      case API_INFO => reporter.info(info.pos, info.msg, force = false)
       case API_WARNING => reporter.warning(info.pos, info.msg)
       case API_ERROR => reporter.error(info.pos, info.msg)
     }
@@ -76,7 +76,6 @@ package object reflect {
   private[reflect] def frontEndToReporter(frontEnd: FrontEnd, settings0: Settings): Reporter = new AbstractReporter {
     val settings = settings0
 
-    import frontEnd.{Severity => ApiSeverity}
     val API_INFO = frontEnd.INFO
     val API_WARNING = frontEnd.WARNING
     val API_ERROR = frontEnd.ERROR
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
new file mode 100644
index 0000000..68cc728
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
@@ -0,0 +1,245 @@
+package scala.tools.reflect
+package quasiquotes
+
+import scala.collection.{immutable, mutable}
+import scala.reflect.internal.Flags._
+import scala.reflect.macros.TypecheckException
+
+class Rank private[Rank](val value: Int) extends AnyVal {
+  def pred = { assert(value - 1 >= 0); new Rank(value - 1) }
+  def succ = new Rank(value + 1)
+  override def toString = if (value == 0) "no dots" else "." * (value + 1)
+}
+
+object Rank {
+  val NoDot = new Rank(0)
+  val DotDot = new Rank(1)
+  val DotDotDot = new Rank(2)
+  object Dot { def unapply(rank: Rank) = rank != NoDot }
+  def parseDots(part: String) = {
+    if (part.endsWith("...")) (part.stripSuffix("..."), DotDotDot)
+    else if (part.endsWith("..")) (part.stripSuffix(".."), DotDot)
+    else (part, NoDot)
+  }
+}
+
+/** Defines abstractions that provide support for splicing into Scala syntax.
+ */
+trait Holes { self: Quasiquotes =>
+  import global._
+  import Rank._
+  import definitions._
+  import universeTypes._
+
+  private lazy val IterableTParam = IterableClass.typeParams(0).asType.toType
+  private def inferParamImplicit(tfun: Type, targ: Type) = c.inferImplicitValue(appliedType(tfun, List(targ)), silent = true)
+  private def inferLiftable(tpe: Type): Tree = inferParamImplicit(liftableType, tpe)
+  private def inferUnliftable(tpe: Type): Tree = inferParamImplicit(unliftableType, tpe)
+  private def isLiftableType(tpe: Type) = inferLiftable(tpe) != EmptyTree
+  private def isNativeType(tpe: Type) =
+    (tpe <:< treeType) || (tpe <:< nameType) || (tpe <:< modsType) ||
+    (tpe <:< flagsType) || (tpe <:< symbolType)
+  private def isBottomType(tpe: Type) =
+    tpe <:< NothingClass.tpe || tpe <:< NullClass.tpe
+  private def extractIterableTParam(tpe: Type) =
+    IterableTParam.asSeenFrom(tpe, IterableClass)
+  private def stripIterable(tpe: Type, limit: Rank = DotDotDot): (Rank, Type) =
+    if (limit == NoDot) (NoDot, tpe)
+    else if (tpe != null && !isIterableType(tpe)) (NoDot, tpe)
+    else if (isBottomType(tpe)) (NoDot, tpe)
+    else {
+      val targ = extractIterableTParam(tpe)
+      val (rank, innerTpe) = stripIterable(targ, limit.pred)
+      (rank.succ, innerTpe)
+    }
+  private def iterableTypeFromRank(n: Rank, tpe: Type): Type = {
+    if (n == NoDot) tpe
+    else appliedType(IterableClass.toType, List(iterableTypeFromRank(n.pred, tpe)))
+  }
+
+  /** Hole encapsulates information about unquotees in quasiquotes.
+   *  It packs together a rank, pre-reified tree representation
+   *  (possibly preprocessed) and position.
+   */
+  abstract class Hole {
+    val tree: Tree
+    val pos: Position
+    val rank: Rank
+  }
+
+  object Hole {
+    def apply(rank: Rank, tree: Tree): Hole =
+      if (method != nme.unapply) new ApplyHole(rank, tree)
+      else new UnapplyHole(rank, tree)
+    def unapply(hole: Hole): Some[(Tree, Rank)] = Some((hole.tree, hole.rank))
+  }
+
+  class ApplyHole(annotatedRank: Rank, unquotee: Tree) extends Hole {
+    val (strippedTpe, tpe): (Type, Type) = {
+      val (strippedRank, strippedTpe) = stripIterable(unquotee.tpe, limit = annotatedRank)
+      if (isBottomType(strippedTpe)) cantSplice()
+      else if (isNativeType(strippedTpe)) {
+        if (strippedRank != NoDot && !(strippedTpe <:< treeType) && !isLiftableType(strippedTpe)) cantSplice()
+        else (strippedTpe, iterableTypeFromRank(annotatedRank, strippedTpe))
+      } else if (isLiftableType(strippedTpe)) (strippedTpe, iterableTypeFromRank(annotatedRank, treeType))
+      else cantSplice()
+    }
+
+    val tree = {
+      def inner(itpe: Type)(tree: Tree) =
+        if (isNativeType(itpe)) tree
+        else if (isLiftableType(itpe)) lifted(itpe)(tree)
+        else global.abort("unreachable")
+      if (annotatedRank == NoDot) inner(strippedTpe)(unquotee)
+      else iterated(annotatedRank, unquotee, unquotee.tpe)
+    }
+
+    val pos = unquotee.pos
+
+    val rank = stripIterable(tpe)._1
+
+    private def cantSplice(): Nothing = {
+      val (iterableRank, iterableType) = stripIterable(unquotee.tpe)
+      val holeRankMsg = if (annotatedRank != NoDot) s" with $annotatedRank" else ""
+      val action = "unquote " + unquotee.tpe + holeRankMsg
+      val suggestRank = annotatedRank != iterableRank || annotatedRank != NoDot
+      val unquoteeRankMsg = if (annotatedRank != iterableRank && iterableRank != NoDot) s"using $iterableRank" else "omitting the dots"
+      val rankSuggestion = if (suggestRank) unquoteeRankMsg else ""
+      val suggestLifting = (annotatedRank == NoDot || iterableRank != NoDot) && !(iterableType <:< treeType) && !isLiftableType(iterableType)
+      val liftedTpe = if (annotatedRank != NoDot) iterableType else unquotee.tpe
+      val liftSuggestion = if (suggestLifting) s"providing an implicit instance of Liftable[$liftedTpe]" else ""
+      val advice =
+        if (isBottomType(iterableType)) "bottom type values often indicate programmer mistake"
+        else "consider " + List(rankSuggestion, liftSuggestion).filter(_ != "").mkString(" or ")
+      c.abort(unquotee.pos, s"Can't $action, $advice")
+    }
+
+    private def lifted(tpe: Type)(tree: Tree): Tree = {
+      val lifter = inferLiftable(tpe)
+      assert(lifter != EmptyTree, s"couldn't find a liftable for $tpe")
+      val lifted = Apply(lifter, List(tree))
+      atPos(tree.pos)(lifted)
+    }
+
+    private def toStats(tree: Tree): Tree =
+      // q"$u.internal.reificationSupport.toStats($tree)"
+      Apply(Select(Select(Select(u, nme.internal), nme.reificationSupport), nme.toStats), tree :: Nil)
+
+    private def toList(tree: Tree, tpe: Type): Tree =
+      if (isListType(tpe)) tree
+      else Select(tree, nme.toList)
+
+    private def mapF(tree: Tree, f: Tree => Tree): Tree =
+      if (f(Ident(TermName("x"))) equalsStructure Ident(TermName("x"))) tree
+      else {
+        val x: TermName = c.freshName()
+        // q"$tree.map { $x => ${f(Ident(x))} }"
+        Apply(Select(tree, nme.map),
+          Function(ValDef(Modifiers(PARAM), x, TypeTree(), EmptyTree) :: Nil,
+            f(Ident(x))) :: Nil)
+      }
+
+    private object IterableType {
+      def unapply(tpe: Type): Option[Type] =
+        if (isIterableType(tpe)) Some(extractIterableTParam(tpe)) else None
+    }
+
+    private object LiftedType {
+      def unapply(tpe: Type): Option[Tree => Tree] =
+        if (tpe <:< treeType) Some(t => t)
+        else if (isLiftableType(tpe)) Some(lifted(tpe)(_))
+        else None
+    }
+
+    /** Maps a high-rank unquotee onto an expression that evaluates to a list of the given rank.
+     *
+     *  All possible combinations of representations are given in the table below:
+     *
+     *    input                          output for T <: Tree          output for T: Liftable
+     *
+     *    ..${x: Iterable[T]}            x.toList                      x.toList.map(lift)
+     *    ..${x: T}                      toStats(x)                    toStats(lift(x))
+     *
+     *    ...${x: Iterable[Iterable[T]]} x.toList.map { _.toList }     x.toList.map { _.toList.map(lift) }
+     *    ...${x: Iterable[T]}           x.toList.map { toStats(_) }   x.toList.map { toStats(lift(_)) }
+     *    ...${x: T}                     toStats(x).map { toStats(_) } toStats(lift(x)).map { toStats(_) }
+     *
+     *  For optimization purposes `x.toList` is represented as just `x` if it is statically known that
+     *  x is not just an Iterable[T] but a List[T]. Similarly no mapping is performed if mapping function is
+     *  known to be an identity.
+     */
+    private def iterated(rank: Rank, tree: Tree, tpe: Type): Tree = (rank, tpe) match {
+      case (DotDot, tpe @ IterableType(LiftedType(lift))) => mapF(toList(tree, tpe), lift)
+      case (DotDot, LiftedType(lift))                     => toStats(lift(tree))
+      case (DotDotDot, tpe @ IterableType(inner))         => mapF(toList(tree, tpe), t => iterated(DotDot, t, inner))
+      case (DotDotDot, LiftedType(lift))                  => mapF(toStats(lift(tree)), toStats)
+      case _                                              => global.abort("unreachable")
+    }
+  }
+
+  class UnapplyHole(val rank: Rank, pat: Tree) extends Hole {
+    val (placeholderName, pos, tptopt) = pat match {
+      case Bind(pname, inner @ Bind(_, Typed(Ident(nme.WILDCARD), tpt))) => (pname, inner.pos, Some(tpt))
+      case Bind(pname, inner @ Typed(Ident(nme.WILDCARD), tpt))          => (pname, inner.pos, Some(tpt))
+      case Bind(pname, inner)                                            => (pname, inner.pos, None)
+    }
+    val treeNoUnlift = Bind(placeholderName, Ident(nme.WILDCARD))
+    lazy val tree =
+      tptopt.map { tpt =>
+        val TypeDef(_, _, _, typedTpt) =
+          try c.typeCheck(TypeDef(NoMods, TypeName("T"), Nil, tpt))
+          catch { case TypecheckException(pos, msg) => c.abort(pos.asInstanceOf[c.Position], msg) }
+        val tpe = typedTpt.tpe
+        val (iterableRank, _) = stripIterable(tpe)
+        if (iterableRank.value < rank.value)
+          c.abort(pat.pos, s"Can't extract $tpe with $rank, consider using $iterableRank")
+        val (_, strippedTpe) = stripIterable(tpe, limit = rank)
+        if (strippedTpe <:< treeType) treeNoUnlift
+        else
+          unlifters.spawn(strippedTpe, rank).map {
+            Apply(_, treeNoUnlift :: Nil)
+          }.getOrElse {
+            c.abort(pat.pos, s"Can't find $unliftableType[$strippedTpe], consider providing it")
+          }
+      }.getOrElse { treeNoUnlift }
+  }
+
+  /** Full support for Unliftable implies that it's possible to interleave
+   *  higher-rank deconstruction with unlifting of the values.
+   *  In particular, extraction of List[Tree] as List[T: Unliftable] requires
+   *  helper extractors that do the job: UnliftListElementwise[T]. Similarly,
+   *  List[List[Tree]] needs UnliftListOfListsElementwise[T].
+   *
+   *  See also "unlift list" tests in UnapplyProps.scala
+   */
+  object unlifters {
+    private var records = List.empty[(Type, Rank)]
+    // Materializes an unlift helper that does elementwise
+    // unlifting for the corresponding rank and type.
+    def spawn(tpe: Type, rank: Rank): Option[Tree] = {
+      val unlifter = inferUnliftable(tpe)
+      if (unlifter == EmptyTree) None
+      else if (rank == NoDot) Some(unlifter)
+      else {
+        val idx = records.indexWhere { p => p._1 =:= tpe && p._2 == rank }
+        val resIdx = if (idx != -1) idx else { records +:= (tpe, rank); records.length - 1}
+        Some(Ident(TermName(nme.QUASIQUOTE_UNLIFT_HELPER + resIdx)))
+      }
+    }
+    // Returns a list of vals that define the required unlifters.
+    def preamble(): List[Tree] =
+      records.zipWithIndex.map { case ((tpe, rank), idx) =>
+        val name = TermName(nme.QUASIQUOTE_UNLIFT_HELPER + idx)
+        val helperName = rank match {
+          case DotDot    => nme.UnliftListElementwise
+          case DotDotDot => nme.UnliftListOfListsElementwise
+        }
+        val lifter = inferUnliftable(tpe)
+        assert(helperName.isTermName)
+        // q"val $name: $u.internal.reificationSupport.${helperName.toTypeName} = $u.internal.reificationSupport.$helperName($lifter)"
+        ValDef(NoMods, name,
+          AppliedTypeTree(Select(Select(Select(u, nme.internal), nme.reificationSupport), helperName.toTypeName), List(TypeTree(tpe))),
+          Apply(Select(Select(Select(u, nme.internal), nme.reificationSupport), helperName), lifter :: Nil))
+      }
+  }
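+
+  // Illustrative (hypothetical) use of the helpers above: with an implicit
+  // Unliftable[Int] in scope, a pattern such as
+  //   case q"f(..${ints: List[Int]})" => ...
+  // makes spawn(<Int type>, DotDot) hand out an UnliftListElementwise[Int]
+  // helper val that preamble() later emits.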
+}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
new file mode 100644
index 0000000..b68022a
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
@@ -0,0 +1,221 @@
+package scala.tools.reflect
+package quasiquotes
+
+import scala.tools.nsc.ast.parser.{Parsers => ScalaParser}
+import scala.tools.nsc.ast.parser.Tokens._
+import scala.compat.Platform.EOL
+import scala.reflect.internal.util.{BatchSourceFile, SourceFile, FreshNameCreator}
+import scala.collection.mutable.ListBuffer
+import scala.util.Try
+
+/** Builds upon the vanilla Scala parser and teams up with Placeholders.scala to emulate holes.
+ *  A principled solution to splicing into Scala syntax would be a parser that natively supports holes.
+ *  Unfortunately, that's outside of our reach in Scala 2.11, so we have to emulate.
+ */
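+// For example, q"..." strings are parsed via TermParser below, tq"..." via
+// TypeParser, cq"..." via CaseParser, pq"..." via PatternParser and fq"..."
+// via ForEnumeratorParser (the dispatch itself lives in Quasiquotes.scala).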
+trait Parsers { self: Quasiquotes =>
+  import global.{Try => _, _}
+  import build.implodePatDefs
+
+  abstract class Parser extends {
+    val global: self.global.type = self.global
+  } with ScalaParser {
+    def parse(code: String): Tree = {
+      try {
+        val file = new BatchSourceFile(nme.QUASIQUOTE_FILE, code)
+        val parser = new QuasiquoteParser(file)
+        parser.checkNoEscapingPlaceholders { parser.parseRule(entryPoint) }
+      } catch {
+        case mi: MalformedInput => c.abort(correspondingPosition(mi.offset), mi.msg)
+      }
+    }
+
+    def correspondingPosition(offset: Int): Position = {
+      val posMapList = posMap.toList
+      def containsOffset(start: Int, end: Int) = start <= offset && offset < end
+      def fallbackPosition = posMapList match {
+        case (pos1, (start1, end1)) :: _   if start1 > offset => pos1
+        case _ :+ ((pos2, (start2, end2))) if end2 <= offset  => pos2.withPoint(pos2.point + (end2 - start2))
+      }
+      posMapList.sliding(2).collect {
+        case (pos1, (start1, end1)) :: _                        if containsOffset(start1, end1) => (pos1, offset - start1)
+        case (pos1, (start1, end1)) :: (pos2, (start2, _)) :: _ if containsOffset(end1, start2) => (pos1, end1 - start1)
+        case _ :: (pos2, (start2, end2)) :: _                   if containsOffset(start2, end2) => (pos2, offset - start2)
+      }.map { case (pos, offset) =>
+        pos.withPoint(pos.point + offset)
+      }.toList.headOption.getOrElse(fallbackPosition)
+    }
+
+    override def token2string(token: Int): String = token match {
+      case EOF => "end of quote"
+      case _ => super.token2string(token)
+    }
+
+    def entryPoint: QuasiquoteParser => Tree
+
+    class QuasiquoteParser(source0: SourceFile) extends SourceFileParser(source0) { parser =>
+      def isHole: Boolean = isIdent && isHole(in.name)
+
+      def isHole(name: Name): Boolean = holeMap.contains(name)
+
+      override implicit lazy val fresh: FreshNameCreator = new FreshNameCreator(nme.QUASIQUOTE_PREFIX)
+
+      override val treeBuilder = new ParserTreeBuilder {
+        override implicit def fresh: FreshNameCreator = parser.fresh
+
+        // q"(..$xs)"
+        override def makeTupleTerm(trees: List[Tree]): Tree = TuplePlaceholder(trees)
+
+        // tq"(..$xs)"
+        override def makeTupleType(trees: List[Tree]): Tree = TupleTypePlaceholder(trees)
+
+        // q"{ $x }"
+        override def makeBlock(stats: List[Tree]): Tree = stats match {
+          case (head @ Ident(name)) :: Nil if isHole(name) => Block(Nil, head)
+          case _ => super.makeBlock(stats)
+        }
+
+        // tq"$a => $b"
+        override def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree = FunctionTypePlaceholder(argtpes, restpe)
+
+        // make q"val (x: T) = rhs" be equivalent to q"val x: T = rhs" for sake of bug compatibility (SI-8211)
+        override def makePatDef(mods: Modifiers, pat: Tree, rhs: Tree) = pat match {
+          case TuplePlaceholder(inParensPat :: Nil) => super.makePatDef(mods, inParensPat, rhs)
+          case _ => super.makePatDef(mods, pat, rhs)
+        }
+      }
+      import treeBuilder.{global => _, unit => _, _}
+
+      // q"def foo($x)"
+      override def param(owner: Name, implicitmod: Int, caseParam: Boolean): ValDef =
+        if (isHole && lookingAhead { in.token == COMMA || in.token == RPAREN }) {
+          ParamPlaceholder(implicitmod, ident())
+        } else super.param(owner, implicitmod, caseParam)
+
+      // q"($x) => ..." && q"class X { selfie => }
+      override def convertToParam(tree: Tree): ValDef = tree match {
+        case Ident(name) if isHole(name) => ParamPlaceholder(NoFlags, name)
+        case _ => super.convertToParam(tree)
+      }
+
+      // q"foo match { case $x }"
+      override def caseClause(): CaseDef =
+        if (isHole && lookingAhead { in.token == CASE || in.token == RBRACE || in.token == SEMI }) {
+          val c = CasePlaceholder(ident())
+          while (in.token == SEMI) in.nextToken()
+          c
+        } else
+          super.caseClause()
+
+      override def caseBlock(): Tree = super.caseBlock() match {
+        case Block(Nil, expr) => expr
+        case other => other
+      }
+
+      override def isAnnotation: Boolean = super.isAnnotation || (isHole && lookingAhead { isAnnotation })
+
+      override def isModifier: Boolean = super.isModifier || (isHole && lookingAhead { isModifier })
+
+      override def isLocalModifier: Boolean = super.isLocalModifier || (isHole && lookingAhead { isLocalModifier })
+
+      override def isTemplateIntro: Boolean = super.isTemplateIntro || (isHole && lookingAhead { isTemplateIntro })
+
+      override def isDefIntro: Boolean = super.isDefIntro || (isHole && lookingAhead { isDefIntro })
+
+      override def isDclIntro: Boolean = super.isDclIntro || (isHole && lookingAhead { isDclIntro })
+
+      override def isStatSep(token: Int) = token == EOF || super.isStatSep(token)
+
+      override def expectedMsg(token: Int): String =
+        if (isHole) expectedMsgTemplate(token2string(token), "unquotee")
+        else super.expectedMsg(token)
+
+      // $mods def foo
+      // $mods T
+      override def readAnnots(annot: => Tree): List[Tree] = in.token match {
+        case AT =>
+          in.nextToken()
+          annot :: readAnnots(annot)
+        case _ if isHole && lookingAhead { isAnnotation || isModifier || isDefIntro || isIdent || isStatSep || in.token == LPAREN } =>
+          val ann = ModsPlaceholder(in.name)
+          in.nextToken()
+          ann :: readAnnots(annot)
+        case _ =>
+          Nil
+      }
+
+      override def refineStat(): List[Tree] =
+        if (isHole && !isDclIntro) {
+          val result = RefineStatPlaceholder(in.name) :: Nil
+          in.nextToken()
+          result
+        } else super.refineStat()
+
+      override def ensureEarlyDef(tree: Tree) = tree match {
+        case Ident(name: TermName) if isHole(name) => EarlyDefPlaceholder(name)
+        case _ => super.ensureEarlyDef(tree)
+      }
+
+      override def isTypedParam(tree: Tree) = super.isTypedParam(tree) || (tree match {
+        case Ident(name) if isHole(name) => true
+        case _ => false
+      })
+
+      override def topStat = super.topStat.orElse {
+        case _ if isHole =>
+          val stats = PackageStatPlaceholder(in.name) :: Nil
+          in.nextToken()
+          stats
+      }
+
+      override def enumerator(isFirst: Boolean, allowNestedIf: Boolean = true) =
+        if (isHole && lookingAhead { in.token == EOF || in.token == RPAREN || isStatSep }) {
+          val res = ForEnumPlaceholder(in.name) :: Nil
+          in.nextToken()
+          res
+        } else super.enumerator(isFirst, allowNestedIf)
+    }
+  }
+
+  /** Wrapper around tree parsed in q"..." quote. Needed to support ..$ splicing on top-level. */
+  object Q {
+    def apply(tree: Tree): Block = Block(Nil, tree).updateAttachment(Q)
+    def unapply(tree: Tree): Option[Tree] = tree match {
+      case Block(Nil, contents) if tree.hasAttachment[Q.type] => Some(contents)
+      case _ => None
+    }
+  }
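+  // For instance, q"..$stats" parses into Q(<placeholder tree>); the reifier
+  // (see the Q(tree) cases in Reifiers.scala) unwraps it and splices the stats
+  // as a SyntacticBlock at the top level.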
+
+  object TermParser extends Parser {
+    def entryPoint = parser => Q(implodePatDefs(gen.mkTreeOrBlock(parser.templateOrTopStatSeq())))
+  }
+
+  object TypeParser extends Parser {
+    def entryPoint = { parser =>
+      if (parser.in.token == EOF)
+        TypeTree()
+      else
+        parser.typ()
+    }
+  }
+
+  object CaseParser extends Parser {
+    def entryPoint = parser => implodePatDefs(parser.caseClause())
+  }
+
+  object PatternParser extends Parser {
+    def entryPoint = { parser =>
+      val pat = parser.noSeq.pattern()
+      gen.patvarTransformer.transform(pat)
+    }
+  }
+
+  object ForEnumeratorParser extends Parser {
+    def entryPoint = { parser =>
+      val enums = parser.enumerator(isFirst = false, allowNestedIf = false)
+      assert(enums.length == 1)
+      implodePatDefs(enums.head)
+    }
+  }
+
+  object FreshName extends FreshNameExtractor(nme.QUASIQUOTE_PREFIX)
+}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
new file mode 100644
index 0000000..b287971
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
@@ -0,0 +1,201 @@
+package scala.tools.reflect
+package quasiquotes
+
+import java.util.UUID.randomUUID
+import scala.collection.{immutable, mutable}
+
+/** Emulates hole support (see Holes.scala) in the quasiquote parser (see Parsers.scala).
+ *  A principled solution to splicing into Scala syntax would be a parser that natively supports holes.
+ *  Unfortunately, that's outside of our reach in Scala 2.11, so we have to emulate.
+ *  This trait stores knowledge of how to represent the holes as something understandable by the parser
+ *  and how to recover holes from the results of parsing the produced representation.
+ */
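+// Sketch of step 1 below (the fresh name is hypothetical): for q"f($x)" the
+// generated code string looks like "f(qq$ab12cd34$1)" and holeMap records that
+// placeholder name -> Hole(NoDot, x); step 2 then recognizes the name again
+// via the extractors defined further down.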
+trait Placeholders { self: Quasiquotes =>
+  import global._
+  import Rank._
+  import universeTypes._
+
+  // Step 1: Transform Scala source with holes into vanilla Scala source
+
+  lazy val posMap = mutable.LinkedHashMap[Position, (Int, Int)]()
+  lazy val code = {
+    val sb = new StringBuilder()
+    val sessionSuffix = randomUUID().toString.replace("-", "").substring(0, 8) + "$"
+
+    def appendPart(value: String, pos: Position) = {
+      val start = sb.length
+      sb.append(value)
+      val end = sb.length
+      posMap += pos -> ((start, end))
+    }
+
+    def appendHole(tree: Tree, rank: Rank) = {
+      val placeholderName = c.freshName(TermName(nme.QUASIQUOTE_PREFIX + sessionSuffix))
+      sb.append(placeholderName)
+      val holeTree =
+        if (method != nme.unapply) tree
+        else Bind(placeholderName, tree)
+      holeMap(placeholderName) = Hole(rank, holeTree)
+    }
+
+    val iargs = method match {
+      case nme.apply   => args
+      case nme.unapply => internal.subpatterns(args.head).get
+      case _           => global.abort("unreachable")
+    }
+
+    foreach2(iargs, parts.init) { case (tree, (p, pos)) =>
+      val (part, rank) = parseDots(p)
+      appendPart(part, pos)
+      appendHole(tree, rank)
+    }
+    val (p, pos) = parts.last
+    appendPart(p, pos)
+
+    sb.toString
+  }
+
+  object holeMap {
+    private val underlying = mutable.LinkedHashMap.empty[String, Hole]
+    private val accessed   = mutable.Set.empty[String]
+    def unused: Set[Name] = (underlying.keys.toSet -- accessed).map(TermName(_))
+    def contains(key: Name): Boolean = underlying.contains(key.toString)
+    def apply(key: Name): Hole = {
+      val skey = key.toString
+      val value = underlying(skey)
+      accessed += skey
+      value
+    }
+    def update(key: Name, hole: Hole) =
+      underlying += key.toString -> hole
+    def get(key: Name): Option[Hole] = {
+      val skey = key.toString
+      underlying.get(skey).map { v =>
+        accessed += skey
+        v
+      }
+    }
+    def keysIterator: Iterator[TermName] = underlying.keysIterator.map(TermName(_))
+  }
+
+  // Step 2: Transform vanilla Scala AST into an AST with holes
+
+  trait HolePlaceholder {
+    def matching: PartialFunction[Any, Name]
+    def unapply(scrutinee: Any): Option[Hole] = {
+      val name = matching.lift(scrutinee)
+      name.flatMap { holeMap.get(_) }
+    }
+  }
+
+  object Placeholder extends HolePlaceholder {
+    def matching = {
+      case name: Name => name
+      case Ident(name) => name
+      case Bind(name, Ident(nme.WILDCARD)) => name
+      case TypeDef(_, name, List(), TypeBoundsTree(EmptyTree, EmptyTree)) => name
+    }
+  }
+
+  object ModsPlaceholder extends HolePlaceholder {
+    def apply(name: Name) =
+      Apply(Select(New(Ident(tpnme.QUASIQUOTE_MODS)), nme.CONSTRUCTOR), List(Literal(Constant(name.toString))))
+    def matching = {
+      case Apply(Select(New(Ident(tpnme.QUASIQUOTE_MODS)), nme.CONSTRUCTOR), List(Literal(Constant(s: String)))) => TermName(s)
+    }
+  }
+
+  object AnnotPlaceholder extends HolePlaceholder {
+    def matching = {
+      case Apply(Select(New(Ident(name)), nme.CONSTRUCTOR), Nil) => name
+    }
+  }
+
+  object ParamPlaceholder extends HolePlaceholder {
+    def apply(flags: FlagSet, name: Name) =
+      ValDef(Modifiers(flags), nme.QUASIQUOTE_PARAM, Ident(name), EmptyTree)
+    def matching = {
+      case ValDef(_, nme.QUASIQUOTE_PARAM, Ident(name), EmptyTree) => name
+    }
+  }
+
+  object TuplePlaceholder {
+    def apply(args: List[Tree]) =
+      Apply(Ident(nme.QUASIQUOTE_TUPLE), args)
+    def unapply(tree: Tree): Option[List[Tree]] = tree match {
+      case Apply(Ident(nme.QUASIQUOTE_TUPLE), args) => Some(args)
+      case _ => None
+    }
+  }
+
+  object TupleTypePlaceholder {
+    def apply(args: List[Tree]) =
+      AppliedTypeTree(Ident(tpnme.QUASIQUOTE_TUPLE), args)
+    def unapply(tree: Tree): Option[List[Tree]] = tree match {
+      case AppliedTypeTree(Ident(tpnme.QUASIQUOTE_TUPLE), args) => Some(args)
+      case _ => None
+    }
+  }
+
+  object FunctionTypePlaceholder {
+    def apply(args: List[Tree], res: Tree) =
+      AppliedTypeTree(Ident(tpnme.QUASIQUOTE_FUNCTION), args :+ res)
+    def unapply(tree: Tree): Option[(List[Tree], Tree)] = tree match {
+      case AppliedTypeTree(Ident(tpnme.QUASIQUOTE_FUNCTION), args :+ res) => Some((args, res))
+      case _ => None
+    }
+  }
+
+  object SymbolPlaceholder {
+    def unapply(scrutinee: Any): Option[Hole] = scrutinee match {
+      case Placeholder(hole: ApplyHole) if hole.tpe <:< symbolType => Some(hole)
+      case _ => None
+    }
+  }
+
+  object CasePlaceholder {
+    def apply(name: Name) =
+      CaseDef(Apply(Ident(nme.QUASIQUOTE_CASE), Ident(name) :: Nil), EmptyTree, EmptyTree)
+    def unapply(tree: Tree): Option[Hole] = tree match {
+      case CaseDef(Apply(Ident(nme.QUASIQUOTE_CASE), List(Placeholder(hole))), EmptyTree, EmptyTree) => Some(hole)
+      case _ => None
+    }
+  }
+
+  object RefineStatPlaceholder {
+    def apply(name: Name) =
+      ValDef(NoMods, nme.QUASIQUOTE_REFINE_STAT, Ident(name), EmptyTree)
+    def unapply(tree: Tree): Option[Hole] = tree match {
+      case ValDef(_, nme.QUASIQUOTE_REFINE_STAT, Ident(Placeholder(hole)), _) => Some(hole)
+      case _ => None
+    }
+  }
+
+  object EarlyDefPlaceholder {
+    def apply(name: Name) =
+      ValDef(Modifiers(Flag.PRESUPER), nme.QUASIQUOTE_EARLY_DEF, Ident(name), EmptyTree)
+    def unapply(tree: Tree): Option[Hole] = tree match {
+      case ValDef(_, nme.QUASIQUOTE_EARLY_DEF, Ident(Placeholder(hole)), _) => Some(hole)
+      case _ => None
+    }
+  }
+
+  object PackageStatPlaceholder {
+    def apply(name: Name) =
+      ValDef(NoMods, nme.QUASIQUOTE_PACKAGE_STAT, Ident(name), EmptyTree)
+    def unapply(tree: Tree): Option[Hole] = tree match {
+      case ValDef(NoMods, nme.QUASIQUOTE_PACKAGE_STAT, Ident(Placeholder(hole)), EmptyTree) => Some(hole)
+      case _ => None
+    }
+  }
+
+  object ForEnumPlaceholder {
+    def apply(name: Name) =
+      build.SyntacticValFrom(Bind(name, Ident(nme.WILDCARD)), Ident(nme.QUASIQUOTE_FOR_ENUM))
+    def unapply(tree: Tree): Option[Hole] = tree match {
+      case build.SyntacticValFrom(Bind(Placeholder(hole), Ident(nme.WILDCARD)), Ident(nme.QUASIQUOTE_FOR_ENUM)) =>
+        Some(hole)
+      case _ => None
+    }
+  }
+}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
new file mode 100644
index 0000000..b330691
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
@@ -0,0 +1,60 @@
+package scala.tools.reflect
+package quasiquotes
+
+import scala.reflect.macros.runtime.Context
+
+abstract class Quasiquotes extends Parsers
+                              with Holes
+                              with Placeholders
+                              with Reifiers {
+  val c: Context
+  val global: c.universe.type = c.universe
+  import c.universe._
+
+  def debug(msg: => String): Unit =
+    if (settings.Yquasiquotedebug.value) println(msg)
+
+  lazy val (universe: Tree, args, parts, parse, reify, method) = c.macroApplication match {
+    case Apply(build.SyntacticTypeApplied(Select(Select(Apply(Select(universe0, _), List(Apply(_, parts0))), interpolator0), method0), _), args0) =>
+      debug(s"parse prefix:\nuniverse=$universe0\nparts=$parts0\ninterpolator=$interpolator0\nmethod=$method0\nargs=$args0\n")
+      val parts1 = parts0.map {
+        case lit @ Literal(Constant(s: String)) => s -> lit.pos
+        case part => c.abort(part.pos, "Quasiquotes can only be used with literal strings")
+      }
+      val reify0 = method0 match {
+        case nme.apply   => new ApplyReifier().reifyFillingHoles(_)
+        case nme.unapply => new UnapplyReifier().reifyFillingHoles(_)
+        case other       => global.abort(s"Unknown quasiquote api method: $other")
+      }
+      val parse0 = interpolator0 match {
+        case nme.q       => TermParser.parse(_)
+        case nme.tq      => TypeParser.parse(_)
+        case nme.cq      => CaseParser.parse(_)
+        case nme.pq      => PatternParser.parse(_)
+        case nme.fq      => ForEnumeratorParser.parse(_)
+        case other       => global.abort(s"Unknown quasiquote flavor: $other")
+      }
+      (universe0, args0, parts1, parse0, reify0, method0)
+    case _ =>
+      global.abort(s"Couldn't parse call prefix tree ${c.macroApplication}.")
+  }
+
+  lazy val u = universe // shortcut
+  lazy val universeTypes = new definitions.UniverseDependentTypes(universe)
+
+  def expandQuasiquote = {
+    debug(s"macro application:\n${c.macroApplication}\n")
+    debug(s"code to parse:\n$code\n")
+    val tree = parse(code)
+    debug(s"parsed:\n${showRaw(tree)}\n$tree\n")
+    val reified = reify(tree)
+    def sreified =
+      reified
+        .toString
+        .replace("scala.reflect.runtime.`package`.universe.internal.reificationSupport.", "")
+        .replace("scala.reflect.runtime.`package`.universe.", "")
+        .replace("scala.collection.immutable.", "")
+    debug(s"reified tree:\n$sreified\n")
+    reified
+  }
+}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
new file mode 100644
index 0000000..95113d5
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
@@ -0,0 +1,487 @@
+package scala.tools.reflect
+package quasiquotes
+
+import java.lang.UnsupportedOperationException
+import scala.reflect.reify.{Reifier => ReflectReifier}
+import scala.reflect.internal.Flags._
+
+trait Reifiers { self: Quasiquotes =>
+  import global._
+  import global.build._
+  import global.treeInfo._
+  import global.definitions._
+  import Rank._
+  import universeTypes._
+
+  abstract class Reifier(val isReifyingExpressions: Boolean) extends {
+    val global: self.global.type = self.global
+    val universe = self.universe
+    val reifee = EmptyTree
+    val mirror = EmptyTree
+    val concrete = false
+  } with ReflectReifier {
+    lazy val typer = throw new UnsupportedOperationException
+
+    def isReifyingPatterns: Boolean = !isReifyingExpressions
+    def action = if (isReifyingExpressions) "unquote" else "extract"
+    def holesHaveTypes = isReifyingExpressions
+
+    /** Map that stores freshly generated names linked to the corresponding names in the reified tree.
+     *  This information is used to reify names created by calls to freshTermName and freshTypeName.
+     */
+    val nameMap = collection.mutable.HashMap.empty[Name, Set[TermName]].withDefault { _ => Set() }
+
+    /** Wraps expressions into:
+     *    a block which starts with a sequence of vals that correspond
+     *    to fresh names that have to be created when the quasiquote is evaluated,
+     *    and ends with the reified tree:
+     *
+     *      {
+     *        val name$1: universe.TermName = universe.build.freshTermName(prefix1)
+     *        ...
+     *        val name$N: universe.TermName = universe.build.freshTermName(prefixN)
+     *        tree
+     *      }
+     *
+     *  Wraps patterns into:
+     *    a call to an anonymous class' unapply method, as required by unapply macro expansion:
+     *
+     *      new {
+     *        def unapply(tree) = tree match {
+     *          case pattern if guard => Some(result)
+     *          case _ => None
+     *        }
+     *      }.unapply(<unapply-selector>)
+     *
+     *    where pattern corresponds to the reified tree and guard is the conjunction of equalities
+     *    which check that the names within each set in nameMap.values are equal to each other.
+     */
+    def wrap(tree: Tree) =
+      if (isReifyingExpressions) {
+        val freshdefs = nameMap.iterator.map {
+          case (origname, names) =>
+            assert(names.size == 1)
+            val FreshName(prefix) = origname
+            val nameTypeName = if (origname.isTermName) tpnme.TermName else tpnme.TypeName
+            val freshName = if (origname.isTermName) nme.freshTermName else nme.freshTypeName
+            // q"val ${names.head}: $u.$nameTypeName = $u.internal.reificationSupport.$freshName($prefix)"
+            ValDef(NoMods, names.head, Select(u, nameTypeName),
+              Apply(Select(Select(Select(u, nme.internal), nme.reificationSupport), freshName), Literal(Constant(prefix)) :: Nil))
+        }.toList
+        // q"..$freshdefs; $tree"
+        SyntacticBlock(freshdefs :+ tree)
+      } else {
+        val freevars = holeMap.keysIterator.map(Ident(_)).toList
+        val isVarPattern = tree match { case Bind(name, Ident(nme.WILDCARD)) => true case _ => false }
+        val cases =
+          if(isVarPattern) {
+            val Ident(name) :: Nil = freevars
+            // cq"$name: $treeType => $SomeModule($name)" :: Nil
+            CaseDef(Bind(name, Typed(Ident(nme.WILDCARD), TypeTree(treeType))),
+              EmptyTree, Apply(Ident(SomeModule), List(Ident(name)))) :: Nil
+          } else {
+            val (succ, fail) = freevars match {
+              case Nil =>
+                // (q"true", q"false")
+                (Literal(Constant(true)), Literal(Constant(false)))
+              case head :: Nil =>
+                // (q"$SomeModule($head)", q"$NoneModule")
+                (Apply(Ident(SomeModule), List(head)), Ident(NoneModule))
+              case vars =>
+                // (q"$SomeModule((..$vars))", q"$NoneModule")
+                (Apply(Ident(SomeModule), List(SyntacticTuple(vars))), Ident(NoneModule))
+            }
+            val guard =
+              nameMap.collect { case (_, nameset) if nameset.size >= 2 =>
+                nameset.toList.sliding(2).map { case List(n1, n2) =>
+                  // q"$n1 == $n2"
+                  Apply(Select(Ident(n1), nme.EQ), List(Ident(n2)))
+                }
+              }.flatten.reduceOption[Tree] { (l, r) =>
+                // q"$l && $r"
+                Apply(Select(l, nme.ZAND), List(r))
+              }.getOrElse { EmptyTree }
+            // cq"$tree if $guard => $succ" :: cq"_ => $fail" :: Nil
+            CaseDef(tree, guard, succ) :: CaseDef(Ident(nme.WILDCARD), EmptyTree, fail) :: Nil
+          }
+        // q"new { def unapply(tree: $AnyClass) = { ..${unlifters.preamble()}; tree match { case ..$cases } } }.unapply(..$args)"
+        Apply(
+          Select(
+            SyntacticNew(Nil, Nil, noSelfType, List(
+              DefDef(NoMods, nme.unapply, Nil, List(List(ValDef(NoMods, nme.tree, TypeTree(AnyClass.toType), EmptyTree))), TypeTree(),
+                SyntacticBlock(unlifters.preamble() :+ Match(Ident(nme.tree), cases))))),
+            nme.unapply),
+          args)
+      }
+
+    def reifyFillingHoles(tree: Tree): Tree = {
+      val reified = reifyTree(tree)
+      holeMap.unused.foreach { hole =>
+        c.abort(holeMap(hole).pos, s"Don't know how to $action here")
+      }
+      wrap(reified)
+    }
+
+    override def reifyTree(tree: Tree): Tree =
+      reifyTreePlaceholder(tree) orElse
+      reifyTreeSyntactically(tree)
+
+    def reifyTreePlaceholder(tree: Tree): Tree = tree match {
+      case Placeholder(hole: ApplyHole) if hole.tpe <:< treeType => hole.tree
+      case Placeholder(Hole(tree, NoDot)) if isReifyingPatterns => tree
+      case Placeholder(hole @ Hole(_, rank @ Dot())) => c.abort(hole.pos, s"Can't $action with $rank here")
+      case TuplePlaceholder(args) => reifyTuple(args)
+      // Due to the greediness of SyntacticApplied we need to pre-emptively peek inside.
+      // `rest` will always be non-empty because of the case right above this one.
+      case SyntacticApplied(id @ Ident(nme.QUASIQUOTE_TUPLE), first :: rest) =>
+        mirrorBuildCall(nme.SyntacticApplied, reifyTreePlaceholder(Apply(id, first)), reify(rest))
+      case TupleTypePlaceholder(args) => reifyTupleType(args)
+      case FunctionTypePlaceholder(argtpes, restpe) => reifyFunctionType(argtpes, restpe)
+      case CasePlaceholder(hole) => hole.tree
+      case RefineStatPlaceholder(hole) => reifyRefineStat(hole)
+      case EarlyDefPlaceholder(hole) => reifyEarlyDef(hole)
+      case PackageStatPlaceholder(hole) => reifyPackageStat(hole)
+      case ParamPlaceholder(hole) => hole.tree
+      // for enumerators are checked not during splicing but during
+      // desugaring of the for loop in SyntacticFor & SyntacticForYield
+      case ForEnumPlaceholder(hole) => hole.tree
+      case _ => EmptyTree
+    }
+
+    override def reifyTreeSyntactically(tree: Tree) = tree match {
+      case RefTree(qual, SymbolPlaceholder(Hole(tree, _))) if isReifyingExpressions =>
+        mirrorBuildCall(nme.mkRefTree, reify(qual), tree)
+      case This(SymbolPlaceholder(Hole(tree, _))) if isReifyingExpressions =>
+        mirrorCall(nme.This, tree)
+      case SyntacticTraitDef(mods, name, tparams, earlyDefs, parents, selfdef, body) =>
+        reifyBuildCall(nme.SyntacticTraitDef, mods, name, tparams, earlyDefs, parents, selfdef, body)
+      case SyntacticClassDef(mods, name, tparams, constrmods, vparamss,
+                             earlyDefs, parents, selfdef, body) =>
+        mirrorBuildCall(nme.SyntacticClassDef, reify(mods), reify(name), reify(tparams), reify(constrmods),
+                                               reifyVparamss(vparamss), reify(earlyDefs), reify(parents),
+                                               reify(selfdef), reify(body))
+      case SyntacticPackageObjectDef(name, earlyDefs, parents, selfdef, body) =>
+        reifyBuildCall(nme.SyntacticPackageObjectDef, name, earlyDefs, parents, selfdef, body)
+      case SyntacticObjectDef(mods, name, earlyDefs, parents, selfdef, body) =>
+        reifyBuildCall(nme.SyntacticObjectDef, mods, name, earlyDefs, parents, selfdef, body)
+      case SyntacticNew(earlyDefs, parents, selfdef, body) =>
+        reifyBuildCall(nme.SyntacticNew, earlyDefs, parents, selfdef, body)
+      case SyntacticDefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+        mirrorBuildCall(nme.SyntacticDefDef, reify(mods), reify(name), reify(tparams),
+                                             reifyVparamss(vparamss), reify(tpt), reify(rhs))
+      case SyntacticValDef(mods, name, tpt, rhs) if tree != noSelfType =>
+        reifyBuildCall(nme.SyntacticValDef, mods, name, tpt, rhs)
+      case SyntacticVarDef(mods, name, tpt, rhs) =>
+        reifyBuildCall(nme.SyntacticVarDef, mods, name, tpt, rhs)
+      case SyntacticValFrom(pat, rhs) =>
+        reifyBuildCall(nme.SyntacticValFrom, pat, rhs)
+      case SyntacticValEq(pat, rhs) =>
+        reifyBuildCall(nme.SyntacticValEq, pat, rhs)
+      case SyntacticFilter(cond) =>
+        reifyBuildCall(nme.SyntacticFilter, cond)
+      case SyntacticFor(enums, body) =>
+        reifyBuildCall(nme.SyntacticFor, enums, body)
+      case SyntacticForYield(enums, body) =>
+        reifyBuildCall(nme.SyntacticForYield, enums, body)
+      case SyntacticAssign(lhs, rhs) =>
+        reifyBuildCall(nme.SyntacticAssign, lhs, rhs)
+      case SyntacticApplied(fun, argss) if argss.nonEmpty =>
+        reifyBuildCall(nme.SyntacticApplied, fun, argss)
+      case SyntacticTypeApplied(fun, targs) if targs.nonEmpty =>
+        reifyBuildCall(nme.SyntacticTypeApplied, fun, targs)
+      case SyntacticAppliedType(tpt, targs) if targs.nonEmpty =>
+        reifyBuildCall(nme.SyntacticAppliedType, tpt, targs)
+      case SyntacticFunction(args, body) =>
+        reifyBuildCall(nme.SyntacticFunction, args, body)
+      case SyntacticEmptyTypeTree() =>
+        reifyBuildCall(nme.SyntacticEmptyTypeTree)
+      case SyntacticImport(expr, selectors) =>
+        reifyBuildCall(nme.SyntacticImport, expr, selectors)
+      case SyntacticPartialFunction(cases) =>
+        reifyBuildCall(nme.SyntacticPartialFunction, cases)
+      case SyntacticMatch(scrutinee, cases) =>
+        reifyBuildCall(nme.SyntacticMatch, scrutinee, cases)
+      case SyntacticTermIdent(name, isBackquoted) =>
+        reifyBuildCall(nme.SyntacticTermIdent, name, isBackquoted)
+      case SyntacticTypeIdent(name) =>
+        reifyBuildCall(nme.SyntacticTypeIdent, name)
+      case SyntacticCompoundType(parents, defns) =>
+        reifyBuildCall(nme.SyntacticCompoundType, parents, defns)
+      case SyntacticSingletonType(ref) =>
+        reifyBuildCall(nme.SyntacticSingletonType, ref)
+      case SyntacticTypeProjection(qual, name) =>
+        reifyBuildCall(nme.SyntacticTypeProjection, qual, name)
+      case SyntacticAnnotatedType(tpt, annot) =>
+        reifyBuildCall(nme.SyntacticAnnotatedType, tpt, annot)
+      case SyntacticExistentialType(tpt, where) =>
+        reifyBuildCall(nme.SyntacticExistentialType, tpt, where)
+      case Q(tree) if fillListHole.isDefinedAt(tree) =>
+        mirrorBuildCall(nme.SyntacticBlock, fillListHole(tree))
+      case Q(other) =>
+        reifyTree(other)
+      // Syntactic block always matches so we have to be careful
+      // not to cause infinite recursion.
+      case block @ SyntacticBlock(stats) if block.isInstanceOf[Block] =>
+        reifyBuildCall(nme.SyntacticBlock, stats)
+      case SyntheticUnit() =>
+        reifyBuildCall(nme.SyntacticBlock, Nil)
+      case Try(block, catches, finalizer) =>
+        reifyBuildCall(nme.SyntacticTry, block, catches, finalizer)
+      case CaseDef(pat, guard, body) if fillListHole.isDefinedAt(body) =>
+        mirrorCall(nme.CaseDef, reify(pat), reify(guard), mirrorBuildCall(nme.SyntacticBlock, fillListHole(body)))
+      // The parser emits trees with the scala package symbol to ensure
+      // that some names hygienically point to various scala package
+      // members; we need to preserve this symbol to preserve the
+      // correctness of the trees produced by quasiquotes.
+      case Select(id @ Ident(nme.scala_), name) if id.symbol == ScalaPackage =>
+        reifyBuildCall(nme.ScalaDot, name)
+      case Select(qual, name) =>
+        val ctor = if (name.isTypeName) nme.SyntacticSelectType else nme.SyntacticSelectTerm
+        reifyBuildCall(ctor, qual, name)
+      case _ =>
+        super.reifyTreeSyntactically(tree)
+    }
+
+    override def reifyName(name: Name): Tree = name match {
+      case Placeholder(hole: ApplyHole) =>
+        if (!(hole.tpe <:< nameType)) c.abort(hole.pos, s"$nameType expected but ${hole.tpe} found")
+        hole.tree
+      case Placeholder(hole: UnapplyHole) => hole.treeNoUnlift
+      case FreshName(prefix) if prefix != nme.QUASIQUOTE_NAME_PREFIX =>
+        def fresh() = c.freshName[TermName](nme.QUASIQUOTE_NAME_PREFIX)
+        def introduceName() = { val n = fresh(); nameMap(name) += n; n}
+        def result(n: Name) = if (isReifyingExpressions) Ident(n) else Bind(n, Ident(nme.WILDCARD))
+        if (isReifyingPatterns) result(introduceName())
+        else result(nameMap.get(name).map { _.head }.getOrElse { introduceName() })
+      case _ =>
+        super.reifyName(name)
+    }
+
+    def reifyTuple(args: List[Tree]) = args match {
+      case Nil => reify(Literal(Constant(())))
+      case List(hole @ Placeholder(Hole(_, NoDot))) => reify(hole)
+      case List(Placeholder(_)) => reifyBuildCall(nme.SyntacticTuple, args)
+      // if we only have a one-element tuple without
+      // any rank annotations, then this is
+      // just an expression wrapped in parentheses
+      case List(other) => reify(other)
+      case _ => reifyBuildCall(nme.SyntacticTuple, args)
+    }
+
+    def reifyTupleType(args: List[Tree]) = args match {
+      case Nil => reify(Select(Ident(nme.scala_), tpnme.Unit))
+      case List(hole @ Placeholder(Hole(_, NoDot))) => reify(hole)
+      case List(Placeholder(_)) => reifyBuildCall(nme.SyntacticTupleType, args)
+      case List(other) => reify(other)
+      case _ => reifyBuildCall(nme.SyntacticTupleType, args)
+    }
+
+    def reifyFunctionType(argtpes: List[Tree], restpe: Tree) =
+      reifyBuildCall(nme.SyntacticFunctionType, argtpes, restpe)
+
+    def reifyConstructionCheck(name: TermName, hole: Hole) = hole match {
+      case _: UnapplyHole => hole.tree
+      case _: ApplyHole => mirrorBuildCall(name, hole.tree)
+    }
+
+    def reifyRefineStat(hole: Hole) = reifyConstructionCheck(nme.mkRefineStat, hole)
+
+    def reifyEarlyDef(hole: Hole) = reifyConstructionCheck(nme.mkEarlyDef, hole)
+
+    def reifyAnnotation(hole: Hole) = reifyConstructionCheck(nme.mkAnnotation, hole)
+
+    def reifyPackageStat(hole: Hole) = reifyConstructionCheck(nme.mkPackageStat, hole)
+
+    def reifyVparamss(vparamss: List[List[ValDef]]) = {
+      val build.ImplicitParams(paramss, implparams) = vparamss
+      if (implparams.isEmpty) reify(paramss)
+      else reifyBuildCall(nme.ImplicitParams, paramss, implparams)
+    }
+
+    /** Splits a list into a list of groups in which subsequent elements are considered
+     *  similar by the given function.
+     *
+     *  Example:
+     *
+     *    > group(List(1, 1, 0, 0, 1, 0)) { _ == _ }
+     *    List(List(1, 1), List(0, 0), List(1), List(0))
+     *
+     */
+    def group[T](lst: List[T])(similar: (T, T) => Boolean) = lst.foldLeft[List[List[T]]](List()) {
+      case (Nil, el) => List(List(el))
+      case (ll :+ (last @ (lastinit :+ lastel)), el) if similar(lastel, el) => ll :+ (last :+ el)
+      case (ll, el) => ll :+ List(el)
+    }
+
+    /** Reifies a list, filling all the valid holes.
+     *
+     *  Reification of a non-trivial list is done in two steps:
+     *
+     *  1. split the list into groups where every placeholder is always
+     *     put in a group of its own and all subsequent non-placeholders are
+     *     grouped together; an element is considered to be a placeholder if it's
+     *     in the domain of the fill function;
+     *
+     *  2. fold the groups into a sequence of lists added together with ++, using
+     *     fill reification for holes and fallback reification for non-holes.
+     *
+     *  Example:
+     *
+     *    reifyHighRankList(lst) {
+     *      // first we define patterns that extract high-rank holes (currently ..)
+     *      case Placeholder(IterableType(_, _)) => tree
+     *    } {
+     *      // in the end we define how single elements are reified, typically with default reify call
+     *      reify(_)
+     *    }
+     *
+     *  Sample execution of previous concrete list reifier:
+     *
+     *    > val lst = List(foo, bar, qq$f3948f9s$1)
+     *    > reifyHighRankList(lst) { ... } { ... }
+     *    q"List($foo, $bar) ++ ${holeMap(qq$f3948f9s$1).tree}"
+     */
+    def reifyHighRankList(xs: List[Any])(fill: PartialFunction[Any, Tree])(fallback: Any => Tree): Tree
+
+    val fillListHole: PartialFunction[Any, Tree] = {
+      case Placeholder(Hole(tree, DotDot)) => tree
+      case CasePlaceholder(Hole(tree, DotDot)) => tree
+      case RefineStatPlaceholder(h @ Hole(_, DotDot)) => reifyRefineStat(h)
+      case EarlyDefPlaceholder(h @ Hole(_, DotDot)) => reifyEarlyDef(h)
+      case PackageStatPlaceholder(h @ Hole(_, DotDot)) => reifyPackageStat(h)
+      case ForEnumPlaceholder(Hole(tree, DotDot)) => tree
+      case ParamPlaceholder(Hole(tree, DotDot)) => tree
+      case SyntacticPatDef(mods, pat, tpt, rhs) =>
+        reifyBuildCall(nme.SyntacticPatDef, mods, pat, tpt, rhs)
+      case SyntacticValDef(mods, p @ Placeholder(h: ApplyHole), tpt, rhs) if h.tpe <:< treeType =>
+        mirrorBuildCall(nme.SyntacticPatDef, reify(mods), h.tree, reify(tpt), reify(rhs))
+    }
+
+    val fillListOfListsHole: PartialFunction[Any, Tree] = {
+      case List(ParamPlaceholder(Hole(tree, DotDotDot))) => tree
+      case List(Placeholder(Hole(tree, DotDotDot))) => tree
+    }
+
+    /** Reifies an arbitrary list, filling ..$x and ...$y holes when they are put
+     *  in the correct position. Falls back to regular reification for zero-rank
+     *  elements.
+     */
+    override def reifyList(xs: List[Any]): Tree = reifyHighRankList(xs)(fillListHole.orElse(fillListOfListsHole))(reify)
+
+    def reifyAnnotList(annots: List[Tree]): Tree = reifyHighRankList(annots) {
+      case AnnotPlaceholder(h @ Hole(_, DotDot)) => reifyAnnotation(h)
+    } {
+      case AnnotPlaceholder(h: ApplyHole) if h.tpe <:< treeType => reifyAnnotation(h)
+      case AnnotPlaceholder(h: UnapplyHole) if h.rank == NoDot => reifyAnnotation(h)
+      case other => reify(other)
+    }
+
+    // These are the explicit flags, except those that are used
+    // to overload the same tree for two different concepts:
+    // - MUTABLE, which is used to overload ValDef for vars
+    // - TRAIT, which is used to overload ClassDef for traits
+    val nonOverloadedExplicitFlags = ExplicitFlags & ~MUTABLE & ~TRAIT
+
+    def ensureNoExplicitFlags(m: Modifiers, pos: Position) = {
+      // Traits automatically have the ABSTRACT flag assigned to
+      // them, so in that case it's not an explicit flag.
+      val flags = if (m.isTrait) m.flags & ~ABSTRACT else m.flags
+      if ((flags & nonOverloadedExplicitFlags) != 0L)
+        c.abort(pos, s"Can't $action modifiers together with flags, consider merging flags into modifiers")
+    }
+
+    override def mirrorSelect(name: String): Tree =
+      Select(universe, TermName(name))
+
+    override def mirrorCall(name: TermName, args: Tree*): Tree =
+      Apply(Select(universe, name), args.toList)
+
+    override def mirrorBuildCall(name: TermName, args: Tree*): Tree =
+      Apply(Select(Select(Select(universe, nme.internal), nme.reificationSupport), name), args.toList)
+
+    override def scalaFactoryCall(name: String, args: Tree*): Tree =
+      call("scala." + name, args: _*)
+  }
+
+  class ApplyReifier extends Reifier(isReifyingExpressions = true) {
+    def reifyHighRankList(xs: List[Any])(fill: PartialFunction[Any, Tree])(fallback: Any => Tree): Tree =
+      if (xs.isEmpty) mkList(Nil)
+      else {
+        def reifyGroup(group: List[Any]): Tree = group match {
+          case List(elem) if fill.isDefinedAt(elem) => fill(elem)
+          case elems => mkList(elems.map(fallback))
+        }
+        val head :: tail = group(xs) { (a, b) => !fill.isDefinedAt(a) && !fill.isDefinedAt(b) }
+        tail.foldLeft[Tree](reifyGroup(head)) { (tree, lst) => Apply(Select(tree, nme.PLUSPLUS), List(reifyGroup(lst))) }
+      }
+
+    override def reifyModifiers(m: Modifiers) =
+      if (m == NoMods) super.reifyModifiers(m)
+      else {
+        val (modsPlaceholders, annots) = m.annotations.partition {
+          case ModsPlaceholder(_) => true
+          case _ => false
+        }
+        val (mods, flags) = modsPlaceholders.map {
+          case ModsPlaceholder(hole: ApplyHole) => hole
+        }.partition { hole =>
+          if (hole.tpe <:< modsType) true
+          else if (hole.tpe <:< flagsType) false
+          else c.abort(hole.pos, s"$flagsType or $modsType expected but ${hole.tpe} found")
+        }
+        mods match {
+          case hole :: Nil =>
+            if (flags.nonEmpty) c.abort(flags(0).pos, "Can't unquote flags together with modifiers, consider merging flags into modifiers")
+            if (annots.nonEmpty) c.abort(hole.pos, "Can't unquote modifiers together with annotations, consider merging annotations into modifiers")
+            ensureNoExplicitFlags(m, hole.pos)
+            hole.tree
+          case _ :: hole :: Nil =>
+            c.abort(hole.pos, "Can't unquote multiple modifiers, consider merging them into a single modifiers instance")
+          case _ =>
+            val baseFlags = reifyFlags(m.flags)
+            val reifiedFlags = flags.foldLeft[Tree](baseFlags) { case (flag, hole) => Apply(Select(flag, nme.OR), List(hole.tree)) }
+            mirrorFactoryCall(nme.Modifiers, reifiedFlags, reify(m.privateWithin), reifyAnnotList(annots))
+        }
+      }
+
+  }
+  class UnapplyReifier extends Reifier(isReifyingExpressions = false) {
+    private def collection = ScalaDot(nme.collection)
+    private def collectionColonPlus = Select(collection, nme.COLONPLUS)
+    private def collectionCons = Select(Select(collection, nme.immutable), nme.CONS)
+    private def collectionNil = Select(Select(collection, nme.immutable), nme.Nil)
+    // pq"$lhs :+ $rhs"
+    private def append(lhs: Tree, rhs: Tree) = Apply(collectionColonPlus, lhs :: rhs :: Nil)
+    // pq"$lhs :: $rhs"
+    private def cons(lhs: Tree, rhs: Tree) = Apply(collectionCons, lhs :: rhs :: Nil)
+
+    def reifyHighRankList(xs: List[Any])(fill: PartialFunction[Any, Tree])(fallback: Any => Tree): Tree = {
+      val grouped = group(xs) { (a, b) => !fill.isDefinedAt(a) && !fill.isDefinedAt(b) }
+      def appended(lst: List[Any], init: Tree)  = lst.foldLeft(init)  { (l, r) => append(l, fallback(r)) }
+      def prepended(lst: List[Any], init: Tree) = lst.foldRight(init) { (l, r) => cons(fallback(l), r)   }
+      grouped match {
+        case init :: List(hole) :: last :: Nil if fill.isDefinedAt(hole) => appended(last, prepended(init, fill(hole)))
+        case init :: List(hole) :: Nil         if fill.isDefinedAt(hole) => prepended(init, fill(hole))
+        case         List(hole) :: last :: Nil if fill.isDefinedAt(hole) => appended(last, fill(hole))
+        case         List(hole) :: Nil         if fill.isDefinedAt(hole) => fill(hole)
+        case _                                                           => prepended(xs, collectionNil)
+      }
+    }
+
+    override def reifyModifiers(m: Modifiers) =
+      if (m == NoMods) super.reifyModifiers(m)
+      else {
+        val mods = m.annotations.collect { case ModsPlaceholder(hole: UnapplyHole) => hole }
+        mods match {
+          case hole :: Nil =>
+            if (m.annotations.length != 1) c.abort(hole.pos, "Can't extract modifiers together with annotations, consider extracting just modifiers")
+            ensureNoExplicitFlags(m, hole.pos)
+            hole.treeNoUnlift
+          case _ :: hole :: _ =>
+            c.abort(hole.pos, "Can't extract multiple modifiers together, consider extracting a single modifiers instance")
+          case Nil =>
+            mirrorFactoryCall(nme.Modifiers, reifyFlags(m.flags), reify(m.privateWithin), reifyAnnotList(m.annotations))
+        }
+      }
+  }
+}
diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala
index 21137ac..3cfc1eb 100644
--- a/src/compiler/scala/tools/util/Javap.scala
+++ b/src/compiler/scala/tools/util/Javap.scala
@@ -6,13 +6,14 @@
 package scala.tools
 package util
 
-import java.lang.reflect.{ GenericSignatureFormatError, Method, Constructor }
-import java.lang.{ ClassLoader => JavaClassLoader }
 import scala.tools.nsc.util.ScalaClassLoader
-import java.io.{ InputStream, PrintWriter, ByteArrayInputStream, FileNotFoundException }
-import scala.tools.nsc.io.File
-import Javap._
-import scala.language.reflectiveCalls
+import java.io.PrintWriter
+
+trait JpResult {
+  def isError: Boolean
+  def value: Any
+  def show(): Unit
+}
 
 trait Javap {
   def loader: ScalaClassLoader
@@ -29,147 +30,3 @@ object NoJavap extends Javap {
   def tryFile(path: String): Option[Array[Byte]] = None
   def tryClass(path: String): Array[Byte]        = Array()
 }
-
-class JavapClass(
-  val loader: ScalaClassLoader = ScalaClassLoader.appLoader,
-  val printWriter: PrintWriter = new PrintWriter(System.out, true)
-) extends Javap {
-
-  lazy val parser = new JpOptions
-
-  val EnvClass     = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
-  val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull
-  private def failed = (EnvClass eq null) || (PrinterClass eq null)
-
-  val PrinterCtr   = (
-    if (failed) null
-    else PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass)
-  )
-
-  def findBytes(path: String): Array[Byte] =
-    tryFile(path) getOrElse tryClass(path)
-
-  def apply(args: Seq[String]): List[JpResult] = {
-    if (failed) List(new JpError("Could not load javap tool. Check that JAVA_HOME is correct."))
-    else args.toList filterNot (_ startsWith "-") map { path =>
-      val bytes = findBytes(path)
-      if (bytes.isEmpty) new JpError("Could not find class bytes for '%s'".format(path))
-      else new JpSuccess(newPrinter(new ByteArrayInputStream(bytes), newEnv(args)))
-    }
-  }
-
-  def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter =
-    if (failed) null
-    else PrinterCtr.newInstance(in, printWriter, env)
-
-  def newEnv(opts: Seq[String]): FakeEnvironment = {
-    lazy val env: FakeEnvironment = EnvClass.newInstance()
-
-    if (failed) null
-    else parser(opts) foreach { case (name, value) =>
-      val field = EnvClass getDeclaredField name
-      field setAccessible true
-      field.set(env, value.asInstanceOf[AnyRef])
-    }
-
-    env
-  }
-
-  /** Assume the string is a path and try to find the classfile
-   *  it represents.
-   */
-  def tryFile(path: String): Option[Array[Byte]] = {
-    val file = File(
-      if (path.endsWith(".class")) path
-      else path.replace('.', '/') + ".class"
-    )
-    if (!file.exists) None
-    else try Some(file.toByteArray) catch { case x: Exception => None }
-  }
-  /** Assume the string is a fully qualified class name and try to
-   *  find the class object it represents.
-   */
-  def tryClass(path: String): Array[Byte] = {
-    val extName = (
-      if (path endsWith ".class") (path dropRight 6).replace('/', '.')
-      else path
-    )
-    loader.classBytes(extName)
-  }
-}
-
-object Javap {
-  val Env     = "sun.tools.javap.JavapEnvironment"
-  val Printer = "sun.tools.javap.JavapPrinter"
-
-  def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) =
-    cl.tryToInitializeClass[AnyRef](Env).isDefined
-
-  // "documentation"
-  type FakeEnvironment = AnyRef
-  type FakePrinter = AnyRef
-
-  def apply(path: String): Unit      = apply(Seq(path))
-  def apply(args: Seq[String]): Unit = new JavapClass() apply args foreach (_.show())
-
-  sealed trait JpResult {
-    type ResultType
-    def isError: Boolean
-    def value: ResultType
-    def show(): Unit
-    // todo
-    // def header(): String
-    // def fields(): List[String]
-    // def methods(): List[String]
-    // def signatures(): List[String]
-  }
-  class JpError(msg: String) extends JpResult {
-    type ResultType = String
-    def isError = true
-    def value = msg
-    def show() = println(msg)
-  }
-  class JpSuccess(val value: AnyRef) extends JpResult {
-    type ResultType = AnyRef
-    def isError = false
-    def show() = value.asInstanceOf[{ def print(): Unit }].print()
-  }
-
-  class JpOptions {
-    private object Access {
-      final val PRIVATE = 0
-      final val PROTECTED = 1
-      final val PACKAGE = 2
-      final val PUBLIC = 3
-    }
-    private val envActionMap: Map[String, (String, Any)] = {
-      val map = Map(
-        "-l"         -> (("showLineAndLocal", true)),
-        "-c"         -> (("showDisassembled", true)),
-        "-s"         -> (("showInternalSigs", true)),
-        "-verbose"   -> (("showVerbose", true)),
-        "-private"   -> (("showAccess", Access.PRIVATE)),
-        "-package"   -> (("showAccess", Access.PACKAGE)),
-        "-protected" -> (("showAccess", Access.PROTECTED)),
-        "-public"    -> (("showAccess", Access.PUBLIC)),
-        "-all"       -> (("showallAttr", true))
-      )
-      map ++ List(
-        "-v" -> map("-verbose"),
-        "-p" -> map("-private")
-      )
-    }
-    def apply(opts: Seq[String]): Seq[(String, Any)] = {
-      opts flatMap { opt =>
-        envActionMap get opt match {
-          case Some(pair) => List(pair)
-          case _          =>
-            val charOpts = opt.tail.toSeq map ("-" + _)
-            if (charOpts forall (envActionMap contains _))
-              charOpts map envActionMap
-            else Nil
-        }
-      }
-    }
-  }
-}
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index 0af1011..5526660 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -3,37 +3,46 @@
  * @author  Paul Phillips
  */
 
-package scala.tools
+package scala
+package tools
 package util
 
-import java.net.{ URL, MalformedURLException }
 import scala.tools.reflect.WrappedProperties.AccessControl
-import nsc.{ Settings, GenericRunnerSettings }
-import nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
-import nsc.io.{ File, Directory, Path, AbstractFile }
+import scala.tools.nsc.{ Settings }
+import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
+import scala.reflect.io.{ File, Directory, Path, AbstractFile }
+import scala.reflect.runtime.ReflectionUtils
 import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
 import PartialFunction.condOpt
 import scala.language.postfixOps
 
 // Loosely based on the draft specification at:
-// https://wiki.scala-lang.org/display/SW/Classpath
+// https://wiki.scala-lang.org/display/SIW/Classpath
 
 object PathResolver {
-  // Imports property/environment functions which suppress
-  // security exceptions.
+  // Imports property/environment functions which suppress security exceptions.
   import AccessControl._
-
-  def firstNonEmpty(xs: String*)            = xs find (_ != "") getOrElse ""
-
-  /** Map all classpath elements to absolute paths and reconstruct the classpath.
-    */
-  def makeAbsolute(cp: String) = ClassPath.map(cp, x => Path(x).toAbsolute.path)
+  import scala.compat.Platform.EOL
+
+  implicit class MkLines(val t: TraversableOnce[_]) extends AnyVal {
+    def mkLines: String = t.mkString("", EOL, EOL)
+    def mkLines(header: String, indented: Boolean = false, embraced: Boolean = false): String = {
+      val space = "\u0020"
+      val sep = if (indented) EOL + space * 2 else EOL
+      val (lbrace, rbrace) = if (embraced) (space + "{", EOL + "}") else ("", "")
+      t.mkString(header + lbrace + sep, sep, rbrace + EOL)
+    }
+  }
+  implicit class AsLines(val s: String) extends AnyVal {
+    // sm"""...""" could do this in one pass
+    def asLines = s.trim.stripMargin.lines.mkLines
+  }
 
   /** pretty print class path */
   def ppcp(s: String) = split(s) match {
     case Nil      => ""
     case Seq(x)   => x
-    case xs       => xs map ("\n" + _) mkString
+    case xs       => xs.mkString(EOL, EOL, "")
   }
 
   /** Values found solely by inspecting environment or property variables.
@@ -46,8 +55,7 @@ object PathResolver {
     /** Environment variables which java pays attention to so it
      *  seems we do as well.
      */
-    def classPathEnv        =  envOrElse("CLASSPATH", "")
-    def sourcePathEnv       =  envOrElse("SOURCEPATH", "")
+    def sourcePathEnv       = envOrElse("SOURCEPATH", "")
 
     def javaBootClassPath   = propOrElse("sun.boot.class.path", searchForBootClasspath)
     def javaExtDirs         = propOrEmpty("java.ext.dirs")
@@ -58,20 +66,14 @@ object PathResolver {
     def javaUserClassPath   = propOrElse("java.class.path", "")
     def useJavaClassPath    = propOrFalse("scala.usejavacp")
 
-    override def toString = """
+    override def toString = s"""
       |object Environment {
-      |  scalaHome          = %s (useJavaClassPath = %s)
-      |  javaBootClassPath  = <%d chars>
-      |  javaExtDirs        = %s
-      |  javaUserClassPath  = %s
-      |  scalaExtDirs       = %s
-      |}""".trim.stripMargin.format(
-        scalaHome, useJavaClassPath,
-        javaBootClassPath.length,
-        ppcp(javaExtDirs),
-        ppcp(javaUserClassPath),
-        ppcp(scalaExtDirs)
-      )
+      |  scalaHome          = $scalaHome (useJavaClassPath = $useJavaClassPath)
+      |  javaBootClassPath  = <${javaBootClassPath.length} chars>
+      |  javaExtDirs        = ${ppcp(javaExtDirs)}
+      |  javaUserClassPath  = ${ppcp(javaUserClassPath)}
+      |  scalaExtDirs       = ${ppcp(scalaExtDirs)}
+      |}""".asLines
   }
 
   /** Default values based on those in Environment as interpreted according
@@ -86,7 +88,6 @@ object PathResolver {
 
     def scalaHome         = Environment.scalaHome
     def scalaHomeDir      = Directory(scalaHome)
-    def scalaHomeExists   = scalaHomeDir.isDirectory
     def scalaLibDir       = Directory(scalaHomeDir / "lib")
     def scalaClassesDir   = Directory(scalaHomeDir / "classes")
 
@@ -109,33 +110,67 @@ object PathResolver {
     // classpath as set up by the runner (or regular classpath under -nobootcp)
     // and then again here.
     def scalaBootClassPath  = ""
-    // scalaLibDirFound match {
-    //   case Some(dir) if scalaHomeExists =>
-    //     val paths = ClassPath expandDir dir.path
-    //     join(paths: _*)
-    //   case _                            => ""
-    // }
-
     def scalaExtDirs = Environment.scalaExtDirs
-
     def scalaPluginPath = (scalaHomeDir / "misc" / "scala-devel" / "plugins").path
 
-    override def toString = """
+    override def toString = s"""
       |object Defaults {
-      |  scalaHome            = %s
-      |  javaBootClassPath    = %s
-      |  scalaLibDirFound     = %s
-      |  scalaLibFound        = %s
-      |  scalaBootClassPath   = %s
-      |  scalaPluginPath      = %s
-      |}""".trim.stripMargin.format(
-        scalaHome,
-        ppcp(javaBootClassPath),
-        scalaLibDirFound, scalaLibFound,
-        ppcp(scalaBootClassPath), ppcp(scalaPluginPath)
-      )
+      |  scalaHome            = $scalaHome
+      |  javaBootClassPath    = ${ppcp(javaBootClassPath)}
+      |  scalaLibDirFound     = $scalaLibDirFound
+      |  scalaLibFound        = $scalaLibFound
+      |  scalaBootClassPath   = ${ppcp(scalaBootClassPath)}
+      |  scalaPluginPath      = ${ppcp(scalaPluginPath)}
+      |}""".asLines
+  }
+
+  /** Locations discovered by supplemental heuristics.
+   */
+  object SupplementalLocations {
+
+    /** The platform-specific support jar.
+     *
+     *  Usually this is `tools.jar` in the jdk/lib directory of the platform distribution.
+     *
+     *  The file location is determined by probing the lib directory under JDK_HOME or JAVA_HOME,
+     *  if one of those environment variables is set, then the lib directory under java.home,
+     *  and finally the lib directory under the parent of java.home. Or, as a last resort,
+     *  search deeply under those locations (except for the parent of java.home, on the notion
+     *  that if this is not a canonical installation, then that search would have little
+     *  chance of succeeding).
+     */
+    def platformTools: Option[File] = {
+      val jarName = "tools.jar"
+      def jarPath(path: Path) = (path / "lib" / jarName).toFile
+      def jarAt(path: Path) = {
+        val f = jarPath(path)
+        if (f.isFile) Some(f) else None
+      }
+      val jdkDir = {
+        val d = Directory(jdkHome)
+        if (d.isDirectory) Some(d) else None
+      }
+      def deeply(dir: Directory) = dir.deepFiles find (_.name == jarName)
+
+      val home    = envOrSome("JDK_HOME", envOrNone("JAVA_HOME")) map (p => Path(p))
+      val install = Some(Path(javaHome))
+
+      (home flatMap jarAt) orElse (install flatMap jarAt) orElse (install map (_.parent) flatMap jarAt) orElse
+        (jdkDir flatMap deeply)
+    }
+    override def toString = s"""
+      |object SupplementalLocations {
+      |  platformTools        = $platformTools
+      |}""".asLines
+  }
+
+  // used in PathResolver constructor
+  private object NoImplClassJavaContext extends JavaContext {
+    override def isValidName(name: String): Boolean =
+      !ReflectionUtils.scalacShouldntLoadClassfile(name)
   }
 
+  // called from scalap
   def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = {
     val s = new Settings()
     s.classpath.value = path
@@ -153,18 +188,21 @@ object PathResolver {
     }
     else {
       val settings = new Settings()
-      val rest = settings.processArguments(args.toList, false)._2
+      val rest = settings.processArguments(args.toList, processAll = false)._2
       val pr = new PathResolver(settings)
       println(" COMMAND: 'scala %s'".format(args.mkString(" ")))
       println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" ")))
-      pr.result.show
+      pr.result.show()
     }
   }
 }
-import PathResolver.{ Defaults, Environment, firstNonEmpty, ppcp }
 
 class PathResolver(settings: Settings, context: JavaContext) {
-  def this(settings: Settings) = this(settings, if (settings.inline.value) new JavaContext else DefaultJavaContext)
+  import PathResolver.{ Defaults, Environment, AsLines, MkLines, ppcp }
+
+  def this(settings: Settings) = this(settings,
+      if (settings.YnoLoadImplClass) PathResolver.NoImplClassJavaContext
+      else DefaultJavaContext)
 
   private def cmdLineOrElse(name: String, alt: String) = {
     (commandLineFor(name) match {
@@ -188,6 +226,7 @@ class PathResolver(settings: Settings, context: JavaContext) {
   object Calculated {
     def scalaHome           = Defaults.scalaHome
     def useJavaClassPath    = settings.usejavacp.value || Defaults.useJavaClassPath
+    def useManifestClassPath= settings.usemanifestcp.value
     def javaBootClassPath   = cmdLineOrElse("javabootclasspath", Defaults.javaBootClassPath)
     def javaExtDirs         = cmdLineOrElse("javaextdirs", Defaults.javaExtDirs)
     def javaUserClassPath   = if (useJavaClassPath) Defaults.javaUserClassPath else ""
@@ -227,43 +266,37 @@ class PathResolver(settings: Settings, context: JavaContext) {
       classesInPath(scalaBootClassPath),            // 4. The Scala boot class path.
       contentsOfDirsInPath(scalaExtDirs),           // 5. The Scala extension class path.
       classesInExpandedPath(userClassPath),         // 6. The Scala application class path.
+      classesInManifest(useManifestClassPath),      // 8. The Manifest class path.
       sourcesInPath(sourcePath)                     // 7. The Scala source path.
     )
 
     lazy val containers = basis.flatten.distinct
 
-    override def toString = """
+    override def toString = s"""
       |object Calculated {
-      |  scalaHome            = %s
-      |  javaBootClassPath    = %s
-      |  javaExtDirs          = %s
-      |  javaUserClassPath    = %s
-      |    useJavaClassPath   = %s
-      |  scalaBootClassPath   = %s
-      |  scalaExtDirs         = %s
-      |  userClassPath        = %s
-      |  sourcePath           = %s
-      |}""".trim.stripMargin.format(
-        scalaHome,
-        ppcp(javaBootClassPath), ppcp(javaExtDirs), ppcp(javaUserClassPath),
-        useJavaClassPath,
-        ppcp(scalaBootClassPath), ppcp(scalaExtDirs), ppcp(userClassPath),
-        ppcp(sourcePath)
-      )
+      |  scalaHome            = $scalaHome
+      |  javaBootClassPath    = ${ppcp(javaBootClassPath)}
+      |  javaExtDirs          = ${ppcp(javaExtDirs)}
+      |  javaUserClassPath    = ${ppcp(javaUserClassPath)}
+      |    useJavaClassPath   = $useJavaClassPath
+      |  scalaBootClassPath   = ${ppcp(scalaBootClassPath)}
+      |  scalaExtDirs         = ${ppcp(scalaExtDirs)}
+      |  userClassPath        = ${ppcp(userClassPath)}
+      |  sourcePath           = ${ppcp(sourcePath)}
+      |}""".asLines
   }
 
   def containers = Calculated.containers
 
   lazy val result = {
     val cp = new JavaClassPath(containers.toIndexedSeq, context)
-    if (settings.Ylogcp.value) {
-      Console.println("Classpath built from " + settings.toConciseString)
-      Console.println("Defaults: " + PathResolver.Defaults)
-      Console.println("Calculated: " + Calculated)
+    if (settings.Ylogcp) {
+      Console print f"Classpath built from ${settings.toConciseString} %n"
+      Console print s"Defaults: ${PathResolver.Defaults}"
+      Console print s"Calculated: $Calculated"
 
       val xs = (Calculated.basis drop 2).flatten.distinct
-      println("After java boot/extdirs classpath has %d entries:" format xs.size)
-      xs foreach (x => println("  " + x))
+      Console print (xs mkLines (s"After java boot/extdirs classpath has ${xs.size} entries:", indented = true))
     }
     cp
   }
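
For reference, the string-formatting helpers introduced at the top of this file (MkLines/AsLines) can be exercised outside the compiler. The sketch below is illustrative only and not part of the patch; it re-declares simplified versions of the helpers (the object name, EOL handling and sample entries are made up) and assumes Scala 2.11 on a Java 8 JDK, where StringOps.lines is unambiguous:

    object AsLinesDemo {
      val EOL = sys.props("line.separator")

      // Simplified re-declarations of the helpers added above (no AnyVal, no brace option).
      implicit class MkLines(val t: TraversableOnce[String]) {
        def mkLines(header: String, indented: Boolean = false): String = {
          val sep = if (indented) EOL + "  " else EOL
          t.mkString(header + sep, sep, EOL)
        }
      }
      implicit class AsLines(val s: String) {
        def asLines: String = s.trim.stripMargin.lines.mkString("", EOL, EOL)
      }

      def main(args: Array[String]): Unit = {
        // Header line followed by each entry on its own, two-space indented line.
        print(List("/opt/lib/a.jar", "/opt/lib/b.jar").mkLines("classpath entries:", indented = true))

        // Margin-stripped block with a trailing newline, as used by the toString overrides above.
        print(s"""
          |object Environment {
          |  scalaHome = ${sys.props.getOrElse("scala.home", "<none>")}
          |}""".asLines)
      }
    }
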
diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala
index 1b06ce2..1d39a59 100644
--- a/src/compiler/scala/tools/util/SocketServer.scala
+++ b/src/compiler/scala/tools/util/SocketServer.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.tools.util
+package scala
+package tools.util
 
 import java.net.{ ServerSocket, SocketException, SocketTimeoutException }
 import java.io.{ PrintWriter, BufferedReader }
@@ -16,8 +17,8 @@ trait CompileOutputCommon {
   def verbose: Boolean
 
   def info(msg: String)  = if (verbose) echo(msg)
-  def echo(msg: String)  = {Console println msg; Console.flush}
-  def warn(msg: String)  = {Console.err println msg; Console.flush}
+  def echo(msg: String)  = {Console println msg; Console.flush()}
+  def warn(msg: String)  = {Console.err println msg; Console.flush()}
   def fatal(msg: String) = { warn(msg) ; sys.exit(1) }
 }
 
diff --git a/src/compiler/scala/tools/util/VerifyClass.scala b/src/compiler/scala/tools/util/VerifyClass.scala
index d208a9f..3c203e1 100644
--- a/src/compiler/scala/tools/util/VerifyClass.scala
+++ b/src/compiler/scala/tools/util/VerifyClass.scala
@@ -3,7 +3,7 @@ package scala.tools.util
 import scala.tools.nsc.io._
 import java.net.URLClassLoader
 import scala.collection.JavaConverters._
-
+import scala.language.postfixOps
 
 object VerifyClass {
 
diff --git a/src/continuations/library/scala/util/continuations/ControlContext.scala b/src/continuations/library/scala/util/continuations/ControlContext.scala
deleted file mode 100644
index 44a5b53..0000000
--- a/src/continuations/library/scala/util/continuations/ControlContext.scala
+++ /dev/null
@@ -1,249 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2010-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.continuations
-
-import scala.annotation.{ Annotation, StaticAnnotation, TypeConstraint }
-
-/** This annotation is used to mark a parameter as part of a continuation
- * context.
- *
- * The type `A @cpsParam[B,C]` is desugared to `ControlContext[A,B,C]` at compile
- * time.
- *
- * @tparam B  The type of computation state after computation has executed, and
- *   before control is returned to the shift.
- * @tparam C  The eventual return type of this delimited computation.
- * @see scala.util.continuations.ControlContext
- */
-class cpsParam[-B,+C] extends StaticAnnotation with TypeConstraint
-
-private class cpsSym[B] extends Annotation // implementation detail
-
-private class cpsSynth extends Annotation // implementation detail
-
-private class cpsPlus extends StaticAnnotation with TypeConstraint // implementation detail
-private class cpsMinus extends Annotation // implementation detail
-
-
-/**
- * This class represents a portion of computation that has a 'hole' in it.  The
- * class has the ability to compute state up until a certain point where the
- * state has the `A` type.  If this context is given a function of type
- * `A => B` to move the state to the `B` type, then the entire computation can
- * be completed resulting in a value of type `C`.
- *
- * An Example: {{{
- *   val cc = new ControlContext[String, String, String](
- *      fun = { (f: String=>String, err: Exception => String) =>
- *        val updatedState =
- *          try f("State")
- *          catch {
- *            case e: Exception => err(e)
- *          }
- *        updatedState + "-Complete!"
- *      },
- *      x = null.asInstanceOf[String]
- *  )
- *  cc.foreach(_ + "-Continued")  // Results in "State-Continued-Complete!"
- * }}}
- *
- * This class is used to transform calls to `shift` in the `continuations`
- * package.  Direct use and instantiation is possible, but usually reserved
- * for advanced cases.
- *
- *
- * A context may either be ''trivial'' or ''non-trivial''.   A ''trivial''
- * context '''just''' has a state of type `A`.  When completing the computation,
- * it's only necessary to use the function of type `A => B` directly against
- * the trivial value. A ''non-trivial'' value stores a computation '''around'''
- * the state transformation of type `A => B` and cannot be short-circuited.
- *
- * @param fun The captured computation so far.  The type
- *   `(A => B, Exception => B) => C` is a function where:
- *   - The first parameter `A=>B` represents the computation defined against
- *       the current state held in the ControlContext.
- *   - The second parameter `Exception => B` represents a computation to
- *       perform if an exception is thrown from the first parameter's computation.
- *   - The return value is the result of the entire computation contained in this
- *       `ControlContext`.
- * @param x  The current state stored in this context.  Allowed to be null if
- *   the context is non-trivial.
- * @tparam A  The type of the state currently held in the context.
- * @tparam B  The type of the transformed state needed to complete this computation.
- * @tparam C  The return type of the entire computation stored in this context.
- * @note `fun` and `x` are allowed to be `null`.
- * @see scala.util.continuations.shiftR
- */
-final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val x: A) extends Serializable {
-
-  /*
-    final def map[A1](f: A => A1): ControlContext[A1,B,C] = {
-      new ControlContext((k:(A1 => B)) => fun((x:A) => k(f(x))), null.asInstanceOf[A1])
-    }
-
-    final def flatMap[A1,B1<:B](f: (A => ControlContext[A1,B1,B])): ControlContext[A1,B1,C] = {
-      new ControlContext((k:(A1 => B1)) => fun((x:A) => f(x).fun(k)))
-    }
-  */
-
-  /**
-   * Modifies the currently captured state in this `ControlContext`.
-   * @tparam A1 The new type of state in this context.
-   * @param f A transformation function on the current state of the `ControlContext`.
-   * @return The new `ControlContext`.
-   */
-  @noinline final def map[A1](f: A => A1): ControlContext[A1,B,C] = {
-    if (fun eq null)
-      try {
-        new ControlContext[A1,B,C](null, f(x)) // TODO: only alloc if f(x) != x
-      } catch {
-        case ex: Exception =>
-          new ControlContext((k: A1 => B, thr: Exception => B) => thr(ex).asInstanceOf[C], null.asInstanceOf[A1])
-      }
-    else
-      new ControlContext({ (k: A1 => B, thr: Exception => B) =>
-        fun( { (x:A) =>
-          var done = false
-          try {
-            val res = f(x)
-            done = true
-            k(res)
-          } catch {
-            case ex: Exception if !done =>
-              thr(ex)
-          }
-        }, thr)
-      }, null.asInstanceOf[A1])
-  }
-
-
-  // it would be nice if @inline would turn the trivial path into a tail call.
-  // unfortunately it doesn't, so we do it ourselves in SelectiveCPSTransform
-
-  /**
-   * Maps and flattens this `ControlContext` with another `ControlContext` generated from the current state.
- *   @note   The resulting computation is still the type `C`.
-   * @tparam A1 The new type of the contained state.
-   * @tparam B1 The new type of the state after the stored continuation has executed.
-   * @tparam C1 The result type of the nested `ControlContext`.  Because the nested `ControlContext` is executed within
-   *   the outer `ControlContext`, this type must `>: B` so that the resulting nested computation can be fed through
-   *   the current continuation.
-   * @param f A transformation function from the current state to a nested `ControlContext`.
-   * @return The transformed `ControlContext`.
-   */
-  @noinline final def flatMap[A1,B1,C1<:B](f: (A => ControlContext[A1,B1,C1])): ControlContext[A1,B1,C] = {
-    if (fun eq null)
-      try {
-        f(x).asInstanceOf[ControlContext[A1,B1,C]]
-      } catch {
-        case ex: Exception =>
-          new ControlContext((k: A1 => B1, thr: Exception => B1) => thr(ex).asInstanceOf[C], null.asInstanceOf[A1])
-      }
-    else
-      new ControlContext({ (k: A1 => B1, thr: Exception => B1) =>
-        fun( { (x:A) =>
-          var done = false
-          try {
-            val ctxR = f(x)
-            done = true
-            val res: C1 = ctxR.foreachFull(k, thr) // => B1
-            res
-          } catch {
-            case ex: Exception if !done =>
-              thr(ex).asInstanceOf[B] // => B NOTE: in general this is unsafe!
-          }                           // However, the plugin will not generate offending code
-        }, thr.asInstanceOf[Exception=>B]) // => B
-      }, null.asInstanceOf[A1])
-  }
-
-  /**
-   * Runs the computation against the state stored in this `ControlContext`.
-   * @param f the computation that modifies the current state of the context.
-   * @note This method could throw exceptions from the computations.
-   */
-  final def foreach(f: A => B) = foreachFull(f, throw _)
-
-  def foreachFull(f: A => B, g: Exception => B): C = {
-    if (fun eq null)
-      f(x).asInstanceOf[C]
-    else
-      fun(f, g)
-  }
-
-  /** @return true if this context only stores a state value and not any deferred computation. */
-  final def isTrivial = fun eq null
-  /** @return The current state value. */
-  final def getTrivialValue = x.asInstanceOf[A]
-
-  // need filter or other functions?
-
-  final def flatMapCatch[A1>:A,B1<:B,C1>:C<:B1](pf: PartialFunction[Exception, ControlContext[A1,B1,C1]]): ControlContext[A1,B1,C1] = {
-    if (fun eq null)
-      this
-    else {
-      val fun1 = (ret1: A1 => B1, thr1: Exception => B1) => {
-        val thr: Exception => B1 = { t: Exception =>
-          var captureExceptions = true
-          try {
-            if (pf.isDefinedAt(t)) {
-              val cc1 = pf(t)
-              captureExceptions = false
-              cc1.foreachFull(ret1, thr1) // Throw => B
-            } else {
-              captureExceptions = false
-              thr1(t) // Throw => B1
-            }
-          } catch {
-            case t1: Exception if captureExceptions => thr1(t1) // => E2
-          }
-        }
-        fun(ret1, thr)// fun(ret1, thr)  // => B
-      }
-      new ControlContext(fun1, null.asInstanceOf[A1])
-    }
-  }
-
-  final def mapFinally(f: () => Unit): ControlContext[A,B,C] = {
-    if (fun eq null) {
-      try {
-        f()
-        this
-      } catch {
-        case ex: Exception =>
-          new ControlContext((k: A => B, thr: Exception => B) => thr(ex).asInstanceOf[C], null.asInstanceOf[A])
-      }
-    } else {
-      val fun1 = (ret1: A => B, thr1: Exception => B) => {
-        val ret: A => B = { x: A =>
-          var captureExceptions = true
-          try {
-            f()
-            captureExceptions = false
-            ret1(x)
-          } catch {
-            case t1: Exception if captureExceptions => thr1(t1)
-          }
-        }
-        val thr: Exception => B = { t: Exception =>
-          var captureExceptions = true
-          try {
-            f()
-            captureExceptions = false
-            thr1(t)
-          } catch {
-            case t1: Exception if captureExceptions => thr1(t1)
-          }
-        }
-        fun(ret, thr1)
-      }
-      new ControlContext(fun1, null.asInstanceOf[A])
-    }
-  }
-
-}
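
The scaladoc of the deleted class above distinguishes ''trivial'' contexts, which only store a state value, from ''non-trivial'' ones, which wrap a captured computation around the continuation. Below is a minimal standalone model of just that distinction; the names are illustrative, it is not the removed ControlContext, and the error-handling parameter is omitted:

    // Cut-down model of the trivial/non-trivial paths described in the scaladoc above.
    final class MiniContext[+A, -B, +C](val fun: (A => B) => C, val x: A) {
      def isTrivial: Boolean = fun eq null
      def foreach(f: A => B): C =
        if (isTrivial) f(x).asInstanceOf[C]  // trivial: apply f directly to the stored state
        else fun(f)                          // non-trivial: run the captured computation around f
    }

    object MiniContextDemo extends App {
      // Trivial context: nothing captured, only the state value 41.
      val trivial = new MiniContext[Int, Int, Int](null, 41)
      println(trivial.foreach(_ + 1))   // 42

      // Non-trivial context: mirrors the "State"/"-Complete!" example in the scaladoc above.
      val captured = new MiniContext[String, String, String](k => k("State") + "-Complete!", null)
      println(captured.foreach(_ + "-Continued"))   // State-Continued-Complete!
    }
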
diff --git a/src/continuations/library/scala/util/continuations/package.scala b/src/continuations/library/scala/util/continuations/package.scala
deleted file mode 100644
index 90bab56..0000000
--- a/src/continuations/library/scala/util/continuations/package.scala
+++ /dev/null
@@ -1,187 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2010-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util
-
-/* TODO: better documentation of return-type modification.
- * (Especially what "Illegal answer type modification: ... andThen ..." means.)
- */
-
-/**
- * Delimited continuations are a feature for modifying the usual control flow
- * of a program.  To use continuations, provide the option `-P:continuations:enable`
- * to the Scala compiler or REPL to activate the compiler plugin.
- *
- * Below is an example of using continuations to suspend execution while awaiting
- * user input. Similar facilities are used in so-called continuation-based web frameworks.
- *
- * {{{
- *   def go =
- *     reset {
- *       println("Welcome!")
- *       val first = ask("Please give me a number")
- *       val second = ask("Please enter another number")
- *       printf("The sum of your numbers is: %d\n", first + second)
- *     }
- * }}}
- *
- * The `reset` is provided by this package and delimits the extent of the
- * transformation. The `ask` is a function that will be defined below. Its
- * effect is to issue a prompt and then suspend execution awaiting user input.
- * Once the user provides an input value, execution of the suspended block
- * resumes.
- *
- * {{{
- *   val sessions = new HashMap[UUID, Int=>Unit]
- *   def ask(prompt: String): Int @cps[Unit] =
- *     shift {
- *       k: (Int => Unit) => {
- *         val id = uuidGen
- *         printf("%s\nrespond with: submit(0x%x, ...)\n", prompt, id)
- *         sessions += id -> k
- *       }
- *     }
- * }}}
- *
- * The type of `ask` includes a `@cps` annotation which drives the transformation.
- * The type signature `Int @cps[Unit]` means that `ask` should be used in a
- * context requiring an `Int`, but actually it will suspend and return `Unit`.
- *
- * The computation leading up to the first `ask` is executed normally. The
- * remainder of the reset block is wrapped into a closure that is passed as
- * the parameter `k` to the `shift` function, which can then decide whether
- * and how to execute the continuation. In this example, the continuation is
- * stored in a sessions map for later execution. This continuation includes a
- * second call to `ask`, which is treated likewise once the execution resumes.
- *
- * <h2>CPS Annotation</h2>
- *
- * The aforementioned `@cps[A]` annotation is an alias for the more general
- * `@cpsParam[B,C]` where `B=C`. The type `A @cpsParam[B,C]` describes a term
- * which yields a value of type `A` within an evaluation context producing a
- * value of type `B`. After the CPS transformation, this return type is
- * modified to `C`.
- *
- * The `@cpsParam` annotations are introduced by `shift` blocks, and propagate
- * via the return types to the dynamically enclosing context. The propagation
- * stops upon reaching a `reset` block.
- */
-
-package object continuations {
-
-  /** An annotation that denotes a type is part of a continuation context.
-   *  `@cps[A]` is shorthand for `cpsParam[A,A]`.
-   *  @tparam A  The return type of the continuation context.
-   */
-  type cps[A] = cpsParam[A,A]
-
-  /** An annotation that denotes a type is part of a side effecting continuation context.
-   *  `@suspendable` is shorthand notation for `@cpsParam[Unit,Unit]` or `@cps[Unit]`.
-   */
-  type suspendable = cps[Unit]
-
-  /**
-   * The `shift` function captures the remaining computation in a `reset` block
-   * and passes it to a closure provided by the user.
-   *
-   * For example:
-   * {{{
-   *    reset {
-   *       shift { (k: Int => Int) => k(5) } + 1
-   *    }
-   * }}}
-   *
-   * In this example, `shift` is used in the expression `shift ... + 1`.
-   * The compiler will alter this expression so that the call
-   * to `shift` becomes a parameter to a function, creating something like:
-   * {{{
-   *   { (k: Int => Int) => k(5) } apply { _ + 1 }
-   * }}}
-   * The result of this expression is 6.
-   *
-   * There can be more than one `shift` call in a `reset` block.  Each call
-   * to `shift` can alter the return type of expression within the reset block,
-   * but will not change the return type of the entire `reset { block }`
-   * expression.
-   *
-   * @param fun  A function where
-   *   - The parameter is the remainder of computation within the current
-   *     `reset` block.  This is passed as a function `A => B`.
-   *   - The return is the return value of the `ControlContext` which is
-   *     generated from this inversion.
- *   @note  Must be invoked in the context of a call to `reset`. This context
-   *        may not be far up the stack, but a call to reset is needed to
-   *        eventually remove the `@cps` annotations from types.
-   */
-  def shift[A,B,C](fun: (A => B) => C): A @cpsParam[B,C] = {
-    throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled")
-  }
-  /** Creates a context for continuations captured within the argument closure
-   * of this `reset` call and returns the result of the entire transformed
-   * computation. Within an expression of the form `reset { block }`,
-   * the closure expression (`block`) will be modified such that at each
-   * call to `shift` the remainder of the expression is transformed into a
-   * function to be passed into the shift.
-   * @return The result of a block of code that uses `shift` to capture continuations.
-   */
-  def reset[A,C](ctx: =>(A @cpsParam[A,C])): C = {
-    val ctxR = reify[A,A,C](ctx)
-    if (ctxR.isTrivial)
-      ctxR.getTrivialValue.asInstanceOf[C]
-    else
-      ctxR.foreach((x:A) => x)
-  }
-
-  def reset0[A](ctx: =>(A @cpsParam[A,A])): A = reset(ctx)
-
-  def run[A](ctx: =>(Any @cpsParam[Unit,A])): A = {
-    val ctxR = reify[Any,Unit,A](ctx)
-    if (ctxR.isTrivial)
-      ctxR.getTrivialValue.asInstanceOf[A]
-    else
-      ctxR.foreach((x:Any) => ())
-  }
-
-
-  // methods below are primarily implementation details and are not
-  // needed frequently in client code
-
-  def shiftUnit0[A,B](x: A): A @cpsParam[B,B] = {
-    shiftUnit[A,B,B](x)
-  }
-
-  def shiftUnit[A,B,C>:B](x: A): A @cpsParam[B,C] = {
-    throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled")
-  }
-
-  /** This method converts from the sugared `A @cpsParam[B,C]` type to the desugared
-    * `ControlContext[A,B,C]` type.  The underlying data is not changed.
-    */
-  def reify[A,B,C](ctx: =>(A @cpsParam[B,C])): ControlContext[A,B,C] = {
-    throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled")
-  }
-
-  def shiftUnitR[A,B](x: A): ControlContext[A,B,B] = {
-    new ControlContext(null, x)
-  }
-
-  /**
-   * Captures a computation into a `ControlContext`.
-   * @param fun  The function which accepts the inverted computation and returns
-   * a final result.
-   * @see shift
-   */
-  def shiftR[A,B,C](fun: (A => B) => C): ControlContext[A,B,C] = {
-    new ControlContext((f:A=>B,g:Exception=>B) => fun(f), null.asInstanceOf[A])
-  }
-
-  def reifyR[A,B,C](ctx: => ControlContext[A,B,C]): ControlContext[A,B,C] = {
-    ctx
-  }
-
-}
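
The package documentation above states that, inside reset, the expression `shift { (k: Int => Int) => k(5) } + 1` is rewritten so that the remainder of the block becomes the argument k. The rewritten form needs neither the plugin nor the removed library, so the claimed result can be checked in plain Scala (a sketch of the documented desugaring, not the plugin's actual output):

    object ShiftDesugarDemo extends App {
      // What the shift body receives: the rest of the reset block (here `_ + 1`) bound to k.
      val shiftBody = (k: Int => Int) => k(5)
      val result = shiftBody { (x: Int) => x + 1 }   // apply it to the remainder of the computation
      println(result)                                // prints 6, matching the scaladoc example above
    }
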
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
deleted file mode 100644
index 00c72cf..0000000
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ /dev/null
@@ -1,524 +0,0 @@
-// $Id$
-
-package scala.tools.selectivecps
-
-import scala.tools.nsc.Global
-import scala.tools.nsc.typechecker.Modes
-import scala.tools.nsc.MissingRequirementError
-
-abstract class CPSAnnotationChecker extends CPSUtils with Modes {
-  val global: Global
-  import global._
-  import analyzer.{AnalyzerPlugin, Typer}
-  import definitions._
-
-  //override val verbose = true
-  @inline override final def vprintln(x: =>Any): Unit = if (verbose) println(x)
-
-  /**
-   *  Checks whether @cps annotations conform
-   */
-  object checker extends AnnotationChecker {
-    private[CPSAnnotationChecker] def addPlusMarker(tp: Type)  = tp withAnnotation newPlusMarker()
-    private[CPSAnnotationChecker] def addMinusMarker(tp: Type) = tp withAnnotation newMinusMarker()
-
-    private[CPSAnnotationChecker] def cleanPlus(tp: Type) =
-      removeAttribs(tp, MarkerCPSAdaptPlus, MarkerCPSTypes)
-    private[CPSAnnotationChecker] def cleanPlusWith(tp: Type)(newAnnots: AnnotationInfo*) =
-      cleanPlus(tp) withAnnotations newAnnots.toList
-
-    /** Check annotations to decide whether tpe1 <:< tpe2 */
-    def annotationsConform(tpe1: Type, tpe2: Type): Boolean = {
-      if (!cpsEnabled) return true
-
-      vprintln("check annotations: " + tpe1 + " <:< " + tpe2)
-
-      // Nothing is least element, but Any is not the greatest
-      if (tpe1.typeSymbol eq NothingClass)
-        return true
-
-      val annots1 = cpsParamAnnotation(tpe1)
-      val annots2 = cpsParamAnnotation(tpe2)
-
-      // @plus and @minus should only occur at the left, and never together
-      // TODO: insert check
-
-      // @minus @cps is the same as no annotations
-      if (hasMinusMarker(tpe1))
-        return annots2.isEmpty
-
-      // to handle answer type modification, we must make @plus <:< @cps
-      if (hasPlusMarker(tpe1) && annots1.isEmpty)
-        return true
-
-      // @plus @cps will fall through and compare the @cps type args
-      // @cps parameters must match exactly
-      if ((annots1 corresponds annots2)(_.atp <:< _.atp))
-        return true
-
-      // Need to handle uninstantiated type vars specially:
-
-      // g map (x => x)  with expected type List[Int] @cps
-      // results in comparison ?That <:< List[Int] @cps
-
-      // Instantiating ?That to an annotated type would fail during
-      // transformation.
-
-      // Instead we force-compare tpe1 <:< tpe2.withoutAnnotations
-      // to trigger instantiation of the TypeVar to the base type
-
-      // This is a bit unorthodox (we're only supposed to look at
-      // annotations here) but seems to work.
-
-      if (!annots2.isEmpty && !tpe1.isGround)
-        return tpe1 <:< tpe2.withoutAnnotations
-
-      false
-    }
-
-    /** Refine the computed least upper bound of a list of types.
-     *  All this should do is add annotations. */
-    override def annotationsLub(tpe: Type, ts: List[Type]): Type = {
-      if (!cpsEnabled) return tpe
-
-      val annots1 = cpsParamAnnotation(tpe)
-      val annots2 = ts flatMap cpsParamAnnotation
-
-      if (annots2.nonEmpty) {
-        val cpsLub = newMarker(global.lub(annots1:::annots2 map (_.atp)))
-        val tpe1 = if (annots1.nonEmpty) removeAttribs(tpe, MarkerCPSTypes) else tpe
-        tpe1.withAnnotation(cpsLub)
-      }
-      else tpe
-    }
-
-    /** Refine the bounds on type parameters to the given type arguments. */
-    override def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
-      if (!cpsEnabled) return bounds
-
-      val anyAtCPS = newCpsParamsMarker(NothingClass.tpe, AnyClass.tpe)
-      if (isFunctionType(tparams.head.owner.tpe) || isPartialFunctionType(tparams.head.owner.tpe)) {
-        vprintln("function bound: " + tparams.head.owner.tpe + "/"+bounds+"/"+targs)
-        if (hasCpsParamTypes(targs.last))
-          bounds.reverse match {
-            case res::b if !hasCpsParamTypes(res.hi) =>
-              (TypeBounds(res.lo, res.hi.withAnnotation(anyAtCPS))::b).reverse
-            case _ => bounds
-          }
-        else
-          bounds
-      }
-      else if (tparams.head.owner == ByNameParamClass) {
-        vprintln("byname bound: " + tparams.head.owner.tpe + "/"+bounds+"/"+targs)
-        val TypeBounds(lo, hi) = bounds.head
-        if (hasCpsParamTypes(targs.head) && !hasCpsParamTypes(hi))
-          TypeBounds(lo, hi withAnnotation anyAtCPS) :: Nil
-        else bounds
-      } else
-        bounds
-    }
-  }
-
-  object plugin extends AnalyzerPlugin {
-
-    import checker._
-
-    override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
-      if (!cpsEnabled) return false
-      vprintln("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
-
-      val annots1 = cpsParamAnnotation(tree.tpe)
-      val annots2 = cpsParamAnnotation(pt)
-
-      if ((mode & global.analyzer.PATTERNmode) != 0) {
-        //println("can adapt pattern annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
-        if (!annots1.isEmpty) {
-          return true
-        }
-      }
-
-/*
-      // not precise enough -- still relying on addAnnotations to remove things from ValDef symbols
-      if ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode) != 0) {
-        if (!annots1.isEmpty) {
-          return true
-        }
-      }
-*/
-
-/*
-      this interferes with overloading resolution
-      if ((mode & global.analyzer.BYVALmode) != 0 && tree.tpe <:< pt) {
-        vprintln("already compatible, can't adapt further")
-        return false
-      }
-*/
-      if ((mode & global.analyzer.EXPRmode) != 0) {
-        if ((annots1 corresponds annots2)(_.atp <:< _.atp)) {
-          vprintln("already same, can't adapt further")
-          false
-        } else if (annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.BYVALmode) == 0)) {
-          //println("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
-          if (!hasPlusMarker(tree.tpe)) {
-  //          val base = tree.tpe <:< removeAllCPSAnnotations(pt)
-  //          val known = global.analyzer.isFullyDefined(pt)
-  //          println(same + "/" + base + "/" + known)
-            //val same = annots2 forall { case AnnotationInfo(atp: TypeRef, _, _) => atp.typeArgs(0) =:= atp.typeArgs(1) }
-            // TBD: use same or not?
-            //if (same) {
-              vprintln("yes we can!! (unit)")
-              true
-            //}
-          } else false
-        } else if (!hasPlusMarker(tree.tpe) && annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.RETmode) != 0)) {
-          vprintln("checking enclosing method's result type without annotations")
-          tree.tpe <:< pt.withoutAnnotations
-        } else if (!hasMinusMarker(tree.tpe) && !annots1.isEmpty && ((mode & global.analyzer.BYVALmode) != 0)) {
-          val optCpsTypes: Option[(Type, Type)]         = cpsParamTypes(tree.tpe)
-          val optExpectedCpsTypes: Option[(Type, Type)] = cpsParamTypes(pt)
-          if (optCpsTypes.isEmpty || optExpectedCpsTypes.isEmpty) {
-            vprintln("yes we can!! (byval)")
-            true
-          } else { // check cps param types
-            val cpsTpes = optCpsTypes.get
-            val cpsPts  = optExpectedCpsTypes.get
-            // class cpsParam[-B,+C], therefore:
-            cpsPts._1 <:< cpsTpes._1 && cpsTpes._2 <:< cpsPts._2
-          }
-        } else false
-      } else false
-    }
-
-    override def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
-      if (!cpsEnabled) return tree
-
-      vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + modeString(mode) + " / " + pt)
-
-      val patMode   = (mode & global.analyzer.PATTERNmode) != 0
-      val exprMode  = (mode & global.analyzer.EXPRmode) != 0
-      val byValMode = (mode & global.analyzer.BYVALmode) != 0
-      val retMode   = (mode & global.analyzer.RETmode) != 0
-
-      val annotsTree     = cpsParamAnnotation(tree.tpe)
-      val annotsExpected = cpsParamAnnotation(pt)
-
-      // not sure I rephrased this comment correctly:
-      // replacing `patMode` in the condition below by `patMode || ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode))`
-      // doesn't work correctly -- still relying on addAnnotations to remove things from ValDef symbols
-      if (patMode && !annotsTree.isEmpty) tree modifyType removeAllCPSAnnotations
-      else if (exprMode && !byValMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) { // shiftUnit
-        // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
-        // tree will look like having any possible annotation
-        //println("adapt annotations " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
-
-        // CAVEAT:
-        //  for monomorphic answer types we want to have @plus @cps (for better checking)
-        //  for answer type modification we want to have only @plus (because actual answer type may differ from pt)
-
-        val res = tree modifyType (_ withAnnotations newPlusMarker() :: annotsExpected) // needed for #1807
-        vprintln("adapted annotations (not by val) of " + tree + " to " + res.tpe)
-        res
-      } else if (exprMode && byValMode && !hasMinusMarker(tree.tpe) && annotsTree.nonEmpty) { // dropping annotation
-        // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
-        // tree will look like having no annotation
-        val res = tree modifyType addMinusMarker
-        vprintln("adapted annotations (by val) of " + tree + " to " + res.tpe)
-        res
-      } else if (retMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) {
-        // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
-        // tree will look like having any possible annotation
-        
-        // note 1: we are only adding a plus marker if the method's result type is a cps type
-        //         (annotsExpected.nonEmpty == cpsParamAnnotation(pt).nonEmpty)
-        // note 2: we are not adding the expected cps annotations, since they will be added
-        //         by adaptTypeOfReturn (see below).
-        val res = tree modifyType (_ withAnnotations List(newPlusMarker()))
-        vprintln("adapted annotations (return) of " + tree + " to " + res.tpe)
-        res
-      } else tree
-    }
-
-    /** Returns an adapted type for a return expression if the method's result type (pt) is a CPS type.
-     *  Otherwise, it returns the `default` type (`typedReturn` passes `NothingClass.tpe`).
-     *  
-     *  A return expression in a method that has a CPS result type is an error unless the return
-     *  is in tail position. Therefore, we are making sure that only the types of return expressions
-     *  are adapted which will either be removed, or lead to an error.
-     */
-    override def pluginsTypedReturn(default: Type, typer: Typer, tree: Return, pt: Type): Type = {
-      val expr = tree.expr
-      // only adapt if method's result type (pt) is cps type
-      val annots = cpsParamAnnotation(pt)
-      if (annots.nonEmpty) {
-        // return type of `expr` without plus marker, but only if it doesn't have other cps annots
-        if (hasPlusMarker(expr.tpe) && !hasCpsParamTypes(expr.tpe))
-          expr.setType(removeAttribs(expr.tpe, MarkerCPSAdaptPlus))
-        expr.tpe
-      } else default
-    }
-
-    def updateAttributesFromChildren(tpe: Type, childAnnots: List[AnnotationInfo], byName: List[Tree]): Type = {
-      tpe match {
-        // Would need to push annots into each alternative of overloaded type
-        // But we can't, since alternatives aren't types but symbols, which we
-        // can't change (we'd be affecting symbols globally)
-        /*
-        case OverloadedType(pre, alts) =>
-          OverloadedType(pre, alts.map((sym: Symbol) => updateAttributes(pre.memberType(sym), annots)))
-        */
-        case OverloadedType(pre, alts) => tpe   //reconstruct correct annotations later
-        case MethodType(params, restpe) => tpe
-        case PolyType(params, restpe) => tpe
-        case _ =>
-          assert(childAnnots forall (_ matches MarkerCPSTypes), childAnnots)
-          /*
-            [] + [] = []
-            plus + [] = plus
-            cps + [] = cps
-            plus cps + [] = plus cps
-            minus cps + [] = minus cps
-            synth cps + [] = synth cps // <- synth on left - does it happen?
-
-            [] + cps = cps
-            plus + cps = synth cps
-            cps + cps = cps! <- lin
-            plus cps + cps = synth cps! <- unify
-            minus cps + cps = minus cps! <- lin
-            synth cps + cps = synth cps! <- unify
-          */
-
-          val plus = hasPlusMarker(tpe) || (
-               hasCpsParamTypes(tpe)
-            && byName.nonEmpty
-            && (byName forall (t => hasPlusMarker(t.tpe)))
-          )
-
-          // move @plus annotations outward from by-name children
-          if (childAnnots.isEmpty) return {
-            if (plus) { // @plus or @plus @cps
-              byName foreach (_ modifyType cleanPlus)
-              addPlusMarker(tpe)
-            }
-            else tpe
-          }
-
-          val annots1 = cpsParamAnnotation(tpe)
-
-          if (annots1.isEmpty) { // nothing or @plus
-            cleanPlusWith(tpe)(newSynthMarker(), linearize(childAnnots))
-          }
-          else {
-            val annot1 = single(annots1)
-            if (plus) { // @plus @cps
-              val annot2 = linearize(childAnnots)
-
-              if (annot2.atp <:< annot1.atp) {
-                try cleanPlusWith(tpe)(newSynthMarker(), annot2)
-                finally byName foreach (_ modifyType cleanPlus)
-              }
-              else throw new TypeError(annot2 + " is not a subtype of " + annot1)
-            }
-            else if (hasSynthMarker(tpe)) { // @synth @cps
-              val annot2 = linearize(childAnnots)
-              if (annot2.atp <:< annot1.atp)
-                cleanPlusWith(tpe)(annot2)
-              else
-                throw new TypeError(annot2 + " is not a subtype of " + annot1)
-            }
-            else // @cps
-              cleanPlusWith(tpe)(linearize(childAnnots:::annots1))
-          }
-      }
-    }
-
-    def transArgList(fun: Tree, args: List[Tree]): List[List[Tree]] = {
-      val formals = fun.tpe.paramTypes
-      val overshoot = args.length - formals.length
-
-      for ((a,tp) <- args.zip(formals ::: List.fill(overshoot)(NoType))) yield {
-        tp match {
-          case TypeRef(_, ByNameParamClass, List(elemtp)) =>
-            Nil // TODO: check conformance??
-          case _ =>
-            List(a)
-        }
-      }
-    }
-
-
-    def transStms(stms: List[Tree]): List[Tree] = stms match {
-      case ValDef(mods, name, tpt, rhs)::xs =>
-        rhs::transStms(xs)
-      case Assign(lhs, rhs)::xs =>
-        rhs::transStms(xs)
-      case x::xs =>
-        x::transStms(xs)
-      case Nil =>
-        Nil
-    }
-
-    def single(xs: List[AnnotationInfo]) = xs match {
-      case List(x) => x
-      case _ =>
-        global.globalError("not a single cps annotation: " + xs)
-        xs(0)
-    }
-    
-    def emptyOrSingleList(xs: List[AnnotationInfo]) = if (xs.isEmpty) Nil else List(single(xs))
-
-    def transChildrenInOrder(tree: Tree, tpe: Type, childTrees: List[Tree], byName: List[Tree]) = {
-      def inspect(t: Tree): List[AnnotationInfo] = {
-        if (t.tpe eq null) Nil else {
-          val extra: List[AnnotationInfo] = t.tpe match {
-            case _: MethodType | _: PolyType | _: OverloadedType =>
-              // method types, poly types and overloaded types do not obtain cps annotations by propagation
-              // need to reconstruct transitively from their children.
-              t match {
-                case Select(qual, name) => inspect(qual)
-                case Apply(fun, args) => (fun::(transArgList(fun,args).flatten)) flatMap inspect
-                case TypeApply(fun, args) => (fun::(transArgList(fun,args).flatten)) flatMap inspect
-                case _ => Nil
-              }
-            case _ => Nil
-          }
-
-          val types = cpsParamAnnotation(t.tpe)
-          // TODO: check that it has been adapted and if so correctly
-          extra ++ emptyOrSingleList(types)
-        }
-      }
-      val children = childTrees flatMap inspect
-
-      val newtpe = updateAttributesFromChildren(tpe, children, byName)
-
-      if (!newtpe.annotations.isEmpty)
-        vprintln("[checker] inferred " + tree + " / " + tpe + " ===> "+ newtpe)
-
-      newtpe
-    }
-
-    /** Modify the type that has thus far been inferred
-     *  for a tree.  All this should do is add annotations. */
-
-    override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
-      import scala.util.control._
-      if (!cpsEnabled) {
-        if (Exception.failAsValue(classOf[MissingRequirementError])(false)(hasCpsParamTypes(tpe)))
-          global.reporter.error(tree.pos, "this code must be compiled with the Scala continuations plugin enabled")
-        return tpe
-      }
-
-//      if (tree.tpe.hasAnnotation(MarkerCPSAdaptPlus))
-//        println("addAnnotation " + tree + "/" + tpe)
-
-      tree match {
-
-        case Apply(fun @ Select(qual, name), args) if fun.isTyped =>
-
-          // HACK: With overloaded methods, fun will never get annotated. This is because
-          // the 'overloaded' type gets annotated, but not the alternatives (among which
-          // fun's type is chosen)
-
-          vprintln("[checker] checking select apply " + tree + "/" + tpe)
-
-          transChildrenInOrder(tree, tpe, qual::(transArgList(fun, args).flatten), Nil)
-
-        case Apply(TypeApply(fun @ Select(qual, name), targs), args) if fun.isTyped => // not trigge
-
-          vprintln("[checker] checking select apply type-apply " + tree + "/" + tpe)
-
-          transChildrenInOrder(tree, tpe, qual::(transArgList(fun, args).flatten), Nil)
-
-        case TypeApply(fun @ Select(qual, name), args) if fun.isTyped =>
-          def stripNullaryMethodType(tp: Type) = tp match { case NullaryMethodType(restpe) => restpe case tp => tp }
-          vprintln("[checker] checking select type-apply " + tree + "/" + tpe)
-
-          transChildrenInOrder(tree, stripNullaryMethodType(tpe), List(qual, fun), Nil)
-
-        case Apply(fun, args) if fun.isTyped =>
-
-          vprintln("[checker] checking unknown apply " + tree + "/" + tpe)
-
-          transChildrenInOrder(tree, tpe, fun::(transArgList(fun, args).flatten), Nil)
-
-        case TypeApply(fun, args) =>
-
-          vprintln("[checker] checking unknown type apply " + tree + "/" + tpe)
-
-          transChildrenInOrder(tree, tpe, List(fun), Nil)
-
-        case Select(qual, name) if qual.isTyped =>
-
-          vprintln("[checker] checking select " + tree + "/" + tpe)
-
-          // straightforward way is problematic (see select.scala and Test2.scala)
-          // transChildrenInOrder(tree, tpe, List(qual), Nil)
-
-          // the problem is that qual may be of type OverloadedType (or MethodType) and
-          // we cannot safely annotate these. so we just ignore these cases and
-          // clean up later in the Apply/TypeApply trees.
-
-          if (hasCpsParamTypes(qual.tpe)) {
-            // however there is one special case:
-            // if it's a method without parameters, just apply it. normally done in adapt, but
-            // we have to do it here so we don't lose the cps information (wouldn't trigger our
-            // adapt and there is no Apply/TypeApply created)
-            tpe match {
-              case NullaryMethodType(restpe) =>
-                //println("yep: " + restpe + "," + restpe.getClass)
-                transChildrenInOrder(tree, restpe, List(qual), Nil)
-              case _ : PolyType => tpe
-              case _ : MethodType => tpe
-              case _ : OverloadedType => tpe
-              case _ =>
-                transChildrenInOrder(tree, tpe, List(qual), Nil)
-            }
-          } else
-            tpe
-
-        case If(cond, thenp, elsep) =>
-          transChildrenInOrder(tree, tpe, List(cond), List(thenp, elsep))
-
-        case Match(select, cases) =>
-          transChildrenInOrder(tree, tpe, List(select), cases:::(cases map { case CaseDef(_, _, body) => body }))
-
-        case Try(block, catches, finalizer) =>
-          val tpe1 = transChildrenInOrder(tree, tpe, Nil, block::catches:::(catches map { case CaseDef(_, _, body) => body }))
-
-          val annots = cpsParamAnnotation(tpe1)
-          if (annots.nonEmpty) {
-            val ann = single(annots)
-            val (atp0, atp1) = annTypes(ann)
-            if (!(atp0 =:= atp1))
-              throw new TypeError("only simple cps types allowed in try/catch blocks (found: " + tpe1 + ")")
-            if (!finalizer.isEmpty) // no finalizers allowed. see explanation in SelectiveCPSTransform
-              reporter.error(tree.pos, "try/catch blocks that use continuations cannot have finalizers")
-          }
-          tpe1
-
-        case Block(stms, expr) =>
-          // if any stm has annotation, so does block
-          transChildrenInOrder(tree, tpe, transStms(stms), List(expr))
-
-        case ValDef(mods, name, tpt, rhs) =>
-          vprintln("[checker] checking valdef " + name + "/"+tpe+"/"+tpt+"/"+tree.symbol.tpe)
-          // ValDef symbols must *not* have annotations!
-          // lazy vals are currently not supported
-          // but if we erase here all annotations, compiler will complain only
-          // when generating bytecode.
-          // This way lazy vals will be reported as unsupported feature later rather than weird type error.
-          if (hasAnswerTypeAnn(tree.symbol.info) && !mods.isLazy) { // is it okay to modify sym here?
-            vprintln("removing annotation from sym " + tree.symbol + "/" + tree.symbol.tpe + "/" + tpt)
-            tpt modifyType removeAllCPSAnnotations
-            tree.symbol modifyInfo removeAllCPSAnnotations
-          }
-          tpe
-
-        case _ =>
-          tpe
-      }
-
-
-    }
-  }
-}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
deleted file mode 100644
index 46c644b..0000000
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ /dev/null
@@ -1,138 +0,0 @@
-// $Id$
-
-package scala.tools.selectivecps
-
-import scala.tools.nsc.Global
-
-trait CPSUtils {
-  val global: Global
-  import global._
-  import definitions._
-
-  var cpsEnabled = false
-  val verbose: Boolean = System.getProperty("cpsVerbose", "false") == "true"
-  def vprintln(x: =>Any): Unit = if (verbose) println(x)
-
-  object cpsNames {
-    val catches         = newTermName("$catches")
-    val ex              = newTermName("$ex")
-    val flatMapCatch    = newTermName("flatMapCatch")
-    val getTrivialValue = newTermName("getTrivialValue")
-    val isTrivial       = newTermName("isTrivial")
-    val reify           = newTermName("reify")
-    val reifyR          = newTermName("reifyR")
-    val shift           = newTermName("shift")
-    val shiftR          = newTermName("shiftR")
-    val shiftSuffix     = newTermName("$shift")
-    val shiftUnit0      = newTermName("shiftUnit0")
-    val shiftUnit       = newTermName("shiftUnit")
-    val shiftUnitR      = newTermName("shiftUnitR")
-  }
-
-  lazy val MarkerCPSSym        = rootMirror.getRequiredClass("scala.util.continuations.cpsSym")
-  lazy val MarkerCPSTypes      = rootMirror.getRequiredClass("scala.util.continuations.cpsParam")
-  lazy val MarkerCPSSynth      = rootMirror.getRequiredClass("scala.util.continuations.cpsSynth")
-  lazy val MarkerCPSAdaptPlus  = rootMirror.getRequiredClass("scala.util.continuations.cpsPlus")
-  lazy val MarkerCPSAdaptMinus = rootMirror.getRequiredClass("scala.util.continuations.cpsMinus")
-
-  lazy val Context = rootMirror.getRequiredClass("scala.util.continuations.ControlContext")
-  lazy val ModCPS = rootMirror.getRequiredPackage("scala.util.continuations")
-
-  lazy val MethShiftUnit  = definitions.getMember(ModCPS, cpsNames.shiftUnit)
-  lazy val MethShiftUnit0 = definitions.getMember(ModCPS, cpsNames.shiftUnit0)
-  lazy val MethShiftUnitR = definitions.getMember(ModCPS, cpsNames.shiftUnitR)
-  lazy val MethShift      = definitions.getMember(ModCPS, cpsNames.shift)
-  lazy val MethShiftR     = definitions.getMember(ModCPS, cpsNames.shiftR)
-  lazy val MethReify      = definitions.getMember(ModCPS, cpsNames.reify)
-  lazy val MethReifyR     = definitions.getMember(ModCPS, cpsNames.reifyR)
-
-  lazy val allCPSAnnotations = List(MarkerCPSSym, MarkerCPSTypes, MarkerCPSSynth,
-    MarkerCPSAdaptPlus, MarkerCPSAdaptMinus)
-
-  // TODO - needed? Can these all use the same annotation info?
-  protected def newSynthMarker() = newMarker(MarkerCPSSynth)
-  protected def newPlusMarker()  = newMarker(MarkerCPSAdaptPlus)
-  protected def newMinusMarker() = newMarker(MarkerCPSAdaptMinus)
-  protected def newMarker(tpe: Type): AnnotationInfo = AnnotationInfo marker tpe
-  protected def newMarker(sym: Symbol): AnnotationInfo = AnnotationInfo marker sym.tpe
-
-  protected def newCpsParamsMarker(tp1: Type, tp2: Type) =
-    newMarker(appliedType(MarkerCPSTypes.tpe, List(tp1, tp2)))
-
-  // annotation checker
-
-  protected def annTypes(ann: AnnotationInfo): (Type, Type) = {
-    val tp0 :: tp1 :: Nil = ann.atp.normalize.typeArgs
-    ((tp0, tp1))
-  }
-  protected def hasMinusMarker(tpe: Type)   = tpe hasAnnotation MarkerCPSAdaptMinus
-  protected def hasPlusMarker(tpe: Type)    = tpe hasAnnotation MarkerCPSAdaptPlus
-  protected def hasSynthMarker(tpe: Type)   = tpe hasAnnotation MarkerCPSSynth
-  protected def hasCpsParamTypes(tpe: Type) = tpe hasAnnotation MarkerCPSTypes
-  protected def cpsParamTypes(tpe: Type)    = tpe getAnnotation MarkerCPSTypes map annTypes
-
-  def filterAttribs(tpe:Type, cls:Symbol) =
-    tpe.annotations filter (_ matches cls)
-
-  def removeAttribs(tpe: Type, classes: Symbol*) =
-    tpe filterAnnotations (ann => !(classes exists (ann matches _)))
-
-  def removeAllCPSAnnotations(tpe: Type) = removeAttribs(tpe, allCPSAnnotations:_*)
-
-  def cpsParamAnnotation(tpe: Type) = filterAttribs(tpe, MarkerCPSTypes)
-
-  def linearize(ann: List[AnnotationInfo]): AnnotationInfo = {
-    ann reduceLeft { (a, b) =>
-      val (u0,v0) = annTypes(a)
-      val (u1,v1) = annTypes(b)
-      // vprintln("check lin " + a + " andThen " + b)
-
-      if (v1 <:< u0)
-        newCpsParamsMarker(u1, v0)
-      else
-        throw new TypeError("illegal answer type modification: " + a + " andThen " + b)
-    }
-  }
-
-  // anf transform
-
-  def getExternalAnswerTypeAnn(tp: Type) = {
-    cpsParamTypes(tp) orElse {
-      if (hasPlusMarker(tp))
-        global.warning("trying to instantiate type " + tp + " to unknown cps type")
-      None
-    }
-  }
-
-  def getAnswerTypeAnn(tp: Type): Option[(Type, Type)] =
-    cpsParamTypes(tp) filterNot (_ => hasPlusMarker(tp))
-
-  def hasAnswerTypeAnn(tp: Type) =
-    hasCpsParamTypes(tp) && !hasPlusMarker(tp)
-
-  def updateSynthFlag(tree: Tree) = { // remove annotations if *we* added them (@synth present)
-    if (hasSynthMarker(tree.tpe)) {
-      log("removing annotation from " + tree)
-      tree modifyType removeAllCPSAnnotations
-    } else
-      tree
-  }
-
-  type CPSInfo = Option[(Type,Type)]
-
-  def linearize(a: CPSInfo, b: CPSInfo)(implicit unit: CompilationUnit, pos: Position): CPSInfo = {
-    (a,b) match {
-      case (Some((u0,v0)), Some((u1,v1))) =>
-        vprintln("check lin " + a + " andThen " + b)
-        if (!(v1 <:< u0)) {
-          unit.error(pos,"cannot change answer type in composition of cps expressions " +
-          "from " + u1 + " to " + v0 + " because " + v1 + " is not a subtype of " + u0 + ".")
-          throw new Exception("check lin " + a + " andThen " + b)
-        }
-        Some((u1,v0))
-      case (Some(_), _) => a
-      case (_, Some(_)) => b
-      case _ => None
-    }
-  }
-}
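
The two linearize methods above implement the answer-type composition rule referenced by the TODO in the deleted package object: composing `@cpsParam[B0,C0] andThen @cpsParam[B1,C1]` is legal only if C1 conforms to B0, and the combined annotation is `@cpsParam[B1,C0]`. Below is a minimal model of that rule over plain type names; it is illustrative only, since the real code works on compiler Types and uses `<:<` rather than name equality:

    object LinearizeDemo extends App {
      // A cps annotation, modeled as a pair of type names: (answer type before modification, after).
      type Ann = (String, String)

      // Stand-in for the compiler's <:< ; plain name equality is enough for the demo.
      def conforms(sub: String, sup: String): Boolean = sub == sup

      def andThen(a: Ann, b: Ann): Either[String, Ann] = (a, b) match {
        case ((u0, v0), (u1, v1)) if conforms(v1, u0) => Right((u1, v0))
        case _ => Left(s"illegal answer type modification: $a andThen $b")
      }

      println(andThen(("Unit", "Unit"), ("Unit", "Unit")))    // Right((Unit,Unit))
      println(andThen(("Int", "Unit"), ("Unit", "Int")))      // Right((Unit,Unit))
      println(andThen(("Int", "Unit"), ("Unit", "String")))   // Left(illegal answer type modification: ...)
    }
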
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
deleted file mode 100644
index 8b39bf3..0000000
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
+++ /dev/null
@@ -1,549 +0,0 @@
-// $Id$
-
-package scala.tools.selectivecps
-
-import scala.tools.nsc._
-import scala.tools.nsc.transform._
-import scala.tools.nsc.symtab._
-import scala.tools.nsc.plugins._
-
-import scala.tools.nsc.ast._
-
-/**
- * In methods marked @cps, explicitly name results of calls to other @cps methods
- */
-abstract class SelectiveANFTransform extends PluginComponent with Transform with
-  TypingTransformers with CPSUtils {
-  // inherits abstract value `global` and class `Phase` from Transform
-
-  import global._                  // the global environment
-  import definitions._             // standard classes and methods
-  import typer.atOwner             // methods to type trees
-
-  /** the following two members override abstract members in Transform */
-  val phaseName: String = "selectiveanf"
-
-  protected def newTransformer(unit: CompilationUnit): Transformer =
-    new ANFTransformer(unit)
-
-
-  class ANFTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
-
-    implicit val _unit = unit // allow code in CPSUtils.scala to report errors
-    var cpsAllowed: Boolean = false // detect cps code in places we do not handle (yet)
-
-    object RemoveTailReturnsTransformer extends Transformer {
-      override def transform(tree: Tree): Tree = tree match {
-        case Block(stms, r @ Return(expr)) =>
-          treeCopy.Block(tree, stms, expr)
-
-        case Block(stms, expr) =>
-          treeCopy.Block(tree, stms, transform(expr))
-
-        case If(cond, r1 @ Return(thenExpr), r2 @ Return(elseExpr)) =>
-          treeCopy.If(tree, cond, transform(thenExpr), transform(elseExpr))
-
-        case If(cond, r1 @ Return(thenExpr), elseExpr) =>
-          treeCopy.If(tree, cond, transform(thenExpr), transform(elseExpr))
-
-        case If(cond, thenExpr, r2 @ Return(elseExpr)) =>
-          treeCopy.If(tree, cond, transform(thenExpr), transform(elseExpr))
-
-        case If(cond, thenExpr, elseExpr) =>
-          treeCopy.If(tree, cond, transform(thenExpr), transform(elseExpr))
-
-        case Try(block, catches, finalizer) =>
-          treeCopy.Try(tree,
-                       transform(block),
-                       (catches map (t => transform(t))).asInstanceOf[List[CaseDef]],
-                       transform(finalizer))
-
-        case CaseDef(pat, guard, r @ Return(expr)) =>
-          treeCopy.CaseDef(tree, pat, guard, expr)
-
-        case CaseDef(pat, guard, body) =>
-          treeCopy.CaseDef(tree, pat, guard, transform(body))
-
-        case Return(_) =>
-          unit.error(tree.pos, "return expressions in CPS code must be in tail position")
-          tree
-
-        case _ =>
-          super.transform(tree)
-      }
-    }
-
-    def removeTailReturns(body: Tree): Tree = {
-      // support body with single return expression
-      body match {
-        case Return(expr) => expr
-        case _ => RemoveTailReturnsTransformer.transform(body)
-      }
-    }
-
-    override def transform(tree: Tree): Tree = {
-      if (!cpsEnabled) return tree
-
-      tree match {
-
-        // Maybe we should further generalize the transform and move it over
-        // to the regular Transformer facility. But then, actual and required cps
-        // state would need more complicated (stateful!) tracking.
-
-        // Making the default case use transExpr(tree, None, None) instead of
-        // calling super.transform() would be a start, but at the moment,
-        // this would cause infinite recursion. But we could remove the
-        // ValDef case here.
-
-        case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs0) =>
-          debuglog("transforming " + dd.symbol)
-
-          atOwner(dd.symbol) {
-            val rhs =
-              if (cpsParamTypes(tpt.tpe).nonEmpty) removeTailReturns(rhs0)
-              else rhs0
-            val rhs1 = transExpr(rhs, None, getExternalAnswerTypeAnn(tpt.tpe))(getExternalAnswerTypeAnn(tpt.tpe).isDefined)
-
-            debuglog("result "+rhs1)
-            debuglog("result is of type "+rhs1.tpe)
-
-            treeCopy.DefDef(dd, mods, name, transformTypeDefs(tparams), transformValDefss(vparamss),
-                        transform(tpt), rhs1)
-          }
-
-        case ff @ Function(vparams, body) =>
-          debuglog("transforming anon function " + ff.symbol)
-
-          atOwner(ff.symbol) {
-
-            //val body1 = transExpr(body, None, getExternalAnswerTypeAnn(body.tpe))
-
-            // need to special case partial functions: if expected type is @cps
-            // but all cases are pure, then we would transform
-            // { x => x match { case A => ... }} to
-            // { x => shiftUnit(x match { case A => ... })}
-            // which Uncurry cannot handle (see function6.scala)
-            // thus, we push down the shiftUnit to each of the case bodies
-
-            val ext = getExternalAnswerTypeAnn(body.tpe)
-            val pureBody = getAnswerTypeAnn(body.tpe).isEmpty
-            implicit val isParentImpure = ext.isDefined
-
-            def transformPureMatch(tree: Tree, selector: Tree, cases: List[CaseDef]) = {
-              val caseVals = cases map { case cd @ CaseDef(pat, guard, body) =>
-                // if (!hasPlusMarker(body.tpe)) body.tpe = body.tpe withAnnotation newPlusMarker() // TODO: to avoid warning
-                val bodyVal = transExpr(body, None, ext) // ??? triggers "cps-transformed unexpectedly" warning in transTailValue
-                treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
-              }
-              treeCopy.Match(tree, transform(selector), caseVals)
-            }
-
-            def transformPureVirtMatch(body: Block, selDef: ValDef, cases: List[Tree], matchEnd: Tree) = {
-              val stats = transform(selDef) :: (cases map (transExpr(_, None, ext)))
-              treeCopy.Block(body, stats, transExpr(matchEnd, None, ext))
-            }
-
-            val body1 = body match {
-              case Match(selector, cases) if ext.isDefined && pureBody =>
-                transformPureMatch(body, selector, cases)
-
-              // virtpatmat switch
-              case Block(List(selDef: ValDef), mat@Match(selector, cases)) if ext.isDefined && pureBody =>
-                treeCopy.Block(body, List(transform(selDef)), transformPureMatch(mat, selector, cases))
-
-              // virtpatmat
-              case b@Block(matchStats@((selDef: ValDef) :: cases), matchEnd) if ext.isDefined && pureBody && (matchStats forall treeInfo.hasSynthCaseSymbol) =>
-                transformPureVirtMatch(b, selDef, cases, matchEnd)
-
-              // virtpatmat that stores the scrut separately -- TODO: can we eliminate this case??
-              case Block(List(selDef0: ValDef), mat@Block(matchStats@((selDef: ValDef) :: cases), matchEnd)) if ext.isDefined && pureBody  && (matchStats forall treeInfo.hasSynthCaseSymbol)=>
-                treeCopy.Block(body, List(transform(selDef0)), transformPureVirtMatch(mat, selDef, cases, matchEnd))
-
-              case _ =>
-                transExpr(body, None, ext)
-            }
-
-            debuglog("anf result "+body1+"\nresult is of type "+body1.tpe)
-
-            treeCopy.Function(ff, transformValDefs(vparams), body1)
-          }
-
-        case vd @ ValDef(mods, name, tpt, rhs) => // object-level valdefs
-          debuglog("transforming valdef " + vd.symbol)
-
-          if (getExternalAnswerTypeAnn(tpt.tpe).isEmpty) {
-            
-            atOwner(vd.symbol) {
-
-              val rhs1 = transExpr(rhs, None, None)
-
-              treeCopy.ValDef(vd, mods, name, transform(tpt), rhs1)
-            }
-          } else {
-            unit.error(tree.pos, "cps annotations not allowed on by-value parameters or value definitions")
-            super.transform(tree)
-          }
-
-        case TypeTree() =>
-          // circumvent cpsAllowed here
-          super.transform(tree)
-
-        case Apply(_,_) =>
-          // this allows reset { ... } in object constructors
-          // it's kind of a hack to put it here (see note above)
-          transExpr(tree, None, None)
-
-        case _ =>
-          if (hasAnswerTypeAnn(tree.tpe)) {
-            if (!cpsAllowed) {
-              if (tree.symbol.isLazy)
-                unit.error(tree.pos, "implementation restriction: cps annotations not allowed on lazy value definitions")
-              else
-                unit.error(tree.pos, "cps code not allowed here / " + tree.getClass + " / " + tree)
-            }
-            log(tree)
-          }
-
-          cpsAllowed = false
-          super.transform(tree)
-      }
-    }
-
-
-    def transExpr(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo)(implicit isAnyParentImpure: Boolean = false): Tree = {
-      transTailValue(tree, cpsA, cpsR)(cpsR.isDefined || isAnyParentImpure) match {
-        case (Nil, b) => b
-        case (a, b) =>
-          treeCopy.Block(tree, a,b)
-      }
-    }
-
-
-    def transArgList(fun: Tree, args: List[Tree], cpsA: CPSInfo)(implicit isAnyParentImpure: Boolean): (List[List[Tree]], List[Tree], CPSInfo) = {
-      val formals = fun.tpe.paramTypes
-      val overshoot = args.length - formals.length
-
-      var spc: CPSInfo = cpsA
-
-      val (stm,expr) = (for ((a,tp) <- args.zip(formals ::: List.fill(overshoot)(NoType))) yield {
-        tp match {
-          case TypeRef(_, ByNameParamClass, List(elemtp)) =>
-            // note that we're not passing just isAnyParentImpure
-            (Nil, transExpr(a, None, getAnswerTypeAnn(elemtp))(getAnswerTypeAnn(elemtp).isDefined || isAnyParentImpure))
-          case _ =>
-            val (valStm, valExpr, valSpc) = transInlineValue(a, spc)
-            spc = valSpc
-            (valStm, valExpr)
-        }
-      }).unzip
-
-      (stm,expr,spc)
-    }
-
-
-    // precondition: cpsR.isDefined "implies" isAnyParentImpure
-    def transValue(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo)(implicit isAnyParentImpure: Boolean): (List[Tree], Tree, CPSInfo) = {
-      // return value: (stms, expr, spc), where spc is CPSInfo after stms but *before* expr
-      implicit val pos = tree.pos
-      tree match {
-        case Block(stms, expr) =>
-          val (cpsA2, cpsR2) = (cpsA, linearize(cpsA, getAnswerTypeAnn(tree.tpe))) // tbd
-          //          val (cpsA2, cpsR2) = (None, getAnswerTypeAnn(tree.tpe))
-
-          val (a, b) = transBlock(stms, expr, cpsA2, cpsR2)(cpsR2.isDefined || isAnyParentImpure)
-          val tree1  = (treeCopy.Block(tree, a, b)) // no updateSynthFlag here!!!
-
-          (Nil, tree1, cpsA)
-
-        case If(cond, thenp, elsep) =>
-          /* possible situations:
-          cps before (cpsA)
-          cps in condition (spc)  <-- synth flag set if *only* here!
-          cps in (one or both) branches */
-          val (condStats, condVal, spc) = transInlineValue(cond, cpsA)
-          val (cpsA2, cpsR2) = if (hasSynthMarker(tree.tpe))
-            (spc, linearize(spc, getAnswerTypeAnn(tree.tpe))) else
-            (None, getAnswerTypeAnn(tree.tpe)) // if no cps in condition, branches must conform to tree.tpe directly
-          val thenVal = transExpr(thenp, cpsA2, cpsR2)(cpsR2.isDefined || isAnyParentImpure)
-          val elseVal = transExpr(elsep, cpsA2, cpsR2)(cpsR2.isDefined || isAnyParentImpure)
-
-          // check that then and else parts agree (not necessary any more, but left as sanity check)
-          if (cpsR.isDefined) {
-            if (elsep == EmptyTree)
-              unit.error(tree.pos, "always need else part in cps code")
-          }
-          if (hasAnswerTypeAnn(thenVal.tpe) != hasAnswerTypeAnn(elseVal.tpe)) {
-            unit.error(tree.pos, "then and else parts must both be cps code or neither of them")
-          }
-
-          (condStats, updateSynthFlag(treeCopy.If(tree, condVal, thenVal, elseVal)), spc)
-
-        case Match(selector, cases) =>
-          val (selStats, selVal, spc) = transInlineValue(selector, cpsA)
-          val (cpsA2, cpsR2) =
-            if (hasSynthMarker(tree.tpe)) (spc, linearize(spc, getAnswerTypeAnn(tree.tpe)))
-            else (None, getAnswerTypeAnn(tree.tpe))
-
-          val caseVals = cases map { case cd @ CaseDef(pat, guard, body) =>
-            val bodyVal = transExpr(body, cpsA2, cpsR2)(cpsR2.isDefined || isAnyParentImpure)
-            treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
-          }
-
-          (selStats, updateSynthFlag(treeCopy.Match(tree, selVal, caseVals)), spc)
-
-        // this is utterly broken: LabelDefs need to be considered together when transforming them to DefDefs:
-        // suppose a Block {L1; ... ; LN}
-        // this should become {D1def ; ... ; DNdef ; D1()}
-        // where D$idef = def L$i(..) = {L$i.body; L${i+1}(..)}
-
-        case ldef @ LabelDef(name, params, rhs) =>
-          // println("trans LABELDEF "+(name, params, tree.tpe, hasAnswerTypeAnn(tree.tpe)))
-          // TODO why does the labeldef's type have a cpsMinus annotation, whereas the rhs does not? (BYVALmode missing/too much somewhere?)
-          if (hasAnswerTypeAnn(tree.tpe)) {
-            // currentOwner.newMethod(name, tree.pos, Flags.SYNTHETIC) setInfo ldef.symbol.info
-            val sym    = ldef.symbol resetFlag Flags.LABEL
-            val rhs1   = rhs //new TreeSymSubstituter(List(ldef.symbol), List(sym)).transform(rhs)
-            val rhsVal = transExpr(rhs1, None, getAnswerTypeAnn(tree.tpe))(getAnswerTypeAnn(tree.tpe).isDefined || isAnyParentImpure) changeOwner (currentOwner -> sym)
-
-            val stm1 = localTyper.typed(DefDef(sym, rhsVal))
-            // since virtpatmat does not rely on fall-through, don't call the labels it emits
-            // transBlock will take care of calling the first label
-            // calling each labeldef is wrong, since some labels may be jumped over
-            // we can get away with this for now since the only other labels we emit are for tailcalls/while loops,
-            // which do not have consecutive labeldefs (and thus fall-through is irrelevant)
-            if (treeInfo.hasSynthCaseSymbol(ldef)) (List(stm1), localTyper.typed{Literal(Constant(()))}, cpsA)
-            else {
-              assert(params.isEmpty, "problem in ANF transforming label with non-empty params "+ ldef)
-              (List(stm1), localTyper.typed{Apply(Ident(sym), List())}, cpsA)
-            }
-          } else {
-            val rhsVal = transExpr(rhs, None, None)
-            (Nil, updateSynthFlag(treeCopy.LabelDef(tree, name, params, rhsVal)), cpsA)
-          }
-
-
-        case Try(block, catches, finalizer) =>
-          val blockVal = transExpr(block, cpsA, cpsR)
-
-          val catchVals = for {
-            cd @ CaseDef(pat, guard, body) <- catches
-            bodyVal = transExpr(body, cpsA, cpsR)
-          } yield {
-            treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
-          }
-
-          val finallyVal = transExpr(finalizer, None, None) // for now, no cps in finally
-
-          (Nil, updateSynthFlag(treeCopy.Try(tree, blockVal, catchVals, finallyVal)), cpsA)
-
-        case Assign(lhs, rhs) =>
-          // allow cps code in rhs only
-          val (stms, expr, spc) = transInlineValue(rhs, cpsA)
-          (stms, updateSynthFlag(treeCopy.Assign(tree, transform(lhs), expr)), spc)
-
-        case Return(expr0) =>
-          if (isAnyParentImpure)
-            unit.error(tree.pos, "return expression not allowed, since method calls CPS method")
-          val (stms, expr, spc) = transInlineValue(expr0, cpsA)
-          (stms, updateSynthFlag(treeCopy.Return(tree, expr)), spc)
-
-        case Throw(expr0) =>
-          val (stms, expr, spc) = transInlineValue(expr0, cpsA)
-          (stms, updateSynthFlag(treeCopy.Throw(tree, expr)), spc)
-
-        case Typed(expr0, tpt) =>
-          // TODO: should x: A @cps[B,C] have a special meaning?
-          // type casts used in different ways (see match2.scala, #3199)
-          val (stms, expr, spc) = transInlineValue(expr0, cpsA)
-          val tpt1 = if (treeInfo.isWildcardStarArg(tree)) tpt else
-            treeCopy.TypeTree(tpt).setType(removeAllCPSAnnotations(tpt.tpe))
-//        (stms, updateSynthFlag(treeCopy.Typed(tree, expr, tpt1)), spc)
-          (stms, treeCopy.Typed(tree, expr, tpt1).setType(removeAllCPSAnnotations(tree.tpe)), spc)
-
-        case TypeApply(fun, args) =>
-          val (stms, expr, spc) = transInlineValue(fun, cpsA)
-          (stms, updateSynthFlag(treeCopy.TypeApply(tree, expr, args)), spc)
-
-        case Select(qual, name) =>
-          val (stms, expr, spc) = transInlineValue(qual, cpsA)
-          (stms, updateSynthFlag(treeCopy.Select(tree, expr, name)), spc)
-
-        case Apply(fun, args) =>
-          val (funStm, funExpr, funSpc) = transInlineValue(fun, cpsA)
-          val (argStm, argExpr, argSpc) = transArgList(fun, args, funSpc)
-
-          (funStm ::: (argStm.flatten), updateSynthFlag(treeCopy.Apply(tree, funExpr, argExpr)),
-            argSpc)
-
-        case _ =>
-          cpsAllowed = true
-          (Nil, transform(tree), cpsA)
-      }
-    }
-
-    // precondition: cpsR.isDefined "implies" isAnyParentImpure
-    def transTailValue(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo)(implicit isAnyParentImpure: Boolean): (List[Tree], Tree) = {
-
-      val (stms, expr, spc) = transValue(tree, cpsA, cpsR)
-
-      val bot = linearize(spc, getAnswerTypeAnn(expr.tpe))(unit, tree.pos)
-
-      val plainTpe = removeAllCPSAnnotations(expr.tpe)
-
-      if (cpsR.isDefined && !bot.isDefined) {
-
-        if (!expr.isEmpty && (expr.tpe.typeSymbol ne NothingClass)) {
-          // must convert!
-          debuglog("cps type conversion (has: " + cpsA + "/" + spc + "/" + expr.tpe  + ")")
-          debuglog("cps type conversion (expected: " + cpsR.get + "): " + expr)
-
-          if (!hasPlusMarker(expr.tpe))
-            unit.warning(tree.pos, "expression " + tree + " is cps-transformed unexpectedly")
-
-          try {
-            val Some((a, b)) = cpsR
-            /** Since shiftUnit is bounded [A,B,C>:B] this may not typecheck
-             *  if C is overly specific.  So if !(B <:< C), call shiftUnit0
-             *  instead, which takes only two type arguments.
-             */
-            val conforms = a <:< b
-            val call = localTyper.typedPos(tree.pos)(
-              Apply(
-                TypeApply(
-                  gen.mkAttributedRef( if (conforms) MethShiftUnit else MethShiftUnit0 ),
-                  List(TypeTree(plainTpe), TypeTree(a)) ++ ( if (conforms) List(TypeTree(b)) else Nil )
-                ),
-                List(expr)
-              )
-            )
-            // This is today's sick/meaningless heuristic for spotting breakdown so
-            // we don't proceed until stack traces start draping themselves over everything.
-            // If there are wildcard types in the tree and B == Nothing, something went wrong.
-            // (I thought WildcardTypes would be enough, but nope.  'reset0 { 0 }' has them.)
-            //
-            // Code as simple as    reset((_: String).length)
-            // will crash meaninglessly without this check.  See SI-3718.
-            //
-            // TODO - obviously this should be done earlier, differently, or with
-            // a more skilled hand.  Most likely, all three.
-            if ((b.typeSymbol eq NothingClass) && call.tpe.exists(_ eq WildcardType))
-              unit.error(tree.pos, "cannot cps-transform malformed (possibly in shift/reset placement) expression")
-            else
-              return ((stms, call))
-          }
-          catch {
-            case ex:TypeError =>
-              unit.error(ex.pos, "cannot cps-transform expression " + tree + ": " + ex.msg)
-          }
-        }
-
-      } else if (!cpsR.isDefined && bot.isDefined) {
-        // error!
-        debuglog("cps type error: " + expr)
-        //println("cps type error: " + expr + "/" + expr.tpe + "/" + getAnswerTypeAnn(expr.tpe))
-
-        //println(cpsR + "/" + spc + "/" + bot)
-
-        unit.error(tree.pos, "found cps expression in non-cps position")
-      } else {
-        // all is well
-
-        if (hasPlusMarker(expr.tpe)) {
-          unit.warning(tree.pos, "expression " + expr + " of type " + expr.tpe + " is not expected to have a cps type")
-          expr modifyType removeAllCPSAnnotations
-        }
-
-        // TODO: sanity check that types agree
-      }
-
-      (stms, expr)
-    }
-
-    def transInlineValue(tree: Tree, cpsA: CPSInfo)(implicit isAnyParentImpure: Boolean): (List[Tree], Tree, CPSInfo) = {
-
-      val (stms, expr, spc) = transValue(tree, cpsA, None) // never required to be cps
-
-      getAnswerTypeAnn(expr.tpe) match {
-        case spcVal @ Some(_) =>
-
-          val valueTpe = removeAllCPSAnnotations(expr.tpe)
-
-          val sym: Symbol = (
-            currentOwner.newValue(newTermName(unit.fresh.newName("tmp")), tree.pos, Flags.SYNTHETIC)
-              setInfo valueTpe
-              setAnnotations List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil))
-          )
-          expr.changeOwner(currentOwner -> sym)
-
-          (stms ::: List(ValDef(sym, expr) setType(NoType)),
-             Ident(sym) setType(valueTpe) setPos(tree.pos), linearize(spc, spcVal)(unit, tree.pos))
-
-        case _ =>
-          (stms, expr, spc)
-      }
-
-    }
-
-
-
-    def transInlineStm(stm: Tree, cpsA: CPSInfo)(implicit isAnyParentImpure: Boolean):  (List[Tree], CPSInfo) = {
-      stm match {
-
-        // TODO: what about DefDefs?
-        // TODO: relation to top-level val def?
-        // TODO: what about lazy vals?
-
-        case tree @ ValDef(mods, name, tpt, rhs) =>
-          val (stms, anfRhs, spc) = atOwner(tree.symbol) { transValue(rhs, cpsA, None) }
-
-          val tv = new ChangeOwnerTraverser(tree.symbol, currentOwner)
-          stms.foreach(tv.traverse(_))
-
-          // TODO: symbol might already have annotation. Should check conformance
-          // TODO: better yet: do without annotations on symbols
-
-          val spcVal = getAnswerTypeAnn(anfRhs.tpe)
-          if (spcVal.isDefined) {
-              tree.symbol.setAnnotations(List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil)))
-          }
-
-          (stms:::List(treeCopy.ValDef(tree, mods, name, tpt, anfRhs)), linearize(spc, spcVal)(unit, tree.pos))
-
-        case _ =>
-          val (headStms, headExpr, headSpc) = transInlineValue(stm, cpsA)
-          val valSpc = getAnswerTypeAnn(headExpr.tpe)
-          (headStms:::List(headExpr), linearize(headSpc, valSpc)(unit, stm.pos))
-      }
-    }
-
-    // precondition: cpsR.isDefined "implies" isAnyParentImpure
-    def transBlock(stms: List[Tree], expr: Tree, cpsA: CPSInfo, cpsR: CPSInfo)(implicit isAnyParentImpure: Boolean): (List[Tree], Tree) = {
-      def rec(currStats: List[Tree], currAns: CPSInfo, accum: List[Tree]): (List[Tree], Tree) =
-        currStats match {
-          case Nil =>
-            val (anfStats, anfExpr) = transTailValue(expr, currAns, cpsR)
-            (accum ++ anfStats, anfExpr)
-
-          case stat :: rest =>
-            val (stats, nextAns) = transInlineStm(stat, currAns)
-            rec(rest, nextAns, accum ++ stats)
-         }
-
-      val (anfStats, anfExpr) = rec(stms, cpsA, List())
-      // println("\nanf-block:\n"+ ((stms :+ expr) mkString ("{", "\n", "}")) +"\nBECAME\n"+ ((anfStats :+ anfExpr) mkString ("{", "\n", "}")))
-      // println("synth case? "+ (anfStats map (t => (t, t.isDef, treeInfo.hasSynthCaseSymbol(t)))))
-      // SUPER UGLY HACK: handle virtpatmat-style matches, whose labels have already been turned into DefDefs
-      if (anfStats.nonEmpty && (anfStats forall (t => !t.isDef || treeInfo.hasSynthCaseSymbol(t)))) {
-        val (prologue, rest) = (anfStats :+ anfExpr) span (s => !s.isInstanceOf[DefDef]) // find first case
-        // println("rest: "+ rest)
-        // val (defs, calls) = rest partition (_.isInstanceOf[DefDef])
-        if (rest.nonEmpty) {
-          // the filter drops the ()'s emitted when transValue encountered a LabelDef
-          val stats = prologue ++ (rest filter (_.isInstanceOf[DefDef])).reverse // ++ calls
-          // println("REVERSED "+ (stats mkString ("{", "\n", "}")))
-          (stats, localTyper.typed{Apply(Ident(rest.head.symbol), List())}) // call first label to kick-start the match
-        } else (anfStats, anfExpr)
-      } else (anfStats, anfExpr)
-    }
-  }
-}
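
For context, the transform deleted above targets the continuations API in scala.util.continuations. A minimal, hedged usage sketch (it assumes the continuations plugin and library are on the compile path and enabled with -P:continuations:enable; the AnfDemo and ask names are made up for illustration): selectiveanf gives the result of a @cps call an explicit name before the rest of the expression uses it, which is what lets the later selectivecps phase chain on it.

    import scala.util.continuations._

    object AnfDemo {
      // shift captures the rest of the enclosing reset block as the continuation k
      def ask(): Int @cps[Int] = shift { (k: Int => Int) => k(7) }

      def main(args: Array[String]): Unit = {
        // Written as:  reset { ask() + 1 }
        // selectiveanf roughly rewrites the body to: { val tmp = ask(); tmp + 1 }
        // so that selectivecps can later turn it into a map/flatMap chain.
        println(reset { ask() + 1 })   // prints 8: the continuation (_ + 1) is applied to 7
      }
    }
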
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
deleted file mode 100644
index 2371597..0000000
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-// $Id$
-
-package scala.tools.selectivecps
-
-import scala.tools.nsc
-import scala.tools.nsc.typechecker._
-import nsc.Global
-import nsc.Phase
-import nsc.plugins.Plugin
-import nsc.plugins.PluginComponent
-
-class SelectiveCPSPlugin(val global: Global) extends Plugin {
-  import global._
-
-  val name = "continuations"
-  val description = "applies selective cps conversion"
-
-  val anfPhase = new SelectiveANFTransform() {
-    val global = SelectiveCPSPlugin.this.global
-    val runsAfter = List("pickler")
-  }
-
-  val cpsPhase = new SelectiveCPSTransform() {
-    val global = SelectiveCPSPlugin.this.global
-    val runsAfter = List("selectiveanf")
-    override val runsBefore = List("uncurry")
-  }
-
-
-  val components = List[PluginComponent](anfPhase, cpsPhase)
-
-  val checker = new CPSAnnotationChecker {
-    val global: SelectiveCPSPlugin.this.global.type = SelectiveCPSPlugin.this.global
-  }
-  global.addAnnotationChecker(checker.checker)
-  global.analyzer.addAnalyzerPlugin(checker.plugin)
-
-  global.log("instantiated cps plugin: " + this)
-
-  def setEnabled(flag: Boolean) = {
-    checker.cpsEnabled = flag
-    anfPhase.cpsEnabled = flag
-    cpsPhase.cpsEnabled = flag
-  }
-
-  // TODO: require -enabled command-line flag
-
-  override def processOptions(options: List[String], error: String => Unit) = {
-    var enabled = false
-    for (option <- options) {
-      if (option == "enable") {
-        enabled = true
-      } else {
-        error("Option not understood: "+option)
-      }
-    }
-    setEnabled(enabled)
-  }
-
-  override val optionsHelp: Option[String] =
-    Some("  -P:continuations:enable        Enable continuations")
-}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
deleted file mode 100644
index 4482bf2..0000000
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
+++ /dev/null
@@ -1,384 +0,0 @@
-// $Id$
-
-package scala.tools.selectivecps
-
-import scala.collection._
-
-import scala.tools.nsc._
-import scala.tools.nsc.transform._
-import scala.tools.nsc.plugins._
-
-import scala.tools.nsc.ast.TreeBrowsers
-import scala.tools.nsc.ast._
-
-/**
- * In methods marked @cps, CPS-transform assignments introduced by ANF-transform phase.
- */
-abstract class SelectiveCPSTransform extends PluginComponent with
-  InfoTransform with TypingTransformers with CPSUtils with TreeDSL {
-  // inherits abstract value `global` and class `Phase` from Transform
-
-  import global._                  // the global environment
-  import definitions._             // standard classes and methods
-  import typer.atOwner             // methods to type trees
-
-  /** the following two members override abstract members in Transform */
-  val phaseName: String = "selectivecps"
-
-  protected def newTransformer(unit: CompilationUnit): Transformer =
-    new CPSTransformer(unit)
-
-  /** This class does not change linearization */
-  override def changesBaseClasses = false
-
-  /** - return symbol's transformed type,
-   */
-  def transformInfo(sym: Symbol, tp: Type): Type = {
-    if (!cpsEnabled) return tp
-
-    val newtp = transformCPSType(tp)
-
-    if (newtp != tp)
-      debuglog("transformInfo changed type for " + sym + " to " + newtp);
-
-    if (sym == MethReifyR)
-      debuglog("transformInfo (not)changed type for " + sym + " to " + newtp);
-
-    newtp
-  }
-
-  def transformCPSType(tp: Type): Type = {  // TODO: use a TypeMap? need to handle more cases?
-    tp match {
-      case PolyType(params,res) => PolyType(params, transformCPSType(res))
-      case NullaryMethodType(res) => NullaryMethodType(transformCPSType(res))
-      case MethodType(params,res) => MethodType(params, transformCPSType(res))
-      case TypeRef(pre, sym, args) => TypeRef(pre, sym, args.map(transformCPSType(_)))
-      case _ =>
-        getExternalAnswerTypeAnn(tp) match {
-          case Some((res, outer)) =>
-            appliedType(Context.tpe, List(removeAllCPSAnnotations(tp), res, outer))
-          case _ =>
-            removeAllCPSAnnotations(tp)
-        }
-    }
-  }
-
-
-  class CPSTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
-    private val patmatTransformer = patmat.newTransformer(unit)
-
-    override def transform(tree: Tree): Tree = {
-      if (!cpsEnabled) return tree
-      postTransform(mainTransform(tree))
-    }
-
-    def postTransform(tree: Tree): Tree = {
-      tree.setType(transformCPSType(tree.tpe))
-    }
-
-
-    def mainTransform(tree: Tree): Tree = {
-      tree match {
-
-        // TODO: can we generalize this?
-
-        case Apply(TypeApply(fun, targs), args)
-        if (fun.symbol == MethShift) =>
-          debuglog("found shift: " + tree)
-          atPos(tree.pos) {
-            val funR = gen.mkAttributedRef(MethShiftR) // TODO: correct?
-            //gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedIdent(ScalaPackage),
-            //ScalaPackage.tpe.member("util")), ScalaPackage.tpe.member("util").tpe.member("continuations")), MethShiftR)
-            //gen.mkAttributedRef(ModCPS.tpe,  MethShiftR) // TODO: correct?
-            debuglog("funR.tpe = " + funR.tpe)
-            Apply(
-                TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))),
-                args.map(transform(_))
-            ).setType(transformCPSType(tree.tpe))
-          }
-
-        case Apply(TypeApply(fun, targs), args)
-        if (fun.symbol == MethShiftUnit) =>
-          debuglog("found shiftUnit: " + tree)
-          atPos(tree.pos) {
-            val funR = gen.mkAttributedRef(MethShiftUnitR) // TODO: correct?
-            debuglog("funR.tpe = " + funR.tpe)
-            Apply(
-                TypeApply(funR, List(targs(0), targs(1))).setType(appliedType(funR.tpe,
-                    List(targs(0).tpe, targs(1).tpe))),
-                args.map(transform(_))
-            ).setType(appliedType(Context.tpe, List(targs(0).tpe,targs(1).tpe,targs(1).tpe)))
-          }
-
-        case Apply(TypeApply(fun, targs), args)
-        if (fun.symbol == MethReify) =>
-          log("found reify: " + tree)
-          atPos(tree.pos) {
-            val funR = gen.mkAttributedRef(MethReifyR) // TODO: correct?
-            debuglog("funR.tpe = " + funR.tpe)
-            Apply(
-                TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))),
-                args.map(transform(_))
-            ).setType(transformCPSType(tree.tpe))
-          }
-
-      case Try(block, catches, finalizer) =>
-        // currently duplicates the catch block into a partial function.
-        // this is kinda risky, but we don't expect there will be lots
-        // of try/catches inside catch blocks (exp. blowup unlikely).
-
-        // CAVEAT: finalizers are surprisingly tricky!
-        // the problem is that they cannot easily be removed
-        // from the regular control path and hence will
-        // also be invoked after creating the Context object.
-
-        /*
-        object Test {
-          def foo1 = {
-            throw new Exception("in sub")
-            shift((k:Int=>Int) => k(1))
-            10
-          }
-          def foo2 = {
-            shift((k:Int=>Int) => k(2))
-            20
-          }
-          def foo3 = {
-            shift((k:Int=>Int) => k(3))
-            throw new Exception("in sub")
-            30
-          }
-          def foo4 = {
-            shift((k:Int=>Int) => 4)
-            throw new Exception("in sub")
-            40
-          }
-          def bar(x: Int) = try {
-            if (x == 1)
-              foo1
-            else if (x == 2)
-              foo2
-            else if (x == 3)
-              foo3
-            else //if (x == 4)
-              foo4
-          } catch {
-            case _ =>
-              println("exception")
-              0
-          } finally {
-            println("done")
-          }
-        }
-
-        reset(Test.bar(1)) // should print: exception,done,0
-        reset(Test.bar(2)) // should print: done,20 <-- but prints: done,done,20
-        reset(Test.bar(3)) // should print: exception,done,0 <-- but prints: done,exception,done,0
-        reset(Test.bar(4)) // should print: 4 <-- but prints: done,4
-        */
-
-        val block1 = transform(block)
-        val catches1 = transformCaseDefs(catches)
-        val finalizer1 = transform(finalizer)
-
-        if (hasAnswerTypeAnn(tree.tpe)) {
-          //vprintln("CPS Transform: " + tree + "/" + tree.tpe + "/" + block1.tpe)
-
-          val (stms, expr1) = block1 match {
-            case Block(stms, expr) => (stms, expr)
-            case expr => (Nil, expr)
-          }
-
-          val targettp = transformCPSType(tree.tpe)
-
-          val pos = catches.head.pos
-          val funSym = currentOwner.newValueParameter(cpsNames.catches, pos).setInfo(appliedType(PartialFunctionClass.tpe, List(ThrowableClass.tpe, targettp)))
-          val funDef = localTyper.typedPos(pos) {
-            ValDef(funSym, Match(EmptyTree, catches1))
-          }
-          val expr2 = localTyper.typedPos(pos) {
-            Apply(Select(expr1, expr1.tpe.member(cpsNames.flatMapCatch)), List(Ident(funSym)))
-          }
-
-          val exSym = currentOwner.newValueParameter(cpsNames.ex, pos).setInfo(ThrowableClass.tpe)
-
-          import CODE._
-          // generate a case that is supported directly by the back-end
-          val catchIfDefined = CaseDef(
-                Bind(exSym, Ident(nme.WILDCARD)),
-                EmptyTree,
-                IF ((REF(funSym) DOT nme.isDefinedAt)(REF(exSym))) THEN (REF(funSym) APPLY (REF(exSym))) ELSE Throw(REF(exSym))
-              )
-
-          val catch2 = localTyper.typedCases(List(catchIfDefined), ThrowableClass.tpe, targettp)
-          //typedCases(tree, catches, ThrowableClass.tpe, pt)
-
-          patmatTransformer.transform(localTyper.typed(Block(List(funDef), treeCopy.Try(tree, treeCopy.Block(block1, stms, expr2), catch2, finalizer1))))
-
-
-/*
-          disabled for now - see notes above
-
-          val expr3 = if (!finalizer.isEmpty) {
-            val pos = finalizer.pos
-            val finalizer2 = duplicateTree(finalizer1)
-            val fun = Function(List(), finalizer2)
-            val expr3 = localTyper.typedPos(pos) { Apply(Select(expr2, expr2.tpe.member("mapFinally")), List(fun)) }
-
-            val chown = new ChangeOwnerTraverser(currentOwner, fun.symbol)
-            chown.traverse(finalizer2)
-
-            expr3
-          } else
-            expr2
-*/
-        } else {
-          treeCopy.Try(tree, block1, catches1, finalizer1)
-        }
-
-      case Block(stms, expr) =>
-
-          val (stms1, expr1) = transBlock(stms, expr)
-          treeCopy.Block(tree, stms1, expr1)
-
-        case _ =>
-          super.transform(tree)
-      }
-    }
-
-
-
-    def transBlock(stms: List[Tree], expr: Tree): (List[Tree], Tree) = {
-
-      stms match {
-        case Nil =>
-          (Nil, transform(expr))
-
-        case stm::rest =>
-
-          stm match {
-            case vd @ ValDef(mods, name, tpt, rhs)
-            if (vd.symbol.hasAnnotation(MarkerCPSSym)) =>
-
-              debuglog("found marked ValDef "+name+" of type " + vd.symbol.tpe)
-
-              val tpe = vd.symbol.tpe
-              val rhs1 = atOwner(vd.symbol) { transform(rhs) }
-              rhs1.changeOwner(vd.symbol -> currentOwner) // TODO: don't traverse twice
-
-              debuglog("valdef symbol " + vd.symbol + " has type " + tpe)
-              debuglog("right hand side " + rhs1 + " has type " + rhs1.tpe)
-
-              debuglog("currentOwner: " + currentOwner)
-              debuglog("currentMethod: " + currentMethod)
-
-              val (bodyStms, bodyExpr) = transBlock(rest, expr)
-              // FIXME: result will later be traversed again by TreeSymSubstituter and
-              // ChangeOwnerTraverser => exp. running time.
-              // Should be changed to fuse traversals into one.
-
-              val specialCaseTrivial = bodyExpr match {
-                case Apply(fun, args) =>
-                  // for now, look for explicit tail calls only.
-                  // are there other cases that could profit from specializing on
-                  // trivial contexts as well?
-                  (bodyExpr.tpe.typeSymbol == Context) && (currentMethod == fun.symbol)
-                case _ => false
-              }
-
-              def applyTrivial(ctxValSym: Symbol, body: Tree) = {
-
-                val body1 = (new TreeSymSubstituter(List(vd.symbol), List(ctxValSym)))(body)
-
-                val body2 = localTyper.typedPos(vd.symbol.pos) { body1 }
-
-                // in theory it would be nicer to look for an @cps annotation instead
-                // of testing for Context
-                if ((body2.tpe == null) || !(body2.tpe.typeSymbol == Context)) {
-                  //println(body2 + "/" + body2.tpe)
-                  unit.error(rhs.pos, "cannot compute type for CPS-transformed function result")
-                }
-                body2
-              }
-
-              def applyCombinatorFun(ctxR: Tree, body: Tree) = {
-                val arg = currentOwner.newValueParameter(name, ctxR.pos).setInfo(tpe)
-                val body1 = (new TreeSymSubstituter(List(vd.symbol), List(arg)))(body)
-                val fun = localTyper.typedPos(vd.symbol.pos) { Function(List(ValDef(arg)), body1) } // types body as well
-                arg.owner = fun.symbol
-                body1.changeOwner(currentOwner -> fun.symbol)
-
-                // see note about multiple traversals above
-
-                debuglog("fun.symbol: "+fun.symbol)
-                debuglog("fun.symbol.owner: "+fun.symbol.owner)
-                debuglog("arg.owner: "+arg.owner)
-
-                debuglog("fun.tpe:"+fun.tpe)
-                debuglog("return type of fun:"+body1.tpe)
-
-                var methodName = nme.map
-
-                if (body1.tpe != null) {
-                  if (body1.tpe.typeSymbol == Context)
-                    methodName = nme.flatMap
-                }
-                else
-                  unit.error(rhs.pos, "cannot compute type for CPS-transformed function result")
-
-                debuglog("will use method:"+methodName)
-
-                localTyper.typedPos(vd.symbol.pos) {
-                  Apply(Select(ctxR, ctxR.tpe.member(methodName)), List(fun))
-                }
-              }
-
-              def mkBlock(stms: List[Tree], expr: Tree) = if (stms.nonEmpty) Block(stms, expr) else expr
-
-              try {
-                if (specialCaseTrivial) {
-                  debuglog("will optimize possible tail call: " + bodyExpr)
-
-                  // FIXME: flatMap impl has become more complicated due to
-                  // exceptions. do we need to put a try/catch in the then part??
-
-                  // val ctx = <rhs>
-                  // if (ctx.isTrivial)
-                  //   val <lhs> = ctx.getTrivialValue; ...    <--- TODO: try/catch ??? don't bother for the moment...
-                  // else
-                  //   ctx.flatMap { <lhs> => ... }
-                  val ctxSym = currentOwner.newValue(newTermName("" + vd.symbol.name + cpsNames.shiftSuffix)).setInfo(rhs1.tpe)
-                  val ctxDef = localTyper.typed(ValDef(ctxSym, rhs1))
-                  def ctxRef = localTyper.typed(Ident(ctxSym))
-                  val argSym = currentOwner.newValue(vd.symbol.name).setInfo(tpe)
-                  val argDef = localTyper.typed(ValDef(argSym, Select(ctxRef, ctxRef.tpe.member(cpsNames.getTrivialValue))))
-                  val switchExpr = localTyper.typedPos(vd.symbol.pos) {
-                    val body2 = mkBlock(bodyStms, bodyExpr).duplicate // dup before typing!
-                    If(Select(ctxRef, ctxSym.tpe.member(cpsNames.isTrivial)),
-                      applyTrivial(argSym, mkBlock(argDef::bodyStms, bodyExpr)),
-                      applyCombinatorFun(ctxRef, body2))
-                  }
-                  (List(ctxDef), switchExpr)
-                } else {
-                  // ctx.flatMap { <lhs> => ... }
-                  //     or
-                  // ctx.map { <lhs> => ... }
-                  (Nil, applyCombinatorFun(rhs1, mkBlock(bodyStms, bodyExpr)))
-                }
-              } catch {
-                case ex:TypeError =>
-                  unit.error(ex.pos, ex.msg)
-                  (bodyStms, bodyExpr)
-              }
-
-            case _ =>
-                val stm1 = transform(stm)
-                val (a, b) = transBlock(rest, expr)
-                (stm1::a, b)
-            }
-      }
-    }
-
-
-  }
-}
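
As a rough illustration of the target shape named in the comments above ("ctx.flatMap { <lhs> => ... }" or "ctx.map { <lhs> => ... }"), here is a small self-contained sketch; Ctx is an assumed stand-in for the real ControlContext, not the plugin's actual class.

    // Simplified stand-in for ControlContext; shapes and names are assumptions.
    class Ctx[A, B, C](val run: (A => B) => C) {
      // used when the rest of the block is pure
      def map[A1](f: A => A1): Ctx[A1, B, C] =
        new Ctx[A1, B, C](k => run(a => k(f(a))))
      // used when the rest of the block is itself CPS-transformed
      def flatMap[A1](f: A => Ctx[A1, B, B]): Ctx[A1, B, C] =
        new Ctx[A1, B, C](k => run(a => f(a).run(k)))
    }

    object CtxDemo {
      def main(args: Array[String]): Unit = {
        val ctx = new Ctx[Int, Int, Int](k => k(7))   // roughly what a shift produces
        // "val x = <cps rhs>; x + 1" becomes, in spirit, ctx.map(x => x + 1)
        println(ctx.map(_ + 1).run(identity))         // prints 8
      }
    }
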
diff --git a/src/continuations/plugin/scalac-plugin.xml b/src/continuations/plugin/scalac-plugin.xml
deleted file mode 100644
index 04d4265..0000000
--- a/src/continuations/plugin/scalac-plugin.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-<!-- $Id$ -->
-<plugin>
-  <name>continuations</name>
-  <classname>scala.tools.selectivecps.SelectiveCPSPlugin</classname>
-</plugin>
diff --git a/src/detach/library/scala/remoting/Channel.scala b/src/detach/library/scala/remoting/Channel.scala
deleted file mode 100644
index e60d16c..0000000
--- a/src/detach/library/scala/remoting/Channel.scala
+++ /dev/null
@@ -1,190 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: Channel.scala 18365 2009-07-21 11:00:42Z michelou $
-
-package scala.remoting
-
-import java.io._
-import java.net._
-import java.rmi.server.RMIClassLoader
-
-/** <p>
- *    The class <code>Channel</code> implements (basic) typed channels
- *    which use <a href="http://java.sun.com/docs/books/tutorial/networking/sockets/"
- *    target="_top"/>Java socket</a> communication and Scala type manifests to
- *    provide type-safe send/receive operations between a localhost and another
- *    remote machine by specifying some <code>host</code> and <code>port</code>.
- *  </p>
- *
- *  @author Stephane Micheloud
- *  @version 1.1
- */
-class Channel protected (socket: Socket) {
-
-  // Create a socket without a timeout
-  def this(host: String, port: Int) = this(new Socket(host, port))
-
-  // // Create a socket with a timeout
-  // val sockaddr: SocketAddress = new InetSocketAddress(addr, port)
-  // val socket = new Socket()
-  // // If the timeout occurs, SocketTimeoutException is thrown.
-  // socket.connect(sockaddr, 2000) // 2 seconds
-
-  /** Returns the local address of this channel. */
-  val host = socket.getInetAddress.getHostAddress
-
-  /** Returns the port on which this channel is listening. */
-  val port = socket.getLocalPort
-
-  private var cl: ClassLoader =
-    try {
-      // requires permission in Java policy file
-      val codebase = System.getProperty("java.rmi.server.codebase")
-      if (codebase != null) info("codebase="+codebase)
-      RMIClassLoader.getClassLoader(codebase)
-    }
-    catch {
-      case e: Exception =>
-        sys.error("Class loader undefined: " + e.getMessage)
-        null
-    }
-  def classLoader: ClassLoader = cl
-  def classLoader_=(x: ClassLoader) { cl = x }
-
-  info(""+this)
-
-  private class CustomObjectInputStream(in: InputStream)
-  extends ObjectInputStream(in) {
-    override def resolveClass(desc: ObjectStreamClass): Class[_] =
-      if (cl eq null)
-        super.resolveClass(desc)
-      else
-        try {
-          info("resolve class "+desc.getName)
-          cl loadClass desc.getName
-        }
-        catch {
-          case e: ClassNotFoundException =>
-            super.resolveClass(desc)
-        }
-  }
-
-  // lazy modifier is required!
-  private lazy val in =
-    try {
-      new CustomObjectInputStream(socket.getInputStream)
-    }
-    catch {
-      case e: IOException =>
-        sys.error("Input stream undefined: "+e.getMessage+" ("+this+")")
-        null
-    }
-  private lazy val out =
-    try {
-      new ObjectOutputStream(socket.getOutputStream)
-    }
-    catch {
-      case e: IOException =>
-        sys.error("Output stream undefined: "+e.getMessage+" ("+this+")")
-        null
-    }
-
-  /** <code>receive<primtype></code> methods may throw an
-   *  <code>IOException</code>.
-   */
-  def receiveUnit    = receive[Unit]
-  def receiveBoolean = receive[Boolean]
-  def receiveByte    = receive[Byte]
-  def receiveChar    = receive[Char]
-  def receiveShort   = receive[Short]
-  def receiveInt     = receive[Int]
-  def receiveLong    = receive[Long]
-  def receiveFloat   = receive[Float]
-  def receiveDouble  = receive[Double]
-  def receiveString  = receive[String]
-
-  /** <code>receive</code> method may throw either an
-   *  <code>ClassNotFoundException</code> or an <code>IOException</code>.
-   *
-   *  @throw <code>ChannelException</code> if received value has not
-   *         the expected type.
-   */
-  @throws(classOf[ChannelException])
-  def receive[T](implicit expected: scala.reflect.ClassTag[T]): T = {
-    val found = in.readObject().asInstanceOf[reflect.ClassTag[_]]
-    info("receive: found="+found+", expected="+expected)
-    import scala.reflect.ClassTag
-    val x = found match {
-      case ClassTag.Unit    => ()
-      case ClassTag.Boolean => in.readBoolean()
-      case ClassTag.Byte    => in.readByte()
-      case ClassTag.Char    => in.readChar()
-      case ClassTag.Short   => in.readShort()
-      case ClassTag.Int     => in.readInt()
-      case ClassTag.Long    => in.readLong()
-      case ClassTag.Float   => in.readFloat()
-      case ClassTag.Double  => in.readDouble()
-      case _                => in.readObject()
-    }
-    val res = if (found <:< expected)
-      x.asInstanceOf[T]
-    else
-      throw new ChannelException(
-        "\n\tfound \""+found+"\"\n\texpected \""+expected+"\"")
-    info("received "+res+" (available="+in.available+")")
-    res
-  }
-
-  /** <code>?</code> method may throw either an
-   *  <code>ClassNotFoundException</code> or an <code>IOException</code>.
-   */
-  def ?[T](implicit t: scala.reflect.ClassTag[T]): T = receive[T](t)
-
-  /** <code>send</code> method may throw an <code>IOException</code>.
-   */
-  def send[T](x: T)(implicit t: scala.reflect.ClassTag[T]) {
-    out writeObject t
-    x match {
-      case x: Unit    => // nop
-      case x: Boolean => out writeBoolean x
-      case x: Byte    => out writeByte x
-      case x: Char    => out writeChar x
-      case x: Short   => out writeShort x
-      case x: Int     => out writeInt x
-      case x: Long    => out writeLong x
-      case x: Float   => out writeFloat x
-      case x: Double  => out writeDouble x
-      case x          => out writeObject x
-    }
-    out.flush()
-    info("sent "+x)
-  }
-
-  /** <code>!</code> method may throw an <code>IOException</code>.
-   */
-  def ![T](x: T)(implicit m: scala.reflect.ClassTag[T]) { send(x)(m) }
-
-  def close() {
-    try { socket.close() }
-    catch { case e: IOException => }
-    info(this+" closed")
-  }
-
-  override def toString: String = socket.toString
-
-  private def info(msg: String) {
-    runtime.remoting.Debug.info("[Channel] "+msg)
-  }
-}
-
-/** <code>ChannelException</code> may be thrown by the operation
- *  <code>receive</code> when the received data has not the expected type.
- */
-case class ChannelException(msg: String) extends IOException(msg)
-
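
A short, hedged client-side sketch of the Channel API above (the host, port, and EchoClient name are illustrative; it assumes a peer that sends an Int back):

    import scala.remoting.Channel

    object EchoClient {
      def main(args: Array[String]): Unit = {
        val ch = new Channel("localhost", 8000)   // connect to an assumed peer
        ch ! 42                                   // send: writes the ClassTag, then the value
        val n = ch.receiveInt                     // receive: checks the tag, then reads an Int
        println(n)
        ch.close()
      }
    }
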
diff --git a/src/detach/library/scala/remoting/Debug.scala b/src/detach/library/scala/remoting/Debug.scala
deleted file mode 100644
index 79f2bce..0000000
--- a/src/detach/library/scala/remoting/Debug.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: Debug.scala 17412 2009-03-31 10:08:25Z michelou $
-
-package scala.remoting
-
-/**
- *  @author Stephane Micheloud
- *  @version 1.0
- */
-object Debug extends runtime.remoting.Debug {
-  private val f = new java.text.SimpleDateFormat("HH:mm:ss")
-  private val c = new java.util.GregorianCalendar
-
-  def getTime: String = f format c.getTime
-
-  def getLocation(obj: AnyRef): String = {
-    val s = obj.getClass().getClassLoader().toString()
-    s substring s.indexOf('[')
-  }
-}
diff --git a/src/detach/library/scala/remoting/ServerChannel.scala b/src/detach/library/scala/remoting/ServerChannel.scala
deleted file mode 100644
index 7828f85..0000000
--- a/src/detach/library/scala/remoting/ServerChannel.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: ServerChannel.scala 18365 2009-07-21 11:00:42Z michelou $
-
-package scala.remoting
-
-import java.net.{ServerSocket, Socket}
-
-/** <p>
- *    Creates a server channel and binds its associated socket to the
- *    specified port number.<br/>
- *    Example:
- *  </p><pre>
- *  <b>class</b> ComputeChannel(s: Socket) <b>extends</b> Channel(s) {
- *    <b>def</b> receiveFunc = receive[Int => Int]
- *  }
- *  <b>class</b> ComputeServer(p: Int)
- *  <b>extends</b> AbstractServerChannel[ComputeChannel](p) {
- *     <b>def</b> newChannel(s: Socket) = <b>new</b> ComputeChannel(s)
- *  }</pre>
- *
- *  @author Stephane Micheloud
- *  @version 1.0
- */
-class ServerChannel(p: Int) extends AbstractServerChannel[Channel](p) {
-  def newChannel(s: Socket) = new Channel(s)
-}
-
-abstract class AbstractServerChannel[T <: Channel](_port: Int) {
-
-  /** Creates an input channel and binds its associated socket to any
-   *  free port.
-   */
-  def this() = this(0)
-
-  // The maximum queue length for incoming requests to connect is set to 50.
-  private val serverSocket = new ServerSocket(_port)
-
-  /** Returns the local address of this channel. */
-  val host = serverSocket.getInetAddress.getHostAddress
-
-  /** Returns the port on which this channel is listening. */
-  val port = serverSocket.getLocalPort
-  info("Listening on port "+port)
-
-  protected def newChannel(socket: Socket): T
-
-  def accept: T = {
-    System.gc() // required!
-    newChannel(serverSocket.accept)
-  }
-
-  def close() {
-    try { serverSocket.close() }
-    catch { case e: java.io.IOException => }
-    info("Server socket "+host+":"+port+" closed")
-  }
-
-  protected def info(msg: String) {
-    runtime.remoting.Debug.info("[ServerChannel] "+msg)
-  }
-}
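
And the matching server side, following the pattern shown in the doc comment above (the port and object name are again illustrative):

    import scala.remoting.ServerChannel

    object EchoServer {
      def main(args: Array[String]): Unit = {
        val server = new ServerChannel(8000)   // binds a ServerSocket on port 8000
        val ch = server.accept                 // blocks until a client connects
        val n = ch.receiveInt                  // read an Int from the client
        ch ! (n + 1)                           // reply with its successor
        server.close()
      }
    }
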
diff --git a/src/detach/library/scala/remoting/detach.scala b/src/detach/library/scala/remoting/detach.scala
deleted file mode 100644
index 51a3ac5..0000000
--- a/src/detach/library/scala/remoting/detach.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://www.scala-lang.org/           **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: detach.scala 16901 2009-01-13 15:37:05Z michelou $
-
-package scala.remoting
-
-
-/** The <code>detach</code> object is a <em>marker object</em> which informs
- *  the Scala compiler that arguments whose type is a function type are
- *  eligible for remote closure generation.
- *
- *  @author  Stephane Micheloud
- *  @version 1.0, 13/07/2005
- */
-object detach {
-
-  def apply[R](f: Function0[R]): Function0[R] = f
-  def apply[T0, R](f: Function1[T0, R]): Function1[T0, R] = f
-  def apply[T0, T1, R](f: Function2[T0, T1, R]): Function2[T0, T1, R] = f
-  def apply[T0, T1, T2, R](f: Function3[T0, T1, T2, R]): Function3[T0, T1, T2, R] = f
-  def apply[T0, T1, T2, T3, R](f: Function4[T0, T1, T2, T3, R]): Function4[T0, T1, T2, T3, R] = f
-  def apply[T0, T1, T2, T3, T4, R](f: Function5[T0, T1, T2, T3, T4, R]): Function5[T0, T1, T2, T3, T4, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, R](f: Function6[T0, T1, T2, T3, T4, T5, R]): Function6[T0, T1, T2, T3, T4, T5, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, R](f: Function7[T0, T1, T2, T3, T4, T5, T6, R]): Function7[T0, T1, T2, T3, T4, T5, T6, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, R](f: Function8[T0, T1, T2, T3, T4, T5, T6, T7, R]): Function8[T0, T1, T2, T3, T4, T5, T6, T7, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, R](f: Function9[T0, T1, T2, T3, T4, T5, T6, T7, T8, R]): Function9[T0, T1, T2, T3, T4, T5, T6, T7, T8, R] = f
-
-  // since 2.7.0
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, R](f: Function10[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, R]): Function10[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R](f: Function11[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R]): Function11[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R](f: Function12[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R]): Function12[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R](f: Function13[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R]): Function13[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R](f: Function14[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R]): Function14[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R](f: Function15[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R]): Function15[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R](f: Function16[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R]): Function16[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R](f: Function17[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R]): Function17[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R](f: Function18[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R]): Function18[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R](f: Function19[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R]): Function19[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R](f: Function20[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R]): Function20[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R](f: Function21[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R]): Function21[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R] = f
-  def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R](f: Function22[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R]): Function22[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R] = f
-}
-
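
The apply overloads above are all typed identities: detach(f) simply returns f, and the actual closure rewriting was done by the detach compiler plugin whose sources are removed further down. A tiny runnable stand-in making that point; detachLike is a hypothetical name, not the removed scala.remoting.detach API.

object DetachLikeDemo {
  // same shape as one of the removed apply overloads: a typed identity
  def detachLike[T0, R](f: Function1[T0, R]): Function1[T0, R] = f

  def main(args: Array[String]): Unit = {
    val g = detachLike { (x: Int) => x + 1 }
    println(g(41)) // prints 42; at the source level nothing changes, the plugin rewrote such calls
  }
}
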
diff --git a/src/detach/library/scala/runtime/RemoteRef.scala b/src/detach/library/scala/runtime/RemoteRef.scala
deleted file mode 100644
index e65b22c..0000000
--- a/src/detach/library/scala/runtime/RemoteRef.scala
+++ /dev/null
@@ -1,182 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: RemoteRef.scala 18365 2009-07-21 11:00:42Z michelou $
-
-package scala.runtime
-
-import java.net.{InetAddress, MalformedURLException}
-import java.rmi.{NoSuchObjectException, NotBoundException, Remote}
-import java.rmi.registry.{LocateRegistry, Registry}
-import java.rmi.server.{ExportException, RemoteObject, UnicastRemoteObject}
-
-import scala.runtime.remoting.{Debug, RemoteGC}
-
-/**
- *
- *  @author Stephane Micheloud
- *  @version 1.0
- */
-object RemoteRef { /*extends Thread {
-  start()
-
-  private class QuitException extends Exception
-  private var isTerminated = false
-
-  // keeps track of live remote objects
-  val remoteGC = new RemoteGC
-
-  override def run() {
-    info("started thread")
-    try {
-      while (!isTerminated) {
-        this.synchronized {
-          try {
-            wait(200)
-          } catch {
-            case _: InterruptedException =>
-              if (isTerminated) throw new QuitException
-          }
-          remoteGC.gc()
-          if (remoteGC.allClosed)
-            throw new QuitException
-        } // synchronized
-
-      }
-    } catch {
-      case _: QuitException =>
-        // allow thread to exit
-    }
-  }
-*/
-  try {
-    val prop = System.getProperty("sun.rmi.dgc.server.gcInterval")
-    if (prop eq null)
-      System.setProperty("sun.rmi.dgc.server.gcInterval", "10000")
-  }
-  catch {
-    case e =>
-      error(e.getMessage)
-  }
-
-  private val host =
-    try {
-      val prop = System.getProperty("java.rmi.server.hostname")
-      if (prop ne null) prop else InetAddress.getLocalHost.getHostAddress
-    }
-    catch {
-      case e =>
-        warning(e.getMessage)
-        InetAddress.getLocalHost.getHostAddress
-    }
-
-  private val port =
-    try {
-      val prop = System.getProperty("scala.remoting.port")
-      if (prop ne null) prop.toInt else Registry.REGISTRY_PORT
-    }
-    catch {
-      case e =>
-        warning(e.getMessage)
-        Registry.REGISTRY_PORT // default port
-    }
-
-  private val registry =
-    try {
-      LocateRegistry.createRegistry(port)
-    }
-    catch {
-      case e =>
-        warning(e.getMessage)
-        LocateRegistry.getRegistry(host, port)
-    }
-
-  private val prefix = "//"+host+":"+port+"/"
-  printDebugInfos
-
-  // Variant 1: rebind/unbind
-  def bind(name: String, x: Remote): Remote =
-    try {
-      registry.rebind(prefix+name, x)
-      info("\""+prefix+name+"\" bound")
-      val stub = RemoteObject.toStub(x)
-      //remoteGC.newRef(stub)
-      stub
-    } catch {
-      case e: MalformedURLException =>
-        error(e.getMessage); null
-      case e: ExportException =>
-        info(""+e); null
-      case e: Exception => // AlreadyBoundException, etc..
-        throw e
-    }
-
-  def unbind(name: String) =
-    try {
-      registry.unbind(prefix+name)
-      info("\""+name+"\" unbound")
-    } catch {
-      case e: java.io.EOFException =>
-        warning(e.getMessage)
-      case e: NotBoundException =>
-        warning(e.getMessage+" already unbound")
-      case e: MalformedURLException =>
-        error(e.getMessage)
-      case e: Exception =>
-        throw e
-    }
-/*
-  // Variant 2: un-/exportObject
-  def bind(name: String, x: Remote): Remote =
-    try {
-      val ex = UnicastRemoteObject.exportObject(x)
-      registry.rebind(prefix+name, ex)
-      info("\""+prefix+name+"\" bound")
-      //val stub = RemoteObject.toStub(ex)
-      //remoteGC.newRef(ex)
-      ex //stub
-    } catch {
-      case e: MalformedURLException =>
-        error(e.getMessage); null
-      case e: ExportException =>
-        info(""+e); null
-      case e: Exception => // AlreadyBoundException, etc..
-        throw e
-    }
-
-  def unbind(x: Remote) {
-    try {
-      UnicastRemoteObject.unexportObject(x, false)
-      info("\""+x+"\" unbound")
-    } catch {
-      case e: java.io.EOFException =>
-        warning(e.getMessage)
-      case e: NotBoundException =>
-        warning(e.getMessage+" already unbound")
-      case e: MalformedURLException =>
-        error(e.getMessage)
-      case e: Exception =>
-        throw e
-    }
-  }
-*/
-  private def info(msg: String) { Debug.info("[RemoteRef] "+msg) }
-  private def warning(msg: String) { Debug.warning("[RemoteRef] "+msg) }
-  private def error(msg: String) { Debug.error("[RemoteRef] "+msg) }
-
-  private def printDebugInfos() {
-    def property(name: String): String =
-      name+"="+(
-      try { System.getProperty(name, "") }
-      catch { case e => warning(e.getMessage); "?" })
-    info(property("java.rmi.server.hostname"))
-    info(property("sun.rmi.dgc.server.gcInterval"))
-    info("registry="+registry)
-    info("prefix="+prefix)
-  }
-}
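
The RemoteRef object removed above is essentially a thin wrapper over a plain java.rmi registry: create or locate a registry, then rebind/unbind exported stubs under a "//host:port/"-prefixed name. Below is a minimal sketch of that pattern against java.rmi alone; the Greeter/GreeterServer names and port 1099 are illustrative and not part of the removed API.

import java.rmi.Remote
import java.rmi.registry.LocateRegistry
import java.rmi.server.UnicastRemoteObject

trait Greeter extends Remote {
  @throws(classOf[java.rmi.RemoteException])
  def greet(name: String): String
}

object GreeterServer {
  def main(args: Array[String]): Unit = {
    val impl = new Greeter { def greet(name: String) = "hello, " + name }
    // export the implementation, then register the stub under a well-known name,
    // which is what RemoteRef.bind did (plus its host:port prefix and Debug logging)
    val stub = UnicastRemoteObject.exportObject(impl, 0).asInstanceOf[Greeter]
    val registry = LocateRegistry.createRegistry(1099)
    registry.rebind("greeter", stub)   // counterpart of RemoteRef.bind
    // registry.unbind("greeter")      // counterpart of RemoteRef.unbind
  }
}
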
diff --git a/src/detach/library/scala/runtime/remoting/Debug.scala b/src/detach/library/scala/runtime/remoting/Debug.scala
deleted file mode 100644
index 06cdc67..0000000
--- a/src/detach/library/scala/runtime/remoting/Debug.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: Debug.scala 17777 2009-05-19 18:16:25Z michelou $
-
-package scala.runtime.remoting
-
-/**
- *  @author Stephane Micheloud
- *  @version 1.0
- */
-object Debug extends Debug {
-  override def info   (msg: String) { if (lib) super.info(msg) }
-  override def verbose(msg: String) { if (lib) super.verbose(msg) }
-  override def warning(msg: String) { if (lib) super.warning(msg) }
-  override def error  (msg: String) { if (lib) super.error(msg) }
-}
-
-/**
- *  @author Stephane Micheloud
- *  @version 1.0
- */
-class Debug(tag: String) {
-
-  def this() = this("")
-
-  object Level extends Enumeration {
-    type Level = Value
-    val SILENT, ERROR, WARNING, VERBOSE, INFO = Value
-  }
-
-  private val level0 =
-    try {
-      val prop = System.getProperty("scala.remoting.logLevel")
-      if (prop ne null) prop.toLowerCase else ""
-    }
-    catch {
-      case e =>
-        Console.err.println(e.getMessage)
-        ""
-    }
-
-  import Level._
-  protected var (lev, lib) = {
-    val p = java.util.regex.Pattern.compile("(error|warning|verbose|info)(\\,lib)?(.*)")
-    val m = p matcher level0
-    val (s, b) =
-      if (m.matches) (m.group(1), m.group(2) ne null)
-      else ("", false)
-    s match {
-      case "error"   => (ERROR  , b)
-      case "warning" => (WARNING, b)
-      case "verbose" => (VERBOSE, b)
-      case "info"    => (INFO   , b)
-      case _         => (SILENT , false)
-    }
-  }
-
-  def level = lev
-  def level_= (lev: Level) = { this.lev = lev }
-
-  private val tag0: String =
-    if (tag != null && tag.length > 0) tag+" " else ""
-
-  def info(msg: String) {
-    if (lev >= INFO) Console.println(tag0 + "(info): " + msg)
-  }
-
-  def verbose(msg: String) {
-    if (lev >= VERBOSE) Console.println(tag0 + "(verb): " + msg)
-  }
-
-  def warning(msg: String) {
-    if (lev >= WARNING) Console.err.println(tag0 + "(warn): " + msg)
-  }
-
-  def error(msg: String) {
-    if (lev >= ERROR) Console.err.println(tag0 + "(erro): " + msg)
-  }
-}
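
The Debug class removed above reads its level from the scala.remoting.logLevel system property, where a value such as "warning" selects the threshold and an optional ",lib" suffix additionally enables the library-side Debug object. A self-contained sketch of that parsing follows; LogLevelDemo and parse are illustrative names, not the removed API.

object LogLevelDemo {
  sealed trait Level
  case object Silent  extends Level
  case object Error   extends Level
  case object Warning extends Level
  case object Verbose extends Level
  case object Info    extends Level

  // returns (threshold, libLoggingEnabled), mirroring the removed (lev, lib) pair
  def parse(raw: String): (Level, Boolean) = {
    val p = java.util.regex.Pattern.compile("(error|warning|verbose|info)(,lib)?(.*)")
    val m = p.matcher(raw.toLowerCase)
    if (m.matches) {
      val lib = m.group(2) ne null
      m.group(1) match {
        case "error"   => (Error, lib)
        case "warning" => (Warning, lib)
        case "verbose" => (Verbose, lib)
        case _         => (Info, lib)
      }
    } else (Silent, false)
  }

  def main(args: Array[String]): Unit = {
    println(parse("info,lib")) // (Info,true)
    println(parse("warning"))  // (Warning,false)
    println(parse("bogus"))    // (Silent,false)
  }
}
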
diff --git a/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala b/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala
deleted file mode 100644
index 1105832..0000000
--- a/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala
+++ /dev/null
@@ -1,192 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: RegistryDelegate.scala 18234 2009-07-07 13:21:57Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.{RMISecurityManager, Remote, RemoteException}
-import java.rmi.registry.{LocateRegistry, Registry}
-import java.rmi.server.UnicastRemoteObject
-
-/**
- * <p>
- *   This class implements the registry delegate concept
- *   (see http://www.genady.net/rmi/v20/docs/delegate/RegistryDelegate.html)
- * </p>
- * <p>
- *   In order to enforce some level of security, the standard RMI registry
- *   implementation (e.g. <code>rmiregistry.exe</code>) only allows processes
- *   on the same host to register objects in the registry (think of a bank
- *   running a registry on one of its servers, and doesn't want anybody
- *   modifying it). So, by design, if a process tries to
- *   <code>bind(String, Remote)</code> an object to a remote registry,
- *   an exception will be thrown.
- * </p>
- * <p>
- *   However, the design of a distributed system may require remote clients to
- *   register themselves in a central registry. If such system is deployed in a
- *   controlled and trusted environment (e.g., a firewalled intranet with tight
- *   access control), the security risk may be acceptable.
- * </p>
- * <p>
- *   The simplest technical solution to the remote registration problem is to
- *   have a registry delegate. A registry delegate is an object that serves as
- *   a proxy for the real registry. The delegate itself usually appears in the
- *   registry under a well known name. It implements the Registry interface and
- *   simply delegates all method calls to the appropriate methods of the real
- *   registry. The delegate is allowed to perform bind and unbind operations
- *   because it is running on the same host as the registry.
- * </p>
- * <p>
- *   The common scenario for starting a registry and creating the delegate is
- *   starting a class with the following <code>main(Array[String])</code> method:
- * </p>
- * <pre>
- *   @throws(classOf[AccessException], classOf[RemoteException], classOf[AlreadyBoundException])
- *   <b>object</b> namingService {
- *     <b>def</b> main(args: Array[String]) {
- *       <b>if</b> (System.getSecurityManager() == <b>null</b>)
- *         System.setSecurityManager(<b>new</b> RMISecurityManager())
- *
- *       <b>val</b> registry = LocateRegistry.createRegistry(REGISTRY_PORT)
- *       registry.bind(DELEGATE_NAME, <b>new</b> RegistryDelegate());
- *
- *       do {
- *         <b>try</b> {
- *           Thread.sleep(Long.MAX_VALUE)
- *         } <b>catch</b> {
- *           <b>case</b> e: InterruptedException => // do nothing
- *           <b>case</b> e: Throwable => e.printStackTrace(); sys.exit(1)
- *         }
- *       } while (<b>true</b>)
- *     }
- *  }</pre>
- * <p>
- *   The common usage scenario looks something like:
- * </p><pre>
- *   Registry remoteRegistry = LocateRegistry.getRegistry("remotehost.mycompany.com");
- *   Registry delegate = (Registry) remoteRegistry.lookup(DELEGATE_NAME);
- *   delegate.bind("someName", <b>new</b> SomeRemoteObject());</pre>
- * <p>
- *   The <code>getRegistryDelegate(String)</code> method is a helper method
- *   that fetches the registry delegate for you.
- * </p>
- * <p>
- *   The <code>main(Array[String])</code> method of this class will create a
- *   local registry on the default port, create a registry delegate and bind
- *   it under the well known name that you chose in the wizard
- *   (<code>DELEGATE_NAME</code>).
- * </p>
- *
- * @author Genady Beryozkin, rmi-info@genady.net
- */
-
-object RMIDelegate {
-  /** The name under which the delegate appears in the registry. */
-  val DELEGATE_NAME = "foo"
-
-  /** This method retrieves the registry delegate from a registry that is
-   *  running on a remote host.
-   */
-  @throws(classOf[RemoteException])
-  def getRegistryDelegate(remoteHost: String): Registry =
-    getRegistryDelegate(remoteHost, Registry.REGISTRY_PORT)
-
-  /** This method retrieves the registry delegate from a registry that is
-   * running on a remote host.
-   */
-  @throws(classOf[RemoteException])
-  def getRegistryDelegate(remoteHost: String, remotePort: Int): Registry = {
-    val registry = LocateRegistry.getRegistry(remoteHost, remotePort)
-    (registry lookup DELEGATE_NAME).asInstanceOf[Registry]
-  }
-
-  /** A simple way to run a registry and bind a registry delegate. */
-  @throws(classOf[RemoteException])
-  def main(args: Array[String]) {
-    var port = Registry.REGISTRY_PORT
-
-    if (args.length > 0) {
-      if (args(0) equals "-help") {
-        println("Usage: rmidelegate <options> <port>")
-        sys.exit(0)
-      }
-      try {
-        port = args(0).toInt
-      } catch {
-        case e: NumberFormatException =>
-          println("Usage: rmidelegate <options> <port>")
-          sys.exit(1)
-      }
-      val opts = args filter (_ startsWith "-J-D")
-      for (opt <- opts) {
-        val x = opt.substring(4) split "="
-        if (x.length == 2) System.setProperty(x(0), x(1))
-        else System.setProperty(x(0), "")
-      }
-    }
-
-    if (System.getSecurityManager() == null)
-      System.setSecurityManager(new RMISecurityManager() {
-        override def checkPermission(p: java.security.Permission) {}
-      })
-
-
-    val registry = LocateRegistry.createRegistry(port)
-    registry.bind(DELEGATE_NAME, new RegistryDelegate())
-
-    do {
-      try {
-        Thread.sleep(Long.MaxValue)
-      } catch {
-       case e: InterruptedException =>
-         // do nothing
-       case e: Throwable =>
-         e.printStackTrace()
-         sys.exit(1)
-      }
-    } while (true)
-  }
-
-}
-
-/** Create a delegate for a user provided registry instance. The registry is
- *  assumed to be a local registry, as there is no point in creating a delegate
- *  for a remote registry.
- */
-class RegistryDelegate(reg: Registry) extends UnicastRemoteObject with Registry {
-  /** The local registry */
-  private val localRegistry: Registry = reg
-
-  /** Create a delegate for a local registry that is bound to the default
-   *  local port (1099).
-   */
-  def this() = this(LocateRegistry.getRegistry())
-
-  /** Create a delegate for a local registry that is bound to a user
-   *  specified port.
-   */
-  def this(port: Int) = this(LocateRegistry.getRegistry(port))
-
-  @throws(classOf[RemoteException])
-  def bind(name: String, obj: Remote) { localRegistry.bind(name, obj) }
-
-  @throws(classOf[RemoteException])
-  def list(): Array[String] = localRegistry.list()
-
-  @throws(classOf[RemoteException])
-  def lookup(name: String): Remote = localRegistry.lookup(name)
-
-  @throws(classOf[RemoteException])
-  def rebind(name: String, obj: Remote) { localRegistry.rebind(name, obj) }
-
-  @throws(classOf[RemoteException])
-  def unbind(name: String) { localRegistry.unbind(name) }
-
-}
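
The scaladoc above already sketches the server side; the client side of the same scenario, in Scala, looks roughly like the following. The host name and the "someName" binding are hypothetical, while "foo" is the DELEGATE_NAME used by the removed code.

import java.rmi.Remote
import java.rmi.registry.{LocateRegistry, Registry}
import java.rmi.server.UnicastRemoteObject

object DelegateClient {
  trait Ping extends Remote {
    @throws(classOf[java.rmi.RemoteException]) def ping(): String
  }

  def main(args: Array[String]): Unit = {
    val remoteRegistry = LocateRegistry.getRegistry("remotehost.mycompany.com")
    // the delegate is itself bound under a well-known name in the remote registry
    val delegate = remoteRegistry.lookup("foo").asInstanceOf[Registry]
    val stub = UnicastRemoteObject.exportObject(new Ping { def ping() = "pong" }, 0)
    // a direct remoteRegistry.rebind(...) from this host would be rejected;
    // the delegate may rebind because it runs on the registry's own host
    delegate.rebind("someName", stub)
  }
}
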
diff --git a/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala b/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala
deleted file mode 100644
index ff6c8f6..0000000
--- a/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: RemoteBooleanRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{BooleanRef, RemoteRef}
-
-/**
- * The trait <code>RemoteBooleanRef</code> provides a remote interface
- * for manipulating boolean references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteBooleanRef {
-  def elem_=(value: Boolean)
-  def elem: Boolean
-}
-
-/**
- * The class <code>RemoteBooleanRefImpl</code> implements a remote (global)
- * boolean reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.BooleanRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no-longer-referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteBooleanRefImpl(name: String, x: BooleanRef)
-extends UnicastRemoteObject with RemoteBooleanRef with Unreferenced {
-  def elem_=(value: Boolean) { x.elem = value }
-  def elem: Boolean = x.elem
-  override def toString() = x.elem.toString
-  def unreferenced() {
-    Debug.info("[RemoteBooleanRefImpl] unreferenced: "+this)
-    RemoteRef.unbind(name)
-  }
-}
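
The same shape repeats in each of the Remote*Ref files that follow (Byte, Char, Double, Float, and so on): an @remote trait exposing elem as a getter/setter pair, plus an Impl class extending UnicastRemoteObject that forwards to the local *Ref cell. Stripped of the Debug and registry bookkeeping, the forwarding pattern is simply the sketch below; RemoteCell is an illustrative name, not the removed API.

import java.rmi.Remote
import java.rmi.server.UnicastRemoteObject

trait RemoteCell extends Remote {
  @throws(classOf[java.rmi.RemoteException]) def get: Int
  @throws(classOf[java.rmi.RemoteException]) def set(v: Int): Unit
}

// forwards remote calls to a local mutable value, as Remote*RefImpl does for *Ref
class RemoteCellImpl(private var value: Int) extends UnicastRemoteObject with RemoteCell {
  def get: Int = value
  def set(v: Int): Unit = { value = v }
}
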
diff --git a/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala b/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala
deleted file mode 100644
index 335f0d9..0000000
--- a/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: RemoteByteRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{ByteRef, RemoteRef}
-
-/**
- * The trait <code>RemoteByteRef</code> provides a remote interface
- * for manipulating byte references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteByteRef {
-  def elem_=(value: Byte)
-  def elem: Byte
-}
-
-/**
- * The class <code>RemoteByteRefImpl</code> implements a remote (global)
- * byte reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.ByteRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no-longer-referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteByteRefImpl(name: String, x: ByteRef)
-extends UnicastRemoteObject with RemoteByteRef with Unreferenced {
-  def elem_=(value: Byte) { x.elem = value }
-  def elem: Byte = x.elem
-  override def toString() = x.elem.toString
-  def unreferenced() {
-    Debug.info("[RemoteByteRefImpl] unreferenced: "+this)
-    RemoteRef.unbind(name)
-  }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala b/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala
deleted file mode 100644
index e0f48eb..0000000
--- a/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: RemoteCharRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{CharRef, RemoteRef}
-
-/**
- * The trait <code>RemoteCharRef</code> provides a remote interface
- * for manipulating character references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteCharRef {
-  def elem_=(value: Char)
-  def elem: Char
-}
-
-/**
- * The class <code>RemoteCharRefImpl</code> implements a remote (global)
- * character reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.CharRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no-longer-referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteCharRefImpl(name: String, x: CharRef)
-extends UnicastRemoteObject with RemoteCharRef with Unreferenced {
-  def elem_=(value: Char) { x.elem = value }
-  def elem: Char = x.elem
-  override def toString() = x.elem.toString
-  def unreferenced() {
-    Debug.info("[RemoteCharRefImpl] unreferenced: "+this)
-    RemoteRef.unbind(name)
-  }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala b/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala
deleted file mode 100644
index 2e13195..0000000
--- a/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: RemoteDoubleRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{DoubleRef, RemoteRef}
-
-/**
- * The trait <code>RemoteDoubleRef</code> provides a remote interface for manipulating double references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteDoubleRef {
-  def elem_=(value: Double)
-  def elem: Double
-}
-
-/**
- * The class <code>RemoteDoubleRefImpl</code> implements a remote (global)
- * double reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.DoubleRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no-longer-referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteDoubleRefImpl(name: String, x: DoubleRef)
-extends UnicastRemoteObject with RemoteDoubleRef with Unreferenced {
-  def elem_=(value: Double) { x.elem = value }
-  def elem: Double = x.elem
-  override def toString() = x.elem.toString
-  def unreferenced() {
-    Debug.info("[RemoteDoubleRefImpl] unreferenced: "+this)
-    RemoteRef.unbind(name)
-  }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala b/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala
deleted file mode 100644
index f4e61ea..0000000
--- a/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: RemoteFloatRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{FloatRef, RemoteRef}
-
-/**
- * The trait <code>RemoteFloatRef</code> provides a remote interface
- * for manipulating float references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteFloatRef {
-  def elem_=(value: Float)
-  def elem: Float
-}
-
-/**
- * The class <code>RemoteFloatRefImpl</code> implements a remote (global)
- * float reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.FloatRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteFloatRefImpl(name: String, x: FloatRef)
-extends UnicastRemoteObject with RemoteFloatRef with Unreferenced {
-  def elem_=(value: Float) { x.elem = value }
-  def elem: Float = x.elem
-  override def toString() = x.elem.toString
-  def unreferenced() {
-    Debug.info("[RemoteIntFloatImpl] unreferenced: "+this)
-    RemoteRef.unbind(name)
-  }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteGC.scala b/src/detach/library/scala/runtime/remoting/RemoteGC.scala
deleted file mode 100644
index 393c031..0000000
--- a/src/detach/library/scala/runtime/remoting/RemoteGC.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: RemoteGC.scala 17547 2009-04-21 13:56:28Z michelou $
-
-package scala.runtime.remoting
-
-import java.lang.ref.{Reference, WeakReference, ReferenceQueue}
-import java.rmi.{NoSuchObjectException, Remote}
-import java.rmi.server.UnicastRemoteObject
-import scala.collection.mutable
-
-/**
- *
- *  @author Stephane Micheloud
- *  @version 1.0
- */
-// Adapted from scala.actors.ActorGC
-private [runtime] class RemoteGC {
-
-  private val refQueue = new ReferenceQueue[Remote]
-  private val refSet = new mutable.HashSet[Reference[T] forSome { type T <: Remote }]
-
-  private var liveRefs = 0
-
-  def newRef(a: Remote) = synchronized {
-    refSet += new WeakReference(a, refQueue)
-    liveRefs += 1
-    info("added object reference \""+a+"\" ("+liveRefs+")")
-  }
-
-  def gc() = synchronized {
-    info("GC called ("+liveRefs+")")
-    // check for unreachable object references
-    def drain() {
-      val wr = refQueue.poll
-      if (wr != null) {
-        val msg = try {
-          UnicastRemoteObject.unexportObject(wr.get, true/*force*/)
-          "removed object reference"
-        }
-        catch {
-          case e: NoSuchObjectException =>
-            "object already unbound"
-        }
-        info(msg+" ("+liveRefs+")")
-        liveRefs -= 1
-        refSet -= wr
-        // continue draining
-        drain()
-      }
-    }
-    drain()
-  }
-
-  def allClosed: Boolean = synchronized {
-    liveRefs <= 0
-  }
-
-  private def info(msg: String) { Debug.info("[RemoteGC] "+msg) }
-}
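
At its core, the RemoteGC class removed above uses the standard WeakReference plus ReferenceQueue idiom: keep a weak reference per exported object and drain the queue to learn which ones became unreachable. A generic, RMI-free sketch of just that idiom follows; the names are illustrative, and because System.gc() is only a hint the demo may legitimately report zero collected references.

import java.lang.ref.{Reference, ReferenceQueue, WeakReference}
import scala.collection.mutable

object RefQueueDemo {
  private val queue = new ReferenceQueue[AnyRef]
  private val refs  = mutable.Set.empty[Reference[_ <: AnyRef]]

  def track(x: AnyRef): Unit = refs += new WeakReference(x, queue)

  // poll() hands back references whose referents have been collected
  def drain(): Int = {
    var removed = 0
    var r = queue.poll()
    while (r != null) {
      refs -= r
      removed += 1
      r = queue.poll()
    }
    removed
  }

  def main(args: Array[String]): Unit = {
    track(new Array[Byte](1 << 20))
    System.gc()
    Thread.sleep(100)
    println("collected: " + drain() + ", still tracked: " + refs.size)
  }
}
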
diff --git a/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala b/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala
deleted file mode 100644
index b14403f..0000000
--- a/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: RemoteIntRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{IntRef, RemoteRef}
-
-/**
- * The trait <code>RemoteIntRef</code> provides a remote interface
- * for manipulating integer references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteIntRef {
-  def elem_=(value: Int)
-  def elem: Int
-}
-
-/**
- * The class <code>RemoteIntRefImpl</code> implements a remote (global)
- * integer reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.IntRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no-longer-referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteIntRefImpl(name: String, x: IntRef)
-extends UnicastRemoteObject with RemoteIntRef with Unreferenced {
-  def elem_=(value: Int) { x.elem = value }
-  def elem: Int = x.elem
-  override def toString() = x.elem.toString
-  def unreferenced() {
-    Debug.info("[RemoteIntRefImpl] unreferenced: "+this)
-    RemoteRef.unbind(name)
-  }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala b/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala
deleted file mode 100644
index da83491..0000000
--- a/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: RemoteLongRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{LongRef, RemoteRef}
-
-/**
- * The trait <code>RemoteLongRef</code> provides a remote interface
- * for manipulating long integer references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteLongRef {
-  def elem_=(value: Long)
-  def elem: Long
-}
-
-/**
- * The class <code>RemoteLongRefImpl</code> implements a remote (global)
- * long integer reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.LongRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no-longer-referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteLongRefImpl(name: String, x: LongRef)
-extends UnicastRemoteObject with RemoteLongRef with Unreferenced {
-  def elem_=(value: Long) { x.elem = value }
-  def elem: Long = x.elem
-  override def toString() = x.elem.toString
-  def unreferenced() {
-    Debug.info("[RemoteLongRefImpl] unreferenced: "+this)
-    RemoteRef.unbind(name)
-  }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala b/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala
deleted file mode 100644
index 9f27b26..0000000
--- a/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: RemoteObjectRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{ObjectRef, RemoteRef}
-
-/**
- * The trait <code>RemoteObjectRef</code> provides a remote interface
- * for manipulating object references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteObjectRef {
-  def elem_=(value: AnyRef)
-  def elem: AnyRef
-}
-
-/**
- * The class <code>RemoteObjectRefImpl</code> implements a remote (global)
- * object reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.ObjectRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no-longer-referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteObjectRefImpl(name: String, x: ObjectRef)
-extends UnicastRemoteObject with RemoteObjectRef with Unreferenced {
-  def elem_=(value: AnyRef) { x.elem = value }
-  def elem: AnyRef = x.elem
-  override def toString() = x.elem.toString
-  def unreferenced() {
-    Debug.info("[RemoteObjectRefImpl] unreferenced: "+this)
-    RemoteRef.unbind(name)
-  }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala b/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala
deleted file mode 100644
index 2ced9db..0000000
--- a/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id: RemoteShortRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{ShortRef, RemoteRef}
-
-/**
- * The trait <code>RemoteShortRef</code> provides a remote interface
- * for manipulating short integer references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteShortRef {
-  def elem_=(value: Short)
-  def elem: Short
-}
-
-/**
- * The class <code>RemoteShortRefImpl</code> implements a remote (global)
- * short integer reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.ShortRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteShortRefImpl(name: String, x: ShortRef)
-extends UnicastRemoteObject with RemoteShortRef with Unreferenced {
-  def elem_=(value: Short) { x.elem = value }
-  def elem: Short = x.elem
-  override def toString() = x.elem.toString
-  def unreferenced() {
-    Debug.info("[RemoteShortRefImpl] unreferenced: "+this)
-    RemoteRef.unbind(name)
-  }
-}
diff --git a/src/detach/plugin/scala/tools/detach/Detach.scala b/src/detach/plugin/scala/tools/detach/Detach.scala
deleted file mode 100644
index f9a3d80..0000000
--- a/src/detach/plugin/scala/tools/detach/Detach.scala
+++ /dev/null
@@ -1,1190 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stephane Micheloud
- */
-
-package scala.tools.detach
-
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
-import scala.tools.nsc._
-import scala.tools.nsc.plugins.PluginComponent
-import scala.tools.nsc.symtab.Flags._
-import scala.tools.nsc.transform._
-
-abstract class Detach extends PluginComponent
-                         with Transform with TypingTransformers {
-  import global._
-  import definitions._
-
-  /** the following two members override abstract members in Transform */
-  val phaseName: String = "detach"
-
-  protected def newTransformer(unit: CompilationUnit): Transformer =
-    new DetachTransformer(unit)
-
-  // set with the `-P:detach:enable` plugin option (see DetachPlugin) */
-  protected[detach] var isEnabled = false
-
-  private class DetachTransformer(unit: CompilationUnit)
-  extends TypingTransformer(unit) {
-    private val DEBUG = settings.debug.value
-    private val PROXY_PREFIX  = "proxy$"  // local proxy objects
-    private val PROXY_SUFFIX  = "$proxy"  // top-level proxy classes
-    private val DETACH_SUFFIX = "$detach" // detached closures
-    private val IMPL_SUFFIX   = "Impl"    // follows Java convention
-
-    private val nme_bind = newTermName("bind")
-    private val nme_unbind = newTermName("unbind")
-    private val nme_unreferenced = newTermName("unreferenced")
-
-    private val Functions = FunctionClass.toList // see method isFuncType
-
-    private val RemoteClass =
-      definitions.getClass("java.rmi.Remote")
-
-    private val UIDClass =
-      definitions.getClass("java.rmi.server.UID")
-
-    private val UnicastRemoteObjectClass =
-      definitions.getClass("java.rmi.server.UnicastRemoteObject")
-
-    private val UnreferencedClass =
-      definitions.getClass("java.rmi.server.Unreferenced")
-
-    private val DetachModule =
-      definitions.getModule("scala.remoting.detach")
-
-    private val DebugModule =
-      definitions.getModule("scala.remoting.Debug")
-
-    private val RemoteRefModule =
-      definitions.getModule("scala.runtime.RemoteRef")
-
-    private val ThreadModule =
-      definitions.getModule("java.lang.Thread")
-
-    private val UnicastRemoteObjectModule =
-      definitions.getModule("java.rmi.server.UnicastRemoteObject")
-
-    private val remoteAnnotationInfo = {
-      val RemoteAttr: Symbol = definitions.getClass("scala.remote")
-      AnnotationInfo(RemoteAttr.tpe, List(), List())
-    }
-
-    private val serializableAnnotationInfo =
-      AnnotationInfo(SerializableAttr.tpe, List(), List())
-/*
-    private val throwsAnnotationInfo = {
-      val RemoteExceptionClass = definitions.getClass("java.rmi.RemoteException")
-      val ThrowsAttr = definitions.getClass("scala.throws")
-      AnnotationInfo(
-        ThrowsAttr.tpe,
-        List(Literal(Constant(RemoteExceptionClass.tpe))),
-        List()
-      )
-    }
-*/
-    // todo: see generation of Java version UID
-    private def serialVersionUIDAnnotationInfo(clazz: Symbol) = {
-      def genHash(sym: Symbol): Long = {
-        val sym1 = if (sym.isConstructor) sym.owner else sym
-        val ts = sym.tpe match {
-          case MethodType(params, rt) => (params map (_.tpe)) ::: List(rt)
-          case t => List(t)
-        }
-        val hashes = sym1.nameString.hashCode ::
-          (ts map (_.typeSymbol.nameString.hashCode))
-        (0L /: hashes)((acc, h) => acc ^ h)
-      }
-      val hashes = for (sym <- clazz.info.decls.toList) yield genHash(sym)
-      val uid: Long = (0L /: hashes) ((acc, h) => acc * 41 + h)
-      val serialVersionUIDAttr = definitions.getClass("scala.SerialVersionUID")
-      AnnotationInfo(
-        serialVersionUIDAttr.tpe,
-        List(Literal(Constant(uid))),
-        List()
-      )
-    }
-
-    private def elems(suffix: String): List[(Symbol, Symbol)] =
-      for (clazz <- ObjectRefClass :: refClass.valuesIterator.toList) yield {
-        val name = "scala.runtime.remoting.Remote" + clazz.name + suffix
-        (clazz, definitions.getClass(name))
-      }
-    private val remoteRefClass = immutable.HashMap(elems(""): _*)
-    private val remoteRefImpl = immutable.HashMap(elems("Impl"): _*)
-
-    private val proxyInterfaceDefs = new mutable.HashMap[Symbol/*owner*/, ListBuffer[Tree]]
-    private val detachedClosureApply = new mutable.HashMap[Tree, Apply]
-
-    private type SymSet = mutable.HashSet[Symbol]
-    private val capturedObjects = new mutable.HashMap[Symbol/*clazz*/, SymSet]
-    private val capturedFuncs = new mutable.HashMap[Symbol/*clazz*/, SymSet]
-    private val capturedCallers = new mutable.HashMap[Symbol/*clazz*/, SymSet]
-    private val capturedThisClass = new mutable.HashMap[Symbol, Symbol]
-
-    private val proxies = new mutable.HashMap[
-      Symbol, //clazz
-      (Symbol, Symbol, mutable.HashMap[Symbol, Symbol]) //iface, impl, accessor map
-    ]
-    def toInterface(clazz: Symbol) = proxies(clazz)._1
-    private val classdefs = new mutable.HashMap[Symbol/*clazz*/, ClassDef]
-    // detachedClosure gathers class definitions containing a "detach" apply
-    private val detachedClosure = new mutable.HashMap[Symbol/*clazz*/, ClassDef]
-
-    /** <p>
-     *    The method <code>freeObjTraverser.traverse</code> is invoked
-     *    in the method <code>DetachPlugin.transformUnit</code> in order to
-     *    gather information about objects referenced inside a detached
-     *    closure and which will be accessed remotely through object proxies.
-     *  </p>
-     *  <p>
-     *    Object proxies are generated in method <code>mkClosureApply</code>
-     *    and their definitions are generated in method <code>genProxy</code>.
-     *  </p>
-     */
-    private val freeObjTraverser = new Traverser {
-      def symSet(f: mutable.HashMap[Symbol, SymSet], sym: Symbol): SymSet = f.get(sym) match {
-        case Some(ss) => ss
-        case None => val ss = new mutable.HashSet[Symbol]; f(sym) = ss; ss
-      }
-      def getClosureApply(tree: Tree): Apply = tree match {
-        case Block(_, expr) => getClosureApply(expr)
-        case Typed(expr, _) => getClosureApply(expr)
-        case apply @ Apply(Select(_, _), _) => apply // sel="<init>" or some "f$0"
-        case Apply(fun, _)  => getClosureApply(fun)
-        case _ =>
-          throw new Error("getClosureApply: unhandled case " + tree)
-      }
-      def isFuncType(tp: Type): Boolean = tp match {
-        case TypeRef(pre, sym, args) =>
-          Functions contains sym.tpe.typeSymbol
-        case _ =>
-          false
-      }
-      def isOuterMember(sym: Symbol): Boolean =
-        sym.isOuterAccessor ||
-        sym.name.endsWith(nme.OUTER/*, nme.OUTER.length*/)
-      override def traverse(tree: Tree) {
-        val sym = tree.symbol
-        val owner =
-          if (currentOwner.isModule) currentOwner
-          else currentOwner.enclClass
-        tree match {
-          case cdef @ ClassDef(_, _, _, impl) =>
-            classdefs(sym) = cdef
-            super.traverse(impl)
-            if (detachedClosure contains sym) {
-              detachedClosure(sym) = cdef
-              symSet(capturedObjects, sym) += capturedThisClass(sym)
-            }
-
-          case Apply(Select(qual, _), List(arg))
-          if (qual.tpe <:< DetachModule.tpe) =>
-            assert(isFuncType(arg.tpe))//debug
-            val t = getClosureApply(arg)
-            if (!t.fun.symbol.isConstructor)
-              unit.error(t.pos, "detach inapplicable for " +t.fun.symbol)
-            val sym = t.fun.symbol.owner
-            capturedThisClass(sym) = owner
-            symSet(capturedFuncs, sym)
-            detachedClosureApply(tree) = t
-            classdefs get sym match {
-              case None =>
-                detachedClosure(sym) = null // set later in case ClassDef
-              case Some(cdef) =>
-                detachedClosure(sym) = cdef
-                symSet(capturedObjects, sym) += capturedThisClass(sym)
-            }
-            super.traverse(arg)
-
-          case Select(qual @ This(_), name)
-          if qual.symbol.isModuleClass && !qual.symbol.isPackageClass =>
-            val qsym = qual.symbol
-            symSet(capturedFuncs, owner) += sym
-            symSet(capturedObjects, owner) += qsym
-
-          case Select(qual, name)
-          if (qual.hasSymbol &&
-              (sym.owner != owner) &&
-              !(sym.ownerChain contains ScalaPackageClass) &&
-              !(sym.owner hasFlag JAVA)) =>
-            val qsym = qual.symbol
-            symSet(capturedFuncs, owner) += sym
-            if (qsym.isStaticModule && !qsym.isPackage) {
-              //println("*****1******* capturedObjects("+owner+") += "+qsym)
-              symSet(capturedObjects, owner) += qsym
-            }
-            else if (!isOuterMember(qsym) && !(qsym isNestedIn owner)) {
-              //println("*****3******* capturedCallers("+sym+") += "+qsym)
-              symSet(capturedCallers, sym) += qsym
-            }
-
-          case _ =>
-            super.traverse(tree)
-        }
-      }
-    } //freeObjTraverser
-
-    private val valueClass = immutable.HashMap(
-      (for ((sym, ref) <- refClass.toList) yield (ref, sym)): _*
-    ) + (ObjectRefClass -> ObjectClass)
-
-    private def toValueClass(tp: Type): Type =
-      if (isRefClass(tp)) valueClass(tp.typeSymbol).tpe
-      else if (proxies contains tp.typeSymbol) toInterface(tp.typeSymbol).tpe
-      else tp
-
-    private def isRefClass(tp: Type): Boolean =
-      (tp ne null) &&
-      ((refClass.valuesIterator contains tp.typeSymbol) || (ObjectRefClass eq tp.typeSymbol))
-
-    private def isRemoteRefClass(tp: Type): Boolean =
-      (tp ne null) && (remoteRefClass.valuesIterator contains tp.typeSymbol)
-
-    private def mkRemoteRefClass(tp: Type): Type = {
-      assert(isRefClass(tp))
-      val tp1 = remoteRefClass(tp.typeSymbol)
-      typeRef(tp1.typeConstructor.prefix, tp1, Nil) // after erasure, no type anymore!
-    }
-
-    class TreeOuterSubstituter(from: List[Symbol], to: List[Symbol]) extends Traverser {
-      if (DEBUG)
-        println("\nTreeOuterSubstituter:"+
-                "\n\tfrom="+from.mkString(",")+
-                "\n\tto="+to.mkString(","))
-      val substMap = new mutable.HashMap[Symbol, Symbol]
-      override def traverse(tree: Tree) {
-        def subst(from: List[Symbol], to: List[Symbol]) {
-          if (!from.isEmpty)
-            if (tree.symbol.tpe == from.head.tpe) {
-              if (DEBUG)
-                println("\nTreeOuterSubstituter\n\tsym="+tree.symbol+
-                        ", tpe="+tree.symbol.tpe+
-                        "\n\towner="+tree.symbol.owner)
-              tree.symbol updateInfo to.head.tpe
-            }
-            else tree.symbol.tpe match {
-              case MethodType(params, restp) =>
-                for (p <- params if p.tpe == from.head.tpe) {
-                  p updateInfo to.head.tpe
-                }
-                if (restp == from.head.tpe) {
-                  if (DEBUG)
-                    println("\nTreeOuterSubstituter(2)\n\tsym="+tree.symbol+
-                            ", tpe="+tree.symbol.tpe+
-                            ", owner="+tree.symbol.owner)
-                  tree.symbol updateInfo MethodType(params, to.head.tpe)
-                }
-              case _ =>
-                subst(from.tail, to.tail)
-            }
-        }
-        def isOuter(sym: Symbol): Boolean =
-          sym.isOuterAccessor ||
-          sym.name.endsWith(nme.OUTER/*, nme.OUTER.length*/)
-        if (tree.hasSymbol && isOuter(tree.symbol)) subst(from, to)
-        super.traverse(tree)
-      }
-    }
-
-    // based on class Trees.TreeTypeSubstituter
-    private class TreeTypeRefSubstituter(clazz: Symbol) extends Traverser {
-      override def traverse(tree: Tree) {
-        val sym = tree.symbol
-        if (tree.hasSymbol && isRefClass(sym.tpe) &&
-           (sym.owner.enclClass == clazz) &&
-           (sym.isValueParameter || sym.hasFlag(PARAMACCESSOR))) {
-          sym setInfo mkRemoteRefClass(sym.tpe)
-          tree.tpe = sym.tpe
-        }
-        if (isRefClass(tree.tpe))
-          tree.tpe = mkRemoteRefClass(tree.tpe)
-        super.traverse(tree)
-      }
-      override def apply[T <: Tree](tree: T): T = super.apply(tree)
-    }
-
-    private class TreeOwnerSubstituter(from: Symbol, to: Symbol) extends Traverser {
-      def substType(sym: Symbol): Type = {
-        def subst(tpe: Type): Type = tpe match {
-          case MethodType(params, restp) =>
-            println("TreeOwnerSubstituter[1]: tpe="+tpe+
-                    ", tpe.typeSymbol="+tpe.typeSymbol+", sym="+sym)//debug
-            for (p <- params if p.tpe == from.tpe) {
-              println("TreeOwnerSubstituter[2]: sym="+sym+
-                      ", sym.owner="+sym.owner+", p.tpe="+p.tpe)//debug
-              p updateInfo to.tpe
-            }
-            MethodType(params, subst(restp))
-          case _ =>
-            if (sym.owner == from && tpe == from.tpe) {
-              println("TreeOwnerSubstituter[3]: sym="+sym+
-                      ", owner="+sym.owner+", tpe="+tpe)//debug
-              to.tpe
-            } else tpe
-        }
-        subst(sym.tpe)
-      }
-      val map = new mutable.HashMap[Symbol, Symbol]
-      override def traverse(tree: Tree) {
-        if (tree.hasSymbol && tree.symbol != NoSymbol) {
-          val sym = tree.symbol
-          if (sym.owner == from) {
-            val sym1 = map get sym match {
-              case Some(s) => s
-              case None => val s = sym.cloneSymbol(to); map(sym) = s; s
-            }
-            tree setSymbol sym1
-          }
-          val sym1 = tree.symbol
-          val tp = substType(sym1)
-          if (tp != sym1.tpe) {
-            if (sym1.owner == to)
-              println("\n%%%%%1%%%%%%% TreeOwnerSubst: tree="+tree+", sym1="+sym1+", sym1.owner="+sym1.owner)//debug
-            sym1 setInfo tp
-            tree setSymbol sym1
-          }
-        }
-        super.traverse(tree)
-      }
-      //override def apply[T <: Tree](tree: T): T = super.apply(tree/*.duplicate*/)
-    }
-
-    private var inConstructorFlag = 0L
-
-    private def isCaptured(clazz: Symbol, sym: Symbol): Boolean =
-      if (capturedFuncs contains clazz) {
-        //log("**1** isCaptured: clazz="+clazz+", sym="+sym+", ")
-        capturedFuncs(clazz) contains sym
-      }
-      else {
-        //log("**2** isCaptured: clazz="+clazz+", sym="+sym)
-        sym.isMethod && !sym.isConstructor
-      }
-
-    private class TreeAccessorSubstituter(clazz: Symbol, objs: List[Symbol], proxySyms: List[Symbol])
-    extends Transformer {
-      def removeAccessors(tree: Tree): Tree = tree match {
-        case Apply(fun, _) =>
-          removeAccessors(fun)
-        case Select(qual, _) if tree.hasSymbol && tree.symbol.isOuterAccessor =>
-          removeAccessors(qual)
-        case _ =>
-          tree
-      }
-      if (DEBUG)
-        println("\nTreeAccessorSubstituter: "+
-                "\n\tobjs="+objs.mkString(",")+
-                "\n\tproxies="+proxySyms.mkString(","))
-      override def transform(tree: Tree): Tree = tree match {
-        // transforms field assignment $outer.i$1.elem=..
-        // into setter $outer.i$1_=(..)
-        case Assign(lhs @ Select(qual1 @ Select(qual, name), name1), rhs)
-        if qual1.hasSymbol && !qual1.symbol.isPrivateLocal &&
-           isRemoteRefClass(qual1.tpe) =>
-          if (DEBUG)
-            println("\nTreeAccessorSubstituter: Assign1\n\tqual1="+qual1+", sel.tpe="+lhs.tpe+
-                    "\n\tqual1.tpe="+qual1.tpe+", name1="+name1+
-                    "\n\tqual.tpe="+qual.tpe+", tree.tpe="+tree.tpe)//debug
-          val iface = toInterface(qual.tpe.typeSymbol)
-          val sym = iface.tpe.decls lookup nme.getterToSetter(name)
-          atPos(tree.pos)(Apply(
-            Select(super.transform(qual), sym) setType lhs.tpe,
-            List(super.transform(rhs))
-          ) setType tree.tpe)
-
-        // transforms local assignment this.x$1.elem=..
-        // into setter method this.x$1_=(..)
-        case Assign(lhs @ Select(qual, name), rhs)
-        if qual.hasSymbol && qual.symbol.isPrivateLocal &&
-           isRemoteRefClass(qual.tpe) =>
-          if (DEBUG)
-            println("\nTreeAccessorSubstituter: Assign2"+
-                    "\n\tqual="+qual+", qual.tpe="+qual.tpe+
-                    "\n\tname="+name)
-          // substitute the 'elem' member of the reference class with
-          // the corresponding setter method of the remote reference class.
-          val qual1 = super.transform(qual)
-          val sym = qual1.tpe.decls lookup nme.getterToSetter(name)
-          val fun = gen.mkAttributedSelect(qual1, sym)
-          Apply(fun, List(super.transform(rhs))) setType lhs.tpe
-
-        case Assign(Select(qual, name), rhs)
-        if qual.hasSymbol && (objs contains qual.symbol) =>
-          val sym = qual.symbol
-          val proxy = proxySyms(objs indexOf sym)
-          if (DEBUG)
-            println("\nTreeAccessorSubstituter: Assign3"+
-                    "\n\tqual="+qual+", qual.tpe="+qual.tpe+
-                    "\n\tproxy="+proxy+", proxy.tpe="+proxy.tpe+
-                    "\n\tname="+name)//debug
-          // substitute the member accessor of the enclosing class with
-          // the corresponding setter method of the detached interface.
-          val iface = toInterface(sym)
-          val substSymbols = new TreeSymSubstituter(
-            sym.info.decls.toList filter { isCaptured(sym, _) },
-            iface.info.decls.toList)
-          substSymbols(Apply(
-                         Select(Ident(proxy), nme.getterToSetter(name)),
-                         List(super.transform(rhs))))
-
-        // transforms setter invocation this.i$1_=(..)
-        // into setter invocation $outer.i$1_=(..)
-        case Apply(Select(qual @ This(_), name), args)
-        if (objs contains qual.symbol) && nme.isSetterName(name) =>
-          val proxy = proxySyms(objs indexOf qual.symbol)
-          if (DEBUG)
-            println("\nTreeAccessorSubstituter: Apply"+
-                    "\n\tqual="+qual+", qual.tpe="+qual.tpe+
-                    "\n\tproxy="+proxy+", proxy.tpe="+proxy.tpe+
-                    "\n\tname="+name+", decoded="+name.decode)
-          val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(proxy.owner), proxy)
-          val sym1 = proxy.info.decls lookup name.decode
-          val fun = gen.mkAttributedSelect(qual1, sym1)
-          Apply(fun, args map (super.transform(_))) setType tree.tpe
-
-        // transforms access to field this.name$1
-        // into invocation of getter method $outer.name$1()
-        case Select(qual @ This(_), name)
-        if objs contains qual.symbol =>
-          val proxy = proxySyms(objs indexOf qual.symbol)
-          if (DEBUG)
-            println("\nTreeAccessorSubstituter: Select"+
-                    "\n\tqual="+qual+", qual.tpe="+qual.tpe+
-                    "\n\tproxy="+proxy+", proxy.tpe="+proxy.tpe+
-                    "\n\tname="+name+", decoded="+name.decode)
-          val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(proxy.owner), proxy)
-          val sym1 = proxy.info.decls lookup nme.originalName(name) //name
-          gen.mkAttributedSelect(qual1, sym1)
-
-        // transforms field $outer.name$1 into getter method $outer.name$1()
-        case Select(qual @ Select(_, name1), name)
-        if qual.hasSymbol && name1.endsWith(nme.OUTER/*, nme.OUTER.length*/) &&
-           !tree.symbol.isMethod =>
-          if (DEBUG)
-            println("\nTreeAccessorSubstituter: Select0\n\tqual="+qual+
-                    ", qual.tpe="+qual.tpe+", name="+name)//debug
-          val sym = qual.symbol
-          val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(sym.owner), sym)
-          val iface = toInterface(qual.tpe.typeSymbol)
-          val sym1 = iface.tpe.decls lookup name
-          val fun = gen.mkAttributedSelect(qual1, sym1)
-          Apply(fun, List()) setType tree.tpe
-
-        case Select(apply @ Apply(fun @ Select(qual, _), _), name)
-        if fun.symbol.isOuterAccessor =>
-          val tsym = fun.symbol.tpe.resultType.typeSymbol
-          val funcs = capturedFuncs(clazz).toList filter (sym =>
-            (tsym.ownerChain contains sym.owner) || (tsym isSubClass sym.owner))
-          if (DEBUG)
-            println("\nTreeAccessorSubstituter: Select1\n\tfun="+fun+
-                    ",\n\tfun.tpe="+fun.tpe+", name="+name+
-                    ",\n\tfuncs="+funcs)//debug
-          funcs find (tree.symbol.==) match {
-            case Some(sym) =>
-              val qual1 =
-                if (currentOwner.enclClass isNestedIn clazz) apply
-                else removeAccessors(qual)
-              val name1 =
-                (if (tsym isSubClass qual1.tpe.typeSymbol) ""
-                 else tsym.fullName('$')+"$")+sym.name
-              val iface = toInterface(qual1.tpe.typeSymbol)
-              val sym1 = iface.tpe.decls lookup name1
-              gen.mkAttributedSelect(qual1, sym1)
-            case None =>
-              super.transform(tree)
-          }
-
-        // transforms field access $outer.i$1.elem
-        // into invocation of getter method $outer.i$1()
-        case Select(qual @ Select(qual1, name1), name)
-        if qual.hasSymbol && !qual.symbol.isPrivateLocal &&
-           isRemoteRefClass(qual.tpe) =>
-          if (DEBUG)
-            println("\nTreeAccessorSubstituter: Select2\n\tqual="+qual+
-                    "\n\tqual.tpe="+qual.tpe+", tree.tpe="+tree.tpe)//debug
-          val iface = toInterface(qual.symbol.owner)
-          val sym1 = iface.tpe.decls lookup name1
-          val fun = gen.mkAttributedSelect(qual1, sym1)
-          Apply(fun, List()) setType tree.tpe
-
-        // transforms local access this.i$1.elem
-        // into invocation of getter method this.i$1()
-        case Select(qual, name)
-        if qual.hasSymbol && qual.symbol.isPrivateLocal &&
-           isRemoteRefClass(qual.tpe) =>
-          if (DEBUG)
-            println("\nTreeAccessorSubstituter: Select3\n\tqual="+qual+
-                    "\n\tqual.tpe="+qual.tpe)//debug
-          val sym = qual.tpe.decls lookup name
-          val fun = gen.mkAttributedSelect(qual, sym)
-          Apply(fun, List()) setType tree.tpe
-
-        case Select(qual, name)
-        if qual.hasSymbol && (objs contains qual.symbol) =>
-          if (DEBUG)
-            println("\nTreeAccessorSubstituter: Select4\n\tqual="+qual+
-                    ", qual.tpe="+qual.tpe+", name="+name)//debug
-          val sym = qual.symbol
-          val proxy = proxySyms(objs indexOf sym)
-          // substitute the accessor of a member of the enclosing class
-          // with the corresponding accessor of the detached interface
-          val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(proxy.owner), proxy)
-          val iface = toInterface(sym)
-          val sym1 = iface.tpe.decls lookup name.decode
-          gen.mkAttributedSelect(qual1, sym1)
-
-        case _ =>
-          super.transform(tree)
-      }
-      def apply[T <: Tree](tree: T): T = transform(tree).asInstanceOf[T]
-    } // TreeAccessorSubstituter
-/*
-    private class TreeNameSubstituter(from: Name, to: Symbol) extends Transformer {
-      override def transform(tree: Tree): Tree = tree match {
-        case Super(qual, mix) if tree.symbol.name == from =>
-          Super(qual, mix) setSymbol to
-        case This(name) if name == from =>
-          This(to.name) setSymbol to
-        case _ =>
-          super.transform(tree)
-      }
-      def apply[T <: Tree](tree: T): T = transform(tree).asInstanceOf[T]
-    }
-*/
-    /** <p>
-     *    Given the closure definition (generated by previous phases)
-     *  </p><pre>
-     *    class $anonfun$1 extends Object with Function1 {
-     *      def this($outer: C, x$1: Int): $anonfun$1 = ..
-     *      def apply(x: Int): Int = x + this.$outer.x() + this.x$1
-     *    }</pre>
-     *  <p>
-     *    the method <code>mkClosureDef</code> transforms the above code
-     *    to the following:
-     *  </p><pre>
-     *    @serializable
-     *    class $anonfun$1$detach extends Object with Function1 {
-     *      def this($outer: C$proxy, x$1: Int): $anonfun$1$detach = ..
-     *      def apply(x: Int): Int = x + this.$outer.x() + this.x$1
-     *    }</pre>
-     *  <p>
-     *    In particular, it performs the following operations:
-     *    1) add constructor parameter <code>proxy_n</code> to access
-     *       proxy of the enclosing class
-     *    2) change reference types in constructor arguments to type
-     *       <code>Remote_type_Ref</code>
-     *    3) change occurrences of <code>this</code> identifier to
-     *       <code>proxy_n</code> in template code
-     *    4) change reference types of local value definitions associated
-     *       to updated constructor arguments to type <code>Remote_type_Ref</code>
-     *  </p>
-     *
-     *  @param  clazz the symbol of the original closure definition
-     *  @return the typed class definition for the detached closure.
-     */
-    private def mkClosureDef(clazz: Symbol): Tree = {
-      val cdef = detachedClosure(clazz)
-      val name = cdef.symbol.name
-      if (name endsWith DETACH_SUFFIX)
-        return cdef // closure already detached
-
-      clazz.name = encode(clazz.name.decode + DETACH_SUFFIX)
-      clazz addAnnotation serialVersionUIDAnnotationInfo(clazz)
-      clazz addAnnotation serializableAnnotationInfo
-
-      val thiz = capturedThisClass(clazz)
-      val (List(outer), captured) =
-        capturedObjects(clazz).toList partition (thiz.==)
-
-      /** <p>
-       *    Method <code>updateConstructorParams</code> updates the class
-       *    symbol of the detached closure as follows:
-       *    1) it appends the "$detach" suffix to the class name,
-       *    2) it adds the "@serializable" annotation to class attributes,
-       *    3) it adds a parameter symbol for each element of "captured".
-       *  </p>
-       *  <p>
-       *    and also updates the signature of the constructor symbol:
-       *    1) it adds a parameter type for each element of "captured",
-       *    2) it changes reference types to remote reference types.
-       *  </p>
-       */
-      def updateConstructorParams(vparams: List[ValDef]): List[Symbol] = {
-        val hasOuter = !vparams.isEmpty && (vparams.head.symbol.tpe == thiz.tpe)
-        val ctor = clazz.primaryConstructor
-        val params = (for (sym <- captured) yield {
-          val iface = toInterface(sym)
-          val param = ctor.newValueParameter(ctor.pos, freshProxyName)
-            .setFlag(SYNTHETIC)
-            .setInfo(iface.tpe)
-          param.owner = ctor
-          param
-        }) ::: (
-          if (hasOuter) Nil
-          else {
-            val iface = toInterface(thiz)
-            val param = ctor.newValueParameter(ctor.pos, nme.OUTER)
-              .setFlag(SYNTHETIC)
-              .setInfo(iface.tpe)
-            param.owner = ctor
-            List(param)
-          }
-        )
-        val tp = ctor.tpe match {
-          case mt @ MethodType(params1, restp) =>
-            val params2 = if (hasOuter) {
-              val iface = toInterface(params1.head.tpe.typeSymbol)
-              ctor.newSyntheticValueParam(iface.tpe) :: params1.tail
-            }
-            else params1
-            for (p <- params2 if isRefClass(p.tpe)) {
-              p updateInfo mkRemoteRefClass(p.tpe)
-            }
-            MethodType(params ::: params2, restp)
-          case tp =>
-            tp
-        }
-        ctor updateInfo tp
-        params
-      } //updateConstructorParams
-
-      /**
-       */
-      def updateConstructorDef(ctor: DefDef): (List[Tree], List[Symbol]) = {
-        val DefDef(mods, name, tparams, List(vparams), tpt, rhs) = ctor
-        val newparams = updateConstructorParams(vparams)
-        val vparams0 = newparams map (sym => ValDef(sym) setType sym.tpe)
-        val ctorDef = treeCopy.DefDef(ctor, mods, name, tparams, List(vparams0 ::: vparams), tpt, rhs)
-        val accessors = for (sym <- newparams) yield {
-          val acc = clazz.newValue(sym.pos, sym.name)
-            .setFlag(SYNTHETIC | PARAMACCESSOR | PRIVATE | LOCAL)
-            .setInfo(sym.tpe)
-          clazz.info.decls enter acc
-          acc
-        }
-        val accDefs = accessors map (sym => ValDef(sym) setType sym.tpe)
-        (ctorDef :: accDefs, accessors)
-      } //updateConstructorDef
-
-      val impl = cdef.impl
-      val (List(ctor: DefDef), body1) = impl.body partition (t =>
-        t.isDef && t.symbol.isPrimaryConstructor)
-      val (defs, accessors) = updateConstructorDef(ctor)
-      val impl1 = treeCopy.Template(impl, impl.parents, impl.self, defs ::: body1)
-      val (from, to) = /*List.unzip*/(
-        for (obj <- captured ::: List(outer))
-        yield (obj, toInterface(obj))
-      ) unzip
-      //val substNames = new TreeNameSubstituter(name, clazz)
-      val substTypeRefs = new TreeTypeRefSubstituter(clazz)
-      val substAccs = new TreeAccessorSubstituter(clazz, from, accessors)
-      val substTypes = new TreeOuterSubstituter(from, to)
-      val substSyms = new TreeSymSubstituter(from, to)
-      val t1 = ClassDef(clazz, substSyms(substTypes(substAccs(substTypeRefs(impl1)))))
-      //println("mkClosureDef: t(untyped)=\n"+nodeToString(t1))
-      val t = localTyper typed t1
-      detachedClosure(clazz) = t.asInstanceOf[ClassDef]
-      //println("mkClosureDef: t(typed)=\n"+nodeToString(t))
-      t
-    } //mkClosureDef
-
-    /** <p>
-     *   Given a class <code>C</code> with member <code>x</code>
-     *   which is (remotely) referenced from inside a detached closure:
-     *  </p><pre>
-     *    class C extends .. {
-     *      var x: Int
-     *    }</pre>
-     *  <p>
-     *    the method <code>addProxy</code> generates the following two
-     *    proxy definitions (used later in method <code>mkClosureApply</code>
-     *    to generate object proxies):
-     *  </p><pre>
-     *    trait C$proxy extends java.rmi.Remote {
-     *      def x(): Int
-     *      def x_=(x$1: Int): Unit
-     *    }
-     *    class C$proxyImpl
-     *    extends java.rmi.server.UnicastRemoteObject
-     *    with C$proxy with java.rmi.server.Unreferenced {
-     *      def this(x$0: String, x$1: C): C$ProxyImpl = ..
-     *      def x(): Int = this.x$1.x()
-     *      def x_=(x$1: Int): Unit = this.x$1.x_=(x$1)
-     *      def unreferenced(): Unit = RemoteRef.unbind(this.x$0)
-     *    }</pre>
-     */
-    private def addProxy(closure: Symbol, clazz: Symbol) {
-      // the Sun RMI compiler crashes with the error message
-      // "error: An error has occurred in the compiler; ..." with trace
-      // "sun.tools.java.CompilerError: getInnerClassField" if the
-      // generated proxy class does not belong to the top-level scope.
-      val proxyOwner = clazz.toplevelClass.owner //clazz.owner
-
-      if (DEBUG)
-        println("\nadd proxy for "+clazz+" in "+proxyOwner)//debug
-
-      val (proxyIntf, proxyImpl, proxyMap) = proxies get clazz match {
-        case Some(proxy) =>
-          proxy
-        case None =>
-          val iface =
-            proxyOwner.newClass(clazz.pos, encode(clazz.name.decode + PROXY_SUFFIX))
-          iface.sourceFile = clazz.sourceFile
-          iface setFlag (ABSTRACT | TRAIT | INTERFACE) // Java interface
-          val iparents = List(ObjectClass.tpe, RemoteClass.tpe)
-          iface setInfo ClassInfoType(iparents, newScope, iface)
-          // methods must throw RemoteException
-          iface addAnnotation remoteAnnotationInfo
-
-          val iclaz =
-            proxyOwner.newClass(clazz.pos, encode(iface.name.decode + IMPL_SUFFIX))
-          iclaz.sourceFile = clazz.sourceFile
-          iclaz setFlag (SYNTHETIC | FINAL)
-          // Variant 1: rebind/unbind
-          val cparents = List(UnicastRemoteObjectClass.tpe, iface.tpe, UnreferencedClass.tpe)
-          // Variant 2: un-/exportObject
-          //val cparents = List(ObjectClass.tpe, iface.tpe, UnreferencedClass.tpe)
-          iclaz setInfo ClassInfoType(cparents, newScope, iclaz)
-          val proxy = (iface, iclaz, new mutable.HashMap[Symbol, Symbol])
-          proxies(clazz) = proxy
-          proxy
-      }
-
-      def addAccessors() {
-        def mkGetter(sym: Symbol, name: String): Symbol = {
-          val getter = if (sym.isMethod) {
-            val meth = sym.cloneSymbol(proxyIntf)
-            meth.name = name
-            val tsym = meth.tpe.resultType.typeSymbol
-            if (proxies contains tsym)
-              meth updateInfo MethodType(List(), toInterface(tsym).tpe)
-            meth
-          }
-          else {
-            val meth = proxyIntf.newMethod(sym.pos, nme.getterName(sym.originalName))
-            meth setFlag ACCESSOR
-            meth setInfo MethodType(List(), toValueClass(sym.tpe))
-            meth
-          }
-          getter setFlag ABSTRACT
-          getter resetFlag FINAL
-          getter
-        }
-        def mkSetter(sym: Symbol): Symbol = {
-          val setter = proxyIntf.newMethod(sym.pos, nme.getterToSetter(sym.originalName))
-          setter setFlag (sym.flags & ~(PRIVATE | LOCAL) | ACCESSOR | lateDEFERRED)
-          val param = setter.newSyntheticValueParam(toValueClass(sym.tpe))
-          setter setInfo MethodType(List(param), UnitClass.tpe)
-          setter setFlag ABSTRACT
-          setter resetFlag FINAL
-          setter
-        }
-        def create(owner: Symbol, clazz: Symbol) {
-          val funcs = capturedFuncs(owner).toList
-          funcs find (_.isConstructor) match {
-            case Some(sym) if capturedFuncs contains sym.owner =>
-              create(sym.owner, clazz)
-            case _ =>
-          }
-          val newfuncs = funcs filterNot (proxyMap.valuesIterator.toList contains)
-          val (members, others) = newfuncs partition (clazz isSubClass _.owner)
-          val outers = others filter (sym =>
-            (clazz isNestedIn sym.owner) && clazz.isClass)
-          for (sym <- outers) {
-            val sym1 = mkGetter(sym, sym.fullName('$'))
-            proxyIntf.info.decls enter sym1
-            proxyMap(sym1) = sym
-          }/*
-          for (sym <- outers if capturedCallers contains sym;
-               caller <- capturedCallers(sym)) {
-            val sym1 = mkGetter(sym, caller.nameString+'$'+sym.nameString)
-            if (clazz.isAnonymousClass)
-              println("[2] clazz="+clazz+", sym1="+sym1)
-            proxyIntf.info.decls enter sym1
-            proxyMap(sym1) = sym
-          }*/
-          for (sym <- members if !sym.isConstructor) {
-            val sym1 = mkGetter(sym, sym.originalName.decode)
-            proxyIntf.info.decls enter sym1
-            proxyMap(sym1) = sym
-          }
-          for (sym <- members if isRefClass(sym.tpe)) {
-            val sym1 = mkSetter(sym)
-            proxyIntf.info.decls enter sym1
-            proxyMap(sym1) = sym
-          }
-        }
-        create(closure, clazz)
-      }
-
-      addAccessors
-      if (DEBUG) {
-        val xs = proxyMap.keysIterator.toList
-        println("\tadded "+proxyIntf+
-                "\n\twith "+xs.mkString(", ")+" ["+xs.length+"]")
-      }
-    } //addProxy
-
-    def genProxy(clazz: Symbol) {
-      val (proxyIntf, proxyImpl, proxyMap) = proxies(clazz)
-
-      // generate proxy interface
-      val ifaceBody = proxyMap.keysIterator.toList map { DefDef(_, EmptyTree) }
-      val ifaceParents =
-        proxyIntf.info.parents map (t => TypeTree(t) setPos proxyIntf.pos)
-      val ifaceTmpl = Template(ifaceParents, emptyValDef, ifaceBody)
-      val ifaceDef = localTyper typed ClassDef(proxyIntf, ifaceTmpl)
-
-      // generate proxy implementation
-      // Variant 1: rebind/unbind
-      val param1 =
-        proxyImpl.newValueParameter(proxyImpl.pos, freshName("x$"))
-         .setFlag(SYNTHETIC | PARAMACCESSOR | PRIVATE | LOCAL)
-         .setInfo(StringClass.tpe)
-      proxyImpl.info.decls enter param1
-
-      val param2 =
-        proxyImpl.newValueParameter(proxyImpl.pos, freshName("x$"))
-          .setFlag(SYNTHETIC | PARAMACCESSOR | PRIVATE | LOCAL)
-          .setInfo(clazz.tpe)
-      proxyImpl.info.decls enter param2
-
-      val unreferenced =
-        proxyImpl.newMethod(proxyImpl.pos, nme_unreferenced)
-          .setInfo(MethodType(List(), UnitClass.tpe))
-      proxyImpl.info.decls enter unreferenced
-
-      val proxyBody =
-        DefDef(unreferenced, List(List()), Block(
-          List(Apply( //stats
-            Select(gen.mkAttributedRef(DebugModule), "info"),
-            List(Apply(
-              Select(Literal(Constant("unreferenced: ")), "$plus"),
-              // Variant 1: rebind/unbind
-              List(Select(This(proxyImpl), param1.name))
-              // Variant 2: un-/exportObject
-              //List(This(proxyImpl))
-            ))
-          )),
-          Apply( //expr
-            Select(gen.mkAttributedRef(RemoteRefModule), nme_unbind),
-            // Variant 1: rebind/unbind
-            List(Select(This(proxyImpl), param1.name))
-            // Variant 2: un-/exportObject
-            //List(This(proxyImpl))
-          )
-        )) :: (
-        for (sym <- proxyIntf.info.decls.toList) yield {
-          val sym1 = sym.cloneSymbol(proxyImpl)
-          sym1 resetFlag (ABSTRACT | DEFERRED | lateDEFERRED)
-          proxyImpl.info.decls enter sym1
-          DefDef(sym1, {
-            val sym2 = proxyMap(sym)
-            var t = Select(This(proxyImpl), param2)
-            var outerAcc =
-              if (sym2.owner isSubClass param2) None
-              else param2.info.decls.toList find (_.isOuterAccessor)
-            while (!outerAcc.isEmpty) {
-              t = Select(t, outerAcc.get)
-              val outerClass = outerAcc.get.tpe.resultType.typeSymbol
-              outerAcc =
-                if (sym2.owner == outerClass) None
-                else outerClass.info.decls.toList find (_.isOuterAccessor)
-            }
-            val sel = Select(t, sym2)
-            if (sym2.isMethod) {
-              Apply(sel, sym1.paramss(0) map { Ident(_) })
-            }
-            else if (isRefClass(sym2.tpe)) {
-              val sel1 = Select(sel, nme.elem)
-              if (sym1.tpe.paramTypes.length == 0) sel1
-              else Assign(sel1, Ident(sym1.paramss(0)(0)))
-            }
-            else
-              sel
-          })
-        })
-      val proxyParents =
-        proxyImpl.info.parents map (t => TypeTree(t) setPos proxyImpl.pos)
-      val proxyTmpl = Template(proxyParents,
-                           emptyValDef, NoMods,
-              // Variant 1: rebind/unbind
-              /*vparamss*/ List(List(ValDef(param1), ValDef(param2))),
-              // Variant 2: un-/exportObject
-              ///*vparamss*/ List(List(ValDef(param2))),
-                 /*argss*/ List(List()), proxyBody, NoPosition)
-      val proxyDef = localTyper typed ClassDef(proxyImpl, proxyTmpl)
-
-      // remember definitions to be added by transformStats
-      val proxyOwner = proxyIntf.owner
-      if (! (proxyInterfaceDefs contains proxyOwner))
-        proxyInterfaceDefs(proxyOwner) = new ListBuffer
-      proxyInterfaceDefs(proxyOwner) += ifaceDef
-      proxyInterfaceDefs(proxyOwner) += proxyDef
-    } //genProxy
-
-    private def freshName(s: String): Name =
-      unit.fresh.newName(s)
-
-    private def freshProxyName: Name =
-      unit.fresh.newName(PROXY_PREFIX)
-
-    /** <p>
-     *   Given a detached closure applied in some environment consisting
-     *   of an enclosing class <code>C</code> and some local variables
-     *   <code>x$1</code> (immutable) and <code>y$1</code> (mutable):
-     *  </p><pre>
-     *    scala.remoting.detach.apply({
-     *      (new $anonfun$1(C.this, x$1, y$1): Function1)
-     *    })</pre>
-     *  <p>
-     *    the above code is transformed to the following block:
-     *  </p><pre>
-     *    {
-     *      val proxy$1: C$Proxy =
-     *        RemoteRef.bind("C/proxy$1", new C$ProxyImpl(C.this))
-     *      val proxy$2: RemoteIntRef =
-     *        RemoteRef.bind("C/proxy$2", new RemoteIntRefImpl(y$1))
-     *      (new $anonfun$1detach(proxy$1, x$1, proxy$2): Function1)
-     *    }
-     *  </pre>
-     */
-    private def mkClosureApply(tree: Tree): Tree = {
-      val apply @ Apply(fun, args) = detachedClosureApply(tree)
-      assert(fun.symbol.isConstructor, fun.symbol+" is not a constructor")//debug
-      val clazz = apply.tpe.typeSymbol
-      val thiz = capturedThisClass(clazz)
-      val cdef = mkClosureDef(clazz)
-      val uid = localTyper typed {
-        val sym = currentOwner.newValue(tree.pos, freshName("uid$"))
-          .setFlag(SYNTHETIC)
-          .setInfo(StringClass.tpe)
-        val rhs = Apply(Select(
-          Apply(
-            Select(New(TypeTree(UIDClass.tpe)), nme.CONSTRUCTOR),
-            List()
-          ),
-          "toString"
-        ), List())
-        ValDef(sym, rhs)
-      }
-      def cast(tree: Tree, tpe: Type): Tree =
-        Apply(
-          TypeApply(
-            Select(tree, Object_asInstanceOf),
-            List(TypeTree(tpe))
-          ),
-          List()
-        )
-
-      def mkProxy(csym: Symbol): ValDef = {
-        val (iface, proxy, _) = proxies(csym)
-        val sym = currentOwner.newValue(csym.pos, freshProxyName)
-          .setFlag(SYNTHETIC)
-          .setInfo(iface.tpe)
-        val bind = Select(gen.mkAttributedRef(RemoteRefModule), nme_bind)
-        val name = Apply(
-          Select(Literal(Constant(sym.fullName('/')+"$")), String_+),
-          List(Ident(uid.symbol))
-        )
-        val thiz =
-          if (csym.isModule) gen.mkAttributedIdent(csym)
-          else gen.mkAttributedThis(csym)
-        val args = List(name,
-                        Apply(Select(New(TypeTree(proxy.tpe)), nme.CONSTRUCTOR),
-                              // Variant 1: rebind/unbind
-                              List(name, thiz)))
-                              // Variant 2: un-/exportObject
-                              //List(thiz)))
-        val rhs = cast(Apply(bind, args), iface.tpe)
-        ValDef(sym, rhs)
-      }
-
-      def mkObjProxies: List[ValDef] = {
-        val (outer, captured) =
-           capturedObjects(clazz).toList partition (thiz.==)
-        (captured ::: outer) map mkProxy
-      }
-
-      def mkArgProxies: Map[Symbol, ValDef] = {
-        def retRefs(t: Tree): List[Tree] = t match {
-          case Apply(fun, args) =>
-            args flatMap retRefs
-          case id @ Ident(_) =>
-            if (isRefClass(id.tpe)) List(id) else Nil
-          case Template(_, _, body) =>
-            body flatMap retRefs
-          case New(tpt) =>
-            retRefs(tpt)
-          case thiz @ This(_) =>
-            if (isRefClass(thiz.tpe)) List(thiz) else Nil
-          case _ =>
-            throw new Error("Internal error: " + t.getClass)
-        }
-        new immutable.HashMap[Symbol, ValDef] ++ (
-          for (variable <- retRefs(apply)) yield {
-            val param = variable.symbol
-            assert(isRefClass(param.tpe), param)
-            val proxy = currentOwner.newValue(param.pos, freshProxyName)
-              .setFlag(SYNTHETIC)
-              .setInfo(mkRemoteRefClass(param.tpe))
-            val bind = Select(gen.mkAttributedRef(RemoteRefModule), nme_bind)
-            //val name = Literal(Constant(proxy.fullName('/')))
-            val name = Apply(
-              Select(Literal(Constant(proxy.fullName('/')+"$")), String_+),
-              List(Ident(uid.symbol))
-            )
-            val ts = param.tpe.typeSymbol
-            val args = List(name,
-                            Apply(
-                              Select(New(TypeTree(remoteRefImpl(ts).tpe)), nme.CONSTRUCTOR),
-                              // Variant 1: rebind/unbind
-                              List(name, variable)))
-                              // Variant 2: un-/exportObject
-                              //List(variable)))
-            val rhs = cast(Apply(bind, args), remoteRefClass(ts).tpe)
-            (param, ValDef(proxy, rhs))
-          }
-        )
-      } //mkArgProxies
-
-      /** <p>
-       *   Method <code>mkClosureInstance</code> updates the list of actual
-       *   parameters passed to the closure instance.
-       *  </p>
-       */
-      def mkClosureInstance(objProxies: List[ValDef],
-                            argProxies: Map[Symbol, ValDef]): Tree = {
-        fun.tpe = fun.symbol.tpe
-        val args0 = objProxies map (tree => Ident(tree.symbol))
-        val hasOuter = !args.isEmpty && (args.head.symbol.tpe == thiz.tpe)
-        val args1 = (if (hasOuter) args.tail else args) map (arg =>
-          argProxies get arg.symbol match {
-            case Some(t) => Ident(t.symbol)
-            case None => arg
-          }
-        )
-        if (DEBUG)
-          println("\nmkClosureInstance:\n\targs0="+args0+"\n\targs1="+args1)
-        val t = Typed(
-                  Apply(fun, args0 ::: args1),
-                  //TypeTree(clazz.info.parents.tail.head) //interface (2.7.x)
-                  TypeTree(clazz.info.parents.head) //interface (2.8.x)
-                )
-        localTyper typed t
-      } //mkClosureInstance
-
-      val objProxies = mkObjProxies
-      val argProxies = mkArgProxies
-      val stats = uid :: objProxies ::: argProxies.valuesIterator.toList
-      val expr = mkClosureInstance(objProxies, argProxies)
-      localTyper typed Block(stats, expr)
-    } //mkClosureApply
-
-    override def transform(tree: Tree): Tree = {
-      def withInConstructorFlag(inConstructorFlag: Long)(f: => Tree): Tree = {
-        val savedInConstructorFlag = this.inConstructorFlag
-        this.inConstructorFlag = inConstructorFlag
-        val t = f
-        this.inConstructorFlag = savedInConstructorFlag
-        t
-      }
-      if (!isEnabled) return tree
-      tree match {
-        case ClassDef(mods, name, tparams, impl) =>
-          val tree1 = super.transform(tree)
-          if (!reporter.hasErrors && (capturedThisClass contains tree1.symbol))
-            mkClosureDef(tree1.symbol)
-          else
-            tree1
-
-        case Apply(Select(_, _), _) =>
-          val tree1 = super.transform(tree)
-          if (!reporter.hasErrors && (detachedClosureApply contains tree1))
-            atPos(tree1.pos)(mkClosureApply(tree1))
-          else
-            tree1
-
-        case Template(_, _, _) =>
-          withInConstructorFlag(0) { super.transform(tree) }
-
-        case _ =>
-          super.transform(tree)
-      }
-    }
-
-    /** Transform statements and add detached definitions to them. */
-    override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
-      val stats1 = super.transformStats(stats, exprOwner)
-      val newDefs = {
-        val buf = new ListBuffer[Tree]
-        if (proxyInterfaceDefs contains currentOwner)
-          buf ++= proxyInterfaceDefs(currentOwner).toList
-        buf.toList
-      }
-      if (newDefs.isEmpty) stats1 else stats1 ::: newDefs
-    }
-
-    private def genProxies() {
-      def printDebugInfo() {
-        println("\ncompilation unit : "+unit)
-        for ((sym, _) <- detachedClosure) {
-          println("closure to detach: "+sym+" (owner: "+sym.owner+")")
-          println("captured this    : "+capturedThisClass(sym))
-          val objs = capturedObjects get sym match {
-            case Some(ss) => ss.toList
-            case None => Nil
-          }
-          println("captured objects : "+objs.mkString(", ")+" ["+objs.length+"]")
-        }
-        println("\ncalled functions :")
-        for (sym <- capturedFuncs.keysIterator) {
-          val xs = capturedFuncs(sym).toList map (s => {
-            val callers = capturedCallers get s match {
-              case Some(ss) => "|"+ss.toList.mkString(",")
-              case None => ""
-            }
-            s+"("+s.owner.name+callers+")"
-          })
-          println("\t"+sym+" -> "+xs.mkString(", ")+" ["+xs.length+"]")
-        }
-      }
-      def printDebugInfo2() {
-        println("\nproxy classes    :")
-        for (sym <- proxies.keysIterator)
-          println("\t"+sym+"("+sym.tpe+") -> "+proxies(sym))
-      }
-      if (DEBUG)
-        printDebugInfo
-      for ((closure, _) <- detachedClosure;
-           captured <- capturedObjects(closure))
-        addProxy(closure, captured)
-      if (DEBUG)
-        printDebugInfo2
-      for (sym <- proxies.keysIterator)
-        genProxy(sym)
-    } //genProxies
-
-    /** <p>
-     *    Method <code>transformUnit</code> performs three successive operations:
-     *  </p>
-     *  <ol>
-     *    <li>it first gathers information about free objects and detached
-     *      closures;</li>
-     *    <li>it then adds proxies for free objects;</li>
-     *    <li>finally, it transforms detached closures (both definition and
-     *       instantiation).</li>
-     *  </ol>
-     */
-    override def transformUnit(unit: CompilationUnit) {
-      freeObjTraverser.traverse(unit.body)
-      if (!reporter.hasErrors) genProxies
-      super.transformUnit(unit)
-    }
-  }
-
-}
-
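For orientation, the rewrites implemented above start from a source-level pattern like the
following minimal sketch. It assumes the scala.remoting.detach entry point referenced in the
mkClosureApply documentation; the class and member names are invented for illustration.

    class C {
      var x: Int = 0                          // member referenced remotely -> C$proxy / C$proxyImpl
      def mkRemoteClosure(y: Int) = {
        var z = 0                             // captured mutable local -> Remote*Ref proxy
        scala.remoting.detach { (i: Int) => i + x + y + z }
      }
    }

After lambdalift the closure becomes an $anonfun class capturing C.this and the locals, which is
the shape that freeObjTraverser, mkClosureDef and mkClosureApply above detect and rewrite.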
diff --git a/src/detach/plugin/scala/tools/detach/DetachPlugin.scala b/src/detach/plugin/scala/tools/detach/DetachPlugin.scala
deleted file mode 100644
index c6e18b7..0000000
--- a/src/detach/plugin/scala/tools/detach/DetachPlugin.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stephane Micheloud
- */
-
-package scala.tools.detach
-
-import scala.tools.nsc.{Global, Phase}
-import scala.tools.nsc.plugins.{Plugin, PluginComponent}
-
-class DetachPlugin(val global: Global) extends Plugin {
-  import global._
-
-  val name = "detach"
-  val description = "Perform detaching of remote closures"
-
-  object detach extends {
-    val global = DetachPlugin.this.global
-    val runsAfter = List("lambdalift")
-    override val runsBefore = List("constructors")
-  } with Detach
-
-  val components = List[PluginComponent](detach)
-
-  def setEnabled(flag: Boolean) { detach.isEnabled = flag }
-
-  override def processOptions(options: List[String], error: String => Unit) = {
-    var enabled = false
-    for (option <- options) {
-      if (option == "enable") {
-        enabled = true
-      } else {
-        error("Option not understood: "+option)
-      }
-    }
-    setEnabled(enabled)
-  }
-
-  override val optionsHelp: Option[String] =
-    Some("  -P:detach:enable               Enable detaching of remote closures")
-}
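Per the optionsHelp string above, the plugin had to be switched on explicitly for a compilation
run. A hypothetical invocation (the plugin jar name is assumed) would look like:

    scalac -Xplugin:scala-detach-plugin.jar -P:detach:enable MyRemoteApp.scala

Without the enable option the component stays registered but isEnabled remains false, so the
Detach transformer above returns every tree unchanged.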
diff --git a/src/detach/plugin/scalac-plugin.xml b/src/detach/plugin/scalac-plugin.xml
deleted file mode 100644
index 6c8600e..0000000
--- a/src/detach/plugin/scalac-plugin.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-<plugin>
-  <name>detach</name>
-  <classname>scala.tools.detach.DetachPlugin</classname>
-</plugin>
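scalac locates a compiler plugin by reading this descriptor from the root of the plugin jar and
instantiating the class it names. A sketch of the expected jar layout (jar name assumed):

    scala-detach-plugin.jar
        scalac-plugin.xml
        scala/tools/detach/DetachPlugin.class
        scala/tools/detach/...              (remaining compiled plugin classes)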
diff --git a/src/eclipse/README.md b/src/eclipse/README.md
index 44dd3d8..5311651 100644
--- a/src/eclipse/README.md
+++ b/src/eclipse/README.md
@@ -1,55 +1,88 @@
 Eclipse project files
 =====================
 
-Import all projects inside Eclipse by choosing File/Import Existing Projects
-and navigate to src/eclipse. Check all projects and click ok.
+The following points describe how to get Scala to run in Eclipse:
 
-IMPORTANT
-=========
+0. To get Scala to work inside Eclipse Kepler, you need to build the Scala IDE yourself,
+because there is currently no update site provided for the newest development version
+of Scala. To do so, enter the following commands one after the other:
 
-1. You need to define a `path variable` inside Eclipse. Define SCALA_BASEDIR in 
-Preferences/General/Workspace/Linked Resources. The value should be the absolute 
-path to your scala checkout. All paths in project files are relative to this one,
+        git clone https://github.com/scala-ide/scala-ide.git
+        cd scala-ide
+        ./build-all.sh clean install -Pscala-2.11.x -Peclipse-kepler -DskipTests
+
+  After that you have an update site in `scala-ide/org.scala-ide.sdt.update-site/target/site`, which needs to be
+installed in Eclipse.
+
+0. The second step is to build Scala itself in order to get all necessary
+dependencies. To do that, simply enter
+
+        ant
+
+  and wait until it is completed. To verify that everything has been built successfully, execute the REPL that can be found
+at `scala/build/pack/bin/scala`.
+
+0. Import all projects inside of Eclipse by choosing `File/Import Existing Projects`
+and navigate to `scala/src/eclipse`. Check all projects and click ok.
+
+0. You need to define a `path variable` inside Eclipse. Define `SCALA_BASEDIR` in 
+`Preferences/General/Workspace/Linked Resources`. The value should be the absolute 
+path to your Scala checkout. All paths in the project files are relative to this one,
 so nothing will work before you do so.
-Additionally, we start using Maven dependencies (e.g. junit) so you need to define
-`classpath variable` inside Eclipse. Define `M2_REPO` in Java/Build Path/Classpath Variables
-to point to your local Maven repository (e.g. $HOME/.m2/repository).
 
-2. The Eclipse Java compiler does not allow certain calls to restricted APIs in the
+  The same `SCALA_BASEDIR` variable needs to be defined as a `classpath variable` in
+`Java/Build Path/Classpath Variables`.
+
+  Additionally, we now use Maven dependencies (e.g. `JUnit`), so you need to define another
+`classpath variable` inside Eclipse. Define `M2_REPO` in `Java/Build Path/Classpath Variables`
+to point to your local Maven repository (e.g. `$HOME/.m2/repository`).
+
+  Lastly, the JRE used by Eclipse needs to know the path to the `JLine` library, which is used by the REPL.
+To set the JAR file, navigate to `Java/Installed JREs`, select the default JRE, press `Edit/Add External JARs...`
+and enter the path to JLine, which is located at `SCALA_BASEDIR/build/deps/repl/jline-2.11.jar` (`SCALA_BASEDIR` cannot be entered
+literally; it needs to be replaced with its absolute path).
+
+0. The Eclipse Java compiler does not allow certain calls to restricted APIs in the
 JDK. The Scala library uses such APIs, so you'd see this error:
 
         Access restriction: The method compareAndSwapObject(Object, long, Object, Object)
         from the type Unsafe is not accessible due to restriction on required library.
-You can *fix* it by allowing calls to restricted APIs in `Java=>Compiler=>Errors/Warnings=>Deprecated and Restricted API` 
-settings.
-
-3. The IDE guesses the Scala library version by looking for `library.properties` inside 
-the library jar. The `scala-library` project does not have such a file, so you will see
-an error about incompatible libraries. You can work around it by adding a `library.properties`
-inside `src/library` with the following contents:
 
-        #Mon, 04 Jun 2012 02:08:56 +0200
-        version.number=2.10.0-20120603-141530-b34313db72
-        maven.version.number=2.10.0-SNAPSHOT
-        osgi.version.number=2.10.0.v20120603-141530-b34313db72
-        copyright.string=Copyright 2002-2012 LAMP/EPFL
+  You can *fix* it by allowing calls to restricted APIs in `Java/Compiler/Errors/Warnings/Deprecated and Restricted API` 
+settings.
 
-4. Project files are tracked by Git, so adding them to `.gitignore` won't prevent them
+0. Project files are tracked by Git, so adding them to `.gitignore` won't prevent them
 from being shown as dirty in `git status`. You can still ignore them by telling Git to
 consider them unchanged:
 
         git update-index --assume-unchanged `find src/eclipse -iname .classpath -or -iname .project`
 
-If you want to go back to normal (for instance, to commit your changes to project files), run:
+  If you want to go back to normal (for instance, to commit your changes to project files), run:
 
         git update-index --no-assume-unchanged `find src/eclipse -iname .classpath -or -iname .project`
 
+If it doesn’t compile
+=====================
+
+The likely reason is that the build path of the imported projects isn’t correct. This can happen, for instance,
+when the [versions.properties](https://github.com/scala/scala/blob/master/versions.properties) file is updated
+and the Eclipse `.classpath` files of the different projects aren’t updated accordingly. The fix is simple: manually inspect
+the build path of each project and make sure the versions of the declared dependencies are in sync with the versions
+declared in the `versions.properties` file. If they aren’t, update them manually and, when done, don’t forget to share
+your changes via a pull request.
+(We are aware this is cumbersome. If you feel like scripting the process, pull requests are of course welcome.)
+
+Launching & Debugging scalac
+============================
+
+Read [here](http://scala-ide.org/docs/tutorials/scalac-trunk/index.html#Launching_and_Debugging_scalac).
+
 DETAILS
 =======
 
-The compiler project depends on the library, reflect, asm and fjbg projects. The
+The compiler project depends on the library, reflect, and asm projects. The
 builder will take care of the correct ordering, and changes in one project will
 be picked up by the dependent projects.
 
-The output directory is set to be build/quick, so the runner scripts in quick
-work as they are (run an ant build to have them generated once)
\ No newline at end of file
+The output directory is set to be `build/quick`, so the runner scripts in quick
+work as they are (they are generated after an ant build).
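As a worked example of how the two Eclipse variables are resolved: with SCALA_BASEDIR pointing at
your checkout and M2_REPO at $HOME/.m2/repository, entries used by the project files below expand
as follows (paths shown for illustration only):

    SCALA_BASEDIR/lib/ant/ant.jar            ->  <checkout>/lib/ant/ant.jar
    M2_REPO/junit/junit/4.10/junit-4.10.jar  ->  $HOME/.m2/repository/junit/junit/4.10/junit-4.10.jar

If a dependency bump in versions.properties is not mirrored in these entries, the affected project
will not build until its .classpath is updated by hand, as described above.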
diff --git a/src/eclipse/continuations-library/.classpath b/src/eclipse/continuations-library/.classpath
deleted file mode 100644
index b3ca4ee..0000000
--- a/src/eclipse/continuations-library/.classpath
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" path="library"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
-	<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
-	<classpathentry kind="output" path="build-quick-continuations-library"/>
-</classpath>
diff --git a/src/eclipse/continuations-library/.project b/src/eclipse/continuations-library/.project
deleted file mode 100644
index f3a53a3..0000000
--- a/src/eclipse/continuations-library/.project
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>continuations-library</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.scala-ide.sdt.core.scalabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.scala-ide.sdt.core.scalanature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-	<linkedResources>
-		<link>
-			<name>build-quick-continuations-library</name>
-			<type>2</type>
-			<locationURI>SCALA_BASEDIR/build/quick/classes/continuations/library</locationURI>
-		</link>
-		<link>
-			<name>library</name>
-			<type>2</type>
-			<locationURI>SCALA_BASEDIR/src/continuations/library</locationURI>
-		</link>
-	</linkedResources>
-</projectDescription>
diff --git a/src/eclipse/continuations-library/.settings/org.scala-ide.sdt.core.prefs b/src/eclipse/continuations-library/.settings/org.scala-ide.sdt.core.prefs
deleted file mode 100644
index 63e1df2..0000000
--- a/src/eclipse/continuations-library/.settings/org.scala-ide.sdt.core.prefs
+++ /dev/null
@@ -1,2 +0,0 @@
-P=continuations\:enable
-scala.compiler.useProjectSettings=true
diff --git a/src/eclipse/fjbg/.classpath b/src/eclipse/fjbg/.classpath
deleted file mode 100644
index 3e2f55f..0000000
--- a/src/eclipse/fjbg/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" path="fjbg"/>
-	<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="output" path="libs-classes-fjbg"/>
-</classpath>
diff --git a/src/eclipse/fjbg/.project b/src/eclipse/fjbg/.project
deleted file mode 100644
index 8acea9f..0000000
--- a/src/eclipse/fjbg/.project
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>fjbg</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.scala-ide.sdt.core.scalabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.scala-ide.sdt.core.scalanature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-	<linkedResources>
-		<link>
-			<name>fjbg</name>
-			<type>2</type>
-			<locationURI>SCALA_BASEDIR/src/fjbg</locationURI>
-		</link>
-		<link>
-			<name>libs-classes-fjbg</name>
-			<type>2</type>
-			<locationURI>SCALA_BASEDIR/build/libs/classes/fjbg</locationURI>
-		</link>
-	</linkedResources>
-</projectDescription>
diff --git a/src/eclipse/interactive/.classpath b/src/eclipse/interactive/.classpath
new file mode 100644
index 0000000..9e773a3
--- /dev/null
+++ b/src/eclipse/interactive/.classpath
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+	<classpathentry kind="src" path="interactive"/>
+	<classpathentry combineaccessrules="false" kind="src" path="/scaladoc"/>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+	<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+	<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+	<classpathentry kind="output" path="build-quick-interactive"/>
+</classpath>
diff --git a/src/eclipse/interactive/.project b/src/eclipse/interactive/.project
new file mode 100644
index 0000000..1d30e0c
--- /dev/null
+++ b/src/eclipse/interactive/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>interactive</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.scala-ide.sdt.core.scalabuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.scala-ide.sdt.core.scalanature</nature>
+		<nature>org.eclipse.jdt.core.javanature</nature>
+	</natures>
+	<linkedResources>
+		<link>
+			<name>build-quick-interactive</name>
+			<type>2</type>
+			<locationURI>SCALA_BASEDIR/build/quick/classes/interactive</locationURI>
+		</link>
+		<link>
+			<name>interactive</name>
+			<type>2</type>
+			<locationURI>SCALA_BASEDIR/src/interactive</locationURI>
+		</link>
+		<link>
+			<name>lib</name>
+			<type>2</type>
+			<locationURI>SCALA_BASEDIR/lib</locationURI>
+		</link>
+	</linkedResources>
+</projectDescription>
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index 7936d4d..c2aab19 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -1,15 +1,14 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <classpath>
-	<classpathentry kind="src" path="partest"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/scalap"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
-	<classpathentry kind="lib" path="lib/ant/ant.jar"/>
-	<classpathentry kind="lib" path="lib/jline.jar"/>
-	<classpathentry kind="lib" path="lib/msil.jar"/>
+	<classpathentry kind="src" path="partest-extras"/>
 	<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
-	<classpathentry kind="output" path="build-quick-partest"/>
+	<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
+	<classpathentry kind="var" path="M2_REPO/com/googlecode/java-diff-utils/diffutils/1.3.0/diffutils-1.3.0.jar"/>
+        <classpathentry kind="var" path="M2_REPO/org/scala-sbt/test-interface/1.0/test-interface-1.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M7/1.0.0-RC8/scala-partest_2.11.0-M7-1.0.0-RC8.jar"/>
+	<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
+	<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+	<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+	<classpathentry kind="output" path="build-quick-partest-extras"/>
 </classpath>
diff --git a/src/eclipse/partest/.project b/src/eclipse/partest/.project
index 45c2433..5c0c851 100644
--- a/src/eclipse/partest/.project
+++ b/src/eclipse/partest/.project
@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <projectDescription>
-	<name>partest</name>
+	<name>partest-extras</name>
 	<comment></comment>
 	<projects>
 	</projects>
@@ -17,9 +17,9 @@
 	</natures>
 	<linkedResources>
 		<link>
-			<name>build-quick-partest</name>
+			<name>build-quick-partest-extras</name>
 			<type>2</type>
-			<locationURI>SCALA_BASEDIR/build/quick/classes/partest</locationURI>
+			<locationURI>SCALA_BASEDIR/build/quick/classes/partest-extras</locationURI>
 		</link>
 		<link>
 			<name>lib</name>
@@ -27,9 +27,9 @@
 			<locationURI>SCALA_BASEDIR/lib</locationURI>
 		</link>
 		<link>
-			<name>partest</name>
+			<name>partest-extras</name>
 			<type>2</type>
-			<locationURI>SCALA_BASEDIR/src/partest</locationURI>
+			<locationURI>SCALA_BASEDIR/src/partest-extras</locationURI>
 		</link>
 	</linkedResources>
 </projectDescription>
diff --git a/src/eclipse/reflect/.classpath b/src/eclipse/reflect/.classpath
index 36e6b6a..3f14621 100644
--- a/src/eclipse/reflect/.classpath
+++ b/src/eclipse/reflect/.classpath
@@ -2,7 +2,6 @@
 <classpath>
 	<classpathentry kind="src" path="reflect"/>
 	<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
 	<classpathentry kind="output" path="build-quick-reflect"/>
 </classpath>
diff --git a/src/eclipse/repl/.classpath b/src/eclipse/repl/.classpath
new file mode 100644
index 0000000..601a231
--- /dev/null
+++ b/src/eclipse/repl/.classpath
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+	<classpathentry kind="src" path="repl"/>
+	<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
+	<classpathentry kind="var" path="M2_REPO/jline/jline/2.11/jline-2.11.jar"/>
+	<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+	<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+	<classpathentry kind="var" path="SCALA_BASEDIR/build/deps/repl/jline-2.11.jar"/>
+	<classpathentry kind="output" path="build-quick-repl"/>
+</classpath>
diff --git a/src/eclipse/repl/.project b/src/eclipse/repl/.project
new file mode 100644
index 0000000..69ad08a
--- /dev/null
+++ b/src/eclipse/repl/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>repl</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.scala-ide.sdt.core.scalabuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.scala-ide.sdt.core.scalanature</nature>
+		<nature>org.eclipse.jdt.core.javanature</nature>
+	</natures>
+	<linkedResources>
+		<link>
+			<name>build-quick-repl</name>
+			<type>2</type>
+			<locationURI>SCALA_BASEDIR/build/quick/classes/repl</locationURI>
+		</link>
+		<link>
+			<name>lib</name>
+			<type>2</type>
+			<locationURI>SCALA_BASEDIR/lib</locationURI>
+		</link>
+		<link>
+			<name>repl</name>
+			<type>2</type>
+			<locationURI>SCALA_BASEDIR/src/repl</locationURI>
+		</link>
+	</linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath
index d438d3e..e81cacc 100644
--- a/src/eclipse/scala-compiler/.classpath
+++ b/src/eclipse/scala-compiler/.classpath
@@ -1,14 +1,10 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <classpath>
 	<classpathentry kind="src" path="compiler"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/fjbg"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
+	<classpathentry combineaccessrules="false" exported="true" kind="src" path="/asm"/>
+	<classpathentry combineaccessrules="false" exported="true" kind="src" path="/reflect"/>
+	<classpathentry combineaccessrules="false" exported="true" kind="src" path="/scala-library"/>
+	<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
 	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
-	<classpathentry kind="lib" path="lib/ant/ant.jar"/>
-	<classpathentry kind="lib" path="lib/jline.jar"/>
-	<classpathentry kind="lib" path="lib/msil.jar"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
 	<classpathentry kind="output" path="build-quick-compiler"/>
 </classpath>
diff --git a/src/eclipse/scala-library/.classpath b/src/eclipse/scala-library/.classpath
index a3a4933..eff3c8e 100644
--- a/src/eclipse/scala-library/.classpath
+++ b/src/eclipse/scala-library/.classpath
@@ -2,6 +2,6 @@
 <classpath>
 	<classpathentry kind="src" path="library"/>
 	<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
 	<classpathentry kind="output" path="build-quick-lib"/>
 </classpath>
diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath
new file mode 100644
index 0000000..c8f0e89
--- /dev/null
+++ b/src/eclipse/scaladoc/.classpath
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+	<classpathentry kind="src" path="scaladoc"/>
+	<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+	<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+	<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+	<classpathentry combineaccessrules="false" kind="src" path="/partest-extras"/>
+	<classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-xml_2.11.0-M7/1.0.0-RC7/scala-xml_2.11.0-M7-1.0.0-RC7.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-parser-combinators_2.11.0-M7/1.0.0-RC5/scala-parser-combinators_2.11.0-M7-1.0.0-RC5.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M7/1.0.0-RC8/scala-partest_2.11.0-M7-1.0.0-RC8.jar"/>
+	<classpathentry kind="output" path="build-quick-scaladoc"/>
+</classpath>
diff --git a/src/eclipse/scaladoc/.project b/src/eclipse/scaladoc/.project
new file mode 100644
index 0000000..bf76490
--- /dev/null
+++ b/src/eclipse/scaladoc/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>scaladoc</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.scala-ide.sdt.core.scalabuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.scala-ide.sdt.core.scalanature</nature>
+		<nature>org.eclipse.jdt.core.javanature</nature>
+	</natures>
+	<linkedResources>
+		<link>
+			<name>build-quick-scaladoc</name>
+			<type>2</type>
+			<locationURI>SCALA_BASEDIR/build/quick/classes/scaladoc</locationURI>
+		</link>
+		<link>
+			<name>lib</name>
+			<type>2</type>
+			<locationURI>SCALA_BASEDIR/lib</locationURI>
+		</link>
+		<link>
+			<name>scaladoc</name>
+			<type>2</type>
+			<locationURI>SCALA_BASEDIR/src/scaladoc</locationURI>
+		</link>
+	</linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scalap/.classpath b/src/eclipse/scalap/.classpath
index 16737bd..3b635cf 100644
--- a/src/eclipse/scalap/.classpath
+++ b/src/eclipse/scalap/.classpath
@@ -1,13 +1,9 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <classpath>
 	<classpathentry kind="src" path="scalap"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+	<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
+	<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+	<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
 	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
-	<classpathentry kind="lib" path="lib/ant/ant.jar"/>
-	<classpathentry kind="lib" path="lib/jline.jar"/>
-	<classpathentry kind="lib" path="lib/msil.jar"/>
-	<classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
 	<classpathentry kind="output" path="build-quick-scalap"/>
 </classpath>
diff --git a/src/eclipse/test-junit/.classpath b/src/eclipse/test-junit/.classpath
index 718f7b6..8a599bd 100644
--- a/src/eclipse/test-junit/.classpath
+++ b/src/eclipse/test-junit/.classpath
@@ -1,12 +1,12 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <classpath>
 	<classpathentry kind="src" path="test-junit"/>
+	<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
+	<classpathentry kind="var" path="M2_REPO/junit/junit/4.10/junit-4.10.jar"/>
 	<classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
 	<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
 	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
-	<classpathentry kind="lib" path="lib/ant/ant.jar"/>
-	<classpathentry kind="lib" path="lib/jline.jar"/>
 	<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
-	<classpathentry kind="var" path="M2_REPO/junit/junit/4.10/junit-4.10.jar"/>
+	<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
 	<classpathentry kind="output" path="build-test-junit"/>
 </classpath>
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java b/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java
deleted file mode 100644
index 9856dc7..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java
+++ /dev/null
@@ -1,195 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-
-/**
- * Context in which FJBG executes. Used both as a factory for most
- * FJBG classes and as a repository for other factories.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class FJBGContext {
-    /** Class file major version */
-    final int MAJOR_VERSION;
-
-    /** Class file minor version */
-    final int MINOR_VERSION;
-
-    public FJBGContext() {
-        this(45, 3);
-    }
-
-    public FJBGContext(int major, int minor) {
-        MAJOR_VERSION = major;
-        MINOR_VERSION = minor;
-    }
-
-    // Factory methods
-    //////////////////////////////////////////////////////////////////////
-
-    public JClass JClass(int accessFlags,
-                         String name,
-                         String superclassName,
-                         String[] interfaceNames,
-                         String sourceFileName) {
-        return new JClass(this,
-                          accessFlags,
-                          name,
-                          superclassName,
-                          interfaceNames,
-                          sourceFileName);
-    }
-
-    public JClass JClass(DataInputStream stream)
-        throws IOException {
-        return new JClass(this, stream);
-    }
-
-    public JConstantPool JConstantPool() {
-        return new JConstantPool(this);
-    }
-
-    public JConstantPool JConstantPool(DataInputStream stream)
-        throws IOException {
-        return new JConstantPool(this, stream);
-    }
-
-    public JField JField(JClass owner,
-			 int accessFlags,
-			 String name,
-			 JType type) {
-        return new JField(this,
-                          owner,
-			  accessFlags,
-			  name,
-			  type);
-    }
-
-    public JField JField(JClass owner, DataInputStream stream)
-        throws IOException {
-        return new JField(this, owner, stream);
-    }
-
-    public JMethod JMethod(JClass owner,
-                           int accessFlags,
-                           String name,
-                           JType returnType,
-                           JType[] argTypes,
-                           String[] argNames) {
-        return new JMethod(this,
-                           owner,
-                           accessFlags,
-                           name,
-                           returnType,
-                           argTypes,
-                           argNames);
-    }
-
-    public JMethod JMethod(JClass owner,
-                           int accessFlags,
-                           String name,
-                           JMethodType type,
-                           String[] argNames) {
-        return JMethod(owner,
-                       accessFlags,
-                       name,
-                       type.getReturnType(),
-                       type.getArgumentTypes(),
-                       argNames);
-    }
-
-    public JMethod JMethod(JClass owner, DataInputStream stream)
-        throws IOException {
-        return new JMethod(this, owner, stream);
-    }
-
-    public JLocalVariable JLocalVariable(JMethod owner,
-                                         JType type,
-                                         String name,
-                                         int index) {
-        return new JLocalVariable(this, owner, type, name, index);
-    }
-
-    public JCode JCode(JClass clazz, JMethod owner) {
-        return new JExtendedCode(this, clazz, owner);
-    }
-
-    public JCode JCode(JClass clazz, JMethod owner, DataInputStream stream)
-        throws IOException {
-        return new JCode(this, clazz, owner, stream);
-    }
-
-    public JAttributeFactory JAttributeFactory() {
-        return new JAttributeFactory(this);
-    }
-
-    // Attributes
-    public JCodeAttribute JCodeAttribute(JClass clazz, JMethod owner) {
-        return new JCodeAttribute(this, clazz, owner);
-    }
-
-    public JEnclosingMethodAttribute JEnclosingMethodAttribute(JClass clazz,
-                                                               String className,
-                                                               String methodName,
-                                                               JType methodType) {
-        return new JEnclosingMethodAttribute(this, clazz, className, methodName, methodType);
-    }
-
-    public JExceptionsAttribute JExceptionsAttribute(JClass clazz,
-                                                     JMethod owner) {
-        return new JExceptionsAttribute(this, clazz, owner);
-    }
-
-    public JLineNumberTableAttribute JLineNumberTableAttribute(JClass clazz,
-                                                               JCode owner) {
-        return new JLineNumberTableAttribute(this, clazz, owner);
-    }
-
-    public JLocalVariableTableAttribute JLocalVariableTableAttribute(JClass clazz,
-                                                                     JCode owner) {
-        return new JLocalVariableTableAttribute(this, clazz, owner);
-    }
-
-    public JOtherAttribute JOtherAttribute(JClass clazz,
-                                           Object owner,
-                                           String name,
-                                           byte[] contents,
-                                           int length) {
-        return new JOtherAttribute(this, clazz, owner, name, contents, length);
-    }
-
-    public JOtherAttribute JOtherAttribute(JClass clazz,
-                                           Object owner,
-                                           String name,
-                                           byte[] contents) {
-        return JOtherAttribute(clazz, owner, name, contents, contents.length);
-    }
-
-    public JSourceFileAttribute JSourceFileAttribute(JClass clazz,
-                                                     String sourceFileName) {
-        return new JSourceFileAttribute(this, clazz, sourceFileName);
-    }
-
-    public JStackMapTableAttribute JStackMapTableAttribute(JClass clazz,
-                                                           JCode owner) {
-        return new JStackMapTableAttribute(this, clazz, owner);
-    }
-
-    /// Repository
-    //////////////////////////////////////////////////////////////////////
-
-    protected JAttributeFactory jAttributeFactory = null;
-    public JAttributeFactory getJAttributeFactory() {
-        if (jAttributeFactory == null)
-            jAttributeFactory = JAttributeFactory();
-        return jAttributeFactory;
-    }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java b/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java
deleted file mode 100644
index 01d8cc9..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Definition of access flags for fields, methods and classes.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public interface JAccessFlags {
-    public static int ACC_PUBLIC    = 0x0001;
-    public static int ACC_PRIVATE   = 0x0002;
-    public static int ACC_PROTECTED = 0x0004;
-    public static int ACC_STATIC    = 0x0008;
-    public static int ACC_FINAL     = 0x0010;
-    public static int ACC_SUPER     = 0x0020;
-    public static int ACC_VOLATILE  = 0x0040;
-    public static int ACC_TRANSIENT = 0x0080;
-    public static int ACC_NATIVE    = 0x0100;
-    public static int ACC_INTERFACE = 0x0200;
-    public static int ACC_ABSTRACT  = 0x0400;
-    public static int ACC_STRICT    = 0x0800;
-    public static int ACC_SYNTHETIC = 0x1000;
-    public static int ACC_ANNOTATION= 0x2000;
-    public static int ACC_ENUM      = 0x4000;
-
-    // 1.5 specifics
-    public static int ACC_BRIDGE    = 0x0040;
-    public static int ACC_VARARGS   = 0x0080;
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java b/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java
deleted file mode 100644
index 61a0452..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Types for Java arrays.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JArrayType extends JReferenceType {
-    protected final JType elementType;
-    protected String signature = null;
-
-    public JArrayType(JType elementType) {
-        this.elementType = elementType;
-    }
-
-    public int getSize() { return 1; }
-
-    public String getSignature() {
-        if (signature == null)
-            signature = "[" + elementType.getSignature();
-        return signature;
-    }
-
-    public String getDescriptor() {
-        return getSignature();
-    }
-
-    public int getTag() { return T_ARRAY; }
-
-    public JType getElementType() { return elementType; }
-
-    public String toString() {
-        return elementType.toString() + "[]";
-    }
-
-    public boolean isArrayType() { return true; }
-
-    public boolean isCompatibleWith(JType other) {
-        if (other instanceof JObjectType)
-            return (JObjectType)other == JObjectType.JAVA_LANG_OBJECT;
-        else if (other instanceof JArrayType)
-            return elementType.isCompatibleWith(((JArrayType)other).elementType);
-        else return other == JType.REFERENCE;
-    }
-
-    public static JArrayType BOOLEAN   = new JArrayType(JType.BOOLEAN);
-    public static JArrayType BYTE      = new JArrayType(JType.BYTE);
-    public static JArrayType CHAR      = new JArrayType(JType.CHAR);
-    public static JArrayType SHORT     = new JArrayType(JType.SHORT);
-    public static JArrayType INT       = new JArrayType(JType.INT);
-    public static JArrayType FLOAT     = new JArrayType(JType.FLOAT);
-    public static JArrayType LONG      = new JArrayType(JType.LONG);
-    public static JArrayType DOUBLE    = new JArrayType(JType.DOUBLE);
-    public static JArrayType REFERENCE = new JArrayType(JType.REFERENCE);
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java
deleted file mode 100644
index 6a825be..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.*;
-
-/**
- * Abstract superclass for attributes which can be attached to various
- * parts of a class file.
- *
- * Attributes are used for classes (section 4.2), fields (section 4.6),
- * methods (section 4.7) and the Code attribute (section 4.8.3).
- * See sections 4.2 and later of the JVM specification.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public abstract class JAttribute {
-    protected final int nameIdx;
-
-    static public void writeTo(List/*<JAttribute>*/ attrs, DataOutputStream stream)
-        throws IOException {
-        stream.writeShort(attrs.size());
-        Iterator attrsIt = attrs.iterator();
-        while (attrsIt.hasNext()) {
-            JAttribute attr = (JAttribute)attrsIt.next();
-            attr.writeTo(stream);
-        }
-    }
-
-    static public List/*<JAttribute>*/ readFrom(FJBGContext context,
-                                                JClass clazz,
-                                                Object owner,
-                                                DataInputStream stream)
-        throws IOException {
-        JAttributeFactory factory = context.getJAttributeFactory();
-        int count = stream.readShort();
-        ArrayList list = new ArrayList(count);
-        for (int i = 0; i < count; ++i)
-            list.add(factory.newInstance(clazz, owner, stream));
-        return list;
-    }
-
-    public JAttribute(FJBGContext context, JClass clazz) {
-        this.nameIdx = clazz.getConstantPool().addUtf8(getName());
-    }
-
-    public JAttribute(FJBGContext context, JClass clazz, String name) {
-        this.nameIdx = clazz.getConstantPool().addUtf8(name);
-    }
-
-    abstract public String getName();
-
-    /**
-     * Write the attribute to a stream.
-     */
-    public void writeTo(DataOutputStream stream) throws IOException {
-        int contentsSize = getSize();
-
-        stream.writeShort(nameIdx);
-        stream.writeInt(contentsSize);
-        int streamSizeBefore = stream.size();
-        writeContentsTo(stream);
-        int streamSizeDiff = stream.size() - streamSizeBefore;
-
-        assert contentsSize == streamSizeDiff
-            : "invalid size for attribute " + getName()
-            + " given: " + contentsSize
-            + " actual: " + streamSizeDiff;
-    }
-
-    // Note: it is not legal to add data to the constant pool during
-    // the execution of either of the following two methods.
-    protected abstract int getSize();
-    protected abstract void writeContentsTo(DataOutputStream stream)
-        throws IOException;
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java b/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java
deleted file mode 100644
index 33cdce2..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-import java.util.HashMap;
-
-/**
- * Extensible factory to build subclasses of JAttribute based on an
- * attribute name.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JAttributeFactory {
-    protected FJBGContext context;
-    protected HashMap/*<String, Constructor>*/ constructors = new HashMap();
-
-    protected final static Class[] CONSTRUCTOR_ARGS = new Class[] {
-        FJBGContext.class,
-        JClass.class,
-        Object.class,
-        String.class,
-        int.class,
-        DataInputStream.class
-    };
-
-    protected final static Constructor defaultDefaultConstructor;
-    static {
-        try {
-            defaultDefaultConstructor =
-                 JOtherAttribute.class.getConstructor(CONSTRUCTOR_ARGS);
-        } catch (NoSuchMethodException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    protected final Constructor defaultConstructor;
-
-    public JAttributeFactory(FJBGContext context,
-                             Constructor defaultConstructor) {
-        this.context = context;
-        this.defaultConstructor = defaultConstructor;
-        registerClass("Code", JCodeAttribute.class);
-        registerClass("ConstantValue", JConstantValueAttribute.class);
-        registerClass("EnclosingMethod", JEnclosingMethodAttribute.class);
-        registerClass("Exceptions", JExceptionsAttribute.class);
-        registerClass("InnerClasses", JInnerClassesAttribute.class);
-        registerClass("LineNumberTable", JLineNumberTableAttribute.class);
-        registerClass("LocalVariableTable", JLocalVariableTableAttribute.class);
-        registerClass("SourceFile", JSourceFileAttribute.class);
-        registerClass("StackMapTable", JStackMapTableAttribute.class);
-    }
-
-    public JAttributeFactory(FJBGContext context) {
-        this(context, defaultDefaultConstructor);
-    }
-
-    public void registerClass(String attributeName,
-                              Class clazz) {
-        if (! JAttribute.class.isAssignableFrom(clazz))
-            throw new IllegalArgumentException("Not a subclass of JAttribute: "
-                                               + clazz);
-
-        try {
-            Constructor constr = clazz.getConstructor(CONSTRUCTOR_ARGS);
-            constructors.put(attributeName, constr);
-        } catch (NoSuchMethodException e) {
-            throw new IllegalArgumentException("No appropriate constructor for "
-                                               + clazz);
-        }
-    }
-
-    public JAttribute newInstance(JClass clazz,
-                                  Object owner,
-                                  DataInputStream stream)
-        throws IOException {
-        String name = clazz.getConstantPool().lookupUtf8(stream.readShort());
-        Integer size = new Integer(stream.readInt());
-        Constructor constr = (Constructor)constructors.get(name);
-        if (constr == null) constr = defaultConstructor;
-
-        Object[] args = new Object[] { context, clazz, owner, name, size, stream };
-        try {
-            return (JAttribute)constr.newInstance(args);
-        } catch (InstantiationException e) {
-            throw new RuntimeException(e);
-        } catch (IllegalAccessException e) {
-            throw new RuntimeException(e);
-        } catch (InvocationTargetException e) {
-            throw new RuntimeException(e);
-        }
-    }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JClass.java b/src/fjbg/ch/epfl/lamp/fjbg/JClass.java
deleted file mode 100644
index bb1538e..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JClass.java
+++ /dev/null
@@ -1,420 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.util.*;
-import java.io.*;
-
-/**
- * Representation of a Java class.
- *
- * @author Michel Schinz, Stephane Micheloud
- * @version 1.1
- */
-public class JClass extends JMember {
-
-    /** Magic number for Java class files. */
-    public final static int MAGIC_NUMBER = 0xCAFEBABE;
-
-    protected final JAttributeFactory attributeFactory;
-
-    protected final String superclassName;
-    protected final String[] interfaceNames;
-    protected final String sourceFileName;
-    protected final JConstantPool pool;
-
-    public final static String[] NO_INTERFACES = new String[0];
-
-    protected final LinkedList/*<JMethod>*/ methods = new LinkedList();
-    protected final LinkedList/*<JField>*/ fields = new LinkedList();
-
-    protected JInnerClassesAttribute innerClasses;
-
-    protected int major;
-    protected int minor;
-
-    /**
-     * Creates a new class with its access flags, name, superclass name,
-     * interface names and source file name initialized to a given value.
-     * The constructor also initializes the pool and adds a sourceFileName
-     * attribute to the class.
-     * @param accessFlags the int representing the access flags of the class.
-     * @param name the string representing the name of the class.
-     * @param superclassName the string representing the name of the class'
-     * superclass.
-     * @param interfaceNames the list of strings representing the names of the
-     * interfaces implemented by the class.
-     * @param sourceFileName name of the file from which the class was compiled.
-     */
-    protected JClass(FJBGContext context,
-                     int accessFlags,
-                     String name,
-                     String superclassName,
-                     String[] interfaceNames,
-                     String sourceFileName) {
-        super(context, accessFlags, name);
-        this.attributeFactory = context.getJAttributeFactory();
-
-        this.major = context.MAJOR_VERSION;
-        this.minor = context.MINOR_VERSION;
-
-        this.superclassName = superclassName;
-        this.interfaceNames = interfaceNames;
-        this.sourceFileName = sourceFileName;
-        this.pool = context.JConstantPool();
-        if (sourceFileName != null)
-            addAttribute(context.JSourceFileAttribute(this, sourceFileName));
-    }
-
-    protected JClass(FJBGContext context, DataInputStream stream)
-        throws IOException {
-        super(context);
-        this.attributeFactory = context.getJAttributeFactory();
-
-        int magic = stream.readInt();
-        if (magic != MAGIC_NUMBER)
-            throw new IllegalArgumentException("invalid magic number: "+magic);
-
-        minor = stream.readShort();
-        major = stream.readShort();
-        pool = context.JConstantPool(stream);
-        accessFlags = stream.readShort();
-
-        // This class, super class and interfaces
-        name = pool.lookupClass(stream.readShort());
-        superclassName = pool.lookupClass(stream.readShort());
-        interfaceNames = new String[stream.readShort()];
-        for (int i = 0; i < interfaceNames.length; ++i)
-            interfaceNames[i] = pool.lookupClass(stream.readShort());
-
-        // Fields, methods and attributes
-        int fieldsCount = stream.readShort();
-        for (int i = 0; i < fieldsCount; ++i)
-            addField(context.JField(this, stream));
-
-        int methodsCount = stream.readShort();
-        for (int i = 0; i < methodsCount; ++i)
-            addMethod(context.JMethod(this, stream));
-
-        String fileName = null;
-        int attributesCount = stream.readShort();
-        for (int i = 0; i < attributesCount; ++i) {
-            JAttribute attr = attributeFactory.newInstance(this, this, stream);
-            if (attr instanceof JSourceFileAttribute)
-                fileName = ((JSourceFileAttribute)attr).getFileName();
-            else if (attr instanceof JInnerClassesAttribute)
-                innerClasses = (JInnerClassesAttribute)attr;
-            addAttribute(attr);
-        }
-        sourceFileName = fileName;
-    }
-
-    /**
-     * Gets the name of the class' superclass.
-     * @return The string representing the name of the class' superclass.
-     */
-    public String getSuperclassName() { return superclassName; }
-
-    /**
-     * Gets the names of the interfaces implemented by the class.
-     * @return The array containing the string representations of the
-     * names of the interfaces implemented by the class.
-     */
-    public String[] getInterfaceNames() { return interfaceNames; }
-
-    /**
-     * Gets the source file name of this class.
-     * @return The string representing the source file name of this class.
-     */
-    public String getSourceFileName() { return sourceFileName; }
-
-    /**
-     * Gets the type of the objects that are instances of the class.
-     * @return The type of the instances of the class.
-     */
-    public JType getType() { return new JObjectType(name); }
-
-    public JClass getJClass() { return this; }
-
-    public boolean isPublic() {
-        return (accessFlags & JAccessFlags.ACC_PUBLIC) != 0;
-    }
-
-    public boolean isPrivate() {
-        return (accessFlags & JAccessFlags.ACC_PRIVATE) != 0;
-    }
-
-    public boolean isProtected() {
-        return (accessFlags & JAccessFlags.ACC_PROTECTED) != 0;
-    }
-
-    public boolean isStatic() {
-        return (accessFlags & JAccessFlags.ACC_STATIC) != 0;
-    }
-
-    public boolean isFinal() {
-        return (accessFlags & JAccessFlags.ACC_FINAL) != 0;
-    }
-
-    public boolean isAbstract() {
-        return (accessFlags & JAccessFlags.ACC_ABSTRACT) != 0;
-    }
-
-    /**
-     * Sets the version number of the class.
-     * @param major The int representing the major part of the version number
-     * of the class.
-     * @param minor The int representing the minor part of the version number
-     * of the class.
-     */
-    public void setVersion(int major, int minor) {
-        assert !frozen;
-        this.major = major;
-        this.minor = minor;
-    }
-
-    /**
-     * Gets the major part of the number describing the version of the class.
-     * @return The int representing the major part of the version number of
-     * the class.
-     */
-    public int getMajorVersion() { return major; }
-
-    /**
-     * Gets the minor part of the number describing the version of the class.
-     * @return The int representing the minor part of the version number of
-     * the class.
-     */
-    public int getMinorVersion() { return minor; }
-
-    /**
-     * Gets the constant pool of the class.
-     * @return The constant pool of the class.
-     */
-    public JConstantPool getConstantPool() { return pool; }
-
-    public JInnerClassesAttribute getInnerClasses() {
-    	if (innerClasses == null) {
-    		innerClasses = new JInnerClassesAttribute(context, this);
-    		addAttribute(innerClasses);
-    	}
-    	return innerClasses;
-    }
-
-    /**
-     * Checks whether the class is an interface.
-     * @return The boolean indicating whether the class is an interface.
-     */
-    public boolean isInterface() {
-        return (accessFlags & JAccessFlags.ACC_INTERFACE) != 0;
-    }
-
-    public void addField(JField field) {
-        assert !frozen;
-        fields.add(field);
-    }
-
-    /**
-     * Create and add a new field to the class.
-     */
-    public JField addNewField(int accessFlags, String name, JType type) {
-        assert !frozen;
-        JField f = context.JField(this, accessFlags, name, type);
-        addField(f);
-        return f;
-    }
-
-    protected void addMethod(JMethod method) {
-        assert !frozen;
-        methods.add(method);
-    }
-
-    /**
-     * Create and add a new method to the class.
-     */
-    public JMethod addNewMethod(int accessFlags,
-                                String name,
-                                JType returnType,
-                                JType[] argTypes,
-                                String[] argNames) {
-        assert !frozen;
-        JMethod m = context.JMethod(this,
-                                    accessFlags,
-                                    name,
-                                    returnType,
-                                    argTypes,
-                                    argNames);
-        addMethod(m);
-        return m;
-    }
-
-    /**
-     * Remove a previously-added method. This makes no attempt at
-     * minimising the constant pool by removing all constants which
-     * were used only by this method.
-     */
-    public void removeMethod(JMethod m) {
-        assert !frozen;
-        methods.remove(m);
-    }
-
-    public JField[] getFields() {
-        return (JField[])fields.toArray(new JField[fields.size()]);
-    }
-
-    public JMethod[] getMethods() {
-        return (JMethod[])methods.toArray(new JMethod[methods.size()]);
-    }
-
-    /**
-     * Freeze the contents of this class so that it can be written to
-     * a file.
-     */
-    public void freeze() {
-        assert !frozen;
-        frozen = true;
-    }
-
-    /**
-     * Writes the contents of the class to a file referenced by its name.
-     * @param fileName The name of the file in which the class must be written.
-     */
-    public void writeTo(String fileName) throws IOException {
-        writeTo(new File(fileName));
-    }
-
-    /**
-     * Writes the contents of the class to a file.
-     * @param file The file in which the class must be written.
-     */
-    public void writeTo(File file) throws IOException {
-        File parent = file.getParentFile();
-        if (parent != null && !parent.isDirectory())
-            if (!parent.mkdirs())
-                throw new IOException("cannot create directory " + parent);
-
-        FileOutputStream fStream = new FileOutputStream(file);
-        BufferedOutputStream bStream = new BufferedOutputStream(fStream);
-        DataOutputStream dStream = new DataOutputStream(bStream);
-        writeTo(dStream);
-        dStream.close();
-        bStream.close();
-        fStream.close();
-    }
-
-    /**
-     * Writes the contents of the class to a data stream.
-     * @param stream The data stream in which the class must be written.
-     */
-    public void writeTo(DataOutputStream stream) throws IOException {
-        if (!frozen) freeze();
-
-        int thisClassIdx = pool.addClass(name);
-        int superClassIdx = pool.addClass(superclassName);
-        int[] interfacesIdx = new int[interfaceNames.length];
-
-        for (int i = 0; i < interfaceNames.length; ++i)
-            interfacesIdx[i] = pool.addClass(interfaceNames[i]);
-
-        pool.freeze();
-
-        // Magic number.
-        stream.writeInt(MAGIC_NUMBER);
-        // Version
-        stream.writeShort(minor);
-        stream.writeShort(major);
-        // Constant pool
-        pool.writeTo(stream);
-        // Access flags
-        stream.writeShort(accessFlags);
-
-        // This class, super class and interfaces
-        stream.writeShort(thisClassIdx);
-        stream.writeShort(superClassIdx);
-        stream.writeShort(interfacesIdx.length);
-        for (int i = 0; i < interfacesIdx.length; ++i)
-            stream.writeShort(interfacesIdx[i]);
-
-        // Fields and methods
-        stream.writeShort(fields.size());
-        Iterator fieldsIt = fields.iterator();
-        while (fieldsIt.hasNext())
-            ((JField)fieldsIt.next()).writeTo(stream);
-
-        stream.writeShort(methods.size());
-        Iterator methodsIt = methods.iterator();
-        while (methodsIt.hasNext())
-            ((JMethod)methodsIt.next()).writeTo(stream);
-
-        // Attributes
-        JAttribute.writeTo(attributes, stream);
-    }
-
-    // Follows javap output format for ClassFile.
-    /*@Override*/ public String toString() {
-        StringBuffer buf = new StringBuffer();
-        if (sourceFileName != null) {
-            buf.append("Compiled from \"");
-            buf.append(sourceFileName);
-            buf.append("\"\n");
-        }
-        buf.append(getMemberName());
-        buf.append(toExternalName(getName()));
-        if (!isInterface()) {
-            buf.append(" extends ");
-            buf.append(toExternalName(getSuperclassName()));
-        }
-        if (interfaceNames.length > 0) {
-            if (isInterface()) buf.append(" extends ");
-            else buf.append(" implements ");
-            for (int i = 0; i < interfaceNames.length; ++i) {
-                if (i > 0) buf.append(",");
-                buf.append(toExternalName(interfaceNames[i]));
-            }
-        }
-        buf.append("\n");
-        Iterator attrsIt = attributes.iterator();
-        while (attrsIt.hasNext()) {
-            JAttribute attr = (JAttribute)attrsIt.next();
-            buf.append(attr);
-        }
-        buf.append("  minor version: ");
-        buf.append(minor);
-        buf.append("\n  major version: ");
-        buf.append(major);
-        buf.append("\n");
-        buf.append(pool);
-        buf.append("\n{\n");
-        JField[] jfields = getFields();
-        for (int i = 0; i < jfields.length; ++i) {
-            if (i > 0) buf.append("\n");
-            buf.append(jfields[i]);
-        }
-        buf.append("\n");
-        JMethod[] jmethods = getMethods();
-        for (int i = 0; i < jmethods.length; ++i) {
-            if (i > 0) buf.append("\n");
-            buf.append(jmethods[i]);
-        }
-        buf.append("\n}\n");
-        return buf.toString();
-    }
-
-    private String getMemberName() {
-        StringBuffer buf = new StringBuffer();
-        if (isPublic()) buf.append("public ");
-        else if (isProtected()) buf.append("protected ");
-        else if (isPrivate()) buf.append("private ");
-        if (isInterface())
-            buf.append("interface ");
-        else {
-            if (isAbstract()) buf.append("abstract ");
-            else if (isFinal()) buf.append("final ");
-            buf.append("class ");
-        }
-        return buf.toString();
-    }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JCode.java
deleted file mode 100644
index ab6934a..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCode.java
+++ /dev/null
@@ -1,1308 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.*;
-
-import ch.epfl.lamp.util.ByteArray;
-
-/**
- * List of instructions, to which Java byte-code instructions can be added.
- *
- * @author Michel Schinz, Thomas Friedli
- * @version 1.0
- */
-
-public class JCode {
-    protected boolean frozen = false;
-
-    public static int MAX_CODE_SIZE = 65535;
-
-    protected final FJBGContext context;
-    protected final JMethod owner;
-
-    protected final ByteArray codeArray;
-
-    protected final LinkedList/*<ExceptionHandler>*/ exceptionHandlers =
-        new LinkedList();
-
-    protected final JConstantPool pool;
-
-    protected final ArrayList/*<OffsetToPatch>*/ offsetToPatch =
-        new ArrayList();
-
-    protected static int UNKNOWN_STACK_SIZE = Integer.MIN_VALUE;
-    protected int maxStackSize = UNKNOWN_STACK_SIZE;
-    protected int[] stackProduction = null;
-    protected int[] stackSizes;
-
-    protected JCode(FJBGContext context, JClass clazz, JMethod owner) {
-        this.context = context;
-        this.pool = clazz.getConstantPool();
-        this.owner = owner;
-        this.codeArray = new ByteArray();
-    }
-
-    protected JCode(FJBGContext context,
-                    JClass clazz,
-                    JMethod owner,
-                    DataInputStream stream)
-        throws IOException {
-        this.context = context;
-        this.pool = clazz.getConstantPool();
-        this.owner = owner;
-        owner.setCode(this);
-        int size = stream.readInt();
-        if (size > MAX_CODE_SIZE) // section 4.10
-            throw new Error("code size must be less than " + MAX_CODE_SIZE + ": " + size);
-        this.codeArray = new ByteArray(stream, size);
-    }
-
-    /**
-     * Gets the program counter, which is defined as the address of the
-     * next instruction.
-     * @return The int representing the value of the program counter
-     */
-    public int getPC() {
-        return codeArray.getSize();
-    }
-
-    /**
-     * Gets the size of the code
-     * @return The number of bytes of the code
-     */
-    public int getSize() {
-        return codeArray.getSize();
-    }
-
-    /**
-     * Gets the method to which the code belongs
-     * @return The method to which the code belongs
-     */
-    public JMethod getOwner() {
-        return owner;
-    }
-
-    // Stack size
-    public int getMaxStackSize() {
-        if (maxStackSize == UNKNOWN_STACK_SIZE)
-            maxStackSize = computeMaxStackSize();
-        return maxStackSize;
-    }
-
-    // Freezing
-    //////////////////////////////////////////////////////////////////////
-
-    public static class CodeSizeTooBigException extends OffsetTooBigException {
-        public int codeSize;
-
-        public CodeSizeTooBigException(int size) {
-          codeSize = size;
-        }
-    }
-
-    public void freeze() throws OffsetTooBigException {
-        assert !frozen;
-
-        if (getSize() > MAX_CODE_SIZE) throw new CodeSizeTooBigException(getSize());
-
-        patchAllOffset();
-        codeArray.freeze();
-        frozen = true;
-    }
-
-    // Attributes
-    //////////////////////////////////////////////////////////////////////
-
-    protected final LinkedList/*<JAttribute>*/ attributes = new LinkedList();
-
-    public void addAttribute(JAttribute attr) {
-        attributes.add(attr);
-    }
-
-    public List/*<JAttribute>*/ getAttributes() {
-        return attributes;
-    }
-
-    // Emitting code
-    //////////////////////////////////////////////////////////////////////
-
-    public void emit(JOpcode opcode) {
-        setStackProduction(getPC(), opcode);
-        codeArray.addU1(opcode.code);
-    }
-
-    public void emitNOP() { emit(JOpcode.NOP); }
-
-    // Constant loading.
-    public void emitACONST_NULL() { emit(JOpcode.ACONST_NULL); }
-    public void emitICONST_M1() { emit(JOpcode.ICONST_M1); }
-    public void emitICONST_0() { emit(JOpcode.ICONST_0); }
-    public void emitICONST_1() { emit(JOpcode.ICONST_1); }
-    public void emitICONST_2() { emit(JOpcode.ICONST_2); }
-    public void emitICONST_3() { emit(JOpcode.ICONST_3); }
-    public void emitICONST_4() { emit(JOpcode.ICONST_4); }
-    public void emitICONST_5() { emit(JOpcode.ICONST_5); }
-    public void emitLCONST_0() { emit(JOpcode.LCONST_0); }
-    public void emitLCONST_1() { emit(JOpcode.LCONST_1); }
-    public void emitFCONST_0() { emit(JOpcode.FCONST_0); }
-    public void emitFCONST_1() { emit(JOpcode.FCONST_1); }
-    public void emitFCONST_2() { emit(JOpcode.FCONST_2); }
-    public void emitDCONST_0() { emit(JOpcode.DCONST_0); }
-    public void emitDCONST_1() { emit(JOpcode.DCONST_1); }
-
-    public void emitBIPUSH(int b) { emitU1(JOpcode.BIPUSH, b); }
-    public void emitSIPUSH(int s) { emitU2(JOpcode.SIPUSH, s); }
-    public void emitLDC(int value) {
-        emitU1(JOpcode.LDC, pool.addInteger(value));
-    }
-    public void emitLDC(float value) {
-        emitU1(JOpcode.LDC, pool.addFloat(value));
-    }
-    public void emitLDC(String value) {
-        emitU1(JOpcode.LDC, pool.addString(value));
-    }
-    public void emitLDC_W(int value) {
-        emitU1(JOpcode.LDC_W, pool.addInteger(value));
-    }
-    public void emitLDC_W(float value) {
-        emitU1(JOpcode.LDC_W, pool.addFloat(value));
-    }
-    public void emitLDC_W(String value) {
-        emitU1(JOpcode.LDC_W, pool.addString(value));
-    }
-    public void emitLDC2_W(long value) {
-        emitU2(JOpcode.LDC2_W, pool.addLong(value));
-    }
-    public void emitLDC2_W(double value) {
-        emitU2(JOpcode.LDC2_W, pool.addDouble(value));
-    }
-
-    // Loading variables.
-    public void emitILOAD(int index) { emitU1(JOpcode.ILOAD, index); }
-    public void emitLLOAD(int index) { emitU1(JOpcode.LLOAD, index); }
-    public void emitFLOAD(int index) { emitU1(JOpcode.FLOAD, index); }
-    public void emitDLOAD(int index) { emitU1(JOpcode.DLOAD, index); }
-    public void emitALOAD(int index) { emitU1(JOpcode.ALOAD, index); }
-
-    public void emitILOAD_0() { emit(JOpcode.ILOAD_0); }
-    public void emitILOAD_1() { emit(JOpcode.ILOAD_1); }
-    public void emitILOAD_2() { emit(JOpcode.ILOAD_2); }
-    public void emitILOAD_3() { emit(JOpcode.ILOAD_3); }
-    public void emitLLOAD_0() { emit(JOpcode.LLOAD_0); }
-    public void emitLLOAD_1() { emit(JOpcode.LLOAD_1); }
-    public void emitLLOAD_2() { emit(JOpcode.LLOAD_2); }
-    public void emitLLOAD_3() { emit(JOpcode.LLOAD_3); }
-    public void emitFLOAD_0() { emit(JOpcode.FLOAD_0); }
-    public void emitFLOAD_1() { emit(JOpcode.FLOAD_1); }
-    public void emitFLOAD_2() { emit(JOpcode.FLOAD_2); }
-    public void emitFLOAD_3() { emit(JOpcode.FLOAD_3); }
-    public void emitDLOAD_0() { emit(JOpcode.DLOAD_0); }
-    public void emitDLOAD_1() { emit(JOpcode.DLOAD_1); }
-    public void emitDLOAD_2() { emit(JOpcode.DLOAD_2); }
-    public void emitDLOAD_3() { emit(JOpcode.DLOAD_3); }
-    public void emitALOAD_0() { emit(JOpcode.ALOAD_0); }
-    public void emitALOAD_1() { emit(JOpcode.ALOAD_1); }
-    public void emitALOAD_2() { emit(JOpcode.ALOAD_2); }
-    public void emitALOAD_3() { emit(JOpcode.ALOAD_3); }
-
-    public void emitIALOAD() { emit(JOpcode.IALOAD); }
-    public void emitLALOAD() { emit(JOpcode.LALOAD); }
-    public void emitFALOAD() { emit(JOpcode.FALOAD); }
-    public void emitDALOAD() { emit(JOpcode.DALOAD); }
-    public void emitAALOAD() { emit(JOpcode.AALOAD); }
-    public void emitBALOAD() { emit(JOpcode.BALOAD); }
-    public void emitCALOAD() { emit(JOpcode.CALOAD); }
-    public void emitSALOAD() { emit(JOpcode.SALOAD); }
-
-    // Storing variables.
-    public void emitISTORE(int index) { emitU1(JOpcode.ISTORE, index); }
-    public void emitLSTORE(int index) { emitU1(JOpcode.LSTORE, index); }
-    public void emitFSTORE(int index) { emitU1(JOpcode.FSTORE, index); }
-    public void emitDSTORE(int index) { emitU1(JOpcode.DSTORE, index); }
-    public void emitASTORE(int index) { emitU1(JOpcode.ASTORE, index); }
-
-    public void emitISTORE_0() { emit(JOpcode.ISTORE_0); }
-    public void emitISTORE_1() { emit(JOpcode.ISTORE_1); }
-    public void emitISTORE_2() { emit(JOpcode.ISTORE_2); }
-    public void emitISTORE_3() { emit(JOpcode.ISTORE_3); }
-    public void emitLSTORE_0() { emit(JOpcode.LSTORE_0); }
-    public void emitLSTORE_1() { emit(JOpcode.LSTORE_1); }
-    public void emitLSTORE_2() { emit(JOpcode.LSTORE_2); }
-    public void emitLSTORE_3() { emit(JOpcode.LSTORE_3); }
-    public void emitFSTORE_0() { emit(JOpcode.FSTORE_0); }
-    public void emitFSTORE_1() { emit(JOpcode.FSTORE_1); }
-    public void emitFSTORE_2() { emit(JOpcode.FSTORE_2); }
-    public void emitFSTORE_3() { emit(JOpcode.FSTORE_3); }
-    public void emitDSTORE_0() { emit(JOpcode.DSTORE_0); }
-    public void emitDSTORE_1() { emit(JOpcode.DSTORE_1); }
-    public void emitDSTORE_2() { emit(JOpcode.DSTORE_2); }
-    public void emitDSTORE_3() { emit(JOpcode.DSTORE_3); }
-    public void emitASTORE_0() { emit(JOpcode.ASTORE_0); }
-    public void emitASTORE_1() { emit(JOpcode.ASTORE_1); }
-    public void emitASTORE_2() { emit(JOpcode.ASTORE_2); }
-    public void emitASTORE_3() { emit(JOpcode.ASTORE_3); }
-
-    public void emitIASTORE() { emit(JOpcode.IASTORE); }
-    public void emitLASTORE() { emit(JOpcode.LASTORE); }
-    public void emitFASTORE() { emit(JOpcode.FASTORE); }
-    public void emitDASTORE() { emit(JOpcode.DASTORE); }
-    public void emitAASTORE() { emit(JOpcode.AASTORE); }
-    public void emitBASTORE() { emit(JOpcode.BASTORE); }
-    public void emitCASTORE() { emit(JOpcode.CASTORE); }
-    public void emitSASTORE() { emit(JOpcode.SASTORE); }
-
-    // Stack manipulation.
-    public void emitPOP() { emit(JOpcode.POP); }
-    public void emitPOP2() { emit(JOpcode.POP2); }
-    public void emitDUP() { emit(JOpcode.DUP); }
-    public void emitDUP_X1() { emit(JOpcode.DUP_X1); }
-    public void emitDUP_X2() { emit(JOpcode.DUP_X2); }
-    public void emitDUP2() { emit(JOpcode.DUP2); }
-    public void emitDUP2_X1() { emit(JOpcode.DUP2_X1); }
-    public void emitDUP2_X2() { emit(JOpcode.DUP2_X2); }
-    public void emitSWAP() { emit(JOpcode.SWAP); }
-
-    // Arithmetic and logic operations.
-    public void emitIADD() { emit(JOpcode.IADD); }
-    public void emitLADD() { emit(JOpcode.LADD); }
-    public void emitFADD() { emit(JOpcode.FADD); }
-    public void emitDADD() { emit(JOpcode.DADD); }
-
-    public void emitISUB() { emit(JOpcode.ISUB); }
-    public void emitLSUB() { emit(JOpcode.LSUB); }
-    public void emitFSUB() { emit(JOpcode.FSUB); }
-    public void emitDSUB() { emit(JOpcode.DSUB); }
-
-    public void emitIMUL() { emit(JOpcode.IMUL); }
-    public void emitLMUL() { emit(JOpcode.LMUL); }
-    public void emitFMUL() { emit(JOpcode.FMUL); }
-    public void emitDMUL() { emit(JOpcode.DMUL); }
-
-    public void emitIDIV() { emit(JOpcode.IDIV); }
-    public void emitLDIV() { emit(JOpcode.LDIV); }
-    public void emitFDIV() { emit(JOpcode.FDIV); }
-    public void emitDDIV() { emit(JOpcode.DDIV); }
-
-    public void emitIREM() { emit(JOpcode.IREM); }
-    public void emitLREM() { emit(JOpcode.LREM); }
-    public void emitFREM() { emit(JOpcode.FREM); }
-    public void emitDREM() { emit(JOpcode.DREM); }
-
-    public void emitINEG() { emit(JOpcode.INEG); }
-    public void emitLNEG() { emit(JOpcode.LNEG); }
-    public void emitFNEG() { emit(JOpcode.FNEG); }
-    public void emitDNEG() { emit(JOpcode.DNEG); }
-
-    public void emitISHL() { emit(JOpcode.ISHL); }
-    public void emitLSHL() { emit(JOpcode.LSHL); }
-
-    public void emitISHR() { emit(JOpcode.ISHR); }
-    public void emitLSHR() { emit(JOpcode.LSHR); }
-
-    public void emitIUSHR() { emit(JOpcode.IUSHR); }
-    public void emitLUSHR() { emit(JOpcode.LUSHR); }
-
-    public void emitIAND() { emit(JOpcode.IAND); }
-    public void emitLAND() { emit(JOpcode.LAND); }
-
-    public void emitIOR() { emit(JOpcode.IOR); }
-    public void emitLOR() { emit(JOpcode.LOR); }
-
-    public void emitIXOR() { emit(JOpcode.IXOR); }
-    public void emitLXOR() { emit(JOpcode.LXOR); }
-
-    public void emitIINC(int index, int increment) {
-        emitU1U1(JOpcode.IINC, index, increment);
-    }
-
-    // (Numeric) type conversions.
-    public void emitI2L() { emit(JOpcode.I2L); }
-    public void emitI2F() { emit(JOpcode.I2F); }
-    public void emitI2D() { emit(JOpcode.I2D); }
-    public void emitL2I() { emit(JOpcode.L2I); }
-    public void emitL2F() { emit(JOpcode.L2F); }
-    public void emitL2D() { emit(JOpcode.L2D); }
-    public void emitF2I() { emit(JOpcode.F2I); }
-    public void emitF2L() { emit(JOpcode.F2L); }
-    public void emitF2D() { emit(JOpcode.F2D); }
-    public void emitD2I() { emit(JOpcode.D2I); }
-    public void emitD2L() { emit(JOpcode.D2L); }
-    public void emitD2F() { emit(JOpcode.D2F); }
-    public void emitI2B() { emit(JOpcode.I2B); }
-    public void emitI2C() { emit(JOpcode.I2C); }
-    public void emitI2S() { emit(JOpcode.I2S); }
-
-    // Comparisons and tests.
-    public void emitLCMP() { emit(JOpcode.LCMP); }
-    public void emitFCMPL() { emit(JOpcode.FCMPL); }
-    public void emitFCMPG() { emit(JOpcode.FCMPG); }
-    public void emitDCMPL() { emit(JOpcode.DCMPL); }
-    public void emitDCMPG() { emit(JOpcode.DCMPG); }
-
-    protected void emitGenericIF(JOpcode opcode, Label label)
-        throws OffsetTooBigException {
-        emitU2(opcode, label.getOffset16(getPC() + 1, getPC()));
-    }
-
-    public void emitIFEQ(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IFEQ, label);
-    }
-    public void emitIFEQ(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IFEQ, targetPC - getPC());
-    }
-    public void emitIFEQ() {
-        emitU2(JOpcode.IFEQ, 0);
-    }
-
-    public void emitIFNE(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IFNE, label);
-    }
-    public void emitIFNE(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IFNE, targetPC - getPC());
-    }
-    public void emitIFNE() {
-        emitU2(JOpcode.IFNE, 0);
-    }
-
-    public void emitIFLT(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IFLT, label);
-    }
-    public void emitIFLT(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IFLT, targetPC - getPC());
-    }
-    public void emitIFLT() {
-        emitU2(JOpcode.IFLT, 0);
-    }
-
-    public void emitIFGE(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IFGE, label);
-    }
-    public void emitIFGE(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IFGE, targetPC - getPC());
-    }
-    public void emitIFGE() {
-        emitU2(JOpcode.IFGE, 0);
-    }
-
-    public void emitIFGT(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IFGT, label);
-    }
-    public void emitIFGT(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IFGT, targetPC - getPC());
-    }
-    public void emitIFGT() {
-        emitU2(JOpcode.IFGT, 0);
-    }
-
-    public void emitIFLE(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IFLE, label);
-    }
-    public void emitIFLE(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IFLE, targetPC - getPC());
-    }
-    public void emitIFLE() {
-        emitU2(JOpcode.IFLE, 0);
-    }
-
-    public void emitIF_ICMPEQ(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IF_ICMPEQ, label);
-    }
-    public void emitIF_ICMPEQ(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IF_ICMPEQ, targetPC - getPC());
-    }
-    public void emitIF_ICMPEQ() {
-        emitU2(JOpcode.IF_ICMPEQ, 0);
-    }
-
-    public void emitIF_ICMPNE(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IF_ICMPNE, label);
-    }
-    public void emitIF_ICMPNE(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IF_ICMPNE, targetPC - getPC());
-    }
-    public void emitIF_ICMPNE() {
-        emitU2(JOpcode.IF_ICMPNE, 0);
-    }
-
-    public void emitIF_ICMPLT(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IF_ICMPLT, label);
-    }
-    public void emitIF_ICMPLT(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IF_ICMPLT, targetPC - getPC());
-    }
-    public void emitIF_ICMPLT() {
-        emitU2(JOpcode.IF_ICMPLT, 0);
-    }
-
-    public void emitIF_ICMPGE(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IF_ICMPGE, label);
-    }
-    public void emitIF_ICMPGE(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IF_ICMPGE, targetPC - getPC());
-    }
-    public void emitIF_ICMPGE() {
-        emitU2(JOpcode.IF_ICMPGE, 0);
-    }
-
-    public void emitIF_ICMPGT(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IF_ICMPGT, label);
-    }
-    public void emitIF_ICMPGT(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IF_ICMPGT, targetPC - getPC());
-    }
-    public void emitIF_ICMPGT() {
-        emitU2(JOpcode.IF_ICMPGT, 0);
-    }
-
-    public void emitIF_ICMPLE(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IF_ICMPLE, label);
-    }
-    public void emitIF_ICMPLE(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IF_ICMPLE, targetPC - getPC());
-    }
-    public void emitIF_ICMPLE() {
-        emitU2(JOpcode.IF_ICMPLE, 0);
-    }
-
-    public void emitIF_ACMPEQ(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IF_ACMPEQ, label);
-    }
-    public void emitIF_ACMPEQ(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IF_ACMPEQ, targetPC - getPC());
-    }
-    public void emitIF_ACMPEQ() {
-        emitU2(JOpcode.IF_ACMPEQ, 0);
-    }
-
-    public void emitIF_ACMPNE(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IF_ACMPNE, label);
-    }
-    public void emitIF_ACMPNE(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IF_ACMPNE, targetPC - getPC());
-    }
-    public void emitIF_ACMPNE() {
-        emitU2(JOpcode.IF_ACMPNE, 0);
-    }
-
-    public void emitIFNULL(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IFNULL, label);
-    }
-    public void emitIFNULL(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IFNULL, targetPC - getPC());
-    }
-    public void emitIFNULL() {
-        emitU2(JOpcode.IFNULL, 0);
-    }
-
-    public void emitIFNONNULL(Label label) throws OffsetTooBigException {
-        emitGenericIF(JOpcode.IFNONNULL, label);
-    }
-    public void emitIFNONNULL(int targetPC) throws OffsetTooBigException {
-        emitU2(JOpcode.IFNONNULL, targetPC - getPC());
-    }
-    public void emitIFNONNULL() {
-        emitU2(JOpcode.IFNONNULL, 0);
-    }
-
-    public void emitGOTO(Label label) throws OffsetTooBigException {
-        emitU2(JOpcode.GOTO, label.getOffset16(getPC() + 1, getPC()));
-    }
-    public void emitGOTO(int targetPC) throws OffsetTooBigException {
-        int offset = targetPC - getPC();
-        checkOffset16(offset);
-        emitU2(JOpcode.GOTO, offset);
-    }
-    public void emitGOTO() {
-        emitU2(JOpcode.GOTO, 0);
-    }
-
-    public void emitGOTO_W(Label label) {
-        emitU4(JOpcode.GOTO_W, label.getOffset32(getPC() + 1, getPC()));
-    }
-    public void emitGOTO_W(int targetPC) {
-        emitU4(JOpcode.GOTO_W, targetPC - getPC());
-    }
-    public void emitGOTO_W() {
-        emitU4(JOpcode.GOTO_W, 0);
-    }
-
-    public void emitJSR(Label label) throws OffsetTooBigException {
-        emitU2(JOpcode.JSR, label.getOffset16(getPC() + 1, getPC()));
-    }
-    public void emitJSR(int targetPC) {
-        emitU2(JOpcode.JSR, targetPC - getPC());
-    }
-    public void emitJSR() {
-        emitU2(JOpcode.JSR, 0);
-    }
-
-    public void emitJSR_W(Label label) {
-        emitU4(JOpcode.JSR_W, label.getOffset32(getPC() + 1, getPC()));
-    }
-    public void emitJSR_W(int targetPC) {
-        emitU4(JOpcode.JSR_W, targetPC - getPC());
-    }
-    public void emitJSR_W() {
-        emitU4(JOpcode.JSR_W, 0);
-    }
-
-    /*
-    public void emitRET(Label label) throws OffsetTooBigException {
-        emitU2(JOpcode.RET, label.getOffset16(getPC() + 1, getPC()));
-    }
-    public void emitRET(int targetPC) {
-        emitU1(JOpcode.RET, targetPC);
-    }
-    public void emitRET() {
-        emitU1(JOpcode.RET, 0);
-    }
-    */
-
-    public void emitRET(int index) {
-        emitU1(JOpcode.RET, index);
-    }
-
-    public void emitRET(JLocalVariable var) {
-        emitRET(var.getIndex());
-    }
-
-    public void emitTABLESWITCH(int[] keys,
-                     Label[] branches,
-                     Label defaultBranch) {
-        assert keys.length == branches.length;
-
-        int low = keys[0], high = keys[keys.length - 1];
-        int instrPC = getPC();
-
-        setStackProduction(instrPC, JOpcode.TABLESWITCH);
-        codeArray.addU1(JOpcode.cTABLESWITCH);
-        while (getPC() % 4 != 0) codeArray.addU1(0);
-
-        codeArray.addU4(defaultBranch.getOffset32(getPC(), instrPC));
-        codeArray.addU4(low);
-        codeArray.addU4(high);
-        for (int i = 0; i < branches.length; i++) {
-            assert keys[i] == low + i;
-            codeArray.addU4(branches[i].getOffset32(getPC(), instrPC));
-        }
-    }
-
-    public void emitLOOKUPSWITCH(int[] keys,
-                     Label[] branches,
-                     Label defaultBranch) {
-        assert keys.length == branches.length;
-
-        int instrPC = getPC();
-        setStackProduction(getPC(), JOpcode.LOOKUPSWITCH);
-        codeArray.addU1(JOpcode.cLOOKUPSWITCH);
-        while (getPC() % 4 != 0) codeArray.addU1(0);
-
-        codeArray.addU4(defaultBranch.getOffset32(getPC(), instrPC));
-        codeArray.addU4(branches.length);
-        for (int i = 0; i < branches.length; i++) {
-            codeArray.addU4(keys[i]);
-            codeArray.addU4(branches[i].getOffset32(getPC(), instrPC));
-        }
-    }
-
-    public void emitIRETURN() { emit(JOpcode.IRETURN); }
-    public void emitLRETURN() { emit(JOpcode.LRETURN); }
-    public void emitFRETURN() { emit(JOpcode.FRETURN); }
-    public void emitDRETURN() { emit(JOpcode.DRETURN); }
-    public void emitARETURN() { emit(JOpcode.ARETURN); }
-    public void emitRETURN() { emit(JOpcode.RETURN); }
-
-    // Field access
-    public void emitGETSTATIC(String className, String name, JType type) {
-        setStackProduction(getPC(), type.getSize());
-        int index = pool.addFieldRef(className, name, type.getSignature());
-        emitU2(JOpcode.GETSTATIC, index);
-    }
-    public void emitPUTSTATIC(String className, String name, JType type) {
-        setStackProduction(getPC(), -type.getSize());
-        int index = pool.addFieldRef(className, name, type.getSignature());
-        emitU2(JOpcode.PUTSTATIC, index);
-    }
-    public void emitGETFIELD(String className, String name, JType type) {
-        setStackProduction(getPC(), type.getSize() - 1);
-        int index = pool.addFieldRef(className, name, type.getSignature());
-        emitU2(JOpcode.GETFIELD, index);
-    }
-    public void emitPUTFIELD(String className, String name, JType type) {
-        setStackProduction(getPC(), -(type.getSize() + 1));
-        int index = pool.addFieldRef(className, name, type.getSignature());
-        emitU2(JOpcode.PUTFIELD, index);
-    }
-
-    // Method invocation
-    public void emitINVOKEVIRTUAL(String className,
-                                  String name,
-                                  JMethodType type) {
-        setStackProduction(getPC(), type.getProducedStack() - 1);
-        int index =
-            pool.addClassMethodRef(className, name, type.getSignature());
-        emitU2(JOpcode.INVOKEVIRTUAL, index);
-    }
-    public void emitINVOKESPECIAL(String className,
-                                  String name,
-                                  JMethodType type) {
-        setStackProduction(getPC(), type.getProducedStack() - 1);
-        int index =
-            pool.addClassMethodRef(className, name, type.getSignature());
-        emitU2(JOpcode.INVOKESPECIAL, index);
-    }
-    public void emitINVOKESTATIC(String className,
-                                 String name,
-                                 JMethodType type) {
-        setStackProduction(getPC(), type.getProducedStack());
-        int index =
-            pool.addClassMethodRef(className, name, type.getSignature());
-        emitU2(JOpcode.INVOKESTATIC, index);
-    }
-    public void emitINVOKEINTERFACE(String className,
-                                    String name,
-                                    JMethodType type) {
-        setStackProduction(getPC(), type.getProducedStack() - 1);
-        int index =
-            pool.addInterfaceMethodRef(className, name, type.getSignature());
-        emitU2U1U1(JOpcode.INVOKEINTERFACE, index, type.getArgsSize() + 1, 0);
-    }
-
-    // Object creation
-    public void emitNEW(String className) {
-        emitU2(JOpcode.NEW, pool.addClass(className));
-    }
-    public void emitNEWARRAY(JType elemType) {
-        emitU1(JOpcode.NEWARRAY, elemType.getTag());
-    }
-    public void emitANEWARRAY(JReferenceType elemType) {
-        emitU2(JOpcode.ANEWARRAY, pool.addDescriptor(elemType));
-    }
-    public void emitMULTIANEWARRAY(JReferenceType elemType, int dimensions) {
-        setStackProduction(getPC(), -dimensions + 1);
-        emitU2U1(JOpcode.MULTIANEWARRAY,
-                 pool.addDescriptor(elemType),
-                 dimensions);
-    }
-    public void emitARRAYLENGTH() { emit(JOpcode.ARRAYLENGTH); }
-
-    // Exception throwing
-    public void emitATHROW() { emit(JOpcode.ATHROW); }
-
-    // Dynamic typing
-    public void emitCHECKCAST(JReferenceType type) {
-        emitU2(JOpcode.CHECKCAST, pool.addDescriptor(type));
-    }
-    public void emitINSTANCEOF(JReferenceType type) {
-        emitU2(JOpcode.INSTANCEOF, pool.addDescriptor(type));
-    }
-
-    // Monitors
-    public void emitMONITORENTER() { emit(JOpcode.MONITORENTER); }
-    public void emitMONITOREXIT() { emit(JOpcode.MONITOREXIT); }
-
-    // Wide variants
-    // FIXME setStackProduction will raise an exception here
-    public void emitWIDE(JOpcode opcode, int index) {
-        assert (opcode.code == JOpcode.cILOAD)
-            || (opcode.code == JOpcode.cLLOAD)
-            || (opcode.code == JOpcode.cFLOAD)
-            || (opcode.code == JOpcode.cDLOAD)
-            || (opcode.code == JOpcode.cALOAD)
-            || (opcode.code == JOpcode.cISTORE)
-            || (opcode.code == JOpcode.cLSTORE)
-            || (opcode.code == JOpcode.cFSTORE)
-            || (opcode.code == JOpcode.cDSTORE)
-            || (opcode.code == JOpcode.cASTORE)
-            || (opcode.code == JOpcode.cRET)
-            : "invalide opcode for WIDE: " + opcode;
-
-        setStackProduction(getPC(), opcode);
-        codeArray.addU1(JOpcode.WIDE.code);
-        codeArray.addU1(opcode.code);
-        codeArray.addU2(index);
-    }
-    public void emitWIDE(JOpcode opcode, int index, int constant) {
-        assert opcode.code == JOpcode.cIINC
-            : "invalid opcode for WIDE: " + opcode;
-
-        setStackProduction(getPC(), opcode);
-        codeArray.addU1(JOpcode.cWIDE);
-        codeArray.addU1(opcode.code);
-        codeArray.addU2(index);
-        codeArray.addU2(constant);
-    }
-
-    protected void emitU1(JOpcode opcode, int i1) {
-        setStackProduction(getPC(), opcode);
-        codeArray.addU1(opcode.code);
-        codeArray.addU1(i1);
-    }
-
-    protected void emitU1U1(JOpcode opcode, int i1, int i2) {
-        setStackProduction(getPC(), opcode);
-        codeArray.addU1(opcode.code);
-        codeArray.addU1(i1);
-        codeArray.addU1(i2);
-    }
-
-    protected void emitU2(JOpcode opcode, int i1) {
-        setStackProduction(getPC(), opcode);
-        codeArray.addU1(opcode.code);
-        codeArray.addU2(i1);
-    }
-
-    protected void emitU2U1(JOpcode opcode, int i1, int i2) {
-        setStackProduction(getPC(), opcode);
-        codeArray.addU1(opcode.code);
-        codeArray.addU2(i1);
-        codeArray.addU1(i2);
-    }
-
-    protected void emitU2U1U1(JOpcode opcode, int i1, int i2, int i3) {
-        setStackProduction(getPC(), opcode);
-        codeArray.addU1(opcode.code);
-        codeArray.addU2(i1);
-        codeArray.addU1(i2);
-        codeArray.addU1(i3);
-    }
-
-    protected void emitU4(JOpcode opcode, int i1) {
-        setStackProduction(getPC(), opcode);
-        codeArray.addU1(opcode.code);
-        codeArray.addU4(i1);
-    }
-
-    protected int getU1(int sourcePos) {
-        return codeArray.getU1(sourcePos);
-    }
-
-    protected int getU2(int sourcePos) {
-        return codeArray.getU2(sourcePos);
-    }
-
-    protected int getU4(int sourcePos) {
-        return codeArray.getU4(sourcePos);
-    }
-
-    protected int getS1(int sourcePos) {
-        return codeArray.getS1(sourcePos);
-    }
-
-    protected int getS2(int sourcePos) {
-        return codeArray.getS2(sourcePos);
-    }
-
-    protected int getS4(int sourcePos) {
-        return codeArray.getS4(sourcePos);
-    }
-
-    // Stack size computation
-    //////////////////////////////////////////////////////////////////////
-
-    protected int getStackProduction(int pc) {
-        if (stackProduction == null || pc >= stackProduction.length)
-            return UNKNOWN_STACK_SIZE;
-        else
-            return stackProduction[pc];
-    }
-
-    protected void setStackProduction(int pc, int production) {
-        if (stackProduction == null) {
-            stackProduction = new int[256];
-            Arrays.fill(stackProduction, UNKNOWN_STACK_SIZE);
-        } else {
-            while (pc >= stackProduction.length) {
-                int[] newStackProduction = new int[stackProduction.length * 2];
-                System.arraycopy(stackProduction, 0,
-                                 newStackProduction, 0,
-                                 stackProduction.length);
-                Arrays.fill(newStackProduction,
-                            stackProduction.length,
-                            newStackProduction.length,
-                            UNKNOWN_STACK_SIZE);
-                stackProduction = newStackProduction;
-            }
-        }
-        stackProduction[pc] = production;
-    }
-
-    protected void setStackProduction(int pc, JOpcode opcode) {
-        // TODO we should instead check whether the opcode has known
-        // stack consumption/production.
-        if (getStackProduction(pc) == UNKNOWN_STACK_SIZE)
-//                && opcode.hasKnownProducedDataSize()
-//                && opcode.hasKnownConsumedDataSize())
-            setStackProduction(pc,
-                               opcode.getProducedDataSize()
-                               - opcode.getConsumedDataSize());
-    }
-
-    protected int computeMaxStackSize() {
-        if (stackSizes == null) {
-            stackSizes = new int[getSize()];
-            Arrays.fill(stackSizes, UNKNOWN_STACK_SIZE);
-            stackSizes[0] = 0;
-        }
-        int size = computeMaxStackSize(0, 0, 0);
-
-        // compute stack sizes for exception handlers too
-        ExceptionHandler exh = null;
-        for (Iterator it = exceptionHandlers.iterator();
-             it.hasNext();) {
-            exh = (ExceptionHandler)it.next();
-            int exhSize = computeMaxStackSize(exh.getHandlerPC(), 1, 1);
-            if (size < exhSize)
-                size = exhSize;
-        }
-
-        return size;
-    }
-
-    protected int computeMaxStackSize(int pc, int stackSize, int maxStackSize) {
-        JCodeIterator iterator = new JCodeIterator(this, pc);
-        for (;;) {
-            int successors = iterator.getSuccessorCount();
-            if (successors == 0)
-                return maxStackSize;
-            else {
-                assert stackProduction[iterator.getPC()] != UNKNOWN_STACK_SIZE
-                    : "unknown stack production, pc=" + iterator.getPC()
-                    + " in method " + owner.getName();
-                stackSize += stackProduction[iterator.getPC()];
-                if (stackSize > maxStackSize)
-                    maxStackSize = stackSize;
-                int nextPC = -1;
-                for (int i = 0; i < successors; ++i) {
-                    int succPC = iterator.getSuccessorPC(i);
-                    assert succPC >= 0 && succPC < stackSizes.length
-                        : iterator.getPC() + ": invalid pc: " + succPC
-                        + " op: " + iterator.getOpcode();
-                    if (stackSizes[succPC] == UNKNOWN_STACK_SIZE) {
-                        stackSizes[succPC] = stackSize;
-                        if (nextPC == -1)
-                            nextPC = succPC;
-                        else
-                            maxStackSize = computeMaxStackSize(succPC,
-                                                               stackSize,
-                                                               maxStackSize);
-                    }
-                }
-                if (nextPC == -1)
-                    return maxStackSize;
-                else
-                    iterator.moveTo(nextPC);
-            }
-        }
-    }
-
-    // Labels
-    //////////////////////////////////////////////////////////////////////
-
-    public static class OffsetTooBigException extends Exception {
-        public OffsetTooBigException() { super(); }
-        public OffsetTooBigException(String message) { super(message); }
-    }
-
-    protected void checkOffset16(int offset) throws OffsetTooBigException {
-        if (offset < Short.MIN_VALUE || offset > Short.MAX_VALUE)
-            throw new OffsetTooBigException("offset too big to fit"
-                                            + " in 16 bits: " + offset);
-    }
-
-    public class Label {
-        protected boolean anchored = false;
-        protected int targetPC = 0;
-
-        public void anchorToNext() {
-            assert !anchored;
-            this.targetPC = getPC();
-            anchored = true;
-        }
-
-        public int getAnchor() {
-            assert anchored;
-            return targetPC;
-        }
-
-        protected int getOffset16(int pc, int instrPC)
-            throws OffsetTooBigException {
-            if (anchored) {
-                int offset = targetPC - instrPC;
-                checkOffset16(offset);
-                return offset;
-            } else {
-                recordOffsetToPatch(pc, 16, instrPC, this);
-                return 0;
-            }
-        }
-
-        protected int getOffset32(int pc, int instrPC) {
-            if (anchored)
-                return targetPC - instrPC;
-            else {
-                recordOffsetToPatch(pc, 32, instrPC, this);
-                return 0;
-            }
-        }
-    }
-
-    public Label newLabel() {
-        return new Label();
-    }
-
-    public Label[] newLabels(int count) {
-        Label[] labels = new Label[count];
-        for (int i = 0; i < labels.length; ++i)
-            labels[i] = newLabel();
-        return labels;
-    }
-
-    protected static class OffsetToPatch {
-        public final int pc;
-        public final int size;
-        public final int instrPC;
-        public final Label label;
-
-        public OffsetToPatch(int pc, int size, int instrPC, Label label) {
-            this.pc = pc;
-            this.size = size;
-            this.instrPC = instrPC;
-            this.label = label;
-        }
-    }
-
-    protected void recordOffsetToPatch(int offsetPC,
-                                       int size,
-                                       int instrPC,
-                                       Label label) {
-        offsetToPatch.add(new OffsetToPatch(offsetPC, size, instrPC, label));
-    }
-
-    protected void patchAllOffset() throws OffsetTooBigException {
-        Iterator offsetIt = offsetToPatch.iterator();
-        while (offsetIt.hasNext()) {
-            OffsetToPatch offset = (OffsetToPatch)offsetIt.next();
-            int offsetValue = offset.label.getAnchor() - offset.instrPC;
-            if (offset.size == 16) {
-                checkOffset16(offsetValue);
-                codeArray.putU2(offset.pc, offsetValue);
-            } else
-                codeArray.putU4(offset.pc, offsetValue);
-        }
-    }
-
-    // Exception handling
-    //////////////////////////////////////////////////////////////////////
-
-    public class ExceptionHandler {
-        protected int startPC, endPC, handlerPC;
-        protected final String catchType;
-        protected final int catchTypeIndex;
-
-        public void setStartPC(int pc) {
-            this.startPC = pc;
-        }
-
-        public int getStartPC() {
-            return this.startPC;
-        }
-
-        public void setEndPC(int pc) {
-            this.endPC = pc;
-        }
-
-        public int getEndPC() {
-            return this.endPC;
-        }
-
-        public void setHandlerPC(int pc) {
-            this.handlerPC = pc;
-        }
-
-        public int getHandlerPC() {
-            return this.handlerPC;
-        }
-
-        public ExceptionHandler(String catchType) {
-            this(0, 0, 0, catchType);
-        }
-
-        public ExceptionHandler(int startPC,
-                                int endPC,
-                                int handlerPC,
-                                String catchType) {
-            this.startPC = startPC;
-            this.endPC = endPC;
-            this.handlerPC = handlerPC;
-            this.catchType = catchType;
-            this.catchTypeIndex = (catchType == null
-                                   ? 0
-                                   : pool.addClass(catchType));
-        }
-
-        public ExceptionHandler(DataInputStream stream) throws IOException {
-            this.startPC = stream.readShort();
-            this.endPC = stream.readShort();
-            this.handlerPC = stream.readShort();
-            this.catchTypeIndex = stream.readShort();
-            this.catchType = (catchTypeIndex == 0
-                              ? null
-                              : pool.lookupClass(catchTypeIndex));
-        }
-
-        public void writeTo(DataOutputStream stream) throws IOException {
-            stream.writeShort(startPC);
-            stream.writeShort(endPC);
-            stream.writeShort(handlerPC);
-            stream.writeShort(catchTypeIndex);
-        }
-
-        // Follows javap output format for exception handlers.
-        /*@Override*/ public String toString() {
-            StringBuffer buf = new StringBuffer("    ");
-            if (startPC < 10) buf.append(" ");
-            buf.append(startPC);
-            buf.append("    ");
-            if (endPC < 10) buf.append(" ");
-            buf.append(endPC);
-            buf.append("    ");
-            buf.append(handlerPC);
-            buf.append("   ");
-            if (catchType != null) {
-                buf.append("Class ");
-                buf.append(catchType);
-            }
-            else
-                buf.append("any");
-            return buf.toString();
-        }
-
-    }
-
-    public void addExceptionHandler(ExceptionHandler handler) {
-        assert !frozen;
-        exceptionHandlers.add(handler);
-    }
-
-    public void addExceptionHandler(int startPC,
-                                    int endPC,
-                                    int handlerPC,
-                                    String catchType) {
-        addExceptionHandler(new ExceptionHandler(startPC,
-                                                 endPC,
-                                                 handlerPC,
-                                                 catchType));
-    }
-
-    public void addFinallyHandler(int startPC, int endPC, int handlerPC) {
-        assert !frozen;
-        addExceptionHandler(startPC, endPC, handlerPC, null);
-    }
-
-    public List/*<ExceptionHandler>*/ getExceptionHandlers() {
-        return exceptionHandlers;
-    }
-
-    // Line numbers
-    //////////////////////////////////////////////////////////////////////
-
-    protected int[] lineNumbers = null;
-    protected void ensureLineNumberCapacity(int endPC) {
-        assert !frozen;
-        if (lineNumbers == null) {
-            lineNumbers = new int[endPC];
-            addAttribute(context.JLineNumberTableAttribute(owner.getOwner(),
-                                                           this));
-        } else if (lineNumbers.length < endPC) {
-            int[] newLN = new int[Math.max(endPC, lineNumbers.length * 2)];
-            System.arraycopy(lineNumbers, 0, newLN, 0, lineNumbers.length);
-            lineNumbers = newLN;
-        }
-    }
-
-    /**
-     * Set all line numbers in the interval [startPC, endPC) to
-     * line, overwriting existing line numbers.
-     */
-    public void setLineNumber(int startPC, int endPC, int line) {
-        ensureLineNumberCapacity(endPC);
-        Arrays.fill(lineNumbers, startPC, endPC, line);
-    }
-
-    public void setLineNumber(int instrPC, int line) {
-        setLineNumber(instrPC, instrPC + 1, line);
-    }
-
-    /** Sets all non-filled line numbers in the interval [startPC, endPC)
-     *  to 'line'.
-     */
-    public void completeLineNumber(int startPC, int endPC, int line) {
-        ensureLineNumberCapacity(endPC);
-        for (int pc = startPC; pc < endPC; ++pc)
-            if (lineNumbers[pc] == 0) lineNumbers[pc] = line;
-    }
-
-    public int[] getLineNumbers() {
-        assert frozen;
-        if (lineNumbers == null) return new int[0];
-        else if (lineNumbers.length == getPC()) return lineNumbers;
-        else {
-            int[] trimmedLN = new int[getPC()];
-            System.arraycopy(lineNumbers, 0,
-                             trimmedLN, 0,
-                             Math.min(lineNumbers.length, trimmedLN.length));
-            return trimmedLN;
-        }
-    }
-
-    // Output
-    //////////////////////////////////////////////////////////////////////
-
-    public void writeTo(DataOutputStream stream) throws IOException {
-        assert frozen;
-        stream.writeInt(getSize());
-        codeArray.writeTo(stream);
-    }
-
-    // Follows javap output format for opcodes.
-    /*@Override*/ public String toString() {
-        StringBuffer buf = new StringBuffer();
-        JOpcode opcode = null;
-        int pc = 0, addr = 0;
-        while (pc < codeArray.getSize()) {
-            buf.append("\n   ");
-            buf.append(pc);
-            buf.append(":\t");
-            opcode = JOpcode.OPCODES[codeArray.getU1(pc)];
-            buf.append(decode(opcode, pc));
-            if (opcode.code == JOpcode.cTABLESWITCH ||
-                opcode.code == JOpcode.cLOOKUPSWITCH) {
-                addr = ((pc / 4 + 1) + 1) * 4; // U4 aligned data
-                int low = codeArray.getU4(addr);
-                int high = codeArray.getU4(addr+4);
-                pc = addr + (2/*low+high*/ + (high - low + 1)/*targets*/) * 4;
-            } else
-                pc += opcode.getSize();
-        }
-        if (exceptionHandlers.size() > 0) {
-            buf.append("\n  Exception table:\n   from   to  target type\n");
-            Iterator it = exceptionHandlers.iterator();
-            while (it.hasNext()) {
-                ExceptionHandler exh = (ExceptionHandler)it.next();
-                buf.append(exh);
-                buf.append("\n");
-            }
-        }
-        return buf.toString();
-    }
-
-    private String decode(JOpcode opcode, int pc) {
-        String ownerClassName = owner.getOwner().getName();
-        int data, data2;
-        StringBuilder buf = new StringBuilder();
-        buf.append(opcode.name.toLowerCase());
-        switch (opcode.code) {
-        case JOpcode.cALOAD: case JOpcode.cASTORE: case JOpcode.cBIPUSH:
-        case JOpcode.cDLOAD: case JOpcode.cDSTORE:
-        case JOpcode.cFLOAD: case JOpcode.cFSTORE:
-        case JOpcode.cILOAD: case JOpcode.cISTORE:
-        case JOpcode.cLLOAD: case JOpcode.cLSTORE:
-            data = codeArray.getU1(pc+1);
-            buf.append("\t");
-            buf.append(data);
-            break;
-        case JOpcode.cLDC:
-            data = codeArray.getU1(pc+1);
-            buf.append("\t#");
-            buf.append(data);
-            buf.append("; ");
-            buf.append(pool.lookupEntry(data).toComment(ownerClassName));
-            break;
-        case JOpcode.cNEWARRAY:
-            data = codeArray.getU1(pc+1);
-            buf.append(" ");
-            buf.append(JType.tagToString(data));
-            break;
-        case JOpcode.cIINC:
-            data = codeArray.getU1(pc+1);
-            data2 = codeArray.getU1(pc+2);
-            buf.append("\t");
-            buf.append(data);
-            buf.append(", ");
-            buf.append(data2);
-            break;
-        case JOpcode.cSIPUSH:
-            data = codeArray.getU2(pc+1);
-            buf.append("\t");
-            buf.append(data);
-            break;
-        case JOpcode.cANEWARRAY: case JOpcode.cCHECKCAST:
-        case JOpcode.cGETFIELD: case JOpcode.cGETSTATIC:
-        case JOpcode.cINSTANCEOF:
-        case JOpcode.cINVOKESPECIAL: case JOpcode.cINVOKESTATIC:
-        case JOpcode.cINVOKEVIRTUAL:
-        case JOpcode.cLDC_W: case JOpcode.cLDC2_W: case JOpcode.cNEW:
-        case JOpcode.cPUTFIELD: case JOpcode.cPUTSTATIC:
-            data = codeArray.getU2(pc+1);
-            buf.append("\t#");
-            buf.append(data);
-            buf.append("; ");
-            buf.append(pool.lookupEntry(data).toComment(ownerClassName));
-            break;
-        case JOpcode.cIF_ACMPEQ: case JOpcode.cIF_ACMPNE:
-        case JOpcode.cIFEQ: case JOpcode.cIFGE: case JOpcode.cIFGT:
-        case JOpcode.cIFLE: case JOpcode.cIFLT: case JOpcode.cIFNE:
-        case JOpcode.cIFNONNULL: case JOpcode.cIFNULL:
-        case JOpcode.cIF_ICMPEQ: case JOpcode.cIF_ICMPGE:
-        case JOpcode.cIF_ICMPGT: case JOpcode.cIF_ICMPLE:
-        case JOpcode.cIF_ICMPLT: case JOpcode.cIF_ICMPNE:
-            data = codeArray.getS2(pc+1); // signed 16-bit branch offset
-            buf.append("\t");
-            buf.append(pc+data);
-            break;
-        case JOpcode.cGOTO:
-            data = codeArray.getS2(pc+1); // always S2 offset
-            buf.append("\t");
-            buf.append(pc+data);
-            break;
-        case JOpcode.cINVOKEINTERFACE:
-            data = codeArray.getU2(pc+1);
-            data2 = codeArray.getU1(pc+3);
-            buf.append("\t#");
-            buf.append(data);
-            buf.append(",  ");
-            buf.append(data2);
-            buf.append("; ");
-            buf.append(pool.lookupEntry(data).toComment(ownerClassName));
-            break;
-        case JOpcode.cTABLESWITCH:
-            buf.append("{ //");
-            int addr = ((pc / 4 + 1) + 1) * 4; // U4 aligned data
-            int low = codeArray.getU4(addr);
-            int high = codeArray.getU4(addr+4);
-            buf.append(low);
-            buf.append(" to ");
-            buf.append(high);
-            for (int i = low; i <= high; ++i) {
-                buf.append("\n\t\t");
-                buf.append(i);
-                buf.append(": ");
-                buf.append(pc+codeArray.getU4(addr+8+(i-low)*4));
-                buf.append(";");
-            }
-            buf.append("\n\t\tdefault: ");
-            buf.append(pc+codeArray.getU4(addr-4));
-            buf.append(" }");
-        default:
-        }
-        return buf.toString();
-    }
-}
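
The Label and OffsetToPatch machinery in the removed JCode above is a classic back-patching scheme: a forward branch is emitted with a zero placeholder, the position of that placeholder is recorded, and patchAllOffset() fills in the real offset once the label has been anchored. The following self-contained sketch illustrates the same idea in miniature; it is not the FJBG API, and every name in it (TinyAssembler, Patch, emitGotoLabel, anchorLabelHere) is hypothetical.

    import java.util.ArrayList;
    import java.util.List;

    public class TinyAssembler {
        private final List<Byte> code = new ArrayList<Byte>();

        // One pending forward reference: where the placeholder lives and
        // which instruction the offset is relative to (like OffsetToPatch).
        private static class Patch {
            final int pos, instrPC;
            Patch(int pos, int instrPC) { this.pos = pos; this.instrPC = instrPC; }
        }
        private final List<Patch> patches = new ArrayList<Patch>();
        private int labelPC = -1;                    // the single label, unanchored while < 0

        int pc() { return code.size(); }

        // Emit "goto <label>" (opcode 0xA7, signed 16-bit offset).
        void emitGotoLabel() {
            int instrPC = pc();
            code.add((byte) 0xA7);
            if (labelPC >= 0) {
                appendU2(labelPC - instrPC);         // backward branch: offset known now
            } else {
                patches.add(new Patch(pc(), instrPC));
                appendU2(0);                         // forward branch: placeholder, patched later
            }
        }

        // Like Label.anchorToNext() followed by patchAllOffset() for this one label.
        void anchorLabelHere() {
            labelPC = pc();
            for (Patch p : patches) putU2(p.pos, labelPC - p.instrPC);
            patches.clear();
        }

        private void appendU2(int v) { code.add((byte) (v >> 8)); code.add((byte) v); }
        private void putU2(int pos, int v) { code.set(pos, (byte) (v >> 8)); code.set(pos + 1, (byte) v); }

        public static void main(String[] args) {
            TinyAssembler asm = new TinyAssembler();
            asm.emitGotoLabel();                     // target not yet known
            asm.anchorLabelHere();                   // patches the offset to 3
            System.out.println(asm.code);            // [-89, 0, 3]
        }
    }

Running main prints [-89, 0, 3]: the goto opcode byte followed by the patched 16-bit offset of 3, which points at the instruction immediately after the branch, exactly where the label was anchored.
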
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java
deleted file mode 100644
index 9f3fcf8..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Code attribute, containing code of methods.
- *
- * A Code attribute contains the JVM instructions and auxiliary information
- * for a single method, instance initialization method, or class or interface
- * initialization method. See section 4.8.3 of the JVM specification.
- *
- * @author Michel Schinz, Stephane Micheloud
- * @version 1.1
- */
-
-public class JCodeAttribute extends JAttribute {
-    protected final JCode code;
-    protected final JMethod owner;
-    protected static int UNKNOWN_STACK_SIZE = Integer.MIN_VALUE;
-    protected final int maxStackSize;
-    protected final int maxLocals;
-
-    public JCodeAttribute(FJBGContext context, JClass clazz, JMethod owner) {
-        super(context, clazz);
-        this.owner = owner;
-
-        this.maxStackSize = UNKNOWN_STACK_SIZE;
-        this.maxLocals = 0; // unknown
-        this.code = owner.getCode();
-
-        assert clazz == owner.getOwner();
-    }
-
-    public JCodeAttribute(FJBGContext context,
-                          JClass clazz,
-                          Object owner,
-                          String name,
-                          int size,
-                          DataInputStream stream)
-        throws IOException {
-        super(context, clazz, name);
-        this.owner = (JMethod)owner;
-
-        this.maxStackSize = stream.readShort();
-        this.maxLocals = stream.readShort();
-        this.code = context.JCode(clazz, (JMethod)owner, stream);
-
-        int handlersCount = stream.readShort();
-        for (int i = 0; i < handlersCount; ++i)
-            code.addExceptionHandler(code.new ExceptionHandler(stream));
-        List/*<JAttribute>*/ attributes =
-            JAttribute.readFrom(context, clazz, code, stream);
-        Iterator attrIt = attributes.iterator();
-        while (attrIt.hasNext())
-            code.addAttribute((JAttribute)attrIt.next());
-
-        assert name.equals(getName());
-    }
-
-    public String getName() { return "Code"; }
-
-    // Follows javap output format for Code attribute.
-    /*@Override*/ public String toString() {
-        StringBuffer buf = new StringBuffer("  Code:");
-        buf.append("\n   Stack=");
-        buf.append(maxStackSize);
-        buf.append(", Locals=");
-        buf.append(maxLocals);
-        buf.append(", Args_size=");
-        buf.append(owner.getArgsSize());
-        buf.append(code);
-        buf.append("\n");
-        Iterator it = code.getAttributes().iterator();
-        while (it.hasNext()) {
-            JAttribute attr = (JAttribute)it.next();
-            buf.append(attr);
-            buf.append("\n");
-        }
-        return buf.toString();
-    }
-
-    protected int getSize() {
-        int handlersNum = code.getExceptionHandlers().size();
-
-        int attrsSize = 0;
-        Iterator attrsIt = code.getAttributes().iterator();
-        while (attrsIt.hasNext()) {
-            JAttribute attr = (JAttribute)attrsIt.next();
-            attrsSize += attr.getSize() + 6;
-        }
-
-        return 2                // max stack
-            + 2                 // max locals
-            + 4                 // code size
-            + code.getSize()    // code
-            + 2                 // exception table size
-            + 8 * handlersNum   // exception table
-            + 2                 // attributes count
-            + attrsSize;        // attributes
-    }
-
-    protected void writeContentsTo(DataOutputStream stream) throws IOException {
-        List/*<ExceptionHandler>*/ handlers = code.getExceptionHandlers();
-
-        stream.writeShort(code.getMaxStackSize());
-        stream.writeShort(owner.getMaxLocals());
-
-        code.writeTo(stream);
-
-        stream.writeShort(handlers.size());
-        Iterator handlerIt = handlers.iterator();
-        while (handlerIt.hasNext())
-            ((JCode.ExceptionHandler)handlerIt.next()).writeTo(stream);
-        JAttribute.writeTo(code.getAttributes(), stream);
-    }
-}
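
As a quick cross-check of the arithmetic in JCodeAttribute.getSize() above: the body of a Code attribute is max_stack (2 bytes), max_locals (2), code_length (4), the code itself, exception_table_length (2), 8 bytes per handler, attributes_count (2), and each nested attribute's contents plus its 6-byte header (2 for the name index, 4 for the length). The sketch below only illustrates that formula; the class and method names are hypothetical, not part of FJBG.

    public class CodeAttributeSize {
        // Mirrors the sum performed by JCodeAttribute.getSize().
        static int codeAttributeBodySize(int codeBytes, int handlerCount, int[] nestedAttrInfoSizes) {
            int attrsSize = 0;
            for (int s : nestedAttrInfoSizes)
                attrsSize += s + 6;                  // 2-byte name index + 4-byte length per attribute
            return 2                                 // max_stack
                + 2                                  // max_locals
                + 4                                  // code_length
                + codeBytes                          // code
                + 2                                  // exception_table_length
                + 8 * handlerCount                   // exception table entries
                + 2                                  // attributes_count
                + attrsSize;                         // nested attributes
        }

        public static void main(String[] args) {
            // 20 bytes of code, one handler, one nested attribute with a 10-byte body -> 56
            System.out.println(codeAttributeBodySize(20, 1, new int[] { 10 }));
        }
    }
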
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java b/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java
deleted file mode 100644
index d09dfd1..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java
+++ /dev/null
@@ -1,377 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import ch.epfl.lamp.util.ByteArray;
-
-/**
- * Iterator used to examine the contents of an instruction list.
- *
- * @author Michel Schinz, Thomas Friedli
- * @version 1.0
- */
-
-public class JCodeIterator {
-    protected final JCode code;
-    protected final JConstantPool pool;
-    protected final ByteArray codeArray;
-
-    protected int pc;
-    protected JOpcode opcode;
-
-    /**
-     * Creates a new code iterator with its instruction list
-     * and its pc initialized to a given value.
-     */
-    public JCodeIterator(JCode code, int pc) {
-        this.code = code;
-        this.pool = code.getOwner().getOwner().getConstantPool();
-        this.codeArray = code.codeArray;
-        this.pc = pc;
-        setOpcode();
-    }
-
-    public JCodeIterator(JCode code) {
-        this(code, 0);
-    }
-
-    /**
-     * Get the current program counter.
-     * @return The current program counter.
-     */
-    public int getPC() { return pc; }
-
-    /**
-     * Searches the type of the instruction positioned at the
-     * current address and updates the current instruction.
-     */
-    protected void setOpcode() {
-        // TODO : check if the current pc is the beginning
-        //        of an instruction
-        opcode = isValid() ? JOpcode.OPCODES[codeArray.getU1(pc)] : null;
-    }
-
-    /**
-     * Returns the opcode of the current instruction.
-     * @return The opcode of the current instruction.
-     */
-    public JOpcode getOpcode() {
-        return opcode;
-    }
-
-    /**
-     * Updates the program counter to a given value.
-     * @param pc The new value of the program counter.
-     */
-    public void moveTo(int pc) {
-        this.pc = pc;
-        setOpcode();
-    }
-
-    /**
-     * Check the validity of the iterator.
-     * @return true iff the iterator points to a valid address.
-     */
-    public boolean isValid() {
-        return pc < codeArray.getSize();
-    }
-
-    /**
-     * Updates the current instruction with the next one in the
-     * sense of their position in the code.
-     */
-    public void moveToNext() {
-        moveTo(pc + getInstructionSize());
-    }
-
-    /**
-     * Moves the iterator to a specific successor of the current
-     * instruction.
-     * @param succ The index of the desired successor in the list of
-     * successors of the current instruction.
-     */
-    public void moveToSuccessor(int succ) {
-        moveTo(getSuccessorPC(succ));
-    }
-
-    /**
-     * Moves the iterator to the instruction positioned at a given
-     * offset relative to the current program counter.
-     * @param offset The position of the target instruction relative
-     * to the current one.
-     */
-    public void moveRelatively(int offset) {
-        moveTo(pc + offset);
-    }
-
-    /**
-     * Returns the size in bytes of the current instruction.
-     * @return The size in bytes of the current instruction.
-     */
-    public int getInstructionSize() {
-        if (opcode.size != JOpcode.UNKNOWN) {
-            return opcode.size;
-        } else if (opcode == JOpcode.TABLESWITCH) {
-            int lowOffset = 1 + pad4(pc + 1) + 4;
-            int low = codeArray.getS4(pc + lowOffset);
-            int high = codeArray.getS4(pc + lowOffset + 4);
-            return lowOffset + 8 + 4 * (high - low + 1);
-        } else if (opcode == JOpcode.LOOKUPSWITCH) {
-            int npairsOffset = 1 + pad4(pc + 1) + 4;
-            int npairs = codeArray.getS4(pc + npairsOffset);
-            return npairsOffset + 4 + 8 * npairs;
-        } else if (opcode == JOpcode.WIDE) {
-            if (codeArray.getU1(pc + 1) == JOpcode.cIINC)
-                return 6;
-            else
-                return 4;
-        } else
-            throw new Error("Unknown size for instruction " + opcode);
-    }
-
-    /**
-     * Returns the number of successors of the current instruction.
-     * @return The number of successors of the current instruction.
-     */
-    public int getSuccessorCount() {
-        if (opcode.successorCount != JOpcode.UNKNOWN) {
-            return opcode.successorCount;
-        } else if (opcode == JOpcode.TABLESWITCH) {
-            int lowPos = pc + 1 + pad4(pc + 1) + 4;
-            return 1                           // default case
-                + codeArray.getS4(lowPos + 4)  // value of HIGH field
-                - codeArray.getS4(lowPos) + 1; // value of LOW field
-        } else if (opcode == JOpcode.LOOKUPSWITCH) {
-            int npairsPos = pc + 1 + pad4(pc + 1) + 4;
-            return 1 + codeArray.getS4(npairsPos);
-        } else
-            throw new Error("Unknown successors for instruction " + opcode);
-    }
-
-    /**
-     * Returns the address of the successor of the current instruction
-     * given its index in the list of successors of the current
-     * instruction.
-     * @param index The index of the wanted successor in the list of
-     * the successors of the current instruction.
-     * @return The address of the specific successor.
-     */
-    public int getSuccessorPC(int index) {
-        assert (index >= 0) && (index < getSuccessorCount()) : index;
-
-        switch (opcode.jumpKind) {
-        case JOpcode.JMP_NEXT:
-            return pc + getInstructionSize();
-        case JOpcode.JMP_ALWAYS_S2_OFFSET:
-            return pc + codeArray.getS2(pc + 1);
-        case JOpcode.JMP_ALWAYS_S4_OFFSET:
-            return pc + codeArray.getS4(pc + 1);
-        case JOpcode.JMP_MAYBE_S2_OFFSET:
-            if (index == 0)
-                return pc + getInstructionSize();
-            else
-                return pc + codeArray.getS2(pc + 1);
-        case JOpcode.JMP_TABLE: {
-            int defaultPos = pc + 1 + pad4(pc + 1);
-            if (index == 0)
-                return pc + codeArray.getS4(defaultPos);
-            else
-                return pc + codeArray.getS4(defaultPos + 3*4 + 4 * (index - 1));
-        }
-        case JOpcode.JMP_LOOKUP: {
-            int defaultPos = pc + 1 + pad4(pc + 1);
-            if (index == 0)
-                return pc + codeArray.getS4(defaultPos);
-            else
-                return pc + codeArray.getS4(defaultPos + 2*4 + 4 + 8 * (index - 1));
-        }
-        default:
-            throw new Error();
-        }
-    }
-
-    /**
-     * Returns the total size of data words put on the stack by the current
-     * instruction.
-     * @return The total size of data words put on the stack by the current
-     * instruction.
-     */
-    public int getProducedDataSize() {
-        if (opcode.getProducedDataTypes() == JOpcode.UNKNOWN_TYPE) {
-            switch (opcode.code) {
-            case JOpcode.cLDC: case JOpcode.cLDC_W: case JOpcode.cBALOAD:
-                return 1;
-            case JOpcode.cLDC2_W: case JOpcode.cDUP: case JOpcode.cSWAP:
-                return 2;
-            case JOpcode.cDUP_X1:
-                return 3;
-            case JOpcode.cDUP_X2: case JOpcode.cDUP2:
-                return 4;
-            case JOpcode.cDUP2_X1:
-                return 5;
-            case JOpcode.cDUP2_X2:
-                return 6;
-            case JOpcode.cGETSTATIC: case JOpcode.cGETFIELD: {
-                JConstantPool.FieldOrMethodRefEntry entry =
-                    (JConstantPool.FieldOrMethodRefEntry)
-                    pool.lookupEntry(codeArray.getU2(pc + 1));
-                return JType.parseSignature(entry.getSignature()).getSize();
-            }
-            case JOpcode.cWIDE : {
-                int op = codeArray.getU1(pc + 1);
-                if (op >= JOpcode.cILOAD && op <= JOpcode.cALOAD) {
-                    JOpcode opcode2 = JOpcode.OPCODES[op];
-                    return JType.getTotalSize(opcode2.getProducedDataTypes());
-                } else if (op >= JOpcode.cISTORE && op <= JOpcode.cASTORE)
-                    return 0;
-                else return 0; // (IINC)
-            }
-            default :
-                throw new Error(opcode.toString());
-            }
-        } else
-            return JType.getTotalSize(opcode.getProducedDataTypes());
-    }
-
-    /**
-     * Returns the total size of data words taken from the stack by the current
-     * instruction.
-     * @return The total size of data words taken from the stack by the current
-     * instruction.
-     */
-    public int getConsumedDataSize() {
-        if (opcode.getConsumedDataTypes() != JOpcode.UNKNOWN_TYPE)
-            return JType.getTotalSize(opcode.getConsumedDataTypes());
-        else {
-            switch (opcode.code) {
-            case JOpcode.cPOP: case JOpcode.cDUP:
-                return 1;
-            case JOpcode.cPOP2: case JOpcode.cSWAP:
-            case JOpcode.cDUP_X1: case JOpcode.cDUP2:
-                return 2;
-            case JOpcode.cDUP_X2: case JOpcode.cDUP2_X1:
-                return 3;
-            case JOpcode.cDUP2_X2:
-                return 4;
-            case JOpcode.cPUTSTATIC: case JOpcode.cPUTFIELD: {
-                JConstantPool.FieldOrMethodRefEntry entry =
-                    (JConstantPool.FieldOrMethodRefEntry)
-                    pool.lookupEntry(codeArray.getU2(pc + 1));
-                return JType.parseSignature(entry.getSignature()).getSize();
-            }
-            case JOpcode.cINVOKEVIRTUAL: case JOpcode.cINVOKESPECIAL:
-            case JOpcode.cINVOKESTATIC:  case JOpcode.cINVOKEINTERFACE : {
-                JConstantPool.FieldOrMethodRefEntry entry =
-                    (JConstantPool.FieldOrMethodRefEntry)
-                    pool.lookupEntry(codeArray.getU2(pc + 1));
-                JMethodType tp = (JMethodType)
-                    JType.parseSignature(entry.getSignature());
-                return tp.getArgsSize()
-                    + (opcode == JOpcode.INVOKESTATIC ? 0 : 1);
-            }
-            case JOpcode.cWIDE : {
-                int op = codeArray.getU1(pc + 1);
-                if (op >= JOpcode.cILOAD && op <= JOpcode.cALOAD)
-                    return 0;
-                else if (op >= JOpcode.cISTORE && op <= JOpcode.cASTORE) {
-                    JOpcode opcode2 = JOpcode.OPCODES[op];
-                    return JType.getTotalSize(opcode2.getConsumedDataTypes());
-                } else
-                    return 0; // (IINC)
-            }
-            case JOpcode.cMULTIANEWARRAY :
-                return codeArray.getU1(pc + 3);
-            default:
-                throw new Error(opcode.toString());
-            }
-        }
-    }
-
-    /**
-     * Returns the number of data types put on the stack by the current
-     * instruction.
-     * @return The number of data types put on the stack by the current
-     * instruction.
-     */
-    public int getProducedDataTypesNumber() {
-        if (opcode.getProducedDataTypes() != JOpcode.UNKNOWN_TYPE)
-            return opcode.getProducedDataTypes().length;
-        else {
-            switch (opcode.code) {
-            case JOpcode.cLDC: case JOpcode.cLDC_W: case JOpcode.cLDC2_W:
-            case JOpcode.cBALOAD: case JOpcode.cGETSTATIC:
-            case JOpcode.cGETFIELD:
-                return 1;
-            case JOpcode.cDUP: case JOpcode.cSWAP:
-                return 2;
-            case JOpcode.cDUP_X1:
-                return 3;
-            case JOpcode.cWIDE: {
-                int op = codeArray.getU1(pc + 1);
-                if (op >= JOpcode.cILOAD && op <= JOpcode.cALOAD)
-                    return 1;
-                else if (op >= JOpcode.cISTORE && op <= JOpcode.cASTORE)
-                    return 0;
-                else
-                    return 0; // (IINC)
-            }
-            default:
-                throw new Error("JOpcode implementation error");
-            }
-        }
-    }
-
-    /**
-     * Returns the number of data types taken from the stack by the current
-     * instruction.
-     * @return The number of data types taken from the stack by the current
-     * instruction.
-     */
-//     public int getConsumedDataTypesNumber() {
-//         if (opcode.getConsumedDataTypes() == JOpcode.UNKNOWN_TYPE) {
-//             switch (opcode.code) {
-//             case 87 : return 1; // POP
-//             case 88 : return 2; // POP2
-//             case 89 : return 1; // DUP
-//             case 90 : return 2; // DUP_X1
-//             case 91 : // DUP_X2
-//             case 92 : // DUP2
-//             case 93 : // DUP2_X1
-//             case 94 : // DUP2_X2
-//                 throw new UnsupportedOperationException("Opcode " + opcode.name
-//                                                         + " has a stack-dependent"
-//                                                         + " data types consumption");
-//             case 95 : return 2; // SWAP
-//             case 179 : return 1; // PUTSTATIC
-//             case 181 : return 1; // PUTFIELD
-//             case 182 : // INVOKEVIRTUAL
-//             case 183 : // INVOKESPECIAL
-//             case 185 : // INVOKEINTERFACE
-//                 s = epool.getClassMethodRef(codeArray.getU2(pc + 1)).split(" ")[3];
-//                 return ((JMethodType)JType.parseSignature(s)).argTypes.length + 1;
-//             case 184 : // INVOKESTATIC
-//                 s = epool.getClassMethodRef(codeArray.getU2(pc + 1)).split(" ")[3];
-//                 return ((JMethodType)JType.parseSignature(s)).argTypes.length;
-//             case 196 : // WIDE
-//                 int op = codeArray.getU1(pc + 1);
-//                 if (op >= 21 && op <= 25) return 0; // (xLOAD)
-//                 else if (op >= 54 && op <= 58) // (xSTORE)
-//                     return JOpcode.OPCODES[op].getConsumedDataTypes().length;
-//                 else return 0; // (IINC)
-//             case 197 : return codeArray.getU1(pc + 3); // MULTIANEWARRAY
-//             default : throw new Error("JOpcode implementation error");
-//             }
-//         } else return opcode.getConsumedDataTypes().length;
-//     }
-
-
-    // Return the number between 0 and 3 which, if added to the given
-    // value, would yield a multiple of 4.
-    protected int[] padding = { 0, 3, 2, 1 };
-    protected int pad4(int value) {
-        return padding[value % 4];
-    }
-}
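
The only variable-length instructions JCodeIterator has to handle above are tableswitch and lookupswitch: both pad to the next 4-byte boundary after the opcode byte, then tableswitch stores default/low/high plus one 4-byte target per key in [low, high], while lookupswitch stores default/npairs plus one 8-byte match/offset pair per entry. The standalone sketch below reproduces that size computation with hypothetical names; it is not part of FJBG.

    public class SwitchSizes {
        // Bytes needed after address addr to reach the next multiple of 4
        // (equivalent to JCodeIterator.pad4 and its { 0, 3, 2, 1 } table).
        static int pad4(int addr) { return (4 - addr % 4) % 4; }

        // Size of a tableswitch starting at pc with keys low..high.
        static int tableswitchSize(int pc, int low, int high) {
            return 1 + pad4(pc + 1)                  // opcode + padding
                + 4 + 4 + 4                          // default, low, high
                + 4 * (high - low + 1);              // one 32-bit target per key
        }

        // Size of a lookupswitch starting at pc with npairs entries.
        static int lookupswitchSize(int pc, int npairs) {
            return 1 + pad4(pc + 1)                  // opcode + padding
                + 4 + 4                              // default, npairs
                + 8 * npairs;                        // one (match, offset) pair per entry
        }

        public static void main(String[] args) {
            System.out.println(tableswitchSize(0, 1, 3));   // 1 + 3 + 12 + 12 = 28
            System.out.println(lookupswitchSize(5, 2));     // 1 + 2 + 8 + 16 = 27
        }
    }
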
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java b/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
deleted file mode 100644
index 9867e01..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
+++ /dev/null
@@ -1,771 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.HashMap;
-
-/**
- * Constant pool, holding constants for a Java class file.
- *
- * @author Michel Schinz
- * @version 2.0
- */
-
-public class JConstantPool {
-    protected boolean frozen = false;
-
-    protected HashMap/*<Entry,Integer>*/ entryToIndex = new HashMap();
-    protected Entry[] indexToEntry;
-    protected int currIndex;
-
-    public static final short CONSTANT_Utf8               = 1;
-    public static final short CONSTANT_Integer            = 3;
-    public static final short CONSTANT_Float              = 4;
-    public static final short CONSTANT_Long               = 5;
-    public static final short CONSTANT_Double             = 6;
-    public static final short CONSTANT_Class              = 7;
-    public static final short CONSTANT_String             = 8;
-    public static final short CONSTANT_Fieldref           = 9;
-    public static final short CONSTANT_Methodref          = 10;
-    public static final short CONSTANT_InterfaceMethodref = 11;
-    public static final short CONSTANT_NameAndType        = 12;
-
-    protected JConstantPool(FJBGContext context) {
-        indexToEntry = new Entry[8];
-        currIndex = 1;
-    }
-
-    protected JConstantPool(FJBGContext context, DataInputStream stream)
-        throws IOException {
-        int count = stream.readShort();
-        indexToEntry = new EntryIndex[count];
-
-        currIndex = 1;
-        while (currIndex < count) {
-            EntryIndex e;
-            int tag = stream.readByte();
-
-            switch (tag) {
-            case CONSTANT_Utf8:
-                e = new Utf8Entry(stream);
-                // no duplicates
-                entryToIndex.put(e, new Integer(currIndex));
-                break;
-            case CONSTANT_Integer:
-                e = new IntegerEntry(stream);
-                break;
-            case CONSTANT_Float:
-                e = new FloatEntry(stream);
-                break;
-            case CONSTANT_Long:
-                e = new LongEntry(stream);
-                break;
-            case CONSTANT_Double:
-                e = new DoubleEntry(stream);
-                break;
-            case CONSTANT_Class:
-                e = new DescriptorEntryIndex(stream);
-                break;
-            case CONSTANT_String:
-                e = new StringEntryIndex(stream);
-                break;
-            case CONSTANT_Fieldref:
-            case CONSTANT_Methodref:
-            case CONSTANT_InterfaceMethodref:
-                e = new FieldOrMethodRefEntryIndex(tag, stream);
-                break;
-            case CONSTANT_NameAndType:
-                e = new NameAndTypeEntryIndex(stream);
-                break;
-            default:
-                throw new IllegalArgumentException("unknown entry in pool: " + tag);
-            }
-            indexToEntry[currIndex] = e;
-            currIndex += e.getSize();
-        }
-    }
-
-    public void freeze() { frozen = true; }
-
-    /**
-     * Returns a string representing the type of an entry,
-     * given its tag.
-     * @param tag The tag representing the type of the
-     * constant pool entry
-     */
-    public String getEntryType(int tag) {
-        switch (tag) {
-        case CONSTANT_Utf8   : return "Utf8";
-        case CONSTANT_Integer : return "Integer";
-        case CONSTANT_Float  : return "Float";
-        case CONSTANT_Long   : return "Long";
-        case CONSTANT_Double : return "Double";
-        case CONSTANT_Class  : return "Class";
-        case CONSTANT_String : return "String";
-        case CONSTANT_Fieldref : return "Field";
-        case CONSTANT_Methodref : return "Method";
-        case CONSTANT_InterfaceMethodref : return "InterfaceMethod";
-        case CONSTANT_NameAndType : return "NameAndType";
-        default : throw new Error("invalid constant pool tag : " + tag);
-        }
-    }
-
-    public int addClass(String className) {
-        return addDescriptor(className.replace('.', '/'));
-    }
-
-    public int addDescriptor(JReferenceType type) {
-        return addDescriptor(type.getDescriptor());
-    }
-
-    protected int addDescriptor(String name) {
-        return addEntry(new DescriptorEntryValue(name));
-    }
-
-    public int addClassMethodRef(String className,
-                                 String methodName,
-                                 String signature) {
-        return addMethodRef(true, className, methodName, signature);
-    }
-
-    public int addInterfaceMethodRef(String className,
-                                     String methodName,
-                                     String signature) {
-        return addMethodRef(false, className, methodName, signature);
-    }
-
-    public int addMethodRef(boolean isClass,
-                            String className,
-                            String methodName,
-                            String signature) {
-        return addEntry(new FieldOrMethodRefEntryValue(isClass
-                                                       ? CONSTANT_Methodref
-                                                       : CONSTANT_InterfaceMethodref,
-                                                       className,
-                                                       methodName,
-                                                       signature));
-    }
-
-    public int addFieldRef(String className,
-                           String fieldName,
-                           String signature) {
-        return addEntry(new FieldOrMethodRefEntryValue(CONSTANT_Fieldref,
-                                                       className,
-                                                       fieldName,
-                                                       signature));
-    }
-
-    public int addInteger(int value) {
-        return addEntry(new IntegerEntry(value));
-    }
-
-    public int addFloat(float value) {
-        return addEntry(new FloatEntry(value));
-    }
-
-    public int addLong(long value) {
-        return addEntry(new LongEntry(value));
-    }
-
-    public int addDouble(double value) {
-        return addEntry(new DoubleEntry(value));
-    }
-
-    public int addString(String value) {
-        return addEntry(new StringEntryValue(value));
-    }
-
-    public int addNameAndType(String name, String descriptor) {
-        return addEntry(new NameAndTypeEntryValue(name, descriptor));
-    }
-
-    public int addUtf8(String value) {
-        return addEntry(new Utf8Entry(value));
-    }
-
-    public int addUtf8(byte[] value) {
-        return addEntry(new Utf8Entry(value));
-    }
-
-    protected int addEntry(EntryValue e) {
-        assert !frozen;
-        Integer idx = (Integer)entryToIndex.get(e);
-        if (idx != null)
-            return idx.intValue();
-
-        e.addChildren();
-
-        int index = currIndex;
-        currIndex += e.getSize();
-
-        entryToIndex.put(e, new Integer(index));
-        if (index >= indexToEntry.length) {
-            Entry[] newI2E = new Entry[indexToEntry.length * 2];
-            System.arraycopy(indexToEntry, 0, newI2E, 0, indexToEntry.length);
-            indexToEntry = newI2E;
-        }
-        indexToEntry[index] = e;
-        return index;
-    }
-
-    /// Lookup methods
-    //////////////////////////////////////////////////////////////////////
-
-    public Entry lookupEntry(int index) {
-        assert index > 0 && index < currIndex
-            : "invalid index: " + index;
-        assert indexToEntry[index] != null
-            : "invalid index (null contents): " + index;
-        return indexToEntry[index];
-    }
-
-    public String lookupClass(int index) {
-        DescriptorEntry entry = (DescriptorEntry)lookupEntry(index);
-        return entry.getValue();
-    }
-
-    public String lookupNameAndType(int index) {
-        NameAndTypeEntry entry = (NameAndTypeEntry)lookupEntry(index);
-        return entry.getName()+":"+entry.getDescriptor();
-    }
-
-    public String lookupUtf8(int index) {
-        Utf8Entry entry = (Utf8Entry)lookupEntry(index);
-        return entry.getValue();
-    }
-
-    /// Output
-    //////////////////////////////////////////////////////////////////////
-
-    public void writeTo(DataOutputStream stream) throws IOException {
-        if (! frozen) freeze();
-
-        stream.writeShort(currIndex);
-        for (int i = 0; i < currIndex; ++i) {
-            Entry entry = indexToEntry[i];
-            if (entry != null) {
-                stream.writeByte(entry.getTag());
-                entry.writeContentsTo(stream);
-            }
-        }
-    }
-
-    // Follows javap output format for constant pool.
-    /*@Override*/ public String toString() {
-        StringBuffer buf = new StringBuffer("  Constant pool:");
-        for (int i = 0; i < currIndex; ++i) {
-            Entry entry = indexToEntry[i];
-            if (entry != null) {
-                if (i > 0) buf.append("\n");
-                buf.append("const #");
-                buf.append(i);
-                buf.append(" = ");
-                buf.append(entry);
-            }
-        }
-        buf.append("\n");
-        return buf.toString();
-    }
-
-    /// Classes for the various kinds of entries
-    //////////////////////////////////////////////////////////////////////
-
-    public interface Entry {
-        public int getTag();
-
-        int getSize();
-        void writeContentsTo(DataOutputStream stream) throws IOException;
-        String toComment(String ownerClassName);
-    }
-
-    protected interface EntryValue extends Entry {
-        abstract void addChildren();
-    }
-
-    protected interface EntryIndex extends Entry {
-        abstract void fetchChildren();
-    }
-
-    abstract protected class ChildlessEntry implements EntryValue, EntryIndex {
-        public void addChildren() {}
-        public void fetchChildren() {}
-    }
-
-    public class IntegerEntry extends ChildlessEntry implements Entry {
-        private final int value;
-        public IntegerEntry(int value) { this.value = value; }
-        public IntegerEntry(DataInputStream stream) throws IOException {
-            this(stream.readInt());
-        }
-
-        public int hashCode() { return value; }
-        public boolean equals(Object o) {
-            return o instanceof IntegerEntry && ((IntegerEntry)o).value == value;
-        }
-
-        public int getTag() { return CONSTANT_Integer; }
-        public int getValue() { return value; }
-
-        public int getSize() { return 1; }
-        public void writeContentsTo(DataOutputStream stream) throws IOException {
-            stream.writeInt(value);
-        }
-        /*@Override*/ public String toString() {
-            StringBuffer buf = new StringBuffer("int\t");
-            buf.append(getValue());
-            buf.append(";");
-            return buf.toString();
-        }
-        public String toComment(String ownerClassname) {
-            return "//int "+getValue();
-        }
-    }
-
-    public class FloatEntry extends ChildlessEntry implements Entry {
-        private final float value;
-        public FloatEntry(float value) { this.value = value; }
-        public FloatEntry(DataInputStream stream) throws IOException {
-            this(stream.readFloat());
-        }
-
-        public int hashCode() { return (int)value; }
-        public boolean equals(Object o) {
-            return o instanceof FloatEntry && ((FloatEntry)o).value == value;
-        }
-
-        public int getTag() { return CONSTANT_Float; }
-        public float getValue() { return value; }
-
-        public int getSize() { return 1; }
-        public void writeContentsTo(DataOutputStream stream) throws IOException {
-            stream.writeFloat(value);
-        }
-        /*@Override*/ public String toString() {
-            StringBuffer buf = new StringBuffer("float\t");
-            buf.append(getValue());
-            buf.append("f");
-            return buf.toString();
-        }
-        public String toComment(String ownerClassname) {
-            return "//float "+getValue()+"f";
-        }
-    }
-
-    public class LongEntry extends ChildlessEntry implements Entry {
-        private final long value;
-        public LongEntry(long value) { this.value = value; }
-        public LongEntry(DataInputStream stream) throws IOException {
-            this(stream.readLong());
-        }
-
-        public int hashCode() { return (int)value; }
-        public boolean equals(Object o) {
-            return o instanceof LongEntry && ((LongEntry)o).value == value;
-        }
-
-        public int getTag() { return CONSTANT_Long; }
-        public long getValue() { return value; }
-
-        public int getSize() { return 2; }
-        public void writeContentsTo(DataOutputStream stream) throws IOException {
-            stream.writeLong(value);
-        }
-        /*@Override*/ public String toString() {
-            StringBuffer buf = new StringBuffer("long\t");
-            buf.append(getValue());
-            buf.append("l;");
-            return buf.toString();
-        }
-        public String toComment(String ownerClassname) {
-            return "//long "+getValue()+"l";
-        }
-    }
-
-    public class DoubleEntry extends ChildlessEntry implements Entry {
-        private final double value;
-        public DoubleEntry(double value) { this.value = value; }
-        public DoubleEntry(DataInputStream stream) throws IOException {
-            this(stream.readDouble());
-        }
-
-        public int hashCode() { return (int)value; }
-        public boolean equals(Object o) {
-            return o instanceof DoubleEntry && ((DoubleEntry)o).value == value;
-        }
-
-        public int getTag() { return CONSTANT_Double; }
-        public double getValue() { return value; }
-
-        public int getSize() { return 2; }
-        public void writeContentsTo(DataOutputStream stream) throws IOException {
-            stream.writeDouble(value);
-        }
-        /*@Override*/ public String toString() {
-            StringBuffer buf = new StringBuffer("double\t");
-            buf.append(getValue());
-            return buf.toString();
-        }
-        public String toComment(String ownerClassname) {
-            return "//double "+getValue();
-        }
-    }
-
-    public class Utf8Entry extends ChildlessEntry implements Entry {
-        private final String value;
-        private final byte[] bytes;
-        public Utf8Entry(String value) {
-            this.value = value.intern();
-            this.bytes = null;
-        }
-        public Utf8Entry(DataInputStream stream) throws IOException {
-            this(stream.readUTF());
-        }
-        public Utf8Entry(byte[] bytes) {
-            this.bytes = bytes;
-            this.value = null;
-        }
-
-        public int hashCode() {
-            if (bytes != null) return bytes.hashCode();
-            return value.hashCode();
-        }
-        public boolean equals(Object o) {
-            boolean isEqual = o instanceof Utf8Entry;
-            if (bytes != null) {
-                isEqual = isEqual && ((Utf8Entry)o).bytes == bytes;
-            }
-            else {
-                isEqual = isEqual && ((Utf8Entry)o).value == value;
-            }
-            return isEqual;
-        }
-
-        public int getTag() { return CONSTANT_Utf8; }
-        public String getValue() { return value; }
-        public byte[] getBytes() { return bytes; }
-
-        public int getSize() { return 1; }
-        public void writeContentsTo(DataOutputStream stream) throws IOException {
-            if (bytes != null) {
-                if (bytes.length > 65535) {
-                    throw new IOException("String literal of length " + bytes.length + " does not fit in Classfile");
-                }
-                stream.writeShort(bytes.length);
-                stream.write(bytes);
-            }
-            else
-                stream.writeUTF(value);
-        }
-        // Follows javap output format for Utf8 pool entries.
-        public String toString() { return "Asciz\t"+escaped(getValue())+";"; }
-        public String toComment(String ownerClassname) {
-            return "//Asciz "+escaped(getValue());
-        }
-        private String escaped(String s) {
-            return s.replace("\n", "\\n");
-        }
-    }
-
-    abstract public class StringEntry implements Entry {
-        protected String value;
-        protected int valueIndex;
-
-        public int hashCode() {
-            assert value != null;
-            return value.hashCode();
-        }
-        public boolean equals(Object o) {
-            return o instanceof StringEntry && ((StringEntry)o).value == value;
-        }
-
-        public int getTag() { return CONSTANT_String; }
-        public String getValue() { return value; }
-
-        public int getSize() { return 1; }
-        public void writeContentsTo(DataOutputStream stream) throws IOException {
-            stream.writeShort(valueIndex);
-        }
-        // Follows javap output format for String pool entries.
-        public String toString() {
-            return "String\t#"+valueIndex+";\t//  "+escaped(getValue());
-        }
-        public String toComment(String ownerClassname) {
-            return "//String "+escaped(getValue());
-        }
-        private String escaped(String s) {
-            return s.replace("\n", "\\n");
-        }
-    }
-
-    public class StringEntryValue extends StringEntry implements EntryValue {
-        public StringEntryValue(String value) {
-            this.value = value.intern();
-        }
-        public void addChildren() {
-            valueIndex = addUtf8(value);
-        }
-    }
-
-    public class StringEntryIndex extends StringEntry implements EntryIndex {
-        public StringEntryIndex(int valueIndex) {
-            this.valueIndex = valueIndex;
-        }
-        public StringEntryIndex(DataInputStream stream) throws IOException {
-            this(stream.readShort());
-        }
-        public String getValue() {
-            if (value == null) fetchChildren();
-            return super.getValue();
-        }
-        public void fetchChildren() {
-            value = lookupUtf8(valueIndex);
-        }
-    }
-
-    abstract public class DescriptorEntry implements Entry {
-        protected String name;
-        protected int nameIndex;
-
-        public int hashCode() {
-            assert name != null;
-            return name.hashCode();
-        }
-        public boolean equals(Object o) {
-            return o instanceof DescriptorEntry && ((DescriptorEntry)o).name == name;
-        }
-
-        public int getTag() { return CONSTANT_Class; }
-        public String getValue() { return name; }
-
-        public int getSize() { return 1; }
-        public void writeContentsTo(DataOutputStream stream) throws IOException {
-            stream.writeShort(nameIndex);
-        }
-        // Follows javap output format for class pool entries.
-        public String toString() {
-            StringBuffer buf = new StringBuffer("class\t#");
-            buf.append(nameIndex);
-            buf.append(";\t//  ");
-            buf.append(getClassName());
-            return buf.toString();
-        }
-        public String toComment(String ownerClassname) {
-            return "//class "+getClassName();
-        }
-        private String getClassName() {
-            StringBuffer buf = new StringBuffer();
-            String value = getValue();
-            if (value.startsWith("[")) buf.append("\"");
-            buf.append(value);
-            if (value.startsWith("[")) buf.append("\"");
-            return buf.toString();
-        }
-    }
-
-    protected class DescriptorEntryValue
-        extends DescriptorEntry
-        implements EntryValue {
-        public DescriptorEntryValue(String name) { this.name = name.intern(); }
-        public void addChildren() {
-            nameIndex = addUtf8(name);
-        }
-    }
-
-    protected class DescriptorEntryIndex
-        extends DescriptorEntry
-        implements EntryIndex {
-        public DescriptorEntryIndex(int nameIndex) { this.nameIndex = nameIndex; }
-        public DescriptorEntryIndex(DataInputStream stream) throws IOException {
-            this(stream.readShort());
-        }
-        public String getValue() {
-            if (name == null) fetchChildren();
-            return super.getValue();
-        }
-        public void fetchChildren() {
-            name = lookupUtf8(nameIndex);
-        }
-    }
-
-    abstract public class FieldOrMethodRefEntry implements Entry {
-        private final int tag;
-        protected String className, thingName, signature;
-        protected int classIndex, nameAndTypeIndex;
-
-        public FieldOrMethodRefEntry(int tag) {
-            assert tag == CONSTANT_Fieldref
-                || tag == CONSTANT_Methodref
-                || tag == CONSTANT_InterfaceMethodref;
-
-            this.tag = tag;
-        }
-
-        public int hashCode() {
-            return tag
-                + className.hashCode()
-                + thingName.hashCode()
-                + signature.hashCode();
-        }
-        public boolean equals(Object o) {
-            return o instanceof FieldOrMethodRefEntry
-                && ((FieldOrMethodRefEntry)o).tag == tag
-                && ((FieldOrMethodRefEntry)o).className == className
-                && ((FieldOrMethodRefEntry)o).thingName == thingName
-                && ((FieldOrMethodRefEntry)o).signature == signature;
-        }
-
-        public int getTag() { return tag; }
-        public String getClassName() { return className; }
-        public String getFieldOrMethodName() { return thingName; }
-        public String getSignature() { return signature; }
-
-        public int getSize() { return 1; }
-        public void writeContentsTo(DataOutputStream stream) throws IOException {
-            stream.writeShort(classIndex);
-            stream.writeShort(nameAndTypeIndex);
-        }
-        // Follows javap output format for field/method pool entries.
-        public String toString() {
-            return getEntryType(tag)+"\t#"+classIndex+".#"+nameAndTypeIndex+
-                   ";\t//  "+getName("")+":"+signature;
-        }
-        public String toComment(String ownerClassName) {
-            return "//"+getEntryType(tag)+" "+getName(ownerClassName)+":"+signature;
-        }
-        private String getName(String ownerClassName) {
-            String name = getFieldOrMethodName();
-            if (JMethod.INSTANCE_CONSTRUCTOR_NAME.equals(name))
-                name = "\""+name+"\"";
-            if (!getClassName().equals(ownerClassName))
-                name = getClassName()+"."+name;
-            return name;
-        }
-    }
-
-    protected class FieldOrMethodRefEntryValue
-        extends FieldOrMethodRefEntry
-        implements EntryValue {
-        public FieldOrMethodRefEntryValue(int tag,
-                                          String className,
-                                          String thingName,
-                                          String signature) {
-            super(tag);
-            this.className = className.intern();
-            this.thingName = thingName.intern();
-            this.signature = signature.intern();
-        }
-
-        public void addChildren() {
-            classIndex = addClass(className);
-            nameAndTypeIndex = addNameAndType(thingName, signature);
-        }
-    }
-
-    protected class FieldOrMethodRefEntryIndex
-        extends FieldOrMethodRefEntry
-        implements EntryIndex {
-        public FieldOrMethodRefEntryIndex(int tag,
-                                          int classIndex,
-                                          int nameAndTypeIndex) {
-            super(tag);
-            this.classIndex = classIndex;
-            this.nameAndTypeIndex = nameAndTypeIndex;
-        }
-        public FieldOrMethodRefEntryIndex(int tag, DataInputStream stream)
-            throws IOException {
-            this(tag, stream.readShort(), stream.readShort());
-        }
-        public String getClassName() {
-            if (className == null) fetchChildren();
-            return super.getClassName();
-        }
-        public String getFieldOrMethodName() {
-            if (thingName == null) fetchChildren();
-            return super.getFieldOrMethodName();
-        }
-        public String getSignature() {
-            if (signature == null) fetchChildren();
-            return super.getSignature();
-        }
-        public void fetchChildren() {
-            className = lookupClass(classIndex);
-            NameAndTypeEntry nat = (NameAndTypeEntry)lookupEntry(nameAndTypeIndex);
-            thingName = nat.getName();
-            signature = nat.getDescriptor();
-        }
-    }
-
-    abstract public class NameAndTypeEntry implements Entry {
-        protected String name, descriptor;
-        protected int nameIndex, descriptorIndex;
-
-        public int hashCode() { return name.hashCode() + descriptor.hashCode(); }
-        public boolean equals(Object o) {
-            return o instanceof NameAndTypeEntry
-                && ((NameAndTypeEntry)o).name == name
-                && ((NameAndTypeEntry)o).descriptor == descriptor;
-        }
-
-        public int getTag() { return CONSTANT_NameAndType; }
-        public String getName() { return name; }
-        public String getDescriptor() { return descriptor; }
-
-        public int getSize() { return 1; }
-        public void writeContentsTo(DataOutputStream stream) throws IOException {
-            stream.writeShort(nameIndex);
-            stream.writeShort(descriptorIndex);
-        }
-        // Follows javap output format for name/type pool entries.
-        public String toString() {
-            String natName = getName();
-            if (JMethod.INSTANCE_CONSTRUCTOR_NAME.equals(natName))
-                natName = "\""+natName+"\"";
-            return "NameAndType\t#"+nameIndex+":#"+descriptorIndex+
-                   ";//  "+natName+":"+getDescriptor();
-        }
-        public String toComment(String ownerClassname) { return ""; }
-    }
-
-    protected class NameAndTypeEntryValue
-        extends NameAndTypeEntry
-        implements EntryValue {
-        public NameAndTypeEntryValue(String name, String descriptor) {
-            this.name = name.intern();
-            this.descriptor = descriptor.intern();
-        }
-        public void addChildren() {
-            nameIndex = addUtf8(name);
-            descriptorIndex = addUtf8(descriptor);
-        }
-    }
-
-    protected class NameAndTypeEntryIndex
-        extends NameAndTypeEntry
-        implements EntryIndex {
-        public NameAndTypeEntryIndex(int nameIndex, int descriptorIndex) {
-            this.nameIndex = nameIndex;
-            this.descriptorIndex = descriptorIndex;
-        }
-        public NameAndTypeEntryIndex(DataInputStream stream) throws IOException {
-            this(stream.readShort(), stream.readShort());
-        }
-        public String getName() {
-            if (name == null) fetchChildren();
-            return super.getName();
-        }
-        public String getDescriptor() {
-            if (descriptor == null) fetchChildren();
-            return super.getDescriptor();
-        }
-        public void fetchChildren() {
-            name = lookupUtf8(nameIndex);
-            descriptor = lookupUtf8(descriptorIndex);
-        }
-    }
-}
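
For orientation while reading the removed constant-pool code above: each entry is serialized as a one-byte tag followed by whatever its writeContentsTo method emits. The following is a minimal standalone sketch of that layout using only java.io; the pool index written for the String entry is made up for illustration, and no FJBG classes are assumed.

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class PoolEntrySketch {
        // Tag values as defined by the JVM specification.
        static final int CONSTANT_Utf8 = 1;
        static final int CONSTANT_Integer = 3;
        static final int CONSTANT_String = 8;

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);

            // A CONSTANT_Integer entry: tag byte, then the 4-byte value
            // (mirrors IntegerEntry.writeContentsTo above).
            out.writeByte(CONSTANT_Integer);
            out.writeInt(42);

            // A CONSTANT_Utf8 entry: tag byte, then length-prefixed modified UTF-8
            // (mirrors Utf8Entry.writeContentsTo for the String-backed case).
            out.writeByte(CONSTANT_Utf8);
            out.writeUTF("hello");

            // A CONSTANT_String entry: tag byte, then a 2-byte index pointing at
            // the Utf8 entry holding the characters (mirrors StringEntry).
            out.writeByte(CONSTANT_String);
            out.writeShort(2); // hypothetical pool index of the Utf8 entry

            out.flush();
            System.out.println("emitted " + bytes.size() + " bytes");
        }
    }
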
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java
deleted file mode 100644
index 6ee05e4..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * ConstantValue attribute representing the value of a constant field.
- *
- * There can be no more than one ConstantValue attribute in the attributes
- * table of a given field_info structure. See section 4.8.2 of the JVM
- * specification.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-
-public class JConstantValueAttribute extends JAttribute {
-    /** Constant pool of the current classfile. */
-    private JConstantPool pool;
-
-    protected int constantValueIndex;
-
-    public JConstantValueAttribute(FJBGContext context,
-                                  JClass clazz,
-                                  JField field) {
-        super(context, clazz);
-        this.pool = clazz.pool;
-
-        assert field.getOwner() == clazz;
-    }
-
-    public JConstantValueAttribute(FJBGContext context,
-                                   JClass clazz,
-                                   Object owner, // JField
-                                   String name,
-                                   int size,
-                                   DataInputStream stream)
-        throws IOException {
-        super(context, clazz, name);
-        this.pool = clazz.pool;
-
-        this.constantValueIndex = stream.readShort();
-
-        assert name.equals(getName());
-    }
-
-    public String getName() { return "ConstantValue"; }
-
-    // Follows javap output format for ConstantValue attribute.
-    /*@Override*/ public String toString() {
-        StringBuffer buf = new StringBuffer("  Constant value: ");
-        buf.append(pool.lookupEntry(constantValueIndex));
-        return buf.toString();
-    }
-
-    protected int getSize() {
-        return 2; // one 16-bit index
-    }
-
-    protected void writeContentsTo(DataOutputStream stream) throws IOException {
-        stream.writeShort(constantValueIndex);
-    }
-}
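
The removed ConstantValue attribute is small on disk: a name index, a fixed attribute_length of 2, and the index of the pool entry holding the value. A standalone sketch of those bytes follows; the two index values are hypothetical.

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class ConstantValueSketch {
        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);

            int attributeNameIndex = 5;  // hypothetical Utf8 index of "ConstantValue"
            int constantValueIndex = 7;  // hypothetical index of the constant entry

            out.writeShort(attributeNameIndex);
            out.writeInt(2);                    // attribute_length, matches getSize()
            out.writeShort(constantValueIndex); // what writeContentsTo emits

            out.flush();
            System.out.println("attribute is " + bytes.size() + " bytes"); // 8
        }
    }
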
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java
deleted file mode 100644
index f663f00..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * EnclosingMethod attribute.
- *
- * A class must have an EnclosingMethod attribute if and only if it is a
- * local class or an anonymous class. A class may have no more than one
- * EnclosingMethod attribute. See section 4.8.6 of the JVM specification.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JEnclosingMethodAttribute extends JAttribute {
-    /** Constant pool of the current classfile. */
-    private JConstantPool pool;
-
-    protected final int classIdx;
-    protected final int nameAndTypeIdx;
-
-    public JEnclosingMethodAttribute(FJBGContext context,
-                                     JClass clazz,
-                                     String className,
-                                     String methodName,
-                                     JType methodType) {
-        super(context, clazz);
-        this.pool = clazz.pool;
-
-        this.classIdx = pool.addClass(className);
-        this.nameAndTypeIdx = pool.addNameAndType(methodName, methodType.getSignature());
-    }
-
-    public JEnclosingMethodAttribute(FJBGContext context,
-                                     JClass clazz,
-                                     Object owner,
-                                     String name,
-                                     int size,
-                                     DataInputStream stream)
-        throws IOException {
-        super(context, clazz, name);
-        this.pool = clazz.pool;
-
-        this.classIdx = stream.readShort();
-        this.nameAndTypeIdx = stream.readShort();
-
-        assert name.equals(getName());
-    }
-
-    public String getName() { return "EnclosingMethod"; }
-
-    // Follows javap output format for EnclosingMethod attribute.
-    /*@Override*/ public String toString() {
-        StringBuffer buf = new StringBuffer("  EnclosingMethod:");
-        buf.append("\n   #");
-        buf.append(classIdx);
-        if (nameAndTypeIdx != 0) {
-            buf.append(" of #");
-            buf.append(nameAndTypeIdx);
-        }
-        buf.append(";\t//  ");
-        buf.append(pool.lookupEntry(classIdx));
-        buf.append("\n");
-        return buf.toString();
-    }
-
-    protected int getSize() {
-        return 4; // two 16-bit indices
-    }
-
-    protected void writeContentsTo(DataOutputStream stream) throws IOException {
-        stream.writeShort(classIdx);
-        stream.writeShort(nameAndTypeIdx);
-    }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java
deleted file mode 100644
index b91d0f2..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * Exceptions attribute.
- *
- * This table is used by compilers to indicate which Exceptions a method
- * is declared to throw. See section 2.6.4 of the JVM specification.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-
-public class JExceptionsAttribute extends JAttribute {
-    /** Constant pool of the current classfile. */
-    private JConstantPool pool;
-
-    protected int[] indexTable;
-    protected int count;
-
-    public JExceptionsAttribute(FJBGContext context,
-                                JClass clazz,
-                                JMethod owner) {
-        super(context, clazz);
-        this.pool = clazz.pool;
-
-        this.count = 0;
-        this.indexTable = new int[8]; // some size > count
-
-        assert clazz == owner.getOwner();
-    }
-
-    public JExceptionsAttribute(FJBGContext context,
-                                JClass clazz,
-                                Object owner, //JMethod
-                                String name,
-                                int size,
-                                DataInputStream stream)
-        throws IOException {
-        super(context, clazz, name);
-        this.pool = clazz.pool;
-
-        this.count = stream.readShort();
-        this.indexTable = new int[count];
-        for (int i = 0; i < count; ++i)
-            indexTable[i] = stream.readShort();
-
-        assert name.equals(getName());
-    }
-
-    public void addEntry(int classIndex) {
-        if (count >= indexTable.length) {
-            int[] newIT = new int[indexTable.length * 2];
-            System.arraycopy(indexTable, 0, newIT, 0, indexTable.length);
-            indexTable = newIT;
-        }
-        indexTable[count++] = classIndex;
-    }
-
-    public String getName() { return "Exceptions"; }
-
-    // Follows javap output format for Exceptions attribute.
-    /*@Override*/ public String toString() {
-        StringBuffer buf = new StringBuffer("  Exceptions: ");
-        for (int i = 0; i < count; ++i) {
-            buf.append("\n   throws ");
-            buf.append(JClass.toExternalName(pool.lookupClass(indexTable[i])));
-        }
-        buf.append("\n");
-        return buf.toString();
-    }
-
-    protected int getSize() {
-        return 2 + count * 2;
-    }
-
-    protected void writeContentsTo(DataOutputStream stream) throws IOException {
-        stream.writeShort(count);
-        for (int i = 0; i < count; ++i)
-            stream.writeShort(indexTable[i]);
-    }
-}
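
The removed Exceptions attribute stores a count followed by one CONSTANT_Class index per declared exception, so its contents occupy 2 + 2 * count bytes. A standalone sketch with made-up pool indices:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class ExceptionsAttributeSketch {
        public static void main(String[] args) throws IOException {
            int[] exceptionClassIndices = { 12, 15 }; // hypothetical CONSTANT_Class indices

            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);

            out.writeShort(exceptionClassIndices.length); // number_of_exceptions
            for (int index : exceptionClassIndices)
                out.writeShort(index);                    // exception_index_table entries

            out.flush();
            // 2 + 2 * count bytes, which is what the attribute_length should declare.
            System.out.println("contents are " + bytes.size() + " bytes");
        }
    }
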
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
deleted file mode 100644
index d82db82..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
+++ /dev/null
@@ -1,667 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Extended list of instructions, providing pseudo-instructions which
- * are easier to use than the standard ones.
- *
- * @author Michel Schinz, Thomas Friedli
- * @version 1.0
- */
-
-public class JExtendedCode extends JCode {
-    public final static int COND_EQ = 0;
-    public final static int COND_NE = 1;
-    public final static int COND_LT = 2;
-    public final static int COND_GE = 3;
-    public final static int COND_GT = 4;
-    public final static int COND_LE = 5;
-
-    private final JOpcode[] forbidden = new JOpcode[0];
-    private final JOpcode[] nothingToDo = new JOpcode[0];
-
-    private final JOpcode[][][] typeConversions = {
-        {
-            /* T_BOOLEAN -> T_BOOLEAN */ nothingToDo,
-            /* T_BOOLEAN -> T_CHAR */    forbidden,
-            /* T_BOOLEAN -> T_FLOAT */   forbidden,
-            /* T_BOOLEAN -> T_DOUBLE */  forbidden,
-            /* T_BOOLEAN -> T_BYTE */    forbidden,
-            /* T_BOOLEAN -> T_SHORT */   forbidden,
-            /* T_BOOLEAN -> T_INT */     forbidden,
-            /* T_BOOLEAN -> T_LONG */    forbidden
-        },
-        {
-            /* T_CHAR -> T_BOOLEAN */ forbidden,
-            /* T_CHAR -> T_CHAR */    nothingToDo,
-            /* T_CHAR -> T_FLOAT */   {JOpcode.I2F},
-            /* T_CHAR -> T_DOUBLE */  {JOpcode.I2D},
-            /* T_CHAR -> T_BYTE */    {JOpcode.I2B},
-            /* T_CHAR -> T_SHORT */   {JOpcode.I2S},
-            /* T_CHAR -> T_INT */     nothingToDo,
-            /* T_CHAR -> T_LONG */    {JOpcode.I2L}
-        },
-        {
-            /* T_FLOAT -> T_BOOLEAN */ forbidden,
-            /* T_FLOAT -> T_CHAR */    {JOpcode.F2I, JOpcode.I2C},
-            /* T_FLOAT -> T_FLOAT */   nothingToDo,
-            /* T_FLOAT -> T_DOUBLE */  {JOpcode.F2D},
-            /* T_FLOAT -> T_BYTE */    {JOpcode.F2I, JOpcode.I2B},
-            /* T_FLOAT -> T_SHORT */   {JOpcode.F2I, JOpcode.I2S},
-            /* T_FLOAT -> T_INT */     {JOpcode.F2I},
-            /* T_FLOAT -> T_LONG */    {JOpcode.F2L}
-        },
-        {
-            /* T_DOUBLE -> T_BOOLEAN */ forbidden,
-            /* T_DOUBLE -> T_CHAR */    {JOpcode.D2I, JOpcode.I2C},
-            /* T_DOUBLE -> T_FLOAT */   {JOpcode.D2F},
-            /* T_DOUBLE -> T_DOUBLE */  nothingToDo,
-            /* T_DOUBLE -> T_BYTE */    {JOpcode.D2I, JOpcode.I2B},
-            /* T_DOUBLE -> T_SHORT */   {JOpcode.D2I, JOpcode.I2S},
-            /* T_DOUBLE -> T_INT */     {JOpcode.D2I},
-            /* T_DOUBLE -> T_LONG */    {JOpcode.D2L}
-        },
-        {
-            /* T_BYTE -> T_BOOLEAN */ forbidden,
-            /* T_BYTE -> T_CHAR */    {JOpcode.I2C},
-            /* T_BYTE -> T_FLOAT */   {JOpcode.I2F},
-            /* T_BYTE -> T_DOUBLE */  {JOpcode.I2D},
-            /* T_BYTE -> T_BYTE */    nothingToDo,
-            /* T_BYTE -> T_SHORT */   nothingToDo,
-            /* T_BYTE -> T_INT */     nothingToDo,
-            /* T_BYTE -> T_LONG */    {JOpcode.I2L}
-        },
-        {
-            /* T_SHORT -> T_BOOLEAN */ forbidden,
-            /* T_SHORT -> T_CHAR */    {JOpcode.I2C},
-            /* T_SHORT -> T_FLOAT */   {JOpcode.I2F},
-            /* T_SHORT -> T_DOUBLE */  {JOpcode.I2D},
-            /* T_SHORT -> T_BYTE */    {JOpcode.I2B},
-            /* T_SHORT -> T_SHORT */   nothingToDo,
-            /* T_SHORT -> T_INT */     nothingToDo,
-            /* T_SHORT -> T_LONG */    {JOpcode.I2L}
-        },
-        {
-            /* T_INT -> T_BOOLEAN */ forbidden,
-            /* T_INT -> T_CHAR */    {JOpcode.I2C},
-            /* T_INT -> T_FLOAT */   {JOpcode.I2F},
-            /* T_INT -> T_DOUBLE */  {JOpcode.I2D},
-            /* T_INT -> T_BYTE */    {JOpcode.I2B},
-            /* T_INT -> T_SHORT */   {JOpcode.I2S},
-            /* T_INT -> T_INT */     nothingToDo,
-            /* T_INT -> T_LONG */    {JOpcode.I2L}
-        },
-        {
-            /* T_LONG -> T_BOOLEAN */ forbidden,
-            /* T_LONG -> T_CHAR */    {JOpcode.L2I, JOpcode.I2C},
-            /* T_LONG -> T_FLOAT */   {JOpcode.L2F},
-            /* T_LONG -> T_DOUBLE */  {JOpcode.L2D},
-            /* T_LONG -> T_BYTE */    {JOpcode.L2I, JOpcode.I2B},
-            /* T_LONG -> T_SHORT */   {JOpcode.L2I, JOpcode.I2S},
-            /* T_LONG -> T_INT */     {JOpcode.L2I},
-            /* T_LONG -> T_LONG */    nothingToDo
-        }
-    };
-
-    public JExtendedCode(FJBGContext context,
-                         JClass clazz,
-                         JMethod owner) {
-        super(context, clazz, owner);
-    }
-
-    public void emitPUSH(boolean value) { emitPUSH(value ? 1 : 0); }
-    public void emitPUSH(Boolean value) { emitPUSH(value.booleanValue()); }
-
-    public void emitPUSH(byte value) {
-      switch (value) {
-        case -1: emitICONST_M1(); break;
-        case 0: emitICONST_0(); break;
-        case 1: emitICONST_1(); break;
-        case 2: emitICONST_2(); break;
-        case 3: emitICONST_3(); break;
-        case 4: emitICONST_4(); break;
-        case 5: emitICONST_5(); break;
-        default:
-          emitBIPUSH(value);
-      }
-    }
-    public void emitPUSH(Byte value) { emitPUSH(value.byteValue()); }
-
-    public void emitPUSH(short value) {
-      switch (value) {
-        case -1: emitICONST_M1(); break;
-        case 0: emitICONST_0(); break;
-        case 1: emitICONST_1(); break;
-        case 2: emitICONST_2(); break;
-        case 3: emitICONST_3(); break;
-        case 4: emitICONST_4(); break;
-        case 5: emitICONST_5(); break;
-        default:
-          if (value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE)
-            emitBIPUSH((byte)value);
-          else
-            emitSIPUSH(value);
-      }
-    }
-    public void emitPUSH(Short value) { emitPUSH(value.shortValue()); }
-
-    // TODO check that we do the right thing here
-    public void emitPUSH(char value) { emitPUSH((int)value); }
-    public void emitPUSH(Character value) { emitPUSH(value.charValue()); }
-
-    public void emitPUSH(int value) {
-        switch (value) {
-        case -1: emitICONST_M1(); break;
-        case 0: emitICONST_0(); break;
-        case 1: emitICONST_1(); break;
-        case 2: emitICONST_2(); break;
-        case 3: emitICONST_3(); break;
-        case 4: emitICONST_4(); break;
-        case 5: emitICONST_5(); break;
-        default:
-          if (value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE)
-            emitBIPUSH((byte)value);
-          else if (value >= Short.MIN_VALUE && value <= Short.MAX_VALUE)
-            emitSIPUSH((short)value);
-          else
-            emitPUSH_index(pool.addInteger(value));
-          break;
-        }
-    }
-    public void emitPUSH(Integer value) { emitPUSH(value.intValue()); }
-
-    public void emitPUSH(long value) {
-        if (value == 0L)
-            emitLCONST_0();
-        else if (value == 1L)
-            emitLCONST_1();
-        else
-            emitLDC2_W(value);
-    }
-    public void emitPUSH(Long value) { emitPUSH(value.longValue()); }
-
-    private static final Float ZEROF = Float.valueOf(0f);
-    private static final Float ONEF = Float.valueOf(1f);
-    private static final Float TWOF = Float.valueOf(2f);
-    public void emitPUSH(Float value) {
-        if (ZEROF.equals(value))
-            emitFCONST_0();
-        else if (ONEF.equals(value))
-            emitFCONST_1();
-        else if (TWOF.equals(value))
-            emitFCONST_2();
-        else
-            emitPUSH_index(pool.addFloat(value.floatValue()));
-    }
-    public void emitPUSH(float value) { emitPUSH(Float.valueOf(value)); }
-
-    private static final Double ZEROD = Double.valueOf(0d);
-    private static final Double ONED = Double.valueOf(1d);
-    public void emitPUSH(Double value) {
-        if (ZEROD.equals(value))
-            emitDCONST_0();
-        else if (ONED.equals(value))
-            emitDCONST_1();
-        else
-            emitLDC2_W(value.doubleValue());
-    }
-    public void emitPUSH(double value) { emitPUSH(Double.valueOf(value)); }
-
-    public void emitPUSH(String s) {
-        emitPUSH_index(pool.addString(s));
-    }
-
-    /** Pushes a class literal on the stack */
-    public void emitPUSH(JReferenceType type) {
-        assert owner.owner.major >= 49;
-        emitPUSH_index(pool.addClass(type.getDescriptor()));
-    }
-
-    protected void emitPUSH_index(int index) {
-        if (index <= 0xFF)
-            emitU1(JOpcode.LDC, index);
-        else
-            emitU2(JOpcode.LDC_W, index);
-    }
-
-    public void emitLOAD(int index, JType type) {
-        JOpcode opcode;
-
-        switch (type.getTag()) {
-        case JType.T_BOOLEAN: case JType.T_BYTE: case JType.T_CHAR:
-        case JType.T_SHORT: case JType.T_INT:
-            switch (index) {
-            case 0: emitILOAD_0(); return;
-            case 1: emitILOAD_1(); return;
-            case 2: emitILOAD_2(); return;
-            case 3: emitILOAD_3(); return;
-            default: opcode = JOpcode.ILOAD;
-            } break;
-        case JType.T_FLOAT:
-            switch (index) {
-            case 0: emitFLOAD_0(); return;
-            case 1: emitFLOAD_1(); return;
-            case 2: emitFLOAD_2(); return;
-            case 3: emitFLOAD_3(); return;
-            default: opcode = JOpcode.FLOAD;
-            } break;
-        case JType.T_LONG:
-            switch (index) {
-            case 0: emitLLOAD_0(); return;
-            case 1: emitLLOAD_1(); return;
-            case 2: emitLLOAD_2(); return;
-            case 3: emitLLOAD_3(); return;
-            default: opcode = JOpcode.LLOAD;
-            } break;
-        case JType.T_DOUBLE:
-            switch (index) {
-            case 0: emitDLOAD_0(); return;
-            case 1: emitDLOAD_1(); return;
-            case 2: emitDLOAD_2(); return;
-            case 3: emitDLOAD_3(); return;
-            default: opcode = JOpcode.DLOAD;
-            } break;
-        case JType.T_ARRAY: case JType.T_OBJECT:
-            switch (index) {
-            case 0: emitALOAD_0(); return;
-            case 1: emitALOAD_1(); return;
-            case 2: emitALOAD_2(); return;
-            case 3: emitALOAD_3(); return;
-            default: opcode = JOpcode.ALOAD;
-            } break;
-        default:
-            throw new IllegalArgumentException("invalid type for load "+type);
-        }
-
-        if (index > 0xFF)
-            emitWIDE(opcode, index);
-        else
-            emitU1(opcode, index);
-    }
-    public void emitLOAD(JLocalVariable var) {
-        emitLOAD(var.index, var.type);
-    }
-
-    public void emitSTORE(int index, JType type) {
-        JOpcode opcode;
-
-        switch (type.getTag()) {
-        case JType.T_BOOLEAN: case JType.T_BYTE: case JType.T_CHAR:
-        case JType.T_SHORT: case JType.T_INT:
-            switch (index) {
-            case 0: emitISTORE_0(); return;
-            case 1: emitISTORE_1(); return;
-            case 2: emitISTORE_2(); return;
-            case 3: emitISTORE_3(); return;
-            default: opcode = JOpcode.ISTORE;
-            } break;
-        case JType.T_FLOAT:
-            switch (index) {
-            case 0: emitFSTORE_0(); return;
-            case 1: emitFSTORE_1(); return;
-            case 2: emitFSTORE_2(); return;
-            case 3: emitFSTORE_3(); return;
-            default: opcode = JOpcode.FSTORE;
-            } break;
-        case JType.T_LONG:
-            switch (index) {
-            case 0: emitLSTORE_0(); return;
-            case 1: emitLSTORE_1(); return;
-            case 2: emitLSTORE_2(); return;
-            case 3: emitLSTORE_3(); return;
-            default: opcode = JOpcode.LSTORE;
-            } break;
-        case JType.T_DOUBLE:
-            switch (index) {
-            case 0: emitDSTORE_0(); return;
-            case 1: emitDSTORE_1(); return;
-            case 2: emitDSTORE_2(); return;
-            case 3: emitDSTORE_3(); return;
-            default: opcode = JOpcode.DSTORE;
-            } break;
-        case JType.T_ARRAY: case JType.T_OBJECT: case JType.T_ADDRESS:
-            switch (index) {
-            case 0: emitASTORE_0(); return;
-            case 1: emitASTORE_1(); return;
-            case 2: emitASTORE_2(); return;
-            case 3: emitASTORE_3(); return;
-            default: opcode = JOpcode.ASTORE;
-            } break;
-        default:
-            throw new IllegalArgumentException("invalid type for store "+type);
-        }
-
-        if (index > 0xFF)
-            emitWIDE(opcode, index);
-        else
-            emitU1(opcode, index);
-    }
-    public void emitSTORE(JLocalVariable var) {
-        emitSTORE(var.index, var.type);
-    }
-
-    public void emitALOAD(JType type) {
-        switch (type.getTag()) {
-        case JType.T_BOOLEAN:
-        case JType.T_BYTE:
-            emitBALOAD();
-            break;
-        case JType.T_CHAR:
-            emitCALOAD();
-            break;
-        case JType.T_SHORT:
-            emitSALOAD();
-            break;
-        case JType.T_INT:
-            emitIALOAD();
-            break;
-        case JType.T_FLOAT:
-            emitFALOAD();
-            break;
-        case JType.T_LONG:
-            emitLALOAD();
-            break;
-        case JType.T_DOUBLE:
-            emitDALOAD();
-            break;
-        case JType.T_ARRAY:
-        case JType.T_OBJECT:
-            emitAALOAD();
-            break;
-        default:
-            throw new IllegalArgumentException("invalid type for aload " + type);
-        }
-    }
-
-    public void emitASTORE(JType type) {
-        switch (type.getTag()) {
-        case JType.T_BOOLEAN:
-        case JType.T_BYTE:
-            emitBASTORE();
-            break;
-        case JType.T_CHAR:
-            emitCASTORE();
-            break;
-        case JType.T_SHORT:
-            emitSASTORE();
-            break;
-        case JType.T_INT:
-            emitIASTORE();
-            break;
-        case JType.T_FLOAT:
-            emitFASTORE();
-            break;
-        case JType.T_LONG:
-            emitLASTORE();
-            break;
-        case JType.T_DOUBLE:
-            emitDASTORE();
-            break;
-        case JType.T_ARRAY:
-        case JType.T_OBJECT:
-            emitAASTORE();
-            break;
-        default:
-            throw new IllegalArgumentException("invalid type for astore " + type);
-        }
-    }
-
-    public void emitRETURN(JType type) {
-        if (type.isValueType()) {
-            switch (type.getTag()) {
-            case JType.T_BOOLEAN:
-            case JType.T_BYTE:
-            case JType.T_CHAR:
-            case JType.T_SHORT:
-            case JType.T_INT:
-                emitIRETURN();
-                break;
-            case JType.T_FLOAT:
-                emitFRETURN();
-                break;
-            case JType.T_LONG:
-                emitLRETURN();
-                break;
-            case JType.T_DOUBLE:
-                emitDRETURN();
-                break;
-            }
-        } else if (type.isArrayType() || type.isObjectType())
-            emitARETURN();
-        else if (type == JType.VOID)
-            emitRETURN();
-        else
-            throw new IllegalArgumentException("invalid type for RETURN " + type);
-    }
-
-    public void emitADD(JType type) {
-        switch (type.getTag()) {
-        case JType.T_BOOLEAN: case JType.T_BYTE: case JType.T_CHAR:
-        case JType.T_SHORT: case JType.T_INT:
-            emitIADD(); break;
-        case JType.T_FLOAT:
-            emitFADD(); break;
-        case JType.T_LONG:
-            emitLADD(); break;
-        case JType.T_DOUBLE:
-            emitDADD(); break;
-        }
-    }
-
-    /**
-     * Emits a basic type conversion instruction chosen according to the
-     * types given in parameter.
-     *
-     * @param fromType The type of the value to be cast into another type.
-     * @param toType The type the value will be cast into.
-     */
-    public void emitT2T(JType fromType, JType toType) {
-        assert fromType.getTag() >= JType.T_BOOLEAN
-            && fromType.getTag() <= JType.T_LONG
-            && toType.getTag() >= JType.T_BOOLEAN
-            && toType.getTag() <= JType.T_LONG;
-
-        JOpcode[] conv = typeConversions[fromType.getTag() - 4][toType.getTag() - 4];
-        if (conv == forbidden) {
-            throw new Error("inconvertible types : " + fromType.toString()
-                            + " -> " + toType.toString());
-        } else if (conv != nothingToDo) {
-            for (int i = 0; i < conv.length; i++) {
-                emit(conv[i]);
-            }
-        }
-    }
-
-    public void emitIF(int cond, Label label) throws OffsetTooBigException {
-        assert cond >= COND_EQ && cond <= COND_LE;
-        emitU2(JOpcode.OPCODES[153 + cond], label.getOffset16(getPC() + 1, getPC()));
-    }
-    public void emitIF(int cond, int targetPC) throws OffsetTooBigException {
-        int offset = targetPC - getPC();
-        emitU2(JOpcode.OPCODES[153 + cond], offset);
-    }
-    public void emitIF(int cond) throws OffsetTooBigException {
-        emitIF(cond, 0);
-    }
-
-    public void emitIF_ICMP(int cond, Label label) throws OffsetTooBigException {
-        assert cond >= COND_EQ && cond <= COND_LE;
-        emitU2(JOpcode.OPCODES[159 + cond], label.getOffset16(getPC() + 1, getPC()));
-    }
-    public void emitIF_ICMP(int cond, int targetPC) throws OffsetTooBigException {
-        int offset = targetPC - getPC();
-        emitU2(JOpcode.OPCODES[159 + cond], offset);
-    }
-    public void emitIF_ICMP(int cond) throws OffsetTooBigException {
-        emitIF_ICMP(cond, 0);
-    }
-
-    public void emitIF_ACMP(int cond, Label label) throws OffsetTooBigException {
-        assert cond == COND_EQ || cond == COND_NE;
-        emitU2(JOpcode.OPCODES[165 + cond], label.getOffset16(getPC() + 1, getPC()));
-    }
-    public void emitIF_ACMP(int cond, int targetPC) throws OffsetTooBigException {
-        int offset = targetPC - getPC();
-        emitU2(JOpcode.OPCODES[165 + cond], offset);
-    }
-    public void emitIF_ACMP(int cond) throws OffsetTooBigException {
-        emitIF_ACMP(cond, 0);
-    }
-
-    public void emitGOTO_maybe_W(Label label, boolean defaultToWide) {
-        if (label.anchored)
-            emitGOTO_maybe_W(label.targetPC);
-        else {
-            if (defaultToWide)
-                emitGOTO_W(label);
-            else {
-                try {
-                    emitGOTO(label);
-                } catch (OffsetTooBigException e) {
-                    throw new Error(e);
-                }
-            }
-        }
-    }
-
-    public void emitGOTO_maybe_W(int targetPC) {
-        int offset = targetPC - (getPC() + 1);
-        if (offset < Short.MIN_VALUE || offset > Short.MAX_VALUE)
-            emitGOTO_W(targetPC);
-        else {
-            try {
-                emitGOTO(targetPC);
-            } catch (OffsetTooBigException e) {
-                throw new Error(e);
-            }
-        }
-    }
-
-    /**
-     * Emits a switch instruction chosen according to the characteristics
-     * of the given list of keys and a given minimum density.
-     *
-     * @param keySets The array of all keys that must be compared to the
-     *        value on stack.
-     * @param branches The labels representing the jump addresses linked
-     *        with the corresponding keys.
-     * @param defaultBranch The label representing the default branch
-     *        address.
-     */
-    public void emitSWITCH(int[][] keySets,
-                           Label[] branches,
-                           Label defaultBranch,
-                           double minDensity) {
-        assert keySets.length == branches.length;
-
-        int flatSize = 0;
-        for (int i = 0; i < keySets.length; ++i)
-            flatSize += keySets[i].length;
-
-        int[] flatKeys = new int[flatSize];
-        Label[] flatBranches = new Label[flatSize];
-        int flatI = 0;
-        for (int i = 0; i < keySets.length; ++i) {
-            Label branch = branches[i];
-            int[] keys = keySets[i];
-            for (int j = 0; j < keys.length; ++j) {
-                flatKeys[flatI] = keys[j];
-                flatBranches[flatI] = branch;
-                ++flatI;
-            }
-        }
-        assert flatI == flatSize;
-        emitSWITCH(flatKeys, flatBranches, defaultBranch, minDensity);
-    }
-
-    /**
-     * Emits a switch instruction chosen according to the characteristics
-     * of the given list of keys and a given minimum density.
-     *
-     * @param keys The array of all keys that must be compared to the
-     *        value on stack.
-     * @param branches The labels representing the jump addresses linked
-     *        with the corresponding keys.
-     * @param defaultBranch The label representing the default branch
-     *        address.
-     * @param minDensity The minimum density to use for TABLESWITCH.
-     */
-    public void emitSWITCH(int[] keys,
-                           Label[] branches,
-                           Label defaultBranch,
-                           double minDensity) {
-        assert keys.length == branches.length;
-
-        //The special case for empty keys. It makes sense to allow
-        //empty keys and generate LOOKUPSWITCH with defaultBranch
-        //only. This is exactly what javac does for switch statement
-        //that has only a default case.
-        if (keys.length == 0) {
-          emitLOOKUPSWITCH(keys, branches, defaultBranch);
-          return;
-        }
-        //the rest of the code assumes that keys.length > 0
-
-        // sorting the tables
-        // FIXME use quicksort
-        for (int i = 1; i < keys.length; i++) {
-            for (int j = 1; j <= keys.length - i; j++) {
-                if (keys[j] < keys[j - 1]) {
-                    int tmp = keys[j];
-                    keys[j] = keys[j - 1];
-                    keys[j - 1] = tmp;
-
-                    Label tmp_l = branches[j];
-                    branches[j] = branches[j - 1];
-                    branches[j - 1] = tmp_l;
-                }
-            }
-        }
-
-        int keyMin = keys[0], keyMax = keys[keys.length - 1];
-        /** Calculate in long to guard against overflow. */
-        long keyRange = (long)keyMax - keyMin + 1;
-        if ((double)keys.length / (double)keyRange >= minDensity) {
-            // Keys are dense enough, use a table in which holes are
-            // filled with defaultBranch.
-            int[] newKeys = new int[(int)keyRange];
-            Label[] newBranches = new Label[(int)keyRange];
-            int oldPos = 0;
-            for (int i = 0; i < keyRange; ++i) {
-                int key = keyMin + i;
-                newKeys[i] = key;
-                if (keys[oldPos] == key) {
-                    newBranches[i] = branches[oldPos];
-                    ++oldPos;
-                } else
-                    newBranches[i] = defaultBranch;
-            }
-            assert oldPos == keys.length;
-            emitTABLESWITCH(newKeys, newBranches, defaultBranch);
-        } else
-            emitLOOKUPSWITCH(keys, branches, defaultBranch);
-    }
-
-    /**
-     * Emits a method invocation instruction chosen according to
-     * the characteristics of the given method.
-     *
-     * @param method The method to be invoked.
-     */
-    public void emitINVOKE(JMethod method) {
-        String mName = method.getName();
-        String cName = method.getOwner().getName();
-        JMethodType mType = (JMethodType)method.getType();
-        if (method.isStatic())
-            emitINVOKESTATIC(cName, mName, mType);
-        else if (method.getOwner().isInterface())
-            emitINVOKEINTERFACE(cName, mName, mType);
-        else
-            emitINVOKEVIRTUAL(cName, mName, mType);
-    }
-
-}
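
Two of the pseudo-instructions removed above hide small decision procedures: emitPUSH(int) chooses the narrowest instruction that can encode the constant, and emitSWITCH chooses TABLESWITCH over LOOKUPSWITCH when the sorted keys are dense enough. A standalone sketch of both decisions (instruction names only, no bytecode is emitted):

    public class EmitDecisionSketch {
        // Mirrors emitPUSH(int): ICONST_* for -1..5, then BIPUSH, SIPUSH, LDC.
        static String pushInstructionFor(int value) {
            if (value >= -1 && value <= 5) return "ICONST_" + (value == -1 ? "M1" : value);
            if (value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE) return "BIPUSH";
            if (value >= Short.MIN_VALUE && value <= Short.MAX_VALUE) return "SIPUSH";
            return "LDC"; // value goes into the constant pool
        }

        // Mirrors the density test in emitSWITCH: keys are assumed sorted.
        static String switchInstructionFor(int[] sortedKeys, double minDensity) {
            if (sortedKeys.length == 0) return "LOOKUPSWITCH"; // default-only switch
            long keyRange = (long) sortedKeys[sortedKeys.length - 1] - sortedKeys[0] + 1;
            double density = (double) sortedKeys.length / (double) keyRange;
            return density >= minDensity ? "TABLESWITCH" : "LOOKUPSWITCH";
        }

        public static void main(String[] args) {
            System.out.println(pushInstructionFor(3));     // ICONST_3
            System.out.println(pushInstructionFor(100));   // BIPUSH
            System.out.println(pushInstructionFor(40000)); // LDC
            System.out.println(switchInstructionFor(new int[] {1, 2, 3, 4}, 0.5)); // TABLESWITCH
            System.out.println(switchInstructionFor(new int[] {1, 1000}, 0.5));    // LOOKUPSWITCH
        }
    }
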
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JField.java b/src/fjbg/ch/epfl/lamp/fjbg/JField.java
deleted file mode 100644
index 29d826b..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JField.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-
-/**
- * Java class field.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JField extends JFieldOrMethod {
-
-    protected JField(FJBGContext context,
-                     JClass owner,
-                     int accessFlags,
-                     String name,
-                     JType type) {
-        super(context, owner, accessFlags, name, type);
-    }
-
-    protected JField(FJBGContext context,
-                     JClass owner,
-                     DataInputStream stream)
-        throws IOException {
-        super(context, owner, stream);
-    }
-
-    // Follows javap output format for fields.
-    /*@Override*/ public String toString() {
-        StringBuffer buf = new StringBuffer(flagsToString());
-        buf.append(toExternalName(getType()));
-        buf.append(" ");
-        buf.append(getName());
-        buf.append(";\n");
-        java.util.Iterator attrsIt = attributes.iterator();
-        while (attrsIt.hasNext()) {
-            JAttribute attrs = (JAttribute)attrsIt.next();
-            buf.append(attrs);
-        }
-        return buf.toString();
-    }
-
-    private String flagsToString() {
-        StringBuffer buf = new StringBuffer();
-        if (isPublic()) buf.append("public ");
-        else if (isProtected()) buf.append("protected ");
-        else if (isPrivate()) buf.append("private ");
-        if (isStatic()) buf.append("static ");
-        else if (isTransient()) buf.append("transient ");
-        else if (isVolatile()) buf.append("volatile ");
-        if (isAbstract()) buf.append("abstract ");
-        else if (isFinal()) buf.append("final ");
-        return buf.toString();
-    }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java b/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java
deleted file mode 100644
index 794c0f1..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * Abstract superclass for a Java field or method.
- *
- * No two methods or fields in one class file may have the same name and
- * descriptor. See sections 4.6 and 4.7 of the JVM specification.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-abstract public class JFieldOrMethod extends JMember {
-
-    protected final JClass owner;
-    protected final JType type;
-
-    protected final int nameIndex, signatureIndex;
-
-    protected JFieldOrMethod(FJBGContext context,
-                             JClass owner,
-                             int accessFlags,
-                             String name,
-                             JType type) {
-        super(context, accessFlags, name);
-        this.owner = owner;
-        this.type = type;
-
-        nameIndex = owner.pool.addUtf8(name);
-        signatureIndex = owner.pool.addUtf8(type.getSignature());
-    }
-
-    protected JFieldOrMethod(FJBGContext context,
-                             JClass owner,
-                             DataInputStream stream)
-        throws IOException {
-        super(context);
-        this.owner = owner;
-        this.accessFlags = stream.readShort();
-        this.nameIndex = stream.readShort();
-        this.name = owner.pool.lookupUtf8(nameIndex);
-        this.signatureIndex = stream.readShort();
-        this.type = JType.parseSignature(owner.pool.lookupUtf8(signatureIndex));
-        this.attributes.addAll(JAttribute.readFrom(context, owner, this, stream));
-    }
-
-    public void freeze() throws JCode.OffsetTooBigException {
-        assert !frozen;
-        frozen = true;
-    }
-
-    public JClass getOwner() { return owner; }
-
-    public JType getType() { return type; }
-
-    public JClass getJClass() { return owner; }
-
-    public boolean isPublic() {
-        return (accessFlags & JAccessFlags.ACC_PUBLIC) != 0;
-    }
-
-    public boolean isPrivate() {
-        return (accessFlags & JAccessFlags.ACC_PRIVATE) != 0;
-    }
-
-    public boolean isProtected() {
-        return (accessFlags & JAccessFlags.ACC_PROTECTED) != 0;
-    }
-
-    public boolean isStatic() {
-        return (accessFlags & JAccessFlags.ACC_STATIC) != 0;
-    }
-
-    public boolean isFinal() {
-        return (accessFlags & JAccessFlags.ACC_FINAL) != 0;
-    }
-
-    public boolean isSuper() {
-        return (accessFlags & JAccessFlags.ACC_SUPER) != 0;
-    }
-
-    public boolean isVolatile() {
-        return (accessFlags & JAccessFlags.ACC_VOLATILE) != 0;
-    }
-
-    public boolean isTransient() {
-        return (accessFlags & JAccessFlags.ACC_TRANSIENT) != 0;
-    }
-
-    public boolean isNative() {
-        return (accessFlags & JAccessFlags.ACC_NATIVE) != 0;
-    }
-
-    public boolean isInterface() {
-        return (accessFlags & JAccessFlags.ACC_INTERFACE) != 0;
-    }
-
-    public boolean isAbstract() {
-        return (accessFlags & JAccessFlags.ACC_ABSTRACT) != 0;
-    }
-
-    public boolean isStrict() {
-        return (accessFlags & JAccessFlags.ACC_STRICT) != 0;
-    }
-
-    // 1.5 specifics
-    public boolean isBridge() {
-        return (accessFlags & JAccessFlags.ACC_BRIDGE) != 0;
-    }
-
-    public boolean hasVarargs() {
-        return (accessFlags & JAccessFlags.ACC_VARARGS) != 0;
-    }
-
-    public void writeTo(DataOutputStream stream) throws IOException {
-        if (! frozen) {
-            try {
-                freeze();
-            }
-            catch (JCode.OffsetTooBigException e) {
-                throw new Error(e);
-            }
-        }
-        stream.writeShort(accessFlags);
-        stream.writeShort(nameIndex);
-        stream.writeShort(signatureIndex);
-        JAttribute.writeTo(getAttributes(), stream);
-    }
-}
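
The boolean accessors removed above are all single-bit tests against the access_flags mask. A standalone sketch using flag values from the JVM specification; the sample mask is made up:

    public class AccessFlagsSketch {
        // Flag values as defined by the JVM specification.
        static final int ACC_PUBLIC   = 0x0001;
        static final int ACC_STATIC   = 0x0008;
        static final int ACC_FINAL    = 0x0010;
        static final int ACC_ABSTRACT = 0x0400;

        static boolean has(int accessFlags, int flag) {
            return (accessFlags & flag) != 0; // same pattern as isPublic(), isStatic(), ...
        }

        public static void main(String[] args) {
            int accessFlags = ACC_PUBLIC | ACC_STATIC; // hypothetical field or method
            System.out.println("public:   " + has(accessFlags, ACC_PUBLIC));   // true
            System.out.println("static:   " + has(accessFlags, ACC_STATIC));   // true
            System.out.println("abstract: " + has(accessFlags, ACC_ABSTRACT)); // false
        }
    }
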
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java
deleted file mode 100644
index 1c1ced5..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-/**
- * InnerClasses attribute.
- *
- * The ClassFile structure of a class/interface C must have exactly one
- * InnerClasses attribute in its attributes table if the constant pool of C
- * contains a CONSTANT_Class_info entry which represents a class or interface
- * that is not a member of a package. See section 4.8.5 of the JVM Specification.
- *
- * @author Iulian Dragos, Stephane Micheloud
- * @version 1.1
- */
-public class JInnerClassesAttribute extends JAttribute {
-    /** Constant pool of the current classfile. */
-    private JConstantPool pool;
-
-    /** InnerClass entries */
-    private Map/*<String, Entry>*/ entries = new LinkedHashMap();
-
-    public JInnerClassesAttribute(FJBGContext context, JClass clazz) {
-        super(context, clazz);
-        this.pool = clazz.pool;
-    }
-
-    public JInnerClassesAttribute(FJBGContext context,
-                                  JClass clazz,
-                                  Object owner,
-                                  String name,
-                                  int size,
-                                  DataInputStream stream)
-        throws IOException {
-        super(context, clazz, name);
-        this.pool = clazz.pool;
-
-        String inner = null;
-        int count = stream.readShort();
-        for (int i = 0; i < count; ++i) {
-            int innerIdx = stream.readShort();
-            int outerIdx = stream.readShort();
-            int nameIdx = stream.readShort();
-            int flags = stream.readShort();
-            inner = pool.lookupClass(innerIdx);
-            entries.put(inner, new Entry(innerIdx, outerIdx, nameIdx, flags));
-        }
-
-        assert name.equals(getName());
-    }
-
-    public void addEntry(String inner, String outer, String name, int flags) {
-        int innerIdx = pool.addClass(inner);
-        int outerIdx = 0;
-        if (outer != null) outerIdx = pool.addClass(outer);
-        int nameIdx = 0;
-        if (name != null) nameIdx = pool.addUtf8(name);
-
-        Entry e = new Entry(innerIdx, outerIdx, nameIdx, flags);
-
-        if (entries.containsKey(inner)) {
-            Entry other = (Entry) entries.get(inner);
-            assert other.outerInfo == e.outerInfo && other.originalName == e.originalName && other.innerFlags == e.innerFlags
-                : inner + " already declared as " + other;
-        } else
-            entries.put(inner, e);
-    }
-
-    public String getName() { return "InnerClasses"; }
-
-    // Follows javap output format for the InnerClass attribute.
-    /*@Override*/ public String toString() {
-        // Here we intentionally use "InnerClass" (not "InnerClasses"), as javap does :-(
-        StringBuffer buf = new StringBuffer("  InnerClass: ");
-        for (Iterator it = entries.values().iterator(); it.hasNext(); ) {
-            Entry e = (Entry)it.next();
-            buf.append("\n   ");
-            buf.append(e.innerFlagsToString());
-            buf.append("#");
-            if (e.originalName != 0) {
-                buf.append(e.originalName);
-                buf.append("= #");
-            }
-            buf.append(e.innerInfo);
-            if (e.outerInfo != 0) {
-                buf.append(" of #");
-                buf.append(e.outerInfo);
-            }
-            buf.append("; //");
-            if (e.originalName != 0) {
-                buf.append(pool.lookupUtf8(e.originalName));
-                buf.append("=");
-            }
-            buf.append("class ");
-            buf.append(pool.lookupClass(e.innerInfo));
-            if (e.outerInfo != 0) {
-                buf.append(" of class ");
-                buf.append(pool.lookupClass(e.outerInfo));
-            }
-        }
-        buf.append("\n");
-        return buf.toString();
-    }
-
-    protected int getSize() {
-        return 2 + entries.size() * 8;
-    }
-
-    protected void writeContentsTo(DataOutputStream stream) throws IOException {
-        stream.writeShort(entries.size());
-        for (Iterator it = entries.values().iterator(); it.hasNext(); ) {
-            Entry e = (Entry)it.next();
-            stream.writeShort(e.innerInfo);
-            stream.writeShort(e.outerInfo);
-            stream.writeShort(e.originalName);
-            stream.writeShort(e.innerFlags);
-        }
-    }
-
-    /** An entry in the InnerClasses attribute, as defined by the JVM Spec. */
-    private class Entry {
-        /** CONSTANT_Class_info index in the pool for the inner class (mangled). */
-        int innerInfo;
-
-        /** CONSTANT_Class_info index in the pool for the outer class (mangled). */
-        int outerInfo;
-
-        /** CONSTANT_Utf8_info index in the pool for the original name of the inner class. */
-        int originalName;
-
-        /** Short int for modifier flags. */
-        int innerFlags;
-
-        public Entry(int iI, int oI, int oN, int f) {
-            this.innerInfo = iI;
-            this.outerInfo = oI;
-            this.originalName = oN;
-            this.innerFlags = f;
-        }
-
-        public Entry(String innerClass, String outerClass, String name, int flags) {
-            this(pool.addClass(innerClass), pool.addClass(outerClass), pool.addUtf8(name), flags);
-        }
-
-        /** Two entries are equal if they refer to the same inner class.
-         *  innerInfo represents a unique name (mangled).
-         */
-        public boolean equals(Object other) {
-            if (other instanceof Entry) {
-                Entry otherEntry = (Entry) other;
-                return otherEntry.innerInfo == this.innerInfo;
-            }
-            return false;
-        }
-
-        public String innerFlagsToString() {
-            StringBuffer buf = new StringBuffer();
-            if (isPublic()) buf.append("public ");
-            else if (isProtected()) buf.append("protected ");
-            else if (isPrivate()) buf.append("private ");
-            //if (isStatic()) buf.append("static "); // as javap
-            if (isAbstract()) buf.append("abstract ");
-            else if (isFinal()) buf.append("final ");
-            return buf.toString();
-        }
-
-        private boolean isPublic() {
-            return (innerFlags & JAccessFlags.ACC_PUBLIC) != 0;
-        }
-
-        private boolean isPrivate() {
-            return (innerFlags & JAccessFlags.ACC_PRIVATE) != 0;
-        }
-
-        private boolean isProtected() {
-            return (innerFlags & JAccessFlags.ACC_PROTECTED) != 0;
-        }
-
-        private boolean isStatic() {
-            return (innerFlags & JAccessFlags.ACC_STATIC) != 0;
-        }
-
-        private boolean isFinal() {
-            return (innerFlags & JAccessFlags.ACC_FINAL) != 0;
-        }
-
-        private boolean isAbstract() {
-            return (innerFlags & JAccessFlags.ACC_ABSTRACT) != 0;
-        }
-    }
-}
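
For context on the removal above: JInnerClassesAttribute serializes the standard InnerClasses layout, a u2 entry count followed by four u2 fields per entry, which is why getSize() returns 2 + entries.size() * 8. A self-contained sketch of that byte layout (the constant-pool indices below are made up for illustration):

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    class InnerClassesLayoutSketch {
        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);
            out.writeShort(1);      // number_of_classes
            out.writeShort(5);      // inner_class_info_index   (hypothetical CP index)
            out.writeShort(2);      // outer_class_info_index   (hypothetical CP index)
            out.writeShort(7);      // inner_name_index         (hypothetical CP index)
            out.writeShort(0x0001); // inner_class_access_flags (ACC_PUBLIC)
            System.out.println(bytes.size()); // prints 10 = 2 + 1 * 8
        }
    }
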
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java b/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java
deleted file mode 100644
index 96f3b4e..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Labels which can be attached to instructions.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JLabel {
-    public final static int UNDEFINED_ANCHOR = -1;
-    protected int anchor = UNDEFINED_ANCHOR;
-
-    public boolean isAnchored() { return anchor != UNDEFINED_ANCHOR; }
-
-    public int getAnchor() {
-        assert isAnchored();
-        return anchor;
-    }
-
-    public void setAnchor(int anchor) {
-        assert !isAnchored();
-        this.anchor = anchor;
-    }
-}
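
A brief note on the anchoring contract of the removed JLabel: a label starts unanchored and, with assertions enabled, may be anchored at most once. A minimal sketch, assuming the fjbg classes are still on the classpath:

    import ch.epfl.lamp.fjbg.JLabel;

    class LabelSketch {
        public static void main(String[] args) {
            JLabel target = new JLabel();
            if (!target.isAnchored())
                target.setAnchor(42);               // 42 stands in for a bytecode offset
            System.out.println(target.getAnchor()); // prints 42
        }
    }
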
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java
deleted file mode 100644
index f8c09b8..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * Attribute storing correspondence between instructions and source
- * line numbers.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JLineNumberTableAttribute extends JAttribute {
-    protected final JCode code;
-
-    public JLineNumberTableAttribute(FJBGContext context,
-                                     JClass clazz,
-                                     JCode owner) {
-        super(context, clazz);
-        this.code = owner;
-
-        assert owner.getOwner().getOwner() == clazz;
-    }
-
-    public JLineNumberTableAttribute(FJBGContext context,
-                                     JClass clazz,
-                                     Object owner,
-                                     String name,
-                                     int size,
-                                     DataInputStream stream)
-        throws IOException {
-        super(context, clazz, name);
-        this.code = (JCode)owner;
-
-        int[] mapping = new int[code.getSize()];
-
-        int count = stream.readShort();
-        for (int i = 0; i < count; ++i) {
-            int startPC = stream.readShort();
-            int lineNum = stream.readShort();
-            mapping[startPC] = lineNum;
-        }
-
-        // Avoids duplication of LineNumberTable attribute
-        // (see method ensureLineNumberCapacity in class JCode).
-        assert code.lineNumbers == null;
-        code.lineNumbers = new int[0];
-
-        int lineNum = 0;
-        for (int pc = 0; pc < mapping.length; ++pc) {
-            if (mapping[pc] != 0) lineNum = mapping[pc];
-            if (lineNum != 0) code.setLineNumber(pc, lineNum);
-        }
-
-        assert name.equals(getName());
-    }
-
-    public String getName() { return "LineNumberTable"; }
-
-    // Follows javap output format for LineNumberTable attribute.
-    /*@Override*/ public String toString() {
-        StringBuffer buf = new StringBuffer("  LineNumberTable: ");
-        int[] encoding = encode();
-        for (int i = 0; i < encoding.length/2; ++i) {
-            buf.append("\n   line ");
-            buf.append(encoding[i * 2 + 1]);
-            buf.append(": ");
-            buf.append(encoding[i * 2]);
-        }
-        buf.append("\n");
-        return buf.toString();
-    }
-
-    protected int[] encoding;
-    protected int[] encode() {
-        if (encoding == null) {
-            int[] lineNumbers = code.getLineNumbers();
-            int[] preEncoding = new int[lineNumbers.length * 2];
-            int prevLineNum = 0;
-
-            int i = 0;
-            for (int pc = 0; pc < lineNumbers.length; ++pc) {
-                int lineNum = lineNumbers[pc];
-                if (lineNum != 0 && lineNum != prevLineNum) {
-                    preEncoding[i++] = pc;
-                    preEncoding[i++] = lineNum;
-                    prevLineNum = lineNum;
-                }
-            }
-            if (i == preEncoding.length)
-                encoding = preEncoding;
-            else {
-                encoding = new int[i];
-                System.arraycopy(preEncoding, 0, encoding, 0, i);
-            }
-        }
-        return encoding;
-    }
-
-    protected int getSize() {
-        int[] encoding = encode();
-        return 2 + encoding.length * 2;
-    }
-
-    protected void writeContentsTo(DataOutputStream stream) throws IOException {
-        int[] encoding = encode();
-        int entries = encoding.length / 2;
-        stream.writeShort(entries);
-        for (int i = 0; i < entries; ++i) {
-            stream.writeShort(encoding[i * 2]);
-            stream.writeShort(encoding[i * 2 + 1]);
-        }
-    }
-}
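
The encode() method above compresses a per-instruction line-number array into (start_pc, line_number) pairs, emitting a pair only where the line changes. A standalone sketch of that transformation on sample data:

    import java.util.ArrayList;
    import java.util.List;

    class LineNumberEncodingSketch {
        public static void main(String[] args) {
            int[] lineNumbers = { 10, 10, 10, 12, 12, 15 }; // hypothetical line per pc
            List<int[]> pairs = new ArrayList<int[]>();
            int prev = 0;
            for (int pc = 0; pc < lineNumbers.length; ++pc) {
                int line = lineNumbers[pc];
                if (line != 0 && line != prev) {
                    pairs.add(new int[] { pc, line });
                    prev = line;
                }
            }
            for (int[] p : pairs)
                System.out.println("pc " + p[0] + " -> line " + p[1]);
            // prints: pc 0 -> line 10, pc 3 -> line 12, pc 5 -> line 15
        }
    }
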
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java b/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java
deleted file mode 100644
index af79806..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Representation of a local variable or method argument.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JLocalVariable {
-    protected final JMethod owner;
-    protected final JType type;
-    protected final String name;
-    protected final int index;
-
-    protected JLocalVariable(FJBGContext context,
-                             JMethod owner,
-                             JType type,
-                             String name,
-                             int index) {
-        this.owner = owner;
-        this.type = type;
-        this.name = name;
-        this.index = index;
-
-        assert index < 0xFFFF : "index too big for local variable: " + index;
-    }
-
-    public JMethod getOwner() { return owner; }
-    public int getIndex() { return index; }
-    public String getName() { return name; }
-    public JType getType() { return type; }
-
-    /*@Override*/ public String toString() {
-        return "0\t"+type.getSize()+"\t"+index+"\t"+name+"\t"+type;
-    }
-}
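
The index held by the removed JLocalVariable is a JVM local-variable slot, not a list position; wide types (long, double) occupy two slots, which is why JMethod.addNewLocalVariable further below advances its counter by type.getSize(). A plain-arithmetic sketch of the slot layout for a hypothetical instance method taking (long, int):

    class SlotLayoutSketch {
        public static void main(String[] args) {
            int next = 0;
            int thisSlot = next; next += 1; // implicit "this" of an instance method
            int longArg  = next; next += 2; // a long occupies two slots
            int intArg   = next; next += 1;
            System.out.println(thisSlot + " " + longArg + " " + intArg); // prints 0 1 3
        }
    }
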
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java
deleted file mode 100644
index b277cc7..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java
+++ /dev/null
@@ -1,167 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.LinkedList;
-
-import ch.epfl.lamp.fjbg.JConstantPool.*;
-
-/**
- * Attribute storing local variables.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-
-public class JLocalVariableTableAttribute extends JAttribute {
-    /** Constant pool of the current classfile. */
-    private JConstantPool pool;
-
-    protected final LinkedList/*<Entry>*/ entries = new LinkedList();
-    protected int localVariableIndex = 0;
-
-    public JLocalVariableTableAttribute(FJBGContext context,
-                                        JClass clazz,
-                                        JCode code) {
-        super(context, clazz);
-        this.pool = clazz.pool;
-
-        assert code.getOwner().getOwner() == clazz;
-    }
-
-    public JLocalVariableTableAttribute(FJBGContext context,
-                                        JClass clazz,
-                                        Object owner,
-                                        String name,
-                                        int size,
-                                        DataInputStream stream)
-        throws IOException {
-        super(context, clazz, name);
-        this.pool = clazz.pool;
-
-        int count = stream.readShort();
-        for (int i = 0; i < count; ++i) {
-            int startPc = stream.readShort();
-            int length = stream.readShort();
-            int nameIndex = stream.readShort();
-            int descIndex = stream.readShort();
-            int index = stream.readShort();
-            addEntry(startPc, length, nameIndex, descIndex, index);
-        }
-
-        assert name.equals(getName());
-    }
-
-    public void addEntry(int startPc, int length, int nameIndex,
-                         int descIndex, int index) {
-        entries.add(new Entry(startPc, length, nameIndex, descIndex, index));
-    }
-
-    public void addEntry(int startPc, int length, String name,
-                         String desc, int index) {
-        Entry e = new Entry(startPc, length, name, desc, index);
-        Entry other = getEntry(index);
-        if (other != null) {
-            assert other.nameIndex == e.nameIndex && other.descIndex == e.descIndex
-                : e + " already declared as " + other;
-        } else
-            entries.add(e);
-    }
-
-    public void addEntry(int startPc, int length, String name, String desc) {
-        entries.add(new Entry(startPc, length, name, desc));
-    }
-
-    public String getName() { return "LocalVariableTable"; }
-
-    // Follows javap output format for LocalVariableTable attribute.
-    /*@Override*/ public String toString() {
-        StringBuffer buf = new StringBuffer("  LocalVariableTable: ");
-        buf.append("\n   Start  Length  Slot  Name   Signature");
-        for (Iterator it = entries.iterator(); it.hasNext(); ) {
-            buf.append("\n   ");
-            Entry e = (Entry)it.next();
-            Utf8Entry name = (Utf8Entry)pool.lookupEntry(e.nameIndex);
-            Utf8Entry sig = (Utf8Entry)pool.lookupEntry(e.descIndex);
-            buf.append(e.startPc);
-            buf.append("      ");
-            buf.append(e.length);
-            buf.append("      ");
-            buf.append(e.index);
-            buf.append("    ");
-            buf.append(name.getValue());
-            buf.append("       ");
-            buf.append(sig.getValue());
-        }
-        buf.append("\n");
-        return buf.toString();
-    }
-
-    public int getMaxLocals() {
-        return localVariableIndex;
-    }
-
-    public int getSize() {
-        return 2 + entries.size() * 10;
-    }
-
-    protected void writeContentsTo(DataOutputStream stream) throws IOException {
-        stream.writeShort(entries.size());
-        for (Iterator it = entries.iterator(); it.hasNext(); ) {
-            Entry e = (Entry)it.next();
-            stream.writeShort(e.startPc);
-            stream.writeShort(e.length);
-            stream.writeShort(e.nameIndex);
-            stream.writeShort(e.descIndex);
-            stream.writeShort(e.index);
-        }
-    }
-
-    private Entry getEntry(int index) {
-        Entry e = null;
-        try { e = (Entry)entries.get(index); } catch (Exception ex) {}
-        return e;
-    }
-
-    private class Entry {
-        int startPc;
-        int length;
-        int nameIndex;
-        int descIndex;
-        int index;
-
-        public Entry(int startPc, int length, int nameIndex, int descIndex, int index) {
-            this.startPc = startPc;
-            this.length = length;
-            this.nameIndex = nameIndex;
-            this.descIndex = descIndex;
-            this.index = index;
-            localVariableIndex += length;
-        }
-
-        public Entry(int startPc, int length, String name, String desc, int index) {
-            this(startPc, length, pool.addUtf8(name), pool.addUtf8(desc), index);
-        }
-
-        public Entry(int startPc, int length, String name, String desc) {
-            this(startPc, length, pool.addUtf8(name), pool.addUtf8(desc), localVariableIndex);
-        }
-
-        /** Two entries are equal if they refer to the same index.
-         */
-        public boolean equals(Object other) {
-            if (other instanceof Entry) {
-                Entry otherEntry = (Entry) other;
-                return otherEntry.index == this.index;
-            }
-            return false;
-        }
-    }
-}
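
As a reading aid for the class removed above: each LocalVariableTable entry is five u2 fields (start_pc, length, name_index, descriptor_index, index), hence getSize() = 2 + entries.size() * 10. A self-contained sketch that decodes one such entry from raw bytes (the indices are invented for illustration):

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;

    class LocalVariableTableSketch {
        public static void main(String[] args) throws IOException {
            // One entry: start_pc=0, length=8, name_index=4, descriptor_index=5, index=1
            byte[] raw = { 0, 1,  0, 0,  0, 8,  0, 4,  0, 5,  0, 1 };
            DataInputStream in = new DataInputStream(new ByteArrayInputStream(raw));
            int count = in.readShort();
            for (int i = 0; i < count; ++i)
                System.out.println(in.readShort() + " " + in.readShort() + " "
                    + in.readShort() + " " + in.readShort() + " " + in.readShort());
            // prints: 0 8 4 5 1
        }
    }
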
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMember.java b/src/fjbg/ch/epfl/lamp/fjbg/JMember.java
deleted file mode 100644
index 6356cc8..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMember.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Abstract superclass for a Java class, field or method.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-
-abstract public class JMember {
-
-    protected boolean frozen = false;
-
-    protected final FJBGContext context;
-
-    protected String name;
-
-    protected int accessFlags;
-
-    protected final List/*<JAttribute>*/ attributes = new LinkedList();
-
-    protected JMember(FJBGContext context) { this.context = context; }
-
-    protected JMember(FJBGContext context, int accessFlags, String name) {
-        this(context);
-        this.name = name;
-        this.accessFlags = accessFlags;
-    }
-
-    /**
-     * Gets the access flags of this member.
-     * @return The int representing the access flags of this member.
-     */
-    public int getAccessFlags() { return accessFlags; }
-
-    /**
-     * Gets the name of the member.
-     * @return The string representing the name of the member.
-     */
-    public String getName() { return name; }
-
-    /**
-     * Gets the type of this member (for a class, the type of its instances).
-     * @return The type of this member.
-     */
-    public abstract JType getType();
-
-    /**
-     * Gets the class that owns this member (or the class itself, for a class).
-     * @return The owning class, or the class itself.
-     */
-    public abstract JClass getJClass();
-
-    /**
-     * Gets the constant pool of the class.
-     * @return The constant pool of the class.
-     */
-    public JConstantPool getConstantPool() { return getJClass().getConstantPool(); }
-
-    public FJBGContext getContext() { return context; }
-
-    /**
-     * Adds an attribute to this member.
-     * @param attr The attribute to be added.
-     */
-    public void addAttribute(JAttribute attr) {
-        assert !frozen;
-        attributes.add(attr);
-    }
-
-    /**
-     * Gets the list of all attributes of this member.
-     * @return The list of attributes attached to this member.
-     */
-    public List/*<JAttribute>*/ getAttributes() {
-        return attributes;
-    }
-
-    /**
-     * Get the attribute with the given name, or null if it doesn't
-     * exist.
-     */
-    public JAttribute getAttribute(String name) {
-        Iterator attrIt = getAttributes().iterator();
-        while (attrIt.hasNext()) {
-            JAttribute attr = (JAttribute)attrIt.next();
-            if (attr.getName().equals(name))
-                return attr;
-        }
-        return null;
-    }
-
-    protected static String toExternalName(String name) {
-        return name.replace('/', '.');
-    }
-
-    protected static String toExternalName(JType tpe) {
-        return tpe.toString().replace(':', '.');
-    }
-}
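
A hedged usage sketch of the attribute lookup that the removed JMember provides; the member passed in is assumed to be a class, field or method object built elsewhere with an FJBGContext:

    import ch.epfl.lamp.fjbg.JAttribute;
    import ch.epfl.lamp.fjbg.JMember;

    class AttributeLookupSketch {
        static void dumpAttributes(JMember member) {
            JAttribute table = member.getAttribute("LocalVariableTable");
            if (table != null)
                System.out.println(table);          // toString() follows javap's format
            for (Object a : member.getAttributes()) // raw List, pre-generics style as in fjbg
                System.out.println(((JAttribute) a).getName());
        }
    }
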
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java b/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java
deleted file mode 100644
index 01d58a4..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Representation of a Java method.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JMethod extends JFieldOrMethod {
-    public final static String CLASS_CONSTRUCTOR_NAME = "<clinit>";
-    public final static String INSTANCE_CONSTRUCTOR_NAME = "<init>";
-
-    protected /*final*/ JCode code;
-    protected final String[] argNames;
-
-    protected final LinkedList/*<JLocalVariable>*/ localVariables =
-        new LinkedList();
-    protected int localVariableIndex = 0;
-
-
-    protected JMethod(FJBGContext context,
-                      JClass owner,
-                      int accessFlags,
-                      String name,
-                      JType returnType,
-                      JType[] argTypes,
-                      String[] argNames) {
-        super(context,
-              owner,
-              accessFlags,
-              name,
-              new JMethodType(returnType, argTypes));
-        this.argNames = argNames;
-
-        assert argTypes.length == argNames.length;
-
-        if (isAbstract() || isNative()) {
-            code = null;
-        } else {
-            code = context.JCode(owner, this);
-            addAttribute(context.JCodeAttribute(owner, this));
-
-            if (!isStatic())
-                addNewLocalVariable(owner.getType(), "this");
-
-            for (int i = 0; i < argTypes.length; ++i)
-                addNewLocalVariable(argTypes[i], argNames[i]);
-        }
-    }
-
-    protected JMethod(FJBGContext context,
-                      JClass owner,
-                      DataInputStream stream)
-        throws IOException {
-        super(context, owner, stream);
-
-        assert isAbstract() || isNative() || code != null;
-
-        int n = 0;
-        if (code != null) {
-            for (Iterator it = code.getAttributes().iterator(); it.hasNext(); ) {
-                JAttribute attr = (JAttribute)it.next();
-                if (attr instanceof JLocalVariableTableAttribute)
-                   n = ((JLocalVariableTableAttribute)attr).getMaxLocals();
-            }
-        }
-        this.localVariableIndex = n;
-
-
-        JType[] argTypes = ((JMethodType)getType()).getArgumentTypes();
-        argNames = new String[argTypes.length]; // TODO get from attribute
-        for (int i = 0; i < argNames.length; ++i)
-            argNames[i] = "v"+i;
-    }
-
-    public void freeze() throws JCode.OffsetTooBigException {
-        if (code != null) code.freeze();
-        super.freeze();
-    }
-
-    public JType getReturnType() {
-        return ((JMethodType)type).getReturnType();
-    }
-
-    public JType[] getArgumentTypes() {
-        return ((JMethodType)type).getArgumentTypes();
-    }
-
-    public int getArgsSize() {
-        int size = ((JMethodType)type).getArgsSize();
-        if (!isStatic()) size += 1;  // for this
-        return size;
-    }
-
-    public String[] getArgumentNames() {
-        return argNames;
-    }
-
-    public JCode getCode() {
-        assert !isAbstract();
-        return code;
-    }
-
-    // Invoked by the JCode constructor
-    protected void setCode(JCode code) {
-        assert null == this.code;
-        this.code = code;
-    }
-
-    public JCodeIterator codeIterator() {
-        return new JCodeIterator(code);
-    }
-
-    // Local variables
-    // FIXME: find a better way to manage local variables
-    public JLocalVariable addNewLocalVariable(JType type, String name) {
-        assert !frozen;
-        JLocalVariable var =
-            context.JLocalVariable(this, type, name, localVariableIndex);
-        localVariableIndex += type.getSize();
-        localVariables.add(var);
-        return var;
-    }
-
-    public JLocalVariable getLocalVariable(int index) {
-        for (int i = 0; i < localVariables.size(); i++) {
-            if (((JLocalVariable)localVariables.get(i)).index == index)
-                return (JLocalVariable)localVariables.get(i);
-        }
-        return null;
-    }
-
-    public JLocalVariable[] getLocalVariables() {
-        return (JLocalVariable[])localVariables
-            .toArray(new JLocalVariable[localVariables.size()]);
-    }
-
-
-    public int getMaxLocals() {
-        return localVariableIndex;
-    }
-
-    // Follows javap output format for methods.
-    /*@Override*/ public String toString() {
-        StringBuffer buf = new StringBuffer(flagsToString());
-        String name = getName();
-        if (CLASS_CONSTRUCTOR_NAME.equals(name))
-            buf.append("{}");
-        else {
-            if (INSTANCE_CONSTRUCTOR_NAME.equals(name))
-                name = getOwner().getName();
-            else {
-                buf.append(toExternalName(getReturnType()));
-                buf.append(" ");
-            }
-            buf.append(toExternalName(name));
-            buf.append("(");
-            JType[] ts = getArgumentTypes();
-            for (int i = 0; i < ts.length; ++i) {
-                if (i > 0) buf.append(", ");
-                buf.append(toExternalName(ts[i]));
-            }
-            buf.append(")");
-        }
-        buf.append(";\n");
-        Iterator it = attributes.iterator();
-        while(it.hasNext()) {
-            JAttribute attr = (JAttribute)it.next();
-            buf.append(attr);
-        }
-        return buf.toString();
-    }
-
-    private String flagsToString() {
-        StringBuffer buf = new StringBuffer();
-        if (isPublic()) buf.append("public ");
-        else if (isProtected()) buf.append("protected ");
-        else if (isPrivate()) buf.append("private ");
-        if (isBridge()) buf.append("<bridge> ");
-        if (hasVarargs()) buf.append("<varargs> ");
-        if (isStatic()) buf.append("static ");
-        else if (isNative()) buf.append("native ");
-        if (isAbstract()) buf.append("abstract ");
-        else if (isFinal()) buf.append("final ");
-        return buf.toString();
-    }
-}
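
A hedged sketch of querying the removed JMethod; the method is assumed to come from a JClass built with an FJBGContext. Note that getArgsSize() counts argument slot sizes and adds one for the implicit "this" of instance methods:

    import ch.epfl.lamp.fjbg.JMethod;
    import ch.epfl.lamp.fjbg.JType;

    class MethodInfoSketch {
        static void describe(JMethod method) {
            JType[] args = method.getArgumentTypes();
            System.out.println(method.getName() + " takes " + args.length
                + " argument(s) in " + method.getArgsSize() + " slot(s), max locals = "
                + method.getMaxLocals());
        }
    }
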
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java b/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java
deleted file mode 100644
index cd3d71f..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Type for Java methods. These types do not really exist in Java, but
- * are provided here because they are useful in several places.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JMethodType extends JType {
-    protected final JType returnType;
-    protected final JType[] argTypes;
-    protected String signature = null;
-
-    public final static JMethodType ARGLESS_VOID_FUNCTION =
-        new JMethodType(JType.VOID, JType.EMPTY_ARRAY);
-
-    public JMethodType(JType returnType, JType[] argTypes) {
-        this.returnType = returnType;
-        this.argTypes = argTypes;
-    }
-
-    public JType getReturnType() { return returnType; }
-    public JType[] getArgumentTypes() { return argTypes; }
-
-    public int getSize() {
-        throw new UnsupportedOperationException();
-    }
-
-    public String getSignature() {
-        if (signature == null) {
-            StringBuffer buf = new StringBuffer();
-            buf.append('(');
-            for (int i = 0; i < argTypes.length; ++i)
-                buf.append(argTypes[i].getSignature());
-            buf.append(')');
-            buf.append(returnType.getSignature());
-            signature = buf.toString();
-        }
-        return signature;
-    }
-
-    public int getTag() { return T_UNKNOWN; }
-
-    public String toString() {
-        StringBuffer buf = new StringBuffer();
-        buf.append('(');
-        for (int i = 0; i < argTypes.length; ++i)
-            buf.append(argTypes[i].toString());
-        buf.append(')');
-        buf.append(returnType.toString());
-        return buf.toString();
-    }
-
-    public int getArgsSize() {
-        int size = 0;
-        for (int i = 0; i < argTypes.length; ++i)
-            size += argTypes[i].getSize();
-        return size;
-    }
-
-    public int getProducedStack() {
-        return returnType.getSize() - getArgsSize();
-    }
-
-    public boolean isCompatibleWith(JType other) {
-        return false;
-    }
-    public boolean equals(Object o) {
-        if (o instanceof JMethodType)
-            return ((JMethodType)o).getSignature().equals(this.getSignature());
-        else
-            return false;
-    }
-    public int hashCode() {
-        if (signature == null)
-            return 0;
-        else
-            return signature.hashCode();
-    }
-}
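
The signature built by the removed getSignature() above is the standard JVM method descriptor: argument descriptors between parentheses, followed by the return descriptor. A small sketch (expected output noted in comments, assuming the usual JType descriptors):

    import ch.epfl.lamp.fjbg.JMethodType;
    import ch.epfl.lamp.fjbg.JObjectType;
    import ch.epfl.lamp.fjbg.JType;

    class MethodTypeSketch {
        public static void main(String[] args) {
            JMethodType mt = new JMethodType(JType.VOID,
                new JType[] { JType.INT, JObjectType.JAVA_LANG_STRING });
            System.out.println(mt.getSignature()); // expected: (ILjava/lang/String;)V
            System.out.println(mt.getArgsSize());  // expected: 2 (int and reference, one slot each)
        }
    }
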
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java b/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java
deleted file mode 100644
index 06db5b1..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Types for Java objects.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JObjectType extends JReferenceType {
-    protected final String name;
-    protected String signature = null;
-
-    public final static JObjectType JAVA_LANG_OBJECT =
-        new JObjectType("java.lang.Object");
-    public final static JObjectType JAVA_LANG_STRING =
-        new JObjectType("java.lang.String");
-    public final static JObjectType CLONEABLE =
-        new JObjectType("Cloneable");
-    public final static JObjectType JAVA_IO_SERIALIZABLE =
-        new JObjectType("java.io.Serializable");
-
-    public JObjectType(String name) {
-        this.name = name;
-    }
-
-    public int getSize() { return 1; }
-
-    public String getName() { return name; }
-
-    public String getSignature() {
-        if (signature == null)
-            signature = "L" + name.replace('.','/') + ";";
-        return signature;
-    }
-
-    public String getDescriptor() {
-        return name.replace('.','/');
-    }
-
-    public int getTag() { return T_OBJECT; }
-
-    public String toString() { return name; }
-
-    public boolean isObjectType() { return true; }
-
-    public boolean isCompatibleWith(JType other) {
-        return other instanceof JObjectType
-            || other == JType.REFERENCE;
-    }
-    public boolean equals(Object o) {
-        if (o instanceof JObjectType)
-            return ((JObjectType)o).getSignature().equals(this.getSignature());
-        else
-            return false;
-    }
-    public int hashCode() {
-        return name.hashCode();
-    }
-}
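
For reference, the removed JObjectType distinguishes the field-descriptor form produced by getSignature() from the slash-separated internal name produced by getDescriptor(). A minimal sketch:

    import ch.epfl.lamp.fjbg.JObjectType;

    class ObjectTypeSketch {
        public static void main(String[] args) {
            JObjectType t = new JObjectType("java.lang.Object");
            System.out.println(t.getSignature());  // Ljava/lang/Object;
            System.out.println(t.getDescriptor()); // java/lang/Object
            System.out.println(t.getSize());       // 1 (one stack/local slot)
        }
    }
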
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java b/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java
deleted file mode 100644
index cc68681..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java
+++ /dev/null
@@ -1,1267 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Definition of opcodes for the JVM.
- *
- * @author Michel Schinz, Thomas Friedli
- * @version 1.0
- */
-
-public class JOpcode {
-    public final String name;
-    public final int code;
-
-    // The following attributes can be (statically) unknown for some
-    // instructions, and are therefore not public. Their values must be
-    // obtained through the corresponding accessor methods (see JCodeIterator).
-    protected final int size;
-    protected final JType[] producedDataTypes;
-    protected final JType[] consumedDataTypes;
-    protected final int jumpKind;
-    protected final int successorCount;
-
-    protected final static int UNKNOWN = Integer.MIN_VALUE;
-
-    protected final static int JMP_NONE             = 0;
-    protected final static int JMP_NEXT             = 1;
-    protected final static int JMP_ALWAYS_S2_OFFSET = 2;
-    protected final static int JMP_ALWAYS_S4_OFFSET = 3;
-    protected final static int JMP_MAYBE_S2_OFFSET  = 4;
-    protected final static int JMP_TABLE            = 5;
-    protected final static int JMP_LOOKUP           = 6;
-
-    protected final static JType[] NO_DATA = new JType[0];
-
-    protected final static JType[] INT_TYPE =
-        new JType[] { JType.INT };
-    protected final static JType[] FLOAT_TYPE =
-        new JType[] { JType.FLOAT };
-    protected final static JType[] LONG_TYPE =
-        new JType[] { JType.LONG };
-    protected final static JType[] DOUBLE_TYPE =
-        new JType[] { JType.DOUBLE };
-    protected final static JType[] OBJECT_REF_TYPE =
-        new JType[] { JObjectType.JAVA_LANG_OBJECT };
-    protected final static JType[] ARRAY_REF_TYPE =
-        new JType[] { new JArrayType(JType.VOID) };
-    protected final static JType[] REFERENCE_TYPE =
-        new JType[] { JType.REFERENCE };
-    protected final static JType[] ADDRESS_TYPE =
-        new JType[] { JType.ADDRESS };
-    protected final static JType[] UNKNOWN_TYPE =
-        new JType[] { JType.UNKNOWN };
-
-    /// Instruction codes
-    public final static int cNOP             = 0;
-    public final static int cACONST_NULL     = 1;
-    public final static int cICONST_M1       = 2;
-    public final static int cICONST_0        = 3;
-    public final static int cICONST_1        = 4;
-    public final static int cICONST_2        = 5;
-    public final static int cICONST_3        = 6;
-    public final static int cICONST_4        = 7;
-    public final static int cICONST_5        = 8;
-    public final static int cLCONST_0        = 9;
-    public final static int cLCONST_1        = 10;
-    public final static int cFCONST_0        = 11;
-    public final static int cFCONST_1        = 12;
-    public final static int cFCONST_2        = 13;
-    public final static int cDCONST_0        = 14;
-    public final static int cDCONST_1        = 15;
-    public final static int cBIPUSH          = 16;
-    public final static int cSIPUSH          = 17;
-    public final static int cLDC             = 18;
-    public final static int cLDC_W           = 19;
-    public final static int cLDC2_W          = 20;
-    public final static int cILOAD           = 21;
-    public final static int cLLOAD           = 22;
-    public final static int cFLOAD           = 23;
-    public final static int cDLOAD           = 24;
-    public final static int cALOAD           = 25;
-    public final static int cILOAD_0         = 26;
-    public final static int cILOAD_1         = 27;
-    public final static int cILOAD_2         = 28;
-    public final static int cILOAD_3         = 29;
-    public final static int cLLOAD_0         = 30;
-    public final static int cLLOAD_1         = 31;
-    public final static int cLLOAD_2         = 32;
-    public final static int cLLOAD_3         = 33;
-    public final static int cFLOAD_0         = 34;
-    public final static int cFLOAD_1         = 35;
-    public final static int cFLOAD_2         = 36;
-    public final static int cFLOAD_3         = 37;
-    public final static int cDLOAD_0         = 38;
-    public final static int cDLOAD_1         = 39;
-    public final static int cDLOAD_2         = 40;
-    public final static int cDLOAD_3         = 41;
-    public final static int cALOAD_0         = 42;
-    public final static int cALOAD_1         = 43;
-    public final static int cALOAD_2         = 44;
-    public final static int cALOAD_3         = 45;
-    public final static int cIALOAD          = 46;
-    public final static int cLALOAD          = 47;
-    public final static int cFALOAD          = 48;
-    public final static int cDALOAD          = 49;
-    public final static int cAALOAD          = 50;
-    public final static int cBALOAD          = 51;
-    public final static int cCALOAD          = 52;
-    public final static int cSALOAD          = 53;
-    public final static int cISTORE          = 54;
-    public final static int cLSTORE          = 55;
-    public final static int cFSTORE          = 56;
-    public final static int cDSTORE          = 57;
-    public final static int cASTORE          = 58;
-    public final static int cISTORE_0        = 59;
-    public final static int cISTORE_1        = 60;
-    public final static int cISTORE_2        = 61;
-    public final static int cISTORE_3        = 62;
-    public final static int cLSTORE_0        = 63;
-    public final static int cLSTORE_1        = 64;
-    public final static int cLSTORE_2        = 65;
-    public final static int cLSTORE_3        = 66;
-    public final static int cFSTORE_0        = 67;
-    public final static int cFSTORE_1        = 68;
-    public final static int cFSTORE_2        = 69;
-    public final static int cFSTORE_3        = 70;
-    public final static int cDSTORE_0        = 71;
-    public final static int cDSTORE_1        = 72;
-    public final static int cDSTORE_2        = 73;
-    public final static int cDSTORE_3        = 74;
-    public final static int cASTORE_0        = 75;
-    public final static int cASTORE_1        = 76;
-    public final static int cASTORE_2        = 77;
-    public final static int cASTORE_3        = 78;
-    public final static int cIASTORE         = 79;
-    public final static int cLASTORE         = 80;
-    public final static int cFASTORE         = 81;
-    public final static int cDASTORE         = 82;
-    public final static int cAASTORE         = 83;
-    public final static int cBASTORE         = 84;
-    public final static int cCASTORE         = 85;
-    public final static int cSASTORE         = 86;
-    public final static int cPOP             = 87;
-    public final static int cPOP2            = 88;
-    public final static int cDUP             = 89;
-    public final static int cDUP_X1          = 90;
-    public final static int cDUP_X2          = 91;
-    public final static int cDUP2            = 92;
-    public final static int cDUP2_X1         = 93;
-    public final static int cDUP2_X2         = 94;
-    public final static int cSWAP            = 95;
-    public final static int cIADD            = 96;
-    public final static int cLADD            = 97;
-    public final static int cFADD            = 98;
-    public final static int cDADD            = 99;
-    public final static int cISUB            = 100;
-    public final static int cLSUB            = 101;
-    public final static int cFSUB            = 102;
-    public final static int cDSUB            = 103;
-    public final static int cIMUL            = 104;
-    public final static int cLMUL            = 105;
-    public final static int cFMUL            = 106;
-    public final static int cDMUL            = 107;
-    public final static int cIDIV            = 108;
-    public final static int cLDIV            = 109;
-    public final static int cFDIV            = 110;
-    public final static int cDDIV            = 111;
-    public final static int cIREM            = 112;
-    public final static int cLREM            = 113;
-    public final static int cFREM            = 114;
-    public final static int cDREM            = 115;
-    public final static int cINEG            = 116;
-    public final static int cLNEG            = 117;
-    public final static int cFNEG            = 118;
-    public final static int cDNEG            = 119;
-    public final static int cISHL            = 120;
-    public final static int cLSHL            = 121;
-    public final static int cISHR            = 122;
-    public final static int cLSHR            = 123;
-    public final static int cIUSHR           = 124;
-    public final static int cLUSHR           = 125;
-    public final static int cIAND            = 126;
-    public final static int cLAND            = 127;
-    public final static int cIOR             = 128;
-    public final static int cLOR             = 129;
-    public final static int cIXOR            = 130;
-    public final static int cLXOR            = 131;
-    public final static int cIINC            = 132;
-    public final static int cI2L             = 133;
-    public final static int cI2F             = 134;
-    public final static int cI2D             = 135;
-    public final static int cL2I             = 136;
-    public final static int cL2F             = 137;
-    public final static int cL2D             = 138;
-    public final static int cF2I             = 139;
-    public final static int cF2L             = 140;
-    public final static int cF2D             = 141;
-    public final static int cD2I             = 142;
-    public final static int cD2L             = 143;
-    public final static int cD2F             = 144;
-    public final static int cI2B             = 145;
-    public final static int cI2C             = 146;
-    public final static int cI2S             = 147;
-    public final static int cLCMP            = 148;
-    public final static int cFCMPL           = 149;
-    public final static int cFCMPG           = 150;
-    public final static int cDCMPL           = 151;
-    public final static int cDCMPG           = 152;
-    public final static int cIFEQ            = 153;
-    public final static int cIFNE            = 154;
-    public final static int cIFLT            = 155;
-    public final static int cIFGE            = 156;
-    public final static int cIFGT            = 157;
-    public final static int cIFLE            = 158;
-    public final static int cIF_ICMPEQ       = 159;
-    public final static int cIF_ICMPNE       = 160;
-    public final static int cIF_ICMPLT       = 161;
-    public final static int cIF_ICMPGE       = 162;
-    public final static int cIF_ICMPGT       = 163;
-    public final static int cIF_ICMPLE       = 164;
-    public final static int cIF_ACMPEQ       = 165;
-    public final static int cIF_ACMPNE       = 166;
-    public final static int cGOTO            = 167;
-    public final static int cJSR             = 168;
-    public final static int cRET             = 169;
-    public final static int cTABLESWITCH     = 170;
-    public final static int cLOOKUPSWITCH    = 171;
-    public final static int cIRETURN         = 172;
-    public final static int cLRETURN         = 173;
-    public final static int cFRETURN         = 174;
-    public final static int cDRETURN         = 175;
-    public final static int cARETURN         = 176;
-    public final static int cRETURN          = 177;
-    public final static int cGETSTATIC       = 178;
-    public final static int cPUTSTATIC       = 179;
-    public final static int cGETFIELD        = 180;
-    public final static int cPUTFIELD        = 181;
-    public final static int cINVOKEVIRTUAL   = 182;
-    public final static int cINVOKESPECIAL   = 183;
-    public final static int cINVOKESTATIC    = 184;
-    public final static int cINVOKEINTERFACE = 185;
-    public final static int cNEW             = 187;
-    public final static int cNEWARRAY        = 188;
-    public final static int cANEWARRAY       = 189;
-    public final static int cARRAYLENGTH     = 190;
-    public final static int cATHROW          = 191;
-    public final static int cCHECKCAST       = 192;
-    public final static int cINSTANCEOF      = 193;
-    public final static int cMONITORENTER    = 194;
-    public final static int cMONITOREXIT     = 195;
-    public final static int cWIDE            = 196;
-    public final static int cMULTIANEWARRAY  = 197;
-    public final static int cIFNULL          = 198;
-    public final static int cIFNONNULL       = 199;
-    public final static int cGOTO_W          = 200;
-    public final static int cJSR_W           = 201;
-
-    // Objects representing instructions
-    public final static JOpcode NOP =
-        new JOpcode("NOP", cNOP, 1, NO_DATA, NO_DATA, JMP_NEXT);
-    public final static JOpcode ACONST_NULL = new JOpcode("ACONST_NULL",
-                                                          cACONST_NULL,
-                                                          1,
-                                                          REFERENCE_TYPE,
-                                                          NO_DATA,
-                                                          JMP_NEXT);
-    public final static JOpcode ICONST_M1 =
-        new JOpcode("ICONST_M1", cICONST_M1, 1, INT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ICONST_0 =
-        new JOpcode("ICONST_0", cICONST_0, 1, INT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ICONST_1 =
-        new JOpcode("ICONST_1", cICONST_1, 1, INT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ICONST_2 =
-        new JOpcode("ICONST_2", cICONST_2, 1, INT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ICONST_3 =
-        new JOpcode("ICONST_3", cICONST_3, 1, INT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ICONST_4 =
-        new JOpcode("ICONST_4", cICONST_4, 1, INT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ICONST_5 =
-        new JOpcode("ICONST_5", cICONST_5, 1, INT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode LCONST_0 =
-        new JOpcode("LCONST_0", cLCONST_0, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode LCONST_1 =
-        new JOpcode("LCONST_1", cLCONST_1, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode FCONST_0 =
-        new JOpcode("FCONST_0", cFCONST_0, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode FCONST_1 =
-        new JOpcode("FCONST_1", cFCONST_1, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode FCONST_2 =
-        new JOpcode("FCONST_2", cFCONST_2, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode DCONST_0 =
-        new JOpcode("DCONST_0", cDCONST_0, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode DCONST_1 =
-        new JOpcode("DCONST_1", cDCONST_1, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode BIPUSH =
-        new JOpcode("BIPUSH", cBIPUSH, 2, INT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode SIPUSH =
-        new JOpcode("SIPUSH", cSIPUSH, 3, INT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode LDC =
-        new JOpcode("LDC", cLDC, 2, UNKNOWN_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode LDC_W =
-        new JOpcode("LDC_W", cLDC_W, 3, UNKNOWN_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode LDC2_W =
-        new JOpcode("LDC2_W", cLDC2_W, 3, UNKNOWN_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ILOAD =
-        new JOpcode("ILOAD", cILOAD, 2, INT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode LLOAD =
-        new JOpcode("LLOAD", cLLOAD, 2, LONG_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode FLOAD =
-        new JOpcode("FLOAD", cFLOAD, 2, FLOAT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode DLOAD =
-        new JOpcode("DLOAD", cDLOAD, 2, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ALOAD =
-        new JOpcode("ALOAD", cALOAD, 2, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ILOAD_0 =
-        new JOpcode("ILOAD_0", cILOAD_0, 1, INT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ILOAD_1 =
-        new JOpcode("ILOAD_1", cILOAD_1, 1, INT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ILOAD_2 =
-        new JOpcode("ILOAD_2", cILOAD_2, 1, INT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ILOAD_3 =
-        new JOpcode("ILOAD_3", cILOAD_3, 1, INT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode LLOAD_0 =
-        new JOpcode("LLOAD_0", cLLOAD_0, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode LLOAD_1 =
-        new JOpcode("LLOAD_1", cLLOAD_1, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode LLOAD_2 =
-        new JOpcode("LLOAD_2", cLLOAD_2, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode LLOAD_3 =
-        new JOpcode("LLOAD_3", cLLOAD_3, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode FLOAD_0 =
-        new JOpcode("FLOAD_0", cFLOAD_0, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode FLOAD_1 =
-        new JOpcode("FLOAD_1", cFLOAD_1, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode FLOAD_2 =
-        new JOpcode("FLOAD_2", cFLOAD_2, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode FLOAD_3 =
-        new JOpcode("FLOAD_3", cFLOAD_3, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode DLOAD_0 =
-        new JOpcode("DLOAD_0", cDLOAD_0, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode DLOAD_1 =
-        new JOpcode("DLOAD_1", cDLOAD_1, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode DLOAD_2 =
-        new JOpcode("DLOAD_2", cDLOAD_2, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode DLOAD_3 =
-        new JOpcode("DLOAD_3", cDLOAD_3, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ALOAD_0 =
-        new JOpcode("ALOAD_0", cALOAD_0, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ALOAD_1 =
-        new JOpcode("ALOAD_1", cALOAD_1, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ALOAD_2 =
-        new JOpcode("ALOAD_2", cALOAD_2, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode ALOAD_3 =
-        new JOpcode("ALOAD_3", cALOAD_3, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode IALOAD =
-        new JOpcode("IALOAD",
-                    cIALOAD,
-                    1,
-                    INT_TYPE,
-                    new JType[] {JType.INT, JArrayType.INT},
-                    JMP_NEXT);
-    public final static JOpcode LALOAD =
-        new JOpcode("LALOAD",
-                    cLALOAD,
-                    1,
-                    LONG_TYPE,
-                    new JType[] {JType.INT, JArrayType.LONG},
-                    JMP_NEXT);
-    public final static JOpcode FALOAD =
-        new JOpcode("FALOAD",
-                    cFALOAD,
-                    1,
-                    FLOAT_TYPE,
-                    new JType[] {JType.INT, JArrayType.FLOAT},
-                    JMP_NEXT);
-    public final static JOpcode DALOAD =
-        new JOpcode("DALOAD",
-                    cDALOAD,
-                    1,
-                    DOUBLE_TYPE,
-                    new JType[] {JType.INT, JArrayType.DOUBLE},
-                    JMP_NEXT);
-    public final static JOpcode AALOAD =
-        new JOpcode("AALOAD",
-                    cAALOAD,
-                    1,
-                    REFERENCE_TYPE,
-                    new JType[] {JType.INT, JArrayType.REFERENCE},
-                    JMP_NEXT);
-    public final static JOpcode BALOAD =
-        new JOpcode("BALOAD",
-                    cBALOAD,
-                    1,
-                    INT_TYPE,
-                    new JType[] {JType.INT, new JArrayType(JType.UNKNOWN)},
-                    JMP_NEXT);
-    public final static JOpcode CALOAD =
-        new JOpcode("CALOAD",
-                    cCALOAD,
-                    1,
-                    INT_TYPE,
-                    new JType[] {JType.INT, JArrayType.CHAR},
-                    JMP_NEXT);
-    public final static JOpcode SALOAD =
-        new JOpcode("SALOAD",
-                    cSALOAD,
-                    1,
-                    INT_TYPE,
-                    new JType[] {JType.INT, JArrayType.SHORT},
-                    JMP_NEXT);
-    public final static JOpcode ISTORE =
-        new JOpcode("ISTORE", cISTORE, 2, NO_DATA, INT_TYPE, JMP_NEXT);
-    public final static JOpcode LSTORE =
-        new JOpcode("LSTORE", cLSTORE, 2, NO_DATA, LONG_TYPE, JMP_NEXT);
-    public final static JOpcode FSTORE =
-        new JOpcode("FSTORE", cFSTORE, 2, NO_DATA, FLOAT_TYPE, JMP_NEXT);
-    public final static JOpcode DSTORE =
-        new JOpcode("DSTORE", cDSTORE, 2, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
-    public final static JOpcode ASTORE =
-        new JOpcode("ASTORE", cASTORE, 2, NO_DATA, REFERENCE_TYPE, JMP_NEXT);
-    public final static JOpcode ISTORE_0 =
-        new JOpcode("ISTORE_0", cISTORE_0, 1, NO_DATA, INT_TYPE, JMP_NEXT);
-    public final static JOpcode ISTORE_1 =
-        new JOpcode("ISTORE_1", cISTORE_1, 1, NO_DATA, INT_TYPE, JMP_NEXT);
-    public final static JOpcode ISTORE_2 =
-        new JOpcode("ISTORE_2", cISTORE_2, 1, NO_DATA, INT_TYPE, JMP_NEXT);
-    public final static JOpcode ISTORE_3 =
-        new JOpcode("ISTORE_3", cISTORE_3, 1, NO_DATA, INT_TYPE, JMP_NEXT);
-    public final static JOpcode LSTORE_0 =
-        new JOpcode("LSTORE_0", cLSTORE_0, 1, NO_DATA, LONG_TYPE, JMP_NEXT);
-    public final static JOpcode LSTORE_1 =
-        new JOpcode("LSTORE_1", cLSTORE_1, 1, NO_DATA, LONG_TYPE, JMP_NEXT);
-    public final static JOpcode LSTORE_2 =
-        new JOpcode("LSTORE_2", cLSTORE_2, 1, NO_DATA, LONG_TYPE, JMP_NEXT);
-    public final static JOpcode LSTORE_3 =
-        new JOpcode("LSTORE_3", cLSTORE_3, 1, NO_DATA, LONG_TYPE, JMP_NEXT);
-    public final static JOpcode FSTORE_0 =
-        new JOpcode("FSTORE_0", cFSTORE_0, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT);
-    public final static JOpcode FSTORE_1 =
-        new JOpcode("FSTORE_1", cFSTORE_1, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT);
-    public final static JOpcode FSTORE_2 =
-        new JOpcode("FSTORE_2", cFSTORE_2, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT);
-    public final static JOpcode FSTORE_3 =
-        new JOpcode("FSTORE_3", cFSTORE_3, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT);
-    public final static JOpcode DSTORE_0 =
-        new JOpcode("DSTORE_0", cDSTORE_0, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
-    public final static JOpcode DSTORE_1 =
-        new JOpcode("DSTORE_1", cDSTORE_1, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
-    public final static JOpcode DSTORE_2 =
-        new JOpcode("DSTORE_2", cDSTORE_2, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
-    public final static JOpcode DSTORE_3 =
-        new JOpcode("DSTORE_3", cDSTORE_3, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
-    public final static JOpcode ASTORE_0 = new JOpcode("ASTORE_0",
-                                                       cASTORE_0,
-                                                       1,
-                                                       NO_DATA,
-                                                       REFERENCE_TYPE,
-                                                       JMP_NEXT);
-    public final static JOpcode ASTORE_1 = new JOpcode("ASTORE_1",
-                                                       cASTORE_1,
-                                                       1,
-                                                       NO_DATA,
-                                                       REFERENCE_TYPE,
-                                                       JMP_NEXT);
-    public final static JOpcode ASTORE_2 = new JOpcode("ASTORE_2",
-                                                       cASTORE_2,
-                                                       1,
-                                                       NO_DATA,
-                                                       REFERENCE_TYPE,
-                                                       JMP_NEXT);
-    public final static JOpcode ASTORE_3 = new JOpcode("ASTORE_3",
-                                                       cASTORE_3,
-                                                       1,
-                                                       NO_DATA,
-                                                       REFERENCE_TYPE,
-                                                       JMP_NEXT);
-    public final static JOpcode IASTORE =
-        new JOpcode("IASTORE",
-                    cIASTORE,
-                    1,
-                    NO_DATA,
-                    new JType[] { JType.INT,
-                                  JType.INT,
-                                  JArrayType.INT},
-                    JMP_NEXT);
-    public final static JOpcode LASTORE =
-        new JOpcode("LASTORE",
-                    cLASTORE,
-                    1,
-                    NO_DATA,
-                    new JType[] { JType.LONG,
-                                  JType.INT,
-                                  JArrayType.LONG},
-                    JMP_NEXT);
-    public final static JOpcode FASTORE =
-        new JOpcode("FASTORE",
-                    cFASTORE,
-                    1,
-                    NO_DATA,
-                    new JType[] { JType.FLOAT,
-                                  JType.INT,
-                                  JArrayType.FLOAT},
-                    JMP_NEXT);
-    public final static JOpcode DASTORE =
-        new JOpcode("DASTORE",
-                    cDASTORE,
-                    1,
-                    NO_DATA,
-                    new JType[] { JType.DOUBLE,
-                                  JType.INT,
-                                  JArrayType.DOUBLE},
-                    JMP_NEXT);
-    public final static JOpcode AASTORE =
-        new JOpcode("AASTORE",
-                    cAASTORE,
-                    1,
-                    NO_DATA,
-                    new JType[] { JType.REFERENCE,
-                                  JType.INT,
-                                  JArrayType.REFERENCE},
-                    JMP_NEXT);
-    public final static JOpcode BASTORE =
-        new JOpcode("BASTORE",
-                    cBASTORE,
-                    1,
-                    NO_DATA,
-                    new JType[] { JType.INT,
-                                  JType.INT,
-                                  new JArrayType(JType.UNKNOWN)},
-                    JMP_NEXT);
-    public final static JOpcode CASTORE =
-        new JOpcode("CASTORE",
-                    cCASTORE,
-                    1,
-                    NO_DATA,
-                    new JType[] { JType.INT,
-                                  JType.INT,
-                                  JArrayType.CHAR},
-                    JMP_NEXT);
-    public final static JOpcode SASTORE =
-        new JOpcode("SASTORE",
-                    cSASTORE,
-                    1,
-                    NO_DATA,
-                    new JType[] { JType.INT,
-                                  JType.INT,
-                                  JArrayType.SHORT},
-                    JMP_NEXT);
-    public final static JOpcode POP =
-        new JOpcode("POP", cPOP, 1, NO_DATA, UNKNOWN_TYPE, JMP_NEXT);
-    public final static JOpcode POP2 =
-        new JOpcode("POP2", cPOP2, 1, NO_DATA, UNKNOWN_TYPE, JMP_NEXT);
-    public final static JOpcode DUP =
-        new JOpcode("DUP", cDUP, 1, UNKNOWN_TYPE, UNKNOWN_TYPE, JMP_NEXT);
-    public final static JOpcode DUP_X1 = new JOpcode("DUP_X1",
-                                                     cDUP_X1,
-                                                     1,
-                                                     UNKNOWN_TYPE,
-                                                     UNKNOWN_TYPE,
-                                                     JMP_NEXT);
-    public final static JOpcode DUP_X2 = new JOpcode("DUP_X2",
-                                                     cDUP_X2,
-                                                     1,
-                                                     UNKNOWN_TYPE,
-                                                     UNKNOWN_TYPE,
-                                                     JMP_NEXT);
-    public final static JOpcode DUP2 =
-        new JOpcode("DUP2", cDUP2, 1, UNKNOWN_TYPE, UNKNOWN_TYPE, JMP_NEXT);
-    public final static JOpcode DUP2_X1 = new JOpcode("DUP2_X1",
-                                                      cDUP2_X1,
-                                                      1,
-                                                      UNKNOWN_TYPE,
-                                                      UNKNOWN_TYPE,
-                                                      JMP_NEXT);
-    public final static JOpcode DUP2_X2 = new JOpcode("DUP2_X2",
-                                                      cDUP2_X2,
-                                                      1,
-                                                      UNKNOWN_TYPE,
-                                                      UNKNOWN_TYPE,
-                                                      JMP_NEXT);
-    public final static JOpcode SWAP =
-        new JOpcode("SWAP", cSWAP, 1, UNKNOWN_TYPE, UNKNOWN_TYPE, JMP_NEXT);
-    public final static JOpcode IADD =
-        new JOpcode("IADD",
-                    cIADD,
-                    1,
-                    INT_TYPE,
-                    new JType[] { JType.INT, JType.INT },
-                    JMP_NEXT);
-    public final static JOpcode LADD =
-        new JOpcode("LADD",
-                    cLADD,
-                    1,
-                    LONG_TYPE,
-                    new JType[] { JType.LONG, JType.LONG },
-                    JMP_NEXT);
-    public final static JOpcode FADD =
-        new JOpcode("FADD",
-                    cFADD,
-                    1,
-                    FLOAT_TYPE,
-                    new JType[] { JType.FLOAT, JType.FLOAT },
-                    JMP_NEXT);
-    public final static JOpcode DADD =
-        new JOpcode("DADD",
-                    cDADD,
-                    1,
-                    DOUBLE_TYPE,
-                    new JType[] { JType.DOUBLE, JType.DOUBLE },
-                    JMP_NEXT);
-    public final static JOpcode ISUB =
-        new JOpcode("ISUB",
-                    cISUB,
-                    1,
-                    INT_TYPE,
-                    new JType[] {JType.INT, JType.INT },
-                    JMP_NEXT);
-    public final static JOpcode LSUB =
-        new JOpcode("LSUB",
-                    cLSUB,
-                    1,
-                    LONG_TYPE,
-                    new JType[] { JType.LONG, JType.LONG },
-                    JMP_NEXT);
-    public final static JOpcode FSUB =
-        new JOpcode("FSUB",
-                    cFSUB,
-                    1,
-                    FLOAT_TYPE,
-                    new JType[] { JType.FLOAT, JType.FLOAT },
-                    JMP_NEXT);
-    public final static JOpcode DSUB =
-        new JOpcode("DSUB",
-                    cDSUB,
-                    1,
-                    DOUBLE_TYPE,
-                    new JType[] { JType.DOUBLE, JType.DOUBLE },
-                    JMP_NEXT);
-    public final static JOpcode IMUL =
-        new JOpcode("IMUL",
-                    cIMUL,
-                    1,
-                    INT_TYPE,
-                    new JType[] {JType.INT, JType.INT },
-                    JMP_NEXT);
-    public final static JOpcode LMUL =
-        new JOpcode("LMUL",
-                    cLMUL,
-                    1,
-                    LONG_TYPE,
-                    new JType[] { JType.LONG, JType.LONG },
-                    JMP_NEXT);
-    public final static JOpcode FMUL =
-        new JOpcode("FMUL",
-                    cFMUL,
-                    1,
-                    FLOAT_TYPE,
-                    new JType[] { JType.FLOAT, JType.FLOAT },
-                    JMP_NEXT);
-    public final static JOpcode DMUL =
-        new JOpcode("DMUL",
-                    cDMUL,
-                    1,
-                    DOUBLE_TYPE,
-                    new JType[] { JType.DOUBLE, JType.DOUBLE },
-                    JMP_NEXT);
-    public final static JOpcode IDIV =
-        new JOpcode("IDIV",
-                    cIDIV,
-                    1,
-                    INT_TYPE,
-                    new JType[] {JType.INT, JType.INT },
-                    JMP_NEXT);
-    public final static JOpcode LDIV =
-        new JOpcode("LDIV",
-                    cLDIV,
-                    1,
-                    LONG_TYPE,
-                    new JType[] { JType.LONG, JType.LONG },
-                    JMP_NEXT);
-    public final static JOpcode FDIV =
-        new JOpcode("FDIV",
-                    cFDIV,
-                    1,
-                    FLOAT_TYPE,
-                    new JType[] { JType.FLOAT, JType.FLOAT },
-                    JMP_NEXT);
-    public final static JOpcode DDIV =
-        new JOpcode("DDIV",
-                    cDDIV,
-                    1,
-                    DOUBLE_TYPE,
-                    new JType[] { JType.DOUBLE, JType.DOUBLE },
-                    JMP_NEXT);
-    public final static JOpcode IREM =
-        new JOpcode("IREM",
-                    cIREM,
-                    1,
-                    INT_TYPE,
-                    new JType[] {JType.INT, JType.INT },
-                    JMP_NEXT);
-    public final static JOpcode LREM =
-        new JOpcode("LREM",
-                    cLREM,
-                    1,
-                    LONG_TYPE,
-                    new JType[] { JType.LONG, JType.LONG },
-                    JMP_NEXT);
-    public final static JOpcode FREM =
-        new JOpcode("FREM",
-                    cFREM,
-                    1,
-                    FLOAT_TYPE,
-                    new JType[] { JType.FLOAT, JType.FLOAT },
-                    JMP_NEXT);
-    public final static JOpcode DREM =
-        new JOpcode("DREM",
-                    cDREM,
-                    1,
-                    DOUBLE_TYPE,
-                    new JType[] { JType.DOUBLE, JType.DOUBLE },
-                    JMP_NEXT);
-    public final static JOpcode INEG =
-        new JOpcode("INEG", cINEG, 1, INT_TYPE, INT_TYPE, JMP_NEXT);
-    public final static JOpcode LNEG =
-        new JOpcode("LNEG", cLNEG, 1, LONG_TYPE, LONG_TYPE, JMP_NEXT);
-    public final static JOpcode FNEG =
-        new JOpcode("FNEG", cFNEG, 1, FLOAT_TYPE, FLOAT_TYPE, JMP_NEXT);
-    public final static JOpcode DNEG =
-        new JOpcode("DNEG", cDNEG, 1, DOUBLE_TYPE, DOUBLE_TYPE, JMP_NEXT);
-    public final static JOpcode ISHL =
-        new JOpcode("ISHL", cISHL,
-                    1,
-                    INT_TYPE,
-                    new JType[] { JType.INT, JType.INT },
-                    JMP_NEXT);
-    public final static JOpcode LSHL =
-        new JOpcode("LSHL",
-                    cLSHL,
-                    1,
-                    LONG_TYPE,
-                    new JType [] { JType.INT, JType.LONG },
-                    JMP_NEXT);
-    public final static JOpcode ISHR =
-        new JOpcode("ISHR",
-                    cISHR,
-                    1,
-                    INT_TYPE,
-                    new JType[] { JType.INT, JType.INT },
-                    JMP_NEXT);
-    public final static JOpcode LSHR =
-        new JOpcode("LSHR",
-                    cLSHR,
-                    1,
-                    LONG_TYPE,
-                    new JType[] { JType.INT, JType.LONG },
-                    JMP_NEXT);
-    public final static JOpcode IUSHR =
-        new JOpcode("IUSHR",
-                    cIUSHR,
-                    1,
-                    INT_TYPE,
-                    new JType[] { JType.INT, JType.INT },
-                    JMP_NEXT);
-    public final static JOpcode LUSHR =
-        new JOpcode("LUSHR",
-                    cLUSHR,
-                    1,
-                    LONG_TYPE,
-                    new JType[] { JType.INT, JType.LONG },
-                    JMP_NEXT);
-    public final static JOpcode IAND =
-        new JOpcode("IAND",
-                    cIAND,
-                    1,
-                    INT_TYPE,
-                    new JType[] { JType.INT, JType.INT },
-                    JMP_NEXT);
-    public final static JOpcode LAND =
-        new JOpcode("LAND",
-                    cLAND,
-                    1,
-                    LONG_TYPE,
-                    new JType[] { JType.LONG, JType.LONG },
-                    JMP_NEXT);
-    public final static JOpcode IOR =
-        new JOpcode("IOR",
-                    cIOR,
-                    1,
-                    INT_TYPE,
-                    new JType[] { JType.INT, JType.INT },
-                    JMP_NEXT);
-    public final static JOpcode LOR =
-        new JOpcode("LOR",
-                    cLOR,
-                    1,
-                    LONG_TYPE,
-                    new JType[] { JType.LONG, JType.LONG },
-                    JMP_NEXT);
-    public final static JOpcode IXOR =
-        new JOpcode("IXOR",
-                    cIXOR,
-                    1,
-                    INT_TYPE,
-                    new JType[] { JType.INT, JType.INT },
-                    JMP_NEXT);
-    public final static JOpcode LXOR =
-        new JOpcode("LXOR",
-                    cLXOR,
-                    1,
-                    LONG_TYPE,
-                    new JType[] { JType.LONG, JType.LONG },
-                    JMP_NEXT);
-    public final static JOpcode IINC =
-        new JOpcode("IINC", cIINC, 3, NO_DATA, NO_DATA, JMP_NEXT);
-    public final static JOpcode I2L =
-        new JOpcode("I2L", cI2L, 1, LONG_TYPE, INT_TYPE, JMP_NEXT);
-    public final static JOpcode I2F =
-        new JOpcode("I2F", cI2F, 1, FLOAT_TYPE, INT_TYPE, JMP_NEXT);
-    public final static JOpcode I2D =
-        new JOpcode("I2D", cI2D, 1, DOUBLE_TYPE, INT_TYPE, JMP_NEXT);
-    public final static JOpcode L2I =
-        new JOpcode("L2I", cL2I, 1, INT_TYPE, LONG_TYPE, JMP_NEXT);
-    public final static JOpcode L2F =
-        new JOpcode("L2F", cL2F, 1, FLOAT_TYPE, LONG_TYPE, JMP_NEXT);
-    public final static JOpcode L2D =
-        new JOpcode("L2D", cL2D, 1, DOUBLE_TYPE, LONG_TYPE, JMP_NEXT);
-    public final static JOpcode F2I =
-        new JOpcode("F2I", cF2I, 1, INT_TYPE, FLOAT_TYPE, JMP_NEXT);
-    public final static JOpcode F2L =
-        new JOpcode("F2L", cF2L, 1, LONG_TYPE, FLOAT_TYPE, JMP_NEXT);
-    public final static JOpcode F2D =
-        new JOpcode("F2D", cF2D, 1, DOUBLE_TYPE, FLOAT_TYPE, JMP_NEXT);
-    public final static JOpcode D2I =
-        new JOpcode("D2I", cD2I, 1, INT_TYPE, DOUBLE_TYPE, JMP_NEXT);
-    public final static JOpcode D2L =
-        new JOpcode("D2L", cD2L, 1, LONG_TYPE, DOUBLE_TYPE, JMP_NEXT);
-    public final static JOpcode D2F =
-        new JOpcode("D2F", cD2F, 1, FLOAT_TYPE, DOUBLE_TYPE, JMP_NEXT);
-    public final static JOpcode I2B =
-        new JOpcode("I2B", cI2B, 1, INT_TYPE, INT_TYPE, JMP_NEXT);
-    public final static JOpcode I2C =
-        new JOpcode("I2C", cI2C, 1, INT_TYPE, INT_TYPE, JMP_NEXT);
-    public final static JOpcode I2S =
-        new JOpcode("I2S", cI2S, 1, INT_TYPE, INT_TYPE, JMP_NEXT);
-    public final static JOpcode LCMP =
-        new JOpcode("LCMP",
-                    cLCMP,
-                    1,
-                    INT_TYPE,
-                    new JType[] { JType.LONG, JType.LONG },
-                    JMP_NEXT);
-    public final static JOpcode FCMPL =
-        new JOpcode("FCMPL",
-                    cFCMPL,
-                    1,
-                    INT_TYPE,
-                    new JType[] { JType.FLOAT, JType.FLOAT },
-                    JMP_NEXT);
-    public final static JOpcode FCMPG =
-        new JOpcode("FCMPG",
-                    cFCMPG,
-                    1,
-                    INT_TYPE,
-                    new JType[] { JType.FLOAT, JType.FLOAT },
-                    JMP_NEXT);
-    public final static JOpcode DCMPL =
-        new JOpcode("DCMPL",
-                    cDCMPL,
-                    1,
-                    INT_TYPE,
-                    new JType[] { JType.DOUBLE, JType.DOUBLE },
-                    JMP_NEXT);
-    public final static JOpcode DCMPG =
-        new JOpcode("DCMPG",
-                    cDCMPG,
-                    1,
-                    INT_TYPE,
-                    new JType[] { JType.DOUBLE, JType.DOUBLE },
-                    JMP_NEXT);
-    public final static JOpcode IFEQ =
-        new JOpcode("IFEQ", cIFEQ, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode IFNE =
-        new JOpcode("IFNE", cIFNE, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode IFLT =
-        new JOpcode("IFLT", cIFLT, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode IFGE =
-        new JOpcode("IFGE", cIFGE, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode IFGT =
-        new JOpcode("IFGT", cIFGT, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode IFLE =
-        new JOpcode("IFLE", cIFLE, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode IF_ICMPEQ =
-        new JOpcode("IF_ICMPEQ",
-                    cIF_ICMPEQ,
-                    3,
-                    NO_DATA,
-                    new JType[] { JType.INT, JType.INT },
-                    JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode IF_ICMPNE =
-        new JOpcode("IF_ICMPNE",
-                    cIF_ICMPNE,
-                    3,
-                    NO_DATA,
-                    new JType[] { JType.INT, JType.INT },
-                    JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode IF_ICMPLT =
-        new JOpcode("IF_ICMPLT",
-                    cIF_ICMPLT,
-                    3,
-                    NO_DATA,
-                    new JType[] { JType.INT, JType.INT },
-                    JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode IF_ICMPGE =
-        new JOpcode("IF_ICMPGE",
-                    cIF_ICMPGE,
-                    3,
-                    NO_DATA,
-                    new JType[] { JType.INT, JType.INT },
-                    JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode IF_ICMPGT =
-        new JOpcode("IF_ICMPGT",
-                    cIF_ICMPGT,
-                    3,
-                    NO_DATA,
-                    new JType[] { JType.INT, JType.INT },
-                    JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode IF_ICMPLE =
-        new JOpcode("IF_ICMPLE",
-                    cIF_ICMPLE,
-                    3,
-                    NO_DATA,
-                    new JType[] { JType.INT, JType.INT },
-                    JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode IF_ACMPEQ =
-        new JOpcode("IF_ACMPEQ",
-                    cIF_ACMPEQ,
-                    3,
-                    NO_DATA,
-                    new JType[] { JType.REFERENCE, JType.REFERENCE },
-                    JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode IF_ACMPNE =
-        new JOpcode("IF_ACMPNE",
-                    cIF_ACMPNE,
-                    3,
-                    NO_DATA,
-                    new JType[] { JType.REFERENCE, JType.REFERENCE },
-                    JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode GOTO =
-        new JOpcode("GOTO", cGOTO, 3, NO_DATA, NO_DATA, JMP_ALWAYS_S2_OFFSET);
-    public final static JOpcode JSR =
-        new JOpcode("JSR", cJSR, 3, ADDRESS_TYPE, NO_DATA, JMP_ALWAYS_S2_OFFSET);
-    public final static JOpcode RET =
-        new JOpcode("RET", cRET, 2, NO_DATA, NO_DATA, JMP_NONE);
-    public final static JOpcode TABLESWITCH = new JOpcode("TABLESWITCH",
-                                                          cTABLESWITCH,
-                                                          UNKNOWN,
-                                                          NO_DATA,
-                                                          INT_TYPE,
-                                                          JMP_TABLE);
-    public final static JOpcode LOOKUPSWITCH = new JOpcode("LOOKUPSWITCH",
-                                                           cLOOKUPSWITCH,
-                                                           UNKNOWN,
-                                                           NO_DATA,
-                                                           INT_TYPE,
-                                                           JMP_LOOKUP);
-    public final static JOpcode IRETURN =
-        new JOpcode("IRETURN", cIRETURN, 1, NO_DATA, INT_TYPE, JMP_NONE);
-    public final static JOpcode LRETURN =
-        new JOpcode("LRETURN", cLRETURN, 1, NO_DATA, LONG_TYPE, JMP_NONE);
-    public final static JOpcode FRETURN =
-        new JOpcode("FRETURN", cFRETURN, 1, NO_DATA, FLOAT_TYPE, JMP_NONE);
-    public final static JOpcode DRETURN =
-        new JOpcode("DRETURN", cDRETURN, 1, NO_DATA, DOUBLE_TYPE, JMP_NONE);
-    public final static JOpcode ARETURN = new JOpcode("ARETURN",
-                                                      cARETURN,
-                                                      1,
-                                                      NO_DATA,
-                                                      OBJECT_REF_TYPE,
-                                                      JMP_NONE);
-    public final static JOpcode RETURN =
-        new JOpcode("RETURN", cRETURN, 1, NO_DATA, NO_DATA, JMP_NONE);
-    public final static JOpcode GETSTATIC = new JOpcode("GETSTATIC",
-                                                        cGETSTATIC,
-                                                        3,
-                                                        UNKNOWN_TYPE,
-                                                        NO_DATA,
-                                                        JMP_NEXT);
-    public final static JOpcode PUTSTATIC = new JOpcode("PUTSTATIC",
-                                                        cPUTSTATIC,
-                                                        3,
-                                                        NO_DATA,
-                                                        UNKNOWN_TYPE,
-                                                        JMP_NEXT);
-    public final static JOpcode GETFIELD = new JOpcode("GETFIELD",
-                                                       cGETFIELD,
-                                                       3,
-                                                       UNKNOWN_TYPE,
-                                                       OBJECT_REF_TYPE,
-                                                       JMP_NEXT);
-    public final static JOpcode PUTFIELD =
-        new JOpcode("PUTFIELD", cPUTFIELD, 3, NO_DATA, UNKNOWN_TYPE, JMP_NEXT);
-    public final static JOpcode INVOKEVIRTUAL = new JOpcode("INVOKEVIRTUAL",
-                                                            cINVOKEVIRTUAL,
-                                                            3,
-                                                            NO_DATA,
-                                                            UNKNOWN_TYPE,
-                                                            JMP_NEXT);
-    public final static JOpcode INVOKESPECIAL = new JOpcode("INVOKESPECIAL",
-                                                            cINVOKESPECIAL,
-                                                            3,
-                                                            NO_DATA,
-                                                            UNKNOWN_TYPE,
-                                                            JMP_NEXT);
-    public final static JOpcode INVOKESTATIC = new JOpcode("INVOKESTATIC",
-                                                           cINVOKESTATIC,
-                                                           3,
-                                                           NO_DATA,
-                                                           UNKNOWN_TYPE,
-                                                           JMP_NEXT);
-    public final static JOpcode INVOKEINTERFACE =
-        new JOpcode("INVOKEINTERFACE",
-                    cINVOKEINTERFACE,
-                    5,
-                    NO_DATA,
-                    UNKNOWN_TYPE,
-                    JMP_NEXT);
-    public final static JOpcode NEW =
-        new JOpcode("NEW", cNEW, 3, OBJECT_REF_TYPE, NO_DATA, JMP_NEXT);
-    public final static JOpcode NEWARRAY =
-        new JOpcode("NEWARRAY",
-                    cNEWARRAY,
-                    2,
-                    ARRAY_REF_TYPE,
-                    INT_TYPE,
-                    JMP_NEXT);
-    public final static JOpcode ANEWARRAY =
-        new JOpcode("ANEWARRAY",
-                    cANEWARRAY,
-                    3,
-                    ARRAY_REF_TYPE,
-                    INT_TYPE,
-                    JMP_NEXT);
-    public final static JOpcode ARRAYLENGTH = new JOpcode("ARRAYLENGTH",
-                                                          cARRAYLENGTH,
-                                                          1,
-                                                          INT_TYPE,
-                                                          ARRAY_REF_TYPE,
-                                                          JMP_NEXT);
-    public final static JOpcode ATHROW = new JOpcode("ATHROW",
-                                                     cATHROW,
-                                                     1,
-                                                     OBJECT_REF_TYPE,
-                                                     OBJECT_REF_TYPE,
-                                                     JMP_NONE);
-    public final static JOpcode CHECKCAST = new JOpcode("CHECKCAST",
-                                                        cCHECKCAST,
-                                                        3,
-                                                        OBJECT_REF_TYPE,
-                                                        OBJECT_REF_TYPE,
-                                                        JMP_NEXT);
-    public final static JOpcode INSTANCEOF = new JOpcode("INSTANCEOF",
-                                                         cINSTANCEOF,
-                                                         3,
-                                                         INT_TYPE,
-                                                         OBJECT_REF_TYPE,
-                                                         JMP_NEXT);
-    public final static JOpcode MONITORENTER = new JOpcode("MONITORENTER",
-                                                           cMONITORENTER,
-                                                           1,
-                                                           NO_DATA,
-                                                           OBJECT_REF_TYPE,
-                                                           JMP_NEXT);
-    public final static JOpcode MONITOREXIT = new JOpcode("MONITOREXIT",
-                                                          cMONITOREXIT,
-                                                          1,
-                                                          NO_DATA,
-                                                          OBJECT_REF_TYPE,
-                                                          JMP_NEXT);
-    public final static JOpcode WIDE = new JOpcode("WIDE",
-                                                   cWIDE,
-                                                   UNKNOWN,
-                                                   UNKNOWN_TYPE,
-                                                   UNKNOWN_TYPE,
-                                                   JMP_NEXT);
-    public final static JOpcode MULTIANEWARRAY = new JOpcode("MULTIANEWARRAY",
-                                                             cMULTIANEWARRAY,
-                                                             4,
-                                                             ARRAY_REF_TYPE,
-                                                             UNKNOWN_TYPE,
-                                                             JMP_NEXT);
-    public final static JOpcode IFNULL = new JOpcode("IFNULL",
-                                                     cIFNULL,
-                                                     3,
-                                                     NO_DATA,
-                                                     REFERENCE_TYPE,
-                                                     JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode IFNONNULL = new JOpcode("IFNONNULL",
-                                                        cIFNONNULL,
-                                                        3,
-                                                        NO_DATA,
-                                                        REFERENCE_TYPE,
-                                                        JMP_MAYBE_S2_OFFSET);
-    public final static JOpcode GOTO_W = new JOpcode("GOTO_W",
-                                                     cGOTO_W,
-                                                     5,
-                                                     NO_DATA,
-                                                     NO_DATA,
-                                                     JMP_ALWAYS_S4_OFFSET);
-    public final static JOpcode JSR_W =
-        new JOpcode("JSR_W", cJSR_W, 5, ADDRESS_TYPE, NO_DATA, JMP_NEXT);
-
-    public final static JOpcode[] OPCODES = {
-        NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
-        ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0,
-        LCONST_1, FCONST_0, FCONST_1, FCONST_2, DCONST_0,
-        DCONST_1, BIPUSH, SIPUSH, LDC, LDC_W,
-        LDC2_W, ILOAD, LLOAD, FLOAD, DLOAD,
-        ALOAD, ILOAD_0, ILOAD_1, ILOAD_2, ILOAD_3,
-        LLOAD_0, LLOAD_1, LLOAD_2, LLOAD_3, FLOAD_0,
-        FLOAD_1, FLOAD_2, FLOAD_3, DLOAD_0, DLOAD_1,
-        DLOAD_2, DLOAD_3, ALOAD_0, ALOAD_1, ALOAD_2,
-        ALOAD_3, IALOAD, LALOAD, FALOAD, DALOAD,
-        AALOAD, BALOAD, CALOAD, SALOAD, ISTORE,
-        LSTORE, FSTORE, DSTORE, ASTORE, ISTORE_0,
-        ISTORE_1, ISTORE_2, ISTORE_3, LSTORE_0, LSTORE_1,
-        LSTORE_2, LSTORE_3, FSTORE_0, FSTORE_1, FSTORE_2,
-        FSTORE_3, DSTORE_0, DSTORE_1, DSTORE_2, DSTORE_3,
-        ASTORE_0, ASTORE_1, ASTORE_2, ASTORE_3, IASTORE,
-        LASTORE, FASTORE, DASTORE, AASTORE, BASTORE,
-        CASTORE, SASTORE, POP, POP2, DUP,
-        DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2,
-        SWAP, IADD, LADD, FADD, DADD,
-        ISUB, LSUB, FSUB, DSUB, IMUL,
-        LMUL, FMUL, DMUL, IDIV, LDIV,
-        FDIV, DDIV, IREM, LREM, FREM,
-        DREM, INEG, LNEG, FNEG, DNEG,
-        ISHL, LSHL, ISHR, LSHR, IUSHR,
-        LUSHR, IAND, LAND, IOR, LOR,
-        IXOR, LXOR, IINC, I2L, I2F,
-        I2D, L2I, L2F, L2D, F2I,
-        F2L, F2D, D2I, D2L, D2F,
-        I2B, I2C, I2S, LCMP, FCMPL,
-        FCMPG, DCMPL, DCMPG, IFEQ, IFNE,
-        IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
-        IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
-        IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, RET,
-        TABLESWITCH, LOOKUPSWITCH, IRETURN, LRETURN, FRETURN,
-        DRETURN, ARETURN, RETURN, GETSTATIC, PUTSTATIC,
-        GETFIELD, PUTFIELD, INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC,
-        INVOKEINTERFACE, null, NEW, NEWARRAY, ANEWARRAY,
-        ARRAYLENGTH, ATHROW, CHECKCAST, INSTANCEOF, MONITORENTER,
-        MONITOREXIT, WIDE, MULTIANEWARRAY, IFNULL, IFNONNULL,
-        GOTO_W, JSR_W
-    };
-
-    protected JOpcode(String name,
-                      int code,
-                      int size,
-                      JType[] producedDataTypes,
-                      JType[] consumedDataTypes,
-                      int jumpKind) {
-        this.name = name;
-        this.code = code;
-        this.size = size;
-        this.producedDataTypes = producedDataTypes;
-        this.consumedDataTypes = consumedDataTypes;
-        this.jumpKind = jumpKind;
-        switch (jumpKind) {
-        case JMP_NONE: successorCount = 0; break;
-        case JMP_NEXT: successorCount = 1; break;
-        case JMP_ALWAYS_S2_OFFSET: successorCount = 1; break;
-        case JMP_ALWAYS_S4_OFFSET: successorCount = 1; break;
-        case JMP_MAYBE_S2_OFFSET: successorCount = 2; break;
-        case JMP_TABLE: successorCount = UNKNOWN; break;
-        case JMP_LOOKUP: successorCount = UNKNOWN; break;
-        default: successorCount = UNKNOWN; break;
-        }
-    }
-
-    public String toString() { return name; }
-    protected int getSize() { return size; }
-    protected JType[] getProducedDataTypes() { return producedDataTypes; }
-    protected JType[] getConsumedDataTypes() { return consumedDataTypes; }
-
-    protected int getProducedDataSize() {
-        if (producedDataTypes != UNKNOWN_TYPE)
-            return JType.getTotalSize(producedDataTypes);
-        else {
-            switch (code) {
-            case cLDC: case cLDC_W: case cBALOAD:
-                return 1;
-            case cLDC2_W: case cDUP: case cSWAP:
-                return 2;
-            case cDUP_X1:
-                return 3;
-            case cDUP_X2: case cDUP2:
-                return 4;
-            case cDUP2_X1:
-                return 5;
-            case cDUP2_X2:
-                return 6;
-            default:
-                throw new Error(this.toString());
-            }
-        }
-    }
-
-    protected int getConsumedDataSize() {
-        if (consumedDataTypes != UNKNOWN_TYPE)
-            return JType.getTotalSize(consumedDataTypes);
-        else {
-            switch (code) {
-            case cPOP: case cDUP:
-                return 1;
-            case cPOP2: case cDUP_X1: case cDUP2: case cSWAP:
-                return 2;
-            case cDUP_X2: case cDUP2_X1:
-                return 3;
-            case cDUP2_X2:
-                return 4;
-            default:
-                throw new Error(this.toString());
-            }
-        }
-    }
-
-    protected int getProducedDataTypesNumber() {
-        if (producedDataTypes != UNKNOWN_TYPE)
-            return producedDataTypes.length;
-        else {
-            switch (code) {
-            case cLDC: case cLDC_W: case cLDC2_W: case cBALOAD:
-            case cGETSTATIC: case cGETFIELD:
-                return 1;
-            case cDUP: case cSWAP:
-                return 2;
-            case cDUP_X2: case cDUP2: case cDUP2_X1: case cDUP2_X2:
-                return 2;
-            case cDUP_X1:
-                return 3;
-            default:
-                throw new Error(this.toString());
-            }
-        }
-    }
-
-    protected int getConsumedDataTypesNumber() {
-        if (consumedDataTypes != UNKNOWN_TYPE)
-            return consumedDataTypes.length;
-        else {
-            switch (code) {
-            case cPOP: case cDUP: case cPUTSTATIC:
-                return 1;
-            case cPUTFIELD: case cDUP_X1: case cDUP_X2:
-            case cDUP2: case cDUP2_X1: case cPOP2: case cSWAP:
-                return 2;
-            default:
-                throw new Error(this.toString());
-            }
-        }
-    }
-}
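For readers tracing the removed fjbg backend: a minimal sketch of how the OPCODES table above maps raw opcode bytes back to mnemonics, assuming the ch.epfl.lamp.fjbg classes (deleted by this commit) are still on the classpath. The table is indexed by the opcode byte itself, with a null entry for the unassigned value 186; the class name OpcodeNames is illustrative only.

    import ch.epfl.lamp.fjbg.JOpcode;

    // Hypothetical demo: print the mnemonic for every defined opcode value.
    public class OpcodeNames {
        public static void main(String[] args) {
            for (int b = 0; b < JOpcode.OPCODES.length; b++) {
                JOpcode op = JOpcode.OPCODES[b];  // the table is indexed by the opcode byte
                if (op != null)                   // slot 186 (the unassigned opcode) is null
                    System.out.printf("0x%02X  %s%n", b, op);  // toString() yields the name
            }
        }
    }
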
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java
deleted file mode 100644
index 50aa9d3..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * Attributes which are unknown to the JVM (or at least to this library).
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JOtherAttribute extends JAttribute {
-    protected final String name;
-    protected final byte[] contents;
-    protected final int length;
-
-    public JOtherAttribute(FJBGContext context,
-                           JClass clazz,
-                           Object owner,
-                           String name,
-                           byte[] contents,
-                           int length) {
-        super(context, clazz, name);
-        this.name = name;
-        this.contents = contents;
-        this.length = length;
-    }
-
-    public JOtherAttribute(FJBGContext context,
-                           JClass clazz,
-                           Object owner,
-                           String name,
-                           int size,
-                           DataInputStream stream)
-        throws IOException {
-        super(context, clazz, name);
-        this.name = name;
-        this.contents = new byte[size];
-        this.length = size;
-
-        stream.readFully(contents, 0, length); // read the whole attribute body, not a possibly short read
-    }
-
-    public String getName() { return name; }
-
-    // Follows javap output format for user-defined attributes.
-    /*@Override*/ public String toString() {
-        StringBuffer buf = new StringBuffer("  ");
-        buf.append(name);
-        buf.append(": length = 0x");
-        buf.append(Integer.toHexString(length).toUpperCase());
-        for (int i = 0; i < length; ++i) {
-            if (i % 16 == 0) buf.append("\n   ");
-            buf.append(hexString(contents[i]));
-            buf.append(" ");
-        }
-        buf.append("\n");
-        return buf.toString();
-    }
-
-    protected int getSize() { return length; }
-
-    protected void writeContentsTo(DataOutputStream stream) throws IOException {
-        stream.write(contents, 0, length);
-    }
-
-    private static final String hexString(int i) {
-        return ((0 <= i && i < 16) ? "0" : "")+Integer.toHexString(i).toUpperCase();
-    }
-}
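The toString() above emits a javap-style hex dump of an unrecognized attribute. A self-contained sketch of that layout (16 bytes per row, upper-case, zero-padded hex), with an arbitrary attribute name and payload; unlike the hexString helper above, this masks each byte to avoid sign extension for values >= 0x80.

    // Standalone sketch mirroring JOtherAttribute.toString()'s hex-dump layout.
    public class AttributeDump {
        static String dump(String name, byte[] contents) {
            StringBuilder buf = new StringBuilder("  ");
            buf.append(name).append(": length = 0x")
               .append(Integer.toHexString(contents.length).toUpperCase());
            for (int i = 0; i < contents.length; i++) {
                if (i % 16 == 0) buf.append("\n   ");
                int b = contents[i] & 0xFF;  // mask to one byte before printing
                buf.append(b < 16 ? "0" : "")
                   .append(Integer.toHexString(b).toUpperCase())
                   .append(" ");
            }
            return buf.append("\n").toString();
        }

        public static void main(String[] args) {
            // "ScalaSig" and the payload bytes are made up for the demo.
            System.out.print(dump("ScalaSig", new byte[] { 5, 0, 8, (byte) 0xCA }));
        }
    }
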
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java b/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java
deleted file mode 100644
index 73d1026..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java
+++ /dev/null
@@ -1,19 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Types for Java references, i.e. arrays and objects.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-abstract public class JReferenceType extends JType {
-    public boolean isReferenceType() { return true; }
-
-    abstract public String getDescriptor();
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java
deleted file mode 100644
index 3a17cb2..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * Sourcefile attribute, which can be attached to class files to
- * associate them with their source file.
- *
- * There can be no more than one SourceFile attribute in the attributes table
- * of a given ClassFile structure. See section 4.8.9 of the JVM specification.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JSourceFileAttribute extends JAttribute {
-    protected final String sourceFileName;
-    protected final int sourceFileIndex;
-
-    public JSourceFileAttribute(FJBGContext context,
-                                JClass clazz,
-                                String sourceFileName) {
-        super(context, clazz);
-        this.sourceFileName = sourceFileName;
-        this.sourceFileIndex = clazz.getConstantPool().addUtf8(sourceFileName);
-    }
-
-    public JSourceFileAttribute(FJBGContext context,
-                                JClass clazz,
-                                Object owner,
-                                String name,
-                                int size,
-                                DataInputStream stream)
-        throws IOException {
-        super(context, clazz, name);
-
-        this.sourceFileIndex = stream.readShort();
-        this.sourceFileName = clazz.getConstantPool().lookupUtf8(sourceFileIndex);
-
-        assert name.equals(getName());
-    }
-
-    public String getName() { return "SourceFile"; }
-
-    public String getFileName() { return sourceFileName; }
-
-    // Follows javap output format for SourceFile attribute.
-    /*@Override*/ public String toString() {
-        StringBuffer buf = new StringBuffer("  SourceFile: \"");
-        buf.append(sourceFileName);
-        buf.append("\"\n");
-        return buf.toString();
-    }
-
-    protected int getSize() {
-        return 2; // Short.SIZE
-    }
-
-    protected void writeContentsTo(DataOutputStream stream) throws IOException {
-        stream.writeShort(sourceFileIndex);
-    }
-}
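As getSize() and writeContentsTo() above indicate, the SourceFile attribute body is a single u2 index into the constant pool. A standalone sketch of that serialization, using a hypothetical pool index of 17:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    // Standalone sketch: the SourceFile attribute body is one u2 constant-pool index.
    public class SourceFilePayload {
        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);
            out.writeShort(17);  // same call JSourceFileAttribute.writeContentsTo uses
            System.out.println("payload length = " + bytes.size());  // prints 2
        }
    }
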
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java
deleted file mode 100644
index 72a5484..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java
+++ /dev/null
@@ -1,282 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-
-public class JStackMapTableAttribute extends JAttribute {
-    /** Constant pool of the current classfile. */
-    private JConstantPool pool;
-
-    /** StackMapTable entries */
-    protected final List/*<Frame>*/ entries = new ArrayList();
-    protected int entriesSize = 0;
-    protected boolean usesU2;
-
-    public JStackMapTableAttribute(FJBGContext context,
-                                   JClass clazz,
-                                   JCode code) {
-        super(context, clazz);
-        this.pool = clazz.pool;
-
-        assert code.getOwner().getOwner() == clazz;
-    }
-
-    public JStackMapTableAttribute(FJBGContext context,
-                                   JClass clazz,
-                                   Object owner,
-                                   String name,
-                                   int size,
-                                   DataInputStream stream)
-        throws IOException {
-        super(context, clazz, name);
-        this.pool = clazz.pool;
-
-        int count = stream.readShort();
-        this.usesU2 = count < 65536;
-        for (int i = 0; i < count; ++i)
-           this.entries.add(new Frame(stream));
-        this.entriesSize = computeSize();
-
-        assert name.equals(getName());
-    }
-
-    public String getName() { return "StackMapTable"; }
-
-    // Follows javap output format for StackMapTable attribute.
-    /*@Override*/ public String toString() {
-        Frame frame = null;
-        StringBuffer buf = new StringBuffer("  StackMapTable: number_of_entries = ");
-        buf.append(entries.size());
-        Iterator it = entries.iterator();
-        while (it.hasNext()) {
-            frame = (Frame)it.next();
-            buf.append("\n   frame_type = ");
-            buf.append(frame.tag);
-            buf.append(" /* ");
-            buf.append(getFrameType(frame.tag));
-            buf.append(" */");
-            if (frame.offsetDelta != -1)
-                buf.append("\n     offset_delta = "+frame.offsetDelta);
-            if (frame.locals != null)
-                appendTypeInfoArray(buf, "locals", frame.locals);
-            if (frame.stackItems != null)
-                appendTypeInfoArray(buf, "stack", frame.stackItems);
-        }
-        buf.append("\n");
-        return buf.toString();
-    }
-
-    protected int getSize() {
-        return entriesSize;
-    }
-
-    protected void writeContentsTo(DataOutputStream stream) throws IOException {
-        stream.writeShort(entriesSize);
-        Iterator it = entries.iterator();
-        while (it.hasNext()) {
-            Frame frame = (Frame)it.next();
-            frame.writeContentsTo(stream);
-        }
-    }
-
-    private class TypeInfo {
-        final int tag;
-        final int poolIndexOrOffset; // tag == 7 => poolIndex, tag = 8 => offset
-        private int bytes;
-        TypeInfo(DataInputStream stream) throws IOException {
-            int size = 1;
-            this.tag = stream.readByte();
-            if (tag == 7) { // ITEM_Object; // 7
-                poolIndexOrOffset = stream.readShort();
-                size += 2;
-            } else if (tag == 8) { // ITEM_Uninitialized // 8
-                poolIndexOrOffset = (usesU2) ? stream.readShort() : stream.readInt();
-                size += (usesU2) ? 2 : 4;
-            } else
-                poolIndexOrOffset = -1;
-            this.bytes += size;
-        }
-        int getSize() { return bytes; }
-        void writeContentsTo(DataOutputStream stream) throws IOException {
-            stream.writeByte(tag);
-            if (tag == 7) { // ITEM_Object; // 7
-                stream.writeShort(poolIndexOrOffset);
-            } else if (tag == 8) { // ITEM_Uninitialized // 8
-                if (usesU2) stream.writeShort(poolIndexOrOffset);
-                else stream.writeInt(poolIndexOrOffset);
-            }
-        }
-        /*@Override*/ public String toString() {
-            switch (tag) {
-            case 0: // ITEM_Top
-                return "<top>";
-            case 1: // ITEM_Integer
-                return "int";
-            case 2: // ITEM_Float
-                return "float";
-            case 3: // ITEM_Double
-                return "double";
-            case 4: // ITEM_Long
-                return "long";
-            case 5: // ITEM_Null
-                return "null";
-            case 6: // ITEM_UninitializedThis
-                return "this";
-            case 7: // ITEM_Object
-                String name = pool.lookupClass(poolIndexOrOffset);
-                if (name.startsWith("[")) name = "\""+name+"\"";
-                return "class "+name;
-            case 8: // ITEM_Uninitialized
-                return "<uninitialized>";
-            default:
-                return String.valueOf(tag);
-            }
-        }
-    }
-
-    private class Frame {
-        final int tag;
-        int offsetDelta = -1;
-        TypeInfo[] stackItems = null;
-        TypeInfo[] locals = null;
-        private int bytes;
-        Frame(DataInputStream stream) throws IOException {
-            // The stack_map_frame structure consists of a one-byte tag
-            // followed by zero or more bytes.
-            this.tag = stream.readUnsignedByte();
-            if (tag < 64) { // SAME;  // 0-63
-                //done
-            } else if (tag < 128) { // SAME_LOCALS_1_STACK_ITEM;  // 64-127
-                this.offsetDelta = tag - 64;
-                readStackItems(stream, 1);
-            } else if (tag < 248) { // reserved for future use.
-                assert false : "Tags in the range [128-247] are reserved for future use.";
-            } else if (tag < 251) { // CHOP;  // 248-250
-                int k = 251 - tag;
-                readOffsetDelta(stream);
-            } else if (tag == 251) { // SAME_FRAME_EXTENDED
-                readOffsetDelta(stream);
-            } else if (tag < 255) { // APPEND;  // 252-254
-                readOffsetDelta(stream);
-                readLocals(stream, tag - 251);
-            } else {               // FULL_FRAME;  // 255
-                readOffsetDelta(stream);
-                readLocals(stream);
-                readStackItems(stream);
-            }
-        }
-        int getSize() { return bytes; }
-        void readOffsetDelta(DataInputStream stream) throws IOException {
-            this.offsetDelta = (usesU2) ? stream.readShort() : stream.readInt();
-            this.bytes += (usesU2) ? 2 : 4;
-        }
-        int getOffsetDelta() { return offsetDelta; }
-        void readStackItems(DataInputStream stream, int k) throws IOException {
-            this.stackItems = new TypeInfo[k];
-            for (int i = 0; i < k; ++i) {
-                stackItems[i] = new TypeInfo(stream);
-                this.bytes += stackItems[i].getSize();
-            }
-        }
-        void readStackItems(DataInputStream stream) throws IOException {
-            int k = (usesU2) ? stream.readShort() : stream.readInt();
-            this.bytes += (usesU2) ? 2 : 4;
-            readStackItems(stream, k);
-        }
-        void readLocals(DataInputStream stream, int k) throws IOException {
-            this.locals = new TypeInfo[k];
-            for (int i = 0; i < k; ++i) {
-                locals[i] = new TypeInfo(stream);
-                this.bytes += locals[i].getSize();
-            }
-        }
-        void readLocals(DataInputStream stream) throws IOException {
-            int k = (usesU2) ? stream.readShort() : stream.readInt();
-            this.bytes += (usesU2) ? 2 : 4;
-            readLocals(stream, k);
-        }
-        void writeContentsTo(DataOutputStream stream) throws IOException {
-            stream.writeByte(tag);
-            if (tag < 64) {
-                //done
-            } else if (tag < 128) { // SAME_LOCALS_1_STACK_ITEM;  // 64-127
-                assert stackItems.length == 1;
-                stackItems[0].writeContentsTo(stream);
-            } else if (tag < 248) {
-                assert false : "Tags in the range [128-247] are reserved for future use.";
-            } else if (tag < 251) {
-                if (usesU2) stream.writeShort(offsetDelta);
-                else stream.writeInt(offsetDelta);
-            } else if (tag == 251) {
-                if (usesU2) stream.writeShort(offsetDelta);
-                else stream.writeInt(offsetDelta);
-            } else if (tag < 255) { // APPEND;  // 252-254
-                if (usesU2) stream.writeShort(offsetDelta);
-                else stream.writeInt(offsetDelta);
-                for (int i = 0; i < locals.length; ++i)
-                    locals[i].writeContentsTo(stream);
-            } else {
-                if (usesU2) stream.writeShort(offsetDelta);
-                else stream.writeInt(offsetDelta);
-                for (int i = 0; i < locals.length; ++i)
-                    locals[i].writeContentsTo(stream);
-                for (int i = 0; i < stackItems.length; ++i)
-                    stackItems[i].writeContentsTo(stream);
-            }
-        }
-    }
-
-    private int computeSize() {
-        int size = (usesU2) ? 2 : 4; // number of frames
-        Iterator it = entries.iterator();
-        while (it.hasNext()) {
-            Frame frame = (Frame)it.next();
-            size += frame.getSize();
-        }
-        return size;
-    }
-
-    private static final String getFrameType(int tag) {
-        if (tag < 64) return "same";
-        else if (tag < 128) return "same locals 1 stack item";
-        else if (tag < 248) return "<reserved>";
-        else if (tag < 251) return "chop";
-        else if (tag == 251) return "same frame extended";
-        else if (tag < 255) return "append";
-        else return "full frame";
-    }
-
-    private static StringBuffer appendTypeInfoArray(StringBuffer buf,
-                                                    String s, TypeInfo[] a) {
-        buf.append("\n     ");
-        buf.append(s);
-        buf.append(" = ");
-        if (a.length > 0) {
-            buf.append("[ ");
-            for (int i = 0; i < a.length; ++i) {
-                if (i > 0) buf.append(", ");
-                buf.append(a[i]);
-            }
-            buf.append(" ]");
-        }
-        else
-            buf.append("[]");
-        return buf;
-    }
-
-}
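The Frame constructor and getFrameType() above both key off the stack_map_frame tag ranges from the JVM specification. A standalone sketch of that classification, using the same boundaries:

    // Standalone sketch of the stack_map_frame tag ranges handled above.
    public class FrameKinds {
        static String kindOf(int tag) {
            if (tag < 0 || tag > 255) throw new IllegalArgumentException("tag is a u1");
            if (tag < 64)   return "same";                      // 0-63
            if (tag < 128)  return "same locals 1 stack item";  // 64-127
            if (tag < 248)  return "<reserved>";                // 128-247
            if (tag < 251)  return "chop";                      // 248-250
            if (tag == 251) return "same frame extended";
            if (tag < 255)  return "append";                    // 252-254
            return "full frame";                                // 255
        }

        public static void main(String[] args) {
            for (int tag : new int[] { 0, 64, 200, 249, 251, 253, 255 })
                System.out.println(tag + " -> " + kindOf(tag));
        }
    }
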
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JType.java b/src/fjbg/ch/epfl/lamp/fjbg/JType.java
deleted file mode 100644
index 298a2b0..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JType.java
+++ /dev/null
@@ -1,316 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.IOException;
-import java.io.StringReader;
-import java.util.ArrayList;
-
-/**
- * Representation of Java types.
- *
- * @version 1.0
- * @author Michel Schinz
- */
-
-abstract public class JType {
-    abstract public int getSize();
-    abstract public String getSignature();
-    abstract public int getTag();
-    abstract public String toString();
-    abstract public boolean isCompatibleWith(JType other);
-
-    public boolean isValueType() { return false; }
-    public boolean isObjectType() { return false; }
-    public boolean isArrayType() { return false; }
-    public boolean isReferenceType() { return false; }
-
-    // Tags for types. Taken from BCEL.
-    public static final int T_BOOLEAN = 4;
-    public static final int T_CHAR    = 5;
-    public static final int T_FLOAT   = 6;
-    public static final int T_DOUBLE  = 7;
-    public static final int T_BYTE    = 8;
-    public static final int T_SHORT   = 9;
-    public static final int T_INT     = 10;
-    public static final int T_LONG    = 11;
-    public static final int T_VOID    = 12; // Non-standard
-    public static final int T_ARRAY   = 13;
-    public static final int T_OBJECT  = 14;
-    public static final int T_UNKNOWN = 15;
-    public static final int T_ADDRESS = 16;
-
-    public static final int T_REFERENCE = 17; // type compatible with references
-
-    public static final JType[] EMPTY_ARRAY = new JType[0];
-
-    protected static JType parseSig(StringReader s) throws IOException {
-        int nextChar = s.read();
-        if (nextChar == -1) throw new IllegalArgumentException();
-
-        switch ((char)nextChar) {
-        case 'V' : return VOID;
-        case 'Z' : return BOOLEAN;
-        case 'B' : return BYTE;
-        case 'C' : return CHAR;
-        case 'S' : return SHORT;
-        case 'I' : return INT;
-        case 'F' : return FLOAT;
-        case 'J' : return LONG;
-        case 'D' : return DOUBLE;
-        case 'L': {
-            StringBuffer className = new StringBuffer();
-            for (;;) {
-                nextChar = s.read();
-                if (nextChar == -1 || nextChar == ';') break;
-                className.append(nextChar == '/' ? ':' : ((char)nextChar));
-            }
-            if (nextChar != ';') throw new IllegalArgumentException();
-            return new JObjectType(className.toString());
-        }
-        case '[': {
-            JType elemType = parseSig(s);
-            return new JArrayType(elemType);
-        }
-        case '(': {
-            ArrayList argTps = new ArrayList();
-            for (;;) {
-                s.mark(1);
-                nextChar = s.read();
-                if (nextChar == -1 || nextChar == ')') break;
-                s.reset();
-                argTps.add(parseSig(s));
-            }
-            if (nextChar != ')') throw new IllegalArgumentException("a");
-            JType[] argTpsA = (JType[])argTps.toArray(new JType[argTps.size()]);
-            JType returnType = parseSig(s);
-            return new JMethodType(returnType, argTpsA);
-        }
-        default:
-            throw new IllegalArgumentException();
-        }
-    }
-
-    /**
-     * A signature is a string representing the generic type of a field or
-     * method, or generic type information for a class declaration.
-     * See section 4.4.4 of the JVM specification.
-     */
-    public static JType parseSignature(String signature) {
-        try {
-            StringReader sigReader = new StringReader(signature);
-            JType parsed = parseSig(sigReader);
-            if (sigReader.read() != -1)
-                throw new IllegalArgumentException();
-            return parsed;
-        } catch (IllegalArgumentException e) {
-            throw new IllegalArgumentException("invalid signature " + signature);
-        } catch (IOException e) {
-            throw new Error(e);
-        }
-    }
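A brief usage sketch for parseSignature above, assuming the fjbg classes are available; the expected output follows from the primitive JType singletons defined just below in this file (INT prints "int" with size 1, LONG prints "long" with size 2), and the class name ParseSignatureDemo is illustrative only.

    import ch.epfl.lamp.fjbg.JType;

    // Hypothetical demo of JType.parseSignature on primitive descriptors.
    public class ParseSignatureDemo {
        public static void main(String[] args) {
            JType i = JType.parseSignature("I");
            JType j = JType.parseSignature("J");
            System.out.println(i + " has size " + i.getSize());  // int has size 1
            System.out.println(j + " has size " + j.getSize());  // long has size 2
            try {
                JType.parseSignature("Q");                        // not a valid descriptor tag
            } catch (IllegalArgumentException e) {
                System.out.println("rejected: " + e.getMessage());
            }
        }
    }
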
-
-    public static int getTotalSize(JType[] types) {
-        int size = 0;
-        for (int i = 0; i < types.length; ++i)
-            size += types[i].getSize();
-        return size;
-    }
-
-    protected JType() {}
-
-    public static JType VOID = new JType() {
-        public int getSize() { return 0; }
-        public String getSignature() { return "V"; }
-        public int getTag() { return T_VOID; }
-        public String toString() { return "void"; }
-        public boolean isCompatibleWith(JType other) {
-            throw new UnsupportedOperationException("type VOID is no real "
-                                                    + "data type therefore "
-                                                    + "cannot be assigned to "
-                                                    + other.toString());
-        }
-    };
-
-    public static JType BOOLEAN = new JType() {
-        public int getSize() { return 1; }
-        public String getSignature() { return "Z"; }
-        public int getTag() { return T_BOOLEAN; }
-        public String toString() { return "boolean"; }
-        public boolean isValueType() { return true; }
-        public boolean isCompatibleWith(JType other) {
-            return other == BOOLEAN
-                || other == INT
-                || other == BYTE
-                || other == CHAR
-                || other == SHORT;
-        }
-    };
-
-    public static JType BYTE = new JType() {
-        public int getSize() { return 1; }
-        public String getSignature() { return "B"; }
-        public int getTag() { return T_BYTE; }
-        public String toString() { return "byte"; }
-        public boolean isValueType() { return true; }
-        public boolean isCompatibleWith(JType other) {
-            return other == BOOLEAN
-                || other == INT
-                || other == BYTE
-                || other == CHAR
-                || other == SHORT;
-        }
-    };
-
-    public static JType CHAR = new JType() {
-        public int getSize() { return 1; }
-        public String getSignature() { return "C"; }
-        public int getTag() { return T_CHAR; }
-        public String toString() { return "char"; }
-        public boolean isValueType() { return true; }
-        public boolean isCompatibleWith(JType other) {
-            return other == BOOLEAN
-                || other == INT
-                || other == BYTE
-                || other == CHAR
-                || other == SHORT;
-        }
-    };
-
-    public static JType SHORT = new JType() {
-        public int getSize() { return 1; }
-        public String getSignature() { return "S"; }
-        public int getTag() { return T_SHORT; }
-        public String toString() { return "short"; }
-        public boolean isValueType() { return true; }
-        public boolean isCompatibleWith(JType other) {
-            return other == BOOLEAN
-                || other == INT
-                || other == BYTE
-                || other == CHAR
-                || other == SHORT;
-        }
-    };
-
-    public static JType INT = new JType() {
-        public int getSize() { return 1; }
-        public String getSignature() { return "I"; }
-        public int getTag() { return T_INT; }
-        public String toString() { return "int"; }
-        public boolean isValueType() { return true; }
-        public boolean isCompatibleWith(JType other) {
-            return other == BOOLEAN
-                || other == INT
-                || other == BYTE
-                || other == CHAR
-                || other == SHORT;
-        }
-    };
-
-    public static JType FLOAT = new JType() {
-        public int getSize() { return 1; }
-        public String getSignature() { return "F"; }
-        public int getTag() { return T_FLOAT; }
-        public String toString() { return "float"; }
-        public boolean isValueType() { return true; }
-        public boolean isCompatibleWith(JType other) {
-            return other == FLOAT;
-        }
-    };
-
-    public static JType LONG = new JType() {
-            public int getSize() { return 2; }
-            public String getSignature() { return "J"; }
-            public int getTag() { return T_LONG; }
-            public String toString() { return "long"; }
-            public boolean isValueType() { return true; }
-            public boolean isCompatibleWith(JType other) {
-                return other == LONG;
-            }
-        };
-
-    public static JType DOUBLE = new JType() {
-            public int getSize() { return 2; }
-            public String getSignature() { return "D"; }
-            public int getTag() { return T_DOUBLE; }
-            public String toString() { return "double"; }
-            public boolean isValueType() { return true; }
-            public boolean isCompatibleWith(JType other) {
-                return other == DOUBLE;
-            }
-        };
-
-    public static JType REFERENCE = new JType() {
-            public int getSize() { return 1; }
-            public String getSignature() {
-                throw new UnsupportedOperationException("type REFERENCE is no real "
-                                                        + "data type and therefore "
-                                                        + "has no signature");
-            }
-            public int getTag() { return T_REFERENCE; }
-            public String toString() { return "<reference>"; }
-            public boolean isCompatibleWith(JType other) {
-                throw new UnsupportedOperationException("type REFERENCE is no real "
-                                                        + "data type and therefore "
-                                                        + "cannot be assigned to "
-                                                        + other.toString());
-            }
-        };
-
-    public static JType ADDRESS = new JType() {
-        public int getSize() { return 1; }
-        public String getSignature() {
-            throw new UnsupportedOperationException("type ADDRESS is no usable "
-                                                    + "data type and therefore "
-                                                    + "has no signature");
-        }
-        public int getTag() { return T_ADDRESS; }
-        public String toString() { return "<address>"; }
-        public boolean isCompatibleWith(JType other) {
-            return other == ADDRESS;
-        }
-    };
-
-    public static JType UNKNOWN = new JType() {
-        public int getSize() {
-            throw new UnsupportedOperationException("type UNKNOWN is no real "
-                                                    + "data type and therefore "
-                                                    + "has no size");
-        }
-        public String getSignature() {
-            throw new UnsupportedOperationException("type UNKNOWN is no real "
-                                                    + "data type and therefore "
-                                                    + "has no signature");
-        }
-        public int getTag() { return T_UNKNOWN; }
-        public String toString() { return "<unknown>"; }
-        public boolean isCompatibleWith(JType other) {
-            throw new UnsupportedOperationException("type UNKNOWN is no real "
-                                                    + "data type and therefore "
-                                                    + "cannot be assigned to "
-                                                    + other.toString());
-        }
-    };
-
-    protected static String tagToString(int tag) {
-        switch (tag) {
-        case T_BOOLEAN : return "boolean";
-        case T_CHAR    : return "char";
-        case T_FLOAT   : return "float";
-        case T_DOUBLE  : return "double";
-        case T_BYTE    : return "byte";
-        case T_SHORT   : return "short";
-        case T_INT     : return "int";
-        case T_LONG    : return "long";
-        case T_VOID    : return "void"; // Non-standard
-        case T_ARRAY   : return "[]";
-        case T_OBJECT  : return "Object";
-        case T_UNKNOWN : return "<unknown>";
-        case T_ADDRESS : return "<address>";
-        default:         return String.valueOf(tag);
-        }
-    }
-}
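The one-character signatures returned by getSignature above follow the JVM's standard descriptor encoding, which parseSignature decodes. A minimal Scala sketch of the primitive part of that mapping (illustrative only, not part of the removed FJBG sources):

    object Descriptors {
      // JVM primitive type descriptors, matching the getSignature values above.
      // Object types ("L...;"), arrays ("[") and full method descriptors need a
      // real parser, which is what parseSignature provided.
      def primitiveName(descriptor: Char): Option[String] = descriptor match {
        case 'V' => Some("void")
        case 'Z' => Some("boolean")
        case 'B' => Some("byte")
        case 'C' => Some("char")
        case 'S' => Some("short")
        case 'I' => Some("int")
        case 'J' => Some("long")    // note: long is 'J', not 'L'
        case 'F' => Some("float")
        case 'D' => Some("double")
        case _   => None
      }
    }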
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/Main.java b/src/fjbg/ch/epfl/lamp/fjbg/Main.java
deleted file mode 100644
index 810ee7c..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/Main.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.jar.JarFile;
-import java.util.zip.ZipEntry;
-
-/**
- * Main program entry to execute the FJBG reader from the command line.
- *
- * The reader prints out the decoded data in the same output format as
- * javap, the Java bytecode disassembler of the Sun J2SE SDK.
- *
- * @author Stephane Micheloud
- * @version 1.1
- */
-
-public class Main {
-    private static final String PRODUCT_STRING = "Fast Java Bytecode Generator";
-    private static final String VERSION_STRING = "version 1.1";
-
-    private static final int ACTION_USAGE = 0;
-    private static final int ACTION_DONE = 1;
-    private static final int ACTION_PROCEED = 2;
-
-    private static String classPath = ".";
-    private static String[] classNames = null;
-
-    public static void main(String[] args) {
-        switch (parseArgs(args)) {
-        case ACTION_USAGE: printUsage(); break;
-        case ACTION_PROCEED: processClasses(); break;
-        default:
-        }
-    }
-
-    private static void processClasses() {
-        FJBGContext fjbgContext = new FJBGContext(49, 0);
-        if (classNames.length > 0)
-            try {
-                for (int i = 0; i < classNames.length; ++i)
-                    processClass(fjbgContext, classNames[i]);
-            } catch (IOException e) {
-                System.err.println(e.getMessage());
-            }
-        else
-            System.err.println(
-                "No classes were specified on the command line.  Try -help.");
-    }
-
-    private static void processClass(FJBGContext fjbgContext, String className)
-    throws IOException {
-        InputStream in = getInputStream(className);
-        JClass jclass = fjbgContext.JClass(new DataInputStream(in));
-        System.out.println(jclass);
-        in.close();
-    }
-
-    private static InputStream getInputStream(String className) throws IOException {
-        String name = null;
-        String[] paths = classPath.split(File.pathSeparator);
-        for (int i = 0; i < paths.length; ++i) {
-            File parent = new File(paths[i]);
-            if (parent.isDirectory()) {
-                name = className.replace('.', File.separatorChar)+".class";
-                File f = new File(parent, name);
-                if (f.isFile()) return new FileInputStream(f);
-            } else if (paths[i].endsWith(".jar")) {
-                JarFile f = new JarFile(parent);
-                name = className.replace('.', '/')+".class";
-                ZipEntry e = f.getEntry(name);
-                if (e != null) return f.getInputStream(e);
-            }
-        }
-        throw new IOException("ERROR:Could not find "+className);
-    }
-
-    private static int parseArgs(String[] args) {
-        ArrayList/*<String>*/ classes = new ArrayList();
-        String arg = null;
-        int action = ACTION_USAGE;
-        int i = 0, n = args.length;
-        while (i < n) {
-            arg = args[i];
-            if (arg.equals("-classpath") && (i+1) < n) {
-               classPath = args[i+1]; i += 2;
-            } else if (arg.equals("-cp") && (i+1) < n) {
-               classPath = args[i+1]; i += 2;
-            } else if (arg.equals("-help")) {
-               i = n+1;
-            //} else if (arg.equals("-v")) {
-            //   verbose = true; i += 1;
-            } else if (arg.equals("-version")) {
-               System.err.println(PRODUCT_STRING+" "+VERSION_STRING);
-               action = ACTION_DONE; i = n+1;
-            } else if (arg.startsWith("-")) {
-               System.err.println("invalid flag: "+arg);
-               i = n+1;
-            } else {
-               classes.add(arg); i += 1;
-            }
-        }
-        if (i == n && i > 0) {
-            classNames = (String[])classes.toArray(new String[classes.size()]);
-            action = ACTION_PROCEED;
-        }
-        return action;
-    }
-
-    private static void printUsage() {
-        System.out.println("Usage: fjbg <options> <classes>");
-        System.out.println();
-        System.out.println("where possible options include:");
-        System.out.println("  -cp <path>           Specify where to find user class files");
-        System.out.println("  -classpath <path>    Specify where to find user class files");
-        System.out.println("  -help                Print a synopsis of standard options");
-        System.out.println("  -version             Version information");
-        System.out.println();
-        System.exit(1);
-    }
-}
-
diff --git a/src/fjbg/ch/epfl/lamp/util/ByteArray.java b/src/fjbg/ch/epfl/lamp/util/ByteArray.java
deleted file mode 100644
index b852e1a..0000000
--- a/src/fjbg/ch/epfl/lamp/util/ByteArray.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author  Michel Schinz
- */
-
-package ch.epfl.lamp.util;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-/**
- * Array of bytes.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class ByteArray {
-    protected final static int BYTE_BLOCK_BITS = 8;
-    protected final static int BYTE_BLOCK_SIZE = 1 << BYTE_BLOCK_BITS;
-    protected final static int BYTE_BLOCK_MASK = BYTE_BLOCK_SIZE - 1;
-
-    protected byte[][] data = new byte[][] { new byte[BYTE_BLOCK_SIZE] };
-    protected int pos = 0;  // The next free position.
-
-    protected boolean frozen = false;
-
-    public ByteArray() { }
-
-    public ByteArray(InputStream stream, int size) throws IOException {
-        pos = size;
-        for (int i = 0; size > 0; ++i) {
-            int sizeToRead = Math.min(BYTE_BLOCK_SIZE, size);
-            stream.read(data[i], 0, sizeToRead);
-
-            size -= sizeToRead;
-            if (size > 0) addNewBlock();
-        }
-    }
-
-    public void freeze() { frozen = true; }
-
-    public int nextBytePosition() {
-        return pos;
-    }
-
-    public int getSize() {
-        return pos;
-    }
-
-    protected void addNewBlock() {
-        int nextBlockPos = pos >>> BYTE_BLOCK_BITS;
-        if (nextBlockPos == data.length) {
-            byte[][] newData = new byte[data.length * 2][];
-            System.arraycopy(data, 0, newData, 0, data.length);
-            data = newData;
-        }
-        assert data[nextBlockPos] == null : pos + " " + nextBlockPos;
-        data[nextBlockPos] = new byte[BYTE_BLOCK_SIZE];
-    }
-
-    protected void addByte(int b) {
-        assert !frozen;
-
-        if ((pos & BYTE_BLOCK_MASK) == 0 && pos > 0)
-            addNewBlock();
-        int currPos = pos++;
-        data[currPos >>> BYTE_BLOCK_BITS][currPos & BYTE_BLOCK_MASK] = (byte)b;
-    }
-
-    public void addU1(int i) {
-        assert i <= 0xFF : i;
-        addByte(i);
-    }
-
-    public void addU2(int i) {
-        assert i <= 0xFFFF : i;
-
-        addByte(i >>> 8);
-        addByte(i & 0xFF);
-    }
-
-    public void addU4(int i) {
-        addByte(i >>> 24);
-        addByte((i >>> 16) & 0xFF);
-        addByte((i >>>  8) & 0xFF);
-        addByte(i & 0xFF);
-    }
-
-    public void putByte(int targetPos, int b) {
-        assert !frozen;
-        assert targetPos < pos : targetPos + " >= " + pos;
-
-        data[targetPos >>> BYTE_BLOCK_BITS][targetPos & BYTE_BLOCK_MASK] = (byte)b;
-    }
-
-    public void putU2(int targetPos, int i) {
-        assert i < 0xFFFF : i;
-        putByte(targetPos, i >>> 8);
-        putByte(targetPos + 1, i & 0xFF);
-    }
-
-    public void putU4(int targetPos, int i) {
-        putByte(targetPos, i >>> 24);
-        putByte(targetPos + 1, (i >>> 16) & 0xFF);
-        putByte(targetPos + 2, (i >>>  8) & 0xFF);
-        putByte(targetPos + 3, i & 0xFF);
-    }
-
-    public int getU1(int sourcePos) {
-        assert sourcePos < pos : sourcePos + " >= " + pos;
-        return data[sourcePos >>> BYTE_BLOCK_BITS][sourcePos & BYTE_BLOCK_MASK] & 0xFF;
-    }
-
-    public int getU2(int sourcePos) {
-        return (getU1(sourcePos) << 8) | getU1(sourcePos + 1);
-    }
-
-    public int getU4(int sourcePos) {
-        return (getU2(sourcePos) << 16) | getU2(sourcePos + 2);
-    }
-
-    public int getS1(int sourcePos) {
-        assert sourcePos < pos : sourcePos + " >= " + pos;
-        return data[sourcePos >>> BYTE_BLOCK_BITS][sourcePos & BYTE_BLOCK_MASK];
-    }
-
-    public int getS2(int sourcePos) {
-        return (getS1(sourcePos) << 8) | getU1(sourcePos + 1);
-    }
-
-    public int getS4(int sourcePos) {
-        return (getS2(sourcePos) << 16) | getU2(sourcePos + 2);
-    }
-
-    public void writeTo(OutputStream stream) throws IOException {
-        if (!frozen) freeze();
-
-        for (int i = 0; i < data.length && data[i] != null; ++i) {
-            int len = Math.min(BYTE_BLOCK_SIZE, pos - (i << BYTE_BLOCK_BITS));
-            stream.write(data[i], 0, len);
-        }
-    }
-}
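The addU2/putU2/getU2 family above packs multi-byte values big-endian, high byte first, across 256-byte blocks. A minimal Scala sketch of the same byte order over a flat array (illustrative only, not part of the removed sources):

    object BigEndian {
      // Same byte order as ByteArray.putU2/getU2, over a plain Array[Byte].
      def putU2(buf: Array[Byte], pos: Int, i: Int): Unit = {
        buf(pos)     = (i >>> 8).toByte   // high byte first
        buf(pos + 1) = (i & 0xFF).toByte
      }
      def getU2(buf: Array[Byte], pos: Int): Int =
        ((buf(pos) & 0xFF) << 8) | (buf(pos + 1) & 0xFF)
    }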
diff --git a/src/intellij/README b/src/intellij/README
index 9ef612b..ade8774 100644
--- a/src/intellij/README
+++ b/src/intellij/README
@@ -1,13 +1,8 @@
 Use the latest IntelliJ IDEA release and install the Scala plugin from within the IDE.
 
 The following steps are required to use IntelliJ IDEA on Scala trunk
- - compile "locker" using "ant locker.done"
- - Copy the *.iml.SAMPLE / *.ipr.SAMPLE files to *.iml / *.ipr
- - In IDEA, create a global library named "ant" which contains "ant.jar"
- - Also create an SDK entry named "1.6" containing the java 1.6 SDK
- - In the Scala Facet of the "library" and "reflect" modules, update the path in the
-   command-line argument for "-sourcepath"
- - In the Project Settings, update the "Version Control" to match your checkout
-
-Known problems
- - Due to SI-4365, the "library" module has to be built using "-Yno-generic-signatures"
+ - compile "locker" using "ant locker.done". This will also download some JARs from
+   Maven to ./build/deps, which are included in IntelliJ's classpath.
+ - Run src/intellij/setup.sh
+ - Open ./src/intellij/scala-lang.ipr in IntelliJ
+ - File, Project Settings, Project, SDK. Create an SDK entry named "1.6" containing the java 1.6 SDK
diff --git a/src/intellij/compiler.iml.SAMPLE b/src/intellij/compiler.iml.SAMPLE
index 696c347..9fb9cd5 100644
--- a/src/intellij/compiler.iml.SAMPLE
+++ b/src/intellij/compiler.iml.SAMPLE
@@ -19,11 +19,8 @@
     <orderEntry type="sourceFolder" forTests="false" />
     <orderEntry type="module" module-name="library" />
     <orderEntry type="module" module-name="reflect" />
-    <orderEntry type="module" module-name="asm" />
-    <orderEntry type="module" module-name="fjbg" />
-    <orderEntry type="module" module-name="msil" />
-    <orderEntry type="library" name="ant" level="application" />
-    <orderEntry type="library" name="jline" level="project" />
+    <orderEntry type="module" module-name="asm" exported="" />
+    <orderEntry type="library" exported="" name="ant" level="project" />
   </component>
 </module>
 
diff --git a/src/intellij/diff.sh b/src/intellij/diff.sh
new file mode 100755
index 0000000..54f9248
--- /dev/null
+++ b/src/intellij/diff.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+#
+# Diffs the SAMPLE files against the working project config.
+#
+export SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
+for f in "$SCRIPT_DIR"/*.{iml,ipr}; do
+	echo $f; diff -u $f.SAMPLE $f;
+done
diff --git a/src/intellij/fjbg.iml.SAMPLE b/src/intellij/fjbg.iml.SAMPLE
deleted file mode 100644
index 03eca69..0000000
--- a/src/intellij/fjbg.iml.SAMPLE
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module type="JAVA_MODULE" version="4">
-  <component name="NewModuleRootManager" inherit-compiler-output="true">
-    <exclude-output />
-    <content url="file://$MODULE_DIR$/../fjbg">
-      <sourceFolder url="file://$MODULE_DIR$/../fjbg" isTestSource="false" />
-    </content>
-    <orderEntry type="inheritedJdk" />
-    <orderEntry type="sourceFolder" forTests="false" />
-  </component>
-</module>
-
diff --git a/src/intellij/interactive.iml.SAMPLE b/src/intellij/interactive.iml.SAMPLE
new file mode 100644
index 0000000..c6c8ebb
--- /dev/null
+++ b/src/intellij/interactive.iml.SAMPLE
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+  <component name="FacetManager">
+    <facet type="scala" name="Scala">
+      <configuration>
+        <option name="compilerLibraryLevel" value="Project" />
+        <option name="compilerLibraryName" value="compiler-locker" />
+        <option name="maximumHeapSize" value="1536" />
+        <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
+      </configuration>
+    </facet>
+  </component>
+  <component name="NewModuleRootManager" inherit-compiler-output="true">
+    <exclude-output />
+    <content url="file://$MODULE_DIR$/../interactive">
+      <sourceFolder url="file://$MODULE_DIR$/../interactive" isTestSource="false" />
+    </content>
+    <orderEntry type="inheritedJdk" />
+    <orderEntry type="sourceFolder" forTests="false" />
+    <orderEntry type="module" module-name="library" />
+    <orderEntry type="module" module-name="reflect" />
+    <orderEntry type="module" module-name="compiler" />
+    <orderEntry type="module" module-name="scaladoc" />
+  </component>
+</module>
diff --git a/src/intellij/library.iml.SAMPLE b/src/intellij/library.iml.SAMPLE
index 9c1b7ec..cac53df 100644
--- a/src/intellij/library.iml.SAMPLE
+++ b/src/intellij/library.iml.SAMPLE
@@ -5,7 +5,7 @@
       <configuration>
         <option name="compilerLibraryLevel" value="Project" />
         <option name="compilerLibraryName" value="compiler-locker" />
-        <option name="compilerOptions" value="-sourcepath /Users/luc/scala/scala/src/library -Yno-generic-signatures" />
+        <option name="compilerOptions" value="-sourcepath $BASE_DIR$/src/library" />
         <option name="maximumHeapSize" value="1536" />
         <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
       </configuration>
diff --git a/src/intellij/manual.iml.SAMPLE b/src/intellij/manual.iml.SAMPLE
index 62810e0..3295a4a 100644
--- a/src/intellij/manual.iml.SAMPLE
+++ b/src/intellij/manual.iml.SAMPLE
@@ -18,7 +18,8 @@
     <orderEntry type="inheritedJdk" />
     <orderEntry type="sourceFolder" forTests="false" />
     <orderEntry type="module" module-name="library" />
-    <orderEntry type="library" name="ant" level="application" />
+    <orderEntry type="module" module-name="xml" />
+    <orderEntry type="library" name="ant" level="project" />
   </component>
 </module>
 
diff --git a/src/intellij/msil.iml.SAMPLE b/src/intellij/msil.iml.SAMPLE
deleted file mode 100644
index 56f7947..0000000
--- a/src/intellij/msil.iml.SAMPLE
+++ /dev/null
@@ -1,24 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module type="JAVA_MODULE" version="4">
-  <component name="FacetManager">
-    <facet type="scala" name="Scala">
-      <configuration>
-        <option name="compilerLibraryLevel" value="Project" />
-        <option name="compilerLibraryName" value="compiler-locker" />
-        <option name="maximumHeapSize" value="1536" />
-        <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
-      </configuration>
-    </facet>
-  </component>
-  <component name="NewModuleRootManager" inherit-compiler-output="true">
-    <exclude-output />
-    <content url="file://$MODULE_DIR$/../msil">
-      <sourceFolder url="file://$MODULE_DIR$/../msil" isTestSource="false" />
-      <excludeFolder url="file://$MODULE_DIR$/../msil/ch/epfl/lamp/compiler/msil/tests" />
-    </content>
-    <orderEntry type="inheritedJdk" />
-    <orderEntry type="sourceFolder" forTests="false" />
-    <orderEntry type="module" module-name="library" />
-  </component>
-</module>
-
diff --git a/src/intellij/partest.iml.SAMPLE b/src/intellij/partest.iml.SAMPLE
deleted file mode 100644
index ab4a32a..0000000
--- a/src/intellij/partest.iml.SAMPLE
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module type="JAVA_MODULE" version="4">
-  <component name="FacetManager">
-    <facet type="scala" name="Scala">
-      <configuration>
-        <option name="compilerLibraryLevel" value="Project" />
-        <option name="compilerLibraryName" value="compiler-locker" />
-        <option name="maximumHeapSize" value="1536" />
-        <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
-      </configuration>
-    </facet>
-  </component>
-  <component name="NewModuleRootManager" inherit-compiler-output="true">
-    <exclude-output />
-    <content url="file://$MODULE_DIR$/../partest">
-      <sourceFolder url="file://$MODULE_DIR$/../partest" isTestSource="false" />
-    </content>
-    <orderEntry type="inheritedJdk" />
-    <orderEntry type="sourceFolder" forTests="false" />
-    <orderEntry type="module" module-name="library" />
-    <orderEntry type="module" module-name="reflect" />
-    <orderEntry type="module" module-name="actors" />
-    <orderEntry type="module" module-name="scalap" />
-    <orderEntry type="module" module-name="compiler" />
-    <orderEntry type="library" name="ant" level="application" />
-  </component>
-</module>
-
diff --git a/src/intellij/reflect.iml.SAMPLE b/src/intellij/reflect.iml.SAMPLE
index 10973c5..7d10522 100644
--- a/src/intellij/reflect.iml.SAMPLE
+++ b/src/intellij/reflect.iml.SAMPLE
@@ -5,7 +5,7 @@
       <configuration>
         <option name="compilerLibraryLevel" value="Project" />
         <option name="compilerLibraryName" value="compiler-locker" />
-        <option name="compilerOptions" value="-sourcepath /Users/luc/scala/scala/src/reflect" />
+        <option name="compilerOptions" value="-sourcepath $BASE_DIR$/src/reflect" />
         <option name="maximumHeapSize" value="1536" />
         <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
       </configuration>
diff --git a/src/intellij/repl.iml.SAMPLE b/src/intellij/repl.iml.SAMPLE
new file mode 100644
index 0000000..fc78ffe
--- /dev/null
+++ b/src/intellij/repl.iml.SAMPLE
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+  <component name="FacetManager">
+    <facet type="scala" name="Scala">
+      <configuration>
+        <option name="compilerLibraryLevel" value="Project" />
+        <option name="compilerLibraryName" value="compiler-locker" />
+        <option name="maximumHeapSize" value="1536" />
+        <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
+      </configuration>
+    </facet>
+  </component>
+  <component name="NewModuleRootManager" inherit-compiler-output="true">
+    <exclude-output />
+    <content url="file://$MODULE_DIR$/../repl">
+      <sourceFolder url="file://$MODULE_DIR$/../repl" isTestSource="false" />
+    </content>
+    <orderEntry type="inheritedJdk" />
+    <orderEntry type="sourceFolder" forTests="false" />
+    <orderEntry type="module" module-name="library" />
+    <orderEntry type="module" module-name="reflect" />
+    <orderEntry type="module" module-name="compiler" />
+    <orderEntry type="library" name="repl-deps" level="project" />
+  </component>
+</module>
diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE
index 37307c2..a0765b3 100644
--- a/src/intellij/scala-lang.ipr.SAMPLE
+++ b/src/intellij/scala-lang.ipr.SAMPLE
@@ -33,6 +33,9 @@
   <component name="EntryPointsManager">
     <entry_points version="2.0" />
   </component>
+  <component name="HighlightingAdvisor">
+    <option name="SUGGEST_TYPE_AWARE_HIGHLIGHTING" value="false" />
+  </component>
   <component name="InspectionProjectProfileManager">
     <profiles>
       <profile version="1.0" is_locked="false">
@@ -198,16 +201,15 @@
       <module fileurl="file://$PROJECT_DIR$/actors.iml" filepath="$PROJECT_DIR$/actors.iml" />
       <module fileurl="file://$PROJECT_DIR$/asm.iml" filepath="$PROJECT_DIR$/asm.iml" />
       <module fileurl="file://$PROJECT_DIR$/compiler.iml" filepath="$PROJECT_DIR$/compiler.iml" />
-      <module fileurl="file://$PROJECT_DIR$/fjbg.iml" filepath="$PROJECT_DIR$/fjbg.iml" />
       <module fileurl="file://$PROJECT_DIR$/forkjoin.iml" filepath="$PROJECT_DIR$/forkjoin.iml" />
+      <module fileurl="file://$PROJECT_DIR$/interactive.iml" filepath="$PROJECT_DIR$/interactive.iml" />
       <module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
       <module fileurl="file://$PROJECT_DIR$/manual.iml" filepath="$PROJECT_DIR$/manual.iml" />
-      <module fileurl="file://$PROJECT_DIR$/msil.iml" filepath="$PROJECT_DIR$/msil.iml" />
-      <module fileurl="file://$PROJECT_DIR$/partest.iml" filepath="$PROJECT_DIR$/partest.iml" />
       <module fileurl="file://$PROJECT_DIR$/reflect.iml" filepath="$PROJECT_DIR$/reflect.iml" />
+      <module fileurl="file://$PROJECT_DIR$/repl.iml" filepath="$PROJECT_DIR$/repl.iml" />
       <module fileurl="file://$PROJECT_DIR$/scala.iml" filepath="$PROJECT_DIR$/scala.iml" />
+      <module fileurl="file://$PROJECT_DIR$/scaladoc.iml" filepath="$PROJECT_DIR$/scaladoc.iml" />
       <module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
-      <module fileurl="file://$PROJECT_DIR$/swing.iml" filepath="$PROJECT_DIR$/swing.iml" />
       <module fileurl="file://$PROJECT_DIR$/test.iml" filepath="$PROJECT_DIR$/test.iml" />
     </modules>
   </component>
@@ -225,24 +227,52 @@
     <mapping directory="$PROJECT_DIR$/../.." vcs="Git" />
   </component>
   <component name="libraryTable">
+    <library name="ant">
+      <CLASSES>
+        <root url="jar://$PROJECT_DIR$/../../lib/ant/ant.jar!/" />
+      </CLASSES>
+      <JAVADOC />
+      <SOURCES />
+    </library>
     <library name="compiler-locker">
       <CLASSES>
         <root url="file://$PROJECT_DIR$/../../build/locker/classes/library" />
         <root url="file://$PROJECT_DIR$/../../build/locker/classes/compiler" />
         <root url="file://$PROJECT_DIR$/../../build/locker/classes/reflect" />
-        <root url="file://$PROJECT_DIR$/../../build/libs/classes/fjbg" />
         <root url="file://$PROJECT_DIR$/../../build/asm/classes" />
       </CLASSES>
       <JAVADOC />
       <SOURCES />
     </library>
-    <library name="jline">
+    <library name="junit">
       <CLASSES>
-        <root url="jar://$PROJECT_DIR$/../../lib/jline.jar!/" />
+        <root url="file://$PROJECT_DIR$/../../build/deps/junit" />
       </CLASSES>
       <JAVADOC />
-      <SOURCES />
+      <SOURCES>
+        <root url="file://$PROJECT_DIR$/../../build/deps/junit" />
+      </SOURCES>
+      <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/junit" recursive="false" />
+      <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/junit" recursive="false" type="SOURCES" />
+    </library>
+    <library name="partest-deps">
+      <CLASSES>
+        <root url="file://$PROJECT_DIR$/../../build/deps/partest" />
+      </CLASSES>
+      <JAVADOC />
+      <SOURCES>
+        <root url="file://$PROJECT_DIR$/../../build/deps/junit" />
+      </SOURCES>
+      <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/partest" recursive="false" />
+      <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/junit" recursive="false" type="SOURCES" />
     </library>
+    <library name="repl-deps">
+       <CLASSES>
+        <root url="file://$PROJECT_DIR$/../../build/deps/repl" />
+       </CLASSES>
+       <JAVADOC />
+       <SOURCES />
+      <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/repl" recursive="false" />
+     </library>
   </component>
 </project>
-
diff --git a/src/intellij/scala.iml.SAMPLE b/src/intellij/scala.iml.SAMPLE
index 8ea9d0d..a4d8638 100644
--- a/src/intellij/scala.iml.SAMPLE
+++ b/src/intellij/scala.iml.SAMPLE
@@ -2,7 +2,9 @@
 <module type="JAVA_MODULE" version="4">
   <component name="NewModuleRootManager" inherit-compiler-output="true">
     <exclude-output />
-    <content url="file://$MODULE_DIR$/../.." />
+    <content url="file://$MODULE_DIR$/../..">
+      <excludeFolder url="file://$MODULE_DIR$/../../build" />
+    </content>
     <orderEntry type="inheritedJdk" />
     <orderEntry type="sourceFolder" forTests="false" />
   </component>
diff --git a/src/intellij/scaladoc.iml.SAMPLE b/src/intellij/scaladoc.iml.SAMPLE
new file mode 100644
index 0000000..07bea5b
--- /dev/null
+++ b/src/intellij/scaladoc.iml.SAMPLE
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+  <component name="FacetManager">
+    <facet type="scala" name="Scala">
+      <configuration>
+        <option name="compilerLibraryLevel" value="Project" />
+        <option name="compilerLibraryName" value="compiler-locker" />
+        <option name="maximumHeapSize" value="1536" />
+        <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
+      </configuration>
+    </facet>
+  </component>
+  <component name="NewModuleRootManager" inherit-compiler-output="true">
+    <exclude-output />
+    <content url="file://$MODULE_DIR$/../scaladoc">
+      <sourceFolder url="file://$MODULE_DIR$/../scaladoc" isTestSource="false" />
+    </content>
+    <orderEntry type="inheritedJdk" />
+    <orderEntry type="sourceFolder" forTests="false" />
+    <orderEntry type="module" module-name="library" />
+    <orderEntry type="module" module-name="reflect" />
+    <orderEntry type="module" module-name="compiler" />
+    <orderEntry type="module" module-name="xml" />
+    <orderEntry type="module" module-name="parser-combinators" />
+    <orderEntry type="module" module-name="partest" />
+  </component>
+</module>
diff --git a/src/intellij/setup.sh b/src/intellij/setup.sh
new file mode 100755
index 0000000..bd324ba
--- /dev/null
+++ b/src/intellij/setup.sh
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+#
+# Generates IntelliJ IDEA project files based on the checked-in samples.
+#
+
+set -e
+export SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
+export BASE="$( cd "$( dirname "$0" )"/../.. && pwd )"
+echo "About to delete .ipr and .iml files and replace with the .SAMPLE files. Press enter to continue or CTRL-C to cancel."
+read
+
+(rm -f *.ipr *.iml 2>/dev/null)
+for f in $(ls "$SCRIPT_DIR"/*.SAMPLE); do
+	NEW_FILE=`echo $f | perl -pe 's/.SAMPLE//'`;
+
+	cp $f $NEW_FILE
+
+	# IntelliJ doesn't process the "compilerOptions" setting for variable
+	# replacement. If it did, we would just use "$PROJECT_DIR$". Instead,
+	# we do this replacement ourselves.
+	perl -pi -e 's/\$BASE_DIR\$/$ENV{"BASE"}/g' $NEW_FILE
+	echo "Created $NEW_FILE"
+done
diff --git a/src/intellij/swing.iml.SAMPLE b/src/intellij/swing.iml.SAMPLE
deleted file mode 100644
index c97bfdf..0000000
--- a/src/intellij/swing.iml.SAMPLE
+++ /dev/null
@@ -1,24 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module type="JAVA_MODULE" version="4">
-  <component name="FacetManager">
-    <facet type="scala" name="Scala">
-      <configuration>
-        <option name="compilerLibraryLevel" value="Project" />
-        <option name="compilerLibraryName" value="compiler-locker" />
-        <option name="maximumHeapSize" value="1536" />
-        <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
-      </configuration>
-    </facet>
-  </component>
-  <component name="NewModuleRootManager" inherit-compiler-output="true">
-    <exclude-output />
-    <content url="file://$MODULE_DIR$/../swing">
-      <sourceFolder url="file://$MODULE_DIR$/../swing" isTestSource="false" />
-    </content>
-    <orderEntry type="inheritedJdk" />
-    <orderEntry type="sourceFolder" forTests="false" />
-    <orderEntry type="module" module-name="library" />
-    <orderEntry type="module" module-name="actors" />
-  </component>
-</module>
-
diff --git a/src/intellij/test.iml.SAMPLE b/src/intellij/test.iml.SAMPLE
index 112fec4..423be20 100644
--- a/src/intellij/test.iml.SAMPLE
+++ b/src/intellij/test.iml.SAMPLE
@@ -6,15 +6,16 @@
     <orderEntry type="inheritedJdk" />
     <orderEntry type="sourceFolder" forTests="false" />
     <orderEntry type="module" module-name="library" />
+    <orderEntry type="module" module-name="xml" />
+    <orderEntry type="module" module-name="parser-combinators" />
     <orderEntry type="module" module-name="reflect" />
     <orderEntry type="module" module-name="compiler" />
     <orderEntry type="module" module-name="actors" />
     <orderEntry type="module" module-name="swing" />
     <orderEntry type="module" module-name="partest" />
     <orderEntry type="module" module-name="asm" />
-    <orderEntry type="module" module-name="fjbg" />
     <orderEntry type="module" module-name="forkjoin" />
-    <orderEntry type="module" module-name="msil" />
+    <orderEntry type="library" name="junit" level="project" />
   </component>
 </module>
 
diff --git a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
new file mode 100644
index 0000000..2e4f6b0
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
@@ -0,0 +1,444 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+import scala.util.control.ControlThrowable
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.util.FailedInterrupt
+import scala.tools.nsc.util.EmptyAction
+import scala.tools.nsc.util.WorkScheduler
+import scala.reflect.internal.util.{SourceFile, Position}
+import scala.tools.nsc.util.InterruptReq
+
+/** Interface of the interactive compiler to a client such as an IDE.
+ *  The model of the presentation compiler consists of the following parts:
+ *
+ *  unitOfFile: The map from sourcefiles to loaded units. A sourcefile/unit is loaded if it occurs in that map.
+ *
+ *  manipulated by: removeUnitOf, reloadSources.
+ *
+ *  A call to reloadSources will add the given sources to the loaded units, and
+ *  start a new background compiler pass to compile all loaded units (with the indicated sources first).
+ *  Each background compiler pass has its own typer run.
+ *  The background compiler thread can be interrupted each time an AST node is
+ *  completely typechecked in the following ways:
+ *
+ *  1. by a new call to reloadSources. This starts a new background compiler pass with a new typer run.
+ *  2. by a call to askTypeTree. This starts a new typer run if the forceReload parameter = true
+ *  3. by a call to askTypeAt, askTypeCompletion, askScopeCompletion, askToDoFirst, askLinkPos, askLastType.
+ *  4. by raising an exception in the scheduler.
+ *  5. by passing a high-priority action wrapped in ask { ... }.
+ *
+ *  Actions under 1-3 can themselves be interrupted if they involve typechecking
+ *  AST nodes. High-priority actions under 5 cannot; they always run to completion.
+ *  So these high-priority actions should be short.
+ *
+ *  Normally, an interrupted action continues after the interrupting action is finished.
+ *  However, if the interrupting action created a new typer run, the interrupted
+ *  action is aborted. If there's an outstanding response, it will be set to
+ *  a Right value with a FreshRunReq exception.
+ */
+trait CompilerControl { self: Global =>
+
+  type Response[T] = scala.tools.nsc.interactive.Response[T]
+
+  /** The scheduler by which client and compiler communicate
+   *  Must be initialized before starting compileRunner.
+   */
+  @volatile protected[interactive] var scheduler = new WorkScheduler
+
+  /** Return the compilation unit attached to a source file, or None
+   *  if source is not loaded.
+   */
+  def getUnitOf(s: SourceFile): Option[RichCompilationUnit] = getUnit(s)
+
+  /** Run operation `op` on a compilation unit associated with the given `source`.
+   *  If source has a loaded compilation unit, that unit is passed to `op`.
+   *  Otherwise a new compilation unit is created, but not added to the set of loaded units.
+   */
+  def onUnitOf[T](source: SourceFile)(op: RichCompilationUnit => T): T =
+    op(unitOfFile.getOrElse(source.file, new RichCompilationUnit(source)))
+
+  /** Removes the CompilationUnit corresponding to the given SourceFile
+   *  from consideration for recompilation.
+   */
+  def removeUnitOf(s: SourceFile): Option[RichCompilationUnit] = { toBeRemoved += s.file; unitOfFile get s.file }
+
+  /** Returns the top level classes and objects that were deleted
+   * in the editor since last time recentlyDeleted() was called.
+   */
+  def recentlyDeleted(): List[Symbol] = deletedTopLevelSyms.synchronized {
+    val result = deletedTopLevelSyms
+    deletedTopLevelSyms.clear()
+    result.toList
+  }
+
+  /** Locate smallest tree that encloses position
+   *  @pre Position must be loaded
+   */
+  def locateTree(pos: Position): Tree = onUnitOf(pos.source) { unit => new Locator(pos) locateIn unit.body }
+
+  /** Locates smallest context that encloses position as an optional value.
+   */
+  def locateContext(pos: Position): Option[Context] =
+    for (unit <- getUnit(pos.source); cx <- locateContext(unit.contexts, pos)) yield cx
+
+  /** Returns the smallest context that contains the given `pos`, or throws a FatalError if none exists.
+   */
+  def doLocateContext(pos: Position): Context = locateContext(pos) getOrElse {
+    throw new FatalError("no context found for "+pos)
+  }
+
+  private def postWorkItem(item: WorkItem) =
+    if (item.onCompilerThread) item() else scheduler.postWorkItem(item)
+
+  /** Makes sure a set of compilation units is loaded and parsed.
+   *  Returns () to syncvar `response` on completion.
+   *  Afterwards a new background compiler run is started with
+   *  the given sources at the head of the list of to-be-compiled sources.
+   */
+  def askReload(sources: List[SourceFile], response: Response[Unit]) = {
+    val superseded = scheduler.dequeueAll {
+      case ri: ReloadItem if ri.sources == sources => Some(ri)
+      case _ => None
+    }
+    superseded.foreach(_.response.set(()))
+    postWorkItem(new ReloadItem(sources, response))
+  }
+
+  /** Removes source files and toplevel symbols, and issues a new typer run.
+   *  Returns () to syncvar `response` on completion.
+   */
+  def askFilesDeleted(sources: List[SourceFile], response: Response[Unit]) = {
+    postWorkItem(new FilesDeletedItem(sources, response))
+  }
+
+  /** Sets sync var `response` to the smallest fully attributed tree that encloses position `pos`.
+   *  Note: Unlike for most other ask... operations, the source file belonging to `pos` need not be loaded.
+   */
+  def askTypeAt(pos: Position, response: Response[Tree]) =
+    postWorkItem(new AskTypeAtItem(pos, response))
+
+  /** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`.
+   *  @pre `source` needs to be loaded.
+   *  @note Deprecated because of race conditions in the typechecker when the background compiler
+   *        is interrupted while typing the same `source`.
+   *  @see  SI-6578
+   */
+  @deprecated("Use `askLoadedTyped` instead to avoid race conditions in the typechecker", "2.10.1")
+  def askType(source: SourceFile, forceReload: Boolean, response: Response[Tree]) =
+    postWorkItem(new AskTypeItem(source, forceReload, response))
+
+  /** Sets sync var `response` to the position of the definition of the given link in
+   *  the given sourcefile.
+   *
+   *  @param   sym      The symbol referenced by the link (might come from a classfile)
+   *  @param   source   The source file that's supposed to contain the definition
+   *  @param   response A response that will be set to the following:
+   *                    If `source` contains a definition that is referenced by the given link
+   *                    the position of that definition, otherwise NoPosition.
+   *  Note: This operation does not automatically load `source`. If `source`
+   *  is unloaded, it stays that way.
+   */
+  def askLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) =
+    postWorkItem(new AskLinkPosItem(sym, source, response))
+
+  /** Sets sync var `response` to doc comment information for a given symbol.
+   *
+   *  @param   sym        The symbol whose doc comment should be retrieved (might come from a classfile)
+   *  @param   source     The source file that's supposed to contain the definition
+   *  @param   site       The symbol where 'sym' is observed
+   *  @param   fragments  All symbols that can contribute to the generated documentation
+   *                      together with their source files.
+   *  @param   response   A response that will be set to the following:
+   *                      If `source` contains a definition of a given symbol that has a doc comment,
+   *                      the (expanded, raw, position) triplet for a comment, otherwise ("", "", NoPosition).
+   *  Note: This operation does not automatically load sources that are not yet loaded.
+   */
+  def askDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]): Unit =
+    postWorkItem(new AskDocCommentItem(sym, source, site, fragments, response))
+
+  @deprecated("Use method that accepts fragments", "2.10.2")
+  def askDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]): Unit =
+    askDocComment(sym, source, site, (sym,source)::Nil, response)
+
+  /** Sets sync var `response` to list of members that are visible
+   *  as members of the tree enclosing `pos`, possibly reachable by an implicit.
+   *  @pre  source is loaded
+   */
+  def askTypeCompletion(pos: Position, response: Response[List[Member]]) =
+    postWorkItem(new AskTypeCompletionItem(pos, response))
+
+  /** Sets sync var `response` to list of members that are visible
+   *  as members of the scope enclosing `pos`.
+   *  @pre  source is loaded
+   */
+  def askScopeCompletion(pos: Position, response: Response[List[Member]]) =
+    postWorkItem(new AskScopeCompletionItem(pos, response))
+
+  /** Asks that the unit corresponding to the given source file be processed first on the present and subsequent type checking passes.
+   *  If the file is in the 'crashedFiles' ignore list it is removed and typechecked normally.
+   */
+  def askToDoFirst(source: SourceFile) =
+    postWorkItem(new AskToDoFirstItem(source))
+
+  /** If source is not yet loaded, loads it and starts a new run; otherwise
+   * continues with the current pass.
+   * Waits until source is fully type checked and returns body in response.
+   * @param source     The source file that needs to be fully typed.
+   * @param keepLoaded Whether to keep that file in the PC if it was not loaded before. If
+   *                   the file is already loaded, this flag is ignored.
+   * @param response   The response, which is set to the fully attributed tree of `source`.
+   *                   If the unit corresponding to `source` has been removed in the meantime,
+   *                   a NoSuchUnitError is raised in the response.
+   */
+  def askLoadedTyped(source:SourceFile, keepLoaded: Boolean, response: Response[Tree]): Unit =
+    postWorkItem(new AskLoadedTypedItem(source, keepLoaded, response))
+
+  final def askLoadedTyped(source: SourceFile, response: Response[Tree]): Unit =
+    askLoadedTyped(source, false, response)
+
+  /** If source is not yet loaded, get an outline view with askParsedEntered.
+   *  If source is loaded, wait for it to be typechecked.
+   *  In both cases, set response to parsed (and possibly typechecked) tree.
+   *  @param keepSrcLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
+   */
+  def askStructure(keepSrcLoaded: Boolean)(source: SourceFile, response: Response[Tree]) = {
+    getUnit(source) match {
+      case Some(_) => askLoadedTyped(source, keepSrcLoaded, response)
+      case None => askParsedEntered(source, keepSrcLoaded, response)
+    }
+  }
+
+  /** Set sync var `response` to the parse tree of `source` with all top-level symbols entered.
+   *  @param source       The source file to be analyzed
+   *  @param keepLoaded   If set to `true`, source file will be kept as a loaded unit afterwards.
+   *                      If keepLoaded is `false` the operation is run at low priority, only after
+   *                      everything is brought up to date in a regular type checker run.
+   *  @param response     The response.
+   */
+  def askParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) =
+    postWorkItem(new AskParsedEnteredItem(source, keepLoaded, response))
+
+
+  /** Cancels the current compiler run and starts a fresh one where everything will be re-typechecked
+   *  (but not re-loaded).
+   */
+  def askReset() = scheduler raise (new FreshRunReq)
+
+  /** Tells the compile server to shut down, and not to restart again */
+  def askShutdown() = scheduler raise ShutdownReq
+
+  /** Returns parse tree for source `source`. No symbols are entered. Syntax errors are reported.
+   *
+   *  This method is thread-safe and as such can safely run outside of the presentation
+   *  compiler thread.
+   */
+  def parseTree(source: SourceFile): Tree = {
+    newUnitParser(new CompilationUnit(source)).parse()
+  }
+
+  /** Asks for a computation to be done quickly on the presentation compiler thread */
+  def ask[A](op: () => A): A = if (self.onCompilerThread) op() else scheduler doQuickly op
+
+  /** Asks for a computation to be done on presentation compiler thread, returning
+   *  a response with the result or an exception
+   */
+  def askForResponse[A](op: () => A): Response[A] = {
+    val r = new Response[A]
+    if (self.onCompilerThread) {
+      try   { r set op() }
+      catch { case exc: Throwable => r raise exc }
+      r
+    } else {
+      val ir = scheduler askDoQuickly op
+      ir onComplete {
+        case Left(result) => r set result
+        case Right(exc)   => r raise exc
+      }
+      r
+    }
+  }
+
+  def onCompilerThread = Thread.currentThread == compileRunner
+
+  /** Info given for every member found by completion
+   */
+  abstract class Member {
+    val sym: Symbol
+    val tpe: Type
+    val accessible: Boolean
+    def implicitlyAdded = false
+
+    private def accessible_s = if (accessible) "" else "[inaccessible] "
+    def forceInfoString = {
+      definitions.fullyInitializeSymbol(sym)
+      definitions.fullyInitializeType(tpe)
+      infoString
+    }
+    def infoString = s"$accessible_s${sym.defStringSeenAs(tpe)}"
+  }
+
+  case class TypeMember(
+    sym: Symbol,
+    tpe: Type,
+    accessible: Boolean,
+    inherited: Boolean,
+    viaView: Symbol) extends Member {
+    override def implicitlyAdded = viaView != NoSymbol
+  }
+
+  case class ScopeMember(
+    sym: Symbol,
+    tpe: Type,
+    accessible: Boolean,
+    viaImport: Tree) extends Member
+
+  // items that get sent to scheduler
+
+  abstract class WorkItem extends (() => Unit) {
+    val onCompilerThread = self.onCompilerThread
+
+    /** Raise a MissingResponse if the work item carries a response. */
+    def raiseMissing(): Unit
+  }
+
+  case class ReloadItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem {
+    def apply() = reload(sources, response)
+    override def toString = "reload "+sources
+
+    def raiseMissing() =
+      response raise new MissingResponse
+  }
+
+  case class FilesDeletedItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem {
+    def apply() = filesDeleted(sources, response)
+    override def toString = "files deleted "+sources
+
+    def raiseMissing() =
+      response raise new MissingResponse
+  }
+
+  case class AskTypeAtItem(pos: Position, response: Response[Tree]) extends WorkItem {
+    def apply() = self.getTypedTreeAt(pos, response)
+    override def toString = "typeat "+pos.source+" "+pos.show
+
+    def raiseMissing() =
+      response raise new MissingResponse
+  }
+
+  case class AskTypeItem(source: SourceFile, forceReload: Boolean, response: Response[Tree]) extends WorkItem {
+    def apply() = self.getTypedTree(source, forceReload, response)
+    override def toString = "typecheck"
+
+    def raiseMissing() =
+      response raise new MissingResponse
+  }
+
+  case class AskTypeCompletionItem(pos: Position, response: Response[List[Member]]) extends WorkItem {
+    def apply() = self.getTypeCompletion(pos, response)
+    override def toString = "type completion "+pos.source+" "+pos.show
+
+    def raiseMissing() =
+      response raise new MissingResponse
+  }
+
+  case class AskScopeCompletionItem(pos: Position, response: Response[List[Member]]) extends WorkItem {
+    def apply() = self.getScopeCompletion(pos, response)
+    override def toString = "scope completion "+pos.source+" "+pos.show
+
+    def raiseMissing() =
+      response raise new MissingResponse
+  }
+
+  class AskToDoFirstItem(val source: SourceFile) extends WorkItem {
+    def apply() = {
+      moveToFront(List(source))
+      enableIgnoredFile(source.file)
+    }
+    override def toString = "dofirst "+source
+
+    def raiseMissing() = ()
+  }
+
+  case class AskLinkPosItem(sym: Symbol, source: SourceFile, response: Response[Position]) extends WorkItem {
+    def apply() = self.getLinkPos(sym, source, response)
+    override def toString = "linkpos "+sym+" in "+source
+
+    def raiseMissing() =
+      response raise new MissingResponse
+  }
+
+  case class AskDocCommentItem(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]) extends WorkItem {
+    def apply() = self.getDocComment(sym, source, site, fragments, response)
+    override def toString = "doc comment "+sym+" in "+source+" with fragments:"+fragments.mkString("(", ",", ")")
+
+    def raiseMissing() =
+      response raise new MissingResponse
+  }
+
+  case class AskLoadedTypedItem(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) extends WorkItem {
+    def apply() = self.waitLoadedTyped(source, response, keepLoaded, this.onCompilerThread)
+    override def toString = "wait loaded & typed "+source
+
+    def raiseMissing() =
+      response raise new MissingResponse
+  }
+
+  case class AskParsedEnteredItem(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) extends WorkItem {
+    def apply() = self.getParsedEntered(source, keepLoaded, response, this.onCompilerThread)
+    override def toString = "getParsedEntered "+source+", keepLoaded = "+keepLoaded
+
+    def raiseMissing() =
+      response raise new MissingResponse
+  }
+
+  /** A do-nothing work scheduler that responds immediately with MissingResponse.
+   *
+   *  Used during compiler shutdown.
+   */
+  class NoWorkScheduler extends WorkScheduler {
+
+    override def postWorkItem(action: Action) = synchronized {
+      action match {
+        case w: WorkItem => w.raiseMissing()
+        case e: EmptyAction => // do nothing
+        case _ => println("don't know what to do with this " + action.getClass)
+      }
+    }
+
+    override def doQuickly[A](op: () => A): A = {
+      throw new FailedInterrupt(new Exception("Posted a work item to a compiler that's shutting down"))
+    }
+
+    override def askDoQuickly[A](op: () => A): InterruptReq { type R = A } = {
+      val ir = new InterruptReq {
+        type R = A
+        val todo = () => throw new MissingResponse
+      }
+      ir.execute()
+      ir
+    }
+
+  }
+
+}
+
+  // ---------------- Interpreted exceptions -------------------
+
+/** Signals a request for a fresh background compiler run.
+ *  Note: The object has to stay top-level so that the PresentationCompilerThread may access it.
+ */
+class FreshRunReq extends ControlThrowable
+
+/** Signals a request for a shutdown of the presentation compiler.
+ *  Note: The object has to stay top-level so that the PresentationCompilerThread may access it.
+ */
+object ShutdownReq extends ControlThrowable
+
+class NoSuchUnitError(file: AbstractFile) extends Exception("no unit found for file "+file)
+
+class MissingResponse extends Exception("response missing")
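The Response-based protocol described in the header comment above is driven from a client thread roughly as follows. A minimal sketch, assuming an already-initialized scala.tools.nsc.interactive.Global passed in as `compiler`; the names `typeAtOffset`, `code` and `offset` are illustrative:

    import scala.reflect.internal.util.{BatchSourceFile, Position}
    import scala.tools.nsc.interactive.Response

    def typeAtOffset(compiler: scala.tools.nsc.interactive.Global,
                     code: String, offset: Int): Option[compiler.Tree] = {
      val source = new BatchSourceFile("<client>", code)

      val loaded = new Response[Unit]
      compiler.askReload(List(source), loaded)      // enqueues a ReloadItem
      loaded.get                                    // blocks until the unit is parsed

      val typed = new Response[compiler.Tree]
      compiler.askTypeAt(Position.offset(source, offset), typed)
      typed.get match {                             // Left(tree) on success, Right(exc) on failure
        case Left(tree) => Some(tree)
        case Right(_)   => None
      }
    }

For one-off computations that must run on the presentation compiler thread, askForResponse(() => ...) returns such a Response directly, and ask(() => ...) blocks until the result is available.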
diff --git a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
new file mode 100644
index 0000000..bf718c2
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
@@ -0,0 +1,177 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+import scala.collection.mutable.ArrayBuffer
+import scala.annotation.tailrec
+
+trait ContextTrees { self: Global =>
+
+  type Context = analyzer.Context
+  lazy val NoContext = analyzer.NoContext
+  type Contexts = ArrayBuffer[ContextTree]
+
+  /** A context tree contains contexts that are indexed by positions.
+   *  It satisfies the following properties:
+   *  1. All contexts come from compiling the same unit.
+   *  2. Child contexts have parent contexts in their outer chain.
+   *  3. The `pos` field of a context is the same as `context.tree.pos`, unless that
+   *     position is transparent. In that case, `pos` equals the position of
+   *     one of the solid descendants of `context.tree`.
+   *  4. Children of a context have non-overlapping increasing positions.
+   *  5. No context in the tree has a transparent position.
+   */
+  class ContextTree(val pos: Position, val context: Context, val children: ArrayBuffer[ContextTree]) {
+    def this(pos: Position, context: Context) = this(pos, context, new ArrayBuffer[ContextTree])
+    override def toString = "ContextTree("+pos+", "+children+")"
+  }
+
+  /** Returns the most precise context possible for the given `pos`.
+   *
+   *  It looks for the finest ContextTree containing `pos`, and then looks inside
+   *  this ContextTree for a child ContextTree located immediately before `pos`.
+   *  If such a child exists, returns its context, otherwise returns the context of
+   *  the parent ContextTree.
+   *
+   *  This is required to always return a context which contains all the imports
+   *  declared up to `pos` (see SI-7280 for a test case).
+   *
+   *  Can return None if `pos` is before any valid Scala code.
+   */
+  def locateContext(contexts: Contexts, pos: Position): Option[Context] = synchronized {
+    @tailrec
+    def locateFinestContextTree(context: ContextTree): ContextTree = {
+      if (context.pos includes pos) {
+        locateContextTree(context.children, pos) match {
+          case Some(x) =>
+            locateFinestContextTree(x)
+          case None =>
+            context
+        }
+      } else {
+        context
+      }
+    }
+    locateContextTree(contexts, pos) map locateFinestContextTree map (_.context)
+  }
+
+  /** Returns the ContextTree containing `pos`, or the ContextTree positioned just before `pos`,
+   *  or None if `pos` is located before all ContextTrees.
+   */ 
+  def locateContextTree(contexts: Contexts, pos: Position): Option[ContextTree] = {
+    if (contexts.isEmpty) None
+    else {
+      // binary search on contexts, loop invar: lo <= hi, recursion metric: `hi - lo`
+      @tailrec
+      def loop(lo: Int, hi: Int, previousSibling: Option[ContextTree]): Option[ContextTree] = {
+        // [SI-8239] enforce loop invariant & ensure recursion metric decreases monotonically on every recursion
+        if (lo > hi) previousSibling
+        else if (pos properlyPrecedes contexts(lo).pos)
+          previousSibling
+        else if (contexts(hi).pos properlyPrecedes pos)
+          Some(contexts(hi))
+        else {
+          val mid = (lo + hi) / 2
+          val midpos = contexts(mid).pos
+          if (midpos includes pos)
+            Some(contexts(mid))
+          else if (midpos properlyPrecedes pos)
+            // recursion metric: (hi - ((lo + hi)/2 + 1)) < (hi - lo)
+            // since (hi - ((lo + hi)/2 + 1)) - (hi - lo) = lo - ((lo + hi)/2 + 1) < 0
+            // since 2*lo - lo - hi - 2 = lo - hi - 2 < 0
+            // since lo < hi + 2
+            // can violate lo <= hi, hence the lo > hi check at the top [SI-8239]
+            loop(mid + 1, hi, Some(contexts(mid)))
+          else if (lo != hi) // avoid looping forever (lo == hi violates the recursion metric) [SI-8239]
+            // recursion metric: ((lo + hi)/2) - lo < (hi - lo)
+            // since ((lo + hi)/2) - lo - (hi - lo) = ((lo + hi)/2) - hi < 0
+            // since 2 * (((lo + hi)/2) - hi) = lo - hi < 0 since lo < hi
+            loop(lo, mid, previousSibling)
+          else previousSibling
+        }
+      }
+      loop(0, contexts.length - 1, None)
+    }
+  }
+
+  /** Insert a context at correct position into a buffer of context trees.
+   *  If the `context` has a transparent position, add it multiple times
+   *  at the positions of all its solid descendant trees.
+   */
+  def addContext(contexts: Contexts, context: Context): Unit = {
+    val cpos = context.tree.pos
+    if (cpos.isTransparent)
+      for (t <- context.tree.children flatMap solidDescendants)
+        addContext(contexts, context, t.pos)
+    else
+      addContext(contexts, context, cpos)
+  }
+
+  /** Insert a context with non-transparent position `cpos`
+   *  at correct position into a buffer of context trees.
+   */
+  def addContext(contexts: Contexts, context: Context, cpos: Position): Unit = synchronized {
+    try {
+      if (!cpos.isRange) {}
+      else if (contexts.isEmpty) contexts += new ContextTree(cpos, context)
+      else {
+        val hi = contexts.length - 1
+        if (contexts(hi).pos precedes cpos)
+          contexts += new ContextTree(cpos, context)
+        else if (contexts(hi).pos properlyIncludes cpos) // fast path w/o search
+          addContext(contexts(hi).children, context, cpos)
+        else if (cpos precedes contexts(0).pos)
+          new ContextTree(cpos, context) +=: contexts
+        else {
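+          // insertAt handles three cases: a node with the same range is replaced (keeping its children),
+          // a node whose position includes cpos recurses into its children, and a run of siblings
+          // included in cpos is wrapped as children of the new node.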
+          def insertAt(idx: Int): Boolean = {
+            val oldpos = contexts(idx).pos
+            if (oldpos sameRange cpos) {
+              contexts(idx) = new ContextTree(cpos, context, contexts(idx).children)
+              true
+            } else if (oldpos includes cpos) {
+              addContext(contexts(idx).children, context, cpos)
+              true
+            } else if (cpos includes oldpos) {
+              val start = contexts.indexWhere(cpos includes _.pos)
+              val last = contexts.lastIndexWhere(cpos includes _.pos)
+              contexts(start) = new ContextTree(cpos, context, contexts.slice(start, last + 1))
+              contexts.remove(start + 1, last - start)
+              true
+            } else false
+          }
+          def loop(lo: Int, hi: Int) {
+            if (hi - lo > 1) {
+              val mid = (lo + hi) / 2
+              val midpos = contexts(mid).pos
+              if (cpos precedes midpos)
+                loop(lo, mid)
+              else if (midpos precedes cpos)
+                loop(mid, hi)
+              else
+                addContext(contexts(mid).children, context, cpos)
+            } else if (!insertAt(lo) && !insertAt(hi)) {
+              val lopos = contexts(lo).pos
+              val hipos = contexts(hi).pos
+              if ((lopos precedes cpos) && (cpos precedes hipos))
+                contexts.insert(hi, new ContextTree(cpos, context))
+              else
+                inform("internal error? skewed positions: "+lopos+" !< "+cpos+" !< "+hipos)
+            }
+          }
+          loop(0, hi)
+        }
+      }
+    } catch {
+      case ex: Throwable =>
+        println(ex)
+        ex.printStackTrace()
+        println("failure inserting "+cpos+" into "+contexts+"/"+contexts(contexts.length - 1).pos+"/"+
+                (contexts(contexts.length - 1).pos includes cpos))
+        throw ex
+    }
+  }
+}
+
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
new file mode 100644
index 0000000..95027a2
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -0,0 +1,1272 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter }
+import scala.collection.mutable
+import mutable.{LinkedHashMap, SynchronizedMap, HashSet, SynchronizedSet}
+import scala.util.control.ControlThrowable
+import scala.tools.nsc.io.AbstractFile
+import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, NoPosition }
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.symtab._
+import scala.tools.nsc.typechecker.Analyzer
+import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
+import scala.annotation.{ elidable, tailrec }
+import scala.language.implicitConversions
+import scala.tools.nsc.typechecker.Typers
+import scala.util.control.Breaks._
+
+/**
+ * This trait allows the IDE to have an instance of the PC that
+ * does not clear the comments table at every new typer run (typer runs
+ * being frequent and close together in this context).
+ */
+
+trait CommentPreservingTypers extends Typers {
+  self: Analyzer =>
+
+  override def resetDocComments() = {}
+}
+
+trait InteractiveAnalyzer extends Analyzer {
+  val global : Global
+  import global._
+
+  override def newTyper(context: Context): InteractiveTyper = new Typer(context) with InteractiveTyper
+  override def newNamer(context: Context): InteractiveNamer = new Namer(context) with InteractiveNamer
+
+  trait InteractiveTyper extends Typer {
+    override def canAdaptConstantTypeToLiteral = false
+    override def canTranslateEmptyListToNil    = false
+    override def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree match {
+      case Select(_, _)             => treeCopy.Select(tree, qual, name)
+      case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
+    }
+  }
+
+  trait InteractiveNamer extends Namer {
+    override def saveDefaultGetter(meth: Symbol, default: Symbol) {
+      // save the default getters as attachments in the method symbol. if compiling the
+      // same local block several times (which can happen in interactive mode) we might
+      // otherwise not find the default symbol, because the second time the method
+      // symbol will be re-entered in the scope but the default parameter will not.
+      meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
+        case Some(att) => att.defaultGetters += default
+        case None      => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default))
+      }
+    }
+    // this logic is needed in case typer was interrupted half
+    // way through and then comes back to do the tree again. In
+    // that case the definitions that were already attributed as
+    // well as any default parameters of such methods need to be
+    // re-entered in the current scope.
+    override def enterExistingSym(sym: Symbol): Context = {
+      if (sym != null && sym.owner.isTerm) {
+        enterIfNotThere(sym)
+        if (sym.isLazy)
+          sym.lazyAccessor andAlso enterIfNotThere
+
+        for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
+          defAtt.defaultGetters foreach enterIfNotThere
+      }
+      super.enterExistingSym(sym)
+    }
+    override def enterIfNotThere(sym: Symbol) {
+      val scope = context.scope
+      @tailrec def search(e: ScopeEntry) {
+        if ((e eq null) || (e.owner ne scope))
+          scope enter sym
+        else if (e.sym ne sym)  // otherwise, aborts since we found sym
+          search(e.tail)
+      }
+      search(scope lookupEntry sym.name)
+    }
+  }
+}
+
+/** The main class of the presentation compiler in an interactive environment such as an IDE
+ */
+class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends {
+  /* Is the compiler initializing? Early def, so that the field is true during the
+   *  execution of the super constructor.
+   */
+  private var initializing = true
+  override val useOffsetPositions = false
+} with scala.tools.nsc.Global(settings, _reporter)
+  with CompilerControl
+  with ContextTrees
+  with RichCompilationUnits
+  with Picklers {
+
+  import definitions._
+
+  if (!settings.Ymacroexpand.isSetByUser)
+    settings.Ymacroexpand.value = settings.MacroExpand.Discard
+
+  val debugIDE: Boolean = settings.YpresentationDebug.value
+  val verboseIDE: Boolean = settings.YpresentationVerbose.value
+
+  private def replayName = settings.YpresentationReplay.value
+  private def logName = settings.YpresentationLog.value
+  private def afterTypeDelay = settings.YpresentationDelay.value
+  private final val SleepTime = 10
+
+  val log =
+    if (replayName != "") new Replayer(new FileReader(replayName))
+    else if (logName != "") new Logger(new FileWriter(logName))
+    else NullLogger
+
+  import log.logreplay
+  debugLog("logger: " + log.getClass + " writing to " + (new java.io.File(logName)).getAbsolutePath)
+  debugLog("classpath: "+classPath)
+
+  private var curTime = System.nanoTime
+  private def timeStep = {
+    val last = curTime
+    curTime = System.nanoTime
+    ", delay = " + (curTime - last) / 1000000 + "ms"
+  }
+
+  /** Print msg only when debugIDE is true. */
+  @inline final def debugLog(msg: => String) =
+    if (debugIDE) println("[%s] %s".format(projectName, msg))
+
+  /** Inform with msg only when verboseIDE is true. */
+  @inline final def informIDE(msg: => String) =
+    if (verboseIDE) println("[%s][%s]".format(projectName, msg))
+
+  // don't keep the original owner in presentation compiler runs
+  // (the map will grow indefinitely, and the only use case is the backend)
+  override protected def saveOriginalOwner(sym: Symbol) { }
+  override protected def originalEnclosingMethod(sym: Symbol) =
+    abort("originalOwner is not kept in presentation compiler runs.")
+
+  override def forInteractive = true
+  override protected def synchronizeNames = true
+
+  override def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
+    new InteractiveAsSeenFromMap(pre, clazz)
+
+  class InteractiveAsSeenFromMap(pre: Type, clazz: Symbol) extends AsSeenFromMap(pre, clazz) {
+    /** The method formerly known as 'instParamsRelaxed' goes here if it's still necessary,
+     *  which it is currently assumed not to be.
+     *
+     *  If it is, change AsSeenFromMap method correspondingTypeArgument to call an overridable
+     *  method rather than aborting in the failure case.
+     */
+  }
+
+  /** A map of all loaded files to the rich compilation units that correspond to them.
+   */
+  val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with
+                       SynchronizedMap[AbstractFile, RichCompilationUnit] {
+    override def put(key: AbstractFile, value: RichCompilationUnit) = {
+      val r = super.put(key, value)
+      if (r.isEmpty) debugLog("added unit for "+key)
+      r
+    }
+    override def remove(key: AbstractFile) = {
+      val r = super.remove(key)
+      if (r.nonEmpty) debugLog("removed unit for "+key)
+      r
+    }
+  }
+
+  /** A set containing all those files that need to be removed.
+   *  Units are removed by getUnit, typically once a unit has finished compiling.
+   */
+  protected val toBeRemoved: mutable.Set[AbstractFile] =
+    new HashSet[AbstractFile] with SynchronizedSet[AbstractFile]
+
+  /** A set containing all those files that need to be removed after a full background compiler run
+   */
+  protected val toBeRemovedAfterRun: mutable.Set[AbstractFile] =
+    new HashSet[AbstractFile] with SynchronizedSet[AbstractFile]
+
+  class ResponseMap extends mutable.HashMap[SourceFile, Set[Response[Tree]]] {
+    override def default(key: SourceFile): Set[Response[Tree]] = Set()
+    override def += (binding: (SourceFile, Set[Response[Tree]])) = {
+      assert(interruptsEnabled, "delayed operation within an ask")
+      super.+=(binding)
+    }
+  }
+
+  /** A map that associates with each abstract file the set of responses that are waiting
+   *  (via waitLoadedTyped) for the unit associated with the abstract file to be loaded and completely typechecked.
+   */
+  protected val waitLoadedTypeResponses = new ResponseMap
+
+  /** A map that associates with each abstract file the set of responses that are waiting
+   *  (via build) for the unit associated with the abstract file to be parsed and entered.
+   */
+  protected var getParsedEnteredResponses = new ResponseMap
+
+  private def cleanResponses(rmap: ResponseMap): Unit = {
+    for ((source, rs) <- rmap.toList) {
+      for (r <- rs) {
+        if (getUnit(source).isEmpty)
+          r raise new NoSuchUnitError(source.file)
+        if (r.isComplete)
+          rmap(source) -= r
+      }
+      if (rmap(source).isEmpty)
+        rmap -= source
+    }
+  }
+
+  override lazy val analyzer = new {
+    val global: Global.this.type = Global.this
+  } with InteractiveAnalyzer
+
+  private def cleanAllResponses() {
+    cleanResponses(waitLoadedTypeResponses)
+    cleanResponses(getParsedEnteredResponses)
+  }
+
+  private def checkNoOutstanding(rmap: ResponseMap): Unit =
+    for ((_, rs) <- rmap.toList; r <- rs) {
+      debugLog("ERROR: missing response, request will be discarded")
+      r raise new MissingResponse
+    }
+
+  def checkNoResponsesOutstanding() {
+    checkNoOutstanding(waitLoadedTypeResponses)
+    checkNoOutstanding(getParsedEnteredResponses)
+  }
+
+  /** The compilation unit corresponding to a source file
+   *  if it does not yet exist, create a new one atomically.
+   *  Note: We want to remove this.
+   */
+  protected[interactive] def getOrCreateUnitOf(source: SourceFile): RichCompilationUnit =
+    unitOfFile.getOrElse(source.file, { println("precondition violated: "+source+" is not loaded"); new Exception().printStackTrace(); new RichCompilationUnit(source) })
+
+  /** Work through toBeRemoved list to remove any units.
+   *  Then return the unit associated with the given source, if any.
+   */
+  protected[interactive] def getUnit(s: SourceFile): Option[RichCompilationUnit] = {
+    toBeRemoved.synchronized {
+      for (f <- toBeRemoved) {
+        informIDE("removed: "+s)
+        unitOfFile -= f
+        allSources = allSources filter (_.file != f)
+      }
+      toBeRemoved.clear()
+    }
+    unitOfFile get s.file
+  }
+
+  /** A list giving all files to be typechecked in the order they should be checked.
+   */
+  protected var allSources: List[SourceFile] = List()
+
+  private var lastException: Option[Throwable] = None
+
+  /** A list of files that crashed the compiler. They will be ignored during background
+   *  compilation until they are removed from this list.
+   */
+  private var ignoredFiles: Set[AbstractFile] = Set()
+
+  /** Flush the buffer of sources that are ignored during background compilation. */
+  def clearIgnoredFiles() {
+    ignoredFiles = Set()
+  }
+
+  /** Remove a crashed file from the ignore buffer. Background compilation will take it into account
+   *  and errors will be reported against it. */
+  def enableIgnoredFile(file: AbstractFile) {
+    ignoredFiles -= file
+    debugLog("Removed crashed file %s. Still in the ignored buffer: %s".format(file, ignoredFiles))
+  }
+
+  /** The currently active typer run */
+  private var currentTyperRun: TyperRun = _
+  newTyperRun()
+
+  /** Is a background compiler run needed?
+   *  Note: outOfDate is true as long as there is a background compile scheduled or going on.
+   */
+  private var outOfDate = false
+
+  def isOutOfDate: Boolean = outOfDate
+
+  def demandNewCompilerRun() = {
+    if (outOfDate) throw new FreshRunReq // cancel background compile
+    else outOfDate = true            // proceed normally and enable new background compile
+  }
+
+  protected[interactive] var minRunId = 1
+
+  private[interactive] var interruptsEnabled = true
+
+  private val NoResponse: Response[_] = new Response[Any]
+
+  /** The response that is currently pending, i.e. the compiler
+   *  is working on providing an answer for it.
+   */
+  private var pendingResponse: Response[_] = NoResponse
+
+  // ----------- Overriding hooks in nsc.Global -----------------------
+
+  /** Called from parser, which signals hereby that a method definition has been parsed.
+   */
+  override def signalParseProgress(pos: Position) {
+    // We only want to be interruptible when running on the PC thread.
+    if(onCompilerThread) {
+      checkForMoreWork(pos)
+    }
+  }
+
+  /** Called from typechecker, which signals hereby that a node has been completely typechecked.
+   *  If the node includes unit.targetPos, abandons run and returns newly attributed tree.
+   *  Otherwise, if there's some higher priority work to be done, also abandons run with a FreshRunReq.
+   *  @param  context  The context that typechecked the node
+   *  @param  old      The original node
+   *  @param  result   The transformed node
+   */
+  override def signalDone(context: Context, old: Tree, result: Tree) {
+    val canObserveTree = (
+         interruptsEnabled
+      && analyzer.lockedCount == 0
+      && !context.bufferErrors // SI-7558 look away during exploratory typing in "silent mode"
+    )
+    if (canObserveTree) {
+      if (context.unit.exists &&
+          result.pos.isOpaqueRange &&
+          (result.pos includes context.unit.targetPos)) {
+        var located = new TypedLocator(context.unit.targetPos) locateIn result
+        if (located == EmptyTree) {
+          println("something's wrong: no "+context.unit+" in "+result+result.pos)
+          located = result
+        }
+        throw new TyperResult(located)
+      }
+      else {
+        try {
+          checkForMoreWork(old.pos)
+        } catch {
+          case ex: ValidateException => // Ignore, this will have been reported elsewhere
+            debugLog("validate exception caught: "+ex)
+          case ex: Throwable =>
+            log.flush()
+            throw ex
+        }
+      }
+    }
+  }
+
+  /** Called from typechecker every time a context is created.
+   *  Registers the context in a context tree
+   */
+  override def registerContext(c: Context) = c.unit match {
+    case u: RichCompilationUnit => addContext(u.contexts, c)
+    case _ =>
+  }
+
+  /** The top level classes and objects currently seen in the presentation compiler
+   */
+  private val currentTopLevelSyms = new mutable.LinkedHashSet[Symbol]
+
+  /** The top level classes and objects no longer seen in the presentation compiler
+   */
+  val deletedTopLevelSyms = new mutable.LinkedHashSet[Symbol] with mutable.SynchronizedSet[Symbol]
+
+  /** Called from typechecker every time a top-level class or object is entered.
+   */
+  override def registerTopLevelSym(sym: Symbol) { currentTopLevelSyms += sym }
+
+  protected type SymbolLoadersInInteractive = GlobalSymbolLoaders {
+    val global: Global.this.type
+    val platform: Global.this.platform.type
+  }
+  /** Symbol loaders in the IDE parse all source files loaded from a package for
+   *  top-level idents. Therefore, we can detect top-level symbols that have a name
+   *  different from their source file
+   */
+  override lazy val loaders: SymbolLoadersInInteractive = new {
+    val global: Global.this.type = Global.this
+    val platform: Global.this.platform.type = Global.this.platform
+  } with BrowsingLoaders
+
+  // ----------------- Polling ---------------------------------------
+
+  case class WorkEvent(atNode: Int, atMillis: Long)
+
+  private var moreWorkAtNode: Int = -1
+  private var nodesSeen = 0
+  private var lastWasReload = false
+
+  /** The number of pollForWorks after which the presentation compiler yields.
+   *  Yielding improves responsiveness on systems with few cores because it
+   *  gives the UI thread a chance to get new tasks and interrupt the presentation
+   *  compiler with them.
+   */
+  private final val yieldPeriod = 10
+
+  /** Called from runner thread and signalDone:
+   *  Poll for interrupts and execute them immediately.
+   *  Then, poll for exceptions and execute them.
+   *  Then, poll for work reload/typedTreeAt/doFirst commands during background checking.
+   *  @param pos   The position of the tree if polling while typechecking, NoPosition otherwise
+   *
+   */
+  private[interactive] def pollForWork(pos: Position) {
+    var loop: Boolean = true
+    while (loop) {
+      breakable{
+        loop = false
+        if (!interruptsEnabled) return
+        if (pos == NoPosition || nodesSeen % yieldPeriod == 0)
+          Thread.`yield`()
+
+        def nodeWithWork(): Option[WorkEvent] =
+          if (scheduler.moreWork || pendingResponse.isCancelled) Some(new WorkEvent(nodesSeen, System.currentTimeMillis))
+          else None
+
+        nodesSeen += 1
+        logreplay("atnode", nodeWithWork()) match {
+          case Some(WorkEvent(id, _)) =>
+            debugLog("some work at node "+id+" current = "+nodesSeen)
+            //        assert(id >= nodesSeen)
+            moreWorkAtNode = id
+          case None =>
+        }
+
+        if (nodesSeen >= moreWorkAtNode) {
+
+          logreplay("asked", scheduler.pollInterrupt()) match {
+            case Some(ir) =>
+              try {
+                interruptsEnabled = false
+                debugLog("ask started"+timeStep)
+                ir.execute()
+              } finally {
+                debugLog("ask finished"+timeStep)
+                interruptsEnabled = true
+              }
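+              // after servicing the interrupt, restart polling from the top of the loop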
+              loop = true; break
+            case _ =>
+          }
+
+          if (logreplay("cancelled", pendingResponse.isCancelled)) {
+            throw CancelException
+          }
+
+          logreplay("exception thrown", scheduler.pollThrowable()) match {
+            case Some(ex: FreshRunReq) =>
+              newTyperRun()
+              minRunId = currentRunId
+              demandNewCompilerRun()
+
+            case Some(ShutdownReq) =>
+              scheduler.synchronized { // lock the work queue so no more items are posted while we clean it up
+                val units = scheduler.dequeueAll {
+                  case item: WorkItem => Some(item.raiseMissing())
+                  case _ => Some(())
+                }
+
+                // don't forget to service interrupt requests
+                scheduler.dequeueAllInterrupts(_.execute())
+
+                debugLog("ShutdownReq: cleaning work queue (%d items)".format(units.size))
+                debugLog("Cleanup up responses (%d loadedType pending, %d parsedEntered pending)"
+                         .format(waitLoadedTypeResponses.size, getParsedEnteredResponses.size))
+                checkNoResponsesOutstanding()
+
+                log.flush()
+                scheduler = new NoWorkScheduler
+                throw ShutdownReq
+              }
+
+            case Some(ex: Throwable) => log.flush(); throw ex
+            case _ =>
+          }
+
+          lastWasReload = false
+
+          logreplay("workitem", scheduler.nextWorkItem()) match {
+            case Some(action) =>
+              try {
+                debugLog("picked up work item at "+pos+": "+action+timeStep)
+                action()
+                debugLog("done with work item: "+action)
+              } finally {
+                debugLog("quitting work item: "+action+timeStep)
+              }
+            case None =>
+          }
+        }
+      }
+    }
+  }
+
+  protected def checkForMoreWork(pos: Position) {
+    val typerRun = currentTyperRun
+    pollForWork(pos)
+    if (typerRun != currentTyperRun) demandNewCompilerRun()
+  }
+
+  // ----------------- The Background Runner Thread -----------------------
+
+  private var threadId = 0
+
+  /** The current presentation compiler runner */
+  @volatile private[interactive] var compileRunner: Thread = newRunnerThread()
+
+  /** Check that the currently executing thread is the presentation compiler thread.
+   *
+   *  Compiler initialization may happen on a different thread (signalled by globalPhase being NoPhase)
+   */
+  @elidable(elidable.WARNING)
+  override def assertCorrectThread() {
+    assert(initializing || onCompilerThread,
+        "Race condition detected: You are running a presentation compiler method outside the PC thread.[phase: %s]".format(globalPhase) +
+        " Please file a ticket with the current stack trace at https://www.assembla.com/spaces/scala-ide/support/tickets")
+  }
+
+  /** Create a new presentation compiler runner.
+   */
+  private def newRunnerThread(): Thread = {
+    threadId += 1
+    compileRunner = new PresentationCompilerThread(this, projectName)
+    compileRunner.setDaemon(true)
+    compileRunner
+  }
+
+  private def ensureUpToDate(unit: RichCompilationUnit) =
+    if (!unit.isUpToDate && unit.status != JustParsed) reset(unit) // reparse previously typechecked units.
+
+  /** Compile all loaded source files in the order given by `allSources`.
+   */
+  private[interactive] final def backgroundCompile() {
+    informIDE("Starting new presentation compiler type checking pass")
+    reporter.reset()
+
+    // remove any files that are no longer maintained by the presentation compiler (i.e. closed)
+    allSources = allSources filter (s => unitOfFile contains (s.file))
+
+    // ensure all loaded units are parsed
+    for (s <- allSources; unit <- getUnit(s)) {
+      // checkForMoreWork(NoPosition)  // disabled, as any work done here would be in an inconsistent state
+      ensureUpToDate(unit)
+      parseAndEnter(unit)
+      serviceParsedEntered()
+    }
+
+    // sleep window
+    if (afterTypeDelay > 0 && lastWasReload) {
+      val limit = System.currentTimeMillis() + afterTypeDelay
+      while (System.currentTimeMillis() < limit) {
+        Thread.sleep(SleepTime)
+        checkForMoreWork(NoPosition)
+      }
+    }
+
+    // ensure all loaded units are typechecked
+    for (s <- allSources; if !ignoredFiles(s.file); unit <- getUnit(s)) {
+      try {
+        if (!unit.isUpToDate)
+          if (unit.problems.isEmpty || !settings.YpresentationStrict)
+            typeCheck(unit)
+          else debugLog("%s has syntax errors. Skipped typechecking".format(unit))
+        else debugLog("already up to date: "+unit)
+        for (r <- waitLoadedTypeResponses(unit.source))
+          r set unit.body
+        serviceParsedEntered()
+      } catch {
+        case ex: FreshRunReq => throw ex           // propagate a new run request
+        case ShutdownReq     => throw ShutdownReq  // propagate a shutdown request
+        case ex: ControlThrowable => throw ex
+        case ex: Throwable =>
+          println("[%s]: exception during background compile: ".format(unit.source) + ex)
+          ex.printStackTrace()
+          for (r <- waitLoadedTypeResponses(unit.source)) {
+            r.raise(ex)
+          }
+          serviceParsedEntered()
+
+          lastException = Some(ex)
+          ignoredFiles += unit.source.file
+          println("[%s] marking unit as crashed (crashedFiles: %s)".format(unit, ignoredFiles))
+
+          reporter.error(unit.body.pos, "Presentation compiler crashed while type checking this file: %s".format(ex.toString()))
+      }
+    }
+
+    // move units removable after this run to the "to-be-removed" buffer
+    toBeRemoved ++= toBeRemovedAfterRun
+
+    // clean out stale waiting responses
+    cleanAllResponses()
+
+    // wind down
+    if (waitLoadedTypeResponses.nonEmpty || getParsedEnteredResponses.nonEmpty) {
+      // need another cycle to treat those
+      newTyperRun()
+      backgroundCompile()
+    } else {
+      outOfDate = false
+      informIDE("Everything is now up to date")
+    }
+  }
+
+  /** Service all pending getParsedEntered requests
+   */
+  private def serviceParsedEntered() {
+    var atOldRun = true
+    for ((source, rs) <- getParsedEnteredResponses; r <- rs) {
+      if (atOldRun) { newTyperRun(); atOldRun = false }
+      getParsedEnteredNow(source, r)
+    }
+    getParsedEnteredResponses.clear()
+  }
+
+  /** Reset unit to unloaded state */
+  private def reset(unit: RichCompilationUnit): Unit = {
+    unit.depends.clear()
+    unit.defined.clear()
+    unit.synthetics.clear()
+    unit.toCheck.clear()
+    unit.checkedFeatures = Set()
+    unit.targetPos = NoPosition
+    unit.contexts.clear()
+    unit.problems.clear()
+    unit.body = EmptyTree
+    unit.status = NotLoaded
+    unit.transformed.clear()
+  }
+
+  /** Parse unit and create a name index, unless this has already been done before */
+  private def parseAndEnter(unit: RichCompilationUnit): Unit =
+    if (unit.status == NotLoaded) {
+      debugLog("parsing: "+unit)
+      currentTyperRun.compileLate(unit)
+      if (debugIDE && !reporter.hasErrors) validatePositions(unit.body)
+      if (!unit.isJava) syncTopLevelSyms(unit)
+      unit.status = JustParsed
+    }
+
+  /** Make sure unit is typechecked
+   */
+  private def typeCheck(unit: RichCompilationUnit) {
+    debugLog("type checking: "+unit)
+    parseAndEnter(unit)
+    unit.status = PartiallyChecked
+    currentTyperRun.typeCheck(unit)
+    unit.lastBody = unit.body
+    unit.status = currentRunId
+  }
+
+  /** Update deleted and current top-level symbols sets */
+  def syncTopLevelSyms(unit: RichCompilationUnit) {
+    val deleted = currentTopLevelSyms filter { sym =>
+      /** We sync after namer phase and it resets all the top-level symbols
+       *  that survive the new parsing round to NoPeriod.
+       */
+      sym.sourceFile == unit.source.file &&
+      sym.validTo != NoPeriod &&
+      runId(sym.validTo) < currentRunId
+    }
+    for (d <- deleted) {
+      d.owner.info.decls unlink d
+      deletedTopLevelSyms += d
+      currentTopLevelSyms -= d
+    }
+  }
+
+  /** Move list of files to front of allSources */
+  def moveToFront(fs: List[SourceFile]) {
+    allSources = fs ::: (allSources diff fs)
+  }
+
+  // ----------------- Implementations of client commands -----------------------
+
+  def respond[T](result: Response[T])(op: => T): Unit =
+    respondGradually(result)(Stream(op))
+
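+  // Evaluates the stream lazily, publishing each intermediate element provisionally;
+  // only the final element completes the response.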
+  def respondGradually[T](response: Response[T])(op: => Stream[T]): Unit = {
+    val prevResponse = pendingResponse
+    try {
+      pendingResponse = response
+      if (!response.isCancelled) {
+        var results = op
+        while (!response.isCancelled && results.nonEmpty) {
+          val result = results.head
+          results = results.tail
+          if (results.isEmpty) {
+            response set result
+            debugLog("responded"+timeStep)
+          } else response setProvisionally result
+        }
+      }
+    } catch {
+      case CancelException =>
+        debugLog("cancelled")
+      case ex: FreshRunReq =>
+        if (debugIDE) {
+          println("FreshRunReq thrown during response")
+          ex.printStackTrace()
+        }
+        response raise ex
+        throw ex
+
+      case ex @ ShutdownReq =>
+        if (debugIDE) {
+          println("ShutdownReq thrown during response")
+          ex.printStackTrace()
+        }
+        response raise ex
+        throw ex
+
+      case ex: Throwable =>
+        if (debugIDE) {
+          println("exception thrown during response: "+ex)
+          ex.printStackTrace()
+        }
+        response raise ex
+    } finally {
+      pendingResponse = prevResponse
+    }
+  }
+
+  private def reloadSource(source: SourceFile) {
+    val unit = new RichCompilationUnit(source)
+    unitOfFile(source.file) = unit
+    toBeRemoved -= source.file
+    toBeRemovedAfterRun -= source.file
+    reset(unit)
+    //parseAndEnter(unit)
+  }
+
+  /** Make sure a set of compilation units is loaded and parsed */
+  private def reloadSources(sources: List[SourceFile]) {
+    newTyperRun()
+    minRunId = currentRunId
+    sources foreach reloadSource
+    moveToFront(sources)
+  }
+
+  /** Make sure a set of compilation units is loaded and parsed */
+  private[interactive] def reload(sources: List[SourceFile], response: Response[Unit]) {
+    informIDE("reload: " + sources)
+    lastWasReload = true
+    respond(response)(reloadSources(sources))
+    demandNewCompilerRun()
+  }
+
+  private[interactive] def filesDeleted(sources: List[SourceFile], response: Response[Unit]) {
+    informIDE("files deleted: " + sources)
+    val deletedFiles = sources.map(_.file).toSet
+    val deletedSyms = currentTopLevelSyms filter {sym => deletedFiles contains sym.sourceFile}
+    for (d <- deletedSyms) {
+      d.owner.info.decls unlink d
+      deletedTopLevelSyms += d
+      currentTopLevelSyms -= d
+    }
+    sources foreach (removeUnitOf(_))
+    minRunId = currentRunId
+    respond(response)(())
+    demandNewCompilerRun()
+  }
+
+  /** Arrange for unit to be removed after run, to give a chance to typecheck the unit fully.
+   *  If we do just removeUnit, some problems with default parameters can ensue.
+   *  Calls to this method could probably be replaced by removeUnit once default parameters are handled more robustly.
+   */
+  private def afterRunRemoveUnitsOf(sources: List[SourceFile]) {
+    toBeRemovedAfterRun ++= sources map (_.file)
+  }
+
+  /** A fully attributed tree located at position `pos` */
+  private def typedTreeAt(pos: Position): Tree = getUnit(pos.source) match {
+    case None =>
+      reloadSources(List(pos.source))
+      try typedTreeAt(pos)
+      finally afterRunRemoveUnitsOf(List(pos.source))
+    case Some(unit) =>
+      informIDE("typedTreeAt " + pos)
+      parseAndEnter(unit)
+      val tree = locateTree(pos)
+      debugLog("at pos "+pos+" was found: "+tree.getClass+" "+tree.pos.show)
+      tree match {
+        case Import(expr, _) =>
+          debugLog("import found"+expr.tpe+(if (expr.tpe == null) "" else " "+expr.tpe.members))
+        case _ =>
+      }
+      if (stabilizedType(tree) ne null) {
+        debugLog("already attributed: "+tree.symbol+" "+tree.tpe)
+        tree
+      } else {
+        unit.targetPos = pos
+        try {
+          debugLog("starting targeted type check")
+          typeCheck(unit)
+//          println("tree not found at "+pos)
+          EmptyTree
+        } catch {
+          case ex: TyperResult => new Locator(pos) locateIn ex.tree
+        } finally {
+          unit.targetPos = NoPosition
+        }
+      }
+  }
+
+  /** A fully attributed tree corresponding to the entire compilation unit  */
+  private[interactive] def typedTree(source: SourceFile, forceReload: Boolean): Tree = {
+    informIDE("typedTree " + source + " forceReload: " + forceReload)
+    val unit = getOrCreateUnitOf(source)
+    if (forceReload) reset(unit)
+    parseAndEnter(unit)
+    if (unit.status <= PartiallyChecked) typeCheck(unit)
+    unit.body
+  }
+
+  /** Set sync var `response` to a fully attributed tree located at position `pos`  */
+  private[interactive] def getTypedTreeAt(pos: Position, response: Response[Tree]) {
+    respond(response)(typedTreeAt(pos))
+  }
+
+  /** Set sync var `response` to a fully attributed tree corresponding to the
+   *  entire compilation unit  */
+  private[interactive] def getTypedTree(source: SourceFile, forceReload: Boolean, response: Response[Tree]) {
+    respond(response)(typedTree(source, forceReload))
+  }
+
+  private def withTempUnits[T](sources: List[SourceFile])(f: (SourceFile => RichCompilationUnit) => T): T = {
+    val unitOfSrc: SourceFile => RichCompilationUnit = src => unitOfFile(src.file)
+    sources filterNot (getUnit(_).isDefined) match {
+      case Nil =>
+        f(unitOfSrc)
+      case unknown =>
+        reloadSources(unknown)
+        try {
+          f(unitOfSrc)
+        } finally
+          afterRunRemoveUnitsOf(unknown)
+    }
+  }
+
+  private def withTempUnit[T](source: SourceFile)(f: RichCompilationUnit => T): T =
+    withTempUnits(List(source)){ srcToUnit =>
+      f(srcToUnit(source))
+    }
+
+  /** Find a 'mirror' of symbol `sym` in unit `unit`. Pre: `unit` is loaded. */
+  private def findMirrorSymbol(sym: Symbol, unit: RichCompilationUnit): Symbol = {
+    val originalTypeParams = sym.owner.typeParams
+    ensureUpToDate(unit)
+    parseAndEnter(unit)
+    val pre = adaptToNewRunMap(ThisType(sym.owner))
+    val rawsym = pre.typeSymbol.info.decl(sym.name)
+    val newsym = rawsym filter { alt =>
+      sym.isType || {
+        try {
+          val tp1 = pre.memberType(alt) onTypeError NoType
+          val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams)
+          matchesType(tp1, tp2, alwaysMatchSimple = false) || {
+            debugLog(s"findMirrorSymbol matchesType($tp1, $tp2) failed")
+            val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams)
+            matchesType(tp1, tp3, alwaysMatchSimple = false) || {
+              debugLog(s"findMirrorSymbol fallback matchesType($tp1, $tp3) failed")
+              false
+            }
+          }
+        }
+        catch {
+          case ex: ControlThrowable => throw ex
+          case ex: Throwable =>
+            debugLog("error in findMirrorSymbol: " + ex)
+            ex.printStackTrace()
+            false
+        }
+      }
+    }
+    if (newsym == NoSymbol) {
+      if (rawsym.exists && !rawsym.isOverloaded) rawsym
+      else {
+        debugLog("mirror not found " + sym + " " + unit.source + " " + pre)
+        NoSymbol
+      }
+    } else if (newsym.isOverloaded) {
+      settings.uniqid.value = true
+      debugLog("mirror ambiguous " + sym + " " + unit.source + " " + pre + " " + newsym.alternatives)
+      NoSymbol
+    } else {
+      debugLog("mirror found for " + newsym + ": " + newsym.pos)
+      newsym
+    }
+  }
+
+  /** Implements CompilerControl.askLinkPos */
+  private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) {
+    informIDE("getLinkPos "+sym+" "+source)
+    respond(response) {
+      if (sym.owner.isClass) {
+        withTempUnit(source){ u =>
+          findMirrorSymbol(sym, u).pos
+        }
+      } else {
+        debugLog("link not in class "+sym+" "+source+" "+sym.owner)
+        NoPosition
+      }
+    }
+  }
+
+  private def forceDocComment(sym: Symbol, unit: RichCompilationUnit) {
+    unit.body foreachPartial {
+      case DocDef(comment, defn) if defn.symbol == sym =>
+        fillDocComment(defn.symbol, comment)
+        EmptyTree
+      case _: ValOrDefDef =>
+        EmptyTree
+    }
+  }
+
+  /** Implements CompilerControl.askDocComment */
+  private[interactive] def getDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)],
+                                         response: Response[(String, String, Position)]) {
+    informIDE(s"getDocComment $sym at $source, site $site")
+    respond(response) {
+      withTempUnits(fragments.unzip._2){ units =>
+        for((sym, src) <- fragments) {
+          val mirror = findMirrorSymbol(sym, units(src))
+          if (mirror ne NoSymbol) forceDocComment(mirror, units(src))
+        }
+        val mirror = findMirrorSymbol(sym, units(source))
+        if (mirror eq NoSymbol)
+          ("", "", NoPosition)
+        else {
+          (expandedDocComment(mirror, site), rawDocComment(mirror), docCommentPos(mirror))
+        }
+      }
+    }
+    // New typer run to remove temp units and drop per-run caches that might refer to symbols entered from temp units.
+    newTyperRun()
+  }
+
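+  // Returns a singleton type when `tree` is a stable path (so member queries see the most
+  // precise members); otherwise falls back to the tree's type.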
+  def stabilizedType(tree: Tree): Type = tree match {
+    case Ident(_) if treeInfo.admitsTypeSelection(tree) =>
+      singleType(NoPrefix, tree.symbol)
+    case Select(qual, _) if treeInfo.admitsTypeSelection(tree) =>
+      singleType(qual.tpe, tree.symbol)
+    case Import(expr, selectors) =>
+      tree.symbol.info match {
+        case ImportType(expr) => expr match {
+          case s @ Select(qual, name) if treeInfo.admitsTypeSelection(expr) => singleType(qual.tpe, s.symbol)
+          case i : Ident => i.tpe
+          case _ => tree.tpe
+        }
+        case _ => tree.tpe
+      }
+
+    case _ => tree.tpe
+  }
+
+  import analyzer.{SearchResult, ImplicitSearch}
+
+  private[interactive] def getScopeCompletion(pos: Position, response: Response[List[Member]]) {
+    informIDE("getScopeCompletion" + pos)
+    respond(response) { scopeMembers(pos) }
+  }
+
+  private class Members[M <: Member] extends LinkedHashMap[Name, Set[M]] {
+    override def default(key: Name) = Set()
+
+    private def matching(sym: Symbol, symtpe: Type, ms: Set[M]): Option[M] = ms.find { m =>
+      (m.sym.name == sym.name) && (m.sym.isType || (m.tpe matches symtpe))
+    }
+
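+    // Replace an already-stored (param)accessor member with the new non-accessor symbol,
+    // unless the new symbol was added implicitly and the stored one was not.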
+    private def keepSecond(m: M, sym: Symbol, implicitlyAdded: Boolean): Boolean =
+      m.sym.hasFlag(ACCESSOR | PARAMACCESSOR) &&
+      !sym.hasFlag(ACCESSOR | PARAMACCESSOR) &&
+      (!implicitlyAdded || m.implicitlyAdded)
+
+    def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) {
+      if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) {
+        add(sym.accessed, pre, implicitlyAdded)(toMember)
+      } else if (!sym.name.decodedName.containsName("$") && !sym.isSynthetic && sym.hasRawInfo) {
+        val symtpe = pre.memberType(sym) onTypeError ErrorType
+        matching(sym, symtpe, this(sym.name)) match {
+          case Some(m) =>
+            if (keepSecond(m, sym, implicitlyAdded)) {
+              //print(" -+ "+sym.name)
+              this(sym.name) = this(sym.name) - m + toMember(sym, symtpe)
+            }
+          case None =>
+            //print(" + "+sym.name)
+            this(sym.name) = this(sym.name) + toMember(sym, symtpe)
+        }
+      }
+    }
+
+    def addNonShadowed(other: Members[M]) = {
+      for ((name, ms) <- other)
+        if (ms.nonEmpty && this(name).isEmpty) this(name) = ms
+    }
+
+    def allMembers: List[M] = values.toList.flatten
+  }
+
+  /** Return all members visible without prefix in context enclosing `pos`. */
+  private def scopeMembers(pos: Position): List[ScopeMember] = {
+    typedTreeAt(pos) // to make sure context is entered
+    val context = doLocateContext(pos)
+    val locals = new Members[ScopeMember]
+    val enclosing = new Members[ScopeMember]
+    def addScopeMember(sym: Symbol, pre: Type, viaImport: Tree) =
+      locals.add(sym, pre, implicitlyAdded = false) { (s, st) =>
+        // imported val and var are always marked as inaccessible, but they could be accessed through their getters. SI-7995
+        if (s.hasGetter)
+          new ScopeMember(s, st, context.isAccessible(s.getter, pre, superAccess = false), viaImport)
+        else
+          new ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport)
+      }
+    def localsToEnclosing() = {
+      enclosing.addNonShadowed(locals)
+      locals.clear()
+    }
+    //print("add scope members")
+    var cx = context
+    while (cx != NoContext) {
+      for (sym <- cx.scope)
+        addScopeMember(sym, NoPrefix, EmptyTree)
+      localsToEnclosing()
+      if (cx == cx.enclClass) {
+        val pre = cx.prefix
+        for (sym <- pre.members)
+          addScopeMember(sym, pre, EmptyTree)
+        localsToEnclosing()
+      }
+      cx = cx.outer
+    }
+    //print("\nadd imported members")
+    for (imp <- context.imports) {
+      val pre = imp.qual.tpe
+      for (sym <- imp.allImportedSymbols)
+        addScopeMember(sym, pre, imp.qual)
+      localsToEnclosing()
+    }
+    // println()
+    val result = enclosing.allMembers
+//    if (debugIDE) for (m <- result) println(m)
+    result
+  }
+
+  private[interactive] def getTypeCompletion(pos: Position, response: Response[List[Member]]) {
+    informIDE("getTypeCompletion " + pos)
+    respondGradually(response) { typeMembers(pos) }
+    //if (debugIDE) typeMembers(pos)
+  }
+
+  private def typeMembers(pos: Position): Stream[List[TypeMember]] = {
+    // Choosing which tree will tell us the type members at the given position:
+    //   If pos leads to an Import, type the expr
+    //   If pos leads to a Select, type the qualifier as long as it is not erroneous
+    //     (this implies discarding the possibly incomplete name in the Select node)
+    //   Otherwise, type the tree found at 'pos' directly.
+    val tree0 = typedTreeAt(pos) match {
+      case sel @ Select(qual, _) if sel.tpe == ErrorType => qual
+      case Import(expr, _)                               => expr
+      case t                                             => t
+    }
+    val context = doLocateContext(pos)
+
+    val shouldTypeQualifier = tree0.tpe match {
+      case null           => true
+      case mt: MethodType => mt.isImplicit
+      case _              => false
+    }
+
+    // TODO: guard with try/catch to deal with ill-typed qualifiers.
+    val tree = if (shouldTypeQualifier) analyzer newTyper context typedQualifier tree0 else tree0
+
+    debugLog("typeMembers at "+tree+" "+tree.tpe)
+    val superAccess = tree.isInstanceOf[Super]
+    val members = new Members[TypeMember]
+
+    def addTypeMember(sym: Symbol, pre: Type, inherited: Boolean, viaView: Symbol) = {
+      val implicitlyAdded = viaView != NoSymbol
+      members.add(sym, pre, implicitlyAdded) { (s, st) =>
+        new TypeMember(s, st,
+          context.isAccessible(if (s.hasGetter) s.getter(s.owner) else s, pre, superAccess && !implicitlyAdded),
+          inherited,
+          viaView)
+      }
+    }
+
+    /** Create a function application of a given view function to `tree` and typecheck it.
+     */
+    def viewApply(view: SearchResult): Tree = {
+      assert(view.tree != EmptyTree)
+      analyzer.newTyper(context.makeImplicit(reportAmbiguousErrors = false))
+        .typed(Apply(view.tree, List(tree)) setPos tree.pos)
+        .onTypeError(EmptyTree)
+    }
+
+    val pre = stabilizedType(tree)
+
+    val ownerTpe = tree.tpe match {
+      case ImportType(expr) => expr.tpe
+      case null => pre
+      case MethodType(List(), rtpe) => rtpe
+      case _ => tree.tpe
+    }
+
+    //print("add members")
+    for (sym <- ownerTpe.members)
+      addTypeMember(sym, pre, sym.owner != ownerTpe.typeSymbol, NoSymbol)
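+    // Return the directly visible members first; the second stream element lazily adds
+    // members reachable through implicit views (enrichment).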
+    members.allMembers #:: {
+      //print("\nadd enrichment")
+      val applicableViews: List[SearchResult] =
+        if (ownerTpe.isErroneous) List()
+        else new ImplicitSearch(
+          tree, functionType(List(ownerTpe), AnyTpe), isView = true,
+          context0 = context.makeImplicit(reportAmbiguousErrors = false)).allImplicits
+      for (view <- applicableViews) {
+        val vtree = viewApply(view)
+        val vpre = stabilizedType(vtree)
+        for (sym <- vtree.tpe.members) {
+          addTypeMember(sym, vpre, inherited = false, view.tree.symbol)
+        }
+      }
+      //println()
+      Stream(members.allMembers)
+    }
+  }
+
+  /** Implements CompilerControl.askLoadedTyped */
+  private[interactive] def waitLoadedTyped(source: SourceFile, response: Response[Tree], keepLoaded: Boolean = false, onSameThread: Boolean = true) {
+    getUnit(source) match {
+      case Some(unit) =>
+        if (unit.isUpToDate) {
+          debugLog("already typed")
+          response set unit.body
+        } else if (ignoredFiles(source.file)) {
+          response.raise(lastException.getOrElse(CancelException))
+        } else if (onSameThread) {
+          getTypedTree(source, forceReload = false, response)
+        } else {
+          debugLog("wait for later")
+          outOfDate = true
+          waitLoadedTypeResponses(source) += response
+        }
+      case None =>
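+        // Load and parse the unit first, then retry; drop it again afterwards unless keepLoaded is set.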
+        debugLog("load unit and type")
+        try reloadSources(List(source))
+        finally {
+          waitLoadedTyped(source, response, onSameThread)
+          if (!keepLoaded) removeUnitOf(source)
+        }
+    }
+  }
+
+  /** Implements CompilerControl.askParsedEntered */
+  private[interactive] def getParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree], onSameThread: Boolean = true) {
+    getUnit(source) match {
+      case Some(unit) =>
+        getParsedEnteredNow(source, response)
+      case None =>
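+        // Not loaded yet: parse the source now when that is possible on this thread;
+        // otherwise queue the response so it is serviced by the next background run.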
+        try {
+          if (keepLoaded || outOfDate && onSameThread)
+            reloadSources(List(source))
+        } finally {
+          if (keepLoaded || !outOfDate || onSameThread)
+            getParsedEnteredNow(source, response)
+          else
+            getParsedEnteredResponses(source) += response
+        }
+    }
+  }
+
+  /** Parses and enters given source file, storing the parse tree in response */
+  private def getParsedEnteredNow(source: SourceFile, response: Response[Tree]) {
+    respond(response) {
+      onUnitOf(source) { unit =>
+        parseAndEnter(unit)
+        unit.body
+      }
+    }
+  }
+
+  // ---------------- Helper classes ---------------------------
+
+  /** The typer run */
+  class TyperRun extends Run {
+    // units is always empty
+
+    /** canRedefine is used to detect double declarations of classes and objects
+     *  in multiple source files.
+     *  Since the IDE rechecks units several times in the same run, these tests
+     *  are disabled by always returning true here.
+     */
+    override def canRedefine(sym: Symbol) = true
+
+    def typeCheck(unit: CompilationUnit): Unit = {
+      applyPhase(typerPhase, unit)
+    }
+
+    /** Apply a phase to a compilation unit.
+     */
+    private def applyPhase(phase: Phase, unit: CompilationUnit) {
+      enteringPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit }
+    }
+  }
+
+  def newTyperRun() {
+    currentTyperRun = new TyperRun
+  }
+
+  class TyperResult(val tree: Tree) extends ControlThrowable
+
+  assert(globalPhase.id == 0)
+
+  implicit def addOnTypeError[T](x: => T): OnTypeError[T] = new OnTypeError(x)
+
+  // OnTypeError should still catch TypeError because of cyclic references,
+  // but DivergentImplicit shouldn't leak anymore here
+  class OnTypeError[T](op: => T) {
+    def onTypeError(alt: => T) = try {
+      op
+    } catch {
+      case ex: TypeError =>
+        debugLog("type error caught: "+ex)
+        alt
+    }
+  }
+
+  // We need to force a number of symbols that might be touched by a parser.
+  // Otherwise the thread-safety property of the parseTree method would be violated.
+  protected def forceSymbolsUsedByParser(): Unit = {
+    val symbols =
+      Set(UnitClass, BooleanClass, ByteClass,
+          ShortClass, IntClass, LongClass, FloatClass,
+          DoubleClass, NilModule, ListClass) ++ TupleClass.seq
+    symbols.foreach(_.initialize)
+  }
+
+  forceSymbolsUsedByParser()
+
+  /** Start the compiler background thread and turn on thread confinement checks */
+  private def finishInitialization(): Unit = {
+    // this flag turns on `assertCorrectThread` checks
+    initializing = false
+
+    // Only start the thread if initialization was successful. A crash while forcing symbols (for example
+    // if the Scala library is not on the classpath) can leave running threads behind. See Scala IDE #1002016
+    compileRunner.start()
+  }
+
+  /** The compiler has been initialized. Constructors are evaluated in textual order:
+   *  if we reached here, all super constructors and the primary constructor
+   *  have been executed.
+   */
+  finishInitialization()
+}
+
+object CancelException extends Exception
diff --git a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
rename to src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala
diff --git a/src/interactive/scala/tools/nsc/interactive/Lexer.scala b/src/interactive/scala/tools/nsc/interactive/Lexer.scala
new file mode 100644
index 0000000..82e8de3
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Lexer.scala
@@ -0,0 +1,299 @@
+package scala.tools.nsc.interactive
+
+import java.io.Reader
+
+/** Companion object of class `Lexer` which defines tokens and some utility concepts
+ *  used for tokens and lexers
+ */
+object Lexer {
+
+  /** An exception raised if an input does not correspond to what's expected
+   *  @param   rdr   the lexer from which the bad input is read
+   *  @param   msg   the error message
+   */
+  class MalformedInput(val rdr: Lexer, val msg: String) extends Exception("Malformed JSON input at "+rdr.tokenPos+": "+msg)
+
+  /** The class of tokens, i.e. descriptions of input words (or: lexemes).
+   *  @param str    the characters making up this token
+   */
+  class Token(val str: String) {
+    override def toString = str
+  }
+
+  /** A subclass of `Token` representing single-character delimiters
+   *  @param char the delimiter character making up this token
+   */
+  case class Delim(char: Char) extends Token(s"'$char'")
+
+  /** A subclass of token representing integer literals */
+  case class IntLit(override val str: String) extends Token(str)
+
+  /** A subclass of token representing floating point literals */
+  case class FloatLit(override val str: String) extends Token(str)
+
+  /** A subclass of token representing string literals */
+  case class StringLit(override val str: String) extends Token(str) {
+    override def toString = quoted(str)
+  }
+
+  /** The `true` token */
+  val TrueLit = new Token("true")
+
+  /** The `false` token */
+  val FalseLit = new Token("false")
+
+  /** The `null` token */
+  val NullLit = new Token("null")
+
+  /** The '`(`' token */
+  val LParen = new Delim('(')
+
+  /** The '`)`' token */
+  val RParen = new Delim(')')
+
+  /** The '`{`' token */
+  val LBrace = new Delim('{')
+
+  /** The '`}`' token */
+  val RBrace = new Delim('}')
+
+  /** The '`[`' token */
+  val LBracket = new Delim('[')
+
+  /** The '`]`' token */
+  val RBracket = new Delim(']')
+
+  /** The '`,`' token */
+  val Comma = new Delim(',')
+
+  /** The '`:`' token */
+  val Colon = new Delim(':')
+
+  /** The token representing end of input */
+  val EOF = new Token("<end of input>")
+
+  private def toUDigit(ch: Int): Char = {
+    val d = ch & 0xF
+    (if (d < 10) d + '0' else d - 10 + 'A').toChar
+  }
+
+  private def addToStr(buf: StringBuilder, ch: Char) {
+    ch match {
+      case '"' => buf ++= "\\\""
+      case '\b' => buf ++= "\\b"
+      case '\f' => buf ++= "\\f"
+      case '\n' => buf ++= "\\n"
+      case '\r' => buf ++= "\\r"
+      case '\t' => buf ++= "\\t"
+      case '\\' => buf ++= "\\\\"
+      case _ =>
+        if (' ' <= ch && ch < 128) buf += ch
+        else buf ++= "\\u" += toUDigit(ch >>> 12) += toUDigit(ch >>> 8) += toUDigit(ch >>> 4) += toUDigit(ch.toInt)
+    }
+  }
+
+  /** Returns given string enclosed in `"`-quotes with all string characters escaped
+   *  so that they correspond to the JSON standard.
+   *  Characters that are escaped are:  `"`, `\b`, `\f`, `\n`, `\r`, `\t`, `\`.
+   *  Furthermore, every other character which is not in the ASCII range 32-127 is
+   *  escaped as a four hex-digit unicode character of the form `\ u x x x x`.
+   *  @param   str   the string to be quoted
+   */
+  def quoted(str: String): String = {
+    val buf = new StringBuilder += '\"'
+    str foreach (addToStr(buf, _))
+    buf += '\"'
+    buf.toString
+  }
+
+  private val BUF_SIZE = 2 << 16
+}
+
+import Lexer._
+
+/** A simple lexer for tokens as they are used in JSON, plus parens `(`, `)`
+ *  Tokens understood are:
+ *
+ *  `(`, `)`, `[`, `]`, `{`, `}`, `:`, `,`, `true`, `false`, `null`,
+ *  strings (syntax as in JSON),
+ *  integer numbers (syntax as in JSON: -?(0|\d+)),
+ *  floating point numbers (syntax as in JSON: -?(0|\d+)(\.\d+)?((e|E)(+|-)?\d+)?).
+ *  The end of input is represented as its own token, EOF.
+ *  Lexers can keep one token of lookahead.
+ *
+ * @param rd   the reader from which characters are read.
+ */
+class Lexer(rd: Reader) {
+
+  /** The last-read character */
+  var ch: Char = 0
+
+  /** The number of characters read so far */
+  var pos: Long = 0
+
+  /** The last-read token */
+  var token: Token = _
+
+  /** The number of characters read before the start of the last-read token */
+  var tokenPos: Long = 0
+
+  private var atEOF: Boolean = false
+  private val buf = new Array[Char](BUF_SIZE)
+  private var nread: Int = 0
+  private var bp = 0
+
+  /** Reads next character into `ch` */
+  def nextChar() {
+    assert(!atEOF)
+    if (bp == nread) {
+      nread = rd.read(buf)
+      bp = 0
+      if (nread <= 0) { ch = 0; atEOF = true; return }
+    }
+    ch = buf(bp)
+    bp += 1
+    pos += 1
+  }
+
+  /** If last-read character equals given character, reads next character,
+   *  otherwise raises an error
+   *  @param  c   the given character to compare with last-read character
+   *  @throws  MalformedInput if character does not match
+   */
+  def acceptChar(c: Char) = if (ch == c) nextChar() else error("'"+c+"' expected")
+
+  private val sb = new StringBuilder
+
+  private def putChar() {
+    sb += ch; nextChar()
+  }
+
+  private def putAcceptString(str: String) {
+    str foreach acceptChar
+    sb ++= str
+  }
+
+  /** Skips whitespace and reads next lexeme into `token`
+   *  @throws  MalformedInput if lexeme not recognized as a valid token
+   */
+  def nextToken() {
+    sb.clear()
+    while (!atEOF && ch <= ' ') nextChar()
+    tokenPos = pos - 1
+    if (atEOF) token = EOF
+    else ch match {
+      case '(' => putChar(); token = LParen
+      case ')' => putChar(); token = RParen
+      case '{' => putChar(); token = LBrace
+      case '}' => putChar(); token = RBrace
+      case '[' => putChar(); token = LBracket
+      case ']' => putChar(); token = RBracket
+      case ',' => putChar(); token = Comma
+      case ':' => putChar(); token = Colon
+      case 't' => putAcceptString("true"); token = TrueLit
+      case 'f' => putAcceptString("false"); token = FalseLit
+      case 'n' => putAcceptString("null"); token = NullLit
+      case '"' => getString()
+      case '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => getNumber()
+      case _ => error("unrecoginezed start of token: '"+ch+"'")
+    }
+    //println("["+token+"]")
+  }
+
+  /** Reads a string literal, and forms a `StringLit` token from it.
+   *  Last-read input character `ch` must be opening `"`-quote.
+   *  @throws  MalformedInput if lexeme not recognized as a string literal.
+   */
+  def getString() {
+    def udigit() = {
+      nextChar()
+      if ('0' <= ch && ch <= '9') ch - '0'
+      else if ('A' <= ch && ch <= 'F') ch - 'A' + 10
+      else if ('a' <= ch && ch <= 'f') ch - 'a' + 10
+      else error("illegal unicode escape character: '"+ch+"'")
+    }
+    val delim = ch
+    nextChar()
+    while (ch != delim && ch >= ' ') {
+      if (ch == '\\') {
+        nextChar()
+        ch match {
+          case '\'' => sb += '\''
+          case '"' => sb += '"'
+          case '\\' => sb += '\\'
+          case '/' => sb += '/'
+          case 'b' => sb += '\b'
+          case 'f' => sb += '\f'
+          case 'n' => sb += '\n'
+          case 'r' => sb += '\r'
+          case 't' => sb += '\t'
+          case 'u' => sb += (udigit() << 12 | udigit() << 8 | udigit() << 4 | udigit()).toChar
+          case _ => error("illegal escape character: '"+ch+"'")
+        }
+        nextChar()
+      } else {
+        putChar()
+      }
+    }
+    acceptChar(delim)
+    token = StringLit(sb.toString)
+  }
+
+  /** Reads a numeric literal, and forms an `IntLit` or `FloatLit` token from it.
+   *  Last-read input character `ch` must be either `-` or a digit.
+   *  @throws  MalformedInput if lexeme not recognized as a numeric literal.
+   */
+  def getNumber() {
+    def digit() =
+      if ('0' <= ch && ch <= '9') putChar()
+      else error("<digit> expected")
+    def digits() =
+      do { digit() } while ('0' <= ch && ch <= '9')
+    var isFloating = false
+    if (ch == '-') putChar()
+    if (ch == '0') digit()
+    else digits()
+    if (ch == '.') {
+      isFloating = true
+      putChar()
+      digits()
+    }
+    if (ch == 'e' || ch == 'E') {
+      isFloating = true
+      putChar()
+      if (ch == '+' || ch == '-') putChar()
+      digits()
+    }
+    token = if (isFloating) FloatLit(sb.toString) else IntLit(sb.toString)
+  }
+
+  /** If current token equals given token, reads next token, otherwise raises an error.
+   *  @param  t   the given token to compare current token with
+   *  @throws MalformedInput  if the two tokens do not match.
+   */
+  def accept(t: Token) {
+    if (token == t) nextToken()
+    else error(t+" expected, but "+token+" found")
+  }
+
+  /** If the current token is a delimiter consisting of the given character, reads the
+   *  next token, otherwise raises an error.
+   *  @param  ch   the given delimiter character to compare current token with
+   *  @throws MalformedInput  if the current token `token` is not a delimiter, or
+   *                          consists of a character different from `c`.
+   */
+  def accept(ch: Char) {
+    token match {
+      case Delim(`ch`) => nextToken()
+      case _ => accept(Delim(ch))
+    }
+  }
+
+  /** Always throws a `MalformedInput` exception with given error message.
+   *  @param msg  the error message
+   */
+  def error(msg: String) = throw new MalformedInput(this, msg)
+
+  nextChar()
+  nextToken()
+}
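
For reference, a minimal usage sketch of the lexer defined above (not part of the patch): it tokenizes a small JSON-like string and prints each token; the input text is an illustrative assumption.

import java.io.StringReader
import scala.tools.nsc.interactive.Lexer
import scala.tools.nsc.interactive.Lexer._

object LexerDemo {
  def main(args: Array[String]): Unit = {
    // The constructor already reads the first token into `token`.
    val lx = new Lexer(new StringReader("""{"n": 42, "ok": true}"""))
    while (lx.token != EOF) {
      println(lx.token)      // Delim, StringLit, IntLit, TrueLit, ...
      lx.nextToken()
    }
    // quoted escapes a string according to the JSON rules used by the lexer.
    println(Lexer.quoted("say \"hi\"\n"))   // "say \"hi\"\n"
  }
}
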
diff --git a/src/interactive/scala/tools/nsc/interactive/Main.scala b/src/interactive/scala/tools/nsc/interactive/Main.scala
new file mode 100644
index 0000000..c838606
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Main.scala
@@ -0,0 +1,34 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala.tools
+package nsc
+package interactive
+
+/** The main class for NSC, a compiler for the programming
+ *  language Scala.
+ */
+object Main extends nsc.MainClass {
+  override def processSettingsHook(): Boolean = {
+    if (this.settings.Yidedebug) {
+      this.settings.Xprintpos.value = true
+      this.settings.Yrangepos.value = true
+      val compiler = new interactive.Global(this.settings, this.reporter)
+      import compiler.{ reporter => _, _ }
+
+      val sfs = command.files map getSourceFile
+      val reloaded = new interactive.Response[Unit]
+      askReload(sfs, reloaded)
+
+      reloaded.get.right.toOption match {
+        case Some(ex) => reporter.cancelled = true // Causes exit code to be non-0
+        case None => reporter.reset() // Causes other compiler errors to be ignored
+      }
+      askShutdown
+      false
+    }
+    else true
+  }
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/Pickler.scala b/src/interactive/scala/tools/nsc/interactive/Pickler.scala
new file mode 100644
index 0000000..83f3fab
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Pickler.scala
@@ -0,0 +1,377 @@
+package scala.tools.nsc.interactive
+
+import Lexer._
+import java.io.Writer
+import scala.language.implicitConversions
+import scala.reflect.ClassTag
+
+/** An abstract class for writing and reading Scala objects to and
+ *  from a legible representation. The representation follows this grammar:
+ *  {{{
+ *  Pickled = `true` | `false` | `null` | NumericLit | StringLit |
+ *            Labelled | Pickled `,` Pickled
+ *  Labelled = StringLit `(` Pickled? `)`
+ *  }}}
+ *
+ *  All ...Lit classes are as in JSON. @see scala.tools.nsc.interactive.Lexer
+ *
+ *  Subclasses of `Pickler` each can write and read individual classes
+ *  of values.
+ *
+ *  @tparam  T   the type of values handled by this pickler.
+ *
+ *  These Picklers build on the work of Andrew Kennedy. They are most closely inspired by
+ *  Iulian Dragos' picklers for Scala to XML. See:
+ *
+ *  <a href="http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide">
+ *  http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide
+ *  </a>
+ */
+abstract class Pickler[T] {
+
+  import Pickler._
+
+  /** Writes value in pickled form
+   *  @param  wr   the writer to which pickled form is written
+   *  @param  x    the value to write
+   */
+  def pickle(wr: Writer, x: T)
+
+  /** Reads value from pickled form.
+   *
+   *  @param  rd   the lexer from which lexemes are read
+   *  @return An `UnpickleSuccess` value if the current input corresponds to the
+   *          kind of value that is unpickled by the current subclass of `Pickler`,
+   *          an `UnpickleFailure` value otherwise.
+   *  @throws  `Lexer.MalformedInput` if the input is invalid
+   */
+  def unpickle(rd: Lexer): Unpickled[T]
+
+  /** A pickler representing a `~`-pair of values as two consecutive pickled
+   *  strings, separated by a comma.
+   *  @param  that   the second pickler which together with the current pickler makes
+   *                 up the pair `this ~ that` to be pickled.
+   */
+  def ~ [U] (that: => Pickler[U]): Pickler[T ~ U] = seqPickler(this, that)
+
+  /** A pickler that adds a label to the current pickler, using the representation
+   *   `label ( <current pickler> )`
+   *
+   *  @param label  the string to be added as a label.
+   */
+  def labelled(label: String): Pickler[T] = labelledPickler(label, this)
+
+  /** A pickler obtained from the current pickler by a pair of transformer functions
+   *  @param   in   the function that maps values handled by the current pickler to
+   *                values handled by the wrapped pickler.
+   *  @param   out  the function that maps values handled by the wrapped pickler to
+   *                values handled by the current pickler.
+   */
+  def wrapped [U] (in: T => U)(out: U => T): Pickler[U] = wrappedPickler(this)(in)(out)
+
+  /** A conditional pickler obtained from the current pickler.
+   *  @param   p   the condition to test to find out whether pickler can handle
+   *               some Scala value.
+   */
+  def cond(p: Any => Boolean): CondPickler[T] = conditionalPickler(this, p)
+
+  /** A conditional pickler handling values of some Scala class. It adds the
+   *  class name as a label to the representation of the current pickler and
+   *  accepts only values that are instances of that class.
+   *  @param    c     the class of values handled by this pickler.
+   */
+  def asClass[U <: T](c: Class[U]): CondPickler[T] = this.labelled(c.getName).cond(c isInstance _)
+}
+
+object Pickler {
+  /** A base class representing unpickler result. It has two subclasses:
+   *  `UnpickleSuccess` for successful unpicklings and `UnpickleFailure` for failures,
+   *  where a value of the given type `T` could not be unpickled from input.
+   *  @tparam  T the type of unpickled values in case of success.
+   */
+  abstract class Unpickled[+T] {
+    /** Transforms success values to success values using given function,
+     *  leaves failures alone
+     *  @param   f the function to apply.
+     */
+    def map[U](f: T => U): Unpickled[U] = this match {
+      case UnpickleSuccess(x) => UnpickleSuccess(f(x))
+      case f: UnpickleFailure => f
+    }
+    /** Transforms success values to successes or failures using given function,
+     *  leaves failures alone.
+     *  @param   f the function to apply.
+     */
+    def flatMap[U](f: T => Unpickled[U]): Unpickled[U] = this match {
+      case UnpickleSuccess(x) => f(x)
+      case f: UnpickleFailure => f
+    }
+    /** Tries alternate expression if current result is a failure
+     *  @param alt  the alternate expression to be tried in case of failure
+     */
+    def orElse[U >: T](alt: => Unpickled[U]): Unpickled[U] = this match {
+      case UnpickleSuccess(x) => this
+      case f: UnpickleFailure => alt
+    }
+
+    /** Transforms failures into thrown `MalformedInput` exceptions.
+     *  @throws  MalformedInput   if current result is a failure
+     */
+    def requireSuccess: UnpickleSuccess[T] = this match {
+      case s @ UnpickleSuccess(x) => s
+      case f: UnpickleFailure =>
+        throw new MalformedInput(f.rd, "Unrecoverable unpickle failure:\n"+f.errMsg)
+    }
+  }
+
+  /** A class representing successful unpicklings
+   *  @tparam T       the type of the unpickled value
+   *  @param result   the unpickled value
+   */
+  case class UnpickleSuccess[+T](result: T) extends Unpickled[T]
+
+  /** A class representing unpickle failures
+   *  @param msg      an error message describing what failed.
+   *  @param rd       the lexer unpickled values were read from (can be used to get
+   *                  error position, for instance).
+   */
+  class UnpickleFailure(msg: => String, val rd: Lexer) extends Unpickled[Nothing] {
+    def errMsg = msg
+    override def toString = "Failure at "+rd.tokenPos+":\n"+msg
+  }
+
+  private def errorExpected(rd: Lexer, msg: => String) =
+    new UnpickleFailure("expected: "+msg+"\n" +
+                        "found   : "+rd.token,
+                        rd)
+
+  private def nextSuccess[T](rd: Lexer, result: T) = {
+    rd.nextToken()
+    UnpickleSuccess(result)
+  }
+
+  /** The implicit `Pickler` value for type `T`. Equivalent to `implicitly[Pickler[T]]`.
+   */
+  def pkl[T: Pickler] = implicitly[Pickler[T]]
+
+  /** A class representing `~`-pairs */
+  case class ~[+S, +T](fst: S, snd: T)
+
+  /** A wrapper class to be able to use `~` as an infix method */
+  implicit class TildeDecorator[S](x: S) {
+    /** Infix method that forms a `~`-pair. */
+    def ~ [T](y: T): S ~ T = new ~ (x, y)
+  }
+
+  /** Same as `p.labelled(label)`.
+   */
+  def labelledPickler[T](label: String, p: Pickler[T]): Pickler[T] = new Pickler[T] {
+    def pickle(wr: Writer, x: T) = {
+      wr.write(quoted(label))
+      wr.write("(")
+      p.pickle(wr, x)
+      wr.write(")")
+    }
+    def unpickle(rd: Lexer): Unpickled[T] =
+      rd.token match {
+        case StringLit(`label`) =>
+          rd.nextToken()
+          rd.accept('(')
+          val result = p.unpickle(rd).requireSuccess
+          rd.accept(')')
+          result
+        case _ =>
+          errorExpected(rd, quoted(label)+"(...)")
+      }
+  }
+
+  /** Same as `p.wrapped(in)(out)`
+   */
+  def wrappedPickler[S, T](p: Pickler[S])(in: S => T)(out: T => S) = new Pickler[T] {
+    def pickle(wr: Writer, x: T) = p.pickle(wr, out(x))
+    def unpickle(rd: Lexer) = p.unpickle(rd) map in
+  }
+
+  /** Same as `p.cond(condition)`
+   */
+  def conditionalPickler[T](p: Pickler[T], condition: Any => Boolean) = new CondPickler[T](condition) {
+    def pickle(wr: Writer, x: T) = p.pickle(wr, x)
+    def unpickle(rd: Lexer) = p.unpickle(rd)
+  }
+
+  /** Same as `p ~ q`
+   */
+  def seqPickler[T, U](p: Pickler[T], q: => Pickler[U]) = new Pickler[T ~ U] {
+    lazy val qq = q
+    def pickle(wr: Writer, x: T ~ U) = {
+      p.pickle(wr, x.fst)
+      wr.write(',')
+      q.pickle(wr, x.snd)
+    }
+    def unpickle(rd: Lexer) =
+      for (x <- p.unpickle(rd); y <- { rd.accept(','); qq.unpickle(rd).requireSuccess })
+      yield x ~ y
+  }
+
+  /** Same as `p | q`
+   */
+  def eitherPickler[T, U <: T, V <: T](p: CondPickler[U], q: => CondPickler[V]) =
+    new CondPickler[T](x => p.canPickle(x) || q.canPickle(x)) {
+      lazy val qq = q
+      override def tryPickle(wr: Writer, x: Any): Boolean =
+        p.tryPickle(wr, x) || qq.tryPickle(wr, x)
+      def pickle(wr: Writer, x: T) =
+        require(tryPickle(wr, x),
+                "no pickler found for "+x+" of class "+x.getClass.getName)
+      def unpickle(rd: Lexer) = p.unpickle(rd) orElse qq.unpickle(rd)
+    }
+
+  /** A conditional pickler for singleton objects. It represents these
+   *  with the object's underlying class as a label.
+   *  Example: the object scala.None would be represented as `scala.None$()`.
+   */
+  def singletonPickler[T <: AnyRef](x: T): CondPickler[T] =
+    unitPickler
+      .wrapped { _ => x } { x => () }
+      .labelled (x.getClass.getName)
+      .cond (x eq _.asInstanceOf[AnyRef])
+
+  /** A pickler that handles instances of classes that have an empty constructor.
+   *  It represents them as `$new ( <name of class> )`.
+   *  When unpickling, a new instance of the class is created using the empty
+   *  constructor of the class via `Class.forName(<name of class>).newInstance()`.
+   */
+  def javaInstancePickler[T <: AnyRef]: Pickler[T] =
+    (stringPickler labelled "$new")
+      .wrapped { name => Class.forName(name).newInstance().asInstanceOf[T] } { _.getClass.getName }
+
+  /** A pickler that handles iterators. It pickles all values
+   *  returned by an iterator separated by commas.
+   *  When unpickling, it always returns an `UnpickleSuccess` containing an iterator.
+   *  This iterator returns 0 or more values that are obtained by unpickling
+   *  until a closing parenthesis, bracket or brace or the end of input is encountered.
+   *
+   *  This means that iterator picklers should not be directly followed by `~`
+   *  because the pickler would also read any values belonging to the second
+   *  part of the `~`-pair.
+   *
+   *  What's usually done instead is that the iterator pickler is wrapped and labelled
+   *  to handle other kinds of sequences.
+   */
+  implicit def iterPickler[T: Pickler]: Pickler[Iterator[T]] = new Pickler[Iterator[T]] {
+    lazy val p = pkl[T]
+    def pickle(wr: Writer, xs: Iterator[T]) {
+      var first = true
+      for (x <- xs) {
+        if (first) first = false else wr.write(',')
+        p.pickle(wr, x)
+      }
+    }
+    def unpickle(rd: Lexer): Unpickled[Iterator[T]] = UnpickleSuccess(new Iterator[T] {
+      var first = true
+      def hasNext = {
+        val t = rd.token
+        t != EOF && t != RParen && t != RBrace && t != RBracket
+      }
+      def next(): T = {
+        if (first) first = false else rd.accept(',')
+        p.unpickle(rd).requireSuccess.result
+      }
+    })
+  }
+
+  /** A pickler that handles values that can be represented as a single token.
+   *  @param   kind   the kind of token representing the value, used in error messages
+   *                  for unpickling.
+   *  @param  matcher A partial function from tokens to handled values. Unpickling
+   *                  succeeds if the matcher function is defined on the current token.
+   */
+  private def tokenPickler[T](kind: String)(matcher: PartialFunction[Token, T]) = new Pickler[T] {
+    def pickle(wr: Writer, x: T) = wr.write(x.toString)
+    def unpickle(rd: Lexer) =
+      if (matcher isDefinedAt rd.token) nextSuccess(rd, matcher(rd.token))
+      else errorExpected(rd, kind)
+  }
+
+  /** A pickler for values of type `Long`, represented as integer literals */
+  implicit val longPickler: Pickler[Long] =
+    tokenPickler("integer literal") { case IntLit(s) => s.toLong }
+
+  /** A pickler for values of type `Int`, represented as integer literals */
+  implicit val intPickler: Pickler[Int] = longPickler.wrapped { _.toInt } { _.toLong }
+
+  /** A conditional pickler for the boolean value `true` */
+  private val truePickler =
+    tokenPickler("boolean literal") { case TrueLit => true } cond { _ == true }
+
+  /** A conditional pickler for the boolean value `false` */
+  private val falsePickler =
+    tokenPickler("boolean literal") { case FalseLit => false } cond { _ == false }
+
+  /** A pickler for values of type `Boolean`, represented as the literals `true` or `false`. */
+  implicit def booleanPickler: Pickler[Boolean] = truePickler | falsePickler
+
+  /** A pickler for values of type `Unit`, represented by the empty character string */
+  implicit val unitPickler: Pickler[Unit] = new Pickler[Unit] {
+    def pickle(wr: Writer, x: Unit) {}
+    def unpickle(rd: Lexer): Unpickled[Unit] = UnpickleSuccess(())
+  }
+
+  /** A pickler for values of type `String`, represented as string literals */
+  implicit val stringPickler: Pickler[String] = new Pickler[String] {
+    def pickle(wr: Writer, x: String) = wr.write(if (x == null) "null" else quoted(x))
+    def unpickle(rd: Lexer) = rd.token match {
+      case StringLit(s) => nextSuccess(rd, s)
+      case NullLit => nextSuccess(rd, null)
+      case _ => errorExpected(rd, "string literal")
+    }
+  }
+
+  /** A pickler for pairs, represented as `~`-pairs */
+  implicit def tuple2Pickler[T1: Pickler, T2: Pickler]: Pickler[(T1, T2)] =
+    (pkl[T1] ~ pkl[T2])
+      .wrapped { case x1 ~ x2 => (x1, x2) } { case (x1, x2) => x1 ~ x2 }
+      .labelled ("tuple2")
+
+  /** A pickler for 3-tuples, represented as `~`-tuples */
+  implicit def tuple3Pickler[T1, T2, T3](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3]): Pickler[(T1, T2, T3)] =
+    (p1 ~ p2 ~ p3)
+      .wrapped { case x1 ~ x2 ~ x3 => (x1, x2, x3) } { case (x1, x2, x3) => x1 ~ x2 ~ x3 }
+      .labelled ("tuple3")
+
+  /** A pickler for list values */
+  implicit def listPickler[T: Pickler]: Pickler[List[T]] =
+    iterPickler[T] .wrapped { _.toList } { _.iterator } .labelled ("scala.List")
+}
+
+/** A subclass of `Pickler` that can indicate whether a particular value can be pickled by instances
+ *  of this class.
+ *  @param canPickle   The predicate that indicates whether a given value
+ *                     can be pickled by instances of this class.
+ */
+abstract class CondPickler[T](val canPickle: Any => Boolean) extends Pickler[T] {
+  import Pickler._
+
+  /** Pickles given value `x` if possible, as indicated by `canPickle(x)`.
+   */
+  def tryPickle(wr: Writer, x: Any): Boolean = {
+    val result = canPickle(x)
+    if (result) pickle(wr, x.asInstanceOf[T])
+    result
+  }
+
+  /** A pickler obtained from this pickler and an alternative pickler.
+   *  To pickle a value, this pickler is tried first. If it cannot handle
+   *  the object (as indicated by its `canPickle` test), then the
+   *  alternative pickler is tried.
+   *  To unpickle a value, this unpickler is tried first. If it cannot read
+   *  the input (as indicated by an `UnpickleFailure` result), then the
+   *  alternative pickler is tried.
+   *  @tparam V    The handled type of the returned pickler.
+   *  @tparam U    The handled type of the alternative pickler.
+   *  @param that The alternative pickler.
+   */
+  def | [V >: T, U <: V] (that: => CondPickler[U]): CondPickler[V] =
+    eitherPickler[V, T, U](this, that)
+}
+
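A small end-to-end sketch of the combinators above (not part of the patch): it derives a pickler for a hypothetical Person case class, pickles an instance to text, and unpickles it again. The Person class and the literal values are assumptions made for the demo.

import java.io.{StringReader, StringWriter}
import scala.tools.nsc.interactive.{Lexer, Pickler}
import scala.tools.nsc.interactive.Pickler._

case class Person(name: String, age: Int)

object PicklerDemo {
  // Combine the built-in string and int picklers, then label the result.
  implicit val personPickler: Pickler[Person] =
    (pkl[String] ~ pkl[Int])
      .wrapped { case name ~ age => Person(name, age) } { p => p.name ~ p.age }
      .labelled("Person")

  def main(args: Array[String]): Unit = {
    val out = new StringWriter
    personPickler.pickle(out, Person("Ada", 36))
    println(out.toString)                       // "Person"("Ada",36)

    val in = new Lexer(new StringReader(out.toString))
    println(personPickler.unpickle(in))         // UnpickleSuccess(Person(Ada,36))
  }
}
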
diff --git a/src/interactive/scala/tools/nsc/interactive/Picklers.scala b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
new file mode 100644
index 0000000..1f89e6d
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
@@ -0,0 +1,189 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+import util.InterruptReq
+import scala.reflect.internal.util.{ SourceFile, BatchSourceFile }
+import io.{ AbstractFile, PlainFile }
+import util.EmptyAction
+import scala.reflect.internal.util.Position
+import Pickler._
+import scala.collection.mutable
+import mutable.ListBuffer
+
+trait Picklers { self: Global =>
+
+  lazy val freshRunReq =
+    unitPickler
+      .wrapped { _ => new FreshRunReq } { x => () }
+      .labelled ("FreshRunReq")
+      .cond (_.isInstanceOf[FreshRunReq])
+
+  lazy val shutdownReq = singletonPickler(ShutdownReq)
+
+  def defaultThrowable[T <: Throwable]: CondPickler[T] = javaInstancePickler[T] cond { _ => true }
+
+  implicit lazy val throwable: Pickler[Throwable] =
+    freshRunReq | shutdownReq | defaultThrowable
+
+  implicit def abstractFile: Pickler[AbstractFile] =
+    pkl[String]
+      .wrapped[AbstractFile] { new PlainFile(_) } { _.path }
+      .asClass (classOf[PlainFile])
+
+  private val sourceFilesSeen = new mutable.HashMap[AbstractFile, Array[Char]] {
+    override def default(key: AbstractFile) = Array()
+  }
+
+  type Diff = (Int /*start*/, Int /*end*/, String /*replacement*/)
+
+  def delta(f: AbstractFile, cs: Array[Char]): Diff = {
+    val bs = sourceFilesSeen(f)
+    var start = 0
+    while (start < bs.length && start < cs.length && bs(start) == cs(start)) start += 1
+    var end = bs.length
+    var end2 = cs.length
+    while (end > start && end2 > start && bs(end - 1) == cs(end2 - 1)) { end -= 1; end2 -= 1 }
+    sourceFilesSeen(f) = cs
+    (start, end, cs.slice(start, end2).mkString(""))
+  }
+
+  def patch(f: AbstractFile, d: Diff): Array[Char] = {
+    val (start, end, replacement) = d
+    val patched = sourceFilesSeen(f).patch(start, replacement, end - start)
+    sourceFilesSeen(f) = patched
+    patched
+  }
+
+  implicit lazy val sourceFile: Pickler[SourceFile] =
+    (pkl[AbstractFile] ~ pkl[Diff]).wrapped[SourceFile] {
+      case f ~ d => new BatchSourceFile(f, patch(f, d))
+    } {
+      f => f.file ~ delta(f.file, f.content)
+    }.asClass (classOf[BatchSourceFile])
+
+  lazy val offsetPosition: CondPickler[Position] =
+    (pkl[SourceFile] ~ pkl[Int])
+      .wrapped { case x ~ y => Position.offset(x, y) } { p => p.source ~ p.point }
+      .asClass (classOf[Position])
+
+  lazy val rangePosition: CondPickler[Position] =
+    (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int])
+      .wrapped { case source ~ start ~ point ~ end => Position.range(source, start, point, end) } { p => p.source ~ p.start ~ p.point ~ p.end }
+      .asClass (classOf[Position])
+
+  lazy val transparentPosition: CondPickler[Position] =
+    (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int])
+      .wrapped { case source ~ start ~ point ~ end => Position.range(source, start, point, end).makeTransparent } { p => p.source ~ p.start ~ p.point ~ p.end }
+      .asClass (classOf[Position])
+
+  lazy val noPosition = singletonPickler(NoPosition)
+
+  implicit lazy val position: Pickler[Position] = transparentPosition | rangePosition | offsetPosition | noPosition
+
+  implicit lazy val namePickler: Pickler[Name] =
+    pkl[String] .wrapped[Name] {
+      str => if ((str.length > 1) && (str endsWith "!")) newTypeName(str.init) else newTermName(str)
+    } {
+      name => if (name.isTypeName) name.toString+"!" else name.toString
+    }
+
+  implicit lazy val symPickler: Pickler[Symbol] = {
+    def ownerNames(sym: Symbol, buf: ListBuffer[Name]): ListBuffer[Name] = {
+      if (!sym.isRoot) {
+        ownerNames(sym.owner, buf)
+        buf += (if (sym.isModuleClass) sym.sourceModule else sym).name
+        if (!sym.isType && !sym.isStable) { // TODO: what's the reasoning behind this condition!?
+          val sym1 = sym.owner.info.decl(sym.name)
+          if (sym1.isOverloaded) {
+            val index = sym1.alternatives.indexOf(sym)
+            assert(index >= 0, sym1+" not found in alternatives "+sym1.alternatives)
+            buf += newTermName(index.toString)
+          }
+        }
+      }
+      buf
+    }
+    def makeSymbol(root: Symbol, names: List[Name]): Symbol = names match {
+      case List() =>
+        root
+      case name :: rest =>
+        val sym = root.info.decl(name)
+        if (sym.isOverloaded) makeSymbol(sym.alternatives(rest.head.toString.toInt), rest.tail)
+        else makeSymbol(sym, rest)
+    }
+    pkl[List[Name]] .wrapped { makeSymbol(rootMirror.RootClass, _) } { ownerNames(_, new ListBuffer).toList }
+  }
+
+  implicit def workEvent: Pickler[WorkEvent] = {
+    (pkl[Int] ~ pkl[Long])
+      .wrapped { case id ~ ms => WorkEvent(id, ms) } { w => w.atNode ~ w.atMillis }
+  }
+
+  implicit def interruptReq: Pickler[InterruptReq] = {
+    val emptyIR: InterruptReq = new InterruptReq { type R = Unit; val todo = () => () }
+    pkl[Unit] .wrapped { _ =>  emptyIR } { _ => () }
+  }
+
+  implicit def reloadItem: CondPickler[ReloadItem] =
+    pkl[List[SourceFile]]
+      .wrapped { ReloadItem(_, new Response) } { _.sources }
+      .asClass (classOf[ReloadItem])
+
+  implicit def askTypeAtItem: CondPickler[AskTypeAtItem] =
+    pkl[Position]
+      .wrapped { new AskTypeAtItem(_, new Response) } { _.pos }
+      .asClass (classOf[AskTypeAtItem])
+
+  implicit def askTypeItem: CondPickler[AskTypeItem] =
+    (pkl[SourceFile] ~ pkl[Boolean])
+      .wrapped { case source ~ forceReload => new AskTypeItem(source, forceReload, new Response) } { w => w.source ~ w.forceReload }
+      .asClass (classOf[AskTypeItem])
+
+  implicit def askTypeCompletionItem: CondPickler[AskTypeCompletionItem] =
+    pkl[Position]
+      .wrapped { new AskTypeCompletionItem(_, new Response) } { _.pos }
+      .asClass (classOf[AskTypeCompletionItem])
+
+  implicit def askScopeCompletionItem: CondPickler[AskScopeCompletionItem] =
+    pkl[Position]
+      .wrapped { new AskScopeCompletionItem(_, new Response) } { _.pos }
+      .asClass (classOf[AskScopeCompletionItem])
+
+  implicit def askToDoFirstItem: CondPickler[AskToDoFirstItem] =
+    pkl[SourceFile]
+      .wrapped { new AskToDoFirstItem(_) } { _.source }
+      .asClass (classOf[AskToDoFirstItem])
+
+  implicit def askLinkPosItem: CondPickler[AskLinkPosItem] =
+    (pkl[Symbol] ~ pkl[SourceFile])
+      .wrapped { case sym ~ source => new AskLinkPosItem(sym, source, new Response) } { item => item.sym ~ item.source }
+      .asClass (classOf[AskLinkPosItem])
+
+  implicit def askDocCommentItem: CondPickler[AskDocCommentItem] =
+    (pkl[Symbol] ~ pkl[SourceFile] ~ pkl[Symbol] ~ pkl[List[(Symbol,SourceFile)]])
+      .wrapped { case sym ~ source ~ site ~ fragments => new AskDocCommentItem(sym, source, site, fragments, new Response) } { item => item.sym ~ item.source ~ item.site ~ item.fragments }
+      .asClass (classOf[AskDocCommentItem])
+
+  implicit def askLoadedTypedItem: CondPickler[AskLoadedTypedItem] =
+    pkl[SourceFile]
+      .wrapped { source => new AskLoadedTypedItem(source, false, new Response) } { _.source }
+      .asClass (classOf[AskLoadedTypedItem])
+
+  implicit def askParsedEnteredItem: CondPickler[AskParsedEnteredItem] =
+    (pkl[SourceFile] ~ pkl[Boolean])
+      .wrapped { case source ~ keepLoaded => new AskParsedEnteredItem(source, keepLoaded, new Response) } { w => w.source ~ w.keepLoaded }
+      .asClass (classOf[AskParsedEnteredItem])
+
+  implicit def emptyAction: CondPickler[EmptyAction] =
+    pkl[Unit]
+      .wrapped { _ => new EmptyAction } { _ => () }
+      .asClass (classOf[EmptyAction])
+
+  implicit def action: Pickler[() => Unit] =
+    reloadItem | askTypeAtItem | askTypeItem | askTypeCompletionItem | askScopeCompletionItem |
+    askToDoFirstItem | askLinkPosItem | askDocCommentItem | askLoadedTypedItem | askParsedEnteredItem | emptyAction
+}
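
The `delta`/`patch` pair above encodes a source-file edit as a single (start, end, replacement) triple relative to the previously seen contents. Below is a standalone restatement of that logic over plain arrays (not part of the patch; the example strings are assumptions).

object DiffDemo {
  type Diff = (Int /*start*/, Int /*end*/, String /*replacement*/)

  // Same prefix/suffix trimming as Picklers.delta, without the per-file cache.
  def delta(bs: Array[Char], cs: Array[Char]): Diff = {
    var start = 0
    while (start < bs.length && start < cs.length && bs(start) == cs(start)) start += 1
    var end  = bs.length
    var end2 = cs.length
    while (end > start && end2 > start && bs(end - 1) == cs(end2 - 1)) { end -= 1; end2 -= 1 }
    (start, end, cs.slice(start, end2).mkString(""))
  }

  // Same splice as Picklers.patch.
  def patch(bs: Array[Char], d: Diff): Array[Char] = {
    val (start, end, replacement) = d
    bs.patch(start, replacement, end - start)
  }

  def main(args: Array[String]): Unit = {
    val before = "val x = 1".toCharArray
    val after  = "val x = 42".toCharArray
    val d = delta(before, after)
    println(d)                                           // (8,9,42)
    println(patch(before, d).mkString == after.mkString) // true
  }
}
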
diff --git a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala
rename to src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala
diff --git a/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala b/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala
new file mode 100644
index 0000000..d7dadcc
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala
@@ -0,0 +1,41 @@
+package scala.tools.nsc.interactive
+
+import java.io.Writer
+
+class PrettyWriter(wr: Writer) extends Writer {
+  protected val indentStep = "  "
+  private var indent = 0
+  private def newLine() {
+    wr.write('\n')
+    wr.write(indentStep * indent)
+  }
+  def close() = wr.close()
+  def flush() = wr.flush()
+  def write(str: Array[Char], off: Int, len: Int): Unit = {
+    if (off < str.length && off < len) {
+      str(off) match {
+        case '{' | '[' | '(' =>
+          indent += 1
+          wr.write(str(off).toInt)
+          newLine()
+          wr.write(str, off + 1, len - 1)
+        case '}' | ']' | ')' =>
+          wr.write(str, off, len)
+          indent -= 1
+        case ',' =>
+          wr.write(',')
+          newLine()
+          wr.write(str, off + 1, len - 1)
+        case ':' =>
+          wr.write(':')
+          wr.write(' ')
+          wr.write(str, off + 1, len - 1)
+        case _ =>
+          wr.write(str, off, len)
+      }
+    } else {
+      wr.write(str, off, len)
+    }
+  }
+  override def toString = wr.toString
+}
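
A usage sketch for the writer above (not part of the patch): wrapping a StringWriter so that pickled output is broken across indented lines; the pieces written are illustrative.

import java.io.StringWriter
import scala.tools.nsc.interactive.PrettyWriter

object PrettyWriterDemo {
  def main(args: Array[String]): Unit = {
    val sw = new StringWriter
    val pw = new PrettyWriter(sw)
    // PrettyWriter indents after '(' and starts a new line after ','.
    for (piece <- List("\"askReload\"", "(", "\"A.scala\"", ",", "\"B.scala\"", ")"))
      pw.write(piece)
    pw.flush()
    println(sw)
  }
}
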
diff --git a/src/interactive/scala/tools/nsc/interactive/REPL.scala b/src/interactive/scala/tools/nsc/interactive/REPL.scala
new file mode 100644
index 0000000..ffa61b0
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/REPL.scala
@@ -0,0 +1,164 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala
+package tools.nsc
+package interactive
+
+import scala.reflect.internal.util._
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.io._
+import java.io.FileWriter
+
+/** Interface of interactive compiler to a client such as an IDE
+ */
+object REPL {
+
+  val versionMsg = "Scala compiler " +
+    Properties.versionString + " -- " +
+    Properties.copyrightString
+
+  val prompt = "> "
+
+  var reporter: ConsoleReporter = _
+
+  private def replError(msg: String) {
+    reporter.error(/*new Position */FakePos("scalac"),
+                   msg + "\n  scalac -help  gives more information")
+  }
+
+  def process(args: Array[String]) {
+    val settings = new Settings(replError)
+    reporter = new ConsoleReporter(settings)
+    val command = new CompilerCommand(args.toList, settings)
+    if (command.settings.version)
+      reporter.echo(versionMsg)
+    else {
+      try {
+        object compiler extends Global(command.settings, reporter) {
+//          printTypings = true
+        }
+        if (reporter.hasErrors) {
+          reporter.flush()
+          return
+        }
+        if (command.shouldStopWithInfo) {
+          reporter.echo(command.getInfoMessage(compiler))
+        } else {
+          run(compiler)
+        }
+      } catch {
+        case ex @ FatalError(msg) =>
+          if (true || command.settings.debug) // !!!
+            ex.printStackTrace()
+          reporter.error(null, "fatal error: " + msg)
+      }
+    }
+  }
+
+  def main(args: Array[String]) {
+    process(args)
+    sys.exit(if (reporter.hasErrors) 1 else 0)
+  }
+
+  def loop(action: (String) => Unit) {
+    Console.print(prompt)
+    try {
+      val line = Console.readLine()
+      if (line.length() > 0) {
+        action(line)
+      }
+      loop(action)
+    }
+    catch {
+      case _: java.io.EOFException => //nop
+    }
+  }
+
+  /** Commands:
+   *
+   *  reload file1 ... fileN
+   *  typeat file off1 off2?
+   *  complete file off1 off2?
+   */
+  def run(comp: Global) {
+    val reloadResult = new Response[Unit]
+    val typeatResult = new Response[comp.Tree]
+    val completeResult = new Response[List[comp.Member]]
+    val typedResult = new Response[comp.Tree]
+    val structureResult = new Response[comp.Tree]
+
+    def makePos(file: String, off1: String, off2: String) = {
+      val source = toSourceFile(file)
+      comp.rangePos(source, off1.toInt, off1.toInt, off2.toInt)
+    }
+
+    def doTypeAt(pos: Position) {
+      comp.askTypeAt(pos, typeatResult)
+      show(typeatResult)
+    }
+
+    def doComplete(pos: Position) {
+      comp.askTypeCompletion(pos, completeResult)
+      show(completeResult)
+    }
+
+    def doStructure(file: String) {
+      comp.askParsedEntered(toSourceFile(file), keepLoaded = false, structureResult)
+      show(structureResult)
+    }
+
+    loop { line =>
+      (line split " ").toList match {
+        case "reload" :: args =>
+          comp.askReload(args map toSourceFile, reloadResult)
+          show(reloadResult)
+        case "reloadAndAskType" :: file :: millis :: Nil =>
+          comp.askReload(List(toSourceFile(file)), reloadResult)
+          Thread.sleep(millis.toLong)
+          println("ask type now")
+          comp.askLoadedTyped(toSourceFile(file), keepLoaded = true, typedResult)
+          typedResult.get
+        case List("typeat", file, off1, off2) =>
+          doTypeAt(makePos(file, off1, off2))
+        case List("typeat", file, off1) =>
+          doTypeAt(makePos(file, off1, off1))
+        case List("complete", file, off1, off2) =>
+          doComplete(makePos(file, off1, off2))
+        case List("complete", file, off1) =>
+          doComplete(makePos(file, off1, off1))
+        case List("quit") =>
+          comp.askShutdown()
+          sys.exit(1)
+        case List("structure", file) =>
+          doStructure(file)
+        case _ =>
+          print("""Available commands:
+                  | reload <file_1> ... <file_n>
+                  | reloadAndAskType <file> <sleep-ms>
+                  | typed <file>
+                  | typeat <file> <start-pos> <end-pos>
+                  | typeat <file> <pos>
+                  | complete <file> <start-pos> <end-pos>
+                  | compile <file> <pos>
+                  | structure <file>
+                  | quit
+                  |""".stripMargin)
+      }
+    }
+  }
+
+  def toSourceFile(name: String) = new BatchSourceFile(new PlainFile(new java.io.File(name)))
+
+  def using[T, U](svar: Response[T])(op: T => U): Option[U] = {
+    val res = svar.get match {
+      case Left(result) => Some(op(result))
+      case Right(exc) => exc.printStackTrace; println("ERROR: "+exc); None
+    }
+    svar.clear()
+    res
+  }
+
+  def show[T](svar: Response[T]) = using(svar)(res => println("==> "+res))
+}
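
A launch sketch (not part of the patch): starting the command loop above from code. The -usejavacp flag, which makes the compiler reuse the JVM classpath, is an assumption about a convenient way to supply a classpath.

object ReplDemo {
  def main(args: Array[String]): Unit =
    // Then type e.g. "reload <file>" or "typeat <file> <pos>" at the "> " prompt.
    scala.tools.nsc.interactive.REPL.main(Array("-usejavacp"))
}
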
diff --git a/src/interactive/scala/tools/nsc/interactive/RangePositions.scala b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala
new file mode 100644
index 0000000..410f919
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala
@@ -0,0 +1,14 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package interactive
+
+@deprecated("Use scala.reflect.internal.Positions", "2.11.0")
+trait RangePositions extends scala.reflect.internal.Positions with ast.Trees with ast.Positions {
+  self: scala.tools.nsc.Global =>
+
+  override val useOffsetPositions = false
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/Replayer.scala b/src/interactive/scala/tools/nsc/interactive/Replayer.scala
new file mode 100644
index 0000000..0e3e249
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Replayer.scala
@@ -0,0 +1,74 @@
+package scala.tools.nsc.interactive
+
+import java.io.{Reader, Writer}
+
+import Pickler._
+import Lexer.EOF
+
+abstract class LogReplay {
+  def logreplay(event: String, x: => Boolean): Boolean
+  def logreplay[T: Pickler](event: String, x: => Option[T]): Option[T]
+  def close()
+  def flush()
+}
+
+class Logger(wr0: Writer) extends LogReplay {
+  val wr = new PrettyWriter(wr0)
+  private var first = true
+  private def insertComma() = if (first) first = false else wr.write(",")
+
+  def logreplay(event: String, x: => Boolean) = {
+    val xx = x
+    if (xx) { insertComma(); pkl[Unit].labelled(event).pickle(wr, ()) }
+    xx
+  }
+  def logreplay[T: Pickler](event: String, x: => Option[T]) = {
+    val xx = x
+    xx match {
+      case Some(y) => insertComma(); pkl[T].labelled(event).pickle(wr, y)
+      case None =>
+    }
+    xx
+  }
+  def close() { wr.close() }
+  def flush() { wr.flush() }
+}
+
+object NullLogger extends LogReplay {
+  def logreplay(event: String, x: => Boolean) = x
+  def logreplay[T: Pickler](event: String, x: => Option[T]) = x
+  def close() {}
+  def flush() {}
+}
+
+class Replayer(raw: Reader) extends LogReplay {
+  private val rd = new Lexer(raw)
+  private var nextComma = false
+
+  private def eatComma() =
+    if (nextComma) { rd.accept(','); nextComma = false }
+
+  def logreplay(event: String, x: => Boolean) =
+    if (rd.token == EOF) NullLogger.logreplay(event, x)
+    else {
+      eatComma()
+      pkl[Unit].labelled(event).unpickle(rd) match {
+        case UnpickleSuccess(_) => nextComma = true; true
+        case _ => false
+      }
+    }
+
+  def logreplay[T: Pickler](event: String, x: => Option[T]) =
+    if (rd.token == EOF) NullLogger.logreplay(event, x)
+    else {
+      eatComma()
+      pkl[T].labelled(event).unpickle(rd) match {
+        case UnpickleSuccess(y) => nextComma = true; Some(y)
+        case _ => None
+      }
+    }
+
+  def close() { raw.close() }
+  def flush() {}
+}
+
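A round-trip sketch (not part of the patch): log two events with the Logger above, then replay them from the logged text with the Replayer. The event names and values are assumptions made for the demo.

import java.io.{StringReader, StringWriter}
import scala.tools.nsc.interactive.{Logger, Replayer}

object ReplayDemo {
  def main(args: Array[String]): Unit = {
    val out = new StringWriter
    val log = new Logger(out)
    log.logreplay("reloaded", Some(3))    // records "reloaded"(3)
    log.logreplay("cancelled", true)      // records "cancelled"()
    log.flush()

    // Replaying feeds back the recorded answers instead of recomputing them.
    val replay = new Replayer(new StringReader(out.toString))
    println(replay.logreplay("reloaded", Some(-1)))   // Some(3), taken from the log
    println(replay.logreplay("cancelled", false))     // true, taken from the log
  }
}
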
diff --git a/src/interactive/scala/tools/nsc/interactive/Response.scala b/src/interactive/scala/tools/nsc/interactive/Response.scala
new file mode 100644
index 0000000..3e84c83
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Response.scala
@@ -0,0 +1,107 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+/** Typical interaction, given a predicate <user-input>, a function <display>,
+ *  and an exception handler <handle>:
+ *
+ *  val TIMEOUT = 100 // (milliseconds) or something like that
+ *  val r = new Response()
+ *  while (!r.isComplete && !r.isCancelled) {
+ *    if (<user-input>) r.cancel()
+ *    else r.get(TIMEOUT) match {
+ *      case Some(Left(data)) => <display>(data)
+ *      case Some(Right(exc)) => <handle>(exc)
+ *      case None =>
+ *    }
+ *  }
+ */
+class Response[T] {
+
+  private var data: Option[Either[T, Throwable]] = None
+  private var complete = false
+  private var cancelled = false
+
+  /** Set provisional data, more to come
+   */
+  def setProvisionally(x: T) = synchronized {
+    data = Some(Left(x))
+  }
+
+  /** Set final data, and mark response as complete.
+   */
+  def set(x: T) = synchronized {
+    data = Some(Left(x))
+    complete = true
+    notifyAll()
+  }
+
+  /** Store raised exception in data, and mark response as complete.
+   */
+  def raise(exc: Throwable) = synchronized {
+    data = Some(Right(exc))
+    complete = true
+    notifyAll()
+  }
+
+  /** Get final data, waiting as long as necessary.
+   *  When interrupted, will return with Right(InterruptedException).
+   */
+  def get: Either[T, Throwable] = synchronized {
+    while (!complete) {
+      try {
+        wait()
+      } catch {
+        case exc: InterruptedException => {
+          Thread.currentThread().interrupt()
+          raise(exc)
+        }
+      }
+    }
+    data.get
+  }
+
+  /** Optionally get data within `timeout` milliseconds.
+   *  When interrupted, will return with Some(Right(InterruptedException)).
+   *  When the timeout ends, will return the last stored provisional result,
+   *  or else None if no provisional result was stored.
+   */
+  def get(timeout: Long): Option[Either[T, Throwable]] = synchronized {
+    val start = System.currentTimeMillis
+    var current = start
+    while (!complete && start + timeout > current) {
+      try {
+        wait(timeout - (current - start))
+      } catch {
+        case exc: InterruptedException => {
+          Thread.currentThread().interrupt()
+          raise(exc)
+        }
+      }
+      current = System.currentTimeMillis
+    }
+    data
+  }
+
+  /** Whether the final data has been stored
+   */
+  def isComplete = synchronized { complete }
+
+  /** Cancel action computing this response (Only the
+   *  party that calls get on a response may cancel).
+   */
+  def cancel() = synchronized { cancelled = true }
+
+  /** A cancel request for this response has been issued
+   */
+  def isCancelled = synchronized { cancelled }
+
+  def clear() = synchronized {
+    data = None
+    complete = false
+    cancelled = false
+  }
+}
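A minimal sketch (not part of the patch) of the producer/consumer handshake implemented above: one thread sets the response while another blocks on `get`. The worker computation is an assumption.

import scala.tools.nsc.interactive.Response

object ResponseDemo {
  def main(args: Array[String]): Unit = {
    val r = new Response[Int]
    val worker = new Thread(new Runnable {
      def run(): Unit = {
        Thread.sleep(50)   // pretend to compute something
        r.set(42)          // store the final answer and wake up waiters
      }
    })
    worker.start()
    r.get match {          // blocks until set() or raise() is called
      case Left(result) => println("got " + result)
      case Right(exc)   => println("failed: " + exc)
    }
  }
}
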
diff --git a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
rename to src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala
new file mode 100644
index 0000000..2cb4f5f
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala
@@ -0,0 +1,113 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+package tests
+
+import core._
+import scala.collection.mutable.ListBuffer
+
+/** A base class for writing interactive compiler tests.
+ *
+ *  This class tries to cover common functionality needed when testing the presentation
+ *  compiler: instantiating source files, reloading, creating positions, instantiating
+ *  the presentation compiler, random stress testing.
+ *
+ *  By default, this class loads all Scala and Java sources found under `src/`, going
+ *  recursively into subfolders. Loaded sources are available in `sourceFiles` (see trait
+ *  `TestResources`). The presentation compiler is available through `compiler`.
+ *
+ *  It is easy to test member completion, type queries and hyperlinking at a given position.
+ *  Source files are searched for `TestMarker`s. By default, the completion marker is `/*!*/`, the
+ *  typedAt marker is `/*?*/` and the hyperlinking marker is `/*#*/`. Place these markers in
+ *  your source files, and the test framework will automatically pick them up and test the
+ *  corresponding actions. Sources are reloaded by `askReload(sourceFiles)` (blocking
+ *  call). All ask operations are placed on the work queue without waiting for each one to
+ *  complete before asking the next. After all asks, it waits for each response in turn and
+ *  prints the result. The default timeout is 1 second per operation.
+ *
+ *  To define a custom operation you have to:
+ *
+ *  	(1) Define a new marker by extending `TestMarker`
+ *  	(2) Provide an implementation for the operation you want to check by extending `PresentationCompilerTestDef`
+ *  	(3) Add the class defined in (1) to the set of executed test actions by calling `++` on `InteractiveTest`.
+ *
+ *  Then you can simply use the newly defined marker in your test sources and the testing
+ *  framework will automatically pick it up.
+ *
+ *  @see   Check existing tests under test/files/presentation
+ *
+ *  @author Iulian Dragos
+ *  @author Mirco Dotta
+ */
+abstract class InteractiveTest
+  extends AskParse
+  with AskShutdown
+  with AskReload
+  with AskLoadedTyped
+  with PresentationCompilerInstance
+  with CoreTestDefs
+  with InteractiveTestSettings { self =>
+
+  protected val runRandomTests = false
+
+  /** Should askAllSources wait for each ask to finish before issuing the next? */
+  override protected val synchronousRequests = true
+
+  /** The core set of test actions that are executed during each test run are
+   *  `CompletionAction`, `TypeAction` and `HyperlinkAction`.
+   *  Override this member if you need to change the default set of executed test actions.
+   */
+  protected lazy val testActions: ListBuffer[PresentationCompilerTestDef] = {
+    ListBuffer(new TypeCompletionAction(compiler), new ScopeCompletionAction(compiler), new TypeAction(compiler), new HyperlinkAction(compiler))
+  }
+
+  /** Add new presentation compiler actions to test. Presentation compiler tests
+   *  need to extend trait `PresentationCompilerTestDef`.
+   */
+  protected def ++(tests: PresentationCompilerTestDef*) {
+    testActions ++= tests
+  }
+
+  /** Test's entry point */
+  def main(args: Array[String]) {
+    try execute()
+    finally askShutdown()
+  }
+
+  protected def execute(): Unit = {
+    loadSources()
+    runDefaultTests()
+  }
+
+  /** Load all sources before executing the test. */
+  protected def loadSources() {
+    // ask the presentation compiler to track all sources. We do
+    // not wait for the file to be entirely typed because we do want
+    // to exercise the presentation compiler on scoped type requests.
+    askReload(sourceFiles)
+    // make sure all sources are parsed before running the test. This
+    // is because test may depend on the sources having been parsed at
+    // least once
+    askParse(sourceFiles)
+  }
+
+  /** Run all defined `PresentationCompilerTestDef` */
+  protected def runDefaultTests() {
+    //TODO: integrate random tests!, i.e.: if (runRandomTests) randomTests(20, sourceFiles)
+    testActions.foreach(_.runTest())
+  }
+
+  /** Perform n random tests with random changes. */
+  /****
+  private def randomTests(n: Int, files: Array[SourceFile]) {
+    val tester = new Tester(n, files, settings) {
+      override val compiler = self.compiler
+      override val reporter = new reporters.StoreReporter
+    }
+    tester.run()
+  }
+  ****/
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
new file mode 100644
index 0000000..ad5c61b
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
@@ -0,0 +1,69 @@
+package scala.tools.nsc
+package interactive
+package tests
+
+import java.io.File.pathSeparatorChar
+import java.io.File.separatorChar
+import scala.tools.nsc.interactive.tests.core.PresentationCompilerInstance
+import scala.tools.nsc.io.{File,Path}
+import core.Reporter
+import core.TestSettings
+
+trait InteractiveTestSettings extends TestSettings with PresentationCompilerInstance {
+  /** Delimiter that starts a comment line in the .flags file */
+  private final val CommentStartDelimiter = "#"
+
+  private final val TestOptionsFileExtension = "flags"
+
+  /** Prepare the settings object. Load the .flags file and adjust all paths from the
+   *  Unix-like syntax to the platform-specific syntax. This is necessary so that a
+   *  single .flags file can be used on all platforms.
+   *
+   *  @note Bootclasspath is treated specially. If there is a -bootclasspath option in
+   *        the file, the 'usejavacp' setting is set to false. This ensures that the
+   *        bootclasspath takes precedence over the scala-library used to run the current
+   *        test.
+   */
+  override protected def prepareSettings(settings: Settings) {
+    def adjustPaths(paths: settings.PathSetting*) {
+      for (p <- paths if argsString.contains(p.name)) p.value = p.value.map {
+        case '/' => separatorChar
+        case ':' => pathSeparatorChar
+        case c   => c
+      }
+    }
+
+    // need this so that the classpath comes from what partest provides,
+    // instead of from scala.home
+    settings.usejavacp.value = !argsString.contains("-bootclasspath")
+
+    // pass any options coming from outside
+    settings.processArgumentString(argsString) match {
+      case (false, rest) =>
+        println("error processing arguments (unprocessed: %s)".format(rest))
+      case _ => ()
+    }
+
+    // Make the -sourcepath value provided in the .flags file (if any) relative to the test's base directory
+    if(settings.sourcepath.isSetByUser)
+      settings.sourcepath.value = (baseDir / Path(settings.sourcepath.value)).path
+
+    adjustPaths(settings.bootclasspath, settings.classpath, settings.javabootclasspath, settings.sourcepath)
+  }
+
+  /** If there's a file ending in .flags, read it and parse it for command-line arguments. */
+  protected val argsString = {
+    val optsFile = outDir / "%s.%s".format(System.getProperty("partest.testname"), TestOptionsFileExtension)
+    val str = try File(optsFile).slurp() catch {
+      case e: java.io.IOException => ""
+    }
+    str.lines.filter(!_.startsWith(CommentStartDelimiter)).mkString(" ")
+  }
+
+  override protected def printClassPath(implicit reporter: Reporter) {
+    reporter.println("\toutDir: %s".format(outDir.path))
+    reporter.println("\tbaseDir: %s".format(baseDir.path))
+    reporter.println("\targsString: %s".format(argsString))
+    super.printClassPath(reporter)
+  }
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala
new file mode 100644
index 0000000..f1ada32
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala
@@ -0,0 +1,209 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala
+package tools.nsc
+package interactive
+package tests
+
+import scala.reflect.internal.util._
+import reporters._
+import io.AbstractFile
+import scala.collection.mutable.ArrayBuffer
+
+class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
+
+  val reporter = new StoreReporter
+  val compiler = new Global(settings, reporter)
+
+  def askAndListen[T, U](msg: String,  arg: T, op: (T, Response[U]) => Unit) {
+    if (settings.verbose) print(msg+" "+arg+": ")
+    val TIMEOUT = 10 // ms
+    val limit = System.currentTimeMillis() + randomDelayMillis
+    val res = new Response[U]
+    op(arg, res)
+    while (!res.isComplete && !res.isCancelled) {
+      if (System.currentTimeMillis() > limit) {
+        print("c"); res.cancel()
+      } else res.get(TIMEOUT.toLong) match {
+        case Some(Left(t)) =>
+          /**/
+          if (settings.verbose) println(t)
+        case Some(Right(ex)) =>
+          ex.printStackTrace()
+          println(ex)
+        case None =>
+      }
+    }
+  }
+
+  def askReload(sfs: SourceFile*) = askAndListen("reload", sfs.toList, compiler.askReload)
+  def askTypeAt(pos: Position) = askAndListen("type at", pos, compiler.askTypeAt)
+  def askTypeCompletion(pos: Position) = askAndListen("type at", pos, compiler.askTypeCompletion)
+  def askScopeCompletion(pos: Position) = askAndListen("type at", pos, compiler.askScopeCompletion)
+
+  val rand = new java.util.Random()
+
+  private def randomInverse(n: Int) = n / (rand.nextInt(n) + 1)
+
+  private def randomDecreasing(n: Int) = {
+    var r = rand.nextInt((1 to n).sum)
+    var limit = n
+    var result = 0
+    while (r > limit) {
+      result += 1
+      r -= limit
+      limit -= 1
+    }
+    result
+  }
+
+  def randomSourceFileIdx() = rand.nextInt(inputs.length)
+
+  def randomBatchesPerSourceFile(): Int = randomDecreasing(100)
+
+  def randomChangesPerBatch(): Int = randomInverse(50)
+
+  def randomPositionIn(sf: SourceFile) = rand.nextInt(sf.content.length)
+
+  def randomNumChars() = randomInverse(100)
+
+  def randomDelayMillis = randomInverse(10000)
+
+  class Change(sfidx: Int, start: Int, nchars: Int, toLeft: Boolean) {
+
+    private var pos = start
+    private var deleted: List[Char] = List()
+
+    override def toString =
+      "In "+inputs(sfidx)+" at "+start+" take "+nchars+" to "+
+      (if (toLeft) "left" else "right")
+
+    def deleteOne() {
+      val sf = inputs(sfidx)
+      deleted = sf.content(pos) :: deleted
+      val sf1 = new BatchSourceFile(sf.file, sf.content.take(pos) ++ sf.content.drop(pos + 1))
+      inputs(sfidx) = sf1
+      askReload(sf1)
+    }
+
+    def deleteAll() {
+      print("/"+nchars)
+      for (i <- 0 until nchars) {
+        if (toLeft) {
+          if (pos > 0 && pos <= inputs(sfidx).length) {
+            pos -= 1
+            deleteOne()
+          }
+        } else {
+          if (pos  < inputs(sfidx).length) {
+            deleteOne()
+          }
+        }
+      }
+    }
+
+    def insertAll() {
+      for (chr <- if (toLeft) deleted else deleted.reverse) {
+        val sf = inputs(sfidx)
+        val (pre, post) = sf./**/content splitAt pos
+        pos += 1
+        val sf1 = new BatchSourceFile(sf.file, pre ++ (chr +: post))
+        inputs(sfidx) = sf1
+        askReload(sf1)
+      }
+    }
+  }
+
+  val testComment = "/**/"
+
+  def testFileChanges(sfidx: Int) = {
+    lazy val testPositions: Seq[Int] = {
+      val sf = inputs(sfidx)
+      val buf = new ArrayBuffer[Int]
+      var pos = sf.content.indexOfSlice(testComment)
+      while (pos > 0) {
+        buf += pos
+        pos = sf.content.indexOfSlice(testComment, pos + 1)
+      }
+      buf
+    }
+    def otherTest() {
+      if (testPositions.nonEmpty) {
+        val pos = Position.offset(inputs(sfidx), testPositions(rand.nextInt(testPositions.length)))
+        rand.nextInt(3) match {
+          case 0 => askTypeAt(pos)
+          case 1 => askTypeCompletion(pos)
+          case 2 => askScopeCompletion(pos)
+        }
+      }
+    }
+    for (i <- 0 until randomBatchesPerSourceFile()) {
+      val changes = Vector.fill(/**/randomChangesPerBatch()) {
+        /**/
+        new Change(sfidx, randomPositionIn(inputs(sfidx)), randomNumChars(), rand.nextBoolean())
+      }
+      doTest(sfidx, changes, testPositions, otherTest) match {
+        case Some(errortrace) =>
+          println(errortrace)
+          minimize(errortrace)
+        case None =>
+      }
+    }
+  }
+
+  def doTest(sfidx: Int, changes: Seq[Change], testPositions: Seq[Int], otherTest: () => Unit): Option[ErrorTrace] = {
+    print("new round with "+changes.length+" changes:")
+    changes foreach (_.deleteAll())
+    otherTest()
+    def errorCount() = compiler.ask(() => reporter.ERROR.count)
+//    println("\nhalf test round: "+errorCount())
+    changes.view.reverse foreach (_.insertAll())
+    otherTest()
+    println("done test round: "+errorCount())
+    if (errorCount() != 0)
+      Some(ErrorTrace(sfidx, changes, reporter.infos, inputs(sfidx).content))
+    else
+      None
+  }
+
+  case class ErrorTrace(
+    sfidx: Int, changes: Seq[Change], infos: scala.collection.Set[reporter.Info], content: Array[Char]) {
+    override def toString =
+      "Sourcefile: "+inputs(sfidx)+
+      "\nChanges:\n  "+changes.mkString("\n  ")+
+      "\nErrors:\n  "+infos.mkString("\n  ")+
+      "\nContents:\n"+content.mkString
+  }
+
+  def minimize(etrace: ErrorTrace) {}
+
+  /**/
+  def run() {
+    askReload(inputs: _*)
+    for (i <- 0 until ntests)
+      testFileChanges(randomSourceFileIdx())
+  }
+}
+
+/* A program to do presentation compiler stress tests.
+ * Usage:
+ *
+ *  scala scala.tools.nsc.interactive.tests.Tester <n> <files>
+ *
+ * where <n> is the number of tests to be run and <files> is the set of files to test.
+ * This will do random deletions and re-insertions in any of the files.
+ * At places where an empty comment /**/ appears it will additionally perform random
+ * ask-types, type-completions, or scope-completions.
+ */
+object Tester {
+  def main(args: Array[String]) {
+    val settings = new Settings()
+    val (_, filenames) = settings.processArguments(args.toList.tail, processAll = true)
+    println("filenames = "+filenames)
+    val files = filenames.toArray map (str => new BatchSourceFile(AbstractFile.getFile(str)): SourceFile)
+    new Tester(args(0).toInt, files, settings).run()
+    sys.exit(0)
+  }
+}
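
For illustration, here is a minimal sketch of an input file for the stress tester described above. The file name Probe.scala and its contents are hypothetical (not part of this patch); the empty /**/ comments mark the positions at which Tester will additionally fire ask-type, type-completion, or scope-completion requests while it randomly deletes and re-inserts characters.

    object Probe {
      val xs = List(1, 2, 3)/**/
      def head = xs./**/headOption
    }

Under those assumptions it would be invoked roughly as `scala scala.tools.nsc.interactive.tests.Tester 50 Probe.scala`, i.e. fifty random change batches against that single file.
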
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala
rename to src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
new file mode 100644
index 0000000..343986a
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
@@ -0,0 +1,128 @@
+package scala.tools.nsc
+package interactive
+package tests.core
+
+import scala.reflect.internal.util.Position
+
+/** Set of core test definitions that are executed for each test run. */
+private[tests] trait CoreTestDefs
+	extends PresentationCompilerRequestsWorkingMode {
+
+  import scala.tools.nsc.interactive.Global
+
+  /** Ask the presentation compiler for completion at all locations
+   * (in all sources) where the defined `marker` is found. */
+  class TypeCompletionAction(override val compiler: Global)
+    extends PresentationCompilerTestDef
+    with AskTypeCompletionAt {
+
+    override def runTest() {
+      askAllSources(TypeCompletionMarker) { pos =>
+        askTypeCompletionAt(pos)
+      } { (pos, members) =>
+        withResponseDelimiter {
+          reporter.println("[response] askTypeCompletion at " + format(pos))
+          // we skip getClass because it changed signature between 1.5 and 1.6, so there is no
+          // universal check file that we can provide for this to work
+          reporter.println("retrieved %d members".format(members.size))
+          compiler ask { () =>
+            val filtered = members.filterNot(member => (member.sym.name string_== "getClass") || member.sym.isConstructor)
+            reporter println (filtered.map(_.forceInfoString).sorted mkString "\n")
+          }
+        }
+      }
+    }
+  }
+
+  /** Ask the presentation compiler for completion at all locations
+   * (in all sources) where the defined `marker` is found. */
+  class ScopeCompletionAction(override val compiler: Global)
+    extends PresentationCompilerTestDef
+    with AskScopeCompletionAt {
+
+    override def runTest() {
+      askAllSources(ScopeCompletionMarker) { pos =>
+        askScopeCompletionAt(pos)
+      } { (pos, members) =>
+        withResponseDelimiter {
+          reporter.println("[response] askScopeCompletion at " + format(pos))
+          try {
+            // exclude members not from source (they have no position), for more focused and self-contained tests.
+            def eligible(sym: compiler.Symbol) = sym.pos != compiler.NoPosition
+            val filtered = members.filter(member => eligible(member.sym))
+            
+            reporter.println("retrieved %d members".format(filtered.size))
+            compiler ask { () =>
+              reporter.println(filtered.map(_.forceInfoString).sorted mkString "\n")
+            }
+          } catch {
+            case t: Throwable =>
+              t.printStackTrace()
+          }
+
+        }
+      }
+    }
+  }
+
+  /** Ask the presentation compiler for type info at all locations
+   * (in all sources) where the defined `marker` is found. */
+  class TypeAction(override val compiler: Global)
+    extends PresentationCompilerTestDef
+    with AskTypeAt {
+
+    override def runTest() {
+      askAllSources(TypeMarker) { pos =>
+        askTypeAt(pos)
+      } { (pos, tree) =>
+        withResponseDelimiter {
+          reporter.println("[response] askTypeAt " + format(pos))
+          compiler.ask(() => reporter.println(tree))
+        }
+      }
+    }
+  }
+
+  /** Ask the presentation compiler for hyperlink at all locations
+   * (in all sources) where the defined `marker` is found. */
+  class HyperlinkAction(override val compiler: Global)
+    extends PresentationCompilerTestDef
+    with AskTypeAt
+    with AskTypeCompletionAt {
+
+    override def runTest() {
+      askAllSources(HyperlinkMarker) { pos =>
+        askTypeAt(pos)(NullReporter)
+      } { (pos, tree) =>
+        if(tree.symbol == compiler.NoSymbol || tree.symbol == null) {
+          reporter.println("\nNo symbol is associated with tree: "+tree)
+        }
+        else {
+          reporter.println("\naskHyperlinkPos for `" + tree.symbol.name + "` at " + format(pos) + " " + pos.source.file.name)
+          val r = new Response[Position]
+          // `tree.symbol.sourceFile` was discovered to be null when testing using virtpatmat on the akka presentation test, where a position had shifted to point to `Int`
+          // askHyperlinkPos for `Int` at (73,19) pi.scala --> class Int in package scala has null sourceFile!
+          val treePath = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.path else null
+          val treeName = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.name else null
+
+          sourceFiles.find(_.path == treePath) match {
+            case Some(source) =>
+              compiler.askLinkPos(tree.symbol, source, r)
+              r.get match {
+                case Left(pos) =>
+                  val resolvedPos = if (tree.symbol.pos.isDefined) tree.symbol.pos else pos
+                  withResponseDelimiter {
+                    reporter.println("[response] found askHyperlinkPos for `" + tree.symbol.name + "` at " + format(resolvedPos) + " " + tree.symbol.sourceFile.name)
+                  }
+                case Right(ex) =>
+                  ex.printStackTrace()
+              }
+            case None =>
+              reporter.println("[error] could not locate sourcefile `" + treeName + "`." +
+                " Hint: Does the looked up definition come from a binary?")
+          }
+        }
+      }
+    }
+  }
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
new file mode 100644
index 0000000..29e546f
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
@@ -0,0 +1,33 @@
+package scala.tools.nsc
+package interactive
+package tests.core
+
+import reporters.{Reporter => CompilerReporter}
+
+/** Trait encapsulating the creation of a presentation compiler's instance.*/
+private[tests] trait PresentationCompilerInstance extends TestSettings {
+  protected val settings = new Settings
+
+  protected val compilerReporter: CompilerReporter = new InteractiveReporter {
+    override def compiler = PresentationCompilerInstance.this.compiler
+  }
+
+  protected def createGlobal: Global = new Global(settings, compilerReporter)
+
+  protected lazy val compiler: Global = {
+    prepareSettings(settings)
+    createGlobal
+  }
+
+  /**
+   * Called before instantiating the presentation compiler's instance.
+   * You should provide an implementation of this method if you need
+   * to customize the `settings` used to instantiate the presentation compiler.
+   */
+  protected def prepareSettings(settings: Settings) {}
+
+  protected def printClassPath(implicit reporter: Reporter) {
+    reporter.println("\tbootClassPath: %s".format(settings.bootclasspath.value))
+    reporter.println("\tverbose: %b".format(settings.verbose.value))
+  }
+}
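
As a quick illustration of the `prepareSettings` hook documented above, a test could mix in this trait and adjust the compiler settings before the lazy `compiler` instance is forced. This is only a sketch under the assumption that it lives in the same tests.core package (the trait is private[tests]); the trait name is hypothetical.

    trait VerbosePcInstance extends PresentationCompilerInstance {
      // runs before the presentation compiler is instantiated
      override protected def prepareSettings(settings: Settings) {
        settings.verbose.value = true
      }
    }
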
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
rename to src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
new file mode 100644
index 0000000..4d5b4e1
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
@@ -0,0 +1,18 @@
+package scala.tools.nsc.interactive.tests.core
+
+import scala.reflect.internal.util.Position
+
+trait PresentationCompilerTestDef {
+
+  private[tests] def runTest(): Unit
+
+  protected def withResponseDelimiter(block: => Unit)(implicit reporter: Reporter) {
+    def printDelimiter() = reporter.println("=" * 80)
+    printDelimiter()
+    block
+    printDelimiter()
+  }
+
+  protected def format(pos: Position): String =
+    (if(pos.isDefined) "(%d,%d)".format(pos.line, pos.column) else "<no position>")
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala
rename to src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
new file mode 100644
index 0000000..40cfc11
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
@@ -0,0 +1,20 @@
+package scala.tools.nsc.interactive.tests.core
+
+import scala.reflect.internal.util.{SourceFile,BatchSourceFile}
+import scala.tools.nsc.io.{AbstractFile,Path}
+
+private[tests] object SourcesCollector {
+  type SourceFilter =  Path => Boolean
+
+  /**
+   * All files below `base` directory that pass the `filter`.
+   * With the default `filter` only .scala and .java files are collected.
+   */
+  def apply(base: Path, filter: SourceFilter): Array[SourceFile] = {
+    assert(base.isDirectory, base + " is not a directory")
+    base.walk.filter(filter).map(source).toList.toArray.sortBy(_.file.name)
+  }
+
+  private def source(file: Path): SourceFile = source(AbstractFile.getFile(file.toFile))
+  private def source(file: AbstractFile): SourceFile = new BatchSourceFile(file)
+}
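
A minimal usage sketch of SourcesCollector (the directory name is hypothetical, not part of this patch): collect every .scala file below a test directory by filtering on the path suffix.

    import scala.tools.nsc.io.Path

    val scalaSources =
      SourcesCollector(Path("test/files/presentation/src"), _.path.endsWith(".scala"))
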
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala
new file mode 100644
index 0000000..3f9b402
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala
@@ -0,0 +1,29 @@
+package scala.tools.nsc.interactive.tests.core
+
+case class DuplicateTestMarker(msg: String) extends Exception(msg)
+
+object TestMarker {
+  import scala.collection.mutable.Map
+  private val markers: Map[String, TestMarker] = Map.empty
+
+  private def checkForDuplicate(marker: TestMarker) {
+    markers.get(marker.marker) match {
+      case None => markers(marker.marker) = marker
+      case Some(otherMarker) =>
+        val msg = "Marker `%s` is already used by %s. Please choose a different marker for %s".format(marker.marker, otherMarker, marker)
+        throw new DuplicateTestMarker(msg)
+    }
+  }
+}
+
+abstract case class TestMarker(marker: String) {
+  TestMarker.checkForDuplicate(this)
+}
+
+object TypeCompletionMarker extends TestMarker("/*!*/")
+
+object ScopeCompletionMarker extends TestMarker("/*_*/")
+
+object TypeMarker extends TestMarker("/*?*/")
+
+object HyperlinkMarker extends TestMarker("/*#*/")
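
To make the markers above concrete, here is a hypothetical test source (not part of this patch) showing where each marker would be placed; the framework fires the corresponding request at every occurrence.

    object Markers {
      val xs = List(1, 2, 3)
      val n  = xs.length/*?*/        // TypeMarker: ask the type at this position
      val h  = xs./*!*/headOption    // TypeCompletionMarker: complete after the dot
      /*_*/                          // ScopeCompletionMarker: complete the enclosing scope
      def go = xs.headOption/*#*/    // HyperlinkMarker: ask for the definition's position
    }
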
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala
rename to src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala
rename to src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala
diff --git a/src/jline/build.sbt b/src/jline/build.sbt
index 4fc3bab..873f757 100644
--- a/src/jline/build.sbt
+++ b/src/jline/build.sbt
@@ -4,9 +4,9 @@ name := "jline"
 
 organization := "org.scala-lang"
 
-version := "2.10.0-SNAPSHOT"
+version := "2.11.0-SNAPSHOT"
 
-scalaVersion := "2.9.0-1"
+scalaVersion := "2.10.1"
 
 // Only need these because of weird testing jline issues.
 retrieveManaged := true
@@ -14,11 +14,11 @@ retrieveManaged := true
 parallelExecution in Test := false
 
 libraryDependencies ++= Seq(
-	"org.fusesource.jansi" % "jansi" % "1.4",
-	"com.novocode" % "junit-interface" % "0.7" % "test->default"
+	"org.fusesource.jansi" % "jansi" % "1.10",
+	"com.novocode" % "junit-interface" % "0.9" % "test->default"
 )
 
-javacOptions ++= Seq("-target", "1.5")
+javacOptions ++= Seq("-source", "1.5", "-target", "1.5")
 
 proguardOptions ++= Seq(
   "-dontshrink",
diff --git a/src/jline/manual-test.sh b/src/jline/manual-test.sh
index aa5131c..744e175 100755
--- a/src/jline/manual-test.sh
+++ b/src/jline/manual-test.sh
@@ -3,6 +3,7 @@
 # Apparently the jline bundled with sbt interferes with testing some
 # changes: for instance after changing the keybindings I kept seeing
 # failures until I realized what was happening and bypassed sbt, like this.
+CP=lib_managed/jars/com.novocode/junit-interface/junit-interface-0.9.jar:lib_managed/jars/junit/junit-dep/junit-dep-4.8.2.jar:lib_managed/jars/org.fusesource.jansi/jansi/jansi-1.10.jar:lib_managed/jars/org.hamcrest/hamcrest-core/hamcrest-core-1.1.jar:lib_managed/jars/org.scala-tools.testing/test-interface/test-interface-0.5.jar:target/scala-2.10/test-classes:target/scala-2.10/jline_2.10-2.11.0-SNAPSHOT.min.jar
 
-java -cp lib_managed/jar/com.novocode/junit-interface/junit-interface-0.5.jar:lib_managed/jar/junit/junit/junit-4.8.1.jar:lib_managed/jar/org.fusesource.jansi/jansi/jansi-1.4.jar:lib_managed/jar/org.scala-tools.testing/test-interface/test-interface-0.5.jar:target/scala-2.9.0.1/test-classes:target/scala-2.9.0.1/jline_2.9.0-1-2.10.0-SNAPSHOT.jar \
-org.junit.runner.JUnitCore scala.tools.jline.console.EditLineTest
+sbt proguard
+java -cp $CP org.junit.runner.JUnitCore scala.tools.jline.console.EditLineTest
diff --git a/src/jline/project/build.properties b/src/jline/project/build.properties
new file mode 100644
index 0000000..9b860e2
--- /dev/null
+++ b/src/jline/project/build.properties
@@ -0,0 +1 @@
+sbt.version=0.12.3
diff --git a/src/jline/project/plugins.sbt b/src/jline/project/plugins.sbt
new file mode 100644
index 0000000..9c13de9
--- /dev/null
+++ b/src/jline/project/plugins.sbt
@@ -0,0 +1,3 @@
+resolvers += Resolver.url("sbt-plugin-releases-scalasbt", url("http://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/"))(Resolver.ivyStylePatterns)
+
+addSbtPlugin("org.scala-sbt" % "xsbt-proguard-plugin" % "0.1.3")
diff --git a/src/jline/project/plugins/build.sbt b/src/jline/project/plugins/build.sbt
deleted file mode 100644
index 0e0f27b..0000000
--- a/src/jline/project/plugins/build.sbt
+++ /dev/null
@@ -1,5 +0,0 @@
-resolvers += "Proguard plugin repo" at "http://siasia.github.com/maven2"
-
-libraryDependencies <<= (libraryDependencies, appConfiguration) { (deps, app) =>
-  deps :+ "com.github.siasia" %% "xsbt-proguard-plugin" % app.provider.id.version
-}
diff --git a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java b/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
index 9df4270..a375b84 100644
--- a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
+++ b/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
@@ -1712,7 +1712,7 @@ public class ConsoleReader
     }
 
     /**
-     * Output a platform-dependant newline.
+     * Output a platform-dependent newline.
      */
     public final void println() throws IOException {
         print(CR);
diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala
index 07a9ffa..1c25989 100644
--- a/src/library-aux/scala/Any.scala
+++ b/src/library-aux/scala/Any.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2010, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library-aux/scala/AnyRef.scala b/src/library-aux/scala/AnyRef.scala
index 7d8b9f9..8c1862e 100644
--- a/src/library-aux/scala/AnyRef.scala
+++ b/src/library-aux/scala/AnyRef.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2010, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -76,8 +76,8 @@ trait AnyRef extends Any {
    *  @param    arg0  the object to compare against this object for equality.
    *  @return         `true` if the receiver object is equivalent to the argument; `false` otherwise.
    */
-  final def ==(that: AnyRef): Boolean =
-    if (this eq null) that eq null
+  final def ==(that: Any): Boolean =
+    if (this eq null) that.asInstanceOf[AnyRef] eq null
     else this equals that
 
   /** Create a copy of the receiver object.
diff --git a/src/library-aux/scala/Nothing.scala b/src/library-aux/scala/Nothing.scala
index eed6066..57f6fac 100644
--- a/src/library-aux/scala/Nothing.scala
+++ b/src/library-aux/scala/Nothing.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2010, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library-aux/scala/Null.scala b/src/library-aux/scala/Null.scala
index 7455e78..931beb2 100644
--- a/src/library-aux/scala/Null.scala
+++ b/src/library-aux/scala/Null.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2010, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/rootdoc.txt b/src/library/rootdoc.txt
index 0722d80..4795a47 100644
--- a/src/library/rootdoc.txt
+++ b/src/library/rootdoc.txt
@@ -2,21 +2,54 @@ This is the documentation for the Scala standard library.
 
 == Package structure ==
 
-The [[scala]] package contains core types.
-
-[[scala.collection `scala.collection`]] and its subpackages contain a collections framework with higher-order functions for manipulation. Both [[scala.collection.immutable `scala.collection.immutable`]] and [[scala.collection.mutable `scala.collection.mutable`]] data structures are available, with immutable as the default. The [[scala.collection.parallel `scala.collection.parallel`]] collections provide automatic parallel operation.
-
-Other important packages include:
-
-  - [[scala.actors `scala.actors`]] - Concurrency framework inspired by Erlang.
-  - [[scala.io     `scala.io`]]     - Input and output.
-  - [[scala.math   `scala.math`]]   - Basic math functions and additional numeric types.
-  - [[scala.sys    `scala.sys`]]    - Interaction with other processes and the operating system.
-  - [[scala.util.matching `scala.util.matching`]] - Pattern matching in text using regular expressions.
-  - [[scala.util.parsing.combinator `scala.util.parsing.combinator`]] - Composable combinators for parsing.
-  - [[scala.xml    `scala.xml`]]    - XML parsing, manipulation, and serialization.
-
-Many other packages exist.  See the complete list on the left.
+The [[scala]] package contains core types like [[scala.Int `Int`]], [[scala.Float `Float`]], [[scala.Array `Array`]]
+or [[scala.Option `Option`]] which are accessible in all Scala compilation units without explicit qualification or
+imports.
+
+Notable packages include:
+
+  - [[scala.collection    `scala.collection`]] and its sub-packages contain Scala's collections framework
+    - [[scala.collection.immutable `scala.collection.immutable`]] - Immutable, sequential data-structures such as
+      [[scala.collection.immutable.Vector `Vector`]], [[scala.collection.immutable.List `List`]],
+      [[scala.collection.immutable.Range `Range`]], [[scala.collection.immutable.HashMap `HashMap`]] or
+      [[scala.collection.immutable.HashSet `HashSet`]]
+    - [[scala.collection.mutable `scala.collection.mutable`]] - Mutable, sequential data-structures such as
+      [[scala.collection.mutable.ArrayBuffer `ArrayBuffer`]],
+      [[scala.collection.mutable.StringBuilder `StringBuilder`]],
+      [[scala.collection.mutable.HashMap `HashMap`]] or [[scala.collection.mutable.HashSet `HashSet`]]
+    - [[scala.collection.concurrent `scala.collection.concurrent`]] - Mutable, concurrent data-structures such as
+      [[scala.collection.concurrent.TrieMap `TrieMap`]]
+    - [[scala.collection.parallel.immutable `scala.collection.parallel.immutable`]] - Immutable, parallel
+      data-structures such as [[scala.collection.parallel.immutable.ParVector `ParVector`]],
+      [[scala.collection.parallel.immutable.ParRange `ParRange`]],
+      [[scala.collection.parallel.immutable.ParHashMap `ParHashMap`]] or
+      [[scala.collection.parallel.immutable.ParHashSet `ParHashSet`]]
+    - [[scala.collection.parallel.mutable `scala.collection.parallel.mutable`]] - Mutable, parallel
+      data-structures such as [[scala.collection.parallel.mutable.ParArray `ParArray`]],
+      [[scala.collection.parallel.mutable.ParHashMap `ParHashMap`]],
+      [[scala.collection.parallel.mutable.ParTrieMap `ParTrieMap`]] or
+      [[scala.collection.parallel.mutable.ParHashSet `ParHashSet`]]
+  - [[scala.concurrent    `scala.concurrent`]] - Primitives for concurrent programming such as
+    [[scala.concurrent.Future `Futures`]] and [[scala.concurrent.Promise `Promises`]]
+  - [[scala.io            `scala.io`]]     - Input and output operations
+  - [[scala.math          `scala.math`]]   - Basic math functions and additional numeric types like
+    [[scala.math.BigInt `BigInt`]] and [[scala.math.BigDecimal `BigDecimal`]]
+  - [[scala.sys           `scala.sys`]]    - Interaction with other processes and the operating system
+  - [[scala.util.matching `scala.util.matching`]] - [[scala.util.matching.Regex Regular expressions]]
+
+Other packages exist.  See the complete list on the left.
+
+Additional parts of the standard library are shipped as separate libraries. These include:
+
+  - [[scala.reflect       `scala.reflect`]]   - Scala's reflection API (scala-reflect.jar)
+  - [[scala.xml           `scala.xml`]]    - XML parsing, manipulation, and serialization (scala-xml.jar)
+  - [[scala.swing         `scala.swing`]]  - A convenient wrapper around Java's GUI framework called Swing (scala-swing.jar)
+  - [[scala.util.continuations `scala.util.continuations`]] - Delimited continuations using continuation-passing-style
+    (scala-continuations-library.jar, scala-continuations-plugin.jar)
+  - [[scala.util.parsing  `scala.util.parsing`]] - [[scala.util.parsing.combinator Parser combinators]], including an
+    example implementation of a [[scala.util.parsing.json JSON parser]] (scala-parser-combinators.jar)
+  - [[scala.actors        `scala.actors`]]    - Actor-based concurrency (deprecated and replaced by Akka actors,
+    scala-actors.jar)
 
 == Automatic imports ==
 
diff --git a/src/library/scala/AnyVal.scala b/src/library/scala/AnyVal.scala
index 0d6ba24..ff62948 100644
--- a/src/library/scala/AnyVal.scala
+++ b/src/library/scala/AnyVal.scala
@@ -33,7 +33,7 @@ package scala
  *
  * User-defined value classes which avoid object allocation...
  *
- *   - must have a single, public `val` parameter that is the underlying runtime representation.
+ *   - must have a single `val` parameter that is the underlying runtime representation.
  *   - can define `def`s, but no `val`s, `var`s, or nested `trait`s, `class`es or `object`s.
  *   - typically extend no other trait apart from `AnyVal`.
  *   - cannot be used in type tests or pattern matching.
@@ -52,6 +52,6 @@ package scala
  * as well as in [[http://docs.scala-lang.org/sips/pending/value-classes.html SIP-15: Value Classes]],
  * the Scala Improvement Proposal.
  */
-abstract class AnyVal extends Any with NotNull {
+abstract class AnyVal extends Any {
   def getClass(): Class[_ <: AnyVal] = null
 }
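
A minimal sketch (hypothetical class, not part of this patch) of a user-defined value class that follows the rules listed above: a single `val` parameter as the underlying representation, only `def` members, and no parent other than `AnyVal`.

    class Meters(val value: Double) extends AnyVal {
      def +(other: Meters): Meters = new Meters(value + other.value)
      def toCentimeters: Double    = value * 100
    }
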
diff --git a/src/library/scala/App.scala b/src/library/scala/App.scala
index 90a8977..6224532 100644
--- a/src/library/scala/App.scala
+++ b/src/library/scala/App.scala
@@ -28,9 +28,10 @@ import scala.collection.mutable.ListBuffer
  *  functionality, which means that fields of the object will not have been initialized
  *  before the main method has been executed.'''''
  *
- *  It should also be noted that the `main` method will not normally need to be overridden:
- *  the purpose is to turn the whole class body into the “main method”. You should only
- *  chose to override it if you know what you are doing.
+ *  It should also be noted that the `main` method should not be overridden:
+ *  the whole class body becomes the “main method”.
+ *
+ *  Future versions of this trait will no longer extend `DelayedInit`.
  *
  *  @author  Martin Odersky
  *  @version 2.1, 15/02/2011
@@ -39,10 +40,12 @@ trait App extends DelayedInit {
 
   /** The time when the execution of this program started, in milliseconds since 1
     * January 1970 UTC. */
+  @deprecatedOverriding("executionStart should not be overridden", "2.11.0")
   val executionStart: Long = currentTime
 
   /** The command line arguments passed to the application's `main` method.
    */
+  @deprecatedOverriding("args should not be overridden", "2.11.0")
   protected def args: Array[String] = _args
 
   private var _args: Array[String] = _
@@ -56,16 +59,18 @@ trait App extends DelayedInit {
    *  themselves define a `delayedInit` method.
    *  @param body the initialization code to be stored for later execution
    */
+  @deprecated("The delayedInit mechanism will disappear.", "2.11.0")
   override def delayedInit(body: => Unit) {
     initCode += (() => body)
   }
 
   /** The main method.
-   *  This stores all argument so that they can be retrieved with `args`
-   *  and the executes all initialization code segments in the order they were
-   *  passed to `delayedInit`
+   *  This stores all arguments so that they can be retrieved with `args`
+   *  and then executes all initialization code segments in the order in which
+   *  they were passed to `delayedInit`.
    *  @param args the arguments passed to the main method
    */
+  @deprecatedOverriding("main should not be overridden", "2.11.0")
   def main(args: Array[String]) = {
     this._args = args
     for (proc <- initCode) proc()
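
A minimal sketch (hypothetical object, not part of this patch) of the intended use of `App` after this change: the object body itself is the program, `args` supplies the command-line arguments, and `main` is not overridden.

    object EchoArgs extends App {
      // the whole body below is executed by App's main method
      println(args.mkString(" "))
    }
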
diff --git a/src/library/scala/Application.scala b/src/library/scala/Application.scala
deleted file mode 100644
index e7db0d2..0000000
--- a/src/library/scala/Application.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala
-
-import scala.compat.Platform.currentTime
-
-/** The `Application` trait can be used to quickly turn objects
- *  into executable programs, but is ''not recommended''.
- *  Here is an example:
- *  {{{
- *  object Main extends Application {
- *    Console.println("Hello World!")
- *  }
- *  }}}
- *  Here, object `Main` inherits the `main` method of `Application`.
- *  The body of the `Main` object defines the main program. This technique
- *  does not work if the main program depends on command-line arguments
- *  (which are not accessible with the technique presented here).
- *
- *  It is possible to time the execution of objects that inherit from class
- *  `Application` by setting the global `scala.time`
- *  property. Here is an example for benchmarking object `Main`:
- *  {{{
- *  java -Dscala.time Main
- *  }}}
- *  In practice the `Application` trait has a number of serious pitfalls:
- *
- *  - Threaded code that references the object will block until static
- *    initialization is complete.  However, because the entire execution
- *    of an `object` extending `Application` takes place during
- *    static initialization, concurrent code will ''always'' deadlock if
- *    it must synchronize with the enclosing object.
- *  - As described above, there is no way to obtain the
- *    command-line arguments because all code in body of an `object`
- *    extending `Application` is run as part of the static initialization
- *    which occurs before `Application`'s `main` method
- *    even begins execution.
- *  - Static initializers are run only once during program execution, and
- *    JVM authors usually assume their execution to be relatively short.
- *    Therefore, certain JVM configurations may become confused, or simply
- *    fail to optimize or JIT the code in the body of an `object` extending
- *    `Application`.  This can lead to a significant performance degradation.
- *
- *  It is recommended to use the [[scala.App]] trait instead.
- *  {{{
- *  object Main {
- *    def main(args: Array[String]) {
- *      //..
- *    }
- *  }
- *  }}}
- *
- *  @author  Matthias Zenger
- *  @version 1.0, 10/09/2003
- */
- at deprecated("use App instead", "2.9.0")
-trait Application {
-
-  /** The time when the execution of this program started,
-    * in milliseconds since 1 January 1970 UTC. */
-  val executionStart: Long = currentTime
-
-  /** The default main method.
-   *
-   *  @param args the arguments passed to the main method
-   */
-  def main(args: Array[String]) {
-    if (util.Properties propIsSet "scala.time") {
-      val total = currentTime - executionStart
-      Console.println("[total " + total + "ms]")
-    }
-  }
-}
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index b9f5180..6ab82d9 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -240,9 +240,9 @@ object Array extends FallbackArrayBuilding {
    */
   def concat[T: ClassTag](xss: Array[T]*): Array[T] = {
     val b = newBuilder[T]
-    b.sizeHint(xss.map(_.size).sum)
+    b.sizeHint(xss.map(_.length).sum)
     for (xs <- xss) b ++= xs
-    b.result
+    b.result()
   }
 
   /** Returns an array that contains the results of some element computation a number
@@ -267,7 +267,7 @@ object Array extends FallbackArrayBuilding {
       b += elem
       i += 1
     }
-    b.result
+    b.result()
   }
 
   /** Returns a two-dimensional array that contains the results of some element
@@ -331,7 +331,7 @@ object Array extends FallbackArrayBuilding {
       b += f(i)
       i += 1
     }
-    b.result
+    b.result()
   }
 
   /** Returns a two-dimensional array containing values of a given function
@@ -399,14 +399,14 @@ object Array extends FallbackArrayBuilding {
   def range(start: Int, end: Int, step: Int): Array[Int] = {
     if (step == 0) throw new IllegalArgumentException("zero step")
     val b = newBuilder[Int]
-    b.sizeHint(immutable.Range.count(start, end, step, false))
+    b.sizeHint(immutable.Range.count(start, end, step, isInclusive = false))
 
     var i = start
     while (if (step < 0) end < i else i < end) {
       b += i
       i += step
     }
-    b.result
+    b.result()
   }
 
   /** Returns an array containing repeated applications of a function to a start value.
@@ -431,7 +431,7 @@ object Array extends FallbackArrayBuilding {
         b += acc
       }
     }
-    b.result
+    b.result()
   }
 
   /** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`.
diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala
index 440e546..53b4fb2 100644
--- a/src/library/scala/Boolean.scala
+++ b/src/library/scala/Boolean.scala
@@ -1,17 +1,17 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
 \*                                                                      */
 
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
 
 package scala
 
-import scala.language.implicitConversions
-
 /** `Boolean` (equivalent to Java's `boolean` primitive type) is a
  *  subtype of [[scala.AnyVal]]. Instances of `Boolean` are not
  *  represented by an object in the underlying runtime system.
@@ -20,18 +20,16 @@ import scala.language.implicitConversions
  *  which provides useful non-primitive operations.
  */
 final abstract class Boolean private extends AnyVal {
-  /**
-   * Negates a Boolean expression.
-   *
-   * - `!a` results in `false` if and only if `a` evaluates to `true` and
-   * - `!a` results in `true` if and only if `a` evaluates to `false`.
-   *
-   * @return the negated expression
-   */
+  /** Negates a Boolean expression.
+    *
+    * - `!a` results in `false` if and only if `a` evaluates to `true` and
+    * - `!a` results in `true` if and only if `a` evaluates to `false`.
+    *
+    * @return the negated expression
+    */
   def unary_! : Boolean
 
-  /**
-    * Compares two Boolean expressions and returns `true` if they evaluate to the same value.
+  /** Compares two Boolean expressions and returns `true` if they evaluate to the same value.
     *
     * `a == b` returns `true` if and only if
     *  - `a` and `b` are `true` or
@@ -48,8 +46,7 @@ final abstract class Boolean private extends AnyVal {
     */
   def !=(x: Boolean): Boolean
 
-  /**
-    * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+  /** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
     *
     * `a || b` returns `true` if and only if
     *  - `a` is `true` or
@@ -62,8 +59,7 @@ final abstract class Boolean private extends AnyVal {
     */
   def ||(x: Boolean): Boolean
 
-  /**
-    * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+  /** Compares two Boolean expressions and returns `true` if both of them evaluate to true.
     *
     * `a && b` returns `true` if and only if
     *  - `a` and `b` are `true`.
@@ -78,8 +74,7 @@ final abstract class Boolean private extends AnyVal {
   // def ||(x: => Boolean): Boolean
   // def &&(x: => Boolean): Boolean
 
-  /**
-    * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+  /** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
     *
     * `a | b` returns `true` if and only if
     *  - `a` is `true` or
@@ -90,8 +85,7 @@ final abstract class Boolean private extends AnyVal {
     */
   def |(x: Boolean): Boolean
 
-  /**
-    * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+  /** Compares two Boolean expressions and returns `true` if both of them evaluate to true.
     *
     * `a & b` returns `true` if and only if
     *  - `a` and `b` are `true`.
@@ -100,8 +94,7 @@ final abstract class Boolean private extends AnyVal {
     */
   def &(x: Boolean): Boolean
 
-  /**
-    * Compares two Boolean expressions and returns `true` if they evaluate to a different value.
+  /** Compares two Boolean expressions and returns `true` if they evaluate to a different value.
     *
     * `a ^ b` returns `true` if and only if
     *  - `a` is `true` and `b` is `false` or
@@ -116,6 +109,8 @@ object Boolean extends AnyValCompanion {
 
   /** Transform a value type into a boxed reference type.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToBoolean`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the Boolean to be boxed
    *  @return     a java.lang.Boolean offering `x` as its underlying value.
    */
@@ -125,14 +120,15 @@ object Boolean extends AnyValCompanion {
    *  method is not typesafe: it accepts any Object, but will throw
    *  an exception if the argument is not a java.lang.Boolean.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToBoolean`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the java.lang.Boolean to be unboxed.
    *  @throws     ClassCastException  if the argument is not a java.lang.Boolean
    *  @return     the Boolean resulting from calling booleanValue() on `x`
    */
   def unbox(x: java.lang.Object): Boolean = x.asInstanceOf[java.lang.Boolean].booleanValue()
 
-  /** The String representation of the scala.Boolean companion object.
-   */
+  /** The String representation of the scala.Boolean companion object. */
   override def toString = "object scala.Boolean"
 
 }
diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala
index df0d2c7..413231c 100644
--- a/src/library/scala/Byte.scala
+++ b/src/library/scala/Byte.scala
@@ -1,17 +1,17 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
 \*                                                                      */
 
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
 
 package scala
 
-import scala.language.implicitConversions
-
 /** `Byte`, an 8-bit signed integer (equivalent to Java's `byte` primitive type) is a
  *  subtype of [[scala.AnyVal]]. Instances of `Byte` are not
  *  represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Byte private extends AnyVal {
  * }}}
  */
   def unary_~ : Int
-  /**
- * Returns this value, unmodified.
- */
+  /** Returns this value, unmodified. */
   def unary_+ : Int
-  /**
- * Returns the negation of this value.
- */
+  /** Returns the negation of this value. */
   def unary_- : Int
 
   def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Byte private extends AnyVal {
   */
   def >>(x: Long): Int
 
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Short): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Char): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Int): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Long): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Float): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Short): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Char): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Int): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Long): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Float): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Short): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Char): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Int): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Long): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Float): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Double): Boolean
 
   /**
@@ -447,165 +359,95 @@ final abstract class Byte private extends AnyVal {
   */
   def ^(x: Long): Long
 
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Byte): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Short): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Char): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Int): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Long): Long
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Float): Float
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Double): Double
 
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Byte): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Short): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Char): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Int): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Long): Long
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Float): Float
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Double): Double
 
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Byte): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Short): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Char): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Int): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Long): Long
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Float): Float
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Double): Double
 
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Byte): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Short): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Char): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Int): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Long): Long
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Float): Float
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Double): Double
 
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Byte): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Short): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Char): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Int): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Long): Long
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Float): Float
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Double): Double
 
   override def getClass(): Class[Byte] = null
 }
 
 object Byte extends AnyValCompanion {
-  /** The smallest value representable as a Byte.
-   */
+  /** The smallest value representable as a Byte. */
   final val MinValue = java.lang.Byte.MIN_VALUE
 
-  /** The largest value representable as a Byte.
-   */
+  /** The largest value representable as a Byte. */
   final val MaxValue = java.lang.Byte.MAX_VALUE
 
   /** Transform a value type into a boxed reference type.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToByte`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the Byte to be boxed
    *  @return     a java.lang.Byte offering `x` as its underlying value.
    */
@@ -615,18 +457,18 @@ object Byte extends AnyValCompanion {
    *  method is not typesafe: it accepts any Object, but will throw
    *  an exception if the argument is not a java.lang.Byte.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToByte`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the java.lang.Byte to be unboxed.
    *  @throws     ClassCastException  if the argument is not a java.lang.Byte
    *  @return     the Byte resulting from calling byteValue() on `x`
    */
   def unbox(x: java.lang.Object): Byte = x.asInstanceOf[java.lang.Byte].byteValue()
 
-  /** The String representation of the scala.Byte companion object.
-   */
+  /** The String representation of the scala.Byte companion object. */
   override def toString = "object scala.Byte"
-
-  /** Language mandated coercions from Byte to "wider" types.
-   */
+  /** Language mandated coercions from Byte to "wider" types. */
+  import scala.language.implicitConversions
   implicit def byte2short(x: Byte): Short = x.toShort
   implicit def byte2int(x: Byte): Int = x.toInt
   implicit def byte2long(x: Byte): Long = x.toLong
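A minimal sketch of what the Byte changes above amount to in practice -- the widening coercions now declared inside the companion object and the boxing helpers whose scaladoc now points at BoxesRunTime. Illustrative only, using standard 2.11 library calls; it is not part of the patch:

    object ByteCoercionDemo extends App {
      val b: Byte = 42
      // Language-mandated widening coercions (byte2short, byte2int, byte2long):
      val s: Short = b
      val i: Int   = b
      val l: Long  = b
      // Boxing/unboxing via the companion; at runtime these delegate to
      // scala.runtime.BoxesRunTime, as noted in the updated scaladoc.
      val boxed: java.lang.Byte = Byte.box(b)
      val back: Byte            = Byte.unbox(boxed)
      println(s"$s $i $l $boxed $back")   // 42 42 42 42 42
    }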
diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala
index 1fa0c0d..ec2d48c 100644
--- a/src/library/scala/Char.scala
+++ b/src/library/scala/Char.scala
@@ -1,17 +1,17 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
 \*                                                                      */
 
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
 
 package scala
 
-import scala.language.implicitConversions
-
 /** `Char`, a 16-bit unsigned integer (equivalent to Java's `char` primitive type) is a
  *  subtype of [[scala.AnyVal]]. Instances of `Char` are not
  *  represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Char private extends AnyVal {
  * }}}
  */
   def unary_~ : Int
-  /**
- * Returns this value, unmodified.
- */
+  /** Returns this value, unmodified. */
   def unary_+ : Int
-  /**
- * Returns the negation of this value.
- */
+  /** Returns the negation of this value. */
   def unary_- : Int
 
   def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Char private extends AnyVal {
   */
   def >>(x: Long): Int
 
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Short): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Char): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Int): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Long): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Float): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Short): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Char): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Int): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Long): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Float): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Short): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Char): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Int): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Long): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Float): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Double): Boolean
 
   /**
@@ -447,165 +359,95 @@ final abstract class Char private extends AnyVal {
   */
   def ^(x: Long): Long
 
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Byte): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Short): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Char): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Int): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Long): Long
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Float): Float
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Double): Double
 
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Byte): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Short): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Char): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Int): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Long): Long
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Float): Float
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Double): Double
 
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Byte): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Short): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Char): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Int): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Long): Long
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Float): Float
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Double): Double
 
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Byte): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Short): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Char): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Int): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Long): Long
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Float): Float
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Double): Double
 
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Byte): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Short): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Char): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Int): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Long): Long
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Float): Float
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Double): Double
 
   override def getClass(): Class[Char] = null
 }
 
 object Char extends AnyValCompanion {
-  /** The smallest value representable as a Char.
-   */
+  /** The smallest value representable as a Char. */
   final val MinValue = java.lang.Character.MIN_VALUE
 
-  /** The largest value representable as a Char.
-   */
+  /** The largest value representable as a Char. */
   final val MaxValue = java.lang.Character.MAX_VALUE
 
   /** Transform a value type into a boxed reference type.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToCharacter`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the Char to be boxed
    *  @return     a java.lang.Character offering `x` as its underlying value.
    */
@@ -615,18 +457,18 @@ object Char extends AnyValCompanion {
    *  method is not typesafe: it accepts any Object, but will throw
    *  an exception if the argument is not a java.lang.Character.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToChar`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the java.lang.Character to be unboxed.
    *  @throws     ClassCastException  if the argument is not a java.lang.Character
    *  @return     the Char resulting from calling charValue() on `x`
    */
   def unbox(x: java.lang.Object): Char = x.asInstanceOf[java.lang.Character].charValue()
 
-  /** The String representation of the scala.Char companion object.
-   */
+  /** The String representation of the scala.Char companion object. */
   override def toString = "object scala.Char"
-
-  /** Language mandated coercions from Char to "wider" types.
-   */
+  /** Language mandated coercions from Char to "wider" types. */
+  import scala.language.implicitConversions
   implicit def char2int(x: Char): Int = x.toInt
   implicit def char2long(x: Char): Long = x.toLong
   implicit def char2float(x: Char): Float = x.toFloat
diff --git a/src/library/scala/Console.scala b/src/library/scala/Console.scala
index 5b01550..37127a9 100644
--- a/src/library/scala/Console.scala
+++ b/src/library/scala/Console.scala
@@ -6,16 +6,12 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
 package scala
 
-import java.io.{BufferedReader, InputStream, InputStreamReader,
-                IOException, OutputStream, PrintStream, Reader}
-import java.text.MessageFormat
+import java.io.{ BufferedReader, InputStream, InputStreamReader, OutputStream, PrintStream, Reader }
+import scala.io.{ AnsiColor, StdIn }
 import scala.util.DynamicVariable
 
-
 /** Implements functionality for
  *  printing Scala values on the terminal as well as reading specific values.
  *  Also defines constants for marking up text on ANSI terminals.
@@ -23,60 +19,16 @@ import scala.util.DynamicVariable
  *  @author  Matthias Zenger
  *  @version 1.0, 03/09/2003
  */
-object Console {
-
-  /** Foreground color for ANSI black */
-  final val BLACK      = "\033[30m"
-  /** Foreground color for ANSI red */
-  final val RED        = "\033[31m"
-  /** Foreground color for ANSI green */
-  final val GREEN      = "\033[32m"
-  /** Foreground color for ANSI yellow */
-  final val YELLOW     = "\033[33m"
-  /** Foreground color for ANSI blue */
-  final val BLUE       = "\033[34m"
-  /** Foreground color for ANSI magenta */
-  final val MAGENTA    = "\033[35m"
-  /** Foreground color for ANSI cyan */
-  final val CYAN       = "\033[36m"
-  /** Foreground color for ANSI white */
-  final val WHITE      = "\033[37m"
-
-  /** Background color for ANSI black */
-  final val BLACK_B    = "\033[40m"
-  /** Background color for ANSI red */
-  final val RED_B      = "\033[41m"
-  /** Background color for ANSI green */
-  final val GREEN_B    = "\033[42m"
-  /** Background color for ANSI yellow */
-  final val YELLOW_B   = "\033[43m"
-  /** Background color for ANSI blue */
-  final val BLUE_B     = "\033[44m"
-  /** Background color for ANSI magenta */
-  final val MAGENTA_B  = "\033[45m"
-  /** Background color for ANSI cyan */
-  final val CYAN_B     = "\033[46m"
-  /** Background color for ANSI white */
-  final val WHITE_B    = "\033[47m"
-
-  /** Reset ANSI styles */
-  final val RESET      = "\033[0m"
-  /** ANSI bold */
-  final val BOLD       = "\033[1m"
-  /** ANSI underlines */
-  final val UNDERLINED = "\033[4m"
-  /** ANSI blink */
-  final val BLINK      = "\033[5m"
-  /** ANSI reversed */
-  final val REVERSED   = "\033[7m"
-  /** ANSI invisible */
-  final val INVISIBLE  = "\033[8m"
-
+object Console extends DeprecatedConsole with AnsiColor {
   private val outVar = new DynamicVariable[PrintStream](java.lang.System.out)
   private val errVar = new DynamicVariable[PrintStream](java.lang.System.err)
-  private val inVar = new DynamicVariable[BufferedReader](
+  private val inVar  = new DynamicVariable[BufferedReader](
     new BufferedReader(new InputStreamReader(java.lang.System.in)))
 
+  protected def setOutDirect(out: PrintStream): Unit  = outVar.value = out
+  protected def setErrDirect(err: PrintStream): Unit  = errVar.value = err
+  protected def setInDirect(in: BufferedReader): Unit = inVar.value = in
+
   /** The default output, can be overridden by `setOut` */
   def out = outVar.value
   /** The default error, can be overridden by `setErr` */
@@ -84,12 +36,6 @@ object Console {
   /** The default input, can be overridden by `setIn` */
   def in = inVar.value
 
-  /** Sets the default output stream.
-   *
-   *  @param out the new output stream.
-   */
-  def setOut(out: PrintStream) { outVar.value = out }
-
   /** Sets the default output stream for the duration
    *  of execution of one thunk.
    *
@@ -106,13 +52,6 @@ object Console {
   def withOut[T](out: PrintStream)(thunk: =>T): T =
     outVar.withValue(out)(thunk)
 
-  /** Sets the default output stream.
-   *
-   *  @param out the new output stream.
-   */
-  def setOut(out: OutputStream): Unit =
-    setOut(new PrintStream(out))
-
   /** Sets the default output stream for the duration
    *  of execution of one thunk.
    *
@@ -125,13 +64,6 @@ object Console {
   def withOut[T](out: OutputStream)(thunk: =>T): T =
     withOut(new PrintStream(out))(thunk)
 
-
-  /** Sets the default error stream.
-   *
-   *  @param err the new error stream.
-   */
-  def setErr(err: PrintStream) { errVar.value = err }
-
   /** Set the default error stream for the duration
    *  of execution of one thunk.
    *  @example {{{
@@ -147,13 +79,6 @@ object Console {
   def withErr[T](err: PrintStream)(thunk: =>T): T =
     errVar.withValue(err)(thunk)
 
-  /** Sets the default error stream.
-   *
-   *  @param err the new error stream.
-   */
-  def setErr(err: OutputStream): Unit =
-    setErr(new PrintStream(err))
-
   /** Sets the default error stream for the duration
    *  of execution of one thunk.
    *
@@ -166,15 +91,6 @@ object Console {
   def withErr[T](err: OutputStream)(thunk: =>T): T =
     withErr(new PrintStream(err))(thunk)
 
-
-  /** Sets the default input stream.
-   *
-   *  @param reader specifies the new input stream.
-   */
-  def setIn(reader: Reader) {
-    inVar.value = new BufferedReader(reader)
-  }
-
   /** Sets the default input stream for the duration
    *  of execution of one thunk.
    *
@@ -195,14 +111,6 @@ object Console {
   def withIn[T](reader: Reader)(thunk: =>T): T =
     inVar.withValue(new BufferedReader(reader))(thunk)
 
-  /** Sets the default input stream.
-   *
-   *  @param in the new input stream.
-   */
-  def setIn(in: InputStream) {
-    setIn(new InputStreamReader(in))
-  }
-
   /** Sets the default input stream for the duration
    *  of execution of one thunk.
    *
@@ -251,218 +159,64 @@ object Console {
    *  @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments
    */
   def printf(text: String, args: Any*) { out.print(text format (args : _*)) }
+}
 
-  /** Read a full line from the default input.  Returns `null` if the end of the
-   * input stream has been reached.
-   *
-   * @return the string read from the terminal or null if the end of stream was reached.
-   */
-  def readLine(): String = in.readLine()
-
-  /** Print formatted text to the default output and read a full line from the default input.
-   *  Returns `null` if the end of the input stream has been reached.
-   *
-   *  @param text the format of the text to print out, as in `printf`.
-   *  @param args the parameters used to instantiate the format, as in `printf`.
-   *  @return the string read from the default input
-   */
-  def readLine(text: String, args: Any*): String = {
-    printf(text, args: _*)
-    readLine()
-  }
-
-  /** Reads a boolean value from an entire line of the default input.
-   *  Has a fairly liberal interpretation of the input.
-   *
-   *  @return the boolean value read, or false if it couldn't be converted to a boolean
-   *  @throws java.io.EOFException if the end of the input stream has been reached.
-   */
-  def readBoolean(): Boolean = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s.toLowerCase() match {
-        case "true" => true
-        case "t" => true
-        case "yes" => true
-        case "y" => true
-        case _ => false
-      }
-  }
-
-  /** Reads a byte value from an entire line of the default input.
-   *
-   *  @return the Byte that was read
-   *  @throws java.io.EOFException if the end of the
-   *  input stream has been reached.
-   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Byte
-   */
-  def readByte(): Byte = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s.toByte
-  }
-
-  /** Reads a short value from an entire line of the default input.
-   *
-   *  @return the short that was read
-   *  @throws java.io.EOFException if the end of the
-   *  input stream has been reached.
-   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Short
-   */
-  def readShort(): Short = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s.toShort
-  }
-
-  /** Reads a char value from an entire line of the default input.
-   *
-   *  @return the Char that was read
-   *  @throws java.io.EOFException if the end of the
-   *  input stream has been reached.
-   *  @throws java.lang.StringIndexOutOfBoundsException if the line read from default input was empty
-   */
-  def readChar(): Char = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s charAt 0
-  }
-
-  /** Reads an int value from an entire line of the default input.
-   *
-   *  @return the Int that was read
-   *  @throws java.io.EOFException if the end of the
-   *  input stream has been reached.
-   *  @throws java.lang.NumberFormatException if the value couldn't be converted to an Int
-   */
-  def readInt(): Int = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s.toInt
-  }
-
-  /** Reads an long value from an entire line of the default input.
-   *
-   *  @return the Long that was read
-   *  @throws java.io.EOFException if the end of the
-   *  input stream has been reached.
-   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Long
-   */
-  def readLong(): Long = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s.toLong
-  }
+private[scala] abstract class DeprecatedConsole {
+  self: Console.type =>
+
+  /** Internal usage only. */
+  protected def setOutDirect(out: PrintStream): Unit
+  protected def setErrDirect(err: PrintStream): Unit
+  protected def setInDirect(in: BufferedReader): Unit
+
+  @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readBoolean(): Boolean                     = StdIn.readBoolean()
+  @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readByte(): Byte                           = StdIn.readByte()
+  @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readChar(): Char                           = StdIn.readChar()
+  @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readDouble(): Double                       = StdIn.readDouble()
+  @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readFloat(): Float                         = StdIn.readFloat()
+  @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readInt(): Int                             = StdIn.readInt()
+  @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readLine(): String                         = StdIn.readLine()
+  @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readLine(text: String, args: Any*): String = StdIn.readLine(text, args: _*)
+  @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readLong(): Long                           = StdIn.readLong()
+  @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readShort(): Short                         = StdIn.readShort()
+  @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readf(format: String): List[Any]           = StdIn.readf(format)
+  @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readf1(format: String): Any                = StdIn.readf1(format)
+  @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readf2(format: String): (Any, Any)         = StdIn.readf2(format)
+  @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readf3(format: String): (Any, Any, Any)    = StdIn.readf3(format)
 
-  /** Reads a float value from an entire line of the default input.
-   *  @return the Float that was read.
-   *  @throws java.io.EOFException if the end of the
-   *  input stream has been reached.
-   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Float
+  /** Sets the default output stream.
    *
+   *  @param out the new output stream.
    */
-  def readFloat(): Float = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s.toFloat
-  }
+  @deprecated("Use withOut", "2.11.0") def setOut(out: PrintStream): Unit = setOutDirect(out)
 
-  /** Reads a double value from an entire line of the default input.
+  /** Sets the default output stream.
    *
-   *  @return the Double that was read.
-   *  @throws java.io.EOFException if the end of the
-   *  input stream has been reached.
-   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Float
+   *  @param out the new output stream.
    */
-  def readDouble(): Double = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s.toDouble
-  }
+  @deprecated("Use withOut", "2.11.0") def setOut(out: OutputStream): Unit = setOutDirect(new PrintStream(out))
 
-  /** Reads in some structured input (from the default input), specified by
-   *  a format specifier. See class `java.text.MessageFormat` for details of
-   *  the format specification.
+  /** Sets the default error stream.
    *
-   *  @param format the format of the input.
-   *  @return a list of all extracted values.
-   *  @throws java.io.EOFException if the end of the input stream has been
-   *          reached.
+   *  @param err the new error stream.
    */
-  def readf(format: String): List[Any] = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      textComponents(new MessageFormat(format).parse(s))
-  }
+  @deprecated("Use withErr", "2.11.0") def setErr(err: PrintStream): Unit = setErrDirect(err)
 
-  /** Reads in some structured input (from the default input), specified by
-   *  a format specifier, returning only the first value extracted, according
-   *  to the format specification.
+  /** Sets the default error stream.
    *
-   *  @param format format string, as accepted by `readf`.
-   *  @return The first value that was extracted from the input
+   *  @param err the new error stream.
    */
-  def readf1(format: String): Any = readf(format).head
+  @deprecated("Use withErr", "2.11.0") def setErr(err: OutputStream): Unit = setErrDirect(new PrintStream(err))
 
-  /** Reads in some structured input (from the default input), specified
-   *  by a format specifier, returning only the first two values extracted,
-   *  according to the format specification.
+  /** Sets the default input stream.
    *
-   *  @param format format string, as accepted by `readf`.
-   *  @return A [[scala.Tuple2]] containing the first two values extracted
+   *  @param reader specifies the new input stream.
    */
-  def readf2(format: String): (Any, Any) = {
-    val res = readf(format)
-    (res.head, res.tail.head)
-  }
+  @deprecated("Use withIn", "2.11.0") def setIn(reader: Reader): Unit = setInDirect(new BufferedReader(reader))
 
-  /** Reads in some structured input (from the default input), specified
-   *  by a format specifier, returning only the first three values extracted,
-   *  according to the format specification.
+  /** Sets the default input stream.
    *
-   *  @param format format string, as accepted by `readf`.
-   *  @return A [[scala.Tuple3]] containing the first three values extracted
+   *  @param in the new input stream.
    */
-  def readf3(format: String): (Any, Any, Any) = {
-    val res = readf(format)
-    (res.head, res.tail.head, res.tail.tail.head)
-  }
-
-  private def textComponents(a: Array[AnyRef]): List[Any] = {
-    var i: Int = a.length - 1
-    var res: List[Any] = Nil
-    while (i >= 0) {
-      res = (a(i) match {
-        case x: java.lang.Boolean   => x.booleanValue()
-        case x: java.lang.Byte      => x.byteValue()
-        case x: java.lang.Short     => x.shortValue()
-        case x: java.lang.Character => x.charValue()
-        case x: java.lang.Integer   => x.intValue()
-        case x: java.lang.Long      => x.longValue()
-        case x: java.lang.Float     => x.floatValue()
-        case x: java.lang.Double    => x.doubleValue()
-        case x => x
-      }) :: res;
-      i -= 1
-    }
-    res
-  }
+  @deprecated("Use withIn", "2.11.0") def setIn(in: InputStream): Unit = setInDirect(new BufferedReader(new InputStreamReader(in)))
 }
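The Console rewrite above moves the read* helpers to scala.io.StdIn and deprecates the setOut/setErr/setIn mutators in favour of the scoped withOut/withErr/withIn combinators. A minimal sketch of the new style, using only standard library calls (not part of the patch):

    import java.io.ByteArrayOutputStream
    import scala.io.StdIn

    object ConsoleDemo extends App {
      val sink = new ByteArrayOutputStream()
      // Scoped redirection instead of the deprecated Console.setOut:
      Console.withOut(sink) {
        println("captured")                    // written to sink, not to stdout
      }
      Console.err.println(sink.toString.trim)  // prints: captured

      // Line reading now lives in scala.io.StdIn; Console.readLine is deprecated:
      // val name = StdIn.readLine("name: ")
    }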
diff --git a/src/library/scala/DelayedInit.scala b/src/library/scala/DelayedInit.scala
index 12793e6..7f976b0 100644
--- a/src/library/scala/DelayedInit.scala
+++ b/src/library/scala/DelayedInit.scala
@@ -8,7 +8,7 @@
 
 package scala
 
-/** Classes and objects (but note, not traits) inheriting the `DelayedInit` 
+/** Classes and objects (but note, not traits) inheriting the `DelayedInit`
  *  marker trait will have their initialization code rewritten as follows:
  *  `code` becomes `delayedInit(code)`.
  *
@@ -32,7 +32,7 @@ package scala
  *      val c = new C
  *    }
  *  }}}
- *  
+ *
  *  Should result in the following being printed:
  *  {{{
  *    dummy text, printed before initialization of C
@@ -43,6 +43,7 @@ package scala
  *
  *  @author  Martin Odersky
  */
+ at deprecated("DelayedInit semantics can be surprising. Support for `App` will continue.\nSee the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0-RC1", "2.11.0")
 trait DelayedInit {
   def delayedInit(x: => Unit): Unit
 }
\ No newline at end of file
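Since the trait is now deprecated (App, which extends it, keeps working), here is a minimal sketch of the initialization rewriting described in its scaladoc -- illustrative only, not part of the patch:

    trait Greeting extends DelayedInit {
      // Each subclass constructor body is handed to delayedInit as a thunk.
      def delayedInit(body: => Unit): Unit = {
        println("before init")
        body
        println("after init")
      }
    }

    class Hello extends Greeting {
      println("initializing Hello")   // rewritten by the compiler to delayedInit(...)
    }

    // new Hello prints: before init / initializing Hello / after init
    // (compiling this against 2.11 emits the deprecation warning added above)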
diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala
index f058d7c..a58fa3e 100644
--- a/src/library/scala/Double.scala
+++ b/src/library/scala/Double.scala
@@ -1,17 +1,17 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
 \*                                                                      */
 
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
 
 package scala
 
-import scala.language.implicitConversions
-
 /** `Double`, a 64-bit IEEE-754 floating point number (equivalent to Java's `double` primitive type) is a
  *  subtype of [[scala.AnyVal]]. Instances of `Double` are not
  *  represented by an object in the underlying runtime system.
@@ -28,334 +28,176 @@ final abstract class Double private extends AnyVal {
   def toFloat: Float
   def toDouble: Double
 
-  /**
- * Returns this value, unmodified.
- */
+  /** Returns this value, unmodified. */
   def unary_+ : Double
-  /**
- * Returns the negation of this value.
- */
+  /** Returns the negation of this value. */
   def unary_- : Double
 
   def +(x: String): String
 
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Short): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Char): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Int): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Long): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Float): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Short): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Char): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Int): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Long): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Float): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Short): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Char): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Int): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Long): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Float): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Double): Boolean
 
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Byte): Double
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Short): Double
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Char): Double
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Int): Double
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Long): Double
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Float): Double
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Double): Double
 
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Byte): Double
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Short): Double
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Char): Double
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Int): Double
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Long): Double
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Float): Double
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Double): Double
 
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Byte): Double
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Short): Double
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Char): Double
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Int): Double
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Long): Double
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Float): Double
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Double): Double
 
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Byte): Double
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Short): Double
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Char): Double
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Int): Double
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Long): Double
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Float): Double
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Double): Double
 
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Byte): Double
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Short): Double
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Char): Double
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Int): Double
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Long): Double
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Float): Double
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Double): Double
 
   override def getClass(): Class[Double] = null
@@ -382,6 +224,8 @@ object Double extends AnyValCompanion {
 
   /** Transform a value type into a boxed reference type.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToDouble`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the Double to be boxed
    *  @return     a java.lang.Double offering `x` as its underlying value.
    */
@@ -391,14 +235,15 @@ object Double extends AnyValCompanion {
    *  method is not typesafe: it accepts any Object, but will throw
    *  an exception if the argument is not a java.lang.Double.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToDouble`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the java.lang.Double to be unboxed.
    *  @throws     ClassCastException  if the argument is not a java.lang.Double
    *  @return     the Double resulting from calling doubleValue() on `x`
    */
   def unbox(x: java.lang.Object): Double = x.asInstanceOf[java.lang.Double].doubleValue()
 
-  /** The String representation of the scala.Double companion object.
-   */
+  /** The String representation of the scala.Double companion object. */
   override def toString = "object scala.Double"
 }
 
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index 47d7840..d4b9c17 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -11,7 +11,7 @@ package scala
 import scala.collection.{ mutable, immutable, generic, SortedSetLike, AbstractSet }
 import java.lang.reflect.{ Modifier, Method => JMethod, Field => JField }
 import scala.reflect.NameTransformer._
-import java.util.regex.Pattern
+import scala.util.matching.Regex
 
 /** Defines a finite set of values specific to the enumeration. Typically
  *  these values enumerate all possible forms something can take and provide
@@ -56,14 +56,6 @@ abstract class Enumeration (initial: Int) extends Serializable {
 
   def this() = this(0)
 
-  @deprecated("Names should be specified individually or discovered via reflection", "2.10.0")
-  def this(initial: Int, names: String*) = {
-    this(initial)
-    this.nextName = names.iterator
-  }
-  @deprecated("Names should be specified individually or discovered via reflection", "2.10.0")
-  def this(names: String*) = this(0, names: _*)
-
   /* Note that `readResolve` cannot be private, since otherwise
      the JVM does not invoke it when deserializing subclasses. */
   protected def readResolve(): AnyRef = thisenum.getClass.getField(MODULE_INSTANCE_NAME).get(null)
@@ -71,8 +63,8 @@ abstract class Enumeration (initial: Int) extends Serializable {
   /** The name of this enumeration.
    */
   override def toString =
-    ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split 
-       Pattern.quote(NAME_JOIN_STRING)).last
+    ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split
+       Regex.quote(NAME_JOIN_STRING)).last
 
   /** The mapping from the integer used to identify values to the actual
     * values. */
@@ -103,7 +95,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
   protected var nextName: Iterator[String] = _
 
   private def nextNameOrNull =
-    if (nextName != null && nextName.hasNext) nextName.next else null
+    if (nextName != null && nextName.hasNext) nextName.next() else null
 
   /** The highest integer amongst those used to identify values in this
     * enumeration. */
@@ -126,7 +118,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
    *
    * @param  s an `Enumeration` name
    * @return   the `Value` of this `Enumeration` if its name matches `s`
-   * @throws   java.util.NoSuchElementException if no `Value` with a matching
+   * @throws   NoSuchElementException if no `Value` with a matching
    *           name is in this `Enumeration`
    */
   final def withName(s: String): Value = values.find(_.toString == s).get
@@ -262,7 +254,8 @@ abstract class Enumeration (initial: Int) extends Serializable {
     def contains(v: Value) = nnIds contains (v.id - bottomId)
     def + (value: Value) = new ValueSet(nnIds + (value.id - bottomId))
     def - (value: Value) = new ValueSet(nnIds - (value.id - bottomId))
-    def iterator = nnIds.iterator map (id => thisenum.apply(id + bottomId))
+    def iterator = nnIds.iterator map (id => thisenum.apply(bottomId + id))
+    override def keysIteratorFrom(start: Value) = nnIds keysIteratorFrom start.id  map (id => thisenum.apply(bottomId + id))
     override def stringPrefix = thisenum + ".ValueSet"
     /** Creates a bit mask for the zero-adjusted ids in this set as a
      *  new array of longs */
@@ -284,7 +277,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
     def newBuilder: mutable.Builder[Value, ValueSet] = new mutable.Builder[Value, ValueSet] {
       private[this] val b = new mutable.BitSet
       def += (x: Value) = { b += (x.id - bottomId); this }
-      def clear() = b.clear
+      def clear() = b.clear()
       def result() = new ValueSet(b.toImmutable)
     }
     /** The implicit builder for value sets */
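
The Enumeration hunks above swap `java.util.regex.Pattern.quote` for the new `scala.util.matching.Regex.quote` and add `keysIteratorFrom` to `ValueSet`. A minimal sketch of how those APIs behave on Scala 2.11 (illustrative only, not part of the patch; the `Color` enumeration is invented for the example):

    import scala.util.matching.Regex

    // Hypothetical enumeration used only to exercise the APIs touched above.
    object Color extends Enumeration {
      val Red, Green, Blue = Value
    }

    object EnumerationDemo {
      def main(args: Array[String]): Unit = {
        // withName throws NoSuchElementException if no value has a matching name
        println(Color.withName("Green"))                             // Green

        // the new ValueSet.keysIteratorFrom starts at the given value (inclusive)
        println(Color.values.keysIteratorFrom(Color.Green).toList)   // List(Green, Blue)

        // Regex.quote replaces Pattern.quote in Enumeration.toString
        println(Regex.quote("Outer$Inner"))                          // \QOuter$Inner\E
      }
    }
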
diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala
index d942ace..3c59057 100644
--- a/src/library/scala/Float.scala
+++ b/src/library/scala/Float.scala
@@ -1,17 +1,17 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
 \*                                                                      */
 
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
 
 package scala
 
-import scala.language.implicitConversions
-
 /** `Float`, a 32-bit IEEE-754 floating point number (equivalent to Java's `float` primitive type) is a
  *  subtype of [[scala.AnyVal]]. Instances of `Float` are not
  *  represented by an object in the underlying runtime system.
@@ -28,334 +28,176 @@ final abstract class Float private extends AnyVal {
   def toFloat: Float
   def toDouble: Double
 
-  /**
- * Returns this value, unmodified.
- */
+  /** Returns this value, unmodified. */
   def unary_+ : Float
-  /**
- * Returns the negation of this value.
- */
+  /** Returns the negation of this value. */
   def unary_- : Float
 
   def +(x: String): String
 
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Short): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Char): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Int): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Long): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Float): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Short): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Char): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Int): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Long): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Float): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Short): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Char): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Int): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Long): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Float): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Double): Boolean
 
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Byte): Float
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Short): Float
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Char): Float
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Int): Float
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Long): Float
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Float): Float
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Double): Double
 
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Byte): Float
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Short): Float
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Char): Float
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Int): Float
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Long): Float
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Float): Float
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Double): Double
 
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Byte): Float
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Short): Float
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Char): Float
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Int): Float
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Long): Float
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Float): Float
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Double): Double
 
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Byte): Float
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Short): Float
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Char): Float
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Int): Float
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Long): Float
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Float): Float
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Double): Double
 
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Byte): Float
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Short): Float
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Char): Float
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Int): Float
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Long): Float
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Float): Float
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Double): Double
 
   override def getClass(): Class[Float] = null
@@ -382,6 +224,8 @@ object Float extends AnyValCompanion {
 
   /** Transform a value type into a boxed reference type.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToFloat`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the Float to be boxed
    *  @return     a java.lang.Float offering `x` as its underlying value.
    */
@@ -391,18 +235,18 @@ object Float extends AnyValCompanion {
    *  method is not typesafe: it accepts any Object, but will throw
    *  an exception if the argument is not a java.lang.Float.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToFloat`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the java.lang.Float to be unboxed.
    *  @throws     ClassCastException  if the argument is not a java.lang.Float
    *  @return     the Float resulting from calling floatValue() on `x`
    */
   def unbox(x: java.lang.Object): Float = x.asInstanceOf[java.lang.Float].floatValue()
 
-  /** The String representation of the scala.Float companion object.
-   */
+  /** The String representation of the scala.Float companion object. */
   override def toString = "object scala.Float"
-
-  /** Language mandated coercions from Float to "wider" types.
-   */
+  /** Language mandated coercions from Float to "wider" types. */
+  import scala.language.implicitConversions
   implicit def float2double(x: Float): Double = x.toDouble
 }
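
The Float companion hunks above document that `box`/`unbox` delegate to `scala.runtime.BoxesRunTime` and move the `float2double` coercion next to a local `implicitConversions` import. A minimal round-trip sketch (illustrative only, not part of the patch):

    object FloatCompanionDemo {
      def main(args: Array[String]): Unit = {
        val boxed: java.lang.Float = Float.box(1.5f)  // backed by BoxesRunTime.boxToFloat
        val unboxed: Float = Float.unbox(boxed)       // ClassCastException for a non-Float argument
        val widened: Double = unboxed                 // language-mandated Float-to-Double coercion
        println((boxed, unboxed, widened))            // (1.5,1.5,1.5)
      }
    }
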
 
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index 5f87b38..e13aaad 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -1,12 +1,12 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
 \*                                                                      */
 // GENERATED CODE: DO NOT EDIT.
-// genprod generated these sources at: Tue Aug 07 11:54:44 CEST 2012
+// genprod generated these sources at: Sun Sep 15 20:42:00 CEST 2013
 
 package scala
 
diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala
index 22393c6..620dcc1 100644
--- a/src/library/scala/Function1.scala
+++ b/src/library/scala/Function1.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -32,7 +32,7 @@ package scala
 
  */
 @annotation.implicitNotFound(msg = "No implicit view available from ${T1} => ${R}.")
-trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double/*, scala.AnyRef*/) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double/*, scala.AnyRef*/) +R] extends AnyRef { self =>
+trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
   /** Apply the body of this function to the argument.
    *  @return   the result of function application.
    */
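
The Function1 hunk above drops the commented-out `scala.AnyRef` from the `@specialized` lists, leaving `Function1` specialized only on the primitive argument and result types shown. A small sketch of composing such functions (illustrative, not part of the patch):

    object Function1Demo {
      def main(args: Array[String]): Unit = {
        val inc: Int => Int     = _ + 1
        val half: Int => Double = _ / 2.0
        // andThen applies inc first, then half; compose would chain in the other order
        val pipeline: Int => Double = inc andThen half
        println(pipeline(5))  // 3.0
      }
    }
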
diff --git a/src/library/scala/Function10.scala b/src/library/scala/Function10.scala
index f7e5d41..7789970 100644
--- a/src/library/scala/Function10.scala
+++ b/src/library/scala/Function10.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function11.scala b/src/library/scala/Function11.scala
index 53742bf..d4276f3 100644
--- a/src/library/scala/Function11.scala
+++ b/src/library/scala/Function11.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function12.scala b/src/library/scala/Function12.scala
index e349d90..dfa8bcf 100644
--- a/src/library/scala/Function12.scala
+++ b/src/library/scala/Function12.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function13.scala b/src/library/scala/Function13.scala
index 10ec64b..5404c20 100644
--- a/src/library/scala/Function13.scala
+++ b/src/library/scala/Function13.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function14.scala b/src/library/scala/Function14.scala
index 82dd409..3145290 100644
--- a/src/library/scala/Function14.scala
+++ b/src/library/scala/Function14.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function15.scala b/src/library/scala/Function15.scala
index be5fbee..309ef53 100644
--- a/src/library/scala/Function15.scala
+++ b/src/library/scala/Function15.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function16.scala b/src/library/scala/Function16.scala
index 7a185b3..c4cb107 100644
--- a/src/library/scala/Function16.scala
+++ b/src/library/scala/Function16.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function17.scala b/src/library/scala/Function17.scala
index 94e0000..005ae2a 100644
--- a/src/library/scala/Function17.scala
+++ b/src/library/scala/Function17.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function18.scala b/src/library/scala/Function18.scala
index a3ee677..371630d 100644
--- a/src/library/scala/Function18.scala
+++ b/src/library/scala/Function18.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function19.scala b/src/library/scala/Function19.scala
index 038dcbb..95c60a4 100644
--- a/src/library/scala/Function19.scala
+++ b/src/library/scala/Function19.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala
index 0794a40..5690adb 100644
--- a/src/library/scala/Function2.scala
+++ b/src/library/scala/Function2.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function20.scala b/src/library/scala/Function20.scala
index 727684d..a93f999 100644
--- a/src/library/scala/Function20.scala
+++ b/src/library/scala/Function20.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function21.scala b/src/library/scala/Function21.scala
index 2441278..7ebbb06 100644
--- a/src/library/scala/Function21.scala
+++ b/src/library/scala/Function21.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function22.scala b/src/library/scala/Function22.scala
index 1f70b19..e5a3d83 100644
--- a/src/library/scala/Function22.scala
+++ b/src/library/scala/Function22.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function3.scala b/src/library/scala/Function3.scala
index bbbde82..850290d 100644
--- a/src/library/scala/Function3.scala
+++ b/src/library/scala/Function3.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function4.scala b/src/library/scala/Function4.scala
index f100860..c9ac6df 100644
--- a/src/library/scala/Function4.scala
+++ b/src/library/scala/Function4.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function5.scala b/src/library/scala/Function5.scala
index cba9b6c..360a460 100644
--- a/src/library/scala/Function5.scala
+++ b/src/library/scala/Function5.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function6.scala b/src/library/scala/Function6.scala
index 0b8addf..d30877e 100644
--- a/src/library/scala/Function6.scala
+++ b/src/library/scala/Function6.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function7.scala b/src/library/scala/Function7.scala
index 2098658..b19caf2 100644
--- a/src/library/scala/Function7.scala
+++ b/src/library/scala/Function7.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function8.scala b/src/library/scala/Function8.scala
index 08a480d..3aff0b0 100644
--- a/src/library/scala/Function8.scala
+++ b/src/library/scala/Function8.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Function9.scala b/src/library/scala/Function9.scala
index 2e35f79..f80ccf4 100644
--- a/src/library/scala/Function9.scala
+++ b/src/library/scala/Function9.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala
index ae36413..72e5ebf 100644
--- a/src/library/scala/Int.scala
+++ b/src/library/scala/Int.scala
@@ -1,17 +1,17 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
 \*                                                                      */
 
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
 
 package scala
 
-import scala.language.implicitConversions
-
 /** `Int`, a 32-bit signed integer (equivalent to Java's `int` primitive type) is a
  *  subtype of [[scala.AnyVal]]. Instances of `Int` are not
  *  represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Int private extends AnyVal {
  * }}}
  */
   def unary_~ : Int
-  /**
- * Returns this value, unmodified.
- */
+  /** Returns this value, unmodified. */
   def unary_+ : Int
-  /**
- * Returns the negation of this value.
- */
+  /** Returns the negation of this value. */
   def unary_- : Int
 
   def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Int private extends AnyVal {
   */
   def >>(x: Long): Int
 
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Short): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Char): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Int): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Long): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Float): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Short): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Char): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Int): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Long): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Float): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Short): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Char): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Int): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Long): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Float): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Double): Boolean
 
   /**
@@ -447,165 +359,95 @@ final abstract class Int private extends AnyVal {
   */
   def ^(x: Long): Long
 
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Byte): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Short): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Char): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Int): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Long): Long
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Float): Float
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Double): Double
 
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Byte): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Short): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Char): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Int): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Long): Long
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Float): Float
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Double): Double
 
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Byte): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Short): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Char): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Int): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Long): Long
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Float): Float
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Double): Double
 
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Byte): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Short): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Char): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Int): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Long): Long
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Float): Float
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Double): Double
 
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Byte): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Short): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Char): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Int): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Long): Long
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Float): Float
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Double): Double
 
   override def getClass(): Class[Int] = null
 }
 
 object Int extends AnyValCompanion {
-  /** The smallest value representable as a Int.
-   */
+  /** The smallest value representable as a Int. */
   final val MinValue = java.lang.Integer.MIN_VALUE
 
-  /** The largest value representable as a Int.
-   */
+  /** The largest value representable as a Int. */
   final val MaxValue = java.lang.Integer.MAX_VALUE
 
   /** Transform a value type into a boxed reference type.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToInteger`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the Int to be boxed
    *  @return     a java.lang.Integer offering `x` as its underlying value.
    */
@@ -615,18 +457,18 @@ object Int extends AnyValCompanion {
    *  method is not typesafe: it accepts any Object, but will throw
    *  an exception if the argument is not a java.lang.Integer.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToInt`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the java.lang.Integer to be unboxed.
    *  @throws     ClassCastException  if the argument is not a java.lang.Integer
    *  @return     the Int resulting from calling intValue() on `x`
    */
   def unbox(x: java.lang.Object): Int = x.asInstanceOf[java.lang.Integer].intValue()
 
-  /** The String representation of the scala.Int companion object.
-   */
+  /** The String representation of the scala.Int companion object. */
   override def toString = "object scala.Int"
-
-  /** Language mandated coercions from Int to "wider" types.
-   */
+  /** Language mandated coercions from Int to "wider" types. */
+  import scala.language.implicitConversions
   implicit def int2long(x: Int): Long = x.toLong
   implicit def int2float(x: Int): Float = x.toFloat
   implicit def int2double(x: Int): Double = x.toDouble
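
As with Float, the Int companion hunks above point `box`/`unbox` at `scala.runtime.BoxesRunTime` and keep the widening conversions `int2long`, `int2float` and `int2double` next to a local `implicitConversions` import. A minimal sketch (illustrative only, not part of the patch):

    object IntCompanionDemo {
      def main(args: Array[String]): Unit = {
        println((Int.MinValue, Int.MaxValue))  // (-2147483648,2147483647)

        val asLong: Long     = 42              // int2long
        val asFloat: Float   = 42              // int2float
        val asDouble: Double = 42              // int2double
        println((asLong, asFloat, asDouble))   // (42,42.0,42.0)
      }
    }
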
diff --git a/src/library/scala/Long.scala b/src/library/scala/Long.scala
index 4ee9383..1bd0fe8 100644
--- a/src/library/scala/Long.scala
+++ b/src/library/scala/Long.scala
@@ -1,17 +1,17 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
 \*                                                                      */
 
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
 
 package scala
 
-import scala.language.implicitConversions
-
 /** `Long`, a 64-bit signed integer (equivalent to Java's `long` primitive type) is a
  *  subtype of [[scala.AnyVal]]. Instances of `Long` are not
  *  represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Long private extends AnyVal {
  * }}}
  */
   def unary_~ : Long
-  /**
- * Returns this value, unmodified.
- */
+  /** Returns this value, unmodified. */
   def unary_+ : Long
-  /**
- * Returns the negation of this value.
- */
+  /** Returns the negation of this value. */
   def unary_- : Long
 
   def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Long private extends AnyVal {
   */
   def >>(x: Long): Long
 
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Short): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Char): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Int): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Long): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Float): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Short): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Char): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Int): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Long): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Float): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Short): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Char): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Int): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Long): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Float): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Double): Boolean
 
   /**
@@ -447,165 +359,95 @@ final abstract class Long private extends AnyVal {
   */
   def ^(x: Long): Long
 
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Byte): Long
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Short): Long
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Char): Long
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Int): Long
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Long): Long
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Float): Float
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Double): Double
 
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Byte): Long
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Short): Long
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Char): Long
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Int): Long
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Long): Long
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Float): Float
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Double): Double
 
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Byte): Long
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Short): Long
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Char): Long
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Int): Long
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Long): Long
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Float): Float
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Double): Double
 
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Byte): Long
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Short): Long
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Char): Long
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Int): Long
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Long): Long
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Float): Float
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Double): Double
 
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Byte): Long
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Short): Long
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Char): Long
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Int): Long
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Long): Long
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Float): Float
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Double): Double
 
   override def getClass(): Class[Long] = null
 }
 
 object Long extends AnyValCompanion {
-  /** The smallest value representable as a Long.
-   */
+  /** The smallest value representable as a Long. */
   final val MinValue = java.lang.Long.MIN_VALUE
 
-  /** The largest value representable as a Long.
-   */
+  /** The largest value representable as a Long. */
   final val MaxValue = java.lang.Long.MAX_VALUE
 
   /** Transform a value type into a boxed reference type.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToLong`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the Long to be boxed
    *  @return     a java.lang.Long offering `x` as its underlying value.
    */
@@ -615,18 +457,18 @@ object Long extends AnyValCompanion {
    *  method is not typesafe: it accepts any Object, but will throw
    *  an exception if the argument is not a java.lang.Long.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToLong`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the java.lang.Long to be unboxed.
    *  @throws     ClassCastException  if the argument is not a java.lang.Long
    *  @return     the Long resulting from calling longValue() on `x`
    */
   def unbox(x: java.lang.Object): Long = x.asInstanceOf[java.lang.Long].longValue()
 
-  /** The String representation of the scala.Long companion object.
-   */
+  /** The String representation of the scala.Long companion object. */
   override def toString = "object scala.Long"
-
-  /** Language mandated coercions from Long to "wider" types.
-   */
+  /** Language mandated coercions from Long to "wider" types. */
+  import scala.language.implicitConversions
   implicit def long2float(x: Long): Float = x.toFloat
   implicit def long2double(x: Long): Double = x.toDouble
 }
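The Long.scala hunks above document box/unbox as delegating to scala.runtime.BoxesRunTime and keep the language-mandated widening conversions on the companion object. A minimal sketch of how those members are used from client code; the object and value names below are illustrative, only the Long companion calls come from the file itself:

    object LongCompanionDemo {
      def main(args: Array[String]): Unit = {
        // Boxing delegates to java.lang.Long via scala.runtime.BoxesRunTime.boxToLong.
        val boxed: java.lang.Long = Long.box(42L)

        // unbox accepts any Object but throws ClassCastException for
        // anything that is not a java.lang.Long.
        val unboxed: Long = Long.unbox(boxed)

        // The widening conversions live on the companion object.
        val asFloat: Float   = Long.long2float(unboxed)
        val asDouble: Double = Long.long2double(unboxed)

        println(s"$boxed $unboxed $asFloat $asDouble") // 42 42 42.0 42.0
      }
    }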
diff --git a/src/library/scala/LowPriorityImplicits.scala b/src/library/scala/LowPriorityImplicits.scala
deleted file mode 100644
index bf6e494..0000000
--- a/src/library/scala/LowPriorityImplicits.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala
-
-import scala.collection.{ mutable, immutable, generic }
-import mutable.WrappedArray
-import immutable.WrappedString
-import generic.CanBuildFrom
-import scala.language.implicitConversions
-
-/** The `LowPriorityImplicits` class provides implicit values that
- *  are valid in all Scala compilation units without explicit qualification,
- *  but that are partially overridden by higher-priority conversions in object
- *  `Predef`.
- *
- *  @author  Martin Odersky
- *  @since 2.8
- */
-class LowPriorityImplicits {
-  /** We prefer the java.lang.* boxed types to these wrappers in
-   *  any potential conflicts.  Conflicts do exist because the wrappers
-   *  need to implement ScalaNumber in order to have a symmetric equals
-   *  method, but that implies implementing java.lang.Number as well.
-   *
-   *  Note - these are inlined because they are value classes, but
-   *  the call to xxxWrapper is not eliminated even though it does nothing.
-   *  Even inlined, every call site does a no-op retrieval of Predef's MODULE$
-   *  because maybe loading Predef has side effects!
-   */
-  @inline implicit def byteWrapper(x: Byte)       = new runtime.RichByte(x)
-  @inline implicit def shortWrapper(x: Short)     = new runtime.RichShort(x)
-  @inline implicit def intWrapper(x: Int)         = new runtime.RichInt(x)
-  @inline implicit def charWrapper(c: Char)       = new runtime.RichChar(c)
-  @inline implicit def longWrapper(x: Long)       = new runtime.RichLong(x)
-  @inline implicit def floatWrapper(x: Float)     = new runtime.RichFloat(x)
-  @inline implicit def doubleWrapper(x: Double)   = new runtime.RichDouble(x)
-  @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x)
-
-  // These eight implicits exist solely to exclude Null from the domain of
-  // the boxed types, so that e.g. "var x: Int = null" is a compile time
-  // error rather than a delayed null pointer exception by way of the
-  // conversion from java.lang.Integer.  If defined in the same file as
-  // Integer2int, they would have higher priority because Null is a subtype
-  // of Integer.  We balance that out and create conflict by moving the
-  // definition into the superclass.
-  //
-  // Caution: do not adjust tightrope tension without safety goggles in place.
-  implicit def Byte2byteNullConflict(x: Null): Byte          = sys.error("value error")
-  implicit def Short2shortNullConflict(x: Null): Short       = sys.error("value error")
-  implicit def Character2charNullConflict(x: Null): Char     = sys.error("value error")
-  implicit def Integer2intNullConflict(x: Null): Int         = sys.error("value error")
-  implicit def Long2longNullConflict(x: Null): Long          = sys.error("value error")
-  implicit def Float2floatNullConflict(x: Null): Float       = sys.error("value error")
-  implicit def Double2doubleNullConflict(x: Null): Double    = sys.error("value error")
-  implicit def Boolean2booleanNullConflict(x: Null): Boolean = sys.error("value error")
-
-  implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] =
-    if (xs eq null) null
-    else WrappedArray.make(xs)
-
-  // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef]
-  // is as good as another for all T <: AnyRef.  Instead of creating 100,000,000
-  // unique ones by way of this implicit, let's share one.
-  implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = {
-    if (xs eq null) null
-    else if (xs.length == 0) WrappedArray.empty[T]
-    else new WrappedArray.ofRef[T](xs)
-  }
-
-  implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null
-  implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null
-  implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null
-  implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null
-  implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null
-  implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null
-  implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null
-  implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null
-  implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null
-
-  implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null
-  implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null
-
-  implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] =
-    new CanBuildFrom[String, T, immutable.IndexedSeq[T]] {
-      def apply(from: String) = immutable.IndexedSeq.newBuilder[T]
-      def apply() = immutable.IndexedSeq.newBuilder[T]
-    }
-}
-
diff --git a/src/library/scala/NotNull.scala b/src/library/scala/NotNull.scala
index f87416b..3cbe9ed 100644
--- a/src/library/scala/NotNull.scala
+++ b/src/library/scala/NotNull.scala
@@ -12,4 +12,6 @@ package scala
  * A marker trait for things that are not allowed to be null
  * @since 2.5
  */
+
+@deprecated("This trait will be removed", "2.11.0")
 trait NotNull extends Any {}
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index 3873df9..905e925 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -128,7 +128,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
   * val textField = new JComponent(initialText.orNull,20)
    * }}}
    */
-  @inline final def orNull[A1 >: A](implicit ev: Null <:< A1): A1 = this getOrElse null
+  @inline final def orNull[A1 >: A](implicit ev: Null <:< A1): A1 = this getOrElse ev(null)
 
   /** Returns a $some containing the result of applying $f to this $option's
    * value if this $option is nonempty.
@@ -209,6 +209,15 @@ sealed abstract class Option[+A] extends Product with Serializable {
     def withFilter(q: A => Boolean): WithFilter = new WithFilter(x => p(x) && q(x))
   }
 
+  /** Tests whether the option contains a given value as an element.
+   *
+   *  @param elem the element to test.
+   *  @return `true` if the option has an element that is equal (as
+   *  determined by `==`) to `elem`, `false` otherwise.
+   */
+  final def contains[A1 >: A](elem: A1): Boolean =
+    !isEmpty && this.get == elem
+
   /** Returns true if this option is nonempty '''and''' the predicate
    * $p returns true when applied to this $option's value.
    * Otherwise, returns false.
@@ -247,7 +256,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
    *  value (if possible), or $none.
    */
   @inline final def collect[B](pf: PartialFunction[A, B]): Option[B] =
-    if (!isEmpty && pf.isDefinedAt(this.get)) Some(pf(this.get)) else None
+    if (!isEmpty) pf.lift(this.get) else None
 
   /** Returns this $option if it is nonempty,
    *  otherwise return the result of evaluating `alternative`.
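The Option.scala hunks above add a `contains` test and reroute `collect` through `PartialFunction.lift`, so the partial function's pattern is matched once rather than via separate isDefinedAt/apply calls. A small usage sketch; the object and value names are illustrative:

    object OptionDemo {
      def main(args: Array[String]): Unit = {
        val some: Option[Int] = Some(2)
        val none: Option[Int] = None

        // contains tests the wrapped value with ==.
        println(some.contains(2)) // true
        println(none.contains(2)) // false

        // collect goes through PartialFunction.lift, so the pattern is
        // evaluated once instead of isDefinedAt followed by apply.
        val halve: PartialFunction[Int, Int] = { case n if n % 2 == 0 => n / 2 }
        println(some.collect(halve))    // Some(1)
        println(Some(3).collect(halve)) // None
      }
    }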
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index 9ff648a..7f4a9dc 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -94,7 +94,7 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
    *  Note that expression `pf.applyOrElse(x, default)` is equivalent to
    *  {{{ if(pf isDefinedAt x) pf(x) else default(x) }}}
    *  except that `applyOrElse` method can be implemented more efficiently.
-   *  For all partial function literals compiler generates `applyOrElse` implementation which
+   *  For all partial function literals the compiler generates an `applyOrElse` implementation which
    *  avoids double evaluation of pattern matchers and guards.
    *  This makes `applyOrElse` the basis for the efficient implementation for many operations and scenarios, such as:
    *
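The corrected sentence above concerns `applyOrElse`, which behaves like `if (pf isDefinedAt x) pf(x) else default(x)` but lets compiler-generated partial function literals match the pattern only once. A hedged sketch contrasting the two call styles; names are illustrative:

    object ApplyOrElseDemo {
      def main(args: Array[String]): Unit = {
        val toWord: PartialFunction[Int, String] = {
          case 1 => "one"
          case 2 => "two"
        }

        // Naive combination evaluates the pattern twice: once in
        // isDefinedAt and once in apply.
        def naive(x: Int): String =
          if (toWord.isDefinedAt(x)) toWord(x) else s"<$x>"

        // applyOrElse allows the generated implementation to match only once.
        def viaApplyOrElse(x: Int): String =
          toWord.applyOrElse(x, (n: Int) => s"<$n>")

        println(naive(1))          // one
        println(viaApplyOrElse(3)) // <3>
      }
    }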
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 9bb5787..faeb1dc 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -15,6 +15,7 @@ import generic.CanBuildFrom
 import scala.annotation.{ elidable, implicitNotFound }
 import scala.annotation.elidable.ASSERTION
 import scala.language.{implicitConversions, existentials}
+import scala.io.StdIn
 
 /** The `Predef` object provides definitions that are accessible in all Scala
  *  compilation units without explicit qualification.
@@ -25,8 +26,6 @@ import scala.language.{implicitConversions, existentials}
  *  [[scala.collection.immutable.Set]], and the [[scala.collection.immutable.List]]
  *  constructors ([[scala.collection.immutable.::]] and
  *  [[scala.collection.immutable.Nil]]).
- *  The types `Pair` (a [[scala.Tuple2]]) and `Triple` (a [[scala.Tuple3]]), with
- *  simple constructors, are also provided.
  *
  *  === Console I/O ===
  *  Predef provides a number of simple functions for console I/O, such as
@@ -68,7 +67,7 @@ import scala.language.{implicitConversions, existentials}
  *  Short value to a Long value as required, and to add additional higher-order
  *  functions to Array values. These are described in more detail in the documentation of [[scala.Array]].
  */
-object Predef extends LowPriorityImplicits {
+object Predef extends LowPriorityImplicits with DeprecatedPredef {
   /**
    * Retrieve the runtime representation of a class type. `classOf[T]` is equivalent to
    * the class literal `T.class` in Java.
@@ -96,24 +95,22 @@ object Predef extends LowPriorityImplicits {
   type Set[A]     = immutable.Set[A]
   val Map         = immutable.Map
   val Set         = immutable.Set
-  // @deprecated("Use scala.AnyRef instead", "2.10.0")
-  // def AnyRef = scala.AnyRef
 
   // Manifest types, companions, and incantations for summoning
   @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.")
-  @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+  @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0")
   type ClassManifest[T] = scala.reflect.ClassManifest[T]
   // TODO undeprecated until Scala reflection becomes non-experimental
   // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
   type OptManifest[T]   = scala.reflect.OptManifest[T]
   @annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
   // TODO undeprecated until Scala reflection becomes non-experimental
-  // @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
+  // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
   type Manifest[T]      = scala.reflect.Manifest[T]
-  @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+  @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0")
   val ClassManifest     = scala.reflect.ClassManifest
   // TODO undeprecated until Scala reflection becomes non-experimental
-  // @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
+  // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
   val Manifest          = scala.reflect.Manifest
   // TODO undeprecated until Scala reflection becomes non-experimental
   // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
@@ -133,25 +130,14 @@ object Predef extends LowPriorityImplicits {
   @inline def implicitly[T](implicit e: T) = e    // for summoning implicit values from the nether world -- TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero`
   @inline def locally[T](x: T): T  = x    // to communicate intent and avoid unmoored statements
 
-  // Apparently needed for the xml library
-  val $scope = scala.xml.TopScope
-
-  // Deprecated
+  // errors and asserts -------------------------------------------------
 
+  // !!! Remove this when possible - ideally for 2.11.
+  // We are stuck with it a while longer because sbt's compiler interface
+  // still calls it as of 0.12.2.
   @deprecated("Use `sys.error(message)` instead", "2.9.0")
   def error(message: String): Nothing = sys.error(message)
 
-  @deprecated("Use `sys.exit()` instead", "2.9.0")
-  def exit(): Nothing = sys.exit()
-
-  @deprecated("Use `sys.exit(status)` instead", "2.9.0")
-  def exit(status: Int): Nothing = sys.exit(status)
-
-  @deprecated("Use `formatString.format(args: _*)` or `arg.formatted(formatString)` instead", "2.9.0")
-  def format(text: String, xs: Any*) = augmentString(text).format(xs: _*)
-
-  // errors and asserts -------------------------------------------------
-
   /** Tests an expression, throwing an `AssertionError` if false.
    *  Calls to this method will not be generated if `-Xelide-below`
    *  is at least `ASSERTION`.
@@ -233,19 +219,6 @@ object Predef extends LowPriorityImplicits {
       throw new IllegalArgumentException("requirement failed: "+ message)
   }
 
-  final class Ensuring[A](val __resultOfEnsuring: A) extends AnyVal {
-    // `__resultOfEnsuring` must be a public val to allow inlining.
-    // See comments in ArrowAssoc for more.
-    @deprecated("Use `__resultOfEnsuring` instead", "2.10.0")
-    def x = __resultOfEnsuring
-
-    def ensuring(cond: Boolean): A = { assert(cond); __resultOfEnsuring }
-    def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); __resultOfEnsuring }
-    def ensuring(cond: A => Boolean): A = { assert(cond(__resultOfEnsuring)); __resultOfEnsuring }
-    def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(__resultOfEnsuring), msg); __resultOfEnsuring }
-  }
-  @inline implicit def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x)
-
   /** `???` can be used for marking methods that remain to be implemented.
    *  @throws  A `NotImplementedError`
    */
@@ -253,32 +226,82 @@ object Predef extends LowPriorityImplicits {
 
   // tupling ------------------------------------------------------------
 
+  @deprecated("Use built-in tuple syntax or Tuple2 instead", "2.11.0")
   type Pair[+A, +B] = Tuple2[A, B]
+  @deprecated("Use built-in tuple syntax or Tuple2 instead", "2.11.0")
   object Pair {
     def apply[A, B](x: A, y: B) = Tuple2(x, y)
     def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x)
   }
 
+  @deprecated("Use built-in tuple syntax or Tuple3 instead", "2.11.0")
   type Triple[+A, +B, +C] = Tuple3[A, B, C]
+  @deprecated("Use built-in tuple syntax or Tuple3 instead", "2.11.0")
   object Triple {
     def apply[A, B, C](x: A, y: B, z: C) = Tuple3(x, y, z)
     def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x)
   }
 
-  final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal {
-    // `__leftOfArrow` must be a public val to allow inlining. The val
-    // used to be called `x`, but now goes by `__leftOfArrow`, as that
-    // reduces the chances of a user's writing `foo.__leftOfArrow` and
-    // being confused why they get an ambiguous implicit conversion
-    // error. (`foo.x` used to produce this error since both
-    // any2Ensuring and any2ArrowAssoc pimped an `x` onto everything)
-    @deprecated("Use `__leftOfArrow` instead", "2.10.0")
-    def x = __leftOfArrow
-
-    @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
+  // implicit classes -----------------------------------------------------
+
+  implicit final class ArrowAssoc[A](private val self: A) extends AnyVal {
+    @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(self, y)
     def →[B](y: B): Tuple2[A, B] = ->(y)
   }
-  @inline implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
+
+  implicit final class Ensuring[A](private val self: A) extends AnyVal {
+    def ensuring(cond: Boolean): A = { assert(cond); self }
+    def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); self }
+    def ensuring(cond: A => Boolean): A = { assert(cond(self)); self }
+    def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(self), msg); self }
+  }
+
+  implicit final class StringFormat[A](private val self: A) extends AnyVal {
+    /** Returns string formatted according to given `format` string.
+     *  Format strings are as for `String.format`
+     *  (@see java.lang.String.format).
+     */
+    @inline def formatted(fmtstr: String): String = fmtstr format self
+  }
+
+  // TODO: remove, only needed for binary compatibility of 2.11.0-RC1 with 2.11.0-M8
+  // note that `private[scala]` becomes `public` in bytecode
+  private[scala] final class StringAdd[A](private val self: A) extends AnyVal {
+    def +(other: String): String = String.valueOf(self) + other
+  }
+  private[scala] def StringAdd(x: Any): Any = new StringAdd(x)
+
+  // SI-8229 retaining the pre 2.11 name for source compatibility in shadowing this implicit
+  implicit final class any2stringadd[A](private val self: A) extends AnyVal {
+    def +(other: String): String = String.valueOf(self) + other
+  }
+
+  implicit final class RichException(private val self: Throwable) extends AnyVal {
+    import scala.compat.Platform.EOL
+    @deprecated("Use Throwable#getStackTrace", "2.11.0") def getStackTraceString = self.getStackTrace().mkString("", EOL, EOL)
+  }
+
+  implicit final class SeqCharSequence(val __sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence {
+    def length: Int                                     = __sequenceOfChars.length
+    def charAt(index: Int): Char                        = __sequenceOfChars(index)
+    def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(__sequenceOfChars.slice(start, end))
+    override def toString                               = __sequenceOfChars mkString ""
+  }
+
+  implicit final class ArrayCharSequence(val __arrayOfChars: Array[Char]) extends CharSequence {
+    def length: Int                                     = __arrayOfChars.length
+    def charAt(index: Int): Char                        = __arrayOfChars(index)
+    def subSequence(start: Int, end: Int): CharSequence = new runtime.ArrayCharSequence(__arrayOfChars, start, end)
+    override def toString                               = __arrayOfChars mkString ""
+  }
+
+  implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] {
+    def apply(from: String) = apply()
+    def apply()             = mutable.StringBuilder.newBuilder
+  }
+
+  @inline implicit def augmentString(x: String): StringOps = new StringOps(x)
+  @inline implicit def unaugmentString(x: StringOps): String = x.repr
 
   // printing and reading -----------------------------------------------
 
@@ -287,28 +310,10 @@ object Predef extends LowPriorityImplicits {
   def println(x: Any) = Console.println(x)
   def printf(text: String, xs: Any*) = Console.print(text.format(xs: _*))
 
-  def readLine(): String = Console.readLine()
-  def readLine(text: String, args: Any*) = Console.readLine(text, args: _*)
-  def readBoolean() = Console.readBoolean()
-  def readByte() = Console.readByte()
-  def readShort() = Console.readShort()
-  def readChar() = Console.readChar()
-  def readInt() = Console.readInt()
-  def readLong() = Console.readLong()
-  def readFloat() = Console.readFloat()
-  def readDouble() = Console.readDouble()
-  def readf(format: String) = Console.readf(format)
-  def readf1(format: String) = Console.readf1(format)
-  def readf2(format: String) = Console.readf2(format)
-  def readf3(format: String) = Console.readf3(format)
-
   // views --------------------------------------------------------------
 
-  implicit def exceptionWrapper(exc: Throwable)                                 = new runtime.RichException(exc)
   implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2))                           = new runtime.Tuple2Zipped.Ops(x)
   implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3))                   = new runtime.Tuple3Zipped.Ops(x)
-  implicit def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new runtime.SeqCharSequence(xs)
-  implicit def arrayToCharSequence(xs: Array[Char]): CharSequence               = new runtime.ArrayCharSequence(xs, 0, xs.length)
 
   implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match {
     case x: Array[AnyRef]  => refArrayOps[AnyRef](x)
@@ -335,33 +340,6 @@ object Predef extends LowPriorityImplicits {
   implicit def shortArrayOps(xs: Array[Short]): ArrayOps[Short]       = new ArrayOps.ofShort(xs)
   implicit def unitArrayOps(xs: Array[Unit]): ArrayOps[Unit]          = new ArrayOps.ofUnit(xs)
 
-  // Primitive Widenings --------------------------------------------------------------
-
-  @deprecated("Use `.toShort` for explicit conversion and `Byte.byte2short` for implicit conversion", "2.10.0") def byte2short(x: Byte): Short = x.toShort
-  @deprecated("Use `.toInt` for explicit conversion and `Byte.byte2int` for implicit conversion", "2.10.0") def byte2int(x: Byte): Int = x.toInt
-  @deprecated("Use `.toLong` for explicit conversion and `Byte.byte2long for implicit conversion", "2.10.0") def byte2long(x: Byte): Long = x.toLong
-  @deprecated("Use `.toFloat` for explicit conversion and `Byte.byte2float` for implicit conversion", "2.10.0") def byte2float(x: Byte): Float = x.toFloat
-  @deprecated("Use `.toDouble` for explicit conversion and `Byte.byte2double` for implicit conversion", "2.10.0") def byte2double(x: Byte): Double = x.toDouble
-
-  @deprecated("Use `.toInt` for explicit conversion and `Short.short2int` for implicit conversion", "2.10.0") def short2int(x: Short): Int = x.toInt
-  @deprecated("Use `.toLong` for explicit conversion and `Short.short2long` for implicit conversion", "2.10.0") def short2long(x: Short): Long = x.toLong
-  @deprecated("Use `.toFloat` for explicit conversion and `Short.short2float` for implicit conversion", "2.10.0") def short2float(x: Short): Float = x.toFloat
-  @deprecated("Use `.toDouble` for explicit conversion and `Short.short2double` for implicit conversion", "2.10.0") def short2double(x: Short): Double = x.toDouble
-
-  @deprecated("Use `.toInt` for explicit conversion and `Char.char2int` for implicit conversion", "2.10.0") def char2int(x: Char): Int = x.toInt
-  @deprecated("Use `.toLong` for explicit conversion and `Char.char2long` for implicit conversion", "2.10.0") def char2long(x: Char): Long = x.toLong
-  @deprecated("Use `.toFloat` for explicit conversion and `Char.char2float` for implicit conversion", "2.10.0") def char2float(x: Char): Float = x.toFloat
-  @deprecated("Use `.toDouble` for explicit conversion and `Char.char2double` for implicit conversion", "2.10.0") def char2double(x: Char): Double = x.toDouble
-
-  @deprecated("Use `.toLong` for explicit conversion and `Int.int2long` for implicit conversion", "2.10.0") def int2long(x: Int): Long = x.toLong
-  @deprecated("Use `.toFloat` for explicit conversion and `Int.int2float` for implicit conversion", "2.10.0") def int2float(x: Int): Float = x.toFloat
-  @deprecated("Use `.toDouble` for explicit conversion and `Int.int2double` for implicit conversion", "2.10.0") def int2double(x: Int): Double = x.toDouble
-
-  @deprecated("Use `.toFloat` for explicit conversion and `Long.long2float` for implicit conversion", "2.10.0") def long2float(x: Long): Float = x.toFloat
-  @deprecated("Use `.toDouble` for explicit conversion and `Long.long2double` for implicit conversion", "2.10.0") def long2double(x: Long): Double = x.toDouble
-
-  @deprecated("Use `.toDouble` for explicit conversion and `Float.float2double` for implicit conversion", "2.10.0") def float2double(x: Float): Double = x.toDouble
-
   // "Autoboxing" and "Autounboxing" ---------------------------------------------------
 
   implicit def byte2Byte(x: Byte)           = java.lang.Byte.valueOf(x)
@@ -373,19 +351,6 @@ object Predef extends LowPriorityImplicits {
   implicit def double2Double(x: Double)     = java.lang.Double.valueOf(x)
   implicit def boolean2Boolean(x: Boolean)  = java.lang.Boolean.valueOf(x)
 
-  // These next eight implicits exist solely to exclude AnyRef methods from the
-  // eight implicits above so that primitives are not coerced to AnyRefs.  They
-  // only create such conflict for AnyRef methods, so the methods on the java.lang
-  // boxed types are unambiguously reachable.
-  implicit def byte2ByteConflict(x: Byte)           = new AnyRef
-  implicit def short2ShortConflict(x: Short)        = new AnyRef
-  implicit def char2CharacterConflict(x: Char)      = new AnyRef
-  implicit def int2IntegerConflict(x: Int)          = new AnyRef
-  implicit def long2LongConflict(x: Long)           = new AnyRef
-  implicit def float2FloatConflict(x: Float)        = new AnyRef
-  implicit def double2DoubleConflict(x: Double)     = new AnyRef
-  implicit def boolean2BooleanConflict(x: Boolean)  = new AnyRef
-
   implicit def Byte2byte(x: java.lang.Byte): Byte             = x.byteValue
   implicit def Short2short(x: java.lang.Short): Short         = x.shortValue
   implicit def Character2char(x: java.lang.Character): Char   = x.charValue
@@ -395,21 +360,6 @@ object Predef extends LowPriorityImplicits {
   implicit def Double2double(x: java.lang.Double): Double     = x.doubleValue
   implicit def Boolean2boolean(x: java.lang.Boolean): Boolean = x.booleanValue
 
-  // Strings and CharSequences --------------------------------------------------------------
-
-  @inline implicit def any2stringfmt(x: Any) = new runtime.StringFormat(x)
-  @inline implicit def augmentString(x: String): StringOps = new StringOps(x)
-  implicit def any2stringadd(x: Any) = new runtime.StringAdd(x)
-  implicit def unaugmentString(x: StringOps): String = x.repr
-
-  @deprecated("Use `StringCanBuildFrom`", "2.10.0")
-  def stringCanBuildFrom: CanBuildFrom[String, Char, String] = StringCanBuildFrom
-
-  implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] {
-    def apply(from: String) = apply()
-    def apply()             = mutable.StringBuilder.newBuilder
-  }
-
   // Type Constraints --------------------------------------------------------------
 
   /**
@@ -432,9 +382,13 @@ object Predef extends LowPriorityImplicits {
   @implicitNotFound(msg = "Cannot prove that ${From} <:< ${To}.")
   sealed abstract class <:<[-From, +To] extends (From => To) with Serializable
   private[this] final val singleton_<:< = new <:<[Any,Any] { def apply(x: Any): Any = x }
-  // not in the <:< companion object because it is also
-  // intended to subsume identity (which is no longer implicit)
-  implicit def conforms[A]: A <:< A = singleton_<:<.asInstanceOf[A <:< A]
+  // The dollar prefix is to dodge accidental shadowing of this method
+  // by a user-defined method of the same name (SI-7788).
+  // The collections rely on this method.
+  implicit def $conforms[A]: A <:< A = singleton_<:<.asInstanceOf[A <:< A]
+
+  @deprecated("Use `implicitly[T <:< U]` or `identity` instead.", "2.11.0")
+  def conforms[A]: A <:< A = $conforms[A]
 
   /** An instance of `A =:= B` witnesses that the types `A` and `B` are equal.
    *
@@ -460,3 +414,97 @@ object Predef extends LowPriorityImplicits {
     implicit def dummyImplicit: DummyImplicit = new DummyImplicit
   }
 }
+
+private[scala] trait DeprecatedPredef {
+  self: Predef.type =>
+
+  // Deprecated stubs for any who may have been calling these methods directly.
+  @deprecated("Use `ArrowAssoc`", "2.11.0") def any2ArrowAssoc[A](x: A): ArrowAssoc[A]                                      = new ArrowAssoc(x)
+  @deprecated("Use `Ensuring`", "2.11.0") def any2Ensuring[A](x: A): Ensuring[A]                                            = new Ensuring(x)
+  @deprecated("Use `StringFormat`", "2.11.0") def any2stringfmt(x: Any): StringFormat[Any]                                  = new StringFormat(x)
+  @deprecated("Use `Throwable` directly", "2.11.0") def exceptionWrapper(exc: Throwable)                                    = new RichException(exc)
+  @deprecated("Use `SeqCharSequence`", "2.11.0") def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new SeqCharSequence(xs)
+  @deprecated("Use `ArrayCharSequence`", "2.11.0") def arrayToCharSequence(xs: Array[Char]): CharSequence                   = new ArrayCharSequence(xs)
+
+  @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLine(): String                 = StdIn.readLine()
+  @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLine(text: String, args: Any*) = StdIn.readLine(text, args: _*)
+  @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readBoolean()                      = StdIn.readBoolean()
+  @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readByte()                         = StdIn.readByte()
+  @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readShort()                        = StdIn.readShort()
+  @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readChar()                         = StdIn.readChar()
+  @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readInt()                          = StdIn.readInt()
+  @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLong()                         = StdIn.readLong()
+  @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readFloat()                        = StdIn.readFloat()
+  @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readDouble()                       = StdIn.readDouble()
+  @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf(format: String)              = StdIn.readf(format)
+  @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf1(format: String)             = StdIn.readf1(format)
+  @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf2(format: String)             = StdIn.readf2(format)
+  @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf3(format: String)             = StdIn.readf3(format)
+}
+
+/** The `LowPriorityImplicits` class provides implicit values that
+*  are valid in all Scala compilation units without explicit qualification,
+*  but that are partially overridden by higher-priority conversions in object
+*  `Predef`.
+*
+*  @author  Martin Odersky
+*  @since 2.8
+*/
+// SI-7335 Parents of Predef are defined in the same compilation unit to avoid
+// cyclic reference errors compiling the standard library *without* a previously
+// compiled copy on the classpath.
+private[scala] abstract class LowPriorityImplicits {
+  import mutable.WrappedArray
+  import immutable.WrappedString
+
+  /** We prefer the java.lang.* boxed types to these wrappers in
+   *  any potential conflicts.  Conflicts do exist because the wrappers
+   *  need to implement ScalaNumber in order to have a symmetric equals
+   *  method, but that implies implementing java.lang.Number as well.
+   *
+   *  Note - these are inlined because they are value classes, but
+   *  the call to xxxWrapper is not eliminated even though it does nothing.
+   *  Even inlined, every call site does a no-op retrieval of Predef's MODULE$
+   *  because maybe loading Predef has side effects!
+   */
+  @inline implicit def byteWrapper(x: Byte)       = new runtime.RichByte(x)
+  @inline implicit def shortWrapper(x: Short)     = new runtime.RichShort(x)
+  @inline implicit def intWrapper(x: Int)         = new runtime.RichInt(x)
+  @inline implicit def charWrapper(c: Char)       = new runtime.RichChar(c)
+  @inline implicit def longWrapper(x: Long)       = new runtime.RichLong(x)
+  @inline implicit def floatWrapper(x: Float)     = new runtime.RichFloat(x)
+  @inline implicit def doubleWrapper(x: Double)   = new runtime.RichDouble(x)
+  @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x)
+
+  implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] =
+    if (xs eq null) null
+    else WrappedArray.make(xs)
+
+  // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef]
+  // is as good as another for all T <: AnyRef.  Instead of creating 100,000,000
+  // unique ones by way of this implicit, let's share one.
+  implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = {
+    if (xs eq null) null
+    else if (xs.length == 0) WrappedArray.empty[T]
+    else new WrappedArray.ofRef[T](xs)
+  }
+
+  implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null
+  implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null
+  implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null
+  implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null
+  implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null
+  implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null
+  implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null
+  implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null
+  implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null
+
+  implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null
+  implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null
+
+  implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] =
+    new CanBuildFrom[String, T, immutable.IndexedSeq[T]] {
+      def apply(from: String) = immutable.IndexedSeq.newBuilder[T]
+      def apply() = immutable.IndexedSeq.newBuilder[T]
+    }
+}
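The Predef.scala rewrite above turns ArrowAssoc, Ensuring and StringFormat into implicit value classes, moves console input behind scala.io.StdIn (with deprecated forwarders in DeprecatedPredef), and renames the identity-conformance implicit to `$conforms` (SI-7788) while keeping a deprecated `conforms`. A minimal sketch of the resulting user-facing surface; the object and value names are illustrative:

    object PredefSurfaceDemo {
      def main(args: Array[String]): Unit = {
        // ArrowAssoc: -> still builds a Tuple2.
        val pair: (String, Int) = "answer" -> 42

        // Ensuring: a postcondition check that returns the value unchanged.
        val positive = pair._2.ensuring(_ > 0, "must be positive")

        // StringFormat: formatted delegates to String.format.
        val label = positive.formatted("value=%d")

        // $conforms supplies the A <:< A evidence; it is normally summoned
        // implicitly, e.g. by the collections.
        val ev = implicitly[Int <:< Int]

        println((pair, label, ev(positive))) // ((answer,42),value=42,42)

        // Console input moved out of Predef:
        // val line = scala.io.StdIn.readLine("prompt> ")
      }
    }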
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index 2c6838f..0798587 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product1.scala b/src/library/scala/Product1.scala
index edd095c..dbc34ba 100644
--- a/src/library/scala/Product1.scala
+++ b/src/library/scala/Product1.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product10.scala b/src/library/scala/Product10.scala
index 8daefde..70de79d 100644
--- a/src/library/scala/Product10.scala
+++ b/src/library/scala/Product10.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product11.scala b/src/library/scala/Product11.scala
index 90b4e80..1bb79ac 100644
--- a/src/library/scala/Product11.scala
+++ b/src/library/scala/Product11.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product12.scala b/src/library/scala/Product12.scala
index d5997ea..d7e1e1b 100644
--- a/src/library/scala/Product12.scala
+++ b/src/library/scala/Product12.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product13.scala b/src/library/scala/Product13.scala
index db8e0f3..8571b45 100644
--- a/src/library/scala/Product13.scala
+++ b/src/library/scala/Product13.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product14.scala b/src/library/scala/Product14.scala
index 113c07e..a2f5140 100644
--- a/src/library/scala/Product14.scala
+++ b/src/library/scala/Product14.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product15.scala b/src/library/scala/Product15.scala
index a6ad9c7..1c6ad00 100644
--- a/src/library/scala/Product15.scala
+++ b/src/library/scala/Product15.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product16.scala b/src/library/scala/Product16.scala
index cbf47ec..f03b0b3 100644
--- a/src/library/scala/Product16.scala
+++ b/src/library/scala/Product16.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product17.scala b/src/library/scala/Product17.scala
index f56836b..72df1b4 100644
--- a/src/library/scala/Product17.scala
+++ b/src/library/scala/Product17.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product18.scala b/src/library/scala/Product18.scala
index 5b86bcf..0402f90 100644
--- a/src/library/scala/Product18.scala
+++ b/src/library/scala/Product18.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product19.scala b/src/library/scala/Product19.scala
index ed4bf36..b9770db 100644
--- a/src/library/scala/Product19.scala
+++ b/src/library/scala/Product19.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product2.scala b/src/library/scala/Product2.scala
index e27e54e..a43a4a2 100644
--- a/src/library/scala/Product2.scala
+++ b/src/library/scala/Product2.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product20.scala b/src/library/scala/Product20.scala
index 47437a2..7b0df20 100644
--- a/src/library/scala/Product20.scala
+++ b/src/library/scala/Product20.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product21.scala b/src/library/scala/Product21.scala
index 319d272..f81347a 100644
--- a/src/library/scala/Product21.scala
+++ b/src/library/scala/Product21.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product22.scala b/src/library/scala/Product22.scala
index 6ab3737..7a25891 100644
--- a/src/library/scala/Product22.scala
+++ b/src/library/scala/Product22.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product3.scala b/src/library/scala/Product3.scala
index 1cfbd79..9976240 100644
--- a/src/library/scala/Product3.scala
+++ b/src/library/scala/Product3.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product4.scala b/src/library/scala/Product4.scala
index 843571f..d6c1543 100644
--- a/src/library/scala/Product4.scala
+++ b/src/library/scala/Product4.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product5.scala b/src/library/scala/Product5.scala
index df73bba..5f1b11a 100644
--- a/src/library/scala/Product5.scala
+++ b/src/library/scala/Product5.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product6.scala b/src/library/scala/Product6.scala
index 36906ca..efd9408 100644
--- a/src/library/scala/Product6.scala
+++ b/src/library/scala/Product6.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product7.scala b/src/library/scala/Product7.scala
index e7b2c13..fab0a99 100644
--- a/src/library/scala/Product7.scala
+++ b/src/library/scala/Product7.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product8.scala b/src/library/scala/Product8.scala
index 916e57e..41391f7 100644
--- a/src/library/scala/Product8.scala
+++ b/src/library/scala/Product8.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Product9.scala b/src/library/scala/Product9.scala
index d5e72ed..e22538e 100644
--- a/src/library/scala/Product9.scala
+++ b/src/library/scala/Product9.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/Proxy.scala b/src/library/scala/Proxy.scala
index 07fa6e2..7c28e6e 100644
--- a/src/library/scala/Proxy.scala
+++ b/src/library/scala/Proxy.scala
@@ -28,7 +28,7 @@ trait Proxy extends Any {
   override def hashCode: Int = self.hashCode
   override def equals(that: Any): Boolean = that match {
     case null  => false
-    case _     => 
+    case _     =>
       val x = that.asInstanceOf[AnyRef]
       (x eq this.asInstanceOf[AnyRef]) || (x eq self.asInstanceOf[AnyRef]) || (x equals self)
   }
diff --git a/src/library/scala/Responder.scala b/src/library/scala/Responder.scala
index 0a42ddb..8a658e2 100644
--- a/src/library/scala/Responder.scala
+++ b/src/library/scala/Responder.scala
@@ -18,6 +18,7 @@ package scala
  *  @see class Responder
  *  @since 2.1
  */
+@deprecated("This object will be removed", "2.11.0")
 object Responder {
 
   /** Creates a responder that answer continuations with the constant `a`.
@@ -58,6 +59,7 @@ object Responder {
  *  @version 1.0
  *  @since 2.1
  */
+@deprecated("This class will be removed", "2.11.0")
 abstract class Responder[+A] extends Serializable {
 
   def respond(k: A => Unit): Unit
diff --git a/src/library/scala/ScalaObject.scala b/src/library/scala/ScalaObject.scala
deleted file mode 100644
index f67dc3a..0000000
--- a/src/library/scala/ScalaObject.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala
-
-/** Until scala 2.10.0 this marker trait was added to
- *  scala-compiled classes.  Now it only exists for backward
- *  compatibility.
- */
-@deprecated("ScalaObject will be removed", "2.10.0")
-trait ScalaObject
diff --git a/src/library/scala/SerialVersionUID.scala b/src/library/scala/SerialVersionUID.scala
index 1f7d047..77094f0 100644
--- a/src/library/scala/SerialVersionUID.scala
+++ b/src/library/scala/SerialVersionUID.scala
@@ -12,4 +12,4 @@ package scala
  * Annotation for specifying the `static SerialVersionUID` field
  * of a serializable class.
  */
-class SerialVersionUID(uid: Long) extends scala.annotation.StaticAnnotation
+class SerialVersionUID(value: Long) extends scala.annotation.ClassfileAnnotation
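
A minimal usage sketch of the annotation as changed above (the class name and UID are invented for illustration and are not part of the patch): with the parameter renamed to value and the annotation turned into a ClassfileAnnotation, the argument is expected to be a constant and to end up in the class file, where Java serialization looks for the serialVersionUID:

    @SerialVersionUID(42L)                             // constant argument, as required for a classfile annotation
    class Account(val id: Int) extends Serializable    // hypothetical class; 42L serves as its serialVersionUID
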
diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala
index 35c5fe3..36b9ec4 100644
--- a/src/library/scala/Short.scala
+++ b/src/library/scala/Short.scala
@@ -1,17 +1,17 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
 \*                                                                      */
 
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
 
 package scala
 
-import scala.language.implicitConversions
-
 /** `Short`, a 16-bit signed integer (equivalent to Java's `short` primitive type) is a
  *  subtype of [[scala.AnyVal]]. Instances of `Short` are not
  *  represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Short private extends AnyVal {
  * }}}
  */
   def unary_~ : Int
-  /**
- * Returns this value, unmodified.
- */
+  /** Returns this value, unmodified. */
   def unary_+ : Int
-  /**
- * Returns the negation of this value.
- */
+  /** Returns the negation of this value. */
   def unary_- : Int
 
   def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Short private extends AnyVal {
   */
   def >>(x: Long): Int
 
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Short): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Char): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Int): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Long): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Float): Boolean
-  /**
-  * Returns `true` if this value is equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is equal to x, `false` otherwise. */
   def ==(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is not equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is not equal to x, `false` otherwise. */
   def !=(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Short): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Char): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Int): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Long): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Float): Boolean
-  /**
-  * Returns `true` if this value is less than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than x, `false` otherwise. */
   def <(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is less than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
   def <=(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Short): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Char): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Int): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Long): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Float): Boolean
-  /**
-  * Returns `true` if this value is greater than x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than x, `false` otherwise. */
   def >(x: Double): Boolean
 
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Byte): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Short): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Char): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Int): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Long): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Float): Boolean
-  /**
-  * Returns `true` if this value is greater than or equal to x, `false` otherwise.
-  */
+  /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
   def >=(x: Double): Boolean
 
   /**
@@ -447,165 +359,95 @@ final abstract class Short private extends AnyVal {
   */
   def ^(x: Long): Long
 
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Byte): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Short): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Char): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Int): Int
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Long): Long
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Float): Float
-  /**
-  * Returns the sum of this value and `x`.
-  */
+  /** Returns the sum of this value and `x`. */
   def +(x: Double): Double
 
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Byte): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Short): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Char): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Int): Int
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Long): Long
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Float): Float
-  /**
-  * Returns the difference of this value and `x`.
-  */
+  /** Returns the difference of this value and `x`. */
   def -(x: Double): Double
 
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Byte): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Short): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Char): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Int): Int
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Long): Long
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Float): Float
-  /**
-  * Returns the product of this value and `x`.
-  */
+  /** Returns the product of this value and `x`. */
   def *(x: Double): Double
 
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Byte): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Short): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Char): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Int): Int
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Long): Long
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Float): Float
-  /**
-  * Returns the quotient of this value and `x`.
-  */
+  /** Returns the quotient of this value and `x`. */
   def /(x: Double): Double
 
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Byte): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Short): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Char): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Int): Int
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Long): Long
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Float): Float
-  /**
-  * Returns the remainder of the division of this value by `x`.
-  */
+  /** Returns the remainder of the division of this value by `x`. */
   def %(x: Double): Double
 
   override def getClass(): Class[Short] = null
 }
 
 object Short extends AnyValCompanion {
-  /** The smallest value representable as a Short.
-   */
+  /** The smallest value representable as a Short. */
   final val MinValue = java.lang.Short.MIN_VALUE
 
-  /** The largest value representable as a Short.
-   */
+  /** The largest value representable as a Short. */
   final val MaxValue = java.lang.Short.MAX_VALUE
 
   /** Transform a value type into a boxed reference type.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToShort`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the Short to be boxed
    *  @return     a java.lang.Short offering `x` as its underlying value.
    */
@@ -615,18 +457,18 @@ object Short extends AnyValCompanion {
    *  method is not typesafe: it accepts any Object, but will throw
    *  an exception if the argument is not a java.lang.Short.
    *
+   *  Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToShort`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+   *
    *  @param  x   the java.lang.Short to be unboxed.
    *  @throws     ClassCastException  if the argument is not a java.lang.Short
    *  @return     the Short resulting from calling shortValue() on `x`
    */
   def unbox(x: java.lang.Object): Short = x.asInstanceOf[java.lang.Short].shortValue()
 
-  /** The String representation of the scala.Short companion object.
-   */
+  /** The String representation of the scala.Short companion object. */
   override def toString = "object scala.Short"
-
-  /** Language mandated coercions from Short to "wider" types.
-   */
+  /** Language mandated coercions from Short to "wider" types. */
+  import scala.language.implicitConversions
   implicit def short2int(x: Short): Int = x.toInt
   implicit def short2long(x: Short): Long = x.toLong
   implicit def short2float(x: Short): Float = x.toFloat
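
A REPL-style sketch of the boxing and widening behaviour documented above (values are arbitrary; the comments restate the docs rather than add new claims):

    val s: Short = 7
    val boxed = Short.box(s)              // java.lang.Short, via BoxesRunTime.boxToShort
    val i: Int = s                        // numeric widening; the companion also supplies implicit short2int
    val back: Short = Short.unbox(boxed)  // ClassCastException if the argument is not a java.lang.Short
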
diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala
index c7a6091..137598c 100644
--- a/src/library/scala/Specializable.scala
+++ b/src/library/scala/Specializable.scala
@@ -11,7 +11,7 @@ package scala
 /** A common supertype for companions of specializable types.
  *  Should not be extended in user code.
  */
-trait Specializable extends SpecializableCompanion
+trait Specializable
 
 object Specializable {
   // No type parameter in @specialized annotation.
diff --git a/src/library/scala/SpecializableCompanion.scala b/src/library/scala/SpecializableCompanion.scala
deleted file mode 100644
index 1a9ce71..0000000
--- a/src/library/scala/SpecializableCompanion.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala
-
-/** A common supertype for companion classes which specialization takes into account.
- */
-@deprecated("Use Specializable instead", "2.10.0")
-private[scala] trait SpecializableCompanion
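
With SpecializableCompanion gone, the specialization groups are reached only through Specializable; a hedged sketch of typical use (the method here is invented and purely illustrative):

    def sum[@specialized(Specializable.Integral) T](xs: Array[T])(implicit num: Numeric[T]): T =
      xs.foldLeft(num.zero)(num.plus)     // specialized variants generated for the Integral group
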
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index 1b5fd6c..cd928a2 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -59,7 +59,8 @@ case class StringContext(parts: String*) {
    */
   def checkLengths(args: Seq[Any]): Unit =
     if (parts.length != args.length + 1)
-      throw new IllegalArgumentException("wrong number of arguments for interpolated string")
+      throw new IllegalArgumentException("wrong number of arguments ("+ args.length
+        +") for interpolated string with "+ parts.length +" parts")
 
 
   /** The simple string interpolator.
@@ -156,13 +157,12 @@ case class StringContext(parts: String*) {
    *      If a formatting position does not refer to a `%` character (which is assumed to
    *      start a format specifier), then the string format specifier `%s` is inserted.
    *
-   *   2. Any `%` characters not in formatting positions are left in the resulting
-   *      string literally. This is achieved by replacing each such occurrence by the
-   *      format specifier `%%`.
+   *   2. Any `%` characters not in formatting positions must begin one of the conversions
+   *      `%%` (the literal percent) or `%n` (the platform-specific line separator).
    */
   // The implementation is hardwired to `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f`
   // Using the mechanism implemented in `scala.tools.reflect.FastTrack`
-  def f(args: Any*): String = ??? // macro
+  def f(args: Any*): String = macro ???
 }
 
 object StringContext {
@@ -172,8 +172,8 @@ object StringContext {
    *  @param  str   The offending string
    *  @param  idx   The index of the offending backslash character in `str`.
    */
-  class InvalidEscapeException(str: String, idx: Int)
-    extends IllegalArgumentException("invalid escape character at index "+idx+" in \""+str+"\"")
+  class InvalidEscapeException(str: String, @deprecatedName('idx) val index: Int)
+    extends IllegalArgumentException("invalid escape character at index "+index+" in \""+str+"\"")
 
   /** Expands standard Scala escape sequences in a string.
    *  Escape sequences are:
@@ -184,7 +184,11 @@ object StringContext {
    *  @param  str  A string that may contain escape sequences
    *  @return The string with all escape sequences expanded.
    */
-  def treatEscapes(str: String): String = {
+  def treatEscapes(str: String): String = treatEscapes0(str, strict = false)
+
+  def processEscapes(str: String): String = treatEscapes0(str, strict = true)
+
+  private def treatEscapes0(str: String, strict: Boolean): String = {
     lazy val bldr = new java.lang.StringBuilder
     val len = str.length
     var start = 0
@@ -201,6 +205,7 @@ object StringContext {
         idx += 1
         if (idx >= len) throw new InvalidEscapeException(str, cur)
         if ('0' <= str(idx) && str(idx) <= '7') {
+          if (strict) throw new InvalidEscapeException(str, cur)
           val leadch = str(idx)
           var oct = leadch - '0'
           idx += 1
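
To make the revised contracts above concrete, a REPL-style sketch (the strings are invented): the f interpolator now requires a bare % to begin %% or %n, and the new strict processEscapes rejects the octal escapes that the lenient treatEscapes still expands:

    f"progress: ${0.5}%.0f%%"               // a literal percent must now be written %%
    StringContext.treatEscapes("\\101")     // "A": octal escape expanded by the lenient variant
    StringContext.processEscapes("\\101")   // throws StringContext.InvalidEscapeException (strict mode)
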
diff --git a/src/library/scala/Tuple1.scala b/src/library/scala/Tuple1.scala
index 02fdd0c..5898b63 100644
--- a/src/library/scala/Tuple1.scala
+++ b/src/library/scala/Tuple1.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -15,6 +15,7 @@ package scala
  *  @constructor  Create a new tuple with 1 elements.
  *  @param  _1   Element 1 of this Tuple1
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple1[@specialized(Int, Long, Double) +T1](_1: T1)
   extends Product1[T1]
 {
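
A quick, hypothetical illustration of what @deprecatedInheritance (added to each TupleN below as well) means for user code, with an invented subclass name: constructing tuples is untouched, but extending one now draws a deprecation warning and is expected to stop compiling once tuples become final:

    val one = Tuple1(42)                        // ordinary use, no warning
    class Tagged[A](a: A) extends Tuple1[A](a)  // compiles in 2.11, but warns that tuples will be made final
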
diff --git a/src/library/scala/Tuple10.scala b/src/library/scala/Tuple10.scala
index ba2a02a..2b02395 100644
--- a/src/library/scala/Tuple10.scala
+++ b/src/library/scala/Tuple10.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -24,6 +24,7 @@ package scala
  *  @param  _9   Element 9 of this Tuple10
  *  @param  _10   Element 10 of this Tuple10
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10)
   extends Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]
 {
diff --git a/src/library/scala/Tuple11.scala b/src/library/scala/Tuple11.scala
index 7f51d17..0d5294d 100644
--- a/src/library/scala/Tuple11.scala
+++ b/src/library/scala/Tuple11.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -25,6 +25,7 @@ package scala
  *  @param  _10   Element 10 of this Tuple11
  *  @param  _11   Element 11 of this Tuple11
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11)
   extends Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]
 {
diff --git a/src/library/scala/Tuple12.scala b/src/library/scala/Tuple12.scala
index 4bbc6a0..d36c827 100644
--- a/src/library/scala/Tuple12.scala
+++ b/src/library/scala/Tuple12.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -26,6 +26,7 @@ package scala
  *  @param  _11   Element 11 of this Tuple12
  *  @param  _12   Element 12 of this Tuple12
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12)
   extends Product12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]
 {
diff --git a/src/library/scala/Tuple13.scala b/src/library/scala/Tuple13.scala
index 77bd59b..edc3745 100644
--- a/src/library/scala/Tuple13.scala
+++ b/src/library/scala/Tuple13.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -27,6 +27,7 @@ package scala
  *  @param  _12   Element 12 of this Tuple13
  *  @param  _13   Element 13 of this Tuple13
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13)
   extends Product13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]
 {
diff --git a/src/library/scala/Tuple14.scala b/src/library/scala/Tuple14.scala
index bf7a4ce..9896e73 100644
--- a/src/library/scala/Tuple14.scala
+++ b/src/library/scala/Tuple14.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -28,6 +28,7 @@ package scala
  *  @param  _13   Element 13 of this Tuple14
  *  @param  _14   Element 14 of this Tuple14
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14)
   extends Product14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]
 {
diff --git a/src/library/scala/Tuple15.scala b/src/library/scala/Tuple15.scala
index 582c359..45cd4f7 100644
--- a/src/library/scala/Tuple15.scala
+++ b/src/library/scala/Tuple15.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -29,6 +29,7 @@ package scala
  *  @param  _14   Element 14 of this Tuple15
  *  @param  _15   Element 15 of this Tuple15
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15)
   extends Product15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]
 {
diff --git a/src/library/scala/Tuple16.scala b/src/library/scala/Tuple16.scala
index a1e9a79..2e370a5 100644
--- a/src/library/scala/Tuple16.scala
+++ b/src/library/scala/Tuple16.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -30,6 +30,7 @@ package scala
  *  @param  _15   Element 15 of this Tuple16
  *  @param  _16   Element 16 of this Tuple16
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16)
   extends Product16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]
 {
diff --git a/src/library/scala/Tuple17.scala b/src/library/scala/Tuple17.scala
index f531766..2242a15 100644
--- a/src/library/scala/Tuple17.scala
+++ b/src/library/scala/Tuple17.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -31,6 +31,7 @@ package scala
  *  @param  _16   Element 16 of this Tuple17
  *  @param  _17   Element 17 of this Tuple17
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17)
   extends Product17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]
 {
diff --git a/src/library/scala/Tuple18.scala b/src/library/scala/Tuple18.scala
index a96db25..68f245c 100644
--- a/src/library/scala/Tuple18.scala
+++ b/src/library/scala/Tuple18.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -32,6 +32,7 @@ package scala
  *  @param  _17   Element 17 of this Tuple18
  *  @param  _18   Element 18 of this Tuple18
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18)
   extends Product18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]
 {
diff --git a/src/library/scala/Tuple19.scala b/src/library/scala/Tuple19.scala
index 718280d..a8a4954 100644
--- a/src/library/scala/Tuple19.scala
+++ b/src/library/scala/Tuple19.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -33,6 +33,7 @@ package scala
  *  @param  _18   Element 18 of this Tuple19
  *  @param  _19   Element 19 of this Tuple19
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19)
   extends Product19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]
 {
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index 35d5a44..9ea1469 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -16,6 +16,7 @@ package scala
  *  @param  _1   Element 1 of this Tuple2
  *  @param  _2   Element 2 of this Tuple2
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple2[@specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T1, @specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T2](_1: T1, _2: T2)
   extends Product2[T1, T2]
 {
diff --git a/src/library/scala/Tuple20.scala b/src/library/scala/Tuple20.scala
index 4a44c0b..0118d38 100644
--- a/src/library/scala/Tuple20.scala
+++ b/src/library/scala/Tuple20.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -34,6 +34,7 @@ package scala
  *  @param  _19   Element 19 of this Tuple20
  *  @param  _20   Element 20 of this Tuple20
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20)
   extends Product20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]
 {
diff --git a/src/library/scala/Tuple21.scala b/src/library/scala/Tuple21.scala
index 580a169..ceae94a 100644
--- a/src/library/scala/Tuple21.scala
+++ b/src/library/scala/Tuple21.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -35,6 +35,7 @@ package scala
  *  @param  _20   Element 20 of this Tuple21
  *  @param  _21   Element 21 of this Tuple21
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21)
   extends Product21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]
 {
diff --git a/src/library/scala/Tuple22.scala b/src/library/scala/Tuple22.scala
index fd3392d..ecd567a 100644
--- a/src/library/scala/Tuple22.scala
+++ b/src/library/scala/Tuple22.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -36,6 +36,7 @@ package scala
  *  @param  _21   Element 21 of this Tuple22
  *  @param  _22   Element 22 of this Tuple22
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21, +T22](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22)
   extends Product22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]
 {
diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala
index 5ed1360..6e71d3a 100644
--- a/src/library/scala/Tuple3.scala
+++ b/src/library/scala/Tuple3.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -17,6 +17,7 @@ package scala
  *  @param  _2   Element 2 of this Tuple3
  *  @param  _3   Element 3 of this Tuple3
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3)
   extends Product3[T1, T2, T3]
 {
diff --git a/src/library/scala/Tuple4.scala b/src/library/scala/Tuple4.scala
index a859078..4c84cfc 100644
--- a/src/library/scala/Tuple4.scala
+++ b/src/library/scala/Tuple4.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -18,6 +18,7 @@ package scala
  *  @param  _3   Element 3 of this Tuple4
  *  @param  _4   Element 4 of this Tuple4
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple4[+T1, +T2, +T3, +T4](_1: T1, _2: T2, _3: T3, _4: T4)
   extends Product4[T1, T2, T3, T4]
 {
diff --git a/src/library/scala/Tuple5.scala b/src/library/scala/Tuple5.scala
index 1edfb67..fe8e853 100644
--- a/src/library/scala/Tuple5.scala
+++ b/src/library/scala/Tuple5.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -19,6 +19,7 @@ package scala
  *  @param  _4   Element 4 of this Tuple5
  *  @param  _5   Element 5 of this Tuple5
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple5[+T1, +T2, +T3, +T4, +T5](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5)
   extends Product5[T1, T2, T3, T4, T5]
 {
diff --git a/src/library/scala/Tuple6.scala b/src/library/scala/Tuple6.scala
index 5b74937..6bf1c73 100644
--- a/src/library/scala/Tuple6.scala
+++ b/src/library/scala/Tuple6.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -20,6 +20,7 @@ package scala
  *  @param  _5   Element 5 of this Tuple6
  *  @param  _6   Element 6 of this Tuple6
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple6[+T1, +T2, +T3, +T4, +T5, +T6](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6)
   extends Product6[T1, T2, T3, T4, T5, T6]
 {
diff --git a/src/library/scala/Tuple7.scala b/src/library/scala/Tuple7.scala
index a7f572e..ea42709 100644
--- a/src/library/scala/Tuple7.scala
+++ b/src/library/scala/Tuple7.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -21,6 +21,7 @@ package scala
  *  @param  _6   Element 6 of this Tuple7
  *  @param  _7   Element 7 of this Tuple7
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple7[+T1, +T2, +T3, +T4, +T5, +T6, +T7](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7)
   extends Product7[T1, T2, T3, T4, T5, T6, T7]
 {
diff --git a/src/library/scala/Tuple8.scala b/src/library/scala/Tuple8.scala
index 9bb427d..c24f945 100644
--- a/src/library/scala/Tuple8.scala
+++ b/src/library/scala/Tuple8.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -22,6 +22,7 @@ package scala
  *  @param  _7   Element 7 of this Tuple8
  *  @param  _8   Element 8 of this Tuple8
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8)
   extends Product8[T1, T2, T3, T4, T5, T6, T7, T8]
 {
diff --git a/src/library/scala/Tuple9.scala b/src/library/scala/Tuple9.scala
index 4d50539..ed02b30 100644
--- a/src/library/scala/Tuple9.scala
+++ b/src/library/scala/Tuple9.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -23,6 +23,7 @@ package scala
  *  @param  _8   Element 8 of this Tuple9
  *  @param  _9   Element 9 of this Tuple9
  */
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
 case class Tuple9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9)
   extends Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9]
 {
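
The @deprecatedInheritance annotations added above change no runtime behaviour; they only make the compiler warn (under -deprecation) when user code extends one of the tuple case classes, in preparation for making them final. A minimal sketch of code that now triggers the warning; `Row` is a hypothetical class used purely for illustration:

    // warns: inheritance from class Tuple4 is deprecated:
    //        Tuples will be made final in a future version.
    class Row(a: Int, b: Int, c: Int, d: Int) extends Tuple4[Int, Int, Int, Int](a, b, c, d)
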
diff --git a/src/library/scala/UninitializedFieldError.scala b/src/library/scala/UninitializedFieldError.scala
index 10c6ccc..0dfba2a 100644
--- a/src/library/scala/UninitializedFieldError.scala
+++ b/src/library/scala/UninitializedFieldError.scala
@@ -18,8 +18,6 @@ package scala
  *
  *  @since 2.7
  */
-final case class UninitializedFieldError(msg: String)
-           extends RuntimeException(msg) {
-  def this(obj: Any) =
-    this(if (null != obj) obj.toString() else "null")
+final case class UninitializedFieldError(msg: String) extends RuntimeException(msg) {
+  def this(obj: Any) = this("" + obj)
 }
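
The simplified auxiliary constructor is behaviour-preserving: in Scala, string concatenation with a null reference yields the text "null", so `"" + obj` produces the same message the old explicit null check did. A small sketch:

    // Both forms end up with the same message text:
    new UninitializedFieldError(null: Any).getMessage               // "null"
    new UninitializedFieldError("x is not initialized").getMessage  // "x is not initialized"
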
diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala
index dc67e60..018ad24 100644
--- a/src/library/scala/Unit.scala
+++ b/src/library/scala/Unit.scala
@@ -1,17 +1,17 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
 \*                                                                      */
 
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
 
 package scala
 
-import scala.language.implicitConversions
-
 
 /** `Unit` is a subtype of [[scala.AnyVal]]. There is only one value of type
  *  `Unit`, `()`, and it is not represented by any object in the underlying
@@ -41,8 +41,7 @@ object Unit extends AnyValCompanion {
    */
   def unbox(x: java.lang.Object): Unit = ()
 
-  /** The String representation of the scala.Unit companion object.
-   */
+  /** The String representation of the scala.Unit companion object. */
   override def toString = "object scala.Unit"
 }
 
diff --git a/src/library/scala/annotation/cloneable.scala b/src/library/scala/annotation/cloneable.scala
deleted file mode 100644
index 4fb62b6..0000000
--- a/src/library/scala/annotation/cloneable.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.annotation
-
-/**
- * An annotation that designates the class to which it is applied as cloneable
- */
-@deprecated("instead of `@cloneable class C`, use `class C extends Cloneable`", "2.10.0")
-class cloneable extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/compileTimeOnly.scala b/src/library/scala/annotation/compileTimeOnly.scala
new file mode 100644
index 0000000..942e9ca
--- /dev/null
+++ b/src/library/scala/annotation/compileTimeOnly.scala
@@ -0,0 +1,22 @@
+package scala.annotation
+
+import scala.annotation.meta._
+
+/**
+ * An annotation that designates that an annottee should not be referred to after
+ * type checking (which includes macro expansion).
+ *
+ * Examples of potential use:
+ *   1) The annottee can only appear in the arguments of some other macro
+ *      that will eliminate it from the AST during expansion.
+ *   2) The annottee is a macro and should have been expanded away,
+ *      so if it hasn't, something wrong has happened.
+ *      (Comes in handy to provide better support for new macro flavors,
+ *      e.g. macro annotations, that can't be expanded by the vanilla compiler).
+ *
+ * @param  message the error message to print during compilation if a reference remains
+ *                 after type checking
+ * @since  2.11.0
+ */
+@getter @setter @beanGetter @beanSetter @companionClass @companionMethod
+final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
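
The new annotation is aimed at definitions that must be rewritten away during macro expansion: any reference that survives type checking is reported as an error with the given message. A minimal sketch, where `placeholder` and the enclosing `query` macro are hypothetical names:

    import scala.annotation.compileTimeOnly

    // Legal only inside the argument of some macro that removes it during expansion;
    // any other use is rejected at compile time with the message below.
    @compileTimeOnly("`placeholder` must appear inside a `query { ... }` block")
    def placeholder[T]: T = sys.error("placeholder was not eliminated by a macro")
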
diff --git a/src/library/scala/annotation/implicitNotFound.scala b/src/library/scala/annotation/implicitNotFound.scala
index bbde90c..eeedcb0 100644
--- a/src/library/scala/annotation/implicitNotFound.scala
+++ b/src/library/scala/annotation/implicitNotFound.scala
@@ -9,8 +9,11 @@
 package scala.annotation
 
 /**
- * An annotation that specifies the error message that is emitted when the compiler
- * cannot find an implicit value of the annotated type.
+ * To customize the error message that's emitted when an implicit of type
+ * C[T1,..., TN] cannot be found, annotate the class C with @implicitNotFound.
+ * Assuming C has type parameters X1,..., XN, the error message will be the
+ * result of replacing all occurrences of ${Xi} in the string msg with the
+ * string representation of the corresponding type argument Ti.
  *
  * @author Adriaan Moors
  * @since 2.8.1
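
The reworded Scaladoc describes the ${Xi} substitution performed on the message. A short sketch of that mechanism; `Converter` is a hypothetical type class used only to show the placeholders being filled in:

    import scala.annotation.implicitNotFound

    @implicitNotFound("No Converter available from ${A} to ${B}.")
    trait Converter[A, B] { def convert(a: A): B }

    def convert[A, B](a: A)(implicit c: Converter[A, B]): B = c.convert(a)
    // convert[Int, String](1) fails to compile with:
    //   No Converter available from Int to String.
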
diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala
index adb6de6..e71be00 100644
--- a/src/library/scala/annotation/migration.scala
+++ b/src/library/scala/annotation/migration.scala
@@ -17,7 +17,7 @@ package scala.annotation
  * order between Scala 2.7 and 2.8.
  *
  * @param message A message describing the change, which is emitted
- * by the compiler if the flag `-Xmigration` indicates a version 
+ * by the compiler if the flag `-Xmigration` indicates a version
  * prior to the changedIn version.
  *
  * @param changedIn The version, in which the behaviour change was
@@ -25,7 +25,4 @@ package scala.annotation
  *
  * @since 2.8
  */
- private[scala] final class migration(message: String, changedIn: String) extends scala.annotation.StaticAnnotation {
-   @deprecated("Use the constructor taking two Strings instead.", "2.10.0")
-   def this(majorVersion: Int, minorVersion: Int, message: String) = this(message, majorVersion + "." + minorVersion)
- }
+ private[scala] final class migration(message: String, changedIn: String) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/serializable.scala b/src/library/scala/annotation/serializable.scala
deleted file mode 100644
index 1e1aff1..0000000
--- a/src/library/scala/annotation/serializable.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.annotation
-
-/**
- * An annotation that designates the class to which it is applied as serializable
- */
-@deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
-class serializable extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/target/package.scala b/src/library/scala/annotation/target/package.scala
deleted file mode 100644
index ac2836c..0000000
--- a/src/library/scala/annotation/target/package.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.annotation
-
-package object target {
-  @deprecated("Use `@scala.annotation.meta.beanGetter` instead", "2.10.0")
-  type beanGetter = scala.annotation.meta.beanGetter
-
-  @deprecated("Use `@scala.annotation.meta.beanSetter` instead", "2.10.0")
-  type beanSetter = scala.annotation.meta.beanSetter
-
-  @deprecated("Use `@scala.annotation.meta.field` instead", "2.10.0")
-  type field = scala.annotation.meta.field
-
-  @deprecated("Use `@scala.annotation.meta.getter` instead", "2.10.0")
-  type getter = scala.annotation.meta.getter
-
-  @deprecated("Use `@scala.annotation.meta.param` instead", "2.10.0")
-  type param = scala.annotation.meta.param
-
-  @deprecated("Use `@scala.annotation.meta.setter` instead", "2.10.0")
-  type setter = scala.annotation.meta.setter
-}
diff --git a/src/library/scala/beans/ScalaBeanInfo.scala b/src/library/scala/beans/ScalaBeanInfo.scala
index 3a95335..ac8fa26 100644
--- a/src/library/scala/beans/ScalaBeanInfo.scala
+++ b/src/library/scala/beans/ScalaBeanInfo.scala
@@ -27,7 +27,7 @@ abstract class ScalaBeanInfo(clazz: java.lang.Class[_],
     for (m <- clazz.getMethods if methods.exists(_ == m.getName))
       yield new MethodDescriptor(m)
 
-  init
+  init()
 
   override def getPropertyDescriptors() = pd
   override def getMethodDescriptors() = md
@@ -35,10 +35,10 @@ abstract class ScalaBeanInfo(clazz: java.lang.Class[_],
   // override def getAdditionalBeanInfo() = Array(Introspector getBeanInfo clazz.getSuperclass)
 
   private def init() {
-    var i = 0;
+    var i = 0
     while (i < props.length) {
       pd(i/3) = new PropertyDescriptor(props(i), clazz, props(i+1), props(i+2))
-      i = i + 3;
+      i = i + 3
     }
   }
 
diff --git a/src/library/scala/collection/BitSet.scala b/src/library/scala/collection/BitSet.scala
index 6985563..e255e96 100644
--- a/src/library/scala/collection/BitSet.scala
+++ b/src/library/scala/collection/BitSet.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 
diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala
index 4a1c0be..8a8af79 100644
--- a/src/library/scala/collection/BitSetLike.scala
+++ b/src/library/scala/collection/BitSetLike.scala
@@ -8,10 +8,10 @@
 
 
 
-package scala.collection
+package scala
+package collection
 
 import BitSetLike._
-import generic._
 import mutable.StringBuilder
 
 /** A template trait for bitsets.
@@ -70,6 +70,8 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
     s
   }
 
+  override def isEmpty: Boolean = 0 until nwords forall (i => word(i) == 0)
+
   implicit def ordering: Ordering[Int] = Ordering.Int
 
   def rangeImpl(from: Option[Int], until: Option[Int]): This = {
@@ -99,24 +101,35 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
     fromBitMaskNoCopy(a)
   }
 
-  def iterator: Iterator[Int] = new AbstractIterator[Int] {
-    private var current = 0
+  def iterator: Iterator[Int] = iteratorFrom(0)
+
+  override def keysIteratorFrom(start: Int) = new AbstractIterator[Int] {
+    private var current = start
     private val end = nwords * WordLength
     def hasNext: Boolean = {
-      while (current < end && !self.contains(current)) current += 1
-      current < end
+      while (current != end && !self.contains(current)) current += 1
+      current != end
     }
     def next(): Int =
       if (hasNext) { val r = current; current += 1; r }
-      else Iterator.empty.next
+      else Iterator.empty.next()
   }
 
   override def foreach[B](f: Int => B) {
-    for (i <- 0 until nwords) {
-      val w = word(i)
-      for (j <- i * WordLength until (i + 1) * WordLength) {
-        if ((w & (1L << j)) != 0L) f(j)
+    /* NOTE: while loops are significantly faster as of 2.11 and
+       one major use case of bitsets is performance. Also, there
+       is nothing to do when all bits are clear, so use that as
+       the inner loop condition. */
+    var i = 0
+    while (i < nwords) {
+      var w = word(i)
+      var j = i * WordLength
+      while (w != 0L) {
+        if ((w&1L) == 1L) f(j)
+        w = w >>> 1
+        j += 1
       }
+      i += 1
     }
   }
 
@@ -194,11 +207,15 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
   override def addString(sb: StringBuilder, start: String, sep: String, end: String) = {
     sb append start
     var pre = ""
-    for (i <- 0 until nwords * WordLength)
+    val max = nwords * WordLength
+    var i = 0
+    while(i != max) {
       if (contains(i)) {
         sb append pre append i
         pre = sep
       }
+      i += 1
+    }
     sb append end
   }
 
@@ -207,8 +224,10 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
 
 /** Companion object for BitSets. Contains private data only */
 object BitSetLike {
-  private[collection] val LogWL = 6
-  private val WordLength = 64
+  /* Final vals can sometimes be inlined as constants (faster) */
+  private[collection] final val LogWL = 6
+  private final val WordLength = 64
+  private[collection] final val MaxSize = (Int.MaxValue >> LogWL) + 1
 
   private[collection] def updateArray(elems: Array[Long], idx: Int, w: Long): Array[Long] = {
     var len = elems.length
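
Two of the BitSetLike changes above are worth calling out: `iterator` is now defined in terms of the new `keysIteratorFrom(start)`, and `foreach` walks the underlying words with a while loop, skipping words that are already zero. A small usage sketch of the externally unchanged behaviour:

    import scala.collection.immutable.BitSet

    val bits = BitSet(1, 5, 64, 200)
    bits.keysIteratorFrom(6).toList   // List(64, 200)
    bits.foreach(println)             // prints 1, 5, 64, 200 in order
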
diff --git a/src/library/scala/collection/BufferedIterator.scala b/src/library/scala/collection/BufferedIterator.scala
index 741bca4..e6e97d5 100644
--- a/src/library/scala/collection/BufferedIterator.scala
+++ b/src/library/scala/collection/BufferedIterator.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 
 /** Buffered iterators are iterators which provide a method `head`
  *  that inspects the next element without discarding it.
diff --git a/src/library/scala/collection/CustomParallelizable.scala b/src/library/scala/collection/CustomParallelizable.scala
index 53fe32b..cbeb28d 100644
--- a/src/library/scala/collection/CustomParallelizable.scala
+++ b/src/library/scala/collection/CustomParallelizable.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 import parallel.Combiner
 
diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala
index 5c91183..8afda7c 100644
--- a/src/library/scala/collection/DefaultMap.scala
+++ b/src/library/scala/collection/DefaultMap.scala
@@ -6,11 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
-
-import generic._
+package scala
+package collection
 
 /** A default map which implements the `+` and `-` methods of maps.
  *
@@ -27,14 +24,14 @@ import generic._
  *  @since 2.8
  */
 trait DefaultMap[A, +B] extends Map[A, B] { self =>
-  
+
   /** A default implementation which creates a new immutable map.
    */
   override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = {
     val b = Map.newBuilder[A, B1]
     b ++= this
     b += ((kv._1, kv._2))
-    b.result
+    b.result()
   }
 
   /** A default implementation which creates a new immutable map.
@@ -42,6 +39,6 @@ trait DefaultMap[A, +B] extends Map[A, B] { self =>
   override def - (key: A): Map[A, B] = {
     val b = newBuilder
     b ++= this filter (key != _._1)
-    b.result
+    b.result()
   }
 }
diff --git a/src/library/scala/collection/GenIterable.scala b/src/library/scala/collection/GenIterable.scala
index b4e7a14..6fd4158 100644
--- a/src/library/scala/collection/GenIterable.scala
+++ b/src/library/scala/collection/GenIterable.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 
 import generic._
diff --git a/src/library/scala/collection/GenIterableLike.scala b/src/library/scala/collection/GenIterableLike.scala
index 2ba9a72..1dbb54d 100644
--- a/src/library/scala/collection/GenIterableLike.scala
+++ b/src/library/scala/collection/GenIterableLike.scala
@@ -6,9 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
-import generic.{ CanBuildFrom => CBF, _ }
+import generic.{ CanBuildFrom => CBF }
 
 /** A template trait for all iterable collections which may possibly
  *  have their operations implemented in parallel.
diff --git a/src/library/scala/collection/GenIterableView.scala b/src/library/scala/collection/GenIterableView.scala
deleted file mode 100644
index ca0332e..0000000
--- a/src/library/scala/collection/GenIterableView.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.collection
-
-
-import generic._
-
-
-
-trait GenIterableView[+A, +Coll] extends GenIterableViewLike[A, Coll, GenIterableView[A, Coll]] { }
-
-
diff --git a/src/library/scala/collection/GenIterableViewLike.scala b/src/library/scala/collection/GenIterableViewLike.scala
deleted file mode 100644
index 4e4ceb4..0000000
--- a/src/library/scala/collection/GenIterableViewLike.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.collection
-
-
-
-import generic._
-import TraversableView.NoBuilder
-
-
-
-trait GenIterableViewLike[+A,
-                          +Coll,
-                          +This <: GenIterableView[A, Coll] with GenIterableViewLike[A, Coll, This]]
-extends GenIterable[A] with GenIterableLike[A, This] with GenTraversableView[A, Coll] with GenTraversableViewLike[A, Coll, This] {
-self =>
-
-  trait Transformed[+B] extends GenIterableView[B, Coll] with super.Transformed[B] {
-    def iterator: Iterator[B]
-    override def foreach[U](f: B => U): Unit = iterator foreach f
-    override def toString = viewToString
-    override def isEmpty = !iterator.hasNext
-  }
-
-  trait EmptyView extends Transformed[Nothing] with super.EmptyView {
-    final def iterator: Iterator[Nothing] = Iterator.empty
-  }
-
-  trait Forced[B] extends super.Forced[B] with Transformed[B] {
-    def iterator = forced.iterator
-  }
-
-  trait Sliced extends super.Sliced with Transformed[A] {
-    def iterator: Iterator[A] = self.iterator.slice(from, until)
-  }
-
-  trait Mapped[B] extends super.Mapped[B] with Transformed[B] {
-    def iterator = self.iterator map mapping
-  }
-
-  trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] {
-    def iterator: Iterator[B] = self.iterator flatMap mapping
-  }
-
-  trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
-    def iterator = self.iterator ++ rest
-  }
-
-  trait Filtered extends super.Filtered with Transformed[A] {
-    def iterator = self.iterator filter pred
-  }
-
-  trait TakenWhile extends super.TakenWhile with Transformed[A] {
-    def iterator = self.iterator takeWhile pred
-  }
-
-  trait DroppedWhile extends super.DroppedWhile with Transformed[A] {
-    def iterator = self.iterator dropWhile pred
-  }
-
-  trait Zipped[B] extends Transformed[(A, B)] {
-    protected[this] val other: GenIterable[B]
-    def iterator: Iterator[(A, B)] = self.iterator zip other.iterator
-    final override protected[this] def viewIdentifier = "Z"
-  }
-
-  trait ZippedAll[A1 >: A, B] extends Transformed[(A1, B)] {
-    protected[this] val other: GenIterable[B]
-    protected[this] val thisElem: A1
-    protected[this] val thatElem: B
-    final override protected[this] def viewIdentifier = "Z"
-    def iterator: Iterator[(A1, B)] =
-      self.iterator.zipAll(other.iterator, thisElem, thatElem)
-  }
-
-}
-
-
diff --git a/src/library/scala/collection/GenMap.scala b/src/library/scala/collection/GenMap.scala
index f7b2ae4..d17a2de 100644
--- a/src/library/scala/collection/GenMap.scala
+++ b/src/library/scala/collection/GenMap.scala
@@ -6,11 +6,11 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 
-
 /** A trait for all traversable collections which may possibly
  *  have their operations implemented in parallel.
  *
@@ -27,12 +27,9 @@ extends GenMapLike[A, B, GenMap[A, B]]
   def updated [B1 >: B](key: A, value: B1): GenMap[A, B1]
 }
 
-
 object GenMap extends GenMapFactory[GenMap] {
   def empty[A, B]: immutable.Map[A, B] = immutable.Map.empty
 
   /** $mapCanBuildFromInfo */
   implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
 }
-
-
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index 367377a..4e7d359 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 /** A trait for all maps upon which operations may be
  *  implemented in parallel.
diff --git a/src/library/scala/collection/GenSeq.scala b/src/library/scala/collection/GenSeq.scala
index 4c5488d..480562c 100644
--- a/src/library/scala/collection/GenSeq.scala
+++ b/src/library/scala/collection/GenSeq.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 
 import generic._
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index 78d6334..c3bad60 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 
@@ -37,8 +38,8 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
    * Example:
    *
    * {{{
-   *    scala> val x = LinkedList(1, 2, 3, 4, 5)
-   *    x: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4, 5)
+   *    scala> val x = List(1, 2, 3, 4, 5)
+   *    x: List[Int] = List(1, 2, 3, 4, 5)
    *
    *    scala> x(3)
    *    res1: Int = 4
@@ -189,7 +190,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
    */
   def lastIndexWhere(p: A => Boolean, end: Int): Int
 
-  /** Returns new $coll wih elements in reversed order.
+  /** Returns new $coll with elements in reversed order.
    *
    *  $willNotTerminateInf
    *
@@ -301,14 +302,14 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
    *
    *    Example:
    *    {{{
-   *      scala> val x = LinkedList(1)
-   *      x: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
+   *      scala> val x = List(1)
+   *      x: List[Int] = List(1)
    *
    *      scala> val y = 2 +: x
-   *      y: scala.collection.mutable.LinkedList[Int] = LinkedList(2, 1)
+   *      y: List[Int] = List(2, 1)
    *
    *      scala> println(x)
-   *      LinkedList(1)
+   *      List(1)
    *    }}}
    *
    *    @return a new $coll consisting of `elem` followed
@@ -334,17 +335,14 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
    *
    *    Example:
    *    {{{
-   *       scala> import scala.collection.mutable.LinkedList
-   *       import scala.collection.mutable.LinkedList
-   *
-   *       scala> val a = LinkedList(1)
-   *       a: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
-   *
+   *       scala> val a = List(1)
+   *       a: List[Int] = List(1)
+   *       
    *       scala> val b = a :+ 2
-   *       b: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
-   *
+   *       b: List[Int] = List(1, 2)
+   *       
    *       scala> println(a)
-   *       LinkedList(1)
+   *       List(1)
    *    }}}
    *
    *    @return a new $coll consisting of
diff --git a/src/library/scala/collection/GenSeqView.scala b/src/library/scala/collection/GenSeqView.scala
deleted file mode 100644
index 92c8b77..0000000
--- a/src/library/scala/collection/GenSeqView.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.collection
-
-
-import generic._
-
-
-
-trait GenSeqView[+A, +Coll] extends GenSeqViewLike[A, Coll, GenSeqView[A, Coll]] { }
-
-
diff --git a/src/library/scala/collection/GenSeqViewLike.scala b/src/library/scala/collection/GenSeqViewLike.scala
deleted file mode 100644
index 5160021..0000000
--- a/src/library/scala/collection/GenSeqViewLike.scala
+++ /dev/null
@@ -1,164 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.collection
-
-
-
-
-trait GenSeqViewLike[+A,
-                     +Coll,
-                     +This <: GenSeqView[A, Coll] with GenSeqViewLike[A, Coll, This]]
-extends GenSeq[A] with GenSeqLike[A, This] with GenIterableView[A, Coll] with GenIterableViewLike[A, Coll, This] {
-self =>
-
-  trait Transformed[+B] extends GenSeqView[B, Coll] with super.Transformed[B] {
-    def length: Int
-    def apply(idx: Int): B
-    override def toString = viewToString
-  }
-
-  trait EmptyView extends Transformed[Nothing] with super.EmptyView {
-    final override def length = 0
-    final override def apply(n: Int) = Nil(n)
-  }
-
-  trait Forced[B] extends super.Forced[B] with Transformed[B] {
-    def length = forced.length
-    def apply(idx: Int) = forced.apply(idx)
-  }
-
-  trait Sliced extends super.Sliced with Transformed[A] {
-    def length = iterator.size
-    def apply(idx: Int): A =
-      if (idx + from < until) self.apply(idx + from)
-      else throw new IndexOutOfBoundsException(idx.toString)
-
-    override def foreach[U](f: A => U) = iterator foreach f
-    override def iterator: Iterator[A] = self.iterator drop from take endpoints.width
-  }
-
-  trait Mapped[B] extends super.Mapped[B] with Transformed[B] {
-    def length = self.length
-    def apply(idx: Int): B = mapping(self(idx))
-  }
-
-  trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] {
-    protected[this] lazy val index = {
-      val index = new Array[Int](self.length + 1)
-      index(0) = 0
-      for (i <- 0 until self.length) // note that if the mapping returns a list, performance is bad, bad
-        index(i + 1) = index(i) + mapping(self(i)).seq.size
-      index
-    }
-    protected[this] def findRow(idx: Int, lo: Int, hi: Int): Int = {
-      val mid = (lo + hi) / 2
-      if (idx < index(mid)) findRow(idx, lo, mid - 1)
-      else if (idx >= index(mid + 1)) findRow(idx, mid + 1, hi)
-      else mid
-    }
-    def length = index(self.length)
-    def apply(idx: Int) = {
-      val row = findRow(idx, 0, self.length - 1)
-      mapping(self(row)).seq.toSeq(idx - index(row))
-    }
-  }
-
-  trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
-    protected[this] lazy val restSeq = rest.toSeq
-    def length = self.length + restSeq.length
-    def apply(idx: Int) =
-      if (idx < self.length) self(idx) else restSeq(idx - self.length)
-  }
-
-  trait Filtered extends super.Filtered with Transformed[A] {
-    protected[this] lazy val index = {
-      var len = 0
-      val arr = new Array[Int](self.length)
-      for (i <- 0 until self.length)
-        if (pred(self(i))) {
-          arr(len) = i
-          len += 1
-        }
-      arr take len
-    }
-    def length = index.length
-    def apply(idx: Int) = self(index(idx))
-  }
-
-  trait TakenWhile extends super.TakenWhile with Transformed[A] {
-    protected[this] lazy val len = self prefixLength pred
-    def length = len
-    def apply(idx: Int) =
-      if (idx < len) self(idx)
-      else throw new IndexOutOfBoundsException(idx.toString)
-  }
-
-  trait DroppedWhile extends super.DroppedWhile with Transformed[A] {
-    protected[this] lazy val start = self prefixLength pred
-    def length = self.length - start
-    def apply(idx: Int) =
-      if (idx >= 0) self(idx + start)
-      else throw new IndexOutOfBoundsException(idx.toString)
-  }
-
-  trait Zipped[B] extends super.Zipped[B] with Transformed[(A, B)] {
-    protected[this] lazy val thatSeq = other.seq.toSeq
-    /* Have to be careful here - other may be an infinite sequence. */
-    def length = if ((thatSeq lengthCompare self.length) <= 0) thatSeq.length else self.length
-    def apply(idx: Int) = (self.apply(idx), thatSeq.apply(idx))
-  }
-
-  trait ZippedAll[A1 >: A, B] extends super.ZippedAll[A1, B] with Transformed[(A1, B)] {
-    protected[this] lazy val thatSeq = other.seq.toSeq
-    def length: Int = self.length max thatSeq.length
-    def apply(idx: Int) =
-      (if (idx < self.length) self.apply(idx) else thisElem,
-       if (idx < thatSeq.length) thatSeq.apply(idx) else thatElem)
-  }
-
-  trait Reversed extends Transformed[A] {
-    override def iterator: Iterator[A] = createReversedIterator
-    def length: Int = self.length
-    def apply(idx: Int): A = self.apply(length - 1 - idx)
-    final override protected[this] def viewIdentifier = "R"
-
-    private def createReversedIterator = {
-      var lst = List[A]()
-      for (elem <- self) lst ::= elem
-      lst.iterator
-    }
-  }
-
-  trait Patched[B >: A] extends Transformed[B] {
-    protected[this] val from: Int
-    protected[this] val patch: GenSeq[B]
-    protected[this] val replaced: Int
-    private lazy val plen = patch.length
-    override def iterator: Iterator[B] = self.iterator patch (from, patch.iterator, replaced)
-    def length: Int = self.length + plen - replaced
-    def apply(idx: Int): B =
-      if (idx < from) self.apply(idx)
-      else if (idx < from + plen) patch.apply(idx - from)
-      else self.apply(idx - plen + replaced)
-    final override protected[this] def viewIdentifier = "P"
-  }
-
-  trait Prepended[B >: A] extends Transformed[B] {
-    protected[this] val fst: B
-    override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator
-    def length: Int = 1 + self.length
-    def apply(idx: Int): B =
-      if (idx == 0) fst
-      else self.apply(idx - 1)
-    final override protected[this] def viewIdentifier = "A"
-  }
-
-}
-
-
diff --git a/src/library/scala/collection/GenSet.scala b/src/library/scala/collection/GenSet.scala
index 832177b..2467860 100644
--- a/src/library/scala/collection/GenSet.scala
+++ b/src/library/scala/collection/GenSet.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 
 
 import generic._
diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala
index f22a7c8..c5355e5 100644
--- a/src/library/scala/collection/GenSetLike.scala
+++ b/src/library/scala/collection/GenSetLike.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 
 /** A template trait for sets which may possibly
diff --git a/src/library/scala/collection/GenTraversable.scala b/src/library/scala/collection/GenTraversable.scala
index 3db2dd7..8705965 100644
--- a/src/library/scala/collection/GenTraversable.scala
+++ b/src/library/scala/collection/GenTraversable.scala
@@ -6,14 +6,11 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
-
+package scala
+package collection
 
 import generic._
 
-
 /** A trait for all traversable collections which may possibly
  *  have their operations implemented in parallel.
  *
@@ -30,10 +27,7 @@ extends GenTraversableLike[A, GenTraversable[A]]
   def companion: GenericCompanion[GenTraversable] = GenTraversable
 }
 
-
 object GenTraversable extends GenTraversableFactory[GenTraversable] {
   implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
   def newBuilder[A] = Traversable.newBuilder
 }
-
-
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index 46134c9..ca098e5 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 
 import generic._
@@ -238,7 +239,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
    *      // lettersOf will return a Set[Char], not a Seq
    *      def lettersOf(words: Seq[String]) = words.toSet flatMap (word => word.toSeq)
    *
-   *      // xs will be a an Iterable[Int]
+   *      // xs will be an Iterable[Int]
    *      val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2)
    *
    *      // ys will be a Map[Int, Int]
@@ -266,20 +267,20 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
    *
    *    Example:
    *    {{{
-   *      scala> val a = LinkedList(1)
-   *      a: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
-   *
-   *      scala> val b = LinkedList(2)
-   *      b: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
-   *
+   *      scala> val a = List(1)
+   *      a: List[Int] = List(1)
+   *      
+   *      scala> val b = List(2)
+   *      b: List[Int] = List(2)
+   *      
    *      scala> val c = a ++ b
-   *      c: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
-   *
-   *      scala> val d = LinkedList('a')
-   *      d: scala.collection.mutable.LinkedList[Char] = LinkedList(a)
-   *
+   *      c: List[Int] = List(1, 2)
+   *      
+   *      scala> val d = List('a')
+   *      d: List[Char] = List(a)
+   *      
    *      scala> val e = c ++ d
-   *      e: scala.collection.mutable.LinkedList[AnyVal] = LinkedList(1, 2, a)
+   *      e: List[AnyVal] = List(1, 2, a)
    *    }}}
    *
    *    @return       a new $coll which contains all elements of this $coll
@@ -323,7 +324,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
    *  @tparam K    the type of keys returned by the discriminator function.
    *  @return      A map from keys to ${coll}s such that the following invariant holds:
    *               {{{
-   *                 (xs partition f)(k) = xs filter (x => f(x) == k)
+   *                 (xs groupBy f)(k) = xs filter (x => f(x) == k)
    *               }}}
    *               That is, every key `k` is bound to a $coll of those elements `x`
    *               for which `f(x)` equals `k`.
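
The corrected `groupBy` invariant can be checked directly; a small sketch:

    val xs = List(1, 2, 3, 4, 5)
    val f  = (x: Int) => x % 2
    (xs groupBy f)(1)           // List(1, 3, 5)
    xs filter (x => f(x) == 1)  // List(1, 3, 5): same elements, as the invariant states
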
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index 093db2a..0cd9140 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 import scala.reflect.ClassTag
 import scala.collection.generic.CanBuildFrom
@@ -119,19 +120,6 @@ trait GenTraversableOnce[+A] extends Any {
    */
   def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1
 
-  /** A syntactic sugar for out of order folding. See `fold`.
-   *
-   * Example:
-   * {{{
-   *      scala> val a = LinkedList(1,2,3,4)
-   *      a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
-   *
-   *      scala> val b = (a /:\ 5)(_+_)
-   *      b: Int = 15
-   * }}}*/
-  @deprecated("use fold instead", "2.10.0")
-  def /:\[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = fold(z)(op)
-
   /** Applies a binary operator to a start value and all elements of this $coll,
    *  going left to right.
    *
@@ -142,8 +130,8 @@ trait GenTraversableOnce[+A] extends Any {
    *
    *  Note that the folding function used to compute b is equivalent to that used to compute c.
    *  {{{
-   *      scala> val a = LinkedList(1,2,3,4)
-   *      a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
+   *      scala> val a = List(1,2,3,4)
+   *      a: List[Int] = List(1, 2, 3, 4)
    *
    *      scala> val b = (5 /: a)(_+_)
    *      b: Int = 15
@@ -179,8 +167,8 @@ trait GenTraversableOnce[+A] extends Any {
    *
    *  Note that the folding function used to compute b is equivalent to that used to compute c.
    *  {{{
-   *      scala> val a = LinkedList(1,2,3,4)
-   *      a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
+   *      scala> val a = List(1,2,3,4)
+   *      a: List[Int] = List(1, 2, 3, 4)
    *
    *      scala> val b = (a :\ 5)(_+_)
    *      b: Int = 15
@@ -261,11 +249,12 @@ trait GenTraversableOnce[+A] extends Any {
    *  @tparam B        the type of accumulated results
    *  @param z         the initial value for the accumulated result of the partition - this
    *                   will typically be the neutral element for the `seqop` operator (e.g.
-   *                   `Nil` for list concatenation or `0` for summation)
+   *                   `Nil` for list concatenation or `0` for summation) and may be evaluated
+   *                   more than once
    *  @param seqop     an operator used to accumulate results within a partition
    *  @param combop    an associative operator used to combine results from different partitions
    */
-  def aggregate[B](z: B)(seqop: (B, A) => B, combop: (B, B) => B): B
+  def aggregate[B](z: =>B)(seqop: (B, A) => B, combop: (B, B) => B): B
 
   /** Applies a binary operator to all elements of this $coll, going right to left.
    *  $willNotTerminateInf
@@ -352,7 +341,7 @@ trait GenTraversableOnce[+A] extends Any {
    *
    *  @param    ord   An ordering to be used for comparing elements.
    *  @tparam   A1    The type over which the ordering is defined.
-   *  @return   the smallest element of this $coll with respect to the ordering `cmp`.
+   *  @return   the smallest element of this $coll with respect to the ordering `ord`.
    *
    *  @usecase def min: A
    *    @inheritdoc
@@ -365,7 +354,7 @@ trait GenTraversableOnce[+A] extends Any {
    *
    *  @param    ord   An ordering to be used for comparing elements.
    *  @tparam   A1    The type over which the ordering is defined.
-   *  @return   the largest element of this $coll with respect to the ordering `cmp`.
+   *  @return   the largest element of this $coll with respect to the ordering `ord`.
    *
    *  @usecase def max: A
    *    @inheritdoc
@@ -374,8 +363,34 @@ trait GenTraversableOnce[+A] extends Any {
    */
   def max[A1 >: A](implicit ord: Ordering[A1]): A
 
+  /** Finds the first element which yields the largest value measured by function f.
+   *
+   *  @param    cmp   An ordering to be used for comparing elements.
+   *  @tparam   B     The result type of the function f.
+   *  @param    f     The measuring function.
+   *  @return   the first element of this $coll with the largest value measured by function f
+   *  with respect to the ordering `cmp`.
+   *
+   *  @usecase def maxBy[B](f: A => B): A
+   *    @inheritdoc
+   *
+   *    @return   the first element of this $coll with the largest value measured by function f.
+   */
   def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A
 
+  /** Finds the first element which yields the smallest value measured by function f.
+   *
+   *  @param    cmp   An ordering to be used for comparing elements.
+   *  @tparam   B     The result type of the function f.
+   *  @param    f     The measuring function.
+   *  @return   the first element of this $coll with the smallest value measured by function f
+   *  with respect to the ordering `cmp`.
+   *
+   *  @usecase def minBy[B](f: A => B): A
+   *    @inheritdoc
+   *
+   *    @return   the first element of this $coll with the smallest value measured by function f.
+   */
   def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A
 
   def forall(pred: A => Boolean): Boolean
@@ -491,7 +506,6 @@ trait GenTraversableOnce[+A] extends Any {
   def toIndexedSeq: immutable.IndexedSeq[A]
 
   /** Converts this $coll to a stream.
-   *  $willNotTerminateInf
    *  @return a stream containing all elements of this $coll.
    */
   def toStream: Stream[A]
@@ -503,7 +517,7 @@ trait GenTraversableOnce[+A] extends Any {
    */
   def toIterator: Iterator[A]
 
-  /** Converts this $coll to a mutable buffer.
+  /** Uses the contents of this $coll to create a new mutable buffer.
    *  $willNotTerminateInf
    *  @return a buffer containing all elements of this $coll.
    */
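
Two of the GenTraversableOnce changes above affect callers directly: `maxBy`/`minBy` now document that they return the first element with the extreme measure, and `aggregate` takes its zero element by name, so it may be re-evaluated (once per partition on parallel collections) and should therefore be cheap and side-effect free. A small sketch:

    val words = List("pear", "fig", "banana")
    words.maxBy(_.length)                      // "banana"
    words.minBy(_.length)                      // "fig"
    // z is by-name: keep it pure, it may be evaluated more than once.
    words.aggregate(0)(_ + _.length, _ + _)    // 13
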
diff --git a/src/library/scala/collection/GenTraversableView.scala b/src/library/scala/collection/GenTraversableView.scala
deleted file mode 100644
index cceb068..0000000
--- a/src/library/scala/collection/GenTraversableView.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.collection
-
-
-import generic._
-
-
-
-trait GenTraversableView[+A, +Coll] extends GenTraversableViewLike[A, Coll, GenTraversableView[A, Coll]] { }
-
-
diff --git a/src/library/scala/collection/GenTraversableViewLike.scala b/src/library/scala/collection/GenTraversableViewLike.scala
deleted file mode 100644
index 77fe080..0000000
--- a/src/library/scala/collection/GenTraversableViewLike.scala
+++ /dev/null
@@ -1,139 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.collection
-
-
-import generic._
-import mutable.{ Builder, ArrayBuffer }
-import TraversableView.NoBuilder
-
-
-trait GenTraversableViewLike[+A,
-                             +Coll,
-                             +This <: GenTraversableView[A, Coll] with GenTraversableViewLike[A, Coll, This]]
-extends GenTraversable[A] with GenTraversableLike[A, This] {
-self =>
-
-  def force[B >: A, That](implicit bf: CanBuildFrom[Coll, B, That]): That
-
-  protected def underlying: Coll
-  protected[this] def viewIdentifier: String
-  protected[this] def viewIdString: String
-  def viewToString = stringPrefix + viewIdString + "(...)"
-
-  /** The implementation base trait of this view.
-   *  This trait and all its subtraits has to be re-implemented for each
-   *  ViewLike class.
-   */
-  trait Transformed[+B] extends GenTraversableView[B, Coll] {
-    def foreach[U](f: B => U): Unit
-
-    lazy val underlying = self.underlying
-    final override protected[this] def viewIdString = self.viewIdString + viewIdentifier
-    override def stringPrefix = self.stringPrefix
-    override def toString = viewToString
-  }
-
-  trait EmptyView extends Transformed[Nothing] {
-    final override def isEmpty = true
-    final override def foreach[U](f: Nothing => U): Unit = ()
-  }
-
-  /** A fall back which forces everything into a vector and then applies an operation
-   *  on it. Used for those operations which do not naturally lend themselves to a view
-   */
-  trait Forced[B] extends Transformed[B] {
-    protected[this] val forced: GenSeq[B]
-    def foreach[U](f: B => U) = forced foreach f
-    final override protected[this] def viewIdentifier = "C"
-  }
-
-  trait Sliced extends Transformed[A] {
-    protected[this] val endpoints: SliceInterval
-    protected[this] def from  = endpoints.from
-    protected[this] def until = endpoints.until
-    // protected def newSliced(_endpoints: SliceInterval): Transformed[A] =
-    //   self.newSliced(endpoints.recalculate(_endpoints))
-
-    def foreach[U](f: A => U) {
-      var index = 0
-      for (x <- self) {
-        if (from <= index) {
-          if (until <= index) return
-          f(x)
-        }
-        index += 1
-      }
-    }
-    final override protected[this] def viewIdentifier = "S"
-  }
-
-  trait Mapped[B] extends Transformed[B] {
-    protected[this] val mapping: A => B
-    def foreach[U](f: B => U) {
-      for (x <- self)
-        f(mapping(x))
-    }
-    final override protected[this] def viewIdentifier = "M"
-  }
-
-  trait FlatMapped[B] extends Transformed[B] {
-    protected[this] val mapping: A => GenTraversableOnce[B]
-    def foreach[U](f: B => U) {
-      for (x <- self)
-        for (y <- mapping(x).seq)
-          f(y)
-    }
-    final override protected[this] def viewIdentifier = "N"
-  }
-
-  trait Appended[B >: A] extends Transformed[B] {
-    protected[this] val rest: GenTraversable[B]
-    def foreach[U](f: B => U) {
-      self foreach f
-      rest foreach f
-    }
-    final override protected[this] def viewIdentifier = "A"
-  }
-
-  trait Filtered extends Transformed[A] {
-    protected[this] val pred: A => Boolean
-    def foreach[U](f: A => U) {
-      for (x <- self)
-        if (pred(x)) f(x)
-    }
-    final override protected[this] def viewIdentifier = "F"
-  }
-
-  trait TakenWhile extends Transformed[A] {
-    protected[this] val pred: A => Boolean
-    def foreach[U](f: A => U) {
-      for (x <- self) {
-        if (!pred(x)) return
-        f(x)
-      }
-    }
-    final override protected[this] def viewIdentifier = "T"
-  }
-
-  trait DroppedWhile extends Transformed[A] {
-    protected[this] val pred: A => Boolean
-    def foreach[U](f: A => U) {
-      var go = false
-      for (x <- self) {
-        if (!go && !pred(x)) go = true
-        if (go) f(x)
-      }
-    }
-    final override protected[this] def viewIdentifier = "D"
-  }
-
-}
-
-
diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala
index 2de0043..1a33026 100644
--- a/src/library/scala/collection/IndexedSeq.scala
+++ b/src/library/scala/collection/IndexedSeq.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 
 import generic._
 import mutable.Builder
@@ -28,14 +27,13 @@ trait IndexedSeq[+A] extends Seq[A]
  *  @define coll indexed sequence
  *  @define Coll `IndexedSeq`
  */
-object IndexedSeq extends SeqFactory[IndexedSeq] {
+object IndexedSeq extends IndexedSeqFactory[IndexedSeq] {
   // A single CBF which can be checked against to identify
   // an indexed collection type.
-  override lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
+  override val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
     override def apply() = newBuilder[Nothing]
   }
   def newBuilder[A]: Builder[A, IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A]
   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] =
     ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
 }
-
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index 9d0e9cb..18c9175 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -6,9 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
-import generic._
 import mutable.ArrayBuffer
 import scala.annotation.tailrec
 
@@ -53,7 +53,6 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
   // pre: start >= 0, end <= self.length
   @SerialVersionUID(1756321872811029277L)
   protected class Elements(start: Int, end: Int) extends AbstractIterator[A] with BufferedIterator[A] with Serializable {
-    private def initialSize = if (end <= start) 0 else end - start
     private var index = start
     private def available = (end - index) max 0
 
@@ -61,7 +60,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
 
     def next(): A = {
       if (index >= end)
-        Iterator.empty.next
+        Iterator.empty.next()
 
       val x = self(index)
       index += 1
@@ -70,7 +69,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
 
     def head = {
       if (index >= end)
-        Iterator.empty.next
+        Iterator.empty.next()
 
       self(index)
     }
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index 09c4b14..ade04e4 100755
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -33,11 +33,17 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
     while (i < len) { f(this(i)); i += 1 }
   }
 
+  private def prefixLengthImpl(p: A => Boolean, expectTrue: Boolean): Int = {
+    var i = 0
+    while (i < length && p(apply(i)) == expectTrue) i += 1
+    i
+  }
+
   override /*IterableLike*/
-  def forall(p: A => Boolean): Boolean = prefixLength(p(_)) == length
+  def forall(p: A => Boolean): Boolean = prefixLengthImpl(p, expectTrue = true) == length
 
   override /*IterableLike*/
-  def exists(p: A => Boolean): Boolean = prefixLength(!p(_)) != length
+  def exists(p: A => Boolean): Boolean = prefixLengthImpl(p, expectTrue = false) != length
 
   override /*IterableLike*/
   def find(p: A => Boolean): Option[A] = {
@@ -82,7 +88,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
         b += ((this(i), that(i).asInstanceOf[B]))
         i += 1
       }
-      b.result
+      b.result()
     case _ =>
       super.zip[A1, B, That](that)(bf)
   }
@@ -97,7 +103,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
       b += ((this(i), i))
       i += 1
     }
-    b.result
+    b.result()
   }
 
   override /*IterableLike*/
@@ -113,7 +119,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
       b += self(i)
       i += 1
     }
-    b.result
+    b.result()
   }
 
   override /*IterableLike*/
@@ -214,7 +220,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
       i -= 1
       b += this(i)
     }
-    b.result
+    b.result()
   }
 
   override /*SeqLike*/
@@ -225,7 +231,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
       if (0 < i) {
         i -= 1
         self(i)
-      } else Iterator.empty.next
+      } else Iterator.empty.next()
   }
 
   override /*SeqLike*/
diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala
index 5b73d72..a5ab8ef 100644
--- a/src/library/scala/collection/Iterable.scala
+++ b/src/library/scala/collection/Iterable.scala
@@ -8,10 +8,10 @@
 
 
 
-package scala.collection
+package scala
+package collection
 
 import generic._
-import scala.util.control.Breaks._
 import mutable.Builder
 
 /** A base trait for iterable collections.
@@ -51,4 +51,4 @@ object Iterable extends TraversableFactory[Iterable] {
 }
 
 /** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */
-private[scala] abstract class AbstractIterable[+A] extends AbstractTraversable[A] with Iterable[A]
+abstract class AbstractIterable[+A] extends AbstractTraversable[A] with Iterable[A]
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 540bd84..91ab1f6 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -83,18 +83,34 @@ self =>
     iterator.foldRight(z)(op)
   override /*TraversableLike*/ def reduceRight[B >: A](op: (A, B) => B): B =
     iterator.reduceRight(op)
+    
+  
+  /** Returns this $coll as an iterable collection.
+   *
+   *  A new collection will not be built; lazy collections will stay lazy.
+   *
+   *  $willNotTerminateInf
+   *  @return an `Iterable` containing all elements of this $coll.
+   */
   override /*TraversableLike*/ def toIterable: Iterable[A] =
     thisCollection
-  override /*TraversableLike*/ def toIterator: Iterator[A] =
-    iterator
+  
+  /** Returns an Iterator over the elements in this $coll.  Produces the same
+   *  result as `iterator`.
+   *  $willNotTerminateInf
+   *  @return an Iterator containing all elements of this $coll.
+   */
+  @deprecatedOverriding("toIterator should stay consistent with iterator for all Iterables: override iterator instead.", "2.11.0")
+  override def toIterator: Iterator[A] = iterator
+  
   override /*TraversableLike*/ def head: A =
-    iterator.next
+    iterator.next()
 
   override /*TraversableLike*/ def slice(from: Int, until: Int): Repr = {
     val lo = math.max(from, 0)
     val elems = until - lo
     val b = newBuilder
-    if (elems <= 0) b.result
+    if (elems <= 0) b.result()
     else {
       b.sizeHintBounded(elems, this)
       var i = 0
@@ -103,14 +119,14 @@ self =>
         b += it.next
         i += 1
       }
-      b.result
+      b.result()
     }
   }
 
   override /*TraversableLike*/ def take(n: Int): Repr = {
     val b = newBuilder
 
-    if (n <= 0) b.result
+    if (n <= 0) b.result()
     else {
       b.sizeHintBounded(n, this)
       var i = 0
@@ -119,7 +135,7 @@ self =>
         b += it.next
         i += 1
       }
-      b.result
+      b.result()
     }
   }
 
@@ -130,21 +146,21 @@ self =>
     var i = 0
     val it = iterator
     while (i < n && it.hasNext) {
-      it.next
+      it.next()
       i += 1
     }
-    (b ++= it).result
+    (b ++= it).result()
   }
 
   override /*TraversableLike*/ def takeWhile(p: A => Boolean): Repr = {
     val b = newBuilder
     val it = iterator
     while (it.hasNext) {
-      val x = it.next
-      if (!p(x)) return b.result
+      val x = it.next()
+      if (!p(x)) return b.result()
       b += x
     }
-    b.result
+    b.result()
   }
 
   /** Partitions elements in fixed size ${coll}s.
@@ -152,13 +168,13 @@ self =>
    *
    *  @param size the number of elements per group
    *  @return An iterator producing ${coll}s of size `size`, except the
-   *          last will be truncated if the elements don't divide evenly.
+   *          last will be less than size `size` if the elements don't divide evenly.
    */
   def grouped(size: Int): Iterator[Repr] =
     for (xs <- iterator grouped size) yield {
       val b = newBuilder
       b ++= xs
-      b.result
+      b.result()
     }
 
   /** Groups elements in fixed size blocks by passing a "sliding window"
@@ -171,7 +187,7 @@ self =>
    *          fewer elements than size.
    */
   def sliding(size: Int): Iterator[Repr] = sliding(size, 1)
-  
+
   /** Groups elements in fixed size blocks by passing a "sliding window"
    *  over them (as opposed to partitioning them, as is done in grouped.)
    *  @see [[scala.collection.Iterator]], method `sliding`
@@ -187,7 +203,7 @@ self =>
     for (xs <- iterator.sliding(size, step)) yield {
       val b = newBuilder
       b ++= xs
-      b.result
+      b.result()
     }
 
   /** Selects last ''n'' elements.
@@ -202,12 +218,12 @@ self =>
     b.sizeHintBounded(n, this)
     val lead = this.iterator drop n
     var go = false
-    for (x <- this.seq) {
-      if (lead.hasNext) lead.next
+    for (x <- this) {
+      if (lead.hasNext) lead.next()
       else go = true
       if (go) b += x
     }
-    b.result
+    b.result()
   }
 
   /** Selects all elements except last ''n'' ones.
@@ -224,9 +240,9 @@ self =>
     val it = iterator
     while (lead.hasNext) {
       b += it.next
-      lead.next
+      lead.next()
     }
-    b.result
+    b.result()
   }
 
   override /*TraversableLike*/ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
@@ -234,7 +250,7 @@ self =>
     val end = (start + len) min xs.length
     val it = iterator
     while (i < end && it.hasNext) {
-      xs(i) = it.next
+      xs(i) = it.next()
       i += 1
     }
   }
@@ -244,8 +260,8 @@ self =>
     val these = this.iterator
     val those = that.iterator
     while (these.hasNext && those.hasNext)
-      b += ((these.next, those.next))
-    b.result
+      b += ((these.next(), those.next()))
+    b.result()
   }
 
   def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
@@ -253,12 +269,12 @@ self =>
     val these = this.iterator
     val those = that.iterator
     while (these.hasNext && those.hasNext)
-      b += ((these.next, those.next))
+      b += ((these.next(), those.next()))
     while (these.hasNext)
-      b += ((these.next, thatElem))
+      b += ((these.next(), thatElem))
     while (those.hasNext)
-      b += ((thisElem, those.next))
-    b.result
+      b += ((thisElem, those.next()))
+    b.result()
   }
 
   def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = {
@@ -268,7 +284,7 @@ self =>
       b += ((x, i))
       i +=1
     }
-    b.result
+    b.result()
   }
 
   def sameElements[B >: A](that: GenIterable[B]): Boolean = {
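
For orientation, a minimal sketch of the documented behaviour of `grouped`, `takeRight` and `dropRight` touched above, assuming a Scala 2.11 REPL (expected results shown in comments):

    val xs = (1 to 7).toList
    xs.grouped(3).toList   // List(List(1, 2, 3), List(4, 5, 6), List(7)) -- the last group may be smaller than `size`
    xs.takeRight(2)        // List(6, 7)
    xs.dropRight(2)        // List(1, 2, 3, 4, 5)
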
diff --git a/src/library/scala/collection/IterableProxy.scala b/src/library/scala/collection/IterableProxy.scala
index 2d04192..3a0e2ab 100644
--- a/src/library/scala/collection/IterableProxy.scala
+++ b/src/library/scala/collection/IterableProxy.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
-
-import generic._
+package scala
+package collection
 
 /** This trait implements a proxy for iterable objects. It forwards all calls
  *  to a different iterable object.
diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala
index 6968a54..90e630e 100644
--- a/src/library/scala/collection/IterableProxyLike.scala
+++ b/src/library/scala/collection/IterableProxyLike.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 import mutable.Buffer
@@ -22,6 +23,7 @@ import mutable.Buffer
  *  @version 2.8
  *  @since   2.8
  */
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
 trait IterableProxyLike[+A, +Repr <: IterableLike[A, Repr] with Iterable[A]]
     extends IterableLike[A, Repr]
     with TraversableProxyLike[A, Repr] {
diff --git a/src/library/scala/collection/IterableView.scala b/src/library/scala/collection/IterableView.scala
index 985556e..b5f424d 100644
--- a/src/library/scala/collection/IterableView.scala
+++ b/src/library/scala/collection/IterableView.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 import TraversableView.NoBuilder
@@ -16,7 +17,7 @@ import TraversableView.NoBuilder
 /** A base trait for non-strict views of `Iterable`s.
  *  $iterableViewInfo
  */
-trait IterableView[+A, +Coll] extends IterableViewLike[A, Coll, IterableView[A, Coll]] with GenIterableView[A, Coll]
+trait IterableView[+A, +Coll] extends IterableViewLike[A, Coll, IterableView[A, Coll]]
 
 /** An object containing the necessary implicit definitions to make
  *  `IterableView`s work. Its definitions are generally not accessed directly by clients.
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index 3a81a34..668190f 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -6,10 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 import generic._
-import TraversableView.NoBuilder
 import immutable.Stream
 import scala.language.implicitConversions
 
@@ -34,39 +34,68 @@ trait IterableViewLike[+A,
         with IterableLike[A, This]
         with TraversableView[A, Coll]
         with TraversableViewLike[A, Coll, This]
-        with GenIterableViewLike[A, Coll, This]
 { self =>
 
-  trait Transformed[+B] extends IterableView[B, Coll] with super[TraversableViewLike].Transformed[B] with super[GenIterableViewLike].Transformed[B] {
+  /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
+  private[collection] abstract class AbstractTransformed[+B] extends Iterable[B] with super[TraversableViewLike].Transformed[B] with Transformed[B]
+
+  trait Transformed[+B] extends IterableView[B, Coll] with super.Transformed[B] {
     def iterator: Iterator[B]
     override def foreach[U](f: B => U): Unit = iterator foreach f
     override def toString = viewToString
+    override def isEmpty = !iterator.hasNext
   }
 
-  /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
-  private[collection] abstract class AbstractTransformed[+B] extends Iterable[B] with super[TraversableViewLike].Transformed[B] with Transformed[B]
-
-  trait EmptyView extends Transformed[Nothing] with super[TraversableViewLike].EmptyView with super[GenIterableViewLike].EmptyView
+  trait EmptyView extends Transformed[Nothing] with super.EmptyView {
+    final def iterator: Iterator[Nothing] = Iterator.empty
+  }
 
-  trait Forced[B] extends super[TraversableViewLike].Forced[B] with super[GenIterableViewLike].Forced[B] with Transformed[B]
+  trait Forced[B] extends super.Forced[B] with Transformed[B] {
+    def iterator = forced.iterator
+  }
 
-  trait Sliced extends super[TraversableViewLike].Sliced with super[GenIterableViewLike].Sliced with Transformed[A]
+  trait Sliced extends super.Sliced with Transformed[A] {
+    def iterator: Iterator[A] = self.iterator.slice(from, until)
+  }
 
-  trait Mapped[B] extends super[TraversableViewLike].Mapped[B] with super[GenIterableViewLike].Mapped[B] with Transformed[B]
+  trait Mapped[B] extends super.Mapped[B] with Transformed[B] {
+    def iterator = self.iterator map mapping
+  }
 
-  trait FlatMapped[B] extends super[TraversableViewLike].FlatMapped[B] with super[GenIterableViewLike].FlatMapped[B] with Transformed[B]
+  trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] {
+    def iterator: Iterator[B] = self.iterator flatMap mapping
+  }
 
-  trait Appended[B >: A] extends super[TraversableViewLike].Appended[B] with super[GenIterableViewLike].Appended[B] with Transformed[B]
+  trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
+    def iterator = self.iterator ++ rest
+  }
 
-  trait Filtered extends super[TraversableViewLike].Filtered with super[GenIterableViewLike].Filtered with Transformed[A]
+  trait Filtered extends super.Filtered with Transformed[A] {
+    def iterator = self.iterator filter pred
+  }
 
-  trait TakenWhile extends super[TraversableViewLike].TakenWhile with super[GenIterableViewLike].TakenWhile with Transformed[A]
+  trait TakenWhile extends super.TakenWhile with Transformed[A] {
+    def iterator = self.iterator takeWhile pred
+  }
 
-  trait DroppedWhile extends super[TraversableViewLike].DroppedWhile with super[GenIterableViewLike].DroppedWhile with Transformed[A]
+  trait DroppedWhile extends super.DroppedWhile with Transformed[A] {
+    def iterator = self.iterator dropWhile pred
+  }
 
-  trait Zipped[B] extends Transformed[(A, B)] with super[GenIterableViewLike].Zipped[B]
+  trait Zipped[B] extends Transformed[(A, B)] {
+    protected[this] val other: GenIterable[B]
+    def iterator: Iterator[(A, B)] = self.iterator zip other.iterator
+    final override protected[this] def viewIdentifier = "Z"
+  }
 
-  trait ZippedAll[A1 >: A, B] extends Transformed[(A1, B)] with super[GenIterableViewLike].ZippedAll[A1, B]
+  trait ZippedAll[A1 >: A, B] extends Transformed[(A1, B)] {
+    protected[this] val other: GenIterable[B]
+    protected[this] val thisElem: A1
+    protected[this] val thatElem: B
+    final override protected[this] def viewIdentifier = "Z"
+    def iterator: Iterator[(A1, B)] =
+      self.iterator.zipAll(other.iterator, thisElem, thatElem)
+  }
 
   private[this] implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This]
 
@@ -117,5 +146,14 @@ trait IterableViewLike[+A,
   override def sliding(size: Int, step: Int): Iterator[This] =
     self.iterator.sliding(size, step) map (x => newForced(x).asInstanceOf[This])
 
+  override def sliding(size: Int): Iterator[This] =
+    sliding(size, 1) // we could inherit this, but that implies knowledge of the way the super class is implemented.
+
+  override def dropRight(n: Int): This =
+    take(thisSeq.length - n)
+
+  override def takeRight(n: Int): This =
+    drop(thisSeq.length - n)
+
   override def stringPrefix = "IterableView"
 }
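
A minimal sketch of the new view overrides above, assuming a Scala 2.11 REPL; note that both `takeRight` and `dropRight` use `thisSeq.length`, so they force the length of the underlying sequence:

    val v = List(1, 2, 3, 4, 5).view
    v.dropRight(2).toList  // List(1, 2, 3)
    v.takeRight(2).toList  // List(4, 5)
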
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 2bb5bd1..1b49638 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -161,6 +161,41 @@ object Iterator {
     def hasNext = true
     def next = elem
   }
+
+  /** Avoid stack overflows when applying ++ to lots of iterators by
+   *  flattening the unevaluated iterators out into a vector of closures.
+   */
+  private[scala] final class ConcatIterator[+A](private[this] var current: Iterator[A], initial: Vector[() => Iterator[A]]) extends Iterator[A] {
+    @deprecated def this(initial: Vector[() => Iterator[A]]) = this(Iterator.empty, initial) // for binary compatibility
+    private[this] var queue: Vector[() => Iterator[A]] = initial
+    // Advance current to the next non-empty iterator
+    // current is set to null when all iterators are exhausted
+    private[this] def advance(): Boolean = {
+      if (queue.isEmpty) {
+        current = null
+        false
+      }
+      else {
+        current = queue.head()
+        queue = queue.tail
+        current.hasNext || advance()
+      }
+    }
+    def hasNext = (current ne null) && (current.hasNext || advance())
+    def next()  = if (hasNext) current.next else Iterator.empty.next
+
+    override def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] =
+      new ConcatIterator(current, queue :+ (() => that.toIterator))
+  }
+
+  private[scala] final class JoinIterator[+A](lhs: Iterator[A], that: => GenTraversableOnce[A]) extends Iterator[A] {
+    private[this] lazy val rhs: Iterator[A] = that.toIterator
+    def hasNext = lhs.hasNext || rhs.hasNext
+    def next    = if (lhs.hasNext) lhs.next else rhs.next
+
+    override def ++[B >: A](that: => GenTraversableOnce[B]) =
+      new ConcatIterator(this, Vector(() => that.toIterator))
+  }
 }
 
 import Iterator.empty
@@ -338,24 +373,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
    *  @usecase def ++(that: => Iterator[A]): Iterator[A]
    *    @inheritdoc
    */
-  def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] {
-    // optimize a little bit to prevent n log n behavior.
-    private var cur : Iterator[B] = self
-    private var selfExhausted : Boolean = false
-    // since that is by-name, make sure it's only referenced once -
-    // if "val it = that" is inside the block, then hasNext on an empty
-    // iterator will continually reevaluate it.  (ticket #3269)
-    lazy val it = that.toIterator
-    // the eq check is to avoid an infinite loop on "x ++ x"
-    def hasNext = cur.hasNext || (!selfExhausted && {
-      it.hasNext && {
-        cur = it
-        selfExhausted = true
-        true
-      }
-    })
-    def next() = { hasNext; cur.next() }
-  }
+  def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new Iterator.JoinIterator(self, that)
 
   /** Creates a new iterator by applying a function to all values produced by this iterator
    *  and concatenating the results.
@@ -368,7 +386,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
   def flatMap[B](f: A => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] {
     private var cur: Iterator[B] = empty
     def hasNext: Boolean =
-      cur.hasNext || self.hasNext && { cur = f(self.next).toIterator; hasNext }
+      cur.hasNext || self.hasNext && { cur = f(self.next()).toIterator; hasNext }
     def next(): B = (if (hasNext) cur else empty).next()
   }
 
@@ -408,7 +426,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
   def corresponds[B](that: GenTraversableOnce[B])(p: (A, B) => Boolean): Boolean = {
     val that0 = that.toIterator
     while (hasNext && that0.hasNext)
-      if (!p(next, that0.next)) return false
+      if (!p(next(), that0.next())) return false
 
     hasNext == that0.hasNext
   }
@@ -555,14 +573,13 @@ trait Iterator[+A] extends TraversableOnce[A] {
   def span(p: A => Boolean): (Iterator[A], Iterator[A]) = {
     val self = buffered
 
-    /**
+    /*
      * Giving a name to following iterator (as opposed to trailing) because
      * anonymous class is represented as a structural type that trailing
      * iterator is referring (the finish() method) and thus triggering
      * handling of structural calls. It's not what's intended here.
      */
     class Leading extends AbstractIterator[A] {
-      private var isDone = false
       val lookahead = new mutable.Queue[A]
       def advance() = {
         self.hasNext && p(self.head) && {
@@ -572,7 +589,6 @@ trait Iterator[+A] extends TraversableOnce[A] {
       }
       def finish() = {
         while (advance()) ()
-        isDone = true
       }
       def hasNext = lookahead.nonEmpty || advance()
       def next() = {
@@ -632,7 +648,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
    */
   def zip[B](that: Iterator[B]): Iterator[(A, B)] = new AbstractIterator[(A, B)] {
     def hasNext = self.hasNext && that.hasNext
-    def next = (self.next, that.next)
+    def next = (self.next(), that.next())
   }
 
   /** Appends an element value to this iterator until a given target length is reached.
@@ -652,9 +668,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
     def hasNext = self.hasNext || count < len
     def next = {
       count += 1
-      if (self.hasNext) self.next
+      if (self.hasNext) self.next()
       else if (count <= len) elem
-      else empty.next
+      else empty.next()
     }
   }
 
@@ -669,7 +685,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
     var idx = 0
     def hasNext = self.hasNext
     def next = {
-      val ret = (self.next, idx)
+      val ret = (self.next(), idx)
       idx += 1
       ret
     }
@@ -1054,12 +1070,12 @@ trait Iterator[+A] extends TraversableOnce[A] {
           val e = self.next()
           gap enqueue e
           e
-        } else gap.dequeue
+        } else gap.dequeue()
       }
       // to verify partnerhood we use reference equality on gap because
       // type testing does not discriminate based on origin.
       private def compareGap(queue: scala.collection.mutable.Queue[A]) = gap eq queue
-      override def hashCode = gap.hashCode
+      override def hashCode = gap.hashCode()
       override def equals(other: Any) = other match {
         case x: Partner   => x.compareGap(gap) && gap.isEmpty
         case _            => super.equals(other)
@@ -1118,6 +1134,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
       xs(i) = next()
       i += 1
     }
+    // TODO: return i - start so the caller knows how many values read?
   }
 
   /** Tests if another iterator produces the same values as this one.
@@ -1140,7 +1157,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
   def toTraversable: Traversable[A] = toStream
   def toIterator: Iterator[A] = self
   def toStream: Stream[A] =
-    if (self.hasNext) Stream.cons(self.next, self.toStream)
+    if (self.hasNext) Stream.cons(self.next(), self.toStream)
     else Stream.empty[A]
 
 
@@ -1154,4 +1171,4 @@ trait Iterator[+A] extends TraversableOnce[A] {
 }
 
 /** Explicit instantiation of the `Iterator` trait to reduce class file size in subclasses. */
-private[scala] abstract class AbstractIterator[+A] extends Iterator[A]
+abstract class AbstractIterator[+A] extends Iterator[A]
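
A minimal sketch of why the rewritten `++` above matters, assuming a Scala 2.11 REPL: chaining many concatenations now accumulates a flat vector of closures in `ConcatIterator` instead of nesting one wrapper iterator per `++`:

    // 100000 chained ++ calls accumulate closures in a flat Vector rather than nested iterators
    val it = (0 until 100000).foldLeft(Iterator.empty: Iterator[Int])((acc, i) => acc ++ Iterator(i))
    it.size  // 100000
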
diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala
index 59d4259..7bfa607 100644
--- a/src/library/scala/collection/JavaConversions.scala
+++ b/src/library/scala/collection/JavaConversions.scala
@@ -6,9 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
-import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
 import convert._
 
 /**   A collection of implicit conversions supporting interoperability between
@@ -22,7 +22,6 @@ import convert._
  *    scala.collection.mutable.Buffer <=> java.util.List
  *    scala.collection.mutable.Set <=> java.util.Set
  *    scala.collection.mutable.Map <=> java.util.{ Map, Dictionary }
- *    scala.collection.mutable.ConcurrentMap (deprecated since 2.10) <=> java.util.concurrent.ConcurrentMap
  *    scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap
  *}}}
  *    In all cases, converting from a source type to a target type and back
@@ -50,83 +49,4 @@ import convert._
  *  @author Martin Odersky
  *  @since  2.8
  */
-object JavaConversions extends WrapAsScala with WrapAsJava {
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type ConcurrentMapWrapper[A, B]  = Wrappers.ConcurrentMapWrapper[A, B]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type DictionaryWrapper[A, B]     = Wrappers.DictionaryWrapper[A, B]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type IterableWrapper[A]          = Wrappers.IterableWrapper[A]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type IteratorWrapper[A]          = Wrappers.IteratorWrapper[A]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JCollectionWrapper[A]       = Wrappers.JCollectionWrapper[A]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JConcurrentMapWrapper[A, B] = Wrappers.JConcurrentMapWrapper[A, B]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JDictionaryWrapper[A, B]    = Wrappers.JDictionaryWrapper[A, B]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JEnumerationWrapper[A]      = Wrappers.JEnumerationWrapper[A]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JIterableWrapper[A]         = Wrappers.JIterableWrapper[A]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JIteratorWrapper[A]         = Wrappers.JIteratorWrapper[A]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JListWrapper[A]             = Wrappers.JListWrapper[A]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JMapWrapper[A, B]           = Wrappers.JMapWrapper[A, B]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JPropertiesWrapper          = Wrappers.JPropertiesWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JSetWrapper[A]              = Wrappers.JSetWrapper[A]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MapWrapper[A, B]            = Wrappers.MapWrapper[A, B]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableBufferWrapper[A]     = Wrappers.MutableBufferWrapper[A]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableMapWrapper[A, B]     = Wrappers.MutableMapWrapper[A, B]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableSeqWrapper[A]        = Wrappers.MutableSeqWrapper[A]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableSetWrapper[A]        = Wrappers.MutableSetWrapper[A]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type SeqWrapper[A]               = Wrappers.SeqWrapper[A]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type SetWrapper[A]               = Wrappers.SetWrapper[A]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type ToIteratorWrapper[A]        = Wrappers.ToIteratorWrapper[A]
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val DictionaryWrapper            = Wrappers.DictionaryWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val IterableWrapper              = Wrappers.IterableWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val IteratorWrapper              = Wrappers.IteratorWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JCollectionWrapper           = Wrappers.JCollectionWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JConcurrentMapWrapper        = Wrappers.JConcurrentMapWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JDictionaryWrapper           = Wrappers.JDictionaryWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JEnumerationWrapper          = Wrappers.JEnumerationWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JIterableWrapper             = Wrappers.JIterableWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JIteratorWrapper             = Wrappers.JIteratorWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JListWrapper                 = Wrappers.JListWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JMapWrapper                  = Wrappers.JMapWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JPropertiesWrapper           = Wrappers.JPropertiesWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JSetWrapper                  = Wrappers.JSetWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableBufferWrapper         = Wrappers.MutableBufferWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableMapWrapper            = Wrappers.MutableMapWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSeqWrapper            = Wrappers.MutableSeqWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSetWrapper            = Wrappers.MutableSetWrapper
-  @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val SeqWrapper                   = Wrappers.SeqWrapper
-
-  // Note to implementors: the cavalcade of deprecated methods herein should
-  // serve as a warning to any who follow: don't overload implicit methods.
-
-  @deprecated("use bufferAsJavaList instead", "2.9.0")
-  def asJavaList[A](b : mutable.Buffer[A]): ju.List[A] = bufferAsJavaList[A](b)
-
-  @deprecated("use mutableSeqAsJavaList instead", "2.9.0")
-  def asJavaList[A](b : mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList[A](b)
-
-  @deprecated("use seqAsJavaList instead", "2.9.0")
-  def asJavaList[A](b : Seq[A]): ju.List[A] = seqAsJavaList[A](b)
-
-  @deprecated("use mutableSetAsJavaSet instead", "2.9.0")
-  def asJavaSet[A](s : mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet[A](s)
-
-  @deprecated("use setAsJavaSet instead", "2.9.0")
-  def asJavaSet[A](s: Set[A]): ju.Set[A] = setAsJavaSet[A](s)
-
-  @deprecated("use mutableMapAsJavaMap instead", "2.9.0")
-  def asJavaMap[A, B](m : mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap[A, B](m)
-
-  @deprecated("use mapAsJavaMap instead", "2.9.0")
-  def asJavaMap[A, B](m : Map[A, B]): ju.Map[A, B] = mapAsJavaMap[A, B](m)
-
-  @deprecated("use iterableAsScalaIterable instead", "2.9.0")
-  def asScalaIterable[A](i : jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable[A](i)
-
-  @deprecated("use collectionAsScalaIterable instead", "2.9.0")
-  def asScalaIterable[A](i : ju.Collection[A]): Iterable[A] = collectionAsScalaIterable[A](i)
-
-  @deprecated("use mapAsScalaMap instead", "2.9.0")
-  def asScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap[A, B](m)
-
-  @deprecated("use propertiesAsScalaMap instead", "2.9.0")
-  def asScalaMap(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p)
-}
-
-
+object JavaConversions extends WrapAsScala with WrapAsJava
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
index ab3ac89..a4fa58b 100755
--- a/src/library/scala/collection/JavaConverters.scala
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -6,16 +6,15 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
-import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
 import convert._
 
 // TODO: I cleaned all this documentation up in JavaConversions, but the
 // documentation in here is basically the pre-cleaned-up version with minor
 // additions.  Would be nice to have in one place.
 
-
 /** A collection of decorators that allow converting between
  *  Scala and Java collections using `asScala` and `asJava` methods.
  *
@@ -26,7 +25,7 @@ import convert._
  *  - `scala.collection.mutable.Buffer` <=> `java.util.List`
  *  - `scala.collection.mutable.Set` <=> `java.util.Set`
  *  - `scala.collection.mutable.Map` <=> `java.util.Map`
- *  - `scala.collection.mutable.ConcurrentMap` <=> `java.util.concurrent.ConcurrentMap`
+ *  - `scala.collection.mutable.concurrent.Map` <=> `java.util.concurrent.ConcurrentMap`
  *
  *  In all cases, converting from a source type to a target type and back
  *  again will return the original source object, e.g.
@@ -56,48 +55,4 @@ import convert._
  *  @author Martin Odersky
  *  @since  2.8.1
  */
-object JavaConverters extends DecorateAsJava with DecorateAsScala {
-  @deprecated("Don't access these decorators directly.", "2.10.0")
-  type AsJava[A]            = Decorators.AsJava[A]
-  @deprecated("Don't access these decorators directly.", "2.10.0")
-  type AsScala[A]           = Decorators.AsScala[A]
-  @deprecated("Don't access these decorators directly.", "2.10.0")
-  type AsJavaCollection[A]  = Decorators.AsJavaCollection[A]
-  @deprecated("Don't access these decorators directly.", "2.10.0")
-  type AsJavaEnumeration[A] = Decorators.AsJavaEnumeration[A]
-  @deprecated("Don't access these decorators directly.", "2.10.0")
-  type AsJavaDictionary[A, B]  = Decorators.AsJavaDictionary[A, B]
-
-  @deprecated("Use bufferAsJavaListConverter instead", "2.9.0")
-  def asJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = bufferAsJavaListConverter(b)
-
-  @deprecated("Use mutableSeqAsJavaListConverter instead", "2.9.0")
-  def asJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = mutableSeqAsJavaListConverter(b)
-
-  @deprecated("Use seqAsJavaListConverter instead", "2.9.0")
-  def asJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = seqAsJavaListConverter(b)
-
-  @deprecated("Use mutableSetAsJavaSetConverter instead", "2.9.0")
-  def asJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = mutableSetAsJavaSetConverter(s)
-
-  @deprecated("Use setAsJavaSetConverter instead", "2.9.0")
-  def asJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = setAsJavaSetConverter(s)
-
-  @deprecated("use mutableMapAsJavaMapConverter instead", "2.9.0")
-  def asJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] = mutableMapAsJavaMapConverter(m)
-
-  @deprecated("Use mapAsJavaMapConverter instead", "2.9.0")
-  def asJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] = mapAsJavaMapConverter(m)
-
-  @deprecated("Use iterableAsScalaIterableConverter instead", "2.9.0")
-  def asScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = iterableAsScalaIterableConverter(i)
-
-  @deprecated("Use collectionAsScalaIterableConverter instead", "2.9.0")
-  def asScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = collectionAsScalaIterableConverter(i)
-
-  @deprecated("Use mapAsScalaMapConverter instead", "2.9.0")
-  def asScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] = mapAsScalaMapConverter(m)
-
-  @deprecated("Use propertiesAsScalaMapConverter instead", "2.9.0")
-  def asScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = propertiesAsScalaMapConverter(p)
-}
+object JavaConverters extends DecorateAsJava with DecorateAsScala
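
With the deprecated 2.9-era aliases gone, the supported entry point is the `asScala`/`asJava` decorators; a minimal usage sketch, assuming a Scala 2.11 REPL:

    import scala.collection.JavaConverters._

    val jList: java.util.List[Int] = Seq(1, 2, 3).asJava
    val sBuf: scala.collection.mutable.Buffer[Int] = jList.asScala
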
diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala
index e52a193..1e4975a 100644
--- a/src/library/scala/collection/LinearSeq.scala
+++ b/src/library/scala/collection/LinearSeq.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 import mutable.Builder
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index 78108a9..ff7985b 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -6,13 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
+package scala
+package collection
 
-package scala.collection
-
-import generic._
-import mutable.ListBuffer
 import immutable.List
-import scala.util.control.Breaks._
 import scala.annotation.tailrec
 
 /** A template trait for linear sequences of type `LinearSeq[A]`.
@@ -59,14 +56,14 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
     def next(): A =
       if (hasNext) {
         val result = these.head; these = these.tail; result
-      } else Iterator.empty.next
+      } else Iterator.empty.next()
 
     /** Have to clear `these` so the iterator is exhausted like
      *  it would be without the optimization.
      */
     override def toList: List[A] = {
       val xs = these.toList
-      these = newBuilder.result
+      these = newBuilder.result()
       xs
     }
   }
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
index 81cccea..8635b09 100755
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -6,12 +6,11 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
-import generic._
 import mutable.ListBuffer
 import immutable.List
-import scala.util.control.Breaks._
 import scala.annotation.tailrec
 
 /** A template trait for linear sequences of type `LinearSeq[A]`  which optimizes
@@ -84,7 +83,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
   }
 
   override /*SeqLike*/
-  def contains(elem: Any): Boolean = {
+  def contains[A1 >: A](elem: A1): Boolean = {
     var these = this
     while (!these.isEmpty) {
       if (these.head == elem) return true
@@ -92,7 +91,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
     }
     false
   }
-  
+
   override /*IterableLike*/
   def find(p: A => Boolean): Option[A] = {
     var these = this
@@ -113,7 +112,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
     }
     acc
   }
-  
+
   override /*IterableLike*/
   def foldRight[B](z: B)(f: (A, B) => B): B =
     if (this.isEmpty) z
@@ -152,7 +151,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
       b += these.head
       these = these.tail
     }
-    b.result
+    b.result()
   }
 
   override /*TraversableLike*/
@@ -187,7 +186,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
       these = these.tail
       lead = lead.tail
     }
-    b.result
+    b.result()
   }
 
   override /*IterableLike*/
@@ -195,7 +194,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
     var these: Repr = repr
     var count = from max 0
     if (until <= count)
-      return newBuilder.result
+      return newBuilder.result()
 
     val b = newBuilder
     var sliceElems = until - count
@@ -208,7 +207,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
       b += these.head
       these = these.tail
     }
-    b.result
+    b.result()
   }
 
   override /*IterableLike*/
@@ -219,7 +218,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
       b += these.head
       these = these.tail
     }
-    b.result
+    b.result()
   }
 
   override /*TraversableLike*/
@@ -230,7 +229,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
       b += these.head
       these = these.tail
     }
-    (b.result, these)
+    (b.result(), these)
   }
 
   override /*IterableLike*/
diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala
index 18ad20a..1e40fd8 100644
--- a/src/library/scala/collection/Map.scala
+++ b/src/library/scala/collection/Map.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 
@@ -51,8 +52,8 @@ object Map extends MapFactory[Map] {
     def iterator                    = underlying.iterator
     override def default(key: A): B = d(key)
   }
-  
+
 }
 
 /** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
-private[scala] abstract class AbstractMap[A, +B] extends AbstractIterable[(A, B)] with Map[A, B]
+abstract class AbstractMap[A, +B] extends AbstractIterable[(A, B)] with Map[A, B]
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 93d02a4..5ec7d5c 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -6,8 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
+package scala
+package collection
 
 import generic._
 import mutable.{ Builder, MapBuilder }
@@ -181,7 +181,7 @@ self =>
   def keysIterator: Iterator[A] = new AbstractIterator[A] {
     val iter = self.iterator
     def hasNext = iter.hasNext
-    def next() = iter.next._1
+    def next() = iter.next()._1
   }
 
   /** Collects all keys of this map in an iterable collection.
@@ -213,7 +213,7 @@ self =>
   def valuesIterator: Iterator[B] = new AbstractIterator[B] {
     val iter = self.iterator
     def hasNext = iter.hasNext
-    def next() = iter.next._2
+    def next() = iter.next()._2
   }
 
   /** Defines the default value computation for the map,
diff --git a/src/library/scala/collection/MapProxy.scala b/src/library/scala/collection/MapProxy.scala
index e85d306..941c1f5 100644
--- a/src/library/scala/collection/MapProxy.scala
+++ b/src/library/scala/collection/MapProxy.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 /** This is a simple wrapper class for [[scala.collection.Map]].
  *  It is most useful for assembling customized map abstractions
diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala
index 44b39f6..dd80a53 100644
--- a/src/library/scala/collection/MapProxyLike.scala
+++ b/src/library/scala/collection/MapProxyLike.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
-
-import generic._
+package scala
+package collection
 
 // Methods could be printed by  cat MapLike.scala | egrep '^  (override )?def'
 
@@ -19,6 +18,7 @@ import generic._
  *  @version 2.8
  *  @since   2.8
  */
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
 trait MapProxyLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
       extends MapLike[A, B, This]
       with IterableProxyLike[(A, B), This]
diff --git a/src/library/scala/collection/Parallel.scala b/src/library/scala/collection/Parallel.scala
index 6731f74..174e3ab 100644
--- a/src/library/scala/collection/Parallel.scala
+++ b/src/library/scala/collection/Parallel.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 /** A marker trait for collections which have their operations parallelised.
  *
diff --git a/src/library/scala/collection/Parallelizable.scala b/src/library/scala/collection/Parallelizable.scala
index d97c44a..b737752 100644
--- a/src/library/scala/collection/Parallelizable.scala
+++ b/src/library/scala/collection/Parallelizable.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 import parallel.Combiner
 
@@ -39,7 +40,7 @@ trait Parallelizable[+A, +ParRepr <: Parallel] extends Any {
   def par: ParRepr = {
     val cb = parCombiner
     for (x <- seq) cb += x
-    cb.result
+    cb.result()
   }
 
   /** The default `par` implementation uses the combiner provided by this method
diff --git a/src/library/scala/collection/Searching.scala b/src/library/scala/collection/Searching.scala
new file mode 100644
index 0000000..fec4bbf
--- /dev/null
+++ b/src/library/scala/collection/Searching.scala
@@ -0,0 +1,118 @@
+/*                     __                                               *\
+**     ________ ___   / /  ___     Scala API                            **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
+**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
+** /____/\___/_/ |_/____/_/ | |                                         **
+**                          |/                                          **
+\*                                                                      */
+
+package scala
+package collection
+
+import scala.language.implicitConversions
+import scala.annotation.tailrec
+import scala.collection.generic.IsSeqLike
+import scala.math.Ordering
+
+/** A collection of wrappers that provide sequence classes with search functionality.
+  *
+  * Example usage:
+  * {{{
+  *    import scala.collection.Searching._
+  *    val l = List(1, 2, 3, 4, 5)
+  *    l.search(3)
+  *    // == Found(2)
+  * }}}
+  */
+object Searching {
+  sealed abstract class SearchResult {
+    def insertionPoint: Int
+  }
+
+  case class Found(foundIndex: Int) extends SearchResult {
+    override def insertionPoint = foundIndex
+  }
+  case class InsertionPoint(insertionPoint: Int) extends SearchResult
+
+  class SearchImpl[A, Repr](val coll: SeqLike[A, Repr]) {
+    /** Search the sorted sequence for a specific element. If the sequence is an
+      * `IndexedSeq`, a binary search is used. Otherwise, a linear search is used.
+      *
+      * The sequence should be sorted with the same `Ordering` before calling; otherwise,
+      * the results are undefined.
+      *
+      * @see [[scala.collection.IndexedSeq]]
+      * @see [[scala.math.Ordering]]
+      * @see [[scala.collection.SeqLike]], method `sorted`
+      *
+      * @param elem the element to find.
+      * @param ord  the ordering to be used to compare elements.
+      *
+      * @return a `Found` value containing the index corresponding to the element in the
+      *         sequence, or the `InsertionPoint` where the element would be inserted if
+      *         the element is not in the sequence.
+      */
+    final def search[B >: A](elem: B)(implicit ord: Ordering[B]): SearchResult =
+      coll match {
+        case _: IndexedSeq[A] => binarySearch(elem, -1, coll.length)(ord)
+        case _ => linearSearch(coll.view, elem, 0)(ord)
+      }
+
+    /** Search within an interval in the sorted sequence for a specific element. If the
+      * sequence is an IndexedSeq, a binary search is used. Otherwise, a linear search
+      * is used.
+      *
+      * The sequence should be sorted with the same `Ordering` before calling; otherwise,
+      * the results are undefined.
+      *
+      * @see [[scala.collection.IndexedSeq]]
+      * @see [[scala.math.Ordering]]
+      * @see [[scala.collection.SeqLike]], method `sorted`
+      *
+      * @param elem the element to find.
+      * @param from the index where the search starts.
+      * @param to   the index following where the search ends.
+      * @param ord  the ordering to be used to compare elements.
+      *
+      * @return a `Found` value containing the index corresponding to the element in the
+      *         sequence, or the `InsertionPoint` where the element would be inserted if
+      *         the element is not in the sequence.
+      */
+    final def search[B >: A](elem: B, from: Int, to: Int)
+    (implicit ord: Ordering[B]): SearchResult =
+      coll match {
+        case _: IndexedSeq[A] => binarySearch(elem, from-1, to)(ord)
+        case _ => linearSearch(coll.view(from, to), elem, from)(ord)
+      }
+
+    @tailrec
+    private def binarySearch[B >: A](elem: B, from: Int, to: Int)
+    (implicit ord: Ordering[B]): SearchResult = {
+      if ((to-from) == 1) InsertionPoint(from) else {
+        val idx = from+(to-from)/2
+        math.signum(ord.compare(elem, coll(idx))) match {
+          case -1 => binarySearch(elem, from, idx)(ord)
+          case  1 => binarySearch(elem, idx, to)(ord)
+          case  _ => Found(idx)
+        }
+      }
+    }
+
+    private def linearSearch[B >: A](c: SeqView[A, Repr], elem: B, offset: Int)
+    (implicit ord: Ordering[B]): SearchResult = {
+      var idx = offset
+      val it = c.iterator
+      while (it.hasNext) {
+        val cur = it.next()
+        if (ord.equiv(elem, cur)) return Found(idx)
+        else if (ord.lt(elem, cur)) return InsertionPoint(idx-1)
+        idx += 1
+      }
+      InsertionPoint(idx)
+    }
+
+  }
+
+  implicit def search[Repr, A](coll: Repr)
+  (implicit fr: IsSeqLike[Repr]): SearchImpl[fr.A, Repr] = new SearchImpl(fr.conversion(coll))
+}
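
A minimal sketch of the new `search` decorator, assuming a Scala 2.11 REPL; the binary branch is taken for `IndexedSeq`s, the linear one otherwise:

    import scala.collection.Searching._

    Vector(2, 4, 6, 8, 10).search(6)  // Found(2)  (binary search)
    List(2, 4, 6).search(4)           // Found(1)  (linear search)
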
diff --git a/src/library/scala/collection/Seq.scala b/src/library/scala/collection/Seq.scala
index 33e66c0..2f4b3e5 100644
--- a/src/library/scala/collection/Seq.scala
+++ b/src/library/scala/collection/Seq.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 import mutable.Builder
@@ -37,4 +38,4 @@ object Seq extends SeqFactory[Seq] {
 }
 
 /** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses. */
-private[scala] abstract class AbstractSeq[+A] extends AbstractIterable[A] with Seq[A]
+abstract class AbstractSeq[+A] extends AbstractIterable[A] with Seq[A]
diff --git a/src/library/scala/collection/SeqExtractors.scala b/src/library/scala/collection/SeqExtractors.scala
index 20ea7f5..2398313 100644
--- a/src/library/scala/collection/SeqExtractors.scala
+++ b/src/library/scala/collection/SeqExtractors.scala
@@ -1,4 +1,5 @@
-package scala.collection
+package scala
+package collection
 
 /** An extractor used to head/tail deconstruct sequences. */
 object +: {
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 1be0dba..fdfb1f2 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -107,7 +107,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
 
   def segmentLength(p: A => Boolean, from: Int): Int = {
     var i = 0
-    var it = iterator.drop(from)
+    val it = iterator.drop(from)
     while (it.hasNext && p(it.next()))
       i += 1
     i
@@ -115,7 +115,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
 
   def indexWhere(p: A => Boolean, from: Int): Int = {
     var i = from
-    var it = iterator.drop(from)
+    val it = iterator.drop(from)
     while (it.hasNext) {
       if (p(it.next())) return i
       else i += 1
@@ -127,7 +127,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
   def lastIndexWhere(p: A => Boolean, end: Int): Int = {
     var i = length - 1
     val it = reverseIterator
-    while (it.hasNext && { val elem = it.next; (i > end || !p(elem)) }) i -= 1
+    while (it.hasNext && { val elem = it.next(); (i > end || !p(elem)) }) i -= 1
     i
   }
 
@@ -156,10 +156,10 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
     def hasNext = _hasNext
     def next(): Repr = {
       if (!hasNext)
-        Iterator.empty.next
+        Iterator.empty.next()
 
       val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms
-      val result = (self.newBuilder ++= forcedElms).result
+      val result = (self.newBuilder ++= forcedElms).result()
       var i = idxs.length - 2
       while(i >= 0 && idxs(i) >= idxs(i+1))
         i -= 1
@@ -181,10 +181,10 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
       result
     }
     private def swap(i: Int, j: Int) {
-      var tmpI = idxs(i)
+      val tmpI = idxs(i)
       idxs(i) = idxs(j)
       idxs(j) = tmpI
-      var tmpE = elms(i)
+      val tmpE = elms(i)
       elms(i) = elms(j)
       elms(j) = tmpE
     }
@@ -208,15 +208,15 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
     def hasNext = _hasNext
     def next(): Repr = {
       if (!hasNext)
-        Iterator.empty.next
+        Iterator.empty.next()
 
-      /** Calculate this result. */
+      /* Calculate this result. */
       val buf = self.newBuilder
       for(k <- 0 until nums.length; j <- 0 until nums(k))
         buf += elms(offs(k)+j)
-      val res = buf.result
+      val res = buf.result()
 
-      /** Prepare for the next call to next. */
+      /* Prepare for the next call to next. */
       var idx = nums.length - 1
       while (idx >= 0 && nums(idx) == cnts(idx))
         idx -= 1
@@ -268,18 +268,18 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
     b.sizeHint(this)
     for (x <- xs)
       b += x
-    b.result
+    b.result()
   }
 
   def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
     var xs: List[A] = List()
-    for (x <- this.seq)
+    for (x <- this)
       xs = x :: xs
     val b = bf(repr)
     for (x <- xs)
       b += f(x)
 
-    b.result
+    b.result()
   }
 
   /** An iterator yielding elements in reversed order.
@@ -335,7 +335,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
       if (from > l) -1
       else if (tl < 1) clippedFrom
       else if (l < tl) -1
-      else SeqLike.kmpSearch(thisCollection, clippedFrom, l, that.seq, 0, tl, true)
+      else SeqLike.kmpSearch(thisCollection, clippedFrom, l, that.seq, 0, tl, forward = true)
     }
     else {
       var i = from
@@ -372,7 +372,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
     if (end < 0) -1
     else if (tl < 1) clippedL
     else if (l < tl) -1
-    else SeqLike.kmpSearch(thisCollection, 0, clippedL+tl, that.seq, 0, tl, false)
+    else SeqLike.kmpSearch(thisCollection, 0, clippedL+tl, that.seq, 0, tl, forward = false)
   }
 
   /** Tests whether this $coll contains a given sequence as a slice.
@@ -390,7 +390,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
    *  @return     `true` if this $coll has an element that is equal (as
    *              determined by `==`) to `elem`, `false` otherwise.
    */
-  def contains(elem: Any): Boolean = exists (_ == elem)
+  def contains[A1 >: A](elem: A1): Boolean = exists (_ == elem)
 
   /** Produces a new sequence which contains all elements of this $coll and also all elements of
    *  a given sequence. `xs union ys`  is equivalent to `xs ++ ys`.
@@ -442,7 +442,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
     for (x <- this)
       if (occ(x) == 0) b += x
       else occ(x) -= 1
-    b.result
+    b.result()
   }
 
   /** Computes the multiset intersection between this $coll and another sequence.
@@ -473,12 +473,12 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
         b += x
         occ(x) -= 1
       }
-    b.result
+    b.result()
   }
 
   private def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = {
     val occ = new mutable.HashMap[B, Int] { override def default(k: B) = 0 }
-    for (y <- sq.seq) occ(y) += 1
+    for (y <- sq) occ(y) += 1
     occ
   }
 
@@ -496,7 +496,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
         seen += x
       }
     }
-    b.result
+    b.result()
   }
 
   def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
@@ -505,30 +505,33 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
     b ++= toCollection(prefix)
     b ++= patch.seq
     b ++= toCollection(rest).view drop replaced
-    b.result
+    b.result()
   }
 
   def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+    if (index < 0) throw new IndexOutOfBoundsException(index.toString)
     val b = bf(repr)
     val (prefix, rest) = this.splitAt(index)
+    val restColl = toCollection(rest)
+    if (restColl.isEmpty) throw new IndexOutOfBoundsException(index.toString)
     b ++= toCollection(prefix)
     b += elem
-    b ++= toCollection(rest).view.tail
-    b.result
+    b ++= restColl.view.tail
+    b.result()
   }
 
   def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
     val b = bf(repr)
     b += elem
     b ++= thisCollection
-    b.result
+    b.result()
   }
 
   def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
     val b = bf(repr)
     b ++= thisCollection
     b += elem
-    b.result
+    b.result()
   }
 
   def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
@@ -540,14 +543,14 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
       b += elem
       diff -= 1
     }
-    b.result
+    b.result()
   }
 
   def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = {
     val i = this.iterator
     val j = that.iterator
     while (i.hasNext && j.hasNext)
-      if (!p(i.next, j.next))
+      if (!p(i.next(), j.next()))
         return false
 
     !i.hasNext && !j.hasNext
@@ -608,7 +611,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
     val len = this.length
     val arr = new ArraySeq[A](len)
     var i = 0
-    for (x <- this.seq) {
+    for (x <- this) {
       arr(i) = x
       i += 1
     }
@@ -616,13 +619,13 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
     val b = newBuilder
     b.sizeHint(len)
     for (x <- arr) b += x
-    b.result
+    b.result()
   }
 
   /** Converts this $coll to a sequence.
    *  $willNotTerminateInf
    *
-   *  Overridden for efficiency.
+   *  A new collection will not be built; in particular, lazy sequences will stay lazy.
    */
   override def toSeq: Seq[A] = thisCollection
 
@@ -682,7 +685,7 @@ object SeqLike {
         val wit = W.iterator.drop(n0)
         var i = if (forward) 0 else (n1-n0-1)
         while (i != done) {
-          Warr(i) = wit.next.asInstanceOf[AnyRef]
+          Warr(i) = wit.next().asInstanceOf[AnyRef]
           i += delta
         }
 
@@ -778,15 +781,15 @@ object SeqLike {
       case _ =>
         // We had better not index into S directly!
         val iter = S.iterator.drop(m0)
-        val Wopt = kmpOptimizeWord(W, n0, n1, true)
+        val Wopt = kmpOptimizeWord(W, n0, n1, forward = true)
         val T = kmpJumpTable(Wopt, n1-n0)
-        var cache = new Array[AnyRef](n1-n0)  // Ring buffer--need a quick way to do a look-behind
+        val cache = new Array[AnyRef](n1-n0)  // Ring buffer--need a quick way to do a look-behind
         var largest = 0
         var i, m = 0
         var answer = -1
         while (m+m0+n1-n0 <= m1) {
           while (i+m >= largest) {
-            cache(largest%(n1-n0)) = iter.next.asInstanceOf[AnyRef]
+            cache(largest%(n1-n0)) = iter.next().asInstanceOf[AnyRef]
             largest += 1
           }
           if (Wopt(i) == cache((i+m)%(n1-n0))) {
@@ -851,7 +854,7 @@ object SeqLike {
     else if (s1 - s0 < t1 - t0) -1            // Source is too short to find target
     else {
       // Nontrivial search
-      val ans = kmpSearch(source, s0, s1, target, t0, t1, true)
+      val ans = kmpSearch(source, s0, s1, target, t0, t1, forward = true)
       if (ans < 0) ans else ans - math.min(slen, sourceOffset)
     }
   }
@@ -883,7 +886,7 @@ object SeqLike {
     else if (fixed_s1 - s0 < t1 - t0) -1      // Source is too short to find target
     else {
       // Nontrivial search
-      val ans = kmpSearch(source, s0, fixed_s1, target, t0, t1, false)
+      val ans = kmpSearch(source, s0, fixed_s1, target, t0, t1, forward = false)
       if (ans < 0) ans else ans - s0
     }
   }
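
Note on the SeqLike hunks above: `contains` is widened to `contains[A1 >: A](elem: A1)`, the KMP search direction is passed as a named `forward` argument, and `updated` now throws `IndexOutOfBoundsException` for out-of-range indices. A minimal sketch of the observable effects (illustrative only, not part of the patch; the object name is made up):

    object SeqLikeSketch {
      def main(args: Array[String]): Unit = {
        val xs: Seq[Int] = Seq(1, 2, 3)

        // contains now takes any supertype of the element type, so an
        // unrelated argument still compiles (A1 is inferred as Any).
        println(xs.contains(2))      // true
        println(xs.contains("two"))  // false

        // indexOfSlice delegates to SeqLike.kmpSearch for long patterns.
        println(xs.indexOfSlice(Seq(2, 3)))  // 1

        // updated rejects negative and past-the-end indices with
        // IndexOutOfBoundsException instead of quietly producing a wrong result.
        println(xs.updated(1, 42))   // List(1, 42, 3)
      }
    }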
diff --git a/src/library/scala/collection/SeqProxy.scala b/src/library/scala/collection/SeqProxy.scala
index 1f8dc4a..f728ba8 100644
--- a/src/library/scala/collection/SeqProxy.scala
+++ b/src/library/scala/collection/SeqProxy.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 
 /** This trait implements a proxy for sequence objects. It forwards
  *  all calls to a different sequence object.
@@ -17,4 +18,5 @@ package scala.collection
  *  @version 2.8
  *  @since   2.8
  */
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
 trait SeqProxy[+A] extends Seq[A] with SeqProxyLike[A, Seq[A]]
diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala
index 5e8030d..b01d227 100644
--- a/src/library/scala/collection/SeqProxyLike.scala
+++ b/src/library/scala/collection/SeqProxyLike.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 
@@ -22,6 +23,7 @@ import generic._
  *  @version 2.8
  *  @since   2.8
  */
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
 trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A, Repr] with IterableProxyLike[A, Repr] {
   override def size = self.size
   override def toSeq: Seq[A] = self.toSeq
@@ -50,7 +52,7 @@ trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A,
   override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = self.lastIndexOfSlice(that)
   override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = self.lastIndexOfSlice(that, end)
   override def containsSlice[B](that: GenSeq[B]): Boolean = self.indexOfSlice(that) != -1
-  override def contains(elem: Any): Boolean = self.contains(elem)
+  override def contains[A1 >: A](elem: A1): Boolean = self.contains(elem)
   override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.union(that)(bf)
   override def diff[B >: A](that: GenSeq[B]): Repr = self.diff(that)
   override def intersect[B >: A](that: GenSeq[B]): Repr = self.intersect(that)
diff --git a/src/library/scala/collection/SeqView.scala b/src/library/scala/collection/SeqView.scala
index c26124c..4afc5bf 100644
--- a/src/library/scala/collection/SeqView.scala
+++ b/src/library/scala/collection/SeqView.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 import TraversableView.NoBuilder
@@ -16,7 +17,7 @@ import TraversableView.NoBuilder
 /** A base trait for non-strict views of sequences.
  *  $seqViewInfo
  */
-trait SeqView[+A, +Coll] extends SeqViewLike[A, Coll, SeqView[A, Coll]] with GenSeqView[A, Coll]
+trait SeqView[+A, +Coll] extends SeqViewLike[A, Coll, SeqView[A, Coll]]
 
 /** An object containing the necessary implicit definitions to make
  *  `SeqView`s work. Its definitions are generally not accessed directly by clients.
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index 5f2bf90..5e31ac4 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -6,11 +6,11 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 import Seq.fill
-import TraversableView.NoBuilder
 
 /** A template trait for non-strict views of sequences.
  *  $seqViewInfo
@@ -30,45 +30,153 @@ import TraversableView.NoBuilder
 trait SeqViewLike[+A,
                   +Coll,
                   +This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]]
-  extends Seq[A] with SeqLike[A, This] with IterableView[A, Coll] with IterableViewLike[A, Coll, This] with GenSeqViewLike[A, Coll, This]
+  extends Seq[A] with SeqLike[A, This] with IterableView[A, Coll] with IterableViewLike[A, Coll, This]
 { self =>
 
-  trait Transformed[+B] extends SeqView[B, Coll] with super[IterableViewLike].Transformed[B] with super[GenSeqViewLike].Transformed[B] {
+  /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
+  private[collection] abstract class AbstractTransformed[+B] extends Seq[B] with super[IterableViewLike].Transformed[B] with Transformed[B]
+
+  trait Transformed[+B] extends SeqView[B, Coll] with super.Transformed[B] {
     def length: Int
     def apply(idx: Int): B
     override def toString = viewToString
   }
 
-  /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
-  private[collection] abstract class AbstractTransformed[+B] extends Seq[B] with super[IterableViewLike].Transformed[B] with Transformed[B]
+  trait EmptyView extends Transformed[Nothing] with super.EmptyView {
+    final override def length = 0
+    final override def apply(n: Int) = Nil(n)
+  }
 
-  trait EmptyView extends Transformed[Nothing] with super[IterableViewLike].EmptyView with super[GenSeqViewLike].EmptyView
+  trait Forced[B] extends super.Forced[B] with Transformed[B] {
+    def length = forced.length
+    def apply(idx: Int) = forced.apply(idx)
+  }
 
-  trait Forced[B] extends super[IterableViewLike].Forced[B] with super[GenSeqViewLike].Forced[B] with Transformed[B]
+  trait Sliced extends super.Sliced with Transformed[A] {
+    def length = iterator.size
+    def apply(idx: Int): A =
+      if (idx + from < until) self.apply(idx + from)
+      else throw new IndexOutOfBoundsException(idx.toString)
 
-  trait Sliced extends super[IterableViewLike].Sliced with super[GenSeqViewLike].Sliced with Transformed[A]
+    override def foreach[U](f: A => U) = iterator foreach f
+    override def iterator: Iterator[A] = self.iterator drop from take endpoints.width
+  }
 
-  trait Mapped[B] extends super[IterableViewLike].Mapped[B] with super[GenSeqViewLike].Mapped[B] with Transformed[B]
+  trait Mapped[B] extends super.Mapped[B] with Transformed[B] {
+    def length = self.length
+    def apply(idx: Int): B = mapping(self(idx))
+  }
 
-  trait FlatMapped[B] extends super[IterableViewLike].FlatMapped[B] with super[GenSeqViewLike].FlatMapped[B] with Transformed[B]
+  trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] {
+    protected[this] lazy val index = {
+      val index = new Array[Int](self.length + 1)
+      index(0) = 0
+      for (i <- 0 until self.length) // note that if the mapping returns a list, performance is bad, bad
+        index(i + 1) = index(i) + mapping(self(i)).seq.size
+      index
+    }
+    protected[this] def findRow(idx: Int, lo: Int, hi: Int): Int = {
+      val mid = (lo + hi) / 2
+      if (idx < index(mid)) findRow(idx, lo, mid - 1)
+      else if (idx >= index(mid + 1)) findRow(idx, mid + 1, hi)
+      else mid
+    }
+    def length = index(self.length)
+    def apply(idx: Int) = {
+      val row = findRow(idx, 0, self.length - 1)
+      mapping(self(row)).seq.toSeq(idx - index(row))
+    }
+  }
 
-  trait Appended[B >: A] extends super[IterableViewLike].Appended[B] with super[GenSeqViewLike].Appended[B] with Transformed[B]
+  trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
+    protected[this] lazy val restSeq = rest.toSeq
+    def length = self.length + restSeq.length
+    def apply(idx: Int) =
+      if (idx < self.length) self(idx) else restSeq(idx - self.length)
+  }
 
-  trait Filtered extends super[IterableViewLike].Filtered with super[GenSeqViewLike].Filtered with Transformed[A]
+  trait Filtered extends super.Filtered with Transformed[A] {
+    protected[this] lazy val index = {
+      var len = 0
+      val arr = new Array[Int](self.length)
+      for (i <- 0 until self.length)
+        if (pred(self(i))) {
+          arr(len) = i
+          len += 1
+        }
+      arr take len
+    }
+    def length = index.length
+    def apply(idx: Int) = self(index(idx))
+  }
 
-  trait TakenWhile extends super[IterableViewLike].TakenWhile with super[GenSeqViewLike].TakenWhile with Transformed[A]
+  trait TakenWhile extends super.TakenWhile with Transformed[A] {
+    protected[this] lazy val len = self prefixLength pred
+    def length = len
+    def apply(idx: Int) =
+      if (idx < len) self(idx)
+      else throw new IndexOutOfBoundsException(idx.toString)
+  }
 
-  trait DroppedWhile extends super[IterableViewLike].DroppedWhile with super[GenSeqViewLike].DroppedWhile with Transformed[A]
+  trait DroppedWhile extends super.DroppedWhile with Transformed[A] {
+    protected[this] lazy val start = self prefixLength pred
+    def length = self.length - start
+    def apply(idx: Int) =
+      if (idx >= 0) self(idx + start)
+      else throw new IndexOutOfBoundsException(idx.toString)
+  }
 
-  trait Zipped[B] extends super[IterableViewLike].Zipped[B] with super[GenSeqViewLike].Zipped[B] with Transformed[(A, B)]
+  trait Zipped[B] extends super.Zipped[B] with Transformed[(A, B)] {
+    protected[this] lazy val thatSeq = other.seq.toSeq
+    /* Have to be careful here - other may be an infinite sequence. */
+    def length = if ((thatSeq lengthCompare self.length) <= 0) thatSeq.length else self.length
+    def apply(idx: Int) = (self.apply(idx), thatSeq.apply(idx))
+  }
 
-  trait ZippedAll[A1 >: A, B] extends super[IterableViewLike].ZippedAll[A1, B] with super[GenSeqViewLike].ZippedAll[A1, B] with Transformed[(A1, B)]
+  trait ZippedAll[A1 >: A, B] extends super.ZippedAll[A1, B] with Transformed[(A1, B)] {
+    protected[this] lazy val thatSeq = other.seq.toSeq
+    def length: Int = self.length max thatSeq.length
+    def apply(idx: Int) =
+      (if (idx < self.length) self.apply(idx) else thisElem,
+       if (idx < thatSeq.length) thatSeq.apply(idx) else thatElem)
+  }
 
-  trait Reversed extends Transformed[A] with super[GenSeqViewLike].Reversed
+  trait Reversed extends Transformed[A] {
+    override def iterator: Iterator[A] = createReversedIterator
+    def length: Int = self.length
+    def apply(idx: Int): A = self.apply(length - 1 - idx)
+    final override protected[this] def viewIdentifier = "R"
+
+    private def createReversedIterator = {
+      var lst = List[A]()
+      for (elem <- self) lst ::= elem
+      lst.iterator
+    }
+  }
 
-  trait Patched[B >: A] extends Transformed[B] with super[GenSeqViewLike].Patched[B]
+  trait Patched[B >: A] extends Transformed[B] {
+    protected[this] val from: Int
+    protected[this] val patch: GenSeq[B]
+    protected[this] val replaced: Int
+    private lazy val plen = patch.length
+    override def iterator: Iterator[B] = self.iterator patch (from, patch.iterator, replaced)
+    def length: Int = self.length + plen - replaced
+    def apply(idx: Int): B =
+      if (idx < from) self.apply(idx)
+      else if (idx < from + plen) patch.apply(idx - from)
+      else self.apply(idx - plen + replaced)
+    final override protected[this] def viewIdentifier = "P"
+  }
 
-  trait Prepended[B >: A] extends Transformed[B] with super[GenSeqViewLike].Prepended[B]
+  trait Prepended[B >: A] extends Transformed[B] {
+    protected[this] val fst: B
+    override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator
+    def length: Int = 1 + self.length
+    def apply(idx: Int): B =
+      if (idx == 0) fst
+      else self.apply(idx - 1)
+    final override protected[this] def viewIdentifier = "A"
+  }
 
   /** Boilerplate method, to override in each subclass
    *  This method could be eliminated if Scala had virtual classes
@@ -137,5 +245,20 @@ trait SeqViewLike[+A,
   override def sorted[B >: A](implicit ord: Ordering[B]): This =
     newForced(thisSeq sorted ord).asInstanceOf[This]
 
+  override def sortWith(lt: (A, A) => Boolean): This =
+    newForced(thisSeq sortWith lt).asInstanceOf[This]
+
+  override def sortBy[B](f: (A) => B)(implicit ord: Ordering[B]): This =
+    newForced(thisSeq sortBy f).asInstanceOf[This]
+
+  override def combinations(n: Int): Iterator[This] =
+    (thisSeq combinations n).map(as => newForced(as).asInstanceOf[This])
+
+  override def permutations: Iterator[This] =
+    thisSeq.permutations.map(as => newForced(as).asInstanceOf[This])
+
+  override def distinct: This =
+    newForced(thisSeq.distinct).asInstanceOf[This]
+
   override def stringPrefix = "SeqView"
 }
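
Note on the SeqViewLike rewrite above: each Transformed implementation (Forced, Sliced, Mapped, FlatMapped, Appended, Filtered, TakenWhile, DroppedWhile, Zipped, ZippedAll, Reversed, Patched, Prepended) is now inlined here, and sortWith/sortBy/combinations/permutations/distinct are routed through newForced, so those operations evaluate the elements and re-wrap them as a forced view. A small illustrative sketch (not part of the patch):

    object SeqViewSketch {
      def main(args: Array[String]): Unit = {
        val v = Seq(3, 1, 2).view.map(_ * 10)       // still lazy, nothing computed yet

        // sortBy materializes the elements via newForced and wraps them
        // back into a view; force then builds the underlying Seq.
        println(v.sortBy(identity).force)           // List(10, 20, 30)

        // distinct behaves the same way.
        println(Seq(1, 1, 2).view.distinct.force)   // List(1, 2)
      }
    }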
diff --git a/src/library/scala/collection/Sequentializable.scala.disabled b/src/library/scala/collection/Sequentializable.scala.disabled
deleted file mode 100644
index df45767..0000000
--- a/src/library/scala/collection/Sequentializable.scala.disabled
+++ /dev/null
@@ -1,10 +0,0 @@
-package scala.collection
-
-
-
-
-trait Sequentializable[+T, +Repr] {
-  
-  def seq: Repr
-  
-}
diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala
index c304323..f74c265 100644
--- a/src/library/scala/collection/Set.scala
+++ b/src/library/scala/collection/Set.scala
@@ -6,8 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
+package scala
+package collection
 
 import generic._
 
@@ -44,4 +44,4 @@ object Set extends SetFactory[Set] {
 }
 
 /** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */
-private[scala] abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A]
+abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A]
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index a6ebcc0..0c5c7e0 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -6,8 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
+package scala
+package collection
 
 import generic._
 import mutable.{ Builder, SetBuilder }
@@ -180,14 +180,14 @@ self =>
     def hasNext = len <= elms.size || itr.hasNext
     def next = {
       if (!itr.hasNext) {
-        if (len > elms.size) Iterator.empty.next
+        if (len > elms.size) Iterator.empty.next()
         else {
           itr = new SubsetsItr(elms, len)
           len += 1
         }
       }
 
-      itr.next
+      itr.next()
     }
   }
 
@@ -205,11 +205,11 @@ self =>
 
     def hasNext = _hasNext
     def next(): This = {
-      if (!hasNext) Iterator.empty.next
+      if (!hasNext) Iterator.empty.next()
 
       val buf = self.newBuilder
       idxs.slice(0, len) foreach (idx => buf += elms(idx))
-      val result = buf.result
+      val result = buf.result()
 
       var i = len - 1
       while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1
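
Note on the SetLike hunks above: they only add explicit parentheses on the side-effecting next() and result() calls inside the subsets iterators; the behaviour of subsets is unchanged. For reference, a short sketch of the iterator those classes drive (illustrative only, not part of the patch):

    object SubsetsSketch {
      def main(args: Array[String]): Unit = {
        val s = Set(1, 2, 3)

        // All subsets, smallest first (the outer iterator advances `len`).
        s.subsets.foreach(println)     // Set(), Set(1), ..., Set(1, 2, 3)

        // Only the subsets of a given size, driven by SubsetsItr.
        s.subsets(2).foreach(println)  // Set(1, 2), Set(1, 3), Set(2, 3)
      }
    }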
diff --git a/src/library/scala/collection/SetProxy.scala b/src/library/scala/collection/SetProxy.scala
index 08075a7..f9f38f1 100644
--- a/src/library/scala/collection/SetProxy.scala
+++ b/src/library/scala/collection/SetProxy.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 /** This is a simple wrapper class for [[scala.collection.Set]].
  *  It is most useful for assembling customized set abstractions
diff --git a/src/library/scala/collection/SetProxyLike.scala b/src/library/scala/collection/SetProxyLike.scala
index 5196f39..4cd215c 100644
--- a/src/library/scala/collection/SetProxyLike.scala
+++ b/src/library/scala/collection/SetProxyLike.scala
@@ -6,10 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
-
-import generic._
+package scala
+package collection
 
 // Methods could be printed by  cat SetLike.scala | egrep '^  (override )?def'
 
@@ -19,6 +17,7 @@ import generic._
  *  @author  Martin Odersky
  *  @version 2.8
  */
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
 trait SetProxyLike[A, +This <: SetLike[A, This] with Set[A]] extends SetLike[A, This] with IterableProxyLike[A, This] {
   def empty: This
   override def contains(elem: A): Boolean = self.contains(elem)
diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala
index c81c16e..36e7eae 100644
--- a/src/library/scala/collection/SortedMap.scala
+++ b/src/library/scala/collection/SortedMap.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 import mutable.Builder
@@ -33,24 +34,20 @@ object SortedMap extends SortedMapFactory[SortedMap] {
   def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = immutable.SortedMap.empty[A, B](ord)
 
   implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B]
-  
+
   private[collection] trait Default[A, +B] extends SortedMap[A, B] {
   self =>
     override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = {
       val b = SortedMap.newBuilder[A, B1]
       b ++= this
       b += ((kv._1, kv._2))
-      b.result
+      b.result()
     }
-    
+
     override def - (key: A): SortedMap[A, B] = {
       val b = newBuilder
       for (kv <- this; if kv._1 != key) b += kv
-      b.result
+      b.result()
     }
   }
-
 }
-
-
-
diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala
index 57ad349..cf5e9c3 100644
--- a/src/library/scala/collection/SortedMapLike.scala
+++ b/src/library/scala/collection/SortedMapLike.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 
 import generic._
 
@@ -42,6 +41,7 @@ self =>
       val map = self.rangeImpl(from, until)
       new map.DefaultKeySortedSet
     }
+    override def keysIteratorFrom(start: A) = self.keysIteratorFrom(start)
   }
 
   /** Add a key/value pair to this map.
@@ -68,21 +68,27 @@ self =>
    *  @param elems the remaining elements to add.
    */
   override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): SortedMap[A, B1] = {
-    var m = this + elem1 + elem2;
+    var m = this + elem1 + elem2
     for (e <- elems) m = m + e
     m
   }
-  
+
   override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
     implicit def ordering: Ordering[A] = self.ordering
     override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
+    override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)}
+    override def keysIteratorFrom(start: A) = self keysIteratorFrom start filter p
+    override def valuesIteratorFrom(start: A) = self iteratorFrom start collect {case (k,v) if p(k) => v}
   }
-  
+
   override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
     implicit def ordering: Ordering[A] = self.ordering
     override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
+    override def iteratorFrom(start: A) = (self iteratorFrom start) map {case (k,v) => (k, f(v))}
+    override def keysIteratorFrom(start: A) = self keysIteratorFrom start
+    override def valuesIteratorFrom(start: A) = self valuesIteratorFrom start map f
   }
-  
+
   /** Adds a number of elements provided by a traversable object
    *  and returns a new collection with the added elements.
    *
@@ -90,9 +96,27 @@ self =>
    */
   override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
     ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
-  
-}
-
-
-
 
+  /**
+   * Creates an iterator over all the key/value pairs
+   * contained in this map having a key greater than or
+   * equal to `start` according to the ordering of
+   * this map. x.iteratorFrom(y) is equivalent
+   * to but often more efficient than x.from(y).iterator.
+   *
+   * @param start The lower bound (inclusive)
+   * on the keys to be returned
+   */
+  def iteratorFrom(start: A): Iterator[(A, B)]
+  /**
+   * Creates an iterator over all the values contained in this
+   * map that are associated with a key greater than or equal to `start`
+   * according to the ordering of this map. x.valuesIteratorFrom(y) is
+   * equivalent to but often more efficient than
+   * x.from(y).valuesIterator.
+   *
+   * @param start The lower bound (inclusive)
+   * on the keys to be returned
+   */
+  def valuesIteratorFrom(start: A): Iterator[B]
+}
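
Note on the SortedMapLike additions above: the new abstract members iteratorFrom and valuesIteratorFrom (plus the keysIteratorFrom forwarding in DefaultKeySortedSet, filterKeys and mapValues) let callers start iteration at a key without building the from(key) projection first. A minimal sketch using immutable.TreeMap, which implements this trait (illustrative only, not part of the patch):

    import scala.collection.immutable.TreeMap

    object SortedMapSketch {
      def main(args: Array[String]): Unit = {
        val m = TreeMap(1 -> "a", 2 -> "b", 3 -> "c", 4 -> "d")

        // Same elements as m.from(3).iterator, without the intermediate ranged map.
        println(m.iteratorFrom(3).toList)        // List((3,c), (4,d))
        println(m.keysIteratorFrom(2).toList)    // List(2, 3, 4)
        println(m.valuesIteratorFrom(2).toList)  // List(b, c, d)

        // The lazy filterKeys wrapper now forwards the new methods as well.
        println(m.filterKeys(_ % 2 == 0).iteratorFrom(1).toList)  // List((2,b), (4,d))
      }
    }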
diff --git a/src/library/scala/collection/SortedSet.scala b/src/library/scala/collection/SortedSet.scala
index 2d5d4fb..43189d2 100644
--- a/src/library/scala/collection/SortedSet.scala
+++ b/src/library/scala/collection/SortedSet.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 import generic._
 
 /** A sorted set.
diff --git a/src/library/scala/collection/SortedSetLike.scala b/src/library/scala/collection/SortedSetLike.scala
index 71b45c7..c38ea1f 100644
--- a/src/library/scala/collection/SortedSetLike.scala
+++ b/src/library/scala/collection/SortedSetLike.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 import generic._
 
 /** A template for sets which are sorted.
@@ -40,4 +41,14 @@ self =>
     case that: SortedSet[_] if that.ordering == ordering => that.hasAll(this.iterator)
     case that => super.subsetOf(that)
   }
+
+  /**
+   * Creates an iterator that contains all values from this collection
+   * greater than or equal to `start` according to the ordering of
+   * this collection. x.iteratorFrom(y) is equivalent to but will usually
+   * be more efficient than x.from(y).iterator
+   *
+   * @param start The lower-bound (inclusive) of the iterator
+   */
+  def iteratorFrom(start: A): Iterator[A] = keysIteratorFrom(start)
 }
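
Note on the SortedSetLike addition above: it gains the matching iteratorFrom, defined simply as keysIteratorFrom(start). A one-line sketch with immutable.TreeSet (illustrative only, not part of the patch):

    import scala.collection.immutable.TreeSet

    object SortedSetSketch {
      def main(args: Array[String]): Unit = {
        val s = TreeSet(1, 3, 5, 7)
        // Equivalent to s.from(4).iterator, but typically cheaper.
        println(s.iteratorFrom(4).toList)  // List(5, 7)
      }
    }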
diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala
index 36ef230..b53724c 100644
--- a/src/library/scala/collection/Traversable.scala
+++ b/src/library/scala/collection/Traversable.scala
@@ -6,12 +6,11 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 
 import generic._
-import mutable.{Builder, Buffer, ArrayBuffer, ListBuffer}
+import mutable.Builder
 import scala.util.control.Breaks
 
 /** A trait for traversable collections.
@@ -102,4 +101,4 @@ object Traversable extends TraversableFactory[Traversable] { self =>
 }
 
 /** Explicit instantiation of the `Traversable` trait to reduce class file size in subclasses. */
-private[scala] abstract class AbstractTraversable[+A] extends Traversable[A]
+abstract class AbstractTraversable[+A] extends Traversable[A]
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 8b430ca..b60ea86 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 import mutable.{ Builder }
@@ -86,7 +87,7 @@ trait TraversableLike[+A, +Repr] extends Any
   def repr: Repr = this.asInstanceOf[Repr]
 
   final def isTraversableAgain: Boolean = true
-  
+
   /** The underlying collection seen as an instance of `$Coll`.
    *  By default this is implemented as the current collection object itself,
    *  but this can be overridden.
@@ -174,7 +175,7 @@ trait TraversableLike[+A, +Repr] extends Any
    *
    *  @usecase def ++:[B](that: TraversableOnce[B]): $Coll[B]
    *    @inheritdoc
-   * 
+   *
    *    Example:
    *    {{{
    *      scala> val x = List(1)
@@ -252,18 +253,21 @@ trait TraversableLike[+A, +Repr] extends Any
     b.result
   }
 
+  private def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = {
+    val b = newBuilder
+    for (x <- this)
+      if (p(x) != isFlipped) b += x
+
+    b.result
+  }
+
   /** Selects all elements of this $coll which satisfy a predicate.
    *
    *  @param p     the predicate used to test elements.
    *  @return      a new $coll consisting of all elements of this $coll that satisfy the given
    *               predicate `p`. The order of the elements is preserved.
    */
-  def filter(p: A => Boolean): Repr = {
-    val b = newBuilder
-    for (x <- this)
-      if (p(x)) b += x
-    b.result
-  }
+  def filter(p: A => Boolean): Repr = filterImpl(p, isFlipped = false)
 
   /** Selects all elements of this $coll which do not satisfy a predicate.
    *
@@ -271,11 +275,11 @@ trait TraversableLike[+A, +Repr] extends Any
    *  @return      a new $coll consisting of all elements of this $coll that do not satisfy the given
    *               predicate `p`. The order of the elements is preserved.
    */
-  def filterNot(p: A => Boolean): Repr = filter(!p(_))
+  def filterNot(p: A => Boolean): Repr = filterImpl(p, isFlipped = true)
 
   def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
     val b = bf(repr)
-    for (x <- this) if (pf.isDefinedAt(x)) b += pf(x)
+    foreach(pf.runWith(b += _))
     b.result
   }
 
@@ -477,7 +481,7 @@ trait TraversableLike[+A, +Repr] extends Any
     var follow = false
     val b = newBuilder
     b.sizeHint(this, -1)
-    for (x <- this.seq) {
+    for (x <- this) {
       if (follow) b += lst
       else follow = true
       lst = x
@@ -502,7 +506,7 @@ trait TraversableLike[+A, +Repr] extends Any
   private[this] def sliceInternal(from: Int, until: Int, b: Builder[A, Repr]): Repr = {
     var i = 0
     breakable {
-      for (x <- this.seq) {
+      for (x <- this) {
         if (i >= from) b += x
         i += 1
         if (i >= until) break
@@ -619,7 +623,9 @@ trait TraversableLike[+A, +Repr] extends Any
     }
   }
 
+  @deprecatedOverriding("Enforce contract of toTraversable that if it is Traversable it returns itself.", "2.11.0")
   def toTraversable: Traversable[A] = thisCollection
+  
   def toIterator: Iterator[A] = toStream.iterator
   def toStream: Stream[A] = toBuffer.toStream
   // Override to provide size hint.
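
Note on the TraversableLike hunks above: filter and filterNot now share a single filterImpl(p, isFlipped) traversal, and collect uses pf.runWith so each element is pattern-matched once rather than going through the isDefinedAt/apply pair. A sketch of the runWith idiom the new collect relies on (illustrative only, not part of the patch):

    object CollectSketch {
      def main(args: Array[String]): Unit = {
        val pf: PartialFunction[Any, Int] = { case n: Int => n * 2 }

        // runWith(action) yields a function that applies `action` to pf(x)
        // only where pf is defined, returning whether it was applied.
        val buf = scala.collection.mutable.ListBuffer.empty[Int]
        List(1, "a", 2, "b", 3).foreach(pf.runWith(buf += _))
        println(buf.toList)                              // List(2, 4, 6)

        // filterNot(p) is now literally filterImpl(p, isFlipped = true).
        println(List(1, 2, 3, 4).filterNot(_ % 2 == 0))  // List(1, 3)
      }
    }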
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index a448ac2..072fd3d 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 import mutable.{ Buffer, Builder, ListBuffer, ArrayBuffer }
 import generic.CanBuildFrom
@@ -96,7 +97,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
   // for internal use
   protected[this] def reversed = {
     var elems: List[A] = Nil
-    self.seq foreach (elems ::= _)
+    self foreach (elems ::= _)
     elems
   }
 
@@ -128,10 +129,8 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
    *  @example    `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)`
    */
   def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = {
-    for (x <- self.toIterator) { // make sure to use an iterator or `seq`
-      if (pf isDefinedAt x)
-        return Some(pf(x))
-    }
+    // make sure to use an iterator or `seq`
+    self.toIterator.foreach(pf.runWith(b => return Some(b)))
     None
   }
 
@@ -141,7 +140,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
 
   def foldLeft[B](z: B)(op: (B, A) => B): B = {
     var result = z
-    this.seq foreach (x => result = op(result, x))
+    this foreach (x => result = op(result, x))
     result
   }
 
@@ -198,7 +197,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
 
   def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op)
 
-  def aggregate[B](z: B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop)
+  def aggregate[B](z: =>B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop)
 
   def sum[B >: A](implicit num: Numeric[B]): B = foldLeft(num.zero)(num.plus)
 
@@ -222,13 +221,37 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
     if (isEmpty)
       throw new UnsupportedOperationException("empty.maxBy")
 
-    reduceLeft((x, y) => if (cmp.gteq(f(x), f(y))) x else y)
+    var maxF: B = null.asInstanceOf[B]
+    var maxElem: A = null.asInstanceOf[A]
+    var first = true
+
+    for (elem <- self) {
+      val fx = f(elem)
+      if (first || cmp.gt(fx, maxF)) {
+        maxElem = elem
+        maxF = fx
+        first = false
+      }
+    }
+    maxElem
   }
   def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A = {
     if (isEmpty)
       throw new UnsupportedOperationException("empty.minBy")
 
-    reduceLeft((x, y) => if (cmp.lteq(f(x), f(y))) x else y)
+    var minF: B = null.asInstanceOf[B]
+    var minElem: A = null.asInstanceOf[A]
+    var first = true
+
+    for (elem <- self) {
+      val fx = f(elem)
+      if (first || cmp.lt(fx, minF)) {
+        minElem = elem
+        minF = fx
+        first = false
+      }
+    }
+    minElem
   }
 
   /** Copies all elements of this $coll to a buffer.
@@ -271,7 +294,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
   def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
     val b = cbf()
     b ++= seq
-    b.result
+    b.result()
   }
 
   def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = {
@@ -279,7 +302,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
     for (x <- self)
       b += x
 
-    b.result
+    b.result()
   }
 
   def mkString(start: String, sep: String, end: String): String =
@@ -297,14 +320,14 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
    * Example:
    *
    * {{{
-   *      scala> val a = LinkedList(1,2,3,4)
-   *      a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
-   *
+   *      scala> val a = List(1,2,3,4)
+   *      a: List[Int] = List(1, 2, 3, 4)
+   *      
    *      scala> val b = new StringBuilder()
-   *      b: StringBuilder =
-   *
-   *      scala> a.addString(b, "LinkedList(", ", ", ")")
-   *      res1: StringBuilder = LinkedList(1, 2, 3, 4)
+   *      b: StringBuilder = 
+   *      
+   *      scala> a.addString(b , "List(" , ", " , ")")
+   *      res5: StringBuilder = List(1, 2, 3, 4)
    * }}}
    *
    *  @param  b    the string builder to which elements are appended.
@@ -339,9 +362,9 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
    * Example:
    *
    * {{{
-   *      scala> val a = LinkedList(1,2,3,4)
-   *      a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
-   *
+   *      scala> val a = List(1,2,3,4)
+   *      a: List[Int] = List(1, 2, 3, 4)
+   *      
    *      scala> val b = new StringBuilder()
    *      b: StringBuilder =
    *
@@ -362,14 +385,14 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
    * Example:
    *
    * {{{
-   *      scala> val a = LinkedList(1,2,3,4)
-   *      a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
-   *
+   *      scala> val a = List(1,2,3,4)
+   *      a: List[Int] = List(1, 2, 3, 4)
+   *      
    *      scala> val b = new StringBuilder()
    *      b: StringBuilder =
    *
    *      scala> val h = a.addString(b)
-   *      b: StringBuilder = 1234
+   *      h: StringBuilder = 1234
    * }}}
 
    *  @param  b    the string builder to which elements are appended.
@@ -380,27 +403,22 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
 
 
 object TraversableOnce {
-  @deprecated("use OnceCanBuildFrom instead", "2.10.0")
-  def traversableOnceCanBuildFrom[T] = new OnceCanBuildFrom[T]
-  @deprecated("use MonadOps instead", "2.10.0")
-  def wrapTraversableOnce[A](trav: TraversableOnce[A]) = new MonadOps(trav)
-
   implicit def alternateImplicit[A](trav: TraversableOnce[A]) = new ForceImplicitAmbiguity
   implicit def flattenTraversableOnce[A, CC[_]](travs: TraversableOnce[CC[A]])(implicit ev: CC[A] => TraversableOnce[A]) =
     new FlattenOps[A](travs map ev)
 
   /* Functionality reused in Iterator.CanBuildFrom */
-  private[collection] abstract class BufferedCanBuildFrom[A, Coll[X] <: TraversableOnce[X]] extends generic.CanBuildFrom[Coll[_], A, Coll[A]] {
-    def bufferToColl[B](buff: ArrayBuffer[B]): Coll[B]
-    def traversableToColl[B](t: GenTraversable[B]): Coll[B]
+  private[collection] abstract class BufferedCanBuildFrom[A, CC[X] <: TraversableOnce[X]] extends generic.CanBuildFrom[CC[_], A, CC[A]] {
+    def bufferToColl[B](buff: ArrayBuffer[B]): CC[B]
+    def traversableToColl[B](t: GenTraversable[B]): CC[B]
 
-    def newIterator: Builder[A, Coll[A]] = new ArrayBuffer[A] mapResult bufferToColl
+    def newIterator: Builder[A, CC[A]] = new ArrayBuffer[A] mapResult bufferToColl
 
     /** Creates a new builder on request of a collection.
      *  @param from  the collection requesting the builder to be created.
      *  @return the result of invoking the `genericBuilder` method on `from`.
      */
-    def apply(from: Coll[_]): Builder[A, Coll[A]] = from match {
+    def apply(from: CC[_]): Builder[A, CC[A]] = from match {
       case xs: generic.GenericTraversableTemplate[_, _] => xs.genericBuilder.asInstanceOf[Builder[A, Traversable[A]]] mapResult {
         case res => traversableToColl(res.asInstanceOf[GenTraversable[A]])
       }
@@ -429,7 +447,7 @@ object TraversableOnce {
     def flatten: Iterator[A] = new AbstractIterator[A] {
       val its = travs.toIterator
       private var it: Iterator[A] = Iterator.empty
-      def hasNext: Boolean = it.hasNext || its.hasNext && { it = its.next.toIterator; hasNext }
+      def hasNext: Boolean = it.hasNext || its.hasNext && { it = its.next().toIterator; hasNext }
       def next(): A = if (hasNext) it.next() else Iterator.empty.next()
     }
   }
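
Note on the TraversableOnce hunks above: maxBy and minBy now make a single pass and evaluate the key function once per element (instead of roughly twice per comparison through reduceLeft), and aggregate takes its zero by name. A small sketch of the difference in evaluation count (illustrative only, not part of the patch):

    object MaxBySketch {
      def main(args: Array[String]): Unit = {
        var calls = 0
        val words = List("kiwi", "banana", "fig")

        val longest = words.maxBy { w => calls += 1; w.length }
        println(longest)  // banana
        println(calls)    // 3, one key evaluation per element in the single-pass version

        // The by-name zero lets parallel implementations request a fresh
        // accumulator per partition; sequentially it still folds left.
        println((1 to 4).aggregate(0)(_ + _, _ + _))  // 10
      }
    }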
diff --git a/src/library/scala/collection/TraversableProxy.scala b/src/library/scala/collection/TraversableProxy.scala
index 568298a..65936da 100644
--- a/src/library/scala/collection/TraversableProxy.scala
+++ b/src/library/scala/collection/TraversableProxy.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 
 // Methods could be printed by  cat TraversableLike.scala | egrep '^  (override )?def'
 
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index 8896cd1..4399dbc 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 import mutable.{Buffer, StringBuilder}
@@ -23,6 +24,7 @@ import scala.reflect.ClassTag
  *  @version 2.8
  *  @since   2.8
  */
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
 trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversable[A]] extends TraversableLike[A, Repr] with Proxy {
   def self: Repr
 
diff --git a/src/library/scala/collection/TraversableView.scala b/src/library/scala/collection/TraversableView.scala
index cce6b72..cffce6f 100644
--- a/src/library/scala/collection/TraversableView.scala
+++ b/src/library/scala/collection/TraversableView.scala
@@ -6,16 +6,16 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 import mutable.Builder
-import TraversableView.NoBuilder
 
 /** A base trait for non-strict views of traversable collections.
  *  $traversableViewInfo
  */
-trait TraversableView[+A, +Coll] extends TraversableViewLike[A, Coll, TraversableView[A, Coll]] with GenTraversableView[A, Coll] { }
+trait TraversableView[+A, +Coll] extends TraversableViewLike[A, Coll, TraversableView[A, Coll]] { }
 
 /** An object containing the necessary implicit definitions to make
  *  `TraversableView`s work. Its definitions are generally not accessed directly by clients.
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 14f865c..5926c69 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -6,11 +6,11 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 import generic._
 import mutable.{ Builder, ArrayBuffer }
-import TraversableView.NoBuilder
 import scala.annotation.migration
 import scala.language.implicitConversions
 
@@ -59,7 +59,7 @@ trait ViewMkString[+A] {
  *  $viewInfo
  *
  *  All views for traversable collections are defined by creating a new `foreach` method.
- *  
+ *
  *  @author Martin Odersky
  *  @version 2.8
  *  @since   2.8
@@ -70,27 +70,39 @@ trait ViewMkString[+A] {
 trait TraversableViewLike[+A,
                           +Coll,
                           +This <: TraversableView[A, Coll] with TraversableViewLike[A, Coll, This]]
-  extends Traversable[A] with TraversableLike[A, This] with ViewMkString[A] with GenTraversableViewLike[A, Coll, This]
+  extends Traversable[A] with TraversableLike[A, This] with ViewMkString[A]
 {
   self =>
 
-  override protected[this] def newBuilder: Builder[A, This] =
-    throw new UnsupportedOperationException(this+".newBuilder")
-
   protected def underlying: Coll
   protected[this] def viewIdentifier: String = ""
   protected[this] def viewIdString: String = ""
+  def viewToString = stringPrefix + viewIdString + "(...)"
   override def stringPrefix = "TraversableView"
 
+  override protected[this] def newBuilder: Builder[A, This] =
+    throw new UnsupportedOperationException(this+".newBuilder")
+
   def force[B >: A, That](implicit bf: CanBuildFrom[Coll, B, That]) = {
     val b = bf(underlying)
     b ++= this
     b.result()
   }
 
-  trait Transformed[+B] extends TraversableView[B, Coll] with super.Transformed[B] {
+  /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
+  private[collection] abstract class AbstractTransformed[+B] extends Traversable[B] with Transformed[B]
+
+
+  /** The implementation base trait of this view.
+   *  This trait and all its subtraits has to be re-implemented for each
+   *  ViewLike class.
+   */
+  trait Transformed[+B] extends TraversableView[B, Coll] {
     def foreach[U](f: B => U): Unit
 
+    lazy val underlying = self.underlying
+    final override protected[this] def viewIdString = self.viewIdString + viewIdentifier
+
     // Methods whose standard implementations use "isEmpty" need to be rewritten
     // for views, else they will end up traversing twice in a situation like:
     //   xs.view.flatMap(f).headOption
@@ -116,29 +128,99 @@ trait TraversableViewLike[+A,
     override def toString = viewToString
   }
 
-  /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
-  private[collection] abstract class AbstractTransformed[+B] extends Traversable[B] with Transformed[B]
-
-  trait EmptyView extends Transformed[Nothing] with super.EmptyView
+  trait EmptyView extends Transformed[Nothing] {
+    final override def isEmpty = true
+    final override def foreach[U](f: Nothing => U): Unit = ()
+  }
 
   /** A fall back which forces everything into a vector and then applies an operation
    *  on it. Used for those operations which do not naturally lend themselves to a view
    */
-  trait Forced[B] extends Transformed[B] with super.Forced[B]
+  trait Forced[B] extends Transformed[B] {
+    protected[this] val forced: GenSeq[B]
+    def foreach[U](f: B => U) = forced foreach f
+    final override protected[this] def viewIdentifier = "C"
+  }
 
-  trait Sliced extends Transformed[A] with super.Sliced
+  trait Sliced extends Transformed[A] {
+    protected[this] val endpoints: SliceInterval
+    protected[this] def from  = endpoints.from
+    protected[this] def until = endpoints.until
+    // protected def newSliced(_endpoints: SliceInterval): Transformed[A] =
+    //   self.newSliced(endpoints.recalculate(_endpoints))
+
+    def foreach[U](f: A => U) {
+      var index = 0
+      for (x <- self) {
+        if (from <= index) {
+          if (until <= index) return
+          f(x)
+        }
+        index += 1
+      }
+    }
+    final override protected[this] def viewIdentifier = "S"
+  }
 
-  trait Mapped[B] extends Transformed[B] with super.Mapped[B]
+  trait Mapped[B] extends Transformed[B] {
+    protected[this] val mapping: A => B
+    def foreach[U](f: B => U) {
+      for (x <- self)
+        f(mapping(x))
+    }
+    final override protected[this] def viewIdentifier = "M"
+  }
 
-  trait FlatMapped[B] extends Transformed[B] with super.FlatMapped[B]
+  trait FlatMapped[B] extends Transformed[B] {
+    protected[this] val mapping: A => GenTraversableOnce[B]
+    def foreach[U](f: B => U) {
+      for (x <- self)
+        for (y <- mapping(x).seq)
+          f(y)
+    }
+    final override protected[this] def viewIdentifier = "N"
+  }
 
-  trait Appended[B >: A] extends Transformed[B] with super.Appended[B]
+  trait Appended[B >: A] extends Transformed[B] {
+    protected[this] val rest: GenTraversable[B]
+    def foreach[U](f: B => U) {
+      self foreach f
+      rest foreach f
+    }
+    final override protected[this] def viewIdentifier = "A"
+  }
 
-  trait Filtered extends Transformed[A] with super.Filtered
+  trait Filtered extends Transformed[A] {
+    protected[this] val pred: A => Boolean
+    def foreach[U](f: A => U) {
+      for (x <- self)
+        if (pred(x)) f(x)
+    }
+    final override protected[this] def viewIdentifier = "F"
+  }
 
-  trait TakenWhile extends Transformed[A] with super.TakenWhile
+  trait TakenWhile extends Transformed[A] {
+    protected[this] val pred: A => Boolean
+    def foreach[U](f: A => U) {
+      for (x <- self) {
+        if (!pred(x)) return
+        f(x)
+      }
+    }
+    final override protected[this] def viewIdentifier = "T"
+  }
 
-  trait DroppedWhile extends Transformed[A] with super.DroppedWhile
+  trait DroppedWhile extends Transformed[A] {
+    protected[this] val pred: A => Boolean
+    def foreach[U](f: A => U) {
+      var go = false
+      for (x <- self) {
+        if (!go && !pred(x)) go = true
+        if (go) f(x)
+      }
+    }
+    final override protected[this] def viewIdentifier = "D"
+  }
 
   override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
     newAppended(xs.seq.toTraversable).asInstanceOf[That]
@@ -162,7 +244,7 @@ trait TraversableViewLike[+A,
 //     if (b.isInstanceOf[NoBuilder[_]]) newFlatMapped(f).asInstanceOf[That]
 //    else super.flatMap[B, That](f)(bf)
   }
-  override def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]) = 
+  override def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]) =
     newFlatMapped(asTraversable)
   private[this] implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This]
 
@@ -209,7 +291,18 @@ trait TraversableViewLike[+A,
   override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)) =
     (newMapped(x => asTriple(x)._1), newMapped(x => asTriple(x)._2), newMapped(x => asTriple(x)._3))  // TODO - Performance improvements.
 
-  override def toString = viewToString
-}
+  override def filterNot(p: (A) => Boolean): This =
+    newFiltered(a => !(p(a)))
+
+  override def inits: Iterator[This] =
+    thisSeq.inits.map(as => newForced(as).asInstanceOf[This])
+
+  override def tails: Iterator[This] =
+    thisSeq.tails.map(as => newForced(as).asInstanceOf[This])
 
+  override def tail: This =
+    // super.tail would also work as it is currently implemented in terms of drop(Int).
+    if (isEmpty) super.tail else newDropped(1)
 
+  override def toString = viewToString
+}
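
Note on the TraversableViewLike rewrite above: each Transformed trait now carries its own foreach plus a one-letter viewIdentifier (C, S, M, N, A, F, T, D) that viewToString concatenates, and filterNot, inits, tails and tail gain view-preserving overrides. A small sketch (illustrative only, not part of the patch; the exact toString prefix depends on the collection):

    object ViewIdSketch {
      def main(args: Array[String]): Unit = {
        // Each lazy transformation appends its identifier letter, so the
        // toString hints at the pipeline without evaluating it.
        val v = List(1, 2, 3, 4).view.map(_ + 1).filter(_ % 2 == 0)
        println(v)             // prints something like SeqViewMF(...)

        // tail stays lazy (newDropped(1)) instead of forcing the view.
        println(v.tail.force)  // List(4)
      }
    }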
diff --git a/src/library/scala/collection/concurrent/BasicNode.java b/src/library/scala/collection/concurrent/BasicNode.java
index a65d84b..97b8870 100644
--- a/src/library/scala/collection/concurrent/BasicNode.java
+++ b/src/library/scala/collection/concurrent/BasicNode.java
@@ -8,13 +8,8 @@
 
 package scala.collection.concurrent;
 
-
-
-
-
-
 public abstract class BasicNode {
-    
+
     public abstract String string(int lev);
-    
-}
\ No newline at end of file
+
+}
diff --git a/src/library/scala/collection/concurrent/CNodeBase.java b/src/library/scala/collection/concurrent/CNodeBase.java
index d6eb29c..2fce971 100644
--- a/src/library/scala/collection/concurrent/CNodeBase.java
+++ b/src/library/scala/collection/concurrent/CNodeBase.java
@@ -8,28 +8,26 @@
 
 package scala.collection.concurrent;
 
-
-
 import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
 
+abstract class CNodeBase<K, V> extends MainNode<K, V> {
 
+    @SuppressWarnings("rawtypes")
+    public static final AtomicIntegerFieldUpdater<CNodeBase> updater =
+            AtomicIntegerFieldUpdater.newUpdater(CNodeBase.class, "csize");
 
-abstract class CNodeBase<K, V> extends MainNode<K, V> {
-    
-    public static final AtomicIntegerFieldUpdater<CNodeBase> updater = AtomicIntegerFieldUpdater.newUpdater(CNodeBase.class, "csize");
-    
     public volatile int csize = -1;
-    
+
     public boolean CAS_SIZE(int oldval, int nval) {
 	return updater.compareAndSet(this, oldval, nval);
     }
-    
+
     public void WRITE_SIZE(int nval) {
 	updater.set(this, nval);
     }
-    
+
     public int READ_SIZE() {
 	return updater.get(this);
     }
-    
+
 }
\ No newline at end of file
diff --git a/src/library/scala/collection/concurrent/Gen.java b/src/library/scala/collection/concurrent/Gen.java
index 331eeca..6019884 100644
--- a/src/library/scala/collection/concurrent/Gen.java
+++ b/src/library/scala/collection/concurrent/Gen.java
@@ -8,11 +8,4 @@
 
 package scala.collection.concurrent;
 
-
-
-
-
-
-final class Gen {
-}
-
+final class Gen {}
diff --git a/src/library/scala/collection/concurrent/INodeBase.java b/src/library/scala/collection/concurrent/INodeBase.java
index cbe404e..2f2d203 100644
--- a/src/library/scala/collection/concurrent/INodeBase.java
+++ b/src/library/scala/collection/concurrent/INodeBase.java
@@ -8,28 +8,26 @@
 
 package scala.collection.concurrent;
 
-
-
 import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
 
+abstract class INodeBase<K, V> extends BasicNode {
 
+    @SuppressWarnings("rawtypes")
+    public static final AtomicReferenceFieldUpdater<INodeBase, MainNode> updater =
+            AtomicReferenceFieldUpdater.newUpdater(INodeBase.class, MainNode.class, "mainnode");
 
-abstract class INodeBase<K, V> extends BasicNode {
-    
-    public static final AtomicReferenceFieldUpdater<INodeBase, MainNode> updater = AtomicReferenceFieldUpdater.newUpdater(INodeBase.class, MainNode.class, "mainnode");
-    
     public static final Object RESTART = new Object();
-    
+
     public volatile MainNode<K, V> mainnode = null;
-    
+
     public final Gen gen;
-    
+
     public INodeBase(Gen generation) {
 	gen = generation;
     }
-    
+
     public BasicNode prev() {
 	return null;
     }
-    
+
 }
\ No newline at end of file
diff --git a/src/library/scala/collection/concurrent/MainNode.java b/src/library/scala/collection/concurrent/MainNode.java
index ffe5357..adb9b59 100644
--- a/src/library/scala/collection/concurrent/MainNode.java
+++ b/src/library/scala/collection/concurrent/MainNode.java
@@ -8,33 +8,32 @@
 
 package scala.collection.concurrent;
 
-
-
 import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
 
+abstract class MainNode<K, V> extends BasicNode {
 
+    @SuppressWarnings("rawtypes")
+    public static final AtomicReferenceFieldUpdater<MainNode, MainNode> updater =
+            AtomicReferenceFieldUpdater.newUpdater(MainNode.class, MainNode.class, "prev");
 
-abstract class MainNode<K, V> extends BasicNode {
-    
-    public static final AtomicReferenceFieldUpdater<MainNode, MainNode> updater = AtomicReferenceFieldUpdater.newUpdater(MainNode.class, MainNode.class, "prev");
-    
     public volatile MainNode<K, V> prev = null;
-    
+
     public abstract int cachedSize(Object ct);
-    
+
     public boolean CAS_PREV(MainNode<K, V> oldval, MainNode<K, V> nval) {
 	return updater.compareAndSet(this, oldval, nval);
     }
-    
+
     public void WRITE_PREV(MainNode<K, V> nval) {
 	updater.set(this, nval);
     }
-    
+
     // do we need this? unclear in the javadocs...
     // apparently not - volatile reads are supposed to be safe
     // irregardless of whether there are concurrent ARFU updates
+    @Deprecated @SuppressWarnings("unchecked")
     public MainNode<K, V> READ_PREV() {
 	return updater.get(this);
     }
-    
+
 }
\ No newline at end of file
diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala
index b2276ce..02e5dd0 100644
--- a/src/library/scala/collection/concurrent/Map.scala
+++ b/src/library/scala/collection/concurrent/Map.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.concurrent
+package scala
+package collection.concurrent
 
 /** A template trait for mutable maps that allow concurrent access.
  *
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 714260f..fccc1d8 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package concurrent
 
 import java.util.concurrent.atomic._
@@ -41,7 +42,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
   @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: TrieMap[K, V]): MainNode[K, V] = if (m eq null) null else {
     // complete the GCAS
     val prev = /*READ*/m.prev
-    val ctr = ct.readRoot(true)
+    val ctr = ct.readRoot(abort = true)
 
     prev match {
       case null =>
@@ -250,7 +251,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
               if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct)
               else {
                 if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct)
-                else return RESTART // used to be throw RestartException
+                else RESTART // used to be throw RestartException
               }
             case sn: SNode[K, V] => // 2) singleton node
               if (sn.hc == hc && equal(sn.k, k, ct)) sn.v.asInstanceOf[AnyRef]
@@ -428,16 +429,16 @@ extends MainNode[K, V] with KVNode[K, V] {
 }
 
 
-private[collection] final class LNode[K, V](final val listmap: ImmutableListMap[K, V])
+private[collection] final class LNode[K, V](final val listmap: immutable.ListMap[K, V])
 extends MainNode[K, V] {
-  def this(k: K, v: V) = this(ImmutableListMap(k -> v))
-  def this(k1: K, v1: V, k2: K, v2: V) = this(ImmutableListMap(k1 -> v1, k2 -> v2))
+  def this(k: K, v: V) = this(immutable.ListMap(k -> v))
+  def this(k1: K, v1: V, k2: K, v2: V) = this(immutable.ListMap(k1 -> v1, k2 -> v2))
   def inserted(k: K, v: V) = new LNode(listmap + ((k, v)))
   def removed(k: K, ct: TrieMap[K, V]): MainNode[K, V] = {
     val updmap = listmap - k
     if (updmap.size > 1) new LNode(updmap)
     else {
-      val (k, v) = updmap.iterator.next
+      val (k, v) = updmap.iterator.next()
       new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses
     }
   }
@@ -545,7 +546,7 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba
   //   removed (those existing when the op began)
   // - if there are only null-i-nodes below, returns null
   def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = {
-    var bmp = bitmap
+    val bmp = bitmap
     var i = 0
     val arr = array
     val tmparray = new Array[BasicNode](arr.length)
@@ -654,8 +655,8 @@ extends scala.collection.concurrent.Map[K, V]
   /* internal methods */
 
   private def writeObject(out: java.io.ObjectOutputStream) {
-    out.writeObject(hashf)
-    out.writeObject(ef)
+    out.writeObject(hashingobj)
+    out.writeObject(equalityobj)
 
     val it = iterator
     while (it.hasNext) {
@@ -723,7 +724,7 @@ extends scala.collection.concurrent.Map[K, V]
   private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = {
     val desc = RDCSS_Descriptor(ov, expectedmain, nv)
     if (CAS_ROOT(ov, desc)) {
-      RDCSS_Complete(false)
+      RDCSS_Complete(abort = false)
       /*READ*/desc.committed
     } else false
   }
@@ -920,8 +921,8 @@ object TrieMap extends MutableMapFactory[TrieMap] {
 
 
 private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] {
-  private var stack = new Array[Array[BasicNode]](7)
-  private var stackpos = new Array[Int](7)
+  private val stack = new Array[Array[BasicNode]](7)
+  private val stackpos = new Array[Int](7)
   private var depth = -1
   private var subiter: Iterator[(K, V)] = null
   private var current: KVNode[K, V] = null
@@ -1030,7 +1031,7 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct:
         val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2)
         stack(d) = arr1
         stackpos(d) = -1
-        val it = newIterator(level + 1, ct, false)
+        val it = newIterator(level + 1, ct, _mustInit = false)
         it.stack(0) = arr2
         it.stackpos(0) = -1
         it.depth = 0
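
Before moving on to the converter changes below, a minimal usage sketch of the concurrent TrieMap touched by this hunk may help readers of the patch. It only uses the public scala.collection.concurrent.TrieMap API (putIfAbsent, update, snapshot); the object and value names are illustrative, not part of the commit:

    import scala.collection.concurrent.TrieMap

    object TrieMapSketch {
      def main(args: Array[String]): Unit = {
        val cache = TrieMap.empty[String, Int]

        // putIfAbsent is atomic; only the first write for a key takes effect
        cache.putIfAbsent("answer", 42)
        cache.putIfAbsent("answer", 0)      // no effect, key already present

        // snapshot() produces a cheap, lazily copied view that is isolated
        // from later updates to `cache`
        val snap = cache.snapshot()
        cache.update("answer", 7)

        println(cache.get("answer"))        // Some(7)
        println(snap.get("answer"))         // Some(42)
      }
    }
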
diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala
index 87bcae3..6658b6f 100644
--- a/src/library/scala/collection/convert/DecorateAsJava.scala
+++ b/src/library/scala/collection/convert/DecorateAsJava.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package convert
 
 import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
@@ -25,7 +26,7 @@ import scala.language.implicitConversions
  *  - `scala.collection.mutable.Buffer` <=> `java.util.List`
  *  - `scala.collection.mutable.Set` <=> `java.util.Set`
  *  - `scala.collection.mutable.Map` <=> `java.util.Map`
- *  - `scala.collection.mutable.ConcurrentMap` <=> `java.util.concurrent.ConcurrentMap`
+ *  - `scala.collection.concurrent.Map` <=> `java.util.concurrent.ConcurrentMap`
  *
  *  In all cases, converting from a source type to a target type and back
  *  again will return the original source object, e.g.
@@ -279,26 +280,6 @@ trait DecorateAsJava {
 
   /**
    * Adds an `asJava` method that implicitly converts a Scala mutable
-   * `ConcurrentMap` to a Java `ConcurrentMap`.
-   *
-   * The returned Java `ConcurrentMap` is backed by the provided Scala
-   * `ConcurrentMap` and any side-effects of using it via the Java interface
-   * will be visible via the Scala interface and vice versa.
-   *
-   * If the Scala `ConcurrentMap` was previously obtained from an implicit or
-   * explicit call of `asConcurrentMap(java.util.concurrect.ConcurrentMap)`
-   * then the original Java `ConcurrentMap` will be returned.
-   *
-   * @param m The `ConcurrentMap` to be converted.
-   * @return An object with an `asJava` method that returns a Java
-   *         `ConcurrentMap` view of the argument.
-   */
-  @deprecated("Use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
-  implicit def asJavaConcurrentMapConverter[A, B](m: mutable.ConcurrentMap[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
-    new AsJava(asJavaConcurrentMap(m))
-
-  /**
-   * Adds an `asJava` method that implicitly converts a Scala mutable
    * `concurrent.Map` to a Java `ConcurrentMap`.
    *
    * The returned Java `ConcurrentMap` is backed by the provided Scala
@@ -306,7 +287,7 @@ trait DecorateAsJava {
    * will be visible via the Scala interface and vice versa.
    *
    * If the Scala `concurrent.Map` was previously obtained from an implicit or
-   * explicit call of `asConcurrentMap(java.util.concurrect.ConcurrentMap)`
+   * explicit call of `asConcurrentMap(java.util.concurrent.ConcurrentMap)`
    * then the original Java `ConcurrentMap` will be returned.
    *
    * @param m The Scala `concurrent.Map` to be converted.
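
As a hedged illustration of the surviving converter documented above (reachable through scala.collection.JavaConverters; the object and value names below are made up), converting a Scala concurrent.Map with asJava yields a live java.util.concurrent.ConcurrentMap view:

    import java.util.{ concurrent => juc }
    import scala.collection.JavaConverters._
    import scala.collection.concurrent.TrieMap

    object AsJavaSketch {
      def main(args: Array[String]): Unit = {
        val scalaMap: scala.collection.concurrent.Map[String, Int] = TrieMap("a" -> 1)

        // asJava wraps rather than copies; the view is backed by scalaMap
        val javaMap: juc.ConcurrentMap[String, Int] = scalaMap.asJava

        javaMap.putIfAbsent("b", 2)
        println(scalaMap.get("b"))   // Some(2): updates are visible on both sides
      }
    }
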
diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala
index 94847a7..c724831 100644
--- a/src/library/scala/collection/convert/DecorateAsScala.scala
+++ b/src/library/scala/collection/convert/DecorateAsScala.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package convert
 
 import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
@@ -144,25 +145,6 @@ trait DecorateAsScala {
 
   /**
    * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap`
-   * to a Scala mutable `ConcurrentMap`. The returned Scala `ConcurrentMap` is
-   * backed by the provided Java `ConcurrentMap` and any side-effects of using
-   * it via the Scala interface will be visible via the Java interface and
-   * vice versa.
-   *
-   * If the Java `ConcurrentMap` was previously obtained from an implicit or
-   * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
-   * then the original Scala `ConcurrentMap` will be returned.
-   *
-   * @param m The `ConcurrentMap` to be converted.
-   * @return An object with an `asScala` method that returns a Scala mutable
-   *         `ConcurrentMap` view of the argument.
-   */
-  @deprecated("Use `mapAsScalaConcurrentMapConverter` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
-  def asScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[mutable.ConcurrentMap[A, B]] =
-    new AsScala(asScalaConcurrentMap(m))
-
-  /**
-   * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap`
    * to a Scala mutable `concurrent.Map`. The returned Scala `concurrent.Map` is
    * backed by the provided Java `ConcurrentMap` and any side-effects of using
    * it via the Scala interface will be visible via the Java interface and
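
The reverse direction, also kept by this patch, is the asScala decorator for java.util.concurrent.ConcurrentMap; a small sketch (names illustrative) of wrapping a ConcurrentHashMap as a scala.collection.concurrent.Map:

    import java.util.concurrent.ConcurrentHashMap
    import scala.collection.JavaConverters._

    object AsScalaSketch {
      def main(args: Array[String]): Unit = {
        val jmap = new ConcurrentHashMap[String, Int]()
        jmap.put("a", 1)

        // asScala wraps (does not copy) the Java map
        val smap: scala.collection.concurrent.Map[String, Int] = jmap.asScala

        smap.putIfAbsent("b", 2)
        println(jmap.get("b"))   // 2, the wrapper writes through to the Java map
      }
    }
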
diff --git a/src/library/scala/collection/convert/Decorators.scala b/src/library/scala/collection/convert/Decorators.scala
index e2c46c1..d232fa0 100644
--- a/src/library/scala/collection/convert/Decorators.scala
+++ b/src/library/scala/collection/convert/Decorators.scala
@@ -6,10 +6,11 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package convert
 
-import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import java.{ util => ju }
 
 private[collection] trait Decorators {
   /** Generic class containing the `asJava` converter method */
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
index 5e6126a..9916fe9 100644
--- a/src/library/scala/collection/convert/WrapAsJava.scala
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package convert
 
 import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
@@ -235,27 +236,6 @@ trait WrapAsJava {
   }
 
   /**
-   * Implicitly converts a Scala mutable `ConcurrentMap` to a Java
-   * `ConcurrentMap`.
-   *
-   * The returned Java `ConcurrentMap` is backed by the provided Scala
-   * `ConcurrentMap` and any side-effects of using it via the Java interface
-   * will be visible via the Scala interface and vice versa.
-   *
-   * If the Scala `ConcurrentMap` was previously obtained from an implicit or
-   * explicit call of `asScalaConcurrentMap(java.util.concurrect.ConcurrentMap)`
-   * then the original Java ConcurrentMap will be returned.
-   *
-   * @param m The `ConcurrentMap` to be converted.
-   * @return A Java `ConcurrentMap` view of the argument.
-   */
-  @deprecated("Use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
-  implicit def asJavaConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = m match {
-    case JConcurrentMapDeprecatedWrapper(wrapped) => wrapped
-    case _ => new ConcurrentMapDeprecatedWrapper(m)
-  }
-
-  /**
    * Implicitly converts a Scala mutable `concurrent.Map` to a Java
    * `ConcurrentMap`.
    *
@@ -264,7 +244,7 @@ trait WrapAsJava {
    * will be visible via the Scala interface and vice versa.
    *
    * If the Scala `concurrent.Map` was previously obtained from an implicit or
-   * explicit call of `mapAsScalaConcurrentMap(java.util.concurrect.ConcurrentMap)`
+   * explicit call of `mapAsScalaConcurrentMap(java.util.concurrent.ConcurrentMap)`
    * then the original Java ConcurrentMap will be returned.
    *
    * @param m The Scala `concurrent.Map` to be converted.
@@ -277,11 +257,3 @@ trait WrapAsJava {
 }
 
 object WrapAsJava extends WrapAsJava { }
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
index ffcca62..d4ab451 100644
--- a/src/library/scala/collection/convert/WrapAsScala.scala
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -6,36 +6,14 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package convert
 
 import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
 import scala.language.implicitConversions
 
-trait LowPriorityWrapAsScala {
-  this: WrapAsScala =>
-
-  import Wrappers._
-
-  /**
-   * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
-   * The returned Scala ConcurrentMap is backed by the provided Java
-   * ConcurrentMap and any side-effects of using it via the Scala interface will
-   * be visible via the Java interface and vice versa.
-   *
-   * If the Java ConcurrentMap was previously obtained from an implicit or
-   * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
-   * then the original Scala ConcurrentMap will be returned.
-   *
-   * @param m The ConcurrentMap to be converted.
-   * @return A Scala mutable ConcurrentMap view of the argument.
-   */
-  @deprecated("Use `mapAsScalaConcurrentMap` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
-  implicit def mapAsScalaDeprecatedConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] =
-    asScalaConcurrentMap(m)
-}
-
-trait WrapAsScala extends LowPriorityWrapAsScala {
+trait WrapAsScala {
   import Wrappers._
   /**
    * Implicitly converts a Java `Iterator` to a Scala `Iterator`.
@@ -178,25 +156,6 @@ trait WrapAsScala extends LowPriorityWrapAsScala {
    * @param m The ConcurrentMap to be converted.
    * @return A Scala mutable ConcurrentMap view of the argument.
    */
-  @deprecated("Use `mapAsScalaConcurrentMap` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
-  def asScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = m match {
-    case cmw: ConcurrentMapDeprecatedWrapper[a, b] => cmw.underlying
-    case _                                         => new JConcurrentMapDeprecatedWrapper(m)
-  }
-
-  /**
-   * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
-   * The returned Scala ConcurrentMap is backed by the provided Java
-   * ConcurrentMap and any side-effects of using it via the Scala interface will
-   * be visible via the Java interface and vice versa.
-   *
-   * If the Java ConcurrentMap was previously obtained from an implicit or
-   * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
-   * then the original Scala ConcurrentMap will be returned.
-   *
-   * @param m The ConcurrentMap to be converted.
-   * @return A Scala mutable ConcurrentMap view of the argument.
-   */
   implicit def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match {
     case cmw: ConcurrentMapWrapper[a, b]      => cmw.underlying
     case _                                    => new JConcurrentMapWrapper(m)
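
With the deprecated LowPriorityWrapAsScala gone, the only implicit view from juc.ConcurrentMap is mapAsScalaConcurrentMap, targeting concurrent.Map. A sketch of the implicit style via JavaConversions (names illustrative; the explicit JavaConverters style shown earlier is generally preferable):

    import java.util.concurrent.ConcurrentHashMap
    import scala.collection.JavaConversions.mapAsScalaConcurrentMap
    import scala.collection.concurrent

    object WrapAsScalaSketch {
      def main(args: Array[String]): Unit = {
        val jmap = new ConcurrentHashMap[String, Int]()

        // the implicit now converts straight to concurrent.Map;
        // the old mutable.ConcurrentMap view no longer exists
        val smap: concurrent.Map[String, Int] = jmap
        smap.put("a", 1)
        println(jmap.containsKey("a"))   // true
      }
    }
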
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
index 20add33..14ae57c 100644
--- a/src/library/scala/collection/convert/Wrappers.scala
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package convert
 
 import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
@@ -27,9 +28,9 @@ private[collection] trait Wrappers {
 
   case class IteratorWrapper[A](underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] {
     def hasNext = underlying.hasNext
-    def next() = underlying.next
+    def next() = underlying.next()
     def hasMoreElements = underlying.hasNext
-    def nextElement() = underlying.next
+    def nextElement() = underlying.next()
     def remove() = throw new UnsupportedOperationException
   }
 
@@ -81,7 +82,7 @@ private[collection] trait Wrappers {
     override def remove(i: Int) = underlying remove i
   }
 
-  case class JListWrapper[A](val underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] {
+  case class JListWrapper[A](underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] {
     def length = underlying.size
     override def isEmpty = underlying.isEmpty
     override def iterator: Iterator[A] = underlying.iterator
@@ -101,14 +102,20 @@ private[collection] trait Wrappers {
     override def clone(): JListWrapper[A] = JListWrapper(new ju.ArrayList[A](underlying))
   }
 
+  // Note various overrides to avoid performance gotchas.
   class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] {
     self =>
+    override def contains(o: Object): Boolean = {
+      try { underlying.contains(o.asInstanceOf[A]) }
+      catch { case cce: ClassCastException => false }
+    }
+    override def isEmpty = underlying.isEmpty
     def size = underlying.size
     def iterator = new ju.Iterator[A] {
       val ui = underlying.iterator
       var prev: Option[A] = None
       def hasNext = ui.hasNext
-      def next = { val e = ui.next; prev = Some(e); e }
+      def next = { val e = ui.next(); prev = Some(e); e }
       def remove = prev match {
         case Some(e) =>
           underlying match {
@@ -180,7 +187,7 @@ private[collection] trait Wrappers {
         def hasNext = ui.hasNext
 
         def next() = {
-          val (k, v) = ui.next
+          val (k, v) = ui.next()
           prev = Some(k)
           new ju.Map.Entry[A, B] {
             import scala.util.hashing.byteswap32
@@ -211,6 +218,15 @@ private[collection] trait Wrappers {
         }
       }
     }
+
+    override def containsKey(key: AnyRef): Boolean = try {
+      // Note: Subclass of collection.Map with specific key type may redirect generic
+      // contains to specific contains, which will throw a ClassCastException if the
+      // wrong type is passed. This is why we need a type cast to A inside a try/catch.
+      underlying.contains(key.asInstanceOf[A])
+    } catch {
+      case ex: ClassCastException => false
+    }
   }
 
   case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B]) extends MapWrapper[A, B](underlying) {
@@ -272,32 +288,10 @@ private[collection] trait Wrappers {
     override def empty: Repr = null.asInstanceOf[Repr]
   }
 
-  case class JMapWrapper[A, B](val underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] {
+  case class JMapWrapper[A, B](underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] {
     override def empty = JMapWrapper(new ju.HashMap[A, B])
   }
 
-  class ConcurrentMapDeprecatedWrapper[A, B](override val underlying: mutable.ConcurrentMap[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
-
-    def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
-      case Some(v) => v
-      case None => null.asInstanceOf[B]
-    }
-
-    def remove(k: AnyRef, v: AnyRef) = try {
-      underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B])
-    } catch {
-      case ex: ClassCastException =>
-        false
-    }
-
-    def replace(k: A, v: B): B = underlying.replace(k, v) match {
-      case Some(v) => v
-      case None => null.asInstanceOf[B]
-    }
-
-    def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
-  }
-
   class ConcurrentMapWrapper[A, B](override val underlying: concurrent.Map[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
 
     def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
@@ -320,32 +314,7 @@ private[collection] trait Wrappers {
     def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
   }
 
-  case class JConcurrentMapDeprecatedWrapper[A, B](val underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapDeprecatedWrapper[A, B]] with mutable.ConcurrentMap[A, B] {
-    override def get(k: A) = {
-      val v = underlying get k
-      if (v != null) Some(v)
-      else None
-    }
-
-    override def empty = new JConcurrentMapDeprecatedWrapper(new juc.ConcurrentHashMap[A, B])
-
-    def putIfAbsent(k: A, v: B): Option[B] = {
-      val r = underlying.putIfAbsent(k, v)
-      if (r != null) Some(r) else None
-    }
-
-    def remove(k: A, v: B): Boolean = underlying.remove(k, v)
-
-    def replace(k: A, v: B): Option[B] = {
-      val prev = underlying.replace(k, v)
-      if (prev != null) Some(prev) else None
-    }
-
-    def replace(k: A, oldvalue: B, newvalue: B): Boolean =
-      underlying.replace(k, oldvalue, newvalue)
-  }
-
-  case class JConcurrentMapWrapper[A, B](val underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] {
+  case class JConcurrentMapWrapper[A, B](underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] {
     override def get(k: A) = {
       val v = underlying get k
       if (v != null) Some(v)
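
The overridden contains/containsKey shown above exist so that querying a wrapped Scala collection through the Java interface with a key of the wrong type answers false instead of letting a ClassCastException escape from a type-specialized contains. A sketch of the observable contract (for a plain immutable Set the underlying contains would not throw anyway, so this is illustration only; names are made up):

    import scala.collection.JavaConverters._

    object WrapperContainsSketch {
      def main(args: Array[String]): Unit = {
        val jset: java.util.Set[String] = Set("a", "b").asJava   // backed by a SetWrapper

        println(jset.contains("a"))                    // true
        println(jset.contains(Integer.valueOf(1)))     // false, never an exception
      }
    }
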
diff --git a/src/library/scala/collection/convert/package.scala b/src/library/scala/collection/convert/package.scala
index ea66101..13970f9 100644
--- a/src/library/scala/collection/convert/package.scala
+++ b/src/library/scala/collection/convert/package.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 
 package object convert {
   val decorateAsJava  = new DecorateAsJava { }
diff --git a/src/library/scala/collection/generic/BitOperations.scala b/src/library/scala/collection/generic/BitOperations.scala
index c45ebcf..d430ece 100644
--- a/src/library/scala/collection/generic/BitOperations.scala
+++ b/src/library/scala/collection/generic/BitOperations.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 /** Some bit operations.
diff --git a/src/library/scala/collection/generic/BitSetFactory.scala b/src/library/scala/collection/generic/BitSetFactory.scala
index 46e2d29..2e3aae3 100644
--- a/src/library/scala/collection/generic/BitSetFactory.scala
+++ b/src/library/scala/collection/generic/BitSetFactory.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import scala.collection._
diff --git a/src/library/scala/collection/generic/CanBuildFrom.scala b/src/library/scala/collection/generic/CanBuildFrom.scala
index 73fd4fc..24e5b2a 100644
--- a/src/library/scala/collection/generic/CanBuildFrom.scala
+++ b/src/library/scala/collection/generic/CanBuildFrom.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.Builder
diff --git a/src/library/scala/collection/generic/CanCombineFrom.scala b/src/library/scala/collection/generic/CanCombineFrom.scala
index 9ca3332..7f70b45 100644
--- a/src/library/scala/collection/generic/CanCombineFrom.scala
+++ b/src/library/scala/collection/generic/CanCombineFrom.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import scala.collection.parallel._
diff --git a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
index 85cdbd7..e3db401 100644
--- a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
+++ b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/Clearable.scala b/src/library/scala/collection/generic/Clearable.scala
index a04ecb2..3c49605 100644
--- a/src/library/scala/collection/generic/Clearable.scala
+++ b/src/library/scala/collection/generic/Clearable.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 /** This trait forms part of collections that can be cleared
diff --git a/src/library/scala/collection/generic/FilterMonadic.scala b/src/library/scala/collection/generic/FilterMonadic.scala
index e21f0be..8aefbdb 100755
--- a/src/library/scala/collection/generic/FilterMonadic.scala
+++ b/src/library/scala/collection/generic/FilterMonadic.scala
@@ -6,8 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.generic
-
+package scala
+package collection
+package generic
 
 /** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods
  *  of trait `TraversableLike`.
diff --git a/src/library/scala/collection/generic/GenMapFactory.scala b/src/library/scala/collection/generic/GenMapFactory.scala
index e869bba..ae31501 100644
--- a/src/library/scala/collection/generic/GenMapFactory.scala
+++ b/src/library/scala/collection/generic/GenMapFactory.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.{Builder, MapBuilder}
@@ -44,7 +45,7 @@ abstract class GenMapFactory[CC[A, B] <: GenMap[A, B] with GenMapLike[A, B, CC[A
    *  @tparam B      the type of the associated values
    *  @return        a new $coll consisting key/value pairs given by `elems`.
    */
-  def apply[A, B](elems: (A, B)*): CC[A, B] = (newBuilder[A, B] ++= elems).result
+  def apply[A, B](elems: (A, B)*): CC[A, B] = (newBuilder[A, B] ++= elems).result()
 
   /** The default builder for $Coll objects.
    *  @tparam A      the type of the keys
diff --git a/src/library/scala/collection/generic/GenSeqFactory.scala b/src/library/scala/collection/generic/GenSeqFactory.scala
index dd375c5..6afbb2e 100644
--- a/src/library/scala/collection/generic/GenSeqFactory.scala
+++ b/src/library/scala/collection/generic/GenSeqFactory.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/GenSetFactory.scala b/src/library/scala/collection/generic/GenSetFactory.scala
index 9774805..800f66e 100644
--- a/src/library/scala/collection/generic/GenSetFactory.scala
+++ b/src/library/scala/collection/generic/GenSetFactory.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.Builder
diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala
index 2d3f7e6..2092c0c 100644
--- a/src/library/scala/collection/generic/GenTraversableFactory.scala
+++ b/src/library/scala/collection/generic/GenTraversableFactory.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import scala.language.higherKinds
@@ -38,12 +39,10 @@ import scala.language.higherKinds
 abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTraversableTemplate[X, CC]]
 extends GenericCompanion[CC] {
 
-  // A default implementation of GenericCanBuildFrom which can be cast
-  // to whatever is desired.
-  private class ReusableCBF extends GenericCanBuildFrom[Nothing] {
+  private[this] val ReusableCBFInstance: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
     override def apply() = newBuilder[Nothing]
   }
-  lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new ReusableCBF
+  def ReusableCBF: GenericCanBuildFrom[Nothing] = ReusableCBFInstance
 
   /** A generic implementation of the `CanBuildFrom` trait, which forwards
    *  all calls to `apply(from)` to the `genericBuilder` method of
@@ -75,7 +74,7 @@ extends GenericCompanion[CC] {
       b.sizeHint(xss.map(_.size).sum)
 
     for (xs <- xss.seq) b ++= xs
-    b.result
+    b.result()
   }
 
   /** Produces a $coll containing the results of some element computation a number of times.
@@ -91,7 +90,7 @@ extends GenericCompanion[CC] {
       b += elem
       i += 1
     }
-    b.result
+    b.result()
   }
 
   /** Produces a two-dimensional $coll containing the results of some element computation a number of times.
@@ -149,7 +148,7 @@ extends GenericCompanion[CC] {
       b += f(i)
       i += 1
     }
-    b.result
+    b.result()
   }
 
   /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
@@ -218,13 +217,13 @@ extends GenericCompanion[CC] {
 
     if (step == zero) throw new IllegalArgumentException("zero step")
     val b = newBuilder[T]
-    b sizeHint immutable.NumericRange.count(start, end, step, false)
+    b sizeHint immutable.NumericRange.count(start, end, step, isInclusive = false)
     var i = start
     while (if (step < zero) end < i else i < end) {
       b += i
       i += step
     }
-    b.result
+    b.result()
   }
 
   /** Produces a $coll containing repeated applications of a function to a start value.
@@ -248,7 +247,6 @@ extends GenericCompanion[CC] {
         b += acc
       }
     }
-    b.result
+    b.result()
   }
 }
-
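
The fill/tabulate/range/iterate methods tidied up in this hunk are the ones reachable from every collection companion object; a few representative calls (values purely illustrative):

    object FactorySketch {
      def main(args: Array[String]): Unit = {
        println(List.fill(3)("x"))                 // List(x, x, x)
        println(Vector.tabulate(4)(i => i * i))    // Vector(0, 1, 4, 9)
        println(List.range(0, 10, 3))              // List(0, 3, 6, 9)
        println(List.iterate(1, 5)(_ * 2))         // List(1, 2, 4, 8, 16)
      }
    }
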
diff --git a/src/library/scala/collection/generic/GenericClassTagCompanion.scala b/src/library/scala/collection/generic/GenericClassTagCompanion.scala
index 76c12d1..a8ac2bf 100644
--- a/src/library/scala/collection/generic/GenericClassTagCompanion.scala
+++ b/src/library/scala/collection/generic/GenericClassTagCompanion.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.Builder
@@ -23,11 +24,11 @@ abstract class GenericClassTagCompanion[+CC[X] <: Traversable[X]] {
 
   def newBuilder[A](implicit ord: ClassTag[A]): Builder[A, CC[A]]
 
-  def empty[A: ClassTag]: CC[A] = newBuilder[A].result
+  def empty[A: ClassTag]: CC[A] = newBuilder[A].result()
 
   def apply[A](elems: A*)(implicit ord: ClassTag[A]): CC[A] = {
     val b = newBuilder[A]
     b ++= elems
-    b.result
+    b.result()
   }
 }
diff --git a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
index f327710..090cd72 100644
--- a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.Builder
diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala
index b966ce5..67d0a9c 100644
--- a/src/library/scala/collection/generic/GenericCompanion.scala
+++ b/src/library/scala/collection/generic/GenericCompanion.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.Builder
@@ -34,7 +35,7 @@ abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] {
   /** An empty collection of type `$Coll[A]`
    *  @tparam A      the type of the ${coll}'s elements
    */
-  def empty[A]: CC[A] = newBuilder[A].result
+  def empty[A]: CC[A] = newBuilder[A].result()
 
   /** Creates a $coll with the specified elements.
    *  @tparam A      the type of the ${coll}'s elements
@@ -46,7 +47,7 @@ abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] {
     else {
       val b = newBuilder[A]
       b ++= elems
-      b.result
+      b.result()
     }
   }
 }
diff --git a/src/library/scala/collection/generic/GenericOrderedCompanion.scala b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
index 094912c..5b328bf 100644
--- a/src/library/scala/collection/generic/GenericOrderedCompanion.scala
+++ b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.Builder
@@ -23,12 +24,12 @@ abstract class GenericOrderedCompanion[+CC[X] <: Traversable[X]] {
 
   def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]]
 
-  def empty[A: Ordering]: CC[A] = newBuilder[A].result
+  def empty[A: Ordering]: CC[A] = newBuilder[A].result()
 
   def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = {
     val b = newBuilder[A]
     b ++= elems
-    b.result
+    b.result()
   }
 }
 
diff --git a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
index a624e8c..c1a41ce 100644
--- a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.Builder
diff --git a/src/library/scala/collection/generic/GenericParCompanion.scala b/src/library/scala/collection/generic/GenericParCompanion.scala
index bb39461..432b913 100644
--- a/src/library/scala/collection/generic/GenericParCompanion.scala
+++ b/src/library/scala/collection/generic/GenericParCompanion.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.generic
+package scala
+package collection
+package generic
 
 import scala.collection.parallel.Combiner
 import scala.collection.parallel.ParIterable
diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala
index 94c7663..b9b7043 100644
--- a/src/library/scala/collection/generic/GenericParTemplate.scala
+++ b/src/library/scala/collection/generic/GenericParTemplate.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.generic
+package scala
+package collection
+package generic
 
 import scala.collection.parallel.Combiner
 import scala.collection.parallel.ParIterable
diff --git a/src/library/scala/collection/generic/GenericSeqCompanion.scala b/src/library/scala/collection/generic/GenericSeqCompanion.scala
index 8b2f8a0..fd1e18a 100644
--- a/src/library/scala/collection/generic/GenericSeqCompanion.scala
+++ b/src/library/scala/collection/generic/GenericSeqCompanion.scala
@@ -6,8 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
+package scala
+package collection
 package generic
 
 import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/GenericSetTemplate.scala b/src/library/scala/collection/generic/GenericSetTemplate.scala
index ecfdcff..2cadd14 100644
--- a/src/library/scala/collection/generic/GenericSetTemplate.scala
+++ b/src/library/scala/collection/generic/GenericSetTemplate.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 import scala.language.higherKinds
 /**
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index f7a8a9a..cd48cd2 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.Builder
@@ -73,11 +74,20 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
   /** Converts this $coll of pairs into two collections of the first and second
    *  half of each pair.
    *
+   *    {{{
+   *    val xs = $Coll(
+   *               (1, "one"),
+   *               (2, "two"),
+   *               (3, "three")).unzip
+   *    // xs == ($Coll(1, 2, 3),
+   *    //        $Coll(one, two, three))
+   *    }}}
+   *
    *  @tparam A1    the type of the first half of the element pairs
    *  @tparam A2    the type of the second half of the element pairs
    *  @param asPair an implicit conversion which asserts that the element type
    *                of this $coll is a pair.
-   *  @return       a pair ${coll}s, containing the first, respectively second
+   *  @return       a pair of ${coll}s, containing the first, respectively second
    *                half of each element pair of this $coll.
    */
   def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = {
@@ -88,18 +98,28 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
       b1 += x
       b2 += y
     }
-    (b1.result, b2.result)
+    (b1.result(), b2.result())
   }
 
   /** Converts this $coll of triples into three collections of the first, second,
    *  and third element of each triple.
    *
+   *    {{{
+   *    val xs = $Coll(
+   *               (1, "one", '1'),
+   *               (2, "two", '2'),
+   *               (3, "three", '3')).unzip3
+   *    // xs == ($Coll(1, 2, 3),
+   *    //        $Coll(one, two, three),
+   *    //        $Coll(1, 2, 3))
+   *    }}}
+   *
    *  @tparam A1       the type of the first member of the element triples
    *  @tparam A2       the type of the second member of the element triples
    *  @tparam A3       the type of the third member of the element triples
    *  @param asTriple  an implicit conversion which asserts that the element type
    *                   of this $coll is a triple.
-   *  @return          a triple ${coll}s, containing the first, second, respectively
+   *  @return          a triple of ${coll}s, containing the first, second, respectively
    *                   third member of each element triple of this $coll.
    */
   def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = {
@@ -113,7 +133,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
       b2 += y
       b3 += z
     }
-    (b1.result, b2.result, b3.result)
+    (b1.result(), b2.result(), b3.result())
   }
 
   /** Converts this $coll of traversable collections into
@@ -133,10 +153,16 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
    *    static type of $coll. For example:
    *
    *    {{{
-   *    val xs = List(Set(1, 2, 3), Set(1, 2, 3))
+   *    val xs = List(
+   *               Set(1, 2, 3),
+   *               Set(1, 2, 3)
+   *             ).flatten
    *    // xs == List(1, 2, 3, 1, 2, 3)
    *
-   *    val ys = Set(List(1, 2, 3), List(3, 2, 1))
+   *    val ys = Set(
+   *               List(1, 2, 3),
+   *               List(3, 2, 1)
+   *             ).flatten
    *    // ys == Set(1, 2, 3)
    *    }}}
    */
@@ -144,12 +170,33 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
     val b = genericBuilder[B]
     for (xs <- sequential)
       b ++= asTraversable(xs).seq
-    b.result
+    b.result()
   }
 
   /** Transposes this $coll of traversable collections into
    *  a $coll of ${coll}s.
    *
+   *    The resulting collection's type will be guided by the
+   *    static type of $coll. For example:
+   *
+   *    {{{
+   *    val xs = List(
+   *               Set(1, 2, 3),
+   *               Set(4, 5, 6)).transpose
+   *    // xs == List(
+   *    //         List(1, 4),
+   *    //         List(2, 5),
+   *    //         List(3, 6))
+   *
+   *    val ys = Vector(
+   *               List(1, 2, 3),
+   *               List(4, 5, 6)).transpose
+   *    // ys == Vector(
+   *    //         Vector(1, 4),
+   *    //         Vector(2, 5),
+   *    //         Vector(3, 6))
+   *    }}}
+   *
    *  @tparam B the type of the elements of each traversable collection.
    *  @param  asTraversable an implicit conversion which asserts that the
    *          element type of this $coll is a `Traversable`.
@@ -161,7 +208,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
   @migration("`transpose` throws an `IllegalArgumentException` if collections are not uniformly sized.", "2.9.0")
   def transpose[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]): CC[CC[B] @uncheckedVariance] = {
     if (isEmpty)
-      return genericBuilder[CC[B]].result
+      return genericBuilder[CC[B]].result()
 
     def fail = throw new IllegalArgumentException("transpose requires all collections have the same size")
 
@@ -179,7 +226,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
     }
     val bb = genericBuilder[CC[B]]
     for (b <- bs) bb += b.result
-    bb.result
+    bb.result()
   }
 }
 
diff --git a/src/library/scala/collection/generic/Growable.scala b/src/library/scala/collection/generic/Growable.scala
index cb75212..a223c0c 100644
--- a/src/library/scala/collection/generic/Growable.scala
+++ b/src/library/scala/collection/generic/Growable.scala
@@ -6,10 +6,12 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
+package scala
+package collection
 package generic
 
+import scala.annotation.tailrec
+
 /** This trait forms part of collections that can be augmented
  *  using a `+=` operator and that can be cleared of all elements using
  *  a `clear` method.
@@ -45,7 +47,19 @@ trait Growable[-A] extends Clearable {
    *  @param xs   the TraversableOnce producing the elements to $add.
    *  @return  the $coll itself.
    */
-  def ++=(xs: TraversableOnce[A]): this.type = { xs.seq foreach += ; this }
+  def ++=(xs: TraversableOnce[A]): this.type = {
+    @tailrec def loop(xs: scala.collection.LinearSeq[A]) {
+      if (xs.nonEmpty) {
+        this += xs.head
+        loop(xs.tail)
+      }
+    }
+    xs match {
+      case xs: scala.collection.LinearSeq[_] => loop(xs)
+      case xs                                => xs foreach +=
+    }
+    this
+  }
 
   /** Clears the $coll's contents. After this operation, the
    *  $coll is empty.
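
The rewritten ++= above special-cases LinearSeq and walks it with a tail-recursive head/tail loop rather than calling foreach. A standalone sketch of the same pattern written against mutable.ListBuffer (the helper name appendAll is illustrative, not part of the library):

    import scala.annotation.tailrec
    import scala.collection.mutable.ListBuffer

    object GrowableLoopSketch {
      // Same shape as the new Growable.++=: linear sequences are traversed
      // head/tail in a @tailrec loop, everything else falls back to foreach.
      def appendAll[A](buf: ListBuffer[A], xs: TraversableOnce[A]): ListBuffer[A] = {
        @tailrec def loop(rest: scala.collection.LinearSeq[A]): Unit =
          if (rest.nonEmpty) { buf += rest.head; loop(rest.tail) }

        xs match {
          case seq: scala.collection.LinearSeq[A] => loop(seq)
          case other                              => other foreach (buf += _)
        }
        buf
      }

      def main(args: Array[String]): Unit = {
        println(appendAll(ListBuffer(1, 2), List(3, 4, 5)))   // ListBuffer(1, 2, 3, 4, 5)
      }
    }
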
diff --git a/src/library/scala/collection/generic/HasNewBuilder.scala b/src/library/scala/collection/generic/HasNewBuilder.scala
index 1a981b4..aa0ce66 100755
--- a/src/library/scala/collection/generic/HasNewBuilder.scala
+++ b/src/library/scala/collection/generic/HasNewBuilder.scala
@@ -5,7 +5,8 @@
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
 \*                                                                      */
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.Builder
diff --git a/src/library/scala/collection/generic/HasNewCombiner.scala b/src/library/scala/collection/generic/HasNewCombiner.scala
index 1ecfba1..99a0722 100644
--- a/src/library/scala/collection/generic/HasNewCombiner.scala
+++ b/src/library/scala/collection/generic/HasNewCombiner.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.generic
+package scala
+package collection
+package generic
 
 import scala.collection.parallel.Combiner
 
diff --git a/src/library/scala/collection/generic/ImmutableMapFactory.scala b/src/library/scala/collection/generic/ImmutableMapFactory.scala
index 4ce50a3..7d857bf 100644
--- a/src/library/scala/collection/generic/ImmutableMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableMapFactory.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/ImmutableSetFactory.scala b/src/library/scala/collection/generic/ImmutableSetFactory.scala
index 2e960e6..a72caf2 100644
--- a/src/library/scala/collection/generic/ImmutableSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSetFactory.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.{ Builder, SetBuilder }
@@ -14,6 +15,7 @@ import scala.language.higherKinds
 
 abstract class ImmutableSetFactory[CC[X] <: immutable.Set[X] with SetLike[X, CC[X]]]
   extends SetFactory[CC] {
-
+  private[collection] def emptyInstance: CC[Any]
+  override def empty[A] = emptyInstance.asInstanceOf[CC[A]]
   def newBuilder[A]: Builder[A, CC[A]] = new SetBuilder[A, CC[A]](empty[A])
 }
diff --git a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
index 7743fc2..730e58a 100644
--- a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
index 9914557..1fd4a8c 100644
--- a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/IndexedSeqFactory.scala b/src/library/scala/collection/generic/IndexedSeqFactory.scala
new file mode 100644
index 0000000..ddc0141
--- /dev/null
+++ b/src/library/scala/collection/generic/IndexedSeqFactory.scala
@@ -0,0 +1,22 @@
+/*                     __                                               *\
+**     ________ ___   / /  ___     Scala API                            **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
+**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
+** /____/\___/_/ |_/____/_/ | |                                         **
+**                          |/                                          **
+\*                                                                      */
+
+package scala
+package collection
+package generic
+
+import language.higherKinds
+
+/** A template for companion objects of IndexedSeq and subclasses thereof.
+ *
+ *  @since 2.11
+ */
+abstract class IndexedSeqFactory[CC[X] <: IndexedSeq[X] with GenericTraversableTemplate[X, CC]] extends SeqFactory[CC] {
+  override def ReusableCBF: GenericCanBuildFrom[Nothing] =
+    scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
+}
diff --git a/src/library/scala/collection/generic/IsSeqLike.scala b/src/library/scala/collection/generic/IsSeqLike.scala
new file mode 100644
index 0000000..4c857ad
--- /dev/null
+++ b/src/library/scala/collection/generic/IsSeqLike.scala
@@ -0,0 +1,58 @@
+/*                     __                                               *\
+**     ________ ___   / /  ___     Scala API                            **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
+**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
+** /____/\___/_/ |_/____/_/ | |                                         **
+**                          |/                                          **
+\*                                                                      */
+
+package scala
+package collection
+package generic
+
+/** Type class witnessing that a collection representation type `Repr` has
+  * elements of type `A` and has a conversion to `SeqLike[A, Repr]`.
+  *
+  * This type enables simple enrichment of `Seq`s with extension methods which
+  * can make full use of the mechanics of the Scala collections framework in
+  * their implementation.
+  *
+  * Example usage:
+  * {{{
+  *    class FilterMapImpl[A, Repr](val r: SeqLike[A, Repr]) {
+  *      final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That =
+  *        r.flatMap(f(_))
+  *    }
+  *    implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsSeqLike[Repr]): FilterMapImpl[fr.A,Repr] =
+  *      new FilterMapImpl(fr.conversion(r))
+  *
+  *    val l = List(1, 2, 3, 4, 5)
+  *    List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None)
+  *    // == List(2, 4)
+  * }}}
+  *
+  * @see [[scala.collection.Seq]]
+  * @see [[scala.collection.generic.IsTraversableLike]]
+  */
+trait IsSeqLike[Repr] {
+  /** The type of elements we can traverse over. */
+  type A
+  /** A conversion from the representation type `Repr` to a `SeqLike[A,Repr]`. */
+  val conversion: Repr => SeqLike[A, Repr]
+}
+
+object IsSeqLike {
+  import scala.language.higherKinds
+
+  implicit val stringRepr: IsSeqLike[String] { type A = Char } =
+    new IsSeqLike[String] {
+      type A = Char
+      val conversion = implicitly[String => SeqLike[Char, String]]
+    }
+
+  implicit def seqLikeRepr[C[_], A0](implicit conv: C[A0] => SeqLike[A0,C[A0]]): IsSeqLike[C[A0]] { type A = A0 } =
+    new IsSeqLike[C[A0]] {
+      type A = A0
+      val conversion = conv
+    }
+}
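
Since the new IsSeqLike also ships a stringRepr instance, the filterMap enrichment from the scaladoc above applies to plain Strings as well. A self-contained sketch repeating the scaladoc's FilterMapImpl (these names come from the example in the file, not from the library proper):

    import scala.collection.SeqLike
    import scala.collection.generic.{ CanBuildFrom, IsSeqLike }
    import scala.language.implicitConversions

    object IsSeqLikeSketch {
      class FilterMapImpl[A, Repr](val r: SeqLike[A, Repr]) {
        final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That =
          r.flatMap(f(_))
      }
      implicit def filterMap[Repr](r: Repr)(implicit fr: IsSeqLike[Repr]): FilterMapImpl[fr.A, Repr] =
        new FilterMapImpl(fr.conversion(r))

      def main(args: Array[String]): Unit = {
        println(List(1, 2, 3, 4, 5) filterMap (i => if (i % 2 == 0) Some(i) else None))   // List(2, 4)
        println("SoMe TeXt" filterMap (c => if (c.isUpper) Some(c) else None))            // SMTX
      }
    }
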
diff --git a/src/library/scala/collection/generic/IsTraversableLike.scala b/src/library/scala/collection/generic/IsTraversableLike.scala
index c70772d..22cef55 100644
--- a/src/library/scala/collection/generic/IsTraversableLike.scala
+++ b/src/library/scala/collection/generic/IsTraversableLike.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 /** A trait which can be used to avoid code duplication when defining extension
diff --git a/src/library/scala/collection/generic/IsTraversableOnce.scala b/src/library/scala/collection/generic/IsTraversableOnce.scala
index bb5404c..3ee586a 100644
--- a/src/library/scala/collection/generic/IsTraversableOnce.scala
+++ b/src/library/scala/collection/generic/IsTraversableOnce.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 /** Type class witnessing that a collection representation type `Repr` has
diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala
index 90ebcac..7387dbe 100644
--- a/src/library/scala/collection/generic/IterableForwarder.scala
+++ b/src/library/scala/collection/generic/IterableForwarder.scala
@@ -6,13 +6,12 @@
 **                          |/                                          **
 \*                                                                      */
 
+package scala
+package collection
+package generic
 
-
-package scala.collection.generic
 import scala.collection._
 
-import scala.collection.mutable.Buffer
-
 /** This trait implements a forwarder for iterable objects. It forwards
  *  all calls to a different iterable object, except for
  *
@@ -27,6 +26,7 @@ import scala.collection.mutable.Buffer
  *  @version 2.8
  *  @since   2.8
  */
+@deprecated("Forwarding is inherently unreliable since it is not automated and methods can be forgotten.", "2.11.0")
 trait IterableForwarder[+A] extends Iterable[A] with TraversableForwarder[A] {
 
   /** The iterable object to which calls are forwarded */
diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala
index 565850b..b9f3d4b 100644
--- a/src/library/scala/collection/generic/MapFactory.scala
+++ b/src/library/scala/collection/generic/MapFactory.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 
diff --git a/src/library/scala/collection/generic/MutableMapFactory.scala b/src/library/scala/collection/generic/MutableMapFactory.scala
index ac139cc..14c5b6b 100644
--- a/src/library/scala/collection/generic/MutableMapFactory.scala
+++ b/src/library/scala/collection/generic/MutableMapFactory.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.Builder
diff --git a/src/library/scala/collection/generic/MutableSetFactory.scala b/src/library/scala/collection/generic/MutableSetFactory.scala
index 9c69d53..6394465 100644
--- a/src/library/scala/collection/generic/MutableSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSetFactory.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.{ Builder, GrowingBuilder }
diff --git a/src/library/scala/collection/generic/MutableSortedSetFactory.scala b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
index b9be83c..0339a52 100644
--- a/src/library/scala/collection/generic/MutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import scala.collection.mutable.{ Builder, GrowingBuilder }
diff --git a/src/library/scala/collection/generic/OrderedTraversableFactory.scala b/src/library/scala/collection/generic/OrderedTraversableFactory.scala
index a2de108..7657aff 100644
--- a/src/library/scala/collection/generic/OrderedTraversableFactory.scala
+++ b/src/library/scala/collection/generic/OrderedTraversableFactory.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index bb88d26..4486cea 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.generic
+package scala
+package collection
+package generic
 
 import scala.collection.parallel.ParIterable
 import scala.collection.parallel.Combiner
@@ -35,8 +37,3 @@ extends GenTraversableFactory[CC]
     override def apply() = newBuilder[A]
   }
 }
-
-
-
-
-
diff --git a/src/library/scala/collection/generic/ParMapFactory.scala b/src/library/scala/collection/generic/ParMapFactory.scala
index 0a6b08a..70797c8 100644
--- a/src/library/scala/collection/generic/ParMapFactory.scala
+++ b/src/library/scala/collection/generic/ParMapFactory.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.generic
+package scala
+package collection
+package generic
 
 import scala.collection.parallel.ParMap
 import scala.collection.parallel.ParMapLike
diff --git a/src/library/scala/collection/generic/ParSetFactory.scala b/src/library/scala/collection/generic/ParSetFactory.scala
index 3727ab8..4320635 100644
--- a/src/library/scala/collection/generic/ParSetFactory.scala
+++ b/src/library/scala/collection/generic/ParSetFactory.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.generic
+package scala
+package collection
+package generic
 
 import scala.collection.mutable.Builder
 import scala.collection.parallel.Combiner
diff --git a/src/library/scala/collection/generic/SeqFactory.scala b/src/library/scala/collection/generic/SeqFactory.scala
index a660747..35cce11 100644
--- a/src/library/scala/collection/generic/SeqFactory.scala
+++ b/src/library/scala/collection/generic/SeqFactory.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package generic
 import scala.language.higherKinds
 
diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala
index e8b15ec..e21e2ea 100644
--- a/src/library/scala/collection/generic/SeqForwarder.scala
+++ b/src/library/scala/collection/generic/SeqForwarder.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.generic
+package scala
+package collection
+package generic
 import scala.collection._
 import scala.collection.immutable.Range
 
@@ -23,6 +25,7 @@ import scala.collection.immutable.Range
  *  @version 2.8
  *  @since   2.8
  */
+ at deprecated("Forwarding is inherently unreliable since it is not automated and new methods can be forgotten.", "2.11.0")
 trait SeqForwarder[+A] extends Seq[A] with IterableForwarder[A] {
 
   protected override def underlying: Seq[A]
@@ -50,7 +53,7 @@ trait SeqForwarder[+A] extends Seq[A] with IterableForwarder[A] {
   override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = underlying lastIndexOfSlice that
   override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = underlying.lastIndexOfSlice(that, end)
   override def containsSlice[B](that: GenSeq[B]): Boolean = underlying containsSlice that
-  override def contains(elem: Any): Boolean = underlying contains elem
+  override def contains[A1 >: A](elem: A1): Boolean = underlying contains elem
   override def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = underlying.corresponds(that)(p)
   override def indices: Range = underlying.indices
 }
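
SeqForwarder is deprecated above because a hand-written forwarder has to mirror the forwarded trait method by method, and silently falls out of sync when new methods are added. A minimal sketch of that pattern, not part of the patch (ForwardingSeq is an illustration-only name):

    import scala.collection.immutable

    // A hand-rolled forwarder: the three abstract Seq methods delegate to the
    // underlying sequence, but every concrete method not listed here keeps the
    // generic Seq implementation -- the maintenance hazard the deprecation names.
    class ForwardingSeq[A](underlying: immutable.Seq[A]) extends immutable.Seq[A] {
      def apply(i: Int): A      = underlying(i)
      def length: Int           = underlying.length
      def iterator: Iterator[A] = underlying.iterator
    }

    object ForwardingSeqDemo extends App {
      val fs = new ForwardingSeq(Vector(1, 2, 3))
      println(fs.contains(2)) // true, computed by the generic Seq code, not Vector's
    }
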
diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala
index e9bbde9..fcd8d00 100644
--- a/src/library/scala/collection/generic/SetFactory.scala
+++ b/src/library/scala/collection/generic/SetFactory.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.Builder
diff --git a/src/library/scala/collection/generic/Shrinkable.scala b/src/library/scala/collection/generic/Shrinkable.scala
index b00048f..dea5bb7 100644
--- a/src/library/scala/collection/generic/Shrinkable.scala
+++ b/src/library/scala/collection/generic/Shrinkable.scala
@@ -6,8 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
+package scala
+package collection
 package generic
 
 /** This trait forms part of collections that can be reduced
@@ -46,9 +46,5 @@ trait Shrinkable[-A] {
    *  @param xs   the iterator producing the elements to remove.
    *  @return the $coll itself
    */
-  def --=(xs: TraversableOnce[A]): this.type = { xs.seq foreach -= ; this }
+  def --=(xs: TraversableOnce[A]): this.type = { xs foreach -= ; this }
 }
-
-
-
-
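
For context, `-=` and `--=` from Shrinkable are the removal operations shared by the mutable collections; a small usage sketch against the standard mutable.Set (illustration only):

    import scala.collection.mutable

    object ShrinkableDemo extends App {
      val s = mutable.Set(1, 2, 3, 4, 5)
      s -= 3                // single-element removal
      s --= Iterator(1, 5)  // bulk removal; any TraversableOnce is accepted
      println(s)            // Set(2, 4), in some order
    }
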
diff --git a/src/library/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala
index 498db7f..021d289 100644
--- a/src/library/scala/collection/generic/Signalling.scala
+++ b/src/library/scala/collection/generic/Signalling.scala
@@ -6,15 +6,12 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.generic
-
+package scala
+package collection
+package generic
 
 import java.util.concurrent.atomic.AtomicInteger
 
-
-
-
-
 /**
  * A message interface serves as a unique interface to the
  * part of the collection capable of receiving messages from
@@ -95,7 +92,6 @@ trait Signalling {
   def tag: Int
 }
 
-
 /**
  * This signalling implementation returns default values and ignores received signals.
  */
@@ -108,13 +104,11 @@ class DefaultSignalling extends Signalling with VolatileAbort {
   def tag = -1
 }
 
-
 /**
  * An object that returns default values and ignores received signals.
  */
 object IdleSignalling extends DefaultSignalling
 
-
 /**
  * A mixin trait that implements abort flag behaviour using volatile variables.
  */
@@ -124,7 +118,6 @@ trait VolatileAbort extends Signalling {
   override def abort() = abortflag = true
 }
 
-
 /**
  * A mixin trait that implements index flag behaviour using atomic integers.
  * The `setIndex` operation is wait-free, while conditional set operations `setIndexIfGreater`
@@ -140,7 +133,7 @@ trait AtomicIndexFlag extends Signalling {
       val old = intflag.get
       if (f <= old) loop = false
       else if (intflag.compareAndSet(old, f)) loop = false
-    } while (loop);
+    } while (loop)
   }
   abstract override def setIndexFlagIfLesser(f: Int) = {
     var loop = true
@@ -148,11 +141,10 @@ trait AtomicIndexFlag extends Signalling {
       val old = intflag.get
       if (f >= old) loop = false
       else if (intflag.compareAndSet(old, f)) loop = false
-    } while (loop);
+    } while (loop)
   }
 }
 
-
 /**
  * An implementation of the signalling interface using delegates.
  */
@@ -163,7 +155,7 @@ trait DelegatedSignalling extends Signalling {
   var signalDelegate: Signalling
 
   def isAborted = signalDelegate.isAborted
-  def abort() = signalDelegate.abort
+  def abort() = signalDelegate.abort()
 
   def indexFlag = signalDelegate.indexFlag
   def setIndexFlag(f: Int) = signalDelegate.setIndexFlag(f)
@@ -173,25 +165,12 @@ trait DelegatedSignalling extends Signalling {
   def tag = signalDelegate.tag
 }
 
-
 /**
  * Class implementing delegated signalling.
  */
 class DelegatedContext(var signalDelegate: Signalling) extends DelegatedSignalling
 
-
 /**
  * Class implementing delegated signalling, but having its own distinct `tag`.
  */
 class TaggedDelegatedContext(deleg: Signalling, override val tag: Int) extends DelegatedContext(deleg)
-
-
-
-
-
-
-
-
-
-
-
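
The setIndexFlagIfGreater/IfLesser methods above use a compare-and-set retry loop on an AtomicInteger. A standalone sketch of the same pattern (setIfGreater is a local helper name, not the library's):

    import java.util.concurrent.atomic.AtomicInteger

    object CasLoopSketch extends App {
      private val flag = new AtomicInteger(-1)

      // Same retry shape as AtomicIndexFlag.setIndexFlagIfGreater: re-read the
      // current value and only publish f while it is still an improvement.
      def setIfGreater(f: Int): Unit = {
        var loop = true
        do {
          val old = flag.get
          if (f <= old) loop = false                        // nothing to do
          else if (flag.compareAndSet(old, f)) loop = false // won the race
          // otherwise another thread moved the value first; retry
        } while (loop)
      }

      setIfGreater(10)
      setIfGreater(4)   // ignored, 4 <= 10
      println(flag.get) // 10
    }
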
diff --git a/src/library/scala/collection/generic/Sizing.scala b/src/library/scala/collection/generic/Sizing.scala
index 1191259..73584ce 100644
--- a/src/library/scala/collection/generic/Sizing.scala
+++ b/src/library/scala/collection/generic/Sizing.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.generic
+package scala
+package collection
+package generic
 
 /** A trait for objects which have a size.
  */
diff --git a/src/library/scala/collection/generic/SliceInterval.scala b/src/library/scala/collection/generic/SliceInterval.scala
index 244e960..82acdd1 100644
--- a/src/library/scala/collection/generic/SliceInterval.scala
+++ b/src/library/scala/collection/generic/SliceInterval.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 /** A container for the endpoints of a collection slice.
diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala
index f962b26..a0b0e13 100644
--- a/src/library/scala/collection/generic/Sorted.scala
+++ b/src/library/scala/collection/generic/Sorted.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package generic
 
 /** Any collection (including maps) whose keys (or elements) are ordered.
@@ -61,7 +62,8 @@ trait Sorted[K, +This <: Sorted[K, This]] {
   /** Creates a ranged projection of this collection with both a lower-bound
    *  and an upper-bound.
    *
-   *  @param from  The upper-bound (exclusive) of the ranged projection.
+   *  @param from The lower-bound (inclusive) of the ranged projection.
+   *  @param until The upper-bound (exclusive) of the ranged projection.
    */
   def range(from: K, until: K): This = rangeImpl(Some(from), Some(until))
 
@@ -71,28 +73,40 @@ trait Sorted[K, +This <: Sorted[K, This]] {
   def to(to: K): This = {
     val i = keySet.from(to).iterator
     if (i.isEmpty) return repr
-    val next = i.next
+    val next = i.next()
     if (compare(next, to) == 0)
       if (i.isEmpty) repr
-      else until(i.next)
+      else until(i.next())
     else
       until(next)
   }
 
+  /**
+   * Creates an iterator over all the keys (or elements) contained in this
+   * collection that are greater than or equal to `start`,
+   * according to the ordering of this collection. x.keysIteratorFrom(y)
+   * is equivalent to, but often more efficient than,
+   * x.from(y).keysIterator.
+   *
+   * @param start The lower bound (inclusive)
+   *              on the keys to be returned
+   */
+  def keysIteratorFrom(start: K): Iterator[K]
+
   protected def hasAll(j: Iterator[K]): Boolean = {
     val i = keySet.iterator
     if (i.isEmpty) return j.isEmpty
 
-    var in = i.next;
+    var in = i.next()
     while (j.hasNext) {
-      val jn = j.next;
+      val jn = j.next()
       while ({
-        val n = compare(jn, in);
-        if (n == 0) false;
-        else if (n < 0) return false;
-        else if (!i.hasNext) return false;
-        else true;
-      }) in = i.next;
+        val n = compare(jn, in)
+        if (n == 0) false
+        else if (n < 0) return false
+        else if (!i.hasNext) return false
+        else true
+      }) in = i.next()
     }
     true
   }
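
keysIteratorFrom is implemented by the sorted collections; a small usage sketch, assuming the 2.11 SortedSet API as exposed by immutable.TreeSet:

    import scala.collection.immutable.TreeSet

    object KeysIteratorFromDemo extends App {
      val ts = TreeSet(1, 3, 5, 7, 9)
      // keys >= 4 in sorted order, without materializing a ranged collection
      println(ts.keysIteratorFrom(4).toList) // List(5, 7, 9)
      // the equivalent spelled through a ranged projection
      println(ts.from(4).iterator.toList)    // List(5, 7, 9)
    }
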
diff --git a/src/library/scala/collection/generic/SortedMapFactory.scala b/src/library/scala/collection/generic/SortedMapFactory.scala
index 17201b0..afa11e9 100644
--- a/src/library/scala/collection/generic/SortedMapFactory.scala
+++ b/src/library/scala/collection/generic/SortedMapFactory.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.{Builder, MapBuilder}
@@ -24,7 +25,7 @@ abstract class SortedMapFactory[CC[A, B] <: SortedMap[A, B] with SortedMapLike[A
 
   def empty[A, B](implicit ord: Ordering[A]): CC[A, B]
 
-  def apply[A, B](elems: (A, B)*)(implicit ord: Ordering[A]): CC[A, B] = (newBuilder[A, B](ord) ++= elems).result
+  def apply[A, B](elems: (A, B)*)(implicit ord: Ordering[A]): CC[A, B] = (newBuilder[A, B](ord) ++= elems).result()
 
   def newBuilder[A, B](implicit ord: Ordering[A]): Builder[(A, B), CC[A, B]] =
     new MapBuilder[A, B, CC[A, B]](empty(ord))
diff --git a/src/library/scala/collection/generic/SortedSetFactory.scala b/src/library/scala/collection/generic/SortedSetFactory.scala
index 08bca04..c734830 100644
--- a/src/library/scala/collection/generic/SortedSetFactory.scala
+++ b/src/library/scala/collection/generic/SortedSetFactory.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import mutable.{Builder, SetBuilder}
@@ -23,11 +24,11 @@ abstract class SortedSetFactory[CC[A] <: SortedSet[A] with SortedSetLike[A, CC[A
 
   def empty[A](implicit ord: Ordering[A]): CC[A]
 
-  def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = (newBuilder[A](ord) ++= elems).result
+  def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = (newBuilder[A](ord) ++= elems).result()
 
   def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] = new SetBuilder[A, CC[A]](empty)
 
-  implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, CC[A]] = new SortedSetCanBuildFrom()(ord);
+  implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, CC[A]] = new SortedSetCanBuildFrom()(ord)
 
   class SortedSetCanBuildFrom[A](implicit ord: Ordering[A]) extends CanBuildFrom[Coll, A, CC[A]] {
     def apply(from: Coll) = newBuilder[A](ord)
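
newCanBuildFrom is what lets transformations on sorted sets produce sorted sets again, provided an Ordering for the result element type is in scope. A minimal sketch using immutable.TreeSet:

    import scala.collection.immutable.TreeSet

    object SortedCanBuildFromDemo extends App {
      val ts = TreeSet(3, 1, 2)
      // map resolves a CanBuildFrom; SortedSetCanBuildFrom supplies one because
      // an implicit Ordering[String] exists, so the result is again sorted
      val strings = ts.map(n => (n * 10).toString)
      println(strings) // TreeSet(10, 20, 30)
    }
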
diff --git a/src/library/scala/collection/generic/Subtractable.scala b/src/library/scala/collection/generic/Subtractable.scala
index e0fe07a..32a9000 100644
--- a/src/library/scala/collection/generic/Subtractable.scala
+++ b/src/library/scala/collection/generic/Subtractable.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 
diff --git a/src/library/scala/collection/generic/TraversableFactory.scala b/src/library/scala/collection/generic/TraversableFactory.scala
index 5d1c9d1..ad6d8fd 100644
--- a/src/library/scala/collection/generic/TraversableFactory.scala
+++ b/src/library/scala/collection/generic/TraversableFactory.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 package generic
 
 import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala
index 2662018..1d7974f 100644
--- a/src/library/scala/collection/generic/TraversableForwarder.scala
+++ b/src/library/scala/collection/generic/TraversableForwarder.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.generic
+package scala
+package collection
+package generic
 
 import scala.collection._
 import mutable.{ Buffer, StringBuilder }
@@ -25,6 +27,7 @@ import scala.reflect.ClassTag
  *  @version 2.8
  *  @since   2.8
  */
+ at deprecated("Forwarding is inherently unreliable since it is not automated and new methods can be forgotten.", "2.11.0")
 trait TraversableForwarder[+A] extends Traversable[A] {
   /** The traversable object to which calls are forwarded. */
   protected def underlying: Traversable[A]
diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala
index dd47b7a..1beb4a8 100644
--- a/src/library/scala/collection/generic/package.scala
+++ b/src/library/scala/collection/generic/package.scala
@@ -1,4 +1,5 @@
-package scala.collection
+package scala
+package collection
 import generic.CanBuildFrom
 
 import scala.language.higherKinds
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index ed3630e..70543aa 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -31,9 +32,6 @@ abstract class BitSet extends scala.collection.AbstractSet[Int]
                          with Serializable {
   override def empty = BitSet.empty
 
-  @deprecated("Use BitSet.fromBitMask[NoCopy] instead of fromArray", "2.10.0")
-  def fromArray(elems: Array[Long]): BitSet = fromBitMaskNoCopy(elems)
-
   protected def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems)
 
   /** Update word at index `idx`; enlarge set if `idx` outside range of set.
@@ -74,7 +72,7 @@ object BitSet extends BitSetFactory[BitSet] {
   def newBuilder: Builder[Int, BitSet] = new Builder[Int, BitSet] {
     private[this] val b = new mutable.BitSet
     def += (x: Int) = { b += x; this }
-    def clear() = b.clear
+    def clear() = b.clear()
     def result() = b.toImmutable
   }
 
@@ -82,10 +80,6 @@ object BitSet extends BitSetFactory[BitSet] {
   implicit def canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom
 
   /** A bitset containing all the bits in an array */
-  @deprecated("Use fromBitMask[NoCopy] instead of fromArray", "2.10.0")
-  def fromArray(elems: Array[Long]): BitSet = fromBitMaskNoCopy(elems)
-
-  /** A bitset containing all the bits in an array */
   def fromBitMask(elems: Array[Long]): BitSet = {
     val len = elems.length
     if (len == 0) empty
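
With fromArray removed, fromBitMask and fromBitMaskNoCopy remain as the word-array constructors; each Long covers 64 bit positions. A small usage sketch:

    import scala.collection.immutable.BitSet

    object BitMaskDemo extends App {
      // word 0 covers bits 0..63; binary 101 sets bits 0 and 2
      val bs = BitSet.fromBitMask(Array(5L))
      println(bs)                  // BitSet(0, 2)
      println(bs.toBitMask.toList) // List(5): round-trip back to the words
    }
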
diff --git a/src/library/scala/collection/immutable/DefaultMap.scala b/src/library/scala/collection/immutable/DefaultMap.scala
index 4a0503a..e9b277b 100755
--- a/src/library/scala/collection/immutable/DefaultMap.scala
+++ b/src/library/scala/collection/immutable/DefaultMap.scala
@@ -6,13 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package immutable
 
-import generic._
-
 /** A default map which implements the `+` and `-`
  *  methods of maps. It does so using the default builder for
  *  maps defined in the `Map` object.
@@ -42,24 +39,14 @@ trait DefaultMap[A, +B] extends Map[A, B] { self =>
     val b = Map.newBuilder[A, B1]
     b ++= this
     b += ((kv._1, kv._2))
-    b.result
+    b.result()
   }
 
   /** A default implementation which creates a new immutable map.
    */
   override def - (key: A): Map[A, B] = {
     val b = newBuilder
-    for (kv <- this.seq ; if kv._1 != key) b += kv
-    b.result
+    for (kv <- this ; if kv._1 != key) b += kv
+    b.result()
   }
 }
-
-
-
-
-
-
-
-
-
-
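
DefaultMap asks only for get and iterator, and rebuilds a plain immutable Map for `+` and `-`. A minimal sketch of a read-only map implemented this way (SquareMap is an illustration-only name):

    import scala.collection.immutable.DefaultMap

    // A tiny map view over a fixed key range: only the two abstract members
    // required by DefaultMap are provided; + and - come from the trait.
    class SquareMap(n: Int) extends DefaultMap[Int, Int] {
      def get(key: Int): Option[Int] =
        if (key >= 0 && key < n) Some(key * key) else None
      def iterator: Iterator[(Int, Int)] =
        Iterator.range(0, n).map(k => (k, k * k))
    }

    object SquareMapDemo extends App {
      val m = new SquareMap(4)
      println(m(3))            // 9
      println(m + (10 -> 100)) // + falls back to building a plain immutable Map
    }
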
diff --git a/src/library/scala/collection/immutable/GenIterable.scala.disabled b/src/library/scala/collection/immutable/GenIterable.scala.disabled
deleted file mode 100644
index d34f7fd..0000000
--- a/src/library/scala/collection/immutable/GenIterable.scala.disabled
+++ /dev/null
@@ -1,37 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.collection
-package immutable
-
-
-import generic._
-import mutable.Builder
-
-
-/** A base trait for iterable collections that can be mutated.
- *
- *  $possiblyparinfo
- *  
- *  $iterableInfo
- */
-trait GenIterable[+A] extends GenTraversable[A] 
-                      with scala.collection.GenIterable[A] 
-                      with scala.collection.GenIterableLike[A, GenIterable[A]]
-//                      with GenericTraversableTemplate[A, GenIterable]
-{
-  def seq: Iterable[A]
-  //override def companion: GenericCompanion[GenIterable] = GenIterable
-}
-
-
-// object GenIterable extends TraversableFactory[GenIterable] {
-//   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-//   def newBuilder[A]: Builder[A, GenIterable[A]] = Iterable.newBuilder
-// }
-
diff --git a/src/library/scala/collection/immutable/GenMap.scala.disabled b/src/library/scala/collection/immutable/GenMap.scala.disabled
deleted file mode 100644
index 73557a4..0000000
--- a/src/library/scala/collection/immutable/GenMap.scala.disabled
+++ /dev/null
@@ -1,36 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.collection
-package immutable
-
-import generic._
-
-
-/** A base trait for maps that can be mutated.
- *  $possiblyparinfo
- *  $mapNote
- *  $mapTags
- *  @since 1.0
- *  @author  Matthias Zenger
- */
-trait GenMap[A, +B] 
-extends GenIterable[(A, B)]
-   with scala.collection.GenMap[A, B] 
-   with scala.collection.GenMapLike[A, B, GenMap[A, B]]
-{
-  def seq: Map[A, B]
-}
-
-
-// object GenMap extends MapFactory[GenMap] {
-//   def empty[A, B]: Map[A, B] = Map.empty
-  
-//   /** $mapCanBuildFromInfo */
-//   implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
-// }
diff --git a/src/library/scala/collection/immutable/GenSeq.scala.disabled b/src/library/scala/collection/immutable/GenSeq.scala.disabled
deleted file mode 100644
index 713529f..0000000
--- a/src/library/scala/collection/immutable/GenSeq.scala.disabled
+++ /dev/null
@@ -1,49 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.collection
-package immutable
-
-
-import generic._
-import mutable.Builder
-
-
-/** A subtrait of `collection.GenSeq` which represents sequences
- *  that can be mutated.
- *
- *  $possiblyparinfo
- *   
- *  $seqInfo
- * 
- *  The class adds an `update` method to `collection.Seq`.
- *  
- *  @define Coll `mutable.Seq`
- *  @define coll mutable sequence
- */
-trait GenSeq[+A] extends GenIterable[A] 
-                         with scala.collection.GenSeq[A] 
-                         with scala.collection.GenSeqLike[A, GenSeq[A]]
-//                         with GenericTraversableTemplate[A, GenSeq]
-{
-  def seq: Seq[A]
-  //override def companion: GenericCompanion[GenSeq] = GenSeq
-}
-
-
-// object GenSeq extends SeqFactory[GenSeq] {
-//   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-//   def newBuilder[A]: Builder[A, GenSeq[A]] = Seq.newBuilder
-// }
-
-
-
-
-
diff --git a/src/library/scala/collection/immutable/GenSet.scala.disabled b/src/library/scala/collection/immutable/GenSet.scala.disabled
deleted file mode 100644
index 56bd273..0000000
--- a/src/library/scala/collection/immutable/GenSet.scala.disabled
+++ /dev/null
@@ -1,43 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.collection
-package immutable
-
-
-import generic._
-import mutable.Builder
-
-
-/** A generic trait for mutable sets.
- *
- *  $possiblyparinfo
- *  $setNote
- *  $setTags
- *
- *  @since 1.0
- *  @author Matthias Zenger
- *  @define Coll `mutable.Set`
- *  @define coll mutable set
- */
-trait GenSet[A] extends GenIterable[A]
-                   with scala.collection.GenSet[A]
-                   with scala.collection.GenSetLike[A, GenSet[A]]
-//                   with GenericSetTemplate[A, GenSet]
-{
-  //override def companion: GenericCompanion[GenSet] = GenSet
-  def seq: Set[A]
-}
-
-
-// object GenSet extends TraversableFactory[GenSet] {
-//   implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-//   def newBuilder[A] = Set.newBuilder
-// }
diff --git a/src/library/scala/collection/immutable/GenTraversable.scala.disabled b/src/library/scala/collection/immutable/GenTraversable.scala.disabled
deleted file mode 100644
index e5b609f..0000000
--- a/src/library/scala/collection/immutable/GenTraversable.scala.disabled
+++ /dev/null
@@ -1,41 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.collection
-package immutable
-
-
-import generic._
-import mutable.Builder
-
-
-/** A trait for traversable collections that can be mutated.
- *
- *  $possiblyparinfo
- * 
- *  $traversableInfo
- *  @define mutability mutable
- */
-trait GenTraversable[+A] extends scala.collection.GenTraversable[A] 
-                            with scala.collection.GenTraversableLike[A, GenTraversable[A]]
-//                            with GenericTraversableTemplate[A, GenTraversable]
-                            with Mutable
-{
-  def seq: Traversable[A]
-  //override def companion: GenericCompanion[GenTraversable] = GenTraversable
-}
-
-
-// object GenTraversable extends TraversableFactory[GenTraversable] {
-//   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenTraversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-//   def newBuilder[A]: Builder[A, GenTraversable[A]] = Traversable.newBuilder
-// }
-
-
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 84416a6..3b3e65e 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -33,12 +33,15 @@ import parallel.immutable.ParHashMap
  *  @define willNotTerminateInf
  */
 @SerialVersionUID(2L)
+ at deprecatedInheritance("The implementation details of immutable hash maps make inheriting from them unwise.", "2.11.0")
 class HashMap[A, +B] extends AbstractMap[A, B]
                         with Map[A, B]
                         with MapLike[A, B, HashMap[A, B]]
                         with Serializable
                         with CustomParallelizable[(A, B), ParHashMap[A, B]]
 {
+  import HashMap.{nullToEmpty, bufferSize}
+
   override def size: Int = 0
 
   override def empty = HashMap.empty[A, B]
@@ -58,11 +61,22 @@ class HashMap[A, +B] extends AbstractMap[A, B]
 
   override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): HashMap[A, B1] =
     this + elem1 + elem2 ++ elems
-    // TODO: optimize (might be able to use mutable updates)
 
   def - (key: A): HashMap[A, B] =
     removed0(key, computeHash(key), 0)
 
+  override def filter(p: ((A, B)) => Boolean) = {
+    val buffer = new Array[HashMap[A, B]](bufferSize(size))
+    nullToEmpty(filter0(p, false, 0, buffer, 0))
+  }
+
+  override def filterNot(p: ((A, B)) => Boolean) = {
+    val buffer = new Array[HashMap[A, B]](bufferSize(size))
+    nullToEmpty(filter0(p, true, 0, buffer, 0))
+  }
+
+  protected def filter0(p: ((A, B)) => Boolean, negate: Boolean, level: Int, buffer: Array[HashMap[A, B @uV]], offset0: Int): HashMap[A, B] = null
+
   protected def elemHashCode(key: A) = key.##
 
   protected final def improve(hcode: Int) = {
@@ -87,9 +101,6 @@ class HashMap[A, +B] extends AbstractMap[A, B]
 
   def split: Seq[HashMap[A, B]] = Seq(this)
 
-  @deprecated("Use the `merged` method instead.", "2.10.0")
-  def merge[B1 >: B](that: HashMap[A, B1], mergef: MergeFunction[A, B1] = null): HashMap[A, B1] = merge0(that, 0, liftMerger(mergef))
-
   /** Creates a new map which is the merge of this and the argument hash map.
    *
    *  Uses the specified collision resolution function if two keys are the same.
@@ -170,8 +181,6 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
     }
   }
 
-  // TODO: add HashMap2, HashMap3, ...
-
   class HashMap1[A,+B](private[collection] val key: A, private[collection] val hash: Int, private[collection] val value: (B @uV), private[collection] var kv: (A,B @uV)) extends HashMap[A,B] {
     override def size = 1
 
@@ -182,21 +191,6 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
     override def get0(key: A, hash: Int, level: Int): Option[B] =
       if (hash == this.hash && key == this.key) Some(value) else None
 
-    // override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] =
-    //   if (hash == this.hash && key == this.key) new HashMap1(key, hash, value, kv)
-    //   else {
-    //     var thatindex = (hash >>> level) & 0x1f
-    //     var thisindex = (this.hash >>> level) & 0x1f
-    //     if (hash != this.hash) {
-    //       --new HashTrieMap[A,B1](level+5, this, new HashMap1(key, hash, value, kv))
-    //       val m = new HashTrieMap[A,B1](0,new Array[HashMap[A,B1]](0),0) // TODO: could save array alloc
-    //       m.updated0(this.key, this.hash, level, this.value, this.kv).updated0(key, hash, level, value, kv)  TODO and it will
-    //     } else {
-    //        32-bit hash collision (rare, but not impossible)
-    //       new HashMapCollision1(hash, ListMap.empty.updated(this.key,this.value).updated(key,value))
-    //     }
-    //   }
-
     private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
       if (hash == this.hash && key == this.key ) {
         if (merger eq null) {
@@ -220,6 +214,9 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
     override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] =
       if (hash == this.hash && key == this.key) HashMap.empty[A,B] else this
 
+    override protected def filter0(p: ((A, B)) => Boolean, negate: Boolean, level: Int, buffer: Array[HashMap[A, B @uV]], offset0: Int): HashMap[A, B] =
+      if (negate ^ p(ensurePair)) this else null
+
     override def iterator: Iterator[(A,B)] = Iterator(ensurePair)
     override def foreach[U](f: ((A, B)) => U): Unit = f(ensurePair)
     // this method may be called multiple times in a multithreaded environment, but that's ok
@@ -250,15 +247,34 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
     override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] =
       if (hash == this.hash) {
         val kvs1 = kvs - key
-        if (kvs1.isEmpty)
-          HashMap.empty[A,B]
-        else if(kvs1.tail.isEmpty) {
-          val kv = kvs1.head
-          new HashMap1[A,B](kv._1,hash,kv._2,kv)
-        } else
-          new HashMapCollision1(hash, kvs1)
+        kvs1.size match {
+          case 0 =>
+            HashMap.empty[A,B]
+          case 1 =>
+            val kv = kvs1.head
+            new HashMap1(kv._1,hash,kv._2,kv)
+          case x if x == kvs.size =>
+            this
+          case _ =>
+            new HashMapCollision1(hash, kvs1)
+        }
       } else this
 
+    override protected def filter0(p: ((A, B)) => Boolean, negate: Boolean, level: Int, buffer: Array[HashMap[A, B @uV]], offset0: Int): HashMap[A, B] = {
+      val kvs1 = if(negate) kvs.filterNot(p) else kvs.filter(p)
+      kvs1.size match {
+        case 0 =>
+          null
+        case 1 =>
+          val kv@(k,v) = kvs1.head
+          new HashMap1(k, hash, v, kv)
+        case x if x == kvs.size =>
+          this
+        case _ =>
+          new HashMapCollision1(hash, kvs1)
+      }
+    }
+
     override def iterator: Iterator[(A,B)] = kvs.iterator
     override def foreach[U](f: ((A, B)) => U): Unit = kvs.foreach(f)
     override def split: Seq[HashMap[A, B]] = {
@@ -283,24 +299,6 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
     // assert(Integer.bitCount(bitmap) == elems.length)
     // assert(elems.length > 1 || (elems.length == 1 && elems(0).isInstanceOf[HashTrieMap[_,_]]))
 
-/*
-    def this (level: Int, m1: HashMap1[A,B], m2: HashMap1[A,B]) = {
-      this(((m1.hash >>> level) & 0x1f) | ((m2.hash >>> level) & 0x1f), {
-        val idx1 = (m1.hash >>> level) & 0x1f
-        val idx2 = (m2.hash >>> level) & 0x1f
-        assert(idx1 != idx2, m1.hash + "==" + m2.hash + " at level " + level) // TODO
-        val elems = new Array[HashMap[A,B]](2)
-        if (idx1 < idx2) {
-          elems(0) = m1
-          elems(1) = m2
-        } else {
-          elems(0) = m2
-          elems(1) = m1
-        }
-        elems
-      }, 2)
-    }
-*/
     override def size = size0
 
     override def get0(key: A, hash: Int, level: Int): Option[B] = {
@@ -310,7 +308,6 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
         elems(index & 0x1f).get0(key, hash, level + 5)
       } else if ((bitmap & mask) != 0) {
         val offset = Integer.bitCount(bitmap & (mask-1))
-        // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
         elems(offset).get0(key, hash, level + 5)
       } else
         None
@@ -322,7 +319,6 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
       val offset = Integer.bitCount(bitmap & (mask-1))
       if ((bitmap & mask) != 0) {
         val sub = elems(offset)
-        // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
         val subNew = sub.updated0(key, hash, level + 5, value, kv, merger)
         if(subNew eq sub) this else {
           val elemsNew = new Array[HashMap[A,B1]](elems.length)
@@ -345,7 +341,6 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
       val offset = Integer.bitCount(bitmap & (mask-1))
       if ((bitmap & mask) != 0) {
         val sub = elems(offset)
-        // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
         val subNew = sub.removed0(key, hash, level + 5)
         if (subNew eq sub) this
         else if (subNew.isEmpty) {
@@ -355,6 +350,8 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
             Array.copy(elems, 0, elemsNew, 0, offset)
             Array.copy(elems, offset + 1, elemsNew, offset, elems.length - offset - 1)
             val sizeNew = size - sub.size
+            // if we have only one child, which is not a HashTrieMap but a self-contained map like
+            // HashMap1 or HashMapCollision1, return the child instead
             if (elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieMap[_,_]])
               elemsNew(0)
             else
@@ -375,30 +372,58 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
       }
     }
 
+    override protected def filter0(p: ((A, B)) => Boolean, negate: Boolean, level: Int, buffer: Array[HashMap[A, B @uV]], offset0: Int): HashMap[A, B] = {
+      // current offset
+      var offset = offset0
+      // result size
+      var rs = 0
+      // bitmap for kept elems
+      var kept = 0
+      // loop over all elements
+      var i = 0
+      while (i < elems.length) {
+        val result = elems(i).filter0(p, negate, level + 5, buffer, offset)
+        if (result ne null) {
+          buffer(offset) = result
+          offset += 1
+          // add the result size
+          rs += result.size
+          // mark the bit i as kept
+          kept |= (1 << i)
+        }
+        i += 1
+      }
+      if (offset == offset0) {
+        // empty
+        null
+      } else if (rs == size0) {
+        // unchanged
+        this
+      } else if (offset == offset0 + 1 && !buffer(offset0).isInstanceOf[HashTrieMap[A, B]]) {
+        // leaf
+        buffer(offset0)
+      } else {
+        // we have to return a HashTrieMap
+        val length = offset - offset0
+        val elems1 = new Array[HashMap[A, B]](length)
+        System.arraycopy(buffer, offset0, elems1, 0, length)
+        val bitmap1 = if (length == elems.length) {
+          // we can reuse the original bitmap
+          bitmap
+        } else {
+          // calculate new bitmap by keeping just bits in the kept bitmask
+          keepBits(bitmap, kept)
+        }
+        new HashTrieMap(bitmap1, elems1, rs)
+      }
+    }
+
     override def iterator: Iterator[(A, B)] = new TrieIterator[(A, B)](elems.asInstanceOf[Array[Iterable[(A, B)]]]) {
       final override def getElem(cc: AnyRef): (A, B) = cc.asInstanceOf[HashMap1[A, B]].ensurePair
     }
 
-/*
-def time(block: =>Unit) = { val t0 = System.nanoTime; block; println("elapsed: " + (System.nanoTime - t0)/1000000.0) }
-var mOld = OldHashMap.empty[Int,Int]
-var mNew = HashMap.empty[Int,Int]
-time { for (i <- 0 until 100000) mOld = mOld.updated(i,i) }
-time { for (i <- 0 until 100000) mOld = mOld.updated(i,i) }
-time { for (i <- 0 until 100000) mOld = mOld.updated(i,i) }
-time { for (i <- 0 until 100000) mNew = mNew.updated(i,i) }
-time { for (i <- 0 until 100000) mNew = mNew.updated(i,i) }
-time { for (i <- 0 until 100000) mNew = mNew.updated(i,i) }
-time { mOld.iterator.foreach( p => ()) }
-time { mOld.iterator.foreach( p => ()) }
-time { mOld.iterator.foreach( p => ()) }
-time { mNew.iterator.foreach( p => ()) }
-time { mNew.iterator.foreach( p => ()) }
-time { mNew.iterator.foreach( p => ()) }
-*/
-
     override def foreach[U](f: ((A, B)) =>  U): Unit = {
-      var i = 0;
+      var i = 0
       while (i < elems.length) {
         elems(i).foreach(f)
         i += 1
@@ -471,9 +496,6 @@ time { mNew.iterator.foreach( p => ()) }
             // condition below is due to 2 things:
             // 1) no unsigned int compare on JVM
             // 2) 0 (no lsb) should always be greater in comparison
-            val a = thislsb - 1
-            val b = thatlsb - 1
-
             if (unsignedCompare(thislsb - 1, thatlsb - 1)) {
               val m = thiselems(thisi)
               totalelems += m.size
@@ -499,6 +521,50 @@ time { mNew.iterator.foreach( p => ()) }
     }
   }
 
+  /**
+   * Calculates the maximum buffer size given the maximum possible total size of the trie-based collection
+   * @param size the maximum size of the collection to be generated
+   * @return the maximum buffer size
+   */
+  @inline private def bufferSize(size: Int): Int = (size + 6) min (32 * 7)
+
+  /**
+   * In many internal operations the empty map is represented as null for performance reasons. This method converts
+   * null to the empty map for use in public methods
+   */
+  @inline private def nullToEmpty[A, B](m: HashMap[A, B]): HashMap[A, B] = if (m eq null) empty[A, B] else m
+
+  /**
+   * Utility method to keep a subset of all bits in a given bitmap
+   *
+   * Example
+   *    bitmap (binary): 00000001000000010000000100000001
+   *    keep (binary):                               1010
+   *    result (binary): 00000001000000000000000100000000
+   *
+   * @param bitmap the bitmap
+   * @param keep a bitmask containing which bits to keep
+   * @return the original bitmap with all bits where keep is not 1 set to 0
+   */
+  private def keepBits(bitmap: Int, keep: Int): Int = {
+    var result = 0
+    var current = bitmap
+    var kept = keep
+    while (kept != 0) {
+      // lowest remaining bit in current
+      val lsb = current ^ (current & (current - 1))
+      if ((kept & 1) != 0) {
+        // mark bit in result bitmap
+        result |= lsb
+      }
+      // clear the lowest remaining one bit in current
+      current &= ~lsb
+      // look at the next kept bit
+      kept >>>= 1
+    }
+    result
+  }
+
   @SerialVersionUID(2L)
   private class SerializationProxy[A,B](@transient private var orig: HashMap[A, B]) extends Serializable {
     private def writeObject(out: java.io.ObjectOutputStream) {
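
keepBits above prunes a trie node's bitmap after filtering. The same algorithm, restated as a standalone program together with the example from its doc comment (KeepBitsDemo is an illustration-only name):

    object KeepBitsDemo extends App {
      // Walk the 1-bits of `bitmap` from least significant upwards and keep the
      // i-th of them iff bit i of `keep` is set, exactly as in HashMap.keepBits.
      def keepBits(bitmap: Int, keep: Int): Int = {
        var result  = 0
        var current = bitmap
        var kept    = keep
        while (kept != 0) {
          val lsb = current ^ (current & (current - 1)) // lowest remaining 1-bit
          if ((kept & 1) != 0) result |= lsb            // this child survives
          current &= ~lsb                               // consume that bit
          kept >>>= 1                                   // next child
        }
        result
      }

      // The example from the doc comment: keep only the 2nd and 4th set bits.
      val bitmap = Integer.parseInt("00000001000000010000000100000001", 2)
      val result = keepBits(bitmap, Integer.parseInt("1010", 2))
      println(Integer.toBinaryString(result)) // 1000000000000000100000000
    }
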
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 87995f7..726937e 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -12,9 +12,10 @@ package scala
 package collection
 package immutable
 
-import scala.annotation.unchecked.{ uncheckedVariance => uV }
 import generic._
 import scala.collection.parallel.immutable.ParHashSet
+import scala.collection.GenSet
+import scala.annotation.tailrec
 
 /** This class implements immutable sets using a hash trie.
  *
@@ -30,6 +31,7 @@ import scala.collection.parallel.immutable.ParHashSet
  *  @define coll immutable hash set
  */
 @SerialVersionUID(2L)
+ at deprecatedInheritance("The implementation details of immutable hash sets make inheriting from them unwise.", "2.11.0")
 class HashSet[A] extends AbstractSet[A]
                     with Set[A]
                     with GenericSetTemplate[A, HashSet]
@@ -37,6 +39,8 @@ class HashSet[A] extends AbstractSet[A]
                     with CustomParallelizable[A, ParHashSet[A]]
                     with Serializable
 {
+  import HashSet.{nullToEmpty, bufferSize, LeafHashSet}
+
   override def companion: GenericCompanion[HashSet] = HashSet
 
   //class HashSet[A] extends Set[A] with SetLike[A, HashSet[A]] {
@@ -53,14 +57,122 @@ class HashSet[A] extends AbstractSet[A]
 
   def contains(e: A): Boolean = get0(e, computeHash(e), 0)
 
+  override def subsetOf(that: GenSet[A]) = that match {
+    case that:HashSet[A] =>
+      // call the specialized implementation with a level of 0 since both this and that are top-level hash sets
+      subsetOf0(that, 0)
+    case _ =>
+      // call the generic implementation
+      super.subsetOf(that)
+  }
+
+  /**
+   * A specialized implementation of subsetOf for when both this and that are HashSet[A] and we can take advantage
+   * of the tree structure of both operands and the precalculated hashcodes of the HashSet1 instances.
+   * @param that the other set
+   * @param level the level of this and that hashset
+   *              The purpose of level is to keep track of how deep we are in the tree.
+   *              We need this information for when we arrive at a leaf and have to call get0 on that
+   *              The value of level is 0 for a top-level HashSet and grows in increments of 5
+   * @return true if all elements of this set are contained in that set
+   */
+  protected def subsetOf0(that: HashSet[A], level: Int) = {
+    // The default implementation is for the empty set and returns true because the empty set is a subset of all sets
+    true
+  }
+
   override def + (e: A): HashSet[A] = updated0(e, computeHash(e), 0)
 
   override def + (elem1: A, elem2: A, elems: A*): HashSet[A] =
     this + elem1 + elem2 ++ elems
-    // TODO: optimize (might be able to use mutable updates)
+
+  override def union(that: GenSet[A]): HashSet[A] = that match {
+    case that: HashSet[A] =>
+      val buffer = new Array[HashSet[A]](bufferSize(this.size + that.size))
+      nullToEmpty(union0(that, 0, buffer, 0))
+    case _ => super.union(that)
+  }
+
+  override def intersect(that: GenSet[A]): HashSet[A] = that match {
+    case that: HashSet[A] =>
+      val buffer = new Array[HashSet[A]](bufferSize(this.size min that.size))
+      nullToEmpty(intersect0(that, 0, buffer, 0))
+    case _ => super.intersect(that)
+  }
+
+  override def diff(that: GenSet[A]): HashSet[A] = that match {
+    case that: HashSet[A] =>
+      val buffer = new Array[HashSet[A]](bufferSize(this.size))
+      nullToEmpty(diff0(that, 0, buffer, 0))
+    case _ => super.diff(that)
+  }
+
+  /**
+   * Union with a leaf HashSet at a given level.
+   * @param that a leaf HashSet
+   * @param level the depth in the tree. We need this when we have to create a branch node on top of this and that
+   * @return The union of this and that at the given level. Unless level is zero, the result is not a self-contained
+   *         HashSet but needs to be stored at the correct depth
+   */
+  private[immutable] def union0(that: LeafHashSet[A], level: Int): HashSet[A] = {
+    // the default implementation is for the empty set, so we just return that
+    that
+  }
+
+  /**
+   * Union with a HashSet at a given level
+   * @param that a HashSet
+   * @param level the depth in the tree. We need to keep track of the level to know how deep we are in the tree
+   * @param buffer a temporary buffer that is used for temporarily storing elements when creating new branch nodes
+   * @param offset0 the first offset into the buffer in which we are allowed to write
+   * @return The union of this and that at the given level. Unless level is zero, the result is not a self-contained
+   *         HashSet but needs to be stored at the correct depth
+   */
+  private[immutable] def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = {
+    // the default implementation is for the empty set, so we just return that
+    that
+  }
+
+  /**
+   * Intersection with another hash set at a given level
+   * @param level the depth in the tree. We need to keep track of the level to know how deep we are in the tree
+   * @param buffer a temporary buffer that is used for temporarily storing elements when creating new branch nodes
+   * @param offset0 the first offset into the buffer in which we are allowed to write
+   * @return The intersection of this and that at the given level. Unless level is zero, the result is not a
+   *         self-contained HashSet but needs to be stored at the correct depth
+   */
+  private[immutable] def intersect0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = {
+    // the default implementation is for the empty set, so we just return the empty set
+    null
+  }
+
+  /**
+   * Diff with another hash set at a given level
+   * @param level the depth in the tree. We need to keep track of the level to know how deep we are in the tree
+   * @param buffer a temporary buffer that is used for temporarily storing elements when creating new branch nodes
+   * @param offset0 the first offset into the buffer in which we are allowed to write
+   * @return The diff of this and that at the given level. Unless level is zero, the result is not a
+   *         self-contained HashSet but needs to be stored at the correct depth
+   */
+  private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = {
+    // the default implementation is for the empty set, so we just return the empty set
+    null
+  }
 
   def - (e: A): HashSet[A] =
-    removed0(e, computeHash(e), 0)
+    nullToEmpty(removed0(e, computeHash(e), 0))
+
+  override def filter(p: A => Boolean) = {
+    val buffer = new Array[HashSet[A]](bufferSize(size))
+    nullToEmpty(filter0(p, false, 0, buffer, 0))
+  }
+
+  override def filterNot(p: A => Boolean) = {
+    val buffer = new Array[HashSet[A]](bufferSize(size))
+    nullToEmpty(filter0(p, true, 0, buffer, 0))
+  }
+
+  protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = null
 
   protected def elemHashCode(key: A) = key.##
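
From user code these fast paths are reached through the ordinary Set operations; the overrides above only change what happens when both operands are hash tries. A small usage sketch:

    import scala.collection.immutable.HashSet

    object HashSetAlgebraDemo extends App {
      val a = HashSet(1, 2, 3, 4)
      val b = HashSet(3, 4, 5)
      // both operands are HashSets, so the trie-walking union0 / intersect0 /
      // diff0 / subsetOf0 implementations are used under the hood
      println(a union b)                 // all of 1..5, in unspecified order
      println(a intersect b)             // 3 and 4
      println(a diff b)                  // 1 and 2
      println(HashSet(3, 4).subsetOf(a)) // true
    }
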
 
@@ -99,10 +211,10 @@ object HashSet extends ImmutableSetFactory[HashSet] {
 
   /** $setCanBuildFromInfo */
   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = setCanBuildFrom[A]
-  override def empty[A]: HashSet[A] = EmptyHashSet.asInstanceOf[HashSet[A]]
 
   private object EmptyHashSet extends HashSet[Any] { }
-
+  private[collection] def emptyInstance: HashSet[Any] = EmptyHashSet
+  
   // utility method to create a HashTrieSet from two leaf HashSets (HashSet1 or HashSetCollision1) with non-colliding hash codes
   private def makeHashTrieSet[A](hash0:Int, elem0:HashSet[A], hash1:Int, elem1:HashSet[A], level:Int) : HashTrieSet[A] = {
     val index0 = (hash0 >>> level) & 0x1f
@@ -127,14 +239,27 @@ object HashSet extends ImmutableSetFactory[HashSet] {
     }
   }
 
-  // TODO: add HashSet2, HashSet3, ...
+  /**
+   * Common superclass of HashSet1 and HashSetCollision1, which are the two possible leaves of the Trie
+   */
+  private[HashSet] sealed abstract class LeafHashSet[A] extends HashSet[A] {
+    private[HashSet] def hash:Int
+  }
 
-  class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends HashSet[A] {
+  class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends LeafHashSet[A] {
     override def size = 1
 
     override def get0(key: A, hash: Int, level: Int): Boolean =
       (hash == this.hash && key == this.key)
 
+    override def subsetOf0(that: HashSet[A], level: Int) = {
+      // check if that contains this.key
+      // we use get0 with our key and hash at the correct level instead of calling contains,
+      // which would not work since that might not be a top-level HashSet
+      // and in any case would be inefficient because it would require recalculating the hash code
+      that.get0(key, hash, level)
+    }
+
     override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
       if (hash == this.hash && key == this.key) this
       else {
@@ -146,35 +271,191 @@ object HashSet extends ImmutableSetFactory[HashSet] {
         }
       }
 
+    override private[immutable] def union0(that: LeafHashSet[A], level: Int): HashSet[A] = that match {
+      case that if that.hash != this.hash =>
+        // different hash code, so there is no need to investigate further.
+        // Just create a branch node containing the two.
+        makeHashTrieSet(this.hash, this, that.hash, that, level)
+      case that: HashSet1[A] =>
+        if (this.key == that.key) {
+          this
+        } else {
+          // 32-bit hash collision (rare, but not impossible)
+          new HashSetCollision1[A](hash, ListSet.empty + this.key + that.key)
+        }
+      case that: HashSetCollision1[A] =>
+        val ks1 = that.ks + key
+        // Could use eq check (faster) if ListSet was guaranteed to return itself
+        if (ks1.size == that.ks.size) {
+          that
+        } else {
+          new HashSetCollision1[A](hash, ks1)
+        }
+    }
+
+    override private[immutable] def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int) = {
+      // switch to the Leaf version of union
+      // we can exchange the arguments because union is symmetrical
+      that.union0(this, level)
+    }
+
+    override private[immutable] def intersect0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] =
+      if (that.get0(key, hash, level)) this else null
+
+    override private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] =
+      if (that.get0(key, hash, level)) null else this
+
     override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
-      if (hash == this.hash && key == this.key) HashSet.empty[A] else this
+      if (hash == this.hash && key == this.key) null else this
+
+    override protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] =
+      if (negate ^ p(key)) this else null
 
     override def iterator: Iterator[A] = Iterator(key)
     override def foreach[U](f: A => U): Unit = f(key)
   }
 
-  private[immutable] class HashSetCollision1[A](private[HashSet] val hash: Int, val ks: ListSet[A])
-            extends HashSet[A] {
+  private[immutable] class HashSetCollision1[A](private[HashSet] val hash: Int, val ks: ListSet[A]) extends LeafHashSet[A] {
 
     override def size = ks.size
 
     override def get0(key: A, hash: Int, level: Int): Boolean =
       if (hash == this.hash) ks.contains(key) else false
 
+    override def subsetOf0(that: HashSet[A], level: Int) = {
+      // we have to check each element
+      // we use get0 with our hash at the correct level instead of calling contains,
+      // which would not work since that might not be a top-level HashSet
+      // and in any case would be inefficient because it would require recalculating the hash code
+      ks.forall(key => that.get0(key, hash, level))
+    }
+
     override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
       if (hash == this.hash) new HashSetCollision1(hash, ks + key)
       else makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level)
 
+    override def union0(that: LeafHashSet[A], level: Int): HashSet[A] = that match {
+      case that if that.hash != this.hash =>
+        // different hash code, so there is no need to investigate further.
+        // Just create a branch node containing the two.
+        makeHashTrieSet(this.hash, this, that.hash, that, level)
+      case that: HashSet1[A] =>
+        val ks1 = ks + that.key
+        // Could use eq check (faster) if ListSet was guaranteed to return itself
+        if (ks1.size == ks.size) {
+          this
+        } else {
+          // create a new HashSetCollision with the existing hash
+          // we don't have to check for size=1 because union is never going to remove elements
+          new HashSetCollision1[A](hash, ks1)
+        }
+      case that: HashSetCollision1[A] =>
+        val ks1 = this.ks ++ that.ks
+        ks1.size match {
+          case size if size == this.ks.size =>
+            // could this check be made faster by doing an eq check?
+            // I am not sure we can rely on ListSet returning itself when all elements are already in the set,
+            // so it seems unwise to rely on it.
+            this
+          case size if size == that.ks.size =>
+            // we have to check this as well, since we don't want to create a new instance if this is a subset of that
+            that
+          case _ =>
+            // create a new HashSetCollision with the existing hash
+            // we don't have to check for size=1 because union is never going to remove elements
+            new HashSetCollision1[A](hash, ks1)
+        }
+    }
+
+    override def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match {
+      case that: LeafHashSet[A] =>
+        // switch to the simpler Tree/Leaf implementation
+        this.union0(that, level)
+      case that: HashTrieSet[A] =>
+        // switch to the simpler Tree/Leaf implementation
+        // we can swap this and that because union is symmetrical
+        that.union0(this, level)
+      case _ => this
+    }
+
+    override private[immutable] def intersect0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = {
+      // filter the keys, taking advantage of the fact that we know their hash code
+      val ks1 = ks.filter(that.get0(_, hash, level))
+      ks1.size match {
+        case 0 =>
+          // the empty set
+          null
+        case size if size == this.size =>
+          // unchanged
+          // We do this check first because, even if the result is of size 1,
+          // it is preferable to return the existing set for better structural sharing
+          this
+        case size if size == that.size =>
+          // the other set
+          // We do this check first because, even if the result is of size 1,
+          // it is preferable to return the existing set for better structural sharing
+          that
+        case 1 =>
+          // create a new HashSet1 with the hash we already know
+          new HashSet1(ks1.head, hash)
+        case _ =>
+          // create a new HashSetCollision1 with the hash we already know and the new keys
+          new HashSetCollision1(hash, ks1)
+      }
+    }
+
+    override private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = {
+      val ks1 = ks.filterNot(that.get0(_, hash, level))
+      ks1.size match {
+        case 0 =>
+          // the empty set
+          null
+        case size if size == this.size =>
+          // unchanged
+          // We do this check first because, even if the result is of size 1,
+          // it is preferable to return the existing set for better structural sharing
+          this
+        case 1 =>
+          // create a new HashSet1 with the hash we already know
+          new HashSet1(ks1.head, hash)
+        case _ =>
+          // create a new HashSetCollision1 with the hash we already know and the new keys
+          new HashSetCollision1(hash, ks1)
+      }
+    }
+
     override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
       if (hash == this.hash) {
         val ks1 = ks - key
-        if(ks1.isEmpty)
-          HashSet.empty[A]
-        else if(ks1.tail.isEmpty)
+        ks1.size match {
+          case 0 =>
+            // the empty set
+            null
+          case 1 =>
+            // create a new HashSet1 with the hash we already know
+            new HashSet1(ks1.head, hash)
+          case size if size == ks.size =>
+            // Should only have HSC1 if size > 1
+            this
+          case _ =>
+            // create a new HashSetCollision1 with the hash we already know and the new keys
+            new HashSetCollision1(hash, ks1)
+        }
+      } else this
+
+    override protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = {
+      val ks1 = if(negate) ks.filterNot(p) else ks.filter(p)
+      ks1.size match {
+        case 0 =>
+          null
+        case 1 =>
           new HashSet1(ks1.head, hash)
-        else
+        case x if x == ks.size =>
+          this
+        case _ =>
           new HashSetCollision1(hash, ks1)
-      } else this
+      }
+    }
 
     override def iterator: Iterator[A] = ks.iterator
     override def foreach[U](f: A => U): Unit = ks.foreach(f)
@@ -196,6 +477,42 @@ object HashSet extends ImmutableSetFactory[HashSet] {
 
   }
 
+  /**
+   * A branch node of the HashTrieSet with at least one and up to 32 children.
+   *
+   * @param bitmap encodes which element corresponds to which child
+   * @param elems the up to 32 children of this node.
+   *              the number of children must be identical to the number of 1 bits in bitmap
+   * @param size0 the total number of elements. This is stored just for performance reasons.
+   * @tparam A      the type of the elements contained in this hash set.
+   *
+   * How levels work:
+   *
+   * When looking up or adding elements, the part of the hashcode that is used to address the children array depends
+   * on how deep we are in the tree. This is accomplished by having a level parameter in all internal methods
+   * that starts at 0 and increases by 5 (32 = 2^5) every time we go deeper into the tree.
+   *
+   * hashcode (binary): 00000000000000000000000000000000
+   * level=0 (depth=0)                             ^^^^^
+   * level=5 (depth=1)                        ^^^^^
+   * level=10 (depth=2)                  ^^^^^
+   * ...
+   *
+   * Be careful: a non-toplevel HashTrieSet is not a self-contained set, so e.g. calling contains on it will not work!
+   * It relies on its depth in the Trie to know which part of a hash to use to address the children, but this information
+   * (the level) is not stored, for storage efficiency; it has to be passed in explicitly!
+   *
+   * How bitmap and elems correspond:
+   *
+   * A naive implementation of a HashTrieSet would always have an array of size 32 for children and leave the unused
+   * children empty (null). But that would be very wasteful regarding memory. Instead, only non-empty children are
+   * stored in elems, and the bitmap is used to encode which elem corresponds to which child bucket. The lowest 1 bit
+   * corresponds to the first element, the second-lowest to the second, etc.
+   *
+   * bitmap (binary): 00010000000000000000100000000000
+   * elems: [a,b]
+   * children:        ---b----------------a-----------
+   */
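For readers following the comment above, a minimal standalone sketch of the addressing scheme it describes; the object and method names (TrieAddressingSketch, slot, physicalIndex) are illustrative only and not part of the library:

    object TrieAddressingSketch {
      // five bits of the hash select one of 32 logical child slots at the given level
      def slot(hash: Int, level: Int): Int = (hash >>> level) & 0x1f

      // the physical index into elems is the number of 1 bits in bitmap below the slot's mask
      def physicalIndex(bitmap: Int, hash: Int, level: Int): Int =
        Integer.bitCount(bitmap & ((1 << slot(hash, level)) - 1))

      def main(args: Array[String]): Unit = {
        // the two-children example from the comment: bits 11 and 28 set, elems = [a, b]
        val bitmap = (1 << 11) | (1 << 28)
        println(physicalIndex(bitmap, hash = 11, level = 0)) // 0, i.e. elems(0) = a
        println(physicalIndex(bitmap, hash = 28, level = 0)) // 1, i.e. elems(1) = b
      }
    }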
   class HashTrieSet[A](private val bitmap: Int, private[collection] val elems: Array[HashSet[A]], private val size0: Int)
         extends HashSet[A] {
     assert(Integer.bitCount(bitmap) == elems.length)
@@ -211,7 +528,6 @@ object HashSet extends ImmutableSetFactory[HashSet] {
         elems(index & 0x1f).get0(key, hash, level + 5)
       } else if ((bitmap & mask) != 0) {
         val offset = Integer.bitCount(bitmap & (mask-1))
-        // TODO: might be worth checking if sub is HashTrieSet (-> monomorphic call site)
         elems(offset).get0(key, hash, level + 5)
       } else
         false
@@ -222,7 +538,6 @@ object HashSet extends ImmutableSetFactory[HashSet] {
       val mask = (1 << index)
       val offset = Integer.bitCount(bitmap & (mask-1))
       if ((bitmap & mask) != 0) {
-        // TODO: might be worth checking if sub is HashTrieSet (-> monomorphic call site)
         val sub = elems(offset)
         val subNew = sub.updated0(key, hash, level + 5)
         if (sub eq subNew) this
@@ -242,16 +557,293 @@ object HashSet extends ImmutableSetFactory[HashSet] {
       }
     }
 
+    override private[immutable] def union0(that: LeafHashSet[A], level: Int): HashSet[A] = {
+      val index = (that.hash >>> level) & 0x1f
+      val mask = (1 << index)
+      val offset = Integer.bitCount(bitmap & (mask - 1))
+      if ((bitmap & mask) != 0) {
+        val sub = elems(offset)
+        val sub1 = sub.union0(that, level + 5)
+        if (sub eq sub1) this
+        else {
+          val elems1 = new Array[HashSet[A]](elems.length)
+          Array.copy(elems, 0, elems1, 0, elems.length)
+          elems1(offset) = sub1
+          new HashTrieSet(bitmap, elems1, size + (sub1.size - sub.size))
+        }
+      } else {
+        val elems1 = new Array[HashSet[A]](elems.length + 1)
+        Array.copy(elems, 0, elems1, 0, offset)
+        elems1(offset) = that
+        Array.copy(elems, offset, elems1, offset + 1, elems.length - offset)
+        val bitmap1 = bitmap | mask
+        new HashTrieSet(bitmap1, elems1, size + that.size)
+      }
+    }
+
+    override private[immutable] def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match {
+      case that if that eq this =>
+        // shortcut for when that is this
+        // this happens often for nodes deeper in the tree, especially when that and this share a common "heritage"
+        // e.g. you have a large set A and do some small operations (adding and removing elements) to it to create B
+        // then A and B will have the vast majority of nodes in common, and this eq check will allow not even looking
+        // at these nodes.
+        this
+      case that: LeafHashSet[A] =>
+        // when that is a leaf, we can switch to the simpler Tree/Leaf implementation
+        this.union0(that, level)
+      case that: HashTrieSet[A] =>
+        val a = this.elems
+        var abm = this.bitmap
+        var ai = 0
+
+        val b = that.elems
+        var bbm = that.bitmap
+        var bi = 0
+
+        // use the pre-allocated temporary buffer, which is guaranteed to have room for this level (32 elements)
+        var offset = offset0
+        var rs = 0
+
+        // loop as long as there are bits left in either abm or bbm
+        while ((abm | bbm) != 0) {
+          // lowest remaining bit in abm
+          val alsb = abm ^ (abm & (abm - 1))
+          // lowest remaining bit in bbm
+          val blsb = bbm ^ (bbm & (bbm - 1))
+          if (alsb == blsb) {
+            val sub1 = a(ai).union0(b(bi), level + 5, buffer, offset)
+            rs += sub1.size
+            buffer(offset) = sub1
+            offset += 1
+            // clear lowest remaining one bit in abm and increase the a index
+            abm &= ~alsb
+            ai += 1
+            // clear lowest remaining one bit in bbm and increase the b index
+            bbm &= ~blsb
+            bi += 1
+          } else if (unsignedCompare(alsb - 1, blsb - 1)) {
+            // alsb is smaller than blsb, or alsb is set and blsb is 0
+            // in any case, alsb is guaranteed to be set here!
+            val sub1 = a(ai)
+            rs += sub1.size
+            buffer(offset) = sub1
+            offset += 1
+            // clear lowest remaining one bit in abm and increase the a index
+            abm &= ~alsb
+            ai += 1
+          } else {
+            // blsb is smaller than alsb, or blsb is set and alsb is 0
+            // in any case, blsb is guaranteed to be set here!
+            val sub1 = b(bi)
+            rs += sub1.size
+            buffer(offset) = sub1
+            offset += 1
+            // clear lowest remaining one bit in bbm and increase the b index
+            bbm &= ~blsb
+            bi += 1
+          }
+        }
+        if (rs == this.size) {
+          // if the result would be identical to this, we might as well return this
+          this
+        } else if (rs == that.size) {
+          // if the result would be identical to that, we might as well return that
+          that
+        } else {
+          // we don't have to check whether the result is a leaf, since union will only make the set larger
+          // and this is not a leaf to begin with.
+          val length = offset - offset0
+          val elems = new Array[HashSet[A]](length)
+          System.arraycopy(buffer, offset0, elems, 0, length)
+          new HashTrieSet(this.bitmap | that.bitmap, elems, rs)
+        }
+      case _ => this
+    }
+
+    override private[immutable] def intersect0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match {
+      case that if that eq this =>
+        // shortcut for when that is this
+        // this happens often for nodes deeper in the tree, especially when that and this share a common "heritage"
+        // e.g. you have a large set A and do some small operations (adding and removing elements) to it to create B
+        // then A and B will have the vast majority of nodes in common, and this eq check will allow not even looking
+        // at these nodes!
+        this
+      case that: LeafHashSet[A] =>
+        // when that is a leaf, we can switch to the simpler Tree/Leaf implementation
+        // it is OK to swap the arguments because intersect is symmetric
+        // (we can't do this in case of diff, which is not symmetric)
+        that.intersect0(this, level, buffer, offset0)
+      case that: HashTrieSet[A] =>
+        val a = this.elems
+        var abm = this.bitmap
+        var ai = 0
+
+        val b = that.elems
+        var bbm = that.bitmap
+        var bi = 0
+
+        // if the bitmasks do not overlap, the result is definitely empty so we can abort here
+        if ((abm & bbm) == 0)
+          return null
+
+        // use the pre-allocated temporary buffer, which is guaranteed to have room for this level (32 elements)
+        var offset = offset0
+        var rs = 0
+        var rbm = 0
+
+        // loop as long as there are bits left that are set in both abm and bbm
+        while ((abm & bbm) != 0) {
+          // lowest remaining bit in abm
+          val alsb = abm ^ (abm & (abm - 1))
+          // lowest remaining bit in bbm
+          val blsb = bbm ^ (bbm & (bbm - 1))
+          if (alsb == blsb) {
+            val sub1 = a(ai).intersect0(b(bi), level + 5, buffer, offset)
+            if (sub1 ne null) {
+              rs += sub1.size
+              rbm |= alsb
+              buffer(offset) = sub1
+              offset += 1
+            }
+            // clear lowest remaining one bit in abm and increase the a index
+            abm &= ~alsb;
+            ai += 1
+            // clear lowest remaining one bit in bbm and increase the b index
+            bbm &= ~blsb;
+            bi += 1
+          } else if (unsignedCompare(alsb - 1, blsb - 1)) {
+            // alsb is smaller than blsb, or alsb is set and blsb is 0
+            // in any case, alsb is guaranteed to be set here!
+            // clear lowest remaining one bit in abm and increase the a index
+            abm &= ~alsb;
+            ai += 1
+          } else {
+            // blsb is smaller than alsb, or blsb is set and alsb is 0
+            // in any case, blsb is guaranteed to be set here!
+            // clear lowest remaining one bit in bbm and increase the b index
+            bbm &= ~blsb;
+            bi += 1
+          }
+        }
+
+        if (rbm == 0) {
+          // if the result bitmap is empty, the result is the empty set
+          null
+        } else if (rs == size0) {
+          // if the result has the same number of elements as this, it must be identical to this,
+          // so we might as well return this
+          this
+        } else if (rs == that.size0) {
+          // if the result has the same number of elements as that, it must be identical to that,
+          // so we might as well return that
+          that
+        } else {
+          val length = offset - offset0
+          if (length == 1 && !buffer(offset0).isInstanceOf[HashTrieSet[A]])
+            buffer(offset0)
+          else {
+            val elems = new Array[HashSet[A]](length)
+            System.arraycopy(buffer, offset0, elems, 0, length)
+            new HashTrieSet[A](rbm, elems, rs)
+          }
+        }
+      case _ => null
+    }
+
+    override private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match {
+      case that if that eq this =>
+        // shortcut for when that is this
+        // this happens often for nodes deeper in the tree, especially when that and this share a common "heritage"
+        // e.g. you have a large set A and do some small operations (adding and removing elements) to it to create B
+        // then A and B will have the vast majority of nodes in common, and this eq check will allow not even looking
+        // at these nodes!
+        null
+      case that: HashSet1[A] =>
+        removed0(that.key, that.hash, level)
+      case that: HashTrieSet[A] =>
+        val a = this.elems
+        var abm = this.bitmap
+        var ai = 0
+
+        val b = that.elems
+        var bbm = that.bitmap
+        var bi = 0
+
+        // use the pre-allocated temporary buffer, which is guaranteed to have room for this level (32 elements)
+        var offset = offset0
+        var rs = 0
+        var rbm = 0
+
+        // loop until there are no more bits in abm
+        while(abm!=0) {
+          // lowest remaining bit in abm
+          val alsb = abm ^ (abm & (abm - 1))
+          // lowest remaining bit in bbm
+          val blsb = bbm ^ (bbm & (bbm - 1))
+          if (alsb == blsb) {
+            val sub1 = a(ai).diff0(b(bi), level + 5, buffer, offset)
+            if (sub1 ne null) {
+              rs += sub1.size
+              rbm |= alsb
+              buffer(offset) = sub1
+              offset += 1
+            }
+            // clear lowest remaining one bit in abm and increase the a index
+            abm &= ~alsb; ai += 1
+            // clear lowest remaining one bit in bbm and increase the b index
+            bbm &= ~blsb; bi += 1
+          } else if (unsignedCompare(alsb - 1, blsb - 1)) {
+            // alsb is smaller than blsb, or alsb is set and blsb is 0
+            // in any case, alsb is guaranteed to be set here!
+            val sub1 = a(ai)
+            rs += sub1.size
+            rbm |= alsb
+            buffer(offset) = sub1; offset += 1
+            // clear lowest remaining one bit in abm and increase the a index
+            abm &= ~alsb; ai += 1
+          } else {
+            // blsb is smaller than alsb, or blsb is set and alsb is 0
+            // in any case, blsb is guaranteed to be set here!
+            // clear lowest remaining one bit in bbm and increase the b index
+            bbm &= ~blsb; bi += 1
+          }
+        }
+        if (rbm == 0) {
+          null
+        } else if (rs == this.size0) {
+          // if the result has the same number of elements as this, it must be identical to this,
+          // so we might as well return this
+          this
+        } else {
+          val length = offset - offset0
+          if (length == 1 && !buffer(offset0).isInstanceOf[HashTrieSet[A]])
+            buffer(offset0)
+          else {
+            val elems = new Array[HashSet[A]](length)
+            System.arraycopy(buffer, offset0, elems, 0, length)
+            new HashTrieSet[A](rbm, elems, rs)
+          }
+        }
+      case that: HashSetCollision1[A] =>
+        // we remove the elements using removed0 so we can use the fact that we know the hash of all elements
+        // to be removed
+        @tailrec def removeAll(s:HashSet[A], r:ListSet[A]) : HashSet[A] =
+          if(r.isEmpty || (s eq null)) s
+          else removeAll(s.removed0(r.head, that.hash, level), r.tail)
+        removeAll(this, that.ks)
+      case _ => this
+    }
+
     override def removed0(key: A, hash: Int, level: Int): HashSet[A] = {
       val index = (hash >>> level) & 0x1f
       val mask = (1 << index)
       val offset = Integer.bitCount(bitmap & (mask-1))
       if ((bitmap & mask) != 0) {
         val sub = elems(offset)
-        // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
         val subNew = sub.removed0(key, hash, level + 5)
         if (sub eq subNew) this
-        else if (subNew.isEmpty) {
+        else if (subNew eq null) {
           val bitmapNew = bitmap ^ mask
           if (bitmapNew != 0) {
             val elemsNew = new Array[HashSet[A]](elems.length - 1)
@@ -265,7 +857,9 @@ object HashSet extends ImmutableSetFactory[HashSet] {
             else
               new HashTrieSet(bitmapNew, elemsNew, sizeNew)
           } else
-            HashSet.empty[A]
+            null
+        } else if(elems.length == 1 && !subNew.isInstanceOf[HashTrieSet[_]]) {
+          subNew
         } else {
           val elemsNew = new Array[HashSet[A]](elems.length)
           Array.copy(elems, 0, elemsNew, 0, elems.length)
@@ -278,30 +872,101 @@ object HashSet extends ImmutableSetFactory[HashSet] {
       }
     }
 
+    override def subsetOf0(that: HashSet[A], level: Int): Boolean = if (that eq this) true else that match {
+      case that: HashTrieSet[A] if this.size0 <= that.size0 =>
+        // create local mutable copies of members
+        var abm = this.bitmap
+        val a = this.elems
+        var ai = 0
+        val b = that.elems
+        var bbm = that.bitmap
+        var bi = 0
+        if ((abm & bbm) == abm) {
+          // I tried rewriting this using tail recursion, but the generated java byte code was less than optimal
+          while(abm!=0) {
+            // lowest remaining bit in abm
+            val alsb = abm ^ (abm & (abm - 1))
+            // lowest remaining bit in bbm
+            val blsb = bbm ^ (bbm & (bbm - 1))
+            // if both trees have a bit set at the same position, we need to check the subtrees
+            if (alsb == blsb) {
+              // we are doing a comparison of a child of this with a child of that,
+              // so we have to increase the level by 5 to keep track of how deep we are in the tree
+              if (!a(ai).subsetOf0(b(bi), level + 5))
+                return false
+              // clear lowest remaining one bit in abm and increase the a index
+              abm &= ~alsb; ai += 1
+            }
+            // clear lowermost remaining one bit in bbm and increase the b index
+            // we must do this in any case
+            bbm &= ~blsb; bi += 1
+          }
+          true
+        } else {
+          // the bitmap of this has at least one bit set that is not set in the bitmap of that,
+          // so this cannot possibly be a subset of that
+          false
+        }
+      case _ =>
+        // if the other set is a HashTrieSet but has fewer elements than this, it cannot be a subset
+        // if the other set is a HashSet1, we cannot be a subset of it because we are a HashTrieSet with at least two children (see assertion)
+        // if the other set is a HashSetCollision1, we cannot be a subset of it because we are a HashTrieSet with at least two different hash codes
+        // if the other set is the empty set, we are not a subset of it because we are not empty
+        false
+    }
+
+    override protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = {
+      // current offset
+      var offset = offset0
+      // result size
+      var rs = 0
+      // bitmap for kept elems
+      var kept = 0
+      // loop over all elements
+      var i = 0
+      while (i < elems.length) {
+        val result = elems(i).filter0(p, negate, level + 5, buffer, offset)
+        if (result ne null) {
+          buffer(offset) = result
+          offset += 1
+          // add the result size
+          rs += result.size
+          // mark the bit i as kept
+          kept |= (1 << i)
+        }
+        i += 1
+      }
+      if (offset == offset0) {
+        // empty
+        null
+      } else if (rs == size0) {
+        // unchanged
+        this
+      } else if (offset == offset0 + 1 && !buffer(offset0).isInstanceOf[HashTrieSet[A]]) {
+        // leaf
+        buffer(offset0)
+      } else {
+        // we have to return a HashTrieSet
+        val length = offset - offset0
+        val elems1 = new Array[HashSet[A]](length)
+        System.arraycopy(buffer, offset0, elems1, 0, length)
+        val bitmap1 = if (length == elems.length) {
+          // we can reuse the original bitmap
+          bitmap
+        } else {
+          // calculate new bitmap by keeping just bits in the kept bitmask
+          keepBits(bitmap, kept)
+        }
+        new HashTrieSet(bitmap1, elems1, rs)
+      }
+    }
+
     override def iterator = new TrieIterator[A](elems.asInstanceOf[Array[Iterable[A]]]) {
       final override def getElem(cc: AnyRef): A = cc.asInstanceOf[HashSet1[A]].key
     }
-/*
-
-def time(block: =>Unit) = { val t0 = System.nanoTime; block; println("elapsed: " + (System.nanoTime - t0)/1000000.0) }
-var mOld = OldHashSet.empty[Int]
-var mNew = HashSet.empty[Int]
-time { for (i <- 0 until 100000) mOld = mOld + i }
-time { for (i <- 0 until 100000) mOld = mOld + i }
-time { for (i <- 0 until 100000) mOld = mOld + i }
-time { for (i <- 0 until 100000) mNew = mNew + i }
-time { for (i <- 0 until 100000) mNew = mNew + i }
-time { for (i <- 0 until 100000) mNew = mNew + i }
-time { mOld.iterator.foreach( p => ()) }
-time { mOld.iterator.foreach( p => ()) }
-time { mOld.iterator.foreach( p => ()) }
-time { mNew.iterator.foreach( p => ()) }
-time { mNew.iterator.foreach( p => ()) }
-time { mNew.iterator.foreach( p => ()) }
-
-*/
+
     override def foreach[U](f: A =>  U): Unit = {
-      var i = 0;
+      var i = 0
       while (i < elems.length) {
         elems(i).foreach(f)
         i += 1
@@ -309,6 +974,54 @@ time { mNew.iterator.foreach( p => ()) }
     }
   }
 
+  /**
+   * Calculates the maximum buffer size given the maximum possible total size of the trie-based collection
+   * @param size the maximum size of the collection to be generated
+   * @return the maximum buffer size
+   */
+  @inline private def bufferSize(size: Int): Int = (size + 6) min (32 * 7)
+
+  /**
+   * In many internal operations the empty set is represented as null for performance reasons. This method converts
+   * null to the empty set for use in public methods
+   */
+  @inline private def nullToEmpty[A](s: HashSet[A]): HashSet[A] = if (s eq null) empty[A] else s
+
+  /**
+   * Utility method to keep a subset of all bits in a given bitmap
+   *
+   * Example
+   *    bitmap (binary): 00000001000000010000000100000001
+   *    keep (binary):                               1010
+   *    result (binary): 00000001000000000000000100000000
+   *
+   * @param bitmap the bitmap
+   * @param keep a bitmask containing which bits to keep
+   * @return the original bitmap with all bits where keep is not 1 set to 0
+   */
+  private def keepBits(bitmap: Int, keep: Int): Int = {
+    var result = 0
+    var current = bitmap
+    var kept = keep
+    while (kept != 0) {
+      // lowest remaining bit in current
+      val lsb = current ^ (current & (current - 1))
+      if ((kept & 1) != 0) {
+        // mark bit in result bitmap
+        result |= lsb
+      }
+      // clear lowest remaining one bit in abm
+      current &= ~lsb
+      // look at the next kept bit
+      kept >>>= 1
+    }
+    result
+  }
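As a quick check of the worked example in the comment above, the following sketch duplicates the body of the private keepBits helper in a standalone object (the names are arbitrary):

    object KeepBitsSketch {
      def keepBits(bitmap: Int, keep: Int): Int = {
        var result = 0
        var current = bitmap
        var kept = keep
        while (kept != 0) {
          val lsb = current ^ (current & (current - 1)) // lowest remaining bit in current
          if ((kept & 1) != 0) result |= lsb            // keep it if the matching keep bit is set
          current &= ~lsb
          kept >>>= 1
        }
        result
      }

      def main(args: Array[String]): Unit = {
        val bitmap = Integer.parseInt("00000001000000010000000100000001", 2)
        val keep   = Integer.parseInt("1010", 2)
        // prints 1000000000000000100000000: only the second and fourth lowest set bits survive
        println(Integer.toBinaryString(keepBits(bitmap, keep)))
      }
    }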
+
+  // unsigned comparison
+  @inline private[this] def unsignedCompare(i: Int, j: Int) =
+    (i < j) ^ (i < 0) ^ (j < 0)
+
   @SerialVersionUID(2L) private class SerializationProxy[A,B](@transient private var orig: HashSet[A]) extends Serializable {
     private def writeObject(out: java.io.ObjectOutputStream) {
       val s = orig.size
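Taken together, the eq shortcuts and the size checks in the methods above mean the optimized set operations frequently hand back an existing instance instead of rebuilding it. A small sketch of the observable effect, assuming the public union/intersect/diff/subsetOf overrides added earlier in this file delegate to the level-0 methods shown here:

    import scala.collection.immutable.HashSet

    object StructuralSharingSketch {
      def main(args: Array[String]): Unit = {
        val a = HashSet(1 to 1000: _*)
        println((a union a) eq a)                 // expected: true
        println((a intersect a) eq a)             // expected: true
        println((a diff HashSet.empty[Int]) eq a) // expected: true
        println(HashSet(1, 2, 3) subsetOf a)      // expected: true
      }
    }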
diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala
index 96414c0..06a44b2 100644
--- a/src/library/scala/collection/immutable/IndexedSeq.scala
+++ b/src/library/scala/collection/immutable/IndexedSeq.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -22,6 +23,12 @@ trait IndexedSeq[+A] extends Seq[A]
                     with GenericTraversableTemplate[A, IndexedSeq]
                     with IndexedSeqLike[A, IndexedSeq[A]] {
   override def companion: GenericCompanion[IndexedSeq] = IndexedSeq
+  
+  /** Returns this $coll as an indexed sequence.
+   *  
+   *  A new indexed sequence will not be built; lazy collections will stay lazy.
+   */
+  @deprecatedOverriding("Immutable indexed sequences should do nothing on toIndexedSeq except cast themselves as an indexed sequence.", "2.11.0")
   override def toIndexedSeq: IndexedSeq[A] = this
   override def seq: IndexedSeq[A] = this
 }
@@ -31,14 +38,13 @@ trait IndexedSeq[+A] extends Seq[A]
  *  @define coll indexed sequence
  *  @define Coll `IndexedSeq`
  */
-object IndexedSeq extends SeqFactory[IndexedSeq] {
-  override lazy val ReusableCBF  = 
-      scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
+object IndexedSeq extends IndexedSeqFactory[IndexedSeq] {
   class Impl[A](buf: ArrayBuffer[A]) extends AbstractSeq[A] with IndexedSeq[A] with Serializable {
     def length = buf.length
     def apply(idx: Int) = buf.apply(idx)
   }
   def newBuilder[A]: Builder[A, IndexedSeq[A]] = Vector.newBuilder[A]
+
   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] =
     ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
 }
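A tiny sketch of the behaviour the new deprecation documents: an immutable indexed sequence is expected to return itself from toIndexedSeq, so no copy is made (the object name is arbitrary):

    object ToIndexedSeqSketch {
      def main(args: Array[String]): Unit = {
        val v = Vector(1, 2, 3)
        println(v.toIndexedSeq eq v) // expected: true, the vector is returned as-is
      }
    }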
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index ab1faf3..8991d0b 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -12,6 +12,7 @@ package immutable
 
 import scala.collection.generic.{ CanBuildFrom, BitOperations }
 import scala.collection.mutable.{ Builder, MapBuilder }
+import scala.annotation.tailrec
 
 /** Utility class for integer maps.
  *  @author David MacIver
@@ -50,8 +51,10 @@ object IntMap {
     def apply(): Builder[(Int, B), IntMap[B]] = new MapBuilder[Int, B, IntMap[B]](empty[B])
   }
 
-  def empty[T] : IntMap[T]  = IntMap.Nil;
-  def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value);
+  def empty[T] : IntMap[T]  = IntMap.Nil
+
+  def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value)
+
   def apply[T](elems: (Int, T)*): IntMap[T] =
     elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2))
 
@@ -210,7 +213,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T]
   }
 
   /**
-   * Loop over the keys of the map. The same as `keys.foreach(f)`, but may
+   * Loop over the values of the map. The same as `values.foreach(f)`, but may
    * be more efficient.
    *
    * @param f The loop body
@@ -427,6 +430,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T]
   /**
    * The entry with the lowest key value considered in unsigned order.
    */
+  @tailrec
   final def firstKey: Int = this match {
     case Bin(_, _, l, r) => l.firstKey
     case Tip(k, v) => k
@@ -436,6 +440,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T]
   /**
    * The entry with the highest key value considered in unsigned order.
    */
+  @tailrec
   final def lastKey: Int = this match {
     case Bin(_, _, l, r) => r.lastKey
     case Tip(k, v) => k
diff --git a/src/library/scala/collection/immutable/Iterable.scala b/src/library/scala/collection/immutable/Iterable.scala
index cc64d8f..6e4eb1e 100644
--- a/src/library/scala/collection/immutable/Iterable.scala
+++ b/src/library/scala/collection/immutable/Iterable.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
diff --git a/src/library/scala/collection/immutable/LinearSeq.scala b/src/library/scala/collection/immutable/LinearSeq.scala
index 5ede6d9..2109bd5 100644
--- a/src/library/scala/collection/immutable/LinearSeq.scala
+++ b/src/library/scala/collection/immutable/LinearSeq.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 2d6952f..930e13a 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -85,7 +84,8 @@ sealed abstract class List[+A] extends AbstractSeq[A]
                                   with LinearSeq[A]
                                   with Product
                                   with GenericTraversableTemplate[A, List]
-                                  with LinearSeqOptimized[A, List[A]] {
+                                  with LinearSeqOptimized[A, List[A]]
+                                  with Serializable {
   override def companion: GenericCompanion[List] = List
 
   import scala.collection.{Iterable, Traversable, Seq, IndexedSeq}
@@ -158,7 +158,9 @@ sealed abstract class List[+A] extends AbstractSeq[A]
    *  @usecase def mapConserve(f: A => A): List[A]
    *    @inheritdoc
    */
-  def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = {
+  @inline final def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = {
+    // Note to developers: there exists a duplication between this function and `reflect.internal.util.Collections#map2Conserve`.
+    // If any successful optimization attempts or other changes are made, please rehash them there too.
     @tailrec
     def loop(mapped: ListBuffer[B], unchanged: List[A], pending: List[A]): List[B] =
       if (pending.isEmpty) {
@@ -201,17 +203,19 @@ sealed abstract class List[+A] extends AbstractSeq[A]
 
   override def toList: List[A] = this
 
-  override def take(n: Int): List[A] = {
-    val b = new ListBuffer[A]
-    var i = 0
-    var these = this
-    while (!these.isEmpty && i < n) {
+  override def take(n: Int): List[A] = if (isEmpty || n <= 0) Nil else {
+    val h = new ::(head, Nil)
+    var t = h
+    var rest = tail
+    var i = 1
+    while ({if (rest.isEmpty) return this; i < n}) {
       i += 1
-      b += these.head
-      these = these.tail
+      val nx = new ::(rest.head, Nil)
+      t.tl = nx
+      t = nx
+      rest = rest.tail
     }
-    if (these.isEmpty) this
-    else b.toList
+    h
   }
 
   override def drop(n: Int): List[A] = {
@@ -262,8 +266,87 @@ sealed abstract class List[+A] extends AbstractSeq[A]
     }
     (b.toList, these)
   }
+  
+  @noinline // TODO - fix optimizer bug that requires noinline (see SI-8334)
+  final override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That = {
+    if (bf eq List.ReusableCBF) {
+      if (this eq Nil) Nil.asInstanceOf[That] else {
+        val h = new ::[B](f(head), Nil)
+        var t: ::[B] = h
+        var rest = tail
+        while (rest ne Nil) {
+          val nx = new ::(f(rest.head), Nil)
+          t.tl = nx
+          t = nx
+          rest = rest.tail
+        }
+        h.asInstanceOf[That]
+      }
+    }
+    else super.map(f)
+  }
+  
+  @noinline // TODO - fix optimizer bug that requires noinline for map; applied here to be safe (see SI-8334)
+  final override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
+    if (bf eq List.ReusableCBF) {
+      if (this eq Nil) Nil.asInstanceOf[That] else {
+        var rest = this
+        var h: ::[B] = null
+        var x: A = null.asInstanceOf[A]
+        // Special case for first element
+        do {
+          val x: Any = pf.applyOrElse(rest.head, List.partialNotApplied)
+          if (x.asInstanceOf[AnyRef] ne List.partialNotApplied) h = new ::(x.asInstanceOf[B], Nil)
+          rest = rest.tail
+          if (rest eq Nil) return (if (h eq null ) Nil else h).asInstanceOf[That]
+        } while (h eq null)
+        var t = h
+        // Remaining elements
+        do {
+          val x: Any = pf.applyOrElse(rest.head, List.partialNotApplied)
+          if (x.asInstanceOf[AnyRef] ne List.partialNotApplied) {
+            val nx = new ::(x.asInstanceOf[B], Nil)
+            t.tl = nx
+            t = nx
+          }
+          rest = rest.tail
+        } while (rest ne Nil)
+        h.asInstanceOf[That]
+      }
+    }
+    else super.collect(pf)
+  }
+  
+  @noinline // TODO - fix optimizer bug that requires noinline for map; applied here to be safe (see SI-8334)
+  final override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
+    if (bf eq List.ReusableCBF) {
+      if (this eq Nil) Nil.asInstanceOf[That] else {
+        var rest = this
+        var found = false
+        var h: ::[B] = null
+        var t: ::[B] = null
+        while (rest ne Nil) {
+          f(rest.head).foreach{ b =>
+            if (!found) {
+              h = new ::(b, Nil)
+              t = h
+              found = true
+            }
+            else {
+              val nx = new ::(b, Nil)
+              t.tl = nx
+              t = nx
+            }
+          }
+          rest = rest.tail
+        }
+        (if (!found) Nil else h).asInstanceOf[That]
+      }
+    }
+    else super.flatMap(f)
+  }
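For illustration, a short sketch that exercises the fast paths above; with the default builder the ReusableCBF branch is taken and the results are built directly from cons cells (the object name is arbitrary):

    object ListFastPathSketch {
      def main(args: Array[String]): Unit = {
        val xs = List(1, 2, 3)
        println(xs.map(_ * 2))                            // List(2, 4, 6)
        println(xs.collect { case x if x % 2 == 0 => x }) // List(2)
        println(xs.flatMap(x => List(x, x)))              // List(1, 1, 2, 2, 3, 3)
      }
    }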
 
-  override def takeWhile(p: A => Boolean): List[A] = {
+  @inline final override def takeWhile(p: A => Boolean): List[A] = {
     val b = new ListBuffer[A]
     var these = this
     while (!these.isEmpty && p(these.head)) {
@@ -273,7 +356,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
     b.toList
   }
 
-  override def dropWhile(p: A => Boolean): List[A] = {
+  @inline final override def dropWhile(p: A => Boolean): List[A] = {
     @tailrec
     def loop(xs: List[A]): List[A] =
       if (xs.isEmpty || !p(xs.head)) xs
@@ -282,7 +365,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
     loop(this)
   }
 
-  override def span(p: A => Boolean): (List[A], List[A]) = {
+  @inline final override def span(p: A => Boolean): (List[A], List[A]) = {
     val b = new ListBuffer[A]
     var these = this
     while (!these.isEmpty && p(these.head)) {
@@ -292,6 +375,16 @@ sealed abstract class List[+A] extends AbstractSeq[A]
     (b.toList, these)
   }
 
+  // Overridden with an implementation identical to the inherited one (at this time)
+  // solely so it can be finalized and thus inlinable.
+  @inline final override def foreach[U](f: A => U) {
+    var these = this
+    while (!these.isEmpty) {
+      f(these.head)
+      these = these.tail
+    }
+  }
+
   override def reverse: List[A] = {
     var result: List[A] = Nil
     var these = this
@@ -301,7 +394,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
     }
     result
   }
-  
+
   override def foldRight[B](z: B)(op: (A, B) => B): B =
     reverse.foldLeft(z)((right, left) => op(left, right))
 
@@ -311,17 +404,9 @@ sealed abstract class List[+A] extends AbstractSeq[A]
     if (isEmpty) Stream.Empty
     else new Stream.Cons(head, tail.toStream)
 
-  @inline override final
-  def foreach[B](f: A => B) {
-    var these = this
-    while (!these.isEmpty) {
-      f(these.head)
-      these = these.tail
-    }
-  }
-
-  @deprecated("use `distinct` instead", "2.8.0")
-  def removeDuplicates: List[A] = distinct
+  // Create a proxy for Java serialization that allows us to avoid mutation
+  // during de-serialization.  This is the Serialization Proxy Pattern.
+  protected final def writeReplace(): AnyRef = new List.SerializationProxy(this)
 }
 
 /** The empty list.
@@ -352,33 +437,9 @@ case object Nil extends List[Nothing] {
  *  @version 1.0, 15/07/2003
  *  @since   2.8
  */
-@SerialVersionUID(0L - 8476791151983527571L)
-final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extends List[B] {
-  override def head : B = hd
+final case class ::[B](override val head: B, private[scala] var tl: List[B]) extends List[B] {
   override def tail : List[B] = tl
   override def isEmpty: Boolean = false
-
-  private def readObject(in: ObjectInputStream) {
-    val firstObject = in.readObject()
-    hd = firstObject.asInstanceOf[B]
-    assert(hd != ListSerializeEnd)
-    var current: ::[B] = this
-    while (true) in.readObject match {
-      case ListSerializeEnd =>
-        current.tl = Nil
-        return
-      case a =>
-        val list : ::[B] = new ::(a.asInstanceOf[B], Nil)
-        current.tl = list
-        current = list
-    }
-  }
-
-  private def writeObject(out: ObjectOutputStream) {
-    var xs: List[B] = this
-    while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
-    out.writeObject(ListSerializeEnd)
-  }
 }
 
 /** $factoryInfo
@@ -386,9 +447,6 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
  *  @define Coll `List`
  */
 object List extends SeqFactory[List] {
-
-  import scala.collection.{Iterable, Seq, IndexedSeq}
-
   /** $genericCanBuildFromInfo */
   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, List[A]] =
     ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
@@ -398,255 +456,39 @@ object List extends SeqFactory[List] {
   override def empty[A]: List[A] = Nil
 
   override def apply[A](xs: A*): List[A] = xs.toList
+  
+  private[collection] val partialNotApplied = new Function1[Any, Any] { def apply(x: Any): Any = this }
 
-  /** Create a sorted list with element values `v,,>n+1,, = step(v,,n,,)`
-   * where `v,,0,, = start` and elements are in the range between `start`
-   * (inclusive) and `end` (exclusive).
-   *
-   *  @param start the start value of the list
-   *  @param end  the end value of the list
-   *  @param step the increment function of the list, which given `v,,n,,`,
-   *              computes `v,,n+1,,`. Must be monotonically increasing
-   *              or decreasing.
-   *  @return     the sorted list of all integers in range `[start;end)`.
-   */
-  @deprecated("use `iterate` instead", "2.8.0")
-  def range(start: Int, end: Int, step: Int => Int): List[Int] = {
-    val up = step(start) > start
-    val down = step(start) < start
-    val b = new ListBuffer[Int]
-    var i = start
-    while ((!up || i < end) && (!down || i > end)) {
-      b += i
-      val next = step(i)
-      if (i == next)
-        throw new IllegalArgumentException("the step function did not make any progress on "+ i)
-      i = next
-    }
-    b.toList
-  }
+  @SerialVersionUID(1L)
+  private class SerializationProxy[A](@transient private var orig: List[A]) extends Serializable {
 
-  /** Create a list containing several copies of an element.
-   *
-   *  @param n    the length of the resulting list
-   *  @param elem the element composing the resulting list
-   *  @return     a list composed of `n` elements all equal to `elem`
-   */
-  @deprecated("use `fill` instead", "2.8.0")
-  def make[A](n: Int, elem: A): List[A] = {
-    val b = new ListBuffer[A]
-    var i = 0
-    while (i < n) {
-      b += elem
-      i += 1
-    }
-    b.toList
-  }
-
-  /** Concatenate all the elements of a given list of lists.
-   *
-   *  @param xss the list of lists that are to be concatenated
-   *  @return    the concatenation of all the lists
-   */
-  @deprecated("use `xss.flatten` instead of `List.flatten(xss)`", "2.8.0")
-  def flatten[A](xss: List[List[A]]): List[A] = {
-    val b = new ListBuffer[A]
-    for (xs <- xss) {
-      var xc = xs
-      while (!xc.isEmpty) {
-        b += xc.head
-        xc = xc.tail
+    private def writeObject(out: ObjectOutputStream) {
+      var xs: List[A] = orig
+      while (!xs.isEmpty) {
+        out.writeObject(xs.head)
+        xs = xs.tail
       }
+      out.writeObject(ListSerializeEnd)
     }
-    b.toList
-  }
 
-  /** Transforms a list of pairs into a pair of lists.
-   *
-   *  @param xs the list of pairs to unzip
-   *  @return a pair of lists.
-   */
-  @deprecated("use `xs.unzip` instead of `List.unzip(xs)`", "2.8.0")
-  def unzip[A,B](xs: List[(A,B)]): (List[A], List[B]) = {
-    val b1 = new ListBuffer[A]
-    val b2 = new ListBuffer[B]
-    var xc = xs
-    while (!xc.isEmpty) {
-      b1 += xc.head._1
-      b2 += xc.head._2
-      xc = xc.tail
-    }
-    (b1.toList, b2.toList)
-  }
-
-  /** Transforms an iterable of pairs into a pair of lists.
-   *
-   *  @param xs the iterable of pairs to unzip
-   *  @return a pair of lists.
-   */
-  @deprecated("use `xs.unzip` instead of `List.unzip(xs)`", "2.8.0")
-  def unzip[A,B](xs: Iterable[(A,B)]): (List[A], List[B]) =
-      xs.foldRight[(List[A], List[B])]((Nil, Nil)) {
-        case ((x, y), (xs, ys)) => (x :: xs, y :: ys)
+    // Java serialization calls this before readResolve during de-serialization.
+    // Read the whole list and store it in `orig`.
+    private def readObject(in: ObjectInputStream) {
+      val builder = List.newBuilder[A]
+      while (true) in.readObject match {
+        case ListSerializeEnd =>
+          orig = builder.result()
+          return
+        case a =>
+          builder += a.asInstanceOf[A]
       }
-
-  /**
-   * Returns the `Left` values in the given `Iterable` of `Either`s.
-   */
-  @deprecated("use `xs collect { case Left(x: A) => x }` instead of `List.lefts(xs)`", "2.8.0")
-  def lefts[A, B](es: Iterable[Either[A, B]]) =
-    es.foldRight[List[A]](Nil)((e, as) => e match {
-      case Left(a) => a :: as
-      case Right(_) => as
-    })
-
-  /**
-   * Returns the `Right` values in the given `Iterable` of  `Either`s.
-   */
-  @deprecated("use `xs collect { case Right(x: B) => x }` instead of `List.rights(xs)`", "2.8.0")
-  def rights[A, B](es: Iterable[Either[A, B]]) =
-    es.foldRight[List[B]](Nil)((e, bs) => e match {
-      case Left(_) => bs
-      case Right(b) => b :: bs
-    })
-
-  /** Transforms an Iterable of Eithers into a pair of lists.
-   *
-   *  @param es the iterable of Eithers to separate
-   *  @return a pair of lists.
-   */
-  @deprecated("use `(for (Left(x) <- es) yield x, for (Right(x) <- es) yield x)` instead", "2.8.0")
-  def separate[A,B](es: Iterable[Either[A, B]]): (List[A], List[B]) =
-    es.foldRight[(List[A], List[B])]((Nil, Nil)) {
-      case (Left(a), (lefts, rights)) => (a :: lefts, rights)
-      case (Right(b), (lefts, rights)) => (lefts, b :: rights)
     }
 
-  /** Converts an iterator to a list.
-   *
-   *  @param it the iterator to convert
-   *  @return   a list that contains the elements returned by successive
-   *            calls to `it.next`
-   */
-  @deprecated("use `it.toList` instead of `List.toList(it)`", "2.8.0")
-  def fromIterator[A](it: Iterator[A]): List[A] = it.toList
-
-  /** Converts an array into a list.
-   *
-   *  @param arr the array to convert
-   *  @return    a list that contains the same elements than `arr`
-   *             in the same order
-   */
-  @deprecated("use `array.toList` instead of `List.fromArray(array)`", "2.8.0")
-  def fromArray[A](arr: Array[A]): List[A] = fromArray(arr, 0, arr.length)
-
-  /** Converts a range of an array into a list.
-   *
-   *  @param arr   the array to convert
-   *  @param start the first index to consider
-   *  @param len   the length of the range to convert
-   *  @return      a list that contains the same elements than `arr`
-   *               in the same order
-   */
-  @deprecated("use `array.view(start, end).toList` instead of `List.fromArray(array, start, end)`", "2.8.0")
-  def fromArray[A](arr: Array[A], start: Int, len: Int): List[A] = {
-    var res: List[A] = Nil
-    var i = start + len
-    while (i > start) {
-      i -= 1
-      res = arr(i) :: res
-    }
-    res
-  }
-
-  /** Returns the list resulting from applying the given function `f`
-   *  to corresponding elements of the argument lists.
-   *
-   *  @param f function to apply to each pair of elements.
-   *  @return `[f(a,,0,,,b,,0,,), ..., f(a,,n,,,b,,n,,)]` if the lists are
-   *          `[a,,0,,, ..., a,,k,,]`, `[b,,0,,, ..., b,,l,,]` and
-   *          `n = min(k,l)`
-   */
-  @deprecated("use `(xs, ys).zipped.map(f)` instead of `List.map2(xs, ys)(f)`", "2.8.0")
-  def map2[A,B,C](xs: List[A], ys: List[B])(f: (A, B) => C): List[C] = {
-    val b = new ListBuffer[C]
-    var xc = xs
-    var yc = ys
-    while (!xc.isEmpty && !yc.isEmpty) {
-      b += f(xc.head, yc.head)
-      xc = xc.tail
-      yc = yc.tail
-    }
-    b.toList
-  }
-
-  /** Tests whether the given predicate `p` holds
-   *  for all corresponding elements of the argument lists.
-   *
-   *  @param f function to apply to each pair of elements.
-   *  @return  `(p(a<sub>0</sub>,b<sub>0</sub>) &&
-   *           ... && p(a<sub>n</sub>,b<sub>n</sub>))]`
-   *           if the lists are `[a<sub>0</sub>, ..., a<sub>k</sub>]`;
-   *           `[b<sub>0</sub>, ..., b<sub>l</sub>]`
-   *           and `n = min(k,l)`
-   */
-  @deprecated("use `(xs, ys).zipped.forall(f)` instead of `List.forall2(xs, ys)(f)`", "2.8.0")
-  def forall2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
-    var xc = xs
-    var yc = ys
-    while (!xc.isEmpty && !yc.isEmpty) {
-      if (!f(xc.head, yc.head)) return false
-      xc = xc.tail
-      yc = yc.tail
-    }
-    true
-  }
-
-  /** Tests whether the given predicate `p` holds
-   *  for some corresponding elements of the argument lists.
-   *
-   *  @param f function to apply to each pair of elements.
-   *  @return  `n != 0 && (p(a<sub>0</sub>,b<sub>0</sub>) ||
-   *           ... || p(a<sub>n</sub>,b<sub>n</sub>))]` if the lists are
-   *           `[a<sub>0</sub>, ..., a<sub>k</sub>]`,
-   *           `[b<sub>0</sub>, ..., b<sub>l</sub>]` and
-   *           `n = min(k,l)`
-   */
-  @deprecated("use `(xs, ys).zipped.exists(f)` instead of `List.exists2(xs, ys)(f)`", "2.8.0")
-  def exists2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
-    var xc = xs
-    var yc = ys
-    while (!xc.isEmpty && !yc.isEmpty) {
-      if (f(xc.head, yc.head)) return true
-      xc = xc.tail
-      yc = yc.tail
-    }
-    false
-  }
-
-  /** Transposes a list of lists.
-   *  pre: All element lists have the same length.
-   *
-   *  @param xss the list of lists
-   *  @return    the transposed list of lists
-   */
-  @deprecated("use `xss.transpose` instead of `List.transpose(xss)`", "2.8.0")
-  def transpose[A](xss: List[List[A]]): List[List[A]] = {
-    val buf = new ListBuffer[List[A]]
-    var yss = xss
-    while (!yss.head.isEmpty) {
-      buf += (yss map (_.head))
-      yss = (yss map (_.tail))
-    }
-    buf.toList
+    // Provide the result stored in `orig` for Java serialization
+    private def readResolve(): AnyRef = orig
   }
 }
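A minimal round-trip sketch of the serialization proxy pattern used above: writeReplace substitutes the proxy on the way out and readResolve restores a plain list on the way in, so no cons cell is mutated during de-serialization (the object name is arbitrary):

    import java.io._

    object ListSerializationSketch {
      def main(args: Array[String]): Unit = {
        val bytes = new ByteArrayOutputStream()
        val out = new ObjectOutputStream(bytes)
        out.writeObject(List(1, 2, 3))
        out.close()
        val in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
        println(in.readObject()) // List(1, 2, 3)
      }
    }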
 
 /** Only used for list serialization */
-@SerialVersionUID(0L - 8287891243975527522L)
-private[scala] case object ListSerializeStart
-
-/** Only used for list serialization */
 @SerialVersionUID(0L - 8476791151975527571L)
 private[scala] case object ListSerializeEnd
-
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 7581735..7c40e84 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -48,6 +49,7 @@ object ListMap extends ImmutableMapFactory[ListMap] {
  *  @define willNotTerminateInf
  */
 @SerialVersionUID(301002838095710379L)
+@deprecatedInheritance("The semantics of immutable collections makes inheriting from ListMap error-prone.", "2.11.0")
 class ListMap[A, +B]
 extends AbstractMap[A, B]
    with Map[A, B]
@@ -121,12 +123,12 @@ extends AbstractMap[A, B]
       def hasNext = !self.isEmpty
       def next(): (A,B) =
         if (!hasNext) throw new NoSuchElementException("next on empty iterator")
-        else { val res = (self.key, self.value); self = self.tail; res }
+        else { val res = (self.key, self.value); self = self.next; res }
     }.toList.reverseIterator
 
   protected def key: A = throw new NoSuchElementException("empty map")
   protected def value: B = throw new NoSuchElementException("empty map")
-  override def tail: ListMap[A, B] = throw new NoSuchElementException("empty map")
+  protected def next: ListMap[A, B] = throw new NoSuchElementException("empty map")
 
   /** This class represents an entry in the `ListMap`.
    */
@@ -140,7 +142,7 @@ extends AbstractMap[A, B]
     override def size: Int = size0(this, 0)
 
     // to allow tail recursion and prevent stack overflows
-    @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.tail, acc + 1)
+    @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.next, acc + 1)
 
     /** Is this an empty map?
      *
@@ -156,12 +158,12 @@ extends AbstractMap[A, B]
      *  @return     the value associated with the given key.
      */
     override def apply(k: A): B1 = apply0(this, k)
- 
-    
-    @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 = 
+
+
+    @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 =
       if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k)
       else if (k == cur.key) cur.value
-      else apply0(cur.tail, k)  
+      else apply0(cur.next, k)
 
     /** Checks if this map maps `key` to a value and return the
      *  value if it exists.
@@ -173,14 +175,14 @@ extends AbstractMap[A, B]
 
     @tailrec private def get0(cur: ListMap[A, B1], k: A): Option[B1] =
       if (k == cur.key) Some(cur.value)
-      else if (cur.tail.nonEmpty) get0(cur.tail, k) else None
+      else if (cur.next.nonEmpty) get0(cur.next, k) else None
 
     /** This method allows one to create a new map with an additional mapping
      *  from `key` to `value`. If the map contains already a mapping for `key`,
      *  it will be overridden by this function.
      */
     override def updated [B2 >: B1](k: A, v: B2): ListMap[A, B2] = {
-      val m = if (contains(k)) this - k else this
+      val m = this - k
       new m.Node[B2](k, v)
     }
 
@@ -188,33 +190,18 @@ extends AbstractMap[A, B]
      *  If the map does not contain a mapping for the given key, the
      *  method returns the same map.
      */
-    override def - (k: A): ListMap[A, B1] = {
-      // This definition used to result in stack overflows
-      // if (k == key)
-      //   next
-      // else {
-      //   val tail = next - k
-      //   if (tail eq next) this
-      //   else new tail.Node(key, value)
-      // }
-      // we use an imperative one instead (and use an auxiliary list to preserve order!):
-      var cur: ListMap[A, B1] = this
-      var lst: List[(A, B1)] = Nil
-      while (cur.nonEmpty) {
-        if (k != cur.key) lst ::= ((cur.key, cur.value))
-        cur = cur.tail
-      }
-      var acc = ListMap[A, B1]()
-      while (lst != Nil) {
-        val elem = lst.head
-        val stbl = acc
-        acc = new stbl.Node(elem._1, elem._2)
-        lst = lst.tail
-      }
-      acc
-    }
-
-
-    override def tail: ListMap[A, B1] = ListMap.this
+    override def - (k: A): ListMap[A, B1] = remove0(k, this, Nil)
+
+    @tailrec private def remove0(k: A, cur: ListMap[A, B1], acc: List[ListMap[A, B1]]): ListMap[A, B1] =
+      if (cur.isEmpty)
+        acc.last
+      else if (k == cur.key)
+        (cur.next /: acc) {
+          case (t, h) => val tt = t; new tt.Node(h.key, h.value) // SI-7459
+        }
+      else
+        remove0(k, cur.next, cur::acc)
+
+    override protected def next: ListMap[A, B1] = ListMap.this
   }
 }
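A short sketch of the simplified updated above: the old binding is always removed first, so re-mapping a key never leaves a stale entry behind (the object name is arbitrary):

    import scala.collection.immutable.ListMap

    object ListMapUpdatedSketch {
      def main(args: Array[String]): Unit = {
        val m  = ListMap("a" -> 1, "b" -> 2)
        val m2 = m + ("a" -> 3)
        println(m2.get("a")) // Some(3)
        println(m2.size)     // 2, the previous binding for "a" is gone
      }
    }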
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index 6cf6c42..1bb07eb 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -21,10 +22,11 @@ import mutable.{ ListBuffer, Builder }
 object ListSet extends ImmutableSetFactory[ListSet] {
   /** setCanBuildFromInfo */
   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListSet[A]] = setCanBuildFrom[A]
-  override def empty[A] = EmptyListSet.asInstanceOf[ListSet[A]]
+
   override def newBuilder[A]: Builder[A, ListSet[A]] = new ListSetBuilder[A]
 
   private object EmptyListSet extends ListSet[Any] { }
+  private[collection] def emptyInstance: ListSet[Any] = EmptyListSet
 
   /** A custom builder because forgetfully adding elements one at
    *  a time to a list backed set puts the "squared" in N^2.  There is a
@@ -63,6 +65,7 @@ object ListSet extends ImmutableSetFactory[ListSet] {
  *  @define mayNotTerminateInf
  *  @define willNotTerminateInf
  */
+@deprecatedInheritance("The semantics of immutable collections makes inheriting from ListSet error-prone.", "2.11.0")
 class ListSet[A] extends AbstractSet[A]
                     with Set[A]
                     with GenericSetTemplate[A, ListSet]
@@ -75,7 +78,7 @@ class ListSet[A] extends AbstractSet[A]
    *  @return number of set elements.
    */
   override def size: Int = 0
-  override def isEmpty: Boolean = true;
+  override def isEmpty: Boolean = true
 
   /** Checks if this set contains element `elem`.
    *
@@ -100,7 +103,7 @@ class ListSet[A] extends AbstractSet[A]
    */
   override def ++(xs: GenTraversableOnce[A]): ListSet[A] =
     if (xs.isEmpty) this
-    else (new ListSet.ListSetBuilder(this) ++= xs.seq).result
+    else (new ListSet.ListSetBuilder(this) ++= xs.seq).result()
 
   private[ListSet] def unchecked_+(e: A): ListSet[A] = new Node(e)
   private[ListSet] def unchecked_outer: ListSet[A] =
@@ -120,18 +123,18 @@ class ListSet[A] extends AbstractSet[A]
         that = that.tail
         res
       }
-      else Iterator.empty.next
+      else Iterator.empty.next()
   }
 
   /**
    *  @throws Predef.NoSuchElementException
    */
-  override def head: A = throw new NoSuchElementException("Set has no elements");
+  override def head: A = throw new NoSuchElementException("Set has no elements")
 
   /**
    *  @throws Predef.NoSuchElementException
    */
-  override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set");
+  override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set")
 
   override def stringPrefix = "ListSet"
 
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 2a29104..868c0c0 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -12,6 +12,7 @@ package immutable
 
 import scala.collection.generic.{ CanBuildFrom, BitOperations }
 import scala.collection.mutable.{ Builder, MapBuilder }
+import scala.annotation.tailrec
 
 /** Utility class for long maps.
  *  @author David MacIver
@@ -77,8 +78,6 @@ object LongMap {
   }
 }
 
-import LongMap._
-
 // Iterator over a non-empty LongMap.
 private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] {
 
@@ -98,7 +97,7 @@ private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends
     buffer(index) = x.asInstanceOf[AnyRef]
     index += 1
   }
-  push(it);
+  push(it)
 
   /**
    * What value do we assign to a tip?
@@ -179,7 +178,7 @@ extends AbstractMap[Long, T]
    */
   override final def foreach[U](f: ((Long, T)) =>  U): Unit = this match {
     case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
-    case LongMap.Tip(key, value) => f((key, value));
+    case LongMap.Tip(key, value) => f((key, value))
     case LongMap.Nil =>
   }
 
@@ -206,7 +205,7 @@ extends AbstractMap[Long, T]
   }
 
   /**
-   * Loop over the keys of the map. The same as keys.foreach(f), but may
+   * Loop over the values of the map. The same as values.foreach(f), but may
    * be more efficient.
    *
    * @param f The loop body
@@ -418,5 +417,20 @@ extends AbstractMap[Long, T]
 
   def ++[S >: T](that: LongMap[S]) =
     this.unionWith[S](that, (key, x, y) => y)
+
+  @tailrec
+  final def firstKey: Long = this match {
+    case LongMap.Bin(_, _, l, r) => l.firstKey
+    case LongMap.Tip(k, v) => k
+    case LongMap.Nil => sys.error("Empty set")
+  }
+
+  @tailrec
+  final def lastKey: Long = this match {
+    case LongMap.Bin(_, _, l, r) => r.lastKey
+    case LongMap.Tip(k , v) => k
+    case LongMap.Nil => sys.error("Empty set")
+  }
+
 }
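
The firstKey/lastKey methods added above descend the leftmost and rightmost spine of the LongMap trie with @tailrec, and call sys.error on an empty map. A small usage sketch; for the non-negative keys used here the leftmost key is also the numerically smallest (the demo object name is illustrative):

    import scala.collection.immutable.LongMap

    object LongMapKeyBounds extends App {
      val m = LongMap(5L -> "five", 1L -> "one", 9L -> "nine")

      // Tail-recursive descent along Bin nodes: left spine for firstKey,
      // right spine for lastKey, without a full traversal of the map.
      println(m.firstKey) // 1
      println(m.lastKey)  // 9

      // Both end in sys.error("Empty set") when the map has no entries:
      // LongMap.empty[String].firstKey  // would throw a RuntimeException
    }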
 
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index 2ebf503..5178d5a 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -31,6 +32,12 @@ trait Map[A, +B] extends Iterable[(A, B)]
                     with MapLike[A, B, Map[A, B]] { self =>
 
   override def empty: Map[A, B] = Map.empty
+
+  /** Returns this $coll as an immutable map.
+   *
+   *  A new map will not be built; lazy collections will stay lazy.
+   */
+  @deprecatedOverriding("Immutable maps should do nothing on toMap except return themselves cast as a map.",  "2.11.0")
   override def toMap[T, U](implicit ev: (A, B) <:< (T, U)): immutable.Map[T, U] =
     self.asInstanceOf[immutable.Map[T, U]]
 
@@ -51,7 +58,7 @@ trait Map[A, +B] extends Iterable[(A, B)]
    *
    *  Invoking transformer methods (e.g. `map`) will not preserve the default value.
    *
-   *  @param d     the function mapping keys to values, used for non-present keys
+   *  @param d     default value used for non-present keys
    *  @return      a wrapper of the map with a default value
    */
   def withDefaultValue[B1 >: B](d: B1): immutable.Map[A, B1] = new Map.WithDefault[A, B1](this, x => d)
@@ -184,4 +191,4 @@ object Map extends ImmutableMapFactory[Map] {
 }
 
 /** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
-private[scala] abstract class AbstractMap[A, +B] extends scala.collection.AbstractMap[A, B] with Map[A, B]
+abstract class AbstractMap[A, +B] extends scala.collection.AbstractMap[A, B] with Map[A, B]
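
Two behaviours touched by the Map.scala hunk are easy to misread: toMap on an already-immutable map is documented above as returning the map itself (no copy), and withDefaultValue takes a plain default value that transformer methods such as map do not preserve. A short sketch under those assumptions (the demo object name is illustrative):

    object MapDefaultDemo extends App {
      val m = Map("a" -> 1).withDefaultValue(0)

      println(m("missing"))     // 0: apply falls back to the default value
      println(m.get("missing")) // None: get still reports absence

      // Transformer methods return a plain Map without the default.
      val doubled = m.map { case (k, v) => (k, v * 2) }
      println(doubled.getOrElse("missing", -1)) // -1; doubled("missing") would throw

      // toMap on an immutable map is the identity: no new map is built.
      println(m.toMap eq m) // true
    }
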
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index 7e60f07..94a5b79 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -85,14 +86,14 @@ self =>
    */
   override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): immutable.Map[A, B1] =
     ((repr: immutable.Map[A, B1]) /: xs.seq) (_ + _)
-  
+
   /** Filters this map by retaining only keys satisfying a predicate.
    *  @param  p   the predicate used to test keys
    *  @return an immutable map consisting only of those key value pairs of this map where the key satisfies
    *          the predicate `p`. The resulting map wraps the original map without copying any elements.
    */
   override def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p) with DefaultMap[A, B]
-  
+
   /** Transforms this map by applying a function to every retrieved value.
    *  @param  f   the function used to transform values of this map.
    *  @return a map view which maps every key of this map
@@ -123,7 +124,7 @@ self =>
   def transform[C, That](f: (A, B) => C)(implicit bf: CanBuildFrom[This, (A, C), That]): That = {
     val b = bf(repr)
     for ((key, value) <- this) b += ((key, f(key, value)))
-    b.result
+    b.result()
   }
 }
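
The transform method whose builder call is tidied up above rebuilds the map by applying a function of both key and value. A tiny illustrative sketch (the demo object name is illustrative):

    object TransformDemo extends App {
      val prices = Map("apple" -> 2.0, "pear" -> 3.0)

      // The function sees the key as well as the value, unlike mapValues.
      val labelled = prices.transform((name, price) => s"$name: $price")

      println(labelled) // Map(apple -> apple: 2.0, pear -> pear: 3.0)
    }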
 
diff --git a/src/library/scala/collection/immutable/MapProxy.scala b/src/library/scala/collection/immutable/MapProxy.scala
index f3f04ec..d126b9e 100644
--- a/src/library/scala/collection/immutable/MapProxy.scala
+++ b/src/library/scala/collection/immutable/MapProxy.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 /**
@@ -22,6 +23,7 @@ package immutable
  *  @version 2.0, 31/12/2006
  *  @since   2.8
  */
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
 trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] {
   override def repr = this
   private def newProxy[B1 >: B](newSelf: Map[A, B1]): MapProxy[A, B1] =
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index d3be299..f1ac161 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -6,12 +6,11 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
+package scala
+package collection
 package immutable
 
 import mutable.{ Builder, ListBuffer }
-import generic._
 
 /** `NumericRange` is a more generic version of the
  *  `Range` class which works with arbitrary types.
@@ -81,17 +80,6 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
   // to guard against any (most likely illusory) performance drop.  They should
   // be eliminated one way or another.
 
-  // Counts how many elements from the start meet the given test.
-  private def skipCount(p: T => Boolean): Int = {
-    var current = start
-    var counted = 0
-
-    while (counted < length && p(current)) {
-      counted += 1
-      current += step
-    }
-    counted
-  }
   // Tests whether a number is within the endpoints, without testing
   // whether it is a member of the sequence (i.e. when step > 1.)
   private def isWithinBoundaries(elem: T) = !isEmpty && (
@@ -124,21 +112,21 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
     if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(idx.toString)
     else locationAfterN(idx)
   }
-  
+
   import NumericRange.defaultOrdering
-  
+
   override def min[T1 >: T](implicit ord: Ordering[T1]): T =
     if (ord eq defaultOrdering(num)) {
       if (num.signum(step) > 0) start
       else last
     } else super.min(ord)
-  
-  override def max[T1 >: T](implicit ord: Ordering[T1]): T = 
+
+  override def max[T1 >: T](implicit ord: Ordering[T1]): T =
     if (ord eq defaultOrdering(num)) {
       if (num.signum(step) > 0) last
       else start
     } else super.max(ord)
-  
+
   // Motivated by the desire for Double ranges with BigDecimal precision,
   // we need some way to map a Range and get another Range.  This can't be
   // done in any fully general way because Ranges are not arbitrary
@@ -182,15 +170,41 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
   def containsTyped(x: T): Boolean =
     isWithinBoundaries(x) && (((x - start) % step) == zero)
 
-  override def contains(x: Any): Boolean =
+  override def contains[A1 >: T](x: A1): Boolean =
     try containsTyped(x.asInstanceOf[T])
     catch { case _: ClassCastException => false }
 
   final override def sum[B >: T](implicit num: Numeric[B]): B = {
-    import num.Ops
-    if (isEmpty) this.num fromInt 0
-    else if (numRangeElements == 1) head
-    else ((this.num fromInt numRangeElements) * (head + last) / (this.num fromInt 2))
+    // arithmetic series formula  can be used for regular addition
+    if ((num eq scala.math.Numeric.IntIsIntegral)||
+        (num eq scala.math.Numeric.BigIntIsIntegral)||
+        (num eq scala.math.Numeric.ShortIsIntegral)||
+        (num eq scala.math.Numeric.ByteIsIntegral)||
+        (num eq scala.math.Numeric.CharIsIntegral)||
+        (num eq scala.math.Numeric.LongIsIntegral)||
+        (num eq scala.math.Numeric.FloatAsIfIntegral)||
+        (num eq scala.math.Numeric.BigDecimalIsFractional)||
+        (num eq scala.math.Numeric.DoubleAsIfIntegral)) {
+      val numAsIntegral = num.asInstanceOf[Integral[B]]
+      import numAsIntegral._
+      if (isEmpty) num fromInt 0
+      else if (numRangeElements == 1) head
+      else ((num fromInt numRangeElements) * (head + last) / (num fromInt 2))
+    } else {
+      // user provided custom Numeric, we cannot rely on arithmetic series formula
+      if (isEmpty) num.zero
+      else {
+        var acc = num.zero
+        var i = head
+        var idx = 0
+        while(idx < length) {
+          acc = num.plus(acc, i)
+          i = i + step
+          idx = idx + 1
+        }
+        acc
+      }
+    }
   }
 
   override lazy val hashCode = super.hashCode()
@@ -213,7 +227,7 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
 /** A companion object for numeric ranges.
  */
 object NumericRange {
-  
+
   /** Calculates the number of elements in a range given start, end, step, and
    *  whether or not it is inclusive.  Throws an exception if step == 0 or
    *  the number of elements exceeds the maximum Int.
@@ -227,28 +241,79 @@ object NumericRange {
     else if (start == end) if (isInclusive) 1 else 0
     else if (upward != posStep) 0
     else {
-      val diff      = num.minus(end, start)
-      val jumps     = num.toLong(num.quot(diff, step))
-      val remainder = num.rem(diff, step)
-      val longCount = jumps + (
-        if (!isInclusive && zero == remainder) 0 else 1
-      )
-
-      /** The edge cases keep coming.  Since e.g.
-       *    Long.MaxValue + 1 == Long.MinValue
-       *  we do some more improbable seeming checks lest
-       *  overflow turn up as an empty range.
+      /* We have to be frightfully paranoid about running out of range.
+       * We also can't assume that the numbers will fit in a Long.
+       * We will assume that if a > 0, -a can be represented, and if
+       * a < 0, -a+1 can be represented.  We also assume that if we
+       * can't fit in Int, we can represent 2*Int.MaxValue+3 (at least).
+       * And we assume that numbers wrap rather than cap when they overflow.
        */
-      // The second condition contradicts an empty result.
-      val isOverflow = longCount == 0 && num.lt(num.plus(start, step), end) == upward
-
-      if (longCount > scala.Int.MaxValue || longCount < 0L || isOverflow) {
-        val word  = if (isInclusive) "to" else "until"
-        val descr = List(start, word, end, "by", step) mkString " "
-
-        throw new IllegalArgumentException(descr + ": seqs cannot contain more than Int.MaxValue elements.")
+      // Check whether we can short-circuit by deferring to Int range.
+      val startint = num.toInt(start)
+      if (start == num.fromInt(startint)) {
+        val endint = num.toInt(end)
+        if (end == num.fromInt(endint)) {
+          val stepint = num.toInt(step)
+          if (step == num.fromInt(stepint)) {
+            return {
+              if (isInclusive) Range.inclusive(startint, endint, stepint).length
+              else             Range          (startint, endint, stepint).length
+            }
+          }
+        }
+      }
+      // If we reach this point, deferring to Int failed.
+      // Numbers may be big.
+      val one = num.one
+      val limit = num.fromInt(Int.MaxValue)
+      def check(t: T): T = 
+        if (num.gt(t, limit)) throw new IllegalArgumentException("More than Int.MaxValue elements.")
+        else t
+      // If the range crosses zero, it might overflow when subtracted
+      val startside = num.signum(start)
+      val endside = num.signum(end)
+      num.toInt{
+        if (startside*endside >= 0) {
+          // We're sure we can subtract these numbers.
+          // Note that we do not use .rem because of different conventions for Long and BigInt
+          val diff = num.minus(end, start)
+          val quotient = check(num.quot(diff, step))
+          val remainder = num.minus(diff, num.times(quotient, step))
+          if (!isInclusive && zero == remainder) quotient else check(num.plus(quotient, one))
+        }
+        else {
+          // We might not even be able to subtract these numbers.
+          // Jump in three pieces:
+          //   * start to -1 or 1, whichever is closer (waypointA)
+          //   * one step, which will take us at least to 0 (ends at waypointB)
+          //   * there to the end
+          val negone = num.fromInt(-1)
+          val startlim  = if (posStep) negone else one
+          val startdiff = num.minus(startlim, start)
+          val startq    = check(num.quot(startdiff, step))
+          val waypointA = if (startq == zero) start else num.plus(start, num.times(startq, step))
+          val waypointB = num.plus(waypointA, step)
+          check {
+            if (num.lt(waypointB, end) != upward) {
+              // No last piece
+              if (isInclusive && waypointB == end) num.plus(startq, num.fromInt(2))
+              else num.plus(startq, one)
+            }
+            else {
+              // There is a last piece
+              val enddiff = num.minus(end,waypointB)
+              val endq    = check(num.quot(enddiff, step))
+              val last    = if (endq == zero) waypointB else num.plus(waypointB, num.times(endq, step))
+              // Now we have to tally up all the pieces
+              //   1 for the initial value
+              //   startq steps to waypointA
+              //   1 step to waypointB
+              //   endq steps to the end (one less if !isInclusive and last==end)
+              num.plus(startq, num.plus(endq, if (!isInclusive && last==end) one else num.fromInt(2)))
+            }
+          }
+        }
       }
-      longCount.toInt
     }
   }
 
@@ -272,7 +337,7 @@ object NumericRange {
     new Exclusive(start, end, step)
   def inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]): Inclusive[T] =
     new Inclusive(start, end, step)
-  
+
   private[collection] val defaultOrdering = Map[Numeric[_], Ordering[_]](
     Numeric.BigIntIsIntegral -> Ordering.BigInt,
     Numeric.IntIsIntegral -> Ordering.Int,
@@ -284,6 +349,6 @@ object NumericRange {
     Numeric.DoubleAsIfIntegral -> Ordering.Double,
     Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal
   )
-  
+
 }
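
The rewritten count above first tries to defer to the Int-based Range and only then walks the overflow-aware generic path, while sum uses the arithmetic-series formula solely for the recognised standard Numeric instances. A sketch exercising both on BigInt and Long ranges (the demo object name is illustrative):

    object NumericRangeDemo extends App {
      // Endpoints that do not fit in Int, so count cannot defer to Range
      // and takes the careful generic path instead.
      val big = BigInt("10000000000") to BigInt("10000000009")
      println(big.length) // 10

      // BigIntIsIntegral is one of the recognised Numeric instances, so sum
      // uses the closed-form length * (head + last) / 2 rather than iterating.
      println(big.sum)    // 100000000045

      // A Long range with small endpoints is delegated to the Int-based Range
      // when its length is computed.
      val small = 1L to 10L by 3L
      println(small.length) // 4  (1, 4, 7, 10)
      println(small.sum)    // 22
    }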
 
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 952107b..3a64820 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import java.io._
@@ -30,7 +31,7 @@ object PagedSeq {
     new PagedSeq[T]((data: Array[T], start: Int, len: Int) => {
       var i = 0
       while (i < len && source.hasNext) {
-        data(start + i) = source.next
+        data(start + i) = source.next()
         i += 1
       }
       if (i == 0) -1 else i
@@ -51,7 +52,7 @@ object PagedSeq {
         if (cnt == len) cnt
         else (more(data, start + cnt, len - cnt) max 0) + cnt
       } else if (source.hasNext) {
-        current = source.next
+        current = source.next()
         more(data, start, len)
       } else -1
     new PagedSeq(more(_: Array[Char], _: Int, _: Int))
@@ -125,6 +126,7 @@ import PagedSeq._
  *  @define mayNotTerminateInf
  *  @define willNotTerminateInf
  */
+@deprecatedInheritance("The implementation details of paged sequences make inheriting from them unwise.", "2.11.0")
 class PagedSeq[T: ClassTag] protected(
   more: (Array[T], Int, Int) => Int,
   first1: Page[T],
@@ -186,7 +188,10 @@ extends scala.collection.AbstractSeq[T]
     val s = start + _start
     val e = if (_end == UndeterminedEnd) _end else start + _end
     var f = first1
-    while (f.end <= s && !f.isLast) f = f.next
+    while (f.end <= s && !f.isLast) {
+      if (f.next eq null) f.addMore(more)
+      f = f.next
+    }
     new PagedSeq(more, f, s, e)
   }
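
The slice fix above forces additional pages (the f.addMore(more) call) when a slice starts beyond what has been read from the source so far. A small sketch of the lazily paged behaviour it protects (the demo object name is illustrative):

    import scala.collection.immutable.PagedSeq

    object PagedSeqSliceDemo extends App {
      // A paged sequence reads its underlying source lazily, one page at a time.
      val chars: Iterator[Char] = Iterator.fill(10000)('x')
      val seq = PagedSeq.fromIterator(chars)

      // Slicing past the pages computed so far must force the producer
      // function for the missing pages, rather than hitting a null page.
      val tail = seq.slice(9000, 9010)
      println(tail.length)   // 10
      println(tail.mkString) // xxxxxxxxxx
    }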
 
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 7d2ff95..264304d 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -37,6 +38,7 @@ import scala.annotation.tailrec
  */
 
 @SerialVersionUID(-7622936493364270175L)
+@deprecatedInheritance("The implementation details of immutable queues make inheriting from them unwise.", "2.11.0")
 class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
          extends AbstractSeq[A]
             with LinearSeq[A]
@@ -87,6 +89,16 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
    */
   override def length = in.length + out.length
 
+  override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Queue[A], B, That]): That = bf match {
+    case _: Queue.GenericCanBuildFrom[_] => new Queue(in, elem :: out).asInstanceOf[That]
+    case _                               => super.+:(elem)(bf)
+  }
+
+  override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Queue[A], B, That]): That = bf match {
+    case _: Queue.GenericCanBuildFrom[_] => enqueue(elem).asInstanceOf[That]
+    case _                               => super.:+(elem)(bf)
+  }
+
   /** Creates a new queue with element added at the end
    *  of the old queue.
    *
@@ -117,6 +129,13 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
     case _                  => throw new NoSuchElementException("dequeue on empty queue")
   }
 
+  /** Optionally retrieves the first element and a queue of the remaining elements.
+   *
+   * @return A tuple of the first element of the queue, and a new queue with this element removed.
+   *         If the queue is empty, `None` is returned.
+   */
+  def dequeueOption: Option[(A, Queue[A])] = if(isEmpty) None else Some(dequeue)
+
   /** Returns the first element in the queue, or throws an error if there
    *  is no element contained in the queue.
    *
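
The Queue additions above keep +: and :+ on the queue's own two-list representation (constant-time prepend and enqueue) instead of falling back to the generic builder, and add dequeueOption as a non-throwing variant of dequeue. A brief usage sketch (the demo object name is illustrative):

    import scala.collection.immutable.Queue

    object QueueDemo extends App {
      val q = Queue(1, 2, 3)

      // The new overloads keep the static type Queue and stay O(1).
      val q2: Queue[Int] = 0 +: q
      val q3: Queue[Int] = q2 :+ 4

      // dequeueOption avoids the exception thrown by dequeue on an empty queue.
      println(q3.dequeueOption)               // Some((0,Queue(1, 2, 3, 4)))
      println(Queue.empty[Int].dequeueOption) // None
    }
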
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 802e166..26ccd09 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection.immutable
+package scala
+package collection.immutable
 
 import scala.collection.parallel.immutable.ParRange
 
@@ -22,6 +23,15 @@ import scala.collection.parallel.immutable.ParRange
  *     println(r2.length) // = 5
  *  }}}
  *
+ *  Ranges that contain more than `Int.MaxValue` elements can be created, but
+ *  these overfull ranges have only limited capabilities.  Any method that
+ *  could require a collection of over `Int.MaxValue` length to be created, or
+ *  could be asked to index beyond `Int.MaxValue` elements will throw an
+ *  exception.  Overfull ranges can safely be reduced in size by changing
+ *  the step size (e.g. `by 3`) or taking/dropping elements.  `contains`,
+ *  `equals`, and access to the ends of the range (`head`, `last`, `tail`,
+ *  `init`) are also permitted on overfull ranges.
+ *
  *  @param start      the start of this range.
  *  @param end        the exclusive end of the range.
  *  @param step       the step for the range.
@@ -41,6 +51,7 @@ import scala.collection.parallel.immutable.ParRange
  *         and its complexity is O(1).
  */
 @SerialVersionUID(7618862778670199309L)
+@deprecatedInheritance("The implementation details of Range makes inheriting from it unwise.", "2.11.0")
 class Range(val start: Int, val end: Int, val step: Int)
 extends scala.collection.AbstractSeq[Int]
    with IndexedSeq[Int]
@@ -64,6 +75,7 @@ extends scala.collection.AbstractSeq[Int]
     || (start < end && step < 0)
     || (start == end && !isInclusive)
   )
+  @deprecated("This method will be made private, use `length` instead.", "2.11")
   final val numRangeElements: Int = {
     if (step == 0) throw new IllegalArgumentException("step cannot be 0.")
     else if (isEmpty) 0
@@ -73,21 +85,38 @@ extends scala.collection.AbstractSeq[Int]
       else len.toInt
     }
   }
-  final val lastElement     = start + (numRangeElements - 1) * step
-  final val terminalElement = start + numRangeElements * step
+  @deprecated("This method will be made private, use `last` instead.", "2.11")
+  final val lastElement = 
+    if (isEmpty) start - step
+    else step match {
+      case 1  => if (isInclusive) end else end-1
+      case -1 => if (isInclusive) end else end+1
+      case _  =>
+        val remainder = (gap % step).toInt
+        if (remainder != 0) end - remainder
+        else if (isInclusive) end
+        else end - step
+    }
+    
+  @deprecated("This method will be made private.", "2.11")
+  final val terminalElement = lastElement + step
 
+  /** The last element of this range.  This method will return the correct value
+   *  even if there are too many elements to iterate over.
+   */
   override def last = if (isEmpty) Nil.last else lastElement
+  override def head = if (isEmpty) Nil.head else start
 
   override def min[A1 >: Int](implicit ord: Ordering[A1]): Int =
     if (ord eq Ordering.Int) {
-      if (step > 0) start
+      if (step > 0) head
       else last
     } else super.min(ord)
 
   override def max[A1 >: Int](implicit ord: Ordering[A1]): Int =
     if (ord eq Ordering.Int) {
       if (step > 0) last
-      else start
+      else head
     } else super.max(ord)
 
   protected def copy(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
@@ -111,21 +140,6 @@ extends scala.collection.AbstractSeq[Int]
       fail()
   }
 
-  def validateRangeBoundaries(f: Int => Any): Boolean = {
-    validateMaxLength()
-
-    start != Int.MinValue || end != Int.MinValue || {
-      var count = 0
-      var num = start
-      while (count < numRangeElements) {
-        f(num)
-        count += 1
-        num += step
-      }
-      false
-    }
-  }
-
   final def apply(idx: Int): Int = {
     validateMaxLength()
     if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(idx.toString)
@@ -133,14 +147,19 @@ extends scala.collection.AbstractSeq[Int]
   }
 
   @inline final override def foreach[@specialized(Unit) U](f: Int => U) {
-    if (validateRangeBoundaries(f)) {
-      var i = start
-      val terminal = terminalElement
-      val step = this.step
-      while (i != terminal) {
-        f(i)
-        i += step
-      }
+    validateMaxLength()
+    val isCommonCase = (start != Int.MinValue || end != Int.MinValue)
+    var i = start
+    var count = 0
+    val terminal = terminalElement
+    val step = this.step
+    while(
+      if(isCommonCase) { i != terminal }
+      else             { count < numRangeElements }
+    ) {
+      f(i)
+      count += 1
+      i += step
     }
   }
 
@@ -153,8 +172,12 @@ extends scala.collection.AbstractSeq[Int]
    */
   final override def take(n: Int): Range = (
     if (n <= 0 || isEmpty) newEmptyRange(start)
-    else if (n >= numRangeElements) this
-    else new Range.Inclusive(start, locationAfterN(n - 1), step)
+    else if (n >= numRangeElements && numRangeElements >= 0) this
+    else {
+      // May have more than Int.MaxValue elements in range (numRangeElements < 0)
+      // but the logic is the same either way: take the first n
+      new Range.Inclusive(start, locationAfterN(n - 1), step)
+    }
   )
 
   /** Creates a new range containing all the elements of this range except the first `n` elements.
@@ -166,8 +189,12 @@ extends scala.collection.AbstractSeq[Int]
    */
   final override def drop(n: Int): Range = (
     if (n <= 0 || isEmpty) this
-    else if (n >= numRangeElements) newEmptyRange(end)
-    else copy(locationAfterN(n), end, step)
+    else if (n >= numRangeElements && numRangeElements >= 0) newEmptyRange(end)
+    else {
+      // May have more than Int.MaxValue elements (numRangeElements < 0)
+      // but the logic is the same either way: go forwards n steps, keep the rest
+      copy(locationAfterN(n), end, step)
+    }
   )
 
   /** Creates a new range containing all the elements of this range except the last one.
@@ -196,23 +223,17 @@ extends scala.collection.AbstractSeq[Int]
     drop(1)
   }
 
-  // Counts how many elements from the start meet the given test.
-  private def skipCount(p: Int => Boolean): Int = {
-    var current = start
-    var counted = 0
-
-    while (counted < numRangeElements && p(current)) {
-      counted += 1
-      current += step
+  // Advance from the start while we meet the given test
+  private def argTakeWhile(p: Int => Boolean): Long = {
+    if (isEmpty) start
+    else {
+      var current = start
+      val stop = last
+      while (current != stop && p(current)) current += step
+      if (current != stop || !p(current)) current
+      else current.toLong + step
     }
-    counted
   }
-  // Tests whether a number is within the endpoints, without testing
-  // whether it is a member of the sequence (i.e. when step > 1.)
-  private def isWithinBoundaries(elem: Int) = !isEmpty && (
-    (step > 0 && start <= elem && elem <= last ) ||
-    (step < 0 &&  last <= elem && elem <= start)
-  )
   // Methods like apply throw exceptions on invalid n, but methods like take/drop
   // are forgiving: therefore the checks are with the methods.
   private def locationAfterN(n: Int) = start + (step * n)
@@ -223,9 +244,33 @@ extends scala.collection.AbstractSeq[Int]
   // based on the given value.
   private def newEmptyRange(value: Int) = new Range(value, value, step)
 
-  final override def takeWhile(p: Int => Boolean): Range = take(skipCount(p))
-  final override def dropWhile(p: Int => Boolean): Range = drop(skipCount(p))
-  final override def span(p: Int => Boolean): (Range, Range) = splitAt(skipCount(p))
+  final override def takeWhile(p: Int => Boolean): Range = {
+    val stop = argTakeWhile(p)
+    if (stop==start) newEmptyRange(start)
+    else {
+      val x = (stop - step).toInt
+      if (x == last) this
+      else new Range.Inclusive(start, x, step)
+    }
+  }
+  final override def dropWhile(p: Int => Boolean): Range = {
+    val stop = argTakeWhile(p)
+    if (stop == start) this
+    else {
+      val x = (stop - step).toInt
+      if (x == last) newEmptyRange(last)
+      else new Range.Inclusive(x + step, last, step)
+    }
+  }
+  final override def span(p: Int => Boolean): (Range, Range) = {
+    val border = argTakeWhile(p)
+    if (border == start) (newEmptyRange(start), this)
+    else {
+      val x = (border - step).toInt
+      if (x == last) (this, newEmptyRange(last))
+      else (new Range.Inclusive(start, x, step), new Range.Inclusive(x+step, last, step))
+    }
+  }
 
   /** Creates a pair of new ranges, first consisting of elements before `n`, and the second
    *  of elements after `n`.
@@ -238,13 +283,32 @@ extends scala.collection.AbstractSeq[Int]
    *
    *  $doesNotUseBuilders
    */
-  final override def takeRight(n: Int): Range = drop(numRangeElements - n)
+  final override def takeRight(n: Int): Range = {
+    if (n <= 0) newEmptyRange(start)
+    else if (numRangeElements >= 0) drop(numRangeElements - n)
+    else {
+    // Need to handle over-full range separately
+      val y = last
+      val x = y - step.toLong*(n-1)
+      if ((step > 0 && x < start) || (step < 0 && x > start)) this
+      else new Range.Inclusive(x.toInt, y, step)
+    }
+  }
 
   /** Creates a new range consisting of the initial `length - n` elements of the range.
    *
    *  $doesNotUseBuilders
    */
-  final override def dropRight(n: Int): Range = take(numRangeElements - n)
+  final override def dropRight(n: Int): Range = {
+    if (n <= 0) this
+    else if (numRangeElements >= 0) take(numRangeElements - n)
+    else {
+    // Need to handle over-full range separately
+      val y = last - step.toInt*n
+      if ((step > 0 && y < start) || (step < 0 && y > start)) newEmptyRange(start)
+      else new Range.Inclusive(start, y.toInt, step)
+    }
+  }
 
   /** Returns the reverse of this range.
    *
@@ -260,12 +324,37 @@ extends scala.collection.AbstractSeq[Int]
     if (isInclusive) this
     else new Range.Inclusive(start, end, step)
 
-  final def contains(x: Int) = isWithinBoundaries(x) && ((x - start) % step == 0)
+  final def contains(x: Int) = {
+    if (x==end && !isInclusive) false
+    else if (step > 0) {
+      if (x < start || x > end) false
+      else (step == 1) || (((x - start) % step) == 0)
+    }
+    else {
+      if (x < end || x > start) false
+      else (step == -1) || (((x - start) % step) == 0)
+    }
+  }
 
   final override def sum[B >: Int](implicit num: Numeric[B]): Int = {
-    if (isEmpty) 0
-    else if (numRangeElements == 1) head
-    else (numRangeElements.toLong * (head + last) / 2).toInt
+    if (num eq scala.math.Numeric.IntIsIntegral) {
+      // this is normal integer range with usual addition. arithmetic series formula can be used
+      if (isEmpty) 0
+      else if (numRangeElements == 1) head
+      else (numRangeElements.toLong * (head + last) / 2).toInt
+    } else {
+      // user provided custom Numeric, we cannot rely on arithmetic series formula
+      if (isEmpty) num.toInt(num.zero)
+      else {
+        var acc = num.zero
+        var i = head
+        while(i != terminalElement) {
+          acc = num.plus(acc, i)
+          i = i + step
+        }
+        num.toInt(acc)
+      }
+    }
   }
 
   override def toIterable = this
@@ -274,9 +363,15 @@ extends scala.collection.AbstractSeq[Int]
 
   override def equals(other: Any) = other match {
     case x: Range =>
-      (x canEqual this) && (length == x.length) && (
-        isEmpty ||                            // all empty sequences are equal
-        (start == x.start && last == x.last)  // same length and same endpoints implies equality
+      // Note: this must succeed for overfull ranges (length > Int.MaxValue)
+      (x canEqual this) && (
+        isEmpty ||                              // all empty sequences are equal
+        (start == x.start && {                  // Otherwise, must have same start
+          val l0 = last
+          (l0 == x.last && (                    // And same end
+            start == l0 || step == x.step       // And either the same step, or not take any steps
+          ))
+        })
       )
     case _ =>
       super.equals(other)
@@ -286,7 +381,8 @@ extends scala.collection.AbstractSeq[Int]
    */
 
   override def toString() = {
-    val endStr = if (numRangeElements > Range.MAX_PRINT) ", ... )" else ")"
+    val endStr =
+      if (numRangeElements > Range.MAX_PRINT || (!isEmpty && numRangeElements < 0)) ", ... )" else ")"
     take(Range.MAX_PRINT).mkString("Range(", ", ", endStr)
   }
 }
@@ -325,7 +421,7 @@ object Range {
     }
   }
   def count(start: Int, end: Int, step: Int): Int =
-    count(start, end, step, false)
+    count(start, end, step, isInclusive = false)
 
   class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
 //    override def par = new ParRange(this)
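
Taken together, the Range changes above make "overfull" ranges (more than Int.MaxValue elements) constructible with limited capabilities, and give takeWhile/dropWhile/span an implementation that never counts elements up front. A sketch of what is and is not allowed, under those assumptions (the demo object name is illustrative):

    object OverfullRangeDemo extends App {
      // More than Int.MaxValue elements: construction and end access still work.
      val huge = Int.MinValue to Int.MaxValue
      println(huge.head)        // -2147483648
      println(huge.last)        // 2147483647
      println(huge.contains(0)) // true

      // Reducing the range first makes it an ordinary, fully usable Range.
      println(huge.take(3))                // Range(-2147483648, -2147483647, -2147483646)
      println((huge by 1000000000).length) // 5

      // takeWhile/dropWhile/span advance through the range instead of
      // precomputing a count.
      val r = 1 to 20 by 3
      println(r.takeWhile(_ < 10)) // Range(1, 4, 7)
      println(r.dropWhile(_ < 10)) // Range(10, 13, 16, 19)
    }
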
diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala
deleted file mode 100644
index 9739e8f..0000000
--- a/src/library/scala/collection/immutable/RedBlack.scala
+++ /dev/null
@@ -1,293 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2005-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala
-package collection
-package immutable
-
-/** Old base class that was used by previous implementations of `TreeMaps` and `TreeSets`.
- *
- *  Deprecated due to various performance bugs (see [[https://issues.scala-lang.org/browse/SI-5331 SI-5331]] for more information).
- *
- *  @since 2.3
- */
-@deprecated("use `TreeMap` or `TreeSet` instead", "2.10.0")
-@SerialVersionUID(8691885935445612921L)
-abstract class RedBlack[A] extends Serializable {
-
-  def isSmaller(x: A, y: A): Boolean
-
-  private def blacken[B](t: Tree[B]): Tree[B] = t match {
-    case RedTree(k, v, l, r) => BlackTree(k, v, l, r)
-    case t => t
-  }
-  private def mkTree[B](isBlack: Boolean, k: A, v: B, l: Tree[B], r: Tree[B]) =
-    if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r)
-
-  abstract class Tree[+B] extends Serializable {
-    def isEmpty: Boolean
-    def isBlack: Boolean
-    def lookup(x: A): Tree[B]
-    def update[B1 >: B](k: A, v: B1): Tree[B1] = blacken(upd(k, v))
-    def delete(k: A): Tree[B] = blacken(del(k))
-    def range(from: Option[A], until: Option[A]): Tree[B] = blacken(rng(from, until))
-    def foreach[U](f: (A, B) =>  U)
-    def toStream: Stream[(A,B)]
-    def iterator: Iterator[(A, B)]
-    def upd[B1 >: B](k: A, v: B1): Tree[B1]
-    def del(k: A): Tree[B]
-    def smallest: NonEmpty[B]
-    def rng(from: Option[A], until: Option[A]): Tree[B]
-    def first : A
-    def last : A
-    def count : Int
-  }
-  abstract class NonEmpty[+B] extends Tree[B] with Serializable {
-    def isEmpty = false
-    def key: A
-    def value: B
-    def left: Tree[B]
-    def right: Tree[B]
-    def lookup(k: A): Tree[B] =
-      if (isSmaller(k, key)) left.lookup(k)
-      else if (isSmaller(key, k)) right.lookup(k)
-      else this
-    private[this] def balanceLeft[B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[B1], d: Tree[B1])/*: NonEmpty[B1]*/ = l match {
-      case RedTree(y, yv, RedTree(x, xv, a, b), c) =>
-        RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
-      case RedTree(x, xv, a, RedTree(y, yv, b, c)) =>
-        RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
-      case _ =>
-        mkTree(isBlack, z, zv, l, d)
-    }
-    private[this] def balanceRight[B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[B1], r: Tree[B1])/*: NonEmpty[B1]*/ = r match {
-      case RedTree(z, zv, RedTree(y, yv, b, c), d) =>
-        RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
-      case RedTree(y, yv, b, RedTree(z, zv, c, d)) =>
-        RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
-      case _ =>
-        mkTree(isBlack, x, xv, a, r)
-    }
-    def upd[B1 >: B](k: A, v: B1): Tree[B1] = {
-      if (isSmaller(k, key)) balanceLeft(isBlack, key, value, left.upd(k, v), right)
-      else if (isSmaller(key, k)) balanceRight(isBlack, key, value, left, right.upd(k, v))
-      else mkTree(isBlack, k, v, left, right)
-    }
-    // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
-    // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html
-    def del(k: A): Tree[B] = {
-      def balance(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
-        case (RedTree(y, yv, a, b), RedTree(z, zv, c, d)) =>
-          RedTree(x, xv, BlackTree(y, yv, a, b), BlackTree(z, zv, c, d))
-        case (RedTree(y, yv, RedTree(z, zv, a, b), c), d) =>
-          RedTree(y, yv, BlackTree(z, zv, a, b), BlackTree(x, xv, c, d))
-        case (RedTree(y, yv, a, RedTree(z, zv, b, c)), d) =>
-          RedTree(z, zv, BlackTree(y, yv, a, b), BlackTree(x, xv, c, d))
-        case (a, RedTree(y, yv, b, RedTree(z, zv, c, d))) =>
-          RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
-        case (a, RedTree(y, yv, RedTree(z, zv, b, c), d)) =>
-          RedTree(z, zv, BlackTree(x, xv, a, b), BlackTree(y, yv, c, d))
-        case (a, b) =>
-          BlackTree(x, xv, a, b)
-      }
-      def subl(t: Tree[B]) = t match {
-        case BlackTree(x, xv, a, b) => RedTree(x, xv, a, b)
-        case _ => sys.error("Defect: invariance violation; expected black, got "+t)
-      }
-      def balLeft(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
-        case (RedTree(y, yv, a, b), c) =>
-          RedTree(x, xv, BlackTree(y, yv, a, b), c)
-        case (bl, BlackTree(y, yv, a, b)) =>
-          balance(x, xv, bl, RedTree(y, yv, a, b))
-        case (bl, RedTree(y, yv, BlackTree(z, zv, a, b), c)) =>
-          RedTree(z, zv, BlackTree(x, xv, bl, a), balance(y, yv, b, subl(c)))
-        case _ => sys.error("Defect: invariance violation at "+right)
-      }
-      def balRight(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
-        case (a, RedTree(y, yv, b, c)) =>
-          RedTree(x, xv, a, BlackTree(y, yv, b, c))
-        case (BlackTree(y, yv, a, b), bl) =>
-          balance(x, xv, RedTree(y, yv, a, b), bl)
-        case (RedTree(y, yv, a, BlackTree(z, zv, b, c)), bl) =>
-          RedTree(z, zv, balance(y, yv, subl(a), b), BlackTree(x, xv, c, bl))
-        case _ => sys.error("Defect: invariance violation at "+left)
-      }
-      def delLeft = left match {
-        case _: BlackTree[_] => balLeft(key, value, left.del(k), right)
-        case _ => RedTree(key, value, left.del(k), right)
-      }
-      def delRight = right match {
-        case _: BlackTree[_] => balRight(key, value, left, right.del(k))
-        case _ => RedTree(key, value, left, right.del(k))
-      }
-      def append(tl: Tree[B], tr: Tree[B]): Tree[B] = (tl, tr) match {
-        case (Empty, t) => t
-        case (t, Empty) => t
-        case (RedTree(x, xv, a, b), RedTree(y, yv, c, d)) =>
-          append(b, c) match {
-            case RedTree(z, zv, bb, cc) => RedTree(z, zv, RedTree(x, xv, a, bb), RedTree(y, yv, cc, d))
-            case bc => RedTree(x, xv, a, RedTree(y, yv, bc, d))
-          }
-        case (BlackTree(x, xv, a, b), BlackTree(y, yv, c, d)) =>
-          append(b, c) match {
-            case RedTree(z, zv, bb, cc) => RedTree(z, zv, BlackTree(x, xv, a, bb), BlackTree(y, yv, cc, d))
-            case bc => balLeft(x, xv, a, BlackTree(y, yv, bc, d))
-          }
-        case (a, RedTree(x, xv, b, c)) => RedTree(x, xv, append(a, b), c)
-        case (RedTree(x, xv, a, b), c) => RedTree(x, xv, a, append(b, c))
-      }
-      // RedBlack is neither A : Ordering[A], nor A <% Ordered[A]
-      k match {
-        case _ if isSmaller(k, key) => delLeft
-        case _ if isSmaller(key, k) => delRight
-        case _ => append(left, right)
-      }
-    }
-
-    def smallest: NonEmpty[B] = if (left.isEmpty) this else left.smallest
-
-    def toStream: Stream[(A,B)] =
-      left.toStream ++ Stream((key,value)) ++ right.toStream
-
-    def iterator: Iterator[(A, B)] =
-      left.iterator ++ Iterator.single(Pair(key, value)) ++ right.iterator
-
-    def foreach[U](f: (A, B) => U) {
-      left foreach f
-      f(key, value)
-      right foreach f
-    }
-
-    override def rng(from: Option[A], until: Option[A]): Tree[B] = {
-      if (from == None && until == None) return this
-      if (from != None && isSmaller(key, from.get)) return right.rng(from, until);
-      if (until != None && (isSmaller(until.get,key) || !isSmaller(key,until.get)))
-        return left.rng(from, until);
-      val newLeft = left.rng(from, None)
-      val newRight = right.rng(None, until)
-      if ((newLeft eq left) && (newRight eq right)) this
-      else if (newLeft eq Empty) newRight.upd(key, value);
-      else if (newRight eq Empty) newLeft.upd(key, value);
-      else rebalance(newLeft, newRight)
-    }
-
-    // The zipper returned might have been traversed left-most (always the left child)
-    // or right-most (always the right child). Left trees are traversed right-most,
-    // and right trees are traversed leftmost.
-
-    // Returns the zipper for the side with deepest black nodes depth, a flag
-    // indicating whether the trees were unbalanced at all, and a flag indicating
-    // whether the zipper was traversed left-most or right-most.
-
-    // If the trees were balanced, returns an empty zipper
-    private[this] def compareDepth(left: Tree[B], right: Tree[B]): (List[NonEmpty[B]], Boolean, Boolean, Int) = {
-      // Once a side is found to be deeper, unzip it to the bottom
-      def unzip(zipper: List[NonEmpty[B]], leftMost: Boolean): List[NonEmpty[B]] = {
-        val next = if (leftMost) zipper.head.left else zipper.head.right
-        next match {
-          case node: NonEmpty[_] => unzip(node :: zipper, leftMost)
-          case Empty             => zipper
-        }
-      }
-
-      // Unzip left tree on the rightmost side and right tree on the leftmost side until one is
-      // found to be deeper, or the bottom is reached
-      def unzipBoth(left: Tree[B],
-                    right: Tree[B],
-                    leftZipper: List[NonEmpty[B]],
-                    rightZipper: List[NonEmpty[B]],
-                    smallerDepth: Int): (List[NonEmpty[B]], Boolean, Boolean, Int) = (left, right) match {
-        case (l @ BlackTree(_, _, _, _), r @ BlackTree(_, _, _, _)) =>
-          unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth + 1)
-        case (l @ RedTree(_, _, _, _), r @ RedTree(_, _, _, _)) =>
-          unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth)
-        case (_, r @ RedTree(_, _, _, _)) =>
-          unzipBoth(left, r.left, leftZipper, r :: rightZipper, smallerDepth)
-        case (l @ RedTree(_, _, _, _), _) =>
-          unzipBoth(l.right, right, l :: leftZipper, rightZipper, smallerDepth)
-        case (Empty, Empty) =>
-          (Nil, true, false, smallerDepth)
-        case (Empty, r @ BlackTree(_, _, _, _)) =>
-          val leftMost = true
-          (unzip(r :: rightZipper, leftMost), false, leftMost, smallerDepth)
-        case (l @ BlackTree(_, _, _, _), Empty) =>
-          val leftMost = false
-          (unzip(l :: leftZipper, leftMost), false, leftMost, smallerDepth)
-      }
-      unzipBoth(left, right, Nil, Nil, 0)
-    }
-
-    private[this] def rebalance(newLeft: Tree[B], newRight: Tree[B]) = {
-      // This is like drop(n-1), but only counting black nodes
-      def  findDepth(zipper: List[NonEmpty[B]], depth: Int): List[NonEmpty[B]] = zipper match {
-        case BlackTree(_, _, _, _) :: tail =>
-          if (depth == 1) zipper else findDepth(tail, depth - 1)
-        case _ :: tail => findDepth(tail, depth)
-        case Nil => sys.error("Defect: unexpected empty zipper while computing range")
-      }
-
-      // Blackening the smaller tree avoids balancing problems on union;
-      // this can't be done later, though, or it would change the result of compareDepth
-      val blkNewLeft = blacken(newLeft)
-      val blkNewRight = blacken(newRight)
-      val (zipper, levelled, leftMost, smallerDepth) = compareDepth(blkNewLeft, blkNewRight)
-
-      if (levelled) {
-        BlackTree(key, value, blkNewLeft, blkNewRight)
-      } else {
-        val zipFrom = findDepth(zipper, smallerDepth)
-        val union = if (leftMost) {
-          RedTree(key, value, blkNewLeft, zipFrom.head)
-        } else {
-          RedTree(key, value, zipFrom.head, blkNewRight)
-        }
-        val zippedTree = zipFrom.tail.foldLeft(union: Tree[B]) { (tree, node) =>
-            if (leftMost)
-              balanceLeft(node.isBlack, node.key, node.value, tree, node.right)
-            else
-              balanceRight(node.isBlack, node.key, node.value, node.left, tree)
-        }
-        zippedTree
-      }
-    }
-    def first = if (left .isEmpty) key else left.first
-    def last  = if (right.isEmpty) key else right.last
-    def count = 1 + left.count + right.count
-  }
-  case object Empty extends Tree[Nothing] {
-    def isEmpty = true
-    def isBlack = true
-    def lookup(k: A): Tree[Nothing] = this
-    def upd[B](k: A, v: B): Tree[B] = RedTree(k, v, Empty, Empty)
-    def del(k: A): Tree[Nothing] = this
-    def smallest: NonEmpty[Nothing] = throw new NoSuchElementException("empty map")
-    def iterator: Iterator[(A, Nothing)] = Iterator.empty
-    def toStream: Stream[(A,Nothing)] = Stream.empty
-
-    def foreach[U](f: (A, Nothing) => U) {}
-
-    def rng(from: Option[A], until: Option[A]) = this
-    def first = throw new NoSuchElementException("empty map")
-    def last = throw new NoSuchElementException("empty map")
-    def count = 0
-  }
-  case class RedTree[+B](override val key: A,
-                         override val value: B,
-                         override val left: Tree[B],
-                         override val right: Tree[B]) extends NonEmpty[B] {
-    def isBlack = false
-  }
-  case class BlackTree[+B](override val key: A,
-                           override val value: B,
-                           override val left: Tree[B],
-                           override val right: Tree[B]) extends NonEmpty[B] {
-    def isBlack = true
-  }
-}
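
With the deprecated RedBlack base class removed above, TreeMap and TreeSet remain the supported sorted collections, backed by the internal RedBlackTree object in the next file. A brief reminder sketch of the replacement API (the demo object name is illustrative):

    import scala.collection.immutable.{ TreeMap, TreeSet }

    object SortedCollectionsDemo extends App {
      val m = TreeMap(3 -> "c", 1 -> "a", 2 -> "b")
      println(m.firstKey)    // 1: keys are kept in order
      println(m.range(1, 3)) // entries for keys 1 and 2

      val s = TreeSet(5, 1, 3)
      println(s.from(2))     // elements 3 and 5
    }
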
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala
index 0254e9c..0dad106 100644
--- a/src/library/scala/collection/immutable/RedBlackTree.scala
+++ b/src/library/scala/collection/immutable/RedBlackTree.scala
@@ -18,19 +18,19 @@ import scala.annotation.meta.getter
 /** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`.
  *
  *  Implementation note: since efficiency is important for data structures this implementation
- *  uses <code>null</code> to represent empty trees. This also means pattern matching cannot
+ *  uses `null` to represent empty trees. This also means pattern matching cannot
  *  easily be used. The API represented by the RedBlackTree object tries to hide these
  *  optimizations behind a reasonably clean API.
  *
  *  @since 2.10
  */
-private[immutable]
+private[collection]
 object RedBlackTree {
 
   def isEmpty(tree: Tree[_, _]): Boolean = tree eq null
 
-  def contains[A](tree: Tree[A, _], x: A)(implicit ordering: Ordering[A]): Boolean = lookup(tree, x) ne null
-  def get[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Option[B] = lookup(tree, x) match {
+  def contains[A: Ordering](tree: Tree[A, _], x: A): Boolean = lookup(tree, x) ne null
+  def get[A: Ordering, B](tree: Tree[A, B], x: A): Option[B] = lookup(tree, x) match {
     case null => None
     case tree => Some(tree.value)
   }
@@ -44,8 +44,27 @@ object RedBlackTree {
   }
 
   def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count
-  def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v, overwrite))
-  def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k))
+  /**
+   * Count all the nodes with keys greater than or equal to the lower bound and less than the upper bound.
+   * The two bounds are optional.
+   */
+  def countInRange[A](tree: Tree[A, _], from: Option[A], to:Option[A])(implicit ordering: Ordering[A]) : Int =
+    if (tree eq null) 0 else
+    (from, to) match {
+      // with no bounds use this node's count
+      case (None, None) => tree.count
+      // if node is less than the lower bound, try the tree on the right, it might be in range
+      case (Some(lb), _) if ordering.lt(tree.key, lb) => countInRange(tree.right, from, to)
+      // if node is greater than or equal to the upper bound, try the tree on the left, it might be in range
+      case (_, Some(ub)) if ordering.gteq(tree.key, ub) => countInRange(tree.left, from, to)
+      // node is in range so the tree on the left will all be less than the upper bound and the tree on the
+      // right will all be greater than or equal to the lower bound. So 1 for this node plus
+      // count the subtrees by stripping off the bounds that we don't need any more
+      case _ => 1 + countInRange(tree.left, from, None) + countInRange(tree.right, None, to)
+
+    }
+  def update[A: Ordering, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite))
+  def delete[A: Ordering, B](tree: Tree[A, B], k: A): Tree[A, B] = blacken(del(tree, k))
   def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match {
     case (Some(from), Some(until)) => this.range(tree, from, until)
     case (Some(from), None)        => this.from(tree, from)
@@ -74,20 +93,26 @@ object RedBlackTree {
     result
   }
 
-  def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = if (tree ne null) {
-    if (tree.left ne null) foreach(tree.left, f)
+
+  def foreach[A,B,U](tree:Tree[A,B], f:((A,B)) => U):Unit = if (tree ne null) _foreach(tree,f)
+
+  private[this] def _foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U) {
+    if (tree.left ne null) _foreach(tree.left, f)
     f((tree.key, tree.value))
-    if (tree.right ne null) foreach(tree.right, f)
+    if (tree.right ne null) _foreach(tree.right, f)
   }
-  def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = if (tree ne null) {
-    if (tree.left ne null) foreachKey(tree.left, f)
-    f(tree.key)
-    if (tree.right ne null) foreachKey(tree.right, f)
+
+  def foreachKey[A, U](tree:Tree[A,_], f: A => U):Unit = if (tree ne null) _foreachKey(tree,f)
+
+  private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U) {
+    if (tree.left ne null) _foreachKey(tree.left, f)
+    f((tree.key))
+    if (tree.right ne null) _foreachKey(tree.right, f)
   }
 
-  def iterator[A, B](tree: Tree[A, B]): Iterator[(A, B)] = new EntriesIterator(tree)
-  def keysIterator[A, _](tree: Tree[A, _]): Iterator[A] = new KeysIterator(tree)
-  def valuesIterator[_, B](tree: Tree[_, B]): Iterator[B] = new ValuesIterator(tree)
+  def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start)
+  def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None): Iterator[A] = new KeysIterator(tree, start)
+  def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start)
 
   @tailrec
   def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
@@ -227,7 +252,7 @@ object RedBlackTree {
     if (ordering.lt(tree.key, from)) return doFrom(tree.right, from)
     val newLeft = doFrom(tree.left, from)
     if (newLeft eq tree.left) tree
-    else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false)
+    else if (newLeft eq null) upd(tree.right, tree.key, tree.value, overwrite = false)
     else rebalance(tree, newLeft, tree.right)
   }
   private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -235,7 +260,7 @@ object RedBlackTree {
     if (ordering.lt(to, tree.key)) return doTo(tree.left, to)
     val newRight = doTo(tree.right, to)
     if (newRight eq tree.right) tree
-    else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
+    else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false)
     else rebalance(tree, tree.left, newRight)
   }
   private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -243,18 +268,18 @@ object RedBlackTree {
     if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until)
     val newRight = doUntil(tree.right, until)
     if (newRight eq tree.right) tree
-    else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
+    else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false)
     else rebalance(tree, tree.left, newRight)
   }
   private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
     if (tree eq null) return null
-    if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until);
-    if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until);
+    if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until)
+    if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until)
     val newLeft = doFrom(tree.left, from)
     val newRight = doUntil(tree.right, until)
     if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
-    else if (newLeft eq null) upd(newRight, tree.key, tree.value, false);
-    else if (newRight eq null) upd(newLeft, tree.key, tree.value, false);
+    else if (newLeft eq null) upd(newRight, tree.key, tree.value, overwrite = false)
+    else if (newRight eq null) upd(newLeft, tree.key, tree.value, overwrite = false)
     else rebalance(tree, newLeft, newRight)
   }
 
@@ -265,7 +290,7 @@ object RedBlackTree {
     if (n > count) return doDrop(tree.right, n - count - 1)
     val newLeft = doDrop(tree.left, n)
     if (newLeft eq tree.left) tree
-    else if (newLeft eq null) updNth(tree.right, n - count - 1, tree.key, tree.value, false)
+    else if (newLeft eq null) updNth(tree.right, n - count - 1, tree.key, tree.value, overwrite = false)
     else rebalance(tree, newLeft, tree.right)
   }
   private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
@@ -275,7 +300,7 @@ object RedBlackTree {
     if (n <= count) return doTake(tree.left, n)
     val newRight = doTake(tree.right, n - count - 1)
     if (newRight eq tree.right) tree
-    else if (newRight eq null) updNth(tree.left, n, tree.key, tree.value, false)
+    else if (newRight eq null) updNth(tree.left, n, tree.key, tree.value, overwrite = false)
     else rebalance(tree, tree.left, newRight)
   }
   private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = {
@@ -286,8 +311,8 @@ object RedBlackTree {
     val newLeft = doDrop(tree.left, from)
     val newRight = doTake(tree.right, until - count - 1)
     if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
-    else if (newLeft eq null) updNth(newRight, from - count - 1, tree.key, tree.value, false)
-    else if (newRight eq null) updNth(newLeft, until, tree.key, tree.value, false)
+    else if (newLeft eq null) updNth(newRight, from - count - 1, tree.key, tree.value, overwrite = false)
+    else if (newRight eq null) updNth(newLeft, until, tree.key, tree.value, overwrite = false)
     else rebalance(tree, newLeft, newRight)
   }
 
@@ -300,54 +325,56 @@ object RedBlackTree {
   // whether the zipper was traversed left-most or right-most.
 
   // If the trees were balanced, returns an empty zipper
-  private[this] def compareDepth[A, B](left: Tree[A, B], right: Tree[A, B]): (List[Tree[A, B]], Boolean, Boolean, Int) = {
+  private[this] def compareDepth[A, B](left: Tree[A, B], right: Tree[A, B]): (NList[Tree[A, B]], Boolean, Boolean, Int) = {
+    import NList.cons
     // Once a side is found to be deeper, unzip it to the bottom
-    def unzip(zipper: List[Tree[A, B]], leftMost: Boolean): List[Tree[A, B]] = {
+    def unzip(zipper: NList[Tree[A, B]], leftMost: Boolean): NList[Tree[A, B]] = {
       val next = if (leftMost) zipper.head.left else zipper.head.right
-      next match {
-        case null => zipper
-        case node => unzip(node :: zipper, leftMost)
-      }
+      if (next eq null) zipper
+      else unzip(cons(next, zipper), leftMost)
     }
 
     // Unzip left tree on the rightmost side and right tree on the leftmost side until one is
     // found to be deeper, or the bottom is reached
     def unzipBoth(left: Tree[A, B],
                   right: Tree[A, B],
-                  leftZipper: List[Tree[A, B]],
-                  rightZipper: List[Tree[A, B]],
-                  smallerDepth: Int): (List[Tree[A, B]], Boolean, Boolean, Int) = {
+                  leftZipper: NList[Tree[A, B]],
+                  rightZipper: NList[Tree[A, B]],
+                  smallerDepth: Int): (NList[Tree[A, B]], Boolean, Boolean, Int) = {
       if (isBlackTree(left) && isBlackTree(right)) {
-        unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth + 1)
+        unzipBoth(left.right, right.left, cons(left, leftZipper), cons(right, rightZipper), smallerDepth + 1)
       } else if (isRedTree(left) && isRedTree(right)) {
-        unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth)
+        unzipBoth(left.right, right.left, cons(left, leftZipper), cons(right, rightZipper), smallerDepth)
       } else if (isRedTree(right)) {
-        unzipBoth(left, right.left, leftZipper, right :: rightZipper, smallerDepth)
+        unzipBoth(left, right.left, leftZipper, cons(right, rightZipper), smallerDepth)
       } else if (isRedTree(left)) {
-        unzipBoth(left.right, right, left :: leftZipper, rightZipper, smallerDepth)
+        unzipBoth(left.right, right, cons(left, leftZipper), rightZipper, smallerDepth)
       } else if ((left eq null) && (right eq null)) {
-        (Nil, true, false, smallerDepth)
+        (null, true, false, smallerDepth)
       } else if ((left eq null) && isBlackTree(right)) {
         val leftMost = true
-        (unzip(right :: rightZipper, leftMost), false, leftMost, smallerDepth)
+        (unzip(cons(right, rightZipper), leftMost), false, leftMost, smallerDepth)
       } else if (isBlackTree(left) && (right eq null)) {
         val leftMost = false
-        (unzip(left :: leftZipper, leftMost), false, leftMost, smallerDepth)
+        (unzip(cons(left, leftZipper), leftMost), false, leftMost, smallerDepth)
       } else {
         sys.error("unmatched trees in unzip: " + left + ", " + right)
       }
     }
-    unzipBoth(left, right, Nil, Nil, 0)
+    unzipBoth(left, right, null, null, 0)
   }
 
   private[this] def rebalance[A, B](tree: Tree[A, B], newLeft: Tree[A, B], newRight: Tree[A, B]) = {
     // This is like drop(n-1), but only counting black nodes
-    def  findDepth(zipper: List[Tree[A, B]], depth: Int): List[Tree[A, B]] = zipper match {
-      case head :: tail if isBlackTree(head) =>
-        if (depth == 1) zipper else findDepth(tail, depth - 1)
-      case _ :: tail => findDepth(tail, depth)
-      case Nil => sys.error("Defect: unexpected empty zipper while computing range")
-    }
+    @tailrec
+    def  findDepth(zipper: NList[Tree[A, B]], depth: Int): NList[Tree[A, B]] =
+      if (zipper eq null) {
+        sys.error("Defect: unexpected empty zipper while computing range")
+      } else if (isBlackTree(zipper.head)) {
+        if (depth == 1) zipper else findDepth(zipper.tail, depth - 1)
+      } else {
+        findDepth(zipper.tail, depth)
+      }
 
     // Blackening the smaller tree avoids balancing problems on union;
     // this can't be done later, though, or it would change the result of compareDepth
@@ -364,7 +391,7 @@ object RedBlackTree {
       } else {
         RedTree(tree.key, tree.value, zipFrom.head, blkNewRight)
       }
-      val zippedTree = zipFrom.tail.foldLeft(union: Tree[A, B]) { (tree, node) =>
+      val zippedTree = NList.foldLeft(zipFrom.tail, union: Tree[A, B]) { (tree, node) =>
         if (leftMost)
           balanceLeft(isBlackTree(node), node.key, node.value, tree, node.right)
         else
@@ -374,6 +401,25 @@ object RedBlackTree {
     }
   }
 
+  // Null-optimized list implementation for tree rebalancing. null represents Nil.
+  private[this] final class NList[A](val head: A, val tail: NList[A])
+
+  private[this] final object NList {
+
+    def cons[B](x: B, xs: NList[B]): NList[B] = new NList(x, xs)
+
+    def foldLeft[A, B](xs: NList[A], z: B)(f: (B, A) => B): B = {
+      var acc = z
+      var these = xs
+      while (these ne null) {
+        acc = f(acc, these.head)
+        these = these.tail
+      }
+      acc
+    }
+
+  }
+
   /*
    * Forcing direct fields access using the @inline annotation helps speed up
    * various operations (especially smallest/greatest and update/delete).
@@ -419,32 +465,28 @@ object RedBlackTree {
     def unapply[A, B](t: BlackTree[A, B]) = Some((t.key, t.value, t.left, t.right))
   }
 
-  private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B]) extends Iterator[R] {
+  private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B], start: Option[A])(implicit ordering: Ordering[A]) extends Iterator[R] {
     protected[this] def nextResult(tree: Tree[A, B]): R
 
-    override def hasNext: Boolean = next ne null
+    override def hasNext: Boolean = lookahead ne null
 
-    override def next: R = next match {
+    override def next: R = lookahead match {
       case null =>
         throw new NoSuchElementException("next on empty iterator")
       case tree =>
-        next = findNext(tree.right)
+        lookahead = findLeftMostOrPopOnEmpty(goRight(tree))
         nextResult(tree)
     }
 
     @tailrec
-    private[this] def findNext(tree: Tree[A, B]): Tree[A, B] = {
-      if (tree eq null) popPath()
+    private[this] def findLeftMostOrPopOnEmpty(tree: Tree[A, B]): Tree[A, B] =
+      if (tree eq null) popNext()
       else if (tree.left eq null) tree
-      else {
-        pushPath(tree)
-        findNext(tree.left)
-      }
-    }
+      else findLeftMostOrPopOnEmpty(goLeft(tree))
 
-    private[this] def pushPath(tree: Tree[A, B]) {
+    private[this] def pushNext(tree: Tree[A, B]) {
       try {
-        path(index) = tree
+        stackOfNexts(index) = tree
         index += 1
       } catch {
         case _: ArrayIndexOutOfBoundsException =>
@@ -456,17 +498,17 @@ object RedBlackTree {
            * An exception handler is used instead of an if-condition to optimize the normal path.
            * This makes a large difference in iteration speed!
            */
-          assert(index >= path.length)
-          path :+= null
-          pushPath(tree)
+          assert(index >= stackOfNexts.length)
+          stackOfNexts :+= null
+          pushNext(tree)
       }
     }
-    private[this] def popPath(): Tree[A, B] = if (index == 0) null else {
+    private[this] def popNext(): Tree[A, B] = if (index == 0) null else {
       index -= 1
-      path(index)
+      stackOfNexts(index)
     }
 
-    private[this] var path = if (tree eq null) null else {
+    private[this] var stackOfNexts = if (root eq null) null else {
       /*
        * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5]
        * the maximum height of a red-black tree is 2*log_2(n + 2) - 2.
@@ -475,22 +517,45 @@ object RedBlackTree {
        *
        * We also don't store the deepest nodes in the path so the maximum path length is further reduced by one.
        */
-      val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(tree.count + 2 - 1)) - 2 - 1
+      val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(root.count + 2 - 1)) - 2 - 1
       new Array[Tree[A, B]](maximumHeight)
     }
     private[this] var index = 0
-    private[this] var next: Tree[A, B] = findNext(tree)
+    private[this] var lookahead: Tree[A, B] = start map startFrom getOrElse findLeftMostOrPopOnEmpty(root)
+
+    /**
+     * Find the leftmost subtree whose key is equal to the given key, or if no such thing,
+     * the leftmost subtree with the key that would be "next" after it according
+     * to the ordering. Along the way build up the iterator's path stack so that "next"
+     * functionality works.
+     */
+    private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else {
+      @tailrec def find(tree: Tree[A, B]): Tree[A, B] =
+        if (tree eq null) popNext()
+        else find(
+          if (ordering.lteq(key, tree.key)) goLeft(tree)
+          else goRight(tree)
+        )
+      find(root)
+    }
+
+    private[this] def goLeft(tree: Tree[A, B]) = {
+      pushNext(tree)
+      tree.left
+    }
+
+    private[this] def goRight(tree: Tree[A, B]) = tree.right
   }
 
-  private[this] class EntriesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, (A, B)](tree) {
+  private[this] class EntriesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, (A, B)](tree, focus) {
     override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value)
   }
 
-  private[this] class KeysIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, A](tree) {
+  private[this] class KeysIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, A](tree, focus) {
     override def nextResult(tree: Tree[A, B]) = tree.key
   }
 
-  private[this] class ValuesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, B](tree) {
+  private[this] class ValuesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, B](tree, focus) {
     override def nextResult(tree: Tree[A, B]) = tree.value
   }
 }
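
For illustration only (not part of the patch): a minimal standalone sketch of the null-terminated list idea behind NList above, in which null plays the role of Nil so the rebalancing zipper avoids allocating ordinary List cells. The names NListSketch, NL, cons and foldLeft exist only in this sketch.

    object NListSketch {
      final class NL[A](val head: A, val tail: NL[A])
      def cons[A](x: A, xs: NL[A]): NL[A] = new NL(x, xs)
      def foldLeft[A, B](xs: NL[A], z: B)(f: (B, A) => B): B = {
        var acc   = z
        var these = xs
        while (these ne null) {        // null is the list terminator ("Nil")
          acc   = f(acc, these.head)
          these = these.tail
        }
        acc
      }
      val zipper = cons(1, cons(2, cons(3, null)))   // 1 :: 2 :: 3 :: "Nil"
      val sum    = foldLeft(zipper, 0)(_ + _)        // 6
    }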
diff --git a/src/library/scala/collection/immutable/Seq.scala b/src/library/scala/collection/immutable/Seq.scala
index 14610ae..38855ca 100644
--- a/src/library/scala/collection/immutable/Seq.scala
+++ b/src/library/scala/collection/immutable/Seq.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index 8433c2b..0fbf794 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -32,7 +33,15 @@ trait Set[A] extends Iterable[A]
                 with Parallelizable[A, ParSet[A]]
 {
   override def companion: GenericCompanion[Set] = Set
+  
+  
+  /** Returns this $coll as an immutable map.
+   *  
+   *  A new map will not be built; lazy collections will stay lazy.
+   */
+  @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0")
   override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
+  
   override def seq: Set[A] = this
   protected override def parCombiner = ParSet.newCombiner[A] // if `immutable.SetLike` gets introduced, please move this there!
 }
@@ -44,8 +53,7 @@ trait Set[A] extends Iterable[A]
 object Set extends ImmutableSetFactory[Set] {
   /** $setCanBuildFromInfo */
   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = setCanBuildFrom[A]
-  override def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]]
-
+  
   /** An optimized representation for immutable empty sets */
   private object EmptySet extends AbstractSet[Any] with Set[Any] with Serializable {
     override def size: Int = 0
@@ -55,6 +63,7 @@ object Set extends ImmutableSetFactory[Set] {
     def iterator: Iterator[Any] = Iterator.empty
     override def foreach[U](f: Any =>  U): Unit = {}
   }
+  private[collection] def emptyInstance: Set[Any] = EmptySet
 
   /** An optimized representation for immutable sets of size 1 */
   @SerialVersionUID(1233385750652442003L)
@@ -73,6 +82,16 @@ object Set extends ImmutableSetFactory[Set] {
     override def foreach[U](f: A =>  U): Unit = {
       f(elem1)
     }
+    override def exists(f: A => Boolean): Boolean = {
+      f(elem1)
+    }
+    override def forall(f: A => Boolean): Boolean = {
+      f(elem1)
+    }
+    override def find(f: A => Boolean): Option[A] = {
+      if (f(elem1)) Some(elem1)
+      else None
+    }
   }
 
   /** An optimized representation for immutable sets of size 2 */
@@ -93,6 +112,17 @@ object Set extends ImmutableSetFactory[Set] {
     override def foreach[U](f: A =>  U): Unit = {
       f(elem1); f(elem2)
     }
+    override def exists(f: A => Boolean): Boolean = {
+      f(elem1) || f(elem2)
+    }
+    override def forall(f: A => Boolean): Boolean = {
+      f(elem1) && f(elem2)
+    }
+    override def find(f: A => Boolean): Option[A] = {
+      if (f(elem1)) Some(elem1)
+      else if (f(elem2)) Some(elem2)
+      else None
+    }
   }
 
   /** An optimized representation for immutable sets of size 3 */
@@ -114,6 +144,18 @@ object Set extends ImmutableSetFactory[Set] {
     override def foreach[U](f: A =>  U): Unit = {
       f(elem1); f(elem2); f(elem3)
     }
+    override def exists(f: A => Boolean): Boolean = {
+      f(elem1) || f(elem2) || f(elem3)
+    }
+    override def forall(f: A => Boolean): Boolean = {
+      f(elem1) && f(elem2) && f(elem3)
+    }
+    override def find(f: A => Boolean): Option[A] = {
+      if (f(elem1)) Some(elem1)
+      else if (f(elem2)) Some(elem2)
+      else if (f(elem3)) Some(elem3)
+      else None
+    }
   }
 
   /** An optimized representation for immutable sets of size 4 */
@@ -136,6 +178,19 @@ object Set extends ImmutableSetFactory[Set] {
     override def foreach[U](f: A =>  U): Unit = {
       f(elem1); f(elem2); f(elem3); f(elem4)
     }
+    override def exists(f: A => Boolean): Boolean = {
+      f(elem1) || f(elem2) || f(elem3) || f(elem4)
+    }
+    override def forall(f: A => Boolean): Boolean = {
+      f(elem1) && f(elem2) && f(elem3) && f(elem4)
+    }
+    override def find(f: A => Boolean): Option[A] = {
+      if (f(elem1)) Some(elem1)
+      else if (f(elem2)) Some(elem2)
+      else if (f(elem3)) Some(elem3)
+      else if (f(elem4)) Some(elem4)
+      else None
+    }
   }
 }
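
As a quick illustration (not part of the patch), the small-set overrides above mean that calls like the following run as a handful of direct element checks, with no iterator allocated; the results are the same as before the change.

    val s = Set("a", "b", "c")          // a literal of this size uses the optimized Set3 above
    s.exists(_ == "b")                  // true
    s.forall(_.length == 1)             // true
    s.find(_.startsWith("c"))           // Some("c")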
 
diff --git a/src/library/scala/collection/immutable/SetProxy.scala b/src/library/scala/collection/immutable/SetProxy.scala
index 06c6843..d505185 100644
--- a/src/library/scala/collection/immutable/SetProxy.scala
+++ b/src/library/scala/collection/immutable/SetProxy.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 /** This is a simple wrapper class for <a href="Set.html"
@@ -21,6 +22,7 @@ package immutable
  *
  *  @since 2.8
  */
+ at deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
 trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] {
   override def repr = this
   private def newProxy[B >: A](newSelf: Set[B]): SetProxy[B] =
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index eb04231..f149355 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -82,11 +83,17 @@ self =>
   override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
     implicit def ordering: Ordering[A] = self.ordering
     override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
+    override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)}
+    override def keysIteratorFrom(start : A) = self keysIteratorFrom start filter p
+    override def valuesIteratorFrom(start : A) = self iteratorFrom start collect {case (k,v) if p(k) => v}
   }
 
   override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
     implicit def ordering: Ordering[A] = self.ordering
     override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
+    override def iteratorFrom(start: A) = self iteratorFrom start map {case (k, v) => (k, f(v))}
+    override def keysIteratorFrom(start : A) = self keysIteratorFrom start
+    override def valuesIteratorFrom(start : A) = self valuesIteratorFrom start map f
   }
 
 }
@@ -106,13 +113,13 @@ object SortedMap extends ImmutableSortedMapFactory[SortedMap] {
       val b = SortedMap.newBuilder[A, B1]
       b ++= this
       b += ((kv._1, kv._2))
-      b.result
+      b.result()
     }
 
     override def - (key: A): SortedMap[A, B] = {
       val b = newBuilder
       for (kv <- this; if kv._1 != key) b += kv
-      b.result
+      b.result()
     }
   }
 }
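
A small usage sketch (not part of the patch) of the iteratorFrom overrides added to the filterKeys and mapValues views above; it assumes only the members shown in this hunk.

    import scala.collection.immutable.SortedMap

    val m = SortedMap(1 -> "a", 2 -> "b", 3 -> "c", 4 -> "d")
    m.filterKeys(_ % 2 == 0).iteratorFrom(3).toList          // List((4,d)) - keys >= 3 that pass the filter
    m.mapValues(_.toUpperCase).valuesIteratorFrom(3).toList  // List(C, D)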
diff --git a/src/library/scala/collection/immutable/SortedSet.scala b/src/library/scala/collection/immutable/SortedSet.scala
index 3f75d50..4a8859a 100644
--- a/src/library/scala/collection/immutable/SortedSet.scala
+++ b/src/library/scala/collection/immutable/SortedSet.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala
index 357e9a1..b77b16f 100644
--- a/src/library/scala/collection/immutable/Stack.scala
+++ b/src/library/scala/collection/immutable/Stack.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -45,6 +46,7 @@ object Stack extends SeqFactory[Stack] {
  *  @define willNotTerminateInf
  */
 @SerialVersionUID(1976480595012942526L)
+ at deprecated("Stack is an inelegant and potentially poorly-performing wrapper around List.  Use List instead: stack push x becomes x :: list; stack.pop is list.tail.", "2.11.0")
 class Stack[+A] protected (protected val elems: List[A])
                  extends AbstractSeq[A]
                     with LinearSeq[A]
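
The migration suggested by the deprecation message above, as a minimal sketch (not part of the patch):

    import scala.collection.immutable.Stack

    val s  = Stack(2, 3)
    val s2 = s push 1                   // deprecated: Stack is a thin wrapper around List
    (s2.top, s2.pop)                    // (1, Stack(2, 3))

    val l  = List(2, 3)                 // the suggested replacement
    val l2 = 1 :: l                     // "push"
    (l2.head, l2.tail)                  // (1, List(2, 3)) - "top" and "pop"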
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 5bb4ef5..60de147 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -181,11 +182,14 @@ import scala.language.implicitConversions
  *  @define coll stream
  *  @define orderDependent
  *  @define orderDependentFold
+ *  @define willTerminateInf Note: lazily evaluated; will terminate for infinite-sized collections.
  */
+ at deprecatedInheritance("This class will be sealed.", "2.11.0")
 abstract class Stream[+A] extends AbstractSeq[A]
                              with LinearSeq[A]
                              with GenericTraversableTemplate[A, Stream]
-                             with LinearSeqOptimized[A, Stream[A]] {
+                             with LinearSeqOptimized[A, Stream[A]]
+                             with Serializable {
 self =>
   override def companion: GenericCompanion[Stream] = Stream
 
@@ -286,9 +290,8 @@ self =>
     len
   }
 
-  /** It's an imperfect world, but at least we can bottle up the
-   *  imperfection in a capsule.
-   */
+  // It's an imperfect world, but at least we can bottle up the
+  // imperfection in a capsule.
   @inline private def asThat[That](x: AnyRef): That     = x.asInstanceOf[That]
   @inline private def asStream[B](x: AnyRef): Stream[B] = x.asInstanceOf[Stream[B]]
   @inline private def isStreamBuilder[B, That](bf: CanBuildFrom[Stream[A], B, That]) =
@@ -385,12 +388,17 @@ self =>
       // 1) stackoverflows (could be achieved with tailrec, too)
       // 2) out of memory errors for big streams (`this` reference can be eliminated from the stack)
       var rest: Stream[A] = this
-      while (rest.nonEmpty && !pf.isDefinedAt(rest.head)) rest = rest.tail
+
+      // Avoids calling both `pf.isDefined` and `pf.apply`.
+      var newHead: B = null.asInstanceOf[B]
+      val runWith = pf.runWith((b: B) => newHead = b)
+
+      while (rest.nonEmpty && !runWith(rest.head)) rest = rest.tail
 
       //  without the call to the companion object, a thunk is created for the tail of the new stream,
       //  and the closure of the thunk will reference `this`
       if (rest.isEmpty) Stream.Empty.asInstanceOf[That]
-      else Stream.collectedTail(rest, pf, bf).asInstanceOf[That]
+      else Stream.collectedTail(newHead, rest, pf, bf).asInstanceOf[That]
     }
   }
 
@@ -725,10 +733,15 @@ self =>
    * // produces: "5, 6, 7, 8, 9"
    * }}}
    */
-  override def take(n: Int): Stream[A] =
+  override def take(n: Int): Stream[A] = (
+    // Note that the n == 1 condition appears redundant but is not.
+    // It prevents "tail" from being referenced (and its head being evaluated)
+    // when obtaining the last element of the result. Such are the challenges
+    // of working with a lazy-but-not-really sequence.
     if (n <= 0 || isEmpty) Stream.empty
     else if (n == 1) cons(head, Stream.empty)
     else cons(head, tail take n-1)
+  )
 
   @tailrec final override def drop(n: Int): Stream[A] =
     if (n <= 0 || isEmpty) this
@@ -784,8 +797,23 @@ self =>
     these
   }
 
-  // there's nothing we can do about dropRight, so we just keep the definition
-  // in LinearSeq
+  /**
+   * @inheritdoc
+   * $willTerminateInf
+   */
+  override def dropRight(n: Int): Stream[A] = {
+    // We make dropRight work for possibly infinite streams by carrying
+    // a buffer of the dropped size. As long as the buffer is full and the
+    // rest is non-empty, we can feed elements off the buffer head.  When
+    // the rest becomes empty, the full buffer is the dropped elements.
+    def advance(stub0: List[A], stub1: List[A], rest: Stream[A]): Stream[A] = {
+      if (rest.isEmpty) Stream.empty
+      else if (stub0.isEmpty) advance(stub1.reverse, Nil, rest)
+      else cons(stub0.head, advance(stub0.tail, rest.head :: stub1, rest.tail))
+    }
+    if (n <= 0) this
+    else advance((this take n).toList, Nil, this drop n)
+  }
 
   /** Returns the longest prefix of this `Stream` whose elements satisfy the
    * predicate `p`.
@@ -927,19 +955,21 @@ self =>
    * `Stream`.
    * @example {{{
    * val sov: Stream[Vector[Int]] = Vector(0) #:: Vector(0, 0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 }
-   * sov flatten take 10 mkString ", "
+   * sov.flatten take 10 mkString ", "
    * // produces: "0, 0, 0, 0, 0, 0, 0, 0, 0, 0"
    * }}}
    */
   override def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]): Stream[B] = {
-    def flatten1(t: Traversable[B]): Stream[B] =
-      if (!t.isEmpty)
-        cons(t.head, flatten1(t.tail))
-      else
-        tail.flatten
-
-    if (isEmpty) Stream.empty
-    else flatten1(asTraversable(head).seq.toTraversable)
+    var st: Stream[A] = this
+    while (st.nonEmpty) {
+      val h = asTraversable(st.head)
+      if (h.isEmpty) {
+        st = st.tail
+      } else {
+        return h.toStream #::: st.tail.flatten
+      }
+    }
+    Stream.empty
   }
 
   override def view = new StreamView[A, Stream[A]] {
@@ -973,7 +1003,7 @@ final class StreamIterator[+A] private() extends AbstractIterator[A] with Iterat
 
   def hasNext: Boolean = these.v.nonEmpty
   def next(): A =
-    if (isEmpty) Iterator.empty.next
+    if (isEmpty) Iterator.empty.next()
     else {
       val cur    = these.v
       val result = cur.head
@@ -1023,7 +1053,7 @@ object Stream extends SeqFactory[Stream] {
     def result: Stream[A] = parts.toStream flatMap (_.toStream)
   }
 
-  object Empty extends Stream[Nothing] with Serializable {
+  object Empty extends Stream[Nothing] {
     override def isEmpty = true
     override def head = throw new NoSuchElementException("head of empty stream")
     override def tail = throw new UnsupportedOperationException("tail of empty stream")
@@ -1074,15 +1104,19 @@ object Stream extends SeqFactory[Stream] {
 
   /** A lazy cons cell, from which streams are built. */
   @SerialVersionUID(-602202424901551803L)
-  final class Cons[+A](hd: A, tl: => Stream[A]) extends Stream[A] with Serializable {
+  final class Cons[+A](hd: A, tl: => Stream[A]) extends Stream[A] {
     override def isEmpty = false
     override def head = hd
     @volatile private[this] var tlVal: Stream[A] = _
-    def tailDefined: Boolean = tlVal ne null
+    @volatile private[this] var tlGen = tl _
+    def tailDefined: Boolean = tlGen eq null
     override def tail: Stream[A] = {
       if (!tailDefined)
         synchronized {
-          if (!tailDefined) tlVal = tl
+          if (!tailDefined) {
+            tlVal = tlGen()
+            tlGen = null
+          }
         }
 
       tlVal
@@ -1149,8 +1183,8 @@ object Stream extends SeqFactory[Stream] {
     cons(stream.head, stream.tail filter p)
   }
 
-  private[immutable] def collectedTail[A, B, That](stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = {
-    cons(pf(stream.head), stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]])
+  private[immutable] def collectedTail[A, B, That](head: B, stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = {
+    cons(head, stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]])
   }
 }
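
A brief usage sketch (not part of the patch) of the dropRight behaviour documented above: because the dropped tail is carried in a small buffer, the call below terminates even though the source stream is infinite.

    val nat = Stream.from(1)                 // 1, 2, 3, ...
    nat.dropRight(2).take(3).toList          // List(1, 2, 3); each element is emitted once two more have been seen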
 
diff --git a/src/library/scala/collection/immutable/StreamView.scala b/src/library/scala/collection/immutable/StreamView.scala
index 5a24b77..127ed76 100644
--- a/src/library/scala/collection/immutable/StreamView.scala
+++ b/src/library/scala/collection/immutable/StreamView.scala
@@ -1,4 +1,5 @@
-package scala.collection
+package scala
+package collection
 package immutable
 
 trait StreamView[+A, +Coll] extends StreamViewLike[A, Coll, StreamView[A, Coll]] { }
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index 236308d..c2eb858 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -1,4 +1,5 @@
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -70,9 +71,3 @@ extends SeqView[A, Coll]
 
   override def stringPrefix = "StreamView"
 }
-
-
-
-
-
-
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index edea89b..8e1d950 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -6,10 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package immutable
 
-import generic._
 import mutable.Builder
 import scala.util.matching.Regex
 import scala.math.ScalaNumber
@@ -19,12 +19,11 @@ import scala.reflect.ClassTag
  *  @since 2.8
  */
 object StringLike {
-
   // just statics for companion class.
-  private final val LF: Char = 0x0A
-  private final val FF: Char = 0x0C
-  private final val CR: Char = 0x0D
-  private final val SU: Char = 0x1A
+  private final val LF = 0x0A
+  private final val FF = 0x0C
+  private final val CR = 0x0D
+  private final val SU = 0x1A
 }
 
 import StringLike._
@@ -60,8 +59,8 @@ self =>
     val start = from max 0
     val end   = until min length
 
-    if (start >= end) newBuilder.result
-    else (newBuilder ++= toString.substring(start, end)).result
+    if (start >= end) newBuilder.result()
+    else (newBuilder ++= toString.substring(start, end)).result()
   }
 
   /** Return the current string concatenated `n` times.
@@ -132,6 +131,7 @@ self =>
    *  end characters, i.e. apply `.stripLineEnd` to all lines
    *  returned by `linesWithSeparators`.
    */
+  @deprecated("Use `lines` instead.","2.11.0")
   def linesIterator: Iterator[String] =
     linesWithSeparators map (line => new WrappedString(line).stripLineEnd)
 
@@ -165,8 +165,8 @@ self =>
    *  @return               the resulting string
    */
   def replaceAllLiterally(literal: String, replacement: String): String = {
-    val arg1 = java.util.regex.Pattern.quote(literal)
-    val arg2 = java.util.regex.Matcher.quoteReplacement(replacement)
+    val arg1 = Regex.quote(literal)
+    val arg2 = Regex.quoteReplacement(replacement)
 
     toString.replaceAll(arg1, arg2)
   }
@@ -223,12 +223,33 @@ self =>
    */
   def r(groupNames: String*): Regex = new Regex(toString, groupNames: _*)
 
+  /**
+   * @throws `java.lang.IllegalArgumentException` - If the string does not contain a parsable boolean.
+   */
   def toBoolean: Boolean = parseBoolean(toString)
+  /**
+   * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable byte.
+   */
   def toByte: Byte       = java.lang.Byte.parseByte(toString)
+  /**
+   * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable short.
+   */
   def toShort: Short     = java.lang.Short.parseShort(toString)
+  /**
+   * @throws `java.lang.NumberFormatException`  - If the string does not contain a parsable int.
+   */
   def toInt: Int         = java.lang.Integer.parseInt(toString)
+  /**
+   * @throws `java.lang.NumberFormatException`  - If the string does not contain a parsable long.
+   */
   def toLong: Long       = java.lang.Long.parseLong(toString)
+  /**
+   * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable float.
+   */
   def toFloat: Float     = java.lang.Float.parseFloat(toString)
+  /**
+   * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable double.
+   */
   def toDouble: Double   = java.lang.Double.parseDouble(toString)
 
   private def parseBoolean(s: String): Boolean =
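
Two small usage notes (not part of the patch) for the changes above: replaceAllLiterally now quotes through Regex.quote with the same literal semantics as before, and the numeric conversions throw the exceptions that the new scaladoc documents.

    "a.b.c".replaceAllLiterally(".", "[dot]")   // "a[dot]b[dot]c" - the dot is not a regex wildcard
    "42".toInt                                  // 42
    "4.2".toInt                                 // throws java.lang.NumberFormatException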
diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala
index a650d98..6737692 100644
--- a/src/library/scala/collection/immutable/StringOps.scala
+++ b/src/library/scala/collection/immutable/StringOps.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package immutable
 
 import mutable.StringBuilder
diff --git a/src/library/scala/collection/immutable/Traversable.scala b/src/library/scala/collection/immutable/Traversable.scala
index 5188343..775d635 100644
--- a/src/library/scala/collection/immutable/Traversable.scala
+++ b/src/library/scala/collection/immutable/Traversable.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 5b4db26..8cc99a5 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -45,15 +44,13 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] {
  *  @define mayNotTerminateInf
  *  @define willNotTerminateInf
  */
+ at deprecatedInheritance("The implementation details of immutable tree maps make inheriting from them unwise.", "2.11.0")
 class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Ordering[A])
   extends SortedMap[A, B]
      with SortedMapLike[A, B, TreeMap[A, B]]
      with MapLike[A, B, TreeMap[A, B]]
      with Serializable {
 
-  @deprecated("use `ordering.lt` instead", "2.10.0")
-  def isSmaller(x: A, y: A) = ordering.lt(x, y)
-
   override protected[this] def newBuilder : Builder[(A, B), TreeMap[A, B]] =
     TreeMap.newBuilder[A, B]
 
@@ -111,7 +108,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
   private[this] def countWhile(p: ((A, B)) => Boolean): Int = {
     var result = 0
     val it = iterator
-    while (it.hasNext && p(it.next)) result += 1
+    while (it.hasNext && p(it.next())) result += 1
     result
   }
   override def dropWhile(p: ((A, B)) => Boolean) = drop(countWhile(p))
@@ -131,7 +128,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
    *  @param value   the value to be associated with `key`
    *  @return        a new $coll with the updated binding
    */
-  override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value, true))
+  override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value, overwrite = true))
 
   /** Add a key/value pair to this map.
    *  @tparam   B1   type of the value of the new binding, a supertype of `B`
@@ -171,7 +168,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
    */
   def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = {
     assert(!RB.contains(tree, key))
-    new TreeMap(RB.update(tree, key, value, true))
+    new TreeMap(RB.update(tree, key, value, overwrite = true))
   }
 
   def - (key:A): TreeMap[A, B] =
@@ -192,16 +189,16 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
    *  @return the new iterator
    */
   override def iterator: Iterator[(A, B)] = RB.iterator(tree)
+  override def iteratorFrom(start: A): Iterator[(A, B)] = RB.iterator(tree, Some(start))
 
   override def keysIterator: Iterator[A] = RB.keysIterator(tree)
+  override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start))
+
   override def valuesIterator: Iterator[B] = RB.valuesIterator(tree)
+  override def valuesIteratorFrom(start: A): Iterator[B] = RB.valuesIterator(tree, Some(start))
 
   override def contains(key: A): Boolean = RB.contains(tree, key)
   override def isDefinedAt(key: A): Boolean = RB.contains(tree, key)
 
   override def foreach[U](f : ((A,B)) =>  U) = RB.foreach(tree, f)
 }
-
-
-
-
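
A short usage sketch (not part of the patch) of the new start-based iterators on TreeMap; iteration begins at the first key greater than or equal to the given start key.

    import scala.collection.immutable.TreeMap

    val m = TreeMap(1 -> "a", 3 -> "b", 5 -> "c")
    m.iteratorFrom(2).toList          // List((3,b), (5,c))
    m.keysIteratorFrom(3).toList      // List(3, 5) - the start key itself is included
    m.valuesIteratorFrom(4).toList    // List(c)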
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 4947765..681dbbd 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -48,9 +49,13 @@ object TreeSet extends ImmutableSortedSetFactory[TreeSet] {
  *  @define willNotTerminateInf
  */
 @SerialVersionUID(-5685982407650748405L)
+ at deprecatedInheritance("The implementation details of immutable tree sets make inheriting from them unwise.", "2.11.0")
 class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Ordering[A])
   extends SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable {
 
+  if (ordering eq null)
+    throw new NullPointerException("ordering must not be null")
+
   override def stringPrefix = "TreeSet"
 
   override def size = RB.count(tree)
@@ -89,16 +94,13 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
   private[this] def countWhile(p: A => Boolean): Int = {
     var result = 0
     val it = iterator
-    while (it.hasNext && p(it.next)) result += 1
+    while (it.hasNext && p(it.next())) result += 1
     result
   }
   override def dropWhile(p: A => Boolean) = drop(countWhile(p))
   override def takeWhile(p: A => Boolean) = take(countWhile(p))
   override def span(p: A => Boolean) = splitAt(countWhile(p))
 
-  @deprecated("use `ordering.lt` instead", "2.10.0")
-  def isSmaller(x: A, y: A) = compare(x,y) < 0
-
   def this()(implicit ordering: Ordering[A]) = this(null)(ordering)
 
   private def newSet(t: RB.Tree[A, Unit]) = new TreeSet[A](t)
@@ -112,7 +114,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
    *  @param elem    a new element to add.
    *  @return        a new $coll containing `elem` and all the elements of this $coll.
    */
-  def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, (), false))
+  def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, (), overwrite = false))
 
   /** A new `TreeSet` with the entry added is returned,
    *  assuming that elem is <em>not</em> in the TreeSet.
@@ -122,7 +124,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
    */
   def insert(elem: A): TreeSet[A] = {
     assert(!RB.contains(tree, elem))
-    newSet(RB.update(tree, elem, (), false))
+    newSet(RB.update(tree, elem, (), overwrite = false))
   }
 
   /** Creates a new `TreeSet` with the entry removed.
@@ -147,6 +149,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
    *  @return the new iterator
    */
   def iterator: Iterator[A] = RB.keysIterator(tree)
+  override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start))
 
   override def foreach[U](f: A =>  U) = RB.foreachKey(tree, f)
 
diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala
index b0bd253..d7335e8 100644
--- a/src/library/scala/collection/immutable/TrieIterator.scala
+++ b/src/library/scala/collection/immutable/TrieIterator.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import HashMap.{ HashTrieMap, HashMapCollision1, HashMap1 }
@@ -94,7 +95,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
   def hasNext = (subIter ne null) || depth >= 0
   def next(): T = {
     if (subIter ne null) {
-      val el = subIter.next
+      val el = subIter.next()
       if (!subIter.hasNext)
         subIter = null
       el
@@ -135,7 +136,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
     }
     else {
       subIter = m.iterator
-      next
+      next()
     }
     // The much slower version:
     //
@@ -177,7 +178,6 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
       if (depth > 0) {
         // 2) topmost comes before (is not) arrayD
         //    steal a portion of top to create a new iterator
-        val topmost = arrayStack(0)
         if (posStack(0) == arrayStack(0).length - 1) {
           // 2a) only a single entry left on top
           // this means we have to modify this iterator - pop topmost
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index bcce4a9..c7da447 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -18,21 +18,16 @@ import scala.collection.parallel.immutable.ParVector
 
 /** Companion object to the Vector class
  */
-object Vector extends SeqFactory[Vector] {
-  // left lying around for binary compatibility check
-  private[collection] class VectorReusableCBF extends GenericCanBuildFrom[Nothing] {    
-    override def apply() = newBuilder[Nothing]
-  }      
-  // left lying around for binary compatibility check
-  private val VectorReusableCBF: GenericCanBuildFrom[Nothing] = new VectorReusableCBF
-  
-  override lazy val ReusableCBF  = 
-      scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]  
+object Vector extends IndexedSeqFactory[Vector] {
   def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A]
   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] =
     ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
   private[immutable] val NIL = new Vector[Nothing](0, 0, 0)
   override def empty[A]: Vector[A] = NIL
+  
+  // Constants governing concat strategy for performance
+  private final val Log2ConcatFaster = 5
+  private final val TinyAppendFaster = 2
 }
 
 // in principle, most members should be private. however, access privileges must
@@ -64,7 +59,7 @@ object Vector extends SeqFactory[Vector] {
  *  @define mayNotTerminateInf
  *  @define willNotTerminateInf
  */
-final class Vector[+A](private[collection] val startIndex: Int, private[collection] val endIndex: Int, focus: Int)
+final class Vector[+A] private[immutable] (private[collection] val startIndex: Int, private[collection] val endIndex: Int, focus: Int)
 extends AbstractSeq[A]
    with IndexedSeq[A]
    with GenericTraversableTemplate[A, Vector]
@@ -113,7 +108,7 @@ override def companion: GenericCompanion[Vector] = Vector
       if (0 < i) {
         i -= 1
         self(i)
-      } else Iterator.empty.next
+      } else Iterator.empty.next()
   }
 
   // TODO: reverse
@@ -148,7 +143,7 @@ override def companion: GenericCompanion[Vector] = Vector
     if (bf eq IndexedSeq.ReusableCBF) appendFront(elem).asInstanceOf[That] // just ignore bf
     else super.+:(elem)(bf)
 
-  override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = 
+  override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
     if (bf eq IndexedSeq.ReusableCBF) appendBack(elem).asInstanceOf[That] // just ignore bf
     else super.:+(elem)(bf)
 
@@ -214,10 +209,29 @@ override def companion: GenericCompanion[Vector] = Vector
   override /*IterableLike*/ def splitAt(n: Int): (Vector[A], Vector[A]) = (take(n), drop(n))
 
 
-  // concat (stub)
-
+  // concat (suboptimal but avoids worst performance gotchas)
   override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
-    super.++(that.seq)
+    if (bf eq IndexedSeq.ReusableCBF) {
+      import Vector.{Log2ConcatFaster, TinyAppendFaster}
+      if (that.isEmpty) this.asInstanceOf[That]
+      else {
+        val again = if (!that.isTraversableAgain) that.toVector else that
+        again.size match {
+          // Often it's better to append small numbers of elements (or prepend if RHS is a vector)
+          case n if n <= TinyAppendFaster || n < (this.size >> Log2ConcatFaster) => 
+            var v: Vector[B] = this
+            for (x <- again) v = v :+ x
+            v.asInstanceOf[That]
+          case n if this.size < (n >> Log2ConcatFaster) && again.isInstanceOf[Vector[_]] =>
+            var v = again.asInstanceOf[Vector[B]]
+            val ri = this.reverseIterator
+            while (ri.hasNext) v = ri.next +: v
+            v.asInstanceOf[That]
+          case _ => super.++(again)
+        }
+      }
+    }
+    else super.++(that.seq)
   }
 
 
@@ -251,8 +265,8 @@ override def companion: GenericCompanion[Vector] = Vector
 
   private[immutable] def appendFront[B>:A](value: B): Vector[B] = {
     if (endIndex != startIndex) {
-      var blockIndex = (startIndex - 1) & ~31
-      var lo = (startIndex - 1) & 31
+      val blockIndex = (startIndex - 1) & ~31
+      val lo = (startIndex - 1) & 31
 
       if (startIndex != blockIndex + 32) {
         val s = new Vector(startIndex - 1, endIndex, blockIndex)
@@ -270,7 +284,7 @@ override def companion: GenericCompanion[Vector] = Vector
         //println("----- appendFront " + value + " at " + (startIndex - 1) + " reached block start")
         if (shift != 0) {
           // case A: we can shift right on the top level
-          debug
+          debug()
           //println("shifting right by " + shiftBlocks + " at level " + (depth-1) + " (had "+freeSpace+" free space)")
 
           if (depth > 1) {
@@ -280,7 +294,7 @@ override def companion: GenericCompanion[Vector] = Vector
             s.initFrom(this)
             s.dirty = dirty
             s.shiftTopLevel(0, shiftBlocks) // shift right by n blocks
-            s.debug
+            s.debug()
             s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // maybe create pos; prepare for writing
             s.display0(lo) = value.asInstanceOf[AnyRef]
             //assert(depth == s.depth)
@@ -298,7 +312,7 @@ override def companion: GenericCompanion[Vector] = Vector
             s.shiftTopLevel(0, shiftBlocks) // shift right by n elements
             s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // prepare for writing
             s.display0(shift-1) = value.asInstanceOf[AnyRef]
-            s.debug
+            s.debug()
             s
           }
         } else if (blockIndex < 0) {
@@ -313,10 +327,10 @@ override def companion: GenericCompanion[Vector] = Vector
           val s = new Vector(startIndex - 1 + move, endIndex + move, newBlockIndex)
           s.initFrom(this)
           s.dirty = dirty
-          s.debug
+          s.debug()
           s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // could optimize: we know it will create a whole branch
           s.display0(lo) = value.asInstanceOf[AnyRef]
-          s.debug
+          s.debug()
           //assert(s.depth == depth+1)
           s
         } else {
@@ -348,8 +362,8 @@ override def companion: GenericCompanion[Vector] = Vector
 //    //println("------- append " + value)
 //    debug()
     if (endIndex != startIndex) {
-      var blockIndex = endIndex & ~31
-      var lo = endIndex & 31
+      val blockIndex = endIndex & ~31
+      val lo = endIndex & 31
 
       if (endIndex != blockIndex) {
         //println("will make writable block (from "+focus+") at: " + blockIndex)
@@ -366,7 +380,7 @@ override def companion: GenericCompanion[Vector] = Vector
         //println("----- appendBack " + value + " at " + endIndex + " reached block end")
 
         if (shift != 0) {
-          debug
+          debug()
           //println("shifting left by " + shiftBlocks + " at level " + (depth-1) + " (had "+startIndex+" free space)")
           if (depth > 1) {
             val newBlockIndex = blockIndex - shift
@@ -375,10 +389,10 @@ override def companion: GenericCompanion[Vector] = Vector
             s.initFrom(this)
             s.dirty = dirty
             s.shiftTopLevel(shiftBlocks, 0) // shift left by n blocks
-            s.debug
+            s.debug()
             s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex)
             s.display0(lo) = value.asInstanceOf[AnyRef]
-            s.debug
+            s.debug()
             //assert(depth == s.depth)
             s
           } else {
@@ -394,7 +408,7 @@ override def companion: GenericCompanion[Vector] = Vector
             s.shiftTopLevel(shiftBlocks, 0) // shift right by n elements
             s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex)
             s.display0(32 - shift) = value.asInstanceOf[AnyRef]
-            s.debug
+            s.debug()
             s
           }
         } else {
@@ -409,7 +423,7 @@ override def companion: GenericCompanion[Vector] = Vector
           //assert(s.depth == depth+1) might or might not create new level!
           if (s.depth == depth+1) {
             //println("creating new level " + s.depth + " (had "+0+" free space)")
-            s.debug
+            s.debug()
           }
           s
         }
@@ -583,9 +597,7 @@ override def companion: GenericCompanion[Vector] = Vector
   }
 
   private def dropFront0(cutIndex: Int): Vector[A] = {
-    var blockIndex = cutIndex & ~31
-    var lo = cutIndex & 31
-
+    val blockIndex = cutIndex & ~31
     val xor = cutIndex ^ (endIndex - 1)
     val d = requiredDepth(xor)
     val shift = (cutIndex & ~((1 << (5*d))-1))
@@ -615,9 +627,7 @@ override def companion: GenericCompanion[Vector] = Vector
   }
 
   private def dropBack0(cutIndex: Int): Vector[A] = {
-    var blockIndex = (cutIndex - 1) & ~31
-    var lo = ((cutIndex - 1) & 31) + 1
-
+    val blockIndex = (cutIndex - 1) & ~31
     val xor = startIndex ^ (cutIndex - 1)
     val d = requiredDepth(xor)
     val shift = (startIndex & ~((1 << (5*d))-1))
@@ -639,14 +649,13 @@ override def companion: GenericCompanion[Vector] = Vector
 }
 
 
-class VectorIterator[+A](_startIndex: Int, _endIndex: Int)
+class VectorIterator[+A](_startIndex: Int, endIndex: Int)
 extends AbstractIterator[A]
    with Iterator[A]
    with VectorPointer[A @uncheckedVariance] {
 
   private var blockIndex: Int = _startIndex & ~31
   private var lo: Int = _startIndex & 31
-  private var endIndex: Int = _endIndex
 
   private var endLo = math.min(endIndex - blockIndex, 32)
 
@@ -676,13 +685,13 @@ extends AbstractIterator[A]
     res
   }
 
-  private[collection] def remainingElementCount: Int = (_endIndex - (blockIndex + lo)) max 0
+  private[collection] def remainingElementCount: Int = (endIndex - (blockIndex + lo)) max 0
 
   /** Creates a new vector which consists of elements remaining in this iterator.
    *  Such a vector can then be split into several vectors using methods like `take` and `drop`.
    */
   private[collection] def remainingVector: Vector[A] = {
-    val v = new Vector(blockIndex + lo, _endIndex, blockIndex + lo)
+    val v = new Vector(blockIndex + lo, endIndex, blockIndex + lo)
     v.initFrom(this)
     v
   }
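
A rough sketch (not part of the patch) of the concat strategy chosen by the new ++ above, using the Log2ConcatFaster and TinyAppendFaster constants from this hunk. chooseStrategy is a hypothetical helper for illustration only; the real code additionally requires the right-hand side to be a Vector before taking the prepend branch.

    // Mirrors the size comparisons in Vector.++ above; returns a description of the chosen branch.
    def chooseStrategy(thisSize: Int, thatSize: Int): String = {
      val Log2ConcatFaster = 5
      val TinyAppendFaster = 2
      if (thatSize == 0) "return this vector unchanged"
      else if (thatSize <= TinyAppendFaster || thatSize < (thisSize >> Log2ConcatFaster))
        "append the few right-hand elements one by one (:+)"
      else if (thisSize < (thatSize >> Log2ConcatFaster))
        "prepend this vector's elements onto the larger right-hand vector (+:)"
      else
        "fall back to the generic builder-based ++"
    }

    chooseStrategy(1000000, 3)     // append a handful of elements
    chooseStrategy(3, 1000000)     // prepend onto the larger vector
    chooseStrategy(1000, 1000)     // generic ++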
diff --git a/src/library/scala/collection/immutable/WrappedString.scala b/src/library/scala/collection/immutable/WrappedString.scala
index edcab31..7592316 100644
--- a/src/library/scala/collection/immutable/WrappedString.scala
+++ b/src/library/scala/collection/immutable/WrappedString.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package immutable
 
 import generic._
@@ -28,6 +29,7 @@ import mutable.{Builder, StringBuilder}
  *  @define Coll `WrappedString`
  *  @define coll wrapped string
  */
+ at deprecatedInheritance("Inherit from StringLike instead of WrappedString.", "2.11.0")
 class WrappedString(val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] with StringLike[WrappedString] {
 
   override protected[this] def thisCollection: WrappedString = this
diff --git a/src/library/scala/collection/immutable/package.scala b/src/library/scala/collection/immutable/package.scala
deleted file mode 100644
index ed0c1b3..0000000
--- a/src/library/scala/collection/immutable/package.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.collection
-
-package immutable {
-  /** It looks like once upon a time this was used by ParRange, but
-   *  since December 2010 in r23721 it is not used by anything.  We
-   *  should not have public API traits with seductive names like
-   *  "RangeUtils" which are neither documented nor used.
-   */
-  @deprecated("this class will be removed", "2.10.0")
-  trait RangeUtils[+Repr <: RangeUtils[Repr]] {
-    def start: Int
-    def end: Int
-    def step: Int
-    def inclusive: Boolean
-    def create(_start: Int, _end: Int, _step: Int, _inclusive: Boolean): Repr
-
-    private final def inclusiveLast: Int = {
-      val size = end.toLong - start.toLong
-      (size / step.toLong * step.toLong + start.toLong).toInt
-    }
-
-    final def _last: Int = (
-      if (!inclusive) {
-        if (step == 1 || step == -1) end - step
-        else {
-          val inclast = inclusiveLast
-          if ((end.toLong - start.toLong) % step == 0) inclast - step else inclast
-        }
-      }
-      else if (step == 1 || step == -1) end
-      else inclusiveLast
-    )
-
-    final def _foreach[U](f: Int => U) = if (_length > 0) {
-      var i = start
-      val last = _last
-      while (i != last) {
-        f(i)
-        i += step
-      }
-    }
-
-    final def _length: Int = (
-      if (!inclusive) {
-        if (end > start == step > 0 && start != end) {
-          (_last.toLong - start.toLong) / step.toLong + 1
-        } else 0
-      }.toInt
-      else {
-        if (end > start == step > 0 || start == end) {
-          (_last.toLong - start.toLong) / step.toLong + 1
-        } else 0
-      }.toInt
-    )
-
-    final def _apply(idx: Int): Int = {
-      if (idx < 0 || idx >= _length) throw new IndexOutOfBoundsException(idx.toString)
-      start + idx * step
-    }
-
-    private def locationAfterN(n: Int) = (
-      if (n > 0) {
-        if (step > 0)
-          scala.math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
-        else
-          scala.math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
-      }
-      else start
-    )
-
-    final def _take(n: Int) = (
-      if (n > 0 && _length > 0)
-        create(start, locationAfterN(n), step, true)
-      else
-        create(start, start, step, false)
-    )
-
-    final def _drop(n: Int)                 = create(locationAfterN(n), end, step, inclusive)
-    final def _slice(from: Int, until: Int) = _drop(from)._take(until - from)
-  }
-}
-
-package object immutable {
-  /** Nothing left after I promoted RangeUtils to the package. */
-}
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
index 157e5da..de09bb2 100644
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -10,12 +10,11 @@ package scala
 package collection
 package mutable
 
-
 /**
- * An immutable AVL Tree implementation used by mutable.TreeSet
+ * An immutable AVL Tree implementation formerly used by mutable.TreeSet
  *
  * @author Lucien Pereira
- *
+ * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0")
  */
 private[mutable] sealed trait AVLTree[+A] extends Serializable {
   def balance: Int
@@ -65,13 +64,19 @@ private[mutable] sealed trait AVLTree[+A] extends Serializable {
   def doubleRightRotation[B >: A]: Node[B] = sys.error("Should not happen.")
 }
 
+/**
+ * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0")
+ */
 private case object Leaf extends AVLTree[Nothing] {
   override val balance: Int = 0
 
   override val depth: Int = -1
 }
 
-private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree[A]) extends AVLTree[A] {
+/**
+ * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0")
+ */
+private case class Node[A](data: A, left: AVLTree[A], right: AVLTree[A]) extends AVLTree[A] {
   override val balance: Int = right.depth - left.depth
 
   override val depth: Int = math.max(left.depth, right.depth) + 1
@@ -205,6 +210,9 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
   }
 }
 
+/**
+ * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0")
+ */
 private class AVLIterator[A](root: Node[A]) extends Iterator[A] {
   val stack = mutable.ArrayStack[Node[A]](root)
   diveLeft()
@@ -220,11 +228,11 @@ private class AVLIterator[A](root: Node[A]) extends Iterator[A] {
   private def engageRight(): Unit = {
     if (Leaf != stack.head.right) {
       val right: Node[A] = stack.head.right.asInstanceOf[Node[A]]
-      stack.pop
+      stack.pop()
       stack.push(right)
       diveLeft()
     } else
-      stack.pop
+      stack.pop()
   }
 
   override def hasNext: Boolean = !stack.isEmpty
diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala
new file mode 100644
index 0000000..47fb667
--- /dev/null
+++ b/src/library/scala/collection/mutable/AnyRefMap.scala
@@ -0,0 +1,459 @@
+package scala
+package collection
+package mutable
+
+import generic.CanBuildFrom
+
+/** This class implements mutable maps with `AnyRef` keys based on a hash table with open addressing.
+ * 
+ *  Basic map operations on single entries, including `contains` and `get`, 
+ *  are typically significantly faster with `AnyRefMap` than [[HashMap]].
+ *  Note that numbers and characters are not handled specially in AnyRefMap;
+ *  only plain `equals` and `hashCode` are used in comparisons.
+ * 
+ *  Methods that traverse or regenerate the map, including `foreach` and `map`,
+ *  are not in general faster than with `HashMap`.  The methods `foreachKey`,
+ *  `foreachValue`, `mapValuesNow`, and `transformValues` are, however, faster
+ *  than alternative ways to achieve the same functionality.
+ * 
+ *  Maps with open addressing may become less efficient at lookup after
+ *  repeated addition/removal of elements.  Although `AnyRefMap` makes a
+ *  decent attempt to remain efficient regardless,  calling `repack`
+ *  on a map that will no longer have elements removed but will be
+ *  used heavily may save both time and storage space.
+ * 
+ *  This map is not intended to contain more than 2^29^ entries (approximately
+ *  500 million).  The maximum capacity is 2^30^, but performance will degrade
+ *  rapidly as 2^30^ is approached.
+ *
+ */
+final class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initialBufferSize: Int, initBlank: Boolean)
+extends AbstractMap[K, V]
+   with Map[K, V]
+   with MapLike[K, V, AnyRefMap[K, V]]
+{
+  import AnyRefMap._
+  def this() = this(AnyRefMap.exceptionDefault, 16, true)
+  
+  /** Creates a new `AnyRefMap` that returns default values according to a supplied key-value mapping. */
+  def this(defaultEntry: K => V) = this(defaultEntry, 16, true)
+
+  /** Creates a new `AnyRefMap` with an initial buffer of specified size.
+   * 
+   *  An `AnyRefMap` can typically contain half as many elements as its buffer size
+   *  before it requires resizing.
+   */
+  def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, true)
+  
+  /** Creates a new `AnyRefMap` with specified default values and initial buffer size. */
+  def this(defaultEntry: K => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true)
+  
+  private[this] var mask = 0
+  private[this] var _size = 0
+  private[this] var _vacant = 0
+  private[this] var _hashes: Array[Int] = null
+  private[this] var _keys: Array[AnyRef] = null
+  private[this] var _values: Array[AnyRef] = null
+    
+  if (initBlank) defaultInitialize(initialBufferSize)
+  
+  private[this] def defaultInitialize(n: Int) {
+    mask = 
+      if (n<0) 0x7
+      else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7
+    _hashes = new Array[Int](mask+1)
+    _keys = new Array[AnyRef](mask+1)
+    _values = new Array[AnyRef](mask+1)
+  }
+  
+  private[collection] def initializeTo(
+    m: Int, sz: Int, vc: Int, hz: Array[Int], kz: Array[AnyRef], vz: Array[AnyRef]
+  ) {
+    mask = m; _size = sz; _vacant = vc; _hashes = hz; _keys = kz; _values = vz
+  }
+  
+  override def size: Int = _size
+  override def empty: AnyRefMap[K,V] = new AnyRefMap(defaultEntry)
+  
+  private def imbalanced: Boolean = 
+    (_size + _vacant) > 0.5*mask || _vacant > _size
+  
+  private def hashOf(key: K): Int = {
+    if (key eq null) 0x41081989
+    else {
+      val h = key.hashCode
+      // Part of the MurmurHash3 32 bit finalizer
+      val i = (h ^ (h >>> 16)) * 0x85EBCA6B
+      val j = (i ^ (i >>> 13))
+      if (j==0) 0x41081989 else j & 0x7FFFFFFF
+    }
+  }
+  
+  private def seekEntry(h: Int, k: AnyRef): Int = {
+    var e = h & mask
+    var x = 0
+    var g = 0
+    while ({ g = _hashes(e); g != 0}) {
+      if (g == h && { val q = _keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e
+      x += 1
+      e = (e + 2*(x+1)*x - 3) & mask
+    }
+    e | MissingBit
+  }
+  
+  private def seekEntryOrOpen(h: Int, k: AnyRef): Int = {
+    var e = h & mask
+    var x = 0
+    var g = 0
+    var o = -1
+    while ({ g = _hashes(e); g != 0}) {
+      if (g == h && { val q = _keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e
+      else if (o == -1 && g+g == 0) o = e
+      x += 1
+      e = (e + 2*(x+1)*x - 3) & mask
+    }
+    if (o >= 0) o | MissVacant else e | MissingBit
+  }
+  
+  override def contains(key: K): Boolean = seekEntry(hashOf(key), key) >= 0
+  
+  override def get(key: K): Option[V] = {
+    val i = seekEntry(hashOf(key), key)
+    if (i < 0) None else Some(_values(i).asInstanceOf[V])
+  }
+  
+  override def getOrElse[V1 >: V](key: K, default: => V1): V1 = {
+    val i = seekEntry(hashOf(key), key)
+    if (i < 0) default else _values(i).asInstanceOf[V]
+  }
+  
+  override def getOrElseUpdate(key: K, defaultValue: => V): V = {
+    val h = hashOf(key)
+    var i = seekEntryOrOpen(h, key)
+    if (i < 0) {
+      // It is possible that the default value computation was side-effecting
+      // Our hash table may have resized or even contain what we want now
+      // (but if it does, we'll replace it)
+      val value = {
+        val oh = _hashes
+        val ans = defaultValue
+        if (oh ne _hashes) {
+          i = seekEntryOrOpen(h, key)
+          if (i >= 0) _size -= 1
+        }
+        ans
+      }
+      _size += 1
+      val j = i & IndexMask
+      _hashes(j) = h
+      _keys(j) = key.asInstanceOf[AnyRef]
+      _values(j) = value.asInstanceOf[AnyRef]
+      if ((i & VacantBit) != 0) _vacant -= 1
+      else if (imbalanced) repack()
+      value
+    }
+    else _values(i).asInstanceOf[V]
+  }
+  
+  /** Retrieves the value associated with a key, or the default for that type if none exists
+   *  (null for AnyRef, 0 for floats and integers).
+   * 
+   *  Note: this is the fastest way to retrieve a value that may or
+   *  may not exist, if the default null/zero is acceptable.  For key/value
+   *  pairs that do exist, `apply` (i.e. `map(key)`) is equally fast.
+   */
+  def getOrNull(key: K): V = {
+    val i = seekEntry(hashOf(key), key)
+    (if (i < 0) null else _values(i)).asInstanceOf[V]
+  }
+  
+  /** Retrieves the value associated with a key. 
+   *  If the key does not exist in the map, the `defaultEntry` for that key
+   *  will be returned instead; an exception will be thrown if no 
+   *  `defaultEntry` was supplied.
+   */
+  override def apply(key: K): V = {
+    val i = seekEntry(hashOf(key), key)
+    if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V]
+  }
+  
+  /** Defers to defaultEntry to find a default value for the key.  Throws an
+   *  exception if no other default behavior was specified.
+   */
+  override def default(key: K) = defaultEntry(key)
+  
+  private def repack(newMask: Int) {
+    val oh = _hashes
+    val ok = _keys
+    val ov = _values
+    mask = newMask
+    _hashes = new Array[Int](mask+1)
+    _keys = new Array[AnyRef](mask+1)
+    _values = new Array[AnyRef](mask+1)
+    _vacant = 0
+    var i = 0
+    while (i < oh.length) {
+      val h = oh(i)
+      if (h+h != 0) {
+        var e = h & mask
+        var x = 0
+        while (_hashes(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask }
+        _hashes(e) = h
+        _keys(e) = ok(i)
+        _values(e) = ov(i)
+      }
+      i += 1
+    }
+  }
+  
+  /** Repacks the contents of this `AnyRefMap` for maximum efficiency of lookup.
+   * 
+   *  For maps that undergo a complex creation process with both addition and
+   *  removal of keys, and then are used heavily with no further removal of
+   *  elements, calling `repack` after the end of the creation can result in
+   *  improved performance.  Repacking takes time proportional to the number
+   *  of entries in the map.
+   */
+  def repack() {
+    var m = mask
+    if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask
+    while (m > 8 && 8*_size < m) m = m >>> 1
+    repack(m)
+  }
+  
+  override def put(key: K, value: V): Option[V] = {
+    val h = hashOf(key)
+    val k = key
+    var i = seekEntryOrOpen(h, k)
+    if (i < 0) {
+      val j = i & IndexMask
+      _hashes(j) = h
+      _keys(j) = k
+      _values(j) = value.asInstanceOf[AnyRef]
+      _size += 1
+      if ((i & VacantBit) != 0) _vacant -= 1
+      else if (imbalanced) repack()
+      None
+    }
+    else {
+      val ans = Some(_values(i).asInstanceOf[V])
+      _hashes(i) = h
+      _keys(i) = k
+      _values(i) = value.asInstanceOf[AnyRef]
+      ans
+    }
+  }
+  
+  /** Updates the map to include a new key-value pair.
+   * 
+   *  This is the fastest way to add an entry to an `AnyRefMap`.
+   */
+  override def update(key: K, value: V): Unit = {
+    val h = hashOf(key)
+    val k = key
+    var i = seekEntryOrOpen(h, k)
+    if (i < 0) {
+      val j = i & IndexMask
+      _hashes(j) = h
+      _keys(j) = k
+      _values(j) = value.asInstanceOf[AnyRef]
+      _size += 1
+      if ((i & VacantBit) != 0) _vacant -= 1
+      else if (imbalanced) repack()
+    }
+    else {
+      _hashes(i) = h
+      _keys(i) = k
+      _values(i) = value.asInstanceOf[AnyRef]
+    }
+  }
+  
+  /** Adds a new key/value pair to this map and returns the map. */
+  def +=(key: K, value: V): this.type = { update(key, value); this }
+
+  def +=(kv: (K, V)): this.type = { update(kv._1, kv._2); this }
+  
+  def -=(key: K): this.type = {
+    val i = seekEntry(hashOf(key), key)
+    if (i >= 0) {
+      _size -= 1
+      _vacant += 1
+      _hashes(i) = Int.MinValue
+      _keys(i) = null
+      _values(i) = null
+    }
+    this
+  }
+  
+  def iterator: Iterator[(K, V)] = new Iterator[(K, V)] {
+    private[this] val hz = _hashes
+    private[this] val kz = _keys
+    private[this] val vz = _values
+    
+    private[this] var index = 0
+    
+    def hasNext: Boolean = index<hz.length && {
+      var h = hz(index)
+      while (h+h == 0) {
+        index += 1
+        if (index >= hz.length) return false
+        h = hz(index)
+      }
+      true
+    }
+    
+    def next: (K, V) = {
+      if (hasNext) {
+        val ans = (kz(index).asInstanceOf[K], vz(index).asInstanceOf[V])
+        index += 1
+        ans
+      }
+      else throw new NoSuchElementException("next")
+    }
+  }
+  
+  override def foreach[A](f: ((K,V)) => A) {
+    var i = 0
+    var e = _size
+    while (e > 0) {
+      while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1
+      if (i < _hashes.length) {
+        f((_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V]))
+        i += 1
+        e -= 1
+      }
+      else return
+    }
+  }
+    
+  override def clone(): AnyRefMap[K, V] = {
+    val hz = java.util.Arrays.copyOf(_hashes, _hashes.length)
+    val kz = java.util.Arrays.copyOf(_keys, _keys.length)
+    val vz = java.util.Arrays.copyOf(_values,  _values.length)
+    val arm = new AnyRefMap[K, V](defaultEntry, 1, false)
+    arm.initializeTo(mask, _size, _vacant, hz, kz,  vz)
+    arm
+  }
+  
+  private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B) {
+    var i,j = 0
+    while (i < _hashes.length & j < _size) {
+      val h = _hashes(i)
+      if (h+h != 0) {
+        j += 1
+        f(elems(i).asInstanceOf[A])
+      }
+      i += 1
+    }
+  }
+  
+  /** Applies a function to all keys of this map. */
+  def foreachKey[A](f: K => A) { foreachElement[K,A](_keys, f) }
+
+  /** Applies a function to all values of this map. */
+  def foreachValue[A](f: V => A) { foreachElement[V,A](_values, f) }
+  
+  /** Creates a new `AnyRefMap` with different values.
+   *  Unlike `mapValues`, this method generates a new
+   *  collection immediately.
+   */
+  def mapValuesNow[V1](f: V => V1): AnyRefMap[K, V1] = {
+    val arm = new AnyRefMap[K,V1](AnyRefMap.exceptionDefault,  1,  false)
+    val hz = java.util.Arrays.copyOf(_hashes, _hashes.length)
+    val kz = java.util.Arrays.copyOf(_keys, _keys.length)
+    val vz = new Array[AnyRef](_values.length)
+    var i,j = 0
+    while (i < _hashes.length & j < _size) {
+      val h = _hashes(i)
+      if (h+h != 0) {
+        j += 1
+        vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef]
+      }
+      i += 1
+    }
+    arm.initializeTo(mask, _size, _vacant, hz, kz, vz)
+    arm
+  }
+  
+  /** Applies a transformation function to all values stored in this map. 
+   *  Note: the default, if any,  is not transformed.
+   */
+  def transformValues(f: V => V): this.type = {
+    var i,j = 0
+    while (i < _hashes.length & j < _size) {
+      val h = _hashes(i)
+      if (h+h != 0) {
+        j += 1
+        _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef]
+      }
+      i += 1
+    }
+    this
+  }
+
+}
+
+object AnyRefMap {
+  private final val IndexMask  = 0x3FFFFFFF
+  private final val MissingBit = 0x80000000
+  private final val VacantBit  = 0x40000000
+  private final val MissVacant = 0xC0000000
+  
+  private val exceptionDefault = (k: Any) => throw new NoSuchElementException(if (k == null) "(null)" else k.toString)
+  
+  implicit def canBuildFrom[K <: AnyRef, V, J <: AnyRef, U]: CanBuildFrom[AnyRefMap[K,V], (J, U), AnyRefMap[J,U]] =
+    new CanBuildFrom[AnyRefMap[K,V], (J, U), AnyRefMap[J,U]] {
+      def apply(from: AnyRefMap[K,V]): AnyRefMapBuilder[J, U] = apply()
+      def apply(): AnyRefMapBuilder[J, U] = new AnyRefMapBuilder[J, U]
+    }
+  
+  final class AnyRefMapBuilder[K <: AnyRef, V] extends Builder[(K, V), AnyRefMap[K, V]] {
+    private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V]
+    def +=(entry: (K, V)): this.type = {
+      elems += entry
+      this
+    }
+    def clear() { elems = new AnyRefMap[K, V] }
+    def result(): AnyRefMap[K, V] = elems
+  }
+
+  /** Creates a new `AnyRefMap` with zero or more key/value pairs. */
+  def apply[K <: AnyRef, V](elems: (K, V)*): AnyRefMap[K, V] = {
+    val sz = if (elems.hasDefiniteSize) elems.size else 4
+    val arm = new AnyRefMap[K, V](sz * 2)
+    elems.foreach{ case (k,v) => arm(k) = v }
+    if (arm.size < (sz>>3)) arm.repack()
+    arm
+  }
+  
+  /** Creates a new empty `AnyRefMap`. */
+  def empty[K <: AnyRef, V]: AnyRefMap[K, V] = new AnyRefMap[K, V]
+  
+  /** Creates a new empty `AnyRefMap` with the supplied default */
+  def withDefault[K <: AnyRef, V](default: K => V): AnyRefMap[K, V] = new AnyRefMap[K, V](default)
+  
+  /** Creates a new `AnyRefMap` from arrays of keys and values. 
+   *  Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`.
+   */
+  def fromZip[K <: AnyRef, V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = {
+    val sz = math.min(keys.length, values.length)
+    val arm = new AnyRefMap[K, V](sz * 2)
+    var i = 0
+    while (i < sz) { arm(keys(i)) = values(i); i += 1 }
+    if (arm.size < (sz>>3)) arm.repack()
+    arm
+  }
+  
+  /** Creates a new `AnyRefMap` from keys and values. 
+   *  Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`.
+   */
+  def fromZip[K <: AnyRef, V](keys: Iterable[K], values: Iterable[V]): AnyRefMap[K, V] = {
+    val sz = math.min(keys.size, values.size)
+    val arm = new AnyRefMap[K, V](sz * 2)
+    val ki = keys.iterator
+    val vi = values.iterator
+    while (ki.hasNext && vi.hasNext) arm(ki.next) = vi.next
+    if (arm.size < (sz >> 3)) arm.repack()
+    arm
+  }
+}
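A brief usage sketch of the AnyRefMap added above (value names are illustrative only):

import scala.collection.mutable.AnyRefMap

val counts = AnyRefMap("a" -> 1, "b" -> 2)   // keys must be AnyRef; here, Strings

counts("c") = 3                        // update is the fastest way to add an entry
counts.getOrElseUpdate("d", 0)         // inserts 0 for "d" only if it is absent
counts.getOrNull("zzz")                // type default (0 for Int values) instead of an Option

// After a build phase that mixed additions and removals, repack() restores
// lookup efficiency for a map that will now be read-heavy.
counts -= "b"
counts.repack()

// Bulk construction from parallel arrays of keys and values.
val ages = AnyRefMap.fromZip(Array("x", "y"), Array(24, 25))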
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index f1cfd2d..2d43b35 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala
index 0ce2cda..6e53824 100644
--- a/src/library/scala/collection/mutable/ArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/ArrayBuilder.scala
@@ -6,12 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package mutable
 
-import generic._
 import scala.reflect.ClassTag
 import scala.runtime.ScalaRunTime
 
@@ -54,6 +52,7 @@ object ArrayBuilder {
    *
    *  @tparam T     type of elements for the array builder, subtype of `AnyRef` with a `ClassTag` context bound.
    */
+  @deprecatedInheritance("ArrayBuilder.ofRef is an internal implementation not intended for subclassing.", "2.11.0")
   class ofRef[T <: AnyRef : ClassTag] extends ArrayBuilder[T] {
 
     private var elems: Array[T] = _
@@ -118,6 +117,7 @@ object ArrayBuilder {
   }
 
   /** A class for array builders for arrays of `byte`s. */
+  @deprecatedInheritance("ArrayBuilder.ofByte is an internal implementation not intended for subclassing.", "2.11.0")
   class ofByte extends ArrayBuilder[Byte] {
 
     private var elems: Array[Byte] = _
@@ -182,6 +182,7 @@ object ArrayBuilder {
   }
 
   /** A class for array builders for arrays of `short`s. */
+  @deprecatedInheritance("ArrayBuilder.ofShort is an internal implementation not intended for subclassing.", "2.11.0")
   class ofShort extends ArrayBuilder[Short] {
 
     private var elems: Array[Short] = _
@@ -246,6 +247,7 @@ object ArrayBuilder {
   }
 
   /** A class for array builders for arrays of `char`s. */
+  @deprecatedInheritance("ArrayBuilder.ofChar is an internal implementation not intended for subclassing.", "2.11.0")
   class ofChar extends ArrayBuilder[Char] {
 
     private var elems: Array[Char] = _
@@ -310,6 +312,7 @@ object ArrayBuilder {
   }
 
   /** A class for array builders for arrays of `int`s. */
+  @deprecatedInheritance("ArrayBuilder.ofInt is an internal implementation not intended for subclassing.", "2.11.0")
   class ofInt extends ArrayBuilder[Int] {
 
     private var elems: Array[Int] = _
@@ -374,6 +377,7 @@ object ArrayBuilder {
   }
 
   /** A class for array builders for arrays of `long`s. */
+  @deprecatedInheritance("ArrayBuilder.ofLong is an internal implementation not intended for subclassing.", "2.11.0")
   class ofLong extends ArrayBuilder[Long] {
 
     private var elems: Array[Long] = _
@@ -438,6 +442,7 @@ object ArrayBuilder {
   }
 
   /** A class for array builders for arrays of `float`s. */
+  @deprecatedInheritance("ArrayBuilder.ofFloat is an internal implementation not intended for subclassing.", "2.11.0")
   class ofFloat extends ArrayBuilder[Float] {
 
     private var elems: Array[Float] = _
@@ -502,6 +507,7 @@ object ArrayBuilder {
   }
 
   /** A class for array builders for arrays of `double`s. */
+  @deprecatedInheritance("ArrayBuilder.ofDouble is an internal implementation not intended for subclassing.", "2.11.0")
   class ofDouble extends ArrayBuilder[Double] {
 
     private var elems: Array[Double] = _
@@ -630,6 +636,7 @@ object ArrayBuilder {
   }
 
   /** A class for array builders for arrays of `Unit` type. */
+  @deprecatedInheritance("ArrayBuilder.ofUnit is an internal implementation not intended for subclassing.", "2.11.0")
   class ofUnit extends ArrayBuilder[Unit] {
 
     private var elems: Array[Unit] = _
diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala
index 31f3d2a..80b38a8 100644
--- a/src/library/scala/collection/mutable/ArrayLike.scala
+++ b/src/library/scala/collection/mutable/ArrayLike.scala
@@ -6,14 +6,13 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package mutable
-import generic._
 
-/** A common supertrait of `ArrayOps` and `WrappedArray` that factors out most
- *  operations on arrays and wrapped arrays.
+/** A common supertrait of `ArrayOps` and `WrappedArray` that factors out the 
+ * `deep` method for arrays and wrapped arrays and serves as a marker trait
+ * for array wrappers.
  *
  *  @tparam A     type of the elements contained in the array like object.
  *  @tparam Repr  the type of the actual collection containing the elements.
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 25ba7e4..00491ef 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -33,6 +33,7 @@ import parallel.mutable.ParArray
  *  @define mayNotTerminateInf
  *  @define willNotTerminateInf
  */
+@deprecatedInheritance("ArrayOps will be sealed to facilitate greater flexibility with array/collections integration in future releases.", "2.11.0")
 trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParallelizable[T, ParArray[T]] {
 
   private def elementClass: Class[_] =
@@ -52,6 +53,20 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
       super.toArray[U]
   }
 
+  def :+[B >: T: ClassTag](elem: B): Array[B] = {
+    val result = Array.ofDim[B](repr.length + 1)
+    Array.copy(repr, 0, result, 0, repr.length)
+    result(repr.length) = elem
+    result
+  }
+
+  def +:[B >: T: ClassTag](elem: B): Array[B] = {
+    val result = Array.ofDim[B](repr.length + 1)
+    result(0) = elem
+    Array.copy(repr, 0, result, 1, repr.length)
+    result
+  }
+
   override def par = ParArray.handoff(repr)
 
   /** Flattens a two-dimensional array by concatenating all its rows
@@ -66,7 +81,7 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
     b.sizeHint(map{case is: scala.collection.IndexedSeq[_] => is.size case _ => 0}.sum)
     for (xs <- this)
       b ++= asTrav(xs)
-    b.result
+    b.result()
   }
 
   /** Transposes a two dimensional array.
@@ -92,6 +107,69 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
       bb.result()
     }
   }
+  
+  /** Converts an array of pairs into an array of first elements and an array of second elements.
+   *  
+   *  @tparam T1    the type of the first half of the element pairs
+   *  @tparam T2    the type of the second half of the element pairs
+   *  @param asPair an implicit conversion which asserts that the element type
+   *                of this Array is a pair.
+   *  @param ct1    a class tag for T1 type parameter that is required to create an instance
+   *                of Array[T1]
+   *  @param ct2    a class tag for T2 type parameter that is required to create an instance
+   *                of Array[T2]
+   *  @return       a pair of Arrays, containing, respectively, the first and second half
+   *                of each element pair of this Array.
+   */
+  // implementation NOTE: ct1 and ct2 can't be written as context bounds because desugared
+  // implicits are put in front of asPair parameter that is supposed to guide type inference
+  def unzip[T1, T2](implicit asPair: T => (T1, T2), ct1: ClassTag[T1], ct2: ClassTag[T2]): (Array[T1], Array[T2]) = {
+    val a1 = new Array[T1](length)
+    val a2 = new Array[T2](length)
+    var i = 0
+    while (i < length) {
+      val e = apply(i)
+      a1(i) = e._1
+      a2(i) = e._2
+      i += 1
+    }
+    (a1, a2)
+  }
+  
+  /** Converts an array of triples into three arrays, one containing the elements from each position of the triple.
+   *  
+   *  @tparam T1      the type of the first of three elements in the triple
+   *  @tparam T2      the type of the second of three elements in the triple
+   *  @tparam T3      the type of the third of three elements in the triple
+   *  @param asTriple an implicit conversion which asserts that the element type
+   *                  of this Array is a triple.
+   *  @param ct1    a class tag for T1 type parameter that is required to create an instance
+   *                of Array[T1]
+   *  @param ct2    a class tag for T2 type parameter that is required to create an instance
+   *                of Array[T2]
+   *  @param ct3    a class tag for T3 type parameter that is required to create an instance
+   *                of Array[T3]
+   *  @return         a triple of Arrays, containing, respectively, the first, second, and third
+   *                  elements from each element triple of this Array.
+   */
+  // implementation NOTE: ct1, ct2, ct3 can't be written as context bounds because desugared
+  // implicits are put in front of asPair parameter that is supposed to guide type inference
+  def unzip3[T1, T2, T3](implicit asTriple: T => (T1, T2, T3), ct1: ClassTag[T1], ct2: ClassTag[T2],
+    ct3: ClassTag[T3]): (Array[T1], Array[T2], Array[T3]) = {
+    val a1 = new Array[T1](length)
+    val a2 = new Array[T2](length)
+    val a3 = new Array[T3](length)
+    var i = 0
+    while (i < length) {
+      val e = apply(i)
+      a1(i) = e._1
+      a2(i) = e._2
+      a3(i) = e._3
+      i += 1
+    }
+    (a1, a2, a3)
+  }
+  
 
   def seq = thisCollection
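A short usage sketch of the ArrayOps additions above (`:+`, `+:`, `unzip`, `unzip3`):

val xs = Array(1, 2, 3)
val appended  = xs :+ 4      // Array(1, 2, 3, 4), copied into a fresh array
val prepended = 0 +: xs      // Array(0, 1, 2, 3)

// unzip splits an array of pairs into an array of first and an array of second elements.
val pairs = Array("a" -> 1, "b" -> 2)
val (letters, numbers) = pairs.unzip          // (Array("a", "b"), Array(1, 2))

// unzip3 does the same for an array of triples.
val (is, cs, flags) = Array((1, 'a', true), (2, 'b', false)).unzip3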
 
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index 33f6949..577a838 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -90,7 +91,7 @@ extends AbstractSeq[A]
   }
 
   override def clone(): ArraySeq[A] = {
-    val cloned = array.clone.asInstanceOf[Array[AnyRef]]
+    val cloned = array.clone().asInstanceOf[Array[AnyRef]]
     new ArraySeq[A](length) {
       override val array = cloned
     }
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index 670558a..fec2da8 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -150,14 +150,14 @@ extends AbstractSeq[T]
    *
    *  @param f The function to drain to.
    */
-  def drain(f: T => Unit) = while (!isEmpty) f(pop)
+  def drain(f: T => Unit) = while (!isEmpty) f(pop())
 
   /** Pushes all the provided elements in the traversable object onto the stack.
    *
    *  @param xs The source of elements to push.
    *  @return   A reference to this stack.
    */
-  override def ++=(xs: TraversableOnce[T]): this.type = { xs.seq foreach += ; this }
+  override def ++=(xs: TraversableOnce[T]): this.type = { xs foreach += ; this }
 
   /** Does the same as `push`, but returns the updated stack.
    *
@@ -190,7 +190,7 @@ extends AbstractSeq[T]
    *
    *  @param f   The function to apply to the top two elements.
    */
-  def combine(f: (T, T) => T): Unit = push(f(pop, pop))
+  def combine(f: (T, T) => T): Unit = push(f(pop(), pop()))
 
   /** Repeatedly combine the top elements of the stack until the stack contains only
    *  one element.
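For reference, the drain/combine semantics touched above, as a small sketch:

import scala.collection.mutable.ArrayStack

val stack = new ArrayStack[Int]
stack.push(1); stack.push(2); stack.push(3)   // 3 is now on top

stack.combine(_ + _)        // pops 3 and 2, pushes 3 + 2 = 5; the stack is now 5, 1
stack ++= Seq(10, 20)       // pushes each element in turn; 20 ends up on top
stack.drain(println)        // pops and prints every element until the stack is empty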
diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala
index 2a535a7..43d23ac 100644
--- a/src/library/scala/collection/mutable/BitSet.scala
+++ b/src/library/scala/collection/mutable/BitSet.scala
@@ -8,11 +8,12 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
-import BitSetLike.{LogWL, updateArray}
+import BitSetLike.{LogWL, MaxSize, updateArray}
 
 /** A class for mutable bitsets.
  *
@@ -36,7 +37,7 @@ import BitSetLike.{LogWL, updateArray}
  *  @define willNotTerminateInf
  */
 @SerialVersionUID(8483111450368547763L)
-class BitSet(protected var elems: Array[Long]) extends AbstractSet[Int]
+class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
                                                   with SortedSet[Int]
                                                   with scala.collection.BitSet
                                                   with BitSetLike[BitSet]
@@ -53,19 +54,27 @@ class BitSet(protected var elems: Array[Long]) extends AbstractSet[Int]
 
   def this() = this(0)
 
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def nwords = elems.length
+  
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def word(idx: Int): Long =
     if (idx < nwords) elems(idx) else 0L
 
-  private def updateWord(idx: Int, w: Long) {
+  protected final def updateWord(idx: Int, w: Long) {
+    ensureCapacity(idx)
+    elems(idx) = w
+  }
+
+  protected final def ensureCapacity(idx: Int) {
+    require(idx < MaxSize)
     if (idx >= nwords) {
       var newlen = nwords
-      while (idx >= newlen) newlen = newlen * 2
+      while (idx >= newlen) newlen = (newlen * 2) min MaxSize
       val elems1 = new Array[Long](newlen)
       Array.copy(elems, 0, elems1, 0, nwords)
       elems = elems1
     }
-    elems(idx) = w
   }
 
   protected def fromBitMaskNoCopy(words: Array[Long]): BitSet = new BitSet(words)
@@ -89,9 +98,57 @@ class BitSet(protected var elems: Array[Long]) extends AbstractSet[Int]
     } else false
   }
 
+  @deprecatedOverriding("Override add to prevent += and add from exhibiting different behavior.", "2.11.0")
   def += (elem: Int): this.type = { add(elem); this }
+  
+  @deprecatedOverriding("Override add to prevent += and add from exhibiting different behavior.", "2.11.0")
   def -= (elem: Int): this.type = { remove(elem); this }
 
+  /** Updates this bitset to the union with another bitset by performing a bitwise "or".
+   *
+   *  @param   other  the bitset to form the union with.
+   *  @return  the bitset itself.
+   */
+  def |= (other: BitSet): this.type = {
+    ensureCapacity(other.nwords)
+    for (i <- 0 until other.nwords)
+      elems(i) = elems(i) | other.word(i)
+    this
+  }
+  /** Updates this bitset to the intersection with another bitset by performing a bitwise "and".
+   *
+   *  @param   other  the bitset to form the intersection with.
+   *  @return  the bitset itself.
+   */
+  def &= (other: BitSet): this.type = {
+    ensureCapacity(other.nwords)
+    for (i <- 0 until other.nwords)
+      elems(i) = elems(i) & other.word(i)
+    this
+  }
+  /** Updates this bitset to the symmetric difference with another bitset by performing a bitwise "xor".
+   *
+   *  @param   other  the bitset to form the symmetric difference with.
+   *  @return  the bitset itself.
+   */
+  def ^= (other: BitSet): this.type = {
+    ensureCapacity(other.nwords)
+    for (i <- 0 until other.nwords)
+      elems(i) = elems(i) ^ other.word(i)
+    this
+  }
+  /** Updates this bitset to the difference with another bitset by performing a bitwise "and-not".
+   *
+   *  @param   other  the bitset to form the difference with.
+   *  @return  the bitset itself.
+   */
+  def &~= (other: BitSet): this.type = {
+    ensureCapacity(other.nwords)
+    for (i <- 0 until other.nwords)
+      elems(i) = elems(i) & ~other.word(i)
+    this
+  }
+
   override def clear() {
     elems = new Array[Long](elems.length)
   }
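The new in-place bitset operators above correspond to the usual set algebra; a minimal sketch:

import scala.collection.mutable.BitSet

val acc = BitSet(0, 2, 4, 6)
acc |= BitSet(1, 3)      // union:                acc is now {0, 1, 2, 3, 4, 6}
acc &~= BitSet(0, 1)     // difference:           acc is now {2, 3, 4, 6}
acc &= BitSet(2, 3, 4)   // intersection:         acc is now {2, 3, 4}
acc ^= BitSet(3, 5)      // symmetric difference: acc is now {2, 4, 5}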
diff --git a/src/library/scala/collection/mutable/Buffer.scala b/src/library/scala/collection/mutable/Buffer.scala
index 230799c..7ec7b06 100644
--- a/src/library/scala/collection/mutable/Buffer.scala
+++ b/src/library/scala/collection/mutable/Buffer.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -45,4 +46,4 @@ object Buffer extends SeqFactory[Buffer] {
 }
 
 /** Explicit instantiation of the `Buffer` trait to reduce class file size in subclasses. */
-private[scala] abstract class AbstractBuffer[A] extends AbstractSeq[A] with Buffer[A]
+abstract class AbstractBuffer[A] extends AbstractSeq[A] with Buffer[A]
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 5935a28..3c57387 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -183,6 +184,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
    *
    *  @param cmd  the message to send.
    */
+  @deprecated("Scripting is deprecated.", "2.11.0")
   def <<(cmd: Message[A]): Unit = cmd match {
     case Include(Start, x)      => prepend(x)
     case Include(End, x)        => append(x)
@@ -198,7 +200,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
     case Remove(Index(n), x)    => if (this(n) == x) remove(n)
     case Remove(NoLo, x)        => this -= x
 
-    case Reset()                => clear
+    case Reset()                => clear()
     case s: Script[_]           => s.iterator foreach <<
     case _                      => throw new UnsupportedOperationException("message " + cmd + " not understood")
   }
@@ -209,9 +211,11 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
    */
   override def stringPrefix: String = "Buffer"
 
-  /** Provide a read-only view of this buffer as a sequence
-   *  @return  A sequence which refers to this buffer for all its operations.
+  /** Returns the current evolving(!) state of this buffer as a read-only sequence.
+   *
+   *  @return  A sequence that forwards to this buffer for all its operations.
    */
+  @deprecated("The returned sequence changes as this buffer is mutated. For an immutable copy, use, e.g., toList.", "2.11.0")
   def readOnly: scala.collection.Seq[A] = toSeq
 
   /** Creates a new collection containing both the elements of this collection and the provided
@@ -260,6 +264,6 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
   override def clone(): This = {
     val bf = newBuilder
     bf ++= this
-    bf.result.asInstanceOf[This]
+    bf.result().asInstanceOf[This]
   }
 }
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index 37aa186..d9632cc 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -6,12 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package mutable
 
-import generic._
 import script._
 
 /** This is a simple proxy class for <a href="Buffer.html"
@@ -28,6 +26,7 @@ import script._
  *  @define Coll `BufferProxy`
  *  @define coll buffer proxy
  */
+ at deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
 trait BufferProxy[A] extends Buffer[A] with Proxy {
 
   def self: Buffer[A]
@@ -127,12 +126,13 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
 
   /** Clears the buffer contents.
    */
-  def clear() { self.clear }
+  def clear() { self.clear() }
 
   /** Send a message to this scriptable object.
    *
    *  @param cmd  the message to send.
    */
+  @deprecated("Scripting is deprecated.", "2.11.0")
   override def <<(cmd: Message[A]) { self << cmd }
 
   /** Return a clone of this buffer.
diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala
index 5c0681d..7556058 100644
--- a/src/library/scala/collection/mutable/Builder.scala
+++ b/src/library/scala/collection/mutable/Builder.scala
@@ -121,7 +121,7 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
       override def ++=(xs: TraversableOnce[Elem]): this.type = { self ++= xs; this }
       override def sizeHint(size: Int) = self.sizeHint(size)
       override def sizeHintBounded(size: Int, boundColl: TraversableLike[_, _]) = self.sizeHintBounded(size, boundColl)
-      def result: NewTo = f(self.result)
+      def result: NewTo = f(self.result())
     }
 }
 
diff --git a/src/library/scala/collection/mutable/Cloneable.scala b/src/library/scala/collection/mutable/Cloneable.scala
index dadcd36..8b2f3f7 100644
--- a/src/library/scala/collection/mutable/Cloneable.scala
+++ b/src/library/scala/collection/mutable/Cloneable.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 /** A trait for cloneable collections.
diff --git a/src/library/scala/collection/mutable/ConcurrentMap.scala b/src/library/scala/collection/mutable/ConcurrentMap.scala
deleted file mode 100644
index 5b5d738..0000000
--- a/src/library/scala/collection/mutable/ConcurrentMap.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2010-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.collection
-package mutable
-
-/** A template trait for mutable maps that allow concurrent access.
- *
- *  $concurrentmapinfo
- *
- *  @since 2.8
- *  @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]]
- *  section on `Concurrent Maps` for more information.
- *
- *  @tparam A  the key type of the map
- *  @tparam B  the value type of the map
- *
- *  @define Coll `ConcurrentMap`
- *  @define coll concurrent map
- *  @define concurrentmapinfo
- *  This is a base trait for all Scala concurrent map implementations. It
- *  provides all of the methods a `Map` does, with the difference that all the
- *  changes are atomic. It also describes methods specific to concurrent maps.
- *
- *  '''Note''': The concurrent maps do not accept `'''null'''` for keys or values.
- *
- *  @define atomicop
- *  This is an atomic operation.
- */
- at deprecated("Use `scala.collection.concurrent.Map` instead.", "2.10.0")
-trait ConcurrentMap[A, B] extends Map[A, B] {
-
-  /**
-   * Associates the given key with a given value, unless the key was already
-   * associated with some other value.
-   *
-   * $atomicop
-   *
-   * @param k   key with which the specified value is to be associated with
-   * @param v   value to be associated with the specified key
-   * @return    `Some(oldvalue)` if there was a value `oldvalue` previously
-   *            associated with the specified key, or `None` if there was no
-   *            mapping for the specified key
-   */
-  def putIfAbsent(k: A, v: B): Option[B]
-
-  /**
-   * Removes the entry for the specified key if its currently mapped to the
-   * specified value.
-   *
-   * $atomicop
-   *
-   * @param k   key for which the entry should be removed
-   * @param v   value expected to be associated with the specified key if
-   *            the removal is to take place
-   * @return    `true` if the removal took place, `false` otherwise
-   */
-  def remove(k: A, v: B): Boolean
-
-  /**
-   * Replaces the entry for the given key only if it was previously mapped to
-   * a given value.
-   *
-   * $atomicop
-   *
-   * @param k         key for which the entry should be replaced
-   * @param oldvalue  value expected to be associated with the specified key
-   *                  if replacing is to happen
-   * @param newvalue  value to be associated with the specified key
-   * @return          `true` if the entry was replaced, `false` otherwise
-   */
-  def replace(k: A, oldvalue: B, newvalue: B): Boolean
-
-  /**
-   * Replaces the entry for the given key only if it was previously mapped
-   * to some value.
-   *
-   * $atomicop
-   *
-   * @param k   key for which the entry should be replaced
-   * @param v   value to be associated with the specified key
-   * @return    `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise
-   */
-  def replace(k: A, v: B): Option[B]
-}
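As the removed deprecation notice says, scala.collection.concurrent.Map is the replacement; scala.collection.concurrent.TrieMap is the standard library implementation. A minimal sketch of the same atomic operations there:

import scala.collection.concurrent.TrieMap

val m = TrieMap[String, Int]()

m.putIfAbsent("a", 1)    // None, since "a" was absent; Some(old) otherwise
m.replace("a", 1, 2)     // true only if "a" was still mapped to 1
m.replace("a", 10)       // Some(previousValue) only if "a" was present
m.remove("a", 10)        // true only if "a" was still mapped to 10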
diff --git a/src/library/scala/collection/mutable/DefaultEntry.scala b/src/library/scala/collection/mutable/DefaultEntry.scala
index f14cb4a..66db458 100644
--- a/src/library/scala/collection/mutable/DefaultEntry.scala
+++ b/src/library/scala/collection/mutable/DefaultEntry.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 /** Class used internally for default map model.
diff --git a/src/library/scala/collection/mutable/DefaultMapModel.scala b/src/library/scala/collection/mutable/DefaultMapModel.scala
index 903f117..0088620 100644
--- a/src/library/scala/collection/mutable/DefaultMapModel.scala
+++ b/src/library/scala/collection/mutable/DefaultMapModel.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 /** This class is used internally. It implements the mutable `Map`
@@ -18,6 +19,7 @@ package mutable
  *  @version 1.0, 08/07/2003
  *  @since   1
  */
+ at deprecated("This trait will be removed.", "2.11.0")
 trait DefaultMapModel[A, B] extends Map[A, B] {
 
   type Entry = DefaultEntry[A, B]
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala
index 18a1e23..671b79f 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedList.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -40,6 +41,7 @@ import generic._
  *  @define mayNotTerminateInf
  *  @define willNotTerminateInf
  */
+ at deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0")
 @SerialVersionUID(-8144992287952814767L)
 class DoubleLinkedList[A]() extends AbstractSeq[A]
                             with LinearSeq[A]
@@ -68,7 +70,7 @@ class DoubleLinkedList[A]() extends AbstractSeq[A]
   override def clone(): DoubleLinkedList[A] = {
     val builder = newBuilder
     builder ++= this
-    builder.result
+    builder.result()
   }
 }
 
@@ -76,6 +78,7 @@ class DoubleLinkedList[A]() extends AbstractSeq[A]
  *  @define coll double linked list
  *  @define Coll `DoubleLinkedList`
  */
+ at deprecated("Low-level linked lists are deprecated.", "2.11.0")
 object DoubleLinkedList extends SeqFactory[DoubleLinkedList] {
   /** $genericCanBuildFromInfo */
   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, DoubleLinkedList[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
index 3f223f3..a43fe34 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import scala.annotation.migration
@@ -55,6 +56,7 @@ import scala.annotation.migration
  *  @define Coll `DoubleLinkedList`
  *  @define coll double linked list
  */
+ at deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0")
 trait DoubleLinkedListLike[A, This <: Seq[A] with DoubleLinkedListLike[A, This]] extends SeqLike[A, This] with LinkedListLike[A, This] { self =>
 
   /** A reference to the node in the linked list preceding the current node. */
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index 91e95e0..25cc873 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -6,18 +6,16 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
+package scala
+package collection
 package mutable
 
-
 /** An implementation class backing a `HashSet`.
  *
  *  This trait is used internally. It can be mixed in with various collections relying on
  *  hash table as an implementation.
  *
  *  @define coll flat hash table
- *  @define cannotStoreNull '''Note''': A $coll cannot store `null` elements.
  *  @since 2.3
  *  @tparam A   the type of the elements contained in the $coll.
  */
@@ -78,7 +76,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
     assert(size >= 0)
 
     table = new Array(capacity(sizeForThreshold(size, _loadFactor)))
-    threshold = newThreshold(_loadFactor, table.size)
+    threshold = newThreshold(_loadFactor, table.length)
 
     seedvalue = in.readInt()
 
@@ -87,9 +85,9 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
 
     var index = 0
     while (index < size) {
-      val elem = in.readObject().asInstanceOf[A]
+      val elem = entryToElem(in.readObject())
       f(elem)
-      addEntry(elem)
+      addElem(elem)
       index += 1
     }
   }
@@ -109,61 +107,80 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
   }
 
   /** Finds an entry in the hash table if such an element exists. */
-  protected def findEntry(elem: A): Option[A] = {
-    val entry = findEntryImpl(elem)
-    if (null == entry) None else Some(entry.asInstanceOf[A])
-  }
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
+  protected def findEntry(elem: A): Option[A] =
+    findElemImpl(elem) match {
+      case null => None
+      case entry => Some(entryToElem(entry))
+    }
+
 
   /** Checks whether an element is contained in the hash table. */
-  protected def containsEntry(elem: A): Boolean = {
-    null != findEntryImpl(elem)
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
+  protected def containsElem(elem: A): Boolean = {
+    null != findElemImpl(elem)
   }
 
-  private def findEntryImpl(elem: A): AnyRef = {
-    var h = index(elemHashCode(elem))
-    var entry = table(h)
-    while (null != entry && entry != elem) {
+  private def findElemImpl(elem: A): AnyRef = {
+    val searchEntry = elemToEntry(elem)
+    var h = index(searchEntry.hashCode)
+    var curEntry = table(h)
+    while (null != curEntry && curEntry != searchEntry) {
       h = (h + 1) % table.length
-      entry = table(h)
+      curEntry = table(h)
     }
-    entry
+    curEntry
+  }
+
+  /** Add elem if not yet in table.
+   *  @return Returns `true` if a new elem was added, `false` otherwise.
+   */
+  protected def addElem(elem: A) : Boolean = {
+    addEntry(elemToEntry(elem))
   }
 
-  /** Add entry if not yet in table.
-   *  @return Returns `true` if a new entry was added, `false` otherwise.
+  /**
+   * Add an entry (an elem converted to an entry via elemToEntry) if not yet in
+   * table.
+   *  @return Returns `true` if a new elem was added, `false` otherwise.
    */
-  protected def addEntry(elem: A) : Boolean = {
-    var h = index(elemHashCode(elem))
-    var entry = table(h)
-    while (null != entry) {
-      if (entry == elem) return false
+  protected def addEntry(newEntry : AnyRef) : Boolean = {
+    var h = index(newEntry.hashCode)
+    var curEntry = table(h)
+    while (null != curEntry) {
+      if (curEntry == newEntry) return false
       h = (h + 1) % table.length
-      entry = table(h)
+      curEntry = table(h)
       //Statistics.collisions += 1
     }
-    table(h) = elem.asInstanceOf[AnyRef]
+    table(h) = newEntry
     tableSize = tableSize + 1
     nnSizeMapAdd(h)
     if (tableSize >= threshold) growTable()
     true
+
   }
 
-  /** Removes an entry from the hash table, returning an option value with the element, or `None` if it didn't exist. */
-  protected def removeEntry(elem: A) : Option[A] = {
+  /**
+   * Removes an elem from the hash table returning true if the element was found (and thus removed)
+   * or false if it didn't exist.
+   */
+  protected def removeElem(elem: A) : Boolean = {
     if (tableDebug) checkConsistent()
     def precedes(i: Int, j: Int) = {
       val d = table.length >> 1
       if (i <= j) j - i < d
       else i - j > d
     }
-    var h = index(elemHashCode(elem))
-    var entry = table(h)
-    while (null != entry) {
-      if (entry == elem) {
+    val removalEntry = elemToEntry(elem)
+    var h = index(removalEntry.hashCode)
+    var curEntry = table(h)
+    while (null != curEntry) {
+      if (curEntry == removalEntry) {
         var h0 = h
         var h1 = (h0 + 1) % table.length
         while (null != table(h1)) {
-          val h2 = index(elemHashCode(table(h1).asInstanceOf[A]))
+          val h2 = index(table(h1).hashCode)
           //Console.println("shift at "+h1+":"+table(h1)+" with h2 = "+h2+"? "+(h2 != h1)+precedes(h2, h0)+table.length)
           if (h2 != h1 && precedes(h2, h0)) {
             //Console.println("shift "+h1+" to "+h0+"!")
@@ -176,12 +193,12 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
         tableSize -= 1
         nnSizeMapRemove(h0)
         if (tableDebug) checkConsistent()
-        return Some(entry.asInstanceOf[A])
+        return true
       }
       h = (h + 1) % table.length
-      entry = table(h)
+      curEntry = table(h)
     }
-    None
+    false
   }
 
   protected def iterator: Iterator[A] = new AbstractIterator[A] {
@@ -191,8 +208,8 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
       i < table.length
     }
     def next(): A =
-      if (hasNext) { i += 1; table(i - 1).asInstanceOf[A] }
-      else Iterator.empty.next
+      if (hasNext) { i += 1; entryToElem(table(i - 1)) }
+      else Iterator.empty.next()
   }
 
   private def growTable() {
@@ -205,7 +222,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
     var i = 0
     while (i < oldtable.length) {
       val entry = oldtable(i)
-      if (null != entry) addEntry(entry.asInstanceOf[A])
+      if (null != entry) addEntry(entry)
       i += 1
     }
     if (tableDebug) checkConsistent()
@@ -213,10 +230,11 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
 
   private def checkConsistent() {
     for (i <- 0 until table.length)
-      if (table(i) != null && !containsEntry(table(i).asInstanceOf[A]))
-        assert(false, i+" "+table(i)+" "+table.mkString)
+      if (table(i) != null && !containsElem(entryToElem(table(i))))
+        assert(assertion = false, i+" "+table(i)+" "+table.mkString)
   }
 
+
   /* Size map handling code */
 
   /*
@@ -232,15 +250,18 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
    * where sizeMapBucketSize == 4.
    *
    */
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def nnSizeMapAdd(h: Int) = if (sizemap ne null) {
     val p = h >> sizeMapBucketBitSize
     sizemap(p) += 1
   }
 
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def nnSizeMapRemove(h: Int) = if (sizemap ne null) {
     sizemap(h >> sizeMapBucketBitSize) -= 1
   }
 
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) {
     val nsize = calcSizeMapSize(tableLength)
     if (sizemap.length != nsize) sizemap = new Array[Int](nsize)
@@ -249,14 +270,17 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
 
   private[collection] final def totalSizeMapBuckets = (table.length - 1) / sizeMapBucketSize + 1
 
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1
 
   // discards the previous sizemap and only allocates a new one
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def sizeMapInit(tableLength: Int) {
     sizemap = new Array[Int](calcSizeMapSize(tableLength))
   }
 
   // discards the previous sizemap and populates the new one
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def sizeMapInitAndRebuild() {
     // first allocate
     sizeMapInit(table.length)
@@ -265,7 +289,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
     val totalbuckets = totalSizeMapBuckets
     var bucketidx = 0
     var tableidx = 0
-    var tbl = table
+    val tbl = table
     var tableuntil = sizeMapBucketSize min tbl.length
     while (bucketidx < totalbuckets) {
       var currbucketsz = 0
@@ -341,7 +365,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
       seedvalue = c.seedvalue
       sizemap = c.sizemap
     }
-    if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild
+    if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild()
   }
 
 }
@@ -359,6 +383,11 @@ private[collection] object FlatHashTable {
     override def initialValue = new scala.util.Random
   }
 
+  private object NullSentinel {
+    override def hashCode = 0
+    override def toString = "NullSentinel"
+  }
+
   /** The load factor for the hash table; must be < 500 (0.5)
    */
   def defaultLoadFactor: Int = 450
@@ -386,10 +415,6 @@ private[collection] object FlatHashTable {
     // so that:
     protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize
 
-    protected def elemHashCode(elem: A) =
-      if (elem == null) throw new IllegalArgumentException("Flat hash tables cannot contain null elements.")
-      else elem.hashCode()
-
     protected final def improve(hcode: Int, seed: Int) = {
       //var h: Int = hcode + ~(hcode << 9)
       //h = h ^ (h >>> 14)
@@ -404,6 +429,19 @@ private[collection] object FlatHashTable {
       val rotated = (improved >>> rotation) | (improved << (32 - rotation))
       rotated
     }
+
+    /**
+     * Elems have type A, but we store AnyRef in the table. Plus we need to deal with
+     * null elems, which need to be stored as NullSentinel
+     */
+    protected final def elemToEntry(elem : A) : AnyRef =
+      if (null == elem) NullSentinel else elem.asInstanceOf[AnyRef]
+
+    /**
+     * Does the inverse translation of elemToEntry
+     */
+    protected final def entryToElem(entry : AnyRef) : A =
+      (if (entry.isInstanceOf[NullSentinel.type]) null else entry).asInstanceOf[A]
   }
 
 }
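The elemToEntry/entryToElem pair above is what lets the flat hash table accept null elements while a null table slot still means "empty". A self-contained sketch of that encoding (a standalone illustration, not the actual private API):

object NullSentinelSketch {
  // A private sentinel object stands in for a stored null element.
  private object NullSentinel {
    override def hashCode = 0
    override def toString = "NullSentinel"
  }

  def elemToEntry[A](elem: A): AnyRef =
    if (null == elem) NullSentinel else elem.asInstanceOf[AnyRef]

  def entryToElem[A](entry: AnyRef): A =
    (if (entry.isInstanceOf[NullSentinel.type]) null else entry).asInstanceOf[A]

  // Round trip holds for both null and non-null elements:
  //   entryToElem[String](elemToEntry[String](null)) == null
  //   entryToElem[String](elemToEntry[String]("a"))  == "a"
}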
diff --git a/src/library/scala/collection/mutable/GenIterable.scala.disabled b/src/library/scala/collection/mutable/GenIterable.scala.disabled
deleted file mode 100644
index 9acfccd..0000000
--- a/src/library/scala/collection/mutable/GenIterable.scala.disabled
+++ /dev/null
@@ -1,37 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.collection
-package mutable
-
-
-import generic._
-
-
-/** A base trait for iterable collections that can be mutated.
- *
- *  $possiblyparinfo
- *  
- *  $iterableInfo
- */
-trait GenIterable[A] extends GenTraversable[A] 
-                     with scala.collection.GenIterable[A] 
-                     with scala.collection.GenIterableLike[A, GenIterable[A]]
-//                     with GenericTraversableTemplate[A, GenIterable]
-{
-  def seq: Iterable[A]
-  //override def companion: GenericCompanion[GenIterable] = GenIterable
-}
-
-
-// object GenIterable extends TraversableFactory[GenIterable] {
-//   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-//   def newBuilder[A]: Builder[A, GenIterable[A]] = Iterable.newBuilder
-// }
-
-
diff --git a/src/library/scala/collection/mutable/GenMap.scala.disabled b/src/library/scala/collection/mutable/GenMap.scala.disabled
deleted file mode 100644
index e4fd1da..0000000
--- a/src/library/scala/collection/mutable/GenMap.scala.disabled
+++ /dev/null
@@ -1,40 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.collection
-package mutable
-
-
-import generic._
-
-
-/** A base trait for maps that can be mutated.
- *  $possiblyparinfo
- *  $mapNote
- *  $mapTags
- *  @since 1.0
- *  @author  Matthias Zenger
- */
-trait GenMap[A, B] 
-extends GenIterable[(A, B)]
-   with scala.collection.GenMap[A, B] 
-   with scala.collection.GenMapLike[A, B, GenMap[A, B]]
-{
-  def seq: Map[A, B]
-}
-
-
-// object GenMap extends MapFactory[GenMap] {
-//   def empty[A, B]: Map[A, B] = Map.empty
-  
-//   /** $mapCanBuildFromInfo */
-//   implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
-// }
-
diff --git a/src/library/scala/collection/mutable/GenSeq.scala.disabled b/src/library/scala/collection/mutable/GenSeq.scala.disabled
deleted file mode 100644
index ec90472..0000000
--- a/src/library/scala/collection/mutable/GenSeq.scala.disabled
+++ /dev/null
@@ -1,44 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.collection
-package mutable
-
-
-import generic._
-
-
-/** A subtrait of `collection.GenSeq` which represents sequences
- *  that can be mutated.
- *
- *  $possiblyparinfo
- *   
- *  $seqInfo
- * 
- *  The class adds an `update` method to `collection.Seq`.
- *  
- *  @define Coll `mutable.Seq`
- *  @define coll mutable sequence
- */
-trait GenSeq[A] extends GenIterable[A] 
-                        with scala.collection.GenSeq[A] 
-                        with scala.collection.GenSeqLike[A, GenSeq[A]]
-//                        with GenericTraversableTemplate[A, GenSeq]
-{
-  //override def companion: GenericCompanion[GenSeq] = GenSeq
-  def seq: Seq[A]
-}
-
-
-// object GenSeq extends SeqFactory[GenSeq] {
-//   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-//   def newBuilder[A]: Builder[A, GenSeq[A]] = Seq.newBuilder
-// }
-
diff --git a/src/library/scala/collection/mutable/GenSet.scala.disabled b/src/library/scala/collection/mutable/GenSet.scala.disabled
deleted file mode 100644
index dec20e2..0000000
--- a/src/library/scala/collection/mutable/GenSet.scala.disabled
+++ /dev/null
@@ -1,46 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.collection
-package mutable
-
-
-
-import generic._
-
-
-/** A generic trait for mutable sets.
- *
- *  $possiblyparinfo
- *  $setNote
- *  $setTags
- *
- *  @since 1.0
- *  @author Matthias Zenger
- *  @define Coll `mutable.Set`
- *  @define coll mutable set
- */
-trait GenSet[A] extends GenIterable[A]
-                   with Growable[A]
-                   with scala.collection.GenSet[A]
-                   with scala.collection.GenSetLike[A, GenSet[A]]
-//                   with GenericSetTemplate[A, GenSet]
-{
-  //override def companion: GenericCompanion[GenSet] = GenSet
-  def seq: Set[A]
-}
-
-
-// object GenSet extends TraversableFactory[GenSet] {
-//   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSet[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-//   def newBuilder[A]: Builder[A, GenSet[A]] = Set.newBuilder
-// }
-
-
diff --git a/src/library/scala/collection/mutable/GenTraversable.scala.disabled b/src/library/scala/collection/mutable/GenTraversable.scala.disabled
deleted file mode 100644
index 2453e2c..0000000
--- a/src/library/scala/collection/mutable/GenTraversable.scala.disabled
+++ /dev/null
@@ -1,38 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.collection
-package mutable
-
-
-import generic._
-
-
-/** A trait for traversable collections that can be mutated.
- *
- *  $possiblyparinfo
- * 
- *  $traversableInfo
- *  @define mutability mutable
- */
-trait GenTraversable[A] extends scala.collection.GenTraversable[A] 
-                        with scala.collection.GenTraversableLike[A, GenTraversable[A]]
-//                        with GenericTraversableTemplate[A, GenTraversable]
-                        with Mutable
-{
-  def seq: Traversable[A]
-  //override def companion: GenericCompanion[GenTraversable] = GenTraversable
-}
-
-// object GenTraversable extends TraversableFactory[GenTraversable] {
-//   implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-//   def newBuilder[A] = Traversable.newBuilder
-// }
-
diff --git a/src/library/scala/collection/mutable/GrowingBuilder.scala b/src/library/scala/collection/mutable/GrowingBuilder.scala
index ba7ea60..c4b5e54 100644
--- a/src/library/scala/collection/mutable/GrowingBuilder.scala
+++ b/src/library/scala/collection/mutable/GrowingBuilder.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
diff --git a/src/library/scala/collection/mutable/HashEntry.scala b/src/library/scala/collection/mutable/HashEntry.scala
index 5cd976e..4c0f6a9 100644
--- a/src/library/scala/collection/mutable/HashEntry.scala
+++ b/src/library/scala/collection/mutable/HashEntry.scala
@@ -5,7 +5,8 @@
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
 \*                                                                      */
-package scala.collection
+package scala
+package collection
 package mutable
 
 /** Class used internally.
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index 3cd7f07..6fca75f 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -93,9 +94,9 @@ extends AbstractMap[A, B]
 
   def -=(key: A): this.type = { removeEntry(key); this }
 
-  def iterator = entriesIterator map {e => (e.key, e.value)}
+  def iterator = entriesIterator map (e => ((e.key, e.value)))
 
-  override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f(e.key, e.value))
+  override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f((e.key, e.value)))
 
   /* Override to avoid tuple allocation in foreach */
   override def keySet: scala.collection.Set[A] = new DefaultKeySet {
@@ -111,21 +112,21 @@ extends AbstractMap[A, B]
   override def keysIterator: Iterator[A] = new AbstractIterator[A] {
     val iter    = entriesIterator
     def hasNext = iter.hasNext
-    def next()  = iter.next.key
+    def next()  = iter.next().key
   }
 
   /* Override to avoid tuple allocation */
   override def valuesIterator: Iterator[B] = new AbstractIterator[B] {
     val iter    = entriesIterator
     def hasNext = iter.hasNext
-    def next()  = iter.next.value
+    def next()  = iter.next().value
   }
 
   /** Toggles whether a size map is used to track hash map statistics.
    */
   def useSizeMap(t: Boolean) = if (t) {
-    if (!isSizeMapDefined) sizeMapInitAndRebuild
-  } else sizeMapDisable
+    if (!isSizeMapDefined) sizeMapInitAndRebuild()
+  } else sizeMapDisable()
 
   protected def createNewEntry[B1](key: A, value: B1): Entry = {
     new Entry(key, value.asInstanceOf[B])
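
The hunk above rewrites keysIterator and valuesIterator to walk the entry chain directly, so no intermediate (key, value) tuple is built when only one side of the pair is needed. A small sketch of the same pattern over a plain entry list (Entry, entries and NoTupleSketch are illustrative names, not the map's internals):

    object NoTupleSketch {
      final case class Entry[A, B](key: A, value: B)

      // Iterate keys without ever materialising a (key, value) pair.
      def keysIterator[A, B](entries: List[Entry[A, B]]): Iterator[A] =
        new scala.collection.AbstractIterator[A] {
          private val iter = entries.iterator
          def hasNext = iter.hasNext
          def next()  = iter.next().key
        }

      def demo(): Unit = {
        val es = List(Entry("a", 1), Entry("b", 2))
        assert(keysIterator(es).toList == List("a", "b"))
      }
    }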
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index c60e363..886fee5 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -16,8 +17,6 @@ import scala.collection.parallel.mutable.ParHashSet
 
 /** This class implements mutable sets using a hashtable.
  *
- *  $cannotStoreNull
- *
  *  @author  Matthias Zenger
  *  @author  Martin Odersky
  *  @version 2.0, 31/12/2006
@@ -55,17 +54,17 @@ extends AbstractSet[A]
 
   override def size: Int = tableSize
 
-  def contains(elem: A): Boolean = containsEntry(elem)
+  def contains(elem: A): Boolean = containsElem(elem)
 
-  def += (elem: A): this.type = { addEntry(elem); this }
+  def += (elem: A): this.type = { addElem(elem); this }
 
-  def -= (elem: A): this.type = { removeEntry(elem); this }
+  def -= (elem: A): this.type = { removeElem(elem); this }
 
   override def par = new ParHashSet(hashTableContents)
 
-  override def add(elem: A): Boolean = addEntry(elem)
+  override def add(elem: A): Boolean = addElem(elem)
 
-  override def remove(elem: A): Boolean = removeEntry(elem).isDefined
+  override def remove(elem: A): Boolean = removeElem(elem)
 
   override def clear() { clearTable() }
 
@@ -75,8 +74,8 @@ extends AbstractSet[A]
     var i = 0
     val len = table.length
     while (i < len) {
-      val elem = table(i)
-      if (elem ne null) f(elem.asInstanceOf[A])
+      val curEntry = table(i)
+      if (curEntry ne null) f(entryToElem(curEntry))
       i += 1
     }
   }
@@ -94,8 +93,8 @@ extends AbstractSet[A]
   /** Toggles whether a size map is used to track hash map statistics.
    */
   def useSizeMap(t: Boolean) = if (t) {
-    if (!isSizeMapDefined) sizeMapInitAndRebuild
-  } else sizeMapDisable
+    if (!isSizeMapDefined) sizeMapInitAndRebuild()
+  } else sizeMapDisable()
 
 }
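
Together with the sentinel change in FlatHashTable, rerouting contains/+=/-= through containsElem, addElem and removeElem means a mutable.HashSet no longer rejects null the way the old table did (see the removed IllegalArgumentException earlier in this patch). A quick check of that behaviour, assuming a 2.11 library on the classpath:

    object NullInHashSetCheck {
      import scala.collection.mutable
      def demo(): Unit = {
        val s = mutable.HashSet[String]("a")
        s += null                          // the 2.10 flat table threw IllegalArgumentException here
        assert(s.contains(null) && s.size == 2)
        s -= null
        assert(!s.contains(null) && s.size == 1)
      }
    }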
 
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 8fef1be..65d9c35 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 /** This class can be used to construct data structures that are based
@@ -96,7 +97,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
     val smDefined = in.readBoolean()
 
     table = new Array(capacity(sizeForThreshold(_loadFactor, size)))
-    threshold = newThreshold(_loadFactor, table.size)
+    threshold = newThreshold(_loadFactor, table.length)
 
     if (smDefined) sizeMapInit(table.length) else sizemap = null
 
@@ -126,6 +127,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
 
   /** Find entry with given key in table, null if not found.
    */
+  @deprecatedOverriding("No sensible way to override findEntry as private findEntry0 is used in multiple places internally.", "2.11.0")
   protected def findEntry(key: A): Entry =
     findEntry0(key, index(elemHashCode(key)))
 
@@ -138,6 +140,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
   /** Add entry to table
    *  pre: no entry with same key exists
    */
+  @deprecatedOverriding("No sensible way to override addEntry as private addEntry0 is used in multiple places internally.", "2.11.0")
   protected def addEntry(e: Entry) {
     addEntry0(e, index(elemHashCode(e.key)))
   }
@@ -171,6 +174,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
 
   /** Remove entry from table if present.
    */
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def removeEntry(key: A) : Entry = {
     val h = index(elemHashCode(key))
     var e = table(h).asInstanceOf[Entry]
@@ -281,14 +285,17 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
    * is converted into a parallel hash table, the size map is initialized, as it will be needed
    * there.
    */
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def nnSizeMapAdd(h: Int) = if (sizemap ne null) {
     sizemap(h >> sizeMapBucketBitSize) += 1
   }
 
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def nnSizeMapRemove(h: Int) = if (sizemap ne null) {
     sizemap(h >> sizeMapBucketBitSize) -= 1
   }
 
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) {
     val nsize = calcSizeMapSize(tableLength)
     if (sizemap.length != nsize) sizemap = new Array[Int](nsize)
@@ -297,6 +304,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
 
   private[collection] final def totalSizeMapBuckets = if (sizeMapBucketSize < table.length) 1 else table.length / sizeMapBucketSize
 
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1
 
   // discards the previous sizemap and only allocates a new one
@@ -305,6 +313,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
   }
 
   // discards the previous sizemap and populates the new one
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def sizeMapInitAndRebuild() {
     sizeMapInit(table.length)
 
@@ -335,8 +344,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
     println(sizemap.toList)
   }
 
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def sizeMapDisable() = sizemap = null
 
+  @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
   protected def isSizeMapDefined = sizemap ne null
 
   // override to automatically initialize the size map
@@ -365,7 +376,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
       seedvalue = c.seedvalue
       sizemap = c.sizemap
     }
-    if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild
+    if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild()
   }
 
   private[collection] def hashTableContents = new HashTable.Contents(
@@ -382,7 +393,7 @@ private[collection] object HashTable {
   /** The load factor for the hash table (in 0.001 step).
    */
   private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75%
-  private[collection] final def loadFactorDenum = 1000;
+  private[collection] final def loadFactorDenum = 1000
 
   private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt
 
@@ -457,13 +468,13 @@ private[collection] object HashTable {
    */
   private[collection] def powerOfTwo(target: Int): Int = {
     /* See http://bits.stephan-brumme.com/roundUpToNextPowerOfTwo.html */
-    var c = target - 1;
-    c |= c >>>  1;
-    c |= c >>>  2;
-    c |= c >>>  4;
-    c |= c >>>  8;
-    c |= c >>> 16;
-    c + 1;
+    var c = target - 1
+    c |= c >>>  1
+    c |= c >>>  2
+    c |= c >>>  4
+    c |= c >>>  8
+    c |= c >>> 16
+    c + 1
   }
 
   class Contents[A, Entry >: Null <: HashEntry[A, Entry]](
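
powerOfTwo above rounds its argument up to the next power of two by smearing the highest set bit of target - 1 across all lower positions and then adding one. A standalone restatement with a worked value (PowerOfTwoSketch is an illustrative name):

    object PowerOfTwoSketch {
      def powerOfTwo(target: Int): Int = {
        var c = target - 1
        c |= c >>>  1; c |= c >>>  2; c |= c >>>  4; c |= c >>>  8; c |= c >>> 16
        c + 1
      }
      // e.g. target = 37: 36 = 0b100100 smears to 0b111111 = 63, so the result is 64
      def demo(): Unit = {
        assert(powerOfTwo(37) == 64)
        assert(powerOfTwo(64) == 64)       // exact powers of two are returned unchanged
      }
    }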
diff --git a/src/library/scala/collection/mutable/History.scala b/src/library/scala/collection/mutable/History.scala
index c1d94a9..19148c0 100644
--- a/src/library/scala/collection/mutable/History.scala
+++ b/src/library/scala/collection/mutable/History.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 
@@ -41,7 +42,7 @@ extends AbstractIterable[(Pub, Evt)]
    */
   def notify(pub: Pub, event: Evt) {
     if (log.length >= maxHistory)
-      log.dequeue
+      log.dequeue()
 
     log.enqueue((pub, event))
   }
@@ -50,7 +51,7 @@ extends AbstractIterable[(Pub, Evt)]
   def iterator: Iterator[(Pub, Evt)] = log.iterator
   def events: Iterator[Evt] = log.iterator map (_._2)
 
-  def clear() { log.clear }
+  def clear() { log.clear() }
 
   /** Checks if two history objects are structurally identical.
    *
@@ -60,5 +61,5 @@ extends AbstractIterable[(Pub, Evt)]
     case that: History[_, _] => this.log equals that.log
     case _                   => false
   }
-  override def hashCode = log.hashCode
+  override def hashCode = log.hashCode()
 }
diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
index 755eea8..9ece8b1 100644
--- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import scala.annotation.migration
@@ -24,6 +25,7 @@ import scala.annotation.migration
  *  @version 2.0, 01/01/2007
  *  @since   1
  */
+@deprecated("Adaptors are inherently unreliable and prone to performance problems.", "2.11.0")
 class ImmutableMapAdaptor[A, B](protected var imap: immutable.Map[A, B])
 extends AbstractMap[A, B]
    with Map[A, B]
diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
index 42c757d..730b222 100644
--- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
@@ -6,12 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package mutable
 
-
 /** This class can be used as an adaptor to create mutable sets from
  *  immutable set implementations. Only method `empty` has
  *  to be redefined if the immutable set on which this mutable set is
@@ -22,6 +20,7 @@ package mutable
  *  @version 1.0, 21/07/2003
  *  @since   1
  */
+@deprecated("Adaptors are inherently unreliable and prone to performance problems.", "2.11.0")
 class ImmutableSetAdaptor[A](protected var set: immutable.Set[A])
 extends AbstractSet[A]
    with Set[A]
@@ -48,6 +47,4 @@ extends AbstractSet[A]
   def -=(elem: A): this.type = { set = set - elem; this }
 
   override def clear(): Unit = { set = set.empty }
-
 }
-
diff --git a/src/library/scala/collection/mutable/IndexedSeq.scala b/src/library/scala/collection/mutable/IndexedSeq.scala
index 4d094e6..3d9630e 100644
--- a/src/library/scala/collection/mutable/IndexedSeq.scala
+++ b/src/library/scala/collection/mutable/IndexedSeq.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
diff --git a/src/library/scala/collection/mutable/IndexedSeqLike.scala b/src/library/scala/collection/mutable/IndexedSeqLike.scala
index f0c31ec..4cf794c 100644
--- a/src/library/scala/collection/mutable/IndexedSeqLike.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqLike.scala
@@ -6,11 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package mutable
-import generic._
 
 /** A subtrait of scala.collection.IndexedSeq which represents sequences
  *  that can be mutated.
diff --git a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
index cb7e8ef..09f0712 100755
--- a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
@@ -6,11 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package mutable
-import generic._
 
 /** A subtrait of scala.collection.IndexedSeq which represents sequences
  *  that can be mutated.
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index cf5166e..31a4749 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -82,8 +83,6 @@ self =>
   protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile
   protected override def newReversed: Transformed[A] = new AbstractTransformed[A] with Reversed
 
-  private implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This]
-
   override def filter(p: A => Boolean): This = newFiltered(p)
   override def init: This = newSliced(SliceInterval(0, self.length - 1))
   override def drop(n: Int): This = newSliced(SliceInterval(n, self.length))
@@ -94,6 +93,7 @@ self =>
   override def span(p: A => Boolean): (This, This) = (newTakenWhile(p), newDroppedWhile(p))
   override def splitAt(n: Int): (This, This) = (take(n), drop(n)) // !!!
   override def reverse: This = newReversed
+  override def tail: IndexedSeqView[A, Coll] = if (isEmpty) super.tail else slice(1, length)
 }
 
 /** An object containing the necessary implicit definitions to make
diff --git a/src/library/scala/collection/mutable/Iterable.scala b/src/library/scala/collection/mutable/Iterable.scala
index b79453e..92313c9 100644
--- a/src/library/scala/collection/mutable/Iterable.scala
+++ b/src/library/scala/collection/mutable/Iterable.scala
@@ -5,7 +5,8 @@
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
 \*                                                                      */
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -37,4 +38,4 @@ object Iterable extends TraversableFactory[Iterable] {
 }
 
 /** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */
-private[scala] abstract class AbstractIterable[A] extends scala.collection.AbstractIterable[A] with Iterable[A]
+abstract class AbstractIterable[A] extends scala.collection.AbstractIterable[A] with Iterable[A]
diff --git a/src/library/scala/collection/mutable/LazyBuilder.scala b/src/library/scala/collection/mutable/LazyBuilder.scala
index 0b56c86..ebee38b 100644
--- a/src/library/scala/collection/mutable/LazyBuilder.scala
+++ b/src/library/scala/collection/mutable/LazyBuilder.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 /** A builder that constructs its result lazily. Iterators or iterables to
diff --git a/src/library/scala/collection/mutable/LinearSeq.scala b/src/library/scala/collection/mutable/LinearSeq.scala
index f241a2f..3fa1004 100644
--- a/src/library/scala/collection/mutable/LinearSeq.scala
+++ b/src/library/scala/collection/mutable/LinearSeq.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
diff --git a/src/library/scala/collection/mutable/LinkedEntry.scala b/src/library/scala/collection/mutable/LinkedEntry.scala
index e4e2912..296e7fd 100644
--- a/src/library/scala/collection/mutable/LinkedEntry.scala
+++ b/src/library/scala/collection/mutable/LinkedEntry.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 /** Class for the linked hash map entry, used internally.
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index da2c36a..b64504b 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -84,7 +85,10 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
     }
   }
 
+  @deprecatedOverriding("+= should not be overridden so it stays consistent with put.", "2.11.0")
   def += (kv: (A, B)): this.type = { put(kv._1, kv._2); this }
+
+  @deprecatedOverriding("-= should not be overridden so it stays consistent with remove.", "2.11.0")
   def -=(key: A): this.type = { remove(key); this }
 
   def iterator: Iterator[(A, B)] = new AbstractIterator[(A, B)] {
@@ -92,33 +96,33 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
     def hasNext = cur ne null
     def next =
       if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res }
-      else Iterator.empty.next
+      else Iterator.empty.next()
   }
-  
+
   protected class FilteredKeys(p: A => Boolean) extends super.FilteredKeys(p) {
     override def empty = LinkedHashMap.empty
   }
-  
+
   override def filterKeys(p: A => Boolean): scala.collection.Map[A, B] = new FilteredKeys(p)
 
   protected class MappedValues[C](f: B => C) extends super.MappedValues[C](f) {
     override def empty = LinkedHashMap.empty
   }
-  
+
   override def mapValues[C](f: B => C): scala.collection.Map[A, C] = new MappedValues(f)
-  
+
   protected class DefaultKeySet extends super.DefaultKeySet {
     override def empty = LinkedHashSet.empty
   }
-  
+
   override def keySet: scala.collection.Set[A] = new DefaultKeySet
-  
+
   override def keysIterator: Iterator[A] = new AbstractIterator[A] {
     private var cur = firstEntry
     def hasNext = cur ne null
     def next =
       if (hasNext) { val res = cur.key; cur = cur.later; res }
-      else Iterator.empty.next
+      else Iterator.empty.next()
   }
 
   override def valuesIterator: Iterator[B] = new AbstractIterator[B] {
@@ -126,7 +130,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
     def hasNext = cur ne null
     def next =
       if (hasNext) { val res = cur.value; cur = cur.later; res }
-      else Iterator.empty.next
+      else Iterator.empty.next()
   }
 
   override def foreach[U](f: ((A, B)) => U) {
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index 1723258..1768c94 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -56,7 +57,10 @@ class LinkedHashSet[A] extends AbstractSet[A]
 
   def contains(elem: A): Boolean = findEntry(elem) ne null
 
+  @deprecatedOverriding("+= should not be overridden so it stays consistent with add.", "2.11.0")
   def += (elem: A): this.type = { add(elem); this }
+
+  @deprecatedOverriding("-= should not be overridden so it stays consistent with remove.", "2.11.0")
   def -= (elem: A): this.type = { remove(elem); this }
 
   override def add(elem: A): Boolean = findOrAddEntry(elem, null) eq null
@@ -78,9 +82,9 @@ class LinkedHashSet[A] extends AbstractSet[A]
     def hasNext = cur ne null
     def next =
       if (hasNext) { val res = cur.key; cur = cur.later; res }
-      else Iterator.empty.next
+      else Iterator.empty.next()
   }
-  
+
   override def foreach[U](f: A => U) {
     var cur = firstEntry
     while (cur ne null) {
diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala
index 29e6fdd..092698a 100644
--- a/src/library/scala/collection/mutable/LinkedList.scala
+++ b/src/library/scala/collection/mutable/LinkedList.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -75,6 +76,7 @@ import generic._
   *  }}}
   */
 @SerialVersionUID(-7308240733518833071L)
+@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0")
 class LinkedList[A]() extends AbstractSeq[A]
                          with LinearSeq[A]
                          with GenericTraversableTemplate[A, LinkedList]
@@ -112,6 +114,7 @@ class LinkedList[A]() extends AbstractSeq[A]
  *  @define Coll `LinkedList`
  *  @define coll linked list
  */
+@deprecated("Low-level linked lists are deprecated.", "2.11.0")
 object LinkedList extends SeqFactory[LinkedList] {
   override def empty[A]: LinkedList[A] = new LinkedList[A]
   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedList[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index 4f63ede..987b83d 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -6,12 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package mutable
 
-import generic._
 import scala.annotation.tailrec
 
 /** This extensible class may be used as a basis for implementing linked
@@ -57,6 +55,7 @@ import scala.annotation.tailrec
  *
  *  }}}
  */
+@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0")
 trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends SeqLike[A, This] { self =>
 
   var elem: A = _
@@ -188,6 +187,6 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq
   override def clone(): This = {
     val bf = newBuilder
     bf ++= this
-    bf.result
+    bf.result()
   }
 }
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 67af4a6..5e838d0 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -6,14 +6,14 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
 import immutable.{List, Nil, ::}
 import java.io._
+import scala.annotation.migration
 
 /** A `Buffer` implementation backed up by a list. It provides constant time
  *  prepend and append. Most other operations are linear.
@@ -56,12 +56,18 @@ final class ListBuffer[A]
   import scala.collection.Traversable
   import scala.collection.immutable.ListSerializeEnd
 
+  /** Expected invariants:
+   *  If start.isEmpty, last0 == null
+   *  If start.nonEmpty, last0 != null
+   *  If len == 0, start.isEmpty
+   *  If len > 0, start.nonEmpty
+   */
   private var start: List[A] = Nil
   private var last0: ::[A] = _
   private var exported: Boolean = false
   private var len = 0
 
-  protected def underlying: immutable.Seq[A] = start
+  protected def underlying: List[A] = start
 
   private def writeObject(out: ObjectOutputStream) {
     // write start
@@ -133,7 +139,7 @@ final class ListBuffer[A]
     if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString)
     if (exported) copy()
     if (n == 0) {
-      val newElem = new :: (x, start.tail);
+      val newElem = new :: (x, start.tail)
       if (last0 eq start) {
         last0 = newElem
       }
@@ -160,7 +166,7 @@ final class ListBuffer[A]
    */
   def += (x: A): this.type = {
     if (exported) copy()
-    if (start.isEmpty) {
+    if (isEmpty) {
       last0 = new :: (x, Nil)
       start = last0
     } else {
@@ -172,8 +178,11 @@ final class ListBuffer[A]
     this
   }
 
-  override def ++=(xs: TraversableOnce[A]): this.type =
-    if (xs.asInstanceOf[AnyRef] eq this) ++= (this take size) else super.++=(xs)
+  override def ++=(xs: TraversableOnce[A]): this.type = xs match {
+    case x: AnyRef if x eq this      => this ++= (this take size)
+    case _                           => super.++=(xs)
+
+  }
 
   override def ++=:(xs: TraversableOnce[A]): this.type =
     if (xs.asInstanceOf[AnyRef] eq this) ++=: (this take size) else super.++=:(xs)
@@ -182,6 +191,7 @@ final class ListBuffer[A]
    */
   def clear() {
     start = Nil
+    last0 = null
     exported = false
     len = 0
   }
@@ -195,7 +205,7 @@ final class ListBuffer[A]
   def +=: (x: A): this.type = {
     if (exported) copy()
     val newElem = new :: (x, start)
-    if (start.isEmpty) last0 = newElem
+    if (isEmpty) last0 = newElem
     start = newElem
     len += 1
     this
@@ -238,13 +248,22 @@ final class ListBuffer[A]
     }
   }
 
+  /** Reduce the length of the buffer, and null out last0
+   *  if this reduces the length to 0.
+   */
+  private def reduceLengthBy(num: Int) {
+    len -= num
+    if (len <= 0)   // obviously shouldn't be < 0, but still better not to leak
+      last0 = null
+  }
+
   /** Removes a given number of elements on a given index position. May take
    *  time linear in the buffer size.
    *
    *  @param n         the index which refers to the first element to remove.
    *  @param count     the number of elements to remove.
    */
-  @annotation.migration("Invalid input values will be rejected in future releases.", "2.11")
+  @migration("Invalid input values will be rejected in future releases.", "2.11")
   override def remove(n: Int, count: Int) {
     if (n >= len)
       return
@@ -253,7 +272,6 @@ final class ListBuffer[A]
     if (exported) copy()
     val n1 = n max 0
     val count1 = count min (len - n1)
-    var old = start.head
     if (n1 == 0) {
       var c = count1
       while (c > 0) {
@@ -274,7 +292,7 @@ final class ListBuffer[A]
         c -= 1
       }
     }
-    len -= count1
+    reduceLengthBy(count1)
   }
 
 // Implementation of abstract method in Builder
@@ -285,7 +303,7 @@ final class ListBuffer[A]
    *  copied lazily, the first time it is mutated.
    */
   override def toList: List[A] = {
-    exported = !start.isEmpty
+    exported = !isEmpty
     start
   }
 
@@ -296,7 +314,7 @@ final class ListBuffer[A]
    *  @param xs   the list to which elements are prepended
    */
   def prependToList(xs: List[A]): List[A] = {
-    if (start.isEmpty) xs
+    if (isEmpty) xs
     else {
       if (exported) copy()
       last0.tl = xs
@@ -331,7 +349,7 @@ final class ListBuffer[A]
       if (last0 eq cursor.tail) last0 = cursor.asInstanceOf[::[A]]
       cursor.asInstanceOf[::[A]].tl = cursor.tail.tail
     }
-    len -= 1
+    reduceLengthBy(1)
     old
   }
 
@@ -343,11 +361,12 @@ final class ListBuffer[A]
    */
   override def -= (elem: A): this.type = {
     if (exported) copy()
-    if (start.isEmpty) {}
+    if (isEmpty) {}
     else if (start.head == elem) {
       start = start.tail
-      len -= 1
-    } else {
+      reduceLengthBy(1)
+    }
+    else {
       var cursor = start
       while (!cursor.tail.isEmpty && cursor.tail.head != elem) {
         cursor = cursor.tail
@@ -357,12 +376,18 @@ final class ListBuffer[A]
         if (z.tl == last0)
           last0 = z
         z.tl = cursor.tail.tail
-        len -= 1
+        reduceLengthBy(1)
       }
     }
     this
   }
 
+  /** Returns an iterator over this `ListBuffer`.  The iterator will reflect
+   *  changes made to the underlying `ListBuffer` beyond the next element;
+   *  the next element's value is cached so that `hasNext` and `next` are
+   *  guaranteed to be consistent.  In particular, an empty `ListBuffer`
+   *  will give an empty iterator even if the `ListBuffer` is later filled.
+   */
   override def iterator: Iterator[A] = new AbstractIterator[A] {
     // Have to be careful iterating over mutable structures.
     // This used to have "(cursor ne last0)" as part of its hasNext
@@ -371,32 +396,26 @@ final class ListBuffer[A]
     // a structure while iterating, but we should never return hasNext == true
     // on exhausted iterators (thus creating exceptions) merely because
     // values were changed in-place.
-    var cursor: List[A] = null
-    var delivered = 0
-
-    // Note: arguably this should not be a "dynamic test" against
-    // the present length of the buffer, but fixed at the size of the
-    // buffer when the iterator is created.  At the moment such a
-    // change breaks tests: see comment on def units in Global.scala.
-    def hasNext: Boolean = delivered < ListBuffer.this.length
+    var cursor: List[A] = if (ListBuffer.this.isEmpty) Nil else start
+
+    def hasNext: Boolean = cursor ne Nil
     def next(): A =
-      if (!hasNext)
-        throw new NoSuchElementException("next on empty Iterator")
+      if (!hasNext) throw new NoSuchElementException("next on empty Iterator")
       else {
-        if (cursor eq null) cursor = start
-        else cursor = cursor.tail
-        delivered += 1
-        cursor.head
+        val ans = cursor.head
+        cursor = cursor.tail
+        ans
       }
   }
 
-  /** expose the underlying list but do not mark it as exported */
+  @deprecated("The result of this method will change along with this buffer, which is often not what's expected.", "2.11.0")
   override def readOnly: List[A] = start
 
   // Private methods
 
   /** Copy contents of this buffer */
   private def copy() {
+    if (isEmpty) return
     var cursor = start
     val limit = last0.tail
     clear()
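
The rewritten ListBuffer iterator captures the cons cells at creation time instead of re-checking the buffer's length on every step, which is what the new doc comment above describes. A quick check of the observable behaviour, assuming 2.11 semantics:

    object ListBufferIteratorCheck {
      import scala.collection.mutable.ListBuffer
      def demo(): Unit = {
        val emptyBuf = ListBuffer.empty[Int]
        val it0 = emptyBuf.iterator        // created while the buffer is empty...
        emptyBuf += 1
        assert(!it0.hasNext)               // ...so it stays empty even after the buffer is filled

        val buf = ListBuffer(1, 2)
        val it = buf.iterator
        buf += 3                           // appends mutate the underlying cons cells,
        assert(it.toList == List(1, 2, 3)) // so they remain visible past the cached element
      }
    }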
diff --git a/src/library/scala/collection/mutable/ListMap.scala b/src/library/scala/collection/mutable/ListMap.scala
index 7f05def..2ea5b1f 100644
--- a/src/library/scala/collection/mutable/ListMap.scala
+++ b/src/library/scala/collection/mutable/ListMap.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -49,7 +50,10 @@ extends AbstractMap[A, B]
   def get(key: A): Option[B] = elems find (_._1 == key) map (_._2)
   def iterator: Iterator[(A, B)] = elems.iterator
 
+  @deprecatedOverriding("No sensible way to override += as private remove is used in multiple places internally.", "2.11.0")
   def += (kv: (A, B)) = { elems = remove(kv._1, elems, List()); elems = kv :: elems; siz += 1; this }
+
+  @deprecatedOverriding("No sensible way to override -= as private remove is used in multiple places internally.", "2.11.0")
   def -= (key: A) = { elems = remove(key, elems, List()); this }
 
   @tailrec
@@ -60,7 +64,10 @@ extends AbstractMap[A, B]
   }
 
 
+  @deprecatedOverriding("No sensible way to override as this functionality relies upon access to private methods.", "2.11.0")
   override def clear() = { elems = List(); siz = 0 }
+
+  @deprecatedOverriding("No sensible way to override as this functionality relies upon access to private methods.", "2.11.0")
   override def size: Int = siz
 }
 
diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala
new file mode 100644
index 0000000..984ae6f
--- /dev/null
+++ b/src/library/scala/collection/mutable/LongMap.scala
@@ -0,0 +1,569 @@
+package scala
+package collection
+package mutable
+
+import generic.CanBuildFrom
+
+/** This class implements mutable maps with `Long` keys based on a hash table with open addressing.
+ * 
+ *  Basic map operations on single entries, including `contains` and `get`, 
+ *  are typically substantially faster with `LongMap` than [[HashMap]].  Methods
+ *  that act on the whole map,  including `foreach` and `map` are not in
+ *  general expected to be faster than with a generic map, save for those
+ *  that take particular advantage of the internal structure of the map:
+ *  `foreachKey`, `foreachValue`, `mapValuesNow`, and `transformValues`.
+ * 
+ *  Maps with open addressing may become less efficient at lookup after
+ *  repeated addition/removal of elements.  Although `LongMap` makes a
+ *  decent attempt to remain efficient regardless,  calling `repack`
+ *  on a map that will no longer have elements removed but will be
+ *  used heavily may save both time and storage space.
+ * 
+ *  This map is not intended to contain more than 2^29 entries (approximately
+ *  500 million).  The maximum capacity is 2^30, but performance will degrade
+ *  rapidly as 2^30 is approached.
+ *
+ */
+final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBufferSize: Int, initBlank: Boolean)
+extends AbstractMap[Long, V]
+   with Map[Long, V]
+   with MapLike[Long, V, LongMap[V]]
+   with Serializable
+{
+  import LongMap._
+
+  def this() = this(LongMap.exceptionDefault, 16, true)
+  
+  /** Creates a new `LongMap` that returns default values according to a supplied key-value mapping. */
+  def this(defaultEntry: Long => V) = this(defaultEntry, 16, true)
+  
+  /** Creates a new `LongMap` with an initial buffer of specified size.
+   * 
+   *  A LongMap can typically contain half as many elements as its buffer size
+   *  before it requires resizing.
+   */
+  def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, true)
+  
+  /** Creates a new `LongMap` with specified default values and initial buffer size. */
+  def this(defaultEntry: Long => V,  initialBufferSize: Int) = this(defaultEntry,  initialBufferSize,  true)
+  
+  private[this] var mask = 0
+  private[this] var extraKeys: Int = 0
+  private[this] var zeroValue: AnyRef = null
+  private[this] var minValue: AnyRef = null
+  private[this] var _size = 0
+  private[this] var _vacant = 0
+  private[this] var _keys: Array[Long] = null
+  private[this] var _values: Array[AnyRef] = null
+    
+  if (initBlank) defaultInitialize(initialBufferSize)
+  
+  private[this] def defaultInitialize(n: Int) = {
+    mask = 
+      if (n<0) 0x7
+      else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7
+    _keys = new Array[Long](mask+1)
+    _values = new Array[AnyRef](mask+1)
+  }
+  
+  private[collection] def initializeTo(
+    m: Int, ek: Int, zv: AnyRef, mv: AnyRef, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef]
+  ) {
+    mask = m; extraKeys = ek; zeroValue = zv; minValue = mv; _size = sz; _vacant = vc; _keys = kz; _values = vz
+  }
+  
+  override def size: Int = _size + (extraKeys+1)/2
+  override def empty: LongMap[V] = new LongMap()
+  
+  private def imbalanced: Boolean = 
+    (_size + _vacant) > 0.5*mask || _vacant > _size
+  
+  private def toIndex(k: Long): Int = {
+    // Part of the MurmurHash3 32 bit finalizer
+    val h = ((k ^ (k >>> 32)) & 0xFFFFFFFFL).toInt
+    var x = (h ^ (h >>> 16)) * 0x85EBCA6B
+    (x ^ (x >>> 13)) & mask
+  }
+  
+  private def seekEmpty(k: Long): Int = {
+    var e = toIndex(k)
+    var x = 0
+    while (_keys(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask }
+    e
+  }
+  
+  private def seekEntry(k: Long): Int = {
+    var e = toIndex(k)
+    var x = 0
+    var q = 0L
+    while ({ q = _keys(e); if (q==k) return e; q != 0}) { x += 1; e = (e + 2*(x+1)*x - 3) & mask }
+    e | MissingBit
+  }
+  
+  private def seekEntryOrOpen(k: Long): Int = {
+    var e = toIndex(k)
+    var x = 0
+    var q = 0L
+    while ({ q = _keys(e); if (q==k) return e; q+q != 0}) {
+      x += 1
+      e = (e + 2*(x+1)*x - 3) & mask
+    }
+    if (q == 0) return e | MissingBit
+    val o = e | MissVacant
+    while ({ q = _keys(e); if (q==k) return e; q != 0}) {
+      x += 1
+      e = (e + 2*(x+1)*x - 3) & mask
+    }
+    o
+  }
+  
+  override def contains(key: Long): Boolean = {
+    if (key == -key) (((key>>>63).toInt+1) & extraKeys) != 0
+    else seekEntry(key) >= 0
+  }
+  
+  override def get(key: Long): Option[V] = {
+    if (key == -key) {
+      if ((((key>>>63).toInt+1) & extraKeys) == 0) None
+      else if (key == 0) Some(zeroValue.asInstanceOf[V])
+      else Some(minValue.asInstanceOf[V])
+    }
+    else {
+      val i = seekEntry(key)
+      if (i < 0) None else Some(_values(i).asInstanceOf[V])
+    }
+  }
+  
+  override def getOrElse[V1 >: V](key: Long, default: => V1): V1 = {
+    if (key == -key) {
+      if ((((key>>>63).toInt+1) & extraKeys) == 0) default
+      else if (key == 0) zeroValue.asInstanceOf[V1]
+      else minValue.asInstanceOf[V1]
+    }
+    else {
+      val i = seekEntry(key)
+      if (i < 0) default else _values(i).asInstanceOf[V1]
+    }
+  }
+  
+  override def getOrElseUpdate(key: Long, defaultValue: => V): V = {
+    if (key == -key) {
+      val kbits = (key>>>63).toInt + 1
+      if ((kbits & extraKeys) == 0) {
+        val value = defaultValue
+        extraKeys |= kbits
+        if (key == 0) zeroValue = value.asInstanceOf[AnyRef]
+        else minValue = value.asInstanceOf[AnyRef]
+        value
+      }
+      else if (key == 0) zeroValue.asInstanceOf[V]
+      else minValue.asInstanceOf[V]
+    }
+    else {
+      var i = seekEntryOrOpen(key)
+      if (i < 0) {
+        // It is possible that the default value computation was side-effecting
+        // Our hash table may have resized or even contain what we want now
+        // (but if it does, we'll replace it)
+        val value = {
+          val ok = _keys
+          val ans = defaultValue
+          if (ok ne _keys) {
+            i = seekEntryOrOpen(key)
+            if (i >= 0) _size -= 1
+          }
+          ans
+        }
+        _size += 1
+        val j = i & IndexMask
+        _keys(j) = key
+        _values(j) = value.asInstanceOf[AnyRef]
+        if ((i & VacantBit) != 0) _vacant -= 1
+        else if (imbalanced) repack()
+        value
+      }
+      else _values(i).asInstanceOf[V]
+    }
+  }
+  
+  /** Retrieves the value associated with a key, or the default for that type if none exists
+   *  (null for AnyRef, 0 for floats and integers).
+   * 
+   *  Note: this is the fastest way to retrieve a value that may or
+   *  may not exist, if the default null/zero is acceptable.  For key/value
+   *  pairs that do exist,  `apply` (i.e. `map(key)`) is equally fast.
+   */
+  def getOrNull(key: Long): V = {
+    if (key == -key) {
+      if ((((key>>>63).toInt+1) & extraKeys) == 0) null.asInstanceOf[V]
+      else if (key == 0) zeroValue.asInstanceOf[V]
+      else minValue.asInstanceOf[V]
+    }
+    else {
+      val i = seekEntry(key)
+      if (i < 0) null.asInstanceOf[V] else _values(i).asInstanceOf[V]
+    }
+  }
+  
+  /** Retrieves the value associated with a key. 
+   *  If the key does not exist in the map, the `defaultEntry` for that key
+   *  will be returned instead.
+   */
+  override def apply(key: Long): V = {
+    if (key == -key) {
+      if ((((key>>>63).toInt+1) & extraKeys) == 0) defaultEntry(key)
+      else if (key == 0) zeroValue.asInstanceOf[V]
+      else minValue.asInstanceOf[V]
+    }
+    else {
+      val i = seekEntry(key)
+      if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V]
+    }
+  }
+  
+  /** The user-supplied default value for the key.  Throws an exception
+   *  if no other default behavior was specified.
+   */
+  override def default(key: Long) = defaultEntry(key)
+  
+  private def repack(newMask: Int) {
+    val ok = _keys
+    val ov = _values
+    mask = newMask
+    _keys = new Array[Long](mask+1)
+    _values = new Array[AnyRef](mask+1)
+    _vacant = 0
+    var i = 0
+    while (i < ok.length) {
+      val k = ok(i)
+      if (k != -k) {
+        val j = seekEmpty(k)
+        _keys(j) = k
+        _values(j) = ov(i)
+      }
+      i += 1
+    }
+  }
+  
+  /** Repacks the contents of this `LongMap` for maximum efficiency of lookup.
+   * 
+   *  For maps that undergo a complex creation process with both addition and
+   *  removal of keys, and then are used heavily with no further removal of
+   *  elements, calling `repack` after the end of the creation can result in
+   *  improved performance.  Repacking takes time proportional to the number
+   *  of entries in the map.
+   */
+  def repack() {
+    var m = mask
+    if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask
+    while (m > 8 && 8*_size < m) m = m >>> 1
+    repack(m)
+  }
+  
+  override def put(key: Long, value: V): Option[V] = {
+    if (key == -key) {
+      if (key == 0) {
+        val ans = if ((extraKeys&1) == 1) Some(zeroValue.asInstanceOf[V]) else None
+        zeroValue = value.asInstanceOf[AnyRef]
+        extraKeys |= 1
+        ans
+      }
+      else {
+        val ans = if ((extraKeys&2) == 1) Some(minValue.asInstanceOf[V]) else None
+        minValue = value.asInstanceOf[AnyRef]
+        extraKeys |= 2
+        ans
+      }
+    }
+    else {
+      val i = seekEntryOrOpen(key)
+      if (i < 0) {
+        val j = i & IndexMask
+        _keys(j) = key
+        _values(j) = value.asInstanceOf[AnyRef]
+        _size += 1
+        if ((i & VacantBit) != 0) _vacant -= 1
+        else if (imbalanced) repack()
+        None
+      }
+      else {
+        val ans = Some(_values(i).asInstanceOf[V])
+        _keys(i) = key
+        _values(i) = value.asInstanceOf[AnyRef]
+        ans
+      }
+    }
+  }
+  
+  /** Updates the map to include a new key-value pair.
+   * 
+   *  This is the fastest way to add an entry to a `LongMap`.
+   */
+  override def update(key: Long, value: V): Unit = {
+    if (key == -key) {
+      if (key == 0) {
+        zeroValue = value.asInstanceOf[AnyRef]
+        extraKeys |= 1
+      }
+      else {
+        minValue = value.asInstanceOf[AnyRef]
+        extraKeys |= 2
+      }
+    }
+    else {
+      var i = seekEntryOrOpen(key)
+      if (i < 0) {
+        val j = i & IndexMask
+        _keys(j) = key
+        _values(j) = value.asInstanceOf[AnyRef]
+        _size += 1
+        if ((i & VacantBit) != 0) _vacant -= 1
+        else if (imbalanced) repack()
+      }
+      else {
+        _keys(i) = key
+        _values(i) = value.asInstanceOf[AnyRef]
+      }
+    }
+  }
+  
+  /** Adds a new key/value pair to this map and returns the map. */
+  def +=(key: Long, value: V): this.type = { update(key, value); this }
+  
+  def +=(kv: (Long, V)): this.type = { update(kv._1, kv._2); this }
+  
+  def -=(key: Long): this.type = {
+    if (key == -key) {
+      if (key == 0L) {
+        extraKeys &= 0x2
+        zeroValue = null
+      }
+      else {
+        extraKeys &= 0x1
+        minValue = null
+      }
+    }
+    else {
+      val i = seekEntry(key)
+      if (i >= 0) {
+        _size -= 1
+        _vacant += 1
+        _keys(i) = Long.MinValue
+        _values(i) = null
+      }
+    }
+    this
+  }
+  
+  def iterator: Iterator[(Long, V)] = new Iterator[(Long, V)] {
+    private[this] val kz = _keys
+    private[this] val vz = _values
+    
+    private[this] var nextPair: (Long, V) = 
+      if (extraKeys==0) null
+      else if ((extraKeys&1)==1) (0L, zeroValue.asInstanceOf[V])
+      else (Long.MinValue, minValue.asInstanceOf[V])
+
+    private[this] var anotherPair: (Long, V) = 
+      if (extraKeys==3) (Long.MinValue, minValue.asInstanceOf[V])
+      else null
+        
+    private[this] var index = 0
+    
+    def hasNext: Boolean = nextPair != null || (index < kz.length && {
+      var q = kz(index)
+      while (q == -q) {
+        index += 1
+        if (index >= kz.length) return false
+        q = kz(index)
+      }
+      nextPair = (kz(index), vz(index).asInstanceOf[V])
+      index += 1
+      true
+    })
+    def next = {
+      if (nextPair == null && !hasNext) throw new NoSuchElementException("next")
+      val ans = nextPair
+      if (anotherPair != null) {
+        nextPair = anotherPair
+        anotherPair = null
+      }
+      nextPair = null
+      ans
+    }
+  }
+  
+  override def foreach[A](f: ((Long,V)) => A) {
+    var i,j = 0
+    while (i < _keys.length & j < _size) {
+      val k = _keys(i)
+      if (k != -k) {
+        j += 1
+        f((k, _values(i).asInstanceOf[V]))
+      }
+      i += 1
+    }
+    if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V]))
+    if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V]))
+  }
+  
+  override def clone(): LongMap[V] = {
+    val kz = java.util.Arrays.copyOf(_keys, _keys.length)
+    val vz = java.util.Arrays.copyOf(_values,  _values.length)
+    val lm = new LongMap[V](defaultEntry, 1, false)
+    lm.initializeTo(mask, extraKeys, zeroValue, minValue, _size, _vacant, kz,  vz)
+    lm
+  }
+  
+  /** Applies a function to all keys of this map. */
+  def foreachKey[A](f: Long => A) {
+    var i,j = 0
+    while (i < _keys.length & j < _size) {
+      val k = _keys(i)
+      if (k != -k) {
+        j += 1
+        f(k)
+      }
+      i += 1
+    }
+    if ((extraKeys & 1) == 1) f(0L)
+    if ((extraKeys & 2) == 2) f(Long.MinValue)
+  }
+
+  /** Applies a function to all values of this map. */
+  def foreachValue[A](f: V => A) {
+    var i,j = 0
+    while (i < _keys.length & j < _size) {
+      val k = _keys(i)
+      if (k != -k) {
+        j += 1
+        f(_values(i).asInstanceOf[V])
+      }
+      i += 1
+    }
+    if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V])
+    if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V])
+  }
+  
+  /** Creates a new `LongMap` with different values.
+   *  Unlike `mapValues`, this method generates a new
+   *  collection immediately.
+   */
+  def mapValuesNow[V1](f: V => V1): LongMap[V1] = {
+    val lm = new LongMap[V1](LongMap.exceptionDefault,  1,  false)
+    val kz = java.util.Arrays.copyOf(_keys, _keys.length)
+    val vz = new Array[AnyRef](_values.length)
+    var i,j = 0
+    while (i < _keys.length & j < _size) {
+      val k = _keys(i)
+      if (k != -k) {
+        j += 1
+        vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef]
+      }
+      i += 1
+    }
+    val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null
+    val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null
+    lm.initializeTo(mask, extraKeys, zv, mv, _size, _vacant, kz, vz)
+    lm
+  }
+  
+  /** Applies a transformation function to all values stored in this map. 
+   *  Note: the default, if any,  is not transformed.
+   */
+  def transformValues(f: V => V): this.type = {
+    var i,j = 0
+    while (i < _keys.length & j < _size) {
+      val k = _keys(i)
+      if (k != -k) {
+        j += 1
+        _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef]
+      }
+      i += 1
+    }
+    if ((extraKeys & 1) == 1) zeroValue = f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef]
+    if ((extraKeys & 2) == 2) minValue = f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef]
+    this
+  }
+  
+  /*
+  override def toString = {
+    val sb = new StringBuilder("LongMap(")
+    var n = 0
+    foreach{ case (k,v) => 
+      if (n > 0) sb ++= ", "
+      sb ++= k.toString
+      sb ++= " -> "
+      sb ++= v.toString
+      n += 1
+    }
+    sb += ')'
+    sb.result
+  }
+  */
+}
+
+object LongMap {
+  private final val IndexMask  = 0x3FFFFFFF
+  private final val MissingBit = 0x80000000
+  private final val VacantBit  = 0x40000000
+  private final val MissVacant = 0xC0000000
+  
+  private val exceptionDefault: Long => Nothing = (k: Long) => throw new NoSuchElementException(k.toString)
+  
+  implicit def canBuildFrom[V, U]: CanBuildFrom[LongMap[V], (Long, U), LongMap[U]] = 
+    new CanBuildFrom[LongMap[V], (Long, U), LongMap[U]] {
+      def apply(from: LongMap[V]): LongMapBuilder[U] = apply()
+      def apply(): LongMapBuilder[U] = new LongMapBuilder[U]
+    }
+  
+  final class LongMapBuilder[V] extends Builder[(Long, V), LongMap[V]] {
+    private[collection] var elems: LongMap[V] = new LongMap[V]
+    def +=(entry: (Long, V)): this.type = {
+      elems += entry
+      this
+    }
+    def clear() { elems = new LongMap[V] }
+    def result(): LongMap[V] = elems
+  }
+
+  /** Creates a new `LongMap` with zero or more key/value pairs. */
+  def apply[V](elems: (Long, V)*): LongMap[V] = {
+    val sz = if (elems.hasDefiniteSize) elems.size else 4
+    val lm = new LongMap[V](sz * 2)
+    elems.foreach{ case (k,v) => lm(k) = v }
+    if (lm.size < (sz>>3)) lm.repack()
+    lm
+  }
+  
+  /** Creates a new empty `LongMap`. */
+  def empty[V]: LongMap[V] = new LongMap[V]
+  
+  /** Creates a new empty `LongMap` with the supplied default */
+  def withDefault[V](default: Long => V): LongMap[V] = new LongMap[V](default)
+  
+  /** Creates a new `LongMap` from arrays of keys and values. 
+   *  Equivalent to but more efficient than `LongMap((keys zip values): _*)`.
+   */
+  def fromZip[V](keys: Array[Long], values: Array[V]): LongMap[V] = {
+    val sz = math.min(keys.length, values.length)
+    val lm = new LongMap[V](sz * 2)
+    var i = 0
+    while (i < sz) { lm(keys(i)) = values(i); i += 1 }
+    if (lm.size < (sz>>3)) lm.repack()
+    lm
+  }
+  
+  /** Creates a new `LongMap` from keys and values. 
+   *  Equivalent to but more efficient than `LongMap((keys zip values): _*)`.
+   */
+  def fromZip[V](keys: Iterable[Long], values: Iterable[V]): LongMap[V] = {
+    val sz = math.min(keys.size, values.size)
+    val lm = new LongMap[V](sz * 2)
+    val ki = keys.iterator
+    val vi = values.iterator
+    while (ki.hasNext && vi.hasNext) lm(ki.next) = vi.next
+    if (lm.size < (sz >> 3)) lm.repack()
+    lm
+  }
+}
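
For illustration, a minimal sketch of how the LongMap API introduced above fits together (the sample keys and values are assumed, not taken from the patch):

    import scala.collection.mutable.LongMap

    // 0L and Long.MinValue live in the dedicated extraKeys/zeroValue/minValue
    // slots, since they double as empty/vacant sentinels in the key array.
    val m = LongMap(1L -> "one", 0L -> "zero", Long.MinValue -> "min")

    // fromZip pairs keys and values positionally, stopping at the shorter input.
    val z = LongMap.fromZip(Array(1L, 2L, 3L), Array("a", "b", "c"))

    // mapValuesNow eagerly builds a new LongMap, unlike the lazy mapValues view.
    val lengths: LongMap[Int] = z.mapValuesNow(_.length)

    // foreachKey and foreachValue walk the map without allocating a tuple per entry.
    m.foreachKey(k => println(k))
    m.foreachValue(v => println(v))
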
diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala
index f72e1fc..2ac3cb6 100644
--- a/src/library/scala/collection/mutable/Map.scala
+++ b/src/library/scala/collection/mutable/Map.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -42,22 +43,10 @@ trait Map[A, B]
    *
    *  Invoking transformer methods (e.g. `map`) will not preserve the default value.
    *
-   *  @param d     the function mapping keys to values, used for non-present keys
+   *  @param d     default value used for non-present keys
    *  @return      a wrapper of the map with a default value
    */
   def withDefaultValue(d: B): mutable.Map[A, B] = new Map.WithDefault[A, B](this, x => d)
-
-  /** Return a read-only projection of this map.  !!! or just use an (immutable) MapProxy?
-  def readOnly : scala.collection.Map[A, B] = new scala.collection.Map[A, B] {
-    override def size = self.size
-    override def update(key: A, value: B) = self.update(key, value)
-    override def - (elem: A) = self - elem
-    override def iterator = self.iterator
-    override def foreach[U](f: ((A, B)) =>  U) = self.foreach(f)
-    override def empty[C] = self.empty[C]
-    def get(key: A) = self.get(key)
-  }
-  */
 }
 
 /** $factoryInfo
@@ -88,4 +77,4 @@ object Map extends MutableMapFactory[Map] {
 }
 
 /** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
-private[scala] abstract class AbstractMap[A, B] extends scala.collection.AbstractMap[A, B] with Map[A, B]
+abstract class AbstractMap[A, B] extends scala.collection.AbstractMap[A, B] with Map[A, B]
diff --git a/src/library/scala/collection/mutable/MapBuilder.scala b/src/library/scala/collection/mutable/MapBuilder.scala
index 8468e09..a5a6b12 100644
--- a/src/library/scala/collection/mutable/MapBuilder.scala
+++ b/src/library/scala/collection/mutable/MapBuilder.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 /** The canonical builder for immutable maps, working with the map's `+` method
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 42e5a0a..6230fc2 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -7,12 +7,13 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
-import scala.annotation.{migration, bridge}
-import parallel.mutable.ParMap
+import scala.annotation.migration
+import scala.collection.parallel.mutable.ParMap
 
 /** A template trait for mutable maps.
  *  $mapNote
@@ -50,8 +51,6 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
      with Parallelizable[(A, B), ParMap[A, B]]
 { self =>
 
-  import scala.collection.Traversable
-
   /** A common implementation of `newBuilder` for all mutable maps
    *    in terms of `empty`.
    *
diff --git a/src/library/scala/collection/mutable/MapProxy.scala b/src/library/scala/collection/mutable/MapProxy.scala
index c730e2b..552cd97 100644
--- a/src/library/scala/collection/mutable/MapProxy.scala
+++ b/src/library/scala/collection/mutable/MapProxy.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 /**
@@ -19,6 +20,7 @@ package mutable
  *  @version 2.0, 31/12/2006
  *  @since   1
  */
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
 trait MapProxy[A, B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] {
   private def newProxy[B1 >: B](newSelf: Map[A, B1]): MapProxy[A, B1] =
     new MapProxy[A, B1] { val self = newSelf }
diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala
index 4635bfb..78dfc35 100644
--- a/src/library/scala/collection/mutable/MultiMap.scala
+++ b/src/library/scala/collection/mutable/MultiMap.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index bc6272b..a0d3ee0 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -61,8 +60,7 @@ extends AbstractSeq[A]
     tl
   }
 
-  // this method must be private for binary compatibility
-  private final def tailImpl(tl: MutableList[A]) {
+  protected final def tailImpl(tl: MutableList[A]) {
     require(nonEmpty, "tail of empty list")
     tl.first0 = first0.tail
     tl.len = len - 1
@@ -149,12 +147,10 @@ extends AbstractSeq[A]
   override def clone(): MutableList[A]  = {
     val bf = newBuilder
     bf ++= seq
-    bf.result
+    bf.result()
   }
-
 }
 
-
 object MutableList extends SeqFactory[MutableList] {
   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, MutableList[A]] =
     ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala
index bcaf977..9c3247f 100644
--- a/src/library/scala/collection/mutable/ObservableBuffer.scala
+++ b/src/library/scala/collection/mutable/ObservableBuffer.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import script._
@@ -22,6 +23,7 @@ import script._
  *  @version 1.0, 08/07/2003
  *  @since   1
  */
+@deprecated("Observables are deprecated because scripting is deprecated.", "2.11.0")
 trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoable]
 {
   type Pub <: ObservableBuffer[A]
@@ -65,7 +67,7 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa
   }
 
   abstract override def clear(): Unit = {
-    super.clear
+    super.clear()
     publish(new Reset with Undoable {
       def undo() { throw new UnsupportedOperationException("cannot undo") }
     })
diff --git a/src/library/scala/collection/mutable/ObservableMap.scala b/src/library/scala/collection/mutable/ObservableMap.scala
index d81c90b..7509b72 100644
--- a/src/library/scala/collection/mutable/ObservableMap.scala
+++ b/src/library/scala/collection/mutable/ObservableMap.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import script._
@@ -24,6 +25,7 @@ import script._
  *  @version 2.0, 31/12/2006
  *  @since   1
  */
+@deprecated("Observables are deprecated because scripting is deprecated.", "2.11.0")
 trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with Undoable]
 {
 
@@ -60,7 +62,7 @@ trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with
   }
 
   abstract override def clear(): Unit = {
-    super.clear
+    super.clear()
     publish(new Reset with Undoable {
       def undo(): Unit = throw new UnsupportedOperationException("cannot undo")
     })
diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala
index 3e79506..19b4a5e 100644
--- a/src/library/scala/collection/mutable/ObservableSet.scala
+++ b/src/library/scala/collection/mutable/ObservableSet.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import script._
@@ -22,6 +23,7 @@ import script._
  *  @version 1.0, 08/07/2003
  *  @since   1
  */
+@deprecated("Observables are deprecated because scripting is deprecated.", "2.11.0")
 trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable]
 {
 
@@ -44,7 +46,7 @@ trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable]
   }
 
   abstract override def clear(): Unit = {
-    super.clear
+    super.clear()
     publish(new Reset with Undoable {
       def undo(): Unit = throw new UnsupportedOperationException("cannot undo")
     })
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index 8b3e524..aade2ed 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -17,7 +17,6 @@ package mutable
  *  @since 2.7
  */
 object OpenHashMap {
-  import generic.BitOperations.Int.highestOneBit
 
   def apply[K, V](elems : (K, V)*) = new OpenHashMap[K, V] ++= elems
   def empty[K, V] = new OpenHashMap[K, V]
@@ -27,7 +26,7 @@ object OpenHashMap {
                                             var value: Option[Value])
                 extends HashEntry[Key, OpenEntry[Key, Value]]
 
-  private[mutable] def nextPowerOfTwo(i : Int) = highestOneBit(i) << 1;
+  private[mutable] def nextPositivePowerOfTwo(i : Int) = 1 << (32 - Integer.numberOfLeadingZeros(i - 1))
 }
 
 /** A mutable hash map based on an open hashing scheme. The precise scheme is
@@ -62,7 +61,7 @@ extends AbstractMap[Key, Value]
 
   override def empty: OpenHashMap[Key, Value] = OpenHashMap.empty[Key, Value]
 
-  private[this] val actualInitialSize = OpenHashMap.nextPowerOfTwo(initialSize)
+  private[this] val actualInitialSize = OpenHashMap.nextPositivePowerOfTwo(initialSize)
 
   private var mask = actualInitialSize - 1
   private var table : Array[Entry] = new Array[Entry](actualInitialSize)
@@ -78,8 +77,8 @@ extends AbstractMap[Key, Value]
   /** Returns a mangled hash code of the provided key. */
   protected def hashOf(key: Key) = {
     var h = key.##
-    h ^= ((h >>> 20) ^ (h >>> 12));
-    h ^ (h >>> 7) ^ (h >>> 4);
+    h ^= ((h >>> 20) ^ (h >>> 12))
+    h ^ (h >>> 7) ^ (h >>> 4)
   }
 
   private[this] def growTable() = {
@@ -89,7 +88,7 @@ extends AbstractMap[Key, Value]
     table = new Array[Entry](newSize)
     mask = newSize - 1
     oldTable.foreach( entry =>
-      if (entry != null && entry.value != None) addEntry(entry));
+      if (entry != null && entry.value != None) addEntry(entry))
     deleted = 0
   }
 
@@ -117,25 +116,28 @@ extends AbstractMap[Key, Value]
     put(key, hashOf(key), value)
   }
 
+  @deprecatedOverriding("+= should not be overridden in order to maintain consistency with put.", "2.11.0")
   def += (kv: (Key, Value)): this.type = { put(kv._1, kv._2); this }
+  
+  @deprecatedOverriding("-= should not be overridden in order to maintain consistency with remove.", "2.11.0")
   def -= (key: Key): this.type = { remove(key); this }
 
   override def put(key: Key, value: Value): Option[Value] =
     put(key, hashOf(key), value)
 
   private def put(key: Key, hash: Int, value: Value): Option[Value] = {
-    if (2 * (size + deleted) > mask) growTable
+    if (2 * (size + deleted) > mask) growTable()
     val index = findIndex(key, hash)
     val entry = table(index)
     if (entry == null) {
-      table(index) = new OpenEntry(key, hash, Some(value));
+      table(index) = new OpenEntry(key, hash, Some(value))
       modCount += 1
       size += 1
       None
     } else {
       val res = entry.value
       if (entry.value == None) { size += 1; modCount += 1 }
-      entry.value = Some(value);
+      entry.value = Some(value)
       res
     }
   }
@@ -161,13 +163,13 @@ extends AbstractMap[Key, Value]
     while(entry != null){
       if (entry.hash == hash &&
           entry.key == key){
-        return entry.value;
+        return entry.value
       }
 
-      j = 5 * j + 1 + perturb;
-      perturb >>= 5;
-      index = j & mask;
-      entry = table(index);
+      j = 5 * j + 1 + perturb
+      perturb >>= 5
+      index = j & mask
+      entry = table(index)
     }
     None
   }
@@ -182,8 +184,8 @@ extends AbstractMap[Key, Value]
     val initialModCount = modCount
 
     private[this] def advance() {
-      if (initialModCount != modCount) sys.error("Concurrent modification");
-      while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1;
+      if (initialModCount != modCount) sys.error("Concurrent modification")
+      while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1
     }
 
     def hasNext = {advance(); index <= mask }
@@ -198,7 +200,7 @@ extends AbstractMap[Key, Value]
 
   override def clone() = {
     val it = new OpenHashMap[Key, Value]
-    foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get));
+    foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get))
     it
   }
 
@@ -213,24 +215,24 @@ extends AbstractMap[Key, Value]
    *  @param f   The function to apply to each key, value mapping.
    */
   override def foreach[U](f : ((Key, Value)) => U) {
-    val startModCount = modCount;
+    val startModCount = modCount
     foreachUndeletedEntry(entry => {
       if (modCount != startModCount) sys.error("Concurrent Modification")
       f((entry.key, entry.value.get))}
-    );
+    )
   }
 
   private[this] def foreachUndeletedEntry(f : Entry => Unit){
-    table.foreach(entry => if (entry != null && entry.value != None) f(entry));
+    table.foreach(entry => if (entry != null && entry.value != None) f(entry))
   }
 
   override def transform(f : (Key, Value) => Value) = {
-    foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get)));
+    foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get)))
     this
   }
 
   override def retain(f : (Key, Value) => Boolean) = {
-    foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) {entry.value = None; size -= 1; deleted += 1} );
+    foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) {entry.value = None; size -= 1; deleted += 1} )
     this
   }
 
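
As a side note on the sizing change above: the new helper rounds a requested capacity up to the nearest power of two, rather than always doubling the highest set bit as the old highestOneBit-based version did. A standalone sketch of the same expression, with assumed test values:

    // Smallest power of two >= i, for i >= 1; mirrors OpenHashMap.nextPositivePowerOfTwo.
    def nextPositivePowerOfTwo(i: Int): Int =
      1 << (32 - Integer.numberOfLeadingZeros(i - 1))

    assert(nextPositivePowerOfTwo(1) == 1)
    assert(nextPositivePowerOfTwo(3) == 4)
    assert(nextPositivePowerOfTwo(8) == 8)  // the old nextPowerOfTwo(8) returned 16
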
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 84257c6..b949bec 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -31,6 +30,7 @@ import generic._
  *  @define mayNotTerminateInf
  *  @define willNotTerminateInf
  */
+@deprecatedInheritance("PriorityQueue is not intended to be subclassed due to extensive private implementation details.", "2.11.0")
 class PriorityQueue[A](implicit val ord: Ordering[A])
    extends AbstractIterable[A]
       with Iterable[A]
@@ -43,7 +43,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
 {
   import ord._
 
-  private class ResizableArrayAccess[A] extends AbstractSeq[A] with ResizableArray[A] {
+  private class ResizableArrayAccess[A] extends AbstractSeq[A] with ResizableArray[A] with Serializable {
     def p_size0 = size0
     def p_size0_=(s: Int) = size0 = s
     def p_array = array
@@ -134,11 +134,11 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
       throw new NoSuchElementException("no element to remove from heap")
 
   def dequeueAll[A1 >: A, That](implicit bf: CanBuildFrom[_, A1, That]): That = {
-    val b = bf.apply
+    val b = bf.apply()
     while (nonEmpty) {
       b += dequeue()
     }
-    b.result
+    b.result()
   }
 
   /** Returns the element with the highest priority in the queue,
@@ -146,14 +146,6 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
    *
    *  @return   the element with the highest priority.
    */
-  @deprecated("Use `head` instead.", "2.9.0")
-  def max: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty")
-
-  /** Returns the element with the highest priority in the queue,
-   *  or throws an error if there is no element contained in the queue.
-   *
-   *  @return   the element with the highest priority.
-   */
   override def head: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty")
 
   /** Removes all elements from the queue. After this operation is completed,
diff --git a/src/library/scala/collection/mutable/PriorityQueueProxy.scala b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
index 3bb5d32..b24551a 100644
--- a/src/library/scala/collection/mutable/PriorityQueueProxy.scala
+++ b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 /** This class serves as a proxy for priority queues. The
@@ -18,6 +19,7 @@ package mutable
  *  @version 1.0, 03/05/2004
  *  @since   1
  */
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
 abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends PriorityQueue[A]
          with Proxy
 {
@@ -66,7 +68,7 @@ abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends Priority
    *
    *  @return   the element with the highest priority.
    */
-  override def dequeue(): A = self.dequeue
+  override def dequeue(): A = self.dequeue()
 
   /** Returns the element with the highest priority in the queue,
    *  or throws an error if there is no element contained in the queue.
@@ -75,18 +77,10 @@ abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends Priority
    */
   override def head: A = self.head
 
-  /** Returns the element with the highest priority in the queue,
-   *  or throws an error if there is no element contained in the queue.
-   *
-   *  @return   the element with the highest priority.
-   */
-  @deprecated("Use `head` instead.", "2.9.0")
-  override def max: A = self.max
-
   /** Removes all elements from the queue. After this operation is completed,
    *  the queue will be empty.
    */
-  override def clear(): Unit = self.clear
+  override def clear(): Unit = self.clear()
 
   /** Returns a regular queue containing the same elements.
    */
diff --git a/src/library/scala/collection/mutable/Publisher.scala b/src/library/scala/collection/mutable/Publisher.scala
index e31205b..22bbea1 100644
--- a/src/library/scala/collection/mutable/Publisher.scala
+++ b/src/library/scala/collection/mutable/Publisher.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 
@@ -45,7 +46,7 @@ trait Publisher[Evt] {
   def suspendSubscription(sub: Sub) { suspended += sub }
   def activateSubscription(sub: Sub) { suspended -= sub }
   def removeSubscription(sub: Sub) { filters -= sub }
-  def removeSubscriptions() { filters.clear }
+  def removeSubscriptions() { filters.clear() }
 
   protected def publish(event: Evt) {
     filters.keys.foreach(sub =>
diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala
index 8ef5f6a..7c890fe 100644
--- a/src/library/scala/collection/mutable/Queue.scala
+++ b/src/library/scala/collection/mutable/Queue.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -142,6 +143,7 @@ extends MutableList[A]
   /** Return the proper suffix of this list which starts with the first element that satisfies `p`.
    *  That element is unlinked from the list. If no element satisfies `p`, return None.
    */
+  @deprecated("extractFirst inappropriately exposes implementation details.  Use dequeue or dequeueAll.", "2.11.0")
   def extractFirst(start: LinkedList[A], p: A => Boolean): Option[LinkedList[A]] = {
     if (isEmpty) None
     else {
@@ -167,13 +169,6 @@ extends MutableList[A]
    */
   def front: A = head
 
-  // this method (duplicated from MutableList) must be private for binary compatibility
-  private final def tailImpl(tl: Queue[A]) {
-    require(nonEmpty, "tail of empty list")
-    tl.first0 = first0.tail
-    tl.len = len - 1
-    tl.last0 = if (tl.len == 0) tl.first0 else last0
-  }
 
   // TODO - Don't override this just for new to create appropriate type....
   override def tail: Queue[A] = {
@@ -185,7 +180,7 @@ extends MutableList[A]
   override def clone(): Queue[A] = {
     val bf = newBuilder
     bf ++= seq
-    bf.result
+    bf.result()
   }
 
   private[this] def decrementLength() {
diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala
index c286a34..22ff330 100644
--- a/src/library/scala/collection/mutable/QueueProxy.scala
+++ b/src/library/scala/collection/mutable/QueueProxy.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 /** `Queue` objects implement data structures that allow to
@@ -20,6 +21,7 @@ package mutable
  *  @version 1.1, 03/05/2004
  *  @since   1
  */
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
 trait QueueProxy[A] extends Queue[A] with Proxy {
 
   def self: Queue[A]
@@ -67,7 +69,7 @@ trait QueueProxy[A] extends Queue[A] with Proxy {
    *
    *  @return the first element of the queue.
    */
-  override def dequeue(): A = self.dequeue
+  override def dequeue(): A = self.dequeue()
 
   /** Returns the first element in the queue, or throws an error if there
    *  is no element contained in the queue.
@@ -79,7 +81,7 @@ trait QueueProxy[A] extends Queue[A] with Proxy {
   /** Removes all elements from the queue. After this operation is completed,
    *  the queue will be empty.
    */
-  override def clear(): Unit = self.clear
+  override def clear(): Unit = self.clear()
 
   /** Returns an iterator over all elements on the queue.
    *
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index 4a12f95..c304752 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -89,16 +89,20 @@ trait ResizableArray[A] extends IndexedSeq[A]
     }
   }
 
-  /** Ensure that the internal array has at `n` cells. */
+  /** Ensure that the internal array has at least `n` cells. */
   protected def ensureSize(n: Int) {
-    if (n > array.length) {
-      var newsize = array.length * 2
-      while (n > newsize)
-        newsize = newsize * 2
-
-      val newar: Array[AnyRef] = new Array(newsize)
-      scala.compat.Platform.arraycopy(array, 0, newar, 0, size0)
-      array = newar
+    // Use a Long to prevent overflows
+    val arrayLength: Long = array.length
+    if (n > arrayLength) {
+      var newSize: Long = arrayLength * 2
+      while (n > newSize)
+        newSize = newSize * 2
+      // Clamp newSize to Int.MaxValue
+      if (newSize > Int.MaxValue) newSize = Int.MaxValue
+
+      val newArray: Array[AnyRef] = new Array(newSize.toInt)
+      scala.compat.Platform.arraycopy(array, 0, newArray, 0, size0)
+      array = newArray
     }
   }
 
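
A brief note on the ensureSize rewrite above: carrying the doubling out in a Long and clamping keeps a request close to Int.MaxValue from overflowing into a negative array size. The arithmetic in isolation (helper name assumed, not part of the patch):

    // Double in Long space until `needed` fits, then clamp to Int.MaxValue.
    // Assumes currentLength >= 1, as ResizableArray guarantees for its backing array.
    def grownLength(currentLength: Int, needed: Int): Int = {
      var newSize: Long = currentLength.toLong * 2
      while (needed > newSize) newSize *= 2
      if (newSize > Int.MaxValue) Int.MaxValue else newSize.toInt
    }
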
diff --git a/src/library/scala/collection/mutable/RevertibleHistory.scala b/src/library/scala/collection/mutable/RevertibleHistory.scala
index 5544a21..725a811 100644
--- a/src/library/scala/collection/mutable/RevertibleHistory.scala
+++ b/src/library/scala/collection/mutable/RevertibleHistory.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 
@@ -30,7 +31,7 @@ class RevertibleHistory[Evt <: Undoable, Pub] extends History[Evt, Pub] with Und
    */
   def undo(): Unit = {
     val old = log.toList.reverse
-    clear
-    old.foreach { case (sub, event) => event.undo }
+    clear()
+    old.foreach { case (sub, event) => event.undo() }
   }
 }
diff --git a/src/library/scala/collection/mutable/Seq.scala b/src/library/scala/collection/mutable/Seq.scala
index 9d9399e..eafde70 100644
--- a/src/library/scala/collection/mutable/Seq.scala
+++ b/src/library/scala/collection/mutable/Seq.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -44,4 +45,4 @@ object Seq extends SeqFactory[Seq] {
 }
 
 /** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses. */
-private[scala] abstract class AbstractSeq[A] extends scala.collection.AbstractSeq[A] with Seq[A]
+abstract class AbstractSeq[A] extends scala.collection.AbstractSeq[A] with Seq[A]
diff --git a/src/library/scala/collection/mutable/SeqLike.scala b/src/library/scala/collection/mutable/SeqLike.scala
index 447100c..6987066 100644
--- a/src/library/scala/collection/mutable/SeqLike.scala
+++ b/src/library/scala/collection/mutable/SeqLike.scala
@@ -6,10 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package mutable
 
-import generic._
 import parallel.mutable.ParSeq
 
 /** A template trait for mutable sequences of type `mutable.Seq[A]`.
diff --git a/src/library/scala/collection/mutable/Set.scala b/src/library/scala/collection/mutable/Set.scala
index 023ff63..9757471 100644
--- a/src/library/scala/collection/mutable/Set.scala
+++ b/src/library/scala/collection/mutable/Set.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -42,4 +43,4 @@ object Set extends MutableSetFactory[Set] {
 }
 
 /** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */
-private[scala] abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A]
+abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A]
diff --git a/src/library/scala/collection/mutable/SetBuilder.scala b/src/library/scala/collection/mutable/SetBuilder.scala
index 42fd651..01bfdc9 100644
--- a/src/library/scala/collection/mutable/SetBuilder.scala
+++ b/src/library/scala/collection/mutable/SetBuilder.scala
@@ -6,12 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
+package scala
+package collection
 package mutable
 
-import generic._
-
 /** The canonical builder for mutable Sets.
  *
  *  @tparam A      The type of the elements that will be contained in this set.
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 71da4c8..d749167 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -6,12 +6,13 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
 import script._
-import scala.annotation.{ migration, bridge }
+import scala.annotation.migration
 import parallel.mutable.ParSet
 
 /** A template trait for mutable sets of type `mutable.Set[A]`.
@@ -209,11 +210,12 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
    *  @throws `Predef.UnsupportedOperationException`
    *  if the message was not understood.
    */
-   def <<(cmd: Message[A]): Unit = cmd match {
-     case Include(_, x)     => this += x
-     case Remove(_, x)      => this -= x
-     case Reset()           => clear
-     case s: Script[_]      => s.iterator foreach <<
-     case _                 => throw new UnsupportedOperationException("message " + cmd + " not understood")
-   }
+  @deprecated("Scripting is deprecated.", "2.11.0")
+  def <<(cmd: Message[A]): Unit = cmd match {
+    case Include(_, x)     => this += x
+    case Remove(_, x)      => this -= x
+    case Reset()           => clear()
+    case s: Script[_]      => s.iterator foreach <<
+    case _                 => throw new UnsupportedOperationException("message " + cmd + " not understood")
+  }
 }
diff --git a/src/library/scala/collection/mutable/SetProxy.scala b/src/library/scala/collection/mutable/SetProxy.scala
index c9f2975..7427950 100644
--- a/src/library/scala/collection/mutable/SetProxy.scala
+++ b/src/library/scala/collection/mutable/SetProxy.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 /** This is a simple wrapper class for [[scala.collection.mutable.Set]].
@@ -17,6 +18,7 @@ package mutable
  *  @version 1.1, 09/05/2004
  *  @since   1
  */
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
 trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] {
   override def repr = this
   override def empty = new SetProxy[A] { val self = SetProxy.this.self.empty }
diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala
index 41f2c6e..0f2fa75 100644
--- a/src/library/scala/collection/mutable/SortedSet.scala
+++ b/src/library/scala/collection/mutable/SortedSet.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index 6eef250..53b6c59 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
@@ -118,7 +119,7 @@ extends AbstractSeq[A]
    *  @param xs the traversable object.
    *  @return the stack with the new elements on top.
    */
-  def pushAll(xs: TraversableOnce[A]): this.type = { xs.seq foreach push ; this }
+  def pushAll(xs: TraversableOnce[A]): this.type = { xs foreach push ; this }
 
   /** Returns the top element of the stack. This method will not remove
    *  the element from the stack. An error is signaled if there is no
diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala
index 16f13ff..81e63b0 100644
--- a/src/library/scala/collection/mutable/StackProxy.scala
+++ b/src/library/scala/collection/mutable/StackProxy.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 /** A stack implements a data structure which allows to store and retrieve
@@ -18,6 +19,7 @@ package mutable
  *  @version 1.0, 10/05/2004
  *  @since   1
  */
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
 trait StackProxy[A] extends Stack[A] with Proxy {
 
   def self: Stack[A]
@@ -58,7 +60,7 @@ trait StackProxy[A] extends Stack[A] with Proxy {
     self.push(elem)
     this
   }
-  
+
   /** Returns the top element of the stack. This method will not remove
    *  the element from the stack. An error is signaled if there is no
    *  element on the stack.
@@ -69,13 +71,13 @@ trait StackProxy[A] extends Stack[A] with Proxy {
 
   /** Removes the top element from the stack.
    */
-  override def pop(): A = self.pop
+  override def pop(): A = self.pop()
 
   /**
    * Removes all elements from the stack. After this operation completed,
    * the stack will be empty.
    */
-  override def clear(): Unit = self.clear
+  override def clear(): Unit = self.clear()
 
   /** Returns an iterator over all elements on the stack. This iterator
    *  is stable with respect to state changes in the stack object; i.e.
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 4d269a9..498e9e4 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import java.lang.{ StringBuilder => JavaStringBuilder }
@@ -255,8 +256,8 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
    *  @return     This StringBuilder.
    */
   def append(x: Boolean): StringBuilder = { underlying append x ; this }
-  def append(x: Byte): StringBuilder = { underlying append x ; this }
-  def append(x: Short): StringBuilder = { underlying append x ; this }
+  def append(x: Byte): StringBuilder = append(x.toInt)
+  def append(x: Short): StringBuilder = append(x.toInt)
   def append(x: Int): StringBuilder = { underlying append x ; this }
   def append(x: Long): StringBuilder = { underlying append x ; this }
   def append(x: Float): StringBuilder = { underlying append x ; this }
@@ -359,8 +360,8 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
    *  @return       this StringBuilder.
    */
   def insert(index: Int, x: Boolean): StringBuilder = insert(index, String.valueOf(x))
-  def insert(index: Int, x: Byte): StringBuilder    = insert(index, String.valueOf(x))
-  def insert(index: Int, x: Short): StringBuilder   = insert(index, String.valueOf(x))
+  def insert(index: Int, x: Byte): StringBuilder    = insert(index, x.toInt)
+  def insert(index: Int, x: Short): StringBuilder   = insert(index, x.toInt)
   def insert(index: Int, x: Int): StringBuilder     = insert(index, String.valueOf(x))
   def insert(index: Int, x: Long): StringBuilder    = insert(index, String.valueOf(x))
   def insert(index: Int, x: Float): StringBuilder   = insert(index, String.valueOf(x))
diff --git a/src/library/scala/collection/mutable/Subscriber.scala b/src/library/scala/collection/mutable/Subscriber.scala
index 35d31d7..c2aa9be 100644
--- a/src/library/scala/collection/mutable/Subscriber.scala
+++ b/src/library/scala/collection/mutable/Subscriber.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 /** `Subscriber[A, B]` objects may subscribe to events of type `A`
diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
index bf9a70c..8c646b0 100644
--- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala
+++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import script._
@@ -24,6 +25,7 @@ import script._
  *  @define Coll `SynchronizedBuffer`
  *  @define coll synchronized buffer
  */
+@deprecated("Synchronization via traits is deprecated as it is inherently unreliable.  Consider java.util.concurrent.ConcurrentLinkedQueue as an alternative.", "2.11.0")
 trait SynchronizedBuffer[A] extends Buffer[A] {
 
   import scala.collection.Traversable
@@ -157,9 +159,10 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
   /** Clears the buffer contents.
    */
   abstract override def clear(): Unit = synchronized {
-    super.clear
+    super.clear()
   }
 
+  @deprecated("Scripting is deprecated.", "2.11.0")
   override def <<(cmd: Message[A]): Unit = synchronized {
     super.<<(cmd)
   }
diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala
index 5a3562c..9876296 100644
--- a/src/library/scala/collection/mutable/SynchronizedMap.scala
+++ b/src/library/scala/collection/mutable/SynchronizedMap.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.collection
+package scala
+package collection
 package mutable
 
 import scala.annotation.migration
@@ -25,6 +24,7 @@ import scala.annotation.migration
  *  @define Coll `SynchronizedMap`
  *  @define coll synchronized map
  */
+@deprecated("Synchronization via traits is deprecated as it is inherently unreliable.  Consider java.util.concurrent.ConcurrentHashMap as an alternative.", "2.11.0")
 trait SynchronizedMap[A, B] extends Map[A, B] {
 
   abstract override def get(key: A): Option[B] = synchronized { super.get(key) }
diff --git a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
index 8dfc40b..d3c0b85 100644
--- a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 /** This class implements synchronized priority queues using a binary heap.
@@ -23,6 +24,7 @@ package mutable
  *  @define Coll `SynchronizedPriorityQueue`
  *  @define coll synchronized priority queue
  */
+@deprecated("Comprehensive synchronization via selective overriding of methods is inherently unreliable.  Consider java.util.concurrent.ConcurrentSkipListSet as an alternative.", "2.11.0")
 class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQueue[A] {
 
   /** Checks if the queue is empty.
@@ -64,7 +66,7 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu
    *
    *  @return   the element with the highest priority.
    */
-  override def dequeue(): A = synchronized { super.dequeue }
+  override def dequeue(): A = synchronized { super.dequeue() }
 
   /** Returns the element with the highest priority in the queue,
    *  or throws an error if there is no element contained in the queue.
@@ -73,18 +75,10 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu
    */
   override def head: A = synchronized { super.head }
 
-  /** Returns the element with the highest priority in the queue,
-   *  or throws an error if there is no element contained in the queue.
-   *
-   *  @return   the element with the highest priority.
-   */
-  @deprecated("Use `head` instead.", "2.9.0")
-  override def max: A = synchronized { super.max }
-
   /** Removes all elements from the queue. After this operation is completed,
    *  the queue will be empty.
    */
-  override def clear(): Unit = synchronized { super.clear }
+  override def clear(): Unit = synchronized { super.clear() }
 
   /** Returns an iterator which yield all the elements of the priority
    *  queue in descending priority order.
diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala
index 9559d5e..48e40ab 100644
--- a/src/library/scala/collection/mutable/SynchronizedQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 
@@ -24,9 +25,8 @@ package mutable
  *  @define Coll `SynchronizedQueue`
  *  @define coll synchronized queue
  */
+@deprecated("Synchronization via selective overriding of methods is inherently unreliable.  Consider java.util.concurrent.ConcurrentLinkedQueue as an alternative.", "2.11.0")
 class SynchronizedQueue[A] extends Queue[A] {
-  import scala.collection.Traversable
-
   /** Checks if the queue is empty.
    *
    *  @return true, iff there is no element in the queue.
@@ -58,7 +58,7 @@ class SynchronizedQueue[A] extends Queue[A] {
    *
    *  @return the first element of the queue.
    */
-  override def dequeue(): A = synchronized { super.dequeue }
+  override def dequeue(): A = synchronized { super.dequeue() }
 
   /** Returns the first element in the queue which satisfies the
    *  given predicate, and removes this element from the queue.
@@ -87,7 +87,7 @@ class SynchronizedQueue[A] extends Queue[A] {
   /** Removes all elements from the queue. After this operation is completed,
    *  the queue will be empty.
    */
-  override def clear(): Unit = synchronized { super.clear }
+  override def clear(): Unit = synchronized { super.clear() }
 
   /** Checks if two queues are structurally identical.
    *
diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala
index e4a4499..60e2e79 100644
--- a/src/library/scala/collection/mutable/SynchronizedSet.scala
+++ b/src/library/scala/collection/mutable/SynchronizedSet.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import script._
@@ -23,9 +24,8 @@ import script._
  *  @define Coll `SynchronizedSet`
  *  @define coll synchronized set
  */
+@deprecated("Synchronization via traits is deprecated as it is inherently unreliable.  Consider java.util.concurrent.ConcurrentHashMap[A,Unit] as an alternative.", "2.11.0")
 trait SynchronizedSet[A] extends Set[A] {
-  import scala.collection.Traversable
-
   abstract override def size: Int = synchronized {
     super.size
   }
@@ -71,7 +71,7 @@ trait SynchronizedSet[A] extends Set[A] {
   }
 
   abstract override def clear(): Unit = synchronized {
-    super.clear
+    super.clear()
   }
 
   override def subsetOf(that: scala.collection.GenSet[A]) = synchronized {
@@ -94,6 +94,7 @@ trait SynchronizedSet[A] extends Set[A] {
     super.toString
   }
 
+  @deprecated("Scripting is deprecated.", "2.11.0")
   override def <<(cmd: Message[A]): Unit = synchronized {
     super.<<(cmd)
   }
diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala
index 5d7c9f6..bbb6f5a 100644
--- a/src/library/scala/collection/mutable/SynchronizedStack.scala
+++ b/src/library/scala/collection/mutable/SynchronizedStack.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 
@@ -24,6 +25,7 @@ package mutable
  *  @define Coll `SynchronizedStack`
  *  @define coll synchronized stack
  */
+@deprecated("Synchronization via selective overriding of methods is inherently unreliable.  Consider java.util.concurrent.LinkedBlockingDequeue instead.", "2.11.0")
 class SynchronizedStack[A] extends Stack[A] {
   import scala.collection.Traversable
 
@@ -67,13 +69,13 @@ class SynchronizedStack[A] extends Stack[A] {
 
   /** Removes the top element from the stack.
    */
-  override def pop(): A = synchronized { super.pop }
+  override def pop(): A = synchronized { super.pop() }
 
   /**
    * Removes all elements from the stack. After this operation completed,
    * the stack will be empty.
    */
-  override def clear(): Unit = synchronized { super.clear }
+  override def clear(): Unit = synchronized { super.clear() }
 
   /** Returns an iterator over all elements on the stack. This iterator
    *  is stable with respect to state changes in the stack object; i.e.
diff --git a/src/library/scala/collection/mutable/Traversable.scala b/src/library/scala/collection/mutable/Traversable.scala
index e36ffc8..d7ea376 100644
--- a/src/library/scala/collection/mutable/Traversable.scala
+++ b/src/library/scala/collection/mutable/Traversable.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala
index 5197af1..f849eea 100644
--- a/src/library/scala/collection/mutable/TreeSet.scala
+++ b/src/library/scala/collection/mutable/TreeSet.scala
@@ -6,10 +6,13 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
+import scala.collection.immutable.{RedBlackTree => RB}
+import scala.runtime.ObjectRef
 
 /**
  * @define Coll `mutable.TreeSet`
@@ -29,95 +32,85 @@ object TreeSet extends MutableSortedSetFactory[TreeSet] {
 }
 
 /**
- * A mutable SortedSet using an immutable AVL Tree as underlying data structure.
+ * A mutable SortedSet using an immutable RedBlack Tree as underlying data structure.
  *
  * @author Lucien Pereira
  *
  */
-class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with SetLike[A, TreeSet[A]]
+@deprecatedInheritance("TreeSet is not designed to enable meaningful subclassing.", "2.11.0")
+class TreeSet[A] private (treeRef: ObjectRef[RB.Tree[A, Null]], from: Option[A], until: Option[A])(implicit val ordering: Ordering[A])
+  extends SortedSet[A] with SetLike[A, TreeSet[A]]
   with SortedSetLike[A, TreeSet[A]] with Set[A] with Serializable {
 
-  // Projection constructor
-  private def this(base: Option[TreeSet[A]], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]) {
-    this();
-    this.base = base
-    this.from = from
-    this.until = until
-  }
-
-  private var base: Option[TreeSet[A]] = None
-
-  private var from: Option[A] = None
-
-  private var until: Option[A] = None
-
-  private var avl: AVLTree[A] = Leaf
-
-  private var cardinality: Int = 0
-
-  def resolve: TreeSet[A] = base.getOrElse(this)
+  if (ordering eq null)
+    throw new NullPointerException("ordering must not be null")
 
-  private def isLeftAcceptable(from: Option[A], ordering: Ordering[A])(a: A): Boolean =
-    from.map(x => ordering.gteq(a, x)).getOrElse(true)
+  def this()(implicit ordering: Ordering[A]) = this(new ObjectRef(null), None, None)
 
-  private def isRightAcceptable(until: Option[A], ordering: Ordering[A])(a: A): Boolean =
-    until.map(x => ordering.lt(a, x)).getOrElse(true)
-
-  /**
-   * Cardinality store the set size, unfortunately a
-   * set view (given by rangeImpl)
-   * cannot take advantage of this optimisation
-   *
-   */
-  override def size: Int = base.map(_ => super.size).getOrElse(cardinality)
+  override def size: Int = RB.countInRange(treeRef.elem, from, until)
 
   override def stringPrefix = "TreeSet"
 
   override def empty: TreeSet[A] = TreeSet.empty
 
-  override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSet(Some(this), from, until)
+  private def pickBound(comparison: (A, A) => A, oldBound: Option[A], newBound: Option[A]) = (newBound, oldBound) match {
+    case (Some(newB), Some(oldB)) => Some(comparison(newB, oldB))
+    case (None, _) => oldBound
+    case _ => newBound
+  }
+
+  override def rangeImpl(fromArg: Option[A], untilArg: Option[A]): TreeSet[A] = {
+    val newFrom = pickBound(ordering.max, fromArg, from)
+    val newUntil = pickBound(ordering.min, untilArg, until)
+
+    new TreeSet(treeRef, newFrom, newUntil)
+  }
 
   override def -=(elem: A): this.type = {
-    try {
-      resolve.avl = resolve.avl.remove(elem, ordering)
-      resolve.cardinality = resolve.cardinality - 1
-    } catch {
-      case e: NoSuchElementException => ()
-    }
+    treeRef.elem = RB.delete(treeRef.elem, elem)
     this
   }
 
   override def +=(elem: A): this.type = {
-    try {
-      resolve.avl = resolve.avl.insert(elem, ordering)
-      resolve.cardinality = resolve.cardinality + 1
-    } catch {
-      case e: IllegalArgumentException => ()
-    }
+    treeRef.elem = RB.update(treeRef.elem, elem, null, overwrite = false)
     this
   }
 
   /**
    * Thanks to the immutable nature of the
-   * underlying AVL Tree, we can share it with
+   * underlying Tree, we can share it with
    * the clone. So clone complexity in time is O(1).
    *
    */
-  override def clone(): TreeSet[A] = {
-    val clone = new TreeSet[A](base, from, until)
-    clone.avl = resolve.avl
-    clone.cardinality = resolve.cardinality
-    clone
-  }
+  override def clone(): TreeSet[A] =
+    new TreeSet[A](new ObjectRef(treeRef.elem), from, until)
+
+  private val notProjection = !(from.isDefined || until.isDefined)
 
   override def contains(elem: A): Boolean = {
-    isLeftAcceptable(from, ordering)(elem) &&
-    isRightAcceptable(until, ordering)(elem) &&
-    resolve.avl.contains(elem, ordering)
+    def leftAcceptable: Boolean = from match {
+      case Some(lb) => ordering.gteq(elem, lb)
+      case _ => true
+    }
+
+    def rightAcceptable: Boolean = until match {
+      case Some(ub) => ordering.lt(elem, ub)
+      case _ => true
+    }
+
+    (notProjection || (leftAcceptable && rightAcceptable)) &&
+      RB.contains(treeRef.elem, elem)
   }
 
-  override def iterator: Iterator[A] = resolve.avl.iterator
-    .dropWhile(e => !isLeftAcceptable(from, ordering)(e))
-      .takeWhile(e => isRightAcceptable(until, ordering)(e))
+  override def iterator: Iterator[A] = iteratorFrom(None)
 
+  override def keysIteratorFrom(start: A) = iteratorFrom(Some(start))
+
+  private def iteratorFrom(start: Option[A]) = {
+    val it = RB.keysIterator(treeRef.elem, pickBound(ordering.max, from, start))
+    until match {
+      case None => it
+      case Some(ub) => it takeWhile (k => ordering.lt(k, ub))
+    }
+  }
 }
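
Reading the rewrite above: every projection produced by rangeImpl shares the same ObjectRef to the red-black tree, so a ranged view sees later updates to the base set, while clone takes an O(1) snapshot by wrapping the current immutable tree in a fresh ObjectRef. A small usage sketch under that reading (sample elements assumed):

    import scala.collection.mutable.TreeSet

    val s = TreeSet(1, 5, 9)
    val mid = s.rangeImpl(Some(3), Some(8))  // view of keys in [3, 8)

    s += 4                                   // visible through the shared treeRef
    assert(mid.toList == List(4, 5))

    val snapshot = s.clone()                 // shares the immutable tree, O(1)
    s += 7
    assert(!snapshot.contains(7))
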
diff --git a/src/library/scala/collection/mutable/Undoable.scala b/src/library/scala/collection/mutable/Undoable.scala
index 0c0e8fe..482d618 100644
--- a/src/library/scala/collection/mutable/Undoable.scala
+++ b/src/library/scala/collection/mutable/Undoable.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 
diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala
index 9b48c8f..1f89199 100644
--- a/src/library/scala/collection/mutable/UnrolledBuffer.scala
+++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.mutable
+package scala
+package collection.mutable
 
 import scala.collection.AbstractIterator
 import scala.collection.Iterator
@@ -42,6 +43,7 @@ import scala.reflect.ClassTag
  *
  */
 @SerialVersionUID(1L)
+@deprecatedInheritance("UnrolledBuffer is not designed to enable meaningful subclassing.", "2.11.0")
 class UnrolledBuffer[T](implicit val tag: ClassTag[T])
 extends scala.collection.mutable.AbstractBuffer[T]
    with scala.collection.mutable.Buffer[T]
@@ -66,7 +68,20 @@ extends scala.collection.mutable.AbstractBuffer[T]
 
   protected def newUnrolled = new Unrolled[T](this)
 
-  private[collection] def calcNextLength(sz: Int) = sz
+  // The below would allow more flexible behavior without requiring inheritance
+  // that is risky because all the important internals are private.
+  // private var myLengthPolicy: Int => Int = x => x
+  // 
+  // /** Specifies how the array lengths should vary.
+  //   * 
+  //   *  By default,  `UnrolledBuffer` uses arrays of a fixed size.  A length
+  //   *  policy can be given that changes this scheme to, for instance, an
+  //   *  exponential growth.
+  //   * 
+  //   *  @param nextLength   computes the length of the next array from the length of the latest one
+  //   */
+  // def setLengthPolicy(nextLength: Int => Int): Unit = { myLengthPolicy = nextLength }
+  private[collection] def calcNextLength(sz: Int) = sz // myLengthPolicy(sz)
 
   def classTagCompanion = UnrolledBuffer
 
@@ -87,7 +102,7 @@ extends scala.collection.mutable.AbstractBuffer[T]
     // `that` is no longer usable, so clear it
     // here we rely on the fact that `clear` allocates
     // new nodes instead of modifying the previous ones
-    that.clear
+    that.clear()
 
     // return a reference to this
     this
@@ -123,7 +138,7 @@ extends scala.collection.mutable.AbstractBuffer[T]
       val r = node.array(pos)
       scan()
       r
-    } else Iterator.empty.next
+    } else Iterator.empty.next()
   }
 
   // this should be faster than the iterator
diff --git a/src/library/scala/collection/mutable/WeakHashMap.scala b/src/library/scala/collection/mutable/WeakHashMap.scala
index 70e428c..433d054 100644
--- a/src/library/scala/collection/mutable/WeakHashMap.scala
+++ b/src/library/scala/collection/mutable/WeakHashMap.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import generic._
diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala
index b837240..53fca9f 100644
--- a/src/library/scala/collection/mutable/WrappedArray.scala
+++ b/src/library/scala/collection/mutable/WrappedArray.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
 import scala.reflect.ClassTag
diff --git a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
index 7e02103..bfe95a1 100644
--- a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
@@ -8,10 +8,10 @@
 
 
 
-package scala.collection
+package scala
+package collection
 package mutable
 
-import generic._
 import scala.reflect.ClassTag
 import scala.runtime.ScalaRunTime._
 
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
index 00993c0..abccf5d 100644
--- a/src/library/scala/collection/parallel/Combiner.scala
+++ b/src/library/scala/collection/parallel/Combiner.scala
@@ -6,16 +6,13 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection.parallel
-
+package scala
+package collection.parallel
 
 import scala.collection.Parallel
 import scala.collection.mutable.Builder
 import scala.collection.generic.Sizing
 
-
-
 /** The base trait for all combiners.
  *  A combiner incremental collection construction just like
  *  a regular builder, but also implements an efficient merge operation of two builders
@@ -33,11 +30,11 @@ import scala.collection.generic.Sizing
  *  @since 2.9
  */
 trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
-  
+
   @transient
   @volatile
   var _combinerTaskSupport = defaultTaskSupport
-  
+
   def combinerTaskSupport = {
     val cts = _combinerTaskSupport
     if (cts eq null) {
@@ -45,9 +42,9 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
       defaultTaskSupport
     } else cts
   }
-  
+
   def combinerTaskSupport_=(cts: TaskSupport) = _combinerTaskSupport = cts
-  
+
   /** Combines the contents of the receiver builder and the `other` builder,
    *  producing a new builder containing both their elements.
    *
@@ -81,18 +78,16 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
    *  By default, this method returns `false`.
    */
   def canBeShared: Boolean = false
-  
+
   /** Constructs the result and sets the appropriate tasksupport object to the resulting collection
    *  if this is applicable.
    */
   def resultWithTaskSupport: To = {
-    val res = result
+    val res = result()
     setTaskSupport(res, combinerTaskSupport)
   }
-  
 }
 
-
 /*
 private[collection] trait EnvironmentPassingCombiner[-Elem, +To] extends Combiner[Elem, To] {
   abstract override def result = {
@@ -101,12 +96,3 @@ private[collection] trait EnvironmentPassingCombiner[-Elem, +To] extends Combine
   }
 }
 */
-
-
-
-
-
-
-
-
-
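
Beyond the whitespace cleanup and the explicit result() call, the hunk above is a reminder of what a Combiner adds over a plain Builder: a combine operation that merges two partially filled builders. A minimal sketch, using the ParSeq.newCombiner factory that appears later in this patch (the object name is illustrative):

    import scala.collection.parallel.{ Combiner, ParSeq }

    object CombinerDemo {
      def main(args: Array[String]): Unit = {
        // In real code each combiner is filled by a separate task.
        val left: Combiner[Int, ParSeq[Int]]  = ParSeq.newCombiner[Int]
        val right: Combiner[Int, ParSeq[Int]] = ParSeq.newCombiner[Int]
        left += 1; left += 2
        right += 3; right += 4
        // combine may reuse either side, so neither should be used afterwards.
        val merged = left combine right
        println(merged.result())
      }
    }
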
diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala
index 2b24c88..2ceeb18 100644
--- a/src/library/scala/collection/parallel/ParIterable.scala
+++ b/src/library/scala/collection/parallel/ParIterable.scala
@@ -6,12 +6,12 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel
+package scala
+package collection.parallel
 
 import scala.collection.GenIterable
 import scala.collection.generic._
 import scala.collection.parallel.mutable.ParArrayCombiner
-import scala.collection.parallel.mutable.ParArray
 
 /** A template trait for parallel iterable collections.
  *
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 0f06ff3..445edd2 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -6,11 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection.parallel
-
-
-
+package scala
+package collection.parallel
 
 import scala.collection.mutable.Builder
 import scala.collection.mutable.ArrayBuffer
@@ -32,6 +29,8 @@ import scala.annotation.unchecked.uncheckedVariance
 import scala.annotation.unchecked.uncheckedStable
 import scala.language.{ higherKinds, implicitConversions }
 
+import scala.collection.parallel.ParallelCollectionImplicits._
+
 
 /** A template trait for parallel collections of type `ParIterable[T]`.
  *
@@ -171,9 +170,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
 
   /** The task support object which is responsible for scheduling and
    *  load-balancing tasks to processors.
-   *                                                                              
+   *
    *  @see [[scala.collection.parallel.TaskSupport]]
-   */     
+   */
   def tasksupport = {
     val ts = _tasksupport
     if (ts eq null) {
@@ -188,18 +187,18 @@ self: ParIterableLike[T, Repr, Sequential] =>
    *  A task support object can be changed in a parallel collection after it
    *  has been created, but only during a quiescent period, i.e. while there
    *  are no concurrent invocations to parallel collection methods.
-   *                                                                              
-   *  Here is a way to change the task support of a parallel collection:          
-   *                                                                              
-   *  {{{                                                                         
-   *  import scala.collection.parallel._                                          
-   *  val pc = mutable.ParArray(1, 2, 3)                                          
-   *  pc.tasksupport = new ForkJoinTaskSupport(                                   
-   *    new scala.concurrent.forkjoin.ForkJoinPool(2))                            
-   *  }}}                                                                         
+   *
+   *  Here is a way to change the task support of a parallel collection:
+   *
+   *  {{{
+   *  import scala.collection.parallel._
+   *  val pc = mutable.ParArray(1, 2, 3)
+   *  pc.tasksupport = new ForkJoinTaskSupport(
+   *    new scala.concurrent.forkjoin.ForkJoinPool(2))
+   *  }}}
    *
    *  @see [[scala.collection.parallel.TaskSupport]]
-   */     
+   */
   def tasksupport_=(ts: TaskSupport) = _tasksupport = ts
 
   def seq: Sequential
@@ -214,7 +213,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
 
   def nonEmpty = size != 0
 
-  def head = iterator.next
+  def head = iterator.next()
 
   def headOption = if (nonEmpty) Some(head) else None
 
@@ -419,8 +418,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
    *  may be invoked arbitrary number of times.
    *
    *  For example, one might want to process some elements and then produce a `Set`. In this
-   *  case, `seqop` would process an element and append it to the list, while `combop`
-   *  would concatenate two lists from different partitions together. The initial value
+   *  case, `seqop` would process an element and append it to the set, while `combop`
+   *  would concatenate two sets from different partitions together. The initial value
    *  `z` would be an empty set.
    *
    *  {{{
@@ -433,12 +432,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
    *  @tparam S        the type of accumulated results
    *  @param z         the initial value for the accumulated result of the partition - this
    *                   will typically be the neutral element for the `seqop` operator (e.g.
-   *                   `Nil` for list concatenation or `0` for summation)
+   *                   `Nil` for list concatenation or `0` for summation) and may be evaluated
+   *                   more than once
    *  @param seqop     an operator used to accumulate results within a partition
    *  @param combop    an associative operator used to combine results from different partitions
    */
-  def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
-    tasksupport.executeAndWaitResult(new Aggregate(z, seqop, combop, splitter))
+  def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
+    tasksupport.executeAndWaitResult(new Aggregate(() => z, seqop, combop, splitter))
   }
 
   def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op)
@@ -453,7 +453,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
 
   def reduceRightOption[U >: T](op: (T, U) => U): Option[U] = seq.reduceRightOption(op)
 
-  /** Applies a function `f` to all the elements of $coll in a undefined order.
+  /** Applies a function `f` to all the elements of $coll in an undefined order.
    *
    *  @tparam U    the result type of the function applied to each element, which is always discarded
    *  @param f     function applied to each element
@@ -589,6 +589,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
     }
   }
 
+  def withFilter(pred: T => Boolean): Repr = filter(pred)
+
   def filter(pred: T => Boolean): Repr = {
     tasksupport.executeAndWaitResult(new Filter(pred, combinerFactory, splitter) mapResult { _.resultWithTaskSupport })
   }
@@ -626,7 +628,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
       val b = bf(repr)
       this.splitter.copy2builder[U, That, Builder[U, That]](b)
       for (elem <- that.seq) b += elem
-      setTaskSupport(b.result, tasksupport)
+      setTaskSupport(b.result(), tasksupport)
     }
   }
 
@@ -727,7 +729,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
         tree => tasksupport.executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => bf(repr).asCombiner)) mapResult {
           cb => cb.resultWithTaskSupport
         })
-      }) else setTaskSupport((bf(repr) += z).result, tasksupport)
+      }) else setTaskSupport((bf(repr) += z).result(), tasksupport)
     } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport)
   } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport)
 
@@ -819,10 +821,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
 
   def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
     val thatseq = that.asParSeq
-    tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport });
+    tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport })
   } else setTaskSupport(seq.zip(that)(bf2seq(bf)), tasksupport)
 
-  def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false)
+  def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, inclusive = false)
 
   def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
     val thatseq = that.asParSeq
@@ -830,25 +832,19 @@ self: ParIterableLike[T, Repr, Sequential] =>
       new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult {
         _.resultWithTaskSupport
       }
-    );
+    )
   } else setTaskSupport(seq.zipAll(that, thisElem, thatElem)(bf2seq(bf)), tasksupport)
 
   protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = {
-    tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport });
+    tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport })
   }
 
   protected def toParMap[K, V, That](cbf: () => Combiner[(K, V), That])(implicit ev: T <:< (K, V)): That = {
     tasksupport.executeAndWaitResult(new ToParMap(combinerFactory(cbf), splitter)(ev) mapResult { _.resultWithTaskSupport })
   }
 
-  def view = new ParIterableView[T, Repr, Sequential] {
-    protected lazy val underlying = self.repr
-    protected[this] def viewIdentifier = ""
-    protected[this] def viewIdString = ""
-    override def seq = self.seq.view
-    def splitter = self.splitter
-    def size = splitter.remaining
-  }
+  @deprecated("Use .seq.view instead", "2.11.0")
+  def view = seq.view
 
   override def toArray[U >: T: ClassTag]: Array[U] = {
     val arr = new Array[U](size)
@@ -877,13 +873,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
   override def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U])
 
   override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V])
-  
+
   override def toVector: Vector[T] = to[Vector]
 
   override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) {
     toParCollection[T, Col[T]](() => cbf().asCombiner)
   } else seq.to(cbf)
-  
+
   /* tasks */
 
   protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] {
@@ -903,7 +899,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
     protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp]
     def shouldSplitFurther = pit.shouldSplitFurther(self.repr, tasksupport.parallelismLevel)
     def split = pit.splitWithSignalling.map(newSubtask(_)) // default split procedure
-    private[parallel] override def signalAbort = pit.abort
+    private[parallel] override def signalAbort = pit.abort()
     override def toString = this.getClass.getSimpleName + "(" + pit.toString + ")(" + result + ")(supername: " + super.toString + ")"
   }
 
@@ -920,8 +916,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
     def combineResults(fr: FR, sr: SR): R
     @volatile var result: R = null.asInstanceOf[R]
     private[parallel] override def signalAbort() {
-      ft.signalAbort
-      st.signalAbort
+      ft.signalAbort()
+      st.signalAbort()
     }
     protected def mergeSubtasks() {
       ft mergeThrowables st
@@ -935,9 +931,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
   (f: First, s: Second)
   extends Composite[FR, SR, R, First, Second](f, s) {
     def leaf(prevr: Option[R]) = {
-      tasksupport.executeAndWaitResult(ft)
-      tasksupport.executeAndWaitResult(st)
-      mergeSubtasks
+      tasksupport.executeAndWaitResult(ft) : Any
+      tasksupport.executeAndWaitResult(st) : Any
+      mergeSubtasks()
     }
   }
 
@@ -946,10 +942,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
   (f: First, s: Second)
   extends Composite[FR, SR, R, First, Second](f, s) {
     def leaf(prevr: Option[R]) = {
-      val ftfuture = tasksupport.execute(ft)
-      tasksupport.executeAndWaitResult(st)
+      val ftfuture: () => Any = tasksupport.execute(ft)
+      tasksupport.executeAndWaitResult(st) : Any
       ftfuture()
-      mergeSubtasks
+      mergeSubtasks()
     }
   }
 
@@ -962,7 +958,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
       result = map(initialResult)
     }
     private[parallel] override def signalAbort() {
-      inner.signalAbort
+      inner.signalAbort()
     }
     override def requiresStrictSplitters = inner.requiresStrictSplitters
   }
@@ -1005,10 +1001,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
     override def merge(that: Fold[U]) = result = op(result, that.result)
   }
 
-  protected[this] class Aggregate[S](z: S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: IterableSplitter[T])
+  protected[this] class Aggregate[S](z: () => S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: IterableSplitter[T])
   extends Accessor[S, Aggregate[S]] {
     @volatile var result: S = null.asInstanceOf[S]
-    def leaf(prevr: Option[S]) = result = pit.foldLeft(z)(seqop)
+    def leaf(prevr: Option[S]) = result = pit.foldLeft(z())(seqop)
     protected[this] def newSubtask(p: IterableSplitter[T]) = new Aggregate(z, seqop, combop, p)
     override def merge(that: Aggregate[S]) = result = combop(result, that.result)
   }
@@ -1084,7 +1080,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
   protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
   extends Accessor[Boolean, Forall] {
     @volatile var result: Boolean = true
-    def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort }
+    def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort() }
     protected[this] def newSubtask(p: IterableSplitter[T]) = new Forall(pred, p)
     override def merge(that: Forall) = result = result && that.result
   }
@@ -1092,7 +1088,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
   protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
   extends Accessor[Boolean, Exists] {
     @volatile var result: Boolean = false
-    def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort }
+    def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort() }
     protected[this] def newSubtask(p: IterableSplitter[T]) = new Exists(pred, p)
     override def merge(that: Exists) = result = result || that.result
   }
@@ -1100,7 +1096,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
   protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
   extends Accessor[Option[U], Find[U]] {
     @volatile var result: Option[U] = None
-    def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort }
+    def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort() }
     protected[this] def newSubtask(p: IterableSplitter[T]) = new Find(pred, p)
     override def merge(that: Find[U]) = if (this.result == None) result = that.result
   }
@@ -1152,7 +1148,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
       // note: HashMapCombiner doesn't merge same keys until evaluation
       val cb = mcf()
       while (pit.hasNext) {
-        val elem = pit.next
+        val elem = pit.next()
         cb += f(elem) -> elem
       }
       result = cb
@@ -1473,9 +1469,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
 
   /* alias methods */
 
-  def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op);
+  def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op)
 
-  def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op);
+  def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op)
 
   /* debug information */
 
@@ -1488,7 +1484,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
   def debugBuffer: ArrayBuffer[String] = null
 
   private[parallel] def debugclear() = synchronized {
-    debugBuffer.clear
+    debugBuffer.clear()
   }
 
   private[parallel] def debuglog(s: String) = synchronized {
@@ -1504,31 +1500,3 @@ self: ParIterableLike[T, Repr, Sequential] =>
   })
 
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
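
Three changes in ParIterableLike above are worth calling out: aggregate now takes its zero z by name (the Aggregate task evaluates it once per partition, so it should be cheap and side-effect free), withFilter is introduced as an alias for filter, and the old ParIterableView-based view becomes a deprecated forwarder to seq.view. A short sketch of the first and last points, with ParVector chosen purely for illustration:

    import scala.collection.parallel.immutable.ParVector

    object AggregateDemo {
      def main(args: Array[String]): Unit = {
        val words = ParVector("par", "iterable", "like")

        // z is by-name and may be evaluated once per partition.
        val totalLength = words.aggregate(0)((acc, w) => acc + w.length, _ + _)
        println(totalLength)   // 15

        // The deprecated view now simply delegates to the sequential view.
        println(words.seq.view.map(_.toUpperCase).toVector)   // Vector(PAR, ITERABLE, LIKE)
      }
    }
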
diff --git a/src/library/scala/collection/parallel/ParIterableView.scala b/src/library/scala/collection/parallel/ParIterableView.scala
deleted file mode 100644
index 7644e1b..0000000
--- a/src/library/scala/collection/parallel/ParIterableView.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.collection.parallel
-
-import scala.collection.{ Parallel, IterableView, GenIterableView, Iterator }
-import scala.collection.generic.CanCombineFrom
-
-/** A template view of a non-strict view of a parallel iterable collection.
- *
- *  @tparam T         the type of elements
- *  @tparam Coll      the type of the parallel collection this view was created from
- *  @tparam CollSeq   the type of the sequential collection corresponding to the underlying parallel collection
- *
- *  @since 2.9
- */
-trait ParIterableView[+T, +Coll <: Parallel, +CollSeq]
-extends ParIterableViewLike[T, Coll, CollSeq, ParIterableView[T, Coll, CollSeq], IterableView[T, CollSeq]]
-   with GenIterableView[T, Coll]
-
-
-object ParIterableView {
-  abstract class NoCombiner[T] extends Combiner[T, Nothing] {
-//    self: EnvironmentPassingCombiner[T, Nothing] =>
-    def +=(elem: T): this.type = this
-    def iterator: Iterator[T] = Iterator.empty
-    def result() = throw new UnsupportedOperationException("ParIterableView.Combiner.result")
-    def size = throw new UnsupportedOperationException("ParIterableView.Combiner.size")
-    def clear() {}
-    def combine[N <: T, NewTo >: Nothing](other: Combiner[N, NewTo]) =
-      throw new UnsupportedOperationException("ParIterableView.Combiner.result")
-  }
-
-  type Coll = ParIterableView[_, C, _] forSome { type C <: ParIterable[_] }
-
-  implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterableView[T, ParIterable[T], Iterable[T]]] =
-    new CanCombineFrom[Coll, T, ParIterableView[T, ParIterable[T], Iterable[T]]] {
-      def apply(from: Coll) = new NoCombiner[T] {} // was: with EnvironmentPassingCombiner[T, Nothing]
-      def apply() = new NoCombiner[T] {} // was: with EnvironmentPassingCombiner[T, Nothing]
-    }
-}
diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala
deleted file mode 100644
index 0ecd6bd..0000000
--- a/src/library/scala/collection/parallel/ParIterableViewLike.scala
+++ /dev/null
@@ -1,203 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.collection.parallel
-
-import scala.collection.Parallel
-import scala.collection.{ IterableView, IterableViewLike }
-import scala.collection.{ GenIterableView, GenIterableViewLike }
-import scala.collection.GenTraversableOnce
-import scala.collection.GenTraversable
-import scala.collection.GenIterable
-import scala.collection.GenSeq
-import scala.collection.generic.{ CanBuildFrom, SliceInterval }
-import scala.collection.generic.CanCombineFrom
-import scala.collection.parallel.immutable.ParRange
-import scala.language.implicitConversions
-
-
-
-/** A template view of a non-strict view of parallel iterable collection.
- *
- *  '''Note:''' Regular view traits have type parameters used to carry information
- *  about the type of the elements, type of the collection they are derived from and
- *  their own actual representation type. Parallel views have an additional parameter
- *  which carries information about the type of the sequential version of the view.
- *
- *  @tparam T         the type of the elements this view can traverse
- *  @tparam Coll      the type of the parallel collection this view is derived from
- *  @tparam CollSeq   the type of the sequential collection corresponding to the underlying parallel collection
- *  @tparam This      the actual representation type of this view
- *  @tparam ThisSeq   the type of the sequential representation of this view
- *
- *  @since 2.9
- */
-trait ParIterableViewLike[+T,
-                          +Coll <: Parallel,
-                          +CollSeq,
-                          +This <: ParIterableView[T, Coll, CollSeq] with ParIterableViewLike[T, Coll, CollSeq, This, ThisSeq],
-                          +ThisSeq <: IterableView[T, CollSeq] with IterableViewLike[T, CollSeq, ThisSeq]]
-extends GenIterableView[T, Coll]
-   with GenIterableViewLike[T, Coll, This]
-   with ParIterable[T]
-   with ParIterableLike[T, This, ThisSeq]
-{
-self =>
-
-  override def foreach[U](f: T => U): Unit = super[ParIterableLike].foreach(f)
-  override protected[this] def newCombiner: Combiner[T, This] = throw new UnsupportedOperationException(this + ".newCombiner");
-  protected[this] def viewIdentifier: String
-  protected[this] def viewIdString: String
-
-  protected def underlying: Coll
-
-  /* wrappers */
-
-  trait Transformed[+S] extends ParIterableView[S, Coll, CollSeq] with super.Transformed[S] {
-    override def splitter: IterableSplitter[S]
-    override def iterator = splitter
-    def size = splitter.remaining
-  }
-
-  trait Sliced extends super.Sliced with Transformed[T] {
-    // override def slice(from1: Int, until1: Int): This = newSliced(from1 max 0, until1 max 0).asInstanceOf[This]
-    def splitter: IterableSplitter[T] = self.splitter.slice(from, until)
-    override def seq = self.seq.slice(from, until)
-  }
-
-  trait Mapped[S] extends super.Mapped[S] with Transformed[S]{
-    def splitter: IterableSplitter[S] = self.splitter.map(mapping)
-    override def seq = self.seq.map(mapping).asInstanceOf[IterableView[S, CollSeq]]
-  }
-
-  // only use if other is a ParIterable, otherwise force
-  trait Appended[U >: T] extends super.Appended[U] with Transformed[U] {
-    def restPar: ParIterable[U] = rest.asParIterable
-    def splitter: IterableSplitter[U] = self.splitter.appendParIterable[U, IterableSplitter[U]](restPar.splitter)
-    override def seq = self.seq.++(rest).asInstanceOf[IterableView[U, CollSeq]]
-  }
-
-  trait Forced[S] extends super.Forced[S] with Transformed[S] {
-    def forcedPar: ParIterable[S] = forced.asParIterable
-    def splitter: IterableSplitter[S] = forcedPar.splitter
-    override def seq = forcedPar.seq.view.asInstanceOf[IterableView[S, CollSeq]]
-  }
-
-  // only use if other is a ParSeq, otherwise force
-  trait Zipped[S] extends super.Zipped[S] with Transformed[(T, S)] {
-    def otherPar: ParSeq[S] = other.asParSeq
-    def splitter: IterableSplitter[(T, S)] = self.splitter zipParSeq otherPar.splitter
-    override def seq = (self.seq zip other).asInstanceOf[IterableView[(T, S), CollSeq]]
-  }
-
-  // only use if other is a ParSeq, otherwise force
-  trait ZippedAll[U >: T, S] extends super.ZippedAll[U, S] with Transformed[(U, S)] {
-    def otherPar: ParSeq[S] = other.asParSeq
-    def splitter: IterableSplitter[(U, S)] = self.splitter.zipAllParSeq(otherPar.splitter, thisElem, thatElem)
-    override def seq = (self.seq.zipAll(other, thisElem, thatElem)).asInstanceOf[IterableView[(U, S), CollSeq]]
-  }
-
-  protected[this] def thisParSeq: ParSeq[T] = mutable.ParArray.fromTraversables(this.iterator)
-  private[this] implicit def asThis(xs: Transformed[T]): This = xs.asInstanceOf[This]
-
-  /* operation overrides */
-
-  override def take(n: Int): This = newSliced(SliceInterval(0, n))
-  override def drop(n: Int): This = newSliced(SliceInterval(n, splitter.remaining))
-  override def splitAt(n: Int): (This, This) = (take(n), drop(n))
-  override def slice(from: Int, until: Int): This = newSliced(SliceInterval(from, until))
-  override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = newMapped(f).asInstanceOf[That]
-  override def ++[U >: T, That](xs: GenTraversableOnce[U])(implicit bf: CanBuildFrom[This, U, That]): That = newAppendedTryParIterable(xs.toTraversable).asInstanceOf[That]
-
-  override def filter(p: T => Boolean): This = newForced(thisParSeq.filter(p)).asInstanceOf[This]
-  override def filterNot(p: T => Boolean): This = newForced(thisParSeq.filterNot(p)).asInstanceOf[This]
-  override def partition(p: T => Boolean): (This, This) = {
-    val (t, f) = thisParSeq.partition(p)
-    (newForced(t).asInstanceOf[This], newForced(f).asInstanceOf[This])
-  }
-  override def takeWhile(p: T => Boolean): This = newForced(thisParSeq.takeWhile(p)).asInstanceOf[This]
-  override def dropWhile(p: T => Boolean): This = newForced(thisParSeq.dropWhile(p)).asInstanceOf[This]
-  override def span(p: T => Boolean): (This, This) = {
-    val (pref, suff) = thisParSeq.span(p)
-    (newForced(pref).asInstanceOf[This], newForced(suff).asInstanceOf[This])
-  }
-  override def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.flatMap(f)).asInstanceOf[That]
-
-  override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That]
-  override def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[This, (U, Int), That]): That =
-    newZipped(ParRange(0, splitter.remaining, 1, false)).asInstanceOf[That]
-  override def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[This, (U, S), That]): That =
-    newZippedAllTryParSeq(that, thisElem, thatElem).asInstanceOf[That]
-
-  override def force[U >: T, That](implicit bf: CanBuildFrom[Coll, U, That]) = bf ifParallel { pbf =>
-    tasksupport.executeAndWaitResult(new Force(pbf, splitter).mapResult(_.result).asInstanceOf[Task[That, ResultMapping[_, Force[U, That], That]]])
-  } otherwise {
-    val b = bf(underlying)
-    b ++= this.iterator
-    b.result
-  }
-
-  /* wrapper virtual ctors */
-
-  protected def newSliced(_endpoints: SliceInterval): Transformed[T] = new { val endpoints = _endpoints } with Sliced
-  protected def newMapped[S](f: T => S): Transformed[S] = new Mapped[S] { val mapping = f }
-  protected def newForced[S](xs: => GenSeq[S]): Transformed[S] = new Forced[S] { val forced = xs }
-  protected def newAppended[U >: T](that: GenTraversable[U]): Transformed[U] = new Appended[U] { val rest = that }
-  protected def newDroppedWhile(p: T => Boolean) = unsupported
-  protected def newTakenWhile(p: T => Boolean) = unsupported
-  protected def newFlatMapped[S](f: T => GenTraversableOnce[S]) = unsupported
-  protected def newFiltered(p: T => Boolean) = unsupported
-  protected def newZipped[S](that: GenIterable[S]): Transformed[(T, S)] = new Zipped[S] { val other = that }
-  protected def newZippedAll[U >: T, S](that: GenIterable[S], _thisElem: U, _thatElem: S): Transformed[(U, S)] = new ZippedAll[U, S] {
-    val other = that
-    val thisElem = _thisElem
-    val thatElem = _thatElem
-  }
-
-  /* argument sequence dependent ctors */
-
-  protected def newForcedTryParIterable[S](xs: => GenSeq[S]): Transformed[S] = {
-    if (xs.isParIterable) newForced[S](xs)
-    else newForced(mutable.ParArray.fromTraversables(xs))
-  }
-  protected def newAppendedTryParIterable[U >: T](that: GenTraversable[U]): Transformed[U] = {
-    // we only append if `that` is a parallel iterable, i.e. it has a splitter
-    if (that.isParIterable) newAppended(that)
-    else newAppended(mutable.ParArray.fromTraversables(that))
-  }
-  protected def newZippedTryParSeq[S](that: GenIterable[S]): Transformed[(T, S)] = {
-    if (that.isParSeq) newZipped[S](that)
-    else newZipped[S](mutable.ParArray.fromTraversables(that))
-  }
-  protected def newZippedAllTryParSeq[S, U >: T](that: GenIterable[S], thisElem: U, thatElem: S): Transformed[(U, S)] = {
-    if (that.isParSeq) newZippedAll(that, thisElem, thatElem)
-    else newZippedAll(mutable.ParArray.fromTraversables(that), thisElem, thatElem)
-  }
-
-  /* tasks */
-
-  protected[this] class Force[U >: T, That](cbf: CanCombineFrom[Coll, U, That], protected[this] val pit: IterableSplitter[T])
-  extends Transformer[Combiner[U, That], Force[U, That]] {
-    var result: Combiner[U, That] = null
-    def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cbf(self.underlying)))
-    protected[this] def newSubtask(p: IterableSplitter[T]) = new Force(cbf, p)
-    override def merge(that: Force[U, That]) = result = result combine that.result
-  }
-
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala
index 1f27ae8..9f92e6c 100644
--- a/src/library/scala/collection/parallel/ParMap.scala
+++ b/src/library/scala/collection/parallel/ParMap.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel
+package scala
+package collection.parallel
 
 import scala.collection.Map
 import scala.collection.GenMap
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index 56594be..d2b15c7 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -6,11 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection.parallel
-
-
-
+package scala
+package collection.parallel
 
 import scala.collection.MapLike
 import scala.collection.GenMapLike
@@ -20,10 +17,6 @@ import scala.annotation.unchecked.uncheckedVariance
 import scala.collection.generic.IdleSignalling
 import scala.collection.generic.Signalling
 
-
-
-
-
 /** A template trait for mutable parallel maps. This trait is to be mixed in
  *  with concrete parallel maps to override the representation type.
  *
@@ -67,7 +60,7 @@ self =>
       i =>
       val iter = s
       def hasNext = iter.hasNext
-      def next() = iter.next._1
+      def next() = iter.next()._1
       def split = {
         val ss = iter.split.map(keysIterator(_))
         ss.foreach { _.signalDelegate = i.signalDelegate }
@@ -84,7 +77,7 @@ self =>
       i =>
       val iter = s
       def hasNext = iter.hasNext
-      def next() = iter.next._2
+      def next() = iter.next()._2
       def split = {
         val ss = iter.split.map(valuesIterator(_))
         ss.foreach { _.signalDelegate = i.signalDelegate }
@@ -146,15 +139,3 @@ self =>
 
   // note - should not override toMap (could be mutable)
 }
-
-
-
-
-
-
-
-
-
-
-
-
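
The keysIterator and valuesIterator helpers above now call next() explicitly before projecting the pair. From user code the same projections are reachable through keys and values, roughly as sketched here (the demo object and sample data are illustrative):

    import scala.collection.parallel.immutable.ParMap

    object ParMapProjectionsDemo {
      def main(args: Array[String]): Unit = {
        val m = ParMap("a" -> 1, "b" -> 2, "c" -> 3)
        // Traversal order of a parallel collection is unspecified,
        // so normalize before printing.
        println(m.keys.seq.toSet)   // Set(a, b, c)
        println(m.values.sum)       // 6
      }
    }
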
diff --git a/src/library/scala/collection/parallel/ParSeq.scala b/src/library/scala/collection/parallel/ParSeq.scala
index b905d1d..2c883ba 100644
--- a/src/library/scala/collection/parallel/ParSeq.scala
+++ b/src/library/scala/collection/parallel/ParSeq.scala
@@ -6,10 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection.parallel
-
-
+package scala
+package collection.parallel
 
 import scala.collection.generic.GenericCompanion
 import scala.collection.generic.GenericParCompanion
@@ -18,9 +16,6 @@ import scala.collection.generic.ParFactory
 import scala.collection.generic.CanCombineFrom
 import scala.collection.GenSeq
 import scala.collection.parallel.mutable.ParArrayCombiner
-import scala.collection.parallel.mutable.ParArray
-
-
 
 /** A template trait for parallel sequences.
  *
@@ -47,35 +42,9 @@ trait ParSeq[+T] extends GenSeq[T]
   override def stringPrefix = getClass.getSimpleName
 }
 
-
 object ParSeq extends ParFactory[ParSeq] {
   implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T]
 
   def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]
-
   def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]
-
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index da9abfc..0b6fec3 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel
+package scala
+package collection.parallel
 
 import scala.collection.{ Parallel, SeqLike, GenSeqLike, GenSeq, GenIterable, Iterator }
 import scala.collection.generic.DefaultSignalling
@@ -15,6 +16,7 @@ import scala.collection.generic.CanBuildFrom
 import scala.collection.generic.CanCombineFrom
 import scala.collection.generic.VolatileAbort
 
+import scala.collection.parallel.ParallelCollectionImplicits._
 
 /** A template trait for sequences of type `ParSeq[T]`, representing
  *  parallel sequences with element type `T`.
@@ -44,7 +46,7 @@ trait ParSeqLike[+T, +Repr <: ParSeq[T], +Sequential <: Seq[T] with SeqLike[T, S
 extends scala.collection.GenSeqLike[T, Repr]
    with ParIterableLike[T, Repr, Sequential] {
 self =>
-  
+
   protected[this] type SuperParIterator = IterableSplitter[T]
 
   /** A more refined version of the iterator found in the `ParallelIterable` trait,
@@ -68,7 +70,7 @@ self =>
       val x = self(i)
       i += 1
       x
-    } else Iterator.empty.next
+    } else Iterator.empty.next()
 
     def head = self(i)
 
@@ -228,7 +230,7 @@ self =>
     b ++= pits(0)
     b ++= patch
     b ++= pits(2)
-    setTaskSupport(b.result, tasksupport)
+    setTaskSupport(b.result(), tasksupport)
   }
 
   def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) {
@@ -252,7 +254,7 @@ self =>
 
   def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) {
     patch(length, new immutable.Repetition(elem, len - length), 0)
-  } else patch(length, Nil, 0);
+  } else patch(length, Nil, 0)
 
   override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
     val thatseq = that.asParSeq
@@ -260,7 +262,7 @@ self =>
       new Zip(length min thatseq.length, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult {
         _.resultWithTaskSupport
       }
-    );
+    )
   } else super.zip(that)(bf)
 
   /** Tests whether every element of this $coll relates to the
@@ -322,15 +324,8 @@ self =>
 
   override def toSeq = this.asInstanceOf[ParSeq[T]]
 
-  override def view = new ParSeqView[T, Repr, Sequential] {
-    protected lazy val underlying = self.repr
-    protected[this] def viewIdentifier = ""
-    protected[this] def viewIdString = ""
-    def length = self.length
-    def apply(idx: Int) = self(idx)
-    override def seq = self.seq.view
-    def splitter = self.splitter
-  }
+  @deprecated("use .seq.view", "2.11.0")
+  override def view = seq.view
 
   /* tasks */
 
@@ -423,7 +418,7 @@ self =>
     @volatile var result: Boolean = true
     def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
       result = pit.sameElements(otherpit)
-      if (!result) pit.abort
+      if (!result) pit.abort()
     }
     protected[this] def newSubtask(p: SuperParIterator) = unsupported
     override def split = {
@@ -471,7 +466,7 @@ self =>
     @volatile var result: Boolean = true
     def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
       result = pit.corresponds(corr)(otherpit)
-      if (!result) pit.abort
+      if (!result) pit.abort()
     }
     protected[this] def newSubtask(p: SuperParIterator) = unsupported
     override def split = {
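
In ParSeqLike the builders are now finished with an explicit result(), padTo loses a stray semicolon, and the bespoke ParSeqView-based view is replaced by a deprecated forwarder to seq.view. A short sketch of the affected operations (ParArray is used only as a convenient concrete ParSeq):

    import scala.collection.parallel.mutable.ParArray

    object ParSeqOpsDemo {
      def main(args: Array[String]): Unit = {
        val xs = ParArray(1, 2, 3, 4, 5)
        // patch replaces two elements starting at index 1.
        println(xs.patch(1, ParArray(20, 30), 2))   // ParArray(1, 20, 30, 4, 5)
        // padTo appends the element until the requested length is reached.
        println(xs.padTo(7, 0))                     // ParArray(1, 2, 3, 4, 5, 0, 0)
        // Instead of the deprecated view, go through the sequential one.
        println(xs.seq.view.map(_ * 10).take(2).toList)   // List(10, 20)
      }
    }
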
diff --git a/src/library/scala/collection/parallel/ParSeqView.scala b/src/library/scala/collection/parallel/ParSeqView.scala
deleted file mode 100644
index 3e3c497..0000000
--- a/src/library/scala/collection/parallel/ParSeqView.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.collection.parallel
-
-import scala.collection.{ TraversableView, SeqView, Parallel, Iterator }
-import scala.collection.generic.CanCombineFrom
-
-/** A template view of a non-strict view of a parallel sequence.
- *
- *  @tparam T         the type of elements in this parallel sequence
- *  @tparam Coll      the type of the underlying parallel collection
- *  @tparam CollSeq   the type of the sequential collection corresponding to the underlying parallel collection
- *
- *  @since 2.9
- */
-trait ParSeqView[+T, +Coll <: Parallel, +CollSeq]
-extends ParSeqViewLike[T, Coll, CollSeq, ParSeqView[T, Coll, CollSeq], SeqView[T, CollSeq]]
-
-
-object ParSeqView {
-  abstract class NoCombiner[T] extends Combiner[T, Nothing] {
-//    self: EnvironmentPassingCombiner[T, Nothing] =>
-    def +=(elem: T): this.type = this
-    def iterator: Iterator[T] = Iterator.empty
-    def result() = throw new UnsupportedOperationException("ParSeqView.Combiner.result")
-    def size = throw new UnsupportedOperationException("ParSeqView.Combiner.size")
-    def clear() {}
-    def combine[N <: T, NewTo >: Nothing](other: Combiner[N, NewTo]) =
-      throw new UnsupportedOperationException("ParSeqView.Combiner.result")
-  }
-
-  type Coll = ParSeqView[_, C, _] forSome { type C <: ParSeq[_] }
-
-  implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeqView[T, ParSeq[T], Seq[T]]] =
-    new CanCombineFrom[Coll, T, ParSeqView[T, ParSeq[T], Seq[T]]] {
-      def apply(from: Coll) = new NoCombiner[T] {} // was: with EnvironmentPassingCombiner[T, Nothing]
-      def apply() = new NoCombiner[T] {} // was: with EnvironmentPassingCombiner[T, Nothing]
-    }
-}
diff --git a/src/library/scala/collection/parallel/ParSeqViewLike.scala b/src/library/scala/collection/parallel/ParSeqViewLike.scala
deleted file mode 100644
index 04369d8..0000000
--- a/src/library/scala/collection/parallel/ParSeqViewLike.scala
+++ /dev/null
@@ -1,188 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.collection.parallel
-
-import scala.collection.{ Parallel, SeqView, SeqViewLike, GenSeqView, GenSeqViewLike, GenSeq }
-import scala.collection.{ GenIterable, GenTraversable, GenTraversableOnce, Iterator }
-import scala.collection.generic.{ CanBuildFrom, SliceInterval }
-import scala.collection.generic.CanCombineFrom
-import scala.collection.parallel.immutable.ParRange
-
-
-/** A template view of a non-strict view of parallel sequence.
- *
- *  @tparam T             the type of the elements in this view
- *  @tparam Coll          type of the collection this view is derived from
- *  @tparam CollSeq       type of the sequential collection corresponding to the underlying parallel collection
- *  @tparam This          actual representation type of this view
- *  @tparam ThisSeq       type of the sequential version of this view
- *
- *  @since 2.9
- */
-trait ParSeqViewLike[+T,
-                     +Coll <: Parallel,
-                     +CollSeq,
-                     +This <: ParSeqView[T, Coll, CollSeq] with ParSeqViewLike[T, Coll, CollSeq, This, ThisSeq],
-                     +ThisSeq <: SeqView[T, CollSeq] with SeqViewLike[T, CollSeq, ThisSeq]]
-extends GenSeqView[T, Coll]
-   with GenSeqViewLike[T, Coll, This]
-   with ParIterableView[T, Coll, CollSeq]
-   with ParIterableViewLike[T, Coll, CollSeq, This, ThisSeq]
-   with ParSeq[T]
-   with ParSeqLike[T, This, ThisSeq]
-{
-self =>
-
-  trait Transformed[+S] extends ParSeqView[S, Coll, CollSeq]
-  with super[ParIterableView].Transformed[S] with super[GenSeqViewLike].Transformed[S] {
-    override def splitter: SeqSplitter[S]
-    override def iterator = splitter
-    override def size = length
-  }
-
-  trait Sliced extends super[GenSeqViewLike].Sliced with super[ParIterableViewLike].Sliced with Transformed[T] {
-    // override def slice(from1: Int, until1: Int): This = newSliced(from1 max 0, until1 max 0).asInstanceOf[This]
-    override def splitter = self.splitter.psplit(from, until - from)(1)
-    override def seq = self.seq.slice(from, until)
-  }
-
-  trait Mapped[S] extends super[GenSeqViewLike].Mapped[S] with super[ParIterableViewLike].Mapped[S] with Transformed[S] {
-    override def splitter = self.splitter.map(mapping)
-    override def seq = self.seq.map(mapping).asInstanceOf[SeqView[S, CollSeq]]
-  }
-
-  trait Appended[U >: T] extends super[GenSeqViewLike].Appended[U] with super[ParIterableViewLike].Appended[U] with Transformed[U] {
-    override def restPar: ParSeq[U] = rest.asParSeq
-    override def splitter = self.splitter.appendParSeq[U, SeqSplitter[U]](restPar.splitter)
-    override def seq = self.seq.++(rest).asInstanceOf[SeqView[U, CollSeq]]
-  }
-
-  trait Forced[S] extends super[GenSeqViewLike].Forced[S] with super[ParIterableViewLike].Forced[S] with Transformed[S] {
-    override def forcedPar: ParSeq[S] = forced.asParSeq
-    override def splitter: SeqSplitter[S] = forcedPar.splitter
-    override def seq = forcedPar.seq.view.asInstanceOf[SeqView[S, CollSeq]]
-  }
-
-  trait Zipped[S] extends super[GenSeqViewLike].Zipped[S] with super[ParIterableViewLike].Zipped[S] with Transformed[(T, S)] {
-    override def splitter = self.splitter zipParSeq otherPar.splitter
-    override def seq = (self.seq zip other).asInstanceOf[SeqView[(T, S), CollSeq]]
-  }
-
-  trait ZippedAll[U >: T, S] extends super[GenSeqViewLike].ZippedAll[U, S] with super[ParIterableViewLike].ZippedAll[U, S] with Transformed[(U, S)] {
-    override def splitter: SeqSplitter[(U, S)] = self.splitter.zipAllParSeq(otherPar.splitter, thisElem, thatElem)
-    override def seq = (self.seq.zipAll(other, thisElem, thatElem)).asInstanceOf[SeqView[(U, S), CollSeq]]
-  }
-
-  trait Reversed extends super.Reversed with Transformed[T] {
-    override def splitter: SeqSplitter[T] = self.splitter.reverse
-    override def seq = self.seq.reverse.asInstanceOf[SeqView[T, CollSeq]]
-  }
-
-  // use only with ParSeq patches, otherwise force
-  trait Patched[U >: T] extends super.Patched[U] with Transformed[U] {
-    def patchPar: ParSeq[U] = patch.asInstanceOf[ParSeq[U]]
-    override def splitter: SeqSplitter[U] = self.splitter.patchParSeq[U](from, patchPar.splitter, replaced)
-    override def seq = self.seq.patch(from, patch, replaced).asInstanceOf[SeqView[U, CollSeq]]
-  }
-
-  // !!!
-  //
-  // What is up with this trait and method, why are they here doing
-  // nothing but throwing exceptions, without even being deprecated?
-  // They're not implementing something abstract; why aren't they
-  // just removed?
-  //
-  // use Patched instead
-  trait Prepended[U >: T] extends super.Prepended[U] with Transformed[U] {
-    unsupported
-  }
-  protected def newPrepended[U >: T](elem: U): Transformed[U] = unsupported
-
-  /* wrapper virtual ctors */
-
-  protected override def newSliced(_endpoints: SliceInterval): Transformed[T] = new { val endpoints = _endpoints } with Sliced
-  protected override def newAppended[U >: T](that: GenTraversable[U]): Transformed[U] = {
-    // we only append if `that` is a parallel sequence, i.e. it has a precise splitter
-    if (that.isParSeq) new Appended[U] { val rest = that }
-    else newForced(mutable.ParArray.fromTraversables(this, that))
-  }
-  protected override def newForced[S](xs: => GenSeq[S]): Transformed[S] = {
-    if (xs.isParSeq) new Forced[S] { val forced = xs }
-    else new Forced[S] { val forced = mutable.ParArray.fromTraversables(xs) }
-  }
-  protected override def newMapped[S](f: T => S): Transformed[S] = new Mapped[S] { val mapping = f }
-  protected override def newZipped[S](that: GenIterable[S]): Transformed[(T, S)] = new Zipped[S] { val other = that }
-  protected override def newZippedAll[U >: T, S](that: GenIterable[S], _thisElem: U, _thatElem: S): Transformed[(U, S)] = new ZippedAll[U, S] {
-    val other = that
-    val thisElem = _thisElem
-    val thatElem = _thatElem
-  }
-  protected def newReversed: Transformed[T] = new Reversed { }
-  protected def newPatched[U >: T](_from: Int, _patch: GenSeq[U], _replaced: Int): Transformed[U] = new {
-    val from = _from;
-    val patch = _patch;
-    val replaced = _replaced
-  } with Patched[U]
-
-  /* operation overrides */
-
-  /* sliced */
-  override def slice(from: Int, until: Int): This = newSliced(SliceInterval(from, until)).asInstanceOf[This]
-  override def take(n: Int): This = newSliced(SliceInterval(0, n)).asInstanceOf[This]
-  override def drop(n: Int): This = newSliced(SliceInterval(n, length)).asInstanceOf[This]
-  override def splitAt(n: Int): (This, This) = (take(n), drop(n))
-
-  /* appended */
-  override def ++[U >: T, That](xs: GenTraversableOnce[U])(implicit bf: CanBuildFrom[This, U, That]): That = newAppended(xs.toTraversable).asInstanceOf[That]
-  override def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[This, U, That]): That = ++(Iterator.single(elem))(bf)
-  //override def union[U >: T, That](that: GenSeq[U])(implicit bf: CanBuildFrom[This, U, That]): That = this ++ that
-
-  /* misc */
-  override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = newMapped(f).asInstanceOf[That]
-  override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That]
-  override def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[This, (U, Int), That]): That =
-    newZipped(ParRange(0, splitter.remaining, 1, false)).asInstanceOf[That]
-  override def reverse: This = newReversed.asInstanceOf[This]
-  override def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = reverse.map(f)
-
-  /* patched */
-  override def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[This, U, That]): That = {
-    require(0 <= index && index < length)
-    patch(index, List(elem), 1)(bf)
-  }
-  override def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[This, U, That]): That = patch(length, Seq.fill(len - length)(elem), 0)
-  override def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[This, U, That]): That = patch(0, mutable.ParArray.fromTraversables(Iterator.single(elem)), 0)
-  override def patch[U >: T, That](from: Int, patch: GenSeq[U], replace: Int)(implicit bf: CanBuildFrom[This, U, That]): That = newPatched(from, patch, replace).asInstanceOf[That]
-
-  /* forced */
-  // override def diff[U >: T](that: GenSeq[U]): This = newForced(thisParSeq diff that).asInstanceOf[This]
-  // override def intersect[U >: T](that: GenSeq[U]): This = newForced(thisParSeq intersect that).asInstanceOf[This]
-  // override def sorted[U >: T](implicit ord: Ordering[U]): This = newForced(thisParSeq sorted ord).asInstanceOf[This]
-  override def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[This, S, That]): That = filter(pf.isDefinedAt).map(pf)(bf)
-  override def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.scanLeft(z)(op)).asInstanceOf[That]
-  override def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.scanRight(z)(op)).asInstanceOf[That]
-  override def groupBy[K](f: T => K): immutable.ParMap[K, This] = thisParSeq.groupBy(f).map(kv => (kv._1, newForced(kv._2).asInstanceOf[This]))
-  override def force[U >: T, That](implicit bf: CanBuildFrom[Coll, U, That]) = bf ifParallel { pbf =>
-    tasksupport.executeAndWaitResult(new Force(pbf, splitter).mapResult(_.result).asInstanceOf[Task[That, _]])
-  } otherwise {
-    val b = bf(underlying)
-    b ++= this.iterator
-    b.result
-  }
-
-  /* tasks */
-
-  protected[this] class Force[U >: T, That](cbf: CanCombineFrom[Coll, U, That], protected[this] val pit: SeqSplitter[T])
-  extends Transformer[Combiner[U, That], Force[U, That]] {
-    var result: Combiner[U, That] = null
-    def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cbf(self.underlying)))
-    protected[this] def newSubtask(p: SuperParIterator) = new Force(cbf, down(p))
-    override def merge(that: Force[U, That]) = result = result combine that.result
-  }
-}
diff --git a/src/library/scala/collection/parallel/ParSet.scala b/src/library/scala/collection/parallel/ParSet.scala
index 6e5e9b4..ba3d23f 100644
--- a/src/library/scala/collection/parallel/ParSet.scala
+++ b/src/library/scala/collection/parallel/ParSet.scala
@@ -6,25 +6,12 @@
 **                          |/                                          **
 \*                                                                      */
 
+package scala
+package collection
+package parallel
 
-package scala.collection.parallel
-
-
-
-
-
-
-
-import scala.collection.Set
-import scala.collection.GenSet
-import scala.collection.mutable.Builder
 import scala.collection.generic._
 
-
-
-
-
-
 /** A template trait for parallel sets.
  *
  *  $sideeffects
@@ -35,12 +22,12 @@ import scala.collection.generic._
  *  @since 2.9
  */
 trait ParSet[T]
-extends GenSet[T]
+   extends GenSet[T]
    with GenericParTemplate[T, ParSet]
    with ParIterable[T]
    with ParSetLike[T, ParSet[T], Set[T]]
-{
-self =>
+{ self =>
+
   override def empty: ParSet[T] = mutable.ParHashSet[T]()
 
   //protected[this] override def newCombiner: Combiner[T, ParSet[T]] = ParSet.newCombiner[T]
@@ -50,39 +37,8 @@ self =>
   override def stringPrefix = "ParSet"
 }
 
-
-
 object ParSet extends ParSetFactory[ParSet] {
   def newCombiner[T]: Combiner[T, ParSet[T]] = mutable.ParHashSetCombiner[T]
 
   implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T]
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala
index c80b5de..4e9a2e5 100644
--- a/src/library/scala/collection/parallel/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/ParSetLike.scala
@@ -6,23 +6,13 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection.parallel
-
-
+package scala
+package collection.parallel
 
 import scala.collection.SetLike
 import scala.collection.GenSetLike
 import scala.collection.GenSet
 import scala.collection.Set
-import scala.collection.mutable.Builder
-
-
-
-
-
-
-
 
 /** A template trait for parallel sets. This trait is mixed in with concrete
  *  parallel sets to override the representation type.
@@ -52,26 +42,4 @@ extends GenSetLike[T, Repr]
   def diff(that: GenSet[T]): Repr = sequentially {
     _ diff that
   }
-
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
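
ParSetLike keeps diff as a sequential fallback (via the sequentially helper), and the rest of the set algebra stays available through the same API, so parallel sets can be combined as usual. For example (the demo object is illustrative):

    import scala.collection.parallel.ParSet

    object ParSetOpsDemo {
      def main(args: Array[String]): Unit = {
        val a = ParSet(1, 2, 3, 4)
        val b = ParSet(3, 4, 5)
        println((a union b).seq.toList.sorted)      // List(1, 2, 3, 4, 5)
        println((a intersect b).seq.toList.sorted)  // List(3, 4)
        println((a diff b).seq.toList.sorted)       // List(1, 2)
      }
    }
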
diff --git a/src/library/scala/collection/parallel/PreciseSplitter.scala b/src/library/scala/collection/parallel/PreciseSplitter.scala
index 42563f4..4b22934 100644
--- a/src/library/scala/collection/parallel/PreciseSplitter.scala
+++ b/src/library/scala/collection/parallel/PreciseSplitter.scala
@@ -6,12 +6,11 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel
-
+package scala
+package collection.parallel
 
 import scala.collection.Seq
 
-
 /** A precise splitter (or a precise split iterator) can be split into arbitrary number of splitters
  *  that traverse disjoint subsets of arbitrary sizes.
  *
@@ -55,10 +54,4 @@ trait PreciseSplitter[+T] extends Splitter[T] {
   def psplit(sizes: Int*): Seq[PreciseSplitter[T]]
 
   def split: Seq[PreciseSplitter[T]]
-
 }
-
-
-
-
-
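
PreciseSplitter is the hook that lets parallel sequences be divided into chunks of exact sizes: psplit(sizes: Int*) returns disjoint splitters covering consecutive slices. Assuming a parallel sequence whose public iterator is such a precise splitter, a small sketch looks like this (ParVector and the chosen sizes are illustrative):

    import scala.collection.parallel.immutable.ParVector

    object PreciseSplitDemo {
      def main(args: Array[String]): Unit = {
        // Cut the sequence's splitter into pieces of sizes 2, 1 and 3.
        val pieces = ParVector(1, 2, 3, 4, 5, 6).iterator.psplit(2, 1, 3)
        pieces.foreach(p => println(p.mkString(" ")))   // 1 2 / 3 / 4 5 6
      }
    }
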
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index 3150b0d..5f2ceac 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -6,10 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection.parallel
-
-
+package scala
+package collection.parallel
 
 import scala.collection.Parallel
 import scala.collection.generic.Signalling
@@ -21,8 +19,6 @@ import scala.collection.Iterator.empty
 import scala.collection.GenTraversableOnce
 import scala.collection.parallel.immutable.repetition
 
-
-
 private[collection] trait RemainsIterator[+T] extends Iterator[T] {
   /** The number of elements this iterator has yet to iterate.
    *  This method doesn't change the state of the iterator.
@@ -35,7 +31,6 @@ private[collection] trait RemainsIterator[+T] extends Iterator[T] {
   def isRemainingCheap = true
 }
 
-
 /** Augments iterators with additional methods, mostly transformers,
  *  assuming they iterate an iterable collection.
  *
@@ -47,47 +42,47 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
 
   override def count(p: T => Boolean): Int = {
     var i = 0
-    while (hasNext) if (p(next)) i += 1
+    while (hasNext) if (p(next())) i += 1
     i
   }
 
   override def reduce[U >: T](op: (U, U) => U): U = {
-    var r: U = next
-    while (hasNext) r = op(r, next)
+    var r: U = next()
+    while (hasNext) r = op(r, next())
     r
   }
 
   override def fold[U >: T](z: U)(op: (U, U) => U): U = {
     var r = z
-    while (hasNext) r = op(r, next)
+    while (hasNext) r = op(r, next())
     r
   }
 
   override def sum[U >: T](implicit num: Numeric[U]): U = {
     var r: U = num.zero
-    while (hasNext) r = num.plus(r, next)
+    while (hasNext) r = num.plus(r, next())
     r
   }
 
   override def product[U >: T](implicit num: Numeric[U]): U = {
     var r: U = num.one
-    while (hasNext) r = num.times(r, next)
+    while (hasNext) r = num.times(r, next())
     r
   }
 
   override def min[U >: T](implicit ord: Ordering[U]): T = {
-    var r = next
+    var r = next()
     while (hasNext) {
-      val curr = next
+      val curr = next()
       if (ord.lteq(curr, r)) r = curr
     }
     r
   }
 
   override def max[U >: T](implicit ord: Ordering[U]): T = {
-    var r = next
+    var r = next()
     while (hasNext) {
-      val curr = next
+      val curr = next()
       if (ord.gteq(curr, r)) r = curr
     }
     r
@@ -97,16 +92,16 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
     var i = from
     val until = from + len
     while (i < until && hasNext) {
-      array(i) = next
+      array(i) = next()
       i += 1
     }
   }
 
   def reduceLeft[U >: T](howmany: Int, op: (U, U) => U): U = {
     var i = howmany - 1
-    var u: U = next
+    var u: U = next()
     while (i > 0 && hasNext) {
-      u = op(u, next)
+      u = op(u, next())
       i -= 1
     }
     u
@@ -117,15 +112,16 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
   def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
     //val cb = pbf(repr)
     if (isRemainingCheap) cb.sizeHint(remaining)
-    while (hasNext) cb += f(next)
+    while (hasNext) cb += f(next())
     cb
   }
 
   def collect2combiner[S, That](pf: PartialFunction[T, S], cb: Combiner[S, That]): Combiner[S, That] = {
     //val cb = pbf(repr)
+    val runWith = pf.runWith(cb += _)
     while (hasNext) {
-      val curr = next
-      if (pf.isDefinedAt(curr)) cb += pf(curr)
+      val curr = next()
+      runWith(curr)
     }
     cb
   }
@@ -133,7 +129,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
   def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
     //val cb = pbf(repr)
     while (hasNext) {
-      val traversable = f(next).seq
+      val traversable = f(next()).seq
       if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator
       else cb ++= traversable
     }
@@ -148,7 +144,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
 
   def filter2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = {
     while (hasNext) {
-      val curr = next
+      val curr = next()
       if (pred(curr)) cb += curr
     }
     cb
@@ -156,7 +152,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
 
   def filterNot2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = {
     while (hasNext) {
-      val curr = next
+      val curr = next()
       if (!pred(curr)) cb += curr
     }
     cb
@@ -164,7 +160,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
 
   def partition2combiners[U >: T, This](pred: T => Boolean, btrue: Combiner[U, This], bfalse: Combiner[U, This]) = {
     while (hasNext) {
-      val curr = next
+      val curr = next()
       if (pred(curr)) btrue += curr
       else bfalse += curr
     }
@@ -214,7 +210,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
   def takeWhile2combiner[U >: T, This](p: T => Boolean, cb: Combiner[U, This]) = {
     var loop = true
     while (hasNext && loop) {
-      val curr = next
+      val curr = next()
       if (p(curr)) cb += curr
       else loop = false
     }
@@ -224,7 +220,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
   def span2combiners[U >: T, This](p: T => Boolean, before: Combiner[U, This], after: Combiner[U, This]) = {
     var isBefore = true
     while (hasNext && isBefore) {
-      val curr = next
+      val curr = next()
       if (p(curr)) before += curr
       else {
         if (isRemainingCheap) after.sizeHint(remaining + 1)
@@ -240,7 +236,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
     var last = z
     var i = from
     while (hasNext) {
-      last = op(last, next)
+      last = op(last, next())
       array(i) = last
       i += 1
     }
@@ -249,7 +245,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
   def scanToCombiner[U >: T, That](startValue: U, op: (U, U) => U, cb: Combiner[U, That]) = {
     var curr = startValue
     while (hasNext) {
-      curr = op(curr, next)
+      curr = op(curr, next())
       cb += curr
     }
     cb
@@ -259,7 +255,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
     var curr = startValue
     var left = howmany
     while (left > 0) {
-      curr = op(curr, next)
+      curr = op(curr, next())
       cb += curr
       left -= 1
     }
@@ -269,16 +265,16 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
   def zip2combiner[U >: T, S, That](otherpit: RemainsIterator[S], cb: Combiner[(U, S), That]): Combiner[(U, S), That] = {
     if (isRemainingCheap && otherpit.isRemainingCheap) cb.sizeHint(remaining min otherpit.remaining)
     while (hasNext && otherpit.hasNext) {
-      cb += ((next, otherpit.next))
+      cb += ((next(), otherpit.next()))
     }
     cb
   }
 
   def zipAll2combiner[U >: T, S, That](that: RemainsIterator[S], thiselem: U, thatelem: S, cb: Combiner[(U, S), That]): Combiner[(U, S), That] = {
     if (isRemainingCheap && that.isRemainingCheap) cb.sizeHint(remaining max that.remaining)
-    while (this.hasNext && that.hasNext) cb += ((this.next, that.next))
-    while (this.hasNext) cb += ((this.next, thatelem))
-    while (that.hasNext) cb += ((thiselem, that.next))
+    while (this.hasNext && that.hasNext) cb += ((this.next(), that.next()))
+    while (this.hasNext) cb += ((this.next(), thatelem))
+    while (that.hasNext) cb += ((thiselem, that.next()))
     cb
   }
 
@@ -298,7 +294,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
     var total = 0
     var loop = true
     while (hasNext && loop) {
-      if (pred(next)) total += 1
+      if (pred(next())) total += 1
       else loop = false
     }
     total
@@ -308,7 +304,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
     var i = 0
     var loop = true
     while (hasNext && loop) {
-      if (pred(next)) loop = false
+      if (pred(next())) loop = false
       else i += 1
     }
     if (loop) -1 else i
@@ -318,7 +314,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
     var pos = -1
     var i = 0
     while (hasNext) {
-      if (pred(next)) pos = i
+      if (pred(next())) pos = i
       i += 1
     }
     pos
@@ -326,7 +322,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
 
   def corresponds[S](corr: (T, S) => Boolean)(that: Iterator[S]): Boolean = {
     while (hasNext && that.hasNext) {
-      if (!corr(next, that.next)) return false
+      if (!corr(next(), that.next())) return false
     }
     hasNext == that.hasNext
   }
@@ -348,7 +344,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
     //val cb = cbf(repr)
     if (isRemainingCheap) cb.sizeHint(remaining)
     var lst = List[S]()
-    while (hasNext) lst ::= f(next)
+    while (hasNext) lst ::= f(next())
     while (lst != Nil) {
       cb += lst.head
       lst = lst.tail
@@ -363,7 +359,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
     while (hasNext) {
       if (j == index) {
         cb += elem
-        next
+        next()
       } else cb += next
       j += 1
     }
@@ -438,7 +434,7 @@ self =>
   class Taken(taken: Int) extends IterableSplitter[T] {
     var remaining = taken min self.remaining
     def hasNext = remaining > 0
-    def next = { remaining -= 1; self.next }
+    def next = { remaining -= 1; self.next() }
     def dup: IterableSplitter[T] = self.dup.take(taken)
     def split: Seq[IterableSplitter[T]] = takeSeq(self.split) { (p, n) => p.take(n) }
     protected[this] def takeSeq[PI <: IterableSplitter[T]](sq: Seq[PI])(taker: (PI, Int) => PI) = {
@@ -466,7 +462,7 @@ self =>
   class Mapped[S](f: T => S) extends IterableSplitter[S] {
     signalDelegate = self.signalDelegate
     def hasNext = self.hasNext
-    def next = f(self.next)
+    def next = f(self.next())
     def remaining = self.remaining
     def dup: IterableSplitter[S] = self.dup map f
     def split: Seq[IterableSplitter[S]] = self.split.map { _ map f }
@@ -483,8 +479,8 @@ self =>
     } else false
     def next = if (curr eq self) {
       hasNext
-      curr.next
-    } else curr.next
+      curr.next()
+    } else curr.next()
     def remaining = if (curr eq self) curr.remaining + that.remaining else curr.remaining
     protected def firstNonEmpty = (curr eq self) && curr.hasNext
     def dup: IterableSplitter[U] = self.dup.appendParIterable[U, PI](that)
@@ -496,7 +492,7 @@ self =>
   class Zipped[S](protected val that: SeqSplitter[S]) extends IterableSplitter[(T, S)] {
     signalDelegate = self.signalDelegate
     def hasNext = self.hasNext && that.hasNext
-    def next = (self.next, that.next)
+    def next = (self.next(), that.next())
     def remaining = self.remaining min that.remaining
     def dup: IterableSplitter[(T, S)] = self.dup.zipParSeq(that)
     def split: Seq[IterableSplitter[(T, S)]] = {
@@ -514,9 +510,10 @@ self =>
     signalDelegate = self.signalDelegate
     def hasNext = self.hasNext || that.hasNext
     def next = if (self.hasNext) {
-      if (that.hasNext) (self.next, that.next)
-      else (self.next, thatelem)
-    } else (thiselem, that.next);
+      if (that.hasNext) (self.next(), that.next())
+      else (self.next(), thatelem)
+    } else (thiselem, that.next())
+
     def remaining = self.remaining max that.remaining
     def dup: IterableSplitter[(U, S)] = self.dup.zipAllParSeq(that, thiselem, thatelem)
     def split: Seq[IterableSplitter[(U, S)]] = {
@@ -530,10 +527,8 @@ self =>
   }
 
   def zipAllParSeq[S, U >: T, R >: S](that: SeqSplitter[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem)
-
 }
 
-
 /** Parallel sequence iterators allow splitting into arbitrary subsets.
  *
  *  @tparam T          type of the elements iterated.
@@ -605,7 +600,7 @@ self =>
         } else Seq(sz)
       }
       val (selfszfrom, thatszfrom) = splitsizes.zip(szcum.init).span(_._2 < selfrem)
-      val (selfsizes, thatsizes) = (selfszfrom map { _._1 }, thatszfrom map { _._1 });
+      val (selfsizes, thatsizes) = (selfszfrom map { _._1 }, thatszfrom map { _._1 })
 
       // split iterators
       val selfs = self.psplit(selfsizes: _*)
@@ -674,37 +669,3 @@ self =>
   def patchParSeq[U >: T](from: Int, patchElems: SeqSplitter[U], replaced: Int) = new Patched(from, patchElems, replaced)
 
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
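
The collect2combiner change above replaces the isDefinedAt/apply pair with PartialFunction.runWith, so each element is pattern-matched only once. A small standalone sketch (demo object name invented) of the same idiom:

    import scala.collection.mutable.ArrayBuffer

    object RunWithDemo {
      def main(args: Array[String]): Unit = {
        val pf: PartialFunction[Int, Int] = { case x if x % 2 == 0 => x * 10 }
        val buf = ArrayBuffer[Int]()
        // Equivalent to `if (pf.isDefinedAt(x)) buf += pf(x)`, but the match
        // is evaluated once per element -- the optimization used in the hunk above.
        val runWith = pf.runWith(buf += _)
        (1 to 6).foreach(runWith)
        println(buf)  // ArrayBuffer(20, 40, 60)
      }
    }
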
diff --git a/src/library/scala/collection/parallel/Splitter.scala b/src/library/scala/collection/parallel/Splitter.scala
index dc49bcf..8329f15 100644
--- a/src/library/scala/collection/parallel/Splitter.scala
+++ b/src/library/scala/collection/parallel/Splitter.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel
+package scala
+package collection.parallel
 
 import scala.collection.{ Seq, Iterator }
 
@@ -52,7 +53,7 @@ trait Splitter[+T] extends Iterator[T] {
 object Splitter {
   def empty[T]: Splitter[T] = new Splitter[T] {
     def hasNext = false
-    def next = Iterator.empty.next
+    def next = Iterator.empty.next()
     def split = Seq(this)
   }
 }
diff --git a/src/library/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala
index 9bed5be..9064018 100644
--- a/src/library/scala/collection/parallel/TaskSupport.scala
+++ b/src/library/scala/collection/parallel/TaskSupport.scala
@@ -6,39 +6,32 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection.parallel
-
-
+package scala
+package collection.parallel
 
 import java.util.concurrent.ThreadPoolExecutor
 import scala.concurrent.forkjoin.ForkJoinPool
 import scala.concurrent.ExecutionContext
 
-
-
-/** A trait implementing the scheduling of
- *  a parallel collection operation.
+/** A trait implementing the scheduling of a parallel collection operation.
  *
  *  Parallel collections are modular in the way operations are scheduled. Each
  *  parallel collection is parametrized with a task support object which is
  *  responsible for scheduling and load-balancing tasks to processors.
- *  
+ *
  *  A task support object can be changed in a parallel collection after it has
  *  been created, but only during a quiescent period, i.e. while there are no
  *  concurrent invocations to parallel collection methods.
  *
  *  There are currently a few task support implementations available for
  *  parallel collections. The [[scala.collection.parallel.ForkJoinTaskSupport]]
- *  uses a fork-join pool
- *  internally and is used by default on JVM 1.6 or greater. The less efficient
- *  [[scala.collection.parallel.ThreadPoolTaskSupport]] is a fallback for JVM
- *  1.5 and JVMs that do not support the fork join pools. The
- *  [[scala.collection.parallel.ExecutionContextTaskSupport]] uses the
+ *  uses a fork-join pool internally.
+ *
+ *  The [[scala.collection.parallel.ExecutionContextTaskSupport]] uses the
  *  default execution context implementation found in scala.concurrent, and it
- *  reuses the thread pool used in scala.concurrent (this is either a fork join
- *  pool or a thread pool executor, depending on the JVM version). The
- *  execution context task support is set to each parallel collection by
+ *  reuses the thread pool used in scala.concurrent.
+ *
+ *  The execution context task support is set to each parallel collection by
  *  default, so parallel collections reuse the same fork-join pool as the
  *  future API.
  *
@@ -56,7 +49,6 @@ import scala.concurrent.ExecutionContext
  */
 trait TaskSupport extends Tasks
 
-
 /** A task support that uses a fork join pool to schedule tasks.
  *
  *  @see [[scala.collection.parallel.TaskSupport]] for more information.
@@ -68,17 +60,17 @@ extends TaskSupport with AdaptiveWorkStealingForkJoinTasks
  *
  *  @see [[scala.collection.parallel.TaskSupport]] for more information.
  */
+ at deprecated("Use `ForkJoinTaskSupport` instead.", "2.11.0")
 class ThreadPoolTaskSupport(val environment: ThreadPoolExecutor = ThreadPoolTasks.defaultThreadPool)
 extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks
 
-
 /** A task support that uses an execution context to schedule tasks.
- *  
+ *
  *  It can be used with the default execution context implementation in the
  *  `scala.concurrent` package. It internally forwards the call to either a
  *  forkjoin based task support or a thread pool executor one, depending on
  *  what the execution context uses.
- *  
+ *
  *  By default, parallel collections are parametrized with this task support
  *  object, so parallel collections share the same execution context backend
  *  as the rest of the `scala.concurrent` package.
@@ -87,22 +79,3 @@ extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks
  */
 class ExecutionContextTaskSupport(val environment: ExecutionContext = scala.concurrent.ExecutionContext.global)
 extends TaskSupport with ExecutionContextTasks
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
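
A short sketch of how a task support object is swapped on a parallel collection, in line with the scaladoc above (the demo object name and pool size are illustrative):

    import scala.collection.parallel.ForkJoinTaskSupport
    import scala.concurrent.forkjoin.ForkJoinPool

    object TaskSupportDemo {
      def main(args: Array[String]): Unit = {
        val pc = (1 to 1000).par
        // Replace the default ExecutionContextTaskSupport with a dedicated
        // fork-join pool of 4 workers, for this collection only.
        pc.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(4))
        println(pc.map(_ * 2).sum)  // 1001000
      }
    }
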
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index cec9e29..fcf0dff 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -6,20 +6,15 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel
-
-
+package scala
+package collection.parallel
 
 import java.util.concurrent.ThreadPoolExecutor
-
 import scala.concurrent.forkjoin._
 import scala.concurrent.ExecutionContext
 import scala.util.control.Breaks._
-
 import scala.annotation.unchecked.uncheckedVariance
 
-
-
 trait Task[R, +Tp] {
   type Result = R
 
@@ -54,38 +49,30 @@ trait Task[R, +Tp] {
         leaf(lastres)
         result = result // ensure that effects of `leaf` are visible to readers of `result`
       } catchBreak {
-        signalAbort
+        signalAbort()
       }
     } catch {
-      case thr: Exception =>
+      case thr: Throwable =>
         result = result // ensure that effects of `leaf` are visible
       throwable = thr
-      signalAbort
+      signalAbort()
     }
   }
 
   private[parallel] def tryMerge(t: Tp @uncheckedVariance) {
     val that = t.asInstanceOf[Task[R, Tp]]
-    val local = result // ensure that any effects of modifying `result` are detected
-    // checkMerge(that)
     if (this.throwable == null && that.throwable == null) merge(t)
     mergeThrowables(that)
   }
 
-  private def checkMerge(that: Task[R, Tp] @uncheckedVariance) {
-    if (this.throwable == null && that.throwable == null && (this.result == null || that.result == null)) {
-      println("This: " + this + ", thr=" + this.throwable + "; merged with " + that + ", thr=" + that.throwable)
-    } else if (this.throwable != null || that.throwable != null) {
-      println("merging this: " + this + " with thr: " + this.throwable + " with " + that + ", thr=" + that.throwable)
-    }
-  }
-
   private[parallel] def mergeThrowables(that: Task[_, _]) {
-    if (this.throwable != null && that.throwable != null) {
-      // merge exceptions, since there were multiple exceptions
-      this.throwable = this.throwable alongWith that.throwable
-    } else if (that.throwable != null) this.throwable = that.throwable
-      else this.throwable = this.throwable
+    // TODO: As soon as we target Java >= 7, use Throwable#addSuppressed
+    // to pass additional Throwables to the caller, e. g.
+    // if (this.throwable != null && that.throwable != null)
+    //   this.throwable.addSuppressed(that.throwable)
+    // For now, we just use whatever Throwable comes across “first”.
+    if (this.throwable == null && that.throwable != null)
+      this.throwable = that.throwable
   }
 
   // override in concrete task implementations to signal abort to other tasks
@@ -176,7 +163,6 @@ trait AdaptiveWorkStealingTasks extends Tasks {
 
       while (last.next != null) {
         // val lastresult = Option(last.body.result)
-        val beforelast = last
         last = last.next
         if (last.tryCancel()) {
           // println("Done with " + beforelast.body + ", next direct is " + last.body)
@@ -202,7 +188,7 @@ trait AdaptiveWorkStealingTasks extends Tasks {
           last = t
           t.start()
         }
-      } while (head.body.shouldSplitFurther);
+      } while (head.body.shouldSplitFurther)
       head.next = last
       head
     }
@@ -225,6 +211,7 @@ trait AdaptiveWorkStealingTasks extends Tasks {
 
 
 /** An implementation of tasks objects based on the Java thread pooling API. */
+ at deprecated("Use `ForkJoinTasks` instead.", "2.11.0")
 trait ThreadPoolTasks extends Tasks {
   import java.util.concurrent._
 
@@ -313,7 +300,7 @@ trait ThreadPoolTasks extends Tasks {
 
     () => {
       t.sync()
-      t.body.forwardThrowable
+      t.body.forwardThrowable()
       t.body.result
     }
   }
@@ -325,7 +312,7 @@ trait ThreadPoolTasks extends Tasks {
     t.start()
 
     t.sync()
-    t.body.forwardThrowable
+    t.body.forwardThrowable()
     t.body.result
   }
 
@@ -333,6 +320,7 @@ trait ThreadPoolTasks extends Tasks {
 
 }
 
+ at deprecated("Use `ForkJoinTasks` instead.", "2.11.0")
 object ThreadPoolTasks {
   import java.util.concurrent._
 
@@ -357,60 +345,6 @@ object ThreadPoolTasks {
   )
 }
 
-
-/** An implementation of tasks objects based on the Java thread pooling API and synchronization using futures. */
- at deprecated("This implementation is not used.", "2.10.0")
-trait FutureThreadPoolTasks extends Tasks {
-  import java.util.concurrent._
-
-  trait WrappedTask[R, +Tp] extends Runnable with super.WrappedTask[R, Tp] {
-    @volatile var future: Future[_] = null
-
-    def start() = {
-      executor.synchronized {
-        future = executor.submit(this)
-      }
-    }
-    def sync() = future.get
-    def tryCancel = false
-    def run = {
-      compute()
-    }
-  }
-
-  protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp]
-
-  val environment: AnyRef = FutureThreadPoolTasks.defaultThreadPool
-  def executor = environment.asInstanceOf[ThreadPoolExecutor]
-
-  def execute[R, Tp](task: Task[R, Tp]): () => R = {
-    val t = newWrappedTask(task)
-
-    // debuglog("-----------> Executing without wait: " + task)
-    t.start
-
-    () => {
-      t.sync
-      t.body.forwardThrowable
-      t.body.result
-    }
-  }
-
-  def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = {
-    val t = newWrappedTask(task)
-
-    // debuglog("-----------> Executing with wait: " + task)
-    t.start
-
-    t.sync
-    t.body.forwardThrowable
-    t.body.result
-  }
-
-  def parallelismLevel = FutureThreadPoolTasks.numCores
-
-}
-
 object FutureThreadPoolTasks {
   import java.util.concurrent._
 
@@ -467,8 +401,8 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool {
     }
 
     () => {
-      fjtask.sync
-      fjtask.body.forwardThrowable
+      fjtask.sync()
+      fjtask.body.forwardThrowable()
       fjtask.body.result
     }
   }
@@ -489,24 +423,19 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool {
       forkJoinPool.execute(fjtask)
     }
 
-    fjtask.sync
+    fjtask.sync()
     // if (fjtask.body.throwable != null) println("throwing: " + fjtask.body.throwable + " at " + fjtask.body)
-    fjtask.body.forwardThrowable
+    fjtask.body.forwardThrowable()
     fjtask.body.result
   }
 
   def parallelismLevel = forkJoinPool.getParallelism
-
 }
 
-
 object ForkJoinTasks {
-  val defaultForkJoinPool: ForkJoinPool = new ForkJoinPool() // scala.parallel.forkjoinpool
-  // defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors)
-  // defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors)
+  lazy val defaultForkJoinPool: ForkJoinPool = new ForkJoinPool()
 }
 
-
 /* Some boilerplate due to no deep mixin composition. Not sure if it can be done differently without them.
  */
 trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkStealingTasks {
@@ -517,10 +446,9 @@ trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkS
   }
 
   def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b)
-
 }
 
-
+ at deprecated("Use `AdaptiveWorkStealingForkJoinTasks` instead.", "2.11.0")
 trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveWorkStealingTasks {
 
   class WrappedTask[R, Tp](val body: Task[R, Tp])
@@ -529,24 +457,100 @@ trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveW
   }
 
   def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b)
-
 }
 
+/** An implementation of the `Tasks` that uses Scala `Future`s to compute
+ *  the work encapsulated in each task.
+ */
+private[parallel] final class FutureTasks(executor: ExecutionContext) extends Tasks {
+  import scala.concurrent._
+  import scala.util._
 
-trait ExecutionContextTasks extends Tasks {
+  private val maxdepth = (math.log(parallelismLevel) / math.log(2) + 1).toInt
+
+  val environment: ExecutionContext = executor
+
+  /** Divides this task into a lot of small tasks and executes them asynchronously
+   *  using futures.
+   *  Folds the futures and merges them asynchronously.
+   */
+  private def exec[R, Tp](topLevelTask: Task[R, Tp]): Future[R] = {
+    implicit val ec = environment
+
+    /** Constructs a tree of futures where tasks can be reasonably split.
+     */
+    def compute(task: Task[R, Tp], depth: Int): Future[Task[R, Tp]] = {
+      if (task.shouldSplitFurther && depth < maxdepth) {
+        val subtasks = task.split
+        val subfutures = for (subtask <- subtasks.iterator) yield compute(subtask, depth + 1)
+        subfutures.reduceLeft { (firstFuture, nextFuture) =>
+          for {
+            firstTask <- firstFuture
+            nextTask <- nextFuture
+          } yield {
+            firstTask tryMerge nextTask.repr
+            firstTask
+          }
+        } andThen {
+          case Success(firstTask) =>
+            task.throwable = firstTask.throwable
+            task.result = firstTask.result
+          case Failure(exception) =>
+            task.throwable = exception
+        }
+      } else Future {
+        task.tryLeaf(None)
+        task
+      }
+    }
+
+    compute(topLevelTask, 0) map { t =>
+      t.forwardThrowable()
+      t.result
+    }
+  }
+
+  def execute[R, Tp](task: Task[R, Tp]): () => R = {
+    val future = exec(task)
+    val callback = () => {
+      Await.result(future, scala.concurrent.duration.Duration.Inf)
+    }
+    callback
+  }
+
+  def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = {
+    execute(task)()
+  }
+
+  def parallelismLevel = Runtime.getRuntime.availableProcessors
+}
 
+/** This tasks implementation uses execution contexts to spawn a parallel computation.
+ *  
+ *  As an optimization, it internally checks whether the execution context is the
+ *  standard implementation based on fork/join pools, and if it is, creates a
+ *  `ForkJoinTaskSupport` that shares the same pool to forward its request to it.
+ *
+ *  Otherwise, it uses an execution context exclusive `Tasks` implementation to
+ *  divide the tasks into smaller chunks and execute operations on it.
+ */
+trait ExecutionContextTasks extends Tasks {
   def executionContext = environment
 
   val environment: ExecutionContext
 
-  // this part is a hack which allows switching
-  val driver: Tasks = executionContext match {
+  /** A driver serves as a target for this proxy `Tasks` object.
+   *  
+   *  If the execution context has the standard implementation and uses fork/join pools,
+   *  the driver is `ForkJoinTaskSupport` with the same pool, as an optimization.
+   *  Otherwise, the driver will be a Scala `Future`-based implementation.
+   */
+  private val driver: Tasks = executionContext match {
     case eci: scala.concurrent.impl.ExecutionContextImpl => eci.executor match {
       case fjp: ForkJoinPool => new ForkJoinTaskSupport(fjp)
-      case tpe: ThreadPoolExecutor => new ThreadPoolTaskSupport(tpe)
-      case _ => ???
+      case _ => new FutureTasks(environment)
     }
-    case _ => ???
+    case _ => new FutureTasks(environment)
   }
 
   def execute[R, Tp](task: Task[R, Tp]): () => R = driver execute task
@@ -554,16 +558,4 @@ trait ExecutionContextTasks extends Tasks {
   def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = driver executeAndWaitResult task
 
   def parallelismLevel = driver.parallelismLevel
-
 }
-
-
-
-
-
-
-
-
-
-
-
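
A hedged sketch of the driver selection added above: when the execution context is not backed by a ForkJoinPool, ExecutionContextTasks falls through to the new FutureTasks implementation (demo object name and pool sizes are made up for illustration):

    import java.util.concurrent.Executors
    import scala.concurrent.ExecutionContext
    import scala.collection.parallel.ExecutionContextTaskSupport

    object ExecutionContextTasksDemo {
      def main(args: Array[String]): Unit = {
        // A plain thread-pool ExecutionContext: the match in ExecutionContextTasks
        // above selects FutureTasks rather than ForkJoinTaskSupport.
        val ec = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(2))
        val pc = (1 to 100).par
        pc.tasksupport = new ExecutionContextTaskSupport(ec)
        println(pc.map(_ + 1).sum)  // 5150
        ec.shutdown()
      }
    }
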
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index b25230b..06455ba 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel.immutable
-
-
+package scala
+package collection.parallel.immutable
 
 import scala.collection.parallel.ParMapLike
 import scala.collection.parallel.Combiner
@@ -23,8 +22,6 @@ import scala.collection.immutable.{ HashMap, TrieIterator }
 import scala.annotation.unchecked.uncheckedVariance
 import scala.collection.parallel.Task
 
-
-
 /** Immutable parallel hash map, based on hash tries.
  *
  *  $paralleliterableinfo
@@ -109,7 +106,7 @@ self =>
     }
     def next(): (K, V) = {
       i += 1
-      val r = triter.next
+      val r = triter.next()
       r
     }
     def hasNext: Boolean = {
@@ -135,10 +132,8 @@ self =>
         println("other kind of node")
     }
   }
-
 }
 
-
 /** $factoryInfo
  *  @define Coll `immutable.ParHashMap`
  *  @define coll immutable parallel hash map
@@ -157,7 +152,6 @@ object ParHashMap extends ParMapFactory[ParHashMap] {
   var totalcombines = new java.util.concurrent.atomic.AtomicInteger(0)
 }
 
-
 private[parallel] abstract class HashMapCombiner[K, V]
 extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) {
 //self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
@@ -330,30 +324,11 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
     }
     def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
   }
-
 }
 
-
 private[parallel] object HashMapCombiner {
   def apply[K, V] = new HashMapCombiner[K, V] {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
 
   private[immutable] val rootbits = 5
   private[immutable] val rootsize = 1 << 5
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
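
For orientation, a minimal usage sketch (not part of this commit): an immutable HashMap converted with .par becomes a ParHashMap that wraps the existing hash trie, and traversal goes through the ParHashMapIterator shown above.

    import scala.collection.immutable.HashMap

    object ImmutableParHashMapDemo {
      def main(args: Array[String]): Unit = {
        val m = HashMap((1 to 100).map(i => i -> i.toString): _*)
        val pm = m.par  // wraps the trie; no element copying
        println(pm.map { case (k, v) => k -> (v + "!") }.size)  // 100
      }
    }
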
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index e7e64eb..65a6324 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel.immutable
+package scala
+package collection.parallel.immutable
 
 
 
@@ -106,7 +107,7 @@ self =>
     }
     def next(): T = {
       i += 1
-      triter.next
+      triter.next()
     }
     def hasNext: Boolean = {
       i < sz
diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala
index 142f07f..417622f 100644
--- a/src/library/scala/collection/parallel/immutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala
@@ -6,17 +6,13 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
+package scala
+package collection
 package parallel.immutable
 
-
 import scala.collection.generic._
-
 import scala.collection.parallel.ParIterableLike
 import scala.collection.parallel.Combiner
-import scala.collection.GenIterable
-
 
 /** A template trait for immutable parallel iterable collections.
  *
@@ -30,22 +26,18 @@ import scala.collection.GenIterable
  *  @since 2.9
  */
 trait ParIterable[+T]
-extends scala.collection/*.immutable*/.GenIterable[T]
+extends scala.collection.GenIterable[T]
    with scala.collection.parallel.ParIterable[T]
    with GenericParTemplate[T, ParIterable]
    with ParIterableLike[T, ParIterable[T], scala.collection.immutable.Iterable[T]]
    with Immutable
 {
   override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
-
   // if `immutable.ParIterableLike` is introduced, please move these 4 methods there
   override def toIterable: ParIterable[T] = this
-
   override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T])
-
 }
 
-
 /** $factoryInfo
  */
 object ParIterable extends ParFactory[ParIterable] {
@@ -53,21 +45,5 @@ object ParIterable extends ParFactory[ParIterable] {
     new GenericCanCombineFrom[T]
 
   def newBuilder[T]: Combiner[T, ParIterable[T]] = ParVector.newBuilder[T]
-
   def newCombiner[T]: Combiner[T, ParIterable[T]] = ParVector.newCombiner[T]
-
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index e904a76..2956c2a 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package parallel.immutable
 
 import scala.collection.generic.ParMapFactory
@@ -62,7 +63,7 @@ self =>
    *
    *  Invoking transformer methods (e.g. `map`) will not preserve the default value.
    *
-   *  @param d     the function mapping keys to values, used for non-present keys
+   *  @param d     default value used for non-present keys
    *  @return      a wrapper of the map with a default value
    */
   def withDefaultValue[U >: V](d: U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, x => d)
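
A brief illustrative sketch of the behaviour documented above (demo object name invented):

    import scala.collection.parallel.immutable.ParMap

    object WithDefaultValueDemo {
      def main(args: Array[String]): Unit = {
        val m = ParMap("a" -> 1, "b" -> 2).withDefaultValue(0)
        println(m("a"))        // 1
        println(m("missing"))  // 0 -- the default is used for absent keys
        // Per the scaladoc above, transformer methods such as `map`
        // do not preserve the default value.
      }
    }
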
diff --git a/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled b/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled
deleted file mode 100644
index 5f9c9c3..0000000
--- a/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled
+++ /dev/null
@@ -1,128 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.collection.parallel.immutable
-
-
-
-import scala.collection.immutable.NumericRange
-import scala.collection.parallel.Combiner
-import scala.collection.generic.CanCombineFrom
-import scala.collection.parallel.ParIterableIterator
-
-
-
-/** Parallel ranges for numeric types.
- *  
- *  $paralleliterableinfo
- *  
- *  $sideeffects
- *  
- *  @param range    the sequential range this parallel range was obtained from
- *  
- *  @author Aleksandar Prokopec
- *  @since 2.9
- *  
- *  @define Coll `immutable.ParRange`
- *  @define coll immutable parallel range
- */
-@SerialVersionUID(1L)
-class ParNumericRange[T](val range: NumericRange[T])(implicit num: Integral[T])
-extends ParSeq[T]
-   with Serializable
-{
-self =>
-  
-  def seq = range
-  
-  @inline final def length = range.length
-  
-  @inline final def apply(idx: Int) = range.apply(idx);
-  
-  def parallelIterator = new ParNumericRangeIterator with SCPI
-  
-  type SCPI = SignalContextPassingIterator[ParNumericRangeIterator]
-  
-  class ParNumericRangeIterator(range: NumericRange[T] = self.range, num: Integral[T] = self.num)
-  extends ParIterator {
-  me: SignalContextPassingIterator[ParNumericRangeIterator] =>
-    override def toString = "ParNumericRangeIterator(over: " + range + ")"
-    private var ind = 0
-    private val len = range.length
-    
-    final def remaining = len - ind
-    
-    final def hasNext = ind < len
-    
-    final def next = if (hasNext) {
-      val r = range.apply(ind)
-      ind += 1
-      r
-    } else Iterator.empty.next
-    
-    private def rangeleft: NumericRange[T] = range.drop(ind)
-    
-    def dup = new ParNumericRangeIterator(rangeleft) with SCPI
-    
-    def split = {
-      val rleft = rangeleft
-      val elemleft = rleft.length
-      if (elemleft < 2) Seq(new ParNumericRangeIterator(rleft) with SCPI)
-      else Seq(
-        new ParNumericRangeIterator(rleft.take(elemleft / 2)) with SCPI,
-        new ParNumericRangeIterator(rleft.drop(elemleft / 2)) with SCPI
-      )
-    }
-    
-    def psplit(sizes: Int*) = {
-      var rleft = rangeleft
-      for (sz <- sizes) yield {
-        val fronttaken = rleft.take(sz)
-        rleft = rleft.drop(sz)
-        new ParNumericRangeIterator(fronttaken) with SCPI
-      }
-    }
-    
-    /* accessors */
-    
-    override def foreach[U](f: T => U): Unit = {
-      rangeleft.foreach(f)
-      ind = len
-    }
-    
-    override def reduce[U >: T](op: (U, U) => U): U = {
-      val r = rangeleft.reduceLeft(op)
-      ind = len
-      r
-    }
-    
-    /* transformers */
-    
-    override def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
-      while (hasNext) {
-        cb += f(next)
-      }
-      cb
-    }
-  }
-  
-}
-
-
-object ParNumericRange {
-  def apply[T](start: T, end: T, step: T, inclusive: Boolean)(implicit num: Integral[T]) = new ParNumericRange[T](
-    if (inclusive) NumericRange.inclusive(start, end, step)(num)
-    else NumericRange.apply(start, end, step)(num)
-  )
-}
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 0c9f82b..ec90de3 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -6,13 +6,13 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel.immutable
+package scala
+package collection.parallel.immutable
 
 import scala.collection.immutable.Range
 import scala.collection.parallel.Combiner
 import scala.collection.parallel.SeqSplitter
 import scala.collection.generic.CanCombineFrom
-import scala.collection.parallel.IterableSplitter
 import scala.collection.Iterator
 
 /** Parallel ranges.
@@ -42,7 +42,7 @@ self =>
 
   @inline final def length = range.length
 
-  @inline final def apply(idx: Int) = range.apply(idx);
+  @inline final def apply(idx: Int) = range.apply(idx)
 
   def splitter = new ParRangeIterator
 
@@ -60,7 +60,7 @@ self =>
       val r = range.apply(ind)
       ind += 1
       r
-    } else Iterator.empty.next
+    } else Iterator.empty.next()
 
     private def rangeleft = range.drop(ind)
 
@@ -107,7 +107,7 @@ self =>
       cb
     }
   }
-  
+
 }
 
 object ParRange {
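
A small usage sketch (illustrative only): a Range converted with .par is represented as a ParRange, so no elements are materialized and the ParRangeIterator above splits purely by index.

    object ParRangeDemo {
      def main(args: Array[String]): Unit = {
        val r = (0 until 1000).par
        println(r.sum)               // 499500
        println(r.count(_ % 2 == 0)) // 500
      }
    }
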
diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala
index aa19307..f0502fb 100644
--- a/src/library/scala/collection/parallel/immutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala
@@ -6,11 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
+package scala
+package collection
 package parallel.immutable
 
-
 import scala.collection.generic.GenericParTemplate
 import scala.collection.generic.GenericCompanion
 import scala.collection.generic.GenericParCompanion
@@ -18,9 +17,6 @@ import scala.collection.generic.CanCombineFrom
 import scala.collection.generic.ParFactory
 import scala.collection.parallel.ParSeqLike
 import scala.collection.parallel.Combiner
-import scala.collection.GenSeq
-
-
 
 /** An immutable variant of `ParSeq`.
  *
@@ -38,7 +34,6 @@ extends scala.collection/*.immutable*/.GenSeq[T]
   override def toSeq: ParSeq[T] = this
 }
 
-
 /** $factoryInfo
  *  @define Coll `mutable.ParSeq`
  *  @define coll mutable parallel sequence
@@ -47,9 +42,5 @@ object ParSeq extends ParFactory[ParSeq] {
   implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T]
 
   def newBuilder[T]: Combiner[T, ParSeq[T]] = ParVector.newBuilder[T]
-
   def newCombiner[T]: Combiner[T, ParSeq[T]] = ParVector.newCombiner[T]
 }
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala
index 3622377..7837d6f 100644
--- a/src/library/scala/collection/parallel/immutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSet.scala
@@ -6,10 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package parallel.immutable
 
-import scala.collection.GenSet
 import scala.collection.generic._
 import scala.collection.parallel.ParSetLike
 import scala.collection.parallel.Combiner
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index 1ee7f4a..c2c1d04 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -6,13 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
+package scala
+package collection
 package parallel.immutable
 
-
-
-
 import scala.collection.generic.{GenericParTemplate, CanCombineFrom, ParFactory}
 import scala.collection.parallel.ParSeqLike
 import scala.collection.parallel.Combiner
@@ -22,8 +19,6 @@ import immutable.Vector
 import immutable.VectorBuilder
 import immutable.VectorIterator
 
-
-
 /** Immutable parallel vectors, based on vectors.
  *
  *  $paralleliterableinfo
@@ -82,11 +77,8 @@ extends ParSeq[T]
       splitted.map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator])
     }
   }
-
 }
 
-
-
 /** $factoryInfo
  *  @define Coll `immutable.ParVector`
  *  @define coll immutable parallel vector
@@ -100,8 +92,6 @@ object ParVector extends ParFactory[ParVector] {
   def newCombiner[T]: Combiner[T, ParVector[T]] = new LazyParVectorCombiner[T] // was: with EPC[T, ParVector[T]]
 }
 
-
-
 private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[T]] {
 //self: EnvironmentPassingCombiner[T, ParVector[T]] =>
   var sz = 0
@@ -135,11 +125,4 @@ private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[
     vectors ++= that.vectors
     this
   }
-
 }
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala
index 5ca0724..8fd84ea 100644
--- a/src/library/scala/collection/parallel/immutable/package.scala
+++ b/src/library/scala/collection/parallel/immutable/package.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel
+package scala
+package collection.parallel
 
 package immutable {
   /** A (parallel) sequence consisting of `length` elements `elem`. Used in the `padTo` method.
diff --git a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
index 12b2bc5..5ab2bb8 100644
--- a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
 
 import scala.collection.generic.Growable
 import scala.collection.generic.Sizing
@@ -29,6 +30,7 @@ trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combin
   def result: To = allocateAndCopy
   def clear() = { chain.clear() }
   def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) {
+    import language.existentials // FIXME: See SI-7750
     if (other.isInstanceOf[LazyCombiner[_, _, _]]) {
       val that = other.asInstanceOf[LazyCombiner[Elem, To, Buff]]
       newLazyCombiner(chain ++= that.chain)
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 0a4f301..d0d022d 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -29,8 +29,6 @@ import scala.collection.mutable.Builder
 import scala.collection.GenTraversableOnce
 import scala.reflect.ClassTag
 
-
-
 /** Parallel sequence holding elements in a linear array.
  *
  *  `ParArray` is a parallel sequence with a predefined size. The size of the array
@@ -181,10 +179,10 @@ self =>
 
     override def fold[U >: T](z: U)(op: (U, U) => U): U = foldLeft[U](z)(op)
 
-    override def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop)
+    override def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop)
 
     override def sum[U >: T](implicit num: Numeric[U]): U = {
-      var s = sum_quick(num, arr, until, i, num.zero)
+      val s = sum_quick(num, arr, until, i, num.zero)
       i = until
       s
     }
@@ -200,7 +198,7 @@ self =>
     }
 
     override def product[U >: T](implicit num: Numeric[U]): U = {
-        var p = product_quick(num, arr, until, i, num.one)
+        val p = product_quick(num, arr, until, i, num.one)
         i = until
         p
     }
@@ -226,7 +224,7 @@ self =>
         if (all) i = nextuntil
         else {
           i = until
-          abort
+          abort()
         }
 
         if (isAborted) return false
@@ -241,7 +239,7 @@ self =>
         if (p(a(j).asInstanceOf[T])) j += 1
         else return false
       }
-      return true
+      true
     }
 
     override def exists(p: T => Boolean): Boolean = {
@@ -254,7 +252,7 @@ self =>
         some = exists_quick(p, array, nextuntil, i)
         if (some) {
           i = until
-          abort
+          abort()
         } else i = nextuntil
 
         if (isAborted) return true
@@ -269,7 +267,7 @@ self =>
         if (p(a(j).asInstanceOf[T])) return true
         else j += 1
       }
-      return false
+      false
     }
 
     override def find(p: T => Boolean): Option[T] = {
@@ -283,7 +281,7 @@ self =>
 
         if (r != None) {
           i = until
-          abort
+          abort()
         } else i = nextuntil
 
         if (isAborted) return r
@@ -298,7 +296,7 @@ self =>
         if (p(elem)) return Some(elem)
         else j += 1
       }
-      return None
+      None
     }
 
     override def drop(n: Int): ParArrayIterator = {
@@ -405,9 +403,10 @@ self =>
 
     private def collect2combiner_quick[S, That](pf: PartialFunction[T, S], a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int) {
       var j = from
+      val runWith = pf.runWith(b => cb += b)
       while (j < ntil) {
         val curr = a(j).asInstanceOf[T]
-        if (pf.isDefinedAt(curr)) cb += pf(curr)
+        runWith(curr)
         j += 1
       }
     }
@@ -432,7 +431,7 @@ self =>
     private def filter2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) {
       var j = i
       while(j < ntil) {
-        var curr = a(j).asInstanceOf[T]
+        val curr = a(j).asInstanceOf[T]
         if (pred(curr)) cb += curr
         j += 1
       }
@@ -447,7 +446,7 @@ self =>
     private def filterNot2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) {
       var j = i
       while(j < ntil) {
-        var curr = a(j).asInstanceOf[T]
+        val curr = a(j).asInstanceOf[T]
         if (!pred(curr)) cb += curr
         j += 1
       }
@@ -576,8 +575,6 @@ self =>
 
   /* operations */
 
-  private def asTask[R, Tp](t: Any) = t.asInstanceOf[Task[R, Tp]]
-
   private def buildsArray[S, That](c: Builder[S, That]) = c.isInstanceOf[ParArrayCombiner[_]]
 
   override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[ParArray[T], S, That]) = if (buildsArray(bf(repr))) {
@@ -612,7 +609,8 @@ self =>
 
   class ScanToArray[U >: T](tree: ScanTree[U], z: U, op: (U, U) => U, targetarr: Array[Any])
   extends Task[Unit, ScanToArray[U]] {
-    var result = ();
+    var result = ()
+
     def leaf(prev: Option[Unit]) = iterate(tree)
     private def iterate(tree: ScanTree[U]): Unit = tree match {
       case ScanNode(left, right) =>
@@ -648,7 +646,8 @@ self =>
   }
 
   class Map[S](f: T => S, targetarr: Array[Any], offset: Int, howmany: Int) extends Task[Unit, Map[S]] {
-    var result = ();
+    var result = ()
+
     def leaf(prev: Option[Unit]) = {
       val tarr = targetarr
       val sarr = array
@@ -701,7 +700,7 @@ object ParArray extends ParFactory[ParArray] {
 
   private def wrapOrRebuild[T](arr: AnyRef, sz: Int) = arr match {
     case arr: Array[AnyRef] => new ParArray[T](new ExposedArraySeq[T](arr, sz))
-    case _ => new ParArray[T](new ExposedArraySeq[T](runtime.ScalaRunTime.toObjectArray(arr), sz))
+    case _ => new ParArray[T](new ExposedArraySeq[T](scala.runtime.ScalaRunTime.toObjectArray(arr), sz))
   }
 
   def createFromCopy[T <: AnyRef : ClassTag](arr: Array[T]): ParArray[T] = {
@@ -719,27 +718,3 @@ object ParArray extends ParFactory[ParArray] {
   }
 
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
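
A minimal sketch (demo object name invented) of ParArray.createFromCopy, which copies the source array and hands it off through wrapOrRebuild as shown above:

    import scala.collection.parallel.mutable.ParArray

    object ParArrayDemo {
      def main(args: Array[String]): Unit = {
        // The source array is copied; wrapOrRebuild either wraps an
        // Array[AnyRef] directly or rebuilds it as an object array.
        val pa = ParArray.createFromCopy(Array("a", "b", "c"))
        println(pa.map(_.toUpperCase).mkString(", "))  // A, B, C
      }
    }
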
diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
index 8bc108a..62165ae 100644
--- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package parallel.mutable
 
 import scala.collection.parallel.IterableSplitter
@@ -27,32 +28,28 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
   extends IterableSplitter[T] with SizeMapUtils {
     import scala.collection.DebugUtils._
 
-    private var traversed = 0
-    private val itertable = table
+    private[this] var traversed = 0
+    private[this] val itertable = table
 
     if (hasNext) scan()
 
-    private def scan() {
+    private[this] def scan() {
       while (itertable(idx) eq null) {
         idx += 1
       }
     }
 
-    private def checkbounds() = if (idx >= itertable.length) {
-      throw new IndexOutOfBoundsException(idx.toString)
-    }
-
     def newIterator(index: Int, until: Int, totalsize: Int): IterableSplitter[T]
 
     def remaining = totalsize - traversed
     def hasNext = traversed < totalsize
     def next() = if (hasNext) {
-      val r = itertable(idx).asInstanceOf[T]
+      val r = entryToElem(itertable(idx))
       traversed += 1
       idx += 1
       if (hasNext) scan()
       r
-    } else Iterator.empty.next
+    } else Iterator.empty.next()
     def dup = newIterator(idx, until, totalsize)
     def split = if (remaining > 1) {
       val divpt = (until + idx) / 2
@@ -102,11 +99,5 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
       }
       count
     }
-
-    private def check() = if (table.slice(idx, until).count(_ != null) != remaining) {
-      println("Invariant broken: " + debugInformation)
-      assert(false)
-    }
   }
-
 }
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 11588e5..bb3737f 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -6,12 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection.parallel
+package scala
+package collection.parallel
 package mutable
 
-
-
 import scala.collection.generic._
 import scala.collection.mutable.DefaultEntry
 import scala.collection.mutable.HashEntry
@@ -19,8 +17,6 @@ import scala.collection.mutable.HashTable
 import scala.collection.mutable.UnrolledBuffer
 import scala.collection.parallel.Task
 
-
-
 /** A parallel hash map.
  *
  *  `ParHashMap` is a parallel map which internally keeps elements within a hash table.
@@ -97,7 +93,8 @@ self =>
 
   class ParHashMapIterator(start: Int, untilIdx: Int, totalSize: Int, e: DefaultEntry[K, V])
   extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) {
-    def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value);
+    def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value)
+
     def newIterator(idxFrom: Int, idxUntil: Int, totalSz: Int, es: DefaultEntry[K, V]) =
       new ParHashMapIterator(idxFrom, idxUntil, totalSz, es)
   }
@@ -143,10 +140,8 @@ self =>
       else ("Element " + e.key + " at " + i + " with " + elemHashCode(e.key) + " maps to " + index(elemHashCode(e.key))) :: check(e.next)
     check(table(i))
   }
-
 }
 
-
 /** $factoryInfo
  *  @define Coll `mutable.ParHashMap`
  *  @define coll parallel hash map
@@ -161,14 +156,12 @@ object ParHashMap extends ParMapFactory[ParHashMap] {
   implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParHashMap[K, V]] = new CanCombineFromMap[K, V]
 }
 
-
 private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFactor: Int)
 extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks)
    with scala.collection.mutable.HashTable.HashUtils[K]
 {
-  private var mask = ParHashMapCombiner.discriminantmask
-  private var nonmasklen = ParHashMapCombiner.nonmasklength
-  private var seedvalue = 27
+  private val nonmasklen = ParHashMapCombiner.nonmasklength
+  private val seedvalue = 27
 
   def +=(elem: (K, V)) = {
     sz += 1
@@ -232,8 +225,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
     def setSize(sz: Int) = tableSize = sz
     def insertEntry(/*block: Int, */e: DefaultEntry[K, V]) = {
       var h = index(elemHashCode(e.key))
-      // assertCorrectBlock(h, block)
-      var olde = table(h).asInstanceOf[DefaultEntry[K, V]]
+      val olde = table(h).asInstanceOf[DefaultEntry[K, V]]
 
       // check if key already exists
       var ce = olde
@@ -252,13 +244,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
         true
       } else false
     }
-    private def assertCorrectBlock(h: Int, block: Int) {
-      val blocksize = table.length / (1 << ParHashMapCombiner.discriminantbits)
-      if (!(h >= block * blocksize && h < (block + 1) * blocksize)) {
-        println("trying to put " + h + " into block no.: " + block + ", range: [" + block * blocksize + ", " + (block + 1) * blocksize + ">")
-        assert(h >= block * blocksize && h < (block + 1) * blocksize)
-      }
-    }
     protected def createNewEntry[X](key: K, x: X) = ???
   }
 
@@ -288,7 +273,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
         val chunksz = unrolled.size
         while (i < chunksz) {
           val elem = chunkarr(i)
-          // assertCorrectBlock(block, elem.key)
           if (t.insertEntry(elem)) insertcount += 1
           i += 1
         }
@@ -297,13 +281,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
       }
       insertcount
     }
-    private def assertCorrectBlock(block: Int, k: K) {
-      val hc = improve(elemHashCode(k), seedvalue)
-      if ((hc >>> nonmasklen) != block) {
-        println(hc + " goes to " + (hc >>> nonmasklen) + ", while expected block is " + block)
-        assert((hc >>> nonmasklen) == block)
-      }
-    }
     def split = {
       val fp = howmany / 2
       List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp))
@@ -313,29 +290,13 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
     }
     def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
   }
-
 }
 
-
 private[parallel] object ParHashMapCombiner {
   private[mutable] val discriminantbits = 5
   private[mutable] val numblocks = 1 << discriminantbits
-  private[mutable] val discriminantmask = ((1 << discriminantbits) - 1);
+  private[mutable] val discriminantmask = ((1 << discriminantbits) - 1)
   private[mutable] val nonmasklength = 32 - discriminantbits
 
   def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
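
As a rough illustration of the bucket layout used by the combiner above: entries are spread over numblocks = 1 << discriminantbits buckets by the top bits of the improved hash code (hc >>> nonmasklen). The sketch below is hypothetical and self-contained; its improve function is a simplified stand-in for the real HashUtils mixing function.

    import java.lang.Integer.reverseBytes

    // Hypothetical sketch: how 5 discriminant bits of an improved hash code
    // select one of 32 combiner buckets. `improve` is a simplified stand-in.
    object BucketIndexSketch {
      val discriminantbits = 5
      val numblocks        = 1 << discriminantbits  // 32 buckets
      val nonmasklength    = 32 - discriminantbits  // 27

      def improve(hcode: Int, seed: Int): Int =
        reverseBytes(hcode * 0x9e3775cd) ^ seed

      // the top 5 bits of the improved hash pick the bucket
      def bucketOf(key: Any, seed: Int = 27): Int =
        improve(key.hashCode, seed) >>> nonmasklength

      def main(args: Array[String]): Unit = {
        val buckets = (0 until 1000).map(bucketOf(_))
        assert(buckets.forall(b => b >= 0 && b < numblocks))
        println(buckets.distinct.size) // keys spread over the 32 buckets
      }
    }
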
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 57fab57..1e3d57e 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
 
 
 
@@ -60,18 +61,18 @@ extends ParSet[T]
   override def seq = new scala.collection.mutable.HashSet(hashTableContents)
 
   def +=(elem: T) = {
-    addEntry(elem)
+    addElem(elem)
     this
   }
 
   def -=(elem: T) = {
-    removeEntry(elem)
+    removeElem(elem)
     this
   }
 
   override def stringPrefix = "ParHashSet"
 
-  def contains(elem: T) = containsEntry(elem)
+  def contains(elem: T) = containsElem(elem)
 
   def splitter = new ParHashSetIterator(0, table.length, size)
 
@@ -117,23 +118,23 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
 
 
 private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFactor: Int)
-extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
+extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], AnyRef, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
 with scala.collection.mutable.FlatHashTable.HashUtils[T] {
 //self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
-  private var mask = ParHashSetCombiner.discriminantmask
-  private var nonmasklen = ParHashSetCombiner.nonmasklength
-  private var seedvalue = 27
+  private val nonmasklen = ParHashSetCombiner.nonmasklength
+  private val seedvalue = 27
 
   def +=(elem: T) = {
+    val entry = elemToEntry(elem)
     sz += 1
-    val hc = improve(elemHashCode(elem), seedvalue)
+    val hc = improve(entry.hashCode, seedvalue)
     val pos = hc >>> nonmasklen
     if (buckets(pos) eq null) {
       // initialize bucket
-      buckets(pos) = new UnrolledBuffer[Any]
+      buckets(pos) = new UnrolledBuffer[AnyRef]
     }
     // add to bucket
-    buckets(pos) += elem
+    buckets(pos) += entry
     this
   }
 
@@ -147,7 +148,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
     val table = new AddingFlatHashTable(size, tableLoadFactor, seedvalue)
     val (inserted, leftovers) = combinerTaskSupport.executeAndWaitResult(new FillBlocks(buckets, table, 0, buckets.length))
     var leftinserts = 0
-    for (elem <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, elem.asInstanceOf[T])
+    for (entry <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, entry)
     table.setSize(leftinserts + inserted)
     table.hashTableContents
   }
@@ -159,10 +160,10 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
       sizeMapInit(table.length)
       seedvalue = ParHashSetCombiner.this.seedvalue
       for {
-        buffer <- buckets;
-        if buffer ne null;
-        elem <- buffer
-      } addEntry(elem.asInstanceOf[T])
+        buffer <- buckets
+        if buffer ne null
+        entry <- buffer
+      } addEntry(entry)
     }
     tbl.hashTableContents
   }
@@ -189,12 +190,12 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
     def setSize(sz: Int) = tableSize = sz
 
     /**
-     *  The elements are added using the `insertEntry` method. This method accepts three
+     *  The elements are added using the `insertElem` method. This method accepts three
      *  arguments:
      *
      *  @param insertAt      where to add the element (set to -1 to use its hashcode)
      *  @param comesBefore   the position before which the element should be added
-     *  @param elem          the element to be added
+     *  @param newEntry      the element to be added
      *
      *  If the element is to be inserted at the position corresponding to its hash code,
      *  the table will try to add the element in such a position if possible. Collisions are resolved
@@ -206,17 +207,17 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
      *  If the element is already present in the hash table, it is not added, and this method
      *  returns 0. If the element is added, it returns 1.
      */
-    def insertEntry(insertAt: Int, comesBefore: Int, elem: T): Int = {
+    def insertEntry(insertAt: Int, comesBefore: Int, newEntry : AnyRef): Int = {
       var h = insertAt
-      if (h == -1) h = index(elemHashCode(elem))
-      var entry = table(h)
-      while (null != entry) {
-        if (entry == elem) return 0
+      if (h == -1) h = index(newEntry.hashCode)
+      var curEntry = table(h)
+      while (null != curEntry) {
+        if (curEntry == newEntry) return 0
         h = h + 1 // we *do not* do `(h + 1) % table.length` here, because we'll never overflow!!
         if (h >= comesBefore) return -1
-        entry = table(h)
+        curEntry = table(h)
       }
-      table(h) = elem.asInstanceOf[AnyRef]
+      table(h) = newEntry
 
       // this is incorrect since we set size afterwards anyway and a counter
       // like this would not even work:
@@ -233,13 +234,14 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
 
   /* tasks */
 
-  class FillBlocks(buckets: Array[UnrolledBuffer[Any]], table: AddingFlatHashTable, val offset: Int, val howmany: Int)
-  extends Task[(Int, UnrolledBuffer[Any]), FillBlocks] {
-    var result = (Int.MinValue, new UnrolledBuffer[Any]);
-    def leaf(prev: Option[(Int, UnrolledBuffer[Any])]) {
+  class FillBlocks(buckets: Array[UnrolledBuffer[AnyRef]], table: AddingFlatHashTable, val offset: Int, val howmany: Int)
+  extends Task[(Int, UnrolledBuffer[AnyRef]), FillBlocks] {
+    var result = (Int.MinValue, new UnrolledBuffer[AnyRef])
+
+    def leaf(prev: Option[(Int, UnrolledBuffer[AnyRef])]) {
       var i = offset
       var totalinserts = 0
-      var leftover = new UnrolledBuffer[Any]()
+      var leftover = new UnrolledBuffer[AnyRef]()
       while (i < (offset + howmany)) {
         val (inserted, intonextblock) = fillBlock(i, buckets(i), leftover)
         totalinserts += inserted
@@ -251,11 +253,11 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
     private val blocksize = table.tableLength >> ParHashSetCombiner.discriminantbits
     private def blockStart(block: Int) = block * blocksize
     private def nextBlockStart(block: Int) = (block + 1) * blocksize
-    private def fillBlock(block: Int, elems: UnrolledBuffer[Any], leftovers: UnrolledBuffer[Any]): (Int, UnrolledBuffer[Any]) = {
+    private def fillBlock(block: Int, elems: UnrolledBuffer[AnyRef], leftovers: UnrolledBuffer[AnyRef]): (Int, UnrolledBuffer[AnyRef]) = {
       val beforePos = nextBlockStart(block)
 
       // store the elems
-      val (elemsIn, elemsLeft) = if (elems != null) insertAll(-1, beforePos, elems) else (0, UnrolledBuffer[Any]())
+      val (elemsIn, elemsLeft) = if (elems != null) insertAll(-1, beforePos, elems) else (0, UnrolledBuffer[AnyRef]())
 
       // store the leftovers
       val (leftoversIn, leftoversLeft) = insertAll(blockStart(block), beforePos, leftovers)
@@ -263,21 +265,21 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
       // return the no. of stored elements tupled with leftovers
       (elemsIn + leftoversIn, elemsLeft concat leftoversLeft)
     }
-    private def insertAll(atPos: Int, beforePos: Int, elems: UnrolledBuffer[Any]): (Int, UnrolledBuffer[Any]) = {
-      var leftovers = new UnrolledBuffer[Any]
+    private def insertAll(atPos: Int, beforePos: Int, elems: UnrolledBuffer[AnyRef]): (Int, UnrolledBuffer[AnyRef]) = {
+      val leftovers = new UnrolledBuffer[AnyRef]
       var inserted = 0
 
       var unrolled = elems.headPtr
       var i = 0
-      var t = table
+      val t = table
       while (unrolled ne null) {
         val chunkarr = unrolled.array
         val chunksz = unrolled.size
         while (i < chunksz) {
-          val elem = chunkarr(i)
-          val res = t.insertEntry(atPos, beforePos, elem.asInstanceOf[T])
+          val entry = chunkarr(i)
+          val res = t.insertEntry(atPos, beforePos, entry)
           if (res >= 0) inserted += res
-          else leftovers += elem
+          else leftovers += entry
           i += 1
         }
         i = 0
@@ -319,7 +321,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
 private[parallel] object ParHashSetCombiner {
   private[mutable] val discriminantbits = 5
   private[mutable] val numblocks = 1 << discriminantbits
-  private[mutable] val discriminantmask = ((1 << discriminantbits) - 1);
+  private[mutable] val discriminantmask = ((1 << discriminantbits) - 1)
   private[mutable] val nonmasklength = 32 - discriminantbits
 
   def apply[T] = new ParHashSetCombiner[T](FlatHashTable.defaultLoadFactor) {} //with EnvironmentPassingCombiner[T, ParHashSet[T]]
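
The insertEntry contract of the AddingFlatHashTable above (probe forward from the hash slot, never wrap, return 1 on insert, 0 on a duplicate, -1 once the probe reaches comesBefore so the entry becomes a leftover) can be sketched in isolation. The class below is hypothetical, not the real table:

    // Hypothetical sketch of bounded linear probing; return values:
    // 1 = inserted, 0 = already present, -1 = probe reached `comesBefore`.
    object BoundedProbeSketch {
      final class BoundedProbeTable(len: Int) {
        private val table = new Array[AnyRef](len)

        def insertEntry(insertAt: Int, comesBefore: Int, newEntry: AnyRef): Int = {
          var h = if (insertAt == -1) (newEntry.hashCode & 0x7fffffff) % len else insertAt
          while (table(h) ne null) {
            if (table(h) == newEntry) return 0  // duplicate
            h += 1                              // linear probe, no wrap-around
            if (h >= comesBefore) return -1     // spilled past the block: leftover
          }
          table(h) = newEntry
          1
        }
      }

      def main(args: Array[String]): Unit = {
        val t = new BoundedProbeTable(16)
        println(Seq("a", "b", "a").map(e => t.insertEntry(-1, 16, e))) // List(1, 1, 0)
      }
    }
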
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index 66ddef6..423b891 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -6,18 +6,13 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
+package scala
+package collection
 package parallel.mutable
 
-
-
-
 import scala.collection.mutable.HashEntry
 import scala.collection.parallel.IterableSplitter
 
-
-
 /** Provides functionality for hash tables with linked list buckets,
  *  enriching the data structure by fulfilling certain requirements
  *  for their parallel construction and iteration.
@@ -110,7 +105,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec
     } else Seq(this.asInstanceOf[IterRepr])
 
     private def convertToArrayBuffer(chainhead: Entry): mutable.ArrayBuffer[T] = {
-      var buff = mutable.ArrayBuffer[Entry]()
+      val buff = mutable.ArrayBuffer[Entry]()
       var curr = chainhead
       while (curr ne null) {
         buff += curr
@@ -145,11 +140,4 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec
       c
     }
   }
-
 }
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala
index 7090c51..4659149 100644
--- a/src/library/scala/collection/parallel/mutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala
@@ -6,14 +6,12 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel.mutable
-
+package scala
+package collection
+package parallel.mutable
 
 import scala.collection.generic._
-import scala.collection.parallel.ParIterableLike
-import scala.collection.parallel.Combiner
-import scala.collection.GenIterable
-
+import scala.collection.parallel.{ ParIterableLike, Combiner }
 
 /** A template trait for mutable parallel iterable collections.
  *
@@ -26,7 +24,7 @@ import scala.collection.GenIterable
  *  @author Aleksandar Prokopec
  *  @since 2.9
  */
-trait ParIterable[T] extends scala.collection/*.mutable*/.GenIterable[T]
+trait ParIterable[T] extends scala.collection.GenIterable[T]
                         with scala.collection.parallel.ParIterable[T]
                         with GenericParTemplate[T, ParIterable]
                         with ParIterableLike[T, ParIterable[T], Iterable[T]]
@@ -45,24 +43,8 @@ trait ParIterable[T] extends scala.collection/*.mutable*/.GenIterable[T]
 /** $factoryInfo
  */
 object ParIterable extends ParFactory[ParIterable] {
-  implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] =
-    new GenericCanCombineFrom[T]
+  implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = new GenericCanCombineFrom[T]
 
   def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]
-
   def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala
index 2250a38..8110f9d 100644
--- a/src/library/scala/collection/parallel/mutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMap.scala
@@ -6,17 +6,13 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection.parallel.mutable
-
-
-
+package scala
+package collection
+package parallel.mutable
 
 import scala.collection.generic._
 import scala.collection.parallel.Combiner
 
-
-
 /** A template trait for mutable parallel maps.
  *
  *  $sideeffects
@@ -28,11 +24,11 @@ import scala.collection.parallel.Combiner
  *  @since 2.9
  */
 trait ParMap[K, V]
-extends scala.collection/*.mutable*/.GenMap[K, V]
-   with scala.collection.parallel.ParMap[K, V]
-   with /* mutable */ ParIterable[(K, V)]
+extends GenMap[K, V]
+   with parallel.ParMap[K, V]
+   with ParIterable[(K, V)]
    with GenericParMapTemplate[K, V, ParMap]
-   with /* mutable */ ParMapLike[K, V, ParMap[K, V], scala.collection.mutable.Map[K, V]]
+   with ParMapLike[K, V, ParMap[K, V], mutable.Map[K, V]]
 {
 
   protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V]
@@ -59,15 +55,12 @@ extends scala.collection/*.mutable*/.GenMap[K, V]
    *
    *  Invoking transformer methods (e.g. `map`) will not preserve the default value.
    *
-   *  @param d     the function mapping keys to values, used for non-present keys
+   *  @param d     default value used for non-present keys
    *  @return      a wrapper of the map with a default value
    */
   def withDefaultValue(d: V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d)
-
 }
 
-
-
 object ParMap extends ParMapFactory[ParMap] {
   def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V]
 
@@ -94,22 +87,3 @@ object ParMap extends ParMapFactory[ParMap] {
     override def withDefaultValue(d: V): ParMap[K, V] = new WithDefault[K, V](underlying, x => d)
   }
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
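
A small usage sketch of withDefaultValue on a concrete mutable parallel map (assuming the usual factory apply): missing keys fall back to the default, while transformer methods do not preserve it, as noted above.

    // Usage sketch (assumed standard factory apply); not from the patch itself.
    import scala.collection.parallel.mutable.ParHashMap

    object WithDefaultSketch {
      def main(args: Array[String]): Unit = {
        val counts = ParHashMap("a" -> 2).withDefaultValue(0)
        println(counts("a"))       // 2
        println(counts("missing")) // 0, supplied by the default
      }
    }
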
diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
index cdcfc59..42027f5 100644
--- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
@@ -6,19 +6,15 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel
+package scala
+package collection.parallel
 package mutable
 
-
-
 import scala.collection.generic._
-import scala.collection.mutable.Builder
 import scala.collection.mutable.Cloneable
 import scala.collection.generic.Growable
 import scala.collection.generic.Shrinkable
 
-
-
 /** A template trait for mutable parallel maps. This trait is to be mixed in
  *  with concrete parallel maps to override the representation type.
  *
@@ -53,6 +49,4 @@ extends scala.collection.GenMapLike[K, V, Repr]
   def -(key: K) = this.clone() -= key
 
   def clear(): Unit
-
 }
-
diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala
index 95a4d4a..35be266 100644
--- a/src/library/scala/collection/parallel/mutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection.parallel.mutable
-
+package scala
+package collection.parallel.mutable
 
 import scala.collection.generic.GenericParTemplate
 import scala.collection.generic.GenericCompanion
@@ -17,12 +16,6 @@ import scala.collection.generic.CanCombineFrom
 import scala.collection.generic.ParFactory
 import scala.collection.parallel.ParSeqLike
 import scala.collection.parallel.Combiner
-import scala.collection.GenSeq
-
-
-
-
-
 
 /** A mutable variant of `ParSeq`.
  *
@@ -57,18 +50,3 @@ object ParSeq extends ParFactory[ParSeq] {
 
   def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index d8f8217..9367f14 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -6,18 +6,11 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection.parallel.mutable
-
-
+package scala
+package collection.parallel.mutable
 
 import scala.collection.generic._
 import scala.collection.parallel.Combiner
-import scala.collection.GenSet
-
-
-
-
 
 /** A mutable variant of `ParSet`.
  *
@@ -51,4 +44,3 @@ object ParSet extends ParSetFactory[ParSet] {
 
   override def newCombiner[T]: Combiner[T, ParSet[T]] = ParHashSet.newCombiner
 }
-
diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
index 609888f..13af5ed 100644
--- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
@@ -6,21 +6,15 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.collection
+package scala
+package collection
 package parallel.mutable
 
-
-
-import scala.collection.mutable.Set
-import scala.collection.mutable.Builder
 import scala.collection.mutable.Cloneable
 import scala.collection.GenSetLike
 import scala.collection.generic.Growable
 import scala.collection.generic.Shrinkable
 
-
-
 /** A template trait for mutable parallel sets. This trait is mixed in with concrete
  *  parallel sets to override the representation type.
  *
@@ -54,43 +48,3 @@ self =>
 
   // note: should not override toSet
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
index 61a50a1..a1dc37c 100644
--- a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel.mutable
-
-
+package scala
+package collection.parallel.mutable
 
 import scala.collection.generic._
 import scala.collection.parallel.Combiner
@@ -23,8 +22,6 @@ import scala.collection.concurrent.INode
 import scala.collection.concurrent.TrieMap
 import scala.collection.concurrent.TrieMapIterator
 
-
-
 /** Parallel TrieMap collection.
  *
  *  It has its bulk operations parallelized, but uses the snapshot operation
@@ -116,10 +113,8 @@ extends ParMap[K, V]
     def shouldSplitFurther = howmany > 1
     override def merge(that: Size) = result = result + that.result
   }
-
 }
 
-
 private[collection] class ParTrieMapSplitter[K, V](lev: Int, ct: TrieMap[K, V], mustInit: Boolean)
 extends TrieMapIterator[K, V](lev, ct, mustInit)
    with IterableSplitter[(K, V)]
@@ -136,7 +131,7 @@ extends TrieMapIterator[K, V](lev, ct, mustInit)
   }
 
   def dup = {
-    val it = newIterator(0, ct, false)
+    val it = newIterator(0, ct, _mustInit = false)
     dupTo(it)
     it.iterated = this.iterated
     it
@@ -154,7 +149,6 @@ extends TrieMapIterator[K, V](lev, ct, mustInit)
   def remaining: Int = totalsize - iterated
 }
 
-
 /** Only used within the `ParTrieMap`. */
 private[mutable] trait ParTrieMapCombiner[K, V] extends Combiner[(K, V), ParTrieMap[K, V]] {
 
@@ -172,24 +166,11 @@ private[mutable] trait ParTrieMapCombiner[K, V] extends Combiner[(K, V), ParTrie
   }
 
   override def canBeShared = true
-
 }
 
-
 object ParTrieMap extends ParMapFactory[ParTrieMap] {
-
   def empty[K, V]: ParTrieMap[K, V] = new ParTrieMap[K, V]
-
   def newCombiner[K, V]: Combiner[(K, V), ParTrieMap[K, V]] = new ParTrieMap[K, V]
 
   implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParTrieMap[K, V]] = new CanCombineFromMap[K, V]
-
 }
-
-
-
-
-
-
-
-
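
A brief usage sketch (assumed, standard parallel-collections API): bulk operations such as filter run in parallel over a snapshot of the ParTrieMap, so the traversal is unaffected by concurrent updates.

    // Usage sketch; filter runs in parallel over a snapshot of the TrieMap.
    import scala.collection.parallel.mutable.ParTrieMap

    object ParTrieMapSketch {
      def main(args: Array[String]): Unit = {
        val m = ParTrieMap((1 to 1000).map(i => i -> i.toString): _*)
        val even = m.filter { case (k, _) => k % 2 == 0 }
        println(even.size) // 500
      }
    }
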
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
index dc31d1b..79322c8 100644
--- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
 
 
 
@@ -26,7 +27,7 @@ trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedA
   override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz)
 
   // public method with private[mutable] type ExposedArrayBuffer in parameter type; cannot be overridden.
-  def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c)
+  final def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c)
 
   def allocateAndCopy = if (chain.size > 1) {
     val arrayseq = new ArraySeq[T](size)
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index c3a3794..d1379cd 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
 
 import scala.collection.generic.Sizing
 import scala.collection.mutable.ArraySeq
@@ -19,6 +20,7 @@ import scala.collection.parallel.Combiner
 import scala.collection.parallel.Task
 import scala.reflect.ClassTag
 
+// Todo -- revisit whether inheritance is the best way to achieve this functionality
 private[mutable] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) {
   override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz
   protected override def newUnrolled = new Unrolled[T](0, new Array[T](4), null, this)
@@ -47,7 +49,7 @@ extends Combiner[T, ParArray[T]] {
   }
 
   def clear() {
-    buff.clear
+    buff.clear()
   }
 
   override def sizeHint(sz: Int) = {
@@ -69,7 +71,8 @@ extends Combiner[T, ParArray[T]] {
 
   class CopyUnrolledToArray(array: Array[Any], offset: Int, howmany: Int)
   extends Task[Unit, CopyUnrolledToArray] {
-    var result = ();
+    var result = ()
+
     def leaf(prev: Option[Unit]) = if (howmany > 0) {
       var totalleft = howmany
       val (startnode, startpos) = findStart(offset)
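
The DoublingUnrolledBuffer introduced earlier in this file doubles each successive chunk until chunks reach 10000 elements, starting from the 4-element initial array. A tiny standalone sketch of the resulting chunk-size sequence (hypothetical helper, not part of the combiner):

    // Hypothetical helper reproducing the growth policy above.
    object ChunkGrowthSketch {
      def calcNextLength(sz: Int): Int = if (sz < 10000) sz * 2 else sz

      def main(args: Array[String]): Unit = {
        println(Iterator.iterate(4)(calcNextLength).take(13).toList)
        // List(4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384)
      }
    }
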
diff --git a/src/library/scala/collection/parallel/mutable/package.scala b/src/library/scala/collection/parallel/mutable/package.scala
index 2494d09..81121d9 100644
--- a/src/library/scala/collection/parallel/mutable/package.scala
+++ b/src/library/scala/collection/parallel/mutable/package.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection.parallel
+package scala
+package collection.parallel
 
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.ArraySeq
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 988886b..91c54fa 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -41,9 +41,7 @@ package object parallel {
 
   private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString)
 
-  private[parallel] def getTaskSupport: TaskSupport =
-    if (scala.util.Properties.isJavaAtLeast("1.6")) new ForkJoinTaskSupport
-    else new ThreadPoolTaskSupport
+  private[parallel] def getTaskSupport: TaskSupport = new ExecutionContextTaskSupport
 
   val defaultTaskSupport: TaskSupport = getTaskSupport
 
@@ -55,43 +53,52 @@ package object parallel {
     c
   }
 
-  /* implicit conversions */
-
-  implicit def factory2ops[From, Elem, To](bf: CanBuildFrom[From, Elem, To]) = new FactoryOps[From, Elem, To] {
-    def isParallel = bf.isInstanceOf[Parallel]
-    def asParallel = bf.asInstanceOf[CanCombineFrom[From, Elem, To]]
-    def ifParallel[R](isbody: CanCombineFrom[From, Elem, To] => R) = new Otherwise[R] {
-      def otherwise(notbody: => R) = if (isParallel) isbody(asParallel) else notbody
-    }
-  }
-  implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] {
-    def isParallel = t.isInstanceOf[Parallel]
-    def isParIterable = t.isInstanceOf[ParIterable[_]]
-    def asParIterable = t.asInstanceOf[ParIterable[T]]
-    def isParSeq = t.isInstanceOf[ParSeq[_]]
-    def asParSeq = t.asInstanceOf[ParSeq[T]]
-    def ifParSeq[R](isbody: ParSeq[T] => R) = new Otherwise[R] {
-      def otherwise(notbody: => R) = if (isParallel) isbody(asParSeq) else notbody
-    }
-    def toParArray = if (t.isInstanceOf[ParArray[_]]) t.asInstanceOf[ParArray[T]] else {
-      val it = t.toIterator
-      val cb = mutable.ParArrayCombiner[T]()
-      while (it.hasNext) cb += it.next
-      cb.result
-    }
-  }
-  implicit def throwable2ops(self: Throwable) = new ThrowableOps {
-    def alongWith(that: Throwable) = (self, that) match {
-      case (self: CompositeThrowable, that: CompositeThrowable) => new CompositeThrowable(self.throwables ++ that.throwables)
-      case (self: CompositeThrowable, _) => new CompositeThrowable(self.throwables + that)
-      case (_, that: CompositeThrowable) => new CompositeThrowable(that.throwables + self)
-      case _ => new CompositeThrowable(Set(self, that))
+  /** Adds toParArray method to collection classes. */
+  implicit class CollectionsHaveToParArray[C, T](c: C)(implicit asGto: C => scala.collection.GenTraversableOnce[T]) {
+    def toParArray = {
+      val t = asGto(c)
+      if (t.isInstanceOf[ParArray[_]]) t.asInstanceOf[ParArray[T]]
+      else {
+        val it = t.toIterator
+        val cb = mutable.ParArrayCombiner[T]()
+        while (it.hasNext) cb += it.next
+        cb.result
+      }
     }
   }
 }
 
 
 package parallel {
+  /** Implicit conversions used in the implementation of parallel collections. */
+  private[collection] object ParallelCollectionImplicits {
+    implicit def factory2ops[From, Elem, To](bf: CanBuildFrom[From, Elem, To]) = new FactoryOps[From, Elem, To] {
+      def isParallel = bf.isInstanceOf[Parallel]
+      def asParallel = bf.asInstanceOf[CanCombineFrom[From, Elem, To]]
+      def ifParallel[R](isbody: CanCombineFrom[From, Elem, To] => R) = new Otherwise[R] {
+        def otherwise(notbody: => R) = if (isParallel) isbody(asParallel) else notbody
+      }
+    }
+    implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] {
+      def isParallel = t.isInstanceOf[Parallel]
+      def isParIterable = t.isInstanceOf[ParIterable[_]]
+      def asParIterable = t.asInstanceOf[ParIterable[T]]
+      def isParSeq = t.isInstanceOf[ParSeq[_]]
+      def asParSeq = t.asInstanceOf[ParSeq[T]]
+      def ifParSeq[R](isbody: ParSeq[T] => R) = new Otherwise[R] {
+        def otherwise(notbody: => R) = if (isParallel) isbody(asParSeq) else notbody
+      }
+    }
+    implicit def throwable2ops(self: Throwable) = new ThrowableOps {
+      def alongWith(that: Throwable) = (self, that) match {
+        case (self: CompositeThrowable, that: CompositeThrowable) => new CompositeThrowable(self.throwables ++ that.throwables)
+        case (self: CompositeThrowable, _) => new CompositeThrowable(self.throwables + that)
+        case (_, that: CompositeThrowable) => new CompositeThrowable(that.throwables + self)
+        case _ => new CompositeThrowable(Set(self, that))
+      }
+    }
+  }
+  
   trait FactoryOps[From, Elem, To] {
     trait Otherwise[R] {
       def otherwise(notbody: => R): R
@@ -113,10 +120,11 @@ package parallel {
     def isParSeq: Boolean
     def asParSeq: ParSeq[T]
     def ifParSeq[R](isbody: ParSeq[T] => R): Otherwise[R]
-    def toParArray: ParArray[T]
   }
 
+  @deprecated("This trait will be removed.", "2.11.0")
   trait ThrowableOps {
+    @deprecated("This method will be removed.", "2.11.0")
     def alongWith(that: Throwable): Throwable
   }
 
@@ -135,9 +143,8 @@ package parallel {
   }
 
   /** Composite throwable - thrown when multiple exceptions are thrown at the same time. */
-  final case class CompositeThrowable(
-    val throwables: Set[Throwable]
-  ) extends Exception(
+  @deprecated("This class will be removed.", "2.11.0")
+  final case class CompositeThrowable(throwables: Set[Throwable]) extends Exception(
     "Multiple exceptions thrown during a parallel computation: " +
       throwables.map(t => t + "\n" + t.getStackTrace.take(10).++("...").mkString("\n")).mkString("\n\n")
   )
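
A usage sketch of the new CollectionsHaveToParArray enrichment: importing the parallel package object members makes toParArray available on ordinary collections.

    // Usage sketch: the package-object members (including the new implicit
    // class) come in with a wildcard import.
    import scala.collection.parallel._

    object ToParArraySketch {
      def main(args: Array[String]): Unit = {
        val pa = List(1, 2, 3, 4).toParArray
        println(pa.map(_ * 2).sum) // 20
      }
    }
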
diff --git a/src/library/scala/collection/script/Location.scala b/src/library/scala/collection/script/Location.scala
index cd64fa2..bed74bf 100644
--- a/src/library/scala/collection/script/Location.scala
+++ b/src/library/scala/collection/script/Location.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package script
 
 /** Class `Location` describes locations in messages implemented by
@@ -17,8 +18,17 @@ package script
  *  @since   2.8
  */
 
+@deprecated("Scripting is deprecated.", "2.11.0")
 sealed abstract class Location
+
+@deprecated("Scripting is deprecated.", "2.11.0")
 case object Start extends Location
+
+@deprecated("Scripting is deprecated.", "2.11.0")
 case object End extends Location
+
+@deprecated("Scripting is deprecated.", "2.11.0")
 case object NoLo extends Location
+
+@deprecated("Scripting is deprecated.", "2.11.0")
 case class Index(n: Int) extends Location
diff --git a/src/library/scala/collection/script/Message.scala b/src/library/scala/collection/script/Message.scala
index 2ab7ea7..3fc2a0e 100644
--- a/src/library/scala/collection/script/Message.scala
+++ b/src/library/scala/collection/script/Message.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package script
 
 import mutable.ArrayBuffer
@@ -20,6 +21,7 @@ import mutable.ArrayBuffer
  *  @version 1.0, 08/07/2003
  *  @since   2.8
  */
+@deprecated("Scripting is deprecated.", "2.11.0")
 trait Message[+A]
 
 /** This observable update refers to inclusion operations that add new elements
@@ -28,6 +30,7 @@ trait Message[+A]
  *  @author  Matthias Zenger
  *  @version 1.0, 08/07/2003
  */
+@deprecated("Scripting is deprecated.", "2.11.0")
 case class Include[+A](location: Location, elem: A) extends Message[A] {
   def this(elem: A) = this(NoLo, elem)
 }
@@ -38,6 +41,7 @@ case class Include[+A](location: Location, elem: A) extends Message[A] {
  *  @author  Matthias Zenger
  *  @version 1.0, 08/07/2003
  */
+@deprecated("Scripting is deprecated.", "2.11.0")
 case class Update[+A](location: Location, elem: A) extends Message[A] {
   def this(elem: A) = this(NoLo, elem)
 }
@@ -48,6 +52,7 @@ case class Update[+A](location: Location, elem: A) extends Message[A] {
  *  @author  Matthias Zenger
  *  @version 1.0, 08/07/2003
  */
+@deprecated("Scripting is deprecated.", "2.11.0")
 case class Remove[+A](location: Location, elem: A) extends Message[A] {
   def this(elem: A) = this(NoLo, elem)
 }
@@ -57,6 +62,7 @@ case class Remove[+A](location: Location, elem: A) extends Message[A] {
  *  @author  Matthias Zenger
  *  @version 1.0, 08/07/2003
  */
+@deprecated("Scripting is deprecated.", "2.11.0")
 case class Reset[+A]() extends Message[A]
 
 /** Objects of this class represent compound messages consisting
@@ -65,11 +71,12 @@ case class Reset[+A]() extends Message[A]
  *  @author  Matthias Zenger
  *  @version 1.0, 10/05/2004
  */
+@deprecated("Scripting is deprecated.", "2.11.0")
 class Script[A] extends ArrayBuffer[Message[A]] with Message[A] {
 
   override def toString(): String = {
     var res = "Script("
-    var it = this.iterator
+    val it = this.iterator
     var i = 1
     while (it.hasNext) {
       if (i > 1)
diff --git a/src/library/scala/collection/script/Scriptable.scala b/src/library/scala/collection/script/Scriptable.scala
index ceaf19a..4db75dd 100644
--- a/src/library/scala/collection/script/Scriptable.scala
+++ b/src/library/scala/collection/script/Scriptable.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.collection
+package scala
+package collection
 package script
 
 /** Classes that mix in the `Scriptable` class allow messages to be sent to
@@ -16,6 +17,7 @@ package script
  *  @version 1.0, 09/05/2004
  *  @since   2.8
  */
+@deprecated("Scripting is deprecated.", "2.11.0")
 trait Scriptable[A] {
   /** Send a message to this scriptable object.
    */
diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala
index 88cb150..875d811 100644
--- a/src/library/scala/compat/Platform.scala
+++ b/src/library/scala/compat/Platform.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
-package scala.compat
+package scala
+package compat
 
 import java.lang.System
 
diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala
index 652a234..dff8387 100644
--- a/src/library/scala/concurrent/Awaitable.scala
+++ b/src/library/scala/concurrent/Awaitable.scala
@@ -17,7 +17,7 @@ import scala.concurrent.duration.Duration
 /**
  * An object that may eventually be completed with a result value of type `T` which may be
  * awaited using blocking methods.
- * 
+ *
  * The [[Await]] object provides methods that allow accessing the result of an `Awaitable`
  * by blocking the current thread until the `Awaitable` has been completed or a timeout has
  * occurred.
@@ -26,9 +26,9 @@ trait Awaitable[+T] {
 
   /**
    * Await the "completed" state of this `Awaitable`.
-   * 
+   *
    * '''''This method should not be called directly; use [[Await.ready]] instead.'''''
-   * 
+   *
    * @param  atMost
    *         maximum wait time, which may be negative (no waiting is done),
    *         [[scala.concurrent.duration.Duration.Inf Duration.Inf]] for unbounded waiting, or a finite positive
@@ -41,12 +41,12 @@ trait Awaitable[+T] {
   @throws(classOf[TimeoutException])
   @throws(classOf[InterruptedException])
   def ready(atMost: Duration)(implicit permit: CanAwait): this.type
-  
+
   /**
    * Await and return the result (of type `T`) of this `Awaitable`.
-   * 
+   *
    * '''''This method should not be called directly; use [[Await.result]] instead.'''''
-   * 
+   *
    * @param  atMost
    *         maximum wait time, which may be negative (no waiting is done),
    *         [[scala.concurrent.duration.Duration.Inf Duration.Inf]] for unbounded waiting, or a finite positive
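
A usage sketch of the pattern the scaladoc above prescribes: call Await.result on the future rather than invoking ready/result on the Awaitable directly. The 2-second bound is arbitrary for the example.

    // Usage sketch of the Await pattern; the timeout is arbitrary.
    import scala.concurrent.{ Await, Future }
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration._

    object AwaitSketch {
      def main(args: Array[String]): Unit = {
        val f: Future[Int] = Future(21 * 2)
        println(Await.result(f, 2.seconds)) // 42
      }
    }
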
diff --git a/src/library/scala/concurrent/BatchingExecutor.scala b/src/library/scala/concurrent/BatchingExecutor.scala
new file mode 100644
index 0000000..a0d7aae
--- /dev/null
+++ b/src/library/scala/concurrent/BatchingExecutor.scala
@@ -0,0 +1,117 @@
+/*                     __                                               *\
+**     ________ ___   / /  ___     Scala API                            **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
+**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
+** /____/\___/_/ |_/____/_/ | |                                         **
+**                          |/                                          **
+\*                                                                      */
+
+package scala.concurrent
+
+import java.util.concurrent.Executor
+import scala.annotation.tailrec
+
+/**
+ * Mixin trait for an Executor
+ * which groups multiple nested `Runnable.run()` calls
+ * into a single Runnable passed to the original
+ * Executor. This can be a useful optimization
+ * because it bypasses the original context's task
+ * queue and keeps related (nested) code on a single
+ * thread which may improve CPU affinity. However,
+ * if tasks passed to the Executor are blocking
+ * or expensive, this optimization can prevent work-stealing
+ * and make performance worse. Also, some ExecutionContext
+ * may be fast enough natively that this optimization just
+ * adds overhead.
+ * The default ExecutionContext.global is already batching
+ * or fast enough not to benefit from it; while
+ * `fromExecutor` and `fromExecutorService` do NOT add
+ * this optimization since they don't know whether the underlying
+ * executor will benefit from it.
+ * A batching executor can create deadlocks if code does
+ * not use `scala.concurrent.blocking` when it should,
+ * because tasks created within other tasks will block
+ * on the outer task completing.
+ * This executor may run tasks in any order, including LIFO order.
+ * There are no ordering guarantees.
+ *
+ * WARNING: The underlying Executor's execute-method must not execute the submitted Runnable
+ * in the calling thread synchronously. It must enqueue/handoff the Runnable.
+ */
+private[concurrent] trait BatchingExecutor extends Executor {
+
+  // invariant: if "_tasksLocal.get ne null" then we are inside BatchingRunnable.run; if it is null, we are outside
+  private val _tasksLocal = new ThreadLocal[List[Runnable]]()
+
+  private class Batch(val initial: List[Runnable]) extends Runnable with BlockContext {
+    private var parentBlockContext: BlockContext = _
+    // this method runs in the delegate ExecutionContext's thread
+    override def run(): Unit = {
+      require(_tasksLocal.get eq null)
+
+      val prevBlockContext = BlockContext.current
+      BlockContext.withBlockContext(this) {
+        try {
+          parentBlockContext = prevBlockContext
+
+          @tailrec def processBatch(batch: List[Runnable]): Unit = batch match {
+            case Nil => ()
+            case head :: tail =>
+              _tasksLocal set tail
+              try {
+                head.run()
+              } catch {
+                case t: Throwable =>
+                  // if one task throws, move the
+                  // remaining tasks to another thread
+                  // so we can throw the exception
+                  // up to the invoking executor
+                  val remaining = _tasksLocal.get
+                  _tasksLocal set Nil
+                  unbatchedExecute(new Batch(remaining)) //TODO what if this submission fails?
+                  throw t // rethrow
+              }
+              processBatch(_tasksLocal.get) // since head.run() can add entries, always do _tasksLocal.get here
+          }
+
+          processBatch(initial)
+        } finally {
+          _tasksLocal.remove()
+          parentBlockContext = null
+        }
+      }
+    }
+
+    override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = {
+      // if we know there will be blocking, we don't want to keep tasks queued up because it could deadlock.
+      {
+        val tasks = _tasksLocal.get
+        _tasksLocal set Nil
+        if ((tasks ne null) && tasks.nonEmpty)
+          unbatchedExecute(new Batch(tasks))
+      }
+
+      // now delegate the blocking to the previous BC
+      require(parentBlockContext ne null)
+      parentBlockContext.blockOn(thunk)
+    }
+  }
+
+  protected def unbatchedExecute(r: Runnable): Unit
+
+  override def execute(runnable: Runnable): Unit = {
+    if (batchable(runnable)) { // If we can batch the runnable
+      _tasksLocal.get match {
+        case null => unbatchedExecute(new Batch(List(runnable))) // If we aren't in batching mode yet, enqueue batch
+        case some => _tasksLocal.set(runnable :: some) // If we are already in batching mode, add to batch
+      }
+    } else unbatchedExecute(runnable) // If not batchable, just delegate to underlying
+  }
+
+  /** Override this to define which runnables will be batched. */
+  def batchable(runnable: Runnable): Boolean = runnable match {
+    case _: OnCompleteRunnable => true
+    case _                     => false
+  }
+}
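
A sketch of the guidance in the BatchingExecutor scaladoc above: when code running on a batching context must block, wrap the blocking call in scala.concurrent.blocking so queued tasks can be handed off instead of deadlocking. Names, values, and timeouts are arbitrary.

    // Sketch of the scaladoc's advice; values and timeouts are arbitrary.
    import scala.concurrent.{ Await, Future, blocking }
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration._

    object BlockingHintSketch {
      def main(args: Array[String]): Unit = {
        val inner = Future(1)
        val outer = Future {
          // wrap the blocking wait so the context can hand off queued tasks
          blocking { Await.result(inner, 5.seconds) } + 1
        }
        println(Await.result(outer, 5.seconds)) // 2
      }
    }
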
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index b4af161..a1e94c8 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -14,19 +14,60 @@ import scala.annotation.implicitNotFound
 import scala.util.Try
 
 /**
- * An `ExecutionContext` is an abstraction over an entity that can execute program logic.
+ * An `ExecutionContext` can execute program logic asynchronously,
+ * typically but not necessarily on a thread pool.
+ *
+ * A general purpose `ExecutionContext` must be asynchronous in executing
+ * any `Runnable` that is passed into its `execute`-method. A special purpose
+ * `ExecutionContext` may be synchronous but must only be passed to code that
+ * is explicitly safe to be run using a synchronously executing `ExecutionContext`.
+ *
+ * APIs such as `Future.onComplete` require you to provide a callback
+ * and an implicit `ExecutionContext`. The implicit `ExecutionContext`
+ * will be used to execute the callback.
+ *
+ * It is possible to simply import
+ * `scala.concurrent.ExecutionContext.Implicits.global` to obtain an
+ * implicit `ExecutionContext`. This global context is a reasonable
+ * default thread pool.
+ *
+ * However, application developers should carefully consider where they
+ * want to set policy; ideally, one place per application (or per
+ * logically-related section of code) will make a decision about
+ * which `ExecutionContext` to use. That is, you might want to avoid
+ * hardcoding `scala.concurrent.ExecutionContext.Implicits.global` all
+ * over the place in your code.
+ * One approach is to add `(implicit ec: ExecutionContext)`
+ * to methods which need an `ExecutionContext`. Then import a specific
+ * context in one place for the entire application or module,
+ * passing it implicitly to individual methods.
+ *
+ * A custom `ExecutionContext` may be appropriate to execute code
+ * which blocks on IO or performs long-running computations.
+ * `ExecutionContext.fromExecutorService` and `ExecutionContext.fromExecutor`
+ * are good ways to create a custom `ExecutionContext`.
+ *
+ * The intent of `ExecutionContext` is to lexically scope code execution.
+ * That is, each method, class, file, package, or application determines
+ * how to run its own code. This avoids issues such as running
+ * application callbacks on a thread pool belonging to a networking library.
+ * The size of a networking library's thread pool can be safely configured,
+ * knowing that only that library's network operations will be affected.
+ * Application callback execution can be configured separately.
  */
-@implicitNotFound("Cannot find an implicit ExecutionContext, either import scala.concurrent.ExecutionContext.Implicits.global or use a custom one")
+@implicitNotFound("""Cannot find an implicit ExecutionContext. You might pass
+an (implicit ec: ExecutionContext) parameter to your method
+or import scala.concurrent.ExecutionContext.Implicits.global.""")
 trait ExecutionContext {
-  
+
   /** Runs a block of code on this execution context.
    */
   def execute(runnable: Runnable): Unit
-  
+
   /** Reports that an asynchronous computation failed.
    */
-  def reportFailure(t: Throwable): Unit
-  
+  def reportFailure(@deprecatedName('t) cause: Throwable): Unit
+
   /** Prepares for the execution of a task. Returns the prepared
    *  execution context. A valid implementation of `prepare` is one
    *  that simply returns `this`.
@@ -62,7 +103,7 @@ object ExecutionContext {
      */
     implicit lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor)
   }
-    
+
   /** Creates an `ExecutionContext` from the given `ExecutorService`.
    */
   def fromExecutorService(e: ExecutorService, reporter: Throwable => Unit): ExecutionContextExecutorService =
@@ -71,7 +112,7 @@ object ExecutionContext {
   /** Creates an `ExecutionContext` from the given `ExecutorService` with the default Reporter.
    */
   def fromExecutorService(e: ExecutorService): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter)
-  
+
   /** Creates an `ExecutionContext` from the given `Executor`.
    */
   def fromExecutor(e: Executor, reporter: Throwable => Unit): ExecutionContextExecutor =
@@ -80,10 +121,10 @@ object ExecutionContext {
   /** Creates an `ExecutionContext` from the given `Executor` with the default Reporter.
    */
   def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter)
-  
+
   /** The default reporter simply prints the stack trace of the `Throwable` to System.err.
    */
-  def defaultReporter: Throwable => Unit = (t: Throwable) => t.printStackTrace()
+  def defaultReporter: Throwable => Unit = _.printStackTrace()
 }
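
A sketch of the recommended pattern from the scaladoc above: accept the ExecutionContext as an implicit parameter and pick the concrete context (here the global one, purely as an example) once, at the call site. fetchLength is a hypothetical method, not library API.

    // Sketch only; fetchLength is a hypothetical method.
    import scala.concurrent.{ Await, ExecutionContext, Future }
    import scala.concurrent.duration._

    object EcParameterSketch {
      def fetchLength(s: String)(implicit ec: ExecutionContext): Future[Int] =
        Future(s.length)

      def main(args: Array[String]): Unit = {
        // choose the context once, at the edge of the application
        import scala.concurrent.ExecutionContext.Implicits.global
        println(Await.result(fetchLength("hello"), 1.second)) // 5
      }
    }
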
 
 
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index b2c09ec..4ed0687 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -29,11 +29,11 @@ import scala.reflect.ClassTag
 
 /** The trait that represents futures.
  *
- *  Asynchronous computations that yield futures are created with the `future` call:
+ *  Asynchronous computations that yield futures are created with the `Future` call:
  *
  *  {{{
  *  val s = "Hello"
- *  val f: Future[String] = future {
+ *  val f: Future[String] = Future {
  *    s + " future!"
  *  }
  *  f onSuccess {
@@ -67,11 +67,11 @@ import scala.reflect.ClassTag
  *  Example:
  *
  *  {{{
- *  val f = future { 5 }
- *  val g = future { 3 }
+ *  val f = Future { 5 }
+ *  val g = Future { 3 }
  *  val h = for {
  *    x: Int <- f // returns Future(5)
- *    y: Int <- g // returns Future(5)
+ *    y: Int <- g // returns Future(3)
  *  } yield x + y
  *  }}}
  *
@@ -131,9 +131,9 @@ trait Future[+T] extends Awaitable[T] {
    *  $multipleCallbacks
    *  $callbackInContext
    */
-  def onFailure[U](callback: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
+  def onFailure[U](@deprecatedName('callback) pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
     case Failure(t) =>
-      callback.applyOrElse[Throwable, Any](t, Predef.conforms[Throwable]) // Exploiting the cached function to avoid MatchError
+      pf.applyOrElse[Throwable, Any](t, Predef.conforms[Throwable]) // Exploiting the cached function to avoid MatchError
     case _ =>
   }
 
@@ -146,7 +146,7 @@ trait Future[+T] extends Awaitable[T] {
    *  $multipleCallbacks
    *  $callbackInContext
    */
-  def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit
+  def onComplete[U](@deprecatedName('func) f: Try[T] => U)(implicit executor: ExecutionContext): Unit
 
 
   /* Miscellaneous */
@@ -266,16 +266,16 @@ trait Future[+T] extends Awaitable[T] {
    *
    *  Example:
    *  {{{
-   *  val f = future { 5 }
+   *  val f = Future { 5 }
    *  val g = f filter { _ % 2 == 1 }
    *  val h = f filter { _ % 2 == 0 }
    *  Await.result(g, Duration.Zero) // evaluates to 5
    *  Await.result(h, Duration.Zero) // throw a NoSuchElementException
    *  }}}
    */
-  def filter(pred: T => Boolean)(implicit executor: ExecutionContext): Future[T] =
+  def filter(@deprecatedName('pred) p: T => Boolean)(implicit executor: ExecutionContext): Future[T] =
     map {
-      r => if (pred(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied")
+      r => if (p(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied")
     }
 
   /** Used by for-comprehensions.
@@ -291,7 +291,7 @@ trait Future[+T] extends Awaitable[T] {
    *
    *  Example:
    *  {{{
-   *  val f = future { -5 }
+   *  val f = Future { -5 }
    *  val g = f collect {
    *    case x if x < 0 => -x
    *  }
@@ -314,9 +314,9 @@ trait Future[+T] extends Awaitable[T] {
    *  Example:
    *
    *  {{{
-   *  future (6 / 0) recover { case e: ArithmeticException => 0 } // result: 0
-   *  future (6 / 0) recover { case e: NotFoundException   => 0 } // result: exception
-   *  future (6 / 2) recover { case e: ArithmeticException => 0 } // result: 3
+   *  Future (6 / 0) recover { case e: ArithmeticException => 0 } // result: 0
+   *  Future (6 / 0) recover { case e: NotFoundException   => 0 } // result: exception
+   *  Future (6 / 2) recover { case e: ArithmeticException => 0 } // result: 3
    *  }}}
    */
   def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = {
@@ -334,8 +334,8 @@ trait Future[+T] extends Awaitable[T] {
    *  Example:
    *
    *  {{{
-   *  val f = future { Int.MaxValue }
-   *  future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue
+   *  val f = Future { Int.MaxValue }
+   *  Future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue
    *  }}}
    */
   def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = {
@@ -373,8 +373,8 @@ trait Future[+T] extends Awaitable[T] {
    *
    *  Example:
    *  {{{
-   *  val f = future { sys.error("failed") }
-   *  val g = future { 5 }
+   *  val f = Future { sys.error("failed") }
+   *  val g = Future { 5 }
    *  val h = f fallbackTo g
    *  Await.result(h, Duration.Zero) // evaluates to 5
    *  }}}
@@ -419,7 +419,7 @@ trait Future[+T] extends Awaitable[T] {
    *  The following example prints out `5`:
    *
    *  {{{
-   *  val f = future { 5 }
+   *  val f = Future { 5 }
    *  f andThen {
    *    case r => sys.error("runtime exception")
    *  } andThen {
@@ -473,24 +473,31 @@ object Future {
    */
   def successful[T](result: T): Future[T] = Promise.successful(result).future
 
+  /** Creates an already completed Future with the specified result or exception.
+   *
+   *  @tparam T       the type of the value in the promise
+   *  @return         the newly created `Future` object
+   */
+  def fromTry[T](result: Try[T]): Future[T] = Promise.fromTry(result).future
+
   /** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
   *
   *  The result becomes available once the asynchronous computation is completed.
   *
   *  @tparam T       the type of the result
   *  @param body     the asynchronous computation
-  *  @param execctx  the execution context on which the future is run
+  *  @param executor  the execution context on which the future is run
   *  @return         the `Future` holding the result of the computation
   */
-  def apply[T](body: =>T)(implicit execctx: ExecutionContext): Future[T] = impl.Future(body)
+  def apply[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = impl.Future(body)
 
-  /** Simple version of `Futures.traverse`. Transforms a `TraversableOnce[Future[A]]` into a `Future[TraversableOnce[A]]`.
+  /** Simple version of `Future.traverse`. Transforms a `TraversableOnce[Future[A]]` into a `Future[TraversableOnce[A]]`.
    *  Useful for reducing many `Future`s into a single `Future`.
    */
-  def sequence[A, M[_] <: TraversableOnce[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
-    in.foldLeft(Promise.successful(cbf(in)).future) {
-      (fr, fa) => for (r <- fr; a <- fa.asInstanceOf[Future[A]]) yield (r += a)
-    } map (_.result)
+  def sequence[A, M[X] <: TraversableOnce[X]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
+    in.foldLeft(successful(cbf(in))) {
+      (fr, fa) => for (r <- fr; a <- fa) yield (r += a)
+    } map (_.result())
   }
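
A usage sketch for Future.sequence as defined above: a collection of futures collapses into a single future of the collected results. The timeout is arbitrary.

    // Usage sketch; the timeout is arbitrary.
    import scala.concurrent.{ Await, Future }
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration._

    object SequenceSketch {
      def main(args: Array[String]): Unit = {
        val fs: List[Future[Int]] = List(Future(1), Future(2), Future(3))
        val all: Future[List[Int]] = Future.sequence(fs)
        println(Await.result(all, 2.seconds)) // List(1, 2, 3)
      }
    }
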
 
   /** Returns a new `Future` to the result of the first future in the list that is completed.
@@ -504,15 +511,15 @@ object Future {
 
   /** Returns a `Future` that will hold the optional result of the first `Future` with a result that matches the predicate.
    */
-  def find[T](futurestravonce: TraversableOnce[Future[T]])(predicate: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
-    val futures = futurestravonce.toBuffer
-    if (futures.isEmpty) Promise.successful[Option[T]](None).future
+  def find[T](@deprecatedName('futurestravonce) futures: TraversableOnce[Future[T]])(@deprecatedName('predicate) p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
+    val futuresBuffer = futures.toBuffer
+    if (futuresBuffer.isEmpty) successful[Option[T]](None)
     else {
       val result = Promise[Option[T]]()
-      val ref = new AtomicInteger(futures.size)
+      val ref = new AtomicInteger(futuresBuffer.size)
       val search: Try[T] => Unit = v => try {
         v match {
-          case Success(r) => if (predicate(r)) result tryComplete Success(Some(r))
+          case Success(r) if p(r) => result tryComplete Success(Some(r))
           case _ =>
         }
       } finally {
@@ -521,7 +528,7 @@ object Future {
         }
       }
 
-      futures.foreach(_ onComplete search)
+      futuresBuffer.foreach(_ onComplete search)
 
       result.future
     }
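A hedged sketch of the reworked `find` (values are illustrative):

    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global

    // Completes with the first result satisfying the predicate, or None.
    val fs = List(Future(1), Future(7), Future(3))
    val firstBig: Future[Option[Int]] = Future.find(fs)(_ > 5)   // eventually Some(7)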
@@ -537,9 +544,9 @@ object Future {
    *    val result = Await.result(Future.fold(futures)(0)(_ + _), 5 seconds)
    *  }}}
    */
-  def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(foldFun: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
-    if (futures.isEmpty) Future.successful(zero)
-    else sequence(futures).map(_.foldLeft(zero)(foldFun))
+  def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(@deprecatedName('foldFun) op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
+    if (futures.isEmpty) successful(zero)
+    else sequence(futures).map(_.foldLeft(zero)(op))
   }
 
   /** Initiates a fold over the supplied futures where the fold-zero is the result value of the `Future` that's completed first.
@@ -550,7 +557,7 @@ object Future {
    *  }}}
    */
   def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
-    if (futures.isEmpty) Future.failed(new NoSuchElementException("reduce attempted on empty collection"))
+    if (futures.isEmpty) failed(new NoSuchElementException("reduce attempted on empty collection"))
     else sequence(futures).map(_ reduceLeft op)
   }
 
@@ -562,11 +569,11 @@ object Future {
    *    val myFutureList = Future.traverse(myList)(x => Future(myFunc(x)))
    *  }}}
    */
-  def traverse[A, B, M[_] <: TraversableOnce[_]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] =
-    in.foldLeft(Promise.successful(cbf(in)).future) { (fr, a) =>
-      val fb = fn(a.asInstanceOf[A])
+  def traverse[A, B, M[X] <: TraversableOnce[X]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] =
+    in.foldLeft(successful(cbf(in))) { (fr, a) =>
+      val fb = fn(a)
       for (r <- fr; b <- fb) yield (r += b)
-    }.map(_.result)
+    }.map(_.result())
 
   // This is used to run callbacks which are internal
   // to scala.concurrent; our own callbacks are only
@@ -587,111 +594,11 @@ object Future {
   // by just not ever using it itself. scala.concurrent
   // doesn't need to create defaultExecutionContext as
   // a side effect.
-  private[concurrent] object InternalCallbackExecutor extends ExecutionContext with java.util.concurrent.Executor {
+  private[concurrent] object InternalCallbackExecutor extends ExecutionContext with BatchingExecutor {
+    override protected def unbatchedExecute(r: Runnable): Unit =
+      r.run()
     override def reportFailure(t: Throwable): Unit =
       throw new IllegalStateException("problem in scala.concurrent internal callback", t)
-
-    /**
-     * The BatchingExecutor trait had to be inlined into InternalCallbackExecutor for binary compatibility.
-     *
-     * BatchingExecutor is a trait for an Executor
-     * which groups multiple nested `Runnable.run()` calls
-     * into a single Runnable passed to the original
-     * Executor. This can be a useful optimization
-     * because it bypasses the original context's task
-     * queue and keeps related (nested) code on a single
-     * thread which may improve CPU affinity. However,
-     * if tasks passed to the Executor are blocking
-     * or expensive, this optimization can prevent work-stealing
-     * and make performance worse. Also, some ExecutionContext
-     * may be fast enough natively that this optimization just
-     * adds overhead.
-     * The default ExecutionContext.global is already batching
-     * or fast enough not to benefit from it; while
-     * `fromExecutor` and `fromExecutorService` do NOT add
-     * this optimization since they don't know whether the underlying
-     * executor will benefit from it.
-     * A batching executor can create deadlocks if code does
-     * not use `scala.concurrent.blocking` when it should,
-     * because tasks created within other tasks will block
-     * on the outer task completing.
-     * This executor may run tasks in any order, including LIFO order.
-     * There are no ordering guarantees.
-     *
-     * WARNING: The underlying Executor's execute-method must not execute the submitted Runnable
-     * in the calling thread synchronously. It must enqueue/handoff the Runnable.
-     */
-    // invariant: if "_tasksLocal.get ne null" then we are inside BatchingRunnable.run; if it is null, we are outside
-    private val _tasksLocal = new ThreadLocal[List[Runnable]]()
-
-    private class Batch(val initial: List[Runnable]) extends Runnable with BlockContext {
-      private[this] var parentBlockContext: BlockContext = _
-      // this method runs in the delegate ExecutionContext's thread
-      override def run(): Unit = {
-        require(_tasksLocal.get eq null)
-
-        val prevBlockContext = BlockContext.current
-        BlockContext.withBlockContext(this) {
-          try {
-            parentBlockContext = prevBlockContext
-
-            @tailrec def processBatch(batch: List[Runnable]): Unit = batch match {
-              case Nil => ()
-              case head :: tail =>
-                _tasksLocal set tail
-                try {
-                  head.run()
-                } catch {
-                  case t: Throwable =>
-                    // if one task throws, move the
-                    // remaining tasks to another thread
-                    // so we can throw the exception
-                    // up to the invoking executor
-                    val remaining = _tasksLocal.get
-                    _tasksLocal set Nil
-                    unbatchedExecute(new Batch(remaining)) //TODO what if this submission fails?
-                    throw t // rethrow
-                }
-                processBatch(_tasksLocal.get) // since head.run() can add entries, always do _tasksLocal.get here
-            }
-
-            processBatch(initial)
-          } finally {
-            _tasksLocal.remove()
-            parentBlockContext = null
-          }
-        }
-      }
-
-      override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = {
-        // if we know there will be blocking, we don't want to keep tasks queued up because it could deadlock.
-        {
-          val tasks = _tasksLocal.get
-          _tasksLocal set Nil
-          if ((tasks ne null) && tasks.nonEmpty)
-            unbatchedExecute(new Batch(tasks))
-        }
-
-        // now delegate the blocking to the previous BC
-        require(parentBlockContext ne null)
-        parentBlockContext.blockOn(thunk)
-      }
-    }
-
-    override def execute(runnable: Runnable): Unit = runnable match {
-      // If we can batch the runnable
-      case _: OnCompleteRunnable =>
-        _tasksLocal.get match {
-          case null => unbatchedExecute(new Batch(List(runnable))) // If we aren't in batching mode yet, enqueue batch
-          case some => _tasksLocal.set(runnable :: some) // If we are already in batching mode, add to batch
-        }
-
-      // If not batchable, just delegate to underlying
-      case _ =>
-        unbatchedExecute(runnable)
-    }
-
-    private def unbatchedExecute(r: Runnable): Unit = r.run()
   }
 }
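The removed comment above describes how nested tasks on a batching executor can deadlock when blocking code is not marked as such. A hedged sketch of the recommended pattern using `scala.concurrent.blocking` (the sleep stands in for any genuinely blocking call):

    import scala.concurrent.{ Future, blocking }
    import scala.concurrent.ExecutionContext.Implicits.global

    // Marking the blocking section lets the execution context compensate
    // (e.g. by growing the pool) instead of stalling queued callbacks.
    val f = Future {
      blocking {
        Thread.sleep(1000)   // stand-in for a blocking call
      }
      42
    }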
 
diff --git a/src/library/scala/concurrent/FutureTaskRunner.scala b/src/library/scala/concurrent/FutureTaskRunner.scala
index eeadadd..089e67c 100644
--- a/src/library/scala/concurrent/FutureTaskRunner.scala
+++ b/src/library/scala/concurrent/FutureTaskRunner.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2009-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2009-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -10,13 +10,13 @@ package scala.concurrent
 
 import scala.language.{implicitConversions, higherKinds}
 
-/** The `FutureTaskRunner</code> trait is a base trait of task runners
+/** The `FutureTaskRunner` trait is a base trait of task runners
  *  that provide some sort of future abstraction.
  *
  *  @author Philipp Haller
  */
 @deprecated("Use `ExecutionContext` instead.", "2.10.0")
-trait FutureTaskRunner extends TaskRunner {
+private[scala] trait FutureTaskRunner extends TaskRunner {
 
   /** The type of the futures that the underlying task runner supports.
    */
diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala
index d6a7c1f..3d0597c 100644
--- a/src/library/scala/concurrent/JavaConversions.scala
+++ b/src/library/scala/concurrent/JavaConversions.scala
@@ -18,38 +18,6 @@ import scala.language.implicitConversions
  */
 object JavaConversions {
 
-  @deprecated("Use `asExecutionContext` instead.", "2.10.0")
-  implicit def asTaskRunner(exec: ExecutorService): FutureTaskRunner =
-    new ThreadPoolRunner {
-      override protected def executor =
-        exec
-
-      def shutdown() =
-        exec.shutdown()
-    }
-
-  @deprecated("Use `asExecutionContext` instead.", "2.10.0")
-  implicit def asTaskRunner(exec: Executor): TaskRunner =
-    new TaskRunner {
-      type Task[T] = Runnable
-
-      implicit def functionAsTask[T](fun: () => T): Task[T] = new Runnable {
-        def run() { fun() }
-      }
-
-      def execute[S](task: Task[S]) {
-        exec.execute(task)
-      }
-
-      def managedBlock(blocker: ManagedBlocker) {
-        blocker.block()
-      }
-
-      def shutdown() {
-        // do nothing
-      }
-    }
-
   /**
    * Creates a new `ExecutionContext` which uses the provided `ExecutorService`.
    */
diff --git a/src/library/scala/concurrent/Lock.scala b/src/library/scala/concurrent/Lock.scala
index 4b81397..1c00c0e 100644
--- a/src/library/scala/concurrent/Lock.scala
+++ b/src/library/scala/concurrent/Lock.scala
@@ -14,6 +14,7 @@ package scala.concurrent
  *
  *  @author  Martin Odersky
  *  @version 1.0, 10/03/2003
+ *  @deprecated("Use java.util.concurrent.locks.Lock", "2.11.0")
  */
 class Lock {
   var available = true
diff --git a/src/library/scala/concurrent/ManagedBlocker.scala b/src/library/scala/concurrent/ManagedBlocker.scala
index 7b2966c..b5a6e21 100644
--- a/src/library/scala/concurrent/ManagedBlocker.scala
+++ b/src/library/scala/concurrent/ManagedBlocker.scala
@@ -13,7 +13,7 @@ package scala.concurrent
  *  @author Philipp Haller
  */
 @deprecated("Use `blocking` instead.", "2.10.0")
-trait ManagedBlocker {
+private[scala] trait ManagedBlocker {
 
   /**
    * Possibly blocks the current thread, for example waiting for
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
index 8355a73..eb8044e 100644
--- a/src/library/scala/concurrent/Promise.scala
+++ b/src/library/scala/concurrent/Promise.scala
@@ -70,7 +70,7 @@ trait Promise[T] {
     other onComplete { this complete _ }
     this
   }
-  
+
   /** Attempts to complete this promise with the specified future, once that future is completed.
    *
    *  @return   This promise
@@ -82,11 +82,11 @@ trait Promise[T] {
 
   /** Completes the promise with a value.
    *
-   *  @param v    The value to complete the promise with.
+   *  @param value The value to complete the promise with.
    *
    *  $promiseCompletion
    */
-  def success(v: T): this.type = complete(Success(v))
+  def success(@deprecatedName('v) value: T): this.type = complete(Success(value))
 
   /** Tries to complete the promise with a value.
    *
@@ -98,13 +98,13 @@ trait Promise[T] {
 
   /** Completes the promise with an exception.
    *
-   *  @param t        The throwable to complete the promise with.
+   *  @param cause    The throwable to complete the promise with.
    *
    *  $allowedThrowables
    *
    *  $promiseCompletion
    */
-  def failure(t: Throwable): this.type = complete(Failure(t))
+  def failure(@deprecatedName('t) cause: Throwable): this.type = complete(Failure(cause))
 
   /** Tries to complete the promise with an exception.
    *
@@ -112,41 +112,35 @@ trait Promise[T] {
    *
    *  @return    If the promise has already been completed returns `false`, or `true` otherwise.
    */
-  def tryFailure(t: Throwable): Boolean = tryComplete(Failure(t))
+  def tryFailure(@deprecatedName('t) cause: Throwable): Boolean = tryComplete(Failure(cause))
 }
 
-
-
 object Promise {
-
   /** Creates a promise object which can be completed with a value.
-   *  
+   *
    *  @tparam T       the type of the value in the promise
    *  @return         the newly created `Promise` object
    */
   def apply[T](): Promise[T] = new impl.Promise.DefaultPromise[T]()
 
   /** Creates an already completed Promise with the specified exception.
-   *  
+   *
    *  @tparam T       the type of the value in the promise
    *  @return         the newly created `Promise` object
    */
-  def failed[T](exception: Throwable): Promise[T] = new impl.Promise.KeptPromise[T](Failure(exception))
+  def failed[T](exception: Throwable): Promise[T] = fromTry(Failure(exception))
 
   /** Creates an already completed Promise with the specified result.
-   *  
+   *
    *  @tparam T       the type of the value in the promise
    *  @return         the newly created `Promise` object
    */
-  def successful[T](result: T): Promise[T] = new impl.Promise.KeptPromise[T](Success(result))
-  
-}
-
-
-
-
-
-
-
-
+  def successful[T](result: T): Promise[T] = fromTry(Success(result))
 
+  /** Creates an already completed Promise with the specified result or exception.
+   *
+   *  @tparam T       the type of the value in the promise
+   *  @return         the newly created `Promise` object
+   */
+  def fromTry[T](result: Try[T]): Promise[T] = new impl.Promise.KeptPromise[T](result)
+}
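A hedged sketch of the Promise API touched above, including the new `fromTry` (values are illustrative):

    import scala.concurrent.Promise
    import scala.util.Try

    val p = Promise[Int]()
    val f = p.future          // the read side handed to consumers
    p.success(42)             // complete exactly once; use failure(...) for errors

    // fromTry builds an already-completed promise from a Try.
    val done = Promise.fromTry(Try(21 * 2))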
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index 9ab7bcc..d5dc3d7 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -8,6 +8,8 @@
 
 package scala.concurrent
 
+import java.util.concurrent.TimeUnit
+
 /** A class to provide safe concurrent access to a mutable cell.
  *  All methods are synchronized.
  *
@@ -23,14 +25,16 @@ class SyncVar[A] {
     value.get
   }
 
-  /** Waits `timeout` millis. If `timeout <= 0` just returns 0. If the system clock
-   *  went backward, it will return 0, so it never returns negative results.
-   */
+  /** Waits `timeout` millis. If `timeout <= 0` just returns 0.
+    * It never returns negative results.
+    */
   private def waitMeasuringElapsed(timeout: Long): Long = if (timeout <= 0) 0 else {
-    val start = System.currentTimeMillis
+    val start = System.nanoTime()
     wait(timeout)
-    val elapsed = System.currentTimeMillis - start
-    if (elapsed < 0) 0 else elapsed
+    val elapsed = System.nanoTime() - start
+    // nanoTime should be monotonic, but it's not possible to rely on that.
+    // See http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6458294.
+    if (elapsed < 0) 0 else TimeUnit.NANOSECONDS.toMillis(elapsed)
   }
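A small sketch of the same measuring pattern in isolation, assuming nothing beyond `System.nanoTime` (the helper name is made up):

    import java.util.concurrent.TimeUnit

    // Report elapsed time in millis, clamped at zero in case nanoTime misbehaves.
    def elapsedMillis[A](body: => A): (A, Long) = {
      val start = System.nanoTime()
      val result = body
      val elapsed = System.nanoTime() - start
      (result, if (elapsed < 0) 0L else TimeUnit.NANOSECONDS.toMillis(elapsed))
    }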
 
   /** Waits for this SyncVar to become defined at least for
@@ -41,9 +45,9 @@ class SyncVar[A] {
    *  @return            `None` if variable is undefined after `timeout`, `Some(value)` otherwise
    */
   def get(timeout: Long): Option[A] = synchronized {
-    /** Defending against the system clock going backward
-     *  by counting time elapsed directly.  Loop required
-     *  to deal with spurious wakeups.
+    /* Defending against the system clock going backward
+     * by counting time elapsed directly.  Loop required
+     * to deal with spurious wakeups.
      */
     var rest = timeout
     while (!isDefined && rest > 0) {
@@ -79,6 +83,7 @@ class SyncVar[A] {
   // whether or not the SyncVar is already defined. So, set has been
   // deprecated in order to eventually be able to make "setting" private
   @deprecated("Use `put` instead, as `set` is potentionally error-prone", "2.10.0")
+  // NOTE: Used by SBT 0.13.0-M2 and below
   def set(x: A): Unit = setVal(x)
 
   /** Places a value in the SyncVar. If the SyncVar already has a stored value,
@@ -98,6 +103,7 @@ class SyncVar[A] {
   // whether or not the SyncVar is already defined. So, unset has been
   // deprecated in order to eventually be able to make "unsetting" private
   @deprecated("Use `take` instead, as `unset` is potentionally error-prone", "2.10.0")
+  // NOTE: Used by SBT 0.13.0-M2 and below
   def unset(): Unit = synchronized {
     isDefined = false
     value = None
diff --git a/src/library/scala/concurrent/TaskRunner.scala b/src/library/scala/concurrent/TaskRunner.scala
index a939a3f..1ea23b3 100644
--- a/src/library/scala/concurrent/TaskRunner.scala
+++ b/src/library/scala/concurrent/TaskRunner.scala
@@ -15,7 +15,7 @@ import scala.language.{higherKinds, implicitConversions}
  *  @author Philipp Haller
  */
 @deprecated("Use `ExecutionContext` instead.", "2.10.0")
-trait TaskRunner {
+private[scala] trait TaskRunner {
 
   type Task[T]
 
@@ -24,5 +24,4 @@ trait TaskRunner {
   def execute[S](task: Task[S]): Unit
 
   def shutdown(): Unit
-
 }
diff --git a/src/library/scala/concurrent/TaskRunners.scala b/src/library/scala/concurrent/TaskRunners.scala
deleted file mode 100644
index e109a8a..0000000
--- a/src/library/scala/concurrent/TaskRunners.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.concurrent
-
-import java.util.concurrent.{ThreadPoolExecutor, LinkedBlockingQueue, TimeUnit}
-
-/** The `TaskRunners` object...
- *
- *  @author Philipp Haller
- */
- at deprecated("Use `ExecutionContext` instead.", "2.10.0")
-object TaskRunners {
-
-  implicit val threadRunner: FutureTaskRunner =
-    new ThreadRunner
-
-  implicit val threadPoolRunner: FutureTaskRunner = {
-    val numCores = Runtime.getRuntime().availableProcessors()
-    val keepAliveTime = 60000L
-    val workQueue = new LinkedBlockingQueue[Runnable]
-    val exec = new ThreadPoolExecutor(numCores,
-                                      numCores,
-                                      keepAliveTime,
-                                      TimeUnit.MILLISECONDS,
-                                      workQueue,
-                                      new ThreadPoolExecutor.CallerRunsPolicy)
-    JavaConversions.asTaskRunner(exec)
-  }
-
-}
diff --git a/src/library/scala/concurrent/ThreadPoolRunner.scala b/src/library/scala/concurrent/ThreadPoolRunner.scala
index afa14ed..7784681 100644
--- a/src/library/scala/concurrent/ThreadPoolRunner.scala
+++ b/src/library/scala/concurrent/ThreadPoolRunner.scala
@@ -17,7 +17,7 @@ import scala.language.implicitConversions
  *  @author Philipp Haller
  */
 @deprecated("Use `ExecutionContext` instead.", "2.10.0")
-trait ThreadPoolRunner extends FutureTaskRunner {
+private[scala] trait ThreadPoolRunner extends FutureTaskRunner {
 
   type Task[T] = Callable[T] with Runnable
   type Future[T] = java.util.concurrent.Future[T]
diff --git a/src/library/scala/concurrent/ThreadRunner.scala b/src/library/scala/concurrent/ThreadRunner.scala
deleted file mode 100644
index cd92db9..0000000
--- a/src/library/scala/concurrent/ThreadRunner.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.concurrent
-
-import java.lang.Thread
-import scala.language.implicitConversions
-
-/** The `ThreadRunner` trait...
- *
- *  @author Philipp Haller
- */
- at deprecated("Use `ExecutionContext` instead.", "2.10.0")
-class ThreadRunner extends FutureTaskRunner {
-
-  type Task[T] = () => T
-  type Future[T] = () => T
-
-  implicit def functionAsTask[S](fun: () => S): Task[S] = fun
-  implicit def futureAsFunction[S](x: Future[S]): () => S = x
-
-  /* If expression computed successfully return it in `Right`,
-   * otherwise return exception in `Left`.
-   */
-  private def tryCatch[A](body: => A): Either[Exception, A] =
-    try Right(body) catch {
-      case ex: Exception => Left(ex)
-    }
-
-  def execute[S](task: Task[S]) {
-    val runnable = new Runnable {
-      def run() { tryCatch(task()) }
-    }
-    (new Thread(runnable)).start()
-  }
-
-  def submit[S](task: Task[S]): Future[S] = {
-    val result = new SyncVar[Either[Exception, S]]
-    val runnable = new Runnable {
-      def run() { result set tryCatch(task()) }
-    }
-    (new Thread(runnable)).start()
-    () => result.get.fold[S](throw _, identity _)
-  }
-
-  @deprecated("Use `blocking` instead.", "2.10.0")
-  def managedBlock(blocker: ManagedBlocker) {
-    blocker.block()
-  }
-
-  def shutdown() {
-    // do nothing
-  }
-
-}
diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala
index 0353d61..1b50b7f 100644
--- a/src/library/scala/concurrent/duration/Duration.scala
+++ b/src/library/scala/concurrent/duration/Duration.scala
@@ -10,6 +10,7 @@ package scala.concurrent.duration
 
 import java.lang.{ Double => JDouble, Long => JLong }
 import scala.language.implicitConversions
+import scala.language.postfixOps
 
 object Duration {
 
@@ -103,7 +104,7 @@ object Duration {
    * Extract length and time unit out of a duration, if it is finite.
    */
   def unapply(d: Duration): Option[(Long, TimeUnit)] =
-    if (d.isFinite) Some((d.length, d.unit)) else None
+    if (d.isFinite()) Some((d.length, d.unit)) else None
 
   /**
    * Construct a possibly infinite or undefined Duration from the given number of nanoseconds.
@@ -220,6 +221,8 @@ object Duration {
     final def toMinutes: Long = fail("toMinutes")
     final def toHours: Long   = fail("toHours")
     final def toDays: Long    = fail("toDays")
+
+    final def toCoarsest: Duration = this
   }
 
   /**
@@ -519,6 +522,18 @@ sealed abstract class Duration extends Serializable with Ordered[Duration] {
    * $ovf
    */
   def plus(other: Duration)  = this + other
+  /**
+   * Returns a duration equal to this one but expressed in the coarsest possible unit, or this duration itself if it is already expressed in the coarsest unit
+   * <p/>
+   * Examples:
+   * {{{
+   * Duration(60, MINUTES).toCoarsest // Duration(1, HOURS)
+   * Duration(1000, MILLISECONDS).toCoarsest // Duration(1, SECONDS)
+   * Duration(48, HOURS).toCoarsest // Duration(2, DAYS)
+   * Duration(5, SECONDS).toCoarsest // Duration(5, SECONDS)
+   * }}}
+   */
+  def toCoarsest: Duration
 }
 
 object FiniteDuration {
@@ -623,7 +638,7 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio
   // if this is made a constant, then scalac will elide the conditional and always return +0.0, SI-6331
   private[this] def minusZero = -0d
   def /(divisor: Duration): Double =
-    if (divisor.isFinite) toNanos.toDouble / divisor.toNanos
+    if (divisor.isFinite()) toNanos.toDouble / divisor.toNanos
     else if (divisor eq Undefined) Double.NaN
     else if ((length < 0) ^ (divisor > Zero)) 0d
     else minusZero
@@ -663,8 +678,8 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio
    * Long.MinValue is not a legal `length` anyway.
    */
   private def safeMul(_a: Long, _b: Long): Long = {
-    val a = math.abs(_a)
-    val b = math.abs(_b)
+    val a = scala.math.abs(_a)
+    val b = scala.math.abs(_b)
     import java.lang.Long.{ numberOfLeadingZeros => leading }
     if (leading(a) + leading(b) < 64) throw new IllegalArgumentException("multiplication overflow")
     val product = a * b
@@ -690,6 +705,28 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio
 
   final def isFinite() = true
 
+  final def toCoarsest: Duration = {
+    def loop(length: Long, unit: TimeUnit): FiniteDuration = {
+      def coarserOrThis(coarser: TimeUnit, divider: Int) =
+        if (length % divider == 0) loop(length / divider, coarser)
+        else if (unit == this.unit) this
+        else FiniteDuration(length, unit)
+
+      unit match {
+        case DAYS => FiniteDuration(length, unit)
+        case HOURS => coarserOrThis(DAYS, 24)
+        case MINUTES => coarserOrThis(HOURS, 60)
+        case SECONDS => coarserOrThis(MINUTES, 60)
+        case MILLISECONDS => coarserOrThis(SECONDS, 1000)
+        case MICROSECONDS => coarserOrThis(MILLISECONDS, 1000)
+        case NANOSECONDS => coarserOrThis(MICROSECONDS, 1000)
+      }
+    }
+
+    if (unit == DAYS || length == 0) this
+    else loop(length, unit)
+  }
+
   override def equals(other: Any) = other match {
     case x: FiniteDuration => toNanos == x.toNanos
     case _                 => super.equals(other)
diff --git a/src/library/scala/concurrent/duration/package.scala b/src/library/scala/concurrent/duration/package.scala
index 2fd735f..d166975 100644
--- a/src/library/scala/concurrent/duration/package.scala
+++ b/src/library/scala/concurrent/duration/package.scala
@@ -36,19 +36,19 @@ package object duration {
   final val NANOSECONDS  = java.util.concurrent.TimeUnit.NANOSECONDS
   final val SECONDS      = java.util.concurrent.TimeUnit.SECONDS
 
-  implicit def pairIntToDuration(p: (Int, TimeUnit)): Duration         = Duration(p._1, p._2)
+  implicit def pairIntToDuration(p: (Int, TimeUnit)): Duration         = Duration(p._1.toLong, p._2)
   implicit def pairLongToDuration(p: (Long, TimeUnit)): FiniteDuration = Duration(p._1, p._2)
   implicit def durationToPair(d: Duration): (Long, TimeUnit)           = (d.length, d.unit)
 
-  implicit final class DurationInt(val n: Int) extends AnyVal with DurationConversions {
-    override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit)
+  implicit final class DurationInt(private val n: Int) extends AnyVal with DurationConversions {
+    override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n.toLong, unit)
   }
 
-  implicit final class DurationLong(val n: Long) extends AnyVal with DurationConversions {
+  implicit final class DurationLong(private val n: Long) extends AnyVal with DurationConversions {
     override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit)
   }
 
-  implicit final class DurationDouble(val d: Double) extends AnyVal with DurationConversions {
+  implicit final class DurationDouble(private val d: Double) extends AnyVal with DurationConversions {
     override protected def durationIn(unit: TimeUnit): FiniteDuration =
       Duration(d, unit) match {
         case f: FiniteDuration => f
@@ -59,17 +59,17 @@ package object duration {
   /*
    * Avoid reflection based invocation by using non-duck type
    */
-  implicit final class IntMult(val i: Int) extends AnyVal {
-    def *(d: Duration) = d * i
-    def *(d: FiniteDuration) = d * i
+  implicit final class IntMult(private val i: Int) extends AnyVal {
+    def *(d: Duration) = d * i.toDouble
+    def *(d: FiniteDuration) = d * i.toLong
   }
 
-  implicit final class LongMult(val i: Long) extends AnyVal {
-    def *(d: Duration) = d * i
-    def *(d: FiniteDuration) = d * i
+  implicit final class LongMult(private val i: Long) extends AnyVal {
+    def *(d: Duration) = d * i.toDouble
+    def *(d: FiniteDuration) = d * i.toLong
   }
 
-  implicit final class DoubleMult(val f: Double) extends AnyVal {
-    def *(d: Duration) = d * f
+  implicit final class DoubleMult(private val f: Double) extends AnyVal {
+    def *(d: Duration) = d * f.toDouble
   }
 }
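A hedged sketch of the duration DSL these implicit classes provide (values are illustrative):

    import java.util.concurrent.TimeUnit
    import scala.concurrent.duration._

    val d: FiniteDuration = 5.seconds        // via DurationInt
    val tripled: FiniteDuration = 3 * d      // via IntMult
    val asPair: (Long, TimeUnit) = d         // via durationToPair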
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 0aa6b37..4797202 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -30,7 +30,7 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
   }
 
   // Implement BlockContext on FJP threads
-  class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory { 
+  class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
     def wire[T <: Thread](thread: T): T = {
       thread.setDaemon(daemonic)
       thread.setUncaughtExceptionHandler(uncaughtExceptionHandler)
@@ -57,22 +57,22 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
 
   def createExecutorService: ExecutorService = {
 
-    def getInt(name: String, f: String => Int): Int =
-        try f(System.getProperty(name)) catch { case e: Exception => Runtime.getRuntime.availableProcessors }
-    def range(floor: Int, desired: Int, ceiling: Int): Int =
-      if (ceiling < floor) range(ceiling, desired, floor) else scala.math.min(scala.math.max(desired, floor), ceiling)
+    def getInt(name: String, default: String) = (try System.getProperty(name, default) catch {
+      case e: SecurityException => default
+    }) match {
+      case s if s.charAt(0) == 'x' => (Runtime.getRuntime.availableProcessors * s.substring(1).toDouble).ceil.toInt
+      case other => other.toInt
+    }
+
+    def range(floor: Int, desired: Int, ceiling: Int) = scala.math.min(scala.math.max(floor, desired), ceiling)
 
     val desiredParallelism = range(
-      getInt("scala.concurrent.context.minThreads", _.toInt),
-      getInt("scala.concurrent.context.numThreads", {
-        case null | "" => Runtime.getRuntime.availableProcessors
-        case s if s.charAt(0) == 'x' => (Runtime.getRuntime.availableProcessors * s.substring(1).toDouble).ceil.toInt
-        case other => other.toInt
-      }),
-      getInt("scala.concurrent.context.maxThreads", _.toInt))
+      getInt("scala.concurrent.context.minThreads", "1"),
+      getInt("scala.concurrent.context.numThreads", "x1"),
+      getInt("scala.concurrent.context.maxThreads", "x1"))
 
     val threadFactory = new DefaultThreadFactory(daemonic = true)
-    
+
     try {
       new ForkJoinPool(
         desiredParallelism,
@@ -96,12 +96,26 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
     }
   }
 
-
   def execute(runnable: Runnable): Unit = executor match {
     case fj: ForkJoinPool =>
-      val fjt = runnable match {
+      val fjt: ForkJoinTask[_] = runnable match {
         case t: ForkJoinTask[_] => t
-        case runnable => new ForkJoinTask[Unit] {
+        case r                  => new ExecutionContextImpl.AdaptedForkJoinTask(r)
+      }
+      Thread.currentThread match {
+        case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork()
+        case _                                              => fj execute fjt
+      }
+    case generic => generic execute runnable
+  }
+
+  def reportFailure(t: Throwable) = reporter(t)
+}
+
+
+private[concurrent] object ExecutionContextImpl {
+
+  final class AdaptedForkJoinTask(runnable: Runnable) extends ForkJoinTask[Unit] {
           final override def setRawResult(u: Unit): Unit = ()
           final override def getRawResult(): Unit = ()
           final override def exec(): Boolean = try { runnable.run(); true } catch {
@@ -114,18 +128,7 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
               throw anything
           }
         }
-      }
-      Thread.currentThread match {
-        case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork()
-        case _ => fj execute fjt
-      }
-    case generic => generic execute runnable
-  }
-
-  def reportFailure(t: Throwable) = reporter(t)
-}
 
-private[concurrent] object ExecutionContextImpl {
   def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = new ExecutionContextImpl(e, reporter)
   def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutionContextExecutorService =
     new ExecutionContextImpl(es, reporter) with ExecutionContextExecutorService {
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
index 89d10e5..042d32c 100644
--- a/src/library/scala/concurrent/impl/Future.scala
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -12,7 +12,7 @@ package scala.concurrent.impl
 
 import scala.concurrent.ExecutionContext
 import scala.util.control.NonFatal
-import scala.util.{Try, Success, Failure}
+import scala.util.{ Success, Failure }
 
 
 private[concurrent] object Future {
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index c9b2a15..b156010 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -82,11 +82,11 @@ private[concurrent] object Promise {
    *  2. Complete, with a result.
    *  3. Linked to another DefaultPromise.
    *
-   *  If a DefaultPromise is linked it another DefaultPromise then it will
+   *  If a DefaultPromise is linked to another DefaultPromise, it will
    *  delegate all its operations to that other promise. This means that two
    *  DefaultPromises that are linked will appear, to external callers, to have
-   *  exactly the same state and behaviour. E.g. they will both appear to be
-   *  either complete or incomplete, and with the same values.
+   *  exactly the same state and behaviour. For instance, both will appear as
+   *  incomplete, or as complete with the same result value.
    *
    *  A DefaultPromise stores its state entirely in the AnyRef cell exposed by
    *  AbstractPromise. The type of object stored in the cell fully describes the
@@ -155,7 +155,7 @@ private[concurrent] object Promise {
 
     /** Get the root promise for this promise, compressing the link chain to that
      *  promise if necessary.
-     *    
+     *
      *  For promises that are not linked, the result of calling
     *  `compressedRoot()` will be the promise itself. However, for linked promises,
      *  this method will traverse each link until it locates the root promise at
@@ -265,7 +265,7 @@ private[concurrent] object Promise {
     }
 
     def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
-      val preparedEC = executor.prepare
+      val preparedEC = executor.prepare()
       val runnable = new CallbackRunnable[T](preparedEC, func)
       dispatchOrAddCallback(runnable)
     }
@@ -329,7 +329,7 @@ private[concurrent] object Promise {
 
     def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
       val completedAs = value.get
-      val preparedEC = executor.prepare
+      val preparedEC = executor.prepare()
       (new CallbackRunnable(preparedEC, func)).executeWithValue(completedAs)
     }
 
diff --git a/src/library/scala/concurrent/ops.scala b/src/library/scala/concurrent/ops.scala
deleted file mode 100644
index 4c91e78..0000000
--- a/src/library/scala/concurrent/ops.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.concurrent
-
-import java.lang.Thread
-import scala.util.control.Exception.allCatch
-
-/** The object `ops` ...
- *
- *  @author  Martin Odersky, Stepan Koltsov, Philipp Haller
- */
- at deprecated("Use `Future` instead.", "2.10.0")
-object ops
-{
-  val defaultRunner: FutureTaskRunner = TaskRunners.threadRunner
-
-  /**
-   *  If expression computed successfully return it in `Right`,
-   *  otherwise return exception in `Left`.
-   */
-  private def tryCatch[A](body: => A): Either[Throwable, A] =
-    allCatch[A] either body
-
-  private def getOrThrow[T <: Throwable, A](x: Either[T, A]): A =
-    x.fold[A](throw _, identity _)
-
-  /** Evaluates an expression asynchronously.
-   *
-   *  @param  p the expression to evaluate
-   */
-  def spawn(p: => Unit)(implicit runner: TaskRunner = defaultRunner): Unit = {
-    runner execute runner.functionAsTask(() => p)
-  }
-
-  /** Evaluates an expression asynchronously, and returns a closure for
-   *  retrieving the result.
-   *
-   *  @param  p the expression to evaluate
-   *  @return   a closure which returns the result once it has been computed
-   */
-  def future[A](p: => A)(implicit runner: FutureTaskRunner = defaultRunner): () => A = {
-    runner.futureAsFunction(runner submit runner.functionAsTask(() => p))
-  }
-
-  /** Evaluates two expressions in parallel. Invoking `par` blocks the current
-   *  thread until both expressions have been evaluated.
-   *
-   *  @param  xp the first expression to evaluate
-   *  @param  yp the second expression to evaluate
-   *
-   *  @return    a pair holding the evaluation results
-   */
-  def par[A, B](xp: => A, yp: => B)(implicit runner: TaskRunner = defaultRunner): (A, B) = {
-    val y = new SyncVar[Either[Throwable, B]]
-    spawn { y set tryCatch(yp) }
-    (xp, getOrThrow(y.get))
-  }
-
-/*
-  def parMap[a,b](f: a => b, xs: Array[a]): Array[b] = {
-    val results = new Array[b](xs.length);
-    replicate(0, xs.length) { i => results(i) = f(xs(i)) }
-    results
-  }
-*/
-
-}
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index 3e849f1..cc1350f 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -19,26 +19,30 @@ package object concurrent {
   type TimeoutException =      java.util.concurrent.TimeoutException
 
   /** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
-   *  
+   *
    *  The result becomes available once the asynchronous computation is completed.
-   *  
+   *
    *  @tparam T       the type of the result
    *  @param body     the asynchronous computation
-   *  @param execctx  the execution context on which the future is run
+   *  @param executor the execution context on which the future is run
    *  @return         the `Future` holding the result of the computation
    */
-  def future[T](body: =>T)(implicit execctx: ExecutionContext): Future[T] = Future[T](body)
+  @deprecated("Use `Future { ... }` instead.", "2.11.0")
+  // removal planned for 2.13.0
+  def future[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = Future[T](body)
 
   /** Creates a promise object which can be completed with a value or an exception.
-   *  
+   *
    *  @tparam T       the type of the value in the promise
    *  @return         the newly created `Promise` object
    */
+  @deprecated("Use `Promise[T]()` instead.", "2.11.0")
+  // removal planned for 2.13.0
   def promise[T](): Promise[T] = Promise[T]()
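A hedged before/after sketch of the deprecations above (bodies are illustrative):

    import scala.concurrent.{ Future, Promise }
    import scala.concurrent.ExecutionContext.Implicits.global

    val f = Future { 42 }      // instead of the deprecated future { 42 }
    val p = Promise[Int]()     // instead of the deprecated promise[Int]()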
 
   /** Used to designate a piece of code which potentially blocks, allowing the current [[BlockContext]] to adjust
    *  the runtime's behavior.
-   *  Properly marking blocking code may improve performance or avoid deadlocks. 
+   *  Properly marking blocking code may improve performance or avoid deadlocks.
    *
    *  Blocking on an [[Awaitable]] should be done using [[Await.result]] instead of `blocking`.
    *
@@ -53,22 +57,22 @@ package object concurrent {
 package concurrent {
   @implicitNotFound("Don't call `Awaitable` methods directly, use the `Await` object.")
   sealed trait CanAwait
-  
+
   /**
    * Internal usage only, implementation detail.
    */
   private[concurrent] object AwaitPermission extends CanAwait
-  
+
   /**
    * `Await` is what is used to ensure proper handling of blocking for `Awaitable` instances.
    */
   object Await {
     /**
      * Await the "completed" state of an `Awaitable`.
-     * 
+     *
      * Although this method is blocking, the internal use of [[scala.concurrent.blocking blocking]] ensures that
      * the underlying [[ExecutionContext]] is prepared to properly manage the blocking.
-     * 
+     *
      * @param  awaitable
      *         the `Awaitable` to be awaited
      * @param  atMost
@@ -84,13 +88,13 @@ package concurrent {
     @throws(classOf[InterruptedException])
     def ready[T](awaitable: Awaitable[T], atMost: Duration): awaitable.type =
       blocking(awaitable.ready(atMost)(AwaitPermission))
-    
+
     /**
      * Await and return the result (of type `T`) of an `Awaitable`.
-     * 
+     *
      * Although this method is blocking, the internal use of [[scala.concurrent.blocking blocking]] ensures that
     * the underlying [[ExecutionContext]] is prepared to properly detect blocking and ensure that there are no deadlocks.
-     * 
+     *
      * @param  awaitable
      *         the `Awaitable` to be awaited
      * @param  atMost
diff --git a/src/library/scala/deprecatedInheritance.scala b/src/library/scala/deprecatedInheritance.scala
index 7006556..7d20219 100644
--- a/src/library/scala/deprecatedInheritance.scala
+++ b/src/library/scala/deprecatedInheritance.scala
@@ -11,7 +11,8 @@ package scala
 /** An annotation that designates that inheriting from a class is deprecated.
  *
  *  This is usually done to warn about a non-final class being made final in a future version.
- *  Sub-classing such a class then generates a warning.
+ *  Sub-classing such a class then generates a warning. No warnings are generated if the
+ *  subclass is in the same compilation unit.
  *
  *  @param  message the message to print during compilation if the class was sub-classed
  *  @param  since   a string identifying the first version in which inheritance was deprecated
diff --git a/src/library/scala/io/AnsiColor.scala b/src/library/scala/io/AnsiColor.scala
new file mode 100644
index 0000000..39e2e3b
--- /dev/null
+++ b/src/library/scala/io/AnsiColor.scala
@@ -0,0 +1,53 @@
+package scala
+package io
+
+trait AnsiColor {
+  /** Foreground color for ANSI black */
+  final val BLACK      = "\u001b[30m"
+  /** Foreground color for ANSI red */
+  final val RED        = "\u001b[31m"
+  /** Foreground color for ANSI green */
+  final val GREEN      = "\u001b[32m"
+  /** Foreground color for ANSI yellow */
+  final val YELLOW     = "\u001b[33m"
+  /** Foreground color for ANSI blue */
+  final val BLUE       = "\u001b[34m"
+  /** Foreground color for ANSI magenta */
+  final val MAGENTA    = "\u001b[35m"
+  /** Foreground color for ANSI cyan */
+  final val CYAN       = "\u001b[36m"
+  /** Foreground color for ANSI white */
+  final val WHITE      = "\u001b[37m"
+
+  /** Background color for ANSI black */
+  final val BLACK_B    = "\u001b[40m"
+  /** Background color for ANSI red */
+  final val RED_B      = "\u001b[41m"
+  /** Background color for ANSI green */
+  final val GREEN_B    = "\u001b[42m"
+  /** Background color for ANSI yellow */
+  final val YELLOW_B   = "\u001b[43m"
+  /** Background color for ANSI blue */
+  final val BLUE_B     = "\u001b[44m"
+  /** Background color for ANSI magenta */
+  final val MAGENTA_B  = "\u001b[45m"
+  /** Background color for ANSI cyan */
+  final val CYAN_B     = "\u001b[46m"
+  /** Background color for ANSI white */
+  final val WHITE_B    = "\u001b[47m"
+
+  /** Reset ANSI styles */
+  final val RESET      = "\u001b[0m"
+  /** ANSI bold */
+  final val BOLD       = "\u001b[1m"
+  /** ANSI underlines */
+  final val UNDERLINED = "\u001b[4m"
+  /** ANSI blink */
+  final val BLINK      = "\u001b[5m"
+  /** ANSI reversed */
+  final val REVERSED   = "\u001b[7m"
+  /** ANSI invisible */
+  final val INVISIBLE  = "\u001b[8m"
+}
+
+object AnsiColor extends AnsiColor { }
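A hedged usage sketch of the new AnsiColor constants (assumes a terminal that honours ANSI escapes):

    import scala.io.AnsiColor

    // Mix in the trait, or use the companion object's constants directly.
    println(AnsiColor.RED + AnsiColor.BOLD + "error:" + AnsiColor.RESET + " something went wrong")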
diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala
index 767f06f..1c87a1f 100644
--- a/src/library/scala/io/BufferedSource.scala
+++ b/src/library/scala/io/BufferedSource.scala
@@ -8,9 +8,11 @@
 
 package scala.io
 
+import java.util.Arrays
 import java.io.{ InputStream, BufferedReader, InputStreamReader, PushbackReader }
 import Source.DefaultBufSize
 import scala.collection.{ Iterator, AbstractIterator }
+import scala.collection.mutable.ArrayBuffer
 
 /** This object provides convenience methods to create an iterable
  *  representation of a source file.
@@ -40,7 +42,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
     map (_.toChar)
   )
 
-  class BufferedLineIterator extends AbstractIterator[String] with Iterator[String] {
+  private def decachedReader: BufferedReader = {
     // Don't want to lose a buffered char sitting in iter either. Yes,
     // this is ridiculous, but if I can't get rid of Source, and all the
     // Iterator bits are designed into Source, and people create Sources
@@ -48,18 +50,21 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
     // that calls hasNext to find out if they're empty, and that leads
     // to chars being buffered, and no, I don't work here, they left a
     // door unlocked.
-    private val lineReader: BufferedReader = {
-      // To avoid inflicting this silliness indiscriminately, we can
-      // skip it if the char reader was never created: and almost always
-      // it will not have been created, since getLines will be called
-      // immediately on the source.
-      if (charReaderCreated && iter.hasNext) {
-        val pb = new PushbackReader(charReader)
-        pb unread iter.next()
-        new BufferedReader(pb, bufferSize)
-      }
-      else charReader
+    // To avoid inflicting this silliness indiscriminately, we can
+    // skip it if the char reader was never created: and almost always
+    // it will not have been created, since getLines will be called
+    // immediately on the source.
+    if (charReaderCreated && iter.hasNext) {
+      val pb = new PushbackReader(charReader)
+      pb unread iter.next().toInt
+      new BufferedReader(pb, bufferSize)
     }
+    else charReader
+  }
+
+
+  class BufferedLineIterator extends AbstractIterator[String] with Iterator[String] {
+    private val lineReader = decachedReader
     var nextLine: String = null
 
     override def hasNext = {
@@ -73,11 +78,24 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
         if (nextLine == null) lineReader.readLine
         else try nextLine finally nextLine = null
       }
-      if (result == null) Iterator.empty.next
+      if (result == null) Iterator.empty.next()
       else result
     }
   }
 
   override def getLines(): Iterator[String] = new BufferedLineIterator
-}
 
+  /** Efficiently converts the entire remaining input into a string. */
+  override def mkString = {
+    // Speed up slurping of whole data set in the simplest cases.
+    val allReader = decachedReader
+    val sb = new StringBuilder
+    val buf = new Array[Char](bufferSize)
+    var n = 0
+    while (n != -1) {
+      n = allReader.read(buf)
+      if (n>0) sb.appendAll(buf, 0, n)
+    }
+    sb.result
+  }
+}
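A hedged sketch of slurping a whole source through the new `mkString` fast path (the path is illustrative):

    import scala.io.Source

    val src = Source.fromFile("/tmp/example.txt")   // hypothetical file
    try println(src.mkString.length)
    finally src.close()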
diff --git a/src/library/scala/io/BytePickle.scala b/src/library/scala/io/BytePickle.scala
deleted file mode 100644
index 2c4a0bd..0000000
--- a/src/library/scala/io/BytePickle.scala
+++ /dev/null
@@ -1,318 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.io
-
-import scala.collection.mutable
-
-/**
- * Pickler combinators.
- * Based on a Haskell library by Andrew Kennedy,
- * see <a href="http://research.microsoft.com/~akenn/fun/"
- * target="_top">http://research.microsoft.com/~akenn/fun/</a>.
- *
- * @author  Philipp Haller
- * @version 1.1
- */
- at deprecated("This class will be removed.", "2.10.0")
-object BytePickle {
-  abstract class SPU[T] {
-    def appP(a: T, state: PicklerState): PicklerState
-    def appU(state: UnPicklerState): (T, UnPicklerState)
-  }
-
-  def pickle[T](p: SPU[T], a: T): Array[Byte] =
-    p.appP(a, new PicklerState(new Array[Byte](0), new PicklerEnv)).stream
-
-  def unpickle[T](p: SPU[T], stream: Array[Byte]): T =
-    p.appU(new UnPicklerState(stream, new UnPicklerEnv))._1
-
-  abstract class PU[T] {
-    def appP(a: T, state: Array[Byte]): Array[Byte]
-    def appU(state: Array[Byte]): (T, Array[Byte])
-  }
-
-  def upickle[T](p: PU[T], a: T): Array[Byte] =
-    p.appP(a, new Array[Byte](0))
-
-  def uunpickle[T](p: PU[T], stream: Array[Byte]): T =
-    p.appU(stream)._1
-
-  class PicklerEnv extends mutable.HashMap[Any, Int] {
-    private var cnt: Int = 64
-    def nextLoc() = { cnt += 1; cnt }
-  }
-
-  class UnPicklerEnv extends mutable.HashMap[Int, Any] {
-    private var cnt: Int = 64
-    def nextLoc() = { cnt += 1; cnt }
-  }
-
-  class PicklerState(val stream: Array[Byte], val dict: PicklerEnv)
-  class UnPicklerState(val stream: Array[Byte], val dict: UnPicklerEnv)
-
-  abstract class RefDef
-  case class Ref() extends RefDef
-  case class Def() extends RefDef
-
-  def refDef: PU[RefDef] = new PU[RefDef] {
-    def appP(b: RefDef, s: Array[Byte]): Array[Byte] =
-      b match {
-        case Ref() => Array.concat(s, Array[Byte](0))
-        case Def() => Array.concat(s, Array[Byte](1))
-      };
-    def appU(s: Array[Byte]): (RefDef, Array[Byte]) =
-      if (s(0) == (0: Byte)) (Ref(), s.slice(1, s.length))
-      else (Def(), s.slice(1, s.length));
-  }
-
-  val REF = 0
-  val DEF = 1
-
-  def unat: PU[Int] = new PU[Int] {
-    def appP(n: Int, s: Array[Byte]): Array[Byte] =
-      Array.concat(s, nat2Bytes(n));
-    def appU(s: Array[Byte]): (Int, Array[Byte]) = {
-      var num = 0
-      def readNat: Int = {
-        var b = 0;
-        var x = 0;
-        do {
-          b = s(num)
-          num += 1
-          x = (x << 7) + (b & 0x7f);
-        } while ((b & 0x80) != 0);
-        x
-      }
-      (readNat, s.slice(num, s.length))
-    }
-  }
-
-  def share[a](pa: SPU[a]): SPU[a] = new SPU[a] {
-    def appP(v: a, state: PicklerState): PicklerState = {
-      /*
-      - is there some value equal to v associated with a location l in the pickle environment?
-      - yes: write REF-tag to outstream together with l
-      - no:
-          write DEF-tag to outstream
-          record current location l of outstream
-          --> serialize value
-          add entry to pickle environment, mapping v onto l
-      */
-      val pe = state.dict
-      pe.get(v) match {
-        case None =>
-          val sPrime = refDef.appP(Def(), state.stream)
-          val l = pe.nextLoc()
-
-          val sPrimePrime = pa.appP(v, new PicklerState(sPrime, pe))
-
-          pe.update(v, l)
-
-          return sPrimePrime
-        case Some(l) =>
-          val sPrime = refDef.appP(Ref(), state.stream)
-
-          return new PicklerState(unat.appP(l, sPrime), pe)
-      }
-    }
-    def appU(state: UnPicklerState): (a, UnPicklerState) = {
-      /*
-      - first, read tag (i.e. DEF or REF)
-      - if REF:
-          read location l
-          look up resulting value in unpickler environment
-      - if DEF:
-          record location l of input stream
-          --> deserialize value v with argument deserializer
-          add entry to unpickler environment, mapping l onto v
-      */
-      val upe = state.dict
-      val res = refDef.appU(state.stream)
-      res._1 match {
-        case Def() =>
-          val l = upe.nextLoc
-          val res2 = pa.appU(new UnPicklerState(res._2, upe))
-          upe.update(l, res2._1)
-          return res2
-        case Ref() =>
-          val res2 = unat.appU(res._2)  // read location
-          upe.get(res2._1) match {     // lookup value in unpickler env
-            case None => throw new IllegalArgumentException("invalid unpickler environment")
-            case Some(v) => return (v.asInstanceOf[a], new UnPicklerState(res2._2, upe))
-          }
-      }
-    }
-  }
-
-  def ulift[t](x: t): PU[t] = new PU[t] {
-    def appP(a: t, state: Array[Byte]): Array[Byte] =
-      if (x != a) throw new IllegalArgumentException("value to be pickled (" + a + ") != " + x)
-      else state;
-    def appU(state: Array[Byte]) = (x, state)
-  }
-
-  def lift[t](x: t): SPU[t] = new SPU[t] {
-    def appP(a: t, state: PicklerState): PicklerState =
-      if (x != a) { /*throw new IllegalArgumentException("value to be pickled (" + a + ") != " + x);*/ state }
-      else state;
-    def appU(state: UnPicklerState) = (x, state)
-  }
-
-  def usequ[t,u](f: u => t, pa: PU[t], k: t => PU[u]): PU[u] = new PU[u] {
-    def appP(b: u, s: Array[Byte]): Array[Byte] = {
-      val a = f(b)
-      val sPrime = pa.appP(a, s)
-      val pb = k(a)
-      val sPrimePrime = pb.appP(b, sPrime)
-      sPrimePrime
-    }
-    def appU(s: Array[Byte]): (u, Array[Byte]) = {
-      val resPa = pa.appU(s)
-      val a = resPa._1
-      val sPrime = resPa._2
-      val pb = k(a)
-      pb.appU(sPrime)
-    }
-  }
-
-  def sequ[t,u](f: u => t, pa: SPU[t], k: t => SPU[u]): SPU[u] = new SPU[u] {
-    def appP(b: u, s: PicklerState): PicklerState = {
-      val a = f(b)
-      val sPrime = pa.appP(a, s)
-      val pb = k(a)
-      pb.appP(b, sPrime)
-    }
-    def appU(s: UnPicklerState): (u, UnPicklerState) = {
-      val resPa = pa.appU(s)
-      val a = resPa._1
-      val sPrime = resPa._2
-      val pb = k(a)
-      pb.appU(sPrime)
-    }
-  }
-
-  def upair[a,b](pa: PU[a], pb: PU[b]): PU[(a,b)] = {
-    def fst(p: (a,b)): a = p._1
-    def snd(p: (a,b)): b = p._2
-    usequ(fst, pa, (x: a) => usequ(snd, pb, (y: b) => ulift((x, y))))
-  }
-
-  def pair[a,b](pa: SPU[a], pb: SPU[b]): SPU[(a,b)] = {
-    def fst(p: (a,b)): a = p._1
-    def snd(p: (a,b)): b = p._2
-    sequ(fst, pa, (x: a) => sequ(snd, pb, (y: b) => lift((x, y))))
-  }
-
-  def triple[a,b,c](pa: SPU[a], pb: SPU[b], pc: SPU[c]): SPU[(a,b,c)] = {
-    def fst(p: (a,b,c)): a = p._1
-    def snd(p: (a,b,c)): b = p._2
-    def trd(p: (a,b,c)): c = p._3
-
-    sequ(fst, pa,
-         (x: a) => sequ(snd, pb,
-         (y: b) => sequ(trd, pc,
-         (z: c) => lift((x, y, z)))))
-  }
-
-  def uwrap[a,b](i: a => b, j: b => a, pa: PU[a]): PU[b] =
-    usequ(j, pa, (x: a) => ulift(i(x)))
-
-  def wrap[a,b](i: a => b, j: b => a, pa: SPU[a]): SPU[b] =
-    sequ(j, pa, (x: a) => lift(i(x)))
-
-  def appendByte(a: Array[Byte], b: Int): Array[Byte] =
-    Array.concat(a, Array(b.toByte))
-
-  def nat2Bytes(x: Int): Array[Byte] = {
-    val buf = new mutable.ArrayBuffer[Byte]
-    def writeNatPrefix(x: Int) {
-      val y = x >>> 7;
-      if (y != 0) writeNatPrefix(y);
-      buf += ((x & 0x7f) | 0x80).asInstanceOf[Byte];
-    }
-    val y = x >>> 7;
-    if (y != 0) writeNatPrefix(y);
-    buf += (x & 0x7f).asInstanceOf[Byte];
-    buf.toArray
-  }
-
-  def nat: SPU[Int] = new SPU[Int] {
-    def appP(n: Int, s: PicklerState): PicklerState = {
-      new PicklerState(Array.concat(s.stream, nat2Bytes(n)), s.dict);
-    }
-    def appU(s: UnPicklerState): (Int,UnPicklerState) = {
-      var num = 0
-      def readNat: Int = {
-        var b = 0
-        var x = 0
-        do {
-          b = s.stream(num)
-          num += 1
-          x = (x << 7) + (b & 0x7f);
-        } while ((b & 0x80) != 0);
-        x
-      }
-      (readNat, new UnPicklerState(s.stream.slice(num, s.stream.length), s.dict))
-    }
-  }
-
-  def byte: SPU[Byte] = new SPU[Byte] {
-    def appP(b: Byte, s: PicklerState): PicklerState =
-      new PicklerState(Array.concat(s.stream, Array(b)), s.dict)
-    def appU(s: UnPicklerState): (Byte, UnPicklerState) =
-      (s.stream(0), new UnPicklerState(s.stream.slice(1, s.stream.length), s.dict));
-  }
-
-  def string: SPU[String] = share(wrap(
-    (a: Array[Byte]) => (Codec fromUTF8 a).mkString,
-    (s: String) => Codec toUTF8 s,
-    bytearray
-  ))
-
-  def bytearray: SPU[Array[Byte]] = {
-    wrap((l:List[Byte]) => l.toArray, (_.toList), list(byte))
-  }
-
-  def bool: SPU[Boolean] = {
-    def toEnum(b: Boolean) = if (b) 1 else 0
-    def fromEnum(n: Int) = if (n == 0) false else true
-    wrap(fromEnum, toEnum, nat)
-  }
-
-  def ufixedList[A](pa: PU[A])(n: Int): PU[List[A]] = {
-    def pairToList(p: (A, List[A])): List[A] =
-      p._1 :: p._2;
-    def listToPair(l: List[A]): (A, List[A]) =
-      (l: @unchecked) match { case x :: xs => (x, xs) }
-
-    if (n == 0) ulift(Nil)
-    else
-      uwrap(pairToList, listToPair, upair(pa, ufixedList(pa)(n-1)))
-  }
-
-  def fixedList[a](pa: SPU[a])(n: Int): SPU[List[a]] = {
-    def pairToList(p: (a,List[a])): List[a] =
-      p._1 :: p._2;
-    def listToPair(l: List[a]): (a,List[a]) =
-      (l: @unchecked) match { case x :: xs => (x, xs) }
-
-    if (n == 0) lift(Nil)
-    else
-      wrap(pairToList, listToPair, pair(pa, fixedList(pa)(n-1)))
-  }
-
-  def list[a](pa: SPU[a]): SPU[List[a]] =
-    sequ((l: List[a])=>l.length, nat, fixedList(pa));
-
-  def ulist[a](pa: PU[a]): PU[List[a]] =
-    usequ((l:List[a]) => l.length, unat, ufixedList(pa));
-
-  def data[a](tag: a => Int, ps: List[()=>SPU[a]]): SPU[a] =
-    sequ(tag, nat, (x: Int)=> ps.apply(x)());
-}
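
For orientation, the sharing picklers (SPU) removed above compose in standard pickler-combinator style: pair sequences two picklers, wrap adapts a pickler across an isomorphism, list length-prefixes its elements with nat, and string is built the same way on top of bytearray. A minimal sketch of how they fit together, assuming the combinators defined in the removed file are in scope (the Person type is purely illustrative):

  case class Person(name: String, age: Int)

  // wrap turns an SPU[(String, Int)] into an SPU[Person] by supplying
  // both directions of the isomorphism; pair sequences the components.
  val person: SPU[Person] =
    wrap((p: (String, Int)) => Person(p._1, p._2),
         (q: Person)        => (q.name, q.age),
         pair(string, nat))

  // list(person) writes a nat length prefix followed by each element.
  val people: SPU[List[Person]] = list(person)
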
diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala
index 5d046e4..60f9919 100644
--- a/src/library/scala/io/Codec.scala
+++ b/src/library/scala/io/Codec.scala
@@ -6,8 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-package scala.io
+package scala
+package io
 
 import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodingException, CodingErrorAction => Action }
 import scala.annotation.migration
@@ -43,42 +43,37 @@ class Codec(val charSet: Charset) {
   override def toString = name
 
   // these methods can be chained to configure the variables above
-  def onMalformedInput(newAction: Action): this.type = { _onMalformedInput = newAction ; this }
-  def onUnmappableCharacter(newAction: Action): this.type = { _onUnmappableCharacter = newAction ; this }
-  def decodingReplaceWith(newReplacement: String): this.type = { _decodingReplacement = newReplacement ; this }
+  def onMalformedInput(newAction: Action): this.type              = { _onMalformedInput = newAction ; this }
+  def onUnmappableCharacter(newAction: Action): this.type         = { _onUnmappableCharacter = newAction ; this }
+  def decodingReplaceWith(newReplacement: String): this.type      = { _decodingReplacement = newReplacement ; this }
   def encodingReplaceWith(newReplacement: Array[Byte]): this.type = { _encodingReplacement = newReplacement ; this }
-  def onCodingException(handler: Handler): this.type = { _onCodingException = handler ; this }
+  def onCodingException(handler: Handler): this.type              = { _onCodingException = handler ; this }
 
   def name = charSet.name
-  def encoder =
-    applyFunctions[CharsetEncoder](charSet.newEncoder(),
-      (_ onMalformedInput _onMalformedInput, _onMalformedInput != null),
-      (_ onUnmappableCharacter _onUnmappableCharacter, _onUnmappableCharacter != null),
-      (_ replaceWith _encodingReplacement, _encodingReplacement != null)
-    )
-
-  def decoder =
-    applyFunctions[CharsetDecoder](charSet.newDecoder(),
-      (_ onMalformedInput _onMalformedInput, _onMalformedInput != null),
-      (_ onUnmappableCharacter _onUnmappableCharacter, _onUnmappableCharacter != null),
-      (_ replaceWith _decodingReplacement, _decodingReplacement != null)
-    )
+  def encoder: CharsetEncoder = {
+    val enc = charSet.newEncoder()
+    if (_onMalformedInput ne null) enc onMalformedInput _onMalformedInput
+    if (_onUnmappableCharacter ne null) enc onUnmappableCharacter _onUnmappableCharacter
+    if (_encodingReplacement ne null) enc replaceWith _encodingReplacement
+    enc
+  }
+  def decoder: CharsetDecoder = {
+    val dec = charSet.newDecoder()
+    if (_onMalformedInput ne null) dec onMalformedInput _onMalformedInput
+    if (_onUnmappableCharacter ne null) dec onUnmappableCharacter _onUnmappableCharacter
+    if (_decodingReplacement ne null) dec replaceWith _decodingReplacement
+    dec
+  }
 
   def wrap(body: => Int): Int =
     try body catch { case e: CharacterCodingException => _onCodingException(e) }
-
-  // call a series of side effecting methods on an object, finally returning the object
-  private def applyFunctions[T](x: T, fs: Configure[T]*) =
-    fs.foldLeft(x)((x, pair) => pair match {
-      case (f, cond) => if (cond) f(x) else x
-    })
 }
 
 trait LowPriorityCodecImplicits {
   self: Codec.type =>
 
   /** The Codec of Last Resort. */
-  implicit def fallbackSystemCodec: Codec = defaultCharsetCodec
+  implicit lazy val fallbackSystemCodec: Codec = defaultCharsetCodec
 }
 
 object Codec extends LowPriorityCodecImplicits {
@@ -90,9 +85,9 @@ object Codec extends LowPriorityCodecImplicits {
    *  the fact that you can influence anything at all via -Dfile.encoding
    *  as an accident, with any anomalies considered "not a bug".
    */
-  def defaultCharsetCodec                   = apply(Charset.defaultCharset)
-  def fileEncodingCodec                     = apply(scala.util.Properties.encodingString)
-  def default                               = defaultCharsetCodec
+  def defaultCharsetCodec = apply(Charset.defaultCharset)
+  def fileEncodingCodec   = apply(scala.util.Properties.encodingString)
+  def default             = defaultCharsetCodec
 
   def apply(encoding: String): Codec        = new Codec(Charset forName encoding)
   def apply(charSet: Charset): Codec        = new Codec(charSet)
@@ -130,7 +125,7 @@ object Codec extends LowPriorityCodecImplicits {
     bytes
   }
 
-  implicit def string2codec(s: String) = apply(s)
-  implicit def charset2codec(c: Charset) = apply(c)
-  implicit def decoder2codec(cd: CharsetDecoder) = apply(cd)
+  implicit def string2codec(s: String): Codec           = apply(s)
+  implicit def charset2codec(c: Charset): Codec         = apply(c)
+  implicit def decoder2codec(cd: CharsetDecoder): Codec = apply(cd)
 }
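
As a point of reference, the chainable configuration methods kept in the hunk above (each returns this.type) are typically used to build an implicit codec; a small, illustrative sketch (the file name is hypothetical):

  import java.nio.charset.CodingErrorAction
  import scala.io.{ Codec, Source }

  // Replace malformed or unmappable input instead of throwing.
  implicit val codec: Codec = Codec("UTF-8")
    .onMalformedInput(CodingErrorAction.REPLACE)
    .onUnmappableCharacter(CodingErrorAction.REPLACE)

  // Source.fromFile picks up the implicit codec when decoding.
  val text = Source.fromFile("example.txt").mkString
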
diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala
index daa4e10..011d0f1 100644
--- a/src/library/scala/io/Position.scala
+++ b/src/library/scala/io/Position.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.io
+package scala
+package io
 
 /** The object Position provides convenience methods to encode
  *  line and column number in one single integer.  The encoded line
@@ -33,7 +34,7 @@ package scala.io
  *  @author Burak Emir (translated from work by Matthias Zenger and others)
  */
 @deprecated("This class will be removed.", "2.10.0")
-abstract class Position {
+private[scala] abstract class Position {
   /** Definable behavior for overflow conditions.
    */
   def checkInput(line: Int, column: Int): Unit
@@ -67,15 +68,7 @@ abstract class Position {
   def toString(pos: Int): String = line(pos) + ":" + column(pos)
 }
 
-object Position extends Position {
-  /** The undefined position */
-  @deprecated("This will be removed", "2.9.0")
-  final val NOPOS = 0
-
-  /** The first position in a source file */
-  @deprecated("This will be removed", "2.9.0")
-  final val FIRSTPOS = encode(1, 1)
-
+private[scala] object Position extends Position {
   def checkInput(line: Int, column: Int) {
     if (line < 0)
       throw new IllegalArgumentException(line + " < 0")
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index b13729a..74c3e06 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.io
+package scala
+package io
 
 import scala.collection.AbstractIterator
 import java.io.{ FileInputStream, InputStream, PrintStream, File => JFile }
@@ -194,11 +195,11 @@ abstract class Source extends Iterator[Char] {
     lazy val iter: BufferedIterator[Char] = Source.this.iter.buffered
     def isNewline(ch: Char) = ch == '\r' || ch == '\n'
     def getc() = iter.hasNext && {
-      val ch = iter.next
+      val ch = iter.next()
       if (ch == '\n') false
       else if (ch == '\r') {
         if (iter.hasNext && iter.head == '\n')
-          iter.next
+          iter.next()
 
         false
       }
@@ -209,7 +210,7 @@ abstract class Source extends Iterator[Char] {
     }
     def hasNext = iter.hasNext
     def next = {
-      sb.clear
+      sb.clear()
       while (getc()) { }
       sb.toString
     }
@@ -227,7 +228,7 @@ abstract class Source extends Iterator[Char] {
 
   /** Returns next character.
    */
-  def next(): Char = positioner.next
+  def next(): Char = positioner.next()
 
   class Positioner(encoder: Position) {
     def this() = this(RelaxedPosition)
@@ -245,7 +246,7 @@ abstract class Source extends Iterator[Char] {
     var tabinc = 4
 
     def next(): Char = {
-      ch = iter.next
+      ch = iter.next()
       pos = encoder.encode(cline, ccol)
       ch match {
         case '\n' =>
@@ -267,7 +268,7 @@ abstract class Source extends Iterator[Char] {
   }
   object RelaxedPositioner extends Positioner(RelaxedPosition) { }
   object NoPositioner extends Positioner(Position) {
-    override def next(): Char = iter.next
+    override def next(): Char = iter.next()
   }
   def ch = positioner.ch
   def pos = positioner.pos
diff --git a/src/library/scala/io/StdIn.scala b/src/library/scala/io/StdIn.scala
new file mode 100644
index 0000000..64836ec
--- /dev/null
+++ b/src/library/scala/io/StdIn.scala
@@ -0,0 +1,229 @@
+package scala
+package io
+
+import java.text.MessageFormat
+
+/** private[scala] because this is not functionality we should be providing
+ *  in the standard library, at least not in this idiosyncratic form.
+ *  Factored into trait because it is better code structure regardless.
+ */
+private[scala] trait StdIn {
+  import scala.Console._
+
+  /** Read a full line from the default input.  Returns `null` if the end of the
+   * input stream has been reached.
+   *
+   * @return the string read from the terminal or null if the end of stream was reached.
+   */
+  def readLine(): String = in.readLine()
+
+  /** Print and flush formatted text to the default output, and read a full line from the default input.
+   *  Returns `null` if the end of the input stream has been reached.
+   *
+   *  @param text the format of the text to print out, as in `printf`.
+   *  @param args the parameters used to instantiate the format, as in `printf`.
+   *  @return the string read from the default input
+   */
+  def readLine(text: String, args: Any*): String = {
+    printf(text, args: _*)
+    out.flush()
+    readLine()
+  }
+
+  /** Reads a boolean value from an entire line of the default input.
+   *  Has a fairly liberal interpretation of the input.
+   *
+   *  @return the boolean value read, or false if it couldn't be converted to a boolean
+   *  @throws java.io.EOFException if the end of the input stream has been reached.
+   */
+  def readBoolean(): Boolean = {
+    val s = readLine()
+    if (s == null)
+      throw new java.io.EOFException("Console has reached end of input")
+    else
+      s.toLowerCase() match {
+        case "true" => true
+        case "t" => true
+        case "yes" => true
+        case "y" => true
+        case _ => false
+      }
+  }
+
+  /** Reads a byte value from an entire line of the default input.
+   *
+   *  @return the Byte that was read
+   *  @throws java.io.EOFException if the end of the
+   *  input stream has been reached.
+   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Byte
+   */
+  def readByte(): Byte = {
+    val s = readLine()
+    if (s == null)
+      throw new java.io.EOFException("Console has reached end of input")
+    else
+      s.toByte
+  }
+
+  /** Reads a short value from an entire line of the default input.
+   *
+   *  @return the short that was read
+   *  @throws java.io.EOFException if the end of the
+   *  input stream has been reached.
+   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Short
+   */
+  def readShort(): Short = {
+    val s = readLine()
+    if (s == null)
+      throw new java.io.EOFException("Console has reached end of input")
+    else
+      s.toShort
+  }
+
+  /** Reads a char value from an entire line of the default input.
+   *
+   *  @return the Char that was read
+   *  @throws java.io.EOFException if the end of the
+   *  input stream has been reached.
+   *  @throws java.lang.StringIndexOutOfBoundsException if the line read from default input was empty
+   */
+  def readChar(): Char = {
+    val s = readLine()
+    if (s == null)
+      throw new java.io.EOFException("Console has reached end of input")
+    else
+      s charAt 0
+  }
+
+  /** Reads an int value from an entire line of the default input.
+   *
+   *  @return the Int that was read
+   *  @throws java.io.EOFException if the end of the
+   *  input stream has been reached.
+   *  @throws java.lang.NumberFormatException if the value couldn't be converted to an Int
+   */
+  def readInt(): Int = {
+    val s = readLine()
+    if (s == null)
+      throw new java.io.EOFException("Console has reached end of input")
+    else
+      s.toInt
+  }
+
+  /** Reads a long value from an entire line of the default input.
+   *
+   *  @return the Long that was read
+   *  @throws java.io.EOFException if the end of the
+   *  input stream has been reached.
+   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Long
+   */
+  def readLong(): Long = {
+    val s = readLine()
+    if (s == null)
+      throw new java.io.EOFException("Console has reached end of input")
+    else
+      s.toLong
+  }
+
+  /** Reads a float value from an entire line of the default input.
+   *  @return the Float that was read.
+   *  @throws java.io.EOFException if the end of the
+   *  input stream has been reached.
+   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Float
+   *
+   */
+  def readFloat(): Float = {
+    val s = readLine()
+    if (s == null)
+      throw new java.io.EOFException("Console has reached end of input")
+    else
+      s.toFloat
+  }
+
+  /** Reads a double value from an entire line of the default input.
+   *
+   *  @return the Double that was read.
+   *  @throws java.io.EOFException if the end of the
+   *  input stream has been reached.
+   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Double
+   */
+  def readDouble(): Double = {
+    val s = readLine()
+    if (s == null)
+      throw new java.io.EOFException("Console has reached end of input")
+    else
+      s.toDouble
+  }
+
+  /** Reads in some structured input (from the default input), specified by
+   *  a format specifier. See class `java.text.MessageFormat` for details of
+   *  the format specification.
+   *
+   *  @param format the format of the input.
+   *  @return a list of all extracted values.
+   *  @throws java.io.EOFException if the end of the input stream has been
+   *          reached.
+   */
+  def readf(format: String): List[Any] = {
+    val s = readLine()
+    if (s == null)
+      throw new java.io.EOFException("Console has reached end of input")
+    else
+      textComponents(new MessageFormat(format).parse(s))
+  }
+
+  /** Reads in some structured input (from the default input), specified by
+   *  a format specifier, returning only the first value extracted, according
+   *  to the format specification.
+   *
+   *  @param format format string, as accepted by `readf`.
+   *  @return The first value that was extracted from the input
+   */
+  def readf1(format: String): Any = readf(format).head
+
+  /** Reads in some structured input (from the default input), specified
+   *  by a format specifier, returning only the first two values extracted,
+   *  according to the format specification.
+   *
+   *  @param format format string, as accepted by `readf`.
+   *  @return A [[scala.Tuple2]] containing the first two values extracted
+   */
+  def readf2(format: String): (Any, Any) = {
+    val res = readf(format)
+    (res.head, res.tail.head)
+  }
+
+  /** Reads in some structured input (from the default input), specified
+   *  by a format specifier, returning only the first three values extracted,
+   *  according to the format specification.
+   *
+   *  @param format format string, as accepted by `readf`.
+   *  @return A [[scala.Tuple3]] containing the first three values extracted
+   */
+  def readf3(format: String): (Any, Any, Any) = {
+    val res = readf(format)
+    (res.head, res.tail.head, res.tail.tail.head)
+  }
+
+  private def textComponents(a: Array[AnyRef]): List[Any] = {
+    var i: Int = a.length - 1
+    var res: List[Any] = Nil
+    while (i >= 0) {
+      res = (a(i) match {
+        case x: java.lang.Boolean   => x.booleanValue()
+        case x: java.lang.Byte      => x.byteValue()
+        case x: java.lang.Short     => x.shortValue()
+        case x: java.lang.Character => x.charValue()
+        case x: java.lang.Integer   => x.intValue()
+        case x: java.lang.Long      => x.longValue()
+        case x: java.lang.Float     => x.floatValue()
+        case x: java.lang.Double    => x.doubleValue()
+        case x => x
+      }) :: res
+      i -= 1
+    }
+    res
+  }
+}
+
+object StdIn extends StdIn
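
A short usage sketch for the scala.io.StdIn facade added above (console interaction, values hypothetical):

  import scala.io.StdIn

  // readLine(text, args*) printf-formats the prompt, flushes, then reads a line.
  val name = StdIn.readLine("What is your name? ")
  // readInt() reads an entire line and converts it, throwing NumberFormatException on bad input.
  val age  = StdIn.readInt()
  println(s"$name is $age years old")
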
diff --git a/src/library/scala/io/UTF8Codec.scala b/src/library/scala/io/UTF8Codec.scala
deleted file mode 100644
index e4c2145..0000000
--- a/src/library/scala/io/UTF8Codec.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.io
-
-/**
- *  @author  Martin Odersky
- *  @version 1.0, 04/10/2004
- */
- at deprecated("This class will be removed.", "2.10.0")
-object UTF8Codec {
-  final val UNI_REPLACEMENT_CHAR: Int = 0x0000FFFD
-  final val UNI_REPLACEMENT_BYTES = Array[Byte](-17, -65, -67)
-
-  // Note, from http://unicode.org/faq/utf_bom.html#utf8-5
-  //
-  // A different issue arises if an unpaired surrogate is encountered when converting
-  // ill-formed UTF-16 data. By represented such an unpaired surrogate on its own as a
-  // 3-byte sequence, the resulting UTF-8 data stream would become ill-formed.
-  // While it faithfully reflects the nature of the input, Unicode conformance
-  // requires that encoding form conversion always results in valid data stream.
-  // Therefore a converter must treat this as an error.
-  //
-  // Some useful locations:
-  //    http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt
-}
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 62528e1..bcbed64 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.math
+package scala
+package math
 
 import java.{ lang => jl }
 import java.math.{ MathContext, BigDecimal => BigDec }
@@ -15,41 +16,165 @@ import scala.collection.immutable.NumericRange
 import scala.language.implicitConversions
 
 
-/**
+/** 
  *  @author  Stephane Micheloud
- *  @version 1.0
+ *  @author  Rex Kerr
+ *  @version 1.1
  *  @since 2.7
  */
 object BigDecimal {
+  private final val maximumHashScale = 4934           // Quit maintaining hash identity with BigInt beyond this scale
+  private final val hashCodeNotComputed = 0x5D50690F  // Magic value (happens to be "BigDecimal" old MurmurHash3 value)
+  private final val deci2binary = 3.3219280948873626  // Ratio of log(10) to log(2)
   private val minCached = -512
   private val maxCached = 512
   val defaultMathContext = MathContext.DECIMAL128
 
-  @deprecated("Use Long.MinValue", "2.9.0")
-  val MinLong = new BigDecimal(BigDec valueOf Long.MinValue, defaultMathContext)
-
-  @deprecated("Use Long.MaxValue", "2.9.0")
-  val MaxLong = new BigDecimal(BigDec valueOf Long.MaxValue, defaultMathContext)
-
-  /** Cache ony for defaultMathContext using BigDecimals in a small range. */
+  /** Cache only for defaultMathContext using BigDecimals in a small range. */
   private lazy val cache = new Array[BigDecimal](maxCached - minCached + 1)
 
   object RoundingMode extends Enumeration {
+    // Annoying boilerplate to ensure consistency with java.math.RoundingMode
+    import java.math.{RoundingMode => RM}
     type RoundingMode = Value
-    // These are supposed to be the same as java.math.RoundingMode.values,
-    // though it seems unwise to rely on the correspondence.
-    val UP, DOWN, CEILING, FLOOR, HALF_UP, HALF_DOWN, HALF_EVEN, UNNECESSARY = Value
+    val UP          = Value(RM.UP.ordinal)
+    val DOWN        = Value(RM.DOWN.ordinal)
+    val CEILING     = Value(RM.CEILING.ordinal)
+    val FLOOR       = Value(RM.FLOOR.ordinal)
+    val HALF_UP     = Value(RM.HALF_UP.ordinal)
+    val HALF_DOWN   = Value(RM.HALF_DOWN.ordinal)
+    val HALF_EVEN   = Value(RM.HALF_EVEN.ordinal)
+    val UNNECESSARY = Value(RM.UNNECESSARY.ordinal) 
   }
+  
+  /** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`, rounding if necessary. */
+  def decimal(d: Double, mc: MathContext): BigDecimal =
+    new BigDecimal(new BigDec(java.lang.Double.toString(d), mc))
+
+  /** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`. */
+  def decimal(d: Double): BigDecimal = decimal(d, defaultMathContext)
+  
+  /** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`, rounding if necessary. 
+   *  Note that `BigDecimal.decimal(0.1f) != 0.1f` since equality agrees with the `Double` representation, and
+   *  `0.1 != 0.1f`.
+   */
+  def decimal(f: Float, mc: MathContext): BigDecimal =
+    new BigDecimal(new BigDec(java.lang.Float.toString(f), mc))
+
+  /** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`.
+   *  Note that `BigDecimal.decimal(0.1f) != 0.1f` since equality agrees with the `Double` representation, and
+   *  `0.1 != 0.1f`.
+   */
+  def decimal(f: Float): BigDecimal = decimal(f, defaultMathContext)
+  
+  // This exists solely to avoid conversion from Int/Long to Float, screwing everything up.
+  /** Constructs a `BigDecimal` from a `Long`, rounding if necessary.  This is identical to `BigDecimal(l, mc)`. */
+  def decimal(l: Long, mc: MathContext): BigDecimal = apply(l, mc)
+  
+  // This exists solely to avoid conversion from Int/Long to Float, screwing everything up.
+  /** Constructs a `BigDecimal` from a `Long`.  This is identical to `BigDecimal(l)`. */
+  def decimal(l: Long): BigDecimal = apply(l)
+  
+  /** Constructs a `BigDecimal` using a `java.math.BigDecimal`, rounding if necessary. */
+  def decimal(bd: BigDec, mc: MathContext): BigDecimal = new BigDecimal(bd.round(mc), mc)
+  
+  /** Constructs a `BigDecimal` by expanding the binary fraction
+   *  contained by `Double` value `d` into a decimal representation,
+   *  rounding if necessary.  When a `Float` is converted to a
+   *  `Double`, the binary fraction is preserved, so this method
+   *  also works for converted `Float`s.
+   */
+  def binary(d: Double, mc: MathContext): BigDecimal = new BigDecimal(new BigDec(d, mc), mc)
+  
+  /** Constructs a `BigDecimal` by expanding the binary fraction
+   *  contained by `Double` value `d` into a decimal representation.
+   *  Note: this also works correctly on converted `Float`s.
+   */
+  def binary(d: Double): BigDecimal = binary(d, defaultMathContext)
+  
+  /** Constructs a `BigDecimal` from a `java.math.BigDecimal`.  The
+   *  precision is the default for `BigDecimal` or enough to represent
+   *  the `java.math.BigDecimal` exactly, whichever is greater.
+   */
+  def exact(repr: BigDec): BigDecimal = {
+    val mc = 
+      if (repr.precision <= defaultMathContext.getPrecision) defaultMathContext
+      else new MathContext(repr.precision, java.math.RoundingMode.HALF_EVEN)
+    new BigDecimal(repr, mc)
+  }
+  
+  /** Constructs a `BigDecimal` by fully expanding the binary fraction
+   *  contained by `Double` value `d`, adjusting the precision as
+   *  necessary.  Note: this works correctly on converted `Float`s also.
+   */
+  def exact(d: Double): BigDecimal = exact(new BigDec(d))
+  
+  /** Constructs a `BigDecimal` that exactly represents a `BigInt`.
+   */
+  def exact(bi: BigInt): BigDecimal = exact(new BigDec(bi.bigInteger))
+  
+  /** Constructs a `BigDecimal` that exactly represents a `Long`.  Note that
+   *  all creation methods for `BigDecimal` that do not take a `MathContext`
+   *  represent a `Long`; this is equivalent to `apply`, `valueOf`, etc.
+   */
+  def exact(l: Long): BigDecimal = apply(l)
+  
+  /** Constructs a `BigDecimal` that exactly represents the number
+   *  specified in a `String`.
+   */
+  def exact(s: String): BigDecimal = exact(new BigDec(s))
+  
+  /** Constructs a 'BigDecimal` that exactly represents the number
+   *  specified in base 10 in a character array.
+   */
+ def exact(cs: Array[Char]): BigDecimal = exact(new BigDec(cs))
+  
 
   /** Constructs a `BigDecimal` using the java BigDecimal static
-   *  valueOf constructor.
+   *  valueOf constructor.  Equivalent to `BigDecimal.decimal`.
    *
    *  @param  d the specified double value
    *  @return the constructed `BigDecimal`
    */
   def valueOf(d: Double): BigDecimal = apply(BigDec valueOf d)
+  
+  /** Constructs a `BigDecimal` using the java BigDecimal static
+   *  valueOf constructor, specifying a `MathContext` that is
+   *  used for computations but isn't used for rounding.  Use
+   *  `BigDecimal.decimal` to use `MathContext` for rounding,
+   *  or `BigDecimal(java.math.BigDecimal.valueOf(d), mc)` for
+   *  no rounding.
+   *
+   *  @param  d the specified double value
+   *  @param  mc the `MathContext` used for future computations
+   *  @return the constructed `BigDecimal`
+   */
+  @deprecated("MathContext is not applied to Doubles in valueOf.  Use BigDecimal.decimal to use rounding, or java.math.BigDecimal.valueOf to avoid it.","2.11")
   def valueOf(d: Double, mc: MathContext): BigDecimal = apply(BigDec valueOf d, mc)
+  
+  /** Constructs a `BigDecimal` using the java BigDecimal static
+   *  valueOf constructor.
+   *
+   *  @param  x the specified `Long` value
+   *  @return the constructed `BigDecimal`
+   */
+  def valueOf(x: Long): BigDecimal = apply(x)
+  
+  /** Constructs a `BigDecimal` using the java BigDecimal static
+   *  valueOf constructor.  This is unlikely to do what you want;
+   *  use `valueOf(f.toDouble)` or `decimal(f)` instead.
+   */
+  @deprecated("Float arguments to valueOf may not do what you wish.  Use decimal or valueOf(f.toDouble).","2.11")
+  def valueOf(f: Float): BigDecimal = valueOf(f.toDouble)
+  
+  /** Constructs a `BigDecimal` using the java BigDecimal static
+   *  valueOf constructor.  This is unlikely to do what you want;
+   *  use `valueOf(f.toDouble)` or `decimal(f)` instead.
+   */
+  @deprecated("Float arguments to valueOf may not do what you wish.  Use decimal or valueOf(f.toDouble).","2.11")
+  def valueOf(f: Float, mc: MathContext): BigDecimal = valueOf(f.toDouble, mc)
 
+  
   /** Constructs a `BigDecimal` whose value is equal to that of the
    *  specified `Integer` value.
    *
@@ -57,14 +182,22 @@ object BigDecimal {
    *  @return  the constructed `BigDecimal`
    */
   def apply(i: Int): BigDecimal = apply(i, defaultMathContext)
+
+  /** Constructs a `BigDecimal` whose value is equal to that of the
+   *  specified `Integer` value, rounding if necessary.
+   *
+   *  @param i the specified integer value
+   *  @param mc the precision and rounding mode for creation of this value and future operations on it
+   *  @return  the constructed `BigDecimal`
+   */
   def apply(i: Int, mc: MathContext): BigDecimal =
     if (mc == defaultMathContext && minCached <= i && i <= maxCached) {
       val offset = i - minCached
       var n = cache(offset)
-      if (n eq null) { n = new BigDecimal(BigDec.valueOf(i), mc); cache(offset) = n }
+      if (n eq null) { n = new BigDecimal(BigDec.valueOf(i.toLong), mc); cache(offset) = n }
       n
     }
-    else new BigDecimal(BigDec.valueOf(i), mc)
+    else apply(i.toLong, mc)
 
   /** Constructs a `BigDecimal` whose value is equal to that of the
    *  specified long value.
@@ -76,6 +209,13 @@ object BigDecimal {
     if (minCached <= l && l <= maxCached) apply(l.toInt)
     else new BigDecimal(BigDec.valueOf(l), defaultMathContext)
 
+  /** Constructs a `BigDecimal` whose value is equal to that of the
+   *  specified long value, but rounded if necessary.
+   *
+   *  @param l the specified long value
+   *  @param mc the precision and rounding mode for creation of this value and future operations on it
+   *  @return  the constructed `BigDecimal`
+   */
   def apply(l: Long, mc: MathContext): BigDecimal =
     new BigDecimal(new BigDec(l, mc), mc)
 
@@ -89,32 +229,62 @@ object BigDecimal {
   def apply(unscaledVal: Long, scale: Int): BigDecimal =
     apply(BigInt(unscaledVal), scale)
 
+  /** Constructs a `BigDecimal` whose unscaled value is equal to that
+   *  of the specified long value, but rounded if necessary.
+   *
+   *  @param  unscaledVal the value
+   *  @param  scale       the scale
+   *  @param mc the precision and rounding mode for creation of this value and future operations on it
+   *  @return the constructed `BigDecimal`
+   */
   def apply(unscaledVal: Long, scale: Int, mc: MathContext): BigDecimal =
     apply(BigInt(unscaledVal), scale, mc)
 
   /** Constructs a `BigDecimal` whose value is equal to that of the
-   *  specified double value.
+   *  specified double value.  Equivalent to `BigDecimal.decimal`.
    *
    *  @param d the specified `Double` value
    *  @return  the constructed `BigDecimal`
    */
-  def apply(d: Double): BigDecimal = apply(d, defaultMathContext)
+  def apply(d: Double): BigDecimal = decimal(d, defaultMathContext)
+  
   // note we don't use the static valueOf because it doesn't let us supply
   // a MathContext, but we should be duplicating its logic, modulo caching.
-  def apply(d: Double, mc: MathContext): BigDecimal =
-    new BigDecimal(new BigDec(jl.Double.toString(d), mc), mc)
+  /** Constructs a `BigDecimal` whose value is equal to that of the
+   *  specified double value, but rounded if necessary.  Equivalent to
+   *  `BigDecimal.decimal`.
+   *
+   *  @param d the specified `Double` value
+   *  @param mc the precision and rounding mode for creation of this value and future operations on it
+   *  @return  the constructed `BigDecimal`
+   */
+  def apply(d: Double, mc: MathContext): BigDecimal = decimal(d, mc)
+
+  @deprecated("The default conversion from Float may not do what you want.  Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11")
+  def apply(x: Float): BigDecimal = apply(x.toDouble)
+
+  @deprecated("The default conversion from Float may not do what you want.  Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11")
+  def apply(x: Float, mc: MathContext): BigDecimal = apply(x.toDouble, mc)
 
   /** Translates a character array representation of a `BigDecimal`
    *  into a `BigDecimal`.
    */
-  def apply(x: Array[Char]): BigDecimal = apply(x, defaultMathContext)
+  def apply(x: Array[Char]): BigDecimal = exact(x)
+
+  /** Translates a character array representation of a `BigDecimal`
+   *  into a `BigDecimal`, rounding if necessary.
+   */
   def apply(x: Array[Char], mc: MathContext): BigDecimal =
-    new BigDecimal(new BigDec(x.mkString, mc), mc)
+    new BigDecimal(new BigDec(x, mc), mc)
 
   /** Translates the decimal String representation of a `BigDecimal`
    *  into a `BigDecimal`.
    */
-  def apply(x: String): BigDecimal = apply(x, defaultMathContext)
+  def apply(x: String): BigDecimal = exact(x)
+  
+  /** Translates the decimal String representation of a `BigDecimal`
+   *  into a `BigDecimal`, rounding if necessary.
+   */
   def apply(x: String, mc: MathContext): BigDecimal =
     new BigDecimal(new BigDec(x, mc), mc)
 
@@ -124,7 +294,15 @@ object BigDecimal {
    *  @param x the specified `BigInt` value
    *  @return  the constructed `BigDecimal`
    */
-  def apply(x: BigInt): BigDecimal = apply(x, defaultMathContext)
+  def apply(x: BigInt): BigDecimal = exact(x)
+  
+  /** Constructs a `BigDecimal` whose value is equal to that of the
+   *  specified `BigInt` value, rounding if necessary.
+   *
+   *  @param x  the specified `BigInt` value
+   *  @param mc the precision and rounding mode for creation of this value and future operations on it   
+   *  @return   the constructed `BigDecimal`
+   */
   def apply(x: BigInt, mc: MathContext): BigDecimal =
     new BigDecimal(new BigDec(x.bigInteger, mc), mc)
 
@@ -135,11 +313,24 @@ object BigDecimal {
    *  @param scale       the scale
    *  @return  the constructed `BigDecimal`
    */
-  def apply(unscaledVal: BigInt, scale: Int): BigDecimal = apply(unscaledVal, scale, defaultMathContext)
+  def apply(unscaledVal: BigInt, scale: Int): BigDecimal =
+    exact(new BigDec(unscaledVal.bigInteger, scale))
+  
+  /** Constructs a `BigDecimal` whose unscaled value is equal to that
+   *  of the specified `BigInt` value.
+   *
+   *  @param unscaledVal the specified `BigInt` value
+   *  @param scale       the scale
+   *  @param mc          the precision and rounding mode for creation of this value and future operations on it   
+   *  @return  the constructed `BigDecimal`
+   */
   def apply(unscaledVal: BigInt, scale: Int, mc: MathContext): BigDecimal =
     new BigDecimal(new BigDec(unscaledVal.bigInteger, scale, mc), mc)
 
+  /** Constructs a `BigDecimal` from a `java.math.BigDecimal`. */
   def apply(bd: BigDec): BigDecimal = apply(bd, defaultMathContext)
+  
+  @deprecated("This method appears to round a java.math.BigDecimal but actually doesn't.  Use new BigDecimal(bd, mc) instead for no rounding, or BigDecimal.decimal(bd, mc) for rounding.", "2.11")
   def apply(bd: BigDec, mc: MathContext): BigDecimal = new BigDecimal(bd, mc)
 
   /** Implicit conversion from `Int` to `BigDecimal`. */
@@ -149,44 +340,123 @@ object BigDecimal {
   implicit def long2bigDecimal(l: Long): BigDecimal = apply(l)
 
   /** Implicit conversion from `Double` to `BigDecimal`. */
-  implicit def double2bigDecimal(d: Double): BigDecimal = valueOf(d, defaultMathContext)
+  implicit def double2bigDecimal(d: Double): BigDecimal = decimal(d)
 
   /** Implicit conversion from `java.math.BigDecimal` to `scala.BigDecimal`. */
   implicit def javaBigDecimal2bigDecimal(x: BigDec): BigDecimal = apply(x)
 }
 
 /**
+ *  `BigDecimal` represents decimal floating-point numbers of arbitrary precision.
+ *  By default, the precision approximately matches that of IEEE 128-bit floating
+ *  point numbers (34 decimal digits, `HALF_EVEN` rounding mode).  Within the range
+ *  of IEEE binary128 numbers, `BigDecimal` will agree with `BigInt` for both
+ *  equality and hash codes (and will agree with primitive types as well).  Beyond
+ *  that range--numbers with more than 4934 digits when written out in full--the
+ *  `hashCode` of `BigInt` and `BigDecimal` is allowed to diverge due to difficulty
+ *  in efficiently computing both the decimal representation in `BigDecimal` and the
+ *  binary representation in `BigInt`.
+ *
+ *  When creating a `BigDecimal` from a `Double` or `Float`, care must be taken as
+ *  the binary fraction representation of `Double` and `Float` does not easily
+ *  convert into a decimal representation.  Three explicit schemes are available
+ *  for conversion.  `BigDecimal.decimal` will convert the floating-point number
+ *  to a decimal text representation, and build a `BigDecimal` based on that.
+ *  `BigDecimal.binary` will expand the binary fraction to the requested or default
+ *  precision.  `BigDecimal.exact` will expand the binary fraction to the
+ *  full number of digits, thus producing the exact decimal value corresponding to
+ *  the binary fraction of that floating-point number.  `BigDecimal` equality
+ *  matches the decimal expansion of `Double`: `BigDecimal.decimal(0.1) == 0.1`.
+ *  Note that since `0.1f != 0.1`, the same is not true for `Float`.  Instead,
+ *  `0.1f == BigDecimal.decimal((0.1f).toDouble)`.
+ *
+ *  To test whether a `BigDecimal` number can be converted to a `Double` or
+ *  `Float` and then back without loss of information by using one of these
+ *  methods, test with `isDecimalDouble`, `isBinaryDouble`, or `isExactDouble`
+ *  or the corresponding `Float` versions.  Note that `BigInt`'s `isValidDouble`
+ *  will agree with `isExactDouble`, not the `isDecimalDouble` used by default.
+ *
+ *  `BigDecimal` uses the decimal representation of binary floating-point numbers
+ *  to determine equality and hash codes.  This yields different answers than
+ *  conversion between `Long` and `Double` values, where the exact form is used.
+ *  As always, since floating-point is a lossy representation, it is advisable to
+ *  take care when assuming identity will be maintained across multiple conversions.
+ *
+ *  `BigDecimal` maintains a `MathContext` that determines the rounding that
+ *  is applied to certain calculations.  In most cases, the value of the
+ *  `BigDecimal` is also rounded to the precision specified by the `MathContext`.
+ *  To create a `BigDecimal` with a different precision than its `MathContext`,
+ *  use `new BigDecimal(new java.math.BigDecimal(...), mc)`.  Rounding will
+ *  be applied on those mathematical operations that can dramatically change the
+ *  number of digits in a full representation, namely multiplication, division,
+ *  and powers.  The left-hand argument's `MathContext` always determines the
+ *  degree of rounding, if any, and is the one propagated through arithmetic
+ *  operations that do not apply rounding themselves.
+ *
  *  @author  Stephane Micheloud
- *  @version 1.0
+ *  @author  Rex Kerr
+ *  @version 1.1
  */
- at deprecatedInheritance("This class will be made final.", "2.10.0")
-class BigDecimal(
-  val bigDecimal: BigDec,
-  val mc: MathContext)
+final class BigDecimal(val bigDecimal: BigDec, val mc: MathContext)
 extends ScalaNumber with ScalaNumericConversions with Serializable {
   def this(bigDecimal: BigDec) = this(bigDecimal, BigDecimal.defaultMathContext)
   import BigDecimal.RoundingMode._
-
-  /** Cuts way down on the wrapper noise. */
-  private implicit def bigdec2BigDecimal(x: BigDec): BigDecimal = new BigDecimal(x, mc)
-
+  import BigDecimal.{decimal, binary, exact}
+  
+  if (bigDecimal eq null) throw new IllegalArgumentException("null value for BigDecimal")
+  if (mc eq null) throw new IllegalArgumentException("null MathContext for BigDecimal")
+
+  // There was an implicit to cut down on the wrapper noise for BigDec -> BigDecimal.
+  // However, this may mask introduction of surprising behavior (e.g. lack of rounding
+  // where one might expect it).  Wrappers should be applied explicitly with an
+  // eye to correctness.
+
+  // Sane hash code computation (which is surprisingly hard).
+  // Note--not lazy val because we can't afford the extra space.
+  private final var computedHashCode: Int = BigDecimal.hashCodeNotComputed
+  private final def computeHashCode(): Unit = {
+    computedHashCode =
+      if (isWhole && (precision - scale) < BigDecimal.maximumHashScale) toBigInt.hashCode
+      else if (isValidDouble) doubleValue.##
+      else {
+        val temp = bigDecimal.stripTrailingZeros
+        scala.util.hashing.MurmurHash3.mixLast( temp.scaleByPowerOfTen(temp.scale).toBigInteger.hashCode, temp.scale )
+      }
+  }
+  
   /** Returns the hash code for this BigDecimal.
-   *  Note that this does not use the underlying java object's
-   *  hashCode because we compare BigDecimals with compareTo
+   *  Note that this does not merely use the underlying java object's
+   *  `hashCode` because we compare `BigDecimal`s with `compareTo`
    *  which deems 2 == 2.00, whereas in java these are unequal
-   *  with unequal hashCodes.
-   */
-  override def hashCode(): Int =
-    if (isWhole) unifiedPrimitiveHashcode
-    else doubleValue.##
+   *  with unequal `hashCode`s.  These hash codes agree with `BigInt`
+   *  for whole numbers up to ~4934 digits (the range of IEEE 128 bit floating
+   *  point).  Beyond this, hash codes will disagree; this prevents the
+   *  explicit representation of the `BigInt` form for `BigDecimal` values
+   *  with large exponents.
+   */
+  override def hashCode(): Int = {
+    if (computedHashCode == BigDecimal.hashCodeNotComputed) computeHashCode
+    computedHashCode
+  }
 
-  /** Compares this BigDecimal with the specified value for equality.
+  /** Compares this BigDecimal with the specified value for equality.  Where `Float` and `Double`
+   *  disagree, `BigDecimal` will agree with the `Double` value
    */
   override def equals (that: Any): Boolean = that match {
     case that: BigDecimal     => this equals that
-    case that: BigInt         => this.toBigIntExact exists (that equals _)
-    case that: Double         => isValidDouble && toDouble == that
-    case that: Float          => isValidFloat && toFloat == that
+    case that: BigInt         => 
+      that.bitLength > (precision-scale-2)*BigDecimal.deci2binary && 
+      this.toBigIntExact.exists(that equals _)
+    case that: Double         => 
+      !that.isInfinity && {
+        val d = toDouble
+        !d.isInfinity && d == that && equals(decimal(d))
+      }
+    case that: Float          => 
+      !that.isInfinity && {
+        val f = toFloat
+        !f.isInfinity && f == that && equals(decimal(f.toDouble))
+      }
     case _                    => isValidLong && unifiedPrimitiveEquals(that)
   }
   override def isValidByte  = noArithmeticException(toByteExact)
@@ -194,26 +464,71 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
   override def isValidChar  = isValidInt && toIntExact >= Char.MinValue && toIntExact <= Char.MaxValue
   override def isValidInt   = noArithmeticException(toIntExact)
   def isValidLong  = noArithmeticException(toLongExact)
-  /** Returns `true` iff this can be represented exactly by [[scala.Float]]; otherwise returns `false`.
+  /** Tests whether the value is a valid Float.  "Valid" has several distinct meanings, however.  Use
+    * `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat`, depending on the intended meaning.
+    * By default, `decimal` creation is used, so `isDecimalFloat` is probably what you want.
     */
+  @deprecated("What constitutes validity is unclear.  Use `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat` instead.", "2.11")
   def isValidFloat = {
     val f = toFloat
-    !f.isInfinity && bigDecimal.compareTo(new java.math.BigDecimal(f)) == 0
+    !f.isInfinity && bigDecimal.compareTo(new BigDec(f.toDouble)) == 0
   }
-  /** Returns `true` iff this can be represented exactly by [[scala.Double]]; otherwise returns `false`.
+  /** Tests whether the value is a valid Double.  "Valid" has several distinct meanings, however.  Use
+    * `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble`, depending on the intended meaning.
+    * By default, `decimal` creation is used, so `isDecimalDouble` is probably what you want.
     */
+  @deprecated("Validity has two distinct meanings.  Use `isExactBinaryDouble` or `equivalentToDouble` instead.", "2.11")
   def isValidDouble = {
     val d = toDouble
-    !d.isInfinity && bigDecimal.compareTo(new java.math.BigDecimal(d)) == 0
+    !d.isInfinity && bigDecimal.compareTo(new BigDec(d)) == 0
+  }
+  
+  /** Tests whether this `BigDecimal` holds the decimal representation of a `Double`. */
+  def isDecimalDouble = {
+    val d = toDouble
+    !d.isInfinity && equals(decimal(d))
+  }
+  
+  /** Tests whether this `BigDecimal` holds the decimal representation of a `Float`. */
+  def isDecimalFloat = {
+    val f = toFloat
+    !f.isInfinity && equals(decimal(f))
+  }
+  
+  /** Tests whether this `BigDecimal` holds, to within precision, the binary representation of a `Double`. */
+  def isBinaryDouble = {
+    val d = toDouble
+    !d.isInfinity && equals(binary(d,mc))
+  }
+  
+  /** Tests whether this `BigDecimal` holds, to within precision, the binary representation of a `Float`. */
+  def isBinaryFloat = {
+    val f = toFloat
+    !f.isInfinity && equals(binary(f,mc))
+  }
+  
+  /** Tests whether this `BigDecimal` holds the exact expansion of a `Double`'s binary fractional form into base 10. */
+  def isExactDouble = {
+    val d = toDouble
+    !d.isInfinity && equals(exact(d))
+  }
+  
+  /** Tests whether this `BigDecimal` holds the exact expansion of a `Float`'s binary fractional form into base 10. */
+  def isExactFloat = {
+    val f = toFloat
+    !f.isInfinity && equals(exact(f.toDouble))
   }
+  
 
   private def noArithmeticException(body: => Unit): Boolean = {
     try   { body ; true }
     catch { case _: ArithmeticException => false }
   }
 
-  def isWhole() = (this remainder 1) == BigDecimal(0)
+  def isWhole() = scale <= 0 || bigDecimal.stripTrailingZeros.scale <= 0
+  
   def underlying = bigDecimal
+  
 
   /** Compares this BigDecimal with the specified BigDecimal for equality.
    */
@@ -241,60 +556,66 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
 
   /** Addition of BigDecimals
    */
-  def +  (that: BigDecimal): BigDecimal = this.bigDecimal.add(that.bigDecimal)
+  def +  (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal add that.bigDecimal, mc)
 
   /** Subtraction of BigDecimals
    */
-  def -  (that: BigDecimal): BigDecimal = this.bigDecimal.subtract(that.bigDecimal)
+  def -  (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal subtract that.bigDecimal, mc)
 
   /** Multiplication of BigDecimals
    */
-  def *  (that: BigDecimal): BigDecimal = this.bigDecimal.multiply(that.bigDecimal, mc)
+  def *  (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.multiply(that.bigDecimal, mc), mc)
 
   /** Division of BigDecimals
    */
-  def /  (that: BigDecimal): BigDecimal = this.bigDecimal.divide(that.bigDecimal, mc)
+  def /  (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.divide(that.bigDecimal, mc), mc)
 
   /** Division and Remainder - returns tuple containing the result of
-   *  divideToIntegralValue and the remainder.
+   *  divideToIntegralValue and the remainder.  The computation is exact: no rounding is applied.
    */
   def /% (that: BigDecimal): (BigDecimal, BigDecimal) =
     this.bigDecimal.divideAndRemainder(that.bigDecimal) match {
-      case Array(q, r)  => (q, r)
+      case Array(q, r)  => (new BigDecimal(q, mc), new BigDecimal(r, mc))
     }
 
   /** Divide to Integral value.
    */
   def quot (that: BigDecimal): BigDecimal =
-    this.bigDecimal.divideToIntegralValue(that.bigDecimal)
+    new BigDecimal(this.bigDecimal divideToIntegralValue that.bigDecimal, mc)
 
-  /** Returns the minimum of this and that
+  /** Returns the minimum of this and that, or this if the two are equal
    */
-  def min (that: BigDecimal): BigDecimal = this.bigDecimal min that.bigDecimal
-
-  /** Returns the maximum of this and that
+  def min (that: BigDecimal): BigDecimal = (this compare that) match {
+    case x if x <= 0 => this
+    case _           => that
+  }
+  
+  /** Returns the maximum of this and that, or this if the two are equal
    */
-  def max (that: BigDecimal): BigDecimal = this.bigDecimal max that.bigDecimal
-
+  def max (that: BigDecimal): BigDecimal = (this compare that) match {
+    case x if x >= 0 => this
+    case _           => that
+  }
+  
   /** Remainder after dividing this by that.
    */
-  def remainder (that: BigDecimal): BigDecimal = this.bigDecimal.remainder(that.bigDecimal)
+  def remainder (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal remainder that.bigDecimal, mc)
 
   /** Remainder after dividing this by that.
    */
-  def % (that: BigDecimal): BigDecimal = this.remainder(that)
+  def % (that: BigDecimal): BigDecimal = this remainder that
 
   /** Returns a BigDecimal whose value is this ** n.
    */
-  def pow (n: Int): BigDecimal = this.bigDecimal.pow(n, mc)
+  def pow (n: Int): BigDecimal = new BigDecimal(this.bigDecimal.pow(n, mc), mc)
 
   /** Returns a BigDecimal whose value is the negation of this BigDecimal
    */
-  def unary_- : BigDecimal = this.bigDecimal.negate()
+  def unary_- : BigDecimal = new BigDecimal(this.bigDecimal.negate(), mc)
 
   /** Returns the absolute value of this BigDecimal
    */
-  def abs: BigDecimal = this.bigDecimal.abs
+  def abs: BigDecimal = if (signum < 0) unary_- else this
 
   /** Returns the sign of this BigDecimal, i.e.
    *   -1 if it is less than 0,
@@ -307,9 +628,19 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
    */
   def precision: Int = this.bigDecimal.precision()
 
-  /** Returns a BigDecimal rounded according to the MathContext settings.
+  /** Returns a BigDecimal rounded according to the supplied MathContext settings, but
+   *  preserving its own MathContext for future operations.
    */
-  def round(mc: MathContext): BigDecimal = this.bigDecimal round mc
+  def round(mc: MathContext): BigDecimal = {
+    val r = this.bigDecimal round mc
+    if (r eq bigDecimal) this else new BigDecimal(r, this.mc)
+  }
+  
+  /** Returns a `BigDecimal` rounded according to its own `MathContext` */
+  def rounded: BigDecimal = {
+    val r = bigDecimal round mc
+    if (r eq bigDecimal) this else new BigDecimal(r, mc)
+  }
 
   /** Returns the scale of this `BigDecimal`.
    */
@@ -317,19 +648,22 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
 
   /** Returns the size of an ulp, a unit in the last place, of this BigDecimal.
    */
-  def ulp: BigDecimal = this.bigDecimal.ulp
+  def ulp: BigDecimal = new BigDecimal(this.bigDecimal.ulp, mc)
 
-  /** Returns a new BigDecimal based on the supplied MathContext.
+  /** Returns a new BigDecimal based on the supplied MathContext, rounded as needed.
    */
-  def apply(mc: MathContext): BigDecimal = BigDecimal(this.bigDecimal.toString, mc)
+  def apply(mc: MathContext): BigDecimal = new BigDecimal(this.bigDecimal round mc, mc)
 
   /** Returns a `BigDecimal` whose scale is the specified value, and whose value is
    *  numerically equal to this BigDecimal's.
    */
-  def setScale(scale: Int): BigDecimal = this.bigDecimal setScale scale
+  def setScale(scale: Int): BigDecimal = 
+    if (this.scale == scale) this
+    else new BigDecimal(this.bigDecimal setScale scale, mc)
 
   def setScale(scale: Int, mode: RoundingMode): BigDecimal =
-    this.bigDecimal.setScale(scale, mode.id)
+    if (this.scale == scale) this
+    else new BigDecimal(this.bigDecimal.setScale(scale, mode.id), mc)
 
   /** Converts this BigDecimal to a Byte.
    *  If the BigDecimal is too big to fit in a Byte, only the low-order 8 bits are returned.
@@ -339,21 +673,21 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
   override def byteValue   = intValue.toByte
 
   /** Converts this BigDecimal to a Short.
-   *  If the BigDecimal is too big to fit in a Byte, only the low-order 16 bits are returned.
+   *  If the BigDecimal is too big to fit in a Short, only the low-order 16 bits are returned.
    *  Note that this conversion can lose information about the overall magnitude of the
    *  BigDecimal value as well as return a result with the opposite sign.
    */
   override def shortValue  = intValue.toShort
 
   /** Converts this BigDecimal to a Char.
-   *  If the BigDecimal is too big to fit in a char, only the low-order 16 bits are returned.
+   *  If the BigDecimal is too big to fit in a Char, only the low-order 16 bits are returned.
    *  Note that this conversion can lose information about the overall magnitude of the
    *  BigDecimal value and that it always returns a positive result.
    */
   def charValue   = intValue.toChar
 
   /** Converts this BigDecimal to an Int.
-   *  If the BigDecimal is too big to fit in a char, only the low-order 32 bits
+   *  If the BigDecimal is too big to fit in an Int, only the low-order 32 bits
    *  are returned. Note that this conversion can lose information about the
    *  overall magnitude of the BigDecimal value as well as return a result with
    *  the opposite sign.
@@ -361,7 +695,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
   def intValue    = this.bigDecimal.intValue
 
   /** Converts this BigDecimal to a Long.
-   *  If the BigDecimal is too big to fit in a char, only the low-order 64 bits
+   *  If the BigDecimal is too big to fit in a Long, only the low-order 64 bits
    *  are returned. Note that this conversion can lose information about the
    *  overall magnitude of the BigDecimal value as well as return a result with
    *  the opposite sign.
@@ -444,8 +778,11 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
    *  can be done losslessly, returning Some(BigInt) or None.
    */
   def toBigIntExact(): Option[BigInt] =
-    try Some(new BigInt(this.bigDecimal.toBigIntegerExact()))
-    catch { case _: ArithmeticException => None }
+    if (isWhole()) {
+      try Some(new BigInt(this.bigDecimal.toBigIntegerExact()))
+      catch { case _: ArithmeticException => None }
+    }
+    else None
 
   /** Returns the decimal String representation of this BigDecimal.
    */
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index 58838f1..689fc0c 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.math
+package scala
+package math
 
 import java.math.BigInteger
 import scala.language.implicitConversions
@@ -23,12 +24,6 @@ object BigInt {
   private val cache = new Array[BigInt](maxCached - minCached + 1)
   private val minusOne = BigInteger.valueOf(-1)
 
-  @deprecated("Use Long.MinValue", "2.9.0")
-  val MinLong = BigInt(Long.MinValue)
-
-  @deprecated("Use Long.MaxValue", "2.9.0")
-  val MaxLong = BigInt(Long.MaxValue)
-
   /** Constructs a `BigInt` whose value is equal to that of the
    *  specified integer value.
    *
@@ -39,9 +34,9 @@ object BigInt {
     if (minCached <= i && i <= maxCached) {
       val offset = i - minCached
       var n = cache(offset)
-      if (n eq null) { n = new BigInt(BigInteger.valueOf(i)); cache(offset) = n }
+      if (n eq null) { n = new BigInt(BigInteger.valueOf(i.toLong)); cache(offset) = n }
       n
-    } else new BigInt(BigInteger.valueOf(i))
+    } else new BigInt(BigInteger.valueOf(i.toLong))
 
   /** Constructs a `BigInt` whose value is equal to that of the
    *  specified long value.
@@ -114,18 +109,17 @@ object BigInt {
  *  @author  Martin Odersky
  *  @version 1.0, 15/07/2003
  */
-@deprecatedInheritance("This class will be made final.", "2.10.0")
-class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericConversions with Serializable {
+final class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericConversions with Serializable {
   /** Returns the hash code for this BigInt. */
   override def hashCode(): Int =
-    if (isValidLong) unifiedPrimitiveHashcode
+    if (isValidLong) unifiedPrimitiveHashcode()
     else bigInteger.##
 
   /** Compares this BigInt with the specified value for equality.
    */
   override def equals(that: Any): Boolean = that match {
     case that: BigInt     => this equals that
-    case that: BigDecimal => that.toBigIntExact exists (this equals _)
+    case that: BigDecimal => that equals this
     case that: Double     => isValidDouble && toDouble == that
     case that: Float      => isValidFloat && toFloat == that
     case x                => isValidLong && unifiedPrimitiveEquals(x)
@@ -295,9 +289,6 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
    */
   def signum: Int = this.bigInteger.signum()
 
-  @deprecated("Use ~bigInt (the unary_~ method) instead", "2.10.0")
-  def ~ : BigInt = ~this
-
   /** Returns the bitwise complement of this BigInt
    */
   def unary_~ : BigInt = new BigInt(this.bigInteger.not())
@@ -364,7 +355,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
   def charValue   = intValue.toChar
 
   /** Converts this BigInt to an <tt>int</tt>.
-   *  If the BigInt is too big to fit in a int, only the low-order 32 bits
+   *  If the BigInt is too big to fit in an int, only the low-order 32 bits
    *  are returned. Note that this conversion can lose information about the
    *  overall magnitude of the BigInt value as well as return a result with
    *  the opposite sign.
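
For illustration, a small sketch of what the BigInt changes above look like to user code: the class is final, the deprecated MinLong/MaxLong aliases are gone, and comparisons against BigDecimal defer to BigDecimal's own equals. This assumes 2.11 semantics and the object name is illustrative only:

    object BigIntEqualitySketch extends App {
      val i = BigInt(10)
      // Cross-type equality now defers to BigDecimal.equals, which only
      // succeeds when the decimal is exactly the same whole value.
      println(i == BigDecimal("10.0"))   // true
      println(i == BigDecimal("10.5"))   // false
      // The removed BigInt.MaxLong alias is spelled Long.MaxValue directly.
      println(BigInt(Long.MaxValue))     // 9223372036854775807
    }
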
diff --git a/src/library/scala/math/Equiv.scala b/src/library/scala/math/Equiv.scala
index 5f5e049..45b2b36 100644
--- a/src/library/scala/math/Equiv.scala
+++ b/src/library/scala/math/Equiv.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.math
+package scala
+package math
 
 import java.util.Comparator
 
diff --git a/src/library/scala/math/Fractional.scala b/src/library/scala/math/Fractional.scala
index ca33675..b7e0ed5 100644
--- a/src/library/scala/math/Fractional.scala
+++ b/src/library/scala/math/Fractional.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.math
+package scala
+package math
 
 import scala.language.implicitConversions
 
diff --git a/src/library/scala/math/Integral.scala b/src/library/scala/math/Integral.scala
index f3684c4..ff1f695 100644
--- a/src/library/scala/math/Integral.scala
+++ b/src/library/scala/math/Integral.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.math
+package scala
+package math
 
 import scala.language.implicitConversions
 
diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala
index 5a76f4f..eafbf96 100644
--- a/src/library/scala/math/Numeric.scala
+++ b/src/library/scala/math/Numeric.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.math
+package scala
+package math
 
 import scala.language.implicitConversions
 
@@ -50,9 +51,9 @@ object Numeric {
     def negate(x: Int): Int = -x
     def fromInt(x: Int): Int = x
     def toInt(x: Int): Int = x
-    def toLong(x: Int): Long = x
-    def toFloat(x: Int): Float = x
-    def toDouble(x: Int): Double = x
+    def toLong(x: Int): Long = x.toLong
+    def toFloat(x: Int): Float = x.toFloat
+    def toDouble(x: Int): Double = x.toDouble
   }
   implicit object IntIsIntegral extends IntIsIntegral with Ordering.IntOrdering
 
@@ -108,11 +109,11 @@ object Numeric {
     def quot(x: Long, y: Long): Long = x / y
     def rem(x: Long, y: Long): Long = x % y
     def negate(x: Long): Long = -x
-    def fromInt(x: Int): Long = x
+    def fromInt(x: Int): Long = x.toLong
     def toInt(x: Long): Int = x.toInt
     def toLong(x: Long): Long = x
-    def toFloat(x: Long): Float = x
-    def toDouble(x: Long): Double = x
+    def toFloat(x: Long): Float = x.toFloat
+    def toDouble(x: Long): Double = x.toDouble
   }
   implicit object LongIsIntegral extends LongIsIntegral with Ordering.LongOrdering
 
@@ -121,11 +122,13 @@ object Numeric {
     def minus(x: Float, y: Float): Float = x - y
     def times(x: Float, y: Float): Float = x * y
     def negate(x: Float): Float = -x
-    def fromInt(x: Int): Float = x
+    def fromInt(x: Int): Float = x.toFloat
     def toInt(x: Float): Int = x.toInt
     def toLong(x: Float): Long = x.toLong
     def toFloat(x: Float): Float = x
-    def toDouble(x: Float): Double = x
+    def toDouble(x: Float): Double = x.toDouble
+    // logic in Numeric base trait mishandles abs(-0.0f)
+    override def abs(x: Float): Float = math.abs(x)
   }
   trait FloatIsFractional extends FloatIsConflicted with Fractional[Float] {
     def div(x: Float, y: Float): Float = x / y
@@ -143,11 +146,13 @@ object Numeric {
     def minus(x: Double, y: Double): Double = x - y
     def times(x: Double, y: Double): Double = x * y
     def negate(x: Double): Double = -x
-    def fromInt(x: Int): Double = x
+    def fromInt(x: Int): Double = x.toDouble
     def toInt(x: Double): Int = x.toInt
     def toLong(x: Double): Long = x.toLong
     def toFloat(x: Double): Float = x.toFloat
     def toDouble(x: Double): Double = x
+    // logic in Numeric base trait mishandles abs(-0.0)
+    override def abs(x: Double): Double = math.abs(x)
   }
   trait DoubleIsFractional extends DoubleIsConflicted with Fractional[Double] {
     def div(x: Double, y: Double): Double = x / y
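
For illustration, a sketch of why the Float/Double abs overrides above exist: the default abs in the Numeric base trait returns -0.0 unchanged, since negative zero is not less than zero. This assumes the 2.11 instances shown in this hunk:

    object NumericAbsSketch extends App {
      val num = implicitly[Numeric[Float]]
      // With the override delegating to math.abs, the sign of negative zero
      // is normalised, so dividing by the result gives +Infinity.
      println(num.abs(-0.0f))            // 0.0
      println(1.0f / num.abs(-0.0f))     // Infinity
    }
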
diff --git a/src/library/scala/math/Ordered.scala b/src/library/scala/math/Ordered.scala
index e8be92e..51f2765 100644
--- a/src/library/scala/math/Ordered.scala
+++ b/src/library/scala/math/Ordered.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.math
+package scala
+package math
 
 import scala.language.implicitConversions
 
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index 11b1205..d1a4e7c 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -26,14 +26,14 @@ import scala.language.{implicitConversions, higherKinds}
   * val pairs = Array(("a", 5, 2), ("c", 3, 1), ("b", 1, 3))
   *
   * // sort by 2nd element
-  * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2))
+  * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2)
   *
   * // sort by the 3rd element, then 1st
   * Sorting.quickSort(pairs)(Ordering[(Int, String)].on(x => (x._3, x._1)))
   * }}}
   *
   * An Ordering[T] is implemented by specifying compare(a:T, b:T), which
-  * decides how to order to instances a and b. Instances of Ordering[T] can be
+  * decides how to order two instances a and b. Instances of Ordering[T] can be
   * used by things like scala.util.Sorting to sort collections like Array[T].
   *
   * For example:
@@ -173,7 +173,7 @@ object Ordering extends LowPriorityOrderingImplicits {
           val ye = y.iterator
 
           while (xe.hasNext && ye.hasNext) {
-            val res = ord.compare(xe.next, ye.next)
+            val res = ord.compare(xe.next(), ye.next())
             if (res != 0) return res
           }
 
@@ -347,7 +347,7 @@ object Ordering extends LowPriorityOrderingImplicits {
         val ye = y.iterator
 
         while (xe.hasNext && ye.hasNext) {
-          val res = ord.compare(xe.next, ye.next)
+          val res = ord.compare(xe.next(), ye.next())
           if (res != 0) return res
         }
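
For illustration, the sorting examples from the Ordering scaladoc above can be run roughly as follows; this is a sketch assuming scala.util.Sorting from the standard library, and the object name is illustrative only:

    import scala.util.Sorting

    object OrderingSketch extends App {
      val pairs = Array(("a", 5, 2), ("c", 3, 1), ("b", 1, 3))
      // Sort by the second element of each tuple.
      Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2))
      // Sort by the third element, then the first.
      Sorting.quickSort(pairs)(Ordering[(Int, String)].on((x: (String, Int, Int)) => (x._3, x._1)))
      println(pairs.mkString(", "))
    }
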
 
diff --git a/src/library/scala/math/PartialOrdering.scala b/src/library/scala/math/PartialOrdering.scala
index a9e317d..9e35381 100644
--- a/src/library/scala/math/PartialOrdering.scala
+++ b/src/library/scala/math/PartialOrdering.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.math
+package scala
+package math
 
 /** A trait for representing partial orderings.  It is important to
  *  distinguish between a type that has a partial order and a representation
diff --git a/src/library/scala/math/PartiallyOrdered.scala b/src/library/scala/math/PartiallyOrdered.scala
index 7823e5b..f58210d 100644
--- a/src/library/scala/math/PartiallyOrdered.scala
+++ b/src/library/scala/math/PartiallyOrdered.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.math
+package scala
+package math
 
 /** A class for partially ordered data.
  *
diff --git a/src/library/scala/math/ScalaNumber.java b/src/library/scala/math/ScalaNumber.java
index 7345147..f03ba7b 100644
--- a/src/library/scala/math/ScalaNumber.java
+++ b/src/library/scala/math/ScalaNumber.java
@@ -6,8 +6,6 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
 package scala.math;
 
 /** A marker class for Number types introduced by Scala
diff --git a/src/library/scala/math/ScalaNumericConversions.scala b/src/library/scala/math/ScalaNumericConversions.scala
index 6ddf48d..0006133 100644
--- a/src/library/scala/math/ScalaNumericConversions.scala
+++ b/src/library/scala/math/ScalaNumericConversions.scala
@@ -6,9 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.math
-
-import java.{ lang => jl }
+package scala
+package math
 
 /** A slightly more specific conversion trait for classes which
  *  extend ScalaNumber (which excludes value classes.)
@@ -21,6 +20,7 @@ trait ScalaNumericConversions extends ScalaNumber with ScalaNumericAnyConversion
  *  across all the numeric types, suitable for use in value classes.
  */
 trait ScalaNumericAnyConversions extends Any {
+  /** @return `'''true'''` if this number has no decimal component, `'''false'''` otherwise. */
   def isWhole(): Boolean
   def underlying(): Any
 
@@ -34,37 +34,37 @@ trait ScalaNumericAnyConversions extends Any {
   /** Returns the value of this as a [[scala.Char]]. This may involve
     * rounding or truncation.
     */
-  def toChar = intValue.toChar
+  def toChar = intValue().toChar
 
   /** Returns the value of this as a [[scala.Byte]]. This may involve
     * rounding or truncation.
     */
-  def toByte = byteValue
+  def toByte = byteValue()
 
   /** Returns the value of this as a [[scala.Short]]. This may involve
     * rounding or truncation.
     */
-  def toShort = shortValue
+  def toShort = shortValue()
 
   /** Returns the value of this as an [[scala.Int]]. This may involve
     * rounding or truncation.
     */
-  def toInt = intValue
+  def toInt = intValue()
 
   /** Returns the value of this as a [[scala.Long]]. This may involve
     * rounding or truncation.
     */
-  def toLong = longValue
+  def toLong = longValue()
 
   /** Returns the value of this as a [[scala.Float]]. This may involve
     * rounding or truncation.
     */
-  def toFloat = floatValue
+  def toFloat = floatValue()
 
   /** Returns the value of this as a [[scala.Double]]. This may involve
     * rounding or truncation.
     */
-  def toDouble = doubleValue
+  def toDouble = doubleValue()
 
   /** Returns `true` iff this has a zero fractional part, and is within the
     * range of [[scala.Byte]] MinValue and MaxValue; otherwise returns `false`.
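
For illustration, the toChar/toByte/.../toDouble wrappers above simply forward to the corresponding xValue() methods, so they inherit their truncating behaviour. A short sketch, assuming 2.11's BigDecimal as the concrete ScalaNumericConversions instance:

    object NumericConversionsSketch extends App {
      val big = BigDecimal("300.9")
      println(big.isWhole())   // false
      println(big.toInt)       // 300  (fraction dropped)
      println(big.toByte)      // 44   (only the low-order 8 bits of 300 survive)
    }
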
diff --git a/src/library/scala/math/package.scala b/src/library/scala/math/package.scala
index cb033bd..58ece8a 100644
--- a/src/library/scala/math/package.scala
+++ b/src/library/scala/math/package.scala
@@ -16,12 +16,12 @@ package object math {
   /** The `double` value that is closer than any other to `e`, the base of
    *  the natural logarithms.
    */
-  val E = java.lang.Math.E
+  @inline final val E = java.lang.Math.E
 
   /** The `double` value that is closer than any other to `pi`, the ratio of
    *  the circumference of a circle to its diameter.
    */
-  val Pi = java.lang.Math.PI
+  @inline final val Pi = java.lang.Math.PI
 
   /** Returns a `double` value with a positive sign, greater than or equal
    *  to `0.0` and less than `1.0`.
@@ -62,7 +62,7 @@ package object math {
   def sqrt(x: Double): Double = java.lang.Math.sqrt(x)
   def IEEEremainder(x: Double, y: Double): Double = java.lang.Math.IEEEremainder(x, y)
 
-  def ceil(x: Double): Double = java.lang.Math.ceil(x)
+  def ceil(x: Double): Double  = java.lang.Math.ceil(x)
   def floor(x: Double): Double = java.lang.Math.floor(x)
 
   /** Returns the `double` value that is closest in value to the
@@ -93,31 +93,47 @@ package object math {
    */
   def pow(x: Double, y: Double): Double = java.lang.Math.pow(x, y)
 
-  /** Returns the closest `long` to the argument.
+  /** There is no reason to round a `Long`, but this method prevents unintended conversion to `Float` followed by rounding to `Int`. */
+  @deprecated("This is an integer type; there is no reason to round it.  Perhaps you meant to call this with a floating-point value?", "2.11.0")
+  def round(x: Long): Long = x
+
+  /** Returns the closest `Int` to the argument.
    *
-   *  @param  x a floating-point value to be rounded to a `long`.
-   *  @return the value of the argument rounded to the nearest`long` value.
+   *  @param  x a floating-point value to be rounded to an `Int`.
+   *  @return the value of the argument rounded to the nearest `Int` value.
    */
   def round(x: Float): Int = java.lang.Math.round(x)
+  
+  /** Returns the closest `Long` to the argument.
+   *
+   *  @param  x a floating-point value to be rounded to a `Long`.
+   *  @return the value of the argument rounded to the nearest `Long` value.
+   */
   def round(x: Double): Long = java.lang.Math.round(x)
-  def abs(x: Int): Int = java.lang.Math.abs(x)
-  def abs(x: Long): Long = java.lang.Math.abs(x)
-  def abs(x: Float): Float = java.lang.Math.abs(x)
+
+  def abs(x: Int): Int       = java.lang.Math.abs(x)
+  def abs(x: Long): Long     = java.lang.Math.abs(x)
+  def abs(x: Float): Float   = java.lang.Math.abs(x)
   def abs(x: Double): Double = java.lang.Math.abs(x)
 
-  def max(x: Int, y: Int): Int = java.lang.Math.max(x, y)
-  def max(x: Long, y: Long): Long = java.lang.Math.max(x, y)
-  def max(x: Float, y: Float): Float = java.lang.Math.max(x, y)
+  def max(x: Int, y: Int): Int          = java.lang.Math.max(x, y)
+  def max(x: Long, y: Long): Long       = java.lang.Math.max(x, y)
+  def max(x: Float, y: Float): Float    = java.lang.Math.max(x, y)
   def max(x: Double, y: Double): Double = java.lang.Math.max(x, y)
 
-  def min(x: Int, y: Int): Int = java.lang.Math.min(x, y)
-  def min(x: Long, y: Long): Long  = java.lang.Math.min(x, y)
-  def min(x: Float, y: Float): Float  = java.lang.Math.min(x, y)
+  def min(x: Int, y: Int): Int          = java.lang.Math.min(x, y)
+  def min(x: Long, y: Long): Long       = java.lang.Math.min(x, y)
+  def min(x: Float, y: Float): Float    = java.lang.Math.min(x, y)
   def min(x: Double, y: Double): Double = java.lang.Math.min(x, y)
 
-  def signum(x: Int): Int = java.lang.Integer.signum(x)
-  def signum(x: Long): Long = java.lang.Long.signum(x)
-  def signum(x: Float): Float = java.lang.Math.signum(x)
+  /** Note that these are not pure forwarders to the java versions.
+   *  In particular, the return type of java.lang.Long.signum is Int,
+   *  but here it is widened to Long so that each overloaded variant
+   *  will return the same numeric type it is passed.
+   */
+  def signum(x: Int): Int       = java.lang.Integer.signum(x)
+  def signum(x: Long): Long     = java.lang.Long.signum(x)
+  def signum(x: Float): Float   = java.lang.Math.signum(x)
   def signum(x: Double): Double = java.lang.Math.signum(x)
 
   // -----------------------------------------------------------------------
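
For illustration, the signum note added above matters in practice: the Long overload widens java.lang.Long.signum's Int result so each overload returns its argument's type, and round now rejects Long arguments. A sketch, assuming the 2.11 package object shown in this hunk:

    object MathPackageSketch extends App {
      val s: Long = math.signum(-42L)   // stays a Long; Java's version returns Int
      println(s)                        // -1
      println(math.round(2.5f))         // 3, as an Int
      println(math.round(2.5))          // 3, as a Long
    }
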
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index 84f6f0b..224112c 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -34,12 +34,6 @@ package object scala {
     override def toString = "object AnyRef"
   }
 
-  @deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
-  type serializable = annotation.serializable
-
-  @deprecated("instead of `@cloneable class C`, use `class C extends Cloneable`", "2.10.0")
-  type cloneable = annotation.cloneable
-
   type TraversableOnce[+A] = scala.collection.TraversableOnce[A]
 
   type Traversable[+A] = scala.collection.Traversable[A]
@@ -95,7 +89,10 @@ package object scala {
   val Equiv = scala.math.Equiv
 
   type Fractional[T] = scala.math.Fractional[T]
+  val Fractional = scala.math.Fractional
+
   type Integral[T] = scala.math.Integral[T]
+  val Integral = scala.math.Integral
 
   type Numeric[T] = scala.math.Numeric[T]
   val Numeric = scala.math.Numeric
@@ -121,14 +118,12 @@ package object scala {
   // Annotations which we might move to annotation.*
 /*
   type SerialVersionUID = annotation.SerialVersionUID
-  type cloneable = annotation.cloneable
   type deprecated = annotation.deprecated
   type deprecatedName = annotation.deprecatedName
   type inline = annotation.inline
   type native = annotation.native
-  type noinline = noannotation.inline
+  type noinline = annotation.noinline
   type remote = annotation.remote
-  type serializable = annotation.serializable
   type specialized = annotation.specialized
   type transient = annotation.transient
   type throws  = annotation.throws
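
For illustration, with the Fractional and Integral companion aliases added above, their extra implicit operators are reachable without spelling out scala.math. A sketch assuming the 2.11 aliases; the halve helper is illustrative only:

    object IntegralAliasSketch extends App {
      import Integral.Implicits._
      def halve[T](x: T)(implicit ev: Integral[T]): T = x / ev.fromInt(2)
      println(halve(10))    // 5
      println(halve(9L))    // 4
    }
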
diff --git a/src/library/scala/parallel/Future.scala b/src/library/scala/parallel/Future.scala
deleted file mode 100644
index e255a57..0000000
--- a/src/library/scala/parallel/Future.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2005-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.parallel
-
-
-
-/** A future is a function without parameters that will block the caller if
- *  the parallel computation associated with the function is not completed.
- *  
- *  @tparam R   the type of the result
- *
- *  @since 2.9
- */
-@deprecated("Use `scala.concurrent.Future` instead.", "2.10.0")
-trait Future[@specialized +R] extends (() => R) {
-  /** Returns a result once the parallel computation completes. If the
-   *  computation produced an exception, an exception is forwarded.
-   *
-   *  '''Note:''' creating a circular dependency between futures by calling
-   *  this method will result in a deadlock.
-   *
-   *  @return     the result
-   *  @throws     the exception that was thrown during a parallel computation
-   */
-  def apply(): R
-
-  /** Returns `true` if the parallel computation is completed.
-   *
-   *  @return     `true` if the parallel computation is completed, `false` otherwise
-   */
-  def isDone(): Boolean
-}
-
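
For illustration, scala.parallel.Future is removed above after its 2.10 deprecation; its replacement is scala.concurrent.Future. A sketch of the equivalent usage, assuming the standard global execution context:

    import scala.concurrent.{Await, Future}
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration._

    object ConcurrentFutureSketch extends App {
      val result: Future[Int] = Future { 21 * 2 }
      // Blocking here only for demonstration; prefer combinators in real code.
      println(Await.result(result, 1.second))   // 42
    }
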
diff --git a/src/library/scala/parallel/package.scala.disabled b/src/library/scala/parallel/package.scala.disabled
deleted file mode 100644
index 45f5470..0000000
--- a/src/library/scala/parallel/package.scala.disabled
+++ /dev/null
@@ -1,178 +0,0 @@
-package scala
-
-
-
-import scala.concurrent.forkjoin._
-
-
-/** This package object contains various parallel operations.
- *
- *  @define invokingPar
- *  Invoking a parallel computation creates a future which will
- *  hold the result of the computation once it completes. Querying
- *  the result of a future before its parallel computation has completed
- *  will block the caller. For all practical concerns, the dependency
- *  chain obtained by querying results of unfinished futures can have
- *  arbitrary lengths. However, care must be taken not to create a
- *  circular dependency, as this will result in a deadlock.
- *  
- *  Additionally, if the parallel computation performs a blocking call
- *  (e.g. an I/O operation or waiting for a lock) other than waiting for a future,
- *  it should do so by invoking the `block` method. This is another
- *  form of waiting that could potentially create a circular dependency,
- *  an the user should take care not to do this.
- *  
- *  Users should be aware that invoking a parallel computation has a
- *  certain overhead. Parallel computations should not be invoked for
- *  small computations, as this can lead to bad performance. A rule of the
- *  thumb is having parallel computations equivalent to a loop
- *  with 50000 arithmetic operations (at least). If a parallel computation
- *  is invoked within another parallel computation, then it should be
- *  computationally equivalent to a loop with 10000 arithmetic operations.
- */
-package object parallel {
-  
-  private[scala] val forkjoinpool = new ForkJoinPool()
-  
-  private class Task[T](body: =>T) extends RecursiveTask[T] with Future[T] {
-    def compute = body
-    def apply() = join()
-  }
-  
-  private final def newTask[T](body: =>T) = new Task[T](body)
-  
-  private final def executeTask[T](task: RecursiveTask[T]) {
-    if (Thread.currentThread().isInstanceOf[ForkJoinWorkerThread]) task.fork
-    else forkjoinpool.execute(task)
-  }
-  
-  /* public methods */
-  
-  /** Performs a call which can potentially block execution.
-   *  
-   *  Example:
-   *  {{{
-   *    val lock = new ReentrantLock
-   *    
-   *    // ... do something ...
-   *    
-   *    blocking {
-   *      if (!lock.hasLock) lock.lock()
-   *    }
-   *  }}}
-   *  
-   *  '''Note:''' calling methods that wait arbitrary amounts of time
-   *  (e.g. for I/O operations or locks) may severely decrease performance
-   *  or even result in deadlocks. This does not include waiting for
-   *  results of futures.
-   */
-  def blocking[T](body: =>T): T = {
-    if (Thread.currentThread().isInstanceOf[ForkJoinWorkerThread]) {
-      val blocker = new ForkJoinPool.ManagedBlocker {
-        @volatile var done = false
-        @volatile var result: Any = _
-        def block() = {
-          result = body
-          done = true
-          true
-        }
-        def isReleasable() = done
-      }
-      ForkJoinPool.managedBlock(blocker, true)
-      blocker.result.asInstanceOf[T]
-    } else body
-  }
-  
-  /** Starts a parallel computation and returns a future.
-   *  
-   *  $invokingPar
-   *  
-   *  @tparam T     the type of the result of the parallel computation
-   *  @param body   the computation to be invoked in parallel
-   *  @return       a future with the result
-   */
-  def par[T](body: =>T): Future[T] = {
-    val task = newTask(body)
-    executeTask(task)
-    task
-  }
-  
-  /** Starts 2 parallel computations and returns a future.
-   *  
-   *  $invokingPar
-   *
-   *  @tparam T1    the type of the result of 1st the parallel computation
-   *  @tparam T2    the type of the result of 2nd the parallel computation
-   *  @param b1     the 1st computation to be invoked in parallel
-   *  @param b2     the 2nd computation to be invoked in parallel
-   *  @return       a tuple of futures corresponding to parallel computations
-   */
-  def par[T1, T2](b1: =>T1, b2: =>T2): (Future[T1], Future[T2]) = {
-    val t1 = newTask(b1)
-    executeTask(t1)
-    val t2 = newTask(b2)
-    executeTask(t2)
-    (t1, t2)
-  }
-  
-  /** Starts 3 parallel computations and returns a future.
-   *  
-   *  $invokingPar
-   *
-   *  @tparam T1    the type of the result of 1st the parallel computation
-   *  @tparam T2    the type of the result of 2nd the parallel computation
-   *  @tparam T3    the type of the result of 3rd the parallel computation
-   *  @param b1     the 1st computation to be invoked in parallel
-   *  @param b2     the 2nd computation to be invoked in parallel
-   *  @param b3     the 3rd computation to be invoked in parallel
-   *  @return       a tuple of futures corresponding to parallel computations
-   */
-  def par[T1, T2, T3](b1: =>T1, b2: =>T2, b3: =>T3): (Future[T1], Future[T2], Future[T3]) = {
-    val t1 = newTask(b1)
-    executeTask(t1)
-    val t2 = newTask(b2)
-    executeTask(t2)
-    val t3 = newTask(b3)
-    executeTask(t3)
-    (t1, t2, t3)
-  }
-  
-  /** Starts 4 parallel computations and returns a future.
-   *  
-   *  $invokingPar
-   *
-   *  @tparam T1    the type of the result of 1st the parallel computation
-   *  @tparam T2    the type of the result of 2nd the parallel computation
-   *  @tparam T3    the type of the result of 3rd the parallel computation
-   *  @tparam T4    the type of the result of 4th the parallel computation
-   *  @param b1     the 1st computation to be invoked in parallel
-   *  @param b2     the 2nd computation to be invoked in parallel
-   *  @param b3     the 3rd computation to be invoked in parallel
-   *  @param b4     the 4th computation to be invoked in parallel
-   *  @return       a tuple of futures corresponding to parallel computations
-   */
-  def par[T1, T2, T3, T4](b1: =>T1, b2: =>T2, b3: =>T3, b4: =>T4): (Future[T1], Future[T2], Future[T3], Future[T4]) = {
-    val t1 = newTask(b1)
-    executeTask(t1)
-    val t2 = newTask(b2)
-    executeTask(t2)
-    val t3 = newTask(b3)
-    executeTask(t3)
-    val t4 = newTask(b4)
-    executeTask(t4)
-    (t1, t2, t3, t4)
-  }
-  
-}
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/ref/SoftReference.scala b/src/library/scala/ref/SoftReference.scala
index b414db6..e4ce667 100644
--- a/src/library/scala/ref/SoftReference.scala
+++ b/src/library/scala/ref/SoftReference.scala
@@ -13,7 +13,8 @@ package scala.ref
  *  @author Sean McDirmid
  */
 class SoftReference[+T <: AnyRef](value : T, queue : ReferenceQueue[T]) extends ReferenceWrapper[T] {
-  def this(value : T) = this(value, null);
+  def this(value : T) = this(value, null)
+
   val underlying: java.lang.ref.SoftReference[_ <: T] =
     new SoftReferenceWithWrapper[T](value, queue, this)
 }
diff --git a/src/library/scala/ref/WeakReference.scala b/src/library/scala/ref/WeakReference.scala
index 6eb4899..6ee40ae 100644
--- a/src/library/scala/ref/WeakReference.scala
+++ b/src/library/scala/ref/WeakReference.scala
@@ -10,7 +10,7 @@
 package scala.ref
 
 /**
- *  A wrapper class for java.lag.ref.WeakReference
+ *  A wrapper class for java.lang.ref.WeakReference
  *  The new functionality is (1) results are Option values, instead of using null.
  *  (2) There is an extractor that maps the weak reference itself into an option.
  *  @author Sean McDirmid
@@ -29,7 +29,7 @@ object WeakReference {
 
   /** Optionally returns the referenced value, or `None` if that value no longer exists */
   def unapply[T <: AnyRef](wr: WeakReference[T]): Option[T] = {
-    val x = wr.underlying.get 
+    val x = wr.underlying.get
     if (x != null) Some(x) else None
   }
 }
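
For illustration, the WeakReference extractor documented above wraps java.lang.ref.WeakReference so the referent comes back as an Option rather than a possible null. A sketch assuming 2.11's scala.ref API; the names are illustrative only:

    import scala.ref.WeakReference

    object WeakReferenceSketch extends App {
      val holder = new Object            // keep a strong reference for the demo
      val ref = new WeakReference(holder)
      ref match {
        case WeakReference(value) => println(s"still reachable: $value")
        case _                    => println("already collected")
      }
    }
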
diff --git a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
index 0a3d818..ca7a3cd 100644
--- a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
+++ b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.reflect
+package scala
+package reflect
 
 import scala.collection.mutable.{ WrappedArray, ArrayBuilder }
 import java.lang.{ Class => jClass }
@@ -15,6 +16,7 @@ import java.lang.{ Class => jClass }
 trait ClassManifestDeprecatedApis[T] extends OptManifest[T] {
   self: ClassManifest[T] =>
 
+  // Still in use in target test.junit.comp.
   @deprecated("Use runtimeClass instead", "2.10.0")
   def erasure: jClass[_] = runtimeClass
 
@@ -63,12 +65,12 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] {
     // when the erasure is the same, even before considering variance.
     !cannotMatch && {
       // this part is wrong for not considering variance
-      if (this.erasure == that.erasure)
+      if (this.runtimeClass == that.runtimeClass)
         subargs(this.typeArguments, that.typeArguments)
       // this part is wrong for punting unless the rhs has no type
       // arguments, but it's better than a blindfolded pinata swing.
       else
-        that.typeArguments.isEmpty && subtype(this.erasure, that.erasure)
+        that.typeArguments.isEmpty && subtype(this.runtimeClass, that.runtimeClass)
     }
   }
 
@@ -90,29 +92,29 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] {
 
   @deprecated("Use wrap instead", "2.10.0")
   def arrayManifest: ClassManifest[Array[T]] =
-    ClassManifest.classType[Array[T]](arrayClass[T](erasure), this)
+    ClassManifest.classType[Array[T]](arrayClass[T](runtimeClass), this)
 
   override def newArray(len: Int): Array[T] =
-    java.lang.reflect.Array.newInstance(erasure, len).asInstanceOf[Array[T]]
+    java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]]
 
   @deprecated("Use wrap.newArray instead", "2.10.0")
   def newArray2(len: Int): Array[Array[T]] =
-    java.lang.reflect.Array.newInstance(arrayClass[T](erasure), len)
+    java.lang.reflect.Array.newInstance(arrayClass[T](runtimeClass), len)
       .asInstanceOf[Array[Array[T]]]
 
   @deprecated("Use wrap.wrap.newArray instead", "2.10.0")
   def newArray3(len: Int): Array[Array[Array[T]]] =
-    java.lang.reflect.Array.newInstance(arrayClass[Array[T]](arrayClass[T](erasure)), len)
+    java.lang.reflect.Array.newInstance(arrayClass[Array[T]](arrayClass[T](runtimeClass)), len)
       .asInstanceOf[Array[Array[Array[T]]]]
 
   @deprecated("Use wrap.wrap.wrap.newArray instead", "2.10.0")
   def newArray4(len: Int): Array[Array[Array[Array[T]]]] =
-    java.lang.reflect.Array.newInstance(arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure))), len)
+    java.lang.reflect.Array.newInstance(arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](runtimeClass))), len)
       .asInstanceOf[Array[Array[Array[Array[T]]]]]
 
   @deprecated("Use wrap.wrap.wrap.wrap.newArray instead", "2.10.0")
   def newArray5(len: Int): Array[Array[Array[Array[Array[T]]]]] =
-    java.lang.reflect.Array.newInstance(arrayClass[Array[Array[Array[T]]]](arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure)))), len)
+    java.lang.reflect.Array.newInstance(arrayClass[Array[Array[Array[T]]]](arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](runtimeClass)))), len)
       .asInstanceOf[Array[Array[Array[Array[Array[T]]]]]]
 
   @deprecated("Create WrappedArray directly instead", "2.10.0")
@@ -130,7 +132,7 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] {
 
   protected def argString =
     if (typeArguments.nonEmpty) typeArguments.mkString("[", ", ", "]")
-    else if (erasure.isArray) "["+ClassManifest.fromClass(erasure.getComponentType)+"]"
+    else if (runtimeClass.isArray) "["+ClassManifest.fromClass(runtimeClass.getComponentType)+"]"
     else ""
 }
 
@@ -220,7 +222,7 @@ object ClassManifestFactory {
     */
   def abstractType[T](prefix: OptManifest[_], name: String, upperbound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] =
     new ClassManifest[T] {
-      override def runtimeClass = upperbound.erasure
+      override def runtimeClass = upperbound.runtimeClass
       override val typeArguments = args.toList
       override def toString = prefix.toString+"#"+name+argString
     }
@@ -235,6 +237,6 @@ private class ClassTypeManifest[T](
 {
   override def toString =
     (if (prefix.isEmpty) "" else prefix.get.toString+"#") +
-    (if (erasure.isArray) "Array" else erasure.getName) +
+    (if (runtimeClass.isArray) "Array" else runtimeClass.getName) +
     argString
 }
\ No newline at end of file
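
For illustration, every use of the deprecated erasure accessor above is rewritten to runtimeClass, which is also the accessor user code should reach for on a ClassTag. A sketch assuming 2.11's scala.reflect API; elementClass is an illustrative helper:

    import scala.reflect.{ClassTag, classTag}

    object RuntimeClassSketch extends App {
      def elementClass[T: ClassTag]: Class[_] = classTag[T].runtimeClass
      println(elementClass[String])       // class java.lang.String
      println(elementClass[Array[Int]])   // class [I
    }
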
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
index d699e34..33c5cee 100644
--- a/src/library/scala/reflect/ClassTag.scala
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -28,7 +28,7 @@ import scala.runtime.ScalaRunTime.{ arrayClass, arrayElementClass }
  *   scala> mkArray("Japan","Brazil","Germany")
  *   res1: Array[String] = Array(Japan, Brazil, Germany)
  * }}}
- * 
+ *
  * See [[scala.reflect.api.TypeTags]] for more examples, or the
  * [[http://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]]
  * for more details.
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index f62d0ec..803c980 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.reflect
+package scala
+package reflect
 
 import scala.collection.mutable.{ ArrayBuilder, WrappedArray }
 
@@ -45,7 +46,7 @@ trait Manifest[T] extends ClassManifest[T] with Equals {
   override def typeArguments: List[Manifest[_]] = Nil
 
   override def arrayManifest: Manifest[Array[T]] =
-    Manifest.classType[Array[T]](arrayClass[T](erasure), this)
+    Manifest.classType[Array[T]](arrayClass[T](runtimeClass), this)
 
   override def canEqual(that: Any): Boolean = that match {
     case _: Manifest[_]   => true
@@ -55,10 +56,10 @@ trait Manifest[T] extends ClassManifest[T] with Equals {
    *  faster than <:< and rules out most comparisons.
    */
   override def equals(that: Any): Boolean = that match {
-    case m: Manifest[_] => (m canEqual this) && (this.erasure == m.erasure) && (this <:< m) && (m <:< this)
+    case m: Manifest[_] => (m canEqual this) && (this.runtimeClass == m.runtimeClass) && (this <:< m) && (m <:< this)
     case _              => false
   }
-  override def hashCode = this.erasure.##
+  override def hashCode = this.runtimeClass.##
 }
 
 // TODO undeprecated until Scala reflection becomes non-experimental
@@ -237,7 +238,7 @@ object ManifestFactory {
                                      override val typeArguments: List[Manifest[_]]) extends Manifest[T] {
     override def toString =
       (if (prefix.isEmpty) "" else prefix.get.toString+"#") +
-      (if (erasure.isArray) "Array" else erasure.getName) +
+      (if (runtimeClass.isArray) "Array" else runtimeClass.getName) +
       argString
    }
 
@@ -258,7 +259,7 @@ object ManifestFactory {
     */
   def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] =
     new Manifest[T] {
-      def runtimeClass = upperBound.erasure
+      def runtimeClass = upperBound.runtimeClass
       override def toString =
         "_" +
         (if (lowerBound eq Nothing) "" else " >: "+lowerBound) +
@@ -268,7 +269,7 @@ object ManifestFactory {
   /** Manifest for the intersection type `parents_0 with ... with parents_n'. */
   def intersectionType[T](parents: Manifest[_]*): Manifest[T] =
     new Manifest[T] {
-      def runtimeClass = parents.head.erasure
+      def runtimeClass = parents.head.runtimeClass
       override def toString = parents.mkString(" with ")
     }
 }
diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala
index 384ebc6..a843054 100755
--- a/src/library/scala/reflect/NameTransformer.scala
+++ b/src/library/scala/reflect/NameTransformer.scala
@@ -15,9 +15,12 @@ package reflect
 object NameTransformer {
   // XXX Short term: providing a way to alter these without having to recompile
   // the compiler before recompiling the compiler.
-  val MODULE_SUFFIX_STRING = sys.props.getOrElse("SCALA_MODULE_SUFFIX_STRING", "$")
-  val NAME_JOIN_STRING     = sys.props.getOrElse("SCALA_NAME_JOIN_STRING", "$")
-  val MODULE_INSTANCE_NAME = "MODULE$"
+  val MODULE_SUFFIX_STRING          = sys.props.getOrElse("SCALA_MODULE_SUFFIX_STRING", "$")
+  val NAME_JOIN_STRING              = sys.props.getOrElse("SCALA_NAME_JOIN_STRING", "$")
+  val MODULE_INSTANCE_NAME          = "MODULE$"
+  val LOCAL_SUFFIX_STRING           = " "
+  val SETTER_SUFFIX_STRING          = "_$eq"
+  val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
 
   private val nops = 128
   private val ncodes = 26 * 26
@@ -27,9 +30,9 @@ object NameTransformer {
   private val op2code = new Array[String](nops)
   private val code2op = new Array[OpCodes](ncodes)
   private def enterOp(op: Char, code: String) = {
-    op2code(op) = code
+    op2code(op.toInt) = code
     val c = (code.charAt(1) - 'a') * 26 + code.charAt(2) - 'a'
-    code2op(c) = new OpCodes(op, code, code2op(c))
+    code2op(c.toInt) = new OpCodes(op, code, code2op(c))
   }
 
   /* Note: decoding assumes opcodes are only ever lowercase. */
@@ -63,12 +66,12 @@ object NameTransformer {
     var i = 0
     while (i < len) {
       val c = name charAt i
-      if (c < nops && (op2code(c) ne null)) {
+      if (c < nops && (op2code(c.toInt) ne null)) {
         if (buf eq null) {
           buf = new StringBuilder()
           buf.append(name.substring(0, i))
         }
-        buf.append(op2code(c))
+        buf.append(op2code(c.toInt))
       /* Handle glyphs that are not valid Java/JVM identifiers */
       }
       else if (!Character.isJavaIdentifierPart(c)) {
@@ -93,8 +96,8 @@ object NameTransformer {
    */
   def decode(name0: String): String = {
     //System.out.println("decode: " + name);//DEBUG
-    val name = if (name0.endsWith("<init>")) name0.substring(0, name0.length() - ("<init>").length()) + "this"
-               else name0;
+    val name = if (name0.endsWith("<init>")) name0.stripSuffix("<init>") + "this"
+               else name0
     var buf: StringBuilder = null
     val len = name.length()
     var i = 0
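
For illustration, NameTransformer, touched above, is the encoder/decoder behind JVM-safe operator names, and the new constants expose a few suffix conventions. A sketch of typical round-trips, assuming the 2.11 values shown in this hunk:

    import scala.reflect.NameTransformer

    object NameTransformerSketch extends App {
      println(NameTransformer.encode("::"))          // $colon$colon
      println(NameTransformer.decode("$plus$eq"))    // +=
      println(NameTransformer.SETTER_SUFFIX_STRING)  // _$eq
    }
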
diff --git a/src/library/scala/reflect/NoManifest.scala b/src/library/scala/reflect/NoManifest.scala
index 61bc5e2..2ef946c 100644
--- a/src/library/scala/reflect/NoManifest.scala
+++ b/src/library/scala/reflect/NoManifest.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.reflect
+package scala
+package reflect
 
 /** One of the branches of an [[scala.reflect.OptManifest]].
   */
diff --git a/src/library/scala/reflect/OptManifest.scala b/src/library/scala/reflect/OptManifest.scala
index 5e373c7..b69f554 100644
--- a/src/library/scala/reflect/OptManifest.scala
+++ b/src/library/scala/reflect/OptManifest.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.reflect
+package scala
+package reflect
 
 /** A `OptManifest[T]` is an optional [[scala.reflect.Manifest]].
  *
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
index 10e6d7d..509d181 100644
--- a/src/library/scala/reflect/package.scala
+++ b/src/library/scala/reflect/package.scala
@@ -1,5 +1,7 @@
 package scala
 
+import java.lang.reflect.{ AccessibleObject => jAccessibleObject }
+
 package object reflect {
 
   // in the new scheme of things ClassManifests are aliased to ClassTags
@@ -42,26 +44,23 @@ package object reflect {
 
   def classTag[T](implicit ctag: ClassTag[T]) = ctag
 
+  /** Make a java reflection object accessible, if it is not already
+   *  and it is possible to do so. If a SecurityException is thrown in the
+   *  attempt, it is caught and discarded.
+   */
+  def ensureAccessible[T <: jAccessibleObject](m: T): T = {
+    if (!m.isAccessible) {
+      try m setAccessible true
+      catch { case _: SecurityException => } // does nothing
+    }
+    m
+  }
+
   // anchor for the class tag materialization macro emitted during tag materialization in Implicits.scala
   // implementation is hardwired into `scala.reflect.reify.Taggers`
   // using the mechanism implemented in `scala.tools.reflect.FastTrack`
   // todo. once we have implicit macros for tag generation, we can remove this anchor
-  private[scala] def materializeClassTag[T](): ClassTag[T] = ??? // macro
-
-  @deprecated("Use `@scala.beans.BeanDescription` instead", "2.10.0")
-  type BeanDescription = scala.beans.BeanDescription
-  @deprecated("Use `@scala.beans.BeanDisplayName` instead", "2.10.0")
-  type BeanDisplayName = scala.beans.BeanDisplayName
-  @deprecated("Use `@scala.beans.BeanInfo` instead", "2.10.0")
-  type BeanInfo = scala.beans.BeanInfo
-  @deprecated("Use `@scala.beans.BeanInfoSkip` instead", "2.10.0")
-  type BeanInfoSkip = scala.beans.BeanInfoSkip
-  @deprecated("Use `@scala.beans.BeanProperty` instead", "2.10.0")
-  type BeanProperty = scala.beans.BeanProperty
-  @deprecated("Use `@scala.beans.BooleanBeanProperty` instead", "2.10.0")
-  type BooleanBeanProperty = scala.beans.BooleanBeanProperty
-  @deprecated("Use `@scala.beans.ScalaBeanInfo` instead", "2.10.0")
-  type ScalaBeanInfo = scala.beans.ScalaBeanInfo
+  private[scala] def materializeClassTag[T](): ClassTag[T] = macro ???
 }
 
 /** An exception that indicates an error during Scala reflection */
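
For illustration, the ensureAccessible helper added above flips isAccessible when possible and swallows any SecurityException. A sketch of its use against a private field, assuming the 2.11 signature shown in this hunk; Box is an illustrative class:

    import scala.reflect.ensureAccessible

    class Box { private val secret = 42 }

    object EnsureAccessibleSketch extends App {
      val field = ensureAccessible(classOf[Box].getDeclaredField("secret"))
      println(field.get(new Box))   // 42
    }
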
diff --git a/src/library/scala/runtime/AbstractFunction0.scala b/src/library/scala/runtime/AbstractFunction0.scala
index 1b351c6..1e677e8 100644
--- a/src/library/scala/runtime/AbstractFunction0.scala
+++ b/src/library/scala/runtime/AbstractFunction0.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction1.scala b/src/library/scala/runtime/AbstractFunction1.scala
index a68a82e..178280c 100644
--- a/src/library/scala/runtime/AbstractFunction1.scala
+++ b/src/library/scala/runtime/AbstractFunction1.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -9,6 +9,6 @@
 
 package scala.runtime
 
-abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double/*, scala.AnyRef*/) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double/*, scala.AnyRef*/) +R] extends Function1[T1, R] {
+abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function1[T1, R] {
 
 }
diff --git a/src/library/scala/runtime/AbstractFunction10.scala b/src/library/scala/runtime/AbstractFunction10.scala
index 72c0a2e..776f522 100644
--- a/src/library/scala/runtime/AbstractFunction10.scala
+++ b/src/library/scala/runtime/AbstractFunction10.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction11.scala b/src/library/scala/runtime/AbstractFunction11.scala
index 031f304..76cd8fb 100644
--- a/src/library/scala/runtime/AbstractFunction11.scala
+++ b/src/library/scala/runtime/AbstractFunction11.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction12.scala b/src/library/scala/runtime/AbstractFunction12.scala
index 9823edb..10066ed 100644
--- a/src/library/scala/runtime/AbstractFunction12.scala
+++ b/src/library/scala/runtime/AbstractFunction12.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction13.scala b/src/library/scala/runtime/AbstractFunction13.scala
index 528719b..6c3a457 100644
--- a/src/library/scala/runtime/AbstractFunction13.scala
+++ b/src/library/scala/runtime/AbstractFunction13.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction14.scala b/src/library/scala/runtime/AbstractFunction14.scala
index ecae45a..bf2b673 100644
--- a/src/library/scala/runtime/AbstractFunction14.scala
+++ b/src/library/scala/runtime/AbstractFunction14.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction15.scala b/src/library/scala/runtime/AbstractFunction15.scala
index 5f5e8af..5136f66 100644
--- a/src/library/scala/runtime/AbstractFunction15.scala
+++ b/src/library/scala/runtime/AbstractFunction15.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction16.scala b/src/library/scala/runtime/AbstractFunction16.scala
index c0093c4..dbafab8 100644
--- a/src/library/scala/runtime/AbstractFunction16.scala
+++ b/src/library/scala/runtime/AbstractFunction16.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction17.scala b/src/library/scala/runtime/AbstractFunction17.scala
index caae343..9c36dbf 100644
--- a/src/library/scala/runtime/AbstractFunction17.scala
+++ b/src/library/scala/runtime/AbstractFunction17.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction18.scala b/src/library/scala/runtime/AbstractFunction18.scala
index 9a2bdff..30eee95 100644
--- a/src/library/scala/runtime/AbstractFunction18.scala
+++ b/src/library/scala/runtime/AbstractFunction18.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction19.scala b/src/library/scala/runtime/AbstractFunction19.scala
index 1dbbd61..14baf5f 100644
--- a/src/library/scala/runtime/AbstractFunction19.scala
+++ b/src/library/scala/runtime/AbstractFunction19.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction2.scala b/src/library/scala/runtime/AbstractFunction2.scala
index 0905ea1..223ade9 100644
--- a/src/library/scala/runtime/AbstractFunction2.scala
+++ b/src/library/scala/runtime/AbstractFunction2.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction20.scala b/src/library/scala/runtime/AbstractFunction20.scala
index eb4c085..f5c2957 100644
--- a/src/library/scala/runtime/AbstractFunction20.scala
+++ b/src/library/scala/runtime/AbstractFunction20.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction21.scala b/src/library/scala/runtime/AbstractFunction21.scala
index 98e32b2..15feea3 100644
--- a/src/library/scala/runtime/AbstractFunction21.scala
+++ b/src/library/scala/runtime/AbstractFunction21.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction22.scala b/src/library/scala/runtime/AbstractFunction22.scala
index 67b1339..d77369f 100644
--- a/src/library/scala/runtime/AbstractFunction22.scala
+++ b/src/library/scala/runtime/AbstractFunction22.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction3.scala b/src/library/scala/runtime/AbstractFunction3.scala
index 3a45cdc..f863509 100644
--- a/src/library/scala/runtime/AbstractFunction3.scala
+++ b/src/library/scala/runtime/AbstractFunction3.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction4.scala b/src/library/scala/runtime/AbstractFunction4.scala
index fbf5534..5927015 100644
--- a/src/library/scala/runtime/AbstractFunction4.scala
+++ b/src/library/scala/runtime/AbstractFunction4.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction5.scala b/src/library/scala/runtime/AbstractFunction5.scala
index 949bae8..411e1e1 100644
--- a/src/library/scala/runtime/AbstractFunction5.scala
+++ b/src/library/scala/runtime/AbstractFunction5.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction6.scala b/src/library/scala/runtime/AbstractFunction6.scala
index 337fd9f..411c30d 100644
--- a/src/library/scala/runtime/AbstractFunction6.scala
+++ b/src/library/scala/runtime/AbstractFunction6.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction7.scala b/src/library/scala/runtime/AbstractFunction7.scala
index 2445867..498f986 100644
--- a/src/library/scala/runtime/AbstractFunction7.scala
+++ b/src/library/scala/runtime/AbstractFunction7.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction8.scala b/src/library/scala/runtime/AbstractFunction8.scala
index 6d3dac8..c6d320b 100644
--- a/src/library/scala/runtime/AbstractFunction8.scala
+++ b/src/library/scala/runtime/AbstractFunction8.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractFunction9.scala b/src/library/scala/runtime/AbstractFunction9.scala
index 43cf3d2..34bd9d7 100644
--- a/src/library/scala/runtime/AbstractFunction9.scala
+++ b/src/library/scala/runtime/AbstractFunction9.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala
index 57f8e26..986cd03 100644
--- a/src/library/scala/runtime/AbstractPartialFunction.scala
+++ b/src/library/scala/runtime/AbstractPartialFunction.scala
@@ -6,7 +6,10 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
+
+import scala.annotation.unspecialized
 
 /** `AbstractPartialFunction` reformulates all operations of its supertrait `PartialFunction`
  *  in terms of `isDefinedAt` and `applyOrElse`.
@@ -22,7 +25,7 @@ package scala.runtime
  *  @author  Pavel Pavlov
  *  @since   2.10
  */
-abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends Function1[T1, R] with PartialFunction[T1, R] { self =>
+abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function1[T1, R] with PartialFunction[T1, R] { self =>
   // this method must be overridden for better performance,
   // for backwards compatibility, fall back to the one inherited from PartialFunction
   // this assumes the old-school partial functions override the apply method, though
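
(Illustrative sketch, not part of the patch: a pattern-matching anonymous function is compiled to a subclass of scala.runtime.AbstractPartialFunction that defines isDefinedAt and overrides applyOrElse, so the class can be exercised indirectly like this.)

    val double: PartialFunction[Int, Int] = { case n if n > 0 => n * 2 }
    println(double.isDefinedAt(3))                  // true
    println(double.applyOrElse(-1, (_: Int) => 0))  // falls back to the default: 0
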
diff --git a/src/library/scala/runtime/BooleanRef.java b/src/library/scala/runtime/BooleanRef.java
index 889db31..92e8055 100644
--- a/src/library/scala/runtime/BooleanRef.java
+++ b/src/library/scala/runtime/BooleanRef.java
@@ -17,4 +17,7 @@ public class BooleanRef implements java.io.Serializable {
     public boolean elem;
     public BooleanRef(boolean elem) { this.elem = elem; }
     public String toString() { return String.valueOf(elem); }
+
+    public static BooleanRef create(boolean e) { return new BooleanRef(e); }
+    public static BooleanRef zero() { return new BooleanRef(false); }
 }
diff --git a/src/library/scala/runtime/Boxed.scala b/src/library/scala/runtime/Boxed.scala
index 8b53107..9334447 100644
--- a/src/library/scala/runtime/Boxed.scala
+++ b/src/library/scala/runtime/Boxed.scala
@@ -6,12 +6,7 @@
 **                          |/                                          **
 \*                                                                      */
 
+package scala
+package runtime
 
-
-package scala.runtime
-
-trait Boxed {
-
-}
-
-
+trait Boxed { }
diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java
index 3504c57..82a3b00 100644
--- a/src/library/scala/runtime/BoxesRunTime.java
+++ b/src/library/scala/runtime/BoxesRunTime.java
@@ -10,7 +10,6 @@
 
 package scala.runtime;
 
-import java.io.*;
 import scala.math.ScalaNumber;
 
 /** An object (static class) that defines methods used for creating,
diff --git a/src/library/scala/runtime/ByteRef.java b/src/library/scala/runtime/ByteRef.java
index cc10611..27d3259 100644
--- a/src/library/scala/runtime/ByteRef.java
+++ b/src/library/scala/runtime/ByteRef.java
@@ -17,4 +17,7 @@ public class ByteRef implements java.io.Serializable {
     public byte elem;
     public ByteRef(byte elem) { this.elem = elem; }
     public String toString() { return java.lang.Byte.toString(elem); }
+
+    public static ByteRef create(byte e) { return new ByteRef(e); }
+    public static ByteRef zero() { return new ByteRef((byte)0); }
 }
diff --git a/src/library/scala/runtime/CharRef.java b/src/library/scala/runtime/CharRef.java
index 03d3337..31956f5 100644
--- a/src/library/scala/runtime/CharRef.java
+++ b/src/library/scala/runtime/CharRef.java
@@ -17,4 +17,7 @@ public class CharRef implements java.io.Serializable {
     public char elem;
     public CharRef(char elem) { this.elem = elem; }
     public String toString() { return java.lang.Character.toString(elem); }
+
+    public static CharRef create(char e) { return new CharRef(e); }
+    public static CharRef zero() { return new CharRef((char)0); }
 }
diff --git a/src/library/scala/runtime/DoubleRef.java b/src/library/scala/runtime/DoubleRef.java
index 317198e..0c7d915 100644
--- a/src/library/scala/runtime/DoubleRef.java
+++ b/src/library/scala/runtime/DoubleRef.java
@@ -17,4 +17,7 @@ public class DoubleRef implements java.io.Serializable {
     public double elem;
     public DoubleRef(double elem) { this.elem = elem; }
     public String toString() { return java.lang.Double.toString(elem); }
+
+    public static DoubleRef create(double e) { return new DoubleRef(e); }
+    public static DoubleRef zero() { return new DoubleRef(0); }
 }
diff --git a/src/library/scala/runtime/FloatRef.java b/src/library/scala/runtime/FloatRef.java
index e26b89b..f0e1d5f 100644
--- a/src/library/scala/runtime/FloatRef.java
+++ b/src/library/scala/runtime/FloatRef.java
@@ -17,4 +17,7 @@ public class FloatRef implements java.io.Serializable {
     public float elem;
     public FloatRef(float elem) { this.elem = elem; }
     public String toString() { return java.lang.Float.toString(elem); }
+
+    public static FloatRef create(float e) { return new FloatRef(e); }
+    public static FloatRef zero() { return new FloatRef(0); }
 }
diff --git a/src/library/scala/runtime/IntRef.java b/src/library/scala/runtime/IntRef.java
index edb6faf..adcf474 100644
--- a/src/library/scala/runtime/IntRef.java
+++ b/src/library/scala/runtime/IntRef.java
@@ -17,4 +17,7 @@ public class IntRef implements java.io.Serializable {
     public int elem;
     public IntRef(int elem) { this.elem = elem; }
     public String toString() { return java.lang.Integer.toString(elem); }
+
+    public static IntRef create(int e) { return new IntRef(e); }
+    public static IntRef zero() { return new IntRef(0); }
 }
diff --git a/src/library/scala/runtime/LongRef.java b/src/library/scala/runtime/LongRef.java
index 12004b5..51426ab 100644
--- a/src/library/scala/runtime/LongRef.java
+++ b/src/library/scala/runtime/LongRef.java
@@ -17,4 +17,7 @@ public class LongRef implements java.io.Serializable {
     public long elem;
     public LongRef(long elem) { this.elem = elem; }
     public String toString() { return java.lang.Long.toString(elem); }
+
+    public static LongRef create(long e) { return new LongRef(e); }
+    public static LongRef zero() { return new LongRef(0); }
 }
diff --git a/src/library/scala/runtime/MethodCache.scala b/src/library/scala/runtime/MethodCache.scala
index 217b518..2d5f832 100644
--- a/src/library/scala/runtime/MethodCache.scala
+++ b/src/library/scala/runtime/MethodCache.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
+
 
 import java.lang.reflect.{ Method => JMethod }
 import java.lang.{ Class => JClass }
@@ -20,7 +22,7 @@ import scala.annotation.tailrec
  *  generated per call point, and will uniquely relate to the method called
  *  at that point, making the method name and argument types irrelevant. */
 /* TODO: if performance is acceptable, PolyMethodCache should be made generic on the method type */
-sealed abstract class MethodCache {
+private[scala] sealed abstract class MethodCache {
   /** Searches for a cached method in the `MethodCache` chain that
    *  is compatible with receiver class `forReceiver`. If none is cached,
    *  `null` is returned. If `null` is returned, find's caller should look-
@@ -30,7 +32,7 @@ sealed abstract class MethodCache {
   def add(forReceiver: JClass[_], forMethod: JMethod): MethodCache
 }
 
-final class EmptyMethodCache extends MethodCache {
+private[scala] final class EmptyMethodCache extends MethodCache {
 
   def find(forReceiver: JClass[_]): JMethod = null
 
@@ -39,7 +41,7 @@ final class EmptyMethodCache extends MethodCache {
 
 }
 
-final class MegaMethodCache(
+private[scala] final class MegaMethodCache(
   private[this] val forName: String,
   private[this] val forParameterTypes: Array[JClass[_]]
 ) extends MethodCache {
@@ -51,7 +53,7 @@ final class MegaMethodCache(
 
 }
 
-final class PolyMethodCache(
+private[scala] final class PolyMethodCache(
   private[this] val next: MethodCache,
   private[this] val receiver: JClass[_],
   private[this] val method: JMethod,
diff --git a/src/library/scala/runtime/NonLocalReturnControl.scala b/src/library/scala/runtime/NonLocalReturnControl.scala
index b9525ef..a926956 100644
--- a/src/library/scala/runtime/NonLocalReturnControl.scala
+++ b/src/library/scala/runtime/NonLocalReturnControl.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
 
 import scala.util.control.ControlThrowable
 
diff --git a/src/library/scala/runtime/Nothing$.scala b/src/library/scala/runtime/Nothing$.scala
index 04fcc55..4ecc536 100644
--- a/src/library/scala/runtime/Nothing$.scala
+++ b/src/library/scala/runtime/Nothing$.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
+
 
 /**
  * Dummy class which exist only to satisfy the JVM. It corresponds
diff --git a/src/library/scala/runtime/Null$.scala b/src/library/scala/runtime/Null$.scala
index 797b315..87ce0a2 100644
--- a/src/library/scala/runtime/Null$.scala
+++ b/src/library/scala/runtime/Null$.scala
@@ -6,11 +6,13 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
 
 /**
  * Dummy class which exist only to satisfy the JVM. It corresponds to
  * `scala.Null`. If such type appears in method signatures, it is erased
- * to this one.
+ * to this one. A private constructor ensures that Java code can't create
+ * subclasses. The only value of type Null$ should be null
  */
-sealed abstract class Null$
+sealed abstract class Null$ private ()
diff --git a/src/library/scala/runtime/ObjectRef.java b/src/library/scala/runtime/ObjectRef.java
index c8298b8..b34f81c 100644
--- a/src/library/scala/runtime/ObjectRef.java
+++ b/src/library/scala/runtime/ObjectRef.java
@@ -16,5 +16,9 @@ public class ObjectRef<T> implements java.io.Serializable {
 
     public T elem;
     public ObjectRef(T elem) { this.elem = elem; }
+    @Override
     public String toString() { return String.valueOf(elem); }
+
+    public static <U> ObjectRef<U> create(U e) { return new ObjectRef<U>(e); }
+    public static ObjectRef<Object> zero() { return new ObjectRef<Object>(null); }
 }
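
(Illustrative sketch, not part of the patch: the new create/zero factories on the *Ref classes above are ordinary static methods and can be exercised directly from Scala.)

    val counter = scala.runtime.IntRef.zero()            // elem starts at 0
    counter.elem += 1
    val label = scala.runtime.ObjectRef.create("start")  // ObjectRef[String]
    label.elem = label.elem.toUpperCase
    println((counter.elem, label.elem))                  // (1,START)
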
diff --git a/src/library/scala/runtime/RichBoolean.scala b/src/library/scala/runtime/RichBoolean.scala
index 97e2b77..4f86796 100644
--- a/src/library/scala/runtime/RichBoolean.scala
+++ b/src/library/scala/runtime/RichBoolean.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
+
 
 final class RichBoolean(val self: Boolean) extends AnyVal with OrderedProxy[Boolean] {
   protected def ord = scala.math.Ordering.Boolean
diff --git a/src/library/scala/runtime/RichByte.scala b/src/library/scala/runtime/RichByte.scala
index ca57862..ce658d2 100644
--- a/src/library/scala/runtime/RichByte.scala
+++ b/src/library/scala/runtime/RichByte.scala
@@ -6,9 +6,25 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
+
 
 final class RichByte(val self: Byte) extends AnyVal with ScalaWholeNumberProxy[Byte] {
   protected def num = scala.math.Numeric.ByteIsIntegral
   protected def ord = scala.math.Ordering.Byte
+
+  override def doubleValue() = self.toDouble
+  override def floatValue()  = self.toFloat
+  override def longValue()   = self.toLong
+  override def intValue()    = self.toInt
+  override def byteValue()   = self
+  override def shortValue()  = self.toShort
+
+  override def isValidByte   = true
+
+  override def abs: Byte             = math.abs(self).toByte
+  override def max(that: Byte): Byte = math.max(self, that).toByte
+  override def min(that: Byte): Byte = math.min(self, that).toByte
+  override def signum: Int           = math.signum(self.toInt)
 }
diff --git a/src/library/scala/runtime/RichChar.scala b/src/library/scala/runtime/RichChar.scala
index 5124ca0..71ea3a2 100644
--- a/src/library/scala/runtime/RichChar.scala
+++ b/src/library/scala/runtime/RichChar.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
+
 
 import java.lang.Character
 
@@ -14,6 +16,20 @@ final class RichChar(val self: Char) extends AnyVal with IntegralProxy[Char] {
   protected def num = scala.math.Numeric.CharIsIntegral
   protected def ord = scala.math.Ordering.Char
 
+  override def doubleValue() = self.toDouble
+  override def floatValue()  = self.toFloat
+  override def longValue()   = self.toLong
+  override def intValue()    = self.toInt
+  override def byteValue()   = self.toByte
+  override def shortValue()  = self.toShort
+
+  override def isValidChar   = true
+
+  override def abs: Char             = self
+  override def max(that: Char): Char = math.max(self.toInt, that.toInt).toChar
+  override def min(that: Char): Char = math.min(self.toInt, that.toInt).toChar
+  override def signum: Int           = math.signum(self.toInt)
+
   def asDigit: Int                      = Character.digit(self, Character.MAX_RADIX)
 
   def isControl: Boolean                = Character.isISOControl(self)
diff --git a/src/library/scala/runtime/RichDouble.scala b/src/library/scala/runtime/RichDouble.scala
index 2f16a29..9d7a55d 100644
--- a/src/library/scala/runtime/RichDouble.scala
+++ b/src/library/scala/runtime/RichDouble.scala
@@ -14,6 +14,35 @@ final class RichDouble(val self: Double) extends AnyVal with FractionalProxy[Dou
   protected def ord = scala.math.Ordering.Double
   protected def integralNum = scala.math.Numeric.DoubleAsIfIntegral
 
+  override def doubleValue() = self
+  override def floatValue()  = self.toFloat
+  override def longValue()   = self.toLong
+  override def intValue()    = self.toInt
+  override def byteValue()   = self.toByte
+  override def shortValue()  = self.toShort
+
+  override def isWhole = {
+    val l = self.toLong
+    l.toDouble == self || l == Long.MaxValue && self < Double.PositiveInfinity || l == Long.MinValue && self > Double.NegativeInfinity
+  }
+  override def isValidByte  = self.toByte.toDouble == self
+  override def isValidShort = self.toShort.toDouble == self
+  override def isValidChar  = self.toChar.toDouble == self
+  override def isValidInt   = self.toInt.toDouble == self
+  // override def isValidLong = { val l = self.toLong; l.toDouble == self && l != Long.MaxValue }
+  // override def isValidFloat = self.toFloat.toDouble == self
+  // override def isValidDouble = !java.lang.Double.isNaN(self)
+
+  def isNaN: Boolean         = java.lang.Double.isNaN(self)
+  def isInfinity: Boolean    = java.lang.Double.isInfinite(self)
+  def isPosInfinity: Boolean = Double.PositiveInfinity == self
+  def isNegInfinity: Boolean = Double.NegativeInfinity == self
+
+  override def abs: Double               = math.abs(self)
+  override def max(that: Double): Double = math.max(self, that)
+  override def min(that: Double): Double = math.min(self, that)
+  override def signum: Int               = math.signum(self).toInt  // !!! NaN
+
   def round: Long   = math.round(self)
   def ceil: Double  = math.ceil(self)
   def floor: Double = math.floor(self)
@@ -30,22 +59,4 @@ final class RichDouble(val self: Double) extends AnyVal with FractionalProxy[Dou
    *  @return the measurement of the angle x in degrees.
    */
   def toDegrees: Double = math.toDegrees(self)
-
-  // isNaN is provided by the implicit conversion to java.lang.Double
-  // def isNaN: Boolean = java.lang.Double.isNaN(self)
-  def isInfinity: Boolean = java.lang.Double.isInfinite(self)
-  def isPosInfinity: Boolean = isInfinity && self > 0.0
-  def isNegInfinity: Boolean = isInfinity && self < 0.0
-
-  override def isValidByte = self.toByte.toDouble == self
-  override def isValidShort = self.toShort.toDouble == self
-  override def isValidChar = self.toChar.toDouble == self
-  override def isValidInt = self.toInt.toDouble == self
-  // override def isValidLong = { val l = self.toLong; l.toDouble == self && l != Long.MaxValue }
-  // override def isValidFloat = self.toFloat.toDouble == self
-  // override def isValidDouble = !java.lang.Double.isNaN(self)
-  override def isWhole = {
-    val l = self.toLong
-    l.toDouble == self || l == Long.MaxValue && self < Double.PositiveInfinity || l == Long.MinValue && self > Double.NegativeInfinity
-  }
 }
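
(Quick illustration, not part of the patch, of the RichDouble members consolidated above, assuming the usual Predef enrichment of Double:)

    println(3.0.isWhole)                 // true
    println(3.5.isValidInt)              // false
    println((1.0 / 0.0).isPosInfinity)   // true
    println(Double.NaN.isNaN)            // true
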
diff --git a/src/library/scala/runtime/RichException.scala b/src/library/scala/runtime/RichException.scala
index 94c4137..f01788a 100644
--- a/src/library/scala/runtime/RichException.scala
+++ b/src/library/scala/runtime/RichException.scala
@@ -6,10 +6,12 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
 
 import scala.compat.Platform.EOL
 
+ at deprecated("Use Throwable#getStackTrace", "2.11.0")
 final class RichException(exc: Throwable) {
   def getStackTraceString = exc.getStackTrace().mkString("", EOL, EOL)
 }
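
(Sketch, not part of the patch: the deprecation above points callers at the plain Throwable API; the equivalent of getStackTraceString, using "\n" in place of the platform EOL, is:)

    val t = new RuntimeException("boom")
    println(t.getStackTrace.mkString("", "\n", "\n"))
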
diff --git a/src/library/scala/runtime/RichFloat.scala b/src/library/scala/runtime/RichFloat.scala
index cb0681b..93777f2 100644
--- a/src/library/scala/runtime/RichFloat.scala
+++ b/src/library/scala/runtime/RichFloat.scala
@@ -14,39 +14,50 @@ final class RichFloat(val self: Float) extends AnyVal with FractionalProxy[Float
   protected def ord         = scala.math.Ordering.Float
   protected def integralNum = scala.math.Numeric.FloatAsIfIntegral
 
+  override def doubleValue() = self.toDouble
+  override def floatValue()  = self
+  override def longValue()   = self.toLong
+  override def intValue()    = self.toInt
+  override def byteValue()   = self.toByte
+  override def shortValue()  = self.toShort
+
+  override def isWhole = {
+    val l = self.toLong
+    l.toFloat == self || l == Long.MaxValue && self < Float.PositiveInfinity || l == Long.MinValue && self > Float.NegativeInfinity
+  }
+  override def isValidByte  = self.toByte.toFloat == self
+  override def isValidShort = self.toShort.toFloat == self
+  override def isValidChar  = self.toChar.toFloat == self
+  override def isValidInt   = { val i = self.toInt; i.toFloat == self && i != Int.MaxValue }
+  // override def isValidLong = { val l = self.toLong; l.toFloat == self && l != Long.MaxValue }
+  // override def isValidFloat = !java.lang.Float.isNaN(self)
+  // override def isValidDouble = !java.lang.Float.isNaN(self)
+
+  def isNaN: Boolean         = java.lang.Float.isNaN(self)
+  def isInfinity: Boolean    = java.lang.Float.isInfinite(self)
+  def isPosInfinity: Boolean = Float.PositiveInfinity == self
+  def isNegInfinity: Boolean = Float.NegativeInfinity == self
+
+  override def abs: Float              = math.abs(self)
+  override def max(that: Float): Float = math.max(self, that)
+  override def min(that: Float): Float = math.min(self, that)
+  override def signum: Int             = math.signum(self).toInt  // !!! NaN
+
   def round: Int   = math.round(self)
-  def ceil: Float  = math.ceil(self).toFloat
-  def floor: Float = math.floor(self).toFloat
+  def ceil: Float  = math.ceil(self.toDouble).toFloat
+  def floor: Float = math.floor(self.toDouble).toFloat
 
   /** Converts an angle measured in degrees to an approximately equivalent
    *  angle measured in radians.
    *
    *  @return the measurement of the angle `x` in radians.
    */
-  def toRadians: Float = math.toRadians(self).toFloat
+  def toRadians: Float = math.toRadians(self.toDouble).toFloat
 
   /** Converts an angle measured in radians to an approximately equivalent
    *  angle measured in degrees.
    *
    *  @return the measurement of the angle `x` in degrees.
    */
-  def toDegrees: Float = math.toDegrees(self).toFloat
-
-  // isNaN is provided by the implicit conversion to java.lang.Float
-  // def isNaN: Boolean = java.lang.Float.isNaN(self)
-  def isInfinity: Boolean = java.lang.Float.isInfinite(self)
-  def isPosInfinity: Boolean = isInfinity && self > 0.0f
-  def isNegInfinity: Boolean = isInfinity && self < 0.0f
-
-  override def isValidByte = self.toByte.toFloat == self
-  override def isValidShort = self.toShort.toFloat == self
-  override def isValidChar = self.toChar.toFloat == self
-  override def isValidInt = { val i = self.toInt; i.toFloat == self && i != Int.MaxValue }
-  // override def isValidLong = { val l = self.toLong; l.toFloat == self && l != Long.MaxValue }
-  // override def isValidFloat = !java.lang.Float.isNaN(self)
-  // override def isValidDouble = !java.lang.Float.isNaN(self)
-  override def isWhole = {
-    val l = self.toLong
-    l.toFloat == self || l == Long.MaxValue && self < Float.PositiveInfinity || l == Long.MinValue && self > Float.NegativeInfinity
-  }
+  def toDegrees: Float = math.toDegrees(self.toDouble).toFloat
 }
diff --git a/src/library/scala/runtime/RichInt.scala b/src/library/scala/runtime/RichInt.scala
index 192f94f..cda9d29 100644
--- a/src/library/scala/runtime/RichInt.scala
+++ b/src/library/scala/runtime/RichInt.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
 
 import scala.collection.immutable.Range
 
@@ -15,14 +16,37 @@ import scala.collection.immutable.Range
 final class RichInt(val self: Int) extends AnyVal with ScalaNumberProxy[Int] with RangedProxy[Int] {
   protected def num = scala.math.Numeric.IntIsIntegral
   protected def ord = scala.math.Ordering.Int
-  type ResultWithoutStep = Range
 
-  /**
-    * @return `'''true'''` if this number has no decimal component.
-    *         Always returns `'''true'''` for `RichInt`.
+  override def doubleValue() = self.toDouble
+  override def floatValue()  = self.toFloat
+  override def longValue()   = self.toLong
+  override def intValue()    = self
+  override def byteValue()   = self.toByte
+  override def shortValue()  = self.toShort
+
+  /** Returns `'''true'''` if this number has no decimal component.
+    * Always `'''true'''` for `RichInt`.
     */
   def isWhole() = true
 
+  override def isValidInt   = true
+  def isValidLong  = true
+
+  override def abs: Int            = math.abs(self)
+  override def max(that: Int): Int = math.max(self, that)
+  override def min(that: Int): Int = math.min(self, that)
+  override def signum: Int         = math.signum(self)
+  
+  /** There is no reason to round an `Int`, but this method is provided to avoid accidental loss of precision from a detour through `Float`. */
+  @deprecated("This is an integer type; there is no reason to round it.  Perhaps you meant to call this on a floating-point value?", "2.11.0")
+  def round: Int = self
+
+  def toBinaryString: String = java.lang.Integer.toBinaryString(self)
+  def toHexString: String    = java.lang.Integer.toHexString(self)
+  def toOctalString: String  = java.lang.Integer.toOctalString(self)
+
+  type ResultWithoutStep = Range
+
   /**
     * @param end The final bound of the range to make.
     * @return A [[scala.collection.immutable.Range]] from `this` up to but
@@ -53,23 +77,4 @@ final class RichInt(val self: Int) extends AnyVal with ScalaNumberProxy[Int] wit
     *         and including `end`.
     */
   def to(end: Int, step: Int): Range.Inclusive = Range.inclusive(self, end, step)
-
-  /**
-    * @return `'''this'''` if `'''this''' < that` or `that` otherwise
-    */
-  override def min(that: Int): Int = if (self < that) self else that
-
-  /**
-    * @return `'''this'''` if `'''this''' > that` or `that` otherwise
-    */
-  override def max(that: Int): Int = if (self > that) self else that
-
-  /**
-    * Computes the absolute value of `'''this'''`.
-    */
-  override def abs: Int = if (self < 0) -self else self
-
-  def toBinaryString: String = java.lang.Integer.toBinaryString(self)
-  def toHexString: String = java.lang.Integer.toHexString(self)
-  def toOctalString: String = java.lang.Integer.toOctalString(self)
 }
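
(Quick illustration, not part of the patch, of the RichInt surface rearranged above:)

    println(10.toBinaryString)        // 1010
    println(255.toHexString)          // ff
    println((1 to 10 by 3).toList)    // List(1, 4, 7, 10)
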
diff --git a/src/library/scala/runtime/RichLong.scala b/src/library/scala/runtime/RichLong.scala
index ce2d1fd..b405fcd 100644
--- a/src/library/scala/runtime/RichLong.scala
+++ b/src/library/scala/runtime/RichLong.scala
@@ -6,21 +6,38 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
 
 final class RichLong(val self: Long) extends AnyVal with IntegralProxy[Long] {
   protected def num = scala.math.Numeric.LongIsIntegral
   protected def ord = scala.math.Ordering.Long
 
-  def toBinaryString: String = java.lang.Long.toBinaryString(self)
-  def toHexString: String = java.lang.Long.toHexString(self)
-  def toOctalString: String = java.lang.Long.toOctalString(self)
+  override def doubleValue() = self.toDouble
+  override def floatValue()  = self.toFloat
+  override def longValue()   = self
+  override def intValue()    = self.toInt
+  override def byteValue()   = self.toByte
+  override def shortValue()  = self.toShort
 
-  override def isValidByte = self.toByte.toLong == self
+  override def isValidByte  = self.toByte.toLong == self
   override def isValidShort = self.toShort.toLong == self
-  override def isValidChar = self.toChar.toLong == self
-  override def isValidInt = self.toInt.toLong == self
-  // override def isValidLong = true
+  override def isValidChar  = self.toChar.toLong == self
+  override def isValidInt   = self.toInt.toLong == self
+           def isValidLong  = true
   // override def isValidFloat = self.toFloat.toLong == self && self != Long.MaxValue
   // override def isValidDouble = self.toDouble.toLong == self && self != Long.MaxValue
+
+  override def abs: Long             = math.abs(self)
+  override def max(that: Long): Long = math.max(self, that)
+  override def min(that: Long): Long = math.min(self, that)
+  override def signum: Int           = math.signum(self).toInt
+  
+  /** There is no reason to round a `Long`, but this method is provided to avoid accidental conversion to `Int` through `Float`. */
+  @deprecated("This is an integer type; there is no reason to round it.  Perhaps you meant to call this on a floating-point value?", "2.11.0")
+  def round: Long = self
+
+  def toBinaryString: String = java.lang.Long.toBinaryString(self)
+  def toHexString: String    = java.lang.Long.toHexString(self)
+  def toOctalString: String  = java.lang.Long.toOctalString(self)
 }
diff --git a/src/library/scala/runtime/RichShort.scala b/src/library/scala/runtime/RichShort.scala
index aa24dd2..b35beff 100644
--- a/src/library/scala/runtime/RichShort.scala
+++ b/src/library/scala/runtime/RichShort.scala
@@ -6,9 +6,25 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
+
 
 final class RichShort(val self: Short) extends AnyVal with ScalaWholeNumberProxy[Short] {
   protected def num = scala.math.Numeric.ShortIsIntegral
   protected def ord = scala.math.Ordering.Short
+
+  override def doubleValue() = self.toDouble
+  override def floatValue()  = self.toFloat
+  override def longValue()   = self.toLong
+  override def intValue()    = self.toInt
+  override def byteValue()   = self.toByte
+  override def shortValue()  = self
+
+  override def isValidShort  = true
+
+  override def abs: Short              = math.abs(self.toInt).toShort
+  override def max(that: Short): Short = math.max(self.toInt, that.toInt).toShort
+  override def min(that: Short): Short = math.min(self.toInt, that.toInt).toShort
+  override def signum: Int             = math.signum(self.toInt)
 }
diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala
index 76fc38b..5e4da24 100644
--- a/src/library/scala/runtime/ScalaNumberProxy.scala
+++ b/src/library/scala/runtime/ScalaNumberProxy.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
 
 import scala.collection.{ mutable, immutable }
 import scala.math.{ ScalaNumericConversions, ScalaNumericAnyConversions }
@@ -28,12 +29,16 @@ trait ScalaNumberProxy[T] extends Any with ScalaNumericAnyConversions with Typed
   def floatValue()  = num.toFloat(self)
   def longValue()   = num.toLong(self)
   def intValue()    = num.toInt(self)
-  def byteValue()   = intValue.toByte
-  def shortValue()  = intValue.toShort
+  def byteValue()   = intValue().toByte
+  def shortValue()  = intValue().toShort
 
+  /** Returns `'''this'''` if `'''this''' < that` or `that` otherwise. */
   def min(that: T): T = num.min(self, that)
+  /** Returns `'''this'''` if `'''this''' > that` or `that` otherwise. */
   def max(that: T): T = num.max(self, that)
+  /** Returns the absolute value of `'''this'''`. */
   def abs             = num.abs(self)
+  /** Returns the signum of `'''this'''`. */
   def signum          = num.signum(self)
 }
 trait ScalaWholeNumberProxy[T] extends Any with ScalaNumberProxy[T] {
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index dcd3239..5fb24f2 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -12,10 +12,9 @@ package runtime
 import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
 import scala.collection.mutable.WrappedArray
 import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: }
-import scala.collection.generic.{ Sorted }
+import scala.collection.generic.{ Sorted, IsTraversableLike }
 import scala.reflect.{ ClassTag, classTag }
 import scala.util.control.ControlThrowable
-import scala.xml.{ Node, MetaData }
 import java.lang.{ Class => jClass }
 
 import java.lang.Double.doubleToLongBits
@@ -26,8 +25,7 @@ import java.lang.reflect.{ Modifier, Method => JMethod }
  *  outside the API and subject to change or removal without notice.
  */
 object ScalaRunTime {
-  def isArray(x: AnyRef): Boolean = isArray(x, 1)
-  def isArray(x: Any, atLevel: Int): Boolean =
+  def isArray(x: Any, atLevel: Int = 1): Boolean =
     x != null && isArrayClass(x.getClass, atLevel)
 
   private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean =
@@ -42,6 +40,10 @@ object ScalaRunTime {
     case _                                                                                             => false
   }
 
+  // A helper method to make my life in the pattern matcher a lot easier.
+  def drop[Repr](coll: Repr, num: Int)(implicit traversable: IsTraversableLike[Repr]): Repr =
+    traversable conversion coll drop num
+
   /** Return the class object representing an array with element class `clazz`.
    */
   def arrayClass(clazz: jClass[_]): jClass[_] = {
@@ -155,13 +157,7 @@ object ScalaRunTime {
 
   // Java bug: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4071957
   // More background at ticket #2318.
-  def ensureAccessible(m: JMethod): JMethod = {
-    if (!m.isAccessible) {
-      try m setAccessible true
-      catch { case _: SecurityException => () }
-    }
-    m
-  }
+  def ensureAccessible(m: JMethod): JMethod = scala.reflect.ensureAccessible(m)
 
   def checkInitialized[T <: AnyRef](x: T): T =
     if (x == null) throw new UninitializedError else x
@@ -224,7 +220,7 @@ object ScalaRunTime {
     if (iv == fv) return iv
 
     val lv = fv.toLong
-    if (lv == fv) return hash(lv)
+    if (lv == fv) hash(lv)
     else fv.hashCode
   }
   def hash(lv: Long): Int = {
@@ -272,10 +268,20 @@ object ScalaRunTime {
     def isScalaClass(x: AnyRef)         = packageOf(x) startsWith "scala."
     def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc."
 
+    // We use reflection because the scala.xml package might not be available
+    def isSubClassOf(potentialSubClass: Class[_], ofClass: String) =
+      try {
+        val classLoader = potentialSubClass.getClassLoader
+        val clazz = Class.forName(ofClass, /*initialize =*/ false, classLoader)
+        clazz.isAssignableFrom(potentialSubClass)
+      } catch {
+        case cnfe: ClassNotFoundException => false
+      }
+    def isXmlNode(potentialSubClass: Class[_])     = isSubClassOf(potentialSubClass, "scala.xml.Node")
+    def isXmlMetaData(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.MetaData")
+
     // When doing our own iteration is dangerous
     def useOwnToString(x: Any) = x match {
-      // Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData]
-      case _: Node | _: MetaData => true
       // Range/NumericRange have a custom toString to avoid walking a gazillion elements
       case _: Range | _: NumericRange[_] => true
       // Sorted collections to the wrong thing (for us) on iteration - ticket #3493
@@ -284,10 +290,12 @@ object ScalaRunTime {
       case _: StringLike[_] => true
       // Don't want to evaluate any elements in a view
       case _: TraversableView[_, _] => true
+      // Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData]
+      // -> catch those by isXmlNode and isXmlMetaData.
       // Don't want to a) traverse infinity or b) be overly helpful with peoples' custom
       // collections which may have useful toString methods - ticket #3710
       // or c) print AbstractFiles which are somehow also Iterable[AbstractFile]s.
-      case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x)
+      case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x) || isXmlNode(x.getClass) || isXmlMetaData(x.getClass)
       // Otherwise, nothing could possibly go wrong
       case _ => false
     }
@@ -328,7 +336,7 @@ object ScalaRunTime {
     // to be iterated, such as some scala.tools.nsc.io.AbstractFile derived classes.
     try inner(arg)
     catch {
-      case _: StackOverflowError | _: UnsupportedOperationException | _: AssertionError => "" + arg
+      case _: UnsupportedOperationException | _: AssertionError => "" + arg
     }
   }
 
@@ -339,18 +347,17 @@ object ScalaRunTime {
 
     nl + s + "\n"
   }
-  private[scala] def checkZip(what: String, coll1: TraversableOnce[_], coll2: TraversableOnce[_]) {
-    if (sys.props contains "scala.debug.zip") {
-      val xs = coll1.toIndexedSeq
-      val ys = coll2.toIndexedSeq
-      if (xs.length != ys.length) {
-        Console.err.println(
-          "Mismatched zip in " + what + ":\n" +
-          "  this: " + xs.mkString(", ") + "\n" +
-          "  that: " + ys.mkString(", ")
-        )
-        (new Exception).getStackTrace.drop(2).take(10).foreach(println)
-      }
-    }
+
+  def box[T](clazz: jClass[T]): jClass[_] = clazz match {
+    case java.lang.Byte.TYPE => classOf[java.lang.Byte]
+    case java.lang.Short.TYPE => classOf[java.lang.Short]
+    case java.lang.Character.TYPE => classOf[java.lang.Character]
+    case java.lang.Integer.TYPE => classOf[java.lang.Integer]
+    case java.lang.Long.TYPE => classOf[java.lang.Long]
+    case java.lang.Float.TYPE => classOf[java.lang.Float]
+    case java.lang.Double.TYPE => classOf[java.lang.Double]
+    case java.lang.Void.TYPE => classOf[scala.runtime.BoxedUnit]
+    case java.lang.Boolean.TYPE => classOf[java.lang.Boolean]
+    case _ => clazz
   }
 }
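
(Sketch, not part of the patch: the box helper added above maps a primitive class token to its boxed counterpart and passes every other class through unchanged.)

    import scala.runtime.ScalaRunTime
    println(ScalaRunTime.box(classOf[Int]))     // java.lang.Integer
    println(ScalaRunTime.box(classOf[String]))  // java.lang.String, unchanged
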
diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala
index d2084a6..ce7d7af 100644
--- a/src/library/scala/runtime/SeqCharSequence.scala
+++ b/src/library/scala/runtime/SeqCharSequence.scala
@@ -11,6 +11,7 @@ package runtime
 
 import java.util.Arrays.copyOfRange
 
+ at deprecated("Use Predef.SeqCharSequence", "2.11.0")
 final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends CharSequence {
   def length: Int                                     = xs.length
   def charAt(index: Int): Char                        = xs(index)
@@ -18,6 +19,8 @@ final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends C
   override def toString = xs.mkString("")
 }
 
+// Still need this one since the implicit class ArrayCharSequence only converts
+// a single argument.
 final class ArrayCharSequence(val xs: Array[Char], start: Int, end: Int) extends CharSequence {
   // yikes
   // java.lang.VerifyError: (class: scala/runtime/ArrayCharSequence, method: <init> signature: ([C)V)
diff --git a/src/library/scala/runtime/ShortRef.java b/src/library/scala/runtime/ShortRef.java
index 461b521..e5e8de3 100644
--- a/src/library/scala/runtime/ShortRef.java
+++ b/src/library/scala/runtime/ShortRef.java
@@ -17,4 +17,7 @@ public class ShortRef implements java.io.Serializable {
     public short elem;
     public ShortRef(short elem) { this.elem = elem; }
     public String toString() { return java.lang.Short.toString(elem); }
+
+    public static ShortRef create(short e) { return new ShortRef(e); }
+    public static ShortRef zero() { return new ShortRef((short)0); }
 }
diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala
index 9d848f0..d5b51a6 100644
--- a/src/library/scala/runtime/StringAdd.scala
+++ b/src/library/scala/runtime/StringAdd.scala
@@ -6,9 +6,12 @@
 **                                                                      **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
+
 
 /** A wrapper class that adds string concatenation `+` to any value */
+ at deprecated("Use Predef.StringAdd", "2.11.0")
 final class StringAdd(val self: Any) extends AnyVal {
   def +(other: String) = String.valueOf(self) + other
 }
diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala
index 983ae2f..de32ac7 100644
--- a/src/library/scala/runtime/StringFormat.scala
+++ b/src/library/scala/runtime/StringFormat.scala
@@ -6,10 +6,13 @@
 **                                                                      **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
+
 
 /** A wrapper class that adds a `formatted` operation to any value
  */
+ at deprecated("Use Predef.StringFormat", "2.11.0")
 final class StringFormat(val self: Any) extends AnyVal {
   /** Returns string formatted according to given `format` string.
    *  Format strings are as for `String.format`
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
index ef29075..b28f6d4 100644
--- a/src/library/scala/runtime/Tuple2Zipped.scala
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
+
 
 import scala.collection.{ TraversableLike, IterableLike }
 import scala.collection.generic.{ CanBuildFrom => CBF }
@@ -37,12 +39,12 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
 
     for (el1 <- colls._1) {
       if (elems2.hasNext)
-        b += f(el1, elems2.next)
+        b += f(el1, elems2.next())
       else
-        return b.result
+        return b.result()
     }
 
-    b.result
+    b.result()
   }
 
   def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
@@ -51,12 +53,12 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
 
     for (el1 <- colls._1) {
       if (elems2.hasNext)
-        b ++= f(el1, elems2.next)
+        b ++= f(el1, elems2.next())
       else
-        return b.result
+        return b.result()
     }
 
-    b.result
+    b.result()
   }
 
   def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = {
@@ -66,16 +68,16 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
 
     for (el1 <- colls._1) {
       if (elems2.hasNext) {
-        val el2 = elems2.next
+        val el2 = elems2.next()
         if (f(el1, el2)) {
           b1 += el1
           b2 += el2
         }
       }
-      else return (b1.result, b2.result)
+      else return (b1.result(), b2.result())
     }
 
-    (b1.result, b2.result)
+    (b1.result(), b2.result())
   }
 
   def exists(f: (El1, El2) => Boolean): Boolean = {
@@ -83,7 +85,7 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
 
     for (el1 <- colls._1) {
       if (elems2.hasNext) {
-        if (f(el1, elems2.next))
+        if (f(el1, elems2.next()))
           return true
       }
       else return false
@@ -99,7 +101,7 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
 
     for (el1 <- colls._1) {
       if (elems2.hasNext)
-        f(el1, elems2.next)
+        f(el1, elems2.next())
       else
         return
     }
@@ -117,9 +119,9 @@ object Tuple2Zipped {
         val it1 = x._1.toIterator
         val it2 = x._2.toIterator
         while (it1.hasNext && it2.hasNext)
-          buf += ((it1.next, it2.next))
+          buf += ((it1.next(), it2.next()))
 
-        buf.result
+        buf.result()
       }
 
     def zipped[El1, Repr1, El2, Repr2]
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
index 3f2afaf..7c50138 100644
--- a/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -6,7 +6,9 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.runtime
+package scala
+package runtime
+
 
 import scala.collection.{ TraversableLike, IterableLike }
 import scala.collection.generic.{ CanBuildFrom => CBF }
@@ -34,11 +36,11 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
 
     for (el1 <- colls._1) {
       if (elems2.hasNext && elems3.hasNext)
-        b += f(el1, elems2.next, elems3.next)
+        b += f(el1, elems2.next(), elems3.next())
       else
-        return b.result
+        return b.result()
     }
-    b.result
+    b.result()
   }
 
   def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
@@ -48,11 +50,11 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
 
     for (el1 <- colls._1) {
       if (elems2.hasNext && elems3.hasNext)
-        b ++= f(el1, elems2.next, elems3.next)
+        b ++= f(el1, elems2.next(), elems3.next())
       else
-        return b.result
+        return b.result()
     }
-    b.result
+    b.result()
   }
 
   def filter[To1, To2, To3](f: (El1, El2, El3) => Boolean)(
@@ -64,12 +66,12 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
     val b3 = cbf3(colls._3.repr)
     val elems2 = colls._2.iterator
     val elems3 = colls._3.iterator
-    def result = (b1.result, b2.result, b3.result)
+    def result = (b1.result(), b2.result(), b3.result())
 
     for (el1 <- colls._1) {
       if (elems2.hasNext && elems3.hasNext) {
-        val el2 = elems2.next
-        val el3 = elems3.next
+        val el2 = elems2.next()
+        val el3 = elems3.next()
 
         if (f(el1, el2, el3)) {
           b1 += el1
@@ -89,7 +91,7 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
 
     for (el1 <- colls._1) {
       if (elems2.hasNext && elems3.hasNext) {
-        if (f(el1, elems2.next, elems3.next))
+        if (f(el1, elems2.next(), elems3.next()))
           return true
       }
       else return false
@@ -106,7 +108,7 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
 
     for (el1 <- colls._1) {
       if (elems2.hasNext && elems3.hasNext)
-        f(el1, elems2.next, elems3.next)
+        f(el1, elems2.next(), elems3.next())
       else
         return
     }
@@ -126,9 +128,9 @@ object Tuple3Zipped {
         val it2 = x._2.toIterator
         val it3 = x._3.toIterator
         while (it1.hasNext && it2.hasNext && it3.hasNext)
-          buf += ((it1.next, it2.next, it3.next))
+          buf += ((it1.next(), it2.next(), it3.next()))
 
-        buf.result
+        buf.result()
       }
 
     def zipped[El1, Repr1, El2, Repr2, El3, Repr3]
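
(Sketch, not part of the patch: these runtime classes back the user-facing (xs, ys).zipped and (xs, ys, zs).zipped views, which traverse the collections in lockstep without building tuples first.)

    val xs = List(1, 2, 3); val ys = List(10, 20, 30); val zs = List(100, 200, 300)
    println((xs, ys).zipped.map(_ + _))          // List(11, 22, 33)
    println((xs, ys, zs).zipped.map(_ + _ + _))  // List(111, 222, 333)
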
diff --git a/src/library/scala/runtime/VolatileBooleanRef.java b/src/library/scala/runtime/VolatileBooleanRef.java
index e3bd182..ef5b691 100755
--- a/src/library/scala/runtime/VolatileBooleanRef.java
+++ b/src/library/scala/runtime/VolatileBooleanRef.java
@@ -17,4 +17,7 @@ public class VolatileBooleanRef implements java.io.Serializable {
     volatile public boolean elem;
     public VolatileBooleanRef(boolean elem) { this.elem = elem; }
     public String toString() { return String.valueOf(elem); }
+
+    public static VolatileBooleanRef create(boolean e) { return new VolatileBooleanRef(e); }
+    public static VolatileBooleanRef zero() { return new VolatileBooleanRef(false); }
 }
diff --git a/src/library/scala/runtime/VolatileByteRef.java b/src/library/scala/runtime/VolatileByteRef.java
index 034b003..d792b0a 100755
--- a/src/library/scala/runtime/VolatileByteRef.java
+++ b/src/library/scala/runtime/VolatileByteRef.java
@@ -17,4 +17,7 @@ public class VolatileByteRef implements java.io.Serializable {
     volatile public byte elem;
     public VolatileByteRef(byte elem) { this.elem = elem; }
     public String toString() { return java.lang.Byte.toString(elem); }
+
+    public static VolatileByteRef create(byte e) { return new VolatileByteRef(e); }
+    public static VolatileByteRef zero() { return new VolatileByteRef((byte)0); }
 }
diff --git a/src/library/scala/runtime/VolatileCharRef.java b/src/library/scala/runtime/VolatileCharRef.java
index f90648c..555b171 100755
--- a/src/library/scala/runtime/VolatileCharRef.java
+++ b/src/library/scala/runtime/VolatileCharRef.java
@@ -17,4 +17,7 @@ public class VolatileCharRef implements java.io.Serializable {
     volatile public char elem;
     public VolatileCharRef(char elem) { this.elem = elem; }
     public String toString() { return java.lang.Character.toString(elem); }
+
+    public static VolatileCharRef create(char e) { return new VolatileCharRef(e); }
+    public static VolatileCharRef zero() { return new VolatileCharRef((char)0); }
 }
diff --git a/src/library/scala/runtime/VolatileDoubleRef.java b/src/library/scala/runtime/VolatileDoubleRef.java
index d47c957..1932055 100755
--- a/src/library/scala/runtime/VolatileDoubleRef.java
+++ b/src/library/scala/runtime/VolatileDoubleRef.java
@@ -16,4 +16,7 @@ public class VolatileDoubleRef implements java.io.Serializable {
     volatile public double elem;
     public VolatileDoubleRef(double elem) { this.elem = elem; }
     public String toString() { return java.lang.Double.toString(elem); }
+
+    public static VolatileDoubleRef create(double e) { return new VolatileDoubleRef(e); }
+    public static VolatileDoubleRef zero() { return new VolatileDoubleRef(0); }
 }
diff --git a/src/library/scala/runtime/VolatileFloatRef.java b/src/library/scala/runtime/VolatileFloatRef.java
index 97da95f..3a81be1 100755
--- a/src/library/scala/runtime/VolatileFloatRef.java
+++ b/src/library/scala/runtime/VolatileFloatRef.java
@@ -17,4 +17,7 @@ public class VolatileFloatRef implements java.io.Serializable {
     volatile public float elem;
     public VolatileFloatRef(float elem) { this.elem = elem; }
     public String toString() { return java.lang.Float.toString(elem); }
+
+    public static VolatileFloatRef create(float e) { return new VolatileFloatRef(e); }
+    public static VolatileFloatRef zero() { return new VolatileFloatRef(0); }
 }
diff --git a/src/library/scala/runtime/VolatileIntRef.java b/src/library/scala/runtime/VolatileIntRef.java
index e8a68a1..ae015bc 100755
--- a/src/library/scala/runtime/VolatileIntRef.java
+++ b/src/library/scala/runtime/VolatileIntRef.java
@@ -16,4 +16,7 @@ public class VolatileIntRef implements java.io.Serializable {
     volatile public int elem;
     public VolatileIntRef(int elem) { this.elem = elem; }
     public String toString() { return java.lang.Integer.toString(elem); }
+
+    public static VolatileIntRef create(int e) { return new VolatileIntRef(e); }
+    public static VolatileIntRef zero() { return new VolatileIntRef(0); }
 }
diff --git a/src/library/scala/runtime/VolatileLongRef.java b/src/library/scala/runtime/VolatileLongRef.java
index 80e627c..e596f5a 100755
--- a/src/library/scala/runtime/VolatileLongRef.java
+++ b/src/library/scala/runtime/VolatileLongRef.java
@@ -17,4 +17,7 @@ public class VolatileLongRef implements java.io.Serializable {
     volatile public long elem;
     public VolatileLongRef(long elem) { this.elem = elem; }
     public String toString() { return java.lang.Long.toString(elem); }
+
+    public static VolatileLongRef create(long e) { return new VolatileLongRef(e); }
+    public static VolatileLongRef zero() { return new VolatileLongRef(0); }
 }
diff --git a/src/library/scala/runtime/VolatileObjectRef.java b/src/library/scala/runtime/VolatileObjectRef.java
index 848b063..6063501 100755
--- a/src/library/scala/runtime/VolatileObjectRef.java
+++ b/src/library/scala/runtime/VolatileObjectRef.java
@@ -16,5 +16,9 @@ public class VolatileObjectRef<T> implements java.io.Serializable {
 
     volatile public T elem;
     public VolatileObjectRef(T elem) { this.elem = elem; }
+    @Override
     public String toString() { return String.valueOf(elem); }
+
+    public static <U> VolatileObjectRef<U> create(U e) { return new VolatileObjectRef<U>(e); }
+    public static VolatileObjectRef<Object> zero() { return new VolatileObjectRef<Object>(null); }
 }
diff --git a/src/library/scala/runtime/VolatileShortRef.java b/src/library/scala/runtime/VolatileShortRef.java
index 4e91d0d..0a28259 100755
--- a/src/library/scala/runtime/VolatileShortRef.java
+++ b/src/library/scala/runtime/VolatileShortRef.java
@@ -17,4 +17,7 @@ public class VolatileShortRef implements java.io.Serializable {
     volatile public short elem;
     public VolatileShortRef(short elem) { this.elem = elem; }
     public String toString() { return java.lang.Short.toString(elem); }
+
+    public static VolatileShortRef create(short e) { return new VolatileShortRef(e); }
+    public static VolatileShortRef zero() { return new VolatileShortRef((short)0); }
 }
diff --git a/src/library/scala/runtime/WorksheetSupport.scala b/src/library/scala/runtime/WorksheetSupport.scala
deleted file mode 100644
index 016a0d0..0000000
--- a/src/library/scala/runtime/WorksheetSupport.scala
+++ /dev/null
@@ -1,94 +0,0 @@
-package scala.runtime
-import java.io.{OutputStream, PrintStream}
-import scala.runtime.ScalaRunTime.stringOf
-
-/** A utility object that's needed by the code that executes a worksheet.
- */
- at deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
-object WorksheetSupport {
-
-  /** The offset in the source which should be printed */
-  private var currentOffset = 0
-
-  /** A stream that flushes in regular intervals so that output can be captured
-   *  in real time. The flush interval is determined by the field "flushInterval".
-   *  By default it is 30ms.
-   */
-  private class FlushedOutputStream(out: OutputStream) extends OutputStream {
-    protected def flushInterval = 30000000L // interval between flushes, by default 30ms
-    protected def width = 80                // output width, by default 80 characters
-    protected def tabInc = 8                // tab increment, by default 8 characters
-    private var lastFlush: Long = 0L
-    private var col = -1
-    override def write(b: Array[Byte], off: Int, len: Int) = {
-      for (idx <- off until (off + len min b.length)) writeOne(b(idx))
-      flush()
-    }
-    override def write(c: Int) {
-      writeOne(c)
-      flush()
-    }
-    override def flush() {
-      val current = System.nanoTime
-      if (current - lastFlush >= flushInterval) {
-        out.flush()
-        lastFlush = current
-      }
-    }
-    def writeOne(c: Int) {
-      if (col < 0) {
-        col = 0
-        write((currentOffset+" ").getBytes)
-      }
-      out.write(c)
-      col =
-        if (c == '\n') -1
-        else if (c == '\t') (col / tabInc) * tabInc + tabInc
-        else col + 1
-      if (col >= width) writeOne('\n')
-    }
-    def ensureNewLine() = if (col > 0) writeOne('\n')
-  }
-
-  private val flushedOut = new FlushedOutputStream(System.out)
-  private val printOut = new PrintStream(flushedOut)
-
-  private def redirected(op: => Unit) = {
-    val oldSysOut = System.out
-    val oldSysErr = System.err
-    val oldConsOut = Console.out
-    val oldConsErr = Console.err
-    System.setOut(printOut)
-    System.setErr(printOut)
-    Console.setOut(printOut)
-    Console.setErr(printOut)
-    try op
-    finally {
-      printOut.close()
-      System.setOut(oldSysOut)
-      System.setErr(oldSysErr)
-      Console.setOut(oldConsOut)
-      Console.setErr(oldConsErr)
-    }
-  }
-
-  def $execute(op: => Unit) = redirected {
-    try op
-    catch {
-      case ex: StopException => ;
-      case ex: Throwable => ex.printStackTrace()
-    }
-  }
-
-  def $skip(n: Int) = {
-    flushedOut.ensureNewLine()
-    currentOffset += n
-  }
-
-  def $stop() = throw new StopException
-
-  def $show(x: Any): String = stringOf(x)
-}
-
-class StopException extends Exception
-
diff --git a/src/library/scala/sys/BooleanProp.scala b/src/library/scala/sys/BooleanProp.scala
index e3c25bb..74b0a90 100644
--- a/src/library/scala/sys/BooleanProp.scala
+++ b/src/library/scala/sys/BooleanProp.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.sys
+package scala
+package sys
 
 import scala.language.implicitConversions
 
diff --git a/src/library/scala/sys/PropImpl.scala b/src/library/scala/sys/PropImpl.scala
index b50e0e1..3b451ab 100644
--- a/src/library/scala/sys/PropImpl.scala
+++ b/src/library/scala/sys/PropImpl.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.sys
+package scala
+package sys
 
 import scala.collection.mutable
 
diff --git a/src/library/scala/sys/ShutdownHookThread.scala b/src/library/scala/sys/ShutdownHookThread.scala
index a8f4871..6018ac8 100644
--- a/src/library/scala/sys/ShutdownHookThread.scala
+++ b/src/library/scala/sys/ShutdownHookThread.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.sys
+package scala
+package sys
 
 /** A minimal Thread wrapper to enhance shutdown hooks.  It knows
  *  how to unregister itself.
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index da9adb3..39f66f5 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.sys
+package scala
+package sys
 
 import scala.collection.{ mutable, Iterator }
 import scala.collection.JavaConverters._
@@ -64,7 +65,6 @@ object SystemProperties {
     propertyHelp(p.key) = helpText
     p
   }
-  private def str(key: String, helpText: String) = addHelp(Prop[String](key), helpText)
   private def bool(key: String, helpText: String): BooleanProp = addHelp[BooleanProp](
     if (key startsWith "java.") BooleanProp.valueIsTrue(key) else BooleanProp.keyExists(key),
     helpText
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index 0003df6..b31bbf0 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.sys
+package scala
+package sys
 package process
 
 import processInternal._
@@ -46,7 +47,7 @@ object BasicIO {
       def next(): Stream[T] = q.take match {
         case Left(0)    => Stream.empty
         case Left(code) => if (nonzeroException) scala.sys.error("Nonzero exit code: " + code) else Stream.empty
-        case Right(s)   => Stream.cons(s, next)
+        case Right(s)   => Stream.cons(s, next())
       }
       new Streamed((s: T) => q put Right(s), code => q put Left(code), () => next())
     }
@@ -161,21 +162,29 @@ object BasicIO {
     */
   def processFully(processLine: String => Unit): InputStream => Unit = in => {
     val reader = new BufferedReader(new InputStreamReader(in))
-    processLinesFully(processLine)(reader.readLine)
-    reader.close()
+    try processLinesFully(processLine)(reader.readLine)
+    finally reader.close()
   }
 
   /** Calls `processLine` with the result of `readLine` until the latter returns
-    * `null`.
-    */
+   *  `null` or the current thread is interrupted.
+   */
   def processLinesFully(processLine: String => Unit)(readLine: () => String) {
-    def readFully() {
-      val line = readLine()
-      if (line != null) {
-        processLine(line)
-        readFully()
+    def working = (Thread.currentThread.isInterrupted == false)
+    def halting = { Thread.currentThread.interrupt(); null }
+    def readFully(): Unit =
+      if (working) {
+        val line =
+          try readLine()
+          catch {
+            case _: InterruptedException    => halting
+            case e: IOException if !working => halting
+          }
+        if (line != null) {
+          processLine(line)
+          readFully()
+        }
       }
-    }
     readFully()
   }
 
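
The reworked `processLinesFully` stops reading once the current thread is interrupted (re-asserting the interrupt flag, and treating an `IOException` raised after interruption the same way), and `processFully` now closes its reader in a `finally` block. A hedged sketch of the observable effect; the `sh` command below is illustrative and assumes a Unix-like shell:

    import scala.sys.process._

    // Stream a long-running command's output through a ProcessLogger. Destroying the
    // process interrupts its output threads, so the library's read loop terminates
    // instead of blocking forever on readLine().
    val slowProducer = Seq("sh", "-c", "while true; do echo tick; sleep 1; done")
    val logger = ProcessLogger(line => println(s"out: $line"), line => println(s"err: $line"))
    val proc = slowProducer.run(logger)
    Thread.sleep(3000)
    proc.destroy()
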
diff --git a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala
index 715b364..dcd06c8 100644
--- a/src/library/scala/sys/process/Process.scala
+++ b/src/library/scala/sys/process/Process.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.sys
+package scala
+package sys
 package process
 
 import processInternal._
@@ -126,15 +127,6 @@ trait ProcessCreation {
     */
   def apply(url: URL): URLBuilder                     = new URLImpl(url)
 
-  /** Creates a [[scala.sys.process.ProcessBuilder]] from a Scala XML Element.
-    * This can be used as a way to template strings.
-    *
-    * @example {{{
-    * apply(<x> {dxPath.absolutePath} --dex --output={classesDexPath.absolutePath} {classesMinJarPath.absolutePath}</x>)
-    * }}}
-    */
-  def apply(command: scala.xml.Elem): ProcessBuilder  = apply(command.text.trim)
-
   /** Creates a [[scala.sys.process.ProcessBuilder]] from a `Boolean`. This can be
     * to force an exit value.
     */
@@ -219,14 +211,6 @@ trait ProcessImplicits {
     */
   implicit def urlToProcess(url: URL): URLBuilder                         = apply(url)
 
-  /** Implicitly convert a [[scala.xml.Elem]] into a
-    * [[scala.sys.process.ProcessBuilder]]. This is done by obtaining the text
-    * elements of the element, trimming spaces, and then converting the result
-    * from string to a process. Importantly, tags are completely ignored, so
-    * they cannot be used to separate parameters.
-    */
-  implicit def xmlToProcess(command: scala.xml.Elem): ProcessBuilder      = apply(command)
-
   /** Implicitly convert a `String` into a [[scala.sys.process.ProcessBuilder]]. */
   implicit def stringToProcess(command: String): ProcessBuilder           = apply(command)
 
diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala
index d0b2ecf..ac86495 100644
--- a/src/library/scala/sys/process/ProcessBuilder.scala
+++ b/src/library/scala/sys/process/ProcessBuilder.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.sys
+package scala
+package sys
 package process
 
 import processInternal._
@@ -23,15 +24,13 @@ import ProcessBuilder._
   * based on these factories made available in the package object
   * [[scala.sys.process]]. Here are some examples:
   * {{{
-  * import.scala.sys.process._
+  * import scala.sys.process._
   *
   * // Executes "ls" and sends output to stdout
   * "ls".!
   *
   * // Execute "ls" and assign a `Stream[String]` of its output to "contents".
-  * // Because [[scala.Predef]] already defines a `lines` method for `String`,
-  * // we use [[scala.sys.process.Process]]'s object companion to create it.
-  * val contents = Process("ls").lines
+  * val contents = Process("ls").lineStream
   *
   * // Here we use a `Seq` to make the parameter whitespace-safe
   * def contentsOf(dir: String): String = Seq("ls", dir).!!
@@ -46,14 +45,14 @@ import ProcessBuilder._
   *
   * Two existing `ProcessBuilder` can be combined in the following ways:
   *
-  *   * They can be executed in parallel, with the output of the first being fed
-  *   as input to the second, like Unix pipes. This is achieved with the `#|`
-  *   method.
-  *   * They can be executed in sequence, with the second starting as soon as
-  *   the first ends. This is done by the `###` method.
-  *   * The execution of the second one can be conditioned by the return code
-  *   (exit status) of the first, either only when it's zero, or only when it's
-  *   not zero. The methods `#&&` and `#||` accomplish these tasks.
+  *   - They can be executed in parallel, with the output of the first being fed
+  *     as input to the second, like Unix pipes. This is achieved with the `#|`
+  *     method.
+  *   - They can be executed in sequence, with the second starting as soon as
+  *     the first ends. This is done by the `###` method.
+  *   - The execution of the second one can be conditioned by the return code
+  *     (exit status) of the first, either only when it's zero, or only when it's
+  *     not zero. The methods `#&&` and `#||` accomplish these tasks.
   *
   * ==Redirecting Input/Output==
   *
@@ -61,7 +60,7 @@ import ProcessBuilder._
   * there's a few methods that create a new `ProcessBuilder` with a
   * pre-configured input or output. They are `#<`, `#>` and `#>>`, and may take
   * as input either another `ProcessBuilder` (like the pipe described above), or
-  * something else such as a `java.io.File` or a `java.lang.InputStream`.
+  * something else such as a `java.io.File` or a `java.io.InputStream`.
   * For example:
   * {{{
   * new URL("http://databinder.net/dispatch/About") #> "grep JSON" #>> new File("About_JSON") !
@@ -74,18 +73,18 @@ import ProcessBuilder._
   * overloads and variations to enable further control over the I/O. These
   * methods are:
   *
-  *   * `run`: the most general method, it returns a
-  *   [[scala.sys.process.Process]] immediately, and the external command
-  *   executes concurrently.
-  *   * `!`: blocks until all external commands exit, and returns the exit code
-  *   of the last one in the chain of execution.
-  *   * `!!`: blocks until all external commands exit, and returns a `String`
-  *   with the output generated.
-  *   * `lines`: returns immediately like `run`, and the output being generared
-  *   is provided through a `Stream[String]`. Getting the next element of that
-  *   `Stream` may block until it becomes available. This method will throw an
-  *   exception if the return code is different than zero -- if this is not
-  *   desired, use the `lines_!` method.
+  *   - `run`: the most general method, it returns a
+  *     [[scala.sys.process.Process]] immediately, and the external command
+  *     executes concurrently.
+  *   - `!`: blocks until all external commands exit, and returns the exit code
+  *     of the last one in the chain of execution.
+  *   - `!!`: blocks until all external commands exit, and returns a `String`
+  *     with the output generated.
+  *   - `lineStream`: returns immediately like `run`, and the output being generated
+  *     is provided through a `Stream[String]`. Getting the next element of that
+  *     `Stream` may block until it becomes available. This method will throw an
+  *     exception if the return code is different than zero -- if this is not
+  *     desired, use the `lineStream_!` method.
   *
   * ==Handling Input and Output==
   *
@@ -122,14 +121,22 @@ import ProcessBuilder._
   *   1. `#&&` conditionally executes the second command if the previous one finished with
   *      exit value 0. It mirrors shell's `&&`.
   *   1. `#||` conditionally executes the third command if the exit value of the previous
-  *      command is different than zero. It mirrors shell's `&&`.
+  *      command is different than zero. It mirrors shell's `||`.
   *
   * Finally, `!` at the end executes the commands, and returns the exit value.
   * Whatever is printed will be sent to the Scala process standard output. If
-  * we wanted to caputre it, we could run that with `!!` instead.
+  * we wanted to capture it, we could run that with `!!` instead.
   *
   * Note: though it is not shown above, the equivalent of a shell's `;` would be
   * `###`. The reason for this name is that `;` is a reserved token in Scala.
+  *
+  * Note: the `lines` method, though deprecated, may conflict with the `StringLike`
+  * method of the same name.  To avoid this, one may wish to call the builders in
+  * `Process` instead of importing `scala.sys.process._`.  The example above would be
+  * {{{
+  * import scala.sys.process.Process
+  * Process("find src -name *.scala -exec grep null {} ;") #| Process("xargs test -z") #&& Process("echo null-free") #|| Process("echo null detected") !
+  * }}}
   */
 trait ProcessBuilder extends Source with Sink {
   /** Starts the process represented by this builder, blocks until it exits, and
@@ -164,15 +171,23 @@ trait ProcessBuilder extends Source with Sink {
     * with a non-zero value, the Stream will provide all lines up to termination
     * and then throw an exception.
     */
-  def lines: Stream[String]
+  def lineStream: Stream[String]
+  
+  /** Deprecated (renamed). Use `lineStream` instead. */
+  @deprecated("Use lineStream instead.", "2.11.0")
+  def lines: Stream[String] = lineStream
 
   /** Starts the process represented by this builder.  The output is returned as
     * a Stream that blocks when lines are not available but the process has not
     * completed.  Standard error is sent to the provided ProcessLogger.  If the
     * process exits with a non-zero value, the Stream will provide all lines up
-    * to termination but will not throw an exception.
+    * to termination and then throw an exception.
     */
-  def lines(log: ProcessLogger): Stream[String]
+  def lineStream(log: ProcessLogger): Stream[String]
+  
+  /** Deprecated (renamed).  Use `lineStream(log: ProcessLogger)` instead. */
+  @deprecated("Use lineStream instead.", "2.11.0")
+  def lines(log: ProcessLogger): Stream[String] = lineStream(log)
 
   /** Starts the process represented by this builder.  The output is returned as
     * a Stream that blocks when lines are not available but the process has not
@@ -180,7 +195,11 @@ trait ProcessBuilder extends Source with Sink {
     * with a non-zero value, the Stream will provide all lines up to termination
     * but will not throw an exception.
     */
-  def lines_! : Stream[String]
+  def lineStream_! : Stream[String]
+  
+  /** Deprecated (renamed).  Use `lineStream_!` instead. */
+  @deprecated("Use lineStream_! instead.", "2.11.0")  
+  def lines_! : Stream[String] = lineStream_!
 
   /** Starts the process represented by this builder.  The output is returned as
     * a Stream that blocks when lines are not available but the process has not
@@ -188,7 +207,11 @@ trait ProcessBuilder extends Source with Sink {
     * process exits with a non-zero value, the Stream will provide all lines up
     * to termination but will not throw an exception.
     */
-  def lines_!(log: ProcessLogger): Stream[String]
+  def lineStream_!(log: ProcessLogger): Stream[String]
+  
+  /** Deprecated (renamed).  Use `lineStream_!(log: ProcessLogger)` instead. */
+  @deprecated("Use lineStream_! instead.", "2.11.0")
+  def lines_!(log: ProcessLogger): Stream[String] = lineStream_!(log)
 
   /** Starts the process represented by this builder, blocks until it exits, and
     * returns the exit code.  Standard output and error are sent to the console.
@@ -305,10 +328,10 @@ object ProcessBuilder extends ProcessBuilderImpl {
     protected def toSource: ProcessBuilder
 
     /** Writes the output stream of this process to the given file. */
-    def #> (f: File): ProcessBuilder = toFile(f, false)
+    def #> (f: File): ProcessBuilder = toFile(f, append = false)
 
     /** Appends the output stream of this process to the given file. */
-    def #>> (f: File): ProcessBuilder = toFile(f, true)
+    def #>> (f: File): ProcessBuilder = toFile(f, append = true)
 
     /** Writes the output stream of this process to the given OutputStream. The
       * argument is call-by-name, so the stream is recreated, written, and closed each
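
A short, hedged sketch tying together the renamed `lineStream`/`lineStream_!` methods and the combinators described above; the commands and file name are illustrative and assume a Unix-like environment:

    import scala.sys.process._
    import java.io.File

    // Pipe two commands and append the result to a file; `!` runs the chain and returns the exit code.
    val exit = ("ls" #| "grep .scala" #>> new File("scala-files.txt")).!

    // Lazily stream output; lineStream throws on a non-zero exit, lineStream_! does not.
    val matches: Stream[String] = Process("grep -r lineStream src").lineStream_!
    val firstMatch: Option[String] = matches.headOption

    // Conditional chaining, mirroring the shell's && and ||.
    val status = ("test -d src" #&& "echo src exists" #|| "echo no src directory").!
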
diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala
index 49fea6f..236baaf 100644
--- a/src/library/scala/sys/process/ProcessBuilderImpl.scala
+++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.sys
+package scala
+package sys
 package process
 
 import processInternal._
@@ -69,7 +70,7 @@ private[process] trait ProcessBuilderImpl {
       import io._
 
       // spawn threads that process the input, output, and error streams using the functions defined in `io`
-      val inThread  = Spawn(writeInput(process.getOutputStream), true)
+      val inThread  = Spawn(writeInput(process.getOutputStream), daemon = true)
       val outThread = Spawn(processOutput(process.getInputStream), daemonizeThreads)
       val errorThread =
         if (p.redirectErrorStream) Nil
@@ -93,26 +94,26 @@ private[process] trait ProcessBuilderImpl {
     def #&&(other: ProcessBuilder): ProcessBuilder = new AndBuilder(this, other)
     def ###(other: ProcessBuilder): ProcessBuilder = new SequenceBuilder(this, other)
 
-    def run(): Process                                          = run(false)
+    def run(): Process                                          = run(connectInput = false)
     def run(connectInput: Boolean): Process                     = run(BasicIO.standard(connectInput))
-    def run(log: ProcessLogger): Process                        = run(log, false)
+    def run(log: ProcessLogger): Process                        = run(log, connectInput = false)
     def run(log: ProcessLogger, connectInput: Boolean): Process = run(BasicIO(connectInput, log))
 
-    def !!                      = slurp(None, false)
-    def !!(log: ProcessLogger)  = slurp(Some(log), false)
-    def !!<                     = slurp(None, true)
-    def !!<(log: ProcessLogger) = slurp(Some(log), true)
+    def !!                      = slurp(None, withIn = false)
+    def !!(log: ProcessLogger)  = slurp(Some(log), withIn = false)
+    def !!<                     = slurp(None, withIn = true)
+    def !!<(log: ProcessLogger) = slurp(Some(log), withIn = true)
 
-    def lines: Stream[String]                       = lines(false, true, None)
-    def lines(log: ProcessLogger): Stream[String]   = lines(false, true, Some(log))
-    def lines_! : Stream[String]                    = lines(false, false, None)
-    def lines_!(log: ProcessLogger): Stream[String] = lines(false, false, Some(log))
+    def lineStream: Stream[String]                       = lineStream(withInput = false, nonZeroException = true, None)
+    def lineStream(log: ProcessLogger): Stream[String]   = lineStream(withInput = false, nonZeroException = true, Some(log))
+    def lineStream_! : Stream[String]                    = lineStream(withInput = false, nonZeroException = false, None)
+    def lineStream_!(log: ProcessLogger): Stream[String] = lineStream(withInput = false, nonZeroException = false, Some(log))
 
-    def !                      = run(false).exitValue()
+    def !                      = run(connectInput = false).exitValue()
     def !(io: ProcessIO)       = run(io).exitValue()
-    def !(log: ProcessLogger)  = runBuffered(log, false)
-    def !<                     = run(true).exitValue()
-    def !<(log: ProcessLogger) = runBuffered(log, true)
+    def !(log: ProcessLogger)  = runBuffered(log, connectInput = false)
+    def !<                     = run(connectInput = true).exitValue()
+    def !<(log: ProcessLogger) = runBuffered(log, connectInput = true)
 
     /** Constructs a new builder which runs this command with all input/output threads marked
      *  as daemon threads.  This allows the creation of a long running process while still
@@ -131,7 +132,7 @@ private[process] trait ProcessBuilderImpl {
       else scala.sys.error("Nonzero exit value: " + code)
     }
 
-    private[this] def lines(
+    private[this] def lineStream(
       withInput: Boolean,
       nonZeroException: Boolean,
       log: Option[ProcessLogger]
diff --git a/src/library/scala/sys/process/ProcessIO.scala b/src/library/scala/sys/process/ProcessIO.scala
index f5b2668..eedf667 100644
--- a/src/library/scala/sys/process/ProcessIO.scala
+++ b/src/library/scala/sys/process/ProcessIO.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.sys
+package scala
+package sys
 package process
 
 import processInternal._
diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala
index c21c0da..2b7fcde 100644
--- a/src/library/scala/sys/process/ProcessImpl.scala
+++ b/src/library/scala/sys/process/ProcessImpl.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.sys
+package scala
+package sys
 package process
 
 import processInternal._
@@ -17,7 +18,7 @@ private[process] trait ProcessImpl {
 
   /** Runs provided code in a new Thread and returns the Thread instance. */
   private[process] object Spawn {
-    def apply(f: => Unit): Thread = apply(f, false)
+    def apply(f: => Unit): Thread = apply(f, daemon = false)
     def apply(f: => Unit, daemon: Boolean): Thread = {
       val thread = new Thread() { override def run() = { f } }
       thread.setDaemon(daemon)
@@ -32,7 +33,7 @@ private[process] trait ProcessImpl {
         try result set Right(f)
         catch { case e: Exception => result set Left(e) }
 
-      Spawn(run)
+      Spawn(run())
 
       () => result.get match {
         case Right(value)    => value
@@ -68,10 +69,10 @@ private[process] trait ProcessImpl {
 
     protected[this] override def runAndExitValue() = {
       val first = a.run(io)
-      runInterruptible(first.exitValue)(first.destroy()) flatMap { codeA =>
+      runInterruptible(first.exitValue())(first.destroy()) flatMap { codeA =>
         if (evaluateSecondProcess(codeA)) {
           val second = b.run(io)
-          runInterruptible(second.exitValue)(second.destroy())
+          runInterruptible(second.exitValue())(second.destroy())
         }
         else Some(codeA)
       }
@@ -132,10 +133,10 @@ private[process] trait ProcessImpl {
       val first = a.run(firstIO)
       try {
         runInterruptible {
-          val exit1 = first.exitValue
+          val exit1 = first.exitValue()
           currentSource put None
           currentSink put None
-          val exit2 = second.exitValue
+          val exit2 = second.exitValue()
           // Since file redirection (e.g. #>) is implemented as a piped process,
           // we ignore its exit value so cmd #> file doesn't always return 0.
           if (b.hasExitValue) exit2 else exit1
@@ -222,8 +223,8 @@ private[process] trait ProcessImpl {
       p.exitValue()
     }
     override def destroy() = {
-      try{
-        outputThreads foreach (_.stop())
+      try {
+        outputThreads foreach (_.interrupt()) // on destroy, don't bother consuming any more output
         p.destroy()
       }
       finally inputThread.interrupt()
diff --git a/src/library/scala/sys/process/ProcessLogger.scala b/src/library/scala/sys/process/ProcessLogger.scala
index a4acb06..ae34722 100644
--- a/src/library/scala/sys/process/ProcessLogger.scala
+++ b/src/library/scala/sys/process/ProcessLogger.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.sys
+package scala
+package sys
 package process
 
 import java.io._
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index ed436feb..1340a6c 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -25,7 +25,7 @@ package scala.sys {
     *
     * {{{
     * import scala.sys.process._
-    * "ls" #| "grep .scala" #&& "scalac *.scala" #|| "echo nothing found" lines
+    * "ls" #| "grep .scala" #&& Seq("sh", "-c", "scalac *.scala") #|| "echo nothing found" lines
     * }}}
     *
     * We describe below the general concepts and architecture of the package,
@@ -80,10 +80,7 @@ package scala.sys {
     * spaces -- no escaping of spaces is possible -- or out of a
     * [[scala.collection.Seq]], where the first element represents the command
     * name, and the remaining elements are arguments to it. In this latter case,
-    * arguments may contain spaces.  One can also implicitly convert
-    * [[scala.xml.Elem]] and `java.lang.ProcessBuilder` into a `ProcessBuilder`.
-    * In the introductory example, the strings were converted into
-    * `ProcessBuilder` implicitly.
+    * arguments may contain spaces.
     *
     * To further control what how the process will be run, such as specifying
     * the directory in which it will be run, see the factories on
@@ -154,7 +151,7 @@ package scala.sys {
     *
     * // An overly complex way of computing size of a compressed file
     * def gzFileSize(name: String) = {
-    *   val cat = Seq("zcat", "name")
+    *   val cat = Seq("zcat", name)
     *   var count = 0
     *   def byteCounter(input: java.io.InputStream) = {
     *     while(input.read() != -1) count += 1
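
The corrected snippet above passes the `name` parameter to `zcat` rather than the literal string "name". A self-contained, hedged version of the same idea, assuming `zcat` is on the PATH; `ProcessIO`'s three-argument constructor wires the byte counter to standard output:

    import scala.sys.process._

    // Count the decompressed size of a gzip file by reading zcat's output byte by byte.
    def gzFileSize(name: String): Int = {
      val cat = Seq("zcat", name)
      var count = 0
      def byteCounter(input: java.io.InputStream): Unit = {
        while (input.read() != -1) count += 1
        input.close()
      }
      // ignore stdin, count stdout, close stderr
      val proc = cat.run(new ProcessIO(_.close(), byteCounter, _.close()))
      proc.exitValue()   // block until zcat terminates
      count
    }
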
diff --git a/src/library/scala/testing/Benchmark.scala b/src/library/scala/testing/Benchmark.scala
deleted file mode 100644
index 66d7d44..0000000
--- a/src/library/scala/testing/Benchmark.scala
+++ /dev/null
@@ -1,114 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.testing
-
-import scala.compat.Platform
-
-/** `Benchmark` can be used to quickly turn an existing class into a
- *  benchmark. Here is a short example:
- *  {{{
- *  object sort1 extends Sorter with Benchmark {
- *    def run = sort(List.range(1, 1000))
- *  }
- *  }}}
- *  The `run` method has to be defined by the user, who will perform the
- *  timed operation there. Run the benchmark as follows:
- *  {{{
- *  > scala sort1 5
- *  }}}
- *  This will run the benchmark 5 times, forcing a garbage collection
- *  between runs, and printing the execution times to stdout.
- *
- *  It is also possible to add a multiplier, so
- *  {{{
- *  > scala sort1 5 10
- *  }}}
- *  will run the entire benchmark 10 times, each time for 5 runs.
- *
- *  @author Iulian Dragos, Burak Emir
- */
-@deprecated("This class will be removed.", "2.10.0")
-trait Benchmark {
-
-  /** this method should be implemented by the concrete benchmark.
-   *  This method is called by the benchmarking code for a number of times.
-   *  The GC is called between "multiplier" calls to run, right after tear
-   *  down.
-   *
-   *  @see setUp
-   *  @see tearDown
-   */
-  def run()
-
-  var multiplier = 1
-
-  /** Run the benchmark the specified number of times and return a list with
-   *  the execution times in milliseconds in reverse order of the execution.
-   */
-  def runBenchmark(noTimes: Int): List[Long] =
-    for (i <- List.range(1, noTimes + 1)) yield {
-      setUp
-      val startTime = Platform.currentTime
-      var i = 0; while (i < multiplier) {
-        run()
-        i += 1
-      }
-      val stopTime = Platform.currentTime
-      tearDown
-      Platform.collectGarbage
-
-      stopTime - startTime
-    }
-
-  /** Prepare any data needed by the benchmark, but whose execution time
-   *  should not be measured. This method is run before each call to the
-   *  benchmark payload, 'run'.
-   */
-  def setUp() {}
-
-  /** Perform cleanup operations after each 'run'. For micro benchmarks,
-   *  think about using the result of 'run' in a way that prevents the JVM
-   *  to dead-code eliminate the whole 'run' method. For instance, print or
-   *  write the results to a file. The execution time of this method is not
-   *  measured.
-   */
-  def tearDown() {}
-
-  /** a string that is written at the beginning of the output line
-   *   that contains the timings. By default, this is the class name.
-   */
-  def prefix: String = getClass().getName()
-
-  /**
-   * The entry point. It takes two arguments:
-   * - argument `n` is the number of consecutive runs
-   * - optional argument `mult` specifies that the `n` runs are repeated
-   *   `mult` times.
-   */
-  def main(args: Array[String]) {
-    if (args.length > 0) {
-      val logFile = new java.io.OutputStreamWriter(System.out)
-      if (args.length > 1) multiplier = args(1).toInt
-      logFile.write(prefix)
-      for (t <- runBenchmark(args(0).toInt))
-        logFile.write("\t" + t)
-
-      logFile.write(Platform.EOL)
-      logFile.flush()
-    } else {
-      println("Usage: scala benchmarks.program <runs> ")
-      println("   or: scala benchmarks.program <runs> <multiplier>")
-      println("""
-    The benchmark is run <runs> times, forcing a garbage collection between runs. The optional
-    <multiplier> causes the benchmark to be repeated <multiplier> times, each time for <runs>
-    executions.
-      """)
-    }
-  }
-}
diff --git a/src/library/scala/testing/Show.scala b/src/library/scala/testing/Show.scala
deleted file mode 100644
index 9376e26..0000000
--- a/src/library/scala/testing/Show.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.testing
-
-/** Classes inheriting trait `Show` can test their member methods using the
- *  notation `meth(arg,,1,,, ..., arg,,n,,)`, where `meth` is the name of
- *  the method and `arg,,1,,,...,arg,,n,,` are the arguments.
- *
- *  The only difference to a normal method call is the leading quote
- *  character (`'`). A quoted method call like the one above will produces
- *  a legible diagnostic to be printed on [[scala.Console]].
- *
- *  It is of the form
- *
- *    `meth(arg,,1,,, ..., arg,,n,,)`  gives  `<result>`
- *
- *  where `<result>` is the result of evaluating the call.
- *
- */
-@deprecated("This class will be removed.", "2.10.0")
-trait Show {
-
-  /** An implicit definition that adds an apply method to Symbol which forwards to `test`. 
-   *  Prints out diagnostics of method applications.
-   */
-  implicit class SymApply(f: Symbol) {
-    def apply[A](args: A*) {
-      println(test(f, args: _*))
-    }
-  }
-
-  @deprecated("use SymApply instead", "2.10.0")
-  def symApply(sym: Symbol): SymApply = new SymApply(sym)
-
-  /** Apply method with name of given symbol `f` to given arguments and return
-   *  a result diagnostics.
-   */
-  def test[A](f: Symbol, args: A*): String = {
-    val args1 = args map (_.asInstanceOf[AnyRef])
-    def testMethod(meth: java.lang.reflect.Method): String =
-      f.name+"("+(args mkString ",")+")  gives  "+
-      {
-        try {
-          meth.invoke(this, args1: _*)
-        } catch {
-          case ex: IllegalAccessException => ex
-          case ex: IllegalArgumentException => ex
-          case ex: java.lang.reflect.InvocationTargetException => ex
-        }
-      }
-    getClass.getMethods.toList filter (_.getName == f.name) match {
-      case List() =>
-        f.name+" is not defined"
-      case List(m) =>
-        testMethod(m)
-      case ms => // multiple methods, disambiguate by number of arguments
-        ms filter (_.getParameterTypes.length == args.length) match {
-          case List() =>
-            testMethod(ms.head) // go ahead anyway, to get an exception
-          case List(m) =>
-            testMethod(m)
-          case ms =>
-            "cannot disambiguate between multiple implementations of "+f.name
-        }
-    }
-  }
-}
diff --git a/src/library/scala/text/Document.scala b/src/library/scala/text/Document.scala
index b74fd15..aa55ac4 100644
--- a/src/library/scala/text/Document.scala
+++ b/src/library/scala/text/Document.scala
@@ -10,11 +10,17 @@ package scala.text
 
 import java.io.Writer
 
+@deprecated("This object will be removed.", "2.11.0")
 case object DocNil extends Document
+@deprecated("This object will be removed.", "2.11.0")
 case object DocBreak extends Document
+@deprecated("This class will be removed.", "2.11.0")
 case class DocText(txt: String) extends Document
+@deprecated("This class will be removed.", "2.11.0")
 case class DocGroup(doc: Document) extends Document
+@deprecated("This class will be removed.", "2.11.0")
 case class DocNest(indent: Int, doc: Document) extends Document
+@deprecated("This class will be removed.", "2.11.0")
 case class DocCons(hd: Document, tl: Document) extends Document
 
 /**
@@ -24,6 +30,7 @@ case class DocCons(hd: Document, tl: Document) extends Document
  * @author Michel Schinz
  * @version 1.0
  */
+@deprecated("This class will be removed.", "2.11.0")
 abstract class Document {
   def ::(hd: Document): Document = DocCons(hd, this)
   def ::(hd: String): Document = DocCons(DocText(hd), this)
@@ -80,7 +87,7 @@ abstract class Document {
         fmt(k, (i + ii, b, d) :: z)
       case (i, true, DocBreak) :: z =>
         writer write "\n"
-        spaces(i);
+        spaces(i)
         fmt(i, z)
       case (i, false, DocBreak) :: z =>
         writer write " "
@@ -96,6 +103,7 @@ abstract class Document {
   }
 }
 
+@deprecated("This object will be removed.", "2.11.0")
 object Document {
   /** The empty document */
   def empty = DocNil
diff --git a/src/library/scala/throws.scala b/src/library/scala/throws.scala
index 159f1f0..5a5dd9a 100644
--- a/src/library/scala/throws.scala
+++ b/src/library/scala/throws.scala
@@ -24,5 +24,5 @@ package scala
  * @since   2.1
  */
 class throws[T <: Throwable](cause: String = "") extends scala.annotation.StaticAnnotation {
-  def this(clazz: Class[T]) = this()
+  def this(clazz: Class[T]) = this("")
 }
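
The secondary constructor now passes an explicit empty cause instead of relying on the primary constructor's default argument. Usage is unchanged; both annotation forms remain available, as in this small sketch (hypothetical class and methods):

    import java.io.IOException

    class Reader {
      @throws(classOf[IOException])                             // class-based form
      def read(): Int = ???

      @throws[IOException]("if the underlying stream fails")   // cause-string form
      def readAll(): String = ???
    }
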
diff --git a/src/library/scala/transient.scala b/src/library/scala/transient.scala
index 8ff7c58..ec87439 100644
--- a/src/library/scala/transient.scala
+++ b/src/library/scala/transient.scala
@@ -6,8 +6,6 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
 package scala
 
 import scala.annotation.meta._
diff --git a/src/library/scala/util/DynamicVariable.scala b/src/library/scala/util/DynamicVariable.scala
index 52cba68..963fe1c 100644
--- a/src/library/scala/util/DynamicVariable.scala
+++ b/src/library/scala/util/DynamicVariable.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.util
+package scala
+package util
 
 import java.lang.InheritableThreadLocal
 
diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala
index dba11ed..b1a932b 100644
--- a/src/library/scala/util/Either.scala
+++ b/src/library/scala/util/Either.scala
@@ -8,7 +8,8 @@
 
 
 
-package scala.util
+package scala
+package util
 
 import scala.language.implicitConversions
 
@@ -21,7 +22,7 @@ import scala.language.implicitConversions
  *  [[scala.util.Right]] takes the place of [[scala.Some]].  Convention dictates
  *  that Left is used for failure and Right is used for success.
  *
- *  For example, you could use ``Either[String, Int]`` to detect whether a
+ *  For example, you could use `Either[String, Int]` to detect whether a
  *  received input is a String or an Int.
  *
  *  {{{
@@ -205,7 +206,7 @@ final case class Right[+A, +B](b: B) extends Either[A, B] {
 object Either {
 
   /**
-   * Allows use of a ``merge`` method to extract values from Either instances
+   * Allows use of a `merge` method to extract values from Either instances
    * regardless of whether they are Left or Right.
    *
    * {{{
@@ -215,14 +216,12 @@ object Either {
    * r.merge: Seq[Int] // Vector(1)
    * }}}
    */
-  implicit class MergeableEither[A](x: Either[A, A]) {
+  implicit class MergeableEither[A](private val x: Either[A, A]) extends AnyVal {
     def merge: A = x match {
       case Left(a)  => a
       case Right(a) => a
     }
   }
-  @deprecated("use MergeableEither instead", "2.10.0")
-  def either2mergeable[A](x: Either[A, A]): MergeableEither[A] = new MergeableEither(x)
 
   /**
    * Projects an `Either` into a `Left`.
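
`MergeableEither` is now a value class, so calling `merge` on an `Either[A, A]` no longer allocates a wrapper object. A minimal sketch, with illustrative values:

    // Both sides carry the same type, so merge simply extracts the value.
    val l: Either[String, String] = Left("flower")
    val r: Either[String, String] = Right("flower")
    val a: String = l.merge   // "flower"
    val b: String = r.merge   // "flower"
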
diff --git a/src/library/scala/util/Marshal.scala b/src/library/scala/util/Marshal.scala
deleted file mode 100644
index b78ed21..0000000
--- a/src/library/scala/util/Marshal.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2008-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util
-
-/**
- * Marshalling of Scala objects using Scala tags.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@deprecated("This class will be removed", "2.10.0")
-object Marshal {
-  import java.io._
-  import scala.reflect.ClassTag
-
-  def dump[A](o: A)(implicit t: ClassTag[A]): Array[Byte] = {
-    val ba = new ByteArrayOutputStream(512)
-    val out = new ObjectOutputStream(ba)
-    out.writeObject(t)
-    out.writeObject(o)
-    out.close()
-    ba.toByteArray()
-  }
-
-  @throws(classOf[IOException])
-  @throws(classOf[ClassCastException])
-  @throws(classOf[ClassNotFoundException])
-  def load[A](buffer: Array[Byte])(implicit expected: ClassTag[A]): A = {
-    val in = new ObjectInputStream(new ByteArrayInputStream(buffer))
-    val found = in.readObject.asInstanceOf[ClassTag[_]]
-    try {
-      found.runtimeClass.asSubclass(expected.runtimeClass)
-      in.readObject.asInstanceOf[A]
-    } catch {
-      case _: ClassCastException =>
-        in.close()
-        throw new ClassCastException("type mismatch;"+
-          "\n found   : "+found+
-          "\n required: "+expected)
-    }
-  }
-}
diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala
index a5bc8fa..e05fe08 100644
--- a/src/library/scala/util/MurmurHash.scala
+++ b/src/library/scala/util/MurmurHash.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.util
+package scala
+package util
 
 /** An implementation of Austin Appleby's MurmurHash 3.0 algorithm
  *  (32 bit version); reference: http://code.google.com/p/smhasher
@@ -81,6 +82,7 @@ class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T =>
  *  needs to be called to finalize the hash.
  */
 @deprecated("Use the object MurmurHash3 instead.", "2.10.0")
+// NOTE: Used by SBT 0.13.0-M2 and below
 object MurmurHash {
   // Magic values used for MurmurHash's 32 bit hash.
   // Don't change these without consulting a hashing expert!
@@ -164,13 +166,13 @@ object MurmurHash {
     var k = hiddenMagicB
     var j = 0
     while (j+1 < s.length) {
-      val i = (s.charAt(j)<<16) + s.charAt(j+1);
+      val i = (s.charAt(j)<<16) + s.charAt(j+1)
       h = extendHash(h,i,c,k)
       c = nextMagicA(c)
       k = nextMagicB(k)
       j += 2
     }
-    if (j < s.length) h = extendHash(h,s.charAt(j),c,k)
+    if (j < s.length) h = extendHash(h,s.charAt(j).toInt,c,k)
     finalizeHash(h)
   }
 
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index d04e5e4..d597feb 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.util
+package scala
+package util
 
 import java.io.{ IOException, PrintWriter }
 import java.util.jar.Attributes.{ Name => AttributeName }
@@ -59,6 +60,8 @@ private[scala] trait PropertiesTrait {
   def envOrElse(name: String, alt: String)      = Option(System getenv name) getOrElse alt
   def envOrNone(name: String)                   = Option(System getenv name)
 
+  def envOrSome(name: String, alt: Option[String])       = envOrNone(name) orElse alt
+
   // for values based on propFilename
   def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt)
   def scalaPropOrEmpty(name: String): String             = scalaPropOrElse(name, "")
@@ -128,10 +131,9 @@ private[scala] trait PropertiesTrait {
   def javaVmName            = propOrEmpty("java.vm.name")
   def javaVmVendor          = propOrEmpty("java.vm.vendor")
   def javaVmVersion         = propOrEmpty("java.vm.version")
-  // this property must remain less-well-known until 2.11
-  private def javaSpecVersion       = propOrEmpty("java.specification.version")
-  //private def javaSpecVendor        = propOrEmpty("java.specification.vendor")
-  //private def javaSpecName          = propOrEmpty("java.specification.name")
+  def javaSpecVersion       = propOrEmpty("java.specification.version")
+  def javaSpecVendor        = propOrEmpty("java.specification.vendor")
+  def javaSpecName          = propOrEmpty("java.specification.name")
   def osName                = propOrEmpty("os.name")
   def scalaHome             = propOrEmpty("scala.home")
   def tmpDir                = propOrEmpty("java.io.tmpdir")
@@ -145,7 +147,10 @@ private[scala] trait PropertiesTrait {
   // See http://mail.openjdk.java.net/pipermail/macosx-port-dev/2012-November/005148.html for
   // the reason why we don't follow developer.apple.com/library/mac/#technotes/tn2002/tn2110.
   /** Returns `true` iff the underlying operating system is a version of Apple Mac OSX.  */
-  def isMac                 = osName startsWith "Mac OS X" 
+  def isMac                 = osName startsWith "Mac OS X"
+
+  /* Some runtime values. */
+  private[scala] def isAvian = javaVmName contains "Avian"
 
   // This is looking for javac, tools.jar, etc.
   // Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
@@ -168,7 +173,7 @@ private[scala] trait PropertiesTrait {
    * isJavaAtLeast("1.6")            // true
    * isJavaAtLeast("1.7")            // true
    * isJavaAtLeast("1.8")            // false
-   * }}
+   * }}}
    */
   def isJavaAtLeast(version: String): Boolean = {
     def parts(x: String) = {
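
`javaSpecVersion`, `javaSpecVendor`, and `javaSpecName` become public here, and `envOrSome` complements the existing `envOrElse`/`envOrNone`. A hedged sketch of reading them through `scala.util.Properties`; the actual values depend on the running JVM and environment:

    import scala.util.Properties

    val spec: String = Properties.javaSpecVersion                       // e.g. "1.7"
    val javaHome: Option[String] =
      Properties.envOrSome("JAVA_HOME", Properties.propOrNone("java.home"))

    if (Properties.isJavaAtLeast("1.6"))
      println(s"running on Java spec $spec")
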
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index 24c4cd7..8d68c5b 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.util
+package scala
+package util
 
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.generic.CanBuildFrom
@@ -17,7 +18,7 @@ import scala.language.{implicitConversions, higherKinds}
  *  @author Stephane Micheloud
  *
  */
-class Random(val self: java.util.Random) {
+class Random(val self: java.util.Random) extends AnyRef with Serializable {
   /** Creates a new random number generator using a single long seed. */
   def this(seed: Long) = this(new java.util.Random(seed))
 
@@ -117,7 +118,7 @@ class Random(val self: java.util.Random) {
       swap(n - 1, k)
     }
 
-    (bf(xs) ++= buf).result
+    (bf(xs) ++= buf).result()
   }
 
   /** Returns a Stream of pseudorandomly chosen alphanumeric characters,
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index 276e157..2e021ad 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -141,14 +141,14 @@ object Sorting {
         var done = false
         while (!done) {
           while (b <= c && x(b) <= v) {
-            if (x(b) == v) {
+            if (x(b) equiv v) {
               swap(a, b)
               a += 1
             }
             b += 1
           }
           while (c >= b && x(c) >= v) {
-            if (x(c) == v) {
+            if (x(c) equiv v) {
               swap(c, d)
               d -= 1
             }
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index 7749543..b0cf122 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.util
+package scala
+package util
 
 import scala.collection.Seq
 import scala.util.control.NonFatal
@@ -110,6 +111,35 @@ sealed abstract class Try[+T] {
    */
   def filter(p: T => Boolean): Try[T]
 
+  /** Creates a non-strict filter, which eventually converts this to a `Failure`
+   *  if the predicate is not satisfied.
+   *
+   *  Note: unlike filter, withFilter does not create a new Try.
+   *        Instead, it restricts the domain of subsequent
+   *        `map`, `flatMap`, `foreach`, and `withFilter` operations.
+   *
+   * As Try is a one-element collection, this may be a bit overkill,
+   * but it's consistent with withFilter on Option and the other collections.
+   *
+   *  @param p   the predicate used to test elements.
+   *  @return    an object of class `WithFilter`, which supports
+   *             `map`, `flatMap`, `foreach`, and `withFilter` operations.
+   *             All these operations apply to those elements of this Try
+   *             which satisfy the predicate `p`.
+   */
+  @inline final def withFilter(p: T => Boolean): WithFilter = new WithFilter(p)
+
+  /** We need a whole WithFilter class to honor the "doesn't create a new
+   *  collection" contract even though it seems unlikely to matter much in a
+   *  collection with max size 1.
+   */
+  class WithFilter(p: T => Boolean) {
+    def map[U](f:     T => U): Try[U]           = Try.this filter p map f
+    def flatMap[U](f: T => Try[U]): Try[U]      = Try.this filter p flatMap f
+    def foreach[U](f: T => U): Unit             = Try.this filter p foreach f
+    def withFilter(q: T => Boolean): WithFilter = new WithFilter(x => p(x) && q(x))
+  }
+
   /**
    * Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`.
    * This is like `flatMap` for the exception.
@@ -164,7 +194,7 @@ object Try {
 
 }
 
-final case class Failure[+T](val exception: Throwable) extends Try[T] {
+final case class Failure[+T](exception: Throwable) extends Try[T] {
   def isFailure: Boolean = true
   def isSuccess: Boolean = false
   def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] =
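
The new `withFilter` lets guards in `for` comprehensions over `Try` restrict subsequent `map`, `flatMap`, and `foreach` calls without eagerly building a new `Try`; a failing guard surfaces as a `Failure` (a `NoSuchElementException` from the underlying `filter`) once the comprehension's `map` runs. A minimal sketch:

    import scala.util.Try

    def halfIfEven(s: String): Try[Int] =
      for {
        n <- Try(s.toInt)
        if n % 2 == 0          // the guard goes through withFilter
      } yield n / 2

    halfIfEven("42")    // Success(21)
    halfIfEven("41")    // Failure(java.util.NoSuchElementException: ...)
    halfIfEven("foo")   // Failure(java.lang.NumberFormatException: ...)
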
diff --git a/src/library/scala/util/automata/BaseBerrySethi.scala b/src/library/scala/util/automata/BaseBerrySethi.scala
deleted file mode 100644
index 3f6f450..0000000
--- a/src/library/scala/util/automata/BaseBerrySethi.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.automata
-
-import scala.util.regexp.{ Base }
-import scala.collection.{ mutable, immutable }
-
-// todo: replace global variable pos with acc
-
-/** This class turns a regular expression over `A` into a
-  * [[scala.util.automata.NondetWordAutom]] over `A` using the celebrated
-  * position automata construction (also called ''Berry-Sethi'' or ''Glushkov'').
-  */
-@deprecated("This class will be removed", "2.10.0")
-abstract class BaseBerrySethi {
-  val lang: Base
-  import lang.{ Alt, Eps, Meta, RegExp, Sequ, Star }
-
-  protected var pos = 0
-
-  // results which hold all info for the NondetWordAutomaton
-  protected var follow: mutable.HashMap[Int, Set[Int]] = _
-
-  protected var finalTag: Int = _
-
-  protected var finals: immutable.Map[Int, Int] = _     // final states
-
-  // constants --------------------------
-
-  final val emptySet: Set[Int] = Set()
-
-  private def doComp(r: RegExp, compFunction: RegExp => Set[Int]) = r match {
-    case x: Alt   => (x.rs map compFirst).foldLeft(emptySet)(_ ++ _)
-    case Eps      => emptySet
-    case x: Meta  => compFunction(x.r)
-    case x: Sequ  =>
-      val (l1, l2) = x.rs span (_.isNullable)
-      ((l1 ++ (l2 take 1)) map compFunction).foldLeft(emptySet)(_ ++ _)
-    case Star(t)  => compFunction(t)
-    case _        => throw new IllegalArgumentException("unexpected pattern " + r.getClass)
-  }
-
-  /** Computes `first(r)` for the word regexp `r`. */
-  protected def compFirst(r: RegExp): Set[Int] = doComp(r, compFirst)
-
-  /** Computes `last(r)` for the regexp `r`. */
-  protected def compLast(r: RegExp): Set[Int] = doComp(r, compLast)
-
-  /** Starts from the right-to-left
-   *  precondition: pos is final
-   *               pats are successor patterns of a Sequence node
-   */
-  protected def compFollow(rs: Seq[RegExp]): Set[Int] = {
-    follow(0) =
-      if (rs.isEmpty) emptySet
-      else rs.foldRight(Set(pos))((p, fol) => {
-        val first = compFollow1(fol, p)
-
-        if (p.isNullable) fol ++ first
-        else first
-      })
-
-    follow(0)
-  }
-
-  /** Returns the first set of an expression, setting the follow set along the way.
-   */
-  protected def compFollow1(fol1: Set[Int], r: RegExp): Set[Int] = r match {
-    case x: Alt     => Set((x.rs reverseMap (compFollow1(fol1, _))).flatten: _*)
-    case x: Meta    => compFollow1(fol1, x.r)
-    case x: Star    => compFollow1(fol1 ++ compFirst(x.r), x.r)
-    case x: Sequ    =>
-      x.rs.foldRight(fol1) { (p, fol) =>
-        val first = compFollow1(fol, p)
-
-        if (p.isNullable) fol ++ first
-        else first
-      }
-    case _          => throw new IllegalArgumentException("unexpected pattern: " + r.getClass)
-  }
-
-  /** Returns the "Sethi-length" of a pattern, creating the set of position along the way.
-   */
-  protected def traverse(r: RegExp): Unit = r match {
-    // (is tree automaton stuff, more than Berry-Sethi)
-    case x: Alt  => x.rs foreach traverse
-    case x: Sequ => x.rs foreach traverse
-    case x: Meta => traverse(x.r)
-    case Star(t) => traverse(t)
-    case _       => throw new IllegalArgumentException("unexp pattern " + r.getClass)
-  }
-}
diff --git a/src/library/scala/util/automata/DetWordAutom.scala b/src/library/scala/util/automata/DetWordAutom.scala
deleted file mode 100644
index 5d70910..0000000
--- a/src/library/scala/util/automata/DetWordAutom.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.automata
-
-import scala.collection.{ mutable, immutable }
-
-/** A deterministic automaton. States are integers, where
- *  0 is always the only initial state. Transitions are represented
- *  in the delta function. A default transitions is one that
- *  is taken when no other transition can be taken.
- *  All states are reachable. Accepting states are those for which
- *  the partial function 'finals' is defined.
- *
- *  @author Burak Emir
- *  @version 1.0
- */
-@deprecated("This class will be removed", "2.10.0")
-abstract class DetWordAutom[T <: AnyRef] {
-  val nstates: Int
-  val finals: Array[Int]
-  val delta: Array[mutable.Map[T, Int]]
-  val default: Array[Int]
-
-  def isFinal(q: Int)        = finals(q) != 0
-  def isSink(q: Int)         = delta(q).isEmpty && default(q) == q
-  def next(q: Int, label: T) = delta(q).getOrElse(label, default(q))
-
-  override def toString() = {
-    val sb = new StringBuilder("[DetWordAutom  nstates=")
-    sb.append(nstates)
-    sb.append(" finals=")
-    val map = Map(finals.zipWithIndex map (_.swap): _*)
-    sb.append(map.toString())
-    sb.append(" delta=\n")
-
-    for (i <- 0 until nstates) {
-      sb append "%d->%s\n".format(i, delta(i))
-      if (i < default.length)
-        sb append "_>%s\n".format(default(i))
-    }
-    sb.toString
-  }
-}
diff --git a/src/library/scala/util/automata/Inclusion.scala b/src/library/scala/util/automata/Inclusion.scala
deleted file mode 100644
index 91441bd..0000000
--- a/src/library/scala/util/automata/Inclusion.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.automata
-
-
-/** A fast test of language inclusion between minimal automata.
- *  inspired by the ''AMoRE automata library''.
- *
- *  @author Burak Emir
- *  @version 1.0
- */
- at deprecated("This class will be removed", "2.10.0")
-trait Inclusion[A <: AnyRef] {
-
-  val labels: Seq[A]
-
-  /** Returns true if `dfa1` is included in `dfa2`.
-   */
-  def inclusion(dfa1: DetWordAutom[A], dfa2: DetWordAutom[A]) = {
-
-    def encode(q1: Int, q2: Int) = 1 + q1 + q2 * dfa1.nstates
-    def decode2(c: Int) = (c-1) / (dfa1.nstates) //integer division
-    def decode1(c: Int) = (c-1) % (dfa1.nstates)
-
-    var q1 = 0 //dfa1.initstate; // == 0
-    var q2 = 0 //dfa2.initstate; // == 0
-
-    val max = 1 + dfa1.nstates * dfa2.nstates
-    val mark = new Array[Int](max)
-
-    var result = true
-    var current = encode(q1, q2)
-    var last = current
-    mark(last) = max // mark (q1,q2)
-    while (current != 0 && result) {
-      //Console.println("current = [["+q1+" "+q2+"]] = "+current);
-      for (letter <- labels) {
-        val r1 = dfa1.next(q1,letter)
-        val r2 = dfa2.next(q2,letter)
-        if (dfa1.isFinal(r1) && !dfa2.isFinal(r2))
-	  result = false
-        val test = encode(r1, r2)
-        //Console.println("test = [["+r1+" "+r2+"]] = "+test);
-        if (mark(test) == 0) {
-	  mark(last) = test
-	  mark(test) = max
-	  last = test
-        }
-      }
-      val ncurrent = mark(current)
-      if( ncurrent != max ) {
-        q1 = decode1(ncurrent)
-        q2 = decode2(ncurrent)
-        current = ncurrent
-      } else {
-        current = 0
-      }
-    }
-    result
-  }
-}
diff --git a/src/library/scala/util/automata/NondetWordAutom.scala b/src/library/scala/util/automata/NondetWordAutom.scala
deleted file mode 100644
index 24c6612..0000000
--- a/src/library/scala/util/automata/NondetWordAutom.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.automata
-
-import scala.collection.{ immutable, mutable }
-
-/** A nondeterministic automaton. States are integers, where
- *  0 is always the only initial state. Transitions are represented
- *  in the delta function. Default transitions are transitions that
- *  are taken when no other transitions can be applied.
- *  All states are reachable. Accepting states are those for which
- *  the partial function `finals` is defined.
- */
- at deprecated("This class will be removed", "2.10.0")
-abstract class NondetWordAutom[T <: AnyRef] {
-  val nstates: Int
-  val labels: Seq[T]
-  val finals: Array[Int] // 0 means not final
-  val delta: Array[mutable.Map[T, immutable.BitSet]]
-  val default: Array[immutable.BitSet]
-
-  /** @return true if the state is final */
-  final def isFinal(state: Int) = finals(state) > 0
-
-  /** @return tag of final state */
-  final def finalTag(state: Int) = finals(state)
-
-  /** @return true if the set of states contains at least one final state */
-  final def containsFinal(Q: immutable.BitSet): Boolean = Q exists isFinal
-
-  /** @return true if there are no accepting states */
-  final def isEmpty = (0 until nstates) forall (x => !isFinal(x))
-
-  /** @return a immutable.BitSet with the next states for given state and label */
-  def next(q: Int, a: T): immutable.BitSet = delta(q).getOrElse(a, default(q))
-
-  /** @return a immutable.BitSet with the next states for given state and label */
-  def next(Q: immutable.BitSet, a: T): immutable.BitSet = next(Q, next(_, a))
-  def nextDefault(Q: immutable.BitSet): immutable.BitSet = next(Q, default)
-
-  private def next(Q: immutable.BitSet, f: (Int) => immutable.BitSet): immutable.BitSet =
-    (Q map f).foldLeft(immutable.BitSet.empty)(_ ++ _)
-
-  private def finalStates = 0 until nstates filter isFinal
-  override def toString = {
-
-    val finalString = Map(finalStates map (j => j -> finals(j)) : _*).toString
-    val deltaString = (0 until nstates) 
-      .map(i => "   %d->%s\n    _>%s\n".format(i, delta(i), default(i))).mkString
-
-    "[NondetWordAutom  nstates=%d  finals=%s  delta=\n%s".format(nstates, finalString, deltaString)
-  }
-}
diff --git a/src/library/scala/util/automata/SubsetConstruction.scala b/src/library/scala/util/automata/SubsetConstruction.scala
deleted file mode 100644
index 0ee7685..0000000
--- a/src/library/scala/util/automata/SubsetConstruction.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.automata
-
-import scala.collection.{ mutable, immutable }
-
- at deprecated("This class will be removed", "2.10.0")
-class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
-  import nfa.labels
-
-  def selectTag(Q: immutable.BitSet, finals: Array[Int]) =
-    (Q map finals filter (_ > 0)).min
-
-  def determinize: DetWordAutom[T] = {
-    // for assigning numbers to bitsets
-    var indexMap    = scala.collection.Map[immutable.BitSet, Int]()
-    var invIndexMap = scala.collection.Map[Int, immutable.BitSet]()
-    var ix = 0
-
-    // we compute the dfa with states = bitsets
-    val q0 = immutable.BitSet(0)            // the set { 0 }
-    val sink = immutable.BitSet.empty       // the set { }
-
-    var states = Set(q0, sink)    // initial set of sets
-    val delta    = new mutable.HashMap[immutable.BitSet, mutable.HashMap[T, immutable.BitSet]]
-    var deftrans = mutable.Map(q0 -> sink, sink -> sink)  // initial transitions
-    var finals: mutable.Map[immutable.BitSet, Int]  = mutable.Map()
-    val rest = new mutable.Stack[immutable.BitSet]
-
-    rest.push(sink, q0)
-
-    def addFinal(q: immutable.BitSet) {
-      if (nfa containsFinal q)
-        finals = finals.updated(q, selectTag(q, nfa.finals))
-    }
-    def add(Q: immutable.BitSet) {
-      if (!states(Q)) {
-        states += Q
-        rest push Q
-        addFinal(Q)
-      }
-    }
-
-    addFinal(q0)                          // initial state may also be a final state
-
-    while (!rest.isEmpty) {
-      val P = rest.pop
-      // assign a number to this bitset
-      indexMap = indexMap.updated(P, ix)
-      invIndexMap = invIndexMap.updated(ix, P)
-      ix += 1
-
-      // make transition map
-      val Pdelta = new mutable.HashMap[T, immutable.BitSet]
-      delta.update(P, Pdelta)
-
-      labels foreach { label =>
-        val Q = nfa.next(P, label)
-        Pdelta.update(label, Q)
-        add(Q)
-      }
-
-      // collect default transitions
-      val Pdef = nfa nextDefault P
-      deftrans = deftrans.updated(P, Pdef)
-      add(Pdef)
-    }
-
-    // create DetWordAutom, using indices instead of sets
-    val nstatesR = states.size
-    val deltaR = new Array[mutable.Map[T, Int]](nstatesR)
-    val defaultR = new Array[Int](nstatesR)
-    val finalsR = new Array[Int](nstatesR)
-
-    for (Q <- states) {
-      val q = indexMap(Q)
-      val trans = delta(Q)
-      val transDef = deftrans(Q)
-      val qDef = indexMap(transDef)
-      val ntrans = new mutable.HashMap[T, Int]()
-
-      for ((label, value) <- trans) {
-        val p = indexMap(value)
-        if (p != qDef)
-          ntrans.update(label, p)
-      }
-
-      deltaR(q) = ntrans
-      defaultR(q) = qDef
-    }
-
-    finals foreach { case (k,v) => finalsR(indexMap(k)) = v }
-
-    new DetWordAutom [T] {
-      val nstates = nstatesR
-      val delta = deltaR
-      val default = defaultR
-      val finals = finalsR
-    }
-  }
-}
diff --git a/src/library/scala/util/automata/WordBerrySethi.scala b/src/library/scala/util/automata/WordBerrySethi.scala
deleted file mode 100644
index 12448f5..0000000
--- a/src/library/scala/util/automata/WordBerrySethi.scala
+++ /dev/null
@@ -1,164 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.automata
-
-import scala.collection.{ immutable, mutable }
-import scala.util.regexp.WordExp
-
-/** This class turns a regular expression into a [[scala.util.automata.NondetWordAutom]]
-  * celebrated position automata construction (also called ''Berry-Sethi'' or ''Glushkov'').
-  *
-  *  @author Burak Emir
-  *  @version 1.0
-  */
- at deprecated("This class will be removed", "2.10.0")
-abstract class WordBerrySethi extends BaseBerrySethi {
-  override val lang: WordExp
-
-  import lang.{ Alt, Eps, Letter, Meta, RegExp, Sequ, Star, _labelT }
-
-  protected var labels: mutable.HashSet[_labelT]                   = _
-  // don't let this fool you, only labelAt is a real, surjective mapping
-  protected var labelAt: Map[Int, _labelT]                         = _ // new alphabet "gamma"
-  protected var deltaq: Array[mutable.HashMap[_labelT, List[Int]]] = _ // delta
-  protected var defaultq: Array[List[Int]]                         = _ // default transitions
-  protected var initials: Set[Int]                                 = _
-
-  /** Computes `first(r)` where the word regexp `r`.
-   *
-   *  @param r the regular expression
-   *  @return  the computed set `first(r)`
-   */
-  protected override def compFirst(r: RegExp): Set[Int] = r match {
-    case x: Letter  => Set(x.pos)
-    case _          => super.compFirst(r)
-  }
-
-  /** Computes `last(r)` where the word regexp `r`.
-   *
-   *  @param r the regular expression
-   *  @return  the computed set `last(r)`
-   */
-  protected override def compLast(r: RegExp): Set[Int] = r match {
-    case x: Letter  => Set(x.pos)
-    case _          => super.compLast(r)
-  }
-
-  /** Returns the first set of an expression, setting the follow set along
-   *  the way.
-   *
-   *  @param r    the regular expression
-   *  @return     the computed set
-   */
-  protected override def compFollow1(fol1: Set[Int], r: RegExp): Set[Int] = r match {
-      case x: Letter  => follow(x.pos) = fol1 ; Set(x.pos)
-      case Eps        => emptySet
-      case _          => super.compFollow1(fol1, r)
-    }
-
-  /** Returns "Sethi-length" of a pattern, creating the set of position
-   *  along the way
-   */
-
-  /** Called at the leaves of the regexp */
-  protected def seenLabel(r: RegExp, i: Int, label: _labelT) {
-    labelAt = labelAt.updated(i, label)
-    this.labels += label
-  }
-
-  // overridden in BindingBerrySethi
-  protected def seenLabel(r: RegExp, label: _labelT): Int = {
-    pos += 1
-    seenLabel(r, pos, label)
-    pos
-  }
-
-  // todo: replace global variable pos with acc
-  override def traverse(r: RegExp): Unit = r match {
-    case a @ Letter(label) => a.pos = seenLabel(r, label)
-    case Eps               => // ignore
-    case _                 => super.traverse(r)
-  }
-
-
-  protected def makeTransition(src: Int, dest: Int, label: _labelT) {
-    val q = deltaq(src)
-    q.update(label, dest :: q.getOrElse(label, Nil))
-  }
-
-  protected def initialize(subexpr: Seq[RegExp]): Unit = {
-    this.labelAt = immutable.Map()
-    this.follow = mutable.HashMap()
-    this.labels = mutable.HashSet()
-    this.pos = 0
-
-    // determine "Sethi-length" of the regexp
-    subexpr foreach traverse
-
-    this.initials = Set(0)
-  }
-
-  protected def initializeAutom() {
-    finals   = immutable.Map.empty[Int, Int]                    // final states
-    deltaq   = new Array[mutable.HashMap[_labelT, List[Int]]](pos) // delta
-    defaultq = new Array[List[Int]](pos)                        // default transitions
-
-    for (j <- 0 until pos) {
-      deltaq(j) = mutable.HashMap[_labelT, List[Int]]()
-      defaultq(j) = Nil
-    }
-  }
-
-  protected def collectTransitions(): Unit =                // make transitions
-    for (j <- 0 until pos ; fol = follow(j) ; k <- fol) {
-      if (pos == k) finals = finals.updated(j, finalTag)
-      else makeTransition(j, k, labelAt(k))
-    }
-
-  def automatonFrom(pat: RegExp, finalTag: Int): NondetWordAutom[_labelT] = {
-    this.finalTag = finalTag
-
-    pat match {
-      case x: Sequ =>
-        // (1,2) compute follow + first
-        initialize(x.rs)
-        pos += 1
-        compFollow(x.rs)  // this used to be assigned to var globalFirst and then never used.
-
-        // (3) make automaton from follow sets
-        initializeAutom()
-        collectTransitions()
-
-        if (x.isNullable) // initial state is final
-          finals = finals.updated(0, finalTag)
-
-        val delta1      = immutable.Map(deltaq.zipWithIndex map (_.swap): _*)
-        val finalsArr   = (0 until pos map (k => finals.getOrElse(k, 0))).toArray  // 0 == not final
-        val initialsArr = initials.toArray
-
-        val deltaArr: Array[mutable.Map[_labelT, immutable.BitSet]] =
-          (0 until pos map { x =>
-            mutable.HashMap(delta1(x).toSeq map { case (k, v) => k -> immutable.BitSet(v: _*) } : _*)
-          }).toArray
-
-        val defaultArr  = (0 until pos map (k => immutable.BitSet(defaultq(k): _*))).toArray
-
-        new NondetWordAutom[_labelT] {
-          val nstates  = pos
-          val labels   = WordBerrySethi.this.labels.toList
-          val initials = initialsArr
-          val finals   = finalsArr
-          val delta    = deltaArr
-          val default  = defaultArr
-        }
-      case z =>
-       automatonFrom(Sequ(z.asInstanceOf[this.lang._regexpT]), finalTag)
-    }
-  }
-}
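
The scala.util.automata classes removed above formed a small pipeline: WordBerrySethi turned a word regexp into a NondetWordAutom (the Berry-Sethi position automaton), and SubsetConstruction determinized that into a DetWordAutom. As a rough reminder of what the determinization step did, here is a minimal, self-contained sketch of the classic subset construction; the Nfa and Dfa types are illustrative stand-ins, not the removed API.

    // Illustrative powerset construction in the spirit of the removed
    // SubsetConstruction; Nfa and Dfa are hypothetical placeholder types.
    final case class Nfa[T](labels: Seq[T], delta: (Int, T) => Set[Int], finals: Set[Int])
    final case class Dfa[T](states: Set[Set[Int]],
                            delta: Map[(Set[Int], T), Set[Int]],
                            finals: Set[Set[Int]])

    def determinize[T](nfa: Nfa[T]): Dfa[T] = {
      val start = Set(0)                               // state 0 is always the initial state
      var seen  = Set(start)
      var work  = List(start)
      var trans = Map.empty[(Set[Int], T), Set[Int]]
      while (work.nonEmpty) {
        val q = work.head
        work = work.tail
        for (a <- nfa.labels) {
          val next = q.flatMap(s => nfa.delta(s, a))   // union of the NFA successor states
          trans += ((q, a) -> next)
          if (!seen(next)) { seen += next; work ::= next }
        }
      }
      Dfa(seen, trans, seen.filter(q => q.exists(nfa.finals)))
    }

The removed class additionally renumbered the resulting state sets into integers and tracked default transitions and final-state tags, which the sketch above omits.
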
diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala
index 89e1b58..5524b10 100644
--- a/src/library/scala/util/control/Breaks.scala
+++ b/src/library/scala/util/control/Breaks.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.util.control
+package scala
+package util.control
 
 /** A class that can be instantiated for the break control abstraction.
  *  Example usage:
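
The diff context above stops just short of the scaladoc's usage example. For orientation, the break control abstraction is used along these lines (a minimal sketch, not the scaladoc's exact example):

    import scala.util.control.Breaks._

    breakable {
      for (x <- 1 to 10) {
        if (x > 3) break()   // throws a BreakControl caught by the enclosing breakable
        println(x)
      }
    }
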
diff --git a/src/library/scala/util/control/ControlThrowable.scala b/src/library/scala/util/control/ControlThrowable.scala
index 33c90c5..7ed3d95 100644
--- a/src/library/scala/util/control/ControlThrowable.scala
+++ b/src/library/scala/util/control/ControlThrowable.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.util.control
+package scala
+package util.control
 
 /** A marker trait indicating that the `Throwable` it is mixed into is
  *  intended for flow control.
diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala
index b97914c..be6d03a 100644
--- a/src/library/scala/util/control/Exception.scala
+++ b/src/library/scala/util/control/Exception.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.util
+package scala
+package util
 package control
 
 import scala.collection.immutable.List
diff --git a/src/library/scala/util/control/NonFatal.scala b/src/library/scala/util/control/NonFatal.scala
index 0d8cdfb..9d3dfea 100644
--- a/src/library/scala/util/control/NonFatal.scala
+++ b/src/library/scala/util/control/NonFatal.scala
@@ -6,13 +6,13 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.util.control
+package scala
+package util.control
 
 /**
  * Extractor of non-fatal Throwables. Will not match fatal errors like `VirtualMachineError`
- * (for example, `OutOfMemoryError`, a subclass of `VirtualMachineError`), `ThreadDeath`,
- * `LinkageError`, `InterruptedException`, `ControlThrowable`, or `NotImplementedError`.
- * However, `StackOverflowError` is matched, i.e. considered non-fatal.
+ * (for example, `OutOfMemoryError` and `StackOverflowError`, subclasses of `VirtualMachineError`), `ThreadDeath`,
+ * `LinkageError`, `InterruptedException`, `ControlThrowable`.
  *
  * Note that [[scala.util.control.ControlThrowable]], an internal Throwable, is not matched by
  * `NonFatal` (and would therefore be thrown).
@@ -33,9 +33,8 @@ object NonFatal {
     * Returns true if the provided `Throwable` is to be considered non-fatal, or false if it is to be considered fatal
     */
    def apply(t: Throwable): Boolean = t match {
-     case _: StackOverflowError => true // StackOverflowError ok even though it is a VirtualMachineError
      // VirtualMachineError includes OutOfMemoryError and other fatal errors
-     case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError | _: ControlThrowable | _: NotImplementedError => false
+     case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError | _: ControlThrowable => false
      case _ => true
    }
   /**
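
The net effect of this hunk: StackOverflowError is no longer special-cased, so as a VirtualMachineError it now counts as fatal, and NotImplementedError is no longer listed among the fatal errors. Typical use of the extractor is unchanged; a minimal sketch:

    import scala.util.control.NonFatal

    def safely[A](body: => A): Option[A] =
      try Some(body)
      catch {
        case NonFatal(e) =>      // an ArithmeticException lands here; an OutOfMemoryError does not
          Console.err.println(s"recovered from: $e")
          None
      }

    safely(1 / 0)                // None, after logging the ArithmeticException
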
diff --git a/src/library/scala/util/control/TailCalls.scala b/src/library/scala/util/control/TailCalls.scala
index 955cee7..953d5b4 100644
--- a/src/library/scala/util/control/TailCalls.scala
+++ b/src/library/scala/util/control/TailCalls.scala
@@ -6,13 +6,18 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.util.control
+package scala
+package util.control
 
 /** Methods exported by this object implement tail calls via trampolining.
  *  Tail calling methods have to return their result using `done` or call the
  *  next method using `tailcall`. Both return a `TailRec` object. The result
  *  of evaluating a tailcalling function can be retrieved from a `Tailrec`
- *  value using method `result`. Here's a usage example:
+ *  value using method `result`.
+ *  Implemented as described in "Stackless Scala with Free Monads"
+ *  http://blog.higher-order.com/assets/trampolines.pdf
+ *
+ *  Here's a usage example:
  *  {{{
  *  import scala.util.control.TailCalls._
  *
@@ -23,6 +28,14 @@ package scala.util.control
  *   if (xs.isEmpty) done(false) else tailcall(isEven(xs.tail))
  *
  *  isEven((1 to 100000).toList).result
+ *
+ *  def fib(n: Int): TailRec[Int] =
+ *    if (n < 2) done(n) else for {
+ *      x <- tailcall(fib(n - 1))
+ *      y <- tailcall(fib(n - 2))
+ *    } yield (x + y)
+ *
+ *  fib(40).result
  *  }}}
  */
 object TailCalls {
@@ -30,14 +43,43 @@ object TailCalls {
   /** This class represents a tailcalling computation
    */
   abstract class TailRec[+A] {
+
+    /** Continue the computation with `f`. */
+    final def map[B](f: A => B): TailRec[B] =
+      flatMap(a => Call(() => Done(f(a))))
+
+    /** Continue the computation with `f` and merge the trampolining
+      * of this computation with that of `f`. */
+    final def flatMap[B](f: A => TailRec[B]): TailRec[B] =
+      this match {
+        case Done(a) => Call(() => f(a))
+        case c@Call(_) => Cont(c, f)
+        // Take advantage of the monad associative law to optimize the size of the required stack
+        case c: Cont[a1, b1] => Cont(c.a, (x: a1) => c f x flatMap f)
+      }
+
+    /** Returns either the next step of the tailcalling computation,
+      * or the result if there are no more steps. */
+    @annotation.tailrec final def resume: Either[() => TailRec[A], A] = this match {
+      case Done(a) => Right(a)
+      case Call(k) => Left(k)
+      case Cont(a, f) => a match {
+        case Done(v) => f(v).resume
+        case Call(k) => Left(() => k().flatMap(f))
+        case Cont(b, g) => b.flatMap(x => g(x) flatMap f).resume
+      }
+    }
+
     /** Returns the result of the tailcalling computation.
      */
-    def result: A = {
-      def loop(body: TailRec[A]): A = body match {
-        case Call(rest) => loop(rest())
-        case Done(result) => result
+    @annotation.tailrec final def result: A = this match {
+      case Done(a) => a
+      case Call(t) => t().result
+      case Cont(a, f) => a match {
+        case Done(v) => f(v).result
+        case Call(t) => t().flatMap(f).result
+        case Cont(b, g) => b.flatMap(x => g(x) flatMap f).result
       }
-      loop(this)
     }
   }
 
@@ -46,19 +88,23 @@ object TailCalls {
 
   /** Internal class representing the final result returned from a tailcalling
     * computation */
-  protected case class Done[A](override val result: A) extends TailRec[A]
+  protected case class Done[A](value: A) extends TailRec[A]
+
+  /** Internal class representing a continuation with function A => TailRec[B].
+    * It is needed for the flatMap to be implemented. */
+  protected case class Cont[A, B](a: TailRec[A], f: A => TailRec[B]) extends TailRec[B]
 
   /** Performs a tailcall
    *  @param rest  the expression to be evaluated in the tailcall
    *  @return a `TailRec` object representing the expression `rest`
    */
-  def tailcall[A](rest: => TailRec[A]): TailRec[A] = new Call(() => rest)
+  def tailcall[A](rest: => TailRec[A]): TailRec[A] = Call(() => rest)
 
   /** Used to return final result from tailcalling computation
    *  @param  `result` the result value
    *  @return a `TailRec` object representing a computation which immediately
    *          returns `result`
    */
-  def done[A](result: A): TailRec[A] = new Done(result)
+  def done[A](result: A): TailRec[A] = Done(result)
 
 }
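
Besides result, the new resume method lets callers drive the trampoline one step at a time, and map/flatMap make TailRec usable in for-comprehensions (as in the fib example added to the scaladoc above). A minimal sketch of stepping a computation manually; countDown is a hypothetical helper:

    import scala.util.control.TailCalls._

    def countDown(n: Int): TailRec[Int] =
      if (n == 0) done(0) else tailcall(countDown(n - 1))

    // Drive the trampoline by hand via resume instead of calling result.
    @annotation.tailrec
    def run[A](t: TailRec[A]): A = t.resume match {
      case Right(a)   => a              // computation finished
      case Left(step) => run(step())    // take one more bounce
    }

    run(countDown(100000))              // 0, with constant stack usage
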
diff --git a/src/library/scala/util/grammar/HedgeRHS.scala b/src/library/scala/util/grammar/HedgeRHS.scala
deleted file mode 100644
index d1c11a2..0000000
--- a/src/library/scala/util/grammar/HedgeRHS.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.grammar
-
- at deprecated("This class will be removed", "2.10.0")
-abstract class HedgeRHS
-
-/** Right hand side of a hedge production, deriving a single tree. */
- at deprecated("This class will be removed", "2.10.0")
-case class ConsRHS(tnt: Int, hnt: Int) extends HedgeRHS
-
-/** Right hand side of a hedge production, deriving any hedge. */
- at deprecated("This class will be removed", "2.10.0")
-case object AnyHedgeRHS extends HedgeRHS
-
-/** Right hand side of a hedge production, deriving the empty hedge. */
- at deprecated("This class will be removed", "2.10.0")
-case object EmptyHedgeRHS extends HedgeRHS
diff --git a/src/library/scala/util/grammar/TreeRHS.scala b/src/library/scala/util/grammar/TreeRHS.scala
deleted file mode 100644
index ee72ea9..0000000
--- a/src/library/scala/util/grammar/TreeRHS.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.grammar
-
-/** Right hand side of a tree production. */
- at deprecated("This class will be removed", "2.10.0")
-abstract class TreeRHS
-
-/** Right hand side of a tree production, labelled with a letter from an alphabet. */
- at deprecated("This class will be removed", "2.10.0")
-case class LabelledRHS[A](label: A, hnt: Int) extends TreeRHS
-
- at deprecated("This class will be removed", "2.10.0")
-case object AnyTreeRHS extends TreeRHS
diff --git a/src/library/scala/util/hashing/ByteswapHashing.scala b/src/library/scala/util/hashing/ByteswapHashing.scala
index a969457..4704797 100644
--- a/src/library/scala/util/hashing/ByteswapHashing.scala
+++ b/src/library/scala/util/hashing/ByteswapHashing.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.util.hashing
+package scala
+package util.hashing
 
 
 
@@ -16,20 +17,20 @@ package scala.util.hashing
 /** A fast multiplicative hash by Phil Bagwell.
  */
 final class ByteswapHashing[T] extends Hashing[T] {
-  
+
   def hash(v: T) = byteswap32(v.##)
-  
+
 }
 
 
 object ByteswapHashing {
-  
+
   private class Chained[T](h: Hashing[T]) extends Hashing[T] {
     def hash(v: T) = byteswap32(h.hash(v))
   }
-  
+
   /** Composes another `Hashing` with the Byteswap hash.
    */
   def chain[T](h: Hashing[T]): Hashing[T] = new Chained(h)
-  
+
 }
diff --git a/src/library/scala/util/hashing/Hashing.scala b/src/library/scala/util/hashing/Hashing.scala
index b57f858..2b72c1d 100644
--- a/src/library/scala/util/hashing/Hashing.scala
+++ b/src/library/scala/util/hashing/Hashing.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.util.hashing
+package scala
+package util.hashing
 
 import scala.annotation.implicitNotFound
 
diff --git a/src/library/scala/util/hashing/MurmurHash3.scala b/src/library/scala/util/hashing/MurmurHash3.scala
index 0aa7e6f..1bfaeb2 100644
--- a/src/library/scala/util/hashing/MurmurHash3.scala
+++ b/src/library/scala/util/hashing/MurmurHash3.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.util.hashing
+package scala
+package util.hashing
 
 import java.lang.Integer.{ rotateLeft => rotl }
 
@@ -76,7 +77,7 @@ private[hashing] class MurmurHash3 {
       h = mix(h, data)
       i += 2
     }
-    if (i < str.length) h = mixLast(h, str.charAt(i))
+    if (i < str.length) h = mixLast(h, str.charAt(i).toInt)
     finalizeHash(h, str.length)
   }
 
@@ -274,12 +275,4 @@ object MurmurHash3 extends MurmurHash3 {
     finalizeHash(h, n)
   }
   */
-
-  @deprecated("Use unorderedHash", "2.10.0")
-  final def symmetricHash[T](xs: scala.collection.GenTraversableOnce[T], seed: Int = symmetricSeed): Int =
-    unorderedHash(xs.seq, seed)
-
-  @deprecated("Use orderedHash", "2.10.0")
-  final def traversableHash[T](xs: scala.collection.GenTraversableOnce[T], seed: Int = traversableSeed): Int =
-    orderedHash(xs.seq, seed)
 }
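
The deprecated symmetricHash and traversableHash aliases are dropped here; as their deprecation messages indicated, unorderedHash and orderedHash (with an explicit seed) are the replacements. A minimal sketch:

    import scala.util.hashing.MurmurHash3
    import scala.util.hashing.MurmurHash3.{ symmetricSeed, traversableSeed }

    val xs = List(1, 2, 3)

    // Was traversableHash(xs): order-sensitive hash.
    val ordered   = MurmurHash3.orderedHash(xs, traversableSeed)

    // Was symmetricHash(xs): order-insensitive hash, e.g. for set-like collections.
    val unordered = MurmurHash3.unorderedHash(xs, symmetricSeed)
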
diff --git a/src/library/scala/util/hashing/package.scala b/src/library/scala/util/hashing/package.scala
index 7d38f15..2c8e015 100644
--- a/src/library/scala/util/hashing/package.scala
+++ b/src/library/scala/util/hashing/package.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.util
+package scala
+package util
 
 
 
@@ -14,7 +15,7 @@ package scala.util
 
 
 package object hashing {
-  
+
   /** Fast multiplicative hash with a nice distribution.
    */
   def byteswap32(v: Int): Int = {
@@ -22,7 +23,7 @@ package object hashing {
     hc = java.lang.Integer.reverseBytes(hc)
     hc * 0x9e3775cd
   }
-  
+
   /** Fast multiplicative hash with a nice distribution
    *  for 64-bit values.
    */
@@ -31,5 +32,5 @@ package object hashing {
     hc = java.lang.Long.reverseBytes(hc)
     hc * 0x9e3775cd9e3775cdL
   }
-  
+
 }
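
byteswap32 (and its 64-bit sibling) is the multiplicative scrambler that ByteswapHashing builds on; it can be used directly or chained behind an existing Hashing instance. A minimal sketch, with a deliberately toy length-based Hashing:

    import scala.util.hashing.{ byteswap32, ByteswapHashing, Hashing }

    // Scramble a raw hashCode directly.
    val h = byteswap32("scala".hashCode)

    // Chain the scrambler behind an existing Hashing instance.
    val byLength = new Hashing[String] { def hash(s: String) = s.length }
    val chained: Hashing[String] = ByteswapHashing.chain(byLength)
    chained.hash("scala")        // == byteswap32(5)
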
diff --git a/src/library/scala/util/logging/ConsoleLogger.scala b/src/library/scala/util/logging/ConsoleLogger.scala
deleted file mode 100644
index 74f058b..0000000
--- a/src/library/scala/util/logging/ConsoleLogger.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |                                         **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.logging
-
-/**
- *  The trait `ConsoleLogger` is mixed into a concrete class who
- *  has class `Logged` among its base classes.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
- at deprecated("This class will be removed.", "2.10.0")
-trait ConsoleLogger extends Logged {
-
-  /** logs argument to Console using [[scala.Console.println]]
-   */
-  override def log(msg: String): Unit = Console.println(msg)
-}
diff --git a/src/library/scala/util/logging/Logged.scala b/src/library/scala/util/logging/Logged.scala
deleted file mode 100644
index f2661d3..0000000
--- a/src/library/scala/util/logging/Logged.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |                                         **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.logging
-
-/** Mixing in Logged indicates that a class provides support for logging.
-  *
-  * For instance:
-  * {{{
-  * // The developer of the library writes:
-  * class MyClass extends Logged {
-  *   // do stuff, call log
-  * }
-  *
-  * // The user of the library instantiates:
-  * val x = new MyClass() with ConsoleLogger
-  * }}}
-  * and the logging is sent to the [[scala.util.logging.ConsoleLogger]] object.
-  */
- at deprecated("This class will be removed.", "2.10.0")
-trait Logged {
-  /** This method should log the message given as argument somewhere
-   *  as a side-effect.
-   *
-   *  @param msg  message to be logged
-   */
-  def log(msg: String): Unit = {}
-}
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 716d746..6743b9e 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -28,7 +28,8 @@
  *   into a [[java.lang.String]].
  *
  */
-package scala.util.matching
+package scala
+package util.matching
 
 import scala.collection.AbstractIterator
 import java.util.regex.{ Pattern, Matcher }
@@ -66,7 +67,21 @@ import java.util.regex.{ Pattern, Matcher }
  *  Regex, such as `findFirstIn` or `findAllIn`, or using it as an extractor in a
  *  pattern match.
  *
- *  Note, however, that when Regex is used as an extractor in a pattern match, it
+ *  Note that, when calling `findAllIn`, the resulting [[scala.util.matching.Regex.MatchIterator]]
+ *  needs to be initialized (by calling `hasNext` or `next()`, or causing these to be
+ *  called) before information about a match can be retrieved:
+ *
+ *  {{{
+ *  val msg = "I love Scala"
+ *
+ *  // val start = " ".r.findAllIn(msg).start // throws an IllegalStateException
+ *
+ *  val matches = " ".r.findAllIn(msg)
+ *  matches.hasNext // initializes the matcher
+ *  val start = matches.start
+ *  }}}
+ *
+ *  When Regex is used as an extractor in a pattern match, note that it
  *  only succeeds if the whole text can be matched. For this reason, one usually
  *  calls a method to find the matching substrings, and then use it as an extractor
  *  to break match into subgroups.
@@ -131,7 +146,7 @@ import java.util.regex.{ Pattern, Matcher }
  *  @author  Martin Odersky
  *  @version 1.1, 29/01/2008
  *
- *  @param regex      A string representing a regular expression
+ *  @param pattern    The compiled pattern
  *  @param groupNames A mapping from names to indices in capture groups
  *
  *  @define replacementString
@@ -144,49 +159,119 @@ import java.util.regex.{ Pattern, Matcher }
  *  to automatically escape these characters.
  */
 @SerialVersionUID(-2094783597747625537L)
-class Regex(regex: String, groupNames: String*) extends Serializable {
+class Regex private[matching](val pattern: Pattern, groupNames: String*) extends Serializable {
   outer =>
 
   import Regex._
 
-  /** The compiled pattern */
-  val pattern = Pattern.compile(regex)
+  /**
+    *  @param regex      A string representing a regular expression
+    *  @param groupNames A mapping from names to indices in capture groups
+    */
+  def this(regex: String, groupNames: String*) = this(Pattern.compile(regex), groupNames: _*)
 
-  /** Tries to match target (whole match) and returns the matching subgroups.
-   *  if the pattern has no subgroups, then it returns an empty list on a
-   *  successful match.
-   *
-   *  Note, however, that if some subgroup has not been matched, a `null` will
-   *  be returned for that subgroup.
+  /** Tries to match a [[java.lang.CharSequence]].
+   *  If the match succeeds, the result is a list of the matching
+   *  groups (or a `null` element if a group did not match any input).
+   *  If the pattern specifies no groups, then the result will be an empty list
+   *  on a successful match.
    *
+   *  This method attempts to match the entire input by default; to find the next
+   *  matching subsequence, use an unanchored Regex.
+
    *  For example:
    *
    *  {{{
    *  val p1 = "ab*c".r
-   *  val p2 = "a(b*)c".r
-   *
    *  val p1Matches = "abbbc" match {
    *    case p1() => true
    *    case _    => false
    *  }
-   *
+   *  val p2 = "a(b*)c".r
    *  val numberOfB = "abbbc" match {
    *    case p2(b) => Some(b.length)
    *    case _     => None
    *  }
+   *  val p3 = "b*".r.unanchored
+   *  val p3Matches = "abbbc" match {
+   *    case p3() => true
+   *    case _    => false
+   *  }
    *  }}}
    *
+   *  @param  s     The string to match
+   *  @return       The matches
+   */
+  def unapplySeq(s: CharSequence): Option[List[String]] = {
+    val m = pattern matcher s
+    if (runMatcher(m)) Some((1 to m.groupCount).toList map m.group)
+    else None
+  }
+
+  /** Tries to match the String representation of a [[scala.Char]].
+   *  If the match succeeds, the result is the first matching
+   *  group if any groups are defined, or an empty Sequence otherwise.
+   *
+   *  For example:
+   *
+   *  {{{
+   *  val cat = "cat"
+   *  // the case must consume the group to match
+   *  val r = """(\p{Lower})""".r
+   *  cat(0) match { case r(x) => true }
+   *  cat(0) match { case r(_) => true }
+   *  cat(0) match { case r(_*) => true }
+   *  cat(0) match { case r() => true }     // no match
+   *
+   *  // there is no group to extract
+   *  val r = """\p{Lower}""".r
+   *  cat(0) match { case r(x) => true }    // no match
+   *  cat(0) match { case r(_) => true }    // no match
+   *  cat(0) match { case r(_*) => true }   // matches
+   *  cat(0) match { case r() => true }     // matches
+   *
+   *  // even if there are multiple groups, only one is returned
+   *  val r = """((.))""".r
+   *  cat(0) match { case r(_) => true }    // matches
+   *  cat(0) match { case r(_,_) => true }  // no match
+   *  }}}
+   *
+   *  @param  c     The Char to match
+   *  @return       The match
+   */
+  def unapplySeq(c: Char): Option[List[Char]] = {
+    val m = pattern matcher c.toString
+    if (runMatcher(m)) {
+      if (m.groupCount > 0) Some((m group 1).toList) else Some(Nil)
+    } else None
+  }
+
+  /** Tries to match on a [[scala.util.matching.Regex.Match]].
+   *  A previously failed match results in None.
+   *  If a successful match was made against the current pattern, then that result is used.
+   *  Otherwise, this Regex is applied to the previously matched input,
+   *  and the result of that match is used.
+   */
+  def unapplySeq(m: Match): Option[List[String]] =
+    if (m.matched == null) None
+    else if (m.matcher.pattern == this.pattern) Some((1 to m.groupCount).toList map m.group)
+    else unapplySeq(m.matched)
+
+  /** Tries to match target.
    *  @param target The string to match
    *  @return       The matches
    */
+  @deprecated("Extracting a match result from anything but a CharSequence or Match is deprecated", "2.11.0")
   def unapplySeq(target: Any): Option[List[String]] = target match {
     case s: CharSequence =>
       val m = pattern matcher s
       if (runMatcher(m)) Some((1 to m.groupCount).toList map m.group)
       else None
-    case m: Match        => unapplySeq(m.matched)
-    case _               => None
+    case m: Match => unapplySeq(m.matched)
+    case _ => None
   }
+
+  //  @see UnanchoredRegex
   protected def runMatcher(m: Matcher) = m.matches()
 
   /** Return all matches of this regexp in given character sequence as a [[scala.util.matching.Regex.MatchIterator]],
@@ -196,11 +281,15 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
    *  that can be queried for data such as the text that precedes the
    *  match, subgroups, etc.
    *
+   *  Attempting to retrieve information about a match before initializing
+   *  the iterator can result in [[java.lang.IllegalStateException]]s. See
+   *  [[scala.util.matching.Regex.MatchIterator]] for details.
+   *
    *  @param source The text to match against.
    *  @return       A [[scala.util.matching.Regex.MatchIterator]] of all matches.
    *  @example      {{{for (words <- """\w+""".r findAllIn "A simple example.") yield words}}}
    */
-  def findAllIn(source: java.lang.CharSequence) = new Regex.MatchIterator(source, this, groupNames)
+  def findAllIn(source: CharSequence) = new Regex.MatchIterator(source, this, groupNames)
 
 
   /** Return all matches of this regexp in given character sequence as a
@@ -210,12 +299,12 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
    *  @return       A [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]] for all matches.
    *  @example      {{{for (words <- """\w+""".r findAllMatchIn "A simple example.") yield words.start}}}
    */
-  def findAllMatchIn(source: java.lang.CharSequence): Iterator[Match] = {
+  def findAllMatchIn(source: CharSequence): Iterator[Match] = {
     val matchIterator = findAllIn(source)
     new Iterator[Match] {
       def hasNext = matchIterator.hasNext
       def next: Match = {
-        matchIterator.next;
+        matchIterator.next()
         new Match(matchIterator.source, matchIterator.matcher, matchIterator.groupNames).force
       }
     }
@@ -228,7 +317,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
    *  @return       An [[scala.Option]] of the first matching string in the text.
    *  @example      {{{"""\w+""".r findFirstIn "A simple example." foreach println // prints "A"}}}
    */
-  def findFirstIn(source: java.lang.CharSequence): Option[String] = {
+  def findFirstIn(source: CharSequence): Option[String] = {
     val m = pattern.matcher(source)
     if (m.find) Some(m.group) else None
   }
@@ -245,7 +334,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
    *  @return       A [[scala.Option]] of [[scala.util.matching.Regex.Match]] of the first matching string in the text.
    *  @example      {{{("""[a-z]""".r findFirstMatchIn "A simple example.") map (_.start) // returns Some(2), the index of the first match in the text}}}
    */
-  def findFirstMatchIn(source: java.lang.CharSequence): Option[Match] = {
+  def findFirstMatchIn(source: CharSequence): Option[Match] = {
     val m = pattern.matcher(source)
     if (m.find) Some(new Match(source, m, groupNames)) else None
   }
@@ -262,7 +351,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
    *  @return       A [[scala.Option]] of the matched prefix.
    *  @example      {{{"""[a-z]""".r findPrefixOf "A simple example." // returns None, since the text does not begin with a lowercase letter}}}
    */
-  def findPrefixOf(source: java.lang.CharSequence): Option[String] = {
+  def findPrefixOf(source: CharSequence): Option[String] = {
     val m = pattern.matcher(source)
     if (m.lookingAt) Some(m.group) else None
   }
@@ -279,7 +368,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
    *  @return       A [[scala.Option]] of the [[scala.util.matching.Regex.Match]] of the matched string.
    *  @example      {{{"""\w+""".r findPrefixMatchOf "A simple example." map (_.after) // returns Some(" simple example.")}}}
    */
-  def findPrefixMatchOf(source: java.lang.CharSequence): Option[Match] = {
+  def findPrefixMatchOf(source: CharSequence): Option[Match] = {
     val m = pattern.matcher(source)
     if (m.lookingAt) Some(new Match(source, m, groupNames)) else None
   }
@@ -293,7 +382,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
    *  @return            The resulting string
    *  @example           {{{"""\d+""".r replaceAllIn ("July 15", "<NUMBER>") // returns "July <NUMBER>"}}}
    */
-  def replaceAllIn(target: java.lang.CharSequence, replacement: String): String = {
+  def replaceAllIn(target: CharSequence, replacement: String): String = {
     val m = pattern.matcher(target)
     m.replaceAll(replacement)
   }
@@ -316,7 +405,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
    * @param replacer    The function which maps a match to another string.
    * @return            The target string after replacements.
    */
-  def replaceAllIn(target: java.lang.CharSequence, replacer: Match => String): String = {
+  def replaceAllIn(target: CharSequence, replacer: Match => String): String = {
     val it = new Regex.MatchIterator(target, this, groupNames).replacementData
     it foreach (md => it replace replacer(md))
     it.replaced
@@ -343,7 +432,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
    * @param replacer    The function which optionally maps a match to another string.
    * @return            The target string after replacements.
    */
-  def replaceSomeIn(target: java.lang.CharSequence, replacer: Match => Option[String]): String = {
+  def replaceSomeIn(target: CharSequence, replacer: Match => Option[String]): String = {
     val it = new Regex.MatchIterator(target, this, groupNames).replacementData
     for (matchdata <- it ; replacement <- replacer(matchdata))
       it replace replacement
@@ -359,7 +448,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
    *  @param replacement The string that will replace the match
    *  @return            The resulting string
    */
-  def replaceFirstIn(target: java.lang.CharSequence, replacement: String): String = {
+  def replaceFirstIn(target: CharSequence, replacement: String): String = {
     val m = pattern.matcher(target)
     m.replaceFirst(replacement)
   }
@@ -370,7 +459,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
    *  @return        The array of strings computed by splitting the
    *                 input around matches of this regexp
    */
-  def split(toSplit: java.lang.CharSequence): Array[String] =
+  def split(toSplit: CharSequence): Array[String] =
     pattern.split(toSplit)
 
   /** Create a new Regex with the same pattern, but no requirement that
@@ -390,9 +479,11 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
    *
    *  @return        The new unanchored regex
    */
-  def unanchored: UnanchoredRegex = new Regex(regex, groupNames: _*) with UnanchoredRegex { override def anchored = outer }
+  def unanchored: UnanchoredRegex = new Regex(pattern, groupNames: _*) with UnanchoredRegex { override def anchored = outer }
   def anchored: Regex             = this
 
+  def regex: String = pattern.pattern
+
   /** The string defining the regular expression */
   override def toString = regex
 }
@@ -403,15 +494,7 @@ trait UnanchoredRegex extends Regex {
 }
 
 /** This object defines inner classes that describe
- *  regex matches and helper objects. The class hierarchy
- *  is as follows:
- *
- *  {{{
- *            MatchData
- *            /      \
- *   MatchIterator  Match
- *  }}}
- *
+ *  regex matches and helper objects.
  */
 object Regex {
 
@@ -421,7 +504,7 @@ object Regex {
   trait MatchData {
 
     /** The source from where the match originated */
-    val source: java.lang.CharSequence
+    val source: CharSequence
 
     /** The names of the groups, or an empty sequence if none are defined */
     val groupNames: Seq[String]
@@ -459,25 +542,25 @@ object Regex {
 
     /** The char sequence before first character of match,
      *  or `null` if nothing was matched */
-    def before: java.lang.CharSequence =
+    def before: CharSequence =
       if (start >= 0) source.subSequence(0, start)
       else null
 
     /** The char sequence before first character of match in group `i`,
      *  or `null` if nothing was matched for that group  */
-    def before(i: Int): java.lang.CharSequence =
+    def before(i: Int): CharSequence =
       if (start(i) >= 0) source.subSequence(0, start(i))
       else null
 
     /** Returns char sequence after last character of match,
      *  or `null` if nothing was matched */
-    def after: java.lang.CharSequence =
+    def after: CharSequence =
       if (end >= 0) source.subSequence(end, source.length)
       else null
 
     /** The char sequence after last character of match in group `i`,
      *  or `null` if nothing was matched for that group  */
-    def after(i: Int): java.lang.CharSequence =
+    def after(i: Int): CharSequence =
       if (end(i) >= 0) source.subSequence(end(i), source.length)
       else null
 
@@ -501,8 +584,8 @@ object Regex {
 
   /** Provides information about a successful match.
    */
-  class Match(val source: java.lang.CharSequence,
-              matcher: Matcher,
+  class Match(val source: CharSequence,
+              private[matching] val matcher: Matcher,
               val groupNames: Seq[String]) extends MatchData {
 
     /** The index of the first matched character */
@@ -561,9 +644,17 @@ object Regex {
     def unapplySeq(m: Match): Option[Seq[String]] = if (m.groupCount > 0) Some(1 to m.groupCount map m.group) else None
   }
 
-  /** A class to step through a sequence of regex matches
+  /** A class to step through a sequence of regex matches.
+   *
+   *  All methods inherited from [[scala.util.matching.Regex.MatchData]] will throw
+   *  a [[java.lang.IllegalStateException]] until the matcher is initialized. The
+   *  matcher can be initialized by calling `hasNext` or `next()` or causing these
+   *  methods to be called, such as by invoking `toString` or iterating through
+   *  the iterator's elements.
+   *
+   *  @see [[java.util.regex.Matcher]]
    */
-  class MatchIterator(val source: java.lang.CharSequence, val regex: Regex, val groupNames: Seq[String])
+  class MatchIterator(val source: CharSequence, val regex: Regex, val groupNames: Seq[String])
   extends AbstractIterator[String] with Iterator[String] with MatchData { self =>
 
     protected[Regex] val matcher = regex.pattern.matcher(source)
@@ -602,14 +693,14 @@ object Regex {
     /** Convert to an iterator that yields MatchData elements instead of Strings */
     def matchData: Iterator[Match] = new AbstractIterator[Match] {
       def hasNext = self.hasNext
-      def next = { self.next; new Match(source, matcher, groupNames).force }
+      def next = { self.next(); new Match(source, matcher, groupNames).force }
     }
 
     /** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support */
     private[matching] def replacementData = new AbstractIterator[Match] with Replacement {
       def matcher = self.matcher
       def hasNext = self.hasNext
-      def next = { self.next; new Match(source, matcher, groupNames).force }
+      def next = { self.next(); new Match(source, matcher, groupNames).force }
     }
   }
 
@@ -620,7 +711,7 @@ object Regex {
   private[matching] trait Replacement {
     protected def matcher: Matcher
 
-    private var sb = new java.lang.StringBuffer
+    private val sb = new java.lang.StringBuffer
 
     def replaced = {
       val newsb = new java.lang.StringBuffer(sb)
@@ -631,6 +722,14 @@ object Regex {
     def replace(rs: String) = matcher.appendReplacement(sb, rs)
   }
 
+  /** Quotes strings to be used literally in regex patterns.
+   *
+   *  All regex metacharacters in the input match themselves literally in the output.
+   *
+   *  @example {{{List("US$", "CAN$").map(Regex.quote).mkString("|").r}}}
+   */
+  def quote(text: String): String = Pattern quote text
+
   /** Quotes replacement strings to be used in replacement methods.
    *
    *  Replacement methods give special meaning to backslashes (`\`) and
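
Two of the additions in this file, Regex.quote and the unanchored extractor, combine naturally when a pattern embeds user-supplied literal text. A minimal sketch; the pattern and input are made up for illustration:

    import scala.util.matching.Regex

    // Quote a literal containing regex metacharacters, then embed it in a pattern.
    val price = ("""(\d+(?:\.\d+)?)\s*""" + Regex.quote("US$")).r.unanchored

    "total: 12.50 US$ incl. tax" match {
      case price(amount) => println(s"amount = $amount")   // prints "amount = 12.50"
      case _             => println("no price found")
    }
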
diff --git a/src/library/scala/util/parsing/ast/AbstractSyntax.scala b/src/library/scala/util/parsing/ast/AbstractSyntax.scala
deleted file mode 100644
index 30b20d7..0000000
--- a/src/library/scala/util/parsing/ast/AbstractSyntax.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |                                         **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.parsing.ast
-
-import scala.util.parsing.input.Positional
-
-/** This component provides the core abstractions for representing an Abstract Syntax Tree
- *
- * @author Adriaan Moors
- */
- at deprecated("This class will be removed", "2.10.0")
-trait AbstractSyntax {
-  /** The base class for elements of the abstract syntax tree.
-   */
-  trait Element extends Positional
-
-  /** The base class for elements in the AST that represent names [[scala.util.parsing.ast.Binders]].
-   */
-  trait NameElement extends Element {
-    def name: String
-    override def equals(that: Any): Boolean = that match {
-      case n: NameElement => n.name == name
-      case _ => false
-    }
-  }
-}
diff --git a/src/library/scala/util/parsing/ast/Binders.scala b/src/library/scala/util/parsing/ast/Binders.scala
deleted file mode 100644
index a6ad190..0000000
--- a/src/library/scala/util/parsing/ast/Binders.scala
+++ /dev/null
@@ -1,347 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.parsing.ast
-
-import scala.collection.AbstractIterable
-import scala.collection.mutable
-import scala.language.implicitConversions
-
-//DISCLAIMER: this code is highly experimental!
-
-  // TODO: avoid clashes when substituting
-  // TODO: check binders in the same scope are distinct
-
-/** This trait provides the core ''Scrap-Your-Boilerplate'' abstractions as
- *  well as implementations for common datatypes.
- *
- *  (Based on Ralf Lämmel's [[http://homepages.cwi.nl/~ralf/syb3/ SYB papers]].)
- *
- *  @author Adriaan Moors
- */
- at deprecated("This class will be removed", "2.10.0")
-trait Mappable {
-  trait Mapper { def apply[T <% Mappable[T]](x: T): T } /* TODO: having type `Forall T. T => T` is too strict:
-  sometimes we want to allow `Forall T >: precision. T => T` for some type `precision`, so that,
-  beneath a certain threshold, we have some leeway.
-  concretely: to use gmap for substitution, we simply require that ast nodes are mapped to ast nodes,
-  we can't require that the type is preserved precisely: a Name may map to e.g., a MethodCall
-  */
-
-  trait Mappable[T] {
-    // one-layer traversal
-    def gmap(f: Mapper): T
-    //  everywhere f x = f (gmapT (everywhere f) x)
-    def everywhere(f: Mapper)(implicit c: T => Mappable[T]): T =
-      f(gmap(new Mapper { def apply[T <% Mappable[T]](x: T): T = x.everywhere(f)}))
-  }
-
-  implicit def StringIsMappable(s: String): Mappable[String] =
-    new Mappable[String] {
-      def gmap(f: Mapper): String = f(s)
-    }
-
-  implicit def ListIsMappable[t <% Mappable[t]](xs: List[t]): Mappable[List[t]] =
-    new Mappable[List[t]] {
-      def gmap(f: Mapper): List[t] = (for (x <- xs) yield f(x)).toList
-    }
-
-  implicit def OptionIsMappable[t <% Mappable[t]](xs: Option[t]): Mappable[Option[t]] =
-    new Mappable[Option[t]] {
-      def gmap(f: Mapper): Option[t] = (for (x <- xs) yield f(x))
-    }
-}
-
-/** This component provides functionality for enforcing variable binding
- *  during parse-time.
- *
- *  When parsing simple languages, like Featherweight Scala, these parser
- *  combinators will fully enforce the binding discipline. When names are
- *  allowed to be left unqualified, these mechanisms would have to be
- *  complemented by an extra phase that resolves names that couldn't be
- *  resolved using the naive binding rules. (Maybe some machinery to
- *  model `implicit` binders (e.g., `this` and imported qualifiers)
- *  and selection on a binder will suffice?)
- *
- * @author Adriaan Moors
- */
-trait Binders extends AbstractSyntax with Mappable {
-  /** A `Scope` keeps track of one or more syntactic elements that represent bound names.
-   *  The elements it contains share the same scope and must all be distinct, as determined by `==`.
-   *
-   *  A `NameElement` `n` in the AST that is conceptually bound by a `Scope` `s`, is replaced by a
-   *  `BoundElement(n, s)`. (For example, in `val x:Int=x+1`, the first `x` is modelled by a
-   *  Scope `s` that contains `x` and the second `x` is represented by a `BoundElement(x, s)`)
-   *  The term (`x+1`) in scope of the Scope becomes an `UnderBinder(s, x+1)`.
-   *
-   *  A `NameElement` `n` is bound by a `Scope` `s` if it is wrapped as a `BoundElement(n, s)`, and
-   *  `s` has a binder element that is semantically equal (`equals` or `==`) to `n`.
-   *
-   *  A `Scope` is represented textually by its list of binder elements, followed by the scope's `id`.
-   *  For example: `[x, y]!1` represents the scope with `id` `1` and binder elements `x` and `y`.
-   *  (`id` is solely used for this textual representation.)
-   */
-  class Scope[binderType <: NameElement] extends AbstractIterable[binderType] with Iterable[binderType] {
-    private val substitution: mutable.Map[binderType, Element] =
-      new mutable.LinkedHashMap[binderType, Element] // a LinkedHashMap is ordered by insertion order -- important!
-
-    /** Returns a unique number identifying this Scope (only used for representation purposes). */
-    val id: Int = _Binder.genId
-
-    /** Returns the binders in this scope.
-     *  For a typical let-binding, this is just the variable name. For an argument list to a method body,
-     *  there is one binder per formal argument.
-     */
-    def iterator = substitution.keysIterator
-
-    /** Return the `i`th binder in this scope. */
-    def apply(i: Int): binderType = this.iterator.toList(i)
-
-    /** Returns true if this container has a binder equal (as determined by `==`) to `b`. */
-    def binds(b: binderType): Boolean = substitution.contains(b)
-
-    def indexFor(b: binderType): Option[Int] = {
-      val iter = this.iterator.zipWithIndex
-      for ((that, count) <- iter) {
-        if (that.name == b.name) // TODO: why do name equals and structural equals differ?
-          return Some(count + 1)
-        else
-          Console.println(that+"!="+b)
-      }
-
-      None
-    }
-
-    /** Adds a new binder, for example the variable name in a local variable declaration.
-     *
-     * @param b a new binder that is distinct from the existing binders in this scope,
-     *           and shares their conceptual scope. `canAddBinder(b)` must hold.
-     * @return `binds(b)` and `getElementFor(b) eq b` will hold.
-     */
-    def addBinder(b: binderType) { substitution += Pair(b, b) }
-
-    // TODO: strengthen this condition so that no binders may be added after this scope has been
-    //       linked to its `UnderBinder` (i.e., while parsing, BoundElements may be added to the Scope
-    //       associated to the UnderBinder, but after that, no changes are allowed, except for substitution)?
-    /** `canAddBinder` indicates whether `b` may be added to this scope.
-     *
-     * @return true if `b` has not been added yet
-     */
-    def canAddBinder(b: binderType): Boolean = !binds(b)
-
-    /** ''Replaces'' the bound occurrences of a contained binder by their new value.
-     *  The bound occurrences of `b` are not actually replaced; the scope keeps track
-     *  of a substitution that maps every binder to its current value. Since a `BoundElement` is
-     *  a proxy for the element it is bound to by its binder, `substitute` may thus be thought of
-     *  as replacing all the bound occurrences of the given binder `b` by their new value `value`.
-     *
-     *  @param b    the binder whose bound occurrences should be given a new value. `binds(b)` must hold.
-     *  @param value the new value for the bound occurrences of `b`
-     *  @return `getElementFor(b) eq value` will hold.
-     */
-    def substitute(b: binderType, value: Element): Unit = substitution(b) = value
-
-    /** Returns the current value for the bound occurrences of `b`.
-     *
-     *  @param b the contained binder whose current value should be returned. `binds(b)` must hold.
-     */
-    def getElementFor(b: binderType): Element = substitution(b)
-
-    override def toString: String =  this.iterator.toList.mkString("[",", ","]")+"!"+id // TODO show substitution?
-
-    /** Returns a list of strings that represent the binder elements, each tagged with this scope's id. */
-    def bindersToString: List[String] = (for(b <- this.iterator) yield b+"!"+id).toList
-
-    /** Return a new inheriting scope that won't check whether binding is respected until the scope is left (so as to support forward references). */
-    def allowForwardRef: Scope[binderType] = this // TODO
-
-    /** Return a nested scope -- binders entered into it won't be visible in this scope, but if this scope allows forward references,
-     *  the binding in the returned scope also does, and thus the check that all variables are bound is deferred until this scope is left.
-     */
-    def nested: Scope[binderType] = this // TODO
-
-    def onEnter() {}
-    def onLeft() {}
-  }
-
-
-  trait BindingSensitive {
-    // would like to specify this as one method:
-    // def alpha_==[t <: NameElement](other: BoundElement[t]): Boolean
-    // def alpha_==[bt <: binderType, st <: elementT](other: UnderBinder[bt, st]): Boolean
-  }
-
-  /** A `BoundElement` is bound in a certain scope `scope`, which keeps track of the actual element that
-   *  `el` stands for.
-   *
-   *  A `BoundElement` is represented textually by its bound element, followed by its scope's `id`.
-   *  For example: `x@1` represents the variable `x` that is bound in the scope with `id` `1`.
-   *
-   *  @note `scope.binds(el)` holds before and after.
-   */
-  case class BoundElement[boundElement <: NameElement](el: boundElement, scope: Scope[boundElement]) extends NameElement with Proxy with BindingSensitive {
-    /** Returns the element this `BoundElement` stands for.
-     *  The `Proxy` trait ensures `equals`, `hashCode` and `toString` are forwarded to
-     *  the result of this method.
-     */
-    def self: Element = scope.getElementFor(el)
-
-    def name = self.asInstanceOf[NameElement].name // TODO: this is only safe when substituted to a NameElement, which certainly isn't required -- I want dynamic inheritance! :)
-
-    // decorate element's representation with the id of the scope it's bound in
-    override def toString: String =  super.toString+"@"+scope.id
-
-    def alpha_==[t <: NameElement](other: BoundElement[t]): Boolean = scope.indexFor(el) == other.scope.indexFor(other.el)
-  }
-
-  /** A variable that escaped its scope (i.e., a free variable) -- we don't deal very well with these yet. */
-  class UnboundElement[N <: NameElement](private val el: N) extends NameElement {
-    def name = el.name+"@??"
-  }
-
-  // this is useless, as Element is a supertype of BoundElement --> the coercion will never be inferred
-  // if we knew a more specific type for the element that the bound element represents, this could make sense
-  // implicit def BoundElementProxy[t <: NameElement](e: BoundElement[t]): Element = e.self
-
-  /** Represents an element with variables that are bound in a certain scope. */
-  class UnderBinder[binderType  <: NameElement, elementT <% Mappable[elementT]](val scope: Scope[binderType], private[Binders] val element: elementT) extends Element with BindingSensitive {
-    override def toString: String = "(" + scope.toString + ") in { "+element.toString+" }"
-
-    /** Alpha-equivalence -- TODO
-     *  Returns true if the `element` of the `other` `UnderBinder` is equal to this `element` up to alpha-conversion.
-     *
-     *  That is, regular equality is used for all elements but `BoundElement`s: such an element is
-     *  equal to a `BoundElement` in `other` if their binders are equal. Binders are equal if they
-     *  are at the same index in their respective scope.
-     *
-     *  Example:
-     *  {{{
-     *    UnderBinder([x, y]!1, x@1) alpha_== UnderBinder([a, b]!2, a@2)
-     *    ! (UnderBinder([x, y]!1, y@1) alpha_== UnderBinder([a, b]!2, a@2))
-     *  }}}
-     */
-    /*def alpha_==[bt <: binderType, st <: elementT](other: UnderBinder[bt, st]): Boolean = {
-       var result = true
-
-       // TODO: generic zip or gmap2
-       element.gmap2(other.element, new Mapper2 {
-         def apply[s  <% Mappable[s], t  <% Mappable[t]](x :{s, t}): {s, t} = x match {
-           case {be1: BoundElement[_], be2: BoundElement[_]} => result == result && be1.alpha_==(be2) // monadic gmap (cheating using state directly)
-           case {ub1: UnderBinder[_, _], ub2: UnderBinder[_, _]} => result == result && be1.alpha_==(be2)
-           case {a, b} => result == result && a.equals(b)
-         }; x
-       })
-    }*/
-
-    def cloneElementWithSubst(subst: Map[NameElement, NameElement]) = element.gmap(new Mapper { def apply[t <% Mappable[t]](x :t): t = x match{
-      case substable: NameElement if subst.contains(substable) => subst.get(substable).asInstanceOf[t] // TODO: wrong... substitution is not (necessarily) the identity function
-         //Console.println("substed: "+substable+"-> "+subst.get(substable)+")");
-      case x => x // Console.println("subst: "+x+"(keys: "+subst.keys+")");x
-    }})
-
-    // TODO
-    def cloneElementNoBoundElements = element.gmap(new Mapper { def apply[t <% Mappable[t]](x :t): t = x match{
-      case BoundElement(el, _) => new UnboundElement(el).asInstanceOf[t] // TODO: precision stuff
-      case x => x
-    }})
-
-    def extract: elementT = cloneElementNoBoundElements
-    def extract(subst: Map[NameElement, NameElement]): elementT = cloneElementWithSubst(subst)
-
-    /** Get a string representation of `element`. Normally we don't allow direct access to `element`, but getting a string representation is ok. */
-    def elementToString: String = element.toString
-  }
-
-  //SYB type class instances
-  implicit def UnderBinderIsMappable[bt <: NameElement <% Mappable[bt], st <% Mappable[st]](ub: UnderBinder[bt, st]): Mappable[UnderBinder[bt, st]] =
-    new Mappable[UnderBinder[bt, st]] {
-      def gmap(f: Mapper): UnderBinder[bt, st] = UnderBinder(f(ub.scope), f(ub.element))
-    }
-
-  implicit def ScopeIsMappable[bt <: NameElement <% Mappable[bt]](scope: Scope[bt]): Mappable[Scope[bt]] =
-    new Mappable[Scope[bt]] {
-      def gmap(f: Mapper): Scope[bt] = { val newScope = new Scope[bt]()
-        for(b <- scope) newScope.addBinder(f(b))
-        newScope
-      }
-    }
-
-  implicit def NameElementIsMappable(self: NameElement): Mappable[NameElement] = new Mappable[NameElement] {
-    def gmap(f: Mapper): NameElement = self match {
-      case BoundElement(el, scope) => BoundElement(f(el), f(scope))
-      case _ => UserNameElementIsMappable(self).gmap(f)
-    }
-  }
-
-  def UserNameElementIsMappable[t <: NameElement](self: t): Mappable[t]
-
-  object UnderBinder {
-    def apply[binderType <: NameElement, elementT <% Mappable[elementT]](scope: Scope[binderType], element: elementT) = new UnderBinder(scope, element)
-    def unit[bt <: NameElement, elementT <% Mappable[elementT]](x: elementT) = UnderBinder(new Scope[bt](), x)
-  }
-
-  /** If a list of `UnderBinder`s all have the same scope, they can be turned into an `UnderBinder`
-   *  containing a list of the elements in the original `UnderBinder`.
-   *
-   *  The name `sequence` comes from the fact that this method's type is equal to the type of monadic sequence.
-   *
-   *  @note `!orig.isEmpty` implies `orig.forall(ub => ub.scope eq orig(0).scope)`
-   *
-   */
-  def sequence[bt <: NameElement, st <% Mappable[st]](orig: List[UnderBinder[bt, st]]): UnderBinder[bt, List[st]] =
-    if(orig.isEmpty) UnderBinder.unit(Nil)
-    else UnderBinder(orig(0).scope, orig.map(_.element))
-
-  // couldn't come up with a better name...
-  def unsequence[bt <: NameElement, st <% Mappable[st]](orig: UnderBinder[bt, List[st]]): List[UnderBinder[bt, st]] =
-    orig.element.map(sc => UnderBinder(orig.scope, sc))
-
-  //TODO: more documentation
-  /** An environment that maps a `NameElement` to the scope in which it is bound.
-   *  This can be used to model scoping during parsing.
-   *
-   *  @note This class uses similar techniques as described by ''Burak Emir'' in
-   *        [[http://library.epfl.ch/theses/?nr=3899 Object-oriented pattern matching]],
-   *        but uses `==` instead of `eq`, thus types can't be unified in general.
-   */
-  abstract class BinderEnv {
-    def apply[A <: NameElement](v: A): Option[Scope[A]]
-    def extend[a <: NameElement](v : a, x : Scope[a]) = new BinderEnv {
-      def apply[b <: NameElement](w : b): Option[Scope[b]] =
-        if(w == v) Some(x.asInstanceOf[Scope[b]])
-        else BinderEnv.this.apply(w)
-    }
-  }
-
-  object EmptyBinderEnv extends BinderEnv {
-    def apply[A <: NameElement](v: A): Option[Scope[A]] = None
-  }
-
-  // TODO: move this to some utility object higher in the scala hierarchy?
-  /** Returns a given result, but executes the supplied closure before returning.
-   *  (The effect of this closure does not influence the returned value.)
-   */
-  trait ReturnAndDo[T]{
-    /**
-     *  @param block  code to be executed, purely for its side-effects
-     */
-    def andDo(block: => Unit): T
-  }
-
-  def return_[T](result: T): ReturnAndDo[T] =
-    new ReturnAndDo[T] {
-      val r = result
-      def andDo(block: => Unit): T = {block; r}
-    }
-
-  private object _Binder {
-    private var currentId = 0
-    private[Binders] def genId = return_(currentId) andDo {currentId=currentId+1}
-  }
-}
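
A minimal, self-contained sketch of the `return_`/`andDo` idiom that `_Binder.genId`
uses above to hand out fresh scope ids (the `FreshIds` name is illustrative, not part
of the library):

    object FreshIds {
      // Return a value, but run a side-effecting block first; the block cannot
      // change the value that is returned.
      trait ReturnAndDo[T] { def andDo(block: => Unit): T }

      def return_[T](result: T): ReturnAndDo[T] =
        new ReturnAndDo[T] { def andDo(block: => Unit): T = { block; result } }

      private var currentId = 0
      // Hand out the current id, then bump the counter -- mirrors _Binder.genId.
      def genId: Int = return_(currentId) andDo { currentId += 1 }
    }

    // FreshIds.genId returns 0, then 1, then 2, ...
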
diff --git a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
deleted file mode 100644
index ad06749..0000000
--- a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |                                         **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.util.parsing.combinator
-
-import scala.language.implicitConversions
-
-/** This trait contains implicit conversions that come in handy when using the `^^` combinator.
- *
- *  Refer to [[scala.util.parsing.combinator.Parsers]] to construct an AST from the concrete syntax.
- *
- * These conversions are needed because the sequential composition combinator (`~`) combines its
- * constituents into a `~`. When several `~`s are combined, this results in nested `~`s (to the left).
- * The `flatten*` coercions make it easy to apply an `n`-argument function to a nested `~` of
- * depth `n-1`.
- *
- * The `headOptionTailToFunList` converts a function that takes a `List[A]` to a function that
- * accepts a `~[A, Option[List[A]]]` (this happens when parsing something of the following
- * shape: `p ~ opt("." ~ repsep(p, "."))` -- where `p` is a parser that yields an `A`).
- *
- * @author Martin Odersky
- * @author Iulian Dragos
- * @author Adriaan Moors
- */
-trait ImplicitConversions { self: Parsers =>
-  implicit def flatten2[A, B, C]         (f: (A, B) => C) =
-    (p: ~[A, B]) => p match {case a ~ b => f(a, b)}
-  implicit def flatten3[A, B, C, D]      (f: (A, B, C) => D) =
-    (p: ~[~[A, B], C]) => p match {case a ~ b ~ c => f(a, b, c)}
-  implicit def flatten4[A, B, C, D, E]   (f: (A, B, C, D) => E) =
-    (p: ~[~[~[A, B], C], D]) => p match {case a ~ b ~ c ~ d => f(a, b, c, d)}
-  implicit def flatten5[A, B, C, D, E, F](f: (A, B, C, D, E) => F) =
-    (p: ~[~[~[~[A, B], C], D], E]) => p match {case a ~ b ~ c ~ d ~ e=> f(a, b, c, d, e)}
-  implicit def headOptionTailToFunList[A, T] (f: List[A] => T)=
-    (p: ~[A, Option[List[A]]]) => f(p._1 :: (p._2 match { case Some(xs) => xs case None => Nil}))
-}
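
A minimal sketch of how the `flatten*` coercions are typically used together with `^^`
(assumes the parser-combinator classes deleted here are still available on the classpath,
e.g. via the scala-parser-combinators module; `Sums` and `mkAdd` are illustrative names):

    import scala.util.parsing.combinator.{ImplicitConversions, JavaTokenParsers}

    object Sums extends JavaTokenParsers with ImplicitConversions {
      case class Add(left: Double, right: Double)

      def mkAdd(l: String, r: String): Add = Add(l.toDouble, r.toDouble)

      // Without flatten2, the semantic action would be written
      // { case l ~ r => mkAdd(l, r) } against the nested `~` result.
      def sum: Parser[Add] =
        floatingPointNumber ~ ("+" ~> floatingPointNumber) ^^ flatten2(mkAdd _)
    }
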
diff --git a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
deleted file mode 100644
index 89832d3..0000000
--- a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.util.parsing.combinator
-
-import scala.annotation.migration
-
-/** `JavaTokenParsers` differs from [[scala.util.parsing.combinator.RegexParsers]]
- *  by adding the following definitions:
- *
- *  - `ident`
- *  - `wholeNumber`
- *  - `decimalNumber`
- *  - `stringLiteral`
- *  - `floatingPointNumber`
- */
-trait JavaTokenParsers extends RegexParsers {
-  /** Anything that is a valid Java identifier, according to
-   * <a href="http://docs.oracle.com/javase/specs/jls/se7/html/jls-3.html#jls-3.8">The Java Language Spec</a>.
-   * Generally, this means a letter, followed by zero or more letters or numbers.
-   */
-  def ident: Parser[String] =
-    """\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}*""".r
-  /** An integer, without sign or with a negative sign. */
-  def wholeNumber: Parser[String] =
-    """-?\d+""".r
-  /** Number following one of these rules:
-   *
-   *  - An integer. For example: `13`
-   *  - An integer followed by a decimal point. For example: `3.`
-   *  - An integer followed by a decimal point and fractional part. For example: `3.14`
-   *  - A decimal point followed by a fractional part. For example: `.1`
-   */
-  def decimalNumber: Parser[String] =
-    """(\d+(\.\d*)?|\d*\.\d+)""".r
-  /** Double quotes (`"`) enclosing a sequence of:
-   *
-   *  - Any character except double quotes, control characters or backslash (`\`)
-   *  - A backslash followed by another backslash, a single or double quote, or one
-   *    of the letters `b`, `f`, `n`, `r` or `t`
-   *  - `\` followed by `u` followed by four hexadecimal digits
-   */
-  @migration("`stringLiteral` allows escaping single and double quotes, but not forward slashes any longer.", "2.10.0")
-  def stringLiteral: Parser[String] =
-    ("\""+"""([^"\p{Cntrl}\\]|\\[\\'"bfnrt]|\\u[a-fA-F0-9]{4})*"""+"\"").r
-  /** A number following the rules of `decimalNumber`, with the following
-   *  optional additions:
-   *
-   *  - Preceded by a negative sign
-   *  - Followed by `e` or `E` and an optionally signed integer
-   *  - Followed by `f`, `F`, `d` or `D` (after the above rule, if both are used)
-   */
-  def floatingPointNumber: Parser[String] =
-    """-?(\d+(\.\d*)?|\d*\.\d+)([eE][+-]?\d+)?[fFdD]?""".r
-}
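
A minimal sketch using these token parsers (same classpath assumption as above; the
`KeyValue` object and its `pair` production are illustrative, not part of the library):

    import scala.util.parsing.combinator.JavaTokenParsers

    object KeyValue extends JavaTokenParsers {
      // `ident` and `floatingPointNumber` come from the trait deleted above;
      // whitespace between tokens is skipped by RegexParsers.
      def pair: Parser[(String, Double)] =
        ident ~ ("=" ~> floatingPointNumber) ^^ { case k ~ v => (k, v.toDouble) }
    }

    // KeyValue.parseAll(KeyValue.pair, "pi = 3.14") succeeds with ("pi", 3.14)
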
diff --git a/src/library/scala/util/parsing/combinator/PackratParsers.scala b/src/library/scala/util/parsing/combinator/PackratParsers.scala
deleted file mode 100644
index 16705d4..0000000
--- a/src/library/scala/util/parsing/combinator/PackratParsers.scala
+++ /dev/null
@@ -1,312 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.parsing.combinator
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.input.{ Reader, Position }
-import scala.collection.mutable
-import scala.language.implicitConversions
-
-/**
- *  `PackratParsers` is a component that extends the parser combinators
- *  provided by [[scala.util.parsing.combinator.Parsers]] with a memoization
- *  facility (''Packrat Parsing'').
- *
- *  Packrat Parsing is a technique for implementing backtracking,
- *  recursive-descent parsers, with the advantage that it guarantees
- *  unlimited lookahead and a linear parse time. Using this technique,
- *  left recursive grammars can also be accepted.
- *
- *  Using `PackratParsers` is very similar to using `Parsers`:
- *   - any class/trait that extends `Parsers` (directly or through a subclass)
- *     can mix in `PackratParsers`.
- *     Example: `'''object''' MyGrammar '''extends''' StandardTokenParsers '''with''' PackratParsers`
- *   - each grammar production previously declared as a `def` without formal
- *     parameters becomes a `lazy val`, and its type is changed from
- *     `Parser[Elem]` to `PackratParser[Elem]`.
- *     So, for example, `'''def''' production: Parser[Int] = {...}`
- *     becomes `'''lazy val''' production: PackratParser[Int] = {...}`
- *   - Important: using `PackratParser`s is not an ''all or nothing'' decision.
- *     They can be freely mixed with regular `Parser`s in a single grammar.
- *
- *  Cached parse results are attached to the ''input'', not the grammar.
- *  Therefore, `PackratParser`s require a `PackratReader` as input, which
- *  adds memoization to an underlying `Reader`. Programmers can create
- *  `PackratReader` objects manually, as in
- *  `production('''new''' PackratReader('''new''' lexical.Scanner("input")))`,
- *  but the more common way is to rely on the combinator `phrase` to wrap
- *  a given input with a `PackratReader` if the input is not one itself.
- *
- * @see Bryan Ford: "Packrat Parsing: Simple, Powerful, Lazy, Linear Time." ICFP'02
- * @see Alessandro Warth, James R. Douglass, Todd Millstein: "Packrat Parsers Can Support Left Recursion." PEPM'08
- *
- * @since 2.8
- * @author Manohar Jonnalagedda
- * @author Tiark Rompf
- */
-
-trait PackratParsers extends Parsers {
-
-  //type Input = PackratReader[Elem]
-
-  /**
-   * A specialized `Reader` class that wraps an underlying `Reader`
-   * and provides memoization of parse results.
-   */
-  class PackratReader[+T](underlying: Reader[T]) extends Reader[T] { outer =>
-
-    /*
-     * caching of intermediate parse results and information about recursion
-     */
-    private[PackratParsers] val cache = mutable.HashMap.empty[(Parser[_], Position), MemoEntry[_]]
-
-    private[PackratParsers] def getFromCache[T](p: Parser[T]): Option[MemoEntry[T]] = {
-      cache.get((p, pos)).asInstanceOf[Option[MemoEntry[T]]]
-    }
-
-    private[PackratParsers] def updateCacheAndGet[T](p: Parser[T], w: MemoEntry[T]): MemoEntry[T] = {
-      cache.put((p, pos),w)
-      w
-    }
-
-    /* a cache for storing parser heads: lets us know which parser is involved
-       in a recursion*/
-    private[PackratParsers] val recursionHeads: mutable.HashMap[Position, Head] = mutable.HashMap.empty
-
-    //a stack that keeps a list of all involved rules
-    private[PackratParsers] var lrStack: List[LR] = Nil
-
-    override def source: java.lang.CharSequence = underlying.source
-    override def offset: Int = underlying.offset
-
-    def first: T = underlying.first
-    def rest: Reader[T] = new PackratReader(underlying.rest) {
-      override private[PackratParsers] val cache = outer.cache
-      override private[PackratParsers] val recursionHeads = outer.recursionHeads
-      lrStack = outer.lrStack
-    }
-
-    def pos: Position = underlying.pos
-    def atEnd: Boolean = underlying.atEnd
-  }
-
-  /**
-   *  A parser generator delimiting whole phrases (i.e. programs).
-   *
-   *  Overridden to make sure any input passed to the argument parser
-   *  is wrapped in a `PackratReader`.
-   */
-  override def phrase[T](p: Parser[T]) = {
-    val q = super.phrase(p)
-    new PackratParser[T] {
-      def apply(in: Input) = in match {
-        case in: PackratReader[_] => q(in)
-        case in => q(new PackratReader(in))
-      }
-    }
-  }
-
-  private def getPosFromResult(r: ParseResult[_]): Position = r.next.pos
-
-  // auxiliary data structures
-
-  private case class MemoEntry[+T](var r: Either[LR,ParseResult[_]]){
-    def getResult: ParseResult[T] = r match {
-      case Left(LR(res,_,_)) => res.asInstanceOf[ParseResult[T]]
-      case Right(res) => res.asInstanceOf[ParseResult[T]]
-    }
-  }
-
-  private case class LR(var seed: ParseResult[_], var rule: Parser[_], var head: Option[Head]){
-    def getPos: Position = getPosFromResult(seed)
-  }
-
-  private case class Head(var headParser: Parser[_], var involvedSet: List[Parser[_]], var evalSet: List[Parser[_]]){
-    def getHead = headParser
-  }
-
-  /**
-   * The root class of packrat parsers.
-   */
-  abstract class PackratParser[+T] extends super.Parser[T]
-
-  /**
-   * Implicitly convert a parser to a packrat parser.
-   * The conversion is triggered by giving the appropriate target type:
-   * {{{
-   *   val myParser: PackratParser[MyResult] = aParser
-   * }}} */
-  implicit def parser2packrat[T](p: => super.Parser[T]): PackratParser[T] = {
-    lazy val q = p
-    memo(super.Parser {in => q(in)})
-  }
-
-  /*
-   * An unspecified function that is called when a packrat reader is applied.
-   * It verifies whether we are in the process of growing a parse or not.
-   * In the former case, it makes sure that rules involved in the recursion are evaluated.
-   * It also prevents non-involved rules from getting evaluated further
-   */
-  private def recall(p: super.Parser[_], in: PackratReader[Elem]): Option[MemoEntry[_]] = {
-    val cached = in.getFromCache(p)
-    val head = in.recursionHeads.get(in.pos)
-
-    head match {
-      case None => /*no heads*/ cached
-      case Some(h@Head(hp, involved, evalSet)) => {
-        //heads found
-        if(cached == None && !(hp::involved contains p)) {
-          //Nothing in the cache, and p is not involved
-          return Some(MemoEntry(Right(Failure("dummy ",in))))
-        }
-        if(evalSet contains p){
-          //something in cache, and p is in the evalSet
-          //remove the rule from the evalSet of the Head
-          h.evalSet = h.evalSet.filterNot(_==p)
-          val tempRes = p(in)
-          //we know that cached has an entry here
-          val tempEntry: MemoEntry[_] = cached.get // match {case Some(x: MemoEntry[_]) => x}
-          //cache is modified
-          tempEntry.r = Right(tempRes)
-        }
-        cached
-      }
-    }
-  }
-
-  /*
-   * setting up the left-recursion. We have the LR for the rule head
-   * we modify the involvedSets of all LRs in the stack, till we see
-   * the current parser again
-   */
-  private def setupLR(p: Parser[_], in: PackratReader[_], recDetect: LR): Unit = {
-    if(recDetect.head == None) recDetect.head = Some(Head(p, Nil, Nil))
-
-    in.lrStack.takeWhile(_.rule != p).foreach {x =>
-      x.head = recDetect.head
-      recDetect.head.map(h => h.involvedSet = x.rule::h.involvedSet)
-    }
-  }
-
-  /*
-   * growing the recursion, if needed:
-   * check whether the parser we are growing is the head of the rule.
-   * Not => no grow
-   */
-
-  /*
-   * Once the result of the recall function is known, if it is nil, then we need to store a dummy
-   * failure into the cache (much like in the previous listings) and compute the future parse. If it
-   * is not, however, this means we have detected a recursion, and we use the setupLR function
-   * to update each parser involved in the recursion.
-   */
-
-  private def lrAnswer[T](p: Parser[T], in: PackratReader[Elem], growable: LR): ParseResult[T] = growable match {
-    //growable will always be having a head, we can't enter lrAnswer otherwise
-    case LR(seed ,rule, Some(head)) =>
-      if(head.getHead != p) /*not head rule, so not growing*/ seed.asInstanceOf[ParseResult[T]]
-      else {
-        in.updateCacheAndGet(p, MemoEntry(Right[LR, ParseResult[T]](seed.asInstanceOf[ParseResult[T]])))
-        seed match {
-          case f@Failure(_,_) => f
-          case e@Error(_,_) => e
-          case s@Success(_,_) => /*growing*/ grow(p, in, head)
-        }
-      }
-    case _=> throw new Exception("lrAnswer with no head !!")
-  }
-
-  //p here should be strict (cannot be non-strict) !!
-  //failing left-recursive grammars: This is done by simply storing a failure if nothing is found
-
-  /**
-   * Explicitly convert a given parser to a memoizing packrat parser.
-   * In most cases, client code should avoid calling `memo` directly
-   * and rely on implicit conversion instead.
-   */
-  def memo[T](p: super.Parser[T]): PackratParser[T] = {
-    new PackratParser[T] {
-      def apply(in: Input) = {
-        /*
-         * transformed reader
-         */
-        val inMem = in.asInstanceOf[PackratReader[Elem]]
-
-        //look in the global cache if in a recursion
-        val m = recall(p, inMem)
-        m match {
-          //nothing has been done due to recall
-          case None =>
-            val base = LR(Failure("Base Failure",in), p, None)
-            inMem.lrStack = base::inMem.lrStack
-            //cache base result
-            inMem.updateCacheAndGet(p,MemoEntry(Left(base)))
-            //parse the input
-            val tempRes = p(in)
-            //the base variable has passed equality tests with the cache
-            inMem.lrStack = inMem.lrStack.tail
-            //check whether base has changed, if yes, we will have a head
-            base.head match {
-              case None =>
-                /*simple result*/
-                inMem.updateCacheAndGet(p,MemoEntry(Right(tempRes)))
-                tempRes
-              case s@Some(_) =>
-                /*non simple result*/
-                base.seed = tempRes
-                //the base variable has passed equality tests with the cache
-                val res = lrAnswer(p, inMem, base)
-                res
-            }
-
-          case Some(mEntry) => {
-            //entry found in cache
-            mEntry match {
-              case MemoEntry(Left(recDetect)) => {
-                setupLR(p, inMem, recDetect)
-                //all setupLR does is change the heads of the recursions, so the seed will stay the same
-                recDetect match {case LR(seed, _, _) => seed.asInstanceOf[ParseResult[T]]}
-              }
-              case MemoEntry(Right(res: ParseResult[_])) => res.asInstanceOf[ParseResult[T]]
-            }
-          }
-        }
-      }
-    }
-  }
-
-  private def grow[T](p: super.Parser[T], rest: PackratReader[Elem], head: Head): ParseResult[T] = {
-    //store the head into the recursionHeads
-    rest.recursionHeads.put(rest.pos, head /*match {case Head(hp,involved,_) => Head(hp,involved,involved)}*/)
-    val oldRes: ParseResult[T] = rest.getFromCache(p).get match {
-      case MemoEntry(Right(x)) => x.asInstanceOf[ParseResult[T]]
-      case _ => throw new Exception("impossible match")
-    }
-
-    //resetting the evalSet of the head of the recursion at each beginning of growth
-    head.evalSet = head.involvedSet
-    val tempRes = p(rest); tempRes match {
-      case s@Success(_,_) =>
-        if(getPosFromResult(oldRes) < getPosFromResult(tempRes)) {
-          rest.updateCacheAndGet(p, MemoEntry(Right(s)))
-          grow(p, rest, head)
-        } else {
-          //we're done with growing, we can remove data from recursion head
-          rest.recursionHeads -= rest.pos
-          rest.getFromCache(p).get match {
-            case MemoEntry(Right(x: ParseResult[_])) => x.asInstanceOf[ParseResult[T]]
-            case _ => throw new Exception("impossible match")
-          }
-        }
-      case f =>
-        rest.recursionHeads -= rest.pos
-        /*rest.updateCacheAndGet(p, MemoEntry(Right(f)));*/oldRes
-    }
-  }
-}
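
A minimal sketch of the left-recursion support described above (same classpath
assumption; `LeftRecursive` and its productions are illustrative names):

    import scala.util.parsing.combinator.{PackratParsers, RegexParsers}

    object LeftRecursive extends RegexParsers with PackratParsers {
      // A directly left-recursive production: it would loop forever with plain
      // Parsers, but is accepted once the productions are memoizing PackratParsers.
      lazy val expr: PackratParser[Int] =
        expr ~ ("+" ~> number) ^^ { case l ~ r => l + r } | number
      lazy val number: PackratParser[Int] = """\d+""".r ^^ (_.toInt)
    }

    // LeftRecursive.parseAll(LeftRecursive.expr, "1+2+3") succeeds with 6;
    // `phrase` (and thus `parseAll`) wraps the input in a PackratReader automatically.
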
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
deleted file mode 100644
index ead4446..0000000
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ /dev/null
@@ -1,928 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.parsing.combinator
-
-import scala.util.parsing.input._
-import scala.collection.mutable.ListBuffer
-import scala.annotation.tailrec
-import scala.annotation.migration
-import scala.language.implicitConversions
-import scala.util.DynamicVariable
-
-// TODO: better error handling (labelling like parsec's <?>)
-
-/** `Parsers` is a component that ''provides'' generic parser combinators.
- *
- *  There are two abstract members that must be defined in order to
- *  produce parsers: the type `Elem` and
- *  [[scala.util.parsing.combinator.Parsers.Parser]]. There are helper
- *  methods that produce concrete `Parser` implementations -- see ''primitive
- *  parser'' below.
- *
- *  A `Parsers` may define multiple `Parser` instances, which are combined
- *  to produce the desired parser.
- *
- *  The type of the elements these parsers should parse must be defined
- *  by declaring `Elem`
- *  (each parser is polymorphic in the type of result it produces).
- *
- *  There are two aspects to the result of a parser:
- *  1. success or failure
- *  1. the result.
- *
- *  A [[scala.util.parsing.combinator.Parsers.Parser]] produces both kinds of information,
- *  by returning a [[scala.util.parsing.combinator.Parsers.ParseResult]] when its `apply`
- *  method is called on an input.
- *
- *  The term ''parser combinator'' refers to the fact that these parsers
- *  are constructed from primitive parsers and composition operators, such
- *  as sequencing, alternation, optionality, repetition, lifting, and so on. For example,
- *  given `p1` and `p2` of type [[scala.util.parsing.combinator.Parsers.Parser]]:
- *
- *  {{{
- *  p1 ~ p2 // sequencing: must match p1 followed by p2
- *  p1 | p2 // alternation: must match either p1 or p2, with preference given to p1
- *  p1.?    // optionality: may match p1 or not
- *  p1.*    // repetition: matches any number of repetitions of p1
- *  }}}
- *
- *  These combinators are provided as methods on [[scala.util.parsing.combinator.Parsers.Parser]],
- *  or as methods taking one or more `Parsers` and returning a `Parser` provided in
- *  this class.
- *
- *  A ''primitive parser'' is a parser that accepts or rejects a single
- *  piece of input, based on a certain criterion, such as whether the
- *  input...
- *  - is equal to some given object (see method `accept`),
- *  - satisfies a certain predicate (see method `acceptIf`),
- *  - is in the domain of a given partial function (see method `acceptMatch`)
- *  - or other conditions, by using one of the other methods available, or subclassing `Parser`
- *
- *  Even more primitive parsers always produce the same result, irrespective of the input. See
- *  methods `success`, `err` and `failure` as examples.
- *
- *  @see [[scala.util.parsing.combinator.RegexParsers]] and other known subclasses for practical examples.
- *
- *  @author Martin Odersky
- *  @author Iulian Dragos
- *  @author Adriaan Moors
- */
-trait Parsers {
-  /** the type of input elements the provided parsers consume (When consuming
-   *  individual characters, a parser is typically called a ''scanner'', which
-   *  produces ''tokens'' that are consumed by what is normally called a ''parser''.
-   *  Nonetheless, the same principles apply, regardless of the input type.) */
-  type Elem
-
-  /** The parser input is an abstract reader of input elements, i.e. the type
-   *  of input the parsers in this component expect. */
-  type Input = Reader[Elem]
-
-  /** A base class for parser results. A result is either successful or not
-   *  (failure may be fatal, i.e., an Error, or not, i.e., a Failure). On
-   *  success, provides a result of type `T` which consists of some result
-   *  (and the rest of the input). */
-  sealed abstract class ParseResult[+T] {
-    /** Functional composition of ParseResults.
-     *
-     * @param f the function to be lifted over this result
-     * @return `f` applied to the result of this `ParseResult`, packaged up as a new `ParseResult`
-     */
-    def map[U](f: T => U): ParseResult[U]
-
-    /** Partial functional composition of ParseResults.
-     *
-     * @param f the partial function to be lifted over this result
-     * @param error a function that takes the same argument as `f` and
-     *        produces an error message to explain why `f` wasn't applicable
-     *        (it is called when this is the case)
-     * @return if `f` is defined at the result in this `ParseResult`, `f`
-     *         applied to the result of this `ParseResult`, packaged up as
-     *         a new `ParseResult`. If `f` is not defined, `Failure`.
-     */
-    def mapPartial[U](f: PartialFunction[T, U], error: T => String): ParseResult[U]
-
-    def flatMapWithNext[U](f: T => Input => ParseResult[U]): ParseResult[U]
-
-    def filterWithError(p: T => Boolean, error: T => String, position: Input): ParseResult[T]
-
-    def append[U >: T](a: => ParseResult[U]): ParseResult[U]
-
-    def isEmpty = !successful
-
-    /** Returns the embedded result. */
-    def get: T
-
-    def getOrElse[B >: T](default: => B): B =
-        if (isEmpty) default else this.get
-
-    val next: Input
-
-    val successful: Boolean
-  }
-
-  /** The success case of `ParseResult`: contains the result and the remaining input.
-   *
-   *  @param result The parser's output
-   *  @param next   The parser's remaining input
-   */
-  case class Success[+T](result: T, override val next: Input) extends ParseResult[T] {
-    def map[U](f: T => U) = Success(f(result), next)
-    def mapPartial[U](f: PartialFunction[T, U], error: T => String): ParseResult[U]
-       = if(f.isDefinedAt(result)) Success(f(result), next)
-         else Failure(error(result), next)
-
-    def flatMapWithNext[U](f: T => Input => ParseResult[U]): ParseResult[U]
-      = f(result)(next)
-
-    def filterWithError(p: T => Boolean, error: T => String, position: Input): ParseResult[T] =
-      if (p(result)) this
-      else Failure(error(result), position)
-
-    def append[U >: T](a: => ParseResult[U]): ParseResult[U] = this
-
-    def get: T = result
-
-    /** The toString method of a Success. */
-    override def toString = "["+next.pos+"] parsed: "+result
-
-    val successful = true
-  }
-
-  private lazy val lastNoSuccessVar = new DynamicVariable[Option[NoSuccess]](None)
-
-  @deprecated("lastNoSuccess was not thread-safe and will be removed in 2.11.0", "2.10.0")
-  def lastNoSuccess: NoSuccess = lastNoSuccessVar.value.orNull
-
-  @deprecated("lastNoSuccess was not thread-safe and will be removed in 2.11.0", "2.10.0")
-  def lastNoSuccess_=(x: NoSuccess): Unit = lastNoSuccessVar.value = Option(x)
-
-  /** A common super-class for unsuccessful parse results. */
-  sealed abstract class NoSuccess(val msg: String, override val next: Input) extends ParseResult[Nothing] { // when we don't care about the difference between Failure and Error
-    val successful = false
-
-    if (lastNoSuccessVar.value forall (v => !(next.pos < v.next.pos)))
-      lastNoSuccessVar.value = Some(this)
-
-    def map[U](f: Nothing => U) = this
-    def mapPartial[U](f: PartialFunction[Nothing, U], error: Nothing => String): ParseResult[U] = this
-
-    def flatMapWithNext[U](f: Nothing => Input => ParseResult[U]): ParseResult[U]
-      = this
-
-    def filterWithError(p: Nothing => Boolean, error: Nothing => String, position: Input): ParseResult[Nothing] = this
-
-    def get: Nothing = scala.sys.error("No result when parsing failed")
-  }
-  /** An extractor so `NoSuccess(msg, next)` can be used in matches. */
-  object NoSuccess {
-    def unapply[T](x: ParseResult[T]) = x match {
-      case Failure(msg, next)   => Some((msg, next))
-      case Error(msg, next)     => Some((msg, next))
-      case _                    => None
-    }
-  }
-
-  /** The failure case of `ParseResult`: contains an error-message and the remaining input.
-   *  Parsing will back-track when a failure occurs.
-   *
-   *  @param msg    An error message string describing the failure.
-   *  @param next   The parser's unconsumed input at the point where the failure occurred.
-   */
-  case class Failure(override val msg: String, override val next: Input) extends NoSuccess(msg, next) {
-    /** The toString method of a Failure yields an error message. */
-    override def toString = "["+next.pos+"] failure: "+msg+"\n\n"+next.pos.longString
-
-    def append[U >: Nothing](a: => ParseResult[U]): ParseResult[U] = { val alt = a; alt match {
-      case Success(_, _) => alt
-      case ns: NoSuccess => if (alt.next.pos < next.pos) this else alt
-    }}
-  }
-
-  /** The fatal failure case of ParseResult: contains an error-message and
-   *  the remaining input.
-   *  No back-tracking is done when a parser returns an `Error`.
-   *
-   *  @param msg    An error message string describing the error.
-   *  @param next   The parser's unconsumed input at the point where the error occurred.
-   */
-  case class Error(override val msg: String, override val next: Input) extends NoSuccess(msg, next) {
-    /** The toString method of an Error yields an error message. */
-    override def toString = "["+next.pos+"] error: "+msg+"\n\n"+next.pos.longString
-    def append[U >: Nothing](a: => ParseResult[U]): ParseResult[U] = this
-  }
-
-  def Parser[T](f: Input => ParseResult[T]): Parser[T]
-    = new Parser[T]{ def apply(in: Input) = f(in) }
-
-  def OnceParser[T](f: Input => ParseResult[T]): Parser[T] with OnceParser[T]
-    = new Parser[T] with OnceParser[T] { def apply(in: Input) = f(in) }
-
-  /** The root class of parsers.
-   *  Parsers are functions from the Input type to ParseResult.
-   */
-  abstract class Parser[+T] extends (Input => ParseResult[T]) {
-    private var name: String = ""
-    def named(n: String): this.type = {name=n; this}
-    override def toString() = "Parser ("+ name +")"
-
-    /** An unspecified method that defines the behaviour of this parser. */
-    def apply(in: Input): ParseResult[T]
-
-    def flatMap[U](f: T => Parser[U]): Parser[U]
-      = Parser{ in => this(in) flatMapWithNext(f)}
-
-    def map[U](f: T => U): Parser[U] //= flatMap{x => success(f(x))}
-      = Parser{ in => this(in) map(f)}
-
-    def filter(p: T => Boolean): Parser[T]
-      = withFilter(p)
-
-    def withFilter(p: T => Boolean): Parser[T]
-      = Parser{ in => this(in) filterWithError(p, "Input doesn't match filter: "+_, in)}
-
-    // no filter yet, dealing with zero is tricky!
-
-    @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
-    def append[U >: T](p0: => Parser[U]): Parser[U] = { lazy val p = p0 // lazy argument
-      Parser{ in => this(in) append p(in)}
-    }
-
-    // the operator formerly known as +++, ++, &, but now, behold the venerable ~
-    // it's short, light (looks like whitespace), has few overloaded meanings (thanks to the recent change from ~ to unary_~)
-    // and we love it! (or do we like `,` better?)
-
-    /** A parser combinator for sequential composition.
-     *
-     * `p ~ q` succeeds if `p` succeeds and `q` succeeds on the input left over by `p`.
-     *
-     * @param q a parser that will be executed after `p` (this parser)
-     *          succeeds -- evaluated at most once, and only when necessary.
-     * @return a `Parser` that -- on success -- returns a `~` (like a `Pair`,
-     *         but easier to pattern match on) that contains the result of `p` and
-     *         that of `q`. The resulting parser fails if either `p` or `q` fails.
-     */
-    @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
-    def ~ [U](q: => Parser[U]): Parser[~[T, U]] = { lazy val p = q // lazy argument
-      (for(a <- this; b <- p) yield new ~(a,b)).named("~")
-    }
-
-    /** A parser combinator for sequential composition which keeps only the right result.
-     *
-     * `p ~> q` succeeds if `p` succeeds and `q` succeeds on the input left over by `p`.
-     *
-     * @param q a parser that will be executed after `p` (this parser)
-     *        succeeds -- evaluated at most once, and only when necessary.
-     * @return a `Parser` that -- on success -- returns the result of `q`.
-     */
-    @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
-    def ~> [U](q: => Parser[U]): Parser[U] = { lazy val p = q // lazy argument
-      (for(a <- this; b <- p) yield b).named("~>")
-    }
-
-    /** A parser combinator for sequential composition which keeps only the left result.
-     *
-     *  `p <~ q` succeeds if `p` succeeds and `q` succeeds on the input
-     *           left over by `p`.
-     *
-     * @note <~ has lower operator precedence than ~ or ~>.
-     *
-     * @param q a parser that will be executed after `p` (this parser) succeeds -- evaluated at most once, and only when necessary
-     * @return a `Parser` that -- on success -- returns the result of `p`.
-     */
-    @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
-    def <~ [U](q: => Parser[U]): Parser[T] = { lazy val p = q // lazy argument
-      (for(a <- this; b <- p) yield a).named("<~")
-    }
-
-     /* not really useful: V cannot be inferred because Parser is covariant in first type parameter (V is always trivially Nothing)
-    def ~~ [U, V](q: => Parser[U])(implicit combine: (T, U) => V): Parser[V] = new Parser[V] {
-      def apply(in: Input) = seq(Parser.this, q)((x, y) => combine(x,y))(in)
-    }  */
-
-    /** A parser combinator for non-back-tracking sequential composition.
-     *
-     *  `p ~! q` succeeds if `p` succeeds and `q` succeeds on the input left over by `p`.
-     *   In case of failure, no back-tracking is performed (in an earlier parser produced by the `|` combinator).
-     *
-     * @param p a parser that will be executed after `p` (this parser) succeeds
-     * @return a `Parser` that -- on success -- returns a `~` (like a Pair, but easier to pattern match on)
-     *         that contains the result of `p` and that of `q`.
-     *         The resulting parser fails if either `p` or `q` fails, this failure is fatal.
-     */
-    def ~! [U](p: => Parser[U]): Parser[~[T, U]]
-      = OnceParser{ (for(a <- this; b <- commit(p)) yield new ~(a,b)).named("~!") }
-
-    /** A parser combinator for alternative composition.
-     *
-     *  `p | q` succeeds if `p` succeeds or `q` succeeds.
-     *   Note that `q` is only tried if `p`s failure is non-fatal (i.e., back-tracking is allowed).
-     *
-     * @param q a parser that will be executed if `p` (this parser) fails (and allows back-tracking)
-     * @return a `Parser` that returns the result of the first parser to succeed (out of `p` and `q`)
-     *         The resulting parser succeeds if (and only if)
-     *         - `p` succeeds, ''or''
-     *         - if `p` fails allowing back-tracking and `q` succeeds.
-     */
-    def | [U >: T](q: => Parser[U]): Parser[U] = append(q).named("|")
-
-    // TODO
-    /** A parser combinator for alternative with longest match composition.
-     *
-     *  `p ||| q` succeeds if `p` succeeds or `q` succeeds.
-     *  If `p` and `q` both succeed, the parser that consumed the most characters accepts.
-     *
-     * @param q0 a parser that accepts if `p` consumes fewer characters -- evaluated at most once, and only when necessary
-     * @return a `Parser` that returns the result of the parser consuming the most characters (out of `p` and `q`).
-     */
-    @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
-    def ||| [U >: T](q0: => Parser[U]): Parser[U] = new Parser[U] {
-      lazy val q = q0 // lazy argument
-      def apply(in: Input) = {
-        val res1 = Parser.this(in)
-        val res2 = q(in)
-
-        (res1, res2) match {
-          case (s1 @ Success(_, next1), s2 @ Success(_, next2)) => if (next2.pos < next1.pos) s1 else s2
-          case (s1 @ Success(_, _), _) => s1
-          case (_, s2 @ Success(_, _)) => s2
-          case (e1 @ Error(_, _), _) => e1
-          case (f1 @ Failure(_, next1), ns2 @ NoSuccess(_, next2)) => if (next2.pos < next1.pos) f1 else ns2
-        }
-      }
-      override def toString = "|||"
-    }
-
-    /** A parser combinator for function application.
-     *
-     *  `p ^^ f` succeeds if `p` succeeds; it returns `f` applied to the result of `p`.
-     *
-     * @param f a function that will be applied to this parser's result (see `map` in `ParseResult`).
-     * @return a parser that has the same behaviour as the current parser, but whose result is
-     *         transformed by `f`.
-     */
-    def ^^ [U](f: T => U): Parser[U] = map(f).named(toString+"^^")
-
-    /** A parser combinator that changes a successful result into the specified value.
-     *
-     *  `p ^^^ v` succeeds if `p` succeeds; discards its result, and returns `v` instead.
-     *
-     * @param v The new result for the parser, evaluated at most once (if `p` succeeds), not evaluated at all if `p` fails.
-     * @return a parser that has the same behaviour as the current parser, but whose successful result is `v`
-     */
-    @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
-    def ^^^ [U](v: => U): Parser[U] =  new Parser[U] {
-      lazy val v0 = v // lazy argument
-      def apply(in: Input) = Parser.this(in) map (x => v0)
-    }.named(toString+"^^^")
-
-    /** A parser combinator for partial function application.
-     *
-     *  `p ^? (f, error)` succeeds if `p` succeeds AND `f` is defined at the result of `p`;
-     *  in that case, it returns `f` applied to the result of `p`. If `f` is not applicable,
-     *  error(the result of `p`) should explain why.
-     *
-     * @param f a partial function that will be applied to this parser's result
-     *          (see `mapPartial` in `ParseResult`).
-     * @param error a function that takes the same argument as `f` and produces an error message
-     *        to explain why `f` wasn't applicable
-     * @return a parser that succeeds if the current parser succeeds <i>and</i> `f` is applicable
-     *         to the result. If so, the result will be transformed by `f`.
-     */
-    def ^? [U](f: PartialFunction[T, U], error: T => String): Parser[U] = Parser{ in =>
-      this(in).mapPartial(f, error)}.named(toString+"^?")
-
-    /** A parser combinator for partial function application.
-     *
-     *  `p ^? f` succeeds if `p` succeeds AND `f` is defined at the result of `p`;
-     *  in that case, it returns `f` applied to the result of `p`.
-     *
-     * @param f a partial function that will be applied to this parser's result
-     *          (see `mapPartial` in `ParseResult`).
-     * @return a parser that succeeds if the current parser succeeds <i>and</i> `f` is applicable
-     *         to the result. If so, the result will be transformed by `f`.
-     */
-    def ^? [U](f: PartialFunction[T, U]): Parser[U] = ^?(f, r => "Constructor function not defined at "+r)
-
-    /** A parser combinator that parameterizes a subsequent parser with the
-     *  result of this one.
-     *
-     *  Use this combinator when a parser depends on the result of a previous
-     *  parser. `p` should be a function that takes the result from the first
-     *  parser and returns the second parser.
-     *
-     *  `p into fq` (with `fq` typically `{x => q}`) first applies `p`, and
-     *  then, if `p` successfully returned result `r`, applies `fq(r)` to the
-     *  rest of the input.
-     *
-     *  ''From: G. Hutton. Higher-order functions for parsing. J. Funct. Program., 2(3):323--343, 1992.''
-     *
-     *  @example {{{
-     *  def perlRE = "m" ~> (".".r into (separator => """[^%s]*""".format(separator).r <~ separator))
-     *  }}}
-     *
-     *  @param fq a function that, given the result from this parser, returns
-     *         the second parser to be applied
-     *  @return a parser that succeeds if this parser succeeds (with result `x`)
-     *          and if then `fq(x)` succeeds
-     */
-    def into[U](fq: T => Parser[U]): Parser[U] = flatMap(fq)
-
-    // shortcuts for combinators:
-
-    /** Returns `into(fq)`. */
-    def >>[U](fq: T => Parser[U])=into(fq)
-
-    /** Returns a parser that repeatedly parses what this parser parses.
-     *
-     *  @return rep(this)
-     */
-    def * = rep(this)
-
-    /** Returns a parser that repeatedly parses what this parser parses,
-     *  interleaved with the `sep` parser. The `sep` parser specifies how
-     *  the results parsed by this parser should be combined.
-     *
-     *  @return chainl1(this, sep)
-     */
-    def *[U >: T](sep: => Parser[(U, U) => U]) = chainl1(this, sep)
-
-    // TODO: improve precedence? a ~ b*(",") = a ~ (b*(","))  should be true
-
-    /** Returns a parser that repeatedly (at least once) parses what this parser parses.
-     *
-     *  @return rep1(this)
-     */
-    def + = rep1(this)
-
-    /** Returns a parser that optionally parses what this parser parses.
-     *
-     *  @return opt(this)
-     */
-    def ? = opt(this)
-
-    /** Changes the failure message produced by a parser.
-     *
-     *  This doesn't change the behavior of a parser on either
-     *  success or error, just on failure. The semantics are
-     *  slightly different from those obtained by doing `| failure(msg)`,
-     *  in that the message produced by this method will always
-     *  replace the message produced, which is not guaranteed
-     *  by that idiom.
-     *
-     *  For example, parser `p` below will always produce the
-     *  designated failure message, while `q` will not produce
-     *  it if `sign` is parsed but `number` is not.
-     *
-     *  {{{
-     *  def p = sign.? ~ number withFailureMessage  "Number expected!"
-     *  def q = sign.? ~ number | failure("Number expected!")
-     *  }}}
-     *
-     *  @param msg The message that will replace the default failure message.
-     *  @return    A parser with the same properties and different failure message.
-     */
-    def withFailureMessage(msg: String) = Parser{ in =>
-      this(in) match {
-        case Failure(_, next) => Failure(msg, next)
-        case other            => other
-      }
-    }
-
-    /** Changes the error message produced by a parser.
-     *
-     *  This doesn't change the behavior of a parser on either
-     *  success or failure, just on error. The semantics are
-     *  slightly different from those obtained by doing `| error(msg)`,
-     *  in that the message produced by this method will always
-     *  replace the message produced, which is not guaranteed
-     *  by that idiom.
-     *
-     *  For example, parser `p` below will always produce the
-     *  designated error message, while `q` will not produce
-     *  it if `sign` is parsed but `number` is not.
-     *
-     *  {{{
-     *  def p = sign.? ~ number withErrorMessage  "Number expected!"
-     *  def q = sign.? ~ number | error("Number expected!")
-     *  }}}
-     *
-     *  @param msg The message that will replace the default error message.
-     *  @return    A parser with the same properties and different error message.
-     */
-    def withErrorMessage(msg: String) = Parser{ in =>
-      this(in) match {
-        case Error(_, next) => Error(msg, next)
-        case other          => other
-      }
-    }
-  }
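
    // Illustrative sketch (not part of the original file): the combinators on
    // `Parser` compose into a grammar like the one below, which also uses the
    // `elem`, `rep` and `phrase` helpers defined elsewhere in this trait.
    object Bits extends scala.util.parsing.combinator.Parsers {
      type Elem = Char
      def bit: Parser[Int]        = elem('0') ^^ (_ => 0) | elem('1') ^^ (_ => 1)
      def bits: Parser[List[Int]] = rep(bit)
      def parseBits(s: String): ParseResult[List[Int]] =
        phrase(bits)(new scala.util.parsing.input.CharSequenceReader(s))
    }
    // Bits.parseBits("1011") succeeds with List(1, 0, 1, 1)
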
-
-  /** Wrap a parser so that its failures become errors (the `|` combinator
-   *  will give up as soon as it encounters an error; on failure it simply
-   *  tries the next alternative).
-   */
-  def commit[T](p: => Parser[T]) = Parser{ in =>
-    p(in) match{
-      case s @ Success(_, _) => s
-      case e @ Error(_, _) => e
-      case f @ Failure(msg, next) => Error(msg, next)
-    }
-  }
-
-	/*trait ElemFun
-  case class EFCons(hd: Elem => ElemFun, tl: ElemFun) extends ElemFun
-  case class EFNil(res: Boolean) extends ElemFun*/
-
-  /** A parser matching input elements that satisfy a given predicate.
-   *
-   *  `elem(kind, p)` succeeds if the input starts with an element `e` for which `p(e)` is true.
-   *
-   *  @param  kind   The element kind, used for error messages
-   *  @param  p      A predicate that determines which elements match.
-   *  @return        A parser for elements satisfying `p(e)`; error messages mention `kind`.
-   */
-  def elem(kind: String, p: Elem => Boolean) = acceptIf(p)(inEl => kind+" expected")
-
-  /** A parser that matches only the given element `e`.
-   *
-   *  `elem(e)` succeeds if the input starts with an element `e`.
-   *
-   *  @param e the `Elem` that must be the next piece of input for the returned parser to succeed
-   *  @return a `Parser` that succeeds if `e` is the next available input (and returns it).
-   */
-  def elem(e: Elem): Parser[Elem] = accept(e)
-
-  /** A parser that matches only the given element `e`.
-   *
-   *  The method is implicit so that elements can automatically be lifted to their parsers.
-   *  For example, when parsing `Token`s, `Identifier("new")` (which is a `Token`) can be used directly,
-   *  instead of first creating a `Parser` using `accept(Identifier("new"))`.
-   *
-   *  @param e the `Elem` that must be the next piece of input for the returned parser to succeed
-   *  @return a `Parser` that succeeds if `e` is the next available input.
-   */
-  implicit def accept(e: Elem): Parser[Elem] = acceptIf(_ == e)("`"+e+"' expected but " + _ + " found")
-
-  /** A parser that matches only the given list of element `es`.
-   *
-   *  `accept(es)` succeeds if the input subsequently provides the elements in the list `es`.
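-   *
-   *  A usage sketch (assuming `Elem` is `Char`):
-   *  {{{
-   *  def letKeyword = accept("let".toList)   // succeeds on the consecutive characters 'l', 'e', 't'
-   *  }}}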
-   *
-   *  @param  es the list of expected elements
-   *  @return a Parser that recognizes a specified list of elements
-   */
-  def accept[ES <% List[Elem]](es: ES): Parser[List[Elem]] = acceptSeq(es)
-
-  /** The parser that matches an element in the domain of the partial function `f`.
-   *
-   *  If `f` is defined on the first element in the input, `f` is applied
-   *  to it to produce this parser's result.
-   *
-   *  Example: The parser `accept("name", {case Identifier(n) => Name(n)})`
-   *          accepts an `Identifier(n)` and returns a `Name(n)`
-   *
-   *  @param expected a description of the kind of element this parser expects (for error messages)
-   *  @param f a partial function that determines when this parser is successful and what its output is
-   *  @return A parser that succeeds if `f` is applicable to the first element of the input,
-   *          applying `f` to it to produce the result.
-   */
-  def accept[U](expected: String, f: PartialFunction[Elem, U]): Parser[U] = acceptMatch(expected, f)
-
-  /** A parser matching input elements that satisfy a given predicate.
-   *
-   *  `acceptIf(p)(el => "Unexpected "+el)` succeeds if the input starts with an element `e` for which `p(e)` is true.
-   *
-   *  @param  err    A function from the received element into an error message.
-   *  @param  p      A predicate that determines which elements match.
-   *  @return        A parser for elements satisfying p(e).
-   */
-  def acceptIf(p: Elem => Boolean)(err: Elem => String): Parser[Elem] = Parser { in =>
-    if (in.atEnd) Failure("end of input", in)
-    else if (p(in.first)) Success(in.first, in.rest)
-    else Failure(err(in.first), in)
-  }
-
-  /** The parser that matches an element in the domain of the partial function `f`.
-   *
-   *  If `f` is defined on the first element in the input, `f` is applied
-   *  to it to produce this parser's result.
-   *
-   *  Example: The parser `acceptMatch("name", {case Identifier(n) => Name(n)})`
-   *          accepts an `Identifier(n)` and returns a `Name(n)`
-   *
-   *  @param expected a description of the kind of element this parser expects (for error messages)
-   *  @param f a partial function that determines when this parser is successful and what its output is
-   *  @return A parser that succeeds if `f` is applicable to the first element of the input,
-   *          applying `f` to it to produce the result.
-   */
-  def acceptMatch[U](expected: String, f: PartialFunction[Elem, U]): Parser[U] = Parser{ in =>
-    if (in.atEnd) Failure("end of input", in)
-    else if (f.isDefinedAt(in.first)) Success(f(in.first), in.rest)
-    else Failure(expected+" expected", in)
-  }
-
-  /** A parser that matches only the given [[scala.collection.Iterable]] collection of elements `es`.
-   *
-   *  `acceptSeq(es)` succeeds if the input subsequently provides the elements in the iterable `es`.
-   *
-   *  @param  es the list of expected elements
-   *  @return a Parser that recognizes a specified list of elements
-   */
-  def acceptSeq[ES <% Iterable[Elem]](es: ES): Parser[List[Elem]] =
-    es.foldRight[Parser[List[Elem]]](success(Nil)){(x, pxs) => accept(x) ~ pxs ^^ mkList}
-
-  /** A parser that always fails.
-   *
-   * @param msg The error message describing the failure.
-   * @return A parser that always fails with the specified error message.
-   */
-  def failure(msg: String) = Parser{ in => Failure(msg, in) }
-
-  /** A parser that results in an error.
-   *
-   * @param msg The message describing the error.
-   * @return A parser that always produces an error with the specified message.
-   */
-  def err(msg: String) = Parser{ in => Error(msg, in) }
-
-  /** A parser that always succeeds.
-   *
-   * @param v The result for the parser
-   * @return A parser that always succeeds, with the given result `v`
-   */
-  def success[T](v: T) = Parser{ in => Success(v, in) }
-
-  /** A helper method that turns a `Parser` into one that will
-   *  print debugging information to stdout before and after
-   *  being applied.
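-   *
-   *  A usage sketch (the parser `term` is assumed to be in scope):
-   *  {{{
-   *  def factor = log(term)("factor")   // prints "trying factor at ..." and the result of each attempt
-   *  }}}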
-   */
-  def log[T](p: => Parser[T])(name: String): Parser[T] = Parser{ in =>
-    println("trying "+ name +" at "+ in)
-    val r = p(in)
-    println(name +" --> "+ r)
-    r
-  }
-
-  /** A parser generator for repetitions.
-   *
-   *  `rep(p)` repeatedly uses `p` to parse the input until `p` fails
-   *  (the result is a List of the consecutive results of `p`).
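-   *
-   *  For example, a sketch assuming a parser `digit: Parser[Char]`:
-   *  {{{
-   *  def digits: Parser[List[Char]] = rep(digit)   // succeeds with Nil when no digit matches
-   *  }}}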
-   *
-   * @param p a `Parser` that is to be applied successively to the input
-   * @return A parser that returns a list of results produced by repeatedly applying `p` to the input.
-   */
-  def rep[T](p: => Parser[T]): Parser[List[T]] = rep1(p) | success(List())
-
-  /** A parser generator for interleaved repetitions.
-   *
-   *  `repsep(p, q)` repeatedly uses `p` interleaved with `q` to parse the input, until `p` fails.
-   *  (The result is a `List` of the results of `p`.)
-   *
-   *  Example: `repsep(term, ",")` parses a comma-separated list of terms, yielding a list of these terms.
-   *
-   * @param p a `Parser` that is to be applied successively to the input
-   * @param q a `Parser` that parses the elements that separate the elements parsed by `p`
-   * @return A parser that returns a list of results produced by repeatedly applying `p` (interleaved with `q`) to the input.
-   *         The results of `p` are collected in a list. The results of `q` are discarded.
-   */
-  def repsep[T](p: => Parser[T], q: => Parser[Any]): Parser[List[T]] =
-    rep1sep(p, q) | success(List())
-
-  /** A parser generator for non-empty repetitions.
-   *
-   *  `rep1(p)` repeatedly uses `p` to parse the input until `p` fails -- `p` must succeed at least
-   *             once (the result is a `List` of the consecutive results of `p`)
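-   *
-   *  For example, a sketch assuming `digit: Parser[Char]`:
-   *  {{{
-   *  def number = rep1(digit) ^^ (_.mkString)   // one or more digits, joined into a String
-   *  }}}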
-   *
-   * @param p a `Parser` that is to be applied successively to the input
-   * @return A parser that returns a list of results produced by repeatedly applying `p` to the input
-   *        (and that only succeeds if `p` matches at least once).
-   */
-  def rep1[T](p: => Parser[T]): Parser[List[T]] = rep1(p, p)
-
-  /** A parser generator for non-empty repetitions.
-   *
-   *  `rep1(f, p)` first uses `f` (which must succeed) and then repeatedly
-   *     uses `p` to parse the input until `p` fails
-   *     (the result is a `List` of the consecutive results of `f` and `p`)
-   *
-   * @param first a `Parser` that parses the first piece of input
-   * @param p0 a `Parser` that is to be applied successively to the rest of the input (if any) -- evaluated at most once, and only when necessary
-   * @return A parser that returns a list of results produced by first applying `f` and then
-   *         repeatedly applying `p` to the input (it only succeeds if `f` matches).
-   */
-  @migration("The `p0` call-by-name arguments is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
-  def rep1[T](first: => Parser[T], p0: => Parser[T]): Parser[List[T]] = Parser { in =>
-    lazy val p = p0 // lazy argument
-    val elems = new ListBuffer[T]
-
-    def continue(in: Input): ParseResult[List[T]] = {
-      val p0 = p    // avoid repeatedly re-evaluating by-name parser
-      @tailrec def applyp(in0: Input): ParseResult[List[T]] = p0(in0) match {
-        case Success(x, rest) => elems += x ; applyp(rest)
-        case e @ Error(_, _)  => e  // still have to propagate error
-        case _                => Success(elems.toList, in0)
-      }
-
-      applyp(in)
-    }
-
-    first(in) match {
-      case Success(x, rest) => elems += x ; continue(rest)
-      case ns: NoSuccess    => ns
-    }
-  }
-
-  /** A parser generator for a specified number of repetitions.
-   *
-   *  `repN(n, p)` uses `p` exactly `n` times to parse the input
-   *  (the result is a `List` of the `n` consecutive results of `p`).
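-   *
-   *  A usage sketch (assuming `digit: Parser[Char]`):
-   *  {{{
-   *  def areaCode = repN(3, digit) ^^ (_.mkString)   // exactly three digits
-   *  }}}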
-   *
-   * @param p   a `Parser` that is to be applied successively to the input
-   * @param num the exact number of times `p` must succeed
-   * @return    A parser that returns a list of results produced by repeatedly applying `p` to the input
-   *        (and that only succeeds if `p` matches exactly `n` times).
-   */
-  def repN[T](num: Int, p: => Parser[T]): Parser[List[T]] =
-    if (num == 0) success(Nil) else Parser { in =>
-      val elems = new ListBuffer[T]
-      val p0 = p    // avoid repeatedly re-evaluating by-name parser
-
-      @tailrec def applyp(in0: Input): ParseResult[List[T]] =
-        if (elems.length == num) Success(elems.toList, in0)
-        else p0(in0) match {
-          case Success(x, rest) => elems += x ; applyp(rest)
-          case ns: NoSuccess    => return ns
-        }
-
-      applyp(in)
-    }
-
-  /** A parser generator for non-empty repetitions.
-   *
-   *  `rep1sep(p, q)` repeatedly applies `p` interleaved with `q` to parse the
-   *  input, until `p` fails. The parser `p` must succeed at least once.
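-   *
-   *  A usage sketch (assuming a parser `term` and an implicit `String`-literal conversion, as in `RegexParsers`):
-   *  {{{
-   *  def args = rep1sep(term, ",")   // one or more terms separated by commas
-   *  }}}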
-   *
-   * @param p a `Parser` that is to be applied successively to the input
-   * @param q a `Parser` that parses the elements that separate the elements parsed by `p`
-   *          (interleaved with `q`)
-   * @return A parser that returns a list of results produced by repeatedly applying `p` to the input
-   *         (and that only succeeds if `p` matches at least once).
-   *         The results of `p` are collected in a list. The results of `q` are discarded.
-   */
-  def rep1sep[T](p : => Parser[T], q : => Parser[Any]): Parser[List[T]] =
-    p ~ rep(q ~> p) ^^ {case x~y => x::y}
-
-  /** A parser generator that, roughly, generalises the `rep1sep` generator so
-   *  that `q`, which parses the separator, produces a left-associative
-   *  function that combines the elements it separates.
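-   *
-   *  A left-associative arithmetic sketch (assuming `number: Parser[Int]` and `String`-literal parsers as in `RegexParsers`):
-   *  {{{
-   *  def sum = chainl1(number, "+" ^^^ ((a: Int, b: Int) => a + b) |
-   *                            "-" ^^^ ((a: Int, b: Int) => a - b))
-   *  }}}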
-   *
-   *  ''From: J. Fokker. Functional parsers. In J. Jeuring and E. Meijer, editors, Advanced Functional Programming,
-   *  volume 925 of Lecture Notes in Computer Science, pages 1--23. Springer, 1995.''
-   *
-   * @param p a parser that parses the elements
-   * @param q a parser that parses the token(s) separating the elements, yielding a left-associative function that
-   *          combines two elements into one
-   */
-  def chainl1[T](p: => Parser[T], q: => Parser[(T, T) => T]): Parser[T]
-    = chainl1(p, p, q)
-
-  /** A parser generator that, roughly, generalises the `rep1sep` generator
-   *  so that `q`, which parses the separator, produces a left-associative
-   *  function that combines the elements it separates.
-   *
-   * @param first a parser that parses the first element
-   * @param p a parser that parses the subsequent elements
-   * @param q a parser that parses the token(s) separating the elements,
-   *          yielding a left-associative function that combines two elements
-   *          into one
-   */
-  def chainl1[T, U](first: => Parser[T], p: => Parser[U], q: => Parser[(T, U) => T]): Parser[T]
-    = first ~ rep(q ~ p) ^^ {
-        case x ~ xs => xs.foldLeft(x: T){case (a, f ~ b) => f(a, b)} // x's type annotation is needed to deal with changed type inference due to SI-5189
-      }
-
-  /** A parser generator that generalises the `rep1sep` generator so that `q`,
-   *  which parses the separator, produces a right-associative function that
-   *  combines the elements it separates. Additionally, the right-most (last)
-   *  element and the left-most combining function have to be supplied.
-   *
-   *  `rep1sep(p: Parser[T], q)` corresponds to `chainr1(p, q ^^^ cons, cons, Nil)` (where `val cons = (x: T, y: List[T]) => x :: y`)
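-   *
-   *  For example, a sketch assuming `number: Parser[Int]` and `String`-literal parsers as in `RegexParsers`:
-   *  {{{
-   *  val cons = (x: Int, xs: List[Int]) => x :: xs
-   *  def ints = chainr1(number, "," ^^^ cons, cons, Nil: List[Int])   // behaves like rep1sep(number, ",")
-   *  }}}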
-   *
-   * @param p a parser that parses the elements
-   * @param q a parser that parses the token(s) separating the elements, yielding a right-associative function that
-   *          combines two elements into one
-   * @param combine the "last" (left-most) combination function to be applied
-   * @param first   the "first" (right-most) element to be combined
-   */
-  def chainr1[T, U](p: => Parser[T], q: => Parser[(T, U) => U], combine: (T, U) => U, first: U): Parser[U]
-    = p ~ rep(q ~ p) ^^ {
-        case x ~ xs => (new ~(combine, x) :: xs).foldRight(first){case (f ~ a, b) => f(a, b)}
-      }
-
-  /** A parser generator for optional sub-phrases.
-   *
-   *  `opt(p)` is a parser that returns `Some(x)` if `p` returns `x` and `None` if `p` fails.
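-   *
-   *  For example, a sketch assuming parsers `sign` and `number`:
-   *  {{{
-   *  def signedNumber = opt(sign) ~ number
-   *  }}}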
-   *
-   * @param p A `Parser` that is tried on the input
-   * @return a `Parser` that always succeeds: either with the result provided by `p` or
-   *         with the empty result
-   */
-  def opt[T](p: => Parser[T]): Parser[Option[T]] =
-    p ^^ (x => Some(x)) | success(None)
-
-  /** Wrap a parser so that its failures and errors become success and
-   *  vice versa -- it never consumes any input.
-   */
-  def not[T](p: => Parser[T]): Parser[Unit] = Parser { in =>
-    p(in) match {
-      case Success(_, _)  => Failure("Expected failure", in)
-      case _              => Success((), in)
-    }
-  }
-
-  /** A parser generator for guard expressions. The resulting parser will
-   *  fail or succeed just like the one given as parameter but it will not
-   *  consume any input.
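-   *
-   *  A look-ahead sketch (the parsers `letter` and `identifier` are assumed for illustration):
-   *  {{{
-   *  def identStart = guard(letter) ~> identifier   // peek at a letter, then parse the identifier from the same position
-   *  }}}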
-   *
-   * @param p a `Parser` that is to be applied to the input
-   * @return A parser that returns success if and only if `p` succeeds but
-   *         never consumes any input
-   */
-  def guard[T](p: => Parser[T]): Parser[T] = Parser { in =>
-    p(in) match{
-      case s@ Success(s1,_) => Success(s1, in)
-      case e => e
-    }
-  }
-
-  /** `positioned` decorates a parser's result with the start position of the
-   *  input it consumed.
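-   *
-   *  A usage sketch (assuming a case class `Num(value: Int) extends Positional` and a parser `number: Parser[Int]`):
-   *  {{{
-   *  def num = positioned(number ^^ (n => Num(n)))   // the resulting Num carries the position where its input started
-   *  }}}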
-   *
-   * @param p a `Parser` whose result conforms to `Positional`.
-   * @return A parser that has the same behaviour as `p`, but which marks its
-   *         result with the start position of the input it consumed,
-   *         if it didn't already have a position.
-   */
-  def positioned[T <: Positional](p: => Parser[T]): Parser[T] = Parser { in =>
-    p(in) match {
-      case Success(t, in1) => Success(if (t.pos == NoPosition) t setPos in.pos else t, in1)
-      case ns: NoSuccess => ns
-    }
-  }
-
-  /** A parser generator delimiting whole phrases (i.e. programs).
-   *
-   *  `phrase(p)` succeeds if `p` succeeds and no input is left over after `p`.
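-   *
-   *  A usage sketch (assuming a parser `expr` and an `Input` value `in`):
-   *  {{{
-   *  phrase(expr)(in)   // fails with "end of input expected" if anything is left unconsumed
-   *  }}}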
-   *
-   *  @param p the parser that must consume all input for the resulting parser
-   *           to succeed.
-   *  @return  a parser that has the same result as `p`, but that only succeeds
-   *           if `p` consumed all the input.
-   */
-  def phrase[T](p: Parser[T]) = new Parser[T] {
-    def apply(in: Input) = lastNoSuccessVar.withValue(None) {
-      p(in) match {
-        case s @ Success(out, in1) =>
-          if (in1.atEnd)
-            s
-          else
-            lastNoSuccessVar.value filterNot { _.next.pos < in1.pos } getOrElse Failure("end of input expected", in1)
-        case ns => lastNoSuccessVar.value.getOrElse(ns)
-      }
-    }
-  }
-
-  /** Given a concatenation with a repetition (list), move the concatenated element into the list */
-  def mkList[T] = (_: ~[T, List[T]]) match { case x ~ xs => x :: xs }
-
-  /** A wrapper over sequence of matches.
-   *
-   *  Given `p1: Parser[A]` and `p2: Parser[B]`, a parser composed with
-   *  `p1 ~ p2` will have type `Parser[~[A, B]]`. The successful result
-   *  of the parser can be extracted from this case class.
-   *
-   *  It also enables pattern matching, so something like this is possible:
-   *
-   *  {{{
-   *  def concat(p1: Parser[String], p2: Parser[String]): Parser[String] =
-   *    p1 ~ p2 ^^ { case a ~ b => a + b }
-   *  }}}
-   */
-  case class ~[+a, +b](_1: a, _2: b) {
-    override def toString = "("+ _1 +"~"+ _2 +")"
-  }
-
-  /** A parser whose `~` combinator disallows back-tracking.
-   */
-  trait OnceParser[+T] extends Parser[T] {
-    override def ~ [U](p: => Parser[U]): Parser[~[T, U]]
-      = OnceParser{ (for(a <- this; b <- commit(p)) yield new ~(a,b)).named("~") }
-  }
-}
diff --git a/src/library/scala/util/parsing/combinator/RegexParsers.scala b/src/library/scala/util/parsing/combinator/RegexParsers.scala
deleted file mode 100644
index d17d0ca..0000000
--- a/src/library/scala/util/parsing/combinator/RegexParsers.scala
+++ /dev/null
@@ -1,165 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.util.parsing.combinator
-
-import java.util.regex.Pattern
-import scala.util.matching.Regex
-import scala.util.parsing.input._
-import scala.collection.immutable.PagedSeq
-import scala.language.implicitConversions
-
-/** The ''most important'' differences between `RegexParsers` and
- *  [[scala.util.parsing.combinator.Parsers]] are:
- *
- *  - `Elem` is defined to be [[scala.Char]]
- *  - There's an implicit conversion from [[java.lang.String]] to `Parser[String]`,
- *    so that string literals can be used as parser combinators.
- *  - There's an implicit conversion from [[scala.util.matching.Regex]] to `Parser[String]`,
- *    so that regex expressions can be used as parser combinators.
- *  - The parsing methods call the method `skipWhitespace` (defaults to `true`) and, if true,
- *    skip any whitespace before each parser is called.
- *  - Protected val `whiteSpace` returns a regex that identifies whitespace.
- *
- *  For example, this creates a very simple calculator receiving `String` input:
- *
- *  {{{
- *  object Calculator extends RegexParsers {
- *    def number: Parser[Double] = """\d+(\.\d*)?""".r ^^ { _.toDouble }
- *    def factor: Parser[Double] = number | "(" ~> expr <~ ")"
- *    def term  : Parser[Double] = factor ~ rep( "*" ~ factor | "/" ~ factor) ^^ {
- *      case number ~ list => (number /: list) {
- *        case (x, "*" ~ y) => x * y
- *        case (x, "/" ~ y) => x / y
- *      }
- *    }
- *    def expr  : Parser[Double] = term ~ rep("+" ~ log(term)("Plus term") | "-" ~ log(term)("Minus term")) ^^ {
- *      case number ~ list => list.foldLeft(number) { // same as before, using alternate name for /:
- *        case (x, "+" ~ y) => x + y
- *        case (x, "-" ~ y) => x - y
- *      }
- *    }
- *
- *    def apply(input: String): Double = parseAll(expr, input) match {
- *      case Success(result, _) => result
- *      case failure : NoSuccess => scala.sys.error(failure.msg)
- *    }
- *  }
- *  }}}
- */
-trait RegexParsers extends Parsers {
-
-  type Elem = Char
-
-  protected val whiteSpace = """\s+""".r
-
-  def skipWhitespace = whiteSpace.toString.length > 0
-
-  /** Method called to handle whitespace before parsers.
-   *
-   *  It checks `skipWhitespace` and, if true, skips anything
-   *  matching `whiteSpace` starting from the current offset.
-   *
-   *  @param source  The input being parsed.
-   *  @param offset  The offset into `source` from which to match.
-   *  @return        The offset to be used for the next parser.
-   */
-  protected def handleWhiteSpace(source: java.lang.CharSequence, offset: Int): Int =
-    if (skipWhitespace)
-      (whiteSpace findPrefixMatchOf (source.subSequence(offset, source.length))) match {
-        case Some(matched) => offset + matched.end
-        case None => offset
-      }
-    else
-      offset
-
-  /** A parser that matches a literal string */
-  implicit def literal(s: String): Parser[String] = new Parser[String] {
-    def apply(in: Input) = {
-      val source = in.source
-      val offset = in.offset
-      val start = handleWhiteSpace(source, offset)
-      var i = 0
-      var j = start
-      while (i < s.length && j < source.length && s.charAt(i) == source.charAt(j)) {
-        i += 1
-        j += 1
-      }
-      if (i == s.length)
-        Success(source.subSequence(start, j).toString, in.drop(j - offset))
-      else  {
-        val found = if (start == source.length()) "end of source" else "`"+source.charAt(start)+"'"
-        Failure("`"+s+"' expected but "+found+" found", in.drop(start - offset))
-      }
-    }
-  }
-
-  /** A parser that matches a regex string */
-  implicit def regex(r: Regex): Parser[String] = new Parser[String] {
-    def apply(in: Input) = {
-      val source = in.source
-      val offset = in.offset
-      val start = handleWhiteSpace(source, offset)
-      (r findPrefixMatchOf (source.subSequence(start, source.length))) match {
-        case Some(matched) =>
-          Success(source.subSequence(start, start + matched.end).toString,
-                  in.drop(start + matched.end - offset))
-        case None =>
-          val found = if (start == source.length()) "end of source" else "`"+source.charAt(start)+"'"
-          Failure("string matching regex `"+r+"' expected but "+found+" found", in.drop(start - offset))
-      }
-    }
-  }
-
-  /** `positioned` decorates a parser's result with the start position of the input it consumed.
-   * If whitespace is being skipped, then it is skipped before the start position is recorded.
-   *
-   * @param p a `Parser` whose result conforms to `Positional`.
-   * @return A parser that has the same behaviour as `p`, but which marks its result with the
-   *         start position of the input it consumed after whitespace has been skipped, if it
-   *         didn't already have a position.
-   */
-  override def positioned[T <: Positional](p: => Parser[T]): Parser[T] = {
-    val pp = super.positioned(p)
-    new Parser[T] {
-      def apply(in: Input) = {
-        val offset = in.offset
-        val start = handleWhiteSpace(in.source, offset)
-        pp(in.drop (start - offset))
-      }
-    }
-  }
-
-  override def phrase[T](p: Parser[T]): Parser[T] =
-    super.phrase(p <~ opt("""\z""".r))
-
-  /** Parse some prefix of reader `in` with parser `p`. */
-  def parse[T](p: Parser[T], in: Reader[Char]): ParseResult[T] =
-    p(in)
-
-  /** Parse some prefix of character sequence `in` with parser `p`. */
-  def parse[T](p: Parser[T], in: java.lang.CharSequence): ParseResult[T] =
-    p(new CharSequenceReader(in))
-
-  /** Parse some prefix of reader `in` with parser `p`. */
-  def parse[T](p: Parser[T], in: java.io.Reader): ParseResult[T] =
-    p(new PagedSeqReader(PagedSeq.fromReader(in)))
-
-  /** Parse all of reader `in` with parser `p`. */
-  def parseAll[T](p: Parser[T], in: Reader[Char]): ParseResult[T] =
-    parse(phrase(p), in)
-
-  /** Parse all of reader `in` with parser `p`. */
-  def parseAll[T](p: Parser[T], in: java.io.Reader): ParseResult[T] =
-    parse(phrase(p), in)
-
-  /** Parse all of character sequence `in` with parser `p`. */
-  def parseAll[T](p: Parser[T], in: java.lang.CharSequence): ParseResult[T] =
-    parse(phrase(p), in)
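-
-  // A usage sketch: with the Calculator object from the trait documentation above,
-  // parseAll(expr, "2 * (3 + 7)") would yield Success(20.0, ...).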
-}
diff --git a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
deleted file mode 100644
index c25c972..0000000
--- a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.parsing
-package combinator
-package lexical
-
-import token._
-import input.CharArrayReader.EofCh
-
-/** This component complements the `Scanners` component with
- *  common operations for lexical parsers.
- *
- *  Refer to [[scala.util.parsing.combinator.lexical.StdLexical]]
- *  for a concrete implementation for a simple, Scala-like language.
- *
- * @author Martin Odersky, Adriaan Moors
- */
-abstract class Lexical extends Scanners with Tokens {
-
-  /** A character-parser that matches a letter (and returns it).*/
-  def letter = elem("letter", _.isLetter)
-
-  /** A character-parser that matches a digit (and returns it).*/
-  def digit = elem("digit", _.isDigit)
-
-  /** A character-parser that matches any character except the ones given in `cs` (and returns it).*/
-  def chrExcept(cs: Char*) = elem("", ch => (cs forall (ch != _)))
-
-  /** A character-parser that matches a white-space character (and returns it).*/
-  def whitespaceChar = elem("space char", ch => ch <= ' ' && ch != EofCh)
-}
diff --git a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
deleted file mode 100644
index 5c23ad7..0000000
--- a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.parsing
-package combinator
-package lexical
-
-import token._
-import input._
-
-/** This component provides core functionality for lexical parsers.
- *
- *  See its subclasses [[scala.util.parsing.combinator.lexical.Lexical]] and -- most interestingly
- *  -- [[scala.util.parsing.combinator.lexical.StdLexical]], for more functionality.
- *
- *  @author Martin Odersky, Adriaan Moors
- */
-trait Scanners extends Parsers {
-  type Elem = Char
-  type Token
-
-  /** This token is produced by a `Scanner` when scanning fails. */
-  def errorToken(msg: String): Token
-
-  /** A parser that produces a token (from a stream of characters). */
-  def token: Parser[Token]
-
-  /** A parser for white-space -- its result will be discarded. */
-  def whitespace: Parser[Any]
-
-  /** `Scanner` is essentially¹ a parser that produces `Token`s
-   *  from a stream of characters. The tokens it produces are typically
-   *  passed to parsers in `TokenParsers`.
-   *
-   *  @note ¹ `Scanner` is really a `Reader` of `Token`s
-   */
-  class Scanner(in: Reader[Char]) extends Reader[Token] {
-    /** Convenience constructor (makes a character reader out of the given string) */
-    def this(in: String) = this(new CharArrayReader(in.toCharArray()))
-    private val (tok, rest1, rest2) = whitespace(in) match {
-      case Success(_, in1) =>
-        token(in1) match {
-          case Success(tok, in2) => (tok, in1, in2)
-          case ns: NoSuccess => (errorToken(ns.msg), ns.next, skip(ns.next))
-        }
-      case ns: NoSuccess => (errorToken(ns.msg), ns.next, skip(ns.next))
-    }
-    private def skip(in: Reader[Char]) = if (in.atEnd) in else in.rest
-
-    override def source: java.lang.CharSequence = in.source
-    override def offset: Int = in.offset
-    def first = tok
-    def rest = new Scanner(rest2)
-    def pos = rest1.pos
-    def atEnd = in.atEnd || (whitespace(in) match { case Success(_, in1) => in1.atEnd case _ => false })
-  }
-}
-
diff --git a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
deleted file mode 100644
index f3491c0..0000000
--- a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.parsing
-package combinator
-package lexical
-
-import token._
-import input.CharArrayReader.EofCh
-import scala.collection.mutable
-
-/** This component provides a standard lexical parser for a simple,
- *  [[http://scala-lang.org Scala]]-like language. It parses keywords and
- *  identifiers, numeric literals (integers), strings, and delimiters.
- *
- *  To distinguish between identifiers and keywords, it uses a set of
- *  reserved identifiers:  every string contained in `reserved` is returned
- *  as a keyword token. (Note that `=>` is hard-coded as a keyword.)
- *  Additionally, the kinds of delimiters can be specified by the
- *  `delimiters` set.
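- *
- *  A configuration sketch (the keyword and delimiter choices are illustrative only):
- *  {{{
- *  val lexical = new StdLexical
- *  lexical.reserved ++= List("if", "then", "else")
- *  lexical.delimiters ++= List("(", ")", "+", "=")
- *  }}}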
- *
- *  Usually this component is used to break character-based input into
- *  bigger tokens, which are then passed to a token-parser (see
- *  [[scala.util.parsing.combinator.syntactical.TokenParsers]].)
- *
- * @author Martin Odersky
- * @author Iulian Dragos
- * @author Adriaan Moors
- */
-class StdLexical extends Lexical with StdTokens {
-  // see `token` in `Scanners`
-  def token: Parser[Token] =
-    ( identChar ~ rep( identChar | digit )              ^^ { case first ~ rest => processIdent(first :: rest mkString "") }
-    | digit ~ rep( digit )                              ^^ { case first ~ rest => NumericLit(first :: rest mkString "") }
-    | '\'' ~ rep( chrExcept('\'', '\n', EofCh) ) ~ '\'' ^^ { case '\'' ~ chars ~ '\'' => StringLit(chars mkString "") }
-    | '\"' ~ rep( chrExcept('\"', '\n', EofCh) ) ~ '\"' ^^ { case '\"' ~ chars ~ '\"' => StringLit(chars mkString "") }
-    | EofCh                                             ^^^ EOF
-    | '\'' ~> failure("unclosed string literal")
-    | '\"' ~> failure("unclosed string literal")
-    | delim
-    | failure("illegal character")
-    )
-
-  /** Returns the legal identifier chars, except digits. */
-  def identChar = letter | elem('_')
-
-  // see `whitespace` in `Scanners`
-  def whitespace: Parser[Any] = rep(
-      whitespaceChar
-    | '/' ~ '*' ~ comment
-    | '/' ~ '/' ~ rep( chrExcept(EofCh, '\n') )
-    | '/' ~ '*' ~ failure("unclosed comment")
-    )
-
-  protected def comment: Parser[Any] = (
-      '*' ~ '/'  ^^ { case _ => ' '  }
-    | chrExcept(EofCh) ~ comment
-    )
-
-  /** The set of reserved identifiers: these will be returned as `Keyword`s. */
-  val reserved = new mutable.HashSet[String]
-
-  /** The set of delimiters (ordering does not matter). */
-  val delimiters = new mutable.HashSet[String]
-
-  protected def processIdent(name: String) =
-    if (reserved contains name) Keyword(name) else Identifier(name)
-
-  private lazy val _delim: Parser[Token] = {
-    // construct parser for delimiters by |'ing together the parsers for the individual delimiters,
-    // starting with the longest one -- otherwise a delimiter D will never be matched if there is
-    // another delimiter that is a prefix of D
-    def parseDelim(s: String): Parser[Token] = accept(s.toList) ^^ { x => Keyword(s) }
-
-    val d = new Array[String](delimiters.size)
-    delimiters.copyToArray(d, 0)
-    scala.util.Sorting.quickSort(d)
-    (d.toList map parseDelim).foldRight(failure("no matching delimiter"): Parser[Token])((x, y) => y | x)
-  }
-  protected def delim: Parser[Token] = _delim
-}
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
deleted file mode 100644
index d3ae0ea..0000000
--- a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.util.parsing
-package combinator
-package syntactical
-
-import token._
-import lexical.StdLexical
-import scala.language.implicitConversions
-
-/** This component provides primitive parsers for the standard tokens defined in `StdTokens`.
-*
-* @author Martin Odersky, Adriaan Moors
- */
-class StandardTokenParsers extends StdTokenParsers {
-  type Tokens = StdTokens
-  val lexical = new StdLexical
-
-  // an implicit keyword function that produces an explanatory failure when a given word is not in the reserved/delimiters list
-  override implicit def keyword(chars : String): Parser[String] =
-    if(lexical.reserved.contains(chars) || lexical.delimiters.contains(chars)) super.keyword(chars)
-    else failure("You are trying to parse \""+chars+"\", but it is neither contained in the delimiters list, nor in the reserved keyword list of your lexical object")
-
-}
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
deleted file mode 100644
index 7283b01..0000000
--- a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.parsing
-package combinator
-package syntactical
-
-import token._
-import scala.collection.mutable
-import scala.language.implicitConversions
-
-/** This component provides primitive parsers for the standard tokens defined in `StdTokens`.
-*
-* @author Martin Odersky, Adriaan Moors
- */
-trait StdTokenParsers extends TokenParsers {
-  type Tokens <: StdTokens
-  import lexical.{Keyword, NumericLit, StringLit, Identifier}
-
-  protected val keywordCache = mutable.HashMap[String, Parser[String]]()
-
-  /** A parser which matches a single keyword token.
-   *
-   * @param chars    The character string making up the matched keyword.
-   * @return a `Parser` that matches the given string
-   */
-//  implicit def keyword(chars: String): Parser[String] = accept(Keyword(chars)) ^^ (_.chars)
-  implicit def keyword(chars: String): Parser[String] =
-    keywordCache.getOrElseUpdate(chars, accept(Keyword(chars)) ^^ (_.chars))
-
-  /** A parser which matches a numeric literal */
-  def numericLit: Parser[String] =
-    elem("number", _.isInstanceOf[NumericLit]) ^^ (_.chars)
-
-  /** A parser which matches a string literal */
-  def stringLit: Parser[String] =
-    elem("string literal", _.isInstanceOf[StringLit]) ^^ (_.chars)
-
-  /** A parser which matches an identifier */
-  def ident: Parser[String] =
-    elem("identifier", _.isInstanceOf[Identifier]) ^^ (_.chars)
-}
-
-
diff --git a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
deleted file mode 100644
index 1c4b25b..0000000
--- a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.util.parsing
-package combinator
-package syntactical
-
-/** This is the core component for token-based parsers.
- *
- *  @author Martin Odersky
- *  @author Adriaan Moors
- */
-trait TokenParsers extends Parsers {
-  /** `Tokens` is the abstract type of the `Token`s consumed by the parsers in this component. */
-  type Tokens <: token.Tokens
-
-  /** `lexical` is the component responsible for consuming some basic kind of
-   *  input (usually character-based) and turning it into the tokens
-   *  understood by these parsers.
-   */
-  val lexical: Tokens
-
-  /** The input-type for these parsers*/
-  type Elem = lexical.Token
-
-}
-
-
diff --git a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala b/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
deleted file mode 100644
index 80e9b0d..0000000
--- a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-
-package scala.util.parsing.combinator.testing
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.input._
-import scala.language.postfixOps
-
-@deprecated("This class will be removed", "2.10.0")
-case class Ident(s: String)
-@deprecated("This class will be removed", "2.10.0")
-case class Number(n: Int)
-@deprecated("This class will be removed", "2.10.0")
-case class Str(s: String)
-
-@deprecated("This class will be removed", "2.10.0")
-object RegexTest extends RegexParsers {
-  val ident: Parser[Any] = """[a-zA-Z_]\w*""".r ^^ (s => Ident(s))
-  val number: Parser[Any] = """\d\d*""".r ^^ (s => Number(s.toInt))
-  val string: Parser[Any] = "\".*\"".r ^^ (s => Str(s.substring(1, s.length - 1)))
-  val parser = (ident | number | string)*
-
-  def main(args: Array[String]) = {
-    val in = args mkString " "
-    println("\nin : "+in)
-    println(phrase[Any](parser)(new CharSequenceReader(in)))
-  }
-}
diff --git a/src/library/scala/util/parsing/combinator/testing/Tester.scala b/src/library/scala/util/parsing/combinator/testing/Tester.scala
deleted file mode 100644
index 95730ee..0000000
--- a/src/library/scala/util/parsing/combinator/testing/Tester.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.parsing.combinator.testing
-import scala.util.parsing.combinator._
-
-import scala.util.parsing.combinator.lexical.Lexical
-import scala.util.parsing.combinator.syntactical.TokenParsers
-
-/** Facilitates testing a given parser on various input strings.
- *
- *  Example use:
- *  {{{
- *    val syntactic = new MyParsers
- *  }}}
- *  and
- *  {{{
- *    val parser = syntactic.term
- *  }}}
- *  (If `MyParsers` extends [[scala.util.parsing.combinator.syntactical.TokenParsers]]
- *  with a parser called `term`.)
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-@deprecated("This class will be removed", "2.10.0")
-abstract class Tester {
-
-  val syntactic: TokenParsers { val lexical: Lexical }
-  val parser: syntactic.Parser[Any]
-
-  /** Scans a String (using a `syntactic.lexical.Scanner`), parses it using
-   *  `phrase(parser)`, and prints the input and the parsed result to the
-   *  console.
-   */
-  def test(in: String) {
-    Console.println("\nin : "+in)
-    Console.println(syntactic.phrase[Any](parser)(new syntactic.lexical.Scanner(in)))
-  }
-}
diff --git a/src/library/scala/util/parsing/combinator/token/StdTokens.scala b/src/library/scala/util/parsing/combinator/token/StdTokens.scala
deleted file mode 100644
index 605f53b..0000000
--- a/src/library/scala/util/parsing/combinator/token/StdTokens.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |                                         **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.parsing
-package combinator
-package token
-
-/** This component provides the standard `Token`s for a simple, Scala-like language.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-trait StdTokens extends Tokens {
-  /** The class of keyword tokens */
-  case class Keyword(chars: String) extends Token {
-    override def toString = "`"+chars+"'"
-  }
-
-  /** The class of numeric literal tokens */
-  case class NumericLit(chars: String) extends Token {
-    override def toString = chars
-  }
-
-  /** The class of string literal tokens */
-  case class StringLit(chars: String) extends Token {
-    override def toString = "\""+chars+"\""
-  }
-
-  /** The class of identifier tokens */
-  case class Identifier(chars: String) extends Token {
-    override def toString = "identifier "+chars
-  }
-}
diff --git a/src/library/scala/util/parsing/combinator/token/Tokens.scala b/src/library/scala/util/parsing/combinator/token/Tokens.scala
deleted file mode 100644
index ff92802..0000000
--- a/src/library/scala/util/parsing/combinator/token/Tokens.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |                                         **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.parsing
-package combinator
-package token
-
-/** This component provides the notion of `Token`, the unit of information that is passed from lexical
- * parsers in the `Lexical` component to the parsers in the `TokenParsers` component.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-trait Tokens {
-  /** Objects of this type are produced by a lexical parser or ``scanner'', and consumed by a parser.
-   *
-   *  @see [[scala.util.parsing.combinator.syntactical.TokenParsers]]
-   */
-  abstract class Token {
-    def chars: String
-  }
-
-  /** A class of error tokens. Error tokens are used to communicate
-   *  errors detected during lexical analysis
-   */
-  case class ErrorToken(msg: String) extends Token {
-    def chars = "*** error: "+msg
-  }
-
-  /** A class for end-of-file tokens */
-  case object EOF extends Token {
-    def chars = "<eof>"
-  }
-
-  /** This token is produced by a `Scanner` when scanning fails. */
-  def errorToken(msg: String): Token = new ErrorToken(msg)
-}
diff --git a/src/library/scala/util/parsing/input/CharArrayReader.scala b/src/library/scala/util/parsing/input/CharArrayReader.scala
deleted file mode 100644
index 3ba69b2..0000000
--- a/src/library/scala/util/parsing/input/CharArrayReader.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.util.parsing.input
-
-/** An object encapsulating basic character constants.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-object CharArrayReader {
-  final val EofCh = '\032'
-}
-
-/** A character array reader reads a stream of characters (keeping track of their positions)
- * from an array.
- *
- * @param chars  an array of characters
- * @param index  starting offset into the array; the first element returned will be `source(index)`
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-class CharArrayReader(chars: Array[Char], index: Int) extends CharSequenceReader(chars, index) {
-
-  def this(chars: Array[Char]) = this(chars, 0)
-
-}
diff --git a/src/library/scala/util/parsing/input/CharSequenceReader.scala b/src/library/scala/util/parsing/input/CharSequenceReader.scala
deleted file mode 100644
index 02aa2ab..0000000
--- a/src/library/scala/util/parsing/input/CharSequenceReader.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.util.parsing.input
-
-/** An object encapsulating basic character constants.
- *
- * @author Martin Odersky, Adriaan Moors
- */
-object CharSequenceReader {
-  final val EofCh = '\032'
-}
-
-/** A character sequence reader reads a stream of characters (keeping track of their positions)
- * from a `CharSequence`.
- *
- * @param source the source sequence
- * @param offset  starting offset.
- *
- * @author Martin Odersky
- */
-class CharSequenceReader(override val source: java.lang.CharSequence,
-                         override val offset: Int) extends Reader[Char] {
-  import CharSequenceReader._
-
-  /** Construct a `CharSequenceReader` with its first element at
-   *  `source(0)` and position `(1,1)`.
-   */
-  def this(source: java.lang.CharSequence) = this(source, 0)
-
-  /** Returns the first element of the reader, or EofCh if reader is at its end.
-   */
-  def first =
-    if (offset < source.length) source.charAt(offset) else EofCh
-
-  /** Returns a CharSequenceReader consisting of all elements except the first.
-   *
-   * @return If `atEnd` is `true`, the result will be `this`;
-   *         otherwise, it's a `CharSequenceReader` containing the rest of input.
-   */
-  def rest: CharSequenceReader =
-    if (offset < source.length) new CharSequenceReader(source, offset + 1)
-    else this
-
-  /** The position of the first element in the reader.
-   */
-  def pos: Position = new OffsetPosition(source, offset)
-
-  /** true iff there are no more elements in this reader (except for trailing
-   *  EofCh's)
-   */
-  def atEnd = offset >= source.length
-
-  /** Returns an abstract reader consisting of all elements except the first
-   *  `n` elements.
-   */
-  override def drop(n: Int): CharSequenceReader =
-    new CharSequenceReader(source, offset + n)
-}
diff --git a/src/library/scala/util/parsing/input/NoPosition.scala b/src/library/scala/util/parsing/input/NoPosition.scala
deleted file mode 100644
index 40584b3..0000000
--- a/src/library/scala/util/parsing/input/NoPosition.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.parsing.input
-
-/** Undefined position.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-object NoPosition extends Position {
-  def line = 0
-  def column = 0
-  override def toString = "<undefined position>"
-  override def longString = toString
-  def lineContents = ""
-}
diff --git a/src/library/scala/util/parsing/input/OffsetPosition.scala b/src/library/scala/util/parsing/input/OffsetPosition.scala
deleted file mode 100644
index 01d9ea5..0000000
--- a/src/library/scala/util/parsing/input/OffsetPosition.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.parsing.input
-
-import scala.collection.mutable.ArrayBuffer
-
-/** `OffsetPosition` is a standard class for positions
- *   represented as offsets into a source ``document''.
- *
- *   @param source   The source document
- *   @param offset   The offset indicating the position
- *
- * @author Martin Odersky
- */
-case class OffsetPosition(source: java.lang.CharSequence, offset: Int) extends Position {
-
-  /** An index that contains all line starts, including first line, and eof. */
-  private lazy val index: Array[Int] = {
-    var lineStarts = new ArrayBuffer[Int]
-    lineStarts += 0
-    for (i <- 0 until source.length)
-      if (source.charAt(i) == '\n') lineStarts += (i + 1)
-    lineStarts += source.length
-    lineStarts.toArray
-  }
-
-  /** The line number referred to by the position; line numbers start at 1. */
-  def line: Int = {
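-    // binary search in the line-start index for the line containing `offset`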
-    var lo = 0
-    var hi = index.length - 1
-    while (lo + 1 < hi) {
-      val mid = (hi + lo) / 2
-      if (offset < index(mid)) hi = mid
-      else lo = mid
-    }
-    lo + 1
-  }
-
-  /** The column number referred to by the position; column numbers start at 1. */
-  def column: Int = offset - index(line - 1) + 1
-
-  /** The contents of the line at the current offset.
-   *
-   * @return the line at `offset` (not including a newline)
-   */
-  def lineContents: String =
-    source.subSequence(index(line - 1), index(line)).toString
-
-  /** Returns a string representation of the `Position`, of the form `line.column`. */
-  override def toString = line+"."+column
-
-  /** Compare this position to another, by first comparing their line numbers,
-   * and then -- if necessary -- using the columns to break a tie.
-   *
-   * @param  that a `Position` to compare to this `Position`
-   * @return true if this position's line number or (in case of equal line numbers)
-   *         column is smaller than the corresponding components of `that`
-   */
-  override def <(that: Position) = that match {
-    case OffsetPosition(_, that_offset) =>
-      this.offset < that_offset
-    case _ =>
-      this.line < that.line ||
-      this.line == that.line && this.column < that.column
-  }
-}
diff --git a/src/library/scala/util/parsing/input/PagedSeqReader.scala b/src/library/scala/util/parsing/input/PagedSeqReader.scala
deleted file mode 100644
index 9140bf2..0000000
--- a/src/library/scala/util/parsing/input/PagedSeqReader.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.util.parsing.input
-
-import scala.collection.immutable.PagedSeq
-
-/** An object encapsulating basic character constants.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-object PagedSeqReader {
-  final val EofCh = '\032'
-}
-
-/** A paged sequence reader reads a stream of characters (keeping track of their positions)
- * from a `PagedSeq`.
- *
- * @param seq     the source sequence
- * @param offset  starting offset.
- *
- * @author Martin Odersky
- */
-class PagedSeqReader(seq: PagedSeq[Char],
-                     override val offset: Int) extends Reader[Char] {
-  import PagedSeqReader._
-
-  override lazy val source: java.lang.CharSequence = seq
-
-  /** Construct a `PagedSeqReader` with its first element at
-   *  `source(0)` and position `(1,1)`.
-   */
-  def this(seq: PagedSeq[Char]) = this(seq, 0)
-
-  /** Returns the first element of the reader, or EofCh if reader is at its end
-   */
-  def first =
-    if (seq.isDefinedAt(offset)) seq(offset) else EofCh
-
-  /** Returns a PagedSeqReader consisting of all elements except the first
-   *
-   * @return If `atEnd` is `true`, the result will be `this`;
-   *         otherwise, it's a `PagedSeqReader` containing the rest of input.
-   */
-  def rest: PagedSeqReader =
-    if (seq.isDefinedAt(offset)) new PagedSeqReader(seq, offset + 1)
-    else this
-
-  /** The position of the first element in the reader.
-   */
-  def pos: Position = new OffsetPosition(source, offset)
-
-  /** true iff there are no more elements in this reader (except for trailing
-   *  EofCh's).
-   */
-  def atEnd = !seq.isDefinedAt(offset)
-
-  /** Returns an abstract reader consisting of all elements except the first
-   *  `n` elements.
-   */
-  override def drop(n: Int): PagedSeqReader =
-    new PagedSeqReader(seq, offset + n)
-}
diff --git a/src/library/scala/util/parsing/input/Position.scala b/src/library/scala/util/parsing/input/Position.scala
deleted file mode 100644
index 31715bd..0000000
--- a/src/library/scala/util/parsing/input/Position.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.parsing.input
-
-/** `Position` is the base trait for objects describing a position in a ``document''.
- *
- *  It provides functionality for:
- *   - generating a visual representation of this position (`longString`);
- *   - comparing two positions (`<`).
- *
- *  To use this class for a concrete kind of ``document'', implement the `lineContents` method.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-trait Position {
-
-  /** The line number referred to by the position; line numbers start at 1. */
-  def line: Int
-
-  /** The column number referred to by the position; column numbers start at 1. */
-  def column: Int
-
-  /** The contents of the line at this position. (must not contain a new-line character).
-   */
-  protected def lineContents: String
-
-  /** Returns a string representation of the `Position`, of the form `line.column`. */
-  override def toString = ""+line+"."+column
-
-  /** Returns a more ``visual'' representation of this position.
-   *  More precisely, the resulting string consists of two lines:
-   *   1. the line in the document referred to by this position
-   *   2. a caret indicating the column
-   *
-   *  Example:
-   *  {{{
-   *    List(this, is, a, line, from, the, document)
-   *                 ^
-   *  }}}
-   */
-  def longString = lineContents+"\n"+lineContents.take(column-1).map{x => if (x == '\t') x else ' ' } + "^"
-
-  /** Compare this position to another, by first comparing their line numbers,
-   * and then -- if necessary -- using the columns to break a tie.
-   *
-   * @param `that` a `Position` to compare to this `Position`
-   * @return true if this position's line number or (in case of equal line numbers)
-   *         column is smaller than the corresponding components of `that`
-   */
-  def <(that: Position) = {
-    this.line < that.line ||
-    this.line == that.line && this.column < that.column
-  }
-}
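
A short usage sketch of the Position contract, shown through OffsetPosition, the standard concrete implementation in the same package:

    import scala.util.parsing.input.OffsetPosition

    val pos = OffsetPosition("val x = ?", 8)
    pos.line                  // 1
    pos.column                // 9
    println(pos.longString)   // the source line plus a caret under column 9:
                              // val x = ?
                              //         ^
    pos < OffsetPosition("val x = ?", 9)   // true: same line, smaller column
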
diff --git a/src/library/scala/util/parsing/input/Positional.scala b/src/library/scala/util/parsing/input/Positional.scala
deleted file mode 100644
index 87cb16e..0000000
--- a/src/library/scala/util/parsing/input/Positional.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |                                         **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.parsing.input
-
-/** A trait for objects that have a source position.
- *
- * @author Martin Odersky, Adriaan Moors
- */
-trait Positional {
-
-  /** The source position of this object, initially set to undefined. */
-  var pos: Position = NoPosition
-
-  /** If the current source position is undefined, update it with the given position `newpos`
-   *  @return  the object itself
-   */
-  def setPos(newpos: Position): this.type = {
-    if (pos eq NoPosition) pos = newpos
-    this
-  }
-}
-
-
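
Positional is normally combined with the positioned combinator from scala.util.parsing.combinator.Parsers, which calls setPos on the freshly parsed value. A minimal sketch (Ident and IdentParser are illustrative names, not part of the library):

    import scala.util.parsing.combinator.RegexParsers
    import scala.util.parsing.input.Positional

    case class Ident(name: String) extends Positional

    object IdentParser extends RegexParsers {
      // positioned(...) records the start position of the match via setPos
      def ident: Parser[Ident] = positioned("""[a-z]+""".r ^^ Ident)
    }

    IdentParser.parse(IdentParser.ident, "  foo") match {
      case IdentParser.Success(id, _) => println(id.pos)   // 1.3
      case other                      => println(other)
    }
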
diff --git a/src/library/scala/util/parsing/input/Reader.scala b/src/library/scala/util/parsing/input/Reader.scala
deleted file mode 100644
index bded57b..0000000
--- a/src/library/scala/util/parsing/input/Reader.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.parsing.input
-
-
-/** An interface for streams of values that have positions.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-abstract class Reader[+T] {
-
-  /** If this is a reader over character sequences, the underlying char sequence.
-   *  If not, throws a `NoSuchMethodError` exception.
-   *
-   *  @throws [[java.lang.NoSuchMethodError]] if this not a char sequence reader.
-   */
-  def source: java.lang.CharSequence =
-    throw new NoSuchMethodError("not a char sequence reader")
-
-  def offset: Int =
-    throw new NoSuchMethodError("not a char sequence reader")
-
-   /** Returns the first element of the reader
-    */
-  def first: T
-
-  /** Returns an abstract reader consisting of all elements except the first
-   *
-   * @return If `atEnd` is `true`, the result will be `this`;
-   *         otherwise, it's a `Reader` containing more elements.
-   */
-  def rest: Reader[T]
-
-  /** Returns an abstract reader consisting of all elements except the first `n` elements.
-   */
-  def drop(n: Int): Reader[T] = {
-    var r: Reader[T] = this
-    var cnt = n
-    while (cnt > 0) {
-      r = r.rest; cnt -= 1
-    }
-    r
-  }
-
-  /** The position of the first element in the reader.
-   */
-  def pos: Position
-
-  /** `true` iff there are no more elements in this reader.
-   */
-  def atEnd: Boolean
-}
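
Since Reader is the interface the combinator parsers pull their input from, a custom token reader only needs first, rest, pos and atEnd; drop comes from the default implementation above. A minimal sketch over a plain List (ListReader is an illustrative name, not part of the library):

    import scala.util.parsing.input.{ NoPosition, Position, Reader }

    class ListReader[T](elems: List[T]) extends Reader[T] {
      def first: T        = elems.head
      def rest: Reader[T] = new ListReader(elems.tail)
      def pos: Position   = NoPosition    // this toy reader does not track positions
      def atEnd: Boolean  = elems.isEmpty
    }

    new ListReader(List(1, 2, 3)).drop(2).first   // 3, via the inherited drop
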
diff --git a/src/library/scala/util/parsing/input/StreamReader.scala b/src/library/scala/util/parsing/input/StreamReader.scala
deleted file mode 100644
index ba7ab65..0000000
--- a/src/library/scala/util/parsing/input/StreamReader.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.parsing.input
-
-import java.io.BufferedReader
-import scala.collection.immutable.PagedSeq
-
-/** An object to create a `StreamReader` from a `java.io.Reader`.
- *
- * @author Miles Sabin
- */
-object StreamReader {
-  final val EofCh = '\032'
-
-  /** Create a `StreamReader` from a `java.io.Reader`.
-   *
-   * @param in the `java.io.Reader` that provides the underlying
-   *           stream of characters for this Reader.
-   */  
-  def apply(in: java.io.Reader): StreamReader = {
-    new StreamReader(PagedSeq.fromReader(in), 0, 1)
-  }
-}
-
-/** A StreamReader reads from a character sequence, typically created as a PagedSeq
- *  from a java.io.Reader
- *
- *  NOTE:
- *  StreamReaders do not really fulfill the new contract for readers, which
- *  requires a `source` CharSequence representing the full input.
- *  Instead, the source is treated line by line.
- *  As a consequence, regex matching cannot extend beyond a single line
- *  when a StreamReader is used for input.
- *
- *  If you need to match regexes spanning several lines you should consider
- *  class `PagedSeqReader` instead.
- *
- *  @author Miles Sabin
- *  @author Martin Odersky
- */
-sealed class StreamReader(seq: PagedSeq[Char], off: Int, lnum: Int) extends PagedSeqReader(seq, off) {
-  import StreamReader._
-
-  override def rest: StreamReader =
-    if (off == seq.length) this
-    else if (seq(off) == '\n')
-      new StreamReader(seq.slice(off + 1), 0, lnum + 1)
-    else new StreamReader(seq, off + 1, lnum)
-
-  private def nextEol = {
-    var i = off
-    while (i < seq.length && seq(i) != '\n' && seq(i) != EofCh) i += 1
-    i
-  }
-
-  override def drop(n: Int): StreamReader = {
-    val eolPos = nextEol
-    if (eolPos < off + n && eolPos < seq.length)
-      new StreamReader(seq.slice(eolPos + 1), 0, lnum + 1).drop(off + n - (eolPos + 1))
-    else
-      new StreamReader(seq, off + n, lnum)
-  }
-
-  override def pos: Position = new Position {
-    def line = lnum
-    def column = off + 1
-    def lineContents = seq.slice(0, nextEol).toString
-  }
-}
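
A sketch of the line/column bookkeeping described above, reading from an in-memory java.io.Reader:

    import java.io.StringReader
    import scala.util.parsing.input.StreamReader

    val in = StreamReader(new StringReader("line one\nline two"))
    in.pos.line           // 1
    in.pos.column         // 1
    in.drop(9).pos.line   // 2: dropping past the '\n' advances the line counter
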
diff --git a/src/library/scala/util/parsing/json/JSON.scala b/src/library/scala/util/parsing/json/JSON.scala
deleted file mode 100644
index 2f450ed..0000000
--- a/src/library/scala/util/parsing/json/JSON.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.util.parsing.json
-import scala.util.parsing.combinator._
-import scala.util.parsing.combinator.syntactical._
-import scala.util.parsing.combinator.lexical._
-
-/**
- * This object provides a simple interface to the JSON parser class.
- * The default conversion for numerics is into a double. If you wish to
- * override this behavior at the global level, you can set the
- * `globalNumberParser` property to your own `(String => Any)` function.
- * If you only want to override at the per-thread level then you can set
- * the `perThreadNumberParser` property to your function. For example:
- * {{{
- * val myConversionFunc = {input : String => BigDecimal(input)}
- *
- * // Global override
- * JSON.globalNumberParser = myConversionFunc
- *
- * // Per-thread override
- * JSON.perThreadNumberParser = myConversionFunc
- * }}}
- *
- * @author Derek Chen-Becker <"java"+ at +"chen-becker"+"."+"org">
- */
-object JSON extends Parser {
-
-  /**
-   * This method converts ''raw'' results back into the original, deprecated
-   * form.
-   */
-  private def unRaw (in : Any) : Any = in match {
-    case JSONObject(obj) => obj.map({ case (k,v) => (k,unRaw(v))}).toList
-    case JSONArray(list) => list.map(unRaw)
-    case x => x
-  }
-
-  /**
-   * Parse the given `JSON` string and return a list of elements. If the
-   * string is a `JSON` object it will be a `JSONObject`. If it's a `JSON`
-   * array it will be a `JSONArray`.
-   *
-   * @param input the given `JSON` string.
-   * @return      an optional `JSONType` element.
-   */
-  def parseRaw(input : String) : Option[JSONType] =
-    phrase(root)(new lexical.Scanner(input)) match {
-      case Success(result, _) => Some(result)
-      case _ => None
-    }
-
-  /**
-   * Parse the given `JSON` string and return either a `List[Any]`
-   * if the `JSON` string specifies an `Array`, or a
-   * `Map[String,Any]` if the `JSON` string specifies an object.
-   *
-   * @param input the given `JSON` string.
-   * @return      an optional list or map.
-   */
-  def parseFull(input: String): Option[Any] =
-    parseRaw(input) match {
-      case Some(data) => Some(resolveType(data))
-      case None => None
-    }
-
-  /**
-   * A utility method to resolve a parsed `JSON` list into objects or
-   * arrays. See the `parse` method for details.
-   */
-  def resolveType(input: Any): Any = input match {
-    case JSONObject(data) => data.transform {
-      case (k,v) => resolveType(v)
-    }
-    case JSONArray(data) => data.map(resolveType)
-    case x => x
-  }
-
-  /**
-   * The global (VM) default function for converting a string to a numeric value.
-   */
-  def globalNumberParser_=(f: NumericParser) { defaultNumberParser = f }
-  def globalNumberParser : NumericParser = defaultNumberParser
-
-  /**
-   * Defines the function used to convert a numeric string literal into a
-   * numeric format on a per-thread basis. Use `globalNumberParser` for a
-   * global override.
-   */
-   def perThreadNumberParser_=(f : NumericParser) { numberParser.set(f) }
-   def perThreadNumberParser : NumericParser = numberParser.get()
-}
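
For reference, typical use of the removed JSON facade, including the number-parser hooks documented above (numbers convert to Double by default):

    import scala.util.parsing.json.JSON

    JSON.parseFull("""{"name": "scala", "version": 2.11}""")
    // Some(Map(name -> scala, version -> 2.11))

    // Install a different numeric conversion, here per thread as in the doc comment above.
    JSON.perThreadNumberParser = { input: String => BigDecimal(input) }
    JSON.parseFull("""[1, 2.5]""")
    // Some(List(1, 2.5)), now holding BigDecimal values
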
diff --git a/src/library/scala/util/parsing/json/Lexer.scala b/src/library/scala/util/parsing/json/Lexer.scala
deleted file mode 100644
index 991b5d5..0000000
--- a/src/library/scala/util/parsing/json/Lexer.scala
+++ /dev/null
@@ -1,89 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.parsing.json
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.combinator.syntactical._
-import scala.util.parsing.combinator.lexical._
-import scala.util.parsing.input.CharArrayReader.EofCh
-
-/**
- *  @author Derek Chen-Becker <"java"+ at +"chen-becker"+"."+"org">
- */
-class Lexer extends StdLexical with ImplicitConversions {
-
-  override def token: Parser[Token] =
-    //( '\"' ~ rep(charSeq | letter) ~ '\"' ^^ lift(StringLit)
-    ( string ^^ StringLit
-    | number ~ letter ^^ { case n ~ l => ErrorToken("Invalid number format : " + n + l) }
-    | '-' ~> whitespace ~ number ~ letter ^^ { case ws ~ num ~ l => ErrorToken("Invalid number format : -" + num + l) }
-    | '-' ~> whitespace ~ number ^^ { case ws ~ num => NumericLit("-" + num) }
-    | number ^^ NumericLit
-    | EofCh ^^^ EOF
-    | delim
-    | '\"' ~> failure("Unterminated string")
-    | rep(letter) ^^ checkKeyword
-    | failure("Illegal character")
-    )
-
-  def checkKeyword(xs : List[Any]) = {
-    val strRep = xs mkString ""
-    if (reserved contains strRep) Keyword(strRep) else ErrorToken("Not a keyword: " + strRep)
-  }
-
-  /** A string is a collection of zero or more Unicode characters, wrapped in
-   *  double quotes, using backslash escapes (cf. http://www.json.org/).
-   */
-  def string = '\"' ~> rep(charSeq | chrExcept('\"', '\n', EofCh)) <~ '\"' ^^ { _ mkString "" }
-
-  override def whitespace = rep(whitespaceChar)
-
-  def number = intPart ~ opt(fracPart) ~ opt(expPart) ^^ { case i ~ f ~ e =>
-    i + optString(".", f) + optString("", e)
-  }
-  def intPart = zero | intList
-  def intList = nonzero ~ rep(digit) ^^ {case x ~ y => (x :: y) mkString ""}
-  def fracPart = '.' ~> rep(digit) ^^ { _ mkString "" }
-  def expPart = exponent ~ opt(sign) ~ rep1(digit) ^^ { case e ~ s ~ d =>
-    e + optString("", s) + d.mkString("")
-  }
-
-  private def optString[A](pre: String, a: Option[A]) = a match {
-    case Some(x) => pre + x.toString
-    case None => ""
-  }
-
-  def zero: Parser[String] = '0' ^^^ "0"
-  def nonzero = elem("nonzero digit", d => d.isDigit && d != '0')
-  def exponent = elem("exponent character", d => d == 'e' || d == 'E')
-  def sign = elem("sign character", d => d == '-' || d == '+')
-
-  def charSeq: Parser[String] =
-    ('\\' ~ '\"' ^^^ "\""
-    |'\\' ~ '\\' ^^^ "\\"
-    |'\\' ~ '/'  ^^^ "/"
-    |'\\' ~ 'b'  ^^^ "\b"
-    |'\\' ~ 'f'  ^^^ "\f"
-    |'\\' ~ 'n'  ^^^ "\n"
-    |'\\' ~ 'r'  ^^^ "\r"
-    |'\\' ~ 't'  ^^^ "\t"
-    |'\\' ~> 'u' ~> unicodeBlock)
-
-  val hexDigits = Set[Char]() ++ "0123456789abcdefABCDEF".toArray
-  def hexDigit = elem("hex digit", hexDigits.contains(_))
-
-  private def unicodeBlock = hexDigit ~ hexDigit ~ hexDigit ~ hexDigit ^^ {
-    case a ~ b ~ c ~ d =>
-      new String(Array(Integer.parseInt(List(a, b, c, d) mkString "", 16)), 0, 1)
-  }
-
-  //private def lift[T](f: String => T)(xs: List[Any]): T = f(xs mkString "")
-}
diff --git a/src/library/scala/util/parsing/json/Parser.scala b/src/library/scala/util/parsing/json/Parser.scala
deleted file mode 100644
index cb87866..0000000
--- a/src/library/scala/util/parsing/json/Parser.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.parsing.json
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.combinator.syntactical._
-import scala.util.parsing.combinator.lexical._
-
-/**
- *  A marker class for the JSON result types.
- *
- *  @author Derek Chen-Becker <"java"+ at +"chen-becker"+"."+"org">
- */
-sealed abstract class JSONType {
-  /**
-   * This version of toString allows you to provide your own value
-   * formatter.
-   */
-  def toString (formatter : JSONFormat.ValueFormatter) : String
-
-  /**
-   * Returns a String representation of this JSON value
-   * using the JSONFormat.defaultFormatter.
-   */
-  override def toString = toString(JSONFormat.defaultFormatter)
-}
-
-/**
- * This object defines functions that are used when converting JSONType
- * values into String representations. Mostly this is concerned with
- * proper quoting of strings.
- *
- * @author Derek Chen-Becker <"java"+ at +"chen-becker"+"."+"org">
- */
-object JSONFormat {
-  /**
-   * This type defines a function that can be used to
-   * format values into JSON format.
-   */
-  type ValueFormatter = Any => String
-
-  /**
-   * The default formatter used by the library. You can
-   * provide your own with the toString calls on
-   * JSONObject and JSONArray instances.
-   */
-  val defaultFormatter : ValueFormatter = (x : Any) => x match {
-    case s : String => "\"" + quoteString(s) + "\""
-    case jo : JSONObject => jo.toString(defaultFormatter)
-    case ja : JSONArray => ja.toString(defaultFormatter)
-    case other => other.toString
-  }
-
-  /**
-   * This function can be used to properly quote Strings
-   * for JSON output.
-   */
-  def quoteString (s : String) : String =
-    s.map {
-      case '"'  => "\\\""
-      case '\\' => "\\\\"
-      case '/'  => "\\/"
-      case '\b' => "\\b"
-      case '\f' => "\\f"
-      case '\n' => "\\n"
-      case '\r' => "\\r"
-      case '\t' => "\\t"
-      /* We'll unicode escape any control characters. These include:
-       * 0x0 -> 0x1f  : ASCII Control (C0 Control Codes)
-       * 0x7f         : ASCII DELETE
-       * 0x80 -> 0x9f : C1 Control Codes
-       *
-       * Per RFC4627, section 2.5, we're not technically required to
-       * encode the C1 codes, but we do to be safe.
-       */
-      case c if ((c >= '\u0000' && c <= '\u001f') || (c >= '\u007f' && c <= '\u009f')) => "\\u%04x".format(c: Int)
-      case c => c
-    }.mkString
-}
-
-/**
- *  Represents a JSON Object (map).
- *
- *  @author Derek Chen-Becker <"java"+ at +"chen-becker"+"."+"org">
- */
-case class JSONObject (obj : Map[String,Any]) extends JSONType {
-  def toString (formatter : JSONFormat.ValueFormatter) =
-    "{" + obj.map({ case (k,v) => formatter(k.toString) + " : " + formatter(v) }).mkString(", ") + "}"
-}
-
-/**
- *  Represents a JSON Array (list).
- *  @author Derek Chen-Becker <"java"+ at +"chen-becker"+"."+"org">
- */
-case class JSONArray (list : List[Any]) extends JSONType {
-  def toString (formatter : JSONFormat.ValueFormatter) =
-    "[" + list.map(formatter).mkString(", ") + "]"
-}
-
-/**
- *  The main JSON Parser.
- *
- *  @author Derek Chen-Becker <"java"+ at +"chen-becker"+"."+"org">
- */
-class Parser extends StdTokenParsers with ImplicitConversions {
-  // Fill in abstract defs
-  type Tokens = Lexer
-  val lexical = new Tokens
-
-  // Configure lexical parsing
-  lexical.reserved ++= List("true", "false", "null")
-  lexical.delimiters ++= List("{", "}", "[", "]", ":", ",")
-
-  /** Type signature for functions that can parse numeric literals */
-  type NumericParser = String => Any
-
-  // Global default number parsing function
-  protected var defaultNumberParser : NumericParser = {_.toDouble}
-
-  // Per-thread default number parsing function
-  protected val numberParser = new ThreadLocal[NumericParser]() {
-    override def initialValue() = defaultNumberParser
-  }
-
-  // Define the grammar
-  def root       = jsonObj | jsonArray
-  def jsonObj    = "{" ~> repsep(objEntry, ",") <~ "}" ^^ { case vals : List[_] => JSONObject(Map(vals : _*)) }
-  def jsonArray  = "[" ~> repsep(value, ",") <~ "]" ^^ { case vals : List[_] => JSONArray(vals) }
-  def objEntry   = stringVal ~ (":" ~> value) ^^ { case x ~ y => (x, y) }
-  def value: Parser[Any] = (jsonObj | jsonArray | number | "true" ^^^ true | "false" ^^^ false | "null" ^^^ null | stringVal)
-  def stringVal  = accept("string", { case lexical.StringLit(n) => n} )
-  def number     = accept("number", { case lexical.NumericLit(n) => numberParser.get.apply(n)} )
-}
-
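
The JSONObject/JSONArray wrappers above render themselves through JSONFormat.defaultFormatter (or any formatter you pass to toString); a small sketch:

    import scala.util.parsing.json.{ JSONArray, JSONObject }

    val obj = JSONObject(Map("xs" -> JSONArray(List(1, 2)), "ok" -> true))
    println(obj)   // {"xs" : [1, 2], "ok" : true} -- strings quoted, other values via toString
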
diff --git a/src/library/scala/util/regexp/Base.scala b/src/library/scala/util/regexp/Base.scala
deleted file mode 100644
index 7dbe60a..0000000
--- a/src/library/scala/util/regexp/Base.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.regexp
-
-/** Basic regular expressions.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-
- at deprecated("This class will be removed", "2.10.0")
-abstract class Base {
-  type _regexpT <: RegExp
-
-  abstract class RegExp {
-    val isNullable: Boolean
-  }
-
-  object Alt {
-    /** `Alt( R,R,R* )`. */
-    def apply(rs: _regexpT*) =
-      if (rs.size < 2) throw new SyntaxError("need at least 2 branches in Alt")
-      else new Alt(rs: _*)
-    // Can't enforce that statically without changing the interface
-    // def apply(r1: _regexpT, r2: _regexpT, rs: _regexpT*) = new Alt(Seq(r1, r2) ++ rs: _*)
-    def unapplySeq(x: Alt) = Some(x.rs)
-  }
-
-  class Alt private (val rs: _regexpT*) extends RegExp {
-    final val isNullable = rs exists (_.isNullable)
-  }
-
-  object Sequ {
-    /** Sequ( R,R* ) */
-    def apply(rs: _regexpT*) = if (rs.isEmpty) Eps else new Sequ(rs: _*)
-    def unapplySeq(x: Sequ) = Some(x.rs)
-  }
-
-  class Sequ private (val rs: _regexpT*) extends RegExp {
-    final val isNullable = rs forall (_.isNullable)
-  }
-
-  case class Star(r: _regexpT) extends RegExp {
-    final lazy val isNullable = true
-  }
-
-  // The empty Sequ.
-  case object Eps extends RegExp {
-    final lazy val isNullable = true
-    override def toString() = "Eps"
-  }
-
-  /** this class can be used to add meta information to regexps. */
-  class Meta(r1: _regexpT) extends RegExp {
-    final val isNullable = r1.isNullable
-    def r = r1
-  }
-}
diff --git a/src/library/scala/util/regexp/PointedHedgeExp.scala b/src/library/scala/util/regexp/PointedHedgeExp.scala
deleted file mode 100644
index 5c0379b..0000000
--- a/src/library/scala/util/regexp/PointedHedgeExp.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.regexp
-
-/** Pointed regular hedge expressions, a useful subclass of regular hedge expressions.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
- at deprecated("This class will be removed", "2.10.0")
-abstract class PointedHedgeExp extends Base {
-
-  type _regexpT <: RegExp
-  type _labelT
-
-  case class  Node(label: _labelT, r: _regexpT) extends RegExp {
-    final val isNullable = false
-  }
-
-  case class  TopIter(r1: _regexpT, r2: _regexpT) extends RegExp {
-    final val isNullable = r1.isNullable && r2.isNullable //?
-  }
-
-  case object Point extends RegExp {
-    final val isNullable = false
-  }
-
-}
diff --git a/src/library/scala/util/regexp/SyntaxError.scala b/src/library/scala/util/regexp/SyntaxError.scala
deleted file mode 100644
index 1788fdf..0000000
--- a/src/library/scala/util/regexp/SyntaxError.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.regexp
-
-/** This runtime exception is thrown if an attempt to instantiate a
- *  syntactically incorrect expression is detected.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
- at deprecated("This class will be removed", "2.10.0")
-class SyntaxError(e: String) extends RuntimeException(e)
diff --git a/src/library/scala/util/regexp/WordExp.scala b/src/library/scala/util/regexp/WordExp.scala
deleted file mode 100644
index 3c0c2ec..0000000
--- a/src/library/scala/util/regexp/WordExp.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.util.regexp
-
-/**
- *  The class `WordExp` provides regular word expressions.
- *
- *  Users have to instantiate type member `_regexpT <: RegExp`
- *  (from class `Base`) and a type member `_labelT <: Label`.
- *
- *  Here is a short example:
- *  {{{
- *  import scala.util.regexp._
- *  import scala.util.automata._
- *  object MyLang extends WordExp {
- *    type _regexpT = RegExp
- *    type _labelT = MyChar
- *
- *    case class MyChar(c:Char) extends Label
- *  }
- *  import MyLang._
- *  // (a* | b)*
- *  val rex = Star(Alt(Star(Letter(MyChar('a'))),Letter(MyChar('b'))))
- *  object MyBerriSethi extends WordBerrySethi {
- *    override val lang = MyLang
- *  }
- *  val nfa = MyBerriSethi.automatonFrom(Sequ(rex), 1)
- *  }}}
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
- at deprecated("This class will be removed", "2.10.0")
-abstract class WordExp extends Base {
-
-  abstract class Label
-
-  type _regexpT <: RegExp
-  type _labelT <: Label
-
-  case class Letter(a: _labelT) extends RegExp {
-    final lazy val isNullable = false
-    var pos = -1
-  }
-
-  case class Wildcard() extends RegExp {
-    final lazy val isNullable = false
-    var pos = -1
-  }
-}
diff --git a/src/library/scala/volatile.scala b/src/library/scala/volatile.scala
index bea216e..c612732 100644
--- a/src/library/scala/volatile.scala
+++ b/src/library/scala/volatile.scala
@@ -6,8 +6,6 @@
 **                          |/                                          **
 \*                                                                      */
 
-
-
 package scala
 
 import scala.annotation.meta._
diff --git a/src/library/scala/xml/Atom.scala b/src/library/scala/xml/Atom.scala
deleted file mode 100644
index cba0b96..0000000
--- a/src/library/scala/xml/Atom.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/** The class `Atom` provides an XML node for text (`PCDATA`).
- *  It is used in both non-bound and bound XML representations.
- *
- *  @author Burak Emir
- *  @param data the text contained in this node, may not be `'''null'''`.
- */
-class Atom[+A](val data: A) extends SpecialNode with Serializable {
-  if (data == null)
-    throw new IllegalArgumentException("cannot construct "+getClass.getSimpleName+" with null")
-
-  override protected def basisForHashCode: Seq[Any] = Seq(data)
-
-  override def strict_==(other: Equality) = other match {
-    case x: Atom[_] => data == x.data
-    case _          => false
-  }
-
-  override def canEqual(other: Any) = other match {
-    case _: Atom[_] => true
-    case _          => false
-  }
-
-  final override def doCollectNamespaces = false
-  final override def doTransform         = false
-
-  def label = "#PCDATA"
-
-  /** Returns text, with some characters escaped according to the XML
-   *  specification.
-   */
-  def buildString(sb: StringBuilder): StringBuilder =
-    Utility.escape(data.toString, sb)
-
-  override def text: String = data.toString
-
-}
diff --git a/src/library/scala/xml/Attribute.scala b/src/library/scala/xml/Attribute.scala
deleted file mode 100644
index 0224913..0000000
--- a/src/library/scala/xml/Attribute.scala
+++ /dev/null
@@ -1,100 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/** This singleton object contains the `apply` and `unapply` methods for
- *  convenient construction and deconstruction.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-object Attribute {
-  def unapply(x: Attribute) = x match {
-    case PrefixedAttribute(_, key, value, next) => Some((key, value, next))
-    case UnprefixedAttribute(key, value, next)  => Some((key, value, next))
-    case _                                      => None
-  }
-
-  /** Convenience functions which choose Un/Prefixedness appropriately */
-  def apply(key: String, value: Seq[Node], next: MetaData): Attribute =
-    new UnprefixedAttribute(key, value, next)
-
-  def apply(pre: String, key: String, value: String, next: MetaData): Attribute =
-    if (pre == null || pre == "") new UnprefixedAttribute(key, value, next)
-    else new PrefixedAttribute(pre, key, value, next)
-
-  def apply(pre: String, key: String, value: Seq[Node], next: MetaData): Attribute =
-    if (pre == null || pre == "") new UnprefixedAttribute(key, value, next)
-    else new PrefixedAttribute(pre, key, value, next)
-
-  def apply(pre: Option[String], key: String, value: Seq[Node], next: MetaData): Attribute =
-    pre match {
-      case None    => new UnprefixedAttribute(key, value, next)
-      case Some(p) => new PrefixedAttribute(p, key, value, next)
-    }
-}
-
-/** The `Attribute` trait defines the interface shared by both
- *  [[scala.xml.PrefixedAttribute]] and [[scala.xml.UnprefixedAttribute]].
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-abstract trait Attribute extends MetaData {
-  def pre: String        // will be null if unprefixed
-  val key: String
-  val value: Seq[Node]
-  val next: MetaData
-
-  def apply(key: String): Seq[Node]
-  def apply(namespace: String, scope: NamespaceBinding, key: String): Seq[Node]
-  def copy(next: MetaData): Attribute
-
-  def remove(key: String) =
-    if (!isPrefixed && this.key == key) next
-    else copy(next remove key)
-
-  def remove(namespace: String, scope: NamespaceBinding, key: String) =
-    if (this.key == key && (scope getURI pre) == namespace) next
-    else copy(next.remove(namespace, scope, key))
-
-  def isPrefixed: Boolean = pre != null
-
-  def getNamespace(owner: Node): String
-
-  def wellformed(scope: NamespaceBinding): Boolean = {
-    val arg = if (isPrefixed) scope getURI pre else null
-    (next(arg, scope, key) == null) && (next wellformed scope)
-  }
-
-  /** Returns an iterator on attributes */
-  override def iterator: Iterator[MetaData] = {
-    if (value == null) next.iterator
-    else Iterator.single(this) ++ next.iterator
-  }
-
-  override def size: Int = {
-    if (value == null) next.size
-    else 1 + next.size
-  }
-
-  /** Appends string representation of only this attribute to stringbuffer.
-   */
-  protected def toString1(sb: StringBuilder) {
-    if (value == null)
-      return
-    if (isPrefixed)
-      sb append pre append ':'
-
-    sb append key append '='
-    val sb2 = new StringBuilder()
-    Utility.sequenceToXML(value, TopScope, sb2, true)
-    Utility.appendQuoted(sb2.toString, sb)
-  }
-}
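
A small sketch of chaining attributes and using the MetaData helpers defined further down in this diff (asAttrMap, remove):

    import scala.xml.{ Attribute, Null, Text }

    val attrs = Attribute(None, "id", Text("x"),
                  Attribute(None, "class", Text("c"), Null))
    attrs.asAttrMap                // Map(id -> x, class -> c)
    attrs.remove("id").asAttrMap   // Map(class -> c)
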
diff --git a/src/library/scala/xml/Comment.scala b/src/library/scala/xml/Comment.scala
deleted file mode 100644
index ff4280d..0000000
--- a/src/library/scala/xml/Comment.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/** The class `Comment` implements an XML node for comments.
- *
- * @author Burak Emir
- * @param commentText the text contained in this node, may not contain "--"
- */
-case class Comment(commentText: String) extends SpecialNode {
-
-  def label = "#REM"
-  override def text = ""
-  final override def doCollectNamespaces = false
-  final override def doTransform         = false
-
-  if (commentText contains "--")
-    throw new IllegalArgumentException("text contains \"--\"")
-
-  /** Appends "<!-- text -->" to this string buffer.
-   */
-  override def buildString(sb: StringBuilder) =
-    sb append "<!--" append commentText append "-->"
-}
diff --git a/src/library/scala/xml/Document.scala b/src/library/scala/xml/Document.scala
deleted file mode 100644
index a064c4d..0000000
--- a/src/library/scala/xml/Document.scala
+++ /dev/null
@@ -1,91 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/** A document information item (according to InfoSet spec). The comments
- *  are copied from the Infoset spec, only augmented with some information
- *  on the Scala types for definitions that might have no value.
- *  Also plays the role of an `XMLEvent` for pull parsing.
- *
- *  @author  Burak Emir
- *  @version 1.0, 26/04/2005
- */
- at SerialVersionUID(-2289320563321795109L)
-class Document extends NodeSeq with pull.XMLEvent with Serializable {
-
-  /** An ordered list of child information items, in document
-   *  order. The list contains exactly one element information item. The
-   *  list also contains one processing instruction information item for
-   *  each processing instruction outside the document element, and one
-   *  comment information item for each comment outside the document
-   *  element. Processing instructions and comments within the DTD are
-   *  excluded. If there is a document type declaration, the list also
-   *  contains a document type declaration information item.
-   */
-  var children: Seq[Node] = _
-
-  /** The element information item corresponding to the document element. */
-  var docElem: Node = _
-
-  /** The dtd that comes with the document, if any */
-  var dtd: scala.xml.dtd.DTD = _
-
-  /** An unordered set of notation information items, one for each notation
-   *  declared in the DTD. If any notation is multiply declared, this property
-   *  has no value.
-   */
-  def notations: Seq[scala.xml.dtd.NotationDecl] =
-    dtd.notations
-
-  /** An unordered set of unparsed entity information items, one for each
-   *  unparsed entity declared in the DTD.
-   */
-  def unparsedEntities: Seq[scala.xml.dtd.EntityDecl] =
-    dtd.unparsedEntities
-
-  /** The base URI of the document entity. */
-  var baseURI: String = _
-
-  /** The name of the character encoding scheme in which the document entity
-   *  is expressed.
-   */
-  var encoding: Option[String] = _
-
-  /** An indication of the standalone status of the document, either
-   *  true or false. This property is derived from the optional standalone
-   *  document declaration in the XML declaration at the beginning of the
-   *  document entity, and has no value (`None`) if there is no
-   *  standalone document declaration.
-   */
-  var standAlone: Option[Boolean] = _
-
-  /** A string representing the XML version of the document. This
-   *  property is derived from the XML declaration optionally present at
-   *  the beginning of the document entity, and has no value (`None`)
-   *  if there is no XML declaration.
-   */
-  var version: Option[String] = _
-
-  /** 9. This property is not strictly speaking part of the infoset of
-   *  the document. Rather it is an indication of whether the processor
-   *  has read the complete DTD. Its value is a boolean. If it is false,
-   *  then certain properties (indicated in their descriptions below) may
-   *  be unknown. If it is true, those properties are never unknown.
-   */
-  var allDeclarationsProcessed = false
-
-  // methods for NodeSeq
-
-  def theSeq: Seq[Node] = this.docElem
-
-  override def canEqual(other: Any) = other match {
-    case _: Document  => true
-    case _            => false
-  }
-}
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
deleted file mode 100755
index b9e665e..0000000
--- a/src/library/scala/xml/Elem.scala
+++ /dev/null
@@ -1,111 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/** This singleton object contains the `apply` and `unapplySeq` methods for
- *  convenient construction and deconstruction. It is possible to deconstruct
- *  any `Node` instance (that is not a `SpecialNode` or a `Group`) using the
- *  syntax `case Elem(prefix, label, attribs, scope, child @ _*) => ...`
- *
- *  Copyright 2008 Google Inc. All Rights Reserved.
- *  @author Burak Emir <bqe at google.com>
- */
-object Elem {
-  /** Build an Elem, setting its minimizeEmpty property to <code>true</code> if it has no children.  Note that this
-   *  default may not be exactly what you want, as some XML dialects don't permit some elements to be minimized.
-   *
-   * @deprecated This factory method is retained for backward compatibility; please use the other one, with which you
-   *             can specify your own preference for minimizeEmpty.
-   */
-  @deprecated("Use the other apply method in this object", "2.10.0")
-  def apply(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*): Elem =
-    apply(prefix, label, attributes, scope, child.isEmpty, child: _*)
-
-  def apply(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, minimizeEmpty: Boolean, child: Node*): Elem =
-    new Elem(prefix, label, attributes, scope, minimizeEmpty, child: _*)
-
-  def unapplySeq(n: Node) = n match {
-    case _: SpecialNode | _: Group  => None
-    case _                          => Some((n.prefix, n.label, n.attributes, n.scope, n.child))
-  }
-}
-
-/** The case class `Elem` extends the `Node` class,
- *  providing an immutable data object representing an XML element.
- *
- *  @param prefix        namespace prefix (may be null, but not the empty string)
- *  @param label         the element name
- *  @param attributes1   the attribute map
- *  @param scope         the scope containing the namespace bindings
- *  @param minimizeEmpty `true` if this element should be serialized as minimized (i.e. "<el/>") when
- *                       empty; `false` if it should be written out in long form.
- *  @param child         the children of this node
- *
- *  Copyright 2008 Google Inc. All Rights Reserved.
- *  @author Burak Emir <bqe at google.com>
- */
-class Elem(
-  override val prefix: String,
-  val label: String,
-  attributes1: MetaData,
-  override val scope: NamespaceBinding,
-  val minimizeEmpty: Boolean,
-  val child: Node*)
-extends Node with Serializable
-{
-  @deprecated("This constructor is retained for backward compatibility. Please use the primary constructor, which lets you specify your own preference for `minimizeEmpty`.", "2.10.0")
-  def this(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*) = {
-    this(prefix, label, attributes, scope, child.isEmpty, child: _*)
-  }
-
-  final override def doCollectNamespaces = true
-  final override def doTransform         = true
-
-  override val attributes = MetaData.normalize(attributes1, scope)
-
-  if (prefix == "")
-    throw new IllegalArgumentException("prefix of zero length, use null instead")
-
-  if (scope == null)
-    throw new IllegalArgumentException("scope is null, use scala.xml.TopScope for empty scope")
-
-  //@todo: copy the children,
-  //  setting namespace scope if necessary
-  //  cleaning adjacent text nodes if necessary
-
-  override protected def basisForHashCode: Seq[Any] =
-    prefix :: label :: attributes :: child.toList
-
-  /** Returns a new element with updated attributes, resolving namespace uris
-   *  from this element's scope. See MetaData.update for details.
-   *
-   *  @param  updates MetaData with new and updated attributes
-   *  @return a new element with updated attributes
-   */
-  final def %(updates: MetaData): Elem =
-    copy(attributes = MetaData.update(attributes, scope, updates))
-
-  /** Returns a copy of this element with any supplied arguments replacing
-   *  this element's value for that field.
-   *
-   *  @return a new element with the updated fields
-   */
-  def copy(
-    prefix: String = this.prefix,
-    label: String = this.label,
-    attributes: MetaData = this.attributes,
-    scope: NamespaceBinding = this.scope,
-    minimizeEmpty: Boolean = this.minimizeEmpty,
-    child: Seq[Node] = this.child.toSeq
-  ): Elem = Elem(prefix, label, attributes, scope, minimizeEmpty, child: _*)
-
-  /** Returns concatenation of `text(n)` for each child `n`.
-   */
-  override def text = (child map (_.text)).mkString
-}
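
A sketch of building and updating an element through the constructors shown above, without XML literals (in 2.11 the whole scala.xml package moves to the separate scala-xml module):

    import scala.xml.{ Attribute, Elem, Null, Text, TopScope }

    // <greeting>hi</greeting>, built with the non-deprecated apply; the Boolean is minimizeEmpty
    val e = Elem(null, "greeting", Null, TopScope, true, Text("hi"))

    // % merges new attributes via MetaData.update, resolving prefixes against the scope
    val greeted = e % Attribute(None, "lang", Text("en"), Null)
    println(greeted)   // <greeting lang="en">hi</greeting>
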
diff --git a/src/library/scala/xml/EntityRef.scala b/src/library/scala/xml/EntityRef.scala
deleted file mode 100644
index a7b9835..0000000
--- a/src/library/scala/xml/EntityRef.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/** The class `EntityRef` implements an XML node for entity references.
- *
- * @author  Burak Emir
- * @version 1.0
- * @param   entityName the name of the entity reference, for example `amp`.
- */
-case class EntityRef(entityName: String) extends SpecialNode {
-  final override def doCollectNamespaces = false
-  final override def doTransform         = false
-  def label = "#ENTITY"
-
-  override def text = entityName match {
-    case "lt"   => "<"
-    case "gt"   => ">"
-    case "amp"  => "&"
-    case "apos" => "'"
-    case "quot" => "\""
-    case _      => Utility.sbToString(buildString)
-  }
-
-  /** Appends `"& entityName;"` to this string buffer.
-   *
-   *  @param  sb the string buffer.
-   *  @return the modified string buffer `sb`.
-   */
-  override def buildString(sb: StringBuilder) =
-    sb.append("&").append(entityName).append(";")
-
-}
diff --git a/src/library/scala/xml/Equality.scala b/src/library/scala/xml/Equality.scala
deleted file mode 100644
index 02db22a..0000000
--- a/src/library/scala/xml/Equality.scala
+++ /dev/null
@@ -1,106 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/** In an attempt to contain the damage being inflicted on consistency by the
- *  ad hoc `equals` methods spread around `xml`, the logic is centralized and
- *  all the `xml` classes go through the `xml.Equality` trait.  There are two
- *  forms of `xml` comparison.
- *
- *  1. `'''def''' strict_==(other: scala.xml.Equality)`
- *
- *  This one tries to honor the little things like symmetry and hashCode
- *  contracts.  The `equals` method routes all comparisons through this.
- *
- *  1. `xml_==(other: Any)`
- *
- *  This one picks up where `strict_==` leaves off.  It might declare any two
- *  things equal.
- *
- *  As things stood, the logic not only made a mockery of the collections
- *  equals contract, but also laid waste to that of case classes.
- *
- *  Among the obstacles to sanity are/were:
- *
- *    Node extends NodeSeq extends Seq[Node]
- *    MetaData extends Iterable[MetaData]
- *    The hacky "Group" xml node which throws exceptions
- *      with wild abandon, so don't get too close
- *    Rampant asymmetry and impossible hashCodes
- *    Most classes claiming to be equal to "String" if
- *      some specific stringification of it was the same.
- *      String was never going to return the favor.
- */
-
-object Equality {
-  def asRef(x: Any): AnyRef = x.asInstanceOf[AnyRef]
-
-  /** Note - these functions assume strict equality has already failed.
-   */
-  def compareBlithely(x1: AnyRef, x2: String): Boolean = x1 match {
-    case x: Atom[_]   => x.data == x2
-    case x: NodeSeq   => x.text == x2
-    case _            => false
-  }
-  def compareBlithely(x1: AnyRef, x2: Node): Boolean = x1 match {
-    case x: NodeSeq if x.length == 1  => x2 == x(0)
-    case _                            => false
-  }
-  def compareBlithely(x1: AnyRef, x2: AnyRef): Boolean = {
-    if (x1 == null || x2 == null)
-      return (x1 eq x2)
-
-    x2 match {
-      case s: String  => compareBlithely(x1, s)
-      case n: Node    => compareBlithely(x1, n)
-      case _          => false
-    }
-  }
-}
-import Equality._
-
-trait Equality extends scala.Equals {
-  protected def basisForHashCode: Seq[Any]
-
-  def strict_==(other: Equality): Boolean
-  def strict_!=(other: Equality) = !strict_==(other)
-
-  /** We insist we're only equal to other `xml.Equality` implementors,
-   *  which heads off a lot of inconsistency up front.
-   */
-  override def canEqual(other: Any): Boolean = other match {
-    case x: Equality    => true
-    case _              => false
-  }
-
-  /** It'd be nice to make these final, but there are probably
-   *  people out there subclassing the XML types, especially when
-   *  it comes to equals.  However WE at least can pretend they
-   *  are final since clearly individual classes cannot be trusted
-   *  to maintain a semblance of order.
-   */
-  override def hashCode()         = basisForHashCode.##
-  override def equals(other: Any) = doComparison(other, false)
-  final def xml_==(other: Any)    = doComparison(other, true)
-  final def xml_!=(other: Any)    = !xml_==(other)
-
-  /** The "blithe" parameter expresses the caller's unconcerned attitude
-   *  regarding the usual constraints on equals.  The method is thereby
-   *  given carte blanche to declare any two things equal.
-   */
-  private def doComparison(other: Any, blithe: Boolean) = {
-    val strictlyEqual = other match {
-      case x: AnyRef if this eq x => true
-      case x: Equality            => (x canEqual this) && (this strict_== x)
-      case _                      => false
-    }
-
-    strictlyEqual || (blithe && compareBlithely(this, asRef(other)))
-  }
-}
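
The two comparison forms described above behave roughly like this (strict_== honors the usual equals contract, xml_== is the deliberately loose one):

    import scala.xml.Text

    Text("1") strict_== Text("1")   // true: same Atom data
    Text("1") xml_== "1"            // true: the blithe comparison accepts the string form
    Text("1") == "1"                // false: equals only accepts other xml.Equality values
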
diff --git a/src/library/scala/xml/Group.scala b/src/library/scala/xml/Group.scala
deleted file mode 100644
index 92da2f9..0000000
--- a/src/library/scala/xml/Group.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/** A hack to group XML nodes in one node for output.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-final case class Group(val nodes: Seq[Node]) extends Node {
-  override def theSeq = nodes
-
-  override def canEqual(other: Any) = other match {
-    case x: Group => true
-    case _        => false
-  }
-
-  override def strict_==(other: Equality) = other match {
-    case Group(xs)  => nodes sameElements xs
-    case _          => false
-  }
-
-  override protected def basisForHashCode = nodes
-
-  /** Since Group is very much a hack it throws an exception if you
-   *  try to do anything with it.
-   */
-  private def fail(msg: String) = throw new UnsupportedOperationException("class Group does not support method '%s'" format msg)
-
-  def label                           = fail("label")
-  override def attributes             = fail("attributes")
-  override def namespace              = fail("namespace")
-  override def child                  = fail("child")
-  def buildString(sb: StringBuilder)  = fail("toString(StringBuilder)")
-}
diff --git a/src/library/scala/xml/MalformedAttributeException.scala b/src/library/scala/xml/MalformedAttributeException.scala
deleted file mode 100644
index 3431cb6..0000000
--- a/src/library/scala/xml/MalformedAttributeException.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-
-
-case class MalformedAttributeException(msg: String) extends RuntimeException(msg)
diff --git a/src/library/scala/xml/MetaData.scala b/src/library/scala/xml/MetaData.scala
deleted file mode 100644
index 3bf3ebb..0000000
--- a/src/library/scala/xml/MetaData.scala
+++ /dev/null
@@ -1,216 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-import Utility.sbToString
-import scala.annotation.tailrec
-import scala.collection.{ AbstractIterable, Iterator }
-
-/**
- * Copyright 2008 Google Inc. All Rights Reserved.
- * @author Burak Emir <bqe at google.com>
- */
-object MetaData {
-  /**
-   * appends all attributes from new_tail to attribs, without attempting to
-   * detect or remove duplicates. The method guarantees that all attributes
-   * from attribs come before the attributes in new_tail, but does not
-   * guarantee to preserve the relative order of attribs.
-   *
-   * Duplicates can be removed with `normalize`.
-   */
-  @tailrec  // temporarily marked final so it will compile under -Xexperimental
-  final def concatenate(attribs: MetaData, new_tail: MetaData): MetaData =
-    if (attribs eq Null) new_tail
-    else concatenate(attribs.next, attribs copy new_tail)
-
-  /**
-   * returns normalized MetaData, with all duplicates removed and namespace prefixes resolved to
-   *  namespace URIs via the given scope.
-   */
-  def normalize(attribs: MetaData, scope: NamespaceBinding): MetaData = {
-    def iterate(md: MetaData, normalized_attribs: MetaData, set: Set[String]): MetaData = {
-      lazy val key = getUniversalKey(md, scope)
-      if (md eq Null) normalized_attribs
-      else if ((md.value eq null) || set(key)) iterate(md.next, normalized_attribs, set)
-      else md copy iterate(md.next, normalized_attribs, set + key)
-    }
-    iterate(attribs, Null, Set())
-  }
-
-  /**
-   * returns key if md is unprefixed, pre+key if md is prefixed
-   */
-  def getUniversalKey(attrib: MetaData, scope: NamespaceBinding) = attrib match {
-    case prefixed: PrefixedAttribute     => scope.getURI(prefixed.pre) + prefixed.key
-    case unprefixed: UnprefixedAttribute => unprefixed.key
-  }
-
-  /**
-   *  returns MetaData with attributes updated from given MetaData
-   */
-  def update(attribs: MetaData, scope: NamespaceBinding, updates: MetaData): MetaData =
-    normalize(concatenate(updates, attribs), scope)
-
-}
-
-/** This class represents an attribute and at the same time a linked list of
- *  attributes. Every instance of this class is either
- *  - an instance of `UnprefixedAttribute key,value` or
- *  - an instance of `PrefixedAttribute namespace_prefix,key,value` or
- *  - `Null`, the empty attribute list.
- *
- *  Namespace URIs are obtained by using the namespace scope of the element
- *  owning this attribute (see `getNamespace`).
- *
- *  Copyright 2008 Google Inc. All Rights Reserved.
- *  @author Burak Emir <bqe at google.com>
- */
-abstract class MetaData
-extends AbstractIterable[MetaData]
-   with Iterable[MetaData]
-   with Equality
-   with Serializable {
-
-  /** Updates this MetaData with the MetaData given as argument. All attributes that occur in updates
-   *  are part of the resulting MetaData. If an attribute occurs in both this instance and
-   *  updates, only the one in updates is part of the result (avoiding duplicates). For prefixed
-   *  attributes, namespaces are resolved using the given scope, which defaults to TopScope.
-   *
-   *  @param updates MetaData with new and updated attributes
-   *  @return a new MetaData instance that contains old, new and updated attributes
-   */
-  def append(updates: MetaData, scope: NamespaceBinding = TopScope): MetaData =
-    MetaData.update(this, scope, updates)
-
-  /**
-   * Gets value of unqualified (unprefixed) attribute with given key, null if not found
-   *
-   * @param  key
-   * @return value as Seq[Node] if key is found, null otherwise
-   */
-  def apply(key: String): Seq[Node]
-
-  /** convenience method, same as `apply(namespace, owner.scope, key)`.
-   *
-   *  @param namespace_uri namespace uri of key
-   *  @param owner the element owning this attribute list
-   *  @param key   the attribute key
-   */
-  final def apply(namespace_uri: String, owner: Node, key: String): Seq[Node] =
-    apply(namespace_uri, owner.scope, key)
-
-  /**
-   * Gets value of prefixed attribute with given key and namespace, null if not found
-   *
-   * @param  namespace_uri namespace uri of key
-   * @param  scp a namespace scope (usually of the element owning this attribute list)
-   * @param  k   to be looked for
-   * @return value as Seq[Node] if key is found, null otherwise
-   */
-  def apply(namespace_uri: String, scp: NamespaceBinding, k: String): Seq[Node]
-
-  /** returns a copy of this MetaData item with next field set to argument.
-   */
-  def copy(next: MetaData): MetaData
-
-  /** if owner is the element of this metadata item, returns namespace */
-  def getNamespace(owner: Node): String
-
-  def hasNext = (Null != next)
-
-  def length: Int = length(0)
-
-  def length(i: Int): Int = next.length(i + 1)
-
-  def isPrefixed: Boolean
-
-  override def canEqual(other: Any) = other match {
-    case _: MetaData  => true
-    case _            => false
-  }
-  override def strict_==(other: Equality) = other match {
-    case m: MetaData  => this.asAttrMap == m.asAttrMap
-    case _            => false
-  }
-  protected def basisForHashCode: Seq[Any] = List(this.asAttrMap)
-
-  /** filters this sequence of meta data */
-  override def filter(f: MetaData => Boolean): MetaData =
-    if (f(this)) copy(next filter f)
-    else next filter f
-
-  /** returns key of this MetaData item */
-  def key: String
-
-  /** returns value of this MetaData item */
-  def value: Seq[Node]
-
-  /** Returns a String containing "prefix:key" if the first key is
-   *  prefixed, and "key" otherwise.
-   */
-  def prefixedKey = this match {
-    case x: Attribute if x.isPrefixed => x.pre + ":" + key
-    case _                            => key
-  }
-
-  /** Returns a Map containing the attributes stored as key/value pairs.
-   */
-  def asAttrMap: Map[String, String] =
-    (iterator map (x => (x.prefixedKey, x.value.text))).toMap
-
-  /** returns Null or the next MetaData item */
-  def next: MetaData
-
-  /**
-   * Gets value of unqualified (unprefixed) attribute with given key, None if not found
-   *
-   * @param  key
-   * @return value in Some(Seq[Node]) if key is found, None otherwise
-   */
-  final def get(key: String): Option[Seq[Node]] = Option(apply(key))
-
-  /** same as get(uri, owner.scope, key) */
-  final def get(uri: String, owner: Node, key: String): Option[Seq[Node]] =
-    get(uri, owner.scope, key)
-
-  /** gets value of qualified (prefixed) attribute with given key.
-   *
-   * @param  uri namespace of key
-   * @param  scope a namespace scope (usually of the element owning this attribute list)
-   * @param  key the key to be looked for
-   * @return value as Some[Seq[Node]] if key is found, None otherwise
-   */
-  final def get(uri: String, scope: NamespaceBinding, key: String): Option[Seq[Node]] =
-    Option(apply(uri, scope, key))
-
-  protected def toString1(): String = sbToString(toString1)
-
-  // appends string representations of single attribute to StringBuilder
-  protected def toString1(sb: StringBuilder): Unit
-
-  override def toString(): String = sbToString(buildString)
-
-  def buildString(sb: StringBuilder): StringBuilder = {
-    sb append ' '
-    toString1(sb)
-    next buildString sb
-  }
-
-  /** Returns true if this attribute list is well-formed with respect to the given namespace scope.
-   */
-  def wellformed(scope: NamespaceBinding): Boolean
-
-  def remove(key: String): MetaData
-
-  def remove(namespace: String, scope: NamespaceBinding, key: String): MetaData
-
-  final def remove(namespace: String, owner: Node, key: String): MetaData =
-    remove(namespace, owner.scope, key)
-}
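
For reference, a minimal sketch of how the MetaData API deleted above is used (the same classes continue to live in the separately published scala-xml module; the attribute names and values are invented, and remove comes from the Attribute trait, which is not part of this hunk):

    import scala.xml._

    // build the attribute chain a="1" b="2"; Null terminates every chain
    val attrs: MetaData =
      new UnprefixedAttribute("a", "1", new UnprefixedAttribute("b", "2", Null))

    // append resolves duplicate keys in favour of the updates
    val updated = attrs.append(new UnprefixedAttribute("a", "3", Null))

    println(attrs.asAttrMap)              // Map(a -> 1, b -> 2)
    println(updated.asAttrMap)            // Map(a -> 3, b -> 2)
    println(attrs.get("missing"))         // None
    println(attrs.remove("b").asAttrMap)  // Map(a -> 1)
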
diff --git a/src/library/scala/xml/NamespaceBinding.scala b/src/library/scala/xml/NamespaceBinding.scala
deleted file mode 100644
index c7cd9e6..0000000
--- a/src/library/scala/xml/NamespaceBinding.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-import Utility.sbToString
-
-/** The class `NamespaceBinding` represents namespace bindings
- *  and scopes. The binding for the default namespace is treated as a null
- *  prefix. The absent namespace is represented with the null uri. Neither
- *  prefix nor uri may be empty, which is not checked.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-@SerialVersionUID(0 - 2518644165573446725L)
-case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBinding) extends AnyRef with Equality
-{
-  if (prefix == "")
-    throw new IllegalArgumentException("zero length prefix not allowed")
-
-  def getURI(_prefix: String): String =
-    if (prefix == _prefix) uri else parent getURI _prefix
-
-  /** Returns some prefix that is mapped to the URI.
-   *
-   * @param _uri the input URI
-   * @return the prefix that is mapped to the input URI, or null
-   * if no prefix is mapped to the URI.
-   */
-  def getPrefix(_uri: String): String =
-    if (_uri == uri) prefix else parent getPrefix _uri
-
-  override def toString(): String = sbToString(buildString(_, TopScope))
-
-  override def canEqual(other: Any) = other match {
-    case _: NamespaceBinding  => true
-    case _                    => false
-  }
-
-  override def strict_==(other: Equality) = other match {
-    case x: NamespaceBinding  => (prefix == x.prefix) && (uri == x.uri) && (parent == x.parent)
-    case _                    => false
-  }
-
-  def basisForHashCode: Seq[Any] = List(prefix, uri, parent)
-
-  def buildString(stop: NamespaceBinding): String = sbToString(buildString(_, stop))
-
-  def buildString(sb: StringBuilder, stop: NamespaceBinding) {
-    if (this eq stop) return    // contains?
-
-    val s = " xmlns%s=\"%s\"".format(
-      (if (prefix != null) ":" + prefix else ""),
-      (if (uri != null) uri else "")
-    )
-    parent.buildString(sb append s, stop) // copy(ignore)
-  }
-}
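
A quick sketch of NamespaceBinding lookups (the prefix and URI below are invented for the example):

    import scala.xml._

    // a scope that binds the "ex" prefix on top of the predefined TopScope
    val scope = NamespaceBinding("ex", "http://example.org/ns", TopScope)

    println(scope.getURI("ex"))                        // http://example.org/ns
    println(scope.getPrefix("http://example.org/ns"))  // ex
    println(scope.getURI("xml"))                       // http://www.w3.org/XML/1998/namespace
    println(scope)                                     //  xmlns:ex="http://example.org/ns"
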
diff --git a/src/library/scala/xml/Node.scala b/src/library/scala/xml/Node.scala
deleted file mode 100755
index 6b6c962..0000000
--- a/src/library/scala/xml/Node.scala
+++ /dev/null
@@ -1,197 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/** This singleton object contains the `unapplySeq` method for
- *  convenient deconstruction.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-object Node {
-  /** the constant empty attribute sequence */
-  final def NoAttributes: MetaData = Null
-
-  /** the empty namespace */
-  val EmptyNamespace = ""
-
-  def unapplySeq(n: Node) = Some((n.label, n.attributes, n.child))
-}
-
-/**
- * An abstract class representing XML with nodes of a labelled tree.
- * This class contains an implementation of a subset of XPath for navigation.
- *
- * @author  Burak Emir and others
- * @version 1.1
- */
-abstract class Node extends NodeSeq {
-
-  /** prefix of this node */
-  def prefix: String = null
-
-  /** label of this node, e.g. "foo" for <foo/> */
-  def label: String
-
-  /** used internally. Atom/Molecule = -1 PI = -2 Comment = -3 EntityRef = -5
-   */
-  def isAtom = this.isInstanceOf[Atom[_]]
-
-  /** The logic formerly found in typeTag$, as best I could infer it. */
-  def doCollectNamespaces = true  // if (tag >= 0) DO collect namespaces
-  def doTransform         = true  // if (tag < 0) DO NOT transform
-
-  /**
-   *  method returning the namespace bindings of this node. by default, this
-   *  is TopScope, which means there are no namespace bindings except the
-   *  predefined one for "xml".
-   */
-  def scope: NamespaceBinding = TopScope
-
-  /**
-   *  convenience, same as <code>getNamespace(this.prefix)</code>
-   */
-  def namespace = getNamespace(this.prefix)
-
-  /**
-   * Convenience method, same as `scope.getURI(pre)` but additionally
-   * checks if scope is `'''null'''`.
-   *
-   * @param pre the prefix whose namespace name we would like to obtain
-   * @return    the namespace if <code>scope != null</code> and prefix was
-   *            found, else <code>null</code>
-   */
-  def getNamespace(pre: String): String = if (scope eq null) null else scope.getURI(pre)
-
-  /**
-   * Convenience method, looks up an unprefixed attribute in attributes of this node.
-   * Same as `attributes.getValue(key)`
-   *
-   * @param  key of queried attribute.
-   * @return value of <code>UnprefixedAttribute</code> with given key
-   *         in attributes, if it exists, otherwise <code>null</code>.
-   */
-  final def attribute(key: String): Option[Seq[Node]] = attributes.get(key)
-
-  /**
-   * Convenience method, looks up a prefixed attribute in attributes of this node.
-   * Same as `attributes.getValue(uri, this, key)`.
-   *
-   * @param  uri namespace of queried attribute (may not be null).
-   * @param  key of queried attribute.
-   * @return value of `PrefixedAttribute` with given namespace
-   *         and given key, otherwise `'''null'''`.
-   */
-  final def attribute(uri: String, key: String): Option[Seq[Node]] =
-    attributes.get(uri, this, key)
-
-  /**
-   * Returns the attribute axis, i.e. all attributes of this node, prefixed and
-   * unprefixed, in no particular order. In class `Node`, this
-   * defaults to `Null` (the empty attribute list).
-   *
-   * @return all attributes of this node
-   */
-  def attributes: MetaData = Null
-
-  /**
-   * Returns child axis i.e. all children of this node.
-   *
-   * @return all children of this node
-   */
-  def child: Seq[Node]
-
-  /** Children which do not stringify to "" (needed for equality)
-   */
-  def nonEmptyChildren: Seq[Node] = child filterNot (_.toString == "")
-
-  /**
-   * Descendant axis (all descendants of this node, not including node itself)
-   * includes all text nodes, element nodes, comments and processing instructions.
-   */
-  def descendant: List[Node] =
-    child.toList.flatMap { x => x::x.descendant }
-
-  /**
-   * Descendant axis (all descendants of this node, including this node)
-   * includes all text nodes, element nodes, comments and processing instructions.
-   */
-  def descendant_or_self: List[Node] = this :: descendant
-
-  override def canEqual(other: Any) = other match {
-    case x: Group   => false
-    case x: Node    => true
-    case _          => false
-  }
-
-  override protected def basisForHashCode: Seq[Any] =
-    prefix :: label :: attributes :: nonEmptyChildren.toList
-
-  override def strict_==(other: Equality) = other match {
-    case _: Group => false
-    case x: Node  =>
-      (prefix == x.prefix) &&
-      (label == x.label) &&
-      (attributes == x.attributes) &&
-      // (scope == x.scope)               // note - original code didn't compare scopes so I left it as is.
-      (nonEmptyChildren sameElements x.nonEmptyChildren)
-    case _        =>
-      false
-  }
-
-  // implementations of NodeSeq methods
-
-  /**
-   *  returns a sequence consisting of only this node
-   */
-  def theSeq: Seq[Node] = this :: Nil
-
-  /**
-   * String representation of this node
-   *
-   * @param stripComments if true, strips comment nodes from result
-   */
-  def buildString(stripComments: Boolean): String =
-    Utility.serialize(this, stripComments = stripComments).toString
-
-  /**
-   * Same as `toString('''false''')`.
-   */
-  override def toString(): String = buildString(false)
-
-  /**
-   * Appends qualified name of this node to `StringBuilder`.
-   */
-  def nameToString(sb: StringBuilder): StringBuilder = {
-    if (null != prefix) {
-      sb append prefix
-      sb append ':'
-    }
-    sb append label
-  }
-
-  /**
-   * Returns a type symbol (e.g. DTD, XSD), default `'''null'''`.
-   */
-  def xmlType(): TypeSymbol = null
-
-  /**
-   * Returns a text representation of this node. Note that this is not equivalent to
-   * the XPath node-test called text(), it is rather an implementation of the
-   * XPath function string()
-   *  Martin to Burak: to do: if you make this method abstract, the compiler will now
-   *  complain if there's no implementation in a subclass. Is this what we want? Note that
-   *  this would break doc/DocGenator and doc/ModelToXML, with an error message like:
-   * {{{
-   * doc\DocGenerator.scala:1219: error: object creation impossible, since there is a deferred declaration of method text in class Node of type => String which is not implemented in a subclass
-   * new SpecialNode {
-   * ^
-   * }}} */
-  override def text: String = super.text
-}
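
A short usage sketch for the Node API above, assuming XML literals and the scala-xml classes are available (the <order> document is invented for illustration):

    import scala.xml._

    val doc: Node = <order id="42"><item qty="2">tea</item></order>

    println(doc.label)                      // order
    println(doc.attribute("id"))            // Some(42)
    println(doc.child.head.label)           // item
    println(doc.descendant_or_self.length)  // 3: order, item and the text node
    println(doc.text)                       // tea

    // the Node extractor splits a node into label, attributes and children
    doc match {
      case Node(label, _, children @ _*) =>
        println(label + " has " + children.length + " child")
    }
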
diff --git a/src/library/scala/xml/NodeBuffer.scala b/src/library/scala/xml/NodeBuffer.scala
deleted file mode 100644
index 2db4338..0000000
--- a/src/library/scala/xml/NodeBuffer.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/**
- * This class acts as a Buffer for nodes. If it is used as a sequence of
- * nodes `Seq[Node]`, it must be ensured that no updates occur after that
- * point, because `scala.xml.Node` is assumed to be immutable.
- *
- * Despite this being a sequence, don't use it as key in a hashtable.
- * Calling the hashcode function will result in a runtime error.
- *
- * @author  Burak Emir
- * @version 1.0
- */
-class NodeBuffer extends scala.collection.mutable.ArrayBuffer[Node] {
-
-  /**
-   * Append given object to this buffer, returns reference on this
-   * `NodeBuffer` for convenience. Some rules apply:
-   * - If argument `o` is `'''null'''`, it is ignored.
-   * - If it is an `Iterator` or `Iterable`, its elements will be added.
-   * - If `o` is a node, it is added as it is.
-   * - If it is anything else, it gets wrapped in an [[scala.xml.Atom]].
-   *
-   * @param o converts to an xml node and adds to this node buffer
-   * @return  this nodebuffer
-   */
-  def &+(o: Any): NodeBuffer = {
-    o match {
-      case null | _: Unit | Text("")  => // ignore
-      case it: Iterator[_]            => it foreach &+
-      case n: Node                    => super.+=(n)
-      case ns: Iterable[_]            => this &+ ns.iterator
-      case ns: Array[_]               => this &+ ns.iterator
-      case d                          => super.+=(new Atom(d))
-    }
-    this
-  }
-}
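
A small sketch of the &+ method defined above; NodeBuffer mainly backs XML literal desugaring, but it can be used directly (the nodes below are invented):

    import scala.xml._

    val buf = new NodeBuffer
    buf &+ <a/> &+ null &+ List(<b/>, <c/>) &+ 42   // null is ignored, 42 is wrapped in an Atom

    println(buf.length)    // 4
    println(buf.mkString)  // <a/><b/><c/>42
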
diff --git a/src/library/scala/xml/NodeSeq.scala b/src/library/scala/xml/NodeSeq.scala
deleted file mode 100644
index decf60d..0000000
--- a/src/library/scala/xml/NodeSeq.scala
+++ /dev/null
@@ -1,151 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-import scala.collection.{ mutable, immutable, generic, SeqLike, AbstractSeq }
-import mutable.{ Builder, ListBuffer }
-import generic.{ CanBuildFrom }
-import scala.language.implicitConversions
-
-/** This object ...
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-object NodeSeq {
-  final val Empty = fromSeq(Nil)
-  def fromSeq(s: Seq[Node]): NodeSeq = new NodeSeq {
-    def theSeq = s
-  }
-  type Coll = NodeSeq
-  implicit def canBuildFrom: CanBuildFrom[Coll, Node, NodeSeq] =
-    new CanBuildFrom[Coll, Node, NodeSeq] {
-      def apply(from: Coll) = newBuilder
-      def apply() = newBuilder
-    }
-  def newBuilder: Builder[Node, NodeSeq] = new ListBuffer[Node] mapResult fromSeq
-  implicit def seqToNodeSeq(s: Seq[Node]): NodeSeq = fromSeq(s)
-}
-
-/** This class implements a wrapper around `Seq[Node]` that adds XPath
- *  and comprehension methods.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-abstract class NodeSeq extends AbstractSeq[Node] with immutable.Seq[Node] with SeqLike[Node, NodeSeq] with Equality {
-  import NodeSeq.seqToNodeSeq // import view magic for NodeSeq wrappers
-
-  /** Creates a list buffer as builder for this class */
-    case Elem(pre,lab,md,scp,child @ _*) =>
-
-  def theSeq: Seq[Node]
-  def length = theSeq.length
-  override def iterator = theSeq.iterator
-
-  def apply(i: Int): Node = theSeq(i)
-  def apply(f: Node => Boolean): NodeSeq = filter(f)
-
-  def xml_sameElements[A](that: Iterable[A]): Boolean = {
-    val these = this.iterator
-    val those = that.iterator
-    while (these.hasNext && those.hasNext)
-      if (these.next xml_!= those.next)
-        return false
-
-    !these.hasNext && !those.hasNext
-  }
-
-  protected def basisForHashCode: Seq[Any] = theSeq
-
-  override def canEqual(other: Any) = other match {
-    case _: NodeSeq   => true
-    case _            => false
-  }
-
-  override def strict_==(other: Equality) = other match {
-    case x: NodeSeq => (length == x.length) && (theSeq sameElements x.theSeq)
-    case _          => false
-  }
-
-  /** Projection function, which returns  elements of `this` sequence based
-   *  on the string `that`. Use:
-   *   - `this \ "foo"` to get a list of all elements that are labelled with `"foo"`;
-   *   - `\ "_"` to get a list of all elements (wildcard);
-   *   - `ns \ "@foo"` to get the unprefixed attribute `"foo"`;
-   *   - `ns \ "@{uri}foo"` to get the prefixed attribute `"pre:foo"` whose
-   *     prefix `"pre"` is resolved to the namespace `"uri"`.
-   *
-   *  For attribute projections, the resulting [[scala.xml.NodeSeq]] attribute
-   *  values are wrapped in a [[scala.xml.Group]].
-   *
-   *  There is no support for searching a prefixed attribute by its literal prefix.
-   *
-   *  The document order is preserved.
-   */
-  def \(that: String): NodeSeq = {
-    def fail = throw new IllegalArgumentException(that)
-    def atResult = {
-      lazy val y = this(0)
-      val attr =
-        if (that.length == 1) fail
-        else if (that(1) == '{') {
-          val i = that indexOf '}'
-          if (i == -1) fail
-          val (uri, key) = (that.substring(2,i), that.substring(i+1, that.length()))
-          if (uri == "" || key == "") fail
-          else y.attribute(uri, key)
-        }
-        else y.attribute(that drop 1)
-
-      attr match {
-        case Some(x)  => Group(x)
-        case _        => NodeSeq.Empty
-      }
-    }
-
-    def makeSeq(cond: (Node) => Boolean) =
-      NodeSeq fromSeq (this flatMap (_.child) filter cond)
-
-    that match {
-      case ""                                         => fail
-      case "_"                                        => makeSeq(!_.isAtom)
-      case _ if (that(0) == '@' && this.length == 1)  => atResult
-      case _                                          => makeSeq(_.label == that)
-    }
-  }
-
-  /** Projection function, which returns elements of `this` sequence and of
-   *  all its subsequences, based on the string `that`. Use:
-   *   - `this \\ 'foo` to get a list of all elements that are labelled with `"foo"`;
-   *   - `\\ "_"` to get a list of all elements (wildcard);
-   *   - `ns \\ "@foo"` to get the unprefixed attribute `"foo"`;
-   *   - `ns \\ "@{uri}foo"` to get each prefixed attribute `"pre:foo"` whose
-   *     prefix `"pre"` is resolved to the namespace `"uri"`.
-   *
-   *  For attribute projections, the resulting [[scala.xml.NodeSeq]] attribute
-   *  values are wrapped in a [[scala.xml.Group]].
-   *
-   *  There is no support for searching a prefixed attribute by its literal prefix.
-   *
-   *  The document order is preserved.
-   */
-  def \\ (that: String): NodeSeq = {
-    def filt(cond: (Node) => Boolean) = this flatMap (_.descendant_or_self) filter cond
-    that match {
-      case "_"                  => filt(!_.isAtom)
-      case _ if that(0) == '@'  => filt(!_.isAtom) flatMap (_ \ that)
-      case _                    => filt(x => !x.isAtom && x.label == that)
-    }
-  }
-
-  override def toString(): String = theSeq.mkString
-
-  def text: String = (this map (_.text)).mkString
-}
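
A sketch of the \ and \\ projections defined above (the <library> sample is invented; note that the "@attr" form of \ only applies when the NodeSeq has length one, as the code shows):

    import scala.xml._

    val xml =
      <library>
        <book isbn="123"><title>Dune</title></book>
        <book isbn="456"><title>Emma</title></book>
      </library>

    val books: NodeSeq = xml \ "book"
    println(books.length)                                  // 2
    println((xml \\ "title").map(_.text).mkString(", "))   // Dune, Emma
    println((books.head \ "@isbn").text)                   // 123
    println((xml \\ "@isbn").map(_.text).mkString(", "))   // 123, 456
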
diff --git a/src/library/scala/xml/Null.scala b/src/library/scala/xml/Null.scala
deleted file mode 100644
index b39ef5d..0000000
--- a/src/library/scala/xml/Null.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-import Utility.isNameStart
-import scala.collection.Iterator
-
-/** Essentially, every method in here is a dummy, returning Zero[T].
- *  It provides a backstop for the unusual collection defined by MetaData,
- *  sort of a linked list of tails.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-case object Null extends MetaData {
-  override def iterator = Iterator.empty
-  override def size = 0
-  override def append(m: MetaData, scope: NamespaceBinding = TopScope): MetaData = m
-  override def filter(f: MetaData => Boolean): MetaData = this
-
-  def copy(next: MetaData) = next
-  def getNamespace(owner: Node) = null
-
-  override def hasNext = false
-  def next = null
-  def key = null
-  def value = null
-  def isPrefixed = false
-
-  override def length = 0
-  override def length(i: Int) = i
-
-  override def strict_==(other: Equality) = other match {
-    case x: MetaData  => x.length == 0
-    case _            => false
-  }
-  override protected def basisForHashCode: Seq[Any] = Nil
-
-  def apply(namespace: String, scope: NamespaceBinding, key: String) = null
-  def apply(key: String) =
-    if (isNameStart(key.head)) null
-    else throw new IllegalArgumentException("not a valid attribute name '"+key+"', so can never match !")
-
-  protected def toString1(sb: StringBuilder) = ()
-  override protected def toString1(): String = ""
-
-  override def toString(): String = ""
-
-  override def buildString(sb: StringBuilder): StringBuilder = sb
-
-  override def wellformed(scope: NamespaceBinding) = true
-
-  def remove(key: String) = this
-  def remove(namespace: String, scope: NamespaceBinding, key: String) = this
-}
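
A tiny sketch of Null as the empty attribute list (UnprefixedAttribute is defined further down in this patch; the names are invented):

    import scala.xml._

    val attrs: MetaData = new UnprefixedAttribute("lang", "en", Null)

    println(Null.length)         // 0
    println(Null("lang"))        // null: no such attribute
    println(attrs.next eq Null)  // true: Null terminates the chain
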
diff --git a/src/library/scala/xml/PCData.scala b/src/library/scala/xml/PCData.scala
deleted file mode 100644
index 64818a9..0000000
--- a/src/library/scala/xml/PCData.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/** This class (which is not used by all XML parsers, but always used by the
- *  XHTML one) represents parseable character data, which appeared as CDATA
- *  sections in the input and is to be preserved as CDATA section in the output.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-class PCData(data: String) extends Atom[String](data) {
-
-  /** Returns text, with some characters escaped according to the XML
-   *  specification.
-   *
-   *  @param  sb the input string buffer associated to some XML element
-   *  @return the input string buffer with the formatted CDATA section
-   */
-  override def buildString(sb: StringBuilder): StringBuilder =
-    sb append "<![CDATA[%s]]>".format(data)
-}
-
-/** This singleton object contains the `apply` and `unapply` methods for
- *  convenient construction and deconstruction.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-object PCData {
-  def apply(data: String) = new PCData(data)
-  def unapply(other: Any): Option[String] = other match {
-    case x: PCData => Some(x.data)
-    case _         => None
-  }
-}
-
diff --git a/src/library/scala/xml/PrefixedAttribute.scala b/src/library/scala/xml/PrefixedAttribute.scala
deleted file mode 100644
index 429cd68..0000000
--- a/src/library/scala/xml/PrefixedAttribute.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-
-/** Prefixed attributes always have a non-null namespace.
- *
- *  @param pre   the namespace prefix
- *  @param key   the attribute key
- *  @param value the attribute value
- *  @param next1 the rest of the attribute chain
- */
-class PrefixedAttribute(
-  val pre: String,
-  val key: String,
-  val value: Seq[Node],
-  val next1: MetaData)
-extends Attribute
-{
-  val next = if (value ne null) next1 else next1.remove(key)
-
-  /** same as this(pre, key, Text(value), next), or no attribute if value is null */
-  def this(pre: String, key: String, value: String, next: MetaData) =
-    this(pre, key, if (value ne null) Text(value) else null: NodeSeq, next)
-
-  /** same as this(pre, key, value.get, next), or no attribute if value is None */
-  def this(pre: String, key: String, value: Option[Seq[Node]], next: MetaData) =
-    this(pre, key, value.orNull, next)
-
-  /** Returns a copy of this unprefixed attribute with the given
-   *  next field.
-   */
-  def copy(next: MetaData) =
-    new PrefixedAttribute(pre, key, value, next)
-
-  def getNamespace(owner: Node) =
-    owner.getNamespace(pre)
-
-  /** forwards the call to next (because the caller looks for an unprefixed attribute) */
-  def apply(key: String): Seq[Node] = next(key)
-
-  /** gets attribute value of qualified (prefixed) attribute with given key
-   */
-  def apply(namespace: String, scope: NamespaceBinding, key: String): Seq[Node] = {
-    if (key == this.key && scope.getURI(pre) == namespace)
-      value
-    else
-      next(namespace, scope, key)
-  }
-}
-
-object PrefixedAttribute {
-  def unapply(x: PrefixedAttribute) = Some((x.pre, x.key, x.value, x.next))
-}
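
A short sketch of prefixed vs. unprefixed lookup on the class above (prefix, URI and values are invented):

    import scala.xml._

    val scope = NamespaceBinding("ex", "http://example.org/ns", TopScope)
    val attrs: MetaData =
      new PrefixedAttribute("ex", "id", "7", new UnprefixedAttribute("lang", "en", Null))

    println(attrs("lang"))                                // en: unprefixed lookup skips ex:id
    println(attrs("http://example.org/ns", scope, "id"))  // 7: prefixed lookup needs URI and scope
    println(attrs.prefixedKey)                            // ex:id
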
diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala
deleted file mode 100755
index 39ff8c3..0000000
--- a/src/library/scala/xml/PrettyPrinter.scala
+++ /dev/null
@@ -1,263 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-import Utility.sbToString
-
-/** Class for pretty printing. After instantiating, you can use the
- *  format() and formatNode() methods to convert XML to a formatted
- *  string. The class can be reused to pretty print any number of
- *  XML nodes.
- *
- *  @author  Burak Emir
- *  @version 1.0
- *
- *  @param width the width to fit the output into
- *  @param step  indentation
- */
-class PrettyPrinter(width: Int, step: Int) {
-
-  class BrokenException() extends java.lang.Exception
-
-  class Item
-  case object Break extends Item {
-    override def toString() = "\\"
-  }
-  case class Box(col: Int, s: String) extends Item
-  case class Para(s: String) extends Item
-
-  protected var items: List[Item] = Nil
-
-  protected var cur = 0
-
-  protected def reset() = {
-    cur = 0
-    items = Nil
-  }
-
-  /** Try to cut at whitespace.
-   */
-  protected def cut(s: String, ind: Int): List[Item] = {
-    val tmp = width - cur
-    if (s.length <= tmp)
-      return List(Box(ind, s))
-    val sb = new StringBuilder()
-    var i = s indexOf ' '
-    if (i > tmp || i == -1) throw new BrokenException() // cannot break
-
-    var last: List[Int] = Nil
-    while (i != -1 && i < tmp) {
-      last = i::last
-      i = s.indexOf(' ', i+1)
-    }
-    var res: List[Item] = Nil
-    while (Nil != last) try {
-      val b = Box(ind, s.substring(0, last.head))
-      cur = ind
-      res = b :: Break :: cut(s.substring(last.head, s.length), ind)
-       // backtrack
-      last = last.tail
-    } catch {
-      case _:BrokenException => last = last.tail
-    }
-    throw new BrokenException()
-  }
-
-  /** Try to make indented box, if possible, else para.
-   */
-  protected def makeBox(ind: Int, s: String) =
-    if (cur + s.length > width) {            // fits in this line
-      items ::= Box(ind, s)
-      cur += s.length
-    }
-    else try cut(s, ind) foreach (items ::= _)            // break it up
-    catch { case _: BrokenException => makePara(ind, s) } // give up, para
-
-  // dont respect indent in para, but afterwards
-  protected def makePara(ind: Int, s: String) = {
-    items = Break::Para(s)::Break::items
-    cur = ind
-  }
-
-  // respect indent
-  protected def makeBreak() = { // using wrapping here...
-    items = Break :: items
-    cur = 0
-  }
-
-  protected def leafTag(n: Node) = {
-    def mkLeaf(sb: StringBuilder) {
-      sb append '<'
-      n nameToString sb
-      n.attributes buildString sb
-      sb append "/>"
-    }
-    sbToString(mkLeaf)
-  }
-
-  protected def startTag(n: Node, pscope: NamespaceBinding): (String, Int) = {
-    var i = 0
-    def mkStart(sb: StringBuilder) {
-      sb append '<'
-      n nameToString sb
-      i = sb.length + 1
-      n.attributes buildString sb
-      n.scope.buildString(sb, pscope)
-      sb append '>'
-    }
-    (sbToString(mkStart), i)
-  }
-
-  protected def endTag(n: Node) = {
-    def mkEnd(sb: StringBuilder) {
-      sb append "</"
-      n nameToString sb
-      sb append '>'
-    }
-    sbToString(mkEnd)
-  }
-
-  protected def childrenAreLeaves(n: Node): Boolean = {
-    def isLeaf(l: Node) = l match {
-      case _:Atom[_] | _:Comment | _:EntityRef | _:ProcInstr  => true
-      case _                                                  => false
-    }
-    n.child forall isLeaf
-  }
-
-  protected def fits(test: String) =
-    test.length < width - cur
-
-  private def doPreserve(node: Node) =
-    node.attribute(XML.namespace, XML.space).map(_.toString == XML.preserve) getOrElse false
-
-  protected def traverse(node: Node, pscope: NamespaceBinding, ind: Int): Unit =  node match {
-
-      case Text(s) if s.trim() == "" =>
-        ;
-      case _:Atom[_] | _:Comment | _:EntityRef | _:ProcInstr =>
-        makeBox( ind, node.toString.trim() )
-      case g @ Group(xs) =>
-        traverse(xs.iterator, pscope, ind)
-      case _ =>
-        val test = {
-          val sb = new StringBuilder()
-          Utility.serialize(node, pscope, sb, false)
-          if (doPreserve(node)) sb.toString
-          else TextBuffer.fromString(sb.toString).toText(0).data
-        }
-        if (childrenAreLeaves(node) && fits(test)) {
-          makeBox(ind, test)
-        } else {
-          val (stg, len2) = startTag(node, pscope)
-          val etg = endTag(node)
-          if (stg.length < width - cur) { // start tag fits
-            makeBox(ind, stg)
-            makeBreak()
-            traverse(node.child.iterator, node.scope, ind + step)
-            makeBox(ind, etg)
-          } else if (len2 < width - cur) {
-            // <start label + attrs + tag + content + end tag
-            makeBox(ind, stg.substring(0, len2))
-            makeBreak() // todo: break the rest in pieces
-            /*{ //@todo
-             val sq:Seq[String] = stg.split(" ");
-             val it = sq.iterator;
-             it.next;
-             for (c <- it) {
-               makeBox(ind+len2-2, c)
-               makeBreak()
-             }
-             }*/
-            makeBox(ind, stg.substring(len2, stg.length))
-            makeBreak()
-            traverse(node.child.iterator, node.scope, ind + step)
-            makeBox(cur, etg)
-            makeBreak()
-          } else { // give up
-            makeBox(ind, test)
-            makeBreak()
-          }
-        }
-  }
-
-  protected def traverse(it: Iterator[Node], scope: NamespaceBinding, ind: Int ): Unit =
-    for (c <- it) {
-      traverse(c, scope, ind)
-      makeBreak()
-    }
-
-  /** Appends a formatted string containing well-formed XML with
-   *  given namespace to prefix mapping to the given string buffer.
-   *
-   * @param n    the node to be serialized
-   * @param sb   the stringbuffer to append to
-   */
-  def format(n: Node, sb: StringBuilder) { // entry point
-    format(n, null, sb)
-  }
-
-  def format(n: Node, pscope: NamespaceBinding, sb: StringBuilder) { // entry point
-    var lastwasbreak = false
-    reset()
-    traverse(n, pscope, 0)
-    var cur = 0
-    for (b <- items.reverse) b match {
-      case Break =>
-        if (!lastwasbreak) sb.append('\n')  // on windows: \r\n ?
-        lastwasbreak = true
-        cur = 0
-//        while (cur < last) {
-//          sb append ' '
-//          cur += 1
-//        }
-
-      case Box(i, s) =>
-        lastwasbreak = false
-        while (cur < i) {
-          sb append ' '
-          cur += 1
-        }
-        sb.append(s)
-      case Para( s ) =>
-        lastwasbreak = false
-        sb append s
-    }
-  }
-
-  // public convenience methods
-
-  /** Returns a formatted string containing well-formed XML with
-   *  given namespace to prefix mapping.
-   *
-   *  @param n      the node to be serialized
-   *  @param pscope the namespace to prefix mapping
-   *  @return      the formatted string
-   */
-  def format(n: Node, pscope: NamespaceBinding = null): String =
-    sbToString(format(n, pscope, _))
-
-  /** Returns a formatted string containing well-formed XML.
-   *
-   *  @param nodes  the sequence of nodes to be serialized
-   *  @param pscope the namespace to prefix mapping
-   */
-  def formatNodes(nodes: Seq[Node], pscope: NamespaceBinding = null): String =
-    sbToString(formatNodes(nodes, pscope, _))
-
-  /** Appends a formatted string containing well-formed XML with
-   *  the given namespace to prefix mapping to the given stringbuffer.
-   *
-   *  @param nodes  the nodes to be serialized
-   *  @param pscope the namespace to prefix mapping
-   *  @param sb     the string buffer to which to append to
-   */
-  def formatNodes(nodes: Seq[Node], pscope: NamespaceBinding, sb: StringBuilder): Unit =
-    nodes foreach (n => sb append format(n, pscope))
-}
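
A minimal sketch of the PrettyPrinter entry points above; exact line breaks depend on the width argument, but for this invented document the output is roughly as shown:

    import scala.xml._

    val doc = <html><body><p>Hello, pretty printing</p></body></html>

    // wrap at 40 columns, indent nested elements by 2 spaces
    val printer = new PrettyPrinter(40, 2)
    println(printer.format(doc))
    // <html>
    //   <body>
    //     <p>Hello, pretty printing</p>
    //   </body>
    // </html>
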
diff --git a/src/library/scala/xml/ProcInstr.scala b/src/library/scala/xml/ProcInstr.scala
deleted file mode 100644
index 64a9dd5..0000000
--- a/src/library/scala/xml/ProcInstr.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-
-/** an XML node for processing instructions (PI)
- *
- * @author Burak Emir
- * @param  target     target name of this PI
- * @param  proctext   text contained in this node, may not contain "?>"
- */
-case class ProcInstr(target: String, proctext: String) extends SpecialNode
-{
-  if (!Utility.isName(target))
-    throw new IllegalArgumentException(target+" must be an XML Name")
-  if (proctext contains "?>")
-    throw new IllegalArgumentException(proctext+" may not contain \"?>\"")
-  if (target.toLowerCase == "xml")
-    throw new IllegalArgumentException(target+" is reserved")
-
-  final override def doCollectNamespaces = false
-  final override def doTransform         = false
-
-  final def label   = "#PI"
-  override def text = ""
-
-  /** appends "<?" target (" "+text)?+"?>"
-   *  to this stringbuffer.
-   */
-  override def buildString(sb: StringBuilder) =
-    sb append "<?%s%s?>".format(target, (if (proctext == "") "" else " " + proctext))
-}
diff --git a/src/library/scala/xml/QNode.scala b/src/library/scala/xml/QNode.scala
deleted file mode 100644
index d4d3872..0000000
--- a/src/library/scala/xml/QNode.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/** This object provides an extractor method to match a qualified node with
- *  its namespace URI
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-object QNode {
-  def unapplySeq(n: Node) = Some((n.scope.getURI(n.prefix), n.label, n.attributes, n.child))
-}
diff --git a/src/library/scala/xml/SpecialNode.scala b/src/library/scala/xml/SpecialNode.scala
deleted file mode 100644
index 4c1b81c..0000000
--- a/src/library/scala/xml/SpecialNode.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/** `SpecialNode` is a special XML node which represents either text
- *  `(PCDATA)`, a comment, a `PI`, or an entity ref.
- *
- *  `SpecialNode`s also play the role of [[scala.xml.pull.XMLEvent]]s for
- *  pull-parsing.
- *
- *  @author Burak Emir
- */
-abstract class SpecialNode extends Node with pull.XMLEvent {
-
-  /** always empty */
-  final override def attributes = Null
-
-  /** always Node.EmptyNamespace */
-  final override def namespace = null
-
-  /** always empty */
-  final def child = Nil
-
-  /** Append string representation to the given string buffer argument. */
-  def buildString(sb: StringBuilder): StringBuilder
-}
diff --git a/src/library/scala/xml/Text.scala b/src/library/scala/xml/Text.scala
deleted file mode 100644
index 782c80f..0000000
--- a/src/library/scala/xml/Text.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/** The class `Text` implements an XML node for text (PCDATA).
- *  It is used in both non-bound and bound XML representations.
- *
- *  @author Burak Emir
- *  @param data the text contained in this node, may not be null.
- */
-class Text(data: String) extends Atom[String](data) {
-
-  /** Returns text, with some characters escaped according to the XML
-   *  specification.
-   */
-  override def buildString(sb: StringBuilder): StringBuilder =
-    Utility.escape(data, sb)
-}
-
-/** This singleton object contains the `apply` and `unapply` methods for
- *  convenient construction and deconstruction.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-object Text {
-  def apply(data: String) = new Text(data)
-  def unapply(other: Any): Option[String] = other match {
-    case x: Text => Some(x.data)
-    case _       => None
-  }
-}
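
A side-by-side sketch of Text (which escapes markup characters, per the class above) and PCData (defined earlier in this patch, which preserves them in a CDATA section); the sample string is invented:

    import scala.xml._

    val raw = "if (a < b) a & b"

    println(Text(raw).buildString(new StringBuilder))    // if (a &lt; b) a &amp; b
    println(PCData(raw).buildString(new StringBuilder))  // <![CDATA[if (a < b) a & b]]>
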
diff --git a/src/library/scala/xml/TextBuffer.scala b/src/library/scala/xml/TextBuffer.scala
deleted file mode 100644
index 0b96379..0000000
--- a/src/library/scala/xml/TextBuffer.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-
-import Utility.isSpace
-
-object TextBuffer {
-  def fromString(str: String): TextBuffer = new TextBuffer() append str
-}
-
-/** The class `TextBuffer` is for creating text nodes without surplus
- *  whitespace. All occurrences of one or more whitespace in strings
- *  appended with the `append` method will be replaced by a single space
- *  character, and leading and trailing space will be removed completely.
- */
-class TextBuffer
-{
-  val sb = new StringBuilder()
-
-  /** Appends this string to the text buffer, trimming whitespaces as needed.
-   */
-  def append(cs: Seq[Char]): this.type = {
-    cs foreach { c =>
-      if (!isSpace(c)) sb append c
-      else if (sb.isEmpty || !isSpace(sb.last)) sb append ' '
-    }
-    this
-  }
-
-  /** Returns an empty sequence if text is only whitespace.
-   *
-   *  @return the text without whitespaces.
-   */
-  def toText: Seq[Text] = sb.toString.trim match {
-    case "" => Nil
-    case s  => Seq(Text(s))
-  }
-}
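
A quick sketch of the whitespace-collapsing behaviour described above (the input strings are invented):

    import scala.xml._

    println(TextBuffer.fromString("  hello   \n\t world  ").toText)  // List(hello world)
    println(TextBuffer.fromString("   \n ").toText)                  // List(): whitespace-only input yields no nodes
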
diff --git a/src/library/scala/xml/TopScope.scala b/src/library/scala/xml/TopScope.scala
deleted file mode 100644
index 1ed1d50..0000000
--- a/src/library/scala/xml/TopScope.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-
-/** Top level namespace scope. Only contains the predefined binding
- *  for the "xml" prefix which is bound to
- *  "http://www.w3.org/XML/1998/namespace"
- */
-object TopScope extends NamespaceBinding(null, null, null) {
-
-  import XML.{ xml, namespace }
-
-  override def getURI(prefix1: String): String =
-    if (prefix1 == xml) namespace else null
-
-  override def getPrefix(uri1: String): String =
-    if (uri1 == namespace) xml else null
-
-  override def toString() = ""
-
-  override def buildString(stop: NamespaceBinding) = ""
-  override def buildString(sb: StringBuilder, ignore: NamespaceBinding) = {}
-}
diff --git a/src/library/scala/xml/TypeSymbol.scala b/src/library/scala/xml/TypeSymbol.scala
deleted file mode 100644
index f02c026..0000000
--- a/src/library/scala/xml/TypeSymbol.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-
-
-abstract class TypeSymbol
diff --git a/src/library/scala/xml/Unparsed.scala b/src/library/scala/xml/Unparsed.scala
deleted file mode 100644
index ef80823..0000000
--- a/src/library/scala/xml/Unparsed.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-/** An XML node for unparsed content. It will be output verbatim, all bets
- *  are off regarding wellformedness etc.
- *
- *  @author Burak Emir
- *  @param data content in this node, may not be null.
- */
-class Unparsed(data: String) extends Atom[String](data) {
-
-  /** Returns text, with some characters escaped according to XML
-   *  specification.
-   */
-  override def buildString(sb: StringBuilder): StringBuilder =
-    sb append data
-}
-
-/** This singleton object contains the `apply` and `unapply` methods for
- *  convenient construction and deconstruction.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-object Unparsed {
-  def apply(data: String) = new Unparsed(data)
-  def unapply(x: Unparsed) = Some(x.data)
-}
diff --git a/src/library/scala/xml/UnprefixedAttribute.scala b/src/library/scala/xml/UnprefixedAttribute.scala
deleted file mode 100644
index 2985591..0000000
--- a/src/library/scala/xml/UnprefixedAttribute.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-
-/** Unprefixed attributes have the null namespace, and no prefix field
- *
- *  @author Burak Emir
- */
-class UnprefixedAttribute(
-  val key: String,
-  val value: Seq[Node],
-  next1: MetaData)
-extends Attribute
-{
-  final val pre = null
-  val next = if (value ne null) next1 else next1.remove(key)
-
-  /** same as this(key, Text(value), next), or no attribute if value is null */
-  def this(key: String, value: String, next: MetaData) =
-    this(key, if (value ne null) Text(value) else null: NodeSeq, next)
-
-  /** same as this(key, value.get, next), or no attribute if value is None */
-  def this(key: String, value: Option[Seq[Node]], next: MetaData) =
-    this(key, value.orNull, next)
-
-  /** returns a copy of this unprefixed attribute with the given next field*/
-  def copy(next: MetaData) = new UnprefixedAttribute(key, value, next)
-
-  final def getNamespace(owner: Node): String = null
-
-  /**
-   * Gets value of unqualified (unprefixed) attribute with given key, null if not found
-   *
-   * @param  key
-   * @return value as Seq[Node] if key is found, null otherwise
-   */
-  def apply(key: String): Seq[Node] =
-    if (key == this.key) value else next(key)
-
-  /**
-   * Forwards the call to next (because caller looks for prefixed attribute).
-   *
-   * @param  namespace
-   * @param  scope
-   * @param  key
-   * @return ..
-   */
-  def apply(namespace: String, scope: NamespaceBinding, key: String): Seq[Node] =
-    next(namespace, scope, key)
-}
-object UnprefixedAttribute {
-  def unapply(x: UnprefixedAttribute) = Some((x.key, x.value, x.next))
-}
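
A sketch of walking an attribute chain with the companion extractors above (the <user> element, its names and the helper describe are invented for illustration):

    import scala.xml._

    val el = <user name="ada" ex:role="admin" xmlns:ex="http://example.org/ns"/>

    // recurse over the MetaData chain until the Null terminator
    def describe(md: MetaData): Unit = md match {
      case UnprefixedAttribute(key, value, rest) =>
        println(key + "=" + value.mkString); describe(rest)
      case PrefixedAttribute(pre, key, value, rest) =>
        println(pre + ":" + key + "=" + value.mkString); describe(rest)
      case Null =>
        println("(end of attributes)")
    }

    describe(el.attributes)  // prints name=ada and ex:role=admin (order unspecified), then the terminator
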
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
deleted file mode 100755
index 030a897..0000000
--- a/src/library/scala/xml/Utility.scala
+++ /dev/null
@@ -1,409 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-import scala.collection.mutable
-import parsing.XhtmlEntities
-import scala.language.implicitConversions
-
-/**
- * The `Utility` object provides utility functions for processing instances
- * of bound and not bound XML classes, as well as escaping text nodes.
- *
- * @author Burak Emir
- */
-object Utility extends AnyRef with parsing.TokenTests {
-  final val SU = '\u001A'
-
-  // [Martin] This looks dubious. We don't convert StringBuilders to
-  // Strings anywhere else, why do it here?
-  implicit def implicitSbToString(sb: StringBuilder) = sb.toString()
-
-  // helper for the extremely oft-repeated sequence of creating a
-  // StringBuilder, passing it around, and then grabbing its String.
-  private [xml] def sbToString(f: (StringBuilder) => Unit): String = {
-    val sb = new StringBuilder
-    f(sb)
-    sb.toString
-  }
-  private[xml] def isAtomAndNotText(x: Node) = x.isAtom && !x.isInstanceOf[Text]
-
-  /** Trims an element - call this method when you know that it is an
-   *  element (and not a text node), so you know that it will not be trimmed
-   *  away. With this assumption, the function can return a `Node`, rather
-   *  than a `Seq[Node]`. If you don't know, call `trimProper` and account
-   *  for the fact that you may get back an empty sequence of nodes.
-   *
-   *  Precondition: node is not a text node (it might be trimmed)
-   */
-  def trim(x: Node): Node = x match {
-    case Elem(pre, lab, md, scp, child at _*) =>
-      Elem(pre, lab, md, scp, (child flatMap trimProper):_*)
-  }
-
-  /** trim a child of an element. `Attribute` values and `Atom` nodes that
-   *  are not `Text` nodes are unaffected.
-   */
-  def trimProper(x:Node): Seq[Node] = x match {
-    case Elem(pre,lab,md,scp,child at _*) =>
-      Elem(pre,lab,md,scp, (child flatMap trimProper):_*)
-    case Text(s) =>
-      new TextBuffer().append(s).toText
-    case _ =>
-      x
-  }
-
-  /** returns a sorted attribute list */
-  def sort(md: MetaData): MetaData = if((md eq Null) || (md.next eq Null)) md else {
-    val key = md.key
-    val smaller = sort(md.filter { m => m.key < key })
-    val greater = sort(md.filter { m => m.key > key })
-    smaller.foldRight (md copy greater) ((x, xs) => x copy xs)
-  }
-
-  /** Return the node with its attribute list sorted alphabetically
-   *  (prefixes are ignored) */
-  def sort(n:Node): Node = n match {
-	case Elem(pre,lab,md,scp,child at _*) =>
-      Elem(pre,lab,sort(md),scp, (child map sort):_*)
-    case _ => n
-  }
-
-  /**
-   * Escapes the characters < > & and " from string.
-   */
-  final def escape(text: String): String = sbToString(escape(text, _))
-
-  object Escapes {
-    /** For reasons unclear, escape and unescape are a long way from
-      * being logical inverses. */
-    val pairs = Map(
-      "lt"    -> '<',
-      "gt"    -> '>',
-      "amp"   -> '&',
-      "quot"  -> '"'
-      // enigmatic comment explaining why this isn't escaped --
-      // is valid xhtml but not html, and IE doesn't know it, says jweb
-      // "apos"  -> '\''
-    )
-    val escMap    = pairs map { case (s, c) => c-> ("&%s;" format s) }
-    val unescMap  = pairs ++ Map("apos"  -> '\'')
-  }
-  import Escapes.{ escMap, unescMap }
-
-  /**
-   * Appends escaped string to `s`.
-   */
-  final def escape(text: String, s: StringBuilder): StringBuilder = {
-    // Implemented per XML spec:
-    // http://www.w3.org/International/questions/qa-controls
-    // imperative code 3x-4x faster than current implementation
-    // dpp (David Pollak) 2010/02/03
-    val len = text.length
-    var pos = 0
-    while (pos < len) {
-      text.charAt(pos) match {
-        case '<' => s.append("&lt;")
-        case '>' => s.append("&gt;")
-        case '&' => s.append("&amp;")
-        case '"' => s.append("&quot;")
-        case '\n' => s.append('\n')
-        case '\r' => s.append('\r')
-        case '\t' => s.append('\t')
-        case c => if (c >= ' ') s.append(c)
-      }
-
-      pos += 1
-    }
-    s
-  }
-
-  /**
-   * Appends unescaped string to `s`, `amp` becomes `&`,
-   * `lt` becomes `<` etc..
-   *
-   * @return    `'''null'''` if `ref` was not a predefined entity.
-   */
-  final def unescape(ref: String, s: StringBuilder): StringBuilder =
-    ((unescMap get ref) map (s append _)).orNull
-
-  /**
-   * Returns a set of all namespaces used in a sequence of nodes
-   * and all their descendants, including the empty namespaces.
-   */
-  def collectNamespaces(nodes: Seq[Node]): mutable.Set[String] =
-    nodes.foldLeft(new mutable.HashSet[String]) { (set, x) => collectNamespaces(x, set) ; set }
-
-  /**
-   * Adds all namespaces in node to set.
-   */
-  def collectNamespaces(n: Node, set: mutable.Set[String]) {
-    if (n.doCollectNamespaces) {
-      set += n.namespace
-      for (a <- n.attributes) a match {
-        case _:PrefixedAttribute =>
-          set += a.getNamespace(n)
-        case _ =>
-      }
-      for (i <- n.child)
-        collectNamespaces(i, set)
-    }
-  }
-
-  // def toXML(
-  //   x: Node,
-  //   pscope: NamespaceBinding = TopScope,
-  //   sb: StringBuilder = new StringBuilder,
-  //   stripComments: Boolean = false,
-  //   decodeEntities: Boolean = true,
-  //   preserveWhitespace: Boolean = false,
-  //   minimizeTags: Boolean = false): String =
-  // {
-  //   toXMLsb(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
-  //   sb.toString()
-  // }
-
-  /**
-   * Serialize the provided Node to the provided StringBuilder.
-   * <p/>
-   * Note that calling this source-compatible method will result in the same old, arguably almost universally unwanted,
-   * behaviour.
-   */
-  @deprecated("Please use `serialize` instead and specify a `minimizeTags` parameter", "2.10.0")
-  def toXML(
-    x: Node,
-    pscope: NamespaceBinding = TopScope,
-    sb: StringBuilder = new StringBuilder,
-    stripComments: Boolean = false,
-    decodeEntities: Boolean = true,
-    preserveWhitespace: Boolean = false,
-    minimizeTags: Boolean = false): StringBuilder =
-  {
-    serialize(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, if (minimizeTags) MinimizeMode.Always else MinimizeMode.Never)
-  }
-
-  /**
-   * Serialize an XML Node to a StringBuilder.
-   *
-   * This is essentially a minor rework of `toXML` that can't have the same name due to an unfortunate
-   * combination of named/default arguments and overloading.
-   *
-   * @todo use a Writer instead
-   */
-  def serialize(
-    x: Node,
-    pscope: NamespaceBinding = TopScope,
-    sb: StringBuilder = new StringBuilder,
-    stripComments: Boolean = false,
-    decodeEntities: Boolean = true,
-    preserveWhitespace: Boolean = false,
-    minimizeTags: MinimizeMode.Value = MinimizeMode.Default): StringBuilder =
-  {
-    x match {
-      case c: Comment if !stripComments => c buildString sb
-      case s: SpecialNode               => s buildString sb
-      case g: Group                     => for (c <- g.nodes) serialize(c, g.scope, sb, minimizeTags = minimizeTags) ; sb
-      case el: Elem  =>
-        // print tag with namespace declarations
-        sb.append('<')
-        el.nameToString(sb)
-        if (el.attributes ne null) el.attributes.buildString(sb)
-        el.scope.buildString(sb, pscope)
-        if (el.child.isEmpty &&
-                (minimizeTags == MinimizeMode.Always ||
-                (minimizeTags == MinimizeMode.Default && el.minimizeEmpty)))
-        {
-          // no children, so use short form: <xyz .../>
-          sb.append("/>")
-        } else {
-          // children, so use long form: <xyz ...>...</xyz>
-          sb.append('>')
-          sequenceToXML(el.child, el.scope, sb, stripComments)
-          sb.append("</")
-          el.nameToString(sb)
-          sb.append('>')
-        }
-      case _ => throw new IllegalArgumentException("Don't know how to serialize a " + x.getClass.getName)
-    }
-  }
-
-  def sequenceToXML(
-    children: Seq[Node],
-    pscope: NamespaceBinding = TopScope,
-    sb: StringBuilder = new StringBuilder,
-    stripComments: Boolean = false,
-    decodeEntities: Boolean = true,
-    preserveWhitespace: Boolean = false,
-    minimizeTags: MinimizeMode.Value = MinimizeMode.Default): Unit =
-  {
-    if (children.isEmpty) return
-    else if (children forall isAtomAndNotText) { // add space
-      val it = children.iterator
-      val f = it.next
-      serialize(f, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
-      while (it.hasNext) {
-        val x = it.next
-        sb.append(' ')
-        serialize(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
-      }
-    }
-    else children foreach { serialize(_, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags) }
-  }
-
-  /**
-   * Returns prefix of qualified name if any.
-   */
-  final def prefix(name: String): Option[String] = (name indexOf ':') match {
-    case -1   => None
-    case i    => Some(name.substring(0, i))
-  }
-
-  /**
-   * Returns a hashcode for the given constituents of a node
-   */
-  def hashCode(pre: String, label: String, attribHashCode: Int, scpeHash: Int, children: Seq[Node]) =
-    scala.util.hashing.MurmurHash3.orderedHash(label +: attribHashCode +: scpeHash +: children, pre.##)
-
-  def appendQuoted(s: String): String = sbToString(appendQuoted(s, _))
-
-  /**
-   * Appends "s" if string `s` does not contain ",
-   * 's' otherwise.
-   */
-  def appendQuoted(s: String, sb: StringBuilder) = {
-    val ch = if (s contains '"') '\'' else '"'
-    sb.append(ch).append(s).append(ch)
-  }
-
-  /**
-   * Appends "s" and escapes and " i s with \"
-   */
-  def appendEscapedQuoted(s: String, sb: StringBuilder): StringBuilder = {
-    sb.append('"')
-    for (c <- s) c match {
-      case '"' => sb.append('\\'); sb.append('"')
-      case _   => sb.append(c)
-    }
-    sb.append('"')
-  }
-
-  def getName(s: String, index: Int): String = {
-    if (index >= s.length) null
-    else {
-      val xs = s drop index
-      if (xs.nonEmpty && isNameStart(xs.head)) xs takeWhile isNameChar
-      else ""
-    }
-  }
-
-  /**
-   * Returns `'''null'''` if the value is a correct attribute value,
-   * error message if it isn't.
-   */
-  def checkAttributeValue(value: String): String = {
-    var i = 0
-    while (i < value.length) {
-      value.charAt(i) match {
-        case '<' =>
-          return "< not allowed in attribute value";
-        case '&' =>
-          val n = getName(value, i+1)
-          if (n eq null)
-            return "malformed entity reference in attribute value ["+value+"]";
-          i = i + n.length + 1
-          if (i >= value.length || value.charAt(i) != ';')
-            return "malformed entity reference in attribute value ["+value+"]";
-        case _   =>
-      }
-      i = i + 1
-    }
-    null
-  }
-
-  def parseAttributeValue(value: String): Seq[Node] = {
-    val sb  = new StringBuilder
-    var rfb: StringBuilder = null
-    val nb = new NodeBuffer()
-
-    val it = value.iterator
-    while (it.hasNext) {
-      var c = it.next
-      // entity! flush buffer into text node
-      if (c == '&') {
-        c = it.next
-        if (c == '#') {
-          c = it.next
-          val theChar = parseCharRef ({ ()=> c },{ () => c = it.next },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)})
-          sb.append(theChar)
-        }
-        else {
-          if (rfb eq null) rfb = new StringBuilder()
-          rfb append c
-          c = it.next
-          while (c != ';') {
-            rfb.append(c)
-            c = it.next
-          }
-          val ref = rfb.toString()
-          rfb.clear()
-          unescape(ref,sb) match {
-            case null =>
-              if (sb.length > 0) {  // flush buffer
-                nb += Text(sb.toString())
-                sb.clear()
-              }
-              nb += EntityRef(ref) // add entityref
-            case _ =>
-          }
-        }
-      }
-      else sb append c
-    }
-    if (sb.length > 0) { // flush buffer
-      val x = Text(sb.toString())
-      if (nb.length == 0)
-        return x
-      else
-        nb += x
-    }
-    nb
-  }
-
-  /**
-   * {{{
-   *   CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
-   *             | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
-   * }}}
-   * See [66]
-   */
-  def parseCharRef(ch: () => Char, nextch: () => Unit, reportSyntaxError: String => Unit, reportTruncatedError: String => Unit): String = {
-    val hex  = (ch() == 'x') && { nextch(); true }
-    val base = if (hex) 16 else 10
-    var i = 0
-    while (ch() != ';') {
-      ch() match {
-        case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
-          i = i * base + ch().asDigit
-        case 'a' | 'b' | 'c' | 'd' | 'e' | 'f'
-           | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' =>
-          if (! hex)
-            reportSyntaxError("hex char not allowed in decimal char ref\n" +
-                              "Did you mean to write &#x ?")
-          else
-            i = i * base + ch().asDigit
-        case SU =>
-          reportTruncatedError("")
-        case _ =>
-          reportSyntaxError("character '" + ch() + "' not allowed in char ref\n")
-      }
-      nextch()
-    }
-    new String(Array(i), 0, 1)
-  }
-}
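
For reference, a minimal sketch of how the Utility helpers above are typically used (assumes scala.xml as shipped before this removal; expected output is shown in the comments):

    import scala.xml._

    object UtilityDemo extends App {
      // escape replaces <, >, & and " with their XML entities
      println(Utility.escape("a < b & \"c\""))        // a &lt; b &amp; &quot;c&quot;

      // trim drops whitespace-only text nodes from an element tree
      println(Utility.trim(<a> <b> hello </b> </a>))  // <a><b>hello</b></a>

      // serialize gives explicit control over empty-tag minimization
      println(Utility.serialize(<img/>, minimizeTags = MinimizeMode.Never))  // <img></img>
    }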
diff --git a/src/library/scala/xml/XML.scala b/src/library/scala/xml/XML.scala
deleted file mode 100755
index d101684..0000000
--- a/src/library/scala/xml/XML.scala
+++ /dev/null
@@ -1,110 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-
-import parsing.NoBindingFactoryAdapter
-import factory.XMLLoader
-import java.io.{ File, FileDescriptor, FileInputStream, FileOutputStream }
-import java.io.{ InputStream, Reader, StringReader, Writer }
-import java.nio.channels.Channels
-import scala.util.control.Exception.ultimately
-
-object Source {
-  def fromFile(file: File)              = new InputSource(new FileInputStream(file))
-  def fromFile(fd: FileDescriptor)      = new InputSource(new FileInputStream(fd))
-  def fromFile(name: String)            = new InputSource(new FileInputStream(name))
-
-  def fromInputStream(is: InputStream)  = new InputSource(is)
-  def fromReader(reader: Reader)        = new InputSource(reader)
-  def fromSysId(sysID: String)          = new InputSource(sysID)
-  def fromString(string: String)        = fromReader(new StringReader(string))
-}
-
-/**
- * Governs how empty elements (i.e. those without child elements) should be serialized.
- */
-object MinimizeMode extends Enumeration {
-  /** Minimize empty tags if they were originally empty when parsed, or if they were constructed
-   *  with [[scala.xml.Elem]]`#minimizeEmpty` == true
-   */
-  val Default = Value
-
-  /** Always minimize empty tags.  Note that this may be problematic for XHTML, in which
-   * case [[scala.xml.Xhtml]]`#toXhtml` should be used instead.
-   */
-  val Always = Value
-
-  /** Never minimize empty tags.
-   */
-  val Never = Value
-}
-
-import Source._
-
-/** The object `XML` provides constants, and functions to load
- *  and save XML elements. Use this when data binding is not desired, i.e.
- *  when XML is handled using `Symbol` nodes.
- *
- *  @author  Burak Emir
- *  @version 1.0, 25/04/2005
- */
-object XML extends XMLLoader[Elem] {
-  val xml       = "xml"
-  val xmlns     = "xmlns"
-  val namespace = "http://www.w3.org/XML/1998/namespace"
-  val preserve  = "preserve"
-  val space     = "space"
-  val lang      = "lang"
-  val encoding  = "ISO-8859-1"
-
-  /** Returns an XMLLoader whose load* methods will use the supplied SAXParser. */
-  def withSAXParser(p: SAXParser): XMLLoader[Elem] =
-    new XMLLoader[Elem] { override val parser: SAXParser = p }
-
-  /** Saves a node to a file with given filename using given encoding
-   *  optionally with xmldecl and doctype declaration.
-   *
-   *  @param filename the filename
-   *  @param node     the xml node we want to write
-   *  @param enc      encoding to use
-   *  @param xmlDecl  if true, write xml declaration
-   *  @param doctype  if not null, write doctype declaration
-   */
-  final def save(
-    filename: String,
-    node: Node,
-    enc: String = encoding,
-    xmlDecl: Boolean = false,
-    doctype: dtd.DocType = null
-    ): Unit =
-  {
-    val fos = new FileOutputStream(filename)
-    val w = Channels.newWriter(fos.getChannel(), enc)
-
-    ultimately(w.close())(
-      write(w, node, enc, xmlDecl, doctype)
-    )
-  }
-
-  /** Writes the given node using writer, optionally with xml decl and doctype.
-   *  It's the caller's responsibility to close the writer.
-   *
-   *  @param w        the writer
-   *  @param node     the xml node we want to write
-   *  @param enc      the string to be used in `xmlDecl`
-   *  @param xmlDecl  if true, write xml declaration
-   *  @param doctype  if not null, write doctype declaration
-   */
-  final def write(w: java.io.Writer, node: Node, enc: String, xmlDecl: Boolean, doctype: dtd.DocType, minimizeTags: MinimizeMode.Value = MinimizeMode.Default) {
-    /* TODO: optimize by giving writer parameter to toXML*/
-    if (xmlDecl) w.write("<?xml version='1.0' encoding='" + enc + "'?>\n")
-    if (doctype ne null) w.write( doctype.toString() + "\n")
-    w.write(Utility.serialize(node, minimizeTags = minimizeTags).toString)
-  }
-}
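
A short, hedged sketch of the load/save round trip the XML object supports (the file name, element names, and DOCTYPE below are invented for illustration):

    import scala.xml.XML
    import scala.xml.dtd.{ DocType, SystemID }

    object XmlRoundTrip extends App {
      val page = XML.loadString("<page><title>Hello</title></page>")
      println((page \ "title").text)              // Hello

      // save with an XML declaration and a DOCTYPE pointing at a (made-up) DTD
      XML.save("page.xml", page, enc = "UTF-8", xmlDecl = true,
               doctype = DocType("page", SystemID("page.dtd"), Nil))

      // loading it back uses the default JDK SAX parser
      assert((XML.loadFile("page.xml") \ "title").text == "Hello")
    }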
diff --git a/src/library/scala/xml/Xhtml.scala b/src/library/scala/xml/Xhtml.scala
deleted file mode 100644
index 6730548..0000000
--- a/src/library/scala/xml/Xhtml.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-
-package scala.xml
-
-import parsing.XhtmlEntities
-import Utility.{ sbToString, isAtomAndNotText }
-
-/* (c) David Pollak  2007 WorldWide Conferencing, LLC */
-
-object Xhtml
-{
-  /**
-   * Convenience function: same as toXhtml(node, false, false)
-   *
-   * @param node      the node
-   */
-  def toXhtml(node: Node): String = sbToString(sb => toXhtml(x = node, sb = sb))
-
-  /**
-   * Convenience function: amounts to calling toXhtml(node) on each
-   * node in the sequence.
-   *
-   * @param nodeSeq   the node sequence
-   */
-  def toXhtml(nodeSeq: NodeSeq): String = sbToString(sb => sequenceToXML(nodeSeq: Seq[Node], sb = sb))
-
-  /** Elements which we believe are safe to minimize if minimizeTags is true.
-   *  See http://www.w3.org/TR/xhtml1/guidelines.html#C_3
-   */
-  private val minimizableElements =
-    List("base", "meta", "link", "hr", "br", "param", "img", "area", "input", "col")
-
-  def toXhtml(
-    x: Node,
-    pscope: NamespaceBinding = TopScope,
-    sb: StringBuilder = new StringBuilder,
-    stripComments: Boolean = false,
-    decodeEntities: Boolean = false,
-    preserveWhitespace: Boolean = false,
-    minimizeTags: Boolean = true): Unit =
-  {
-    def decode(er: EntityRef) = XhtmlEntities.entMap.get(er.entityName) match {
-      case Some(chr) if chr.toInt >= 128  => sb.append(chr)
-      case _                              => er.buildString(sb)
-    }
-    def shortForm =
-      minimizeTags &&
-      (x.child == null || x.child.length == 0) &&
-      (minimizableElements contains x.label)
-
-    x match {
-      case c: Comment                       => if (!stripComments) c buildString sb
-      case er: EntityRef if decodeEntities  => decode(er)
-      case x: SpecialNode                   => x buildString sb
-      case g: Group                         =>
-        g.nodes foreach { toXhtml(_, x.scope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags) }
-
-      case _  =>
-        sb.append('<')
-        x.nameToString(sb)
-        if (x.attributes ne null) x.attributes.buildString(sb)
-        x.scope.buildString(sb, pscope)
-
-        if (shortForm) sb.append(" />")
-        else {
-          sb.append('>')
-          sequenceToXML(x.child, x.scope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
-          sb.append("</")
-          x.nameToString(sb)
-          sb.append('>')
-        }
-    }
-  }
-
-  /**
-   * Amounts to calling toXhtml(node, ...) with the given parameters on each node.
-   */
-  def sequenceToXML(
-    children: Seq[Node],
-    pscope: NamespaceBinding = TopScope,
-    sb: StringBuilder = new StringBuilder,
-    stripComments: Boolean = false,
-    decodeEntities: Boolean = false,
-    preserveWhitespace: Boolean = false,
-    minimizeTags: Boolean = true): Unit =
-  {
-    if (children.isEmpty)
-      return
-
-    val doSpaces = children forall isAtomAndNotText // interleave spaces
-    for (c <- children.take(children.length - 1)) {
-      toXhtml(c, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
-      if (doSpaces) sb append ' '
-    }
-    toXhtml(children.last, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
-  }
-}
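
Compared with Utility.serialize, Xhtml.toXhtml decodes known entities and minimizes only the void elements listed in minimizableElements; a small sketch (the expected output in the comment is an assumption):

    import scala.xml.Xhtml

    object XhtmlDemo extends App {
      val fragment = <div><br/><p>hi</p></div>
      // <br/> is minimizable, <p> is not, so an empty <p/> would still be written as <p></p>
      println(Xhtml.toXhtml(fragment))   // <div><br /><p>hi</p></div>
    }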
diff --git a/src/library/scala/xml/dtd/ContentModel.scala b/src/library/scala/xml/dtd/ContentModel.scala
deleted file mode 100644
index abc71f5..0000000
--- a/src/library/scala/xml/dtd/ContentModel.scala
+++ /dev/null
@@ -1,120 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package dtd
-
-import scala.util.regexp.WordExp
-import scala.util.automata._
-import scala.xml.Utility.sbToString
-import PartialFunction._
-
-object ContentModel extends WordExp {
-  type _labelT = ElemName
-  type _regexpT = RegExp
-
-  object Translator extends WordBerrySethi {
-    override val lang: ContentModel.this.type = ContentModel.this
-  }
-
-  case class ElemName(name: String) extends Label {
-    override def toString() = """ElemName("%s")""" format name
-  }
-
-  def isMixed(cm: ContentModel) = cond(cm) { case _: MIXED => true }
-  def containsText(cm: ContentModel) = (cm == PCDATA) || isMixed(cm)
-  def parse(s: String): ContentModel = ContentModelParser.parse(s)
-
-  def getLabels(r: RegExp): Set[String] = {
-    def traverse(r: RegExp): Set[String] = r match { // !!! check for match translation problem
-      case Letter(ElemName(name)) => Set(name)
-      case Star(  x @ _  ) => traverse( x ) // bug if x@_*
-      case Sequ( xs @ _* ) => Set(xs flatMap traverse: _*)
-      case Alt(  xs @ _* ) => Set(xs flatMap traverse: _*)
-    }
-
-    traverse(r)
-  }
-
-  def buildString(r: RegExp): String = sbToString(buildString(r, _))
-
-  /* precond: rs.length >= 1 */
-  private def buildString(rs: Seq[RegExp], sb: StringBuilder, sep: Char) {
-    buildString(rs.head, sb)
-    for (z <- rs.tail) {
-      sb append sep
-      buildString(z, sb)
-    }
-  }
-
-  def buildString(c: ContentModel, sb: StringBuilder): StringBuilder = c match {
-    case ANY                    => sb append "ANY"
-    case EMPTY                  => sb append "EMPTY"
-    case PCDATA                 => sb append "(#PCDATA)"
-    case ELEMENTS(_) | MIXED(_) => c buildString sb
-  }
-
-  def buildString(r: RegExp, sb: StringBuilder): StringBuilder =
-    r match {  // !!! check for match translation problem
-      case Eps =>
-        sb
-      case Sequ(rs @ _*) =>
-        sb.append( '(' ); buildString(rs, sb, ','); sb.append( ')' )
-      case Alt(rs @ _*) =>
-        sb.append( '(' ); buildString(rs, sb, '|');  sb.append( ')' )
-      case Star(r: RegExp) =>
-        sb.append( '(' ); buildString(r, sb); sb.append( ")*" )
-      case Letter(ElemName(name)) =>
-        sb.append(name)
-    }
-
-}
-
-sealed abstract class ContentModel
-{
-  override def toString(): String = sbToString(buildString)
-  def buildString(sb: StringBuilder): StringBuilder
-}
-
-case object PCDATA extends ContentModel {
-  override def buildString(sb: StringBuilder): StringBuilder = sb.append("(#PCDATA)")
-}
-case object EMPTY extends ContentModel {
-  override def buildString(sb: StringBuilder): StringBuilder = sb.append("EMPTY")
-}
-case object ANY extends ContentModel {
-  override def buildString(sb: StringBuilder): StringBuilder = sb.append("ANY")
-}
-sealed abstract class DFAContentModel extends ContentModel {
-  import ContentModel.{ ElemName, Translator }
-  def r: ContentModel.RegExp
-
-  lazy val dfa: DetWordAutom[ElemName] = {
-    val nfa = Translator.automatonFrom(r, 1)
-    new SubsetConstruction(nfa).determinize
-  }
-}
-
-case class MIXED(r: ContentModel.RegExp) extends DFAContentModel {
-  import ContentModel.{ Alt, RegExp }
-
-  override def buildString(sb: StringBuilder): StringBuilder = {
-    val newAlt = r match { case Alt(rs @ _*) => Alt(rs drop 1: _*) }
-
-    sb append "(#PCDATA|"
-    ContentModel.buildString(newAlt: RegExp, sb)
-    sb append ")*"
-  }
-}
-
-case class ELEMENTS(r: ContentModel.RegExp) extends DFAContentModel {
-  override def buildString(sb: StringBuilder): StringBuilder =
-    ContentModel.buildString(r, sb)
-}
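
ContentModel.parse (backed by the Scanner and ContentModelParser below) turns a DTD content-model string into the MIXED/ELEMENTS representation used for validation; a brief sketch:

    import scala.xml.dtd.ContentModel

    object ContentModelDemo extends App {
      // a mixed model, as it would appear in an <!ELEMENT ...> declaration
      val mixed = ContentModel.parse("(#PCDATA|em|strong)*")
      println(ContentModel.isMixed(mixed))       // true
      println(ContentModel.containsText(mixed))  // true

      // a pure element model is compiled to ELEMENTS(regexp)
      val elems = ContentModel.parse("(title,author+,chapter*)")
      println(elems)                             // prints the normalized regexp form
    }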
diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala
deleted file mode 100644
index ace0219..0000000
--- a/src/library/scala/xml/dtd/ContentModelParser.scala
+++ /dev/null
@@ -1,128 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://www.scala-lang.org/           **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-package dtd
-
-/** Parser for regexps (content models in DTD element declarations) */
-
-object ContentModelParser extends Scanner { // a bit too permissive concerning #PCDATA
-  import ContentModel._
-
-  /** parses the argument to a regexp */
-  def parse(s: String): ContentModel = { initScanner(s); contentspec }
-
-  def accept(tok: Int) = {
-    if (token != tok) {
-      if ((tok == STAR) && (token == END))                  // common mistake
-        scala.sys.error("in DTDs, \n"+
-              "mixed content models must be like (#PCDATA|Name|Name|...)*");
-      else
-        scala.sys.error("expected "+token2string(tok)+
-              ", got unexpected token:"+token2string(token));
-    }
-    nextToken
-  }
-
-  // s [ '+' | '*' | '?' ]
-  def maybeSuffix(s: RegExp) = token match {
-    case STAR => nextToken; Star(s)
-    case PLUS => nextToken; Sequ(s, Star(s))
-    case OPT  => nextToken; Alt(Eps, s)
-    case _    => s
-  }
-
-  // contentspec ::= EMPTY | ANY | (#PCDATA) | "(#PCDATA|"regexp)
-
-  def contentspec: ContentModel = token match {
-
-    case NAME => value match {
-      case "ANY"   => ANY
-      case "EMPTY" => EMPTY
-      case _       => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value );
-    }
-    case LPAREN =>
-
-      nextToken;
-      sOpt;
-      if (token != TOKEN_PCDATA)
-        ELEMENTS(regexp);
-      else {
-        nextToken;
-        token match {
-        case RPAREN =>
-          PCDATA
-        case CHOICE =>
-          val res = MIXED(choiceRest(Eps));
-          sOpt;
-          accept( RPAREN );
-          accept( STAR );
-          res
-        case _ =>
-          scala.sys.error("unexpected token:" + token2string(token) );
-        }
-      }
-
-    case _ =>
-      scala.sys.error("unexpected token:" + token2string(token) );
-    }
-  //                                  sopt ::= S?
-  def sOpt() = if( token == S ) nextToken;
-
-  //                      (' S? mixed ::= '#PCDATA' S? ')'
-  //                                    | '#PCDATA' (S? '|' S? atom)* S? ')*'
-
-  //       '(' S? regexp ::= cp S? [seqRest|choiceRest] ')' [ '+' | '*' | '?' ]
-  def regexp: RegExp = {
-    val p = particle;
-    sOpt;
-    maybeSuffix(token match {
-      case RPAREN  => nextToken; p
-      case CHOICE  => val q = choiceRest( p );accept( RPAREN ); q
-      case COMMA   => val q = seqRest( p );   accept( RPAREN ); q
-    })
-  }
-
-  //                                             seqRest ::= (',' S? cp S?)+
-  def seqRest(p: RegExp) = {
-    var k = List(p);
-    while( token == COMMA ) {
-      nextToken;
-      sOpt;
-      k = particle::k;
-      sOpt;
-    }
-    Sequ( k.reverse:_* )
-  }
-
-  //                                          choiceRest ::= ('|' S? cp S?)+
-  def choiceRest( p:RegExp ) = {
-    var k = List( p );
-    while( token == CHOICE ) {
-      nextToken;
-      sOpt;
-      k = particle::k;
-      sOpt;
-    }
-    Alt( k.reverse:_* )
-  }
-
-  //                                  particle ::=  '(' S? regexp
-  //                                             |  name [ '+' | '*' | '?' ]
-  def particle = token match {
-    case LPAREN => nextToken; sOpt; regexp;
-    case NAME   => val a = Letter(ElemName(value)); nextToken; maybeSuffix(a)
-    case _      => scala.sys.error("expected '(' or Name, got:"+token2string(token));
-  }
-
-  //                                     atom ::= name
-  def atom = token match {
-    case NAME   => val a = Letter(ElemName(value)); nextToken; a
-    case _      => scala.sys.error("expected Name, got:"+token2string(token));
-  }
-}
diff --git a/src/library/scala/xml/dtd/DTD.scala b/src/library/scala/xml/dtd/DTD.scala
deleted file mode 100644
index 1f8af3b..0000000
--- a/src/library/scala/xml/dtd/DTD.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-package dtd
-
-import scala.collection.mutable
-
-/** A document type declaration.
- *
- *  @author Burak Emir
- */
-abstract class DTD {
-  var externalID: ExternalID            = null
-  var decls: List[Decl]                 = Nil
-  def notations: Seq[NotationDecl]      = Nil
-  def unparsedEntities: Seq[EntityDecl] = Nil
-
-  var elem: mutable.Map[String, ElemDecl]    = new mutable.HashMap[String, ElemDecl]()
-  var attr: mutable.Map[String, AttListDecl] = new mutable.HashMap[String, AttListDecl]()
-  var ent:  mutable.Map[String, EntityDecl]  = new mutable.HashMap[String, EntityDecl]()
-
-  override def toString() =
-    "DTD [\n%s%s]".format(
-      Option(externalID) getOrElse "",
-      decls.mkString("", "\n", "\n")
-    )
-}
diff --git a/src/library/scala/xml/dtd/Decl.scala b/src/library/scala/xml/dtd/Decl.scala
deleted file mode 100644
index dc4cb93..0000000
--- a/src/library/scala/xml/dtd/Decl.scala
+++ /dev/null
@@ -1,156 +0,0 @@
-/*                     __                                               *\
- **     ________ ___   / /  ___     Scala API                            **
- **    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
- **  __\ \/ /__/ __ |/ /__/ __ |    http://www.scala-lang.org/           **
- ** /____/\___/_/ |_/____/_/ | |                                         **
- **                          |/                                          **
- \*                                                                      */
-
-package scala.xml
-package dtd
-
-import Utility.sbToString
-
-abstract class Decl
-
-abstract class MarkupDecl extends Decl {
-  def buildString(sb: StringBuilder): StringBuilder
-}
-
-/** an element declaration
- */
-case class ElemDecl(name: String, contentModel: ContentModel)
-extends MarkupDecl {
-  override def buildString(sb: StringBuilder): StringBuilder = {
-    sb append "<!ELEMENT " append name append ' '
-
-    ContentModel.buildString(contentModel, sb)
-    sb append '>'
-  }
-}
-
-case class AttListDecl(name: String, attrs:List[AttrDecl])
-extends MarkupDecl {
-  override def buildString(sb: StringBuilder): StringBuilder = {
-    sb append "<!ATTLIST " append name append '\n' append attrs.mkString("","\n",">")
-  }
-}
-
-/** an attribute declaration. at this point, the tpe is a string. Future
- *  versions might provide a way to access the attribute types more
- *  directly.
- */
-case class AttrDecl(name: String, tpe: String, default: DefaultDecl) {
-  override def toString(): String = sbToString(buildString)
-
-  def buildString(sb: StringBuilder): StringBuilder = {
-    sb append "  " append name append ' ' append tpe append ' '
-    default buildString sb
-  }
-
-}
-
-/** an entity declaration */
-abstract class EntityDecl extends MarkupDecl
-
-/** a parsed general entity declaration */
-case class ParsedEntityDecl(name: String, entdef: EntityDef) extends EntityDecl {
-  override def buildString(sb: StringBuilder): StringBuilder = {
-    sb append "<!ENTITY " append name append ' '
-    entdef buildString sb append '>'
-  }
-}
-
-/** a parameter entity declaration */
-case class ParameterEntityDecl(name: String, entdef: EntityDef) extends EntityDecl {
-  override def buildString(sb: StringBuilder): StringBuilder = {
-    sb append "<!ENTITY % " append name append ' '
-    entdef buildString sb append '>'
-  }
-}
-
-/** an unparsed entity declaration */
-case class UnparsedEntityDecl( name:String, extID:ExternalID, notation:String ) extends EntityDecl {
-  override def buildString(sb: StringBuilder): StringBuilder = {
-    sb append "<!ENTITY " append name append ' '
-    extID buildString sb append " NDATA " append notation append '>'
-  }
-}
-/** a notation declaration */
-case class NotationDecl( name:String, extID:ExternalID ) extends MarkupDecl {
-  override def buildString(sb: StringBuilder): StringBuilder = {
-    sb append "<!NOTATION " append name append ' '
-    extID buildString sb
-  }
-}
-
-abstract class EntityDef {
-  def buildString(sb: StringBuilder): StringBuilder
-}
-
-case class IntDef(value:String) extends EntityDef {
-  private def validateValue() {
-    var tmp = value
-    var ix  = tmp indexOf '%'
-    while (ix != -1) {
-      val iz = tmp.indexOf(';', ix)
-      if(iz == -1 && iz == ix + 1)
-        throw new IllegalArgumentException("no % allowed in entity value, except for parameter-entity-references")
-      else {
-        val n = tmp.substring(ix, iz)
-
-        if (!Utility.isName(n))
-          throw new IllegalArgumentException("internal entity def: \""+n+"\" must be an XML Name")
-
-        tmp = tmp.substring(iz+1, tmp.length)
-        ix  = tmp indexOf '%'
-      }
-    }
-  }
-  validateValue()
-
-  override def buildString(sb: StringBuilder): StringBuilder =
-    Utility.appendQuoted(value, sb)
-
-}
-
-case class ExtDef(extID:ExternalID) extends EntityDef {
-  override def buildString(sb: StringBuilder): StringBuilder =
-    extID buildString sb
-}
-
-
-/** a parsed entity reference */
-case class PEReference(ent:String) extends MarkupDecl {
-  if( !Utility.isName( ent ))
-    throw new IllegalArgumentException("ent must be an XML Name");
-
-  override def buildString(sb: StringBuilder): StringBuilder =
-    sb append '%' append ent append ';'
-}
-
-
-// default declarations for attributes
-
-abstract class DefaultDecl {
-  override def toString(): String
-  def buildString(sb: StringBuilder): StringBuilder
-}
-
-case object REQUIRED extends DefaultDecl {
-  override def toString(): String = "#REQUIRED"
-  override def buildString(sb: StringBuilder) = sb append "#REQUIRED"
-}
-
-case object IMPLIED extends DefaultDecl {
-  override def toString(): String = "#IMPLIED"
-  override def buildString(sb: StringBuilder) = sb append "#IMPLIED"
-}
-
-case class DEFAULT(fixed: Boolean, attValue: String) extends DefaultDecl {
-  override def toString(): String = sbToString(buildString)
-  override def buildString(sb: StringBuilder): StringBuilder = {
-    if (fixed) sb append "#FIXED "
-    Utility.appendEscapedQuoted(attValue, sb)
-  }
-}
diff --git a/src/library/scala/xml/dtd/DocType.scala b/src/library/scala/xml/dtd/DocType.scala
deleted file mode 100644
index 79f8f9f..0000000
--- a/src/library/scala/xml/dtd/DocType.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-package dtd
-
-/** An XML node for document type declaration.
- *
- *  @author Burak Emir
- *
- *  @param  name   name of this DOCTYPE
- *  @param  extID  None, or Some(external ID of this doctype)
- *  @param  intSubset sequence of internal subset declarations
- */
-case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl])
-{
-  if (!Utility.isName(name))
-    throw new IllegalArgumentException(name+" must be an XML Name")
-
-  /** returns "<!DOCTYPE + name + extID? + ("["+intSubSet+"]")? >" */
-  final override def toString() = {
-    def intString =
-      if (intSubset.isEmpty) ""
-      else intSubset.mkString("[", "", "]")
-
-    """<!DOCTYPE %s %s%s>""".format(name, extID.toString, intString)
-  }
-}
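
DocType pairs with the ExternalID subclasses defined further down and is what XML.write expects when emitting a <!DOCTYPE ...> line; a sketch using the XHTML 1.0 Strict identifiers:

    import scala.xml.dtd.{ DocType, PublicID }

    object DocTypeDemo extends App {
      val xhtmlStrict = DocType(
        "html",
        PublicID("-//W3C//DTD XHTML 1.0 Strict//EN",
                 "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"),
        Nil)

      // renders as <!DOCTYPE html PUBLIC "..." "...">
      println(xhtmlStrict)
    }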
diff --git a/src/library/scala/xml/dtd/ElementValidator.scala b/src/library/scala/xml/dtd/ElementValidator.scala
deleted file mode 100644
index bfc85f4..0000000
--- a/src/library/scala/xml/dtd/ElementValidator.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://www.scala-lang.org/           **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package dtd
-
-import PartialFunction._
-import ContentModel.ElemName
-import MakeValidationException._    // @todo other exceptions
-import scala.util.automata._
-import scala.collection.mutable
-
-/** validate children and/or attributes of an element
- *  exceptions are created but not thrown.
- */
-class ElementValidator() extends Function1[Node,Boolean] {
-
-  private var exc: List[ValidationException] = Nil
-
-  protected var contentModel: ContentModel           = _
-  protected var dfa:          DetWordAutom[ElemName] = _
-  protected var adecls:       List[AttrDecl]         = _
-
-  /** set content model, enabling element validation */
-  def setContentModel(cm: ContentModel) = {
-    contentModel = cm
-    cm match {
-      case ELEMENTS(r) =>
-        val nfa = ContentModel.Translator.automatonFrom(r, 1)
-        dfa = new SubsetConstruction(nfa).determinize
-      case _ =>
-        dfa = null
-    }
-  }
-
-  def getContentModel = contentModel
-
-  /** set meta data, enabling attribute validation */
-  def setMetaData(adecls: List[AttrDecl]) { this.adecls = adecls }
-
-  def getIterable(nodes: Seq[Node], skipPCDATA: Boolean): Iterable[ElemName] = {
-    def isAllWhitespace(a: Atom[_]) = cond(a.data) { case s: String if s.trim == "" => true }
-
-    nodes.filter {
-      case y: SpecialNode => y match {
-        case a: Atom[_] if isAllWhitespace(a) => false  // always skip all-whitespace nodes
-        case _                                => !skipPCDATA
-      }
-      case x                                  => x.namespace eq null
-    } . map (x => ElemName(x.label))
-  }
-
-  /** check attributes, return true if md corresponds to attribute declarations in adecls.
-   */
-  def check(md: MetaData): Boolean = {
-    val len: Int = exc.length
-    var ok = new mutable.BitSet(adecls.length)
-
-    for (attr <- md) {
-      def attrStr = attr.value.toString
-      def find(Key: String): Option[AttrDecl] = {
-        adecls.zipWithIndex find {
-          case (a @ AttrDecl(Key, _, _), j) => ok += j ; return Some(a)
-          case _                            => false
-        }
-        None
-      }
-
-      find(attr.key) match {
-        case None =>
-          exc ::= fromUndefinedAttribute(attr.key)
-
-        case Some(AttrDecl(_, tpe, DEFAULT(true, fixedValue))) if attrStr != fixedValue =>
-          exc ::= fromFixedAttribute(attr.key, fixedValue, attrStr)
-
-        case _ =>
-      }
-    }
-
-    adecls.zipWithIndex foreach {
-      case (AttrDecl(key, tpe, REQUIRED), j) if !ok(j) => exc ::= fromMissingAttribute(key, tpe)
-      case _ =>
-    }
-
-    exc.length == len //- true if no new exception
-  }
-
-  /** check children, return true if conform to content model
-   *  @note contentModel != null
-   */
-  def check(nodes: Seq[Node]): Boolean = contentModel match {
-    case ANY    => true
-    case EMPTY  => getIterable(nodes, false).isEmpty
-    case PCDATA => getIterable(nodes, true).isEmpty
-    case MIXED(ContentModel.Alt(branches @ _*))  =>   // @todo
-      val j = exc.length
-      def find(Key: String): Boolean =
-        branches exists { case ContentModel.Letter(ElemName(Key)) => true ; case _ => false }
-
-      getIterable(nodes, true) map (_.name) filterNot find foreach {
-        exc ::= MakeValidationException fromUndefinedElement _
-      }
-      (exc.length == j)   // - true if no new exception
-
-    case _: ELEMENTS =>
-      dfa isFinal {
-        getIterable(nodes, false).foldLeft(0) { (q, e) =>
-          (dfa delta q).getOrElse(e, throw ValidationException("element %s not allowed here" format e))
-        }
-      }
-    case _ => false
-  }
-
-  /** applies various validations - accumulates error messages in exc
-   *  @todo fail on first error, ignore other errors (rearranging conditions)
-   */
-  def apply(n: Node): Boolean =
-    //- ? check children
-    ((contentModel == null) || check(n.child)) &&
-    //- ? check attributes
-    ((adecls == null) || check(n.attributes))
-}
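
A hedged sketch of driving ElementValidator by hand (the element and attribute declarations are invented for illustration):

    import scala.xml.dtd.{ AttrDecl, ContentModel, ElementValidator, REQUIRED }

    object ValidatorDemo extends App {
      val validator = new ElementValidator()
      validator.setContentModel(ContentModel.parse("(title,chapter+)"))
      validator.setMetaData(List(AttrDecl("lang", "CDATA", REQUIRED)))

      val ok  = <book lang="en"><title/><chapter/></book>
      val bad = <book><title/><chapter/></book>   // missing the REQUIRED lang attribute

      println(validator(ok))    // expected: true
      println(validator(bad))   // expected: false
    }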
diff --git a/src/library/scala/xml/dtd/ExternalID.scala b/src/library/scala/xml/dtd/ExternalID.scala
deleted file mode 100644
index 7a74635..0000000
--- a/src/library/scala/xml/dtd/ExternalID.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://www.scala-lang.org/           **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-package dtd
-
-/** an ExternalIDs - either PublicID or SystemID
- *
- *  @author Burak Emir
- */
-abstract class ExternalID extends parsing.TokenTests
-{
-  def quoted(s: String) = {
-    val c = if (s contains '"') '\'' else '"'
-    c + s + c
-  }
-
-  // public != null: PUBLIC " " publicLiteral " " [systemLiteral]
-  // public == null: SYSTEM " " systemLiteral
-  override def toString(): String = {
-    lazy val quotedSystemLiteral = quoted(systemId)
-    lazy val quotedPublicLiteral = quoted(publicId)
-
-    if (publicId == null) "SYSTEM " + quotedSystemLiteral
-    else "PUBLIC " + quotedPublicLiteral +
-      (if (systemId == null) "" else " " + quotedSystemLiteral)
-  }
-  def buildString(sb: StringBuilder): StringBuilder =
-    sb.append(this.toString())
-
-  def systemId: String
-  def publicId: String
-}
-
-/** a system identifier
- *
- *  @author Burak Emir
- *  @param  systemId the system identifier literal
- */
-case class SystemID(systemId: String) extends ExternalID {
-  val publicId = null
-
-  if (!checkSysID(systemId))
-    throw new IllegalArgumentException("can't use both \" and ' in systemId")
-}
-
-
-/** a public identifier (see http://www.w3.org/QA/2002/04/valid-dtd-list.html).
- *
- *  @author Burak Emir
- *  @param  publicId the public identifier literal
- *  @param  systemId (can be null for notation pubIDs) the system identifier literal
- */
-case class PublicID(publicId: String, systemId: String) extends ExternalID {
-  if (!checkPubID(publicId))
-    throw new IllegalArgumentException("publicId must consist of PubidChars")
-
-  if (systemId != null && !checkSysID(systemId))
-    throw new IllegalArgumentException("can't use both \" and ' in systemId")
-
-  /** the constant "#PI" */
-  def label = "#PI"
-
-  /** always empty */
-  def attribute = Node.NoAttributes
-
-  /** always empty */
-  def child = Nil
-}
diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala
deleted file mode 100644
index 9b64cc6..0000000
--- a/src/library/scala/xml/dtd/Scanner.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-package dtd
-
-/** Scanner for regexps (content models in DTD element declarations)
- *  todo: cleanup
- */
-class Scanner extends Tokens with parsing.TokenTests {
-
-  final val ENDCH = '\u0000'
-
-  var token:Int = END
-  var value:String = _
-
-  private var it: Iterator[Char] = null
-  private var c: Char = 'z'
-
-  /** initializes the scanner on input s */
-  final def initScanner(s: String) {
-    value = ""
-    it = (s).iterator
-    token = 1+END
-    next
-    nextToken
-  }
-
-  /** scans the next token */
-  final def nextToken() {
-    if (token != END) token = readToken
-  }
-
-  // todo: see XML specification... probably isLetter,isDigit is fine
-  final def isIdentChar = ( ('a' <= c && c <= 'z')
-                           || ('A' <= c && c <= 'Z'));
-
-  final def next() = if (it.hasNext) c = it.next else c = ENDCH
-
-  final def acc(d: Char) {
-    if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !");
-  }
-
-  final def accS(ds: Seq[Char]) { ds foreach acc }
-
-  final def readToken: Int =
-    if (isSpace(c)) {
-      while (isSpace(c)) c = it.next
-      S
-    } else c match {
-      case '('   => next; LPAREN
-      case ')'   => next; RPAREN
-      case ','   => next; COMMA
-      case '*'   => next; STAR
-      case '+'   => next; PLUS
-      case '?'   => next; OPT
-      case '|'   => next; CHOICE
-      case '#'   => next; accS( "PCDATA" ); TOKEN_PCDATA
-      case ENDCH => END
-      case _     =>
-        if (isNameStart(c)) name; // NAME
-        else scala.sys.error("unexpected character:" + c)
-    }
-
-  final def name = {
-    val sb = new StringBuilder()
-    do { sb.append(c); next } while (isNameChar(c));
-    value = sb.toString()
-    NAME
-  }
-
-}
diff --git a/src/library/scala/xml/dtd/Tokens.scala b/src/library/scala/xml/dtd/Tokens.scala
deleted file mode 100644
index eaffba9..0000000
--- a/src/library/scala/xml/dtd/Tokens.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://www.scala-lang.org/           **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package dtd
-
-
-class Tokens {
-
-  // Tokens
-
-  final val TOKEN_PCDATA = 0
-  final val NAME         = 1
-  final val LPAREN       = 3
-  final val RPAREN       = 4
-  final val COMMA        = 5
-  final val STAR         = 6
-  final val PLUS         = 7
-  final val OPT          = 8
-  final val CHOICE       = 9
-  final val END          = 10
-  final val S            = 13
-
-  final def token2string(i: Int): String = i match {
-    case  0 => "#PCDATA"
-    case  1 => "NAME"
-    case  3 => "("
-    case  4 => ")"
-    case  5 => ","
-    case  6 => "*"
-    case  7 => "+"
-    case  8 => "?"
-    case  9 => "|"
-    case 10 => "END"
-    case 13 => " "
-  }
-}
diff --git a/src/library/scala/xml/dtd/ValidationException.scala b/src/library/scala/xml/dtd/ValidationException.scala
deleted file mode 100644
index 243db69..0000000
--- a/src/library/scala/xml/dtd/ValidationException.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://www.scala-lang.org/           **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package dtd
-
-
-case class ValidationException(e: String) extends Exception(e)
-
-/**
- *  @author Burak Emir
- */
-object MakeValidationException {
-  def fromFixedAttribute(k: String, value: String, actual: String) =
-    ValidationException("value of attribute " + k + " FIXED to \""+
-                        value+"\", but document tries \""+actual+"\"")
-
-  def fromNonEmptyElement() =
-    new ValidationException("element should be *empty*")
-
-  def fromUndefinedElement(label: String) =
-    new ValidationException("element \""+ label +"\" not allowed here")
-
-  def fromUndefinedAttribute(key: String) =
-    new ValidationException("attribute " + key +" not allowed here")
-
-  def fromMissingAttribute(allKeys: Set[String]) = {
-    val sb = new StringBuilder("missing value for REQUIRED attribute")
-    if (allKeys.size > 1) sb.append('s');
-    allKeys foreach (k => sb append "'%s'".format(k))
-    new ValidationException(sb.toString())
-  }
-
-  def fromMissingAttribute(key: String, tpe: String) =
-    new ValidationException("missing value for REQUIRED attribute %s of type %s".format(key, tpe))
-}
diff --git a/src/library/scala/xml/factory/Binder.scala b/src/library/scala/xml/factory/Binder.scala
deleted file mode 100755
index bad4a4e..0000000
--- a/src/library/scala/xml/factory/Binder.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package factory
-
-import parsing.ValidatingMarkupHandler
-
-/**
- *  @author Burak Emir
- */
-abstract class Binder(val preserveWS: Boolean) extends ValidatingMarkupHandler {
-
-  var result: NodeBuffer = new NodeBuffer()
-
-  def reportSyntaxError(pos:Int, str:String) = {}
-
-  final def procInstr(pos: Int, target: String, txt: String) =
-    ProcInstr(target, txt)
-
-  final def comment(pos: Int, txt: String) =
-    Comment(txt)
-
-  final def entityRef(pos: Int, n: String) =
-    EntityRef(n)
-
-  final def text(pos: Int, txt: String) =
-    Text(txt)
-
-  final def traverse(n:Node): Unit = n match {
-    case x:ProcInstr =>
-      result &+ procInstr(0, x.target, x.text)
-    case x:Comment   =>
-      result &+ comment(0, x.text)
-    case x:Text      =>
-      result &+ text(0, x.data)
-    case x:EntityRef =>
-      result &+ entityRef(0, x.entityName)
-    case x:Elem =>
-      elemStart(0, x.prefix, x.label, x.attributes, x.scope)
-      val old = result
-      result = new NodeBuffer()
-      for (m <- x.child) traverse(m)
-      result = old &+ elem(0, x.prefix, x.label, x.attributes, x.scope, x.minimizeEmpty, NodeSeq.fromSeq(result)).toList;
-      elemEnd(0, x.prefix, x.label)
-  }
-
-  final def validate(n: Node): Node = {
-    this.rootLabel = n.label
-    traverse(n)
-    result(0)
-  }
-}
diff --git a/src/library/scala/xml/factory/LoggedNodeFactory.scala b/src/library/scala/xml/factory/LoggedNodeFactory.scala
deleted file mode 100644
index cac61ac..0000000
--- a/src/library/scala/xml/factory/LoggedNodeFactory.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |                                         **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-package factory
-
-/** This class logs what the nodefactory is actually doing.
- *  If you want to see what happens during loading, use it like this:
-{{{
-object testLogged extends App {
-  val x = new scala.xml.parsing.NoBindingFactoryAdapter
-        with scala.xml.factory.LoggedNodeFactory[scala.xml.Elem]
-        with scala.util.logging.ConsoleLogger
-
-  Console.println("Start")
-  val doc = x.load(new java.net.URL("http://example.com/file.xml"))
-  Console.println("End")
-  Console.println(doc)
-}
-}}}
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] with scala.util.logging.Logged {
-  // configuration values
-  val logNode      = true
-  val logText      = false
-  val logComment   = false
-  val logProcInstr = false
-
-  final val NONE  = 0
-  final val CACHE = 1
-  final val FULL  = 2
-  /** 0 = no logging, 1 = cache hits, 2 = detail */
-  val logCompressLevel  = 1
-
-  // methods of NodeFactory
-
-  /** logged version of makeNode method */
-  override def makeNode(pre: String, label: String, attrSeq: MetaData,
-                        scope: NamespaceBinding, children: Seq[Node]): A = {
-    if (logNode)
-      log("[makeNode for "+label+"]");
-
-    val hash = Utility.hashCode(pre, label, attrSeq.##, scope.##, children)
-
-    /*
-    if(logCompressLevel >= FULL) {
-      log("[hashcode total:"+hash);
-      log(" elem name "+uname+" hash "+ ? ));
-      log(" attrs     "+attrSeq+" hash "+attrSeq.hashCode());
-      log(" children :"+children+" hash "+children.hashCode());
-    }
-    */
-    if (!cache.get( hash ).isEmpty && (logCompressLevel >= CACHE))
-      log("[cache hit !]");
-
-    super.makeNode(pre, label, attrSeq, scope, children)
-  }
-
-  override def makeText(s: String) = {
-    if (logText)
-      log("[makeText:\""+s+"\"]");
-    super.makeText(s)
-  }
-
-  override def makeComment(s: String): Seq[Comment] = {
-    if (logComment)
-      log("[makeComment:\""+s+"\"]");
-    super.makeComment(s)
-  }
-
-  override def makeProcInstr(t: String, s: String): Seq[ProcInstr] = {
-    if (logProcInstr)
-      log("[makeProcInstr:\""+t+" "+ s+"\"]");
-    super.makeProcInstr(t, s)
-  }
-
-}
diff --git a/src/library/scala/xml/factory/NodeFactory.scala b/src/library/scala/xml/factory/NodeFactory.scala
deleted file mode 100644
index 28a1b6f..0000000
--- a/src/library/scala/xml/factory/NodeFactory.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |                                         **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-package factory
-
-import parsing.{ FactoryAdapter, NoBindingFactoryAdapter }
-import java.io.{ InputStream, Reader, StringReader, File, FileDescriptor, FileInputStream }
-
-trait NodeFactory[A <: Node] {
-  val ignoreComments  = false
-  val ignoreProcInstr = false
-
-  /* default behaviour is to use hash-consing */
-  val cache = new scala.collection.mutable.HashMap[Int, List[A]]
-
-  protected def create(pre: String, name: String, attrs: MetaData, scope: NamespaceBinding, children:Seq[Node]): A
-
-  protected def construct(hash: Int, old:List[A], pre: String, name: String, attrSeq:MetaData, scope: NamespaceBinding, children:Seq[Node]): A = {
-    val el = create(pre, name, attrSeq, scope, children)
-    cache.update(hash, el :: old)
-    el
-  }
-
-  def eqElements(ch1: Seq[Node], ch2: Seq[Node]): Boolean =
-    ch1.view.zipAll(ch2.view, null, null) forall { case (x,y) => x eq y }
-
-  def nodeEquals(n: Node, pre: String, name: String, attrSeq:MetaData, scope: NamespaceBinding, children: Seq[Node]) =
-    n.prefix == pre &&
-    n.label == name &&
-    n.attributes == attrSeq &&
-    // scope?
-    eqElements(n.child, children)
-
-  def makeNode(pre: String, name: String, attrSeq: MetaData, scope: NamespaceBinding, children: Seq[Node]): A = {
-    val hash = Utility.hashCode( pre, name, attrSeq.##, scope.##, children)
-    def cons(old: List[A]) = construct(hash, old, pre, name, attrSeq, scope, children)
-
-    (cache get hash) match {
-      case Some(list) =>    // find structurally equal
-        list.find(nodeEquals(_, pre, name, attrSeq, scope, children)) match {
-          case Some(x)    => x
-          case _          => cons(list)
-        }
-      case None           => cons(Nil)
-    }
-  }
-
-  def makeText(s: String) = Text(s)
-  def makeComment(s: String): Seq[Comment]  =
-    if (ignoreComments) Nil else List(Comment(s))
-  def makeProcInstr(t: String, s: String): Seq[ProcInstr] =
-    if (ignoreProcInstr) Nil else List(ProcInstr(t, s))
-}
diff --git a/src/library/scala/xml/factory/XMLLoader.scala b/src/library/scala/xml/factory/XMLLoader.scala
deleted file mode 100644
index 72e4c51..0000000
--- a/src/library/scala/xml/factory/XMLLoader.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |                                         **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-package factory
-
-import javax.xml.parsers.SAXParserFactory
-import parsing.{ FactoryAdapter, NoBindingFactoryAdapter }
-import java.io.{ InputStream, Reader, StringReader, File, FileDescriptor, FileInputStream }
-import java.net.URL
-
-/** Presents collection of XML loading methods which use the parser
- *  created by "def parser".
- */
-trait XMLLoader[T <: Node]
-{
-  import scala.xml.Source._
-  def adapter: FactoryAdapter = new NoBindingFactoryAdapter()
-
-  /* Override this to use a different SAXParser. */
-  def parser: SAXParser = {
-    val f = SAXParserFactory.newInstance()
-    f.setNamespaceAware(false)
-    f.newSAXParser()
-  }
-
-  /** Loads XML from the given InputSource, using the supplied parser.
-   *  The methods available in scala.xml.XML use the XML parser in the JDK.
-   */
-  def loadXML(source: InputSource, parser: SAXParser): T = {
-    val newAdapter = adapter
-
-    newAdapter.scopeStack push TopScope
-    parser.parse(source, newAdapter)
-    newAdapter.scopeStack.pop
-
-    newAdapter.rootElem.asInstanceOf[T]
-  }
-
-  /** Loads XML from the given file, file descriptor, or filename. */
-  def loadFile(file: File): T          = loadXML(fromFile(file), parser)
-  def loadFile(fd: FileDescriptor): T  = loadXML(fromFile(fd), parser)
-  def loadFile(name: String): T        = loadXML(fromFile(name), parser)
-
-  /** loads XML from given InputStream, Reader, sysID, InputSource, or URL. */
-  def load(is: InputStream): T         = loadXML(fromInputStream(is), parser)
-  def load(reader: Reader): T          = loadXML(fromReader(reader), parser)
-  def load(sysID: String): T           = loadXML(fromSysId(sysID), parser)
-  def load(source: InputSource): T     = loadXML(source, parser)
-  def load(url: URL): T                = loadXML(fromInputStream(url.openStream()), parser)
-
-  /** Loads XML from the given String. */
-  def loadString(string: String): T    = loadXML(fromString(string), parser)
-}
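For reference, the trait above is what backs the convenience object `scala.xml.XML` (which extends `XMLLoader[Elem]`). A minimal usage sketch, assuming the `scala.xml` package is still on the classpath (pre-modularization standard library or the scala-xml module):

    import scala.xml.{ XML, Elem }

    // loadString goes through loadXML(fromString(...), parser) above
    val doc: Elem = XML.loadString("<root><a>1</a><a>2</a></root>")
    println((doc \ "a").map(_.text))   // prints the text of each <a> element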
diff --git a/src/library/scala/xml/include/CircularIncludeException.scala b/src/library/scala/xml/include/CircularIncludeException.scala
deleted file mode 100644
index 5e74967..0000000
--- a/src/library/scala/xml/include/CircularIncludeException.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-package include
-
-/**
- *  A `CircularIncludeException` is thrown when an included document attempts
- *  to include itself or one of its ancestor documents.
- */
-class CircularIncludeException(message: String) extends XIncludeException {
-
-  /**
-   * Constructs a `CircularIncludeException` with `'''null'''`
-   * as its error detail message.
-   */
-  def this() = this(null)
-
-}
diff --git a/src/library/scala/xml/include/UnavailableResourceException.scala b/src/library/scala/xml/include/UnavailableResourceException.scala
deleted file mode 100644
index f00cc58..0000000
--- a/src/library/scala/xml/include/UnavailableResourceException.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-package include
-
-/**
- * An `UnavailableResourceException` is thrown when an included document
- * cannot be found or loaded.
- */
-class UnavailableResourceException(message: String)
-extends XIncludeException(message) {
-  def this() = this(null)
-}
diff --git a/src/library/scala/xml/include/XIncludeException.scala b/src/library/scala/xml/include/XIncludeException.scala
deleted file mode 100644
index 84033f8..0000000
--- a/src/library/scala/xml/include/XIncludeException.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-package include
-
-/**
- * `XIncludeException` is the generic superclass for all checked exceptions
- * that may be thrown as a result of a violation of XInclude's rules.
- *
- * Constructs an `XIncludeException` with the specified detail message.
- * The error message string `message` can later be retrieved by the
- * `{@link java.lang.Throwable#getMessage}`
- * method of class `java.lang.Throwable`.
- *
- * @param   message   the detail message.
- */
-class XIncludeException(message: String) extends Exception(message) {
-
-  /**
-   * Constructs an `XIncludeException` with `'''null'''` as its error detail message.
-   */
-  def this() = this(null)
-
-  private var rootCause: Throwable = null
-
-  /**
-   * When an `IOException`, `MalformedURLException` or other generic
-   * exception is thrown while processing an XML document for XIncludes,
-   * it is customarily replaced by some form of `XIncludeException`.
-   * This method allows you to store the original exception.
-   *
-   * @param   nestedException   the underlying exception which
-   * caused the XIncludeException to be thrown
-   */
-  def setRootCause(nestedException: Throwable ) {
-    this.rootCause = nestedException
-  }
-
-  /**
-   * When an `IOException`, `MalformedURLException` or other generic
-   * exception is thrown while processing an XML document for XIncludes,
-   * it is customarily replaced by some form of `XIncludeException`.
-   * This method allows you to retrieve the original exception.
-   * It returns null if no such exception caused this `XIncludeException`.
-   *
-   * @return Throwable   the underlying exception which caused the
-   *                     `XIncludeException` to be thrown
-   */
-  def getRootCause(): Throwable = this.rootCause
-
-}
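A hedged sketch of the wrap-and-rethrow pattern the `setRootCause`/`getRootCause` comments describe; the `readIncluded` helper and its URL handling are illustrative only, not part of the library:

    import java.io.IOException
    import scala.xml.include.XIncludeException

    def readIncluded(url: String): String =
      try scala.io.Source.fromURL(url).mkString
      catch {
        case e: IOException =>
          val ex = new XIncludeException("could not load " + url)
          ex.setRootCause(e)            // keep the original failure for callers
          throw ex
      }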
diff --git a/src/library/scala/xml/include/sax/EncodingHeuristics.scala b/src/library/scala/xml/include/sax/EncodingHeuristics.scala
deleted file mode 100644
index 1340689..0000000
--- a/src/library/scala/xml/include/sax/EncodingHeuristics.scala
+++ /dev/null
@@ -1,99 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-package include.sax
-import scala.xml.include._
-
-import java.io.InputStream
-import scala.util.matching.Regex
-
-/** `EncodingHeuristics` reads from a stream
- * (which should be buffered) and attempts to guess
- * what the encoding of the text in the stream is.
- * If it fails to determine the type of the encoding,
- * it returns the default UTF-8.
- *
- * @author Burak Emir
- * @author Paul Phillips
- */
-object EncodingHeuristics
-{
-  object EncodingNames {
-    // UCS-4 isn't yet implemented in java releases anyway...
-    val bigUCS4       = "UCS-4"
-    val littleUCS4    = "UCS-4"
-    val unusualUCS4   = "UCS-4"
-    val bigUTF16      = "UTF-16BE"
-    val littleUTF16   = "UTF-16LE"
-    val utf8          = "UTF-8"
-    val default       = utf8
-  }
-  import EncodingNames._
-
-  /** This utility method attempts to determine the XML character encoding
-    * by examining the input stream, as specified at
-    * [[http://www.w3.org/TR/xml/#sec-guessing w3]].
-    *
-    * @param    in   `InputStream` to read from.
-    * @throws IOException if the stream cannot be reset
-    * @return         the name of the encoding.
-    */
-  def readEncodingFromStream(in: InputStream): String = {
-    var ret: String = null
-    val bytesToRead = 1024 // enough to read most XML encoding declarations
-    def resetAndRet = { in.reset ; ret }
-
-    // This may fail if there are a lot of space characters before the end
-    // of the encoding declaration
-    in mark bytesToRead
-    val bytes = (in.read, in.read, in.read, in.read)
-
-    // first look for byte order mark
-    ret = bytes match {
-      case (0x00, 0x00, 0xFE, 0xFF) => bigUCS4
-      case (0xFF, 0xFE, 0x00, 0x00) => littleUCS4
-      case (0x00, 0x00, 0xFF, 0xFE) => unusualUCS4
-      case (0xFE, 0xFF, 0x00, 0x00) => unusualUCS4
-      case (0xFE, 0xFF, _   , _   ) => bigUTF16
-      case (0xFF, 0xFE, _   , _   ) => littleUTF16
-      case (0xEF, 0xBB, 0xBF, _   ) => utf8
-      case _                        => null
-    }
-    if (ret != null)
-      return resetAndRet
-
-    def readASCIIEncoding: String = {
-      val data = new Array[Byte](bytesToRead - 4)
-      val length = in.read(data, 0, bytesToRead - 4)
-
-      // Use Latin-1 (ISO-8859-1) because all byte sequences are legal.
-      val declaration = new String(data, 0, length, "ISO-8859-1")
-      val regexp = """(?m).*?encoding\s*=\s*["'](.+?)['"]""".r
-      (regexp findFirstMatchIn declaration) match {
-        case None     => default
-        case Some(md) => md.subgroups(0)
-      }
-    }
-
-    // no byte order mark present; first character must be '<' or whitespace
-    ret = bytes match {
-      case (0x00, 0x00, 0x00, '<' ) => bigUCS4
-      case ('<' , 0x00, 0x00, 0x00) => littleUCS4
-      case (0x00, 0x00, '<' , 0x00) => unusualUCS4
-      case (0x00, '<' , 0x00, 0x00) => unusualUCS4
-      case (0x00, '<' , 0x00, '?' ) => bigUTF16     // XXX must read encoding
-      case ('<' , 0x00, '?' , 0x00) => littleUTF16  // XXX must read encoding
-      case ('<' , '?' , 'x' , 'm' ) => readASCIIEncoding
-      case (0x4C, 0x6F, 0xA7, 0x94) => utf8         // XXX EBCDIC
-      case _                        => utf8         // no XML or text declaration present
-    }
-    resetAndRet
-  }
-}
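Because the method calls `mark`/`reset`, the `InputStream` handed to `readEncodingFromStream` must support marking (hence "should be buffered"). A short sketch of the intended call pattern; the file name is hypothetical:

    import java.io.{ BufferedInputStream, FileInputStream, InputStreamReader }
    import scala.xml.include.sax.EncodingHeuristics

    val in  = new BufferedInputStream(new FileInputStream("doc.xml"))
    val enc = EncodingHeuristics.readEncodingFromStream(in)   // e.g. "UTF-8"
    // the stream was reset, so decoding starts again from the first byte
    val reader = new InputStreamReader(in, enc)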
diff --git a/src/library/scala/xml/include/sax/Main.scala b/src/library/scala/xml/include/sax/Main.scala
deleted file mode 100644
index 92d4d6e..0000000
--- a/src/library/scala/xml/include/sax/Main.scala
+++ /dev/null
@@ -1,82 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-package include.sax
-
-import scala.util.control.Exception.{ catching, ignoring }
-import org.xml.sax.XMLReader
-import org.xml.sax.helpers.XMLReaderFactory
-
-@deprecated("Code example will be moved to documentation.", "2.10.0")
-object Main {
-  private val namespacePrefixes = "http://xml.org/sax/features/namespace-prefixes"
-  private val lexicalHandler = "http://xml.org/sax/properties/lexical-handler"
-
-  /**
-  * The driver method for xinc.
-  * Output is written to System.out via Console.
-  *
-  * @param args  contains the URLs and/or filenames
-  *              of the documents to be processed.
-  */
-  def main(args: Array[String]) {
-    def saxe[T](body: => T) = catching[T](classOf[SAXException]) opt body
-    def fail(msg: String) = System.err.println(msg)
-
-    val parser: XMLReader =
-      saxe[XMLReader](XMLReaderFactory.createXMLReader()) getOrElse (
-        saxe[XMLReader](XMLReaderFactory.createXMLReader(XercesClassName)) getOrElse (
-          return fail("Could not find an XML parser")
-        )
-      )
-
-    // Need better namespace handling
-    try parser.setFeature(namespacePrefixes, true)
-    catch { case e: SAXException => return System.err.println(e) }
-
-    if (args.isEmpty)
-      return
-
-    def dashR = args.size >= 2 && args(0) == "-r"
-    val args2 = if (dashR) args drop 2 else args
-    val resolver: Option[EntityResolver] =
-      if (dashR) None
-      else catching(classOf[Exception]) opt {
-          val r = Class.forName(args(1)).newInstance().asInstanceOf[EntityResolver]
-          parser setEntityResolver r
-          r
-        } orElse (return fail("Could not load requested EntityResolver"))
-
-    for (arg <- args2) {
-      try {
-        val includer = new XIncludeFilter()
-        includer setParent parser
-        val s = new XIncluder(System.out, "UTF-8")
-        includer setContentHandler s
-
-        resolver map (includer setEntityResolver _)
-        // SAXException here means will not support comments
-        ignoring(classOf[SAXException]) {
-          includer.setProperty(lexicalHandler, s)
-          s setFilter includer
-        }
-        includer parse arg
-      }
-      catch {
-        case e: SAXParseException =>
-          fail(e.toString)
-          fail("Problem in %s at line %d".format(e.getSystemId, e.getLineNumber))
-        case e: SAXException =>
-          fail(e.toString)
-      }
-    }
-  }
-}
diff --git a/src/library/scala/xml/include/sax/XIncludeFilter.scala b/src/library/scala/xml/include/sax/XIncludeFilter.scala
deleted file mode 100644
index 7297693..0000000
--- a/src/library/scala/xml/include/sax/XIncludeFilter.scala
+++ /dev/null
@@ -1,372 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-package include.sax
-
-import scala.xml.include._
-
-import org.xml.sax.{ Attributes, XMLReader, Locator }
-import org.xml.sax.helpers.{ XMLReaderFactory, XMLFilterImpl, NamespaceSupport, AttributesImpl }
-
-import java.io.{ InputStream, BufferedInputStream, InputStreamReader, IOException, UnsupportedEncodingException }
-import java.util.Stack
-import java.net.{ URL, MalformedURLException }
-
-/** This is a SAX filter which resolves all XInclude include elements before
- *  passing them on to the client application. Currently this class has the
- *  following known deviation from the XInclude specification:
- *
- *  1. XPointer is not supported.
- *
- *  Furthermore, I would definitely use a new instance of this class for each
- *  document you want to process. I doubt it can be used successfully on
- *  multiple documents. Furthermore, I can virtually guarantee that this
- *  class is not thread safe. You have been warned.
- *
- *  Since this class is not designed to be subclassed, and since I have not
- *  yet considered how that might affect the methods herein or what other
- *  protected methods might be needed to support subclasses, I have declared
- *  this class final. I may remove this restriction later, though the use-case
- *  for subclassing is weak. This class is designed to have its functionality
- *  extended via a horizontal chain of filters, not a vertical hierarchy of
- *  sub and superclasses.
- *
- *  To use this class:
- *
- *  - Construct an `XIncludeFilter` object with a known base URL
- *  - Pass the `XMLReader` object from which the raw document will be read to
- *    the `setParent()` method of this object.
- *  - Pass your own `ContentHandler` object to the `setContentHandler()`
- *    method of this object. This is the object which will receive events
- *    from the parsed and included document.
- *  - Optional: if you wish to receive comments, set your own `LexicalHandler`
- *    object as the value of this object's
- *    `http://xml.org/sax/properties/lexical-handler` property.
- *    Also make sure your `LexicalHandler` asks this object for the status of
- *    each comment using `insideIncludeElement` before doing anything with the
- *    comment.
- *  - Pass the URL of the document to read to this object's `parse()` method
- *
- *  e.g.
- *  {{{
- *  val includer = new XIncludeFilter(base)
- *  includer setParent parser
- *  includer setContentHandler new SAXXIncluder(System.out)
- *  includer parse args(i)
- *  }}}
- *  translated from Elliotte Rusty Harold's Java source.
- *
- * @author Burak Emir
- */
-class XIncludeFilter extends XMLFilterImpl {
-
-  final val XINCLUDE_NAMESPACE = "http://www.w3.org/2001/XInclude"
-
-  private val bases = new Stack[URL]()
-  private val locators = new Stack[Locator]()
-
-/*    private EntityResolver resolver;
-
-    public XIncludeFilter() {
-        this(null);
-    }
-
-    public XIncludeFilter(EntityResolver resolver) {
-        this.resolver = resolver;
-    }   */
-
-
-    // what if this isn't called????
-    // do I need to check this in startDocument() and push something
-    // there????
-  override def setDocumentLocator(locator: Locator) {
-    locators push locator
-    val base = locator.getSystemId()
-    try {
-      bases.push(new URL(base))
-    }
-    catch {
-      case e:MalformedURLException =>
-        throw new UnsupportedOperationException("Unrecognized SYSTEM ID: " + base)
-    }
-    super.setDocumentLocator(locator)
-  }
-
-
-  // necessary to throw away contents of non-empty XInclude elements
-  private var level = 0
-
-  /** This utility method returns true if and only if this reader is
-    * currently inside a non-empty include element. (This is '''not''' the
-    * same as being inside the node set which replaces the include element.)
-    * This is primarily needed for comments inside include elements.
-    * It must be checked by the actual `LexicalHandler` to see whether
-    * a comment is passed or not.
-    *
-    * @return boolean
-    */
-  def insideIncludeElement(): Boolean = level != 0
-
-  override def startElement(uri: String, localName: String, qName: String, atts1: Attributes) {
-    var atts = atts1
-    if (level == 0) { // We're not inside an xi:include element
-
-      // Adjust bases stack by pushing either the new
-      // value of xml:base or the base of the parent
-      val base = atts.getValue(NamespaceSupport.XMLNS, "base")
-      val parentBase = bases.peek().asInstanceOf[URL]
-      var currentBase = parentBase
-      if (base != null) {
-        try {
-          currentBase = new URL(parentBase, base)
-        }
-        catch {
-          case e: MalformedURLException =>
-            throw new SAXException("Malformed base URL: "
-                                   + currentBase, e)
-        }
-      }
-      bases push currentBase
-
-      if (uri.equals(XINCLUDE_NAMESPACE) && localName.equals("include")) {
-        // include external document
-        val href = atts.getValue("href")
-        // Verify that there is an href attribute
-        if (href == null) {
-          throw new SAXException("Missing href attribute")
-        }
-
-        var parse = atts getValue "parse"
-        if (parse == null) parse = "xml"
-
-        if (parse equals "text") {
-          val encoding = atts getValue "encoding"
-          includeTextDocument(href, encoding);
-        }
-        else if (parse equals "xml") {
-          includeXMLDocument(href);
-        }
-        // Need to check this also in DOM and JDOM????
-        else {
-          throw new SAXException(
-            "Illegal value for parse attribute: " + parse)
-        }
-        level += 1
-      }
-      else {
-        if (atRoot) {
-          // add xml:base attribute if necessary
-          val attsImpl = new AttributesImpl(atts)
-          attsImpl.addAttribute(NamespaceSupport.XMLNS, "base",
-                                "xml:base", "CDATA", currentBase.toExternalForm())
-          atts = attsImpl
-          atRoot = false
-        }
-        super.startElement(uri, localName, qName, atts)
-      }
-    }
-  }
-
-  override def endElement(uri: String, localName: String, qName: String) {
-    if (uri.equals(XINCLUDE_NAMESPACE)
-        && localName.equals("include")) {
-          level -= 1
-    }
-    else if (level == 0) {
-      bases.pop()
-      super.endElement(uri, localName, qName)
-    }
-  }
-
-  private var depth = 0;
-
-  override def startDocument() {
-    level = 0
-    if (depth == 0) super.startDocument()
-    depth += 1
-  }
-
-  override def endDocument() {
-    locators.pop()
-    bases.pop()  // pop the URL for the document itself
-    depth -= 1
-    if (depth == 0) super.endDocument()
-  }
-
-  // how do prefix mappings move across documents????
-  override def startPrefixMapping(prefix: String , uri: String) {
-    if (level == 0) super.startPrefixMapping(prefix, uri)
-  }
-
-  override def endPrefixMapping(prefix: String) {
-    if (level == 0) super.endPrefixMapping(prefix)
-  }
-
-  override def characters(ch: Array[Char], start: Int, length: Int) {
-    if (level == 0) super.characters(ch, start, length)
-  }
-
-  override def ignorableWhitespace(ch: Array[Char], start: Int, length: Int) {
-    if (level == 0) super.ignorableWhitespace(ch, start, length)
-  }
-
-  override def processingInstruction(target: String, data: String) {
-    if (level == 0) super.processingInstruction(target, data)
-  }
-
-  override def skippedEntity(name: String) {
-    if (level == 0) super.skippedEntity(name)
-  }
-
-  // convenience method for error messages
-  private def getLocation(): String = {
-    var locationString = ""
-    val locator = locators.peek().asInstanceOf[Locator]
-    var publicID = ""
-    var systemID = ""
-    var column = -1
-    var line = -1
-    if (locator != null) {
-      publicID = locator.getPublicId()
-      systemID = locator.getSystemId()
-      line = locator.getLineNumber()
-      column = locator.getColumnNumber()
-    }
-    locationString = (" in document included from " + publicID
-    + " at " + systemID
-    + " at line " + line + ", column " + column);
-
-    locationString
-  }
-
-  /** This utility method reads a document at a specified URL and fires off
-    * calls to `characters()`. It's used to include files with `parse="text"`.
-    *
-    * @param  url          URL of the document that will be read
-    * @param  encoding1    Encoding of the document; e.g. UTF-8,
-    *                      ISO-8859-1, etc.
-    * @return void
-    * @throws SAXException if the requested document cannot
-                           be downloaded from the specified URL
-                           or if the encoding is not recognized
-    */
-  private def includeTextDocument(url: String, encoding1: String) {
-    var encoding = encoding1
-    if (encoding == null || encoding.trim().equals("")) encoding = "UTF-8";
-    var source: URL = null
-    try {
-      val base = bases.peek().asInstanceOf[URL]
-      source = new URL(base, url)
-    }
-    catch {
-      case e: MalformedURLException =>
-        val ex = new UnavailableResourceException("Unresolvable URL " + url
-                                                  + getLocation())
-      ex.setRootCause(e)
-      throw new SAXException("Unresolvable URL " + url + getLocation(), ex)
-    }
-
-    try {
-      val uc = source.openConnection()
-      val in = new BufferedInputStream(uc.getInputStream())
-      var encodingFromHeader = uc.getContentEncoding()
-      var contentType = uc.getContentType()
-      if (encodingFromHeader != null)
-        encoding = encodingFromHeader
-      else {
-        // What if file does not have a MIME type but name ends in .xml????
-        // MIME types are case-insensitive
-        // Java may be picking this up from file URL
-        if (contentType != null) {
-          contentType = contentType.toLowerCase();
-          if (contentType.equals("text/xml")
-              || contentType.equals("application/xml")
-              || (contentType.startsWith("text/") && contentType.endsWith("+xml") )
-              || (contentType.startsWith("application/") && contentType.endsWith("+xml"))) {
-                encoding = EncodingHeuristics.readEncodingFromStream(in);
-              }
-        }
-      }
-      val reader = new InputStreamReader(in, encoding)
-      val c = new Array[Char](1024)
-      var charsRead: Int = 0  // bogus init value
-      do {
-        charsRead = reader.read(c, 0, 1024)
-        if (charsRead > 0) this.characters(c, 0, charsRead)
-      } while (charsRead != -1)
-    }
-    catch {
-      case e: UnsupportedEncodingException =>
-        throw new SAXException("Unsupported encoding: "
-                               + encoding + getLocation(), e)
-      case e: IOException =>
-        throw new SAXException("Document not found: "
-                               + source.toExternalForm() + getLocation(), e)
-    }
-
-  }
-
-  private var atRoot = false
-
-  /** This utility method reads a document at a specified URL
-    * and fires off calls to various `ContentHandler` methods.
-    * It's used to include files with `parse="xml"`.
-    *
-    * @param  url          URL of the document that will be read
-    * @return void
-    * @throws SAXException if the requested document cannot
-                           be downloaded from the specified URL.
-    */
-  private def includeXMLDocument(url: String) {
-    val source =
-      try new URL(bases.peek(), url)
-      catch {
-        case e: MalformedURLException =>
-          val ex = new UnavailableResourceException("Unresolvable URL " + url + getLocation())
-          ex setRootCause e
-          throw new SAXException("Unresolvable URL " + url + getLocation(), ex)
-      }
-
-    try {
-      val parser: XMLReader =
-        try XMLReaderFactory.createXMLReader()
-        catch {
-          case e: SAXException  =>
-            try XMLReaderFactory.createXMLReader(XercesClassName)
-            catch { case _: SAXException => return System.err.println("Could not find an XML parser") }
-        }
-
-      parser setContentHandler this
-      val resolver = this.getEntityResolver()
-      if (resolver != null)
-        parser setEntityResolver resolver
-
-      // save old level and base
-      val previousLevel = level
-      this.level = 0
-      if (bases contains source)
-        throw new SAXException(
-          "Circular XInclude Reference",
-          new CircularIncludeException("Circular XInclude Reference to " + source + getLocation())
-        )
-
-      bases push source
-      atRoot = true
-      parser parse source.toExternalForm()
-
-      // restore old level and base
-      this.level = previousLevel
-      bases.pop()
-    }
-    catch {
-      case e: IOException =>
-        throw new SAXException("Document not found: " + source.toExternalForm() + getLocation(), e)
-    }
-  }
-}
diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala
deleted file mode 100644
index 5064d6b..0000000
--- a/src/library/scala/xml/include/sax/XIncluder.scala
+++ /dev/null
@@ -1,188 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-package include.sax
-
-import scala.xml.include._
-import scala.collection.mutable
-import org.xml.sax.{ ContentHandler, XMLReader, Locator, Attributes }
-import org.xml.sax.ext.LexicalHandler
-import java.io.{ File, OutputStream, OutputStreamWriter, Writer, IOException }
-
-/** XIncluder is a SAX `ContentHandler` that writes its XML document onto
- * an output stream after resolving all `xinclude:include` elements.
- *
- * Based on Elliotte Rusty Harold's SAXXIncluder.
- */
-class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler with LexicalHandler {
-
-  var out = new OutputStreamWriter(outs, encoding)
-
-  def setDocumentLocator(locator: Locator) {}
-
-  def startDocument() {
-    try {
-      out.write("<?xml version='1.0' encoding='"
-                + encoding + "'?>\r\n");
-    }
-    catch {
-      case e:IOException =>
-        throw new SAXException("Write failed", e)
-    }
-  }
-
-  def endDocument() {
-    try {
-      out.flush()
-    }
-    catch {
-      case e:IOException =>
-        throw new SAXException("Flush failed", e)
-    }
-  }
-
-  def startPrefixMapping(prefix: String , uri: String) {}
-
-  def endPrefixMapping(prefix: String) {}
-
-  def startElement(namespaceURI: String, localName: String, qualifiedName: String, atts: Attributes) = {
-    try {
-      out.write("<" + qualifiedName);
-      var i = 0; while (i < atts.getLength()) {
-        out.write(" ");
-        out.write(atts.getQName(i));
-        out.write("='");
-        val value = atts.getValue(i);
-        // @todo Need to use character references if the encoding
-        // can't support the character
-        out.write(scala.xml.Utility.escape(value))
-        out.write("'");
-        i += 1
-      }
-      out.write(">")
-    }
-    catch {
-      case e:IOException =>
-        throw new SAXException("Write failed", e)
-    }
-  }
-
-  def endElement(namespaceURI: String, localName:String, qualifiedName: String) {
-    try {
-      out.write("</" + qualifiedName + ">")
-    }
-    catch {
-      case e: IOException =>
-        throw new SAXException("Write failed", e)
-    }
-  }
-
-  // need to escape characters that are not in the given
-  // encoding using character references????
-  def characters(ch: Array[Char], start: Int, length: Int) {
-    try {
-      var  i = 0; while (i < length) {
-        val c = ch(start+i);
-        if (c == '&') out.write("&amp;");
-        else if (c == '<') out.write("&lt;");
-        // This next fix is normally not necessary.
-        // However, it is required if text contains ]]>
-        // (The end CDATA section delimiter)
-        else if (c == '>') out.write("&gt;");
-        else out.write(c);
-        i += 1
-      }
-    }
-    catch {
-      case e: IOException =>
-        throw new SAXException("Write failed", e);
-    }
-  }
-
-  def  ignorableWhitespace(ch: Array[Char], start: Int , length: Int) {
-    this.characters(ch, start, length)
-  }
-
-  // do I need to escape text in PI????
-  def processingInstruction(target: String, data: String) {
-    try {
-      out.write("<?" + target + " " + data + "?>")
-    }
-    catch {
-      case e:IOException =>
-        throw new SAXException("Write failed", e)
-    }
-  }
-
-  def skippedEntity(name: String) {
-    try {
-      out.write("&" + name + ";")
-    }
-    catch {
-      case e:IOException =>
-        throw new SAXException("Write failed", e)
-    }
-  }
-
-  // LexicalHandler methods
-  private var inDTD: Boolean = false
-  private val entities = new mutable.Stack[String]()
-
-  def startDTD(name: String, publicID: String, systemID: String) {
-    inDTD = true
-    // if this is the source document, output a DOCTYPE declaration
-    if (entities.isEmpty) {
-      var id = ""
-      if (publicID != null) id = " PUBLIC \"" + publicID + "\" \"" + systemID + '"';
-      else if (systemID != null) id = " SYSTEM \"" + systemID + '"';
-      try {
-        out.write("<!DOCTYPE " + name + id + ">\r\n")
-      }
-      catch {
-        case e:IOException =>
-          throw new SAXException("Error while writing DOCTYPE", e)
-      }
-    }
-  }
-  def endDTD() {}
-
-  def startEntity(name: String) {
-    entities push name
-  }
-
-  def endEntity(name: String) {
-    entities.pop()
-  }
-
-  def startCDATA() {}
-  def endCDATA() {}
-
-  // Just need this reference so we can ask if a comment is
-  // inside an include element or not
-  private var filter: XIncludeFilter = null
-
-  def setFilter(filter: XIncludeFilter) {
-    this.filter = filter
-  }
-
-  def comment(ch: Array[Char], start: Int, length: Int) {
-    if (!inDTD && !filter.insideIncludeElement()) {
-      try {
-        out.write("<!--")
-        out.write(ch, start, length)
-        out.write("-->")
-      }
-      catch {
-        case e: IOException =>
-          throw new SAXException("Write failed", e)
-      }
-    }
-  }
-}
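`XIncluder` is typically paired with `XIncludeFilter`, much as the `Main` driver above does. A condensed sketch with error handling omitted; the input document name is hypothetical:

    import org.xml.sax.helpers.XMLReaderFactory
    import scala.xml.include.sax.{ XIncludeFilter, XIncluder }

    val parser   = XMLReaderFactory.createXMLReader()
    val includer = new XIncludeFilter()
    includer setParent parser

    val serializer = new XIncluder(System.out, "UTF-8")
    includer setContentHandler serializer

    // optional: receive comments, skipping those inside xi:include elements
    includer.setProperty("http://xml.org/sax/properties/lexical-handler", serializer)
    serializer setFilter includer

    includer parse "document.xml"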
diff --git a/src/library/scala/xml/package.scala b/src/library/scala/xml/package.scala
deleted file mode 100644
index 4001cc5..0000000
--- a/src/library/scala/xml/package.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala
-
-package object xml {
-  val XercesClassName = "org.apache.xerces.parsers.SAXParser"
-
-  type SAXException       = org.xml.sax.SAXException
-  type SAXParseException  = org.xml.sax.SAXParseException
-  type EntityResolver     = org.xml.sax.EntityResolver
-  type InputSource        = org.xml.sax.InputSource
-  type SAXParser          = javax.xml.parsers.SAXParser
-}
diff --git a/src/library/scala/xml/parsing/ConstructingHandler.scala b/src/library/scala/xml/parsing/ConstructingHandler.scala
deleted file mode 100755
index 6fda4da..0000000
--- a/src/library/scala/xml/parsing/ConstructingHandler.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package parsing
-
-/** Implementation of MarkupHandler that constructs nodes.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-abstract class ConstructingHandler extends MarkupHandler
-{
-  val preserveWS: Boolean
-
-  def elem(pos: Int, pre: String, label: String, attrs: MetaData,
-           pscope: NamespaceBinding, empty: Boolean, nodes: NodeSeq): NodeSeq =
-    Elem(pre, label, attrs, pscope, empty, nodes:_*)
-
-  def procInstr(pos: Int, target: String, txt: String) =
-    ProcInstr(target, txt)
-
-  def comment(pos: Int, txt: String)  = Comment(txt)
-  def entityRef(pos: Int, n: String)  = EntityRef(n)
-  def text(pos: Int, txt: String)     = Text(txt)
-}
diff --git a/src/library/scala/xml/parsing/ConstructingParser.scala b/src/library/scala/xml/parsing/ConstructingParser.scala
deleted file mode 100644
index 4044118..0000000
--- a/src/library/scala/xml/parsing/ConstructingParser.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package parsing
-
-import java.io.File
-import scala.io.Source
-
-object ConstructingParser {
-  def fromFile(inp: File, preserveWS: Boolean) =
-    new ConstructingParser(Source.fromFile(inp), preserveWS).initialize
-
-  def fromSource(inp: Source, preserveWS: Boolean) =
-    new ConstructingParser(inp, preserveWS).initialize
-}
-
-/** An XML parser. Parses XML and invokes callback methods of a MarkupHandler.
-  * Don't forget to call nextch on a freshly instantiated parser in order to
-  * initialize it. If you get the parser from the object method, initialization
-  * is already done for you.
-  *
-  * {{{
-  * object parseFromURL {
-  *   def main(args: Array[String]) {
-  *     val url = args(0)
-  *     val src = scala.io.Source.fromURL(url)
-  *     val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false) // fromSource initializes automatically
-  *     val doc = cpa.document()
-  *
-  *     // let's see what it is
-  *     val ppr = new scala.xml.PrettyPrinter(80, 5)
-  *     val ele = doc.docElem
-  *     println("finished parsing")
-  *     val out = ppr.format(ele)
-  *     println(out)
-  *   }
-  * }
-  * }}} */
-class ConstructingParser(val input: Source, val preserveWS: Boolean)
-extends  ConstructingHandler
-with     ExternalSources
-with     MarkupParser  {
-
-  // default impl. of Logged
-  override def log(msg: String): Unit = {}
-}
diff --git a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala b/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
deleted file mode 100755
index 0152e44..0000000
--- a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package parsing
-
-
-/** Default implementation of markup handler always returns `NodeSeq.Empty` */
-abstract class DefaultMarkupHandler extends MarkupHandler {
-
-  def elem(pos: Int, pre: String, label: String, attrs: MetaData,
-           scope:NamespaceBinding, empty: Boolean, args: NodeSeq) = NodeSeq.Empty
-
-  def procInstr(pos: Int, target: String, txt: String) = NodeSeq.Empty
-
-  def comment(pos: Int, comment: String ): NodeSeq = NodeSeq.Empty
-
-  def entityRef(pos: Int, n: String) = NodeSeq.Empty
-
-  def text(pos: Int, txt:String) = NodeSeq.Empty
-
-}
diff --git a/src/library/scala/xml/parsing/ExternalSources.scala b/src/library/scala/xml/parsing/ExternalSources.scala
deleted file mode 100644
index aaac588..0000000
--- a/src/library/scala/xml/parsing/ExternalSources.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package parsing
-
-import java.net.URL
-import java.io.File.separator
-
-import scala.io.Source
-
-/**
- *  @author  Burak Emir
- *  @version 1.0
- */
-trait ExternalSources {
-  self: ExternalSources with MarkupParser with MarkupHandler =>
-
-  def externalSource(systemId: String): Source = {
-    if (systemId startsWith "http:")
-      return Source fromURL new URL(systemId)
-
-    val fileStr: String = input.descr match {
-      case x if x startsWith "file:"  => x drop 5
-      case x                          => x take ((x lastIndexOf separator) + 1)
-    }
-
-    Source.fromFile(fileStr + systemId)
-  }
-}
diff --git a/src/library/scala/xml/parsing/FactoryAdapter.scala b/src/library/scala/xml/parsing/FactoryAdapter.scala
deleted file mode 100644
index 5f776f5..0000000
--- a/src/library/scala/xml/parsing/FactoryAdapter.scala
+++ /dev/null
@@ -1,186 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-package parsing
-
-import java.io.{ InputStream, Reader, File, FileDescriptor, FileInputStream }
-import scala.collection.{ mutable, Iterator }
-import org.xml.sax.Attributes
-import org.xml.sax.helpers.DefaultHandler
-
-// can be mixed into FactoryAdapter if desired
-trait ConsoleErrorHandler extends DefaultHandler {
-  // ignore warning, crimson warns even for entity resolution!
-  override def warning(ex: SAXParseException): Unit = { }
-  override def error(ex: SAXParseException): Unit = printError("Error", ex)
-  override def fatalError(ex: SAXParseException): Unit = printError("Fatal Error", ex)
-
-  protected def printError(errtype: String, ex: SAXParseException): Unit =
-    Console.withOut(Console.err) {
-      val s = "[%s]:%d:%d: %s".format(
-        errtype, ex.getLineNumber, ex.getColumnNumber, ex.getMessage)
-      Console.println(s)
-      Console.flush
-    }
-}
-
-/** SAX adapter class, for use with Java SAX parser. Keeps track of
- *  namespace bindings, without relying on namespace handling of the
- *  underlying SAX parser.
- */
-abstract class FactoryAdapter extends DefaultHandler with factory.XMLLoader[Node] {
-  var rootElem: Node = null
-
-  val buffer      = new StringBuilder()
-  val attribStack = new mutable.Stack[MetaData]
-  val hStack      = new mutable.Stack[Node]   // [ element ] contains siblings
-  val tagStack    = new mutable.Stack[String]
-  var scopeStack  = new mutable.Stack[NamespaceBinding]
-
-  var curTag : String = null
-  var capture: Boolean = false
-
-  // abstract methods
-
-  /** Tests if an XML element contains text.
-   * @return true if element named `localName` contains text.
-   */
-  def nodeContainsText(localName: String): Boolean // abstract
-
-  /** creates a new non-text (tree) node.
-   * @param elemName
-   * @param attribs
-   * @param chIter
-   * @return a new XML element.
-   */
-  def createNode(pre: String, elemName: String, attribs: MetaData,
-                 scope: NamespaceBinding, chIter: List[Node]): Node // abstract
-
-  /** creates a Text node.
-   * @param text
-   * @return a new Text node.
-   */
-  def createText(text: String): Text // abstract
-
-  /** creates a new processing instruction node.
-  */
-  def createProcInstr(target: String, data: String): Seq[ProcInstr]
-
-  //
-  // ContentHandler methods
-  //
-
-  val normalizeWhitespace = false
-
-  /** Characters.
-  * @param ch
-  * @param offset
-  * @param length
-  */
-  override def characters(ch: Array[Char], offset: Int, length: Int): Unit = {
-    if (!capture) return
-    // compliant: report every character
-    else if (!normalizeWhitespace) buffer.appendAll(ch, offset, length)
-    // normalizing whitespace is not compliant, but useful
-    else {
-      var it = ch.slice(offset, offset + length).iterator
-      while (it.hasNext) {
-        val c = it.next
-        val isSpace = c.isWhitespace
-        buffer append (if (isSpace) ' ' else c)
-        if (isSpace)
-          it = it dropWhile (_.isWhitespace)
-      }
-    }
-  }
-
-  private def splitName(s: String) = {
-    val idx = s indexOf ':'
-    if (idx < 0) (null, s)
-    else (s take idx, s drop (idx + 1))
-  }
-
-  /* ContentHandler methods */
-
-  /* Start element. */
-  override def startElement(
-    uri: String,
-    _localName: String,
-    qname: String,
-    attributes: Attributes): Unit =
-  {
-    captureText()
-    tagStack push curTag
-    curTag = qname
-
-    val localName = splitName(qname)._2
-    capture = nodeContainsText(localName)
-
-    hStack push null
-    var m: MetaData = Null
-    var scpe: NamespaceBinding =
-      if (scopeStack.isEmpty) TopScope
-      else scopeStack.top
-
-    for (i <- 0 until attributes.getLength()) {
-      val qname = attributes getQName i
-      val value = attributes getValue i
-      val (pre, key) = splitName(qname)
-      def nullIfEmpty(s: String) = if (s == "") null else s
-
-      if (pre == "xmlns" || (pre == null && qname == "xmlns")) {
-        val arg = if (pre == null) null else key
-        scpe = new NamespaceBinding(arg, nullIfEmpty(value), scpe)
-      }
-      else
-        m = Attribute(Option(pre), key, Text(value), m)
-    }
-
-    scopeStack push scpe
-    attribStack push m
-  }
-
-
-  /** captures text, possibly normalizing whitespace
-   */
-  def captureText(): Unit = {
-    if (capture && buffer.length > 0)
-      hStack push createText(buffer.toString)
-
-    buffer.clear()
-  }
-
-  /** End element.
-   * @param uri
-   * @param _localName
-   * @param qname
-   * @throws org.xml.sax.SAXException if ..
-   */
-  override def endElement(uri: String , _localName: String, qname: String): Unit = {
-    captureText()
-    val metaData = attribStack.pop
-
-    // reverse order to get it right
-    val v = (Iterator continually hStack.pop takeWhile (_ != null)).toList.reverse
-    val (pre, localName) = splitName(qname)
-    val scp = scopeStack.pop
-
-    // create element
-    rootElem = createNode(pre, localName, metaData, scp, v)
-    hStack push rootElem
-    curTag = tagStack.pop
-    capture = curTag != null && nodeContainsText(curTag) // root level
-  }
-
-  /** Processing instruction.
-  */
-  override def processingInstruction(target: String, data: String) {
-    hStack pushAll createProcInstr(target, data)
-  }
-}
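A concrete adapter only has to supply the four abstract factory methods. A minimal sketch in the spirit of the library's NoBindingFactoryAdapter (simplified, without node caching); SimpleAdapter and SimpleXML are hypothetical names used here for illustration:

    import scala.xml._
    import scala.xml.parsing.FactoryAdapter

    class SimpleAdapter extends FactoryAdapter {
      // treat every element as one that may contain text
      def nodeContainsText(localName: String) = true

      def createNode(pre: String, label: String, attrs: MetaData,
                     scope: NamespaceBinding, children: List[Node]): Node =
        Elem(pre, label, attrs, scope, children.isEmpty, children: _*)

      def createText(text: String) = Text(text)

      def createProcInstr(target: String, data: String) = List(ProcInstr(target, data))
    }

    // FactoryAdapter mixes in factory.XMLLoader[Node]; point a loader at the adapter:
    object SimpleXML extends scala.xml.factory.XMLLoader[Node] {
      override def adapter = new SimpleAdapter
    }

    val root = SimpleXML.loadString("<a><b/>hello</a>")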
diff --git a/src/library/scala/xml/parsing/FatalError.scala b/src/library/scala/xml/parsing/FatalError.scala
deleted file mode 100644
index a8b4f8f..0000000
--- a/src/library/scala/xml/parsing/FatalError.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |                                         **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package parsing
-
-/** !!! This is poorly named, but I guess it's in the API.
- */
-case class FatalError(msg: String) extends java.lang.RuntimeException(msg)
diff --git a/src/library/scala/xml/parsing/MarkupHandler.scala b/src/library/scala/xml/parsing/MarkupHandler.scala
deleted file mode 100755
index 7028161..0000000
--- a/src/library/scala/xml/parsing/MarkupHandler.scala
+++ /dev/null
@@ -1,124 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package parsing
-
-import scala.collection.mutable
-import scala.io.Source
-import scala.util.logging.Logged
-import scala.xml.dtd._
-
-/** Class that handles markup - provides callback methods to MarkupParser.
- *  The default is nonvalidating behaviour.
- *
- *  @author  Burak Emir
- *  @version 1.0
- *
- *  @todo can we ignore more entity declarations (i.e. those with extIDs)?
- *  @todo expanding entity references
- */
-abstract class MarkupHandler extends Logged
-{
-  /** Returns true if this markup handler is validating. */
-  val isValidating: Boolean = false
-
-  var decls: List[Decl] = Nil
-  var ent: mutable.Map[String, EntityDecl] = new mutable.HashMap[String, EntityDecl]()
-
-  def lookupElemDecl(Label: String): ElemDecl = {
-    for (z @ ElemDecl(Label, _) <- decls)
-      return z
-
-    null
-  }
-
-  def replacementText(entityName: String): Source =
-    Source fromString ((ent get entityName) match {
-      case Some(ParsedEntityDecl(_, IntDef(value)))     => value
-      case Some(ParameterEntityDecl(_, IntDef(value)))  => " %s " format value
-      case Some(_)                                      => "<!-- %s; -->" format entityName
-      case None                                         => "<!-- unknown entity %s; -->" format entityName
-    })
-
-  def endDTD(n: String): Unit = ()
-
-  /** callback method invoked by MarkupParser after start-tag of element.
-   *
-   *  @param pos      the position in the sourcefile
-   *  @param pre      the prefix
-   *  @param label    the local name
-   *  @param attrs    the attributes (metadata)
-   */
-  def elemStart(pos: Int, pre: String, label: String, attrs: MetaData, scope: NamespaceBinding): Unit = ()
-
-  /** callback method invoked by MarkupParser after end-tag of element.
-   *
-   *  @param pos      the position in the source file
-   *  @param pre      the prefix
-   *  @param label    the local name
-   */
-  def elemEnd(pos: Int, pre: String, label: String): Unit = ()
-
-  /** callback method invoked by MarkupParser after parsing an element,
-   *  between the elemStart and elemEnd callbacks
-   *
-   *  @param pos      the position in the source file
-   *  @param pre      the prefix
-   *  @param label    the local name
-   *  @param attrs    the attributes (metadata)
-   *  @param empty    `true` if the element was previously empty; `false` otherwise.
-   *  @param args     the children of this element
-   */
-  def elem(pos: Int, pre: String, label: String, attrs: MetaData, scope: NamespaceBinding, empty: Boolean, args: NodeSeq): NodeSeq
-
-  /** callback method invoked by MarkupParser after parsing PI.
-   */
-  def procInstr(pos: Int, target: String, txt: String): NodeSeq
-
-  /** callback method invoked by MarkupParser after parsing comment.
-   */
-  def comment(pos: Int, comment: String): NodeSeq
-
-  /** callback method invoked by MarkupParser after parsing entity ref.
-   *  @todo expanding entity references
-   */
-  def entityRef(pos: Int, n: String): NodeSeq
-
-  /** callback method invoked by MarkupParser after parsing text.
-   */
-  def text(pos: Int, txt: String): NodeSeq
-
-  // DTD handler methods
-
-  def elemDecl(n: String, cmstr: String): Unit = ()
-
-  def attListDecl(name: String, attList: List[AttrDecl]): Unit = ()
-
-  private def someEntityDecl(name: String, edef: EntityDef, f: (String, EntityDef) => EntityDecl): Unit =
-    edef match {
-      case _: ExtDef if !isValidating =>  // ignore (cf REC-xml 4.4.1)
-      case _  =>
-        val y = f(name, edef)
-        decls ::= y
-        ent.update(name, y)
-    }
-
-  def parameterEntityDecl(name: String, edef: EntityDef): Unit =
-    someEntityDecl(name, edef, ParameterEntityDecl.apply _)
-
-  def parsedEntityDecl(name: String, edef: EntityDef): Unit =
-    someEntityDecl(name, edef, ParsedEntityDecl.apply _)
-
-  def peReference(name: String) { decls ::= PEReference(name) }
-  def unparsedEntityDecl(name: String, extID: ExternalID, notat: String): Unit = ()
-  def notationDecl(notat: String, extID: ExternalID): Unit = ()
-  def reportSyntaxError(pos: Int, str: String): Unit
-}
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
deleted file mode 100755
index f9ff54d..0000000
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ /dev/null
@@ -1,941 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-package parsing
-
-import scala.io.Source
-import scala.xml.dtd._
-import Utility.Escapes.{ pairs => unescape }
-
-/**
- * An XML parser.
- *
- * Parses XML 1.0, invokes callback methods of a `MarkupHandler` and returns
- * whatever the markup handler returns. Use `ConstructingParser` if you just
- * want to parse XML to construct instances of `scala.xml.Node`.
- *
- * While XML elements are returned, DTD declarations - if handled - are
- * collected using side-effects.
- *
- * @author  Burak Emir
- * @version 1.0
- */
-trait MarkupParser extends MarkupParserCommon with TokenTests
-{
-  self: MarkupParser with MarkupHandler =>
-
-  type PositionType = Int
-  type InputType    = Source
-  type ElementType  = NodeSeq
-  type AttributesType = (MetaData, NamespaceBinding)
-  type NamespaceType = NamespaceBinding
-
-  def truncatedError(msg: String): Nothing = throw FatalError(msg)
-  def errorNoEnd(tag: String) = throw FatalError("expected closing tag of " + tag)
-
-  def xHandleError(that: Char, msg: String) = reportSyntaxError(msg)
-
-  val input: Source
-
-  /** if true, does not remove surplus whitespace */
-  val preserveWS: Boolean
-
-  def externalSource(systemLiteral: String): Source
-
-  //
-  // variables, values
-  //
-
-  protected var curInput: Source = input
-
-  // See ticket #3720 for motivations.
-  private class WithLookAhead(underlying: Source) extends Source {
-    private val queue = scala.collection.mutable.Queue[Char]()
-    def lookahead(): BufferedIterator[Char] = {
-      val iter = queue.iterator ++ new Iterator[Char] {
-        def hasNext = underlying.hasNext
-        def next() = { val x = underlying.next(); queue += x; x }
-      }
-      iter.buffered
-    }
-    val iter = new Iterator[Char] {
-      def hasNext = underlying.hasNext || !queue.isEmpty
-      def next() = if (!queue.isEmpty) queue.dequeue() else underlying.next()
-    }
-  }
-
-  def lookahead(): BufferedIterator[Char] = curInput match {
-    case curInputWLA:WithLookAhead =>
-      curInputWLA.lookahead()
-    case _ =>
-      val newInput = new WithLookAhead(curInput)
-      curInput = newInput
-      newInput.lookahead()
-  }
-
-
-  /** the handler of the markup, returns this */
-  private val handle: MarkupHandler = this
-
-  /** stack of inputs */
-  var inpStack: List[Source] = Nil
-
-  /** holds the position in the source file */
-  var pos: Int = _
-
-  /* used when reading external subset */
-  var extIndex = -1
-
-  /** holds temporary values of pos */
-  var tmppos: Int = _
-
-  /** holds the next character */
-  var nextChNeeded: Boolean = false
-  var reachedEof: Boolean = false
-  var lastChRead: Char = _
-  def ch: Char = {
-    if (nextChNeeded) {
-      if (curInput.hasNext) {
-        lastChRead = curInput.next
-        pos = curInput.pos
-      } else {
-        val ilen = inpStack.length;
-        //Console.println("  ilen = "+ilen+ " extIndex = "+extIndex);
-        if ((ilen != extIndex) && (ilen > 0)) {
-          /** for external source, inpStack == Nil ! need notify of eof! */
-          pop()
-        } else {
-          reachedEof = true
-          lastChRead = 0.asInstanceOf[Char]
-        }
-      }
-      nextChNeeded = false
-    }
-    lastChRead
-  }
-
-  /** character buffer, for names */
-  protected val cbuf = new StringBuilder()
-
-  var dtd: DTD = null
-
-  protected var doc: Document = null
-
-  def eof: Boolean = { ch; reachedEof }
-
-  //
-  // methods
-  //
-
-  /** {{{
-   *  <? prolog ::= xml S ... ?>
-   *  }}} */
-  def xmlProcInstr(): MetaData = {
-    xToken("xml")
-    xSpace
-    val (md,scp) = xAttributes(TopScope)
-    if (scp != TopScope)
-      reportSyntaxError("no xmlns definitions here, please.");
-    xToken('?')
-    xToken('>')
-    md
-  }
-
-  /** Factored out common code.
-   */
-  private def prologOrTextDecl(isProlog: Boolean): (Option[String], Option[String], Option[Boolean]) = {
-    var info_ver: Option[String] = None
-    var info_enc: Option[String] = None
-    var info_stdl: Option[Boolean] = None
-
-    var m = xmlProcInstr()
-    var n = 0
-
-    if (isProlog)
-      xSpaceOpt
-
-    m("version") match {
-      case null =>
-      case Text("1.0") => info_ver = Some("1.0"); n += 1
-      case _     => reportSyntaxError("cannot deal with versions != 1.0")
-    }
-
-    m("encoding") match {
-      case null =>
-      case Text(enc) =>
-        if (!isValidIANAEncoding(enc))
-          reportSyntaxError("\"" + enc + "\" is not a valid encoding")
-        else {
-          info_enc = Some(enc)
-          n += 1
-        }
-    }
-
-    if (isProlog) {
-      m("standalone") match {
-        case null =>
-        case Text("yes") => info_stdl = Some(true);  n += 1
-        case Text("no")  => info_stdl = Some(false); n += 1
-        case _     => reportSyntaxError("either 'yes' or 'no' expected")
-      }
-    }
-
-    if (m.length - n != 0) {
-      val s = if (isProlog) "SDDecl? " else ""
-      reportSyntaxError("VersionInfo EncodingDecl? %sor '?>' expected!" format s)
-    }
-
-    (info_ver, info_enc, info_stdl)
-  }
-
-  /** {{{
-   *  <? prolog ::= xml S?
-   *  // this is a bit more lenient than necessary...
-   *  }}} */
-  def prolog(): (Option[String], Option[String], Option[Boolean]) =
-    prologOrTextDecl(true)
-
-  /** prolog, but without standalone */
-  def textDecl(): (Option[String], Option[String]) =
-    prologOrTextDecl(false) match { case (x1, x2, _)  => (x1, x2) }
-
-  /** {{{
-   *  [22]     prolog      ::= XMLDecl? Misc* (doctypedecl Misc*)?
-   *  [23]     XMLDecl     ::= '<?xml' VersionInfo EncodingDecl? SDDecl? S? '?>'
-   *  [24]     VersionInfo ::= S 'version' Eq ("'" VersionNum "'" | '"' VersionNum '"')
-   *  [25]     Eq          ::= S? '=' S?
-   *  [26]     VersionNum  ::= '1.0'
-   *  [27]     Misc        ::= Comment | PI | S
-   * }}} */
-  def document(): Document = {
-    doc = new Document()
-
-    this.dtd = null
-    var info_prolog: (Option[String], Option[String], Option[Boolean]) = (None, None, None)
-    if ('<' != ch) {
-      reportSyntaxError("< expected")
-      return null
-    }
-
-    nextch // is prolog ?
-    var children: NodeSeq = null
-    if ('?' == ch) {
-      nextch
-      info_prolog = prolog()
-      doc.version    = info_prolog._1
-      doc.encoding   = info_prolog._2
-      doc.standAlone = info_prolog._3
-
-      children = content(TopScope) // DTD handled as side effect
-    }
-    else {
-      val ts = new NodeBuffer()
-      content1(TopScope, ts)  // DTD handled as side effect
-      ts &+ content(TopScope)
-      children = NodeSeq.fromSeq(ts)
-    }
-    //println("[MarkupParser::document] children now: "+children.toList)
-    var elemCount = 0
-    var theNode: Node = null
-    for (c <- children) c match {
-      case _:ProcInstr =>
-      case _:Comment =>
-      case _:EntityRef => // todo: fix entities, shouldn't be "special"
-        reportSyntaxError("no entity references allowed here");
-      case s:SpecialNode =>
-        if (s.toString.trim().length > 0) //non-empty text nodes not allowed
-          elemCount += 2
-      case m:Node =>
-        elemCount += 1
-        theNode = m
-    }
-    if (1 != elemCount) {
-      reportSyntaxError("document must contain exactly one element")
-      Console.println(children.toList)
-    }
-
-    doc.children = children
-    doc.docElem = theNode
-    doc
-  }
-
-  /** append Unicode character to name buffer*/
-  protected def putChar(c: Char) = cbuf append c
-
-  /** As the current code requires you to call nextch once manually
-   *  after construction, this method formalizes that suboptimal reality.
-   */
-  def initialize: this.type = {
-    nextch
-    this
-  }
-
-  protected def ch_returning_nextch: Char = { val res = ch; nextch(); res }
-
-  def mkAttributes(name: String, pscope: NamespaceBinding): AttributesType =
-    if (isNameStart (ch)) xAttributes(pscope)
-    else (Null, pscope)
-
-  def mkProcInstr(position: Int, name: String, text: String): ElementType =
-    handle.procInstr(position, name, text)
-
-  /** this method tells ch to get the next character when next called */
-  def nextch() {
-    // Read current ch if needed
-    ch
-
-    // Mark next ch to be required
-    nextChNeeded = true
-  }
-
-  /** parse attribute and create namespace scope, metadata
-   *  {{{
-   *  [41] Attributes    ::= { S Name Eq AttValue }
-   *  }}}
-   */
-  def xAttributes(pscope: NamespaceBinding): (MetaData, NamespaceBinding) = {
-    var scope: NamespaceBinding = pscope
-    var aMap: MetaData = Null
-    while (isNameStart(ch)) {
-      val pos = this.pos
-
-      val qname = xName
-      val _     = xEQ
-      val value = xAttributeValue()
-
-      Utility.prefix(qname) match {
-        case Some("xmlns") =>
-          val prefix = qname.substring(6 /*xmlns:*/ , qname.length)
-          scope = new NamespaceBinding(prefix, value, scope)
-
-        case Some(prefix) =>
-          val key = qname.substring(prefix.length+1, qname.length)
-          aMap = new PrefixedAttribute(prefix, key, Text(value), aMap)
-
-        case _ =>
-          if( qname == "xmlns" )
-            scope = new NamespaceBinding(null, value, scope)
-          else
-            aMap = new UnprefixedAttribute(qname, Text(value), aMap)
-      }
-
-      if ((ch != '/') && (ch != '>') && ('?' != ch))
-        xSpace
-    }
-
-    if(!aMap.wellformed(scope))
-        reportSyntaxError( "double attribute");
-
-    (aMap,scope)
-  }
-
-  /** entity value, terminated by either ' or ". value may not contain <.
-   *  {{{
-   *       AttValue     ::= `'` { _  } `'`
-   *                      | `"` { _ } `"`
-   *  }}}
-   */
-  def xEntityValue(): String = {
-    val endch = ch
-    nextch
-    while (ch != endch && !eof) {
-      putChar(ch)
-      nextch
-    }
-    nextch
-    val str = cbuf.toString()
-    cbuf.length = 0
-    str
-  }
-
-  /** {{{
-   *  '<! CharData ::= [CDATA[ ( {char} - {char}"]]>"{char} ) ']]>'
-   *
-   *  see [15]
-   *  }}} */
-  def xCharData: NodeSeq = {
-    xToken("[CDATA[")
-    def mkResult(pos: Int, s: String): NodeSeq = {
-      handle.text(pos, s)
-      PCData(s)
-    }
-    xTakeUntil(mkResult, () => pos, "]]>")
-  }
-
-  /** {{{
-   *  Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
-   *
-   * see [15]
-   *  }}} */
-  def xComment: NodeSeq = {
-    val sb: StringBuilder = new StringBuilder()
-    xToken("--")
-    while (true) {
-      if (ch == '-'  && { sb.append(ch); nextch; ch == '-' }) {
-        sb.length = sb.length - 1
-        nextch
-        xToken('>')
-        return handle.comment(pos, sb.toString())
-      } else sb.append(ch)
-      nextch
-    }
-    throw FatalError("this cannot happen")
-  }
-
-  /* todo: move this into the NodeBuilder class */
-  def appendText(pos: Int, ts: NodeBuffer, txt: String): Unit = {
-    if (preserveWS)
-      ts &+ handle.text(pos, txt);
-    else
-      for (t <- TextBuffer.fromString(txt).toText) {
-        ts &+ handle.text(pos, t.text);
-      }
-  }
-
-  /** {{{
-   *  '<' content1 ::=  ...
-   *  }}} */
-  def content1(pscope: NamespaceBinding, ts: NodeBuffer) {
-    ch match {
-      case '!' =>
-        nextch
-      if ('[' == ch)                 // CDATA
-        ts &+ xCharData
-      else if ('D' == ch) // doctypedecl, parse DTD // @todo REMOVE HACK
-        parseDTD()
-      else // comment
-        ts &+ xComment
-      case '?' =>                    // PI
-        nextch
-        ts &+ xProcInstr
-      case _   =>
-        ts &+ element1(pscope)      // child
-    }
-  }
-
-  /** {{{
-   *  content1 ::=  '<' content1 | '&' charref ...
-   *  }}} */
-  def content(pscope: NamespaceBinding): NodeSeq = {
-    var ts = new NodeBuffer
-    var exit = eof
-    // todo: optimize seq repr.
-    def done = new NodeSeq { val theSeq = ts.toList }
-
-    while (!exit) {
-      tmppos = pos
-      exit = eof
-
-      if (eof)
-        return done
-
-      ch match {
-        case '<' => // another tag
-          nextch; ch match {
-            case '/'    => exit = true  // end tag
-            case _      => content1(pscope, ts)
-          }
-
-        // postcond: xEmbeddedBlock == false!
-        case '&' => // EntityRef or CharRef
-          nextch; ch match {
-            case '#'  =>  // CharacterRef
-              nextch
-              val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch))
-              xToken(';');
-              ts &+ theChar
-            case _ =>     // EntityRef
-              val n = xName
-              xToken(';')
-
-              if (unescape contains n) {
-                handle.entityRef(tmppos, n)
-                ts &+ unescape(n)
-              } else push(n)
-          }
-        case _ => // text content
-          appendText(tmppos, ts, xText)
-      }
-    }
-    done
-  } // content(NamespaceBinding)
-
-  /** {{{
-   *  externalID ::= SYSTEM S syslit
-   *                 PUBLIC S pubid S syslit
-   *  }}} */
-  def externalID(): ExternalID = ch match {
-    case 'S' =>
-      nextch
-      xToken("YSTEM")
-      xSpace
-      val sysID = systemLiteral()
-      new SystemID(sysID)
-    case 'P' =>
-      nextch; xToken("UBLIC")
-      xSpace
-      val pubID = pubidLiteral()
-      xSpace
-      val sysID = systemLiteral()
-      new PublicID(pubID, sysID)
-  }
-
-
-  /** parses document type declaration and assigns it to instance variable
-   *  dtd.
-   *  {{{
-   *  <! parseDTD ::= DOCTYPE name ... >
-   *  }}} */
-  def parseDTD() { // dirty but fast
-    var extID: ExternalID = null
-    if (this.dtd ne null)
-      reportSyntaxError("unexpected character (DOCTYPE already defined)")
-    xToken("DOCTYPE")
-    xSpace
-    val n = xName
-    xSpace
-    //external ID
-    if ('S' == ch || 'P' == ch) {
-      extID = externalID()
-      xSpaceOpt
-    }
-
-    /* parse external subset of DTD
-     */
-
-    if ((null != extID) && isValidating) {
-
-      pushExternal(extID.systemId)
-      extIndex = inpStack.length
-
-      extSubset()
-      pop()
-      extIndex = -1
-    }
-
-    if ('[' == ch) { // internal subset
-      nextch
-      /* TODO */
-      intSubset()
-      // TODO: do the DTD parsing?? ?!?!?!?!!
-      xToken(']')
-      xSpaceOpt
-    }
-    xToken('>')
-    this.dtd = new DTD {
-      /*override var*/ externalID = extID
-      /*override val */decls      = handle.decls.reverse
-    }
-    //this.dtd.initializeEntities();
-    if (doc ne null)
-      doc.dtd = this.dtd
-
-    handle.endDTD(n)
-  }
-
-  def element(pscope: NamespaceBinding): NodeSeq = {
-    xToken('<')
-    element1(pscope)
-  }
-
-  /** {{{
-   *  '<' element ::= xmlTag1 '>'  { xmlExpr | '{' simpleExpr '}' } ETag
-   *               | xmlTag1 '/' '>'
-   *  }}} */
-  def element1(pscope: NamespaceBinding): NodeSeq = {
-    val pos = this.pos
-    val (qname, (aMap, scope)) = xTag(pscope)
-    val (pre, local) = Utility.prefix(qname) match {
-      case Some(p) => (p, qname drop p.length+1)
-      case _       => (null, qname)
-    }
-    val ts = {
-      if (ch == '/') {  // empty element
-        xToken("/>")
-        handle.elemStart(pos, pre, local, aMap, scope)
-        NodeSeq.Empty
-      }
-      else {           // element with content
-        xToken('>')
-        handle.elemStart(pos, pre, local, aMap, scope)
-        val tmp = content(scope)
-        xEndTag(qname)
-        tmp
-      }
-    }
-    val res = handle.elem(pos, pre, local, aMap, scope, ts == NodeSeq.Empty, ts)
-    handle.elemEnd(pos, pre, local)
-    res
-  }
-
-  /** Parse character data.
-   *
-   *  precondition: `xEmbeddedBlock == false` (we are not in a scala block)
-   */
-  private def xText: String = {
-    var exit = false
-    while (! exit) {
-      putChar(ch)
-      val opos = pos
-      nextch
-
-      exit = eof || ( ch == '<' ) || ( ch == '&' )
-    }
-    val str = cbuf.toString
-    cbuf.length = 0
-    str
-  }
-
-  /** attribute value, terminated by either ' or ". value may not contain <.
-   *  {{{
-   *       AttValue     ::= `'` { _ } `'`
-   *                      | `"` { _ } `"`
-   *  }}} */
-  def systemLiteral(): String = {
-    val endch = ch
-    if (ch != '\'' && ch != '"')
-      reportSyntaxError("quote ' or \" expected");
-    nextch
-    while (ch != endch && !eof) {
-      putChar(ch)
-      nextch
-    }
-    nextch
-    val str = cbuf.toString()
-    cbuf.length = 0
-    str
-  }
-
-  /** {{{
-   *  [12]       PubidLiteral ::=        '"' PubidChar* '"' | "'" (PubidChar - "'")* "'"
-   *  }}} */
-  def pubidLiteral(): String = {
-    val endch = ch
-    if (ch!='\'' && ch != '"')
-      reportSyntaxError("quote ' or \" expected");
-    nextch
-    while (ch != endch && !eof) {
-      putChar(ch)
-      //println("hello '"+ch+"'"+isPubIDChar(ch))
-      if (!isPubIDChar(ch))
-        reportSyntaxError("char '"+ch+"' is not allowed in public id")
-      nextch
-    }
-    nextch
-    val str = cbuf.toString
-    cbuf.length = 0
-    str
-  }
-
-  //
-  //  dtd parsing
-  //
-
-  def extSubset(): Unit = {
-    var textdecl: (Option[String],Option[String]) = null
-    if (ch == '<') {
-      nextch
-      if (ch == '?') {
-        nextch
-        textdecl = textDecl()
-      } else
-        markupDecl1()
-    }
-    while (!eof)
-      markupDecl()
-  }
-
-  def markupDecl1() = {
-    def doInclude() = {
-      xToken('['); while(']' != ch) markupDecl(); nextch // ']'
-    }
-    def doIgnore() = {
-      xToken('['); while(']' != ch) nextch; nextch // ']'
-    }
-    if ('?' == ch) {
-      nextch
-      xProcInstr // simply ignore processing instructions!
-    } else {
-      xToken('!')
-      ch match {
-        case '-' =>
-          xComment // ignore comments
-
-        case 'E' =>
-          nextch
-          if ('L' == ch) {
-            nextch
-            elementDecl()
-          } else
-            entityDecl()
-
-        case 'A' =>
-          nextch
-          attrDecl()
-
-        case 'N' =>
-          nextch
-          notationDecl()
-
-        case '[' if inpStack.length >= extIndex =>
-          nextch
-          xSpaceOpt
-          ch match {
-            case '%' =>
-              nextch
-              val ent = xName
-              xToken(';')
-              xSpaceOpt
-
-              push(ent)
-              xSpaceOpt
-              val stmt = xName
-              xSpaceOpt
-
-              stmt match {
-                // parameter entity
-                case "INCLUDE" => doInclude()
-                case "IGNORE"  => doIgnore()
-              }
-            case 'I' =>
-              nextch
-              ch match {
-                case 'G' =>
-                  nextch
-                  xToken("NORE")
-                  xSpaceOpt
-                  doIgnore()
-                case 'N' =>
-                  nextch
-                  xToken("NCLUDE")
-                  doInclude()
-              }
-          }
-        xToken(']')
-        xToken('>')
-
-        case _  =>
-          curInput.reportError(pos, "unexpected character '"+ch+"', expected some markupdecl")
-        while (ch!='>')
-          nextch
-      }
-    }
-  }
-
-  def markupDecl(): Unit = ch match {
-    case '%' =>                  // parameter entity reference
-      nextch
-      val ent = xName
-      xToken(';')
-      if (!isValidating)
-        handle.peReference(ent)  //  n-v: just create PE-reference
-      else
-        push(ent)                //    v: parse replacementText
-
-    //peReference
-    case '<' =>
-      nextch
-      markupDecl1()
-    case _ if isSpace(ch) =>
-      xSpace
-    case _ =>
-      reportSyntaxError("markupdecl: unexpected character '"+ch+"' #" + ch.toInt)
-      nextch
-  }
-
-  /**  "rec-xml/#ExtSubset" pe references may not occur within markup declarations
-   */
-  def intSubset() {
-    //Console.println("(DEBUG) intSubset()")
-    xSpace
-    while (']' != ch)
-      markupDecl()
-  }
-
-  /** <! element := ELEMENT
-   */
-  def elementDecl() {
-    xToken("EMENT")
-    xSpace
-    val n = xName
-    xSpace
-    while ('>' != ch) {
-      //Console.println("["+ch+"]")
-      putChar(ch)
-      nextch
-    }
-    //Console.println("END["+ch+"]")
-    nextch
-    val cmstr = cbuf.toString()
-    cbuf.length = 0
-    handle.elemDecl(n, cmstr)
-  }
-
-  /** {{{
-   *  <! attlist := ATTLIST
-   *  }}} */
-  def attrDecl() = {
-    xToken("TTLIST")
-    xSpace
-    val n = xName
-    xSpace
-    var attList: List[AttrDecl] = Nil
-
-    // later: find the elemDecl for n
-    while ('>' != ch) {
-      val aname = xName
-      xSpace
-      // could be enumeration (foo,bar) parse this later :-/
-      while ('"' != ch && '\'' != ch && '#' != ch && '<' != ch) {
-        if (!isSpace(ch))
-          cbuf.append(ch)
-        nextch
-      }
-      val atpe = cbuf.toString
-      cbuf.length = 0
-
-      val defdecl: DefaultDecl = ch match {
-        case '\'' | '"' =>
-          DEFAULT(false, xAttributeValue())
-
-        case '#' =>
-          nextch
-          xName match {
-            case "FIXED"    => xSpace ; DEFAULT(true, xAttributeValue())
-            case "IMPLIED"  => IMPLIED
-            case "REQUIRED" => REQUIRED
-          }
-        case _ =>
-          null
-      }
-      xSpaceOpt
-
-      attList ::= AttrDecl(aname, atpe, defdecl)
-      cbuf.length = 0
-    }
-    nextch
-    handle.attListDecl(n, attList.reverse)
-  }
-
-  /** {{{
-   *  <! element := ELEMENT
-   *  }}} */
-  def entityDecl() = {
-    var isParameterEntity = false
-    var entdef: EntityDef = null
-    xToken("NTITY")
-    xSpace
-    if ('%' == ch) {
-      nextch
-      isParameterEntity = true
-      xSpace
-    }
-    val n = xName
-    xSpace
-    ch match {
-      case 'S' | 'P' => //sy
-        val extID = externalID()
-        if (isParameterEntity) {
-          xSpaceOpt
-          xToken('>')
-          handle.parameterEntityDecl(n, ExtDef(extID))
-        } else { // notation?
-          xSpace
-          if ('>' != ch) {
-            xToken("NDATA")
-            xSpace
-            val notat = xName
-            xSpaceOpt
-            xToken('>')
-            handle.unparsedEntityDecl(n, extID, notat)
-          } else {
-            nextch
-            handle.parsedEntityDecl(n, ExtDef(extID))
-          }
-        }
-
-      case '"' | '\'' =>
-        val av = xEntityValue()
-        xSpaceOpt
-        xToken('>')
-        if (isParameterEntity)
-          handle.parameterEntityDecl(n, IntDef(av))
-        else
-          handle.parsedEntityDecl(n, IntDef(av))
-    }
-    {}
-  } // entityDecl
-
-  /** {{{
-   *  'N' notationDecl ::= "OTATION"
-   *  }}} */
-  def notationDecl() {
-    xToken("OTATION")
-    xSpace
-    val notat = xName
-    xSpace
-    val extID = if (ch == 'S') {
-      externalID()
-    }
-    else if (ch == 'P') {
-      /** PublicID (without system, only used in NOTATION) */
-      nextch
-      xToken("UBLIC")
-      xSpace
-      val pubID = pubidLiteral()
-      xSpaceOpt
-      val sysID = if (ch != '>')
-        systemLiteral()
-      else
-        null;
-      new PublicID(pubID, sysID)
-    } else {
-      reportSyntaxError("PUBLIC or SYSTEM expected");
-      scala.sys.error("died parsing notationdecl")
-    }
-    xSpaceOpt
-    xToken('>')
-    handle.notationDecl(notat, extID)
-  }
-
-  def reportSyntaxError(pos: Int, str: String) { curInput.reportError(pos, str) }
-  def reportSyntaxError(str: String) { reportSyntaxError(pos, str) }
-  def reportValidationError(pos: Int, str: String) { reportSyntaxError(pos, str) }
-
-  def push(entityName: String) {
-    if (!eof)
-      inpStack = curInput :: inpStack
-
-    // can't push before getting next character if needed
-    ch
-
-    curInput = replacementText(entityName)
-    nextch
-  }
-
-  def pushExternal(systemId: String) {
-    if (!eof)
-      inpStack = curInput :: inpStack
-
-    // can't push before getting next character if needed
-    ch
-
-    curInput = externalSource(systemId)
-    nextch
-  }
-
-  def pop() {
-    curInput = inpStack.head
-    inpStack = inpStack.tail
-    lastChRead = curInput.ch
-    nextChNeeded = false
-    pos = curInput.pos
-    reachedEof = false // must be false, because of places where entity refs occur
-  }
-}
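For context, the src/library/scala/xml sources deleted in this commit live on as the separate scala-xml module in 2.11, so the small sketches added below assume that module is on the classpath. Here, a minimal sketch of driving the MarkupParser machinery above through the concrete ConstructingParser; the sample markup and object name are illustrative, not taken from this commit:

    import scala.io.Source
    import scala.xml.parsing.ConstructingParser

    object MarkupParserSketch {
      def main(args: Array[String]): Unit = {
        val src = Source.fromString("<greeting lang=\"en\">hello</greeting>")
        // ConstructingParser mixes in MarkupParser; fromSource calls initialize
        // (reading the first character), and document() then runs the prolog and
        // content parsing defined above. The second argument is preserveWS.
        val doc = ConstructingParser.fromSource(src, false).document()
        println(doc.docElem)   // <greeting lang="en">hello</greeting>
      }
    }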
diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala
deleted file mode 100644
index da64048..0000000
--- a/src/library/scala/xml/parsing/MarkupParserCommon.scala
+++ /dev/null
@@ -1,260 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-package parsing
-
-import scala.io.Source
-import scala.xml.dtd._
-import scala.annotation.switch
-import Utility.Escapes.{ pairs => unescape }
-
-import Utility.SU
-
-/** This is not a public trait - it contains common code shared
- *  between the library level XML parser and the compiler's.
- *  All members should be accessed through those.
- */
-private[scala] trait MarkupParserCommon extends TokenTests {
-  protected def unreachable = scala.sys.error("Cannot be reached.")
-
-  // type HandleType       // MarkupHandler, SymbolicXMLBuilder
-  type InputType        // Source, CharArrayReader
-  type PositionType     // Int, Position
-  type ElementType      // NodeSeq, Tree
-  type NamespaceType    // NamespaceBinding, Any
-  type AttributesType   // (MetaData, NamespaceBinding), mutable.Map[String, Tree]
-
-  def mkAttributes(name: String, pscope: NamespaceType): AttributesType
-  def mkProcInstr(position: PositionType, name: String, text: String): ElementType
-
-  /** parse a start or empty tag.
-   *  [40] STag         ::= '<' Name { S Attribute } [S]
-   *  [44] EmptyElemTag ::= '<' Name { S Attribute } [S]
-   */
-  protected def xTag(pscope: NamespaceType): (String, AttributesType) = {
-    val name = xName
-    xSpaceOpt
-
-    (name, mkAttributes(name, pscope))
-  }
-
-  /** '<?' ProcInstr ::= Name [S ({Char} - ({Char}'>?' {Char})]'?>'
-   *
-   * see [15]
-   */
-  def xProcInstr: ElementType = {
-    val n = xName
-    xSpaceOpt
-    xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>")
-  }
-
-  /** attribute value, terminated by either `'` or `"`. value may not contain `<`.
-   @param endCh either `'` or `"`
-   */
-  def xAttributeValue(endCh: Char): String = {
-    val buf = new StringBuilder
-    while (ch != endCh) {
-      // well-formedness constraint
-      if (ch == '<') return errorAndResult("'<' not allowed in attrib value", "")
-      else if (ch == SU) truncatedError("")
-      else buf append ch_returning_nextch
-    }
-    ch_returning_nextch
-    // @todo: normalize attribute value
-    buf.toString
-  }
-
-  def xAttributeValue(): String = {
-    val str = xAttributeValue(ch_returning_nextch)
-    // well-formedness constraint
-    normalizeAttributeValue(str)
-  }
-
-  private def takeUntilChar(it: Iterator[Char], end: Char): String = {
-    val buf = new StringBuilder
-    while (it.hasNext) it.next match {
-      case `end`  => return buf.toString
-      case ch     => buf append ch
-    }
-    scala.sys.error("Expected '%s'".format(end))
-  }
-
-  /** [42]  '<' xmlEndTag ::=  '<' '/' Name S? '>'
-   */
-  def xEndTag(startName: String) {
-    xToken('/')
-    if (xName != startName)
-      errorNoEnd(startName)
-
-    xSpaceOpt
-    xToken('>')
-  }
-
-  /** actually, Name ::= (Letter | '_' | ':') (NameChar)*  but starting with ':' cannot happen
-   *  Name ::= (Letter | '_') (NameChar)*
-   *
-   *  see  [5] of XML 1.0 specification
-   *
-   *  pre-condition:  ch != ':' // assured by definition of XMLSTART token
-   *  post-condition: name neither starts nor ends in ':'
-   */
-  def xName: String = {
-    if (ch == SU)
-      truncatedError("")
-    else if (!isNameStart(ch))
-      return errorAndResult("name expected, but char '%s' cannot start a name" format ch, "")
-
-    val buf = new StringBuilder
-
-    do buf append ch_returning_nextch
-    while (isNameChar(ch))
-
-    if (buf.last == ':') {
-      reportSyntaxError( "name cannot end in ':'" )
-      buf.toString dropRight 1
-    }
-    else buf.toString
-  }
-
-  private def attr_unescape(s: String) = s match {
-    case "lt"     => "<"
-    case "gt"     => ">"
-    case "amp"    => "&"
-    case "apos"   => "'"
-    case "quot"   => "\""
-    case "quote"  => "\""
-    case _        => "&" + s + ";"
-  }
-
-  /** Replaces only character references right now.
-   *  see spec 3.3.3
-   */
-  private def normalizeAttributeValue(attval: String): String = {
-    val buf = new StringBuilder
-    val it = attval.iterator.buffered
-
-    while (it.hasNext) buf append (it.next match {
-      case ' ' | '\t' | '\n' | '\r' => " "
-      case '&' if it.head == '#'    => it.next ; xCharRef(it)
-      case '&'                      => attr_unescape(takeUntilChar(it, ';'))
-      case c                        => c
-    })
-
-    buf.toString
-  }
-
-  /** CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
-   *            | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
-   *
-   * see [66]
-   */
-  def xCharRef(ch: () => Char, nextch: () => Unit): String =
-    Utility.parseCharRef(ch, nextch, reportSyntaxError _, truncatedError _)
-
-  def xCharRef(it: Iterator[Char]): String = {
-    var c = it.next
-    Utility.parseCharRef(() => c, () => { c = it.next }, reportSyntaxError _, truncatedError _)
-  }
-
-  def xCharRef: String = xCharRef(() => ch, () => nextch)
-
-  /** Create a lookahead reader which does not influence the input */
-  def lookahead(): BufferedIterator[Char]
-
-  /** The library and compiler parsers had the interesting distinction of
-   *  different behavior for nextch (a function for which there are a total
-   *  of two plausible behaviors, so we know the design space was fully
-   *  explored.) One of them returned the value of nextch before the increment
-   *  and one of them the new value.  So to unify code we have to at least
-   *  temporarily abstract over the nextchs.
-   */
-  def ch: Char
-  def nextch(): Unit
-  protected def ch_returning_nextch: Char
-  def eof: Boolean
-
-  // def handle: HandleType
-  var tmppos: PositionType
-
-  def xHandleError(that: Char, msg: String): Unit
-  def reportSyntaxError(str: String): Unit
-  def reportSyntaxError(pos: Int, str: String): Unit
-
-  def truncatedError(msg: String): Nothing
-  def errorNoEnd(tag: String): Nothing
-
-  protected def errorAndResult[T](msg: String, x: T): T = {
-    reportSyntaxError(msg)
-    x
-  }
-
-  def xToken(that: Char) {
-    if (ch == that) nextch
-    else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch))
-  }
-  def xToken(that: Seq[Char]) { that foreach xToken }
-
-  /** scan [S] '=' [S]*/
-  def xEQ() = { xSpaceOpt; xToken('='); xSpaceOpt }
-
-  /** skip optional space S? */
-  def xSpaceOpt() = while (isSpace(ch) && !eof) nextch
-
-  /** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
-  def xSpace() =
-    if (isSpace(ch)) { nextch; xSpaceOpt }
-    else xHandleError(ch, "whitespace expected")
-
-  /** Apply a function and return the passed value */
-  def returning[T](x: T)(f: T => Unit): T = { f(x); x }
-
-  /** Execute body with a variable saved and restored after execution */
-  def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = {
-    val saved = getter
-    try body
-    finally setter(saved)
-  }
-
-  /** Take characters from input stream until given String "until"
-   *  is seen.  Once seen, the accumulated characters are passed
-   *  along with the current Position to the supplied handler function.
-   */
-  protected def xTakeUntil[T](
-    handler: (PositionType, String) => T,
-    positioner: () => PositionType,
-    until: String): T =
-  {
-    val sb = new StringBuilder
-    val head = until.head
-    val rest = until.tail
-
-    while (true) {
-      if (ch == head && peek(rest))
-        return handler(positioner(), sb.toString)
-      else if (ch == SU)
-        truncatedError("")  // throws TruncatedXMLControl in compiler
-
-      sb append ch
-      nextch
-    }
-    unreachable
-  }
-
-  /** Create a non-destructive lookahead reader and see if the head
-   *  of the input would match the given String.  If yes, return true
-   *  and drop the entire String from input; if no, return false
-   *  and leave input unchanged.
-   */
-  private def peek(lookingFor: String): Boolean =
-    (lookahead() take lookingFor.length sameElements lookingFor.iterator) && {
-      // drop the chars from the real reader (all lookahead + orig)
-      (0 to lookingFor.length) foreach (_ => nextch)
-      true
-    }
-}
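The non-destructive lookahead described above (WithLookAhead plus peek) works by parking everything read ahead in a queue that the real reader drains before touching the underlying input again. A standalone sketch of that idea, with illustrative names:

    import scala.collection.mutable.Queue

    // Illustrative only: a reader that can peek ahead without consuming input,
    // mirroring the queue-based WithLookAhead wrapper used by MarkupParser.
    class PeekableReader(underlying: Iterator[Char]) {
      private val queue = Queue[Char]()
      def lookahead(): BufferedIterator[Char] = {
        val rest = new Iterator[Char] {
          def hasNext = underlying.hasNext
          def next()  = { val c = underlying.next(); queue += c; c }
        }
        (queue.iterator ++ rest).buffered
      }
      def hasNext: Boolean = queue.nonEmpty || underlying.hasNext
      def next(): Char = if (queue.nonEmpty) queue.dequeue() else underlying.next()
    }

    // e.g. checking for "]]>" before deciding whether CDATA has ended:
    //   val in = new PeekableReader("]]>tail".iterator)
    //   in.lookahead().take(3).mkString   // "]]>", nothing consumed from `in`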
diff --git a/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala b/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
deleted file mode 100644
index 22dd450..0000000
--- a/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |                                         **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-package parsing
-
-import factory.NodeFactory
-
-/** A no-binding adapter providing callbacks to the parser for creating elements;
-*   implements hash-consing.
-*/
-class NoBindingFactoryAdapter extends FactoryAdapter with NodeFactory[Elem]
-{
-  /** True.  Every XML node may contain text that the application needs */
-  def nodeContainsText(label: String) = true
-
-  /** From NodeFactory.  Constructs an instance of scala.xml.Elem */
-  protected def create(pre: String, label: String, attrs: MetaData, scope: NamespaceBinding, children: Seq[Node]): Elem =
-     Elem(pre, label, attrs, scope, children: _*)
-
-  /** From FactoryAdapter.  Creates a node. never creates the same node twice, using hash-consing. */
-  def createNode(pre: String, label: String, attrs: MetaData, scope: NamespaceBinding, children: List[Node]): Elem =
-    Elem(pre, label, attrs, scope, children: _*)
-
-  /** Creates a text node. */
-  def createText(text: String) = Text(text)
-
-  /** Creates a processing instruction. */
-  def createProcInstr(target: String, data: String) = makeProcInstr(target, data)
-}
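NoBindingFactoryAdapter is the adapter behind the convenience loaders in scala.xml.XML, so the removed class is normally reached indirectly. A minimal sketch, with illustrative sample markup:

    import scala.xml.{ Elem, XML }

    object FactoryAdapterSketch extends App {
      // XML.loadString parses through a SAX parser whose callbacks are handled by
      // NoBindingFactoryAdapter, which builds plain scala.xml.Elem nodes.
      val node = XML.loadString("""<user id="1"><name>Ada</name></user>""")
      println((node \ "name").text)       // Ada
      println(node.isInstanceOf[Elem])    // true
    }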
diff --git a/src/library/scala/xml/parsing/TokenTests.scala b/src/library/scala/xml/parsing/TokenTests.scala
deleted file mode 100644
index c9cafae..0000000
--- a/src/library/scala/xml/parsing/TokenTests.scala
+++ /dev/null
@@ -1,100 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-package parsing
-
-/**
- * Helper functions for parsing XML fragments
- */
-trait TokenTests {
-
-  /** {{{
-   *  (#x20 | #x9 | #xD | #xA)
-   *  }}} */
-  final def isSpace(ch: Char): Boolean = ch match {
-    case '\u0009' | '\u000A' | '\u000D' | '\u0020' => true
-    case _                                         => false
-  }
-  /** {{{
-   *  (#x20 | #x9 | #xD | #xA)+
-   *  }}} */
-  final def isSpace(cs: Seq[Char]): Boolean = cs.nonEmpty && (cs forall isSpace)
-
-  /** These are 99% sure to be redundant but refactoring on the safe side. */
-  def isAlpha(c: Char) = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z')
-  def isAlphaDigit(c: Char) = isAlpha(c) || (c >= '0' && c <= '9')
-
-  /** {{{
-   *  NameChar ::= Letter | Digit | '.' | '-' | '_' | ':'
-   *             | CombiningChar | Extender
-   *  }}}
-   *  See [4] and Appendix B of XML 1.0 specification.
-  */
-  def isNameChar(ch: Char) = {
-    import java.lang.Character._
-    // The constants represent groups Mc, Me, Mn, Lm, and Nd.
-
-    isNameStart(ch) || (getType(ch).toByte match {
-      case COMBINING_SPACING_MARK |
-              ENCLOSING_MARK | NON_SPACING_MARK |
-              MODIFIER_LETTER | DECIMAL_DIGIT_NUMBER => true
-      case _                                         => ".-:" contains ch
-    })
-  }
-
-  /** {{{
-   *  NameStart ::= ( Letter | '_' )
-   *  }}}
-   *  where Letter means in one of the Unicode general
-   *  categories `{ Ll, Lu, Lo, Lt, Nl }`.
-   *
-   *  We do not allow a name to start with `:`.
-   *  See [3] and Appendix B of XML 1.0 specification
-   */
-  def isNameStart(ch: Char) = {
-    import java.lang.Character._
-
-    getType(ch).toByte match {
-      case LOWERCASE_LETTER |
-              UPPERCASE_LETTER | OTHER_LETTER |
-              TITLECASE_LETTER | LETTER_NUMBER => true
-      case _                                   => ch == '_'
-    }
-  }
-
-  /** {{{
-   *  Name ::= ( Letter | '_' ) (NameChar)*
-   *  }}}
-   *  See [5] of XML 1.0 specification.
-   */
-  def isName(s: String) =
-    s.nonEmpty && isNameStart(s.head) && (s.tail forall isNameChar)
-
-  def isPubIDChar(ch: Char): Boolean =
-    isAlphaDigit(ch) || (isSpace(ch) && ch != '\u0009') ||
-    ("""-\()+,./:=?;!*#@$_%""" contains ch)
-
-  /**
-   * Returns `true` if the encoding name is a valid IANA encoding.
-   * This method does not verify that there is a decoder available
-   * for this encoding, only that the characters are valid for an
-   * IANA encoding name.
-   *
-   * @param ianaEncoding The IANA encoding name.
-   */
-  def isValidIANAEncoding(ianaEncoding: Seq[Char]) = {
-    def charOK(c: Char) = isAlphaDigit(c) || ("._-" contains c)
-
-    ianaEncoding.nonEmpty && isAlpha(ianaEncoding.head) &&
-    (ianaEncoding.tail forall charOK)
-  }
-
-  def checkSysID(s: String) = List('"', '\'') exists (c => !(s contains c))
-  def checkPubID(s: String) = s forall isPubIDChar
-}
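A small sketch of the character-class helpers defined in the removed TokenTests trait; the object name is illustrative:

    import scala.xml.parsing.TokenTests

    object TokenTestsSketch extends TokenTests {
      def main(args: Array[String]): Unit = {
        println(isName("xs:string"))          // true: ':' is a NameChar, just not a start char
        println(isNameStart(':'))             // false
        println(isSpace("\t \r\n"))           // true
        println(isValidIANAEncoding("UTF-8")) // true
      }
    }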
diff --git a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
deleted file mode 100644
index 0edea04..0000000
--- a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
+++ /dev/null
@@ -1,119 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package parsing
-
-import scala.xml.dtd._
-import scala.util.logging.Logged
-
-abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
-
-  var rootLabel:String = _
-  var qStack: List[Int] = Nil
-  var qCurrent: Int = -1
-
-  var declStack: List[ElemDecl] = Nil
-  var declCurrent: ElemDecl = null
-
-  final override val isValidating = true
-
-  override def log(msg: String) {}
-
-  /*
-  override def checkChildren(pos: Int, pre: String, label:String,ns:NodeSeq): Unit = {
-    Console.println("checkChildren()");
-    val decl = lookupElemDecl(label);
-    // @todo: nice error message
-    val res = decl.contentModel.validate(ns);
-    Console.println("res = "+res);
-    if(!res)
-      //sys.error("invalid!");
-  }
-  */
-
-  override def endDTD(n:String) = {
-    rootLabel = n
-  }
-  override def elemStart(pos: Int, pre: String, label: String, attrs: MetaData, scope:NamespaceBinding) {
-
-    def advanceDFA(dm:DFAContentModel) = {
-      val trans = dm.dfa.delta(qCurrent)
-      log("advanceDFA(dm): " + dm)
-      log("advanceDFA(trans): " + trans)
-      trans.get(ContentModel.ElemName(label)) match {
-          case Some(qNew) => qCurrent = qNew
-          case _          => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keys);
-        }
-    }
-    // advance in current automaton
-    log("[qCurrent = "+qCurrent+" visiting "+label+"]")
-
-    if (qCurrent == -1) { // root
-      log("  checking root")
-      if (label != rootLabel)
-        reportValidationError(pos, "this element should be "+rootLabel)
-    } else {
-      log("  checking node")
-      declCurrent.contentModel match {
-        case ANY =>
-        case EMPTY =>
-          reportValidationError(pos, "DTD says, no elems, no text allowed here")
-        case PCDATA =>
-          reportValidationError(pos, "DTD says, no elements allowed here")
-        case m @ MIXED(r) =>
-          advanceDFA(m)
-        case e @ ELEMENTS(r) =>
-          advanceDFA(e)
-      }
-    }
-    // push state, decl
-    qStack    =    qCurrent :: qStack
-    declStack = declCurrent :: declStack
-
-    declCurrent = lookupElemDecl(label)
-    qCurrent = 0
-    log("  done  now")
-  }
-
-  override def elemEnd(pos: Int, pre: String, label: String) {
-    log("  elemEnd")
-    qCurrent = qStack.head
-    qStack   = qStack.tail
-    declCurrent = declStack.head
-    declStack   = declStack.tail
-    log("    qCurrent now" + qCurrent)
-    log("    declCurrent now" + declCurrent)
-  }
-
-  final override def elemDecl(name: String, cmstr: String) {
-    decls = ElemDecl(name, ContentModel.parse(cmstr)) :: decls
-  }
-
-  final override def attListDecl(name: String, attList: List[AttrDecl]) {
-    decls = AttListDecl(name, attList) :: decls
-  }
-
-  final override def unparsedEntityDecl(name: String, extID: ExternalID, notat: String) {
-    decls = UnparsedEntityDecl(name, extID, notat) :: decls
-  }
-
-  final override def notationDecl(notat: String, extID: ExternalID) {
-    decls = NotationDecl(notat, extID) :: decls;
-  }
-
-  final override def peReference(name: String) {
-    decls = PEReference(name) :: decls
-  }
-
-  /** report a syntax error */
-  def reportValidationError(pos: Int, str: String): Unit
-
-}
diff --git a/src/library/scala/xml/parsing/XhtmlEntities.scala b/src/library/scala/xml/parsing/XhtmlEntities.scala
deleted file mode 100644
index 1bb8438..0000000
--- a/src/library/scala/xml/parsing/XhtmlEntities.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-package parsing
-
-import scala.xml.dtd.{ IntDef, ParsedEntityDecl }
-
-/**
- *  @author (c) David Pollak 2007 WorldWide Conferencing, LLC.
- *
- */
-object XhtmlEntities {
-  val entList = List(("quot",34), ("amp",38), ("lt",60), ("gt",62), ("nbsp",160), ("iexcl",161), ("cent",162), ("pound",163), ("curren",164), ("yen",165),
-      ("euro",8364), ("brvbar",166), ("sect",167), ("uml",168), ("copy",169), ("ordf",170), ("laquo",171), ("shy",173), ("reg",174), ("trade",8482),
-      ("macr",175), ("deg",176), ("plusmn",177), ("sup2",178), ("sup3",179), ("acute",180), ("micro",181), ("para",182), ("middot",183), ("cedil",184),
-      ("sup1",185), ("ordm",186), ("raquo",187), ("frac14",188), ("frac12",189), ("frac34",190), ("iquest",191), ("times",215), ("divide",247),
-      ("Agrave",192), ("Aacute",193), ("Acirc",194), ("Atilde",195), ("Auml",196), ("Aring",197), ("AElig",198), ("Ccedil",199), ("Egrave",200),
-      ("Eacute",201), ("Ecirc",202), ("Euml",203), ("Igrave",204), ("Iacute",205), ("Icirc",206), ("Iuml",207), ("ETH",208), ("Ntilde",209),
-      ("Ograve",210), ("Oacute",211), ("Ocirc",212), ("Otilde",213), ("Ouml",214), ("Oslash",216), ("Ugrave",217), ("Uacute",218), ("Ucirc",219),
-      ("Uuml",220), ("Yacute",221), ("THORN",222), ("szlig",223), ("agrave",224), ("aacute",225), ("acirc",226), ("atilde",227), ("auml",228),
-      ("aring",229), ("aelig",230), ("ccedil",231), ("egrave",232), ("eacute",233), ("ecirc",234), ("euml",235), ("igrave",236), ("iacute",237),
-      ("icirc",238), ("iuml",239), ("eth",240), ("ntilde",241), ("ograve",242), ("oacute",243), ("ocirc",244), ("otilde",245), ("ouml",246),
-      ("oslash",248), ("ugrave",249), ("uacute",250), ("ucirc",251), ("uuml",252), ("yacute",253), ("thorn",254), ("yuml",255), ("OElig",338),
-      ("oelig",339), ("Scaron",352), ("scaron",353), ("Yuml",376), ("circ",710), ("ensp",8194), ("emsp",8195), ("zwnj",204), ("zwj",8205), ("lrm",8206),
-      ("rlm",8207), ("ndash",8211), ("mdash",8212), ("lsquo",8216), ("rsquo",8217), ("sbquo",8218), ("ldquo",8220), ("rdquo",8221), ("bdquo",8222),
-      ("dagger",8224), ("Dagger",8225), ("permil",8240), ("lsaquo",8249), ("rsaquo",8250), ("fnof",402), ("bull",8226), ("hellip",8230), ("prime",8242),
-      ("Prime",8243), ("oline",8254), ("frasl",8260), ("weierp",8472), ("image",8465), ("real",8476), ("alefsym",8501), ("larr",8592), ("uarr",8593),
-      ("rarr",8594), ("darr",8495), ("harr",8596), ("crarr",8629), ("lArr",8656), ("uArr",8657), ("rArr",8658), ("dArr",8659), ("hArr",8660),
-      ("forall",8704), ("part",8706), ("exist",8707), ("empty",8709), ("nabla",8711), ("isin",8712), ("notin",8713), ("ni",8715), ("prod",8719),
-      ("sum",8721), ("minus",8722), ("lowast",8727), ("radic",8730), ("prop",8733), ("infin",8734), ("ang",8736), ("and",8743), ("or",8744),
-      ("cap",8745), ("cup",8746), ("int",8747), ("there4",8756), ("sim",8764), ("cong",8773), ("asymp",8776), ("ne",8800), ("equiv",8801), ("le",8804),
-      ("ge",8805), ("sub",8834), ("sup",8835), ("nsub",8836), ("sube",8838), ("supe",8839), ("oplus",8853), ("otimes",8855), ("perp",8869), ("sdot",8901),
-      ("lceil",8968), ("rceil",8969), ("lfloor",8970), ("rfloor",8971), ("lang",9001), ("rang",9002), ("loz",9674), ("spades",9824), ("clubs",9827),
-      ("hearts",9829), ("diams",9830), ("Alpha",913), ("Beta",914), ("Gamma",915), ("Delta",916), ("Epsilon",917), ("Zeta",918), ("Eta",919),
-      ("Theta",920), ("Iota",921), ("Kappa",922), ("Lambda",923), ("Mu",924), ("Nu",925), ("Xi",926), ("Omicron",927), ("Pi",928), ("Rho",929),
-      ("Sigma",931), ("Tau",932), ("Upsilon",933), ("Phi",934), ("Chi",935), ("Psi",936), ("Omega",937), ("alpha",945), ("beta",946), ("gamma",947),
-      ("delta",948), ("epsilon",949), ("zeta",950), ("eta",951), ("theta",952), ("iota",953), ("kappa",954), ("lambda",955), ("mu",956), ("nu",957),
-      ("xi",958), ("omicron",959), ("pi",960), ("rho",961), ("sigmaf",962), ("sigma",963), ("tau",964), ("upsilon",965), ("phi",966), ("chi",967),
-      ("psi",968), ("omega",969), ("thetasym",977), ("upsih",978), ("piv",982))
-
-  val entMap: Map[String, Char] = Map.empty[String, Char] ++ entList.map { case (name, value) => (name, value.toChar)}
-
-  val entities = entList.
-        map { case (name, value) => (name, new ParsedEntityDecl(name, new IntDef(value.toChar.toString)))}
-
-  def apply() = entities
-}
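A quick sketch of how the removed XhtmlEntities tables are used, e.g. to translate entity names to characters:

    import scala.xml.parsing.XhtmlEntities

    object EntitySketch extends App {
      println(XhtmlEntities.entMap("amp"))        // &
      println(XhtmlEntities.entMap("nbsp").toInt) // 160
      // XhtmlEntities() yields (name, ParsedEntityDecl) pairs, which XhtmlParser
      // adds to its entity table so &copy; and friends resolve while parsing.
      println(XhtmlEntities().size)               // number of known XHTML entities
    }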
diff --git a/src/library/scala/xml/parsing/XhtmlParser.scala b/src/library/scala/xml/parsing/XhtmlParser.scala
deleted file mode 100644
index d08cb1f..0000000
--- a/src/library/scala/xml/parsing/XhtmlParser.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-package parsing
-
-import scala.io.Source
-
-/** An XML Parser that preserves `CDATA` blocks and knows about
- *  [[scala.xml.parsing.XhtmlEntities]].
- *
- *  @author (c) David Pollak, 2007 WorldWide Conferencing, LLC.
- */
-class XhtmlParser(val input: Source) extends ConstructingHandler with MarkupParser with ExternalSources  {
-  val preserveWS = true
-  ent ++= XhtmlEntities()
-}
-
-/** Convenience method that instantiates, initializes and runs an `XhtmlParser`.
- *
- *  @author Burak Emir
- */
-object XhtmlParser {
-  def apply(source: Source): NodeSeq = new XhtmlParser(source).initialize.document
-}
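And a sketch of the companion apply shown above, which parses XHTML while preserving whitespace and resolving the XHTML entity set; the markup below is illustrative:

    import scala.io.Source
    import scala.xml.parsing.XhtmlParser

    object XhtmlSketch extends App {
      val html  = "<p>caf&eacute; &amp; co</p>"
      val nodes = XhtmlParser(Source.fromString(html))
      println(nodes.text)   // café & co
    }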
diff --git a/src/library/scala/xml/persistent/CachedFileStorage.scala b/src/library/scala/xml/persistent/CachedFileStorage.scala
deleted file mode 100644
index 916a1a0..0000000
--- a/src/library/scala/xml/persistent/CachedFileStorage.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-package persistent
-
-import java.io.{ File, FileOutputStream }
-import java.nio.ByteBuffer
-import java.nio.channels.Channels
-import java.lang.Thread
-import scala.util.logging.Logged
-import scala.collection.Iterator
-
-/** Mutable storage of immutable xml trees. Everything is kept in memory,
- *  with a thread periodically checking for changes and writing to file.
- *
- *  To ensure atomicity, two files are used, `filename1` and `'$'+filename1`.
- *  The implementation switches between the two, deleting the older one
- *  after a complete dump of the database has been written.
- *
- *  @author Burak Emir
- */
-abstract class CachedFileStorage(private val file1: File) extends Thread with Logged {
-
-  private val file2 = new File(file1.getParent, file1.getName+"$")
-
-  /** Either equals `file1` or `file2`, references the next file in which
-   *  updates will be stored.
-   */
-  private var theFile: File = null
-
-  private def switch() = { theFile = if (theFile == file1) file2 else file1; }
-
-  /** this storage modified since last modification check */
-  protected var dirty = false
-
-  /** period between modification checks, in milliseconds */
-  protected val interval = 1000
-
-  /** finds and loads the storage file. subclasses should call this method
-   *  prior to any other, but only once, to obtain the initial sequence of nodes.
-   */
-  protected def initialNodes: Iterator[Node] = (file1.exists, file2.exists) match {
-    case (false,false) =>
-      theFile = file1
-      Iterator.empty
-    case (true, true ) if (file1.lastModified < file2.lastModified) =>
-      theFile = file2
-      load
-    case (true, _ ) =>
-      theFile = file1
-      load
-    case _ =>
-      theFile = file2
-      load
-  }
-
-  /** returns an iterator over the nodes in this storage */
-  def nodes: Iterator[Node]
-
-  /** adds a node, setting this.dirty to true as a side effect */
-  def += (e: Node): Unit
-
-  /** removes a tree, setting this.dirty to true as a side effect */
-  def -= (e: Node): Unit
-
-  /* loads and parses XML from file */
-  private def load: Iterator[Node] = {
-    import scala.io.Source
-    import scala.xml.parsing.ConstructingParser
-    log("[load]\nloading "+theFile)
-    val src = Source.fromFile(theFile)
-    log("parsing "+theFile)
-    val res = ConstructingParser.fromSource(src,false).document.docElem(0)
-    switch
-    log("[load done]")
-    res.child.iterator
-  }
-
-  /** saves the XML to file */
-  private def save() = if (this.dirty) {
-    log("[save]\ndeleting "+theFile)
-    theFile.delete()
-    log("creating new "+theFile)
-    theFile.createNewFile()
-    val fos = new FileOutputStream(theFile)
-    val c   = fos.getChannel()
-
-    // @todo: optimize
-    val storageNode = <nodes>{ nodes.toList }</nodes>
-    val w = Channels.newWriter(c, "utf-8")
-    XML.write(w, storageNode, "utf-8", true, null)
-
-    log("writing to "+theFile)
-
-    w.close
-    c.close
-    fos.close
-    dirty = false
-    switch
-    log("[save done]")
-  }
-
-  /** Run method of the thread. Remember to use `start()` to start the thread,
-    * not `run()`. */
-  override def run = {
-    log("[run]\nstarting storage thread, checking every "+interval+" ms")
-    while (true) {
-      Thread.sleep( this.interval )
-      save
-    }
-  }
-
-  /** Force writing of contents to the file, even if there has not been any
-    * update. */
-  def flush() = {
-    this.dirty = true
-    save
-  }
-}
diff --git a/src/library/scala/xml/persistent/Index.scala b/src/library/scala/xml/persistent/Index.scala
deleted file mode 100644
index defaf67..0000000
--- a/src/library/scala/xml/persistent/Index.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package persistent
-
-/** an Index returns some unique key that is part of a node
- */
-abstract class Index[A] extends Function1[Node,A] {}
diff --git a/src/library/scala/xml/persistent/SetStorage.scala b/src/library/scala/xml/persistent/SetStorage.scala
deleted file mode 100644
index 20a5bb6..0000000
--- a/src/library/scala/xml/persistent/SetStorage.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.xml
-package persistent
-
-import scala.collection.mutable
-import java.io.File
-
-/** A persistent store with set semantics. This class allows adding and removing
- *  trees, but never contains two structurally equal trees.
- *
- *  @author Burak Emir
- */
-class SetStorage(file: File) extends CachedFileStorage(file) {
-
-  private var theSet: mutable.HashSet[Node] = new mutable.HashSet[Node]
-
-  // initialize
-
-  {
-    val it = super.initialNodes
-    dirty = it.hasNext
-    for(x <- it) {
-      theSet += x;
-    }
-  }
-
-  /* forwarding methods to hashset*/
-
-  def += (e: Node): Unit = synchronized { this.dirty = true; theSet += e }
-
-  def -= (e: Node): Unit = synchronized { this.dirty = true; theSet -= e }
-
-  def nodes = synchronized { theSet.iterator }
-
-}
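A sketch of the persistent set store removed above; the file name and element are illustrative. CachedFileStorage is a Thread, so persistence happens either periodically after start() or immediately via flush():

    import java.io.File
    import scala.xml.persistent.SetStorage

    object StorageSketch extends App {
      val store = new SetStorage(new File("notes.xml"))
      store += <note to="self">buy coffee</note>   // marks the store dirty
      store.flush()                                // forces a write to notes.xml
      store.nodes foreach println
    }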
diff --git a/src/library/scala/xml/pull/XMLEvent.scala b/src/library/scala/xml/pull/XMLEvent.scala
deleted file mode 100644
index a266380..0000000
--- a/src/library/scala/xml/pull/XMLEvent.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package pull
-
-/** An XML event for pull parsing.  All events received during
- * parsing will be one of the subclasses of this trait.
- */
-trait XMLEvent
-
-/**
- * An Element's start tag was encountered.
- * @param pre prefix, if any, on the element.  This is the `xs` in `<xs:string>foo</xs:string>`.
- * @param label the name of the element, not including the prefix
- * @param attrs any attributes on the element
- */
-case class EvElemStart(pre: String, label: String, attrs: MetaData, scope: NamespaceBinding) extends XMLEvent
-
-/**
- * An Element's end tag was encountered.
- * @param pre prefix, if any, on the element.  This is the `xs` in `<xs:string>foo</xs:string>`.
- * @param label the name of the element, not including the prefix
- */
-case class EvElemEnd(pre: String, label: String) extends XMLEvent
-
-/**
- * A text node was encountered.
- * @param text the text that was found
- */
-case class EvText(text: String) extends XMLEvent
-
-/** An entity reference was encountered.
- * @param entity the name of the entity, e.g. `gt` when encountering the entity `>`
- */
-case class EvEntityRef(entity: String) extends XMLEvent
-
-/**
- * A processing instruction was encountered.
- * @param target the "PITarget" of the processing instruction.  For the instruction `<?foo bar="baz"?>`, the target would
- * be `foo`
- * @param text the remainder of the instruction.  For the instruction `<?foo bar="baz"?>`, the text would
- * be `bar="baz"`
- * @see [[http://www.w3.org/TR/REC-xml/#sec-pi]]
- */
-case class EvProcInstr(target: String, text: String) extends XMLEvent
-
-/**
- * A comment was encountered
- * @param text the text of the comment
- */
-case class EvComment(text: String) extends XMLEvent
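These event classes are produced by the pull parser deleted just below; a minimal consumption sketch, with illustrative sample XML:

    import scala.io.Source
    import scala.xml.pull.{ EvElemStart, EvElemEnd, EvText, XMLEventReader }

    object PullSketch extends App {
      val reader = new XMLEventReader(Source.fromString("<a>hi<b/></a>"))
      for (event <- reader) event match {
        case EvElemStart(_, label, _, _) => println(s"start: $label")
        case EvElemEnd(_, label)         => println(s"end:   $label")
        case EvText(text)                => println(s"text:  $text")
        case _                           => // comments, PIs, entity refs
      }
      reader.stop()
    }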
diff --git a/src/library/scala/xml/pull/XMLEventReader.scala b/src/library/scala/xml/pull/XMLEventReader.scala
deleted file mode 100755
index 428c305..0000000
--- a/src/library/scala/xml/pull/XMLEventReader.scala
+++ /dev/null
@@ -1,156 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.xml
-package pull
-
-import scala.io.Source
-import java.lang.Thread
-import java.util.concurrent.LinkedBlockingQueue
-import java.nio.channels.ClosedChannelException
-import scala.xml.parsing.{ ExternalSources, MarkupHandler, MarkupParser }
-
-/**
- * Main entry point into creating an event-based XML parser.  Treating this
- * as a [[scala.collection.Iterator]] will provide access to the generated events.
- * @param src A [[scala.io.Source]] for XML data to parse
- *
- *  @author Burak Emir
- *  @author Paul Phillips
- */
-class XMLEventReader(src: Source)
-extends scala.collection.AbstractIterator[XMLEvent]
-   with ProducerConsumerIterator[XMLEvent] {
-
-  // We implement a pull parser as an iterator, but since we may be operating on
-  // a stream (e.g. XML over a network) there may be arbitrarily long periods when
-  // the queue is empty.  Fortunately the ProducerConsumerIterator is ideally
-  // suited to this task, possibly because it was written for use by this class.
-
-  // to override as necessary
-  val preserveWS = true
-
-  override val MaxQueueSize = 1000
-  protected case object POISON extends XMLEvent
-  val EndOfStream = POISON
-
-  // thread machinery
-  private[this] val parser = new Parser(src)
-  private[this] val parserThread = new Thread(parser, "XMLEventReader")
-  parserThread.start
-  // enqueueing the poison object is the reliable way to cause the
-  // iterator to terminate; hasNext will return false once it sees it.
-  // Calling interrupt() on the parserThread is the only way we can get
-  // it to stop producing tokens since it's lost deep in document() -
-  // we cross our fingers the interrupt() gets to its target, but if it
-  // fails for whatever reason the iterator correctness is not impacted,
-  // only performance (because it will finish the entire XML document,
-  // or at least as much as it can fit in the queue.)
-  def stop() = {
-    produce(POISON)
-    parserThread.interrupt()
-  }
-
-  private class Parser(val input: Source) extends MarkupHandler with MarkupParser with ExternalSources with Runnable {
-    val preserveWS = XMLEventReader.this.preserveWS
-    // track level for elem memory usage optimization
-    private var level = 0
-
-    // this is Parser's way to add to the queue - the odd return type
-    // is to conform to MarkupHandler's interface
-    def setEvent(es: XMLEvent*): NodeSeq = {
-      es foreach produce
-      NodeSeq.Empty
-    }
-
-    override def elemStart(pos: Int, pre: String, label: String, attrs: MetaData, scope: NamespaceBinding) {
-      level += 1
-      setEvent(EvElemStart(pre, label, attrs, scope))
-    }
-    override def elemEnd(pos: Int, pre: String, label: String) {
-      setEvent(EvElemEnd(pre, label))
-      level -= 1
-    }
-
-    // this is a dummy to satisfy MarkupHandler's API
-    // memory usage optimization return one <ignore/> for top level to satisfy
-    // MarkupParser.document() otherwise NodeSeq.Empty
-    private var ignoreWritten = false
-    final def elem(pos: Int, pre: String, label: String, attrs: MetaData, pscope: NamespaceBinding, empty: Boolean, nodes: NodeSeq): NodeSeq =
-      if (level == 1 && !ignoreWritten) {ignoreWritten = true; <ignore/> } else NodeSeq.Empty
-
-    def procInstr(pos: Int, target: String, txt: String)  = setEvent(EvProcInstr(target, txt))
-    def comment(pos: Int, txt: String)                    = setEvent(EvComment(txt))
-    def entityRef(pos: Int, n: String)                    = setEvent(EvEntityRef(n))
-    def text(pos: Int, txt:String)                        = setEvent(EvText(txt))
-
-    override def run() {
-      curInput = input
-      interruptibly { this.initialize.document() }
-      setEvent(POISON)
-    }
-  }
-}
-
-// An iterator designed for one or more producers to generate
-// elements, and a single consumer to iterate.  Iteration will continue
-// until closeIterator() is called, after which point producers
-// calling produce() will receive interruptions.
-//
-// Since hasNext may block indefinitely if nobody is producing,
-// there is also an available() method which will return true if
-// the next call hasNext is guaranteed not to block.
-//
-// This is not thread-safe for multiple consumers!
-trait ProducerConsumerIterator[T >: Null] extends Iterator[T] {
-  // abstract - iterator-specific distinguished object for marking eos
-  val EndOfStream: T
-
-  // defaults to unbounded - override to positive Int if desired
-  val MaxQueueSize = -1
-
-  def interruptibly[T](body: => T): Option[T] = try Some(body) catch {
-    case _: InterruptedException    => Thread.currentThread.interrupt(); None
-    case _: ClosedChannelException  => None
-  }
-
-  private[this] lazy val queue =
-    if (MaxQueueSize < 0) new LinkedBlockingQueue[T]()
-    else new LinkedBlockingQueue[T](MaxQueueSize)
-  private[this] var buffer: T = _
-  private def fillBuffer() = {
-    buffer = interruptibly(queue.take) getOrElse EndOfStream
-    isElement(buffer)
-  }
-  private def isElement(x: T) = x != null && x != EndOfStream
-  private def eos() = buffer == EndOfStream
-
-  // public producer interface - this is the only method producers call, so
-  // LinkedBlockingQueue's synchronization is all we need.
-  def produce(x: T): Unit = if (!eos) interruptibly(queue put x)
-
-  // consumer/iterator interface - we need not synchronize access to buffer
-  // because we required there to be only one consumer.
-  def hasNext = !eos && (buffer != null || fillBuffer)
-
-  def next() = {
-    if (eos) throw new NoSuchElementException("ProducerConsumerIterator")
-    if (buffer == null) fillBuffer
-
-    drainBuffer
-  }
-
-  def available() = isElement(buffer) || isElement(queue.peek)
-
-  private def drainBuffer() = {
-    assert(!eos)
-    val res = buffer
-    buffer = null
-    res
-  }
-}
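
The removed ProducerConsumerIterator boils down to a blocking queue plus a poison-pill terminator. A stripped-down sketch of that pattern (illustrative names, not the original code):

    import java.util.concurrent.LinkedBlockingQueue

    // One producer thread enqueues items; a distinguished poison value ends iteration.
    class QueueIterator[T >: Null](poison: T) extends Iterator[T] {
      private val queue = new LinkedBlockingQueue[T]()
      private var buffer: T = null

      def produce(x: T): Unit = queue.put(x)
      def stop(): Unit        = queue.put(poison)

      def hasNext: Boolean = {
        if (buffer == null) buffer = queue.take()  // may block until something is produced
        buffer != poison
      }
      def next(): T = {
        if (!hasNext) throw new NoSuchElementException("QueueIterator")
        val res = buffer; buffer = null; res
      }
    }
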
diff --git a/src/library/scala/xml/pull/package.scala b/src/library/scala/xml/pull/package.scala
deleted file mode 100644
index 3742c55..0000000
--- a/src/library/scala/xml/pull/package.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-package scala.xml
-
-/**
- * Classes needed to view an XML document as a series of events.  The document
- * is parsed by an [[scala.xml.pull.XMLEventReader]] instance.  You can treat it as
- * an [[scala.collection.Iterator]] to retrieve the events, which are all
- * subclasses of [[scala.xml.pull.XMLEvent]].
- *
- * {{{
- * scala> val source = Source.fromString("""<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
- * <?instruction custom value="customvalue"?>
- * <!DOCTYPE foo [
- *   <!ENTITY bar "BAR">
- * ]><foo>Hello<!-- this is a comment --><bar>&bar;</bar><bar>&gt;</bar></foo>""")
- *
- * source: scala.io.Source = non-empty iterator
- *
- * scala> val reader = new XMLEventReader(source)
- * reader: scala.xml.pull.XMLEventReader = non-empty iterator
- *
- * scala> reader.foreach{ println(_) }
- * EvProcInstr(instruction,custom value="customvalue")
- * EvText(
- * )
- * EvElemStart(null,foo,,)
- * EvText(Hello)
- * EvComment( this is a comment )
- * EvElemStart(null,bar,,)
- * EvText(BAR)
- * EvElemEnd(null,bar)
- * EvElemStart(null,bar,,)
- * EvEntityRef(gt)
- * EvElemEnd(null,bar)
- * EvElemEnd(null,foo)
- * EvText(
- *
- * )
- *
- * }}}
- */
-package object pull
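
These scala.xml removals are part of the 2.11 modularization: the XML library, including the pull package documented above, now ships as the separate scala-xml module instead of inside scala-library. Projects that still need it add a dependency along these lines (the version shown is illustrative):

    // build.sbt -- illustrative coordinates; pick the scala-xml release matching your Scala version.
    libraryDependencies += "org.scala-lang.modules" %% "scala-xml" % "1.0.2"
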
diff --git a/src/library/scala/xml/transform/BasicTransformer.scala b/src/library/scala/xml/transform/BasicTransformer.scala
deleted file mode 100644
index 1402ccd..0000000
--- a/src/library/scala/xml/transform/BasicTransformer.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package transform
-
-/** A class for XML transformations.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-abstract class BasicTransformer extends Function1[Node,Node]
-{
-  protected def unchanged(n: Node, ns: Seq[Node]) =
-    ns.length == 1 && (ns.head == n)
-
-  /** Call transform(Node) for each node in ns, append results
-   *  to NodeBuffer.
-   */
-  def transform(it: Iterator[Node], nb: NodeBuffer): Seq[Node] =
-    it.foldLeft(nb)(_ ++= transform(_)).toSeq
-
-  /** Call transform(Node) to each node in ns, yield ns if nothing changes,
-   *  otherwise a new sequence of concatenated results.
-   */
-  def transform(ns: Seq[Node]): Seq[Node] = {
-    val (xs1, xs2) = ns span (n => unchanged(n, transform(n)))
-
-    if (xs2.isEmpty) ns
-    else xs1 ++ transform(xs2.head) ++ transform(xs2.tail)
-  }
-
-  def transform(n: Node): Seq[Node] = {
-    if (n.doTransform) n match {
-      case Group(xs)  => Group(transform(xs)) // un-group the hack Group tag
-      case _          =>
-        val ch = n.child
-        val nch = transform(ch)
-
-        if (ch eq nch) n
-        else           Elem(n.prefix, n.label, n.attributes, n.scope, nch: _*)
-    }
-    else n
-  }
-
-  def apply(n: Node): Node = {
-    val seq = transform(n)
-    if (seq.length > 1)
-      throw new UnsupportedOperationException("transform must return single node for root");
-    else seq.head
-  }
-}
diff --git a/src/library/scala/xml/transform/RewriteRule.scala b/src/library/scala/xml/transform/RewriteRule.scala
deleted file mode 100644
index 1dca495..0000000
--- a/src/library/scala/xml/transform/RewriteRule.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package transform
-
-/** a RewriteRule, when applied to a term, yields either
- *  the resulting of rewriting or the term itself it the rule
- *  is not applied.
- *
- *  @author  Burak Emir
- *  @version 1.0
- */
-abstract class RewriteRule extends BasicTransformer {
-  /** a name for this rewrite rule */
-  val name = this.toString()
-  override def transform(ns: Seq[Node]): Seq[Node] = super.transform(ns)
-  override def transform(n: Node): Seq[Node] = n
-}
-
diff --git a/src/library/scala/xml/transform/RuleTransformer.scala b/src/library/scala/xml/transform/RuleTransformer.scala
deleted file mode 100644
index 85e92e5..0000000
--- a/src/library/scala/xml/transform/RuleTransformer.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.xml
-package transform
-
-class RuleTransformer(rules: RewriteRule*) extends BasicTransformer {
-  override def transform(n: Node): Seq[Node] =
-    rules.foldLeft(super.transform(n)) { (res, rule) => rule transform res }
-}
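
A minimal usage sketch for the RewriteRule/RuleTransformer pair removed above (the element names are made up for illustration):

    import scala.xml._
    import scala.xml.transform._

    // Rewrite every <b> element into <strong>, leave everything else untouched.
    object BoldToStrong extends RewriteRule {
      override def transform(n: Node): Seq[Node] = n match {
        case e: Elem if e.label == "b" => e.copy(label = "strong")
        case other                     => other
      }
    }

    object TransformDemo {
      def main(args: Array[String]): Unit = {
        val doc = <p>Hello <b>world</b></p>
        println(new RuleTransformer(BoldToStrong)(doc))
      }
    }
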
diff --git a/src/manual/scala/man1/scala.scala b/src/manual/scala/man1/scala.scala
index dbd4ea5..92d9c59 100644
--- a/src/manual/scala/man1/scala.scala
+++ b/src/manual/scala/man1/scala.scala
@@ -39,23 +39,23 @@ object scala extends Command {
         CmdOptionBound("howtorun:", Argument("how")),
         "How to execute " & Argument("torun") & ", if it is present. " &
         "Options for " & Argument("how") & " are " & Mono("guess") &
-        " (the default), " & Mono("script") & ", and " & Mono("object") &
+        " (the default), " & Mono("script") & ", " & Mono("jar") & ", and " & Mono("object") &
         "."),
 
       Definition(
-        CmdOption("i"),
+        CmdOption("i", Argument("file")),
         "Requests that a file be pre-loaded.  It is only " &
         "meaningful for interactive shells."),
 
       Definition(
-        CmdOption("e"),
+        CmdOption("e", Argument("string")),
         "Requests that its argument be executed as Scala code."),
 
       Definition(
         CmdOption("savecompiled"),
         "Save this compiled version of scripts in order to speed up " &
         "later executions of the same script.  When running a script, " &
-        "save the compiled version of in a file with the same name as the " &
+        "save the compiled version in a file with the same name as the " &
         "script but with an extension of " & Mono(".jar") & ".  On subsequent " &
         "runs of the same script, the pre-compiled " & Mono(".jar") & " file " &
         "will be used if it is newer than the script file."),
@@ -215,7 +215,7 @@ object scala extends Command {
       "exec scala \"$0\" \"$@\"\n" +
       "!#\n" +
       "Console.println(\"Hello, world!\")\n" +
-      "argv.toList foreach Console.println"),
+      "args.toList foreach Console.println"),
 
     "Here is a complete Scala script for MS Windows: ",
 
@@ -226,7 +226,7 @@ object scala extends Command {
       "goto :eof\n" +
       "::!#\n" +
       "Console.println(\"Hello, world!\")\n" +
-      "argv.toList foreach Console.println"),
+      "args.toList foreach Console.println"),
 
     "If you want to use the compilation cache to speed up multiple executions " +
     "of the script, then add " & Mono("-savecompiled") & " to the scala " +
@@ -237,7 +237,7 @@ object scala extends Command {
       "exec scala -savecompiled \"$0\" \"$@\"\n" +
       "!#\n" +
       "Console.println(\"Hello, world!\")\n" +
-      "argv.toList foreach Console.println"))
+      "args.toList foreach Console.println"))
 
   val exitStatus = Section("EXIT STATUS",
 
diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala
index 13b1fd5..31d25d4 100644
--- a/src/manual/scala/man1/scalac.scala
+++ b/src/manual/scala/man1/scalac.scala
@@ -109,6 +109,9 @@ object scalac extends Command {
           CmdOption("extdirs", Argument("dirs")),
           "Override location of installed extensions."),
         Definition(
+          CmdOption("feature"),
+          "Emit warning and location for usages of features that should be imported explicitly."),
+        Definition(
           CmdOptionBound("g:", "{none,source,line,vars,notailcalls}"),
           SeqPara(
             Mono("\"none\"") & " generates no debugging info,",
@@ -128,6 +131,9 @@ object scalac extends Command {
           CmdOption("javaextdirs", Argument("path")),
           "Override Java extdirs classpath."),
         Definition(
+          CmdOptionBound("language:", Argument("feature")),
+          "Enable one or more language features."),
+        Definition(
           CmdOption("no-specialization"),
           "Ignore " & MItalic("@specialize") & " annotations."),
         Definition(
@@ -146,12 +152,11 @@ object scalac extends Command {
           CmdOption("sourcepath", Argument("path")),
           "Specify location(s) of source files."),
         Definition(
-          CmdOptionBound("target:", Argument("target")),
-          SeqPara(
-            "Specify which backend to use (" & Mono("jvm-1.5," &
-            "msil") & ").",
-            "The default value is " & Mono("\"jvm-1.5\"") & " (was " &
-            Mono("\"jvm-1.4\"") & " up to Scala version 2.6.1).")),
+          CmdOptionBound("target:", "{jvm-1.5,jvm-1.6,jvm-1.7}"),
+	  SeqPara(
+            Mono("\"jvm-1.5\"") & " target JVM 1.5 (deprecated),",
+            Mono("\"jvm-1.6\"") & " target JVM 1.6 (default),",
+            Mono("\"jvm-1.7\"") & " target JVM 1.7,")),
         Definition(
           CmdOption("toolcp", Argument("path")),
           "Add to the runner classpath."),
@@ -166,6 +171,12 @@ object scalac extends Command {
           CmdOption("uniqid"),
           "Uniquely tag all identifiers in debugging output."),
         Definition(
+          CmdOption("usejavacp"),
+          "Utilize the java.class.path in classpath resolution."),
+        Definition(
+          CmdOption("usemanifestcp"),
+          "Utilize the manifest in classpath resolution."),
+        Definition(
           CmdOption("verbose"),
           "Output messages about what the compiler is doing"),
         Definition(
@@ -182,24 +193,11 @@ object scalac extends Command {
     Section("Advanced Options",
       DefinitionList(
         Definition(
-          CmdOption("Xassem-extdirs", Argument("dirs")),
-          "(Requires " & Mono("-target:msil") &
-          ") List of directories containing assemblies." &
-          "  default:" & Mono("lib") & "."),
-        Definition(
-          CmdOption("Xassem-name", Argument("file")),
-          "(Requires " & Mono("-target:msil") &
-          ") Name of the output assembly."),
-        Definition(
-          CmdOption("Xassem-path", Argument("path")),
-          "(Requires " & Mono("-target:msil") &
-          ") List of assemblies referenced by the program."),
-        Definition(
-          CmdOption("Xcheck-null"),
-          "Warn upon selection of nullable reference"),
-        Definition(
           CmdOption("Xcheckinit"),
           "Wrap field accessors to throw an exception on uninitialized access."),
+	Definition(
+          CmdOption("Xdev"),
+          "Enable warnings for developers working on the Scala compiler"),
         Definition(
           CmdOption("Xdisable-assertions"),
           "Generate no assertions and assumptions"),
@@ -213,6 +211,9 @@ object scalac extends Command {
         Definition(
           CmdOption("Xfatal-warnings"),
           "Fail the compilation if there are any warnings."),
+	Definition(
+          CmdOption("Xfull-lubs"),
+          "Retain pre 2.10 behavior of less aggressive truncation of least upper bounds."),
         Definition(
           CmdOption("Xfuture"),
           "Turn on future language features."),
@@ -222,18 +223,39 @@ object scalac extends Command {
         Definition(
           CmdOption("Xlint"),
           "Enable recommended additional warnings."),
+	Definition(
+          CmdOption("Xlog-free-terms"),
+          "Print a message when reification creates a free term."),
+	Definition(
+          CmdOption("Xlog-free-types"),
+          "Print a message when reification resorts to generating a free type."),
+	Definition(
+          CmdOption("Xlog-implicit-conversions"),
+          "Print a message whenever an implicit conversion is inserted."),
         Definition(
           CmdOption("Xlog-implicits"),
           "Show more detail on why some implicits are not applicable."),
+	Definition(
+          CmdOption("Xlog-reflective-calls"),
+          "Print a message when a reflective method call is generated."),
+	Definition(
+          CmdOptionBound("Xmacro-settings:", Argument("option")),
+          "Custom settings for macros."),
+	Definition(
+          CmdOption("Xmain-class", Argument("path")),
+          "Class for manifest's Main-Class entry (only useful with -d <jar>)."),
         Definition(
           CmdOption("Xmax-classfile-name", Argument("n")),
           "Maximum filename length for generated classes."),
         Definition(
-          CmdOption("Xmigration"),
-          "Warn about constructs whose behavior may have changed between 2.7 and 2.8."),
+          CmdOptionBound("Xmigration:", Argument("version")),
+          "Warn about constructs whose behavior may have changed since" & Argument("version") & "."),
         Definition(
           CmdOption("Xno-forwarders"),
           "Do not generate static forwarders in mirror classes."),
+	Definition(
+          CmdOption("Xno-patmat-analysis"),
+          "Don't perform exhaustivity/unreachability analysis. Also, ignore " & MItalic("@switch") & " annotation."),
         Definition(
           CmdOption("Xno-uescape"),
           "Disable handling of " & BSlash & "u unicode escapes"),
@@ -241,26 +263,26 @@ object scalac extends Command {
           CmdOption("Xnojline"),
           "Do not use JLine for editing."),
         Definition(
-          CmdOptionBound("Xplugin:", Argument("file")),
-          "Load a plugin from a file"),
+          CmdOptionBound("Xplugin:", Argument("paths")),
+          "Load a plugin from each classpath."),
         Definition(
           CmdOptionBound("Xplugin-disable:", Argument("plugin")),
-          "Disable a plugin"),
+          "Disable plugins by name."),
         Definition(
           CmdOption("Xplugin-list"),
-          "Print a synopsis of loaded plugins"),
+          "Print a synopsis of loaded plugins."),
         Definition(
           CmdOptionBound("Xplugin-require:", Argument("plugin")),
-          "Abort unless the given plugin(s) are available"),
+          "Abort if a named plugin is not loaded."),
         Definition(
           CmdOption("Xpluginsdir", Argument("path")),
-          "Path to search compiler plugins."),
+          "Path to search for plugin archives."),
         Definition(
           CmdOptionBound("Xprint:", Argument("phases")),
           "Print out program after " & Argument("phases") & " (see below)."),
         Definition(
-          CmdOption("Xprint-icode"),
-          "Log internal icode to *.icode files."),
+          CmdOptionBound("Xprint-icode", "[:" & Argument("phases") & "]"),
+          "Log internal icode to *.icode files after" & Argument("phases") & " (default: icode)."),
         Definition(
           CmdOption("Xprint-pos"),
           "Print tree positions, as offsets."),
@@ -287,15 +309,17 @@ object scalac extends Command {
           CmdOption("Xshow-phases"),
           "Print a synopsis of compiler phases."),
         Definition(
+          CmdOptionBound("Xsource:", Argument("version")),
+          "Treat compiler input as Scala source for the specified version, see SI-8126."),
+	Definition(
           CmdOption("Xsource-reader", Argument("classname")),
           "Specify a custom method for reading source files."),
-        Definition(
-          CmdOption("Xsourcedir", Argument("path")),
-          "(Requires " & Mono("-target:msil") &
-          ") Mirror source folder structure in output directory.."),
+	Definition(
+          CmdOption("Xstrict-inference"),
+          "Don't infer known-unsound types."),
         Definition(
           CmdOption("Xverify"),
-          "Verify generic signatures in generated bytecode."),
+          "Verify generic signatures in generated bytecode (asm backend only)."),
         Definition(
           CmdOption("Y"),
           "Print a synopsis of private options.")
@@ -305,65 +329,101 @@ object scalac extends Command {
     Section("Compilation Phases",
       DefinitionList(
         Definition(
-          MItalic("initial"),
-          "initializing compiler"),
-        Definition(
-          MItalic("parse"),
-          "parse source files"),
+          MItalic("parser"),
+          "parse source into ASTs, perform simple desugaring"),
         Definition(
           MItalic("namer"),
-          "create symbols"),
+          "resolve names, attach symbols to named trees"),
+	Definition(
+          MItalic("packageobjects"),
+          "load package objects"),
+	Definition(
+          MItalic("typer"),
+          "the meat and potatoes: type the trees"),
+        Definition(
+          MItalic("patmat"),
+          "translate match expressions"),
+	Definition(
+          MItalic("superaccessors"),
+          "add super accessors in traits and nested classes"),
+	Definition(
+          MItalic("extmethods"),
+          "add extension methods for inline classes"),
+	Definition(
+          MItalic("pickler"),
+          "serialize symbol tables"),
+        Definition(
+          MItalic("refchecks"),
+          "reference/override checking, translate nested objects"),
+	Definition(
+          MItalic("selectiveanf"),
+          "ANF pre-transform for " & MItalic("@cps") & " (CPS plugin)"),
+	Definition(
+          MItalic("selectivecps"),
+          MItalic("@cps") & "-driven transform of selectiveanf assignements (CPS plugin)"),
+	Definition(
+          MItalic("uncurry"),
+          "uncurry, translate function values to anonymous classes"),
         Definition(
-          MItalic("analyze"),
-          "name and type analysis"),
+          MItalic("tailcalls"),
+          "replace tail calls by jumps"),
         Definition(
-          MItalic("refcheck"),
-          "reference checking"),
+          MItalic("specialize"),
+          MItalic("@specialized") & "-driven class and method specialization"),
         Definition(
-          MItalic("uncurry"),
-          "uncurry function types and applications"),
+          MItalic("explicitouter"),
+          "this refs to outer pointers, translate patterns"),
+        Definition(
+          MItalic("erasure"),
+          "erase types, add interfaces for traits"),
+        Definition(
+          MItalic("posterasure"),
+          "clean up erased inline classes"),
+        Definition(
+          MItalic("lazyvals"),
+          "allocate bitmaps, translate lazy vals into lazified defs"),
         Definition(
           MItalic("lambdalift"),
-          "lambda lifter"),
+          "move nested functions to top level"),
         Definition(
-          MItalic("typesasvalues"),
-          "represent types as values"),
+          MItalic("constructors"),
+          "move field definitions into constructors"),
         Definition(
-          MItalic("addaccessors"),
-          "add accessors for constructor arguments"),
+          MItalic("flatten"),
+          "eliminate inner classes"),
         Definition(
-          MItalic("explicitouterclasses"),
-          "make links from inner classes to enclosing one explicit"),
+          MItalic("mixin"),
+          "mixin composition"),
         Definition(
-          MItalic("addconstructors"),
-          "add explicit constructor for each class"),
+          MItalic("cleanup"),
+          "platform-specific cleanups, generate reflective calls"),
         Definition(
-          MItalic("tailcall"),
-          "add tail-calls"),
+          MItalic("delambdafy"),
+          "remove lambdas"),
         Definition(
-          MItalic("wholeprog"),
-          "perform whole program analysis"),
+          MItalic("icode"),
+          "generate portable intermediate code"),
         Definition(
-          MItalic("addinterfaces"),
-          "add one interface per class"),
+          MItalic("inliner"),
+          "optimization: do inlining"),
         Definition(
-          MItalic("expandmixins"),
-          "expand mixins by code copying"),
+          MItalic("inlineHandlers"),
+          "optimization: inline exception handlers"),
         Definition(
-          MItalic("boxing"),
-          "makes boxing explicit"),
+          MItalic("closelim"),
+          "optimization: eliminate uncalled closures"),
         Definition(
-          MItalic("erasure"),
-          "type eraser"),
+          MItalic("constopt"),
+          "optimization: optimize null and other constants"),
         Definition(
-          MItalic("icode"),
-          "generate icode"),
+          MItalic("dce"),
+          "optimization: eliminate dead code"),
         Definition(
-          MItalic("codegen"),
-          "enable code generation"),
+          MItalic("jvm"),
+          "generate JVM bytecode"),
         Definition(
           MItalic("terminal"),
-          "compilation terminated"),
+          "the last phase in the compiler chain"),
         Definition(
           MItalic("all"),
           "matches all phases"))))
diff --git a/src/manual/scala/tools/docutil/EmitManPage.scala b/src/manual/scala/tools/docutil/EmitManPage.scala
index c30e847..21f1bf5 100644
--- a/src/manual/scala/tools/docutil/EmitManPage.scala
+++ b/src/manual/scala/tools/docutil/EmitManPage.scala
@@ -110,8 +110,8 @@ object EmitManPage {
         case lst:NumberedList =>
           for {
             idx <- List.range(0, lst.items.length)
-            val item = lst.items(idx)
           } {
+            val item = lst.items(idx)
             out.println(".IP \"   " + (idx+1) + ".\"")
             emitText(item)
             out.println
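
The hunk above cleans up a deprecated construct: a `val` definition inside a for-comprehension's enumerators (deprecated since 2.10), by moving the binding into the loop body. A small sketch of the accepted forms (hypothetical values, not from the patch):

    object ForValDemo extends App {
      // Deprecated form:  for { i <- 0 until 3; val x = i * 2 } println(x)
      for { i <- 0 until 3; x = i * 2 } println(x)        // definition without `val`
      for (i <- 0 until 3) { val x = i * 2; println(x) }  // or bind inside the body, as above
    }
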
diff --git a/src/manual/scala/tools/docutil/ManPage.scala b/src/manual/scala/tools/docutil/ManPage.scala
index 2c5d696..853c17b 100644
--- a/src/manual/scala/tools/docutil/ManPage.scala
+++ b/src/manual/scala/tools/docutil/ManPage.scala
@@ -6,6 +6,8 @@
 
 package scala.tools.docutil
 
+import scala.language.implicitConversions
+
 object ManPage {
   abstract class AbstractText {
     def &(more: AbstractText) = SeqText(this, more)
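
The `scala.language.implicitConversions` import added above goes hand in hand with the `-feature` and `-language:` options documented earlier in this patch: under `-feature`, defining an implicit conversion without the import (or `-language:implicitConversions`) produces a warning. A minimal sketch:

    import scala.language.implicitConversions

    object ImplicitDemo {
      // Without the import above, compiling with -feature warns at this definition.
      implicit def intToText(i: Int): String = i.toString
      def main(args: Array[String]): Unit = println("answer: " + (42: String))
    }
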
diff --git a/src/manual/scala/tools/docutil/resources/index.html b/src/manual/scala/tools/docutil/resources/index.html
index aaef94d..18e2343 100644
--- a/src/manual/scala/tools/docutil/resources/index.html
+++ b/src/manual/scala/tools/docutil/resources/index.html
@@ -8,7 +8,7 @@
   <meta http-equiv="Content-Style-Type" content="text/css"/>
   <meta http-equiv="Content-Language" content="en"/>
   <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1"/>
-  <meta name="Copyright" content="(C) 2002-2012 LAMP/EPFL"/>
+  <meta name="Copyright" content="(C) 2002-2013 LAMP/EPFL"/>
   <meta name="Language" content="en"/>
   <meta name="Description" content="The Scala Programming Language"/>
   <meta name="Author" content="Stephane Micheloud"/>
@@ -180,7 +180,7 @@
   <hr/>
 
   <div style="font-size:x-small;">
-    Copyright (c) 2002-2012 <a href="http://www.epfl.ch/">EPFL</a>,
+    Copyright (c) 2002-2013 <a href="http://www.epfl.ch/">EPFL</a>,
     Lausanne, unless specified otherwise.<br/>
     All rights reserved.
   </div>
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java b/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java
deleted file mode 100644
index 59bbeee..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java
+++ /dev/null
@@ -1,253 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.AssemblyDef;
-import ch.epfl.lamp.compiler.msil.util.Table.ModuleDef;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.io.File;
-import java.io.FileNotFoundException;
-
-/**
- * Defines an Assembly, which is a reusable, versionable, and self-describing
- * building block of a common language runtime application.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class Assembly extends CustomAttributeProvider {
-
-    //##########################################################################
-    // static members
-
-    // all the assemblies
-    public static final HashMap assemblies = new HashMap();
-
-    /** Loads an assembly from the specified path. */
-    public static Assembly LoadFrom(String assemblyFileName) {
-	File afile = new File(assemblyFileName);
-	return LoadFrom(afile.getParentFile(), afile.getName());
-    }
-
-    /** Loads an assembly with the given name from the given directory. */
-    public static Assembly LoadFrom(File dir, String name) {
-	File file = null;
-	PEFile pefile = null;
-// 	try {
-// 	    if (dir == null)
-// 		dir = new File(".");
-// 	    dir = dir.getCanonicalFile();
-// 	} catch (java.io.IOException e) {}
-
-	if (name.toUpperCase().endsWith(".EXE") || name.toUpperCase().endsWith(".DLL")) {
-		file = new File(dir, name);
-		pefile = getPEFile(file);
-		name = name.substring(0, name.length() - 4);
-	}
-
-	File adir = pefile == null ? new File(dir, name) : null;
-
-	if (pefile == null) {
-	    file = new File(dir, name + ".dll");
-	    pefile = getPEFile(file);
-	}
-	if (pefile == null) {
-	    file = new File(dir, name + ".DLL");
-	    pefile = getPEFile(file);
-	}
-	if (pefile == null && adir.exists()) {
-	    file = new File(adir, name + ".dll");
-	    pefile = getPEFile(file);
-	}
-	if (pefile == null && adir.exists()) {
-	    file = new File(adir, name + ".DLL");
-	    pefile = getPEFile(file);
-	}
-
-	if (pefile == null) {
-	    file = new File(dir, name + ".exe");
-	    pefile = getPEFile(file);
-	}
-	if (pefile == null) {
-	    file = new File(dir, name + ".EXE");
-	    pefile = getPEFile(file);
-	}
-	if (pefile == null && adir.exists()) {
-	    file = new File(adir, name + ".exe");
-	    pefile = getPEFile(file);
-	}
-	if (pefile == null && adir.exists()) {
-	    file = new File(adir, name + ".EXE");
-	    pefile = getPEFile(file);
-	}
-
-	if (pefile == null)
-	    throw new RuntimeException("Cannot find assembly " + new File(dir, name));
-	return getPEAssembly(pefile);
-    }
-
-    private static Assembly getPEAssembly(PEFile pefile) {
-	AssemblyDef assem = pefile.AssemblyDef;
-	if (assem == null)
-	    throw new RuntimeException("File " + pefile
-				       + " does not contain a manifest");
-	assem.readRow(1);
-	String name = pefile.getString(assem.Name);
-	Assembly a = (Assembly) assemblies.get(name);
-	if (a != null) {
-	    return a;
-	}
-
-	AssemblyName an = new AssemblyName();
-	an.Name = pefile.getString(assem.Name);
-	an.Version = new Version(assem.MajorVersion, assem.MinorVersion,
-				 assem.BuildNumber, assem.RevisionNumber);
-	an.SetPublicKey(pefile.getBlob(assem.PublicKey));
-	return new PEAssembly(pefile, an);
-    }
-
-    protected static PEFile getPEFile(File f) {
-	PEFile pefile = null;
-	try { pefile = new PEFile(f.getAbsolutePath()); }
-	catch (FileNotFoundException e) {}
-	catch (RuntimeException e) {
-            java.lang.System.out.println("swallowed RuntimeException at getPEFile");
-    }
-	return pefile;
-    }
-
-    //##########################################################################
-    // public fields
-
-    /** The entry point of this assembly. */
-    public MethodInfo EntryPoint;
-
-    /** the display name of the assembly. */
-    public final String FullName;
-
-    //##########################################################################
-    // constructor
-
-    protected Assembly(AssemblyName an, boolean external) {
-	assemblyName = an;
-	FullName = an.toString();
-    if(external) {
-	  assemblies.put(an.Name, this);
-    }
-	//System.out.println("assemblies after adding the current one: " + assemblies);
-    }
-
-    protected Assembly(AssemblyName an) {
-      this(an, false);
-    }
-
-    protected static Assembly getAssembly(String name) {
-	return (Assembly) assemblies.get(name);
-    }
-
-    //##########################################################################
-    // instrumental methods
-
-    /** @return the file from which this assembly was loaded. */
-    public File getFile() {
-	throw new RuntimeException("Not supported");
-    }
-
-    /** Gets the specified module in this assembly. Works on filenames. */
-    public Module GetModule(String name) {
-	initModules();
-	return (Module)modulesMap.get(name);
-    }
-
-    /** Get all the modules of the assembly. */
-    public Module[] GetModules() {
-	initModules();
-	return (Module[])modulesMap.values().
-	    toArray(new Module[modulesMap.size()]);
-    }
-
-    /** Get the corresponding type. */
-    public Type GetType(String name) {
-	initModules();
-	Iterator modules = modulesMap.values().iterator();
-	Type t = null;
-	while (t == null && modules.hasNext()) {
-	    t = ((Module)modules.next()).GetType(name);
-	}
-	return t;
-    }
-
-    /** @return an array of all types defined in the assembly. */
-    public synchronized Type[] GetTypes() {
- 	if (types != null)
-	    return (Type[])types.clone();
-	initModules();
-
-	Iterator modules = modulesMap.values().iterator();
-	Type[] newTypes = ((Module)modules.next()).GetTypes();
-	while (modules.hasNext()) {
-	    Module module = (Module)modules.next();
-	    Type[] mtypes = module.GetTypes();
-	    Type[] oldTypes = newTypes;
-	    newTypes = new Type[oldTypes.length + mtypes.length];
-	    System.arraycopy(oldTypes, 0, newTypes, 0, oldTypes.length);
-	    System.arraycopy(mtypes, 0, newTypes, oldTypes.length, mtypes.length);
-	}
-	types = newTypes;
-	return (Type[]) types.clone();
-    }
-
-    public AssemblyName GetName() {
-	return assemblyName;
-    }
-
-    public String toString() {
-	return FullName;
-    }
-
-    //##########################################################################
-    // protected members
-
-    // the assembly name
-    protected final AssemblyName assemblyName;
-
-    // all the types exported by the assembly
-    protected Type[] types = null;
-
-    // the module defined in this assembly (only one right now)
-    private final HashMap/*<String, Module>*/ modulesMap = new HashMap();
-
-    protected void addType(Type type) {
-	Type.addType(type);
-    }
-
-    protected void addModule(String name, Module module) {
-	modulesMap.put(name, module);
-    }
-
-    private boolean initModules = true;
-    protected final void initModules() {
-	if (initModules) {
-	    loadModules();
-	    initModules = false;
-	}
-    }
-
-    /** used for lazy construction of the Assembly. */
-    protected abstract void loadModules();
-
-    void dumpTypes() {
-	Type[] types = GetTypes();
-	for (int i = 0; i < types.length; i++)
-	    System.out.println(types[i]);
-    }
-
-    //##########################################################################
-
-}  // class Assembly
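
Most of the removed Assembly.LoadFrom is a file-probing cascade; condensed, the lookup order amounts to the following sketch (Scala purely for illustration; the original is the Java above):

    import java.io.File

    object AssemblyProbe {
      // Probe <dir>/<name>.dll, .DLL, then <dir>/<name>/<name>.dll, .DLL,
      // and the same again for .exe/.EXE, returning the first file that exists.
      def probe(dir: File, name: String): Option[File] = {
        val subdir = new File(dir, name)
        val candidates = for {
          ext   <- Seq("dll", "exe")
          base  <- Seq(dir, subdir)
          cased <- Seq(ext, ext.toUpperCase)
        } yield new File(base, s"$name.$cased")
        candidates.find(_.isFile)
      }
    }
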
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java b/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java
deleted file mode 100644
index acdcb32..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import javax.crypto.Mac;
-
-import java.security.MessageDigest;
-
-import ch.epfl.lamp.compiler.msil.util.Table;
-
-/**
- * Fully describes an assembly's unique identity.
- * Right now it's only the name
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class AssemblyName {
-
-    //##########################################################################
-    // public interface
-
-    /** The simple, unencrypted name of the assembly. */
-    public String Name;
-
-    /**
-     * Gets or sets the major, minor, revision, and build numbers
-     * of the assembly.
-     */
-    public Version Version;
-
-    /**
-     * Gets a strong name consisting of a public key, a given name,
-     * and version parts.
-     */
-    public byte[] GetPublicKeyToken() {
-	return publicKeyToken == null ? null : (byte[]) publicKeyToken.clone();
-    }
-
-    /**
-     * Sets a strong name consisting of a public key, a given name,
-     * and version parts.
-     */
-    public void SetPublicKeyToken(byte[] key) {
-	this.publicKeyToken = key.length == 0 ? null : (byte[]) key.clone();
-    }
-
-    /**
-     * Returns the public key identifying the originator of the assembly.
-     */
-    public byte[] GetPublicKey() {
-	return publicKey == null ? null : (byte[]) publicKey.clone();
-    }
-
-    /**
-     * Sets the public key identifying the originator of the assembly.
-     */
-    public void SetPublicKey(byte[] key) {
-	if (key.length > 0) {
-	    this.publicKey = (byte[]) key.clone();
-	    byte[] hash = sha.digest(key);
-	    byte[] keyToken = new byte[8];
-	    for (int i = 0; i < keyToken.length; i++)
-		keyToken[i] = hash[hash.length - 1 - i];
-	    this.publicKeyToken = keyToken;
-	    //System.out.println("Pubic key and key token of assembly " + this + ":");
-	    //System.out.println("\tPublic key = " + Table.bytes2hex(key));
-	    //System.out.println("\tKey token  = " + Table.bytes2hex(keyToken));
-	}
-    }
-
-    public String toString() {
-	return Name + ", Version=" + Version;
-    }
-
-    //##########################################################################
-
-    private byte[] publicKeyToken;
-
-    private byte[] publicKey;
-
-    private static final MessageDigest sha;
-    static {
-	MessageDigest md = null;
-	try {
-	    md = MessageDigest.getInstance("SHA");
-	} catch (java.security.NoSuchAlgorithmException e) {}
-	sha = md;
-    }
-
-    //##########################################################################
-
-} // class AssemblyName
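
For reference, the token derivation buried in the removed SetPublicKey above is simply the last eight bytes of the key's SHA-1 digest, in reverse order; as a sketch:

    import java.security.MessageDigest

    object KeyToken {
      // Last 8 bytes of the SHA-1 digest of the public key, reversed,
      // mirroring what the removed AssemblyName.SetPublicKey computes.
      def publicKeyToken(publicKey: Array[Byte]): Array[Byte] =
        MessageDigest.getInstance("SHA-1").digest(publicKey).takeRight(8).reverse
    }
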
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java b/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java
deleted file mode 100644
index 0f2c4e6..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java
+++ /dev/null
@@ -1,654 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.Signature;
-
-import java.util.Map;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.Iterator;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.io.UnsupportedEncodingException;
-
-/**
- * Describes custom attribute instances.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class Attribute {
-
-    //##########################################################################
-
-    private final ConstructorInfo constr;
-
-    private final byte[] value;
-
-    Attribute(ConstructorInfo constr, byte[] value) {
-        assert constr != null;
-	this.constr = constr;
-        assert value != null : constr.toString();
-	this.value = value;
-    }
-
-    //##########################################################################
-    // public interface
-
-    /** @return the type (class) of the attribute. */
-    public Type GetType() { return constr.DeclaringType; }
-
-    /** @return the constructor of this attribute. */
-    public ConstructorInfo getConstructor() {
-	return constr;
-    }
-
-    /** @return the Blob with serialized constructor & named arguments. */
-    public byte[] getValue() {
-	byte[] value = new byte[this.value.length];
-	System.arraycopy(this.value, 0, value, 0, value.length);
-	return value;
-    }
-
-    /**@return an array with the arguments to the attribute's constructor. */
-    public Object[] getConstructorArguments() {
-        parseBlob();
-        Object[] cas = new Object[constrArgs.length];
-        System.arraycopy(constrArgs, 0, cas, 0, cas.length);
-        return cas;
-    }
-
-    /** @return the named argument with the given name. */
-    public NamedArgument getNamedArgument(String name) {
-        return (NamedArgument)namedArgs.get(name);
-    }
-
-    /** @return an array of all named arguments for this attribute. */
-    public NamedArgument[] getNamedArguments() {
-        NamedArgument[] nargs =
-            (NamedArgument[])namedArgs.values().toArray(NamedArgument.EMPTY);
-        return nargs;
-    }
-
-    /** @return a string representation of this attribute. */
-    public String toString() {
-        parseBlob();
-	ParameterInfo[] params = constr.GetParameters();
-	assert params.length == constrArgs.length : this.constr;
-        StringBuffer str = new StringBuffer();
-	str.append('[');
-	str.append(constr.DeclaringType.FullName);
-        str.append('(');
-        for (int i = 0; i < constrArgs.length; i++) {
-            if (i > 0)
-                str.append(", ");
-	    Type t = params[i].ParameterType;
-	    if (t.IsEnum()) {
-		str.append('(');
-		str.append(t.FullName);
-		str.append(')');
-	    }
-            formatValue(str, constrArgs[i]);
-        }
-        NamedArgument[] nargs = getNamedArguments();
-        for (int i = 0; i < nargs.length; i++) {
-            str.append(", ").append(nargs[i]);
-            }
-        str.append(")]");
-	return str.toString();
-    }
-
-    //#########################################################################
-
-    private static final Map type2id = new HashMap();
-    private static final Map id2type = new HashMap();
-    static {
-        map("Boolean", Signature.ELEMENT_TYPE_BOOLEAN);
-        map("Char",    Signature.ELEMENT_TYPE_CHAR);
-        map("SByte",   Signature.ELEMENT_TYPE_I1);
-        map("Byte",    Signature.ELEMENT_TYPE_U1);
-        map("Int16",   Signature.ELEMENT_TYPE_I2);
-        map("UInt16",  Signature.ELEMENT_TYPE_U2);
-        map("Int32",   Signature.ELEMENT_TYPE_I4);
-        map("UInt32",  Signature.ELEMENT_TYPE_U4);
-        map("Int64",   Signature.ELEMENT_TYPE_I8);
-        map("UInt64",  Signature.ELEMENT_TYPE_U8);
-        map("Single",  Signature.ELEMENT_TYPE_R4);
-        map("Double",  Signature.ELEMENT_TYPE_R8);
-        map("String",  Signature.ELEMENT_TYPE_STRING);
-        map("Type",    Signature.X_ELEMENT_TYPE_TYPE);
-        map("Object",  Signature.ELEMENT_TYPE_OBJECT);
-    }
-    private static void map(String type, int id) {
-        Type t = Type.GetType("System." + type);
-        assert type != null : type + " -> " + id;
-        Integer i = new Integer(id);
-        type2id.put(t, i);
-        id2type.put(i, t);
-    }
-    private static int getTypeId(Type type) {
-        Integer id = (Integer)type2id.get(type);
-        assert id != null : type;
-        return id.intValue();
-    }
-
-    private Object[] constrArgs;
-    private Map namedArgs;
-    private ByteBuffer buf;
-
-    private void parseBlob() {
-        try { parseBlob0(); }
-        catch (RuntimeException e) {
-            throw new RuntimeException(PEFile.bytes2hex(value), e);
-        }
-    }
-
-    private void parseBlob0() {
-        if (buf != null)
-            return;
-        buf = ByteBuffer.wrap(value);                                   // Sec. 23.3 in Partition II of CLR Spec.
-        buf.order(ByteOrder.LITTLE_ENDIAN);
-
-        short sig = buf.getShort();                                     // Prolog
-        assert sig == 1 : PEFile.bytes2hex(value);
-        ParameterInfo[] params = constr.GetParameters();
-        constrArgs = new Object[params.length];
-        for (int i = 0; i < params.length; i++) {
-            constrArgs[i] = parseFixedArg(params[i].ParameterType);     // FixedArg
-        }
-
-        int ncount = buf.getShort();                                   // NumNamed
-        namedArgs = new LinkedHashMap();
-        for (int i = 0; i < ncount; i++) {
-            int designator = buf.get();                                // designator one of 0x53 (FIELD) or 0x54 (PROPERTY)
-            assert designator == Signature.X_ELEMENT_KIND_FIELD
-                || designator == Signature.X_ELEMENT_KIND_PROPERTY
-                : "0x" + PEFile.byte2hex(designator);
-            Type type = parseFieldOrPropTypeInNamedArg();              // FieldOrPropType
-            String name = parseString();                               // FieldOrPropName
-            Object value = parseFixedArg(type);                        // FixedArg
-            NamedArgument narg =
-                new NamedArgument(designator, name, type, value);
-            namedArgs.put(name, narg);
-        }
-    }
-
-    private Object parseFixedArg(Type type) {
-      if (type.IsArray())
-	    return parseArray(type.GetElementType());
-	  else
-        return parseElem(type);
-    }
-
-    /* indicates whether the "simple" case (the other is "enum") of the first row
-       in the Elem production should be taken. */
-    private boolean isSimpleElem(Type type) {
-        if(!type2id.containsKey(type)) return false;
-        int id = getTypeId(type);
-        switch(id){
-            case Signature.ELEMENT_TYPE_STRING:
-            case Signature.X_ELEMENT_TYPE_TYPE:
-            case Signature.ELEMENT_TYPE_OBJECT:
-                return false;
-            default:
-                return true;
-        }
-    }
-
-    /* indicates whether the second row in the Elem production
-       should be taken (and more specifically, "string" case within that row). */
-    private boolean isStringElem(Type type) {
-        if(!type2id.containsKey(type)) return false;
-        int id = getTypeId(type);
-        return id == Signature.ELEMENT_TYPE_STRING;
-    }
-
-    /* indicates whether the second row in the Elem production
-       should be taken (and more specifically, "type" case within that row). */
-    private boolean isTypeElem(Type type) {
-        if(!type2id.containsKey(type)) return false;
-        int id = getTypeId(type);
-        return id == Signature.X_ELEMENT_TYPE_TYPE;
-    }
-
-    /* indicates whether the third row in the Elem production
-       should be taken (and more specifically, "boxed" case within that row). */
-    private boolean isSystemObject(Type type) {
-        if(!type2id.containsKey(type)) return false;
-        int id = getTypeId(type);
-        return id == Signature.ELEMENT_TYPE_OBJECT;
-    }
-
-    private Object parseElem(Type type) {
-       // simple or enum
-       if (isSimpleElem(type)) return parseVal(getTypeId(type));
-       if (type.IsEnum())      return parseVal(getTypeId(type.getUnderlyingType()));
-       // string or type
-       if (isStringElem(type)) return parseString();
-       if (isTypeElem(type))   return getTypeFromSerString();
-       // boxed valuetype, please notice that a "simple" boxed valuetype is preceded by 0x51
-       if (isSystemObject(type)) {
-           Type boxedT = parse0x51();
-           if(boxedT.IsEnum()) {
-               return new BoxedArgument(boxedT, parseVal(getTypeId(boxedT.getUnderlyingType())));
-           } else {
-               return new BoxedArgument(boxedT, parseVal(getTypeId(boxedT))); // TODO dead code?
-           }
-       } else {
-           Type boxedT = parseType();
-           return parseVal(getTypeId(boxedT));
-       }
-    }
-
-    /* this does not parse an Elem, but a made-up production (Element). Don't read too much into this method name! */
-    private Object parseVal(int id) {
-        switch (id) {
-        case Signature.ELEMENT_TYPE_BOOLEAN:
-            return new Boolean(buf.get() == 0 ? false : true);
-        case Signature.ELEMENT_TYPE_CHAR:
-            return new Character(buf.getChar());
-        case Signature.ELEMENT_TYPE_I1:
-        case Signature.ELEMENT_TYPE_U1:
-            return new Byte(buf.get());       // TODO U1 not the same as I1
-        case Signature.ELEMENT_TYPE_I2:
-        case Signature.ELEMENT_TYPE_U2:
-            return new Short(buf.getShort()); // TODO U2 not the same as I2
-        case Signature.ELEMENT_TYPE_I4:
-        case Signature.ELEMENT_TYPE_U4:
-            return new Integer(buf.getInt()); // TODO U4 not the same as I4
-        case Signature.ELEMENT_TYPE_I8:
-        case Signature.ELEMENT_TYPE_U8:
-            return new Long(buf.getLong());   // TODO U8 not the same as I8
-        case Signature.ELEMENT_TYPE_R4:
-            return new Float(buf.getFloat());
-        case Signature.ELEMENT_TYPE_R8:
-            return new Double(buf.getDouble());
-        case Signature.X_ELEMENT_TYPE_TYPE:
-            return getTypeFromSerString();
-        case Signature.ELEMENT_TYPE_STRING:
-            return parseString();
-        default:
-            throw new RuntimeException("Shouldn't have called parseVal with: " + id);
-        }
-    }
-
-    private Object parseArray(Type type) {
-	if (type.IsEnum())
-	    return parseArray(type.getUnderlyingType());
-	return parseArray(getTypeId(type));
-    }
-
-    private Object parseArray(int id) {
-        switch (id) {
-        case Signature.ELEMENT_TYPE_BOOLEAN:
-            return parseBooleanArray();
-        case Signature.ELEMENT_TYPE_CHAR:
-            return parseCharArray();
-        case Signature.ELEMENT_TYPE_I1:
-        case Signature.ELEMENT_TYPE_U1:    // TODO U1 not the same as I1
-            return parseByteArray();
-        case Signature.ELEMENT_TYPE_I2:
-        case Signature.ELEMENT_TYPE_U2:
-            return parseShortArray();
-        case Signature.ELEMENT_TYPE_I4:
-        case Signature.ELEMENT_TYPE_U4:
-            return parseIntArray();
-        case Signature.ELEMENT_TYPE_I8:
-        case Signature.ELEMENT_TYPE_U8:
-            return parseLongArray();
-        case Signature.ELEMENT_TYPE_R4:
-            return parseFloatArray();
-        case Signature.ELEMENT_TYPE_R8:
-            return parseDoubleArray();
-        case Signature.ELEMENT_TYPE_STRING:
-            return parseStringArray();
-        case Signature.X_ELEMENT_TYPE_ENUM:
-	    return parseArray(getTypeFromSerString());
-        default:
-            throw new RuntimeException("Unknown type id: " + id);
-        }
-    }
-
-    private Type parseType() { // FieldOrPropType, Sec. 23.3 in Partition II of CLR Spec.
-        int id = buf.get();
-        switch (id) {
-        case Signature.ELEMENT_TYPE_SZARRAY:
-            Type arrT = Type.mkArray(parseType(), 1);
-            return arrT;
-        case Signature.X_ELEMENT_TYPE_ENUM:
-            String enumName = parseString();
-            Type enumT = Type.getType(enumName);
-            return enumT;
-        default:
-            Type t = (Type)id2type.get(new Integer(id));
-            assert t != null : PEFile.byte2hex(id);
-            return t;
-        }
-    }
-
-    private Type parse0x51() {
-        int id = buf.get();
-        switch (id) {
-        case 0x51:
-            return parse0x51();
-        case Signature.ELEMENT_TYPE_SZARRAY:
-            Type arrT = Type.mkArray(parseType(), 1);
-            return arrT;
-        case Signature.X_ELEMENT_TYPE_ENUM:
-            String enumName = parseString();
-            Type enumT = Type.getType(enumName);
-            return enumT;
-        default:
-            Type t = (Type)id2type.get(new Integer(id));
-            assert t != null : PEFile.byte2hex(id);
-            return t;
-        }
-    }
-
-
-    private Type parseFieldOrPropTypeInNamedArg() { // FieldOrPropType, Sec. 23.3 in Partition II of CLR Spec.
-        int id = buf.get();
-        switch (id) {
-        case 0x51:
-            return (Type)(id2type.get(new Integer(Signature.ELEMENT_TYPE_OBJECT)));
-        // TODO remove case Signature.ELEMENT_TYPE_SZARRAY:
-            // Type arrT = Type.mkArray(parseType(), 1);
-            // return arrT;
-        case Signature.X_ELEMENT_TYPE_ENUM:
-            String enumName = parseString();
-            Type enumT = Type.getType(enumName); // TODO this "lookup" only covers already-loaded assemblies.
-            return enumT; // TODO null as return value (due to the above) spells trouble later.
-        default:
-            Type t = (Type)id2type.get(new Integer(id));
-            assert t != null : PEFile.byte2hex(id);
-            return t;
-        }
-    }
-
-    private Type getTypeFromSerString() {
-        String typename = parseString();
-        int i = typename.indexOf(',');
-        /* fully qualified assembly name follows. Just strip it on the assumption that
-           the assembly is referenced in the externs and the type will be found. */
-        String name = (i < 0) ? typename : typename.substring(0, i);
-        Type t = Type.GetType(name);
-        if (t == null && i > 0) {
-            int j = typename.indexOf(',', i + 1);
-            if (j > 0) {
-                String assemName = typename.substring(i + 1, j);
-                try {
-                    Assembly.LoadFrom(assemName);
-                } catch (Throwable e) {
-                    throw new RuntimeException(typename, e);
-                }
-                t = Type.GetType(name);
-            }
-        }
-        assert t != null : typename;
-        return t;
-    }
-
-    private boolean[] parseBooleanArray() {
-        boolean[] arr = new boolean[buf.getInt()];
-        for (int i = 0; i < arr.length; i++)
-            arr[i] = buf.get() == 0 ? false : true;
-        return arr;
-    }
-
-    private char[] parseCharArray() {
-        char[] arr = new char[buf.getInt()];
-        for (int i = 0; i < arr.length; i++)
-            arr[i] = buf.getChar();
-        return arr;
-    }
-
-    private byte[] parseByteArray() {
-        byte[] arr = new byte[buf.getInt()];
-        for (int i = 0; i < arr.length; i++)
-            arr[i] = buf.get();
-        return arr;
-    }
-
-    private short[] parseShortArray() {
-        short[] arr = new short[buf.getInt()];
-        for (int i = 0; i < arr.length; i++)
-            arr[i] = buf.getShort();
-        return arr;
-    }
-
-    private int[] parseIntArray() {
-        int[] arr = new int[buf.getInt()];
-        for (int i = 0; i < arr.length; i++)
-            arr[i] = buf.getInt();
-        return arr;
-    }
-
-    private long[] parseLongArray() {
-        long[] arr = new long[buf.getInt()];
-        for (int i = 0; i < arr.length; i++)
-            arr[i] = buf.getLong();
-        return arr;
-    }
-
-    private float[] parseFloatArray() {
-        float[] arr = new float[buf.getInt()];
-        for (int i = 0; i < arr.length; i++)
-            arr[i] = buf.getFloat();
-        return arr;
-    }
-
-    private double[] parseDoubleArray() {
-        double[] arr = new double[buf.getInt()];
-        for (int i = 0; i < arr.length; i++)
-            arr[i] = buf.getDouble();
-        return arr;
-    }
-
-    private String[] parseStringArray() {
-        String[] arr = new String[buf.getInt()];
-        for (int i = 0; i < arr.length; i++)
-            arr[i] = parseString();
-        return arr;
-    }
-
-    private String parseString() { // SerString convention
-        String str = null;
-        int length = parseLength();
-        if (length < 0)
-            return null;
-        try { str = new String(value, buf.position(), length, "UTF-8" ); }
-        catch (UnsupportedEncodingException e) { throw new Error(e); }
-        buf.position(buf.position() + length);
-        return str;
-    }
-
-    private int getByte() {
-        return (buf.get() + 0x0100) & 0xff;
-    }
-
-    public int parseLength() {
-	int length = getByte();
-        // Check for the invalid-length marker: at least one of the three most
-        // significant bits must be 0; if all three are 1, the length is invalid
-        // (null string). A worked decoding example follows this class.
-        if ((length & 0xe0) == 0xe0)
-            return -1;
-	if ((length & 0x80) != 0) {
-	    length = ((length & 0x7f) << 8) | getByte();
-	    if ((length & 0x4000) != 0)
-		length = ((length & 0x3fff) << 16) | (getByte()<<8) | getByte();
-	}
-	return length;
-    }
-
-    //##########################################################################
-    private static void formatValue(StringBuffer str, Object o) {
-        Class c = (o == null) ? null : o.getClass();
-        if (c == null) {
-            str.append("<null>");
-        } else if (c == String.class) {
-            str.append('"');
-            str.append(o);
-            str.append('"');
-        } else if (c == Character.class) {
-            str.append('\'');
-            str.append(o);
-            str.append('\'');
-        } else if (c == boolean[].class) {
-            str.append("new boolean[] {");
-            boolean[] arr = (boolean[])o;
-            for (int i = 0; i < arr.length; i++) {
-                if (i > 0) str.append(", ");
-                str.append(arr[i]);
-            }
-            str.append('}');
-        } else if (c == char[].class) {
-            str.append("new short[] {");
-            short[] arr = (short[])o;
-            for (int i = 0; i < arr.length; i++) {
-                if (i > 0) str.append(", ");
-                str.append(arr[i]);
-            }
-            str.append('}');
-        } else if (c == byte[].class) {
-            str.append("new byte[] {");
-            byte[] arr = (byte[])o;
-            for (int i = 0; i < arr.length; i++) {
-                if (i > 0) str.append(", ");
-                str.append(arr[i]);
-            }
-            str.append('}');
-        } else if (c == short[].class) {
-            str.append("new short[] {");
-            short[] arr = (short[])o;
-            for (int i = 0; i < arr.length; i++) {
-                if (i > 0) str.append(", ");
-                str.append(arr[i]);
-            }
-            str.append('}');
-        } else if (c == int[].class) {
-            str.append("new int[] {");
-            int[] arr = (int[])o;
-            for (int i = 0; i < arr.length; i++) {
-                if (i > 0) str.append(", ");
-                str.append(arr[i]);
-            }
-            str.append('}');
-        } else if (c == long[].class) {
-            str.append("new long[] {");
-            long[] arr = (long[])o;
-            for (int i = 0; i < arr.length; i++) {
-                if (i > 0) str.append(", ");
-                str.append(arr[i]);
-            }
-            str.append('}');
-        } else if (c == float[].class) {
-            str.append("new float[] {");
-            float[] arr = (float[])o;
-            for (int i = 0; i < arr.length; i++) {
-                if (i > 0) str.append(", ");
-                str.append(arr[i]);
-            }
-            str.append('}');
-        } else if (c == double[].class) {
-            str.append("new double[] {");
-            double[] arr = (double[])o;
-            for (int i = 0; i < arr.length; i++) {
-                if (i > 0) str.append(", ");
-                str.append(arr[i]);
-            }
-            str.append('}');
-        } else if (c == String[].class) {
-            str.append("new String[] {");
-            String[] arr = (String[])o;
-            for (int i = 0; i < arr.length; i++) {
-                if (i > 0) str.append(", ");
-                formatValue(str, arr[i]);
-            }
-            str.append('}');
-	} else if (o instanceof Type) {
-	    str.append("typeof(");
-	    str.append(o);
-	    str.append(")");
-        } else
-            str.append(o);
-    }
-
-    //##########################################################################
-
-    /** Represents named arguments (assigned outside of the constructor)
-     *  of a custom attribute
-     */
-    public static class NamedArgument {
-
-        /** Designates if the named argument corresponds to a field or property.
-         *  Possible values:
-         *      Signature.X_ELEMENT_KIND_FIELD = 0x53
-         *      Signature.X_ELEMENT_KIND_PROPERTY = 0x54
-         */
-        public final int designator;
-
-        /** The name of the field/property. */
-        public final String name;
-
-        /** Type of the field/property. */
-        public final Type type;
-
-        /** The value for the field/property. */
-        public final Object value;
-
-        /** An empty array of NamedArgument. */
-        public static final NamedArgument[] EMPTY = new NamedArgument[0];
-
-        public NamedArgument(int designator, String name,Type type,Object value)
-        {
-            this.designator = designator;
-            this.name = name;
-            this.type = type;
-            this.value = value;
-        }
-
-        /** @return <b>true</b> if the named argument specifies a field;
-         *  <b>false</b> otherwise.
-         */
-        public boolean isField() {
-            return designator == Signature.X_ELEMENT_KIND_FIELD;
-        }
-
-        /** @return <b>true</b> if the named argument specifies a property;
-         *  <b>false</b> otherwise.
-         */
-        public boolean isProperty() {
-            return designator == Signature.X_ELEMENT_KIND_PROPERTY;
-        }
-
-        /** @return a string representation of the named argument. */
-        public String toString() {
-            StringBuffer str = new StringBuffer(name);
-            str.append(" = ");
-            if (type.IsEnum())
-                str.append('(').append(type.FullName).append(')');
-            formatValue(str, value);
-            return str.toString();
-        }
-    }
-
-    //##########################################################################
-
-    public static class BoxedArgument {
-        public final Type type;
-        public final Object value;
-        public BoxedArgument(Type type, Object value) {
-            this.type = type; this.value = value;
-        }
-        public String toString() {
-            return "(" + type.FullName + ")" + value;
-        }
-    }
-
-    //##########################################################################
-
-}  // class Attribute
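
For readers unfamiliar with the compressed SerString lengths that parseLength() and parseString() above decode, here is a minimal, self-contained sketch. It is not part of the removed sources; class and method names are illustrative only. It mirrors the branch structure of parseLength(): one byte for lengths up to 0x7F, two bytes when the top bit is set, four bytes when the second-highest bit is also set, and a prefix with the three top bits all set marking a null string.

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    class SerStringDemo {
        // Mirrors Attribute.parseLength() above.
        static int readLength(ByteBuffer buf) {
            int len = buf.get() & 0xff;
            if ((len & 0xe0) == 0xe0) return -1;                  // null-string marker
            if ((len & 0x80) != 0) {
                len = ((len & 0x7f) << 8) | (buf.get() & 0xff);   // two-byte form
                if ((len & 0x4000) != 0)                          // four-byte form
                    len = ((len & 0x3fff) << 16) | ((buf.get() & 0xff) << 8) | (buf.get() & 0xff);
            }
            return len;
        }

        public static void main(String[] args) {
            // 0x05 followed by the UTF-8 payload "hello".
            ByteBuffer buf = ByteBuffer.wrap(new byte[] { 0x05, 'h', 'e', 'l', 'l', 'o' });
            int len = readLength(buf);
            byte[] bytes = new byte[len];
            buf.get(bytes);
            System.out.println(new String(bytes, StandardCharsets.UTF_8)); // prints "hello"
        }
    }
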
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java b/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java
deleted file mode 100644
index cac2319..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Specifies flags that control binding and the way in which
- * the search for members and types is conducted by reflection.
- *
- * Note: You must specify Instance or Static along with Public or NonPublic
- * or no members will be returned.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class BindingFlags {
-
-    //##########################################################################
-
-    // disallows extending the class;
-    private BindingFlags() {}
-
-    /**
-     * Specifies no binding flag.
-     */
-    public static final int Default = 0x0000;
-
-    /**
-     * Specifies that the case of the member name should not be considered
-     * when binding.
-     */
-    public static final int IgnoreCase = 0x0001;
-
-    /**
-     * Specifies that only members declared at the level of the supplied type's
-     * hierarchy should be considered. Inherited members are not considered.
-     */
-    public static final int DeclaredOnly = 0x0002;
-
-    /**
-     * Specifies that instance members are to be included in the search.
-     */
-    public static final int Instance = 0x0004;
-
-    /**
-     * Specifies that static members are to be included in the search.
-     */
-    public static final int Static = 0x0008;
-
-    /**
-     * Specifies that public members are to be included in the search.
-     */
-    public static final int Public = 0x0010;
-
-    /**
-     * Specifies that non-public members are to be included in the search.
-     */
-    public static final int NonPublic = 0x0020;
-
-    /**
-     * Specifies that static members up the hierarchy should be returned.
-     * Static members include fields, methods, events, and properties.
-     * Nested types are not returned.
-     */
-    public static final int FlattenHierarchy = 0x0040;
-
-    /**
-     * Specifies that a method is to be invoked. This must not be a constructor
-     * or a type initializer.
-     */
-    public static final int InvokeMethod = 0x0100;
-
-    /**
-     * Specifies that Reflection should create an instance of
-     * the specified type. Calls the constructor that matches
-     * the given arguments. The supplied member name is ignored.
-     * If the type of lookup is not specified, (Instance | Public)
-     * will apply. It is not possible to call a type initializer.
-     */
-    public static final int CreateInstance = 0x0200;
-
-    /**
-     * Specifies that the value of the specified field should be returned.
-     */
-    public static final int GetField = 0x0400;
-
-    /**
-     * Specifies that the value of the specified field should be set.
-     */
-    public static final int SetField = 0x0800;
-
-    /**
-     * Specifies that the value of the specified property should be returned.
-     */
-    public static final int GetProperty = 0x1000;
-
-    /**
-     * Specifies that the value of the specified property should be set.
-     * For COM properties, specifying this binding flag is equivalent to
-     * specifying PutDispProperty and PutRefDispProperty.
-     */
-    public static final int SetProperty = 0x2000;
-
-    /**
-     * Specifies that the PROPPUT member on a COM object should be invoked.
-     * PROPPUT specifies a property-setting function that uses a value.
-     * Use PutDispProperty if a property has both PROPPUT and PROPPUTREF
-     * and you need to distinguish which one is called.
-     */
-    public static final int PutDispProperty = 0x4000;
-
-
-    /**
-     * Specifies that the PROPPUTREF member on a COM object should be invoked.
-     * PROPPUTREF specifies a property-setting function that uses a reference
-     * instead of a value. Use PutRefDispProperty if a property has both
-     * PROPPUT and PROPPUTREF and you need to distinguish which one is called.
-     */
-    public static final int PutRefDispProperty = 0x8000;
-
-    /**
-     * Specifies that types of the supplied arguments must exactly match
-     * the types of the corresponding formal parameters. Reflection
-     * throws an exception if the caller supplies a non-null Binder object,
-     * since that implies that the caller is supplying BindToXXX
-     * implementations that will pick the appropriate method.
-     * Reflection models the accessibility rules of the common type system.
-     * For example, if the caller is in the same assembly, the caller
-     * does not need special permissions for internal members. Otherwise,
-     * the caller needs ReflectionPermission. This is consistent with
-     * lookup of members that are protected, private, and so on.
-     * The general principle is that ChangeType should perform only
-     * widening coercions, which never lose data. An example of a
-     * widening coercion is coercing a value that is a 32-bit signed integer
-     * to a value that is a 64-bit signed integer. This is distinguished
-     * from a narrowing coercion, which may lose data. An example of
-     * a narrowing coercion is coercing a 64-bit signed integer to
-     * a 32-bit signed integer.
-     * The default binder ignores this flag, while custom binders can
-     * implement the semantics of this flag.
-     */
-    public static final int ExactBinding = 0x10000;
-
-    /**
-     * Used in COM interop to specify that the return value of the member
-     * can be ignored.
-     */
-    public static final int IgnoreReturn = 0x100000;
-
-    /**
-     * Returns the set of members whose parameter count matches the number
-     * of supplied arguments. This binding flag is used for methods with
-     * parameters that have default values and methods with variable arguments
-     * (varargs). This flag should only be used with Type.InvokeMember.
-     * Parameters with default values are used only in calls where trailing
-     * arguments are omitted. They must be the last arguments.
-     */
-    public static final int OptionalParamBinding = 0x40000;
-
-    /**
-     * Not implemented.
-     */
-    public static final int SuppressChangeType = 0x20000;
-
-    //##########################################################################
-
-}  // class BindingFlags
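
A short illustrative sketch of the rule stated in the class comment above: a scope flag (Instance or Static) must be combined with a visibility flag (Public or NonPublic), or no members are returned. The lookup call in the last line is hypothetical usage, not an API reproduced from the removed sources.

    // Declared-only, public instance members:
    int declaredPublicInstance =
        BindingFlags.Instance | BindingFlags.Public | BindingFlags.DeclaredOnly;

    // All static members, public or not, including those further up the hierarchy:
    int allStatic =
        BindingFlags.Static | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.FlattenHierarchy;

    // e.g. someType.GetMembers(declaredPublicInstance)   <- hypothetical call site
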
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java b/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java
deleted file mode 100644
index 50bf9fb..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-
-/**
- * Calling conventions
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class CallingConventions {
-
-    //########################################################################
-
-    /**
-     * Specifies the default calling convention as determined by the
-     * common language runtime.
-     */
-    public static final short Standard = (short) 0x0001;
-
-    /**
-     * Specifies the calling convention for methods with variable arguments.
-     */
-    public static final short VarArgs = (short) 0x0002;
-
-    /**
-     * Specifies that either the Standard or the VarArgs calling
-     * convention may be used.
-     */
-    public static final short Any = Standard | VarArgs;
-
-    /**
-     * Specifies an instance or virtual method (not a static method).
-     * At run-time, the called method is passed a pointer to the target
-     * object as its first argument (the this pointer). The signature
-     * stored in metadata does not include the type of this first argument,
-     * because the method is known and its owner class can be discovered
-     * from metadata.
-     */
-    public static final short HasThis = (short) 0x0020;
-
-    /**
-     * Specifies that the signature is a function-pointer signature,
-     * representing a call to an instance or virtual method (not a static
-     * method). If ExplicitThis is set, HasThis must also be set. The first
-     * argument passed to the called method is still a this pointer, but the
-     * type of the first argument is now unknown. Therefore, a token that
-     * describes the type (or class) of the this pointer is explicitly stored
-     * into its metadata signature.
-     */
-    public static final short ExplicitThis = (short) 0x0040;
-
-    //########################################################################
-
-    private CallingConventions() {}
-
-    public static String toString(int callConv) {
-	StringBuffer s = new StringBuffer();
-
-	if ((callConv & HasThis) != 0) {
-	    s.append("instance");
-	    if ((callConv & ExplicitThis) != 0)
-		s.append(" explicit");
-	}
-
-	return s.toString();
-    }
-
-    //##########################################################################
-
-}  // class CallingConventions
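
A minimal usage sketch for the toString helper above; the output follows directly from its implementation (HasThis prints "instance", ExplicitThis additionally appends " explicit").

    short instanceConv = (short) (CallingConventions.Standard | CallingConventions.HasThis);
    System.out.println(CallingConventions.toString(instanceConv));  // prints "instance"

    short staticConv = CallingConventions.Standard;
    System.out.println(CallingConventions.toString(staticConv));    // prints "" (no this pointer)
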
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java b/src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java
deleted file mode 100644
index 8c82cb4..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java
+++ /dev/null
@@ -1,48 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Arrays;
-
-/* The only reason for ConstructedType to extend Type is complying with existing code
-  (e.g., caseFieldBuilder in ILPrinterVisitor) expecting a Type.
- */
-public class ConstructedType extends Type {
-
-    public final Type instantiatedType;
-    public final Type[] typeArgs;
-
-    public ConstructedType(Type instantiatedType, Type[] typeArgs) {
-        super(instantiatedType.Module, instantiatedType.Attributes, "", null, null, null, instantiatedType.auxAttr /*AuxAttr.None*/ , null);
-        this.instantiatedType = instantiatedType;
-        this.typeArgs = typeArgs;
-    }
-
-    public String toString() {
-        String res = instantiatedType.toString()  + "[";
-        for (int i = 0; i < typeArgs.length; i++) {
-            res = res + typeArgs[i].toString();
-            if(i + 1 < typeArgs.length) {
-                res = res + ", ";
-            }
-        }
-        return res + "]";
-    }
-
-
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        ConstructedType that = (ConstructedType) o;
-
-        if (!instantiatedType.equals(that.instantiatedType)) return false;
-        if (!Arrays.equals(typeArgs, that.typeArgs)) return false;
-
-        return true;
-    }
-
-    public int hashCode() {
-        int result = instantiatedType.hashCode();
-        result = 31 * result + Arrays.hashCode(typeArgs);
-        return result;
-    }
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java
deleted file mode 100644
index 69f5d6d..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Discovers the attributes of a class constructor and provides
- * access to constructor metadata.
- * ConstructorInfo is used to discover the attributes of a constructor
- * as well as to invoke a constructor. Objects are created by invoking
- * either the GetConstructors or GetConstructor method of a Type object.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class ConstructorInfo extends MethodBase {
-    //##########################################################################
-
-    public final int MemberType() { return MemberTypes.Constructor; }
-
-    public final boolean IsConstructor() { return true; }
-
-    protected static final String CTOR = ".ctor";
-    protected static final String CCTOR = ".cctor";
-    protected static final ConstructorInfo[] EMPTY_ARRAY = new ConstructorInfo[0];
-
-    protected static String getName(int attrs) {
-	  return (attrs & MethodAttributes.Static) == 0 ? CTOR : CCTOR;
-    }
-
-    /** Public constructors */
-
-    public ConstructorInfo(Type declType, int attrs, Type[] paramTypes) {
-	  super(getName(attrs), declType, attrs, paramTypes);
-	  assert declType != null : "Owner can't be 'null' for a constructor!";
-    }
-
-    public ConstructorInfo(Type declType, int attrs, ParameterInfo[] params)
-    {
-	  super(getName(attrs), declType, attrs, params);
-	  assert declType != null : "Owner can't be 'null' for a constructor!";
-    }
-
-
-    public String toString() {
-	return MethodAttributes.toString(Attributes) + " " + Type.VOID() +
-	    " " + DeclaringType.FullName + "::" + Name + params2String();
-    }
-
-    //##########################################################################
-
-} // class ConstructorInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java b/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java
deleted file mode 100644
index 0e58c18..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.List;
-import java.util.LinkedList;
-import java.util.Iterator;
-
-/**
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class CustomAttributeProvider implements ICustomAttributeProvider {
-
-    //##########################################################################
-
-    protected List/*<Attribute>*/ custAttrs;
-    private static final Object[] EMPTY = new Object[0];
-
-    //TODO: take inherit into account
-    public Object[] GetCustomAttributes(boolean inherit) {
-	initAttributes(null);
-	return custAttrs.size() == 0 ? EMPTY
-            : custAttrs.toArray(new Attribute[custAttrs.size()]);
-    }
-
-    //TODO: take inherit into account
-    public Object[] GetCustomAttributes(Type attributeType, boolean inherit) {
-	initAttributes(attributeType);
-        List tAttrs = null;
-        if (constrType == attributeType)
-            tAttrs = custAttrs;
-        else {
-            tAttrs = new LinkedList();
-            for (Iterator attrs = custAttrs.iterator(); attrs.hasNext(); ) {
-                Attribute a = (Attribute) attrs.next();
-                if (a.GetType() == attributeType) tAttrs.add(a);
-            }
-        }
-	return tAttrs.size() == 0 ? EMPTY
-            : tAttrs.toArray(new Attribute[tAttrs.size()]);
-    }
-
-    //TODO: take inherit into account
-    public boolean IsDefined(Type attributeType, boolean inherit) {
-	initAttributes(attributeType);
-        if (constrType == attributeType)
-            return custAttrs.size() > 0;
-	Iterator attrs = custAttrs.iterator();
-	while (attrs.hasNext()) {
-	    if (((Attribute)attrs.next()).GetType() == attributeType)
-		return true;
-	}
-	return false;
-// 	return inherit && (DeclaringClass.BaseType != null)
-// 	    && DeclaringClass.BaseType.IsDefined(inherit);
-    }
-
-    protected void addCustomAttribute(ConstructorInfo constr, byte[] value) {
-        Attribute attr = new Attribute(constr, value);
-        assert constrType == null || constrType == attr.GetType();
-        if (custAttrs == null)
-            custAttrs = new LinkedList();
-	custAttrs.add(attr);
-    }
-
-    private void initAttributes(Type atype) {
-	if (custAttrs != null
-            && (constrType == null || constrType == atype))
-	    return;
-	custAttrs = new LinkedList();
-        constrType = atype;
-	loadCustomAttributes(atype);
-    }
-
-    protected void loadCustomAttributes(Type atype) {}
-
-    private Type constrType;
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java b/src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java
deleted file mode 100644
index cf30008..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Quoting from  the CIL spec, Partition II, Sec. 7.1.1:
- *
- * Custom modifiers, defined using `modreq` (required modifier) and `modopt` (optional modifier), are
- * similar to custom attributes (Sec. 21) except that modifiers are part of a signature rather than being attached to a
- * declaration. Each modifier associates a type reference with an item in the signature.
- *
- */
-public class CustomModifier {
-
-    public boolean isReqd;
-    public Type    marker;
-
-    public CustomModifier(boolean isReqd, Type marker) {
-        this.isReqd = isReqd;
-        this.marker = marker;
-    }
-
-    public String toString() {
-        String res = (isReqd ? "modreq( " : "modopt( ") + marker.toString() + " )";
-        return res;
-    }
-
-    public static Type[] helperCustomMods(boolean isReqd, CustomModifier[] cmods) {
-        if(cmods == null) return null;
-        int count = 0;
-        for (int idx = 0; idx < cmods.length; idx++) {
-            if(cmods[idx].isReqd == isReqd) count++;
-        }
-        Type[] res = new Type[count];
-        int residx = 0;
-        for (int idx = 0; idx < cmods.length; idx++) {
-            if(cmods[idx].isReqd == isReqd) {
-                res[residx] = cmods[idx].marker;
-                residx++;
-            }
-        }
-        return res;
-    }
-
-    public static Type VolatileMarker() {
-        return Type.GetType("System.Runtime.CompilerServices.IsVolatile");
-    }
-
-}
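
An illustrative sketch of the modreq/modopt split performed by helperCustomMods above. This is hypothetical usage, not code from the removed sources, and it assumes both marker types can be resolved by Type.GetType (i.e. their assemblies are already loaded, as the VolatileMarker helper also assumes).

    CustomModifier[] cmods = {
        new CustomModifier(true,  CustomModifier.VolatileMarker()),                               // modreq
        new CustomModifier(false, Type.GetType("System.Runtime.CompilerServices.IsConst"))        // modopt
    };
    Type[] required = CustomModifier.helperCustomMods(true,  cmods);  // [IsVolatile]
    Type[] optional = CustomModifier.helperCustomMods(false, cmods);  // [IsConst]
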
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java
deleted file mode 100644
index a183993..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Specifies flags that describe the attributes of an event.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class EventAttributes {
-
-    //##########################################################################
-
-    /** Specifies that the event has no attributes. */
-    public static final short None = 0x000;
-
-    /** Specifies a reserved flag for CLR use only. */
-    public static final short ReservedMask = 0x0400;
-
-    /** Specifies that the event is special in a way described by the name. */
-    public static final short SpecialName = 0x0200;
-
-    /** Specifies that the CLR should check name encoding. */
-    public static final short RTSpecialName = 0x0400;
-
-    //##########################################################################
-
-} // class EventAttributes
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java
deleted file mode 100644
index 3ccba79..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-
-/**
- * Discovers the attributes of an event
- * and provides access to event metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class EventInfo extends MemberInfo {
-
-    //##########################################################################
-
-    public final int MemberType() { return MemberTypes.Event; }
-
-    /** Attributes associated with the event. */
-    public final short Attributes;
-
-    /** The Type object for the underlying event-handler delegate
-     *  associated with this event.
-     */
-    public final Type EventHandlerType;
-
-    public MethodInfo GetAddMethod() { return addMethod; }
-
-    public MethodInfo GetRemoveMethod() { return removeMethod; }
-
-    public String toString() {
-        return "" + EventHandlerType + " " + Name;
-    }
-
-    //##########################################################################
-
-    protected static final EventInfo[] EMPTY_ARRAY = new EventInfo[0];
-
-    protected MethodInfo addMethod;
-
-    protected MethodInfo removeMethod;
-
-    protected EventInfo(String name, Type declType, short attr,
-                        Type handlerType, MethodInfo add, MethodInfo remove)
-    {
-        super(name, declType);
-        Attributes = attr;
-        EventHandlerType = handlerType;
-        this.addMethod = add;
-        this.removeMethod = remove;
-    }
-
-    //##########################################################################
-
-} // class EventInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java
deleted file mode 100644
index d7d1bb3..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Specifies flags that describe the attributes of a field.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class FieldAttributes {
-
-    //##########################################################################
-
-    /** Specifies the access level of a given field. */
-    public static final short FieldAccessMask = 0x0007;
-
-    /** Member not referenceable. */
-    public static final short CompilerControlled = 0x0000;
-
-    /** Field is accessible only by the parent type. */
-    public static final short Private = 0x0001;
-
-    /** Field is accessible only by subtypes in this assembly. */
-    public static final short FamANDAssem = 0x0002;
-
-    /** Field is accessible throughout the assembly. */
-    public static final short Assembly = 0x0003;
-
-    /** Field is accessible only by type and subtypes. */
-    public static final short Family = 0x0004;
-
-    /** Field is accessible by subtypes anywhere,
-     *  as well as throughout this assembly. */
-    public static final short FamORAssem  = 0x0005;
-
-    /** Specifies that the field is accessible by any member
-     *  for whom this scope is visible. */
-    public static final short Public = 0x0006;
-
-    //##########################################################################
-    //
-
-    /** Field is defined on the type (static); otherwise, it is per-instance. */
-    public static final short Static = 0x0010;
-
-    /** Field is init-only: it can be assigned during initialization and cannot be written afterwards. */
-    public static final short InitOnly = 0x0020;
-
-    /** Value is compile-time constant. */
-    public static final short Literal = 0x0040;
-
-    /** Field does not have to be serialized when the type is remoted. */
-    public static final short NotSerialized = 0x0080;
-
-    /** Field is special. */
-    public static final short SpecialName = 0x0200;
-
-    //##########################################################################
-    // Interop attributes
-
-    /** Implementation is forwarded through PInvoke */
-    public static final short PinvokeImpl = 0x2000;
-
-
-    //##########################################################################
-    // Additional flags
-
-    /** CLI provides 'special' behavior depending upon the name of the field */
-    public static final short RTSpecialName = 0x0400;
-
-    /** Field has marshalling information. */
-    public static final short HasFieldMarshal = 0x1000;
-
-    /** Field has a default value. */
-    public static final short HasDefault = (short)0x8000;
-
-    /** Field has a Relative Virtual Address (RVA). The RVA is the location
-     *  of the method body in the current image, as an address relative
-     *  to the start of the image file in which it is located. */
-    public static final short HasFieldRVA = 0x0100;
-
-    //##########################################################################
-    //
-
-    public static String toString(short attrs) {
-	StringBuffer str = new StringBuffer();
-	switch (attrs & FieldAccessMask) {
-	case CompilerControlled: str.append("compilercontrolled"); break;
-	case Private:            str.append("private"); break;
-	case FamANDAssem:        str.append("famandassem"); break;
-	case Assembly:           str.append("assembly"); break;
-	case Family:             str.append("family"); break;
-	case FamORAssem:         str.append("famorassem"); break;
-	case Public:             str.append("public"); break;
-	}
-	if ((attrs & Static) != 0) str.append(" static");
-	if ((attrs & InitOnly) != 0) str.append(" initonly");
-	if ((attrs & Literal) != 0) str.append(" literal");
-	if ((attrs & NotSerialized) != 0) str.append(" notserialized");
-	if ((attrs & SpecialName) != 0) str.append(" specialname");
-	if ((attrs & PinvokeImpl) != 0) str.append("");
-	if ((attrs & RTSpecialName) != 0) str.append(" rtspecialname");
-	if ((attrs & HasFieldMarshal) != 0) str.append(" marshal(<native type>)");
-	//if ((attrs & HasDefault) != 0) str.append(" default(???)");
-	return str.toString();
-    }
-
-    //##########################################################################
-
-    // makes the class uninstantiable
-    private FieldAttributes() {}
-
-    //##########################################################################
-
-}  // class FieldAttributes
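
A small usage sketch for the toString helper above; the expected output follows directly from the flag values and the access-mask switch in that method.

    short attrs = (short) (FieldAttributes.Public | FieldAttributes.Static | FieldAttributes.InitOnly);
    System.out.println(FieldAttributes.toString(attrs));  // prints "public static initonly"
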
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java
deleted file mode 100644
index 536a67e..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.PECustomMod;
-
-/**
- * Discovers the attributes of a field and provides access to field metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class FieldInfo extends MemberInfo implements HasCustomModifiers {
-
-    //##########################################################################
-    // public interface
-
-    public final int MemberType() { return MemberTypes.Field; }
-
-    /** Attributes associated with this field. */
-    public final short Attributes;
-
-    /** Type of the field represented by this FieldInfo object. */
-    public final Type FieldType;
-
-    /** can be null */
-    public final CustomModifier[] cmods;
-
-    protected final Object value;
-
-    public final boolean IsStatic() {
-	return (Attributes & FieldAttributes.Static)   != 0;
-    }
-
-    public final boolean IsInitOnly() {
-	return (Attributes & FieldAttributes.InitOnly) != 0;
-    }
-
-    public final boolean IsLiteral() {
- 	return (Attributes & FieldAttributes.Literal) != 0;
-
-    }
-
-    public final boolean IsPublic() {
-	return (Attributes & FieldAttributes.FieldAccessMask)
-	    == FieldAttributes.Public;
-    }
-
-    public final boolean IsPrivate() {
-	return (Attributes & FieldAttributes.FieldAccessMask)
-	    == FieldAttributes.Private;
-    }
-
-    public final boolean IsFamily() {
-	return (Attributes & FieldAttributes.FieldAccessMask)
-	    == FieldAttributes.Family;
-    }
-
-    public final boolean IsAssembly() {
-	return (Attributes & FieldAttributes.FieldAccessMask)
-	    == FieldAttributes.Assembly;
-    }
-
-    public final boolean IsFamilyOrAssembly() {
-	return (Attributes & FieldAttributes.FieldAccessMask)
-	    == FieldAttributes.FamORAssem;
-    }
-
-    public final boolean IsFamilyAndAssembly() {
-	return (Attributes & FieldAttributes.FieldAccessMask)
-	    == FieldAttributes.FamANDAssem;
-    }
-    public final boolean IsSpecialName() {
- 	return (Attributes & FieldAttributes.SpecialName) != 0;
-    }
-
-    public final boolean IsPinvokeImpl() {
- 	return (Attributes & FieldAttributes.PinvokeImpl) != 0;
-    }
-
-    public final boolean IsNotSerialized() {
- 	return (Attributes & FieldAttributes.NotSerialized) != 0;
-    }
-
-    private boolean knownVolatile  = false;
-    private boolean cachedVolatile = false;
-    public final boolean IsVolatile() {
-        if(knownVolatile) return cachedVolatile;
-        knownVolatile  = true;
-        if(cmods == null) {
-            cachedVolatile = false;
-            return cachedVolatile;
-        }
-        for (int idx = 0; idx < cmods.length; idx++) {
-            if(cmods[idx].marker == CustomModifier.VolatileMarker()) {
-                cachedVolatile = true;
-                return cachedVolatile;
-            }
-        }
-        cachedVolatile = false;
-        return cachedVolatile;
-    }
-
-    public final Type[] GetOptionalCustomModifiers () {
-        return CustomModifier.helperCustomMods(false, cmods);
-    }
-
-    public final Type[] GetRequiredCustomModifiers() {
-        return CustomModifier.helperCustomMods(true, cmods);
-    }
-
-    public String toString() {
-	return FieldAttributes.toString(Attributes) + " " +
-	    FieldType + " " + DeclaringType.FullName + "::" +  Name;
-    }
-
-    //##########################################################################
-
-    protected static final FieldInfo[] EMPTY_ARRAY = new FieldInfo[0];
-
-    /** Initializes a new instance of the FieldInfo class. */
-    protected FieldInfo(String name, Type declType,
-			int attrs, PECustomMod fieldTypeWithMods, Object value)
-    {
-        super(name, declType);
-        FieldType = fieldTypeWithMods.marked;
-        cmods = fieldTypeWithMods.cmods;
-        Attributes = (short) attrs;
-        this.value = value;
-    }
-
-    /** The constant value associated with this field, if any (may be null). */
-    public Object getValue() { return value; }
-
-    //##########################################################################
-
-}  // class FieldInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java b/src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java
deleted file mode 100644
index 6237fba..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java
+++ /dev/null
@@ -1,40 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * @author Miguel Garcia
- */
-public class GenericParamAndConstraints {
-
-    public GenericParamAndConstraints(int Number, String Name, Type[] Constraints,
-                                      boolean isInvariant, boolean isCovariant, boolean isContravariant,
-                                      boolean isReferenceType, boolean isValueType, boolean hasDefaultConstructor) {
-        this.Number = Number;
-        this.Name = Name;
-        this.Constraints = Constraints; // TODO representation for the class and new() constraints missing
-        this.isInvariant = isInvariant;
-        this.isCovariant = isCovariant;
-        this.isContravariant = isContravariant;
-        this.isReferenceType = isReferenceType;
-        this.isValueType  = isValueType;
-        this.hasDefaultConstructor = hasDefaultConstructor;
-
-    }
-
-    public final int Number;
-    public final String Name; // can be null
-    public final Type[] Constraints; // can be empty array
-    public final boolean isInvariant; // only relevant for TVars, not for an MVar
-    public final boolean isCovariant; // only relevant for TVars, not for an MVar
-    public final boolean isContravariant; // only relevant for TVars, not for an MVar
-    public final boolean isReferenceType;
-    public final boolean isValueType;
-    public final boolean hasDefaultConstructor;
-
-    public String toString() {
-        String res = Name == null ? "<NoName>" : (Name.equals("") ? "<NoName>" : Name);
-        res = res + " <: " + Constraints;
-        return res;
-    }
-
-}
-
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java b/src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java
deleted file mode 100644
index 5ead087..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-public interface HasCustomModifiers {
-
-    public Type[] GetOptionalCustomModifiers();
-
-    public Type[] GetRequiredCustomModifiers();
-
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java b/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java
deleted file mode 100644
index 9271859..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Provides custom attributes for reflection objects that support them.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public interface ICustomAttributeProvider {
-
-    //##########################################################################
-    // interface method definitions
-
-    /** Returns an array of all of the custom attributes
-     *  defined on this member, excluding named attributes,
-     * 	or an empty array if there are no custom attributes.
-     *
-     *  @param inherit - When true, look up the hierarchy chain
-     *                   for the inherited custom attribute.
-     *  @return - An array of Objects representing custom attributes,
-     *            or an empty array.
-     */
-    public Object[] GetCustomAttributes(boolean inherit);
-
-
-    /** Returns an array of custom attributes defined on this member,
-     *  identified by type, or an empty array
-     *  if there are no custom attributes of that type.
-     *
-     *  @param attributeType - The type of the custom attributes.
-     *  @param inherit - When true, look up the hierarchy chain
-     *                   for the inherited custom attribute.
-     *  @return - An array of Objects representing custom attributes,
-     *            or an empty array.
-     */
-    public Object[] GetCustomAttributes(Type attributeType, boolean inherit);
-
-
-    /** Indicates whether one or more instances of attributeType
-     *  are defined on this member.
-     *
-     *  @param attributeType - The type of the custom attributes
-     *  @param inherit - When true, look up the hierarchy chain
-     *                   for the inherited custom attribute.
-     *  @return - true if the attributeType is defined on this member;
-     *            false otherwise.
-     */
-    public boolean IsDefined(Type attributeType, boolean inherit);
-
-    //##########################################################################
-
-}  // interface ICustomAttributeProvider
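
A hypothetical usage sketch of the interface above; the method name and the attribute type are illustrative, and the Type.GetType lookup assumes the corresponding assembly is already loaded (the same assumption made elsewhere in these sources).

    static void dumpObsolete(ICustomAttributeProvider member) {
        Type obsoleteAttr = Type.GetType("System.ObsoleteAttribute");
        if (member.IsDefined(obsoleteAttr, false)) {
            // Attributes of the requested type; the inherit flag is noted as a TODO
            // in CustomAttributeProvider above.
            Object[] attrs = member.GetCustomAttributes(obsoleteAttr, false);
            System.out.println(attrs.length + " Obsolete attribute(s) on this member");
        }
    }
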
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java
deleted file mode 100644
index 65ff1b2..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * The root class of the Reflection hierarchy.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class MemberInfo extends CustomAttributeProvider {
-
-    //##########################################################################
-
-    /** The name of this member. */
-    public final String Name;
-
-    /**
-     * The class that declares this member.
-     * Note: if the MemberInfo object is a global member,
-     * (that is, it was obtained from Module.GetMethods,
-     * which returns global methods on a module), then DeclaringType
-     * will be a null reference.
-     */
-    public final Type DeclaringType;
-
-    /** An enumerated value from the MemberTypes class,
-     *  specifying a constructor, event, field, method,
-     *  property, type information, all, or custom. */
-    public abstract int MemberType();
-
-    //##########################################################################
-    // protected members
-
-    protected static final MemberInfo[] EMPTY_ARRAY = new MemberInfo[0];
-
-    protected MemberInfo(String name, Type declType) {
-	Name = name;
-	DeclaringType = declType;
-    }
-
-    //########################################################################
-
-}  // class MemberInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java b/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java
deleted file mode 100644
index 5f49ad3..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Marks each type of member that is defined as a derived class of MemberInfo.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class MemberTypes {
-
-    //##########################################################################
-
-    /** Specifies that the member is a constructor,
-     *  representing a ConstructorInfo member. */
-    public static final int Constructor = 0x01;
-
-
-    /** Specifies that the member is an event,
-     *  representing an EventInfo member. */
-    public static final int Event = 0x02;
-
-
-    /** Specifies that the member is a field,
-     *	representing a FieldInfo member. */
-    public static final int Field = 0x04;
-
-
-    /** Specifies that the member is a method,
-     *  representing a MethodInfo member. */
-    public static final int Method = 0x08;
-
-
-    /** Specifies that the member is a property,
-     *  representing a PropertyInfo member.
-     */
-    public static final int Property = 0x10;
-
-    /** Specifies that the member is a type,
-     *  representing a TypeInfo member. */
-    public static final int TypeInfo = 0x20;
-
-
-    /** Specifies that the member is a custom member type. */
-    public static final int Custom = 0x40;
-
-
-    /** Specifies that the member is a nested type,
-     *  extending MemberInfo. */
-    public static final int NestedType = 0x80;
-
-
-    /** Specifies all member types. */
-    public static final int All =
-	Constructor | Event | Field | Method | Property | TypeInfo | NestedType;
-
-
-    public static String toString(int memberType) {
-	if ((memberType & Constructor) != 0) return "Constructor";
-	if ((memberType & Event) != 0) return "Event";
-	if ((memberType & Field) != 0) return "Field";
-	if ((memberType & Method) != 0) return "Method";
-	if ((memberType & Property) != 0) return "Property";
-	if ((memberType & TypeInfo) != 0) return "TypeInfo";
-	if ((memberType & Custom) != 0) return "Custom";
-	if ((memberType & NestedType) != 0) return "NestedType";
-	return "Unknown MemberType: " + memberType;
-    }
-
-    //##########################################################################
-
-    // makes the class uninstantiable
-    private MemberTypes() {}
-
-    //##########################################################################
-
-}  // class MemberTypes
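
A tiny sketch of how these bit flags compose and how the toString helper above reports them.

    int mask = MemberTypes.Field | MemberTypes.Property;
    System.out.println((mask & MemberTypes.Field) != 0);         // true
    System.out.println(MemberTypes.toString(MemberTypes.Event)); // prints "Event"
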
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java
deleted file mode 100644
index a703c38..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/** Specifies flags for method attributes.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class MethodAttributes {
-
-    //##########################################################################
-    // Method access attributes
-
-    /** Bitmask used to retrieve accessibility information. */
-    public static final short MemberAccessMask = 0x0007;
-
-    ///** Member not referenceable*/
-    //public static final short CompilerControlled = 0x0000;
-
-    /** Indicates that the member cannot be referenced. */
-    public static final short PrivateScope = 0x0000;
-
-    /** Method is accessible only by the current class. */
-    public static final short Private = 0x0001;
-
-    /** Method is accessible to members of this type
-     *  and its derived types that are in this assembly only. */
-    public static final short FamANDAssem = 0x0002;
-
-    /** Method is accessible to any class of this assembly. */
-    public static final short Assembly = 0x0003;
-
-    /** Method is accessible only to members of this class
-     *  and its derived classes. */
-    public static final short Family = 0x0004;
-
-    /** Method is accessible to derived classes anywhere,
-     *  as well as to any class in the assembly. */
-    public static final short FamORAssem = 0x0005;
-
-    /** Method is accessible to any object for which this object is in scope. */
-    public static final short Public = 0x0006;
-
-
-    //##########################################################################
-    // Flags
-
-    /** Method is defined on the type; otherwise, it is defined per instance. */
-    public static final short Static = 0x0010;
-
-    /** Method cannot be overridden. */
-    public static final short Final = 0x0020;
-
-    /** Method is virtual. */
-    public static final short Virtual = 0x0040;
-
-    /** Method hides by name and signature; otherwise, by name only. */
-    public static final short HideBySig  = 0x0080;
-
-
-    //##########################################################################
-    // vtable attributes
-
-    /** Bitmask used to retrieve vtable attributes. */
-    public static final short VtableLayoutMask = 0x0100;
-
-    /** Method reuses existing slot in the vtable. */
-    public static final short ReuseSlot = 0x0000;
-
-
-    /** Method always gets a new slot in the vtable. */
-    public static final short NewSlot = 0x0100;
-
-
-    //##########################################################################
-    // Flags
-
-    /** Method does not provide implementation. */
-    public static final short Abstract = 0x0400;
-
-    /** Method is special. */
-    public static final short SpecialName = 0x0800;
-
-
-    //##########################################################################
-    // Interop attributes
-
-    /** Method implementation is forwarded through PInvoke. */
-    public static final short PInvokeImpl = 0x2000;
-
-    /** Reserved: shall be zero for conforming implementations.
-     *  Managed method is exported by thunk to unmanaged code. */
-    public static final short UnmanagedExport = 0x0008;
-
-
-    //##########################################################################
-    // Additional flags
-
-    /** CLI provides special behavior, depending on the name of the method. */
-    public static final short RTSpecialName = 0x1000;
-
-    /** Method has security associated with it.
-     * Reserved flag for runtime use only.
-     */
-    public static final short HasSecurity = 0x00000040;
-
-    /**
-     * Indicates that the method calls another method containing security code.
-     * Reserved flag for runtime use only.
-     */
-    public static final short RequireSecObject = 0x00004000;
-
-    /** Indicates a reserved flag for runtime use only. */
-    public static final short ReservedMask = 0x0000;
-
-
-    //##########################################################################
-
-    public static String toString(short attrs) {
-	StringBuffer str = new StringBuffer(accessFlagsToString(attrs));
-	if ((attrs & Static) != 0) str.append(" static");
-	if ((attrs & Final) != 0) str.append(" final");
-	if ((attrs & Virtual) != 0) str.append(" virtual");
-	if ((attrs & Abstract) != 0) str.append(" abstract");
-	if ((attrs & HideBySig) != 0) str.append(" hidebysig");
-	if ((attrs & NewSlot) != 0) str.append(" newslot");
-	if ((attrs & SpecialName) != 0) str.append(" specialname");
-	if ((attrs & PInvokeImpl) != 0) str.append(" pinvokeimpl(?!?)");
-	if ((attrs & RTSpecialName) != 0) str.append(" rtspecialname");
-	return str.toString();
-
-    }
-
-    public static String accessFlagsToString(short attrs) {
-	switch (attrs & MemberAccessMask) {
-	case PrivateScope: return "compilercontrolled";
-	case Private:      return "private";
-	case FamANDAssem:  return "famandassem";
-	case Assembly:     return "assembly";
-	case Family:       return "family";
-	case FamORAssem:   return "famorassem";
-	case Public:       return "public";
-	default: return "xxx";
-	}
-    }
-
-    //##########################################################################
-
-    // makes the class uninstantiable
-    private MethodAttributes() {}
-
-    //##########################################################################
-
-}  // class MethodAttributes
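
A small usage sketch for the toString helper above; the output follows from the flag values and the access-mask switch in accessFlagsToString.

    short attrs = (short) (MethodAttributes.Public | MethodAttributes.Virtual | MethodAttributes.HideBySig);
    System.out.println(MethodAttributes.toString(attrs));  // prints "public virtual hidebysig"
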
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java
deleted file mode 100644
index fe64043..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Iterator;
-
-/**
- * The common superclass of MethodInfo and ConstructorInfo
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class MethodBase extends MemberInfo {
-
-    //##########################################################################
-    // public interface
-
-    private java.util.List /* GenericParamAndConstraints */ mVars = new java.util.LinkedList();
-    private GenericParamAndConstraints[] sortedMVars = null;
-
-    public void addMVar(GenericParamAndConstraints tvarAndConstraints) {
-        sortedMVars = null;
-        mVars.add(tvarAndConstraints);
-    }
-
-    public GenericParamAndConstraints[] getSortedMVars() {
-        if(sortedMVars == null) {
-            sortedMVars = new GenericParamAndConstraints[mVars.size()];
-            for (int i = 0; i < sortedMVars.length; i ++){
-                Iterator iter = mVars.iterator();
-                while(iter.hasNext()) {
-                    GenericParamAndConstraints tvC = (GenericParamAndConstraints)iter.next();
-                    if(tvC.Number == i) {
-                        sortedMVars[i] = tvC;
-                    }
-                }
-            }
-        }
-        return sortedMVars;
-    }
-
-    public final boolean IsGeneric() {
-        return mVars.size() > 0;
-    }
-
-    /** The attributes associated with this method/constructor. */
-    public final short Attributes;
-
-    /** The calling convention of this method/constructor. */
-    public final short CallingConvention;
-
-    public abstract boolean IsConstructor();
-
-    public final boolean IsAbstract() {
-	return (Attributes & MethodAttributes.Abstract) != 0;
-    }
-
-    public final boolean IsFinal() {
-	return (Attributes& MethodAttributes.Final)    != 0;
-    }
-
-    public final boolean IsVirtual() {
-	return (Attributes& MethodAttributes.Virtual)  != 0;
-    }
-
-    public final boolean IsInstance() {
-        return !IsStatic() && !IsVirtual();
-    }
-
-    public final boolean IsStatic() {
-	return (Attributes & MethodAttributes.Static)   != 0;
-    }
-
-    public final boolean IsHideBySig() {
- 	return (Attributes & MethodAttributes.HideBySig) != 0;
-    }
-
-    public final boolean IsSpecialName() {
- 	return (Attributes & MethodAttributes.SpecialName) != 0;
-    }
-
-
-    public final boolean IsPublic() {
-	return (Attributes & MethodAttributes.MemberAccessMask)
-	    == MethodAttributes.Public;
-    }
-
-    public final boolean IsPrivate() {
-	return (Attributes & MethodAttributes.MemberAccessMask)
-	    == MethodAttributes.Private;
-    }
-
-    public final boolean IsFamily() {
-	return (Attributes & MethodAttributes.MemberAccessMask)
-	    == MethodAttributes.Family;
-    }
-
-    public final boolean IsAssembly() {
-	return (Attributes & MethodAttributes.MemberAccessMask)
-	    == MethodAttributes.Assembly;
-    }
-
-    public final boolean IsFamilyOrAssembly() {
-	return (Attributes & MethodAttributes.MemberAccessMask)
-	    == MethodAttributes.FamORAssem;
-    }
-
-    public final boolean IsFamilyAndAssembly() {
-	return (Attributes & MethodAttributes.MemberAccessMask)
-	    == MethodAttributes.FamANDAssem;
-    }
-
-    public boolean HasPtrParamOrRetType() {
-        // the override in MethodInfo checks the return type
-        ParameterInfo[] ps = GetParameters();
-        for (int i = 0; i < ps.length; i++) {
-            Type pT = ps[i].ParameterType;
-            if(pT.IsPointer()) {
-                // Type.mkPtr creates a msil.Type for a pointer type
-                return true;
-            }
-            if(pT.IsByRef() && !pT.GetElementType().CanBeTakenAddressOf()) {
-                /* TODO Cases where GenMSIL (so far) can't emit good bytecode:
-                   the type being taken address of IsArray(),  IsGeneric(), or IsTMVarUsage.
-                   For example, System.Enum declares
-                     public static bool TryParse<TEnum>(string value, out TEnum result) where TEnum : struct, new();
-                */
-                return true;
-            }
-        }
-        return false;
-    }
-
-    /** Returns the parameters of the method/constructor. */
-    public ParameterInfo[] GetParameters() {
-	return (ParameterInfo[]) params.clone();
-    }
-
-    public int GetMethodImplementationFlags() { return implAttributes; }
-
-    //##########################################################################
-
-    /** Method parameters. */
-    protected ParameterInfo[] params;
-
-    protected short implAttributes;
-
-    protected MethodBase(String name, Type declType, int attrs, Type[] paramTypes)
-    {
-	this(name, declType, attrs);
-	assert paramTypes != null;
-	params = new ParameterInfo[paramTypes.length];
-	for (int i = 0; i < params.length; i++)
-	    params[i] = new ParameterInfo(null, paramTypes[i], 0, i);
-    }
-
-    protected MethodBase(String name, Type declType, int attrs,
-			 ParameterInfo[] params)
-    {
-	this(name, declType, attrs);
-	this.params = params;
-    }
-
-    /**
-     */
-    private MethodBase(String name, Type declType, int attrs) {
-	super(name, declType);
-
-	// Fold in the implied flags before storing them in Attributes,
-	// so that they are not silently discarded.
-	if (IsConstructor()) {
-	    attrs |= MethodAttributes.SpecialName;
-	    attrs |= MethodAttributes.RTSpecialName;
-	}
-
-	Attributes = (short) attrs;
-
-	CallingConvention = (short) (CallingConventions.Standard
-	    | (IsStatic() ? (short)0 : CallingConventions.HasThis));
-    }
-
-    //##########################################################################
-    // internal methods
-
-    protected String params2String() {
-	StringBuffer s = new StringBuffer("(");
-	for (int i = 0; i < params.length; i++) {
-	    if (i > 0) s.append(", ");
-	    s.append(params[i].ParameterType);
-	}
-	s.append(")");
-	return s.toString();
-    }
-
-    //##########################################################################
-
-}  // class MethodBase
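
A note on the accessibility checks in MethodBase above: MemberAccessMask covers a
multi-bit field, so IsPublic()/IsPrivate()/etc. compare the masked value for equality,
while single-bit flags such as Static or Virtual are tested with != 0. A minimal,
self-contained sketch, assuming the standard ECMA-335 (II.23.1.10) values that the
MethodAttributes constants in this package mirror:

    class AccessMaskDemo {
        // Illustrative constants; values follow ECMA-335, not code from this repo.
        static final short MemberAccessMask = 0x0007;
        static final short Private          = 0x0001;
        static final short Public           = 0x0006;
        static final short Static           = 0x0010;

        public static void main(String[] args) {
            short attrs = (short) (Public | Static);
            // Multi-bit field: mask, then compare for equality.
            System.out.println((attrs & MemberAccessMask) == Public);  // true
            System.out.println((attrs & MemberAccessMask) == Private); // false
            // Single-bit flag: test with != 0, as IsStatic() does above.
            System.out.println((attrs & Static) != 0);                 // true
        }
    }
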
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java
deleted file mode 100644
index 8e8d879..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Method implementation attributes
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class MethodImplAttributes {
-
-    //##########################################################################
-
-    /**
-     * Specifies flags about code type. 3
-     */
-    public static final short CodeTypeMask = (short) 0x0003;
-
-    /**
-     * Specifies that the method implementation is in MSIL. 0
-     */
-    public static final short IL = (short) 0x0000;
-
-    /**
-     * Specifies that the method implementation is native. 1
-     */
-    public static final short Native = (short) 0x0001;
-
-    /**
-     * This member supports the .NET Framework infrastructure and
-     * is not intended to be used directly from your code. 2
-     */
-    public static final short OPTIL = (short) 0x0002;
-
-    /**
-     * Specifies that the method implementation is provided by the runtime. 3
-     */
-    public static final short Runtime = (short) 0x0003;
-
-
-
-    /**
-     * Specifies whether the code is managed or unmanaged. 4
-     */
-    public static final short ManagedMask = (short) 0x0004;
-
-    /**
-     * Specifies that the method implementation is managed, otherwise unmanaged.
-     */
-    public static final short Managed = (short) 0x0000;
-
-    /**
-     * Specifies that the method implementation is unmanaged, otherwise managed.
-     */
-    public static final short Unmanaged = (short) 0x0004;
-
-
-
-    /**
-     * Specifies that the method cannot be inlined. 8
-     */
-    public static final short NoInlining = (short) 0x0008;
-
-    /**
-     * Specifies that the method is not defined. 16
-     */
-    public static final short ForwardRef = (short) 0x0010;
-
-    /**
-     * Specifies that the method is single-threaded through the body.
-     * You can also use the C# lock statement or the Visual Basic
-     * Lock function for this purpose. 32
-     */
-    public static final short Synchronized = (short) 0x0020;
-
-    /**
-     * Specifies that the method signature is exported exactly as declared. 128
-     */
-    public static final short PreserveSig = (short) 0x0080;
-
-    /**
-     * Specifies an internal call. 4096
-     */
-    public static final short InternalCall = (short) 0x1000;
-
-    /**
-     * Specifies a range check value. 65535
-     */
-    public static final short MaxMethodImplVal = (short) 0xffff;
-
-    //##########################################################################
-
-    public static String toString(int implAttr) {
-	StringBuffer s = new StringBuffer();
-	switch (implAttr & CodeTypeMask) {
-	case IL: s.append("cil"); break;
-	case Native: s.append("native"); break;
-	case Runtime: s.append("runtime"); break;
-	}
-	switch (implAttr & ManagedMask) {
-	case Managed: s.append(" managed"); break;
-	case Unmanaged: s.append(" unmanaged"); break;
-	}
-	if ((implAttr & NoInlining) != 0) s.append(" noinlining");
-	if ((implAttr & ForwardRef) != 0) s.append(" forwardref");
-	if ((implAttr & Synchronized) != 0) s.append(" synchronized");
-	if ((implAttr & InternalCall) != 0) s.append(" internalcall");
-	return s.toString();
-    }
-
-    //##########################################################################
-
-} // class MethodImplAttributes
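
A worked example of the decoding done by toString above, for the flags value 0x1020:
(0x1020 & CodeTypeMask) == IL gives "cil", (0x1020 & ManagedMask) == Managed adds
" managed", and the Synchronized (0x0020) and InternalCall (0x1000) bits add
" synchronized" and " internalcall".

    // Illustrative check, using only the class shown above:
    assert MethodImplAttributes.toString(0x1020)
               .equals("cil managed synchronized internalcall");
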
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java
deleted file mode 100644
index a415e75..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Iterator;
-
-/**
- * Discovers the attributes of a method and provides access to method metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class MethodInfo extends MethodBase {
-
-    public boolean HasPtrParamOrRetType() {
-        if(ReturnType.IsByRef() && !(ReturnType.GetElementType().IsValueType())) {
-            /* A method returning ByRef won't pass peverify, so I guess this is dead code. */
-            return true;
-        }
-        if(ReturnType.IsPointer()) {
-            return true;
-        }
-        return super.HasPtrParamOrRetType();
-    }
-
-    //##########################################################################
-    // public members
-
-    public final int MemberType() { return MemberTypes.Method; }
-
-    public final boolean IsConstructor() { return false; }
-
-    /** The return type of this method.
-     */
-    public final Type ReturnType;
-
-    //##########################################################################
-    // protected members
-
-    protected static final MethodInfo[] EMPTY_ARRAY = new MethodInfo[0];
-
-    /**
-     * Constructor Initializes a new instance of the MethodInfo class.
-     */
-    protected MethodInfo(String name, Type declType,
-			 int attrs, Type returnType, Type[] paramTypes )
-    {
-	super(name, declType, attrs, paramTypes);
-	ReturnType = returnType;
-    }
-
-    protected MethodInfo(String name, Type declType,
-			 int attrs, Type returnType, ParameterInfo[] params )
-    {
-	super(name, declType, attrs, params);
-	ReturnType = returnType;
-    }
-
-    public String toString() {
- 	return MethodAttributes.toString(Attributes) + " " + ReturnType +
-	    " " + DeclaringType + "::" + Name + params2String();
-    }
-
-    //##########################################################################
-
-}  // class MethodInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Module.java b/src/msil/ch/epfl/lamp/compiler/msil/Module.java
deleted file mode 100644
index 8dd5e71..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Module.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Map;
-import java.util.HashMap;
-
-/**
- * Defines and represents a module. Get an instance of ModuleBuilder
- * by calling DefineDynamicModule.
- * A module is a portable executable file of type .dll or .exe consisting
- * of one or more classes and interfaces. There may be multiple namespaces
- * contained in a single module, and a namespace may span multiple modules.
- * One or more modules deployed as a unit compose an assembly.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class Module extends CustomAttributeProvider {
-
-    //##########################################################################
-    // public fields
-
-    /** String representing the name of the module with the path removed. */
-    public final String Name;
-
-    /** String representing the fully qualified name and path to this module. */
-    public final String FullyQualifiedName;
-
-    /** String representing the name of the module. */
-    public String ScopeName;
-
-    /** The Assembly the Module belongs to. */
-    public final Assembly Assembly;
-
-    //##########################################################################
-    // constructor
-
-    protected Module(String name, String filename,
-		     String scopeName, Assembly assembly)
-    {
-	this.Name = name;
-	this.FullyQualifiedName = filename;
-	this.ScopeName = scopeName;
-	this.Assembly = assembly;
-    }
-
-    //##########################################################################
-    // public methods
-
-    /** Returns the specified class, performing a case-sensitive search. */
-    public Type GetType(String name) {
-        initTypes();
-	return (Type) typesMap.get(name);
-    }
-
-    /**
-     * @return all the classes defined within this module.
-     */
-    public Type[] GetTypes() {
-        initTypes();
-	return (Type[]) types.clone();
-    }
-
-    /**
-     * @return the global field with the specified name.
-     */
-    public FieldInfo GetField(String name) {
-	for (int i = 0; i < fields.length; i++)
-	    if (fields[i].Name.equals(name))
-		return fields[i];
-	return null;
-    }
-
-    /**
-     * @return an array of the global fields of the module
-     */
-    public FieldInfo[] GetFields() {
-	return (FieldInfo[]) fields.clone();
-    }
-
-    /**
-     * @return - the global method with the specified name
-     */
-    public MethodInfo GetMethod(String name) {
-	for (int i = 0; i < methods.length; i++)
-	    if (methods[i].Name.equals(name))
-		return methods[i];
-	return null;
-    }
-
-    /**
-     * @return - an array of all the global methods defined in this module.
-     */
-    public MethodInfo[] GetMethods() {
-	return (MethodInfo[]) methods.clone();
-    }
-
-    /**
-     */
-    public String toString() { return Name; }
-
-    //########################################################################
-    // protected members
-
-    // the types defined in this module, keyed by type name
-    protected final Map typesMap = new HashMap();
-
-    // all the types defined in this module
-    protected Type[] types;
-
-    // the global fields of the module
-    protected FieldInfo[] fields = FieldInfo.EMPTY_ARRAY;
-
-    // the global methods of the module
-    protected MethodInfo[] methods = MethodInfo.EMPTY_ARRAY;
-
-    protected Type addType(Type type) {
-	addType(type.FullName, type);
-	Assembly.addType(type);
-	return type;
-    }
-
-    protected Type addType(String name, Type type) {
-	assert type != null;
-	typesMap.put(name, type);
-	return type;
-    }
-
-    private boolean initTypes = true;
-    protected final void initTypes() {
-        if (initTypes) {
-            loadTypes();
-            initTypes = false;
-        }
-    }
-
-    protected void loadTypes() {}
-
-    private boolean initGlobals = true;
-    protected final void initGlobals() {
-        if (initGlobals) {
-            loadGlobals();
-            initGlobals = false;
-        }
-    }
-
-    protected void loadGlobals() {}
-
-    //##########################################################################
-
-}  // class Module
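
A minimal usage sketch of the lookup API defined by Module above; the module instance
and the type and method names are hypothetical:

    // mod is a Module obtained from an already-loaded Assembly (hypothetical).
    Type t        = mod.GetType("System.Collections.ArrayList"); // case-sensitive; null if absent
    Type[] all    = mod.GetTypes();                              // defensive copy of the array
    MethodInfo gm = mod.GetMethod("SomeGlobalMethod");           // global method, or null if absent
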
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java b/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java
deleted file mode 100644
index a31db16..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-
-import java.io.File;
-
-import java.util.Map;
-import java.util.HashMap;
-
-/** Represents an assembly that resides in an actual .NET assembly file (DLL or EXE)
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-final class PEAssembly extends Assembly {
-
-    private final PEFile pefile;
-
-    private PEModule mainModule;
-
-    public PEAssembly(PEFile pefile, AssemblyName an) {
-	super(an, true);
-	this.pefile = pefile;
-	String name = pefile.ModuleDef(1).getName();
-	mainModule = new PEModule(pefile, 1, name, this);
-	addModule(name, mainModule);
-        //initModules();
-    }
-
-    protected void loadModules() {
-	File parentDir = pefile.getParentFile();
-	FileDef fd = pefile.FileDef;
-	for (int row = 1; row <= fd.rows; row++) {
-	    fd.readRow(row);
-	    String filename = fd.getName();
-	    File f = new File(parentDir, filename);
-	    PEFile pe = Assembly.getPEFile(f);
-	    if (pe == null) {
-		f = new File(filename);
-		pe = Assembly.getPEFile(f);
-		if (pe == null)
-		    continue;
-// 		throw new RuntimeException("Cannot find file " + filename +
-// 					   " referenced by assembly " + this);
-	    }
-	    String name = pe.ModuleDef(1).getName();
-	    PEModule module = new PEModule(pe, 1, name, this);
-	    addModule(name, module);
-	}
-    }
-
-    public File getFile() {
-	return pefile.getUnderlyingFile();
-    }
-
-    protected void loadCustomAttributes(Type attributeType) {
-        initModules();
-        mainModule.initAttributes(this, 1, Table.AssemblyDef.ID, attributeType);
-    }
-
-    //##########################################################################
-
-} // class PEAssembly
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java b/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
deleted file mode 100644
index 3eb22b9..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
+++ /dev/null
@@ -1,941 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.*;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-
-import ch.epfl.lamp.compiler.msil.Type;
-import ch.epfl.lamp.compiler.msil.Module;
-
-import java.io.File;
-import java.io.RandomAccessFile;
-import java.io.PrintStream;
-import java.io.IOException;
-import java.io.FileNotFoundException;
-
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.MappedByteBuffer;
-
-import java.util.Date;
-
-/**
- * A class that represents a .NET PE/COFF image.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- * @see <a href="http://www.ecma-international.org/publications/standards/Ecma-335.htm">Standard ECMA-335:  Common Language Infrastructure (CLI), 4th edition (June 2006)</a>
- */
-public class PEFile {
-
-    //##########################################################################
-
-    public static final int INT_SIZE = 4;
-
-    protected final int PE_SIGNATURE_OFFSET;
-    protected final int COFF_HEADER_OFFSET;
-    protected final int PE_HEADER_OFFSET;
-
-    protected final int numOfSections;
-    protected final int CLI_RVA;
-    protected final int CLI_Length;
-    public final int rvaMetadata;
-    public final int posMetadata;
-    protected final int numOfStreams;
-    protected final int optHeaderSize;
-
-    protected final File underlyingFile;
-    protected final RandomAccessFile file;
-    protected final MappedByteBuffer buf;
-
-    protected final PESection [] sections;
-
-    public PEStream Meta, Strings, US, Blob, GUID;
-
-    private final Table [] tables = new Table[Table.MAX_NUMBER];
-
-    public final boolean isDLL;
-
-    protected final int heapSizes;
-    public final boolean StringIsShort, BlobIsShort, GUIDIsShort;
-
-    protected PEModule pemodule = null;
-
-    //##########################################################################
-    // PEFile constructor
-
-    private static void fileFormatCheck(boolean cond, String s) {
-	if (cond)
-	    throw new RuntimeException(s);
-    }
-
-    /**
-     */
-    public PEFile(String filename) throws FileNotFoundException {
-	this.underlyingFile = new File(filename);
-	this.file = new RandomAccessFile(underlyingFile, "r");
-	FileChannel fc = file.getChannel();
-	MappedByteBuffer bb = null;
-	try {
-	    bb = fc.map(FileChannel.MapMode.READ_ONLY, 0L, fc.size());
- 	} catch (IOException e) { throw new RuntimeException(e); }
-
-    /** Ecma 335, 25 File format extensions to PE:
-     *
-     *  "Unless stated otherwise, all binary values are stored in little-endian format."
-     */
-
-	bb.order(java.nio.ByteOrder.LITTLE_ENDIAN);
-	this.buf = bb;
-
-    /** Ecma 335, 25.2.1 MS-DOS header:
-     *
-     *  "The PE format starts with an MS-DOS stub of exactly the following 128 bytes to
-     *   be placed at the front of the module."
-     *
-     *  We are only checking for MZ (Mark Zbikowski)
-     */
-
-	seek(0);
-	fileFormatCheck(readByte() != 0x4d, "Invalid PE file format: " + filename); // 'M'
-	fileFormatCheck(readByte() != 0x5a, "Invalid PE file format: " + filename); // 'Z'
-
-    /** Ecma 335, 25.2.1 MS-DOS header:
-     *
-     *  "At offset 0x3c in the DOS header is a 4-byte unsigned integer offset, lfanew,
-     *   to the PE signature (shall be "PE\0\0"), immediately followed by the PE file header."
-     */
-
-	seek(0x3c);
-	PE_SIGNATURE_OFFSET = readInt();
-	seek(PE_SIGNATURE_OFFSET);
-    // start of PE signature (a signature that is just 4 bytes long)
-	fileFormatCheck(readByte() != 0x50, "Invalid PE file format: " + filename); // 'P'
-	fileFormatCheck(readByte() != 0x45, "Invalid PE file format: " + filename); // 'E'
-    fileFormatCheck(readByte() != 0x00, "Invalid PE file format: " + filename); //  0
-    fileFormatCheck(readByte() != 0x00, "Invalid PE file format: " + filename); //  0
-
-	//trace("PE signature offset = 0x" + Table.int2hex(PE_SIGNATURE_OFFSET));
-
-	COFF_HEADER_OFFSET = PE_SIGNATURE_OFFSET + 4;
-	PE_HEADER_OFFSET = COFF_HEADER_OFFSET + 20;
-
-	seek(COFF_HEADER_OFFSET);
-
-    /* start of PE file header, Sec. 25.2.2 in Partition II  */
-	skip(2); // Machine (always 0x14c)
-    numOfSections = readShort(); // Number of sections; indicates size of the Section Table
-	Date timeStamp = new Date(readInt() * 1000L);
-	skip(2 * INT_SIZE); // skip Pointer to Symbol Table (always 0) and Number of Symbols (always 0)
-	optHeaderSize = readShort();
-	int characteristics = readShort();
-	isDLL = (characteristics & 0x2000) != 0;
-
-	seek(PE_HEADER_OFFSET + 208); // p.157, Partition II
-
- 	CLI_RVA = readInt();    // called "Data Directory Table" in Ch. 4 of Expert IL book
-	CLI_Length = readInt();
-	//trace("CLI_RVA = 0x" + Table.int2hex(CLI_RVA));
-	//trace("CLI_Length = 0x" + Table.int2hex(CLI_Length));
-
-	sections = new PESection[numOfSections];
-
-	seek(PE_HEADER_OFFSET + optHeaderSize); // go to the sections descriptors
-
-	for (int i = 0; i < numOfSections; i++) {
-	    seek(PE_HEADER_OFFSET + optHeaderSize + i * 40);
-	    sections[i] = new PESection(this);
-	    //sections[i].dump(System.out);
-	}
-
-	seek(fromRVA(CLI_RVA));
-	skip(8);
-	rvaMetadata = readInt();
-	posMetadata = fromRVA(rvaMetadata);
-	//trace("rvaMetadata = 0x" + Table.int2hex(rvaMetadata));
-	//trace("posMetadata = 0x" + Table.int2hex(posMetadata));
-
-	seek(posMetadata);
-	int magic = readInt();
-	//trace("Magic metadata signature = 0x" + Table.int2hex(magic));
-	fileFormatCheck(magic != 0x424a5342, "Invalid metadata signature!");
-	skip(8);
-
-	int strlength = readInt();
-	//trace("version name string length = " + strlength);
-	skip(strlength);
-	align(INT_SIZE, posMetadata);
-	//trace("position of flags = 0x" + Table.int2hex((int)pos()));
-	skip(2); // ignore the flags
-	numOfStreams = readShort();
-	//trace("Number of metadata streams = " + numOfStreams);
-
-	for (int i = 0; i < numOfStreams; i++) {
-	    PEStream strm = new PEStream(this);
-	    //strm.dump(System.out);
-	    if (strm.name.equals("#~")
-		|| strm.name.equals("#-"))    Meta = strm;
-	    if (strm.name.equals("#Strings")) Strings = strm;
-	    if (strm.name.equals("#US"))      US = strm;
-	    if (strm.name.equals("#Blob"))    Blob = strm;
-	    if (strm.name.equals("#GUID"))    GUID = strm;
-	}
-
-	seek(Meta.offset);
-	skip(6);
-	heapSizes = readByte();
-	StringIsShort = (heapSizes & 0x01) == 0;
-	GUIDIsShort   = (heapSizes & 0x02) == 0;
-	BlobIsShort   = (heapSizes & 0x04) == 0;
-
-	skip(1);
-	long tablesMask = readLong();
-	long nonStandardTables = tablesMask & ~Table.VALID_TABLES_MASK;
-	skip(8); //go to the list of number of rows
-	for (int i = 0; i < tables.length; i++) {
-	    tables[i] = Table.newTable
-		(this, i, ((tablesMask >> i) & 0x01) != 0 ? readInt() : 0);
-	}
-
-	initIndexSize();
-	initTableRefs();
-	// populate the tables from the CLI image file
-	long start = pos();
-	for (int i = 0; i < tables.length; i++)
-	    start = tables[i].init(start);
-
-    } // PEFile()
-
-
-    public final int[] indexSize = new int[Table.TABLE_SET_LENGTH];
-
-    private void initIndexSize() {
-	for (int i = 0; i < Table.TABLE_SET_LENGTH; i++) {
-	    indexSize[i] = 2;
-	    int[] tableSet = Table.TableSet[i];
-	    int threshold = (65536 >> Table.NoBits[i]);
-	    for (int j = 0; j < tableSet.length; j++) {
-		if (tableSet[j] >= 0) {
-		    Table t = tables[tableSet[j]];
-		    if (t.rows >= threshold) {
-			indexSize[i] = 4;
-			break;
-		    }
-		}
-	    }
-	}
-    }
-
-    protected void initModule(PEModule module) {
-	if (pemodule != null)
-	    throw new RuntimeException("File " + this
-				       + " has already been assigned module "
-				       + pemodule + "; new module is " + module);
-	this.pemodule = module;
-    }
-
-    //##########################################################################
-
-    public ModuleDef ModuleDef;
-    public ModuleDef ModuleDef(int i) {
-	ModuleDef.readRow(i);
-	return ModuleDef;
-    }
-
-    public TypeRef TypeRef;
-
-    public TypeDef TypeDef;
-    public TypeDef TypeDef(int i) {
-	TypeDef.readRow(i);
-	return TypeDef;
-    }
-
-    public FieldTrans FieldTrans;
-    public FieldTrans FieldTrans(int i) {
-	FieldTrans.readRow(i);
-	return FieldTrans;
-    }
-
-    public FieldDef FieldDef;
-    public FieldDef FieldDef(int i) {
-	FieldDef.readRow(i);
-	return FieldDef;
-    }
-
-    public MethodTrans MethodTrans;
-    public MethodTrans MethodTrans(int i) {
-	MethodTrans.readRow(i);
-	return MethodTrans;
-    }
-
-    public MethodDef MethodDef;
-    public MethodDef MethodDef(int i) { MethodDef.readRow(i); return MethodDef; }
-
-
-    public ParamDef ParamDef;
-    public ParamDef ParamDef(int i) { ParamDef.readRow(i); return ParamDef; }
-
-    public GenericParam GenericParam;
-
-    public GenericParam GenericParam(int i) {
-        GenericParam.readRow(i);
-        return GenericParam;
-    }
-
-    public MethodSpec MethodSpec;
-
-    public MethodSpec MethodSpec(int i) {
-        MethodSpec.readRow(i);
-        return MethodSpec;
-    }
-
-    public GenericParamConstraint GenericParamConstraint;
-
-    public GenericParamConstraint GenericParamConstraint(int i) {
-        GenericParamConstraint.readRow(i);
-        return GenericParamConstraint;
-    }
-
-    public InterfaceImpl InterfaceImpl;
-    public MemberRef MemberRef;
-    public Constant Constant;
-    public CustomAttribute CustomAttribute;
-    public FieldMarshal FieldMarshal;
-    public DeclSecurity DeclSecurity;
-    public ClassLayout ClassLayout;
-    public FieldLayout FieldLayout;
-    public StandAloneSig StandAloneSig;
-    public EventMap EventMap;
-    public EventDef EventDef;
-    public PropertyMap PropertyMap;
-    public PropertyDef PropertyDef;
-    public MethodSemantics MethodSemantics;
-    public MethodImpl MethodImpl;
-    public ModuleRef ModuleRef;
-    public TypeSpec TypeSpec;
-    public ImplMap ImplMap;
-    public FieldRVA FieldRVA;
-    public AssemblyDef AssemblyDef;
-    public AssemblyRef AssemblyRef;
-    public FileDef FileDef;
-    public ExportedType ExportedType;
-    public ManifestResource ManifestResource;
-    public NestedClass NestedClass;
-
-
-    private void initTableRefs() {
-	ModuleDef = (ModuleDef) getTable(Table.ModuleDef.ID);
-	TypeRef = (TypeRef) getTable(Table.TypeRef.ID);
-	TypeDef = (TypeDef) getTable(Table.TypeDef.ID);
-	FieldTrans = (FieldTrans) getTable(Table.FieldTrans.ID);
-	FieldDef = (FieldDef) getTable(Table.FieldDef.ID);
-	MethodTrans = (MethodTrans) getTable(Table.MethodTrans.ID);
-	MethodDef = (MethodDef) getTable(Table.MethodDef.ID);
-	ParamDef = (ParamDef) getTable(Table.ParamDef.ID);
-	InterfaceImpl = (InterfaceImpl) getTable(Table.InterfaceImpl.ID);
-	MemberRef = (MemberRef) getTable(Table.MemberRef.ID);
-	Constant = (Constant) getTable(Table.Constant.ID);
-	CustomAttribute = (CustomAttribute) getTable(Table.CustomAttribute.ID);
-	FieldMarshal = (FieldMarshal) getTable(Table.FieldMarshal.ID);
-	DeclSecurity = (DeclSecurity) getTable(Table.DeclSecurity.ID);
-	ClassLayout = (ClassLayout) getTable(Table.ClassLayout.ID);
-	FieldLayout = (FieldLayout) getTable(Table.FieldLayout.ID);
-	StandAloneSig = (StandAloneSig) getTable(Table.StandAloneSig.ID);
-	EventMap = (EventMap) getTable(Table.EventMap.ID);
-	EventDef = (EventDef) getTable(Table.EventDef.ID);
-	PropertyMap = (PropertyMap) getTable(Table.PropertyMap.ID);
-	PropertyDef = (PropertyDef) getTable(Table.PropertyDef.ID);
-	MethodSemantics = (MethodSemantics) getTable(Table.MethodSemantics.ID);
-	MethodImpl = (MethodImpl) getTable(Table.MethodImpl.ID);
-	ModuleRef = (ModuleRef) getTable(Table.ModuleRef.ID);
-	TypeSpec = (TypeSpec) getTable(Table.TypeSpec.ID);
-	ImplMap = (ImplMap) getTable(Table.ImplMap.ID);
-	FieldRVA = (FieldRVA) getTable(Table.FieldRVA.ID);
-	AssemblyDef = (AssemblyDef) getTable(Table.AssemblyDef.ID);
-	AssemblyRef = (AssemblyRef) getTable(Table.AssemblyRef.ID);
-	FileDef = (FileDef) getTable(Table.FileDef.ID);
-	ExportedType = (ExportedType) getTable(Table.ExportedType.ID);
-	NestedClass = (NestedClass) getTable(Table.NestedClass.ID);
-	ManifestResource =
-	    (ManifestResource) getTable(Table.ManifestResource.ID);
-        GenericParam = (GenericParam) getTable(Table.GenericParam.ID);
-        MethodSpec = (MethodSpec) getTable(Table.MethodSpec.ID);
-        GenericParamConstraint = (GenericParamConstraint) getTable(Table.GenericParamConstraint.ID);
-    }
-
-    public static String long2hex(long a) {
-	StringBuffer str = new StringBuffer("0000000000000000");
-	str.append(Long.toHexString(a));
-	int l = str.length();
-	return str.substring(l - 16, l);
-    }
-
-    public static String int2hex(int a) {
-	StringBuffer str = new StringBuffer("00000000");
-	str.append(Integer.toHexString(a));
-	int l = str.length();
-	return str.substring(l - 8, l);
-    }
-
-    public static String short2hex(int a) {
-	StringBuffer str = new StringBuffer("0000");
-	str.append(Integer.toHexString(a));
-	int l = str.length();
-	return str.substring(l - 4, l);
-    }
-
-    public static String byte2hex(int a) {
-	StringBuffer str = new StringBuffer("00");
-	str.append(Integer.toHexString(a));
-	int l = str.length();
-	return str.substring(l - 2, l);
-    }
-
-    public static String bytes2hex(byte[] buf) {
-	StringBuffer str = new StringBuffer();
-	for (int i = 0; i < buf.length; i++) {
-	    str.append(byte2hex(buf[i]));
-	    if (i < buf.length - 1)
-		str.append(" ");
-	}
-	return str.toString();
-    }
-
-    //##########################################################################
-    // filename
-
-    public File getUnderlyingFile() {
-	return underlyingFile;
-    }
-
-    /**
-     * @return the absolute path of the file
-     */
-    public String getAbsolutePath() {
-	return underlyingFile.getAbsolutePath();
-    }
-
-    /**
-     * @return the name of this file
-     */
-    public String getName() {
-	return underlyingFile.getName();
-    }
-
-    /**
-     * @return the path of the parent directory, or null if there is none
-     */
-    public String getParent() {
-	return underlyingFile.getParent();
-    }
-
-    /**
-     * @return the file representing the directory the file belongs to
-     */
-    public File getParentFile() {
-	return underlyingFile.getParentFile();
-    }
-
-    public String toString() {
-	return getAbsolutePath();
-    }
-
-    //##########################################################################
-    // file pointer manipulation methods
-
-    /** Returns the current position in the file. */
-    public int pos() {
-	return buf.position();
-    }
-
-    /** Go to the specified position in the file. */
-    public void seek(int pos) {
-	buf.position(pos);
-    }
-
-
-    /** Align the current position in the file. */
-    public void align(int base) { align(base, 0); }
-
-    /** Align the current position in a section starting at offset. */
-    public void align(int base, int offset) {
-	int p = pos() - offset;
-	seek( offset + ((p % base) == 0 ? p : (p/base + 1) * base));
-    }
-
-    /** Computes the position in the file that corresponds to the given RVA. */
-    public int fromRVA(int rva) {
-	int i;
-	for(i = 0; i < numOfSections; i++)
-	    if(sections[i].virtAddr <= rva &&
-	       rva <= (sections[i].virtAddr + sections[i].virtSize))
-		return rva - sections[i].virtAddr + sections[i].realAddr;
-	throw new RuntimeException("RVA 0x" + Integer.toHexString(rva) +
-				   " is not within this file's sections!");
-    }
-
-    /** Go to the specified RVA (Relative Virtual Address). */
-    public void gotoRVA(int rva) {
-	seek(fromRVA(rva));
-    }
-
-    /** Move forward in the file by the specified number of bytes. */
-    public void skip(int n) {
-	buf.position(buf.position() + n);
-    }
-
-    /**
-     * Returns a memory mapped little-endian buffer
-     * for the specified region of the file.
-     */
-    public MappedByteBuffer mapBuffer(long offset, int size) {
- 	try {
-	    MappedByteBuffer b = file.getChannel()
-		.map(FileChannel.MapMode.READ_ONLY, offset, size);
-	    b.order(java.nio.ByteOrder.LITTLE_ENDIAN);
-	    return b;
- 	} catch (IOException e) { throw new RuntimeException(e); }
-    }
-
-    /** Returns a little-endian view of the given region of the mapped file. */
-    public ByteBuffer getBuffer(long offset, int size) {
-	buf.mark();
-	buf.position((int)offset);
-	ByteBuffer bb = buf.slice();
-	buf.reset();
-	bb.limit(size);
-	bb.order(java.nio.ByteOrder.LITTLE_ENDIAN);
-	return bb;
-    }
-
-    //##########################################################################
-    // file read methods
-
-    /**
-     * Read bs.length bytes from the current position in the file.
-     */
-    public void read(byte[] bs) {
-	buf.get(bs);
-    }
-
-    /**
-     * Read 1-byte integer from the current position in the file.
-     */
-    public int readByte() {
-	return buf.get();
-    }
-
-    /**
-     * Read 2-byte integer from the current position in the file.
-     */
-    public int readShort() {
-	return buf.getShort();
-    }
-
-    /**
-     * Read 4-byte integer from the current position in the file.
-     */
-    public int readInt() {
-	return buf.getInt();
-    }
-
-    /**
-     * Read 8-byte integer from the current position in the file.
-     */
-    public long readLong() {
-	return buf.getLong();
-    }
-
-    /**
-     * @return the size of string indices for this file.
-     */
-    public int getStringIndexSize() {
-	return StringIsShort ? 2 : 4;
-    }
-
-    /**
-     * @return the size of GUID indices for this file.
-     */
-    public int getGUIDIndexSize() {
-	return GUIDIsShort ? 2 : 4;
-    }
-
-    /**
-     * @return the size of Blob indices for this file.
-     */
-    public int getBlobIndexSize() {
-	return BlobIsShort ? 2 : 4;
-    }
-
-    /**
-     * @return the size of an index into the given table for this file
-     * @param tableID the ID of the table
-     */
-    public int getTableIndexSize(int tableID) {
-	return tables[tableID].isShort ? 2 : 4;
-    }
-
-    /**
-     * @return the size of an index into the table set with the given ID
-     * @param tableSetID the ID of the table set
-     */
-    public int getTableSetIndexSize(int tableSetID) {
-	return indexSize[tableSetID];
-    }
-
-    /**
-     * Read a String index from the current position in the file.
-     * @return an index into the String stream
-     */
-    public int readStringIndex() {
-	return StringIsShort ? readShort() : readInt();
-    }
-
-    /**
-     * Read a GUID index from the current position in the file.
-     * @return an index into the GUID stream
-     */
-    public int readGUIDIndex() {
-	return GUIDIsShort ? readShort() : readInt();
-    }
-
-    /**
-     * Read a Blob index from the current position in the file.
-     * @return an index into the Blob stream
-     */
-    public int readBlobIndex() {
-	return BlobIsShort ? readShort() : readInt();
-    }
-
-    /** Read an entry interpreted as an index into the table with the given ID. */
-    public int readTableIndex(int tableId) {
-	return tables[tableId].isShort ? readShort() : readInt();
-    }
-
-    /***/
-    public int readTableSetIndex(int tableSetId) {
-	return indexSize[tableSetId] == 2 ? readShort() : readInt();
-    }
-
-    /**
-     * Read a string from the String stream
-     * @return the string at the given position
-     * @param pos the position of the string in the String stream
-     */
-    public String getString(int pos) {
-	String s = Strings.getString(pos);
-	return s;//.length() == 0 ? null : s;
-    }
-
-    /**
-     * Read a string from the US (User Strings) stream
-     * @return the string at the given position
-     * @param pos the position of the string in the US stream
-     */
-    public String getUString(int pos) {
-	return US.getString(pos);
-    }
-
-    /**
-     * Read a blob from the Blob Stream
-     * @return the blob at the given position
-     * @param pos the position of the blob in the Blob stream
-     */
-    public byte[] getBlob(int pos) {
-	return Blob.getBlob(pos);
-    }
-
-    /***/
-    public Sig getSignature(int pos) {
-	//return new Sig(getBlob(pos));
-	return Blob.getSignature(pos);
-    }
-
-    /***/
-    public byte[] getGUID(int pos) {
-	return GUID.getGUID(pos);
-    }
-
-    /**
-     * @return the table with the corresponding ID.
-     */
-    public final Table getTable(int tableID) {
-	return tables[tableID];
-    }
-
-    //##########################################################################
-
-    /***/
-    void trace(String msg) {
-	System.out.println("[trace] " + msg);
-    }
-
-    //##########################################################################
-
-    public Sig newSignature(ByteBuffer buf) {
-	return new Sig(buf);
-    }
-
-    /**
-     */
-    public class Sig implements Signature {
-
-	//######################################################################
-	// instance members
-
-	protected final ByteBuffer buf;
-	protected final int pos;
-	protected final int length;
-
-	public Sig(ByteBuffer buf) {
-	    this.buf = buf;
-	    //int tmpPos = buf.position();
-	    length = decodeInt();
-	    this.pos = buf.position();
-	}
-
-	public String toString() {
-	    StringBuffer b = new StringBuffer("(");
-        int savedPos = buf.position();
-	    reset();
-	    for (int i = 0; i < length; i++) {
-		b.append(byte2hex(readByte()));
-		if (i < length - 1)
-		    b.append(" ");
-	    }
-        buf.position(savedPos);
-	    return b.append(")").toString();
-	}
-
-	public Sig reset() { buf.position(pos); return this; }
-
-	public int pos() { return buf.position() - pos; }
-
-	/** @return the byte at the current position in the signature Blob.
-	 *  Stay at the same position
-	 */
-	public int getByte() {
-	    return (buf.get(buf.position()) + 0x100) & 0xff;
-	}
-
-	/** @return the byte at the current position in the signature Blob.
-	 *  Move to the next byte.
-	 */
-	public int readByte() { return (buf.get() + 0x100) & 0xff; }
-
-	/** Skip the current byte if equal to the given value. */
-	public void skipByte(int b) { if (b == getByte()) buf.get(); }
-
-	/** Decodes an integer from the signature Blob.
-	 *  @return the decoded integer
-	 */
-	public int decodeInt() {
-	    int res = readByte();
-	    if ((res & 0x80) != 0) {
-		res = ((res & 0x7f) << 8) | readByte();
-		if ((res & 0x4000) != 0)
-		    res = ((res & 0x3fff)<<16) | (readByte()<<8) | readByte();
-	    }
-	    return res;
-	}
-
-	/** @return - the type encoded at the current position in the signature
-         *         according to 23.2.12
-	 */
-	public Type decodeType() {
-	    try { return decodeType0(); }
-	    catch (RuntimeException e) {
-		System.out.println("" + pos() + "@" + this);
-		throw e;
-	    }
-	}
-
-	public Type decodeType0() {
-	    Type type = null;
-	    int desc = readByte();
-	    switch (desc) {
-	    case ELEMENT_TYPE_BOOLEAN:type = Type.GetType("System.Boolean"); break;
-	    case ELEMENT_TYPE_CHAR:   type = Type.GetType("System.Char"); break;
-	    case ELEMENT_TYPE_I1:     type = Type.GetType("System.SByte"); break;
-	    case ELEMENT_TYPE_U1:     type = Type.GetType("System.Byte"); break;
-	    case ELEMENT_TYPE_I2:     type = Type.GetType("System.Int16"); break;
-	    case ELEMENT_TYPE_U2:     type = Type.GetType("System.UInt16"); break;
-	    case ELEMENT_TYPE_I4:     type = Type.GetType("System.Int32"); break;
-	    case ELEMENT_TYPE_U4:     type = Type.GetType("System.UInt32"); break;
-	    case ELEMENT_TYPE_I8:     type = Type.GetType("System.Int64"); break;
-	    case ELEMENT_TYPE_U8:     type = Type.GetType("System.UInt64"); break;
-	    case ELEMENT_TYPE_R4:     type = Type.GetType("System.Single"); break;
-	    case ELEMENT_TYPE_R8:     type = Type.GetType("System.Double"); break;
-	    case ELEMENT_TYPE_OBJECT: type = Type.GetType("System.Object"); break;
-	    case ELEMENT_TYPE_STRING: type = Type.GetType("System.String"); break;
-	    case ELEMENT_TYPE_I:      type = Type.GetType("System.IntPtr"); break;
-	    case ELEMENT_TYPE_U:      type = Type.GetType("System.UIntPtr"); break;
-	    case ELEMENT_TYPE_PTR:        // Followed by <type> token.
-		if (getByte() == ELEMENT_TYPE_VOID) {
-		    readByte();
-		    type = Type.mkPtr(Type.GetType("System.Void"));
-		} else type = Type.mkPtr(decodeType());
-		break;
-        case ELEMENT_TYPE_BYREF:      /* although BYREF is not listed in 23.2.12. as possible alternative, this method is also called when parsing the signatures of a method param and a method return, which do allow for BYREF */
-            type = Type.mkByRef(decodeType());
-            break;
-        case ELEMENT_TYPE_VALUETYPE:  // Followed by TypeDefOrRefEncoded
-            assert true;              // no-op; intentional fall-through to ELEMENT_TYPE_CLASS below
-	    case ELEMENT_TYPE_CLASS:
-		// Followed by <type> token
-		type = pemodule.getTypeDefOrRef(decodeInt());
-		if (type == null) throw new RuntimeException();
-		break;
-
-	    case ELEMENT_TYPE_SZARRAY:    // Single-dim array with 0 lower bound.
-		skipCustomMods();
-		type = Type.mkArray(decodeType(), 1);
-		break;
-	    case ELEMENT_TYPE_ARRAY:
-		// <type> <rank> <boundsCount> <bound1> ... <loCount> <lo1> ...
-                    // ArrayShape defined in 23.2.13 ArrayShape
-		Type elem = decodeType();
-		int rank = decodeInt();
-		int numSizes = decodeInt();
-		for (int i = 0; i < numSizes; i++)
-            decodeInt(); // TODO don't ignore
-		int numLoBounds = decodeInt();
-		for (int i = 0; i < numLoBounds; i++)
-            decodeInt(); // TODO don't ignore
-		type = Type.mkArray(elem, rank);
-		break;
-
-        // a grammar production from 23.2.12 Type
-        // GENERICINST (CLASS | VALUETYPE) TypeDefOrRefEncoded GenArgCount Type*
-        case ELEMENT_TYPE_GENERICINST:
-            int b = readByte();
-            /*- TODO don't ignore b as done above. Should .NET valuetypes be represented as Scala case classes? */
-            Type instantiatedType = pemodule.getTypeDefOrRef(decodeInt());
-            int numberOfTypeArgs = decodeInt();
-            Type[] typeArgs = new Type[numberOfTypeArgs];
-            for (int iarg = 0; iarg < numberOfTypeArgs; iarg++) {
-                typeArgs[iarg] = decodeType();
-            }
-            type = new ConstructedType(instantiatedType, typeArgs);
-            break;
-
-        // another grammar production from 23.2.12 Type
-        // ELEMENT_TYPE_VAR number The number non-terminal following MVAR
-        // or VAR is an unsigned integer value (compressed).
-        /* See also duplicate code in PEModule.java  */
-        case ELEMENT_TYPE_VAR:
-            int typeArgAsZeroBased = decodeInt();
-            type = new Type.TMVarUsage(typeArgAsZeroBased, true);
-            break;
-
-        // another grammar production from 23.2.12 Type
-        // ELEMENT_TYPE_MVAR number The number non-terminal following MVAR
-        // or VAR is an unsigned integer value (compressed).
-        /* See also duplicate code in PEModule.java  */
-        case ELEMENT_TYPE_MVAR:
-            typeArgAsZeroBased = decodeInt();
-            type = new Type.TMVarUsage(typeArgAsZeroBased, false);
-            break;
-
-	    case ELEMENT_TYPE_FNPTR:
-             // Followed by MethodDefSig or MethodRefSig.
-	    case ELEMENT_TYPE_END:
-		// Marks end of a list
-	    case ELEMENT_TYPE_CMOD_REQD:
-		// Required modifier : followed by a TypeDef or TypeRef token.
-	    case ELEMENT_TYPE_CMOD_OPT:
-		// Optional modifier : followed by a TypeDef or TypeRef token.
-	    case ELEMENT_TYPE_INTERNAL:
-		// Implemented within the CLI.
-	    case ELEMENT_TYPE_MODIFIER:
-		// Or'd with following element types.
-	    case ELEMENT_TYPE_SENTINEL:
-		// Sentinel for varargs method signature.
-	    case ELEMENT_TYPE_PINNED:
-		// Denotes a local variable that points at a pinned object.
-	    default:
-		throw new RuntimeException(byte2hex(desc) +
-					   "@" + pos() + " in " + this);
-
-	    }
-	    if (type == null) throw new RuntimeException();
-	    return type;
-    } // decodeType0()
-
-	public PECustomMod decodeFieldType() {
-	    skipByte(FIELD); // 0x06
-	    CustomModifier[] cmods = getCustomMods();
-        Type fieldType = decodeType();
-	    return new PECustomMod(fieldType, cmods);
-	}
-
-	/** decodes the return type of a method signature (22.2.11). */
-	public Type decodeRetType() {
-	    skipCustomMods();
-	    switch (getByte()) {
-	    case ELEMENT_TYPE_VOID:
-		readByte();
-		return Type.GetType("System.Void");
-	    case ELEMENT_TYPE_TYPEDBYREF:
-		return Type.GetType("System.TypedReference");
-	    case ELEMENT_TYPE_BYREF:
-		return decodeType();
-	    default:
-		return decodeType();
-	    }
-	}
-
-	public Type decodeParamType() {
-	    skipCustomMods();
-	    switch (getByte()) {
-	    case ELEMENT_TYPE_BYREF:
-		    return decodeType();
-	    case ELEMENT_TYPE_TYPEDBYREF:
-		    return Type.GetType("System.TypedReference");
-	    default:
-		    return decodeType();
-	    }
-	}
-
-	public void skipCustomMods() {
-	    while (getByte() == ELEMENT_TYPE_CMOD_OPT      /* 0x20 */
-		   || getByte() == ELEMENT_TYPE_CMOD_REQD  /* 0x1f */ )
-	    {
-		boolean isREQD = (getByte() == ELEMENT_TYPE_CMOD_REQD); // 0x1f
-		// skip the tag (23.2.7)
-		readByte();
-		// skip the TypeDefOrRefEncoded (23.2.8)
-		Type ignored = pemodule.getTypeDefOrRef(decodeInt());
-		if (isREQD) {
-		    // System.err.println("ELEMENT_TYPE_CMOD_REQD: " + ignored);
-		    // throw new RuntimeException("Required CMOD: " + ignored);
-		}
-	    }
-	}
-
-    /**
-     * @see CustomModifier
-     */
-	public CustomModifier[] getCustomMods() {
-      java.util.List/*<CustomModifier>*/ cmods = new java.util.LinkedList();
-      while (getByte() == ELEMENT_TYPE_CMOD_OPT || getByte() == ELEMENT_TYPE_CMOD_REQD) {
-        boolean isReqd = (getByte() == ELEMENT_TYPE_CMOD_REQD);
-        readByte(); // tag 23.2.7
-        Type t = pemodule.getTypeDefOrRef(decodeInt()); // TypeDefOrRefEncoded (23.2.8)
-        cmods.add(new CustomModifier(isReqd, t));
-      }
-      CustomModifier[] res = (CustomModifier[])cmods.toArray(new CustomModifier[0]);
-      return res;
-	}
-
-    //######################################################################
-
-    }  // class Sig
-
-    //##########################################################################
-
-}  // class PEFile
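
For reference, a standalone sketch of the compressed unsigned-integer format
(ECMA-335 II.23.2) that Sig.decodeInt above reverses; the helper name is illustrative:

    // Decodes one compressed unsigned integer from a buffer, mirroring Sig.decodeInt.
    static int decodeCompressedUInt(java.nio.ByteBuffer b) {
        int x = b.get() & 0xff;
        if ((x & 0x80) == 0) return x;                        // 1 byte:  0xxxxxxx
        if ((x & 0x40) == 0)                                  // 2 bytes: 10xxxxxx xxxxxxxx
            return ((x & 0x3f) << 8) | (b.get() & 0xff);
        return ((x & 0x1f) << 24) | ((b.get() & 0xff) << 16)  // 4 bytes: 110xxxxx + 3 bytes
             | ((b.get() & 0xff) << 8) | (b.get() & 0xff);
    }

Spec examples: 0x03 decodes to 3, 0x80 0x80 to 0x80, and 0xC0 0x00 0x40 0x00 to 0x4000.
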
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java b/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java
deleted file mode 100644
index cb8cd8f..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java
+++ /dev/null
@@ -1,456 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-import ch.epfl.lamp.compiler.msil.PEFile.Sig;
-import ch.epfl.lamp.compiler.msil.util.Signature;
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-
-import java.nio.ByteBuffer;
-
-/** Represents a module corresponding to a PE/COFF file
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-final class PEModule extends Module {
-
-    //##########################################################################
-
-    protected final PEFile pefile;
-
-    private final int definingRow;
-
-    private Type[] typeRefs = null;
-
-    protected PEModule(PEFile pefile, int definingRow, String scopeName,
-		       Assembly assem)
-    {
-	super(pefile.getName(), pefile.getAbsolutePath(), scopeName, assem);
-	this.pefile = pefile;
-        this.definingRow = definingRow;
-	pefile.initModule(this);
-	pefile.TypeDef.load(); // load into memory
-	//loadTypes();
-	//pefile.FieldDef.load();
-	//pefile.MethodDef.load();
-	loadGlobals();
-    }
-
-    //##########################################################################
-
-    public Type GetType(String typeName) {
-        initTypes();
-	Object o = typesMap.get(typeName);
-	if (o == null) {
-	    //System.out.println("PEModule.GetType(): Unable to find type "
-	    //                   + typeName + " int module " + this);
-	    return null;
-	}
-	return o instanceof Type ? (Type)o
-	    : getTypeDef(((Integer)o).intValue());
-    }
-
-
-    /** Load information about the types defined in this module.
-     */
-    protected void loadTypes() {
-	typeRefs = new Type[pefile.TypeRef.rows];
-	final int nbTypes = pefile.TypeDef.rows;
-	for (int row = 2; row <= nbTypes; row++) {
-	    String name = pefile.TypeDef(row).getFullName();
-	    typesMap.put(name, new Integer(row));
-	}
-	this.types = new Type[nbTypes - 1];
-	for (int row = 2; row <= nbTypes; row++) {
-	    getTypeDef(row);
-	}
-    }
-
-    /** Return the type defined at the given row in the TypeDef table.
-     */
-    Type getTypeDef(int row) {
-	if (this.types[row - 2] != null)
-	    return this.types[row - 2];
-
-	TypeDef type = pefile.TypeDef(row);
-	int attrs = type.Flags;
-	String name = type.getFullName();
-
-	Type declType = null;
-	if (TypeAttributes.isNested(attrs)) {
-	    for (int i = 1; i <= pefile.NestedClass.rows; i++) {
-		pefile.NestedClass.readRow(i);
-		if (pefile.NestedClass.NestedClass == row)
-		    declType = getTypeDef
-			(pefile.NestedClass.EnclosingClass);
-	    }
-	}
-	Type t = new PEType
-            (this, attrs, name, declType, Type.AuxAttr.None, pefile, row);
-	types[row - 2] = t;
-	addType(t);
-        int[] tvarIdxes = pefile.GenericParam.getTVarIdxes(row);
-        // if(tvarIdxes.length > 0) { System.out.println("Type: " + t); }
-        for(int i = 0; i < tvarIdxes.length; i++) {
-            GenericParamAndConstraints tvarAndConstraints = getTypeConstraints(tvarIdxes[i]);
-            // add tvarAndConstraints as i-th TVar in t
-            t.addTVar(tvarAndConstraints);
-        }
-	return t;
-    }
-
-    public GenericParamAndConstraints getTypeConstraints(int genParamIdx) {
-        int tvarNumber = pefile.GenericParam(genParamIdx).Number;
-        // tvarName can be null
-        String tvarName = pefile.GenericParam.getName();
-        boolean isInvariant = pefile.GenericParam.isInvariant();
-        boolean isCovariant = pefile.GenericParam.isCovariant();
-        boolean isContravariant = pefile.GenericParam.isContravariant();
-        boolean isReferenceType = pefile.GenericParam.isReferenceType();
-        boolean isValueType = pefile.GenericParam.isValueType();
-        boolean hasDefaultConstructor = pefile.GenericParam.hasDefaultConstructor();
-        // grab constraints
-        int[] TypeDefOrRefIdxes = pefile.GenericParamConstraint.getTypeDefOrRefIdxes(genParamIdx);
-        Type[] tCtrs = new Type[TypeDefOrRefIdxes.length];
-        for(int i = 0; i < TypeDefOrRefIdxes.length; i++) {
-            Type tConstraint = getTypeDefOrRef(TypeDefOrRefIdxes[i]);
-            tCtrs[i] = tConstraint;
-            // System.out.println("\t\tConstraint: " + tConstraint);
-        }
-        GenericParamAndConstraints res = new GenericParamAndConstraints(tvarNumber, tvarName, tCtrs,
-                isInvariant, isCovariant, isContravariant,
-                isReferenceType, isValueType, hasDefaultConstructor);
-        return res;
-    }
-
-    /**
-     * Load the description of the module-global fields and methods.
-     */
-    protected void loadGlobals() {
-	//TODO:
-    }
-
-    protected void loadCustomAttributes(Type attributeType) {
-        initAttributes(this, 1, Table.ModuleDef.ID, attributeType);
-    }
-
-    /** Return the type referenced by the given row in the TypeRef table.
-     */
-    Type getTypeRef(int row) {
-        return getTypeRef(row, null);
-    }
-
-    /** Return the type referenced by the given row in the TypeRef table
-     *  only if it resides in the given assembly.
-     *  <i>Used by initCustomAttributes to avoid unnecessary loading
-     *  of referenced assemblies.</i>
-     */
-    Type getTypeRef(int row, Assembly inAssembly) {
-	Type type = typeRefs[row - 1];
-	if (type != null)
-	    return type;
-
-	Table.TypeRef tr = pefile.TypeRef;
-	tr.readRow(row);
-	int tableId = Table.getTableId(Table._ResolutionScope,
-				       tr.ResolutionScope);
-	int refRow = tr.ResolutionScope >> Table.NoBits[Table._ResolutionScope];
-	final String typeName = tr.getFullName();
-	pefile.getTable(tableId).readRow(refRow);
-	switch (tableId) {
-	case AssemblyRef.ID:
-	    String name = pefile.AssemblyRef.getName();
-            if (inAssembly != null && !inAssembly.GetName().Name.equals(name))
-                return null;
-            Assembly assem = getAssembly(name);
-	    type = assem.GetType(typeName);
-	    if (type == null) {
-                // HACK: the IKVM.OpenJDK.Core assembly is compiled against mscorlib.dll v2.0
-                // The MSIL library cannot parse the v2.0 mscorlib because of generics, so we
-                // use the v1.0
-                // However, the java.io.FileDescriptor.FlushFileBuffers method uses a type
-                // Microsoft.Win32.SafeHandles.SafeFileHandle, which only exists in mscorlib
-                // v2.0
-                // For now, just return Object (fine as long as we don't use that method).
-                Assembly asmb = getAssembly("mscorlib");
-                type = asmb.GetType("System.Object");
-		//throw new RuntimeException("Failed to locate type " +
-                                           //typeName + " in assembly " + assem);
-	    }
-	    break;
-	case ModuleDef.ID:
-	    assert refRow == 1;
-	    type = this.GetType(typeName);
-	    //assert type != null;
-	    break;
-	case TypeRef.ID:
-        Type nestingType = getTypeRef(refRow);
-        String nestedName = typeName;
-	    type = nestingType.GetNestedType(nestedName);
-	    break;
-	case ModuleRef.ID:
-	    type = getAssembly(pefile.ModuleRef.getName()).GetType(typeName);
-	    break;
-	default:
-	    throw new RuntimeException(refRow + "@" + pefile.getTable(tableId).getTableName()/* PEFile.byte2hex(tableId)*/);
-	}
-	if (typeRefs[row - 1] != null)
-	    System.out.println("TypeRef[" + PEFile.short2hex(row) + "] " +
-			       "changing type " + typeRefs[row - 1] +
-			       " for type " + type);
-	typeRefs[row - 1] = type;
-	assert type != null : "Couldn't find type " + typeName;
-	return type;
-    }
-
-    private Assembly getAssembly(String name) {
-        Assembly assem = Assembly.getAssembly(name);
-        if (assem != null)
-            return assem;
-        java.io.File dir = pefile.getParentFile();
-        assem = Assembly.LoadFrom(dir, name);
-        if (assem != null)
-            return assem;
-        try {
-            dir = pefile.getUnderlyingFile().getCanonicalFile().getParentFile();
-        } catch (java.io.IOException e) {
-            throw new RuntimeException(e);
-        }
-        assem = Assembly.LoadFrom(dir, name);
-        if (assem != null)
-            return assem;
-        throw new RuntimeException("Cannot find assembly: " + name);
-
-    }
-
-    /** Return the type corresponding to TypeDefOrRef coded index.
-     *  @param index - TypeDefOrRef coded index according to 23.2.6.
-     */
-    public Type getTypeDefOrRef(int index) {
-	int tableId = Table.getTableId(Table._TypeDefOrRef, index);
-	int row = index >> Table.NoBits[Table._TypeDefOrRef];
-	Type type = null;
-	switch (tableId) {
-	case Table.TypeDef.ID:
-	    type = getTypeDef(row);
-	    break;
-	case Table.TypeRef.ID:
-	    return getTypeRef(row);
-	case Table.TypeSpec.ID:
-                Table.TypeSpec ts = pefile.TypeSpec;
-                ts.readRow(row);
-                int posInBlobStream = ts.Signature;
-                byte[] blobArrWithLengthStripped = pefile.Blob.getBlob(posInBlobStream);
-                byte[] compressedUInt = compressUInt(blobArrWithLengthStripped.length);
-                byte[] byteArr = new byte[blobArrWithLengthStripped.length + compressedUInt.length];
-                System.arraycopy(compressedUInt, 0, byteArr, 0, compressedUInt.length);
-                System.arraycopy(blobArrWithLengthStripped, 0, byteArr, compressedUInt.length, blobArrWithLengthStripped.length);
-                ByteBuffer buf = ByteBuffer.wrap(byteArr);
-                Sig sig = pefile.new Sig(buf);
-                int desc = sig.readByte();
-
-                switch (desc) {
-
-                    // GENERICINST (CLASS | VALUETYPE) TypeDefOrRefEncoded GenArgCount Type*
-                    case Signature.ELEMENT_TYPE_GENERICINST:      // i.e. 0x15
-                        int b = sig.readByte(); // i.e. (0x12 | 0x11)
-                        /* TODO don't ignore b as done above */
-                        Type instantiatedType = getTypeDefOrRef(sig.decodeInt());  // TypeDefOrRefEncoded
-                        int numberOfTypeArgs = sig.decodeInt();    // GenArgCount
-                        Type[] typeArgs = new Type[numberOfTypeArgs];
-                        for (int iarg = 0; iarg < numberOfTypeArgs; iarg++) {
-                            typeArgs[iarg] = sig.decodeType();       // Type*
-                        }
-                        type = new ConstructedType(instantiatedType, typeArgs);
-                        break;
-
-                    /* Miguel says: Actually the following grammar rule production is not among those for a TypeSpecBlob
-                       but I've found it in assemblies compiled from C# 3.0.
-                       See also duplicate code in PEFile.java */
-                    case Signature.ELEMENT_TYPE_VAR:
-                        int typeArgAsZeroBased = sig.decodeInt();
-                        type = new Type.TMVarUsage(typeArgAsZeroBased, true);
-                        break;
-
-                    /* Miguel says: Actually the following grammar rule production is not among those for a TypeSpecBlob
-                       but I've found it in assemblies compiled from C# 3.0.
-                       See also duplicate code in PEFile.java */
-                    case Signature.ELEMENT_TYPE_MVAR:
-                        typeArgAsZeroBased = sig.decodeInt();
-                        type = new Type.TMVarUsage(typeArgAsZeroBased, false);
-                        break;
-
-                    case Signature.ELEMENT_TYPE_SZARRAY:    // Single-dim array with 0 lower bound.
-                        sig.skipCustomMods();
-                        type = Type.mkArray(sig.decodeType(), 1);
-                        break;
-
-                    case Signature.ELEMENT_TYPE_ARRAY:
-                        // <type> <rank> <boundsCount> <bound1> ... <loCount> <lo1> ...
-                        // ArrayShape defined in 23.2.13 ArrayShape
-                        Type elem = sig.decodeType();
-                        int rank = sig.decodeInt();
-                        int numSizes = sig.decodeInt();
-                        for (int i = 0; i < numSizes; i++)
-                            sig.decodeInt(); // TODO don't ignore
-                        int numLoBounds = sig.decodeInt();
-                        for (int i = 0; i < numLoBounds; i++)
-                            sig.decodeInt(); // TODO don't ignore
-                        type = Type.mkArray(elem, rank);
-                        break;
-
-                    default:
-                        // TODO remaining grammar productions in 23.2.14 are for PTR and FNPTR only
-                        throw new RuntimeException("PEModule.getTypeDefOrRef(): TypeSpec");
-                }
-                break;
-	default:
-	    throw new RuntimeException("PEModule.getTypeDefOrRef(): oops!");
-	}
-	return type;
-    }
-
-    private byte[] compressUInt(int u) {
-        // 23.2 in Partition II
-        // TODO add tests based on the examples in 23.2 in Partition II
-        // the CCI implementation is WriteCompressedUInt
-
-        /* informal discussion at http://www.cnblogs.com/AndersLiu/archive/2010/02/09/en-compressed-integer-in-metadata.html  */
-        if (u <= 127 && 0 <= u) {
-            return new byte[]{(byte) u};
-        } else if (u > 127 && u <= 0x3FFF) { // 2^14 - 1 ('^' is XOR in Java, not exponentiation)
-            byte loByte = (byte)(u & 0xff);
-            byte hiByte = (byte)((u >> 8) | 0x80);
-            byte[] res = new byte[] { hiByte, loByte };
-            return res;
-        } else {
-            byte b0 = (byte)(u & 0xff);
-            byte b1 = (byte)((u & 0xff00)>>8);
-            byte b2 = (byte)((u & 0xff0000)>>16);
-            byte b3 = (byte)((u >> 24)|0xc0);
-            byte[] res = new byte[] { b3, b2, b1, b0 };
-            return res;
-        }
-    }
-
-    /**
-     * Returns the method defined at the given row of the MethodDef table
-     *  by looking up the type that defines the method.
-     */
-    MethodBase getMethod(int row) {
-	for (int i = 0; i < types.length; i++) {
-	    PEType type = (PEType)types[i];
-	    if ((type.methodListBeg <= row) && (row < type.methodListEnd)) {
-		type.initMethods();
-		return type.methoddefs[row - type.methodListBeg];
-	    }
-	}
-	throw new RuntimeException("In module " + this
-				   + ": cannot find type defining method 0x"
-				   + PEFile.int2hex(row));
-    }
-
-    /** Returns the member referenced by the given row of the MemberRef table.
-     */
-    protected MemberInfo getMemberRef(int row) {
-        return getMemberRef(row, null);
-    }
-
-    /** Returns the member referenced by the given row of the MemberRef table
-     *  if defined in the given assembly.
-     *  <i>Used by initCustomAttributes to avoid unnecessary loading of
-     *  referenced assemblies</i>
-     */
-    protected MemberInfo getMemberRef(int row, Assembly inAssembly) {
-	MemberInfo member = null;
-	MemberRef mref = pefile.MemberRef;
-	mref.readRow(row);
-	int mtbl = Table.getTableId(Table._MemberRefParent, mref.Class);
-	int mind = Table.getTableIndex(Table._MemberRefParent, mref.Class);
-	switch (mtbl) {
-	case TypeRef.ID:
-	    Type type = getTypeRef(mind, inAssembly);
-            if (type == null)
-                return null;
-            Sig sig = mref.getSignature();
-            int callconv = sig.readByte(); // should be 0x20
-            int paramCount = sig.decodeInt();
-	    //sig.skipByte(Signature.ELEMENT_TYPE_BYREF); //from MethodDef
-	    Type retType = sig.decodeRetType();
-	    Type[] paramType = new Type[paramCount];
-	    for (int i = 0; i < paramCount; i++)
-		paramType[i] = sig.decodeParamType();
-
-            String memberName = mref.getName();
-            if (memberName.equals(ConstructorInfo.CTOR) ||
-                memberName.equals(ConstructorInfo.CCTOR))
-            {
-                member = type.GetConstructor(paramType);
-            } else {
-                member = type.GetMethod(memberName, paramType);
-            }
-            assert member != null : type + "::" + memberName;
-	    break;
-	case ModuleRef.ID:
-	case MethodDef.ID:
-	case TypeSpec.ID:
-	    throw new RuntimeException("initCustomAttributes: "
-                                       + pefile.getTable(mtbl).getTableName());
-	}
-	return member;
-    }
-
-    protected void initCustomAttributes(Type attributeType) {
-        initAttributes(this, definingRow, Table.ModuleDef.ID, attributeType);
-    }
-
-    // explicitly only package-visible
-    void initAttributes(CustomAttributeProvider cap, int definingRow,
-                         int sourceTableId, Type attributeType)
-    {
-	int parentIndex = Table.encodeIndex(definingRow,
-                                            Table._HasCustomAttribute,
-                                            sourceTableId);
-	Table.CustomAttribute attrs = pefile.CustomAttribute;
-	for (int row = 1; row <= attrs.rows; row++) {
-            ConstructorInfo attrConstr = null;
-	    attrs.readRow(row);
-	    if (attrs.Parent == parentIndex) {
-		int tableId = Table.getTableId(Table._CustomAttributeType,
-					       attrs.Type);
-		int ind = Table.getTableIndex(Table._CustomAttributeType,
-					      attrs.Type);
-		switch (tableId) {
-		case MethodDef.ID:
-		    attrConstr = (ConstructorInfo)this.getMethod(ind);
-		    break;
-		case MemberRef.ID:
-		    //System.out.println(PEFile.short2hex(ind) + "@MemberRef");
-                    Assembly attrAssem =
-                        attributeType == null ? null : attributeType.Assembly();
-		    MemberInfo mi = this.getMemberRef(ind, attrAssem);
-                    if (mi != null) {
-                        assert mi instanceof ConstructorInfo
-                            : "Expected ConstructorInfo; found " + mi;
-                        attrConstr = (ConstructorInfo)mi;
-                    }
-		    break;
-		default:
-		    throw new RuntimeException();
-		}
-                if (attrConstr != null
-                    && (attrConstr.DeclaringType == attributeType
-                        || attributeType == null))
-                    cap.addCustomAttribute(attrConstr, attrs.getValue());
-	    }
-	}
-    }
-
-    //##########################################################################
-
-} // class PEModule
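
For reference, compressUInt above mirrors the compressed unsigned integer
format of ECMA-335 Partition II, 23.2: one byte for values up to 0x7F, two
bytes (high bits 10) up to 0x3FFF, and four bytes (high bits 110) up to
0x1FFFFFFF. A minimal standalone sketch of that encoding, exercised with the
worked examples from the spec (class and method names here are illustrative
only, not part of the msil library):

    // Sketch of ECMA-335 Partition II, 23.2 compressed unsigned integers.
    public class CompressedUIntSketch {
        static byte[] encode(int u) {
            if (u >= 0 && u <= 0x7F)          // 1 byte, high bit 0
                return new byte[] { (byte) u };
            if (u <= 0x3FFF)                  // 2 bytes, high bits 10
                return new byte[] { (byte) ((u >> 8) | 0x80), (byte) u };
            if (u <= 0x1FFFFFFF)              // 4 bytes, high bits 110
                return new byte[] { (byte) ((u >> 24) | 0xC0), (byte) (u >> 16),
                                    (byte) (u >> 8), (byte) u };
            throw new IllegalArgumentException("not encodable: " + u);
        }

        public static void main(String[] args) {
            // Spec examples: 0x03 -> 03, 0x3FFF -> BF FF, 0x4000 -> C0 00 40 00
            for (int u : new int[] { 0x03, 0x3FFF, 0x4000 }) {
                StringBuilder s = new StringBuilder();
                for (byte b : encode(u)) s.append(String.format("%02X ", b));
                System.out.println(String.format("0x%X -> %s", u, s.toString().trim()));
            }
        }
    }
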
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEType.java b/src/msil/ch/epfl/lamp/compiler/msil/PEType.java
deleted file mode 100644
index 418c660..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEType.java
+++ /dev/null
@@ -1,419 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.PEFile.Sig;
-
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-import ch.epfl.lamp.compiler.msil.util.Signature;
-import ch.epfl.lamp.compiler.msil.util.PECustomMod;
-
-import java.util.ArrayList;
-
-/**
- * Represents a type from a .NET assembly
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-final class PEType extends Type implements Signature {
-
-    //##########################################################################
-
-    /** The PEFile that holds the description of the type. */
-    final PEFile file;
-
-    /** The number of the row in the TypeDef table defining the type. */
-    final int definingRow;
-
-    /** The row of the first method in the MethodDef table. */
-    final int methodListBeg;
-
-    /** The row of the last method in the MethodDef table + 1. */
-    final int methodListEnd;
-
-    /** @param definingRow - the index in the TypeDef table where
-     *  the type description is.
-     */
-    PEType(PEModule module,
-	   int attributes,
-	   String fullName,
-	   Type declType,
-	   int auxAttr,
-	   PEFile file,
-	   int definingRow)
-    {
-	super(module, attributes, fullName, null, null, declType, auxAttr);
-	this.file = file;
-	this.definingRow = definingRow;
-	methodListBeg = file.TypeDef(definingRow).MethodList;
-	methodListEnd = definingRow < file.TypeDef.rows
-	    ? file.TypeDef(definingRow + 1).MethodList
-	    : file.MethodDef.rows + 1;
-    }
-
-    //##########################################################################
-    // lazy type construction methods
-
-    protected void loadBaseType() {
-	TypeDef type = file.TypeDef(definingRow);
-	baseType = type.Extends == 0 ? null
-	    : ((PEModule)Module).getTypeDefOrRef(type.Extends);
-    }
-
-    protected void loadFields() {
-	// the list of the declared fields starts from the
-	// FieldList index in the TypeDef table up to the smaller of the:
-	//  - the last row of the FieldDef table
-	//  - the start of the next list of fields determined by the
-	// FieldList index of the next row in the TypeDef table
-	final ArrayList fields = new ArrayList();
-	int fieldListBeg = file.TypeDef(definingRow).FieldList;
-	int fieldListEnd = file.FieldDef.rows + 1;
-	if (definingRow < file.TypeDef.rows)
-	    fieldListEnd = file.TypeDef(definingRow + 1).FieldList;
-
-	for (int row = fieldListBeg; row < fieldListEnd; row++) {
-	    int frow = file.FieldTrans.rows == 0
-		? row : file.FieldTrans(row).Field;
-	    int attrs = file.FieldDef(frow).Flags;
-	    String name = file.FieldDef.getName();
-	    //System.out.println("\t-->Loading field: " + name);
-	    Sig sig = file.FieldDef.getSignature();
-	    PECustomMod pecmod = sig.decodeFieldType();
-	    Object val = null;
-	    Table.Constant consts = file.Constant;
-	    for (int i = 1; i <= consts.rows; i++) {
-		consts.readRow(i);
-		int tableId = Table.getTableId(Table._HasConstant,consts.Parent);
-		int refRow = consts.Parent >> Table.NoBits[Table._HasConstant];
-		if (tableId == Table.FieldDef.ID && refRow == frow)
-		    val = consts.getValue();
-	    }
-	    FieldInfo field = new PEFieldInfo(row, name, attrs, pecmod, val);
-	    if (field.Name.equals("value__") && field.IsSpecialName()) {
-		    assert underlyingType == null : underlyingType.toString();
-		    underlyingType = field.FieldType;
-		}
-	    fields.add(field);
-	}
-	this.fields = (FieldInfo[])
-	    fields.toArray(FieldInfo.EMPTY_ARRAY);
-	fields.clear();
-    }
-
-    protected MethodBase[] methoddefs;
-
-    protected MethodInfo getMethod(int n) {
-        return (MethodInfo)methoddefs[n - methodListBeg];
-    }
-
-    protected void loadMethods() {
-	methoddefs = new MethodBase[methodListEnd - methodListBeg];
-
-	final ArrayList methods = new ArrayList();
-	final ArrayList constrs = new ArrayList();
-	PEModule pemodule = (PEModule) Module;
-	for (int row = methodListBeg; row < methodListEnd; row++) {
-	    int mrow = file.MethodTrans.rows == 0
-		? row : file.MethodTrans(row).Method;
-	    int attrs = file.MethodDef(mrow).Flags;
-	    String name = file.MethodDef.getName();
-	    Sig sig = file.MethodDef.getSignature();
-            /* we're about to parse a MethodDefSig, defined in Sec. 23.2.1 of Partition II */
-
-	    int callConv = sig.readByte();
-            // TODO decode HASTHIS from high byte of calling convention
-            // TODO decode EXPLICITTHIS from high byte of calling convention
-            // TODO handle VARARG calling convention (not CLS but may show up )
-            if((callConv & 0x1F) == Signature.GENERIC) {
-                int genParamCount = sig.decodeInt();
-                /* genParamCount is ignored because the method's type params will be obtained below
-                (see: file.GenericParam.getMVarIdxes(row) ) */
-            }
-	    int paramCount = sig.decodeInt();
-	    Type retType = sig.decodeRetType();
-	    Type[] paramType = new Type[paramCount];
-	    for (int i = 0; i < paramCount; i++)
-		paramType[i] = sig.decodeParamType();
-
-	    ParameterInfo[] params = new ParameterInfo[paramCount];
-	    int paramListBeg = file.MethodDef.ParamList;
-            int paramListEnd = paramListBeg + paramCount;
-            if (paramListEnd > file.ParamDef.rows) {
-                /* don't try to read param names past ParamDef's row count
-                   Some assembly-writers don't bother to give names for all params. */
-                paramListEnd = file.ParamDef.rows + 1;
-	    }
-	    for (int i = paramListBeg; i < paramListEnd; i++) {
-		int pattr = file.ParamDef(i).Flags;
-		String paramName = file.ParamDef.getName();
-		int seq = file.ParamDef.Sequence;
-		if (seq == 0) {
-		    //System.out.println("Retval attributes 0x" +
-		    //		       PEFile.short2hex(pattr));
-		} else {
-		    params[seq - 1] = new ParameterInfo(paramName, paramType[seq - 1], pattr, seq - 1);
-		}
-	    }
-	    for (int i = 0; i < params.length; i++) {
-		if (params[i] == null)
-		    params[i] = new ParameterInfo(null, paramType[i], 0, 0);
-	    }
-	    MethodBase method = null;
-	    if ((attrs & MethodAttributes.SpecialName) != 0
-		&& (attrs & MethodAttributes.RTSpecialName) != 0
-		&& (name.equals(ConstructorInfo.CTOR)
-		    || name.equals(ConstructorInfo.CCTOR)))
-            {
-		method = new PEConstructorInfo(row, attrs, params);
-            }
-            else {
-		method = new PEMethodInfo(row, name, attrs, retType, params);
-                int[] mvarIdxes = file.GenericParam.getMVarIdxes(row);
-                // if(mvarIdxes.length > 0) { System.out.println("Method: " + method); }
-                for(int i = 0; i < mvarIdxes.length; i++) {
-                    GenericParamAndConstraints mvarAndConstraints = pemodule.getTypeConstraints(mvarIdxes[i]);
-                    // add mvarAndConstraints as i-th MVar in method
-                    ((PEMethodInfo)method).addMVar(mvarAndConstraints);
-                }
-            }
-	    (method.IsConstructor() ? constrs : methods).add(method);
-	    methoddefs[row - methodListBeg] = method;
-	}
-
-	this.constructors = (ConstructorInfo[])
-	    constrs.toArray(ConstructorInfo.EMPTY_ARRAY);
-	this.methods = (MethodInfo[])
-	    methods.toArray(MethodInfo.EMPTY_ARRAY);
-	constrs.clear(); methods.clear();
-    }
-
-    protected void loadProperties() {
-	final PropertyMap pmap = file.PropertyMap;
-	if (pmap == null) {
-	    properties = PropertyInfo.EMPTY_ARRAY;
-	    return;
-	}
-
-        final PropertyDef pdef = file.PropertyDef;
-        int propListBeg =  -1;
-        int propListEnd = pdef.rows + 1;
-	for (int i = 1; i <= pmap.rows; i++) {
-	    pmap.readRow(i);
-	    if (pmap.Parent == this.definingRow) {
-                propListBeg = pmap.PropertyList;
-                if (i < pmap.rows) {
-                    pmap.readRow(i + 1);
-                    propListEnd = pmap.PropertyList;
-                }
-                break;
-            }
-        }
-	if (propListBeg < 0) {
-	    properties = PropertyInfo.EMPTY_ARRAY;
-	    return;
-	}
-
-	final ArrayList properties = new ArrayList();
-        for (int i = propListBeg; i < propListEnd; i++) {
-            pdef.readRow(i);
-            Sig sig = pdef.getSignature();
-            int b = sig.readByte();
-            b &= ~HASTHIS;
-            int paramCount = sig.readByte();
-            assert b == PROPERTY;
-            Type propType = sig.decodeType();
-            int index = Table.encodeIndex(i, Table._HasSemantics,
-                                          Table.PropertyDef.ID);
-            MethodSemantics msem = file.MethodSemantics;
-            MethodInfo getter = null, setter = null;
-            for (int j = 1; j <= msem.rows; j++) {
-                msem.readRow(j);
-                if (msem.Association != index)
-                    continue;
-                if (msem.isGetter())
-                    getter = getMethod(msem.Method);
-                else if (msem.isSetter())
-                    setter = getMethod(msem.Method);
-                else
-                    System.err.println("PEType.loadProperties(): !?!");
-            }
-            properties.add
-                (new PEPropertyInfo(i, pdef.getName(), (short)pdef.Flags,
-                                    propType, getter, setter));
-	}
-	this.properties = (PropertyInfo[]) properties
-	    .toArray(PropertyInfo.EMPTY_ARRAY);
-    }
-
-    protected void loadEvents() {
-        EventMap emap = file.EventMap;
-        if (emap == null) {
-            this.events = EventInfo.EMPTY_ARRAY;
-            return;
-        }
-
-        final EventDef edef = file.EventDef;
-        int eventListBeg = -1;
-        int eventListEnd = edef.rows + 1;
-        for (int i = 1; i <= emap.rows; i++) {
-            emap.readRow(i);
-            if (emap.Parent == this.definingRow) {
-                eventListBeg = emap.EventList;
-                if (i < emap.rows) {
-                    emap.readRow(i + 1);
-                    eventListEnd = emap.EventList;
-                }
-                break;
-            }
-        }
-        if (eventListBeg < 0) {
-            this.events = EventInfo.EMPTY_ARRAY;
-            return;
-        }
-
-        final ArrayList events = new ArrayList();
-        final MethodSemantics msem = file.MethodSemantics;
-        for (int i = eventListBeg; i < eventListEnd; i++) {
-            edef.readRow(i);
-            final Type handler =
-                ((PEModule)Module).getTypeDefOrRef(edef.EventType);
-            int index =
-                Table.encodeIndex(i, Table._HasSemantics, Table.EventDef.ID);
-            MethodInfo add = null, remove = null;
-            for (int j = 1; j <= msem.rows; j++) {
-                msem.readRow(j);
-                if (msem.Association != index)
-                    continue;
-                if (msem.isAddOn())
-                    add = getMethod(msem.Method);
-                else if (msem.isRemoveOn())
-                    remove = getMethod(msem.Method);
-                else {
-                }
-            }
-            events.add(new PEEventInfo(i, edef.getName(),
-                                       (short)edef.EventFlags,
-                                       handler, add, remove));
-        }
-        this.events = (EventInfo[]) events
-            .toArray(EventInfo.EMPTY_ARRAY);
-    }
-
-    protected void loadNestedTypes() {
-	final ArrayList nested = new ArrayList();
-	for (int i = 1; i <= file.NestedClass.rows; i++) {
-	    file.NestedClass.readRow(i);
-	    if (file.NestedClass.EnclosingClass == this.definingRow)
-		nested.add(((PEModule)Module)
-			   .getTypeDef(file.NestedClass.NestedClass));
-	}
-	this.nestedTypes = (Type[]) nested.toArray(Type.EmptyTypes);
-    }
-
-    protected void loadInterfaces() {
-	// get the interfaces implemented by this class
-	interfaces = Type.EmptyTypes;
-	int index = file.InterfaceImpl.findType(definingRow);
-	if (index > 0) {
-	    ArrayList ifaces = new ArrayList();
-	    for (int i = index; i <= file.InterfaceImpl.rows; i++) {
-		file.InterfaceImpl.readRow(i);
-		if (file.InterfaceImpl.Class != definingRow)
-		    break;
-		ifaces.add(((PEModule)Module)
-			   .getTypeDefOrRef(file.InterfaceImpl.Interface));
-	    }
-	    interfaces = (Type[]) ifaces.toArray(new Type[ifaces.size()]);
-	}
-    }
-
-    protected void loadCustomAttributes(Type attributeType) {
-	initAttributes(this, definingRow, Table.TypeDef.ID, attributeType);
-    }
-
-    private void initAttributes(CustomAttributeProvider cap, int definingRow,
-                                 int sourceTableId, Type attributeType)
-    {
-        ((PEModule)this.Module).initAttributes
-            (cap, definingRow, sourceTableId, attributeType);
-    }
-
-    //##########################################################################
-
-    private class PEFieldInfo extends FieldInfo {
-	private final int definingRow;
-	public PEFieldInfo(int definingRow, String name,
-                       int attrs, PECustomMod pecmod, Object value)
-	{
-	    super(name, PEType.this, attrs, pecmod, value);
-	    this.definingRow = definingRow;
-	}
-	protected void loadCustomAttributes(Type attributeType) {
-	    PEType.this.initAttributes
-                (this, definingRow, Table.FieldDef.ID, attributeType);
-	}
-    }
-
-    private class PEMethodInfo extends MethodInfo {
-	private final int definingRow;
-	public PEMethodInfo(int row, String name,
-                            int attrs, Type retType, ParameterInfo[] params)
-	{
-	    super(name, PEType.this, attrs, retType, params);
-	    this.definingRow = row;
-	}
-	protected void loadCustomAttributes(Type attributeType) {
-	    PEType.this.initAttributes
-                (this, definingRow, Table.MethodDef.ID, attributeType);
-	}
-    }
-
-    private class PEConstructorInfo extends ConstructorInfo {
-	private final int definingRow;
-	public PEConstructorInfo(int row, int attrs, ParameterInfo[] params) {
-	    super(PEType.this, attrs, params);
-	    this.definingRow = row;
-	}
-	protected void loadCustomAttributes(Type attributeType) {
-	    PEType.this.initAttributes
-                (this, definingRow, Table.MethodDef.ID, attributeType);
-	}
-    }
-
-    private class PEPropertyInfo extends PropertyInfo {
-	private final int definingRow;
-	public PEPropertyInfo(int row, String name, short attrs, Type propType,
-                              MethodInfo getter, MethodInfo setter)
-	{
-	    super(name, PEType.this, attrs, propType, getter, setter);
-	    this.definingRow = row;
-	}
-	protected void loadCustomAttributes(Type attributeType) {
-	    PEType.this.initAttributes
-                (this, definingRow, Table.PropertyDef.ID, attributeType);
-	}
-    }
-
-    private class PEEventInfo extends EventInfo {
-        private final int definingRow;
-        public PEEventInfo(int row, String name, short attrs, Type handler,
-                           MethodInfo add, MethodInfo remove)
-        {
-            super(name, PEType.this, attrs, handler, add, remove);
-            this.definingRow = row;
-        }
-        protected void loadCustomAttributes(Type attributeType) {
-            PEType.this.initAttributes
-                (this, definingRow, Table.EventDef.ID, attributeType);
-        }
-    }
-
-    //##########################################################################
-
-}  // class PEType
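
PEModule.initAttributes and the property/event loading in PEType above both
navigate ECMA-335 coded indexes (Table.encodeIndex, Table.getTableId,
Table.getTableIndex): the low bits of a coded value select one table out of a
group such as HasCustomAttribute or HasSemantics, and the remaining high bits
hold the row number. A rough sketch of that scheme, assuming generic helper
names rather than the library's actual API:

    // With N target tables in a group, the low ceil(log2(N)) bits are a tag
    // selecting the table; the remaining high bits hold the row index.
    public class CodedIndexSketch {
        static int encode(int row, int tagBits, int tag) { return (row << tagBits) | tag; }
        static int tagOf(int coded, int tagBits) { return coded & ((1 << tagBits) - 1); }
        static int rowOf(int coded, int tagBits) { return coded >>> tagBits; }

        public static void main(String[] args) {
            // HasSemantics uses 1 tag bit (Event = 0, Property = 1), so
            // PropertyDef row 5 encodes to (5 << 1) | 1 = 11.
            int coded = encode(5, 1, 1);
            System.out.println(coded + " -> tag " + tagOf(coded, 1)
                               + ", row " + rowOf(coded, 1));
        }
    }
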
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java
deleted file mode 100644
index d436036..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Defines the attributes that may be associated with a parameter.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class ParameterAttributes {
-
-    // just to make the class uninstantiable
-    private ParameterAttributes() {}
-
-    //##########################################################################
-
-    /** Specifies that there is no parameter attribute. */
-    public static final short None = 0x0000;
-
-    /** Specifies that the parameter is an input parameter. */
-    public static final short In = 0x0001;
-
-    /** Specifies that the parameter is an output parameter. */
-    public static final short Out = 0x0002;
-
-    /** Specifies that the parameter is a locale identifier. */
-    public static final short Lcid = 0x0004;
-
-    /** Specifies that the parameter is a return value. */
-    public static final short Retval = 0x0008;
-
-    /** Specifies that the parameter is optional.
-     *  Attention: the specification gives Optional the value 0x0004, but
-     *  in mscorlib.dll 0x0004 is Lcid and Optional is 0x0010.
-     */
-    public static final short Optional = 0x0010;
-
-    /** Specifies that the parameter has a default value. */
-    public static final short HasDefault = 0x1000;
-
-    /** Specifies that the parameter has field marshaling information. */
-    public static final short HasFieldMarshal = 0x2000;
-
-    /** Reserved. */
-    public static final short Reserved3 = 0x4000;
-
-    /** Reserved. */
-    public static final short Reserved4 = (short)0x8000;
-
-    /** Specifies that the parameter is reserved. */
-    public static final short ReservedMask = (short)0xf000;
-
-    /** Reserved: shall be zero in all conforming implementations. */
-    public static final short Unused = (short) 0xcfe0;
-
-    public static final String toString(int attrs) {
-	StringBuffer s = new StringBuffer();
-	if ((attrs & In) != 0) s.append("in ");
-	if ((attrs & Out) != 0) s.append("out ");
-	if ((attrs & Optional) != 0) s.append("opt ");
-	if ((attrs & HasDefault) != 0) s.append("default(???) ");
-	if ((attrs & HasFieldMarshal) != 0) s.append("marshal(???) ");
-	return s.toString();
-    }
-
-    //##########################################################################
-
-}  // class ParameterAttributes
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java
deleted file mode 100644
index 877d7aa..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Discovers the attributes of a parameter and provides access to
- * parameter metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class ParameterInfo extends CustomAttributeProvider {
-
-    //##########################################################################
-
-    /** Attributes of the parameter. */
-    public final short Attributes;
-
-    /** Name of the parameter. */
-    public final String Name;
-
-    /** Type of the parameter. */
-    public final Type ParameterType;
-
-    /** Position of the parameter in the parameter list. */
-    public final int Position;
-
-    //##########################################################################
-
-    /** Is this an input parameter? */
-    public final boolean IsIn() {
-        return (Attributes & ParameterAttributes.In) != 0;
-    }
-
-    /** Is this an output parameter? */
-    public final boolean IsOut() {
-        return (Attributes & ParameterAttributes.Out) != 0;
-    }
-
-    /** Is this an Lcid? */
-    public final boolean IsLcid() {
-        return (Attributes & ParameterAttributes.Lcid) != 0;
-    }
-
-    /** Is this a return value? */
-    public final boolean IsRetval() {
-        return (Attributes & ParameterAttributes.Retval) != 0;
-    }
-
-    /** Is this an optional parameter? */
-    public final boolean IsOptional() {
-        return (Attributes & ParameterAttributes.Optional) != 0;
-    }
-
-    //##########################################################################
-    // members not part of the public Reflection.ParameterInfo interface
-
-    /** Initializes a new instance of the ParameterInfo class. */
-    protected ParameterInfo(String name, Type type, int attr, int pos) {
-	Name = name;
-	ParameterType = type;
-	Attributes = (short)attr;
-	Position = pos;
-    }
-
-    public String toString() {
-        return ParameterAttributes.toString(Attributes) + ParameterType + " "
-            + Name;
-    }
-
-    //##########################################################################
-
-}  // class ParameterInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java b/src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java
deleted file mode 100644
index b19fe29..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java
+++ /dev/null
@@ -1,62 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.PECustomMod;
-
-public final class PrimitiveType extends Type {
-    public PrimitiveType(Module module,
-                         int attributes,
-                         String fullName,
-                         Type baseType,
-                         Type[] interfaces,
-                         Type declType,
-                         int auxAttr,
-                         Type elemType) {
-        super(module, attributes, fullName,
-                baseType, interfaces, declType, auxAttr, elemType);
-        clearMembers();
-    }
-
-    public void clearMembers() {
-        fields = FieldInfo.EMPTY_ARRAY;
-        methods = MethodInfo.EMPTY_ARRAY;
-        constructors = ConstructorInfo.EMPTY_ARRAY;
-        events = EventInfo.EMPTY_ARRAY;
-
-        initBaseType();
-        initInterfaces();
-
-        initFields();
-        initMethods();
-        initEvents();
-        initProperties();
-        initNestedTypes();
-    }
-
-    public FieldInfo addField(String name, int attrs, Type fieldType) {
-        PECustomMod fieldTypeWithMods = new PECustomMod(fieldType, null);
-        FieldInfo res = new FieldInfo(name, this, attrs, fieldTypeWithMods, null);
-        FieldInfo[] ms = new FieldInfo[fields.length + 1];
-        System.arraycopy(fields, 0, ms, 0, fields.length);
-        ms[ms.length - 1] = res;
-        fields = ms;
-        return res;
-    }
-
-    public MethodInfo addMethod(String name, int attrs, Type returnType, Type[] paramTypes) {
-        MethodInfo res = new MethodInfo(name, this, attrs, returnType, paramTypes);
-        MethodInfo[] ms = new MethodInfo[methods.length + 1];
-        System.arraycopy(methods, 0, ms, 0, methods.length);
-        ms[ms.length - 1] = res;
-        return res;
-    }
-
-    public ConstructorInfo addConstructor(int attrs, Type[] paramTypes) {
-        ConstructorInfo res = new ConstructorInfo(this, attrs, paramTypes);
-        ConstructorInfo[] ms = new ConstructorInfo[constructors.length + 1];
-        System.arraycopy(constructors, 0, ms, 0, constructors.length);
-        ms[ms.length - 1] = res;
-        return res;
-    }
-
-}
-
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java
deleted file mode 100644
index b1bec64..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Attributes applicable to properties
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class PropertyAttributes {
-
-    // makes the class uninstantiable
-    private PropertyAttributes() {}
-
-    //##########################################################################
-
-    /** Specifies that the property is special, with the name describing
-     *  how the property is special.
-     */
-    public static final short SpecialName = 0x0200;
-
-    /** Specifies that the metadata internal APIs check the name encoding.
-     */
-    public static final short RTSpecialName = 0x0400;
-
-    /** Specifies that the property has a default value.
-     */
-    public static final short HasDefault = 0x1000;
-
-    //##########################################################################
-
-    public static String toString(short attrs) {
-	StringBuffer str = new StringBuffer();
-	if ((attrs & SpecialName) != 0) str.append("specialname ");
-	if ((attrs & RTSpecialName) != 0) str.append("rtspecialname ");
-	return str.toString();
-    }
-
-    //##########################################################################
-
-} // class PropertyAttributes
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java
deleted file mode 100644
index 4b7cef8..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Discovers the attributes of a property
- * and provides access to property metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class PropertyInfo extends MemberInfo {
-
-    //##########################################################################
-
-    public final int MemberType() { return MemberTypes.Property; }
-
-    public final short Attributes;
-
-    public final boolean CanRead;
-
-    public final boolean CanWrite;
-
-    public final Type PropertyType;
-
-    /** Returns an array of the public get and set accessors for this property.
-     */
-    public MethodInfo[] GetAccessors() {
-	return GetAccessors(false);
-    }
-
-    /** Returns an array of the public or non-public <b>get</b>
-     *  and <b>set</b> accessors for this property.
-     */
-    public MethodInfo[] GetAccessors(boolean nonPublic) {
-	MethodInfo getter = GetGetMethod(nonPublic);
-	MethodInfo setter = GetSetMethod(nonPublic);
-	if (getter == null)
-	    if (setter == null) return MethodInfo.EMPTY_ARRAY;
-	    else return new MethodInfo[]{setter};
-	else if (setter == null) return new MethodInfo[] {getter};
-	else return new MethodInfo[] {getter, setter};
-    }
-
-    /** Returns the public <b>get</b> accessor for this property.
-     */
-    public MethodInfo GetGetMethod() {
-	return GetGetMethod(false);
-    }
-
-    /** Returns the public or non-public <b>get</b> accessor for this property.
-     */
-    public MethodInfo GetGetMethod(boolean nonPublic) {
-	return nonPublic ? getter
-	    : getter == null || getter.IsPublic() ? getter : null;
-    }
-
-    /** Returns the public <b>set</b> accessor for this property.
-     */
-    public MethodInfo GetSetMethod() {
-	return GetSetMethod(false);
-    }
-
-    /** Returns the public or non-public <b>set</b> accessor for this property.
-     */
-    public MethodInfo GetSetMethod(boolean nonPublic) {
-	return nonPublic ? setter
-	    : setter == null || setter.IsPublic() ? setter : null;
-    }
-
-    public String toString() {
-	MethodInfo m = getter != null ? getter : setter;
-	return MethodAttributes.accessFlagsToString(m.Attributes)
-	    +  " " + PropertyAttributes.toString(Attributes)
-	    + DeclaringType + "::" + Name;
-    }
-
-    //##########################################################################
-    // protected members
-
-    protected static final PropertyInfo[] EMPTY_ARRAY = new PropertyInfo[0];
-
-    protected MethodInfo getter;
-    protected MethodInfo setter;
-
-    protected PropertyInfo(String name, Type declType, short attr,
-			   Type propType, MethodInfo getter, MethodInfo setter)
-    {
-	super(name, declType);
-	Attributes = attr;
-	PropertyType = propType;
-	this.getter = getter;
-	this.setter = setter;
-	CanRead = getter != null;
-	CanWrite = setter != null;
-    }
-
-    //##########################################################################
-
-} // class PropertyInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Type.java b/src/msil/ch/epfl/lamp/compiler/msil/Type.java
deleted file mode 100644
index 830632c..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Type.java
+++ /dev/null
@@ -1,1142 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Map;
-import java.util.HashMap;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.Arrays;
-
-/**
- * Represents type declarations: class types, interface types, array types,
- * value types, and enumeration types.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class Type extends MemberInfo {
-
-    private java.util.List /* GenericParamAndConstraints */ tVars = new java.util.LinkedList();
-    private GenericParamAndConstraints[] sortedTVars = null;
-
-    public void addTVar(GenericParamAndConstraints tvarAndConstraints) {
-        sortedTVars = null;
-        tVars.add(tvarAndConstraints);
-    }
-
-    public GenericParamAndConstraints[] getSortedTVars() {
-        if(sortedTVars == null) {
-            sortedTVars = new GenericParamAndConstraints[tVars.size()];
-            for (int i = 0; i < sortedTVars.length; i ++){
-                Iterator iter = tVars.iterator();
-                while(iter.hasNext()) {
-                    GenericParamAndConstraints tvC = (GenericParamAndConstraints)iter.next();
-                    if(tvC.Number == i) {
-                        sortedTVars[i] = tvC;
-                    }
-                }
-            }
-        }
-        return sortedTVars;
-    }
-
-
-    //##########################################################################
-    // public static members
-
-    /** Empty array of type Type. */
-    public static final Type[] EmptyTypes = new Type[0];
-
-    /** Separates names in the namespace of the Type. */
-    public static final char Delimiter = '.';
-
-    //##########################################################################
-    // public properties
-
-    /** The fully qualified name of the Type. */
-    public final String FullName;
-
-    /** The namespace of the Type. */
-    public final String Namespace;
-
-    /** The type from which the current Type directly inherits. */
-    public final Type BaseType() {
-        initBaseType();
-        return baseType;
-    }
-    protected Type baseType;
-
-    /** The attributes associated with the Type. */
-    public final int Attributes;
-
-    /** The assembly that the type is declared in. */
-    public final Assembly Assembly() { return Module.Assembly; }
-
-    /** The module (the EXE/DLL) in which the current Type is defined. */
-    public final Module Module;
-
-    public final int MemberType() {
-        return DeclaringType == null
-            ? MemberTypes.TypeInfo : MemberTypes.NestedType;
-    }
-
-    //##########################################################################
-    // internal members
-
-    // Fields declared by this class
-    protected FieldInfo[] fields;
-
-    // Methods declared by this class
-    protected MethodInfo[] methods;
-
-    // Constructors of this class
-    protected ConstructorInfo[] constructors;
-
-    // Properties of the class
-    protected PropertyInfo[] properties;
-
-    // Events of the class
-    protected EventInfo[] events;
-
-    // Interfaces implemented by this class
-    protected Type[] interfaces;
-
-    // Nested types declared by this class
-    protected Type[] nestedTypes;
-
-    // holds the element type of array, pointer and byref types
-    private final Type elemType;
-
-    // the underlying type of an enumeration. null if the type is not enum.
-    protected Type underlyingType;
-
-    protected int auxAttr;
-
-    //##########################################################################
-    // Map with all the types known so far and operations on it
-
-    private static final Map types = new HashMap();
-
-    protected static Type getType(String name) {
-	return (Type) types.get(name);
-    }
-
-    protected static Type addType(Type t) {
-        assert(!(t instanceof TMVarUsage));
-        assert(!(t instanceof ConstructedType));
-	Type oldType = (Type) types.put(t.FullName, t);
-// 	if (oldType != null)
-// 	    throw new RuntimeException("The type: [" + t.Assembly + "]" + t
-// 				       + " replaces the type: [" +
-// 				       oldType.Assembly + "]" + oldType);
- 	return t;
-    }
-
-    //##########################################################################
-
-    /** The main constructor. */
-    protected Type(Module module,
-		   int attr,
-		   String fullName,
-		   Type baseType,
-		   Type[] interfaces,
-		   Type declType,
-		   int auxAttr,
-		   Type elemType)
-    {
-	super(fullName.lastIndexOf(Delimiter) < 0 ? fullName :
-	      fullName.substring(fullName.lastIndexOf(Delimiter) + 1,
-				 fullName.length()),
-	      declType);
-
-	Module = module; // null only for TMVarUsage and for PrimitiveType
-	Attributes = attr;
-	this.baseType = baseType;
-	if (DeclaringType == null) {
-	    FullName = fullName;
-	    int i = FullName.lastIndexOf(Delimiter);
-	    Namespace = (i < 0) ? "" : FullName.substring(0,i);
-	} else {
-	    FullName = declType.FullName + "+" + fullName;
-	    Namespace = DeclaringType.Namespace;
-	}
-
-	this.interfaces = interfaces;
-	this.elemType = elemType;
-	this.auxAttr = auxAttr;
-    }
-
-    public final boolean IsAbstract() {
-	return (Attributes & TypeAttributes.Abstract) != 0;
-
-    }
-    public final boolean IsPublic() {
-	return (Attributes & TypeAttributes.VisibilityMask)
-	    == TypeAttributes.Public;
-    }
-
-    public final boolean IsNotPublic() {
-	return (Attributes & TypeAttributes.VisibilityMask)
-	    == TypeAttributes.NotPublic;
-    }
-
-    public final boolean IsNestedPublic() {
-	return (Attributes & TypeAttributes.VisibilityMask)
-	    == TypeAttributes.NestedPublic;
-    }
-
-    public final boolean IsNestedPrivate() {
-	return (Attributes & TypeAttributes.VisibilityMask)
-	    == TypeAttributes.NestedPrivate;
-    }
-
-    public final boolean IsNestedFamily() {
-	return (Attributes & TypeAttributes.VisibilityMask)
-	    == TypeAttributes.NestedFamily;
-    }
-
-    public final boolean IsNestedAssembly() {
-	return (Attributes & TypeAttributes.VisibilityMask)
-	    == TypeAttributes.NestedAssembly;
-    }
-
-    public final boolean IsNestedFamORAssem() {
-	return (Attributes & TypeAttributes.VisibilityMask)
-	    == TypeAttributes.NestedFamORAssem;
-    }
-
-    public final boolean IsNestedFamANDAssem() {
-	return (Attributes & TypeAttributes.VisibilityMask)
-	    == TypeAttributes.NestedFamANDAssem;
-    }
-
-    public final boolean IsSealed() {
-	return (Attributes & TypeAttributes.Sealed) != 0;
-    }
-
-    public final boolean IsSpecialName() {
-	return (Attributes & TypeAttributes.SpecialName) != 0;
-    }
-
-    public final boolean IsClass() {
-	return  (Attributes & TypeAttributes.ClassSemanticsMask)
-	    == TypeAttributes.Class;
-    }
-
-    public final boolean IsInterface(){
-	return  (Attributes & TypeAttributes.ClassSemanticsMask)
-	    == TypeAttributes.Interface;
-    }
-
-    public final boolean IsAutoLayout() {
-        return  (Attributes & TypeAttributes.LayoutMask)
-	    == TypeAttributes.AutoLayout;
-    }
-    public final boolean IsExplictitLayout() {
-        return  (Attributes & TypeAttributes.LayoutMask)
-	    == TypeAttributes.ExplicitLayout;
-    }
-    public final boolean IsLayoutSequential() {
-        return  (Attributes & TypeAttributes.LayoutMask)
-	    == TypeAttributes.SequentialLayout;
-    }
-
-    public final boolean IsImport() {
-	return (Attributes & TypeAttributes.Import) != 0;
-    }
-    public final boolean IsSerializable() {
-	return (Attributes & TypeAttributes.Serializable) != 0;
-    }
-
-    public final boolean IsAnsiClass() {
-	return (Attributes & TypeAttributes.StringFormatMask)
-	    == TypeAttributes.AnsiClass;
-    }
-
-    public final boolean IsUnicodeClass() {
-	return (Attributes & TypeAttributes.StringFormatMask)
-	    == TypeAttributes.UnicodeClass;
-    }
-    public final boolean IsAutoClass() {
-	return (Attributes & TypeAttributes.StringFormatMask)
-	    == TypeAttributes.AutoClass;
-    }
-
-    public final boolean IsArray() {
-	return (auxAttr & AuxAttr.Array) != 0;
-    }
-    public final boolean IsByRef() {
-	return (auxAttr & AuxAttr.ByRef) != 0;
-    }
-    public final boolean IsPointer() {
-	return (auxAttr & AuxAttr.Pointer) != 0;
-    }
-    public final boolean IsPrimitive() {
-	return (auxAttr & AuxAttr.Primitive) != 0;
-    }
-    public final boolean IsValueType() {
-	return BaseType() == VALUE_TYPE() || IsEnum();
-    }
-    public final boolean IsEnum() {
-	return BaseType() == ENUM();
-    }
-    public boolean CanBeTakenAddressOf() {
-    /*  TODO should be overridden in TMVarUsage,
-        but there's currently no way to bind a TMVarUsage to its GenericParamAndConstraints definition. Why?
-        Because of the way the msil library is organized (e.g., mkArray() returns the same !0[] representation
-        for all !0[] usages, irrespective of the scope of the !0 type-param)
-        This  in turn is so because without generics there's no harm in using a type-def instance
-        where a type-ref should go (e.g., the ParameterType of a ParameterInfo nowadays may point to a PEType).
-        The net effect is that this method (CanBeTakenAddressOf) is conservative, it will answer "no"
-        for example for !0 where !0 refers to a type-param with the isValuetype constraint set.
-        The whole thing is ok at this point in time, where generics are not supported at the backend. */
-	    return IsValueType() && (this != ENUM());
-        /* ENUM() is a singleton, i.e. System.Enum is not generic */
-    }
-
-    /** IsGeneric, true for a PEType or TypeBuilder (i.e., a type definition)
-     * containing one or more type params. Not to be called on a reference
-     * to a constructed type. */
-    public final boolean IsGeneric() {
-        return tVars.size() > 0;
-    }
-
-    public final boolean HasElementType() {
-	return IsArray() || IsPointer() || IsByRef();
-    }
-
-    public boolean IsTMVarUsage() {
-        // overridden in TMVarUsage
-        return false;
-    }
-
-    public boolean IsNestedType() {
-        return DeclaringType != null;
-    }
-
-    public boolean IsDefinitelyInternal() {
-      if(IsNestedType()) {
-        return IsNestedPrivate();
-      } else {
-        return IsNotPublic();
-      }
-    }
-
-    //public final boolean IsCOMObject;
-    //public final boolean IsContextful;
-    //public final boolean IsMarshalByRef;
-
-    protected Type(Module module,
-		   int attr,
-		   String fullName,
-		   Type baseType,
-		   Type[] interfaces,
-		   Type declType,
-		   int auxAttr)
-    {
-	this(module, attr, fullName, baseType, interfaces,
-	     declType, auxAttr, null);
-    }
-
-    //##########################################################################
-
-    public static final class TMVarUsage extends Type {
-
-        public final int Number;
-        public final boolean isTVar;
-
-        /** Non-defining reference to either a TVar or an MVar.
-         *  An instance of GenericParamAndConstraints represents a TVar or an MVar definition. */
-        public TMVarUsage(int Number, boolean isTVar) {
-            super(null, 0, ((isTVar ? "!" : "!!") + Number), null, null, null, AuxAttr.None, null);
-            this.Number = Number;
-            this.isTVar = isTVar;
-	}
-
-        public String toString() {
-            return (isTVar ? "!" : "!!") + Number;
-    }
-
-        public final boolean IsTMVarUsage() {
-            return true;
-    }
-
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-
-            TMVarUsage that = (TMVarUsage) o;
-
-            if (Number != that.Number) return false;
-            if (isTVar != that.isTVar) return false;
-
-            return true;
-        }
-
-        public int hashCode() {
-            int result = Number;
-            result = 31 * result + (isTVar ? 1 : 0);
-            return result;
-        }
-    }
-
-    protected static final class AuxAttr {
-	public static final int None      = 0x0000;
-	public static final int Array     = 0x0001;
-	public static final int ByRef     = 0x0002;
-	public static final int Pointer   = 0x0008;
-	public static final int Primitive = 0x0010;
-    }
-
-    /***/
-    public static Type mkArray(Type elemType, int rank) {
-	StringBuffer arrSig = new StringBuffer("[");
- 	for (int i = 0; i < rank; i++) {
- 	    if (i > 0) arrSig.append(',');
- 	}
-	arrSig.append(']');
-	Type array = getType(elemType.FullName + arrSig);
-	if (array != null)
-	    return array;
-	array = new PrimitiveType(elemType.Module,
-				  elemType.Attributes
-				  | TypeAttributes.Sealed
-				  | TypeAttributes.Serializable,
-				  elemType.FullName + arrSig,
-				  ARRAY(), EmptyTypes, null,
-				  AuxAttr.Array, elemType);
-	return addType(array);
-    }
-
-    /***/
-    public static Type mkPtr(Type elemType) {
-	String name = elemType.FullName + "*";
-	Type type = getType(name);
-	if (type != null) return type;
-	type = new PrimitiveType(elemType.Module,
-				 elemType.Attributes,
-				 name, null, EmptyTypes, null,
-				 AuxAttr.Pointer, elemType);
-	return addType(type);
-    }
-
-    /***/
-    public static Type mkByRef(Type elemType) {
-        String name = elemType.FullName + "&";
-        Type type = getType(name);
-        if (type != null) return type;
-        type = new PrimitiveType(elemType.Module,
-                elemType.Attributes,
-                name, null, EmptyTypes, null,
-                AuxAttr.ByRef, elemType);
-        return addType(type);
-    }
-
-    //##########################################################################
-    // public methods
-
-    /**
-     * Return the type with the specified signature parameters.
-     * For example, the fully qualified name for a class might look like this:
-     * TopNamespace.SubNameSpace.ContainingClass+NestedClass,MyAssembly
-     */
-    public static Type GetType(String fullName) {
-	Type type = getType(fullName);
-	if (type != null) return type;
-
-	// check if it's an array type; TODO: make array type handling more robust
-	int i = fullName.lastIndexOf('[');
-	int j = fullName.lastIndexOf(']');
-	if (i >= 0)
-	    if (j > i && j == (fullName.length() - 1)) {
-		String elementTypeName = fullName.substring(0, i);
-		Type elementType = GetType(elementTypeName);
-		if (elementType == null)
-		    throw new RuntimeException
-			("Unknown element type '" + elementTypeName +
-			 "' for the array type: " + fullName);
-		int rank = j - i;
-		for (int k = i + 1; k < j; k++) {
-		    if (fullName.charAt(k) != ',')
-			throw new RuntimeException
-			    ("Malformed type name: " + fullName);
-		}
-		return mkArray(elementType, rank);
-	    } else
-		throw new RuntimeException("Malformed type name: " + fullName);
-
-	// check if it's a pointer type
-	if (fullName.charAt(fullName.length() - 1) == '*')
-	    return addType
-		(mkPtr(GetType(fullName.substring(0, fullName.length()-1))));
-
-	// check if it's a nested class
-	i = fullName.lastIndexOf('+');
-	if (i > 0) {
-	    if (i == 0 || i == (fullName.length() - 1))
-		throw new RuntimeException("malformedTypeName");
-	    Type enclosing = GetType(fullName.substring(0, i));
-	    return enclosing == null ? null
-		: enclosing.GetNestedType(fullName.substring(i + 1));
-	}
-
-	//System.out.println("Looking for type: " + fullName + " (" + fullName.length() + ")");
-	// try in the assemblies
-	Iterator assems = ch.epfl.lamp.compiler.msil.Assembly.
-	    assemblies.values().iterator();
-	while (type == null && assems.hasNext()) {
-	    Assembly assem = ((Assembly) assems.next());
-	    type = assem.GetType(fullName);
-	    //System.out.println("\tin assemby " + assem + " -> " + type);
-	}
-
-	Type type2 = getType(fullName);
-	if (type == type2) return type;
-	return type == null ? null : addType(type);
-    }
-
-    /**
-     * @return the type of the object encompassed or referenced to
-     * by the current array, pointer or reference type.
-     */
-    public Type GetElementType() {
-	return elemType;
-    }
-
-    /**
-     * @return the type underlying an enumeration type.
-     */
-    public Type getUnderlyingType() {
-	if (!IsEnum()) return null;
-	// this would force the loading of the underlying type from the
-	// the type of the value__ field of the enumeration
-	initFields();
-	return underlyingType;
-    }
-
-    //##########################################################################
-    // GetField/s/
-
-    /** Searches for the field with the specified name. */
-    public FieldInfo GetField(String name) {
-	initFields();
-	for (int i = 0; i < fields.length; i++)
-	    if (fields[i].Name.equals(name) && !fields[i].IsPrivate())
-		return fields[i];
-	return null;
-    }
-
-    /**
-     */
-    public FieldInfo GetField(String name, int bindingFlags) {
-	FieldInfo[] fields = this.GetFields(bindingFlags);
-	for (int i = 0; i < fields.length; i++)
-	    if (name.equals(fields[i].Name))
-		return fields[i];
-	return null;
-    }
-
-    /** Gets the fields of the current Type. */
-    public FieldInfo[] GetFields() {
-	return GetFields(BindingFlags.Instance | BindingFlags.Public);
-    }
-
-    /**
-     */
-    public FieldInfo[] GetFields(int bindingFlags) {
- 	initFields();
-	final FieldInfo[] fields =
-	    getAllFields((bindingFlags & BindingFlags.DeclaredOnly) != 0);
-	final boolean getInstance = (bindingFlags & BindingFlags.Instance) != 0;
-	final boolean getStatic = (bindingFlags & BindingFlags.Static) != 0;
-	final boolean getPublic = (bindingFlags & BindingFlags.Public) != 0;
-	final boolean getNonPublic =
-	    (bindingFlags & BindingFlags.NonPublic) != 0;
-
-	int cnt = 0;
-	for (int i = 0; i < fields.length; i++) {
-	    FieldInfo field = fields[i];
-	    boolean accessible = (getPublic && field.IsPublic())
-		|| (getNonPublic && !field.IsPublic());
-	    if (accessible
-		// strip off the private fields up the hierarchy
-		&& ((field.DeclaringType == this)
-		    || ((field.DeclaringType != this) && !field.IsPrivate()))
-		&& ((getInstance && !field.IsStatic())
-		    || ((getStatic && field.IsStatic()) &&
-			(field.DeclaringType == this
-			 || (bindingFlags & BindingFlags.FlattenHierarchy) != 0))
-		    )
-		)
-		fields[cnt++] = field;
-	}
-	FieldInfo [] resFields = new FieldInfo[cnt];
-	System.arraycopy(fields, 0, resFields, 0, cnt);
-	return resFields;
-    }
-
-    protected FieldInfo[] getAllFields(boolean declaredOnly) {
-	initFields();
-	FieldInfo [] inherited = BaseType() == null || declaredOnly
-	    ? FieldInfo.EMPTY_ARRAY
-	    : BaseType().getAllFields(declaredOnly);
-	FieldInfo[] allFields =
-	    new FieldInfo[inherited.length + this.fields.length];
-	System.arraycopy(inherited, 0, allFields, 0, inherited.length);
-	System.arraycopy(this.fields, 0,
-			 allFields, inherited.length, this.fields.length);
-	return allFields;
-    }
-
-    //##########################################################################
-    // GetConstructor/s/
-
-    /** Searches for a public instance constructor whose parameters
-     *  match the types in the specified array. */
-    public ConstructorInfo GetConstructor(Type[] paramTypes) {
-	initMethods();
-	for (int i = 0; i < constructors.length; i++) {
-	    if (equalParameters(constructors[i].GetParameters(), paramTypes))
-		return constructors[i];
-	}
-	return null;
-    }
-
-    /** Returns all public instance constructors defined for the current Type.*/
-    public ConstructorInfo[] GetConstructors() {
-	return GetConstructors(BindingFlags.Instance | BindingFlags.Public);
-    }
-
-    /***/
-    public ConstructorInfo[] GetConstructors(int bindingFlags) {
-	initMethods();
-	final boolean getInstance = (bindingFlags & BindingFlags.Instance) != 0;
-	final boolean getStatic = (bindingFlags & BindingFlags.Static) != 0;
-	final boolean getPublic = (bindingFlags & BindingFlags.Public) != 0;
-	final boolean getNonPublic =
-	    (bindingFlags & BindingFlags.NonPublic) != 0;
-
-	ConstructorInfo[] constrs =
-	    new ConstructorInfo[this.constructors.length];
-	int cnt = 0;
-	for (int i = 0; i < this.constructors.length; i++) {
-	    ConstructorInfo constr = this.constructors[i];
-	    boolean accessible = (getPublic && constr.IsPublic())
-		|| (getNonPublic && !constr.IsPublic());
-	    if (accessible
-		&& ((getInstance && !constr.IsStatic())
-		    || (getStatic && constr.IsStatic())))
-		constrs[cnt++] = constr;
-	}
-	ConstructorInfo [] resConstrs = new ConstructorInfo[cnt];
-	System.arraycopy(constrs, 0, resConstrs, 0, cnt);
-	return resConstrs;
-    }
-
-    //##########################################################################
-    // GetMethod/s/
-
-    /** Searches for the specified public method whose parameters
-     *  match the specified argument types. */
-    public MethodInfo GetMethod(String name, Type[] paramTypes) {
-        return GetMethod(name, paramTypes, null);
-    }
-
-    public MethodInfo GetMethod(String name, Type[] paramTypes, Type retType) {
-	initMethods();
-	MethodInfo method = findMethod(methods, name, paramTypes, retType);
-	if (method != null)
-	    return method;
-	if (BaseType() != null) {
-	    method = BaseType().GetMethod(name, paramTypes, retType);
-	    if (method != null)
-		return method;
-	}
-// 	StringBuffer str = new StringBuffer(name);
-// 	str.append('(');
-// 	for (int i = 0; i < paramTypes.length; i++) {
-// 	    if (i > 0) str.append(", ");
-// 	    str.append(paramTypes[i]);
-// 	}
-// 	str.append(')');
-// 	System.out.println("Cannot find method " + str + ":");
-// 	System.out.println("Methods of class " + this);
-// 	for (int i = 0; i < methods.length; i++)
-// 	    System.out.println("\t" + methods[i]);
-	return null;
-    }
-
-    /**
-     */
-    protected static MethodInfo findMethod(MethodInfo[] methods,
-					   String name,
-					   Type[] paramTypes,
-                                           Type retType)
-    {
-	for (int i = 0; i < methods.length; i++)
-	    if (name.equals(methods[i].Name)
-		&& equalParameters(methods[i].GetParameters(), paramTypes)
-                && (retType == null || methods[i].ReturnType == retType))
-		return methods[i];
-	return null;
-    }
-
-    /**
-     */
-    protected static boolean equalParameters(ParameterInfo[] params,
-					    Type[] paramTypes)
-    {
-	if (params.length != paramTypes.length)
-	    return false;
-	for (int i = 0; i < params.length; i++) {
-// 	    System.out.println(params[i].ParameterType + " == " + paramTypes[i]
-// 			       + " = " + (params[i].ParameterType == paramTypes[i]));
-	    if (params[i].ParameterType != paramTypes[i])
-		return false;
-	}
-	return true;
-    }
-
-    /**
-     */
-    public MethodInfo GetMethod(String name, Type[] paramTypes, int bindingFlags) {
-	MethodInfo[] methods = GetMethods(bindingFlags);
-	MethodInfo method =  findMethod(methods, name, paramTypes, null);
-	if (method == null) {
-	    StringBuffer str = new StringBuffer(name);
-	    str.append('(');
-	    for (int i = 0; i < paramTypes.length; i++) {
-		if (i > 0) str.append(", ");
-		str.append(paramTypes[i]);
-	    }
-	    str.append(')');
-	    System.out.println("Cannot find method " + str + ":");
-	    System.out.println("Methods of class " + this);
-	    for (int i = 0; i < methods.length; i++)
-		System.out.println("\t" + methods[i]);
-	}
-	return method;
-    }
-
-    /** Returns all public methods of the current Type. */
-    public MethodInfo[] GetMethods() {
-	return GetMethods(BindingFlags.Instance | BindingFlags.Public);
-    }
-
-    /** Returns the methods of the current Type that satisfy
-     *  the specified binding constraints. */
-    public MethodInfo[] GetMethods(int bindingFlags) {
-	initMethods();
-	final MethodInfo[] methods =
-	    getAllMethods((bindingFlags & BindingFlags.DeclaredOnly) != 0);
-	//System.out.println("" + this + ".GetMethods(int) -> " + methods.length);
-	final boolean getInstance = (bindingFlags & BindingFlags.Instance) != 0;
-	final boolean getStatic = (bindingFlags & BindingFlags.Static) != 0;
-	final boolean getPublic = (bindingFlags & BindingFlags.Public) != 0;
-	final boolean getNonPublic =
-	    (bindingFlags & BindingFlags.NonPublic) != 0;
-
-	int cnt = 0;
-	for (int i = 0; i < methods.length; i++) {
-	    MethodInfo method = methods[i];
-	    boolean accessible = (getPublic && method.IsPublic())
-		|| (getNonPublic && !method.IsPublic());
-	    if (accessible
-		// strip off the private methods up the hierarchy
-		&& ((method.DeclaringType == this)
-		    || ((method.DeclaringType != this) && !method.IsPrivate()))
-		&& ((getInstance && !method.IsStatic())
-		    || ((getStatic && method.IsStatic()) &&
-			(method.DeclaringType == this
-			 || (bindingFlags & BindingFlags.FlattenHierarchy) != 0))
-		    )
-		)
-		methods[cnt++] = method;
-	}
-	MethodInfo [] resMethods = new MethodInfo[cnt];
-	System.arraycopy(methods, 0, resMethods, 0, cnt);
-	return resMethods;
-    }
-
-    protected MethodInfo[] getAllMethods(boolean declaredOnly) {
-	initMethods();
-	MethodInfo[] inherited = BaseType() == null || declaredOnly
-	    ? MethodInfo.EMPTY_ARRAY
-	    : BaseType().getAllMethods(declaredOnly);
-	MethodInfo[] allMethods =
-	    new MethodInfo[inherited.length + this.methods.length];
-	System.arraycopy(inherited, 0, allMethods, 0, inherited.length);
-	System.arraycopy(this.methods, 0,
-			 allMethods, inherited.length, this.methods.length);
-	return allMethods;
-    }
-
-    //##########################################################################
-    // GetProperty/ies/
-
-    /** Returns all public properties of the current Type.
-     */
-    public PropertyInfo[] GetProperties() {
-	initProperties();
-	return (PropertyInfo[]) properties.clone();
-    }
-
-    /** Returns the properties of the current class
-     *  that satisfy the binding constraints.
-     */
-    public PropertyInfo[] GetProperties(int bindingFlags) {
-	initProperties();
-	return (PropertyInfo[]) properties.clone();
-    }
-
-    /** Returns the public property with the given name.
-     */
-    public PropertyInfo GetProperty(String name) {
-	initProperties();
-	for (int i = 0; i < properties.length; i++)
-	    if (name.equals(properties[i].Name))
-		return properties[i];
-	return null;
-    }
-
-    /** Returns the property with the given name
-     *  that satisfies the binding constraints.
-     */
-    public PropertyInfo GetProperty(String name, int bindingFlags) {
-	throw new RuntimeException("Method not implemented yet");
-    }
-
-    //##########################################################################
-    // GetEvent(s)
-
-    public EventInfo[] GetEvents() {
-        initEvents();
-        return (EventInfo[]) events.clone();
-    }
-
-    //##########################################################################
-    // GetNestedType/s/
-
-    /** Searches for a nested type with the specified name. */
-    public Type GetNestedType(String name) {
-	initNestedTypes();
-	for (int i = 0; i < nestedTypes.length; i++)
-	    if (nestedTypes[i].Name.equals(name))
-		return nestedTypes[i];
-	return null;
-    }
-
-    /** Returns all types nested within the current Type. */
-    public Type[] GetNestedTypes() {
-	initNestedTypes();
-	return (Type[]) nestedTypes.clone();
-    }
-
-    //##########################################################################
-    // GetInterface/s/
-
-    /** Searches for an interface with the given name implemented or
-     *  inherited by this type.
-     */
-    public Type GetInterface(String name) {
-	return GetInterface(name, false);
-    }
-
-    /** Searches for the specified interface,
-     * specifying whether to do a case-sensitive search.
-     * @param name the name of the interface to get
-     * @param ignoreCase <b>true</b> to perform a case-insensitive search for name;
-     *                   <b>false</b> to perform a case-sensitive search for name
-     * @return A Type object representing the interface with the specified name,
-     *         implemented or inherited by the current Type, if found;
-     *         otherwise, a null reference
-     */
-    public Type GetInterface(String name, boolean ignoreCase) {
-	initInterfaces();
-	for (int i = 0; i < interfaces.length; i++) {
-	    Type iface = interfaces[i];
-	    if (ignoreCase) {
-		if (name.equalsIgnoreCase(iface.Name)) return iface;
-		if (name.equalsIgnoreCase(iface.FullName)) return iface;
-	    } else {
-		if (name.equals(iface.Name)) return iface;
-		if (name.equals(iface.FullName)) return iface;
-	    }
-	}
-	return BaseType() == null ? null
-	    : BaseType().GetInterface(name, ignoreCase);
-    }
-
-    /** Returns the interfaces implemented or inherited by the current Type. */
-    public Type[] GetInterfaces() {
-	initInterfaces();
-	if (BaseType() == null) return interfaces;
-
-	// work on a copy so the cached interfaces array is not clobbered by the loop below
-	Type[] ifaces = (Type[]) interfaces.clone();
-	int count = 0;
-	for (int i = 0; i < interfaces.length; i++) {
-	    if (BaseType().GetInterface(interfaces[i].FullName) == null)
-		ifaces[count++] = ifaces[i];
-	}
-	Type[] baseTypeIfaces = BaseType().GetInterfaces();
-
-	Type[] res = new Type[baseTypeIfaces.length + count];
-	System.arraycopy(baseTypeIfaces, 0, res, 0, baseTypeIfaces.length);
-	System.arraycopy(ifaces, 0, res, baseTypeIfaces.length, count);
-
-	return res;
-    }
-
-
-    public boolean isSubtypeOf(Type that) {
-	if (this == that || BaseType() == that || that == OBJECT()) return true;
-	initInterfaces();
-	for (int i = 0; i < interfaces.length; i++)
-	    if (interfaces[i].isSubtypeOf(that))
-		return true;
-	boolean res = BaseType() == null ? false : BaseType().isSubtypeOf(that);
-// 	if (!res) {
-// 	    System.out.println(dumpType(this) + " not a subtype of " +
-// 			       dumpType(that));
-// 	}
-	return res;
-    }
-
-    private static String formatType(Type t) {
-	if (t == null) return "<null>";
-	String cname = t.getClass().getName();
-	int k = cname.lastIndexOf(".");
-	if (k >= 0)
-	    cname = cname.substring(k + 1);
-	return  "[" + t.Assembly().GetName() + "]" + t +
-	    "(" + cname + "#" + Integer.toHexString(t.hashCode()) + ")";
-    }
-    private static String dumpType(Type t) {
-	StringBuffer str = new StringBuffer();
-	str.append(formatType(t) + " : ");
-	str.append(formatType(t.BaseType()));
-	Type[] ifaces = t.GetInterfaces();
-	for (int i = 0; i < ifaces.length; i++)
-	    str.append(", " + formatType(ifaces[i]));
-	return str.toString();
-    }
-
-    //##########################################################################
-    // GetMember/s/
-
-    protected MemberInfo[] members;
-
-    public MemberInfo[] GetMember(String name) {
-	aggregateMembers();
-	List l = new ArrayList();
-	for (int i = 0; i < members.length; i++) {
-	    if (name.equals(members[i].Name))
-		l.add(members[i]);
-	}
-	return (MemberInfo[])l.toArray(MemberInfo.EMPTY_ARRAY);
-    }
-
-    protected void aggregateMembers() {
-	if (members != null)
-	    return;
-	initFields();
-	initMethods();
-	initProperties();
-	initNestedTypes();
-	// the List returned by Arrays.asList is fixed-size and does not support
-	// add/addAll, so we wrap it in an ArrayList
-	List l = new ArrayList(Arrays.asList(fields));
-	l.addAll(Arrays.asList(constructors));
-	l.addAll(Arrays.asList(methods));
-	l.addAll(Arrays.asList(properties));
-	l.addAll(Arrays.asList(nestedTypes));
-	members = (MemberInfo[]) l.toArray(MemberInfo.EMPTY_ARRAY);
-    }
-
-    //##########################################################################
-    // non-standard methods that return only members declared in this type
-
-    /**
-     * Return only the fields declared in this type.
-     */
-    public FieldInfo[] getFields() {
-	initFields();
-	FieldInfo[] fields = new FieldInfo[this.fields.length];
-	System.arraycopy(this.fields, 0, fields, 0, fields.length);
-	return fields;
-    }
-
-    /**
-     * Return only the constructors declared in this type.
-     */
-    public ConstructorInfo[] getConstructors() {
-	initMethods();
-	ConstructorInfo[] ctors = new ConstructorInfo[constructors.length];
-	System.arraycopy(constructors, 0, ctors, 0, ctors.length);
-	return ctors;
-    }
-
-    /**
-     * Return only the methods declared in this type.
-     */
-    public MethodInfo[] getMethods() {
-	initMethods();
-	MethodInfo[] methods = new MethodInfo[this.methods.length];
-	System.arraycopy(this.methods, 0, methods, 0, methods.length);
-	return methods;
-    }
-
-    /**
-     * Return only the properties declared in this type.
-     */
-    public PropertyInfo[] getProperties() {
-	initProperties();
-	PropertyInfo[] props = new PropertyInfo[properties.length];
-	System.arraycopy(properties, 0, props, 0, props.length);
-	return props;
-    }
-
-    /**
-     * Return only the interfaces directly implemented by this type.
-     */
-    public Type[] getInterfaces() {
-	initInterfaces();
-	Type[] ifaces = new Type[interfaces.length];
-	System.arraycopy(interfaces, 0, ifaces, 0, ifaces.length);
-	return ifaces;
-    }
-
-    /**
-     * Return only the nested types declared in this type.
-     */
-    public Type[] getNestedTypes() {
-	initNestedTypes();
-	Type[] nested = new Type[nestedTypes.length];
-	System.arraycopy(nestedTypes, 0, nested, 0, nested.length);
-	return nested;
-    }
-
-    //##########################################################################
-
-    public String toString() {
-	return FullName;
-    }
-
-    //##########################################################################
-    // lazy type construction members
-
-    private boolean initBaseType = true;
-    protected final void initBaseType() {
-        if (initBaseType) {
-            loadBaseType();
-            initBaseType = false;
-        }
-    }
-    protected void loadBaseType() {}
-
-    private boolean initInterfaces = true;
-    protected void initInterfaces() {
-	if (initInterfaces) {
-	    loadInterfaces();
-	    initInterfaces = false;
-	}
-	assert interfaces != null : "In type " + this;
-    }
-    protected void loadInterfaces() {}
-
-    private boolean initNestedTypes = true;
-    protected void initNestedTypes() {
-	if (initNestedTypes) {
-	    loadNestedTypes();
-	    initNestedTypes = false;
-	}
-	assert nestedTypes != null : "In type " + this;
-    }
-    protected void loadNestedTypes() {}
-
-    private boolean initFields = true;
-    protected void initFields() {
-	if (initFields) {
-	    loadFields();
-	    initFields = false;
-	}
-	assert fields != null : "In type " + this;
-    }
-    protected void loadFields() {}
-
-    private boolean initMethods = true;
-    protected void initMethods() {
-	if (initMethods) {
-	    loadMethods();
-	    initMethods = false;
-	}
-	assert constructors != null : "In type " + this;
-	assert methods != null : "In type " + this;
-    }
-    protected void loadMethods() {}
-
-    private boolean initProperties = true;
-    protected void initProperties() {
-	if (initProperties) {
-	    initMethods();
-	    loadProperties();
-	    initProperties = false;
-	}
-	assert properties != null : "In type " + this;
-    }
-    protected void loadProperties() {}
-
-    private boolean initEvents = true;
-    protected void initEvents() {
-	if (initEvents) {
-	    initMethods();
-	    loadEvents();
-	    initEvents = false;
-	}
-	assert events != null : "In type " + this;
-    }
-    protected void loadEvents() {}
-
-    //##########################################################################
-
-    //##########################################################################
-    // static members
-
-    private static Assembly MSCORLIB;
-    private static Module   MSCORLIB_DLL;
-
-    public static Type OBJECT() { return __OBJECT; }
-    public static Type STRING() { return __STRING; }
-    public static Type ARRAY() { return __ARRAY; }
-    public static Type VOID() { return __VOID; }
-    public static Type ENUM() { return __ENUM; }
-    public static Type VALUE_TYPE() { return __VALUE_TYPE; }
-
-    private static Type __OBJECT;
-    private static Type __STRING;
-    private static Type __ARRAY;
-    private static Type __VOID;
-    private static Type __ENUM;
-    private static Type __VALUE_TYPE;
-
-    public static void initMSCORLIB(Assembly mscorlib) {
-        if (MSCORLIB != null)
-            throw new RuntimeException("mscorlib already initialized");
-	MSCORLIB = mscorlib;
-	MSCORLIB_DLL = MSCORLIB.GetModules()[0];
-
-	__OBJECT = mscorlib.GetType("System.Object");
-	__STRING = mscorlib.GetType("System.String");
-	__ARRAY  = mscorlib.GetType("System.Array");
-	__VOID   = mscorlib.GetType("System.Void");
-	__ENUM   = mscorlib.GetType("System.Enum");
-	__VALUE_TYPE   = mscorlib.GetType("System.ValueType");
-    }
-
-    //##########################################################################
-
-}  // class Type
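
For orientation, a minimal usage sketch (in Scala) of the reflection-style API deleted above. It assumes mscorlib has already been loaded into an Assembly and registered via Type.initMSCORLIB; the Assembly.LoadFrom call is illustrative only, and the lookups use just the accessors shown in Type.java.

    import ch.epfl.lamp.compiler.msil._

    object TypeDemo {
      def main(args: Array[String]) {
        // illustrative: load mscorlib and register it so OBJECT(), STRING(), etc. resolve
        val mscorlib = Assembly.LoadFrom("mscorlib.dll")
        Type.initMSCORLIB(mscorlib)

        val str = Type.GetType("System.String")
        // look up one overload by name and parameter types
        val substring = str.GetMethod("Substring", Array(Type.GetType("System.Int32")))
        // enumerate the public instance methods, the same set the no-arg GetMethods() returns
        val methods = str.GetMethods(BindingFlags.Instance | BindingFlags.Public)
        println(substring + " is one of " + methods.length + " public instance methods")
        // interfaces implemented or inherited by System.String
        str.GetInterfaces().foreach(i => println("implements " + i.FullName))
      }
    }
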
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java
deleted file mode 100644
index 8f489fa..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Specifies type attributes.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class TypeAttributes {
-
-    //##########################################################################
-    // Visibility attributes
-
-    /** Bitmask used to retrieve visibility information. */
-    public static final int VisibilityMask = 0x00000007;
-
-    /** Class has no public scope. */
-    public static final int NotPublic = 0x00000000;
-
-    /** Class has public scope. */
-    public static final int Public = 0x00000001;
-
-    /** Class is nested with public visibility. */
-    public static final int NestedPublic =  0x00000002;
-
-    /** Class is nested with private visibility. */
-    public static final int NestedPrivate = 0x00000003;
-
-    /** Class is nested with family visibility, and is thus accessible
-     *  only by methods within its own type and any subtypes. */
-    public static final int NestedFamily = 0x00000004;
-
-    /** Class is nested with assembly visibility, and is thus accessible
-     *  only by methods within its assembly. */
-    public static final int NestedAssembly = 0x00000005;
-
-    /** Class is nested with assembly and family visibility, and is thus accessible
-     *  only by methods lying in the intersection of its family and assembly. */
-    public static final int NestedFamANDAssem = 0x00000006;
-
-    /** Class is nested with family or assembly visibility, and is thus accessible
-     *  only by methods lying in the union of its family and assembly. */
-    public static final int NestedFamORAssem = 0x00000007;
-
-    //##########################################################################
-    // Class layout attributes
-
-    /** Bitmask used to retrieve class layout information. */
-    public static final int LayoutMask = 0x00000018;
-
-    /** Class fields are automatically laid out by the CLR. */
-    public static final int AutoLayout = 0x00000000;
-
-    /** Class fields are laid out sequentially, in the order that the fields
-     *  were emitted to the metadata. */
-    public static final int SequentialLayout = 0x00000008;
-
-    /** Class fields are laid out at the specified offsets. */
-    public static final int ExplicitLayout = 0x00000010;
-
-    //##########################################################################
-    // Class semantics attributes
-
-    /** Bitmask used to retrieve class semantics information. */
-    public static final int ClassSemanticsMask = 0x00000020;
-
-    /** Type is a class. */
-    public static final int Class = 0x00000000;
-
-    /** Type is an interface. */
-    public static final int Interface = 0x00000020;
-
-    //##########################################################################
-    // Special semantics in addition to class semantics
-
-    /** Class is abstract. */
-    public static final int Abstract = 0x00000080;
-
-    /** Class cannot be extended. */
-    public static final int Sealed = 0x00000100;
-
-    /** Class is special in a way denoted by the name. */
-    public static final int SpecialName = 0x00000400;
-
-    //##########################################################################
-    // Implementation attributes
-
-    /** Class/interface is imported from another module. */
-    public static final int Import = 0x00001000;
-
-    /** Class can be serialized. */
-    public static final int Serializable = 0x00002000;
-
-    //##########################################################################
-    // String formatting attributes
-
-    /** Bitmask used to retrieve string information for native interop. */
-    public static final int StringFormatMask = 0x00030000;
-
-    /** LPTSTR is interpreted as ANSI. */
-    public static final int AnsiClass = 0x00000000;
-
-    /** LPTSTR is interpreted as UNICODE. */
-    public static final int UnicodeClass = 0x00010000;
-
-    /** LPTSTR is interpreted automatically. */
-    public static final int AutoClass = 0x00020000;
-
-    //##########################################################################
-    // Class initialization attributes
-
-    /** Initialize the class before first static field access. */
-    public static final int BeforeFieldInit = 0x00100000;
-
-    //##########################################################################
-    // Additional flags
-
-    /** CLI provides 'special' behavior, depending upon the name of the type. */
-    public static final int RTSpecialName = 0x00000800;
-
-    /** Type has security associated with it. */
-    public static final int HasSecurity = 0x00040000;
-
-    //##########################################################################
-
-    public static String accessModsToString(int attrs) {
-	switch (attrs & VisibilityMask) {
-	case NotPublic: return "private";
-	case Public: return "public";
-	case NestedPublic: return "nested public";
-	case NestedPrivate: return "nested private";
-	case NestedFamily: return "nested family";
-	case NestedAssembly: return "nested assembly";
-	case NestedFamANDAssem: return "nested famandassem";
-	case NestedFamORAssem: return "nested famorassem";
-	default:
-	    throw new RuntimeException();
-	}
-    }
-
-    /** Returns a string representation of the given attributes. */
-    public static String toString(int attrs) {
-	StringBuffer str = new StringBuffer(accessModsToString(attrs));
-	switch (attrs & LayoutMask) {
-	case AutoLayout: str.append(" auto"); break;
-	case SequentialLayout: str.append(" sequential"); break;
-	case ExplicitLayout: str.append(" explicit"); break;
-	}
-	switch (attrs & StringFormatMask) {
-	case AnsiClass: str.append(" ansi"); break;
-	case UnicodeClass: str.append(" unicode"); break;
-	case AutoClass: str.append(" autochar"); break;
-	}
-	if ((attrs & Interface) != 0) str.append(" interface");
-	if ((attrs & Abstract) != 0) str.append(" abstract");
-	if ((attrs & Sealed) != 0) str.append(" sealed");
-	if ((attrs & BeforeFieldInit) != 0) str.append(" beforefieldinit");
-	if ((attrs & Serializable) != 0) str.append(" serializable");
-	if ((attrs & SpecialName) != 0) str.append(" specialname");
-	if ((attrs & RTSpecialName) != 0) str.append(" rtspecialname");
-	return str.toString();
-    }
-
-    /** Tests whether the given attributes describe a nested type. */
-    public static final boolean isNested(int attrs) {
-	switch (attrs & VisibilityMask) {
-	case NestedPublic:
-	case NestedPrivate:
-	case NestedFamily:
-	case NestedAssembly:
-	case NestedFamANDAssem:
-	case NestedFamORAssem:
-	    return true;
-	default: return false;
-	}
-    }
-
-    //##########################################################################
-
-    // makes the class uninstantiable
-    private TypeAttributes() {}
-
-    //##########################################################################
-
-}  // class TypeAttributes
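
A small sketch of decoding a TypeAttributes flags word with the helpers above; the expected output in the comments follows directly from the switch statements in accessModsToString and toString.

    import ch.epfl.lamp.compiler.msil.TypeAttributes

    object TypeAttributesDemo {
      def main(args: Array[String]) {
        // a public, sealed, serializable class as a raw attributes word
        val attrs = TypeAttributes.Public | TypeAttributes.Sealed | TypeAttributes.Serializable
        println(TypeAttributes.accessModsToString(attrs)) // public
        println(TypeAttributes.toString(attrs))           // public auto ansi sealed serializable
        println(TypeAttributes.isNested(attrs))           // false
      }
    }
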
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Version.java b/src/msil/ch/epfl/lamp/compiler/msil/Version.java
deleted file mode 100644
index ad4b09b..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Version.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-
-/**
- * Represents the version number for a common language runtime assembly
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class Version {
-
-    //##########################################################################
-    // public interface
-
-    /**
-     * Gets the value of the major component of the version
-     * number for this instance.
-     */
-    public final int  Major;
-
-    /**
-     * Gets the value of the minor component of the version
-     * number for this instance.
-     */
-    public final int Minor;
-
-    /**
-     * Gets the value of the build component of the version
-     * number for this instance.
-     */
-    public final int Build;
-
-    /**
-     * Gets the value of the revision component of the version
-     * number for this instance.
-     */
-    public final int Revision;
-
-    /**
-     * Initializes a new instance of the Version class.
-     */
-    public Version() {
-	this(0,0,0,0);
-    }
-
-    /**
-     * Initializes a new instance of the Version class with
-     * the specified major, minor, build, and revision numbers.
-     */
-    public Version(int major, int minor, int build, int revision) {
-	this.Major = major;
-	this.Minor = minor;
-	this.Build = build;
-	this.Revision = revision;
-    }
-
-    /**
-     * Converts the value of this instance to its equivalent String representation
-     */
-    public String toString() {
-	return "" + Major + "." + Minor + "." + Build + "." +  Revision;
-    }
-
-    //##########################################################################
-
-} // class Version
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala
deleted file mode 100644
index 3110ccd..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil._
-import java.io.IOException
-
-/**
- * Defines and represents a dynamic assembly.
- * A dynamic assembly is an assembly that is created using the compiler.msil
- * emit APIs. The dynamic modules in the assembly are saved when the dynamic
- * assembly is saved using the Save method. To generate an executable, the
- * SetEntryPoint method must be called to identify the method that is the
- * entry point to the assembly. Assemblies are saved as DLLs by default,
- * unless SetEntryPoint requests the generation of a console application
- * or a Windows-based application.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class AssemblyBuilder(name: AssemblyName)
-      extends Assembly(name)
-      with ICustomAttributeSetter
-      with Visitable
-{
-    //##########################################################################
-    // public methods
-
-    /**
-     * Defines a dynamic module with the given name that will be saved
-     * to the specified file. No symbol information is emitted.
-     */
-    def DefineDynamicModule(name: String, fileName: String): ModuleBuilder = {
-	val module = new ModuleBuilder(name, fileName, "" + null, this)
-	addModule(name, module)
-	return module
-    }
-
-    /** Returns the dynamic module with the specified name. */
-    def GetDynamicModule(name: String): ModuleBuilder = {
-	return GetModule(name).asInstanceOf[ModuleBuilder]
-    }
-
-    /** Saves this dynamic assembly to disk. */
-    @throws(classOf[IOException])
-    def Save(fileName: String) {
-	generatedFiles = scala.collection.mutable.ArrayBuffer.empty[String]
-	ILPrinterVisitor.printAssembly(this, fileName)
-    }
-
-    @throws(classOf[IOException])
-    def Save(destPath: String, sourceFilesPath: String) {
-	generatedFiles = scala.collection.mutable.ArrayBuffer.empty[String]
-	ILPrinterVisitor.printAssembly(this, destPath, sourceFilesPath)
-    }
-
-    /** Returns the list of generated files from calling Save(). */
-    def GetGeneratedFiles(): Array[String] = {
-       return generatedFiles.toArray // (new Array[String](generatedFiles.size())).asInstanceOf[Array[String]]
-    }
-
-    /** Sets the entry point for this dynamic assembly. */
-    def SetEntryPoint(entryMethod: MethodInfo) {
-	EntryPoint = entryMethod
-    }
-
-    /** Sets a custom attribute. */
-    def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
-	addCustomAttribute(constr, value)
-    }
-
-    //##########################################################################
-    // protected members
-
-    // the access properties -  Save, Run, RunAndSave
-    private var access : Int = _
-
-    // all extern assemblies used in this assembly builder
-    protected var externAssemblies = scala.collection.mutable.Set.empty[Assembly]
-
-    // register an extern assembly
-    protected def registerExternAssembly(assembly: Assembly) {
-	externAssemblies += assembly
-    }
-
-    // get all extern Assemblies used in this Assembly Builder
-    def getExternAssemblies(): Array[Assembly] = {
-      externAssemblies = scala.collection.mutable.Set[Assembly]()
-      val iter = Assembly.assemblies.values().iterator
-      while (iter.hasNext) {
-        externAssemblies += iter.next.asInstanceOf[Assembly]
-      }
-      externAssemblies -= this
-      return externAssemblies.toArray
-    }
-
-    def loadModules() {}
-
-    // contains list of generated .msil files after calling Save()
-    var generatedFiles = scala.collection.mutable.ArrayBuffer.empty[String]
-
-    //##########################################################################
-    //##########################################################################
-
-    /** the apply method for a visitor */
-    @throws(classOf[IOException])
-    def apply(v: Visitor) {
-	v.caseAssemblyBuilder(this)
-    }
-
-    //##########################################################################
-}
-
-object AssemblyBuilderFactory {
-    /**
-     * Defines a dynamic assembly with the specified name.
-     */
-    def DefineDynamicAssembly(name: AssemblyName): AssemblyBuilder = {
-    //Assembly.reset()
-    return new AssemblyBuilder(name)
-    }
-}
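
A minimal sketch of the emit workflow described in the AssemblyBuilder comment above: define a dynamic assembly, add a module, and save it (as a DLL, since no entry point is set). The AssemblyName setup assumes its Name field is publicly settable, consistent with the GetName().Name usage elsewhere in this diff.

    import ch.epfl.lamp.compiler.msil.AssemblyName
    import ch.epfl.lamp.compiler.msil.emit.AssemblyBuilderFactory

    object EmitDemo {
      def main(args: Array[String]) {
        val name = new AssemblyName()
        name.Name = "demo"                    // assumed public field, see GetName().Name above
        val asm = AssemblyBuilderFactory.DefineDynamicAssembly(name)
        // one dynamic module whose contents are written out when the assembly is saved
        val module = asm.DefineDynamicModule("demo", "demo.msil")
        asm.Save("demo.msil")                 // no SetEntryPoint call, so a DLL is produced
        asm.GetGeneratedFiles().foreach(f => println("generated " + f))
      }
    }
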
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala
deleted file mode 100644
index ddd4708..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-import ch.epfl.lamp.compiler.msil.Type
-import java.io.IOException
-
-/**
- * Defines and represents a constructor of a dynamic class.
- * ConstructorBuilder is used to fully describe a constructor in
- * Microsoft intermediate language (MSIL), including the name, attributes,
- * signature, and constructor body. It is used in conjunction with the
- * TypeBuilder class to create classes at run time. Call DefineConstructor
- * to get an instance of ConstructorBuilder.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class ConstructorBuilder(declType: Type, attrs: Int, paramTypes: Array[Type])
-      extends ConstructorInfo(declType, attrs, paramTypes)
-      with ICustomAttributeSetter
-      with Visitable
-{
-
-    //##########################################################################
-    // public interface
-
-    /** Defines a parameter of this constructor. */
-    def DefineParameter(pos: Int, attr: Int, name: String): ParameterBuilder = {
-	val param = new ParameterBuilder(name, params(pos).ParameterType, attr, pos)
-	params(pos) = param
-	return param
-    }
-
-    /** Returns an ILGenerator for this constructor. */
-    def GetILGenerator(): ILGenerator = {
-	return ilGenerator
-    }
-
-    /** Sets a custom attribute. */
-    def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
-	addCustomAttribute(constr, value)
-    }
-
-    //##########################################################################
-
-    /** The apply method for a visitor. */
-    @throws(classOf[IOException])
-    def apply(v: Visitor) {
-	v.caseConstructorBuilder(this)
-    }
-
-    //##########################################################################
-
-    // the Intermediate Language Generator
-    // it contains the method's body
-    protected var ilGenerator: ILGenerator = new ILGenerator(this)
-
-    //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala
deleted file mode 100644
index 7ef9dc7..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.FieldInfo
-import ch.epfl.lamp.compiler.msil.Type
-import ch.epfl.lamp.compiler.msil.FieldAttributes
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-
-import ch.epfl.lamp.compiler.msil.util.PECustomMod
-
-import java.io.IOException
-
-/**
- * Discovers the attributes of a field and provides access to field metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class FieldBuilder(name: String, declType: Type, attrs: Int, fieldTypeWithMods: PECustomMod)
-      extends FieldInfo(name, declType, attrs, fieldTypeWithMods, null)
-      with ICustomAttributeSetter
-      with Visitable
-{
-
-    //##########################################################################
-    // public interface
-
-    /** Sets a custom attribute. */
-    def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
-	addCustomAttribute(constr, value)
-    }
-
-    //##########################################################################
-
-    /** the apply method for a visitor */
-    @throws(classOf[IOException])
-    def apply(v: Visitor) {
-	v.caseFieldBuilder(this)
-    }
-
-    //##########################################################################
-
-    protected var defaultValue: Object = _
-
-    /** Sets the default value of this field. */
-    def SetConstant(defaultValue: Object) {
-        this.defaultValue = defaultValue
-    }
-
-    /** Specifies the field layout. */
-    def SetOffset(iOffset: Int) {
-	//this.fieldOffset = FieldAttributes.Offset.Value(iOffset)
-    }
-
-    //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala
deleted file mode 100644
index 5d74d3a..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-
-/**
- * Declares the ability to set a custom attribute on a member.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-trait ICustomAttributeSetter {
-    def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte])
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
deleted file mode 100644
index 2aa9a99..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
+++ /dev/null
@@ -1,539 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil._
-import ch.epfl.lamp.compiler.msil.util.Table
-import java.util.Stack
-import java.io.IOException
-import ILGenerator._
-
-/**
- * Generates Microsoft intermediate language (MSIL) instructions.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
- final class ILGenerator(_owner: MethodBase) extends Visitable {
-
-    //##########################################################################
-    // public interface
-
-    /**
-     * Puts the specified instruction onto the stream of instructions.
-     */
-    def Emit(opcode: OpCode) {
-	// switch opcode
-        if (opcode == OpCode.Ret) {
-	   emit(opcode, null, 0)
-        } else {
-           emit(opcode, null)
-	}
-    }
-
-    /**
-     * Puts the specified instruction and character argument onto
-     * the Microsoft intermediate language (MSIL) stream of instructions.
-     */
-    def Emit(opcode: OpCode, arg: Char) {
-	emit(opcode,new Character(arg))
-    }
-
-    /**
-     * Puts the specified instruction and metadata token for the
-     * specified constructor onto the Microsoft intermediate language
-     * (MSIL) stream of instructions.
-     */
-    def Emit(opcode: OpCode, arg: ConstructorInfo) {
-	assert(arg != null)
-	// newobj
-	// pop size is the number of parameters
-	emit(opcode,arg, OpCode.PUSH_size(opcode.CEE_push) -
-	     arg.GetParameters().length)
-    }
-
-    /**
-     * Puts the specified instruction onto the Microsoft intermediate language (MSIL)
-     * stream followed by the index of the given local variable.
-     */
-    def Emit(opcode: OpCode, arg: LocalBuilder) {
-	assert(arg != null)
-	// ldarg    | ldarg.s  | ldarga
-	// ldarga.s  | ldloc    | ldloc.s  | ldloca
-	// ldloca.s  | starg    | starg.s  | stloc
-	// stloc.s
-
-	// <instr_var> <localname>
-	emit(opcode, arg)
-    }
-
-
-    /**
-     * Puts the specified instruction and numerical argument onto
-     * the Microsoft intermediate language (MSIL) stream of instructions.
-     */
-    def Emit(opcode: OpCode, arg: Double) {
-	// ldc.r4 | ldc.r8
-	emit(opcode, new java.lang.Double(arg))
-    }
-
-    /**
-     * Puts the specified instruction and metadata token for the
-     * specified field onto the Microsoft intermediate language (MSIL)
-     * stream of instructions.
-     */
-    def Emit(opcode: OpCode,arg: FieldInfo) {
-	assert(arg != null)
-	// ldfld | ldflda | ldsfld | ldsflda | stfld | stsfld
-	emit(opcode,arg)
-    }
-
-    /**
-     * Puts the specified instruction and numerical argument onto
-     * the Microsoft intermediate language (MSIL) stream of instructions.
-     */
-    def Emit(opcode: OpCode, arg: Short ) {
-	emit(opcode, new java.lang.Short(arg))
-    }
-
-    /**
-     * Puts the specified instruction and numerical argument onto
-     * the Microsoft intermediate language (MSIL) stream of instructions.
-     */
-    def Emit(opcode: OpCode, arg: Int) {
-	// ldc.i4 | ldc.i4.s | unaligned
-	emit(opcode, new java.lang.Integer(arg))
-    }
-
-    /**
-     * Puts the specified instruction and numerical argument onto
-     * the Microsoft intermediate language (MSIL) stream of instructions.
-     */
-    def Emit(opcode: OpCode, arg: Long) {
-	// ldc.i8
-	emit(opcode, new java.lang.Long(arg))
-    }
-
-    /**
-     * Puts the specified instruction onto the Microsoft intermediate
-     * language (MSIL) stream and leaves space to include a label when
-     * fixes are done.
-     */
-    def Emit(opcode: OpCode,label: Label) {
-	assert(label != null)
-	// beq    | beq.s    | bge    | bge.s    |
-	// bge.un    | bge.un.s   | bgt    | bgt.s    | bgt.un | bgt.un.s |
-	// ble       | ble.s      | ble.un | ble.un.s | blt    | blt.s    |
-	// blt.un    | blt.un.s   | bne.un | bne.un.s | br     | br.s     |
-	// brfalse   | brfalse.s  | brtrue | brtrue.s | leave  | leave.s
-
-	emit(opcode, label)
-	// is the label initialized ? if true backward jump else forward jump
-	if (label.isInitialized()) {
-// 	    if (arg.stacksize != lastLabel.stacksize) {
-// 		System.err.println("ILGenerator.Emit: Stack depth differs depending on path:");
-// 		System.err.println("\tmethod = " + owner);
-// 		System.err.println("\tPC = 0x" + Table.short2hex(lastLabel.address));
-// 	    }
-	    //assert arg.stacksize == lastLabel.stacksize;
-	}
-	else {
-	    label.setStacksize(lastLabel.getStacksize())
-	}
-    }
-
-    /**
-     * Puts the specified instruction onto the Microsoft intermediate
-     * language (MSIL) stream and leaves space to include a label when
-     * fixes are done.
-     */
-    def Emit(opcode: OpCode, arg: Array[Label] ) {
-	assert(arg != null)
-	// switch
-
-	// <instr> ::= <instr_switch> ( <labels> )
-	// Examples:
-	// switch (0x3, -14, Label1)
-	// switch (5, Label2)
-	emit(opcode, arg, arg.length)
-    }
-
-    /**
-     * Puts the specified instruction onto the Microsoft intermediate
-     * language (MSIL) stream followed by the metadata token for the
-     * given method.
-     */
-    def Emit(opcode: OpCode,arg: MethodInfo) {
-	assert(arg != null)
-	// call  | callvirt | jmp | ldftn | ldvirtftn
-	// pop size is the number of parameters
-	// pop 1 more if method is not static !
-	// push size is either 0 (void method) or 1
-	assert(arg.ReturnType != null, "No ReturnType: " + arg.DeclaringType + "::" + arg.Name)
-
-	val popush: Int = if (opcode == OpCode.Ldftn ||
-            opcode == OpCode.Ldvirtftn ||
-            opcode == OpCode.Jmp)
-        {
-           OpCode.PUSH_size(opcode.CEE_push) - OpCode.POP_size(opcode.CEE_pop)
-        } else if (opcode == OpCode.Calli || opcode == OpCode.Callvirt) {
-	        (if(arg.ReturnType == VOID) 0 else 1) - arg.GetParameters().length - 1
-        } else {
-	        (if(arg.ReturnType == VOID) 0 else 1) - arg.GetParameters().length
-	}
-	emit(opcode, arg, popush)
-    }
-
-    /**
-     * Puts the specified instruction and numerical argument onto
-     * the Microsoft intermediate language (MSIL) stream of instructions.
-     */
-    def Emit(opcode: OpCode, arg: Float ) {
-	emit(opcode, new java.lang.Float(arg))
-    }
-
-    /**
-     * Puts the specified instruction onto the Microsoft intermediate
-     * language (MSIL) stream followed by the metadata token for the
-     * given string.
-     */
-    def Emit(opcode: OpCode, arg: String ) {
-	assert(arg != null)
-	// ldstr
-	emit(opcode, arg)
-    }
-
-    /**
-     * Puts the specified instruction onto the Microsoft intermediate
-     * language (MSIL) stream followed by the metadata token for the
-     * given type.
-     */
-    def Emit(opcode: OpCode, arg: Type) {
-	assert(arg != null)
-	// box     | castclass | cpobj    | initobj | isinst    |
-	// ldelema | ldobj     | mkrefany | newarr  | refanyval |
-	// sizeof  | stobj     | unbox
-
-	emit(opcode, arg)
-    }
-
-    /**
-     * Puts a call or callvirt instruction onto the Microsoft intermediate
-     * language (MSIL) stream.
-     */
-    def EmitCall(opcode: OpCode, arg: MethodInfo,
-			 optionalParameterTypes: Array[Type]) {
-	assert(arg != null)
-	// pop size is the number of parameters
-	// push size is either 0 (void method) or 1
-	//System.out.println(arg.ReturnType.Size + " " + arg.GetParameters().length);
-	emit(opcode, arg, (if(arg.ReturnType == VOID) 0 else 1) -
-	     arg.GetParameters().length)
-    }
-
-    /**
-     * Emits the Microsoft intermediate language (MSIL) necessary to
-     * call WriteLine with the given field.
-     */
-    def EmitWriteLine(arg: FieldInfo) {
-	// first load field info
-	// if static use OpCode.Ldsfld
-	if (arg.IsStatic())
-	    Emit(OpCodes.Ldsfld, arg)
-	else
-	    Emit(OpCodes.Ldfld, arg)
-	// then call System.Console.WriteLine(arg.Type)
-	val t: Type = Type.GetType("System.Console")
-	val argsType: Array[Type] = new Array[Type](1)
-	argsType(0) = arg.FieldType
-	val m: MethodInfo = t.GetMethod("WriteLine", argsType)
-	EmitCall(OpCode.Call, m, null)
-    }
-
-    /**
-     * Emits the Microsoft intermediate language (MSIL) necessary
-     * to call WriteLine with the given local variable.
-     */
-    def EmitWriteLine(arg: LocalBuilder) {
-	// first load local variable
-	Emit(OpCodes.Ldloc, arg)
-	// then call System.Console.WriteLine(arg.Type)
-	val t: Type = Type.GetType("System.Console")
-	val argsType: Array[Type] = new Array[Type](1)
-	argsType(0) = arg.LocalType
-	val m: MethodInfo = t.GetMethod("WriteLine", argsType)
-	EmitCall(OpCode.Call, m, null)
-    }
-
-    /**
-     * Emits the Microsoft intermediate language (MSIL) to call
-     * WriteLine with a string.
-     */
-    def EmitWriteLine(arg: String) {
-	// first load string
-	Emit(OpCode.Ldstr, arg)
-	// then call System.Console.WriteLine(string)
-	val t: Type = Type.GetType("System.Console")
-	val argsType: Array[Type] = new Array[Type](1)
-	argsType(0) = Type.GetType("System.String")
-	val m: MethodInfo = t.GetMethod("WriteLine", argsType)
-	EmitCall(OpCode.Call, m, null)
-    }
-
-    /**
-     * Declares a local variable.
-     */
-    def DeclareLocal(localType: Type): LocalBuilder = {
-	val l: LocalBuilder = new LocalBuilder(locals, localType)
-	locals = locals + 1
-	localList += l
-	return l
-    }
-
-    /**
-     * Returns a new label that can be used as a token for branching.
-     * In order to set the position of the label within the stream, you
-     * must call MarkLabel. This is just a token and does not yet represent
-     * any particular location within the stream.
-     */
-    def DefineLabel():Label = {
-	new Label.NormalLabel()
-    }
-
-    /**
-     * Marks the Microsoft intermediate language (MSIL) stream's
-     * current position with the given label.
-     */
-    def MarkLabel(label: Label) {
-	label.mergeWith(lastLabel)
-	/*
-	label.address = lastLabel.address;
-	//label.stacksize = lastLabel.stacksize;
-	if (label.stacksize >= 0)
-	    lastLabel.stacksize = label.stacksize;
-	*/
-    }
-
-    /** Begins a lexical scope. */
-    def BeginScope() {
-	emitSpecialLabel(Label.NewScope)
-    }
-
-    /** Ends a lexical scope. */
-    def EndScope() {
-	emitSpecialLabel(Label.EndScope)
-    }
-
-    /**
-     * Begins an exception block for a non-filtered exception and records
-     * the label for the end of the block. This leaves the generator in the
-     * correct place to emit catch or finally blocks, or to finish the try.
-     */
-    def BeginExceptionBlock() {
-        emitSpecialLabel(Label.Try)
-        val endExc: Label = new Label.NormalLabel() // new Label(lastLabel) ???
-        excStack.push(Label.Try, endExc)
-    }
-
-    /** Begins a catch block. */
-    def BeginCatchBlock(exceptionType: Type) {
-        val kind = excStack.peekKind()
-        if (kind == Label.Kind.Try ||
-            kind == Label.Kind.Catch) {
-        /* ok */
-        } else {
-            throw new RuntimeException("Catch should follow either a try or catch")
-	}
-        val endExc: Label = excStack.popLabel()
-	Emit(OpCodes.Leave, endExc)
-	// the CLI automatically provides the exception object on the evaluation
-	// stack, so we adjust the stack size
-	lastLabel.incStacksize()
-        excStack.push(Label.Catch, endExc)
-        emitSpecialLabel(Label.Catch, exceptionType)
-    }
-
-    /** Ends an exception block. */
-    def EndExceptionBlock() {
-        val kind = excStack.peekKind()
-        if (kind == Label.Kind.Try) {
-	    throw new RuntimeException("Try block with neither catch nor finally")
-        } else if (kind == Label.Kind.Catch) {
-	    Emit(OpCodes.Leave, excStack.peekLabel())
-        } else if (kind == Label.Kind.Finally) {
-	    Emit(OpCodes.Endfinally)
-	}
-        MarkLabel(excStack.popLabel())
-        emitSpecialLabel(Label.EndTry)
-    }
-
-    /**
-     * Begins a finally block in the Microsoft intermediate language
-     * (MSIL) instruction stream.
-     */
-    def BeginFinallyBlock() {
-        val endExc: Label = excStack.popLabel()
-	Emit(OpCodes.Leave, endExc)
-        excStack.push(Label.Finally, endExc)
-	emitSpecialLabel(Label.Finally)
-    }
-
-    /**
-     * Emits an instruction to throw an exception.
-     */
-    def ThrowException(exceptionType: Type) {
-	assert(exceptionType != null)
-	if (!exceptionType.isSubtypeOf(Type.GetType("System.Exception")))
-	    throw new RuntimeException
-		(exceptionType + " doesn't extend System.Exception" )
-	val ctor: ConstructorInfo = exceptionType.GetConstructor(Type.EmptyTypes)
-	if (ctor == null)
-	    throw new RuntimeException("Type " + exceptionType
-				       + " doesn't have a default constructor")
-	Emit(OpCodes.Newobj, ctor)
-	Emit(OpCodes.Throw)
-    }
-
-    /**
-     * sets the line of the source file corresponding to the next instruction
-     */
-    def setPosition(line: Int) {
-	    if (line != 0) lineNums.put(lastLabel, Integer.toString(line))
-    }
-
-    def setPosition(line: Int, filename: String) {
-	    if (line != 0) lineNums.put(lastLabel, line + "  '" + filename + "'")
-    }
-
-    def setPosition(startLine: Int, endLine: Int, startCol: Int, endCol: Int, filename: String) {
-      val lineRange = startLine + "," + endLine
-      val colRange  = startCol  + "," + endCol
-	  lineNums.put(lastLabel, lineRange + ":" + colRange + "  '" + filename + "'")
-    }
-
-   def getLocals(): Array[LocalBuilder] = localList.toArray
-
-    def getLabelIterator() = labelList.iterator
-
-    def getOpcodeIterator() = opcodeList.iterator
-
-    def getArgumentIterator() = argumentList.iterator
-
-    //##########################################################################
-    // private implementation details
-
-
-
-    // the local variable list
-    private final val localList  = scala.collection.mutable.ArrayBuffer.empty[LocalBuilder]
-
-    // the label list, the opcode list and the opcode argument list
-    // labelList is an array of Label
-    // opcodeList is an array of OpCode
-    // argumentList is an array of Object (null if no argument)
-    private final val labelList = scala.collection.mutable.ArrayBuffer.empty[Label]
-    private final val opcodeList = scala.collection.mutable.ArrayBuffer.empty[OpCode]
-    private final val argumentList = scala.collection.mutable.ArrayBuffer.empty[Object]
-
-    // the program counter (pc)
-    // also called the stream's current position
-    private var pc: Int = 0
-
-    // last label
-    private var lastLabel: Label = new Label.NormalLabel(pc,0)
-
-    // the maximum size of stack
-    private var maxstack: Int = 0
-
-    // the number of locals
-    private var locals: Int = 0
-
-    // stack of labels for the exception-handling mechanism
-    private var excStack: ExceptionStack = new ExceptionStack()
-
-    // the method info owner of this ILGenerator
-    var owner: MethodBase = _owner
-
-    val lineNums = scala.collection.mutable.Map.empty[Label, String]
-
-
-    def getMaxStacksize(): Int = { this.maxstack }
-
-    // private emit with Object Argument
-    private def emit(opcode: OpCode, arg: Object) {
-	emit(opcode, arg, opcode.CEE_popush)
-    }
-
-    // private emit with Object Argument and override POPUSH
-    private def emit(opcode: OpCode, arg: Object, overridePOPUSH: Int) {
-	// add label, opcode and argument
-      labelList += lastLabel
-      opcodeList += opcode
-      argumentList += arg
-	// compute new lastLabel (next label)
-	val stackSize: Int = lastLabel.getStacksize() + overridePOPUSH
-	if (stackSize < 0) {
-          val msg = "ILGenerator.emit(): Stack underflow in method: " + owner
-          scala.Console.println(msg)
-          // throw new RuntimeException(msg)
-	}
-	if (stackSize > maxstack)
-	    maxstack = stackSize
-	var address: Int = lastLabel.getAddress() + opcode.CEE_length
-        if (opcode.CEE_opcode == OpCode.CEE_SWITCH) {
-            address = address + 4*arg.asInstanceOf[Array[Label]].length
-        }
-	lastLabel = new Label.NormalLabel(address, stackSize)
-	pc = pc + 1
-    }
-
-   def Ldarg0WasJustEmitted() : Boolean = {
-     if(opcodeList.isEmpty)
-       return false
-     val lastEmitted = opcodeList(opcodeList.size - 1)
-     lastEmitted eq OpCode.Ldarg_0
-   }
-
-    private def emitSpecialLabel(l: Label) {
-        emitSpecialLabel(l, null)
-    }
-    private def emitSpecialLabel(l: Label, catchType: Type) {
-        labelList += l
-        opcodeList += null
-        argumentList += catchType
-    }
-
-    //##########################################################################
-    //
-    @throws(classOf[IOException])
-    def apply(v: Visitor)  {
-	v.caseILGenerator(this)
-    }
-
-    //##########################################################################
-}  // class ILGenerator
-
-
-object ILGenerator {
-
-   val VOID: Type = Type.GetType("System.Void")
-   val NO_LABEL: String = ""
-
-    private final class ExceptionStack {
-        private val labels = new scala.collection.mutable.Stack[Label]()
-        private val kinds = new scala.collection.mutable.Stack[Label]()
-        def ExceptionStack() {}
-        def pop() { labels.pop; kinds.pop }
-        def push(kind: Label, label: Label) {
-            kinds.push(kind); labels.push(label)
-        }
-        def peekKind(): Label.Kind = kinds.top.getKind
-        def peekLabel(): Label = labels.top
-        def popLabel(): Label = { kinds.pop(); labels.pop() }
-    }
-
-}
-
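
A short sketch of driving the ILGenerator deleted above, using only the calls it defines (exception blocks, EmitWriteLine, and a plain Emit). The generator would normally come from a MethodBuilder or ConstructorBuilder via GetILGenerator(), and mscorlib must already be loaded so that System.Console and System.Exception resolve.

    import ch.epfl.lamp.compiler.msil.Type
    import ch.epfl.lamp.compiler.msil.emit.{ILGenerator, OpCode}

    object ILGeneratorDemo {
      // wraps a WriteLine call in a try/catch and returns from the method
      def emitGuardedHello(il: ILGenerator) {
        il.BeginExceptionBlock()
        il.EmitWriteLine("hello from MSIL")
        il.BeginCatchBlock(Type.GetType("System.Exception"))
        il.EmitWriteLine("caught an exception")
        il.EndExceptionBlock()
        il.Emit(OpCode.Ret)
      }
    }
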
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
deleted file mode 100644
index 0ed5e3f..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
+++ /dev/null
@@ -1,861 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies in MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.File
-import java.io.FileWriter
-import java.io.BufferedWriter
-import java.io.PrintWriter
-import java.io.IOException
-import java.util.Comparator
-
-import ch.epfl.lamp.compiler.msil._
-import ch.epfl.lamp.compiler.msil.util.Table
-
-/**
- * The MSIL printer visitor. It prints a complete assembly into a single
- * file or into multiple files, which can then be compiled by ilasm.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-abstract class ILPrinterVisitor extends Visitor {
-
-    import ILPrinterVisitor._
-    import OpCode._
-
-    //##########################################################################
-
-    protected final val assemblyNameComparator =
-        new scala.math.Ordering[Assembly]() {
-            override def compare(o1: Assembly, o2: Assembly): Int =
-                o1.GetName().Name.compareTo(o2.GetName().Name)
-        }
-
-    // the output file writer
-    protected var out: PrintWriter = null
-
-    // the left margin
-    private var lmargin = 0
-
-    // indicate a newline
-    private var newline = true
-
-    // print types without or with members?
-    protected var nomembers: Boolean = false
-
-    // external assemblies
-    protected var as: Array[Assembly] = null
-
-    private def align() {
-	if (newline)
-	    padding = lmargin
-	printPadding()
-	newline = false
-    }
-    private def indent() {
-	lmargin += TAB
-    }
-    private def undent() {
-	lmargin -= TAB
-	assert(lmargin >= 0)
-    }
-
-    private var padding = 0
-    private def pad(n: Int) {
-	assert(n >= 0, "negative padding: " + n)
-        padding += n
-    }
-    private def printPadding() {
-	if (padding <= 0)
-	    return
-	while (padding > SPACES_LEN) {
-	    out.print(SPACES)
-	    padding -= SPACES_LEN
-	}
-        out.print(SPACES.substring(0, padding))
-        padding = 0
-    }
-
-    // methods to print code
-    protected def print(s: String)  { align(); out.print(s)}
-    protected def print(o: Object)  { align(); out.print(o) }
-    protected def print(c: Char)    { align(); out.print(c) }
-    protected def print(`val`: Int) { align(); out.print(`val`)}
-    protected def print(`val`: Long){ align(); out.print(`val`)}
-    protected def println()         { out.println(); newline = true; padding = 0 }
-    protected def println(c: Char)  { print(c); println() }
-    protected def println(i: Int)   { print(i); println() }
-    protected def println(l: Long)  { print(l); println() }
-    protected def println(s: String){ print(s); println() }
-    protected def println(o: Object){ print(o); println() }
-    protected def printName(name: String) {
-	var ch = name.charAt(0)
-	//if (Character.isLetter(ch) && Character.isLowerCase(ch)) {
-	if ((ch != '.') && (ch != '!')) {
-	    print('\''); print(name); print('\'')
-	} else
-	    print(name)
-    }
-
-    protected def printAssemblyBoilerplate() {
-	// print all the external assemblies
-	for (j <- 0 until as.length) {
-	    printAssemblySignature(as(j), true)
-	}
-	// print assembly declaration
-	printAssemblySignature(currAssembly, false)
-    }
-
-    // the entrypoint method
-    protected var entryPoint: MethodInfo = null
-
-    // current opcode argument
-    protected var argument: Object = null
-
-    /** Applies this visitor to the given visitable element, if non-null. */
-    @throws(classOf[IOException])
-    protected def print(vAble: Visitable) {
-	if (vAble != null)
-	    vAble.apply(this)
-    }
-
-    /**
-     * Visit an AssemblyBuilder
-     */
-    @throws(classOf[IOException])
-    def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder)
-
-    protected var currentModule: Module = null
-    /**
-     * Visit a ModuleBuilder
-     */
-    @throws(classOf[IOException])
-    def caseModuleBuilder(module: ModuleBuilder)
-
-    protected var currentType: Type = null
-
-  def printTypeParams(sortedTVars : Array[GenericParamAndConstraints]) {
-
-    def constraintFlags(tVar : GenericParamAndConstraints) = {
-      val varianceDirective = (if (tVar.isCovariant) "+ " else (if (tVar.isContravariant) "- " else ""))
-      val typeKindDirective = (if (tVar.isReferenceType) "class " else (if (tVar.isValueType) "valuetype " else ""))
-      val dfltConstrDirective = (if (tVar.hasDefaultConstructor) ".ctor " else "")
-      varianceDirective + typeKindDirective + dfltConstrDirective
-    }
-
-    def tparamName(tVar : GenericParamAndConstraints) = {
-     /* TODO Type-params in referenced assemblies may lack a name (those in a TypeBuilder or MethodBuilder shouldn't).
-        Given that we need not list (in ilasm syntax) the original type-params' names when
-         providing type arguments to it, the only type-param-names we'll serialize into a .msil file
-         are those for type-params in a TypeBuilder or MethodBuilder. Still, more details on this
-         appear in Sec. 4.5 "Faulty metadata in XMLReaderFactory" of
-         http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/Libs4Lib.pdf
-
-        To avoid name clashes when choosing a param name,
-        first collect all existing tparam-names from a type (and its nested types).
-        Not that those names are needed (ordinal positions can be used instead)
-        but will look better when disassembling with ildasm. */
-      assert(tVar.Name != null)
-      tVar.Name
-    }
-
-    if(sortedTVars.length == 0) { return }
-    print('<')
-    val lastIdx = sortedTVars.length - 1
-    for (it <- 0 until sortedTVars.length) {
-      val tVar = sortedTVars(it)
-      print(constraintFlags(tVar))
-      if(tVar.Constraints.length > 0) {
-        print('(')
-        val lastCnstrtIdx = tVar.Constraints.length - 1
-        for (ic <- 0 until tVar.Constraints.length) {
-          val cnstrt = tVar.Constraints(ic)
-          printReference(cnstrt)
-          if (ic < lastCnstrtIdx) { print(", ") }
-        }
-        print(')')
-      }
-      print(" " + tparamName(tVar))
-      if (it < lastIdx) { print(", ") }
-    }
-    print('>')
-  }
-
-    /**
-     * Visit a TypeBuilder
-     */
-    @throws(classOf[IOException])
-    def caseTypeBuilder(`type`: TypeBuilder) {
-	currentType = `type`
-	if (!`type`.Namespace.equals("") && `type`.DeclaringType == null) {
-	    print(".namespace \'" ); print(`type`.Namespace); println("\'")
-	    println("{"); indent()
-	}
-	print(".class ")
-	// <classHead> ::=
-	// <classAttr>* <id>
-	// [extends <typeReference>]
-	// [implements <typeReference> [, <typeReference>]*]
-	print(TypeAttributes.toString(`type`.Attributes))
-	print(" \'"); print(`type`.Name); print("\'")
-    printTypeParams(`type`.getSortedTVars())
-	if (`type`.BaseType() != null) {
-	    println()
-	    print("       extends    ")
-	    printReference(`type`.BaseType())
-	}
-	var ifaces: Array[Type] = `type`.getInterfaces()
-	if (ifaces.length > 0) {
-	    println()
-	    print("       implements ")
-        for (i <- 0 until ifaces.length) {
-		if (i > 0) {
-		    println(",")
-		    print("                  ")
-		}
-		printReference(ifaces(i))
-	    }
-	}
-	println()
-	println("{")
-	indent()
-	if (!nomembers && `type`.sourceFilename != null)
-	    println(".line  " + `type`.sourceLine
-		    + "  '" + `type`.sourceFilename + "'")
-        if (!nomembers) {
-            printAttributes(`type`)
-        }
-	// print nested classes
-    val nested = `type`.nestedTypeBuilders.iterator
-    while(nested.hasNext)
-      print(nested.next().asInstanceOf[TypeBuilder])
-
-	// print each field
-    val fields = `type`.fieldBuilders.iterator
-    while(fields.hasNext)
-	  print(fields.next().asInstanceOf[FieldBuilder])
-
-	// print each constructor
-	val constrs = `type`.constructorBuilders.iterator
-	while (constrs.hasNext)
-	    print(constrs.next().asInstanceOf[ConstructorBuilder])
-
-	// print each method
-	val methods = `type`.methodBuilders.iterator
-	while (methods.hasNext) {
-	    val method = methods.next().asInstanceOf[MethodBuilder]
-	    assert(method.DeclaringType == `type`)
-	    print(method)
-	}
-
-	undent(); println("}")
-	if (!`type`.Namespace.equals("") && `type`.DeclaringType == null) {
-	    undent(); println("}")
-	}
-	currentType = null
-    }
-
-    /**
-     * Visit a FieldBuilder
-     */
-    @throws(classOf[IOException])
-    def caseFieldBuilder(field: FieldBuilder) {
-        if (nomembers) return
-	// [[int32]] <fieldAttr>* <type> <id> [= <fieldInit> | at <dataLabel>]
-	print(".field ")
-	print(FieldAttributes.toString(field.Attributes))
-	print(" "); printSignature(field.FieldType, field.cmods)
-	print(" \'"); print(field.Name); print("\'")
-	if (field.IsLiteral()) {
-	    print(" = ")
-	    val value = field.getValue()
-	    if (value == null) {
-		print("nullref")
-	    } else if (value.isInstanceOf[String]) {
-		print(msilString(value.asInstanceOf[String]))
-	    } else if (value.isInstanceOf[Boolean]) {
-		print("bool (")
-		print(if((value.asInstanceOf[Boolean]).booleanValue()) { "true" } else { "false" })
-		print(")")
-	    } else if (value.isInstanceOf[Byte]) {
-		print("int8 (")
-		print(value)
-		print(")")
-	    } else if (value.isInstanceOf[java.lang.Short]) {
-		print("int16 (")
-		print(value)
-		print(")")
-	    } else if (value.isInstanceOf[Character]) {
-		print("char (")
-		print((value.asInstanceOf[Character]).charValue())
-		print(")")
-	    } else if (value.isInstanceOf[Integer]) {
-		print("int32 (")
-		print((value.asInstanceOf[Integer]).intValue())
-		print(")")
-	    } else if (value.isInstanceOf[Long]) {
-		print("int64 (")
-		print((value.asInstanceOf[Long]).longValue())
-		print(")")
-	    } else if (value.isInstanceOf[Float]) {
-        print(msilSyntaxFloat(value.asInstanceOf[Float]))
-	    } else if (value.isInstanceOf[Double]) {
-        print(msilSyntaxDouble(value.asInstanceOf[Double]))
-	    } else {
-		throw new Error("ILPrinterVisitor: Illegal default value: "
-				+ value.getClass())
-	    }
-	}
-	println()
-        printAttributes(field)
-    }
-
-    def msilSyntaxFloat(valFlo: java.lang.Float) : String = {
-      // !!! check if encoding is correct
-      val bits = java.lang.Float.floatToRawIntBits(valFlo.floatValue())
-      /* see p. 170 in Lidin's book Expert .NET 2.0 IL Assembler */
-      /* Note: no value is equal to NaN, not even NaN itself. Thus, x == Float.NaN always evaluates to false. */
-      val res = if (valFlo.isNaN) "0xFFC00000 /* NaN */ " /* TODO this is a 'quiet' NaN, http://www.savrola.com/resources/NaN.html ; what is the difference with a 'signaling' NaN? */
-                else if (java.lang.Float.NEGATIVE_INFINITY == valFlo.floatValue) "0xFF800000 /* NEGATIVE_INFINITY */ "
-                else if (java.lang.Float.POSITIVE_INFINITY == valFlo.floatValue) "0x7F800000 /* POSITIVE_INFINITY */ "
-                else bits
-      "float32 (" + res + ")"
-    }
-
-  def msilSyntaxDouble(valDou: java.lang.Double) : String = {
-    // !!! check if encoding is correct
-    var bits = java.lang.Double.doubleToRawLongBits(valDou.doubleValue())
-    /* see p. 170 in Lidin's book Expert .NET 2.0 IL Assembler */
-    /* Note: no value is equal to NaN, not even NaN itself. Thus, x == Double.NaN always evaluates to false. */
-    val res = if (valDou.isNaN) "0xffffffffffffffff /* NaN */ " /* TODO this is a 'quiet' NaN, http://www.savrola.com/resources/NaN.html ; what is the difference with a 'signaling' NaN? */
-              else if (java.lang.Double.NEGATIVE_INFINITY == valDou.doubleValue) "0xfff0000000000000 /* NEGATIVE_INFINITY */ "
-              else if (java.lang.Double.POSITIVE_INFINITY == valDou.doubleValue) "0x7ff0000000000000 /* POSITIVE_INFINITY */ "
-              else bits
-    // float64(float64(...)) != float64(...)
-    "float64 (" + res + ")"
-  }
-
-    /**
-     * Visit a ConstructorBuilder
-     */
-    @throws(classOf[IOException])
-    def caseConstructorBuilder(constr: ConstructorBuilder) {
-        if (nomembers) return
-	print(".method "); printHeader(constr, VOID)
-	println(); println("{"); indent()
-        printAttributes(constr)
-	try {
-	    print(constr.GetILGenerator())
-	} catch {
-        case e : RuntimeException => {
-	       System.err.println("In method " + constr)
-	       e.printStackTrace()
-        }
-	}
-	undent(); println("}")
-    }
-
-    /**
-     * Visit a MethodBuilder
-     */
-    @throws(classOf[IOException])
-    def caseMethodBuilder(method: MethodBuilder) {
-        if (nomembers) return
-	print(".method "); printHeader(method, method.ReturnType)
-	if (method.IsAbstract()
-	    || (method.DeclaringType != null
-		&& method.DeclaringType.IsInterface()
-		&& !method.IsStatic()))
-	    {
-		println(" {"); indent()
-                printAttributes(method)
-                undent(); println("}")
-	    } else {
-		println(); println("{"); indent()
-                printAttributes(method)
-		if (method == entryPoint)
-		    println(".entrypoint")
-		try {
-		    print(method.GetILGenerator())
-		} catch {
-            case e: RuntimeException =>
-		      System.err.println("In method " + method)
-		      e.printStackTrace()
-		}
-		undent(); println("}")
-	    }
-    }
-
-    /**
-     * Visit a ParameterBuilder
-     */
-    @throws(classOf[IOException])
-    def caseParameterBuilder(param: ParameterBuilder) {
-	print(ParameterAttributes.toString(param.Attributes))
-	printSignature(param.ParameterType)
-	//print(' ') print(marshal)
-	print(' '); printName(param.Name)
-    }
-
-  var locals: Array[LocalBuilder] = null
-    /**
-     * Visit an ILGenerator
-     */
-    @throws(classOf[IOException])
-    def caseILGenerator(code: ILGenerator) {
-	// print maxstack
-	println(".maxstack   " + code.getMaxStacksize())
-	// get the local variables
-	locals = code.getLocals()
-	if (locals.length > 0) {
-	    println(".locals init (")
-	    indent()
-        for (i <- 0 until locals.length) {
-		if (i > 0) println(",")
-		print(locals(i))
-	    } // end while
-	    undent()
-	    println(")")
-	}
-	// get 3 iterators for the 3 lists
-	val itL = code.getLabelIterator()
-	val itO = code.getOpcodeIterator()
-	val itA = code.getArgumentIterator()
-	// iterate over each opcode
-	while (itO.hasNext) {
-	    // first print label
-	    val label = itL.next
-      val oOpt = code.lineNums.get(label)
-      if (oOpt.isDefined) {
-        println(".line       " + oOpt.get)
-      }
-	    argument = itA.next.asInstanceOf[Object]
-	    printLabel(label)
-            val o2 = itO.next
-            if (o2 != null) {
-                print("   ")
-                print(o2.asInstanceOf[OpCode])
-            }
-	    println()
-	} // end while
-    }
-
-    /**
-     * visit an OpCode
-     */
-    @throws(classOf[IOException])
-    def caseOpCode(opCode: OpCode) {
-	var opString = opCode.toString()
-	print(opString)
-	pad(14 - opString.length())
-
-	// switch opcode
-        if (opCode == OpCode.Ldstr) {
-            print(msilString(argument.toString()))
-        } else if(opCode == OpCode.Switch) {
-	    // switch ( <labels> )
-	    print("(")
-	    val targets = argument.asInstanceOf[Array[Label]]
-            val m = targets.length
-            for (i <- 0 until m) {
-	        if (i != 0) print(", ")
-		print(targets(i))
-	    } // end for
-	    print(")")
-        } else if(opCode == OpCode.Call || opCode == OpCode.Callvirt || opCode == OpCode.Jmp || opCode == OpCode.Ldftn || opCode == OpCode.Ldvirtftn) {
-        // call  | callvirt | jmp | ldftn | ldvirtftn
-        // <instr_method> <callConv> <type> [ <typeSpec> :: ] <methodName>
-	    printSignature(argument.asInstanceOf[MethodBase])
-        } else if (opCode == OpCode.Newobj) {
-	    printSignature(argument.asInstanceOf[ConstructorInfo])
-    // ldfld | ldflda | ldsfld | ldsflda | stfld | stsfld
-        } else if (opCode == OpCode.Ldfld || opCode == OpCode.Ldflda || opCode == OpCode.Ldsfld || opCode == OpCode.Ldsflda || opCode == OpCode.Stfld || opCode == OpCode.Stsfld) {
-	    printSignature(argument.asInstanceOf[FieldInfo])
-        } else if (opCode == OpCode.Castclass || opCode == OpCode.Isinst || opCode == OpCode.Ldobj || opCode == OpCode.Newarr) {
-	    printSignature(argument.asInstanceOf[Type])
-    } else if (opCode == OpCode.Box || opCode == OpCode.Unbox || opCode == OpCode.Ldtoken || opCode == OpCode.Initobj) {
-	    printReference(argument.asInstanceOf[Type])
-        } else if (opCode == OpCode.Ldloc || opCode == OpCode.Ldloc_S || opCode == OpCode.Ldloca || opCode == OpCode.Ldloca_S || opCode == OpCode.Stloc || opCode == OpCode.Stloc_S) {
-	    val loc = argument.asInstanceOf[LocalBuilder]
-	    print(loc.slot); print("\t// "); printSignature(loc.LocalType)
-	    print(" \'"); print(loc.name); print("\'")
-	    //print("'") print(((LocalBuilder)argument).name) print("'")
-    } else if (opCode == OpCode.Ldloc_0 || opCode == OpCode.Ldloc_1 || opCode == OpCode.Ldloc_2 || opCode == OpCode.Ldloc_3 ) {
-          val loc = locals(opCode.CEE_opcode - OpCode.CEE_LDLOC_0)
-          print("\t// "); printSignature(loc.LocalType)
-          print(" \'"); print(loc.name); print("\'")
-    } else if (opCode == OpCode.Stloc_0 || opCode == OpCode.Stloc_1 || opCode == OpCode.Stloc_2 || opCode == OpCode.Stloc_3 ) {
-          val loc = locals(opCode.CEE_opcode - OpCode.CEE_STLOC_0)
-          print("\t// "); printSignature(loc.LocalType)
-          print(" \'"); print(loc.name); print("\'")
-    } else if (opCode == OpCode.Readonly) {
-      // nothing to do
-    } else if (opCode == OpCode.Constrained) {
-      printReference(argument.asInstanceOf[Type])
-    } else if (opCode == OpCode.Ldelema) {
-      printReference(argument.asInstanceOf[Type])
-        } else {
-	    // by default print toString argument if any
-	    if (argument != null) {
-        val strArgument = java.lang.String.valueOf(argument)
-        if (         argument.isInstanceOf[java.lang.Float]
-                  && (   strArgument.equals("NaN")
-                      || strArgument.equals("-Infinity")
-                      || strArgument.equals("Infinity")))
-                print(msilSyntaxFloat(argument.asInstanceOf[java.lang.Float]))
-        else if (    argument.isInstanceOf[java.lang.Double]
-                  && (   strArgument.equals("NaN")
-                      || strArgument.equals("-Infinity")
-                      || strArgument.equals("Infinity")))
-                print(msilSyntaxDouble(argument.asInstanceOf[java.lang.Double]))
-        else print(strArgument)
-      }
-
-	} // end switch
-    }
-
-    /**
-     * Visit a Label
-     */
-    def printLabel(label: Label) {
-       val kind = label.getKind()
-       if (kind == Label.Kind.Normal) {
-          print(label+ ": ")
-       } else if (kind == Label.Kind.NewScope) {
-          print("{"); indent()
-       } else if (kind == Label.Kind.EndScope) {
-	  undent(); print("}")
-       } else if (kind == Label.Kind.Try) {
-          print(".try {"); indent()
-       } else if (kind == Label.Kind.Catch) {
-	  undent()
-	  println("}")
-	  print("catch ")
-	  printReference(argument.asInstanceOf[Type])
-	  print(" {")
-	  indent()
-       } else if (kind == Label.Kind.Filter) {
-	  undent()
-	  println("}")
-	  print("filter {")
-	  indent()
-       } else if (kind == Label.Kind.EndFilter) {
-	  print("endfilter")
-	  undent()
-	  println("}")
-       } else if (kind == Label.Kind.Finally) {
-	  undent()
-	  println("}")
-	  print("finally {")
-	  indent()
-       } else if (kind == Label.Kind.EndTry) {
-	  undent()
-	  print("}")
-       }
-    }
-
-    /**
-     * Visit a LocalBuilder
-     */
-    @throws(classOf[IOException])
-    def caseLocalBuilder(localBuilder: LocalBuilder) {
-	// print type
-	printSignature(localBuilder.LocalType)
-	// space
-	print(" \'")
-	// print name
-	print(localBuilder.name)
-	print("\'")
-    }
-
-
-    //##########################################################################
-
-    def printAssemblySignature(assem: Assembly, extern: Boolean) {
-        print(".assembly ")
-        if (extern)
-            print("extern ")
-        val an = assem.GetName()
-	printName(an.Name); println()
-	println("{")
-        if (!extern)
-            printAttributes(assem)
-	val v = an.Version
-	if (v != null) {
-	    print("    .ver "); print(v.Major); print(':'); print(v.Minor)
-	    print(':'); print(v.Build); print(':')
-	    print(v.Revision); println()
-	}
-	var key = an.GetPublicKeyToken()
-	if (key != null) {
-	    print("    .publickeytoken = ("); print(PEFile.bytes2hex(key))
-	    println(")")
-	} else {
-	    key = an.GetPublicKey()
-	    if (key != null) {
-		print("    .publickey = ("); print(PEFile.bytes2hex(key))
-		println(")")
-	    }
-	}
-	println("}")
-    }
-
-
-  def printSignature(field: FieldInfo) {
-    printSignature(field.FieldType, field.cmods)
-    //print(' ') print(owner)
-    print(' ')
-    //if (field.IsStatic && field.DeclaringType != currentType) {
-    printReference(field.DeclaringType)
-    print("::")
-    //}
-    printName(field.Name)
-  }
-
-    // print method head
-    @throws(classOf[IOException])
-    def printHeader(method: MethodBase, returnType: Type) {
-	print(MethodAttributes.toString(method.Attributes))
-	print(' '); print(CallingConventions.toString(method.CallingConvention))
-	print(' '); printSignature(returnType)
-	//print(' ') print(marshal)
-	print(' '); printName(method.Name)
-    if(method.isInstanceOf[MethodInfo]) {
-      val mthdInfo = method.asInstanceOf[MethodInfo]
-      printTypeParams(mthdInfo.getSortedMVars())
-    }
-	val params = method.GetParameters()
-	print('(')
-	for (i <- 0 until params.length) {
-	    if (i > 0) print(", ")
-	    print(params(i).asInstanceOf[ParameterBuilder])
-	}
-	print(") ")
-
-	print(MethodImplAttributes
-	      .toString(method.GetMethodImplementationFlags()))
-    }
-
-
-    def printSignature(method: MethodBase) {
-	var returnType: Type = null
-	if (method.isInstanceOf[MethodInfo])
-	    returnType = (method.asInstanceOf[MethodInfo]).ReturnType
-	else if (method.isInstanceOf[ConstructorInfo])
-	    returnType = VOID
-	else
-	    throw new RuntimeException()
-
-	val s = CallingConventions.toString(method.CallingConvention)
-	print(s)
-	if (s.length() > 0) print(' ')
-	printSignature(returnType)
-	//print(' ') print(owner)
-	print(' '); printReference(method.DeclaringType)
-	print("::"); printName(method.Name)
-
-	var params = method.GetParameters()
-	print("(")
-	for (i <- 0 until params.length) {
-	    if (i > 0) print(", ")
-	    printSignature(params(i).ParameterType)
-	}
-	print(")")
-    }
-
-  def printSignature(marked: Type, cmods: Array[CustomModifier]) {
-    printSignature(marked)
-    if( (cmods != null) && !cmods.isEmpty ) {
-      print(" ")
-      for(cm <- cmods) {
-        print(if (cm.isReqd) "modreq( " else "modopt( ")
-        printReference(cm.marker)
-        print(" ) ")
-      }
-    }
-  }
-
-  def printSignature(`type`: Type) {
-      val sigOpt = primitive.get(`type`)
-      if (sigOpt.isDefined) {
-          print(sigOpt.get)
-	    return
-	}
-	if (`type`.HasElementType()) {
-	    printSignature(`type`.GetElementType())
-	    if (`type`.IsArray())
-		print("[]")
-	    else if (`type`.IsPointer())
-		print('*')
-	    else if (`type`.IsByRef())
-		print('&')
-	} else {
-          val preref = if (`type`.isInstanceOf[Type.TMVarUsage]) ""
-                       else if(`type`.IsValueType()) "valuetype "
-                       else "class "
-          print(preref)
-	    printReference(`type`)
-	}
-    }
-
-    def printReference(`type`: Type) {
-      if (`type`.Module != null) { // i.e. not PrimitiveType and not TMVarUsage
-	if (`type`.Assembly() != currentModule.Assembly) {
-	    print('['); print(`type`.Assembly().GetName().Name); print("]")
-	} else if (`type`.Module != currentModule) {
-	    print("[.module "); print(`type`.Module.Name); print("]")
-	}
-      }
-	printTypeName(`type`)
-    }
-
-    def printTypeName(`type`: Type) {
-    if (`type`.isInstanceOf[ConstructedType]) {
-      val ct = `type`.asInstanceOf[ConstructedType]
-        printTypeName(ct.instantiatedType)
-      print("<")
-      var i = 0
-      while (i < ct.typeArgs.length) {
-        val ta = ct.typeArgs(i)
-          val sigOpt = primitive.get(ta)
-          if (sigOpt.isDefined) print(sigOpt.get)
-          else printTypeName(ta); /* should be printSignature, but don't want `class` or `valuetype`
-        appearing before a type param usage. */
-        i = i + 1;
-        if (i < ct.typeArgs.length) {
-          print(", ")
-        }
-      }
-      print(">")
-    } else if (`type`.DeclaringType != null) {
-	    printTypeName(`type`.DeclaringType)
-	    print('/')
-	    printName(`type`.Name)
-	} else {
-	    printName(`type`.FullName)
-    }
-    }
-
-    def printAttributes(icap: ICustomAttributeProvider) {
-        var attrs = icap.GetCustomAttributes(false)
-        for (i <- 0 until attrs.length) {
-            print(".custom ")
-            printSignature((attrs(i).asInstanceOf[Attribute]).getConstructor())
-            print(" = (")
-            print(PEFile.bytes2hex((attrs(i).asInstanceOf[Attribute]).getValue()))
-            println(")")
-        }
-    }
-
-    //##########################################################################
-
-}  // class ILPrinterVisitor
-
-object ILPrinterVisitor {
-    final val VOID: Type = Type.GetType("System.Void")
-    protected final val TAB = 4
-
-    protected final val SPACES = "                                "
-    protected final val SPACES_LEN = SPACES.length()
-
-    def hasControlChars(str: String): Boolean = {
-    for(i <- 0 until str.length()) {
-        var ch = str.charAt(i)
-        ch match {
-          case '\b' =>
-          case '\t' =>
-          case '\n' =>
-          case '\f' =>
-          case '\r' =>
-          case _    => if(Character.isISOControl(ch)) return true
-        }
-    }
-    return false
-    }
-
-    final val EMPTY: String = ""
-    def msilString(s: String): String = {
-    if (hasControlChars(s)) {
-        try {
-        return "bytearray (" + PEFile.bytes2hex(s.getBytes("UTF-16LE")) + ")"
-        } catch {
-          case e : java.io.UnsupportedEncodingException => throw new RuntimeException(e)
-        }
-    }
-    var str = new StringBuffer(s)
-    var ss = EMPTY
-    var i = 0
-    while(i < str.length()) {
-        ss = EMPTY
-        val c = str.charAt(i)
-        c match {
-          case '\b' => ss = "\\b"
-          case '\t' => ss = "\\t"
-          case '\n' => ss = "\\n"
-          case '\f' => ss = "\\f"
-          case '\r' => ss = "\\r"
-          case '\"' => ss = "\\\""
-          case '\'' => ss = "\\\'"
-          case '\\' => ss = "\\\\"
-          case  _   => if (Character.isISOControl(c))
-                         ss = "\\u" + PEFile.int2hex(Character.getNumericValue(c))
-        }
-        if (ss != EMPTY) {
-        str.replace(i, i + 1, ss)
-        i = i + ss.length() - 1
-        }
-        i = i + 1
-    }
-    return "\"" + str.toString() + "\""
-    }
-
-    /**
-     * the main printer method
-     */
-    @throws(classOf[IOException])
-    def printAssembly(assemblyBuilder: AssemblyBuilder, fileName: String) {
-      assemblyBuilder.apply(new SingleFileILPrinterVisitor(fileName))
-    }
-
-    @throws(classOf[IOException])
-    def printAssembly(assemblyBuilder: AssemblyBuilder, destPath: String, sourceFilesPath: String) {
-      assemblyBuilder.apply(new MultipleFilesILPrinterVisitor(destPath, sourceFilesPath))
-    }
-
-    /** The current assembly */
-    var currAssembly: Assembly = _
-
-    final var primitive = scala.collection.mutable.Map.empty[Type, String]
-    def addPrimitive(name: String, sig: String) {
-      var `type` =
-      Type.GetType(name)
-      assert(`type` != null, "Cannot lookup primitive type " + `type`)
-      primitive.put(`type`, sig)
-    }
-
-    addPrimitive("System.Object", "object")
-    addPrimitive("System.String", "string")
-    addPrimitive("System.Void", "void")
-    addPrimitive("System.Boolean", "bool")
-    addPrimitive("System.Char", "char")
-    addPrimitive("System.SByte", "int8")
-    addPrimitive("System.Byte", "unsigned int8")
-    addPrimitive("System.Int16", "int16")
-    addPrimitive("System.UInt16", "unsigned int16")
-    addPrimitive("System.Int32", "int32")
-    addPrimitive("System.UInt32", "unsigned int32")
-    addPrimitive("System.Int64", "int64")
-    addPrimitive("System.UInt64", "unsigned int64")
-    addPrimitive("System.IntPtr", "native int")
-    addPrimitive("System.UIntPtr", "unsigned native int")
-    addPrimitive("System.Single", "float32")
-    addPrimitive("System.Double", "float64")
-    addPrimitive("System.TypedReference", "typedref")
-}
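
For orientation: the two printAssembly overloads at the end of the object above are the public entry points into this printer, wrapping SingleFileILPrinterVisitor and MultipleFilesILPrinterVisitor respectively. A minimal, hedged sketch of how they would be called (the AssemblyBuilder `asm` is assumed to come from the rest of the msil emitter, which lies outside this hunk, and the output paths are placeholders):

    import ch.epfl.lamp.compiler.msil.emit.{AssemblyBuilder, ILPrinterVisitor}

    def emitIl(asm: AssemblyBuilder): Unit = {
      // whole assembly into a single .msil file (SingleFileILPrinterVisitor)
      ILPrinterVisitor.printAssembly(asm, "out/assembly.msil")
      // or one .msil file per Scala source file (MultipleFilesILPrinterVisitor)
      ILPrinterVisitor.printAssembly(asm, "out", "src")
    }

Both overloads simply construct the corresponding visitor and hand it the builder via apply.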
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala
deleted file mode 100644
index 22c1b11..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.Type
-
-/**
- * Represents a label in the instruction stream. Label is used in conjunction
- * with the ILGenerator class.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-abstract class Label protected {
-    import Label._
-    def isInitialized(): Boolean
-    def getKind(): Kind
-    def getAddress(): Int
-    def getStacksize(): Int
-    def setStacksize(stacksize: Int): Unit
-    def incStacksize(): Unit
-    def mergeWith(that: Label): Unit
-}
-
-object Label {
-    final val DUMMY: Int = -((1<<31)-1)
-
-    //##########################################################################
-
-    final class NormalLabel(_address: Int, _stacksize: Int) extends Label {
-
-        //##########################################################################
-        // protected constructors
-
-        //the position of the label
-        private var address: Int = _address
-
-        //the stacksize at the label
-        private var stacksize: Int = _stacksize
-
-	def this() {
-          this(-1, DUMMY)
-        }
-
-        def this(that: NormalLabel) {
-          this(that.getAddress(), that.getStacksize())
-        }
-
-        //##########################################################################
-        // instrumental methods only used by ILGenerator
-
-        def isInitialized() = (getAddress() != -1) || (stacksize != DUMMY)
-
-        def getAddress() = address
-
-        def getStacksize() = stacksize
-
-        def setStacksize(stacksize: Int) {
-            assert(stacksize >= 0)
-            this.stacksize = stacksize
-        }
-
-        def incStacksize() {
-            stacksize = stacksize + 1
-        }
-
-        def getKind(): Kind = Kind.Normal
-
-        def mergeWith(that: Label) {
-            //assert address < 0 : "this.address = " + address + " that.address = " + that.address
-            address = that.getAddress()
-
-            // 	assert stacksize == that.stacksize
-            // 	    : "this.stacksize = " + stacksize + " that.stacksize = "
-            // 	    + that.stacksize
-            // 	stacksize = that.stacksize
-            val ss: Int = math.max(stacksize, that.getStacksize())
-            stacksize = ss
-            that.setStacksize(ss)
-        }
-
-        //##########################################################################
-        //
-
-        /**
-         * the toString method returns the label name:
-         * "IL_" followed by the address in hex, zero-padded to at least four digits
-         */
-        override def toString(): String = {
-            var pad: String = ""
-            if (address < 16) pad = "000"
-            else if (address < 256) pad = "00"
-            else if (address < 4096) pad = "0"
-            return "IL_"  + pad + Integer.toHexString(address)
-        }
-
-        def getString(): String = {
-            val name = super.toString()
-            val i: Int = name.lastIndexOf('.')
-            return name.substring(i+1, name.length())
-        }
-    }
-
-    //########################################################################
-    // Special Labels
-
-    final class SpecialLabel(_kind: Label.Kind) extends Label {
-        private final var kind: Label.Kind = _kind
-        def isInitialized() = true
-        def getAddress(): Int = { throw new RuntimeException("" + kind.toString()) }
-        def getStacksize(): Int = { throw new RuntimeException("" + kind.toString()) }
-        def setStacksize(stacksize: Int) { throw new RuntimeException(kind.toString()) }
-        def incStacksize() { throw new RuntimeException(kind.toString()) }
-        def getKind(): Kind = kind
-        def mergeWith(that: Label) { throw new RuntimeException(kind.toString()) }
-        override def toString(): String = "Label(" + kind.toString() + ")"
-    }
-
-    final val NewScope: Label = new SpecialLabel(Kind.NewScope)
-    final val EndScope: Label = new SpecialLabel(Kind.EndScope)
-    final val Try: Label = new SpecialLabel(Kind.Try)
-    final val Catch: Label = new SpecialLabel(Kind.Catch)
-    final val Filter: Label = new SpecialLabel(Kind.Filter)
-    final val EndFilter: Label = new SpecialLabel(Kind.EndFilter)
-    final val Finally: Label = new SpecialLabel(Kind.Finally)
-    final val EndTry: Label = new SpecialLabel(Kind.EndTry)
-
-    final class Kind() {}
-
-    final object Kind {
-        final val Normal: Kind = new Kind()
-
-	final val NewScope: Kind = new Kind()
-	final val EndScope: Kind = new Kind()
-
-        final val Try: Kind = new Kind()
-        final val Catch: Kind = new Kind()
-        final val Filter: Kind = new Kind()
-        final val EndFilter: Kind = new Kind()
-        final val Finally: Kind = new Kind()
-        final val EndTry: Kind = new Kind()
-    }
-
-    //##########################################################################
-}
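
As an aside, NormalLabel.toString above pads short addresses so that labels read like IL_000a. A self-contained re-derivation of just that naming rule, with no dependency on the emitter:

    // Mirrors NormalLabel.toString: "IL_" + hex address, padded to at least four digits.
    def labelName(address: Int): String = {
      val pad =
        if (address < 16) "000"
        else if (address < 256) "00"
        else if (address < 4096) "0"
        else ""
      "IL_" + pad + Integer.toHexString(address)
    }

    // labelName(10) == "IL_000a"; labelName(300) == "IL_012c"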
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala
deleted file mode 100644
index 73bca46..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.Type
-
-/**
- * Represents a local variable within a method or constructor.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class LocalBuilder(_slot : Int, localType : Type) extends Visitable {
-
-    /**
-     * the type of the local variable.
-     */
-    var LocalType : Type = localType
-
-    // the name of the local variable; defaults to "L_" followed by the slot number
-    var name : String = "L_" + _slot
-
-    // the slot occupied by this local in the corresponding ILGenerator
-    var slot : Int = _slot
-
-    /**
-     * Sets the name of this local variable.
-     */
-    def SetLocalSymInfo(name : String) {
-	    this.name = name
-    }
-
-    override def toString() : String = name
-
-    /**
-     * the apply method for a visitor
-     */
-    def apply(v : Visitor) {
-	    v.caseLocalBuilder(this)
-    }
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
deleted file mode 100644
index 237d8fd..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.MethodInfo
-import ch.epfl.lamp.compiler.msil.ParameterInfo
-import ch.epfl.lamp.compiler.msil.Type
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-import java.io.IOException
-
-/**
- * Defines and represents a method of a dynamic class.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class MethodBuilder(name: String, declType: Type, attrs: Int, returnType: Type, paramTypes: Array[Type])
-      extends MethodInfo(name, declType, attrs, returnType, paramTypes)
-      with ICustomAttributeSetter
-      with Visitable
-{
-
-    //##########################################################################
-    // public interface
-
-    /** Defines a parameter of this method. TODO: Parameters are indexed starting
-     *  from number 1 for the first parameter.
-     */
-    def DefineParameter(pos: Int, attr: Int, name: String): ParameterBuilder = {
-	val param = new ParameterBuilder(name, params(pos).ParameterType, attr, pos)
-	params(pos) = param
-	return param
-    }
-
-    /** Returns an ILGenerator for this method. */
-    def GetILGenerator(): ILGenerator = {
-	if (ilGenerator == null)
-	    throw new RuntimeException
-		("No code generator available for this method: " + this)
-	return ilGenerator
-    }
-
-    /** Sets a custom attribute. */
-    def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
-	addCustomAttribute(constr, value)
-    }
-
-    //##########################################################################
-
-    /** The apply method for a visitor. */
-    @throws(classOf[IOException])
-    def apply(v: Visitor) {
-	v.caseMethodBuilder(this)
-    }
-
-    //##########################################################################
-
-    // the Intermediate Language Generator
-    // it contains the method's body
-    protected final val ilGenerator : ILGenerator =
-	  if (DeclaringType == null // global method
-	      || !DeclaringType.IsInterface())
-	      new ILGenerator(this)
-	  else null
-
-    //##########################################################################
-}
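
A hedged sketch of the GetILGenerator contract visible above: methods whose declaring type is an interface are created without an ILGenerator and therefore throw, while ordinary (and global) methods get one. Here `iface` and `impl` stand for TypeBuilders created elsewhere, `VOID` for a Type resolving System.Void, and the attribute value 0 is a placeholder:

    // Sketch only; every identifier except MethodBuilder and Type.EmptyTypes is an assumption.
    val abstractRun = new MethodBuilder("Run", iface, 0, VOID, Type.EmptyTypes)
    // abstractRun.GetILGenerator()        // would throw: interface methods carry no body
    val concreteRun = new MethodBuilder("Run", impl, 0, VOID, Type.EmptyTypes)
    val il = concreteRun.GetILGenerator()  // a fresh ILGenerator for the method body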
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala
deleted file mode 100644
index 981e855..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil._
-import java.io.IOException
-
-/**
- * Defines and represents a module. Get an instance of ModuleBuilder
- * by calling DefineDynamicModule.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class ModuleBuilder(name: String, fullname: String, scopeName: String, assembly: Assembly)
-      extends Module(name, fullname, scopeName, assembly)
-      with ICustomAttributeSetter
-      with Visitable
-{
-
-    //##########################################################################
-    // public interface
-
-    /**
-     * Complete the global function definitions for this dynamic module.
-     * This method should be called when the user is done with defining
-     * all of the global functions within this dynamic module. After calling
-     * this function, no more new global functions or new global data are
-     * allowed.
-     */
-    def CreateGlobalFunctions() {
-	if (globalsCreated)
-	    throw new RuntimeException("Global functions are already created")
-	this.fields = fieldBuilders.toArray // (fields).asInstanceOf[Array[FieldInfo]]
-	this.methods = methodBuilders.toArray //  (methods).asInstanceOf[Array[MethodInfo]]
-	globalsCreated = true
-    }
-
-    /**
-     * Constructs a TypeBuilder for a type with the specified name
-     */
-    def DefineType(typeName: String): TypeBuilder = {
-	return DefineType(typeName, 0, null, Type.EmptyTypes)
-    }
-
-    /**
-     * Constructs a TypeBuilder for a type with the specified name
-     * and specified attributes
-     */
-    def DefineType(typeName: String, attributes: Int): TypeBuilder = {
-	return DefineType(typeName, attributes, null, Type.EmptyTypes)
-    }
-
-    /**
-     * Constructs a TypeBuilder given type name, its attributes,
-     * and the type that the defined type extends.
-     */
-    def DefineType(typeName: String, attributes: Int,
-				  baseType: Type): TypeBuilder = {
-	return DefineType(typeName, attributes, baseType, Type.EmptyTypes)
-    }
-
-    /**
-     * Constructs a TypeBuilder given the full specification of a type:
-     * the type name, attributes, the type that the defined type
-     * extends, and the interfaces that the defined type implements.
-     */
-    def DefineType(typeName: String,
-				  attributes: Int,
-				  baseType: Type,
-				  interfaces: Array[Type]): TypeBuilder =
-    {
-	var t: Type = GetType(typeName) // Module.GetType(String)
-	if (t != null)
-	    throw new RuntimeException
-		("Type [" + Assembly + "]'" + typeName + "' already exists!")
-	val `type` =
-	    new TypeBuilder(this, attributes, typeName, baseType, interfaces, null)
-	addType(`type`)
-	return `type`
-    }
-
-    /**
-     * Defines a global method given its name, attributes, return type, and
-     * parameter types.
-     */
-    def DefineGlobalMethod(name: String, attributes: Int,
-					    returnType: Type, paramTypes: Array[Type]): MethodBuilder =
-    {
-	val method =
-	    new MethodBuilder(name, null, attributes, returnType, paramTypes)
-	methodBuilders += method
-	return method
-    }
-
-
-    override def GetTypes(): Array[Type] = {
-      val res = scala.collection.mutable.ArrayBuffer.empty[Type]
-      val iter = typesMap.values().iterator
-      while (iter.hasNext) {
-        res += iter.next.asInstanceOf[Type]
-    }
-	    return res.toArray
-    }
-
-    /** Sets a custom attribute. */
-    def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
-	addCustomAttribute(constr, value)
-    }
-
-    //##########################################################################
-    // internal members
-
-    var globalsCreated = false
-    protected var fieldBuilders = scala.collection.mutable.ArrayBuffer.empty[FieldInfo]
-    protected var methodBuilders = scala.collection.mutable.ArrayBuffer.empty[MethodInfo]
-
-    override def addType(t: Type): Type = {
-	return super.addType(t)
-    }
-
-    //##########################################################################
-
-    /**
-     * the apply method for a visitor
-     */
-    @throws(classOf[IOException])
-    def apply(v: Visitor) {
-	v.caseModuleBuilder(this)
-    }
-
-    //##########################################################################
-}
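
The DefineType overloads above all funnel into the four-argument form, which also rejects duplicate type names. A hedged usage sketch (`module` is a ModuleBuilder obtained from the assembly outside this hunk, `baseType` an existing Type, and 0 stands in for real TypeAttributes flags):

    val program = module.DefineType("Hello.Program")                             // name only
    val derived = module.DefineType("Hello.Derived", 0, baseType)                // name, attrs, base type
    val full    = module.DefineType("Hello.Full", 0, baseType, Type.EmptyTypes)  // plus interfaces
    // Defining "Hello.Program" a second time throws, since DefineType checks GetType(typeName) first.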
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
deleted file mode 100644
index 55c5210..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies in MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.File
-import java.io.FileWriter
-import java.io.BufferedWriter
-import java.io.PrintWriter
-import java.io.IOException
-import java.util.Iterator
-import java.util.Arrays
-
-import ch.epfl.lamp.compiler.msil._
-import ch.epfl.lamp.compiler.msil.emit
-import ch.epfl.lamp.compiler.msil.util.Table
-
-/**
- * The MSIL printer Visitor. It prints a complete
- * assembly into separate files. Then these files can be compiled by ilasm.
- *
- * @author Nikolay Mihaylov
- * @author Daniel Lorch
- * @version 1.0
- */
-final class MultipleFilesILPrinterVisitor(destPath: String, sourceFilesPath: String) extends ILPrinterVisitor {
-   /**
-     * Visit an AssemblyBuilder
-     */
-    @throws(classOf[IOException])
-    def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder) {
-	ILPrinterVisitor.currAssembly = assemblyBuilder
-
-	// first get the entryPoint
-	this.entryPoint = assemblyBuilder.EntryPoint
-
-	// all external assemblies
-	as = assemblyBuilder.getExternAssemblies()
-	scala.util.Sorting.quickSort(as)(assemblyNameComparator)  // Arrays.sort(as, assemblyNameComparator)
-
-	// print each module
-	var m: Array[Module] = assemblyBuilder.GetModules()
-        nomembers = true
-        for(i <- 0 until m.length) {
-	    print(m(i).asInstanceOf[ModuleBuilder])
-	}
-
-        nomembers = false
-        for(i <- 0 until m.length) {
-	    print(m(i).asInstanceOf[ModuleBuilder])
-	}
-	ILPrinterVisitor.currAssembly = null
-    }
-
-    /**
-     * Visit a ModuleBuilder
-     */
-    @throws(classOf[IOException])
-    def caseModuleBuilder(module: ModuleBuilder) {
-    val assemblyBuilder = ILPrinterVisitor.currAssembly.asInstanceOf[AssemblyBuilder]
-
-	// print module declaration
-	currentModule = module
-
-	// global methods typically contain the main method
-	if (!module.globalsCreated)
-	    module.CreateGlobalFunctions()
-
-	var m: Array[MethodInfo] = module.GetMethods()
-
-	// "Types" contain all the classes
-	var t: Array[Type] = module.GetTypes()
-        for(i <- 0 until t.length) {
-        val tBuilder       = t(i).asInstanceOf[TypeBuilder]
-        val sourceFilename = tBuilder.sourceFilename
-        val sourceFilepath = new File(tBuilder.sourceFilepath).getCanonicalPath
-        val sourcePath     = new File(sourceFilesPath).getCanonicalPath
-		var append         = false
-
-        if(!sourceFilepath.startsWith(sourcePath)) {
-            throw new IOException("Source file " + sourceFilename + " must lie inside sourcepath " + sourcePath)
-        }
-
-        assert(sourceFilepath.endsWith(".scala"), "Source file doesn't end with .scala")
-        val relativeFilename = sourceFilepath.substring(sourcePath.length, sourceFilepath.length() - 6) + ".msil"
-        val fileName         = new File(destPath, relativeFilename)
-        if(assemblyBuilder.generatedFiles.contains(fileName.getPath)) {
-            append = true
-        } else {
-            fileName.getParentFile().mkdirs()
-            assemblyBuilder.generatedFiles += (fileName.getPath)
-        }
-
-	    out = new PrintWriter(new BufferedWriter(new FileWriter(fileName, append)))
-		// only write assembly boilerplate and class prototypes
-		if (!append && nomembers) {
-			printAssemblyBoilerplate()
-
-			print(".module \'"); print(module.Name); println("\'")
-		    printAttributes(module)
-        }
-
-	    print(t(i).asInstanceOf[TypeBuilder])
-	    out.close()
-	}
-
-    // now write the global methods (typically containing the "main" method)
-	if(!nomembers) {
-       var globalMethods: File = new File(destPath, ILPrinterVisitor.currAssembly.GetName().Name + ".msil")
-       val append = assemblyBuilder.generatedFiles.contains(globalMethods.getPath)
-
-		out = new PrintWriter(new BufferedWriter(new FileWriter(globalMethods, append)))
-
-        // make sure we're the first in the list (ilasm uses the first file name to guess the output file name)
-        assemblyBuilder.generatedFiles.insert(0, globalMethods.getPath)
-
-		// if this file hasn't been created by one of the classes, write boilerplate
-		if(!append) {
-			printAssemblyBoilerplate()
-
-			print(".module \'"); print(module.Name); println("\'")
-		    printAttributes(module)
-		}
-
-                for(i <- 0 until m.length) {
-	   		print(m(i).asInstanceOf[MethodBuilder])
-		}
-
-		out.close()
-	}
-
-	currentModule = null
-    }
-
-}  // class MultipleFilesILPrinterVisitor
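
The per-type file placement in caseModuleBuilder boils down to: strip the source-root prefix, swap the .scala suffix for .msil, and resolve the result under destPath. A self-contained re-derivation of that mapping (hypothetical helper name; the real code additionally canonicalizes both paths and appends to files it has already generated):

    import java.io.File

    def msilTarget(destPath: String, sourceRoot: String, sourceFile: String): File = {
      require(sourceFile.startsWith(sourceRoot), "source file must lie inside the source path")
      require(sourceFile.endsWith(".scala"), "source file doesn't end with .scala")
      // drop the root prefix and the 6-character ".scala" suffix, then append ".msil"
      val relative = sourceFile.substring(sourceRoot.length, sourceFile.length - 6) + ".msil"
      new File(destPath, relative)
    }

    // msilTarget("/out", "/src", "/src/foo/Bar.scala") yields /out/foo/Bar.msil on Unix-like filesystems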
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
deleted file mode 100644
index b0c2688..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
+++ /dev/null
@@ -1,1948 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.IOException
-
-/** Describes a Microsoft intermediate language (MSIL) instruction.
- *
- *  @author  Nikolay Mihaylov
- *  @version 1.0
- */
-class OpCode extends Visitable {
-    import OpCode._
-
-    /** The Operation Code of Microsoft intermediate language (MSIL) instruction. */
-    var CEE_opcode : Int = _
-
-    /** The name of the Microsoft intermediate language (MSIL) instruction. */
-    var CEE_string: String = _
-
-    /** The type of Microsoft intermediate language (MSIL) instruction. */
-    var CEE_code: Short = _
-
-    /** How the Microsoft intermediate language (MSIL) instruction pops the stack. */
-    var CEE_pop: Byte = _
-
-    /** How the Microsoft intermediate language (MSIL) instruction pushes operand onto the stack. */
-    var CEE_push: Byte = _
-
-    /** Describes the type of flow control. */
-    var CEE_flow: Byte = _
-
-    /** ????? */
-    var CEE_inline: Byte = _
-
-    var CEE_length: Byte = _
-
-    var CEE_popush: Byte = _
-
-    /**
-     * the apply method for a visitor
-     */
-    @throws(classOf[IOException])
-    def apply(v: Visitor) {
-	v.caseOpCode(this)
-    }
-
-    protected def length(): Byte = {
-	val code = OpCode.length(CEE_code)
-	val inline = OpCode.INLINE_length(CEE_inline)
-	return if(inline < 0) { -1 } else { (code + inline).toByte }
-    }
-
-    protected def popush(): Byte = {
-	val pop = OpCode.POP_size(CEE_pop)
-	val push = OpCode.PUSH_size(CEE_push)
-	return if(pop < 0 || push < 0) { OpCode.POPUSH_SPECIAL } else { (push - pop).toByte }
-    }
-
-    override def toString(): String = {
-	return CEE_string
-    }
-}
-
-object OpCode {
-
-    //########################################################################
-    // Common Execution Environment opcodes
-
-    final val CEE_NOP            : Int = 0x0000
-    final val CEE_BREAK          : Int = 0x0001
-    final val CEE_LDARG_0        : Int = 0x0002
-    final val CEE_LDARG_1        : Int = 0x0003
-    final val CEE_LDARG_2        : Int = 0x0004
-    final val CEE_LDARG_3        : Int = 0x0005
-    final val CEE_LDLOC_0        : Int = 0x0006
-    final val CEE_LDLOC_1        : Int = 0x0007
-    final val CEE_LDLOC_2        : Int = 0x0008
-    final val CEE_LDLOC_3        : Int = 0x0009
-    final val CEE_STLOC_0        : Int = 0x000A
-    final val CEE_STLOC_1        : Int = 0x000B
-    final val CEE_STLOC_2        : Int = 0x000C
-    final val CEE_STLOC_3        : Int = 0x000D
-    final val CEE_LDARG_S        : Int = 0x000E
-    final val CEE_LDARGA_S       : Int = 0x000F
-    final val CEE_STARG_S        : Int = 0x0010
-    final val CEE_LDLOC_S        : Int = 0x0011
-    final val CEE_LDLOCA_S       : Int = 0x0012
-    final val CEE_STLOC_S        : Int = 0x0013
-    final val CEE_LDNULL         : Int = 0x0014
-    final val CEE_LDC_I4_M1      : Int = 0x0015
-    final val CEE_LDC_I4_0       : Int = 0x0016
-    final val CEE_LDC_I4_1       : Int = 0x0017
-    final val CEE_LDC_I4_2       : Int = 0x0018
-    final val CEE_LDC_I4_3       : Int = 0x0019
-    final val CEE_LDC_I4_4       : Int = 0x001A
-    final val CEE_LDC_I4_5       : Int = 0x001B
-    final val CEE_LDC_I4_6       : Int = 0x001C
-    final val CEE_LDC_I4_7       : Int = 0x001D
-    final val CEE_LDC_I4_8       : Int = 0x001E
-    final val CEE_LDC_I4_S       : Int = 0x001F
-    final val CEE_LDC_I4         : Int = 0x0020
-    final val CEE_LDC_I8         : Int = 0x0021
-    final val CEE_LDC_R4         : Int = 0x0022
-    final val CEE_LDC_R8         : Int = 0x0023
-    final val CEE_UNUSED49       : Int = 0x0024
-    final val CEE_DUP            : Int = 0x0025
-    final val CEE_POP            : Int = 0x0026
-    final val CEE_JMP            : Int = 0x0027
-    final val CEE_CALL           : Int = 0x0028
-    final val CEE_CALLI          : Int = 0x0029
-    final val CEE_RET            : Int = 0x002A
-    final val CEE_BR_S           : Int = 0x002B
-    final val CEE_BRFALSE_S      : Int = 0x002C
-    final val CEE_BRTRUE_S       : Int = 0x002D
-    final val CEE_BEQ_S          : Int = 0x002E
-    final val CEE_BGE_S          : Int = 0x002F
-    final val CEE_BGT_S          : Int = 0x0030
-    final val CEE_BLE_S          : Int = 0x0031
-    final val CEE_BLT_S          : Int = 0x0032
-    final val CEE_BNE_UN_S       : Int = 0x0033
-    final val CEE_BGE_UN_S       : Int = 0x0034
-    final val CEE_BGT_UN_S       : Int = 0x0035
-    final val CEE_BLE_UN_S       : Int = 0x0036
-    final val CEE_BLT_UN_S       : Int = 0x0037
-    final val CEE_BR             : Int = 0x0038
-    final val CEE_BRFALSE        : Int = 0x0039
-    final val CEE_BRTRUE         : Int = 0x003A
-    final val CEE_BEQ            : Int = 0x003B
-    final val CEE_BGE            : Int = 0x003C
-    final val CEE_BGT            : Int = 0x003D
-    final val CEE_BLE            : Int = 0x003E
-    final val CEE_BLT            : Int = 0x003F
-    final val CEE_BNE_UN         : Int = 0x0040
-    final val CEE_BGE_UN         : Int = 0x0041
-    final val CEE_BGT_UN         : Int = 0x0042
-    final val CEE_BLE_UN         : Int = 0x0043
-    final val CEE_BLT_UN         : Int = 0x0044
-    final val CEE_SWITCH         : Int = 0x0045
-    final val CEE_LDIND_I1       : Int = 0x0046
-    final val CEE_LDIND_U1       : Int = 0x0047
-    final val CEE_LDIND_I2       : Int = 0x0048
-    final val CEE_LDIND_U2       : Int = 0x0049
-    final val CEE_LDIND_I4       : Int = 0x004A
-    final val CEE_LDIND_U4       : Int = 0x004B
-    final val CEE_LDIND_I8       : Int = 0x004C
-    final val CEE_LDIND_I        : Int = 0x004D
-    final val CEE_LDIND_R4       : Int = 0x004E
-    final val CEE_LDIND_R8       : Int = 0x004F
-    final val CEE_LDIND_REF      : Int = 0x0050
-    final val CEE_STIND_REF      : Int = 0x0051
-    final val CEE_STIND_I1       : Int = 0x0052
-    final val CEE_STIND_I2       : Int = 0x0053
-    final val CEE_STIND_I4       : Int = 0x0054
-    final val CEE_STIND_I8       : Int = 0x0055
-    final val CEE_STIND_R4       : Int = 0x0056
-    final val CEE_STIND_R8       : Int = 0x0057
-    final val CEE_ADD            : Int = 0x0058
-    final val CEE_SUB            : Int = 0x0059
-    final val CEE_MUL            : Int = 0x005A
-    final val CEE_DIV            : Int = 0x005B
-    final val CEE_DIV_UN         : Int = 0x005C
-    final val CEE_REM            : Int = 0x005D
-    final val CEE_REM_UN         : Int = 0x005E
-    final val CEE_AND            : Int = 0x005F
-    final val CEE_OR             : Int = 0x0060
-    final val CEE_XOR            : Int = 0x0061
-    final val CEE_SHL            : Int = 0x0062
-    final val CEE_SHR            : Int = 0x0063
-    final val CEE_SHR_UN         : Int = 0x0064
-    final val CEE_NEG            : Int = 0x0065
-    final val CEE_NOT            : Int = 0x0066
-    final val CEE_CONV_I1        : Int = 0x0067
-    final val CEE_CONV_I2        : Int = 0x0068
-    final val CEE_CONV_I4        : Int = 0x0069
-    final val CEE_CONV_I8        : Int = 0x006A
-    final val CEE_CONV_R4        : Int = 0x006B
-    final val CEE_CONV_R8        : Int = 0x006C
-    final val CEE_CONV_U4        : Int = 0x006D
-    final val CEE_CONV_U8        : Int = 0x006E
-    final val CEE_CALLVIRT       : Int = 0x006F
-    final val CEE_CPOBJ          : Int = 0x0070
-    final val CEE_LDOBJ          : Int = 0x0071
-    final val CEE_LDSTR          : Int = 0x0072
-    final val CEE_NEWOBJ         : Int = 0x0073
-    final val CEE_CASTCLASS      : Int = 0x0074
-    final val CEE_ISINST         : Int = 0x0075
-    final val CEE_CONV_R_UN      : Int = 0x0076
-    final val CEE_UNUSED58       : Int = 0x0077
-    final val CEE_UNUSED1        : Int = 0x0078
-    final val CEE_UNBOX          : Int = 0x0079
-    final val CEE_THROW          : Int = 0x007A
-    final val CEE_LDFLD          : Int = 0x007B
-    final val CEE_LDFLDA         : Int = 0x007C
-    final val CEE_STFLD          : Int = 0x007D
-    final val CEE_LDSFLD         : Int = 0x007E
-    final val CEE_LDSFLDA        : Int = 0x007F
-    final val CEE_STSFLD         : Int = 0x0080
-    final val CEE_STOBJ          : Int = 0x0081
-    final val CEE_CONV_OVF_I1_UN : Int = 0x0082
-    final val CEE_CONV_OVF_I2_UN : Int = 0x0083
-    final val CEE_CONV_OVF_I4_UN : Int = 0x0084
-    final val CEE_CONV_OVF_I8_UN : Int = 0x0085
-    final val CEE_CONV_OVF_U1_UN : Int = 0x0086
-    final val CEE_CONV_OVF_U2_UN : Int = 0x0087
-    final val CEE_CONV_OVF_U4_UN : Int = 0x0088
-    final val CEE_CONV_OVF_U8_UN : Int = 0x0089
-    final val CEE_CONV_OVF_I_UN  : Int = 0x008A
-    final val CEE_CONV_OVF_U_UN  : Int = 0x008B
-    final val CEE_BOX            : Int = 0x008C
-    final val CEE_NEWARR         : Int = 0x008D
-    final val CEE_LDLEN          : Int = 0x008E
-    final val CEE_LDELEMA        : Int = 0x008F
-    final val CEE_LDELEM_I1      : Int = 0x0090
-    final val CEE_LDELEM_U1      : Int = 0x0091
-    final val CEE_LDELEM_I2      : Int = 0x0092
-    final val CEE_LDELEM_U2      : Int = 0x0093
-    final val CEE_LDELEM_I4      : Int = 0x0094
-    final val CEE_LDELEM_U4      : Int = 0x0095
-    final val CEE_LDELEM_I8      : Int = 0x0096
-    final val CEE_LDELEM_I       : Int = 0x0097
-    final val CEE_LDELEM_R4      : Int = 0x0098
-    final val CEE_LDELEM_R8      : Int = 0x0099
-    final val CEE_LDELEM_REF     : Int = 0x009A
-    final val CEE_STELEM_I       : Int = 0x009B
-    final val CEE_STELEM_I1      : Int = 0x009C
-    final val CEE_STELEM_I2      : Int = 0x009D
-    final val CEE_STELEM_I4      : Int = 0x009E
-    final val CEE_STELEM_I8      : Int = 0x009F
-    final val CEE_STELEM_R4      : Int = 0x00A0
-    final val CEE_STELEM_R8      : Int = 0x00A1
-    final val CEE_STELEM_REF     : Int = 0x00A2
-    final val CEE_UNUSED2        : Int = 0x00A3
-    final val CEE_UNUSED3        : Int = 0x00A4
-    final val CEE_UNUSED4        : Int = 0x00A5
-    final val CEE_UNUSED5        : Int = 0x00A6
-    final val CEE_UNUSED6        : Int = 0x00A7
-    final val CEE_UNUSED7        : Int = 0x00A8
-    final val CEE_UNUSED8        : Int = 0x00A9
-    final val CEE_UNUSED9        : Int = 0x00AA
-    final val CEE_UNUSED10       : Int = 0x00AB
-    final val CEE_UNUSED11       : Int = 0x00AC
-    final val CEE_UNUSED12       : Int = 0x00AD
-    final val CEE_UNUSED13       : Int = 0x00AE
-    final val CEE_UNUSED14       : Int = 0x00AF
-    final val CEE_UNUSED15       : Int = 0x00B0
-    final val CEE_UNUSED16       : Int = 0x00B1
-    final val CEE_UNUSED17       : Int = 0x00B2
-    final val CEE_CONV_OVF_I1    : Int = 0x00B3
-    final val CEE_CONV_OVF_U1    : Int = 0x00B4
-    final val CEE_CONV_OVF_I2    : Int = 0x00B5
-    final val CEE_CONV_OVF_U2    : Int = 0x00B6
-    final val CEE_CONV_OVF_I4    : Int = 0x00B7
-    final val CEE_CONV_OVF_U4    : Int = 0x00B8
-    final val CEE_CONV_OVF_I8    : Int = 0x00B9
-    final val CEE_CONV_OVF_U8    : Int = 0x00BA
-    final val CEE_UNUSED50       : Int = 0x00BB
-    final val CEE_UNUSED18       : Int = 0x00BC
-    final val CEE_UNUSED19       : Int = 0x00BD
-    final val CEE_UNUSED20       : Int = 0x00BE
-    final val CEE_UNUSED21       : Int = 0x00BF
-    final val CEE_UNUSED22       : Int = 0x00C0
-    final val CEE_UNUSED23       : Int = 0x00C1
-    final val CEE_REFANYVAL      : Int = 0x00C2
-    final val CEE_CKFINITE       : Int = 0x00C3
-    final val CEE_UNUSED24       : Int = 0x00C4
-    final val CEE_UNUSED25       : Int = 0x00C5
-    final val CEE_MKREFANY       : Int = 0x00C6
-    final val CEE_UNUSED59       : Int = 0x00C7
-    final val CEE_UNUSED60       : Int = 0x00C8
-    final val CEE_UNUSED61       : Int = 0x00C9
-    final val CEE_UNUSED62       : Int = 0x00CA
-    final val CEE_UNUSED63       : Int = 0x00CB
-    final val CEE_UNUSED64       : Int = 0x00CC
-    final val CEE_UNUSED65       : Int = 0x00CD
-    final val CEE_UNUSED66       : Int = 0x00CE
-    final val CEE_UNUSED67       : Int = 0x00CF
-    final val CEE_LDTOKEN        : Int = 0x00D0
-    final val CEE_CONV_U2        : Int = 0x00D1
-    final val CEE_CONV_U1        : Int = 0x00D2
-    final val CEE_CONV_I         : Int = 0x00D3
-    final val CEE_CONV_OVF_I     : Int = 0x00D4
-    final val CEE_CONV_OVF_U     : Int = 0x00D5
-    final val CEE_ADD_OVF        : Int = 0x00D6
-    final val CEE_ADD_OVF_UN     : Int = 0x00D7
-    final val CEE_MUL_OVF        : Int = 0x00D8
-    final val CEE_MUL_OVF_UN     : Int = 0x00D9
-    final val CEE_SUB_OVF        : Int = 0x00DA
-    final val CEE_SUB_OVF_UN     : Int = 0x00DB
-    final val CEE_ENDFINALLY     : Int = 0x00DC
-    final val CEE_LEAVE          : Int = 0x00DD
-    final val CEE_LEAVE_S        : Int = 0x00DE
-    final val CEE_STIND_I        : Int = 0x00DF
-    final val CEE_CONV_U         : Int = 0x00E0
-    final val CEE_UNUSED26       : Int = 0x00E1
-    final val CEE_UNUSED27       : Int = 0x00E2
-    final val CEE_UNUSED28       : Int = 0x00E3
-    final val CEE_UNUSED29       : Int = 0x00E4
-    final val CEE_UNUSED30       : Int = 0x00E5
-    final val CEE_UNUSED31       : Int = 0x00E6
-    final val CEE_UNUSED32       : Int = 0x00E7
-    final val CEE_UNUSED33       : Int = 0x00E8
-    final val CEE_UNUSED34       : Int = 0x00E9
-    final val CEE_UNUSED35       : Int = 0x00EA
-    final val CEE_UNUSED36       : Int = 0x00EB
-    final val CEE_UNUSED37       : Int = 0x00EC
-    final val CEE_UNUSED38       : Int = 0x00ED
-    final val CEE_UNUSED39       : Int = 0x00EE
-    final val CEE_UNUSED40       : Int = 0x00EF
-    final val CEE_UNUSED41       : Int = 0x00F0
-    final val CEE_UNUSED42       : Int = 0x00F1
-    final val CEE_UNUSED43       : Int = 0x00F2
-    final val CEE_UNUSED44       : Int = 0x00F3
-    final val CEE_UNUSED45       : Int = 0x00F4
-    final val CEE_UNUSED46       : Int = 0x00F5
-    final val CEE_UNUSED47       : Int = 0x00F6
-    final val CEE_UNUSED48       : Int = 0x00F7
-    final val CEE_PREFIX7        : Int = 0x00F8
-    final val CEE_PREFIX6        : Int = 0x00F9
-    final val CEE_PREFIX5        : Int = 0x00FA
-    final val CEE_PREFIX4        : Int = 0x00FB
-    final val CEE_PREFIX3        : Int = 0x00FC
-    final val CEE_PREFIX2        : Int = 0x00FD
-    final val CEE_PREFIX1        : Int = 0x00FE
-    final val CEE_PREFIXREF      : Int = 0x00FF
-
-    final val CEE_ARGLIST         : Int = 0x0100
-    final val CEE_CEQ             : Int = 0x0101
-    final val CEE_CGT             : Int = 0x0102
-    final val CEE_CGT_UN          : Int = 0x0103
-    final val CEE_CLT             : Int = 0x0104
-    final val CEE_CLT_UN          : Int = 0x0105
-    final val CEE_LDFTN           : Int = 0x0106
-    final val CEE_LDVIRTFTN       : Int = 0x0107
-    final val CEE_UNUSED56        : Int = 0x0108
-    final val CEE_LDARG           : Int = 0x0109
-    final val CEE_LDARGA          : Int = 0x010A
-    final val CEE_STARG           : Int = 0x010B
-    final val CEE_LDLOC           : Int = 0x010C
-    final val CEE_LDLOCA          : Int = 0x010D
-    final val CEE_STLOC           : Int = 0x010E
-    final val CEE_LOCALLOC        : Int = 0x010F
-    final val CEE_UNUSED57        : Int = 0x0110
-    final val CEE_ENDFILTER       : Int = 0x0111
-    final val CEE_UNALIGNED       : Int = 0x0112
-    final val CEE_VOLATILE        : Int = 0x0113
-    final val CEE_TAILCALL        : Int = 0x0114
-    final val CEE_INITOBJ         : Int = 0x0115
-    final val CEE_CONSTRAINED     : Int = 0xFE16
-    final val CEE_READONLY        : Int = 0xFE1E
-    final val CEE_UNUSED68        : Int = 0x0116
-    final val CEE_CPBLK           : Int = 0x0117
-    final val CEE_INITBLK         : Int = 0x0118
-    final val CEE_UNUSED69        : Int = 0x0119
-    final val CEE_RETHROW         : Int = 0x011A
-    final val CEE_UNUSED51        : Int = 0x011B
-    final val CEE_SIZEOF          : Int = 0x011C
-    final val CEE_REFANYTYPE      : Int = 0x011D
-    final val CEE_UNUSED52        : Int = 0x011E
-    final val CEE_UNUSED53        : Int = 0x011F
-    final val CEE_UNUSED54        : Int = 0x0120
-    final val CEE_UNUSED55        : Int = 0x0121
-    final val CEE_UNUSED70        : Int = 0x0122
-
-    final val CEE_ILLEGAL         : Int = 0x0140
-    final val CEE_MACRO_END       : Int = 0x0141
-
-    final val CEE_BRNULL          : Int = 0x0180 // CEE_BRFALSE
-    final val CEE_BRNULL_S        : Int = 0x0181 // CEE_BRFALSE_S
-    final val CEE_BRZERO          : Int = 0x0182 // CEE_BRFALSE
-    final val CEE_BRZERO_S        : Int = 0x0183 // CEE_BRFALSE_S
-    final val CEE_BRINST          : Int = 0x0184 // CEE_BRTRUE
-    final val CEE_BRINST_S        : Int = 0x0185 // CEE_BRTRUE_S
-    final val CEE_LDIND_U8        : Int = 0x0186 // CEE_LDIND_I8
-    final val CEE_LDELEM_U8       : Int = 0x0187 // CEE_LDELEM_I8
-    final val CEE_LDC_I4_M1x      : Int = 0x0188 // CEE_LDC_I4_M1
-    final val CEE_ENDFAULT        : Int = 0x0189 // CEE_ENDFINALLY
-
-    final val CEE_BRNONZERO       : Int = 0x01C0 // CEE_BRTRUE
-    final val CEE_BRNONZERO_S     : Int = 0x01C1 // CEE_BRTRUE_S
-
-    final val CEE_BRNOT           : Int = 0x01C2
-    final val CEE_BRNOT_S         : Int = 0x01C3
-    final val CEE_NOCODE          : Int = 0x01C4
-
-    final val CEE_count           : Int = 0x0200
-
-
-    //########################################################################
-    // Opcode's amount and type of popped data
-
-    final val POP_NONE          : Byte        = 0x00
-    final val POP_1             : Byte        = 0x01
-    final val POP_1_1           : Byte        = 0x02
-    final val POP_I             : Byte        = 0x03
-    final val POP_I_1           : Byte        = 0x04
-    final val POP_I_I           : Byte        = 0x05
-    final val POP_I_I8          : Byte        = 0x06
-    final val POP_I_R4          : Byte        = 0x07
-    final val POP_I_R8          : Byte        = 0x08
-    final val POP_I_I_I         : Byte        = 0x09
-    final val POP_REF           : Byte        = 0x0A
-    final val POP_REF_1         : Byte        = 0x0B
-    final val POP_REF_I         : Byte        = 0x0C
-    final val POP_REF_I_I       : Byte        = 0x0D
-    final val POP_REF_I_I8      : Byte        = 0x0E
-    final val POP_REF_I_R4      : Byte        = 0x0F
-    final val POP_REF_I_R8      : Byte        = 0x10
-    final val POP_REF_I_REF     : Byte        = 0x11
-    final val POP_SPECIAL       : Byte        = 0x12
-    final val POP_count         : Int         = 0x13
-    final val POP_size          : Array[Byte] = new Array[Byte](POP_count)
-
-	POP_size(POP_NONE)              =  0
-	POP_size(POP_1)                 =  1
-	POP_size(POP_1_1)               =  2
-	POP_size(POP_I)                 =  1
-	POP_size(POP_I_1)               =  2
-	POP_size(POP_I_I)               =  2
-	POP_size(POP_I_I8)              =  2
-	POP_size(POP_I_R4)              =  2
-	POP_size(POP_I_R8)              =  2
-	POP_size(POP_I_I_I)             =  3
-	POP_size(POP_REF)               =  1
-	POP_size(POP_REF_1)             =  2
-	POP_size(POP_REF_I)             =  2
-	POP_size(POP_REF_I_I)           =  3
-	POP_size(POP_REF_I_I8)          =  3
-	POP_size(POP_REF_I_R4)          =  3
-	POP_size(POP_REF_I_R8)          =  3
-	POP_size(POP_REF_I_REF)         =  3
-	POP_size(POP_SPECIAL)           = -1
-
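-    // Note: POP_size(POP_SPECIAL) is -1 because opcodes such as call, calli,
-    // callvirt, newobj and ret pop a number of stack slots that depends on the
-    // signature of the method involved rather than on the opcode itself.
-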
-    //########################################################################
-    // Opcode's amount and type of pushed data
-
-    final val PUSH_NONE         : Byte        = 0x00
-    final val PUSH_1            : Byte        = 0x01
-    final val PUSH_1_1          : Byte        = 0x02
-    final val PUSH_I            : Byte        = 0x03
-    final val PUSH_I8           : Byte        = 0x04
-    final val PUSH_R4           : Byte        = 0x05
-    final val PUSH_R8           : Byte        = 0x06
-    final val PUSH_REF          : Byte        = 0x07
-    final val PUSH_SPECIAL      : Byte        = 0x08
-    final val PUSH_count        : Int         = 0x09
-    final val PUSH_size         : Array[Byte] = new Array[Byte](PUSH_count)
-
-	PUSH_size(PUSH_NONE)             =  0
-	PUSH_size(PUSH_1)                =  1
-	PUSH_size(PUSH_1_1)              =  2
-	PUSH_size(PUSH_I)                =  1
-	PUSH_size(PUSH_I8)               =  1
-	PUSH_size(PUSH_R4)               =  1
-	PUSH_size(PUSH_R8)               =  1
-	PUSH_size(PUSH_REF)              =  1
-	PUSH_size(PUSH_SPECIAL)          = -1
-
-    //########################################################################
-    // Opcode's amount of moved data
-
-    final val POPUSH_SPECIAL    : Byte        = -128
-
-    //########################################################################
-    // Opcode's inline argument types
-
-    final val INLINE_NONE       : Byte        = 0x00
-    final val INLINE_VARIABLE_S : Byte        = 0x01
-    final val INLINE_TARGET_S   : Byte        = 0x02
-    final val INLINE_I_S        : Byte        = 0x03
-    final val INLINE_VARIABLE   : Byte        = 0x04
-    final val INLINE_TARGET     : Byte        = 0x05
-    final val INLINE_I          : Byte        = 0x06
-    final val INLINE_I8         : Byte        = 0x07
-    final val INLINE_R          : Byte        = 0x08
-    final val INLINE_R8         : Byte        = 0x09
-    final val INLINE_STRING     : Byte        = 0x0A
-    final val INLINE_TYPE       : Byte        = 0x0B
-    final val INLINE_FIELD      : Byte        = 0x0C
-    final val INLINE_METHOD     : Byte        = 0x0D
-    final val INLINE_SIGNATURE  : Byte        = 0x0E
-    final val INLINE_TOKEN      : Byte        = 0x0F
-    final val INLINE_SWITCH     : Byte        = 0x10
-    final val INLINE_count      : Int         = 0x11
-    final val INLINE_length     : Array[Byte] = new Array[Byte](INLINE_count)
-
-	INLINE_length(INLINE_NONE)       =  0
-	INLINE_length(INLINE_VARIABLE_S) =  1
-	INLINE_length(INLINE_TARGET_S)   =  1
-	INLINE_length(INLINE_I_S)        =  1
-	INLINE_length(INLINE_VARIABLE)   =  2
-	INLINE_length(INLINE_TARGET)     =  4
-	INLINE_length(INLINE_I)          =  4
-	INLINE_length(INLINE_I8)         =  8
-	INLINE_length(INLINE_R)          =  4
-	INLINE_length(INLINE_R8)         =  8
-	INLINE_length(INLINE_STRING)     =  4
-	INLINE_length(INLINE_TYPE)       =  4
-	INLINE_length(INLINE_FIELD)      =  4
-	INLINE_length(INLINE_METHOD)     =  4
-	INLINE_length(INLINE_SIGNATURE)  =  4
-	INLINE_length(INLINE_SWITCH)     =  4
-	INLINE_length(INLINE_TOKEN)      =  4
-
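-    // INLINE_length gives the size in bytes of the inline operand that follows
-    // the opcode in the instruction stream: short forms take a 1-byte operand,
-    // full variable indices take 2 bytes, branch targets, metadata tokens and
-    // i4/r4 constants take 4 bytes, and i8/r8 constants take 8 bytes.
-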
-    //########################################################################
-    // Opcode's control flow implications
-
-    final val FLOW_META         : Byte = 0x00
-    final val FLOW_NEXT         : Byte = 0x01
-    final val FLOW_BRANCH       : Byte = 0x02
-    final val FLOW_COND_BRANCH  : Byte = 0x03
-    final val FLOW_BREAK        : Byte = 0x04
-    final val FLOW_CALL         : Byte = 0x05
-    final val FLOW_RETURN       : Byte = 0x06
-    final val FLOW_THROW        : Byte = 0x07
-    final val FLOW_count        : Int  = 0x08
-
-    //########################################################################
-    // Init methods for Opcode
-
-    def opcode(that: OpCode, opcode: Int, string: String, code: Int,
-			pop: Byte, push: Byte, inline: Byte, flow: Byte) {
-	that.CEE_opcode = opcode
-	that.CEE_string = string
-	that.CEE_code = code.toShort
-	that.CEE_pop = pop
-	that.CEE_push = push
-	that.CEE_inline = inline
-	that.CEE_flow = flow
-	that.CEE_length = that.length()
-	that.CEE_popush = that.popush()
-    }
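-
-    // opcode() is called once for each OpCode value declared below; it fills in
-    // the mnemonic, encoding and stack/operand metadata and caches the derived
-    // length() and popush() values on the instance.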
-
-    def length(code: Int): Byte = {
-	if ((code & 0xFFFFFF00) == 0xFFFFFF00) return 1
-	if ((code & 0xFFFFFF00) == 0xFFFFFE00) return 2
-	return 0
-    }
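-
-    // The code values passed to opcode() are the CIL encodings sign-extended to
-    // 32 bits: one-byte opcodes appear as 0xFFFFFFxx (e.g. add = 0xFFFFFF58,
-    // length 1) and two-byte opcodes carrying the 0xFE prefix appear as
-    // 0xFFFFFExx (e.g. arglist = 0xFFFFFE00, length 2); length(code) recovers
-    // the instruction size from the upper bytes.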
-
-    //########################################################################
-    // case OpCode
-
-    /**
-     * Adds two values and pushes the result onto the evaluation stack.
-     */
-    final val Add = new OpCode()
-    opcode(Add, CEE_ADD, "add", 0xFFFFFF58, POP_1_1, PUSH_1, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Fills space if bytecodes are patched. No meaningful operation is performed
-     * although a processing cycle can be consumed.
-     */
-    final val Nop = new OpCode()
-	opcode(Nop, CEE_NOP, "nop", 0xFFFFFF00, POP_NONE, PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Signals the Common Language Infrastructure (CLI) to inform the debugger that
-     * a break point has been tripped.
-     */
-    final val Break = new OpCode()
-	opcode(Break, CEE_BREAK, "break"    , 0xFFFFFF01, POP_NONE, PUSH_NONE   , INLINE_NONE , FLOW_BREAK)
-
-    /**
-     * Loads the argument at index 0 onto the evaluation stack.
-     */
-    final val Ldarg_0 = new OpCode()
-	opcode(Ldarg_0, CEE_LDARG_0  , "ldarg.0"  , 0xFFFFFF02, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the argument at index 1 onto the evaluation stack.
-     */
-    final val Ldarg_1 = new OpCode()
-	opcode(Ldarg_1, CEE_LDARG_1  , "ldarg.1"  , 0xFFFFFF03, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the argument at index 2 onto the evaluation stack.
-     */
-    final val Ldarg_2 = new OpCode()
-	opcode(Ldarg_2, CEE_LDARG_2  , "ldarg.2"  , 0xFFFFFF04, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the argument at index 3 onto the evaluation stack.
-     */
-    final val Ldarg_3 = new OpCode()
-	opcode(Ldarg_3, CEE_LDARG_3  , "ldarg.3"  , 0xFFFFFF05, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the local variable at index 0 onto the evaluation stack.
-     */
-    final val Ldloc_0 = new OpCode()
-	opcode(Ldloc_0, CEE_LDLOC_0  , "ldloc.0"  , 0xFFFFFF06, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the local variable at index 1 onto the evaluation stack.
-     */
-    final val Ldloc_1 = new OpCode()
-	opcode(Ldloc_1, CEE_LDLOC_1  , "ldloc.1"  , 0xFFFFFF07, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the local variable at index 2 onto the evaluation stack.
-     */
-    final val Ldloc_2 = new OpCode()
-	opcode(Ldloc_2, CEE_LDLOC_2  , "ldloc.2"  , 0xFFFFFF08, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the local variable at index 3 onto the evaluation stack.
-     */
-    final val Ldloc_3 = new OpCode()
-	opcode(Ldloc_3, CEE_LDLOC_3  , "ldloc.3"  , 0xFFFFFF09, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Pops the current value from the top of the evaluation stack and
-     * stores it in the local variable list at index 0.
-     */
-    final val Stloc_0 = new OpCode()
-	opcode(Stloc_0, CEE_STLOC_0  , "stloc.0"  , 0xFFFFFF0A, POP_1   , PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Pops the current value from the top of the evaluation stack and
-     * stores it in the local variable list at index 1.
-     */
-    final val Stloc_1 = new OpCode()
-	opcode(Stloc_1, CEE_STLOC_1  , "stloc.1"  , 0xFFFFFF0B, POP_1   , PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Pops the current value from the top of the evaluation stack and
-     * stores it in the local variable list at index 2.
-     */
-    final val Stloc_2 = new OpCode()
-	opcode(Stloc_2, CEE_STLOC_2  , "stloc.2"  , 0xFFFFFF0C, POP_1   , PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Pops the current value from the top of the evaluation stack and
-     * stores it in the local variable list at index 3.
-     */
-    final val Stloc_3 = new OpCode()
-	opcode(Stloc_3, CEE_STLOC_3  , "stloc.3"  , 0xFFFFFF0D, POP_1   , PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the argument (referenced by a specified short form index)
-     * onto the evaluation stack.
-     */
-    final val Ldarg_S = new OpCode()
-	opcode(Ldarg_S, CEE_LDARG_S  , "ldarg.s"  , 0xFFFFFF0E, POP_NONE, PUSH_1 , INLINE_VARIABLE_S, FLOW_NEXT)
-
-    /**
-     * Load an argument address, in short form, onto the evaluation stack.
-     */
-    final val Ldarga_S = new OpCode()
-	opcode(Ldarga_S, CEE_LDARGA_S , "ldarga.s" , 0xFFFFFF0F, POP_NONE, PUSH_I , INLINE_VARIABLE_S, FLOW_NEXT)
-
-    /**
-     * Loads the local variable at a specific index onto the evaluation stack,
-     * short form.
-     */
-    final val Ldloc_S = new OpCode()
-	opcode(Ldloc_S, CEE_LDLOC_S  , "ldloc.s"  , 0xFFFFFF11, POP_NONE, PUSH_1 , INLINE_VARIABLE_S, FLOW_NEXT)
-
-    /**
-     * Loads the address of the local variable at a specific index onto
-     * the evaluation stack, short form.
-     */
-    final val Ldloca_S = new OpCode()
-	opcode(Ldloca_S, CEE_LDLOCA_S , "ldloca.s" , 0xFFFFFF12, POP_NONE, PUSH_I , INLINE_VARIABLE_S, FLOW_NEXT)
-
-    /**
-     * Stores the value on top of the evaluation stack in the argument slot
-     * at a specified index, short form.
-     */
-    final val Starg_S = new OpCode()
-	opcode(Starg_S, CEE_STARG_S  , "starg.s"  , 0xFFFFFF10, POP_1   , PUSH_NONE , INLINE_VARIABLE_S, FLOW_NEXT)
-
-    /**
-     * Pops the current value from the top of the evaluation stack and stores it
-     * in the local variable list at index (short form).
-     */
-    final val Stloc_S = new OpCode()
-	opcode(Stloc_S, CEE_STLOC_S  , "stloc.s"  , 0xFFFFFF13, POP_1   , PUSH_NONE, INLINE_VARIABLE_S, FLOW_NEXT)
-
-    /**
-     * Pushes a null reference (type O) onto the evaluation stack.
-     */
-    final val Ldnull = new OpCode()
-	opcode(Ldnull, CEE_LDNULL   , "ldnull"   , 0xFFFFFF14, POP_NONE, PUSH_REF , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Pushes the integer value of -1 onto the evaluation stack as an int32.
-     */
-    final val Ldc_I4_M1 = new OpCode()
-	opcode(Ldc_I4_M1, CEE_LDC_I4_M1, "ldc.i4.m1", 0xFFFFFF15, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Pushes the integer value of 0 onto the evaluation stack as an int32.
-     */
-    final val Ldc_I4_0 = new OpCode()
-	opcode(Ldc_I4_0, CEE_LDC_I4_0 , "ldc.i4.0" , 0xFFFFFF16, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Pushes the integer value of 1 onto the evaluation stack as an int32.
-     */
-    final val Ldc_I4_1 = new OpCode()
-	opcode(Ldc_I4_1, CEE_LDC_I4_1 , "ldc.i4.1" , 0xFFFFFF17, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Pushes the integer value of 2 onto the evaluation stack as an int32.
-     */
-    final val Ldc_I4_2 = new OpCode()
-	opcode(Ldc_I4_2, CEE_LDC_I4_2 , "ldc.i4.2" , 0xFFFFFF18, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Pushes the integer value of 3 onto the evaluation stack as an int32.
-     */
-    final val Ldc_I4_3 = new OpCode()
-	opcode(Ldc_I4_3, CEE_LDC_I4_3 , "ldc.i4.3" , 0xFFFFFF19, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Pushes the integer value of 4 onto the evaluation stack as an int32.
-     */
-    final val Ldc_I4_4 = new OpCode()
-	opcode(Ldc_I4_4, CEE_LDC_I4_4 , "ldc.i4.4" , 0xFFFFFF1A, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Pushes the integer value of 5 onto the evaluation stack as an int32.
-     */
-    final val Ldc_I4_5 = new OpCode()
-	opcode(Ldc_I4_5, CEE_LDC_I4_5 , "ldc.i4.5" , 0xFFFFFF1B, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Pushes the integer value of 6 onto the evaluation stack as an int32.
-     */
-    final val Ldc_I4_6 = new OpCode()
-	opcode(Ldc_I4_6, CEE_LDC_I4_6 , "ldc.i4.6", 0xFFFFFF1C, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Pushes the integer value of 7 onto the evaluation stack as an int32.
-     */
-    final val Ldc_I4_7 = new OpCode()
-	opcode(Ldc_I4_7, CEE_LDC_I4_7 , "ldc.i4.7", 0xFFFFFF1D, POP_NONE   , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Pushes the integer value of 8 onto the evaluation stack as an int32.
-     */
-    final val Ldc_I4_8 = new OpCode()
-	opcode(Ldc_I4_8, CEE_LDC_I4_8 , "ldc.i4.8", 0xFFFFFF1E, POP_NONE   , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Pushes the supplied int8 value onto the evaluation stack as an int32, short form.
-     */
-    final val Ldc_I4_S = new OpCode()
-	opcode(Ldc_I4_S, CEE_LDC_I4_S , "ldc.i4.s", 0xFFFFFF1F, POP_NONE   , PUSH_I, INLINE_I_S, FLOW_NEXT)
-
-    /**
-     * Pushes a supplied value of type int32 onto the evaluation stack as an int32.
-     */
-    final val Ldc_I4 = new OpCode()
-	opcode(Ldc_I4, CEE_LDC_I4, "ldc.i4"  , 0xFFFFFF20, POP_NONE   , PUSH_I, INLINE_I  , FLOW_NEXT)
-
-    /**
-     *  Pushes a supplied value of type int64 onto the evaluation stack as an int64.
-     */
-    final val Ldc_I8 = new OpCode()
-	opcode(Ldc_I8, CEE_LDC_I8, "ldc.i8"  , 0xFFFFFF21, POP_NONE   , PUSH_I8, INLINE_I8 , FLOW_NEXT)
-
-    /**
-     * Pushes a supplied value of type float32 onto the evaluation stack as type F (float).
-     */
-    final val Ldc_R4 = new OpCode()
-	opcode(Ldc_R4, CEE_LDC_R4, "ldc.r4"  , 0xFFFFFF22, POP_NONE   , PUSH_R4, INLINE_R  , FLOW_NEXT)
-
-    /**
-     * Pushes a supplied value of type float64 onto the evaluation stack as type F (float).
-     */
-    final val Ldc_R8 = new OpCode()
-	opcode(Ldc_R8, CEE_LDC_R8, "ldc.r8"  , 0xFFFFFF23, POP_NONE   , PUSH_R8, INLINE_R8 , FLOW_NEXT)
-
-    /**
-     * Copies the current topmost value on the evaluation stack, and then pushes the copy
-     * onto the evaluation stack.
-     */
-    final val Dup = new OpCode()
-	opcode(Dup, CEE_DUP , "dup"     , 0xFFFFFF25, POP_1      , PUSH_1_1 , INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     * Removes the value currently on top of the evaluation stack.
-     */
-    final val Pop = new OpCode()
-	opcode(Pop, CEE_POP , "pop"     , 0xFFFFFF26, POP_1      , PUSH_NONE , INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     * Exits current method and jumps to specified method.
-     */
-    final val Jmp = new OpCode()
-	opcode(Jmp, CEE_JMP , "jmp"     , 0xFFFFFF27, POP_NONE   , PUSH_NONE , INLINE_METHOD, FLOW_CALL)
-
-    /**
-     * Calls the method indicated by the passed method descriptor.
-     */
-    final val Call = new OpCode()
-	opcode(Call, CEE_CALL , "call"    , 0xFFFFFF28, POP_SPECIAL, PUSH_SPECIAL, INLINE_METHOD    , FLOW_CALL)
-
-    /**
-     * constrained prefix
-     */
-    final val Constrained = new OpCode()
-	opcode(Constrained, CEE_CONSTRAINED, "constrained.", 0xFFFFFE16, POP_NONE, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * readonly prefix
-     */
-    final val Readonly = new OpCode()
-	opcode(Readonly, CEE_READONLY, "readonly.", 0xFFFFFE1E, POP_NONE, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Calls the method indicated on the evaluation stack (as a pointer to an entry point)
-     * with arguments described by a calling convention.
-     */
-    final val Calli = new OpCode()
-	opcode(Calli, CEE_CALLI, "calli"   , 0xFFFFFF29, POP_SPECIAL, PUSH_SPECIAL, INLINE_SIGNATURE , FLOW_CALL)
-
-    /**
-     * Returns from the current method, pushing a return value (if present) from the callee's
-     * evaluation stack onto the caller's evaluation stack.
-     */
-    final val Ret = new OpCode()
-	opcode(Ret, CEE_RET  , "ret"     , 0xFFFFFF2A, POP_SPECIAL, PUSH_NONE, INLINE_NONE      , FLOW_RETURN)
-
-    /**
-     * Unconditionally transfers control to a target instruction (short form).
-     */
-    final val Br_S = new OpCode()
-	opcode(Br_S, CEE_BR_S , "br.s"    , 0xFFFFFF2B, POP_NONE, PUSH_NONE, INLINE_TARGET_S  , FLOW_BRANCH)
-
-    /**
-     * Transfers control to a target instruction if value is false, a null reference, or zero.
-     */
-    final val Brfalse_S = new OpCode()
-	opcode(Brfalse_S, CEE_BRFALSE_S,"brfalse.s", 0xFFFFFF2C, POP_I, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction (short form) if value is true, not null, or non-zero.
-     */
-    final val Brtrue_S = new OpCode()
-	opcode(Brtrue_S, CEE_BRTRUE_S , "brtrue.s", 0xFFFFFF2D, POP_I, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction (short form) if two values are equal.
-     */
-    final val Beq_S = new OpCode()
-	opcode(Beq_S, CEE_BEQ_S, "beq.s", 0xFFFFFF2E, POP_1_1 , PUSH_NONE, INLINE_TARGET_S  , FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is greater than
-     * or equal to the second value.
-     */
-    final val Bge_S = new OpCode()
-	opcode(Bge_S, CEE_BGE_S, "bge.s", 0xFFFFFF2F, POP_1_1 , PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is greater than
-     * the second value.
-     */
-    final val Bgt_S = new OpCode()
-	opcode(Bgt_S, CEE_BGT_S, "bgt.s"    , 0xFFFFFF30, POP_1_1 , PUSH_NONE, INLINE_TARGET_S  , FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is less than
-     * or equal to the second value.
-     */
-    final val Ble_S = new OpCode()
-	opcode(Ble_S, CEE_BLE_S, "ble.s"    , 0xFFFFFF31, POP_1_1 , PUSH_NONE, INLINE_TARGET_S  , FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is less than
-     * the second value.
-     */
-    final val Blt_S = new OpCode()
-	opcode(Blt_S, CEE_BLT_S, "blt.s", 0xFFFFFF32, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction (short form) when two unsigned integer values
-     * or unordered float values are not equal.
-     */
-    final val Bne_Un_S = new OpCode()
-	opcode(Bne_Un_S, CEE_BNE_UN_S, "bne.un.s", 0xFFFFFF33, POP_1_1 , PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is greater
-     * than the second value, when comparing unsigned integer values or unordered float values.
-     */
-    final val Bge_Un_S = new OpCode()
-	opcode(Bge_Un_S, CEE_BGE_UN_S, "bge.un.s", 0xFFFFFF34, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is greater than
-     * the second value, when comparing unsigned integer values or unordered float values.
-     */
-    final val Bgt_Un_S = new OpCode()
-	opcode(Bgt_Un_S, CEE_BGT_UN_S, "bgt.un.s", 0xFFFFFF35, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is less than
-     * or equal to the second value, when comparing unsigned integer values or unordered float values.
-     */
-    final val Ble_Un_S = new OpCode()
-	opcode(Ble_Un_S, CEE_BLE_UN_S , "ble.un.s", 0xFFFFFF36, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is less than
-     * the second value, when comparing unsigned integer values or unordered float values.
-     */
-    final val Blt_Un_S = new OpCode()
-	opcode(Blt_Un_S, CEE_BLT_UN_S, "blt.un.s", 0xFFFFFF37, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
-    /**
-     * Unconditionally transfers control to a target instruction.
-     */
-    final val Br = new OpCode()
-	opcode(Br, CEE_BR       , "br"       , 0xFFFFFF38, POP_NONE, PUSH_NONE, INLINE_TARGET, FLOW_BRANCH)
-
-    /**
-     * Transfers control to a target instruction if value is false, a null reference
-     * (Nothing in Visual Basic), or zero.
-     */
-    final val Brfalse = new OpCode()
-	opcode(Brfalse, CEE_BRFALSE, "brfalse", 0xFFFFFF39, POP_I, PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction if value is true, not null, or non-zero.
-     */
-    final val Brtrue = new OpCode()
-	opcode(Brtrue, CEE_BRTRUE , "brtrue", 0xFFFFFF3A, POP_I   , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction if two values are equal.
-     */
-    final val Beq = new OpCode()
-	opcode(Beq, CEE_BEQ, "beq", 0xFFFFFF3B, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction if the first value is greater than or
-     * equal to the second value.
-     */
-    final val Bge = new OpCode()
-	opcode(Bge, CEE_BGE, "bge", 0xFFFFFF3C, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction if the first value is greater than the second value.
-     */
-    final val Bgt = new OpCode()
-	opcode(Bgt, CEE_BGT, "bgt", 0xFFFFFF3D, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction if the first value is less than or equal
-     * to the second value.
-     */
-    final val Ble = new OpCode()
-	opcode(Ble, CEE_BLE, "ble", 0xFFFFFF3E, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
-    /**
-     *  Transfers control to a target instruction if the first value is less than the second value.
-     */
-    final val Blt = new OpCode()
-	opcode(Blt, CEE_BLT, "blt", 0xFFFFFF3F, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction when two unsigned integer values or
-     * unordered float values are not equal.
-     */
-    final val Bne_Un = new OpCode()
-	opcode(Bne_Un, CEE_BNE_UN , "bne.un", 0xFFFFFF40, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction if the first value is greater than
-     * the second value, when comparing unsigned integer values or unordered float values.
-     */
-    final val Bge_Un = new OpCode()
-	opcode(Bge_Un, CEE_BGE_UN , "bge.un", 0xFFFFFF41, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction if the first value is greater than the
-     * second value, when comparing unsigned integer values or unordered float values.
-     */
-    final val Bgt_Un = new OpCode()
-	opcode(Bgt_Un, CEE_BGT_UN , "bgt.un", 0xFFFFFF42, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction if the first value is less than or equal to
-     * the second value, when comparing unsigned integer values or unordered float values.
-     */
-    final val Ble_Un = new OpCode()
-	opcode(Ble_Un, CEE_BLE_UN , "ble.un"   , 0xFFFFFF43, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
-    /**
-     * Transfers control to a target instruction if the first value is less than the second value,
-     * when comparing unsigned integer values or unordered float values.
-     */
-    final val Blt_Un = new OpCode()
-	opcode(Blt_Un, CEE_BLT_UN , "blt.un", 0xFFFFFF44, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
-    /**
-     * Implements a jump table.
-     */
-    final val Switch = new OpCode()
-	opcode(Switch, CEE_SWITCH , "switch", 0xFFFFFF45, POP_I   , PUSH_NONE, INLINE_SWITCH, FLOW_COND_BRANCH)
-
-    /**
-     * Loads a value of type int8 as an int32 onto the evaluation stack indirectly.
-     */
-    final val Ldind_I1 = new OpCode()
-	opcode(Ldind_I1, CEE_LDIND_I1 , "ldind.i1" , 0xFFFFFF46, POP_I   , PUSH_I  , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     *  Loads a value of type int16 as an int32 onto the evaluation stack indirectly.
-     */
-    final val Ldind_I2 = new OpCode()
-	opcode(Ldind_I2, CEE_LDIND_I2 , "ldind.i2" , 0xFFFFFF48, POP_I   , PUSH_I  , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Loads a value of type int32 as an int32 onto the evaluation stack indirectly.
-     */
-    final val Ldind_I4 = new OpCode()
-	opcode(Ldind_I4, CEE_LDIND_I4 , "ldind.i4" , 0xFFFFFF4A, POP_I   , PUSH_I  , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Loads a value of type int64 as an int64 onto the evaluation stack indirectly.
-     */
-    final val Ldind_I8 = new OpCode()
-	opcode(Ldind_I8, CEE_LDIND_I8 , "ldind.i8" , 0xFFFFFF4C, POP_I   , PUSH_I8 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Loads a value of type natural int as a natural int onto the evaluation stack indirectly.
-     */
-    final val Ldind_I = new OpCode()
-	opcode(Ldind_I, CEE_LDIND_I  , "ldind.i"  , 0xFFFFFF4D, POP_I   , PUSH_I  , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     *  Loads a value of type float32 as a type F (float) onto the evaluation stack indirectly.
-     */
-    final val Ldind_R4 = new OpCode()
-	opcode(Ldind_R4, CEE_LDIND_R4 , "ldind.r4" , 0xFFFFFF4E, POP_I   , PUSH_R4 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Loads a value of type float64 as a type F (float) onto the evaluation stack indirectly.
-     */
-    final val Ldind_R8 = new OpCode()
-	opcode(Ldind_R8, CEE_LDIND_R8 , "ldind.r8" , 0xFFFFFF4F, POP_I   , PUSH_R8 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Loads an object reference as a type O (object reference) onto the evaluation stack indirectly.
-     */
-    final val Ldind_Ref = new OpCode()
-	opcode(Ldind_Ref, CEE_LDIND_REF, "ldind.ref", 0xFFFFFF50, POP_I   , PUSH_REF, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Loads a value of type unsigned int8 as an int32 onto the evaluation stack indirectly.
-     */
-    final val Ldind_U1 = new OpCode()
-	opcode(Ldind_U1, CEE_LDIND_U1 , "ldind.u1" , 0xFFFFFF47, POP_I   , PUSH_I  , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Loads a value of type unsigned int16 as an int32 onto the evaluation stack indirectly.
-     */
-    final val Ldind_U2 = new OpCode()
-	opcode(Ldind_U2, CEE_LDIND_U2 , "ldind.u2" , 0xFFFFFF49, POP_I   , PUSH_I  , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Loads a value of type unsigned int32 as an int32 onto the evaluation stack indirectly.
-     */
-    final val Ldind_U4 = new OpCode()
-	opcode(Ldind_U4, CEE_LDIND_U4 , "ldind.u4" , 0xFFFFFF4B, POP_I   , PUSH_I  , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Stores an object reference value at a supplied address.
-     */
-    final val Stind_Ref = new OpCode()
-	opcode(Stind_Ref, CEE_STIND_REF, "stind.ref", 0xFFFFFF51, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Stores a value of type int8 at a supplied address.
-     */
-    final val Stind_I1 = new OpCode()
-	opcode(Stind_I1, CEE_STIND_I1 , "stind.i1", 0xFFFFFF52, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Stores a value of type int16 at a supplied address.
-     */
-    final val Stind_I2 = new OpCode()
-	opcode(Stind_I2, CEE_STIND_I2 , "stind.i2", 0xFFFFFF53, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Stores a value of type int32 at a supplied address.
-     */
-    final val Stind_I4 = new OpCode()
-	opcode(Stind_I4, CEE_STIND_I4 , "stind.i4", 0xFFFFFF54, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Stores a value of type int64 at a supplied address.
-     */
-    final val Stind_I8 = new OpCode()
-	opcode(Stind_I8, CEE_STIND_I8 , "stind.i8", 0xFFFFFF55, POP_I_I8, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Stores a value of type float32 at a supplied address.
-     */
-    final val Stind_R4 = new OpCode()
-	opcode(Stind_R4, CEE_STIND_R4 , "stind.r4", 0xFFFFFF56, POP_I_R4, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Stores a value of type float64 at a supplied address.
-     */
-    final val Stind_R8 = new OpCode()
-	opcode(Stind_R8, CEE_STIND_R8 , "stind.r8", 0xFFFFFF57, POP_I_R8, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Subtracts one value from another and pushes the result onto the evaluation stack.
-     */
-    final val Sub = new OpCode()
-	opcode(Sub, CEE_SUB, "sub"    , 0xFFFFFF59, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Multiplies two values and pushes the result on the evaluation stack.
-     */
-    final val Mul = new OpCode()
-	opcode(Mul, CEE_MUL, "mul"    , 0xFFFFFF5A, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Divides two values and pushes the result as a floating-point (type F) or
-     * quotient (type int32) onto the evaluation stack.
-     */
-    final val Div = new OpCode()
-	opcode(Div, CEE_DIV, "div"    , 0xFFFFFF5B, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Divides two unsigned integer values and pushes the result (int32) onto the evaluation stack.
-     */
-    final val Div_Un = new OpCode()
-	opcode(Div_Un, CEE_DIV_UN, "div.un" , 0xFFFFFF5C, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Divides two values and pushes the remainder onto the evaluation stack.
-     */
-    final val Rem = new OpCode()
-	opcode(Rem, CEE_REM   , "rem"    , 0xFFFFFF5D, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Divides two unsigned values and pushes the remainder onto the evaluation stack.
-     */
-    final val Rem_Un = new OpCode()
-	opcode(Rem_Un, CEE_REM_UN, "rem.un" , 0xFFFFFF5E, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Computes the bitwise AND of two values and pushes the result onto the evaluation stack.
-     */
-    final val And = new OpCode()
-	opcode(And, CEE_AND, "and"    , 0xFFFFFF5F, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Computes the bitwise OR of the two integer values on top of the stack and
-     * pushes the result onto the evaluation stack.
-     */
-    final val Or = new OpCode()
-	opcode(Or, CEE_OR , "or"     , 0xFFFFFF60, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Computes the bitwise XOR of the top two values on the evaluation stack,
-     * pushing the result onto the evaluation stack.
-     */
-    final val Xor = new OpCode()
-	opcode(Xor, CEE_XOR, "xor"    , 0xFFFFFF61, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Shifts an integer value to the left (in zeroes) by a specified number of bits,
-     *  pushing the result onto the evaluation stack.
-     */
-    final val Shl = new OpCode()
-	opcode(Shl, CEE_SHL, "shl"    , 0xFFFFFF62, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Shifts an integer value (in sign) to the right by a specified number of bits,
-     * pushing the result onto the evaluation stack.
-     */
-    final val Shr = new OpCode()
-	opcode(Shr, CEE_SHR, "shr"    , 0xFFFFFF63, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Shifts an unsigned integer value (in zeroes) to the right by a specified number of bits,
-     * pushing the result onto the evaluation stack.
-     */
-    final val Shr_Un = new OpCode()
-	opcode(Shr_Un, CEE_SHR_UN, "shr.un" , 0xFFFFFF64, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Negates a value and pushes the result onto the evaluation stack.
-     */
-    final val Neg = new OpCode()
-	opcode(Neg, CEE_NEG , "neg"    , 0xFFFFFF65, POP_1  , PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Computes the bitwise complement of the integer value on top of the stack and pushes
-     * the result onto the evaluation stack as the same type.
-     */
-    final val Not = new OpCode()
-	opcode(Not, CEE_NOT , "not"    , 0xFFFFFF66, POP_1  , PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     *  Converts the value on top of the evaluation stack to int8, then extends (pads) it to int32.
-     */
-    final val Conv_I1 = new OpCode()
-	opcode(Conv_I1, CEE_CONV_I1, "conv.i1", 0xFFFFFF67, POP_1  , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the value on top of the evaluation stack to int16, then extends (pads) it to int32.
-     */
-    final val Conv_I2 = new OpCode()
-	opcode(Conv_I2, CEE_CONV_I2, "conv.i2", 0xFFFFFF68, POP_1  , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the value on top of the evaluation stack to int32.
-     */
-    final val Conv_I4 = new OpCode()
-	opcode(Conv_I4, CEE_CONV_I4, "conv.i4", 0xFFFFFF69, POP_1  , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the value on top of the evaluation stack to int64.
-     */
-    final val Conv_I8 = new OpCode()
-	opcode(Conv_I8, CEE_CONV_I8, "conv.i8", 0xFFFFFF6A, POP_1  , PUSH_I8, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the value on top of the evaluation stack to float32.
-     */
-    final val Conv_R4 = new OpCode()
-	opcode(Conv_R4, CEE_CONV_R4, "conv.r4", 0xFFFFFF6B, POP_1  , PUSH_R4, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the value on top of the evaluation stack to float64.
-     */
-    final val Conv_R8 = new OpCode()
-	opcode(Conv_R8, CEE_CONV_R8, "conv.r8", 0xFFFFFF6C, POP_1  , PUSH_R8, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the value on top of the evaluation stack to unsigned int32, and extends it to int32.
-     */
-    final val Conv_U4 = new OpCode()
-	opcode(Conv_U4, CEE_CONV_U4, "conv.u4", 0xFFFFFF6D, POP_1  , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the value on top of the evaluation stack to unsigned int64, and extends it to int64.
-     */
-    final val Conv_U8 = new OpCode()
-	opcode(Conv_U8, CEE_CONV_U8, "conv.u8", 0xFFFFFF6E, POP_1  , PUSH_I8, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Calls a late-bound method on an object, pushing the return value onto the evaluation stack.
-     */
-    final val Callvirt = new OpCode()
-	opcode(Callvirt, CEE_CALLVIRT, "callvirt", 0xFFFFFF6F,POP_SPECIAL,PUSH_SPECIAL,INLINE_METHOD,FLOW_CALL)
-
-    /**
-     * Copies the value type located at the address of an object (type &, * or natural int)
-     * to the address of the destination object (type &, * or natural int).
-     */
-    final val Cpobj = new OpCode()
-	opcode(Cpobj, CEE_CPOBJ , "cpobj" , 0xFFFFFF70, POP_I_I , PUSH_NONE, INLINE_TYPE  , FLOW_NEXT)
-
-    /**
-     * Copies the value type object pointed to by an address to the top of the evaluation stack.
-     */
-    final val Ldobj = new OpCode()
-	opcode(Ldobj, CEE_LDOBJ , "ldobj" , 0xFFFFFF71, POP_I    , PUSH_1   , INLINE_TYPE  , FLOW_NEXT)
-
-    /**
-     * Pushes a new object reference to a string literal stored in the metadata.
-     */
-    final val Ldstr = new OpCode()
-	opcode(Ldstr, CEE_LDSTR , "ldstr" , 0xFFFFFF72, POP_NONE  , PUSH_REF , INLINE_STRING, FLOW_NEXT)
-
-    /**
-     * Creates a new object or a new instance of a value type, pushing an object reference
-     * (type O) onto the evaluation stack.
-     */
-    final val Newobj = new OpCode()
-	opcode(Newobj, CEE_NEWOBJ, "newobj", 0xFFFFFF73, POP_SPECIAL , PUSH_REF , INLINE_METHOD, FLOW_CALL)
-
-    /**
-     * Attempts to cast an object passed by reference to the specified class.
-     */
-    final val Castclass = new OpCode()
-	opcode(Castclass, CEE_CASTCLASS, "castclass", 0xFFFFFF74, POP_REF  , PUSH_REF , INLINE_TYPE  , FLOW_NEXT)
-
-    /**
-     * Tests whether an object reference (type O) is an instance of a particular class.
-     */
-    final val Isinst = new OpCode()
-	opcode(Isinst, CEE_ISINST   , "isinst"   , 0xFFFFFF75, POP_REF  , PUSH_I   , INLINE_TYPE  , FLOW_NEXT)
-
-    /**
-     *  Converts the unsigned integer value on top of the evaluation stack to float32.
-     */
-    final val Conv_R_Un = new OpCode()
-	opcode(Conv_R_Un, CEE_CONV_R_UN, "conv.r.un", 0xFFFFFF76, POP_1    , PUSH_R8  , INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     * Converts the boxed representation of a value type to its unboxed form.
-     */
-    final val Unbox = new OpCode()
-	opcode(Unbox, CEE_UNBOX  , "unbox"  , 0xFFFFFF79, POP_REF  , PUSH_I   , INLINE_TYPE  , FLOW_NEXT)
-
-    /**
-     * Throws the exception object currently on the evaluation stack.
-     */
-    final val Throw = new OpCode()
-	opcode(Throw, CEE_THROW  , "throw"  , 0xFFFFFF7A, POP_REF  , PUSH_NONE, INLINE_NONE  , FLOW_THROW)
-
-    /**
-     *  Finds the value of a field in the object whose reference is currently
-     * on the evaluation stack.
-     */
-    final val Ldfld = new OpCode()
-	opcode(Ldfld, CEE_LDFLD  , "ldfld"  , 0xFFFFFF7B, POP_REF  , PUSH_1   , INLINE_FIELD , FLOW_NEXT)
-
-    /**
-     *  Finds the address of a field in the object whose reference is currently
-     * on the evaluation stack.
-     */
-    final val Ldflda = new OpCode()
-	opcode(Ldflda, CEE_LDFLDA , "ldflda" , 0xFFFFFF7C, POP_REF  , PUSH_I   , INLINE_FIELD , FLOW_NEXT)
-
-    /**
-     * Pushes the value of a static field onto the evaluation stack.
-     */
-    final val Ldsfld = new OpCode()
-	opcode(Ldsfld, CEE_LDSFLD , "ldsfld" , 0xFFFFFF7E, POP_NONE , PUSH_1   , INLINE_FIELD , FLOW_NEXT)
-
-    /**
-     * Pushes the address of a static field onto the evaluation stack.
-     */
-    final val Ldsflda = new OpCode()
-	opcode(Ldsflda, CEE_LDSFLDA, "ldsflda", 0xFFFFFF7F, POP_NONE , PUSH_I   , INLINE_FIELD , FLOW_NEXT)
-
-    /**
-     *  Replaces the value stored in the field of an object reference or pointer with a new value.
-     */
-    final val Stfld = new OpCode()
-	opcode(Stfld, CEE_STFLD  , "stfld"  , 0xFFFFFF7D, POP_REF_1, PUSH_NONE, INLINE_FIELD , FLOW_NEXT)
-
-    /**
-     * Replaces the value of a static field with a value from the evaluation stack.
-     */
-    final val Stsfld = new OpCode()
-	opcode(Stsfld, CEE_STSFLD , "stsfld" , 0xFFFFFF80, POP_1    , PUSH_NONE, INLINE_FIELD , FLOW_NEXT)
-
-    /**
-     * Copies a value of a specified type from the evaluation stack into a supplied memory address.
-     */
-    final val Stobj = new OpCode()
-	opcode(Stobj, CEE_STOBJ  , "stobj"  , 0xFFFFFF81, POP_I_1, PUSH_NONE, INLINE_TYPE  , FLOW_NEXT)
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to signed int8 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_I1_Un = new OpCode()
-	opcode(Conv_Ovf_I1_Un, CEE_CONV_OVF_I1_UN, "conv.ovf.i1.un", 0xFFFFFF82, POP_1,PUSH_I,INLINE_NONE, FLOW_NEXT)
-
-    /**
-     *  Converts the unsigned value on top of the evaluation stack to signed int16 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_I2_Un = new OpCode()
-	opcode(Conv_Ovf_I2_Un, CEE_CONV_OVF_I2_UN, "conv.ovf.i2.un", 0xFFFFFF83,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to signed int32,
-     * throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_I4_Un = new OpCode()
-	opcode(Conv_Ovf_I4_Un, CEE_CONV_OVF_I4_UN, "conv.ovf.i4.un", 0xFFFFFF84,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to signed int64,
-     * throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_I8_Un = new OpCode()
-	opcode(Conv_Ovf_I8_Un, CEE_CONV_OVF_I8_UN, "conv.ovf.i8.un", 0xFFFFFF85,POP_1,PUSH_I8, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to signed natural int,
-     * throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_I_Un = new OpCode()
-	opcode(Conv_Ovf_I_Un, CEE_CONV_OVF_I_UN , "conv.ovf.i.un" , 0xFFFFFF8A,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to unsigned int8 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_U1_Un = new OpCode()
-	opcode(Conv_Ovf_U1_Un, CEE_CONV_OVF_U1_UN, "conv.ovf.u1.un", 0xFFFFFF86,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to unsigned int16 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_U2_Un = new OpCode()
-	opcode(Conv_Ovf_U2_Un, CEE_CONV_OVF_U2_UN, "conv.ovf.u2.un", 0xFFFFFF87,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to unsigned int32,
-     * throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_U4_Un = new OpCode()
-	opcode(Conv_Ovf_U4_Un, CEE_CONV_OVF_U4_UN, "conv.ovf.u4.un", 0xFFFFFF88,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to unsigned int64,
-     * throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_U8_Un = new OpCode()
-	opcode(Conv_Ovf_U8_Un, CEE_CONV_OVF_U8_UN, "conv.ovf.u8.un", 0xFFFFFF89,POP_1,PUSH_I8, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to unsigned natural int,
-     * throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_U_Un = new OpCode()
-	opcode(Conv_Ovf_U_Un, CEE_CONV_OVF_U_UN , "conv.ovf.u.un" , 0xFFFFFF8B,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts a value type to an object reference (type O).
-     */
-    final val Box = new OpCode()
-	opcode(Box, CEE_BOX       , "box"       , 0xFFFFFF8C, POP_1  , PUSH_REF , INLINE_TYPE , FLOW_NEXT)
-
-    /**
-     * Pushes an object reference to a new zero-based, one-dimensional array whose elements
-     * are of a specific type onto the evaluation stack.
-     */
-    final val Newarr = new OpCode()
-	opcode(Newarr, CEE_NEWARR, "newarr"    , 0xFFFFFF8D, POP_I  , PUSH_REF , INLINE_TYPE , FLOW_NEXT)
-
-    /**
-     * Pushes the number of elements of a zero-based, one-dimensional array
-     * onto the evaluation stack.
-     */
-    final val Ldlen = new OpCode()
-	opcode(Ldlen, CEE_LDLEN, "ldlen", 0xFFFFFF8E, POP_REF, PUSH_I,INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the address of the array element at a specified array index onto
-     * the top of the evaluation stack as type & (managed pointer).
-     */
-    final val Ldelema = new OpCode()
-	opcode(Ldelema, CEE_LDELEMA, "ldelema"   , 0xFFFFFF8F, POP_REF_I, PUSH_I, INLINE_TYPE , FLOW_NEXT)
-
-    /**
-     * Loads the element with type natural int at a specified array index onto the top
-     * of the evaluation stack as a natural int.
-     */
-    final val Ldelem_I = new OpCode()
-	opcode(Ldelem_I, CEE_LDELEM_I, "ldelem.i"  , 0xFFFFFF97, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the element with type int8 at a specified array index onto the top of the
-     * evaluation stack as an int32.
-     */
-    final val Ldelem_I1 = new OpCode()
-	opcode(Ldelem_I1, CEE_LDELEM_I1, "ldelem.i1" , 0xFFFFFF90, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the element with type int16 at a specified array index onto the top of
-     * the evaluation stack as an int32.
-     */
-    final val Ldelem_I2 = new OpCode()
-	opcode(Ldelem_I2, CEE_LDELEM_I2, "ldelem.i2" , 0xFFFFFF92, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     *  Loads the element with type int32 at a specified array index onto the top of the
-     * evaluation stack as an int32.
-     */
-    final val Ldelem_I4 = new OpCode()
-	opcode(Ldelem_I4, CEE_LDELEM_I4, "ldelem.i4" , 0xFFFFFF94, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     *  Loads the element with type int64 at a specified array index onto the top of the
-     * evaluation stack as an int64.
-     */
-    final val Ldelem_I8 = new OpCode()
-	opcode(Ldelem_I8, CEE_LDELEM_I8, "ldelem.i8" , 0xFFFFFF96, POP_REF_I, PUSH_I8, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the element with type float32 at a specified array index onto the top of the
-     * evaluation stack as type F (float).
-     */
-    final val Ldelem_R4 = new OpCode()
-	opcode(Ldelem_R4, CEE_LDELEM_R4, "ldelem.r4" , 0xFFFFFF98, POP_REF_I, PUSH_R4, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the element with type float64 at a specified array index onto the top of the
-     * evaluation stack as type F (float).
-     */
-    final val Ldelem_R8 = new OpCode()
-	opcode(Ldelem_R8, CEE_LDELEM_R8, "ldelem.r8" , 0xFFFFFF99, POP_REF_I, PUSH_R8, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the element containing an object reference at a specified array index onto
-     * the top of the evaluation stack as type O (object reference).
-     */
-    final val Ldelem_Ref = new OpCode()
-	opcode(Ldelem_Ref, CEE_LDELEM_REF, "ldelem.ref", 0xFFFFFF9A, POP_REF_I, PUSH_REF, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the element with type unsigned int8 at a specified array index onto the top
-     * of the evaluation stack as an int32.
-     */
-    final val Ldelem_U1 = new OpCode()
-	opcode(Ldelem_U1, CEE_LDELEM_U1, "ldelem.u1" , 0xFFFFFF91, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the element with type unsigned int16 at a specified array index onto the top
-     * of the evaluation stack as an int32.
-     */
-    final val Ldelem_U2 = new OpCode()
-	opcode(Ldelem_U2, CEE_LDELEM_U2, "ldelem.u2" , 0xFFFFFF93, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Loads the element with type unsigned int32 at a specified array index onto the top
-     * of the evaluation stack as an int32.
-     */
-    final val Ldelem_U4 = new OpCode()
-	opcode(Ldelem_U4, CEE_LDELEM_U4, "ldelem.u4" , 0xFFFFFF95, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     *  Replaces the array element at a given index with the natural int value on
-     * the evaluation stack.
-     */
-    final val Stelem_I = new OpCode()
-	opcode(Stelem_I, CEE_STELEM_I, "stelem.i", 0xFFFFFF9B, POP_REF_I_I, PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Replaces the array element at a given index with the int8 value on the evaluation stack.
-     */
-    final val Stelem_I1 = new OpCode()
-	opcode(Stelem_I1, CEE_STELEM_I1, "stelem.i1", 0xFFFFFF9C, POP_REF_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     *  Replaces the array element at a given index with the int16 value on the evaluation stack.
-     */
-    final val Stelem_I2 = new OpCode()
-	opcode(Stelem_I2, CEE_STELEM_I2, "stelem.i2", 0xFFFFFF9D, POP_REF_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     *  Replaces the array element at a given index with the int32 value on the evaluation stack.
-     */
-    final val Stelem_I4 = new OpCode()
-	opcode(Stelem_I4, CEE_STELEM_I4, "stelem.i4", 0xFFFFFF9E, POP_REF_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Replaces the array element at a given index with the int64 value on the evaluation stack.
-     */
-    final val Stelem_I8 = new OpCode()
-	opcode(Stelem_I8, CEE_STELEM_I8,"stelem.i8", 0xFFFFFF9F, POP_REF_I_I8, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Replaces the array element at a given index with the float32 value on the evaluation stack.
-     */
-    final val Stelem_R4 = new OpCode()
-	opcode(Stelem_R4, CEE_STELEM_R4,"stelem.r4", 0xFFFFFFA0, POP_REF_I_R4, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Replaces the array element at a given index with the float64 value on the evaluation stack.
-     */
-    final val Stelem_R8 = new OpCode()
-	opcode(Stelem_R8, CEE_STELEM_R8,"stelem.r8", 0xFFFFFFA1, POP_REF_I_R8, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Replaces the array element at a given index with the object ref value (type O)
-     * on the evaluation stack.
-     */
-    final val Stelem_Ref = new OpCode()
-	opcode(Stelem_Ref, CEE_STELEM_REF,"stelem.ref",0xFFFFFFA2,POP_REF_I_REF,PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Converts the signed value on top of the evaluation stack to signed int8 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_I1 = new OpCode()
-	opcode(Conv_Ovf_I1, CEE_CONV_OVF_I1, "conv.ovf.i1", 0xFFFFFFB3, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Converts the signed value on top of the evaluation stack to signed int16 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_I2 = new OpCode()
-	opcode(Conv_Ovf_I2, CEE_CONV_OVF_I2, "conv.ovf.i2", 0xFFFFFFB5, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Converts the signed value on top of the evaluation stack to signed int32,
-     * throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_I4 = new OpCode()
-	opcode(Conv_Ovf_I4, CEE_CONV_OVF_I4, "conv.ovf.i4", 0xFFFFFFB7, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Converts the signed value on top of the evaluation stack to signed int64,
-     * throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_I8 = new OpCode()
-	opcode(Conv_Ovf_I8, CEE_CONV_OVF_I8, "conv.ovf.i8", 0xFFFFFFB9, POP_1, PUSH_I8, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Converts the signed value on top of the evaluation stack to unsigned int8 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_U1 = new OpCode()
-	opcode(Conv_Ovf_U1, CEE_CONV_OVF_U1, "conv.ovf.u1", 0xFFFFFFB4, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Converts the signed value on top of the evaluation stack to unsigned int16 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_U2 = new OpCode()
-	opcode(Conv_Ovf_U2, CEE_CONV_OVF_U2, "conv.ovf.u2", 0xFFFFFFB6, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
-    /**
-     *  Converts the signed value on top of the evaluation stack to unsigned int32,
-     * throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_U4 = new OpCode()
-	opcode(Conv_Ovf_U4, CEE_CONV_OVF_U4, "conv.ovf.u4", 0xFFFFFFB8, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
-    /**
-     * Converts the signed value on top of the evaluation stack to unsigned int64,
-     * throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_U8 = new OpCode()
-	opcode(Conv_Ovf_U8, CEE_CONV_OVF_U8, "conv.ovf.u8", 0xFFFFFFBA, POP_1, PUSH_I8, INLINE_NONE , FLOW_NEXT)
-
-    /**
-     *  Retrieves the address (type &) embedded in a typed reference.
-     */
-    final val Refanyval = new OpCode()
-	opcode(Refanyval, CEE_REFANYVAL, "refanyval", 0xFFFFFFC2, POP_1, PUSH_I   , INLINE_TYPE  , FLOW_NEXT)
-
-    /**
-     * Retrieves the type token embedded in a typed reference.
-     */
-    final val Refanytype = new OpCode()
-	opcode(Refanytype, CEE_REFANYTYPE, "refanytype", 0xFFFFFE1D, POP_1    , PUSH_I   , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Throws ArithmeticException if value is not a finite number.
-     */
-    final val Ckfinite = new OpCode()
-	opcode(Ckfinite, CEE_CKFINITE, "ckfinite" , 0xFFFFFFC3, POP_1, PUSH_R8  , INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     * Pushes a typed reference to an instance of a specific type onto the evaluation stack.
-     */
-    final val Mkrefany = new OpCode()
-	opcode(Mkrefany, CEE_MKREFANY, "mkrefany" , 0xFFFFFFC6, POP_I, PUSH_1   , INLINE_TYPE  , FLOW_NEXT)
-
-    /**
-     * Converts a metadata token to its runtime representation, pushing it onto the evaluation stack.
-     */
-    final val Ldtoken = new OpCode()
-	opcode(Ldtoken, CEE_LDTOKEN    , "ldtoken"   , 0xFFFFFFD0, POP_NONE, PUSH_I, INLINE_TOKEN , FLOW_NEXT)
-
-    /**
-     * Converts the value on top of the evaluation stack to unsigned int8, and extends it to int32.
-     */
-    final val Conv_U1 = new OpCode()
-	opcode(Conv_U1, CEE_CONV_U1    , "conv.u1"   , 0xFFFFFFD2, POP_1, PUSH_I, INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     * Converts the value on top of the evaluation stack to unsigned int16, and extends it to int32.
-     */
-    final val Conv_U2 = new OpCode()
-	opcode(Conv_U2, CEE_CONV_U2    , "conv.u2"   , 0xFFFFFFD1, POP_1, PUSH_I, INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     * Converts the value on top of the evaluation stack to natural int.
-     */
-    final val Conv_I = new OpCode()
-	opcode(Conv_I, CEE_CONV_I     , "conv.i"    , 0xFFFFFFD3, POP_1, PUSH_I, INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     * Converts the signed value on top of the evaluation stack to signed natural int,
-     * throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_I = new OpCode()
-	opcode(Conv_Ovf_I, CEE_CONV_OVF_I , "conv.ovf.i", 0xFFFFFFD4, POP_1, PUSH_I, INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     * Converts the signed value on top of the evaluation stack to unsigned natural int,
-     * throwing OverflowException on overflow.
-     */
-    final val Conv_Ovf_U = new OpCode()
-	opcode(Conv_Ovf_U, CEE_CONV_OVF_U , "conv.ovf.u", 0xFFFFFFD5, POP_1, PUSH_I, INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     * Adds two integers, performs an overflow check, and pushes the result
-     * onto the evaluation stack.
-     */
-    final val Add_Ovf = new OpCode()
-	opcode(Add_Ovf, CEE_ADD_OVF    , "add.ovf"   , 0xFFFFFFD6, POP_1_1, PUSH_1, INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     *  Adds two unsigned integer values, performs an overflow check, and pushes the result
-     * onto the evaluation stack.
-     */
-    final val Add_Ovf_Un = new OpCode()
-	opcode(Add_Ovf_Un, CEE_ADD_OVF_UN , "add.ovf.un", 0xFFFFFFD7, POP_1_1, PUSH_1, INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     * Multiplies two integer values, performs an overflow check, and pushes the result
-     * onto the evaluation stack.
-     */
-    final val Mul_Ovf = new OpCode()
-	opcode(Mul_Ovf, CEE_MUL_OVF    , "mul.ovf"   , 0xFFFFFFD8, POP_1_1, PUSH_1, INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     * Multiplies two unsigned integer values, performs an overflow check,
-     * and pushes the result onto the evaluation stack.
-     */
-    final val Mul_Ovf_Un = new OpCode()
-	opcode(Mul_Ovf_Un, CEE_MUL_OVF_UN , "mul.ovf.un", 0xFFFFFFD9, POP_1_1, PUSH_1, INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     * Subtracts one integer value from another, performs an overflow check,
-     * and pushes the result onto the evaluation stack.
-     */
-    final val Sub_Ovf = new OpCode()
-	opcode(Sub_Ovf, CEE_SUB_OVF   , "sub.ovf"   , 0xFFFFFFDA, POP_1_1, PUSH_1, INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     * Subtracts one unsigned integer value from another, performs an overflow check,
-     * and pushes the result onto the evaluation stack.
-     */
-    final val Sub_Ovf_Un = new OpCode()
-	opcode(Sub_Ovf_Un, CEE_SUB_OVF_UN, "sub.ovf.un", 0xFFFFFFDB, POP_1_1, PUSH_1, INLINE_NONE  , FLOW_NEXT)
-
-    /**
-     * Transfers control from the fault or finally clause of an exception block back to
-     * the Common Language Infrastructure (CLI) exception handler.
-     */
-    final val Endfinally = new OpCode()
-	opcode(Endfinally, CEE_ENDFINALLY, "endfinally", 0xFFFFFFDC, POP_NONE, PUSH_NONE, INLINE_NONE, FLOW_RETURN)
-
-    /**
-     * Exits a protected region of code, unconditionally transferring control
-     * to a specific target instruction.
-     */
-    final val Leave = new OpCode()
-	opcode(Leave, CEE_LEAVE, "leave", 0xFFFFFFDD, POP_NONE, PUSH_NONE, INLINE_TARGET, FLOW_BRANCH)
-
-    /**
-     * Exits a protected region of code, unconditionally transferring control
-     * to a target instruction (short form).
-     */
-    final val Leave_S = new OpCode()
-	opcode(Leave_S, CEE_LEAVE_S, "leave.s", 0xFFFFFFDE, POP_NONE, PUSH_NONE, INLINE_TARGET_S, FLOW_BRANCH)
-
-    /**
-     * Stores a value of type natural int at a supplied address.
-     */
-    final val Stind_I = new OpCode()
-	opcode(Stind_I, CEE_STIND_I, "stind.i", 0xFFFFFFDF, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     *  Converts the value on top of the evaluation stack to unsigned natural int,
-     * and extends it to natural int.
-     */
-    final val Conv_U = new OpCode()
-	opcode(Conv_U, CEE_CONV_U, "conv.u", 0xFFFFFFE0, POP_1   , PUSH_I   , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Returns an unmanaged pointer to the argument list of the current method.
-     */
-    final val Arglist = new OpCode()
-	opcode(Arglist, CEE_ARGLIST, "arglist"  , 0xFFFFFE00, POP_NONE, PUSH_I   , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Compares two values. If they are equal, the integer value 1 (int32) is pushed
-     * onto the evaluation stack; otherwise 0 (int32) is pushed onto the evaluation stack.
-     */
-    final val Ceq = new OpCode()
-	opcode(Ceq, CEE_CEQ, "ceq", 0xFFFFFE01, POP_1_1 , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Compares two values. If the first value is greater than the second,
-     * the integer value 1 (int32) is pushed onto the evaluation stack;
-     * otherwise 0 (int32) is pushed onto the evaluation stack.
-     */
-    final val Cgt = new OpCode()
-	opcode(Cgt, CEE_CGT, "cgt", 0xFFFFFE02, POP_1_1 , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     *  Compares two unsigned or unordered values. If the first value is greater than
-     * the second, the integer value 1 (int32) is pushed onto the evaluation stack;
-     * otherwise 0 (int32) is pushed onto the evaluation stack.
-     */
-    final val Cgt_Un = new OpCode()
-	opcode(Cgt_Un, CEE_CGT_UN, "cgt.un", 0xFFFFFE03, POP_1_1 , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Compares two values. If the first value is less than the second,
-     * the integer value 1 (int32) is pushed onto the evaluation stack;
-     * otherwise 0 (int32) is pushed onto the evaluation stack.
-     */
-    final val Clt = new OpCode()
-	opcode(Clt, CEE_CLT, "clt"      , 0xFFFFFE04, POP_1_1 , PUSH_I   , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     *  Compares the unsigned or unordered values value1 and value2. If value1 is
-     * less than value2, then the integer value 1 (int32) is pushed onto the
-     * evaluation stack; otherwise 0 (int32) is pushed onto the evaluation stack.
-     */
-    final val Clt_Un = new OpCode()
-	opcode(Clt_Un, CEE_CLT_UN , "clt.un"   , 0xFFFFFE05, POP_1_1 , PUSH_I   , INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Pushes an unmanaged pointer (type natural int) to the native code implementing
-     * a specific method onto the evaluation stack.
-     */
-    final val Ldftn = new OpCode()
-	opcode(Ldftn, CEE_LDFTN , "ldftn"    , 0xFFFFFE06, POP_NONE, PUSH_I   , INLINE_METHOD, FLOW_NEXT)
-
-    /**
-     * Pushes an unmanaged pointer (type natural int) to the native code implementing
-     * a particular virtual method associated with a specified object onto the evaluation stack.
-     */
-    final val Ldvirtftn = new OpCode()
-	opcode(Ldvirtftn, CEE_LDVIRTFTN, "ldvirtftn", 0xFFFFFE07, POP_REF , PUSH_I   , INLINE_METHOD, FLOW_NEXT)
-
-    /**
-     * Loads an argument (referenced by a specified index value) onto the stack.
-     */
-    final val Ldarg = new OpCode()
-	opcode(Ldarg, CEE_LDARG , "ldarg"    , 0xFFFFFE09, POP_NONE, PUSH_1   , INLINE_VARIABLE  , FLOW_NEXT)
-
-    /**
-     * Load an argument address onto the evaluation stack.
-     */
-    final val Ldarga = new OpCode()
-	opcode(Ldarga, CEE_LDARGA , "ldarga", 0xFFFFFE0A, POP_NONE, PUSH_I, INLINE_VARIABLE  , FLOW_NEXT)
-
-    /**
-     * Loads the local variable at a specific index onto the evaluation stack.
-     */
-    final val Ldloc = new OpCode()
-	opcode(Ldloc, CEE_LDLOC, "ldloc", 0xFFFFFE0C, POP_NONE, PUSH_1   , INLINE_VARIABLE  , FLOW_NEXT)
-
-    /**
-     *  Loads the address of the local variable at a specific index onto the evaluation stack.
-     */
-    final val Ldloca = new OpCode()
-	opcode(Ldloca, CEE_LDLOCA, "ldloca", 0xFFFFFE0D, POP_NONE, PUSH_I, INLINE_VARIABLE  , FLOW_NEXT)
-
-    /**
-     *  Stores the value on top of the evaluation stack in the argument slot at a specified index.
-     */
-    final val Starg = new OpCode()
-	opcode(Starg, CEE_STARG, "starg", 0xFFFFFE0B, POP_1   , PUSH_NONE, INLINE_VARIABLE  , FLOW_NEXT)
-
-    /**
-     * Pops the current value from the top of the evaluation stack and stores it in
-     * the local variable list at a specified index.
-     */
-    final val Stloc = new OpCode()
-	opcode(Stloc, CEE_STLOC, "stloc", 0xFFFFFE0E, POP_1   , PUSH_NONE, INLINE_VARIABLE  , FLOW_NEXT)
-
-    /**
-     * Allocates a certain number of bytes from the local dynamic memory pool and pushes the
-     * address (a transient pointer, type *) of the first allocated byte onto the evaluation stack.
-     */
-    final val Localloc = new OpCode()
-	opcode(Localloc, CEE_LOCALLOC, "localloc"  , 0xFFFFFE0F, POP_I, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Transfers control from the filter clause of an exception back to the
-     * Common Language Infrastructure (CLI) exception handler.
-     */
-    final val Endfilter = new OpCode()
-	opcode(Endfilter, CEE_ENDFILTER, "endfilter" , 0xFFFFFE11, POP_I   , PUSH_NONE, INLINE_NONE, FLOW_RETURN)
-
-    /**
-     * Indicates that an address currently atop the evaluation stack might not be aligned
-     * to the natural size of the immediately following ldind, stind, ldfld, stfld, ldobj,
-     * stobj, initblk, or cpblk instruction.
-     */
-    final val Unaligned = new OpCode()
-	opcode(Unaligned, CEE_UNALIGNED, "unaligned.", 0xFFFFFE12, POP_NONE, PUSH_NONE, INLINE_I_S , FLOW_META)
-
-    /**
-     * Specifies that an address currently atop the evaluation stack might be volatile,
-     * and the results of reading that location cannot be cached or that multiple stores
-     * to that location cannot be suppressed.
-     */
-    final val Volatile = new OpCode()
-	opcode(Volatile, CEE_VOLATILE, "volatile." , 0xFFFFFE13, POP_NONE, PUSH_NONE, INLINE_NONE, FLOW_META)
-
-    /**
-     * Performs a postfixed method call instruction such that the current method's stack
-     * frame is removed before the actual call instruction is executed.
-     */
-    final val Tailcall = new OpCode()
-	opcode(Tailcall, CEE_TAILCALL, "tail."     , 0xFFFFFE14, POP_NONE, PUSH_NONE, INLINE_NONE, FLOW_META)
-
-    /**
-     * Initializes all the fields of the object at a specific address to a null reference
-     * or a 0 of the appropriate primitive type.
-     */
-    final val Initobj = new OpCode()
-	opcode(Initobj, CEE_INITOBJ , "initobj"   , 0xFFFFFE15, POP_I   , PUSH_NONE, INLINE_TYPE, FLOW_NEXT)
-
-    /**
-     * Copies a specified number of bytes from a source address to a destination address.
-     */
-    final val Cpblk = new OpCode()
-	opcode(Cpblk, CEE_CPBLK , "cpblk"     , 0xFFFFFE17, POP_I_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Initializes a specified block of memory at a specific address to a given size
-     * and initial value.
-     */
-    final val Initblk = new OpCode()
-	opcode(Initblk, CEE_INITBLK , "initblk"   , 0xFFFFFE18, POP_I_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
-    /**
-     * Rethrows the current exception.
-     */
-    final val Rethrow = new OpCode()
-	opcode(Rethrow, CEE_RETHROW , "rethrow", 0xFFFFFE1A, POP_NONE , PUSH_NONE, INLINE_NONE, FLOW_THROW)
-
-    /**
-     * Pushes the size, in bytes, of a supplied value type onto the evaluation stack.
-     */
-    final val Sizeof = new OpCode()
-	opcode(Sizeof, CEE_SIZEOF, "sizeof", 0xFFFFFE1C, POP_NONE , PUSH_I   , INLINE_TYPE, FLOW_NEXT)
-
-
-
-    //##########################################################################
-}
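
The opcode(...) calls above follow an allocate-then-initialise pattern: each final val is created as a bare OpCode and then described with its CIL mnemonic, encoded value, stack pop/push behaviour, operand format, and control-flow kind. The snippet below is a minimal, self-contained sketch of that pattern with hypothetical names (SketchOpCode, describe, stackDelta); it is not part of this API and only illustrates how such pop/push descriptors can be used to track evaluation-stack depth.

    object OpCodeSketch {
      final class SketchOpCode {
        var name: String = _
        var value: Int = _
        var popped: Int = _   // operands consumed from the evaluation stack
        var pushed: Int = _   // results produced onto the evaluation stack
      }

      // Mirrors the allocate-then-initialise style of the opcode(...) calls above.
      private def describe(op: SketchOpCode, name: String, value: Int, popped: Int, pushed: Int): Unit = {
        op.name = name; op.value = value; op.popped = popped; op.pushed = pushed
      }

      final val Ldarg_0 = new SketchOpCode
      describe(Ldarg_0, "ldarg.0", 0x02, popped = 0, pushed = 1)

      final val Ldarg_1 = new SketchOpCode
      describe(Ldarg_1, "ldarg.1", 0x03, popped = 0, pushed = 1)

      final val Add_Ovf = new SketchOpCode
      describe(Add_Ovf, "add.ovf", 0xD6, popped = 2, pushed = 1)

      /** Net effect of a straight-line instruction sequence on evaluation-stack depth. */
      def stackDelta(ops: Seq[SketchOpCode]): Int =
        ops.map(op => op.pushed - op.popped).sum

      // stackDelta(Seq(Ldarg_0, Ldarg_1, Add_Ovf)) == 1: one result is left on the stack.
    }
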
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
deleted file mode 100644
index 80e4267..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
+++ /dev/null
@@ -1,1205 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-
-/**
- * Provides field representations of the Microsoft Intermediate Language (MSIL)
- * instructions for emission by the ILGenerator class members (such as Emit).
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-object OpCodes {
-
-    //##########################################################################
-
-    /**
-     * Adds two values and pushes the result onto the evaluation stack.
-     */
-     final val Add = OpCode.Add
-
-    /**
-     * Fills space if bytecodes are patched. No meaningful operation is performed
-     * although a processing cycle can be consumed.
-     */
-     final val Nop = OpCode.Nop
-
-    /**
-     * Signals the Common Language Infrastructure (CLI) to inform the debugger that
-     * a break point has been tripped.
-     */
-     final val Break = OpCode.Break
-
-    /**
-     * Loads the argument at index 0 onto the evaluation stack.
-     */
-     final val Ldarg_0 = OpCode.Ldarg_0
-
-    /**
-     * Loads the argument at index 1 onto the evaluation stack.
-     */
-     final val Ldarg_1 = OpCode.Ldarg_1
-
-    /**
-     * Loads the argument at index 2 onto the evaluation stack.
-     */
-     final val Ldarg_2 = OpCode.Ldarg_2
-
-    /**
-     * Loads the argument at index 3 onto the evaluation stack.
-     */
-     final val Ldarg_3 = OpCode.Ldarg_3
-
-    /**
-     * Loads the local variable at index 0 onto the evaluation stack.
-     */
-     final val Ldloc_0 = OpCode.Ldloc_0
-
-    /**
-     * Loads the local variable at index 1 onto the evaluation stack.
-     */
-     final val Ldloc_1 = OpCode.Ldloc_1
-
-    /**
-     * Loads the local variable at index 2 onto the evaluation stack.
-     */
-     final val Ldloc_2 = OpCode.Ldloc_2
-
-    /**
-     * Loads the local variable at index 3 onto the evaluation stack.
-     */
-     final val Ldloc_3 = OpCode.Ldloc_3
-
-    /**
-     * Pops the current value from the top of the evaluation stack and
-     * stores it in the local variable list at index 0.
-     */
-     final val Stloc_0 = OpCode.Stloc_0
-
-    /**
-     * Pops the current value from the top of the evaluation stack and
-     * stores it in the local variable list at index 1.
-     */
-     final val Stloc_1 = OpCode.Stloc_1
-
-    /**
-     * Pops the current value from the top of the evaluation stack and
-     * stores it in the local variable list at index 2.
-     */
-     final val Stloc_2 = OpCode.Stloc_2
-
-    /**
-     * Pops the current value from the top of the evaluation stack and
-     * stores it in the local variable list at index 3.
-     */
-     final val Stloc_3 = OpCode.Stloc_3
-
-    /**
-     * Loads the argument (referenced by a specified short form index)
-     * onto the evaluation stack.
-     */
-     final val Ldarg_S = OpCode.Ldarg_S
-
-    /**
-     * Load an argument address, in short form, onto the evaluation stack.
-     */
-     final val Ldarga_S = OpCode.Ldarga_S
-
-    /**
-     * Loads the local variable at a specific index onto the evaluation stack,
-     * short form.
-     */
-     final val Ldloc_S = OpCode.Ldloc_S
-
-    /**
-     * Loads the address of the local variable at a specific index onto
-     * the evaluation stack, short form.
-     */
-     final val Ldloca_S = OpCode.Ldloca_S
-
-    /**
-     * Stores the value on top of the evaluation stack in the argument slot
-     * at a specified index, short form.
-     */
-     final val Starg_S = OpCode.Starg_S
-
-    /**
-     * Pops the current value from the top of the evaluation stack and stores it
-     * in the local variable list at a specified index (short form).
-     */
-     final val Stloc_S = OpCode.Stloc_S
-
-    /**
-     * Pushes a null reference (type O) onto the evaluation stack.
-     */
-     final val Ldnull = OpCode.Ldnull
-
-    /**
-     * Pushes the integer value of -1 onto the evaluation stack as an int32.
-     */
-     final val Ldc_I4_M1 = OpCode.Ldc_I4_M1
-
-    /**
-     * Pushes the integer value of 0 onto the evaluation stack as an int32.
-     */
-     final val Ldc_I4_0 = OpCode.Ldc_I4_0
-
-    /**
-     * Pushes the integer value of 1 onto the evaluation stack as an int32.
-     */
-     final val Ldc_I4_1 = OpCode.Ldc_I4_1
-
-    /**
-     * Pushes the integer value of 2 onto the evaluation stack as an int32.
-     */
-     final val Ldc_I4_2 = OpCode.Ldc_I4_2
-
-    /**
-     * Pushes the integer value of 3 onto the evaluation stack as an int32.
-     */
-     final val Ldc_I4_3 = OpCode.Ldc_I4_3
-
-    /**
-     * Pushes the integer value of 4 onto the evaluation stack as an int32.
-     */
-     final val Ldc_I4_4 = OpCode.Ldc_I4_4
-
-    /**
-     * Pushes the integer value of 5 onto the evaluation stack as an int32.
-     */
-     final val Ldc_I4_5 = OpCode.Ldc_I4_5
-
-    /**
-     * Pushes the integer value of 6 onto the evaluation stack as an int32.
-     */
-     final val Ldc_I4_6 = OpCode.Ldc_I4_6
-
-    /**
-     * Pushes the integer value of 7 onto the evaluation stack as an int32.
-     */
-     final val Ldc_I4_7 = OpCode.Ldc_I4_7
-
-    /**
-     * Pushes the integer value of 8 onto the evaluation stack as an int32.
-     */
-     final val Ldc_I4_8 = OpCode.Ldc_I4_8
-
-    /**
-     * Pushes the supplied int8 value onto the evaluation stack as an int32, short form.
-     */
-     final val Ldc_I4_S = OpCode.Ldc_I4_S
-
-    /**
-     * Pushes a supplied value of type int32 onto the evaluation stack as an int32.
-     */
-     final val Ldc_I4 = OpCode.Ldc_I4
-
-    /**
-     *  Pushes a supplied value of type int64 onto the evaluation stack as an int64.
-     */
-     final val Ldc_I8 = OpCode.Ldc_I8
-
-    /**
-     * Pushes a supplied value of type float32 onto the evaluation stack as type F (float).
-     */
-     final val Ldc_R4 = OpCode.Ldc_R4
-
-    /**
-     * Pushes a supplied value of type float64 onto the evaluation stack as type F (float).
-     */
-     final val Ldc_R8 = OpCode.Ldc_R8
-
-    /**
-     * Copies the current topmost value on the evaluation stack, and then pushes the copy
-     * onto the evaluation stack.
-     */
-     final val Dup = OpCode.Dup
-
-    /**
-     * Removes the value currently on top of the evaluation stack.
-     */
-     final val Pop = OpCode.Pop
-
-    /**
-     * Exits current method and jumps to specified method.
-     */
-     final val Jmp = OpCode.Jmp
-
-    /**
-     * Calls the method indicated by the passed method descriptor.
-     */
-     final val Call = OpCode.Call
-
-    /**
-     * constrained. prefix
-     */
-     final val Constrained = OpCode.Constrained
-
-    /**
-     * readonly. prefix
-     */
-     final val Readonly = OpCode.Readonly
-
-    /**
-     * Calls the method indicated on the evaluation stack (as a pointer to an entry point)
-     * with arguments described by a calling convention.
-     */
-     final val Calli = OpCode.Calli
-
-    /**
-     * Returns from the current method, pushing a return value (if present) from the callee's
-     * evaluation stack onto the caller's evaluation stack.
-     */
-     final val Ret = OpCode.Ret
-
-    /**
-     * Unconditionally transfers control to a target instruction (short form).
-     */
-     final val Br_S = OpCode.Br_S
-
-    /**
-     * Transfers control to a target instruction if value is false, a null reference, or zero.
-     */
-     final val Brfalse_S = OpCode.Brfalse_S
-
-    /**
-     * Transfers control to a target instruction (short form) if value is true, not null, or non-zero.
-     */
-     final val Brtrue_S = OpCode.Brtrue_S
-
-    /**
-     * Transfers control to a target instruction (short form) if two values are equal.
-     */
-     final val Beq_S = OpCode.Beq_S
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is greater than
-     * or equal to the second value.
-     */
-     final val Bge_S = OpCode.Bge_S
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is greater than
-     * the second value.
-     */
-     final val Bgt_S = OpCode.Bgt_S
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is less than
-     * or equal to the second value.
-     */
-     final val Ble_S = OpCode.Ble_S
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is less than
-     * the second value.
-     */
-     final val Blt_S = OpCode.Blt_S
-
-    /**
-     * Transfers control to a target instruction (short form) when two unsigned integer values
-     * or unordered float values are not equal.
-     */
-     final val Bne_Un_S = OpCode.Bne_Un_S
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is greater
-     * than or equal to the second value, when comparing unsigned integer values or unordered float values.
-     */
-     final val Bge_Un_S = OpCode.Bge_Un_S
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is greater than
-     * the second value, when comparing unsigned integer values or unordered float values.
-     */
-     final val Bgt_Un_S = OpCode.Bgt_Un_S
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is less than
-     * or equal to the second value, when comparing unsigned integer values or unordered float values.
-     */
-     final val Ble_Un_S = OpCode.Ble_Un_S
-
-    /**
-     * Transfers control to a target instruction (short form) if the first value is less than
-     * the second value, when comparing unsigned integer values or unordered float values.
-     */
-     final val Blt_Un_S = OpCode.Blt_Un_S
-
-    /**
-     * Unconditionally transfers control to a target instruction.
-     */
-     final val Br = OpCode.Br
-
-    /**
-     * Transfers control to a target instruction if value is false, a null reference
-     * (Nothing in Visual Basic), or zero.
-     */
-     final val Brfalse = OpCode.Brfalse
-
-    /**
-     * Transfers control to a target instruction if value is true, not null, or non-zero.
-     */
-     final val Brtrue = OpCode.Brtrue
-
-    /**
-     * Transfers control to a target instruction if two values are equal.
-     */
-     final val Beq = OpCode.Beq
-
-    /**
-     * Transfers control to a target instruction if the first value is greater than or
-     * equal to the second value.
-     */
-     final val Bge = OpCode.Bge
-
-    /**
-     * Transfers control to a target instruction if the first value is greater than the second value.
-     */
-     final val Bgt = OpCode.Bgt
-
-    /**
-     * Transfers control to a target instruction if the first value is less than or equal
-     * to the second value.
-     */
-     final val Ble = OpCode.Ble
-
-    /**
-     *  Transfers control to a target instruction if the first value is less than the second value.
-     */
-     final val Blt = OpCode.Blt
-
-    /**
-     * Transfers control to a target instruction when two unsigned integer values or
-     * unordered float values are not equal.
-     */
-     final val Bne_Un = OpCode.Bne_Un
-
-    /**
-     * Transfers control to a target instruction if the first value is greater than or equal to
-     * the second value, when comparing unsigned integer values or unordered float values.
-     */
-     final val Bge_Un = OpCode.Bge_Un
-
-    /**
-     * Transfers control to a target instruction if the first value is greater than the
-     * second value, when comparing unsigned integer values or unordered float values.
-     */
-     final val Bgt_Un = OpCode.Bgt_Un
-
-    /**
-     * Transfers control to a target instruction if the first value is less than or equal to
-     * the second value, when comparing unsigned integer values or unordered float values.
-     */
-     final val Ble_Un = OpCode.Ble_Un
-
-    /**
-     * Transfers control to a target instruction if the first value is less than the second value,
-     * when comparing unsigned integer values or unordered float values.
-     */
-     final val Blt_Un = OpCode.Blt_Un
-
-    /**
-     * Implements a jump table.
-     */
-     final val Switch = OpCode.Switch
-
-    /**
-     * Loads a value of type int8 as an int32 onto the evaluation stack indirectly.
-     */
-     final val Ldind_I1 = OpCode.Ldind_I1
-
-    /**
-     *  Loads a value of type int16 as an int32 onto the evaluation stack indirectly.
-     */
-     final val Ldind_I2 = OpCode.Ldind_I2
-
-    /**
-     * Loads a value of type int32 as an int32 onto the evaluation stack indirectly.
-     */
-     final val Ldind_I4 = OpCode.Ldind_I4
-
-    /**
-     * Loads a value of type int64 as an int64 onto the evaluation stack indirectly.
-     */
-     final val Ldind_I8 = OpCode.Ldind_I8
-
-    /**
-     * Loads a value of type natural int as a natural int onto the evaluation stack indirectly.
-     */
-     final val Ldind_I = OpCode.Ldind_I
-
-    /**
-     *  Loads a value of type float32 as a type F (float) onto the evaluation stack indirectly.
-     */
-     final val Ldind_R4 = OpCode.Ldind_R4
-
-    /**
-     * Loads a value of type float64 as a type F (float) onto the evaluation stack indirectly.
-     */
-     final val Ldind_R8 = OpCode.Ldind_R8
-
-    /**
-     * Loads an object reference as a type O (object reference) onto the evaluation stack indirectly.
-     */
-     final val Ldind_Ref = OpCode.Ldind_Ref
-
-    /**
-     * Loads a value of type unsigned int8 as an int32 onto the evaluation stack indirectly.
-     */
-     final val Ldind_U1 = OpCode.Ldind_U1
-
-    /**
-     * Loads a value of type unsigned int16 as an int32 onto the evaluation stack indirectly.
-     */
-     final val Ldind_U2 = OpCode.Ldind_U2
-
-    /**
-     * Loads a value of type unsigned int32 as an int32 onto the evaluation stack indirectly.
-     */
-     final val Ldind_U4 = OpCode.Ldind_U4
-
-    /**
-     * Stores an object reference value at a supplied address.
-     */
-     final val Stind_Ref = OpCode.Stind_Ref
-
-    /**
-     * Stores a value of type int8 at a supplied address.
-     */
-     final val Stind_I1 = OpCode.Stind_I1
-
-    /**
-     * Stores a value of type int16 at a supplied address.
-     */
-     final val Stind_I2 = OpCode.Stind_I2
-
-    /**
-     * Stores a value of type int32 at a supplied address.
-     */
-     final val Stind_I4 = OpCode.Stind_I4
-
-    /**
-     * Stores a value of type int64 at a supplied address.
-     */
-     final val Stind_I8 = OpCode.Stind_I8
-
-    /**
-     * Stores a value of type float32 at a supplied address.
-     */
-     final val Stind_R4 = OpCode.Stind_R4
-
-    /**
-     * Stores a value of type float64 at a supplied address.
-     */
-     final val Stind_R8 = OpCode.Stind_R8
-
-    /**
-     * Subtracts one value from another and pushes the result onto the evaluation stack.
-     */
-     final val Sub = OpCode.Sub
-
-    /**
-     * Multiplies two values and pushes the result on the evaluation stack.
-     */
-     final val Mul = OpCode.Mul
-
-    /**
-     * Divides two values and pushes the result as a floating-point (type F) or
-     * quotient (type int32) onto the evaluation stack.
-     */
-     final val Div = OpCode.Div
-
-    /**
-     * Divides two unsigned integer values and pushes the result (int32) onto the evaluation stack.
-     */
-     final val Div_Un = OpCode.Div_Un
-
-    /**
-     * Divides two values and pushes the remainder onto the evaluation stack.
-     */
-     final val Rem = OpCode.Rem
-
-    /**
-     * Divides two unsigned values and pushes the remainder onto the evaluation stack.
-     */
-     final val Rem_Un = OpCode.Rem_Un
-
-    /**
-     * Computes the bitwise AND of two values and pushes the result onto the evaluation stack.
-     */
-     final val And = OpCode.And
-
-    /**
-     * Computes the bitwise OR of the two integer values on top of the stack and
-     * pushes the result onto the evaluation stack.
-     */
-     final val Or = OpCode.Or
-
-    /**
-     * Computes the bitwise XOR of the top two values on the evaluation stack,
-     * pushing the result onto the evaluation stack.
-     */
-     final val Xor = OpCode.Xor
-
-    /**
-     * Shifts an integer value to the left (in zeroes) by a specified number of bits,
-     *  pushing the result onto the evaluation stack.
-     */
-     final val Shl = OpCode.Shl
-
-    /**
-     * Shifts an integer value (in sign) to the right by a specified number of bits,
-     * pushing the result onto the evaluation stack.
-     */
-     final val Shr = OpCode.Shr
-
-    /**
-     * Shifts an unsigned integer value (in zeroes) to the right by a specified number of bits,
-     * pushing the result onto the evaluation stack.
-     */
-     final val Shr_Un = OpCode.Shr_Un
-
-    /**
-     * Negates a value and pushes the result onto the evaluation stack.
-     */
-     final val Neg = OpCode.Neg
-
-    /**
-     * Computes the bitwise complement of the integer value on top of the stack and pushes
-     * the result onto the evaluation stack as the same type.
-     */
-     final val Not = OpCode.Not
-
-    /**
-     *  Converts the value on top of the evaluation stack to int8, then extends (pads) it to int32.
-     */
-     final val Conv_I1 = OpCode.Conv_I1
-
-    /**
-     * Converts the value on top of the evaluation stack to int16, then extends (pads) it to int32.
-     */
-     final val Conv_I2 = OpCode.Conv_I2
-
-    /**
-     * Converts the value on top of the evaluation stack to int32.
-     */
-     final val Conv_I4 = OpCode.Conv_I4
-
-    /**
-     * Converts the value on top of the evaluation stack to int64.
-     */
-     final val Conv_I8 = OpCode.Conv_I8
-
-    /**
-     * Converts the value on top of the evaluation stack to float32.
-     */
-     final val Conv_R4 = OpCode.Conv_R4
-
-    /**
-     * Converts the value on top of the evaluation stack to float64.
-     */
-     final val Conv_R8 = OpCode.Conv_R8
-
-    /**
-     * Converts the value on top of the evaluation stack to unsigned int32, and extends it to int32.
-     */
-     final val Conv_U4 = OpCode.Conv_U4
-
-    /**
-     * Converts the value on top of the evaluation stack to unsigned int64, and extends it to int64.
-     */
-     final val Conv_U8 = OpCode.Conv_U8
-
-    /**
-     * Calls a late-bound method on an object, pushing the return value onto the evaluation stack.
-     */
-     final val Callvirt = OpCode.Callvirt
-
-    /**
-     * Copies the value type located at the address of an object (type &, * or natural int)
-     * to the address of the destination object (type &, * or natural int).
-     */
-     final val Cpobj = OpCode.Cpobj
-
-    /**
-     * Copies the value type object pointed to by an address to the top of the evaluation stack.
-     */
-     final val Ldobj = OpCode.Ldobj
-
-    /**
-     * Pushes a new object reference to a string literal stored in the metadata.
-     */
-     final val Ldstr = OpCode.Ldstr
-
-    /**
-     * Creates a new object or a new instance of a value type, pushing an object reference
-     * (type O) onto the evaluation stack.
-     */
-     final val Newobj = OpCode.Newobj
-
-    /**
-     * Attempts to cast an object passed by reference to the specified class.
-     */
-     final val Castclass = OpCode.Castclass
-
-    /**
-     * Tests whether an object reference (type O) is an instance of a particular class.
-     */
-     final val Isinst = OpCode.Isinst
-
-    /**
-     *  Converts the unsigned integer value on top of the evaluation stack to float32.
-     */
-     final val Conv_R_Un = OpCode.Conv_R_Un
-
-    /**
-     * Converts the boxed representation of a value type to its unboxed form.
-     */
-     final val Unbox = OpCode.Unbox
-
-    /**
-     * Throws the exception object currently on the evaluation stack.
-     */
-     final val Throw = OpCode.Throw
-
-    /**
-     *  Finds the value of a field in the object whose reference is currently
-     * on the evaluation stack.
-     */
-     final val Ldfld = OpCode.Ldfld
-
-    /**
-     *  Finds the address of a field in the object whose reference is currently
-     * on the evaluation stack.
-     */
-     final val Ldflda = OpCode.Ldflda
-
-    /**
-     * Pushes the value of a static field onto the evaluation stack.
-     */
-     final val Ldsfld = OpCode.Ldsfld
-
-    /**
-     * Pushes the address of a static field onto the evaluation stack.
-     */
-     final val Ldsflda = OpCode.Ldsflda
-
-    /**
-     *  Replaces the value stored in the field of an object reference or pointer with a new value.
-     */
-     final val Stfld = OpCode.Stfld
-
-    /**
-     * Replaces the value of a static field with a value from the evaluation stack.
-     */
-     final val Stsfld = OpCode.Stsfld
-
-    /**
-     * Copies a value of a specified type from the evaluation stack into a supplied memory address.
-     */
-     final val Stobj = OpCode.Stobj
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to signed int8 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_I1_Un = OpCode.Conv_Ovf_I1_Un
-
-    /**
-     *  Converts the unsigned value on top of the evaluation stack to signed int16 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_I2_Un = OpCode.Conv_Ovf_I2_Un
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to signed int32,
-     * throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_I4_Un = OpCode.Conv_Ovf_I4_Un
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to signed int64,
-     * throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_I8_Un = OpCode.Conv_Ovf_I8_Un
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to signed natural int,
-     * throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_I_Un = OpCode.Conv_Ovf_I_Un
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to unsigned int8 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_U1_Un = OpCode.Conv_Ovf_U1_Un
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to unsigned int16 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_U2_Un = OpCode.Conv_Ovf_U2_Un
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to unsigned int32,
-     * throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_U4_Un = OpCode.Conv_Ovf_U4_Un
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to unsigned int64,
-     * throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_U8_Un = OpCode.Conv_Ovf_U8_Un
-
-    /**
-     * Converts the unsigned value on top of the evaluation stack to unsigned natural int,
-     * throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_U_Un = OpCode.Conv_Ovf_U_Un
-
-    /**
-     * Converts a value type to an object reference (type O).
-     */
-     final val Box = OpCode.Box
-
-    /**
-     * Pushes an object reference to a new zero-based, one-dimensional array whose elements
-     * are of a specific type onto the evaluation stack.
-     */
-     final val Newarr = OpCode.Newarr
-
-    /**
-     * Pushes the number of elements of a zero-based, one-dimensional array
-     * onto the evaluation stack.
-     */
-     final val Ldlen = OpCode.Ldlen
-
-    /**
-     * Loads the address of the array element at a specified array index onto
-     * the top of the evaluation stack as type & (managed pointer).
-     */
-     final val Ldelema = OpCode.Ldelema
-
-    /**
-     * Loads the element with type natural int at a specified array index onto the top
-     * of the evaluation stack as a natural int.
-     */
-     final val Ldelem_I = OpCode.Ldelem_I
-
-    /**
-     * Loads the element with type int8 at a specified array index onto the top of the
-     * evaluation stack as an int32.
-     */
-     final val Ldelem_I1 = OpCode.Ldelem_I1
-
-    /**
-     * Loads the element with type int16 at a specified array index onto the top of
-     * the evaluation stack as an int32.
-     */
-     final val Ldelem_I2 = OpCode.Ldelem_I2
-
-    /**
-     *  Loads the element with type int32 at a specified array index onto the top of the
-     * evaluation stack as an int32.
-     */
-     final val Ldelem_I4 = OpCode.Ldelem_I4
-
-    /**
-     *  Loads the element with type int64 at a specified array index onto the top of the
-     * evaluation stack as an int64.
-     */
-     final val Ldelem_I8 = OpCode.Ldelem_I8
-
-    /**
-     * Loads the element with type float32 at a specified array index onto the top of the
-     * evaluation stack as type F (float).
-     */
-     final val Ldelem_R4 = OpCode.Ldelem_R4
-
-    /**
-     * Loads the element with type float64 at a specified array index onto the top of the
-     * evaluation stack as type F (float).
-     */
-     final val Ldelem_R8 = OpCode.Ldelem_R8
-
-    /**
-     * Loads the element containing an object reference at a specified array index onto
-     * the top of the evaluation stack as type O (object reference).
-     */
-     final val Ldelem_Ref = OpCode.Ldelem_Ref
-
-    /**
-     * Loads the element with type unsigned int8 at a specified array index onto the top
-     * of the evaluation stack as an int32.
-     */
-     final val Ldelem_U1 = OpCode.Ldelem_U1
-
-    /**
-     * Loads the element with type unsigned int16 at a specified array index onto the top
-     * of the evaluation stack as an int32.
-     */
-     final val Ldelem_U2 = OpCode.Ldelem_U2
-
-    /**
-     * Loads the element with type unsigned int32 at a specified array index onto the top
-     * of the evaluation stack as an int32.
-     */
-     final val Ldelem_U4 = OpCode.Ldelem_U4
-
-    /**
-     *  Replaces the array element at a given index with the natural int value on
-     * the evaluation stack.
-     */
-     final val Stelem_I = OpCode.Stelem_I
-
-    /**
-     * Replaces the array element at a given index with the int8 value on the evaluation stack.
-     */
-     final val Stelem_I1 = OpCode.Stelem_I1
-
-    /**
-     *  Replaces the array element at a given index with the int16 value on the evaluation stack.
-     */
-     final val Stelem_I2 = OpCode.Stelem_I2
-
-    /**
-     *  Replaces the array element at a given index with the int32 value on the evaluation stack.
-     */
-     final val Stelem_I4 = OpCode.Stelem_I4
-
-    /**
-     * Replaces the array element at a given index with the int64 value on the evaluation stack.
-     */
-     final val Stelem_I8 = OpCode.Stelem_I8
-
-    /**
-     * Replaces the array element at a given index with the float32 value on the evaluation stack.
-     */
-     final val Stelem_R4 = OpCode.Stelem_R4
-
-    /**
-     * Replaces the array element at a given index with the float64 value on the evaluation stack.
-     */
-     final val Stelem_R8 = OpCode.Stelem_R8
-
-    /**
-     * Replaces the array element at a given index with the object ref value (type O)
-     * on the evaluation stack.
-     */
-     final val Stelem_Ref = OpCode.Stelem_Ref
-
-    /**
-     * Converts the signed value on top of the evaluation stack to signed int8 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_I1 = OpCode.Conv_Ovf_I1
-
-    /**
-     * Converts the signed value on top of the evaluation stack to signed int16 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_I2 = OpCode.Conv_Ovf_I2
-
-    /**
-     * Converts the signed value on top of the evaluation stack to signed int32,
-     * throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_I4 = OpCode.Conv_Ovf_I4
-
-    /**
-     * Converts the signed value on top of the evaluation stack to signed int64,
-     * throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_I8 = OpCode.Conv_Ovf_I8
-
-    /**
-     * Converts the signed value on top of the evaluation stack to unsigned int8 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_U1 = OpCode.Conv_Ovf_U1
-
-    /**
-     * Converts the signed value on top of the evaluation stack to unsigned int16 and
-     * extends it to int32, throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_U2 = OpCode.Conv_Ovf_U2
-
-    /**
-     *  Converts the signed value on top of the evaluation stack to unsigned int32,
-     * throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_U4 = OpCode.Conv_Ovf_U4
-
-    /**
-     * Converts the signed value on top of the evaluation stack to unsigned int64,
-     * throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_U8 = OpCode.Conv_Ovf_U8
-
-    /**
-     *  Retrieves the address (type &) embedded in a typed reference.
-     */
-     final val Refanyval = OpCode.Refanyval
-
-    /**
-     * Retrieves the type token embedded in a typed reference.
-     */
-     final val Refanytype = OpCode.Refanytype
-
-    /**
-     * Throws ArithmeticException if value is not a finite number.
-     */
-     final val Ckfinite = OpCode.Ckfinite
-
-    /**
-     * Pushes a typed reference to an instance of a specific type onto the evaluation stack.
-     */
-     final val Mkrefany = OpCode.Mkrefany
-
-    /**
-     * Converts a metadata token to its runtime representation, pushing it onto the evaluation stack.
-     */
-     final val Ldtoken = OpCode.Ldtoken
-
-    /**
-     * Converts the value on top of the evaluation stack to unsigned int8, and extends it to int32.
-     */
-     final val Conv_U1 = OpCode.Conv_U1
-
-    /**
-     * Converts the value on top of the evaluation stack to unsigned int16, and extends it to int32.
-     */
-     final val Conv_U2 = OpCode.Conv_U2
-
-    /**
-     * Converts the value on top of the evaluation stack to natural int.
-     */
-     final val Conv_I = OpCode.Conv_I
-
-    /**
-     * Converts the signed value on top of the evaluation stack to signed natural int,
-     * throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_I = OpCode.Conv_Ovf_I
-
-    /**
-     * Converts the signed value on top of the evaluation stack to unsigned natural int,
-     * throwing OverflowException on overflow.
-     */
-     final val Conv_Ovf_U = OpCode.Conv_Ovf_U
-
-    /**
-     * Adds two integers, performs an overflow check, and pushes the result
-     * onto the evaluation stack.
-     */
-     final val Add_Ovf = OpCode.Add_Ovf
-
-    /**
-     *  Adds two unsigned integer values, performs an overflow check, and pushes the result
-     * onto the evaluation stack.
-     */
-     final val Add_Ovf_Un = OpCode.Add_Ovf_Un
-
-    /**
-     * Multiplies two integer values, performs an overflow check, and pushes the result
-     * onto the evaluation stack.
-     */
-     final val Mul_Ovf = OpCode.Mul_Ovf
-
-    /**
-     * Multiplies two unsigned integer values, performs an overflow check,
-     * and pushes the result onto the evaluation stack.
-     */
-     final val Mul_Ovf_Un = OpCode.Mul_Ovf_Un
-
-    /**
-     * Subtracts one integer value from another, performs an overflow check,
-     * and pushes the result onto the evaluation stack.
-     */
-     final val Sub_Ovf = OpCode.Sub_Ovf
-
-    /**
-     * Subtracts one unsigned integer value from another, performs an overflow check,
-     * and pushes the result onto the evaluation stack.
-     */
-     final val Sub_Ovf_Un = OpCode.Sub_Ovf_Un
-
-    /**
-     * Transfers control from the fault or finally clause of an exception block back to
-     * the Common Language Infrastructure (CLI) exception handler.
-     */
-     final val Endfinally = OpCode.Endfinally
-
-    /**
-     * Exits a protected region of code, unconditionally transferring control
-     * to a specific target instruction.
-     */
-     final val Leave = OpCode.Leave
-
-    /**
-     * Exits a protected region of code, unconditionally transferring control
-     * to a target instruction (short form).
-     */
-     final val Leave_S = OpCode.Leave_S
-
-    /**
-     * Stores a value of type natural int at a supplied address.
-     */
-     final val Stind_I = OpCode.Stind_I
-
-    /**
-     *  Converts the value on top of the evaluation stack to unsigned natural int,
-     * and extends it to natural int.
-     */
-     final val Conv_U = OpCode.Conv_U
-
-    /**
-     * Returns an unmanaged pointer to the argument list of the current method.
-     */
-     final val Arglist = OpCode.Arglist
-
-    /**
-     * Compares two values. If they are equal, the integer value 1 (int32) is pushed
-     * onto the evaluation stack; otherwise 0 (int32) is pushed onto the evaluation stack.
-     */
-     final val Ceq = OpCode.Ceq
-
-    /**
-     * Compares two values. If the first value is greater than the second,
-     * the integer value 1 (int32) is pushed onto the evaluation stack;
-     * otherwise 0 (int32) is pushed onto the evaluation stack.
-     */
-     final val Cgt = OpCode.Cgt
-
-    /**
-     *  Compares two unsigned or unordered values. If the first value is greater than
-     * the second, the integer value 1 (int32) is pushed onto the evaluation stack;
-     * otherwise 0 (int32) is pushed onto the evaluation stack.
-     */
-     final val Cgt_Un = OpCode.Cgt_Un
-
-    /**
-     * Compares two values. If the first value is less than the second,
-     * the integer value 1 (int32) is pushed onto the evaluation stack;
-     * otherwise 0 (int32) is pushed onto the evaluation stack.
-     */
-     final val Clt = OpCode.Clt
-
-    /**
-     *  Compares the unsigned or unordered values value1 and value2. If value1 is
-     * less than value2, then the integer value 1 (int32) is pushed onto the
-     * evaluation stack; otherwise 0 (int32) is pushed onto the evaluation stack.
-     */
-     final val Clt_Un = OpCode.Clt_Un
-
-    /**
-     * Pushes an unmanaged pointer (type natural int) to the native code implementing
-     * a specific method onto the evaluation stack.
-     */
-     final val Ldftn = OpCode.Ldftn
-
-    /**
-     * Pushes an unmanaged pointer (type natural int) to the native code implementing
-     * a particular virtual method associated with a specified object onto the evaluation stack.
-     */
-     final val Ldvirtftn = OpCode.Ldvirtftn
-
-    /**
-     * Loads an argument (referenced by a specified index value) onto the stack.
-     */
-     final val Ldarg = OpCode.Ldarg
-
-    /**
-     * Load an argument address onto the evaluation stack.
-     */
-     final val Ldarga = OpCode.Ldarga
-
-    /**
-     * Loads the local variable at a specific index onto the evaluation stack.
-     */
-     final val Ldloc = OpCode.Ldloc
-
-    /**
-     *  Loads the address of the local variable at a specific index onto the evaluation stack.
-     */
-     final val Ldloca = OpCode.Ldloca
-
-    /**
-     *  Stores the value on top of the evaluation stack in the argument slot at a specified index.
-     */
-     final val Starg = OpCode.Starg
-
-    /**
-     * Pops the current value from the top of the evaluation stack and stores it in
-     * the local variable list at a specified index.
-     */
-     final val Stloc = OpCode.Stloc
-
-    /**
-     * Allocates a certain number of bytes from the local dynamic memory pool and pushes the
-     * address (a transient pointer, type *) of the first allocated byte onto the evaluation stack.
-     */
-     final val Localloc = OpCode.Localloc
-
-    /**
-     * Transfers control from the filter clause of an exception back to the
-     * Common Language Infrastructure (CLI) exception handler.
-     */
-     final val Endfilter = OpCode.Endfilter
-
-    /**
-     * Indicates that an address currently atop the evaluation stack might not be aligned
-     * to the natural size of the immediately following ldind, stind, ldfld, stfld, ldobj,
-     * stobj, initblk, or cpblk instruction.
-     */
-     final val Unaligned = OpCode.Unaligned
-
-    /**
-     * Specifies that an address currently atop the evaluation stack might be volatile,
-     * and the results of reading that location cannot be cached or that multiple stores
-     * to that location cannot be suppressed.
-     */
-     final val Volatile = OpCode.Volatile
-
-    /**
-     * Performs a postfixed method call instruction such that the current method's stack
-     * frame is removed before the actual call instruction is executed.
-     */
-     final val Tailcall = OpCode.Tailcall
-
-    /**
-     * Initializes all the fields of the object at a specific address to a null reference
-     * or a 0 of the appropriate primitive type.
-     */
-     final val Initobj = OpCode.Initobj
-
-    /**
-     * Copies a specified number of bytes from a source address to a destination address.
-     */
-     final val Cpblk = OpCode.Cpblk
-
-    /**
-     * Initializes a specified block of memory at a specific address to a given size
-     * and initial value.
-     */
-     final val Initblk = OpCode.Initblk
-
-    /**
-     * Rethrows the current exception.
-     */
-     final val Rethrow = OpCode.Rethrow
-
-    /**
-     * Pushes the size, in bytes, of a supplied value type onto the evaluation stack.
-     */
-     final val Sizeof = OpCode.Sizeof
-
-    //##########################################################################
-}
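
The Scaladoc at the top of OpCodes describes these field aliases as intended "for emission by the ILGenerator class members (such as Emit)". The sketch below only illustrates that call shape; Emitter and RecordingEmitter are stand-ins defined for the example and are not this library's ILGenerator, whose real Emit overloads may differ.

    object EmitSketch {
      // Stand-in for illustration only; not this library's ILGenerator.
      trait Emitter { def emit(op: OpCode): Unit }

      // Records the opcodes it is asked to emit, so the sketch runs without ilasm.
      final class RecordingEmitter extends Emitter {
        val recorded = scala.collection.mutable.ArrayBuffer.empty[OpCode]
        def emit(op: OpCode): Unit = recorded += op
      }

      // Body of a method that adds its two int32 arguments with an overflow check.
      def emitCheckedSum(gen: Emitter): Unit = {
        gen.emit(OpCodes.Ldarg_0)   // push the first argument
        gen.emit(OpCodes.Ldarg_1)   // push the second argument
        gen.emit(OpCodes.Add_Ovf)   // checked addition; add.ovf throws on overflow at run time
        gen.emit(OpCodes.Ret)       // return the value left on top of the stack
      }
    }
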
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala
deleted file mode 100644
index 8f9d81a..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.Type
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-import ch.epfl.lamp.compiler.msil.ParameterInfo
-import java.io.IOException
-
-/**
- * Creates or associates parameter information.
- * Parameter attributes need to be consistent with the method signature.
- * If you specify Out attributes for a parameter, you should ensure that
- * the type of that method parameter is a ByRef type.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class ParameterBuilder(name: String, tpe: Type, attr: Int, pos: Int)
-      extends ParameterInfo(name, tpe, attr, pos)
-      with ICustomAttributeSetter
-      with Visitable
-{
-
-    //##########################################################################
-
-    /** Sets a custom attribute. */
-    def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
-	addCustomAttribute(constr, value)
-    }
-
-    //##########################################################################
-
-    /** The apply method for a visitor */
-    @throws(classOf[IOException])
-    def apply(v: Visitor) {
-	v.caseParameterBuilder(this)
-    }
-
-    //##########################################################################
-}
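
ParameterBuilder's apply(v: Visitor) above is the classic visitor double dispatch: the caller supplies the visitor, and the node decides which case method is invoked. The self-contained sketch below uses hypothetical names (SketchVisitor, SketchParam, SketchField); the actual Visitor trait in this API appears to declare one case method per builder type, such as caseParameterBuilder.

    object VisitorSketch {
      trait SketchVisitor {
        def caseParam(p: SketchParam): Unit
        def caseField(f: SketchField): Unit
      }

      sealed trait SketchNode { def apply(v: SketchVisitor): Unit }

      final case class SketchParam(name: String) extends SketchNode {
        def apply(v: SketchVisitor): Unit = v.caseParam(this)   // second dispatch, on the concrete node type
      }
      final case class SketchField(name: String) extends SketchNode {
        def apply(v: SketchVisitor): Unit = v.caseField(this)
      }

      // A concrete visitor: prints each node it is applied to.
      object PrintingVisitor extends SketchVisitor {
        def caseParam(p: SketchParam): Unit = println(s"param ${p.name}")
        def caseField(f: SketchField): Unit = println(s"field ${f.name}")
      }

      def demo(): Unit =
        Seq(SketchParam("x"), SketchField("y")).foreach(_.apply(PrintingVisitor))
    }
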
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
deleted file mode 100644
index 5d59d4d..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.FileWriter
-import java.io.BufferedWriter
-import java.io.PrintWriter
-import java.io.IOException
-import java.util.Iterator
-import java.util.HashMap
-import java.util.Arrays
-
-import ch.epfl.lamp.compiler.msil._
-import ch.epfl.lamp.compiler.msil.emit
-import ch.epfl.lamp.compiler.msil.util.Table
-
-/**
- * The MSIL printer Visitor. It prints a complete
- * assembly in a single file that can be compiled by ilasm.
- *
- * @author Nikolay Mihaylov
- * @author Daniel Lorch
- * @version 1.0
- */
-final class SingleFileILPrinterVisitor(_fileName: String) extends ILPrinterVisitor {
-    var fileName: String = _fileName
-
-    out = new PrintWriter(new BufferedWriter(new FileWriter(fileName)))
-
-   /**
-     * Visit an AssemblyBuilder
-     */
-   @throws(classOf[IOException])
-   def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder) {
-	ILPrinterVisitor.currAssembly = assemblyBuilder
-
-	// first get the entryPoint
-	this.entryPoint = assemblyBuilder.EntryPoint
-
-	// all external assemblies
-	as = assemblyBuilder.getExternAssemblies()
-  scala.util.Sorting.quickSort(as)(assemblyNameComparator) // Arrays.sort(as, assemblyNameComparator)
-
-        assemblyBuilder.generatedFiles += fileName
-	printAssemblyBoilerplate()
-
-	// print each module
-        var m: Array[Module] = assemblyBuilder.GetModules()
-        nomembers = true
-        for(i <- 0 until m.length) {
-	    print(m(i).asInstanceOf[ModuleBuilder])
-	}
-
-        nomembers = false
-        for(i <- 0 until m.length) {
-	    print(m(i).asInstanceOf[ModuleBuilder])
-	}
-	// close out file
-	out.close()
-	ILPrinterVisitor.currAssembly = null
-    }
-
-    /**
-     * Visit a ModuleBuilder
-     */
-    @throws(classOf[IOException])
-    def caseModuleBuilder(module: ModuleBuilder) {
-	// print module declaration
-	currentModule = module
-        if (nomembers) {
-            print(".module \'"); print(module.Name); println("\'")
-            printAttributes(module)
-        }
-
-	if (!module.globalsCreated)
-	    module.CreateGlobalFunctions()
-
-	var m: Array[MethodInfo] = module.GetMethods()
-        for(i <- 0 until m.length) {
-	    print(m(i).asInstanceOf[MethodBuilder])
-	}
-
-	var t: Array[Type] = module.GetTypes()
-        for(i <- 0 until t.length) {
-	    print(t(i).asInstanceOf[TypeBuilder])
-	}
-	currentModule = null
-    }
-
-}  // class SingleFileILPrinterVisitor
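
caseAssemblyBuilder above prints every module twice, first with nomembers = true and then with nomembers = false. A plausible reading (an assumption, not stated in the source) is that the first pass emits bare declarations so the full definitions printed in the second pass can refer to any type in the assembly when ilasm processes the single .il file. The hypothetical sketch below mirrors just that two-pass structure; TwoPassSketch, declarationsOnly and printType are made-up names.

    object TwoPassSketch {
      // declarationsOnly plays the role of the nomembers flag above.
      private var declarationsOnly = false

      private def printType(name: String): Unit =
        if (declarationsOnly)
          println(s".class $name {}")                     // pass 1: bare declaration
        else
          println(s".class $name { /* members ... */ }")  // pass 2: full definition

      def printAll(typeNames: Seq[String]): Unit = {
        declarationsOnly = true          // first pass: declarations only
        typeNames.foreach(printType)
        declarationsOnly = false         // second pass: members and bodies
        typeNames.foreach(printType)
      }
    }
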
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
deleted file mode 100644
index 57dc883..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
+++ /dev/null
@@ -1,261 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil._
-
-import ch.epfl.lamp.compiler.msil.util.PECustomMod
-
-import java.io.IOException
-
-/**
- * Defines and creates new instances of classes during runtime.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class TypeBuilder (module: Module, attributes: Int, fullName: String, baseType: Type, interfaces: Array[Type], declType: Type)
-      extends Type(module, attributes, fullName, baseType, interfaces, declType, 0)
-      with ICustomAttributeSetter
-      with Visitable
-{
-    import TypeBuilder._
-
-    //##########################################################################
-    // public members
-
-    /** 'Bakes' the type. */
-    def CreateType(): Type = {
-	fields = fieldBuilders.toArray // (new Array[FieldInfo](fieldBuilders.size())).asInstanceOf[Array[FieldInfo]]
-	methods = methodBuilders.toArray // (new Array[MethodInfo](methodBuilders.size())).asInstanceOf[Array[MethodInfo]]
-	constructors = constructorBuilders.toArray // (new Array[ConstructorInfo](constructorBuilders.size())).asInstanceOf[Array[ConstructorInfo]]
-	nestedTypes = nestedTypeBuilders.toArray // (new Array[Type](nestedTypeBuilders.size())).asInstanceOf[Array[Type]]
-
-	raw = false
-	if (DeclaringType == null)
-	    Module.asInstanceOf[ModuleBuilder].addType(this)
-	return this
-    }
-
-    /**
-     * Adds a new field to the class, with the given name, attributes and field type. The location has no custom mods.
-     */
-    def DefineField(name: String, fieldType: Type, attrs: Short): FieldBuilder = {
-      val fieldTypeWithCustomMods = new PECustomMod(fieldType, null)
-      DefineField(name, fieldTypeWithCustomMods, attrs)
-    }
-
-  /**
-   * Adds a new field to the class, with the given name, attributes and (field type, custom mods) combination.
-   */
-  def DefineField(name: String, fieldTypeWithMods: PECustomMod, attrs: Short): FieldBuilder = {
-    val field: FieldBuilder = new FieldBuilder(name, this, attrs, fieldTypeWithMods)
-    fieldBuilders += field
-    return field
-  }
-
-    /**
-     * Adds a new method to the class, with the given name and
-     * method signature.
-     */
-    def DefineMethod(name: String, attrs: Short, returnType: Type, paramTypes: Array[Type]): MethodBuilder = {
-	val method = new MethodBuilder(name, this, attrs, returnType, paramTypes)
-      val methods = methodBuilders.iterator
-      while(methods.hasNext) {
-        val m = methods.next().asInstanceOf[MethodInfo]
-        if (methodsEqual(m, method)) {
-          throw new RuntimeException("["+ Assembly() + "] Method has already been defined: " + m)
-	}
-      }
-      methodBuilders += method
-	return method
-    }
-
-    /**
-     * Adds a new constructor to the class, with the given attributes
-     * and signature.
-     */
-    def DefineConstructor(attrs: Short, callingConvention: Short, paramTypes: Array[Type]): ConstructorBuilder = {
-	val constr = new ConstructorBuilder(this, attrs, paramTypes)
-      val iter = constructorBuilders.iterator
-      while(iter.hasNext) {
-        val c = iter.next().asInstanceOf[ConstructorInfo]
-        if (constructorsEqual(c, constr)) {
-          throw new RuntimeException("["+ Assembly() + "] Constructor has already been defined: " + c)
-        }
-      }
-      constructorBuilders += constr
-	return constr
-    }
-
-    /**
-     * Defines a nested type given its name.
-     */
-    def DefineNestedType(name: String, attributes: Int, baseType: Type, interfaces: Array[Type]): TypeBuilder = {
-    val nested = nestedTypeBuilders.iterator
-    while(nested.hasNext) {
-        val nt = nested.next
-		if (nt.Name.equals(name)) {
-		    val message = "Nested type " + name + " has already been defined: " + nt
-		    throw new RuntimeException(message)
-		}
-	    }
-	val t = new TypeBuilder(Module, attributes, name, baseType, interfaces, this)
-	nestedTypeBuilders += t
-	return t
-    }
-
-    /** Get the field with the corresponding name. */
-    override def GetField(name: String): FieldInfo = {
-	testRaw(name)
-	return super.GetField(name)
-    }
-
-    /** Get all fields of the current Type. */
-    override def GetFields(): Array[FieldInfo] = {
-	testRaw("<GetFields>")
-	return super.GetFields()
-    }
-
-    /**
-     * Searches for a public instance constructor whose parameters
-     * match the types in the specified array.
-     */
-    override def GetConstructor(params: Array[Type]): ConstructorInfo = {
-	testRaw(".ctor" + types2String(params))
-	return super.GetConstructor(params)
-    }
-
-    /**
-     * Returns all the public constructors defined for the current Type.
-     */
-    override def GetConstructors(): Array[ConstructorInfo] = {
-	testRaw("<GetConstructors>")
-	return super.GetConstructors()
-    }
-
-    /**
-     * Searches for the specified public method whose parameters
-     * match the specified argument types.
-     */
-    override def GetMethod(name: String, params: Array[Type]): MethodInfo = {
-	testRaw(name + types2String(params))
-	return super.GetMethod(name, params)
-    }
-
-    /** Returns all the public methods of the current Type. */
-    override def GetMethods(): Array[MethodInfo] = {
-	testRaw("<GetMethods>")
-	return super.GetMethods()
-    }
-
-    /** Searches for the nested type with the specified name. */
-    override def GetNestedType(name: String): Type = {
-      testRaw(name)
-      super.GetNestedType(name)
-    }
-
-    /** Returns all the types nested within the current Type. */
-    override def GetNestedTypes(): Array[Type] = {
-      testRaw("<GetNestedTypes>")
-      super.GetNestedTypes()
-    }
-
-    /** Returns a Type object that represents a one-dimensional array of the current type */
-    def MakeArrayType(): Type = {
-      Type.mkArray(this, 1)
-    }
-
-    /** Sets a custom attribute. */
-    def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
-	addCustomAttribute(constr, value)
-    }
-
-    def setPosition(sourceLine: Int, sourceFilename: String) {
-	this.sourceLine = sourceLine
-	this.sourceFilename = sourceFilename
-    }
-
-    def setSourceFilepath(sourceFilepath: String) {
-    this.sourceFilepath = sourceFilepath
-    }
-
-    //##########################################################################
-    // protected members
-
-    var sourceLine: Int = _
-    var sourceFilename: String = _
-    var sourceFilepath: String = _
-
-    var fieldBuilders = scala.collection.mutable.ArrayBuffer.empty[FieldBuilder]
-    var methodBuilders = scala.collection.mutable.ArrayBuffer.empty[MethodBuilder]
-    var constructorBuilders = scala.collection.mutable.ArrayBuffer.empty[ConstructorBuilder]
-    var nestedTypeBuilders = scala.collection.mutable.ArrayBuffer.empty[TypeBuilder]
-
-    // shows if the type is 'raw', i.e. still subject to changes
-    private var raw = true
-
-    // throws an exception if the type is 'raw',
-    // i.e. not finalized by call to CreateType
-    protected def testRaw(member: String) {
-	if (raw)
-	    throw new RuntimeException("Not supported for TypeBuilder before CreateType(): " +
-		 FullName + "::" + member)
-    }
-
-    //##########################################################################
-    // public members not part of the Reflection.Emit.TypeBuilder interface.
-
-    /** The apply method for a visitor. */
-    @throws(classOf[IOException])
-    def apply(v: Visitor) {
-	v.caseTypeBuilder(this)
-    }
-
-    //##########################################################################
-
-}  // class TypeBuilder
-
-object TypeBuilder {
-    def types2String(types: Array[Type]): String = {
-    var s = new StringBuffer("(")
-    for(i <- 0 until types.length) {
-        if (i > 0) s.append(", ")
-        s.append(types(i))
-    }
-    s.append(")")
-    return s.toString()
-    }
-
-    def methodsEqual(m1: MethodInfo, m2: MethodInfo): Boolean = {
-    if (!m1.Name.equals(m2.Name))
-        return false
-    if (m1.ReturnType != m2.ReturnType)
-        return false
-    val p1 = m1.GetParameters()
-    val p2 = m2.GetParameters()
-    if (p1.length != p2.length)
-        return false
-    for(i <- 0 until p1.length)
-        if (p1(i).ParameterType != p2(i).ParameterType)
-        return false
-    return true
-     }
-
-    def constructorsEqual(c1: ConstructorInfo, c2: ConstructorInfo): Boolean = {
-      if (c1.IsStatic != c2.IsStatic)
-          return false
-      val p1 = c1.GetParameters()
-      val p2 = c2.GetParameters()
-      if (p1.length != p2.length)
-          return false
-      for(i <- 0 until p1.length)
-          if (p1(i).ParameterType != p2(i).ParameterType)
-          return false
-      return true
-}
-
-}
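For context, a minimal usage sketch of the TypeBuilder API removed above, in Scala. It assumes an already constructed TypeBuilder `tb` plus pre-resolved `stringType`/`voidType` values (all hypothetical names), and the numeric attribute flags are purely illustrative; it only exercises the DefineField/DefineMethod/CreateType calls shown in this file:

    import ch.epfl.lamp.compiler.msil.Type
    import ch.epfl.lamp.compiler.msil.emit.TypeBuilder

    def bakeDemoType(tb: TypeBuilder, stringType: Type, voidType: Type): Type = {
      tb.DefineField("name", stringType, 6.toShort)                    // 0x0006: illustrative "public" flag
      tb.DefineMethod("Greet", 6.toShort, voidType, Array(stringType)) // method taking one String parameter
      // Reflective queries (GetFields, GetMethods, ...) throw while the type
      // is still 'raw'; CreateType() finalizes it and, for top-level types,
      // registers the result with its ModuleBuilder.
      tb.CreateType()
    }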
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala
deleted file mode 100644
index 28ec801..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.IOException
-
-/**
- * The Visitable interface
- */
-trait Visitable {
-
-    //##########################################################################
-
-    /**
-     * the visitable method to apply a visitor
-     */
-    @throws(classOf[IOException])
-    def apply(v: Visitor): Unit
-
-    //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala
deleted file mode 100644
index d4b84cd..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.IOException
-
-/**
- * The Visitor interface to walk through the MSIL code Builder hierarchy.
- */
-trait Visitor {
-
-    //##########################################################################
-
-    /** Visit an AssemblyBuilder */
-    @throws(classOf[IOException])
-    def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder): Unit
-
-    /** Visit a ModuleBuilder */
-    @throws(classOf[IOException])
-    def caseModuleBuilder(moduleBuilder: ModuleBuilder): Unit
-
-    /** Visit a TypeBuilder */
-    @throws(classOf[IOException])
-    def caseTypeBuilder(typeBuilder: TypeBuilder): Unit
-
-    /** Visit a FieldBuilder */
-    @throws(classOf[IOException])
-    def caseFieldBuilder(fieldBuilder: FieldBuilder): Unit
-
-    /** Visit a ConstructorBuilder */
-    @throws(classOf[IOException])
-    def caseConstructorBuilder(constructorBuilder: ConstructorBuilder): Unit
-
-    /** Visit a MethodBuilder */
-    @throws(classOf[IOException])
-    def caseMethodBuilder(methodBuilder: MethodBuilder): Unit
-
-    /** Visit a ParameterBuilder */
-    @throws(classOf[IOException])
-    def caseParameterBuilder(parameterBuilder: ParameterBuilder): Unit
-
-    /** Visit an ILGenerator */
-    @throws(classOf[IOException])
-    def caseILGenerator(iLGenerator: ILGenerator): Unit
-
-    /** Visit an OpCode */
-    @throws(classOf[IOException])
-    def caseOpCode(opCode: OpCode): Unit
-
-    /** Visit a LocalBuilder */
-    @throws(classOf[IOException])
-    def caseLocalBuilder(localBuilder: LocalBuilder): Unit
-
-    //##########################################################################
-}
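As a companion sketch, a trivial Visitor implementation in Scala that counts the TypeBuilders it visits and ignores everything else; the class name CountingVisitor is made up for illustration. A Visitable (for example a TypeBuilder) is then traversed via builder.apply(visitor):

    import java.io.IOException
    import ch.epfl.lamp.compiler.msil.emit._

    class CountingVisitor extends Visitor {
      var typeCount = 0
      @throws(classOf[IOException]) def caseAssemblyBuilder(a: AssemblyBuilder) {}
      @throws(classOf[IOException]) def caseModuleBuilder(m: ModuleBuilder) {}
      @throws(classOf[IOException]) def caseTypeBuilder(t: TypeBuilder) { typeCount += 1 }
      @throws(classOf[IOException]) def caseFieldBuilder(f: FieldBuilder) {}
      @throws(classOf[IOException]) def caseConstructorBuilder(c: ConstructorBuilder) {}
      @throws(classOf[IOException]) def caseMethodBuilder(m: MethodBuilder) {}
      @throws(classOf[IOException]) def caseParameterBuilder(p: ParameterBuilder) {}
      @throws(classOf[IOException]) def caseILGenerator(il: ILGenerator) {}
      @throws(classOf[IOException]) def caseOpCode(op: OpCode) {}
      @throws(classOf[IOException]) def caseLocalBuilder(l: LocalBuilder) {}
    }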
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java
deleted file mode 100644
index 9a6e28a..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java
+++ /dev/null
@@ -1,31 +0,0 @@
-
-package ch.epfl.lamp.compiler.msil.tests;
-
-import ch.epfl.lamp.compiler.msil.*;
-import ch.epfl.lamp.compiler.msil.util.Table;
-
-import java.io.PrintStream;
-
-public class CustomAttributesTest {
-    public static void main(String[] args) {
-	if (args.length < 1) {
-	    System.err.println("You must supply a filename!");
-	    System.exit(1);
-	}
-
-	Assembly assem = Assembly.LoadFrom(args[0]);
-	Type.initMSCORLIB(assem);
-
-	testCustomAttributes();
-    }
-
-    public static void testCustomAttributes() {
-	Object[] attrs = Type.GetType("System.ObsoleteAttribute")
-	    .GetCustomAttributes(false);
-	assert attrs != null;
-	for (int i = 0; i < attrs.length; i++) {
-	    System.out.println("\t" + attrs[i]);
-	}
-    }
-
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java
deleted file mode 100644
index 96ec1bf..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java
+++ /dev/null
@@ -1,18 +0,0 @@
-
-package ch.epfl.lamp.compiler.msil.tests;
-
-import ch.epfl.lamp.compiler.msil.*;
-import ch.epfl.lamp.compiler.msil.util.VJSAssembly;
-
-public class JavaTypeTest {
-
-    public static void main(String[] args) {
-	if (args.length < 1) {
-	    System.err.println("usage: java test.JavaTypeTest classname");
-	    System.exit(1);
-	}
-
-	Type type = VJSAssembly.VJSLIB.GetType(args[0]);
-	MembersTest.dumpType(System.out, type);
-    }
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java
deleted file mode 100644
index 37a5c6e..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java
+++ /dev/null
@@ -1,100 +0,0 @@
-
-package ch.epfl.lamp.compiler.msil.tests;
-
-import ch.epfl.lamp.compiler.msil.*;
-import ch.epfl.lamp.compiler.msil.util.Table;
-
-import java.io.PrintStream;
-
-public class MembersTest {
-
-    public static void main(String[] args) {
-	if (args.length < 1) {
-	    System.err.println
-		("usage: java test.MembersTest assembly [classname]");
-	    System.exit(1);
-	}
-
-	Assembly mscorlib = Assembly.LoadFrom("mscorlib.dll");
-	Type.initMSCORLIB(mscorlib);
-	Assembly assem = Assembly.LoadFrom(args[0]);
-	if (args.length > 1) {
-	    Type type = assem.GetType(args[1]);
-	    if (type != null)
-		dumpMember(System.out, type);
-	    else System.err.println("Cannot find type " + args[1]
-				    + " in " + assem);
-	} else {
-	    Type[] types = assem.GetTypes();
-	    System.out.println("Number of types in assembly " + assem
-			       + " -> " + types.length);
-            dumpCustomAttributes(System.out, "assembly: ", assem);
-            Module[] modules = assem.GetModules();
-            for (int i = 0; i < modules.length; i++) {
-                dumpCustomAttributes(System.out, "module " + modules[i] + ": ",
-                                     modules[i]);
-            }
-            dumpMembers(System.out, types);
-	}
-    }
-
-    public static final void dumpMember(PrintStream out, MemberInfo member) {
-        try {
-            if (member.MemberType() == MemberTypes.TypeInfo
-                || member.MemberType() == MemberTypes.NestedType) {
-                Type type = (Type)member;
-                dumpCustomAttributes(out, "", type);
-                out.print(TypeAttributes.accessModsToString(type.Attributes));
-                out.print(type.IsInterface() ? " interface " : " class ");
-                out.print(type);
-                if (type.BaseType() != null)
-                    out.println(" extends " + type.BaseType());
-                Type[] ifaces = type.GetInterfaces();
-                if (ifaces.length > 0) {
-                    out.print("\timplements ");
-                    for (int i = 0; i < ifaces.length; i++) {
-                        out.print(ifaces[i]);
-                        if (i < (ifaces.length - 1))
-                            out.print(", ");
-                    }
-                    out.println();
-                }
-                out.println("{");
-                int all = BindingFlags.Public | BindingFlags.DeclaredOnly// | BindingFlags.NonPublic
-                    | BindingFlags.Instance | BindingFlags.Static;
-                dumpMembers(out, type.GetNestedTypes());
-                dumpMembers(out, type.GetFields(all));
-                dumpMembers(out, type.GetConstructors(all));
-                dumpMembers(out, type.GetMethods(all));
-                dumpMembers(out, type.GetProperties(all));
-                dumpMembers(out, type.GetEvents());
-                out.println("}");
-            } else {
-                dumpCustomAttributes(out, "", member);
-                out.print(MemberTypes.toString(member.MemberType()));
-                out.print(": "); out.print(member);
-                out.println();
-            }
-        } catch (Throwable e) {
-            String message = MemberTypes.toString(member.MemberType())
-                + ": " + member;
-            throw new RuntimeException(message, e);
-        }
-    }
-
-    public static void dumpCustomAttributes(PrintStream out,
-                                            String prefix,
-                                            ICustomAttributeProvider att)
-    {
-        Object[] attrs = att.GetCustomAttributes(false);
-        for (int j = 0; j < attrs.length; j++)
-            out.println(prefix + attrs[j]);
-    }
-
-    public static void dumpMembers(PrintStream out, MemberInfo[] members) {
-	for (int i = 0; i < members.length; i++) {
-            dumpMember(out, members[i]);
-	}
-    }
-
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java
deleted file mode 100644
index 1df389b..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java
+++ /dev/null
@@ -1,311 +0,0 @@
-
-package ch.epfl.lamp.compiler.msil.tests;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-
-import java.io.PrintStream;
-import java.io.FileNotFoundException;
-
-public class TableDump extends PEFile {
-
-    //##########################################################################
-
-    public TableDump(String filename) throws FileNotFoundException {
-	super(filename);
-    }
-
-    /***/
-    public void dump(PrintStream out) {
-	out.println("CLI RVA: " + CLI_RVA);
-	out.println("Optional header size: " + optHeaderSize);
-	out.println("Number of sections: " + numOfSections);
-	out.println();
-
-	for (int i = 0; i < sections.length; i++) {
-	    sections[i].dump(out);
-	    out.println();
-	}
-
-	out.println("MetaData Offset:   0x" + Integer.toHexString(posMetadata));
-	out.println("Number of streams: " + numOfStreams);
-
-	out.println("#~ stream");       Meta.dump(out);    out.println();
-	out.println("#Strings stream"); Strings.dump(out); out.println();
-	if (US != null) {
-	    out.println("#US stream");      US.dump(out);      out.println();
-	}
-	out.println("#GUID stream");    GUID.dump(out);    out.println();
-	out.println("#Blob stream");    Blob.dump(out);    out.println();
-
-	out.println("Heap Sizes IndexedSeq = 0x0" + Integer.toHexString(heapSizes));
-	out.println();
-
- 	for(int i = 0; i < Table.MAX_NUMBER; i++)
- 	    if(getTable(i).rows > 0) {
- 		dump(out, getTable(i));
- 		out.println();
- 	    }
-
-    }
-
-    /** Dumps the contents of this table. */
-    public void dump(PrintStream out, Table table) {
-	out.println("Table:" + "  ID = 0x" + byte2hex(table.id));
-	out.println("\tname = " + table.getTableName());
-	out.println("\trows =  " + table.rows);
-	//out.println("\tStart pos in file = 0x" + Long.toHexString(table.start));
- 	for (int i = 1; i <= table.rows; i++)
- 	    dumpRow(out, table, i);
-    }
-
-    public void dumpIndex(PrintStream out, int tableSetId, int index) {
-	int tableId = Table.getTableId(tableSetId, index);
-	int row =  Table.getTableIndex(tableSetId, index);
-	out.print(getTable(tableId).getTableName());
-        out.print('[');
-	out.print(getTable(tableId).isShort ? short2hex(row) : int2hex(row));
-        out.print(']');
-    }
-
-    public void dumpRow(PrintStream out, Table table, int row) {
-	table.readRow(row);
-	out.print(table.getTableName());
-	out.print("[" + short2hex(row) + "]: ");
-	dumpRow(out, table);
-	out.println();
-    }
-
-    /** Prints the current content of the fields of the class. */
-    public void dumpRow(PrintStream out, Table table) {
-	if (table instanceof ModuleDef) {
-	    ModuleDef t = (ModuleDef)table;
-	    out.print("Generation = 0x" + short2hex(t.Generation));
-	    out.print("; Name = " + getString(t.Name));
-	    //out.print("; Mvid = (" + bytes2hex(getGUID(Mvid)) + ")");
-	} else if (table instanceof TypeRef) {
-	    TypeRef t = (TypeRef)table;
-	    out.print("FullName = " + t.getFullName());
-	    out.print("; ResolutionScope = 0x" + int2hex(t.ResolutionScope));
-	} else if (table instanceof TypeDef) {
-	    TypeDef t = (TypeDef)table;
-	    out.print("Flags = 0x"); out.print(int2hex(t.Flags));
-	    out.print("; FullName = "); out.print(t.getFullName());
-	    out.print("; Extends = ");
-	    dumpIndex(out, Table._TypeDefOrRef, t.Extends);
-	    out.print("; FieldList = "); out.print(t.FieldList);
-	    out.print("; MethodList = "); out.print(t.MethodList);
-	} else if (table instanceof FieldTrans) {
-	    FieldTrans t = (FieldTrans)table;
-	    out.print("Field = "); out.print(t.Field);
-	} else if (table instanceof FieldDef) {
-	    FieldDef t = (FieldDef)table;
-	    out.print("Flags = 0x" + short2hex(t.Flags));
-	    out.print("; Name = " + t.getName());
-	    out.print("; Signature = (" +
-		      bytes2hex(getBlob(t.Signature)) + ")");
-	} else if (table instanceof MethodTrans) {
-	    MethodTrans t = (MethodTrans)table;
-	    out.print("Method = "); out.print(t.Method);
-	} else if (table instanceof MethodDef) {
-	    MethodDef t = (MethodDef)table;
-	    out.print("Flags = 0x" + short2hex(t.Flags));
-	    out.print("; Name = " + t.getName());
-	    out.print("; ParamList = " + t.ParamList);
-	    out.print("; Signature = (" +
-		      bytes2hex(getBlob(t.Signature)) + ")");
-	} else if (table instanceof ParamDef) {
-	    ParamDef t = (ParamDef)table;
-	    out.print("Flags = 0x" + short2hex(t.Flags));
-	    out.print("; Name = " + t.getName());
-	    out.print("; Sequence = " + t.Sequence);
-	} else if (table instanceof InterfaceImpl) {
-	    InterfaceImpl t = (InterfaceImpl)table;
-	    out.print("Class = 0x" + short2hex(t.Class));// + " (ref to: ");
-	    //TypeDef td = (TypeDef) getTable(TypeDef.ID);
-	    //td.readRow(Class);
-	    //td.dumpRow(out);
-	    out.print("; Interface = 0x" + short2hex(t.Interface));
-	} else if (table instanceof MemberRef) {
-	    MemberRef t = (MemberRef)table;
-	    out.print("Name = " + t.getName());
-	    out.print("; Signature = (" +
-		      bytes2hex(getBlob(t.Signature)) + ")");
-	    out.print("; Class = " + t.Class);
-	} else if (table instanceof Constant) {
-	    Constant t = (Constant)table;
-	    out.print("Parent = "); dumpIndex(out, Table._HasConstant, t.Parent);
-	    out.print("; Type = 0x" + byte2hex(t.Type));
- 	    out.print("; Value = (" + bytes2hex(getBlob(t.Value)));
-	    out.print("); Value = " + t.getValue());
-	} else if (table instanceof CustomAttribute) {
-	    CustomAttribute t = (CustomAttribute)table;
-	    //out.print("Parent = 0x" + int2hex(t.Parent));
-	    out.print("Parent = ");
-            dumpIndex(out, Table._HasCustomAttribute, t.Parent);
-	    //out.print("; Type = 0x" + short2hex(t.Type));
-	    out.print("; Type = ");
-            dumpIndex(out, Table._CustomAttributeType, t.Type);
-	    out.print("; Value = (" + bytes2hex(t.getValue()) + ")");
-	} else if (table instanceof FieldMarshal) {
-	    FieldMarshal t = (FieldMarshal)table;
-	    out.print("NativeType = (");
-	    out.print(bytes2hex(getBlob(t.NativeType)) + ")");
-	} else if (table instanceof DeclSecurity) {
-	    DeclSecurity t = (DeclSecurity)table;
-	    out.print("Action = 0x" + short2hex(t.Action));
-	    out.print("; PermissionSet = (" +
-		      bytes2hex(getBlob(t.PermissionSet)) + ")");
-	} else if (table instanceof ClassLayout) {
-	    ClassLayout t = (ClassLayout)table;
-	    out.print("PackingSize = 0x" + short2hex(t.PackingSize));
-	    out.print("; ClassSize = 0x" + int2hex(t.ClassSize));
-	    out.print(": Parent = " + t.Parent + " (ref to: ");
-	    dumpRow(out, this.TypeDef(t.Parent));
-	    out.print(")");
-	} else if (table instanceof FieldLayout) {
-	    FieldLayout t = (FieldLayout)table;
-	    out.print("Offset = 0x" + int2hex(t.Offset));
-	    out.print("; Field = (ref to: ");
-	    dumpRow(out, this.FieldDef(t.Field));
-	    out.print(")");
-	} else if (table instanceof StandAloneSig) {
-	    StandAloneSig t = (StandAloneSig)table;
-	    out.print("StandAloneSig: Signature = (" +
-		      bytes2hex(getBlob(t.Signature)) + ")");
-	} else if (table instanceof EventMap) {
-	    EventMap t = (EventMap)table;
-	    out.print("Parent = 0x" + int2hex(t.Parent) + " (ref to: ");
-	    dumpRow(out, this.TypeDef(t.Parent));
-	    out.print("); EventList = 0x"); out.print(int2hex(t.EventList));
-	} else if (table instanceof EventDef) {
-	    EventDef t = (EventDef)table;
-	    out.print("EventFlags = 0x" + short2hex(t.EventFlags));
-	    out.print("; Name = " + t.getName());
-            out.print("; EventType = 0x" + int2hex(t.EventType));
-	} else if (table instanceof PropertyMap) {
-	    PropertyMap t = (PropertyMap)table;
-	    out.print("Parent = " + t.Parent + " (ref to: ");
-	    dumpRow(out, this.TypeDef(t.Parent));
-	    out.print(")");
-	} else if (table instanceof PropertyDef) {
-	    PropertyDef t = (PropertyDef)table;
-	    out.print("Flags = 0x" + short2hex(t.Flags));
-	    out.print("; Name = " + t.getName());
-	    out.print("; Type = (" + bytes2hex(getBlob(t.Type)) + ")");
-	} else if (table instanceof MethodSemantics) {
-	    MethodSemantics t = (MethodSemantics)table;
-	    out.print("Semantics = 0x" + short2hex(t.Semantics));
-	    out.print("; Method = 0x" + int2hex(t.Method) + " (ref to: ");
-	    dumpRow(out, this.MethodDef(t.Method));
-	    out.print("); Association = 0x" + int2hex(t.Association));
-	} else if (table instanceof MethodImpl) {
-	    MethodImpl t = (MethodImpl)table;
-	    out.print("Class = (ref to: ");
-	    dumpRow(out, this.TypeDef(t.Class));
-	    out.print(")");
-	} else if (table instanceof ModuleRef) {
-	    ModuleRef t = (ModuleRef)table;
-	    out.print("Name = " + t.getName());
-	} else if (table instanceof TypeSpec) {
-	    TypeSpec t = (TypeSpec)table;
-	    out.print("Signature = (" +
-		      bytes2hex(getBlob(t.Signature)) + ")");
-	} else if (table instanceof ImplMap) {
-	    ImplMap t = (ImplMap)table;
-	    out.print("ImportName = " + getString(t.ImportName));
-	} else if (table instanceof FieldRVA) {
-	    FieldRVA t = (FieldRVA)table;
-	    out.print("RVA = 0x" + int2hex(t.RVA));
-	    out.print("; Field = (ref to: ");
-	    dumpRow(out, this.FieldDef(t.Field));
-	    out.print(")");
-	} else if (table instanceof AssemblyDef) {
-	    AssemblyDef t = (AssemblyDef)table;
-	    out.print("Flags = 0x" + int2hex(t.Flags));
-	    out.print(" ; Name = " + getString(t.Name));
-	    out.print("; Culture = " + getString(t.Culture));
-	    out.print(" ; Version = " + t.MajorVersion + ".");
-	    out.print(t.MinorVersion + "." + t.BuildNumber);
-	    out.print("." + t.RevisionNumber);
-	    out.print("; HashAlgId = 0x" + int2hex(t.HashAlgId));
-	    out.print("; PublicKey = (");
-	    out.print(bytes2hex(getBlob(t.PublicKey)) + ")");
-	} else if (table instanceof AssemblyProcessor) {
-	    AssemblyProcessor t = (AssemblyProcessor)table;
-	    out.print("Processor = 0x" + int2hex(t.Processor));
-	} else if (table instanceof AssemblyOS) {
-	    AssemblyOS t = (AssemblyOS)table;
-	    out.print("!?!");
-	} else if (table instanceof AssemblyRef) {
-	    AssemblyRef t = (AssemblyRef)table;
-	    out.print("Flags = 0x" + int2hex(t.Flags));
-	    out.print("; Name = " + getString(t.Name));
-	    out.print("; Culture = " + getString(t.Culture));
-	    out.print("; Version = " + t.MajorVersion + "." + t.MinorVersion);
-	    out.print("." + t.BuildNumber + "." + t.RevisionNumber);
-	    out.print("; PublicKeyOrToken = (" +
-		      bytes2hex(getBlob(t.PublicKeyOrToken)) + ")");
-	    out.print("; HashValue = (" +
-		      bytes2hex(getBlob(t.HashValue)) + ")");
-	} else if (table instanceof AssemblyRefProcessor) {
-	    AssemblyRefProcessor t = (AssemblyRefProcessor)table;
-	    out.print("!?!");
-	} else if (table instanceof AssemblyRefOS) {
-	    AssemblyRefOS t = (AssemblyRefOS)table;
-	    out.print("!?!");
-	} else if (table instanceof FileDef) {
-	    FileDef t = (FileDef)table;
-	    out.print("Flags = 0x" + int2hex(t.Flags));
-	    out.print("; Name = " + t.getName());
-	    out.print("; HashValue = (" + bytes2hex(getBlob(t.HashValue)) +")");
-	} else if (table instanceof ExportedType) {
-	    ExportedType t = (ExportedType)table;
-	    out.print("FullName = " + t.getFullName());
-	} else if (table instanceof ManifestResource) {
-	    ManifestResource t = (ManifestResource)table;
-	    out.print("Name = " + getString(t.Name));
-	    out.print("; Flags = 0x" + int2hex(t.Flags));
-	} else if (table instanceof NestedClass) {
-	    NestedClass t = (NestedClass)table;
-	    out.print(this.TypeDef(t.EnclosingClass).getFullName());
-	    out.print("/");
-	    out.print(this.TypeDef(t.NestedClass).getFullName());
-	} else
-	    throw new RuntimeException("Unknown table " + table.getClass());
-    }
-
-    //##########################################################################
-
-    public static void main(String[] args) {
-	if (args.length < 1) {
-	    System.err.println("You must supply a filename!");
-	    System.exit(1);
-	}
-
-	TableDump file = null;
-	try {
-	    file = new TableDump(args[0]);
-	} catch (FileNotFoundException e) { e.printStackTrace(); }
-
-	if (args.length > 1) {
-            nextarg:
-	    for (int i = 1; i < args.length; i++) {
-		String name = args[i];
-		for (int tableId = 0; tableId < Table.MAX_NUMBER; tableId++) {
-		    Table table = file.getTable(tableId);
-		    if ((table.rows > 0) && name.equals(table.getTableName())) {
-			file.dump(System.out, table);
-			System.out.println();
-			continue nextarg;
-		    }
-		}
-                System.err.println("No such table: " + name);
-	    }
-	} else
-	    file.dump(System.out);
-    }
-
-    //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java
deleted file mode 100644
index 2c5946a..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java
+++ /dev/null
@@ -1,92 +0,0 @@
-
-package test;
-
-import ch.epfl.lamp.compiler.msil.*;
-import ch.epfl.lamp.compiler.msil.util.Table;
-
-import java.io.PrintStream;
-
-public class Test {
-    public static void main(String[] args) {
-	if (args.length < 1) {
-	    System.err.println("You must supply a filename!");
-	    System.exit(1);
-	}
-
-	Assembly assem = Assembly.LoadFrom(args[0]);
-	Type.initMSCORLIB(assem);
-
-	//"System.Collections.ArrayList"
-	if (args.length >= 2) {
-	    Type t = Type.GetType(args[1]);
-	    dumpType(System.out, t);
-	} else {
-	    dumpAssembly(assem);
-	}
-    }
-
-
-    public static void dumpAssembly(Assembly assem) {
-	Module[] modules = assem.GetModules();
-// 	System.out.println("Modules in assembly " + assem +
-// 			   " (" + modules.length + ")");
-// 	for (int i = 0; i < modules.length; i++) {
-// 	    System.out.println("\t" + modules[i]);
-// 	}
-
- 	Type[] types = modules[0].GetTypes();
-// 	System.out.println("Types in assembly " + assem +
-// 			   " (" + types.length + ")");
-	for (int i = 0; i < types.length; i++) {
-	    System.out.println("#" + i + " -> " + types[i]);
-	    types[i].completeType();
-	}
-    }
-
-    public static final void dumpType(PrintStream out, Type type) {
-	out.println("Type = " + type);
-	out.println("Name = " + type.Name);
-	out.println("Namespace = " + type.Namespace);
-	out.println("FullName = " + type.FullName);
-	out.println("Attributes = " + TypeAttributes.toString(type.Attributes));
-	out.println("BaseType = " + type.BaseType);
-	Type[] ifaces = type.GetInterfaces();
-	if (ifaces != null) {
-	    for (int i = 0; i < ifaces.length; i++)
-		out.println("\timplements " + ifaces[i]);
-	}
-	out.println("Assembly = " + type.Assembly);
-	out.println("Module = " + type.Module);
-	out.println("DeclaringType = " + type.DeclaringType);
-	out.println("IsInterface = " + type.IsInterface);
-	out.println("IsAbstract = " + type.IsAbstract);
-
-	FieldInfo[] fields = type.GetFields(BindingFlags.Instance
-					    | BindingFlags.Static
-					    | BindingFlags.NonPublic);
-	out.println("\nFields (" + fields.length + "):");
-	for (int i = 0; i < fields.length; i++) {
-	    out.println("\t" + fields[i]);
-	    out.println("\t\tDeclaringType = " + fields[i].DeclaringType);
-	    out.println("\t\tReflectedType = " + fields[i].ReflectedType);
-	}
-
-	ConstructorInfo[] constrs = type.GetConstructors();
-	out.println("\nConstructors (" + constrs.length + "):");
-	for (int i = 0; i < constrs.length; i++) {
-	    out.println("\t" + constrs[i]);
-	}
-
-// 	MethodInfo[] methods = type.GetMethods(BindingFlags.Instance
-// 					       | BindingFlags.Static
-// 					       | BindingFlags.Public
-// 					       | BindingFlags.NonPublic);
-	MethodInfo[] methods = type.GetMethods();
-	out.println("\nMethods (" + methods.length + "):");
-	for (int i = 0; i < methods.length; i++) {
-	    out.println("\t" + methods[i]);
-	    out.println("\t\tDeclaringType = " + methods[i].DeclaringType);
-	    out.println("\t\tReflectedType = " + methods[i].ReflectedType);
-	}
-    }
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java b/src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java
deleted file mode 100644
index 56519e8..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.Type;
-import ch.epfl.lamp.compiler.msil.CustomModifier;
-
-/**
- * A PECustomMod holds the info parsed from metadata per the CustomMod production in Sec. 23.2.7, Partition II.
- * */
-public final class PECustomMod {
-
-    public final Type marked;
-    public final CustomModifier[] cmods;
-
-    /** Terminology:
-        the CustomModifier(s) are markers,
-        and the msil.Type is a type marked by those markers. */
-    public PECustomMod(Type marked, CustomModifier[] cmods) {
-      this.marked = marked;
-      this.cmods = cmods;
-    }
-
-}
-
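For reference, this is how the emit layer above wires a plain field type through PECustomMod: TypeBuilder.DefineField(name, fieldType, attrs) wraps the type with a null modifier array. A minimal Scala sketch (helper name is hypothetical):

    import ch.epfl.lamp.compiler.msil.Type
    import ch.epfl.lamp.compiler.msil.util.PECustomMod

    // A field type carrying no custom modifiers, exactly as DefineField does.
    def plainFieldType(fieldType: Type): PECustomMod =
      new PECustomMod(fieldType, null)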
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java b/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java
deleted file mode 100644
index 454a94e..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-
-import java.io.PrintStream;
-
-/** Describes a section from a PE/COFF file
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class PESection {
-
-    private final PEFile file;
-    private final long sectionStart;
-
-    public final String name;
-    public final int virtAddr;
-    public final int virtSize;
-    public final int realAddr;
-    public final int realSize;
-    public final int flags;
-
-    private static final byte[] buf = new byte[8];
-
-    public PESection(PEFile file) {
-	this.file = file;
-	sectionStart = file.pos();
-	file.read(buf);
- 	int i;
- 	for(i = 7; (i >= 0) && (0 == buf[i]); i--);
- 	name = new String(buf, 0, i + 1);
-	virtSize = file.readInt();
-	virtAddr = file.readInt();
-	realSize = file.readInt();
-	realAddr = file.readInt();
-	file.skip(3 * PEFile.INT_SIZE);
-	flags = file.readInt();
-    }
-
-
-    public void dump(PrintStream out) {
-	out.println("Section name:    " + name +
-		   " (name.length=" + name.length() + ")");
-	out.println("Virtual Address: 0x" + PEFile.int2hex(virtAddr));
-	out.println("Virtual Size:    0x" + PEFile.int2hex(virtSize));
-	out.println("Real Address:    0x" + PEFile.int2hex(realAddr));
-	out.println("Real Size:       0x" + PEFile.int2hex(realSize));
-	out.println("Flags:           0x" + PEFile.int2hex(flags));
-    }
-
-} // class PESection
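A standalone Scala sketch of the section-name parse used in the constructor above: the 8-byte, zero-padded name field is trimmed by scanning back from the end for the last non-zero byte (function name is illustrative):

    def sectionName(raw: Array[Byte]): String = {
      var i = raw.length - 1
      while (i >= 0 && raw(i) == 0) i -= 1   // drop trailing zero padding
      new String(raw, 0, i + 1)
    }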
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java b/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java
deleted file mode 100644
index 649d9e7..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-import ch.epfl.lamp.compiler.msil.PEFile.Sig;
-
-import java.io.PrintStream;
-import java.io.IOException;
-
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-
-/**
- * Implements support for CLI streams within a PE file.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class PEStream implements Signature {
-
-    //##########################################################################
-    // Members
-
-    /** The name of the stream. */
-    public final String name;
-
-    /** The offset of the stream from the beginning of the file. */
-    public final int offset;
-
-    /** The size of the stream in bytes; shall be a multiple of 4. */
-    public final int size;
-
-    private final PEFile file;
-
-    private final ByteBuffer buffer;
-
-    //##########################################################################
-
-    /** The PEStream class constructor.
-     *  @param file - the PEFile to which this stream belongs
-     */
-    public PEStream(PEFile file) {
-	this.file = file;
-	offset = file.fromRVA(file.rvaMetadata + file.readInt());
-	size = file.readInt();
-	buffer = file.getBuffer(offset, size);
-
-	int i = 0;
-	byte [] _buf = new byte [16];
-	do {
-	    _buf[i] = (byte) file.readByte();
-	    i++;
-	} while(0 != _buf[i-1]);
-	name = new String(_buf, 0, i - 1);
-
-	file.align(PEFile.INT_SIZE, file.posMetadata);
-	//assert size % 4 == 0;
-    }
-
-    /** Move to the specified position in the stream. */
-    private  void seek(int pos) {
-	try {
-	    buffer.position(pos);
-	} catch (IllegalArgumentException e) {
-	    System.err.println("\nSeek failed in file " + file
-			       + " for position " + pos
-			       + " of stream " + name + " (" + buffer + ")");
-	    throw e;
-	}
-    }
-
-    /** Return a string from the specified position in the stream. */
-    public String getString(int pos) {
-	seek(pos);
-	buffer.mark();
-	int i;
-	for (i = 0; getByte() != 0; i++);
-	byte[] buf = new byte[i];
-	buffer.reset(); // go back to the marked position
-	buffer.get(buf);
-	try {
-	    return new String(buf, "UTF-8");
-	} catch (java.io.UnsupportedEncodingException e) {
-	    throw new RuntimeException(e);
-	}
-    }
-
-    /** Read a byte from the stream. */
-    public int getByte() {
-	return (buffer.get() + 0x0100) & 0xff;
-    }
-
-    /** Return the GUID at the given position in the stream. */
-    public byte[] getGUID(int pos) {
-	seek(pos);
-	byte[] buf = new byte[32]; // 128-bit GUID
-	try {
-	    buffer.get(buf);
-	} catch (Exception e) {
-	    System.err.println();
-	    System.err.println("PEStream.getBlob(): Exception for pos = " +
-			       pos + " and buf.length = " + buf.length);
-	    System.err.println("\tbuffer = " + buffer);
-	    e.printStackTrace();
-	    throw new RuntimeException();
-	}
-	return buf;
-    }
-
-    public int readLength() {
-	int length = getByte();
-	if ((length & 0x80) != 0) {
-	    length = ((length & 0x7f) << 8) | getByte();
-	    if ((length & 0x4000) != 0)
-		length = ((length & 0x3fff) << 16) | (getByte()<<8) | getByte();
-	}
-	return length;
-    }
-
-    /** Return a blob from the specified position in the stream. */
-    public byte[] getBlob(int pos) {
-	seek(pos);
-	// the length indicates the number of bytes
-	// AFTER the encoded size of the blob
-	int length = readLength();
-	byte[] buf = new byte[length];
-	buffer.get(buf);
-	return buf;
-    }
-
-    /***/
-    public Sig getSignature(int pos) {
-	seek(pos);
-	return file.newSignature(buffer);
-    }
-
-    /**
-     */
-    public Object getConstant(int type, int pos) {
-	Object val = null;
-	seek(pos);
-	int length = readLength(); // skip over the blob length field
-	switch (type) {
-	case ELEMENT_TYPE_BOOLEAN:
-	    assert length == 1;
-	    return buffer.get() == 0 ? Boolean.FALSE : Boolean.TRUE;
-	case ELEMENT_TYPE_CHAR:
-	    assert length == 2 : "length == " + length;
-	    return new Character(buffer.getChar());
-	case ELEMENT_TYPE_I1:
-	case ELEMENT_TYPE_U1:       // TODO U1 not the same as I1
-	    assert length == 1;
-	    return new Byte(buffer.get());
-	case ELEMENT_TYPE_I2:
-	case ELEMENT_TYPE_U2:
-	    assert length == 2;
-	    return new Short(buffer.getShort());
-	case ELEMENT_TYPE_I4:
-	case ELEMENT_TYPE_U4:
-	    assert length == 4;
-	    return new Integer(buffer.getInt());
-	case ELEMENT_TYPE_I8:
-	case ELEMENT_TYPE_U8:
-	    assert length == 8;
-	    return new Long(buffer.getLong());
-	case ELEMENT_TYPE_R4:
-	    assert length == 4;
-	    return new Float(buffer.getFloat());
-	case ELEMENT_TYPE_R8:
-	    assert length == 8;
-	    return new Double(buffer.getDouble());
-	case ELEMENT_TYPE_STRING:
-// 	    length /= 2;
-// 	    char[] chars = new char[length];
-// 	    for (int i = 0; i < length; i++)
-// 		chars[i] = buffer.getChar();
-// 	    val = new String(chars);
-	    try {
-		return new String(getBlob(pos), "UTF-16LE");
-	    } catch(java.io.UnsupportedEncodingException e) {
-		throw new RuntimeException(e);
-	    }
-	default: throw new RuntimeException("Illegal constant type: " + type);
-	}
-    }
-
-    public void dump(PrintStream out) {
-	out.println("Stream name:   " + name + " (length " +
-		   name.length() + " characters)");
-	out.println("Stream offset: 0x" + PEFile.int2hex(offset));
-	out.println("Stream size:   0x" + PEFile.int2hex(size));
-    }
-
-    //##########################################################################
-}  // class PEStream
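The blob and #US heaps read above use the compressed length prefix that readLength decodes: one byte when the value is below 0x80, otherwise two or four bytes selected by the high bits of the first byte. A self-contained Scala sketch mirroring that logic on a plain ByteBuffer (names are illustrative):

    import java.nio.ByteBuffer

    def readCompressedLength(buf: ByteBuffer): Int = {
      def u8(): Int = buf.get() & 0xff
      var length = u8()
      if ((length & 0x80) != 0) {                      // not the 1-byte form
        length = ((length & 0x7f) << 8) | u8()         // 2-byte form
        if ((length & 0x4000) != 0)                    // actually the 4-byte form
          length = ((length & 0x3fff) << 16) | (u8() << 8) | u8()
      }
      length
    }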
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java b/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java
deleted file mode 100644
index d5dc0ff..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.Type;
-
-/**
- * Signatures
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public interface Signature {
-
-    //##########################################################################
-
-    /** Marks end of a list. */
-    public static final int  ELEMENT_TYPE_END = 0x00;
-    /** void */
-    public static final int  ELEMENT_TYPE_VOID = 0x01;
-    /** boolean */
-    public static final int  ELEMENT_TYPE_BOOLEAN = 0x02;
-    /** char */
-    public static final int  ELEMENT_TYPE_CHAR = 0x03;
-    /** signed byte */
-    public static final int  ELEMENT_TYPE_I1 = 0x04;
-    /** byte */
-    public static final int  ELEMENT_TYPE_U1 = 0x05;
-    /** short */
-    public static final int  ELEMENT_TYPE_I2 = 0x06;
-    /** unsigned short */
-    public static final int  ELEMENT_TYPE_U2 = 0x07;
-    /** int */
-    public static final int  ELEMENT_TYPE_I4 = 0x08;
-    /** unsigned int */
-    public static final int  ELEMENT_TYPE_U4 = 0x09;
-    /** long */
-    public static final int  ELEMENT_TYPE_I8 = 0x0a;
-    /** unsigned long */
-    public static final int  ELEMENT_TYPE_U8 = 0x0b;
-    /** float */
-    public static final int  ELEMENT_TYPE_R4 = 0x0c;
-    /** double */
-    public static final int  ELEMENT_TYPE_R8 = 0x0d;
-    /** string */
-    public static final int  ELEMENT_TYPE_STRING = 0x0e;
-    /** Followed by <type> token. */
-    public static final int  ELEMENT_TYPE_PTR = 0x0f;
-    /** Followed by <type> token. */
-    public static final int  ELEMENT_TYPE_BYREF = 0x10;
-    /** Followed by <type> token */
-    public static final int  ELEMENT_TYPE_VALUETYPE = 0x11;
-    /** Followed by <type> token */
-    public static final int  ELEMENT_TYPE_CLASS = 0x12;
-
-    public static final int ELEMENT_TYPE_VAR = 0x13;
-
-    /**
-     * <type> <rank> <boundsCount> <bound1> ... <loCount> <lo1> ...
-     */
-    public static final int  ELEMENT_TYPE_ARRAY = 0x14;
-
-    public static final int ELEMENT_TYPE_GENERICINST = 0x15;
-    /***/
-    public static final int  ELEMENT_TYPE_TYPEDBYREF = 0x16;
-    /** System.IntPtr */
-    public static final int  ELEMENT_TYPE_I = 0x18;
-    /** System.UIntPtr */
-    public static final int  ELEMENT_TYPE_U = 0x19;
-    /** Followed by full method signature. */
-    public static final int  ELEMENT_TYPE_FNPTR = 0x1b;
-    /** System.Object. */
-    public static final int  ELEMENT_TYPE_OBJECT = 0x1c;
-    /** Single-dim array with 0 lower bound. */
-    public static final int  ELEMENT_TYPE_SZARRAY = 0x1d;
-
-    public static final int ELEMENT_TYPE_MVAR = 0x1e;
-
-    /** Required modifier : followed by a TypeDef or TypeRef token. */
-    public static final int  ELEMENT_TYPE_CMOD_REQD = 0x1f;
-    /** Optional modifier : followed by a TypeDef or TypeRef token. */
-    public static final int  ELEMENT_TYPE_CMOD_OPT = 0x20;
-    /** Implemented within the CLI. */
-    public static final int  ELEMENT_TYPE_INTERNAL = 0x21;
-    /** Or'd with following element types. */
-    public static final int  ELEMENT_TYPE_MODIFIER = 0x40;
-    /** Sentinel for varargs method signature. */
-    public static final int  ELEMENT_TYPE_SENTINEL = 0x41;
-    /**Denotes a local variable that points at a pinned object. */
-    public static final int  ELEMENT_TYPE_PINNED = 0x45;
-
-    //##########################################################################
-    // signature designators
-
-    public static final int HASTHIS = 0x20;
-    public static final int EXPLICITTHIS = 0x40;
-    public static final int DEFAULT = 0x00;
-    public static final int VARARG = 0x05;
-    public static final int GENERIC = 0x10;
-    public static final int SENTINEL = 0x41;
-    public static final int C = 0x01;
-    public static final int STDCALL = 0x02;
-    public static final int THISCALL = 0x03;
-    public static final int FASTCALL = 0x04;
-    public static final int FIELD = 0x06;
-    public static final int PROPERTY = 0x08;
-    public static final int LOCAL_SIG = 0x07;
-
-    //##########################################################################
-    // extra IDs used in the serialization format of named arguments
-    // to custom attributes. Reverse-engineered from compiled C# example
-
-    /** What follows is a string with the full name of the type. */
-    public static final int X_ELEMENT_TYPE_TYPE = 0x50;
-
-    /** What follows is a string with the full name of the enumeration type*/
-    public static final int X_ELEMENT_TYPE_ENUM = 0x55;
-
-    /** The named argument specifies a field. */
-    public static final int X_ELEMENT_KIND_FIELD = 0x53;
-
-    /** The named argument specifies a property. */
-    public static final int X_ELEMENT_KIND_PROPERTY = 0x54;
-
-    //##########################################################################
-}  // interface Signature
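For quick reference when reading signature blobs, a small Scala helper naming the primitive element-type codes defined above; the helper and the CLI-style names it returns are purely illustrative, and only codes listed in this interface are covered:

    def elementTypeName(code: Int): String = code match {
      case 0x01 => "void"
      case 0x02 => "boolean"
      case 0x03 => "char"
      case 0x04 => "int8"
      case 0x05 => "uint8"
      case 0x06 => "int16"
      case 0x07 => "uint16"
      case 0x08 => "int32"
      case 0x09 => "uint32"
      case 0x0a => "int64"
      case 0x0b => "uint64"
      case 0x0c => "float32"
      case 0x0d => "float64"
      case 0x0e => "string"
      case 0x1c => "object"
      case other => "element type 0x" + other.toHexString
    }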
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java b/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java
deleted file mode 100644
index 1f43b8c..0000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java
+++ /dev/null
@@ -1,1859 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET Assemblies
- */
-
-
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-import ch.epfl.lamp.compiler.msil.PEFile.Sig;
-
-import java.io.PrintStream;
-import java.nio.ByteBuffer;
-import java.nio.MappedByteBuffer;
-
-/**
- * Represents a table in a .NET assembly
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class Table {
-
-    //##########################################################################
-
-    public static final int MAX_NUMBER = 64;
-
-    public static final long VALID_TABLES_MASK = 0x03ff3fb7ff57L;
-
-    //##########################################################################
-    // fields and methods for handling predefined sets of tables
-
-    public static final int TABLE_SET_LENGTH = 13;
-
-    public static final int _TypeDefOrRef = 0;
-    public static final int _HasConstant = 1;
-    public static final int _HasCustomAttribute = 2;
-    public static final int _HasFieldMarshal = 3;
-    public static final int _HasDeclSecurity = 4;
-    public static final int _MemberRefParent = 5;
-    public static final int _HasSemantics = 6;
-    public static final int _MethodDefOrRef = 7;
-    public static final int _MemberForwarded = 8;
-    public static final int _Implementation = 9;
-    public static final int _CustomAttributeType = 10;
-    public static final int _ResolutionScope = 11;
-    public static final int _TypeOrMethodDef = 12;
-
-
-    public static final int[][] TableSet = new int[TABLE_SET_LENGTH][];
-
-    static {
-	TableSet[_TypeDefOrRef] =
-	    new int[] {TypeDef.ID, TypeRef.ID, TypeSpec.ID};
-	TableSet[_HasConstant] =
-	    new int[] {FieldDef.ID, ParamDef.ID, PropertyDef.ID};
-	TableSet[_HasCustomAttribute] =
-	    new int[] {MethodDef.ID, FieldDef.ID, TypeRef.ID, TypeDef.ID,
-		       ParamDef.ID, InterfaceImpl.ID, MemberRef.ID, ModuleDef.ID,
-		       -1, PropertyDef.ID, EventDef.ID, -1, ModuleRef.ID,
-		       TypeSpec.ID, AssemblyDef.ID, AssemblyRef.ID,
-		       FileDef.ID, ExportedType.ID, ManifestResource.ID};
-	TableSet[_HasFieldMarshal] =
-	    new int[] {FieldDef.ID, ParamDef.ID};
-	TableSet[_HasDeclSecurity] =
-	    new int[] {TypeDef.ID, MethodDef.ID, AssemblyDef.ID};
-	TableSet[_MemberRefParent] =
-	    new int[] {-1, TypeRef.ID, ModuleRef.ID, MethodDef.ID, TypeSpec.ID};
-	TableSet[_HasSemantics] =
-	    new int[] {EventDef.ID, PropertyDef.ID};
-	TableSet[_MethodDefOrRef] =
-	    new int[] {MethodDef.ID, MemberRef.ID};
-	TableSet[_MemberForwarded] =
-	    new int[] {FieldDef.ID, MethodDef.ID};
-	TableSet[_Implementation] =
-	    new int[] {FileDef.ID, AssemblyRef.ID, ExportedType.ID};
-	TableSet[_CustomAttributeType] =
-	    new int[] {-1, -1, MethodDef.ID, MemberRef.ID, -1};
-	TableSet[_ResolutionScope] =
-	    new int[] {ModuleDef.ID, ModuleRef.ID, AssemblyRef.ID, TypeRef.ID};
-        TableSet[_TypeOrMethodDef] =
-                new int[]{TypeDef.ID, MethodDef.ID};
-    }
-
-    public static final int[] NoBits =
-            new int[]{2, 2, 5, 1, 2, 3, 1, 1, 1, 2, 3, 2, 1};
-
-    public static int getMask(int tableSetId) {
-	return (1 << NoBits[tableSetId]) - 1;
-    }
-
-    public static int getTableId(int tableSet, int index) {
-	return TableSet[tableSet][index & getMask(tableSet)];
-    }
-
-    public static int getTableIndex(int tableSet, int index) {
-	return index >> NoBits[tableSet];
-    }
-
-    public static int encodeIndex(int index, int tableSetId, int tableId) {
-	int[] tableSet = TableSet[tableSetId];
-	for (int i = 0; i < tableSet.length; i++) {
-	    if (tableSet[i] == tableId)
-		return (index << NoBits[tableSetId]) | i;
-	}
-	throw new RuntimeException("Cannot find table #" + tableId +
-				   " in table set #" + tableSetId);
-    }
-
-    //##########################################################################
-
-    private static final String [] tableName = {
-	"Module",              "TypeRef",          "TypeDef", "   FieldTrans",
-	"Field",               "MethodTrans",      "Method",      "",
-	"Param",               "InterfaceImpl",    "MemberRef",   "Constant",
-	"CustomAttribute",     "FieldMarshal",     "DeclSecurity","ClassLayout",
-	"FieldLayout",         "StandAloneSig",    "EventMap",    "",
-	"Event",               "PropertyMap",      "",            "Property",
-	"MethodSemantics",     "MethodImpl",       "ModuleRef",   "TypeSpec",
-	"ImplMap",             "FieldRVA",         "",            "",
-	"Assembly",            "AssemblyProcessor","AssemblyOS",  "AssemblyRef",
-	"AssemblyRefProcessor","AssemblyRefOS",    "File",        "ExportedType",
-            "ManifestResource", "NestedClass", "GenericParam", "MethodSpec",
-            "GenericParamConstraint", "", "", "",
-	"",                    "",                 "",            "",
-	"",                    "",                 "",            "",//0x30-0x37
-	"",                    "",                 "",            "",
-	"",                    "",                 "",            "" //0x37-0x3f
-    };
-
-    /** Creates a table with the given id and number of rows.
-     */
-    public static Table newTable(PEFile file, int id, int rows) {
-	Table table = null;
-	switch(id) {
-	case ModuleDef.ID:         table = new ModuleDef(file, rows); break;
- 	case TypeRef.ID:           table = new TypeRef(file, rows); break;
- 	case TypeDef.ID:           table = new TypeDef(file, rows); break;
-	case FieldTrans.ID:        table = new FieldTrans(file, rows); break;
-	case FieldDef.ID:          table = new FieldDef(file, rows); break;
-	case MethodTrans.ID:       table = new MethodTrans(file, rows); break;
-	case MethodDef.ID:         table = new MethodDef(file, rows); break;
-	case ParamDef.ID:          table = new ParamDef(file, rows); break;
-	case InterfaceImpl.ID:     table = new InterfaceImpl(file, rows); break;
-	case MemberRef.ID:         table = new MemberRef(file, rows); break;
-	case Constant.ID:          table = new Constant(file, rows); break;
-	case CustomAttribute.ID:   table = new CustomAttribute(file, rows); break;
-	case FieldMarshal.ID:      table = new FieldMarshal(file, rows); break;
-	case DeclSecurity.ID:      table = new DeclSecurity(file, rows); break;
-	case ClassLayout.ID:       table = new ClassLayout(file, rows); break;
-	case FieldLayout.ID:       table = new FieldLayout(file, rows); break;
-	case StandAloneSig.ID:     table = new StandAloneSig(file, rows); break;
-	case EventMap.ID:          table = new EventMap(file, rows); break;
-	case EventDef.ID:          table = new EventDef(file, rows); break;
-	case PropertyMap.ID:       table = new PropertyMap(file, rows); break;
-	case PropertyDef.ID:       table = new PropertyDef(file, rows); break;
-	case MethodSemantics.ID:   table = new MethodSemantics(file, rows); break;
-	case MethodImpl.ID:        table = new MethodImpl(file, rows); break;
-	case ModuleRef.ID:         table = new ModuleRef(file, rows); break;
-	case TypeSpec.ID:          table = new TypeSpec(file, rows); break;
-	case ImplMap.ID:           table = new ImplMap(file, rows); break;
-	case FieldRVA.ID:          table = new FieldRVA(file, rows); break;
-	case AssemblyDef.ID:       table = new AssemblyDef(file, rows); break;
-	case AssemblyProcessor.ID: table = new AssemblyProcessor(file, rows); break;
-	case AssemblyOS.ID:        table = new AssemblyOS(file, rows); break;
-	case AssemblyRef.ID:       table = new AssemblyRef(file, rows); break;
-	case AssemblyRefProcessor.ID:
-	    table = new AssemblyRefProcessor(file, rows); break;
-	case AssemblyRefOS.ID:     table = new AssemblyRefOS(file, rows); break;
-	case FileDef.ID:           table = new FileDef(file, rows); break;
-	case ExportedType.ID:      table = new ExportedType(file, rows); break;
-	case ManifestResource.ID:  table = new ManifestResource(file, rows); break;
-	case NestedClass.ID:       table = new NestedClass(file, rows); break;
-    case GenericParam.ID:
-        table = new GenericParam(file, rows);
-        break;
-    case MethodSpec.ID:
-        table = new MethodSpec(file, rows);
-        break;
-    case GenericParamConstraint.ID:
-        table = new GenericParamConstraint(file, rows);
-        break;
-	default:
-	    table = new Empty(id);
-	}
-// 	System.out.println("created table " + table.getName() + " with "
-// 			   + table.rows + " rows");
-	return table;
-    }
-
-
-    //##########################################################################
-    // public fields
-
-    /** Number of rows in the table. */
-    public final int rows;
-
-    /** Table ID as specified in Partition II. */
-    public final int id;
-
-    /** The file to which the table belongs. */
-    protected final PEFile file;
-
-    /** Memory mapped buffer wrapping the table. */
-    protected ByteBuffer buffer;
-
-    /**
-     * Specifies whether a new memory-mapped byte buffer should be created
-     * for this table.
-     */
-    protected boolean newMapping = false;
-
-    /** Tells whether the table is indexed by 2-byte (short) integer
-     *  or by 4-byte integer. */
-    public final boolean isShort;
-
-    private int rowSize = -1;
-
-    // the starting position of the table relative to the beginning of the file
-    private long start = -1;
-
-    // the number of the row that can be accessed via the fields of the table
-    private int currentRow = 0;
-
-    //##########################################################################
-
-    protected Table(PEFile file, int id, int rows) {
-	this.file = file;
-	this.id = id;
-	this.rows = rows;//file.readInt();
-	this.isShort = rows < (1 << 16);
-// 	assert ((1L << id) & VALID_TABLES_MASK) != 0
-// 	    : "Table does not have a vaid ID: " + byte2hex(id);
-    }
-
-    /**
-     * Additional table initialization.
-     * @return the starting position of the next table in the stream.
-     */
-    public final long init(long start) {
-	if (rows < 1)
-	    return start;
-	if (this.start == -1)
-	    this.start = start;
-	else throw new RuntimeException
-		 ("Cannot re-initialize table \'" + getTableName() + "\'");
-	rowSize = getRowSize();
-	int size = rows * rowSize();
-	buffer = this.newMapping ? file.mapBuffer(start, size)
-	    : file.getBuffer(start, size);
-	return start + size;
-    }
-
-
-    public final String getTableName() {
-	return 0 <= id && id < MAX_NUMBER ? tableName[id] : "<NoTable>";
-    }
-
-    /**
-     * @return the size of the row in bytes
-     */
-    public final int rowSize() {
-	return rowSize;
-    }
-
-    /**
-     * if the underlying buffer is memory-mapped, load its contents into memory
-     */
-    public void load() {
-	if (buffer instanceof MappedByteBuffer)
-	    ((MappedByteBuffer)buffer).load();
-    }
-
-    /***/
-    public final int readByte() {
-	return (buffer.get() + 0x100) & 0xff;
-    }
-
-    /***/
-    public final int readShort() {
-	return (buffer.getShort() + 0x10000) & 0xffff;
-    }
-
-    /***/
-    public final int readInt() {
-	return buffer.getInt();
-    }
-
-    /***/
-    public final int readStringIndex() {
-	return file.StringIsShort ? readShort() : readInt();
-    }
-
-    /***/
-    public final int readBlobIndex() {
-	return file.BlobIsShort ? readShort() : readInt();
-    }
-
-    /***/
-    public final int readGUIDIndex() {
-	return file.GUIDIsShort ? readShort() : readInt();
-    }
-
-    /***/
-    public final int readTableIndex(int tableId) {
-	return file.getTable(tableId).isShort ? readShort() : readInt();
-    }
-
-    /***/
-    public final int readTableSetIndex(int tableSetId) {
-	return file.indexSize[tableSetId] == 2 ? readShort() : readInt();
-    }
-
-    /** Read the specified row and populate the fields of the instance. */
-    public final void readRow(int row) {
-	seekRow(row);
-	int lastSeek = buffer.position();
-	populateFields();
-	int rowSizeRead = (int) (buffer.position() - lastSeek);
-	if (rowSizeRead != rowSize())
-	    throw new RuntimeException("Table ID=0x" + PEFile.byte2hex(id) +
-				       ": read row size = " + rowSizeRead +
-				       "; expected row size = " + rowSize());
-	currentRow = row;
-    }
-
-    /** Seeks in the file the position of the specified row. */
-    protected final void seekRow(int row) {
-	assert row > 0 && row <= rows
-	    : "Index " + row + " is not within the table with #rows = " + rows;
-	buffer.position((row - 1)* rowSize());
-    }
-
-    public final int currentRow() { return currentRow; }
-
-    public final void nextRow() { readRow(currentRow() + 1); }
-
-    //##########################################################################
-    // abstract members
-
-    /** Assigns values to the fields of the class. */
-    protected abstract void populateFields();
-
-    /** Returns the size of a row in bytes. */
-    protected abstract int getRowSize();
-
-    //##########################################################################
-    // a table with 0 rows
-
-    private static final class Empty extends Table {
-	public Empty(int id) {
-	    super(null, id, 0);
-	}
-	protected int getRowSize() { return 0; }
-	protected void populateFields() {
-	    throw new RuntimeException("Table 0x" + PEFile.byte2hex(id));
-	}
-    }
-
-    //##########################################################################
-    // table Module; ID=0x00; p115, 21.27
-
-    public static final class ModuleDef extends Table {
-	public static final int ID = 0x00;
-
-	/** 2-byte value; reserved - shall be 0. */
-	public int Generation;
-
-	/** Index into #String. */
-	public int Name;
-
-	/** Index into #GUID; used to distinguish between
-	 *  two versions of the same module. */
-	public int Mvid;
-
-	/** Index into #GUID; reserved - shall be 0. */
-	public int EncId;
-
-	/** Index into #GUID; reserved - shall be 0. */
-	public int EncBaseId;
-
-	public ModuleDef(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Generation = readShort();
-	    Name = readStringIndex();
-	    Mvid = readGUIDIndex();
-	    EncId = readGUIDIndex();
-	    EncBaseId = readGUIDIndex();
-	}
-
-	protected int getRowSize() {
-	    return 2 + file.getStringIndexSize() + 3*file.getGUIDIndexSize();
-	}
-
-	public String getName() {
-	    return file.getString(Name);
-	}
-
-    } // class ModuleDef
-
-    //##########################################################################
-    // table TypeRef; ID=0x01; p125, 21.35
-
-    public static final class TypeRef extends Table {
-	public static final int ID = 0x1;
-
-	/** A ResolutionScope coded index. */
-	public int ResolutionScope;
-
-	/** Index into #String. */
-	public int Name;
-
-	/** Index into #String. */
-	public int Namespace;
-
-	public TypeRef(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    ResolutionScope = readTableSetIndex(_ResolutionScope);
-	    Name = readStringIndex();
-	    Namespace = readStringIndex();
-	}
-
-	protected int getRowSize() {
-	    return file.getTableSetIndexSize(_ResolutionScope) +
-		2 * file.getStringIndexSize();
-	}
-
-	public String getFullName() {
-	    String namespace = file.getString(Namespace);
-	    return namespace.length() == 0 ? file.getString(Name)
-		: namespace + "." + file.getString(Name);
-	}
-
-    } // class TypeRef
-
-    //##########################################################################
-    // table TypeDef; ID=0x02; p120, 21.34
-
-    public static final class TypeDef extends Table {
-	public static final int ID = 0x02;
-
-	/** 4-byte bitmask of type TypeAttributes (22.1.14). */
-	public int Flags;
-
-	/** Index into #String. */
-	public int Name;
-
-	/** Index into #String. */
-	public int Namespace;
-
-	/** TypeDefOrRef coded index. */
-	public int Extends;
-
-	/** Index into Field table.
-	 */
-	public int FieldList;
-
-	/** Index into Method table. */
-	public int MethodList;
-
-
-	public TypeDef(PEFile file, int rows) {
-	    super(file, ID, rows);
-	    this.newMapping = true;
-	}
-
-	public String getFullName() {
-	    String namespace = file.getString(Namespace);
-	    return namespace.length() == 0 ? file.getString(Name)
-		: namespace + "." + file.getString(Name);
-	}
-
-	protected void populateFields() {
-	    Flags = readInt();
-	    Name = readStringIndex();
-	    Namespace = readStringIndex();
-	    Extends = readTableSetIndex(_TypeDefOrRef);
-	    FieldList = readTableIndex(FieldDef.ID);
-	    MethodList = readTableIndex(MethodDef.ID);
-	}
-
-	protected int getRowSize() {
-	    return 4 + 2*file.getStringIndexSize() +
-		file.getTableSetIndexSize(_TypeDefOrRef) +
-		file.getTableIndexSize(FieldDef.ID) +
-		file.getTableIndexSize(MethodDef.ID);
-	}
-
-    } // class TypeDef
-
-    //##########################################################################
-    // Table FieldTrans; ID=0x03; undocumented
-
-    /**
-     * Undocumented table. Appears to be used for translating the Field entry
-     * in the TypeDef(0x02) table into the real entry in the Fields(0x04) table
-     */
-    public static final class FieldTrans extends Table {
-	public static final int ID = 0x03;
-
-	public int Field;
-
-	public FieldTrans(PEFile file, int rows) {
-	    super(file, ID, rows);
-	    newMapping = true;
-	}
-
-	protected void populateFields() {
-	    Field = readTableIndex(FieldDef.ID);
-	}
-
-	protected int getRowSize() {
-	    return file.getTableIndexSize(FieldDef.ID);
-	}
-
-    }
-
-    //##########################################################################
-    // table Field; ID=0x04; p102, 21.15
-
-    public static final class FieldDef extends Table {
-	public static final int ID = 0x04;
-
-	/** 2-byte bitmask of type FieldAttributes (22.1.5). */
-	public int Flags;
-
-	/** Index into #String. */
-	public int Name;
-
-	/** Index into #Blob. */
-	public int Signature;
-
-	public FieldDef(PEFile file, int rows) {
-	    super(file, ID, rows);
-	    newMapping = true;
-	}
-
-	protected void populateFields() {
-	    Flags = readShort();
-	    Name = readStringIndex();
-	    Signature = readBlobIndex();
-	}
-
-	protected int getRowSize() {
-	    return 2 + file.getStringIndexSize() + file.getBlobIndexSize();
-	}
-
-	public String getName() { return file.getString(Name); }
-
-	public Sig getSignature() { return file.getSignature(Signature); }
-
-    } //class FieldDef
-
-    //##########################################################################
-    // Table MethodTrans; ID=0x05; undocumented
-
-    /**
-     * Undocumented table. Appears to be used for translating the Method entry
-     * in the TypeDef(0x02) table into the real entry in the Methods(0x06) table
-     */
-    public static final class MethodTrans extends Table {
-	public static final int ID = 0x05;
-
-	public int Method;
-
-	public MethodTrans(PEFile file, int rows) {
-	    super(file, ID, rows);
-	    newMapping = true;
-	}
-
-	protected void populateFields() {
-	    Method = readTableIndex(MethodDef.ID);
-	}
-
-	protected int getRowSize() {
-	    return file.getTableIndexSize(MethodDef.ID);
-	}
-
-    }
-
-    //##########################################################################
-    // table MethodDef; ID=0x06; p110, 21.24
-
-    public static final class MethodDef extends Table {
-	public static final int ID = 0x06;
-
-	/** 4-byte constant. */
-	public int RVA;
-
-	/** 2-byte bitmask of type MethodImplAttributes (22.1.10). */
-	public int ImplFlags;
-
-	/** 2-byte bitmask of type MethodAttributes (22.1.9). */
-	public int Flags;
-
-	/** Index into #String. */
-	public int Name;
-
-	/** Index into #Blob. */
-	public int Signature;
-
-	/** Index into Param Table. */
-	public int ParamList;
-
-	public MethodDef(PEFile file, int rows) {
-	    super(file, ID, rows);
-	    newMapping = true;
-	}
-
-	protected void populateFields() {
-	    RVA = readInt();
-	    ImplFlags = readShort();
-	    Flags = readShort();
-	    Name = readStringIndex();
-	    Signature = readBlobIndex();
-	    ParamList = readTableIndex(ParamDef.ID);
-	}
-
-	protected int getRowSize() {
-	    return 8 + file.getStringIndexSize() + file.getBlobIndexSize() +
-		file.getTableIndexSize(ParamDef.ID);
-	}
-
-	public String getName() { return file.getString(Name); }
-
-	public Sig getSignature() { return file.getSignature(Signature); }
-    } // class Method
-
-    //##########################################################################
-    // table Param; ID=0x08; p116, 21.30
-
-    public static final class ParamDef extends Table {
-	public static final int ID = 0x08;
-
-	/** 2-byte bitmask of type ParamAttributes (22.1.12). */
-	public int Flags;
-
-	/** 2-byte constant. */
-	public int Sequence;
-
-	/** Index into #String. */
-	public int Name;
-
-	public ParamDef(PEFile file, int rows) {
-	    super(file, ID, rows);
-	    newMapping = true;
-	}
-
-	protected void populateFields() {
-	    Flags = readShort();
-	    Sequence = readShort();
-	    Name = readStringIndex();
-	}
-
-	protected int getRowSize() { return 4 + file.getStringIndexSize(); }
-
-	public String getName() { return file.getString(Name); }
-
-    } // class Param
-
-    //##########################################################################
-    // table InterfaceImpl, ID=0x09; p107, 21.21
-
-    public static final class InterfaceImpl extends Table {
-	public static final int ID = 0x09;
-
-	/** Index into TypeDef table. */
-	public int Class;
-
-	/** Index into TypeDefOrRef table set. */
-	public int Interface;
-
-	public InterfaceImpl(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Class = readTableIndex(TypeDef.ID);
-	    Interface = readTableSetIndex(_TypeDefOrRef);
-	}
-
-	protected int getRowSize() {
-	    return file.getTableIndexSize(TypeDef.ID) +
-		file.getTableSetIndexSize(_TypeDefOrRef);
-	}
-
-	/** Finds the index of the first InterfaceImpl entry for the given type.
-	 * @param targetIndex index into the TypeDef table of the type to look for
-	 * @return the index of the first interface entry for the given type;
-	 *         0 if the type doesn't implement any interfaces
-	 */
-
-	// binary search implementation
-// 	public int findType(int targetIndex) {
-// 	    int l = 1, h = rows;
-// 	    int classIndex;
-// 	    while (l <= h) {
-// 		int mid = (l + h) / 2;
-// 		seekRow(mid);
-// 		classIndex = readTableIndex(TypeDef.ID);
-// 		if (targetIndex <= classIndex) h = mid - 1;
-// 		else l = mid + 1;
-// 	    }
-// 	    return (targetIndex == classIndex) ? h : 0;
-// 	}
-
-	//linear search implementation
-	public int findType(int targetIndex) {
-	    for (int i = 1; i <= rows; i++) {
-		seekRow(i);
-		if (targetIndex == readTableIndex(TypeDef.ID))
-		    return i;
-	    }
-	    return 0;
-	}
-
-    } // class InterfaceImpl
-
-    //##########################################################################
-    // table MemberRef; ID=0x0a; p109, 21.23
-
-    public static final class MemberRef extends Table {
-	public static final int ID = 0x0a;
-
-	/** Index into MemberRefParent table set. */
-	public int Class;
-
-	/** Index into #String. */
-	public int Name;
-
-	/** Index into #Blob. */
-	public int Signature;
-
-	public MemberRef(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Class = readTableSetIndex(_MemberRefParent);
-	    Name = readStringIndex();
-	    Signature = readBlobIndex();
-	}
-
-	protected int getRowSize() {
-	    return file.getTableSetIndexSize(_MemberRefParent) +
-		file.getStringIndexSize() + file.getBlobIndexSize();
-	}
-
-	public String getName() {
-	    return file.getString(Name);
-	}
-
-	public Sig getSignature() {
-	    return file.getSignature(Signature);
-	}
-
-    } // class MemberRef
-
-    //##########################################################################
-    // table Constant; ID=0x0b; p95, 21.9
-
-    public static final class Constant extends Table {
-	public static final int ID = 0x0b;
-
-	/** 1-byte constant followed by 1-byte padding 0 (see 22.1.15). */
-	public int Type;
-
-	/** Index into HasConst table set. */
-	public int Parent;
-
-	/** Index into #Blob. */
-	public int Value;
-
-	public Constant(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Type = readShort();
-	    Parent = readTableSetIndex(_HasConstant);
-	    Value = readBlobIndex();
-	}
-
-	protected int getRowSize() {
-	    return 2 + file.getTableSetIndexSize(_HasConstant) +
-		file.getBlobIndexSize();
-	}
-
-	public Object getValue() {
-	    if (Type == Signature.ELEMENT_TYPE_CLASS)
-		return null;
-	    return file.Blob.getConstant(Type, Value);
-	}
-
-
-    } // class Constant
-
-    //##########################################################################
-    // table CustomAttribute; ID=0x0c; p95, 21.10
-
-    public static final class CustomAttribute extends Table {
-	public static final int ID = 0x0c;
-
-	/** Index into any metadata table, except the CustomAttribute itself;
-	 *  more precisely - index into HasCustomAttribute table set.
-	 */
-	public int Parent;
-
-	/** Index into the CustomAttributeType table set. */
-	public int Type;
-
-	/** Index into #Blob. */
-	public int Value;
-
-	public CustomAttribute(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Parent = readTableSetIndex(_HasCustomAttribute);
-	    Type = readTableSetIndex(_CustomAttributeType);
-	    Value = readBlobIndex();
-	}
-
-	protected int getRowSize() {
-	    return file.getTableSetIndexSize(_HasCustomAttribute) +
-		file.getTableSetIndexSize(_CustomAttributeType) +
-		file.getBlobIndexSize();
-	}
-
-	public byte[] getValue() {
-	    return Value == 0 ? null : file.getBlob(Value);
-	}
-    } // class CustomAttribute
-
-    //##########################################################################
-    // table FieldMarshal; ID=0x0d; p105, 21.17
-
-    public static final class FieldMarshal extends Table {
-	public static final int ID = 0x0d;
-
-	/** Index into HasFieldMarshal table set. */
-	public int Parent;
-
-	/** Index into #Blob. */
-	public int NativeType;
-
-	public FieldMarshal(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Parent = readTableSetIndex(_HasFieldMarshal);
-	    NativeType = readBlobIndex();
-	}
-
-	protected int getRowSize() {
-	    return file.getTableSetIndexSize(_HasFieldMarshal) +
-		file.getBlobIndexSize();
-	}
-
-    } // class FieldMarshal
-
-    //##########################################################################
-    // table DeclSecurity; ID=0x0e; p97, 21.11
-
-    public static final class DeclSecurity extends Table {
-	public static final int ID = 0x0e;
-
-	/** 2-byte value. */
-	public int Action;
-
-	/** Index into HasDeclSecurity table set. */
-	public int Parent;
-
-	/** Index into #Blob. */
-	public int PermissionSet;
-
-	public DeclSecurity(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Action = readShort();
-	    Parent = readTableSetIndex(_HasDeclSecurity);
-	    PermissionSet = readBlobIndex();
-	}
-
-	protected int getRowSize() {
-	    return 2 + file.getTableSetIndexSize(_HasDeclSecurity) +
-		file.getBlobIndexSize();
-	}
-
-    } // class DeclSecurity
-
-    //##########################################################################
-    // table ClassLayout; ID=0x0f, p92, 21.8
-
-    public static final class ClassLayout extends Table {
-	public static final int ID = 0x0f;
-
-	/** 2-byte constant. */
-	public int PackingSize;
-
-	/** 4-byte constant. */
-	public int ClassSize;
-
-	/** Index into TypeDef table. */
-	public int Parent;
-
-	public ClassLayout(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    PackingSize = readShort();
-	    ClassSize = readInt();
-	    Parent = readTableIndex(TypeDef.ID);
-	}
-
-	protected int getRowSize() {
-	    return 6 + file.getTableIndexSize(TypeDef.ID);
-	}
-
-    } // class ClassLayout
-
-    //##########################################################################
-    // table FieldLayout; ID=0x10; p104, 21.16
-
-    public static final class FieldLayout extends Table {
-	public static final int ID = 0x10;
-
-	/** 4-byte constant. */
-	public int Offset;
-
-	/** Index into the Field table. */
-	public int Field;
-
-	public FieldLayout(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Offset = readInt();
-	    Field = readTableIndex(FieldDef.ID);
-	}
-
-	protected int getRowSize() {
-	    return 4 + file.getTableIndexSize(FieldDef.ID);
-	}
-
-    } // class FieldLayout
-
-    //##########################################################################
-    // table StandAloneSig; ID=0x11; p119, 21.33
-
-    public static final class StandAloneSig extends Table {
-	public static final int ID = 0x11;
-
-	/** Index into #Blob. */
-	public int Signature;
-
-	public StandAloneSig(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Signature = readBlobIndex();
-	}
-
-	protected int getRowSize() { return file.getBlobIndexSize(); }
-
-    } // class StandAloneSig
-
-    //##########################################################################
-    // table EventMap; ID=0x12; p99, 21.12
-
-    public static final class EventMap extends Table {
-	public static final int ID = 0x12;
-
-	/** Index into the TypeDef table. */
-	public int Parent;
-
-	/** Index into the Event table. */
-	public int EventList;
-
-	public EventMap(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Parent = readTableIndex(TypeDef.ID);
-	    EventList = readTableIndex(EventDef.ID);
-	}
-
-	protected int getRowSize() {
-	    return file.getTableIndexSize(TypeDef.ID) +
-		file.getTableIndexSize(EventDef.ID);
-	}
-
-    } // class EventMap
-
-    //##########################################################################
-    // table Event; ID=0x14; p99, 21.13
-
-    public static final class EventDef extends Table {
-	public static final int ID = 0x14;
-
-	/** 2-byte bitmask of type EventAttribute (22.1.4). */
-	public int EventFlags;
-
-	/** Index into #String. */
-	public int Name;
-
-	/** Index into TypeDefOrRef table set. [This corresponds to the Type
-	 *  of the event; it is not the Type that owns the event]
-	 */
-	public int EventType;
-
-	public EventDef(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    EventFlags = readShort();
-	    Name = readStringIndex();
-	    EventType = readTableSetIndex(_TypeDefOrRef);
-	}
-
-	protected int getRowSize() {
-	    return 2 + file.getStringIndexSize() +
-		file.getTableSetIndexSize(_TypeDefOrRef);
-	}
-
-	public String getName() { return file.getString(Name); }
-
-    } // class EventDef
-
-    //##########################################################################
-    // table PropertyMap; ID=0x15; p119, 21.32
-
-    public static final class PropertyMap extends Table {
-	public static final int ID = 0x15;
-
-	/** Index into the TypeDef table. */
-	public int Parent;
-
-	/** Index into the Property table. */
-	public int PropertyList;
-
-	public PropertyMap(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Parent = readTableIndex(TypeDef.ID);
-	    PropertyList = readTableIndex(PropertyDef.ID);
-	}
-
-	protected int getRowSize() {
-	    return file.getTableIndexSize(TypeDef.ID) +
-		file.getTableIndexSize(PropertyDef.ID);
-	}
-
-    } // class PropertyMap
-
-    //##########################################################################
-    // table Property; ID=0x17; p117, 21.31
-
-    public static final class PropertyDef extends Table {
-	public static final int ID = 0x17;
-
-	/** 2-byte bitmask of type PropertyAttributes (22.1.13). */
-	public int Flags;
-
-	/** Index into #String. */
-	public int Name;
-
-	/** Index into #Blob. (Indexes the signature in the #Blob) */
-	public int Type;
-
-	public PropertyDef(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Flags = readShort();
-	    Name = readStringIndex();
-	    Type = readBlobIndex();
-	}
-
-	protected int getRowSize() {
-	    return 2 + file.getStringIndexSize() +
-		file.getBlobIndexSize();
-	}
-
-	public String getName() { return file.getString(Name); }
-
-	public Sig getSignature() { return file.getSignature(Type); }
-
-    } // class PropertyDef
-
-    //##########################################################################
-    // table MethodSemantics; ID=0x18; p114, 21.26
-
-    public static final class MethodSemantics extends Table {
-	public static final int ID = 0x18;
-
-	/** 2-byte bitmask of type MethodSemanticsAttribute (22.1.11). */
-	public int Semantics;
-
-	/** Index into the Method table. */
-	public int Method;
-
-	/** Index into Event or Property table (HasSemantics table set). */
-	public int Association;
-
-	public MethodSemantics(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Semantics = readShort();
-	    Method = readTableIndex(MethodDef.ID);
-	    Association = readTableSetIndex(_HasSemantics);
-	}
-
-	protected int getRowSize() {
-	    return 2 + file.getTableIndexSize(MethodDef.ID) +
-		file.getTableSetIndexSize(_HasSemantics);
-	}
-
-        public boolean isGetter()   { return (Semantics & Getter)   != 0; }
-        public boolean isSetter()   { return (Semantics & Setter)   != 0; }
-        public boolean isOther()    { return (Semantics & Other)    != 0; }
-        public boolean isAddOn()    { return (Semantics & AddOn)    != 0; }
-        public boolean isRemoveOn() { return (Semantics & RemoveOn) != 0; }
-        public boolean isFire()     { return (Semantics & Fire)     != 0; }
-
-        private static final short Setter   = (short)0x0001;
-        private static final short Getter   = (short)0x0002;
-        private static final short Other    = (short)0x0004;
-        private static final short AddOn    = (short)0x0008;
-        private static final short RemoveOn = (short)0x0010;
-        private static final short Fire     = (short)0x0020;
-
-    } // class MethodSemantics
-
-
-    //##########################################################################
-    // table MethodImpl; ID=0x19; p113, 21.25
-
-    public static final class MethodImpl extends Table {
-	public static final int ID = 0x19;
-
-	/** Index into the TypeDef table. */
-	public int Class;
-
-	/** Index into MethodDefOrRef table set. */
-	public int MethodBody;
-
-	/** Index into MethodDefOrRef table set. */
-	public int MethodDeclaration;
-
-	public MethodImpl(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Class = readTableIndex(TypeDef.ID);
-	    MethodBody = readTableSetIndex(_MethodDefOrRef);
-	    MethodDeclaration = readTableSetIndex(_MethodDefOrRef);
-	}
-
-	protected int getRowSize() {
-	    return file.getTableIndexSize(TypeDef.ID) +
-		2 * file.getTableSetIndexSize(_MethodDefOrRef);
-	}
-
-    } // class MethodImpl
-
-    //##########################################################################
-    // table ModuleRef; ID=0x1a; p116, 21.28
-
-    public static final class ModuleRef extends Table {
-	public static final int ID = 0x1a;
-
-	/** Index into #String. */
-	public int Name;
-
-	public ModuleRef(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Name = readStringIndex();
-	}
-
-	protected int getRowSize() { return file.getStringIndexSize(); }
-
-	public String getName() { return file.getString(Name); }
-
-    } // class ModuleRef
-
-    //##########################################################################
-    // table TypeSpec; ID=0x1b; p126, 21.36
-
-    public static final class TypeSpec extends Table {
-	public static final int ID = 0x1b;
-
-	/** Index into #Blob, where the blob is formatted
-	 *  as specified in 22.2.15
-	 */
-	public int Signature;
-
-	public TypeSpec(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Signature = readBlobIndex();
-	}
-
-	protected int getRowSize() { return file.getBlobIndexSize(); }
-
-	public Sig getSignature() { return file.getSignature(Signature); }
-    } // class TypeSpec
-
-    //##########################################################################
-    // table ImplMap; ID=0x1c; p107, 21.20
-
-    public static final class ImplMap extends Table {
-	public static final int ID = 0x1c;
-
-	/** 2-byte bitmask of type PInvokeAttributes (22.1.7). */
-	public int MappingFlags;
-
-	/** Index into MemberForwarded table set. */
-	public int MemberForwarded;
-
-	/** Index into #String. */
-	public int ImportName;
-
-	/** Index into the ModuleRef table. */
-	public int ImportScope;
-
-	public ImplMap(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    MappingFlags = readShort();
-	    MemberForwarded = readTableSetIndex(_MemberForwarded);
-	    ImportName = readStringIndex();
-	    ImportScope = readTableIndex(ModuleRef.ID);
-	}
-
-	protected int getRowSize() {
-	    return 2 + file.getTableSetIndexSize(_MemberForwarded) +
-		file.getStringIndexSize() +
-		file.getTableIndexSize(ModuleRef.ID);
-	}
-
-    } // class ImplMap
-
-    //##########################################################################
-    // table FieldRVA; ID=0x1d; p106, 21.18
-
-    public static final class FieldRVA extends Table {
-	public static final int ID = 0x1d;
-
-	/** 4-byte constant. */
-	public int RVA;
-
-	/** Index into the Field table. */
-	public int Field;
-
-	public FieldRVA(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    RVA = readInt();
-	    Field = readTableIndex(Table.FieldDef.ID);
-	}
-
-	protected int getRowSize() {
-	    return 4 + file.getTableIndexSize(FieldDef.ID);
-	}
-
-    }
-
-    //##########################################################################
-    // table Assembly; ID=0x20; p90, 21.2
-
-    public static final class AssemblyDef extends Table {
-	public static final int ID = 0x20;
-
-	/** 4-byte constant of type AssemblyHashAlgorithm, clause 22.1.1 */
-	public int HashAlgId;
-
-	/** 2-byte constant */
-	public int MajorVersion;
-
-	/** 2-byte constant */
-	public int MinorVersion;
-
-	/** 2-byte constant */
-	public int BuildNumber;
-
-	/** 2-byte constant */
-	public int RevisionNumber;
-
-	/** 4-byte constant */
-	public int Flags;
-
-	/** index into #Blob */
-	public int PublicKey;
-
-	/** index into #String */
-	public int Name;
-
-	/** index into #String */
-	public int Culture;
-
-	public AssemblyDef(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    HashAlgId = readInt();
-	    MajorVersion = readShort();
-	    MinorVersion = readShort();
-	    BuildNumber = readShort();
-	    RevisionNumber = readShort();
-	    Flags = readInt();
-	    PublicKey = readBlobIndex();
-	    Name = readStringIndex();
-	    Culture = readStringIndex();
-	}
-
-	protected int getRowSize() {
-	    return 16 + file.getBlobIndexSize() + 2*file.getStringIndexSize();
-	}
-
-    } // class AssemblyDef
-
-    //##########################################################################
-    // table AssemblyProcessor; ID=0x21; p91, 21.4
-
-    public static final class AssemblyProcessor extends Table {
-	public static final int ID = 0x21;
-
-	/** 4-byte constant. */
-	public int Processor;
-
-	public AssemblyProcessor(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Processor = readInt();
-	}
-
-	protected int getRowSize() { return 4; }
-
-    }
-
-    //##########################################################################
-    // table AssemblyOS; ID = 0x22; p90, 21.3
-
-    public static final class AssemblyOS extends Table {
-	public static final int ID = 0x22;
-
-	/** 4-byte constant. */
-	public int OSPlatformID;
-
-	/** 4-byte constant. */
-	public int OSMajorVersion;
-
-	/** 4-byte constant. */
-	public int OSMinorVersion;
-
-	public AssemblyOS(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    OSPlatformID = readInt();
-	    OSMajorVersion = readInt();
-	    OSMinorVersion = readInt();
-	}
-
-	protected int getRowSize() { return 12; }
-
-    }
-
-    //##########################################################################
-    // table AssemblyRef; ID = 0x23; pp91, 21.5
-
-    public static final class AssemblyRef extends Table {
-	public static final int ID = 0x23;
-
-	/** 2-byte constant. */
-	public int MajorVersion;
-
-	/** 2-byte constant. */
-	public int MinorVersion;
-
-	/** 2-byte constant. */
-	public int BuildNumber;
-
-	/** 2-byte constant. */
-	public int RevisionNumber;
-
-	/** 4-byte bitmask of type AssemblyFlags (22.1.2). */
-	public int Flags;
-
-	/** index into #Blob. */
-	public int PublicKeyOrToken;
-
-	/** index into #String. */
-	public int Name;
-
-	/** index into #String. */
-	public int Culture;
-
-	/** index into #Blob. */
-	public int HashValue;
-
-	public AssemblyRef(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    MajorVersion = readShort();
-	    MinorVersion = readShort();
-	    BuildNumber = readShort();
-	    RevisionNumber = readShort();
-	    Flags = readInt();
-	    PublicKeyOrToken = readBlobIndex();
-	    Name = readStringIndex();
-	    Culture = readStringIndex();
-	    HashValue = readBlobIndex();
-	}
-
-	protected int getRowSize() {
-	    return 12 + 2*file.getBlobIndexSize() + 2*file.getStringIndexSize();
-	}
-
-	public String getName() { return file.getString(Name); }
-    }
-
-    //##########################################################################
-    // table AssemblyRefProcessor; ID=0x24; p92, 21.7
-
-    public static final class AssemblyRefProcessor extends Table {
-	public static final int ID = 0x24;
-
-	/** 4-byte constant. */
-	public int Processor;
-
-	/** Index into the AssemblyRef table. */
-	public int AssemblyRef;
-
-	public AssemblyRefProcessor(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Processor = readInt();
-	    AssemblyRef = readTableIndex(Table.AssemblyRef.ID);
-	}
-
-	protected int getRowSize() {
-	    return 4 + file.getTableIndexSize(Table.AssemblyRef.ID);
-	}
-
-    } // class AssemblyRefProcessor
-
-    //##########################################################################
-    // table AssemblyRefOS; ID=0x25; p92, 21.6
-
-    public static final class AssemblyRefOS extends Table {
-	public static final int ID = 0x25;
-
-	/** 4-byte constant. */
-	public int OSPlatformId;
-
-	/** 4-byte constant. */
-	public int OSMajorVersion;
-
-	/** 4-byte constant. */
-	public int OSMinorVersion;
-
-	/** Index into the AssemblyRef table. */
-	public int AssemblyRef;
-
-	public AssemblyRefOS(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    OSPlatformId = readInt();
-	    OSMajorVersion = readInt();
-	    OSMinorVersion = readInt();
-	    AssemblyRef = readTableIndex(Table.AssemblyRef.ID);
-	}
-
-	protected int getRowSize() {
-	    return 12 + file.getTableIndexSize(Table.AssemblyRef.ID);
-	}
-
-    } // class AssemblyRefOS
-
-    //##########################################################################
-    // table File; ID=0x26; p106, 21.19
-
-    public static final class FileDef extends Table {
-	public static final int ID = 0x26;
-
-	/** 4-byte bitmask of type FileAttributes (22.1.6). */
-	public int Flags;
-
-	/** Index into #String. */
-	public int Name;
-
-	/** Index into #Blob. */
-	public int HashValue;
-
-	public FileDef(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Flags = readInt();
-	    Name = readStringIndex();
-	    HashValue = readBlobIndex();
-	}
-
-	protected int getRowSize() {
-	    return 4 + file.getStringIndexSize() + file.getBlobIndexSize();
-	}
-
-	public String getName() {
-	    return file.getString(Name);
-	}
-
-    } // class FileDef
-
-    //##########################################################################
-    // table ExportedType; ID=0x27; p100, 21.14
-
-    public static final class ExportedType extends Table {
-	public static final int ID = 0x27;
-
-	/** 4-byte bitmask of type TypeAttributes (22.1.14). */
-	public int Flags;
-
-	/** 4-byte index into a TypeDef table of
-	 *  another module in this assembly.
-	 */
-	public int TypeDefId;
-
-	/** Index into #String. */
-	public int TypeName;
-
-	/** Index into #String. */
-	public int TypeNamespace;
-
-	/** Index into one of two tables as follows:
-	 *   - 'File' table, where that entry says which module
-	 *     in the current assembly holds the TypeDef
-	 *   - 'ExportedType' table, where that entry is
-	 *     the enclosing Type of the current nested Type
-	 */
-	public int Implementation;
-
-	public ExportedType(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Flags = readInt();
-	    TypeDefId = readInt();
-	    TypeName = readStringIndex();
-	    TypeNamespace = readStringIndex();
-	    Implementation = readTableSetIndex(_Implementation);
-	}
-
-	protected int getRowSize() {
-	    return 8 + 2*file.getStringIndexSize() +
-		file.getTableSetIndexSize(_Implementation);
-	}
-
-	public String getFullName() {
-	    String namespace = file.getString(TypeNamespace);
-	    return namespace.length() == 0 ? file.getString(TypeName)
-		: namespace + "." + file.getString(TypeName);
-	}
-
-    } // class ExportedType
-
-    //##########################################################################
-    // table ManifestResource; ID=0x28; p108, 21.22
-
-    public static final class ManifestResource extends Table {
-	public static final int ID = 0x28;
-
-	/** 4-byte constant. */
-	public int Offset;
-
-	/** 4-byte bitmask of type ManifestResourceAttributes (22.1.8). */
-	public int Flags;
-
-	/** Index into #String. */
-	public int Name;
-
-	/** Index into the Implementation table set. */
-	public int Implementation;
-
-	public ManifestResource(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    Offset = readInt();
-	    Flags = readInt();
-	    Name = readStringIndex();
-	    Implementation = readTableSetIndex(_Implementation);
-	}
-
-	protected int getRowSize() {
-	    return  8 + file.getStringIndexSize() +
-		file.getTableSetIndexSize(_Implementation);
-	}
-
-    } // class ManifestResource
-
-    //##########################################################################
-    // table NestedClass; ID=0x29; p116, 21.29
-
-    public static final class NestedClass extends Table {
-	public static final int ID = 0x29;
-
-	/** Index into the TypeDef table. */
-	public int NestedClass;
-
-	/** Index into the TypeDef table. */
-	public int EnclosingClass;
-
-	public NestedClass(PEFile file, int rows) { super(file, ID, rows); }
-
-	protected void populateFields() {
-	    NestedClass = readTableIndex(TypeDef.ID);
-	    EnclosingClass = readTableIndex(TypeDef.ID);
-	}
-
-	protected int getRowSize() {
-	    return 2 * file.getTableIndexSize(TypeDef.ID);
-	}
-
-    } // class NestedClass
-
-    //##########################################################################
-    // table GenericParam; ID=0x2a; p137, 22.20
-
-    public static final class GenericParam extends Table {
-        public static final int ID = 0x2a;
-
-        public int Number;
-        public int Flags;
-        public int Owner;  // a TypeOrMethodDef (Sec 24.2.6) coded index
-        public int Name; // a non-null index into the String heap
-
-        private java.util.Map /*<Integer, java.util.Set<Integer>>*/ GenericParamIdxesForMethodDefIdx =
-                new java.util.HashMap();
-        private java.util.Map /*<Integer, java.util.Set<Integer>>*/ GenericParamIdxesForTypeDefIdx  =
-                new java.util.HashMap();
-
-        private void addToMap(int key, int value, java.util.Map IdxesForIdx) {
-            java.util.Set /*<Integer>*/ bucket = (java.util.Set)IdxesForIdx.get(Integer.valueOf(key));
-            if(bucket == null) {
-                bucket = new java.util.HashSet();
-                IdxesForIdx.put(Integer.valueOf(key), bucket);
-            }
-            bucket.add(Integer.valueOf(value));
-        }
-
-        /** Indexes of rows in the GenericParam table representing type parameters defined by the type given by
-         * its row index TypeDefIdx (in the TypeDef table).
-         * No need to position the current record before invoking this method.  */
-        public int[] getTVarIdxes(int TypeDefIdx) {
-            if(!mapsPopulated) {
-                initMaps();
-            }
-            java.util.Set bucket = (java.util.Set)GenericParamIdxesForTypeDefIdx.get(Integer.valueOf(TypeDefIdx));
-            if(bucket == null) {
-                bucket = java.util.Collections.EMPTY_SET;
-            }
-            int[] res = new int[bucket.size()];
-            java.util.Iterator /*<Integer>*/ it = bucket.iterator();
-            for(int i = 0; i < bucket.size(); i++) {
-                res[i] = ((Integer)it.next()).intValue();
-            }
-            return res;
-        }
-
-        /** Indexes of rows in the GenericParam table representing type parameters defined by the method given by
-         * its row index MethodDefIdx (in the MethodDef table)
-         * No need to position the current record before invoking this method.  */
-        public int[] getMVarIdxes(int MethodDefIdx) {
-            if(!mapsPopulated) {
-                initMaps();
-            }
-            java.util.Set bucket = (java.util.Set)GenericParamIdxesForMethodDefIdx.get(Integer.valueOf(MethodDefIdx));
-            if(bucket == null) {
-                bucket = java.util.Collections.EMPTY_SET;
-            }
-            int[] res = new int[bucket.size()];
-            java.util.Iterator /*<Integer>*/ it = bucket.iterator();
-            for(int i = 0; i < bucket.size(); i++) {
-                res[i] = ((Integer)it.next()).intValue();
-            }
-            return res;
-        }
-
-        private boolean mapsPopulated = false;
-
-        private void initMaps() {
-            mapsPopulated = true;
-            for (int currentParamRow = 1; currentParamRow <= rows; currentParamRow++) {
-                int currentOwner = file.GenericParam(currentParamRow).Owner;
-                int targetTableId = Table.getTableId(Table._TypeOrMethodDef, currentOwner);
-                int targetRow = currentOwner >> Table.NoBits[Table._TypeOrMethodDef];
-                if(targetTableId == TypeDef.ID){
-                    addToMap(targetRow, currentParamRow, GenericParamIdxesForTypeDefIdx);
-                } else if(targetTableId == MethodDef.ID) {
-                    addToMap(targetRow, currentParamRow, GenericParamIdxesForMethodDefIdx);
-                } else {
-                    throw new RuntimeException();
-                }
-            }
-        }
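-
-        // Note (illustrative): Owner is a TypeOrMethodDef coded index; the low bits
-        // select the target table (TypeDef or MethodDef) and the remaining high bits
-        // hold the 1-based row number, hence the decoding above via getTableId(...)
-        // and the right shift by Table.NoBits[Table._TypeOrMethodDef].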
-
-        public GenericParam(PEFile file, int rows) {
-            super(file, ID, rows);
-            this.newMapping = true;
-        }
-
-        protected void populateFields() {
-            Number = readShort();
-            Flags = readShort();
-            Owner = readTableSetIndex(_TypeOrMethodDef);
-            Name = readStringIndex();
-        }
-
-        /** This method assumes populateFields() has just been called to set Flags for the current record. */
-        public boolean isInvariant() {
-            /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
-            return (Flags & 0x0003) == 0;
-        }
-
-        /** This method assumes populateFields() has just been called to set Flags for the current record. */
-        public boolean isCovariant() {
-            /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
-            return (Flags & 0x0003) == 1;
-        }
-
-        /** This method assumes populateFields() has just been called to set Flags for the current record. */
-        public boolean isContravariant() {
-            /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
-            return (Flags & 0x0003) == 2;
-        }
-
-        /** This method assumes populateFields() has just been called to set Flags for the current record. */
-        public boolean isReferenceType() {
-            /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
-            return (Flags & 0x001C) == 4;
-        }
-
-        /** This method assumes populateFields() has just been called to set Flags for the current record. */
-        public boolean isValueType() {
-            /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
-            return (Flags & 0x001C) == 8;
-        }
-
-        /** This method assumes populateFields() has just been called to set Flags for the current record. */
-        public boolean hasDefaultConstructor() {
-            /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
-            return (Flags & 0x001C) == 0x0010;
-        }
-
-        protected int getRowSize() {
-            return 2 + 2 + file.getTableSetIndexSize(_TypeOrMethodDef) + file.getStringIndexSize();
-            /* Columns:
-                 Number (2 bytes),
-                 Flags (2 bytes),
-                 Owner (coded token of type TypeOrMethodDef),
-                 Name (offset in the #Strings stream).
-            */
-        }
-
-        public String getName() {
-            return file.getString(Name);
-        }
-
-    } // class GenericParam
-
-
-    //##########################################################################
-    // table GenericParamConstraint; ID=0x2c; p139, 22.20
-
-    public static final class GenericParamConstraint extends Table {
-        public static final int ID = 0x2c;
-
-        public int Owner; // an index into the GenericParam table
-        public int Constraint; // a TypeDefOrRef (Sec 24.2.6) coded index
-
-        public GenericParamConstraint(PEFile file, int rows) {
-            super(file, ID, rows);
-            this.newMapping = true;
-        }
-
-        protected void populateFields() {
-            Owner = readTableIndex(GenericParam.ID);
-            Constraint = readTableSetIndex(_TypeDefOrRef);
-        }
-
-        protected int getRowSize() {
-            return file.getTableIndexSize(GenericParam.ID) + file.getTableSetIndexSize(_TypeDefOrRef);
-            /* Columns:
-                 Owner (RID in the GenericParam table),
-                 Constraint (coded token of type TypeDefOrRef).
-            */
-        }
-
-        private boolean mapPopulated = false;
-
-        /** Indexes of rows (in the TypeDef, TypeRef, or TypeSpec tables) denoting the base class (if any)
-         * and interfaces (if any) that the generic parameter (of TVar or MVar kind) should support,  where
-         * that generic parameter is represented by its index into the GenericParam table. */
-        public int[] getTypeDefOrRefIdxes(int genParamIdx) {
-            if(!mapPopulated) {
-                initMap();
-            }
-            java.util.Set bucket = (java.util.Set)TypeDefOrRefIdxesForGenParamIdx.get(Integer.valueOf(genParamIdx));
-            if(bucket == null) {
-                bucket = java.util.Collections.EMPTY_SET;
-            }
-            int[] res = new int[bucket.size()];
-            java.util.Iterator /*<Integer>*/ it = bucket.iterator();
-            for(int i = 0; i < bucket.size(); i++) {
-                res[i] = ((Integer)it.next()).intValue();
-            }
-            return res;
-        }
-
-
-        private void initMap() {
-            mapPopulated = true;
-            for (int currentConstraintRow = 1; currentConstraintRow <= rows; currentConstraintRow++) {
-                int targetGenericParam = file.GenericParamConstraint(currentConstraintRow).Owner;
-                int value = file.GenericParamConstraint.Constraint;
-                addToMap(targetGenericParam, value);
-            }
-        }
-
-        private java.util.Map /*<Integer, java.util.Set<Integer>>*/ TypeDefOrRefIdxesForGenParamIdx  =
-                new java.util.HashMap();
-
-        private void addToMap(int key, int value) {
-            java.util.Set /*<Integer>*/ bucket = (java.util.Set)TypeDefOrRefIdxesForGenParamIdx.get(Integer.valueOf(key));
-            if(bucket == null) {
-                bucket = new java.util.HashSet();
-                TypeDefOrRefIdxesForGenParamIdx.put(Integer.valueOf(key), bucket);
-            }
-            bucket.add(Integer.valueOf(value));
-        }
-
-    } // class GenericParamConstraint
-
-    //##########################################################################
-    // table MethodSpec; ID=0x2b; p149, in Sec. 22.29 of Partition II
-
-    public static final class MethodSpec extends Table {
-        public static final int ID = 0x2b;
-
-        /* an index into the MethodDef or MemberRef table, specifying which generic method this row is an instantiation of.
-           A MethodDefOrRef (Sec. 24.2.6) coded index  */
-        public int Method;
-
-        /* an index into the Blob heap (Sec. 23.2.15), holding the signature of this instantiation */
-        public int Instantiation;
-
-        public MethodSpec(PEFile file, int rows) {
-            super(file, ID, rows);
-            this.newMapping = true;
-        }
-
-        protected void populateFields() {
-            Method = readTableSetIndex(_MethodDefOrRef);
-            Instantiation = readBlobIndex();
-        }
-
-        protected int getRowSize() {
-            return file.getTableSetIndexSize(_MethodDefOrRef) + file.getBlobIndexSize();
-        }
-
-
-    } // class MethodSpec
-    //##########################################################################
-
-}  // class Table
diff --git a/src/partest/scala/tools/partest/ASMConverters.scala b/src/partest-extras/scala/tools/partest/ASMConverters.scala
similarity index 100%
rename from src/partest/scala/tools/partest/ASMConverters.scala
rename to src/partest-extras/scala/tools/partest/ASMConverters.scala
diff --git a/src/partest-extras/scala/tools/partest/AsmNode.scala b/src/partest-extras/scala/tools/partest/AsmNode.scala
new file mode 100644
index 0000000..e6a9149
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/AsmNode.scala
@@ -0,0 +1,61 @@
+package scala.tools.partest
+
+import scala.collection.JavaConverters._
+import scala.tools.asm
+import asm._
+import asm.tree._
+import java.lang.reflect.Modifier
+
+sealed trait AsmNode[+T] {
+  def node: T
+  def access: Int
+  def desc: String
+  def name: String
+  def signature: String
+  def attrs: List[Attribute]
+  def visibleAnnotations: List[AnnotationNode]
+  def invisibleAnnotations: List[AnnotationNode]
+  def characteristics = f"$name%15s $desc%-30s$accessString$sigString"
+  def erasedCharacteristics = f"$name%15s $desc%-30s$accessString"
+
+  private def accessString     = if (access == 0) "" else " " + Modifier.toString(access)
+  private def sigString        = if (signature == null) "" else " " + signature
+  override def toString        = characteristics
+}
+
+object AsmNode {
+  type AsmMethod = AsmNode[MethodNode]
+  type AsmField = AsmNode[FieldNode]
+  type AsmMember = AsmNode[_]
+
+  implicit class ClassNodeOps(val node: ClassNode) {
+    def fieldsAndMethods: List[AsmMember] = {
+      val xs: List[AsmMember] = (
+           node.methods.asScala.toList.map(x => (x: AsmMethod))
+        ++ node.fields.asScala.toList.map(x => (x: AsmField))
+      )
+      xs sortBy (_.characteristics)
+    }
+  }
+  implicit class AsmMethodNode(val node: MethodNode) extends AsmNode[MethodNode] {
+    def access: Int                                = node.access
+    def desc: String                               = node.desc
+    def name: String                               = node.name
+    def signature: String                          = node.signature
+    def attrs: List[Attribute]                     = node.attrs.asScala.toList
+    def visibleAnnotations: List[AnnotationNode]   = node.visibleAnnotations.asScala.toList
+    def invisibleAnnotations: List[AnnotationNode] = node.invisibleAnnotations.asScala.toList
+  }
+  implicit class AsmFieldNode(val node: FieldNode) extends AsmNode[FieldNode] {
+    def access: Int                                = node.access
+    def desc: String                               = node.desc
+    def name: String                               = node.name
+    def signature: String                          = node.signature
+    def attrs: List[Attribute]                     = node.attrs.asScala.toList
+    def visibleAnnotations: List[AnnotationNode]   = node.visibleAnnotations.asScala.toList
+    def invisibleAnnotations: List[AnnotationNode] = node.invisibleAnnotations.asScala.toList
+  }
+
+  def apply(node: MethodNode): AsmMethodNode = new AsmMethodNode(node)
+  def apply(node: FieldNode): AsmFieldNode   = new AsmFieldNode(node)
+}
diff --git a/src/partest-extras/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
new file mode 100644
index 0000000..7650a89
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
@@ -0,0 +1,167 @@
+package scala.tools.partest
+
+import scala.tools.nsc.util.JavaClassPath
+import scala.collection.JavaConverters._
+import scala.tools.asm.{ClassWriter, ClassReader}
+import scala.tools.asm.tree.{ClassNode, MethodNode, InsnList}
+import java.io.{FileOutputStream, FileInputStream, File => JFile, InputStream}
+import AsmNode._
+
+/**
+ * Provides utilities for inspecting bytecode using the ASM library.
+ *
+ * HOW TO USE
+ * 1. Create a subdirectory in test/files/jvm for your test. Let's name it $TESTDIR.
+ * 2. Create $TESTDIR/BytecodeSrc_1.scala containing the Scala source whose bytecode
+ *    you want to inspect. The '_1' suffix signals to partest that it
+ *    should compile this file first.
+ * 3. Create $TESTDIR/Test.scala:
+ *    import scala.tools.partest.BytecodeTest
+ *    object Test extends BytecodeTest {
+ *      def show {
+ *        // your code that inspects ASM trees and prints values
+ *      }
+ *    }
+ * 4. Create the corresponding check file.
+ *
+ * EXAMPLE
+ * See test/files/jvm/bytecode-test-example for an example of a bytecode test.
+ *
+ */
+abstract class BytecodeTest extends ASMConverters {
+  import instructions._
+
+  /** produce the output to be compared against a checkfile */
+  protected def show(): Unit
+
+  def main(args: Array[String]): Unit = show
+
+  // asserts
+  def sameBytecode(methA: MethodNode, methB: MethodNode) = {
+    val isa = instructions.fromMethod(methA)
+    val isb = instructions.fromMethod(methB)
+    if (isa == isb) println("bytecode identical")
+    else diffInstructions(isa, isb)
+  }
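+
+  // A minimal usage sketch (the class and method names below are hypothetical,
+  // for illustration only):
+  //
+  //   object Test extends BytecodeTest {
+  //     def show {
+  //       val classNode = loadClassNode("SampleClass_1")
+  //       sameBytecode(getMethod(classNode, "methodA"), getMethod(classNode, "methodB"))
+  //     }
+  //   }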
+
+  // Do these classes have all the same methods, with the same names, access,
+  // descriptors and generic signatures? Method bodies are not considered, and
+  // the names of the classes containing the methods are substituted so they do
+  // not appear as differences.
+  def sameMethodAndFieldSignatures(clazzA: ClassNode, clazzB: ClassNode) =
+    sameCharacteristics(clazzA, clazzB)(_.characteristics)
+
+  // Same as sameMethodAndFieldSignatures, but ignoring generic signatures.
+  // This allows for methods which receive the same descriptor but differing
+  // generic signatures. In particular, this happens with value classes,
+  // which get a generic signature where a method written in terms of the
+  // underlying values does not.
+  def sameMethodAndFieldDescriptors(clazzA: ClassNode, clazzB: ClassNode) =
+    sameCharacteristics(clazzA, clazzB)(_.erasedCharacteristics)
+
+  private def sameCharacteristics(clazzA: ClassNode, clazzB: ClassNode)(f: AsmNode[_] => String): Boolean = {
+    val ms1 = clazzA.fieldsAndMethods.toIndexedSeq
+    val ms2 = clazzB.fieldsAndMethods.toIndexedSeq
+    val name1 = clazzA.name
+    val name2 = clazzB.name
+
+    if (ms1.length != ms2.length) {
+      println(s"Different member counts in $name1 and $name2")
+      false
+    }
+    else (ms1, ms2).zipped forall { (m1, m2) =>
+      val c1 = f(m1)
+      val c2 = f(m2).replaceAllLiterally(name2, name1)
+      if (c1 == c2)
+        println(s"[ok] $m1")
+      else
+        println(s"[fail]\n  in $name1: $c1\n  in $name2: $c2")
+
+      c1 == c2
+    }
+  }
+
+  // bytecode is equal modulo local variable numbering
+  def equalsModuloVar(a: Instruction, b: Instruction) = (a, b) match {
+    case _ if a == b => true
+    case (VarOp(op1, _), VarOp(op2, _)) if op1 == op2 => true
+    case _ => false
+  }
+
+  def similarBytecode(methA: MethodNode, methB: MethodNode, similar: (Instruction, Instruction) => Boolean) = {
+    val isa = fromMethod(methA)
+    val isb = fromMethod(methB)
+    if (isa == isb) println("bytecode identical")
+    else if ((isa, isb).zipped.forall { case (a, b) => similar(a, b) }) println("bytecode similar")
+    else diffInstructions(isa, isb)
+  }
+
+  def diffInstructions(isa: List[Instruction], isb: List[Instruction]) = {
+    val len = Math.max(isa.length, isb.length)
+    if (len > 0 ) {
+      val width = isa.map(_.toString.length).max
+      val lineWidth = len.toString.length
+      (1 to len) foreach { line =>
+        val isaPadded = isa.map(_.toString) orElse Stream.continually("")
+        val isbPadded = isb.map(_.toString) orElse Stream.continually("")
+        val a = isaPadded(line-1)
+        val b = isbPadded(line-1)
+
+        println(s"""$line${" " * (lineWidth-line.toString.length)} ${if (a==b) "==" else "<>"} $a${" " * (width-a.length)} | $b""")
+      }
+    }
+  }
+
+// loading
+  protected def getMethod(classNode: ClassNode, name: String): MethodNode =
+    classNode.methods.asScala.find(_.name == name) getOrElse
+      sys.error(s"Didn't find method '$name' in class '${classNode.name}'")
+
+  protected def loadClassNode(name: String, skipDebugInfo: Boolean = true): ClassNode = {
+    val classBytes: InputStream = (for {
+      classRep <- classpath.findClass(name)
+      binary <- classRep.binary
+    } yield binary.input) getOrElse sys.error(s"failed to load class '$name'; classpath = $classpath")
+
+    val cr = new ClassReader(classBytes)
+    val cn = new ClassNode()
+    cr.accept(cn, if (skipDebugInfo) ClassReader.SKIP_DEBUG else 0)
+    cn
+  }
+
+  protected lazy val classpath: JavaClassPath = {
+    import scala.tools.nsc.util.ClassPath.DefaultJavaContext
+    import scala.tools.util.PathResolver.Defaults
+    // logic inspired by scala.tools.util.PathResolver implementation
+    val containers = DefaultJavaContext.classesInExpandedPath(Defaults.javaUserClassPath)
+    new JavaClassPath(containers, DefaultJavaContext)
+  }
+}
+
+object BytecodeTest {
+  /** Parses `file` as a class file, transforms the ASM representation with `f`,
+   *  and overwrites the original file.
+   */
+  def modifyClassFile(file: JFile)(f: ClassNode => ClassNode) {
+    val rfile = new reflect.io.File(file)
+    def readClass: ClassNode = {
+      val cr = new ClassReader(rfile.toByteArray())
+      val cn = new ClassNode()
+      cr.accept(cn, 0)
+      cn
+    }
+
+    def writeClass(cn: ClassNode) {
+      val writer = new ClassWriter(0)
+      cn.accept(writer)
+      val os = rfile.bufferedOutput()
+      try {
+        os.write(writer.toByteArray)
+      } finally {
+        os.close()
+      }
+    }
+
+    writeClass(f(readClass))
+  }
+}
diff --git a/src/partest-extras/scala/tools/partest/IcodeComparison.scala b/src/partest-extras/scala/tools/partest/IcodeComparison.scala
new file mode 100644
index 0000000..7122703
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/IcodeComparison.scala
@@ -0,0 +1,73 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.partest
+
+import scala.tools.partest.nest.FileManager.compareContents
+
+/** A class for testing icode.  All you need is this in a
+ *  partest source file --
+ *  {{{
+ *    object Test extends IcodeComparison
+ *  }}}
+ *  -- and the generated output will be the icode for everything
+ *  in that file.  See scaladoc for possible customizations.
+ *  TODO promote me to partest
+ */
+abstract class IcodeComparison extends DirectTest {
+  /** The phase after which icode is printed.
+   *  Override to check icode at a different point,
+   *  but you can't print at a phase that is not enabled
+   *  in this compiler run. Defaults to "icode".
+   */
+  def printIcodeAfterPhase = "icode"
+
+  /** When comparing the output of two phases, this is
+   *  the other phase of interest, normally the preceding
+   *  phase.  Defaults to "icode" for tests of optimizer phases.
+   */
+  def printSuboptimalIcodeAfterPhase = "icode"
+
+  /** The source code to compile defaults to the test file.
+   *  I.e., the test file compiles itself. For a comparison,
+   *  the test file will be compiled three times.
+   */
+  def code = testPath.slurp()
+
+  /** By default, the test code is compiled with -usejavacp. */
+  override def extraSettings: String = "-usejavacp"
+
+  /** Compile the test code and return the contents of all
+   *  (sorted) .icode files, which are immediately deleted.
+   *  @param arg0 at least one arg is required
+   *  @param args must include -Xprint-icode:phase
+   */
+  def collectIcode(arg0: String, args: String*): List[String] = {
+    compile("-d" :: testOutput.path :: arg0 :: args.toList : _*)
+    val icodeFiles = testOutput.files.toList filter (_ hasExtension "icode")
+
+    try     icodeFiles sortBy (_.name) flatMap (f => f.lines.toList)
+    finally icodeFiles foreach (f => f.delete())
+  }
+
+  /** Collect icode at the default phase, `printIcodeAfterPhase`. */
+  def collectIcode(): List[String] = collectIcode(s"-Xprint-icode:$printIcodeAfterPhase")
+
+  /** Default show is showComparison. May be overridden for showIcode or similar. */
+  def show() = showComparison()
+
+  /** Compile the test code with and without optimization, and
+   *  then print the diff of the icode.
+   */
+  def showComparison() = {
+    val lines1 = collectIcode(s"-Xprint-icode:$printSuboptimalIcodeAfterPhase")
+    val lines2 = collectIcode("-optimise", s"-Xprint-icode:$printIcodeAfterPhase")
+
+    println(compareContents(lines1, lines2))
+  }
+
+  /** Print icode at the default phase, `printIcodeAfterPhase`. */
+  def showIcode() = println(collectIcode() mkString EOL)
+}
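
A hedged sketch of the simplest use of `IcodeComparison`, overriding `show` to dump icode instead of diffing it, as the scaladoc above suggests; placing this in a partest source file under `test/files` is assumed:

    import scala.tools.partest.IcodeComparison

    // Compiles this same test file and prints its icode at the default
    // "icode" phase rather than comparing optimized and unoptimized output.
    object Test extends IcodeComparison {
      override def show() = showIcode()
    }
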
diff --git a/src/partest-extras/scala/tools/partest/JavapTest.scala b/src/partest-extras/scala/tools/partest/JavapTest.scala
new file mode 100644
index 0000000..3cb3dc6
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/JavapTest.scala
@@ -0,0 +1,26 @@
+
+package scala.tools.partest
+
+import scala.util.{Try,Success,Failure}
+import java.lang.System.{out => sysout}
+
+/** A trait for testing repl's javap command
+ *  or possibly examining its output.
+ */
+abstract class JavapTest extends ReplTest {
+
+  /** Your assertion here, whatever you want to affirm.
+   *  Assertions must be satisfied by all flavors of javap
+   *  and should not be fragile with respect to compiler output.
+   */
+  def yah(res: Seq[String]): Boolean
+
+  def baddies = List(":javap unavailable", ":javap not yet working")
+
+  // give it a pass if javap is broken
+  override def show() = try {
+    val res = eval().toSeq
+    val unsupported = res exists (s => baddies exists (s contains _))
+    assert ((unsupported || yah(res)), res.mkString("","\n","\n"))
+  } catch { case ae: AssertionError => ae.printStackTrace(sysout) }
+}
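
For illustration, a sketch of a `JavapTest` subclass; the repl transcript line and the assertion are assumptions chosen only to exercise `yah`:

    import scala.tools.partest.JavapTest

    object Test extends JavapTest {
      // The transcript fed to the repl; the target class is illustrative.
      def code = ":javap java.lang.Object"

      // Pass when the javap output mentions the requested class at all.
      def yah(res: Seq[String]) = res exists (_ contains "java.lang.Object")
    }
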
diff --git a/src/partest-extras/scala/tools/partest/ReplTest.scala b/src/partest-extras/scala/tools/partest/ReplTest.scala
new file mode 100644
index 0000000..a728e8b
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/ReplTest.scala
@@ -0,0 +1,77 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.partest
+
+import scala.tools.nsc.Settings
+import scala.tools.nsc.interpreter.ILoop
+import java.lang.reflect.{ Method => JMethod, Field => JField }
+
+/** A class for testing repl code.
+ *  It filters the line of output that mentions a version number.
+ */
+abstract class ReplTest extends DirectTest {
+  // override to transform Settings object immediately before the finish
+  def transformSettings(s: Settings): Settings = s
+  // final because we need to enforce the existence of a couple settings.
+  final override def settings: Settings = {
+    val s = super.settings
+    // s.Yreplsync.value = true
+    s.Xnojline.value = true
+    transformSettings(s)
+  }
+  def welcoming: Boolean = false
+  lazy val welcome = "(Welcome to Scala) version .*".r
+  def normalize(s: String) = s match {
+    case welcome(w) => w
+    case s          => s
+  }
+  def unwelcoming(s: String) = s match {
+    case welcome(w) => false
+    case _          => true
+  }
+  def eval() = {
+    val s = settings
+    log("eval(): settings = " + s)
+    //ILoop.runForTranscript(code, s).lines drop 1  // not always first line
+    val lines = ILoop.runForTranscript(code, s).lines
+    if (welcoming) lines map normalize
+    else lines filter unwelcoming
+  }
+  def show() = eval() foreach println
+}
+
+/** Retain and normalize the welcome message. */
+trait Welcoming { this: ReplTest =>
+  override def welcoming = true
+}
+
+/** Run a REPL test from a session transcript.
+ *  The `session` should be a triple-quoted String starting
+ *  with the `Type in expressions` message and ending
+ *  after the final `prompt`, including the last space.
+ */
+abstract class SessionTest extends ReplTest  {
+  /** Session transcript, as a triple-quoted, multiline, marginalized string. */
+  def session: String
+
+  /** Expected output, as an iterator. */
+  def expected = session.stripMargin.lines
+
+  /** Code is the command list culled from the session (or the expected session output).
+   *  Would be nicer if code were lazy lines.
+   */
+  override final def code = expected filter (_ startsWith prompt) map (_ drop prompt.length) mkString "\n"
+
+  final def prompt = "scala> "
+
+  /** Default test is to compare expected and actual output and emit the diff on a failed comparison. */
+  override def show() = {
+    val evaled = eval().toList
+    val wanted = expected.toList
+    if (evaled.size != wanted.size) Console println s"Expected ${wanted.size} lines, got ${evaled.size}"
+    if (evaled != wanted) Console print nest.FileManager.compareContents(wanted, evaled, "expected", "actual")
+  }
+}
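
A hedged sketch of a `SessionTest`; the transcript assumes the standard 2.11 repl banner lines described above, and the evaluated expression is illustrative:

    import scala.tools.partest.SessionTest

    object Test extends SessionTest {
      // Margin-stripped by SessionTest itself; the prompt lines are the commands,
      // everything else is the expected output, ending after the final prompt.
      def session =
        """|Type in expressions to have them evaluated.
           |Type :help for more information.
           |
           |scala> 1 + 1
           |res0: Int = 2
           |
           |scala> """
    }
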
diff --git a/src/partest-extras/scala/tools/partest/ScriptTest.scala b/src/partest-extras/scala/tools/partest/ScriptTest.scala
new file mode 100644
index 0000000..3000d75
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/ScriptTest.scala
@@ -0,0 +1,22 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ */
+
+package scala.tools.partest
+
+import scala.reflect.internal.util.ScalaClassLoader
+
+/** A `ScriptTest` is a `DirectTest` for which the code
+ *  is the contents of a script file.
+ */
+abstract class ScriptTest extends DirectTest {
+  def testmain = "TestMain"
+  override def extraSettings = s"-usejavacp -Xscript $testmain"
+  def scriptPath = testPath changeExtension "script"
+  def code = scriptPath.toFile.slurp
+  def argv = Seq.empty[String]
+  def show() = {
+    compile()
+    ScalaClassLoader(getClass.getClassLoader).run(testmain, argv)
+  }
+}
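
A sketch of a `ScriptTest`, assuming a sibling `<testname>.script` file exists next to the test source; the arguments passed via `argv` are illustrative:

    import scala.tools.partest.ScriptTest

    // Compiles the companion .script file as an -Xscript TestMain program
    // and runs it with the given command-line arguments.
    object Test extends ScriptTest {
      override def argv = Seq("hello", "world")
    }
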
diff --git a/src/partest/scala/tools/partest/SigTest.scala b/src/partest-extras/scala/tools/partest/SigTest.scala
similarity index 100%
rename from src/partest/scala/tools/partest/SigTest.scala
rename to src/partest-extras/scala/tools/partest/SigTest.scala
diff --git a/src/partest-extras/scala/tools/partest/Util.scala b/src/partest-extras/scala/tools/partest/Util.scala
new file mode 100644
index 0000000..60e9dbb
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/Util.scala
@@ -0,0 +1,52 @@
+package scala.tools.partest
+
+import scala.language.experimental.macros
+
+object Util {
+  /**
+   * `trace("".isEmpty)` will return `true` and as a side effect print the following to standard out.
+   * {{{
+   *   trace> "".isEmpty
+   *   res: Boolean = true
+   *
+   * }}}
+   *
+   * An alternative to [[scala.tools.partest.ReplTest]] that avoids the inconvenience of embedding
+   * test code in a string.
+   */
+  def trace[A](a: A) = macro traceImpl[A]
+
+  import scala.reflect.macros.blackbox.Context
+  def traceImpl[A: c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = {
+    import c.universe._
+    import definitions._
+
+    // xeno.by: reify shouldn't be used explicitly before the final release of 2.10.0,
+    // because this impairs reflection refactorings
+    //
+    // val exprCode = c.literal(show(a.tree))
+    // val exprType = c.literal(show(a.actualType))
+    // reify {
+    //   println(s"trace> ${exprCode.splice}\nres: ${exprType.splice} = ${a.splice}\n")
+    //   a.splice
+    // }
+
+    c.Expr(Block(
+      List(Apply(
+        Select(Ident(PredefModule), TermName("println")),
+        List(Apply(
+          Select(Apply(
+            Select(Ident(ScalaPackage), TermName("StringContext")),
+            List(
+              Literal(Constant("trace> ")),
+              Literal(Constant("\\nres: ")),
+              Literal(Constant(" = ")),
+              Literal(Constant("\\n")))),
+          TermName("s")),
+          List(
+            Literal(Constant(show(a.tree))),
+            Literal(Constant(show(a.actualType))),
+            a.tree))))),
+      a.tree))
+  }
+}
\ No newline at end of file
diff --git a/src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala b/src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala
new file mode 100644
index 0000000..18dd740
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala
@@ -0,0 +1,93 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.instrumented
+
+import scala.collection.JavaConverters._
+
+case class MethodCallTrace(className: String, methodName: String, methodDescriptor: String) {
+  override def toString(): String = className + "." + methodName + methodDescriptor
+}
+object MethodCallTrace {
+  implicit val ordering: Ordering[MethodCallTrace] = Ordering.by(x => (x.className, x.methodName, x.methodDescriptor))
+}
+
+/**
+ * An object that controls profiling of instrumented byte-code. The instrumentation is achieved
+ * by using the `java.lang.instrument` package. The instrumentation agent can be found in
+ * `scala.tools.partest.javaagent` package.
+ *
+ * At the moment the following classes are being instrumented:
+ *   * all classes in the empty package
+ *   * all classes from the scala package (except for the classes responsible for instrumentation)
+ *
+ * The canonical way of using instrumentation is to have a test case in the `files/instrumented` directory.
+ * The following code in main:
+ *
+ * {{{
+ * import scala.tools.partest.instrumented.Instrumentation._
+ * def main(args: Array[String]): Unit = {
+ *   startProfiling()
+ *   // should box the boolean
+ *   println(true)
+ *   stopProfiling()
+ *   printStatistics()
+ * }
+ * }}}
+ *
+ *
+ * should print:
+ *
+ * {{{
+ * true
+ * Method call statistics:
+ * scala/Predef$.println(Ljava/lang/Object;)V: 1
+ * scala/runtime/BoxesRunTime.boxToBoolean(Z)Ljava/lang/Boolean;: 1
+ * }}}
+ */
+object Instrumentation {
+
+  type Statistics = Map[MethodCallTrace, Int]
+
+  def startProfiling(): Unit = Profiler.startProfiling()
+  def stopProfiling(): Unit = Profiler.stopProfiling()
+  def resetProfiling(): Unit = Profiler.resetProfiling()
+  def isProfiling(): Boolean = Profiler.isProfiling()
+
+  def getStatistics: Statistics = {
+    val isProfiling = Profiler.isProfiling()
+    if (isProfiling) {
+      Profiler.stopProfiling()
+    }
+    val stats = Profiler.getStatistics().asScala.toSeq.map {
+      case (trace, count) => MethodCallTrace(trace.className, trace.methodName, trace.methodDescriptor) -> count.intValue
+    }
+    val res = Map(stats: _*)
+    if (isProfiling) {
+      Profiler.startProfiling()
+    }
+    res
+  }
+
+  val standardFilter: MethodCallTrace => Boolean = t => {
+    // ignore all calls to Console triggered by printing
+    t.className != "scala/Console$" &&
+    // console accesses DynamicVariable, let's discard it too
+    !t.className.startsWith("scala/util/DynamicVariable")
+  }
+
+  // Used in tests.
+  def printStatistics(stats: Statistics = getStatistics, filter: MethodCallTrace => Boolean = standardFilter): Unit = {
+    println("Method call statistics:")
+    val toBePrinted = stats.toSeq.filter(p => filter(p._1)).sortBy(_._1)
+    // <count> <trace>
+    val format = "%5d  %s\n"
+    toBePrinted foreach {
+      case (trace, count) => printf(format, count, trace)
+    }
+  }
+
+}
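
To complement the scaladoc example above, a hedged sketch that passes an explicit filter to `printStatistics`; the filter predicate and the profiled expression are assumptions:

    import scala.tools.partest.instrumented.Instrumentation._

    object Test {
      def main(args: Array[String]): Unit = {
        startProfiling()
        // Exercises boxing and collection builders under the profiler.
        println(Array(1, 2, 3).map(_ + 1).sum)
        stopProfiling()
        // Report only calls into the collections library; illustrative filter.
        printStatistics(getStatistics, t => t.className startsWith "scala/collection/")
      }
    }
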
diff --git a/src/partest-extras/scala/tools/partest/instrumented/Profiler.java b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java
new file mode 100644
index 0000000..d6b62e1
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java
@@ -0,0 +1,82 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.instrumented;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * A simple profiler class that counts method invocations. It is used by the byte-code instrumentation, which inserts
+ * a call to {@link Profiler#methodCalled(String, String, String)} at the beginning of every instrumented method.
+ *
+ * WARNING: This class is an INTERNAL implementation detail and should never be used directly. It is made public only
+ * because it must be universally accessible for instrumentation needs. If you want to profile your test, use
+ * {@link Instrumentation} instead.
+ */
+public class Profiler {
+
+        private static boolean isProfiling = false;
+        private static Map<MethodCallTrace, Integer> counts = new HashMap<MethodCallTrace, Integer>();
+
+        static public class MethodCallTrace {
+          final String className;
+          final String methodName;
+          final String methodDescriptor;
+
+          public MethodCallTrace(final String className, final String methodName, final String methodDescriptor) {
+            this.className = className;
+            this.methodName = methodName;
+            this.methodDescriptor = methodDescriptor;
+          }
+
+          @Override
+          public boolean equals(Object obj) {
+            if (!(obj instanceof MethodCallTrace)) {
+              return false;
+            } else {
+              MethodCallTrace that = (MethodCallTrace) obj;
+              return that.className.equals(className) && that.methodName.equals(methodName) && that.methodDescriptor.equals(methodDescriptor);
+            }
+          }
+          @Override
+          public int hashCode() {
+            return className.hashCode() ^ methodName.hashCode() ^ methodDescriptor.hashCode();
+          }
+        }
+
+        public static void startProfiling() {
+                isProfiling = true;
+        }
+
+        public static void stopProfiling() {
+                isProfiling = false;
+        }
+
+        public static boolean isProfiling() {
+          return isProfiling;
+        }
+
+        public static void resetProfiling() {
+          counts = new HashMap<MethodCallTrace, Integer>();
+        }
+
+        public static void methodCalled(final String className, final String methodName, final String methodDescriptor) {
+                if (isProfiling) {
+                  MethodCallTrace trace = new MethodCallTrace(className, methodName, methodDescriptor);
+                        Integer counter = counts.get(trace);
+                        if (counter == null) {
+                                counts.put(trace, 1);
+                        } else {
+                                counts.put(trace, counter+1);
+                        }
+                }
+        }
+
+        public static Map<MethodCallTrace, Integer> getStatistics() {
+          return new HashMap<MethodCallTrace, Integer>(counts);
+        }
+
+}
diff --git a/src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java b/src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java
new file mode 100644
index 0000000..86f5e64
--- /dev/null
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java
@@ -0,0 +1,49 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.javaagent;
+
+import java.lang.instrument.ClassFileTransformer;
+import java.security.ProtectionDomain;
+
+import scala.tools.asm.ClassReader;
+import scala.tools.asm.ClassWriter;
+
+public class ASMTransformer implements ClassFileTransformer {
+
+  private boolean shouldTransform(String className) {
+    return
+        // do not instrument instrumentation logic (in order to avoid infinite recursion)
+        !className.startsWith("scala/tools/partest/instrumented/") &&
+        !className.startsWith("scala/tools/partest/javaagent/") &&
+        // we instrument all classes from empty package
+        (!className.contains("/") ||
+        // we instrument all classes from scala package
+        className.startsWith("scala/") ||
+        // we instrument all classes from `instrumented` package
+        className.startsWith("instrumented/"));
+  }
+
+        public byte[] transform(final ClassLoader classLoader, final String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) {
+          if (shouldTransform(className)) {
+            ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS) {
+              @Override protected String getCommonSuperClass(final String type1, final String type2) {
+                // Since we are not recomputing the stack frame map, this should never be called. We override this
+                // method because the default implementation uses reflection and might try to load the class that we
+                // are currently processing. That leads to weird results like swallowed exceptions and classes not
+                // being transformed.
+                throw new RuntimeException("Unexpected call to getCommonSuperClass(" + type1 + ", " + type2 +
+                    ") while transforming " + className);
+              }
+            };
+                ProfilerVisitor visitor = new ProfilerVisitor(writer);
+                ClassReader reader = new ClassReader(classfileBuffer);
+                reader.accept(visitor, 0);
+                return writer.toByteArray();
+          } else {
+            return classfileBuffer;
+          }
+        }
+}
diff --git a/src/partest/scala/tools/partest/javaagent/MANIFEST.MF b/src/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF
similarity index 100%
rename from src/partest/scala/tools/partest/javaagent/MANIFEST.MF
rename to src/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF
diff --git a/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java
new file mode 100644
index 0000000..b1b100f
--- /dev/null
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java
@@ -0,0 +1,59 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.javaagent;
+
+import scala.tools.asm.ClassVisitor;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+public class ProfilerVisitor extends ClassVisitor implements Opcodes {
+
+  private static String profilerClass = "scala/tools/partest/instrumented/Profiler";
+
+  public ProfilerVisitor(final ClassVisitor cv) {
+    super(ASM4, cv);
+  }
+
+  private String className = null;
+
+  @Override
+  public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
+    className = name;
+    super.visit(version, access, name, signature, superName, interfaces);
+  }
+
+  public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
+    // delegate the method call to the next
+    // chained visitor
+    MethodVisitor mv = cv.visitMethod(access, name, desc, signature, exceptions);
+    if (!profilerClass.equals(className)) {
+      // only instrument non-abstract methods
+      if((access & ACC_ABSTRACT) == 0) {
+        assert(className != null);
+        /* The following instructions do not modify the compressed stack frame map, so
+         * we don't need to worry about recalculating it. Specifically,
+         * let's quote "ASM 4.0, A Java bytecode engineering library" guide (p. 40):
+         *
+         *   In order to save space, a compiled method does not contain one frame per
+         *   instruction: in fact it contains only the frames for the instructions
+         *   that correspond to jump targets or exception handlers, or that follow
+         *   unconditional jump instructions. Indeed the other frames can be easily
+         *   and quickly inferred from these ones.
+         *
+         * The instructions below just load constants and call a method, so according
+         * to the definition above they do not contribute to the compressed stack frame map.
+         */
+        mv.visitLdcInsn(className);
+        mv.visitLdcInsn(name);
+        mv.visitLdcInsn(desc);
+        mv.visitMethodInsn(INVOKESTATIC, profilerClass, "methodCalled",
+            "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
+      }
+    }
+    return mv;
+  }
+
+}
diff --git a/src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java
new file mode 100644
index 0000000..819a5cc
--- /dev/null
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java
@@ -0,0 +1,25 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.javaagent;
+
+import java.lang.instrument.Instrumentation;
+import java.lang.instrument.UnmodifiableClassException;
+
+/**
+ * Profiling agent that instruments byte-code to insert calls to
+ * {@link scala.tools.partest.instrumented.Profiler#methodCalled(String, String, String)}
+ * by using ASM library for byte-code manipulation.
+ */
+public class ProfilingAgent {
+        public static void premain(String args, Instrumentation inst) throws UnmodifiableClassException {
+          // NOTE: we are adding a transformer that won't be applied to classes that are already loaded.
+          // This should be ok because premain is executed before main, so neither the Scala library
+          // nor the test case itself will have been loaded yet. We rely here on the fact that ASMTransformer
+          // does not depend on the Scala library. In case our assumptions are wrong, we can always insert a
+          // call to inst.retransformClasses.
+          inst.addTransformer(new ASMTransformer(), false);
+        }
+}
diff --git a/src/partest/README b/src/partest/README
deleted file mode 100644
index 0434aa7..0000000
--- a/src/partest/README
+++ /dev/null
@@ -1,32 +0,0 @@
-How partest chooses the compiler / library:
-
-  * ''-Dpartest.build=build/four-pack'' -> will search for libraries in
-    ''lib'' directory of given path
-  * ''--pack'' -> will set ''partest.build=build/pack'', and run all tests.
-    add ''--[kind]'' to run a selected set of tests.
-  * auto detection:
-    - partest.build property -> ''bin'' / ''lib'' directories
-    - distribution (''dists/latest'')
-    - supersabbus pack (''build/pack'')
-    - sabbus quick (''build/quick'')
-    - installed dist (test files in ''misc/scala-test/files'')
-
-How partest choses test files: the test files must be accessible from
-the directory on which partest is run. So the test files must be either
-at:
-  * ./test/files
-  * ./files (cwd is "test")
-  * ./misc/scala-test/files (installed scala distribution)
-
-Other arguments:
-  * --pos          next files test a compilation success
-  * --neg          next files test a compilation failure
-  * --run          next files test the interpreter and all backends
-  * --jvm          next files test the JVM backend
-  * --res          next files test the resident compiler
-  * --buildmanager next files test the build manager
-  * --shootout     next files are shootout tests
-  * --script       next files test the script runner
-  * ''-Dpartest.scalac_opts=...'' -> add compiler options
-  * ''--verbose'' -> print verbose messages
-  * ''-Dpartest.debug=true'' -> print debug messages
diff --git a/src/partest/scala/tools/partest/AsmNode.scala b/src/partest/scala/tools/partest/AsmNode.scala
deleted file mode 100644
index d181436..0000000
--- a/src/partest/scala/tools/partest/AsmNode.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-package scala.tools.partest
-
-import scala.collection.JavaConverters._
-import scala.tools.asm
-import asm._
-import asm.tree._
-import java.lang.reflect.Modifier
-
-sealed trait AsmNode[+T] {
-  def node: T
-  def access: Int
-  def desc: String
-  def name: String
-  def signature: String
-  def attrs: List[Attribute]
-  def visibleAnnotations: List[AnnotationNode]
-  def invisibleAnnotations: List[AnnotationNode]
-  def characteristics = f"$name%15s $desc%-30s$accessString$sigString"
-
-  private def accessString = if (access == 0) "" else " " + Modifier.toString(access)
-  private def sigString = if (signature == null) "" else " " + signature
-  override def toString = characteristics
-}
-
-object AsmNode {
-  type AsmMethod = AsmNode[MethodNode]
-  type AsmField = AsmNode[FieldNode]
-  type AsmMember = AsmNode[_]
-
-  implicit class ClassNodeOps(val node: ClassNode) {
-    def fieldsAndMethods: List[AsmMember] = {
-      val xs: List[AsmMember] = (
-           node.methods.asScala.toList.map(x => (x: AsmMethod))
-        ++ node.fields.asScala.toList.map(x => (x: AsmField))
-      )
-      xs sortBy (_.characteristics)
-    }
-  }
-  implicit class AsmMethodNode(val node: MethodNode) extends AsmNode[MethodNode] {
-    def access: Int                                = node.access
-    def desc: String                               = node.desc
-    def name: String                               = node.name
-    def signature: String                          = node.signature
-    def attrs: List[Attribute]                     = node.attrs.asScala.toList
-    def visibleAnnotations: List[AnnotationNode]   = node.visibleAnnotations.asScala.toList
-    def invisibleAnnotations: List[AnnotationNode] = node.invisibleAnnotations.asScala.toList
-  }
-  implicit class AsmFieldNode(val node: FieldNode) extends AsmNode[FieldNode] {
-    def access: Int                                = node.access
-    def desc: String                               = node.desc
-    def name: String                               = node.name
-    def signature: String                          = node.signature
-    def attrs: List[Attribute]                     = node.attrs.asScala.toList
-    def visibleAnnotations: List[AnnotationNode]   = node.visibleAnnotations.asScala.toList
-    def invisibleAnnotations: List[AnnotationNode] = node.invisibleAnnotations.asScala.toList
-  }
-
-  def apply(node: MethodNode): AsmMethodNode = new AsmMethodNode(node)
-  def apply(node: FieldNode): AsmFieldNode   = new AsmFieldNode(node)
-}
diff --git a/src/partest/scala/tools/partest/BytecodeTest.scala b/src/partest/scala/tools/partest/BytecodeTest.scala
deleted file mode 100644
index 2699083..0000000
--- a/src/partest/scala/tools/partest/BytecodeTest.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-package scala.tools.partest
-
-import scala.tools.nsc.util.JavaClassPath
-import scala.collection.JavaConverters._
-import scala.tools.asm
-import asm.{ ClassReader }
-import asm.tree.{ClassNode, MethodNode, InsnList}
-import java.io.InputStream
-import AsmNode._
-
-/**
- * Provides utilities for inspecting bytecode using ASM library.
- *
- * HOW TO USE
- * 1. Create subdirectory in test/files/jvm for your test. Let's name it $TESTDIR.
- * 2. Create $TESTDIR/BytecodeSrc_1.scala that contains Scala source file that you
- *    want to inspect the bytecode for. The '_1' suffix signals to partest that it
- *    should compile this file first.
- * 3. Create $TESTDIR/Test.scala:
- *    import scala.tools.partest.BytecodeTest
- *    object Test extends BytecodeTest {
- *      def show {
- *        // your code that inspect ASM trees and prints values
- *      }
- *    }
- * 4. Create corresponding check file.
- *
- * EXAMPLE
- * See test/files/jvm/bytecode-test-example for an example of bytecode test.
- *
- */
-abstract class BytecodeTest extends ASMConverters {
-  import instructions._
-
-  /** produce the output to be compared against a checkfile */
-  protected def show(): Unit
-
-  def main(args: Array[String]): Unit = show
-
-  // asserts
-  def sameBytecode(methA: MethodNode, methB: MethodNode) = {
-    val isa = instructions.fromMethod(methA)
-    val isb = instructions.fromMethod(methB)
-    if (isa == isb) println("bytecode identical")
-    else diffInstructions(isa, isb)
-  }
-
-  // Do these classes have all the same methods, with the same names, access,
-  // descriptors and generic signatures? Method bodies are not considered, and
-  // the names of the classes containing the methods are substituted so they do
-  // not appear as differences.
-  def sameMethodAndFieldSignatures(clazzA: ClassNode, clazzB: ClassNode): Boolean = {
-    val ms1 = clazzA.fieldsAndMethods.toIndexedSeq
-    val ms2 = clazzB.fieldsAndMethods.toIndexedSeq
-    val name1 = clazzA.name
-    val name2 = clazzB.name
-
-    if (ms1.length != ms2.length) {
-      println("Different member counts in $name1 and $name2")
-      false
-    }
-    else (ms1, ms2).zipped forall { (m1, m2) =>
-      val c1 = m1.characteristics
-      val c2 = m2.characteristics.replaceAllLiterally(name2, name1)
-      if (c1 == c2)
-        println(s"[ok] $m1")
-      else
-        println(s"[fail]\n  in $name1: $c1\n  in $name2: $c2")
-
-      c1 == c2
-    }
-  }
-
-  // bytecode is equal modulo local variable numbering
-  def equalsModuloVar(a: Instruction, b: Instruction) = (a, b) match {
-    case _ if a == b => true
-    case (VarOp(op1, _), VarOp(op2, _)) if op1 == op2 => true
-    case _ => false
-  }
-
-  def similarBytecode(methA: MethodNode, methB: MethodNode, similar: (Instruction, Instruction) => Boolean) = {
-    val isa = fromMethod(methA)
-    val isb = fromMethod(methB)
-    if (isa == isb) println("bytecode identical")
-    else if ((isa, isb).zipped.forall { case (a, b) => similar(a, b) }) println("bytecode similar")
-    else diffInstructions(isa, isb)
-  }
-
-  def diffInstructions(isa: List[Instruction], isb: List[Instruction]) = {
-    val len = Math.max(isa.length, isb.length)
-    if (len > 0 ) {
-      val width = isa.map(_.toString.length).max
-      val lineWidth = len.toString.length
-      (1 to len) foreach { line =>
-        val isaPadded = isa.map(_.toString) orElse Stream.continually("")
-        val isbPadded = isb.map(_.toString) orElse Stream.continually("")
-        val a = isaPadded(line-1)
-        val b = isbPadded(line-1)
-
-        println(s"""$line${" " * (lineWidth-line.toString.length)} ${if (a==b) "==" else "<>"} $a${" " * (width-a.length)} | $b""")
-      }
-    }
-  }
-
-// loading
-  protected def getMethod(classNode: ClassNode, name: String): MethodNode =
-    classNode.methods.asScala.find(_.name == name) getOrElse
-      sys.error(s"Didn't find method '$name' in class '${classNode.name}'")
-
-  protected def loadClassNode(name: String, skipDebugInfo: Boolean = true): ClassNode = {
-    val classBytes: InputStream = (for {
-      classRep <- classpath.findClass(name)
-      binary <- classRep.binary
-    } yield binary.input) getOrElse sys.error(s"failed to load class '$name'; classpath = $classpath")
-
-    val cr = new ClassReader(classBytes)
-    val cn = new ClassNode()
-    cr.accept(cn, if (skipDebugInfo) ClassReader.SKIP_DEBUG else 0)
-    cn
-  }
-
-  protected lazy val classpath: JavaClassPath = {
-    import scala.tools.nsc.util.ClassPath.DefaultJavaContext
-    import scala.tools.util.PathResolver.Defaults
-    // logic inspired by scala.tools.util.PathResolver implementation
-    val containers = DefaultJavaContext.classesInExpandedPath(Defaults.javaUserClassPath)
-    new JavaClassPath(containers, DefaultJavaContext)
-  }
-}
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
deleted file mode 100644
index 848deef..0000000
--- a/src/partest/scala/tools/partest/CompilerTest.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import scala.reflect.runtime.{universe => ru}
-import scala.tools.nsc._
-
-/** For testing compiler internals directly.
- *  Each source code string in "sources" will be compiled, and
- *  the check function will be called with the source code and the
- *  resulting CompilationUnit.  The check implementation should
- *  test for what it wants to test and fail (via assert or other
- *  exception) if it is not happy.
- */
-abstract class CompilerTest extends DirectTest {
-  def check(source: String, unit: global.CompilationUnit): Unit
-
-  lazy val global: Global = newCompiler()
-  lazy val units: List[global.CompilationUnit] = compilationUnits(global)(sources: _ *)
-  import global._
-  import definitions._
-
-  override def extraSettings = "-usejavacp -d " + testOutput.path
-
-  def show() = (sources, units).zipped foreach check
-
-  // Override at least one of these...
-  def code = ""
-  def sources: List[String] = List(code)
-
-  // Utility functions
-
-  class MkType(sym: Symbol) {
-    def apply[M](implicit t: ru.TypeTag[M]): Type =
-      if (sym eq NoSymbol) NoType
-      else appliedType(sym, compilerTypeFromTag(t))
-  }
-  implicit def mkMkType(sym: Symbol) = new MkType(sym)
-
-  def allMembers(root: Symbol): List[Symbol] = {
-    def loop(seen: Set[Symbol], roots: List[Symbol]): List[Symbol] = {
-      val latest = roots flatMap (_.info.members) filterNot (seen contains _)
-      if (latest.isEmpty) seen.toList.sortWith(_ isLess _)
-      else loop(seen ++ latest, latest)
-    }
-    loop(Set(), List(root))
-  }
-
-  class SymsInPackage(pkgName: String) {
-    def pkg     = rootMirror.getRequiredPackage(pkgName)
-    def classes = allMembers(pkg) filter (_.isClass)
-    def modules = allMembers(pkg) filter (_.isModule)
-    def symbols = classes ++ terms filterNot (_ eq NoSymbol)
-    def terms   = allMembers(pkg) filter (s => s.isTerm && !s.isConstructor)
-    def tparams = classes flatMap (_.info.typeParams)
-    def tpes    = symbols map (_.tpe) distinct
-  }
-}
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
deleted file mode 100644
index 8fcaa64..0000000
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ /dev/null
@@ -1,131 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import scala.tools.nsc._
-import settings.ScalaVersion
-import io.Directory
-import util.{ SourceFile, BatchSourceFile, CommandLineParser }
-import reporters.{Reporter, ConsoleReporter}
-
-/** A class for testing code which is embedded as a string.
- *  It allows for more complete control over settings, compiler
- *  configuration, sequence of events, etc. than does partest.
- */
-abstract class DirectTest extends App {
-  // The program being tested in some fashion
-  def code: String
-  // produce the output to be compared against a checkfile
-  def show(): Unit
-
-  // the test file or dir, and output directory
-  def testPath   = io.File(sys.props("partest.test-path"))
-  def testOutput = io.Directory(sys.props("partest.output"))
-
-  // override to add additional settings with strings
-  def extraSettings: String = ""
-  // a default Settings object
-  def settings: Settings = newSettings(CommandLineParser tokenize extraSettings)
-  // a custom Settings object
-  def newSettings(args: List[String]) = {
-    val s = new Settings
-    val allArgs = args ++ (CommandLineParser tokenize debugSettings)
-    log("newSettings: allArgs = " + allArgs)
-    s processArguments (allArgs, true)
-    s
-  }
-  // new compiler
-  def newCompiler(args: String*): Global = {
-    val settings = newSettings((CommandLineParser tokenize ("-d \"" + testOutput.path + "\" " + extraSettings)) ++ args.toList)
-    newCompiler(settings)
-  }
-
-  def newCompiler(settings: Settings): Global = {
-    if (settings.Yrangepos.value) new Global(settings, reporter(settings)) with interactive.RangePositions
-    else new Global(settings, reporter(settings))
-  }
-
-  def reporter(settings: Settings): Reporter = new ConsoleReporter(settings)
-
-  private def newSourcesWithExtension(ext: String)(codes: String*): List[BatchSourceFile] =
-    codes.toList.zipWithIndex map {
-      case (src, idx) => new BatchSourceFile(s"newSource${idx + 1}.$ext", src)
-    }
-
-  def newJavaSources(codes: String*) = newSourcesWithExtension("java")(codes: _*)
-  def newSources(codes: String*)     = newSourcesWithExtension("scala")(codes: _*)
-
-  def compileString(global: Global)(sourceCode: String): Boolean = {
-    withRun(global)(_ compileSources newSources(sourceCode))
-    !global.reporter.hasErrors
-  }
-
-  def javaCompilationUnits(global: Global)(sourceCodes: String*) = {
-    sourceFilesToCompiledUnits(global)(newJavaSources(sourceCodes: _*))
-  }
-
-  def sourceFilesToCompiledUnits(global: Global)(files: List[SourceFile]) = {
-    withRun(global) { run =>
-      run compileSources files
-      run.units.toList
-    }
-  }
-
-  def compilationUnits(global: Global)(sourceCodes: String*): List[global.CompilationUnit] = {
-    val units = sourceFilesToCompiledUnits(global)(newSources(sourceCodes: _*))
-    if (global.reporter.hasErrors) {
-      global.reporter.flush()
-      sys.error("Compilation failure.")
-    }
-    units
-  }
-
-  def withRun[T](global: Global)(f: global.Run => T): T = {
-    global.reporter.reset()
-    f(new global.Run)
-  }
-
-  // compile the code, optionally first adding to the settings
-  def compile(args: String*) = compileString(newCompiler(args: _*))(code)
-
-  /**  Constructor/main body  **/
-  try show()
-  catch { case t: Exception => println(t.getMessage) ; t.printStackTrace ; sys.exit(1) }
-
-  /** Debugger interest only below this line **/
-  protected def isDebug       = (sys.props contains "partest.debug") || (sys.env contains "PARTEST_DEBUG")
-  protected def debugSettings = sys.props.getOrElse("partest.debug.settings", "")
-
-  final def log(msg: => Any) {
-    if (isDebug) Console.err println msg
-  }
-
-  /**
-   * Run a test only if the current java version is at least the version specified.
-   */
-  def testUnderJavaAtLeast[A](version: String)(yesRun: =>A) = new TestUnderJavaAtLeast(version, { yesRun })
-
-  class TestUnderJavaAtLeast[A](version: String, yesRun: => A) {
-    val javaVersion = System.getProperty("java.specification.version")
-
-    // the "ScalaVersion" class parses Java specification versions just fine
-    val requiredJavaVersion = ScalaVersion(version)
-    val executingJavaVersion = ScalaVersion(javaVersion)
-    val shouldRun = executingJavaVersion >= requiredJavaVersion
-    val preamble = if (shouldRun) "Attempting" else "Doing fallback for"
-
-    def logInfo() = log(s"$preamble java $version specific test under java version $javaVersion")
- 
-   /*
-    * If the current java version is at least 'version' then 'yesRun' is evaluated
-    * otherwise 'fallback' is 
-    */
-    def otherwise(fallback: =>A): A = {
-      logInfo()
-      if (shouldRun) yesRun else fallback
-    }
-  }
-}
diff --git a/src/partest/scala/tools/partest/IcodeTest.scala b/src/partest/scala/tools/partest/IcodeTest.scala
deleted file mode 100644
index f5333cc..0000000
--- a/src/partest/scala/tools/partest/IcodeTest.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import scala.tools.nsc._
-import nest.FileUtil._
-import io.Directory
-
-/** A trait for testing icode.  All you need is this in a
- *  partest source file:
- *  {{{
- *    object Test extends IcodeTest
- *  }}}
- *  And then the generated output will be the icode for everything
- *  in that file.  See source for possible customizations.
- */
-abstract class IcodeTest extends DirectTest {
-  // override to check icode at a different point.
-  def printIcodeAfterPhase = "icode"
-  // override to use source code other than the file being tested.
-  def code = testPath.slurp()
-
-  override def extraSettings: String = "-usejavacp -Xprint-icode:" + printIcodeAfterPhase
-
-  // Compile, read in all the *.icode files, delete them, and return their contents
-  def collectIcode(args: String*): List[String] = {
-    compile("-d" :: testOutput.path :: args.toList : _*)
-    val icodeFiles = testOutput.files.toList filter (_ hasExtension "icode")
-
-    try     icodeFiles sortBy (_.name) flatMap (f => f.lines.toList)
-    finally icodeFiles foreach (f => f.delete())
-  }
-
-  // Default show() compiles the code with and without optimization and
-  // outputs the diff.
-  def show() {
-    val lines1 = collectIcode("")
-    val lines2 = collectIcode("-optimise")
-
-    println(compareContents(lines1, lines2))
-  }
-}
diff --git a/src/partest/scala/tools/partest/MemoryTest.scala b/src/partest/scala/tools/partest/MemoryTest.scala
deleted file mode 100644
index 58d25d2..0000000
--- a/src/partest/scala/tools/partest/MemoryTest.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package scala.tools.partest
-
-abstract class MemoryTest {
-  def maxDelta: Double
-  def calcsPerIter: Int
-  def calc(): Unit
-
-  def main(args: Array[String]) {
-    val rt = Runtime.getRuntime()
-    def memUsage() = {
-      import java.lang.management._
-      import scala.collection.JavaConverters._
-      val pools = ManagementFactory.getMemoryPoolMXBeans.asScala
-      pools.map(_.getUsage.getUsed).sum / 1000000d
-    }
-
-    val history = scala.collection.mutable.ListBuffer[Double]()
-    def stressTestIter() = {
-      var i = 0
-      while (i < calcsPerIter) { calc(); i += 1 }
-      1 to 5 foreach (_ => rt.gc())
-      history += memUsage
-    }
-
-    1 to 5 foreach (_ => stressTestIter())
-    val reference = memUsage()
-    1 to 5 foreach (_ => stressTestIter())
-    1 to 5 foreach (_ => rt.gc())
-    val result = memUsage()
-    history += result
-
-    val delta = result - reference
-    if (delta > maxDelta) {
-      println("FAILED")
-      history foreach (mb => println(mb + " Mb"))
-    }
-  }
-}
diff --git a/src/partest/scala/tools/partest/PartestDefaults.scala b/src/partest/scala/tools/partest/PartestDefaults.scala
deleted file mode 100644
index a21c602..0000000
--- a/src/partest/scala/tools/partest/PartestDefaults.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package scala.tools
-package partest
-
-import nsc.io.{ File, Path, Directory }
-import scala.tools.util.PathResolver
-import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
-import java.lang.Runtime.getRuntime
-
-object PartestDefaults {
-  import nsc.Properties._
-  private def wrapAccessControl[T](body: => Option[T]): Option[T] =
-    try body catch { case _: java.security.AccessControlException => None }
-
-  def testRootName  = propOrNone("partest.root")
-  def srcDirName    = propOrElse("partest.srcdir", "files")
-  def testRootDir   = testRootName map (x => Directory(x))
-
-  // def classPath   = propOrElse("partest.classpath", "")
-  def classPath   = PathResolver.Environment.javaUserClassPath    // XXX
-
-  def javaCmd     = propOrElse("partest.javacmd", "java")
-  def javacCmd    = propOrElse("partest.javac_cmd", "javac")
-  def javaOpts    = propOrElse("partest.java_opts", "")
-  def scalacOpts  = propOrElse("partest.scalac_opts", "")
-
-  def testBuild  = propOrNone("partest.build")
-  def errorCount = propOrElse("partest.errors", "0").toInt
-  def numThreads = propOrNone("partest.threads") map (_.toInt) getOrElse getRuntime.availableProcessors
-
-  def timeout     = "1200000"
-}
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
deleted file mode 100644
index dc40f9f..0000000
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ /dev/null
@@ -1,438 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala Parallel Testing               **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.tools
-package partest
-
-import scala.util.Properties.setProp
-import scala.tools.nsc.io.{ Directory, Path => SPath }
-import nsc.util.ClassPath
-import util.PathResolver
-import scala.tools.ant.sabbus.CompilationPathProperty
-import java.io.File
-import java.lang.reflect.Method
-import org.apache.tools.ant.Task
-import org.apache.tools.ant.types.{Path, Reference, FileSet}
-import org.apache.tools.ant.types.Commandline.Argument
-import scala.tools.ant.ScalaTask
-
-/** An Ant task to execute the Scala test suite (NSC).
- *
- *  This task can take the following parameters as attributes:
- *  - `srcdir`,
- *  - `classpath`,
- *  - `classpathref`,
- *  - `showlog`,
- *  - `showdiff`,
- *  - `erroronfailed`,
- *  - `javacmd`,
- *  - `javaccmd`,
- *  - `scalacopts`,
- *  - `timeout`,
- *  - `debug`,
- *  - `junitreportdir`.
- *
- *  It also takes the following parameters as nested elements:
- *  - `compilationpath`.
- *  - `postests`,
- *  - `negtests`,
- *  - `runtests`,
- *  - `jvmtests`,
- *  - `residenttests`,
- *  - `buildmanagertests`,
- *  - `shootouttests`,
- *  - `scalaptests`,
- *  - `scalachecktests`,
- *  - `specializedtests`,
- *  - `instrumentedtests`,
- *  - `presentationtests`,
- *  - `scripttests`.
- *
- * @author Philippe Haller
- */
-class PartestTask extends Task with CompilationPathProperty with ScalaTask {
-
-  def addConfiguredPosTests(input: FileSet) {
-    posFiles = Some(input)
-  }
-
-  def addConfiguredNegTests(input: FileSet) {
-    negFiles = Some(input)
-  }
-
-  def addConfiguredRunTests(input: FileSet) {
-    runFiles = Some(input)
-  }
-
-  def addConfiguredJvmTests(input: FileSet) {
-    jvmFiles = Some(input)
-  }
-
-  def addConfiguredResidentTests(input: FileSet) {
-    residentFiles = Some(input)
-  }
-
-  def addConfiguredBuildManagerTests(input: FileSet) {
-    buildManagerFiles = Some(input)
-  }
-
-  def addConfiguredScalacheckTests(input: FileSet) {
-    scalacheckFiles = Some(input)
-  }
-
-  def addConfiguredScriptTests(input: FileSet) {
-    scriptFiles = Some(input)
-  }
-
-  def addConfiguredShootoutTests(input: FileSet) {
-    shootoutFiles = Some(input)
-  }
-
-  def addConfiguredScalapTests(input: FileSet) {
-    scalapFiles = Some(input)
-  }
-
-  def addConfiguredSpecializedTests(input: FileSet) {
-    specializedFiles = Some(input)
-  }
-
-  def addConfiguredInstrumentedTests(input: FileSet) {
-    instrumentedFiles = Some(input)
-  }
-
-  def addConfiguredPresentationTests(input: FileSet) {
-    presentationFiles = Some(input)
-  }
-
-  def addConfiguredAntTests(input: FileSet) {
-    antFiles = Some(input)
-  }
-
-
-  def setSrcDir(input: String) {
-    srcDir = Some(input)
-  }
-
-  def setClasspath(input: Path) {
-    if (classpath.isEmpty)
-      classpath = Some(input)
-    else
-      classpath.get.append(input)
-  }
-
-  def createClasspath(): Path = {
-    if (classpath.isEmpty) classpath = Some(new Path(getProject()))
-    classpath.get.createPath()
-  }
-
-  def setClasspathref(input: Reference) {
-    createClasspath().setRefid(input)
-  }
-
-  def setShowLog(input: Boolean) {
-    showLog = input
-  }
-
-  def setShowDiff(input: Boolean) {
-    showDiff = input
-  }
-
-  def setErrorOnFailed(input: Boolean) {
-    errorOnFailed = input
-  }
-
-  def setJavaCmd(input: File) {
-    javacmd = Some(input)
-  }
-
-  def setJavacCmd(input: File) {
-    javaccmd = Some(input)
-  }
-
-  def setScalacOpts(input: String) {
-    val s = input.split(' ').map { s => val a = new Argument; a.setValue(s); a }
-    scalacArgs = Some(scalacArgs.getOrElse(Seq()) ++ s)
-  }
-
-  def createCompilerArg(): Argument = {
-    val a = new Argument
-    scalacArgs = Some(scalacArgs.getOrElse(Seq()) :+ a)
-    a
-  }
-
-  def setTimeout(delay: String) {
-    timeout = Some(delay)
-  }
-
-  def setDebug(input: Boolean) {
-    debug = input
-  }
-
-  def setJUnitReportDir(input: File) {
-    jUnitReportDir = Some(input)
-  }
-
-  private var classpath: Option[Path] = None
-  private var srcDir: Option[String] = None
-  private var javacmd: Option[File] = None
-  private var javaccmd: Option[File] = None
-  private var showDiff: Boolean = false
-  private var showLog: Boolean = false
-  private var runFailed: Boolean = false
-  private var posFiles: Option[FileSet] = None
-  private var negFiles: Option[FileSet] = None
-  private var runFiles: Option[FileSet] = None
-  private var jvmFiles: Option[FileSet] = None
-  private var residentFiles: Option[FileSet] = None
-  private var buildManagerFiles: Option[FileSet] = None
-  private var scalacheckFiles: Option[FileSet] = None
-  private var scriptFiles: Option[FileSet] = None
-  private var shootoutFiles: Option[FileSet] = None
-  private var scalapFiles: Option[FileSet] = None
-  private var specializedFiles: Option[FileSet] = None
-  private var instrumentedFiles: Option[FileSet] = None
-  private var presentationFiles: Option[FileSet] = None
-  private var antFiles: Option[FileSet] = None
-  private var errorOnFailed: Boolean = false
-  private var scalacArgs: Option[Seq[Argument]] = None
-  private var timeout: Option[String] = None
-  private var jUnitReportDir: Option[File] = None
-  private var debug = false
-
-  def fileSetToDir(fs: FileSet) = Directory(fs getDir getProject)
-  def fileSetToArray(fs: FileSet): Array[SPath] = {
-    val root = fileSetToDir(fs)
-    (fs getDirectoryScanner getProject).getIncludedFiles map (root / _)
-  }
-
-  private def getFiles(fileSet: Option[FileSet]): Array[File] = fileSet match {
-    case None     => Array()
-    case Some(fs) => fileSetToArray(fs) filterNot (_ hasExtension "log") map (_.jfile)
-  }
-
-  private def getFilesAndDirs(fileSet: Option[FileSet]): Array[File] = fileSet match {
-    case None     => Array()
-    case Some(fs) =>
-      def shouldExclude(name: String) = (name endsWith ".obj") || (name startsWith ".")
-      // println("----> " + fileSet)
-
-      val fileTests = getFiles(Some(fs)) filterNot (x => shouldExclude(x.getName))
-      val dirResult = getDirs(Some(fs))  filterNot (x => shouldExclude(x.getName))
-      // println("dirs: " + dirResult.toList)
-      // println("files: " + fileTests.toList)
-
-      dirResult ++ fileTests
-  }
-
-  private def getDirs(fileSet: Option[FileSet]): Array[File] = fileSet match {
-    case None     => Array()
-    case Some(fs) =>
-      def shouldExclude(name: String) = (name endsWith ".obj") || (name startsWith ".")
-
-      val dirTests: Iterator[SPath] = fileSetToDir(fs).dirs filterNot (x => shouldExclude(x.name))
-      val dirResult = dirTests.toList.toArray map (_.jfile)
-
-      dirResult
-  }
-
-
-  private def getPosFiles          = getFilesAndDirs(posFiles)
-  private def getNegFiles          = getFilesAndDirs(negFiles)
-  private def getRunFiles          = getFilesAndDirs(runFiles)
-  private def getJvmFiles          = getFilesAndDirs(jvmFiles)
-  private def getResidentFiles     = getFiles(residentFiles)
-  private def getBuildManagerFiles = getFilesAndDirs(buildManagerFiles)
-  private def getScalacheckFiles   = getFilesAndDirs(scalacheckFiles)
-  private def getScriptFiles       = getFiles(scriptFiles)
-  private def getShootoutFiles     = getFiles(shootoutFiles)
-  private def getScalapFiles       = getFiles(scalapFiles)
-  private def getSpecializedFiles  = getFiles(specializedFiles)
-  private def getInstrumentedFiles = getFilesAndDirs(instrumentedFiles)
-  private def getPresentationFiles = getDirs(presentationFiles)
-  private def getAntFiles          = getFiles(antFiles)
-
-  override def execute() {
-    val opts = getProject().getProperties() get "env.PARTEST_OPTS"
-    if (opts != null && opts.toString != "")
-      opts.toString.split(" ") foreach { propDef =>
-        log("setting system property " + propDef)
-        val kv = propDef split "="
-        val key = kv(0) substring 2
-        val value = kv(1)
-        setProp(key, value)
-      }
-
-    if (isPartestDebug || debug) {
-      setProp("partest.debug", "true")
-      nest.NestUI._verbose = true
-    }
-
-    srcDir foreach (x => setProp("partest.srcdir", x))
-
-    val classpath = this.compilationPath getOrElse sys.error("Mandatory attribute 'compilationPath' is not set.")
-
-    val scalaLibrary = {
-      (classpath.list map { fs => new File(fs) }) find { f =>
-        f.getName match {
-          case "scala-library.jar" => true
-          case "library" if (f.getParentFile.getName == "classes") => true
-          case _ => false
-        }
-      }
-    } getOrElse sys.error("Provided classpath does not contain a Scala library.")
-
-    val scalaReflect = {
-      (classpath.list map { fs => new File(fs) }) find { f =>
-        f.getName match {
-          case "scala-reflect.jar" => true
-          case "reflect" if (f.getParentFile.getName == "classes") => true
-          case _ => false
-        }
-      }
-    } getOrElse sys.error("Provided classpath does not contain a Scala reflection library.")
-
-    val scalaCompiler = {
-      (classpath.list map { fs => new File(fs) }) find { f =>
-        f.getName match {
-          case "scala-compiler.jar" => true
-          case "compiler" if (f.getParentFile.getName == "classes") => true
-          case _ => false
-        }
-      }
-    } getOrElse sys.error("Provided classpath does not contain a Scala compiler.")
-
-    val scalaPartest = {
-      (classpath.list map { fs => new File(fs) }) find { f =>
-        f.getName match {
-          case "scala-partest.jar" => true
-          case "partest" if (f.getParentFile.getName == "classes") => true
-          case _ => false
-        }
-      }
-    } getOrElse sys.error("Provided classpath does not contain a Scala partest.")
-
-    val scalaActors = {
-      (classpath.list map { fs => new File(fs) }) find { f =>
-        f.getName match {
-          case "scala-actors.jar" => true
-          case "actors" if (f.getParentFile.getName == "classes") => true
-          case _ => false
-        }
-      }
-    } getOrElse sys.error("Provided classpath does not contain a Scala actors.")
-
-    def scalacArgsFlat: Option[Seq[String]] = scalacArgs map (_ flatMap { a =>
-      val parts = a.getParts
-      if(parts eq null) Seq[String]() else parts.toSeq
-    })
-
-    val antRunner = new scala.tools.partest.nest.AntRunner
-    val antFileManager = antRunner.fileManager
-
-    // this is a workaround for https://issues.scala-lang.org/browse/SI-5433
-    // when that bug is fixed, this paragraph of code can be safely removed
-    // we hack into the classloader that will become parent classloader for scalac
-    // this way we ensure that reflective macro lookup will pick correct Code.lift
-    val loader = getClass.getClassLoader.asInstanceOf[org.apache.tools.ant.AntClassLoader]
-    val path = new org.apache.tools.ant.types.Path(getProject())
-    val newClassPath = ClassPath.join(nest.PathSettings.srcCodeLib.toString, loader.getClasspath)
-    path.setPath(newClassPath)
-    loader.setClassPath(path)
-
-    antFileManager.showDiff = showDiff
-    antFileManager.showLog = showLog
-    antFileManager.failed = runFailed
-    antFileManager.CLASSPATH = ClassPath.join(classpath.list: _*)
-    antFileManager.LATEST_LIB = scalaLibrary.getAbsolutePath
-    antFileManager.LATEST_REFLECT = scalaReflect.getAbsolutePath
-    antFileManager.LATEST_COMP = scalaCompiler.getAbsolutePath
-    antFileManager.LATEST_PARTEST = scalaPartest.getAbsolutePath
-    antFileManager.LATEST_ACTORS = scalaActors.getAbsolutePath
-
-    javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
-    javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
-    scalacArgsFlat foreach (antFileManager.SCALAC_OPTS ++= _)
-    timeout foreach (antFileManager.timeout = _)
-
-    type TFSet = (Array[File], String, String)
-    val testFileSets = List(
-      (getPosFiles, "pos", "Compiling files that are expected to build"),
-      (getNegFiles, "neg", "Compiling files that are expected to fail"),
-      (getRunFiles, "run", "Compiling and running files"),
-      (getJvmFiles, "jvm", "Compiling and running files"),
-      (getResidentFiles, "res", "Running resident compiler scenarii"),
-      (getBuildManagerFiles, "buildmanager", "Running Build Manager scenarii"),
-      (getScalacheckFiles, "scalacheck", "Running scalacheck tests"),
-      (getScriptFiles, "script", "Running script files"),
-      (getShootoutFiles, "shootout", "Running shootout tests"),
-      (getScalapFiles, "scalap", "Running scalap tests"),
-      (getSpecializedFiles, "specialized", "Running specialized files"),
-      (getInstrumentedFiles, "instrumented", "Running instrumented files"),
-      (getPresentationFiles, "presentation", "Running presentation compiler test files"),
-      (getAntFiles, "ant", "Running ant task tests")
-    )
-
-    def runSet(set: TFSet): (Int, Int, Iterable[String]) = {
-      val (files, name, msg) = set
-      if (files.isEmpty) (0, 0, List())
-      else {
-        log(msg)
-        val results: Iterable[(String, TestState)] = antRunner.reflectiveRunTestsForFiles(files, name)
-        val (succs, fails) = resultsToStatistics(results)
-
-        val failed: Iterable[String] = results collect {
-          case (path, TestState.Fail)    => path + " [FAILED]"
-          case (path, TestState.Timeout) => path + " [TIMEOUT]"
-        }
-
-        // create JUnit report XML files if a directory was specified
-        jUnitReportDir foreach { d =>
-          d.mkdir
-
-          val report = testReport(name, results, succs, fails)
-          scala.xml.XML.save(d.getAbsolutePath+"/"+name+".xml", report)
-        }
-
-        (succs, fails, failed)
-      }
-    }
-
-    val _results = testFileSets map runSet
-    val allSuccesses = _results map (_._1) sum
-    val allFailures = _results map (_._2) sum
-    val allFailedPaths = _results flatMap (_._3)
-
-    def f = if (errorOnFailed && allFailures > 0) buildError(_: String) else log(_: String)
-    def s = if (allFailures > 1) "s" else ""
-    val msg =
-      if (allFailures > 0)
-        "Test suite finished with %d case%s failing:\n".format(allFailures, s)+
-        allFailedPaths.mkString("\n")
-      else if (allSuccesses == 0) "There were no tests to run."
-      else "Test suite finished with no failures."
-
-    f(msg)
-  }
-
-  private def oneResult(res: (String, TestState)) =
-    <testcase name={res._1}>{
-      res._2 match {
-        case TestState.Ok      => scala.xml.NodeSeq.Empty
-        case TestState.Fail    => <failure message="Test failed"/>
-        case TestState.Timeout => <failure message="Test timed out"/>
-      }
-    }</testcase>
-
-  private def testReport(kind: String, results: Iterable[(String, TestState)], succs: Int, fails: Int) =
-    <testsuite name={kind} tests={(succs + fails).toString} failures={fails.toString}>
-      <properties/>
-      {
-        results.map(oneResult(_))
-      }
-    </testsuite>
-}
diff --git a/src/partest/scala/tools/partest/ReplTest.scala b/src/partest/scala/tools/partest/ReplTest.scala
deleted file mode 100644
index edd1f70..0000000
--- a/src/partest/scala/tools/partest/ReplTest.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import scala.tools.nsc.Settings
-import scala.tools.nsc.interpreter.ILoop
-import java.lang.reflect.{ Method => JMethod, Field => JField }
-
-/** A trait for testing repl code.  It drops the first line
- *  of output because the real repl prints a version number.
- */
-abstract class ReplTest extends DirectTest {
-  // override to transform the Settings object as the final step before it is returned
-  def transformSettings(s: Settings): Settings = s
-  // final because we need to enforce the existence of a couple settings.
-  final override def settings: Settings = {
-    val s = super.settings
-    // s.Yreplsync.value = true
-    s.Xnojline.value = true
-    transformSettings(s)
-  }
-  def eval() = {
-    val s = settings
-    log("eval(): settings = " + s)
-    ILoop.runForTranscript(code, s).lines drop 1
-  }
-  def show() = eval() foreach println
-}
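
A repl test built on the trait above is normally just an object that supplies the transcript through `code`; a minimal sketch (the object name, snippet contents, and stripMargin layout are illustrative, not mandated by this file):

    import scala.tools.partest.ReplTest

    object Test extends ReplTest {
      // each line is fed to the repl; eval() drops the repl's version banner line
      def code = """
        |val x = 1 + 1
        |println(x)
        """.stripMargin
    }
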
diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala
deleted file mode 100644
index b9abff6..0000000
--- a/src/partest/scala/tools/partest/ScaladocModelTest.scala
+++ /dev/null
@@ -1,205 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Vlad Ureche
- */
-
-package scala.tools.partest
-
-import scala.tools.partest._
-import java.io._
-import scala.tools.nsc._
-import scala.tools.nsc.util.CommandLineParser
-import scala.tools.nsc.doc.{Settings, DocFactory, Universe}
-import scala.tools.nsc.doc.model._
-import scala.tools.nsc.doc.model.diagram._
-import scala.tools.nsc.doc.base.comment._
-import scala.tools.nsc.reporters.ConsoleReporter
-
-/** A class for testing scaladoc model generation
- *   - you need to specify the code in the `code` method
- *   - you need to override the testModel method to test the model
- *   - you may specify extra parameters to send to scaladoc in `scaladocSettings`
- * {{{
-      import scala.tools.nsc.doc.model._
-      import scala.tools.partest.ScaladocModelTest
-
-      object Test extends ScaladocModelTest {
-
-        override def code = """ ... """ // or override def resourceFile = "<file>.scala" (from test/scaladoc/resources)
-        def scaladocSettings = " ... "
-        def testModel(rootPackage: Package) = {
-          // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), _object(s), _method(s), _value(s))
-          import access._
-
-          // just need to check the member exists, access methods will throw an error if there's a problem
-          rootPackage._package("scala")._package("test")._class("C")._method("foo")
-        }
-      }
- * }}}
- */
-abstract class ScaladocModelTest extends DirectTest {
-
-  /** Override this to give scaladoc command line parameters */
-  def scaladocSettings: String
-
-  /** Override this to test the model */
-  def testModel(root: Package): Unit
-
-  /** Override to feed a file in resources to scaladoc */
-  def resourceFile: String = null
-
-  /** Override to feed code into scaladoc */
-  override def code =
-    if (resourceFile ne null)
-      io.File(resourcePath + "/" + resourceFile).slurp()
-    else
-      sys.error("Scaladoc Model Test: You need to give a file or some code to feed to scaladoc!")
-
-  def resourcePath = io.Directory(sys.props("partest.cwd") + "/../resources")
-
-  // Implementation follows:
-  override def extraSettings: String = "-usejavacp"
-
-  override def show(): Unit = {
-    // redirect err to out, for logging
-    val prevErr = System.err
-    System.setErr(System.out)
-
-    try {
-      // 1 - compile with scaladoc and get the model out
-      val universe = model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")})
-      // 2 - check the model generated
-      testModel(universe.rootPackage)
-      println("Done.")
-    } catch {
-      case e: Exception =>
-        println(e)
-        e.printStackTrace
-    }
-    // set err back to the real err handler
-    System.setErr(prevErr)
-  }
-
-  private[this] var settings: Settings = null
-
-  // create a new scaladoc compiler
-  private[this] def newDocFactory: DocFactory = {
-    settings = new Settings(_ => ())
-    settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"!
-    val args = extraSettings + " " + scaladocSettings
-    val command = new ScalaDoc.Command((CommandLineParser tokenize (args)), settings)
-    val docFact = new DocFactory(new ConsoleReporter(settings), settings)
-    docFact
-  }
-
-  // compile with scaladoc and output the result
-  def model: Option[Universe] = newDocFactory.makeUniverse(Right(code))
-
-  // so we don't get the newSettings warning
-  override def isDebug = false
-
-
-  // finally, enable easy navigation inside the entities
-  object access {
-
-    implicit class TemplateAccess(tpl: DocTemplateEntity) {
-      def _class(name: String): DocTemplateEntity = getTheFirst(_classes(name), tpl.qualifiedName + ".class(" + name + ")")
-      def _classes(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case c: DocTemplateEntity with Class => c})
-
-      def _classMbr(name: String): MemberTemplateEntity = getTheFirst(_classesMbr(name), tpl.qualifiedName + ".classMember(" + name + ")")
-      def _classesMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case c: MemberTemplateEntity if c.isClass => c})
-
-      def _trait(name: String): DocTemplateEntity = getTheFirst(_traits(name), tpl.qualifiedName + ".trait(" + name + ")")
-      def _traits(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: DocTemplateEntity with Trait => t})
-
-      def _traitMbr(name: String): MemberTemplateEntity = getTheFirst(_traitsMbr(name), tpl.qualifiedName + ".traitMember(" + name + ")")
-      def _traitsMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: MemberTemplateEntity if t.isTrait => t})
-
-      def _object(name: String): DocTemplateEntity = getTheFirst(_objects(name), tpl.qualifiedName + ".object(" + name + ")")
-      def _objects(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case o: DocTemplateEntity with Object => o})
-
-      def _objectMbr(name: String): MemberTemplateEntity = getTheFirst(_objectsMbr(name), tpl.qualifiedName + ".objectMember(" + name + ")")
-      def _objectsMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case o: MemberTemplateEntity if o.isObject => o})
-
-      def _method(name: String): Def = getTheFirst(_methods(name), tpl.qualifiedName + ".method(" + name + ")")
-      def _methods(name: String): List[Def] = tpl.methods.filter(_.name == name)
-
-      def _value(name: String): Val = getTheFirst(_values(name), tpl.qualifiedName + ".value(" + name + ")")
-      def _values(name: String): List[Val] = tpl.values.filter(_.name == name)
-
-      def _conversion(name: String): ImplicitConversion = getTheFirst(_conversions(name), tpl.qualifiedName + ".conversion(" + name + ")")
-      def _conversions(name: String): List[ImplicitConversion] = tpl.conversions.filter(_.conversionQualifiedName == name)
-
-      def _absType(name: String): MemberEntity = getTheFirst(_absTypes(name), tpl.qualifiedName + ".abstractType(" + name + ")")
-      def _absTypes(name: String): List[MemberEntity] = tpl.members.filter(mbr => mbr.name == name && mbr.isAbstractType)
-
-      def _absTypeTpl(name: String): DocTemplateEntity = getTheFirst(_absTypeTpls(name), tpl.qualifiedName + ".abstractType(" + name + ")")
-      def _absTypeTpls(name: String): List[DocTemplateEntity] = tpl.members.collect({ case dtpl: DocTemplateEntity with AbstractType if dtpl.name == name => dtpl })
-
-      def _aliasType(name: String): MemberEntity = getTheFirst(_aliasTypes(name), tpl.qualifiedName + ".aliasType(" + name + ")")
-      def _aliasTypes(name: String): List[MemberEntity] = tpl.members.filter(mbr => mbr.name == name && mbr.isAliasType)
-
-      def _aliasTypeTpl(name: String): DocTemplateEntity = getTheFirst(_aliasTypeTpls(name), tpl.qualifiedName + ".aliasType(" + name + ")")
-      def _aliasTypeTpls(name: String): List[DocTemplateEntity] = tpl.members.collect({ case dtpl: DocTemplateEntity with AliasType if dtpl.name == name => dtpl })
-    }
-
-    trait WithMembers {
-      def members: List[MemberEntity]
-      def _member(name: String): MemberEntity = getTheFirst(_members(name), this.toString + ".member(" + name + ")")
-      def _members(name: String): List[MemberEntity] = members.filter(_.name == name)
-    }
-    implicit class PackageAccess(pack: Package) extends TemplateAccess(pack) {
-      def _package(name: String): Package = getTheFirst(_packages(name), pack.qualifiedName + ".package(" + name + ")")
-      def _packages(name: String): List[Package] = pack.packages.filter(_.name == name)
-    }
-    implicit class DocTemplateEntityMembers(val underlying: DocTemplateEntity) extends WithMembers {
-      def members = underlying.members
-    }
-    implicit class ImplicitConversionMembers(val underlying: ImplicitConversion) extends WithMembers {
-      def members = underlying.members
-    }
-
-    def getTheFirst[T](list: List[T], expl: String): T = list.length match {
-      case 1 => list.head
-      case 0 => sys.error("Error getting " + expl + ": No such element.")
-      case _ => sys.error("Error getting " + expl + ": " + list.length + " elements with this name. " +
-                  "All elements in list: [" + list.map({
-                    case ent: Entity => ent.kind + " " + ent.qualifiedName
-                    case other => other.toString
-                  }).mkString(", ") + "]")
-    }
-
-    def extractCommentText(c: Any) = {
-      def extractText(body: Any): String = body match {
-        case s: String  => s
-        case s: Seq[_]  => s.toList.map(extractText(_)).mkString
-        case p: Product => p.productIterator.toList.map(extractText(_)).mkString
-        case _          => ""
-      }
-      c match {
-        case c: Comment =>
-          extractText(c.body)
-        case b: Body =>
-          extractText(b)
-      }
-    }
-
-    def countLinks(c: Comment, p: EntityLink => Boolean) = {
-      def countLinks(body: Any): Int = body match {
-        case el: EntityLink if p(el) => 1
-        case s: Seq[_]  => s.toList.map(countLinks(_)).sum
-        case p: Product => p.productIterator.toList.map(countLinks(_)).sum
-        case _          => 0
-      }
-      countLinks(c.body)
-    }
-
-    def testDiagram(doc: DocTemplateEntity, diag: Option[Diagram], nodes: Int, edges: Int) = {
-      assert(diag.isDefined, doc.qualifiedName + " diagram missing")
-      assert(diag.get.nodes.length == nodes,
-             doc.qualifiedName + "'s diagram: node count " + diag.get.nodes.length + " == " + nodes)
-      assert(diag.get.edges.map(_._2.length).sum == edges,
-             doc.qualifiedName + "'s diagram: edge count " + diag.get.edges.map(_._2.length).sum + " == " + edges)
-    }
-  }
-}
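
As a complement to the example embedded in the class comment above, here is a sketch of a test that also exercises the comment helpers (extractCommentText, countLinks) from the `access` object; the package, class, and method names and the snippet of code are made up for illustration.

    import scala.tools.nsc.doc.model._
    import scala.tools.partest.ScaladocModelTest

    object Test extends ScaladocModelTest {
      override def code =
        "package scala.test { class C { /** Adds one. See [[scala.Option]]. */ def foo(i: Int) = i + 1 } }"
      def scaladocSettings = ""
      def testModel(rootPackage: Package) = {
        import access._
        // navigate down to the documented method using the helpers defined above
        val foo = rootPackage._package("scala")._package("test")._class("C")._method("foo")
        assert(extractCommentText(foo.comment.get) contains "Adds one")
        assert(countLinks(foo.comment.get, _ => true) >= 0)
      }
    }
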
diff --git a/src/partest/scala/tools/partest/SecurityTest.scala b/src/partest/scala/tools/partest/SecurityTest.scala
deleted file mode 100644
index 2d6f61d..0000000
--- a/src/partest/scala/tools/partest/SecurityTest.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import java.security._
-import java.util._
-
-abstract class SecurityTest extends App {
-  def throwIt(x: Any) = throw new AccessControlException("" + x)
-
-  def readPerm(p: PropertyPermission)            = p.getActions contains "read"
-  def writePerm(p: PropertyPermission)           = p.getActions contains "write"
-  def propertyCheck(p: PropertyPermission): Unit = throwIt(p)
-
-  def check(perm: Permission): Unit = perm match {
-    case p: PropertyPermission  => propertyCheck(p)
-    case _                      => ()
-  }
-
-  lazy val sm = new SecurityManager {
-    // these two are the choke points for all permissions checks
-    override def checkPermission(perm: Permission): Unit = check(perm)
-    override def checkPermission(perm: Permission, context: Object): Unit = check(perm)
-  }
-  def securityOn(): Boolean = {
-    try   { System.setSecurityManager(sm) ; true }
-    catch { case _: SecurityException => false }
-  }
-}
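
A sketch of how a test might build on the class above: since it extends App, the object body runs as the test's main, and any property access after securityOn() is rejected by the check defined there (the property name here is arbitrary).

    object Test extends scala.tools.partest.SecurityTest {
      assert(securityOn(), "could not install the SecurityManager defined above")
      try {
        System.getProperty("user.home")   // triggers a PropertyPermission("user.home", "read") check
        println("property read unexpectedly allowed")
      } catch {
        case _: java.security.AccessControlException => println("property read denied, as expected")
      }
    }
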
diff --git a/src/partest/scala/tools/partest/StoreReporterDirectTest.scala b/src/partest/scala/tools/partest/StoreReporterDirectTest.scala
deleted file mode 100644
index 7f3604c..0000000
--- a/src/partest/scala/tools/partest/StoreReporterDirectTest.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package scala.tools.partest
-
-import scala.tools.nsc.Settings
-import scala.tools.nsc.reporters.StoreReporter
-import scala.collection.mutable
-
-trait StoreReporterDirectTest extends DirectTest {
-  lazy val storeReporter: StoreReporter = new scala.tools.nsc.reporters.StoreReporter()
-
-  /** Discards all but the first message issued at a given position. */
-  def filteredInfos: Seq[storeReporter.Info] = storeReporter.infos.groupBy(_.pos).map(_._2.head).toList
-
-  /** Hook into [[scala.tools.partest.DirectTest]] to install the custom reporter */
-  override def reporter(settings: Settings) = storeReporter
-}
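
A sketch of the intended use: mix the trait into a test so compiler diagnostics land in storeReporter instead of the console. The erroneous snippet and the call to DirectTest's compile() helper are assumptions of this sketch.

    import scala.tools.partest.StoreReporterDirectTest

    object Test extends StoreReporterDirectTest {
      def code = """class C { def f: Int = "not an Int" }"""
      def show(): Unit = {
        compile()   // errors are captured by storeReporter rather than printed
        filteredInfos foreach (i => println(i.severity + ": " + i.msg))
      }
    }
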
diff --git a/src/partest/scala/tools/partest/TestUtil.scala b/src/partest/scala/tools/partest/TestUtil.scala
deleted file mode 100644
index 9bfd444..0000000
--- a/src/partest/scala/tools/partest/TestUtil.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package scala.tools.partest
-
-import scala.reflect.{ classTag, ClassTag }
-
-trait TestUtil {
-  /** Given a function and a block of code, evaluates the code block,
-   *  calls the function with the elapsed nanoseconds, and returns the block's result.
-   */
-  def timed[T](f: Long => Unit)(body: => T): T = {
-    val start = System.nanoTime
-    val result = body
-    val end = System.nanoTime
-
-    f(end - start)
-    result
-  }
-  /** Times body and returns (nanos, result).
-   */
-  def alsoNanos[T](body: => T): (Long, T) = {
-    var nanos = 0L
-    val result = timed(nanos = _)(body)
-
-    (nanos, result)
-  }
-  def nanos(body: => Unit): Long = alsoNanos(body)._1
-
-  def verifySpeed(body1: => Unit, body2: => Unit, acceptableMultiple: Double) = {
-    val t1 = nanos(body1).toDouble
-    val t2 = nanos(body2).toDouble
-    val mult = if (t1 > t2) t1 / t2 else t2 / t1
-
-    assert(mult <= acceptableMultiple, "Performance difference too great: multiple = " + mult)
-  }
-
-  def intercept[T <: Exception : ClassTag](code: => Unit): Unit =
-    try {
-      code
-      assert(false, "did not throw " + classTag[T])
-    } catch {
-      case ex: Exception if classTag[T].runtimeClass isInstance ex =>
-    }
-}
-
-object TestUtil extends TestUtil {
-
-}
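
For illustration, the timing and interception helpers above can be used as follows; the timed block and the expected exception are arbitrary examples.

    import scala.tools.partest.TestUtil._

    object Example {
      def main(args: Array[String]): Unit = {
        // time an arbitrary block; alsoNanos returns (elapsed nanoseconds, block result)
        val (elapsed, sum) = alsoNanos((1 to 1000000).sum)
        println("summed to " + sum + " in " + elapsed + " ns")

        // succeeds only because the block throws the expected exception type
        intercept[ArithmeticException](1 / 0)
      }
    }
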
diff --git a/src/partest/scala/tools/partest/antlib.xml b/src/partest/scala/tools/partest/antlib.xml
deleted file mode 100644
index b3b98e8..0000000
--- a/src/partest/scala/tools/partest/antlib.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-<antlib>
-    <taskdef name="partest"
-             classname="scala.tools.partest.PartestTask"/>
-</antlib>
diff --git a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala
deleted file mode 100644
index 8a284b3..0000000
--- a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Grzegorz Kossakowski
- */
-
-package scala.tools.partest.instrumented
-
-import scala.collection.JavaConverters._
-
-case class MethodCallTrace(className: String, methodName: String, methodDescriptor: String) {
-  override def toString(): String = className + "." + methodName + methodDescriptor
-}
-object MethodCallTrace {
-  implicit val ordering: Ordering[MethodCallTrace] = Ordering.by(x => (x.className, x.methodName, x.methodDescriptor))
-}
-
-/**
- * An object that controls profiling of instrumented byte-code. The instrumentation is achieved
- * by using `java.lang.instrument` package. The instrumentation agent can be found in
- * `scala.tools.partest.javaagent` package.
- *
- * At the moment the following classes are being instrumented:
- *   * all classes with empty package
- *   * all classes from scala package (except for classes responsible for instrumentation)
- *
- * The canonical way of using instrumentation is to have a test-case in the `files/instrumented` directory.
- * The following code in main:
- *
- * {{{
- * import scala.tools.partest.instrumented.Instrumentation._
- * def main(args: Array[String]): Unit = {
- *   startProfiling()
- *   // should box the boolean
- *   println(true)
- *   stopProfiling()
- *   printStatistics()
- * }
- * }}}
- *
- *
- * should print:
- *
- * {{{
- * true
- * Method call statistics:
- * scala/Predef$.println(Ljava/lang/Object;)V: 1
- * scala/runtime/BoxesRunTime.boxToBoolean(Z)Ljava/lang/Boolean;: 1
- * }}}
- */
-object Instrumentation {
-
-  type Statistics = Map[MethodCallTrace, Int]
-
-  def startProfiling(): Unit = Profiler.startProfiling()
-  def stopProfiling(): Unit = Profiler.stopProfiling()
-  def resetProfiling(): Unit = Profiler.resetProfiling()
-  def isProfiling(): Boolean = Profiler.isProfiling()
-
-  def getStatistics: Statistics = {
-    val isProfiling = Profiler.isProfiling()
-    if (isProfiling) {
-      Profiler.stopProfiling()
-    }
-    val stats = Profiler.getStatistics().asScala.toSeq.map {
-      case (trace, count) => MethodCallTrace(trace.className, trace.methodName, trace.methodDescriptor) -> count.intValue
-    }
-    val res = Map(stats: _*)
-    if (isProfiling) {
-      Profiler.startProfiling()
-    }
-    res
-  }
-
-  val standardFilter: MethodCallTrace => Boolean = t => {
-    // ignore all calls to Console triggered by printing
-    t.className != "scala/Console$" &&
-    // console accesses DynamicVariable, let's discard it too
-    !t.className.startsWith("scala/util/DynamicVariable")
-  }
-
-  def printStatistics(stats: Statistics = getStatistics, filter: MethodCallTrace => Boolean = standardFilter): Unit = {
-    println("Method call statistics:")
-    val toBePrinted = stats.toSeq.filter(p => filter(p._1)).sortBy(_._1)
-    // <count> <trace>
-    val format = "%5d  %s\n"
-    toBePrinted foreach {
-      case (trace, count) => printf(format, count, trace)
-    }
-  }
-
-}
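
In addition to the transcript shown in the object's comment, the statistics can be fetched and filtered explicitly; a minimal sketch (the printed value is arbitrary, and the profiling agent must be on the JVM command line as usual for instrumented tests):

    import scala.tools.partest.instrumented.Instrumentation._

    object Test {
      def main(args: Array[String]): Unit = {
        startProfiling()
        println(true)   // boxes the Boolean, so BoxesRunTime should appear in the statistics
        stopProfiling()
        // pass the standard filter explicitly instead of relying on the defaults
        printStatistics(getStatistics, standardFilter)
      }
    }
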
diff --git a/src/partest/scala/tools/partest/instrumented/Profiler.java b/src/partest/scala/tools/partest/instrumented/Profiler.java
deleted file mode 100644
index e267e19..0000000
--- a/src/partest/scala/tools/partest/instrumented/Profiler.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Grzegorz Kossakowski
- */
-
-package scala.tools.partest.instrumented;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * A simple profiler class that counts method invocations. It is used in byte-code instrumentation by inserting a
- * call to {@link Profiler#methodCalled(String, String, String)} at the beginning of every instrumented method.
- *
- * WARNING: This class is an INTERNAL implementation detail and should never be used directly. It's made public only
- * because it must be universally accessible for instrumentation needs. If you want to profile your test, use
- * {@link Instrumentation} instead.
- */
-public class Profiler {
-	
-	private static boolean isProfiling = false;
-	private static Map<MethodCallTrace, Integer> counts = new HashMap<MethodCallTrace, Integer>();
-	
-	static public class MethodCallTrace {
-	  final String className;
-	  final String methodName;
-	  final String methodDescriptor;
-	  
-	  public MethodCallTrace(final String className, final String methodName, final String methodDescriptor) {
-	    this.className = className;
-	    this.methodName = methodName;
-	    this.methodDescriptor = methodDescriptor;
-	  }
-	  
-	  @Override
-	  public boolean equals(Object obj) {
-	    if (!(obj instanceof MethodCallTrace)) {
-	      return false;
-	    } else {
-	      MethodCallTrace that = (MethodCallTrace) obj;
-	      return that.className.equals(className) && that.methodName.equals(methodName) && that.methodDescriptor.equals(methodDescriptor);
-	    }
-	  }
-	  @Override
-	  public int hashCode() {
-	    return className.hashCode() ^ methodName.hashCode() ^ methodDescriptor.hashCode();
-	  }
-	}
-	
-	public static void startProfiling() {
-		isProfiling = true;
-	}
-	
-	public static void stopProfiling() {
-		isProfiling = false;
-	}
-	
-	public static boolean isProfiling() {
-	  return isProfiling;
-	}
-
-	public static void resetProfiling() {
-	  counts = new HashMap<MethodCallTrace, Integer>();
-	}
-	
-	public static void methodCalled(final String className, final String methodName, final String methodDescriptor) {
-		if (isProfiling) {
-		  MethodCallTrace trace = new MethodCallTrace(className, methodName, methodDescriptor);
-			Integer counter = counts.get(trace);
-			if (counter == null) {
-				counts.put(trace, 1);
-			} else {
-				counts.put(trace, counter+1);
-			}
-		}
-	}
-	
-	public static Map<MethodCallTrace, Integer> getStatistics() {
-	  return new HashMap<MethodCallTrace, Integer>(counts);
-	}
-
-}
diff --git a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
deleted file mode 100644
index 878c861..0000000
--- a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Grzegorz Kossakowski
- */
-
-package scala.tools.partest.javaagent;
-
-import java.lang.instrument.ClassFileTransformer;
-import java.security.ProtectionDomain;
-
-import scala.tools.asm.ClassReader;
-import scala.tools.asm.ClassWriter;
-
-public class ASMTransformer implements ClassFileTransformer {
-  
-  private boolean shouldTransform(String className) {
-    return 
-        // do not instrument instrumentation logic (in order to avoid infinite recursion)
-        !className.startsWith("scala/tools/partest/instrumented/") &&
-        !className.startsWith("scala/tools/partest/javaagent/") &&
-        // we instrument all classes from empty package
-        (!className.contains("/") ||
-        // we instrument all classes from scala package
-        className.startsWith("scala/") ||
-        // we instrument all classes from `instrumented` package
-        className.startsWith("instrumented/"));
-  }
-	
-	public byte[] transform(final ClassLoader classLoader, final String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) {
-	  if (shouldTransform(className)) {
-	    ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS) {
-	      @Override protected String getCommonSuperClass(final String type1, final String type2) {
-	        // Since we are not recomputing the stack frame map, this should never be called. We override this method
-	        // because the default implementation uses reflection and might try to load the class that we are
-	        // currently processing. That leads to weird results like swallowed exceptions and classes not being
-	        // transformed.
-	        throw new RuntimeException("Unexpected call to getCommonSuperClass(" + type1 + ", " + type2 +
-	            ") while transforming " + className);
-	      }
-	    };
-  		ProfilerVisitor visitor = new ProfilerVisitor(writer);
-  		ClassReader reader = new ClassReader(classfileBuffer);
-  		reader.accept(visitor, 0);
-  		return writer.toByteArray();
-	  } else {
-	    return classfileBuffer;
-	  }
-	}
-}
diff --git a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java b/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java
deleted file mode 100644
index 8306327..0000000
--- a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Grzegorz Kossakowski
- */
-
-package scala.tools.partest.javaagent;
-
-import scala.tools.asm.ClassVisitor;
-import scala.tools.asm.MethodVisitor;
-import scala.tools.asm.Opcodes;
-
-public class ProfilerVisitor extends ClassVisitor implements Opcodes {
-  
-  private static String profilerClass = "scala/tools/partest/instrumented/Profiler";
-
-  public ProfilerVisitor(final ClassVisitor cv) {
-    super(ASM4, cv);
-  }
-
-  private String className = null;
-
-  @Override
-  public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
-    className = name;
-    super.visit(version, access, name, signature, superName, interfaces);
-  }
-
-  public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
-    // delegate the method call to the next
-    // chained visitor
-    MethodVisitor mv = cv.visitMethod(access, name, desc, signature, exceptions);
-    if (!profilerClass.equals(className)) {
-      // only instrument non-abstract methods
-      if((access & ACC_ABSTRACT) == 0) {
-        assert(className != null);
-        /* The following instructions do not modify the compressed stack frame map, so
-         * we don't need to worry about recalculating it. Specifically,
-         * let's quote "ASM 4.0, A Java bytecode engineering library" guide (p. 40):
-         *
-         *   In order to save space, a compiled method does not contain one frame per
-         *   instruction: in fact it contains only the frames for the instructions
-         *   that correspond to jump targets or exception handlers, or that follow
-         *   unconditional jump instructions. Indeed the other frames can be easily
-         *   and quickly inferred from these ones.
-         *
-         * Instructions below are just loading constants and calling a method so according
-         * to definition above they do not contribute to compressed stack frame map.
-         */
-        mv.visitLdcInsn(className);
-        mv.visitLdcInsn(name);
-        mv.visitLdcInsn(desc);
-        mv.visitMethodInsn(INVOKESTATIC, profilerClass, "methodCalled",
-            "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
-      }
-    }
-    return mv; 
-  }
-
-}
diff --git a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java b/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
deleted file mode 100644
index 3b18987..0000000
--- a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Grzegorz Kossakowski
- */
-
-package scala.tools.partest.javaagent;
-
-import java.lang.instrument.Instrumentation;
-import java.lang.instrument.UnmodifiableClassException;
-
-/**
- * Profiling agent that instruments byte-code to insert calls to
- * {@link scala.tools.partest.instrumented.Profiler#methodCalled(String, String, String)}
- * by using the ASM library for byte-code manipulation.
- */
-public class ProfilingAgent {
-	public static void premain(String args, Instrumentation inst) throws UnmodifiableClassException {
-	  // NOTE: we are adding a transformer that won't be applied to classes that are already loaded.
-	  // This should be ok because premain should be executed before main, so the Scala library
-	  // and the test-case itself won't be loaded yet. We rely here on the fact that ASMTransformer does
-	  // not depend on the Scala library. In case our assumptions are wrong, we can always insert a call to
-	  // inst.retransformClasses.
-	  inst.addTransformer(new ASMTransformer(), false);
-	}
-}
diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala
deleted file mode 100644
index 93045b8..0000000
--- a/src/partest/scala/tools/partest/nest/AntRunner.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala Parallel Testing               **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.tools.partest
-package nest
-
-import java.io.File
-import scala.tools.nsc.io.{ Directory }
-
-class AntRunner extends DirectRunner {
-
-  val fileManager = new FileManager {
-    var JAVACMD: String = "java"
-    var JAVAC_CMD: String = "javac"
-    var CLASSPATH: String = _
-    var LATEST_LIB: String = _
-    var LATEST_REFLECT: String = _
-    var LATEST_COMP: String = _
-    var LATEST_PARTEST: String = _
-    var LATEST_ACTORS: String = _
-    val testRootPath: String = "test"
-    val testRootDir: Directory = Directory(testRootPath)
-  }
-
-  def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String) =
-    runTestsForFiles(kindFiles.toList, kind)
-}
diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala
deleted file mode 100644
index 3d902d6..0000000
--- a/src/partest/scala/tools/partest/nest/CompileManager.scala
+++ /dev/null
@@ -1,164 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError, io }
-import scala.tools.nsc.io.{ File => SFile }
-import scala.tools.nsc.interactive.RangePositions
-import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
-import scala.tools.nsc.util.{ ClassPath, FakePos }
-import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
-import scala.tools.util.PathResolver
-import io.Path
-import java.io.{ File, BufferedReader, PrintWriter, FileReader, Writer, FileWriter, StringWriter }
-import File.pathSeparator
-
-sealed abstract class CompilationOutcome {
-  def merge(other: CompilationOutcome): CompilationOutcome
-  def isPositive = this eq CompileSuccess
-  def isNegative = this eq CompileFailed
-}
-case object CompileSuccess extends CompilationOutcome {
-  def merge(other: CompilationOutcome) = other
-}
-case object CompileFailed extends CompilationOutcome {
-  def merge(other: CompilationOutcome) = if (other eq CompileSuccess) this else other
-}
-case object CompilerCrashed extends CompilationOutcome {
-  def merge(other: CompilationOutcome) = this
-}
-
-class ExtConsoleReporter(settings: Settings, val writer: PrintWriter) extends ConsoleReporter(settings, Console.in, writer) {
-  shortname = true
-}
-
-class TestSettings(cp: String, error: String => Unit) extends Settings(error) {
-  def this(cp: String) = this(cp, _ => ())
-
-  nowarnings.value  = false
-  encoding.value    = "UTF-8"
-  classpath.value   = cp
-}
-
-abstract class SimpleCompiler {
-  def compile(out: Option[File], files: List[File], kind: String, log: File): CompilationOutcome
-}
-
-class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
-  def newGlobal(settings: Settings, reporter: Reporter): Global =
-    if (settings.Yrangepos.value)
-      new Global(settings, reporter) with RangePositions
-    else
-      new Global(settings, reporter)
-
-  def newGlobal(settings: Settings, logWriter: FileWriter): Global =
-    newGlobal(settings, new ExtConsoleReporter(settings, new PrintWriter(logWriter)))
-
-  def newSettings(): TestSettings = new TestSettings(fileManager.LATEST_LIB)
-  def newSettings(outdir: String): TestSettings = {
-    val cp = ClassPath.join(fileManager.LATEST_LIB, outdir)
-    val s = new TestSettings(cp)
-    s.outdir.value = outdir
-
-    s
-  }
-
-  private def updatePluginPath(options: String): String = {
-    val dir = fileManager.testRootDir
-    def absolutize(path: String) = Path(path) match {
-      case x if x.isAbsolute  => x.path
-      case x                  => (fileManager.testRootDir / x).toAbsolute.path
-    }
-
-    val (opt1, opt2) = (options split "\\s").toList partition (_ startsWith "-Xplugin:")
-    val plugins = opt1 map (_ stripPrefix "-Xplugin:") flatMap (_ split pathSeparator) map absolutize
-    val pluginOption = if (opt1.isEmpty) Nil else List("-Xplugin:" + (plugins mkString pathSeparator))
-
-    (opt2 ::: pluginOption) mkString " "
-  }
-
-  def compile(out: Option[File], files: List[File], kind: String, log: File): CompilationOutcome = {
-    val testSettings = out match {
-      case Some(f)  => newSettings(f.getAbsolutePath)
-      case _        => newSettings()
-    }
-    val logWriter = new FileWriter(log)
-
-    // check whether there is a ".flags" file
-    val logFile = basename(log.getName)
-    val flagsFileName = "%s.flags" format (logFile.substring(0, logFile.lastIndexOf("-")))
-    val argString = (io.File(log).parent / flagsFileName) ifFile (x => updatePluginPath(x.slurp())) getOrElse ""
-
-    // slurp local flags (e.g., "A_1.flags")
-    val fstFile = SFile(files(0))
-    def isInGroup(num: Int) = fstFile.stripExtension endsWith ("_" + num)
-    val inGroup = (1 to 9) flatMap (group => if (isInGroup(group)) List(group) else List())
-    val localFlagsList = if (inGroup.nonEmpty) {
-      val localArgString = (fstFile.parent / (fstFile.stripExtension + ".flags")) ifFile (x => updatePluginPath(x.slurp())) getOrElse ""
-      localArgString.split(' ').toList.filter(_.length > 0)
-    } else List()
-
-    val allOpts = fileManager.SCALAC_OPTS.toList ::: argString.split(' ').toList.filter(_.length > 0) ::: localFlagsList
-    val args = allOpts.toList
-
-    NestUI.verbose("scalac options: "+allOpts)
-
-    val command = new CompilerCommand(args, testSettings)
-    val global = newGlobal(command.settings, logWriter)
-    val testRep: ExtConsoleReporter = global.reporter.asInstanceOf[ExtConsoleReporter]
-
-    val testFileFn: (File, FileManager) => TestFile = kind match {
-      case "pos"          => PosTestFile.apply
-      case "neg"          => NegTestFile.apply
-      case "run"          => RunTestFile.apply
-      case "jvm"          => JvmTestFile.apply
-      case "shootout"     => ShootoutTestFile.apply
-      case "scalap"       => ScalapTestFile.apply
-      case "scalacheck"   => ScalaCheckTestFile.apply
-      case "specialized"  => SpecializedTestFile.apply
-      case "instrumented" => InstrumentedTestFile.apply
-      case "presentation" => PresentationTestFile.apply
-      case "ant"          => AntTestFile.apply
-    }
-    val test: TestFile = testFileFn(files.head, fileManager)
-    if (!test.defineSettings(command.settings, out.isEmpty)) {
-      testRep.error(FakePos("partest"), test.flags match {
-        case Some(flags)  => "bad flags: " + flags
-        case _            => "bad settings: " + command.settings
-      })
-    }
-
-    val toCompile = files map (_.getPath)
-
-    try {
-      NestUI.verbose("compiling "+toCompile)
-      NestUI.verbose("with classpath: "+global.classPath.toString)
-      NestUI.verbose("and java classpath: "+ propOrEmpty("java.class.path"))
-      try new global.Run compile toCompile
-      catch {
-        case FatalError(msg) =>
-          testRep.error(null, "fatal error: " + msg)
-          return CompilerCrashed
-      }
-
-      testRep.printSummary()
-      testRep.writer.close()
-    }
-    finally logWriter.close()
-
-    if (testRep.hasErrors) CompileFailed
-    else CompileSuccess
-  }
-}
-
-class CompileManager(val fileManager: FileManager) {
-  private def newCompiler = new DirectCompiler(fileManager)
-  def attemptCompile(outdir: Option[File], sources: List[File], kind: String, log: File): CompilationOutcome =
-    newCompiler.compile(outdir, sources, kind, log)
-}
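
A sketch of driving the compile manager above directly; the ConsoleFileManager removed just below supplies the classpaths, the "build/pack" layout mirrors what the runners use, and the source and log file names are hypothetical.

    import java.io.File
    import scala.tools.partest.nest._

    object CompileSketch {
      def main(args: Array[String]): Unit = {
        val manager = new CompileManager(new ConsoleFileManager("build/pack"))
        val outcome = manager.attemptCompile(
          None,                                          // compile to the default output directory
          List(new File("files/pos/HelloWorld.scala")),  // hypothetical test source
          "pos",
          new File("target/HelloWorld-pos.log"))
        outcome match {
          case CompileSuccess  => println("compiled")
          case CompileFailed   => println("compiler reported errors; see the log")
          case CompilerCrashed => println("compiler crashed with a fatal error")
        }
      }
    }
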
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
deleted file mode 100644
index 08e709d..0000000
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ /dev/null
@@ -1,213 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{ File, FilenameFilter, IOException, StringWriter }
-import java.net.URI
-import scala.util.Properties.{ propOrElse, scalaCmd, scalacCmd }
-import scala.tools.util.PathResolver
-import scala.tools.nsc.{ io, util }
-import util.{ ClassPath }
-import io.{ Path, Directory }
-import File.pathSeparator
-import ClassPath.{ join }
-import PathResolver.{ Environment, Defaults }
-import RunnerUtils._
-
-
-class ConsoleFileManager extends FileManager {
-  var testBuild: Option[String] = PartestDefaults.testBuild
-  def testBuildFile = testBuild map (testParent / _)
-
-  var testClasses: Option[String] = None
-
-  def this(buildPath: String, rawClasses: Boolean) = {
-    this()
-    if (rawClasses)
-      testClasses = Some(buildPath)
-    else
-      testBuild = Some(buildPath)
-    // re-run because the values initialized by the default
-    // constructor must be updated
-    findLatest()
-  }
-
-  def this(buildPath: String) = {
-    this(buildPath, false)
-  }
-
-  def this(buildPath: String, rawClasses: Boolean, moreOpts: String) = {
-    this(buildPath, rawClasses)
-    SCALAC_OPTS = SCALAC_OPTS ++ moreOpts.split(' ').toSeq.filter(_.length > 0)
-  }
-
-  lazy val srcDir        = PathSettings.srcDir
-  lazy val testRootDir   = PathSettings.testRoot
-  lazy val testRootPath  = testRootDir.toAbsolute.path
-  def testParent    = testRootDir.parent
-
-  var CLASSPATH   = PartestDefaults.classPath
-  var JAVACMD     = PartestDefaults.javaCmd
-  var JAVAC_CMD   = PartestDefaults.javacCmd
-
-
-  NestUI.verbose("CLASSPATH: "+CLASSPATH)
-
-  if (!srcDir.isDirectory) {
-    NestUI.failure("Source directory \"" + srcDir.path + "\" not found")
-    sys.exit(1)
-  }
-
-  CLASSPATH = {
-    val libs = (srcDir / Directory("lib")).files filter (_ hasExtension "jar") map (_.toCanonical.path)
-
-    // add all jars in libs
-    (CLASSPATH :: libs.toList) mkString pathSeparator
-  }
-
-  def findLatest() {
-    NestUI.verbose("test parent: "+testParent)
-
-    def prefixFileWith(parent: File, relPath: String) = (io.File(parent) / relPath).toCanonical
-    def prefixFile(relPath: String) = (testParent / relPath).toCanonical
-
-    if (!testClasses.isEmpty) {
-      testClassesDir = Path(testClasses.get).toCanonical.toDirectory
-      NestUI.verbose("Running with classes in "+testClassesDir)
-
-      latestFile        = testClassesDir.parent / "bin"
-      latestLibFile     = testClassesDir / "library"
-      latestActorsFile  = testClassesDir / "library" / "actors"
-      latestReflectFile = testClassesDir / "reflect"
-      latestCompFile    = testClassesDir / "compiler"
-      latestPartestFile = testClassesDir / "partest"
-      latestFjbgFile    = testParent / "lib" / "fjbg.jar"
-    }
-    else if (testBuild.isDefined) {
-      val dir = Path(testBuild.get)
-      NestUI.verbose("Running on "+dir)
-      latestFile        = dir / "bin"
-      latestLibFile     = dir / "lib/scala-library.jar"
-      latestActorsFile  = dir / "lib/scala-actors.jar"
-      latestReflectFile = dir / "lib/scala-reflect.jar"
-      latestCompFile    = dir / "lib/scala-compiler.jar"
-      latestPartestFile = dir / "lib/scala-partest.jar"
-      latestFjbgFile    = testParent / "lib" / "fjbg.jar"
-    }
-    else {
-      def setupQuick() {
-        NestUI.verbose("Running build/quick")
-        latestFile        = prefixFile("build/quick/bin")
-        latestLibFile     = prefixFile("build/quick/classes/library")
-        latestActorsFile  = prefixFile("build/quick/classes/library/actors")
-        latestReflectFile = prefixFile("build/quick/classes/reflect")
-        latestCompFile    = prefixFile("build/quick/classes/compiler")
-        latestPartestFile = prefixFile("build/quick/classes/partest")
-      }
-
-      def setupInst() {
-        NestUI.verbose("Running dist (installed)")
-        val p = testParent.getParentFile
-        latestFile        = prefixFileWith(p, "bin")
-        latestLibFile     = prefixFileWith(p, "lib/scala-library.jar")
-        latestActorsFile  = prefixFileWith(p, "lib/scala-actors.jar")
-        latestReflectFile = prefixFileWith(p, "lib/scala-reflect.jar")
-        latestCompFile    = prefixFileWith(p, "lib/scala-compiler.jar")
-        latestPartestFile = prefixFileWith(p, "lib/scala-partest.jar")
-      }
-
-      def setupDist() {
-        NestUI.verbose("Running dists/latest")
-        latestFile        = prefixFile("dists/latest/bin")
-        latestLibFile     = prefixFile("dists/latest/lib/scala-library.jar")
-        latestActorsFile  = prefixFile("dists/latest/lib/scala-actors.jar")
-        latestReflectFile = prefixFile("dists/latest/lib/scala-reflect.jar")
-        latestCompFile    = prefixFile("dists/latest/lib/scala-compiler.jar")
-        latestPartestFile = prefixFile("dists/latest/lib/scala-partest.jar")
-      }
-
-      def setupPack() {
-        NestUI.verbose("Running build/pack")
-        latestFile        = prefixFile("build/pack/bin")
-        latestLibFile     = prefixFile("build/pack/lib/scala-library.jar")
-        latestActorsFile  = prefixFile("build/pack/lib/scala-actors.jar")
-        latestReflectFile = prefixFile("build/pack/lib/scala-reflect.jar")
-        latestCompFile    = prefixFile("build/pack/lib/scala-compiler.jar")
-        latestPartestFile = prefixFile("build/pack/lib/scala-partest.jar")
-      }
-
-      val dists = testParent / "dists"
-      val build = testParent / "build"
-      // in case of an installed dist, testRootDir is one level deeper
-      val bin = testParent.parent / "bin"
-
-      def mostRecentOf(base: String, names: String*) =
-        names map (x => prefixFile(base + "/" + x).lastModified) reduceLeft (_ max _)
-
-      // detect most recent build
-      val quickTime = mostRecentOf("build/quick/classes", "compiler/compiler.properties", "reflect/reflect.properties", "library/library.properties")
-      val packTime  = mostRecentOf("build/pack/lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
-      val distTime  = mostRecentOf("dists/latest/lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
-      val instTime  = mostRecentOf("lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
-
-      val pairs = Map(
-        (quickTime, () => setupQuick()),
-        (packTime,  () => setupPack()),
-        (distTime,  () => setupDist()),
-        (instTime,  () => setupInst())
-      )
-
-      // run setup based on most recent time
-      pairs(pairs.keys max)()
-
-      latestFjbgFile = prefixFile("lib/fjbg.jar")
-    }
-
-    LATEST_LIB = latestLibFile.getAbsolutePath
-    LATEST_REFLECT = latestReflectFile.getAbsolutePath
-    LATEST_COMP = latestCompFile.getAbsolutePath
-    LATEST_PARTEST = latestPartestFile.getAbsolutePath
-    LATEST_ACTORS = latestActorsFile.getAbsolutePath
-  }
-
-  var LATEST_LIB: String = ""
-  var LATEST_REFLECT: String = ""
-  var LATEST_COMP: String = ""
-  var LATEST_PARTEST: String = ""
-  var LATEST_ACTORS: String = ""
-
-  var latestFile: File = _
-  var latestLibFile: File = _
-  var latestActorsFile: File = _
-  var latestReflectFile: File = _
-  var latestCompFile: File = _
-  var latestPartestFile: File = _
-  var latestFjbgFile: File = _
-  def latestScalapFile: File = (latestLibFile.parent / "scalap.jar").jfile
-  var testClassesDir: Directory = _
-  // initialize above fields
-  findLatest()
-
-  var testFiles: List[io.Path] = Nil
-
-  def getFiles(kind: String, cond: Path => Boolean): List[File] = {
-    def ignoreDir(p: Path) = List("svn", "obj") exists (p hasExtension _)
-
-    val dir = Directory(srcDir / kind)
-
-    if (dir.isDirectory) NestUI.verbose("look in %s for tests" format dir)
-    else NestUI.failure("Directory '%s' not found" format dir)
-
-    val files =
-      if (testFiles.nonEmpty) testFiles filter (_.parent isSame dir)
-      else dir.list filterNot ignoreDir filter cond toList
-
-    ( if (failed) files filter (x => logFileExists(x, kind)) else files ) map (_.jfile)
-  }
-}
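
A sketch of the lookup API above, again assuming the "build/pack" layout; the filter mirrors the standard filter used by the console runner removed below.

    import scala.tools.partest.nest.ConsoleFileManager

    object ListPosTests {
      def main(args: Array[String]): Unit = {
        val fm = new ConsoleFileManager("build/pack")
        // directories and .scala files, as in the runner's standard filter
        val posTests = fm.getFiles("pos", p => p.isDirectory || (p hasExtension "scala"))
        posTests take 5 foreach println
      }
    }
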
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
deleted file mode 100644
index e016fb7..0000000
--- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
+++ /dev/null
@@ -1,239 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{File, PrintStream, FileOutputStream, BufferedReader,
-                InputStreamReader, StringWriter, PrintWriter}
-import utils.Properties._
-import RunnerUtils._
-import scala.tools.nsc.Properties.{ versionMsg, setProp }
-import scala.tools.nsc.util.CommandLineParser
-import scala.tools.nsc.io
-import io.{ Path }
-import scala.collection.{ mutable, immutable }
-
-class ConsoleRunner extends DirectRunner {
-  import PathSettings.{ srcDir, testRoot }
-
-  case class TestSet(kind: String, filter: Path => Boolean, msg: String)
-  private def stdFilter(p: Path) = p.isDirectory || (p hasExtension "scala")
-  private def antFilter(p: Path) = p.isFile && (p endsWith "build.xml")
-
-  val testSets = {
-    val pathFilter: Path => Boolean = x => x.isDirectory || (x hasExtension "scala")
-
-    List(
-      TestSet("pos", stdFilter, "Testing compiler (on files whose compilation should succeed)"),
-      TestSet("neg", stdFilter, "Testing compiler (on files whose compilation should fail)"),
-      TestSet("run", stdFilter, "Testing interpreter and backend"),
-      TestSet("jvm", stdFilter, "Testing JVM backend"),
-      TestSet("res", x => x.isFile && (x hasExtension "res"), "Testing resident compiler"),
-      TestSet("buildmanager", _.isDirectory, "Testing Build Manager"),
-      TestSet("shootout", stdFilter, "Testing shootout tests"),
-      TestSet("script", stdFilter, "Testing script tests"),
-      TestSet("scalacheck", stdFilter, "Testing ScalaCheck tests"),
-      TestSet("scalap", _.isDirectory, "Run scalap decompiler tests"),
-      TestSet("specialized", stdFilter, "Testing specialized tests"),
-      TestSet("instrumented", stdFilter, "Testing instrumented tests"),
-      TestSet("presentation", _.isDirectory, "Testing presentation compiler tests."),
-      TestSet("ant", antFilter, "Run Ant task tests.")
-    )
-  }
-
-  var fileManager: ConsoleFileManager = _
-
-  private var testFiles: List[File] = List()
-  private val errors = PartestDefaults.errorCount
-  private val testSetKinds  = testSets map (_.kind)
-  private val testSetArgs   = testSets map ("--" + _.kind)
-  private val testSetArgMap = testSetArgs zip testSets toMap
-
-  def denotesTestSet(arg: String)  = testSetArgs contains arg
-
-  private def printVersion() { NestUI outline (versionMsg + "\n") }
-
-  private val unaryArgs = List(
-    "--pack", "--all", "--verbose", "--show-diff", "--show-log",
-    "--failed", "--update-check", "--version", "--ansi", "--debug", "--help"
-  ) ::: testSetArgs
-
-  private val binaryArgs = List(
-    "--grep", "--srcpath", "--buildpath", "--classpath"
-  )
-
-  // true if a test path matches the --grep expression.
-  private def pathMatchesExpr(path: Path, expr: String) = {
-    def pred(p: Path) = file2String(p.toFile) contains expr
-    def srcs = path.toDirectory.deepList() filter (_.hasExtension("scala", "java"))
-
-    (path.isFile && pred(path)) ||
-    (path.isDirectory && srcs.exists(pred)) ||
-    (pred(path changeExtension "check"))
-  }
-
-  def main(argstr: String) {
-    val parsed = CommandLineParser(argstr) withUnaryArgs unaryArgs withBinaryArgs binaryArgs
-    val args   = onlyValidTestPaths(parsed.residualArgs)
-
-    /** Early return on no args, version, or invalid args */
-    if (argstr == "") return NestUI.usage()
-    if (parsed isSet "--version") return printVersion
-    if (parsed isSet "--help") return NestUI.usage()
-
-    parsed get "--srcpath" foreach (x => setProp("partest.srcdir", x))
-
-    fileManager =
-      if (parsed isSet "--buildpath") new ConsoleFileManager(parsed("--buildpath"))
-      else if (parsed isSet "--classpath") new ConsoleFileManager(parsed("--classpath"), true)
-      else if (parsed isSet "--pack") new ConsoleFileManager("build/pack")
-      else new ConsoleFileManager  // auto detection, see ConsoleFileManager.findLatest
-
-    def argNarrowsTests(x: String) = denotesTestSet(x) || denotesTestPath(x)
-
-    NestUI._verbose         = parsed isSet "--verbose"
-    fileManager.showDiff    = parsed isSet "--show-diff"
-    fileManager.updateCheck = parsed isSet "--update-check"
-    fileManager.showLog     = parsed isSet "--show-log"
-    fileManager.failed      = parsed isSet "--failed"
-
-    if (parsed isSet "--ansi") NestUI initialize NestUI.MANY
-    if (parsed isSet "--timeout") fileManager.timeout = parsed("--timeout")
-    if (parsed isSet "--debug") setProp("partest.debug", "true")
-
-    def addTestFile(file: File) = {
-      if (!file.exists)
-        NestUI.failure("Test file '%s' not found, skipping.\n" format file)
-      else {
-        NestUI.verbose("adding test file " + file)
-        testFiles +:= file
-      }
-    }
-
-    // If --grep is given we suck in every file it matches.
-
-    val grepOption = parsed get "--grep"
-    val grepPaths = grepOption.toList flatMap { expr =>
-      val subjectDirs = testSetKinds map (srcDir / _ toDirectory)
-      val testPaths   = subjectDirs flatMap (_.files filter stdFilter)
-      val paths       = testPaths filter (p => pathMatchesExpr(p, expr))
-
-      if (paths.isEmpty)
-         NestUI.failure("--grep string '%s' matched no tests." format expr)
-
-      paths map (_.jfile)
-    }
-    val grepMessage = grepOption map (x => "Argument '%s' matched %d test(s)".format(x, grepPaths.size)) getOrElse ""
-
-    grepPaths foreach addTestFile
-    args foreach (x => addTestFile(new File(x)))
-
-    // If no file arguments were given, we assume --all
-    val enabledTestSets: List[TestSet] = {
-      val enabledArgs = testSetArgs filter parsed.isSet
-
-      if (args.isEmpty && !(parsed isSet "--grep") && (enabledArgs.isEmpty || (parsed isSet "--all"))) testSets
-      else enabledArgs map testSetArgMap
-    }
-
-    val dir =
-      if (fileManager.testClasses.isDefined) fileManager.testClassesDir
-      else fileManager.testBuildFile getOrElse {
-        fileManager.latestCompFile.getParentFile.getParentFile.getAbsoluteFile
-      }
-
-    val vmBin  = javaHome + File.separator + "bin"
-    val vmName = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
-    val vmOpts = fileManager.JAVA_OPTS
-
-    NestUI.verbose("enabled test sets: " + (enabledTestSets map (_.kind) mkString " "))
-
-    List(
-      "Scala compiler classes in: " + dir,
-      "Scala version is:          " + versionMsg,
-      "Scalac options are:        " + fileManager.SCALAC_OPTS,
-      "Java binaries in:          " + vmBin,
-      "Java runtime is:           " + vmName,
-      "Java options are:          " + vmOpts,
-      "Source directory is:       " + srcDir,
-      ""
-    ) foreach (x => NestUI verbose (x + "\n"))
-
-    NestUI.verbose("available processors: " + Runtime.getRuntime().availableProcessors())
-
-    // Dragged down here so it isn't buried under the banner.
-    if (grepMessage != "")
-      NestUI.normal(grepMessage + "\n")
-
-    val ((successes, failures), elapsedMillis) = timed(testCheckAll(enabledTestSets))
-    val total = successes + failures
-
-    val elapsedSecs = elapsedMillis/1000
-    val elapsedMins = elapsedSecs/60
-    val elapsedHrs  = elapsedMins/60
-    val dispMins = elapsedMins - elapsedHrs  * 60
-    val dispSecs = elapsedSecs - elapsedMins * 60
-
-    val dispElapsed = {
-      def form(num: Long) = if (num < 10) "0"+num else ""+num
-      form(elapsedHrs)+":"+form(dispMins)+":"+form(dispSecs)
-    }
-
-    if (failures == 0)
-      NestUI.success("All of "+total+" tests were successful (elapsed time: "+dispElapsed+")\n")
-    else
-      NestUI.failure(failures+" of "+total+" tests failed (elapsed time: "+dispElapsed+")\n")
-
-    System exit ( if (failures == errors) 0 else 1 )
-  }
-
-  def runTests(testSet: TestSet): (Int, Int) = {
-    val TestSet(kind, filter, msg) = testSet
-
-    fileManager.getFiles(kind, filter) match {
-      case Nil    => NestUI.verbose("test dir empty\n") ; (0, 0)
-      case files  =>
-        NestUI.verbose("test files: "+files)
-        NestUI.outline("\n"+msg+"\n")
-        resultsToStatistics(runTestsForFiles(files, kind))
-    }
-  }
-
-  /**
-   * @return (success count, failure count)
-   */
-  def testCheckAll(enabledSets: List[TestSet]): (Int, Int) = {
-    def kindOf(f: File) = {
-      (srcDir relativize Path(f).toCanonical).segments match {
-        case (".." :: "scaladoc" :: xs) => xs.head
-        case xs => xs.head
-      }
-    }
-
-    val (valid, invalid) = testFiles partition (x => testSetKinds contains kindOf(x))
-    invalid foreach (x => NestUI.failure(
-      "Invalid test file '%s', skipping.\n".format(x) +
-      "(Test kind '%s' not in known set '%s')".format(kindOf(x), testSetKinds))
-    )
-
-    val grouped = (valid groupBy kindOf).toList sortBy (x => testSetKinds indexOf x._1)
-    val runTestsFileLists =
-      for ((kind, files) <- grouped) yield {
-        NestUI.outline("\nTesting individual files\n")
-        resultsToStatistics(runTestsForFiles(files, kind))
-      }
-
-    if (enabledSets.nonEmpty)
-      NestUI.verbose("Run sets: "+enabledSets)
-
-    val results = runTestsFileLists ::: (enabledSets map runTests)
-
-    (results map (_._1) sum, results map (_._2) sum)
-  }
-}
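
The runner above is normally driven through the partest script, but it can also be invoked programmatically with the same argument string; a sketch with illustrative flags and paths:

    import scala.tools.partest.nest.ConsoleRunner

    object RunOnePosTest {
      def main(args: Array[String]): Unit = {
        // equivalent to: partest --buildpath build/pack --show-log files/pos/HelloWorld.scala
        new ConsoleRunner().main("--buildpath build/pack --show-log files/pos/HelloWorld.scala")
      }
    }
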
diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala
deleted file mode 100644
index 32ef8b4..0000000
--- a/src/partest/scala/tools/partest/nest/DirectRunner.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.File
-import scala.util.Properties.setProp
-import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.nsc.io.Path
-import scala.collection.{ mutable, immutable }
-import java.util.concurrent._
-import scala.collection.convert.decorateAll._
-
-case class TestRunParams(val scalaCheckParentClassLoader: ScalaClassLoader)
-
-trait DirectRunner {
-  def fileManager: FileManager
-
-  import PartestDefaults.numThreads
-
-  def denotesTestFile(arg: String) = Path(arg).hasExtension("scala", "res", "xml")
-  def denotesTestDir(arg: String)  = Path(arg).ifDirectory(_.files.nonEmpty) exists (x => x)
-  def denotesTestPath(arg: String) = denotesTestDir(arg) || denotesTestFile(arg)
-
-  /** Removes duplicates and anything that is neither a test file nor a
-   *  non-empty directory; don't mess with this unless you like partest hangs.
-   */
-  def onlyValidTestPaths[T](args: List[T]): List[T] = {
-    args.distinct filter (arg => denotesTestPath("" + arg) || {
-      NestUI.warning("Discarding invalid test path '%s'\n" format arg)
-      false
-    })
-  }
-  def runTestsForFiles(_kindFiles: List[File], kind: String): immutable.Map[String, TestState] = {
-    System.setProperty("line.separator", "\n")
-
-    // @partest maintainer: we cannot create a fresh file manager here
-    // since the FM must respect --buildpath and --classpath from the command line
-    // for example, see how it's done in ReflectiveRunner
-    //val consFM = new ConsoleFileManager
-    //import consFM.{ latestCompFile, latestLibFile, latestPartestFile }
-    val latestCompFile    = new File(fileManager.LATEST_COMP)
-    val latestReflectFile = new File(fileManager.LATEST_REFLECT)
-    val latestLibFile     = new File(fileManager.LATEST_LIB)
-    val latestPartestFile = new File(fileManager.LATEST_PARTEST)
-    val latestActorsFile  = new File(fileManager.LATEST_ACTORS)
-    val scalacheckURL     = PathSettings.scalaCheck.toURL
-    val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(
-      scalacheckURL :: (List(latestCompFile, latestReflectFile, latestLibFile, latestActorsFile, latestPartestFile).map(_.toURI.toURL))
-    )
-
-    val kindFiles = onlyValidTestPaths(_kindFiles)
-    val pool      = Executors.newFixedThreadPool(numThreads)
-    val manager   = new RunnerManager(kind, fileManager, TestRunParams(scalaCheckParentClassLoader))
-    val futures   = kindFiles map (f => (f, pool submit callable(manager runTest f))) toMap
-
-    pool.shutdown()
-    try if (!pool.awaitTermination(4, TimeUnit.HOURS))
-      NestUI.warning("Thread pool timeout elapsed before all tests were complete!")
-    catch { case t: InterruptedException =>
-      NestUI.warning("Thread pool was interrupted")
-      t.printStackTrace()
-    }
-
-    for ((file, future) <- futures) yield {
-      val state = if (future.isCancelled) TestState.Timeout else future.get
-      (file.getAbsolutePath, state)
-    }
-  }
-}
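The fan-out in runTestsForFiles above reduces to a small, reusable pattern: submit one Callable per test to a fixed-size pool, shut the pool down, bound the total wait, and treat a cancelled future as a timeout. A minimal sketch of just that pattern, using only the JDK executor API; the test names and the one-minute limit are made up:

    import java.util.concurrent.{ Callable, Executors, TimeUnit }

    object PoolSketch {
      def main(args: Array[String]): Unit = {
        val tests = List("pos/t1.scala", "run/t2.scala", "jvm/t3.scala") // hypothetical test paths
        val pool  = Executors.newFixedThreadPool(2)

        // one Callable per test, keeping each Future paired with its input
        val futures = tests map (t => t -> pool.submit(new Callable[String] { def call() = t + ": Ok" }))

        pool.shutdown()
        // bound the total wait, as DirectRunner does with its 4-hour limit
        if (!pool.awaitTermination(1, TimeUnit.MINUTES))
          Console.err.println("pool timeout elapsed before all tasks completed")

        // a cancelled future becomes a timeout marker, a completed one yields its value
        futures foreach { case (t, f) =>
          println(t + " -> " + (if (f.isCancelled) "Timeout" else f.get))
        }
      }
    }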
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
deleted file mode 100644
index 70fdb33..0000000
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ /dev/null
@@ -1,124 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{File, FilenameFilter, IOException, StringWriter,
-                FileInputStream, FileOutputStream, BufferedReader,
-                FileReader, PrintWriter, FileWriter}
-import java.net.URI
-import scala.tools.nsc.io.{ Path, Directory, File => SFile }
-import scala.sys.process._
-import scala.collection.mutable
-
-trait FileUtil {
-  /**
-   * Compares two files using difflib to produce a unified diff.
-   *
-   * @param  f1  the first file to be compared
-   * @param  f2  the second file to be compared
-   * @return the unified diff of the compared files or the empty string if they're equal
-   */
-  def compareFiles(f1: File, f2: File): String = {
-    compareContents(io.Source.fromFile(f1).getLines.toSeq, io.Source.fromFile(f2).getLines.toSeq, f1.getName, f2.getName)
-  }
-
-  /**
-   * Compares two lists of lines using difflib to produce a unified diff.
-   *
-   * @param  origLines  the first seq of lines to be compared
-   * @param  newLines   the second seq of lines to be compared
-   * @param  origName   file name to be used in unified diff for `origLines`
-   * @param  newName    file name to be used in unified diff for `newLines`
-   * @return the unified diff of the `origLines` and `newLines` or the empty string if they're equal
-   */
-  def compareContents(origLines: Seq[String], newLines: Seq[String], origName: String = "a", newName: String = "b"): String = {
-    import collection.JavaConverters._
-
-    val diff = difflib.DiffUtils.diff(origLines.asJava, newLines.asJava)
-    if (diff.getDeltas.isEmpty) ""
-    else difflib.DiffUtils.generateUnifiedDiff(origName, newName, origLines.asJava, diff, 1).asScala.mkString("\n")
-  }
-}
-object FileUtil extends FileUtil { }
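compareFiles and compareContents boil down to two difflib calls: diff the two line sequences, then render a unified diff with one line of context, returning the empty string when there are no deltas. A minimal standalone sketch of the same calls; it assumes the diffutils jar is on the classpath, and the input lines are invented:

    import scala.collection.JavaConverters._

    object DiffSketch {
      def main(args: Array[String]): Unit = {
        val expected = Seq("class C {", "  def f = 1", "}")
        val actual   = Seq("class C {", "  def f = 2", "}")

        // the same calls compareContents makes: diff two line seqs, then render
        // a unified diff with one line of context; equal inputs yield ""
        val patch = difflib.DiffUtils.diff(expected.asJava, actual.asJava)
        val diff =
          if (patch.getDeltas.isEmpty) ""
          else difflib.DiffUtils.generateUnifiedDiff("expected", "actual", expected.asJava, patch, 1).asScala.mkString("\n")

        println(diff)
      }
    }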
-
-trait FileManager extends FileUtil {
-
-  def testRootDir: Directory
-  def testRootPath: String
-
-  var JAVACMD: String
-  var JAVAC_CMD: String
-
-  var CLASSPATH: String
-  var LATEST_LIB: String
-  var LATEST_REFLECT: String
-  var LATEST_COMP: String
-  var LATEST_PARTEST: String
-  var LATEST_ACTORS: String
-
-  var showDiff = false
-  var updateCheck = false
-  var showLog = false
-  var failed = false
-
-  var SCALAC_OPTS = PartestDefaults.scalacOpts.split(' ').toSeq
-  var JAVA_OPTS   = PartestDefaults.javaOpts
-  var timeout     = PartestDefaults.timeout
-  // how can 15 minutes not be enough? What are you doing, run/lisp.scala?
-  // You complete in 11 seconds on my machine.
-  var oneTestTimeout = 60 * 60 * 1000
-
-  /** Only when --debug is given. */
-  lazy val testTimings = new mutable.HashMap[String, Long]
-  def recordTestTiming(name: String, milliseconds: Long) =
-    synchronized { testTimings(name) = milliseconds }
-  def showTestTimings() {
-    testTimings.toList sortBy (-_._2) foreach { case (k, v) => println("%s: %s".format(k, v)) }
-  }
-
-  def getLogFile(dir: File, fileBase: String, kind: String): File =
-    new File(dir, fileBase + "-" + kind + ".log")
-
-  def getLogFile(file: File, kind: String): File = {
-    val dir      = file.getParentFile
-    val fileBase = basename(file.getName)
-
-    getLogFile(dir, fileBase, kind)
-  }
-
-  def logFileExists(file: File, kind: String) =
-    getLogFile(file, kind).canRead
-
-  def overwriteFileWith(dest: File, file: File) =
-    dest.isFile && copyFile(file, dest)
-
-  def copyFile(from: File, dest: File): Boolean = {
-    if (from.isDirectory) {
-      assert(dest.isDirectory, "cannot copy directory to file")
-      val subDir:Directory = Path(dest) / Directory(from.getName)
-      subDir.createDirectory()
-      from.listFiles.toList forall (copyFile(_, subDir))
-    }
-    else {
-      val to = if (dest.isDirectory) new File(dest, from.getName) else dest
-
-      try {
-        SFile(to) writeAll SFile(from).slurp()
-        true
-      }
-      catch { case _: IOException => false }
-    }
-  }
-
-  def mapFile(file: File, replace: String => String) {
-    val f = SFile(file)
-
-    f.printlnAll(f.lines.toList map replace: _*)
-  }
-}
diff --git a/src/partest/scala/tools/partest/nest/NestRunner.scala b/src/partest/scala/tools/partest/nest/NestRunner.scala
deleted file mode 100644
index e398d2e..0000000
--- a/src/partest/scala/tools/partest/nest/NestRunner.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-object NestRunner {
-  def main(args: Array[String]) {
-    new ReflectiveRunner main (args mkString " ")
-  }
-}
diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala
deleted file mode 100644
index 70db6d0..0000000
--- a/src/partest/scala/tools/partest/nest/NestUI.scala
+++ /dev/null
@@ -1,121 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.PrintWriter
-
-object NestUI {
-
-  val NONE = 0
-  val SOME = 1
-  val MANY = 2
-
-  private var _outline = ""
-  private var _success = ""
-  private var _failure = ""
-  private var _warning = ""
-  private var _default = ""
-
-  def initialize(number: Int) = number match {
-    case MANY =>
-      _outline = Console.BOLD + Console.BLACK
-      _success = Console.BOLD + Console.GREEN
-      _failure = Console.BOLD + Console.RED
-      _warning = Console.BOLD + Console.YELLOW
-      _default = Console.RESET
-    case SOME =>
-      _outline = Console.BOLD + Console.BLACK
-      _success = Console.RESET
-      _failure = Console.BOLD + Console.BLACK
-      _warning = Console.BOLD + Console.BLACK
-      _default = Console.RESET
-    case _ =>
-  }
-
-  def outline(msg: String) = print(_outline + msg + _default)
-  def outline(msg: String, wr: PrintWriter) = synchronized {
-    wr.print(_outline + msg + _default)
-  }
-
-  def success(msg: String) = print(_success  + msg + _default)
-  def success(msg: String, wr: PrintWriter) = synchronized {
-    wr.print(_success + msg + _default)
-  }
-
-  def failure(msg: String) = print(_failure  + msg + _default)
-  def failure(msg: String, wr: PrintWriter) = synchronized {
-    wr.print(_failure + msg + _default)
-  }
-
-  def warning(msg: String) = print(_warning  + msg + _default)
-  def warning(msg: String, wr: PrintWriter) = synchronized {
-    wr.print(_warning + msg + _default)
-  }
-
-  def normal(msg: String) = print(_default + msg)
-  def normal(msg: String, wr: PrintWriter) = synchronized {
-    wr.print(_default + msg)
-  }
-
-  def usage() {
-    println("Usage: NestRunner [<options>] [<testfile> ..] [<resfile>]")
-    println("  <testfile>: list of files ending in '.scala'")
-    println("  <resfile>: a file not ending in '.scala'")
-    println("  <options>:")
-    println
-    println("  Test categories:")
-    println("    --all           run all tests")
-    println("    --pos           run compilation tests (success)")
-    println("    --neg           run compilation tests (failure)")
-    println("    --run           run interpreter and backend tests")
-    println("    --jvm           run JVM backend tests")
-    println("    --res           run resident compiler tests")
-    println("    --buildmanager  run Build Manager tests")
-    println("    --scalacheck    run ScalaCheck tests")
-    println("    --script        run script runner tests")
-    println("    --shootout      run shootout tests")
-    println("    --instrumented  run instrumented tests")
-    println("    --presentation  run presentation compiler tests")
-    println("    --grep <expr>    run all tests whose source file contains <expr>")
-    println
-    println("  Other options:")
-    println("    --pack       pick compiler/reflect/library in build/pack, and run all tests")
-    println("    --show-log   show log")
-    println("    --show-diff  show diff between log and check file")
-    println("    --failed     run only those tests that failed during the last run")
-    println("    --update-check instead of failing tests with output change, update checkfile. (Use with care!)")
-    println("    --verbose    show progress information")
-    println("    --buildpath  set (relative) path to build jars")
-    println("                 ex.: --buildpath build/pack")
-    println("    --classpath  set (absolute) path to build classes")
-    println("    --srcpath    set (relative) path to test source files")
-    println("                 ex.: --srcpath pending")
-    println("    --debug      enable debugging output")
-    println
-    println(utils.Properties.versionString)
-    println("maintained by Philipp Haller (EPFL)")
-    sys.exit(1)
-  }
-
-  var _verbose = false
-  var _debug = false
-
-  def verbose(msg: String) {
-    if (_verbose) {
-      outline("debug: ")
-      println(msg)
-    }
-  }
-  def debug(msg: String) {
-    if (isPartestDebug) {
-      outline("debug: ")
-      println(msg)
-    }
-  }
-}
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
deleted file mode 100644
index 0ba3477..0000000
--- a/src/partest/scala/tools/partest/nest/PathSettings.scala
+++ /dev/null
@@ -1,82 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- */
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.Properties.{ setProp, propOrEmpty, propOrNone, propOrElse }
-import scala.tools.nsc.util.ClassPath
-import scala.tools.nsc.io
-import io.{ Path, File, Directory }
-import RunnerUtils._
-
-object PathSettings {
-  import PartestDefaults.{ testRootDir, srcDirName }
-
-  private def cwd = Directory.Current getOrElse sys.error("user.dir property not set")
-  private def isPartestDir(d: Directory) = (d.name == "test") && (d / srcDirName isDirectory)
-  private def findJar(d: Directory, name: String): Option[File] = findJar(d.files, name)
-  private def findJar(files: Iterator[File], name: String): Option[File] =
-    files filter (_ hasExtension "jar") find { _.name startsWith name }
-
-  // Directory <root>/test
-  lazy val testRoot: Directory = testRootDir getOrElse {
-    val candidates: List[Directory] = (cwd :: cwd.parents) flatMap (d => List(d, Directory(d / "test")))
-
-    candidates find isPartestDir getOrElse sys.error("Directory 'test' not found.")
-  }
-
-  // Directory <root>/test/files
-  lazy val srcDir = Directory(testRoot / srcDirName toCanonical)
-
-  // Directory <root>/test/files/lib
-  lazy val srcLibDir = Directory(srcDir / "lib")
-
-  // Directory <root>/test/files/speclib
-  lazy val srcSpecLibDir = Directory(srcDir / "speclib")
-
-  lazy val srcSpecLib: File = findJar(srcSpecLibDir, "instrumented") getOrElse {
-    sys.error("No instrumented.jar found in %s".format(srcSpecLibDir))
-  }
-
-  // Directory <root>/test/files/codelib
-  lazy val srcCodeLibDir = Directory(srcDir / "codelib")
-
-  lazy val srcCodeLib: File = (
-    findJar(srcCodeLibDir, "code")
-      orElse findJar(Directory(testRoot / "files" / "codelib"), "code") // work with --srcpath pending
-      getOrElse sys.error("No code.jar found in %s".format(srcCodeLibDir))
-  )
-
-  lazy val instrumentationAgentLib: File = {
-    findJar(buildPackLibDir.files, "scala-partest-javaagent") getOrElse {
-      sys.error("No partest-javaagent jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
-    }
-  }
-
-  // Directory <root>/build
-  lazy val buildDir: Directory = {
-    val bases      = testRoot :: testRoot.parents
-    // In the classic "ant" build, the relevant subdirectory is called build,
-    // but in the postmodern "sbt" build, it is called target.  Look for both.
-    val dirs = Path.onlyDirs(bases flatMap (x => List(x / "build", x / "target")))
-
-    dirs.headOption getOrElse sys.error("Neither 'build' nor 'target' dir found under test root " + testRoot + ".")
-  }
-
-  // Directory <root>/build/pack/lib
-  lazy val buildPackLibDir = Directory(buildDir / "pack" / "lib")
-
-  lazy val scalaCheck: File =
-    findJar(buildPackLibDir.files ++ srcLibDir.files, "scalacheck") getOrElse {
-      sys.error("No scalacheck jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
-    }
-
-  lazy val diffUtils: File =
-    findJar(buildPackLibDir.files, "diffutils") getOrElse sys.error(s"No diffutils.jar found in '$buildPackLibDir'.")
-}
-
-class PathSettings() {
-  // def classpathAsURLs: List[URL]
-}
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
deleted file mode 100644
index 700667a..0000000
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ /dev/null
@@ -1,102 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
-import scala.tools.nsc.util.ClassPath
-import scala.tools.nsc.io
-import io.Path
-import RunnerUtils._
-import java.net.URLClassLoader
-
-/* This class is used to load an instance of DirectRunner using
- * a custom class loader.
- * The purpose is to "auto-detect" a good classpath for the
- * rest of the classes (Worker, CompileManager etc.), so that
- * the main NestRunner can be started merely by putting its
- * class on the classpath (ideally).
- */
-class ReflectiveRunner {
-  // TODO: we might also use fileManager.CLASSPATH
-  // to use the same classes as used by `scala` that
-  // was used to start the runner.
-  val sepRunnerClassName = "scala.tools.partest.nest.ConsoleRunner"
-
-  def main(args: String) {
-    val argList = (args.split("\\s")).toList
-
-    if (isPartestDebug)
-      showAllJVMInfo
-
-    // find out which build to test
-    val buildPath = searchPath("--buildpath", argList)
-    val classPath = searchPath("--classpath", argList)
-    val fileManager =
-      if (!buildPath.isEmpty)
-        new ConsoleFileManager(buildPath.get)
-      else if (!classPath.isEmpty)
-        new ConsoleFileManager(classPath.get, true)
-      else if (argList contains "--pack")
-        new ConsoleFileManager("build/pack")
-      else // auto detection
-        new ConsoleFileManager
-
-    import fileManager.
-      { latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile }
-    val files =
-      Array(latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile) map (x => io.File(x))
-
-    val sepUrls   = files map (_.toURL)
-    // this seems to be the core classloader that determines which classes can be found when running partest from the test/partest script
-    var sepLoader = new URLClassLoader(sepUrls, null)
-
-    // this is a workaround for https://issues.scala-lang.org/browse/SI-5433
-    // we hack into the classloader that will become parent classloader for scalac
-    // this way we ensure that reflective macro lookup will pick correct Code.lift
-    // it's also used to inject diffutils into the classpath when running partest from the test/partest script
-    sepLoader = new URLClassLoader((PathSettings.srcCodeLib +: (PathSettings.diffUtils +: files)) map (_.toURL), null)
-
-    if (isPartestDebug)
-      println("Loading classes from:\n" + sepUrls.mkString("\n"))
-
-    // @partest maintainer: it seems to me that commented lines are incorrect
-    // if classPath is not empty, then it has been provided by the --classpath option
-    // which points to the root of Scala home (see ConsoleFileManager's testClasses and the true flag in the ctor for more information)
-    // this doesn't mean that we had custom Java classpath set, so we don't have to override latestXXXFiles from the file manager
-    //
-    //val paths = classPath match {
-    //  case Some(cp) => Nil
-    //  case _        => files.toList map (_.path)
-    //}
-    val paths = files.toList map (_.path)
-
-    val newClasspath = ClassPath.join(paths: _*)
-
-    setProp("java.class.path", newClasspath)
-    setProp("scala.home", "")
-
-    if (isPartestDebug)
-      for (prop <- List("java.class.path", "sun.boot.class.path", "java.ext.dirs"))
-        println(prop + ": " + propOrEmpty(prop))
-
-    try {
-      val sepRunnerClass  = sepLoader loadClass sepRunnerClassName
-      val sepRunner       = sepRunnerClass.newInstance()
-      val sepMainMethod   = sepRunnerClass.getMethod("main", Array(classOf[String]): _*)
-      val cargs: Array[AnyRef] = Array(args)
-      sepMainMethod.invoke(sepRunner, cargs: _*)
-    }
-    catch {
-      case cnfe: ClassNotFoundException =>
-        cnfe.printStackTrace()
-        NestUI.failure(sepRunnerClassName +" could not be loaded from:\n")
-        sepUrls foreach (x => NestUI.failure(x + "\n"))
-    }
-  }
-}
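The isolation trick ReflectiveRunner relies on is a URLClassLoader with a null parent, holding only the jars the file manager located, plus a reflective call into the runner's main(String). A minimal sketch of that mechanism; the jar paths are placeholders, and the call only succeeds when the partest classes are actually on them:

    import java.io.File
    import java.net.URLClassLoader

    object ReflectiveLoadSketch {
      def main(args: Array[String]): Unit = {
        // placeholder jar locations; ReflectiveRunner gets the real ones from ConsoleFileManager
        val jars = Seq("build/pack/lib/scala-library.jar", "build/pack/lib/scala-compiler.jar") map (p => new File(p))

        // null parent: only the listed URLs (plus the bootstrap loader) are visible,
        // so the loaded runner sees exactly the classpath that was auto-detected
        val loader = new URLClassLoader(jars.map(_.toURI.toURL).toArray, null)

        val cls   = loader.loadClass("scala.tools.partest.nest.ConsoleRunner")
        val entry = cls.getMethod("main", classOf[String])
        entry.invoke(cls.newInstance(), "--pos --verbose")
      }
    }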
diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala
deleted file mode 100644
index f80f6f3..0000000
--- a/src/partest/scala/tools/partest/nest/RunnerManager.scala
+++ /dev/null
@@ -1,862 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools.partest
-package nest
-
-import java.io._
-import java.net.URL
-import java.util.{ Timer, TimerTask }
-
-import scala.tools.nsc.Properties.{ jdkHome, javaHome, propOrElse }
-import scala.util.Properties.{ envOrElse, isWin }
-import scala.tools.nsc.{ Settings, CompilerCommand, Global }
-import scala.tools.nsc.io.{ AbstractFile, PlainFile, Path, Directory, File => SFile }
-import scala.tools.nsc.reporters.ConsoleReporter
-import scala.tools.nsc.util.{ ClassPath, FakePos, ScalaClassLoader, stackTraceString }
-import ClassPath.{ join, split }
-import scala.tools.scalap.scalax.rules.scalasig.ByteCode
-import scala.collection.{ mutable, immutable }
-import scala.tools.nsc.interactive.{ BuildManager, RefinedBuildManager }
-import scala.sys.process._
-import java.util.concurrent.{ Executors, TimeUnit, TimeoutException }
-import PartestDefaults.{ javaCmd, javacCmd }
-
-class LogContext(val file: File, val writers: Option[(StringWriter, PrintWriter)])
-
-object LogContext {
-  def apply(file: File, swr: StringWriter, wr: PrintWriter): LogContext = {
-    require (file != null)
-    new LogContext(file, Some((swr, wr)))
-  }
-  def apply(file: File): LogContext = new LogContext(file, None)
-}
-
-object Output {
-  object outRedirect extends Redirecter(out)
-  object errRedirect extends Redirecter(err)
-
-  System.setOut(outRedirect)
-  System.setErr(errRedirect)
-
-  import scala.util.DynamicVariable
-  private def out = java.lang.System.out
-  private def err = java.lang.System.err
-  private val redirVar = new DynamicVariable[Option[PrintStream]](None)
-
-  class Redirecter(stream: PrintStream) extends PrintStream(new OutputStream {
-    def write(b: Int) = withStream(_ write b)
-
-    private def withStream(f: PrintStream => Unit) = f(redirVar.value getOrElse stream)
-
-    override def write(b: Array[Byte]) = withStream(_ write b)
-    override def write(b: Array[Byte], off: Int, len: Int) = withStream(_.write(b, off, len))
-    override def flush = withStream(_.flush)
-    override def close = withStream(_.close)
-  })
-
-  // this supports thread-safe nested output redirects
-  def withRedirected[T](newstream: PrintStream)(func: => T): T = {
-    // note down old redirect destination
-    // this may be None in which case outRedirect and errRedirect print to stdout and stderr
-    val saved = redirVar.value
-    // set new redirecter
-    // this one will redirect both out and err to newstream
-    redirVar.value = Some(newstream)
-
-    try func
-    finally {
-      newstream.flush()
-      redirVar.value = saved
-    }
-  }
-}
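The redirection above keys off a DynamicVariable: the variable holds the current per-scope target, and the real stream is only the fallback. The same idea as a self-contained sketch, with hypothetical names (Output itself additionally swaps System.out and System.err for its Redirecter streams):

    import java.io.{ FileOutputStream, PrintStream }
    import scala.util.DynamicVariable

    object RedirectSketch {
      // None means "use the real stream", as in Output.redirVar
      private val target = new DynamicVariable[Option[PrintStream]](None)

      def out: PrintStream = target.value getOrElse System.out

      // run body with output sent to ps, restoring the previous target afterwards
      def withRedirected[T](ps: PrintStream)(body: => T): T =
        target.withValue(Some(ps)) {
          try body finally ps.flush()
        }

      def main(args: Array[String]): Unit = {
        val log = new PrintStream(new FileOutputStream("redirect-sketch.log"), true)
        withRedirected(log) { out.println("goes to the log file") }
        out.println("back on stdout")
      }
    }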
-
-class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunParams) {
-  import fileManager._
-
-  val compileMgr = new CompileManager(fileManager)
-  fileManager.CLASSPATH += File.pathSeparator + PathSettings.scalaCheck
-  fileManager.CLASSPATH += File.pathSeparator + PathSettings.diffUtils // needed to put diffutils on test/partest's classpath
-
-  private def compareFiles(f1: File, f2: File): String =
-    try fileManager.compareFiles(f1, f2)
-    catch { case t: Exception => t.toString }
-
-  /** Strips the given directory's absolute path from `s` and replaces
-   *  backslashes with forward slashes before diffing.
-   */
-  private def replaceSlashes(dir: File, s: String): String = {
-    val base = (dir.getAbsolutePath + File.separator).replace('\\', '/')
-    var regex = """\Q%s\E""" format base
-    if (isWin) regex = "(?i)" + regex
-    s.replace('\\', '/').replaceAll(regex, "")
-  }
-
-  private def workerError(msg: String): Unit = System.err.println("Error: " + msg)
-
-  private def printInfoStart(file: File, printer: PrintWriter) {
-    NestUI.outline("testing: ", printer)
-    val filesdir = file.getAbsoluteFile.getParentFile.getParentFile
-    val testdir = filesdir.getParentFile
-    val totalWidth = 56
-    val name = {
-      // 1. try with [...]/files/run/test.scala
-      val name = file.getAbsolutePath drop testdir.getAbsolutePath.length
-      if (name.length <= totalWidth) name
-      // 2. try with [...]/run/test.scala
-      else file.getAbsolutePath drop filesdir.getAbsolutePath.length
-    }
-    NestUI.normal("[...]%s%s".format(name, " " * (totalWidth - name.length)), printer)
-  }
-
-  private def printInfoEnd(success: Boolean, printer: PrintWriter) {
-    NestUI.normal("[", printer)
-    if (success) NestUI.success("  OK  ", printer)
-    else NestUI.failure("FAILED", printer)
-    NestUI.normal("]\n", printer)
-  }
-
-  private def printInfoTimeout(printer: PrintWriter) {
-    NestUI.normal("[", printer)
-    NestUI.failure("TIMOUT", printer)
-    NestUI.normal("]\n", printer)
-  }
-
-  private def javac(outDir: File, files: List[File], output: File): CompilationOutcome = {
-    // compile using command-line javac compiler
-    val args = Seq(
-      javacCmd,
-      "-d",
-      outDir.getAbsolutePath,
-      "-classpath",
-      join(outDir.toString, CLASSPATH)
-    ) ++ files.map("" + _)
-
-    try if (runCommand(args, output)) CompileSuccess else CompileFailed
-    catch exHandler(output, "javac command failed:\n" + args.map("  " + _ + "\n").mkString + "\n", CompilerCrashed)
-  }
-
-  /** Runs the command, redirecting standard out and standard error to the output file.
-   *  Overloaded to accept a sequence of arguments.
-   */
-  private def runCommand(args: Seq[String], outFile: File): Boolean = {
-    NestUI.verbose("running command:\n"+args.map("  " + _ + "\n").mkString)
-    runCommandImpl(Process(args), outFile)
-  }
-
-  /** Runs the command, redirecting standard out and standard error to the output file.
-   *  Overloaded to accept a single string containing the command and its arguments.
-   */
-  private def runCommand(command: String, outFile: File): Boolean = {
-    NestUI.verbose("running command:"+command)
-    runCommandImpl(Process(command), outFile)
-  }
-
-  private def runCommandImpl(process: => ProcessBuilder, outFile: File): Boolean = {
-    val exitCode = (process #> outFile !)
-    // normalize line endings
-    // System.getProperty("line.separator") should be "\n" here
-    // so reading a file and writing it back should convert all CRLFs to LFs
-    SFile(outFile).printlnAll(SFile(outFile).lines.toList: _*)
-    exitCode == 0
-  }
-
-  @inline private def isJava(f: File) = SFile(f) hasExtension "java"
-  @inline private def isScala(f: File) = SFile(f) hasExtension "scala"
-  @inline private def isJavaOrScala(f: File) = isJava(f) || isScala(f)
-
-  private def outputLogFile(logFile: File) {
-    val lines = SFile(logFile).lines
-    if (lines.nonEmpty) {
-      NestUI.normal("Log file '" + logFile + "': \n")
-      lines foreach (x => NestUI.normal(x + "\n"))
-    }
-  }
-  private def logStackTrace(logFile: File, t: Throwable, msg: String): Boolean = {
-    SFile(logFile).writeAll(msg, stackTraceString(t))
-    outputLogFile(logFile) // if running the test threw an exception, output log file
-    false
-  }
-
-  private def exHandler[T](logFile: File, msg: String, value: T): PartialFunction[Throwable, T] = {
-    case e: Exception => logStackTrace(logFile, e, msg) ; value
-  }
-
-  class Runner(testFile: File) {
-    var testDiff: String = ""
-    var passed: Option[Boolean] = None
-
-    val fileBase = basename(testFile.getName)
-    val logFile  = fileManager.getLogFile(testFile, kind)
-    val parent   = testFile.getParentFile
-    val outDir   = new File(parent, "%s-%s.obj".format(fileBase, kind))
-    def toDelete = if (isPartestDebug) Nil else List(
-      if (passed exists (x => x)) Some(logFile) else None,
-      if (outDir.isDirectory) Some(outDir) else None
-    ).flatten
-
-    private def createOutputDir(): File = {
-      outDir.mkdirs()
-      outDir
-    }
-
-    private def execTest(outDir: File, logFile: File, classpathPrefix: String = "", javaOpts: String = ""): Boolean = {
-      // check whether there is a ".javaopts" file
-      val argsFile  = new File(logFile.getParentFile, fileBase + ".javaopts")
-      val argString = file2String(argsFile)
-      if (argString != "")
-        NestUI.verbose("Found javaopts file '%s', using options: '%s'".format(argsFile, argString))
-
-      val testFullPath = {
-        val d = new File(logFile.getParentFile, fileBase)
-        if (d.isDirectory) d.getAbsolutePath
-        else {
-          val f = new File(logFile.getParentFile, fileBase + ".scala")
-          if (f.isFile) f.getAbsolutePath
-          else ""
-        }
-      }
-
-      // Note! As this currently functions, JAVA_OPTS must precede argString
-      // because when an option is repeated to java only the last one wins.
-      // That means until now all the .javaopts files were being ignored because
-      // they all attempt to change options which are also defined in
-      // partest.java_opts, leading to debug output like:
-      //
-      // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k'
-      // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...]
-      val extras = if (isPartestDebug) List("-Dpartest.debug=true") else Nil
-      val propertyOptions = List(
-        "-Dfile.encoding=UTF-8",
-        "-Djava.library.path="+logFile.getParentFile.getAbsolutePath,
-        "-Dpartest.output="+outDir.getAbsolutePath,
-        "-Dpartest.lib="+LATEST_LIB,
-        "-Dpartest.reflect="+LATEST_REFLECT,
-        "-Dpartest.comp="+LATEST_COMP,
-        "-Dpartest.cwd="+outDir.getParent,
-        "-Dpartest.test-path="+testFullPath,
-        "-Dpartest.testname="+fileBase,
-        "-Djavacmd="+javaCmd,
-        "-Djavaccmd="+javacCmd,
-        "-Duser.language=en",
-        "-Duser.country=US"
-      ) ++ extras
-
-      val classpath = if (classpathPrefix != "") join(classpathPrefix, CLASSPATH) else CLASSPATH
-      val cmd = javaCmd +: (
-        (JAVA_OPTS.split(' ') ++ javaOpts.split(' ') ++ argString.split(' ')).map(_.trim).filter(_ != "") ++ Seq(
-          "-classpath",
-          join(outDir.toString, classpath)
-        ) ++ propertyOptions ++ Seq(
-          "scala.tools.nsc.MainGenericRunner",
-          "-usejavacp",
-          "Test",
-          "jvm"
-        )
-      )
-
-      runCommand(cmd, logFile)
-    }
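The ordering note above matters because when the JVM sees the same flag twice, the later occurrence wins, so the per-test .javaopts options only take effect if they are appended after the global JAVA_OPTS. A tiny sketch of that ordering; the option values are illustrative:

    object JvmOptsOrderSketch {
      def main(args: Array[String]): Unit = {
        val globalOpts  = Seq("-Xss2m", "-Xmx1024M")   // partest-wide JAVA_OPTS
        val perTestOpts = Seq("-Xss32k")               // from a .javaopts file

        // later occurrences of the same flag win, so per-test options go last
        val cmd = Seq("java") ++ globalOpts ++ perTestOpts ++ Seq("-classpath", "out", "Test", "jvm")
        println(cmd mkString " ")
      }
    }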
-
-    private def getCheckFilePath(dir: File, suffix: String = "") = {
-      def chkFile(s: String) = (Directory(dir) / "%s%s.check".format(fileBase, s)).toFile
-
-      if (chkFile("").isFile || suffix == "") chkFile("")
-      else chkFile("-" + suffix)
-    }
-    private def getCheckFile(dir: File) = Some(getCheckFilePath(dir, kind)) filter (_.canRead)
-
-    private def compareOutput(dir: File, logFile: File): String = {
-      val checkFile = getCheckFilePath(dir, kind)
-      val diff =
-        if (checkFile.canRead) compareFiles(logFile, checkFile.jfile)
-        else file2String(logFile)
-
-      // if check file exists, compare with log file
-      if (diff != "" && fileManager.updateCheck) {
-        NestUI.verbose("Updating checkfile " + checkFile.jfile)
-        val toWrite = if (checkFile.exists) checkFile else getCheckFilePath(dir, "")
-        toWrite writeAll file2String(logFile)
-        ""
-      }
-      else diff
-    }
-
-    def newTestWriters() = {
-      val swr = new StringWriter
-      val wr  = new PrintWriter(swr, true)
-      // diff    = ""
-
-      ((swr, wr))
-    }
-
-    def fail(what: Any) = {
-      NestUI.verbose("scalac: compilation of "+what+" failed\n")
-      false
-    }
-    def diffCheck(testFile: File, diff: String) = {
-      testDiff = diff
-      testDiff == ""
-    }
-
-    /** 1. Creates log file and output directory.
-     *  2. Runs script function, providing log file and output directory as arguments.
-     */
-    def runInContext(file: File, script: (File, File) => Boolean): (Boolean, LogContext) = {
-      val (swr, wr) = newTestWriters()
-      printInfoStart(file, wr)
-
-      NestUI.verbose(this+" running test "+fileBase)
-      val outDir = createOutputDir()
-      NestUI.verbose("output directory: "+outDir)
-
-      // run test-specific code
-      val succeeded = try {
-        if (isPartestDebug) {
-          val (result, millis) = timed(script(logFile, outDir))
-          fileManager.recordTestTiming(file.getPath, millis)
-          result
-        }
-        else script(logFile, outDir)
-      }
-      catch exHandler(logFile, "", false)
-
-      (succeeded, LogContext(logFile, swr, wr))
-    }
-
-    def groupedFiles(dir: File): List[List[File]] = {
-      val testFiles = dir.listFiles.toList filter isJavaOrScala
-
-      def isInGroup(f: File, num: Int) = SFile(f).stripExtension endsWith ("_" + num)
-      val groups = (0 to 9).toList map (num => (testFiles filter (f => isInGroup(f, num))).sorted)
-      val noGroupSuffix = (testFiles filterNot (groups.flatten contains)).sorted
-
-      noGroupSuffix :: groups filterNot (_.isEmpty)
-    }
-
-    def compileFilesIn(dir: File, logFile: File, outDir: File): CompilationOutcome = {
-      def compileGroup(g: List[File]): CompilationOutcome = {
-        val (scalaFiles, javaFiles) = g partition isScala
-        val allFiles = javaFiles ++ scalaFiles
-
-        List(1, 2, 3).foldLeft(CompileSuccess: CompilationOutcome) {
-          case (CompileSuccess, 1) if scalaFiles.nonEmpty => compileMgr.attemptCompile(Some(outDir), allFiles, kind, logFile)     // java + scala
-          case (CompileSuccess, 2) if javaFiles.nonEmpty  => javac(outDir, javaFiles, logFile)                                    // java
-          case (CompileSuccess, 3) if scalaFiles.nonEmpty => compileMgr.attemptCompile(Some(outDir), scalaFiles, kind, logFile)   // scala
-          case (outcome, _)                               => outcome
-        }
-      }
-      groupedFiles(dir).foldLeft(CompileSuccess: CompilationOutcome) {
-        case (CompileSuccess, files) => compileGroup(files)
-        case (outcome, _)            => outcome
-      }
-    }
-
-    def runTestCommon(file: File, expectFailure: Boolean)(
-      onSuccess: (File, File) => Boolean,
-      onFail: (File, File) => Unit = (_, _) => ()): (Boolean, LogContext) =
-    {
-      runInContext(file, (logFile: File, outDir: File) => {
-        val outcome = (
-          if (file.isDirectory) compileFilesIn(file, logFile, outDir)
-          else compileMgr.attemptCompile(None, List(file), kind, logFile)
-        )
-        val result = (
-          if (expectFailure) outcome.isNegative
-          else outcome.isPositive
-        )
-
-        if (result) onSuccess(logFile, outDir)
-        else { onFail(logFile, outDir) ; false }
-      })
-    }
-
-    def runJvmTest(file: File): (Boolean, LogContext) =
-      runTestCommon(file, expectFailure = false)((logFile, outDir) => {
-        val dir      = file.getParentFile
-
-        // adding codelib.jar to the classpath
-        // codelib provides the possibility to override standard reify
-        // this shields the massive amount of reification tests from changes in the API
-        execTest(outDir, logFile, PathSettings.srcCodeLib.toString) && {
-          // cannot replace paths here since this also inverts slashes
-          // which affects a bunch of tests
-          //fileManager.mapFile(logFile, replaceSlashes(dir, _))
-          diffCheck(file, compareOutput(dir, logFile))
-        }
-      })
-
-    // Apache Ant 1.6 or newer
-    def ant(args: Seq[String], output: File): Boolean = {
-      val antDir = Directory(envOrElse("ANT_HOME", "/opt/ant/"))
-      val antLibDir = Directory(antDir / "lib")
-      val antLauncherPath = SFile(antLibDir / "ant-launcher.jar").path
-      val antOptions =
-        if (NestUI._verbose) List("-verbose", "-noinput")
-        else List("-noinput")
-      val cmd = javaCmd +: (
-        JAVA_OPTS.split(' ').map(_.trim).filter(_ != "") ++ Seq(
-          "-classpath",
-          antLauncherPath,
-          "org.apache.tools.ant.launch.Launcher"
-        ) ++ antOptions ++ args
-      )
-
-      try runCommand(cmd, output)
-      catch exHandler(output, "ant command '" + cmd + "' failed:\n", false)
-    }
-
-    def runAntTest(file: File): (Boolean, LogContext) = {
-      val (swr, wr) = newTestWriters()
-      printInfoStart(file, wr)
-
-      NestUI.verbose(this+" running test "+fileBase)
-
-      val succeeded = try {
-        val binary = "-Dbinary="+(
-          if      (fileManager.LATEST_LIB endsWith "build/quick/classes/library") "quick"
-          else if (fileManager.LATEST_LIB endsWith "build/pack/lib/scala-library.jar") "pack"
-          else if (fileManager.LATEST_LIB endsWith "dists/latest/lib/scala-library.jar/") "latest"
-          else "installed"
-        )
-        val args = Array(binary, "-logfile", logFile.path, "-file", file.path)
-        NestUI.verbose("ant "+args.mkString(" "))
-        ant(args, logFile) && diffCheck(file, compareOutput(file.getParentFile, logFile))
-      }
-      catch { // *catch-all*
-        case e: Exception =>
-          NestUI.verbose("caught "+e)
-          false
-      }
-
-      (succeeded, LogContext(logFile, swr, wr))
-    }
-
-    def runSpecializedTest(file: File): (Boolean, LogContext) =
-      runTestCommon(file, expectFailure = false)((logFile, outDir) => {
-        val dir       = file.getParentFile
-
-        // adding the instrumented library to the classpath
-        ( execTest(outDir, logFile, PathSettings.srcSpecLib.toString) &&
-          diffCheck(file, compareOutput(dir, logFile))
-        )
-      })
-
-    def runInstrumentedTest(file: File): (Boolean, LogContext) =
-      runTestCommon(file, expectFailure = false)((logFile, outDir) => {
-        val dir       = file.getParentFile
-
-        // adding the javagent option with path to instrumentation agent
-        execTest(outDir, logFile, javaOpts = "-javaagent:"+PathSettings.instrumentationAgentLib) &&
-        diffCheck(file, compareOutput(dir, logFile))
-      })
-
-    def processSingleFile(file: File): (Boolean, LogContext) = kind match {
-      case "scalacheck" =>
-        val succFn: (File, File) => Boolean = { (logFile, outDir) =>
-          NestUI.verbose("compilation of "+file+" succeeded\n")
-
-          val outURL    = outDir.getAbsoluteFile.toURI.toURL
-          val logWriter = new PrintStream(new FileOutputStream(logFile), true)
-
-          Output.withRedirected(logWriter) {
-            // this classloader is test specific: its parent contains library classes and others
-            ScalaClassLoader.fromURLs(List(outURL), params.scalaCheckParentClassLoader).run("Test", Nil)
-          }
-
-          NestUI.verbose(file2String(logFile))
-          // obviously this must be improved upon
-          val lines = SFile(logFile).lines map (_.trim) filterNot (_ == "") toBuffer;
-          lines.forall(x => !x.startsWith("!")) || {
-            NestUI.normal("ScalaCheck test failed. Output:\n")
-            lines foreach (x => NestUI.normal(x + "\n"))
-            false
-          }
-        }
-        runTestCommon(file, expectFailure = false)(
-          succFn,
-          (logFile, outDir) => outputLogFile(logFile)
-        )
-
-      case "pos" =>
-        runTestCommon(file, expectFailure = false)(
-          (logFile, outDir) => true,
-          (_, _) => ()
-        )
-
-      case "neg" =>
-        runTestCommon(file, expectFailure = true)((logFile, outDir) => {
-          // compare log file to check file
-          val dir      = file.getParentFile
-
-          // diff is contents of logFile
-          fileManager.mapFile(logFile, replaceSlashes(dir, _))
-          diffCheck(file, compareOutput(dir, logFile))
-        })
-
-      case "run" | "jvm" =>
-        runJvmTest(file)
-
-      case "specialized" =>
-        runSpecializedTest(file)
-
-      case "instrumented" =>
-        runInstrumentedTest(file)
-
-      case "presentation" =>
-        runJvmTest(file) // for the moment, it's exactly the same as for a run test
-
-      case "ant" =>
-        runAntTest(file)
-
-      case "buildmanager" =>
-        val (swr, wr) = newTestWriters()
-        printInfoStart(file, wr)
-        val (outDir, testFile, changesDir) = {
-          if (!file.isDirectory)
-            (null, null, null)
-          else {
-            NestUI.verbose(this+" running test "+fileBase)
-            val outDir = createOutputDir()
-            val testFile = new File(file, fileBase + ".test")
-            val changesDir = new File(file, fileBase + ".changes")
-
-            if (changesDir.isFile || !testFile.isFile) {
-              // if changes exists then it has to be a dir
-              if (!testFile.isFile) NestUI.verbose("invalid build manager test file")
-              if (changesDir.isFile) NestUI.verbose("invalid build manager changes directory")
-              (null, null, null)
-            }
-            else {
-              copyTestFiles(file, outDir)
-              NestUI.verbose("outDir:  "+outDir)
-              NestUI.verbose("logFile: "+logFile)
-              (outDir, testFile, changesDir)
-            }
-          }
-        }
-        if (outDir == null)
-          return (false, LogContext(logFile))
-
-        // Pre-conditions satisfied
-        val sourcepath = outDir.getAbsolutePath+File.separator
-
-        // configure input/output files
-        val logWriter = new PrintStream(new FileOutputStream(logFile), true)
-        val testReader = new BufferedReader(new FileReader(testFile))
-        val logConsoleWriter = new PrintWriter(logWriter, true)
-
-        // create proper settings for the compiler
-        val settings = new Settings(workerError)
-        settings.outdir.value = outDir.getAbsoluteFile.getAbsolutePath
-        settings.sourcepath.value = sourcepath
-        settings.classpath.value = fileManager.CLASSPATH
-        settings.Ybuildmanagerdebug.value = true
-
-        // simulate Build Manager loop
-        val prompt = "builder > "
-        val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
-        val bM: BuildManager =
-            new RefinedBuildManager(settings) {
-              override protected def newCompiler(settings: Settings) =
-                  new BuilderGlobal(settings, reporter)
-            }
-
-        def testCompile(line: String): Boolean = {
-          NestUI.verbose("compiling " + line)
-          val args = (line split ' ').toList
-          val command = new CompilerCommand(args, settings)
-          command.ok && {
-            bM.update(filesToSet(settings.sourcepath.value, command.files), Set.empty)
-            !reporter.hasErrors
-          }
-        }
-
-        val updateFiles = (line: String) => {
-          NestUI.verbose("updating " + line)
-          (line split ' ').toList forall (u =>
-            (u split "=>").toList match {
-                case origFileName::(newFileName::Nil) =>
-                  val newFile = new File(changesDir, newFileName)
-                  if (newFile.isFile) {
-                    val v = overwriteFileWith(new File(outDir, origFileName), newFile)
-                    if (!v)
-                      NestUI.verbose("'update' operation on " + u + " failed")
-                    v
-                  } else {
-                    NestUI.verbose("File " + newFile + " is invalid")
-                    false
-                  }
-                case a =>
-                  NestUI.verbose("Other =: " + a)
-                  false
-            }
-          )
-        }
-
-        def loop(): Boolean = {
-          testReader.readLine() match {
-            case null | ""    =>
-              NestUI.verbose("finished")
-              true
-            case s if s startsWith ">>update "  =>
-              updateFiles(s stripPrefix ">>update ") && loop()
-            case s if s startsWith ">>compile " =>
-              val files = s stripPrefix ">>compile "
-              logWriter.println(prompt + files)
-              // A failing compile is acceptable only as the final step of the test
-              if (testCompile(files)) loop()
-              else {
-                val t = testReader.readLine()
-                (t == null) || (t == "")
-              }
-            case s =>
-              NestUI.verbose("wrong command in test file: " + s)
-              false
-          }
-        }
-
-        Output.withRedirected(logWriter) {
-          try loop()
-          finally testReader.close()
-        }
-        fileManager.mapFile(logFile, replaceSlashes(new File(sourcepath), _))
-
-        (diffCheck(file, compareOutput(file, logFile)), LogContext(logFile, swr, wr))
-
-      case "res" => {
-          // simulate resident compiler loop
-          val prompt = "\nnsc> "
-
-          val (swr, wr) = newTestWriters()
-          printInfoStart(file, wr)
-
-          NestUI.verbose(this+" running test "+fileBase)
-          val dir = file.getParentFile
-          val outDir = createOutputDir()
-          val resFile = new File(dir, fileBase + ".res")
-          NestUI.verbose("outDir:  "+outDir)
-          NestUI.verbose("logFile: "+logFile)
-          //NestUI.verbose("logFileErr: "+logFileErr)
-          NestUI.verbose("resFile: "+resFile)
-
-          // run compiler in resident mode
-          // $SCALAC -d "$os_dstbase".obj -Xresident -sourcepath . "$@"
-          val sourcedir  = logFile.getParentFile.getAbsoluteFile
-          val sourcepath = sourcedir.getAbsolutePath+File.separator
-          NestUI.verbose("sourcepath: "+sourcepath)
-
-          val argList = List(
-            "-d", outDir.getAbsoluteFile.getPath,
-            "-Xresident",
-            "-sourcepath", sourcepath)
-
-          // configure input/output files
-          val logOut    = new FileOutputStream(logFile)
-          val logWriter = new PrintStream(logOut, true)
-          val resReader = new BufferedReader(new FileReader(resFile))
-          val logConsoleWriter = new PrintWriter(new OutputStreamWriter(logOut), true)
-
-          // create compiler
-          val settings = new Settings(workerError)
-          settings.sourcepath.value = sourcepath
-          settings.classpath.value = fileManager.CLASSPATH
-          val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
-          val command = new CompilerCommand(argList, settings)
-          object compiler extends Global(command.settings, reporter)
-
-          val resCompile = (line: String) => {
-            NestUI.verbose("compiling "+line)
-            val cmdArgs = (line split ' ').toList map (fs => new File(dir, fs).getAbsolutePath)
-            NestUI.verbose("cmdArgs: "+cmdArgs)
-            val sett = new Settings(workerError)
-            sett.sourcepath.value = sourcepath
-            val command = new CompilerCommand(cmdArgs, sett)
-            command.ok && {
-              (new compiler.Run) compile command.files
-              !reporter.hasErrors
-            }
-          }
-
-          def loop(action: String => Boolean): Boolean = {
-            logWriter.print(prompt)
-            resReader.readLine() match {
-              case null | ""  => logWriter.flush() ; true
-              case line       => action(line) && loop(action)
-            }
-          }
-
-          Output.withRedirected(logWriter) {
-            try loop(resCompile)
-            finally resReader.close()
-          }
-          fileManager.mapFile(logFile, replaceSlashes(dir, _))
-
-          (diffCheck(file, compareOutput(dir, logFile)), LogContext(logFile, swr, wr))
-        }
-
-      case "shootout" =>
-        val (swr, wr) = newTestWriters()
-        printInfoStart(file, wr)
-
-        NestUI.verbose(this+" running test "+fileBase)
-        val outDir = createOutputDir()
-
-        // 2. define file {outDir}/test.scala that contains code to compile/run
-        val testFile = new File(outDir, "test.scala")
-        NestUI.verbose("outDir:   "+outDir)
-        NestUI.verbose("logFile:  "+logFile)
-        NestUI.verbose("testFile: "+testFile)
-
-        // 3. cat {test}.scala.runner {test}.scala > testFile
-        val runnerFile = new File(parent, fileBase+".scala.runner")
-        val bodyFile   = new File(parent, fileBase+".scala")
-        SFile(testFile).writeAll(
-          file2String(runnerFile),
-          file2String(bodyFile)
-        )
-
-        // 4. compile testFile
-        val ok = compileMgr.attemptCompile(None, List(testFile), kind, logFile) eq CompileSuccess
-        NestUI.verbose("compilation of " + testFile + (if (ok) "succeeded" else "failed"))
-        val result = ok && {
-          execTest(outDir, logFile) && {
-            NestUI.verbose(this+" finished running "+fileBase)
-            diffCheck(file, compareOutput(parent, logFile))
-          }
-        }
-
-        (result, LogContext(logFile, swr, wr))
-
-      case "scalap" =>
-        runInContext(file, (logFile: File, outDir: File) => {
-          val sourceDir = Directory(if (file.isFile) file.getParent else file)
-          val sources   = sourceDir.files filter (_ hasExtension "scala") map (_.jfile) toList
-          val results   = sourceDir.files filter (_.name == "result.test") map (_.jfile) toList
-
-          if (sources.length != 1 || results.length != 1) {
-            NestUI.warning("Misconfigured scalap test directory: " + sourceDir + " \n")
-            false
-          }
-          else {
-            val resFile = results.head
-            // 2. Compile source file
-
-            if (!compileMgr.attemptCompile(Some(outDir), sources, kind, logFile).isPositive) {
-              NestUI.normal("compilerMgr failed to compile %s to %s".format(sources mkString ", ", outDir))
-              false
-            }
-            else {
-              // 3. Decompile file and compare results
-              val isPackageObject = sourceDir.name startsWith "package"
-              val className       = sourceDir.name.capitalize + (if (!isPackageObject) "" else ".package")
-              val url             = outDir.toURI.toURL
-              val loader          = ScalaClassLoader.fromURLs(List(url), this.getClass.getClassLoader)
-              val clazz           = loader.loadClass(className)
-
-              val byteCode = ByteCode.forClass(clazz)
-              val result   = scala.tools.scalap.Main.decompileScala(byteCode.bytes, isPackageObject)
-
-              SFile(logFile) writeAll result
-              diffCheck(file, compareFiles(logFile, resFile))
-            }
-          }
-        })
-
-      case "script" =>
-        val (swr, wr) = newTestWriters()
-        printInfoStart(file, wr)
-
-        NestUI.verbose(this+" running test "+fileBase)
-
-        // check whether there is an args file
-        val argsFile = new File(file.getParentFile, fileBase+".args")
-        NestUI.verbose("argsFile: "+argsFile)
-        val argString = file2String(argsFile)
-        val succeeded = try {
-          val cmdString =
-            if (isWin) {
-              val batchFile = new File(file.getParentFile, fileBase+".bat")
-              NestUI.verbose("batchFile: "+batchFile)
-              batchFile.getAbsolutePath
-            }
-            else file.getAbsolutePath
-
-          val ok = runCommand(cmdString+argString, logFile)
-          ( ok && diffCheck(file, compareOutput(file.getParentFile, logFile)) )
-        }
-        catch { case e: Exception => NestUI.verbose("caught "+e) ; false }
-
-        (succeeded, LogContext(logFile, swr, wr))
-    }
-
-    private def crashContext(t: Throwable): LogContext = {
-      try {
-        logStackTrace(logFile, t, "Possible compiler crash during test of: " + testFile + "\n")
-        LogContext(logFile)
-      }
-      catch { case t: Throwable => LogContext(null) }
-    }
-
-    def run(): (Boolean, LogContext) = {
-      val result = try processSingleFile(testFile) catch { case t: Throwable => (false, crashContext(t)) }
-      passed = Some(result._1)
-      result
-    }
-
-    def reportResult(writers: Option[(StringWriter, PrintWriter)]) {
-      writers foreach { case (swr, wr) =>
-        if (passed.isEmpty) printInfoTimeout(wr)
-        else printInfoEnd(passed.get, wr)
-        wr.flush()
-        swr.flush()
-        NestUI.normal(swr.toString)
-
-        if (passed exists (x => !x)) {
-          if (fileManager.showDiff || isPartestDebug)
-            NestUI.normal(testDiff)
-          if (fileManager.showLog)
-            showLog(logFile)
-        }
-      }
-      toDelete foreach (_.deleteRecursively())
-    }
-  }
-
-  def runTest(testFile: File): TestState = {
-    val runner = new Runner(testFile)
-    // when option "--failed" is provided execute test only if log
-    // is present (which means it failed before)
-    if (fileManager.failed && !runner.logFile.canRead)
-      return TestState.Ok
-
-    // sys addShutdownHook cleanup()
-    val ((success, ctx), elapsed) = timed(runner.run())
-    val state                     = if (success) TestState.Ok else TestState.Fail
-
-    runner.reportResult(ctx.writers)
-    state
-  }
-
-  private def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] =
-    fs flatMap (s => Option(AbstractFile getFile (pre + s))) toSet
-
-  private def copyTestFiles(testDir: File, destDir: File) {
-    val invalidExts = List("changes", "svn", "obj")
-    testDir.listFiles.toList filter (
-            f => (isJavaOrScala(f) && f.isFile) ||
-                 (f.isDirectory && !(invalidExts.contains(SFile(f).extension)))) foreach
-      { f => fileManager.copyFile(f, destDir) }
-  }
-
-  private def showLog(logFile: File) {
-    file2String(logFile) match {
-      case "" if logFile.canRead  => ()
-      case ""                     => NestUI.failure("Couldn't open log file: " + logFile + "\n")
-      case s                      => NestUI.normal(s)
-    }
-  }
-}
diff --git a/src/partest/scala/tools/partest/nest/RunnerUtils.scala b/src/partest/scala/tools/partest/nest/RunnerUtils.scala
deleted file mode 100644
index 6707a93..0000000
--- a/src/partest/scala/tools/partest/nest/RunnerUtils.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-object RunnerUtils {
-  def splitArgs(str: String) = str split "\\s" filterNot (_ == "") toList
-
-  def searchPath(option: String, as: List[String]): Option[String] = as match {
-    case `option` :: r :: _ => Some(r)
-    case _ :: rest          => searchPath(option, rest)
-    case Nil                => None
-  }
-
-  def searchAndRemovePath(option: String, as: List[String]) = (as indexOf option) match {
-    case -1   => (None, as)
-    case idx  => (Some(as(idx + 1)), (as take idx) ::: (as drop (idx + 2)))
-  }
-
-  def searchAndRemoveOption(option: String, as: List[String]) = (as indexOf option) match {
-    case -1   => (false, as)
-    case idx  => (true, (as take idx) ::: (as drop (idx + 1)))
-  }
-}
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
deleted file mode 100644
index 20f9c70..0000000
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-package scala.tools.partest
-package nest
-
-import java.io.File
-import scala.tools.nsc.io.{ Directory }
-import scala.util.Properties.setProp
-import scala.collection.JavaConverters._
-
-object SBTRunner extends DirectRunner {
-
-  val fileManager = new FileManager {
-    var JAVACMD: String        = "java"
-    var JAVAC_CMD: String      = "javac"
-    var CLASSPATH: String      = _
-    var LATEST_LIB: String     = _
-    var LATEST_REFLECT: String = _
-    var LATEST_COMP: String    = _
-    var LATEST_PARTEST: String = _
-    var LATEST_ACTORS: String  = _
-    val testRootPath: String   = "test"
-    val testRootDir: Directory = Directory(testRootPath)
-  }
-
-  def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String):java.util.Map[String, TestState] = {
-    def failedOnlyIfRequired(files: List[File]): List[File] = {
-      if (fileManager.failed) files filter (x => fileManager.logFileExists(x, kind)) else files
-    }
-    runTestsForFiles(failedOnlyIfRequired(kindFiles.toList), kind).asJava
-  }
-
-  case class CommandLineOptions(classpath: Option[String] = None,
-                                tests: Map[String, Array[File]] = Map(),
-                                scalacOptions: Seq[String] = Seq(),
-                                justFailedTests: Boolean = false)
-
-  def mainReflect(args: Array[String]): java.util.Map[String, String] = {
-    setProp("partest.debug", "true")
-
-    val Argument = new scala.util.matching.Regex("-(.*)")
-    def parseArgs(args: Seq[String], data: CommandLineOptions): CommandLineOptions = args match {
-      case Seq("--failed", rest @ _*)               => parseArgs(rest, data.copy(justFailedTests = true))
-      case Seq("-cp", cp, rest @ _*)                => parseArgs(rest, data.copy(classpath=Some(cp)))
-      case Seq("-scalacoption", opt, rest @ _*)     => parseArgs(rest, data.copy(scalacOptions= data.scalacOptions :+ opt))
-      case Seq(Argument(name), runFiles, rest @ _*) => parseArgs(rest, data.copy(tests=data.tests + (name -> runFiles.split(",").map(new File(_)))))
-      case Seq()                                    => data
-      case x                                        => sys.error("Unknown command line options: " + x)
-    }
-    val config = parseArgs(args, CommandLineOptions())
-    fileManager.SCALAC_OPTS ++= config.scalacOptions
-    fileManager.CLASSPATH = config.classpath getOrElse sys.error("No classpath set")
-
-    def findClasspath(jar: String, name: String): Option[String] = {
-      val optJar = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches (".*"+jar+".*\\.jar"))).headOption
-      val optClassDir = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches (".*"+name+File.separator+"classes"))).headOption
-      optJar orElse optClassDir
-    }
-    // Find scala library jar file...
-    fileManager.LATEST_LIB = findClasspath("scala-library", "scala-library") getOrElse sys.error("No scala-library found! Classpath = " + fileManager.CLASSPATH)
-    fileManager.LATEST_REFLECT = findClasspath("scala-reflect", "scala-reflect") getOrElse sys.error("No scala-reflect found! Classpath = " + fileManager.CLASSPATH)
-    fileManager.LATEST_COMP = findClasspath("scala-compiler", "scala-compiler") getOrElse sys.error("No scala-compiler found! Classpath = " + fileManager.CLASSPATH)
-    fileManager.LATEST_PARTEST = findClasspath("scala-partest", "partest") getOrElse sys.error("No scala-partest found! Classpath = " + fileManager.CLASSPATH)
-    fileManager.LATEST_ACTORS = findClasspath("scala-actors", "actors") getOrElse sys.error("No scala-actors found! Classpath = " + fileManager.CLASSPATH)
-
-    // TODO - Do something useful here!!!
-    fileManager.JAVAC_CMD = "javac"
-    fileManager.failed      = config.justFailedTests
-    // TODO - Make this a flag?
-    //fileManager.updateCheck = true
-    // Now run and report...
-    val runs = config.tests.filterNot(_._2.isEmpty)
-    (for {
-     (testType, files) <- runs
-     (path, result) <- reflectiveRunTestsForFiles(files,testType).asScala
-    } yield (path, fixResult(result))).seq.asJava
-  }
-  def fixResult(result: TestState): String = result match {
-    case TestState.Ok => "OK"
-    case TestState.Fail => "FAIL"
-    case TestState.Timeout => "TIMEOUT"
-  }
-  def main(args: Array[String]): Unit = {
-    val failures = (
-      for ((path, result) <- mainReflect(args).asScala ; if result != TestState.Ok) yield
-        path + ( if (result == TestState.Fail) " [FAILED]" else " [TIMEOUT]" )
-    )
-    // Re-list all failures so we can go figure out what went wrong.
-    failures foreach System.err.println
-    if(!failures.isEmpty) sys.exit(1)
-  }
-}
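
The parseArgs method in the removed SBTRunner consumes options recursively via vararg Seq patterns. Below is a standalone sketch of that idiom with made-up names (Opts, parse, the option strings); it assumes nothing beyond the standard library:

    case class Opts(classpath: Option[String] = None, failed: Boolean = false)

    def parse(args: Seq[String], acc: Opts = Opts()): Opts = args match {
      case Seq("--failed", rest @ _*) => parse(rest, acc.copy(failed = true))
      case Seq("-cp", cp, rest @ _*)  => parse(rest, acc.copy(classpath = Some(cp)))
      case Seq()                      => acc
      case other                      => sys.error("Unknown command line options: " + other)
    }

    // parse(Seq("-cp", "lib/scala-library.jar", "--failed"))
    //   == Opts(Some("lib/scala-library.jar"), failed = true)
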
diff --git a/src/partest/scala/tools/partest/nest/TestFile.scala b/src/partest/scala/tools/partest/nest/TestFile.scala
deleted file mode 100644
index 8717777..0000000
--- a/src/partest/scala/tools/partest/nest/TestFile.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{ File => JFile }
-import scala.tools.nsc.Settings
-import scala.tools.nsc.util.ClassPath
-import scala.tools.nsc.io._
-import scala.util.Properties.{ propIsSet, propOrElse, setProp }
-
-trait TestFileCommon {
-  def file: JFile
-  def kind: String
-
-  val dir       = file.toAbsolute.parent
-  val fileBase  = file.stripExtension
-  val flags     = dir / (fileBase + ".flags") ifFile (f => f.slurp().trim)
-
-  lazy val objectDir = dir / (fileBase + "-" + kind + ".obj") createDirectory true
-  def setOutDirTo = objectDir
-}
-
-abstract class TestFile(val kind: String) extends TestFileCommon {
-  def file: JFile
-  def fileManager: FileManager
-
-  def defineSettings(settings: Settings, setOutDir: Boolean) = {
-    settings.classpath append dir.path
-    if (setOutDir)
-      settings.outputDirs setSingleOutput setOutDirTo.path
-
-    // adding codelib.jar to the classpath
-    // codelib provides the possibility to override standard reify
-    // this shields the massive amount of reification tests from changes in the API
-    settings.classpath prepend PathSettings.srcCodeLib.toString
-    if (propIsSet("java.class.path")) setProp("java.class.path", PathSettings.srcCodeLib.toString + ";" + propOrElse("java.class.path", ""))
-
-    // have to catch bad flags somewhere
-    (flags forall (f => settings.processArgumentString(f)._1)) && {
-      settings.classpath append fileManager.CLASSPATH
-      true
-    }
-  }
-
-  override def toString(): String = "%s %s".format(kind, file)
-}
-
-case class PosTestFile(file: JFile, fileManager: FileManager) extends TestFile("pos")
-case class NegTestFile(file: JFile, fileManager: FileManager) extends TestFile("neg")
-case class RunTestFile(file: JFile, fileManager: FileManager) extends TestFile("run")
-case class BuildManagerTestFile(file: JFile, fileManager: FileManager) extends TestFile("bm")
-case class ScalaCheckTestFile(file: JFile, fileManager: FileManager) extends TestFile("scalacheck")
-case class JvmTestFile(file: JFile, fileManager: FileManager) extends TestFile("jvm")
-case class ShootoutTestFile(file: JFile, fileManager: FileManager) extends TestFile("shootout") {
-  override def setOutDirTo = file.parent
-}
-case class ScalapTestFile(file: JFile, fileManager: FileManager) extends TestFile("scalap") {
-  override def setOutDirTo = file.parent
-}
-case class SpecializedTestFile(file: JFile, fileManager: FileManager) extends TestFile("specialized") {
-  override def defineSettings(settings: Settings, setOutDir: Boolean): Boolean = {
-    super.defineSettings(settings, setOutDir) && {
-      // add the instrumented library version to classpath
-      settings.classpath prepend PathSettings.srcSpecLib.toString
-      // @partest maintainer: if we use a custom Scala build (specified via --classpath)
-      // then the classes provided by it will come earlier than instrumented.jar in the resulting classpath
-      // this entire classpath business needs a thorough solution
-      if (propIsSet("java.class.path")) setProp("java.class.path", PathSettings.srcSpecLib.toString + ";" + propOrElse("java.class.path", ""))
-      true
-    }
-  }
-}
-case class PresentationTestFile(file: JFile, fileManager: FileManager) extends TestFile("presentation")
-case class AntTestFile(file: JFile, fileManager: FileManager) extends TestFile("ant")
-case class InstrumentedTestFile(file: JFile, fileManager: FileManager) extends TestFile("instrumented")
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala
deleted file mode 100644
index d38ce69..0000000
--- a/src/partest/scala/tools/partest/package.scala
+++ /dev/null
@@ -1,126 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- */
-
-package scala.tools
-
-import java.io.{ FileNotFoundException, File => JFile }
-import nsc.io.{ Path, Directory, File => SFile }
-import scala.tools.util.PathResolver
-import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
-import scala.sys.process.javaVmArguments
-import java.util.concurrent.Callable
-
-package partest {
-  class TestState {
-    def isOk      = this eq TestState.Ok
-    def isFail    = this eq TestState.Fail
-    def isTimeout = this eq TestState.Timeout
-  }
-  object TestState {
-    val Ok      = new TestState
-    val Fail    = new TestState
-    val Timeout = new TestState
-  }
-}
-
-package object partest {
-  import nest.NestUI
-
-  implicit private[partest] def temporaryPath2File(x: Path): JFile = x.jfile
-  implicit private[partest] def temporaryFile2Path(x: JFile): Path = Path(x)
-
-  implicit lazy val postfixOps = scala.language.postfixOps
-  implicit lazy val implicitConversions = scala.language.implicitConversions
-
-  def timed[T](body: => T): (T, Long) = {
-    val t1 = System.currentTimeMillis
-    val result = body
-    val t2 = System.currentTimeMillis
-
-    (result, t2 - t1)
-  }
-
-  def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
-
-  def path2String(path: String) = file2String(new JFile(path))
-  def file2String(f: JFile) =
-    try SFile(f).slurp()
-    catch { case _: FileNotFoundException => "" }
-
-  def basename(name: String): String = Path(name).stripExtension
-
-  def resultsToStatistics(results: Iterable[(_, TestState)]): (Int, Int) = {
-    val (files, failures) = results map (_._2 == TestState.Ok) partition (_ == true)
-    (files.size, failures.size)
-  }
-
-  def vmArgString = javaVmArguments.mkString(
-    "Java VM started with arguments: '",
-    " ",
-    "'"
-  )
-
-  def allPropertiesString = {
-    import scala.collection.JavaConversions._
-    System.getProperties.toList.sorted map { case (k, v) => "%s -> %s\n".format(k, v) } mkString ""
-  }
-
-  def showAllJVMInfo() {
-    NestUI.verbose(vmArgString)
-    NestUI.verbose(allPropertiesString)
-  }
-
-  def isPartestDebug: Boolean =
-    propOrEmpty("partest.debug") == "true"
-
-
-  import scala.language.experimental.macros
-
-  /**
-   * `trace("".isEmpty)` will return `true` and as a side effect print the following to standard out.
-   * {{{
-   *   trace> "".isEmpty
-   *   res: Boolean = true
-   *
-   * }}}
-   *
-   * An alternative to [[scala.tools.partest.ReplTest]] that avoids the inconvenience of embedding
-   * test code in a string.
-   */
-  def trace[A](a: A) = macro traceImpl[A]
-
-  import scala.reflect.macros.Context
-  def traceImpl[A: c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = {
-    import c.universe._
-    import definitions._
-
-    // xeno.by: reify shouldn't be used explicitly before the final release of 2.10.0,
-    // because this impairs reflection refactorings
-    //
-    // val exprCode = c.literal(show(a.tree))
-    // val exprType = c.literal(show(a.actualType))
-    // reify {
-    //   println(s"trace> ${exprCode.splice}\nres: ${exprType.splice} = ${a.splice}\n")
-    //   a.splice
-    // }
-
-    c.Expr(Block(
-      List(Apply(
-        Select(Ident(PredefModule), newTermName("println")),
-        List(Apply(
-          Select(Apply(
-            Select(Ident(ScalaPackage), newTermName("StringContext")),
-            List(
-              Literal(Constant("trace> ")),
-              Literal(Constant("\\nres: ")),
-              Literal(Constant(" = ")),
-              Literal(Constant("\\n")))),
-          newTermName("s")),
-          List(
-            Literal(Constant(show(a.tree))),
-            Literal(Constant(show(a.actualType))),
-            a.tree))))),
-      a.tree))
-  }
-}
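
A minimal usage sketch (not part of the patch) for the timed helper defined in the removed package object; it assumes an import of scala.tools.partest._ and an arbitrary expression to measure:

    val (sum, millis) = timed((1 to 1000000).sum)
    // timed returns the computed value together with the elapsed wall-clock time
    println("computed " + sum + " in " + millis + " ms")
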
diff --git a/src/partest/scala/tools/partest/utils/PrintMgr.scala b/src/partest/scala/tools/partest/utils/PrintMgr.scala
deleted file mode 100644
index d25be87..0000000
--- a/src/partest/scala/tools/partest/utils/PrintMgr.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala Parallel Testing               **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-// $Id$
-
-package scala.tools.partest
-package utils
-
-/**
- * @author Thomas Hofer
- */
-object PrintMgr {
-
-  val NONE = 0
-  val SOME = 1
-  val MANY = 2
-
-  var outline = ""
-  var success = ""
-  var failure = ""
-  var warning = ""
-  var default = ""
-
-  def initialization(number: Int) = number match {
-    case MANY =>
-      outline = Console.BOLD + Console.BLACK
-      success = Console.BOLD + Console.GREEN
-      failure = Console.BOLD + Console.RED
-      warning = Console.BOLD + Console.YELLOW
-      default = Console.RESET
-    case SOME =>
-      outline = Console.BOLD + Console.BLACK
-      success = Console.RESET
-      failure = Console.BOLD + Console.BLACK
-      warning = Console.BOLD + Console.BLACK
-      default = Console.RESET
-    case _ =>
-  }
-
-  def printOutline(msg: String) = print(outline + msg + default)
-
-  def printSuccess(msg: String) = print(success  + msg + default)
-
-  def printFailure(msg: String) = print(failure  + msg + default)
-
-  def printWarning(msg: String) = print(warning  + msg + default)
-}
diff --git a/src/partest/scala/tools/partest/utils/Properties.scala b/src/partest/scala/tools/partest/utils/Properties.scala
deleted file mode 100644
index 1263c96..0000000
--- a/src/partest/scala/tools/partest/utils/Properties.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala Parallel Testing               **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.tools.partest
-package utils
-
-/** Loads partest.properties from the jar. */
-object Properties extends scala.util.PropertiesTrait {
-  protected def propCategory    = "partest"
-  protected def pickJarBasedOn  = classOf[nest.RunnerManager]
-}
diff --git a/src/reflect/scala/reflect/api/Annotations.scala b/src/reflect/scala/reflect/api/Annotations.scala
index 09eaf7a..b880fad 100644
--- a/src/reflect/scala/reflect/api/Annotations.scala
+++ b/src/reflect/scala/reflect/api/Annotations.scala
@@ -1,14 +1,15 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
 import scala.collection.immutable.ListMap
 
 /**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *  <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
- * This trait provides annotation support for the reflection API.
+ *  This trait provides annotation support for the reflection API.
  *
- *  The API distinguishes between two kinds of annotations:
+ *  In Scala, annotations belong to one of the two categories:
  *
  *  <ul>
  *  <li>''Java annotations'': annotations on definitions produced by the Java compiler, i.e., subtypes of [[java.lang.annotation.Annotation]]
@@ -21,16 +22,13 @@ import scala.collection.immutable.ListMap
  *  it is stored as special attributes in the corresponding classfile, and not as a Java annotation. Note that subclassing
  *  just [[scala.annotation.Annotation]] is not enough to have the corresponding metadata persisted for runtime reflection.
  *
- *  The distinction between Java and Scala annotations is manifested in the contract of [[scala.reflect.api.Annotations#Annotation]], which exposes
- *  both `scalaArgs` and `javaArgs`. For Scala or Java annotations extending [[scala.annotation.ClassfileAnnotation]] `scalaArgs` is empty
- *  and arguments are stored in `javaArgs`. For all other Scala annotations, arguments are stored in `scalaArgs` and `javaArgs` is empty.
+ *  Both Java and Scala annotations are represented as typed trees carrying constructor invocations corresponding
+ *  to the annotation. For instance, the annotation in `@ann(1, 2) class C` is represented as `q"@new ann(1, 2)"`.
  *
- *  Arguments in `scalaArgs` are represented as typed trees. Note that these trees are not transformed by any phases
- *  following the type-checker. Arguments in `javaArgs` are repesented as a map from [[scala.reflect.api.Names#Name]] to
- *  [[scala.reflect.api.Annotations#JavaArgument]]. Instances of `JavaArgument` represent different kinds of Java annotation arguments:
- *    - literals (primitive and string constants),
- *    - arrays and
- *    - nested annotations.
+ *  Unlike Java reflection, Scala reflection does not support evaluation of constructor invocations stored in annotations
+ *  into underlying objects. For instance it's impossible to go from `@ann(1, 2) class C` to `ann(1, 2)`, so one
+ *  has to analyze trees representing annotation arguments to manually extract corresponding values. Towards that end,
+ *  arguments of an annotation can be obtained via `annotation.tree.children.tail`.
  *
  *  For more information about `Annotation`s, see the [[http://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]]
  *
@@ -45,23 +43,22 @@ trait Annotations { self: Universe =>
    */
   type Annotation >: Null <: AnyRef with AnnotationApi
 
-  /** A tag that preserves the identity of the `Annotation` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
+  /** The constructor/extractor for `Annotation` instances.
+   *  @group Extractors
    */
-  implicit val AnnotationTag: ClassTag[Annotation]
-
-   /** The constructor/extractor for `Annotation` instances.
-    *  @group Extractors
-    */
-   val Annotation: AnnotationExtractor
+  val Annotation: AnnotationExtractor
 
   /** An extractor class to create and pattern match with syntax `Annotation(tpe, scalaArgs, javaArgs)`.
    *  Here, `tpe` is the annotation type, `scalaArgs` the payload of Scala annotations, and `javaArgs` the payload of Java annotations.
     *  @group Extractors
     */
   abstract class AnnotationExtractor {
+    def apply(tree: Tree): Annotation = treeToAnnotation(tree)
+
+    @deprecated("Use `apply(tree: Tree): Annotation` instead", "2.11.0")
     def apply(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, JavaArgument]): Annotation
+
+    @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
     def unapply(ann: Annotation): Option[(Type, List[Tree], ListMap[Name, JavaArgument])]
   }
 
@@ -70,55 +67,64 @@ trait Annotations { self: Universe =>
    *  @group API
    */
   trait AnnotationApi {
+    /** The tree underlying the annotation. */
+    def tree: Tree = annotationToTree(this.asInstanceOf[Annotation])
+
     /** The type of the annotation. */
+    @deprecated("Use `tree.tpe` instead", "2.11.0")
     def tpe: Type
 
     /** Payload of the Scala annotation: a list of abstract syntax trees that represent the argument.
      *  Empty for Java annotations.
      */
+    @deprecated("Use `tree.children.tail` instead", "2.11.0")
     def scalaArgs: List[Tree]
 
     /** Payload of the Java annotation: a list of name-value pairs.
      *  Empty for Scala annotations.
      */
+    @deprecated("Use `tree.children.tail` instead", "2.11.0")
     def javaArgs: ListMap[Name, JavaArgument]
   }
 
+  protected[scala] def annotationToTree(ann: Annotation): Tree
+  protected[scala] def treeToAnnotation(tree: Tree): Annotation
+
   /** A Java annotation argument
    *  @template
    *  @group Annotations
    */
-  type JavaArgument >: Null <: AnyRef
+  @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+  type JavaArgument >: Null <: AnyRef with JavaArgumentApi
 
-  /** A tag that preserves the identity of the `JavaArgument` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
+  /** Has no special methods. Is here to provide erased identity for `JavaArgument`.
+   *  @group API
    */
-  implicit val JavaArgumentTag: ClassTag[JavaArgument]
+  @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+  trait JavaArgumentApi
 
   /** A literal argument to a Java annotation as `"Use X instead"` in `@Deprecated("Use X instead")`
    *  @template
    *  @group Annotations
    */
-  type LiteralArgument >: Null <: AnyRef with JavaArgument with LiteralArgumentApi
-
-  /** A tag that preserves the identity of the `LiteralArgument` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val LiteralArgumentTag: ClassTag[LiteralArgument]
+  @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+  type LiteralArgument >: Null <: LiteralArgumentApi with JavaArgument
 
   /** The constructor/extractor for `LiteralArgument` instances.
    *  @group Extractors
    */
+  @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
   val LiteralArgument: LiteralArgumentExtractor
 
   /** An extractor class to create and pattern match with syntax `LiteralArgument(value)`
    *  where `value` is the constant argument.
    *  @group Extractors
    */
+  @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
   abstract class LiteralArgumentExtractor {
+    @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
     def apply(value: Constant): LiteralArgument
+    @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
     def unapply(arg: LiteralArgument): Option[Constant]
   }
 
@@ -126,8 +132,10 @@ trait Annotations { self: Universe =>
    *  The main source of information about annotations is the [[scala.reflect.api.Annotations]] page.
    *  @group API
    */
+  @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
   trait LiteralArgumentApi {
     /** The underlying compile-time constant value. */
+    @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
     def value: Constant
   }
 
@@ -135,25 +143,24 @@ trait Annotations { self: Universe =>
    *  @template
    *  @group Annotations
    */
-  type ArrayArgument >: Null <: AnyRef with JavaArgument with ArrayArgumentApi
-
-  /** A tag that preserves the identity of the `ArrayArgument` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ArrayArgumentTag: ClassTag[ArrayArgument]
+  @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+  type ArrayArgument >: Null <: ArrayArgumentApi with JavaArgument
 
   /** The constructor/extractor for `ArrayArgument` instances.
    *  @group Extractors
    */
+  @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
   val ArrayArgument: ArrayArgumentExtractor
 
   /** An extractor class to create and pattern match with syntax `ArrayArgument(args)`
    *  where `args` is the argument array.
    *  @group Extractors
    */
+  @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
   abstract class ArrayArgumentExtractor {
+    @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
     def apply(args: Array[JavaArgument]): ArrayArgument
+    @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
     def unapply(arg: ArrayArgument): Option[Array[JavaArgument]]
   }
 
@@ -161,8 +168,10 @@ trait Annotations { self: Universe =>
    *  The main source of information about annotations is the [[scala.reflect.api.Annotations]] page.
    *  @group API
    */
+  @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
   trait ArrayArgumentApi {
     /** The underlying array of Java annotation arguments. */
+    @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
     def args: Array[JavaArgument]
   }
 
@@ -170,25 +179,24 @@ trait Annotations { self: Universe =>
    *  @template
    *  @group Annotations
    */
-  type NestedArgument >: Null <: AnyRef with JavaArgument with NestedArgumentApi
-
-  /** A tag that preserves the identity of the `NestedArgument` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val NestedArgumentTag: ClassTag[NestedArgument]
+  @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+  type NestedArgument >: Null <: NestedArgumentApi with JavaArgument
 
   /** The constructor/extractor for `NestedArgument` instances.
    *  @group Extractors
    */
+  @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
   val NestedArgument: NestedArgumentExtractor
 
   /** An extractor class to create and pattern match with syntax `NestedArgument(annotation)`
    *  where `annotation` is the nested annotation.
    *  @group Extractors
    */
+  @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
   abstract class NestedArgumentExtractor {
+    @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
     def apply(annotation: Annotation): NestedArgument
+    @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
     def unapply(arg: NestedArgument): Option[Annotation]
   }
 
@@ -196,8 +204,10 @@ trait Annotations { self: Universe =>
    *  The main source of information about annotations is the [[scala.reflect.api.Annotations]] page.
    *  @group API
    */
+  @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
   trait NestedArgumentApi {
     /** The underlying nested annotation. */
+    @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
     def annotation: Annotation
   }
-}
\ No newline at end of file
+}
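
To illustrate the tree-based annotation API described in the updated scaladoc above, here is a hedged runtime-reflection sketch; the `ann` and `C` definitions are hypothetical:

    import scala.reflect.runtime.universe._

    class ann(x: Int, y: Int) extends scala.annotation.StaticAnnotation
    @ann(1, 2) class C

    val annotation = typeOf[C].typeSymbol.annotations.head
    // annotation.tree is the constructor invocation `new ann(1, 2)`;
    // its children.tail are the argument trees, as the documentation above states.
    val args = annotation.tree.children.tail
    // args == List(Literal(Constant(1)), Literal(Constant(2)))
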
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
deleted file mode 100644
index 0c8e81a..0000000
--- a/src/reflect/scala/reflect/api/BuildUtils.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-package scala.reflect
-package api
-
-/**
- * This is an internal implementation class.
- * @groupname TreeBuilders Tree Building
- */
-private[reflect] trait BuildUtils { self: Universe =>
-
-  /** @group TreeBuilders */
-  val build: BuildApi
-
-  // this API abstracts away the functionality necessary for reification
-  // it's too gimmicky and unstructured to be exposed directly in the universe
-  // but we need it in a publicly available place for reification to work
-
-  /** @group TreeBuilders */
-  abstract class BuildApi {
-    /** Selects type symbol with given simple name `name` from the defined members of `owner`.
-     */
-    def selectType(owner: Symbol, name: String): TypeSymbol
-
-    /** Selects term symbol with given name and type from the defined members of prefix type
-     */
-    def selectTerm(owner: Symbol, name: String): TermSymbol
-
-    /** Selects overloaded method symbol with given name and index
-     */
-    def selectOverloadedMethod(owner: Symbol, name: String, index: Int): MethodSymbol
-
-    /** A fresh symbol with given name `name`, position `pos` and flags `flags` that has
-     *  the current symbol as its owner.
-     */
-    def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: FlagSet, isClass: Boolean): Symbol
-
-    /** Create a fresh free term symbol.
-     *  @param   name   the name of the free variable
-     *  @param   value  the value of the free variable at runtime
-     *  @param   flags  (optional) flags of the free variable
-     *  @param   origin debug information that tells where this symbol comes from
-     */
-    def newFreeTerm(name: String, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTermSymbol
-
-    /** Create a fresh free type symbol.
-     *  @param   name   the name of the free variable
-     *  @param   flags  (optional) flags of the free variable
-     *  @param   origin debug information that tells where this symbol comes from
-     */
-    def newFreeType(name: String, flags: FlagSet = NoFlags, origin: String = null): FreeTypeSymbol
-
-    /** Set symbol's type signature to given type.
-     *  @return the symbol itself
-     */
-    def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S
-
-    /** Set symbol's annotations to given annotations `annots`.
-     */
-    def setAnnotations[S <: Symbol](sym: S, annots: List[Annotation]): S
-
-    def flagsFromBits(bits: Long): FlagSet
-
-    def emptyValDef: ValDef
-
-    def This(sym: Symbol): Tree
-
-    def Select(qualifier: Tree, sym: Symbol): Select
-
-    def Ident(sym: Symbol): Ident
-
-    def TypeTree(tp: Type): TypeTree
-
-    def thisPrefix(sym: Symbol): Type
-
-    def setType[T <: Tree](tree: T, tpe: Type): T
-
-    def setSymbol[T <: Tree](tree: T, sym: Symbol): T
-  }
-}
diff --git a/src/reflect/scala/reflect/api/Constants.scala b/src/reflect/scala/reflect/api/Constants.scala
index f3d75c3..e73c5ff 100644
--- a/src/reflect/scala/reflect/api/Constants.scala
+++ b/src/reflect/scala/reflect/api/Constants.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package api
 
 /**
@@ -68,7 +69,7 @@ package api
  *    val enumRef = jarg("enumRef").symbolValue
  *    println(enumRef)                       // value BAR
  *
- *    val siblings = enumRef.owner.typeSignature.declarations
+ *    val siblings = enumRef.owner.info.decls
  *    val enumValues = siblings.filter(sym => sym.isVal && sym.isPublic)
  *    println(enumValues)                    // Scope{
  *                                           //   final val FOO: JavaSimpleEnumeration;
@@ -164,7 +165,7 @@ trait Constants {
    *                                           // ideally one should match instead of casting
    *    println(enumRef)                       // value BAR
    *
-   *    val siblings = enumRef.owner.typeSignature.declarations
+   *    val siblings = enumRef.owner.info.decls
    *    val enumValues = siblings.filter(sym => sym.isVal && sym.isPublic)
    *    println(enumValues)                    // Scope{
    *                                           //   final val FOO: JavaSimpleEnumeration;
@@ -183,12 +184,6 @@ trait Constants {
    */
   type Constant >: Null <: AnyRef with ConstantApi
 
-  /** A tag that preserves the identity of the `Constant` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ConstantTag: ClassTag[Constant]
-
   /** The constructor/extractor for `Constant` instances.
    *  @group Extractors
    */
diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala
index 2ba18a8..5b6ff23 100644
--- a/src/reflect/scala/reflect/api/Exprs.scala
+++ b/src/reflect/scala/reflect/api/Exprs.scala
@@ -3,10 +3,12 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package api
 
 import scala.reflect.runtime.{universe => ru}
+import scala.annotation.compileTimeOnly
 
 /**
  * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
@@ -90,7 +92,7 @@ trait Exprs { self: Universe =>
      * }}}
      * because expr of type Expr[T] itself does not have a method foo.
      */
-    // @compileTimeOnly("Cannot use splice outside reify")
+    @compileTimeOnly("splice must be enclosed within a reify {} block")
     def splice: T
 
     /**
@@ -107,7 +109,7 @@ trait Exprs { self: Universe =>
      * object Impls { def foo_impl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ... }
      * }}}
      */
-    // @compileTimeOnly("Cannot use value except for signatures of macro implementations")
+    @compileTimeOnly("cannot use value except for signatures of macro implementations")
     val value: T
 
     override def canEqual(x: Any) = x.isInstanceOf[Expr[_]]
@@ -174,4 +176,4 @@ private[scala] class SerializedExpr(var treec: TreeCreator, var tag: ru.WeakType
     import ru._
     Expr(rootMirror, treec)(tag)
   }
-}
\ No newline at end of file
+}
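
As a quick sketch of what the strengthened @compileTimeOnly check above allows and forbids (illustrative only; the names `two` and `four` are not from the patch):

    import scala.reflect.runtime.universe._

    val two: Expr[Int]  = reify(2)
    val four: Expr[Int] = reify(two.splice + two.splice) // splice enclosed in reify: fine
    // two.splice + 1  // outside reify this would now be rejected at compile time
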
diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala
index 4357aec..bf4d635 100644
--- a/src/reflect/scala/reflect/api/FlagSets.scala
+++ b/src/reflect/scala/reflect/api/FlagSets.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
 import scala.language.implicitConversions
@@ -61,12 +62,6 @@ trait FlagSets { self: Universe =>
    */
   type FlagSet
 
-  /** A tag that preserves the identity of the `FlagSet` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val FlagSetTag: ClassTag[FlagSet]
-
   /** The API of `FlagSet` instances.
    *  The main source of information about flag sets is the [[scala.reflect.api.FlagSets]] page.
    *  @group Flags
@@ -136,8 +131,8 @@ trait FlagSets { self: Universe =>
     /** Flag indicating that tree has `protected` modifier set */
     val PROTECTED: FlagSet
 
-    /** Flag indicating that tree represents a member local to current class
-     *  (i.e. private[this] or protected[this].
+    /** Flag indicating that tree represents a member local to current class,
+     *  i.e. private[this] or protected[this].
      *  This requires having either PRIVATE or PROTECTED set as well.
      */
     val LOCAL: FlagSet
@@ -171,6 +166,90 @@ trait FlagSets { self: Universe =>
 
     /** Flag indicating that tree represents a variable or a member initialized to the default value */
     val DEFAULTINIT: FlagSet
+
+    /** Flag indicating that tree represents an enum.
+     *
+     *  It can only appear on
+     *  - the enum's class
+     *  - enum constants
+     **/
+    val ENUM: FlagSet
+
+    /** Flag indicating that tree represents a parameter of the primary constructor of some class
+     *  or a synthetic member underlying thereof. E.g. here's how 'class C(val x: Int)' is represented:
+     *
+     *      [[syntax trees at end of parser]]// Scala source: tmposDU52
+     *      class C extends scala.AnyRef {
+     *        <paramaccessor> val x: Int = _;
+     *        def <init>(x: Int) = {
+     *          super.<init>();
+     *          ()
+     *        }
+     *      }
+     *      ClassDef(
+     *        Modifiers(), TypeName("C"), List(),
+     *        Template(
+     *          List(Select(Ident(scala), TypeName("AnyRef"))),
+     *          noSelfType,
+     *          List(
+     *            ValDef(Modifiers(PARAMACCESSOR), TermName("x"), Ident(TypeName("Int")), EmptyTree),
+     *            DefDef(
+     *              Modifiers(), nme.CONSTRUCTOR, List(),
+     *              List(List(ValDef(Modifiers(PARAM | PARAMACCESSOR), TermName("x"), Ident(TypeName("Int")), EmptyTree))), TypeTree(),
+     *              Block(List(pendingSuperCall), Literal(Constant(())))))))))
+     */
+    val PARAMACCESSOR: FlagSet
+
+    /** Flag indicating that tree represents a parameter of the primary constructor of some case class
+     *  or a synthetic member underlying thereof.  E.g. here's how 'case class C(val x: Int)' is represented:
+     *
+     *      [[syntax trees at end of parser]]// Scala source: tmpnHkJ3y
+     *      case class C extends scala.Product with scala.Serializable {
+     *        <caseaccessor> <paramaccessor> val x: Int = _;
+     *        def <init>(x: Int) = {
+     *          super.<init>();
+     *          ()
+     *        }
+     *      }
+     *      ClassDef(
+     *        Modifiers(CASE), TypeName("C"), List(),
+     *        Template(
+     *          List(Select(Ident(scala), TypeName("Product")), Select(Ident(scala), TypeName("Serializable"))),
+     *          noSelfType,
+     *          List(
+     *            ValDef(Modifiers(CASEACCESSOR | PARAMACCESSOR), TermName("x"), Ident(TypeName("Int")), EmptyTree),
+     *            DefDef(
+     *              Modifiers(), nme.CONSTRUCTOR, List(),
+     *              List(List(ValDef(Modifiers(PARAM | PARAMACCESSOR), TermName("x"), Ident(TypeName("Int")), EmptyTree))), TypeTree(),
+     *              Block(List(pendingSuperCall), Literal(Constant(())))))))))
+     */
+    val CASEACCESSOR: FlagSet
+
+    /** Flag used to distinguish programmatically generated definitions from user-written ones.
+     *  @see ARTIFACT
+     */
+    val SYNTHETIC: FlagSet
+
+    /** Flag used to distinguish platform-specific implementation details.
+     *  Trees and symbols which are currently marked ARTIFACT by scalac:
+     *    * $outer fields and accessors
+     *    * super accessors
+     *    * protected accessors
+     *    * lazy local accessors
+     *    * bridge methods
+     *    * default argument getters
+     *    * evaluation-order preserving locals for right-associative and out-of-order named arguments
+     *    * catch-expression storing vals
+     *    * anything else which feels like a setFlag(ARTIFACT)
+     *
+     *  @see SYNTHETIC
+     */
+    val ARTIFACT: FlagSet
+
+    /** Flag that indicates methods that are supposed to be stable
+     *  (e.g. synthetic getters of valdefs).
+     */
+    val STABLE: FlagSet
   }
 
   /** The empty set of flags
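
A small sketch (not part of the patch) of how the flag sets documented above combine and are queried on Modifiers; the `mods` value is illustrative:

    import scala.reflect.runtime.universe._
    import scala.reflect.runtime.universe.Flag._

    // PRIVATE | LOCAL corresponds to a `private[this]` member, as the LOCAL doc notes.
    val mods = Modifiers(PRIVATE | LOCAL)
    println(mods.hasFlag(PRIVATE)) // true
    println(mods.hasFlag(CASE))    // false
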
diff --git a/src/reflect/scala/reflect/api/ImplicitTags.scala b/src/reflect/scala/reflect/api/ImplicitTags.scala
new file mode 100644
index 0000000..aca0692
--- /dev/null
+++ b/src/reflect/scala/reflect/api/ImplicitTags.scala
@@ -0,0 +1,119 @@
+package scala
+package reflect
+package api
+
+/** Tags which preserve the identity of abstract types in the face of erasure.
+ *  Can be used for pattern matching, instance tests, serialization and the like.
+ *  @group Tags
+ */
+trait ImplicitTags {
+  self: Universe =>
+
+  // Tags for Types.
+  implicit val AnnotatedTypeTag: ClassTag[AnnotatedType]
+  implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType]
+  implicit val ClassInfoTypeTag: ClassTag[ClassInfoType]
+  implicit val CompoundTypeTag: ClassTag[CompoundType]
+  implicit val ConstantTypeTag: ClassTag[ConstantType]
+  implicit val ExistentialTypeTag: ClassTag[ExistentialType]
+  implicit val MethodTypeTag: ClassTag[MethodType]
+  implicit val NullaryMethodTypeTag: ClassTag[NullaryMethodType]
+  implicit val PolyTypeTag: ClassTag[PolyType]
+  implicit val RefinedTypeTag: ClassTag[RefinedType]
+  implicit val SingleTypeTag: ClassTag[SingleType]
+  implicit val SingletonTypeTag: ClassTag[SingletonType]
+  implicit val SuperTypeTag: ClassTag[SuperType]
+  implicit val ThisTypeTag: ClassTag[ThisType]
+  implicit val TypeBoundsTag: ClassTag[TypeBounds]
+  implicit val TypeRefTag: ClassTag[TypeRef]
+  implicit val TypeTagg: ClassTag[Type]
+
+  // Tags for Names.
+  implicit val NameTag: ClassTag[Name]
+  implicit val TermNameTag: ClassTag[TermName]
+  implicit val TypeNameTag: ClassTag[TypeName]
+
+  // Tags for Scopes.
+  implicit val ScopeTag: ClassTag[Scope]
+  implicit val MemberScopeTag: ClassTag[MemberScope]
+
+  // Tags for Annotations.
+  implicit val AnnotationTag: ClassTag[Annotation]
+  implicit val JavaArgumentTag: ClassTag[JavaArgument]
+  implicit val LiteralArgumentTag: ClassTag[LiteralArgument]
+  implicit val ArrayArgumentTag: ClassTag[ArrayArgument]
+  implicit val NestedArgumentTag: ClassTag[NestedArgument]
+
+  // Tags for Symbols.
+  implicit val TermSymbolTag: ClassTag[TermSymbol]
+  implicit val MethodSymbolTag: ClassTag[MethodSymbol]
+  implicit val SymbolTag: ClassTag[Symbol]
+  implicit val TypeSymbolTag: ClassTag[TypeSymbol]
+  implicit val ModuleSymbolTag: ClassTag[ModuleSymbol]
+  implicit val ClassSymbolTag: ClassTag[ClassSymbol]
+
+  // Tags for misc Tree relatives.
+  implicit val PositionTag: ClassTag[Position]
+  implicit val ConstantTag: ClassTag[Constant]
+  implicit val FlagSetTag: ClassTag[FlagSet]
+  implicit val ModifiersTag: ClassTag[Modifiers]
+
+  // Tags for Trees. WTF.
+  implicit val AlternativeTag: ClassTag[Alternative]
+  implicit val AnnotatedTag: ClassTag[Annotated]
+  implicit val AppliedTypeTreeTag: ClassTag[AppliedTypeTree]
+  implicit val ApplyTag: ClassTag[Apply]
+  implicit val AssignOrNamedArgTag: ClassTag[AssignOrNamedArg]
+  implicit val AssignTag: ClassTag[Assign]
+  implicit val BindTag: ClassTag[Bind]
+  implicit val BlockTag: ClassTag[Block]
+  implicit val CaseDefTag: ClassTag[CaseDef]
+  implicit val ClassDefTag: ClassTag[ClassDef]
+  implicit val CompoundTypeTreeTag: ClassTag[CompoundTypeTree]
+  implicit val DefDefTag: ClassTag[DefDef]
+  implicit val DefTreeTag: ClassTag[DefTree]
+  implicit val ExistentialTypeTreeTag: ClassTag[ExistentialTypeTree]
+  implicit val FunctionTag: ClassTag[Function]
+  implicit val GenericApplyTag: ClassTag[GenericApply]
+  implicit val IdentTag: ClassTag[Ident]
+  implicit val IfTag: ClassTag[If]
+  implicit val ImplDefTag: ClassTag[ImplDef]
+  implicit val ImportSelectorTag: ClassTag[ImportSelector]
+  implicit val ImportTag: ClassTag[Import]
+  implicit val LabelDefTag: ClassTag[LabelDef]
+  implicit val LiteralTag: ClassTag[Literal]
+  implicit val MatchTag: ClassTag[Match]
+  implicit val MemberDefTag: ClassTag[MemberDef]
+  implicit val ModuleDefTag: ClassTag[ModuleDef]
+  implicit val NameTreeTag: ClassTag[NameTree]
+  implicit val NewTag: ClassTag[New]
+  implicit val PackageDefTag: ClassTag[PackageDef]
+  implicit val RefTreeTag: ClassTag[RefTree]
+  implicit val ReturnTag: ClassTag[Return]
+  implicit val SelectFromTypeTreeTag: ClassTag[SelectFromTypeTree]
+  implicit val SelectTag: ClassTag[Select]
+  implicit val SingletonTypeTreeTag: ClassTag[SingletonTypeTree]
+  implicit val StarTag: ClassTag[Star]
+  implicit val SuperTag: ClassTag[Super]
+  implicit val SymTreeTag: ClassTag[SymTree]
+  implicit val TemplateTag: ClassTag[Template]
+  implicit val TermTreeTag: ClassTag[TermTree]
+  implicit val ThisTag: ClassTag[This]
+  implicit val ThrowTag: ClassTag[Throw]
+  implicit val TreeTag: ClassTag[Tree]
+  implicit val TryTag: ClassTag[Try]
+  implicit val TypTreeTag: ClassTag[TypTree]
+  implicit val TypeApplyTag: ClassTag[TypeApply]
+  implicit val TypeBoundsTreeTag: ClassTag[TypeBoundsTree]
+  implicit val TypeDefTag: ClassTag[TypeDef]
+  implicit val TypeTreeTag: ClassTag[TypeTree]
+  implicit val TypedTag: ClassTag[Typed]
+  implicit val UnApplyTag: ClassTag[UnApply]
+  implicit val ValDefTag: ClassTag[ValDef]
+  implicit val ValOrDefDefTag: ClassTag[ValOrDefDef]
+
+  // Miscellaneous
+  implicit val TreeCopierTag: ClassTag[TreeCopier]
+  implicit val RuntimeClassTag: ClassTag[RuntimeClass]
+  implicit val MirrorTag: ClassTag[Mirror]
+}
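
These tags are what make instance tests on the universe's abstract tree and type members checkable after erasure. A minimal illustrative sketch; `describe` is a hypothetical helper:

    import scala.reflect.runtime.universe._

    def describe(tree: Tree): String = tree match {
      case _: Literal => "a literal"     // instance test backed by the implicit LiteralTag
      case _: Ident   => "an identifier"
      case _          => "something else"
    }

    // describe(Literal(Constant(42))) == "a literal"
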
diff --git a/src/reflect/scala/reflect/api/Importers.scala b/src/reflect/scala/reflect/api/Importers.scala
deleted file mode 100644
index afc4f2f..0000000
--- a/src/reflect/scala/reflect/api/Importers.scala
+++ /dev/null
@@ -1,103 +0,0 @@
-package scala.reflect
-package api
-
-/**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
- *
- * This trait provides support for importers, a facility to migrate reflection artifacts between universes.
- * ''Note: this trait should typically be used only rarely.''
- *
- *  Reflection artifacts, such as [[scala.reflect.api.Symbols Symbols]] and [[scala.reflect.api.Types Types]],
- *  are contained in [[scala.reflect.api.Universes Universe]]s. Typically all processing happens
- *  within a single `Universe` (e.g. a compile-time macro `Universe` or a runtime reflection `Universe`), but sometimes
- *  there is a need to migrate artifacts from one `Universe` to another. For example, runtime compilation works by
- *  importing runtime reflection trees into a runtime compiler universe, compiling the importees and exporting the
- *  result back.
- *
- *  Reflection artifacts are firmly grounded in their `Universe`s, which is reflected by the fact that types of artifacts
- *  from different universes are not compatible. By using `Importer`s, however, they be imported from one universe
- *  into another. For example, to import `foo.bar.Baz` from the source `Universe` to the target `Universe`,
- *  an importer will first check whether the entire owner chain exists in the target `Universe`.
- *  If it does, then nothing else will be done. Otherwise, the importer will recreate the entire owner chain
- *  and will import the corresponding type signatures into the target `Universe`.
- *
- *  Since importers match `Symbol` tables of the source and the target `Universe`s using plain string names,
- *  it is programmer's responsibility to make sure that imports don't distort semantics, e.g., that
- *  `foo.bar.Baz` in the source `Universe` means the same that `foo.bar.Baz` does in the target `Universe`.
- *
- *  === Example ===
- *
- *  Here's how one might implement a macro that performs compile-time evaluation of its argument
- *  by using a runtime compiler to compile and evaluate a tree that belongs to a compile-time compiler:
- *
- *  {{{
- *  def staticEval[T](x: T) = macro staticEval[T]
- *
- *  def staticEval[T](c: scala.reflect.macros.Context)(x: c.Expr[T]) = {
- *    // creates a runtime reflection universe to host runtime compilation
- *    import scala.reflect.runtime.{universe => ru}
- *    val mirror = ru.runtimeMirror(c.libraryClassLoader)
- *    import scala.tools.reflect.ToolBox
- *    val toolBox = mirror.mkToolBox()
- *
- *    // runtime reflection universe and compile-time macro universe are different
- *    // therefore an importer is needed to bridge them
- *    // currently mkImporter requires a cast to correctly assign the path-dependent types
- *    val importer0 = ru.mkImporter(c.universe)
- *    val importer = importer0.asInstanceOf[ru.Importer { val from: c.universe.type }]
- *
- *    // the created importer is used to turn a compiler tree into a runtime compiler tree
- *    // both compilers use the same classpath, so semantics remains intact
- *    val imported = importer.importTree(tree)
- *
- *    // after the tree is imported, it can be evaluated as usual
- *    val tree = toolBox.resetAllAttrs(imported.duplicate)
- *    val valueOfX = toolBox.eval(imported).asInstanceOf[T]
- *    ...
- *  }
- *  }}}
- *
- * @group ReflectionAPI
- */
-trait Importers { self: Universe =>
-
-  /** Creates an importer that moves reflection artifacts between universes.
-   *  @group Importers
-   */
-  def mkImporter(from0: Universe): Importer { val from: from0.type }
-
-  /** The API of importers.
-   *  The main source of information about importers is the [[scala.reflect.api.Importers]] page.
-   *  @group Importers
-   */
-  trait Importer {
-    /** The source universe of reflection artifacts that will be processed.
-     *  The target universe is universe that created this importer with `mkImporter`.
-     */
-    val from: Universe
-
-    /** An importer that works in reverse direction, namely:
-     *  imports reflection artifacts from the current universe to the universe specified in `from`.
-     */
-    val reverse: from.Importer { val from: self.type }
-
-    /** In the current universe, locates or creates a symbol that corresponds to the provided symbol in the source universe.
-     *  If necessary imports the owner chain, companions, type signature, annotations and attachments.
-     */
-    def importSymbol(sym: from.Symbol): Symbol
-
-    /** In the current universe, locates or creates a type that corresponds to the provided type in the source universe.
-     *  If necessary imports the underlying symbols, annotations, scopes and trees.
-     */
-    def importType(tpe: from.Type): Type
-
-    /** In the current universe, creates a tree that corresponds to the provided tree in the source universe.
-     *  If necessary imports the underlying symbols, types and attachments.
-     */
-    def importTree(tree: from.Tree): Tree
-
-    /** In the current universe, creates a position that corresponds to the provided position in the source universe.
-     */
-    def importPosition(pos: from.Position): Position
-  }
-}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/api/Internals.scala b/src/reflect/scala/reflect/api/Internals.scala
new file mode 100644
index 0000000..577cd09
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Internals.scala
@@ -0,0 +1,1238 @@
+package scala
+package reflect
+package api
+
+import scala.language.implicitConversions
+import scala.language.higherKinds
+
+/**
+ *  <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ *  This trait assembles APIs occasionally necessary for performing low-level operations on reflection artifacts.
+ *  See [[Internals#InternalApi]] for more information about nature, usefulness and compatibility guarantees of these APIs.
+ *
+ *  @group ReflectionAPI
+ */
+trait Internals { self: Universe =>
+
+  /** @see [[InternalApi]]
+   *  @group Internal
+   */
+  val internal: Internal
+
+  /** @see [[InternalApi]]
+   *  @group Internal
+   */
+  type Internal <: InternalApi
+
+  /** Reflection API exhibits a tension inherent to experimental things:
+   *  on the one hand we want it to grow into a beautiful and robust API,
+   *  but on the other hand we have to deal with immaturity of underlying mechanisms
+   *  by providing not very pretty solutions to enable important use cases.
+   *
+   *  In Scala 2.10, which was our first stab at reflection API, we didn't have a systematic
+   *  approach to dealing with this tension, sometimes exposing too much of internals (e.g. Symbol.deSkolemize)
+   *  and sometimes exposing too little (e.g. there's still no facility to change owners, to do typing
+   *  transformations, etc). This resulted in certain confusion with some internal APIs
+   *  living among public ones, scaring the newcomers, and some internal APIs only available via casting,
+   *  which requires intimate knowledge of the compiler and breaks compatibility guarantees.
+   *
+   *  This led to creation of the `internal` API module for the reflection API, which
+   *  provides advanced APIs necessary for macros that push boundaries of the state of the art,
+   *  clearly demarcating them from the more or less straightforward rest and
+   *  providing compatibility guarantees on par with the rest of the reflection API
+   *  (full compatibility within minor releases, best effort towards backward compatibility within major releases,
+   *  clear replacement path in case of rare incompatible changes in major releases).
+   *
+   *  The `internal` module itself (the value that implements [[InternalApi]]) isn't defined here,
+   *  in [[scala.reflect.api.Universe]], but is provided on a per-implementation basis. Runtime API endpoint
+   *  ([[scala.reflect.runtime.universe]]) provides `universe.compat: InternalApi`, whereas compile-time API endpoints
+   *  (instances of [[scala.reflect.macros.Context]]) provide `c.compat: ContextInternalApi`, which extends `InternalApi`
+   *  with additional universe-specific and context-specific functionality.
+   *
+   *  @group Internal
+   */
+  trait InternalApi { internal =>
+    /** This is an internal implementation module.
+     */
+    val reificationSupport: ReificationSupportApi
+
+    /** Creates an importer that moves reflection artifacts between universes.
+     *  @see [[Importer]]
+     */
+    // SI-6241: move importers to a mirror
+    def createImporter(from0: Universe): Importer { val from: from0.type }
+
+    /**
+     * Convert a [[scala.reflect.api.TypeTags#TypeTag]] to a [[scala.reflect.Manifest]].
+     *
+     * Compiler usually generates these conversions automatically, when a type tag for a type `T` is in scope,
+     * and an implicit of type `Manifest[T]` is requested, but this method can also be called manually.
+     * For example:
+     * {{{
+     * typeTagToManifest(scala.reflect.runtime.currentMirror, implicitly[TypeTag[String]])
+     * }}}
+     * @group TagInterop
+     */
+    def typeTagToManifest[T: ClassTag](mirror: Any, tag: Universe#TypeTag[T]): Manifest[T] =
+      throw new UnsupportedOperationException("This universe does not support tag -> manifest conversions. Use a JavaUniverse, e.g. the scala.reflect.runtime.universe.")
+
+    /**
+     * Convert a [[scala.reflect.Manifest]] to a [[scala.reflect.api.TypeTags#TypeTag]].
+     *
+     * Compiler usually generates these conversions automatically, when a manifest for a type `T` is in scope,
+     * and an implicit of type `TypeTag[T]` is requested, but this method can also be called manually.
+     * For example:
+     * {{{
+     * manifestToTypeTag(scala.reflect.runtime.currentMirror, implicitly[Manifest[String]])
+     * }}}
+     * @group TagInterop
+     */
+    def manifestToTypeTag[T](mirror: Any, manifest: Manifest[T]): Universe#TypeTag[T] =
+      throw new UnsupportedOperationException("This universe does not support manifest -> tag conversions. Use a JavaUniverse, e.g. the scala.reflect.runtime.universe.")
+
+    /** Create a new scope with the given initial elements.
+     */
+    def newScopeWith(elems: Symbol*): Scope
+
+    /** Extracts free term symbols from a tree that is reified or contains reified subtrees.
+     */
+    def freeTerms(tree: Tree): List[FreeTermSymbol]
+
+    /** Extracts free type symbols from a tree that is reified or contains reified subtrees.
+     */
+    def freeTypes(tree: Tree): List[FreeTypeSymbol]
+
+    /** Substitute symbols in `to` for corresponding occurrences of references to
+     *  symbols `from` in this type.
+     */
+    def substituteSymbols(tree: Tree, from: List[Symbol], to: List[Symbol]): Tree
+
+    /** Substitute types in `to` for corresponding occurrences of references to
+     *  symbols `from` in this tree.
+     */
+    def substituteTypes(tree: Tree, from: List[Symbol], to: List[Type]): Tree
+
+    /** Substitute given tree `to` for occurrences of nodes that represent
+     *  `C.this`, where `C` refers to the given class `clazz`.
+     */
+    def substituteThis(tree: Tree, clazz: Symbol, to: Tree): Tree
+
+    /** A factory method for `ClassDef` nodes.
+     */
+    def classDef(sym: Symbol, impl: Template): ClassDef
+
+    /** A factory method for `ModuleDef` nodes.
+     */
+    def moduleDef(sym: Symbol, impl: Template): ModuleDef
+
+    /** A factory method for `ValDef` nodes.
+     */
+    def valDef(sym: Symbol, rhs: Tree): ValDef
+
+    /** A factory method for `ValDef` nodes.
+     */
+    def valDef(sym: Symbol): ValDef
+
+    /** A factory method for `DefDef` nodes.
+     */
+    def defDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef
+
+    /** A factory method for `DefDef` nodes.
+     */
+    def defDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef
+
+    /** A factory method for `DefDef` nodes.
+     */
+    def defDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef
+
+    /** A factory method for `DefDef` nodes.
+     */
+    def defDef(sym: Symbol, rhs: Tree): DefDef
+
+    /** A factory method for `DefDef` nodes.
+     */
+    def defDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef
+
+    /** A factory method for `TypeDef` nodes.
+     */
+    def typeDef(sym: Symbol, rhs: Tree): TypeDef
+
+    /** A factory method for `TypeDef` nodes.
+     */
+    def typeDef(sym: Symbol): TypeDef
+
+    /** A factory method for `LabelDef` nodes.
+     */
+    def labelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef
+
+    /** Does this symbol represent a free term captured by reification?
+     *  If yes, `isTerm` is also guaranteed to be true.
+     */
+    def isFreeTerm(symbol: Symbol): Boolean
+
+    /** This symbol cast to a free term symbol.
+     *  @throws ScalaReflectionException if `isFreeTerm` is false.
+     */
+    def asFreeTerm(symbol: Symbol): FreeTermSymbol
+
+    /** Does this symbol represent a free type captured by reification?
+     *  If yes, `isType` is also guaranteed to be true.
+     */
+    def isFreeType(symbol: Symbol): Boolean
+
+    /** This symbol cast to a free type symbol.
+     *  @throws ScalaReflectionException if `isFreeType` is false.
+     */
+    def asFreeType(symbol: Symbol): FreeTypeSymbol
+
+    def newTermSymbol(owner: Symbol, name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol
+
+    def newModuleAndClassSymbol(owner: Symbol, name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol)
+
+    def newMethodSymbol(owner: Symbol, name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol
+
+    def newTypeSymbol(owner: Symbol, name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol
+
+    def newClassSymbol(owner: Symbol, name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol
+
+    def newFreeTerm(name: String, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTermSymbol
+
+    def newFreeType(name: String, flags: FlagSet = NoFlags, origin: String = null): FreeTypeSymbol
+
+    /** Does this symbol or its underlying type represent a typechecking error?
+     */
+    def isErroneous(symbol: Symbol): Boolean
+
+    /** Does this symbol represent the definition of a skolem?
+     *  Skolems are used during typechecking to represent type parameters viewed from inside their scopes.
+     */
+    def isSkolem(symbol: Symbol): Boolean
+
+    /** If this symbol is a skolem, its corresponding type parameter, otherwise the symbol itself.
+     *
+     *  [[https://groups.google.com/forum/#!msg/scala-internals/0j8laVNTQsI/kRXMF_c8bGsJ To quote Martin Odersky]],
+     *  skolems are synthetic type "constants" that are copies of existentially bound or universally
+     *  bound type variables. E.g. if one is inside the right-hand side of a method:
+     *
+     *  {{{
+     *  def foo[T](x: T) = ... foo[List[T]]....
+     *  }}}
+     *
+     *  the skolem named `T` refers to the unknown type instance of `T` when `foo` is called. It needs to be different
+     *  from the type parameter because in a recursive call such as `foo[List[T]]` above, the type parameter gets
+     *  substituted with `List[T]`, but the ''type skolem'' stays what it is.
+     *
+     *  The other form of skolem is an ''existential skolem''. Say one has a function
+     *
+     *  {{{
+     *  def bar(xs: List[T] forSome { type T }) = xs.head
+     *  }}}
+     *
+     *  then each occurrence of `xs` on the right will have type `List[T']` where `T'` is a fresh copy of `T`.
+     */
+    def deSkolemize(symbol: Symbol): Symbol
+
+    /** Forces all outstanding completers associated with this symbol.
+     *  After this call returns, the symbol becomes immutable and thread-safe.
+     */
+    def initialize(symbol: Symbol): symbol.type
+
+    /** Calls [[initialize]] on the owner and all the value and type parameters of the symbol.
+     */
+    def fullyInitialize(symbol: Symbol): symbol.type
+
+    /** Calls [[initialize]] on all the value and type parameters of the type.
+     */
+    def fullyInitialize(tp: Type): tp.type
+
+    /** Calls [[initialize]] on all the symbols that the scope consists of.
+     */
+    def fullyInitialize(scope: Scope): scope.type
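+    /* A minimal illustrative sketch (not from the upstream sources): forcing initialization
+     * is mainly of interest for multi-threaded runtime reflection, e.g.:
+     *
+     * {{{
+     * import scala.reflect.runtime.{universe => ru}
+     * val listSym = ru.typeOf[List[Int]].typeSymbol
+     * ru.internal.fullyInitialize(listSym)   // listSym can now be shared across threads
+     * }}}
+     */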
+
+    /** Returns internal flags associated with the symbol.
+     */
+    def flags(symbol: Symbol): FlagSet
+
+    /** A creator for `ThisType` types.
+     */
+    def thisType(sym: Symbol): Type
+
+    /** A creator for `SingleType` types.
+     */
+    def singleType(pre: Type, sym: Symbol): Type
+
+    /** A creator for `SuperType` types.
+     */
+    def superType(thistpe: Type, supertpe: Type): Type
+
+    /** A creator for `ConstantType` types.
+     */
+    def constantType(value: Constant): ConstantType
+
+    /** A creator for `TypeRef` types.
+     */
+    def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type
+
+    /** A creator for `RefinedType` types.
+     */
+    def refinedType(parents: List[Type], decls: Scope): RefinedType
+
+    /** A creator for `RefinedType` types.
+     */
+    def refinedType(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType
+
+    /** A creator for `RefinedType` types.
+     */
+    def refinedType(parents: List[Type], owner: Symbol): Type
+
+    /** A creator for `RefinedType` types.
+     */
+    def refinedType(parents: List[Type], owner: Symbol, decls: Scope): Type
+
+    /** A creator for `RefinedType` types.
+     */
+    def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type
+
+    /** A creator for intersection types, where intersections of a single type are
+     *  replaced by the type itself.
+     */
+    def intersectionType(tps: List[Type]): Type
+
+    /** A creator for intersection types, where intersections of a single type are
+     *  replaced by the type itself, and repeated parent classes are merged.
+     *
+     *  !!! Repeated parent classes are not merged - is this a bug in the
+     *  comment or in the code?
+     */
+    def intersectionType(tps: List[Type], owner: Symbol): Type
+
+    /** A creator for `ClassInfoType` types.
+     */
+    def classInfoType(parents: List[Type], decls: Scope, typeSymbol: Symbol): ClassInfoType
+
+    /** A creator for `MethodType` types.
+     */
+    def methodType(params: List[Symbol], resultType: Type): MethodType
+
+    /** A creator for `NullaryMethodType` types.
+     */
+    def nullaryMethodType(resultType: Type): NullaryMethodType
+
+    /** A creator for type parameterizations that strips empty type parameter lists.
+     *  Use this factory method to indicate the type has kind * (it's a polymorphic value)
+     *  until we start tracking explicit kinds. Equivalent to `typeFun`, except that the latter requires `tparams` to be non-empty.
+     */
+    def polyType(tparams: List[Symbol], tpe: Type): PolyType
+
+    /** A creator for `ExistentialType` types.
+     */
+    def existentialType(quantified: List[Symbol], underlying: Type): ExistentialType
+
+    /** A creator for existential types. This generates:
+     *
+     *  {{{
+     *    tpe1 where { tparams }
+     *  }}}
+     *
+     *  where `tpe1` is the result of extrapolating `tpe0` with regard to `tparams`.
+     *  Extrapolating means that type variables in `tparams` occurring
+     *  in covariant positions are replaced by their upper bounds (minus any
+     *  SingletonClass markers), and type variables in `tparams` occurring in
+     *  contravariant positions are replaced by their lower bounds, provided the
+     *  resulting type is legal with regard to stability and does not contain
+     *  any type variable in `tparams`.
+     *
+     *  The abstraction drops all type parameters that are not directly or
+     *  indirectly referenced by type `tpe1`. If there are no remaining type
+     *  parameters, the result type `tpe0` is simply returned.
+     *  @group TypeCreators
+     */
+    def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type
+
+    /** A creator for `AnnotatedType` types.
+     */
+    def annotatedType(annotations: List[Annotation], underlying: Type): AnnotatedType
+
+    /** A creator for `TypeBounds` types.
+     */
+    def typeBounds(lo: Type, hi: Type): TypeBounds
+
+    /** A creator for `BoundedWildcardType` types.
+     */
+    def boundedWildcardType(bounds: TypeBounds): BoundedWildcardType
+
+    /** Syntactic conveniences for additional internal APIs for trees, symbols and types */
+    type Decorators <: DecoratorApi
+
+    /** @see [[Decorators]] */
+    val decorators: Decorators
+
+    /** @see [[Decorators]] */
+    trait DecoratorApi {
+      /** Extension methods for trees */
+      type TreeDecorator[T <: Tree] <: TreeDecoratorApi[T]
+
+      /** @see [[TreeDecorator]] */
+      implicit def treeDecorator[T <: Tree](tree: T): TreeDecorator[T]
+
+      /** @see [[TreeDecorator]] */
+      class TreeDecoratorApi[T <: Tree](val tree: T) {
+        /** @see [[internal.freeTerms]] */
+        def freeTerms: List[FreeTermSymbol] = internal.freeTerms(tree)
+
+        /** @see [[internal.freeTypes]] */
+        def freeTypes: List[FreeTypeSymbol] = internal.freeTypes(tree)
+
+        /** @see [[internal.substituteSymbols]] */
+        def substituteSymbols(from: List[Symbol], to: List[Symbol]): Tree = internal.substituteSymbols(tree, from, to)
+
+        /** @see [[internal.substituteTypes]] */
+        def substituteTypes(from: List[Symbol], to: List[Type]): Tree = internal.substituteTypes(tree, from, to)
+
+        /** @see [[internal.substituteThis]] */
+        def substituteThis(clazz: Symbol, to: Tree): Tree = internal.substituteThis(tree, clazz, to)
+      }
+
+      /** Extension methods for symbols */
+      type SymbolDecorator[T <: Symbol] <: SymbolDecoratorApi[T]
+
+      /** @see [[SymbolDecorator]] */
+      implicit def symbolDecorator[T <: Symbol](symbol: T): SymbolDecorator[T]
+
+      /** @see [[SymbolDecorator]] */
+      class SymbolDecoratorApi[T <: Symbol](val symbol: T) {
+        /** @see [[internal.isFreeTerm]] */
+        def isFreeTerm: Boolean = internal.isFreeTerm(symbol)
+
+        /** @see [[internal.asFreeTerm]] */
+        def asFreeTerm: FreeTermSymbol = internal.asFreeTerm(symbol)
+
+        /** @see [[internal.isFreeType]] */
+        def isFreeType: Boolean = internal.isFreeType(symbol)
+
+        /** @see [[internal.asFreeType]] */
+        def asFreeType: FreeTypeSymbol = internal.asFreeType(symbol)
+
+        /** @see [[internal.newTermSymbol]] */
+        def newTermSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol = internal.newTermSymbol(symbol, name, pos, flags)
+
+        /** @see [[internal.newModuleAndClassSymbol]] */
+        def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol) = internal.newModuleAndClassSymbol(symbol, name, pos, flags)
+
+        /** @see [[internal.newMethodSymbol]] */
+        def newMethodSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol = internal.newMethodSymbol(symbol, name, pos, flags)
+
+        /** @see [[internal.newTypeSymbol]] */
+        def newTypeSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol = internal.newTypeSymbol(symbol, name, pos, flags)
+
+        /** @see [[internal.newClassSymbol]] */
+        def newClassSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol = internal.newClassSymbol(symbol, name, pos, flags)
+
+        /** @see [[internal.isErroneous]] */
+        def isErroneous: Boolean = internal.isErroneous(symbol)
+
+        /** @see [[internal.isSkolem]] */
+        def isSkolem: Boolean = internal.isSkolem(symbol)
+
+        /** @see [[internal.deSkolemize]] */
+        def deSkolemize: Symbol = internal.deSkolemize(symbol)
+
+        /** @see [[internal.initialize]] */
+        def initialize: T = internal.initialize(symbol)
+
+        /** @see [[internal.fullyInitialize]] */
+        def fullyInitialize: T = internal.fullyInitialize(symbol)
+
+        /** @see [[internal.flags]] */
+        def flags: FlagSet = internal.flags(symbol)
+      }
+
+      /** Extension methods for types */
+      type TypeDecorator[T <: Type] <: TypeDecoratorApi[T]
+
+      /** @see [[TypeDecorator]] */
+      implicit def typeDecorator[T <: Type](tp: T): TypeDecorator[T]
+
+      /** @see [[TypeDecorator]] */
+      implicit class TypeDecoratorApi[T <: Type](val tp: T) {
+        /** @see [[internal.fullyInitialize]] */
+        def fullyInitialize: T = internal.fullyInitialize(tp)
+      }
+    }
+  }
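+  /* A minimal illustrative sketch (not from the upstream sources): the decorators above
+   * provide infix syntax for the same operations exposed on `internal`, e.g.:
+   *
+   * {{{
+   * import scala.reflect.runtime.universe._
+   * import internal.decorators._
+   * val tree = reify(List(1, 2, 3)).tree
+   * tree.freeTerms                   // same result as internal.freeTerms(tree)
+   * }}}
+   */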
+
+  /** This is an internal implementation class.
+   *  @group Internal
+   */
+  // this API abstracts away the functionality necessary for reification and quasiquotes
+  // it's too gimmicky and unstructured to be exposed directly in the universe
+  // but we need it in a publicly available place for reification to work
+  trait ReificationSupportApi {
+    /** Selects the type symbol with the given simple name `name` from the defined members of `owner`.
+     */
+    def selectType(owner: Symbol, name: String): TypeSymbol
+
+    /** Selects the term symbol with the given name from the defined members of `owner`.
+     */
+    def selectTerm(owner: Symbol, name: String): TermSymbol
+
+    /** Selects the overloaded method symbol with the given name and index from the defined members of `owner`.
+     */
+    def selectOverloadedMethod(owner: Symbol, name: String, index: Int): MethodSymbol
+
+    /** A fresh symbol with the given name `name`, position `pos` and flags `flags` that has
+     *  `owner` as its owner.
+     */
+    def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: FlagSet, isClass: Boolean): Symbol
+
+    def newScopeWith(elems: Symbol*): Scope
+
+    /** Create a fresh free term symbol.
+     *  @param   name   the name of the free variable
+     *  @param   value  the value of the free variable at runtime
+     *  @param   flags  (optional) flags of the free variable
+     *  @param   origin debug information that tells where this symbol comes from
+     */
+    def newFreeTerm(name: String, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTermSymbol
+
+    /** Create a fresh free type symbol.
+     *  @param   name   the name of the free variable
+     *  @param   flags  (optional) flags of the free variable
+     *  @param   origin debug information that tells where this symbol comes from
+     */
+    def newFreeType(name: String, flags: FlagSet = NoFlags, origin: String = null): FreeTypeSymbol
+
+    /** Set the symbol's type signature to the given type.
+     *  @return the symbol itself
+     */
+    def setInfo[S <: Symbol](sym: S, tpe: Type): S
+
+    /** Set the symbol's annotations to the given annotations `annots`.
+     */
+    def setAnnotations[S <: Symbol](sym: S, annots: List[Annotation]): S
+
+    def mkThis(sym: Symbol): Tree
+
+    def mkSelect(qualifier: Tree, sym: Symbol): Select
+
+    def mkIdent(sym: Symbol): Ident
+
+    def mkTypeTree(tp: Type): TypeTree
+
+    def ThisType(sym: Symbol): Type
+
+    def SingleType(pre: Type, sym: Symbol): Type
+
+    def SuperType(thistpe: Type, supertpe: Type): Type
+
+    def ConstantType(value: Constant): ConstantType
+
+    def TypeRef(pre: Type, sym: Symbol, args: List[Type]): Type
+
+    def RefinedType(parents: List[Type], decls: Scope, typeSymbol: Symbol): RefinedType
+
+    def ClassInfoType(parents: List[Type], decls: Scope, typeSymbol: Symbol): ClassInfoType
+
+    def MethodType(params: List[Symbol], resultType: Type): MethodType
+
+    def NullaryMethodType(resultType: Type): NullaryMethodType
+
+    def PolyType(typeParams: List[Symbol], resultType: Type): PolyType
+
+    def ExistentialType(quantified: List[Symbol], underlying: Type): ExistentialType
+
+    def AnnotatedType(annotations: List[Annotation], underlying: Type): AnnotatedType
+
+    def TypeBounds(lo: Type, hi: Type): TypeBounds
+
+    def BoundedWildcardType(bounds: TypeBounds): BoundedWildcardType
+
+    def thisPrefix(sym: Symbol): Type
+
+    def setType[T <: Tree](tree: T, tpe: Type): T
+
+    def setSymbol[T <: Tree](tree: T, sym: Symbol): T
+
+    def toStats(tree: Tree): List[Tree]
+
+    def mkAnnotation(tree: Tree): Tree
+
+    def mkAnnotation(trees: List[Tree]): List[Tree]
+
+    def mkRefineStat(stat: Tree): Tree
+
+    def mkRefineStat(stats: List[Tree]): List[Tree]
+
+    def mkPackageStat(stat: Tree): Tree
+
+    def mkPackageStat(stats: List[Tree]): List[Tree]
+
+    def mkEarlyDef(defn: Tree): Tree
+
+    def mkEarlyDef(defns: List[Tree]): List[Tree]
+
+    def mkRefTree(qual: Tree, sym: Symbol): Tree
+
+    def freshTermName(prefix: String): TermName
+
+    def freshTypeName(prefix: String): TypeName
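+    /* A minimal illustrative sketch (not from the upstream sources): quasiquotes and reify
+     * are implemented on top of this support API, but it can also be reached directly, e.g.
+     * to obtain compiler-fresh names:
+     *
+     * {{{
+     * import scala.reflect.runtime.universe._
+     * val rs = internal.reificationSupport
+     * val tmp = rs.freshTermName("tmp$")   // a fresh name such as tmp$1
+     * }}}
+     */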
+
+    val ImplicitParams: ImplicitParamsExtractor
+
+    trait ImplicitParamsExtractor {
+      def apply(paramss: List[List[Tree]], implparams: List[Tree]): List[List[Tree]]
+      def unapply(vparamss: List[List[ValDef]]): Some[(List[List[ValDef]], List[ValDef])]
+    }
+
+    val ScalaDot: ScalaDotExtractor
+
+    trait ScalaDotExtractor {
+      def apply(name: Name): Tree
+      def unapply(tree: Tree): Option[Name]
+    }
+
+    val FlagsRepr: FlagsReprExtractor
+
+    trait FlagsReprExtractor {
+      def apply(value: Long): FlagSet
+      def unapply(flags: Long): Some[Long]
+    }
+
+    val SyntacticTypeApplied: SyntacticTypeAppliedExtractor
+    val SyntacticAppliedType: SyntacticTypeAppliedExtractor
+
+    trait SyntacticTypeAppliedExtractor {
+      def apply(tree: Tree, targs: List[Tree]): Tree
+      def unapply(tree: Tree): Option[(Tree, List[Tree])]
+    }
+
+    val SyntacticApplied: SyntacticAppliedExtractor
+
+    trait SyntacticAppliedExtractor {
+      def apply(tree: Tree, argss: List[List[Tree]]): Tree
+      def unapply(tree: Tree): Some[(Tree, List[List[Tree]])]
+    }
+
+    val SyntacticClassDef: SyntacticClassDefExtractor
+
+    trait SyntacticClassDefExtractor {
+      def apply(mods: Modifiers, name: TypeName, tparams: List[Tree],
+                constrMods: Modifiers, vparamss: List[List[Tree]],
+                earlyDefs: List[Tree], parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef
+      def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers, List[List[ValDef]],
+                                       List[Tree], List[Tree], ValDef, List[Tree])]
+    }
+
+    val SyntacticTraitDef: SyntacticTraitDefExtractor
+
+    trait SyntacticTraitDefExtractor {
+      def apply(mods: Modifiers, name: TypeName, tparams: List[Tree],
+                earlyDefs: List[Tree], parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef
+      def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef],
+                                       List[Tree], List[Tree], ValDef, List[Tree])]
+    }
+
+    val SyntacticObjectDef: SyntacticObjectDefExtractor
+
+    trait SyntacticObjectDefExtractor {
+      def apply(mods: Modifiers, name: TermName, earlyDefs: List[Tree],
+                parents: List[Tree], selfType: Tree, body: List[Tree]): ModuleDef
+      def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[Tree], ValDef, List[Tree])]
+    }
+
+    val SyntacticPackageObjectDef: SyntacticPackageObjectDefExtractor
+
+    trait SyntacticPackageObjectDefExtractor {
+      def apply(name: TermName, earlyDefs: List[Tree],
+                parents: List[Tree], selfType: Tree, body: List[Tree]): PackageDef
+      def unapply(tree: Tree): Option[(TermName, List[Tree], List[Tree], ValDef, List[Tree])]
+    }
+
+    val SyntacticTuple: SyntacticTupleExtractor
+    val SyntacticTupleType: SyntacticTupleExtractor
+
+    trait SyntacticTupleExtractor {
+      def apply(args: List[Tree]): Tree
+      def unapply(tree: Tree): Option[List[Tree]]
+    }
+
+    val SyntacticBlock: SyntacticBlockExtractor
+
+    trait SyntacticBlockExtractor {
+      def apply(stats: List[Tree]): Tree
+      def unapply(tree: Tree): Option[List[Tree]]
+    }
+
+    val SyntacticNew: SyntacticNewExtractor
+
+    trait SyntacticNewExtractor {
+      def apply(earlyDefs: List[Tree], parents: List[Tree], selfType: Tree, body: List[Tree]): Tree
+      def unapply(tree: Tree): Option[(List[Tree], List[Tree], ValDef, List[Tree])]
+    }
+
+    val SyntacticFunctionType: SyntacticFunctionTypeExtractor
+
+    trait SyntacticFunctionTypeExtractor {
+      def apply(argtpes: List[Tree], restpe: Tree): Tree
+      def unapply(tree: Tree): Option[(List[Tree], Tree)]
+    }
+
+    val SyntacticFunction: SyntacticFunctionExtractor
+
+    trait SyntacticFunctionExtractor {
+      def apply(params: List[Tree], body: Tree): Function
+
+      def unapply(tree: Function): Option[(List[ValDef], Tree)]
+    }
+
+    val SyntacticDefDef: SyntacticDefDefExtractor
+
+    trait SyntacticDefDefExtractor {
+      def apply(mods: Modifiers, name: TermName, tparams: List[Tree],
+                vparamss: List[List[Tree]], tpt: Tree, rhs: Tree): DefDef
+
+      def unapply(tree: Tree): Option[(Modifiers, TermName, List[TypeDef], List[List[ValDef]], Tree, Tree)]
+    }
+
+    val SyntacticValDef: SyntacticValDefExtractor
+    val SyntacticVarDef: SyntacticValDefExtractor
+
+    trait SyntacticValDefExtractor {
+      def apply(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree): ValDef
+      def unapply(tree: Tree): Option[(Modifiers, TermName, Tree, Tree)]
+    }
+
+    val SyntacticPatDef: SyntacticPatDefExtractor
+
+    trait SyntacticPatDefExtractor {
+      def apply(mods: Modifiers, pat: Tree, tpt: Tree, rhs: Tree): List[ValDef]
+    }
+
+    val SyntacticAssign: SyntacticAssignExtractor
+
+    trait SyntacticAssignExtractor {
+      def apply(lhs: Tree, rhs: Tree): Tree
+      def unapply(tree: Tree): Option[(Tree, Tree)]
+    }
+
+    val SyntacticValFrom: SyntacticValFromExtractor
+
+    trait SyntacticValFromExtractor {
+      def apply(pat: Tree, rhs: Tree): Tree
+      def unapply(tree: Tree): Option[(Tree, Tree)]
+    }
+
+    val SyntacticValEq: SyntacticValEqExtractor
+
+    trait SyntacticValEqExtractor {
+      def apply(pat: Tree, rhs: Tree): Tree
+      def unapply(tree: Tree): Option[(Tree, Tree)]
+    }
+
+    val SyntacticFilter: SyntacticFilterExtractor
+
+    trait SyntacticFilterExtractor {
+      def apply(test: Tree): Tree
+      def unapply(tree: Tree): Option[(Tree)]
+    }
+
+    val SyntacticEmptyTypeTree: SyntacticEmptyTypeTreeExtractor
+
+    trait SyntacticEmptyTypeTreeExtractor {
+      def apply(): TypeTree
+      def unapply(tt: TypeTree): Boolean
+    }
+
+    val SyntacticFor: SyntacticForExtractor
+    val SyntacticForYield: SyntacticForExtractor
+
+    trait SyntacticForExtractor {
+      def apply(enums: List[Tree], body: Tree): Tree
+      def unapply(tree: Tree): Option[(List[Tree], Tree)]
+    }
+
+    def UnliftListElementwise[T](unliftable: Unliftable[T]): UnliftListElementwise[T]
+    trait UnliftListElementwise[T] {
+      def unapply(lst: List[Tree]): Option[List[T]]
+    }
+
+    def UnliftListOfListsElementwise[T](unliftable: Unliftable[T]): UnliftListOfListsElementwise[T]
+    trait UnliftListOfListsElementwise[T] {
+      def unapply(lst: List[List[Tree]]): Option[List[List[T]]]
+    }
+
+    val SyntacticPartialFunction: SyntacticPartialFunctionExtractor
+    trait SyntacticPartialFunctionExtractor {
+      def apply(cases: List[Tree]): Match
+      def unapply(tree: Tree): Option[List[CaseDef]]
+    }
+
+    val SyntacticMatch: SyntacticMatchExtractor
+    trait SyntacticMatchExtractor {
+      def apply(scrutinee: Tree, cases: List[Tree]): Match
+      def unapply(tree: Match): Option[(Tree, List[CaseDef])]
+    }
+
+    val SyntacticTry: SyntacticTryExtractor
+    trait SyntacticTryExtractor {
+      def apply(block: Tree, catches: List[Tree], finalizer: Tree): Try
+      def unapply(tree: Try): Option[(Tree, List[CaseDef], Tree)]
+    }
+
+    val SyntacticTermIdent: SyntacticTermIdentExtractor
+    trait SyntacticTermIdentExtractor {
+      def apply(name: TermName, isBackquoted: Boolean = false): Ident
+      def unapply(id: Ident): Option[(TermName, Boolean)]
+    }
+
+    val SyntacticTypeIdent: SyntacticTypeIdentExtractor
+    trait SyntacticTypeIdentExtractor {
+      def apply(name: TypeName): Ident
+      def unapply(tree: Tree): Option[TypeName]
+    }
+
+    val SyntacticImport: SyntacticImportExtractor
+    trait SyntacticImportExtractor {
+      def apply(expr: Tree, selectors: List[Tree]): Import
+      def unapply(imp: Import): Some[(Tree, List[Tree])]
+    }
+
+    val SyntacticSelectType: SyntacticSelectTypeExtractor
+    trait SyntacticSelectTypeExtractor {
+      def apply(qual: Tree, name: TypeName): Select
+      def unapply(tree: Tree): Option[(Tree, TypeName)]
+    }
+
+    val SyntacticSelectTerm: SyntacticSelectTermExtractor
+    trait SyntacticSelectTermExtractor {
+      def apply(qual: Tree, name: TermName): Select
+      def unapply(tree: Tree): Option[(Tree, TermName)]
+    }
+
+    val SyntacticCompoundType: SyntacticCompoundTypeExtractor
+    trait SyntacticCompoundTypeExtractor {
+      def apply(parents: List[Tree], defns: List[Tree]): CompoundTypeTree
+      def unapply(tree: Tree): Option[(List[Tree], List[Tree])]
+    }
+
+    val SyntacticSingletonType: SyntacitcSingletonTypeExtractor
+    trait SyntacitcSingletonTypeExtractor {
+      def apply(tree: Tree): SingletonTypeTree
+      def unapply(tree: Tree): Option[Tree]
+    }
+
+    val SyntacticTypeProjection: SyntacticTypeProjectionExtractor
+    trait SyntacticTypeProjectionExtractor {
+      def apply(qual: Tree, name: TypeName): SelectFromTypeTree
+      def unapply(tree: Tree): Option[(Tree, TypeName)]
+    }
+
+    val SyntacticAnnotatedType: SyntacticAnnotatedTypeExtractor
+    trait SyntacticAnnotatedTypeExtractor {
+      def apply(tpt: Tree, annot: Tree): Annotated
+      def unapply(tree: Tree): Option[(Tree, Tree)]
+    }
+
+    val SyntacticExistentialType: SyntacticExistentialTypeExtractor
+    trait SyntacticExistentialTypeExtractor {
+      def apply(tpt: Tree, where: List[Tree]): ExistentialTypeTree
+      def unapply(tree: Tree): Option[(Tree, List[MemberDef])]
+    }
+  }
+
+  @deprecated("Use `internal.reificationSupport` instead", "2.11.0")
+  val build: ReificationSupportApi
+
+  @deprecated("Use `internal.ReificationSupportApi` instead", "2.11.0")
+  type BuildApi = ReificationSupportApi
+
+  /** This trait provides support for importers, a facility to migrate reflection artifacts between universes.
+   * ''Note: this trait should rarely be needed.''
+   *
+   *  Reflection artifacts, such as [[scala.reflect.api.Symbols Symbols]] and [[scala.reflect.api.Types Types]],
+   *  are contained in [[scala.reflect.api.Universe Universe]]s. Typically all processing happens
+   *  within a single `Universe` (e.g. a compile-time macro `Universe` or a runtime reflection `Universe`), but sometimes
+   *  there is a need to migrate artifacts from one `Universe` to another. For example, runtime compilation works by
+   *  importing runtime reflection trees into a runtime compiler universe, compiling the importees and exporting the
+   *  result back.
+   *
+   *  Reflection artifacts are firmly grounded in their `Universe`s, which is reflected by the fact that types of artifacts
+   *  from different universes are not compatible. By using `Importer`s, however, they can be imported from one universe
+   *  into another. For example, to import `foo.bar.Baz` from the source `Universe` to the target `Universe`,
+   *  an importer will first check whether the entire owner chain exists in the target `Universe`.
+   *  If it does, then nothing else will be done. Otherwise, the importer will recreate the entire owner chain
+   *  and will import the corresponding type signatures into the target `Universe`.
+   *
+   *  Since importers match `Symbol` tables of the source and the target `Universe`s using plain string names,
+   *  it is the programmer's responsibility to make sure that imports don't distort semantics, e.g., that
+   *  `foo.bar.Baz` in the source `Universe` means the same as `foo.bar.Baz` in the target `Universe`.
+   *
+   *  === Example ===
+   *
+   *  Here's how one might implement a macro that performs compile-time evaluation of its argument
+   *  by using a runtime compiler to compile and evaluate a tree that belongs to a compile-time compiler:
+   *
+   *  {{{
+   *  def staticEval[T](x: T) = macro staticEvalImpl[T]
+   *
+   *  def staticEvalImpl[T](c: scala.reflect.macros.blackbox.Context)(x: c.Expr[T]) = {
+   *    // creates a runtime reflection universe to host runtime compilation
+   *    import scala.reflect.runtime.{universe => ru}
+   *    val mirror = ru.runtimeMirror(c.libraryClassLoader)
+   *    import scala.tools.reflect.ToolBox
+   *    val toolBox = mirror.mkToolBox()
+   *
+   *    // runtime reflection universe and compile-time macro universe are different
+   *    // therefore an importer is needed to bridge them
+   *    // currently mkImporter requires a cast to correctly assign the path-dependent types
+   *    val importer0 = ru.internal.mkImporter(c.universe)
+   *    val importer = importer0.asInstanceOf[ru.internal.Importer { val from: c.universe.type }]
+   *
+   *    // the created importer is used to turn a compiler tree into a runtime compiler tree
+   *    // both compilers use the same classpath, so the semantics remain intact
+   *    val imported = importer.importTree(x.tree)
+   *
+   *    // after the tree is imported, it can be evaluated as usual
+   *    val untypechecked = toolBox.untypecheck(imported.duplicate)
+   *    val valueOfX = toolBox.eval(untypechecked).asInstanceOf[T]
+   *    ...
+   *  }
+   *  }}}
+   *
+   *  @group Internal
+   */
+  // SI-6241: move importers to a mirror
+  trait Importer {
+    /** The source universe of reflection artifacts that will be processed.
+     *  The target universe is the universe that created this importer with `mkImporter`.
+     */
+    val from: Universe
+
+    /** An importer that works in the reverse direction, i.e. it imports
+     *  reflection artifacts from the current universe into the universe specified in `from`.
+     */
+    val reverse: from.Importer { val from: self.type }
+
+    /** In the current universe, locates or creates a symbol that corresponds to the provided symbol in the source universe.
+     *  If necessary imports the owner chain, companions, type signature, annotations and attachments.
+     */
+    def importSymbol(sym: from.Symbol): Symbol
+
+    /** In the current universe, locates or creates a type that corresponds to the provided type in the source universe.
+     *  If necessary imports the underlying symbols, annotations, scopes and trees.
+     */
+    def importType(tpe: from.Type): Type
+
+    /** In the current universe, creates a tree that corresponds to the provided tree in the source universe.
+     *  If necessary imports the underlying symbols, types and attachments.
+     */
+    def importTree(tree: from.Tree): Tree
+
+    /** In the current universe, creates a position that corresponds to the provided position in the source universe.
+     */
+    def importPosition(pos: from.Position): Position
+  }
+
+  @deprecated("Use `internal.createImporter` instead", "2.11.0")
+  def mkImporter(from0: Universe): Importer { val from: from0.type } = internal.createImporter(from0)
+
+  /** Marks the underlying reference to `id` as boxed.
+   *
+   *  <b>Precondition:</b> `id` must refer to a captured variable.
+   *  A reference marked in this way will refer to the boxed entity; no dereferencing
+   *  with `.elem` is done on it.
+   *  This tree node can be emitted by macros such as reify that call `referenceCapturedVariable`.
+   *  It is eliminated in LambdaLift, where the boxing conversion takes place.
+   *  @group Internal
+   *  @template
+   */
+  type ReferenceToBoxed >: Null <: ReferenceToBoxedApi with TermTree
+
+  /** The constructor/extractor for `ReferenceToBoxed` instances.
+   *  @group Internal
+   */
+  val ReferenceToBoxed: ReferenceToBoxedExtractor
+
+  /** An extractor class to create and pattern match with syntax `ReferenceToBoxed(ident)`.
+   *  This AST node does not have a direct correspondence to Scala code,
+   *  and is emitted by macros to reference captured variables directly without going through `elem`.
+   *
+   *  For example:
+   *
+   *    var x = ...
+   *    fun { x }
+   *
+   *  This will emit:
+   *
+   *    Ident(x)
+   *
+   *  This gets transformed to:
+   *
+   *    Select(Ident(x), "elem")
+   *
+   *  If `ReferenceToBoxed` were used instead of Ident, no transformation would be performed.
+   *  @group Internal
+   */
+  abstract class ReferenceToBoxedExtractor {
+    def apply(ident: Ident): ReferenceToBoxed
+    def unapply(referenceToBoxed: ReferenceToBoxed): Option[Ident]
+  }
+
+  /** The API that all `ReferenceToBoxed` trees support.
+   *  @group Internal
+   */
+  trait ReferenceToBoxedApi extends TermTreeApi { this: ReferenceToBoxed =>
+    /** The underlying reference. */
+    def ident: Tree
+  }
+
+  /** Tag that preserves the identity of `ReferenceToBoxed` in the face of erasure.
+   *  Can be used for pattern matching, instance tests, serialization and the like.
+   *  @group Internal
+   */
+  implicit val ReferenceToBoxedTag: ClassTag[ReferenceToBoxed]
+
+  /** The type of free terms introduced by reification.
+   *  @group Internal
+   *  @template
+   */
+  type FreeTermSymbol >: Null <: FreeTermSymbolApi with TermSymbol
+
+  /** The API of free term symbols.
+   *  The main source of information about symbols is the [[Symbols]] page.
+   *
+   *  $SYMACCESSORS
+   *  @group Internal
+   */
+  trait FreeTermSymbolApi extends TermSymbolApi { this: FreeTermSymbol =>
+    /** The place where this symbol has been spawned.
+     *
+     *  @group FreeTerm
+     */
+    def origin: String
+
+    /** The value this symbol refers to.
+     *
+     *  @group FreeTerm
+     */
+    def value: Any
+  }
+
+  /** Tag that preserves the identity of `FreeTermSymbol` in the face of erasure.
+   *  Can be used for pattern matching, instance tests, serialization and the like.
+   *  @group Internal
+   */
+  implicit val FreeTermSymbolTag: ClassTag[FreeTermSymbol]
+
+  /** The type of free types introduced by reification.
+   *  @group Internal
+   *  @template
+   */
+  type FreeTypeSymbol >: Null <: FreeTypeSymbolApi with TypeSymbol
+
+  /** The API of free type symbols.
+   *  The main source of information about symbols is the [[Symbols]] page.
+   *
+   *  $SYMACCESSORS
+   *  @group Internal
+   */
+  trait FreeTypeSymbolApi extends TypeSymbolApi { this: FreeTypeSymbol =>
+    /** The place where this symbol has been spawned.
+     *
+     *  @group FreeType
+     */
+    def origin: String
+  }
+
+  /** Tag that preserves the identity of `FreeTypeSymbol` in the face of erasure.
+   *  Can be used for pattern matching, instance tests, serialization and the like.
+   *  @group Internal
+   */
+  implicit val FreeTypeSymbolTag: ClassTag[FreeTypeSymbol]
+
+  /** Provides enrichments to ensure source compatibility between Scala 2.10 and Scala 2.11.
+   *  If in your reflective program for Scala 2.10 you've used something that's now become an internal API,
+   *  a single `compat._` import will fix things for you.
+   *  @group Internal
+   */
+  val compat: Compat
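+  /* A minimal illustrative sketch (not from the upstream sources): a 2.10-era snippet that
+   * uses APIs now moved to `internal` can be kept compiling by bringing the compatibility
+   * enrichments below into scope:
+   *
+   * {{{
+   * import scala.reflect.runtime.universe._
+   * import compat._
+   * val tree = reify(42).tree
+   * tree.freeTerms                   // forwarded to internal.freeTerms(tree), with a deprecation warning
+   * }}}
+   */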
+
+  /** @see [[compat]]
+   *  @group Internal
+   */
+  type Compat <: CompatApi
+
+  /** Presence of an implicit value of this type in scope
+   *  indicates that source compatibility with Scala 2.10 has been enabled.
+   *  @group Internal
+   */
+  @scala.annotation.implicitNotFound("This method has been removed from the public API. Import compat._ or migrate away.")
+  class CompatToken
+
+  /** @see [[compat]]
+   *  @group Internal
+   */
+  trait CompatApi {
+    /** @see [[CompatToken]] */
+    implicit val token = new CompatToken
+
+    /** @see [[InternalApi.typeTagToManifest]] */
+    @deprecated("Use `internal.typeTagToManifest` instead", "2.11.0")
+    def typeTagToManifest[T: ClassTag](mirror: Any, tag: Universe#TypeTag[T]): Manifest[T] =
+      internal.typeTagToManifest(mirror, tag)
+
+    /** @see [[InternalApi.manifestToTypeTag]] */
+    @deprecated("Use `internal.manifestToTypeTag` instead", "2.11.0")
+    def manifestToTypeTag[T](mirror: Any, manifest: Manifest[T]): Universe#TypeTag[T] =
+      internal.manifestToTypeTag(mirror, manifest)
+
+    /** @see [[InternalApi.newScopeWith]] */
+    @deprecated("Use `internal.newScopeWith` instead", "2.11.0")
+    def newScopeWith(elems: Symbol*): Scope =
+      internal.newScopeWith(elems: _*)
+
+    /** Scala 2.10 compatibility enrichments for BuildApi. */
+    implicit class CompatibleBuildApi(api: BuildApi) {
+      /** @see [[BuildApi.setInfo]] */
+      @deprecated("Use `internal.reificationSupport.setInfo` instead", "2.11.0")
+      def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S = internal.reificationSupport.setInfo(sym, tpe)
+
+      /** @see [[BuildApi.FlagsRepr]] */
+      @deprecated("Use `internal.reificationSupport.FlagsRepr` instead", "2.11.0")
+      def flagsFromBits(bits: Long): FlagSet = internal.reificationSupport.FlagsRepr(bits)
+
+      /** @see [[noSelfType]] */
+      @deprecated("Use `noSelfType` instead", "2.11.0")
+      def emptyValDef: ValDef = noSelfType
+
+      /** @see [[BuildApi.mkThis]] */
+      @deprecated("Use `internal.reificationSupport.mkThis` instead", "2.11.0")
+      def This(sym: Symbol): Tree = internal.reificationSupport.mkThis(sym)
+
+      /** @see [[BuildApi.mkSelect]] */
+      @deprecated("Use `internal.reificationSupport.mkSelect` instead", "2.11.0")
+      def Select(qualifier: Tree, sym: Symbol): Select = internal.reificationSupport.mkSelect(qualifier, sym)
+
+      /** @see [[BuildApi.mkIdent]] */
+      @deprecated("Use `internal.reificationSupport.mkIdent` instead", "2.11.0")
+      def Ident(sym: Symbol): Ident = internal.reificationSupport.mkIdent(sym)
+
+      /** @see [[BuildApi.mkTypeTree]] */
+      @deprecated("Use `internal.reificationSupport.mkTypeTree` instead", "2.11.0")
+      def TypeTree(tp: Type): TypeTree = internal.reificationSupport.mkTypeTree(tp)
+    }
+
+    /** Scala 2.10 compatibility enrichments for Tree. */
+    implicit class CompatibleTree(tree: Tree) {
+      /** @see [[InternalApi.freeTerms]] */
+      @deprecated("Use `internal.freeTerms` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def freeTerms: List[FreeTermSymbol] = internal.freeTerms(tree)
+
+      /** @see [[InternalApi.freeTypes]] */
+      @deprecated("Use `internal.freeTerms` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def freeTypes: List[FreeTypeSymbol] = internal.freeTypes(tree)
+
+      /** @see [[InternalApi.substituteSymbols]] */
+      @deprecated("Use `internal.substituteSymbols` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def substituteSymbols(from: List[Symbol], to: List[Symbol]): Tree = internal.substituteSymbols(tree, from, to)
+
+      /** @see [[InternalApi.substituteTypes]] */
+      @deprecated("Use `internal.substituteTypes` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def substituteTypes(from: List[Symbol], to: List[Type]): Tree = internal.substituteTypes(tree, from, to)
+
+      /** @see [[InternalApi.substituteThis]] */
+      @deprecated("Use `internal.substituteThis` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def substituteThis(clazz: Symbol, to: Tree): Tree = internal.substituteThis(tree, clazz, to)
+    }
+
+    /** Scala 2.10 compatibility enrichments for Symbol. */
+    implicit class CompatibleSymbol(symbol: Symbol) {
+      @deprecated("This API is unreliable. Use `isPrivateThis` or `isProtectedThis` instead", "2.11.0")
+      def isLocal: Boolean = symbol.asInstanceOf[scala.reflect.internal.Symbols#Symbol].isLocal
+
+      @deprecated("This API is unreliable. Use `overrides.nonEmpty` instead", "2.11.0")
+      def isOverride: Boolean = symbol.asInstanceOf[scala.reflect.internal.Symbols#Symbol].isOverride
+
+      /** @see [[InternalApi.isFreeTerm]] */
+      @deprecated("Use `internal.isFreeTerm` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def isFreeTerm: Boolean = internal.isFreeTerm(symbol)
+
+      /** @see [[InternalApi.asFreeTerm]] */
+      @deprecated("Use `internal.asFreeTerm` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def asFreeTerm: FreeTermSymbol = internal.asFreeTerm(symbol)
+
+      /** @see [[InternalApi.isFreeType]] */
+      @deprecated("Use `internal.isFreeType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def isFreeType: Boolean = internal.isFreeType(symbol)
+
+      /** @see [[InternalApi.asFreeType]] */
+      @deprecated("Use `internal.asFreeType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def asFreeType: FreeTypeSymbol = internal.asFreeType(symbol)
+
+      /** @see [[InternalApi.newTermSymbol]] */
+      @deprecated("Use `internal.newTermSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def newTermSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol = internal.newTermSymbol(symbol, name, pos, flags)
+
+      /** @see [[InternalApi.newModuleAndClassSymbol]] */
+      @deprecated("Use `internal.newModuleAndClassSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol) = internal.newModuleAndClassSymbol(symbol, name, pos, flags)
+
+      /** @see [[InternalApi.newMethodSymbol]] */
+      @deprecated("Use `internal.newMethodSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def newMethodSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol = internal.newMethodSymbol(symbol, name, pos, flags)
+
+      /** @see [[InternalApi.newTypeSymbol]] */
+      @deprecated("Use `internal.newTypeSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def newTypeSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol = internal.newTypeSymbol(symbol, name, pos, flags)
+
+      /** @see [[InternalApi.newClassSymbol]] */
+      @deprecated("Use `internal.newClassSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def newClassSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol = internal.newClassSymbol(symbol, name, pos, flags)
+
+      /** @see [[InternalApi.isErroneous]] */
+      @deprecated("Use `internal.isErroneous` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def isErroneous: Boolean = internal.isErroneous(symbol)
+
+      /** @see [[InternalApi.isSkolem]] */
+      @deprecated("Use `internal.isSkolem` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def isSkolem: Boolean = internal.isSkolem(symbol)
+
+      /** @see [[InternalApi.deSkolemize]] */
+      @deprecated("Use `internal.deSkolemize` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def deSkolemize: Symbol = internal.deSkolemize(symbol)
+    }
+
+    /** @see [[InternalApi.singleType]] */
+    @deprecated("Use `internal.singleType` instead", "2.11.0")
+    def singleType(pre: Type, sym: Symbol): Type = internal.singleType(pre, sym)
+
+    /** @see [[InternalApi.refinedType]] */
+    @deprecated("Use `internal.refinedType` instead", "2.11.0")
+    def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type = internal.refinedType(parents, owner, decls, pos)
+
+    /** @see [[InternalApi.refinedType]] */
+    @deprecated("Use `internal.refinedType` instead", "2.11.0")
+    def refinedType(parents: List[Type], owner: Symbol): Type = internal.refinedType(parents, owner)
+
+    /** @see [[InternalApi.typeRef]] */
+    @deprecated("Use `internal.typeRef` instead", "2.11.0")
+    def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type = internal.typeRef(pre, sym, args)
+
+    /** @see [[InternalApi.intersectionType]] */
+    @deprecated("Use `internal.intersectionType` instead", "2.11.0")
+    def intersectionType(tps: List[Type]): Type = internal.intersectionType(tps)
+
+    /** @see [[InternalApi.intersectionType]] */
+    @deprecated("Use `internal.intersectionType` instead", "2.11.0")
+    def intersectionType(tps: List[Type], owner: Symbol): Type = internal.intersectionType(tps, owner)
+
+    /** @see [[InternalApi.polyType]] */
+    @deprecated("Use `internal.polyType` instead", "2.11.0")
+    def polyType(tparams: List[Symbol], tpe: Type): Type = internal.polyType(tparams, tpe)
+
+    /** @see [[InternalApi.existentialAbstraction]] */
+    @deprecated("Use `internal.existentialAbstraction` instead", "2.11.0")
+    def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type = internal.existentialAbstraction(tparams, tpe0)
+  }
+}
diff --git a/src/reflect/scala/reflect/api/JavaMirrors.scala b/src/reflect/scala/reflect/api/JavaMirrors.scala
deleted file mode 100644
index b678033..0000000
--- a/src/reflect/scala/reflect/api/JavaMirrors.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-package scala.reflect
-package api
-
-/**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
- *
- * A refinement of [[scala.reflect.api.Mirror]] for runtime reflection using JVM classloaders.
- *
- *  This refinement equips mirrors with reflection capabilities for the JVM. `JavaMirror` can
- *  convert Scala reflection artifacts (symbols and types) into Java reflection artifacts (classes)
- *  and vice versa. It can also perform reflective invocations (getting/setting field values,
- *  calling methods, etc).
- *
- *  For more information about `Mirrors`s, see [[scala.reflect.api.Mirrors]] or the
- *  [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]]
- *
- *  @groupname JavaMirrors Java Mirrors
- *  @group ReflectionAPI
- */
-trait JavaMirrors { self: JavaUniverse =>
-
-  /** In runtime reflection universes, runtime representation of a class is `java.lang.Class`.
-   *  @group JavaMirrors
-   */
-  type RuntimeClass = java.lang.Class[_]
-
-  /** In runtime reflection universes, mirrors are `JavaMirrors`.
-   *  @group JavaMirrors
-   */
-  override type Mirror >: Null <: JavaMirror
-
-  /** A refinement of [[scala.reflect.api.Mirror]] for runtime reflection using JVM classloaders.
-   *
-   *  With this upgrade, mirrors become capable of converting Scala reflection artifacts (symbols and types)
-   *  into Java reflection artifacts (classes) and vice versa. Consequently, refined mirrors
-   *  become capable of performing reflective invocations (getting/setting field values, calling methods, etc).
-   *
-   *  For more information about `Mirrors`s, see [[scala.reflect.api.Mirrors]] or the
-   * [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]]
-   *
-   *  @group JavaMirrors
-   */
-  trait JavaMirror extends scala.reflect.api.Mirror[self.type] with RuntimeMirror {
-    val classLoader: ClassLoader
-    override def toString = s"JavaMirror with ${runtime.ReflectionUtils.show(classLoader)}"
-  }
-
-  /** Creates a runtime reflection mirror from a JVM classloader.
-   *
-   *  For more information about `Mirrors`s, see [[scala.reflect.api.Mirrors]] or the
-   * [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]]
-   *
-   *  @group JavaMirrors
-   */
-  def runtimeMirror(cl: ClassLoader): Mirror
-}
diff --git a/src/reflect/scala/reflect/api/JavaUniverse.scala b/src/reflect/scala/reflect/api/JavaUniverse.scala
index 04d091e..88107ea 100644
--- a/src/reflect/scala/reflect/api/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/api/JavaUniverse.scala
@@ -1,13 +1,16 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
 /**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *  <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
- * A refinement of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders.
+ *  A refinement of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders.
  *
- *  The refinement consists of an upgrade to the mirror API, which gets extended from [[scala.reflect.api.Mirror]]
- *  to [[scala.reflect.api.JavaMirrors#JavaMirror]].
+ *  This refinement equips mirrors with reflection capabilities for the JVM. `JavaMirror` can
+ *  convert Scala reflection artifacts (symbols and types) into Java reflection artifacts (classes)
+ *  and vice versa. It can also perform reflective invocations (getting/setting field values,
+ *  calling methods, etc).
  *
  *  See the [[http://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] for details on how to use runtime reflection.
  *
@@ -16,31 +19,41 @@ package api
  *
  *  @contentDiagram hideNodes "*Api"
  */
-trait JavaUniverse extends Universe with JavaMirrors { self =>
+trait JavaUniverse extends Universe { self =>
 
-  /*  @group JavaUniverse */
-  override def typeTagToManifest[T: ClassTag](mirror0: Any, tag: Universe # TypeTag[T]): Manifest[T] = {
-    // SI-6239: make this conversion more precise
-    val mirror = mirror0.asInstanceOf[Mirror]
-    val runtimeClass = mirror.runtimeClass(tag.in(mirror).tpe)
-    Manifest.classType(runtimeClass).asInstanceOf[Manifest[T]]
+  /** In runtime reflection universes, runtime representation of a class is `java.lang.Class`.
+   *  @group JavaMirrors
+   */
+  type RuntimeClass = java.lang.Class[_]
+  implicit val RuntimeClassTag: ClassTag[RuntimeClass] = ClassTag[RuntimeClass](classOf[RuntimeClass])
+
+  /** In runtime reflection universes, mirrors are `JavaMirrors`.
+   *  @group JavaMirrors
+   */
+  override type Mirror >: Null <: JavaMirror
+
+  /** A refinement of [[scala.reflect.api.Mirror]] for runtime reflection using JVM classloaders.
+   *
+   *  With this upgrade, mirrors become capable of converting Scala reflection artifacts (symbols and types)
+   *  into Java reflection artifacts (classes) and vice versa. Consequently, refined mirrors
+   *  become capable of performing reflective invocations (getting/setting field values, calling methods, etc).
+   *
+   *  For more information about `Mirror`s, see [[scala.reflect.api.Mirrors]] or the
+   * [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]]
+   *
+   *  @group JavaMirrors
+   */
+  trait JavaMirror extends scala.reflect.api.Mirror[self.type] with RuntimeMirror {
+    val classLoader: ClassLoader
+    override def toString = s"JavaMirror with ${runtime.ReflectionUtils.show(classLoader)}"
   }
 
-  /*  @group JavaUniverse */
-  override def manifestToTypeTag[T](mirror0: Any, manifest: Manifest[T]): Universe # TypeTag[T] =
-    TypeTag(mirror0.asInstanceOf[Mirror], new TypeCreator {
-      def apply[U <: Universe with Singleton](mirror: scala.reflect.api.Mirror[U]): U # Type = {
-        mirror.universe match {
-          case ju: JavaUniverse =>
-            val jm = mirror.asInstanceOf[ju.Mirror]
-            val sym = jm.classSymbol(manifest.erasure)
-            val tpe =
-              if (manifest.typeArguments.isEmpty) sym.toType
-              else ju.appliedType(sym.toTypeConstructor, manifest.typeArguments map (targ => ju.manifestToTypeTag(jm, targ)) map (_.in(jm).tpe))
-            tpe.asInstanceOf[U # Type]
-          case u =>
-            u.manifestToTypeTag(mirror.asInstanceOf[u.Mirror], manifest).in(mirror).tpe
-        }
-      }
-    })
-}
+  /** Creates a runtime reflection mirror from a JVM classloader.
+   *
+   *  For more information about `Mirror`s, see [[scala.reflect.api.Mirrors]] or the
+   * [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]]
+   *
+   *  @group JavaMirrors
+   */
+  def runtimeMirror(cl: ClassLoader): Mirror
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/api/Liftables.scala b/src/reflect/scala/reflect/api/Liftables.scala
new file mode 100644
index 0000000..673dbce
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Liftables.scala
@@ -0,0 +1,75 @@
+package scala
+package reflect
+package api
+
+trait Liftables { self: Universe =>
+
+  /** A type class that defines a representation of `T` as a `Tree`.
+   *
+   *  @see [[http://docs.scala-lang.org/overviews/quasiquotes/lifting.html]]
+   */
+  trait Liftable[T] {
+    def apply(value: T): Tree
+  }
+
+  /** Companion to `Liftable` type class that contains standard instances
+   *  and provides a helper `apply` method to simplify creation of new ones.
+   */
+  object Liftable extends StandardLiftableInstances {
+    /** A helper method that simplifies creation of `Liftable` instances.
+     *  Takes a type and a function that maps that type to a tree representation.
+     *
+     *  For example, to write a `Liftable` for an object, one might use it like this:
+     *
+     *  {{{
+     *  scala> object O
+     *
+     *  scala> val Oref = symbolOf[O.type].asClass.module
+     *
+     *  scala> implicit val liftO = Liftable[O.type] { _ => q"$Oref" }
+     *
+     *  scala> val lifted = q"$O"
+     *  lifted: universe.Tree = O
+     *  }}}
+     *
+     *  @see [[http://docs.scala-lang.org/overviews/quasiquotes/lifting.html]]
+     */
+    def apply[T](f: T => Tree): Liftable[T] =
+      new Liftable[T] { def apply(value: T): Tree = f(value) }
+  }
+
+  /** A type class that defines a way to extract an instance of `T` from a `Tree`.
+   *
+   *  @see [[http://docs.scala-lang.org/overviews/quasiquotes/unlifting.html]]
+   */
+  trait Unliftable[T] {
+    def unapply(tree: Tree): Option[T]
+  }
+
+  /** Companion to `Unliftable` type class that contains standard instances
+   *  and provides a helper `apply` method to simplify creation of new ones.
+   */
+  object Unliftable extends StandardUnliftableInstances {
+    /** A helper method that simplifies creation of `Unliftable` instances.
+     *  Takes a partial function which is defined on correct representations of `T`
+     *  and returns corresponding instances.
+     *
+     *  For example, to extract a reference to an object as the object itself:
+     *
+     *  {{{
+     *  scala> object O
+     *
+     *  scala> val Oref = symbolOf[O.type].asClass.module
+     *
+     *  scala> implicit val unliftO = Unliftable[O.type] { case t if t.symbol == Oref => O }
+     *
+     *  scala> val q"${_: O.type}" = q"$Oref"
+     *  }}}
+     *
+     *  @see [[http://docs.scala-lang.org/overviews/quasiquotes/unlifting.html]]
+     */
+    def apply[T](pf: PartialFunction[Tree, T]): Unliftable[T] = new Unliftable[T] {
+      def unapply(value: Tree): Option[T] = pf.lift(value)
+    }
+  }
+}
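+/* A minimal illustrative sketch (not from the upstream sources): a user-defined Liftable for
+ * a simple case class. `Point` is a hypothetical type that must be in scope both where the
+ * instance is defined and where the resulting tree is later typechecked:
+ *
+ * {{{
+ * import scala.reflect.runtime.universe._
+ * case class Point(x: Int, y: Int)
+ * implicit val liftPoint: Liftable[Point] =
+ *   Liftable[Point] { p => q"Point(${p.x}, ${p.y})" }
+ * val tree = q"${Point(1, 2)}"       // the tree Point(1, 2)
+ * }}}
+ */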
diff --git a/src/reflect/scala/reflect/api/Mirror.scala b/src/reflect/scala/reflect/api/Mirror.scala
index 1223326..318fdb3 100644
--- a/src/reflect/scala/reflect/api/Mirror.scala
+++ b/src/reflect/scala/reflect/api/Mirror.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
 /**
@@ -49,16 +50,16 @@ abstract class Mirror[U <: Universe with Singleton] {
    *  If you need a symbol that corresponds to the type alias itself, load it directly from the package class:
    *
    *    scala> cm.staticClass("scala.List")
-   *    res0: reflect.runtime.universe.ClassSymbol = class List
+   *    res0: scala.reflect.runtime.universe.ClassSymbol = class List
    *
    *    scala> res0.fullName
    *    res1: String = scala.collection.immutable.List
    *
    *    scala> cm.staticPackage("scala")
-   *    res2: reflect.runtime.universe.ModuleSymbol = package scala
+   *    res2: scala.reflect.runtime.universe.ModuleSymbol = package scala
    *
-   *    scala> res2.moduleClass.typeSignature member newTypeName("List")
-   *    res3: reflect.runtime.universe.Symbol = type List
+   *    scala> res2.moduleClass.info member newTypeName("List")
+   *    res3: scala.reflect.runtime.universe.Symbol = type List
    *
    *    scala> res3.fullName
    *    res4: String = scala.List
@@ -78,11 +79,10 @@ abstract class Mirror[U <: Universe with Singleton] {
    *    }
    *
    *  staticClass("foo.B") will resolve to the symbol corresponding to the class B declared in the package foo, and
-   *  staticClass("foo.A") will throw a MissingRequirementException (which is exactly what scalac would do if this
-   *  fully qualified class name is written inside any package in a Scala program).
+   *  staticClass("foo.A") will throw a ScalaReflectionException.
    *
    *  In the example above, to load a symbol that corresponds to the class B declared in the object foo,
-   *  use staticModule("foo") to load the module symbol and then navigate typeSignature.members of its moduleClass.
+   *  use staticModule("foo") to load the module symbol and then navigate info.members of its moduleClass.
    *  @group Mirror
    */
   def staticClass(fullName: String): U#ClassSymbol
@@ -105,11 +105,10 @@ abstract class Mirror[U <: Universe with Singleton] {
    *    }
    *
    *  staticModule("foo.B") will resolve to the symbol corresponding to the object B declared in the package foo, and
-   *  staticModule("foo.A") will throw a MissingRequirementException (which is exactly what scalac would do if this
-   *  fully qualified class name is written inside any package in a Scala program).
+   *  staticModule("foo.A") will throw a ScalaReflectionException
    *
    *  In the example above, to load a symbol that corresponds to the object B declared in the object foo,
-   *  use staticModule("foo") to load the module symbol and then navigate typeSignature.members of its moduleClass.
+   *  use staticModule("foo") to load the module symbol and then navigate info.members of its moduleClass.
    *  @group Mirror
    */
   def staticModule(fullName: String): U#ModuleSymbol
@@ -119,4 +118,22 @@ abstract class Mirror[U <: Universe with Singleton] {
    *  @group Mirror
    */
   def staticPackage(fullName: String): U#ModuleSymbol
+
+  /**
+   * Shortcut for `implicitly[WeakTypeTag[T]].tpe`
+   * @group TypeTags
+   */
+  def weakTypeOf[T: universe.WeakTypeTag]: U#Type = universe.weakTypeTag[T].in(this).tpe
+
+  /**
+   * Shortcut for `implicitly[TypeTag[T]].tpe`
+   * @group TypeTags
+   */
+  def typeOf[T: universe.TypeTag]: U#Type = universe.typeTag[T].in(this).tpe
+
+  /**
+   * Type symbol of `x` as derived from a type tag.
+   * @group TypeTags
+   */
+  def symbolOf[T: universe.WeakTypeTag]: U#TypeSymbol
 }
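
A small usage sketch (illustrative, not part of the patch) of the static* lookups and the weakTypeOf/typeOf/symbolOf shortcuts documented above, using the runtime classloader mirror:

    import scala.reflect.runtime.{currentMirror => cm, universe => ru}

    // Resolve symbols by fully qualified name through the classloader mirror.
    val listClass = cm.staticClass("scala.List")   // follows the alias to the underlying class
    val scalaPkg  = cm.staticPackage("scala")

    println(listClass.fullName)                    // scala.collection.immutable.List
    println(scalaPkg.fullName)                     // scala

    // The shortcuts added to Mirror resolve type tags in this mirror.
    val listTpe = cm.typeOf[List[Int]]
    val optSym  = cm.symbolOf[Option[_]]
    println(optSym.fullName)                       // scala.Option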
diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala
index 76a7594..ec420d1 100644
--- a/src/reflect/scala/reflect/api/Mirrors.scala
+++ b/src/reflect/scala/reflect/api/Mirrors.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
 /**
@@ -28,12 +29,12 @@ package api
  * Compile-time `Mirror`s make use of only classloader `Mirror`s to load `Symbol`s
  * by name.
  *
- * The entry point to classloader `Mirror`s is via [[scala.reflect.macros.Context#mirror]].
+ * The entry point to classloader `Mirror`s is via [[scala.reflect.macros.blackbox.Context#mirror]] or [[scala.reflect.macros.whitebox.Context#mirror]].
  * Typical methods which use classloader `Mirror`s include [[scala.reflect.api.Mirror#staticClass]],
  * [[scala.reflect.api.Mirror#staticModule]], and [[scala.reflect.api.Mirror#staticPackage]]. For
  * example:
  * {{{
- *  import scala.reflect.macros.Context
+ *  import scala.reflect.macros.blackbox.Context
  *
  *  case class Location(filename: String, line: Int, column: Int)
  *
@@ -61,7 +62,7 @@ package api
  * The entry point to `Mirror`s for use at runtime is via `ru.runtimeMirror(<classloader>)`, where
  * `ru` is [[scala.reflect.runtime.universe]].
  *
- * The result of a [[scala.reflect.api.JavaMirrors#runtimeMirror]] call is a classloader mirror,
+ * The result of a [[scala.reflect.api.JavaUniverse#runtimeMirror]] call is a classloader mirror,
  * of type [[scala.reflect.api.Mirrors#ReflectiveMirror]], which can load symbols by names as
  * discussed above (in the “Compile-time” section).
  *
@@ -100,7 +101,7 @@ package api
  * via `ModuleMirror.instance`). Entry point: `val mm = im.reflectMethod(<method symbol>)`.
  * Example:
  * {{{
- *   scala> val methodX = typeOf[C].declaration(newTermName("x")).asMethod
+ *   scala> val methodX = typeOf[C].declaration(TermName("x")).asMethod
  *   methodX: reflect.runtime.universe.MethodSymbol = method x
  *
  *   scala> val mm = im.reflectMethod(methodX)
@@ -125,7 +126,7 @@ package api
  *   scala> val im = m.reflect(new C)
  *   im: reflect.runtime.universe.InstanceMirror = instance mirror for C at 5f0c8ac1
  *
- *   scala> val fieldX = typeOf[C].declaration(newTermName("x")).asTerm.accessed.asTerm
+ *   scala> val fieldX = typeOf[C].declaration(TermName("x")).asTerm.accessed.asTerm
  *   fieldX: reflect.runtime.universe.TermSymbol = value x
  *   scala> val fmX = im.reflectField(fieldX)
  *   fmX: reflect.runtime.universe.FieldMirror = field mirror for C.x (bound to C at 5f0c8ac1)
@@ -135,7 +136,7 @@ package api
  *
  *   scala> fmX.set(3) // NOTE: can set an underlying value of an immutable field!
  *
- *   scala> val fieldY = typeOf[C].declaration(newTermName("y")).asTerm.accessed.asTerm
+ *   scala> val fieldY = typeOf[C].declaration(TermName("y")).asTerm.accessed.asTerm
  *   fieldY: reflect.runtime.universe.TermSymbol = variable y
  *
  *   scala> val fmY = im.reflectField(fieldY)
@@ -152,7 +153,7 @@ package api
  *
  * '''[[scala.reflect.api.Mirrors#ClassMirror]]'''. Used for creating invoker mirrors for constructors.
  * Entry points: for ''static classes'' `val cm1 = m.reflectClass(<class symbol>)`,
- * for ''inner classes'' `val mm2 = im.reflectClass(<module symbol>)`.
+ * for ''inner classes'' `val mm2 = im.reflectClass(<class symbol>)`.
  * Example:
  * {{{
  *   scala> case class C(x: Int)
@@ -224,7 +225,12 @@ trait Mirrors { self: Universe =>
   /** Abstracts the runtime representation of a class on the underlying platform.
    *  @group Mirrors
    */
-  type RuntimeClass >: Null
+  type RuntimeClass >: Null <: AnyRef
+
+  /** Has no special methods. Is here to provide erased identity for `RuntimeClass`.
+   *  @group API
+   */
+  trait RuntimeClassApi
 
   // todo. an improvement might be having mirrors reproduce the structure of the reflection domain
   // e.g. a ClassMirror could also have a list of fields, methods, constructors and so on
@@ -254,8 +260,8 @@ trait Mirrors { self: Universe =>
      *  Note also that only accessor MethodMirrors, but not FieldMirrors will accurately reflect overriding behavior.
      *
      *  To get a field symbol by the name of the field you would like to reflect,
-     *  use `<this mirror>.symbol.typeSignature.member(newTermName(<name of the field>)).asTerm.accessed`.
-     *  For further information about member lookup refer to `Symbol.typeSignature`.
+     *  use `<this mirror>.symbol.info.member(TermName(<name of the field>)).asTerm.accessed`.
+     *  For further information about member lookup refer to `Symbol.info`.
      *
      *  The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
      *  It must be a member (declared or inherited) of the class of the instance underlying this mirror.
@@ -274,8 +280,8 @@ trait Mirrors { self: Universe =>
      *  that can be used to invoke the method provided.
      *
      *  To get a method symbol by the name of the method you would like to reflect,
-     *  use `<this mirror>.symbol.typeSignature.member(newTermName(<name of the method>)).asMethod`.
-     *  For further information about member lookup refer to `Symbol.typeSignature`.
+     *  use `<this mirror>.symbol.info.member(TermName(<name of the method>)).asMethod`.
+     *  For further information about member lookup refer to `Symbol.info`.
      *
      *  The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
      *  It must be a member (declared or inherited) of the instance underlying this mirror.
@@ -286,8 +292,8 @@ trait Mirrors { self: Universe =>
      *  that can be used to create instances of the class, inspect its companion object or perform further reflections.
      *
      *  To get a class symbol by the name of the class you would like to reflect,
-     *  use `<this mirror>.symbol.typeSignature.member(newTypeName(<name of the class>)).asClass`.
-     *  For further information about member lookup refer to `Symbol.typeSignature`.
+     *  use `<this mirror>.symbol.info.member(newTypeName(<name of the class>)).asClass`.
+     *  For further information about member lookup refer to `Symbol.info`.
      *
      *  The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
      *  It must be a member (declared or inherited) of the instance underlying this mirror.
@@ -298,8 +304,8 @@ trait Mirrors { self: Universe =>
      *  that can be used to get the instance of the object or inspect its companion class.
      *
      *  To get a module symbol by the name of the object you would like to reflect,
-     *  use `<this mirror>.symbol.typeSignature.member(newTermName(<name of the object>)).asModule`.
-     *  For further information about member lookup refer to `Symbol.typeSignature`.
+     *  use `<this mirror>.symbol.info.member(TermName(<name of the object>)).asModule`.
+     *  For further information about member lookup refer to `Symbol.info`.
      *
      *  The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
      *  It must be a member (declared or inherited) of the instance underlying this mirror.
@@ -350,6 +356,11 @@ trait Mirrors { self: Universe =>
      *  the value of the base field. To achieve overriding behavior, use reflectMethod on an accessor.
      */
     def set(value: Any): Unit
+
+    /** Creates a new mirror which uses the same symbol, but is bound to a different receiver.
+     *  This is significantly faster than recreating the mirror from scratch.
+     */
+    def bind(newReceiver: Any): FieldMirror
   }
 
   /** A mirror that reflects a method.
@@ -371,6 +382,11 @@ trait Mirrors { self: Universe =>
      *  with invoking the corresponding method or constructor.
      */
     def apply(args: Any*): Any
+
+    /** Creates a new mirror which uses the same symbol, but is bound to a different receiver.
+     *  This is significantly faster than recreating the mirror from scratch.
+     */
+    def bind(newReceiver: Any): MethodMirror
   }
 
   /** A mirror that reflects the instance or static parts of a runtime class.
@@ -421,8 +437,8 @@ trait Mirrors { self: Universe =>
      *  that can be used to invoke it and construct instances of this mirror's symbols.
      *
      *  To get a constructor symbol you would like to reflect,
-     *  use `<this mirror>.symbol.typeSignature.member(nme.CONSTRUCTOR).asMethod`.
-     *  For further information about member lookup refer to `Symbol.typeSignature`.
+     *  use `<this mirror>.symbol.info.member(termNames.CONSTRUCTOR).asMethod`.
+     *  For further information about member lookup refer to `Symbol.info`.
      *
      *  The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
      *  It must be a member (declared or inherited) of the class underlying this mirror.
@@ -488,7 +504,7 @@ trait Mirrors { self: Universe =>
     /** A class symbol for the specified runtime class.
      *  @return The class symbol for the runtime class in the current class loader.
      *  @throws java.lang.ClassNotFoundException if no class with that name exists
-     *  @throws scala.reflect.internal.MissingRequirementError if no corresponding symbol exists
+     *  @throws scala.reflect.ScalaReflectionException if no corresponding symbol exists
      *  to do: throws anything else?
      */
     def classSymbol(rtcls: RuntimeClass): ClassSymbol
@@ -496,7 +512,7 @@ trait Mirrors { self: Universe =>
     /** A module symbol for the specified runtime class.
      *  @return The module symbol for the runtime class in the current class loader.
      *  @throws java.lang.ClassNotFoundException if no class with that name exists
-     *  @throws scala.reflect.internal.MissingRequirementError if no corresponding symbol exists
+     *  @throws scala.reflect.ScalaReflectionException if no corresponding symbol exists
      *  to do: throws anything else?
      */
     def moduleSymbol(rtcls: RuntimeClass): ModuleSymbol
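
A brief runtime-reflection sketch (illustrative, not part of the patch) showing the member lookups via info/TermName recommended above, plus the new MethodMirror.bind; the class C is hypothetical:

    import scala.reflect.runtime.{currentMirror => cm}
    import scala.reflect.runtime.universe._

    class C { val x = 2; def twice(i: Int) = i * 2 }

    val im = cm.reflect(new C)

    // Member lookup now goes through `info` and the TermName constructor.
    val getterX = typeOf[C].member(TermName("x")).asMethod
    val methodT = typeOf[C].member(TermName("twice")).asMethod

    println(im.reflectMethod(getterX)())    // 2
    println(im.reflectMethod(methodT)(21))  // 42

    // bind reuses the reflected symbol against a different receiver,
    // which is cheaper than recreating the mirror from scratch.
    val rebound = im.reflectMethod(methodT).bind(new C)
    println(rebound(10))                    // 20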
diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala
index 7c12f18..fe5f47c 100644
--- a/src/reflect/scala/reflect/api/Names.scala
+++ b/src/reflect/scala/reflect/api/Names.scala
@@ -1,6 +1,9 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
+import scala.language.implicitConversions
+
 /**
  * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
@@ -30,55 +33,49 @@ trait Names {
    *  Enables an alternative notation `"map": TermName` as opposed to `newTermName("map")`.
    *  @group Names
    */
-  implicit def stringToTermName(s: String): TermName = newTermName(s)
+  @deprecated("Use explicit `TermName(s)` instead", "2.11.0")
+  implicit def stringToTermName(s: String): TermName = TermName(s)
 
   /** An implicit conversion from String to TypeName.
    *  Enables an alternative notation `"List": TypeName` as opposed to `newTypeName("List")`.
    *  @group Names
    */
-  implicit def stringToTypeName(s: String): TypeName = newTypeName(s)
+  @deprecated("Use explicit `TypeName(s)` instead", "2.11.0")
+  implicit def stringToTypeName(s: String): TypeName = TypeName(s)
 
   /** The abstract type of names.
    *  @group Names
    */
-  type Name >: Null <: NameApi
-
-  /** A tag that preserves the identity of the `Name` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val NameTag: ClassTag[Name]
+  type Name >: Null <: AnyRef with NameApi
 
   /** The abstract type of names representing types.
    *  @group Names
    */
-  type TypeName >: Null <: Name
+  type TypeName >: Null <: TypeNameApi with Name
 
-  /** A tag that preserves the identity of the `TypeName` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
+  /** Has no special methods. Is here to provide erased identity for `TypeName`.
+   *  @group API
    */
-implicit val TypeNameTag: ClassTag[TypeName]
+  trait TypeNameApi
 
   /** The abstract type of names representing terms.
    *  @group Names
    */
-  type TermName >: Null <: Name
+  type TermName >: Null <: TermNameApi with Name
 
-  /** A tag that preserves the identity of the `TermName` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
+  /** Has no special methods. Is here to provide erased identity for `TermName`.
+   *  @group API
    */
-  implicit val TermNameTag: ClassTag[TermName]
+  trait TermNameApi
 
   /** The API of Name instances.
    *  @group API
    */
   abstract class NameApi {
-    /** Checks wether the name is a a term name */
+    /** Checks whether the name is a term name */
     def isTermName: Boolean
 
-    /** Checks wether the name is a a type name */
+    /** Checks whether the name is a type name */
     def isTypeName: Boolean
 
     /** Returns a term name that wraps the same string as `this` */
@@ -90,11 +87,13 @@ implicit val TypeNameTag: ClassTag[TypeName]
     /** Replaces all occurrences of \$op_names in this name by corresponding operator symbols.
      *  Example: `foo_\$plus\$eq` becomes `foo_+=`
      */
+    @deprecated("Use `decodedName.toString` instead", "2.11.0")
     def decoded: String
 
     /** Replaces all occurrences of operator symbols in this name by corresponding \$op_names.
      *  Example: `foo_+=` becomes `foo_\$plus\$eq`.
      */
+    @deprecated("Use `encodedName.toString` instead", "2.11.0")
     def encoded: String
 
     /** The decoded name, still represented as a name.
@@ -109,10 +108,38 @@ implicit val TypeNameTag: ClassTag[TypeName]
   /** Create a new term name.
    *  @group Names
    */
+  @deprecated("Use TermName instead", "2.11.0")
   def newTermName(s: String): TermName
 
   /** Creates a new type name.
    *  @group Names
    */
+  @deprecated("Use TypeName instead", "2.11.0")
   def newTypeName(s: String): TypeName
+
+  /** The constructor/extractor for `TermName` instances.
+   *  @group Extractors
+   */
+  val TermName: TermNameExtractor
+
+  /** An extractor class to create and pattern match with syntax `TermName(s)`.
+   *  @group Extractors
+   */
+  abstract class TermNameExtractor {
+    def apply(s: String): TermName
+    def unapply(name: TermName): Option[String]
+  }
+
+  /** The constructor/extractor for `TypeName` instances.
+   *  @group Extractors
+   */
+  val TypeName: TypeNameExtractor
+
+  /** An extractor class to create and pattern match with syntax `TypeName(s)`.
+   *  @group Extractors
+   */
+  abstract class TypeNameExtractor {
+    def apply(s: String): TypeName
+    def unapply(name: TypeName): Option[String]
+  }
 }
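
A minimal sketch (illustrative, not part of the patch) of the TermName/TypeName constructors and extractors introduced above, which replace the deprecated newTermName/newTypeName and the implicit string conversions:

    import scala.reflect.runtime.universe._

    // Construction goes through the new factories.
    val field = TermName("size")
    val klass = TypeName("List")

    // The extractors support pattern matching; the *Api traits give the
    // abstract Name types enough erased identity for the type tests to work.
    def describe(n: Name): String = n match {
      case TermName(s) => s"term $s"
      case TypeName(s) => s"type $s"
    }

    println(describe(field))   // term size
    println(describe(klass))   // type List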
diff --git a/src/reflect/scala/reflect/api/Position.scala b/src/reflect/scala/reflect/api/Position.scala
index 63c6762..9d1b7c3 100644
--- a/src/reflect/scala/reflect/api/Position.scala
+++ b/src/reflect/scala/reflect/api/Position.scala
@@ -1,17 +1,52 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
 import scala.reflect.macros.Attachments
 
 /**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *  <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
- * Position tracks the origin of [[Symbols#Symbol symbols]] and [[Trees#Tree tree nodes]]. They are commonly used when
- * displaying warnings and errors, to indicate the incorrect point in the program.
+ *  Position tracks the origin of [[Symbols#Symbol symbols]] and [[Trees#Tree tree nodes]]. They are commonly used when
+ *  displaying warnings and errors, to indicate the incorrect point in the program.
  *
- * <b>Please note that this trait may be refactored in future versions of the Scala reflection API.</b>
+ *  Every non-empty position refers to a SourceFile and three character
+ *  offsets within it: start, end, and point. The point is where the ^ belongs when
+ *  issuing an error message, usually a Name. A range position can be designated
+ *  as transparent, which excuses it from maintaining the invariants to follow. If
+ *  a transparent position has opaque children, those are considered as if they were
+ *  the direct children of the transparent position's parent.
  *
- * For more information about `Position`s, see the [[http://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]]
+ *  Note: some of these invariants actually apply to the trees which carry
+ *  the positions, but they are phrased as if the positions themselves were
+ *  the parent/children for conciseness.
+ *
+ *  Invariant 1: in a focused/offset position, start == point == end
+ *  Invariant 2: in a range position,          start <= point <  end
+ *  Invariant 3: an offset position never has a child with a range position
+ *  Invariant 4: every range position child of a range position parent is contained within its parent
+ *  Invariant 5: opaque range position siblings overlap at most at a single point
+ *
+ *  The following tests are useful on positions:
+ *
+ *  pos.isDefined     true if position is not an UndefinedPosition (those being NoPosition and FakePos)
+ *  pos.isRange       true if position is a range (opaque or transparent) which implies start < end
+ *  pos.isOpaqueRange true if position is an opaque range
+ *
+ *  The following accessor methods are provided - an exception will be thrown if
+ *  point/start/end are attempted on an UndefinedPosition.
+ *
+ *  pos.source       The source file of the position, or NoSourceFile if unavailable
+ *  pos.point        The offset of the point
+ *  pos.start        The (inclusive) start offset, or the point of an offset position
+ *  pos.end          The (exclusive) end offset, or the point of an offset position
+ *
+ *  The following conversion methods are often used:
+ *
+ *  pos.focus           Converts a range position to an offset position focused on the point
+ *  pos.makeTransparent Convert an opaque range into a transparent range
+ *
+ *  For more information about `Position`s, see the [[http://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]]
  *
  *  @groupname Common   Commonly used methods
  *  @group ReflectionAPI
@@ -19,23 +54,9 @@ import scala.reflect.macros.Attachments
 trait Position extends Attachments {
 
   /** @inheritdoc */
-  type Pos >: Null <: Position
+  type Pos >: Null <: AnyRef with Position
 
-  /** Java file corresponding to the source file of this position.
-   *
-   *  The return type is `scala.reflect.io.AbstractFile`, which belongs to an experimental part of Scala reflection.
-   *  It should not be used unless you know what you are doing. In subsequent releases, this API will be refined
-   *  and exposed as a part of scala.reflect.api.
-   *
-   *  @group Common
-   */
-  def source: scala.reflect.internal.util.SourceFile
-
-  /** Is this position neither a NoPosition nor a FakePosition?
-   *  If isDefined is true, offset and source are both defined.
-   *  @group Common
-   */
-  def isDefined: Boolean
+  ////////////////// POSITION FLAVORS //////////////////
 
   /** Is this position a range position? */
   def isRange: Boolean
@@ -46,14 +67,18 @@ trait Position extends Attachments {
   /** Is this position a non-transparent range position? */
   def isOpaqueRange: Boolean
 
+  /** If this is a range position, the offset position of its point.
+   *  Otherwise the position itself
+   */
+  def focus: Pos
+
   /** If opaque range, make this position transparent. */
   def makeTransparent: Pos
 
-  /** The start of the position's range, error if not a range position. */
-  def start: Int
+  ////////////////// POSITION ESSENTIALS //////////////////
 
-  /** The start of the position's range, or point if not a range position. */
-  def startOrPoint: Int
+  /** The start of the position's range, or the point if not a range position. */
+  def start: Int
 
   /** The point (where the ^ is) of the position, which is easiest to access using the [[line]] and [[column]] values.
    *  The [[lineContent line content]] is also available.
@@ -61,104 +86,122 @@ trait Position extends Attachments {
    */
   def point: Int
 
-  /** The point (where the ^ is) of the position, or else `default` if undefined.
+  /** The end of the position's range, or the point if not a range position.
+   */
+  def end: Int
+
+  /** Java file corresponding to the source file of this position.
+   *
+   *  The return type is `scala.reflect.io.AbstractFile`, which belongs to an experimental part of Scala reflection.
+   *  It should not be used unless you know what you are doing. In subsequent releases, this API will be refined
+   *  and exposed as a part of scala.reflect.api.
+   *
    *  @group Common
    */
-  def pointOrElse(default: Int): Int
+  def source: scala.reflect.internal.util.SourceFile
 
-  /** The end of the position's range, error if not a range position.
+  /** The position indicates a [[column `column`]] and the `line` in the source file.
+   *  @group Common
    */
-  def end: Int
+  def line: Int
 
-  /** The end of the position's range, or point if not a range position.
+  /** The position indicates a `column` and the [[line `line`]] in the source file.
+   *  @group Common
    */
-  def endOrPoint: Int
+  def column: Int
+
+  ////////////////// POSITION FACTORIES //////////////////
 
-  /** The same position with a different start value (if a range).
+  /** Returns a new position with the same attributes, but a different start value (if a range).
    */
   def withStart(off: Int): Pos
 
-  /** The same position with a different end value (if a range).
+  /** Returns a new position with the same attributes, but a different end value (if a range).
    */
   def withEnd(off: Int): Pos
 
-  /** The same position with a different point value (if a range or offset).
+  /** Returns a new position with the same attributes, but a different point value (if a range or offset).
    */
   def withPoint(off: Int): Pos
 
-  /** If this is a range, the union with the other range, with the point of this position.
-   *  Otherwise, this position
+  ////////////////// STUFF //////////////////
+
+  /** Is this position not a NoPosition?
+   *  If isDefined is true, offset and source are both defined.
+   *  @group Common
    */
-  def union(pos: Pos): Pos
+  @deprecated("Removed from the public API", "2.11.0") def isDefined: Boolean
 
-  /** If this is a range position, the offset position of its point.
-   *  Otherwise the position itself
+  /** The point (where the ^ is) of the position, or else `default` if undefined.
+   *  @group Common
    */
-  def focus: Pos
+  @deprecated("Removed from the public API", "2.11.0") def pointOrElse(default: Int): Int
+
+  /** The start of the position's range, or point if not a range position. */
+  @deprecated("Removed from the public API", "2.11.0") def startOrPoint: Int
+
+  /** The end of the position's range, or point if not a range position.
+   */
+  @deprecated("Removed from the public API", "2.11.0") def endOrPoint: Int
+
+  /** If this is a range, the union with the other range, with the point of this position.
+   *  Otherwise, this position
+   */
+  @deprecated("Removed from the public API", "2.11.0") def union(pos: Pos): Pos
 
   /** If this is a range position, the offset position of its start.
    *  Otherwise the position itself
    */
-  def focusStart: Pos
+  @deprecated("Removed from the public API", "2.11.0") def focusStart: Pos
 
   /** If this is a range position, the offset position of its end.
    *  Otherwise the position itself
    */
-  def focusEnd: Pos
+  @deprecated("Removed from the public API", "2.11.0") def focusEnd: Pos
 
   /** Does this position include the given position `pos`?
    *  This holds if `this` is a range position and its range [start..end]
    *  is the same or covers the range of the given position, which may or may not be a range position.
    */
-  def includes(pos: Pos): Boolean
+  @deprecated("Removed from the public API", "2.11.0") def includes(pos: Pos): Boolean
 
   /** Does this position properly include the given position `pos` ("properly" meaning their
    *  ranges are not the same)?
    */
-  def properlyIncludes(pos: Pos): Boolean
+  @deprecated("Removed from the public API", "2.11.0") def properlyIncludes(pos: Pos): Boolean
 
   /** Does this position precede that position?
    *  This holds if both positions are defined and the end point of this position
    *  is not larger than the start point of the given position.
    */
-  def precedes(pos: Pos): Boolean
+  @deprecated("Removed from the public API", "2.11.0") def precedes(pos: Pos): Boolean
 
   /** Does this position properly precede the given position `pos` ("properly" meaning their ranges
    *  do not share a common point).
    */
-  def properlyPrecedes(pos: Pos): Boolean
+  @deprecated("Removed from the public API", "2.11.0") def properlyPrecedes(pos: Pos): Boolean
 
   /** Does this position overlap with that position?
    *  This holds if both positions are ranges and there is an interval of
    *  non-zero length that is shared by both position ranges.
    */
-  def overlaps(pos: Pos): Boolean
+  @deprecated("Removed from the public API", "2.11.0") def overlaps(pos: Pos): Boolean
 
   /** Does this position cover the same range as that position?
    *  Holds only if both position are ranges
    */
-  def sameRange(pos: Pos): Boolean
-
-  /** The position indicates a [[column `column`]] and the `line` in the source file.
-   *  @group Common
-   */
-  def line: Int
-
-  /** The position indicates a `column` and the [[line `line`]] in the source file.
-   *  @group Common
-   */
-  def column: Int
+  @deprecated("Removed from the public API", "2.11.0") def sameRange(pos: Pos): Boolean
 
   /** Convert this to a position around `point` that spans a single source line
    */
-  def toSingleLine: Pos
+  @deprecated("Removed from the public API", "2.11.0") def toSingleLine: Pos
 
   /** The content of the line this Position refers to.
    *  @group Common
    */
-  def lineContent: String
+  @deprecated("Removed from the public API", "2.11.0") def lineContent: String
 
   /** Show a textual representation of the position.
    */
-  def show: String
+  @deprecated("Use `universe.show(position)` instead", "2.11.0") def show: String
 }
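
A short sketch (illustrative, not part of the patch) of the surviving Position essentials (start/point/end, line/column) on a toolbox-parsed tree; it assumes scala-compiler is on the classpath for the toolbox:

    import scala.reflect.runtime.universe._
    import scala.reflect.runtime.currentMirror
    import scala.tools.reflect.ToolBox

    val tb   = currentMirror.mkToolBox()
    val tree = tb.parse("val answer = 42")
    val pos  = tree.pos

    if (pos != NoPosition) {
      // Offset positions satisfy start == point == end; ranges require -Yrangepos.
      println(s"point=${pos.point} line=${pos.line} column=${pos.column}")
      println(if (pos.isRange) s"range ${pos.start}..${pos.end}" else "offset position")
    }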
diff --git a/src/reflect/scala/reflect/api/Positions.scala b/src/reflect/scala/reflect/api/Positions.scala
index 87f00fd..63ad605 100644
--- a/src/reflect/scala/reflect/api/Positions.scala
+++ b/src/reflect/scala/reflect/api/Positions.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
 /**
@@ -18,13 +19,7 @@ trait Positions {
    *  The main documentation entry about positions is located at [[scala.reflect.api.Position]].
    *  @group Positions
    */
-  type Position >: Null <: scala.reflect.api.Position { type Pos = Position }
-
-  /** A tag that preserves the identity of the `Position` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val PositionTag: ClassTag[Position]
+  type Position >: Null <: AnyRef with scala.reflect.api.Position { type Pos = Position }
 
   /** A special "missing" position.
    *  @group Positions
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
index 85ddcc6..92ae6d8 100644
--- a/src/reflect/scala/reflect/api/Printers.scala
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
 import java.io.{ PrintWriter, StringWriter }
@@ -47,7 +48,7 @@ import java.io.{ PrintWriter, StringWriter }
  *  res1: String = Block(List(
  *    ClassDef(Modifiers(FINAL), newTypeName("C"), List(), Template(
  *      List(Ident(newTypeName("AnyRef"))),
- *      emptyValDef,
+ *      noSelfType,
  *      List(
  *        DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(),
  *          Block(List(
@@ -67,11 +68,11 @@ import java.io.{ PrintWriter, StringWriter }
  *  scala> import scala.reflect.runtime.{currentMirror => cm}
  *  import scala.reflect.runtime.{currentMirror=>cm}
  *
- *  scala> showRaw(cm.mkToolBox().typeCheck(tree), printTypes = true)
+ *  scala> showRaw(cm.mkToolBox().typecheck(tree), printTypes = true)
  *  res2: String = Block[1](List(
  *    ClassDef[2](Modifiers(FINAL), newTypeName("C"), List(), Template[3](
  *      List(Ident[4](newTypeName("AnyRef"))),
- *      emptyValDef,
+ *      noSelfType,
  *      List(
  *        DefDef[2](Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree[3](),
  *          Block[1](List(
@@ -141,66 +142,97 @@ trait Printers { self: Universe =>
     def print(args: Any*)
     protected var printTypes = false
     protected var printIds = false
+    protected var printOwners = false
     protected var printKinds = false
     protected var printMirrors = false
+    protected var printPositions = false
     def withTypes: this.type = { printTypes = true; this }
     def withoutTypes: this.type = { printTypes = false; this }
     def withIds: this.type = { printIds = true; this }
     def withoutIds: this.type = { printIds = false; this }
+    def withOwners: this.type = { printOwners = true; this }
+    def withoutOwners: this.type = { printOwners = false; this }
     def withKinds: this.type = { printKinds = true; this }
     def withoutKinds: this.type = { printKinds = false; this }
     def withMirrors: this.type = { printMirrors = true; this }
     def withoutMirrors: this.type = { printMirrors = false; this }
+    def withPositions: this.type = { printPositions = true; this }
+    def withoutPositions: this.type = { printPositions = false; this }
   }
 
   /** @group Printers */
-  case class BooleanFlag(val value: Option[Boolean])
+  case class BooleanFlag(value: Option[Boolean])
   /** @group Printers */
   object BooleanFlag {
     import scala.language.implicitConversions
     implicit def booleanToBooleanFlag(value: Boolean): BooleanFlag = BooleanFlag(Some(value))
     implicit def optionToBooleanFlag(value: Option[Boolean]): BooleanFlag = BooleanFlag(value)
+    import scala.reflect.internal.settings.MutableSettings
+    implicit def settingToBooleanFlag(setting: MutableSettings#BooleanSetting): BooleanFlag = BooleanFlag(Some(setting.value))
   }
 
   /** @group Printers */
-  protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String = {
+  protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printOwners: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None, printPositions: BooleanFlag = None): String = {
     val buffer = new StringWriter()
     val writer = new PrintWriter(buffer)
-    var printer = mkPrinter(writer)
+    val printer = mkPrinter(writer)
     printTypes.value.map(printTypes => if (printTypes) printer.withTypes else printer.withoutTypes)
     printIds.value.map(printIds => if (printIds) printer.withIds else printer.withoutIds)
+    printOwners.value.map(printOwners => if (printOwners) printer.withOwners else printer.withoutOwners)
     printKinds.value.map(printKinds => if (printKinds) printer.withKinds else printer.withoutKinds)
     printMirrors.value.map(printMirrors => if (printMirrors) printer.withMirrors else printer.withoutMirrors)
+    printPositions.value.map(printPositions => if (printPositions) printer.withPositions else printer.withoutPositions)
     printer.print(what)
     writer.flush()
     buffer.toString
   }
 
   /** By default trees are printed with `show`
-   * @group Printers
+   *  @group Printers
    */
   override protected def treeToString(tree: Tree) = show(tree)
 
   /** Renders a representation of a reflection artifact
-   * as desugared Java code.
+   *  as desugared Scala code.
    *
-   * @group Printers
+   *  @group Printers
    */
-  def show(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String =
-    render(any, newTreePrinter(_), printTypes, printIds, printKinds, printMirrors)
+  def show(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printOwners: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None, printPositions: BooleanFlag = None): String =
+    render(any, newTreePrinter(_), printTypes, printIds, printOwners, printKinds, printMirrors, printPositions)
 
   /** Hook to define what `show(...)` means.
    * @group Printers
    */
   protected def newTreePrinter(out: PrintWriter): TreePrinter
 
-  /** Renders internal structure of a reflection artifact as the
-   * visualization of a Scala syntax tree.
+  /**
+   * Renders the code of the passed tree, so that:
+   *  1) it can later be compiled by scalac retaining the same meaning, and
+   *  2) it looks pretty.
+   *  #1 is available for both unattributed and attributed trees;
+   *  #2 is more or less okay indentation-wise, but at the moment there's a lot of desugaring
+   *  left in place, and that's what we plan to improve in the future.
+   *  The printTypes, printIds and printPositions options have the same meaning as for TreePrinter;
+   *  the printRootPkg option is available only for attributed trees.
    *
+   *  @group Printers
+   */
+  def showCode(tree: Tree, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printOwners: BooleanFlag = None, printPositions: BooleanFlag = None, printRootPkg: Boolean = false) =
+    render(tree, newCodePrinter(_, tree, printRootPkg), printTypes, printIds, printOwners, printKinds = None, printMirrors = None, printPositions)
+
+  /**
+   * Hook to define what `showCode(...)` means.
    * @group Printers
    */
-  def showRaw(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String =
-    render(any, newRawTreePrinter(_), printTypes, printIds, printKinds, printMirrors)
+  protected def newCodePrinter(out: PrintWriter, tree: Tree, printRootPkg: Boolean): TreePrinter
+
+  /** Renders internal structure of a reflection artifact as the
+   *  visualization of a Scala syntax tree.
+   *
+   *  @group Printers
+   */
+  def showRaw(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printOwners: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None, printPositions: BooleanFlag = None): String =
+    render(any, newRawTreePrinter(_), printTypes, printIds, printOwners, printKinds, printMirrors, printPositions)
 
   /** Hook to define what `showRaw(...)` means.
    * @group Printers
@@ -222,8 +254,23 @@ trait Printers { self: Universe =>
    */
   def show(flags: FlagSet): String
 
+  /** Renders a prettified representation of a position.
+   * @group Printers
+   */
+  def show(position: Position): String
+
   /** Renders internal structure of a flag set.
    * @group Printers
    */
   def showRaw(flags: FlagSet): String = flags.toString
+
+  /** Renders internal structure of a position.
+   * @group Printers
+   */
+  def showRaw(position: Position): String = position.toString
+
+  /** Renders a string that represents a declaration of this symbol written in Scala.
+   * @group Printers
+   */
+  def showDecl(sym: Symbol): String
 }
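
A quick sketch (illustrative, not part of the patch) of the printer entry points after the changes above, including the new showCode and the extra diagnostic flags:

    import scala.reflect.runtime.universe._

    val tree = q"final class C { def x = 2 }"

    // Prettified, Scala-like rendering.
    println(show(tree))

    // Raw AST structure; the BooleanFlag parameters accept plain Booleans.
    println(showRaw(tree, printIds = true, printPositions = true))

    // New in 2.11: renders code that scalac can compile back to the same meaning.
    println(showCode(tree))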
diff --git a/src/reflect/scala/reflect/api/Quasiquotes.scala b/src/reflect/scala/reflect/api/Quasiquotes.scala
new file mode 100644
index 0000000..e905aa4
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Quasiquotes.scala
@@ -0,0 +1,25 @@
+package scala.reflect
+package api
+
+trait Quasiquotes { self: Universe =>
+
+  /** Implicit class that introduces `q`, `tq`, `cq`, `pq` and `fq` string interpolators
+   *  that are also known as quasiquotes. With their help you can easily manipulate
+   *  Scala reflection ASTs.
+   *
+   *  @see [[http://docs.scala-lang.org/overviews/quasiquotes/intro.html]]
+   */
+  implicit class Quasiquote(ctx: StringContext) {
+    protected trait api {
+      // implementation is hardwired to `dispatch` method of `scala.tools.reflect.quasiquotes.Quasiquotes`
+      // using the mechanism implemented in `scala.tools.reflect.FastTrack`
+      def apply[T](args: T*): Tree = macro ???
+      def unapply(scrutinee: Any): Any = macro ???
+    }
+    object q extends api
+    object tq extends api
+    object cq extends api
+    object pq extends api
+    object fq extends api
+  }
+}
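
A minimal sketch (illustrative, not part of the patch) of the q/tq/cq/pq/fq interpolators introduced by the Quasiquote implicit class above, used both for construction and for pattern matching:

    import scala.reflect.runtime.universe._

    // Construction: unquote trees and liftable values into a larger tree.
    val x     = q"x"
    val plus1 = q"$x + 1"

    // Deconstruction: the same interpolators act as extractors.
    val q"$lhs + $rhs" = plus1
    println(showCode(lhs) + " / " + showCode(rhs))   // x / 1

    // tq builds type trees, cq builds cases, pq builds patterns, fq builds for-enumerators.
    val listOfInt = tq"List[Int]"
    val cases     = List(cq"0 => true", cq"_ => false")
    val matchTree = q"$x match { case ..$cases }"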
diff --git a/src/reflect/scala/reflect/api/Scopes.scala b/src/reflect/scala/reflect/api/Scopes.scala
index 7f97993..c9142fb 100644
--- a/src/reflect/scala/reflect/api/Scopes.scala
+++ b/src/reflect/scala/reflect/api/Scopes.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
 /**
@@ -15,7 +16,7 @@ package api
  * there is the `newScopeWith` function.
  *
  * Additional functionality is exposed in member scopes that are returned by
- * `members` and `declarations` defined in [[scala.reflect.api.Types#TypeApi]].
+ * `members` and `decls` defined in [[scala.reflect.api.Types#TypeApi]].
  * Such scopes support the `sorted` method, which sorts members in declaration order.
  *
  * @group ReflectionAPI
@@ -26,29 +27,18 @@ trait Scopes { self: Universe =>
    *  @template
    *  @group Scopes
    */
-  type Scope >: Null <: ScopeApi
+  type Scope >: Null <: AnyRef with ScopeApi
 
   /** The API that all scopes support
    *  @group API
    */
   trait ScopeApi extends Iterable[Symbol]
 
-  /** A tag that preserves the identity of the `Scope` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ScopeTag: ClassTag[Scope]
-
-  /** Create a new scope with the given initial elements.
-   *  @group Scopes
-   */
-  def newScopeWith(elems: Symbol*): Scope
-
   /** The type of member scopes, as in class definitions, for example.
    *  @template
    *  @group Scopes
    */
-  type MemberScope >: Null <: Scope with MemberScopeApi
+  type MemberScope >: Null <: AnyRef with MemberScopeApi with Scope
 
   /** The API that all member scopes support
    *  @group API
@@ -61,10 +51,4 @@ trait Scopes { self: Universe =>
      */
     def sorted: List[Symbol]
   }
-
-  /** A tag that preserves the identity of the `MemberScope` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val MemberScopeTag: ClassTag[MemberScope]
-}
\ No newline at end of file
+}
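
A small sketch (illustrative, not part of the patch) of a member scope obtained via decls, showing the sorted method described above; the Account class is hypothetical:

    import scala.reflect.runtime.universe._

    class Account { val id = 1L; def deposit(n: Int): Unit = (); def balance: Int = 0 }

    // `decls` returns a MemberScope (Scope itself is just an Iterable[Symbol]).
    val scope: MemberScope = typeOf[Account].decls

    // MemberScope adds `sorted`, yielding members in declaration order.
    scope.sorted.foreach(sym => println(sym.name))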
diff --git a/src/reflect/scala/reflect/api/StandardDefinitions.scala b/src/reflect/scala/reflect/api/StandardDefinitions.scala
index 721b0bc..524b7ea 100644
--- a/src/reflect/scala/reflect/api/StandardDefinitions.scala
+++ b/src/reflect/scala/reflect/api/StandardDefinitions.scala
@@ -2,7 +2,8 @@
  * Copyright 2005-2013 LAMP/EPFL
  * @author  Martin Odersky
  */
-package scala.reflect
+package scala
+package reflect
 package api
 
 /**
@@ -130,10 +131,10 @@ trait StandardDefinitions {
      *  scala> val m = typeOf[C].member(newTermName("m")).asMethod
      *  m: reflect.runtime.universe.MethodSymbol = method m
      *
-     *  scala> m.params(0)(0).typeSignature
+     *  scala> m.params(0)(0).info
      *  res1: reflect.runtime.universe.Type = => scala.Int
      *
-     *  scala> showRaw(m.params(0)(0).typeSignature)
+     *  scala> showRaw(m.params(0)(0).info)
      *  res2: String = TypeRef(
      *      ThisType(scala),
      *      scala.<byname>, // <-- ByNameParamClass
@@ -158,10 +159,10 @@ trait StandardDefinitions {
      *  scala> val m = typeOf[C].member(newTermName("m")).asMethod
      *  m: reflect.runtime.universe.MethodSymbol = method m
      *
-     *  scala> m.params(0)(0).typeSignature
+     *  scala> m.params(0)(0).info
      *  res1: reflect.runtime.universe.Type = <repeated...>[Object]
      *
-     *  scala> showRaw(m.params(0)(0).typeSignature)
+     *  scala> showRaw(m.params(0)(0).info)
      *  res2: String = TypeRef(
      *      ThisType(scala),
      *      scala.<repeated...>, // <-- JavaRepeatedParamClass
@@ -183,10 +184,10 @@ trait StandardDefinitions {
      *  scala> val m = typeOf[C].member(newTermName("m")).asMethod
      *  m: reflect.runtime.universe.MethodSymbol = method m
      *
-     *  scala> m.params(0)(0).typeSignature
+     *  scala> m.params(0)(0).info
      *  res1: reflect.runtime.universe.Type = scala.Int*
      *
-     *  scala> showRaw(m.params(0)(0).typeSignature)
+     *  scala> showRaw(m.params(0)(0).info)
      *  res2: String = TypeRef(
      *      ThisType(scala),
      *      scala.<repeated>, // <-- RepeatedParamClass
@@ -213,29 +214,43 @@ trait StandardDefinitions {
     /** The module symbol of module `scala.Some`. */
     def SomeModule: ModuleSymbol
 
-    /** The array of class symbols for classes `scala.ProductX`.
+    /** Function-like API that lets you access the symbol
+     *  of the definition with a given arity and also look
+     *  through all known symbols via `seq`.
+     */
+    abstract class VarArityClassApi extends (Int => Symbol) {
+      def seq: Seq[ClassSymbol]
+    }
+
+    /** Function-like object that maps arity to symbols for classes `scala.ProductX`.
      *   -  0th element is `Unit`
      *   -  1st element is `Product1`
      *   -  ...
      *   - 22nd element is `Product22`
+     *   - 23rd element is `NoSymbol`
+     *   - ...
      */
-    def ProductClass  : Array[ClassSymbol]
+    def ProductClass: VarArityClassApi
 
-    /** The array of class symbols for classes `scala.FunctionX`.
+    /** Function-like object that maps arity to symbols for classes `scala.FunctionX`.
      *   -  0th element is `Function0`
      *   -  1st element is `Function1`
      *   -  ...
      *   - 22nd element is `Function22`
+     *   - 23rd element is `NoSymbol`
+     *   - ...
      */
-    def FunctionClass : Array[ClassSymbol]
+    def FunctionClass: VarArityClassApi
 
-    /** The array of class symbols for classes `scala.TupleX`.
+    /** Function-like object that maps arity to symbols for classes `scala.TupleX`.
      *   -  0th element is `NoSymbol`
-     *   -  1st element is `Product1`
+     *   -  1st element is `Tuple1`
      *   -  ...
-     *   - 22nd element is `Product22`
+     *   - 22nd element is `Tuple22`
+     *   - 23rd element is `NoSymbol`
+     *   - ...
      */
-    def TupleClass: Array[Symbol] // cannot make it Array[ClassSymbol], because TupleClass(0) is supposed to be NoSymbol. weird
+    def TupleClass: VarArityClassApi
 
     /** Contains Scala primitive value classes:
      *   - Byte
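
A brief sketch (illustrative, not part of the patch) of the function-like arity lookups that replace the old symbol arrays above:

    import scala.reflect.runtime.universe._
    import definitions._

    // Arity-indexed lookups now go through function-like objects.
    println(FunctionClass(2).fullName)   // scala.Function2
    println(TupleClass(3).fullName)      // scala.Tuple3
    println(TupleClass(0))               // NoSymbol, printed as <none>

    // `seq` exposes all arities known to the library.
    FunctionClass.seq.take(3).foreach(sym => println(sym.fullName))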
diff --git a/src/reflect/scala/reflect/api/StandardLiftables.scala b/src/reflect/scala/reflect/api/StandardLiftables.scala
new file mode 100644
index 0000000..66ac62c
--- /dev/null
+++ b/src/reflect/scala/reflect/api/StandardLiftables.scala
@@ -0,0 +1,235 @@
+package scala.reflect
+package api
+
+trait StandardLiftables { self: Universe =>
+  import internal._
+  import reificationSupport.{SyntacticTuple, ScalaDot}
+
+  trait StandardLiftableInstances {
+    private def lift[T: Liftable](value: T): Tree            = implicitly[Liftable[T]].apply(value)
+    private def selectScala(names: Name*)                    = names.tail.foldLeft(ScalaDot(names.head)) { Select(_, _) }
+    private def callScala(names: Name*)(args: List[Tree])    = Apply(selectScala(names: _*), args)
+    private def callCollection(name: Name)(args: List[Tree]) = callScala(stdnme.collection, stdnme.immutable, name)(args)
+    private def liftAsLiteral[T]: Liftable[T]                = Liftable { v => Literal(Constant(v)) }
+
+    implicit def liftByte[T <: Byte]: Liftable[T]       = liftAsLiteral[T]
+    implicit def liftShort[T <: Short]: Liftable[T]     = liftAsLiteral[T]
+    implicit def liftChar[T <: Char]: Liftable[T]       = liftAsLiteral[T]
+    implicit def liftInt[T <: Int]: Liftable[T]         = liftAsLiteral[T]
+    implicit def liftLong[T <: Long]: Liftable[T]       = liftAsLiteral[T]
+    implicit def liftFloat[T <: Float]: Liftable[T]     = liftAsLiteral[T]
+    implicit def liftDouble[T <: Double]: Liftable[T]   = liftAsLiteral[T]
+    implicit def liftBoolean[T <: Boolean]: Liftable[T] = liftAsLiteral[T]
+    implicit def liftUnit: Liftable[Unit]               = liftAsLiteral[Unit]
+    implicit def liftString[T <: String]: Liftable[T]   = liftAsLiteral[T]
+
+    implicit def liftScalaSymbol: Liftable[scala.Symbol] = Liftable { v =>
+      callScala(stdnme.Symbol)(Literal(Constant(v.name)) :: Nil)
+    }
+
+    implicit def liftTree[T <: Tree]: Liftable[T]              = Liftable { identity }
+    implicit def liftName[T <: Name]: Liftable[T]              = Liftable { name => Ident(name) }
+    implicit def liftExpr[T <: Expr[_]]: Liftable[T]           = Liftable { expr => expr.tree }
+    implicit def liftType[T <: Type]: Liftable[T]              = Liftable { tpe => TypeTree(tpe) }
+    implicit def liftTypeTag[T <: WeakTypeTag[_]]: Liftable[T] = Liftable { ttag => TypeTree(ttag.tpe) }
+    implicit def liftConstant[T <: Constant]: Liftable[T]      = Liftable { const => Literal(const) }
+
+    implicit def liftArray[T: Liftable]: Liftable[Array[T]]             = Liftable { arr => callScala(stdnme.Array)(arr.map(lift(_)).toList) }
+    implicit def liftVector[T: Liftable]: Liftable[Vector[T]]           = Liftable { vect => callCollection(stdnme.Vector)(vect.map(lift(_)).toList) }
+    implicit def liftList[T: Liftable]: Liftable[List[T]]               = Liftable { lst => callCollection(stdnme.List)(lst.map(lift(_))) }
+    implicit def liftNil: Liftable[Nil.type]                            = Liftable { _ => selectScala(stdnme.collection, stdnme.immutable, stdnme.Nil) }
+    implicit def liftMap[K: Liftable, V: Liftable]: Liftable[Map[K, V]] = Liftable { m => callCollection(stdnme.Map)(m.toList.map(lift(_))) }
+    implicit def liftSet[T: Liftable]: Liftable[Set[T]]                 = Liftable { s => callCollection(stdnme.Set)(s.toList.map(lift(_))) }
+
+    implicit def liftSome[T: Liftable]: Liftable[Some[T]]     = Liftable { case Some(v) => callScala(stdnme.Some)(lift(v) :: Nil) }
+    implicit def liftNone: Liftable[None.type]                = Liftable { _ => selectScala(stdnme.None) }
+    implicit def liftOption[T: Liftable]: Liftable[Option[T]] = Liftable {
+      case some: Some[T]   => lift(some)
+      case none: None.type => lift(none)
+    }
+
+    implicit def liftLeft[L: Liftable, R]: Liftable[Left[L, R]]               = Liftable { case Left(v)  => callScala(stdnme.util, stdnme.Left)(lift(v) :: Nil) }
+    implicit def liftRight[L, R: Liftable]: Liftable[Right[L, R]]             = Liftable { case Right(v) => callScala(stdnme.util, stdnme.Right)(lift(v) :: Nil) }
+    implicit def liftEither[L: Liftable, R: Liftable]: Liftable[Either[L, R]] = Liftable {
+      case left: Left[L, R]   => lift(left)
+      case right: Right[L, R] => lift(right)
+    }
+
+    implicit def liftTuple2[T1, T2](implicit liftT1: Liftable[T1], liftT2: Liftable[T2]): Liftable[Tuple2[T1, T2]] = Liftable { t =>
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: Nil)
+    }
+    implicit def liftTuple3[T1, T2, T3](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3]): Liftable[Tuple3[T1, T2, T3]] = Liftable { t =>
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: Nil)
+    }
+    implicit def liftTuple4[T1, T2, T3, T4](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4]): Liftable[Tuple4[T1, T2, T3, T4]] = Liftable { t =>
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: Nil)
+    }
+    implicit def liftTuple5[T1, T2, T3, T4, T5](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5]): Liftable[Tuple5[T1, T2, T3, T4, T5]] = Liftable { t =>
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: Nil)
+    }
+    implicit def liftTuple6[T1, T2, T3, T4, T5, T6](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6]): Liftable[Tuple6[T1, T2, T3, T4, T5, T6]] = Liftable { t =>
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: Nil)
+    }
+    implicit def liftTuple7[T1, T2, T3, T4, T5, T6, T7](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7]): Liftable[Tuple7[T1, T2, T3, T4, T5, T6, T7]] = Liftable { t =>
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: Nil)
+    }
+    implicit def liftTuple8[T1, T2, T3, T4, T5, T6, T7, T8](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8]): Liftable[Tuple8[T1, T2, T3, T4, T5, T6, T7, T8]] = Liftable { t =>
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: Nil)
+    }
+    implicit def liftTuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8], liftT9: Liftable[T9]): Liftable[Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9]] = Liftable { t =>
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: liftT9(t._9) :: Nil)
+    }
+    implicit def liftTuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8], liftT9: Liftable[T9], liftT10: Liftable[T10]): Liftable[Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]] = Liftable { t =>
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: liftT9(t._9) :: liftT10(t._10) :: Nil)
+    }
+    implicit def liftTuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8], liftT9: Liftable[T9], liftT10: Liftable[T10], liftT11: Liftable[T11]): Liftable[Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]] = Liftable { t =>
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: liftT9(t._9) :: liftT10(t._10) :: liftT11(t._11) :: Nil)
+    }
+    implicit def liftTuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8], liftT9: Liftable[T9], liftT10: Liftable[T10], liftT11: Liftable[T11], liftT12: Liftable[T12]): Liftable[Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]] = Liftable { t =>
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: liftT9(t._9) :: liftT10(t._10) :: liftT11(t._11) :: liftT12(t._12) :: Nil)
+    }
+    implicit def liftTuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8], liftT9: Liftable[T9], liftT10: Liftable[T10], liftT11: Liftable[T11], liftT12: Liftable[T12], liftT13: Liftable[T13]): Liftable[Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]] = Liftable { t =>
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: liftT9(t._9) :: liftT10(t._10) :: liftT11(t._11) :: liftT12(t._12) :: liftT13(t._13) :: Nil)
+    }
+    implicit def liftTuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8], liftT9: Liftable[T9], liftT10: Liftable[T10], liftT11: Liftable[T11], liftT12: Liftable[T12], liftT13: Liftable[T13], liftT14: Liftable[T14]): Liftable[Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T [...]
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: liftT9(t._9) :: liftT10(t._10) :: liftT11(t._11) :: liftT12(t._12) :: liftT13(t._13) :: liftT14(t._14) :: Nil)
+    }
+    implicit def liftTuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8], liftT9: Liftable[T9], liftT10: Liftable[T10], liftT11: Liftable[T11], liftT12: Liftable[T12], liftT13: Liftable[T13], liftT14: Liftable[T14], liftT15: Liftable[T15]): Liftable[Tuple15[T1, T2, T3, T4, T5, T6, T7,  [...]
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: liftT9(t._9) :: liftT10(t._10) :: liftT11(t._11) :: liftT12(t._12) :: liftT13(t._13) :: liftT14(t._14) :: liftT15(t._15) :: Nil)
+    }
+    implicit def liftTuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8], liftT9: Liftable[T9], liftT10: Liftable[T10], liftT11: Liftable[T11], liftT12: Liftable[T12], liftT13: Liftable[T13], liftT14: Liftable[T14], liftT15: Liftable[T15], liftT16: Liftable[T16]): Liftable[Tuple16 [...]
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: liftT9(t._9) :: liftT10(t._10) :: liftT11(t._11) :: liftT12(t._12) :: liftT13(t._13) :: liftT14(t._14) :: liftT15(t._15) :: liftT16(t._16) :: Nil)
+    }
+    implicit def liftTuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8], liftT9: Liftable[T9], liftT10: Liftable[T10], liftT11: Liftable[T11], liftT12: Liftable[T12], liftT13: Liftable[T13], liftT14: Liftable[T14], liftT15: Liftable[T15], liftT16: Liftable[T16], liftT17: Lif [...]
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: liftT9(t._9) :: liftT10(t._10) :: liftT11(t._11) :: liftT12(t._12) :: liftT13(t._13) :: liftT14(t._14) :: liftT15(t._15) :: liftT16(t._16) :: liftT17(t._17) :: Nil)
+    }
+    implicit def liftTuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8], liftT9: Liftable[T9], liftT10: Liftable[T10], liftT11: Liftable[T11], liftT12: Liftable[T12], liftT13: Liftable[T13], liftT14: Liftable[T14], liftT15: Liftable[T15], liftT16: Liftable[T16], liftT17 [...]
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: liftT9(t._9) :: liftT10(t._10) :: liftT11(t._11) :: liftT12(t._12) :: liftT13(t._13) :: liftT14(t._14) :: liftT15(t._15) :: liftT16(t._16) :: liftT17(t._17) :: liftT18(t._18) :: Nil)
+    }
+    implicit def liftTuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8], liftT9: Liftable[T9], liftT10: Liftable[T10], liftT11: Liftable[T11], liftT12: Liftable[T12], liftT13: Liftable[T13], liftT14: Liftable[T14], liftT15: Liftable[T15], liftT16: Liftable[T16], li [...]
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: liftT9(t._9) :: liftT10(t._10) :: liftT11(t._11) :: liftT12(t._12) :: liftT13(t._13) :: liftT14(t._14) :: liftT15(t._15) :: liftT16(t._16) :: liftT17(t._17) :: liftT18(t._18) :: liftT19(t._19) :: Nil)
+    }
+    implicit def liftTuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8], liftT9: Liftable[T9], liftT10: Liftable[T10], liftT11: Liftable[T11], liftT12: Liftable[T12], liftT13: Liftable[T13], liftT14: Liftable[T14], liftT15: Liftable[T15], liftT16: Liftable[T16 [...]
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: liftT9(t._9) :: liftT10(t._10) :: liftT11(t._11) :: liftT12(t._12) :: liftT13(t._13) :: liftT14(t._14) :: liftT15(t._15) :: liftT16(t._16) :: liftT17(t._17) :: liftT18(t._18) :: liftT19(t._19) :: liftT20(t._20) :: Nil)
+    }
+    implicit def liftTuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8], liftT9: Liftable[T9], liftT10: Liftable[T10], liftT11: Liftable[T11], liftT12: Liftable[T12], liftT13: Liftable[T13], liftT14: Liftable[T14], liftT15: Liftable[T15], liftT16: Liftabl [...]
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: liftT9(t._9) :: liftT10(t._10) :: liftT11(t._11) :: liftT12(t._12) :: liftT13(t._13) :: liftT14(t._14) :: liftT15(t._15) :: liftT16(t._16) :: liftT17(t._17) :: liftT18(t._18) :: liftT19(t._19) :: liftT20(t._20) :: liftT21(t._21) :: Nil)
+    }
+    implicit def liftTuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22](implicit liftT1: Liftable[T1], liftT2: Liftable[T2], liftT3: Liftable[T3], liftT4: Liftable[T4], liftT5: Liftable[T5], liftT6: Liftable[T6], liftT7: Liftable[T7], liftT8: Liftable[T8], liftT9: Liftable[T9], liftT10: Liftable[T10], liftT11: Liftable[T11], liftT12: Liftable[T12], liftT13: Liftable[T13], liftT14: Liftable[T14], liftT15: Liftable[T15], liftT16: Li [...]
+      SyntacticTuple(liftT1(t._1) :: liftT2(t._2) :: liftT3(t._3) :: liftT4(t._4) :: liftT5(t._5) :: liftT6(t._6) :: liftT7(t._7) :: liftT8(t._8) :: liftT9(t._9) :: liftT10(t._10) :: liftT11(t._11) :: liftT12(t._12) :: liftT13(t._13) :: liftT14(t._14) :: liftT15(t._15) :: liftT16(t._16) :: liftT17(t._17) :: liftT18(t._18) :: liftT19(t._19) :: liftT20(t._20) :: liftT21(t._21) :: liftT22(t._22) :: Nil)
+    }
+  }
+
+  trait StandardUnliftableInstances {
+    private def unliftPrimitive[Unboxed: ClassTag, Boxed: ClassTag] = Unliftable[Unboxed] {
+      case Literal(Constant(value))
+           if value.getClass == implicitly[ClassTag[Boxed]].runtimeClass
+           || value.getClass == implicitly[ClassTag[Unboxed]].runtimeClass =>
+        value.asInstanceOf[Unboxed]
+    }
+    implicit def unliftByte: Unliftable[Byte]       = unliftPrimitive[Byte, java.lang.Byte]
+    implicit def unliftShort: Unliftable[Short]     = unliftPrimitive[Short, java.lang.Short]
+    implicit def unliftChar: Unliftable[Char]       = unliftPrimitive[Char, java.lang.Character]
+    implicit def unliftInt: Unliftable[Int]         = unliftPrimitive[Int, java.lang.Integer]
+    implicit def unliftLong: Unliftable[Long]       = unliftPrimitive[Long, java.lang.Long]
+    implicit def unliftFloat: Unliftable[Float]     = unliftPrimitive[Float, java.lang.Float]
+    implicit def unliftDouble: Unliftable[Double]   = unliftPrimitive[Double, java.lang.Double]
+    implicit def unliftBoolean: Unliftable[Boolean] = unliftPrimitive[Boolean, java.lang.Boolean]
+    implicit def unliftUnit: Unliftable[Unit]       = unliftPrimitive[Unit, scala.runtime.BoxedUnit]
+    implicit def unliftString: Unliftable[String]   = Unliftable { case Literal(Constant(s: String)) => s }
+
+    implicit def unliftScalaSymbol: Unliftable[scala.Symbol] = Unliftable {
+      case Apply(ScalaDot(stdnme.Symbol), List(Literal(Constant(name: String)))) => scala.Symbol(name)
+    }
+
+    implicit def unliftName[T <: Name : ClassTag]: Unliftable[T] = Unliftable[T] { case Ident(name: T) => name; case Bind(name: T, Ident(stdnme.WILDCARD)) => name }
+    implicit def unliftType: Unliftable[Type]                    = Unliftable[Type] { case tt: TypeTree if tt.tpe != null => tt.tpe }
+    implicit def unliftConstant: Unliftable[Constant]            = Unliftable[Constant] { case Literal(const) => const }
+
+    implicit def unliftTuple2[T1, T2](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2]): Unliftable[Tuple2[T1, T2]] = Unliftable {
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: Nil) => Tuple2(v1, v2)
+    }
+    implicit def unliftTuple3[T1, T2, T3](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3]): Unliftable[Tuple3[T1, T2, T3]] = Unliftable {
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: Nil) => Tuple3(v1, v2, v3)
+    }
+    implicit def unliftTuple4[T1, T2, T3, T4](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4]): Unliftable[Tuple4[T1, T2, T3, T4]] = Unliftable {
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: Nil) => Tuple4(v1, v2, v3, v4)
+    }
+    implicit def unliftTuple5[T1, T2, T3, T4, T5](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5]): Unliftable[Tuple5[T1, T2, T3, T4, T5]] = Unliftable {
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: Nil) => Tuple5(v1, v2, v3, v4, v5)
+    }
+    implicit def unliftTuple6[T1, T2, T3, T4, T5, T6](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6]): Unliftable[Tuple6[T1, T2, T3, T4, T5, T6]] = Unliftable {
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: Nil) => Tuple6(v1, v2, v3, v4, v5, v6)
+    }
+    implicit def unliftTuple7[T1, T2, T3, T4, T5, T6, T7](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7]): Unliftable[Tuple7[T1, T2, T3, T4, T5, T6, T7]] = Unliftable {
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: Nil) => Tuple7(v1, v2, v3, v4, v5, v6, v7)
+    }
+    implicit def unliftTuple8[T1, T2, T3, T4, T5, T6, T7, T8](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8]): Unliftable[Tuple8[T1, T2, T3, T4, T5, T6, T7, T8]] = Unliftable {
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: Nil) => Tuple8(v1, v2, v3, v4, v5, v6, v7, v8)
+    }
+    implicit def unliftTuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9]): Unliftable[Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9]] = Unliftable {
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: Nil) => Tuple9(v1, v2, v3, v4, v5, v6, v7, v8, v9)
+    }
+    implicit def unliftTuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10]): Unliftable[Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]] = Unliftable {
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: Nil) => Tuple10(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10)
+    }
+    implicit def unliftTuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11]): Unliftable[Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]] = Unliftable {
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: Nil) => Tuple11(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11)
+    }
+    implicit def unliftTuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12]): Unliftable[Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]] = Unliftable {
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: Nil) => Tuple12(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12)
+    }
+    implicit def unliftTuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13]): Unliftable[Tuple13[T1, T2, T3, T4, T5, T6, T7, T8 [...]
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: Nil) => Tuple13(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13)
+    }
+    implicit def unliftTuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14]): Unliftable[Tuple [...]
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: Nil) => Tuple14(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14)
+    }
+    implicit def unliftTuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: U [...]
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: Nil) => Tuple15(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15)
+    }
+    implicit def unliftTuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT [...]
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: UnliftT16(v16) :: Nil) => Tuple16(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16)
+    }
+    implicit def unliftTuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], Un [...]
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: UnliftT16(v16) :: UnliftT17(v17) :: Nil) => Tuple17(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17)
+    }
+    implicit def unliftTuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14 [...]
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: UnliftT16(v16) :: UnliftT17(v17) :: UnliftT18(v18) :: Nil) => Tuple18(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18)
+    }
+    implicit def unliftTuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftabl [...]
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: UnliftT16(v16) :: UnliftT17(v17) :: UnliftT18(v18) :: UnliftT19(v19) :: Nil) => Tuple19(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19)
+    }
+    implicit def unliftTuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unli [...]
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: UnliftT16(v16) :: UnliftT17(v17) :: UnliftT18(v18) :: UnliftT19(v19) :: UnliftT20(v20) :: Nil) => Tuple20(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20)
+    }
+    implicit def unliftTuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: [...]
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: UnliftT16(v16) :: UnliftT17(v17) :: UnliftT18(v18) :: UnliftT19(v19) :: UnliftT20(v20) :: UnliftT21(v21) :: Nil) => Tuple21(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21)
+    }
+    implicit def unliftTuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], Unlif [...]
+      case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: UnliftT16(v16) :: UnliftT17(v17) :: UnliftT18(v18) :: UnliftT19(v19) :: UnliftT20(v20) :: UnliftT21(v21) :: UnliftT22(v22) :: Nil) => Tuple22(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v [...]
+    }
+  }
+
+  // names used internally by implementations of standard liftables and unliftables
+  // can't be `private object nme` because of https://groups.google.com/forum/#!topic/scala-internals/b-Full9WZeE
+  // can't be `private[this] object nme` because then STARR has problems prioritizing this.nme over self.nme
+  // therefore I'm essentially forced to give this object a non-standard name
+  private object stdnme {
+    val Array      = TermName("Array")
+    val collection = TermName("collection")
+    val immutable  = TermName("immutable")
+    val Left       = TermName("Left")
+    val List       = TermName("List")
+    val Map        = TermName("Map")
+    val None       = TermName("None")
+    val Nil        = TermName("Nil")
+    val Right      = TermName("Right")
+    val Set        = TermName("Set")
+    val Some       = TermName("Some")
+    val Symbol     = TermName("Symbol")
+    val util       = TermName("util")
+    val Vector     = TermName("Vector")
+    val WILDCARD   = self.nme.WILDCARD
+  }
+}
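
A minimal, illustrative sketch of how the standard Liftable/Unliftable instances above are exercised through quasiquotes (assuming scala.reflect.runtime.universe is in scope):

    import scala.reflect.runtime.universe._

    val pair = (1, "two")
    val lifted = q"$pair"                        // picks up liftTuple2 via Liftable[(Int, String)]
    val q"${unlifted: (Int, String)}" = lifted   // picks up unliftTuple2 via Unliftable[(Int, String)]
    assert(unlifted == pair)
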
diff --git a/src/reflect/scala/reflect/api/StandardNames.scala b/src/reflect/scala/reflect/api/StandardNames.scala
index 4886e4f..19bdfca 100644
--- a/src/reflect/scala/reflect/api/StandardNames.scala
+++ b/src/reflect/scala/reflect/api/StandardNames.scala
@@ -2,7 +2,8 @@
 * Copyright 2005-2013 LAMP/EPFL
 * @author  Martin Odersky
 */
-package scala.reflect
+package scala
+package reflect
 package api
 
 // Q: I have a pretty name. Can I put it here?
@@ -27,15 +28,23 @@ package api
 trait StandardNames {
   self: Universe =>
 
+  /** @see [[termNames]] */
+  @deprecated("Use `termNames` instead", "2.11.0")
+  val nme: TermNamesApi
+
   /** A value containing all [[TermNamesApi standard term names]].
    *  @group StandardNames
    */
-  val nme: TermNamesApi
+  val termNames: TermNamesApi
+
+  /** @see [[typeNames]] */
+  @deprecated("Use `typeNames` instead", "2.11.0")
+  val tpnme: TypeNamesApi
 
   /** A value containing all [[TypeNamesApi standard type names]].
    *  @group StandardNames
    */
-  val tpnme: TypeNamesApi
+  val typeNames: TypeNamesApi
 
   /** Defines standard names, common for term and type names: These can be accessed via the [[nme]] and [[tpnme]] members.
    *  @group API
@@ -84,6 +93,11 @@ trait StandardNames {
      */
     val ROOTPKG: NameType
 
+    /** The term name `<empty>`.
+     *  Represents the empty package.
+     */
+    val EMPTY_PACKAGE_NAME: NameType
+
     /** The string " " (a single whitespace).
      *  `LOCAL_SUFFIX_STRING` is appended to the names of local identifiers,
      *  when it's necessary to prevent a naming conflict. For example, underlying fields
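
A minimal usage sketch of the renamed name containers (assuming scala.reflect.runtime.universe is in scope); the deprecated `nme`/`tpnme` aliases keep working during migration:

    import scala.reflect.runtime.universe._

    termNames.CONSTRUCTOR          // was nme.CONSTRUCTOR
    termNames.EMPTY_PACKAGE_NAME   // the newly exposed `<empty>` package name
    typeNames.WILDCARD             // was tpnme.WILDCARD
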
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index c8e03f1..dddd3c0 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
 /**
@@ -29,7 +30,7 @@ package api
  *    scala> val test = typeOf[C[Int]].member(newTermName("test")).asMethod
  *    test: reflect.runtime.universe.MethodSymbol = method test
  *
- *    scala> test.typeSignature
+ *    scala> test.info
  *    res0: reflect.runtime.universe.Type = [U](x: T)(y: U)scala.Int
  *  }}}
  *
@@ -59,99 +60,39 @@ trait Symbols { self: Universe =>
    *  @group Symbols
    *  @template
    */
-  type Symbol >: Null <: SymbolApi
-
-  /** A tag that preserves the identity of the `Symbol` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val SymbolTag: ClassTag[Symbol]
+  type Symbol >: Null <: AnyRef with SymbolApi
 
   /** The type of type symbols representing type, class, and trait declarations,
    *  as well as type parameters.
    *  @group Symbols
    *  @template
    */
-  type TypeSymbol >: Null <: Symbol with TypeSymbolApi
-
-  /** A tag that preserves the identity of the `TypeSymbol` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TypeSymbolTag: ClassTag[TypeSymbol]
+  type TypeSymbol >: Null <: TypeSymbolApi with Symbol
 
   /** The type of term symbols representing val, var, def, and object declarations as
    *  well as packages and value parameters.
    *  @group Symbols
    *  @template
    */
-  type TermSymbol >: Null <: Symbol with TermSymbolApi
-
-  /** A tag that preserves the identity of the `TermSymbol` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TermSymbolTag: ClassTag[TermSymbol]
+  type TermSymbol >: Null <: TermSymbolApi with Symbol
 
   /** The type of method symbols representing def declarations.
    *  @group Symbols
    *  @template
    */
-  type MethodSymbol >: Null <: TermSymbol with MethodSymbolApi
-
-  /** A tag that preserves the identity of the `MethodSymbol` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val MethodSymbolTag: ClassTag[MethodSymbol]
+  type MethodSymbol >: Null <: MethodSymbolApi with TermSymbol
 
   /** The type of module symbols representing object declarations.
    *  @group Symbols
    *  @template
    */
-  type ModuleSymbol >: Null <: TermSymbol with ModuleSymbolApi
-
-  /** A tag that preserves the identity of the `ModuleSymbol` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ModuleSymbolTag: ClassTag[ModuleSymbol]
+  type ModuleSymbol >: Null <: ModuleSymbolApi with TermSymbol
 
   /** The type of class symbols representing class and trait definitions.
    *  @group Symbols
    *  @template
    */
-  type ClassSymbol >: Null <: TypeSymbol with ClassSymbolApi
-
-  /** A tag that preserves the identity of the `ClassSymbol` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ClassSymbolTag: ClassTag[ClassSymbol]
-
-  /** The type of free terms introduced by reification.
-   *  @group Symbols
-   *  @template
-   */
-  type FreeTermSymbol >: Null <: TermSymbol with FreeTermSymbolApi
-
-  /** A tag that preserves the identity of the `FreeTermSymbol` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val FreeTermSymbolTag: ClassTag[FreeTermSymbol]
-
-  /** The type of free types introduced by reification.
-   *  @group Symbols
-   *  @template
-   */
-  type FreeTypeSymbol >: Null <: TypeSymbol with FreeTypeSymbolApi
-
-  /** A tag that preserves the identity of the `FreeTypeSymbol` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val FreeTypeSymbolTag: ClassTag[FreeTypeSymbol]
+  type ClassSymbol >: Null <: ClassSymbolApi with TypeSymbol
 
   /** A special "missing" symbol. Commonly used in the API to denote a default or empty value.
    *  @group Symbols
@@ -178,12 +119,8 @@ trait Symbols { self: Universe =>
    *  @groupdesc Helpers       These methods enable collections-like operations on symbols.
    *  @groupname Type          TypeSymbol Members
    *  @groupprio Type          -1
-   *  @groupname FreeType      FreeType Symbol Members
-   *  @groupprio FreeType      -2
    *  @groupname Term          TermSymbol Members
    *  @groupprio Term          -1
-   *  @groupname FreeTerm      FreeTerm Symbol Members
-   *  @groupprio FreeTerm      -2
    *  @groupname Class         Class Symbol Members
    *  @groupprio Class         -2
    *  @groupname Method        Method Symbol Members
@@ -218,7 +155,7 @@ trait Symbols { self: Universe =>
     /** The name of the symbol as a member of the `Name` type.
      *  @group Basics
      */
-    def name: Name
+    def name: NameType
 
     /** The encoded full path name of this symbol, where outer names and inner names
      *  are separated by periods.
@@ -226,6 +163,9 @@ trait Symbols { self: Universe =>
      */
     def fullName: String
 
+    /** Position of the tree. */
+    def pos: Position
+
     /** Does this symbol represent the definition of a type?
      *  Note that every symbol is either a term or a type.
      *  So for every symbol `sym` (except for `NoSymbol`),
@@ -245,7 +185,7 @@ trait Symbols { self: Universe =>
     /** Does this symbol represent the definition of a term?
      *  Note that every symbol is either a term or a type.
      *  So for every symbol `sym` (except for `NoSymbol`),
-     *  either `sym.isTerm` is true or `sym.isTerm` is true.
+     *  either `sym.isTerm` is true or `sym.isType` is true.
      *
      *  @group Tests
      */
@@ -265,6 +205,15 @@ trait Symbols { self: Universe =>
      */
     def isMethod: Boolean = false
 
+    /** Does this method represent a constructor?
+     *
+     *  If `owner` is a class, then this is a vanilla JVM constructor.
+     *  If `owner` is a trait, then this is a mixin constructor.
+     *
+     *  @group Method
+     */
+    def isConstructor: Boolean
+
     /** This symbol cast to a MethodSymbol.
      *  @throws ScalaReflectionException if `isMethod` is false.
      *
@@ -322,45 +271,6 @@ trait Symbols { self: Universe =>
      */
     def asClass: ClassSymbol = throw new ScalaReflectionException(s"$this is not a class")
 
-    /** Does this symbol represent a free term captured by reification?
-     *  If yes, `isTerm` is also guaranteed to be true.
-     *
-     *  @group Tests
-     */
-    def isFreeTerm: Boolean = false
-
-    /** This symbol cast to a free term symbol.
-     *  @throws ScalaReflectionException if `isFreeTerm` is false.
-     *
-     *  @group Conversions
-     */
-    def asFreeTerm: FreeTermSymbol = throw new ScalaReflectionException(s"$this is not a free term")
-
-    /** Does this symbol represent a free type captured by reification?
-     *  If yes, `isType` is also guaranteed to be true.
-     *
-     *  @group Tests
-     */
-    def isFreeType: Boolean = false
-
-    /** This symbol cast to a free type symbol.
-     *  @throws ScalaReflectionException if `isFreeType` is false.
-     *
-     *  @group Conversions
-     */
-    def asFreeType: FreeTypeSymbol = throw new ScalaReflectionException(s"$this is not a free type")
-
-    /** @group Constructors */
-    def newTermSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol
-    /** @group Constructors */
-    def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol)
-    /** @group Constructors */
-    def newMethodSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol
-    /** @group Constructors */
-    def newTypeSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol
-    /** @group Constructors */
-    def newClassSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol
-
     /** Source file if this symbol is created during this compilation run,
      *  or a class file if this symbol is loaded from a *.class or *.jar.
      *
@@ -370,6 +280,7 @@ trait Symbols { self: Universe =>
      *
      *  @group Basics
      */
+    @deprecated("Use `pos.source.file` instead", "2.11.0")
     def associatedFile: scala.reflect.io.AbstractFile
 
     /** A list of annotations attached to this Symbol.
@@ -382,15 +293,32 @@ trait Symbols { self: Universe =>
      *  For a module: the class with the same name in the same package.
      *  For all others: NoSymbol
      *
+     *  This API may return unexpected results for module classes, packages and package classes.
+     *  Use `companion` instead in order to get predictable results.
+     *
      *  @group Basics
      */
+    @deprecated("Use `companion` instead, but beware of possible changes in behavior", "2.11.0")
     def companionSymbol: Symbol
 
+    /** For a class: its companion object, if it exists.
+     *  For a module or a module class: the companion class of the module, if it exists.
+     *  For a package or a package class: NoSymbol.
+     *  For all others: NoSymbol.
+     */
+    def companion: Symbol
+
+    /** @see [[infoIn]] */
+    def typeSignatureIn(site: Type): Type
+
     /** The type signature of this symbol seen as a member of given type `site`.
      *
      *  @group Basics
      */
-    def typeSignatureIn(site: Type): Type
+    def infoIn(site: Type): Type
+
+    /** @see [[info]] */
+    def typeSignature: Type
 
     /** The type signature of this symbol.
      *
@@ -398,17 +326,27 @@ trait Symbols { self: Universe =>
      *  instantiation of a generic type. For example, signature
      *  of the method `def map[B](f: (A) ⇒ B): List[B]`, which refers to the type parameter `A` of the declaring class `List[A]`,
      *  will always feature `A`, regardless of whether `map` is loaded from the `List[_]` or from `List[Int]`. To get a signature
-     *  with type parameters appropriately instantiated, one should use `typeSignatureIn`.
+     *  with type parameters appropriately instantiated, one should use `infoIn`.
      *
      *  @group Basics
      */
-    def typeSignature: Type
+    def info: Type
+
+    /** @see [[overrides]] */
+    @deprecated("Use `overrides` instead", "2.11.0")
+    def allOverriddenSymbols: List[Symbol]
 
     /** Returns all symbols overridden by this symbol.
      *
      *  @group Basics
      */
-    def allOverriddenSymbols: List[Symbol]
+    def overrides: List[Symbol]
+
+    /** The overloaded alternatives of this symbol
+     *
+     *  @group Basics
+     */
+    def alternatives: List[Symbol]
 
     /******************* tests *******************/
 
@@ -427,16 +365,13 @@ trait Symbols { self: Universe =>
      */
     def isImplementationArtifact: Boolean
 
-    /** Does this symbol represent a local declaration or definition?
-     *
-     *  If yes, either `isPrivate` or `isProtected` are guaranteed to be true.
-     *  Local symbols can only be accessed from the same object instance.
-     *
-     *  If yes, `privateWithin` might tell more about this symbol's visibility scope.
+    /** Does this symbol represent a declaration or definition written in a source file as `private[this]`
+     *  or generated in tree/symbol form with the combination of flags LOCAL and PRIVATE?
+     *  If yes, `isPrivate` is guaranteed to be true.
      *
      *  @group Tests
      */
-    def isLocal: Boolean
+    def isPrivateThis: Boolean
 
     /** Does this symbol represent a private declaration or definition?
      *  If yes, `privateWithin` might tell more about this symbol's visibility scope.
@@ -445,6 +380,14 @@ trait Symbols { self: Universe =>
      */
     def isPrivate: Boolean
 
+    /** Does this symbol represent a declaration or definition written in a source file as `protected[this]`
+     *  or generated in tree/symbol form with the combination of flags LOCAL and PROTECTED?
+     *  If yes, `isProtected` is guaranteed to be true.
+     *
+     *  @group Tests
+     */
+    def isProtectedThis: Boolean
+
     /** Does this symbol represent a protected declaration or definition?
      *  If yes, `privateWithin` might tell more about this symbol's visibility scope.
      *
@@ -459,7 +402,7 @@ trait Symbols { self: Universe =>
     def isPublic: Boolean
 
     /**
-     * Set when symbol has a modifier of the form private[X], NoSymbol otherwise.
+     *  Set when symbol has a modifier of the form private[X] or protected[X], NoSymbol otherwise.
      *
      *  Access level encoding: there are three scala flags (PRIVATE, PROTECTED,
      *  and LOCAL) which combine with value privateWithin (the "foo" in private[foo])
@@ -488,7 +431,7 @@ trait Symbols { self: Universe =>
     def privateWithin: Symbol
 
     /** Does this symbol represent the definition of a package?
-     *  If yes, `isTerm` is also guaranteed to be true.
+     *  Known issues: [[https://issues.scala-lang.org/browse/SI-6732]].
      *
      *  @group Tests
      */
@@ -501,12 +444,6 @@ trait Symbols { self: Universe =>
      */
     def isPackageClass: Boolean
 
-    /** Does this symbol or its underlying type represent a typechecking error?
-     *
-     *  @group Tests
-     */
-    def isErroneous : Boolean
-
     /** Is this symbol static (i.e. with no outer instance)?
      *  Q: When exactly is a sym marked as STATIC?
      *  A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep.
@@ -522,11 +459,11 @@ trait Symbols { self: Universe =>
      */
     def isFinal: Boolean
 
-    /** Is this symbol overriding something?
+    /** Is this symbol abstract (i.e. an abstract class, an abstract method, value or type member)?
      *
      *  @group Tests
      */
-    def isOverride: Boolean
+    def isAbstract: Boolean
 
     /** Is this symbol labelled as "abstract override"?
      *
@@ -660,12 +597,6 @@ trait Symbols { self: Universe =>
      */
     def isLazy: Boolean
 
-    /** The overloaded alternatives of this symbol
-     *
-     *  @group Term
-     */
-    def alternatives: List[Symbol]
-
     /** Used to provide a better error message for `asMethod` */
     override protected def isOverloadedMethod = alternatives exists (_.isMethod)
 
@@ -751,7 +682,7 @@ trait Symbols { self: Universe =>
       *  Example: Given a class declaration `class C[T] { ... } `, that generates a symbol
       *  `C`. Then `C.toType` is the type `C[T]`.
       *
-      *  By contrast, `C.typeSignature` would be a type signature of form
+      *  By contrast, `C.info` would be a type signature of form
       *  `PolyType(ClassInfoType(...))` that describes type parameters, value
       *  parameters, parent types, and members of `C`.
      *
@@ -774,13 +705,6 @@ trait Symbols { self: Universe =>
      */
     def isCovariant     : Boolean
 
-    /** Does this symbol represent the definition of a skolem?
-     *  Skolems are used during typechecking to represent type parameters viewed from inside their scopes.
-     *
-     *  @group Type
-     */
-    def isSkolem       : Boolean
-
     /** Does this symbol represent the definition of a type alias?
      *
      *  @group Type
@@ -791,6 +715,7 @@ trait Symbols { self: Universe =>
      *
      *  @group Type
      */
+    @deprecated("Use isAbstract instead", "2.11.0")
     def isAbstractType : Boolean
 
     /** Does this symbol represent an existentially bound type?
@@ -816,15 +741,6 @@ trait Symbols { self: Universe =>
     final override def isMethod = true
     final override def asMethod = this
 
-    /** Does this method represent a constructor?
-     *
-     *  If `owner` is a class, then this is a vanilla JVM constructor.
-     *  If `owner` is a trait, then this is a mixin constructor.
-     *
-     *  @group Method
-     */
-    def isConstructor: Boolean
-
     /** Does this symbol denote the primary constructor of its enclosing class?
      *
      *  @group Method
@@ -837,6 +753,10 @@ trait Symbols { self: Universe =>
      */
     def typeParams: List[Symbol]
 
+    /** @see [[paramLists]] */
+    @deprecated("Use `paramLists` instead", "2.11.0")
+    def paramss: List[List[Symbol]]
+
     /** All parameter lists of the method.
      *  The name ending with "ss" indicates that the result type is a list of lists.
      *
@@ -846,7 +766,7 @@ trait Symbols { self: Universe =>
      *
      *  @group Method
      */
-    def paramss: List[List[Symbol]]
+    def paramLists: List[List[Symbol]]
 
     /** Does this method support variable length argument lists?
      *
@@ -859,6 +779,14 @@ trait Symbols { self: Universe =>
      *  @group Method
      */
     def returnType: Type
+
+    /** Exceptions that this method is known to throw.
+     *  For Scala methods, the list is calculated from [[throws]] annotations present on a method.
+     *  For Java methods, the list is calculated from `throws` clauses attached to the method and stored in bytecode.
+     *
+     *  @group Method
+     */
+    def exceptions: List[Symbol]
   }
 
   /** The API of module symbols.
@@ -924,6 +852,7 @@ trait Symbols { self: Universe =>
      *
      *  @group Class
      */
+    @deprecated("Use isAbstract instead", "2.11.0")
     def isAbstractClass: Boolean
 
     /** Does this symbol represent a case class?
@@ -973,50 +902,37 @@ trait Symbols { self: Universe =>
      */
     def thisPrefix: Type
 
-    /** For a polymorphic class/trait, its type parameters, the empty list for all other classes/trait
+    /** The type `C.super[M]`, where `C` is the current class and `M` is `supertpe`.
      *
      *  @group Class
      */
-    def typeParams: List[Symbol]
-  }
+    def superPrefix(supertpe: Type): Type
 
-  /** The API of free term symbols.
-   *  The main source of information about symbols is the [[Symbols]] page.
-   *
-   *  $SYMACCESSORS
-   *  @group API
-   */
-  trait FreeTermSymbolApi extends TermSymbolApi { this: FreeTermSymbol =>
-    final override def isFreeTerm = true
-    final override def asFreeTerm = this
-
-    /** The place where this symbol has been spawned
+    /** For a polymorphic class/trait, its type parameters, the empty list for all other classes/trait
      *
-     *  @group FreeTerm
+     *  @group Class
      */
-    def origin: String
+    def typeParams: List[Symbol]
 
-    /** The valus this symbol refers to
+    /** For a Scala class or module class, the primary constructor of the class.
+     *  For a Scala trait, its mixin constructor.
+     *  For a Scala package class, NoSymbol.
+     *  For a Java class, NoSymbol.
      *
-     *  @group FreeTerm
-     */
-    def value: Any
-  }
-
-  /** The API of free type symbols.
-   *  The main source of information about symbols is the [[Symbols]] page.
-   *
-   *  $SYMACCESSORS
-   *  @group API
-   */
-  trait FreeTypeSymbolApi extends TypeSymbolApi { this: FreeTypeSymbol =>
-    final override def isFreeType = true
-    final override def asFreeType = this
-
-    /** The place where this symbol has been spawned
+     *  Known issues: Due to SI-8367, primaryConstructor may return unexpected results
+     *  when called for Java classes (for some vague definition of a "Java class", which apparently
+     *  not only includes javac-produced classfiles, but also consists of classes defined in
+     *  Scala programs under the java.lang package). What's even worse, for some Java classes
+     *  we can't even guarantee stability of the return value - depending on your classloader configuration
+     *  and/or JDK version you might get different primaryConstructor for the same ClassSymbol.
+     *  We have logged these issues at SI-8193.
      *
-     *  @group FreeType
+     *  @group Class
      */
-    def origin: String
+    // TODO: SI-8193 I think we should only return a non-empty symbol if called for Scala classes
+    // returning something for traits and module classes is outright confusing
+    // This, however, will require some refactoring in the compiler, so I'll leave it for later
+    // as at the moment we don't have time or risk tolerance for that
+    def primaryConstructor: Symbol
   }
 }
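
Most of the renames above are mechanical for callers; a minimal sketch against the runtime universe (target members chosen purely for illustration):

    import scala.reflect.runtime.universe._

    val get = typeOf[Option[Int]].member(TermName("get")).asMethod
    get.info            // replaces the deprecated typeSignature
    get.paramLists      // replaces the deprecated paramss (Nil here, `get` takes no parameters)
    get.isConstructor   // now available on every Symbol, not only MethodSymbol
    typeOf[Option[Int]].typeSymbol.companion   // replaces companionSymbol
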
diff --git a/src/reflect/scala/reflect/api/TagInterop.scala b/src/reflect/scala/reflect/api/TagInterop.scala
deleted file mode 100644
index 5de8115..0000000
--- a/src/reflect/scala/reflect/api/TagInterop.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package scala.reflect
-package api
-
-/**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
- *
- *  This trait provides type tag <-> manifest interoperability.
- *  @group ReflectionAPI
- *
- *  @groupname TagInterop TypeTag and Manifest Interoperability
- */
-trait TagInterop { self: Universe =>
-  // TODO `mirror` parameters are now of type `Any`, because I can't make these path-dependent types work
-  // if you're brave enough, replace `Any` with `Mirror`, recompile and run interop_typetags_are_manifests.scala
-
-  /**
-   * Convert a [[scala.reflect.api.TypeTags#TypeTag]] to a [[scala.reflect.Manifest]].
-   *
-   * Compiler usually generates these conversions automatically, when a type tag for a type `T` is in scope,
-   * and an implicit of type `Manifest[T]` is requested, but this method can also be called manually.
-   * For example:
-   * {{{
-   * typeTagToManifest(scala.reflect.runtime.currentMirror, implicitly[TypeTag[String]])
-   * }}}
-   * @group TagInterop
-   */
-  def typeTagToManifest[T: ClassTag](mirror: Any, tag: Universe#TypeTag[T]): Manifest[T] =
-    throw new UnsupportedOperationException("This universe does not support tag -> manifest conversions. Use a JavaUniverse, e.g. the scala.reflect.runtime.universe.")
-
-  /**
-   * Convert a [[scala.reflect.Manifest]] to a [[scala.reflect.api.TypeTags#TypeTag]].
-   *
-   * Compiler usually generates these conversions automatically, when a manifest for a type `T` is in scope,
-   * and an implicit of type `TypeTag[T]` is requested, but this method can also be called manually.
-   * For example:
-   * {{{
-   * manifestToTypeTag(scala.reflect.runtime.currentMirror, implicitly[Manifest[String]])
-   * }}}
-   * @group TagInterop
-   */
-  def manifestToTypeTag[T](mirror: Any, manifest: Manifest[T]): Universe#TypeTag[T] =
-    throw new UnsupportedOperationException("This universe does not support manifest -> tag conversions. Use a JavaUniverse, e.g. the scala.reflect.runtime.universe.")
-}
diff --git a/src/reflect/scala/reflect/api/TreeCreator.scala b/src/reflect/scala/reflect/api/TreeCreator.scala
index 6969418..027c840 100644
--- a/src/reflect/scala/reflect/api/TreeCreator.scala
+++ b/src/reflect/scala/reflect/api/TreeCreator.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
 /** This is an internal implementation class.
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index 0937a93..ff89266 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -2,7 +2,8 @@
  * Copyright 2005-2013 LAMP/EPFL
  * @author  Martin Odersky
  */
-package scala.reflect
+package scala
+package reflect
 package api
 
 /**
@@ -58,13 +59,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Tree >: Null <: TreeApi
-
-  /** A tag that preserves the identity of the `Tree` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TreeTag: ClassTag[Tree]
+  type Tree >: Null <: AnyRef with TreeApi
 
   /** The API that all trees support.
    *  The main source of information about trees is the [[scala.reflect.api.Trees]] page.
@@ -75,11 +70,25 @@ trait Trees { self: Universe =>
     def isDef: Boolean
 
     /** Is this tree one of the empty trees?
-     *  Empty trees are: the `EmptyTree` null object, `TypeTree` instances that don't carry a type
-     *  and the special `emptyValDef` singleton.
+     *
+     *  Empty trees are: the `EmptyTree` null object and `TypeTree` instances that don't carry a type.
+     *
+     *  @see `canHaveAttrs`
      */
     def isEmpty: Boolean
 
+    /** Is this tree not an empty tree?
+     *
+     *  @see `isEmpty`
+     */
+    def nonEmpty: Boolean
+
+    /** Can this tree carry attributes (i.e. symbols, types or positions)?
+     *  Typically the answer is yes, except for the `EmptyTree` null object and
+     *  two special singletons: `noSelfType` and `pendingSuperCall`.
+     */
+    def canHaveAttrs: Boolean
+
     /** The canonical way to test if a Tree represents a term.
      */
     def isTerm: Boolean
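
The new `nonEmpty`/`canHaveAttrs` tests are cheap structural checks; a minimal sketch (assuming quasiquotes from the runtime universe):

    import scala.reflect.runtime.universe._

    val t = q"x + 1"
    assert(t.nonEmpty && t.canHaveAttrs)                  // ordinary trees can carry symbols, types, positions
    assert(EmptyTree.isEmpty && !EmptyTree.canHaveAttrs)  // the null object cannot
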
@@ -159,29 +168,6 @@ trait Trees { self: Universe =>
      */
     def children: List[Tree]
 
-    /** Extracts free term symbols from a tree that is reified or contains reified subtrees.
-     */
-    def freeTerms: List[FreeTermSymbol]
-
-    /** Extracts free type symbols from a tree that is reified or contains reified subtrees.
-     */
-    def freeTypes: List[FreeTypeSymbol]
-
-    /** Substitute symbols in `to` for corresponding occurrences of references to
-     *  symbols `from` in this type.
-     */
-    def substituteSymbols(from: List[Symbol], to: List[Symbol]): Tree
-
-    /** Substitute types in `to` for corresponding occurrences of references to
-     *  symbols `from` in this tree.
-     */
-    def substituteTypes(from: List[Symbol], to: List[Type]): Tree
-
-    /** Substitute given tree `to` for occurrences of nodes that represent
-     *  `C.this`, where `C` referes to the given class `clazz`.
-     */
-    def substituteThis(clazz: Symbol, to: Tree): Tree
-
     /** Make a copy of this tree, keeping all attributes,
      *  except that all positions are focused (so nothing
      *  in this tree will be found when searching by position).
@@ -207,13 +193,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type TermTree >: Null <: AnyRef with Tree with TermTreeApi
-
-  /** A tag that preserves the identity of the `TermTree` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TermTreeTag: ClassTag[TermTree]
+  type TermTree >: Null <: TermTreeApi with Tree
 
   /** The API that all term trees support
    *  @group API
@@ -226,13 +206,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type TypTree >: Null <: AnyRef with Tree with TypTreeApi
-
-  /** A tag that preserves the identity of the `TypTree` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TypTreeTag: ClassTag[TypTree]
+  type TypTree >: Null <: TypTreeApi with Tree
 
   /** The API that all typ trees support
    *  @group API
@@ -240,17 +214,14 @@ trait Trees { self: Universe =>
   trait TypTreeApi extends TreeApi { this: TypTree =>
   }
 
-  /** A tree with a mutable symbol field, initialized to NoSymbol.
+  /** A tree that carries a symbol, e.g. by defining it (`DefTree`) or by referring to it (`RefTree`).
+   *  Such trees start their life naked, returning `NoSymbol`, but after being typechecked without errors
+   *  they hold non-empty symbols.
+   *
    *  @group Trees
    *  @template
    */
-  type SymTree >: Null <: AnyRef with Tree with SymTreeApi
-
-  /** A tag that preserves the identity of the `SymTree` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val SymTreeTag: ClassTag[SymTree]
+  type SymTree >: Null <: SymTreeApi with Tree
 
   /** The API that all sym trees support
    *  @group API
@@ -260,24 +231,18 @@ trait Trees { self: Universe =>
     def symbol: Symbol
   }
 
-  /** A tree with a name - effectively, a DefTree or RefTree.
+  /** A tree that carries a name, e.g. by defining it (`DefTree`) or by referring to it (`RefTree`).
    *  @group Trees
    *  @template
    */
-  type NameTree >: Null <: AnyRef with Tree with NameTreeApi
-
-  /** A tag that preserves the identity of the `NameTree` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val NameTreeTag: ClassTag[NameTree]
+  type NameTree >: Null <: NameTreeApi with Tree
 
   /** The API that all name trees support
    *  @group API
    */
   trait NameTreeApi extends TreeApi { this: NameTree =>
     /** The underlying name.
-     *  For example, the `<List>` part of `Ident("List": TermName)`.
+     *  For example, the `List` part of `Ident(TermName("List"))`.
      */
     def name: Name
   }
@@ -288,20 +253,14 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type RefTree >: Null <: SymTree with NameTree with RefTreeApi
-
-  /** A tag that preserves the identity of the `RefTree` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val RefTreeTag: ClassTag[RefTree]
+  type RefTree >: Null <: RefTreeApi with SymTree with NameTree
 
   /** The API that all ref trees support
    *  @group API
    */
   trait RefTreeApi extends SymTreeApi with NameTreeApi { this: RefTree =>
     /** The qualifier of the reference.
-     *  For example, the `<scala>` part of `Select("scala": TermName, "List": TermName)`.
+     *  For example, the `Ident(TermName("scala"))` part of `Select(Ident(TermName("scala")), TermName("List"))`.
      *  `EmptyTree` for `Ident` instances.
      */
     def qualifier: Tree
@@ -310,17 +269,28 @@ trait Trees { self: Universe =>
     def name: Name
   }
 
-  /** A tree which defines a symbol-carrying entity.
-   *  @group Trees
-   *  @template
+  /** The constructor/extractor for `RefTree` instances.
+   *  @group Extractors
+   */
+  val RefTree: RefTreeExtractor
+
+  /** An extractor class to create and pattern match with syntax `RefTree(qual, name)`.
+   *  This AST node corresponds to either Ident, Select or SelectFromTypeTree.
+   *  @group Extractors
    */
-  type DefTree >: Null <: SymTree with NameTree with DefTreeApi
+  abstract class RefTreeExtractor {
+    def apply(qualifier: Tree, name: Name): RefTree
+    def unapply(refTree: RefTree): Option[(Tree, Name)]
+  }
 
-  /** A tag that preserves the identity of the `DefTree` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
+  /** A tree representing a symbol-defining entity:
+   *    1) A declaration or a definition (type, class, object, package, val, var, or def)
+   *    2) `Bind` that is used to represent binding occurrences in pattern matches
+   *    3) `LabelDef` that is used internally to represent while loops
+   *  @group Trees
+   *  @template
    */
-  implicit val DefTreeTag: ClassTag[DefTree]
+  type DefTree >: Null <: DefTreeApi with SymTree with NameTree
 
   /** The API that all def trees support
    *  @group API
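
The new `RefTree` companion gives a uniform way to build and match references; a minimal sketch (runtime universe assumed):

    import scala.reflect.runtime.universe._

    val ref = RefTree(Ident(TermName("scala")), TermName("List"))  // yields Select(Ident(scala), List)
    ref match {
      case RefTree(qual, name) => println(s"reference to $name via $qual")
    }
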
@@ -335,13 +305,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type MemberDef >: Null <: DefTree with MemberDefApi
-
-  /** A tag that preserves the identity of the `MemberDef` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val MemberDefTag: ClassTag[MemberDef]
+  type MemberDef >: Null <: MemberDefApi with DefTree
 
   /** The API that all member defs support
    *  @group API
@@ -355,13 +319,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type PackageDef >: Null <: MemberDef with PackageDefApi
-
-  /** A tag that preserves the identity of the `PackageDef` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val PackageDefTag: ClassTag[PackageDef]
+  type PackageDef >: Null <: PackageDefApi with MemberDef
 
   /** The constructor/extractor for `PackageDef` instances.
    *  @group Extractors
@@ -394,13 +352,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type ImplDef >: Null <: MemberDef with ImplDefApi
-
-  /** A tag that preserves the identity of the `ImplDef` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ImplDefTag: ClassTag[ImplDef]
+  type ImplDef >: Null <: ImplDefApi with MemberDef
 
   /** The API that all impl defs support
    *  @group API
@@ -414,13 +366,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type ClassDef >: Null <: ImplDef with ClassDefApi
-
-  /** A tag that preserves the identity of the `ClassDef` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ClassDefTag: ClassTag[ClassDef]
+  type ClassDef >: Null <: ClassDefApi with ImplDef
 
   /** The constructor/extractor for `ClassDef` instances.
    *  @group Extractors
@@ -440,6 +386,10 @@ trait Trees { self: Universe =>
   abstract class ClassDefExtractor {
     def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], impl: Template): ClassDef
     def unapply(classDef: ClassDef): Option[(Modifiers, TypeName, List[TypeDef], Template)]
+
+    /** @see [[InternalApi.classDef]] */
+    @deprecated("Use `internal.classDef` instead", "2.11.0")
+    def apply(sym: Symbol, impl: Template)(implicit token: CompatToken): ClassDef = internal.classDef(sym, impl)
   }
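
A small sketch of destructuring a `ClassDef` produced by a quasiquote, assuming a runtime universe import; the binding names are illustrative:

    val ClassDef(mods, name, tparams, impl) = q"class C { def x = 2 }"
    // name == TypeName("C"); impl is the Template holding the class body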
 
   /** The API that all class defs support
@@ -465,13 +415,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type ModuleDef >: Null <: ImplDef with ModuleDefApi
-
-  /** A tag that preserves the identity of the `ModuleDef` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ModuleDefTag: ClassTag[ModuleDef]
+  type ModuleDef >: Null <: ModuleDefApi with ImplDef
 
   /** The constructor/extractor for `ModuleDef` instances.
    *  @group Extractors
@@ -491,6 +435,10 @@ trait Trees { self: Universe =>
   abstract class ModuleDefExtractor {
     def apply(mods: Modifiers, name: TermName, impl: Template): ModuleDef
     def unapply(moduleDef: ModuleDef): Option[(Modifiers, TermName, Template)]
+
+    /** @see [[InternalApi.moduleDef]] */
+    @deprecated("Use `internal.moduleDef` instead", "2.11.0")
+    def apply(sym: Symbol, impl: Template)(implicit token: CompatToken): ModuleDef = internal.moduleDef(sym, impl)
   }
 
   /** The API that all module defs support
@@ -511,20 +459,14 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type ValOrDefDef >: Null <: MemberDef with ValOrDefDefApi
-
-  /** A tag that preserves the identity of the `ValOrDefDef` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ValOrDefDefTag: ClassTag[ValOrDefDef]
+  type ValOrDefDef >: Null <: ValOrDefDefApi with MemberDef
 
   /** The API that all val defs and def defs support
    *  @group API
    */
   trait ValOrDefDefApi extends MemberDefApi { this: ValOrDefDef =>
     /** @inheritdoc */
-    def name: Name // can't be a TermName because macros can be type names.
+    def name: TermName
 
     /** The type ascribed to the definition.
      *  An empty `TypeTree` if the type hasn't been specified explicitly
@@ -548,13 +490,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type ValDef >: Null <: ValOrDefDef with ValDefApi
-
-  /** A tag that preserves the identity of the `ValDef` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ValDefTag: ClassTag[ValDef]
+  type ValDef >: Null <: ValDefApi with ValOrDefDef
 
   /** The constructor/extractor for `ValDef` instances.
    *  @group Extractors
@@ -579,6 +515,14 @@ trait Trees { self: Universe =>
   abstract class ValDefExtractor {
     def apply(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree): ValDef
     def unapply(valDef: ValDef): Option[(Modifiers, TermName, Tree, Tree)]
+
+    /** @see [[InternalApi.valDef]] */
+    @deprecated("Use `internal.valDef` instead", "2.11.0")
+    def apply(sym: Symbol, rhs: Tree)(implicit token: CompatToken): ValDef = internal.valDef(sym, rhs)
+
+    /** @see [[InternalApi.valDef]] */
+    @deprecated("Use `internal.valDef` instead", "2.11.0")
+    def apply(sym: Symbol)(implicit token: CompatToken): ValDef = internal.valDef(sym)
   }
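
A quick sketch of the `ValDef` extractor on a quasiquote-built tree, assuming a runtime universe import:

    val ValDef(mods, name, tpt, rhs) = q"val x = 2"
    // name == TermName("x"); tpt is an empty TypeTree; rhs == Literal(Constant(2))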
 
   /** The API that all val defs support
@@ -603,13 +547,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type DefDef >: Null <: ValOrDefDef with DefDefApi
-
-  /** A tag that preserves the identity of the `DefDef` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val DefDefTag: ClassTag[DefDef]
+  type DefDef >: Null <: DefDefApi with ValOrDefDef
 
   /** The constructor/extractor for `DefDef` instances.
    *  @group Extractors
@@ -626,8 +564,28 @@ trait Trees { self: Universe =>
    *  @group Extractors
    */
   abstract class DefDefExtractor {
-    def apply(mods: Modifiers, name: Name, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef
-    def unapply(defDef: DefDef): Option[(Modifiers, Name, List[TypeDef], List[List[ValDef]], Tree, Tree)]
+    def apply(mods: Modifiers, name: TermName, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef
+    def unapply(defDef: DefDef): Option[(Modifiers, TermName, List[TypeDef], List[List[ValDef]], Tree, Tree)]
+
+    /** @see [[InternalApi.defDef]] */
+    @deprecated("Use `internal.defDef` instead", "2.11.0")
+    def apply(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, mods, vparamss, rhs)
+
+    /** @see [[InternalApi.defDef]] */
+    @deprecated("Use `internal.defDef` instead", "2.11.0")
+    def apply(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, vparamss, rhs)
+
+    /** @see [[InternalApi.defDef]] */
+    @deprecated("Use `internal.defDef` instead", "2.11.0")
+    def apply(sym: Symbol, mods: Modifiers, rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, mods, rhs)
+
+    /** @see [[InternalApi.defDef]] */
+    @deprecated("Use `internal.defDef` instead", "2.11.0")
+    def apply(sym: Symbol, rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, rhs)
+
+    /** @see [[InternalApi.defDef]] */
+    @deprecated("Use `internal.defDef` instead", "2.11.0")
+    def apply(sym: Symbol, rhs: List[List[Symbol]] => Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, rhs)
   }
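
A sketch of the `DefDef` extractor, whose `name` is now statically a `TermName`; assumes a runtime universe import:

    val DefDef(mods, name, tparams, vparamss, tpt, rhs) = q"def f(x: Int): Int = x"
    // name == TermName("f"); vparamss holds one parameter list with the ValDef for x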
 
   /** The API that all def defs support
@@ -638,7 +596,7 @@ trait Trees { self: Universe =>
     def mods: Modifiers
 
     /** @inheritdoc */
-    def name: Name
+    def name: TermName
 
     /** The type parameters of the method. */
     def tparams: List[TypeDef]
@@ -658,13 +616,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type TypeDef >: Null <: MemberDef with TypeDefApi
-
-  /** A tag that preserves the identity of the `TypeDef` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TypeDefTag: ClassTag[TypeDef]
+  type TypeDef >: Null <: TypeDefApi with MemberDef
 
   /** The constructor/extractor for `TypeDef` instances.
    *  @group Extractors
@@ -686,6 +638,14 @@ trait Trees { self: Universe =>
   abstract class TypeDefExtractor {
     def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], rhs: Tree): TypeDef
     def unapply(typeDef: TypeDef): Option[(Modifiers, TypeName, List[TypeDef], Tree)]
+
+    /** @see [[InternalApi.typeDef]] */
+    @deprecated("Use `internal.typeDef` instead", "2.11.0")
+    def apply(sym: Symbol, rhs: Tree)(implicit token: CompatToken): TypeDef = internal.typeDef(sym, rhs)
+
+    /** @see [[InternalApi.typeDef]] */
+    @deprecated("Use `internal.typeDef` instead", "2.11.0")
+    def apply(sym: Symbol)(implicit token: CompatToken): TypeDef = internal.typeDef(sym)
   }
 
   /** The API that all type defs support
@@ -723,13 +683,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type LabelDef >: Null <: DefTree with TermTree with LabelDefApi
-
-  /** A tag that preserves the identity of the `LabelDef` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val LabelDefTag: ClassTag[LabelDef]
+  type LabelDef >: Null <: LabelDefApi with DefTree with TermTree
 
   /** The constructor/extractor for `LabelDef` instances.
    *  @group Extractors
@@ -752,6 +706,10 @@ trait Trees { self: Universe =>
   abstract class LabelDefExtractor {
     def apply(name: TermName, params: List[Ident], rhs: Tree): LabelDef
     def unapply(labelDef: LabelDef): Option[(TermName, List[Ident], Tree)]
+
+    /** @see [[InternalApi.labelDef]] */
+    @deprecated("Use `internal.labelDef` instead", "2.11.0")
+    def apply(sym: Symbol, params: List[Symbol], rhs: Tree)(implicit token: CompatToken): LabelDef = internal.labelDef(sym, params, rhs)
   }
 
   /** The API that all label defs support
@@ -772,7 +730,7 @@ trait Trees { self: Universe =>
     def rhs: Tree
   }
 
-  /** Import selector
+  /** Import selector (not a tree, but a component of the `Import` tree)
    *
    *  Representation of an imported name, its optional rename, and their optional positions
    *
@@ -787,12 +745,6 @@ trait Trees { self: Universe =>
    */
   type ImportSelector >: Null <: AnyRef with ImportSelectorApi
 
-  /** A tag that preserves the identity of the `ImportSelector` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ImportSelectorTag: ClassTag[ImportSelector]
-
   /** The constructor/extractor for `ImportSelector` instances.
    *  @group Extractors
    */
@@ -837,13 +789,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Import >: Null <: SymTree with ImportApi
-
-  /** A tag that preserves the identity of the `Import` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ImportTag: ClassTag[Import]
+  type Import >: Null <: ImportApi with SymTree
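
A sketch of inspecting an `Import` tree and its selectors, assuming a runtime universe import:

    val Import(expr, selectors) = q"import scala.util.{Try => T, _}"
    // expr refers to scala.util; selectors.head.name == TermName("Try"),
    // selectors.head.rename == TermName("T"); the second selector is the wildcard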
 
   /** The constructor/extractor for `Import` instances.
    *  @group Extractors
@@ -895,13 +841,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Template >: Null <: SymTree with TemplateApi
-
-  /** A tag that preserves the identity of the `Template` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TemplateTag: ClassTag[Template]
+  type Template >: Null <: TemplateApi with SymTree
 
   /** The constructor/extractor for `Template` instances.
    *  @group Extractors
@@ -940,7 +880,7 @@ trait Trees { self: Universe =>
     def parents: List[Tree]
 
     /** Self type of the template.
-     *  Is equal to `emptyValDef` if the self type is not specified.
+     *  Is equal to `noSelfType` if the self type is not specified.
      */
     def self: ValDef
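
A tiny sketch showing that a template written without an explicit self type carries `noSelfType`, assuming a runtime universe import:

    val ClassDef(_, _, _, Template(parents, self, body)) = q"class C"
    self == noSelfType   // true: no self type was written down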
 
@@ -953,13 +893,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Block >: Null <: TermTree with BlockApi
-
-  /** A tag that preserves the identity of the `Block` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val BlockTag: ClassTag[Block]
+  type Block >: Null <: BlockApi with TermTree
 
   /** The constructor/extractor for `Block` instances.
    *  @group Extractors
@@ -998,13 +932,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type CaseDef >: Null <: AnyRef with Tree with CaseDefApi
-
-  /** A tag that preserves the identity of the `CaseDef` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val CaseDefTag: ClassTag[CaseDef]
+  type CaseDef >: Null <: CaseDefApi with Tree
 
   /** The constructor/extractor for `CaseDef` instances.
    *  @group Extractors
@@ -1017,7 +945,7 @@ trait Trees { self: Universe =>
    *    `case` pat `if` guard => body
    *
    *  If the guard is not present, the `guard` is set to `EmptyTree`.
-   *  If the body is not specified, the `body` is set to `Literal(Constant())`
+   *  If the body is not specified, the `body` is set to `Literal(Constant(()))`
    *  @group Extractors
    */
   abstract class CaseDefExtractor {
@@ -1038,7 +966,7 @@ trait Trees { self: Universe =>
     def guard: Tree
 
     /** The body of the pattern matching clause.
-     *  Is equal to `Literal(Constant())` if the body is not specified.
+     *  Is equal to `Literal(Constant(()))` if the body is not specified.
      */
     def body: Tree
   }
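
A sketch of the `CaseDef` shape using the `cq` interpolator, assuming a runtime universe import:

    val CaseDef(pat, guard, body) = cq"x: Int => x + 1"
    // guard == EmptyTree because no `if` clause was written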
@@ -1051,13 +979,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Alternative >: Null <: TermTree with AlternativeApi
-
-  /** A tag that preserves the identity of the `Alternative` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val AlternativeTag: ClassTag[Alternative]
+  type Alternative >: Null <: AlternativeApi with TermTree
 
   /** The constructor/extractor for `Alternative` instances.
    *  @group Extractors
@@ -1089,13 +1011,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Star >: Null <: TermTree with StarApi
-
-  /** A tag that preserves the identity of the `Star` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val StarTag: ClassTag[Star]
+  type Star >: Null <: StarApi with TermTree
 
   /** The constructor/extractor for `Star` instances.
    *  @group Extractors
@@ -1130,13 +1046,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Bind >: Null <: DefTree with BindApi
-
-  /** A tag that preserves the identity of the `Bind` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val BindTag: ClassTag[Bind]
+  type Bind >: Null <: BindApi with DefTree
 
   /** The constructor/extractor for `Bind` instances.
    *  @group Extractors
@@ -1187,7 +1097,7 @@ trait Trees { self: Universe =>
    *          UnApply(
    *            // a dummy node that carries the type of unapplication to patmat
    *            // the <unapply-selector> here doesn't have an underlying symbol
-   *            // it only has a type assigned, therefore after `resetAllAttrs` this tree is no longer typeable
+   *            // it only has a type assigned, therefore after `untypecheck` this tree is no longer typeable
    *            Apply(Select(Ident(Foo), newTermName("unapply")), List(Ident(newTermName("<unapply-selector>")))),
    *            // arguments of the unapply => nothing synthetic here
    *            List(Bind(newTermName("x"), Ident(nme.WILDCARD)))),
@@ -1199,13 +1109,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type UnApply >: Null <: TermTree with UnApplyApi
-
-  /** A tag that preserves the identity of the `UnApply` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val UnApplyTag: ClassTag[UnApply]
+  type UnApply >: Null <: UnApplyApi with TermTree
 
   /** The constructor/extractor for `UnApply` instances.
    *  @group Extractors
@@ -1241,13 +1145,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Function >: Null <: TermTree with SymTree with FunctionApi
-
-  /** A tag that preserves the identity of the `Function` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val FunctionTag: ClassTag[Function]
+  type Function >: Null <: FunctionApi with TermTree with SymTree
 
   /** The constructor/extractor for `Function` instances.
    *  @group Extractors
@@ -1285,13 +1183,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Assign >: Null <: TermTree with AssignApi
-
-  /** A tag that preserves the identity of the `Assign` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val AssignTag: ClassTag[Assign]
+  type Assign >: Null <: AssignApi with TermTree
 
   /** The constructor/extractor for `Assign` instances.
    *  @group Extractors
@@ -1327,13 +1219,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type AssignOrNamedArg >: Null <: TermTree with AssignOrNamedArgApi
-
-  /** A tag that preserves the identity of the `AssignOrNamedArg` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val AssignOrNamedArgTag: ClassTag[AssignOrNamedArg]
+  type AssignOrNamedArg >: Null <: AssignOrNamedArgApi with TermTree
 
   /** The constructor/extractor for `AssignOrNamedArg` instances.
    *  @group Extractors
@@ -1374,13 +1260,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type If >: Null <: TermTree with IfApi
-
-  /** A tag that preserves the identity of the `If` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val IfTag: ClassTag[If]
+  type If >: Null <: IfApi with TermTree
 
   /** The constructor/extractor for `If` instances.
    *  @group Extractors
@@ -1431,13 +1311,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Match >: Null <: TermTree with MatchApi
-
-  /** A tag that preserves the identity of the `Match` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val MatchTag: ClassTag[Match]
+  type Match >: Null <: MatchApi with TermTree
 
   /** The constructor/extractor for `Match` instances.
    *  @group Extractors
@@ -1472,13 +1346,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Return >: Null <: TermTree with SymTree with ReturnApi
-
-  /** A tag that preserves the identity of the `Return` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ReturnTag: ClassTag[Return]
+  type Return >: Null <: ReturnApi with SymTree with TermTree
 
   /** The constructor/extractor for `Return` instances.
    *  @group Extractors
@@ -1510,13 +1378,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Try >: Null <: TermTree with TryApi
-
-  /** A tag that preserves the identity of the `Try` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TryTag: ClassTag[Try]
+  type Try >: Null <: TryApi with TermTree
 
   /** The constructor/extractor for `Try` instances.
    *  @group Extractors
@@ -1554,13 +1416,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Throw >: Null <: TermTree with ThrowApi
-
-  /** A tag that preserves the identity of the `Throw` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ThrowTag: ClassTag[Throw]
+  type Throw >: Null <: ThrowApi with TermTree
 
   /** The constructor/extractor for `Throw` instances.
    *  @group Extractors
@@ -1590,13 +1446,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type New >: Null <: TermTree with NewApi
-
-  /** A tag that preserves the identity of the `New` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val NewTag: ClassTag[New]
+  type New >: Null <: NewApi with TermTree
 
   /** The constructor/extractor for `New` instances.
    *  @group Extractors
@@ -1646,13 +1496,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Typed >: Null <: TermTree with TypedApi
-
-  /** A tag that preserves the identity of the `Typed` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TypedTag: ClassTag[Typed]
+  type Typed >: Null <: TypedApi with TermTree
 
   /** The constructor/extractor for `Typed` instances.
    *  @group Extractors
@@ -1685,13 +1529,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type GenericApply >: Null <: TermTree with GenericApplyApi
-
-  /** A tag that preserves the identity of the `GenericApply` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val GenericApplyTag: ClassTag[GenericApply]
+  type GenericApply >: Null <: GenericApplyApi with TermTree
 
   /** The API that all applies support
    *  @group API
@@ -1712,13 +1550,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type TypeApply >: Null <: GenericApply with TypeApplyApi
-
-  /** A tag that preserves the identity of the `TypeApply` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TypeApplyTag: ClassTag[TypeApply]
+  type TypeApply >: Null <: TypeApplyApi with GenericApply
 
   /** The constructor/extractor for `TypeApply` instances.
    *  @group Extractors
@@ -1756,13 +1588,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Apply >: Null <: GenericApply with ApplyApi
-
-  /** A tag that preserves the identity of the `Apply` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ApplyTag: ClassTag[Apply]
+  type Apply >: Null <: ApplyApi with GenericApply
 
   /** The constructor/extractor for `Apply` instances.
    *  @group Extractors
@@ -1799,13 +1625,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Super >: Null <: TermTree with SuperApi
-
-  /** A tag that preserves the identity of the `Super` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val SuperTag: ClassTag[Super]
+  type Super >: Null <: SuperApi with TermTree
 
   /** The constructor/extractor for `Super` instances.
    *  @group Extractors
@@ -1851,13 +1671,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type This >: Null <: TermTree with SymTree with ThisApi
-
-  /** A tag that preserves the identity of the `This` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ThisTag: ClassTag[This]
+  type This >: Null <: ThisApi with TermTree with SymTree
 
   /** The constructor/extractor for `This` instances.
    *  @group Extractors
@@ -1892,13 +1706,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Select >: Null <: RefTree with SelectApi
-
-  /** A tag that preserves the identity of the `Select` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val SelectTag: ClassTag[Select]
+  type Select >: Null <: SelectApi with RefTree
 
   /** The constructor/extractor for `Select` instances.
    *  @group Extractors
@@ -1937,13 +1745,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Ident >: Null <: RefTree with IdentApi
-
-  /** A tag that preserves the identity of the `Ident` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val IdentTag: ClassTag[Ident]
+  type Ident >: Null <: IdentApi with RefTree
 
   /** The constructor/extractor for `Ident` instances.
    *  @group Extractors
@@ -1968,77 +1770,18 @@ trait Trees { self: Universe =>
    *  @group API
    */
   trait IdentApi extends RefTreeApi { this: Ident =>
+    /** Was this ident created from a backquoted identifier? */
+    def isBackquoted: Boolean
+
     /** @inheritdoc */
     def name: Name
   }
 
-  /** Marks underlying reference to id as boxed.
-   *
-   *  <b>Precondition:<\b> id must refer to a captured variable
-   *  A reference such marked will refer to the boxed entity, no dereferencing
-   *  with `.elem` is done on it.
-   *  This tree node can be emitted by macros such as reify that call referenceCapturedVariable.
-   *  It is eliminated in LambdaLift, where the boxing conversion takes place.
-   *  @group Trees
-   *  @template
-   */
-  type ReferenceToBoxed >: Null <: TermTree with ReferenceToBoxedApi
-
-  /** A tag that preserves the identity of the `ReferenceToBoxed` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ReferenceToBoxedTag: ClassTag[ReferenceToBoxed]
-
-  /** The constructor/extractor for `ReferenceToBoxed` instances.
-   *  @group Extractors
-   */
-  val ReferenceToBoxed: ReferenceToBoxedExtractor
-
-  /** An extractor class to create and pattern match with syntax `ReferenceToBoxed(ident)`.
-   *  This AST node does not have direct correspondence to Scala code,
-   *  and is emitted by macros to reference capture vars directly without going through `elem`.
-   *
-   *  For example:
-   *
-   *    var x = ...
-   *    fun { x }
-   *
-   *  Will emit:
-   *
-   *    Ident(x)
-   *
-   *  Which gets transformed to:
-   *
-   *    Select(Ident(x), "elem")
-   *
-   *  If `ReferenceToBoxed` were used instead of Ident, no transformation would be performed.
-   *  @group Extractors
-   */
-  abstract class ReferenceToBoxedExtractor {
-    def apply(ident: Ident): ReferenceToBoxed
-    def unapply(referenceToBoxed: ReferenceToBoxed): Option[Ident]
-  }
-
-  /** The API that all references support
-   *  @group API
-   */
-  trait ReferenceToBoxedApi extends TermTreeApi { this: ReferenceToBoxed =>
-    /** The underlying reference. */
-    def ident: Tree
-  }
-
   /** Literal
    *  @group Trees
    *  @template
    */
-  type Literal >: Null <: TermTree with LiteralApi
-
-  /** A tag that preserves the identity of the `Literal` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val LiteralTag: ClassTag[Literal]
+  type Literal >: Null <: LiteralApi with TermTree
 
   /** The constructor/extractor for `Literal` instances.
    *  @group Extractors
@@ -2071,13 +1814,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type Annotated >: Null <: AnyRef with Tree with AnnotatedApi
-
-  /** A tag that preserves the identity of the `Annotated` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val AnnotatedTag: ClassTag[Annotated]
+  type Annotated >: Null <: AnnotatedApi with Tree
 
   /** The constructor/extractor for `Annotated` instances.
    *  @group Extractors
@@ -2111,13 +1848,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type SingletonTypeTree >: Null <: TypTree with SingletonTypeTreeApi
-
-  /** A tag that preserves the identity of the `SingletonTypeTree` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val SingletonTypeTreeTag: ClassTag[SingletonTypeTree]
+  type SingletonTypeTree >: Null <: SingletonTypeTreeApi with TypTree
 
   /** The constructor/extractor for `SingletonTypeTree` instances.
    *  @group Extractors
@@ -2147,13 +1878,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type SelectFromTypeTree >: Null <: TypTree with RefTree with SelectFromTypeTreeApi
-
-  /** A tag that preserves the identity of the `SelectFromTypeTree` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val SelectFromTypeTreeTag: ClassTag[SelectFromTypeTree]
+  type SelectFromTypeTree >: Null <: SelectFromTypeTreeApi with TypTree with RefTree
 
   /** The constructor/extractor for `SelectFromTypeTree` instances.
    *  @group Extractors
@@ -2194,13 +1919,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type CompoundTypeTree >: Null <: TypTree with CompoundTypeTreeApi
-
-  /** A tag that preserves the identity of the `CompoundTypeTree` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val CompoundTypeTreeTag: ClassTag[CompoundTypeTree]
+  type CompoundTypeTree >: Null <: CompoundTypeTreeApi with TypTree
 
   /** The constructor/extractor for `CompoundTypeTree` instances.
    *  @group Extractors
@@ -2230,13 +1949,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type AppliedTypeTree >: Null <: TypTree with AppliedTypeTreeApi
-
-  /** A tag that preserves the identity of the `AppliedTypeTree` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val AppliedTypeTreeTag: ClassTag[AppliedTypeTree]
+  type AppliedTypeTree >: Null <: AppliedTypeTreeApi with TypTree
 
   /** The constructor/extractor for `AppliedTypeTree` instances.
    *  @group Extractors
@@ -2278,13 +1991,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type TypeBoundsTree >: Null <: TypTree with TypeBoundsTreeApi
-
-  /** A tag that preserves the identity of the `TypeBoundsTree` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TypeBoundsTreeTag: ClassTag[TypeBoundsTree]
+  type TypeBoundsTree >: Null <: TypeBoundsTreeApi with TypTree
 
   /** The constructor/extractor for `TypeBoundsTree` instances.
    *  @group Extractors
@@ -2321,13 +2028,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type ExistentialTypeTree >: Null <: TypTree with ExistentialTypeTreeApi
-
-  /** A tag that preserves the identity of the `ExistentialTypeTree` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ExistentialTypeTreeTag: ClassTag[ExistentialTypeTree]
+  type ExistentialTypeTree >: Null <: ExistentialTypeTreeApi with TypTree
 
   /** The constructor/extractor for `ExistentialTypeTree` instances.
    *  @group Extractors
@@ -2341,8 +2042,8 @@ trait Trees { self: Universe =>
    *  @group Extractors
    */
   abstract class ExistentialTypeTreeExtractor {
-    def apply(tpt: Tree, whereClauses: List[Tree]): ExistentialTypeTree
-    def unapply(existentialTypeTree: ExistentialTypeTree): Option[(Tree, List[Tree])]
+    def apply(tpt: Tree, whereClauses: List[MemberDef]): ExistentialTypeTree
+    def unapply(existentialTypeTree: ExistentialTypeTree): Option[(Tree, List[MemberDef])]
   }
 
   /** The API that all existential type trees support
@@ -2352,8 +2053,12 @@ trait Trees { self: Universe =>
     /** The underlying type of the existential type. */
     def tpt: Tree
 
-    /** The clauses of the definition of the existential type. */
-    def whereClauses: List[Tree]
+    /** The clauses of the definition of the existential type.
+     *  Elements are one of the following:
+     *    1) TypeDef with TypeBoundsTree right-hand side
+     *    2) ValDef with empty right-hand side
+     */
+    def whereClauses: List[MemberDef]
   }
 
   /** A synthetic tree holding an arbitrary type.  Not to be confused with
@@ -2364,13 +2069,7 @@ trait Trees { self: Universe =>
    *  @group Trees
    *  @template
    */
-  type TypeTree >: Null <: TypTree with TypeTreeApi
-
-  /** A tag that preserves the identity of the `TypeTree` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TypeTreeTag: ClassTag[TypeTree]
+  type TypeTree >: Null <: TypeTreeApi with TypTree
 
   /** The constructor/extractor for `TypeTree` instances.
    *  @group Extractors
@@ -2403,93 +2102,33 @@ trait Trees { self: Universe =>
    *  no definition of a self value of self type.
    *  @group Trees
    */
-  val emptyValDef: ValDef
-
-// ---------------------- factories ----------------------------------------------
-
-  /** A factory method for `ClassDef` nodes.
-   *  @group Factories
-   */
-  @deprecated("Use the canonical ClassDef constructor to create a class and then initialize its position and symbol manually", "2.10.1")
-  def ClassDef(sym: Symbol, impl: Template): ClassDef
-
-  /** A factory method for `ModuleDef` nodes.
-   *  @group Factories
-   */
-  @deprecated("Use the canonical ModuleDef constructor to create an object and then initialize its position and symbol manually", "2.10.1")
-  def ModuleDef(sym: Symbol, impl: Template): ModuleDef
-
-  /** A factory method for `ValDef` nodes.
-   *  @group Factories
-   */
-  @deprecated("Use the canonical ValDef constructor to create a val and then initialize its position and symbol manually", "2.10.1")
-  def ValDef(sym: Symbol, rhs: Tree): ValDef
-
-  /** A factory method for `ValDef` nodes.
-   *  @group Factories
-   */
-  @deprecated("Use the canonical ValDef constructor to create a val with an empty right-hand side and then initialize its position and symbol manually", "2.10.1")
-  def ValDef(sym: Symbol): ValDef
+  val noSelfType: ValDef
 
-  /** A factory method for `ValDef` nodes.
-   *  @group Factories
-   */
-  @deprecated("Use the canonical DefDef constructor to create a method and then initialize its position and symbol manually", "2.10.1")
-  def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef
-
-  /** A factory method for `ValDef` nodes.
-   *  @group Factories
-   */
-  @deprecated("Use the canonical DefDef constructor to create a method and then initialize its position and symbol manually", "2.10.1")
-  def DefDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef
-
-  /** A factory method for `ValDef` nodes.
-   *  @group Factories
-   */
-  @deprecated("Use the canonical DefDef constructor to create a method and then initialize its position and symbol manually", "2.10.1")
-  def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef
-
-  /** A factory method for `ValDef` nodes.
-   *  @group Factories
-   */
-  @deprecated("Use the canonical DefDef constructor to create a method and then initialize its position and symbol manually", "2.10.1")
-  def DefDef(sym: Symbol, rhs: Tree): DefDef
-
-  /** A factory method for `ValDef` nodes.
-   *  @group Factories
-   */
-  @deprecated("Use the canonical DefDef constructor to create a method and then initialize its position and symbol manually", "2.10.1")
-  def DefDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef
-
-  /** A factory method for `TypeDef` nodes.
-   *  @group Factories
-   */
-  @deprecated("Use the canonical TypeDef constructor to create a type alias and then initialize its position and symbol manually", "2.10.1")
-  def TypeDef(sym: Symbol, rhs: Tree): TypeDef
+  @deprecated("Use `noSelfType` instead", "2.11.0")
+  val emptyValDef: ValDef
 
-  /** A factory method for `TypeDef` nodes.
-   *  @group Factories
+  /** An empty superclass constructor call corresponding to:
+   *    super.<init>()
+   *  This is used as a placeholder in the primary constructor body in class templates
+   *  to denote the insertion point of a call to superclass constructor after the typechecker
+   *  figures out the superclass of a given template.
+   *  @group Trees
    */
-  @deprecated("Use the canonical TypeDef constructor to create an abstract type or type parameter and then initialize its position and symbol manually", "2.10.1")
-  def TypeDef(sym: Symbol): TypeDef
+  val pendingSuperCall: Apply
 
-  /** A factory method for `LabelDef` nodes.
-   *  @group Factories
-   */
-  @deprecated("Use the canonical LabelDef constructor to create a label and then initialize its position and symbol manually", "2.10.1")
-  def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef
+// ---------------------- factories ----------------------------------------------
 
   /** A factory method for `Block` nodes.
    *  Flattens directly nested blocks.
    *  @group Factories
    */
-  @deprecated("Use the canonical Block constructor, explicitly specifying its expression if necessary. Flatten directly nested blocks manually if needed", "2.10.1")
+  @deprecated("Use q\"{..$stats}\" instead. Flatten directly nested blocks manually if needed", "2.10.1")
   def Block(stats: Tree*): Block
 
   /** A factory method for `CaseDef` nodes.
    *  @group Factories
    */
-  @deprecated("Use the canonical CaseDef constructor passing EmptyTree for guard", "2.10.1")
+  @deprecated("Use cq\"$pat => $body\" instead", "2.10.1")
   def CaseDef(pat: Tree, body: Tree): CaseDef
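
A rough sketch of the quasiquote-based replacements the deprecation messages point to, assuming a runtime universe import; `stats` is illustrative:

    val stats  = List(q"val x = 1", q"x + 1")
    val block  = q"{ ..$stats }"            // instead of the deprecated Block(stats: _*)
    val clause = cq"e: Exception => ()"     // instead of the deprecated CaseDef(pat, body)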
 
   /** A factory method for `Bind` nodes.
@@ -2501,50 +2140,50 @@ trait Trees { self: Universe =>
   /** A factory method for `Try` nodes.
    *  @group Factories
    */
-  @deprecated("Use canonical CaseDef constructors to to create exception catching expressions and then wrap them in Try", "2.10.1")
+  @deprecated("Convert cases into casedefs and use q\"try $body catch { case ..$newcases }\" instead", "2.10.1")
   def Try(body: Tree, cases: (Tree, Tree)*): Try
 
   /** A factory method for `Throw` nodes.
    *  @group Factories
    */
-  @deprecated("Use the canonical New constructor to create an object instantiation expression and then wrap it in Throw", "2.10.1")
+  @deprecated("Use q\"throw new $tpe(..$args)\" instead", "2.10.1")
   def Throw(tpe: Type, args: Tree*): Throw
 
   /** Factory method for object creation `new tpt(args_1)...(args_n)`
    *  A `New(t, as)` is expanded to: `(new t).<init>(as)`
    *  @group Factories
    */
-  @deprecated("Use Apply(...Apply(Select(New(tpt), nme.CONSTRUCTOR), args1)...argsN) instead", "2.10.1")
+  @deprecated("Use q\"new $tpt(...$argss)\" instead", "2.10.1")
   def New(tpt: Tree, argss: List[List[Tree]]): Tree
 
   /** 0-1 argument list new, based on a type.
    *  @group Factories
    */
-  @deprecated("Use New(TypeTree(tpe), args.toList) instead", "2.10.1")
+  @deprecated("Use q\"new $tpe(..$args)\" instead", "2.10.1")
   def New(tpe: Type, args: Tree*): Tree
 
   /** 0-1 argument list new, based on a symbol.
    *  @group Factories
    */
-  @deprecated("Use New(sym.toType, args) instead", "2.10.1")
+  @deprecated("Use q\"new ${sym.toType}(..$args)\" instead", "2.10.1")
   def New(sym: Symbol, args: Tree*): Tree
 
   /** A factory method for `Apply` nodes.
    *  @group Factories
    */
-  @deprecated("Use Apply(Ident(sym), args.toList) instead", "2.10.1")
+  @deprecated("Use q\"$sym(..$args)\" instead", "2.10.1")
   def Apply(sym: Symbol, args: Tree*): Tree
 
   /** 0-1 argument list new, based on a type tree.
    *  @group Factories
    */
-  @deprecated("Use Apply(Select(New(tpt), nme.CONSTRUCTOR), args) instead", "2.10.1")
+  @deprecated("Use q\"new $tpt(..$args)\" instead", "2.10.1")
   def ApplyConstructor(tpt: Tree, args: List[Tree]): Tree
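
A sketch of building an instantiation with quasiquotes, as the deprecation messages above suggest; assumes a runtime universe import, and the chosen class is just an example:

    val tpt  = tq"java.lang.StringBuilder"
    val args = List(Literal(Constant("hi")))
    val tree = q"new $tpt(..$args)"         // instead of ApplyConstructor(tpt, args)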
 
   /** A factory method for `Super` nodes.
    *  @group Factories
    */
-  @deprecated("Use Super(This(sym), mix) instead", "2.10.1")
+  @deprecated("Use q\"$sym.super[$mix].x\".qualifier instead", "2.10.1")
   def Super(sym: Symbol, mix: TypeName): Tree
 
   /** A factory method for `This` nodes.
@@ -2556,7 +2195,7 @@ trait Trees { self: Universe =>
    *  The string `name` argument is assumed to represent a [[scala.reflect.api.Names#TermName `TermName`]].
    *  @group Factories
    */
-  @deprecated("Use Select(tree, newTermName(name)) instead", "2.10.1")
+  @deprecated("Use Select(tree, TermName(name)) instead", "2.10.1")
   def Select(qualifier: Tree, name: String): Select
 
   /** A factory method for `Select` nodes.
@@ -2567,7 +2206,7 @@ trait Trees { self: Universe =>
   /** A factory method for `Ident` nodes.
    *  @group Factories
    */
-  @deprecated("Use Ident(newTermName(name)) instead", "2.10.1")
+  @deprecated("Use Ident(TermName(name)) instead", "2.10.1")
   def Ident(name: String): Ident
 
   /** A factory method for `Ident` nodes.
@@ -2586,7 +2225,7 @@ trait Trees { self: Universe =>
    *  @template
    *  @group Copying
    */
-  type TreeCopier <: TreeCopierOps
+  type TreeCopier >: Null <: AnyRef with TreeCopierOps
 
   /** The standard (lazy) tree copier.
    *  @group Copying
@@ -2762,6 +2401,11 @@ trait Trees { self: Universe =>
      */
     def Ident(tree: Tree, name: Name): Ident
 
+    /** Creates a `RefTree` node from the given components, having a given `tree` as a prototype.
+     *  Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+     */
+    def RefTree(tree: Tree, qualifier: Tree, selector: Name): RefTree
+
     /** Creates a `ReferenceToBoxed` node from the given components, having a given `tree` as a prototype.
      *  Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
      */
@@ -2810,7 +2454,7 @@ trait Trees { self: Universe =>
     /** Creates a `ExistentialTypeTree` node from the given components, having a given `tree` as a prototype.
      *  Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
      */
-    def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]): ExistentialTypeTree
+    def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[MemberDef]): ExistentialTypeTree
   }
 
 // ---------------------- traversing and transforming ------------------------------
@@ -2821,18 +2465,32 @@ trait Trees { self: Universe =>
   class Traverser {
     protected[scala] var currentOwner: Symbol = rootMirror.RootClass
 
+    /** Traverse something which Trees contain, but which isn't a Tree itself. */
+    def traverseName(name: Name): Unit                    = ()
+    def traverseConstant(c: Constant): Unit               = ()
+    def traverseImportSelector(sel: ImportSelector): Unit = ()
+    def traverseModifiers(mods: Modifiers): Unit          = traverseAnnotations(mods.annotations)
+
     /** Traverses a single tree. */
-    def traverse(tree: Tree): Unit = itraverse(this, tree)
+    def traverse(tree: Tree): Unit              = itraverse(this, tree)
+    def traversePattern(pat: Tree): Unit        = traverse(pat)
+    def traverseGuard(guard: Tree): Unit        = traverse(guard)
+    def traverseTypeAscription(tpt: Tree): Unit = traverse(tpt)
+    // Special handling of noSelfType necessary for backward compat: existing
+    // traversers break down when they see the unexpected tree.
+    def traverseSelfType(self: ValDef): Unit    = if (self ne noSelfType) traverse(self)
 
     /** Traverses a list of trees. */
-    def traverseTrees(trees: List[Tree]) {
-      trees foreach traverse
-    }
+    def traverseTrees(trees: List[Tree]): Unit          = trees foreach traverse
+    def traverseTypeArgs(args: List[Tree]): Unit        = traverseTrees(args)
+    def traverseParents(parents: List[Tree]): Unit      = traverseTrees(parents)
+    def traverseCases(cases: List[CaseDef]): Unit       = traverseTrees(cases)
+    def traverseAnnotations(annots: List[Tree]): Unit   = traverseTrees(annots)
 
     /** Traverses a list of lists of trees. */
-    def traverseTreess(treess: List[List[Tree]]) {
-      treess foreach traverseTrees
-    }
+    def traverseTreess(treess: List[List[Tree]]): Unit    = treess foreach traverseTrees
+    def traverseParams(params: List[Tree]): Unit          = traverseTrees(params)
+    def traverseParamss(vparamss: List[List[Tree]]): Unit = vparamss foreach traverseParams
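
A short sketch of a custom `Traverser`, assuming a runtime universe import; `IdentCounter` is an illustrative name:

    class IdentCounter extends Traverser {
      var count = 0
      override def traverse(tree: Tree): Unit = tree match {
        case Ident(_) => count += 1; super.traverse(tree)
        case _        => super.traverse(tree)
      }
    }

    val counter = new IdentCounter
    counter.traverse(q"x + y + x")
    counter.count   // 3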
 
     /** Traverses a list of trees with a given owner symbol. */
     def traverseStats(stats: List[Tree], exprOwner: Symbol) {
@@ -2897,7 +2555,8 @@ trait Trees { self: Universe =>
     def transform(tree: Tree): Tree = itransform(this, tree)
 
     /** Transforms a list of trees. */
-    def transformTrees(trees: List[Tree]): List[Tree] = trees mapConserve (transform(_))
+    def transformTrees(trees: List[Tree]): List[Tree] =
+      if (trees.isEmpty) Nil else trees mapConserve transform
 
     /** Transforms a `Template`. */
     def transformTemplate(tree: Template): Template =
@@ -2907,7 +2566,8 @@ trait Trees { self: Universe =>
       trees mapConserve (tree => transform(tree).asInstanceOf[TypeDef])
     /** Transforms a `ValDef`. */
     def transformValDef(tree: ValDef): ValDef =
-      if (tree.isEmpty) tree else transform(tree).asInstanceOf[ValDef]
+      if (tree eq noSelfType) tree
+      else transform(tree).asInstanceOf[ValDef]
     /** Transforms a list of `ValDef` nodes. */
     def transformValDefs(trees: List[ValDef]): List[ValDef] =
       trees mapConserve (transformValDef(_))
@@ -2915,6 +2575,8 @@ trait Trees { self: Universe =>
     def transformValDefss(treess: List[List[ValDef]]): List[List[ValDef]] =
       treess mapConserve (transformValDefs(_))
     /** Transforms a list of `CaseDef` nodes. */
+    def transformMemberDefs(trees: List[MemberDef]): List[MemberDef] =
+      trees mapConserve (tree => transform(tree).asInstanceOf[MemberDef])
     def transformCaseDefs(trees: List[CaseDef]): List[CaseDef] =
       trees mapConserve (tree => transform(tree).asInstanceOf[CaseDef])
     /** Transforms a list of `Ident` nodes. */
@@ -2926,8 +2588,10 @@ trait Trees { self: Universe =>
         if (exprOwner != currentOwner && stat.isTerm) atOwner(exprOwner)(transform(stat))
         else transform(stat)) filter (EmptyTree != _)
     /** Transforms `Modifiers`. */
-    def transformModifiers(mods: Modifiers): Modifiers =
-      mods.mapAnnotations(transformTrees)
+    def transformModifiers(mods: Modifiers): Modifiers = {
+      if (mods.annotations.isEmpty) mods
+      else mods mapAnnotations transformTrees
+    }
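
A short sketch of a custom `Transformer`, assuming a runtime universe import; the rename is purely illustrative:

    val renameX = new Transformer {
      override def transform(tree: Tree): Tree = tree match {
        case Ident(TermName("x")) => Ident(TermName("y"))
        case _                    => super.transform(tree)
      }
    }
    renameX.transform(q"x + 1")   // equivalent to q"y + 1"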
 
     /** Transforms a tree with a given owner symbol. */
     def atOwner[A](owner: Symbol)(trans: => A): A = {
@@ -2951,17 +2615,11 @@ trait Trees { self: Universe =>
    */
   protected def xtransform(transformer: Transformer, tree: Tree): Tree = throw new MatchError(tree)
 
-  /** The type of tree modifiers.
+  /** The type of tree modifiers (not a tree, but rather part of DefTrees).
    *  @group Traversal
    */
   type Modifiers >: Null <: AnyRef with ModifiersApi
 
-  /** A tag that preserves the identity of the `Modifiers` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Traversal
-   */
-  implicit val ModifiersTag: ClassTag[Modifiers]
-
   /** The API that all Modifiers support
    *  @group API
    */
@@ -2993,15 +2651,19 @@ trait Trees { self: Universe =>
   /** The constructor/extractor for `Modifiers` instances.
    *  @group Traversal
    */
-  val Modifiers: ModifiersCreator
+  val Modifiers: ModifiersExtractor
+
+  @deprecated("Use ModifiersExtractor instead", "2.11.0")
+  type ModifiersCreator = ModifiersExtractor
 
   /** An extractor class to create and pattern match with syntax `Modifiers(flags, privateWithin, annotations)`.
    *  Modifiers encapsulate flags, visibility annotations and Scala annotations for member definitions.
    *  @group Traversal
    */
-  abstract class ModifiersCreator {
+  abstract class ModifiersExtractor {
     def apply(): Modifiers = Modifiers(NoFlags, tpnme.EMPTY, List())
     def apply(flags: FlagSet, privateWithin: Name, annotations: List[Tree]): Modifiers
+    def unapply(mods: Modifiers): Option[(FlagSet, Name, List[Tree])]
   }
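
A sketch of the new `Modifiers` unapply in action, assuming a runtime universe import:

    val ValDef(mods, _, _, _) = q"private val x = 2"
    val Modifiers(flags, privateWithin, annots) = mods
    // flags carries Flag.PRIVATE; annots is empty here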
 
   /** The factory for `Modifiers` instances.
diff --git a/src/reflect/scala/reflect/api/TypeCreator.scala b/src/reflect/scala/reflect/api/TypeCreator.scala
index 24271cb..37fff90 100644
--- a/src/reflect/scala/reflect/api/TypeCreator.scala
+++ b/src/reflect/scala/reflect/api/TypeCreator.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
 /** A mirror-aware factory for types.
diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala
index 7457910..1dfc84b 100644
--- a/src/reflect/scala/reflect/api/TypeTags.scala
+++ b/src/reflect/scala/reflect/api/TypeTags.scala
@@ -134,7 +134,7 @@ import scala.language.implicitConversions
  * reflection APIs provided by Java (for classes) and Scala (for types).</li>
  *
  * <li>'''Certain manifest operations(i.e., <:<, >:> and typeArguments) are not
- * supported.''' <br/>Instead, one culd use the reflection APIs provided by Java (for
+ * supported.''' <br/>Instead, one could use the reflection APIs provided by Java (for
  * classes) and Scala (for types).</li>
  *</ul>
  *
@@ -333,6 +333,12 @@ trait TypeTags { self: Universe =>
    * @group TypeTags
    */
   def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
+
+  /**
+   * Type symbol of `x` as derived from a type tag.
+   * @group TypeTags
+   */
+  def symbolOf[T: WeakTypeTag]: TypeSymbol
 }
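
A one-liner sketch of the new `symbolOf`, assuming a runtime universe import:

    symbolOf[Option[Int]]            // the type symbol of class Option
    typeOf[Option[Int]].typeSymbol   // the same symbol, reached via the type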
 
 private[scala] class SerializedTypeTag(var tpec: TypeCreator, var concrete: Boolean) extends Serializable {
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index 72163ef..f6995dd 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
 /**
@@ -50,20 +51,15 @@ package api
  *
  *  @contentDiagram hideNodes "*Api"
  */
-trait Types { self: Universe =>
+trait Types {
+  self: Universe =>
 
   /** The type of Scala types, and also Scala type signatures.
    *  (No difference is internally made between the two).
    *  @template
    *  @group Types
    */
-  type Type >: Null <: TypeApi
-
-  /** A tag that preserves the identity of the `Type` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TypeTagg: ClassTag[Type]
+  type Type >: Null <: AnyRef with TypeApi
 
   /** This constant is used as a special value that indicates that no meaningful type exists.
    *  @group Types
@@ -80,6 +76,12 @@ trait Types { self: Universe =>
   /** The API of types.
    *  The main source of information about types is the [[scala.reflect.api.Types]] page.
    *  @group API
+   *
+   *  @define dealiasWidenWarning Note that type aliases can hide beneath
+   *  singleton types and singleton types can hide inside type aliases.
+   *  Moreover, aliases might lurk in the upper bounds of abstract types.
+   *  Therefore careful thought has to be applied to identify and carry out
+   *  unwrapping logic specific to your use case.
    */
   abstract class TypeApi {
     /** The term symbol associated with the type, or `NoSymbol` for types
@@ -92,11 +94,19 @@ trait Types { self: Universe =>
      */
     def typeSymbol: Symbol
 
+    /** @see [[decl]] */
+    @deprecated("Use `decl` instead", "2.11.0")
+    def declaration(name: Name): Symbol
+
     /** The defined or declared members with name `name` in this type;
      *  an OverloadedSymbol if several exist, NoSymbol if none exist.
      *  Alternatives of overloaded symbol appear in the order they are declared.
      */
-    def declaration(name: Name): Symbol
+    def decl(name: Name): Symbol
+
+    /** @see [[decls]] */
+    @deprecated("Use `decls` instead", "2.11.0")
+    def declarations: MemberScope
 
     /** A `Scope` containing directly declared members of this type.
      *  Unlike `members`, this method doesn't return inherited members.
@@ -104,7 +114,7 @@ trait Types { self: Universe =>
      *  Members in the returned scope might appear in arbitrary order.
      *  Use `decls.sorted` to get an ordered list of members.
      */
-    def declarations: MemberScope
+    def decls: MemberScope
 
     /** The member with given name, either directly declared or inherited,
      *  an OverloadedSymbol if several exist, NoSymbol if none exist.
@@ -119,6 +129,11 @@ trait Types { self: Universe =>
      */
     def members: MemberScope
 
+    /** Type signature of the companion of the underlying class symbol.
+     *  NoType if the underlying symbol is not a class symbol, or if it doesn't have a companion.
+     */
+    def companion: Type
+
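
A short sketch of the renamed declaration accessors and the new `companion`, assuming a runtime universe import:

    val opt = typeOf[Option[Int]]
    opt.decl(TermName("get"))          // declared directly on Option
    opt.member(TermName("toString"))   // inherited members are reachable via `member`
    opt.decls.sorted.take(3)           // ordered view of the declaration scope
    opt.companion                      // the type of the Option companion object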
     /** Is this type a type constructor that is missing its type arguments?
      */
     def takesTypeArgs: Boolean
@@ -127,7 +142,7 @@ trait Types { self: Universe =>
      */
     def typeConstructor: Type
 
-    /**
+    /** Reduce to beta eta-long normal form.
      *  Expands type aliases and converts higher-kinded TypeRefs to PolyTypes.
      *  Functions on types are also implemented as PolyTypes.
      *
@@ -135,8 +150,18 @@ trait Types { self: Universe =>
      *    TypeRef(pre, <List>, List()) is replaced by
      *    PolyType(X, TypeRef(pre, <List>, List(X)))
      */
+    @deprecated("Use `dealias` or `etaExpand` instead", "2.11.0")
     def normalize: Type
 
+    /** Converts higher-kinded TypeRefs to PolyTypes.
+     *  Functions on types are also implemented as PolyTypes.
+     *
+     *  Example: (in the below, <List> is the type constructor of List)
+     *    TypeRef(pre, <List>, List()) is replaced by
+     *    PolyType(X, TypeRef(pre, <List>, List(X)))
+     */
+    def etaExpand: Type
+
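
A sketch contrasting a bare type constructor with its eta-expansion, assuming a runtime universe import:

    val listTC = typeOf[List[Int]].typeConstructor
    listTC.etaExpand   // the type constructor wrapped into a PolyType, roughly [+A]List[A]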
     /** Does this type conform to given type argument `that`? */
     def <:< (that: Type): Boolean
 
@@ -209,11 +234,132 @@ trait Types { self: Universe =>
      *  class Outer { class C ; val x: C }
      *  val o: Outer
      *  <o.x.type>.widen = o.C
+     *
+     *  $dealiasWidenWarning
      */
     def widen: Type
 
+    /** Expands type aliases arising from type members.
+     *  $dealiasWidenWarning
+     */
+    def dealias: Type
+
+    /******* popular methods from subclasses *******/
+
+    /** List of type arguments ingrained in this type reference.
+     *  Depending on your use case you might or might not want to call `dealias` first.
+     *
+     *  {{{
+     *  scala> type T = List[Int]
+     *  defined type alias T
+     *
+     *  scala> typeOf[T].typeArgs
+     *  res0: List[reflect.runtime.universe.Type] = List()
+     *
+     *  scala> typeOf[T].dealias.typeArgs
+     *  res1: List[reflect.runtime.universe.Type] = List(scala.Int)
+     *  }}}
+     */
+    def typeArgs: List[Type]
+
+    /** @see [[paramLists]] */
+    @deprecated("Use `paramLists` instead", "2.11.0")
+    def paramss: List[List[Symbol]]
+
+    /** For a method or poly type, a list of its value parameter sections,
+     *  the empty list of lists for all other types.
+     */
+    def paramLists: List[List[Symbol]]
+
+    /** For a poly type, its type parameters,
+     *  the empty list for all other types.
+     */
+    def typeParams: List[Symbol]
+
+    /** For a (nullary) method or poly type, its direct result type
+     *  (can be a MethodType if the method has multiple argument lists),
+     *  the type itself for all other types.
+     *
+     *  {{{
+     *  scala> class C { def foo[T](x: T)(y: T) = ??? }
+     *  defined class C
+     *
+     *  scala> typeOf[C].member(TermName("foo")).asMethod
+     *  res0: reflect.runtime.universe.MethodSymbol = method foo
+     *
+     *  scala> res0.info // PolyType wrapping a MethodType
+     *  res1: reflect.runtime.universe.Type = [T](x: T)(y: T)scala.Nothing
+     *
+     *  scala> res1.resultType // MethodType wrapping a MethodType
+     *  res2: reflect.runtime.universe.Type = (x: T)(y: T)scala.Nothing
+     *
+     *  scala> res1.resultType.resultType // vanilla MethodType
+     *  res3: reflect.runtime.universe.Type = (y: T)scala.Nothing
+     *
+     *  scala> res1.resultType.resultType.resultType
+     *  res4: reflect.runtime.universe.Type = scala.Nothing
+     *
+     *  scala> res1.finalResultType
+     *  res5: reflect.runtime.universe.Type = scala.Nothing
+     *  }}}
+     *
+     *  @see finalResultType
+     */
+    def resultType: Type
+
+    /** For a curried/nullary method or poly type, its non-method result type,
+     *  the type itself for all other types.
+     *
+     *  {{{
+     *  scala> class C {
+     *       | def foo[T](x: T)(y: T) = ???
+     *       | def bar: Int = ???
+     *       | }
+     *  defined class C
+     *
+     *  scala> typeOf[C].member(TermName("foo")).asMethod
+     *  res0: reflect.runtime.universe.MethodSymbol = method foo
+     *
+     *  scala> res0.info // PolyType wrapping a MethodType
+     *  res1: reflect.runtime.universe.Type = [T](x: T)(y: T)scala.Nothing
+     *
+     *  scala> res1.resultType // MethodType wrapping a MethodType
+     *  res2: reflect.runtime.universe.Type = (x: T)(y: T)scala.Nothing
+     *
+     *  scala> res1.resultType.resultType // vanilla MethodType
+     *  res3: reflect.runtime.universe.Type = (y: T)scala.Nothing
+     *
+     *  scala> res1.resultType.resultType.resultType
+     *  res4: reflect.runtime.universe.Type = scala.Nothing
+     *
+     *  scala> res1.finalResultType
+     *  res5: reflect.runtime.universe.Type = scala.Nothing
+     *
+     *  scala> typeOf[C].member(TermName("bar")).asMethod
+     *  res6: reflect.runtime.universe.MethodSymbol = method bar
+     *
+     *  scala> res6.info
+     *  res7: reflect.runtime.universe.Type = => scala.Int
+     *
+     *  scala> res6.info.resultType
+     *  res8: reflect.runtime.universe.Type = scala.Int
+     *
+     *  scala> res6.info.finalResultType
+     *  res9: reflect.runtime.universe.Type = scala.Int
+     *  }}}
+     *
+     *  @see resultType
+     */
+    def finalResultType: Type
+
     /******************* helpers *******************/
 
+    /** Provides an alternative if this type is `NoType`.
+     *
+     *  @group Helpers
+     */
+    def orElse(alt: => Type): Type
+
     /** Substitute symbols in `to` for corresponding occurrences of references to
      *  symbols `from` in this type.
      */
@@ -254,13 +400,12 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type SingletonType >: Null <: Type
+  type SingletonType >: Null <: SingletonTypeApi with Type
 
-  /** A tag that preserves the identity of the `SingletonType` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
+  /** Has no special methods. Is here to provide an erased identity for `SingletonType`.
+   *  @group API
    */
-  implicit val SingletonTypeTag: ClassTag[SingletonType]
+  trait SingletonTypeApi
 
   /** A singleton type that describes types of the form on the left with the
    *  corresponding `ThisType` representation to the right:
@@ -270,13 +415,7 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type ThisType >: Null <: AnyRef with SingletonType with ThisTypeApi
-
-  /** A tag that preserves the identity of the `ThisType` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ThisTypeTag: ClassTag[ThisType]
+  type ThisType >: Null <: ThisTypeApi with SingletonType
 
   /** The constructor/extractor for `ThisType` instances.
    *  @group Extractors
@@ -288,11 +427,11 @@ trait Types { self: Universe =>
    *  @group Extractors
    */
   abstract class ThisTypeExtractor {
-    /**
-     * Creates a ThisType from the given class symbol.
-     */
-    def apply(sym: Symbol): Type
     def unapply(tpe: ThisType): Option[Symbol]
+
+    /** @see [[InternalApi.thisType]] */
+    @deprecated("Use `internal.thisType` instead", "2.11.0")
+    def apply(sym: Symbol)(implicit token: CompatToken): Type = internal.thisType(sym)
   }
 
   /** The API that all this types support.
@@ -314,13 +453,7 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type SingleType >: Null <: AnyRef with SingletonType with SingleTypeApi
-
-  /** A tag that preserves the identity of the `SingleType` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val SingleTypeTag: ClassTag[SingleType]
+  type SingleType >: Null <: SingleTypeApi with SingletonType
 
   /** The constructor/extractor for `SingleType` instances.
    *  @group Extractors
@@ -333,8 +466,11 @@ trait Types { self: Universe =>
    *  @group Extractors
    */
   abstract class SingleTypeExtractor {
-    def apply(pre: Type, sym: Symbol): Type // not SingleTypebecause of implementation details
     def unapply(tpe: SingleType): Option[(Type, Symbol)]
+
+    /** @see [[InternalApi.singleType]] */
+    @deprecated("Use `ClassSymbol.thisPrefix` or `internal.singleType` instead")
+    def apply(pre: Type, sym: Symbol)(implicit token: CompatToken): Type = internal.singleType(pre, sym)
   }
 
   /** The API that all single types support.
@@ -359,13 +495,7 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type SuperType >: Null <: AnyRef with SingletonType with SuperTypeApi
-
-  /** A tag that preserves the identity of the `SuperType` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val SuperTypeTag: ClassTag[SuperType]
+  type SuperType >: Null <: SuperTypeApi with SingletonType
 
   /** The constructor/extractor for `SuperType` instances.
    *  @group Extractors
@@ -376,8 +506,11 @@ trait Types { self: Universe =>
    *  @group Extractors
    */
   abstract class SuperTypeExtractor {
-    def apply(thistpe: Type, supertpe: Type): Type // not SuperTypebecause of implementation details
     def unapply(tpe: SuperType): Option[(Type, Type)]
+
+    /** @see [[InternalApi.superType]] */
+    @deprecated("Use `ClassSymbol.superPrefix` or `internal.superType` instead", "2.11.0")
+    def apply(thistpe: Type, supertpe: Type)(implicit token: CompatToken): Type = internal.superType(thistpe, supertpe)
   }
 
   /** The API that all super types support.
@@ -404,13 +537,7 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type ConstantType >: Null <: AnyRef with SingletonType with ConstantTypeApi
-
-  /** A tag that preserves the identity of the `ConstantType` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ConstantTypeTag: ClassTag[ConstantType]
+  type ConstantType >: Null <: ConstantTypeApi with SingletonType
 
   /** The constructor/extractor for `ConstantType` instances.
    *  @group Extractors
@@ -422,8 +549,11 @@ trait Types { self: Universe =>
    *  @group Extractors
    */
   abstract class ConstantTypeExtractor {
-    def apply(value: Constant): ConstantType
     def unapply(tpe: ConstantType): Option[Constant]
+
+    /** @see [[InternalApi.constantType]] */
+    @deprecated("Use `value.tpe` or `internal.constantType` instead", "2.11.0")
+    def apply(value: Constant)(implicit token: CompatToken): ConstantType = internal.constantType(value)
   }
 
   /** The API that all constant types support.
@@ -448,13 +578,7 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type TypeRef >: Null <: AnyRef with Type with TypeRefApi
-
-  /** A tag that preserves the identity of the `TypeRef` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TypeRefTag: ClassTag[TypeRef]
+  type TypeRef >: Null <: TypeRefApi with Type
 
   /** The constructor/extractor for `TypeRef` instances.
    *  @group Extractors
@@ -468,8 +592,11 @@ trait Types { self: Universe =>
    *  @group Extractors
    */
   abstract class TypeRefExtractor {
-    def apply(pre: Type, sym: Symbol, args: List[Type]): Type // not TypeRefbecause of implementation details
     def unapply(tpe: TypeRef): Option[(Type, Symbol, List[Type])]
+
+    /** @see [[InternalApi.typeRef]] */
+    @deprecated("Use `internal.typeRef` instead", "2.11.0")
+    def apply(pre: Type, sym: Symbol, args: List[Type])(implicit token: CompatToken): Type = internal.typeRef(pre, sym, args)
   }
 
   /** The API that all type refs support.
@@ -495,13 +622,12 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type CompoundType >: Null <: AnyRef with Type
+  type CompoundType >: Null <: CompoundTypeApi with Type
 
-  /** A tag that preserves the identity of the `CompoundType` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
+  /** Has no special methods. Is here to provide an erased identity for `CompoundType`.
+   *  @group API
    */
-  implicit val CompoundTypeTag: ClassTag[CompoundType]
+  trait CompoundTypeApi
 
   /** The `RefinedType` type defines types of any of the forms on the left,
    *  with their RefinedType representations to the right.
@@ -513,13 +639,7 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type RefinedType >: Null <: AnyRef with CompoundType with RefinedTypeApi
-
-  /** A tag that preserves the identity of the `RefinedType` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val RefinedTypeTag: ClassTag[RefinedType]
+  type RefinedType >: Null <: RefinedTypeApi with CompoundType
 
   /** The constructor/extractor for `RefinedType` instances.
    *  @group Extractors
@@ -532,13 +652,15 @@ trait Types { self: Universe =>
    *  @group Extractors
    */
   abstract class RefinedTypeExtractor {
-    def apply(parents: List[Type], decls: Scope): RefinedType
-
-    /** An alternative constructor that passes in the synthetic classs symbol
-     *  that backs the refined type. (Normally, a fresh class symbol is created automatically).
-     */
-    def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType
     def unapply(tpe: RefinedType): Option[(List[Type], Scope)]
+
+    /** @see [[InternalApi.refinedType]] */
+    @deprecated("Use `internal.refinedType` instead", "2.11.0")
+    def apply(parents: List[Type], decls: Scope)(implicit token: CompatToken): RefinedType = internal.refinedType(parents, decls)
+
+    /** @see [[InternalApi.refinedType]] */
+    @deprecated("Use `internal.refinedType` instead", "2.11.0")
+    def apply(parents: List[Type], decls: Scope, clazz: Symbol)(implicit token: CompatToken): RefinedType = internal.refinedType(parents, decls, clazz)
   }
 
   /** The API that all refined types support.
@@ -550,7 +672,7 @@ trait Types { self: Universe =>
     def parents: List[Type]
 
     /** The scope that holds the definitions comprising the type. */
-    def decls: Scope
+    def decls: MemberScope
   }
 
   /** The `ClassInfo` type signature is used to define parents and declarations
@@ -565,13 +687,7 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type ClassInfoType >: Null <: AnyRef with CompoundType with ClassInfoTypeApi
-
-  /** A tag that preserves the identity of the `ClassInfoType` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ClassInfoTypeTag: ClassTag[ClassInfoType]
+  type ClassInfoType >: Null <: ClassInfoTypeApi with CompoundType
 
   /** The constructor/extractor for `ClassInfoType` instances.
    *  @group Extractors
@@ -585,8 +701,11 @@ trait Types { self: Universe =>
    *  @group Extractors
    */
   abstract class ClassInfoTypeExtractor {
-    def apply(parents: List[Type], decls: Scope, typeSymbol: Symbol): ClassInfoType
     def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)]
+
+    /** @see [[InternalApi.classInfoType]] */
+    @deprecated("Use `internal.classInfoType` instead", "2.11.0")
+    def apply(parents: List[Type], decls: Scope, typeSymbol: Symbol)(implicit token: CompatToken): ClassInfoType = internal.classInfoType(parents, decls, typeSymbol)
   }
 
   /** The API that all class info types support.
@@ -598,7 +717,7 @@ trait Types { self: Universe =>
     def parents: List[Type]
 
     /** The scope that holds the definitions comprising the class type. */
-    def decls: Scope
+    def decls: MemberScope
 
     /** The symbol underlying the class type. */
     def typeSymbol: Symbol
@@ -608,13 +727,7 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type MethodType >: Null <: AnyRef with Type with MethodTypeApi
-
-  /** A tag that preserves the identity of the `MethodType` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val MethodTypeTag: ClassTag[MethodType]
+  type MethodType >: Null <: MethodTypeApi with Type
 
   /** The constructor/extractor for `MethodType` instances.
    *  @group Extractors
@@ -637,8 +750,11 @@ trait Types { self: Universe =>
    *  @group Extractors
    */
   abstract class MethodTypeExtractor {
-    def apply(params: List[Symbol], resultType: Type): MethodType
     def unapply(tpe: MethodType): Option[(List[Symbol], Type)]
+
+    /** @see [[InternalApi.methodType]] */
+    @deprecated("Use `internal.methodType` instead", "2.11.0")
+    def apply(params: List[Symbol], resultType: Type)(implicit token: CompatToken): MethodType = internal.methodType(params, resultType)
   }
 
   /** The API that all method types support.
@@ -658,13 +774,7 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type NullaryMethodType >: Null <: AnyRef with Type with NullaryMethodTypeApi
-
-  /** A tag that preserves the identity of the `NullaryMethodType` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val NullaryMethodTypeTag: ClassTag[NullaryMethodType]
+  type NullaryMethodType >: Null <: NullaryMethodTypeApi with Type
 
   /** The constructor/extractor for `NullaryMethodType` instances.
    *  @group Extractors
@@ -676,8 +786,11 @@ trait Types { self: Universe =>
    *  @group Extractors
    */
   abstract class NullaryMethodTypeExtractor {
-    def apply(resultType: Type): NullaryMethodType
     def unapply(tpe: NullaryMethodType): Option[(Type)]
+
+    /** @see [[InternalApi.nullaryMethodType]] */
+    @deprecated("Use `internal.nullaryMethodType` instead", "2.11.0")
+    def apply(resultType: Type)(implicit token: CompatToken): NullaryMethodType = internal.nullaryMethodType(resultType)
   }
 
   /** The API that all nullary method types support.
@@ -694,13 +807,7 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type PolyType >: Null <: AnyRef with Type with PolyTypeApi
-
-  /** A tag that preserves the identity of the `PolyType` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val PolyTypeTag: ClassTag[PolyType]
+  type PolyType >: Null <: PolyTypeApi with Type
 
   /** The constructor/extractor for `PolyType` instances.
    *  @group Extractors
@@ -713,8 +820,11 @@ trait Types { self: Universe =>
    *  @group Extractors
    */
   abstract class PolyTypeExtractor {
-    def apply(typeParams: List[Symbol], resultType: Type): PolyType
     def unapply(tpe: PolyType): Option[(List[Symbol], Type)]
+
+    /** @see [[InternalApi.polyType]] */
+    @deprecated("Use `internal.polyType` instead", "2.11.0")
+    def apply(typeParams: List[Symbol], resultType: Type)(implicit token: CompatToken): PolyType = internal.polyType(typeParams, resultType)
   }
 
   /** The API that all polymorphic types support.
@@ -734,13 +844,7 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type ExistentialType >: Null <: AnyRef with Type with ExistentialTypeApi
-
-  /** A tag that preserves the identity of the `ExistentialType` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val ExistentialTypeTag: ClassTag[ExistentialType]
+  type ExistentialType >: Null <: ExistentialTypeApi with Type
 
   /** The constructor/extractor for `ExistentialType` instances.
    *  @group Extractors
@@ -754,8 +858,11 @@ trait Types { self: Universe =>
    *  @group Extractors
    */
   abstract class ExistentialTypeExtractor {
-    def apply(quantified: List[Symbol], underlying: Type): ExistentialType
     def unapply(tpe: ExistentialType): Option[(List[Symbol], Type)]
+
+    /** @see [[InternalApi.existentialType]] */
+    @deprecated("Use `internal.existentialType` instead", "2.11.0")
+    def apply(quantified: List[Symbol], underlying: Type)(implicit token: CompatToken): ExistentialType = internal.existentialType(quantified, underlying)
   }
 
   /** The API that all existential types support.
@@ -775,13 +882,7 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type AnnotatedType >: Null <: AnyRef with Type with AnnotatedTypeApi
-
-  /** A tag that preserves the identity of the `AnnotatedType` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val AnnotatedTypeTag: ClassTag[AnnotatedType]
+  type AnnotatedType >: Null <: AnnotatedTypeApi with Type
 
   /** The constructor/extractor for `AnnotatedType` instances.
    *  @group Extractors
@@ -789,14 +890,17 @@ trait Types { self: Universe =>
   val AnnotatedType: AnnotatedTypeExtractor
 
   /** An extractor class to create and pattern match with syntax
-   * `AnnotatedType(annotations, underlying, selfsym)`.
+   * `AnnotatedType(annotations, underlying)`.
    *  Here, `annotations` are the annotations decorating the underlying type `underlying`.
    *  @group Extractors
    */
   abstract class AnnotatedTypeExtractor {
-    def apply(annotations: List[Annotation], underlying: Type, selfsym: Symbol): AnnotatedType
-    def unapply(tpe: AnnotatedType): Option[(List[Annotation], Type, Symbol)]
+    def unapply(tpe: AnnotatedType): Option[(List[Annotation], Type)]
+
+    /** @see [[InternalApi.annotatedType]] */
+    @deprecated("Use `internal.annotatedType` instead", "2.11.0")
+    def apply(annotations: List[Annotation], underlying: Type)(implicit token: CompatToken): AnnotatedType = internal.annotatedType(annotations, underlying)
   }
 
   /** The API that all annotated types support.
@@ -809,9 +913,6 @@ trait Types { self: Universe =>
 
     /** The annotee. */
     def underlying: Type
-
-    /** A symbol that represents the annotated type itself. */
-    def selfsym: Symbol
   }
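A rough sketch of pattern matching against the new two-parameter `AnnotatedType` extractor (the `describe` helper is made up for illustration; it assumes the standard runtime universe):

{{{
import scala.reflect.runtime.universe._

// classify a type by whether it carries annotations
def describe(tp: Type): String = tp match {
  case AnnotatedType(annots, underlying) => s"$underlying annotated with ${annots.mkString(", ")}"
  case other                             => other.toString
}
}}}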
 
   /** The `TypeBounds` type signature is used to indicate lower and upper type bounds
@@ -826,13 +927,7 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type TypeBounds >: Null <: AnyRef with Type with TypeBoundsApi
-
-  /** A tag that preserves the identity of the `TypeBounds` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val TypeBoundsTag: ClassTag[TypeBounds]
+  type TypeBounds >: Null <: TypeBoundsApi with Type
 
   /** The constructor/extractor for `TypeBounds` instances.
    *  @group Extractors
@@ -845,8 +940,11 @@ trait Types { self: Universe =>
    *  @group Extractors
    */
   abstract class TypeBoundsExtractor {
-    def apply(lo: Type, hi: Type): TypeBounds
     def unapply(tpe: TypeBounds): Option[(Type, Type)]
+
+    /** @see [[InternalApi.typeBounds]] */
+    @deprecated("Use `internal.typeBounds` instead", "2.11.0")
+    def apply(lo: Type, hi: Type)(implicit token: CompatToken): TypeBounds = internal.typeBounds(lo, hi)
   }
 
   /** The API that all type bounds support.
@@ -883,13 +981,7 @@ trait Types { self: Universe =>
    *  @template
    *  @group Types
    */
-  type BoundedWildcardType >: Null <: AnyRef with Type with BoundedWildcardTypeApi
-
-  /** A tag that preserves the identity of the `BoundedWildcardType` abstract type from erasure.
-   *  Can be used for pattern matching, instance tests, serialization and likes.
-   *  @group Tags
-   */
-  implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType]
+  type BoundedWildcardType >: Null <: BoundedWildcardTypeApi with Type
 
   /** The constructor/extractor for `BoundedWildcardType` instances.
    *  @group Extractors
@@ -901,8 +993,11 @@ trait Types { self: Universe =>
    *  @group Extractors
    */
   abstract class BoundedWildcardTypeExtractor {
-    def apply(bounds: TypeBounds): BoundedWildcardType
     def unapply(tpe: BoundedWildcardType): Option[TypeBounds]
+
+    /** @see [[InternalApi.boundedWildcardType]] */
+    @deprecated("Use `internal.boundedWildcardType` instead", "2.11.0")
+    def apply(bounds: TypeBounds)(implicit token: CompatToken): BoundedWildcardType = internal.boundedWildcardType(bounds)
   }
 
   /** The API that all bounded wildcard types support.
@@ -924,74 +1019,17 @@ trait Types { self: Universe =>
    */
   def glb(ts: List[Type]): Type
 
-  // Creators ---------------------------------------------------------------
-  // too useful and too non-trivial to be left out of public API
-
-  /** The canonical creator for single-types
-   *  @group TypeCreators
-   */
-  def singleType(pre: Type, sym: Symbol): Type
-
-  /** the canonical creator for a refined type with a given scope
-   *  @group TypeCreators
-   */
-  def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type
-
-  /** The canonical creator for a refined type with an initially empty scope.
-   *  @group TypeCreators
-   */
-  def refinedType(parents: List[Type], owner: Symbol): Type
-
-  /** The canonical creator for typerefs
-   *  @group TypeCreators
-   */
-  def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type
-
-  /** A creator for intersection type where intersections of a single type are
-   *  replaced by the type itself.
-   *  @group TypeCreators
-   */
-  def intersectionType(tps: List[Type]): Type
-
-  /** A creator for intersection type where intersections of a single type are
-   *  replaced by the type itself, and repeated parent classes are merged.
-   *
-   *  !!! Repeated parent classes are not merged - is this a bug in the
-   *  comment or in the code?
-   *  @group TypeCreators
-   */
-  def intersectionType(tps: List[Type], owner: Symbol): Type
-
   /** A creator for type applications
-   *  @group Types
+   *  @group TypeOps
    */
   def appliedType(tycon: Type, args: List[Type]): Type
 
-  /** A creator for type parameterizations that strips empty type parameter lists.
-   *  Use this factory method to indicate the type has kind * (it's a polymorphic value)
-   *  until we start tracking explicit kinds equivalent to typeFun (except that the latter requires tparams nonEmpty).
-   *  @group Types
-   */
-  def polyType(tparams: List[Symbol], tpe: Type): Type
+  /** @see [[appliedType]] */
+  def appliedType(tycon: Type, args: Type*): Type
 
-  /** A creator for existential types. This generates:
-   *
-   *  {{{
-   *    tpe1 where { tparams }
-   *  }}}
-   *
-   *  where `tpe1` is the result of extrapolating `tpe` with regard to `tparams`.
-   *  Extrapolating means that type variables in `tparams` occurring
-   *  in covariant positions are replaced by upper bounds, (minus any
-   *  SingletonClass markers), type variables in `tparams` occurring in
-   *  contravariant positions are replaced by upper bounds, provided the
-   *  resulting type is legal with regard to stability, and does not contain
-   *  any type variable in `tparams`.
-   *
-   *  The abstraction drops all type parameters that are not directly or
-   *  indirectly referenced by type `tpe1`. If there are no remaining type
-   *  parameters, simply returns result type `tpe`.
-   *  @group TypeCreators
-   */
-  def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type
+  /** @see [[appliedType]] */
+  def appliedType(sym: Symbol, args: List[Type]): Type
+
+  /** @see [[appliedType]] */
+  def appliedType(sym: Symbol, args: Type*): Type
 }
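A minimal sketch of the `appliedType` overloads that remain in the public API, assuming the standard runtime universe (printed results in the comments are approximate):

{{{
import scala.reflect.runtime.universe._

object AppliedTypeDemo extends App {
  val listCons = typeOf[List[Int]].typeConstructor                        // List

  println(appliedType(listCons, typeOf[Int]))                             // List[Int]  (varargs overload)
  println(appliedType(listCons, List(typeOf[String])))                    // List[String]
  println(appliedType(symbolOf[Map[_, _]], typeOf[String], typeOf[Int]))  // Map[String,Int]
}
}}}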
diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala
index 15fa11c..a3d1d29 100644
--- a/src/reflect/scala/reflect/api/Universe.scala
+++ b/src/reflect/scala/reflect/api/Universe.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package api
 
 /**
@@ -40,7 +41,8 @@ package api
  *   res1: reflect.runtime.universe.Type = scala.Either[String,Int]
  *   }}}
  *
- * To obtain a `Universe` for use within a Scala macro, use [[scala.reflect.macros.Context#universe]]. For example:
+ * To obtain a `Universe` for use within a Scala macro, use [[scala.reflect.macros.blackbox.Context#universe]]
+ * or [[scala.reflect.macros.whitebox.Context#universe]]. For example:
  * {{{
  *  def printf(format: String, params: Any*): Unit = macro impl
  *  def impl(c: Context)(format: c.Expr[String], params: c.Expr[Any]*): c.Expr[Unit] = {
@@ -67,13 +69,15 @@ abstract class Universe extends Symbols
                            with Positions
                            with Exprs
                            with TypeTags
-                           with TagInterop
+                           with ImplicitTags
                            with StandardDefinitions
                            with StandardNames
-                           with BuildUtils
+                           with StandardLiftables
                            with Mirrors
                            with Printers
-                           with Importers
+                           with Liftables
+                           with Quasiquotes
+                           with Internals
 {
   /** Use `reify` to produce the abstract syntax tree representing a given Scala expression.
    *
@@ -92,5 +96,5 @@ abstract class Universe extends Symbols
    */
   // implementation is hardwired to `scala.reflect.reify.Taggers`
   // using the mechanism implemented in `scala.tools.reflect.FastTrack`
-  def reify[T](expr: T): Expr[T] = ??? // macro
+  def reify[T](expr: T): Expr[T] = macro ???
 }
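A small, self-contained sketch of `reify` against the runtime universe (the `showRaw` output in the comment is approximate):

{{{
import scala.reflect.runtime.universe._

object ReifyDemo extends App {
  val e: Expr[Int] = reify { 1 + 2 }   // captures the AST of the expression, does not evaluate it
  println(showRaw(e.tree))             // roughly: Apply(Select(Literal(Constant(1)), TermName("$plus")), List(Literal(Constant(2))))
}
}}}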
diff --git a/src/reflect/scala/reflect/api/package.scala b/src/reflect/scala/reflect/api/package.scala
index dbda84d..a8f409e 100644
--- a/src/reflect/scala/reflect/api/package.scala
+++ b/src/reflect/scala/reflect/api/package.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 
 import scala.reflect.api.{Universe => ApiUniverse}
 
@@ -42,6 +43,6 @@ package object api {
   // implementation is hardwired into `scala.reflect.reify.Taggers`
   // using the mechanism implemented in `scala.tools.reflect.FastTrack`
   // todo. once we have implicit macros for tag generation, we can remove these anchors
-  private[scala] def materializeWeakTypeTag[T](u: ApiUniverse): u.WeakTypeTag[T] = ??? // macro
-  private[scala] def materializeTypeTag[T](u: ApiUniverse): u.TypeTag[T] = ??? // macro
+  private[scala] def materializeWeakTypeTag[T](u: ApiUniverse): u.WeakTypeTag[T] = macro ???
+  private[scala] def materializeTypeTag[T](u: ApiUniverse): u.TypeTag[T] = macro ???
 }
\ No newline at end of file
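From the user side, these anchors surface as implicit `TypeTag`/`WeakTypeTag` materialization; a minimal sketch (the helper names are made up for illustration):

{{{
import scala.reflect.runtime.universe._

object TagDemo extends App {
  // a context bound asks the compiler to materialize a TypeTag at the call site
  def describe[T: TypeTag](x: T): String = s"$x: ${typeOf[T]}"

  // WeakTypeTag also succeeds when parts of T cannot be fully resolved
  def weakName[T: WeakTypeTag]: String = weakTypeOf[T].toString

  println(describe(List(1, 2, 3)))   // List(1, 2, 3): List[Int]
  println(weakName[Option[Int]])     // Option[Int]
}
}}}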
diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
index 1ab975b..74310e1 100644
--- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 /** Additions to the type checker that can be added at
@@ -52,7 +53,7 @@ trait AnnotationCheckers {
      * given type tp, taking into account the given mode (see method adapt in trait Typers).
      */
     @deprecated("Create an AnalyzerPlugin and use canAdaptAnnotations", "2.10.1")
-    def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = false
+    def canAdaptAnnotations(tree: Tree, mode: Mode, pt: Type): Boolean = false
 
     /**
      * Adapt a tree that has an annotated type to the given type tp, taking into account the given
@@ -62,7 +63,7 @@ trait AnnotationCheckers {
     * class cannot do the adapting, it should return the tree unchanged.
      */
     @deprecated("Create an AnalyzerPlugin and use adaptAnnotations", "2.10.1")
-    def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = tree
+    def adaptAnnotations(tree: Tree, mode: Mode, pt: Type): Tree = tree
 
     /**
      * Adapt the type of a return expression. The decision of a typer plugin whether the type
@@ -126,13 +127,13 @@ trait AnnotationCheckers {
     else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
       if (!checker.isActive()) tpe else checker.addAnnotations(tree, tpe))
 
-  def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean =
+  def canAdaptAnnotations(tree: Tree, mode: Mode, pt: Type): Boolean =
     if (annotationCheckers.isEmpty) false
     else annotationCheckers.exists(checker => {
       checker.isActive() && checker.canAdaptAnnotations(tree, mode, pt)
     })
 
-  def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree =
+  def adaptAnnotations(tree: Tree, mode: Mode, pt: Type): Tree =
     if (annotationCheckers.isEmpty) tree
     else annotationCheckers.foldLeft(tree)((tree, checker) =>
       if (!checker.isActive()) tree else checker.adaptAnnotations(tree, mode, pt))
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index 032b453..19e9eef 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -3,17 +3,19 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
-import util._
 import pickling.ByteCodecs
 import scala.annotation.tailrec
 import scala.collection.immutable.ListMap
+import scala.language.postfixOps
 
 /** AnnotationInfo and its helpers */
 trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
-  import definitions.{ ThrowsClass, StaticAnnotationClass, isMetaAnnotation }
+  import definitions._
+  import treeInfo._
 
   // Common annotation code between Symbol and Type.
   // For methods altering the annotation list, on Symbol it mutates
@@ -27,6 +29,8 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
     def filterAnnotations(p: AnnotationInfo => Boolean): Self // Retain only annotations meeting the condition.
     def withoutAnnotations: Self                              // Remove all annotations from this type.
 
+    def staticAnnotations = annotations filter (_.isStatic)
+
     /** Symbols of any @throws annotations on this symbol.
      */
     def throwsAnnotations(): List[Symbol] = annotations collect {
@@ -40,14 +44,13 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
         // monomorphic one by introducing existentials, see SI-7009 for details
         existentialAbstraction(throwableSym.typeParams, throwableSym.tpe)
       }
-      val throwsAnn = AnnotationInfo(appliedType(definitions.ThrowsClass, throwableTpe), List(Literal(Constant(throwableTpe))), Nil)
-      withAnnotations(List(throwsAnn))
+      this withAnnotation AnnotationInfo(appliedType(ThrowsClass, throwableTpe), List(Literal(Constant(throwableTpe))), Nil)
     }
 
     /** Tests for, get, or remove an annotation */
     def hasAnnotation(cls: Symbol): Boolean =
       //OPT inlined from exists to save on #closures; was:  annotations exists (_ matches cls)
-      dropOtherAnnotations(annotations, cls).nonEmpty
+      dropOtherAnnotations(annotations, cls) ne Nil
 
     def getAnnotation(cls: Symbol): Option[AnnotationInfo] =
       //OPT inlined from exists to save on #closures; was:  annotations find (_ matches cls)
@@ -74,7 +77,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
    *  - arrays of constants
    *  - or nested classfile annotations
    */
-  abstract class ClassfileAnnotArg extends Product
+  sealed abstract class ClassfileAnnotArg extends Product with JavaArgumentApi
   implicit val JavaArgumentTag = ClassTag[ClassfileAnnotArg](classOf[ClassfileAnnotArg])
   case object UnmappableAnnotArg extends ClassfileAnnotArg
 
@@ -122,25 +125,32 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
    *  must be `String`. This specialised class is used to encode Scala
    *  signatures for reasons of efficiency, both in term of class-file size
    *  and in term of compiler performance.
+   *  Details about the storage format of pickles at the bytecode level (classfile annotations) can be found in SIP-10.
    */
   case class ScalaSigBytes(bytes: Array[Byte]) extends ClassfileAnnotArg {
     override def toString = (bytes map { byte => (byte & 0xff).toHexString }).mkString("[ ", " ", " ]")
-    lazy val encodedBytes = ByteCodecs.encode(bytes)    // TODO remove after migration to ASM-based GenJVM complete
-    def isLong: Boolean = (encodedBytes.length > 65535) // TODO remove after migration to ASM-based GenJVM complete
     lazy val sevenBitsMayBeZero: Array[Byte] = {
       mapToNextModSevenBits(scala.reflect.internal.pickling.ByteCodecs.encode8to7(bytes))
     }
+
+    /* In order to store a byte array (the pickle) using a bytecode-level annotation,
+     * the most compact representation is used (which happens to be string-constant and not byte array as one would expect).
+     * However, a String constant in a classfile annotation is limited to a maximum of 65535 characters.
+     * Method `fitsInOneString` tells us whether the pickle can be held by a single classfile-annotation of string-type.
+     * Otherwise an array of strings will be used.
+     */
     def fitsInOneString: Boolean = {
+      // due to escaping, a zero byte in a classfile-annotation of string-type actually takes two characters.
       val numZeros = (sevenBitsMayBeZero count { b => b == 0 })
-      val res = (sevenBitsMayBeZero.length + numZeros) <= 65535
-      assert(this.isLong == !res, "As things stand, can't just swap in `fitsInOneString()` for `isLong()`")
-      res
+
+      (sevenBitsMayBeZero.length + numZeros) <= 65535
     }
+
     def sigAnnot: Type =
-      if (this.isLong)
-        definitions.ScalaLongSignatureAnnotation.tpe
-      else
+      if (fitsInOneString)
         definitions.ScalaSignatureAnnotation.tpe
+      else
+        definitions.ScalaLongSignatureAnnotation.tpe
 
     private def mapToNextModSevenBits(src: Array[Byte]): Array[Byte] = {
       var i = 0
@@ -282,8 +292,8 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
      *  metaAnnotations = List(setter, field).
      */
     def metaAnnotations: List[AnnotationInfo] = atp match {
-      case AnnotatedType(metas, _, _) => metas
-      case _                          => Nil
+      case AnnotatedType(metas, _) => metas
+      case _                       => Nil
     }
 
     /** The default kind of members to which this annotation is attached.
@@ -304,10 +314,6 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
     /** Check whether any of the arguments mention a symbol */
     def refsSymbol(sym: Symbol) = hasArgWhich(_.symbol == sym)
 
-    /** Change all ident's with Symbol "from" to instead use symbol "to" */
-    def substIdentSyms(from: Symbol, to: Symbol) =
-      AnnotationInfo(atp, args map (_ substituteSymbols (List(from), List(to))), assocs) setPos pos
-
     def stringArg(index: Int) = constantAtIndex(index) map (_.stringValue)
     def intArg(index: Int)    = constantAtIndex(index) map (_.intValue)
     def symbolArg(index: Int) = argAtIndex(index) collect {
@@ -340,15 +346,74 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
   }
   implicit val AnnotationTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo])
 
+  protected[scala] def annotationToTree(ann: Annotation): Tree = {
+    def reverseEngineerArgs(): List[Tree] = {
+      def reverseEngineerArg(jarg: ClassfileAnnotArg): Tree = jarg match {
+        case LiteralAnnotArg(const) =>
+          val tpe = if (const.tag == UnitTag) UnitTpe else ConstantType(const)
+          Literal(const) setType tpe
+        case ArrayAnnotArg(jargs) =>
+          val args = jargs map reverseEngineerArg
+          // TODO: I think it would be a good idea to typecheck Java annotations using a more traditional algorithm
+          // sure, we can't typecheck them as is using the `new jann(foo = bar)` syntax (because jann is going to be an @interface)
+          // however we can do better than `typedAnnotation` by desugaring the aforementioned expression to
+          // something like `new jann() { override def annotatedType() = ...; override def foo = bar }`
+          // and then using the results of that typecheck to produce a Java-compatible classfile entry
+          // in that case we're going to have correctly typed Array.apply calls, however that's 2.12 territory
+          // and for 2.11 exposing an untyped call to ArrayModule should suffice
+          Apply(Ident(ArrayModule), args.toList)
+        case NestedAnnotArg(ann: Annotation) =>
+          annotationToTree(ann)
+        case _ =>
+          EmptyTree
+      }
+      def reverseEngineerArgs(jargs: List[(Name, ClassfileAnnotArg)]): List[Tree] = jargs match {
+        case (name, jarg) :: rest => AssignOrNamedArg(Ident(name), reverseEngineerArg(jarg)) :: reverseEngineerArgs(rest)
+        case Nil => Nil
+      }
+      if (ann.javaArgs.isEmpty) ann.scalaArgs
+      else reverseEngineerArgs(ann.javaArgs.toList)
+    }
+
+    // TODO: at the moment, constructor selection is unattributed, because AnnotationInfos lack necessary information
+    // later on, in 2.12, for every annotation we could save an entire tree instead of just bits and pieces
+    // but for 2.11 the current situation will have to do
+    val ctorSelection = Select(New(TypeTree(ann.atp)), nme.CONSTRUCTOR)
+    Apply(ctorSelection, reverseEngineerArgs()) setType ann.atp
+  }
+
+  protected[scala] def treeToAnnotation(tree: Tree): Annotation = tree match {
+    case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
+      def encodeJavaArg(arg: Tree): ClassfileAnnotArg = arg match {
+        case Literal(const) => LiteralAnnotArg(const)
+        case Apply(ArrayModule, args) => ArrayAnnotArg(args map encodeJavaArg toArray)
+        case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) => NestedAnnotArg(treeToAnnotation(arg))
+        case _ => throw new Exception(s"unexpected java argument shape $arg: literals, arrays and nested annotations are supported")
+      }
+      def encodeJavaArgs(args: List[Tree]): List[(Name, ClassfileAnnotArg)] = args match {
+        case AssignOrNamedArg(Ident(name), arg) :: rest => (name, encodeJavaArg(arg)) :: encodeJavaArgs(rest)
+        case arg :: rest => throw new Exception(s"unexpected java argument shape $arg: only AssignOrNamedArg trees are supported")
+        case Nil => Nil
+      }
+      val atp = tpt.tpe
+      if (atp != null && (atp.typeSymbol isNonBottomSubClass StaticAnnotationClass)) AnnotationInfo(atp, args, Nil)
+      else if (atp != null && (atp.typeSymbol isNonBottomSubClass ClassfileAnnotationClass)) AnnotationInfo(atp, Nil, encodeJavaArgs(args))
+      else throw new Exception(s"unexpected annotation type $atp: only subclasses of StaticAnnotation and ClassfileAnnotation are supported")
+    case _ =>
+      throw new Exception("""unexpected tree shape: only q"new $annType(..$args)" is supported""")
+  }
+
   object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil)
 
+  object ErroneousAnnotation extends CompleteAnnotationInfo(ErrorType, Nil, Nil)
+
   /** Extracts symbol of thrown exception from AnnotationInfo.
     *
    * Supports both “old-style” `@throws(classOf[Exception])`
    * and “new-style” `@throws[Exception]("cause")` annotations.
     */
   object ThrownException {
-    def unapply(ann: AnnotationInfo): Option[Symbol] =
+    def unapply(ann: AnnotationInfo): Option[Symbol] = {
       ann match {
         case AnnotationInfo(tpe, _, _) if tpe.typeSymbol != ThrowsClass =>
           None
@@ -356,8 +421,11 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
         case AnnotationInfo(_, List(Literal(Constant(tpe: Type))), _) =>
           Some(tpe.typeSymbol)
         // new-style: @throws[Exception], @throws[Exception]("cause")
-        case AnnotationInfo(TypeRef(_, _, args), _, _) =>
-          Some(args.head.typeSymbol)
+        case AnnotationInfo(TypeRef(_, _, arg :: _), _, _) =>
+          Some(arg.typeSymbol)
+        case AnnotationInfo(TypeRef(_, _, Nil), _, _) =>
+          Some(ThrowableClass)
       }
+    }
   }
 }
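For reference, a small sketch of the two `@throws` spellings that `ThrownException` is meant to recover (the class and method names are made up):

{{{
import java.io.IOException

class Reader {
  // "old-style": the exception class is passed as a class literal
  @throws(classOf[IOException])
  def readOld(): Int = ???

  // "new-style" (Scala 2.10+): the exception is a type argument, with an optional cause string
  @throws[IOException]("if the underlying stream fails")
  def readNew(): Int = ???
}
}}}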
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 3c2b128..0ca8611 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -2,7 +2,8 @@
  * Copyright 2005-2013 LAMP/EPFL
  * @author  Martin Odersky
  */
-package scala.reflect
+package scala
+package reflect
 package internal
 
 // todo implement in terms of BitSet
@@ -37,7 +38,7 @@ trait BaseTypeSeqs {
    *  This is necessary because when run from reflection every base type sequence needs to have a
    *  SynchronizedBaseTypeSeq as mixin.
    */
-  class BaseTypeSeq protected[BaseTypeSeqs] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) {
+  class BaseTypeSeq protected[reflect] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) {
   self =>
     if (Statistics.canEnable) Statistics.incCounter(baseTypeSeqCount)
     if (Statistics.canEnable) Statistics.incCounter(baseTypeSeqLenTotal, elems.length)
@@ -64,16 +65,15 @@ trait BaseTypeSeqs {
             //Console.println("compute closure of "+this+" => glb("+variants+")")
             pending += i
             try {
-              mergePrefixAndArgs(variants, -1, lubDepth(variants)) match {
-                case Some(tp0) =>
+              mergePrefixAndArgs(variants, Variance.Contravariant, lubDepth(variants)) match {
+                case NoType => typeError("no common type instance of base types "+(variants mkString ", and ")+" exists.")
+                case tp0    =>
                   pending(i) = false
                   elems(i) = tp0
                   tp0
-                case None =>
-                  typeError(
-                    "no common type instance of base types "+(variants mkString ", and ")+" exists.")
               }
-            } catch {
+            }
+            catch {
               case CyclicInheritance =>
                 typeError(
                   "computing the common type instance of base types "+(variants mkString ", and ")+" leads to a cycle.")
@@ -115,7 +115,7 @@ trait BaseTypeSeqs {
     def map(f: Type => Type): BaseTypeSeq = {
 	  // inlined `elems map f` for performance
       val len = length
-      var arr = new Array[Type](len)
+      val arr = new Array[Type](len)
       var i = 0
       while (i < len) {
         arr(i) = f(elems(i))
@@ -130,9 +130,9 @@ trait BaseTypeSeqs {
 
     lazy val maxDepth = maxDepthOfElems
 
-    protected def maxDepthOfElems: Int = {
-      var d = 0
-      for (i <- 1 until length) d = max(d, typeDepth(elems(i)))
+    protected def maxDepthOfElems: Depth = {
+      var d = Depth.Zero
+      1 until length foreach (i => d = d max typeDepth(elems(i)))
       d
     }
 
@@ -158,7 +158,7 @@ trait BaseTypeSeqs {
     val parents = tp.parents
 //    Console.println("computing baseTypeSeq of " + tsym.tpe + " " + parents)//DEBUG
     val buf = new mutable.ListBuffer[Type]
-    buf += tsym.tpe
+    buf += tsym.tpe_*
     var btsSize = 1
     if (parents.nonEmpty) {
       val nparents = parents.length
@@ -166,9 +166,10 @@ trait BaseTypeSeqs {
       val index = new Array[Int](nparents)
       var i = 0
       for (p <- parents) {
+        val parentBts = p.dealias.baseTypeSeq // dealias needed for SI-8046.
         pbtss(i) =
-          if (p.baseTypeSeq eq undetBaseTypeSeq) AnyClass.info.baseTypeSeq
-          else p.baseTypeSeq
+          if (parentBts eq undetBaseTypeSeq) AnyClass.info.baseTypeSeq
+          else parentBts
         index(i) = 0
         i += 1
       }
@@ -180,7 +181,7 @@ trait BaseTypeSeqs {
       def nextRawElem(i: Int): Type = {
         val j = index(i)
         val pbts = pbtss(i)
-        if (j < pbts.length) pbts.rawElem(j) else AnyClass.tpe
+        if (j < pbts.length) pbts.rawElem(j) else AnyTpe
       }
       var minSym: Symbol = NoSymbol
       while (minSym != AnyClass) {
@@ -193,15 +194,23 @@ trait BaseTypeSeqs {
           i += 1
         }
         var minTypes: List[Type] = List()
+        def alreadyInMinTypes(tp: Type): Boolean = {
+          @annotation.tailrec def loop(tps: List[Type]): Boolean = tps match {
+            case Nil     => false
+            case x :: xs => (tp =:= x) || loop(xs)
+          }
+          loop(minTypes)
+        }
+
         i = 0
         while (i < nparents) {
           if (nextTypeSymbol(i) == minSym) {
             nextRawElem(i) match {
               case RefinedType(variants, decls) =>
                 for (tp <- variants)
-                  if (!(minTypes exists (tp =:= _))) minTypes = tp :: minTypes
+                  if (!alreadyInMinTypes(tp)) minTypes ::= tp
               case tp =>
-                if (!(minTypes exists (tp =:= _))) minTypes = tp :: minTypes
+                if (!alreadyInMinTypes(tp)) minTypes ::= tp
             }
             index(i) = index(i) + 1
           }
@@ -226,7 +235,7 @@ trait BaseTypeSeqs {
     override def map(g: Type => Type) = lateMap(g)
     override def lateMap(g: Type => Type) = orig.lateMap(x => g(f(x)))
     override def exists(p: Type => Boolean) = elems exists (x => p(f(x)))
-    override protected def maxDepthOfElems: Int = elems.map(x => typeDepth(f(x))).max
+    override protected def maxDepthOfElems: Depth = elems.map(x => typeDepth(f(x))).max
     override def toString = elems.mkString("MBTS(", ",", ")")
   }
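Base type sequences themselves are compiler-internal, but the related public reflection surface can be sketched as follows (assuming the standard runtime universe; printed output is approximate):

{{{
import scala.reflect.runtime.universe._

object BaseTypeDemo extends App {
  val tp = typeOf[List[Int]]

  // linearized base classes, most specific first
  println(tp.baseClasses.take(3))                         // e.g. List(class List, class LinearSeqOptimized, class LinearSeq)

  // view List[Int] as one of its base types
  println(tp.baseType(typeOf[Iterable[_]].typeSymbol))    // Iterable[Int]
}
}}}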
 
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
deleted file mode 100644
index 9f41f03..0000000
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-package scala.reflect
-package internal
-
-import Flags._
-
-trait BuildUtils { self: SymbolTable =>
-
-  class BuildImpl extends BuildApi {
-
-    def selectType(owner: Symbol, name: String): TypeSymbol =
-      select(owner, newTypeName(name)).asType
-
-    def selectTerm(owner: Symbol, name: String): TermSymbol = {
-      val result = select(owner, newTermName(name)).asTerm
-      if (result.isOverloaded) result.suchThat(!_.isMethod).asTerm
-      else result
-    }
-
-    private def select(owner: Symbol, name: Name): Symbol = {
-      val result = owner.info decl name
-      if (result ne NoSymbol) result
-      else
-        mirrorThatLoaded(owner).missingHook(owner, name) orElse
-        MissingRequirementError.notFound("%s %s in %s".format(if (name.isTermName) "term" else "type", name, owner.fullName))
-    }
-
-    def selectOverloadedMethod(owner: Symbol, name: String, index: Int): MethodSymbol = {
-      val result = owner.info.decl(newTermName(name)).alternatives(index)
-      if (result ne NoSymbol) result.asMethod
-      else MissingRequirementError.notFound("overloaded method %s #%d in %s".format(name, index, owner.fullName))
-    }
-
-    def newFreeTerm(name: String, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
-      newFreeTermSymbol(newTermName(name), value, flags, origin)
-
-    def newFreeType(name: String, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
-      newFreeTypeSymbol(newTypeName(name), flags, origin)
-
-    def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: Long, isClass: Boolean): Symbol =
-      owner.newNestedSymbol(name, pos, flags, isClass)
-
-    def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S =
-      sym.setAnnotations(annots)
-
-    def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S =
-      sym.setTypeSignature(tpe)
-
-    def flagsFromBits(bits: Long): FlagSet = bits
-
-    def emptyValDef: ValDef = self.emptyValDef
-
-    def This(sym: Symbol): Tree = self.This(sym)
-
-    def Select(qualifier: Tree, sym: Symbol): Select = self.Select(qualifier, sym)
-
-    def Ident(sym: Symbol): Ident = self.Ident(sym)
-
-    def TypeTree(tp: Type): TypeTree = self.TypeTree(tp)
-
-    def thisPrefix(sym: Symbol): Type = sym.thisPrefix
-
-    def setType[T <: Tree](tree: T, tpe: Type): T = { tree.setType(tpe); tree }
-
-    def setSymbol[T <: Tree](tree: T, sym: Symbol): T = { tree.setSymbol(sym); tree }
-  }
-
-  val build: BuildApi = new BuildImpl
-}
diff --git a/src/reflect/scala/reflect/internal/CapturedVariables.scala b/src/reflect/scala/reflect/internal/CapturedVariables.scala
index a3d2a8b..ef9646b 100644
--- a/src/reflect/scala/reflect/internal/CapturedVariables.scala
+++ b/src/reflect/scala/reflect/internal/CapturedVariables.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import Flags._
@@ -19,7 +20,7 @@ trait CapturedVariables { self: SymbolTable =>
   /** Convert type of a captured variable to *Ref type.
    */
   def capturedVariableType(vble: Symbol): Type =
-    capturedVariableType(vble, NoType, false)
+    capturedVariableType(vble, NoType, erasedTypes = false)
 
   /** Convert type of a captured variable to *Ref type.
    */
diff --git a/src/reflect/scala/reflect/internal/Chars.scala b/src/reflect/scala/reflect/internal/Chars.scala
index 2d07092..74413fd 100644
--- a/src/reflect/scala/reflect/internal/Chars.scala
+++ b/src/reflect/scala/reflect/internal/Chars.scala
@@ -2,7 +2,8 @@
  * Copyright 2006-2013 LAMP/EPFL
  * @author  Martin Odersky
  */
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import scala.annotation.{ tailrec, switch }
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
index eb70ff3..e0a6757 100644
--- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -3,13 +3,13 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import scala.annotation.switch
 
 object ClassfileConstants {
-
   final val JAVA_MAGIC = 0xCAFEBABE
   final val JAVA_MAJOR_VERSION = 45
   final val JAVA_MINOR_VERSION = 3
@@ -340,7 +340,7 @@ object ClassfileConstants {
       case JAVA_ACC_PRIVATE    => PRIVATE
       case JAVA_ACC_PROTECTED  => PROTECTED
       case JAVA_ACC_FINAL      => FINAL
-      case JAVA_ACC_SYNTHETIC  => SYNTHETIC
+      case JAVA_ACC_SYNTHETIC  => SYNTHETIC | ARTIFACT  // maybe should be just artifact?
       case JAVA_ACC_STATIC     => STATIC
       case JAVA_ACC_ABSTRACT   => if (isAnnotation) 0L else if (isClass) ABSTRACT else DEFERRED
       case JAVA_ACC_INTERFACE  => if (isAnnotation) 0L else TRAIT | INTERFACE | ABSTRACT
@@ -349,7 +349,7 @@ object ClassfileConstants {
     private def translateFlags(jflags: Int, baseFlags: Long, isAnnotation: Boolean, isClass: Boolean): Long = {
       def translateFlag0(jflags: Int): Long = translateFlag(jflags, isAnnotation, isClass)
       var res: Long = JAVA | baseFlags
-      /** fast, elegant, maintainable, pick any two... */
+      /* fast, elegant, maintainable, pick any two... */
       res |= translateFlag0(jflags & JAVA_ACC_PRIVATE)
       res |= translateFlag0(jflags & JAVA_ACC_PROTECTED)
       res |= translateFlag0(jflags & JAVA_ACC_FINAL)
@@ -367,7 +367,7 @@ object ClassfileConstants {
       translateFlags(jflags, if ((jflags & JAVA_ACC_FINAL) == 0) MUTABLE else 0 , isAnnotation(jflags), isClass = false)
     }
     def methodFlags(jflags: Int): Long = {
-      translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE else 0, isAnnotation(jflags), isClass = false)
+      translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE | ARTIFACT else 0, isAnnotation(jflags), isClass = false)
     }
   }
   object FlagTranslation extends FlagTranslation { }
@@ -375,11 +375,4 @@ object ClassfileConstants {
   def toScalaMethodFlags(flags: Int): Long = FlagTranslation methodFlags flags
   def toScalaClassFlags(flags: Int): Long  = FlagTranslation classFlags flags
   def toScalaFieldFlags(flags: Int): Long  = FlagTranslation fieldFlags flags
-
-  @deprecated("Use another method in this object", "2.10.0")
-  def toScalaFlags(flags: Int, isClass: Boolean = false, isField: Boolean = false): Long = (
-    if (isClass) toScalaClassFlags(flags)
-    else if (isField) toScalaFieldFlags(flags)
-    else toScalaMethodFlags(flags)
-  )
 }
diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala
index 28bc3e1..85d0efd 100644
--- a/src/reflect/scala/reflect/internal/Constants.scala
+++ b/src/reflect/scala/reflect/internal/Constants.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import java.lang.Integer.toOctalString
@@ -62,17 +63,17 @@ trait Constants extends api.Constants {
     def isAnyVal              = UnitTag <= tag && tag <= DoubleTag
 
     def tpe: Type = tag match {
-      case UnitTag    => UnitClass.tpe
-      case BooleanTag => BooleanClass.tpe
-      case ByteTag    => ByteClass.tpe
-      case ShortTag   => ShortClass.tpe
-      case CharTag    => CharClass.tpe
-      case IntTag     => IntClass.tpe
-      case LongTag    => LongClass.tpe
-      case FloatTag   => FloatClass.tpe
-      case DoubleTag  => DoubleClass.tpe
-      case StringTag  => StringClass.tpe
-      case NullTag    => NullClass.tpe
+      case UnitTag    => UnitTpe
+      case BooleanTag => BooleanTpe
+      case ByteTag    => ByteTpe
+      case ShortTag   => ShortTpe
+      case CharTag    => CharTpe
+      case IntTag     => IntTpe
+      case LongTag    => LongTpe
+      case FloatTag   => FloatTpe
+      case DoubleTag  => DoubleTpe
+      case StringTag  => StringTpe
+      case NullTag    => NullTpe
       case ClazzTag   => ClassType(typeValue)
       case EnumTag    => EnumType(symbolValue)
     }
@@ -94,7 +95,7 @@ trait Constants extends api.Constants {
 
     def booleanValue: Boolean =
       if (tag == BooleanTag) value.asInstanceOf[Boolean]
-      else throw new Error("value " + value + " is not a boolean");
+      else throw new Error("value " + value + " is not a boolean")
 
     def byteValue: Byte = tag match {
       case ByteTag   => value.asInstanceOf[Byte]
@@ -211,7 +212,7 @@ trait Constants extends api.Constants {
       case '"'  => "\\\""
       case '\'' => "\\\'"
       case '\\' => "\\\\"
-      case _    => if (ch.isControl) "\\0" + toOctalString(ch) else String.valueOf(ch)
+      case _    => if (ch.isControl) "\\0" + toOctalString(ch.toInt) else String.valueOf(ch)
     }
 
     def escapedStringValue: String = {
@@ -222,7 +223,15 @@ trait Constants extends api.Constants {
         case ClazzTag  =>
           def show(tpe: Type) = "classOf[" + signature(tpe) + "]"
           typeValue match {
-            case ErasedValueType(orig) => show(orig)
+            case ErasedValueType(clazz, underlying) =>
+              // A note on tpe_* usage here:
+              //
+              // We've intentionally erased the type arguments to the value class so that different
+              // instantiations of a particular value class that erase to the same underlying type
+              // don't result in spurious bridges (e.g. run/t6385.scala). I don't think that matters;
+              // printing trees of `classOf[ValueClass[String]]` shows `classOf[ValueClass]` at phase
+              // erasure both before and after the use of `tpe_*` here.
+              show(clazz.tpe_*)
             case _ => show(typeValue)
           }
         case CharTag   => "'" + escapedChar(charValue) + "'"
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 09d7af8..25d78f4 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -3,19 +3,20 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
+import scala.language.postfixOps
 import scala.annotation.{ switch, meta }
 import scala.collection.{ mutable, immutable }
 import Flags._
-import PartialFunction._
 import scala.reflect.api.{Universe => ApiUniverse}
 
 trait Definitions extends api.StandardDefinitions {
   self: SymbolTable =>
 
-  import rootMirror.{getModule, getClassByName, getRequiredClass, getRequiredModule, getRequiredPackage, getClassIfDefined, getModuleIfDefined, getPackageObject, getPackageObjectIfDefined, requiredClass, requiredModule}
+  import rootMirror.{getModuleByName, getPackage, getClassByName, getRequiredClass, getRequiredModule, getClassIfDefined, getModuleIfDefined, getPackageObject, getPackageIfDefined, getPackageObjectIfDefined, requiredClass, requiredModule}
 
   object definitions extends DefinitionsClass
 
@@ -29,12 +30,13 @@ trait Definitions extends api.StandardDefinitions {
 
   private def enterNewClass(owner: Symbol, name: TypeName, parents: List[Type], flags: Long = 0L): ClassSymbol = {
     val clazz = owner.newClassSymbol(name, NoPosition, flags)
-    clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz)
+    clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz) markAllCompleted
   }
-  private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): MethodSymbol = {
+  private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long): MethodSymbol = {
     val msym   = owner.newMethod(name.encode, NoPosition, flags)
     val params = msym.newSyntheticValueParams(formals)
-    msym setInfo MethodType(params, restpe)
+    val info = if (owner.isJavaDefined) JavaMethodType(params, restpe) else MethodType(params, restpe)
+    msym setInfo info markAllCompleted
   }
   private def enterNewMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): MethodSymbol =
     owner.info.decls enter newMethod(owner, name, formals, restpe, flags)
@@ -67,7 +69,7 @@ trait Definitions extends api.StandardDefinitions {
       tpnme.Unit    -> VOID_TAG
     )
 
-    private def catastrophicFailure() =
+    private[Definitions] def catastrophicFailure() =
       abort("Could not find value classes! This is a catastrophic failure.  scala " +
         scala.util.Properties.versionString)
 
@@ -77,16 +79,8 @@ trait Definitions extends api.StandardDefinitions {
         case _              => catastrophicFailure()
       }
     }
-    private def valueClassCompanion(name: TermName): ModuleSymbol = {
-      getMember(ScalaPackageClass, name) match {
-        case x: ModuleSymbol => x
-        case _               => catastrophicFailure()
-      }
-    }
-    private def valueCompanionMember(className: Name, methodName: TermName): TermSymbol =
-      getMemberMethod(valueClassCompanion(className.toTermName).moduleClass, methodName)
 
-    private def classesMap[T](f: Name => T) = symbolsMap(ScalaValueClassesNoUnit, f)
+    private[Definitions] def classesMap[T](f: Name => T) = symbolsMap(ScalaValueClassesNoUnit, f)
     private def symbolsMap[T](syms: List[Symbol], f: Name => T): Map[Symbol, T] = mapFrom(syms)(x => f(x.name))
     private def symbolsMapFilt[T](syms: List[Symbol], p: Name => Boolean, f: Name => T) = symbolsMap(syms filter (x => p(x.name)), f)
 
@@ -94,12 +88,10 @@ trait Definitions extends api.StandardDefinitions {
 
     lazy val abbrvTag         = symbolsMap(ScalaValueClasses, nameToTag) withDefaultValue OBJECT_TAG
     lazy val numericWeight    = symbolsMapFilt(ScalaValueClasses, nameToWeight.keySet, nameToWeight)
-    lazy val boxedModule      = classesMap(x => getModule(boxedName(x)))
+    lazy val boxedModule      = classesMap(x => getModuleByName(boxedName(x)))
     lazy val boxedClass       = classesMap(x => getClassByName(boxedName(x)))
     lazy val refClass         = classesMap(x => getRequiredClass("scala.runtime." + x + "Ref"))
     lazy val volatileRefClass = classesMap(x => getRequiredClass("scala.runtime.Volatile" + x + "Ref"))
-    lazy val boxMethod        = classesMap(x => valueCompanionMember(x, nme.box))
-    lazy val unboxMethod      = classesMap(x => valueCompanionMember(x, nme.unbox))
 
     def isNumericSubClass(sub: Symbol, sup: Symbol) = (
          (numericWeight contains sub)
@@ -124,19 +116,19 @@ trait Definitions extends api.StandardDefinitions {
     lazy val FloatClass   = valueClassSymbol(tpnme.Float)
     lazy val DoubleClass  = valueClassSymbol(tpnme.Double)
     lazy val BooleanClass = valueClassSymbol(tpnme.Boolean)
-      lazy val Boolean_and = getMemberMethod(BooleanClass, nme.ZAND)
-      lazy val Boolean_or  = getMemberMethod(BooleanClass, nme.ZOR)
-      lazy val Boolean_not = getMemberMethod(BooleanClass, nme.UNARY_!)
-
-    lazy val UnitTpe    = UnitClass.toTypeConstructor
-    lazy val ByteTpe    = ByteClass.toTypeConstructor
-    lazy val ShortTpe   = ShortClass.toTypeConstructor
-    lazy val CharTpe    = CharClass.toTypeConstructor
-    lazy val IntTpe     = IntClass.toTypeConstructor
-    lazy val LongTpe    = LongClass.toTypeConstructor
-    lazy val FloatTpe   = FloatClass.toTypeConstructor
-    lazy val DoubleTpe  = DoubleClass.toTypeConstructor
-    lazy val BooleanTpe = BooleanClass.toTypeConstructor
+          def Boolean_and = getMemberMethod(BooleanClass, nme.ZAND)
+          def Boolean_or  = getMemberMethod(BooleanClass, nme.ZOR)
+          def Boolean_not = getMemberMethod(BooleanClass, nme.UNARY_!)
+
+    lazy val UnitTpe      = UnitClass.tpe
+    lazy val ByteTpe      = ByteClass.tpe
+    lazy val ShortTpe     = ShortClass.tpe
+    lazy val CharTpe      = CharClass.tpe
+    lazy val IntTpe       = IntClass.tpe
+    lazy val LongTpe      = LongClass.tpe
+    lazy val FloatTpe     = FloatClass.tpe
+    lazy val DoubleTpe    = DoubleClass.tpe
+    lazy val BooleanTpe   = BooleanClass.tpe
 
     lazy val ScalaNumericValueClasses = ScalaValueClasses filterNot Set[Symbol](UnitClass, BooleanClass)
     lazy val ScalaValueClassesNoUnit  = ScalaValueClasses filterNot (_ eq UnitClass)
@@ -151,50 +143,26 @@ trait Definitions extends api.StandardDefinitions {
       FloatClass,
       DoubleClass
     )
-    def ScalaValueClassCompanions: List[Symbol] = ScalaValueClasses map (_.companionSymbol)
     def ScalaPrimitiveValueClasses: List[ClassSymbol] = ScalaValueClasses
+
+    def underlyingOfValueClass(clazz: Symbol): Type =
+      clazz.derivedValueClassUnbox.tpe.resultType
+
   }
 
   abstract class DefinitionsClass extends DefinitionsApi with ValueClassDefinitions {
     private var isInitialized = false
     def isDefinitionsInitialized = isInitialized
 
-    // symbols related to packages
-    var emptypackagescope: Scope = null //debug
-
-    @deprecated("Moved to rootMirror.RootPackage", "2.10.0")
-    val RootPackage: ModuleSymbol = rootMirror.RootPackage
-
-    @deprecated("Moved to rootMirror.RootClass", "2.10.0")
-    val RootClass: ClassSymbol = rootMirror.RootClass
-
-    @deprecated("Moved to rootMirror.EmptyPackage", "2.10.0")
-    val EmptyPackage: ModuleSymbol = rootMirror.EmptyPackage
-
-    @deprecated("Moved to rootMirror.EmptyPackageClass", "2.10.0")
-    val EmptyPackageClass: ClassSymbol = rootMirror.EmptyPackageClass
-
     // It becomes tricky to create dedicated objects for other symbols because
     // of initialization order issues.
-    lazy val JavaLangPackage      = getRequiredPackage(sn.JavaLang)
+    lazy val JavaLangPackage      = getPackage("java.lang")
     lazy val JavaLangPackageClass = JavaLangPackage.moduleClass.asClass
-    lazy val ScalaPackage         = getRequiredPackage(nme.scala_)
+    lazy val ScalaPackage         = getPackage("scala")
     lazy val ScalaPackageClass    = ScalaPackage.moduleClass.asClass
-    lazy val RuntimePackage       = getRequiredPackage("scala.runtime")
+    lazy val RuntimePackage       = getPackage("scala.runtime")
     lazy val RuntimePackageClass  = RuntimePackage.moduleClass.asClass
 
-    lazy val JavaLangEnumClass = requiredClass[java.lang.Enum[_]]
-
-    // convenient one-argument parameter lists
-    lazy val anyparam     = List(AnyClass.tpe)
-    lazy val anyvalparam  = List(AnyValClass.typeConstructor)
-    lazy val anyrefparam  = List(AnyRefClass.typeConstructor)
-
-    // private parameter conveniences
-    private def booltype    = BooleanClass.tpe
-    private def inttype     = IntClass.tpe
-    private def stringtype  = StringClass.tpe
-
     def javaTypeToValueClass(jtype: Class[_]): Symbol = jtype match {
       case java.lang.Void.TYPE      => UnitClass
       case java.lang.Byte.TYPE      => ByteClass
@@ -224,61 +192,92 @@ trait Definitions extends api.StandardDefinitions {
      */
     def fullyInitializeSymbol(sym: Symbol): Symbol = {
       sym.initialize
+      // Watch out for those darn raw types on method parameters
+      if (sym.owner.initialize.isJavaDefined)
+        sym.cookJavaRawInfo()
+
       fullyInitializeType(sym.info)
-      fullyInitializeType(sym.tpe)
+      fullyInitializeType(sym.tpe_*)
       sym
     }
     def fullyInitializeType(tp: Type): Type = {
       tp.typeParams foreach fullyInitializeSymbol
-      tp.paramss.flatten foreach fullyInitializeSymbol
+      mforeach(tp.paramss)(fullyInitializeSymbol)
       tp
     }
     def fullyInitializeScope(scope: Scope): Scope = {
       scope.sorted foreach fullyInitializeSymbol
       scope
     }
+    /** Is this symbol a member of Object or Any? */
+    def isUniversalMember(sym: Symbol) = ObjectClass isSubClass sym.owner
+
+    /** Is this symbol unimportable? Unimportable symbols include:
+     *  - constructors, because <init> is not a real name
+     *  - private[this] members, which cannot be referenced from anywhere else
+     *  - members of Any or Object, because every instance will inherit a
+     *    definition which supersedes the imported one
+     */
+    def isUnimportable(sym: Symbol) = (
+         (sym eq NoSymbol)
+      || sym.isConstructor
+      || sym.isPrivateLocal
+    )
+    def isUnimportableUnlessRenamed(sym: Symbol) = isUnimportable(sym) || isUniversalMember(sym)
+    def isImportable(sym: Symbol) = !isUnimportable(sym)
+
     /** Is this type equivalent to Any, AnyVal, or AnyRef? */
     def isTrivialTopType(tp: Type) = (
-         tp =:= AnyClass.tpe
-      || tp =:= AnyValClass.tpe
-      || tp =:= AnyRefClass.tpe
+         tp =:= AnyTpe
+      || tp =:= AnyValTpe
+      || tp =:= AnyRefTpe
     )
-    /** Does this type have a parent which is none of Any, AnyVal, or AnyRef? */
-    def hasNonTrivialParent(tp: Type) = tp.parents exists (t => !isTrivialTopType(tp))
+
+    def hasMultipleNonImplicitParamLists(member: Symbol): Boolean = hasMultipleNonImplicitParamLists(member.info)
+    def hasMultipleNonImplicitParamLists(info: Type): Boolean = info match {
+      case PolyType(_, restpe)                                   => hasMultipleNonImplicitParamLists(restpe)
+      case MethodType(_, MethodType(p :: _, _)) if !p.isImplicit => true
+      case _                                                     => false
+    }
 
     private def fixupAsAnyTrait(tpe: Type): Type = tpe match {
       case ClassInfoType(parents, decls, clazz) =>
         if (parents.head.typeSymbol == AnyClass) tpe
         else {
           assert(parents.head.typeSymbol == ObjectClass, parents)
-          ClassInfoType(AnyClass.tpe :: parents.tail, decls, clazz)
+          ClassInfoType(AnyTpe :: parents.tail, decls, clazz)
         }
       case PolyType(tparams, restpe) =>
         PolyType(tparams, fixupAsAnyTrait(restpe))
-//      case _ => tpe
     }
 
     // top types
-    lazy val AnyClass    = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT)
-    lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectClass.tpe)
+    lazy val AnyClass    = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT) markAllCompleted
+    lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe) markAllCompleted
     lazy val ObjectClass = getRequiredClass(sn.Object.toString)
-    lazy val AnyTpe     = definitions.AnyClass.toTypeConstructor
-    lazy val AnyRefTpe  = definitions.AnyRefClass.toTypeConstructor
-    lazy val ObjectTpe  = definitions.ObjectClass.toTypeConstructor
 
-    // Note: this is not the type alias AnyRef, it's a companion-like
-    // object used by the @specialize annotation.
-    lazy val AnyRefModule = getMemberModule(ScalaPackageClass, nme.AnyRef)
-    @deprecated("Use AnyRefModule", "2.10.0")
-    def Predef_AnyRef = AnyRefModule
+    // Cached types for core monomorphic classes
+    lazy val AnyRefTpe       = AnyRefClass.tpe
+    lazy val AnyTpe          = AnyClass.tpe
+    lazy val AnyValTpe       = AnyValClass.tpe
+    lazy val BoxedUnitTpe    = BoxedUnitClass.tpe
+    lazy val NothingTpe      = NothingClass.tpe
+    lazy val NullTpe         = NullClass.tpe
+    lazy val ObjectTpe       = ObjectClass.tpe
+    lazy val SerializableTpe = SerializableClass.tpe
+    lazy val StringTpe       = StringClass.tpe
+    lazy val ThrowableTpe    = ThrowableClass.tpe
+
+    lazy val ConstantTrue  = ConstantType(Constant(true))
+    lazy val ConstantFalse = ConstantType(Constant(false))
+    lazy val ConstantNull  = ConstantType(Constant(null))
 
     lazy val AnyValClass: ClassSymbol = (ScalaPackageClass.info member tpnme.AnyVal orElse {
-      val anyval    = enterNewClass(ScalaPackageClass, tpnme.AnyVal, List(AnyClass.tpe, NotNullClass.tpe), ABSTRACT)
+      val anyval    = enterNewClass(ScalaPackageClass, tpnme.AnyVal, AnyTpe :: Nil, ABSTRACT)
       val av_constr = anyval.newClassConstructor(NoPosition)
       anyval.info.decls enter av_constr
-      anyval
+      anyval markAllCompleted
     }).asInstanceOf[ClassSymbol]
-    lazy val AnyValTpe  = definitions.AnyValClass.toTypeConstructor
       def AnyVal_getClass = getMemberMethod(AnyValClass, nme.getClass_)
 
     // bottom types
@@ -289,8 +288,10 @@ trait Definitions extends api.StandardDefinitions {
       locally {
         this initFlags ABSTRACT | FINAL
         this setInfoAndEnter ClassInfoType(List(parent.tpe), newScope, this)
+        this markAllCompleted
       }
       final override def isBottomClass = true
+      final override def isThreadsafe(purpose: SymbolOps): Boolean = true
     }
     final object NothingClass extends BottomClassSymbol(tpnme.Nothing, AnyClass) {
       override def isSubClass(that: Symbol) = true
@@ -301,8 +302,6 @@ trait Definitions extends api.StandardDefinitions {
         || (that ne NothingClass) && (that isSubClass ObjectClass)
       )
     }
-    lazy val NothingTpe = definitions.NothingClass.toTypeConstructor
-    lazy val NullTpe    = definitions.NullClass.toTypeConstructor
 
     // exceptions and other throwables
     lazy val ClassCastExceptionClass        = requiredClass[ClassCastException]
@@ -314,6 +313,8 @@ trait Definitions extends api.StandardDefinitions {
     lazy val ThrowableClass                 = getClassByName(sn.Throwable)
     lazy val UninitializedErrorClass        = requiredClass[UninitializedFieldError]
 
+    lazy val UninitializedFieldConstructor = UninitializedErrorClass.primaryConstructor
+
     // fundamental reference classes
     lazy val PartialFunctionClass       = requiredClass[PartialFunction[_,_]]
     lazy val AbstractPartialFunctionClass = requiredClass[scala.runtime.AbstractPartialFunction[_,_]]
@@ -336,22 +337,9 @@ trait Definitions extends api.StandardDefinitions {
     // Those modules and their module classes
     lazy val UnqualifiedOwners  = UnqualifiedModules.toSet ++ UnqualifiedModules.map(_.moduleClass)
 
-    lazy val PredefModule      = requiredModule[scala.Predef.type]
-    lazy val PredefModuleClass = PredefModule.moduleClass
-
-      def Predef_classOf             = getMemberMethod(PredefModule, nme.classOf)
-      def Predef_identity            = getMemberMethod(PredefModule, nme.identity)
-      def Predef_conforms            = getMemberMethod(PredefModule, nme.conforms)
-    def Predef_wrapRefArray        = getMemberMethod(PredefModule, nme.wrapRefArray)
-    def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp))
-    def Predef_???                 = getMemberMethod(PredefModule, nme.???)
-    def Predef_implicitly          = getMemberMethod(PredefModule, nme.implicitly)
-
-    /** Is `sym` a member of Predef with the given name?
-     *  Note: DON't replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def`
-     *  which does a member lookup (it can't be a lazy val because we might reload Predef
-     *  during resident compilations).
-     */
+    lazy val PredefModule               = requiredModule[scala.Predef.type]
+         def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp))
+         def Predef_???                 = getMemberMethod(PredefModule, nme.???)
     def isPredefMemberNamed(sym: Symbol, name: Name) = (
       (sym.name == name) && (sym.owner == PredefModule.moduleClass)
     )
@@ -359,50 +347,30 @@ trait Definitions extends api.StandardDefinitions {
     /** Specialization.
      */
     lazy val SpecializableModule  = requiredModule[Specializable]
-    lazy val GroupOfSpecializable = getMemberClass(SpecializableModule, tpnme.Group)
 
-    lazy val ConsoleModule      = requiredModule[scala.Console.type]
     lazy val ScalaRunTimeModule = requiredModule[scala.runtime.ScalaRunTime.type]
     lazy val SymbolModule       = requiredModule[scala.Symbol.type]
-    lazy val Symbol_apply       = getMemberMethod(SymbolModule, nme.apply)
-
-      def arrayApplyMethod = getMemberMethod(ScalaRunTimeModule, nme.array_apply)
-      def arrayUpdateMethod = getMemberMethod(ScalaRunTimeModule, nme.array_update)
-      def arrayLengthMethod = getMemberMethod(ScalaRunTimeModule, nme.array_length)
-      def arrayCloneMethod = getMemberMethod(ScalaRunTimeModule, nme.array_clone)
-      def ensureAccessibleMethod = getMemberMethod(ScalaRunTimeModule, nme.ensureAccessible)
-      def scalaRuntimeSameElements = getMemberMethod(ScalaRunTimeModule, nme.sameElements)
-      def arrayClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayClass)
-      def arrayElementClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayElementClass)
+         def Symbol_apply       = getMemberMethod(SymbolModule, nme.apply)
 
     // classes with special meanings
     lazy val StringAddClass             = requiredClass[scala.runtime.StringAdd]
-    lazy val ArrowAssocClass            = getRequiredClass("scala.Predef.ArrowAssoc") // SI-5731
-    lazy val StringAdd_+                = getMemberMethod(StringAddClass, nme.PLUS)
-    lazy val NotNullClass               = getRequiredClass("scala.NotNull")
     lazy val ScalaNumberClass           = requiredClass[scala.math.ScalaNumber]
     lazy val TraitSetterAnnotationClass = requiredClass[scala.runtime.TraitSetter]
     lazy val DelayedInitClass           = requiredClass[scala.DelayedInit]
       def delayedInitMethod = getMemberMethod(DelayedInitClass, nme.delayedInit)
-      // a dummy value that communicates that a delayedInit call is compiler-generated
-      // from phase UnCurry to phase Constructors
-      // !!! This is not used anywhere (it was checked in that way.)
-      // def delayedInitArgVal = EmptyPackageClass.newValue(NoPosition, nme.delayedInitArg)
-      //   .setInfo(UnitClass.tpe)
 
     lazy val TypeConstraintClass   = requiredClass[scala.annotation.TypeConstraint]
-    lazy val SingletonClass        = enterNewClass(ScalaPackageClass, tpnme.Singleton, anyparam, ABSTRACT | TRAIT | FINAL)
+    lazy val SingletonClass        = enterNewClass(ScalaPackageClass, tpnme.Singleton, AnyTpe :: Nil, ABSTRACT | TRAIT | FINAL) markAllCompleted
     lazy val SerializableClass     = requiredClass[scala.Serializable]
     lazy val JavaSerializableClass = requiredClass[java.io.Serializable] modifyInfo fixupAsAnyTrait
     lazy val ComparableClass       = requiredClass[java.lang.Comparable[_]] modifyInfo fixupAsAnyTrait
-    lazy val CloneableClass        = requiredClass[scala.Cloneable]
     lazy val JavaCloneableClass    = requiredClass[java.lang.Cloneable]
     lazy val JavaNumberClass       = requiredClass[java.lang.Number]
+    lazy val JavaEnumClass         = requiredClass[java.lang.Enum[_]]
     lazy val RemoteInterfaceClass  = requiredClass[java.rmi.Remote]
     lazy val RemoteExceptionClass  = requiredClass[java.rmi.RemoteException]
 
-    lazy val ByNameParamClass       = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyClass.tpe)
-    lazy val EqualsPatternClass     = specialPolyClass(tpnme.EQUALS_PATTERN_NAME, 0L)(_ => AnyClass.tpe)
+    lazy val ByNameParamClass       = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyTpe)
     lazy val JavaRepeatedParamClass = specialPolyClass(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => arrayType(tparam.tpe))
     lazy val RepeatedParamClass     = specialPolyClass(tpnme.REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => seqType(tparam.tpe))
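
For orientation (editorial sketch, not part of the patch): at the source level these special polymorphic classes correspond to by-name (`=> T`) and repeated (`T*`) parameters, which is what the `isByName`/`isRepeated` predicates added below test against a parameter's `tpe_*`.

    object ParamEncodingSketch {
      // `msg: => String` is typed internally via ByNameParamClass[String];
      // isByName(param) would hold for this parameter symbol.
      def log(msg: => String): Unit = println(msg)

      // `xs: Int*` is typed internally via RepeatedParamClass[Int] (a Seq-based
      // repeated parameter); isRepeated(param) would hold here.
      def sum(xs: Int*): Int = xs.sum
    }
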
 
@@ -410,7 +378,8 @@ trait Definitions extends api.StandardDefinitions {
     def isScalaRepeatedParamType(tp: Type) = tp.typeSymbol == RepeatedParamClass
     def isJavaRepeatedParamType(tp: Type)  = tp.typeSymbol == JavaRepeatedParamClass
     def isRepeatedParamType(tp: Type)      = isScalaRepeatedParamType(tp) || isJavaRepeatedParamType(tp)
-    def isRepeated(param: Symbol)          = isRepeatedParamType(param.tpe)
+    def isRepeated(param: Symbol)          = isRepeatedParamType(param.tpe_*)
+    def isByName(param: Symbol)            = isByNameParamType(param.tpe_*)
     def isCastSymbol(sym: Symbol)          = sym == Any_asInstanceOf || sym == Object_asInstanceOf
 
     def isJavaVarArgsMethod(m: Symbol)      = m.isMethod && isJavaVarArgs(m.info.params)
@@ -419,52 +388,49 @@ trait Definitions extends api.StandardDefinitions {
     def isVarArgsList(params: Seq[Symbol])  = params.nonEmpty && isRepeatedParamType(params.last.tpe)
     def isVarArgTypes(formals: Seq[Type])   = formals.nonEmpty && isRepeatedParamType(formals.last)
 
+    def firstParamType(tpe: Type): Type = tpe.paramTypes match {
+      case p :: _ => p
+      case _      => NoType
+    }
+    def isImplicitParamss(paramss: List[List[Symbol]]) = paramss match {
+      case (p :: _) :: _ => p.isImplicit
+      case _             => false
+    }
+
     def hasRepeatedParam(tp: Type): Boolean = tp match {
       case MethodType(formals, restpe) => isScalaVarArgs(formals) || hasRepeatedParam(restpe)
       case PolyType(_, restpe)         => hasRepeatedParam(restpe)
       case _                           => false
     }
 
-    def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
-      case TypeRef(_, RepeatedParamClass, arg :: Nil) => seqType(arg)
-      case _                                          => tp
-    }
-
-    def seqToRepeated(tp: Type): Type = (tp baseType SeqClass) match {
-      case TypeRef(_, SeqClass, arg :: Nil) => scalaRepeatedType(arg)
-      case _                                => tp
-    }
-
-    def isPrimitiveArray(tp: Type) = tp match {
-      case TypeRef(_, ArrayClass, arg :: Nil) => isPrimitiveValueClass(arg.typeSymbol)
-      case _                                  => false
-    }
-    def isReferenceArray(tp: Type) = tp match {
-      case TypeRef(_, ArrayClass, arg :: Nil) => arg <:< AnyRefClass.tpe
-      case _                                  => false
-    }
-    def isArrayOfSymbol(tp: Type, elem: Symbol) = tp match {
-      case TypeRef(_, ArrayClass, arg :: Nil) => arg.typeSymbol == elem
-      case _                                  => false
-    }
-
-    lazy val MatchingStrategyClass = getRequiredClass("scala.MatchingStrategy")
+    // wrapping and unwrapping
+    def dropByName(tp: Type): Type = elementExtract(ByNameParamClass, tp) orElse tp
+    def dropRepeated(tp: Type): Type = (
+      if (isJavaRepeatedParamType(tp)) elementExtract(JavaRepeatedParamClass, tp) orElse tp
+      else if (isScalaRepeatedParamType(tp)) elementExtract(RepeatedParamClass, tp) orElse tp
+      else tp
+    )
+    def repeatedToSingle(tp: Type): Type                     = elementExtract(RepeatedParamClass, tp) orElse elementExtract(JavaRepeatedParamClass, tp) orElse tp
+     // We don't need to deal with JavaRepeatedParamClass here, as `repeatedToSeq` is only called in the patmat translation for Scala sources.
+    def repeatedToSeq(tp: Type): Type                        = elementTransform(RepeatedParamClass, tp)(seqType) orElse tp
+    def seqToRepeated(tp: Type): Type                        = elementTransform(SeqClass, tp)(scalaRepeatedType) orElse tp
+    def isReferenceArray(tp: Type)                           = elementTest(ArrayClass, tp)(_ <:< AnyRefTpe)
+    def isArrayOfSymbol(tp: Type, elem: Symbol)              = elementTest(ArrayClass, tp)(_.typeSymbol == elem)
+    def elementType(container: Symbol, tp: Type): Type       = elementExtract(container, tp)
 
     // collections classes
     lazy val ConsClass          = requiredClass[scala.collection.immutable.::[_]]
-    lazy val IterableClass      = requiredClass[scala.collection.Iterable[_]]
     lazy val IteratorClass      = requiredClass[scala.collection.Iterator[_]]
+    lazy val IterableClass      = requiredClass[scala.collection.Iterable[_]]
     lazy val ListClass          = requiredClass[scala.collection.immutable.List[_]]
     lazy val SeqClass           = requiredClass[scala.collection.Seq[_]]
     lazy val StringBuilderClass = requiredClass[scala.collection.mutable.StringBuilder]
     lazy val TraversableClass   = requiredClass[scala.collection.Traversable[_]]
 
     lazy val ListModule       = requiredModule[scala.collection.immutable.List.type]
-      lazy val List_apply     = getMemberMethod(ListModule, nme.apply)
+         def List_apply       = getMemberMethod(ListModule, nme.apply)
     lazy val NilModule        = requiredModule[scala.collection.immutable.Nil.type]
     lazy val SeqModule        = requiredModule[scala.collection.Seq.type]
-    lazy val IteratorModule   = requiredModule[scala.collection.Iterator.type]
-      lazy val Iterator_apply = getMemberMethod(IteratorModule, nme.apply)
 
     // arrays and their members
     lazy val ArrayModule                   = requiredModule[scala.Array.type]
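
A hedged usage sketch for the new wrapping/unwrapping helpers, assuming access to a `scala.tools.nsc.Global` instance (which mixes in this `Definitions` trait); the commented results are expectations derived from the definitions above, not verified output.

    def unwrapSketch(global: scala.tools.nsc.Global): Unit = {
      import global._
      import definitions._
      val repeated = scalaRepeatedType(IntTpe)                          // internal form of Int*
      val asSeq    = repeatedToSeq(repeated)                            // expected: Seq[Int]
      val back     = seqToRepeated(seqType(IntTpe))                     // expected: Int*
      val byName   = dropByName(appliedType(ByNameParamClass, IntTpe))  // expected: Int
      val elem     = elementType(ArrayClass, arrayType(StringTpe))      // expected: String
    }
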
@@ -479,47 +445,44 @@ trait Definitions extends api.StandardDefinitions {
 
     // reflection / structural types
     lazy val SoftReferenceClass     = requiredClass[java.lang.ref.SoftReference[_]]
-    lazy val WeakReferenceClass     = requiredClass[java.lang.ref.WeakReference[_]]
     lazy val MethodClass            = getClassByName(sn.MethodAsObject)
-      def methodClass_setAccessible = getMemberMethod(MethodClass, nme.setAccessible)
     lazy val EmptyMethodCacheClass  = requiredClass[scala.runtime.EmptyMethodCache]
     lazy val MethodCacheClass       = requiredClass[scala.runtime.MethodCache]
       def methodCache_find          = getMemberMethod(MethodCacheClass, nme.find_)
       def methodCache_add           = getMemberMethod(MethodCacheClass, nme.add_)
 
+    // XML
+    lazy val ScalaXmlTopScope = getModuleIfDefined("scala.xml.TopScope")
+    lazy val ScalaXmlPackage  = getPackageIfDefined("scala.xml")
+
     // scala.reflect
     lazy val ReflectPackage              = requiredModule[scala.reflect.`package`.type]
     lazy val ReflectApiPackage           = getPackageObjectIfDefined("scala.reflect.api") // defined in scala-reflect.jar, so we need to be careful
     lazy val ReflectRuntimePackage       = getPackageObjectIfDefined("scala.reflect.runtime") // defined in scala-reflect.jar, so we need to be careful
-         def ReflectRuntimeUniverse      = if (ReflectRuntimePackage != NoSymbol) getMemberValue(ReflectRuntimePackage, nme.universe) else NoSymbol
-         def ReflectRuntimeCurrentMirror = if (ReflectRuntimePackage != NoSymbol) getMemberMethod(ReflectRuntimePackage, nme.currentMirror) else NoSymbol
+         def ReflectRuntimeUniverse      = ReflectRuntimePackage.map(sym => getMemberValue(sym, nme.universe))
+         def ReflectRuntimeCurrentMirror = ReflectRuntimePackage.map(sym => getMemberMethod(sym, nme.currentMirror))
+
+    lazy val UniverseClass    = getClassIfDefined("scala.reflect.api.Universe") // defined in scala-reflect.jar, so we need to be careful
+         def UniverseInternal = getMemberValue(UniverseClass, nme.internal)
 
-    lazy val PartialManifestClass  = getTypeMember(ReflectPackage, tpnme.ClassManifest)
     lazy val PartialManifestModule = requiredModule[scala.reflect.ClassManifestFactory.type]
     lazy val FullManifestClass     = requiredClass[scala.reflect.Manifest[_]]
     lazy val FullManifestModule    = requiredModule[scala.reflect.ManifestFactory.type]
     lazy val OptManifestClass      = requiredClass[scala.reflect.OptManifest[_]]
     lazy val NoManifest            = requiredModule[scala.reflect.NoManifest.type]
 
+    lazy val TreesClass            = getClassIfDefined("scala.reflect.api.Trees") // defined in scala-reflect.jar, so we need to be careful
+
     lazy val ExprsClass            = getClassIfDefined("scala.reflect.api.Exprs") // defined in scala-reflect.jar, so we need to be careful
-    lazy val ExprClass             = if (ExprsClass != NoSymbol) getMemberClass(ExprsClass, tpnme.Expr) else NoSymbol
-         def ExprSplice            = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.splice) else NoSymbol
-         def ExprValue             = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.value) else NoSymbol
-    lazy val ExprModule            = if (ExprsClass != NoSymbol) getMemberModule(ExprsClass, nme.Expr) else NoSymbol
+         def ExprClass             = ExprsClass.map(sym => getMemberClass(sym, tpnme.Expr))
+         def ExprSplice            = ExprClass.map(sym => getMemberMethod(sym, nme.splice))
+         def ExprValue             = ExprClass.map(sym => getMemberMethod(sym, nme.value))
 
     lazy val ClassTagModule         = requiredModule[scala.reflect.ClassTag[_]]
     lazy val ClassTagClass          = requiredClass[scala.reflect.ClassTag[_]]
     lazy val TypeTagsClass          = getClassIfDefined("scala.reflect.api.TypeTags") // defined in scala-reflect.jar, so we need to be careful
-    lazy val WeakTypeTagClass       = if (TypeTagsClass != NoSymbol) getMemberClass(TypeTagsClass, tpnme.WeakTypeTag) else NoSymbol
-    lazy val WeakTypeTagModule      = if (TypeTagsClass != NoSymbol) getMemberModule(TypeTagsClass, nme.WeakTypeTag) else NoSymbol
-    lazy val TypeTagClass           = if (TypeTagsClass != NoSymbol) getMemberClass(TypeTagsClass, tpnme.TypeTag) else NoSymbol
-    lazy val TypeTagModule          = if (TypeTagsClass != NoSymbol) getMemberModule(TypeTagsClass, nme.TypeTag) else NoSymbol
-         def materializeClassTag    = getMemberMethod(ReflectPackage, nme.materializeClassTag)
-         def materializeWeakTypeTag = if (ReflectApiPackage != NoSymbol) getMemberMethod(ReflectApiPackage, nme.materializeWeakTypeTag) else NoSymbol
-         def materializeTypeTag     = if (ReflectApiPackage != NoSymbol) getMemberMethod(ReflectApiPackage, nme.materializeTypeTag) else NoSymbol
 
     lazy val ApiUniverseClass      = getClassIfDefined("scala.reflect.api.Universe") // defined in scala-reflect.jar, so we need to be careful
-         def ApiUniverseReify      = if (ApiUniverseClass != NoSymbol) getMemberMethod(ApiUniverseClass, nme.reify) else NoSymbol
     lazy val JavaUniverseClass     = getClassIfDefined("scala.reflect.api.JavaUniverse") // defined in scala-reflect.jar, so we need to be careful
 
     lazy val MirrorClass           = getClassIfDefined("scala.reflect.api.Mirror") // defined in scala-reflect.jar, so we need to be careful
@@ -527,15 +490,24 @@ trait Definitions extends api.StandardDefinitions {
     lazy val TypeCreatorClass      = getClassIfDefined("scala.reflect.api.TypeCreator") // defined in scala-reflect.jar, so we need to be careful
     lazy val TreeCreatorClass      = getClassIfDefined("scala.reflect.api.TreeCreator") // defined in scala-reflect.jar, so we need to be careful
 
-    lazy val MacroContextClass                   = getClassIfDefined("scala.reflect.macros.Context") // defined in scala-reflect.jar, so we need to be careful
-         def MacroContextPrefix                  = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.prefix) else NoSymbol
-         def MacroContextPrefixType              = if (MacroContextClass != NoSymbol) getTypeMember(MacroContextClass, tpnme.PrefixType) else NoSymbol
-         def MacroContextUniverse                = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.universe) else NoSymbol
-         def MacroContextMirror                  = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.mirror) else NoSymbol
-    lazy val MacroImplAnnotation                 = requiredClass[scala.reflect.macros.internal.macroImpl]
-
-    lazy val StringContextClass                  = requiredClass[scala.StringContext]
-         def StringContext_f                     = getMemberMethod(StringContextClass, nme.f)
+    lazy val BlackboxContextClass         = getClassIfDefined("scala.reflect.macros.blackbox.Context") // defined in scala-reflect.jar, so we need to be careful
+    lazy val WhiteboxContextClass         = getClassIfDefined("scala.reflect.macros.whitebox.Context") // defined in scala-reflect.jar, so we need to be careful
+         def MacroContextPrefix           = BlackboxContextClass.map(sym => getMemberMethod(sym, nme.prefix))
+         def MacroContextPrefixType       = BlackboxContextClass.map(sym => getTypeMember(sym, tpnme.PrefixType))
+         def MacroContextUniverse         = BlackboxContextClass.map(sym => getMemberMethod(sym, nme.universe))
+         def MacroContextExprClass        = BlackboxContextClass.map(sym => getTypeMember(sym, tpnme.Expr))
+         def MacroContextWeakTypeTagClass = BlackboxContextClass.map(sym => getTypeMember(sym, tpnme.WeakTypeTag))
+         def MacroContextTreeType         = BlackboxContextClass.map(sym => getTypeMember(sym, tpnme.Tree))
+    lazy val MacroImplAnnotation          = requiredClass[scala.reflect.macros.internal.macroImpl]
+
+    lazy val StringContextClass           = requiredClass[scala.StringContext]
+
+    // SI-8392 a reflection universe on classpath may not have
+    // quasiquotes, if e.g. crosstyping with -Xsource on
+    lazy val QuasiquoteClass             = if (ApiUniverseClass != NoSymbol) getMemberIfDefined(ApiUniverseClass, tpnme.Quasiquote) else NoSymbol
+    lazy val QuasiquoteClass_api         = if (QuasiquoteClass != NoSymbol) getMember(QuasiquoteClass, tpnme.api) else NoSymbol
+    lazy val QuasiquoteClass_api_apply   = if (QuasiquoteClass_api != NoSymbol) getMember(QuasiquoteClass_api, nme.apply) else NoSymbol
+    lazy val QuasiquoteClass_api_unapply = if (QuasiquoteClass_api != NoSymbol) getMember(QuasiquoteClass_api, nme.unapply) else NoSymbol
 
     lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature]
     lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature]
@@ -543,7 +515,6 @@ trait Definitions extends api.StandardDefinitions {
     // Option classes
     lazy val OptionClass: ClassSymbol   = requiredClass[Option[_]]
     lazy val OptionModule: ModuleSymbol = requiredModule[scala.Option.type]
-      lazy val Option_apply             = getMemberMethod(OptionModule, nme.apply)
     lazy val SomeClass: ClassSymbol     = requiredClass[Some[_]]
     lazy val NoneModule: ModuleSymbol   = requiredModule[scala.None.type]
     lazy val SomeModule: ModuleSymbol   = requiredModule[scala.Some.type]
@@ -551,10 +522,6 @@ trait Definitions extends api.StandardDefinitions {
     def compilerTypeFromTag(tt: ApiUniverse # WeakTypeTag[_]): Type = tt.in(rootMirror).tpe
     def compilerSymbolFromTag(tt: ApiUniverse # WeakTypeTag[_]): Symbol = tt.in(rootMirror).tpe.typeSymbol
 
-    // The given symbol represents either String.+ or StringAdd.+
-    def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+
-    def isArrowAssoc(sym: Symbol) = ArrowAssocClass.tpe.decls.toList contains sym
-
     // The given symbol is a method with the right name and signature to be a runnable java program.
     def isJavaMainMethod(sym: Symbol) = (sym.name == nme.main) && (sym.info match {
       case MethodType(p :: Nil, restpe) => isArrayOfSymbol(p.tpe, StringClass) && restpe.typeSymbol == UnitClass
@@ -563,37 +530,33 @@ trait Definitions extends api.StandardDefinitions {
     // The given class has a main method.
     def hasJavaMainMethod(sym: Symbol): Boolean =
       (sym.tpe member nme.main).alternatives exists isJavaMainMethod
-    def hasJavaMainMethod(path: String): Boolean =
-      hasJavaMainMethod(getModuleIfDefined(path))
 
-    def isOptionType(tp: Type)  = tp.typeSymbol isSubClass OptionClass
-    def isSomeType(tp: Type)    = tp.typeSymbol eq SomeClass
-    def isNoneType(tp: Type)    = tp.typeSymbol eq NoneModule
-
-    // Product, Tuple, Function, AbstractFunction
-    private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[ClassSymbol] = {
-      val list = countFrom to arity map (i => getRequiredClass("scala." + name + i))
-      list.toArray
-    }
-    def prepend[S >: ClassSymbol : ClassTag](elem0: S, elems: Array[ClassSymbol]): Array[S] = elem0 +: elems
-
-    private def aritySpecificType[S <: Symbol](symbolArray: Array[S], args: List[Type], others: Type*): Type = {
-      val arity = args.length
-      if (arity >= symbolArray.length) NoType
-      else appliedType(symbolArray(arity), args ++ others: _*)
+    class VarArityClass(name: String, maxArity: Int, countFrom: Int = 0, init: Option[ClassSymbol] = None) extends VarArityClassApi {
+      private val offset = countFrom - init.size
+      private def isDefinedAt(i: Int) = i < seq.length + offset && i >= offset
+      val seq: IndexedSeq[ClassSymbol] = (init ++: countFrom.to(maxArity).map { i => getRequiredClass("scala." + name + i) }).toVector
+      def apply(i: Int) = if (isDefinedAt(i)) seq(i - offset) else NoSymbol
+      def specificType(args: List[Type], others: Type*): Type = {
+        val arity = args.length
+        if (!isDefinedAt(arity)) NoType
+        else appliedType(apply(arity), args ++ others: _*)
+      }
     }
+    // The companion object would be created synthetically for the default args. We call all objects in this method from the generated code
+    // in JavaUniverseForce, so it is clearer to define this explicitly in source.

+    object VarArityClass
 
     val MaxTupleArity, MaxProductArity, MaxFunctionArity = 22
-    lazy val ProductClass: Array[ClassSymbol] = prepend(UnitClass, mkArityArray("Product", MaxProductArity, 1))
-    lazy val TupleClass: Array[Symbol] = prepend(NoSymbol, mkArityArray("Tuple", MaxTupleArity, 1))
-    lazy val FunctionClass         = mkArityArray("Function", MaxFunctionArity, 0)
-    lazy val AbstractFunctionClass = mkArityArray("runtime.AbstractFunction", MaxFunctionArity, 0)
+
+    lazy val ProductClass          = new VarArityClass("Product", MaxProductArity, countFrom = 1, init = Some(UnitClass))
+    lazy val TupleClass            = new VarArityClass("Tuple", MaxTupleArity, countFrom = 1)
+    lazy val FunctionClass         = new VarArityClass("Function", MaxFunctionArity)
+    lazy val AbstractFunctionClass = new VarArityClass("runtime.AbstractFunction", MaxFunctionArity)
 
     /** Creators for TupleN, ProductN, FunctionN. */
-    def tupleType(elems: List[Type])                            = aritySpecificType(TupleClass, elems)
-    def productType(elems: List[Type])                          = aritySpecificType(ProductClass, elems)
-    def functionType(formals: List[Type], restpe: Type)         = aritySpecificType(FunctionClass, formals, restpe)
-    def abstractFunctionType(formals: List[Type], restpe: Type) = aritySpecificType(AbstractFunctionClass, formals, restpe)
+    def tupleType(elems: List[Type])                            = TupleClass.specificType(elems)
+    def functionType(formals: List[Type], restpe: Type)         = FunctionClass.specificType(formals, restpe)
+    def abstractFunctionType(formals: List[Type], restpe: Type) = AbstractFunctionClass.specificType(formals, restpe)
 
     def wrapArrayMethodName(elemtp: Type): TermName = elemtp.typeSymbol match {
       case ByteClass    => nme.wrapByteArray
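
Likewise, a small sketch (editorial, under the same `Global` assumption) of the `VarArityClass` machinery from the hunk above; arities outside the defined range fall back to `NoSymbol`/`NoType`.

    def aritySketch(global: scala.tools.nsc.Global): Unit = {
      import global._
      import definitions._
      TupleClass(2)                          // expected: the ClassSymbol for scala.Tuple2
      TupleClass(0)                          // expected: NoSymbol (tuples start at arity 1)
      ProductClass(0)                        // expected: UnitClass, supplied via `init`
      tupleType(List(IntTpe, StringTpe))     // expected: (Int, String)
      functionType(List(IntTpe), BooleanTpe) // expected: Int => Boolean
    }
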
@@ -606,17 +569,13 @@ trait Definitions extends api.StandardDefinitions {
       case BooleanClass => nme.wrapBooleanArray
       case UnitClass    => nme.wrapUnitArray
       case _        =>
-        if ((elemtp <:< AnyRefClass.tpe) && !isPhantomClass(elemtp.typeSymbol)) nme.wrapRefArray
+        if ((elemtp <:< AnyRefTpe) && !isPhantomClass(elemtp.typeSymbol)) nme.wrapRefArray
         else nme.genericWrapArray
     }
 
-    @deprecated("Use isTupleType", "2.10.0")
-    def isTupleTypeOrSubtype(tp: Type) = isTupleType(tp)
-
-    def tupleField(n: Int, j: Int) = getMemberValue(TupleClass(n), nme.productAccessorName(j))
-    // NOTE: returns true for NoSymbol since it's included in the TupleClass array -- is this intensional?
-    def isTupleSymbol(sym: Symbol) = TupleClass contains unspecializedSymbol(sym)
-    def isProductNClass(sym: Symbol) = ProductClass contains sym
+    def isTupleSymbol(sym: Symbol) = TupleClass.seq contains unspecializedSymbol(sym)
+    def isFunctionSymbol(sym: Symbol) = FunctionClass.seq contains unspecializedSymbol(sym)
+    def isProductNSymbol(sym: Symbol) = ProductClass.seq contains unspecializedSymbol(sym)
 
     def unspecializedSymbol(sym: Symbol): Symbol = {
       if (sym hasFlag SPECIALIZED) {
@@ -627,31 +586,71 @@ trait Definitions extends api.StandardDefinitions {
       }
       else sym
     }
+    def unspecializedTypeArgs(tp: Type): List[Type] =
+      (tp baseType unspecializedSymbol(tp.typeSymbolDirect)).typeArgs
+
+    object MacroContextType {
+      def unapply(tp: Type) = {
+        def isOneOfContextTypes(tp: Type) =
+          tp =:= BlackboxContextClass.tpe || tp =:= WhiteboxContextClass.tpe
+        def isPrefix(sym: Symbol) =
+          sym.allOverriddenSymbols.contains(MacroContextPrefixType)
+
+        tp.dealias match {
+          case RefinedType(List(tp), Scope(sym)) if isOneOfContextTypes(tp) && isPrefix(sym) => Some(tp)
+          case tp if isOneOfContextTypes(tp) => Some(tp)
+          case _ => None
+        }
+      }
+    }
 
-    // Checks whether the given type is true for the given condition,
-    // or if it is a specialized subtype of a type for which it is true.
-    //
-    // Origins notes:
-    // An issue was introduced with specialization in that the implementation
-    // of "isTupleType" in Definitions relied upon sym == TupleClass(elems.length).
-    // This test is untrue for specialized tuples, causing mysterious behavior
-    // because only some tuples are specialized.
-    def isPossiblySpecializedType(tp: Type)(cond: Type => Boolean) = {
-      cond(tp) || (tp match {
-        case TypeRef(pre, sym, args) if sym hasFlag SPECIALIZED =>
-          cond(tp baseType unspecializedSymbol(sym))
+    def isMacroContextType(tp: Type) = MacroContextType.unapply(tp).isDefined
+
+    def isWhiteboxContextType(tp: Type) =
+      isMacroContextType(tp) && (tp <:< WhiteboxContextClass.tpe)
+
+    private def macroBundleParamInfo(tp: Type) = {
+      val ctor = tp.erasure.typeSymbol.primaryConstructor
+      ctor.paramss match {
+        case List(List(c)) =>
+          val sym = c.info.typeSymbol
+          val isContextCompatible = sym.isNonBottomSubClass(BlackboxContextClass) || sym.isNonBottomSubClass(WhiteboxContextClass)
+          if (isContextCompatible) c.info else NoType
         case _ =>
-          false
-      })
+          NoType
+      }
     }
-    // No normalization.
-    def isTupleTypeDirect(tp: Type) = isPossiblySpecializedType(tp) {
-      case TypeRef(_, sym, args) if args.nonEmpty =>
-        val len = args.length
-        len <= MaxTupleArity && sym == TupleClass(len)
-      case _ => false
+
+    def looksLikeMacroBundleType(tp: Type) =
+      macroBundleParamInfo(tp) != NoType
+
+    def isMacroBundleType(tp: Type) = {
+      val isMonomorphic = tp.typeSymbol.typeParams.isEmpty
+      val isContextCompatible = isMacroContextType(macroBundleParamInfo(tp))
+      val hasSingleConstructor = !tp.declaration(nme.CONSTRUCTOR).isOverloaded
+      val nonAbstract = !tp.erasure.typeSymbol.isAbstractClass
+      isMonomorphic && isContextCompatible && hasSingleConstructor && nonAbstract
     }
-    def isTupleType(tp: Type) = isTupleTypeDirect(tp.normalize)
+
+    def isBlackboxMacroBundleType(tp: Type) = {
+      val isBundle = isMacroBundleType(tp)
+      val unwrappedContext = MacroContextType.unapply(macroBundleParamInfo(tp)).getOrElse(NoType)
+      val isBlackbox = unwrappedContext =:= BlackboxContextClass.tpe
+      isBundle && isBlackbox
+    }
+
+    def isListType(tp: Type)     = tp <:< classExistentialType(ListClass)
+    def isIterableType(tp: Type) = tp <:< classExistentialType(IterableClass)
+
+    // These "direct" calls perform no dealiasing. They are most needed when
+    // printing types, where one wants to preserve the true nature of the type.
+    def isFunctionTypeDirect(tp: Type) = !tp.isHigherKinded && isFunctionSymbol(tp.typeSymbolDirect)
+    def isTupleTypeDirect(tp: Type)    = !tp.isHigherKinded && isTupleSymbol(tp.typeSymbolDirect)
+
+    // Note that these call .dealiasWiden and not .normalize, the latter of which
+    // tends to change the course of events by forcing types.
+    def isFunctionType(tp: Type)       = isFunctionTypeDirect(tp.dealiasWiden)
+    def isTupleType(tp: Type)          = isTupleTypeDirect(tp.dealiasWiden)
 
     lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product]
       def Product_productArity          = getMemberMethod(ProductRootClass, nme.productArity)
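
The macro-bundle checks above amount to: a monomorphic, non-abstract class with a single public constructor whose only parameter is a macro `Context`. A minimal illustrative bundle (hypothetical names; it relies only on the standard `scala.reflect.macros` API):

    import scala.reflect.macros.blackbox

    // isMacroBundleType (and isBlackboxMacroBundleType) would be expected to hold for
    // this type: monomorphic, concrete, a single public constructor whose only
    // parameter is a blackbox Context. A macro def would refer to it as, e.g.,
    //   def echo(x: Int): Int = macro EchoBundle.impl
    class EchoBundle(val c: blackbox.Context) {
      import c.universe._
      def impl(x: c.Tree): c.Tree = q"$x + 0"
    }
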
@@ -659,43 +658,122 @@ trait Definitions extends api.StandardDefinitions {
       def Product_iterator              = getMemberMethod(ProductRootClass, nme.productIterator)
       def Product_productPrefix         = getMemberMethod(ProductRootClass, nme.productPrefix)
       def Product_canEqual              = getMemberMethod(ProductRootClass, nme.canEqual_)
-      // def Product_productElementName = getMemberMethod(ProductRootClass, nme.productElementName)
 
       def productProj(z:Symbol, j: Int): TermSymbol = getMemberValue(z, nme.productAccessorName(j))
-      def productProj(n: Int,   j: Int): TermSymbol = productProj(ProductClass(n), j)
-
-      /** returns true if this type is exactly ProductN[T1,...,Tn], not some subclass */
-      def isExactProductType(tp: Type): Boolean = isProductNClass(tp.typeSymbol)
 
     /** if tpe <: ProductN[T1,...,TN], returns List(T1,...,TN) else Nil */
-    def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNClass match {
+    @deprecated("No longer used", "2.11.0") def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNSymbol match {
       case Some(x)  => tpe.baseType(x).typeArgs
       case _        => Nil
     }
 
+    @deprecated("No longer used", "2.11.0") def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match {
+      case RefinedType(p :: _, _) => p.dealiasWiden
+      case tp                     => tp
+    }
+
+    def getterMemberTypes(tpe: Type, getters: List[Symbol]): List[Type] =
+      getters map (m => dropNullaryMethod(tpe memberType m))
+
     def dropNullaryMethod(tp: Type) = tp match {
       case NullaryMethodType(restpe) => restpe
       case _                         => tp
     }
 
-    def unapplyUnwrap(tpe:Type) = tpe.finalResultType.normalize match {
-      case RefinedType(p :: _, _) => p.normalize
-      case tp                     => tp
+    /** An implementation of finalResultType which does only what
+     *  finalResultType is documented to do. Defining it externally to
+     *  Type helps ensure people can't come to depend on accidental
+     *  aspects of its behavior. This is all of it!
+     */
+    def finalResultType(tp: Type): Type = tp match {
+      case PolyType(_, restpe)       => finalResultType(restpe)
+      case MethodType(_, restpe)     => finalResultType(restpe)
+      case NullaryMethodType(restpe) => finalResultType(restpe)
+      case _                         => tp
+    }
+    /** Similarly, putting all the isStable logic in one place.
+     *  This makes it far easier to see the overall logic
+     *  of the method.
+     */
+    def isStable(tp: Type): Boolean = tp match {
+      case _: SingletonType                             => true
+      case NoPrefix                                     => true
+      case TypeRef(_, NothingClass | SingletonClass, _) => true
+      case TypeRef(_, sym, _) if sym.isAbstractType     => tp.bounds.hi.typeSymbol isSubClass SingletonClass
+      case TypeRef(pre, sym, _) if sym.isModuleClass    => isStable(pre)
+      case TypeRef(_, _, _) if tp ne tp.dealias         => isStable(tp.dealias)
+      case TypeVar(origin, _)                           => isStable(origin)
+      case AnnotatedType(_, atp)                        => isStable(atp)    // Really?
+      case _: SimpleTypeProxy                           => isStable(tp.underlying)
+      case _                                            => false
     }
+    def isVolatile(tp: Type): Boolean = {
+      // need to be careful not to fall into an infinite recursion here
+      // because volatile checking is done before all cycles are detected.
+      // the case to avoid is an abstract type directly or
+      // indirectly upper-bounded by itself. See #2918
+      def isVolatileAbstractType: Boolean = {
+        def sym = tp.typeSymbol
+        def volatileUpperBound = isVolatile(tp.bounds.hi)
+        def safeIsVolatile = (
+          if (volatileRecursions < TypeConstants.LogVolatileThreshold)
+            volatileUpperBound
+          // we can return true when pendingVolatiles contains sym, because
+          // a cycle will be detected afterwards and an error will result anyway.
+          else pendingVolatiles(sym) || {
+            pendingVolatiles += sym
+            try volatileUpperBound finally pendingVolatiles -= sym
+          }
+        )
+        volatileRecursions += 1
+        try safeIsVolatile finally volatileRecursions -= 1
+      }
+      /** A refined type P1 with ... with Pn { decls } is volatile if
+       *  one of the parent types Pi is an abstract type, and
+       *  either i > 1, or decls or a following parent Pj, j > 1, contributes
+       *  an abstract member.
+       *  A type contributes an abstract member if it has an abstract member which
+       *  is also a member of the whole refined type. A scope `decls` contributes
+       *  an abstract member if it has an abstract definition which is also
+       *  a member of the whole type.
+       */
+      def isVolatileRefinedType: Boolean = {
+        val RefinedType(parents, decls)         = tp
+        def isVisibleDeferred(m: Symbol)        = m.isDeferred && ((tp nonPrivateMember m.name).alternatives contains m)
+        def contributesAbstractMembers(p: Type) = p.deferredMembers exists isVisibleDeferred
+        def dropConcreteParents                 = parents dropWhile (p => !p.typeSymbol.isAbstractType)
+
+        (parents exists isVolatile) || {
+          dropConcreteParents match {
+            case Nil => false
+            case ps  => (ps ne parents) || (ps.tail exists contributesAbstractMembers) || (decls exists isVisibleDeferred)
+          }
+        }
+      }
 
-    def functionApply(n: Int) = getMemberMethod(FunctionClass(n), nme.apply)
+      tp match {
+        case ThisType(_)                              => false
+        case SingleType(_, sym)                       => isVolatile(tp.underlying) && (sym.hasVolatileType || !sym.isStable)
+        case NullaryMethodType(restpe)                => isVolatile(restpe)
+        case PolyType(_, restpe)                      => isVolatile(restpe)
+        case TypeRef(_, _, _) if tp ne tp.dealias     => isVolatile(tp.dealias)
+        case TypeRef(_, sym, _) if sym.isAbstractType => isVolatileAbstractType
+        case RefinedType(_, _)                        => isVolatileRefinedType
+        case TypeVar(origin, _)                       => isVolatile(origin)
+        case _: SimpleTypeProxy                       => isVolatile(tp.underlying)
+        case _                                        => false
+      }
+    }
 
+    private[this] var volatileRecursions: Int = 0
+    private[this] val pendingVolatiles = mutable.HashSet[Symbol]()
     def abstractFunctionForFunctionType(tp: Type) = {
       assert(isFunctionType(tp), tp)
       abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
     }
-
-    def isFunctionType(tp: Type): Boolean = tp.normalize match {
-      case TypeRef(_, sym, args) if args.nonEmpty =>
-        val arity = args.length - 1   // -1 is the return type
-        arity <= MaxFunctionArity && sym == FunctionClass(arity)
-      case _ =>
-        false
+    def functionNBaseType(tp: Type): Type = tp.baseClasses find isFunctionSymbol match {
+      case Some(sym) => tp baseType unspecializedSymbol(sym)
+      case _         => tp
     }
 
     def isPartialFunctionType(tp: Type): Boolean = {
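
Two quick illustrations of `finalResultType` and `isStable` as defined above, again assuming a `Global` in scope; the comments state expected results rather than verified output.

    def stabilitySketch(global: scala.tools.nsc.Global): Unit = {
      import global._
      import definitions._
      // finalResultType strips PolyType / MethodType / NullaryMethodType wrappers:
      // List.apply is typed [A](xs: A*)List[A], so only List[A] should remain.
      val applyInfo = getMemberMethod(ListModule, nme.apply).info
      finalResultType(applyInfo)      // expected: a List[A] type, no method wrappers left

      // Singleton-like types are stable; ordinary class type refs are not.
      isStable(ThisType(ListClass))   // expected: true (a SingletonType)
      isStable(IntTpe)                // expected: false
    }
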
@@ -703,11 +781,42 @@ trait Definitions extends api.StandardDefinitions {
       (sym eq PartialFunctionClass) || (sym eq AbstractPartialFunctionClass)
     }
 
-    def isSeqType(tp: Type) = elementType(SeqClass, tp.normalize) != NoType
+    /** The single abstract method declared by type `tp` (or `NoSymbol` if it cannot be found).
+     *
+     * The method must be monomorphic and have exactly one parameter list.
+     * The class defining the method is a supertype of `tp` that
+     * has a public no-arg primary constructor.
+     */
+    def samOf(tp: Type): Symbol = {
+      // if tp has a constructor, it must be public and must not take any arguments
+      // (not even an implicit argument list -- to keep it simple for now)
+      val tpSym  = tp.typeSymbol
+      val ctor   = tpSym.primaryConstructor
+      val ctorOk = !ctor.exists || (!ctor.isOverloaded && ctor.isPublic && ctor.info.params.isEmpty && ctor.info.paramSectionCount <= 1)
+
+      if (tpSym.exists && ctorOk) {
+        // find the single abstract member, if there is one
+        // don't go out requiring DEFERRED members, as you will get them even if there's a concrete override:
+        //    scala> abstract class X { def m: Int }
+        //    scala> class Y extends X { def m: Int = 1}
+        //    scala> typeOf[Y].deferredMembers
+        //    Scopes(method m, method getClass)
+        //
+        //    scala> typeOf[Y].members.filter(_.isDeferred)
+        //    Scopes()
+        // must filter out "universal" members (getClass is deferred for some reason)
+        val deferredMembers = (
+          tp membersBasedOnFlags (excludedFlags = BridgeAndPrivateFlags, requiredFlags = METHOD)
+          filter (mem => mem.isDeferredNotDefault && !isUniversalMember(mem)) // TODO: test
+        )
 
-    def elementType(container: Symbol, tp: Type): Type = tp match {
-      case TypeRef(_, `container`, arg :: Nil)  => arg
-      case _                                    => NoType
+        // if there is only one, it's monomorphic and has a single argument list
+        if (deferredMembers.size == 1 &&
+            deferredMembers.head.typeParams.isEmpty &&
+            deferredMembers.head.info.paramSectionCount == 1)
+          deferredMembers.head
+        else NoSymbol
+      } else NoSymbol
     }
 
     def arrayType(arg: Type)         = appliedType(ArrayClass, arg)
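
An illustrative type that `samOf` above would accept (names are hypothetical): a public no-argument constructor plus exactly one abstract, monomorphic, single-parameter-list method.

    // samOf(typeOf[Greeter]) would be expected to return the symbol of `greet`:
    // Greeter has a public no-arg constructor and exactly one abstract method,
    // which is monomorphic and takes a single parameter list.
    abstract class Greeter {
      def greet(name: String): String
      def greetTwice(name: String): String = greet(name) + greet(name) // concrete members don't count
    }
    // Adding a second abstract method, a type parameter on `greet`, or a curried
    // parameter list would make samOf return NoSymbol instead.
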
@@ -717,14 +826,75 @@ trait Definitions extends api.StandardDefinitions {
     def optionType(tp: Type)         = appliedType(OptionClass, tp)
     def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg)
     def seqType(arg: Type)           = appliedType(SeqClass, arg)
-    def someType(tp: Type)           = appliedType(SomeClass, tp)
 
-    def StringArray   = arrayType(StringClass.tpe)
-    lazy val ObjectArray   = arrayType(ObjectClass.tpe)
+    // FYI the long clunky name is because it's really hard to put "get" into the
+    // name of a method without it sounding like the method "get"s something, whereas
+    // this method is about a type member which just happens to be named get.
+    def typeOfMemberNamedGet(tp: Type)   = typeArgOfBaseTypeOr(tp, OptionClass)(resultOfMatchingMethod(tp, nme.get)())
+    def typeOfMemberNamedHead(tp: Type)  = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.head)())
+    def typeOfMemberNamedApply(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.apply)(IntTpe))
+    def typeOfMemberNamedDrop(tp: Type)  = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.drop)(IntTpe))
+    def typesOfSelectors(tp: Type)       = getterMemberTypes(tp, productSelectors(tp))
+    // SI-8128 Still using the type argument of the base type at Seq/Option if this is an old-style (2.10 compatible)
+    //         extractor to limit exposure to regressions like the reported problem with existentials.
+    //         TODO fix the existential problem in the general case, see test/pending/pos/t8128.scala
+    private def typeArgOfBaseTypeOr(tp: Type, baseClass: Symbol)(or: => Type): Type = (tp baseType baseClass).typeArgs match {
+      case x :: Nil => x
+      case _        => or
+    }
 
-    def ClassType(arg: Type) =
-      if (phase.erasedTypes || forMSIL) ClassClass.tpe
-      else appliedType(ClassClass, arg)
+    // Can't only check for _1 thanks to pos/t796.
+    def hasSelectors(tp: Type) = (
+         (tp.members containsName nme._1)
+      && (tp.members containsName nme._2)
+    )
+
+    /** Returns the method symbols for members _1, _2, ..., _N
+     *  which exist in the given type.
+     */
+    def productSelectors(tpe: Type): List[Symbol] = {
+      def loop(n: Int): List[Symbol] = tpe member TermName("_" + n) match {
+        case NoSymbol                => Nil
+        case m if m.paramss.nonEmpty => Nil
+        case m                       => m :: loop(n + 1)
+      }
+      // Since ErrorType always returns a symbol from a call to member, we
+      // had better not start looking for _1, _2, etc. expecting it to run out.
+      if (tpe.isErroneous) Nil else loop(1)
+    }
+
+    /** If `tp` has a term member `name`, the first parameter list of which
+     *  matches `paramTypes`, and which either has no further parameter
+     *  lists or only an implicit one, then the result type of the matching
+     *  method. Otherwise, NoType.
+     */
+    def resultOfMatchingMethod(tp: Type, name: TermName)(paramTypes: Type*): Type = {
+      def matchesParams(member: Symbol) = member.paramss match {
+        case Nil        => paramTypes.isEmpty
+        case ps :: rest => (rest.isEmpty || isImplicitParamss(rest)) && (ps corresponds paramTypes)(_.tpe =:= _)
+      }
+      tp member name filter matchesParams match {
+        case NoSymbol => NoType
+        case member   => (tp memberType member).finalResultType
+      }
+    }
+
+    def ClassType(arg: Type) = if (phase.erasedTypes) ClassClass.tpe else appliedType(ClassClass, arg)
+
+    /** Can we tell by inspecting the symbol that it will never
+     *  at any phase have type parameters?
+     */
+    def neverHasTypeParameters(sym: Symbol) = sym match {
+      case _: RefinementClassSymbol => true
+      case _: ModuleClassSymbol     => true
+      case _: ImplClassSymbol       => true
+      case _                        =>
+        (
+             sym.isPrimitiveValueClass
+          || sym.isAnonymousClass
+          || sym.initialize.isMonomorphicType
+        )
+    }
 
     def EnumType(sym: Symbol) =
       // given (in java): "class A { enum E { VAL1 } }"
@@ -733,66 +903,34 @@ trait Definitions extends api.StandardDefinitions {
       //  - .linkedClassOfClass: the ClassSymbol of the enumeration (class E)
       sym.owner.linkedClassOfClass.tpe
 
-    def vmClassType(arg: Type): Type = ClassType(arg)
-    def vmSignature(sym: Symbol, info: Type): String = signature(info)    // !!!
-
     /** Given a class symbol C with type parameters T1, T2, ... Tn
      *  which have upper/lower bounds LB1/UB1, LB2/UB2, ..., LBn/UBn,
      *  returns an existential type of the form
      *
      *    C[E1, ..., En] forSome { E1 >: LB1 <: UB1 ... En >: LBn <: UBn }.
      */
+    // TODO Review the way this is used. I see two potential problems:
+    //  1. `existentialAbstraction` here doesn't create fresh existential type symbols, it just
+    //     uses the class type parameter symbols directly as the list of quantified symbols.
+    //     See SI-8244 for the trouble that this can cause.
+    //     Compare with callers of `typeParamsToExistentials` (used in Java raw type handling)
+    //  2. Why don't we require a prefix? Could its omission lead to wrong results in CheckabilityChecker?
     def classExistentialType(clazz: Symbol): Type =
-      newExistentialType(clazz.typeParams, clazz.tpe)
-
-    /** Given type U, creates a Type representing Class[_ <: U].
-     */
-    def boundedClassType(upperBound: Type) =
-      appliedTypeAsUpperBounds(ClassClass.typeConstructor, List(upperBound))
-
-    /** To avoid unchecked warnings on polymorphic classes, translate
-     *  a Foo[T] into a Foo[_] for use in the pattern matcher.
-     */
-    @deprecated("Use classExistentialType", "2.10.0")
-    def typeCaseType(clazz: Symbol): Type = classExistentialType(clazz)
+      existentialAbstraction(clazz.typeParams, clazz.tpe_*)
 
-    //
-    // .NET backend
-    //
+    // members of class scala.Any
 
-    lazy val ComparatorClass = getRequiredClass("scala.runtime.Comparator")
-    // System.ValueType
-    lazy val ValueTypeClass: ClassSymbol = getClassByName(sn.ValueType)
-    // System.MulticastDelegate
-    lazy val DelegateClass: ClassSymbol = getClassByName(sn.Delegate)
-    var Delegate_scalaCallers: List[Symbol] = List() // Syncnote: No protection necessary yet as only for .NET where reflection is not supported.
-    // Symbol -> (Symbol, Type): scalaCaller -> (scalaMethodSym, DelegateType)
-    // var Delegate_scalaCallerInfos: HashMap[Symbol, (Symbol, Type)] = _
-    lazy val Delegate_scalaCallerTargets: mutable.HashMap[Symbol, Symbol] = mutable.HashMap()
-
-    def isCorrespondingDelegate(delegateType: Type, functionType: Type): Boolean = {
-      isSubType(delegateType, DelegateClass.tpe) &&
-      (delegateType.member(nme.apply).tpe match {
-      	case MethodType(delegateParams, delegateReturn) =>
-      	  isFunctionType(functionType) &&
-      	  (functionType.normalize match {
-      	    case TypeRef(_, _, args) =>
-      	      (delegateParams.map(pt => {
-                      if (pt.tpe == AnyClass.tpe) definitions.ObjectClass.tpe else pt})
-      	       ::: List(delegateReturn)) == args
-      	    case _ => false
-      	  })
-        case _ => false
-      })
-    }
+    // TODO these aren't final! They are now overridden in AnyRef/Object. Prior to the fix
+    //      for SI-8129, they were actually *overloaded* by the members in AnyRef/Object.
+    //      We should unfinalize these, override in AnyValClass, and make the overrides final.
+    //      Refchecks never actually looks at these, so it's just for consistency.
+    lazy val Any_==       = enterNewMethod(AnyClass, nme.EQ, AnyTpe :: Nil, BooleanTpe, FINAL)
+    lazy val Any_!=       = enterNewMethod(AnyClass, nme.NE, AnyTpe :: Nil, BooleanTpe, FINAL)
 
-    // members of class scala.Any
-    lazy val Any_==       = enterNewMethod(AnyClass, nme.EQ, anyparam, booltype, FINAL)
-    lazy val Any_!=       = enterNewMethod(AnyClass, nme.NE, anyparam, booltype, FINAL)
-    lazy val Any_equals   = enterNewMethod(AnyClass, nme.equals_, anyparam, booltype)
-    lazy val Any_hashCode = enterNewMethod(AnyClass, nme.hashCode_, Nil, inttype)
-    lazy val Any_toString = enterNewMethod(AnyClass, nme.toString_, Nil, stringtype)
-    lazy val Any_##       = enterNewMethod(AnyClass, nme.HASHHASH, Nil, inttype, FINAL)
+    lazy val Any_equals   = enterNewMethod(AnyClass, nme.equals_, AnyTpe :: Nil, BooleanTpe)
+    lazy val Any_hashCode = enterNewMethod(AnyClass, nme.hashCode_, Nil, IntTpe)
+    lazy val Any_toString = enterNewMethod(AnyClass, nme.toString_, Nil, StringTpe)
+    lazy val Any_##       = enterNewMethod(AnyClass, nme.HASHHASH, Nil, IntTpe, FINAL)
 
     // Any_getClass requires special handling.  The return type is determined on
     // a per-call-site basis as if the function being called were actually:
@@ -804,7 +942,7 @@ trait Definitions extends api.StandardDefinitions {
     // participation.  At the "Any" level, the return type is Class[_] as it is in
     // java.lang.Object.  Java also special cases the return type.
     lazy val Any_getClass     = enterNewMethod(AnyClass, nme.getClass_, Nil, getMemberMethod(ObjectClass, nme.getClass_).tpe.resultType, DEFERRED)
-    lazy val Any_isInstanceOf = newT1NullaryMethod(AnyClass, nme.isInstanceOf_, FINAL)(_ => booltype)
+    lazy val Any_isInstanceOf = newT1NullaryMethod(AnyClass, nme.isInstanceOf_, FINAL)(_ => BooleanTpe)
     lazy val Any_asInstanceOf = newT1NullaryMethod(AnyClass, nme.asInstanceOf_, FINAL)(_.typeConstructor)
 
     lazy val primitiveGetClassMethods = Set[Symbol](Any_getClass, AnyVal_getClass) ++ (
@@ -834,7 +972,7 @@ trait Definitions extends api.StandardDefinitions {
       else {
         val eparams    = typeParamsToExistentials(ClassClass, ClassClass.typeParams)
         val upperBound = (
-          if (isPhantomClass(sym)) AnyClass.tpe
+          if (isPhantomClass(sym)) AnyTpe
           else if (sym.isLocalClass) erasure.intersectionDominator(tp.parents)
           else tp.widen
         )
@@ -860,12 +998,7 @@ trait Definitions extends api.StandardDefinitions {
         else
           x :: removeRedundantObjects(xs)
     }
-    /** Order a list of types with non-trait classes before others. */
-    def classesFirst(tps: List[Type]): List[Type] = {
-      val (classes, others) = tps partition (t => t.typeSymbol.isClass && !t.typeSymbol.isTrait)
-      if (classes.isEmpty || others.isEmpty || (tps startsWith classes)) tps
-      else classes ::: others
-    }
+
     /** The following transformations are applied to a list of parents.
      *  If any parent is a class/trait, all parents which normalize to
      *  Object are discarded.  Otherwise, all parents which normalize
@@ -893,27 +1026,23 @@ trait Definitions extends api.StandardDefinitions {
     def parentsString(parents: List[Type]) =
       normalizedParents(parents) mkString " with "
 
-    def typeParamsString(tp: Type) = tp match {
-      case PolyType(tparams, _) => tparams map (_.defString) mkString ("[", ",", "]")
-      case _                    => ""
-    }
     def valueParamsString(tp: Type) = tp match {
       case MethodType(params, _) => params map (_.defString) mkString ("(", ",", ")")
       case _                     => ""
     }
 
     // members of class java.lang.{ Object, String }
-    lazy val Object_## = enterNewMethod(ObjectClass, nme.HASHHASH, Nil, inttype, FINAL)
-    lazy val Object_== = enterNewMethod(ObjectClass, nme.EQ, anyrefparam, booltype, FINAL)
-    lazy val Object_!= = enterNewMethod(ObjectClass, nme.NE, anyrefparam, booltype, FINAL)
-    lazy val Object_eq = enterNewMethod(ObjectClass, nme.eq, anyrefparam, booltype, FINAL)
-    lazy val Object_ne = enterNewMethod(ObjectClass, nme.ne, anyrefparam, booltype, FINAL)
-    lazy val Object_isInstanceOf = newT1NoParamsMethod(ObjectClass, nme.isInstanceOf_Ob, FINAL | SYNTHETIC)(_ => booltype)
-    lazy val Object_asInstanceOf = newT1NoParamsMethod(ObjectClass, nme.asInstanceOf_Ob, FINAL | SYNTHETIC)(_.typeConstructor)
+    lazy val Object_## = enterNewMethod(ObjectClass, nme.HASHHASH, Nil, IntTpe, FINAL)
+    lazy val Object_== = enterNewMethod(ObjectClass, nme.EQ, AnyTpe :: Nil, BooleanTpe, FINAL)
+    lazy val Object_!= = enterNewMethod(ObjectClass, nme.NE, AnyTpe :: Nil, BooleanTpe, FINAL)
+    lazy val Object_eq = enterNewMethod(ObjectClass, nme.eq, AnyRefTpe :: Nil, BooleanTpe, FINAL)
+    lazy val Object_ne = enterNewMethod(ObjectClass, nme.ne, AnyRefTpe :: Nil, BooleanTpe, FINAL)
+    lazy val Object_isInstanceOf = newT1NoParamsMethod(ObjectClass, nme.isInstanceOf_Ob, FINAL | SYNTHETIC | ARTIFACT)(_ => BooleanTpe)
+    lazy val Object_asInstanceOf = newT1NoParamsMethod(ObjectClass, nme.asInstanceOf_Ob, FINAL | SYNTHETIC | ARTIFACT)(_.typeConstructor)
     lazy val Object_synchronized = newPolyMethod(1, ObjectClass, nme.synchronized_, FINAL)(tps =>
       (Some(List(tps.head.typeConstructor)), tps.head.typeConstructor)
     )
-    lazy val String_+ = enterNewMethod(StringClass, nme.raw.PLUS, anyparam, stringtype, FINAL)
+    lazy val String_+ = enterNewMethod(StringClass, nme.raw.PLUS, AnyTpe :: Nil, StringTpe, FINAL)
 
     def Object_getClass  = getMemberMethod(ObjectClass, nme.getClass_)
     def Object_clone     = getMemberMethod(ObjectClass, nme.clone_)
@@ -940,9 +1069,6 @@ trait Definitions extends api.StandardDefinitions {
     lazy val BoxedFloatClass        = requiredClass[java.lang.Float]
     lazy val BoxedDoubleClass       = requiredClass[java.lang.Double]
 
-    lazy val Boxes_isNumberOrBool  = getDecl(BoxesRunTimeClass, nme.isBoxedNumberOrBoolean)
-    lazy val Boxes_isNumber        = getDecl(BoxesRunTimeClass, nme.isBoxedNumber)
-
     lazy val BoxedUnitClass         = requiredClass[scala.runtime.BoxedUnit]
     lazy val BoxedUnitModule        = getRequiredModule("scala.runtime.BoxedUnit")
       def BoxedUnit_UNIT            = getMemberValue(BoxedUnitModule, nme.UNIT)
@@ -959,7 +1085,6 @@ trait Definitions extends api.StandardDefinitions {
     lazy val ImplicitNotFoundClass      = requiredClass[scala.annotation.implicitNotFound]
     lazy val MigrationAnnotationClass   = requiredClass[scala.annotation.migration]
     lazy val ScalaStrictFPAttr          = requiredClass[scala.annotation.strictfp]
-    lazy val SerializableAttr           = requiredClass[scala.annotation.serializable] // @serializable is deprecated
     lazy val SwitchClass                = requiredClass[scala.annotation.switch]
     lazy val TailrecClass               = requiredClass[scala.annotation.tailrec]
     lazy val VarargsClass               = requiredClass[scala.annotation.varargs]
@@ -968,8 +1093,7 @@ trait Definitions extends api.StandardDefinitions {
 
     lazy val BeanPropertyAttr           = requiredClass[scala.beans.BeanProperty]
     lazy val BooleanBeanPropertyAttr    = requiredClass[scala.beans.BooleanBeanProperty]
-    lazy val CloneableAttr              = requiredClass[scala.annotation.cloneable]
-    lazy val CompileTimeOnlyAttr        = getClassIfDefined("scala.reflect.internal.annotations.compileTimeOnly")
+    lazy val CompileTimeOnlyAttr        = getClassIfDefined("scala.annotation.compileTimeOnly")
     lazy val DeprecatedAttr             = requiredClass[scala.deprecated]
     lazy val DeprecatedNameAttr         = requiredClass[scala.deprecatedName]
     lazy val DeprecatedInheritanceAttr  = requiredClass[scala.deprecatedInheritance]
@@ -979,6 +1103,7 @@ trait Definitions extends api.StandardDefinitions {
     lazy val ScalaInlineClass           = requiredClass[scala.inline]
     lazy val ScalaNoInlineClass         = requiredClass[scala.noinline]
     lazy val SerialVersionUIDAttr       = requiredClass[scala.SerialVersionUID]
+    lazy val SerialVersionUIDAnnotation = AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List())
     lazy val SpecializedClass           = requiredClass[scala.specialized]
     lazy val ThrowsClass                = requiredClass[scala.throws[_]]
     lazy val TransientAttr              = requiredClass[scala.transient]
@@ -994,53 +1119,56 @@ trait Definitions extends api.StandardDefinitions {
     lazy val GetterTargetClass          = requiredClass[meta.getter]
     lazy val ParamTargetClass           = requiredClass[meta.param]
     lazy val SetterTargetClass          = requiredClass[meta.setter]
-    lazy val ClassTargetClass           = requiredClass[meta.companionClass]
     lazy val ObjectTargetClass          = requiredClass[meta.companionObject]
+    lazy val ClassTargetClass           = requiredClass[meta.companionClass]
     lazy val MethodTargetClass          = requiredClass[meta.companionMethod]    // TODO: module, moduleClass? package, packageObject?
     lazy val LanguageFeatureAnnot       = requiredClass[meta.languageFeature]
 
     // Language features
     lazy val languageFeatureModule      = getRequiredModule("scala.languageFeature")
-    lazy val experimentalModule         = getMemberModule(languageFeatureModule, nme.experimental)
-    lazy val MacrosFeature              = getLanguageFeature("macros", experimentalModule)
-    lazy val DynamicsFeature            = getLanguageFeature("dynamics")
-    lazy val PostfixOpsFeature          = getLanguageFeature("postfixOps")
-    lazy val ReflectiveCallsFeature     = getLanguageFeature("reflectiveCalls")
-    lazy val ImplicitConversionsFeature = getLanguageFeature("implicitConversions")
-    lazy val HigherKindsFeature         = getLanguageFeature("higherKinds")
-    lazy val ExistentialsFeature        = getLanguageFeature("existentials")
 
     def isMetaAnnotation(sym: Symbol): Boolean = metaAnnotations(sym) || (
       // Trying to allow for deprecated locations
       sym.isAliasType && isMetaAnnotation(sym.info.typeSymbol)
     )
-    lazy val metaAnnotations = Set[Symbol](
-      FieldTargetClass, ParamTargetClass,
-      GetterTargetClass, SetterTargetClass,
-      BeanGetterTargetClass, BeanSetterTargetClass
-    )
+    lazy val metaAnnotations: Set[Symbol] = getPackage("scala.annotation.meta").info.members filter (_ isSubClass StaticAnnotationClass) toSet
+
+    // According to the scala.annotation.meta package object:
+    // * By default, annotations on (`val`-, `var`- or plain) constructor parameters
+    // * end up on the parameter, not on any other entity. Annotations on fields
+    // * by default only end up on the field.
+    def defaultAnnotationTarget(t: Tree): Symbol = t match {
+      case ClassDef(_, _, _, _)                                  => ClassTargetClass
+      case ModuleDef(_, _, _)                                    => ObjectTargetClass
+      case vd @ ValDef(_, _, _, _) if vd.symbol.isParamAccessor  => ParamTargetClass
+      case vd @ ValDef(_, _, _, _) if vd.symbol.isValueParameter => ParamTargetClass
+      case ValDef(_, _, _, _)                                    => FieldTargetClass
+      case DefDef(_, _, _, _, _, _)                              => MethodTargetClass
+      case _                                                     => GetterTargetClass
+    }
 
     lazy val AnnotationDefaultAttr: ClassSymbol = {
-      val attr = enterNewClass(RuntimePackageClass, tpnme.AnnotationDefaultATTR, List(AnnotationClass.tpe))
-      // This attribute needs a constructor so that modifiers in parsed Java code make sense
-      attr.info.decls enter attr.newClassConstructor(NoPosition)
-      attr
+      val sym = RuntimePackageClass.newClassSymbol(tpnme.AnnotationDefaultATTR, NoPosition, 0L)
+      sym setInfo ClassInfoType(List(AnnotationClass.tpe), newScope, sym)
+      markAllCompleted(sym)
+      RuntimePackageClass.info.decls.toList.filter(_.name == sym.name) match {
+        case existing :: _ =>
+          existing.asInstanceOf[ClassSymbol]
+        case _ =>
+          RuntimePackageClass.info.decls enter sym
+          // This attribute needs a constructor so that modifiers in parsed Java code make sense
+          sym.info.decls enter sym.newClassConstructor(NoPosition)
+          sym
+      }
     }
 
-    @deprecated("Moved to rootMirror.getClass", "2.10.0")
-    def getClass(fullname: Name): ClassSymbol = rootMirror.getClassByName(fullname)
-
-    @deprecated("Moved to rootMirror.getModule", "2.10.0")
-    def getModule(fullname: Name): ModuleSymbol = rootMirror.getModule(fullname)
-
-    private def fatalMissingSymbol(owner: Symbol, name: Name, what: String = "member") = {
-      throw new FatalError(owner + " does not have a " + what + " " + name)
+    private def fatalMissingSymbol(owner: Symbol, name: Name, what: String = "member", addendum: String = "") = {
+      throw new FatalError(owner + " does not have a " + what + " " + name + addendum)
     }
 
     def getLanguageFeature(name: String, owner: Symbol = languageFeatureModule): Symbol = getMember(owner, newTypeName(name))
 
     def termMember(owner: Symbol, name: String): Symbol = owner.info.member(newTermName(name))
-    def typeMember(owner: Symbol, name: String): Symbol = owner.info.member(newTypeName(name))
 
     def findNamedMember(fullName: Name, root: Symbol): Symbol = {
       val segs = nme.segments(fullName.toString, fullName.isTermName)
@@ -1070,7 +1198,8 @@ trait Definitions extends api.StandardDefinitions {
     def getMemberModule(owner: Symbol, name: Name): ModuleSymbol = {
       getMember(owner, name.toTermName) match {
         case x: ModuleSymbol => x
-        case _               => fatalMissingSymbol(owner, name, "member object")
+        case NoSymbol        => fatalMissingSymbol(owner, name, "member object")
+        case other           => fatalMissingSymbol(owner, name, "member object", addendum = s". A symbol ${other} of kind ${other.accurateKindString} already exists.")
       }
     }
     def getTypeMember(owner: Symbol, name: Name): TypeSymbol = {
@@ -1080,7 +1209,6 @@ trait Definitions extends api.StandardDefinitions {
       }
     }
     def getMemberClass(owner: Symbol, name: Name): ClassSymbol = {
-      val y = getMember(owner, name.toTypeName)
       getMember(owner, name.toTypeName) match {
         case x: ClassSymbol => x
         case _              => fatalMissingSymbol(owner, name, "member class")
@@ -1088,15 +1216,21 @@ trait Definitions extends api.StandardDefinitions {
     }
     def getMemberMethod(owner: Symbol, name: Name): TermSymbol = {
       getMember(owner, name.toTermName) match {
-        // todo. member symbol becomes a term symbol in cleanup. is this a bug?
-        // case x: MethodSymbol => x
         case x: TermSymbol => x
         case _             => fatalMissingSymbol(owner, name, "method")
       }
     }
 
+    private lazy val erasurePhase = findPhaseWithName("erasure")
     def getMemberIfDefined(owner: Symbol, name: Name): Symbol =
-      owner.info.nonPrivateMember(name)
+      // findMember considered harmful after erasure; e.g.
+      //
+      // scala> exitingErasure(Symbol_apply).isOverloaded
+      // res27: Boolean = true
+      //
+      enteringPhaseNotLaterThan(erasurePhase)(
+        owner.info.nonPrivateMember(name)
+      )
 
     /** Using getDecl rather than getMember may avoid issues with
      *  OverloadedTypes turning up when you don't want them, if you
@@ -1108,18 +1242,15 @@ trait Definitions extends api.StandardDefinitions {
     def getDeclIfDefined(owner: Symbol, name: Name): Symbol =
       owner.info.nonPrivateDecl(name)
 
-    def packageExists(packageName: String): Boolean =
-      getModuleIfDefined(packageName).isPackage
-
     private def newAlias(owner: Symbol, name: TypeName, alias: Type): AliasTypeSymbol =
       owner.newAliasType(name) setInfoAndEnter alias
 
     private def specialPolyClass(name: TypeName, flags: Long)(parentFn: Symbol => Type): ClassSymbol = {
       val clazz   = enterNewClass(ScalaPackageClass, name, Nil)
       val tparam  = clazz.newSyntheticTypeParam("T0", flags)
-      val parents = List(AnyRefClass.tpe, parentFn(tparam))
+      val parents = List(AnyRefTpe, parentFn(tparam))
 
-      clazz setInfo GenPolyType(List(tparam), ClassInfoType(parents, newScope, clazz))
+      clazz setInfo GenPolyType(List(tparam), ClassInfoType(parents, newScope, clazz)) markAllCompleted
     }
 
     def newPolyMethod(typeParamCount: Int, owner: Symbol, name: TermName, flags: Long)(createFn: PolyMethodCreator): MethodSymbol = {
@@ -1130,7 +1261,7 @@ trait Definitions extends api.StandardDefinitions {
         case (_, restpe)             => NullaryMethodType(restpe)
       }
 
-      msym setInfoAndEnter genPolyType(tparams, mtpe)
+      msym setInfoAndEnter genPolyType(tparams, mtpe) markAllCompleted
     }
 
     /** T1 means one type parameter.
@@ -1142,10 +1273,6 @@ trait Definitions extends api.StandardDefinitions {
       newPolyMethod(1, owner, name, flags)(tparams => (Some(Nil), createFn(tparams.head)))
     }
 
-    lazy val boxedClassValues = boxedClass.values.toSet[Symbol]
-    lazy val isUnbox = unboxMethod.values.toSet[Symbol]
-    lazy val isBox = boxMethod.values.toSet[Symbol]
-
     /** Is symbol a phantom class for which no runtime representation exists? */
     lazy val isPhantomClass = Set[Symbol](AnyClass, AnyValClass, NullClass, NothingClass)
     /** Lists core classes that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
@@ -1159,8 +1286,7 @@ trait Definitions extends api.StandardDefinitions {
       AnyValClass,
       NullClass,
       NothingClass,
-      SingletonClass,
-      EqualsPatternClass
+      SingletonClass
     )
     /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
     lazy val syntheticCoreMethods = List(
@@ -1196,14 +1322,12 @@ trait Definitions extends api.StandardDefinitions {
     lazy val symbolsNotPresentInBytecode = syntheticCoreClasses ++ syntheticCoreMethods ++ hijackedCoreClasses
 
     /** Is the symbol that of a parent which is added during parsing? */
-    lazy val isPossibleSyntheticParent = ProductClass.toSet[Symbol] + ProductRootClass + SerializableClass
+    lazy val isPossibleSyntheticParent = ProductClass.seq.toSet[Symbol] + ProductRootClass + SerializableClass
 
     private lazy val boxedValueClassesSet = boxedClass.values.toSet[Symbol] + BoxedUnitClass
 
     /** Is symbol a value class? */
     def isPrimitiveValueClass(sym: Symbol) = ScalaValueClasses contains sym
-    def isNonUnitValueClass(sym: Symbol)   = isPrimitiveValueClass(sym) && (sym != UnitClass)
-    def isSpecializableClass(sym: Symbol)  = isPrimitiveValueClass(sym) || (sym == AnyRefClass)
     def isPrimitiveValueType(tp: Type)     = isPrimitiveValueClass(tp.typeSymbol)
 
     /** Is symbol a boxed value class, e.g. java.lang.Integer? */
@@ -1220,7 +1344,7 @@ trait Definitions extends api.StandardDefinitions {
     /** Is type's symbol a numeric value class? */
     def isNumericValueType(tp: Type): Boolean = tp match {
       case TypeRef(_, sym, _) => isNumericValueClass(sym)
-      case _ => false
+      case _                  => false
     }
 
     // todo: reconcile with javaSignature!!!
@@ -1232,10 +1356,10 @@ trait Definitions extends api.StandardDefinitions {
       }
       def flatNameString(sym: Symbol, separator: Char): String =
         if (sym == NoSymbol) ""   // be more resistant to error conditions, e.g. neg/t3222.scala
-        else if (sym.owner.isPackageClass) sym.javaClassName
+        else if (sym.isTopLevel) sym.javaClassName
         else flatNameString(sym.owner, separator) + nme.NAME_JOIN_STRING + sym.simpleName
       def signature1(etp: Type): String = {
-        if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.normalize.typeArgs.head))
+        if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.dealiasWiden.typeArgs.head))
         else if (isPrimitiveValueClass(etp.typeSymbol)) abbrvTag(etp.typeSymbol).toString()
         else "L" + flatNameString(etp.typeSymbol, '/') + ";"
       }
@@ -1244,49 +1368,127 @@ trait Definitions extends api.StandardDefinitions {
       else flatNameString(etp.typeSymbol, '.')
     }
 
-   /** Surgery on the value classes.  Without this, AnyVals defined in source
-     *  files end up with an AnyRef parent.  It is likely there is a better way
-     *  to evade that AnyRef.
-     */
-    private def setParents(sym: Symbol, parents: List[Type]): Symbol = sym.rawInfo match {
-      case ClassInfoType(_, scope, clazz) =>
-        sym setInfo ClassInfoType(parents, scope, clazz)
-      case _ =>
-        sym
-    }
-
+    // documented in JavaUniverse.init
     def init() {
       if (isInitialized) return
-      // force initialization of every symbol that is synthesized or hijacked by the compiler
-      val forced = symbolsNotPresentInBytecode
+      ObjectClass.initialize
+      ScalaPackageClass.initialize
+      val forced1 = symbolsNotPresentInBytecode
+      val forced2 = NoSymbol
       isInitialized = true
     } //init
 
-    var nbScalaCallers: Int = 0
-    def newScalaCaller(delegateType: Type): MethodSymbol = {
-      assert(forMSIL, "scalaCallers can only be created if target is .NET")
-      // object: reference to object on which to call (scala-)method
-      val paramTypes: List[Type] = List(ObjectClass.tpe)
-      val name = newTermName("$scalaCaller$$" + nbScalaCallers)
-      // tparam => resultType, which is the resultType of PolyType, i.e. the result type after applying the
-      // type parameter =-> a MethodType in this case
-      // TODO: set type bounds manually (-> MulticastDelegate), see newTypeParam
-      val newCaller = enterNewMethod(DelegateClass, name, paramTypes, delegateType, FINAL | STATIC)
-      // val newCaller = newPolyMethod(DelegateClass, name,
-      // tparam => MethodType(paramTypes, tparam.typeConstructor)) setFlag (FINAL | STATIC)
-      Delegate_scalaCallers = Delegate_scalaCallers ::: List(newCaller)
-      nbScalaCallers += 1
-      newCaller
+    class UniverseDependentTypes(universe: Tree) {
+      lazy val nameType         = universeMemberType(tpnme.Name)
+      lazy val modsType         = universeMemberType(tpnme.Modifiers)
+      lazy val flagsType        = universeMemberType(tpnme.FlagSet)
+      lazy val symbolType       = universeMemberType(tpnme.Symbol)
+      lazy val treeType         = universeMemberType(tpnme.Tree)
+      lazy val caseDefType      = universeMemberType(tpnme.CaseDef)
+      lazy val liftableType     = universeMemberType(tpnme.Liftable)
+      lazy val unliftableType   = universeMemberType(tpnme.Unliftable)
+      lazy val iterableTreeType = appliedType(IterableClass, treeType)
+      lazy val listTreeType     = appliedType(ListClass, treeType)
+      lazy val listListTreeType = appliedType(ListClass, listTreeType)
+
+      def universeMemberType(name: TypeName) = universe.tpe.memberType(getTypeMember(universe.symbol, name))
     }
 
-    // def addScalaCallerInfo(scalaCaller: Symbol, methSym: Symbol, delType: Type) {
-    // assert(Delegate_scalaCallers contains scalaCaller)
-    // Delegate_scalaCallerInfos += (scalaCaller -> (methSym, delType))
-    // }
+    /** Efficient access to member symbols which must be looked up each run. Access via `currentRun.runDefinitions` */
+    final class RunDefinitions {
+      lazy val StringAdd_+ = getMemberMethod(StringAddClass, nme.PLUS)
+
+      // The given symbol represents either String.+ or StringAdd.+
+      def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+
+
+      lazy val StringContext_f = getMemberMethod(StringContextClass, nme.f)
+
+      lazy val ArrowAssocClass = getMemberClass(PredefModule, TypeName("ArrowAssoc")) // SI-5731
+      def isArrowAssoc(sym: Symbol) = sym.owner == ArrowAssocClass
+
+      lazy val Boxes_isNumberOrBool  = getDecl(BoxesRunTimeClass, nme.isBoxedNumberOrBoolean)
+      lazy val Boxes_isNumber        = getDecl(BoxesRunTimeClass, nme.isBoxedNumber)
 
-    def addScalaCallerInfo(scalaCaller: Symbol, methSym: Symbol) {
-      assert(Delegate_scalaCallers contains scalaCaller)
-      Delegate_scalaCallerTargets += (scalaCaller -> methSym)
+      private def valueClassCompanion(name: TermName): ModuleSymbol = {
+        getMember(ScalaPackageClass, name) match {
+          case x: ModuleSymbol => x
+          case _               => catastrophicFailure()
+        }
+      }
+
+      private def valueCompanionMember(className: Name, methodName: TermName): TermSymbol =
+        getMemberMethod(valueClassCompanion(className.toTermName).moduleClass, methodName)
+
+      lazy val boxMethod        = classesMap(x => valueCompanionMember(x, nme.box))
+      lazy val unboxMethod      = classesMap(x => valueCompanionMember(x, nme.unbox))
+      lazy val isUnbox          = unboxMethod.values.toSet[Symbol]
+      lazy val isBox            = boxMethod.values.toSet[Symbol]
+
+      lazy val Option_apply = getMemberMethod(OptionModule, nme.apply)
+      lazy val List_apply = DefinitionsClass.this.List_apply
+
+      /**
+       * Is the given symbol `List.apply`?
+       * To avoid bootstrapping cycles, this returns false if the given symbol or List itself is not initialized.
+       */
+      def isListApply(sym: Symbol) = sym.isInitialized && ListModule.hasCompleteInfo && sym == List_apply
+      def isPredefClassOf(sym: Symbol) = if (PredefModule.hasCompleteInfo) sym == Predef_classOf else isPredefMemberNamed(sym, nme.classOf)
+
+      lazy val TagMaterializers = Map[Symbol, Symbol](
+        ClassTagClass    -> materializeClassTag,
+        WeakTypeTagClass -> materializeWeakTypeTag,
+        TypeTagClass     -> materializeTypeTag
+      )
+      lazy val TagSymbols = TagMaterializers.keySet
+      lazy val Predef_conforms     = (getMemberIfDefined(PredefModule, nme.conforms)
+                               orElse getMemberMethod(PredefModule, "conforms": TermName)) // TODO: predicate on -Xsource:2.10 (for now, needed for transition from M8 -> RC1)
+      lazy val Predef_classOf      = getMemberMethod(PredefModule, nme.classOf)
+      lazy val Predef_implicitly   = getMemberMethod(PredefModule, nme.implicitly)
+      lazy val Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray)
+      lazy val Predef_???          = DefinitionsClass.this.Predef_???
+
+      lazy val arrayApplyMethod       = getMemberMethod(ScalaRunTimeModule, nme.array_apply)
+      lazy val arrayUpdateMethod      = getMemberMethod(ScalaRunTimeModule, nme.array_update)
+      lazy val arrayLengthMethod      = getMemberMethod(ScalaRunTimeModule, nme.array_length)
+      lazy val arrayCloneMethod       = getMemberMethod(ScalaRunTimeModule, nme.array_clone)
+      lazy val ensureAccessibleMethod = getMemberMethod(ScalaRunTimeModule, nme.ensureAccessible)
+      lazy val arrayClassMethod       = getMemberMethod(ScalaRunTimeModule, nme.arrayClass)
+      lazy val traversableDropMethod  = getMemberMethod(ScalaRunTimeModule, nme.drop)
+
+      lazy val GroupOfSpecializable = getMemberClass(SpecializableModule, tpnme.Group)
+
+      lazy val WeakTypeTagClass = TypeTagsClass.map(sym => getMemberClass(sym, tpnme.WeakTypeTag))
+      lazy val WeakTypeTagModule = TypeTagsClass.map(sym => getMemberModule(sym, nme.WeakTypeTag))
+      lazy val TypeTagClass = TypeTagsClass.map(sym => getMemberClass(sym, tpnme.TypeTag))
+      lazy val TypeTagModule = TypeTagsClass.map(sym => getMemberModule(sym, nme.TypeTag))
+      lazy val MacroContextUniverse = DefinitionsClass.this.MacroContextUniverse
+
+      lazy val materializeClassTag    = getMemberMethod(ReflectPackage, nme.materializeClassTag)
+      lazy val materializeWeakTypeTag = ReflectApiPackage.map(sym => getMemberMethod(sym, nme.materializeWeakTypeTag))
+      lazy val materializeTypeTag     = ReflectApiPackage.map(sym => getMemberMethod(sym, nme.materializeTypeTag))
+
+      lazy val experimentalModule         = getMemberModule(languageFeatureModule, nme.experimental)
+      lazy val MacrosFeature              = getLanguageFeature("macros", experimentalModule)
+      lazy val DynamicsFeature            = getLanguageFeature("dynamics")
+      lazy val PostfixOpsFeature          = getLanguageFeature("postfixOps")
+      lazy val ReflectiveCallsFeature     = getLanguageFeature("reflectiveCalls")
+      lazy val ImplicitConversionsFeature = getLanguageFeature("implicitConversions")
+      lazy val HigherKindsFeature         = getLanguageFeature("higherKinds")
+      lazy val ExistentialsFeature        = getLanguageFeature("existentials")
+
+      lazy val ApiUniverseReify = ApiUniverseClass.map(sym => getMemberMethod(sym, nme.reify))
+
+      lazy val ReflectRuntimeUniverse      = DefinitionsClass.this.ReflectRuntimeUniverse
+      lazy val ReflectRuntimeCurrentMirror = DefinitionsClass.this.ReflectRuntimeCurrentMirror
+
+      lazy val TreesTreeType         = TreesClass.map(sym => getTypeMember(sym, tpnme.Tree))
+      object TreeType { def unapply(tpe: Type): Boolean = tpe.typeSymbol.overrideChain contains TreesTreeType }
+      object SubtreeType { def unapply(tpe: Type): Boolean = tpe.typeSymbol.overrideChain exists (_.tpe <:< TreesTreeType.tpe) }
+
+      object ExprClassOf { def unapply(tp: Type): Option[Type] = elementExtractOption(ExprClass, tp) }
+
+      lazy val PartialManifestClass  = getTypeMember(ReflectPackage, tpnme.ClassManifest)
+      lazy val ManifestSymbols = Set[Symbol](PartialManifestClass, FullManifestClass, OptManifestClass)
     }
   }
 }
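
As a rough illustration of the rules the samOf scaladoc above lays out (the class names below are hypothetical, not taken from the compiler), these are the kinds of types it would and would not treat as single-abstract-method types:

    abstract class Parser {                 // public no-arg primary constructor: ok
      def parse(input: String): Int         // exactly one abstract method, monomorphic,
    }                                       //   one parameter list => counts as a SAM type

    abstract class Validator(limit: Int) {  // constructor takes a parameter => excluded
      def check(value: Int): Boolean
    }

    abstract class Mapper {                 // abstract method has a type parameter =>
      def map[A](value: A): A               //   not monomorphic => excluded
    }
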
diff --git a/src/reflect/scala/reflect/internal/Depth.scala b/src/reflect/scala/reflect/internal/Depth.scala
new file mode 100644
index 0000000..357abf7
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Depth.scala
@@ -0,0 +1,28 @@
+package scala
+package reflect
+package internal
+
+import Depth._
+
+final class Depth private (val depth: Int) extends AnyVal with Ordered[Depth] {
+  def max(that: Depth): Depth   = if (this < that) that else this
+  def decr(n: Int): Depth       = if (isAnyDepth) this else Depth(depth - n)
+  def incr(n: Int): Depth       = if (isAnyDepth) this else Depth(depth + n)
+  def decr: Depth               = decr(1)
+  def incr: Depth               = incr(1)
+
+  def isNegative = depth < 0
+  def isZero     = depth == 0
+  def isAnyDepth = this == AnyDepth
+
+  def compare(that: Depth): Int = if (depth < that.depth) -1 else if (this == that) 0 else 1
+  override def toString = s"Depth($depth)"
+}
+
+object Depth {
+  // A don't care value for the depth parameter in lubs/glbs and related operations.
+  final val AnyDepth = new Depth(Int.MinValue)
+  final val Zero     = new Depth(0)
+
+  @inline final def apply(depth: Int): Depth = if (depth < 0) AnyDepth else new Depth(depth)
+}
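
A small usage sketch of the new Depth value class, assuming scala-reflect 2.11 is on the classpath; the explore method is an invented example in the spirit of the depth-limited recursions in lubs/glbs, not compiler code:

    import scala.reflect.internal.Depth

    object DepthDemo {
      // Recurse with a shrinking budget; a negative depth means "give up".
      def explore(d: Depth): Int =
        if (d.isNegative) 0
        else 1 + explore(d.decr)

      def main(args: Array[String]): Unit = {
        println(explore(Depth(3)))    // 4: depths 3, 2, 1, 0 recurse, then the budget runs out
        println(Depth.AnyDepth.decr)  // Depth(-2147483648): AnyDepth is never decremented
      }
    }
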
diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
index 2c2ed35..0eeca4a 100644
--- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
+++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -3,11 +3,11 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import scala.collection.{ mutable, immutable }
-import util._
 
 /** The name of this trait defines the eventual intent better than
  *  it does the initial contents.
@@ -84,14 +84,14 @@ trait ExistentialsAndSkolems {
    *  also replaced, except for term symbols of an Ident tree, where
    *  only the type of the Ident is changed.
    */
-  final def existentialTransform[T](rawSyms: List[Symbol], tp: Type, rawOwner: Option[Symbol] = None)(creator: (List[Symbol], Type) => T): T = {
+  final def existentialTransform[T](rawSyms: List[Symbol], tp: Type, rawOwner: Symbol = NoSymbol)(creator: (List[Symbol], Type) => T): T = {
     val allBounds = existentialBoundsExcludingHidden(rawSyms)
     val typeParams: List[Symbol] = rawSyms map { sym =>
       val name = sym.name match {
         case x: TypeName  => x
         case x            => tpnme.singletonName(x)
       }
-      def rawOwner0  = rawOwner.getOrElse(abort(s"no owner provided for existential transform over raw parameter: $sym"))
+      def rawOwner0  = rawOwner orElse abort(s"no owner provided for existential transform over raw parameter: $sym")
       val bound      = allBounds(sym)
       val sowner     = if (isRawParameter(sym)) rawOwner0 else sym.owner
       val quantified = sowner.newExistential(name, sym.pos)
@@ -113,7 +113,7 @@ trait ExistentialsAndSkolems {
    * @param hidden   The original type
    * @param rawOwner The owner for Java raw types.
    */
-  final def packSymbols(hidden: List[Symbol], tp: Type, rawOwner: Option[Symbol] = None): Type =
+  final def packSymbols(hidden: List[Symbol], tp: Type, rawOwner: Symbol = NoSymbol): Type =
     if (hidden.isEmpty) tp
     else existentialTransform(hidden, tp, rawOwner)(existentialAbstraction)
 }
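
For context on what packSymbols achieves with the hidden symbols, here is a hedged, self-contained sketch of the effect at the language level (Box and escape are made-up names):

    trait Box { type T; def value: T }

    object PackDemo {
      // The inferred result type of the block mentions the local `b`. The typer
      // "packs" the escaping symbol, so the method ends up with an existential
      // result type along the lines of `b.T forSome { val b: Box }` rather than
      // leaking `b` itself.
      def escape(mk: () => Box) = { val b = mk(); b.value }
    }
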
diff --git a/src/reflect/scala/reflect/internal/FatalError.scala b/src/reflect/scala/reflect/internal/FatalError.scala
index a084fc2..08a9a63 100644
--- a/src/reflect/scala/reflect/internal/FatalError.scala
+++ b/src/reflect/scala/reflect/internal/FatalError.scala
@@ -2,5 +2,6 @@
  * Copyright 2005-2013 LAMP/EPFL
  * @author  Martin Odersky
  */
-package scala.reflect.internal
+package scala
+package reflect.internal
 case class FatalError(msg: String) extends Exception(msg)
diff --git a/src/reflect/scala/reflect/internal/FlagSets.scala b/src/reflect/scala/reflect/internal/FlagSets.scala
index 6a3b687..ef9c778 100644
--- a/src/reflect/scala/reflect/internal/FlagSets.scala
+++ b/src/reflect/scala/reflect/internal/FlagSets.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import scala.language.implicitConversions
@@ -41,5 +42,11 @@ trait FlagSets extends api.FlagSets { self: SymbolTable =>
     val DEFAULTPARAM  : FlagSet = Flags.DEFAULTPARAM
     val PRESUPER      : FlagSet = Flags.PRESUPER
     val DEFAULTINIT   : FlagSet = Flags.DEFAULTINIT
+    val ENUM          : FlagSet = Flags.ENUM
+    val PARAMACCESSOR : FlagSet = Flags.PARAMACCESSOR
+    val CASEACCESSOR  : FlagSet = Flags.CASEACCESSOR
+    val SYNTHETIC     : FlagSet = Flags.SYNTHETIC
+    val ARTIFACT      : FlagSet = Flags.ARTIFACT
+    val STABLE        : FlagSet = Flags.STABLE
   }
 }
diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala
index 5ebe02d..1707061 100644
--- a/src/reflect/scala/reflect/internal/Flags.scala
+++ b/src/reflect/scala/reflect/internal/Flags.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import scala.collection.{ mutable, immutable }
@@ -62,7 +63,7 @@ import scala.collection.{ mutable, immutable }
 // 45:  SYNCHRONIZED/M
 // 46:        ARTIFACT
 // 47: DEFAULTMETHOD/M
-// 48:
+// 48:            ENUM
 // 49:
 // 50:
 // 51:    lateDEFERRED
@@ -116,8 +117,10 @@ class ModifierFlags {
   final val LAZY          = 1L << 31      // symbol is a lazy val. can't have MUTABLE unless transformed by typer
   final val PRESUPER      = 1L << 37      // value is evaluated before super call
   final val DEFAULTINIT   = 1L << 41      // symbol is initialized to the default value: used by -Xcheckinit
-  // ARTIFACT at #46 in 2.11+
+  final val ARTIFACT      = 1L << 46      // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
+                                          // to see which symbols are marked as ARTIFACT, see scaladocs for FlagValues.ARTIFACT
   final val DEFAULTMETHOD = 1L << 47      // symbol is a java default method
+  final val ENUM          = 1L << 48      // symbol is an enum
 
   // Overridden.
   def flagToString(flag: Long): String = ""
@@ -128,7 +131,7 @@ class ModifierFlags {
 }
 object ModifierFlags extends ModifierFlags
 
-/** All flags and associated operatins */
+/** All flags and associated operations */
 class Flags extends ModifierFlags {
   final val METHOD        = 1 << 6        // a method
   final val MODULE        = 1 << 8        // symbol is module or class implementing a module
@@ -163,11 +166,10 @@ class Flags extends ModifierFlags {
   final val VBRIDGE       = 1L << 42      // symbol is a varargs bridge
 
   final val VARARGS       = 1L << 43      // symbol is a Java-style varargs method
-  final val TRIEDCOOKING  = 1L << 44      // ``Cooking'' has been tried on this symbol
-                                          // A Java method's type is ``cooked'' by transforming raw types to existentials
+  final val TRIEDCOOKING  = 1L << 44      // `Cooking` has been tried on this symbol
+                                          // A Java method's type is `cooked` by transforming raw types to existentials
 
   final val SYNCHRONIZED  = 1L << 45      // symbol is a method which should be marked ACC_SYNCHRONIZED
-  final val ARTIFACT      = 1L << 46      // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
 
   // ------- shift definitions -------------------------------------------------------
 
@@ -250,7 +252,7 @@ class Flags extends ModifierFlags {
   /** These modifiers appear in TreePrinter output. */
   final val PrintableFlags =
     ExplicitFlags | BridgeFlags | LOCAL | SYNTHETIC | STABLE | CASEACCESSOR | MACRO |
-    ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | STATIC | SPECIALIZED | SYNCHRONIZED
+    ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | STATIC | SPECIALIZED | SYNCHRONIZED | ARTIFACT
 
   /** When a symbol for a field is created, only these flags survive
    *  from Modifiers.  Others which may be applied at creation time are:
@@ -266,11 +268,13 @@ class Flags extends ModifierFlags {
   final val GetterFlags = ~(PRESUPER | MUTABLE)
   final val SetterFlags = ~(PRESUPER | MUTABLE | STABLE | CASEACCESSOR | IMPLICIT)
 
-  /** When a symbol for a default getter is created, it inherits these
-   *  flags from the method with the default.  Other flags applied at creation
-   *  time are SYNTHETIC, DEFAULTPARAM, and possibly OVERRIDE, and maybe PRESUPER.
+  /** Since DEFAULTPARAM is overloaded with TRAIT, we need some additional
+   *  means of determining what that bit means. Usually DEFAULTPARAM is coupled
+   *  with PARAM, which suffices. Default getters get METHOD instead.
+   *  This constant is the mask of flags which can survive from the parameter modifiers.
+   *  See paramFlagsToDefaultGetter for the full logic.
    */
-  final val DefaultGetterFlags = PRIVATE | PROTECTED | FINAL
+  final val DefaultGetterFlags = PRIVATE | PROTECTED | FINAL | PARAMACCESSOR
 
   /** When a symbol for a method parameter is created, only these flags survive
    *  from Modifiers.  Others which may be applied at creation time are:
@@ -296,7 +300,11 @@ class Flags extends ModifierFlags {
   assert((OverloadedFlagsMask & FlagsNotPickled) == 0, flagsToString(OverloadedFlagsMask & FlagsNotPickled))
 
   /** These flags are pickled */
-  final val PickledFlags  = InitialFlags & ~FlagsNotPickled
+  final val PickledFlags  = (
+      (InitialFlags & ~FlagsNotPickled)
+    | notPRIVATE // for value class constructors (SI-6601), and private members referenced
+                 // in @inline-marked methods publicized in SuperAccessors (see SI-6608, e6b4204604)
+  )
 
   /** If we have a top-level class or module
    *  and someone asks us for a flag not in TopLevelPickledFlags,
@@ -304,6 +312,9 @@ class Flags extends ModifierFlags {
    */
   final val TopLevelPickledFlags = PickledFlags & ~(MODULE | METHOD | PACKAGE | PARAM | EXISTENTIAL)
 
+  def paramFlagsToDefaultGetter(paramFlags: Long): Long =
+    (paramFlags & DefaultGetterFlags) | SYNTHETIC | METHOD | DEFAULTPARAM
+
   def getterFlags(fieldFlags: Long): Long = ACCESSOR + (
     if ((fieldFlags & MUTABLE) != 0) fieldFlags & ~MUTABLE & ~PRESUPER
     else fieldFlags & ~PRESUPER | STABLE
@@ -422,9 +433,9 @@ class Flags extends ModifierFlags {
     case             VARARGS => "<varargs>"                           // (1L << 43)
     case        TRIEDCOOKING => "<triedcooking>"                      // (1L << 44)
     case        SYNCHRONIZED => "<synchronized>"                      // (1L << 45)
-    case     0x400000000000L => ""                                    // (1L << 46)
+    case            ARTIFACT => "<artifact>"                          // (1L << 46)
     case       DEFAULTMETHOD => "<defaultmethod>"                     // (1L << 47)
-    case    0x1000000000000L => ""                                    // (1L << 48)
+    case                ENUM => "<enum>"                              // (1L << 48)
     case    0x2000000000000L => ""                                    // (1L << 49)
     case    0x4000000000000L => ""                                    // (1L << 50)
     case      `lateDEFERRED` => "<latedeferred>"                      // (1L << 51)
@@ -456,7 +467,7 @@ class Flags extends ModifierFlags {
   )
 
   @deprecated("Use flagString on the flag-carrying member", "2.10.0")
-  def flagsToString(flags: Long, privateWithin: String): String = {
+  private[scala] def flagsToString(flags: Long, privateWithin: String): String = {
     val access    = accessString(flags, privateWithin)
     val nonAccess = flagsToString(flags & ~AccessFlags)
 
@@ -464,7 +475,7 @@ class Flags extends ModifierFlags {
   }
 
   @deprecated("Use flagString on the flag-carrying member", "2.10.0")
-  def flagsToString(flags: Long): String = {
+  private[scala] def flagsToString(flags: Long): String = {
     // Fast path for common case
     if (flags == 0L) "" else {
       var sb: StringBuilder = null
@@ -497,4 +508,4 @@ class Flags extends ModifierFlags {
   final val rawFlagPickledOrder: Array[Long] = pickledListOrder.toArray
 }
 
-object Flags extends Flags { }
+object Flags extends Flags
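
The flag changes above are plain bit-mask arithmetic. A standalone sketch of the shape of paramFlagsToDefaultGetter, using stand-in constants rather than the compiler's Flags object:

    object FlagMaskDemo {
      // Stand-in bit values, chosen only for illustration.
      final val PROTECTED     = 1L << 0
      final val PRIVATE       = 1L << 2
      final val FINAL         = 1L << 5
      final val METHOD        = 1L << 6
      final val PARAM         = 1L << 13
      final val SYNTHETIC     = 1L << 21
      final val DEFAULTPARAM  = 1L << 25
      final val PARAMACCESSOR = 1L << 29

      final val DefaultGetterFlags = PRIVATE | PROTECTED | FINAL | PARAMACCESSOR

      // Keep only the bits in the mask, then add the getter-specific bits.
      def paramFlagsToDefaultGetter(paramFlags: Long): Long =
        (paramFlags & DefaultGetterFlags) | SYNTHETIC | METHOD | DEFAULTPARAM

      def main(args: Array[String]): Unit = {
        val getter = paramFlagsToDefaultGetter(PRIVATE | PARAM)
        println((getter & PRIVATE) != 0L)  // true: the access modifier survives the mask
        println((getter & PARAM)   != 0L)  // false: PARAM is not in DefaultGetterFlags
      }
    }
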
diff --git a/src/reflect/scala/reflect/internal/FreshNames.scala b/src/reflect/scala/reflect/internal/FreshNames.scala
new file mode 100644
index 0000000..7e9a568
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/FreshNames.scala
@@ -0,0 +1,39 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ */
+
+package scala
+package reflect
+package internal
+
+import scala.reflect.internal.util.FreshNameCreator
+
+trait FreshNames { self: Names with StdNames =>
+  // SI-6879 Keeps track of counters that are supposed to be globally unique
+  //         as opposed to traditional freshers that are unique to compilation units.
+  val globalFreshNameCreator = new FreshNameCreator
+
+  // default fresh name creator used to abstract over currentUnit.fresh and runtime fresh name creator
+  def currentFreshNameCreator: FreshNameCreator
+
+  // create fresh term/type name using implicit fresh name creator
+  def freshTermName(prefix: String = nme.FRESH_TERM_NAME_PREFIX)(implicit creator: FreshNameCreator): TermName = newTermName(creator.newName(prefix))
+  def freshTypeName(prefix: String)(implicit creator: FreshNameCreator): TypeName = newTypeName(creator.newName(prefix))
+
+  // Extractor that matches names which were generated by some
+  // FreshNameCreator with a known prefix. Extracts the user-specified
+  // prefix that was passed to newName by stripping the creator prefix
+  // and the unique number at the end of the name.
+  class FreshNameExtractor(creatorPrefix: String = "") {
+    // quote the prefix so that it can be used with replaceFirst,
+    // which expects a regex rather than a plain string
+    val quotedCreatorPrefix = java.util.regex.Pattern.quote(creatorPrefix)
+
+    def unapply(name: Name): Option[String] = {
+      val sname = name.toString
+      // name should start with creatorPrefix and end with number
+      if (!sname.startsWith(creatorPrefix) || !sname.matches("^.*\\d*$")) None
+      else Some(NameTransformer.decode(sname.replaceFirst(quotedCreatorPrefix, "").replaceAll("\\d*$", "")))
+    }
+  }
+}
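
A plain-string analogue (not the compiler's code) of what FreshNameExtractor.unapply does: strip the creator prefix and the trailing unique number, recovering the prefix originally passed to newName. The prefix and name below are invented examples:

    object FreshNameDemo {
      def extract(creatorPrefix: String, name: String): Option[String] = {
        val quoted = java.util.regex.Pattern.quote(creatorPrefix)
        if (!name.startsWith(creatorPrefix)) None
        else Some(name.replaceFirst(quoted, "").replaceAll("\\d*$", ""))
      }

      def main(args: Array[String]): Unit =
        println(extract("fresh$macro$", "fresh$macro$x$12"))   // Some(x$)
    }
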
diff --git a/src/reflect/scala/reflect/internal/HasFlags.scala b/src/reflect/scala/reflect/internal/HasFlags.scala
index 12fd3a3..aa8f4c5 100644
--- a/src/reflect/scala/reflect/internal/HasFlags.scala
+++ b/src/reflect/scala/reflect/internal/HasFlags.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import Flags._
@@ -80,8 +81,11 @@ trait HasFlags {
   // identically, testing for a single flag.
   def hasAbstractFlag    = hasFlag(ABSTRACT)
   def hasAccessorFlag    = hasFlag(ACCESSOR)
-  def hasDefault         = hasAllFlags(DEFAULTPARAM | PARAM)
+  def hasDefault         = hasFlag(DEFAULTPARAM) && hasFlag(METHOD | PARAM) // Second condition disambiguates with TRAIT
+  def hasEnumFlag        = hasFlag(ENUM)
+  @deprecated("Use isLocalToThis instead", "2.11.0")
   def hasLocalFlag       = hasFlag(LOCAL)
+  def isLocalToThis      = hasFlag(LOCAL)
   def hasModuleFlag      = hasFlag(MODULE)
   def hasPackageFlag     = hasFlag(PACKAGE)
   def hasStableFlag      = hasFlag(STABLE)
@@ -104,6 +108,7 @@ trait HasFlags {
   def isOverride         = hasFlag(OVERRIDE)
   def isParamAccessor    = hasFlag(PARAMACCESSOR)
   def isPrivate          = hasFlag(PRIVATE)
+  @deprecated ("Use `hasPackageFlag` instead", "2.11.0")
   def isPackage          = hasFlag(PACKAGE)
   def isPrivateLocal     = hasAllFlags(PrivateLocal)
   def isProtected        = hasFlag(PROTECTED)
@@ -161,15 +166,6 @@ trait HasFlags {
     else nonAccess + " " + access
   }
 
-  // Backward compat section
-  @deprecated( "Use isTrait", "2.10.0")
-  def hasTraitFlag = hasFlag(TRAIT)
-  @deprecated("Use hasDefault", "2.10.0")
-  def hasDefaultFlag = hasFlag(DEFAULTPARAM)
-  @deprecated("Use isValueParameter or isTypeParameter", "2.10.0")
+  // Guess this can't be deprecated seeing as it's in the reflect API.
   def isParameter = hasFlag(PARAM)
-  @deprecated("Use flagString", "2.10.0")
-  def defaultFlagString = flagString
-  @deprecated("Use flagString(mask)", "2.10.0")
-  def hasFlagsToString(mask: Long): String = flagString(mask)
 }
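
The hasDefault change leans on the fact that DEFAULTPARAM shares its bit with TRAIT. A standalone sketch (stand-in values, not the compiler's) of why the extra METHOD | PARAM test is needed:

    object OverloadedBitDemo {
      final val TRAIT, DEFAULTPARAM = 1L << 25   // one bit, two meanings
      final val METHOD = 1L << 6
      final val PARAM  = 1L << 13

      def hasDefault(flags: Long): Boolean =
        (flags & DEFAULTPARAM) != 0 && (flags & (METHOD | PARAM)) != 0

      def main(args: Array[String]): Unit = {
        println(hasDefault(DEFAULTPARAM | PARAM))  // true: a parameter with a default argument
        println(hasDefault(TRAIT))                 // false: the same bit set on a trait symbol
      }
    }
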
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index 9a8dee1..dc4ad25 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -1,21 +1,23 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import scala.collection.mutable.WeakHashMap
 import scala.ref.WeakReference
+import scala.reflect.internal.Flags._
 
 // SI-6241: move importers to a mirror
-trait Importers extends api.Importers { self: SymbolTable =>
+trait Importers { to: SymbolTable =>
 
-  def mkImporter(from0: api.Universe): Importer { val from: from0.type } = (
-    if (self eq from0) {
+  override def mkImporter(from0: api.Universe): Importer { val from: from0.type } = (
+    if (to eq from0) {
       new Importer {
         val from = from0
-        val reverse = this.asInstanceOf[from.Importer{ val from: self.type }]
-        def importSymbol(sym: from.Symbol) = sym.asInstanceOf[self.Symbol]
-        def importType(tpe: from.Type) = tpe.asInstanceOf[self.Type]
-        def importTree(tree: from.Tree) = tree.asInstanceOf[self.Tree]
-        def importPosition(pos: from.Position) = pos.asInstanceOf[self.Position]
+        val reverse = this.asInstanceOf[from.Importer{ val from: to.type }]
+        def importSymbol(their: from.Symbol) = their.asInstanceOf[to.Symbol]
+        def importType(their: from.Type) = their.asInstanceOf[to.Type]
+        def importTree(their: from.Tree) = their.asInstanceOf[to.Tree]
+        def importPosition(their: from.Position) = their.asInstanceOf[to.Position]
       }
     } else {
       // todo. fix this loophole
@@ -28,8 +30,8 @@ trait Importers extends api.Importers { self: SymbolTable =>
 
     val from: SymbolTable
 
-    protected lazy val symMap = new Cache[from.Symbol, Symbol]()
-    protected lazy val tpeMap = new Cache[from.Type, Type]()
+    protected lazy val symMap = new Cache[from.Symbol, to.Symbol]()
+    protected lazy val tpeMap = new Cache[from.Type, to.Type]()
     protected class Cache[K <: AnyRef, V <: AnyRef] extends WeakHashMap[K, WeakReference[V]] {
       def weakGet(key: K): Option[V] = this get key flatMap WeakReference.unapply
       def weakUpdate(key: K, value: V) = this.update(key, WeakReference(value))
@@ -49,158 +51,164 @@ trait Importers extends api.Importers { self: SymbolTable =>
     }
 
     object reverse extends from.StandardImporter {
-      val from: self.type = self
+      val from: to.type = to
       // FIXME this and reverse should be constantly kept in sync
       // not just synced once upon the first usage of reverse
-      for ((fromsym, WeakReference(mysym)) <- StandardImporter.this.symMap) symMap += ((mysym, WeakReference(fromsym)))
-      for ((fromtpe, WeakReference(mytpe)) <- StandardImporter.this.tpeMap) tpeMap += ((mytpe, WeakReference(fromtpe)))
+      for ((theirsym, WeakReference(mysym)) <- StandardImporter.this.symMap) symMap += ((mysym, WeakReference(theirsym)))
+      for ((theirtpe, WeakReference(mytpe)) <- StandardImporter.this.tpeMap) tpeMap += ((mytpe, WeakReference(theirtpe)))
     }
 
-    // todo. careful import of positions
-    def importPosition(pos: from.Position): Position =
-      pos.asInstanceOf[Position]
+    // ============== SYMBOLS ==============
+
+    protected def recreatedSymbolCompleter(my: to.Symbol, their: from.Symbol) = {
+      // We lock the symbol that is imported for a very short period of time,
+      // i.e. only while the type parameters of the symbol are being imported.
+      // The lock tells the recursive importSymbol calls that type parameters
+      // need to be created from scratch, because otherwise type parameters are
+      // imported by looking into owner.typeParams, which is obviously
+      // unavailable while the completer is being created.
+      try {
+        my setFlag Flags.LOCKED
+        val mytypeParams = their.typeParams map importSymbol
+        new LazyPolyType(mytypeParams) with FlagAgnosticCompleter {
+          override def complete(my: to.Symbol): Unit = {
+            val theirCore = their.info match {
+              case from.PolyType(_, core) => core
+              case core => core
+            }
+            my setInfo GenPolyType(mytypeParams, importType(theirCore))
+            my setAnnotations (their.annotations map importAnnotationInfo)
+            markAllCompleted(my)
+          }
+        }
+      } finally {
+        my resetFlag Flags.LOCKED
+      }
+    }
+
+    protected def recreateSymbol(their: from.Symbol): to.Symbol = {
+      val myowner = importSymbol(their.owner)
+      val mypos   = importPosition(their.pos)
+      val myname  = importName(their.name)
+      val myflags = their.flags
+      def linkReferenced(my: TermSymbol, their: from.TermSymbol, op: from.Symbol => Symbol): Symbol = {
+        symMap.weakUpdate(their, my)
+        my.referenced = op(their.referenced)
+        my
+      }
+      val my = their match {
+        case their: from.MethodSymbol =>
+          linkReferenced(myowner.newMethod(myname.toTermName, mypos, myflags), their, importSymbol)
+        case their: from.ModuleSymbol =>
+          val ret = linkReferenced(myowner.newModuleSymbol(myname.toTermName, mypos, myflags), their, importSymbol)
+          ret.associatedFile = their.associatedFile
+          ret
+        case their: from.FreeTermSymbol =>
+          newFreeTermSymbol(myname.toTermName, their.value, their.flags, their.origin) setInfo importType(their.info)
+        case their: from.FreeTypeSymbol =>
+          newFreeTypeSymbol(myname.toTypeName, their.flags, their.origin)
+        case their: from.TermSymbol =>
+          linkReferenced(myowner.newValue(myname.toTermName, mypos, myflags), their, importSymbol)
+        case their: from.TypeSkolem =>
+          val origin = their.unpackLocation match {
+            case null                  => null
+            case theirloc: from.Tree   => importTree(theirloc)
+            case theirloc: from.Symbol => importSymbol(theirloc)
+          }
+          myowner.newTypeSkolemSymbol(myname.toTypeName, origin, mypos, myflags)
+        case their: from.ModuleClassSymbol =>
+          val my = myowner.newModuleClass(myname.toTypeName, mypos, myflags)
+          symMap.weakUpdate(their, my)
+          my.sourceModule = importSymbol(their.sourceModule)
+          my
+        case their: from.ClassSymbol =>
+          val my = myowner.newClassSymbol(myname.toTypeName, mypos, myflags)
+          symMap.weakUpdate(their, my)
+          if (their.thisSym != their) {
+            my.typeOfThis = importType(their.typeOfThis)
+            my.thisSym setName importName(their.thisSym.name)
+          }
+          my.associatedFile = their.associatedFile
+          my
+        case their: from.TypeSymbol =>
+          myowner.newTypeSymbol(myname.toTypeName, mypos, myflags)
+      }
+      symMap.weakUpdate(their, my)
+      markFlagsCompleted(my)(mask = AllFlags)
+      my setInfo recreatedSymbolCompleter(my, their)
+    }
 
-    def importSymbol(sym0: from.Symbol): Symbol = {
-      def doImport(sym: from.Symbol): Symbol =
-        symMap weakGet sym match {
+    def importSymbol(their0: from.Symbol): Symbol = {
+      def cachedRecreateSymbol(their: from.Symbol): Symbol =
+        symMap weakGet their match {
           case Some(result) => result
-          case _ =>
-            val myowner = importSymbol(sym.owner)
-            val mypos   = importPosition(sym.pos)
-            val myname  = importName(sym.name).toTermName
-            val myflags = sym.flags
-            def linkReferenced(mysym: TermSymbol, x: from.TermSymbol, op: from.Symbol => Symbol): Symbol = {
-              symMap.weakUpdate(x, mysym)
-              mysym.referenced = op(x.referenced)
-              mysym
-            }
-            val mysym = sym match {
-              case x: from.MethodSymbol =>
-                linkReferenced(myowner.newMethod(myname, mypos, myflags), x, importSymbol)
-              case x: from.ModuleSymbol =>
-                linkReferenced(myowner.newModuleSymbol(myname, mypos, myflags), x, importSymbol)
-              case x: from.FreeTermSymbol =>
-                newFreeTermSymbol(importName(x.name).toTermName, x.value, x.flags, x.origin) setInfo importType(x.info)
-              case x: from.FreeTypeSymbol =>
-                newFreeTypeSymbol(importName(x.name).toTypeName, x.flags, x.origin)
-              case x: from.TermSymbol =>
-                linkReferenced(myowner.newValue(myname, mypos, myflags), x, importSymbol)
-              case x: from.TypeSkolem =>
-                val origin = x.unpackLocation match {
-                  case null           => null
-                  case y: from.Tree   => importTree(y)
-                  case y: from.Symbol => importSymbol(y)
-                }
-                myowner.newTypeSkolemSymbol(myname.toTypeName, origin, mypos, myflags)
-              case x: from.ModuleClassSymbol =>
-                val mysym = myowner.newModuleClass(myname.toTypeName, mypos, myflags)
-                symMap.weakUpdate(x, mysym)
-                mysym.sourceModule = importSymbol(x.sourceModule)
-                mysym
-              case x: from.ClassSymbol =>
-                val mysym = myowner.newClassSymbol(myname.toTypeName, mypos, myflags)
-                symMap.weakUpdate(x, mysym)
-                if (sym.thisSym != sym) {
-                  mysym.typeOfThis = importType(sym.typeOfThis)
-                  mysym.thisSym setName importName(sym.thisSym.name)
-                }
-                mysym
-              case x: from.TypeSymbol =>
-                myowner.newTypeSymbol(myname.toTypeName, mypos, myflags)
-            }
-            symMap.weakUpdate(sym, mysym)
-            mysym setFlag Flags.LOCKED
-            mysym setInfo {
-              val mytypeParams = sym.typeParams map importSymbol
-              new LazyPolyType(mytypeParams) with FlagAgnosticCompleter {
-                override def complete(s: Symbol) {
-                  val result = sym.info match {
-                    case from.PolyType(_, res) => res
-                    case result => result
-                  }
-                  s setInfo GenPolyType(mytypeParams, importType(result))
-                  s setAnnotations (sym.annotations map importAnnotationInfo)
-                }
-              }
-            }
-            mysym resetFlag Flags.LOCKED
-        } // end doImport
+          case _ => recreateSymbol(their)
+        }
 
-      def importOrRelink: Symbol = {
-        val sym = sym0 // makes sym visible in the debugger
-        if (sym == null)
+      def recreateOrRelink: Symbol = {
+        val their = their0 // makes their visible in the debugger
+        if (their == null)
           null
-        else if (sym == from.NoSymbol)
+        else if (their == from.NoSymbol)
           NoSymbol
-        else if (sym.isRoot)
+        else if (their.isRoot)
           rootMirror.RootClass // !!! replace with actual mirror when we move importers to the mirror
         else {
-          val name = sym.name
-          val owner = sym.owner
-          var scope = if (owner.isClass && !owner.isRefinementClass) owner.info else from.NoType
-          var existing = scope.decl(name)
-          if (sym.isModuleClass)
-            existing = existing.moduleClass
-
-          if (!existing.exists) scope = from.NoType
-
-          val myname = importName(name)
-          val myowner = importSymbol(owner)
-          val myscope = if (scope != from.NoType && !(myowner hasFlag Flags.LOCKED)) myowner.info else NoType
-          var myexisting = if (myscope != NoType) myowner.info.decl(myname) else NoSymbol // cannot load myexisting in general case, because it creates cycles for methods
-          if (sym.isModuleClass)
-            myexisting = importSymbol(sym.sourceModule).moduleClass
-
-          if (!sym.isOverloaded && myexisting.isOverloaded) {
-            myexisting =
-              if (sym.isMethod) {
-                val localCopy = doImport(sym)
-                myexisting filter (_.tpe matches localCopy.tpe)
-              } else {
-                myexisting filter (!_.isMethod)
+          val isModuleClass = their.isModuleClass
+          val isTparam = their.isTypeParameter && their.paramPos >= 0
+          val isOverloaded = their.isOverloaded
+
+          var theirscope = if (their.owner.isClass && !their.owner.isRefinementClass) their.owner.info else from.NoType
+          val theirexisting = if (isModuleClass) theirscope.decl(their.name).moduleClass else theirscope.decl(their.name)
+          if (!theirexisting.exists) theirscope = from.NoType
+
+          val myname = importName(their.name)
+          val myowner = importSymbol(their.owner)
+          val myscope = if (theirscope != from.NoType && !(myowner hasFlag Flags.LOCKED)) myowner.info else NoType
+          val myexisting = {
+            if (isModuleClass) importSymbol(their.sourceModule).moduleClass
+            else if (isTparam) (if (myowner hasFlag Flags.LOCKED) NoSymbol else myowner.typeParams(their.paramPos))
+            else if (isOverloaded) myowner.newOverloaded(myowner.thisType, their.alternatives map importSymbol)
+            else {
+              def disambiguate(my: Symbol) = {
+                val result =
+                  if (their.isMethod) {
+                    val localCopy = cachedRecreateSymbol(their)
+                    my filter (_.tpe matches localCopy.tpe)
+                  } else {
+                    my filter (!_.isMethod)
+                  }
+                assert(!result.isOverloaded,
+                    "import failure: cannot determine unique overloaded method alternative from\n "+
+                    (result.alternatives map (_.defString) mkString "\n")+"\n that matches "+their+":"+their.tpe)
+                result
               }
-            assert(!myexisting.isOverloaded,
-                "import failure: cannot determine unique overloaded method alternative from\n "+
-                (myexisting.alternatives map (_.defString) mkString "\n")+"\n that matches "+sym+":"+sym.tpe)
-          }
 
-          val mysym = {
-            if (sym.isOverloaded) {
-              myowner.newOverloaded(myowner.thisType, sym.alternatives map importSymbol)
-            } else if (sym.isTypeParameter && sym.paramPos >= 0 && !(myowner hasFlag Flags.LOCKED)) {
-              assert(myowner.typeParams.length > sym.paramPos,
-                  "import failure: cannot determine parameter "+sym+" (#"+sym.paramPos+") in "+
-                  myowner+typeParamsString(myowner.rawInfo)+"\n original symbol was: "+
-                  sym.owner+from.typeParamsString(sym.owner.info))
-              myowner.typeParams(sym.paramPos)
-            } else {
-              if (myexisting != NoSymbol) {
-                myexisting
-              } else {
-                val mysym = doImport(sym)
-
-                if (myscope != NoType) {
-                  assert(myowner.info.decls.lookup(myname) == NoSymbol, myname+" "+myowner.info.decl(myname)+" "+myexisting)
-                  myowner.info.decls enter mysym
-                }
-
-                mysym
-              }
+              val myexisting = if (myscope != NoType) myscope.decl(myname) else NoSymbol
+              if (myexisting.isOverloaded) disambiguate(myexisting)
+              else myexisting
             }
           }
 
-          mysym
+          myexisting.orElse {
+            val my = cachedRecreateSymbol(their)
+            if (myscope != NoType) {
+              assert(myscope.decls.lookup(myname) == NoSymbol, myname+" "+myscope.decl(myname)+" "+myexisting)
+              myscope.decls enter my
+            }
+            my
+          }
         }
-      } // end importOrRelink
+      } // end recreateOrRelink
 
-      val sym = sym0
-      symMap.weakGet(sym) match {
+      val their = their0
+      symMap.weakGet(their) match {
         case Some(result) => result
         case None =>
           pendingSyms += 1
           try {
-            val result = importOrRelink
-            symMap.weakUpdate(sym, result)
+            val result = recreateOrRelink
+            symMap.weakUpdate(their, result)
             result
           } finally {
             pendingSyms -= 1
@@ -209,71 +217,72 @@ trait Importers extends api.Importers { self: SymbolTable =>
       }
     }
 
-    def importType(tpe: from.Type): Type = {
-      def doImport(tpe: from.Type): Type = tpe match {
-        case from.TypeRef(pre, sym, args) =>
-          TypeRef(importType(pre), importSymbol(sym), args map importType)
-        case from.ThisType(clazz) =>
-          ThisType(importSymbol(clazz))
-        case from.SingleType(pre, sym) =>
-          SingleType(importType(pre), importSymbol(sym))
-        case from.MethodType(params, restpe) =>
-          MethodType(params map importSymbol, importType(restpe))
-        case from.PolyType(tparams, restpe) =>
-          PolyType(tparams map importSymbol, importType(restpe))
-        case from.NullaryMethodType(restpe) =>
-          NullaryMethodType(importType(restpe))
-        case from.ConstantType(constant @ from.Constant(_)) =>
-          ConstantType(importConstant(constant))
-        case from.SuperType(thistpe, supertpe) =>
-          SuperType(importType(thistpe), importType(supertpe))
-        case from.TypeBounds(lo, hi) =>
-          TypeBounds(importType(lo), importType(hi))
-        case from.BoundedWildcardType(bounds) =>
-          BoundedWildcardType(importTypeBounds(bounds))
-        case from.ClassInfoType(parents, decls, clazz) =>
-          val myclazz = importSymbol(clazz)
-          val myscope = if (myclazz.isPackageClass) newPackageScope(myclazz) else newScope
-          val myclazzTpe = ClassInfoType(parents map importType, myscope, myclazz)
-          myclazz setInfo GenPolyType(myclazz.typeParams, myclazzTpe) // needed so that newly created symbols find their scope
-          decls foreach importSymbol // will enter itself into myclazz
-          myclazzTpe
-        case from.RefinedType(parents, decls) =>
-          RefinedType(parents map importType, importScope(decls), importSymbol(tpe.typeSymbol))
-        case from.ExistentialType(tparams, restpe) =>
-          newExistentialType(tparams map importSymbol, importType(restpe))
-        case from.OverloadedType(pre, alts) =>
-          OverloadedType(importType(pre), alts map importSymbol)
-        case from.AntiPolyType(pre, targs) =>
-          AntiPolyType(importType(pre), targs map importType)
-        case x: from.TypeVar =>
-          TypeVar(importType(x.origin), importTypeConstraint(x.constr), x.typeArgs map importType, x.params map importSymbol)
-        case from.NotNullType(tpe) =>
-          NotNullType(importType(tpe))
-        case from.AnnotatedType(annots, tpe, selfsym) =>
-          AnnotatedType(annots map importAnnotationInfo, importType(tpe), importSymbol(selfsym))
-        case from.ErrorType =>
-          ErrorType
-        case from.WildcardType =>
-          WildcardType
-        case from.NoType =>
-          NoType
-        case from.NoPrefix =>
-          NoPrefix
-        case null =>
-          null
-      } // end doImport
-
-      def importOrRelink: Type =
-        doImport(tpe)
+    // ============== TYPES ==============
+
+    def recreateType(their: from.Type): Type = their match {
+      case from.TypeRef(pre, sym, args) =>
+        TypeRef(importType(pre), importSymbol(sym), args map importType)
+      case from.ThisType(clazz) =>
+        ThisType(importSymbol(clazz))
+      case from.SingleType(pre, sym) =>
+        SingleType(importType(pre), importSymbol(sym))
+      case from.MethodType(params, result) =>
+        MethodType(params map importSymbol, importType(result))
+      case from.PolyType(tparams, result) =>
+        PolyType(tparams map importSymbol, importType(result))
+      case from.NullaryMethodType(result) =>
+        NullaryMethodType(importType(result))
+      case from.ConstantType(constant @ from.Constant(_)) =>
+        ConstantType(importConstant(constant))
+      case from.SuperType(thistpe, supertpe) =>
+        SuperType(importType(thistpe), importType(supertpe))
+      case from.TypeBounds(lo, hi) =>
+        TypeBounds(importType(lo), importType(hi))
+      case from.BoundedWildcardType(bounds) =>
+        BoundedWildcardType(importType(bounds).asInstanceOf[TypeBounds])
+      case from.ClassInfoType(parents, decls, clazz) =>
+        val myclazz = importSymbol(clazz)
+        val myscope = if (myclazz.isPackageClass) newPackageScope(myclazz) else newScope
+        val myclazzTpe = ClassInfoType(parents map importType, myscope, myclazz)
+        myclazz setInfo GenPolyType(myclazz.typeParams, myclazzTpe) // needed so that newly created symbols find their scope
+        decls foreach importSymbol // will enter itself into myclazz
+        myclazzTpe
+      case from.RefinedType(parents, decls) =>
+        RefinedType(parents map importType, importScope(decls), importSymbol(their.typeSymbol))
+      case from.ExistentialType(tparams, result) =>
+        newExistentialType(tparams map importSymbol, importType(result))
+      case from.OverloadedType(pre, alts) =>
+        OverloadedType(importType(pre), alts map importSymbol)
+      case from.ImportType(qual) =>
+        ImportType(importTree(qual))
+      case from.AntiPolyType(pre, targs) =>
+        AntiPolyType(importType(pre), targs map importType)
+      case their: from.TypeVar =>
+        val myconstr = new TypeConstraint(their.constr.loBounds map importType, their.constr.hiBounds map importType)
+        myconstr.inst = importType(their.constr.inst)
+        TypeVar(importType(their.origin), myconstr, their.typeArgs map importType, their.params map importSymbol)
+      case from.AnnotatedType(annots, result) =>
+        AnnotatedType(annots map importAnnotationInfo, importType(result))
+      case from.ErrorType =>
+        ErrorType
+      case from.WildcardType =>
+        WildcardType
+      case from.NoType =>
+        NoType
+      case from.NoPrefix =>
+        NoPrefix
+      case null =>
+        null
+    }
 
-      tpeMap.weakGet(tpe) match {
+    def importType(their: from.Type): Type = {
+      tpeMap.weakGet(their) match {
         case Some(result) => result
         case None =>
           pendingTpes += 1
           try {
-            val result = importOrRelink
-            tpeMap.weakUpdate(tpe, result)
+            val result = recreateType(their)
+            tpeMap.weakUpdate(their, result)
             result
           } finally {
             pendingTpes -= 1
@@ -282,7 +291,145 @@ trait Importers extends api.Importers { self: SymbolTable =>
       }
     }
 
-    def importTypeBounds(bounds: from.TypeBounds) = importType(bounds).asInstanceOf[TypeBounds]
+    // ============== TREES ==============
+
+    def recreatedTreeCompleter(their: from.Tree, my: to.Tree): Unit = {
+      if (their.canHaveAttrs) {
+        if (my.hasSymbolField) my.symbol = importSymbol(their.symbol)
+        my.pos = importPosition(their.pos)
+        (their, my) match {
+          case (their: from.TypeTree, my: to.TypeTree) =>
+            if (their.wasEmpty) my.defineType(importType(their.tpe)) else my.setType(importType(their.tpe))
+          case (_, _) =>
+            my.tpe = importType(their.tpe)
+        }
+      }
+    }
+
+    def recreateTree(their: from.Tree): to.Tree = their match {
+      case from.ClassDef(mods, name, tparams, impl) =>
+        new ClassDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTemplate(impl))
+      case from.PackageDef(pid, stats) =>
+        new PackageDef(importRefTree(pid), stats map importTree)
+      case from.ModuleDef(mods, name, impl) =>
+        new ModuleDef(importModifiers(mods), importName(name).toTermName, importTemplate(impl))
+      case from.noSelfType =>
+        noSelfType
+      case from.pendingSuperCall =>
+        pendingSuperCall
+      case from.ValDef(mods, name, tpt, rhs) =>
+        new ValDef(importModifiers(mods), importName(name).toTermName, importTree(tpt), importTree(rhs))
+      case from.DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+        new DefDef(importModifiers(mods), importName(name).toTermName, tparams map importTypeDef, mmap(vparamss)(importValDef), importTree(tpt), importTree(rhs))
+      case from.TypeDef(mods, name, tparams, rhs) =>
+        new TypeDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTree(rhs))
+      case from.LabelDef(name, params, rhs) =>
+        new LabelDef(importName(name).toTermName, params map importIdent, importTree(rhs))
+      case from.Import(expr, selectors) =>
+        new Import(importTree(expr), selectors map importImportSelector)
+      case from.Template(parents, self, body) =>
+        new Template(parents map importTree, importValDef(self), body map importTree)
+      case from.Block(stats, expr) =>
+        new Block(stats map importTree, importTree(expr))
+      case from.CaseDef(pat, guard, body) =>
+        new CaseDef(importTree(pat), importTree(guard), importTree(body))
+      case from.Alternative(trees) =>
+        new Alternative(trees map importTree)
+      case from.Star(elem) =>
+        new Star(importTree(elem))
+      case from.Bind(name, body) =>
+        new Bind(importName(name), importTree(body))
+      case from.UnApply(fun, args) =>
+        new UnApply(importTree(fun), args map importTree)
+      case from.ArrayValue(elemtpt ,elems) =>
+        new ArrayValue(importTree(elemtpt), elems map importTree)
+      case from.Function(vparams, body) =>
+        new Function(vparams map importValDef, importTree(body))
+      case from.Assign(lhs, rhs) =>
+        new Assign(importTree(lhs), importTree(rhs))
+      case from.AssignOrNamedArg(lhs, rhs) =>
+        new AssignOrNamedArg(importTree(lhs), importTree(rhs))
+      case from.If(cond, thenp, elsep) =>
+        new If(importTree(cond), importTree(thenp), importTree(elsep))
+      case from.Match(selector, cases) =>
+        new Match(importTree(selector), cases map importCaseDef)
+      case from.Return(expr) =>
+        new Return(importTree(expr))
+      case from.Try(block, catches, finalizer) =>
+        new Try(importTree(block), catches map importCaseDef, importTree(finalizer))
+      case from.Throw(expr) =>
+        new Throw(importTree(expr))
+      case from.New(tpt) =>
+        new New(importTree(tpt))
+      case from.Typed(expr, tpt) =>
+        new Typed(importTree(expr), importTree(tpt))
+      case from.TypeApply(fun, args) =>
+        new TypeApply(importTree(fun), args map importTree)
+      case from.Apply(fun, args) => their match {
+        case _: from.ApplyToImplicitArgs =>
+          new ApplyToImplicitArgs(importTree(fun), args map importTree)
+        case _: from.ApplyImplicitView =>
+          new ApplyImplicitView(importTree(fun), args map importTree)
+        case _ =>
+          new Apply(importTree(fun), args map importTree)
+      }
+      case from.ApplyDynamic(qual, args) =>
+        new ApplyDynamic(importTree(qual), args map importTree)
+      case from.Super(qual, mix) =>
+        new Super(importTree(qual), importName(mix).toTypeName)
+      case from.This(qual) =>
+        new This(importName(qual).toTypeName)
+      case from.Select(qual, name) =>
+        new Select(importTree(qual), importName(name))
+      case from.Ident(name) =>
+        new Ident(importName(name))
+      case from.ReferenceToBoxed(ident) =>
+        new ReferenceToBoxed(importTree(ident) match { case ident: Ident => ident })
+      case from.Literal(constant @ from.Constant(_)) =>
+        new Literal(importConstant(constant))
+      case theirtt @ from.TypeTree() =>
+        val mytt = TypeTree()
+        if (theirtt.original != null) mytt.setOriginal(importTree(theirtt.original))
+        mytt
+      case from.Annotated(annot, arg) =>
+        new Annotated(importTree(annot), importTree(arg))
+      case from.SingletonTypeTree(ref) =>
+        new SingletonTypeTree(importTree(ref))
+      case from.SelectFromTypeTree(qual, name) =>
+        new SelectFromTypeTree(importTree(qual), importName(name).toTypeName)
+      case from.CompoundTypeTree(templ) =>
+        new CompoundTypeTree(importTemplate(templ))
+      case from.AppliedTypeTree(tpt, args) =>
+        new AppliedTypeTree(importTree(tpt), args map importTree)
+      case from.TypeBoundsTree(lo, hi) =>
+        new TypeBoundsTree(importTree(lo), importTree(hi))
+      case from.ExistentialTypeTree(tpt, whereClauses) =>
+        new ExistentialTypeTree(importTree(tpt), whereClauses map importMemberDef)
+      case from.EmptyTree =>
+        EmptyTree
+      case null =>
+        null
+    }
+
+    def importTree(their: from.Tree): Tree = {
+      val my = recreateTree(their)
+      if (my != null) {
+        addFixup(recreatedTreeCompleter(their, my))
+        tryFixup()
+        // we have to be careful with position import as some shared trees
+        // like EmptyTree, noSelfType don't support position assignment
+        if (their.pos != NoPosition) {
+          my.setPos(importPosition(their.pos))
+        }
+      }
+      importAttachments(their.attachments.all).foreach { my.updateAttachment(_) }
+      my
+    }
+
+    // ============== MISCELLANEOUS ==============
+
+    def importAttachments(attachments: Set[Any]): Set[Any] =
+      attachments.collect { case ia: ImportableAttachment => ia.importAttachment(this) }
 
     def importAnnotationInfo(ann: from.AnnotationInfo): AnnotationInfo = {
       val atp1 = importType(ann.atp)
@@ -301,13 +448,13 @@ trait Importers extends api.Importers { self: SymbolTable =>
         ScalaSigBytes(bytes)
       case from.NestedAnnotArg(annInfo) =>
         NestedAnnotArg(importAnnotationInfo(annInfo))
+      case from.UnmappableAnnotArg =>
+        UnmappableAnnotArg
     }
 
-    def importTypeConstraint(constr: from.TypeConstraint): TypeConstraint = {
-      val result = new TypeConstraint(constr.loBounds map importType, constr.hiBounds map importType)
-      result.inst = importType(constr.inst)
-      result
-    }
+    // todo. careful import of positions
+    def importPosition(their: from.Position): to.Position =
+      their.asInstanceOf[Position]
 
     // !!! todo: override to cater for PackageScopes
     def importScope(decls: from.Scope): Scope =
@@ -315,144 +462,15 @@ trait Importers extends api.Importers { self: SymbolTable =>
 
     def importName(name: from.Name): Name =
       if (name.isTypeName) newTypeName(name.toString) else newTermName(name.toString)
-    def importTypeName(name: from.TypeName): TypeName = importName(name).toTypeName
-    def importTermName(name: from.TermName): TermName = importName(name).toTermName
 
     def importModifiers(mods: from.Modifiers): Modifiers =
       new Modifiers(mods.flags, importName(mods.privateWithin), mods.annotations map importTree)
 
     def importImportSelector(sel: from.ImportSelector): ImportSelector =
       new ImportSelector(importName(sel.name), sel.namePos, if (sel.rename != null) importName(sel.rename) else null, sel.renamePos)
-
-    def importTree(tree: from.Tree): Tree = {
-      val mytree = tree match {
-        case from.ClassDef(mods, name, tparams, impl) =>
-          new ClassDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTemplate(impl))
-        case from.PackageDef(pid, stats) =>
-          new PackageDef(importRefTree(pid), stats map importTree)
-        case from.ModuleDef(mods, name, impl) =>
-          new ModuleDef(importModifiers(mods), importName(name).toTermName, importTemplate(impl))
-        case from.emptyValDef =>
-          emptyValDef
-        case from.ValDef(mods, name, tpt, rhs) =>
-          new ValDef(importModifiers(mods), importName(name).toTermName, importTree(tpt), importTree(rhs))
-        case from.DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
-          new DefDef(importModifiers(mods), importName(name).toTermName, tparams map importTypeDef, mmap(vparamss)(importValDef), importTree(tpt), importTree(rhs))
-        case from.TypeDef(mods, name, tparams, rhs) =>
-          new TypeDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTree(rhs))
-        case from.LabelDef(name, params, rhs) =>
-          new LabelDef(importName(name).toTermName, params map importIdent, importTree(rhs))
-        case from.Import(expr, selectors) =>
-          new Import(importTree(expr), selectors map importImportSelector)
-        case from.Template(parents, self, body) =>
-          new Template(parents map importTree, importValDef(self), body map importTree)
-        case from.Block(stats, expr) =>
-          new Block(stats map importTree, importTree(expr))
-        case from.CaseDef(pat, guard, body) =>
-          new CaseDef(importTree(pat), importTree(guard), importTree(body))
-        case from.Alternative(trees) =>
-          new Alternative(trees map importTree)
-        case from.Star(elem) =>
-          new Star(importTree(elem))
-        case from.Bind(name, body) =>
-          new Bind(importName(name), importTree(body))
-        case from.UnApply(fun, args) =>
-          new UnApply(importTree(fun), args map importTree)
-        case from.ArrayValue(elemtpt ,elems) =>
-          new ArrayValue(importTree(elemtpt), elems map importTree)
-        case from.Function(vparams, body) =>
-          new Function(vparams map importValDef, importTree(body))
-        case from.Assign(lhs, rhs) =>
-          new Assign(importTree(lhs), importTree(rhs))
-        case from.AssignOrNamedArg(lhs, rhs) =>
-          new AssignOrNamedArg(importTree(lhs), importTree(rhs))
-        case from.If(cond, thenp, elsep) =>
-          new If(importTree(cond), importTree(thenp), importTree(elsep))
-        case from.Match(selector, cases) =>
-          new Match(importTree(selector), cases map importCaseDef)
-        case from.Return(expr) =>
-          new Return(importTree(expr))
-        case from.Try(block, catches, finalizer) =>
-          new Try(importTree(block), catches map importCaseDef, importTree(finalizer))
-        case from.Throw(expr) =>
-          new Throw(importTree(expr))
-        case from.New(tpt) =>
-          new New(importTree(tpt))
-        case from.Typed(expr, tpt) =>
-          new Typed(importTree(expr), importTree(tpt))
-        case from.TypeApply(fun, args) =>
-          new TypeApply(importTree(fun), args map importTree)
-        case from.Apply(fun, args) => tree match {
-          case _: from.ApplyToImplicitArgs =>
-            new ApplyToImplicitArgs(importTree(fun), args map importTree)
-          case _: from.ApplyImplicitView =>
-            new ApplyImplicitView(importTree(fun), args map importTree)
-          case _ =>
-            new Apply(importTree(fun), args map importTree)
-        }
-        case from.ApplyDynamic(qual, args) =>
-          new ApplyDynamic(importTree(qual), args map importTree)
-        case from.Super(qual, mix) =>
-          new Super(importTree(qual), importTypeName(mix))
-        case from.This(qual) =>
-          new This(importName(qual).toTypeName)
-        case from.Select(qual, name) =>
-          new Select(importTree(qual), importName(name))
-        case from.Ident(name) =>
-          new Ident(importName(name))
-        case from.ReferenceToBoxed(ident) =>
-          new ReferenceToBoxed(importTree(ident) match { case ident: Ident => ident })
-        case from.Literal(constant @ from.Constant(_)) =>
-          new Literal(importConstant(constant))
-        case from.TypeTree() =>
-          new TypeTree()
-        case from.Annotated(annot, arg) =>
-          new Annotated(importTree(annot), importTree(arg))
-        case from.SingletonTypeTree(ref) =>
-          new SingletonTypeTree(importTree(ref))
-        case from.SelectFromTypeTree(qual, name) =>
-          new SelectFromTypeTree(importTree(qual), importName(name).toTypeName)
-        case from.CompoundTypeTree(templ) =>
-          new CompoundTypeTree(importTemplate(templ))
-        case from.AppliedTypeTree(tpt, args) =>
-          new AppliedTypeTree(importTree(tpt), args map importTree)
-        case from.TypeBoundsTree(lo, hi) =>
-          new TypeBoundsTree(importTree(lo), importTree(hi))
-        case from.ExistentialTypeTree(tpt, whereClauses) =>
-          new ExistentialTypeTree(importTree(tpt), whereClauses map importTree)
-        case from.EmptyTree =>
-          EmptyTree
-        case null =>
-          null
-      }
-      addFixup({
-        if (mytree != null) {
-          val mysym = if (tree.hasSymbol) importSymbol(tree.symbol) else NoSymbol
-          val mytpe = importType(tree.tpe)
-
-          mytree match {
-            case mytt: TypeTree =>
-              val tt = tree.asInstanceOf[from.TypeTree]
-              if (mytree.hasSymbol) mytt.symbol = mysym
-              if (tt.wasEmpty) mytt.defineType(mytpe) else mytt.setType(mytpe)
-              if (tt.original != null) mytt.setOriginal(importTree(tt.original))
-            case _ =>
-              if (mytree.hasSymbol) mytree.symbol = importSymbol(tree.symbol)
-              mytree.tpe = importType(tree.tpe)
-          }
-        }
-      })
-      tryFixup()
-      // we have to be careful with position import as some shared trees
-      // like EmptyTree, emptyValDef don't support position assignment
-      if (tree.pos != NoPosition)
-        mytree.setPos(importPosition(tree.pos))
-      else
-        mytree
-    }
-
     def importValDef(tree: from.ValDef): ValDef = importTree(tree).asInstanceOf[ValDef]
     def importTypeDef(tree: from.TypeDef): TypeDef = importTree(tree).asInstanceOf[TypeDef]
+    def importMemberDef(tree: from.MemberDef): MemberDef = importTree(tree).asInstanceOf[MemberDef]
     def importTemplate(tree: from.Template): Template = importTree(tree).asInstanceOf[Template]
     def importRefTree(tree: from.RefTree): RefTree = importTree(tree).asInstanceOf[RefTree]
     def importIdent(tree: from.Ident): Ident = importTree(tree).asInstanceOf[Ident]
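
For orientation, a rough sketch of how an importer is obtained and driven through the public entry point added later in this commit (internal.createImporter in Internals.scala). The second runtime universe below is a hypothetical setup used only to give importSymbol/importType/importTree something to work on:

    import scala.reflect.runtime.{universe => ru}

    // a second, independent universe to import from (illustrative only)
    val ru2 = new scala.reflect.runtime.JavaUniverse
    val m2  = ru2.runtimeMirror(getClass.getClassLoader)

    // importer from ru2 into ru; its path-dependent `from` is pinned to ru2
    val importer = ru.internal.createImporter(ru2)

    val theirType: ru2.Type = m2.staticClass("java.lang.String").toType
    val myType: ru.Type     = importer.importType(theirType)
    // symbols and trees travel the same way:
    //   importer.importSymbol(theirSym); importer.importTree(theirTree)
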
diff --git a/src/reflect/scala/reflect/internal/InfoTransformers.scala b/src/reflect/scala/reflect/internal/InfoTransformers.scala
index 82904b0..3814259 100644
--- a/src/reflect/scala/reflect/internal/InfoTransformers.scala
+++ b/src/reflect/scala/reflect/internal/InfoTransformers.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 trait InfoTransformers {
@@ -43,7 +44,7 @@ trait InfoTransformers {
       if (from == this.pid) this
       else if (from < this.pid)
         if (prev.pid < from) this
-        else prev.nextFrom(from);
+        else prev.nextFrom(from)
       else if (next.pid == NoPhase.id) next
       else next.nextFrom(from)
   }
diff --git a/src/reflect/scala/reflect/internal/Internals.scala b/src/reflect/scala/reflect/internal/Internals.scala
new file mode 100644
index 0000000..e9916cf
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Internals.scala
@@ -0,0 +1,174 @@
+package scala
+package reflect
+package internal
+
+import scala.language.implicitConversions
+import scala.language.higherKinds
+import scala.collection.mutable.WeakHashMap
+import scala.ref.WeakReference
+import scala.reflect.api.Universe
+import scala.reflect.macros.Attachments
+import scala.reflect.internal.util.FreshNameCreator
+import scala.reflect.internal.Flags._
+import scala.reflect.internal.util.ListOfNil
+
+trait Internals extends api.Internals {
+  self: SymbolTable =>
+
+  type Internal = MacroInternalApi
+  lazy val internal: Internal = new SymbolTableInternal {}
+
+  type Compat = MacroCompatApi
+  lazy val compat: Compat = new Compat {}
+
+  trait SymbolTableInternal extends MacroInternalApi {
+    lazy val reificationSupport: ReificationSupportApi = self.build
+
+    def createImporter(from0: Universe): Importer { val from: from0.type } = self.mkImporter(from0)
+
+    def newScopeWith(elems: Symbol*): Scope = self.newScopeWith(elems: _*)
+    def enter(scope: Scope, sym: Symbol): scope.type = { scope.enter(sym); scope }
+    def unlink(scope: Scope, sym: Symbol): scope.type = { scope.unlink(sym); scope }
+
+    def freeTerms(tree: Tree): List[FreeTermSymbol] = tree.freeTerms
+    def freeTypes(tree: Tree): List[FreeTypeSymbol] = tree.freeTypes
+    def substituteSymbols(tree: Tree, from: List[Symbol], to: List[Symbol]): Tree = tree.substituteSymbols(from, to)
+    def substituteTypes(tree: Tree, from: List[Symbol], to: List[Type]): Tree = tree.substituteTypes(from, to)
+    def substituteThis(tree: Tree, clazz: Symbol, to: Tree): Tree = tree.substituteThis(clazz, to)
+    def attachments(tree: Tree): Attachments { type Pos = Position } = tree.attachments
+    def updateAttachment[T: ClassTag](tree: Tree, attachment: T): tree.type = tree.updateAttachment(attachment)
+    def removeAttachment[T: ClassTag](tree: Tree): tree.type = tree.removeAttachment[T]
+    def setPos(tree: Tree, newpos: Position): tree.type = tree.setPos(newpos)
+    def setType(tree: Tree, tp: Type): tree.type = tree.setType(tp)
+    def defineType(tree: Tree, tp: Type): tree.type = tree.defineType(tp)
+    def setSymbol(tree: Tree, sym: Symbol): tree.type = tree.setSymbol(sym)
+    def setOriginal(tt: TypeTree, tree: Tree): TypeTree = tt.setOriginal(tree)
+
+    def captureVariable(vble: Symbol): Unit = self.captureVariable(vble)
+    def referenceCapturedVariable(vble: Symbol): Tree = self.referenceCapturedVariable(vble)
+    def capturedVariableType(vble: Symbol): Type = self.capturedVariableType(vble)
+
+    def classDef(sym: Symbol, impl: Template): ClassDef = self.ClassDef(sym, impl)
+    def moduleDef(sym: Symbol, impl: Template): ModuleDef = self.ModuleDef(sym, impl)
+    def valDef(sym: Symbol, rhs: Tree): ValDef = self.ValDef(sym, rhs)
+    def valDef(sym: Symbol): ValDef = self.ValDef(sym)
+    def defDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef = self.DefDef(sym, mods, vparamss, rhs)
+    def defDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef = self.DefDef(sym, vparamss, rhs)
+    def defDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef = self.DefDef(sym, mods, rhs)
+    def defDef(sym: Symbol, rhs: Tree): DefDef = self.DefDef(sym, rhs)
+    def defDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef = self.DefDef(sym, rhs)
+    def typeDef(sym: Symbol, rhs: Tree): TypeDef = self.TypeDef(sym, rhs)
+    def typeDef(sym: Symbol): TypeDef = self.TypeDef(sym)
+    def labelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef = self.LabelDef(sym, params, rhs)
+
+    def changeOwner(tree: Tree, prev: Symbol, next: Symbol): tree.type = {
+      object changeOwnerAndModuleClassTraverser extends ChangeOwnerTraverser(prev, next) {
+        override def traverse(tree: Tree) {
+          tree match {
+            case _: DefTree => change(tree.symbol.moduleClass)
+            case _          => // do nothing
+          }
+          super.traverse(tree)
+        }
+      }
+      changeOwnerAndModuleClassTraverser.traverse(tree)
+      tree
+    }
+
+    lazy val gen = self.treeBuild
+
+    def isFreeTerm(symbol: Symbol): Boolean = symbol.isFreeTerm
+    def asFreeTerm(symbol: Symbol): FreeTermSymbol = symbol.asFreeTerm
+    def isFreeType(symbol: Symbol): Boolean = symbol.isFreeType
+    def asFreeType(symbol: Symbol): FreeTypeSymbol = symbol.asFreeType
+    def newTermSymbol(symbol: Symbol, name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol = symbol.newTermSymbol(name, pos, flags)
+    def newModuleAndClassSymbol(symbol: Symbol, name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol) = symbol.newModuleAndClassSymbol(name, pos, flags)
+    def newMethodSymbol(symbol: Symbol, name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol = symbol.newMethodSymbol(name, pos, flags)
+    def newTypeSymbol(symbol: Symbol, name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol = symbol.newTypeSymbol(name, pos, flags)
+    def newClassSymbol(symbol: Symbol, name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol = symbol.newClassSymbol(name, pos, flags)
+    def newFreeTerm(name: String, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTermSymbol = reificationSupport.newFreeTerm(name, value, flags, origin)
+    def newFreeType(name: String, flags: FlagSet = NoFlags, origin: String = null): FreeTypeSymbol = reificationSupport.newFreeType(name, flags, origin)
+    def isErroneous(symbol: Symbol): Boolean = symbol.isErroneous
+    def isSkolem(symbol: Symbol): Boolean = symbol.isSkolem
+    def deSkolemize(symbol: Symbol): Symbol = symbol.deSkolemize
+    def initialize(symbol: Symbol): symbol.type = symbol.initialize
+    def fullyInitialize(symbol: Symbol): symbol.type = definitions.fullyInitializeSymbol(symbol).asInstanceOf[symbol.type]
+    def fullyInitialize(tp: Type): tp.type = definitions.fullyInitializeType(tp).asInstanceOf[tp.type]
+    def fullyInitialize(scope: Scope): scope.type = definitions.fullyInitializeScope(scope).asInstanceOf[scope.type]
+    def flags(symbol: Symbol): FlagSet = symbol.flags
+    def attachments(symbol: Symbol): Attachments { type Pos = Position } = symbol.attachments
+    def updateAttachment[T: ClassTag](symbol: Symbol, attachment: T): symbol.type = symbol.updateAttachment(attachment)
+    def removeAttachment[T: ClassTag](symbol: Symbol): symbol.type = symbol.removeAttachment[T]
+    def setOwner(symbol: Symbol, newowner: Symbol): symbol.type = { symbol.owner = newowner; symbol }
+    def setInfo(symbol: Symbol, tpe: Type): symbol.type = symbol.setInfo(tpe)
+    def setAnnotations(symbol: Symbol, annots: Annotation*): symbol.type = symbol.setAnnotations(annots: _*)
+    def setName(symbol: Symbol, name: Name): symbol.type = symbol.setName(name)
+    def setPrivateWithin(symbol: Symbol, sym: Symbol): symbol.type = symbol.setPrivateWithin(sym)
+    def setFlag(symbol: Symbol, flags: FlagSet): symbol.type = symbol.setFlag(flags)
+    def resetFlag(symbol: Symbol, flags: FlagSet): symbol.type = symbol.resetFlag(flags)
+
+    def thisType(sym: Symbol): Type = self.ThisType(sym)
+    def singleType(pre: Type, sym: Symbol): Type = self.SingleType(pre, sym)
+    def superType(thistpe: Type, supertpe: Type): Type = self.SuperType(thistpe, supertpe)
+    def constantType(value: Constant): ConstantType = self.ConstantType(value)
+    def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type = self.TypeRef(pre, sym, args)
+    def refinedType(parents: List[Type], decls: Scope): RefinedType = self.RefinedType(parents, decls)
+    def refinedType(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType = self.RefinedType(parents, decls, clazz)
+    def refinedType(parents: List[Type], owner: Symbol): Type = self.refinedType(parents, owner)
+    def refinedType(parents: List[Type], owner: Symbol, decls: Scope): Type = self.RefinedType(parents, decls, owner)
+    def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type = self.refinedType(parents, owner, decls, pos)
+    def intersectionType(tps: List[Type]): Type = self.intersectionType(tps)
+    def intersectionType(tps: List[Type], owner: Symbol): Type = self.intersectionType(tps, owner)
+    def classInfoType(parents: List[Type], decls: Scope, typeSymbol: Symbol): ClassInfoType = self.ClassInfoType(parents, decls, typeSymbol)
+    def methodType(params: List[Symbol], resultType: Type): MethodType = self.MethodType(params, resultType)
+    def nullaryMethodType(resultType: Type): NullaryMethodType = self.NullaryMethodType(resultType)
+    def polyType(typeParams: List[Symbol], resultType: Type): PolyType = self.PolyType(typeParams, resultType)
+    def existentialType(quantified: List[Symbol], underlying: Type): ExistentialType = self.ExistentialType(quantified, underlying)
+    def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type = self.existentialAbstraction(tparams, tpe0)
+    def annotatedType(annotations: List[Annotation], underlying: Type): AnnotatedType = self.AnnotatedType(annotations, underlying)
+    def typeBounds(lo: Type, hi: Type): TypeBounds = self.TypeBounds(lo, hi)
+    def boundedWildcardType(bounds: TypeBounds): BoundedWildcardType = self.BoundedWildcardType(bounds)
+
+    def subpatterns(tree: Tree): Option[List[Tree]] = tree.attachments.get[SubpatternsAttachment].map(_.patterns.map(_.duplicate))
+
+    type Decorators = MacroDecoratorApi
+    lazy val decorators: Decorators = new MacroDecoratorApi {
+      override type ScopeDecorator[T <: Scope] = MacroScopeDecoratorApi[T]
+      override implicit def scopeDecorator[T <: Scope](scope: T): ScopeDecorator[T] = new MacroScopeDecoratorApi[T](scope)
+      override type TreeDecorator[T <: Tree] = MacroTreeDecoratorApi[T]
+      override implicit def treeDecorator[T <: Tree](tree: T): TreeDecorator[T] = new MacroTreeDecoratorApi[T](tree)
+      override type TypeTreeDecorator[T <: TypeTree] = MacroTypeTreeDecoratorApi[T]
+      override implicit def typeTreeDecorator[T <: TypeTree](tt: T): TypeTreeDecorator[T] = new MacroTypeTreeDecoratorApi[T](tt)
+      override type SymbolDecorator[T <: Symbol] = MacroSymbolDecoratorApi[T]
+      override implicit def symbolDecorator[T <: Symbol](symbol: T): SymbolDecorator[T] = new MacroSymbolDecoratorApi[T](symbol)
+      override type TypeDecorator[T <: Type] = TypeDecoratorApi[T]
+      override implicit def typeDecorator[T <: Type](tp: T): TypeDecorator[T] = new TypeDecoratorApi[T](tp)
+    }
+  }
+
+  lazy val treeBuild = new self.TreeGen {
+    def mkAttributedQualifier(tpe: Type): Tree = self.gen.mkAttributedQualifier(tpe)
+    def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = self.gen.mkAttributedQualifier(tpe, termSym)
+    def mkAttributedRef(pre: Type, sym: Symbol): RefTree = self.gen.mkAttributedRef(pre, sym)
+    def mkAttributedRef(sym: Symbol): RefTree = self.gen.mkAttributedRef(sym)
+    def stabilize(tree: Tree): Tree = self.gen.stabilize(tree)
+    def mkAttributedStableRef(pre: Type, sym: Symbol): Tree = self.gen.mkAttributedStableRef(pre, sym)
+    def mkAttributedStableRef(sym: Symbol): Tree = self.gen.mkAttributedStableRef(sym)
+    def mkUnattributedRef(sym: Symbol): RefTree = self.gen.mkUnattributedRef(sym)
+    def mkUnattributedRef(fullName: Name): RefTree = self.gen.mkUnattributedRef(fullName)
+    def mkAttributedThis(sym: Symbol): This = self.gen.mkAttributedThis(sym)
+    def mkAttributedIdent(sym: Symbol): RefTree = self.gen.mkAttributedIdent(sym)
+    def mkAttributedSelect(qual: Tree, sym: Symbol): RefTree = self.gen.mkAttributedSelect(qual, sym)
+    def mkMethodCall(receiver: Symbol, methodName: Name, targs: List[Type], args: List[Tree]): Tree = self.gen.mkMethodCall(receiver, methodName, targs, args)
+    def mkMethodCall(method: Symbol, targs: List[Type], args: List[Tree]): Tree = self.gen.mkMethodCall(method, targs, args)
+    def mkMethodCall(method: Symbol, args: List[Tree]): Tree = self.gen.mkMethodCall(method, args)
+    def mkMethodCall(target: Tree, args: List[Tree]): Tree = self.gen.mkMethodCall(target, args)
+    def mkMethodCall(receiver: Symbol, methodName: Name, args: List[Tree]): Tree = self.gen.mkMethodCall(receiver, methodName, args)
+    def mkMethodCall(receiver: Tree, method: Symbol, targs: List[Type], args: List[Tree]): Tree = self.gen.mkMethodCall(receiver, method, targs, args)
+    def mkMethodCall(target: Tree, targs: List[Type], args: List[Tree]): Tree = self.gen.mkMethodCall(target, targs, args)
+    def mkNullaryCall(method: Symbol, targs: List[Type]): Tree = self.gen.mkNullaryCall(method, targs)
+    def mkRuntimeUniverseRef: Tree = self.gen.mkRuntimeUniverseRef
+    def mkZero(tp: Type): Tree = self.gen.mkZero(tp)
+    def mkCast(tree: Tree, pt: Type): Tree = self.gen.mkCast(tree, pt)
+  }
+}
\ No newline at end of file
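
A small usage sketch of the internal API surfaced by this new file, restricted to signatures that appear above (typeRef, newScopeWith, enter); the runtime-universe setup is an assumption made for illustration:

    import scala.reflect.runtime.{universe => ru}
    import ru._

    // build List[Int] explicitly via internal.typeRef(pre, sym, args)
    val listOfInt: Type =
      internal.typeRef(NoPrefix, typeOf[List[Int]].typeSymbol, List(typeOf[Int]))

    // create a fresh scope and enter a symbol into it
    val getSym = typeOf[Option[Int]].member(TermName("get"))
    val scope  = internal.newScopeWith()
    internal.enter(scope, getSym)
    scope.toList  // List(method get)
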
diff --git a/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala b/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala
new file mode 100644
index 0000000..fb1cdb3
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala
@@ -0,0 +1,47 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+package scala
+package reflect
+package internal
+
+import scala.language.implicitConversions
+import java.lang.{ Class => jClass }
+import java.lang.annotation.{ Annotation => jAnnotation }
+import java.lang.reflect.{
+  Member => jMember, Constructor => jConstructor, Method => jMethod,
+  AnnotatedElement => jAnnotatedElement, Type => jType,
+  TypeVariable => jTypeVariable
+}
+
+/** This class tries to abstract over some of the duplication
+ *  in java.lang.reflect.{ Method, Constructor }.
+ */
+class JMethodOrConstructor(val member: jMember with jAnnotatedElement) {
+  def isVarArgs: Boolean = member match {
+    case m: jMethod         => m.isVarArgs
+    case m: jConstructor[_] => m.isVarArgs
+  }
+  def typeParams: Array[_ <: jTypeVariable[_]] = member match {
+    case m: jMethod         => m.getTypeParameters
+    case m: jConstructor[_] => m.getTypeParameters
+  }
+  def paramTypes: Array[jType] = member match {
+    case m: jMethod         => m.getGenericParameterTypes
+    case m: jConstructor[_] => m.getGenericParameterTypes
+  }
+  def paramAnnotations: Array[Array[jAnnotation]] = member match {
+    case m: jMethod         => m.getParameterAnnotations
+    case m: jConstructor[_] => m.getParameterAnnotations
+  }
+  def resultType: jType = member match {
+    case m: jMethod         => m.getGenericReturnType
+    case m: jConstructor[_] => classOf[Unit]
+  }
+}
+
+object JMethodOrConstructor {
+  implicit def liftMethodToJmoc(m: jMethod): JMethodOrConstructor              = new JMethodOrConstructor(m)
+  implicit def liftConstructorToJmoc(m: jConstructor[_]): JMethodOrConstructor = new JMethodOrConstructor(m)
+}
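
A short illustration of the wrapper defined above. JMethodOrConstructor is compiler-internal, so this is a sketch of the idea rather than user-facing API; the reflective lookups themselves are plain JDK calls:

    import java.lang.reflect.{ Constructor => jConstructor, Method => jMethod }

    // both kinds of member answer the same questions through one wrapper;
    // the companion's implicit conversions lift them automatically
    def arity(m: JMethodOrConstructor): Int = m.paramTypes.length

    val ctor: jConstructor[_] = classOf[StringBuilder].getConstructor(classOf[String])
    val meth: jMethod         = classOf[String].getMethod("substring", classOf[Int])

    arity(ctor)  // 1, lifted via liftConstructorToJmoc
    arity(meth)  // 1, lifted via liftMethodToJmoc
    new JMethodOrConstructor(ctor).resultType  // classOf[Unit], per the constructor case above
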
diff --git a/src/reflect/scala/reflect/internal/JavaAccFlags.scala b/src/reflect/scala/reflect/internal/JavaAccFlags.scala
new file mode 100644
index 0000000..0a33b8c
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/JavaAccFlags.scala
@@ -0,0 +1,84 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+package scala
+package reflect
+package internal
+
+import java.lang.{ Class => jClass }
+import java.lang.reflect.{ Member => jMember, Constructor => jConstructor, Field => jField, Method => jMethod }
+import JavaAccFlags._
+import ClassfileConstants._
+
+/** A value class which encodes the access_flags (JVMS 4.1)
+ *  for a field, method, or class. The low 16 bits are the same
+ *  as those returned by java.lang.reflect.Member#getModifiers
+ *  and found in the bytecode.
+ *
+ *  The high bits encode whether the access flags are directly
+ *  associated with a class, constructor, field, or method.
+ */
+final class JavaAccFlags private (val coded: Int) extends AnyVal {
+  private def has(mask: Int) = (flags & mask) != 0
+  private def flagCarrierId  = coded >>> 16
+  private def flags          = coded & 0xFFFF
+
+  def isAbstract     = has(JAVA_ACC_ABSTRACT)
+  def isAnnotation   = has(JAVA_ACC_ANNOTATION)
+  def isBridge       = has(JAVA_ACC_BRIDGE)
+  def isEnum         = has(JAVA_ACC_ENUM)
+  def isFinal        = has(JAVA_ACC_FINAL)
+  def isInterface    = has(JAVA_ACC_INTERFACE)
+  def isNative       = has(JAVA_ACC_NATIVE)
+  def isPrivate      = has(JAVA_ACC_PRIVATE)
+  def isProtected    = has(JAVA_ACC_PROTECTED)
+  def isPublic       = has(JAVA_ACC_PUBLIC)
+  def isStatic       = has(JAVA_ACC_STATIC)
+  def isStrictFp     = has(JAVA_ACC_STRICT)
+  def isSuper        = has(JAVA_ACC_SUPER)
+  def isSynchronized = has(JAVA_ACC_SYNCHRONIZED)
+  def isSynthetic    = has(JAVA_ACC_SYNTHETIC)
+  def isTransient    = has(JAVA_ACC_TRANSIENT)
+  def isVarargs      = has(JAVA_ACC_VARARGS)
+  def isVolatile     = has(JAVA_ACC_VOLATILE)
+
+  /** Do these flags describe a member which has either protected or package access?
+   *  Such access in java is encoded in scala as protected[foo] or private[foo], where
+   *  `foo` is the defining package.
+   */
+  def hasPackageAccessBoundary = !has(JAVA_ACC_PRIVATE | JAVA_ACC_PUBLIC) // equivalently, allows protected or package level access
+  def isPackageProtected       = !has(JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)
+
+  def toJavaFlags: Int = flags
+  def toScalaFlags: Long = flagCarrierId match {
+    case Method | Constructor => FlagTranslation methodFlags flags
+    case Class                => FlagTranslation classFlags flags
+    case _                    => FlagTranslation fieldFlags flags
+  }
+}
+
+object JavaAccFlags {
+  private val Unknown     = 0
+  private val Class       = 1
+  private val Field       = 2
+  private val Method      = 3
+  private val Constructor = 4
+
+  private def create(flagCarrier: Int, access_flags: Int): JavaAccFlags =
+    new JavaAccFlags((flagCarrier << 16) | (access_flags & 0xFFFF))
+
+  def classFlags(flags: Int): JavaAccFlags       = create(Class, flags)
+  def methodFlags(flags: Int): JavaAccFlags      = create(Method, flags)
+  def fieldFlags(flags: Int): JavaAccFlags       = create(Field, flags)
+  def constructorFlags(flags: Int): JavaAccFlags = create(Constructor, flags)
+
+  def apply(access_flags: Int): JavaAccFlags = create(Unknown, access_flags)
+  def apply(clazz: jClass[_]): JavaAccFlags  = classFlags(clazz.getModifiers)
+  def apply(member: jMember): JavaAccFlags   = member match {
+    case x: jConstructor[_] => constructorFlags(x.getModifiers)
+    case x: jMethod         => methodFlags(x.getModifiers)
+    case x: jField          => fieldFlags(x.getModifiers)
+    case _                  => apply(member.getModifiers)
+  }
+}
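
A quick sketch of how these flag wrappers read, again internal-only and shown purely for illustration; the reflective lookups are standard JDK calls:

    // class-level flags: java.lang.String is public and final
    val stringFlags = JavaAccFlags(classOf[String])
    stringFlags.isPublic                  // true
    stringFlags.isFinal                   // true
    stringFlags.hasPackageAccessBoundary  // false: String is public, so no package boundary applies

    // member-level flags: Object.clone() is protected
    val cloneMethod = classOf[Object].getDeclaredMethod("clone")
    JavaAccFlags(cloneMethod).isProtected  // true
    JavaAccFlags(cloneMethod).toScalaFlags // Scala flag bits via FlagTranslation.methodFlags
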
diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala
index 3ab7b20..8ae201f 100644
--- a/src/reflect/scala/reflect/internal/Kinds.scala
+++ b/src/reflect/scala/reflect/internal/Kinds.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import scala.collection.{ mutable, immutable }
@@ -36,7 +37,7 @@ trait Kinds {
     private def varStr(s: Symbol): String =
       if (s.isCovariant) "covariant"
       else if (s.isContravariant) "contravariant"
-      else "invariant";
+      else "invariant"
 
     private def qualify(a0: Symbol, b0: Symbol): String = if (a0.toString != b0.toString) "" else {
       if((a0 eq b0) || (a0.owner eq b0.owner)) ""
@@ -86,15 +87,15 @@ trait Kinds {
   // plan: split into kind inference and subkinding
   // every Type has a (cached) Kind
   def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean =
-    checkKindBounds0(tparams, targs, pre, owner, false).isEmpty
+    checkKindBounds0(tparams, targs, pre, owner, explainErrors = false).isEmpty
 
   /** Check whether `sym1`'s variance conforms to `sym2`'s variance.
    *
    *  If `sym2` is invariant, `sym1`'s variance is irrelevant. Otherwise they must be equal.
    */
   private def variancesMatch(sym1: Symbol, sym2: Symbol) = (
-       sym2.variance==0
-    || sym1.variance==sym2.variance
+       sym2.variance.isInvariant
+    || sym1.variance == sym2.variance
   )
 
   /** Check well-kindedness of type application (assumes arities are already checked) -- @M
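
In concrete terms, the variance rule just above says: an invariant expected parameter accepts a type constructor of any variance, otherwise the declared variances must coincide. A tiny sketch with hypothetical type names:

    trait NeedsCovariant[F[+_]]  // expects a covariant type constructor
    trait NeedsAny[F[_]]         // invariant parameter: accepts any variance

    class Box[+A]                // covariant
    class Cell[A]                // invariant

    object Ok1 extends NeedsCovariant[Box]  // variances match
    object Ok2 extends NeedsAny[Box]        // invariant expectation accepts covariant
    object Ok3 extends NeedsAny[Cell]
    // object Bad extends NeedsCovariant[Cell]  // rejected: Cell is invariant where
    //                                          // F demands covariance
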
@@ -145,7 +146,7 @@ trait Kinds {
           kindErrors = f(kindErrors)
       }
 
-      if (settings.debug.value) {
+      if (settings.debug) {
         log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner)
         log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner)
         log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs)
@@ -202,7 +203,7 @@ trait Kinds {
       else NoKindErrors
     }
 
-    if (settings.debug.value && (tparams.nonEmpty || targs.nonEmpty)) log(
+    if (settings.debug && (tparams.nonEmpty || targs.nonEmpty)) log(
       "checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", "
       + owner + ", " + explainErrors + ")"
     )
@@ -230,4 +231,182 @@ trait Kinds {
       }
     }
   }
+
+  /**
+   * The data structure describing the kind of a given type.
+   *
+   * Proper types are represented using ProperTypeKind.
+   *
+   * Type constructors are represented using TypeConKind.
+   */
+  abstract class Kind {
+    import Kind.StringState
+    def description: String
+    def order: Int
+    def bounds: TypeBounds
+
+    /** Scala syntax notation of this kind.
+     * Proper types are expressed as A.
+     * Type constructors are expressed as F[k1 >: lo <: hi, k2, ...] where k1, k2, ... are parameter kinds.
+     * If bounds exist at any level, it preserves the type variable names. Otherwise,
+     * it uses prescribed letters for each level: A, F, X, Y, Z.
+     */
+    def scalaNotation: String
+
+    /** Kind notation used in http://adriaanm.github.com/files/higher.pdf.
+     * Proper types are expressed as *.
+     * Type constructors are expressed as * -> *(lo, hi) -(+)-> *.
+     */
+    def starNotation: String
+
+    /** Whether this kind contains bounds, either directly or in one of its arguments.
+     */
+    def hasBounds: Boolean = !bounds.isEmptyBounds
+
+    private[internal] def buildState(sym: Symbol, v: Variance)(s: StringState): StringState
+  }
+  object Kind {
+    private[internal] sealed trait ScalaNotation
+    private[internal] sealed case class Head(order: Int, n: Option[Int], alias: Option[String]) extends ScalaNotation {
+      override def toString: String = {
+        alias getOrElse {
+          typeAlias(order) + n.map(_.toString).getOrElse("")
+        }
+      }
+      private def typeAlias(x: Int): String =
+        x match {
+          case 0 => "A"
+          case 1 => "F"
+          case 2 => "X"
+          case 3 => "Y"
+          case 4 => "Z"
+          case n if n < 12 => ('O'.toInt - 5 + n).toChar.toString
+          case _ => "V"
+        }
+    }
+    private[internal] sealed case class Text(value: String) extends ScalaNotation {
+      override def toString: String = value
+    }
+    private[internal] case class StringState(tokens: Seq[ScalaNotation]) {
+      override def toString: String = tokens.mkString
+      def append(value: String): StringState = StringState(tokens :+ Text(value))
+      def appendHead(order: Int, sym: Symbol): StringState = {
+        val n = countByOrder(order) + 1
+        val alias = if (sym eq NoSymbol) None
+                    else Some(sym.nameString)
+        StringState(tokens :+ Head(order, Some(n), alias))
+      }
+      def countByOrder(o: Int): Int = tokens count {
+        case Head(`o`, _, _) => true
+        case t               => false
+      }
+      // Replace Head(o, Some(1), a) with Head(o, None, a) if countByOrder(o) <= 1, so F1[A] becomes F[A]
+      def removeOnes: StringState = {
+        val maxOrder = (tokens map {
+          case Head(o, _, _) => o
+          case _             => 0
+        }).max
+        StringState((tokens /: (0 to maxOrder)) { (ts: Seq[ScalaNotation], o: Int) =>
+          if (countByOrder(o) <= 1)
+            ts map {
+              case Head(`o`, _, a) => Head(o, None, a)
+              case t               => t
+            }
+          else ts
+        })
+      }
+      // Replace Head(o, n, Some(_)) with Head(o, n, None), so F[F] becomes F[A].
+      def removeAlias: StringState = {
+        StringState(tokens map {
+          case Head(o, n, Some(_)) => Head(o, n, None)
+          case t                   => t
+        })
+      }
+    }
+    private[internal] object StringState {
+      def empty: StringState = StringState(Seq())
+    }
+    def FromParams(tparams: List[Symbol]): Type = GenPolyType(tparams, AnyTpe)
+    def Wildcard: Type                          = WildcardType
+  }
+  class ProperTypeKind(val bounds: TypeBounds) extends Kind {
+    import Kind.StringState
+    val description: String = "This is a proper type."
+    val order = 0
+    private[internal] def buildState(sym: Symbol, v: Variance)(s: StringState): StringState = {
+      s.append(v.symbolicString).appendHead(order, sym).append(bounds.scalaNotation(_.toString))
+    }
+    def scalaNotation: String = Kind.Head(order, None, None) + bounds.scalaNotation(_.toString)
+    def starNotation: String = "*" + bounds.starNotation(_.toString)
+  }
+  object ProperTypeKind {
+    def apply: ProperTypeKind = this(TypeBounds.empty)
+    def apply(bounds: TypeBounds): ProperTypeKind = new ProperTypeKind(bounds)
+    def unapply(ptk: ProperTypeKind): Some[TypeBounds] = Some(ptk.bounds)
+  }
+
+  class TypeConKind(val bounds: TypeBounds, val args: Seq[TypeConKind.Argument]) extends Kind {
+    import Kind.StringState
+    val order = (args map (_.kind.order)).max + 1
+    def description: String =
+      if (order == 1) "This is a type constructor: a 1st-order-kinded type."
+      else  "This is a type constructor that takes type constructor(s): a higher-kinded type."
+    override def hasBounds: Boolean = super.hasBounds || args.exists(_.kind.hasBounds)
+    def scalaNotation: String = {
+      val s = buildState(NoSymbol, Variance.Invariant)(StringState.empty).removeOnes
+      val s2 = if (hasBounds) s
+               else s.removeAlias
+      s2.toString
+    }
+    private[internal] def buildState(sym: Symbol, v: Variance)(s0: StringState): StringState = {
+      var s: StringState = s0
+      s = s.append(v.symbolicString).appendHead(order, sym).append("[")
+      args.zipWithIndex foreach { case (arg, i) =>
+        s = arg.kind.buildState(arg.sym, arg.variance)(s)
+        if (i != args.size - 1) {
+          s = s.append(",")
+        }
+      }
+      s = s.append("]").append(bounds.scalaNotation(_.toString))
+      s
+    }
+    def starNotation: String = {
+      import Variance._
+      (args map { arg =>
+        (if (arg.kind.order == 0) arg.kind.starNotation
+        else "(" + arg.kind.starNotation + ")") +
+        (if (arg.variance == Invariant) " -> "
+        else " -(" + arg.variance.symbolicString + ")-> ")
+      }).mkString + "*" + bounds.starNotation(_.toString)
+    }
+  }
+  object TypeConKind {
+    def apply(args: Seq[TypeConKind.Argument]): TypeConKind = this(TypeBounds.empty, args)
+    def apply(bounds: TypeBounds, args: Seq[TypeConKind.Argument]): TypeConKind = new TypeConKind(bounds, args)
+    def unapply(tck: TypeConKind): Some[(TypeBounds, Seq[TypeConKind.Argument])] = Some((tck.bounds, tck.args))
+    case class Argument(variance: Variance, kind: Kind)(val sym: Symbol) {}
+  }
+
+  /**
+   * Starting from a Symbol (sym) or a Type (tpe), infer the kind that classifies it (sym.tpeHK/tpe).
+   */
+  object inferKind {
+    import TypeConKind.Argument
+
+    abstract class InferKind {
+      protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind
+      protected def infer(sym: Symbol, topLevel: Boolean): Kind = infer(sym.tpeHK, sym.owner, topLevel)
+      def apply(sym: Symbol): Kind = infer(sym, true)
+      def apply(tpe: Type, owner: Symbol): Kind = infer(tpe, owner, true)
+    }
+
+    def apply(pre: Type): InferKind = new InferKind {
+      protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind = {
+        val bounds = if (topLevel) TypeBounds.empty
+                     else tpe.asSeenFrom(pre, owner).bounds
+        if(!tpe.isHigherKinded) ProperTypeKind(bounds)
+        else TypeConKind(bounds, tpe.typeParams map { p => Argument(p.variance, infer(p, false))(p) })
+      }
+    }
+  }
 }
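
For a concrete feel of the two notations documented above: this Kind machinery backs the REPL's
:kind command in 2.11. An approximate usage transcript (exact wording and symbol prefixes may
differ from a real session):

    scala> :kind -v Either
    scala.util.Either's kind is F[+A1,+A2]
    * -(+)-> * -(+)-> *
    This is a type constructor: a 1st-order-kinded type.

    scala> :kind -v List
    scala.collection.immutable.List's kind is F[+A]
    * -(+)-> *
    This is a type constructor: a 1st-order-kinded type.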
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index 0beb8e3..4a35e02 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import Flags._
@@ -19,6 +20,8 @@ trait Mirrors extends api.Mirrors {
   trait RootSymbol extends Symbol { def mirror: Mirror }
 
   abstract class RootsBase(rootOwner: Symbol) extends scala.reflect.api.Mirror[Mirrors.this.type] { thisMirror =>
+    private[this] var initialized = false
+    def isMirrorInitialized = initialized
 
     protected[scala] def rootLoader: LazyType
 
@@ -27,24 +30,26 @@ trait Mirrors extends api.Mirrors {
     val EmptyPackageClass: ClassSymbol
     val EmptyPackage: ModuleSymbol
 
+    def symbolOf[T: universe.WeakTypeTag]: universe.TypeSymbol = universe.weakTypeTag[T].in(this).tpe.typeSymbolDirect.asType
+
     def findMemberFromRoot(fullName: Name): Symbol = {
       val segs = nme.segments(fullName.toString, fullName.isTermName)
       if (segs.isEmpty) NoSymbol
       else definitions.findNamedMember(segs.tail, RootClass.info member segs.head)
     }
 
-    /** Todo: organize similar to mkStatic in reflect.Base */
+    /** Todo: organize similar to mkStatic in scala.reflect.Base */
     private def getModuleOrClass(path: Name, len: Int): Symbol = {
       val point = path lastPos('.', len - 1)
       val owner =
         if (point > 0) getModuleOrClass(path.toTermName, point)
         else RootClass
       val name = path subName (point + 1, len)
-      var sym = owner.info member name
+      val sym = owner.info member name
       val result = if (path.isTermName) sym.suchThat(_ hasFlag MODULE) else sym
       if (result != NoSymbol) result
       else {
-        if (settings.debug.value) { log(sym.info); log(sym.info.members) }//debug
+        if (settings.debug) { log(sym.info); log(sym.info.members) }//debug
         thisMirror.missingHook(owner, name) orElse {
           MissingRequirementError.notFound((if (path.isTermName) "object " else "class ")+path+" in "+thisMirror)
         }
@@ -76,7 +81,9 @@ trait Mirrors extends api.Mirrors {
 
     protected def universeMissingHook(owner: Symbol, name: Name): Symbol = thisUniverse.missingHook(owner, name)
 
-    private[scala] def missingHook(owner: Symbol, name: Name): Symbol = mirrorMissingHook(owner, name) orElse universeMissingHook(owner, name)
+    private[scala] def missingHook(owner: Symbol, name: Name): Symbol = logResult(s"missingHook($owner, $name)")(
+      mirrorMissingHook(owner, name) orElse universeMissingHook(owner, name)
+    )
 
     // todo: get rid of most the methods here and keep just staticClass/Module/Package
 
@@ -91,10 +98,6 @@ trait Mirrors extends api.Mirrors {
       }
     }
 
-    @deprecated("Use getClassByName", "2.10.0")
-    def getClass(fullname: Name): ClassSymbol =
-      getClassByName(fullname)
-
     def getClassByName(fullname: Name): ClassSymbol =
       ensureClassSymbol(fullname.toString, getModuleOrClass(fullname.toTypeName))
 
@@ -116,25 +119,22 @@ trait Mirrors extends api.Mirrors {
      *  Compiler might ignore them, but they should be loadable with macros.
      */
     override def staticClass(fullname: String): ClassSymbol =
-      ensureClassSymbol(fullname, staticModuleOrClass(newTypeNameCached(fullname)))
+      try ensureClassSymbol(fullname, staticModuleOrClass(newTypeNameCached(fullname)))
+      catch { case mre: MissingRequirementError => throw new ScalaReflectionException(mre.msg) }
 
     /************************ loaders of module symbols ************************/
 
     private def ensureModuleSymbol(fullname: String, sym: Symbol, allowPackages: Boolean): ModuleSymbol =
       sym match {
-        case x: ModuleSymbol if allowPackages || !x.isPackage => x
-        case _                                                => MissingRequirementError.notFound("object " + fullname)
+        case x: ModuleSymbol if allowPackages || !x.hasPackageFlag => x
+        case _                                                     => MissingRequirementError.notFound("object " + fullname)
       }
 
-    @deprecated("Use getModuleByName", "2.10.0")
-    def getModule(fullname: Name): ModuleSymbol =
-      getModuleByName(fullname)
-
     def getModuleByName(fullname: Name): ModuleSymbol =
       ensureModuleSymbol(fullname.toString, getModuleOrClass(fullname.toTermName), allowPackages = true)
 
     def getRequiredModule(fullname: String): ModuleSymbol =
-      getModule(newTermNameCached(fullname))
+      getModuleByName(newTermNameCached(fullname))
 
     // TODO: What syntax do we think should work here? Say you have an object
     // like scala.Predef.  You can't say requiredModule[scala.Predef] since there's
@@ -150,7 +150,7 @@ trait Mirrors extends api.Mirrors {
       getModuleIfDefined(newTermNameCached(fullname))
 
     def getModuleIfDefined(fullname: Name): Symbol =
-      wrapMissing(getModule(fullname.toTermName))
+      wrapMissing(getModuleByName(fullname.toTermName))
 
     /** @inheritdoc
      *
@@ -158,24 +158,29 @@ trait Mirrors extends api.Mirrors {
      *  Compiler might ignore them, but they should be loadable with macros.
      */
     override def staticModule(fullname: String): ModuleSymbol =
-      ensureModuleSymbol(fullname, staticModuleOrClass(newTermNameCached(fullname)), allowPackages = false)
+      try ensureModuleSymbol(fullname, staticModuleOrClass(newTermNameCached(fullname)), allowPackages = false)
+      catch { case mre: MissingRequirementError => throw new ScalaReflectionException(mre.msg) }
 
     /************************ loaders of package symbols ************************/
 
     private def ensurePackageSymbol(fullname: String, sym: Symbol, allowModules: Boolean): ModuleSymbol =
       sym match {
-        case x: ModuleSymbol if allowModules || x.isPackage => x
-        case _                                              => MissingRequirementError.notFound("package " + fullname)
+        case x: ModuleSymbol if allowModules || x.hasPackageFlag => x
+        case _                                                   => MissingRequirementError.notFound("package " + fullname)
       }
 
-    def getPackage(fullname: Name): ModuleSymbol =
+    def getPackage(fullname: TermName): ModuleSymbol =
       ensurePackageSymbol(fullname.toString, getModuleOrClass(fullname), allowModules = true)
 
-    def getRequiredPackage(fullname: String): ModuleSymbol =
+    def getPackageIfDefined(fullname: TermName): Symbol =
+      wrapMissing(getPackage(fullname))
+
+    @deprecated("Use getPackage", "2.11.0") def getRequiredPackage(fullname: String): ModuleSymbol =
       getPackage(newTermNameCached(fullname))
 
-    def getPackageObject(fullname: String): ModuleSymbol =
-      (getPackage(newTermName(fullname)).info member nme.PACKAGE) match {
+    def getPackageObject(fullname: String): ModuleSymbol = getPackageObject(newTermName(fullname))
+    def getPackageObject(fullname: TermName): ModuleSymbol =
+      (getPackage(fullname).info member nme.PACKAGE) match {
         case x: ModuleSymbol => x
         case _               => MissingRequirementError.notFound("package object " + fullname)
       }
@@ -183,17 +188,27 @@ trait Mirrors extends api.Mirrors {
     def getPackageObjectIfDefined(fullname: String): Symbol =
       getPackageObjectIfDefined(newTermNameCached(fullname))
 
-    def getPackageObjectIfDefined(fullname: Name): Symbol =
-      wrapMissing(getPackageObject(fullname.toTermName))
+    def getPackageObjectIfDefined(fullname: TermName): Symbol =
+      wrapMissing(getPackageObject(fullname))
+
+    final def getPackageObjectWithMember(pre: Type, sym: Symbol): Symbol = {
+      // The owner of a symbol which requires package qualification may be the
+      // package object itself, but it could also be any superclass of the package
+      // object.  In the latter case, we must go through the qualifier's info
+      // to obtain the right symbol.
+      if (sym.owner.isModuleClass) sym.owner.sourceModule // fast path, if the member is owned by a module class, that must be linked to the package object
+      else pre member nme.PACKAGE                         // otherwise we have to findMember
+    }
 
     override def staticPackage(fullname: String): ModuleSymbol =
-      ensurePackageSymbol(fullname.toString, getModuleOrClass(newTermNameCached(fullname)), allowModules = false)
+      try ensurePackageSymbol(fullname.toString, getModuleOrClass(newTermNameCached(fullname)), allowModules = false)
+      catch { case mre: MissingRequirementError => throw new ScalaReflectionException(mre.msg) }
 
     /************************ helpers ************************/
 
     def erasureName[T: ClassTag] : String = {
-      /** We'd like the String representation to be a valid
-       *  scala type, so we have to decode the jvm's secret language.
+      /* We'd like the String representation to be a valid
+       * scala type, so we have to decode the jvm's secret language.
        */
       def erasureString(clazz: Class[_]): String = {
         if (clazz.isArray) "Array[" + erasureString(clazz.getComponentType) + "]"
@@ -202,7 +217,7 @@ trait Mirrors extends api.Mirrors {
       erasureString(classTag[T].runtimeClass)
     }
 
-   @inline private def wrapMissing(body: => Symbol): Symbol =
+   @inline final def wrapMissing(body: => Symbol): Symbol =
       try body
       catch { case _: MissingRequirementError => NoSymbol }
 
@@ -228,6 +243,7 @@ trait Mirrors extends api.Mirrors {
     // }
 
     def init() {
+      if (initialized) return
       // Still fiddling with whether it's cleaner to do some of this setup here
       // or from constructors.  The latter approach tends to invite init order issues.
 
@@ -239,6 +255,21 @@ trait Mirrors extends api.Mirrors {
 
       RootClass.info.decls enter EmptyPackage
       RootClass.info.decls enter RootPackage
+
+      if (rootOwner != NoSymbol) {
+        // synthetic core classes are only present in root mirrors
+        // because Definitions.scala, which initializes and enters them, only affects rootMirror
+        // therefore we need to enter them manually for non-root mirrors
+        definitions.syntheticCoreClasses foreach (theirSym => {
+          val theirOwner = theirSym.owner
+          assert(theirOwner.isPackageClass, s"theirSym = $theirSym, theirOwner = $theirOwner")
+          val ourOwner = staticPackage(theirOwner.fullName).moduleClass
+          val ourSym = theirSym // just copy the symbol into our branch of the symbol table
+          ourOwner.info.decls enterIfNew ourSym
+        })
+      }
+
+      initialized = true
     }
   }
 
@@ -262,34 +293,46 @@ trait Mirrors extends api.Mirrors {
       def mirror                      = thisMirror.asInstanceOf[Mirror]
     }
 
-    // This is the package _root_.  The actual root cannot be referenced at
-    // the source level, but _root_ is essentially a function => <root>.
-    final object RootPackage extends ModuleSymbol(rootOwner, NoPosition, nme.ROOTPKG) with RootSymbol {
+    class RootPackage extends ModuleSymbol(rootOwner, NoPosition, nme.ROOTPKG) with RootSymbol {
       this setInfo NullaryMethodType(RootClass.tpe)
-      RootClass.sourceModule = this
 
       override def isRootPackage = true
     }
+
+    // This is the package _root_.  The actual root cannot be referenced at
+    // the source level, but _root_ is essentially a function => <root>.
+    lazy val RootPackage = new RootPackage
+
+    class RootClass extends PackageClassSymbol(rootOwner, NoPosition, tpnme.ROOT) with RootSymbol {
+      this setInfo rootLoader
+
+      override def isRoot            = true
+      override def isEffectiveRoot   = true
+      override def isNestedClass     = false
+      override def sourceModule      = RootPackage
+    }
+
     // This is <root>, the actual root of everything except the package _root_.
     // <root> and _root_ (RootPackage and RootClass) should be the only "well known"
     // symbols owned by NoSymbol.  All owner chains should go through RootClass,
     // although it is probable that some symbols are created as direct children
     // of NoSymbol to ensure they will not be stumbled upon.  (We should designate
     // a better encapsulated place for that.)
-    final object RootClass extends PackageClassSymbol(rootOwner, NoPosition, tpnme.ROOT) with RootSymbol {
-      this setInfo rootLoader
+    lazy val RootClass = new RootClass
 
-      override def isRoot            = true
-      override def isEffectiveRoot   = true
-      override def isNestedClass     = false
-    }
-    // The empty package, which holds all top level types without given packages.
-    final object EmptyPackage extends ModuleSymbol(RootClass, NoPosition, nme.EMPTY_PACKAGE_NAME) with WellKnownSymbol {
+    class EmptyPackage extends ModuleSymbol(RootClass, NoPosition, nme.EMPTY_PACKAGE_NAME) with WellKnownSymbol {
       override def isEmptyPackage = true
     }
-    final object EmptyPackageClass extends PackageClassSymbol(RootClass, NoPosition, tpnme.EMPTY_PACKAGE_NAME) with WellKnownSymbol {
+
+    // The empty package, which holds all top level types without given packages.
+    lazy val EmptyPackage = new EmptyPackage
+
+    class EmptyPackageClass extends PackageClassSymbol(RootClass, NoPosition, tpnme.EMPTY_PACKAGE_NAME) with WellKnownSymbol {
       override def isEffectiveRoot     = true
       override def isEmptyPackageClass = true
+      override def sourceModule        = EmptyPackage
     }
+
+    lazy val EmptyPackageClass = new EmptyPackageClass
   }
 }
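
The loader methods touched above (staticClass / staticModule / staticPackage) are also the ones
exposed through the public runtime-reflection Mirror API. A small usage sketch under that
assumption; the particular names looked up are arbitrary examples:

    import scala.reflect.runtime.{universe => ru}

    object MirrorLookupDemo {
      def main(args: Array[String]): Unit = {
        val m = ru.runtimeMirror(getClass.getClassLoader)
        val listClass  = m.staticClass("scala.collection.immutable.List")   // ClassSymbol
        val listModule = m.staticModule("scala.collection.immutable.List")  // ModuleSymbol
        val scalaPkg   = m.staticPackage("scala")                           // package's ModuleSymbol
        println(s"$listClass / $listModule / $scalaPkg")
      }
    }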
diff --git a/src/reflect/scala/reflect/internal/MissingRequirementError.scala b/src/reflect/scala/reflect/internal/MissingRequirementError.scala
index 48203ca..66dbf53 100644
--- a/src/reflect/scala/reflect/internal/MissingRequirementError.scala
+++ b/src/reflect/scala/reflect/internal/MissingRequirementError.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 class MissingRequirementError private (msg: String) extends FatalError(msg) {
diff --git a/src/reflect/scala/reflect/internal/Mode.scala b/src/reflect/scala/reflect/internal/Mode.scala
new file mode 100644
index 0000000..557ec9c
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Mode.scala
@@ -0,0 +1,141 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala
+package reflect
+package internal
+
+import scala.language.implicitConversions
+
+object Mode {
+  private implicit def liftIntBitsToMode(bits: Int): Mode = apply(bits)
+  def apply(bits: Int): Mode = new Mode(bits)
+
+  /** NOmode, EXPRmode and PATTERNmode are mutually exclusive.
+   */
+  final val NOmode: Mode        = 0x000
+  final val EXPRmode: Mode      = 0x001
+  final val PATTERNmode: Mode   = 0x002
+
+  /** TYPEmode needs a comment. <-- XXX.
+   */
+  final val TYPEmode: Mode      = 0x004
+
+  /** SCCmode is orthogonal to above. When set, we are typing
+   *  the `this(...)` or `super(...)` call of a constructor.
+   */
+  final val SCCmode: Mode       = 0x008
+
+  /** FUNmode is orthogonal to above.
+   *  When set we are looking for a method or constructor.
+   */
+  final val FUNmode: Mode       = 0x010
+
+  /** POLYmode is orthogonal to above.
+   *  When set expression types can be polymorphic.
+   */
+  final val POLYmode: Mode      = 0x020
+
+  /** QUALmode is orthogonal to above. When set,
+   *  expressions may be packages or Java static modules.
+   */
+  final val QUALmode: Mode      = 0x040
+
+  /** TAPPmode is set for the function/type constructor
+   *  part of a type application. When set we do not decompose PolyTypes.
+   */
+  final val TAPPmode: Mode      = 0x080
+
+  /** LHSmode is set for the left-hand side of an assignment.
+   */
+  final val LHSmode: Mode       = 0x400
+
+  /** BYVALmode is set when we are typing an expression
+   *  that occurs in a by-value position. An expression e1 is in by-value
+   *  position within expression e2 iff it will be reduced to a value at that
+   *  position during the evaluation of e2.  Examples are by-value function
+   *  arguments or the conditional of an if-then-else clause.
+   *  This mode has been added to support continuations.
+   */
+  final val BYVALmode: Mode     = 0x8000
+
+  /** TYPEPATmode is set when we are typing a type in a pattern.
+   */
+  final val TYPEPATmode: Mode   = 0x10000
+
+  private val StickyModes: Mode       = EXPRmode | PATTERNmode | TYPEmode
+  private val StickyModesForFun: Mode = StickyModes | SCCmode
+  final val MonoQualifierModes: Mode  = EXPRmode | QUALmode
+  final val PolyQualifierModes: Mode  = EXPRmode | QUALmode | POLYmode
+  final val OperatorModes: Mode       = EXPRmode |            POLYmode | TAPPmode | FUNmode
+
+  /** Translates a mask of mode flags into something readable.
+   */
+  private val modeNameMap = Map[Int, String]( // TODO why duplicate the bitmasks here, rather than just referring to this.EXPRmode etc?
+    (1 << 0)  -> "EXPRmode",
+    (1 << 1)  -> "PATTERNmode",
+    (1 << 2)  -> "TYPEmode",
+    (1 << 3)  -> "SCCmode",
+    (1 << 4)  -> "FUNmode",
+    (1 << 5)  -> "POLYmode",
+    (1 << 6)  -> "QUALmode",
+    (1 << 7)  -> "TAPPmode",
+    (1 << 8)  -> "<>",      // formerly SUPERCONSTRmode
+    (1 << 9)  -> "<>",      // formerly SNDTRYmode
+    (1 << 10) -> "LHSmode",
+    (1 << 11) -> "<>",
+    (1 << 12) -> "<>",      // formerly STARmode
+    (1 << 13) -> "<>",      // formerly ALTmode
+    (1 << 14) -> "<>",      // formerly HKmode
+    (1 << 15) -> "BYVALmode",
+    (1 << 16) -> "TYPEPATmode"
+  ).map({ case (k, v) => Mode(k) -> v })
+}
+import Mode._
+
+final class Mode private (val bits: Int) extends AnyVal {
+  def &(other: Mode): Mode  = new Mode(bits & other.bits)
+  def |(other: Mode): Mode  = new Mode(bits | other.bits)
+  def &~(other: Mode): Mode = new Mode(bits & ~(other.bits))
+
+  def onlyTypePat = this & TYPEPATmode
+  def onlySticky  = this & Mode.StickyModes
+  def forFunMode  = this & Mode.StickyModesForFun | FUNmode | POLYmode | BYVALmode
+  def forTypeMode = if (typingPatternOrTypePat) TYPEmode | TYPEPATmode else TYPEmode
+
+  def inAll(required: Mode)    = (this & required) == required
+  def inAny(required: Mode)    = (this & required) != NOmode
+  def inNone(prohibited: Mode) = (this & prohibited) == NOmode
+
+  /** True if this mode matches every mode in the 'all' Mode,
+   *  and no modes in the 'none' Mode.
+   */
+  def in(all: Mode = NOmode, none: Mode = NOmode) = inAll(all) && inNone(none)
+
+  def inByValMode   = inAll(BYVALmode)
+  def inExprMode    = inAll(EXPRmode)
+  def inFunMode     = inAll(FUNmode)
+  def inPatternMode = inAll(PATTERNmode)
+  def inPolyMode    = inAll(POLYmode)
+  def inQualMode    = inAll(QUALmode)
+  def inSccMode     = inAll(SCCmode)
+  def inTappMode    = inAll(TAPPmode)
+  def inTypeMode    = inAll(TYPEmode)
+
+  def typingExprByValue           = inAll(EXPRmode | BYVALmode)
+  def typingExprFun               = inAll(EXPRmode | FUNmode)
+  def typingExprNotFun            = in(all = EXPRmode, none = FUNmode)
+  def typingExprNotFunNotLhs      = in(all = EXPRmode, none = FUNmode | LHSmode)
+  def typingExprNotLhs            = in(all = EXPRmode, none = LHSmode)
+  def typingExprNotValue          = in(all = EXPRmode, none = BYVALmode)
+  def typingMonoExprByValue       = in(all = EXPRmode | BYVALmode, none = POLYmode)
+  def typingConstructorPattern    = inAll(PATTERNmode | FUNmode)
+  def typingPatternNotConstructor = in(all = PATTERNmode, none = FUNmode)
+  def typingPatternOrTypePat      = inAny(PATTERNmode | TYPEPATmode)
+
+  override def toString =
+    if (this == NOmode) "NOmode"
+    else (modeNameMap filterKeys inAll).values.toList.sorted mkString "-"
+}
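
The value class above is a typed wrapper over an Int bit set; inAll/inAny/inNone are the usual
mask tests. A standalone sketch of the same tests over plain Ints (illustrative names only, not
the compiler API):

    object ModeMaskDemo {
      final val EXPRmode  = 0x001
      final val FUNmode   = 0x010
      final val BYVALmode = 0x8000

      def inAll(mode: Int, required: Int)    = (mode & required) == required
      def inAny(mode: Int, required: Int)    = (mode & required) != 0
      def inNone(mode: Int, prohibited: Int) = (mode & prohibited) == 0

      def main(args: Array[String]): Unit = {
        val mode = EXPRmode | BYVALmode
        assert(inAll(mode, EXPRmode | BYVALmode))   // corresponds to typingExprByValue
        assert(inNone(mode, FUNmode))               // not in a function position
        assert(inAny(mode, FUNmode | EXPRmode))
      }
    }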
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index c78ba72..ae9f2da 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -3,35 +3,33 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import scala.io.Codec
 import java.security.MessageDigest
 import scala.language.implicitConversions
 
-trait LowPriorityNames {
-  self: Names =>
-
-  implicit def nameToNameOps(name: Name): NameOps[Name] = new NameOps[Name](name)
-}
-
-/** The class Names ...
- *
- *  @author  Martin Odersky
- *  @version 1.0, 05/02/2005
- */
-trait Names extends api.Names with LowPriorityNames {
-  implicit def promoteTermNamesAsNecessary(name: Name): TermName = name.toTermName
-
-// Operations -------------------------------------------------------------
-
+trait Names extends api.Names {
   private final val HASH_SIZE  = 0x8000
   private final val HASH_MASK  = 0x7FFF
   private final val NAME_SIZE  = 0x20000
 
   final val nameDebug = false
 
+  // Ideally we would just synchronize unconditionally and let HotSpot's Biased Locking
+  // kick in in the compiler universe, where access to the lock is single threaded. But,
+  // objects created in the first 4 seconds of JVM startup aren't eligible for biased
+  // locking.
+  //
+  // We might also be able to accept the performance hit, but we don't have tools to
+  // detect performance regressions.
+  //
+  // Discussion: https://groups.google.com/forum/#!search/biased$20scala-internals/scala-internals/0cYB7SkJ-nM/47MLhsgw8jwJ
+  protected def synchronizeNames: Boolean = false
+  private val nameLock: Object = new Object
+
   /** Memory to store all names sequentially. */
   var chrs: Array[Char] = new Array[Char](NAME_SIZE)
   private var nc = 0
@@ -49,7 +47,7 @@ trait Names extends api.Names with LowPriorityNames {
        cs(offset) * (41 * 41) +
        cs(offset + len - 1) * 41 +
        cs(offset + (len >> 1)))
-    else 0;
+    else 0
 
   /** Is (the ASCII representation of) name at given index equal to
    *  cs[offset..offset+len-1]?
@@ -57,7 +55,7 @@ trait Names extends api.Names with LowPriorityNames {
   private def equals(index: Int, cs: Array[Char], offset: Int, len: Int): Boolean = {
     var i = 0
     while ((i < len) && (chrs(index + i) == cs(offset + i)))
-      i += 1;
+      i += 1
     i == len
   }
 
@@ -78,65 +76,110 @@ trait Names extends api.Names with LowPriorityNames {
   }
 
   /** Create a term name from the characters in cs[offset..offset+len-1]. */
-  def newTermName(cs: Array[Char], offset: Int, len: Int): TermName =
+  final def newTermName(cs: Array[Char], offset: Int, len: Int): TermName =
     newTermName(cs, offset, len, cachedString = null)
 
-  def newTermName(cs: Array[Char]): TermName = newTermName(cs, 0, cs.length)
-  def newTypeName(cs: Array[Char]): TypeName = newTypeName(cs, 0, cs.length)
+  final def newTermName(cs: Array[Char]): TermName = newTermName(cs, 0, cs.length)
+
+  final def newTypeName(cs: Array[Char]): TypeName = newTypeName(cs, 0, cs.length)
 
   /** Create a term name from the characters in cs[offset..offset+len-1].
    *  TODO - have a mode where name validation is performed at creation time
    *  (e.g. if a name has the string "$class" in it, then fail if that
    *  string is not at the very end.)
+   *
+   *  @param len0 the length of the name. Negative lengths result in empty names.
    */
-  protected def newTermName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TermName = {
-    val h = hashValue(cs, offset, len) & HASH_MASK
-    var n = termHashtable(h)
-    while ((n ne null) && (n.length != len || !equals(n.start, cs, offset, len)))
-      n = n.next
-
-    if (n ne null) n
-    else {
-      // The logic order here is future-proofing against the possibility
-      // that name.toString will become an eager val, in which case the call
-      // to enterChars cannot follow the construction of the TermName.
-      val ncStart = nc
-      enterChars(cs, offset, len)
-      if (cachedString ne null) new TermName_S(ncStart, len, h, cachedString)
-      else new TermName_R(ncStart, len, h)
+  final def newTermName(cs: Array[Char], offset: Int, len0: Int, cachedString: String): TermName = {
+    def body = {
+      require(offset >= 0, "offset must be non-negative, got " + offset)
+      val len = math.max(len0, 0)
+      val h = hashValue(cs, offset, len) & HASH_MASK
+      var n = termHashtable(h)
+      while ((n ne null) && (n.length != len || !equals(n.start, cs, offset, len)))
+        n = n.next
+
+      if (n ne null) n
+      else {
+        // The logic order here is future-proofing against the possibility
+        // that name.toString will become an eager val, in which case the call
+        // to enterChars cannot follow the construction of the TermName.
+        val ncStart = nc
+        enterChars(cs, offset, len)
+        if (cachedString ne null) new TermName_S(ncStart, len, h, cachedString)
+        else new TermName_R(ncStart, len, h)
+      }
     }
+    if (synchronizeNames) nameLock.synchronized(body) else body
   }
-  protected def newTypeName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TypeName =
+
+  final def newTypeName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TypeName =
     newTermName(cs, offset, len, cachedString).toTypeName
 
   /** Create a term name from string. */
+  @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overridden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala
   def newTermName(s: String): TermName = newTermName(s.toCharArray(), 0, s.length(), null)
 
   /** Create a type name from string. */
+  @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overridden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala
   def newTypeName(s: String): TypeName = newTermName(s).toTypeName
 
   /** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
-  def newTermName(bs: Array[Byte], offset: Int, len: Int): TermName = {
+  final def newTermName(bs: Array[Byte], offset: Int, len: Int): TermName = {
     val chars = Codec.fromUTF8(bs, offset, len)
     newTermName(chars, 0, chars.length)
   }
 
-  def newTermNameCached(s: String): TermName =
+  final def newTermNameCached(s: String): TermName =
     newTermName(s.toCharArray(), 0, s.length(), cachedString = s)
 
-  def newTypeNameCached(s: String): TypeName =
+  final def newTypeNameCached(s: String): TypeName =
     newTypeName(s.toCharArray(), 0, s.length(), cachedString = s)
 
   /** Create a type name from the characters in cs[offset..offset+len-1]. */
-  def newTypeName(cs: Array[Char], offset: Int, len: Int): TypeName =
+  final def newTypeName(cs: Array[Char], offset: Int, len: Int): TypeName =
     newTermName(cs, offset, len, cachedString = null).toTypeName
 
   /** Create a type name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
-  def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
+  final def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
     newTermName(bs, offset, len).toTypeName
 
-  def nameChars: Array[Char] = chrs
-  @deprecated("", "2.9.0") def view(s: String): TermName = newTermName(s)
+  /**
+   *  Used only by the GenBCode backend, to represent bytecode-level types in a way that makes equals() and hashCode() efficient.
+   *  For bytecode-level types of OBJECT sort, its internal name (not its descriptor) is stored.
+   *  For those of ARRAY sort,  its descriptor is stored, i.e. it has a leading '['.
+   *  For those of METHOD sort, its descriptor is stored, i.e. it has a leading '('.
+   *
+   *  can-multi-thread
+   *  TODO SI-6240 !!! JZ Really? the constructors TermName and TypeName publish unconstructed `this` references
+   *               into the hash tables; we could observe them here before the subclass constructor completes.
+   */
+  final def lookupTypeName(cs: Array[Char]): TypeName = { lookupTypeNameIfExisting(cs, true) }
+
+  final def lookupTypeNameIfExisting(cs: Array[Char], failOnNotFound: Boolean): TypeName = {
+
+    val hterm = hashValue(cs, 0, cs.size) & HASH_MASK
+    var nterm = termHashtable(hterm)
+    while ((nterm ne null) && (nterm.length != cs.size || !equals(nterm.start, cs, 0, cs.size))) {
+      nterm = nterm.next
+    }
+    if (nterm eq null) {
+      if (failOnNotFound) { assert(false, "TermName not yet created: " + new String(cs)) }
+      return null
+    }
+
+    val htype = hashValue(chrs, nterm.start, nterm.length) & HASH_MASK
+    var ntype = typeHashtable(htype)
+    while ((ntype ne null) && ntype.start != nterm.start) {
+      ntype = ntype.next
+    }
+    if (ntype eq null) {
+      if (failOnNotFound) { assert(false, "TypeName not yet created: " + new String(cs)) }
+      return null
+    }
+
+    ntype
+  }
 
 // Classes ----------------------------------------------------------------------
 
@@ -186,28 +229,26 @@ trait Names extends api.Names with LowPriorityNames {
       scala.compat.Platform.arraycopy(chrs, index, cs, offset, len)
 
     /** @return the ascii representation of this name */
-    final def toChars: Array[Char] = {
+    final def toChars: Array[Char] = {  // used by ide
       val cs = new Array[Char](len)
       copyChars(cs, 0)
       cs
     }
 
-    /** Write to UTF8 representation of this name to given character array.
-     *  Start copying to index `to`. Return index of next free byte in array.
-     *  Array must have enough remaining space for all bytes
-     *  (i.e. maximally 3*length bytes).
-     */
-    final def copyUTF8(bs: Array[Byte], offset: Int): Int = {
-      val bytes = Codec.toUTF8(chrs, index, len)
-      scala.compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length)
-      offset + bytes.length
-    }
-
     /** @return the hash value of this name */
     final override def hashCode(): Int = index
 
-    // Presently disabled.
-    // override def equals(other: Any) = paranoidEquals(other)
+    /** @return true if the string value of this name is equal
+     *  to the string value of the given name or String.
+     */
+    def string_==(that: Name): Boolean   = (that ne null) && (toString == that.toString)
+    def string_==(that: String): Boolean = (that ne null) && (toString == that)
+
+    /****
+     *  This has been quite useful to find places where people are comparing
+     *  a TermName and a TypeName, or a Name and a String.
+
+    override def equals(other: Any) = paranoidEquals(other)
     private def paranoidEquals(other: Any): Boolean = {
       val cmp = this eq other.asInstanceOf[AnyRef]
       if (cmp || !nameDebug)
@@ -215,7 +256,7 @@ trait Names extends api.Names with LowPriorityNames {
 
       other match {
         case x: String  =>
-          Console.println("Compared " + debugString + " and String '" + x + "'")
+          Console.println(s"Compared $debugString and String '$x'")
         case x: Name    =>
           if (this.isTermName != x.isTermName) {
             val panic = this.toTermName == x.toTermName
@@ -228,6 +269,7 @@ trait Names extends api.Names with LowPriorityNames {
       }
       false
     }
+    ****/
 
     /** @return the i'th Char of this name */
     final def charAt(i: Int): Char = chrs(index + i)
@@ -235,14 +277,14 @@ trait Names extends api.Names with LowPriorityNames {
     /** @return the index of first occurrence of char c in this name, length if not found */
     final def pos(c: Char): Int = pos(c, 0)
 
-    /** @return the index of first occurrence of char c in this name, length if not found */
+    /** @return the index of first occurrence of s in this name, length if not found */
     final def pos(s: String): Int = pos(s, 0)
 
     /** Returns the index of the first occurrence of character c in
      *  this name from start, length if not found.
      *
      *  @param c     the character
-     *  @param start ...
+     *  @param start the index from which to search
      *  @return      the index of the first occurrence of c
      */
     final def pos(c: Char, start: Int): Int = {
@@ -255,7 +297,7 @@ trait Names extends api.Names with LowPriorityNames {
      *  in this name from start, length if not found.
      *
      *  @param s     the string
-     *  @param start ...
+     *  @param start the index from which to search
      *  @return      the index of the first occurrence of s
      */
     final def pos(s: String, start: Int): Int = {
@@ -279,13 +321,11 @@ trait Names extends api.Names with LowPriorityNames {
      */
     final def lastPos(c: Char): Int = lastPos(c, len - 1)
 
-    final def lastPos(s: String): Int = lastPos(s, len - s.length)
-
     /** Returns the index of the last occurrence of char c in this
      *  name from start, -1 if not found.
      *
      *  @param c     the character
-     *  @param start ...
+     *  @param start the index from which to search
      *  @return      the index of the last occurrence of c
      */
     final def lastPos(c: Char, start: Int): Int = {
@@ -294,26 +334,6 @@ trait Names extends api.Names with LowPriorityNames {
       i
     }
 
-    /** Returns the index of the last occurrence of string s in this
-     *  name from start, -1 if not found.
-     *
-     *  @param s     the string
-     *  @param start ...
-     *  @return      the index of the last occurrence of s
-     */
-    final def lastPos(s: String, start: Int): Int = {
-      var i = lastPos(s.charAt(0), start)
-      while (i >= 0) {
-        var j = 1;
-        while (s.charAt(j) == chrs(index + i + j)) {
-          j += 1
-          if (j == s.length()) return i;
-        }
-        i = lastPos(s.charAt(0), i - 1)
-      }
-      -s.length()
-    }
-
     /** Does this name start with prefix? */
     final def startsWith(prefix: Name): Boolean = startsWith(prefix, 0)
 
@@ -322,7 +342,14 @@ trait Names extends api.Names with LowPriorityNames {
       var i = 0
       while (i < prefix.length && start + i < len &&
              chrs(index + start + i) == chrs(prefix.start + i))
-        i += 1;
+        i += 1
+      i == prefix.length
+    }
+    final def startsWith(prefix: String, start: Int): Boolean = {
+      var i = 0
+      while (i < prefix.length && start + i < len &&
+             chrs(index + start + i) == prefix.charAt(i))
+        i += 1
       i == prefix.length
     }
 
@@ -334,7 +361,14 @@ trait Names extends api.Names with LowPriorityNames {
       var i = 1
       while (i <= suffix.length && i <= end &&
              chrs(index + end - i) == chrs(suffix.start + suffix.length - i))
-        i += 1;
+        i += 1
+      i > suffix.length
+    }
+    final def endsWith(suffix: String, end: Int): Boolean = {
+      var i = 1
+      while (i <= suffix.length && i <= end &&
+             chrs(index + end - i) == suffix.charAt(suffix.length - i))
+        i += 1
       i > suffix.length
     }
 
@@ -362,20 +396,22 @@ trait Names extends api.Names with LowPriorityNames {
     final def startChar: Char                   = this charAt 0
     final def endChar: Char                     = this charAt len - 1
     final def startsWith(char: Char): Boolean   = len > 0 && startChar == char
-    final def startsWith(name: String): Boolean = startsWith(newTermName(name))
+    final def startsWith(name: String): Boolean = startsWith(name, 0)
     final def endsWith(char: Char): Boolean     = len > 0 && endChar == char
-    final def endsWith(name: String): Boolean   = endsWith(newTermName(name))
+    final def endsWith(name: String): Boolean   = endsWith(name, len)
 
-    def indexOf(ch: Char) = {
-      val idx = pos(ch)
-      if (idx == length) -1 else idx
-    }
-    def indexOf(ch: Char, fromIndex: Int) = {
-      val idx = pos(ch, fromIndex)
-      if (idx == length) -1 else idx
-    }
-    def lastIndexOf(ch: Char) = lastPos(ch)
-    def lastIndexOf(ch: Char, fromIndex: Int) = lastPos(ch, fromIndex)
+    /** Rewrite the confusing failure indication via result == length to
+     *  the normal failure indication via result == -1.
+     */
+    private def fixIndexOf(idx: Int): Int = if (idx == length) -1 else idx
+
+    def indexOf(ch: Char)                 = fixIndexOf(pos(ch))
+    def indexOf(ch: Char, fromIndex: Int) = fixIndexOf(pos(ch, fromIndex))
+    def indexOf(s: String)                = fixIndexOf(pos(s))
+
+    /** The lastPos methods already return -1 on failure. */
+    def lastIndexOf(ch: Char): Int  = lastPos(ch)
+    def lastIndexOf(s: String): Int = toString lastIndexOf s
 
     /** Replace all occurrences of `from` by `to` in
      *  name; result is always a term name.
@@ -421,27 +457,44 @@ trait Names extends api.Names with LowPriorityNames {
     }
 
     /** TODO - find some efficiency. */
-    def append(ch: Char)        = newName("" + this + ch)
-    def append(suffix: String)  = newName("" + this + suffix)
-    def append(suffix: Name)    = newName("" + this + suffix)
-    def prepend(ch: Char)       = newName("" + ch + this)
+    def append(ch: Char)        = newName(toString + ch)
+    def append(suffix: String)  = newName(toString + suffix)
+    def append(suffix: Name)    = newName(toString + suffix)
+    def append(separator: Char, suffix: Name) = newName(toString + separator + suffix)
     def prepend(prefix: String) = newName("" + prefix + this)
-    def prepend(prefix: Name)   = newName("" + prefix + this)
 
     def decodedName: ThisNameType = newName(decode)
-    def isOperatorName: Boolean = decode != toString
+    def isOperatorName: Boolean = decode != toString  // used by ide
     def longString: String      = nameKind + " " + decode
     def debugString = { val s = decode ; if (isTypeName) s + "!" else s }
   }
 
+  implicit def AnyNameOps(name: Name): NameOps[Name]          = new NameOps(name)
   implicit def TermNameOps(name: TermName): NameOps[TermName] = new NameOps(name)
   implicit def TypeNameOps(name: TypeName): NameOps[TypeName] = new NameOps(name)
 
+  /** FIXME: This is a good example of something which is a pure "value class" but cannot
+   *  reap the benefits, because an (unused) $outer pointer means it is not single-field.
+   */
   final class NameOps[T <: Name](name: T) {
-    def stripSuffix(suffix: Name): T = if (name endsWith suffix) dropRight(suffix.length) else name
-    def dropRight(n: Int): T         = name.subName(0, name.length - n).asInstanceOf[T]
-    def drop(n: Int): T              = name.subName(n, name.length).asInstanceOf[T]
-    def nonEmpty: Boolean            = name.length > 0
+    import NameTransformer._
+    def stripSuffix(suffix: String): T = if (name endsWith suffix) dropRight(suffix.length) else name // OPT avoid creating a Name with `suffix`
+    def stripSuffix(suffix: Name): T   = if (name endsWith suffix) dropRight(suffix.length) else name
+    def take(n: Int): T                = name.subName(0, n).asInstanceOf[T]
+    def drop(n: Int): T                = name.subName(n, name.length).asInstanceOf[T]
+    def dropRight(n: Int): T           = name.subName(0, name.length - n).asInstanceOf[T]
+    def dropLocal: TermName            = name.toTermName stripSuffix LOCAL_SUFFIX_STRING
+    def dropSetter: TermName           = name.toTermName stripSuffix SETTER_SUFFIX_STRING
+    def dropModule: T                  = this stripSuffix MODULE_SUFFIX_STRING
+    def localName: TermName            = getterName append LOCAL_SUFFIX_STRING
+    def setterName: TermName           = getterName append SETTER_SUFFIX_STRING
+    def getterName: TermName           = dropTraitSetterSeparator.dropSetter.dropLocal
+
+    private def dropTraitSetterSeparator: TermName =
+      name indexOf TRAIT_SETTER_SEPARATOR_STRING match {
+        case -1  => name.toTermName
+        case idx => name.toTermName drop idx drop TRAIT_SETTER_SEPARATOR_STRING.length
+      }
   }
 
   implicit val NameTag = ClassTag[Name](classOf[Name])
@@ -462,42 +515,45 @@ trait Names extends api.Names with LowPriorityNames {
   /** TermName_S and TypeName_S have fields containing the string version of the name.
    *  TermName_R and TypeName_R recreate it each time toString is called.
    */
-  private class TermName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TermName(index0, len0, hash) {
+  private final class TermName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TermName(index0, len0, hash) {
     protected def createCompanionName(h: Int): TypeName = new TypeName_S(index, len, h, toString)
     override def newName(str: String): TermName = newTermNameCached(str)
   }
-  private class TypeName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TypeName(index0, len0, hash) {
+  private final class TypeName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TypeName(index0, len0, hash) {
     protected def createCompanionName(h: Int): TermName = new TermName_S(index, len, h, toString)
     override def newName(str: String): TypeName = newTypeNameCached(str)
   }
 
-  private class TermName_R(index0: Int, len0: Int, hash: Int) extends TermName(index0, len0, hash) {
+  private final class TermName_R(index0: Int, len0: Int, hash: Int) extends TermName(index0, len0, hash) {
     protected def createCompanionName(h: Int): TypeName = new TypeName_R(index, len, h)
     override def toString = new String(chrs, index, len)
   }
 
-  private class TypeName_R(index0: Int, len0: Int, hash: Int) extends TypeName(index0, len0, hash) {
+  private final class TypeName_R(index0: Int, len0: Int, hash: Int) extends TypeName(index0, len0, hash) {
     protected def createCompanionName(h: Int): TermName = new TermName_R(index, len, h)
     override def toString = new String(chrs, index, len)
   }
 
-  sealed abstract class TermName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
+  // SYNCNOTE: caller to constructor must synchronize if `synchronizeNames` is enabled
+  sealed abstract class TermName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) with TermNameApi {
     type ThisNameType = TermName
     protected[this] def thisName: TermName = this
-
-    var next: TermName = termHashtable(hash)
+    val next: TermName = termHashtable(hash)
     termHashtable(hash) = this
     def isTermName: Boolean = true
     def isTypeName: Boolean = false
     def toTermName: TermName = this
     def toTypeName: TypeName = {
-      val h = hashValue(chrs, index, len) & HASH_MASK
-      var n = typeHashtable(h)
-      while ((n ne null) && n.start != index)
-        n = n.next
-
-      if (n ne null) n
-      else createCompanionName(h)
+      def body = {
+        val h = hashValue(chrs, index, len) & HASH_MASK
+        var n = typeHashtable(h)
+        while ((n ne null) && n.start != index)
+          n = n.next
+
+        if (n ne null) n
+        else createCompanionName(h)
+      }
+      if (synchronizeNames) nameLock.synchronized(body) else body
     }
     def newName(str: String): TermName = newTermName(str)
     def companionName: TypeName = toTypeName
@@ -505,27 +561,37 @@ trait Names extends api.Names with LowPriorityNames {
       newTermName(chrs, start + from, to - from)
 
     def nameKind = "term"
+    /** SYNCNOTE: caller must synchronize if `synchronizeNames` is enabled */
     protected def createCompanionName(h: Int): TypeName
   }
 
   implicit val TermNameTag = ClassTag[TermName](classOf[TermName])
 
-  sealed abstract class TypeName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
+  object TermName extends TermNameExtractor {
+    def apply(s: String) = newTermName(s)
+    def unapply(name: TermName): Option[String] = Some(name.toString)
+  }
+
+  sealed abstract class TypeName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) with TypeNameApi {
     type ThisNameType = TypeName
     protected[this] def thisName: TypeName = this
 
-    var next: TypeName = typeHashtable(hash)
+    val next: TypeName = typeHashtable(hash)
     typeHashtable(hash) = this
+
     def isTermName: Boolean = false
     def isTypeName: Boolean = true
     def toTermName: TermName = {
-      val h = hashValue(chrs, index, len) & HASH_MASK
-      var n = termHashtable(h)
-      while ((n ne null) && n.start != index)
-        n = n.next
-
-      if (n ne null) n
-      else createCompanionName(h)
+      def body = {
+        val h = hashValue(chrs, index, len) & HASH_MASK
+        var n = termHashtable(h)
+        while ((n ne null) && n.start != index)
+          n = n.next
+
+        if (n ne null) n
+        else createCompanionName(h)
+      }
+      if (synchronizeNames) nameLock.synchronized(body) else body
     }
     def toTypeName: TypeName = this
     def newName(str: String): TypeName = newTypeName(str)
@@ -535,8 +601,14 @@ trait Names extends api.Names with LowPriorityNames {
 
     def nameKind = "type"
     override def decode = if (nameDebug) super.decode + "!" else super.decode
+    /** SYNCNOTE: caller must synchronize if `synchronizeNames` is enabled */
     protected def createCompanionName(h: Int): TermName
   }
 
   implicit val TypeNameTag = ClassTag[TypeName](classOf[TypeName])
+
+  object TypeName extends TypeNameExtractor {
+    def apply(s: String) = newTypeName(s)
+    def unapply(name: TypeName): Option[String] = Some(name.toString)
+  }
 }
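
The newTermName path above is a chained-bucket interning table: a name hashes into a fixed-size
array and collides into a linked chain, so an existing entry is reused instead of allocating a
new one (optionally under nameLock). A much-simplified standalone sketch of that idea over plain
Strings (illustrative only, not the real Names API):

    object InterningDemo {
      private val HASH_SIZE = 0x8000
      private val HASH_MASK = 0x7FFF
      private final class Entry(val value: String, val next: Entry)
      private val table = new Array[Entry](HASH_SIZE)

      def intern(s: String): String = {
        val h = s.hashCode & HASH_MASK
        var e = table(h)
        while ((e ne null) && e.value != s) e = e.next
        if (e ne null) e.value                          // hit: reuse the canonical instance
        else { table(h) = new Entry(s, table(h)); s }   // miss: chain a new entry at the bucket head
      }

      def main(args: Array[String]): Unit = {
        val a = intern(new String("foo"))
        val b = intern(new String("foo"))
        assert(a eq b)                                  // both calls yield the same canonical String
      }
    }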
diff --git a/src/reflect/scala/reflect/internal/Phase.scala b/src/reflect/scala/reflect/internal/Phase.scala
index c0f4232..1ecc202 100644
--- a/src/reflect/scala/reflect/internal/Phase.scala
+++ b/src/reflect/scala/reflect/internal/Phase.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 abstract class Phase(val prev: Phase) {
@@ -25,11 +26,14 @@ abstract class Phase(val prev: Phase) {
   )
   def flagMask: Long = fmask
 
-  private var nx: Phase = this
+  private var nx: Phase = NoPhase
 
-  def next: Phase = nx
+  // does anyone rely on next == this for terminus?
+  def next: Phase = if (nx eq NoPhase) this else nx
   def hasNext = next != this
-  def iterator = Iterator.iterate(this)(_.next) takeWhile (p => p.next != p)
+  // this definition excludes the terminal phase
+  //def iterator = Iterator.iterate(this)(_.nx) takeWhile (p => p.next != p)
+  def iterator = Iterator.iterate(this)(_.nx) takeWhile (_ ne NoPhase)
 
   def name: String
   def description: String = name
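
The iterator change above walks the raw `nx` link and stops at the NoPhase sentinel, so the
terminal phase is now included, whereas the old `p.next != p` test dropped it. A toy sketch of
the same traversal on a plain linked structure (names are illustrative, not compiler types; null
plays the role of NoPhase here):

    object PhaseIterDemo {
      final class P(val name: String) { var nx: P = null }

      def main(args: Array[String]): Unit = {
        val a = new P("parser"); val b = new P("typer"); val c = new P("terminal")
        a.nx = b; b.nx = c
        // mirrors: Iterator.iterate(this)(_.nx) takeWhile (_ ne NoPhase)
        val names = Iterator.iterate(a)(_.nx).takeWhile(_ ne null).map(_.name).toList
        assert(names == List("parser", "typer", "terminal"))   // terminal element included
      }
    }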
diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala
index faa161d..01fba1e 100644
--- a/src/reflect/scala/reflect/internal/Positions.scala
+++ b/src/reflect/scala/reflect/internal/Positions.scala
@@ -1,34 +1,274 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 
+import util._
+import scala.collection.mutable.ListBuffer
+
+/** Handling range positions
+ *  atPos, the main method in this trait, will add positions to a tree,
+ *  and will ensure the following properties:
+ *
+ *    1. All nodes between the root of the tree and nodes that already have positions
+ *       will be assigned positions.
+ *    2. No node which already has a position will be assigned a different range; however
+ *       a RangePosition might become a TransparentPosition.
+ *    3. The position of each assigned node includes the positions of each of its children.
+ *    4. The positions of all solid descendants of children of an assigned node
+ *       are mutually non-overlapping.
+ *
+ * Here, the solid descendants of a node are:
+ *
+ *   If the node has a TransparentPosition, the solid descendants of all its children;
+ *   otherwise, the singleton consisting of the node itself.
+ */
 trait Positions extends api.Positions { self: SymbolTable =>
 
   type Position = scala.reflect.internal.util.Position
   val NoPosition = scala.reflect.internal.util.NoPosition
   implicit val PositionTag = ClassTag[Position](classOf[Position])
 
+  def inform(msg: String): Unit
+
+  def useOffsetPositions: Boolean = true
+
   /** A position that wraps a set of trees.
    *  The point of the wrapping position is the point of the default position.
    *  If some of the trees are ranges, returns a range position enclosing all ranges
    *  Otherwise returns default position that is either focused or not.
    */
-  def wrappingPos(default: Position, trees: List[Tree]) = wrappingPos(default, trees, true)
-  def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = default
+  def wrappingPos(default: Position, trees: List[Tree]): Position = wrappingPos(default, trees, focus = true)
+  def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = {
+    if (useOffsetPositions) default else {
+      val ranged = trees filter (_.pos.isRange)
+      if (ranged.isEmpty) if (focus) default.focus else default
+      else Position.range(default.source, (ranged map (_.pos.start)).min, default.point, (ranged map (_.pos.end)).max)
+    }
+  }
 
   /** A position that wraps the non-empty set of trees.
   *  The point of the wrapping position is the point of the first tree's position.
   *  If some of the trees are non-synthetic, returns a range position enclosing the non-synthetic trees;
   *  otherwise returns a synthetic offset position at the point.
    */
-  def wrappingPos(trees: List[Tree]): Position = trees.head.pos
+  def wrappingPos(trees: List[Tree]): Position = {
+    val headpos = trees.head.pos
+    if (useOffsetPositions || !headpos.isDefined) headpos
+    else wrappingPos(headpos, trees)
+  }
 
   /** Ensure that the given tree has no positions that overlap with
    *  any of the positions of `others`. This is done by
    *  shortening the range, assigning TransparentPositions
    *  to some of the nodes in `tree` or focusing on the position.
    */
-  def ensureNonOverlapping(tree: Tree, others: List[Tree]){ ensureNonOverlapping(tree, others, true) }
-  def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {}
+  def ensureNonOverlapping(tree: Tree, others: List[Tree]){ ensureNonOverlapping(tree, others, focus = true) }
+  def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {
+    if (useOffsetPositions) return
+
+    def isOverlapping(pos: Position) =
+      pos.isRange && (others exists (pos overlaps _.pos))
+
+    if (isOverlapping(tree.pos)) {
+      val children = tree.children
+      children foreach (ensureNonOverlapping(_, others, focus))
+      if (tree.pos.isOpaqueRange) {
+        val wpos = wrappingPos(tree.pos, children, focus)
+        tree setPos (if (isOverlapping(wpos)) tree.pos.makeTransparent else wpos)
+      }
+    }
+  }
+
+  def rangePos(source: SourceFile, start: Int, point: Int, end: Int): Position =
+    if (useOffsetPositions) Position.offset(source, point)
+    else Position.range(source, start, point, end)
+
+  def validatePositions(tree: Tree) {
+    if (useOffsetPositions) return
+
+    def reportTree(prefix : String, tree : Tree) {
+      val source = if (tree.pos.isDefined) tree.pos.source else ""
+      inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source)
+      inform("")
+      inform(treeStatus(tree))
+      inform("")
+    }
+
+    def positionError(msg: String)(body : => Unit) {
+      inform("======= Position error\n" + msg)
+      body
+      inform("\nWhile validating #" + tree.id)
+      inform(treeStatus(tree))
+      inform("\nChildren:")
+      tree.children map (t => "  " + treeStatus(t, tree)) foreach inform
+      inform("=======")
+      throw new ValidateException(msg)
+    }
+
+    def validate(tree: Tree, encltree: Tree): Unit = {
+
+      if (!tree.isEmpty && tree.canHaveAttrs) {
+        if (settings.Yposdebug && (settings.verbose || settings.Yrangepos))
+          println("[%10s] %s".format("validate", treeStatus(tree, encltree)))
+
+        if (!tree.pos.isDefined)
+          positionError("Unpositioned tree #"+tree.id) {
+            inform("%15s %s".format("unpositioned", treeStatus(tree, encltree)))
+            inform("%15s %s".format("enclosing", treeStatus(encltree)))
+            encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree))))
+          }
+        if (tree.pos.isRange) {
+          if (!encltree.pos.isRange)
+            positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") {
+              reportTree("Enclosing", encltree)
+              reportTree("Enclosed", tree)
+            }
+          if (!(encltree.pos includes tree.pos))
+            positionError("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") {
+              reportTree("Enclosing", encltree)
+              reportTree("Enclosed", tree)
+            }
+
+          findOverlapping(tree.children flatMap solidDescendants) match {
+            case List() => ;
+            case xs => {
+              positionError("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) {
+                reportTree("Ancestor", tree)
+                for((x, y) <- xs) {
+                  reportTree("First overlapping", x)
+                  reportTree("Second overlapping", y)
+                }
+              }
+            }
+          }
+        }
+        for (ct <- tree.children flatMap solidDescendants) validate(ct, tree)
+      }
+    }
+
+    if (!isPastTyper)
+      validate(tree, tree)
+  }
+
+  def solidDescendants(tree: Tree): List[Tree] =
+    if (tree.pos.isTransparent) tree.children flatMap solidDescendants
+    else List(tree)
+
+  /** A free range from `lo` to `hi` */
+  private def free(lo: Int, hi: Int): Range =
+    Range(Position.range(null, lo, lo, hi), EmptyTree)
+
+  /** The maximal free range */
+  private lazy val maxFree: Range = free(0, Int.MaxValue)
+
+  /** A singleton list of a non-empty range from `lo` to `hi`, or else the empty List */
+  private def maybeFree(lo: Int, hi: Int) =
+    if (lo < hi) List(free(lo, hi))
+    else List()
+
+  /** Insert the position of tree `t` into ranges `rs` if possible;
+   *  otherwise add conflicting trees to `conflicting`.
+   */
+  private def insert(rs: List[Range], t: Tree, conflicting: ListBuffer[Tree]): List[Range] = rs match {
+    case List() =>
+      assert(conflicting.nonEmpty)
+      rs
+    case r :: rs1 =>
+      assert(!t.pos.isTransparent)
+      if (r.isFree && (r.pos includes t.pos)) {
+//      println("subdividing "+r+"/"+t.pos)
+        maybeFree(t.pos.end, r.pos.end) ::: List(Range(t.pos, t)) ::: maybeFree(r.pos.start, t.pos.start) ::: rs1
+      } else {
+        if (!r.isFree && (r.pos overlaps t.pos)) conflicting += r.tree
+        r :: insert(rs1, t, conflicting)
+      }
+  }
+
+  /** Replace elem `t` of `ts` by `replacement` list. */
+  private def replace(ts: List[Tree], t: Tree, replacement: List[Tree]): List[Tree] =
+    if (ts.head == t) replacement ::: ts.tail
+    else ts.head :: replace(ts.tail, t, replacement)
+
+  /** Find the pairs of overlapping trees in the given list; returns Nil
+   *  when all positions are mutually non-overlapping.
+   *  pre: None of the trees is transparent
+   */
+  def findOverlapping(cts: List[Tree]): List[(Tree, Tree)] = {
+    var ranges = List(maxFree)
+    for (ct <- cts) {
+      if (ct.pos.isOpaqueRange) {
+        val conflicting = new ListBuffer[Tree]
+        ranges = insert(ranges, ct, conflicting)
+        if (conflicting.nonEmpty) return conflicting.toList map (t => (t, ct))
+      }
+    }
+    List()
+  }
+
+  /** Set position of all children of a node
+   *  @param  pos   A target position.
+   *                Uses the point of the position as the point of all positions it assigns.
+   *                Uses the point of this position as an offset position for unpositioned trees
+   *                without children.
+   *  @param  trees  The children to position. All children must be positionable.
+   */
+  private def setChildrenPos(pos: Position, trees: List[Tree]): Unit = try {
+    for (tree <- trees) {
+      if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) {
+        val children = tree.children
+        if (children.isEmpty) {
+          tree setPos pos.focus
+        } else {
+          setChildrenPos(pos, children)
+          tree setPos wrappingPos(pos, children)
+        }
+      }
+    }
+  } catch {
+    case ex: Exception =>
+      println("error while setting children pos "+pos+" of "+trees)
+      throw ex
+  }
+
+
+  class ValidateException(msg : String) extends Exception(msg)
+
+
+  /** A locator for trees with given positions.
+   *  Given a position `pos`, `locateIn(root)` returns
+   *  the smallest tree within `root` that encloses `pos`.
+   */
+  class Locator(pos: Position) extends Traverser {
+    var last: Tree = _
+    def locateIn(root: Tree): Tree = {
+      this.last = EmptyTree
+      traverse(root)
+      this.last
+    }
+    protected def isEligible(t: Tree) = !t.pos.isTransparent
+    override def traverse(t: Tree) {
+      t match {
+        case tt : TypeTree if tt.original != null && (tt.pos includes tt.original.pos) =>
+          traverse(tt.original)
+        case _ =>
+          if (t.pos includes pos) {
+            if (isEligible(t)) last = t
+            super.traverse(t)
+          } else t match {
+            case mdef: MemberDef =>
+              traverseTrees(mdef.mods.annotations)
+            case _ =>
+          }
+      }
+    }
+  }
+
+  case class Range(pos: Position, tree: Tree) {
+    def isFree = tree == EmptyTree
+  }
+
+  class TypedLocator(pos: Position) extends Locator(pos) {
+    override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null
+  }
 
   trait PosAssigner extends Traverser {
     var pos: Position
@@ -38,7 +278,7 @@ trait Positions extends api.Positions { self: SymbolTable =>
   protected class DefaultPosAssigner extends PosAssigner {
     var pos: Position = _
     override def traverse(t: Tree) {
-      if (t eq EmptyTree) ()
+      if (!t.canHaveAttrs) ()
       else if (t.pos == NoPosition) {
         t.setPos(pos)
         super.traverse(t)   // TODO: bug? shouldn't the traverse be outside of the if?
@@ -57,9 +297,25 @@ trait Positions extends api.Positions { self: SymbolTable =>
     }
   }
 
+  /** Position a tree.
+   *  This means: Set position of a node and position all its unpositioned children.
+   */
   def atPos[T <: Tree](pos: Position)(tree: T): T = {
-    posAssigner.pos = pos
-    posAssigner.traverse(tree)
-    tree
+    if (useOffsetPositions || !pos.isOpaqueRange) {
+      posAssigner.pos = pos
+      posAssigner.traverse(tree)
+      tree
+    }
+    else {
+      if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) {
+        tree.setPos(pos)
+        val children = tree.children
+        if (children.nonEmpty) {
+          if (children.tail.isEmpty) atPos(pos)(children.head)
+          else setChildrenPos(pos, children)
+        }
+      }
+      tree
+    }
   }
-}
\ No newline at end of file
+}
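
Taken together, the new machinery in Positions.scala is active only when
`useOffsetPositions` is false (range positions enabled); otherwise `atPos` degrades to
the old flat position assignment. A minimal sketch of the intended use, assuming
`global` is a SymbolTable instance with range positions enabled and `source` is a
SourceFile for the code being positioned:

    import global._

    val pos  = rangePos(source, start = 0, point = 0, end = 10)
    val tree = atPos(pos)(Apply(Ident(TermName("f")), List(Ident(TermName("x")))))

    // Per the contract documented above: the root keeps the opaque range,
    // previously unpositioned children are focused inside it, and
    // validatePositions(tree) would throw a ValidateException on overlaps.
    assert(tree.pos.isOpaqueRange)
    assert(tree.children forall (c => tree.pos includes c.pos))
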
diff --git a/src/reflect/scala/reflect/internal/Precedence.scala b/src/reflect/scala/reflect/internal/Precedence.scala
new file mode 100644
index 0000000..1430838
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Precedence.scala
@@ -0,0 +1,38 @@
+package scala
+package reflect
+package internal
+
+import scala.annotation.switch
+import Chars._
+
+final class Precedence private (val level: Int) extends AnyVal with Ordered[Precedence] {
+  def compare(that: Precedence): Int = level compare that.level
+  override def toString = s"Precedence($level)"
+}
+
+
+object Precedence extends (Int => Precedence) {
+  private val ErrorName = "<error>"
+  private def isAssignmentOp(name: String) = name match {
+    case "!=" | "<=" | ">=" | "" => false
+    case _                       => name.last == '=' && name.head != '=' && isOperatorPart(name.head)
+  }
+  private def firstChar(ch: Char): Precedence = apply((ch: @switch) match {
+    case '|'             => 2
+    case '^'             => 3
+    case '&'             => 4
+    case '=' | '!'       => 5
+    case '<' | '>'       => 6
+    case ':'             => 7
+    case '+' | '-'       => 8
+    case '*' | '/' | '%' => 9
+    case _               => if (isScalaLetter(ch)) 1 else 10
+  })
+
+  def apply(level: Int): Precedence = new Precedence(level)
+  def apply(name: String): Precedence = name match {
+    case "" | ErrorName            => this(-1)
+    case _ if isAssignmentOp(name) => this(0)
+    case _                         => firstChar(name charAt 0)
+  }
+}
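
Since the whole file fits in the hunk above, the behaviour can be read straight off the
switch: precedence is keyed on the first character of the operator name, assignment-like
operators are pinned to level 0, and the empty or error name maps to -1. A few
illustrative values:

    import scala.reflect.internal.Precedence

    assert(Precedence("*").level  == 9)   // '*' | '/' | '%'
    assert(Precedence("+").level  == 8)   // '+' | '-'
    assert(Precedence("<=").level == 6)   // "<=" is explicitly not an assignment op
    assert(Precedence("+=").level == 0)   // trailing '=' gives assignment precedence
    assert(Precedence("map").level == 1)  // names starting with a letter
    assert(Precedence("").level   == -1)  // empty or <error> name
    assert(Precedence("*") > Precedence("+"))
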
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 35cb749..fcc377b 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -5,7 +5,8 @@
 
 // todo. we need to unify this prettyprinter with NodePrinters
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
@@ -16,8 +17,6 @@ trait Printers extends api.Printers { self: SymbolTable =>
 
   //nsc import treeInfo.{ IsTrue, IsFalse }
 
-  final val showOuterTests = false
-
   /** Adds backticks if the name is a scala keyword. */
   def quotedName(name: Name, decode: Boolean): String = {
     val s = if (decode) name.decode else name.toString
@@ -25,34 +24,35 @@ trait Printers extends api.Printers { self: SymbolTable =>
     if (nme.keywords(term) && term != nme.USCOREkw) "`%s`" format s
     else s
   }
-  def quotedName(name: Name): String = quotedName(name, false)
-  def quotedName(name: String): String = quotedName(newTermName(name), false)
+  def quotedName(name: Name): String = quotedName(name, decode = false)
+  def quotedName(name: String): String = quotedName(newTermName(name), decode = false)
 
   private def symNameInternal(tree: Tree, name: Name, decoded: Boolean): String = {
-    val sym = tree.symbol
-    if (sym.name.toString == nme.ERROR.toString) {
-      "<" + quotedName(name, decoded) + ": error>"
-    } else if (sym != null && sym != NoSymbol) {
-      val prefix = if (sym.isMixinConstructor) "/*%s*/".format(quotedName(sym.owner.name, decoded)) else ""
-      var suffix = ""
-      if (settings.uniqid.value) suffix += ("#" + sym.id)
-      if (settings.Yshowsymkinds.value) suffix += ("#" + sym.abbreviatedKindString)
-      prefix + quotedName(tree.symbol.decodedName) + suffix
-    } else {
-      quotedName(name, decoded)
-    }
+    val sym     = tree.symbol
+    def qname   = quotedName(name.dropLocal, decoded)
+    def qowner  = quotedName(sym.owner.name.dropLocal, decoded)
+    def qsymbol = quotedName(sym.nameString)
+
+    if (sym == null || sym == NoSymbol)
+      qname
+    else if (sym.isErroneous)
+      s"<$qname: error>"
+    else if (sym.isMixinConstructor)
+      s"/*$qowner*/$qsymbol"
+    else
+      qsymbol
   }
 
-  def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, true)
-  def symName(tree: Tree, name: Name) = symNameInternal(tree, name, false)
+  def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, decoded = true)
+  def symName(tree: Tree, name: Name) = symNameInternal(tree, name, decoded = false)
 
   /** Turns a path into a String, introducing backquotes
    *  as necessary.
    */
   def backquotedPath(t: Tree): String = {
     t match {
-      case Select(qual, name) if name.isTermName  => "%s.%s".format(backquotedPath(qual), symName(t, name))
-      case Select(qual, name) if name.isTypeName  => "%s#%s".format(backquotedPath(qual), symName(t, name))
+      case Select(qual, name) if name.isTermName  => s"${backquotedPath(qual)}.${symName(t, name)}"
+      case Select(qual, name) if name.isTypeName  => s"${backquotedPath(qual)}#${symName(t, name)}"
       case Ident(name)                            => symName(t, name)
       case _                                      => t.toString
     }
@@ -65,16 +65,24 @@ trait Printers extends api.Printers { self: SymbolTable =>
 
     printTypes = settings.printtypes.value
     printIds = settings.uniqid.value
+    printOwners = settings.Yshowsymowners.value
     printKinds = settings.Yshowsymkinds.value
     printMirrors = false // typically there's no point to print mirrors inside the compiler, as there is only one mirror there
-    protected def doPrintPositions = settings.Xprintpos.value
+    printPositions = settings.Xprintpos.value
 
     def indent() = indentMargin += indentStep
     def undent() = indentMargin -= indentStep
 
-    def printPosition(tree: Tree) = if (doPrintPositions) print(tree.pos.show)
+    def printPosition(tree: Tree) = 
+      if (printPositions) comment(print(tree.pos.show))
+    
+    protected def printTypesInfo(tree: Tree) = 
+      if (printTypes && tree.isTerm && tree.canHaveAttrs)
+        comment{
+          print("{", if (tree.tpe eq null) "<null>" else tree.tpe.toString, "}")
+        }
 
-    def println() {
+    def println() = {
       out.println()
       while (indentMargin > indentString.length())
         indentString += indentString
@@ -82,116 +90,237 @@ trait Printers extends api.Printers { self: SymbolTable =>
         out.write(indentString, 0, indentMargin)
     }
 
-    def printSeq[a](ls: List[a])(printelem: a => Unit)(printsep: => Unit) {
+    def printSeq[a](ls: List[a])(printelem: a => Unit)(printsep: => Unit): Unit =
       ls match {
         case List() =>
         case List(x) => printelem(x)
         case x :: rest => printelem(x); printsep; printSeq(rest)(printelem)(printsep)
       }
-    }
 
-    def printColumn(ts: List[Tree], start: String, sep: String, end: String) {
-      print(start); indent; println()
-      printSeq(ts){print(_)}{print(sep); println()}; undent; println(); print(end)
+    def printColumn(ts: List[Tree], start: String, sep: String, end: String) = {
+      print(start); indent(); println()
+      printSeq(ts){print(_)}{print(sep); println()}; undent(); println(); print(end)
     }
 
-    def printRow(ts: List[Tree], start: String, sep: String, end: String) {
+    def printRow(ts: List[Tree], start: String, sep: String, end: String): Unit = {
       print(start); printSeq(ts){print(_)}{print(sep)}; print(end)
     }
 
-    def printRow(ts: List[Tree], sep: String) { printRow(ts, "", sep, "") }
+    def printRow(ts: List[Tree], sep: String): Unit = printRow(ts, "", sep, "")
 
-    def printTypeParams(ts: List[TypeDef]) {
-      if (!ts.isEmpty) {
+    def printTypeParams(ts: List[TypeDef]): Unit =
+      if (ts.nonEmpty) {
         print("["); printSeq(ts){ t =>
           printAnnotations(t)
+          if (t.mods.hasFlag(CONTRAVARIANT)) {
+            print("-")
+          } else if (t.mods.hasFlag(COVARIANT)) {
+            print("+")
+          }
           printParam(t)
         }{print(", ")}; print("]")
       }
-    }
 
-    def printLabelParams(ps: List[Ident]) {
+    def printLabelParams(ps: List[Ident]) = {
       print("(")
       printSeq(ps){printLabelParam}{print(", ")}
       print(")")
     }
 
-    def printLabelParam(p: Ident) {
+    def printLabelParam(p: Ident) = {
       print(symName(p, p.name)); printOpt(": ", TypeTree() setType p.tpe)
     }
 
-    def printValueParams(ts: List[ValDef]) {
-      print("(")
-      if (!ts.isEmpty) printFlags(ts.head.mods.flags & IMPLICIT, "")
-      printSeq(ts){printParam}{print(", ")}
-      print(")")
+    protected def parenthesize(condition: Boolean = true, open: String = "(", close: String = ")")(body: => Unit) = {
+      if (condition) print(open)
+      body
+      if (condition) print(close)
     }
 
-    def printParam(tree: Tree) {
+    protected val commentsRequired = false
+
+    protected def comment(body: => Unit) =
+      parenthesize(commentsRequired, "/*", "*/")(body)
+
+    protected def printImplicitInParamsList(vds: List[ValDef]) =
+      if (vds.nonEmpty) printFlags(vds.head.mods.flags & IMPLICIT, "")
+
+    def printValueParams(ts: List[ValDef], inParentheses: Boolean = true): Unit =
+      parenthesize(inParentheses){
+        printImplicitInParamsList(ts)
+        printSeq(ts){printParam}{print(", ")}
+      }
+
+    def printParam(tree: Tree) =
       tree match {
-        case ValDef(mods, name, tp, rhs) =>
+        case vd @ ValDef(mods, name, tp, rhs) =>
           printPosition(tree)
-          printAnnotations(tree)
+          printAnnotations(vd)
           print(symName(tree, name)); printOpt(": ", tp); printOpt(" = ", rhs)
         case TypeDef(mods, name, tparams, rhs) =>
           printPosition(tree)
           print(symName(tree, name))
           printTypeParams(tparams); print(rhs)
       }
-    }
 
-    def printBlock(tree: Tree) {
+    def printBlock(tree: Tree) =
       tree match {
         case Block(_, _) =>
           print(tree)
         case _ =>
           printColumn(List(tree), "{", ";", "}")
       }
-    }
 
     private def symFn[T](tree: Tree, f: Symbol => T, orElse: => T): T = tree.symbol match {
-      case null | NoSymbol  => orElse
-      case sym              => f(sym)
+      case null | NoSymbol => orElse
+      case sym             => f(sym)
     }
     private def ifSym(tree: Tree, p: Symbol => Boolean) = symFn(tree, p, false)
 
-    def printOpt(prefix: String, tree: Tree) {
-      if (!tree.isEmpty) { print(prefix, tree) }
-    }
+    def printOpt(prefix: String, tree: Tree) = if (tree.nonEmpty) { print(prefix, tree) }
 
     def printModifiers(tree: Tree, mods: Modifiers): Unit = printFlags(
-       if (tree.symbol == NoSymbol) mods.flags else tree.symbol.flags, "" + (
-         if (tree.symbol == NoSymbol) mods.privateWithin
-         else if (tree.symbol.hasAccessBoundary) tree.symbol.privateWithin.name
-         else ""
+      if (tree.symbol == NoSymbol) mods.flags else tree.symbol.flags, "" + (
+        if (tree.symbol == NoSymbol) mods.privateWithin
+        else if (tree.symbol.hasAccessBoundary) tree.symbol.privateWithin.name
+        else ""
       )
     )
 
-    def printFlags(flags: Long, privateWithin: String) {
-      var mask: Long = if (settings.debug.value) -1L else PrintableFlags
+    def printFlags(flags: Long, privateWithin: String) = {
+      val mask: Long = if (settings.debug) -1L else PrintableFlags
       val s = flagsToString(flags & mask, privateWithin)
       if (s != "") print(s + " ")
     }
 
-    def printAnnotations(tree: Tree) {
+    def printAnnotations(tree: MemberDef) = {
       // SI-5885: by default this won't print annotations of not yet initialized symbols
       val annots = tree.symbol.annotations match {
-        case Nil  => tree.asInstanceOf[MemberDef].mods.annotations
+        case Nil  => tree.mods.annotations
         case anns => anns
       }
-      annots foreach (annot => print("@"+annot+" "))
+      annots foreach (annot => print(s"@$annot "))
     }
 
     private var currentOwner: Symbol = NoSymbol
     private var selectorType: Type = NoType
 
-    def printTree(tree: Tree) {
+    protected def printPackageDef(tree: PackageDef, separator: String) = {
+      val PackageDef(packaged, stats) = tree
+      printAnnotations(tree)
+      print("package ", packaged); printColumn(stats, " {", separator, "}")
+    }
+
+    protected def printValDef(tree: ValDef, resultName: => String)(printTypeSignature: => Unit)(printRhs: => Unit) = {
+      val ValDef(mods, name, tp, rhs) = tree
+      printAnnotations(tree)
+      printModifiers(tree, mods)
+      print(if (mods.isMutable) "var " else "val ", resultName)
+      printTypeSignature
+      printRhs
+    }
+
+    protected def printDefDef(tree: DefDef, resultName: => String)(printTypeSignature: => Unit)(printRhs: => Unit) = {
+      val DefDef(mods, name, tparams, vparamss, tp, rhs) = tree
+      printAnnotations(tree)
+      printModifiers(tree, mods)
+      print("def " + resultName)
+      printTypeParams(tparams);
+      vparamss foreach {printValueParams(_)}
+      printTypeSignature
+      printRhs
+    }
+
+    protected def printTypeDef(tree: TypeDef, resultName: => String) = {
+      val TypeDef(mods, name, tparams, rhs) = tree
+      if (mods hasFlag (PARAM | DEFERRED)) {
+        printAnnotations(tree)
+        printModifiers(tree, mods)
+        print("type ")
+        printParam(tree)
+      } else {
+        printAnnotations(tree)
+        printModifiers(tree, mods)
+        print("type " + resultName)
+        printTypeParams(tparams)
+        printOpt(" = ", rhs)
+      }
+    }
+
+    protected def printImport(tree: Import, resSelect: => String) = {
+      val Import(expr, selectors) = tree
+      // Is this selector renaming a name (i.e., {name1 => name2})
+      def isNotRename(s: ImportSelector): Boolean =
+        s.name == nme.WILDCARD || s.name == s.rename
+
+      def selectorToString(s: ImportSelector): String = {
+        val from = quotedName(s.name)
+        if (isNotRename(s)) from
+        else from + "=>" + quotedName(s.rename)
+      }
+      print("import ", resSelect, ".")
+      selectors match {
+        case List(s) =>
+          // If there is just one selector and it is not renaming a name, no braces are needed
+          if (isNotRename(s)) print(selectorToString(s))
+          else print("{", selectorToString(s), "}")
+        // If there is more than one selector, braces are always needed
+        case many =>
+          print(many.map(selectorToString).mkString("{", ", ", "}"))
+      }
+    }
+
+    protected def printCaseDef(tree: CaseDef) = {
+      val CaseDef(pat, guard, body) = tree
+      print("case ")
+      def patConstr(pat: Tree): Tree = pat match {
+        case Apply(fn, args) => patConstr(fn)
+        case _ => pat
+      }
+
+      print(pat); printOpt(" if ", guard)
+      print(" => ", body)
+    }
+
+    protected def printFunction(tree: Function)(printValueParams: => Unit) = {
+      val Function(vparams, body) = tree
+      print("(");
+      printValueParams
+      print(" => ", body, ")")
+      if (printIds && tree.symbol != null)
+        comment{
+          print("#" + tree.symbol.id)
+        }
+
+      if (printOwners && tree.symbol != null)
+        comment{
+          print("@" + tree.symbol.owner.id)
+        }
+    }
+
+    protected def printSuper(tree: Super, resultName: => String, checkSymbol: Boolean = true) = {
+      val Super(This(qual), mix) = tree
+      if (qual.nonEmpty || (checkSymbol && tree.symbol != NoSymbol)) print(resultName + ".")
+      print("super")
+      if (mix.nonEmpty) print(s"[$mix]")
+    }
+
+    protected def printThis(tree: This, resultName: => String) = {
+      val This(qual) = tree
+      if (qual.nonEmpty) print(resultName + ".")
+      print("this")
+    }
+
+    protected def printBlock(stats: List[Tree], expr: Tree) =
+      printColumn(stats ::: List(expr), "{", ";", "}")
+    
+    def printTree(tree: Tree) = {
       tree match {
         case EmptyTree =>
           print("<empty>")
 
-        case ClassDef(mods, name, tparams, impl) =>
-          printAnnotations(tree)
+        case cd @ ClassDef(mods, name, tparams, impl) =>
+          printAnnotations(cd)
           printModifiers(tree, mods)
           val word =
             if (mods.isTrait) "trait"
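
The hunk above mostly factors the old imperative printing into small reusable pieces
(printPackageDef, printValDef, printDefDef, parenthesize, comment, and so on) that the
CodePrinter added further down can override. The public row/column helpers keep their
old behaviour; a rough sketch of what they emit, assuming `printer` came from
newTreePrinter over a StringWriter and the Ident trees belong to the same universe:

    printer.printRow(List(Ident(TermName("a")), Ident(TermName("b"))), "(", ", ", ")")
    // emits: (a, b)

    printer.printColumn(List(Ident(TermName("a")), Ident(TermName("b"))), "{", ";", "}")
    // emits the same trees one per line, indented, between '{' and '}'
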
@@ -202,85 +331,49 @@ trait Printers extends api.Printers { self: SymbolTable =>
           printTypeParams(tparams)
           print(if (mods.isDeferred) " <: " else " extends ", impl)
 
-        case PackageDef(packaged, stats) =>
-          printAnnotations(tree)
-          print("package ", packaged); printColumn(stats, " {", ";", "}")
+        case pd @ PackageDef(packaged, stats) =>
+          printPackageDef(pd, ";")
 
-        case ModuleDef(mods, name, impl) =>
-          printAnnotations(tree)
-          printModifiers(tree, mods);
+        case md @ ModuleDef(mods, name, impl) =>
+          printAnnotations(md)
+          printModifiers(tree, mods)
           print("object " + symName(tree, name), " extends ", impl)
 
-        case ValDef(mods, name, tp, rhs) =>
-          printAnnotations(tree)
-          printModifiers(tree, mods)
-          print(if (mods.isMutable) "var " else "val ", symName(tree, name))
-          printOpt(": ", tp)
-          if (!mods.isDeferred)
-            print(" = ", if (rhs.isEmpty) "_" else rhs)
+        case vd @ ValDef(mods, name, tp, rhs) =>
+          printValDef(vd, symName(tree, name))(printOpt(": ", tp)) {
+            if (!mods.isDeferred) print(" = ", if (rhs.isEmpty) "_" else rhs)
+          }
 
-        case DefDef(mods, name, tparams, vparamss, tp, rhs) =>
-          printAnnotations(tree)
-          printModifiers(tree, mods)
-          print("def " + symName(tree, name))
-          printTypeParams(tparams); vparamss foreach printValueParams
-          printOpt(": ", tp); printOpt(" = ", rhs)
+        case dd @ DefDef(mods, name, tparams, vparamss, tp, rhs) =>
+          printDefDef(dd, symName(tree, name))(printOpt(": ", tp))(printOpt(" = ", rhs))
 
-        case TypeDef(mods, name, tparams, rhs) =>
-          if (mods hasFlag (PARAM | DEFERRED)) {
-            printAnnotations(tree)
-            printModifiers(tree, mods); print("type "); printParam(tree)
-          } else {
-            printAnnotations(tree)
-            printModifiers(tree, mods); print("type " + symName(tree, name))
-            printTypeParams(tparams); printOpt(" = ", rhs)
-          }
+        case td @ TypeDef(mods, name, tparams, rhs) =>
+          printTypeDef(td, symName(tree, name))
 
         case LabelDef(name, params, rhs) =>
           print(symName(tree, name)); printLabelParams(params); printBlock(rhs)
 
-        case Import(expr, selectors) =>
-          // Is this selector remapping a name (i.e, {name1 => name2})
-          def isNotRemap(s: ImportSelector) : Boolean = (s.name == nme.WILDCARD || s.name == s.rename)
-          def selectorToString(s: ImportSelector): String = {
-            val from = quotedName(s.name)
-            if (isNotRemap(s)) from
-            else from + "=>" + quotedName(s.rename)
-          }
-          print("import ", backquotedPath(expr), ".")
-          selectors match {
-            case List(s) =>
-              // If there is just one selector and it is not remapping a name, no braces are needed
-              if (isNotRemap(s)) print(selectorToString(s))
-              else print("{", selectorToString(s), "}")
-              // If there is more than one selector braces are always needed
-            case many =>
-              print(many.map(selectorToString).mkString("{", ", ", "}"))
-          }
+        case imp @ Import(expr, _) =>
+          printImport(imp, backquotedPath(expr))
 
-       case Template(parents, self, body) =>
+        case Template(parents, self, body) =>
           val currentOwner1 = currentOwner
           if (tree.symbol != NoSymbol) currentOwner = tree.symbol.owner
-//          if (parents exists isReferenceToAnyVal) {
-//            print("AnyVal")
-//          }
-//          else {
           printRow(parents, " with ")
-          if (!body.isEmpty) {
+          if (body.nonEmpty) {
             if (self.name != nme.WILDCARD) {
               print(" { ", self.name); printOpt(": ", self.tpt); print(" => ")
-            } else if (!self.tpt.isEmpty) {
+            } else if (self.tpt.nonEmpty) {
               print(" { _ : ", self.tpt, " => ")
             } else {
               print(" {")
             }
             printColumn(body, "", ";", "}")
           }
-//          }
           currentOwner = currentOwner1
 
         case Block(stats, expr) =>
-          printColumn(stats ::: List(expr), "{", ";", "}")
+          printBlock(stats, expr)
 
         case Match(selector, cases) =>
           val selectorType1 = selectorType
@@ -288,18 +381,8 @@ trait Printers extends api.Printers { self: SymbolTable =>
           print(selector); printColumn(cases, " match {", "", "}")
           selectorType = selectorType1
 
-        case CaseDef(pat, guard, body) =>
-          print("case ")
-          def patConstr(pat: Tree): Tree = pat match {
-            case Apply(fn, args) => patConstr(fn)
-            case _ => pat
-          }
-          if (showOuterTests &&
-              needsOuterTest(
-                patConstr(pat).tpe.finalResultType, selectorType, currentOwner))
-            print("???")
-          print(pat); printOpt(" if ", guard)
-          print(" => ", body)
+        case cd @ CaseDef(pat, guard, body) =>
+          printCaseDef(cd)
 
         case Alternative(trees) =>
           printRow(trees, "(", "| ", ")")
@@ -316,9 +399,8 @@ trait Printers extends api.Printers { self: SymbolTable =>
         case ArrayValue(elemtpt, trees) =>
           print("Array[", elemtpt); printRow(trees, "]{", ", ", "}")
 
-        case Function(vparams, body) =>
-          print("("); printValueParams(vparams); print(" => ", body, ")")
-          if (printIds && tree.symbol != null) print("#"+tree.symbol.id)
+        case f @ Function(vparams, body) =>
+          printFunction(f)(printValueParams(vparams))
 
         case Assign(lhs, rhs) =>
           print(lhs, " = ", rhs)
@@ -327,10 +409,10 @@ trait Printers extends api.Printers { self: SymbolTable =>
           print(lhs, " = ", rhs)
 
         case If(cond, thenp, elsep) =>
-          print("if (", cond, ")"); indent; println()
-          print(thenp); undent
-          if (!elsep.isEmpty) {
-            println(); print("else"); indent; println(); print(elsep); undent
+          print("if (", cond, ")"); indent(); println()
+          print(thenp); undent()
+          if (elsep.nonEmpty) {
+            println(); print("else"); indent(); println(); print(elsep); undent()
           }
 
         case Return(expr) =>
@@ -338,7 +420,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
 
         case Try(block, catches, finalizer) =>
           print("try "); printBlock(block)
-          if (!catches.isEmpty) printColumn(catches, " catch {", "", "}")
+          if (catches.nonEmpty) printColumn(catches, " catch {", "", "}")
           printOpt(" finally ", finalizer)
 
         case Throw(expr) =>
@@ -360,22 +442,18 @@ trait Printers extends api.Printers { self: SymbolTable =>
           print("<apply-dynamic>(", qual, "#", tree.symbol.nameString)
           printRow(vargs, ", (", ", ", "))")
 
-        case Super(This(qual), mix) =>
-          if (!qual.isEmpty || tree.symbol != NoSymbol) print(symName(tree, qual) + ".")
-          print("super")
-          if (!mix.isEmpty)
-            print("[" + mix + "]")
+        case st @ Super(This(qual), mix) =>
+          printSuper(st, symName(tree, qual))
 
         case Super(qual, mix) =>
           print(qual, ".super")
-          if (!mix.isEmpty)
+          if (mix.nonEmpty)
             print("[" + mix + "]")
 
-        case This(qual) =>
-          if (!qual.isEmpty) print(symName(tree, qual) + ".")
-          print("this")
+        case th @ This(qual) =>
+          printThis(th, symName(tree, qual))
 
-        case Select(qual @ New(tpe), name) if (!settings.debug.value) =>
+        case Select(qual: New, name) if !settings.debug =>
           print(qual)
 
         case Select(qualifier, name) =>
@@ -389,7 +467,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
           print(x.escapedStringValue)
 
         case tt: TypeTree =>
-          if ((tree.tpe eq null) || (doPrintPositions && tt.original != null)) {
+          if ((tree.tpe eq null) || (printPositions && tt.original != null)) {
             if (tt.original != null) print("<type: ", tt.original, ">")
             else print("<type ?>")
           } else if ((tree.tpe.typeSymbol ne null) && tree.tpe.typeSymbol.isAnonymousClass) {
@@ -398,10 +476,10 @@ trait Printers extends api.Printers { self: SymbolTable =>
             print(tree.tpe.toString)
           }
 
-        case Annotated(Apply(Select(New(tpt), nme.CONSTRUCTOR), args), tree) =>
+        case an @ Annotated(Apply(Select(New(tpt), nme.CONSTRUCTOR), args), tree) =>
           def printAnnot() {
             print("@", tpt)
-            if (!args.isEmpty)
+            if (args.nonEmpty)
               printRow(args, "(", ",", ")")
           }
           print(tree, if (tree.isType) " " else ": ")
@@ -420,24 +498,29 @@ trait Printers extends api.Printers { self: SymbolTable =>
           print(tp); printRow(args, "[", ", ", "]")
 
         case TypeBoundsTree(lo, hi) =>
-          printOpt(" >: ", lo); printOpt(" <: ", hi)
+          // Avoid printing noisy empty typebounds everywhere
+          // Untyped empty bounds are not printed by printOpt,
+          // but after they are typed we have to exclude Nothing/Any.
+          if ((lo.tpe eq null) || !(lo.tpe =:= definitions.NothingTpe))
+            printOpt(" >: ", lo)
+
+          if ((hi.tpe eq null) || !(hi.tpe =:= definitions.AnyTpe))
+            printOpt(" <: ", hi)
 
         case ExistentialTypeTree(tpt, whereClauses) =>
-          print(tpt);
+          print(tpt)
           printColumn(whereClauses, " forSome { ", ";", "}")
 
-// SelectFromArray is no longer visible in reflect.internal.
-// eliminated until we figure out what we will do with both Printers and
-// SelectFromArray.
-//          case SelectFromArray(qualifier, name, _) =>
-//          print(qualifier); print(".<arr>"); print(symName(tree, name))
+        // SelectFromArray is no longer visible in scala.reflect.internal.
+        // eliminated until we figure out what we will do with both Printers and
+        // SelectFromArray.
+        // case SelectFromArray(qualifier, name, _) =>
+        //   print(qualifier); print(".<arr>"); print(symName(tree, name))
 
         case tree =>
           xprintTree(this, tree)
       }
-      if (printTypes && tree.isTerm && !tree.isEmpty) {
-        print("{", if (tree.tpe eq null) "<null>" else tree.tpe.toString, "}")
-      }
+      printTypesInfo(tree)
     }
 
     def print(args: Any*): Unit = args foreach {
@@ -451,10 +534,570 @@ trait Printers extends api.Printers { self: SymbolTable =>
     }
   }
 
+  // the printer used for AST-based code generation
+  class CodePrinter(out: PrintWriter, printRootPkg: Boolean) extends TreePrinter(out) {
+    protected val parentsStack = scala.collection.mutable.Stack[Tree]()
+
+    protected def currentTree = if (parentsStack.nonEmpty) Some(parentsStack.top) else None
+
+    protected def currentParent = if (parentsStack.length > 1) Some(parentsStack(1)) else None
+
+    protected def printedName(name: Name, decoded: Boolean = true) = {
+      import Chars._
+      val decName = name.decoded
+      val bslash = '\\'
+      val brackets = List('[',']','(',')','{','}')
+
+      def addBackquotes(s: String) =
+        if (decoded && (decName.exists(ch => brackets.contains(ch) || isWhitespace(ch)) ||
+          (name.isOperatorName && decName.exists(isOperatorPart) && decName.exists(isScalaLetter) && !decName.contains(bslash))))
+          s"`$s`" else s
+
+      if (name == nme.CONSTRUCTOR) "this"
+      else addBackquotes(quotedName(name, decoded))
+    }
+
+    protected def isIntLitWithDecodedOp(qual: Tree, name: Name) = {
+      val qualIsIntLit = qual match {
+        case Literal(Constant(x: Int)) => true
+        case _ => false
+      }
+      qualIsIntLit && name.isOperatorName
+    }
+
+    override protected val commentsRequired = true
+
+    protected def needsParentheses(parent: Tree)(insideIf: Boolean = true, insideMatch: Boolean = true, insideTry: Boolean = true,
+        insideAnnotated: Boolean = true, insideBlock: Boolean = true, insideLabelDef: Boolean = true, insideAssign: Boolean = true) = {
+      parent match {
+        case _: If => insideIf
+        case _: Match => insideMatch
+        case _: Try => insideTry
+        case _: Annotated => insideAnnotated
+        case _: Block => insideBlock
+        case _: LabelDef => insideLabelDef
+        case _: Assign => insideAssign
+        case _ => false
+      }
+    }
+
+    protected def checkForBlank(cond: Boolean) = if (cond) " " else ""
+    protected def blankForOperatorName(name: Name) = checkForBlank(name.isOperatorName)
+    protected def blankForName(name: Name) = checkForBlank(name.isOperatorName || name.endsWith("_"))
+
+    protected def resolveSelect(t: Tree): String = {
+      t match {
+        // case for: 1) (if (a) b else c).meth1.meth2 or 2) 1 + 5 should be represented as (1).+(5)
+        case Select(qual, name) if (name.isTermName && needsParentheses(qual)(insideLabelDef = false)) || isIntLitWithDecodedOp(qual, name) => s"(${resolveSelect(qual)}).${printedName(name)}"
+        case Select(qual, name) if name.isTermName => s"${resolveSelect(qual)}.${printedName(name)}"
+        case Select(qual, name) if name.isTypeName => s"${resolveSelect(qual)}#${blankForOperatorName(name)}%${printedName(name)}"
+        case Ident(name) => printedName(name)
+        case _ => render(t, new CodePrinter(_, printRootPkg))
+      }
+    }
+
+    protected def emptyTree(tree: Tree) = tree match {
+      case EmptyTree | build.SyntacticEmptyTypeTree() => true
+      case _ => false
+    }
+
+    protected def originalTypeTrees(trees: List[Tree]) =
+      trees.filter(!emptyTree(_)) map {
+        case tt: TypeTree => tt.original
+        case tree => tree
+      }
+
+    val defaultClasses = List(tpnme.AnyRef)
+    val defaultTraitsForCase = List(tpnme.Product, tpnme.Serializable)
+    protected def removeDefaultTypesFromList(trees: List[Tree])(classesToRemove: List[Name] = defaultClasses)(traitsToRemove: List[Name]) = {
+      def removeDefaultTraitsFromList(trees: List[Tree], traitsToRemove: List[Name]): List[Tree] =
+        trees match {
+          case Nil => trees
+          case init :+ last => last match {
+            case Select(Ident(sc), name) if traitsToRemove.contains(name) && sc == nme.scala_ =>
+              removeDefaultTraitsFromList(init, traitsToRemove)
+            case _ => trees
+          }
+        }
+
+      removeDefaultTraitsFromList(removeDefaultClassesFromList(trees, classesToRemove), traitsToRemove)
+    }
+
+    protected def removeDefaultClassesFromList(trees: List[Tree], classesToRemove: List[Name] = defaultClasses) = 
+      originalTypeTrees(trees) filter {
+        case Select(Ident(sc), name) => !(classesToRemove.contains(name) && sc == nme.scala_)
+        case _ => true
+      }
+
+    protected def syntheticToRemove(tree: Tree) = 
+      tree match {
+        case _: ValDef | _: TypeDef => false // don't remove ValDef and TypeDef
+        case md: MemberDef if md.mods.isSynthetic => true
+        case _ => false
+      }
+
+    override def printOpt(prefix: String, tree: Tree) = 
+      if (!emptyTree(tree)) super.printOpt(prefix, tree)
+
+    override def printColumn(ts: List[Tree], start: String, sep: String, end: String) = {
+      super.printColumn(ts.filter(!syntheticToRemove(_)), start, sep, end)
+    }
+
+    def printFlags(mods: Modifiers, primaryCtorParam: Boolean = false): Unit = {
+      val base = AccessFlags | OVERRIDE | ABSTRACT | FINAL | SEALED | LAZY
+      val mask = if (primaryCtorParam) base else base | IMPLICIT
+
+      val s = mods.flagString(mask)
+      if (s != "") print(s"$s ")
+      // case flag should be the last
+      if (mods.isCase) print(mods.flagBitsToString(CASE) + " ")
+      if (mods.isAbstractOverride) print("abstract override ")
+    }
+
+    override def printModifiers(tree: Tree, mods: Modifiers): Unit = printModifiers(mods, primaryCtorParam = false)
+
+    def printModifiers(mods: Modifiers, primaryCtorParam: Boolean): Unit = {
+      def modsAccepted = List(currentTree, currentParent) exists (_ map {
+        case _: ClassDef | _: ModuleDef | _: Template | _: PackageDef => true
+        case _ => false
+      } getOrElse false)
+
+      if (currentParent.isEmpty || modsAccepted)
+        printFlags(mods, primaryCtorParam)
+      else
+        List(IMPLICIT, CASE, LAZY, SEALED).foreach{flag => if (mods.hasFlag(flag)) print(s"${mods.flagBitsToString(flag)} ")}
+    }
+
+    def printParam(tree: Tree, primaryCtorParam: Boolean): Unit =
+      tree match {
+        case vd @ ValDef(mods, name, tp, rhs) =>
+          printPosition(tree)
+          printAnnotations(vd)
+          val mutableOrOverride = mods.isOverride || mods.isMutable
+          val hideCtorMods = mods.isParamAccessor && mods.isPrivateLocal && !mutableOrOverride
+          val hideCaseCtorMods = mods.isCaseAccessor && mods.isPublic && !mutableOrOverride
+
+          if (primaryCtorParam && !(hideCtorMods || hideCaseCtorMods)) {
+            printModifiers(mods, primaryCtorParam)
+            print(if (mods.isMutable) "var " else "val ");
+          }
+          print(printedName(name), blankForName(name));
+          printOpt(": ", tp);
+          printOpt(" = ", rhs)
+        case TypeDef(_, name, tparams, rhs) =>
+          printPosition(tree)
+          print(printedName(name))
+          printTypeParams(tparams);
+          print(rhs)
+        case _ =>
+          super.printParam(tree)
+      }
+
+    override def printParam(tree: Tree): Unit = {
+      printParam(tree, primaryCtorParam = false)
+    }
+
+    protected def printArgss(argss: List[List[Tree]]) =
+      argss foreach {x: List[Tree] => if (!(x.isEmpty && argss.size == 1)) printRow(x, "(", ", ", ")")}
+
+    override def printAnnotations(tree: MemberDef) = {
+      val annots = tree.mods.annotations
+      annots foreach {annot => printAnnot(annot); print(" ")}
+    }
+
+    protected def printAnnot(tree: Tree) = {
+      tree match {
+        case treeInfo.Applied(core, _, argss) =>
+          print("@")
+          core match {
+            case Select(New(tree), _) => print(tree)
+            case _ =>
+          }
+          printArgss(argss)
+        case _ => super.printTree(tree)
+      }
+    }
+
+    override def printTree(tree: Tree): Unit = {
+      parentsStack.push(tree)
+      try {
+        processTreePrinting(tree);
+        printTypesInfo(tree)
+      } finally parentsStack.pop()
+    }
+
+    def processTreePrinting(tree: Tree): Unit = {
+      tree match {
+        // skip synthetic trees; ValDef and TypeDef are exempted by syntheticToRemove
+        case _ if syntheticToRemove(tree) =>
+
+        case cl @ ClassDef(mods, name, tparams, impl) =>
+          if (mods.isJavaDefined) super.printTree(cl)
+          printAnnotations(cl)
+          // traits
+          val clParents: List[Tree] = if (mods.isTrait) {
+            // avoid abstract modifier for traits
+            printModifiers(tree, mods &~ ABSTRACT)
+            print("trait ", printedName(name))
+            printTypeParams(tparams)
+
+            val build.SyntacticTraitDef(_, _, _, _, parents, _, _) = tree
+            parents
+          // classes
+          } else {
+            printModifiers(tree, mods)
+            print("class ", printedName(name))
+            printTypeParams(tparams)
+
+            val build.SyntacticClassDef(_, _, _, ctorMods, vparamss, earlyDefs, parents, selfType, body) = cl
+
+            // constructor's modifier
+            if (ctorMods.hasFlag(AccessFlags)) {
+              print(" ")
+              printModifiers(ctorMods, primaryCtorParam = false)
+            }
+
+            def printConstrParams(ts: List[ValDef]): Unit = {
+              parenthesize() {
+                printImplicitInParamsList(ts)
+                printSeq(ts)(printParam(_, primaryCtorParam = true))(print(", "))
+              }
+            }
+            // constructor's params processing (don't print single empty constructor param list)
+            vparamss match {
+              case Nil | List(Nil) if (!mods.isCase && !ctorMods.hasFlag(AccessFlags)) =>
+              case _ => vparamss foreach printConstrParams
+            }
+            parents
+          }
+
+          // get trees without default classes and traits (when they are last)
+          val printedParents = removeDefaultTypesFromList(clParents)()(if (mods.hasFlag(CASE)) defaultTraitsForCase else Nil)
+          print(if (mods.isDeferred) "<: " else if (printedParents.nonEmpty) " extends " else "", impl)
+
+        case pd @ PackageDef(packaged, stats) =>
+          packaged match {
+            case Ident(name) if name == nme.EMPTY_PACKAGE_NAME =>
+              printSeq(stats) {
+                print(_)
+              } {
+                println()
+                println()
+              };
+            case _ =>
+              printPackageDef(pd, scala.util.Properties.lineSeparator)
+          }
+
+        case md @ ModuleDef(mods, name, impl) =>
+          printAnnotations(md)
+          printModifiers(tree, mods)
+          val Template(parents, self, methods) = impl
+          val parWithoutAnyRef = removeDefaultClassesFromList(parents)
+          print("object " + printedName(name), if (parWithoutAnyRef.nonEmpty) " extends " else "", impl)
+
+        case vd @ ValDef(mods, name, tp, rhs) =>
+          printValDef(vd, printedName(name)) {
+            // place space after symbolic def name (val *: Unit does not compile)
+            printOpt(s"${blankForName(name)}: ", tp)
+          } {
+            if (!mods.isDeferred) print(" = ", if (rhs.isEmpty) "_" else rhs)
+          }
+
+        case dd @ DefDef(mods, name, tparams, vparamss, tp, rhs) =>
+          printDefDef(dd, printedName(name)) {
+            if (tparams.isEmpty && (vparamss.isEmpty || vparamss(0).isEmpty)) print(blankForName(name))
+            printOpt(": ", tp)
+          } {
+            printOpt(" = " + (if (mods.isMacro) "macro " else ""), rhs)
+          }
+
+        case td @ TypeDef(mods, name, tparams, rhs) =>
+          printTypeDef(td, printedName(name))
+
+        case LabelDef(name, params, rhs) =>
+          if (name.startsWith(nme.WHILE_PREFIX)) {
+            val If(cond, thenp, elsep) = rhs
+            print("while (", cond, ") ")
+            val Block(list, wh) = thenp
+            printColumn(list, "", ";", "")
+          } else if (name.startsWith(nme.DO_WHILE_PREFIX)) {
+            val Block(bodyList, ifCond @ If(cond, thenp, elsep)) = rhs
+            print("do ")
+            printColumn(bodyList, "", ";", "")
+            print(" while (", cond, ") ")
+          } else {
+            print(printedName(name)); printLabelParams(params);
+            printBlock(rhs)
+          }
+
+        case imp @ Import(expr, _) =>
+          printImport(imp, resolveSelect(expr))
+
+        case t @ Template(parents, self, tbody) =>
+          val body = treeInfo.untypecheckedTemplBody(t)
+          val printedParents =
+            currentParent map {
+              case _: CompoundTypeTree => parents
+              case ClassDef(mods, name, _, _) if mods.isCase => removeDefaultTypesFromList(parents)()(List(tpnme.Product, tpnme.Serializable))
+              case _ => removeDefaultClassesFromList(parents)
+            } getOrElse (parents)
+
+          val primaryCtr = treeInfo.firstConstructor(body)
+          val ap: Option[Apply] = primaryCtr match {
+              case DefDef(_, _, _, _, _, Block(ctBody, _)) =>
+                val earlyDefs = treeInfo.preSuperFields(ctBody) ::: body.filter {
+                  case td: TypeDef => treeInfo.isEarlyDef(td)
+                  case _ => false
+                }
+                if (earlyDefs.nonEmpty) {
+                  print("{")
+                  printColumn(earlyDefs, "", ";", "")
+                  print("} " + (if (printedParents.nonEmpty) "with " else ""))
+                }
+                ctBody collectFirst {
+                  case apply: Apply => apply
+                }
+              case _ => None
+            }
+
+          if (printedParents.nonEmpty) {
+            val (clParent :: traits) = printedParents
+            print(clParent)
+
+            val constrArgss = ap match {
+              case Some(treeInfo.Applied(_, _, argss)) => argss
+              case _ => Nil
+            }
+            printArgss(constrArgss)
+            if (traits.nonEmpty) {
+              printRow(traits, " with ", " with ", "")
+            }
+          }
+          /* Remove the primary constructor def and the constructor val and var defs;
+           * `right` contains all constructors.
+           */
+          val (left, right) = body.filter {
+            // remove valdefs defined in constructor and presuper vals
+            case vd: ValDef => !vd.mods.isParamAccessor && !treeInfo.isEarlyValDef(vd)
+            // remove $this$ from traits
+            case dd: DefDef => dd.name != nme.MIXIN_CONSTRUCTOR
+            case td: TypeDef => !treeInfo.isEarlyDef(td)
+            case EmptyTree => false
+            case _ => true
+          } span {
+            case dd: DefDef => dd.name != nme.CONSTRUCTOR
+            case _ => true
+          }
+          val modBody = (left ::: right.drop(1))
+          val showBody = !(modBody.isEmpty && (self == noSelfType || self.isEmpty))
+          if (showBody) {
+            if (self.name != nme.WILDCARD) {
+              print(" { ", self.name);
+              printOpt(": ", self.tpt);
+              print(" =>")
+            } else if (self.tpt.nonEmpty) {
+              print(" { _ : ", self.tpt, " =>")
+            } else {
+              print(" {")
+            }
+            printColumn(modBody, "", ";", "}")
+          }
+
+        case bl @ Block(stats, expr) =>
+          printBlock(treeInfo.untypecheckedBlockBody(bl), expr)
+
+        case Match(selector, cases) =>
+          /* Insert braces if the match is nested inside another match.
+           * TODO: make this helper available to other cases,
+           * passing the required type for checking.
+           */
+          def insertBraces(body: => Unit): Unit =
+            if (parentsStack.nonEmpty && parentsStack.tail.exists(_.isInstanceOf[Match])) {
+              print("(")
+              body
+              print(")")
+            } else body
+
+          val printParentheses = needsParentheses(selector)(insideLabelDef = false)
+          tree match {
+            case Match(EmptyTree, cs) =>
+              printColumn(cases, "{", "", "}")
+            case _ =>
+              insertBraces {
+                parenthesize(printParentheses)(print(selector))
+                printColumn(cases, " match {", "", "}")
+              }
+          }
+
+        case cd @ CaseDef(pat, guard, body) =>
+          printCaseDef(cd)
+
+        case Star(elem) =>
+          print(elem, "*")
+
+        case Bind(name, t) =>
+          if (t == EmptyTree) print("(", printedName(name), ")")
+          else if (t.exists(_.isInstanceOf[Star])) print(printedName(name), " @ ", t)
+          else print("(", printedName(name), " @ ", t, ")")
+
+        case f @ Function(vparams, body) =>
+          // parentheses are not allowed for val a: Int => Int = implicit x => x
+          val printParentheses = vparams match {
+              case head :: _ => !head.mods.isImplicit
+              case _ => true
+            }
+          printFunction(f)(printValueParams(vparams, inParentheses = printParentheses))
+
+        case Typed(expr, tp) =>
+          def printTp = print("(", tp, ")")
+
+          tp match {
+            case EmptyTree | build.SyntacticEmptyTypeTree() => printTp
+            // case for untypechecked trees
+            case Annotated(annot, arg) if (expr ne null) && (arg ne null) && expr.equalsStructure(arg) => printTp // remove double arg - 5: 5: @unchecked
+            case tt: TypeTree if tt.original.isInstanceOf[Annotated] => printTp
+            case Function(List(), EmptyTree) => print("(", expr, " _)") //func _
+            // parentheses required when (a match {}) : Type
+            case _ => print("((", expr, "): ", tp, ")")
+          }
+
+        // print only fun when targs are TypeTrees with empty original
+        case TypeApply(fun, targs) =>
+          if (targs.exists(emptyTree(_))) {
+            print(fun)
+          } else super.printTree(tree)
+
+        case Apply(fun, vargs) =>
+          tree match {
+            // processing methods ending on colons (x \: list)
+            case Apply(Block(l1 @ List(sVD: ValDef), a1 @ Apply(Select(_, methodName), l2 @ List(Ident(iVDName)))), l3)
+              if sVD.mods.isSynthetic && treeInfo.isLeftAssoc(methodName) && sVD.name == iVDName =>
+              val printBlock = Block(l1, Apply(a1, l3))
+              print(printBlock)
+            case Apply(tree1, _) if (needsParentheses(tree1)(insideAnnotated = false)) =>
+              parenthesize()(print(fun)); printRow(vargs, "(", ", ", ")")
+            case _ => super.printTree(tree)
+          }
+
+        case UnApply(fun, args) =>
+          fun match {
+            case treeInfo.Unapplied(body) =>
+              body match {
+                case Select(qual, name) if name == nme.unapply  => print(qual)
+                case TypeApply(Select(qual, name), args) if name == nme.unapply || name == nme.unapplySeq =>
+                  print(TypeApply(qual, args))
+                case _ => print(body)
+              }
+            case _ => print(fun)
+          }
+          printRow(args, "(", ", ", ")")  
+
+        case st @ Super(This(qual), mix) =>
+          printSuper(st, printedName(qual), checkSymbol = false)
+
+        case th @ This(qual) =>
+          if (tree.hasExistingSymbol && tree.symbol.isPackage) print(tree.symbol.fullName)
+          else printThis(th, printedName(qual))
+
+        // remove this prefix from constructor invocation in typechecked trees: this.this -> this
+        case Select(This(_), name @ nme.CONSTRUCTOR) => print(printedName(name))
+
+        case Select(qual: New, name) =>
+          print(qual)
+
+        case Select(qual, name) =>
+          def checkRootPackage(tr: Tree): Boolean = 
+            (currentParent match { //check that Select is not for package def name
+              case Some(_: PackageDef) => false
+              case _ => true
+            }) && (tr match { // check that Select contains package
+              case Select(q, _) => checkRootPackage(q)
+              case _: Ident | _: This => val sym = tr.symbol
+                tr.hasExistingSymbol && sym.isPackage && sym.name != nme.ROOTPKG
+              case _ => false
+            })
+
+          if (printRootPkg && checkRootPackage(tree)) print(s"${printedName(nme.ROOTPKG)}.")
+          val printParentheses = needsParentheses(qual)(insideAnnotated = false) || isIntLitWithDecodedOp(qual, name)
+          if (printParentheses) print("(", resolveSelect(qual), ").", printedName(name))
+          else print(resolveSelect(qual), ".", printedName(name))
+
+        case id @ Ident(name) =>
+          if (name.nonEmpty) {
+            if (name == nme.dollarScope) {
+              print(s"scala.xml.${nme.TopScope}")
+            } else {
+              val str = printedName(name)
+              val strIsBackquoted = str.startsWith("`") && str.endsWith("`")
+              print(if (id.isBackquoted && !strIsBackquoted) "`" + str + "`" else str)
+            }
+          } else {
+            print("")
+          }
+
+        case l @ Literal(x) =>
+          import Chars.LF
+          x match {
+            case Constant(v: String) if {
+              val strValue = x.stringValue
+              strValue.contains(LF) && strValue.contains("\"\"\"") && strValue.size > 1
+            } =>
+              val splitValue = x.stringValue.split(s"$LF").toList
+              val multilineStringValue = if (x.stringValue.endsWith(s"$LF")) splitValue :+ "" else splitValue
+              val trQuotes = "\"\"\""
+              print(trQuotes); printSeq(multilineStringValue) { print(_) } { print(LF) }; print(trQuotes)
+            case _ =>
+              // processing Float constants
+              val printValue = x.escapedStringValue + (if (x.value.isInstanceOf[Float]) "F" else "")
+              print(printValue)
+          }
+
+        case an @ Annotated(ap, tree) =>
+          val printParentheses = needsParentheses(tree)()
+          parenthesize(printParentheses) { print(tree) }; print(if (tree.isType) " " else ": ")
+          printAnnot(ap)
+
+        case SelectFromTypeTree(qualifier, selector) =>
+          print("(", qualifier, ")#", blankForOperatorName(selector), printedName(selector))
+
+        case tt: TypeTree =>
+          if (!emptyTree(tt)) print(tt.original)
+
+        case AppliedTypeTree(tp, args) =>
+          // it's possible to have (=> String) => String type but Function1[=> String, String] is not correct
+          val containsByNameTypeParam = args exists treeInfo.isByNameParamType
+
+          if (containsByNameTypeParam) {
+            print("(")
+            printRow(args.init, "(", ", ", ")")
+            print(" => ", args.last, ")")
+          } else {
+            if (treeInfo.isRepeatedParamType(tree) && args.nonEmpty) {
+              print(args(0), "*")
+            } else if (treeInfo.isByNameParamType(tree)) {
+              print("=> ", if (args.isEmpty) "()" else args(0))
+            } else
+              super.printTree(tree)
+          }
+
+        case ExistentialTypeTree(tpt, whereClauses) =>
+          print("(", tpt);
+          printColumn(whereClauses, " forSome { ", ";", "})")
+
+        case EmptyTree =>
+
+        case tree => super.printTree(tree)
+      }
+    }
+  }
+
   /** Hook for extensions */
   def xprintTree(treePrinter: TreePrinter, tree: Tree) =
     treePrinter.print(tree.productPrefix+tree.productIterator.mkString("(", ", ", ")"))
 
+  def newCodePrinter(writer: PrintWriter, tree: Tree, printRootPkg: Boolean): TreePrinter =
+    new CodePrinter(writer, printRootPkg)
+
   def newTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
   def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream))
   def newTreePrinter(): TreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter))
@@ -475,8 +1118,6 @@ trait Printers extends api.Printers { self: SymbolTable =>
   }
 
   def newRawTreePrinter(writer: PrintWriter): RawTreePrinter = new RawTreePrinter(writer)
-  def newRawTreePrinter(stream: OutputStream): RawTreePrinter = newRawTreePrinter(new PrintWriter(stream))
-  def newRawTreePrinter(): RawTreePrinter = newRawTreePrinter(new PrintWriter(ConsoleWriter))
 
   // provides footnotes for types and mirrors
   import scala.collection.mutable.{Map, WeakHashMap, SortedSet}
@@ -525,7 +1166,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
     private var depth = 0
     private var printTypesInFootnotes = true
     private var printingFootnotes = false
-    private var footnotes = footnoteIndex.mkFootnotes()
+    private val footnotes = footnoteIndex.mkFootnotes()
 
     def print(args: Any*): Unit = {
       // don't print type footnotes if the argument is a mere type
@@ -542,14 +1183,17 @@ trait Printers extends api.Printers { self: SymbolTable =>
           print(")")
         case EmptyTree =>
           print("EmptyTree")
-        case emptyValDef: AnyRef if emptyValDef eq self.emptyValDef =>
-          print("emptyValDef")
+        case self.noSelfType =>
+          print("noSelfType")
+        case self.pendingSuperCall =>
+          print("pendingSuperCall")
         case tree: Tree =>
-          val hasSymbol = tree.hasSymbol && tree.symbol != NoSymbol
-          val isError = hasSymbol && tree.symbol.name.toString == nme.ERROR.toString
+          def hasSymbolField = tree.hasSymbolField && tree.symbol != NoSymbol
+          val isError = hasSymbolField && (tree.symbol.name string_== nme.ERROR)
           printProduct(
             tree,
             preamble = _ => {
+              if (printPositions) print(tree.pos.show)
               print(tree.productPrefix)
               if (printTypes && tree.tpe != null) print(tree.tpe)
             },
@@ -559,7 +1203,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
                   if (isError) print("<")
                   print(name)
                   if (isError) print(": error>")
-                } else if (hasSymbol) {
+                } else if (hasSymbolField) {
                   tree match {
                     case refTree: RefTree =>
                       if (tree.symbol.name != refTree.name) print("[", tree.symbol, " aka ", refTree.name, "]")
@@ -590,6 +1234,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
           else if (sym.isStatic && (sym.isClass || sym.isModule)) print(sym.fullName)
           else print(sym.name)
           if (printIds) print("#", sym.id)
+          if (printOwners) print("@", sym.owner.id)
           if (printKinds) print("#", sym.abbreviatedKindString)
           if (printMirrors) print("%M", footnotes.put[scala.reflect.api.Mirror[_]](mirrorThatLoaded(sym)))
         case tag: TypeTag[_] =>
@@ -653,7 +1298,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
       print("(")
       val it = iterable.iterator
       while (it.hasNext) {
-        body(it.next)
+        body(it.next())
         print(if (it.hasNext) ", " else "")
       }
       print(")")
@@ -662,19 +1307,19 @@ trait Printers extends api.Printers { self: SymbolTable =>
   }
 
   def show(name: Name): String = name match {
-    case tpnme.WILDCARD => "tpnme.WILDCARD"
-    case tpnme.EMPTY => "tpnme.EMPTY"
-    case tpnme.ERROR => "tpnme.ERROR"
-    case tpnme.PACKAGE => "tpnme.PACKAGE"
-    case tpnme.WILDCARD_STAR => "tpnme.WILDCARD_STAR"
-    case nme.WILDCARD => "nme.WILDCARD"
-    case nme.EMPTY => "nme.EMPTY"
-    case nme.ERROR => "tpnme.ERROR"
-    case nme.PACKAGE => "nme.PACKAGE"
-    case nme.CONSTRUCTOR => "nme.CONSTRUCTOR"
-    case nme.ROOTPKG => "nme.ROOTPKG"
+    case tpnme.WILDCARD => "typeNames.WILDCARD"
+    case tpnme.EMPTY => "typeNames.EMPTY"
+    case tpnme.ERROR => "typeNames.ERROR"
+    case tpnme.PACKAGE => "typeNames.PACKAGE"
+    case tpnme.WILDCARD_STAR => "typeNames.WILDCARD_STAR"
+    case nme.WILDCARD => "termNames.WILDCARD"
+    case nme.EMPTY => "termNames.EMPTY"
+    case nme.ERROR => "termNames.ERROR"
+    case nme.PACKAGE => "termNames.PACKAGE"
+    case nme.CONSTRUCTOR => "termNames.CONSTRUCTOR"
+    case nme.ROOTPKG => "termNames.ROOTPKG"
     case _ =>
-      val prefix = if (name.isTermName) "newTermName(\"" else "newTypeName(\""
+      val prefix = if (name.isTermName) "TermName(\"" else "TypeName(\""
       prefix + name.toString + "\")"
   }
 
@@ -688,4 +1333,13 @@ trait Printers extends api.Printers { self: SymbolTable =>
       s_flags mkString " | "
     }
   }
+
+  def show(position: Position): String = {
+    position.show
+  }
+
+  def showDecl(sym: Symbol): String = {
+    if (!isCompilerUniverse) definitions.fullyInitializeSymbol(sym)
+    sym.defString
+  }
 }
diff --git a/src/reflect/scala/reflect/internal/PrivateWithin.scala b/src/reflect/scala/reflect/internal/PrivateWithin.scala
new file mode 100644
index 0000000..996f9c1
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/PrivateWithin.scala
@@ -0,0 +1,27 @@
+package scala
+package reflect
+package internal
+
+import java.lang.{ Class => jClass }
+import java.lang.reflect.{ Member => jMember }
+
+trait PrivateWithin {
+  self: SymbolTable =>
+
+  def propagatePackageBoundary(c: jClass[_], syms: Symbol*): Unit =
+    propagatePackageBoundary(JavaAccFlags(c), syms: _*)
+  def propagatePackageBoundary(m: jMember, syms: Symbol*): Unit =
+    propagatePackageBoundary(JavaAccFlags(m), syms: _*)
+  def propagatePackageBoundary(jflags: JavaAccFlags, syms: Symbol*) {
+    if (jflags.hasPackageAccessBoundary)
+      syms foreach setPackageAccessBoundary
+  }
+
+  // protected in java means package protected. #3946
+  // See ticket #1687 for an example of when the enclosing top level class is NoSymbol;
+  // it apparently occurs when processing v45.3 bytecode.
+  def setPackageAccessBoundary(sym: Symbol): Symbol = (
+    if (sym.enclosingTopLevelClass eq NoSymbol) sym
+    else sym setPrivateWithin sym.enclosingTopLevelClass.owner
+  )
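+
+  // Illustrative sketch (not part of the upstream sources): for a Java member with default
+  // (package) access, e.g. `class J { int f; }`, propagatePackageBoundary sets privateWithin
+  // of the corresponding Scala symbol to J's enclosing package, so the member stays
+  // inaccessible outside that package from Scala code as well.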
+}
diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala
new file mode 100644
index 0000000..ad8a259
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala
@@ -0,0 +1,1146 @@
+package scala
+package reflect
+package internal
+
+import Flags._
+import util._
+
+trait ReificationSupport { self: SymbolTable =>
+  import definitions._
+  import internal._
+
+  class ReificationSupportImpl extends ReificationSupportApi {
+    def selectType(owner: Symbol, name: String): TypeSymbol =
+      select(owner, newTypeName(name)).asType
+
+    def selectTerm(owner: Symbol, name: String): TermSymbol = {
+      val result = select(owner, newTermName(name)).asTerm
+      if (result.isOverloaded) result.suchThat(!_.isMethod).asTerm
+      else result
+    }
+
+    protected def select(owner: Symbol, name: Name): Symbol = {
+      val result = owner.info decl name
+      if (result ne NoSymbol) result
+      else
+        mirrorThatLoaded(owner).missingHook(owner, name) orElse {
+          throw new ScalaReflectionException("%s %s in %s not found".format(if (name.isTermName) "term" else "type", name, owner.fullName))
+        }
+    }
+
+    def selectOverloadedMethod(owner: Symbol, name: String, index: Int): MethodSymbol = {
+      val result = owner.info.decl(newTermName(name)).alternatives(index)
+      if (result ne NoSymbol) result.asMethod
+      else throw new ScalaReflectionException("overloaded method %s #%d in %s not found".format(name, index, owner.fullName))
+    }
+
+    def newFreeTerm(name: String, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
+      newFreeTermSymbol(newTermName(name), value, flags, origin).markFlagsCompleted(mask = AllFlags)
+
+    def newFreeType(name: String, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
+      newFreeTypeSymbol(newTypeName(name), flags, origin).markFlagsCompleted(mask = AllFlags)
+
+    def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: Long, isClass: Boolean): Symbol =
+      owner.newNestedSymbol(name, pos, flags, isClass).markFlagsCompleted(mask = AllFlags)
+
+    def newScopeWith(elems: Symbol*): Scope =
+      self.newScopeWith(elems: _*)
+
+    def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S =
+      sym.setAnnotations(annots)
+
+    def setInfo[S <: Symbol](sym: S, tpe: Type): S =
+      sym.setInfo(tpe).markAllCompleted()
+
+    def mkThis(sym: Symbol): Tree = self.This(sym)
+
+    def mkSelect(qualifier: Tree, sym: Symbol): Select = self.Select(qualifier, sym)
+
+    def mkIdent(sym: Symbol): Ident = self.Ident(sym)
+
+    def mkTypeTree(tp: Type): TypeTree = self.TypeTree(tp)
+
+    def ThisType(sym: Symbol): Type = self.ThisType(sym)
+
+    def SingleType(pre: Type, sym: Symbol): Type = self.SingleType(pre, sym)
+
+    def SuperType(thistpe: Type, supertpe: Type): Type = self.SuperType(thistpe, supertpe)
+
+    def ConstantType(value: Constant): ConstantType = self.ConstantType(value)
+
+    def TypeRef(pre: Type, sym: Symbol, args: List[Type]): Type = self.TypeRef(pre, sym, args)
+
+    def RefinedType(parents: List[Type], decls: Scope, typeSymbol: Symbol): RefinedType = self.RefinedType(parents, decls, typeSymbol)
+
+    def ClassInfoType(parents: List[Type], decls: Scope, typeSymbol: Symbol): ClassInfoType = self.ClassInfoType(parents, decls, typeSymbol)
+
+    def MethodType(params: List[Symbol], resultType: Type): MethodType = self.MethodType(params, resultType)
+
+    def NullaryMethodType(resultType: Type): NullaryMethodType = self.NullaryMethodType(resultType)
+
+    def PolyType(typeParams: List[Symbol], resultType: Type): PolyType = self.PolyType(typeParams, resultType)
+
+    def ExistentialType(quantified: List[Symbol], underlying: Type): ExistentialType = self.ExistentialType(quantified, underlying)
+
+    def AnnotatedType(annotations: List[Annotation], underlying: Type): AnnotatedType = self.AnnotatedType(annotations, underlying)
+
+    def TypeBounds(lo: Type, hi: Type): TypeBounds = self.TypeBounds(lo, hi)
+
+    def BoundedWildcardType(bounds: TypeBounds): BoundedWildcardType = self.BoundedWildcardType(bounds)
+
+    def thisPrefix(sym: Symbol): Type = sym.thisPrefix
+
+    def setType[T <: Tree](tree: T, tpe: Type): T = { tree.setType(tpe); tree }
+
+    def setSymbol[T <: Tree](tree: T, sym: Symbol): T = { tree.setSymbol(sym); tree }
+
+    def toStats(tree: Tree): List[Tree] = tree match {
+      case EmptyTree             => Nil
+      case SyntacticBlock(stats) => stats
+      case _                     => throw new IllegalArgumentException(s"can't flatten $tree")
+    }
+
+    def mkAnnotation(tree: Tree): Tree = tree match {
+      case SyntacticNew(Nil, SyntacticApplied(SyntacticAppliedType(_, _), _) :: Nil, noSelfType, Nil) =>
+        tree
+      case _ =>
+        throw new IllegalArgumentException(s"Tree ${showRaw(tree)} isn't a correct representation of annotation." +
+                                            """Consider reformatting it into a q"new $name[..$targs](...$argss)" shape""")
+    }
+
+    def mkAnnotation(trees: List[Tree]): List[Tree] = trees.map(mkAnnotation)
+
+    def mkParam(argss: List[List[Tree]], extraFlags: FlagSet = NoFlags, excludeFlags: FlagSet = DEFERRED): List[List[ValDef]] =
+      argss.map { args => args.map { mkParam(_, extraFlags, excludeFlags) } }
+
+    def mkParam(tree: Tree, extraFlags: FlagSet, excludeFlags: FlagSet): ValDef = tree match {
+      case Typed(Ident(name: TermName), tpt) =>
+        mkParam(ValDef(NoMods, name, tpt, EmptyTree), extraFlags, excludeFlags)
+      case vd: ValDef =>
+        var newmods = vd.mods & (~excludeFlags)
+        if (vd.rhs.nonEmpty) newmods |= DEFAULTPARAM
+        copyValDef(vd)(mods = newmods | extraFlags)
+      case _ =>
+        throw new IllegalArgumentException(s"$tree is not valid represenation of a parameter, " +
+                                            """consider reformatting it into q"val $name: $T = $default" shape""")
+    }
+
+    def mkImplicitParam(args: List[Tree]): List[ValDef] = args.map(mkImplicitParam)
+
+    def mkImplicitParam(tree: Tree): ValDef = mkParam(tree, IMPLICIT | PARAM, NoFlags)
+
+    def mkTparams(tparams: List[Tree]): List[TypeDef] =
+      tparams.map {
+        case td: TypeDef => copyTypeDef(td)(mods = (td.mods | PARAM) & (~DEFERRED))
+        case other => throw new IllegalArgumentException(s"can't splice $other as type parameter")
+      }
+
+    def mkRefineStat(stat: Tree): Tree = {
+      stat match {
+        case dd: DefDef => require(dd.rhs.isEmpty, "can't use DefDef with non-empty body as refine stat")
+        case vd: ValDef => require(vd.rhs.isEmpty, "can't use ValDef with non-empty rhs as refine stat")
+        case td: TypeDef =>
+        case _ => throw new IllegalArgumentException(s"not legal refine stat: $stat")
+      }
+      stat
+    }
+
+    def mkRefineStat(stats: List[Tree]): List[Tree] = stats.map(mkRefineStat)
+
+    def mkPackageStat(stat: Tree): Tree = {
+      stat match {
+        case cd: ClassDef =>
+        case md: ModuleDef =>
+        case pd: PackageDef =>
+        case _ => throw new IllegalArgumentException(s"not legal package stat: $stat")
+      }
+      stat
+    }
+
+    def mkPackageStat(stats: List[Tree]): List[Tree] = stats.map(mkPackageStat)
+
+    object ScalaDot extends ScalaDotExtractor {
+      def apply(name: Name): Tree = gen.scalaDot(name)
+      def unapply(tree: Tree): Option[Name] = tree match {
+        case Select(id @ Ident(nme.scala_), name) if id.symbol == ScalaPackage => Some(name)
+        case _ => None
+      }
+    }
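+
+    // Illustrative sketch (not part of the upstream sources): ScalaDot(TypeName("Any"))
+    // builds the tree for a `scala.Any` reference via gen.scalaDot, and unapply recognizes
+    // a `scala.X` Select whose qualifier symbol is the scala package.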
+
+    def mkEarlyDef(defn: Tree): Tree = defn match {
+      case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
+        copyValDef(vdef)(mods = mods | PRESUPER)
+      case tdef @ TypeDef(mods, _, _, _) =>
+        copyTypeDef(tdef)(mods = mods | PRESUPER)
+      case _ =>
+        throw new IllegalArgumentException(s"not legal early def: $defn")
+    }
+
+    def mkEarlyDef(defns: List[Tree]): List[Tree] = defns.map(mkEarlyDef)
+
+    def mkRefTree(qual: Tree, sym: Symbol) = self.RefTree(qual, sym.name) setSymbol sym
+
+    def freshTermName(prefix: String = nme.FRESH_TERM_NAME_PREFIX): TermName = self.freshTermName(prefix)
+
+    def freshTypeName(prefix: String): TypeName = self.freshTypeName(prefix)
+
+    protected implicit def fresh: FreshNameCreator = self.currentFreshNameCreator
+
+    object ImplicitParams extends ImplicitParamsExtractor {
+      def apply(paramss: List[List[Tree]], implparams: List[Tree]): List[List[Tree]] =
+        if (implparams.nonEmpty) paramss :+ mkImplicitParam(implparams) else paramss
+
+      def unapply(vparamss: List[List[ValDef]]): Some[(List[List[ValDef]], List[ValDef])] = vparamss match {
+        case init :+ (last @ (initlast :: _)) if initlast.mods.isImplicit => Some((init, last))
+        case _ => Some((vparamss, Nil))
+      }
+    }
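+
+    // Illustrative sketch (not part of the upstream sources): assuming a quasiquote context,
+    //   ImplicitParams(List(List(q"val x: Int")), List(q"val y: String"))
+    // appends one extra parameter list whose params carry IMPLICIT | PARAM, mirroring the
+    // shape of `def f(x: Int)(implicit y: String)`; unapply splits that list back off.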
+
+    object FlagsRepr extends FlagsReprExtractor {
+      def apply(bits: Long): FlagSet = bits
+      def unapply(flags: Long): Some[Long] = Some(flags)
+    }
+
+    /** Construct/deconstruct type application term trees.
+     *  Treats other term trees as zero-argument type applications.
+     */
+    object SyntacticTypeApplied extends SyntacticTypeAppliedExtractor {
+      def apply(tree: Tree, targs: List[Tree]): Tree =
+        if (targs.isEmpty) tree
+        else if (tree.isTerm) TypeApply(tree, targs)
+        else throw new IllegalArgumentException(s"can't apply type arguments to $tree")
+
+      def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match {
+        case TypeApply(fun, targs) => Some((fun, targs))
+        case _ if tree.isTerm      => Some((tree, Nil))
+        case _                     => None
+      }
+    }
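+
+    // Illustrative sketch (not part of the upstream sources): under the definitions above,
+    //   SyntacticTypeApplied(Ident(TermName("f")), List(Ident(TypeName("Int"))))
+    // produces TypeApply(Ident(TermName("f")), List(Ident(TypeName("Int")))), while
+    // unapply on a plain term tree yields that tree paired with an empty targs list.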
+
+    /** Construct/deconstruct applied type trees.
+     *  Treats other types as zero-arity applied types.
+     */
+    object SyntacticAppliedType extends SyntacticTypeAppliedExtractor {
+      def apply(tree: Tree, targs: List[Tree]): Tree =
+        if (targs.isEmpty) tree
+        else if (tree.isType) AppliedTypeTree(tree, targs)
+        else throw new IllegalArgumentException(s"can't create applied type from non-type $tree")
+
+      def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match {
+        case MaybeTypeTreeOriginal(AppliedTypeTree(tpe, targs)) => Some((tpe, targs))
+        case _ if tree.isType => Some((tree, Nil))
+        case _ => None
+      }
+    }
+
+    object SyntacticApplied extends SyntacticAppliedExtractor {
+      def apply(tree: Tree, argss: List[List[Tree]]): Tree =
+        argss.foldLeft(tree) { (f, args) => Apply(f, args.map(treeInfo.assignmentToMaybeNamedArg)) }
+
+      def unapply(tree: Tree): Some[(Tree, List[List[Tree]])] = tree match {
+        case UnApply(treeInfo.Unapplied(Select(fun, nme.unapply)), pats) =>
+          Some((fun, pats :: Nil))
+        case treeInfo.Applied(fun, targs, argss) =>
+          fun match {
+            case Select(_: New, nme.CONSTRUCTOR) =>
+              Some((tree, Nil))
+            case _ =>
+              val callee =
+                if (fun.isTerm) SyntacticTypeApplied(fun, targs)
+                else SyntacticAppliedType(fun, targs)
+              Some((callee, argss))
+          }
+      }
+    }
+
+    // recover constructor contents generated by gen.mkTemplate
+    protected object UnCtor {
+      def unapply(tree: Tree): Option[(Modifiers, List[List[ValDef]], List[Tree])] = tree match {
+        case DefDef(mods, nme.MIXIN_CONSTRUCTOR, _, _, _, SyntacticBlock(lvdefs :+ _)) =>
+          Some((mods | Flag.TRAIT, Nil, lvdefs))
+        case DefDef(mods, nme.CONSTRUCTOR, Nil, vparamss, _, SyntacticBlock(lvdefs :+ _ :+ _)) =>
+          Some((mods, vparamss, lvdefs))
+        case _ => None
+      }
+    }
+
+    // undo gen.mkTemplate
+    protected object UnMkTemplate {
+      def unapply(templ: Template): Option[(List[Tree], ValDef, Modifiers, List[List[ValDef]], List[Tree], List[Tree])] = {
+        val Template(parents, selfType, _) = templ
+        val tbody = treeInfo.untypecheckedTemplBody(templ)
+
+        def result(ctorMods: Modifiers, vparamss: List[List[ValDef]], edefs: List[Tree], body: List[Tree]) =
+          Some((parents, selfType, ctorMods, vparamss, edefs, body))
+        def indexOfCtor(trees: List[Tree]) =
+          trees.indexWhere { case UnCtor(_, _, _) => true ; case _ => false }
+
+        if (tbody forall treeInfo.isInterfaceMember)
+          result(NoMods | Flag.TRAIT, Nil, Nil, tbody)
+        else if (indexOfCtor(tbody) == -1)
+          None
+        else {
+          val (rawEdefs, rest) = tbody.span(treeInfo.isEarlyDef)
+          val (gvdefs, etdefs) = rawEdefs.partition(treeInfo.isEarlyValDef)
+          val (fieldDefs, UnCtor(ctorMods, ctorVparamss, lvdefs) :: body) = rest.splitAt(indexOfCtor(rest))
+          val evdefs = gvdefs.zip(lvdefs).map {
+            case (gvdef @ ValDef(_, _, tpt: TypeTree, _), ValDef(_, _, _, rhs)) =>
+              copyValDef(gvdef)(tpt = tpt.original, rhs = rhs)
+          }
+          val edefs = evdefs ::: etdefs
+          if (ctorMods.isTrait)
+            result(ctorMods, Nil, edefs, body)
+          else {
+            // undo conversion from (implicit ... ) to ()(implicit ... ) when it's the only parameter section
+            val vparamssRestoredImplicits = ctorVparamss match {
+              case Nil :: (tail @ ((head :: _) :: _)) if head.mods.isImplicit => tail
+              case other => other
+            }
+            // undo flag modifications by merging flag info from constructor args and fieldDefs
+            val modsMap = fieldDefs.map { case ValDef(mods, name, _, _) => name -> mods }.toMap
+            def ctorArgsCorrespondToFields = vparamssRestoredImplicits.flatten.forall { vd => modsMap.contains(vd.name) }
+            if (!ctorArgsCorrespondToFields) None
+            else {
+              val vparamss = mmap(vparamssRestoredImplicits) { vd =>
+                val originalMods = modsMap(vd.name) | (vd.mods.flags & DEFAULTPARAM)
+                atPos(vd.pos)(ValDef(originalMods, vd.name, vd.tpt, vd.rhs))
+              }
+              result(ctorMods, vparamss, edefs, body)
+            }
+          }
+        }
+      }
+    }
+
+    protected def mkSelfType(tree: Tree) = tree match {
+      case vd: ValDef =>
+        require(vd.rhs.isEmpty, "self types must have empty right hand side")
+        copyValDef(vd)(mods = (vd.mods | PRIVATE) & (~DEFERRED))
+      case _ =>
+        throw new IllegalArgumentException(s"$tree is not a valid representation of self type, " +
+                                           """consider reformatting into q"val $self: $T" shape""")
+    }
+
+    object SyntacticClassDef extends SyntacticClassDefExtractor {
+      def apply(mods: Modifiers, name: TypeName, tparams: List[Tree],
+                constrMods: Modifiers, vparamss: List[List[Tree]],
+                earlyDefs: List[Tree], parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef = {
+        val extraFlags = PARAMACCESSOR | (if (mods.isCase) CASEACCESSOR else 0L)
+        val vparamss0 = mkParam(vparamss, extraFlags, excludeFlags = DEFERRED | PARAM)
+        val tparams0 = mkTparams(tparams)
+        val parents0 = gen.mkParents(mods,
+          if (mods.isCase) parents.filter {
+            case ScalaDot(tpnme.Product | tpnme.Serializable | tpnme.AnyRef) => false
+            case _ => true
+          } else parents
+        )
+        val body0 = earlyDefs ::: body
+        val selfType0 = mkSelfType(selfType)
+        val templ = gen.mkTemplate(parents0, selfType0, constrMods, vparamss0, body0)
+        gen.mkClassDef(mods, name, tparams0, templ)
+      }
+
+      def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers, List[List[ValDef]],
+                                       List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
+        case ClassDef(mods, name, tparams, UnMkTemplate(parents, selfType, ctorMods, vparamss, earlyDefs, body))
+          if !ctorMods.isTrait && !ctorMods.hasFlag(JAVA) =>
+          Some((mods, name, tparams, ctorMods, vparamss, earlyDefs, parents, selfType, body))
+        case _ =>
+          None
+      }
+    }
+
+    object SyntacticTraitDef extends SyntacticTraitDefExtractor {
+      def apply(mods: Modifiers, name: TypeName, tparams: List[Tree], earlyDefs: List[Tree],
+                parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef = {
+        val mods0 = mods | TRAIT | ABSTRACT
+        val templ = gen.mkTemplate(parents, mkSelfType(selfType), Modifiers(TRAIT), Nil, earlyDefs ::: body)
+        gen.mkClassDef(mods0, name, mkTparams(tparams), templ)
+      }
+
+      def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef],
+                                       List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
+        case ClassDef(mods, name, tparams, UnMkTemplate(parents, selfType, ctorMods, vparamss, earlyDefs, body))
+          if mods.isTrait =>
+          Some((mods, name, tparams, earlyDefs, parents, selfType, body))
+        case _ => None
+      }
+    }
+
+    object SyntacticObjectDef extends SyntacticObjectDefExtractor {
+      def apply(mods: Modifiers, name: TermName, earlyDefs: List[Tree],
+                parents: List[Tree], selfType: Tree, body: List[Tree]): ModuleDef =
+        ModuleDef(mods, name, gen.mkTemplate(parents, mkSelfType(selfType), NoMods, Nil, earlyDefs ::: body))
+
+      def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
+        case ModuleDef(mods, name, UnMkTemplate(parents, selfType, _, _, earlyDefs, body)) =>
+          Some((mods, name, earlyDefs, parents, selfType, body))
+        case _ =>
+          None
+      }
+    }
+
+    object SyntacticPackageObjectDef extends SyntacticPackageObjectDefExtractor {
+      def apply(name: TermName, earlyDefs: List[Tree],
+                parents: List[Tree], selfType: Tree, body: List[Tree]): PackageDef =
+        gen.mkPackageObject(SyntacticObjectDef(NoMods, name, earlyDefs, parents, selfType, body))
+
+      def unapply(tree: Tree): Option[(TermName, List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
+        case PackageDef(Ident(name: TermName), List(SyntacticObjectDef(NoMods, nme.PACKAGEkw, earlyDefs, parents, selfType, body))) =>
+          Some((name, earlyDefs, parents, selfType, body))
+        case _ =>
+          None
+      }
+    }
+
+    // match references to `scala.$name`
+    protected class ScalaMemberRef(symbols: Seq[Symbol]) {
+      def result(name: Name): Option[Symbol] =
+        symbols.find(_.name == name)
+      def unapply(tree: Tree): Option[Symbol] = tree match {
+        case id @ Ident(name) if symbols.contains(id.symbol) && name == id.symbol.name =>
+          Some(id.symbol)
+        case Select(scalapkg @ Ident(nme.scala_), name) if scalapkg.symbol == ScalaPackage =>
+          result(name)
+        case Select(Select(Ident(nme.ROOTPKG), nme.scala_), name) =>
+          result(name)
+        case _ => None
+      }
+    }
+    protected object TupleClassRef extends ScalaMemberRef(TupleClass.seq)
+    protected object TupleCompanionRef extends ScalaMemberRef(TupleClass.seq.map { _.companionModule })
+    protected object UnitClassRef extends ScalaMemberRef(Seq(UnitClass))
+    protected object FunctionClassRef extends ScalaMemberRef(FunctionClass.seq)
+
+    object SyntacticTuple extends SyntacticTupleExtractor {
+      def apply(args: List[Tree]): Tree = {
+        require(args.isEmpty || TupleClass(args.length).exists, s"Tuples of arity ${args.length} aren't supported")
+        gen.mkTuple(args)
+      }
+
+      def unapply(tree: Tree): Option[List[Tree]] = tree match {
+        case Literal(Constant(())) =>
+          Some(Nil)
+        case Apply(MaybeTypeTreeOriginal(SyntacticTypeApplied(MaybeSelectApply(TupleCompanionRef(sym)), targs)), args)
+          if sym == TupleClass(args.length).companionModule
+          && (targs.isEmpty || targs.length == args.length) =>
+          Some(args)
+        case _ if tree.isTerm =>
+          Some(tree :: Nil)
+        case _ =>
+          None
+      }
+    }
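+
+    // Illustrative sketch (not part of the upstream sources): assuming a quasiquote context,
+    //   SyntacticTuple(List(q"1", q"2"))   // builds the same shape of tree as q"(1, 2)"
+    //   SyntacticTuple(Nil)                // builds the unit literal ()
+    // and unapply recovers the element trees, treating any other term as a one-element tuple.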
+
+    object SyntacticTupleType extends SyntacticTupleExtractor {
+      def apply(args: List[Tree]): Tree = {
+        require(args.isEmpty || TupleClass(args.length).exists, s"Tuples of arity ${args.length} aren't supported")
+        gen.mkTupleType(args)
+      }
+
+      def unapply(tree: Tree): Option[List[Tree]] = tree match {
+        case MaybeTypeTreeOriginal(UnitClassRef(_)) =>
+          Some(Nil)
+        case MaybeTypeTreeOriginal(AppliedTypeTree(TupleClassRef(sym), args))
+          if sym == TupleClass(args.length) =>
+          Some(args)
+        case _ if tree.isType =>
+          Some(tree :: Nil)
+        case _ =>
+          None
+      }
+    }
+
+    object SyntacticFunctionType extends SyntacticFunctionTypeExtractor {
+      def apply(argtpes: List[Tree], restpe: Tree): Tree = {
+        require(FunctionClass(argtpes.length).exists, s"Function types of arity ${argtpes.length} aren't supported")
+        gen.mkFunctionTypeTree(argtpes, restpe)
+      }
+
+      def unapply(tree: Tree): Option[(List[Tree], Tree)] = tree match {
+        case MaybeTypeTreeOriginal(AppliedTypeTree(FunctionClassRef(sym), args @ (argtpes :+ restpe)))
+          if sym == FunctionClass(args.length - 1) =>
+          Some((argtpes, restpe))
+        case _ => None
+      }
+    }
+
+    object SyntheticUnit {
+      def unapply(tree: Tree): Boolean = tree match {
+        case Literal(Constant(())) if tree.hasAttachment[SyntheticUnitAttachment.type] => true
+        case _ => false
+      }
+    }
+
+    /** Syntactic combinator that abstracts over Block tree.
+     *
+     *  Apart from providing a more straightforward api that exposes
+     *  block as a list of elements rather than (stats, expr) pair
+     *  it also:
+     *
+     *  1. Strips trailing synthetic units which are inserted by the
+     *     compiler if the block ends with a definition rather
+     *     than an expression or is empty.
+     *
+     *  2. Matches non-block term trees and recognizes them as
+     *     single-element blocks for the sake of consistency with
+     *     compiler's default to treat single-element blocks with
+     *     expressions as just expressions. The only exception is q""
+     *     which is not considered to be a block.
+     */
+    object SyntacticBlock extends SyntacticBlockExtractor {
+      def apply(stats: List[Tree]): Tree = gen.mkBlock(stats)
+
+      def unapply(tree: Tree): Option[List[Tree]] = tree match {
+        case bl @ self.Block(stats, SyntheticUnit()) => Some(treeInfo.untypecheckedBlockBody(bl))
+        case bl @ self.Block(stats, expr)            => Some(treeInfo.untypecheckedBlockBody(bl) :+ expr)
+        case SyntheticUnit()                         => Some(Nil)
+        case _ if tree.isTerm && tree.nonEmpty       => Some(tree :: Nil)
+        case _                                       => None
+      }
+    }
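+
+    // Illustrative sketch (not part of the upstream sources): this is the extractor behind
+    // block quasiquotes, so, assuming a quasiquote context,
+    //   val q"{ ..$stats }" = q"{ val x = 1; x + 1 }"
+    // binds stats to List(q"val x = 1", q"x + 1"), and a non-block term matches as a
+    // single-element block.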
+
+    object SyntacticFunction extends SyntacticFunctionExtractor {
+      def apply(params: List[Tree], body: Tree): Function = {
+        val params0 :: Nil = mkParam(params :: Nil, PARAM)
+        require(params0.forall { _.rhs.isEmpty }, "anonymous functions don't support parameters with default values")
+        Function(params0, body)
+      }
+
+      def unapply(tree: Function): Option[(List[ValDef], Tree)] = Function.unapply(tree)
+    }
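+
+    // Illustrative sketch (not part of the upstream sources): assuming a quasiquote context,
+    //   SyntacticFunction(List(q"val x: Int"), q"x + 1")
+    // builds a Function tree equivalent to q"(x: Int) => x + 1"; parameters with default
+    // values are rejected by the require above.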
+
+    object SyntacticNew extends SyntacticNewExtractor {
+      def apply(earlyDefs: List[Tree], parents: List[Tree], selfType: Tree, body: List[Tree]): Tree =
+        gen.mkNew(parents, mkSelfType(selfType), earlyDefs ::: body, NoPosition, NoPosition)
+
+      def unapply(tree: Tree): Option[(List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
+        case treeInfo.Applied(Select(New(SyntacticAppliedType(ident, targs)), nme.CONSTRUCTOR), Nil, List(Nil)) =>
+          Some((Nil, SyntacticAppliedType(ident, targs) :: Nil, noSelfType, Nil))
+        case treeInfo.Applied(Select(New(SyntacticAppliedType(ident, targs)), nme.CONSTRUCTOR), Nil, argss) =>
+          Some((Nil, SyntacticApplied(SyntacticAppliedType(ident, targs), argss) :: Nil, noSelfType, Nil))
+        case SyntacticBlock(SyntacticClassDef(_, tpnme.ANON_CLASS_NAME, Nil, _, ListOfNil, earlyDefs, parents, selfType, body) ::
+                            Apply(Select(New(Ident(tpnme.ANON_CLASS_NAME)), nme.CONSTRUCTOR), Nil) :: Nil) =>
+          Some((earlyDefs, parents, selfType, body))
+        case _ =>
+          None
+      }
+    }
+
+    object SyntacticDefDef extends SyntacticDefDefExtractor {
+      def apply(mods: Modifiers, name: TermName, tparams: List[Tree],
+                vparamss: List[List[Tree]], tpt: Tree, rhs: Tree): DefDef = {
+        val tparams0 = mkTparams(tparams)
+        val vparamss0 = mkParam(vparamss, PARAM)
+        val rhs0 = {
+          if (name != nme.CONSTRUCTOR) rhs
+          else rhs match {
+            case Block(_, _) => rhs
+            case _ => Block(List(rhs), gen.mkSyntheticUnit)
+          }
+        }
+        DefDef(mods, name, tparams0, vparamss0, tpt, rhs0)
+      }
+
+      def unapply(tree: Tree): Option[(Modifiers, TermName, List[TypeDef], List[List[ValDef]], Tree, Tree)] = tree match {
+        case DefDef(mods, nme.CONSTRUCTOR, tparams, vparamss, tpt, Block(List(expr), Literal(Constant(())))) =>
+          Some((mods, nme.CONSTRUCTOR, tparams, vparamss, tpt, expr))
+        case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+          Some((mods, name, tparams, vparamss, tpt, rhs))
+        case _ => None
+      }
+    }
+
+    protected class SyntacticValDefBase(isMutable: Boolean) extends SyntacticValDefExtractor {
+      def modifiers(mods: Modifiers): Modifiers = if (isMutable) mods | MUTABLE else mods
+
+      def apply(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree): ValDef = ValDef(modifiers(mods), name, tpt, rhs)
+
+      def unapply(tree: Tree): Option[(Modifiers, TermName, Tree, Tree)] = tree match {
+        case ValDef(mods, name, tpt, rhs) if mods.hasFlag(MUTABLE) == isMutable =>
+          Some((mods, name, tpt, rhs))
+        case _ =>
+          None
+      }
+    }
+    object SyntacticValDef extends SyntacticValDefBase(isMutable = false)
+    object SyntacticVarDef extends SyntacticValDefBase(isMutable = true)
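+
+    // Illustrative sketch (not part of the upstream sources): assuming a quasiquote context,
+    //   SyntacticValDef(NoMods, TermName("x"), TypeTree(), q"42")   // like q"val x = 42"
+    //   SyntacticVarDef(NoMods, TermName("x"), TypeTree(), q"42")   // like q"var x = 42", adds MUTABLE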
+
+    object SyntacticAssign extends SyntacticAssignExtractor {
+      def apply(lhs: Tree, rhs: Tree): Tree = gen.mkAssign(lhs, rhs)
+      def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
+        case Assign(lhs, rhs) => Some((lhs, rhs))
+        case AssignOrNamedArg(lhs, rhs) => Some((lhs, rhs))
+        case Apply(Select(fn, nme.update), args :+ rhs) => Some((atPos(fn.pos)(Apply(fn, args)), rhs))
+        case _ => None
+      }
+    }
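+
+    // Illustrative sketch (not part of the upstream sources): SyntacticAssign(q"x", q"1")
+    // builds a plain Assign, while an applied lhs such as q"arr(0)" goes through gen.mkAssign
+    // and becomes the corresponding update call; unapply reverses both shapes.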
+
+    def UnliftListElementwise[T](unliftable: Unliftable[T]) = new UnliftListElementwise[T] {
+      def unapply(lst: List[Tree]): Option[List[T]] = {
+        val unlifted = lst.flatMap { unliftable.unapply(_) }
+        if (unlifted.length == lst.length) Some(unlifted) else None
+      }
+    }
+
+    def UnliftListOfListsElementwise[T](unliftable: Unliftable[T]) = new UnliftListOfListsElementwise[T] {
+      def unapply(lst: List[List[Tree]]): Option[List[List[T]]] = {
+        val unlifted = lst.map { l => l.flatMap { unliftable.unapply(_) } }
+        if (unlifted.flatten.length == lst.flatten.length) Some(unlifted) else None
+      }
+    }
+
+    object SyntacticValFrom extends SyntacticValFromExtractor {
+      def apply(pat: Tree, rhs: Tree): Tree = gen.ValFrom(pat, gen.mkCheckIfRefutable(pat, rhs))
+      def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
+        case gen.ValFrom(pat, UnCheckIfRefutable(pat1, rhs1)) if pat.equalsStructure(pat1) =>
+          Some((pat, rhs1))
+        case gen.ValFrom(pat, rhs) =>
+          Some((pat, rhs))
+        case _ => None
+      }
+    }
+
+    object SyntacticValEq extends SyntacticValEqExtractor {
+      def apply(pat: Tree, rhs: Tree): Tree         = gen.ValEq(pat, rhs)
+      def unapply(tree: Tree): Option[(Tree, Tree)] = gen.ValEq.unapply(tree)
+    }
+
+    object SyntacticFilter extends SyntacticFilterExtractor {
+      def apply(tree: Tree): Tree           = gen.Filter(tree)
+      def unapply(tree: Tree): Option[Tree] = gen.Filter.unapply(tree)
+    }
+
+    // If a tree in type position isn't provided by the user (e.g. `tpt` fields of
+    // `ValDef` and `DefDef`, function params etc), then it's going to be parsed as
+    // TypeTree with empty original and empty tpe. This extractor matches such trees
+    // so that one can write q"val x = 2" to match typecheck(q"val x = 2"). Note that
+    // TypeTree() is the only possible representation for empty trees in type positions.
+    // We used to sometimes receive EmptyTree in such cases, but not anymore.
+    object SyntacticEmptyTypeTree extends SyntacticEmptyTypeTreeExtractor {
+      def apply(): TypeTree = self.TypeTree()
+      def unapply(tt: TypeTree): Boolean = tt.original == null || tt.original.isEmpty
+    }
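+
+    // Illustrative sketch (not part of the upstream sources): in q"val x = 2" the tpt field
+    // is a TypeTree() with empty original, so it matches SyntacticEmptyTypeTree(); the
+    // explicitly annotated q"val x: Int = 2" does not.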
+
+    // match a sequence of desugared `val $pat = $value`
+    protected object UnPatSeq {
+      def unapply(trees: List[Tree]): Option[List[(Tree, Tree)]] = {
+        val imploded = implodePatDefs(trees)
+        val patvalues = imploded.flatMap {
+          case SyntacticPatDef(_, pat, EmptyTree, rhs) => Some((pat, rhs))
+          case ValDef(_, name, SyntacticEmptyTypeTree(), rhs) => Some((Bind(name, self.Ident(nme.WILDCARD)), rhs))
+          case ValDef(_, name, tpt, rhs) => Some((Bind(name, Typed(self.Ident(nme.WILDCARD), tpt)), rhs))
+          case _ => None
+        }
+        if (patvalues.length == imploded.length) Some(patvalues) else None
+      }
+    }
+
+    // implode multiple-statement desugaring of pattern definitions
+    // into single-statement valdefs with nme.QUASIQUOTE_PAT_DEF name
+    object implodePatDefs extends Transformer {
+      override def transform(tree: Tree) = tree match {
+        case templ: Template => deriveTemplate(templ)(transformStats)
+        case block: Block =>
+          val Block(init, last) = block
+          Block(transformStats(init), transform(last)).copyAttrs(block)
+        case ValDef(mods, name1, SyntacticEmptyTypeTree(), Match(MaybeTyped(MaybeUnchecked(value), tpt), CaseDef(pat, EmptyTree, Ident(name2)) :: Nil))
+          if name1 == name2 =>
+          ValDef(mods, nme.QUASIQUOTE_PAT_DEF, Typed(pat, tpt), transform(value))
+        case _ =>
+          super.transform(tree)
+      }
+      def transformStats(trees: List[Tree]): List[Tree] = trees match {
+        case Nil => Nil
+        case ValDef(mods, _, SyntacticEmptyTypeTree(), Match(MaybeTyped(MaybeUnchecked(value), tpt), CaseDef(pat, EmptyTree, SyntacticTuple(ids)) :: Nil)) :: tail
+          if mods.hasFlag(SYNTHETIC) && mods.hasFlag(ARTIFACT) =>
+          ids match {
+            case Nil =>
+              ValDef(NoMods, nme.QUASIQUOTE_PAT_DEF, Typed(pat, tpt), transform(value)) :: transformStats(tail)
+            case _   =>
+              val mods = tail.take(1).head.asInstanceOf[ValDef].mods
+              ValDef(mods, nme.QUASIQUOTE_PAT_DEF, Typed(pat, tpt), transform(value)) :: transformStats(tail.drop(ids.length))
+          }
+        case other :: tail =>
+          transform(other) :: transformStats(tail)
+      }
+      def apply(tree: Tree) = transform(tree)
+      def apply(trees: List[Tree]) = transformStats(trees)
+    }
+
+    object SyntacticPatDef extends SyntacticPatDefExtractor {
+      def apply(mods: Modifiers, pat: Tree, tpt: Tree, rhs: Tree): List[ValDef] = tpt match {
+        case SyntacticEmptyTypeTree() => gen.mkPatDef(mods, pat, rhs)
+        case _                        => gen.mkPatDef(mods, Typed(pat, tpt), rhs)
+      }
+      def unapply(tree: Tree): Option[(Modifiers, Tree, Tree, Tree)] = tree match {
+        case ValDef(mods, nme.QUASIQUOTE_PAT_DEF, Typed(pat,  tpt), rhs) => Some((mods, pat, tpt, rhs))
+        case _ => None
+      }
+    }
+
+    // match a sequence of desugared `val $pat = $value` with a tuple in the end
+    protected object UnPatSeqWithRes {
+      def unapply(tree: Tree): Option[(List[(Tree, Tree)], List[Tree])] = tree match {
+        case SyntacticBlock(UnPatSeq(trees) :+ SyntacticTuple(elems)) => Some((trees, elems))
+        case _ => None
+      }
+    }
+
+    // undo gen.mkSyntheticParam
+    protected object UnSyntheticParam {
+      def unapply(tree: Tree): Option[TermName] = tree match {
+        case ValDef(mods, name, _, EmptyTree)
+          if mods.hasFlag(SYNTHETIC) && mods.hasFlag(PARAM) =>
+          Some(name)
+        case _ => None
+      }
+    }
+
+    // undo gen.mkVisitor
+    protected object UnVisitor {
+      def unapply(tree: Tree): Option[(TermName, List[CaseDef])] = tree match {
+        case Function(UnSyntheticParam(x1) :: Nil, Match(MaybeUnchecked(Ident(x2)), cases))
+          if x1 == x2 =>
+          Some((x1, cases))
+        case _ => None
+      }
+    }
+
+    // undo gen.mkFor:makeClosure
+    protected object UnClosure {
+      def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
+        case Function(ValDef(Modifiers(PARAM, _, _), name, tpt, EmptyTree) :: Nil, body) =>
+          tpt match {
+            case SyntacticEmptyTypeTree() => Some((Bind(name, self.Ident(nme.WILDCARD)), body))
+            case _                        => Some((Bind(name, Typed(self.Ident(nme.WILDCARD), tpt)), body))
+          }
+        case UnVisitor(_, CaseDef(pat, EmptyTree, body) :: Nil) =>
+          Some((pat, body))
+        case _ => None
+      }
+    }
+
+    // match call to either withFilter or filter
+    protected object FilterCall {
+      def unapply(tree: Tree): Option[(Tree,Tree)] = tree match {
+        case Apply(Select(obj, nme.withFilter | nme.filter), arg :: Nil) =>
+          Some((obj, arg))
+        case _ => None
+      }
+    }
+
+    // transform a chain of withFilter calls into a sequence of for filters
+    protected object UnFilter {
+      def unapply(tree: Tree): Some[(Tree, List[Tree])] = tree match {
+        case UnCheckIfRefutable(_, _) =>
+          Some((tree, Nil))
+        case FilterCall(UnFilter(rhs, rest), UnClosure(_, test)) =>
+          Some((rhs, rest :+ SyntacticFilter(test)))
+        case _ =>
+          Some((tree, Nil))
+      }
+    }
+
+    // undo gen.mkCheckIfRefutable
+    protected object UnCheckIfRefutable {
+      def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
+        case FilterCall(rhs, UnVisitor(name,
+            CaseDef(pat, EmptyTree, Literal(Constant(true))) ::
+            CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))) :: Nil))
+          if name.toString.contains(nme.CHECK_IF_REFUTABLE_STRING) =>
+          Some((pat, rhs))
+        case _ => None
+      }
+    }
+
+    // undo gen.mkFor:makeCombination accounting for possible extra implicit argument
+    protected class UnForCombination(name: TermName) {
+      def unapply(tree: Tree) = tree match {
+        case SyntacticApplied(SyntacticTypeApplied(sel @ Select(lhs, meth), _), (f :: Nil) :: Nil)
+          if name == meth && sel.hasAttachment[ForAttachment.type] =>
+          Some((lhs, f))
+        case SyntacticApplied(SyntacticTypeApplied(sel @ Select(lhs, meth), _), (f :: Nil) :: _ :: Nil)
+          if name == meth && sel.hasAttachment[ForAttachment.type] =>
+          Some((lhs, f))
+        case _ => None
+      }
+    }
+    protected object UnMap     extends UnForCombination(nme.map)
+    protected object UnForeach extends UnForCombination(nme.foreach)
+    protected object UnFlatMap extends UnForCombination(nme.flatMap)
+
+    // undo desugaring done in gen.mkFor
+    protected object UnFor {
+      def unapply(tree: Tree): Option[(List[Tree], Tree)] = {
+        val interm = tree match {
+          case UnFlatMap(UnFilter(rhs, filters), UnClosure(pat, UnFor(rest, body))) =>
+            Some(((pat, rhs), filters ::: rest, body))
+          case UnForeach(UnFilter(rhs, filters), UnClosure(pat, UnFor(rest, body))) =>
+            Some(((pat, rhs), filters ::: rest, body))
+          case UnMap(UnFilter(rhs, filters), UnClosure(pat, cbody)) =>
+            Some(((pat, rhs), filters, gen.Yield(cbody)))
+          case UnForeach(UnFilter(rhs, filters), UnClosure(pat, cbody)) =>
+            Some(((pat, rhs), filters, cbody))
+          case _ => None
+        }
+        interm.flatMap {
+          case ((Bind(_, SyntacticTuple(_)) | SyntacticTuple(_),
+                 UnFor(SyntacticValFrom(pat, rhs) :: innerRest, gen.Yield(UnPatSeqWithRes(pats, elems2)))),
+                outerRest, fbody) =>
+            val valeqs = pats.map { case (pat, rhs) => SyntacticValEq(pat, rhs) }
+            Some((SyntacticValFrom(pat, rhs) :: innerRest ::: valeqs ::: outerRest, fbody))
+          case ((pat, rhs), filters, body) =>
+            Some((SyntacticValFrom(pat, rhs) :: filters, body))
+        }
+      }
+    }
+
+    // check that enumerators are valid
+    protected def mkEnumerators(enums: List[Tree]): List[Tree] = {
+      require(enums.nonEmpty, "enumerators can't be empty")
+      enums.head match {
+        case SyntacticValFrom(_, _) =>
+        case t => throw new IllegalArgumentException(s"$t is not a valid fist enumerator of for loop")
+      }
+      enums.tail.foreach {
+        case SyntacticValEq(_, _) | SyntacticValFrom(_, _) | SyntacticFilter(_) =>
+        case t => throw new IllegalArgumentException(s"$t is not a valid representation of a for loop enumerator")
+      }
+      enums
+    }
+
+    object SyntacticFor extends SyntacticForExtractor {
+      def apply(enums: List[Tree], body: Tree): Tree = gen.mkFor(mkEnumerators(enums), body)
+      def unapply(tree: Tree) = tree match {
+        case UnFor(enums, gen.Yield(body)) => None
+        case UnFor(enums, body) => Some((enums, body))
+        case _ => None
+      }
+    }
+
+    object SyntacticForYield extends SyntacticForExtractor {
+      def apply(enums: List[Tree], body: Tree): Tree = gen.mkFor(mkEnumerators(enums), gen.Yield(body))
+      def unapply(tree: Tree) = tree match {
+        case UnFor(enums, gen.Yield(body)) => Some((enums, body))
+        case _ => None
+      }
+    }
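+
+    // Illustrative sketch (not part of the upstream sources): assuming a quasiquote context,
+    //   SyntacticForYield(List(fq"x <- List(1, 2, 3)", fq"if x > 1"), q"x * 2")
+    // builds the usual map/withFilter desugaring, and unapply recovers the enumerators from
+    // such desugared trees; SyntacticFor does the same for for loops without yield.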
+
+    // use typetree's original instead of typetree itself
+    protected object MaybeTypeTreeOriginal {
+      def unapply(tree: Tree): Some[Tree] = tree match {
+        case tt: TypeTree => Some(tt.original)
+        case _            => Some(tree)
+      }
+    }
+
+    // drop potential extra call to .apply
+    protected object MaybeSelectApply {
+      def unapply(tree: Tree): Some[Tree] = tree match {
+        case Select(f, nme.apply) => Some(f)
+        case other                => Some(other)
+      }
+    }
+
+    // drop potential @scala.unchecked annotation
+    protected object MaybeUnchecked {
+      def unapply(tree: Tree): Some[Tree] = tree match {
+        case Annotated(SyntacticNew(Nil, ScalaDot(tpnme.unchecked) :: Nil, noSelfType, Nil), annottee) =>
+          Some(annottee)
+        case Typed(annottee, MaybeTypeTreeOriginal(
+          Annotated(SyntacticNew(Nil, ScalaDot(tpnme.unchecked) :: Nil, noSelfType, Nil), _))) =>
+          Some(annottee)
+        case annottee => Some(annottee)
+      }
+    }
+
+    protected object MaybeTyped {
+      def unapply(tree: Tree): Some[(Tree, Tree)] = tree match {
+        case Typed(v, tpt) => Some((v, tpt))
+        case v             => Some((v, SyntacticEmptyTypeTree()))
+      }
+    }
+
+    protected def mkCases(cases: List[Tree]): List[CaseDef] = cases.map {
+      case c: CaseDef => c
+      case tree => throw new IllegalArgumentException("$tree is not valid representation of pattern match case")
+    }
+
+    object SyntacticPartialFunction extends SyntacticPartialFunctionExtractor {
+      def apply(cases: List[Tree]): Match = Match(EmptyTree, mkCases(cases))
+      def unapply(tree: Tree): Option[List[CaseDef]] = tree match {
+        case Match(EmptyTree, cases) => Some(cases)
+        case Typed(
+               Block(
+                 List(ClassDef(clsMods, tpnme.ANON_FUN_NAME, Nil, Template(
+                   List(abspf: TypeTree, ser: TypeTree), noSelfType, List(
+                     DefDef(_, nme.CONSTRUCTOR, _, _, _, _),
+                     DefDef(_, nme.applyOrElse, _, _, _,
+                       Match(_, cases :+
+                         CaseDef(Bind(nme.DEFAULT_CASE, Ident(nme.WILDCARD)), _, _))),
+                     DefDef(_, nme.isDefinedAt, _, _, _, _))))),
+                 Apply(Select(New(Ident(tpnme.ANON_FUN_NAME)), termNames.CONSTRUCTOR), List())),
+               pf: TypeTree)
+          if pf.tpe != null && pf.tpe.typeSymbol.eq(PartialFunctionClass) &&
+             abspf.tpe != null && abspf.tpe.typeSymbol.eq(AbstractPartialFunctionClass) &&
+             ser.tpe != null && ser.tpe.typeSymbol.eq(SerializableClass) &&
+             clsMods.hasFlag(FINAL) && clsMods.hasFlag(SYNTHETIC) =>
+          Some(cases)
+        case _ => None
+      }
+    }
+
+    object SyntacticMatch extends SyntacticMatchExtractor {
+      def apply(scrutinee: Tree, cases: List[Tree]) = {
+        require(scrutinee.nonEmpty, "match's scrutinee may not be empty")
+        Match(scrutinee, mkCases(cases))
+      }
+
+      def unapply(tree: Match): Option[(Tree, List[CaseDef])] = tree match {
+        case Match(scrutinee, cases) if scrutinee.nonEmpty => Some((scrutinee, cases))
+        case _                                             => None
+      }
+    }
+
+    object SyntacticTry extends SyntacticTryExtractor {
+      def apply(block: Tree, catches: List[Tree], finalizer: Tree) = Try(block, mkCases(catches), finalizer)
+      def unapply(tree: Try): Option[(Tree, List[CaseDef], Tree)] = Try.unapply(tree)
+    }
+
+    object SyntacticTermIdent extends SyntacticTermIdentExtractor {
+      def apply(name: TermName, isBackquoted: Boolean): Ident = {
+        val id = self.Ident(name)
+        if (isBackquoted) id updateAttachment BackquotedIdentifierAttachment
+        id
+      }
+      def unapply(id: Ident): Option[(TermName, Boolean)] = id.name match {
+        case name: TermName => Some((name, id.hasAttachment[BackquotedIdentifierAttachment.type]))
+        case _              => None
+      }
+    }
+
+    object SyntacticTypeIdent extends SyntacticTypeIdentExtractor {
+      def apply(name: TypeName): Ident = self.Ident(name)
+      def unapply(tree: Tree): Option[TypeName] = tree match {
+        case MaybeTypeTreeOriginal(Ident(name: TypeName)) => Some(name)
+        case _ => None
+      }
+    }
+
+    /** Facade over Imports and ImportSelectors that lets one structurally
+     *  deconstruct/reconstruct them.
+     *
+     *  Selectors are represented in the following way:
+     *  1. q"import foo._"            <==> q"import foo.${pq"_"}"
+     *  2. q"import foo.bar"          <==> q"import foo.${pq"bar"}"
+     *  3. q"import foo.{bar => baz}" <==> q"import foo.${pq"bar -> baz"}"
+     *  4. q"import foo.{bar => _}"   <==> q"import foo.${pq"bar -> _"}"
+     *
+     *  All names in selectors are TermNames, despite the fact that ImportSelector
+     *  can theoretically contain TypeNames too (it never does in practice).
+     */
+    object SyntacticImport extends SyntacticImportExtractor {
+      // construct/deconstruct {_} import selector
+      private object WildcardSelector {
+        def apply(offset: Int): ImportSelector = ImportSelector(nme.WILDCARD, offset, null, -1)
+        def unapply(sel: ImportSelector): Option[Int] = sel match {
+          case ImportSelector(nme.WILDCARD, offset, null, -1) => Some(offset)
+          case _                                              => None
+        }
+      }
+
+      // construct/deconstruct {foo} import selector
+      private object NameSelector {
+        def apply(name: TermName, offset: Int): ImportSelector = ImportSelector(name, offset, name, offset)
+        def unapply(sel: ImportSelector): Option[(TermName, Int)] = sel match {
+          case ImportSelector(name1, offset1, name2, offset2) if name1 == name2 && offset1 == offset2 =>
+            Some((name1.toTermName, offset1))
+          case _ =>
+            None
+        }
+      }
+
+      // construct/deconstruct {foo => bar} import selector
+      private object RenameSelector {
+        def apply(name1: TermName, offset1: Int, name2: TermName, offset2: Int): ImportSelector =
+          ImportSelector(name1, offset1, name2, offset2)
+        def unapply(sel: ImportSelector): Option[(TermName, Int, TermName, Int)] = sel match {
+          case ImportSelector(_, _, null | nme.WILDCARD, _) =>
+            None
+          case ImportSelector(name1, offset1, name2, offset2) if name1 != name2 =>
+            Some((name1.toTermName, offset1, name2.toTermName, offset2))
+          case _ =>
+            None
+        }
+      }
+
+      // construct/deconstruct {foo => _} import selector
+      private object UnimportSelector {
+        def apply(name: TermName, offset: Int): ImportSelector =
+          ImportSelector(name, offset, nme.WILDCARD, -1)
+        def unapply(sel: ImportSelector): Option[(TermName, Int)] = sel match {
+          case ImportSelector(name, offset, nme.WILDCARD, _) => Some((name.toTermName, offset))
+          case _                                             => None
+        }
+      }
+
+      // represent {_} import selector as pq"_"
+      private object WildcardSelectorRepr {
+        def apply(pos: Position): Tree = atPos(pos)(self.Ident(nme.WILDCARD))
+        def unapply(tree: Tree): Option[Position] = tree match {
+          case self.Ident(nme.WILDCARD) => Some(tree.pos)
+          case _                        => None
+        }
+      }
+
+      // represent {foo} import selector as pq"foo"
+      private object NameSelectorRepr {
+        def apply(name: TermName, pos: Position): Tree = atPos(pos)(Bind(name, WildcardSelectorRepr(pos)))
+        def unapply(tree: Tree): Option[(TermName, Position)] = tree match {
+          case Bind(name, WildcardSelectorRepr(_)) => Some((name.toTermName, tree.pos))
+          case _                                   => None
+        }
+      }
+
+      // pq"left -> right"
+      private object Arrow {
+        def apply(left: Tree, right: Tree): Apply =
+          Apply(self.Ident(nme.MINGT), left :: right :: Nil)
+        def unapply(tree: Apply): Option[(Tree, Tree)] = tree match {
+          case Apply(self.Ident(nme.MINGT), left :: right :: Nil) => Some((left, right))
+          case _ => None
+        }
+      }
+
+      // represent {foo => bar} import selector as pq"foo -> bar"
+      private object RenameSelectorRepr {
+        def apply(name1: TermName, pos1: Position, name2: TermName, pos2: Position): Tree = {
+          val left = NameSelectorRepr(name1, pos1)
+          val right = NameSelectorRepr(name2, pos2)
+          atPos(wrappingPos(left :: right :: Nil))(Arrow(left, right))
+        }
+        def unapply(tree: Tree): Option[(TermName, Position, TermName, Position)] = tree match {
+          case Arrow(NameSelectorRepr(name1, pos1), NameSelectorRepr(name2, pos2)) =>
+            Some((name1.toTermName, pos1, name2.toTermName, pos2))
+          case _ =>
+            None
+        }
+      }
+
+      // represent {foo => _} import selector as pq"foo -> _"
+      private object UnimportSelectorRepr {
+        def apply(name: TermName, pos: Position): Tree =
+          atPos(pos)(Arrow(NameSelectorRepr(name, pos), WildcardSelectorRepr(pos)))
+        def unapply(tree: Tree): Option[(TermName, Position)] = tree match {
+          case Arrow(NameSelectorRepr(name, pos), WildcardSelectorRepr(_)) =>
+            Some((name, pos))
+          case _ =>
+            None
+        }
+      }
+
+      private def derivedPos(t: Tree, offset: Int): Position =
+        if (t.pos == NoPosition) NoPosition else t.pos.withPoint(offset)
+
+      private def derivedOffset(pos: Position): Int =
+        if (pos == NoPosition) -1 else pos.point
+
+      def apply(expr: Tree, selectors: List[Tree]): Import = {
+        val importSelectors = selectors.map {
+          case WildcardSelectorRepr(pos)                    => WildcardSelector(derivedOffset(pos))
+          case NameSelectorRepr(name, pos)                  => NameSelector(name, derivedOffset(pos))
+          case RenameSelectorRepr(name1, pos1, name2, pos2) => RenameSelector(name1, derivedOffset(pos1), name2, derivedOffset(pos2))
+          case UnimportSelectorRepr(name, pos)              => UnimportSelector(name, derivedOffset(pos))
+          case tree                                         => throw new IllegalArgumentException(s"${showRaw(tree)} doesn't correspond to import selector")
+        }
+        Import(expr, importSelectors)
+      }
+
+      def unapply(imp: Import): Some[(Tree, List[Tree])] = {
+        val selectors = imp.selectors.map {
+          case WildcardSelector(offset)                       => WildcardSelectorRepr(derivedPos(imp, offset))
+          case NameSelector(name, offset)                     => NameSelectorRepr(name, derivedPos(imp, offset))
+          case RenameSelector(name1, offset1, name2, offset2) => RenameSelectorRepr(name1, derivedPos(imp, offset1), name2, derivedPos(imp, offset2))
+          case UnimportSelector(name, offset)                 => UnimportSelectorRepr(name, derivedPos(imp, offset))
+        }
+        Some((imp.expr, selectors))
+      }
+    }
+
+    object SyntacticSelectType extends SyntacticSelectTypeExtractor {
+      def apply(qual: Tree, name: TypeName): Select = Select(qual, name)
+      def unapply(tree: Tree): Option[(Tree, TypeName)] = tree match {
+        case MaybeTypeTreeOriginal(Select(qual, name: TypeName)) => Some((qual, name))
+        case _ => None
+      }
+    }
+
+    object SyntacticSelectTerm extends SyntacticSelectTermExtractor {
+      def apply(qual: Tree, name: TermName): Select = Select(qual, name)
+      def unapply(tree: Tree): Option[(Tree, TermName)] = tree match {
+        case Select(qual, name: TermName) => Some((qual, name))
+        case _                            => None
+      }
+    }
+
+    object SyntacticCompoundType extends SyntacticCompoundTypeExtractor {
+      def apply(parents: List[Tree], defns: List[Tree]) =
+        CompoundTypeTree(Template(gen.mkParents(NoMods, parents), noSelfType, defns))
+      def unapply(tree: Tree): Option[(List[Tree], List[Tree])] = tree match {
+        case MaybeTypeTreeOriginal(CompoundTypeTree(Template(parents, _, defns))) =>
+          Some((parents, defns))
+        case _ =>
+          None
+      }
+    }
+
+    object SyntacticSingletonType extends SyntacitcSingletonTypeExtractor {
+      def apply(ref: Tree): SingletonTypeTree = SingletonTypeTree(ref)
+      def unapply(tree: Tree): Option[Tree] = tree match {
+        case MaybeTypeTreeOriginal(SingletonTypeTree(ref)) =>
+          Some(ref)
+        case _ =>
+          None
+      }
+    }
+
+    object SyntacticTypeProjection extends SyntacticTypeProjectionExtractor {
+      def apply(qual: Tree, name: TypeName): SelectFromTypeTree =
+        SelectFromTypeTree(qual, name)
+      def unapply(tree: Tree): Option[(Tree, TypeName)] = tree match {
+        case MaybeTypeTreeOriginal(SelectFromTypeTree(qual, name)) =>
+          Some((qual, name))
+        case _ =>
+          None
+      }
+    }
+
+    object SyntacticAnnotatedType extends SyntacticAnnotatedTypeExtractor {
+      def apply(tpt: Tree, annot: Tree): Annotated =
+        Annotated(annot, tpt)
+      def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
+        case MaybeTypeTreeOriginal(Annotated(annot, tpt)) =>
+          Some((tpt, annot))
+        case _ =>
+          None
+      }
+    }
+
+    object SyntacticExistentialType extends SyntacticExistentialTypeExtractor {
+      def apply(tpt: Tree, where: List[Tree]): ExistentialTypeTree =
+        ExistentialTypeTree(tpt, where.map {
+          case md: MemberDef => md
+          case tree => throw new IllegalArgumentException(s"$tree is not a legal forSome definition")
+        })
+      def unapply(tree: Tree): Option[(Tree, List[MemberDef])] = tree match {
+        case MaybeTypeTreeOriginal(ExistentialTypeTree(tpt, where)) =>
+          Some((tpt, where))
+        case _ =>
+          None
+      }
+    }
+  }
+
+  val build = new ReificationSupportImpl
+}
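
The selector encodings above are the shapes quasiquote import trees round-trip through: a plain name becomes pq"foo", a rename pq"foo -> bar", an unimport pq"foo -> _", and the wildcard pq"_". Below is a minimal sketch of that round trip, assuming a 2.11 scala-reflect on the classpath; the object name and the printed output are illustrative only, not part of this patch.

    import scala.reflect.runtime.universe._

    object ImportSelectorDemo extends App {
      // One import carrying all four selector shapes handled by SyntacticImport:
      // a plain name, a rename, an unimport and the wildcard.
      val imp = q"import scala.collection.{immutable, mutable => m, concurrent => _, _}"

      // showRaw exposes the underlying ImportSelector(name, pos, rename, renamePos)
      // quadruples that the extractor maps to and from the pq"..." representations.
      println(showRaw(imp))
    }
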
diff --git a/src/reflect/scala/reflect/internal/Required.scala b/src/reflect/scala/reflect/internal/Required.scala
index 842491d..009bc39 100644
--- a/src/reflect/scala/reflect/internal/Required.scala
+++ b/src/reflect/scala/reflect/internal/Required.scala
@@ -1,15 +1,16 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import settings.MutableSettings
 
 trait Required { self: SymbolTable =>
-
   def picklerPhase: Phase
 
-  def settings: MutableSettings
+  def erasurePhase: Phase
 
-  def forInteractive: Boolean
+  def settings: MutableSettings
 
-  def forScaladoc: Boolean
+  @deprecated("Interactive is implemented with a custom Global; this flag is ignored", "2.11.0") def forInteractive = false
+  @deprecated("Scaladoc is implemented with a custom Global; this flag is ignored", "2.11.0")    def forScaladoc = false
 }
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala
index ab3b9b7..cf3f356 100644
--- a/src/reflect/scala/reflect/internal/Scopes.scala
+++ b/src/reflect/scala/reflect/internal/Scopes.scala
@@ -3,11 +3,22 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
+import scala.annotation.tailrec
+
 trait Scopes extends api.Scopes { self: SymbolTable =>
 
+  /** An ADT to represent the results of symbol name lookups.
+   */
+  sealed trait NameLookup { def symbol: Symbol ; def isSuccess = false }
+  case class LookupSucceeded(qualifier: Tree, symbol: Symbol) extends NameLookup { override def isSuccess = true }
+  case class LookupAmbiguous(msg: String) extends NameLookup { def symbol = NoSymbol }
+  case class LookupInaccessible(symbol: Symbol, msg: String) extends NameLookup
+  case object LookupNotFound extends NameLookup { def symbol = NoSymbol }
+
   class ScopeEntry(val sym: Symbol, val owner: Scope) {
     /** the next entry in the hash bucket
      */
@@ -17,15 +28,11 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
      */
     var next: ScopeEntry = null
 
+    def depth = owner.nestingLevel
     override def hashCode(): Int = sym.name.start
-    override def toString(): String = sym.toString()
+    override def toString() = s"$sym (depth=$depth)"
   }
 
-  /**
-   *  @param sym   ...
-   *  @param owner ...
-   *  @return      ...
-   */
   private def newScopeEntry(sym: Symbol, owner: Scope): ScopeEntry = {
     val e = new ScopeEntry(sym, owner)
     e.next = owner.elems
@@ -61,6 +68,11 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
     /** a cache for all elements, to be used by symbol iterator.
      */
     private var elemsCache: List[Symbol] = null
+    private var cachedSize = -1
+    private def flushElemsCache() {
+      elemsCache = null
+      cachedSize = -1
+    }
 
     /** size and mask of hash tables
      *  todo: make hashtables grow?
@@ -82,6 +94,12 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
 
     /** the number of entries in this scope */
     override def size: Int = {
+      if (cachedSize < 0)
+        cachedSize = directSize
+
+      cachedSize
+    }
+    private def directSize: Int = {
       var s = 0
       var e = elems
       while (e ne null) {
@@ -92,11 +110,9 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
     }
 
     /** enter a scope entry
-     *
-     *  @param e ...
      */
     protected def enterEntry(e: ScopeEntry) {
-      elemsCache = null
+      flushElemsCache()
       if (hashtable ne null)
         enterInHash(e)
       else if (size >= MIN_HASH)
@@ -110,8 +126,6 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
     }
 
     /** enter a symbol
-     *
-     *  @param sym ...
      */
     def enter[T <: Symbol](sym: T): T = {
       enterEntry(newScopeEntry(sym, this))
@@ -119,14 +133,18 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
     }
 
     /** enter a symbol, asserting that no symbol with same name exists in scope
-     *
-     *  @param sym ...
      */
     def enterUnique(sym: Symbol) {
       assert(lookup(sym.name) == NoSymbol, (sym.fullLocationString, lookup(sym.name).fullLocationString))
       enter(sym)
     }
 
+    def enterIfNew[T <: Symbol](sym: T): T = {
+      val existing = lookupEntry(sym.name)
+      if (existing == null) enter(sym)
+      else existing.sym.asInstanceOf[T]
+    }
+
     private def createHash() {
       hashtable = new Array[ScopeEntry](HASHSIZE)
       enterAllInHash(elems)
@@ -175,8 +193,6 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
     }
 
     /** remove entry
-     *
-     *  @param e ...
      */
     def unlink(e: ScopeEntry) {
       if (elems == e) {
@@ -192,30 +208,64 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
         if (e1 == e) {
           hashtable(index) = e.tail
         } else {
-          while (e1.tail != e) e1 = e1.tail;
+          while (e1.tail != e) e1 = e1.tail
           e1.tail = e.tail
         }
       }
-      elemsCache = null
+      flushElemsCache()
     }
 
     /** remove symbol */
     def unlink(sym: Symbol) {
       var e = lookupEntry(sym.name)
       while (e ne null) {
-        if (e.sym == sym) unlink(e);
+        if (e.sym == sym) unlink(e)
         e = lookupNextEntry(e)
       }
     }
 
-    /** lookup a symbol
-     *
-     *  @param name ...
-     *  @return     ...
+    /** Look up a module or a class, skipping entries in scope whose
+     *  symbols do not satisfy that requirement.
+     */
+    def lookupModule(name: Name): Symbol = findSymbol(lookupAll(name.toTermName))(_.isModule)
+    def lookupClass(name: Name): Symbol  = findSymbol(lookupAll(name.toTypeName))(_.isClass)
+
+    /** True if the name exists in this scope, false otherwise. */
+    def containsName(name: Name) = lookupEntry(name) != null
+
+    /** Lookup a symbol.
      */
     def lookup(name: Name): Symbol = {
       val e = lookupEntry(name)
-      if (e eq null) NoSymbol else e.sym
+      if (e eq null) NoSymbol
+      else if (lookupNextEntry(e) eq null) e.sym
+      else {
+        // We shouldn't get here: until now this method was picking a random
+        // symbol when there was more than one with the name, so this should
+        // only be called knowing that there are 0-1 symbols of interest. So, we
+        // can safely return an overloaded symbol rather than throwing away the
+        // rest of them. Most likely we still break, but at least we will break
+        // in an understandable fashion (unexpectedly overloaded symbol) rather
+        // than a non-deterministic bizarre one (see any bug involving overloads
+        // in package objects.)
+        val alts = lookupAll(name).toList
+        def alts_s = alts map (s => s.defString) mkString " <and> "
+        devWarning(s"scope lookup of $name found multiple symbols: $alts_s")
+        // FIXME - how is one supposed to create an overloaded symbol without
+        // knowing the correct owner? Using the symbol owner is not correct;
+        // say for instance this is List's scope and the symbols are its three
+        // mkString members. Those symbols are owned by TraversableLike, which
+        // is no more meaningful an owner than NoSymbol given that we're in
+        // List. Maybe it makes no difference who owns the overloaded symbol, in
+        // which case let's establish that and have a canonical creation method.
+        //
+        // FIXME - a similar question for prefix, although there are more
+        // clues from the symbols on that one, as implemented here. In general
+        // the distinct list is one type and lub becomes the identity.
+        // val prefix = lub(alts map (_.info.prefix) distinct)
+        // Now using NoSymbol and NoPrefix always to avoid forcing info (SI-6664)
+        NoSymbol.newOverloaded(NoPrefix, alts)
+      }
     }
 
     /** Returns an iterator yielding every symbol with given name in this scope.
@@ -223,7 +273,20 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
     def lookupAll(name: Name): Iterator[Symbol] = new Iterator[Symbol] {
       var e = lookupEntry(name)
       def hasNext: Boolean = e ne null
-      def next(): Symbol = { val r = e.sym; e = lookupNextEntry(e); r }
+      def next(): Symbol = try e.sym finally e = lookupNextEntry(e)
+    }
+
+    def lookupAllEntries(name: Name): Iterator[ScopeEntry] = new Iterator[ScopeEntry] {
+      var e = lookupEntry(name)
+      def hasNext: Boolean = e ne null
+      def next(): ScopeEntry = try e finally e = lookupNextEntry(e)
+    }
+
+    def lookupUnshadowedEntries(name: Name): Iterator[ScopeEntry] = {
+      lookupEntry(name) match {
+        case null => Iterator.empty
+        case e    => lookupAllEntries(name) filter (e1 => (e eq e1) || (e.depth == e1.depth && e.sym != e1.sym))
+      }
     }
 
     /** lookup a symbol entry matching given name.
@@ -257,20 +320,47 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
       if (hashtable ne null)
         do { e = e.tail } while ((e ne null) && e.sym.name != entry.sym.name)
       else
-        do { e = e.next } while ((e ne null) && e.sym.name != entry.sym.name);
+        do { e = e.next } while ((e ne null) && e.sym.name != entry.sym.name)
       e
     }
 
+    /** TODO - we can test this more efficiently than checking isSubScope
+     *  in both directions. However the size test might be enough to quickly
+     *  rule out most failures.
+     */
+    def isSameScope(other: Scope) = (
+         (size == other.size)     // optimization - size is cached
+      && (this isSubScope other)
+      && (other isSubScope this)
+    )
+
+    def isSubScope(other: Scope) = {
+      def scopeContainsSym(sym: Symbol): Boolean = {
+        @tailrec def entryContainsSym(e: ScopeEntry): Boolean = e match {
+          case null => false
+          case _    =>
+            val comparableInfo = sym.info.substThis(sym.owner, e.sym.owner)
+            (e.sym.info =:= comparableInfo) || entryContainsSym(lookupNextEntry(e))
+        }
+        entryContainsSym(this lookupEntry sym.name)
+      }
+      other.toList forall scopeContainsSym
+    }
+
     /** Return all symbols as a list in the order they were entered in this scope.
      */
     override def toList: List[Symbol] = {
       if (elemsCache eq null) {
-        elemsCache = Nil
+        var symbols: List[Symbol] = Nil
+        var count = 0
         var e = elems
         while ((e ne null) && e.owner == this) {
-          elemsCache = e.sym :: elemsCache
+          count += 1
+          symbols ::= e.sym
           e = e.next
         }
+        elemsCache = symbols
+        cachedSize = count
       }
       elemsCache
     }
@@ -287,37 +377,17 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
      */
     def iterator: Iterator[Symbol] = toList.iterator
 
-/*
-    /** Does this scope contain an entry for `sym`?
-     */
-    def contains(sym: Symbol): Boolean = lookupAll(sym.name) contains sym
-
-    /** A scope that contains all symbols of this scope and that also contains `sym`.
-     */
-    def +(sym: Symbol): Scope =
-      if (contains(sym)) this
-      else {
-        val result = cloneScope
-        result enter sym
-        result
-      }
-
-    /** A scope that contains all symbols of this scope except `sym`.
-     */
-    def -(sym: Symbol): Scope =
-      if (!contains(sym)) this
-      else {
-        val result = cloneScope
-        result unlink sym
-        result
-      }
-*/
     override def foreach[U](p: Symbol => U): Unit = toList foreach p
 
-    override def filter(p: Symbol => Boolean): Scope =
-      if (!(toList forall p)) newScopeWith(toList filter p: _*) else this
-
-    @deprecated("Use `toList.reverse` instead", "2.10.0")
+    override def filterNot(p: Symbol => Boolean): Scope = (
+      if (toList exists p) newScopeWith(toList filterNot p: _*)
+      else this
+    )
+    override def filter(p: Symbol => Boolean): Scope = (
+      if (toList forall p) this
+      else newScopeWith(toList filter p: _*)
+    )
+    @deprecated("Use `toList.reverse` instead", "2.10.0") // Used in SBT 0.12.4
     def reverse: List[Symbol] = toList.reverse
 
     override def mkString(start: String, sep: String, end: String) =
@@ -396,6 +466,4 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
   class ErrorScope(owner: Symbol) extends Scope
 
   private final val maxRecursions = 1000
-
 }
-
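
The lookupAll and lookupAllEntries iterators introduced above rely on a compact idiom: `try current finally advance`, which yields the entry at hand and only then steps to the next link in the bucket chain. A self-contained sketch of the same idiom over an invented singly linked Node class (purely illustrative, not part of this patch):

    object ChainIteratorDemo extends App {
      final class Node(val value: Int, val next: Node) // next == null ends the chain

      def values(head: Node): Iterator[Int] = new Iterator[Int] {
        private var e = head
        def hasNext: Boolean = e ne null
        // Return the current value, then advance -- the same shape as
        // `try e.sym finally e = lookupNextEntry(e)` in Scopes.lookupAll.
        def next(): Int = try e.value finally e = e.next
      }

      val chain = new Node(1, new Node(2, new Node(3, null)))
      println(values(chain).toList) // List(1, 2, 3)
    }
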
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
index 539d191..614e71b 100644
--- a/src/reflect/scala/reflect/internal/StdAttachments.scala
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 
 trait StdAttachments {
@@ -13,6 +14,7 @@ trait StdAttachments {
     def setAttachments(attachments: scala.reflect.macros.Attachments { type Pos = Position }): this.type = { rawatt = attachments; this }
     def updateAttachment[T: ClassTag](attachment: T): this.type = { rawatt = rawatt.update(attachment); this }
     def removeAttachment[T: ClassTag]: this.type = { rawatt = rawatt.remove[T]; this }
+    def hasAttachment[T: ClassTag]: Boolean = rawatt.contains[T]
 
     // cannot be final due to SynchronizedSymbols
     def pos: Position = rawatt.pos
@@ -20,9 +22,15 @@ trait StdAttachments {
     def setPos(newpos: Position): this.type = { pos = newpos; this }
   }
 
-  /** When present, indicates that the host `Ident` has been created from a backquoted identifier.
-   */
-  case object BackquotedIdentifierAttachment
+  /** Attachment that knows how to import itself into another universe. */
+  trait ImportableAttachment {
+    def importAttachment(importer: Importer): this.type
+  }
+
+  /** Attachment that doesn't contain any reflection artifacts and can be imported as-is. */
+  trait PlainAttachment extends ImportableAttachment {
+    def importAttachment(importer: Importer): this.type = this
+  }
 
   /** Stores the trees that give rise to a refined type to be used in reification.
    *  Unfortunately typed `CompoundTypeTree` is lacking essential info, and the reifier cannot use `CompoundTypeTree.tpe`.
@@ -30,47 +38,18 @@ trait StdAttachments {
    */
   case class CompoundTypeTreeOriginalAttachment(parents: List[Tree], stats: List[Tree])
 
-  /** Is added by the macro engine to the results of macro expansions.
-   *  Stores the original expandee as it entered the `macroExpand` function.
+  /** When present, indicates that the host `Ident` has been created from a backquoted identifier.
    */
-  case class MacroExpansionAttachment(original: Tree)
+  case object BackquotedIdentifierAttachment extends PlainAttachment
 
-  /** When present, suppresses macro expansion for the host.
-   *  This is occasionally necessary, e.g. to prohibit eta-expansion of macros.
-   *
-   *  Does not affect expandability of child nodes, there's context.withMacrosDisabled for that
-   *  (but think thrice before using that API - see the discussion at https://github.com/scala/scala/pull/1639).
+  /** Identifies trees that are either the result or an intermediate value of for-loop desugaring.
    */
-  case object SuppressMacroExpansionAttachment
+  case object ForAttachment extends PlainAttachment
 
-  /** Suppresses macro expansion of the tree by putting SuppressMacroExpansionAttachment on it.
+  /** Identifies unit constants that were inserted by the compiler (e.g. by gen.mkBlock).
    */
-  def suppressMacroExpansion(tree: Tree) = tree.updateAttachment(SuppressMacroExpansionAttachment)
+  case object SyntheticUnitAttachment extends PlainAttachment
 
-  /** Unsuppresses macro expansion of the tree by removing SuppressMacroExpansionAttachment from it and its children.
-   */
-  def unsuppressMacroExpansion(tree: Tree): Tree = {
-    tree.removeAttachment[SuppressMacroExpansionAttachment.type]
-    tree match {
-      // see the comment to `isMacroExpansionSuppressed` to learn why we need
-      // a special traversal strategy here
-      case Apply(fn, _) => unsuppressMacroExpansion(fn)
-      case TypeApply(fn, _) => unsuppressMacroExpansion(fn)
-      case _ => // do nothing
-    }
-    tree
-  }
-
-  /** Determines whether a tree should not be expanded, because someone has put SuppressMacroExpansionAttachment on it or one of its children.
-   */
-  def isMacroExpansionSuppressed(tree: Tree): Boolean =
-    if (tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined) true
-    else tree match {
-      // we have to account for the fact that during typechecking an expandee might become wrapped,
-      // i.e. surrounded by an inferred implicit argument application or by an inferred type argument application.
-      // in that case the expandee itself will no longer be suppressed and we need to look at the core
-      case Apply(fn, _) => isMacroExpansionSuppressed(fn)
-      case TypeApply(fn, _) => isMacroExpansionSuppressed(fn)
-      case _ => false
-    }
+  /** Untyped list of subpatterns attached to selector dummy. */
+  case class SubpatternsAttachment(patterns: List[Tree])
 }
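
The attachment API extended above (updateAttachment, removeAttachment, and the new hasAttachment) is keyed by ClassTag: at most one value is stored per attachment type. The following is a standalone sketch of that lookup-by-type pattern, using an invented AttachmentBag in place of scala.reflect.macros.Attachments; all names here are illustrative assumptions.

    import scala.reflect.{ClassTag, classTag}

    object AttachmentBagDemo extends App {
      // One slot per attachment class, mirroring rawatt.update / rawatt.contains.
      final class AttachmentBag(slots: Map[Class[_], Any] = Map.empty) {
        def update[T: ClassTag](value: T): AttachmentBag =
          new AttachmentBag(slots + (classTag[T].runtimeClass -> value))
        def contains[T: ClassTag]: Boolean =
          slots.contains(classTag[T].runtimeClass)
        def get[T: ClassTag]: Option[T] =
          slots.get(classTag[T].runtimeClass).map(_.asInstanceOf[T])
      }

      final case class ForMarker(comment: String) // stand-in for a PlainAttachment

      val bag = new AttachmentBag().update(ForMarker("inserted by for desugaring"))
      println(bag.contains[ForMarker]) // true
      println(bag.get[ForMarker])      // Some(ForMarker(...))
      println(bag.contains[String])    // false
    }
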
diff --git a/src/reflect/scala/reflect/internal/StdCreators.scala b/src/reflect/scala/reflect/internal/StdCreators.scala
index 5e5e4f9..a0084dc 100644
--- a/src/reflect/scala/reflect/internal/StdCreators.scala
+++ b/src/reflect/scala/reflect/internal/StdCreators.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import scala.reflect.api.{TreeCreator, TypeCreator}
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index c3b7f24..6848c35 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -3,10 +3,12 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import java.security.MessageDigest
+import java.util.UUID.randomUUID
 import Chars.isOperatorPart
 import scala.annotation.switch
 import scala.language.implicitConversions
@@ -18,8 +20,6 @@ trait StdNames {
 
   def encode(str: String): TermName = newTermNameCached(NameTransformer.encode(str))
 
-  implicit def lowerTermNames(n: TermName): String = n.toString
-
   /** Tensions: would like the keywords to be the very first names entered into the names
    *  storage so their ids count from 0, which simplifies the parser. Switched to abstract
    *  classes to avoid all the indirection which is generated with implementation-containing
@@ -37,11 +37,7 @@ trait StdNames {
       kws = kws + result
       result
     }
-    def result: Set[TermName] = {
-      val result = kws
-      kws = null
-      result
-    }
+    def result: Set[TermName] = try kws finally kws = null
   }
 
   private[reflect] def compactifyName(orig: String): String = compactify(orig)
@@ -57,14 +53,17 @@ trait StdNames {
      *
      * We obtain the formula:
      *
-     *   FileNameLength = 2*(MaxNameLength / 4) + 2.marker.length + 32 + 6
+     *   FileNameLength = 2*(MaxNameLength / 4) + 2*marker.length + 32 + suffixLength
      *
-     * (+6 for ".class"). MaxNameLength can therefore be computed as follows:
+     * (+suffixLength for ".class" and potential module class suffix that is added *after* this transform).
+     *
+     * MaxNameLength can therefore be computed as follows:
      */
     val marker = "$$$$"
+    val maxSuffixLength = "$.class".length + 1 // potential module class suffix and file extension
     val MaxNameLength = math.min(
-      settings.maxClassfileName.value - 6,
-      2 * (settings.maxClassfileName.value - 6 - 2*marker.length - 32)
+      settings.maxClassfileName.value - maxSuffixLength,
+      2 * (settings.maxClassfileName.value - maxSuffixLength - 2*marker.length - 32)
     )
     def toMD5(s: String, edge: Int): String = {
       val prefix = s take edge
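
To make the bound above concrete: assuming an -Xmax-classfile-name of 255 (a typical default, not stated in this patch), maxSuffixLength is 8 and marker.length is 4, so a quick check of the formula gives:

    // a sketch of the MaxNameLength arithmetic under the assumed setting
    val maxClassfileName = 255                    // assumed -Xmax-classfile-name
    val maxSuffixLength  = "$.class".length + 1   // 8
    val markerLength     = "$$$$".length          // 4
    val maxNameLength = math.min(
      maxClassfileName - maxSuffixLength,                              // 247
      2 * (maxClassfileName - maxSuffixLength - 2 * markerLength - 32) // 414
    )
    // maxNameLength == 247 under these assumptions.
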
@@ -93,8 +92,11 @@ trait StdNames {
     def flattenedName(segments: Name*): NameType =
       compactify(segments mkString NAME_JOIN_STRING)
 
-    val MODULE_SUFFIX_STRING: String = NameTransformer.MODULE_SUFFIX_STRING
-    val NAME_JOIN_STRING: String     = NameTransformer.NAME_JOIN_STRING
+    val NAME_JOIN_STRING: String              = NameTransformer.NAME_JOIN_STRING
+    val MODULE_SUFFIX_STRING: String          = NameTransformer.MODULE_SUFFIX_STRING
+    val LOCAL_SUFFIX_STRING: String           = NameTransformer.LOCAL_SUFFIX_STRING
+    val TRAIT_SETTER_SEPARATOR_STRING: String = NameTransformer.TRAIT_SETTER_SEPARATOR_STRING
+
     val SINGLETON_SUFFIX: String     = ".type"
 
     val ANON_CLASS_NAME: NameType    = "$anon"
@@ -105,7 +107,6 @@ trait StdNames {
     val IMPORT: NameType             = "<import>"
     val MODULE_SUFFIX_NAME: NameType = MODULE_SUFFIX_STRING
     val MODULE_VAR_SUFFIX: NameType  = "$module"
-    val NAME_JOIN_NAME: NameType     = NAME_JOIN_STRING
     val PACKAGE: NameType            = "package"
     val ROOT: NameType               = "<root>"
     val SPECIALIZED_SUFFIX: NameType = "$sp"
@@ -122,20 +123,17 @@ trait StdNames {
     final val Short: NameType   = "Short"
     final val Unit: NameType    = "Unit"
 
-    final val ScalaValueNames: scala.List[NameType] =
-      scala.List(Byte, Char, Short, Int, Long, Float, Double, Boolean, Unit)
-
     // some types whose companions we utilize
-    final val AnyRef: NameType     = "AnyRef"
-    final val Array: NameType      = "Array"
-    final val List: NameType       = "List"
-    final val Seq: NameType        = "Seq"
-    final val Symbol: NameType     = "Symbol"
-    final val ClassTag: NameType   = "ClassTag"
-    final val WeakTypeTag: NameType = "WeakTypeTag"
-    final val TypeTag : NameType   = "TypeTag"
-    final val Expr: NameType       = "Expr"
-    final val String: NameType     = "String"
+    final val AnyRef: NameType        = "AnyRef"
+    final val Array: NameType         = "Array"
+    final val List: NameType          = "List"
+    final val Seq: NameType           = "Seq"
+    final val Symbol: NameType        = "Symbol"
+    final val WeakTypeTag: NameType   = "WeakTypeTag"
+    final val TypeTag : NameType      = "TypeTag"
+    final val Expr: NameType          = "Expr"
+    final val String: NameType        = "String"
+    final val StringContext: NameType = "StringContext"
 
     // fictions we use as both types and terms
     final val ERROR: NameType    = "<error>"
@@ -207,10 +205,11 @@ trait StdNames {
   }
 
   abstract class TypeNames extends Keywords with TypeNamesApi {
+    override type NameType = TypeName
+
     protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
 
     final val BYNAME_PARAM_CLASS_NAME: NameType        = "<byname>"
-    final val EQUALS_PATTERN_NAME: NameType            = "<equals>"
     final val JAVA_REPEATED_PARAM_CLASS_NAME: NameType = "<repeated...>"
     final val LOCAL_CHILD: NameType                    = "<local child>"
     final val REFINE_CLASS_NAME: NameType              = "<refinement>"
@@ -218,29 +217,45 @@ trait StdNames {
     final val WILDCARD_STAR: NameType                  = "_*"
     final val REIFY_TREECREATOR_PREFIX: NameType       = "$treecreator"
     final val REIFY_TYPECREATOR_PREFIX: NameType       = "$typecreator"
+    final val MACRO_BUNDLE_SUFFIX: NameType            = "$Bundle"
 
     final val Any: NameType             = "Any"
     final val AnyVal: NameType          = "AnyVal"
-    final val ExprApi: NameType         = "ExprApi"
+    final val FlagSet: NameType         = "FlagSet"
     final val Mirror: NameType          = "Mirror"
+    final val Modifiers: NameType       = "Modifiers"
     final val Nothing: NameType         = "Nothing"
     final val Null: NameType            = "Null"
     final val Object: NameType          = "Object"
-    final val PartialFunction: NameType = "PartialFunction"
     final val PrefixType: NameType      = "PrefixType"
     final val Product: NameType         = "Product"
     final val Serializable: NameType    = "Serializable"
     final val Singleton: NameType       = "Singleton"
     final val Throwable: NameType       = "Throwable"
+    final val unchecked: NameType       = "unchecked"
 
+    final val api: NameType                 = "api"
     final val Annotation: NameType          = "Annotation"
+    final val CaseDef: NameType             = "CaseDef"
     final val ClassfileAnnotation: NameType = "ClassfileAnnotation"
     final val ClassManifest: NameType       = "ClassManifest"
     final val Enum: NameType                = "Enum"
     final val Group: NameType               = "Group"
+    final val implicitNotFound: NameType    = "implicitNotFound"
+    final val Liftable: NameType            = "Liftable"
+    final val Unliftable: NameType          = "Unliftable"
+    final val Name: NameType                = "Name"
     final val Tree: NameType                = "Tree"
+    final val TermName: NameType            = "TermName"
     final val Type : NameType               = "Type"
-    final val TypeTree: NameType            = "TypeTree"
+    final val TypeName: NameType            = "TypeName"
+    final val TypeDef: NameType             = "TypeDef"
+    final val Quasiquote: NameType          = "Quasiquote"
+
+    // quasiquote-specific names
+    final val QUASIQUOTE_FUNCTION: NameType     = "$quasiquote$function$"
+    final val QUASIQUOTE_MODS: NameType         = "$quasiquote$mods$"
+    final val QUASIQUOTE_TUPLE: NameType        = "$quasiquote$tuple$"
 
     // Annotation simple names, used in Namer
     final val BeanPropertyAnnot: NameType = "BeanProperty"
@@ -250,22 +265,20 @@ trait StdNames {
     // Classfile Attributes
     final val AnnotationDefaultATTR: NameType      = "AnnotationDefault"
     final val BridgeATTR: NameType                 = "Bridge"
-    final val ClassfileAnnotationATTR: NameType    = "RuntimeInvisibleAnnotations" // RetentionPolicy.CLASS. Currently not used (Apr 2009).
     final val CodeATTR: NameType                   = "Code"
     final val ConstantValueATTR: NameType          = "ConstantValue"
     final val DeprecatedATTR: NameType             = "Deprecated"
     final val ExceptionsATTR: NameType             = "Exceptions"
     final val InnerClassesATTR: NameType           = "InnerClasses"
-    final val LineNumberTableATTR: NameType        = "LineNumberTable"
-    final val LocalVariableTableATTR: NameType     = "LocalVariableTable"
     final val RuntimeAnnotationATTR: NameType      = "RuntimeVisibleAnnotations"   // RetentionPolicy.RUNTIME
-    final val RuntimeParamAnnotationATTR: NameType = "RuntimeVisibleParameterAnnotations" // RetentionPolicy.RUNTIME (annotations on parameters)
     final val ScalaATTR: NameType                  = "Scala"
     final val ScalaSignatureATTR: NameType         = "ScalaSig"
     final val SignatureATTR: NameType              = "Signature"
     final val SourceFileATTR: NameType             = "SourceFile"
     final val SyntheticATTR: NameType              = "Synthetic"
 
+    final val scala_ : NameType = "scala"
+
     def dropSingletonName(name: Name): TypeName = (name dropRight SINGLETON_SUFFIX.length).toTypeName
     def singletonName(name: Name): TypeName     = (name append SINGLETON_SUFFIX).toTypeName
     def implClassName(name: Name): TypeName     = (name append IMPL_CLASS_SUFFIX).toTypeName
@@ -273,27 +286,30 @@ trait StdNames {
   }
 
   abstract class TermNames extends Keywords with TermNamesApi {
+    override type NameType = TermName
+
     protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
 
     /** Base strings from which synthetic names are derived. */
-    val BITMAP_PREFIX                 = "bitmap$"
-    val CHECK_IF_REFUTABLE_STRING     = "check$ifrefutable$"
-    val DEFAULT_GETTER_STRING         = "$default$"
-    val DEFAULT_GETTER_INIT_STRING    = "$lessinit$greater" // CONSTRUCTOR.encoded, less is more
-    val DO_WHILE_PREFIX               = "doWhile$"
-    val EVIDENCE_PARAM_PREFIX         = "evidence$"
-    val EXCEPTION_RESULT_PREFIX       = "exceptionResult"
-    val EXPAND_SEPARATOR_STRING       = "$$"
-    val INTERPRETER_IMPORT_WRAPPER    = "$iw"
-    val INTERPRETER_LINE_PREFIX       = "line"
-    val INTERPRETER_VAR_PREFIX        = "res"
-    val INTERPRETER_WRAPPER_SUFFIX    = "$object"
-    val LOCALDUMMY_PREFIX             = "<local "       // owner of local blocks
-    val PROTECTED_PREFIX              = "protected$"
-    val PROTECTED_SET_PREFIX          = PROTECTED_PREFIX + "set"
-    val SUPER_PREFIX_STRING           = "super$"
-    val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
-    val WHILE_PREFIX                  = "while$"
+    val BITMAP_PREFIX                  = "bitmap$"
+    val CHECK_IF_REFUTABLE_STRING      = "check$ifrefutable$"
+    val DEFAULT_GETTER_STRING          = "$default$"
+    val DEFAULT_GETTER_INIT_STRING     = NameTransformer.encode("<init>") + DEFAULT_GETTER_STRING
+    val DO_WHILE_PREFIX                = "doWhile$"
+    val EVIDENCE_PARAM_PREFIX          = "evidence$"
+    val EXCEPTION_RESULT_PREFIX        = "exceptionResult"
+    val EXPAND_SEPARATOR_STRING        = "$$"
+    val FRESH_TERM_NAME_PREFIX         = "x$"
+    val INTERPRETER_IMPORT_WRAPPER     = "$iw"
+    val LOCALDUMMY_PREFIX              = "<local "       // owner of local blocks
+    val PROTECTED_PREFIX               = "protected$"
+    val PROTECTED_SET_PREFIX           = PROTECTED_PREFIX + "set"
+    val SUPER_PREFIX_STRING            = "super$"
+    val WHILE_PREFIX                   = "while$"
+    val FRESH_PREFIX                   = "fresh"
+    val FRESH_SUFFIX                   = "macro$" // uses a keyword to avoid collisions with mangled names
+    val QUAL_PREFIX                    = "qual$"
+    val NAMEDARG_PREFIX                = "x$"
 
     // Compiler internal names
     val ANYname: NameType                  = "<anyname>"
@@ -301,12 +317,9 @@ trait StdNames {
     val DEFAULT_CASE: NameType             = "defaultCase$"
     val EQEQ_LOCAL_VAR: NameType           = "eqEqTemp$"
     val FAKE_LOCAL_THIS: NameType          = "this$"
-    val INITIALIZER: NameType              = CONSTRUCTOR // Is this buying us something?
     val LAZY_LOCAL: NameType               = "$lzy"
     val LAZY_SLOW_SUFFIX: NameType         = "$lzycompute"
-    val LOCAL_SUFFIX_STRING                = " "
-    val UNIVERSE_BUILD_PREFIX: NameType    = "$u.build."
-    val UNIVERSE_BUILD: NameType           = "$u.build"
+    val UNIVERSE_BUILD_PREFIX: NameType    = "$u.internal.reificationSupport."
     val UNIVERSE_PREFIX: NameType          = "$u."
     val UNIVERSE_SHORT: NameType           = "$u"
     val MIRROR_PREFIX: NameType            = "$m."
@@ -316,24 +329,31 @@ trait StdNames {
     val REIFY_FREE_THIS_SUFFIX: NameType   = "$this"
     val REIFY_FREE_VALUE_SUFFIX: NameType  = "$value"
     val REIFY_SYMDEF_PREFIX: NameType      = "symdef$"
+    val QUASIQUOTE_CASE: NameType          = "$quasiquote$case$"
+    val QUASIQUOTE_EARLY_DEF: NameType     = "$quasiquote$early$def$"
+    val QUASIQUOTE_FILE: String            = "<quasiquote>"
+    val QUASIQUOTE_FOR_ENUM: NameType      = "$quasiquote$for$enum$"
+    val QUASIQUOTE_NAME_PREFIX: String     = "nn$"
+    val QUASIQUOTE_PACKAGE_STAT: NameType  = "$quasiquote$package$stat$"
+    val QUASIQUOTE_PARAM: NameType         = "$quasiquote$param$"
+    val QUASIQUOTE_PAT_DEF: NameType       = "$quasiquote$pat$def$"
+    val QUASIQUOTE_PREFIX: String          = "qq$"
+    val QUASIQUOTE_REFINE_STAT: NameType   = "$quasiquote$refine$stat$"
+    val QUASIQUOTE_TUPLE: NameType         = "$quasiquote$tuple$"
+    val QUASIQUOTE_UNLIFT_HELPER: String   = "$quasiquote$unlift$helper$"
     val MIXIN_CONSTRUCTOR: NameType        = "$init$"
     val MODULE_INSTANCE_FIELD: NameType    = NameTransformer.MODULE_INSTANCE_NAME  // "MODULE$"
     val OUTER: NameType                    = "$outer"
-    val OUTER_LOCAL: NameType              = OUTER + LOCAL_SUFFIX_STRING // "$outer ", note the space
+    val OUTER_LOCAL: NameType              = OUTER.localName
     val OUTER_SYNTH: NameType              = "<outer>" // emitted by virtual pattern matcher, replaced by outer accessor in explicitouter
     val ROOTPKG: NameType                  = "_root_"
     val SELECTOR_DUMMY: NameType           = "<unapply-selector>"
     val SELF: NameType                     = "$this"
-    val SETTER_SUFFIX: NameType            = encode("_=")
+    val SETTER_SUFFIX: NameType            = NameTransformer.SETTER_SUFFIX_STRING
     val SPECIALIZED_INSTANCE: NameType     = "specInstance$"
     val STAR: NameType                     = "*"
     val THIS: NameType                     = "_$this"
 
-    @deprecated("Use SPECIALIZED_SUFFIX", "2.10.0")
-    def SPECIALIZED_SUFFIX_STRING = SPECIALIZED_SUFFIX.toString
-    @deprecated("Use SPECIALIZED_SUFFIX", "2.10.0")
-    def SPECIALIZED_SUFFIX_NAME: TermName = SPECIALIZED_SUFFIX.toTermName
-
     def isConstructorName(name: Name)       = name == CONSTRUCTOR || name == MIXIN_CONSTRUCTOR
     def isExceptionResultName(name: Name)   = name startsWith EXCEPTION_RESULT_PREFIX
     def isImplClassName(name: Name)         = name endsWith IMPL_CLASS_SUFFIX
@@ -341,7 +361,6 @@ trait StdNames {
     def isLocalName(name: Name)             = name endsWith LOCAL_SUFFIX_STRING
     def isLoopHeaderLabel(name: Name)       = (name startsWith WHILE_PREFIX) || (name startsWith DO_WHILE_PREFIX)
     def isProtectedAccessorName(name: Name) = name startsWith PROTECTED_PREFIX
-    def isSuperAccessorName(name: Name)     = name startsWith SUPER_PREFIX_STRING
     def isReplWrapperName(name: Name)       = name containsName INTERPRETER_IMPORT_WRAPPER
     def isSetterName(name: Name)            = name endsWith SETTER_SUFFIX
     def isTraitSetterName(name: Name)       = isSetterName(name) && (name containsName TRAIT_SETTER_SEPARATOR_STRING)
@@ -358,33 +377,32 @@ trait StdNames {
       )
     }
 
-    def isDeprecatedIdentifierName(name: Name) = name.toTermName match {
-      case nme.`then` | nme.`macro` => true
-      case _                        => false
-    }
-
     def isOpAssignmentName(name: Name) = name match {
       case raw.NE | raw.LE | raw.GE | EMPTY => false
       case _                                =>
       name.endChar == '=' && name.startChar != '=' && isOperatorPart(name.startChar)
     }
 
-    /** The expanded name of `name` relative to this class `base` with given `separator`
-     */
-    def expandedName(name: TermName, base: Symbol, separator: String = EXPAND_SEPARATOR_STRING): TermName =
+    private def expandedNameInternal(name: TermName, base: Symbol, separator: String): TermName =
       newTermNameCached(base.fullName('$') + separator + name)
 
+    /** The expanded name of `name` relative to this class `base`
+     */
+    def expandedName(name: TermName, base: Symbol) = expandedNameInternal(name, base, EXPAND_SEPARATOR_STRING)
+
     /** The expanded setter name of `name` relative to this class `base`
     */
-    def expandedSetterName(name: TermName, base: Symbol): TermName =
-      expandedName(name, base, separator = TRAIT_SETTER_SEPARATOR_STRING)
+    def expandedSetterName(name: TermName, base: Symbol) = expandedNameInternal(name, base, TRAIT_SETTER_SEPARATOR_STRING)
 
-    /** If `name` is an expandedName name, the original name.
-    *  Otherwise `name` itself.
-    */
-    def originalName(name: Name): Name = name.toString lastIndexOf "$$" match {
-      case -1 | 0 => name
-      case idx0 =>
+    /** If `name` is an expanded name, the original (unexpanded) name.
+     *  Otherwise `name` itself.
+     *  Look backward from the end of the string for "$$", and take the
+     *  part of the string after that; but if the string is "$$$" or longer,
+     *  be sure to retain the extra dollars.
+     */
+    def unexpandedName(name: Name): Name = name lastIndexOf "$$" match {
+      case 0 | -1 => name
+      case idx0   =>
         // Sketchville - We've found $$ but if it's part of $$$ or $$$$
         // or something we need to keep the bonus dollars, so e.g. foo$$$outer
         // has an original name of $outer.
@@ -394,34 +412,38 @@ trait StdNames {
         name drop idx + 2
     }
 
+    @deprecated("Use unexpandedName", "2.11.0") def originalName(name: Name): Name            = unexpandedName(name)
+    @deprecated("Use Name#dropModule", "2.11.0") def stripModuleSuffix(name: Name): Name      = name.dropModule
+    @deprecated("Use Name#dropLocal", "2.11.0") def localToGetter(name: TermName): TermName   = name.dropLocal
+    @deprecated("Use Name#dropLocal", "2.11.0") def dropLocalSuffix(name: Name): TermName     = name.dropLocal
+    @deprecated("Use Name#localName", "2.11.0") def getterToLocal(name: TermName): TermName   = name.localName
+    @deprecated("Use Name#setterName", "2.11.0") def getterToSetter(name: TermName): TermName = name.setterName
+    @deprecated("Use Name#getterName", "2.11.0") def getterName(name: TermName): TermName     = name.getterName
+    @deprecated("Use Name#getterName", "2.11.0") def setterToGetter(name: TermName): TermName = name.getterName
+
+    /**
+     * Convert `Tuple2$mcII` to `Tuple2`, or `T1$sp` to `T1`.
+     */
     def unspecializedName(name: Name): Name = (
+      // DUPLICATED LOGIC WITH `splitSpecializedName`
       if (name endsWith SPECIALIZED_SUFFIX)
-      name.subName(0, name.lastIndexOf('m') - 1)
+        name.subName(0, name.lastIndexOf('m') - 1)
       else name
     )
 
-    /*
-    def anonNumberSuffix(name: Name): Name = {
-      ("" + name) lastIndexOf '$' match {
-        case -1   => nme.EMPTY
-        case idx  =>
-          val s = name drop idx
-          if (s.toString forall (_.isDigit)) s
-          else nme.EMPTY
-      }
-    }
-    */
-
     /** Return the original name and the types on which this name
     *  is specialized. For example,
     *  {{{
-    *     splitSpecializedName("foo$mIcD$sp") == ('foo', "I", "D")
+    *     splitSpecializedName("foo$mIcD$sp") == ('foo', "D", "I")
     *  }}}
     *  `foo$mIcD$sp` is the name of a method specialized on two type
     *  parameters, the first one belonging to the method itself, on Int,
     *  and another one belonging to the enclosing class, on Double.
+    *
+    *  @return (unspecializedName, class tparam specializations, method tparam specializations)
     */
     def splitSpecializedName(name: Name): (Name, String, String) =
+      // DUPLICATED LOGIC WITH `unspecializedName`
     if (name endsWith SPECIALIZED_SUFFIX) {
       val name1 = name dropRight SPECIALIZED_SUFFIX.length
       val idxC  = name1 lastIndexOf 'c'
@@ -433,51 +455,23 @@ trait StdNames {
     } else
     (name, "", "")
 
-    def getterName(name: TermName): TermName     = if (isLocalName(name)) localToGetter(name) else name
-    def getterToLocal(name: TermName): TermName  = name append LOCAL_SUFFIX_STRING
-    def getterToSetter(name: TermName): TermName = name append SETTER_SUFFIX
-    def localToGetter(name: TermName): TermName  = name dropRight LOCAL_SUFFIX_STRING.length
-
-    def dropLocalSuffix(name: Name): Name  = if (name endsWith ' ') name dropRight 1 else name
-
-    def setterToGetter(name: TermName): TermName = {
-      val p = name.pos(TRAIT_SETTER_SEPARATOR_STRING)
-      if (p < name.length)
-      setterToGetter(name drop (p + TRAIT_SETTER_SEPARATOR_STRING.length))
-      else
-      name.subName(0, name.length - SETTER_SUFFIX.length)
-    }
-
     // Nominally, name$default$N, encoded for <init>
-    def defaultGetterName(name: Name, pos: Int): TermName = {
-      val prefix = if (isConstructorName(name)) DEFAULT_GETTER_INIT_STRING else name
-      newTermName(prefix + DEFAULT_GETTER_STRING + pos)
-    }
+    def defaultGetterName(name: Name, pos: Int): TermName = (
+      if (isConstructorName(name))
+        DEFAULT_GETTER_INIT_STRING + pos
+      else
+        name + DEFAULT_GETTER_STRING + pos
+    )
     // Nominally, name from name$default$N, CONSTRUCTOR for <init>
-    def defaultGetterToMethod(name: Name): TermName = {
-      val p = name.pos(DEFAULT_GETTER_STRING)
-      if (p < name.length) {
-        val q = name.toTermName.subName(0, p)
-        // i.e., if (q.decoded == CONSTRUCTOR.toString) CONSTRUCTOR else q
-        if (q.toString == DEFAULT_GETTER_INIT_STRING) CONSTRUCTOR else q
-      } else name.toTermName
-    }
-
-    // If the name ends with $nn where nn are
-    // all digits, strip the $ and the digits.
-    // Otherwise return the argument.
-    def stripAnonNumberSuffix(name: Name): Name = {
-      var pos = name.length
-      while (pos > 0 && name.charAt(pos - 1).isDigit)
-      pos -= 1
-
-      if (pos <= 0 || pos == name.length || name.charAt(pos - 1) != '$') name
-      else name.subName(0, pos - 1)
-    }
-
-    def stripModuleSuffix(name: Name): Name = (
-      if (isModuleName(name)) name dropRight MODULE_SUFFIX_STRING.length else name
+    def defaultGetterToMethod(name: Name): TermName = (
+      if (name startsWith DEFAULT_GETTER_INIT_STRING)
+        nme.CONSTRUCTOR
+      else name indexOf DEFAULT_GETTER_STRING match {
+        case -1  => name.toTermName
+        case idx => name.toTermName take idx
+      }
     )
+
     def localDummyName(clazz: Symbol): TermName = newTermName(LOCALDUMMY_PREFIX + clazz.name + ">")
     def superName(name: Name): TermName         = newTermName(SUPER_PREFIX_STRING + name)
 
@@ -489,8 +483,6 @@ trait StdNames {
 
     final val Nil: NameType                 = "Nil"
     final val Predef: NameType              = "Predef"
-    final val ScalaRunTime: NameType        = "ScalaRunTime"
-    final val Some: NameType                = "Some"
 
     val _1 : NameType  = "_1"
     val _2 : NameType  = "_2"
@@ -586,115 +578,109 @@ trait StdNames {
     val Annotation: NameType           = "Annotation"
     val Any: NameType                  = "Any"
     val AnyVal: NameType               = "AnyVal"
-    val AppliedTypeTree: NameType      = "AppliedTypeTree"
     val Apply: NameType                = "Apply"
     val ArrayAnnotArg: NameType        = "ArrayAnnotArg"
-    val Constant: NameType             = "Constant"
+    val CaseDef: NameType              = "CaseDef"
+    val ClassInfoType: NameType        = "ClassInfoType"
     val ConstantType: NameType         = "ConstantType"
     val EmptyPackage: NameType         = "EmptyPackage"
     val EmptyPackageClass: NameType    = "EmptyPackageClass"
-    val ExistentialTypeTree: NameType  = "ExistentialTypeTree"
+    val ExistentialType: NameType      = "ExistentialType"
     val Flag : NameType                = "Flag"
+    val FlagsRepr: NameType            = "FlagsRepr"
     val Ident: NameType                = "Ident"
+    val ImplicitParams: NameType       = "ImplicitParams"
     val Import: NameType               = "Import"
     val Literal: NameType              = "Literal"
     val LiteralAnnotArg: NameType      = "LiteralAnnotArg"
+    val MethodType: NameType           = "MethodType"
     val Modifiers: NameType            = "Modifiers"
     val NestedAnnotArg: NameType       = "NestedAnnotArg"
+    val New: NameType                  = "New"
     val NoFlags: NameType              = "NoFlags"
-    val NoPrefix: NameType             = "NoPrefix"
     val NoSymbol: NameType             = "NoSymbol"
+    val NoMods: NameType               = "NoMods"
     val Nothing: NameType              = "Nothing"
-    val NoType: NameType               = "NoType"
     val Null: NameType                 = "Null"
+    val NullaryMethodType: NameType    = "NullaryMethodType"
     val Object: NameType               = "Object"
+    val PolyType: NameType             = "PolyType"
+    val RefinedType: NameType          = "RefinedType"
     val RootPackage: NameType          = "RootPackage"
     val RootClass: NameType            = "RootClass"
     val Select: NameType               = "Select"
     val SelectFromTypeTree: NameType   = "SelectFromTypeTree"
-    val StringContext: NameType        = "StringContext"
+    val SingleType: NameType           = "SingleType"
+    val SuperType: NameType            = "SuperType"
     val This: NameType                 = "This"
     val ThisType: NameType             = "ThisType"
-    val Tree : NameType                = "Tree"
     val Tuple2: NameType               = "Tuple2"
     val TYPE_ : NameType               = "TYPE"
-    val TypeApply: NameType            = "TypeApply"
+    val TypeBounds: NameType           = "TypeBounds"
     val TypeRef: NameType              = "TypeRef"
     val TypeTree: NameType             = "TypeTree"
     val UNIT : NameType                = "UNIT"
+    val accessor: NameType             = "accessor"
     val add_ : NameType                = "add"
     val annotation: NameType           = "annotation"
     val anyValClass: NameType          = "anyValClass"
-    val append: NameType               = "append"
     val apply: NameType                = "apply"
     val applyDynamic: NameType         = "applyDynamic"
     val applyDynamicNamed: NameType    = "applyDynamicNamed"
     val applyOrElse: NameType          = "applyOrElse"
     val args : NameType                = "args"
-    val argv : NameType                = "argv"
     val arrayClass: NameType           = "arrayClass"
-    val arrayElementClass: NameType    = "arrayElementClass"
-    val arrayValue: NameType           = "arrayValue"
     val array_apply : NameType         = "array_apply"
     val array_clone : NameType         = "array_clone"
     val array_length : NameType        = "array_length"
     val array_update : NameType        = "array_update"
-    val arraycopy: NameType            = "arraycopy"
-    val asTerm: NameType               = "asTerm"
     val asModule: NameType             = "asModule"
-    val asMethod: NameType             = "asMethod"
     val asType: NameType               = "asType"
-    val asClass: NameType              = "asClass"
     val asInstanceOf_ : NameType       = "asInstanceOf"
     val asInstanceOf_Ob : NameType     = "$asInstanceOf"
-    val assert_ : NameType             = "assert"
-    val assume_ : NameType             = "assume"
     val box: NameType                  = "box"
-    val build : NameType               = "build"
     val bytes: NameType                = "bytes"
+    val c: NameType                    = "c"
     val canEqual_ : NameType           = "canEqual"
-    val checkInitialized: NameType     = "checkInitialized"
-    val ClassManifestFactory: NameType = "ClassManifestFactory"
     val classOf: NameType              = "classOf"
-    val clone_ : NameType              = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure
-    val conforms: NameType             = "conforms"
+    val clone_ : NameType              = "clone"
+    val collection: NameType           = "collection"
+    val conforms: NameType             = "$conforms" // dollar prefix to avoid accidental shadowing
     val copy: NameType                 = "copy"
+    val create: NameType               = "create"
     val currentMirror: NameType        = "currentMirror"
-    val definitions: NameType          = "definitions"
     val delayedInit: NameType          = "delayedInit"
     val delayedInitArg: NameType       = "delayedInit$body"
+    val dollarScope: NameType          = "$scope"
     val drop: NameType                 = "drop"
     val elem: NameType                 = "elem"
-    val emptyValDef: NameType          = "emptyValDef"
+    val noSelfType: NameType           = "noSelfType"
     val ensureAccessible : NameType    = "ensureAccessible"
     val eq: NameType                   = "eq"
     val equalsNumChar : NameType       = "equalsNumChar"
     val equalsNumNum : NameType        = "equalsNumNum"
     val equalsNumObject : NameType     = "equalsNumObject"
-    val equals_ : NameType             = if (forMSIL) "Equals" else "equals"
+    val equals_ : NameType             = "equals"
     val error: NameType                = "error"
-    val eval: NameType                 = "eval"
     val ex: NameType                   = "ex"
     val experimental: NameType         = "experimental"
     val f: NameType                    = "f"
     val false_ : NameType              = "false"
     val filter: NameType               = "filter"
-    val finalize_ : NameType           = if (forMSIL) "Finalize" else "finalize"
+    val finalize_ : NameType           = "finalize"
     val find_ : NameType               = "find"
-    val flagsFromBits : NameType       = "flagsFromBits"
     val flatMap: NameType              = "flatMap"
     val foreach: NameType              = "foreach"
-    val genericArrayOps: NameType      = "genericArrayOps"
+    val freshTermName: NameType        = "freshTermName"
+    val freshTypeName: NameType        = "freshTypeName"
     val get: NameType                  = "get"
-    val getOrElse: NameType            = "getOrElse"
-    val hasNext: NameType              = "hasNext"
-    val hashCode_ : NameType           = if (forMSIL) "GetHashCode" else "hashCode"
+    val hashCode_ : NameType           = "hashCode"
     val hash_ : NameType               = "hash"
-    val head: NameType                 = "head"
-    val identity: NameType             = "identity"
+    val head : NameType                = "head"
+    val immutable: NameType            = "immutable"
     val implicitly: NameType           = "implicitly"
     val in: NameType                   = "in"
-    val info: NameType                 = "info"
+    val internal: NameType             = "internal"
     val inlinedEquals: NameType        = "inlinedEquals"
     val isArray: NameType              = "isArray"
     val isDefinedAt: NameType          = "isDefinedAt"
@@ -706,102 +692,140 @@ trait StdNames {
     val lang: NameType                 = "lang"
     val length: NameType               = "length"
     val lengthCompare: NameType        = "lengthCompare"
-    val liftedTree: NameType           = "liftedTree"
-    val `macro` : NameType             = "macro"
-    val macroThis : NameType           = "_this"
     val macroContext : NameType        = "c"
     val main: NameType                 = "main"
-    val manifest: NameType             = "manifest"
-    val ManifestFactory: NameType      = "ManifestFactory"
     val manifestToTypeTag: NameType    = "manifestToTypeTag"
     val map: NameType                  = "map"
     val materializeClassTag: NameType  = "materializeClassTag"
     val materializeWeakTypeTag: NameType = "materializeWeakTypeTag"
     val materializeTypeTag: NameType   = "materializeTypeTag"
-    val mirror : NameType              = "mirror"
     val moduleClass : NameType         = "moduleClass"
-    val name: NameType                 = "name"
+    val mkAnnotation: NameType         = "mkAnnotation"
+    val mkEarlyDef: NameType           = "mkEarlyDef"
+    val mkIdent: NameType              = "mkIdent"
+    val mkPackageStat: NameType        = "mkPackageStat"
+    val mkRefineStat: NameType         = "mkRefineStat"
+    val mkRefTree: NameType            = "mkRefTree"
+    val mkSelect: NameType             = "mkSelect"
+    val mkThis: NameType               = "mkThis"
+    val mkTypeTree: NameType           = "mkTypeTree"
     val ne: NameType                   = "ne"
     val newArray: NameType             = "newArray"
     val newFreeTerm: NameType          = "newFreeTerm"
     val newFreeType: NameType          = "newFreeType"
     val newNestedSymbol: NameType      = "newNestedSymbol"
     val newScopeWith: NameType         = "newScopeWith"
-    val next: NameType                 = "next"
-    val nmeNewTermName: NameType       = "newTermName"
-    val nmeNewTypeName: NameType       = "newTypeName"
-    val normalize: NameType            = "normalize"
     val notifyAll_ : NameType          = "notifyAll"
     val notify_ : NameType             = "notify"
     val null_ : NameType               = "null"
-    val ofDim: NameType                = "ofDim"
-    val origin: NameType               = "origin"
+    val pendingSuperCall: NameType     = "pendingSuperCall"
     val prefix : NameType              = "prefix"
     val productArity: NameType         = "productArity"
     val productElement: NameType       = "productElement"
     val productIterator: NameType      = "productIterator"
     val productPrefix: NameType        = "productPrefix"
     val readResolve: NameType          = "readResolve"
-    val reflect : NameType             = "reflect"
     val reify : NameType               = "reify"
+    val reificationSupport : NameType  = "reificationSupport"
     val rootMirror : NameType          = "rootMirror"
-    val runOrElse: NameType            = "runOrElse"
     val runtime: NameType              = "runtime"
     val runtimeClass: NameType         = "runtimeClass"
     val runtimeMirror: NameType        = "runtimeMirror"
-    val sameElements: NameType         = "sameElements"
     val scala_ : NameType              = "scala"
     val selectDynamic: NameType        = "selectDynamic"
     val selectOverloadedMethod: NameType = "selectOverloadedMethod"
     val selectTerm: NameType           = "selectTerm"
     val selectType: NameType           = "selectType"
     val self: NameType                 = "self"
-    val setAccessible: NameType        = "setAccessible"
     val setAnnotations: NameType       = "setAnnotations"
+    val setInfo: NameType              = "setInfo"
     val setSymbol: NameType            = "setSymbol"
     val setType: NameType              = "setType"
-    val setTypeSignature: NameType     = "setTypeSignature"
     val splice: NameType               = "splice"
     val staticClass : NameType         = "staticClass"
     val staticModule : NameType        = "staticModule"
     val staticPackage : NameType       = "staticPackage"
     val synchronized_ : NameType       = "synchronized"
-    val tail: NameType                 = "tail"
-    val `then` : NameType              = "then"
+    val ScalaDot: NameType             = "ScalaDot"
+    val TermName: NameType             = "TermName"
     val this_ : NameType               = "this"
     val thisPrefix : NameType          = "thisPrefix"
-    val throw_ : NameType              = "throw"
     val toArray: NameType              = "toArray"
     val toList: NameType               = "toList"
     val toObjectArray : NameType       = "toObjectArray"
-    val toSeq: NameType                = "toSeq"
-    val toString_ : NameType           = if (forMSIL) "ToString" else "toString"
+    val toStats: NameType              = "toStats"
+    val TopScope: NameType             = "TopScope"
+    val toString_ : NameType           = "toString"
     val toTypeConstructor: NameType    = "toTypeConstructor"
     val tpe : NameType                 = "tpe"
     val tree : NameType                = "tree"
     val true_ : NameType               = "true"
     val typedProductIterator: NameType = "typedProductIterator"
+    val TypeName: NameType             = "TypeName"
     val typeTagToManifest: NameType    = "typeTagToManifest"
     val unapply: NameType              = "unapply"
     val unapplySeq: NameType           = "unapplySeq"
     val unbox: NameType                = "unbox"
     val universe: NameType             = "universe"
+    val UnliftListElementwise: NameType = "UnliftListElementwise"
+    val UnliftListOfListsElementwise: NameType = "UnliftListOfListsElementwise"
     val update: NameType               = "update"
     val updateDynamic: NameType        = "updateDynamic"
     val value: NameType                = "value"
     val valueOf : NameType             = "valueOf"
     val values : NameType              = "values"
-    val view_ : NameType               = "view"
     val wait_ : NameType               = "wait"
     val withFilter: NameType           = "withFilter"
-    val wrap: NameType                 = "wrap"
-    val zip: NameType                  = "zip"
-
-    val synthSwitch: NameType          = "$synthSwitch"
+    val zero: NameType                 = "zero"
+
+    // quasiquote interpolators:
+    val q: NameType  = "q"
+    val tq: NameType = "tq"
+    val cq: NameType = "cq"
+    val pq: NameType = "pq"
+    val fq: NameType = "fq"
+
+    // quasiquote's syntactic combinators
+    val SyntacticAnnotatedType: NameType    = "SyntacticAnnotatedType"
+    val SyntacticApplied: NameType          = "SyntacticApplied"
+    val SyntacticAppliedType: NameType      = "SyntacticAppliedType"
+    val SyntacticAssign: NameType           = "SyntacticAssign"
+    val SyntacticBlock: NameType            = "SyntacticBlock"
+    val SyntacticClassDef: NameType         = "SyntacticClassDef"
+    val SyntacticCompoundType: NameType     = "SyntacticCompoundType"
+    val SyntacticDefDef: NameType           = "SyntacticDefDef"
+    val SyntacticEmptyTypeTree: NameType    = "SyntacticEmptyTypeTree"
+    val SyntacticExistentialType: NameType  = "SyntacticExistentialType"
+    val SyntacticFilter: NameType           = "SyntacticFilter"
+    val SyntacticFor: NameType              = "SyntacticFor"
+    val SyntacticForYield: NameType         = "SyntacticForYield"
+    val SyntacticFunction: NameType         = "SyntacticFunction"
+    val SyntacticFunctionType: NameType     = "SyntacticFunctionType"
+    val SyntacticImport: NameType           = "SyntacticImport"
+    val SyntacticMatch: NameType            = "SyntacticMatch"
+    val SyntacticNew: NameType              = "SyntacticNew"
+    val SyntacticObjectDef: NameType        = "SyntacticObjectDef"
+    val SyntacticPackageObjectDef: NameType = "SyntacticPackageObjectDef"
+    val SyntacticPartialFunction: NameType  = "SyntacticPartialFunction"
+    val SyntacticPatDef: NameType           = "SyntacticPatDef"
+    val SyntacticSelectTerm: NameType       = "SyntacticSelectTerm"
+    val SyntacticSelectType: NameType       = "SyntacticSelectType"
+    val SyntacticSingletonType: NameType    = "SyntacticSingletonType"
+    val SyntacticTermIdent: NameType        = "SyntacticTermIdent"
+    val SyntacticTraitDef: NameType         = "SyntacticTraitDef"
+    val SyntacticTry: NameType              = "SyntacticTry"
+    val SyntacticTuple: NameType            = "SyntacticTuple"
+    val SyntacticTupleType: NameType        = "SyntacticTupleType"
+    val SyntacticTypeApplied: NameType      = "SyntacticTypeApplied"
+    val SyntacticTypeIdent: NameType        = "SyntacticTypeIdent"
+    val SyntacticTypeProjection: NameType   = "SyntacticTypeProjection"
+    val SyntacticValDef: NameType           = "SyntacticValDef"
+    val SyntacticValEq: NameType            = "SyntacticValEq"
+    val SyntacticValFrom: NameType          = "SyntacticValFrom"
+    val SyntacticVarDef: NameType           = "SyntacticVarDef"
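(These Syntactic* term names refer to the extractor objects in the reification support that quasiquote construction and deconstruction expand to; a hedged sketch of the user-facing behaviour they enable:

    import scala.reflect.runtime.universe._

    val tuple = q"(1, 2, 3)"         // built through SyntacticTuple
    val q"(..$elements)" = tuple     // and taken apart by the same extractor
    // elements: List[Tree] = List(1, 2, 3)
)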
 
     // unencoded operators
     object raw {
-      final val AMP  : NameType  = "&"
       final val BANG : NameType  = "!"
       final val BAR  : NameType  = "|"
       final val DOLLAR: NameType = "$"
@@ -810,7 +834,6 @@ trait StdNames {
       final val MINUS: NameType  = "-"
       final val NE: NameType     = "!="
       final val PLUS : NameType  = "+"
-      final val SLASH: NameType  = "/"
       final val STAR : NameType  = "*"
       final val TILDE: NameType  = "~"
 
@@ -834,29 +857,33 @@ trait StdNames {
     def newLazyValSlowComputeName(lzyValName: Name) = lzyValName append LAZY_SLOW_SUFFIX
 
     // ASCII names for operators
-    val ADD      = encode("+")
-    val AND      = encode("&")
-    val ASR      = encode(">>")
-    val DIV      = encode("/")
-    val EQ       = encode("==")
-    val EQL      = encode("=")
-    val GE       = encode(">=")
-    val GT       = encode(">")
-    val HASHHASH = encode("##")
-    val LE       = encode("<=")
-    val LSL      = encode("<<")
-    val LSR      = encode(">>>")
-    val LT       = encode("<")
-    val MINUS    = encode("-")
-    val MOD      = encode("%")
-    val MUL      = encode("*")
-    val NE       = encode("!=")
-    val OR       = encode("|")
-    val PLUS     = ADD    // technically redundant, but ADD looks funny with MINUS
-    val SUB      = MINUS  // ... as does SUB with PLUS
-    val XOR      = encode("^")
-    val ZAND     = encode("&&")
-    val ZOR      = encode("||")
+    val ADD       = encode("+")
+    val AND       = encode("&")
+    val ASR       = encode(">>")
+    val CONS      = encode("::")
+    val COLONPLUS = encode(":+")
+    val DIV       = encode("/")
+    val EQ        = encode("==")
+    val EQL       = encode("=")
+    val GE        = encode(">=")
+    val GT        = encode(">")
+    val HASHHASH  = encode("##")
+    val LE        = encode("<=")
+    val LSL       = encode("<<")
+    val LSR       = encode(">>>")
+    val LT        = encode("<")
+    val MINUS     = encode("-")
+    val MINGT     = encode("->")
+    val MOD       = encode("%")
+    val MUL       = encode("*")
+    val NE        = encode("!=")
+    val OR        = encode("|")
+    val PLUS      = ADD    // technically redundant, but ADD looks funny with MINUS
+    val PLUSPLUS  = encode("++")
+    val SUB       = MINUS  // ... as does SUB with PLUS
+    val XOR       = encode("^")
+    val ZAND      = encode("&&")
+    val ZOR       = encode("||")
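(encode applies the standard operator-name mangling, so the new entries resolve roughly as below; a quick sketch against the public NameTransformer API:

    import scala.reflect.NameTransformer.encode

    assert(encode("::") == "$colon$colon")    // CONS
    assert(encode(":+") == "$colon$plus")     // COLONPLUS
    assert(encode("->") == "$minus$greater")  // MINGT
    assert(encode("++") == "$plus$plus")      // PLUSPLUS
)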
 
     // unary operators
     val UNARY_~ = encode("unary_~")
@@ -866,14 +893,7 @@ trait StdNames {
 
     // Grouped here so Cleanup knows what tests to perform.
     val CommonOpNames   = Set[Name](OR, XOR, AND, EQ, NE)
-    val ConversionNames = Set[Name](toByte, toChar, toDouble, toFloat, toInt, toLong, toShort)
     val BooleanOpNames  = Set[Name](ZOR, ZAND, UNARY_!) ++ CommonOpNames
-    val NumberOpNames   = (
-         Set[Name](ADD, SUB, MUL, DIV, MOD, LSL, LSR, ASR, LT, LE, GE, GT)
-      ++ Set(UNARY_+, UNARY_-, UNARY_!)
-      ++ ConversionNames
-      ++ CommonOpNames
-    )
 
     val add: NameType                    = "add"
     val complement: NameType             = "complement"
@@ -998,6 +1018,8 @@ trait StdNames {
     val BITMAP_CHECKINIT_TRANSIENT: NameType = BITMAP_PREFIX + "inittrans$" // initialization bitmap for transient checkinit values
   }
 
+  lazy val typeNames: tpnme.type = tpnme
+
   object tpnme extends TypeNames { }
 
   /** For fully qualified type names.
@@ -1005,7 +1027,6 @@ trait StdNames {
   object fulltpnme extends TypeNames {
     val RuntimeNothing: NameType = "scala.runtime.Nothing$"
     val RuntimeNull: NameType    = "scala.runtime.Null$"
-    val JavaLangEnum: NameType   = "java.lang.Enum"
   }
 
   /** Java binary names, like scala/runtime/Nothing$.
@@ -1019,17 +1040,14 @@ trait StdNames {
 
   val javanme = nme.javaKeywords
 
-  object nme extends TermNames {
-
-    def isModuleVarName(name: Name): Boolean =
-      stripAnonNumberSuffix(name) endsWith MODULE_VAR_SUFFIX
+  lazy val termNames: nme.type = nme
 
+  object nme extends TermNames {
     def moduleVarName(name: TermName): TermName =
       newTermNameCached("" + name + MODULE_VAR_SUFFIX)
 
     def getCause         = sn.GetCause
     def getClass_        = sn.GetClass
-    def getComponentType = sn.GetComponentType
     def getMethod_       = sn.GetMethod
     def invoke_          = sn.Invoke
 
@@ -1037,58 +1055,9 @@ trait StdNames {
     val isBoxedNumber: NameType = "isBoxedNumber"
 
     val reflPolyCacheName: NameType   = "reflPoly$Cache"
-    val reflClassCacheName: NameType  = "reflClass$Cache"
     val reflParamsCacheName: NameType = "reflParams$Cache"
-    val reflMethodCacheName: NameType = "reflMethod$Cache"
     val reflMethodName: NameType      = "reflMethod$Method"
 
-    private val reflectionCacheNames = Set[NameType](
-      reflPolyCacheName,
-      reflClassCacheName,
-      reflParamsCacheName,
-      reflMethodCacheName,
-      reflMethodName
-    )
-    def isReflectionCacheName(name: Name) = reflectionCacheNames exists (name startsWith _)
-
-    @deprecated("Use a method in tpnme", "2.10.0") def dropSingletonName(name: Name): TypeName = tpnme.dropSingletonName(name)
-    @deprecated("Use a method in tpnme", "2.10.0") def singletonName(name: Name): TypeName     = tpnme.singletonName(name)
-    @deprecated("Use a method in tpnme", "2.10.0") def implClassName(name: Name): TypeName     = tpnme.implClassName(name)
-    @deprecated("Use a method in tpnme", "2.10.0") def interfaceName(implname: Name): TypeName = tpnme.interfaceName(implname)
-  }
-
-  abstract class SymbolNames {
-    protected val stringToTermName = null
-    protected val stringToTypeName = null
-    protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s)
-
-    val BeanProperty        : TypeName
-    val BooleanBeanProperty : TypeName
-    val BoxedBoolean        : TypeName
-    val BoxedCharacter      : TypeName
-    val BoxedNumber         : TypeName
-    val Class               : TypeName
-    val Delegate            : TypeName
-    val IOOBException       : TypeName // IndexOutOfBoundsException
-    val InvTargetException  : TypeName // InvocationTargetException
-    val JavaSerializable    : TypeName
-    val MethodAsObject      : TypeName
-    val NPException         : TypeName // NullPointerException
-    val Object              : TypeName
-    val String              : TypeName
-    val Throwable           : TypeName
-    val ValueType           : TypeName
-
-    val ForName             : TermName
-    val GetCause            : TermName
-    val GetClass            : TermName
-    val GetClassLoader      : TermName
-    val GetComponentType    : TermName
-    val GetMethod           : TermName
-    val Invoke              : TermName
-    val JavaLang            : TermName
-
-    val Boxed: immutable.Map[TypeName, TypeName]
   }
 
   class JavaKeywords {
@@ -1148,7 +1117,11 @@ trait StdNames {
     final val keywords = kw.result
   }
 
-  private abstract class JavaNames extends SymbolNames {
+  sealed abstract class SymbolNames {
+    protected val stringToTermName = null
+    protected val stringToTypeName = null
+    protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s)
+
     final val BoxedBoolean: TypeName       = "java.lang.Boolean"
     final val BoxedByte: TypeName          = "java.lang.Byte"
     final val BoxedCharacter: TypeName     = "java.lang.Character"
@@ -1158,25 +1131,18 @@ trait StdNames {
     final val BoxedLong: TypeName          = "java.lang.Long"
     final val BoxedNumber: TypeName        = "java.lang.Number"
     final val BoxedShort: TypeName         = "java.lang.Short"
-    final val Class: TypeName              = "java.lang.Class"
-    final val Delegate: TypeName           = tpnme.NO_NAME
     final val IOOBException: TypeName      = "java.lang.IndexOutOfBoundsException"
     final val InvTargetException: TypeName = "java.lang.reflect.InvocationTargetException"
     final val MethodAsObject: TypeName     = "java.lang.reflect.Method"
     final val NPException: TypeName        = "java.lang.NullPointerException"
     final val Object: TypeName             = "java.lang.Object"
-    final val String: TypeName             = "java.lang.String"
     final val Throwable: TypeName          = "java.lang.Throwable"
-    final val ValueType: TypeName          = tpnme.NO_NAME
 
-    final val ForName: TermName          = newTermName("forName")
     final val GetCause: TermName         = newTermName("getCause")
     final val GetClass: TermName         = newTermName("getClass")
     final val GetClassLoader: TermName   = newTermName("getClassLoader")
-    final val GetComponentType: TermName = newTermName("getComponentType")
     final val GetMethod: TermName        = newTermName("getMethod")
     final val Invoke: TermName           = newTermName("invoke")
-    final val JavaLang: TermName         = newTermName("java.lang")
 
     val Boxed = immutable.Map[TypeName, TypeName](
       tpnme.Boolean -> BoxedBoolean,
@@ -1190,52 +1156,5 @@ trait StdNames {
     )
   }
 
-  private class MSILNames extends SymbolNames {
-    final val BeanProperty: TypeName        = tpnme.NO_NAME
-    final val BooleanBeanProperty: TypeName = tpnme.NO_NAME
-    final val BoxedBoolean: TypeName        = "System.IConvertible"
-    final val BoxedCharacter: TypeName      = "System.IConvertible"
-    final val BoxedNumber: TypeName         = "System.IConvertible"
-    final val Class: TypeName               = "System.Type"
-    final val Delegate: TypeName            = "System.MulticastDelegate"
-    final val IOOBException: TypeName       = "System.IndexOutOfRangeException"
-    final val InvTargetException: TypeName  = "System.Reflection.TargetInvocationException"
-    final val JavaSerializable: TypeName    = tpnme.NO_NAME
-    final val MethodAsObject: TypeName      = "System.Reflection.MethodInfo"
-    final val NPException: TypeName         = "System.NullReferenceException"
-    final val Object: TypeName              = "System.Object"
-    final val String: TypeName              = "System.String"
-    final val Throwable: TypeName           = "System.Exception"
-    final val ValueType: TypeName           = "System.ValueType"
-
-    final val ForName: TermName          = newTermName("GetType")
-    final val GetCause: TermName         = newTermName("InnerException") /* System.Reflection.TargetInvocationException.InnerException */
-    final val GetClass: TermName         = newTermName("GetType")
-    final lazy val GetClassLoader: TermName   = throw new UnsupportedOperationException("Scala reflection is not supported on this platform");
-    final val GetComponentType: TermName = newTermName("GetElementType")
-    final val GetMethod: TermName        = newTermName("GetMethod")
-    final val Invoke: TermName           = newTermName("Invoke")
-    final val JavaLang: TermName         = newTermName("System")
-
-    val Boxed = immutable.Map[TypeName, TypeName](
-      tpnme.Boolean -> "System.Boolean",
-      tpnme.Byte    -> "System.SByte", // a scala.Byte is signed and a System.SByte too (unlike a System.Byte)
-      tpnme.Char    -> "System.Char",
-      tpnme.Short   -> "System.Int16",
-      tpnme.Int     -> "System.Int32",
-      tpnme.Long    -> "System.Int64",
-      tpnme.Float   -> "System.Single",
-      tpnme.Double  -> "System.Double"
-    )
-  }
-
-  private class J2SENames extends JavaNames {
-    final val BeanProperty: TypeName        = "scala.beans.BeanProperty"
-    final val BooleanBeanProperty: TypeName = "scala.beans.BooleanBeanProperty"
-    final val JavaSerializable: TypeName    = "java.io.Serializable"
-  }
-
-  lazy val sn: SymbolNames =
-    if (forMSIL) new MSILNames
-    else new J2SENames
+  lazy val sn: SymbolNames = new SymbolNames { }
 }
diff --git a/src/reflect/scala/reflect/internal/SymbolPairs.scala b/src/reflect/scala/reflect/internal/SymbolPairs.scala
new file mode 100644
index 0000000..c088e8f
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/SymbolPairs.scala
@@ -0,0 +1,302 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala
+package reflect
+package internal
+
+import scala.collection.mutable
+import Flags._
+import util.HashSet
+import scala.annotation.tailrec
+
+/** An abstraction for considering symbol pairs.
+ *  One of the greatest sources of compiler bugs is that symbols can
+ *  trivially lose their prefixes and turn into some completely different
+ *  type with the smallest of errors. It is the exception not the rule
+ *  that type comparisons are done correctly.
+ *
+ *  This offers a small step toward coherence with two abstractions
+ *  which come up over and over again:
+ *
+ *    RelativeTo: operations relative to a prefix
+ *    SymbolPair: two symbols being related somehow, plus the class
+ *       in which the relation is being performed
+ *
+ *  This is only a start, but it is a start.
+ */
+abstract class SymbolPairs {
+  val global: SymbolTable
+  import global._
+
+  /** Type operations relative to a prefix.  All operations work on Symbols,
+   *  and the types are the member types of those symbols in the prefix.
+   */
+  class RelativeTo(val prefix: Type) {
+    def this(clazz: Symbol) = this(clazz.thisType)
+    import scala.language.implicitConversions // geez, it even has to hassle me when it's private
+    private implicit def symbolToType(sym: Symbol): Type = prefix memberType sym
+
+    def erasureOf(sym: Symbol): Type         = erasure.erasure(sym)(sym: Type)
+    def signature(sym: Symbol): String       = sym defStringSeenAs (sym: Type)
+    def erasedSignature(sym: Symbol): String = sym defStringSeenAs erasureOf(sym)
+
+    def isSameType(sym1: Symbol, sym2: Symbol): Boolean    = sym1 =:= sym2
+    def isSubType(sym1: Symbol, sym2: Symbol): Boolean     = sym1 <:< sym2
+    def isSuperType(sym1: Symbol, sym2: Symbol): Boolean   = sym2 <:< sym1
+    def isSameErasure(sym1: Symbol, sym2: Symbol): Boolean = erasureOf(sym1) =:= erasureOf(sym2)
+    def matches(sym1: Symbol, sym2: Symbol): Boolean       = (sym1: Type) matches (sym2: Type)
+
+    override def toString = s"RelativeTo($prefix)"
+  }
+
+  /** Are types tp1 and tp2 equivalent seen from the perspective
+   *  of `baseClass`? For instance List[Int] and Seq[Int] are =:=
+   *  when viewed from IterableClass.
+   */
+  def sameInBaseClass(baseClass: Symbol)(tp1: Type, tp2: Type) =
+    (tp1 baseType baseClass) =:= (tp2 baseType baseClass)
+
+  case class SymbolPair(base: Symbol, low: Symbol, high: Symbol) {
+    def pos                 = if (low.owner == base) low.pos else if (high.owner == base) high.pos else base.pos
+    def self: Type          = base.thisType
+    def rootType: Type      = base.thisType
+
+    def lowType: Type       = self memberType low
+    def lowErased: Type     = erasure.specialErasure(base)(low.tpe)
+    def lowClassBound: Type = classBoundAsSeen(low.tpe.typeSymbol)
+
+    def highType: Type       = self memberType high
+    def highInfo: Type       = self memberInfo high
+    def highErased: Type     = erasure.specialErasure(base)(high.tpe)
+    def highClassBound: Type = classBoundAsSeen(high.tpe.typeSymbol)
+
+    def isErroneous = low.tpe.isErroneous || high.tpe.isErroneous
+    def sameKind    = sameLength(low.typeParams, high.typeParams)
+
+    private def classBoundAsSeen(tsym: Symbol) =
+      tsym.classBound.asSeenFrom(rootType, tsym.owner)
+
+    private def memberDefString(sym: Symbol, where: Boolean) = {
+      val def_s = (
+        if (sym.isConstructor) s"$sym: ${self memberType sym}"
+        else sym defStringSeenAs (self memberType sym)
+      )
+      def_s + whereString(sym)
+    }
+    /** A string like ' at line 55' if the symbol is defined in the class
+     *  under consideration, or ' in trait Foo' if defined elsewhere.
+     */
+    private def whereString(sym: Symbol) =
+      if (sym.owner == base) " at line " + sym.pos.line else sym.locationString
+
+    def lowString  = memberDefString(low, where = true)
+    def highString = memberDefString(high, where = true)
+
+    override def toString = sm"""
+      |Cursor(in $base) {
+      |   high  $highString
+      | erased  $highErased
+      |  infos  ${high.infosString}
+      |    low  $lowString
+      | erased  $lowErased
+      |  infos  ${low.infosString}
+      |}""".trim
+  }
+
+  /** The cursor class
+   *  @param base   the base class containing the participating symbols
+   */
+  abstract class Cursor(val base: Symbol) {
+    cursor =>
+
+    final val self    = base.thisType   // The type relative to which symbols are seen.
+    private val decls = newScope        // all the symbols which can take part in a pair.
+    private val size  = bases.length
+
+    /** A symbol for which exclude returns true will not appear as
+     *  either end of a pair.
+     */
+    protected def exclude(sym: Symbol): Boolean
+
+    /** Does `sym1` match `sym2` such that (sym1, sym2) should be
+     *  considered as a (lo, high) pair? Types always match. Term symbols
+     *  match if their member types relative to `self` match.
+     */
+    protected def matches(lo: Symbol, high: Symbol): Boolean
+
+    /** The parents and base classes of `base`.  Can be refined in subclasses.
+     */
+    protected def parents: List[Type] = base.info.parents
+    protected def bases: List[Symbol] = base.info.baseClasses
+
+    /** An implementation of BitSets as arrays (maybe consider collection.BitSet
+     *  for that?) The main purpose of this is to implement
+     *  intersectionContainsElement efficiently.
+     */
+    private type BitSet = Array[Int]
+
+    /** A mapping from all base class indices to a bitset
+     *  which indicates whether parents are subclasses.
+     *
+     *   i \in subParents(j)   iff
+     *   exists p \in parents, b \in baseClasses:
+     *     i = index(p)
+     *     j = index(b)
+     *     p isSubClass b
+     *     p.baseType(b) == self.baseType(b)
+     */
+    private val subParents = new Array[BitSet](size)
+
+    /** A map from baseclasses of <base> to ints, with smaller ints meaning lower in
+     *  linearization order. Symbols that are not baseclasses map to -1.
+     */
+    private val index = new mutable.HashMap[Symbol, Int] { override def default(key: Symbol) = -1 }
+
+    /** The scope entries that have already been visited as highSymbol
+     *  (but may have been excluded via hasCommonParentAsSubclass.)
+     *  These will not appear as lowSymbol.
+     */
+    private val visited = HashSet[ScopeEntry]("visited", 64)
+
+    /** Initialization has to run now so decls is populated before
+     *  the declaration of curEntry.
+     */
+    init()
+
+    // The current low and high symbols; the high may be null.
+    private[this] var lowSymbol: Symbol  = _
+    private[this] var highSymbol: Symbol = _
+
+    // The current entry candidates for low and high symbol.
+    private[this] var curEntry  = decls.elems
+    private[this] var nextEntry = curEntry
+
+    // These fields are initially populated with a call to next().
+    next()
+
+    // populate the above data structures
+    private def init() {
+      // Fill `decls` with lower symbols shadowing higher ones
+      def fillDecls(bcs: List[Symbol], deferred: Boolean) {
+        if (!bcs.isEmpty) {
+          fillDecls(bcs.tail, deferred)
+          var e = bcs.head.info.decls.elems
+          while (e ne null) {
+            if (e.sym.initialize.isDeferred == deferred && !exclude(e.sym))
+              decls enter e.sym
+            e = e.next
+          }
+        }
+      }
+      var i = 0
+      for (bc <- bases) {
+        index(bc) = i
+        subParents(i) = new BitSet(size)
+        i += 1
+      }
+      for (p <- parents) {
+        val pIndex = index(p.typeSymbol)
+        if (pIndex >= 0)
+          for (bc <- p.baseClasses ; if sameInBaseClass(bc)(p, self)) {
+            val bcIndex = index(bc)
+            if (bcIndex >= 0)
+              include(subParents(bcIndex), pIndex)
+          }
+      }
+      // first, deferred (this will need to change if we change lookup rules!)
+      fillDecls(bases, deferred = true)
+      // then, concrete.
+      fillDecls(bases, deferred = false)
+    }
+
+    private def include(bs: BitSet, n: Int) {
+      val nshifted = n >> 5
+      val nmask    = 1 << (n & 31)
+      bs(nshifted) |= nmask
+    }
+
+    /** Implements `bs1 * bs2 * {0..n} != 0`.
+     *  Used in hasCommonParentAsSubclass */
+    private def intersectionContainsElementLeq(bs1: BitSet, bs2: BitSet, n: Int): Boolean = {
+      val nshifted = n >> 5
+      val nmask = 1 << (n & 31)
+      var i = 0
+      while (i < nshifted) {
+        if ((bs1(i) & bs2(i)) != 0) return true
+        i += 1
+      }
+      (bs1(nshifted) & bs2(nshifted) & (nmask | nmask - 1)) != 0
+    }
+
+    /** Do `sym1` and `sym2` have a common subclass in `parents`?
+     *  In that case we do not follow their pairs.
+     */
+    private def hasCommonParentAsSubclass(sym1: Symbol, sym2: Symbol) = {
+      val index1 = index(sym1.owner)
+      (index1 >= 0) && {
+        val index2 = index(sym2.owner)
+        (index2 >= 0) && {
+          intersectionContainsElementLeq(
+            subParents(index1), subParents(index2), index1 min index2)
+        }
+      }
+    }
+
+    @tailrec private def advanceNextEntry() {
+      if (nextEntry ne null) {
+        nextEntry = decls lookupNextEntry nextEntry
+        if (nextEntry ne null) {
+          val high    = nextEntry.sym
+          val isMatch = matches(lowSymbol, high) && { visited addEntry nextEntry ; true } // side-effect visited on all matches
+
+          // skip nextEntry if a class in `parents` is a subclass of the
+          // owners of both low and high.
+          if (isMatch && !hasCommonParentAsSubclass(lowSymbol, high))
+            highSymbol = high
+          else
+            advanceNextEntry()
+        }
+      }
+    }
+    @tailrec private def advanceCurEntry() {
+      if (curEntry ne null) {
+        curEntry = curEntry.next
+        if (curEntry ne null) {
+          if (visited(curEntry) || exclude(curEntry.sym))
+            advanceCurEntry()
+          else
+            nextEntry = curEntry
+        }
+      }
+    }
+
+    /** The `low` and `high` symbol.  In the context of overriding pairs,
+     *  low == overriding and high == overridden.
+     */
+    def low  = lowSymbol
+    def high = highSymbol
+
+    def hasNext     = curEntry ne null
+    def currentPair = new SymbolPair(base, low, high)
+    def iterator    = new Iterator[SymbolPair] {
+      def hasNext = cursor.hasNext
+      def next()  = try cursor.currentPair finally cursor.next()
+    }
+
+    // Note that next is called once during object initialization to
+    // populate the fields tracking the current symbol pair.
+    def next() {
+      if (curEntry ne null) {
+        lowSymbol = curEntry.sym
+        advanceNextEntry()        // sets highSymbol
+        if (nextEntry eq null) {
+          advanceCurEntry()
+          next()
+        }
+      }
+    }
+  }
+}
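(The int-array BitSet machinery used by the cursor, include and intersectionContainsElementLeq, is self-contained; a standalone sketch of the same arithmetic, runnable outside the compiler:

    object BitSetSketch {
      type BitSet = Array[Int]

      def include(bs: BitSet, n: Int): Unit =
        bs(n >> 5) |= (1 << (n & 31))                        // set bit n

      // true iff bs1 and bs2 share a set bit at some index <= n
      def intersectionContainsElementLeq(bs1: BitSet, bs2: BitSet, n: Int): Boolean = {
        val nshifted = n >> 5
        val nmask    = 1 << (n & 31)
        var i = 0
        while (i < nshifted) {
          if ((bs1(i) & bs2(i)) != 0) return true
          i += 1
        }
        (bs1(nshifted) & bs2(nshifted) & (nmask | nmask - 1)) != 0
      }

      def main(args: Array[String]): Unit = {
        val a = new Array[Int](4)                            // room for 128 bits
        val b = new Array[Int](4)
        include(a, 3);  include(a, 70)
        include(b, 70); include(b, 90)
        assert(!intersectionContainsElementLeq(a, b, 69))    // shared bit 70 is above the cutoff
        assert(intersectionContainsElementLeq(a, b, 70))
      }
    }
)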
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 6ca8900..c76dedb 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -3,18 +3,22 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import scala.annotation.elidable
 import scala.collection.{ mutable, immutable }
 import util._
+import java.util.concurrent.TimeUnit
+import scala.reflect.internal.{TreeGen => InternalTreeGen}
 
 abstract class SymbolTable extends macros.Universe
                               with Collections
                               with Names
                               with Symbols
                               with Types
+                              with Variances
                               with Kinds
                               with ExistentialsAndSkolems
                               with FlagSets
@@ -37,36 +41,53 @@ abstract class SymbolTable extends macros.Universe
                               with CapturedVariables
                               with StdAttachments
                               with StdCreators
-                              with BuildUtils
+                              with ReificationSupport
+                              with PrivateWithin
+                              with pickling.Translations
+                              with FreshNames
+                              with Internals
 {
 
-  val gen = new TreeGen { val global: SymbolTable.this.type = SymbolTable.this }
-  lazy val treeBuild = gen
+  val gen = new InternalTreeGen { val global: SymbolTable.this.type = SymbolTable.this }
 
   def log(msg: => AnyRef): Unit
+  def deprecationWarning(pos: Position, msg: String): Unit = warning(msg)
   def warning(msg: String): Unit     = Console.err.println(msg)
+  def inform(msg: String): Unit      = Console.err.println(msg)
   def globalError(msg: String): Unit = abort(msg)
   def abort(msg: String): Nothing    = throw new FatalError(supplementErrorMessage(msg))
 
+  protected def elapsedMessage(msg: String, start: Long) =
+    msg + " in " + (TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) - start) + "ms"
+
+  def informProgress(msg: String)          = if (settings.verbose) inform("[" + msg + "]")
+  def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start))
+
   def shouldLogAtThisPhase = false
+  def isPastTyper = false
+  protected def isDeveloper: Boolean = settings.debug
 
-  @deprecated("Give us a reason", "2.10.0")
-  def abort(): Nothing = abort("unknown error")
+  @deprecated("Use devWarning if this is really a warning; otherwise use log", "2.11.0")
+  def debugwarn(msg: => String): Unit = devWarning(msg)
 
   /** Override with final implementation for inlining. */
-  def debuglog(msg:  => String): Unit = if (settings.debug.value) log(msg)
-  def debugwarn(msg: => String): Unit = if (settings.debug.value) Console.err.println(msg)
+  def debuglog(msg:  => String): Unit = if (settings.debug) log(msg)
+  def devWarning(msg: => String): Unit = if (isDeveloper) Console.err.println(msg)
   def throwableAsString(t: Throwable): String = "" + t
+  def throwableAsString(t: Throwable, maxFrames: Int): String = t.getStackTrace take maxFrames mkString "\n  at "
+
+  @inline final def devWarningDumpStack(msg: => String, maxFrames: Int): Unit =
+    devWarning(msg + "\n" + throwableAsString(new Throwable, maxFrames))
 
   /** Prints a stack trace if -Ydebug or equivalent was given, otherwise does nothing. */
-  def debugStack(t: Throwable): Unit  = debugwarn(throwableAsString(t))
+  def debugStack(t: Throwable): Unit  = devWarning(throwableAsString(t))
 
   /** Overridden when we know more about what was happening during a failure. */
   def supplementErrorMessage(msg: String): String = msg
 
   private[scala] def printCaller[T](msg: String)(result: T) = {
     Console.err.println("%s: %s\nCalled from: %s".format(msg, result,
-      (new Throwable).getStackTrace.drop(2).take(15).mkString("\n")))
+      (new Throwable).getStackTrace.drop(2).take(50).mkString("\n")))
 
     result
   }
@@ -81,12 +102,33 @@ abstract class SymbolTable extends macros.Universe
     result
   }
   @inline
+  final private[scala] def debuglogResult[T](msg: => String)(result: T): T = {
+    debuglog(msg + ": " + result)
+    result
+  }
+  @inline
+  final private[scala] def devWarningResult[T](msg: => String)(result: T): T = {
+    devWarning(msg + ": " + result)
+    result
+  }
+  @inline
   final private[scala] def logResultIf[T](msg: => String, cond: T => Boolean)(result: T): T = {
     if (cond(result))
       log(msg + ": " + result)
 
     result
   }
+  @inline
+  final private[scala] def debuglogResultIf[T](msg: => String, cond: T => Boolean)(result: T): T = {
+    if (cond(result))
+      debuglog(msg + ": " + result)
+
+    result
+  }
+
+  @inline final def findSymbol(xs: TraversableOnce[Symbol])(p: Symbol => Boolean): Symbol = {
+    xs find p getOrElse NoSymbol
+  }
 
   // For too long have we suffered in order to sort NAMES.
   // I'm pretty sure there's a reasonable default for that.
@@ -108,17 +150,13 @@ abstract class SymbolTable extends macros.Universe
     val global: SymbolTable.this.type = SymbolTable.this
   } with util.TraceSymbolActivity
 
+  val treeInfo: TreeInfo { val global: SymbolTable.this.type }
+
   /** Check that the executing thread is the compiler thread. No-op here,
    *  overridden in interactive.Global. */
   @elidable(elidable.WARNING)
   def assertCorrectThread() {}
 
-  /** Are we compiling for Java SE? */
-  // def forJVM: Boolean
-
-  /** Are we compiling for .NET? */
-  def forMSIL: Boolean = false
-
   /** A last effort if symbol in a select <owner>.<name> is not found.
    *  This is overridden by the reflection compiler to make up a package
    *  when it makes sense (i.e. <owner> is a package and <name> is a term name).
@@ -139,7 +177,7 @@ abstract class SymbolTable extends macros.Universe
   type RunId = Int
   final val NoRunId = 0
 
-  // sigh, this has to be public or atPhase doesn't inline.
+  // sigh, this has to be public or enteringPhase doesn't inline.
   var phStack: List[Phase] = Nil
   private[this] var ph: Phase = NoPhase
   private[this] var per = NoPeriod
@@ -182,9 +220,6 @@ abstract class SymbolTable extends macros.Universe
   /** The phase identifier of the given period. */
   final def phaseId(period: Period): Phase#Id = period & 0xFF
 
-  /** The period at the start of run that includes `period`. */
-  final def startRun(period: Period): Period = period & 0xFFFFFF00
-
   /** The current period. */
   final def currentPeriod: Period = {
     //assert(per == (currentRunId << 8) + phase.id)
@@ -202,23 +237,37 @@ abstract class SymbolTable extends macros.Universe
     p != NoPhase && phase.id > p.id
 
   /** Perform given operation at given phase. */
-  @inline final def atPhase[T](ph: Phase)(op: => T): T = {
+  @inline final def enteringPhase[T](ph: Phase)(op: => T): T = {
     val saved = pushPhase(ph)
     try op
     finally popPhase(saved)
   }
 
+  final def findPhaseWithName(phaseName: String): Phase = {
+    var ph = phase
+    while (ph != NoPhase && ph.name != phaseName) {
+      ph = ph.prev
+    }
+    if (ph eq NoPhase) phase else ph
+  }
+  final def enteringPhaseWithName[T](phaseName: String)(body: => T): T = {
+    val phase = findPhaseWithName(phaseName)
+    enteringPhase(phase)(body)
+  }
 
-  /** Since when it is to be "at" a phase is inherently ambiguous,
-   *  a couple unambiguously named methods.
-   */
-  @inline final def beforePhase[T](ph: Phase)(op: => T): T = atPhase(ph)(op)
-  @inline final def afterPhase[T](ph: Phase)(op: => T): T  = atPhase(ph.next)(op)
-  @inline final def afterCurrentPhase[T](op: => T): T      = atPhase(phase.next)(op)
-  @inline final def beforePrevPhase[T](op: => T): T        = atPhase(phase.prev)(op)
+  def slowButSafeEnteringPhase[T](ph: Phase)(op: => T): T = {
+    if (isCompilerUniverse) enteringPhase(ph)(op)
+    else op
+  }
+
+  @inline final def exitingPhase[T](ph: Phase)(op: => T): T = enteringPhase(ph.next)(op)
+  @inline final def enteringPrevPhase[T](op: => T): T       = enteringPhase(phase.prev)(op)
 
-  @inline final def atPhaseNotLaterThan[T](target: Phase)(op: => T): T =
-    if (isAtPhaseAfter(target)) atPhase(target)(op) else op
+  @inline final def enteringPhaseNotLaterThan[T](target: Phase)(op: => T): T =
+    if (isAtPhaseAfter(target)) enteringPhase(target)(op) else op
+
+  def slowButSafeEnteringPhaseNotLaterThan[T](target: Phase)(op: => T): T =
+    if (isCompilerUniverse) enteringPhaseNotLaterThan(target)(op) else op
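(atPhase becomes enteringPhase, and exitingPhase(ph) is simply enteringPhase(ph.next); a hedged usage sketch, where the global parameter and sym are stand-ins rather than anything defined in this patch:

    // sketch only: compare a symbol's info at the typer phase and after erasure
    def infosAroundErasure(global: scala.tools.nsc.Global)(sym: global.Symbol) = {
      import global._
      val asTyperSawIt = enteringPhase(currentRun.typerPhase)(sym.info)
      val afterErasure = exitingPhase(currentRun.erasurePhase)(sym.info)
      (asTyperSawIt, afterErasure)
    }
)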
 
   final def isValid(period: Period): Boolean =
     period != 0 && runId(period) == currentRunId && {
@@ -231,7 +280,7 @@ abstract class SymbolTable extends macros.Universe
     def noChangeInBaseClasses(it: InfoTransformer, limit: Phase#Id): Boolean = (
       it.pid >= limit ||
       !it.changesBaseClasses && noChangeInBaseClasses(it.next, limit)
-    );
+    )
     period != 0 && runId(period) == currentRunId && {
       val pid = phaseId(period)
       if (phase.id > pid) noChangeInBaseClasses(infoTransformers.nextFrom(pid), phase.id)
@@ -302,27 +351,45 @@ abstract class SymbolTable extends macros.Universe
   }
 
   object perRunCaches {
-    import scala.runtime.ScalaRunTime.stringOf
     import scala.collection.generic.Clearable
 
     // Weak references so the garbage collector will take care of
     // letting us know when a cache is really out of commission.
-    private val caches = WeakHashSet[Clearable]()
+    import java.lang.ref.WeakReference
+    private var caches = List[WeakReference[Clearable]]()
 
     def recordCache[T <: Clearable](cache: T): T = {
-      caches += cache
+      caches ::= new WeakReference(cache)
       cache
     }
 
     def clearAll() = {
       debuglog("Clearing " + caches.size + " caches.")
-      caches foreach (_.clear)
+      caches foreach (ref => Option(ref.get).foreach(_.clear))
+      caches = caches.filterNot(_.get == null)
     }
 
     def newWeakMap[K, V]()        = recordCache(mutable.WeakHashMap[K, V]())
     def newMap[K, V]()            = recordCache(mutable.HashMap[K, V]())
     def newSet[K]()               = recordCache(mutable.HashSet[K]())
     def newWeakSet[K <: AnyRef]() = recordCache(new WeakHashSet[K]())
+
+    def newAnyRefMap[K <: AnyRef, V]() = recordCache(mutable.AnyRefMap[K, V]())
+    def newGeneric[T](f: => T): () => T = {
+      val NoCached: T = null.asInstanceOf[T]
+      var cached: T = NoCached
+      var cachedRunId = NoRunId
+      recordCache(new Clearable {
+        def clear(): Unit = cached = NoCached
+      })
+      () => {
+        if (currentRunId != cachedRunId || cached == NoCached) {
+          cached = f
+          cachedRunId = currentRunId
+        }
+        cached
+      }
+    }
   }
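(newGeneric memoizes an arbitrary by-name value for the duration of one run and registers it for clearing with the other per-run caches; a sketch of a hypothetical client component, where the trait and its contents are illustrative and not part of the patch:

    trait MyCachingComponent { self: scala.reflect.internal.SymbolTable =>
      // recomputed at most once per run, cleared alongside the other per-run caches
      private val objectMembersCache: () => List[Symbol] =
        perRunCaches.newGeneric(definitions.ObjectClass.info.members.toList)
      def objectMembers: List[Symbol] = objectMembersCache()
    }
)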
 
   /** The set of all installed infotransformers. */
@@ -339,26 +406,14 @@ abstract class SymbolTable extends macros.Universe
    */
   def isCompilerUniverse = false
 
+  @deprecated("Use enteringPhase", "2.10.0") // Used in SBT 0.12.4
+  @inline final def atPhase[T](ph: Phase)(op: => T): T = enteringPhase(ph)(op)
+
+
   /**
    * Adds the `sm` String interpolator to a [[scala.StringContext]].
    */
   implicit val StringContextStripMarginOps: StringContext => StringContextStripMarginOps = util.StringContextStripMarginOps
-
-  def importPrivateWithinFromJavaFlags(sym: Symbol, jflags: Int): Symbol = {
-    import ClassfileConstants._
-    if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)) == 0)
-      // See ticket #1687 for an example of when topLevelClass is NoSymbol: it
-      // apparently occurs when processing v45.3 bytecode.
-      if (sym.enclosingTopLevelClass != NoSymbol)
-        sym.privateWithin = sym.enclosingTopLevelClass.owner
-
-    // protected in java means package protected. #3946
-    if ((jflags & JAVA_ACC_PROTECTED) != 0)
-      if (sym.enclosingTopLevelClass != NoSymbol)
-        sym.privateWithin = sym.enclosingTopLevelClass.owner
-
-    sym
-  }
 }
 
 object SymbolTableStats {
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 579f768..2ce54d2 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -3,15 +3,17 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import scala.collection.{ mutable, immutable }
 import scala.collection.mutable.ListBuffer
-import util.Statistics
+import util.{ Statistics, shortClassOfInstance }
 import Flags._
 import scala.annotation.tailrec
-import scala.reflect.io.AbstractFile
+import scala.reflect.io.{ AbstractFile, NoAbstractFile }
+import Variance._
 
 trait Symbols extends api.Symbols { self: SymbolTable =>
   import definitions._
@@ -19,8 +21,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
 
   protected var ids = 0
 
-  val emptySymbolArray = new Array[Symbol](0)
-
   protected def nextId() = { ids += 1; ids }
 
   /** Used for deciding in the IDE whether we can interrupt the compiler */
@@ -30,13 +30,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
   //protected var lockedSyms = scala.collection.immutable.Set[Symbol]()
 
   /** Used to keep track of the recursion depth on locked symbols */
-  private var recursionTable = immutable.Map.empty[Symbol, Int]
+  private var _recursionTable = immutable.Map.empty[Symbol, Int]
+  def recursionTable = _recursionTable
+  def recursionTable_=(value: immutable.Map[Symbol, Int]) = _recursionTable = value
 
-  private var nextexid = 0
-  protected def freshExistentialName(suffix: String) = {
-    nextexid += 1
-    newTypeName("_" + nextexid + suffix)
-  }
+  private var existentialIds = 0
+  protected def nextExistentialId() = { existentialIds += 1; existentialIds }
+  protected def freshExistentialName(suffix: String) = newTypeName("_" + nextExistentialId() + suffix)
 
   // Set the fields which point companions at one another.  Returns the module.
   def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol = {
@@ -55,31 +55,50 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
   def newFreeTypeSymbol(name: TypeName, flags: Long = 0L, origin: String): FreeTypeSymbol =
     new FreeTypeSymbol(name, origin) initFlags flags
 
-  /** Determines whether the given information request should trigger the given symbol's completer.
-   *  See comments to `Symbol.needsInitialize` for details.
-   */
-  protected def shouldTriggerCompleter(symbol: Symbol, completer: Type, isFlagRelated: Boolean, mask: Long) =
-    completer match {
-      case null => false
-      case _: FlagAgnosticCompleter => !isFlagRelated
-      case _ => abort(s"unsupported completer: $completer of class ${if (completer != null) completer.getClass else null} for symbol ${symbol.fullName}")
-    }
-
   /** The original owner of a class. Used by the backend to generate
    *  EnclosingMethod attributes.
    */
   val originalOwner = perRunCaches.newMap[Symbol, Symbol]()
 
-  abstract class SymbolContextApiImpl extends SymbolContextApi {
+  // TODO - don't allow the owner to be changed without checking invariants, at least
+  // when under some flag. Define per-phase invariants for owner/owned relationships,
+  // e.g. after flatten all classes are owned by package classes, there are lots and
+  // lots of these to be declared (or more realistically, discovered.)
+  protected def saveOriginalOwner(sym: Symbol) {
+    if (originalOwner contains sym) ()
+    else originalOwner(sym) = sym.rawowner
+  }
+  protected def originalEnclosingMethod(sym: Symbol): Symbol = {
+    if (sym.isMethod || sym == NoSymbol) sym
+    else {
+      val owner = originalOwner.getOrElse(sym, sym.rawowner)
+      if (sym.isLocalDummy) owner.enclClass.primaryConstructor
+      else originalEnclosingMethod(owner)
+    }
+  }
+
+  def symbolOf[T: WeakTypeTag]: TypeSymbol = weakTypeOf[T].typeSymbolDirect.asType
+
+  abstract class SymbolContextApiImpl extends SymbolApi {
     this: Symbol =>
 
+    def isFreeTerm: Boolean = false
+    def asFreeTerm: FreeTermSymbol = throw new ScalaReflectionException(s"$this is not a free term")
+    def isFreeType: Boolean = false
+    def asFreeType: FreeTypeSymbol = throw new ScalaReflectionException(s"$this is not a free type")
+
     def isExistential: Boolean = this.isExistentiallyBound
     def isParamWithDefault: Boolean = this.hasDefault
+    // `isByNameParam` is only true for a call-by-name parameter of a *method*,
+    // an argument of the primary constructor seen in the class body is excluded by `isValueParameter`
     def isByNameParam: Boolean = this.isValueParameter && (this hasFlag BYNAMEPARAM)
     def isImplementationArtifact: Boolean = (this hasFlag BRIDGE) || (this hasFlag VBRIDGE) || (this hasFlag ARTIFACT)
     def isJava: Boolean = isJavaDefined
     def isVal: Boolean = isTerm && !isModule && !isMethod && !isMutable
     def isVar: Boolean = isTerm && !isModule && !isMethod && !isLazy && isMutable
+    def isAbstract: Boolean = isAbstractClass || isDeferred || isAbstractType
+    def isPrivateThis = (this hasFlag PRIVATE) && (this hasFlag LOCAL)
+    def isProtectedThis = (this hasFlag PROTECTED) && (this hasFlag LOCAL)
 
     def newNestedSymbol(name: Name, pos: Position, newFlags: Long, isClass: Boolean): Symbol = name match {
       case n: TermName => newTermSymbol(n, pos, newFlags)
@@ -87,27 +106,70 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     }
 
     def knownDirectSubclasses = {
-      if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+      // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+      if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize
       children
     }
 
+    def selfType = {
+      // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+      if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize
+      typeOfThis
+    }
+
     def baseClasses                       = info.baseClasses
     def module                            = sourceModule
     def thisPrefix: Type                  = thisType
-    def selfType: Type                    = typeOfThis
-    def typeSignature: Type               = { fullyInitializeSymbol(this); info }
-    def typeSignatureIn(site: Type): Type = { fullyInitializeSymbol(this); site memberInfo this }
+    def superPrefix(supertpe: Type): Type = SuperType(thisType, supertpe)
+
+    // These two methods used to call fullyInitializeSymbol on `this`.
+    //
+    // The only positive effect of that is, to the best of my knowledge, convenient printing
+    // (if you print a signature of the symbol that's not fully initialized,
+    // you might end up with weird <?>'s in value/type params)
+    //
+    // Another effect is obviously full initialization of that symbol,
+    // but that one shouldn't be necessary from the public API standpoint,
+    // because everything that matters auto-initializes at runtime,
+    // and auto-initialization at compile-time is anyway dubious
+    // (I've had spurious cyclic refs caused by calling typeSignature
+    // that initialized parent, which was in the middle of initialization).
+    //
+    // Given that and also given the pressure of being uniform with info and infoIn,
+    // I've removed calls to fullyInitializeSymbol from typeSignature and typeSignatureIn,
+    // injected fullyInitializeSymbol in showDecl, and injected fullyInitializeType in runtime Type.toString
+    // (the latter will make things a bit harder to debug in runtime universe, because
+    // toString might now very rarely cause cyclic references, but we also have showRaw that doesn't do initialization).
+    //
+    // Auto-initialization in runtime Type.toString is one of the examples of why a cake-based design
+    // isn't a very good idea for reflection API. Sometimes we want the same pretty name for both a compiler-facing
+    // and a user-facing API that should have different behaviors (other examples here include isPackage, isCaseClass, etc).
+    // Within a cake it's fundamentally impossible to achieve that.
+    def typeSignature: Type               = info
+    def typeSignatureIn(site: Type): Type = site memberInfo this
 
     def toType: Type = tpe
     def toTypeIn(site: Type): Type = site.memberType(this)
     def toTypeConstructor: Type = typeConstructor
-    def setTypeSignature(tpe: Type): this.type = { setInfo(tpe); this }
     def setAnnotations(annots: AnnotationInfo*): this.type = { setAnnotations(annots.toList); this }
 
     def getter: Symbol = getter(owner)
     def setter: Symbol = setter(owner)
+
+    def companion: Symbol = {
+      if (isModule && !isPackage) companionSymbol
+      else if (isModuleClass && !isPackageClass) sourceModule.companionSymbol
+      else if (isClass && !isModuleClass && !isPackageClass) companionSymbol
+      else NoSymbol
+    }
+
+    def infoIn(site: Type): Type = typeSignatureIn(site)
+    def overrides: List[Symbol] = allOverriddenSymbols
+    def paramLists: List[List[Symbol]] = paramss
   }
 
+  private[reflect] case class SymbolKind(accurate: String, sanitized: String, abbreviation: String)
+
   /** The class for all symbols */
   abstract class Symbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: Name)
           extends SymbolContextApiImpl
@@ -115,6 +177,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
              with Annotatable[Symbol]
              with Attachable {
 
+    // makes sure that all symbols that runtime reflection deals with are synchronized
+    private def isSynchronized = this.isInstanceOf[scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol]
+    private def isAprioriThreadsafe = isThreadsafe(AllOps)
+    assert(isCompilerUniverse || isSynchronized || isAprioriThreadsafe, s"unsafe symbol $initName (child of $initOwner) in runtime reflection universe")
+
     type AccessBoundaryType = Symbol
     type AnnotationType     = AnnotationInfo
 
@@ -128,13 +195,16 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     def name: NameType
     def name_=(n: Name): Unit = {
       if (shouldLogAtThisPhase) {
-        val msg = s"Renaming $fullLocationString to $n"
+        def msg = s"In $owner, renaming $name -> $n"
         if (isSpecialized) debuglog(msg) else log(msg)
       }
     }
     def asNameType(n: Name): NameType
 
-    private[this] var _rawowner = initOwner // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api
+    // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api
+    // The null check is for NoSymbol, which can't pass a reference to itself to the constructor and also
+    // can't call owner_= due to an assertion it contains.
+    private[this] var _rawowner = if (initOwner eq null) this else initOwner
     private[this] var _rawflags: Long = _
 
     def rawowner = _rawowner
@@ -171,13 +241,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     def debugFlagString: String           = flagString(AllFlags)
 
     /** String representation of symbol's variance */
-    def varianceString: String =
-      if (variance == 1) "+"
-      else if (variance == -1) "-"
-      else ""
+    def varianceString: String = variance.symbolicString
 
     override def flagMask =
-      if (settings.debug.value && !isAbstractType) AllFlags
+      if (settings.debug && !isAbstractType) AllFlags
       else if (owner.isRefinementClass) ExplicitFlags & ~OVERRIDE
       else ExplicitFlags
 
@@ -186,10 +253,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       if (isGADTSkolem) " (this is a GADT skolem)"
       else ""
 
-    def shortSymbolClass = getClass.getName.split('.').last.stripPrefix("Symbols$")
+    def shortSymbolClass = shortClassOfInstance(this)
     def symbolCreationString: String = (
       "%s%25s | %-40s | %s".format(
-        if (settings.uniqid.value) "%06d | ".format(id) else "",
+        if (settings.uniqid) "%06d | ".format(id) else "",
         shortSymbolClass,
         name.decode + " in " + owner,
         rawFlagString
@@ -221,7 +288,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
 
     /** Static constructor with info set. */
     def newStaticConstructor(pos: Position): MethodSymbol =
-      newConstructor(pos, STATIC) setInfo UnitClass.tpe
+      newConstructor(pos, STATIC) setInfo UnitTpe
 
     /** Instance constructor with info set. */
     def newClassConstructor(pos: Position): MethodSymbol =
@@ -249,20 +316,29 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     final def newImport(pos: Position): TermSymbol =
       newTermSymbol(nme.IMPORT, pos)
 
+    def newModuleVarSymbol(accessor: Symbol): TermSymbol = {
+      val newName  = nme.moduleVarName(accessor.name.toTermName)
+      val newFlags = MODULEVAR | ( if (this.isClass) PrivateLocal | SYNTHETIC else 0 )
+      val newInfo  = accessor.tpe.finalResultType
+      val mval     = newVariable(newName, accessor.pos.focus, newFlags.toLong) addAnnotation VolatileAttr
+
+      if (this.isClass)
+        mval setInfoAndEnter newInfo
+      else
+        mval setInfo newInfo
+    }
+
     final def newModuleSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol =
       newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol]
 
     final def newModuleAndClassSymbol(name: Name, pos: Position, flags0: FlagSet): (ModuleSymbol, ClassSymbol) = {
       val flags = flags0 | MODULE
-      val m = newModuleSymbol(name, pos, flags)
+      val m = newModuleSymbol(name.toTermName, pos, flags)
       val c = newModuleClass(name.toTypeName, pos, flags & ModuleToClassFlags)
       connectModuleToClass(m, c)
       (m, c)
     }
 
-    final def newPackageSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol =
-      newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol]
-
     final def newModuleClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol =
       newClassSymbol(name, pos, newFlags).asInstanceOf[ModuleClassSymbol]
 
@@ -325,11 +401,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       () => { cnt += 1; nme.syntheticParamName(cnt) }
     }
 
-    /** Synthetic value parameters when parameter symbols are not available
-     */
-    final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[TermSymbol]] =
-      argtypess map (xs => newSyntheticValueParams(xs, freshNamer))
-
     /** Synthetic value parameters when parameter symbols are not available.
      *  Calling this method multiple times will re-use the same parameter names.
      */
@@ -345,16 +416,20 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     final def newSyntheticValueParam(argtype: Type, name: TermName = nme.syntheticParamName(1)): TermSymbol =
       newValueParameter(name, owner.pos.focus, SYNTHETIC) setInfo argtype
 
-    def newSyntheticTypeParam(): TypeSymbol                             = newSyntheticTypeParam("T0", 0L)
     def newSyntheticTypeParam(name: String, newFlags: Long): TypeSymbol = newTypeParameter(newTypeName(name), NoPosition, newFlags) setInfo TypeBounds.empty
     def newSyntheticTypeParams(num: Int): List[TypeSymbol]              = (0 until num).toList map (n => newSyntheticTypeParam("T" + n, 0L))
 
     /** Create a new existential type skolem with this symbol its owner,
      *  based on the given symbol and origin.
      */
-    def newExistentialSkolem(basis: Symbol, origin: AnyRef): TypeSkolem = {
-      val skolem = newTypeSkolemSymbol(basis.name.toTypeName, origin, basis.pos, (basis.flags | EXISTENTIAL) & ~PARAM)
-      skolem setInfo (basis.info cloneInfo skolem)
+    def newExistentialSkolem(basis: Symbol, origin: AnyRef): TypeSkolem =
+      newExistentialSkolem(basis.name.toTypeName, basis.info, basis.flags, basis.pos, origin)
+
+    /** Create a new existential type skolem with this symbol its owner, and the given other properties.
+     */
+    def newExistentialSkolem(name: TypeName, info: Type, flags: Long, pos: Position, origin: AnyRef): TypeSkolem = {
+      val skolem = newTypeSkolemSymbol(name.toTypeName, origin, pos, (flags | EXISTENTIAL) & ~PARAM)
+      skolem setInfo (info cloneInfo skolem)
     }
 
     // don't test directly -- use isGADTSkolem
@@ -409,14 +484,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     final def newRefinementClass(pos: Position): RefinementClassSymbol =
       createRefinementClassSymbol(pos, 0L)
 
-    /** Create a new getter for current symbol (which must be a field)
-     */
-    final def newGetter: MethodSymbol = (
-      owner.newMethod(nme.getterName(name.toTermName), NoPosition, getterFlags(flags))
-        setPrivateWithin privateWithin
-        setInfo MethodType(Nil, tpe)
-    )
-
     final def newErrorSymbol(name: Name): Symbol = name match {
       case x: TypeName  => newErrorClass(x)
       case x: TermName  => newErrorValue(x)
@@ -432,23 +499,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       case _            => new StubTermSymbol(this, name.toTermName, missingMessage)
     }
 
-    @deprecated("Use the other signature", "2.10.0")
-    def newClass(pos: Position, name: TypeName): Symbol        = newClass(name, pos)
-    @deprecated("Use the other signature", "2.10.0")
-    def newModuleClass(pos: Position, name: TypeName): Symbol  = newModuleClass(name, pos)
-    @deprecated("Use the other signature", "2.10.0")
-    def newLabel(pos: Position, name: TermName): MethodSymbol  = newLabel(name, pos)
-    @deprecated("Use the other signature", "2.10.0")
-    def newValue(pos: Position, name: TermName): TermSymbol    = newTermSymbol(name, pos)
-    @deprecated("Use the other signature", "2.10.0")
-    def newAliasType(pos: Position, name: TypeName): Symbol    = newAliasType(name, pos)
-    @deprecated("Use the other signature", "2.10.0")
-    def newAbstractType(pos: Position, name: TypeName): Symbol = newAbstractType(name, pos)
-    @deprecated("Use the other signature", "2.10.0")
-    def newExistential(pos: Position, name: TypeName): Symbol  = newExistential(name, pos)
-    @deprecated("Use the other signature", "2.10.0")
-    def newMethod(pos: Position, name: TermName): MethodSymbol = newMethod(name, pos)
-
 // ----- locking and unlocking ------------------------------------------------------
 
     // True if the symbol is unlocked.
@@ -532,14 +582,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      */
     def isContravariant         = false
     def isCovariant             = false
-    def isExistentialQuantified = false
     def isExistentialSkolem     = false
     def isExistentiallyBound    = false
     def isGADTSkolem            = false
     def isTypeParameter         = false
     def isTypeParameterOrSkolem = false
     def isTypeSkolem            = false
-    def isTypeMacro             = false
     def isInvariant             = !isCovariant && !isContravariant
 
     /** Qualities of Terms, always false for TypeSymbols.
@@ -551,6 +599,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     def isConstructor       = false
     def isEarlyInitialized  = false
     def isGetter            = false
+    def isDefaultGetter     = false
     def isLocalDummy        = false
     def isMixinConstructor  = false
     def isOverloaded        = false
@@ -559,7 +608,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     def isValue             = false
     def isValueParameter    = false
     def isVariable          = false
-    override def hasDefault = false
     def isTermMacro         = false
 
     /** Qualities of MethodSymbols, always false for TypeSymbols
@@ -589,27 +637,75 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      */
     def isEffectiveRoot = false
 
+    /** Can this symbol only be subclassed by bottom classes? This is assessed
+     *  to be the case if it is a final class and all of its type parameters are invariant.
+     */
+    def hasOnlyBottomSubclasses = {
+      def loop(tparams: List[Symbol]): Boolean = tparams match {
+        case Nil     => true
+        case x :: xs => x.variance.isInvariant && loop(xs)
+      }
+      isClass && isFinal && loop(typeParams)
+    }
+
     final def isLazyAccessor       = isLazy && lazyAccessor != NoSymbol
-    final def isOverridableMember  = !(isClass || isEffectivelyFinal) && (this ne NoSymbol) && owner.isClass
+    final def isOverridableMember  = !(isClass || isEffectivelyFinal) && safeOwner.isClass
 
     /** Does this symbol denote a wrapper created by the repl? */
     final def isInterpreterWrapper = (
          (this hasFlag MODULE)
-      && owner.isPackageClass
+      && isTopLevel
       && nme.isReplWrapperName(name)
     )
+
+    /** In our current architecture, symbols for top-level classes and modules
+     *  are created as dummies. Package symbols just call newClass(name) or newModule(name) and
+     *  consider their job done.
+     *
+     *  In order for such a dummy to provide meaningful info (e.g. a list of its members),
+     *  it needs to go through unpickling. Unpickling is a process of reading Scala metadata
+     *  from ScalaSignature annotations and assigning it to symbols and types.
+     *
+     *  A single unpickling session takes a top-level class or module, parses the ScalaSignature annotation
+     *  and then reads metadata for the unpicklee, its companion (if any) and all their members recursively
+     *  (i.e. the pickle not only contains info about directly nested classes/modules, but also about
+     *  classes/modules nested into those and so on).
+     *
+     *  Unpickling is triggered automatically whenever info (typeSignature in reflection parlance) is called.
+     *  This happens because package symbols assign completer thunks to the dummies they create.
+     *  Therefore metadata loading happens lazily and transparently.
+     *
+     *  Almost transparently. Unfortunately metadata isn't limited to just signatures (i.e. lists of members).
+     *  It also includes flags (which determine e.g. whether a class is sealed or not), annotations and privateWithin.
+     *  This gives rise to unpleasant effects like in SI-6277, when a flag test called on an uninitialized symbol
+     *  produces incorrect results.
+     *
+     *  One might think that the solution is simple: automatically call the completer
+     *  whenever one needs flags, annotations and privateWithin - just like it's done for info.
+     *  Unfortunately, this leads to weird crashes in scalac, and currently we can't attempt
+     *  to fix the core of the compiler and risk stability a few weeks before the final release.
+     *  upd. Haha, "a few weeks before the final release". This surely sounds familiar :)
+     *
+     *  However we do need to fix this for runtime reflection, since this idiosyncrasy is not something
+     *  we'd like to expose to reflection users. Therefore a proposed solution is to check whether we're in a
+     *  runtime reflection universe, and, if so and the requested info has not yet been loaded, to commence initialization.
+     */
     final def getFlag(mask: Long): Long = {
-      if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+      if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize
       flags & mask
     }
     /** Does symbol have ANY flag in `mask` set? */
     final def hasFlag(mask: Long): Boolean = {
-      if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+      // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+      if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize
       (flags & mask) != 0
     }
+    def hasFlag(mask: Int): Boolean = hasFlag(mask.toLong)
+
     /** Does symbol have ALL the flags in `mask` set? */
     final def hasAllFlags(mask: Long): Boolean = {
-      if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+      // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+      if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize
       (flags & mask) == mask
     }
 
@@ -642,11 +738,32 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
 
     final def hasGetter = isTerm && nme.isLocalName(name)
 
+    /** A little explanation for this confusing situation.
+     *  Nested modules which have no static owner when ModuleDefs
+     *  are eliminated (refchecks) are given the lateMETHOD flag,
+     *  which makes them appear as methods after refchecks.
+     *  Here's an example where one can see all four of FF FT TF TT
+     *  for (isStatic, isMethod) at various phases.
+     *
+     *    trait A1 { case class Quux() }
+     *    object A2 extends A1 { object Flax }
+     *    // --  namer         object Quux in trait A1
+     *    // -M  flatten       object Quux in trait A1
+     *    // S-  flatten       object Flax in object A2
+     *    // -M  posterasure   object Quux in trait A1
+     *    // -M  jvm           object Quux in trait A1
+     *    // SM  jvm           object Quux in object A2
+     *
+     *  So "isModuleNotMethod" exists not for its achievement in
+     *  brevity, but to encapsulate the relevant condition.
+     */
+    def isModuleNotMethod = isModule && !isMethod
+    def isStaticModule    = isModuleNotMethod && isStatic
+
     final def isInitializedToDefault = !isType && hasAllFlags(DEFAULTINIT | ACCESSOR)
-    final def isStaticModule = isModule && isStatic && !isMethod
     final def isThisSym = isTerm && owner.thisSym == this
     final def isError = hasFlag(IS_ERROR)
-    final def isErroneous = isError || isInitialized && tpe.isErroneous
+    final def isErroneous = isError || isInitialized && tpe_*.isErroneous
 
     def isHigherOrderTypeParameter = owner.isTypeParameterOrSkolem
 
@@ -658,7 +775,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       info.firstParent.typeSymbol == AnyValClass && !isPrimitiveValueClass
 
     final def isMethodWithExtension =
-      isMethod && owner.isDerivedValueClass && !isParamAccessor && !isConstructor && !hasFlag(SUPERACCESSOR) && !isTermMacro
+      isMethod && owner.isDerivedValueClass && !isParamAccessor && !isConstructor && !hasFlag(SUPERACCESSOR) && !isMacro
 
     final def isAnonymousFunction = isSynthetic && (name containsName tpnme.ANON_FUN_NAME)
     final def isDefinedInPackage  = effectiveOwner.isPackageClass
@@ -715,10 +832,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       }
 
     def isStrictFP          = hasAnnotation(ScalaStrictFPAttr) || (enclClass hasAnnotation ScalaStrictFPAttr)
-    def isSerializable      = (
-         info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass)
-      || hasAnnotation(SerializableAttr) // last part can be removed, @serializable annotation is deprecated
-    )
+    def isSerializable      = info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass)
     def hasBridgeAnnotation = hasAnnotation(BridgeClass)
     def isDeprecated        = hasAnnotation(DeprecatedAttr)
     def deprecationMessage  = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0)
@@ -728,14 +842,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
                             = hasAnnotation(DeprecatedInheritanceAttr)
     def deprecatedInheritanceMessage
                             = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 0)
-    def deprecatedInheritanceVersion
-                            = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 1)
     def hasDeprecatedOverridingAnnotation
                             = hasAnnotation(DeprecatedOverridingAttr)
     def deprecatedOverridingMessage
                             = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 0)
-    def deprecatedOverridingVersion
-                            = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 1)
 
     // !!! when annotation arguments are not literal strings, but any sort of
     // assembly of strings, there is a fair chance they will turn up here not as
@@ -753,19 +863,17 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     def compileTimeOnlyMessage  = getAnnotation(CompileTimeOnlyAttr) flatMap (_ stringArg 0)
 
     /** Is this symbol an accessor method for outer? */
-    final def isOuterAccessor = {
-      hasFlag(STABLE | ARTIFACT) &&
-      originalName == nme.OUTER
-    }
+    final def isOuterAccessor = hasFlag(STABLE | ARTIFACT) && (unexpandedName == nme.OUTER)
 
     /** Is this symbol an accessor method for outer? */
-    final def isOuterField = {
-      hasFlag(ARTIFACT) &&
-      originalName == nme.OUTER_LOCAL
-    }
+    final def isOuterField = isArtifact && (unexpandedName == nme.OUTER_LOCAL)
 
-    /** Does this symbol denote a stable value? */
-    def isStable = false
+    /** Does this symbol denote a stable value, ignoring volatility?
+     *
+     * Stability and volatility are checked separately to allow volatile paths in patterns that amount to equality checks. SI-6815
+     */
+    final def isStable        = isTerm && !isMutable && !(hasFlag(BYNAMEPARAM)) && (!isMethod || hasStableFlag)
+    final def hasVolatileType = tpe.isVolatile && !hasAnnotation(uncheckedStableClass)
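
    A minimal sketch of the stability notion used here (plain Scala, names invented for the
    example): a stable identifier in a pattern compiles to an equality check, which is why
    SI-6815 wants volatility tracked separately from stability.

        object StableIdPatternDemo extends App {
          val Limit = 42                       // a val is a stable identifier
          def classify(n: Int): String = n match {
            case Limit => "exactly the limit"  // stable identifier pattern: amounts to n == Limit
            case _     => "something else"
          }
          println(classify(42))  // exactly the limit
          println(classify(7))   // something else
        }
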
 
     /** Does this symbol denote the primary constructor of its enclosing class? */
     final def isPrimaryConstructor =
@@ -776,9 +884,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       isConstructor && !isPrimaryConstructor
 
     /** Is this symbol a synthetic apply or unapply method in a companion object of a case class? */
+    // xeno-by: why this obscure use of the CASE flag? why not simply compare name with nme.apply and nme.unapply?
     final def isCaseApplyOrUnapply =
       isMethod && isCase && isSynthetic
 
+    /** Is this symbol a synthetic copy method in a case class? */
+    final def isCaseCopy =
+      isMethod && owner.isCase && isSynthetic && name == nme.copy
+
     /** Is this symbol a trait which needs an implementation class? */
     final def needsImplClass = (
          isTrait
@@ -815,26 +928,40 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     final def isStaticOwner: Boolean =
       isPackageClass || isModuleClass && isStatic
 
-    def isTopLevelModule = hasFlag(MODULE) && owner.isPackageClass
+    /** A helper function for isEffectivelyFinal. */
+    private def isNotOverridden = (
+      owner.isClass && (
+           owner.isEffectivelyFinal
+        || owner.isSealed && owner.children.forall(c => c.isEffectivelyFinal && (overridingSymbol(c) == NoSymbol))
+      )
+    )
 
     /** Is this symbol effectively final? I.e, it cannot be overridden */
     final def isEffectivelyFinal: Boolean = (
          (this hasFlag FINAL | PACKAGE)
-      || isModuleOrModuleClass && (owner.isPackageClass || !settings.overrideObjects.value)
+      || isModuleOrModuleClass && (isTopLevel || !settings.overrideObjects)
       || isTerm && (
              isPrivate
-          || isLocal
-          || owner.isClass && owner.isEffectivelyFinal
-      )
+          || isLocalToBlock
+         )
     )
+    /** Is this symbol effectively final or a concrete term member of a sealed class whose children do not override it */
+    final def isEffectivelyFinalOrNotOverridden: Boolean = isEffectivelyFinal || (isTerm && !isDeferred && isNotOverridden)
+
+    /** Is this symbol owned by a package? */
+    final def isTopLevel = owner.isPackageClass
 
-    /** Is this symbol locally defined? I.e. not accessed from outside `this` instance */
+    /** Is this symbol defined in a block? */
+    @deprecated("Use isLocalToBlock instead", "2.11.0")
     final def isLocal: Boolean = owner.isTerm
 
+    /** Is this symbol defined in a block? */
+    final def isLocalToBlock: Boolean = owner.isTerm
+
     /** Is this symbol a constant? */
     final def isConstant: Boolean = isStable && isConstantType(tpe.resultType)
 
-    /** Is this class nested in another class or module (not a package)? */
+    /** Is this class nested in another class or module (not a package)? Includes locally defined classes. */
     def isNestedClass = false
 
     /** Is this class locally defined?
@@ -845,41 +972,36 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      */
     def isLocalClass = false
 
-    def isStableClass = false
-
-/* code for fixing nested objects
-    override final def isModuleClass: Boolean =
-      super.isModuleClass && !isExpandedModuleClass
-*/
     /** Is this class or type defined as a structural refinement type?
      */
     final def isStructuralRefinement: Boolean =
-      (isClass || isType || isModule) && info.normalize/*.underlying*/.isStructuralRefinement
+      (isClass || isType || isModule) && info.dealiasWiden/*.underlying*/.isStructuralRefinement
 
     /** Is this a term symbol only defined in a refinement (so that it needs
      *  to be accessed by reflection)?
      */
-    def isOnlyRefinementMember: Boolean =
-       isTerm && // type members are not affected
-       owner.isRefinementClass && // owner must be a refinement class
-       (owner.info decl name) == this && // symbol must be explicitly declared in the refinement (not synthesized from glb)
-       allOverriddenSymbols.isEmpty && // symbol must not override a symbol in a base class
-       !isConstant // symbol must not be a constant. Question: Can we exclude @inline methods as well?
+    def isOnlyRefinementMember = (
+         isTerm                  // Type members are unaffected
+      && owner.isRefinementClass // owner must be a refinement class
+      && isPossibleInRefinement  // any overridden symbols must also have refinement class owners
+      && !isConstant             // Must not be a constant. Question: Can we exclude @inline methods as well?
+      && isDeclaredByOwner       // Must be explicitly declared in the refinement (not synthesized from glb)
+    )
+    // "(owner.info decl name) == this" is inadequate, because "name" might
+    // be overloaded in owner - and this might be an overloaded symbol.
+    // TODO - make this cheaper and see where else we should be doing something similar.
+    private def isDeclaredByOwner = (owner.info decl name).alternatives exists (alternatives contains _)
 
     final def isStructuralRefinementMember = owner.isStructuralRefinement && isPossibleInRefinement && isPublic
-    final def isPossibleInRefinement       = !isConstructor && !isOverridingSymbol
-
-    /** Is this symbol a member of class `clazz`? */
-    def isMemberOf(clazz: Symbol) =
-      clazz.info.member(name).alternatives contains this
+    final def isPossibleInRefinement       = (
+         !isConstructor
+      && allOverriddenSymbols.forall(_.owner.isRefinementClass) // this includes allOverriddenSymbols.isEmpty
+    )
 
     /** A member of class `base` is incomplete if
      *  (1) it is declared deferred or
      *  (2) it is abstract override and its super symbol in `base` is
      *      nonexistent or incomplete.
-     *
-     *  @param base ...
-     *  @return     ...
      */
     final def isIncompleteIn(base: Symbol): Boolean =
       this.isDeferred ||
@@ -888,13 +1010,43 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
         supersym == NoSymbol || supersym.isIncompleteIn(base)
       }
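
    A small sketch of clause (2) above, using the standard stackable-trait pattern (the names
    below are invented for the example): an `abstract override` member remains incomplete until
    it is mixed in after a concrete implementation that its `super` call can resolve to.

        trait Sink { def put(s: String): Unit }
        trait Upper extends Sink {
          // Incomplete on its own: super.put has no concrete target yet.
          abstract override def put(s: String): Unit = super.put(s.toUpperCase)
        }
        class ConsoleSink extends Sink { def put(s: String): Unit = println(s) }

        object StackableDemo extends App {
          val sink = new ConsoleSink with Upper  // Upper.put's super now resolves to ConsoleSink.put
          sink.put("hello")                      // prints HELLO
        }
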
 
-    // Does not always work if the rawInfo is a SourcefileLoader, see comment
-    // in "def coreClassesFirst" in Global.
-    def exists = !owner.isPackageClass || { rawInfo.load(this); rawInfo != NoType }
+    def exists: Boolean = !isTopLevel || {
+      val isSourceLoader = rawInfo match {
+        case sl: SymLoader => sl.fromSource
+        case _             => false
+      }
+      def warnIfSourceLoader() {
+        if (isSourceLoader)
+          // Predef is completed early due to its autoimport; we used to get here when type checking its
+          // parent LowPriorityImplicits. See comment in c5441dc for more elaboration.
+          // Since the fix for SI-7335 Predef parents must be defined in Predef.scala, and we should not
+          // get here anymore.
+          devWarning(s"calling Symbol#exists with sourcefile based symbol loader may give incorrect results.");
+      }
+
+      rawInfo load this
+      rawInfo != NoType || { warnIfSourceLoader(); false }
+    }
 
     final def isInitialized: Boolean =
       validTo != NoPeriod
 
+    /** We consider a symbol to be thread-safe, when multiple concurrent threads can call its methods
+     *  (either directly or indirectly via public reflection or internal compiler infrastructure),
+     *  without any locking and everything works as it should work.
+     *
+     *  In its basic form, `isThreadsafe` always returns false. Runtime reflection augments reflection infrastructure
+     *  with a threadsafety-tracking mechanism implemented in `SynchronizedSymbol` that communicates with underlying completers
+     *  and can sometimes return true if the symbol has been completed to the point of thread safety.
+     *
+     *  The `purpose` parameter signifies whether we want to just check immutability of certain flags for the given mask.
+     *  This is necessary to enable robust auto-initialization of `Symbol.flags` for runtime reflection, and is also quite handy
+     *  in avoiding unnecessary initializations when requesting flags that have already been set.
+     */
+    def isThreadsafe(purpose: SymbolOps): Boolean = false
+    def markFlagsCompleted(mask: Long): this.type = this
+    def markAllCompleted(): this.type = this
+
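
    A much-simplified sketch of the bookkeeping the comment above describes (class and member
    names here are invented for illustration; the real mechanism lives in SynchronizedSymbol and
    its completers): the "purpose" boils down to a mask of flag bits, and a request is
    thread-safe once every bit it asks about has already been completed.

        // Invented names, for illustration only.
        class FlagCompletionTracking {
          @volatile private var completedFlagMask: Long = 0L   // bits whose values are already final

          def markFlagsCompleted(mask: Long): Unit = completedFlagMask |= mask
          def markAllCompleted(): Unit             = completedFlagMask = -1L

          // Thread-safe for this request iff every requested bit is already completed.
          def isThreadsafe(requestedMask: Long): Boolean =
            (requestedMask & ~completedFlagMask) == 0L
        }
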
     /** Can this symbol be loaded by a reflective mirror?
      *
      *  Scalac relies on `ScalaSignature' annotation to retain symbols across compilation runs.
@@ -916,14 +1068,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       if (isAliasType) return true
       if (isType && isNonClassType) return false
       if (isRefinementClass) return false
-      return true
+      true
     }
 
-    /** The variance of this symbol as an integer */
-    final def variance: Int =
-      if (isCovariant) 1
-      else if (isContravariant) -1
-      else 0
+    /** The variance of this symbol. */
+    def variance: Variance =
+      if (isCovariant) Covariant
+      else if (isContravariant) Contravariant
+      else Invariant
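
    For orientation, the three Variance values above line up with the +/- markers on type
    parameters; the public reflection API exposes the same information (sketch only):

        import scala.reflect.runtime.{universe => ru}

        object VarianceDemo extends App {
          def firstTypeParam(tpe: ru.Type) = tpe.typeSymbol.asClass.typeParams.head.asType

          println(firstTypeParam(ru.typeOf[List[Int]]).isCovariant)       // true:  List[+A]
          println(firstTypeParam(ru.typeOf[Int => Int]).isContravariant)  // true:  Function1[-T1, +R]
          println(firstTypeParam(ru.typeOf[Array[Int]]).isCovariant)      // false: Array[T] is invariant
        }
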
 
     /** The sequence number of this parameter symbol among all type
      *  and value parameters of symbol's owner. -1 if symbol does not
@@ -947,23 +1099,27 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
 
 // ------ owner attribute --------------------------------------------------------------
 
+    /** In general when seeking the owner of a symbol, one should call `owner`.
+     *  The other possibilities include:
+     *    - call `safeOwner` if it is expected that the target may be NoSymbol
+     *    - call `assertOwner` if it is an unrecoverable error if the target is NoSymbol
+     *
+     *  `owner` behaves like `safeOwner`, but logs NoSymbol.owner calls under -Xdev.
+     *  `assertOwner` aborts compilation immediately if called on NoSymbol.
+     */
     def owner: Symbol = {
       if (Statistics.hotEnabled) Statistics.incCounter(ownerCount)
       rawowner
     }
+    final def safeOwner: Symbol   = if (this eq NoSymbol) NoSymbol else owner
+    final def assertOwner: Symbol = if (this eq NoSymbol) abort("no-symbol does not have an owner") else owner
 
     // TODO - don't allow the owner to be changed without checking invariants, at least
     // when under some flag. Define per-phase invariants for owner/owned relationships,
     // e.g. after flatten all classes are owned by package classes, there are lots and
     // lots of these to be declared (or more realistically, discovered.)
     def owner_=(owner: Symbol) {
-      // don't keep the original owner in presentation compiler runs
-      // (the map will grow indefinitely, and the only use case is the
-      // backend).
-      if (!forInteractive) {
-        if (originalOwner contains this) ()
-        else originalOwner(this) = rawowner
-      }
+      saveOriginalOwner(this)
       assert(isCompilerUniverse, "owner_= is not thread-safe; cannot be run in reflective code")
       if (traceSymbolActivity)
         traceSymbols.recordNewSymbolOwner(this, owner)
@@ -993,10 +1149,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
 
 // ------ name attribute --------------------------------------------------------------
 
-    /** If this symbol has an expanded name, its original name, otherwise its name itself.
-     *  @see expandName
+    @deprecated("Use unexpandedName", "2.11.0") def originalName: Name = unexpandedName
+
+    /** If this symbol has an expanded name, its original (unexpanded) name,
+     *  otherwise the name itself.
      */
-    def originalName: Name = nme.originalName(nme.dropLocalSuffix(name))
+    def unexpandedName: Name = nme.unexpandedName(name)
 
     /** The name of the symbol before decoding, e.g. `\$eq\$eq` instead of `==`.
      */
@@ -1004,7 +1162,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
 
     /** The decoded name of the symbol, e.g. `==` instead of `\$eq\$eq`.
      */
-    def decodedName: String = nme.dropLocalSuffix(name).decode
+    def decodedName: String = name.decode
 
     private def addModuleSuffix(n: Name): Name =
       if (needsModuleSuffix) n append nme.MODULE_SUFFIX_STRING else n
@@ -1023,7 +1181,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     )
     /** These should be moved somewhere like JavaPlatform.
      */
-    def javaSimpleName: Name = addModuleSuffix(nme.dropLocalSuffix(simpleName))
+    def javaSimpleName: Name = addModuleSuffix(simpleName.dropLocal)
     def javaBinaryName: Name = addModuleSuffix(fullNameInternal('/'))
     def javaClassName: String  = addModuleSuffix(fullNameInternal('.')).toString
 
@@ -1041,10 +1199,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     private def fullNameInternal(separator: Char): Name = (
       if (isRoot || isRootPackage || this == NoSymbol) name
       else if (owner.isEffectiveRoot) name
-      else ((effectiveOwner.enclClass.fullNameAsName(separator) append separator): Name) append name
+      else effectiveOwner.enclClass.fullNameAsName(separator) append (separator, name)
     )
 
-    def fullNameAsName(separator: Char): Name = nme.dropLocalSuffix(fullNameInternal(separator))
+    def fullNameAsName(separator: Char): Name = fullNameInternal(separator).dropLocal
 
     /** The encoded full path name of this symbol, where outer names and inner names
      *  are separated by periods.
@@ -1082,9 +1240,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     protected def createImplClassSymbol(name: TypeName, pos: Position, newFlags: Long): ClassSymbol =
       new ClassSymbol(this, pos, name) with ImplClassSymbol initFlags newFlags
 
-    protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol =
-      new TermSymbol(this, pos, name) initFlags newFlags
-
     protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol =
       new MethodSymbol(this, pos, name) initFlags newFlags
 
@@ -1101,12 +1256,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       new TermSymbol(this, pos, name) initFlags newFlags
 
     final def newTermSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol = {
-      if ((newFlags & METHOD) != 0)
-        createMethodSymbol(name, pos, newFlags)
-      else if ((newFlags & PACKAGE) != 0)
+      // Package before Module, Module before Method, or we might grab the wrong guy.
+      if ((newFlags & PACKAGE) != 0)
         createPackageSymbol(name, pos, newFlags | PackageFlags)
       else if ((newFlags & MODULE) != 0)
         createModuleSymbol(name, pos, newFlags)
+      else if ((newFlags & METHOD) != 0)
+        createMethodSymbol(name, pos, newFlags)
       else if ((newFlags & PARAM) != 0)
         createValueParameterSymbol(name, pos, newFlags)
       else
@@ -1144,7 +1300,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      *  as public.
      */
     def accessBoundary(base: Symbol): Symbol = {
-      if (hasFlag(PRIVATE) || isLocal) owner
+      if (hasFlag(PRIVATE) || isLocalToBlock) owner
       else if (hasAllFlags(PROTECTED | STATIC | JAVA)) enclosingRootClass
       else if (hasAccessBoundary && !phase.erasedTypes) privateWithin
       else if (hasFlag(PROTECTED)) base
@@ -1166,7 +1322,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      */
     private[this] var _privateWithin: Symbol = _
     def privateWithin = {
-      if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+      // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+      if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize
       _privateWithin
     }
     def privateWithin_=(sym: Symbol) { _privateWithin = sym }
@@ -1187,20 +1344,61 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       }
     }
 
-    /** Get type. The type of a symbol is:
-     *  for a type symbol, the type corresponding to the symbol itself,
-     *    @M you should use tpeHK for a type symbol with type parameters if
-     *       the kind of the type need not be *, as tpe introduces dummy arguments
-     *       to generate a type of kind *
-     *  for a term symbol, its usual type.
-     *  See the tpe/tpeHK overrides in TypeSymbol for more.
+    /** The "type" of this symbol.  The type of a term symbol is its usual
+     *  type.  A TypeSymbol is more complicated; see that class for elaboration.
+     *  Since tpe forwards to tpe_*, if you call it on a type symbol with unapplied
+     *  type parameters, the type returned will contain dummy types.  These will
+     *  hide legitimate errors or create spurious ones if used as normal types.
      *
      *  For type symbols, `tpe` is different than `info`. `tpe` returns a typeRef
      *  to the type symbol, `info` returns the type information of the type symbol,
      *  e.g. a ClassInfoType for classes or a TypeBounds for abstract types.
      */
-    def tpe: Type = info
-    def tpeHK: Type = tpe
+    final def tpe: Type = tpe_*
+
+    /** typeConstructor throws an exception when called on term
+     *  symbols; this is a more forgiving alternative.  Calls
+     *  typeConstructor on TypeSymbols, returns info otherwise.
+     */
+    def tpeHK: Type = info
+
+    /** Only applicable to TypeSymbols, it is the type corresponding
+     *  to the symbol itself.  For instance, the type of a List might
+     *  be List[Int] - the same symbol's typeConstructor is simply List.
+     *  One might be tempted to write that as List[_], and in some
+     *  contexts this is possible, but it is discouraged because it is
+     *  syntactically indistinguishable from and easily confused with the
+     *  type List[T] forSome { type T; }, which can also be written List[_].
+     */
+    def typeConstructor: Type = (
+      // Avoiding a third override in NoSymbol to preserve bimorphism
+      if (this eq NoSymbol)
+        abort("no-symbol does not have a type constructor (this may indicate scalac cannot find fundamental classes)")
+      else
+        abort("typeConstructor inapplicable for " + this)
+    )
+
+    /** The type of this symbol, guaranteed to be of kind *.
+     *  If there are unapplied type parameters, they will be
+     *  substituted with dummy type arguments derived from the
+     *  type parameters.  Such types are not valid in a general
+     *  sense and will cause difficult-to-find bugs if allowed
+     *  to roam free.
+     *
+     *  If you call tpe_* explicitly to obtain these types,
+     *  you are responsible for them as if they were your own
+     *  minor children.
+     */
+    def tpe_* : Type = info
+
+    // Alternate implementation of def tpe for warning about misuse,
+    // disabled to keep the method maximally hotspot-friendly:
+    // def tpe: Type = {
+    //   val result = tpe_*
+    //   if (settings.debug.value && result.typeArgs.nonEmpty)
+    //     printCaller(s"""Call to ${this.tpe} created $result: call tpe_* or tpeHK""")("")
+    //   result
+    // }
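
    The applied-type versus type-constructor distinction drawn above is also visible through the
    public reflection API; a minimal sketch (not part of the patch):

        import scala.reflect.runtime.{universe => ru}

        object TypeConstructorDemo extends App {
          val applied = ru.typeOf[List[Int]]     // a type of kind *: List[Int]
          val ctor    = applied.typeConstructor  // the unapplied constructor: List

          println(applied)            // List[Int]
          println(ctor)               // List
          println(applied.typeArgs)   // List(Int)
          println(ctor.typeArgs)      // List() - the constructor carries no (dummy) arguments
        }
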
 
     /** Get type info associated with symbol at current phase, after
      *  ensuring that symbol is initialized (i.e. type is completed).
@@ -1236,13 +1434,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
         cnt += 1
         // allow for two completions:
         //   one: sourceCompleter to LazyType, two: LazyType to completed type
-        if (cnt == 3) abort("no progress in completing " + this + ":" + tp)
+        if (cnt == 3) abort(s"no progress in completing $this: $tp")
       }
       rawInfo
     }
     catch {
       case ex: CyclicReference =>
-        debugwarn("... hit cycle trying to complete " + this.fullLocationString)
+        devWarning("... hit cycle trying to complete " + this.fullLocationString)
         throw ex
     }
 
@@ -1254,9 +1452,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     }
 
     /** Set initial info. */
-    def setInfo(info: Type): this.type                      = { info_=(info); this }
+    def setInfo(info: Type): this.type  = { info_=(info); this }
     /** Modifies this symbol's info in place. */
-    def modifyInfo(f: Type => Type): this.type              = setInfo(f(info))
+    def modifyInfo(f: Type => Type): this.type = setInfo(f(info))
     /** Substitute second list of symbols for first in current info. */
     def substInfo(syms0: List[Symbol], syms1: List[Symbol]): this.type =
       if (syms0.isEmpty) this
@@ -1342,6 +1540,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       assert(isCompilerUniverse)
       if (infos == null || runId(infos.validFrom) == currentRunId) {
         infos
+      } else if (isPackageClass) {
+        // SI-7801 early phase package scopes are mutated in new runs (Namers#enterPackage), so we have to
+        //         discard transformed infos, rather than just marking them as from this run.
+        val oldest = infos.oldest
+        oldest.validFrom = validTo
+        this.infos = oldest
+        oldest
       } else {
         val prev1 = adaptInfos(infos.prev)
         if (prev1 ne infos.prev) prev1
@@ -1351,10 +1556,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
           _validTo = period(currentRunId, pid)
           phase   = phaseWithId(pid)
 
-          val info1 = (
-            if (isPackageClass) infos.info
-            else adaptToNewRunMap(infos.info)
-          )
+          val info1 = adaptToNewRunMap(infos.info)
           if (info1 eq infos.info) {
             infos.validFrom = validTo
             infos
@@ -1374,53 +1576,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       if (!isInitialized) info
       this
     }
-
-    /** Called when the programmer requests information that might require initialization of the underlying symbol.
-     *
-     *  `isFlagRelated` and `mask` describe the nature of this information.
-     *  isFlagRelated = true means that the programmer needs particular bits in flags.
-     *  isFlagRelated = false means that the request is unrelated to flags (annotations or privateWithin).
-     *
-     *  In our current architecture, symbols for top-level classes and modules
-     *  are created as dummies. Package symbols just call newClass(name) or newModule(name) and
-     *  consider their job done.
-     *
-     *  In order for such a dummy to provide meaningful info (e.g. a list of its members),
-     *  it needs to go through unpickling. Unpickling is a process of reading Scala metadata
-     *  from ScalaSignature annotations and assigning it to symbols and types.
-     *
-     *  A single unpickling session takes a top-level class or module, parses the ScalaSignature annotation
-     *  and then reads metadata for the unpicklee, its companion (if any) and all their members recursively
-     *  (i.e. the pickle not only contains info about directly nested classes/modules, but also about
-     *  classes/modules nested into those and so on).
-     *
-     *  Unpickling is triggered automatically whenever typeSignature (info in compiler parlance) is called.
-     *  This happens because package symbols assign completer thunks to the dummies they create.
-     *  Therefore metadata loading happens lazily and transparently.
-     *
-     *  Almost transparently. Unfortunately metadata isn't limited to just signatures (i.e. lists of members).
-     *  It also includes flags (which determine e.g. whether a class is sealed or not), annotations and privateWithin.
-     *  This gives rise to unpleasant effects like in SI-6277, when a flag test called on an uninitialize symbol
-     *  produces incorrect results.
-     *
-     *  One might think that the solution is simple: automatically call the completer whenever one needs
-     *  flags, annotations and privateWithin - just like it's done for typeSignature. Unfortunately, this
-     *  leads to weird crashes in scalac, and currently we can't attempt to fix the core of the compiler
-     *  risk stability a few weeks before the final release.
-     *
-     *  However we do need to fix this for runtime reflection, since it's not something we'd like to
-     *  expose to reflection users. Therefore a proposed solution is to check whether we're in a
-     *  runtime reflection universe and if yes then to commence initialization.
-     */
-    protected def needsInitialize(isFlagRelated: Boolean, mask: Long) =
-      !isInitialized && (flags & LOCKED) == 0 && shouldTriggerCompleter(this, if (infos ne null) infos.info else null, isFlagRelated, mask)
-
-    /** Was symbol's type updated during given phase? */
-    final def isUpdatedAt(pid: Phase#Id): Boolean = {
-      assert(isCompilerUniverse)
-      var infos = this.infos
-      while ((infos ne null) && phaseId(infos.validFrom) != pid + 1) infos = infos.prev
-      infos ne null
+    def maybeInitialize = {
+      try   { initialize ; true }
+      catch { case _: CyclicReference => debuglog(s"Hit cycle in maybeInitialize of $this") ; false }
     }
 
     /** Was symbol's type updated during given phase? */
@@ -1436,21 +1594,20 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      * This is done in checkAccessible and overriding checks in refchecks
      * We can't do this on class loading because it would result in infinite cycles.
      */
-    final def cookJavaRawInfo() {
-      if (hasFlag(TRIEDCOOKING)) return else setFlag(TRIEDCOOKING) // only try once...
-      val oldInfo = info
-      doCookJavaRawInfo()
-    }
+    def cookJavaRawInfo(): this.type = {
+      // only try once...
+      if (phase.erasedTypes || (this hasFlag TRIEDCOOKING))
+        return this
 
-    protected def doCookJavaRawInfo(): Unit
+      this setFlag TRIEDCOOKING
+      info  // force the current info
+      if (isJavaDefined || isType && owner.isJavaDefined)
+        this modifyInfo rawToExistential
+      else if (isOverloaded)
+        alternatives withFilter (_.isJavaDefined) foreach (_ modifyInfo rawToExistential)
 
-    /** The type constructor of a symbol is:
-     *  For a type symbol, the type corresponding to the symbol itself,
-     *  excluding parameters.
-     *  Not applicable for term symbols.
-     */
-    def typeConstructor: Type =
-      abort("typeConstructor inapplicable for " + this)
+      this
+    }
 
     /** The logic approximately boils down to finding the most recent phase
      *  which immediately follows any of parser, namer, typer, or erasure.
@@ -1472,23 +1629,27 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      *  assumption: if a type starts out as monomorphic, it will not acquire
      *  type parameters later.
      */
+    // NOTE: overridden in SynchronizedSymbols with the code copy/pasted
+    // don't forget to modify the code over there if you modify this method
     def unsafeTypeParams: List[Symbol] =
       if (isMonomorphicType) Nil
-      else atPhase(unsafeTypeParamPhase)(rawInfo.typeParams)
+      else enteringPhase(unsafeTypeParamPhase)(rawInfo.typeParams)
 
     /** The type parameters of this symbol.
      *  assumption: if a type starts out as monomorphic, it will not acquire
      *  type parameters later.
      */
+    // NOTE: overridden in SynchronizedSymbols with the code copy/pasted
+    // don't forget to modify the code over there if you modify this method
     def typeParams: List[Symbol] =
       if (isMonomorphicType) Nil
       else {
         // analogously to the "info" getter, here we allow for two completions:
         //   one: sourceCompleter to LazyType, two: LazyType to completed type
         if (validTo == NoPeriod)
-          atPhase(phaseOf(infos.validFrom))(rawInfo load this)
+          enteringPhase(phaseOf(infos.validFrom))(rawInfo load this)
         if (validTo == NoPeriod)
-          atPhase(phaseOf(infos.validFrom))(rawInfo load this)
+          enteringPhase(phaseOf(infos.validFrom))(rawInfo load this)
 
         rawInfo.typeParams
       }
@@ -1555,7 +1716,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     def makeSerializable() {
       info match {
         case ci @ ClassInfoType(_, _, _) =>
-          setInfo(ci.copy(parents = ci.parents :+ SerializableClass.tpe))
+          setInfo(ci.copy(parents = ci.parents :+ SerializableTpe))
         case i =>
           abort("Only ClassInfoTypes can be made serializable: "+ i)
       }
@@ -1578,7 +1739,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      *  the annotations attached to member a definition (class, method, type, field).
      */
     def annotations: List[AnnotationInfo] = {
-      if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+      // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+      if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize
       _annotations
     }
 
@@ -1645,6 +1807,19 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      *  and is this class symbol also different from Null or Nothing? */
     def isNonBottomSubClass(that: Symbol): Boolean = false
 
+    /** Is this class symbol Null or Nothing,
+     *  and (if Null) is `that` inhabited by null?
+     *  If this is Nothing, of course, it is a
+     *  subclass of `that` by definition.
+     *
+     *  TODO - what is implied by the fact that AnyVal now has
+     *  infinitely many non-bottom subclasses, not only 9?
+     */
+    def isBottomSubClass(that: Symbol) = (
+         (this eq NothingClass)
+      || (this eq NullClass) && that.isClass && (that ne NothingClass) && !(that isNonBottomSubClass AnyValClass)
+    )
+
     /** Overridden in NullClass and NothingClass for custom behavior.
      */
     def isSubClass(that: Symbol) = isNonBottomSubClass(that)
@@ -1663,12 +1838,23 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
 
     def filter(cond: Symbol => Boolean): Symbol =
       if (isOverloaded) {
-        val alts = alternatives
-        val alts1 = alts filter cond
-        if (alts1 eq alts) this
+        var changed = false
+        var alts0: List[Symbol] = alternatives
+        var alts1: List[Symbol] = Nil
+
+        while (alts0.nonEmpty) {
+          if (cond(alts0.head))
+            alts1 ::= alts0.head
+          else
+            changed = true
+
+          alts0 = alts0.tail
+        }
+
+        if (!changed) this
         else if (alts1.isEmpty) NoSymbol
         else if (alts1.tail.isEmpty) alts1.head
-        else owner.newOverloaded(info.prefix, alts1)
+        else owner.newOverloaded(info.prefix, alts1.reverse)
       }
       else if (cond(this)) this
       else NoSymbol
@@ -1679,10 +1865,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       result
     }
 
-    @inline final def map(f: Symbol => Symbol): Symbol = if (this eq NoSymbol) this else f(this)
-
-    final def toOption: Option[Symbol] = if (exists) Some(this) else None
-
 // ------ cloneing -------------------------------------------------------------------
 
     /** A clone of this symbol. */
@@ -1748,11 +1930,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      */
     def thisSym: Symbol = this
 
+    def hasSelfType = thisSym.tpeHK != this.tpeHK
+
     /** The type of `this` in a class, or else the type of the symbol itself. */
-    def typeOfThis = thisSym.tpe
+    def typeOfThis = thisSym.tpe_*
 
-    /** If symbol is a class, the type <code>this.type</code> in this class,
-     * otherwise <code>NoPrefix</code>.
+    /** If symbol is a class, the type `this.type` in this class,
+     * otherwise `NoPrefix`.
      * We always have: thisType <:< typeOfThis
      */
     def thisType: Type = NoPrefix
@@ -1773,7 +1957,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       //
       // The slightly more principled approach of using the paramss of the
       // primary constructor leads to cycles in, for example, pos/t5084.scala.
-      val primaryNames = constrParamAccessors.map(acc => nme.dropLocalSuffix(acc.name))
+      val primaryNames = constrParamAccessors map (_.name.dropLocal)
       caseFieldAccessorsUnsorted.sortBy { acc =>
         primaryNames indexWhere { orig =>
           (acc.name == orig) || (acc.name startsWith (orig append "$"))
@@ -1792,7 +1976,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     /** The symbol accessed by this accessor function, but with given owner type. */
     final def accessed(ownerTp: Type): Symbol = {
       assert(hasAccessorFlag, this)
-      ownerTp decl nme.getterToLocal(getterName.toTermName)
+      ownerTp decl localName
     }
 
     /** The module corresponding to this module class (note that this
@@ -1886,15 +2070,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      *        originalOwner map is not populated for memory considerations (the symbol
      *        may hang on to lazy types and in turn to whole (outdated) compilation units).
      */
-    def originalEnclosingMethod: Symbol = {
-      assert(!forInteractive, "originalOwner is not kept in presentation compiler runs.")
-      if (isMethod) this
-      else {
-        val owner = originalOwner.getOrElse(this, rawowner)
-        if (isLocalDummy) owner.enclClass.primaryConstructor
-        else owner.originalEnclosingMethod
-      }
-    }
+    def originalEnclosingMethod: Symbol = Symbols.this.originalEnclosingMethod(this)
 
     /** The method or class which logically encloses the current symbol.
      *  If the symbol is defined in the initialization part of a template
@@ -1920,18 +2096,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      *  (or, for traits: `$init`) of `C`.
      *
      */
-    def logicallyEnclosingMember: Symbol =
+    final def logicallyEnclosingMember: Symbol =
       if (isLocalDummy) enclClass.primaryConstructor
-      else if (isMethod || isClass) this
+      else if (isMethod || isClass || this == NoSymbol) this
+      else if (this == NoSymbol) { devWarningDumpStack("NoSymbol.logicallyEnclosingMember", 15); this }
       else owner.logicallyEnclosingMember
 
-    /** Kept for source compatibility with 2.9. Scala IDE for Eclipse relies on this. */
-    @deprecated("Use enclosingTopLevelClass", "2.10.0")
-    def toplevelClass: Symbol = enclosingTopLevelClass
-
     /** The top-level class containing this symbol. */
     def enclosingTopLevelClass: Symbol =
-      if (owner.isPackageClass) {
+      if (isTopLevel) {
         if (isClass) this else moduleClass
       } else owner.enclosingTopLevelClass
 
@@ -1940,11 +2113,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
          !rawInfoIsNoType
       && (this.effectiveOwner == that.effectiveOwner)
       && (   !this.effectiveOwner.isPackageClass
-          || (this.sourceFile eq null)
-          || (that.sourceFile eq null)
-          || (this.sourceFile.path == that.sourceFile.path)  // Cheap possibly wrong check, then expensive normalization
-          || (this.sourceFile.canonicalPath == that.sourceFile.canonicalPath)
-      )
+          || (this.associatedFile eq NoAbstractFile)
+          || (that.associatedFile eq NoAbstractFile)
+          || (this.associatedFile.path == that.associatedFile.path)  // Cheap possibly wrong check, then expensive normalization
+          || (this.associatedFile.canonicalPath == that.associatedFile.canonicalPath)
+         )
     )
 
     /** The internal representation of classes and objects:
@@ -2040,70 +2213,113 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      *  @param ofclazz   The class containing the symbol's definition
      *  @param site      The base type from which member types are computed
      */
-    final def matchingSymbol(ofclazz: Symbol, site: Type): Symbol = {
-      //OPT cut down on #closures by special casing non-overloaded case
-      // was: ofclazz.info.nonPrivateDecl(name) filter (sym =>
-      //        !sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
-      val result = ofclazz.info.nonPrivateDecl(name)
-      def qualifies(sym: Symbol) = !sym.isTerm || (site.memberType(this) matches site.memberType(sym))
-      if ((result eq NoSymbol) || !result.isOverloaded && qualifies(result)) result
-      else result filter qualifies
-    }
+    final def matchingSymbol(ofclazz: Symbol, site: Type): Symbol =
+      matchingSymbolInternal(site, ofclazz.info nonPrivateDecl name)
 
     /** The non-private member of `site` whose type and name match the type of this symbol. */
     final def matchingSymbol(site: Type, admit: Long = 0L): Symbol =
-      site.nonPrivateMemberAdmitting(name, admit).filter(sym =>
-        !sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
+      matchingSymbolInternal(site, site.nonPrivateMemberAdmitting(name, admit))
 
-    /** The symbol, in class `ofclazz`, that is overridden by this symbol.
+    private def matchingSymbolInternal(site: Type, candidate: Symbol): Symbol = {
+      def qualifies(sym: Symbol) = !sym.isTerm || ((site memberType this) matches (site memberType sym))
+      //OPT cut down on #closures by special casing non-overloaded case
+      if (candidate.isOverloaded) candidate filter qualifies
+      else if (qualifies(candidate)) candidate
+      else NoSymbol
+    }
+
+    /** The symbol, in class `baseClass`, that is overridden by this symbol.
      *
-     *  @param ofclazz is a base class of this symbol's owner.
+     *  @param baseClass is a base class of this symbol's owner.
      */
-    final def overriddenSymbol(ofclazz: Symbol): Symbol =
-      if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, owner.thisType)
+    final def overriddenSymbol(baseClass: Symbol): Symbol = (
+      // concrete always overrides abstract, so don't let an abstract definition
+      // claim to be overriding an inherited concrete one.
+      matchingInheritedSymbolIn(baseClass) filter (res => res.isDeferred || !this.isDeferred)
+    )
+
+    private def matchingInheritedSymbolIn(baseClass: Symbol): Symbol =
+      if (canMatchInheritedSymbols) matchingSymbol(baseClass, owner.thisType) else NoSymbol
 
     /** The symbol overriding this symbol in given subclass `ofclazz`.
      *
      *  @param ofclazz is a subclass of this symbol's owner
      */
-    final def overridingSymbol(ofclazz: Symbol): Symbol =
-      if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, ofclazz.thisType)
+    final def overridingSymbol(ofclazz: Symbol): Symbol = (
+      if (canMatchInheritedSymbols)
+        matchingSymbol(ofclazz, ofclazz.thisType)
+      else
+        NoSymbol
+    )
+
+    /** If false, this symbol cannot possibly participate in an override,
+     *  either as overrider or overridee. For internal use; you should consult
+     *  with isOverridingSymbol. This is used by isOverridingSymbol to escape
+     *  the recursive knot.
+     */
+    private def canMatchInheritedSymbols = (
+         owner.isClass
+      && !this.isClass
+      && !this.isConstructor
+    )
 
-    /** Returns all symbols overriden by this symbol. */
-    final def allOverriddenSymbols: List[Symbol] =
-      if (!owner.isClass) Nil
-      else owner.ancestors map overriddenSymbol filter (_ != NoSymbol)
+    // All the symbols overridden by this symbol and this symbol at the head,
+    // or Nil if this is NoSymbol.
+    def overrideChain = (
+      if (this eq NoSymbol) Nil
+      else if (isOverridingSymbol) this :: allOverriddenSymbols
+      else this :: Nil
+    )
+
+    /** Returns all symbols overridden by this symbol. */
+    final def allOverriddenSymbols: List[Symbol] = {
+      def loop(xs: List[Symbol]): List[Symbol] = xs match {
+        case Nil     => Nil
+        case x :: xs =>
+          overriddenSymbol(x) match {
+            case NoSymbol => loop(xs)
+            case sym      => sym :: loop(xs)
+          }
+      }
+      if (isOverridingSymbol) loop(owner.ancestors) else Nil
+    }
 
     /** Equivalent to allOverriddenSymbols.nonEmpty, but more efficient. */
-    // !!! When if ever will this answer differ from .isOverride?
-    // How/where is the OVERRIDE flag managed, as compared to how checks
-    // based on type membership will evaluate?
-    def isOverridingSymbol = owner.isClass && (
-      owner.ancestors exists (cls => matchingSymbol(cls, owner.thisType) != NoSymbol)
+    lazy val isOverridingSymbol = (
+         canMatchInheritedSymbols
+      && owner.ancestors.exists(base => overriddenSymbol(base) != NoSymbol)
     )
+
     /** Equivalent to allOverriddenSymbols.head (or NoSymbol if no overrides) but more efficient. */
     def nextOverriddenSymbol: Symbol = {
-      if (owner.isClass) owner.ancestors foreach { base =>
-        val sym = overriddenSymbol(base)
-        if (sym != NoSymbol)
-          return sym
+      @tailrec def loop(bases: List[Symbol]): Symbol = bases match {
+        case Nil          => NoSymbol
+        case base :: rest =>
+          val sym = overriddenSymbol(base)
+          if (sym == NoSymbol) loop(rest) else sym
       }
-      NoSymbol
+      if (isOverridingSymbol) loop(owner.ancestors) else NoSymbol
     }
 
     /** Returns all symbols overridden by this symbol, plus all matching symbols
      *  defined in parents of the selftype.
      */
-    final def extendedOverriddenSymbols: List[Symbol] =
-      if (!owner.isClass) Nil
-      else owner.thisSym.ancestors map overriddenSymbol filter (_ != NoSymbol)
+    final def extendedOverriddenSymbols: List[Symbol] = (
+      if (canMatchInheritedSymbols)
+        owner.thisSym.ancestors map overriddenSymbol filter (_ != NoSymbol)
+      else
+        Nil
+    )
 
     /** The symbol accessed by a super in the definition of this symbol when
      *  seen from class `base`. This symbol is always concrete.
      *  pre: `this.owner` is in the base class sequence of `base`.
      */
-    final def superSymbol(base: Symbol): Symbol = {
-      var bcs = base.info.baseClasses.dropWhile(owner != _).tail
+    @deprecated("Use `superSymbolIn` instead", "2.11.0")
+    final def superSymbol(base: Symbol): Symbol = superSymbolIn(base)
+
+    final def superSymbolIn(base: Symbol): Symbol = {
+      var bcs = base.info.baseClasses dropWhile (owner != _) drop 1
       var sym: Symbol = NoSymbol
       while (!bcs.isEmpty && sym == NoSymbol) {
         if (!bcs.head.isImplClass)
@@ -2116,22 +2332,30 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     /** The getter of this value or setter definition in class `base`, or NoSymbol if
      *  none exists.
      */
-    final def getter(base: Symbol): Symbol = base.info.decl(getterName) filter (_.hasAccessorFlag)
+    @deprecated("Use `getterIn` instead", "2.11.0")
+    final def getter(base: Symbol): Symbol = getterIn(base)
 
-    def getterName: TermName = (
-      if (isSetter) nme.setterToGetter(name.toTermName)
-      else if (nme.isLocalName(name)) nme.localToGetter(name.toTermName)
-      else name.toTermName
-    )
+    final def getterIn(base: Symbol): Symbol =
+      base.info decl getterName filter (_.hasAccessorFlag)
+
+    def getterName: TermName = name.getterName
+    def setterName: TermName = name.setterName
+    def localName: TermName  = name.localName
 
     /** The setter of this value or getter definition, or NoSymbol if none exists */
-    final def setter(base: Symbol): Symbol = setter(base, false)
+    @deprecated("Use `setterIn` instead", "2.11.0")
+    final def setter(base: Symbol, hasExpandedName: Boolean = needsExpandedSetterName): Symbol =
+      setterIn(base, hasExpandedName)
 
-    final def setter(base: Symbol, hasExpandedName: Boolean): Symbol = {
-      var sname = nme.getterToSetter(nme.getterName(name.toTermName))
-      if (hasExpandedName) sname = nme.expandedSetterName(sname, base)
-      base.info.decl(sname) filter (_.hasAccessorFlag)
-    }
+    final def setterIn(base: Symbol, hasExpandedName: Boolean = needsExpandedSetterName): Symbol =
+      base.info decl setterNameInBase(base, hasExpandedName) filter (_.hasAccessorFlag)
+
+    def needsExpandedSetterName = (
+      if (isMethod) hasStableFlag && !isLazy
+      else hasNoFlags(LAZY | MUTABLE)
+    )
+    def setterNameInBase(base: Symbol, expanded: Boolean): TermName =
+      if (expanded) nme.expandedSetterName(setterName, base) else setterName
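
As a hypothetical usage sketch (the class `C` and the symbol names are assumed for the example only):

    class C { var x: Int = 0 }

    // Assuming xSym is the symbol of the accessor x and cSym is the class symbol of C:
    //   xSym.getterName       // expected: TermName("x")
    //   xSym.setterName       // expected: TermName("x_$eq"), i.e. x_=
    //   xSym.getterIn(cSym)   // expected: the getter symbol of x declared in C
    //   xSym.setterIn(cSym)   // expected: the setter symbol x_= declared in C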
 
     /** If this is a derived value class, return its unbox method
      *  or NoSymbol if it does not exist.
@@ -2188,35 +2412,20 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       case p :: _ => p
       case _      => NoSymbol
     }
-/* code for fixing nested objects
-    def expandModuleClassName() {
-      name = newTypeName(name.toString + "$")
-    }
 
-    def isExpandedModuleClass: Boolean = name(name.length - 1) == '$'
-*/
+    // We want to re-use the field in ClassSymbol which stores the source
+    // file to also store the classfile, but without changing the behavior
+    // of sourceFile (which, at least in the IDE, is expected to return
+    // actual source code only). So sourceFile has classfiles filtered out.
+    final def sourceFile: AbstractFile =
+      if ((associatedFile eq NoAbstractFile) || (associatedFile.path endsWith ".class")) null else associatedFile
 
-    /** Desire to re-use the field in ClassSymbol which stores the source
-     *  file to also store the classfile, but without changing the behavior
-     *  of sourceFile (which is expected at least in the IDE only to
-     *  return actual source code.) So sourceFile has classfiles filtered out.
+    /** Overridden in ModuleSymbols to delegate to the module class.
+     *  Never null; if there is no associated file, returns NoAbstractFile.
      */
-    private def sourceFileOnly(file: AbstractFile): AbstractFile =
-      if ((file eq null) || (file.path endsWith ".class")) null else file
-
-    private def binaryFileOnly(file: AbstractFile): AbstractFile =
-      if ((file eq null) || !(file.path endsWith ".class")) null else file
-
-    final def binaryFile: AbstractFile = binaryFileOnly(associatedFile)
-    final def sourceFile: AbstractFile = sourceFileOnly(associatedFile)
-
-    /** Overridden in ModuleSymbols to delegate to the module class. */
     def associatedFile: AbstractFile = enclosingTopLevelClass.associatedFile
     def associatedFile_=(f: AbstractFile) { abort("associatedFile_= inapplicable for " + this) }
 
-    @deprecated("Use associatedFile_= instead", "2.10.0")
-    def sourceFile_=(f: AbstractFile): Unit = associatedFile_=(f)
-
     /** If this is a sealed class, its known direct subclasses.
      *  Otherwise, the empty set.
      */
@@ -2228,11 +2437,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
 
     @inline final def orElse(alt: => Symbol): Symbol = if (this ne NoSymbol) this else alt
     @inline final def andAlso(f: Symbol => Unit): Symbol = { if (this ne NoSymbol) f(this) ; this }
+    @inline final def fold[T](none: => T)(f: Symbol => T): T = if (this ne NoSymbol) f(this) else none
+    @inline final def map(f: Symbol => Symbol): Symbol = if (this eq NoSymbol) this else f(this)
 
-// ------ toString -------------------------------------------------------------------
+    final def toOption: Option[Symbol] = if (exists) Some(this) else None
 
-    /** A tag which (in the ideal case) uniquely identifies class symbols */
-    final def tag: Int = fullName.##
+
+// ------ toString -------------------------------------------------------------------
 
     /** The simple name of this Symbol */
     final def simpleName: Name = name
@@ -2251,20 +2462,19 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       else if (isClass) "class"
       else if (isType && !isParameter) "type"
       else if (isVariable) "var"
-      else if (isPackage) "package"
+      else if (hasPackageFlag) "package"
       else if (isModule) "object"
       else if (isSourceMethod) "def"
       else if (isTerm && (!isParameter || isParamAccessor)) "val"
       else ""
 
-    private case class SymbolKind(accurate: String, sanitized: String, abbreviation: String)
     private def symbolKind: SymbolKind = {
       var kind =
-        if (isTermMacro) ("macro method", "macro method", "MAC")
+        if (isTermMacro) ("term macro", "macro method", "MACM")
         else if (isInstanceOf[FreeTermSymbol]) ("free term", "free term", "FTE")
         else if (isInstanceOf[FreeTypeSymbol]) ("free type", "free type", "FTY")
-        else if (isPackage) ("package", "package", "PK")
         else if (isPackageClass) ("package class", "package", "PKC")
+        else if (isPackage) ("package", "package", "PK")
         else if (isPackageObject) ("package object", "package", "PKO")
         else if (isPackageObjectClass) ("package object class", "package", "PKOC")
         else if (isAnonymousClass) ("anonymous class", "anonymous class", "AC")
@@ -2279,7 +2489,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
         else if (isTrait) ("trait", "trait", "TRT")
         else if (isClass) ("class", "class", "CLS")
         else if (isType) ("type", "type", "TPE")
-        else if (isClassConstructor && isPrimaryConstructor) ("primary constructor", "constructor", "PCTOR")
+        else if (isClassConstructor && (owner.hasCompleteInfo && isPrimaryConstructor)) ("primary constructor", "constructor", "PCTOR")
         else if (isClassConstructor) ("constructor", "constructor", "CTOR")
         else if (isSourceMethod) ("method", "method", "METH")
         else if (isTerm) ("value", "value", "VAL")
@@ -2321,14 +2531,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      *  If !settings.debug translates expansions of operators back to operator symbol.
      *  E.g. $eq => =.
      *  If settings.uniqid, adds id.
+     *  If settings.Yshowsymowners, adds owner's id.
      *  If settings.Yshowsymkinds, adds abbreviated symbol kind.
      */
-    def nameString: String = (
-      if (!settings.uniqid.value && !settings.Yshowsymkinds.value) "" + originalName.decode
-      else if (settings.uniqid.value && !settings.Yshowsymkinds.value) originalName.decode + "#" + id
-      else if (!settings.uniqid.value && settings.Yshowsymkinds.value) originalName.decode + "#" + abbreviatedKindString
-      else originalName.decode + "#" + id + "#" + abbreviatedKindString
-    )
+    def nameString: String = {
+      val name_s = if (settings.debug.value) "" + unexpandedName else unexpandedName.dropLocal.decode
+      val kind_s = if (settings.Yshowsymkinds.value) "#" + abbreviatedKindString else ""
+
+      name_s + idString + kind_s
+    }
 
     def fullNameString: String = {
       def recur(sym: Symbol): String = {
@@ -2341,15 +2552,23 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     }
 
     /** If settings.uniqid is set, the symbol's id, else "" */
-    final def idString = if (settings.uniqid.value) "#"+id else ""
+    final def idString = {
+      val id_s = if (settings.uniqid.value) "#"+id else ""
+      val owner_s = if (settings.Yshowsymowners.value) "@"+owner.id else ""
+      id_s + owner_s
+    }
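
A rough sketch of the resulting strings under different flag combinations, assuming a method symbol named `foo` with id 42 whose owner has id 7 (flag spellings and exact output are illustrative only):

    //   no flags                    ->  "foo"
    //   -uniqid                     ->  "foo#42"
    //   -uniqid -Yshow-symowners    ->  "foo#42@7"
    //   -uniqid -Yshow-symkinds     ->  "foo#42#METH"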
 
     /** String representation, including symbol's kind e.g., "class Foo", "method Bar".
      *  If hasMeaninglessName is true, uses the owner's name to disambiguate identity.
      */
-    override def toString: String = compose(
-      kindString,
-      if (hasMeaninglessName) owner.decodedName + idString else nameString
-    )
+    override def toString: String = {
+      if (isPackageObjectOrClass && !settings.debug)
+        s"package object ${owner.decodedName}"
+      else compose(
+        kindString,
+        if (hasMeaninglessName) owner.decodedName + idString else nameString
+      )
+    }
 
     /** String representation of location.
      */
@@ -2374,6 +2593,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
         if (settings.debug.value) parentsString(tp.parents)
         else briefParentsString(tp.parents)
       )
+      def isStructuralThisType = (
+        // prevents disasters like SI-8158
+        owner.isInitialized && owner.isStructuralRefinement && tp == owner.tpe
+      )
       if (isType) typeParamsString(tp) + (
         if (isClass) " extends " + parents
         else if (isAliasType) " = " + tp.resultType
@@ -2384,15 +2607,23 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       )
       else if (isModule) "" //  avoid "object X of type X.type"
       else tp match {
-        case PolyType(tparams, res)  => typeParamsString(tp) + infoString(res)
-        case NullaryMethodType(res)  => infoString(res)
-        case MethodType(params, res) => valueParamsString(tp) + infoString(res)
-        case _                       => ": " + tp
+        case PolyType(tparams, res)    => typeParamsString(tp) + infoString(res)
+        case NullaryMethodType(res)    => infoString(res)
+        case MethodType(params, res)   => valueParamsString(tp) + infoString(res)
+        case _ if isStructuralThisType => ": " + owner.name
+        case _                         => ": " + tp
       }
     }
 
     def infosString = infos.toString
-    def debugLocationString = fullLocationString + " (flags: " + debugFlagString + ")"
+    def debugLocationString = {
+      val pre = flagString match {
+        case ""                  => ""
+        case s if s contains ' ' => "(" + s + ") "
+        case s                   => s + " "
+      }
+      pre + fullLocationString
+    }
 
     private def defStringCompose(infoString: String) = compose(
       flagString,
@@ -2458,31 +2689,32 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     override def companionSymbol: Symbol = companionClass
     override def moduleClass = if (isModule) referenced else NoSymbol
 
-    override def hasDefault         = this hasFlag DEFAULTPARAM // overloaded with TRAIT
     override def isBridge           = this hasFlag BRIDGE
     override def isEarlyInitialized = this hasFlag PRESUPER
     override def isMethod           = this hasFlag METHOD
     override def isModule           = this hasFlag MODULE
     override def isOverloaded       = this hasFlag OVERLOADED
-    override def isPackage          = this hasFlag PACKAGE
+    /*** !!! TODO: shouldn't we do something like the following:
+    override def isOverloaded       = (
+      if (this.isInitialized)
+        this hasFlag OVERLOADED
+      else
+        (infos ne null) && infos.info.isInstanceOf[OverloadedType]
+    )
+    ***/
     override def isValueParameter   = this hasFlag PARAM
 
     override def isSetterParameter  = isValueParameter && owner.isSetter
     override def isAccessor         = this hasFlag ACCESSOR
     override def isGetter           = isAccessor && !isSetter
+    override def isDefaultGetter    = name containsName nme.DEFAULT_GETTER_STRING
     override def isSetter           = isAccessor && nme.isSetterName(name)  // todo: make independent of name, as this can be forged.
     override def isLocalDummy       = nme.isLocalDummyName(name)
     override def isClassConstructor = name == nme.CONSTRUCTOR
     override def isMixinConstructor = name == nme.MIXIN_CONSTRUCTOR
     override def isConstructor      = nme.isConstructorName(name)
 
-    override def isPackageObject  = isModule && (name == nme.PACKAGE)
-    override def isStable = !isUnstable
-    private def isUnstable = (
-         isMutable
-      || (hasFlag(METHOD | BYNAMEPARAM) && !hasFlag(STABLE))
-      || (tpe.isVolatile && !hasAnnotation(uncheckedStableClass))
-    )
+    override def isPackageObject = isModule && (name == nme.PACKAGE)
 
     // The name in comments is what it is being disambiguated from.
     // TODO - rescue CAPTURED from BYNAMEPARAM so we can see all the names.
@@ -2562,36 +2794,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
         name = nme.expandedName(name.toTermName, base)
       }
     }
-
-    protected def doCookJavaRawInfo() {
-      def cook(sym: Symbol) {
-        require(sym.isJavaDefined, sym)
-        // @M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible
-        // object rawToExistentialInJava extends TypeMap {
-        //   def apply(tp: Type): Type = tp match {
-        //     // any symbol that occurs in a java sig, not just java symbols
-        //     // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14
-        //     case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty =>
-        //       val eparams = typeParamsToExistentials(sym, sym.typeParams)
-        //       existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe)))
-        //     case _ =>
-        //       mapOver(tp)
-        //   }
-        // }
-        val tpe1 = rawToExistential(sym.tpe)
-        // println("cooking: "+ sym +": "+ sym.tpe +" to "+ tpe1)
-        if (tpe1 ne sym.tpe) {
-          sym.setInfo(tpe1)
-        }
-      }
-
-      if (isJavaDefined)
-        cook(this)
-      else if (isOverloaded)
-        for (sym2 <- alternatives)
-          if (sym2.isJavaDefined)
-            cook(sym2)
-    }
   }
   implicit val TermSymbolTag = ClassTag[TermSymbol](classOf[TermSymbol])
 
@@ -2672,17 +2874,36 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
         }
       loop(info)
     }
+
+    override def exceptions = annotations flatMap ThrownException.unapply
   }
   implicit val MethodSymbolTag = ClassTag[MethodSymbol](classOf[MethodSymbol])
 
   class AliasTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
   extends TypeSymbol(initOwner, initPos, initName) {
     type TypeOfClonedSymbol = TypeSymbol
+    override def variance = if (isLocalToThis) Bivariant else info.typeSymbol.variance
+    override def isContravariant = variance.isContravariant
+    override def isCovariant     = variance.isCovariant
     final override def isAliasType = true
     override def cloneSymbolImpl(owner: Symbol, newFlags: Long): TypeSymbol =
       owner.newNonClassSymbol(name, pos, newFlags)
   }
 
+  /** Let's say you have a type definition
+   *
+   *  {{{
+   *    type T <: Number
+   *  }}}
+   *
+   *  and tsym is the symbol corresponding to T. Then
+   *
+   *  {{{
+   *    tsym is an instance of AbstractTypeSymbol
+   *    tsym.info == TypeBounds(Nothing, Number)
+   *    tsym.tpe  == TypeRef(NoPrefix, T, List())
+   *  }}}
+   */
   class AbstractTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
   extends TypeSymbol(initOwner, initPos, initName) {
     type TypeOfClonedSymbol = TypeSymbol
@@ -2711,7 +2932,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     final def asNameType(n: Name) = n.toTypeName
 
     override def isNonClassType = true
-    override def isTypeMacro    = hasFlag(MACRO)
 
     override def resolveOverloadedFlag(flag: Long) = flag match {
       case TRAIT         => "<trait>"         // DEFAULTPARAM
@@ -2729,7 +2949,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     override def isAbstractType          = this hasFlag DEFERRED
     override def isContravariant         = this hasFlag CONTRAVARIANT
     override def isCovariant             = this hasFlag COVARIANT
-    override def isExistentialQuantified = isExistentiallyBound && !isSkolem
     override def isExistentiallyBound    = this hasFlag EXISTENTIAL
     override def isTypeParameter         = isTypeParameterOrSkolem && !isSkolem
     override def isTypeParameterOrSkolem = this hasFlag PARAM
@@ -2751,63 +2970,63 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     private def newPrefix = if (this hasFlag EXISTENTIAL | PARAM) NoPrefix else owner.thisType
     private def newTypeRef(targs: List[Type]) = typeRef(newPrefix, this, targs)
 
-    /** Let's say you have a type definition
-     *
-     *  {{{
-     *    type T <: Number
-     *  }}}
+    /** A polymorphic type symbol has two distinct "types":
      *
-     *  and tsym is the symbol corresponding to T. Then
+     *  tpe_*  a TypeRef with: dummy type args, no unapplied type parameters, and kind *
+     *  tpeHK  a TypeRef with: no type args, unapplied type parameters, and
+     *           kind (*,*,...,*) => * depending on the number of tparams.
      *
-     *  {{{
-     *    tsym.info = TypeBounds(Nothing, Number)
-     *    tsym.tpe  = TypeRef(NoPrefix, T, List())
-     *  }}}
-     */
-    override def tpe: Type = {
-      if (tpeCache eq NoType) throw CyclicReference(this, typeConstructor)
+     * The dummy type args in tpe_* are created by wrapping a TypeRef
+     * around the type parameter symbols.  Types containing dummies will
+     * hide errors or introduce spurious ones if they are passed around
+     * as if normal types.  They should only be used in local operations
+     * where they will either be discarded immediately after, or will
+     * undergo substitution in which the dummies are replaced by actual
+     * type arguments.
+     */
+    override def tpe_* : Type = {
+      maybeUpdateTypeCache()
+      tpeCache
+    }
+    override def typeConstructor: Type = {
+      if (tyconCacheNeedsUpdate)
+        setTyconCache(newTypeRef(Nil))
+      tyconCache
+    }
+    override def tpeHK: Type = typeConstructor
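
An illustrative sketch of the distinction, using List as a hypothetical example (not part of the patch):

    //   Given listSym, the class symbol of scala.List (one type parameter A):
    //     listSym.typeConstructor  // TypeRef with no args: the unapplied List, kind * -> *
    //     listSym.tpeHK            // same as typeConstructor
    //     listSym.tpe_*            // TypeRef with a dummy arg: List[A], where A is List's own
    //                              // type parameter wrapped as an argument, kind *
    //   The dummy-argument form is only safe locally, e.g. immediately before a substitution
    //   that replaces A with a real type argument.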
+
+    private def tyconCacheNeedsUpdate = (tyconCache eq null) || tyconRunId != currentRunId
+    private def setTyconCache(tycon: Type) {
+      tyconCache = tycon
+      tyconRunId = currentRunId
+      assert(tyconCache ne null, this)
+    }
+
+    private def maybeUpdateTypeCache() {
       if (tpePeriod != currentPeriod) {
-        if (isValid(tpePeriod)) {
+        if (isValid(tpePeriod))
           tpePeriod = currentPeriod
-        } else {
-          if (isInitialized) tpePeriod = currentPeriod
-          tpeCache = NoType
-          val targs =
-            if (phase.erasedTypes && this != ArrayClass) List()
-            else unsafeTypeParams map (_.typeConstructor)
-            //@M! use typeConstructor to generate dummy type arguments,
-            // sym.tpe should not be called on a symbol that's supposed to be a higher-kinded type
-            // memberType should be used instead, that's why it uses tpeHK and not tpe
-          tpeCache = newTypeRef(targs)
-        }
+        else
+          updateTypeCache()   // perform the actual update
       }
-      assert(tpeCache ne null/*, "" + this + " " + phase*/)//debug
-      tpeCache
     }
+    private def updateTypeCache() {
+      if (tpeCache eq NoType)
+        throw CyclicReference(this, typeConstructor)
 
-    /** @M -- tpe vs tpeHK:
-     *
-     *    tpe: creates a TypeRef with dummy type arguments and kind *
-     *  tpeHK: creates a TypeRef with no type arguments but with type parameters
-     *
-     * If typeParams is nonEmpty, calling tpe may hide errors or
-     * introduce spurious ones. (For example, when deriving a type from
-     * the symbol of a type argument that may be higher-kinded.) As far
-     * as I can tell, it only makes sense to call tpe in conjunction
-     * with a substitution that replaces the generated dummy type
-     * arguments by their actual types.
-     *
-     * TODO: the above conditions desperately need to be enforced by code.
-     */
-    override def tpeHK = typeConstructor // @M! used in memberType
+      if (isInitialized)
+        tpePeriod = currentPeriod
 
-    override def typeConstructor: Type = {
-      if ((tyconCache eq null) || tyconRunId != currentRunId) {
-        tyconCache = newTypeRef(Nil)
-        tyconRunId = currentRunId
-      }
-      assert(tyconCache ne null)
-      tyconCache
+      tpeCache = NoType // cycle marker
+      val noTypeParams = phase.erasedTypes && this != ArrayClass || unsafeTypeParams.isEmpty
+      tpeCache = newTypeRef(
+        if (noTypeParams) Nil
+        else unsafeTypeParams map (_.typeConstructor)
+      )
+      // Avoid carrying around different types in tyconCache and tpeCache
+      // for monomorphic types.
+      if (noTypeParams && tyconCacheNeedsUpdate)
+        setTyconCache(tpeCache)
     }
 
     override def info_=(tp: Type) {
@@ -2833,15 +3052,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      * public class Test1<T extends Test3> {}
      * info for T in Test1 should be >: Nothing <: Test3[_]
      */
-    protected def doCookJavaRawInfo() {
-      if (isJavaDefined || owner.isJavaDefined) {
-        val tpe1 = rawToExistential(info)
-        // println("cooking type: "+ this +": "+ info +" to "+ tpe1)
-        if (tpe1 ne info) {
-          setInfo(tpe1)
-        }
-      }
-    }
 
     if (Statistics.hotEnabled) Statistics.incCounter(typeSymbolCount)
   }
@@ -2871,11 +3081,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     // a type symbol bound by an existential type, for instance the T in
     // List[T] forSome { type T }
     override def isExistentialSkolem = this hasFlag EXISTENTIAL
-    override def isGADTSkolem        = this hasAllFlags GADT_SKOLEM_FLAGS
+    override def isGADTSkolem        = this hasAllFlags GADT_SKOLEM_FLAGS.toLong
     override def isTypeSkolem        = this hasFlag PARAM
     override def isAbstractType      = this hasFlag DEFERRED
 
-    override def isExistentialQuantified = false
     override def existentialBound = if (isAbstractType) this.info else super.existentialBound
 
     /** If typeskolem comes from a type parameter, that parameter, otherwise skolem itself */
@@ -2934,7 +3143,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     override def isAnonymousClass        = name containsName tpnme.ANON_CLASS_NAME
     override def isConcreteClass         = !(this hasFlag ABSTRACT | TRAIT)
     override def isJavaInterface         = hasAllFlags(JAVA | TRAIT)
-    override def isNestedClass           = !owner.isPackageClass
+    override def isNestedClass           = !isTopLevel
     override def isNumericValueClass     = definitions.isNumericValueClass(this)
     override def isNumeric               = isNumericValueClass
     override def isPackageObjectClass    = isModuleClass && (name == tpnme.PACKAGE)
@@ -2959,24 +3168,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      */
     override def isLocalClass = (
          isAnonOrRefinementClass
-      || isLocal
-      || !owner.isPackageClass && owner.isLocalClass
+      || isLocalToBlock
+      || !isTopLevel && owner.isLocalClass
     )
-    override def isStableClass = (this hasFlag STABLE) || checkStable()
-
-    private def checkStable() = {
-      def hasNoAbstractTypeMember(clazz: Symbol): Boolean =
-        (clazz hasFlag STABLE) || {
-          var e = clazz.info.decls.elems
-          while ((e ne null) && !(e.sym.isAbstractType && info.member(e.sym.name) == e.sym))
-            e = e.next
-          e == null
-        }
-      (info.baseClasses forall hasNoAbstractTypeMember) && {
-        setFlag(STABLE)
-        true
-      }
-    }
 
     override def enclClassChain = this :: owner.enclClassChain
 
@@ -2985,8 +3179,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
      *  returned, otherwise, `NoSymbol` is returned.
      */
     protected final def companionModule0: Symbol =
-      flatOwnerInfo.decl(name.toTermName).suchThat(
-        sym => sym.isModule && (sym isCoDefinedWith this) && !sym.isMethod)
+      flatOwnerInfo.decl(name.toTermName).suchThat(sym => sym.isModuleNotMethod && (sym isCoDefinedWith this))
 
     override def companionModule    = companionModule0
     override def companionSymbol    = companionModule0
@@ -3003,7 +3196,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
       if (c.isOverloaded) c.alternatives.head else c
     }
 
-    override def associatedFile = if (owner.isPackageClass) _associatedFile else super.associatedFile
+    override def associatedFile = (
+      if (!isTopLevel) super.associatedFile
+      else {
+        if (_associatedFile eq null) NoAbstractFile // guarantee not null, but save cost of initializing the var
+        else _associatedFile
+      }
+    )
     override def associatedFile_=(f: AbstractFile) { _associatedFile = f }
 
     override def reset(completer: Type): this.type = {
@@ -3016,8 +3215,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     override def thisType: Type = {
       val period = thisTypePeriod
       if (period != currentPeriod) {
-        thisTypePeriod = currentPeriod
         if (!isValid(period)) thisTypeCache = ThisType(this)
+        thisTypePeriod = currentPeriod
       }
       thisTypeCache
     }
@@ -3052,9 +3251,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
         clone.typeOfThis = typeOfThis
         clone.thisSym setName thisSym.name
       }
-      if (_associatedFile ne null)
-        clone.associatedFile = _associatedFile
-
+      clone.associatedFile = _associatedFile
       clone
     }
 
@@ -3067,6 +3264,20 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     override def children = childSet
     override def addChild(sym: Symbol) { childSet = childSet + sym }
 
+    def anonOrRefinementString = {
+      if (hasCompleteInfo) {
+        val label   = if (isAnonymousClass) "$anon:" else "refinement of"
+        val parents = parentsString(info.parents map functionNBaseType filterNot (_.typeSymbol == SerializableClass))
+        s"<$label $parents>"
+      }
+      else if (isAnonymousClass) "$anon"
+      else nameString
+    }
+    override def toString = (
+      if (isAnonOrRefinementClass) anonOrRefinementString
+      else super.toString
+    )
+
     if (Statistics.hotEnabled) Statistics.incCounter(classSymbolCount)
   }
   implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol])
@@ -3093,9 +3304,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     override def typeOfThis = {
       val period = typeOfThisPeriod
       if (period != currentPeriod) {
-        typeOfThisPeriod = currentPeriod
         if (!isValid(period))
           typeOfThisCache = singleType(owner.thisType, sourceModule)
+        typeOfThisPeriod = currentPeriod
       }
       typeOfThisCache
     }
@@ -3106,9 +3317,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
         // Skip a package object class, because the members are also in
         // the package and we wish to avoid spurious ambiguities as in pos/t3999.
         if (!isPackageObjectClass) {
+          implicitMembersCacheValue = tp.implicitMembers
           implicitMembersCacheKey1 = tp
           implicitMembersCacheKey2 = tp.decls.elems
-          implicitMembersCacheValue = tp.implicitMembers
         }
       }
       implicitMembersCacheValue
@@ -3162,6 +3373,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     )
   }
   trait StubSymbol extends Symbol {
+    devWarning("creating stub symbol to defer error: " + missingMessage)
+
     protected def missingMessage: String
 
     /** Fail the stub by throwing a [[scala.reflect.internal.MissingRequirementError]]. */
@@ -3189,8 +3402,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     override def info            = fail(NoType)
     override def rawInfo         = fail(NoType)
     override def companionSymbol = fail(NoSymbol)
-
-    debugwarn("creating stub symbol to defer error: " + missingMessage)
   }
   class StubClassSymbol(owner0: Symbol, name0: TypeName, protected val missingMessage: String) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol
   class StubTermSymbol(owner0: Symbol, name0: TermName, protected val missingMessage: String) extends TermSymbol(owner0, owner0.pos, name0) with StubSymbol
@@ -3199,11 +3410,16 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     def origin: String
   }
   class FreeTermSymbol(name0: TermName, value0: => Any, val origin: String) extends TermSymbol(NoSymbol, NoPosition, name0) with FreeSymbol with FreeTermSymbolApi {
+    final override def isFreeTerm = true
+    final override def asFreeTerm = this
     def value = value0
   }
   implicit val FreeTermSymbolTag = ClassTag[FreeTermSymbol](classOf[FreeTermSymbol])
 
-  class FreeTypeSymbol(name0: TypeName, val origin: String) extends TypeSkolem(NoSymbol, NoPosition, name0, NoSymbol) with FreeSymbol with FreeTypeSymbolApi
+  class FreeTypeSymbol(name0: TypeName, val origin: String) extends TypeSkolem(NoSymbol, NoPosition, name0, NoSymbol) with FreeSymbol with FreeTypeSymbolApi {
+    final override def isFreeType = true
+    final override def asFreeType = this
+  }
   implicit val FreeTypeSymbolTag = ClassTag[FreeTypeSymbol](classOf[FreeTypeSymbol])
 
   /** An object representing a missing symbol */
@@ -3216,10 +3432,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     def name = nme.NO_NAME
     override def name_=(n: Name) = abort("Cannot set NoSymbol's name to " + n)
 
-    synchronized {
-      setInfo(NoType)
-      privateWithin = this
-    }
+    // Syncnote: no need to synchronize this, because NoSymbol's initialization is triggered by JavaUniverse.init,
+    // which is called in the universe's constructor - something that's inherently single-threaded.
+    setInfo(NoType)
+    privateWithin = this
+
     override def info_=(info: Type) = {
       infos = TypeHistory(1, NoType, null)
       unlock()
@@ -3240,23 +3457,21 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     override def enclosingTopLevelClass: Symbol = this
     override def enclosingPackageClass: Symbol = this
     override def enclMethod: Symbol = this
-    override def associatedFile = null
-    override def ownerChain: List[Symbol] = List()
+    override def associatedFile = NoAbstractFile
+    override def owner: Symbol = {
+      devWarningDumpStack("NoSymbol.owner", 15)
+      this
+    }
+    override def ownerChain: List[Symbol] = Nil
     override def ownersIterator: Iterator[Symbol] = Iterator.empty
     override def alternatives: List[Symbol] = List()
     override def reset(completer: Type): this.type = this
     override def info: Type = NoType
     override def existentialBound: Type = NoType
     override def rawInfo: Type = NoType
-    protected def doCookJavaRawInfo() {}
     override def accessBoundary(base: Symbol): Symbol = enclosingRootClass
     def cloneSymbolImpl(owner: Symbol, newFlags: Long) = abort("NoSymbol.clone()")
     override def originalEnclosingMethod = this
-
-    override def owner: Symbol =
-      abort("no-symbol does not have an owner")
-    override def typeConstructor: Type =
-      abort("no-symbol does not have a type constructor (this may indicate scalac cannot find fundamental classes)")
   }
 
   protected def makeNoSymbol: NoSymbol = new NoSymbol
@@ -3272,8 +3487,23 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
    *  @return           the new list of info-adjusted symbols
    */
   def deriveSymbols(syms: List[Symbol], symFn: Symbol => Symbol): List[Symbol] = {
-    val syms1 = syms map symFn
-    syms1 map (_ substInfo (syms, syms1))
+    val syms1 = mapList(syms)(symFn)
+    mapList(syms1)(_ substInfo (syms, syms1))
+  }
+
+  /** Derives a new list of symbols from the given list by mapping the given
+   *  list of `syms` and `as` across the given function.
+   *  Then fixes the info of all the new symbols
+   *  by substituting the new symbols for the original symbols.
+   *
+   *  @param    syms    the prototypical symbols
+   *  @param    as      arguments to be passed to symFn together with symbols from syms (must be same length)
+   *  @param    symFn   the function to create new symbols
+   *  @return           the new list of info-adjusted symbols
+   */
+  def deriveSymbols2[A](syms: List[Symbol], as: List[A], symFn: (Symbol, A) => Symbol): List[Symbol] = {
+    val syms1 = map2(syms, as)(symFn)
+    mapList(syms1)(_ substInfo (syms, syms1))
   }
 
   /** Derives a new Type by first deriving new symbols as in deriveSymbols,
@@ -3289,6 +3519,22 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
     val syms1 = deriveSymbols(syms, symFn)
     tpe.substSym(syms, syms1)
   }
+
+  /** Derives a new Type by first deriving new symbols as in deriveSymbols2,
+   *  then performing the same oldSyms => newSyms substitution on `tpe` as is
+   *  performed on the symbol infos in deriveSymbols.
+   *
+   *  @param    syms    the prototypical symbols
+   *  @param    as      arguments to be passed to symFn together with symbols from syms (must be same length)
+   *  @param    symFn   the function to create new symbols based on `as`
+   *  @param    tpe     the prototypical type
+   *  @return           the new symbol-substituted type
+   */
+  def deriveType2[A](syms: List[Symbol], as: List[A], symFn: (Symbol, A) => Symbol)(tpe: Type): Type = {
+    val syms1 = deriveSymbols2(syms, as, symFn)
+    tpe.substSym(syms, syms1)
+  }
+
   /** Derives a new Type by instantiating the given list of symbols as
    *  WildcardTypes.
    *
@@ -3313,9 +3559,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
    *  @return           the newly created, info-adjusted symbols
    */
   def cloneSymbolsAndModify(syms: List[Symbol], infoFn: Type => Type): List[Symbol] =
-    cloneSymbols(syms) map (_ modifyInfo infoFn)
+    mapList(cloneSymbols(syms))(_ modifyInfo infoFn)
   def cloneSymbolsAtOwnerAndModify(syms: List[Symbol], owner: Symbol, infoFn: Type => Type): List[Symbol] =
-    cloneSymbolsAtOwner(syms, owner) map (_ modifyInfo infoFn)
+    mapList(cloneSymbolsAtOwner(syms, owner))(_ modifyInfo infoFn)
 
   /** Functions which perform the standard clone/substituting on the given symbols and type,
    *  then call the creator function with the new symbols and type as arguments.
@@ -3333,6 +3579,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
    */
   def mapParamss[T](sym: Symbol)(f: Symbol => T): List[List[T]] = mmap(sym.info.paramss)(f)
 
+  def existingSymbols(syms: List[Symbol]): List[Symbol] =
+    syms filter (s => (s ne null) && (s ne NoSymbol))
+
   /** Return closest enclosing method, unless shadowed by an enclosing class. */
   // TODO Move back to ExplicitOuter when the other call site is removed.
   // no use of closures here in the interest of speed.
@@ -3348,25 +3597,26 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
   }
 
   /** A class for type histories */
-  private sealed case class TypeHistory(var validFrom: Period, info: Type, prev: TypeHistory) {
+  private case class TypeHistory(var validFrom: Period, info: Type, prev: TypeHistory) {
     assert((prev eq null) || phaseId(validFrom) > phaseId(prev.validFrom), this)
     assert(validFrom != NoPeriod, this)
 
-    override def toString() =
-      "TypeHistory(" + phaseOf(validFrom)+":"+runId(validFrom) + "," + info + "," + prev + ")"
+    private def phaseString = "%s: %s".format(phaseOf(validFrom), info)
+    override def toString = toList reverseMap (_.phaseString) mkString ", "
 
     def toList: List[TypeHistory] = this :: ( if (prev eq null) Nil else prev.toList )
+
+    def oldest: TypeHistory = if (prev == null) this else prev.oldest
   }
 
 // ----- Hoisted closures and convenience methods, for compile time reductions -------
 
   private[scala] final val symbolIsPossibleInRefinement = (sym: Symbol) => sym.isPossibleInRefinement
-  private[scala] final val symbolIsNonVariant = (sym: Symbol) => sym.variance == 0
 
   @tailrec private[scala] final
   def allSymbolsHaveOwner(syms: List[Symbol], owner: Symbol): Boolean = syms match {
     case sym :: rest => sym.owner == owner && allSymbolsHaveOwner(rest, owner)
-    case _ => true
+    case _           => true
   }
 
 
@@ -3374,6 +3624,17 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
 
   Statistics.newView("#symbols")(ids)
 
+
+// -------------- Completion --------------------------------------------------------
+
+  // Used to differentiate levels of thread-safety in `Symbol.isThreadsafe`.
+  case class SymbolOps(isFlagRelated: Boolean, mask: Long)
+  val AllOps = SymbolOps(isFlagRelated = false, mask = 0L)
+  def FlagOps(mask: Long) = SymbolOps(isFlagRelated = true, mask = mask)
+
+  private def relevantSymbols(syms: Seq[Symbol]) = syms.flatMap(sym => List(sym, sym.moduleClass, sym.sourceModule))
+  def markFlagsCompleted(syms: Symbol*)(mask: Long): Unit = relevantSymbols(syms).foreach(_.markFlagsCompleted(mask))
+  def markAllCompleted(syms: Symbol*): Unit = relevantSymbols(syms).foreach(_.markAllCompleted)
 }
 
 object SymbolsStats {
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index c1753fc..9066c73 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -1,7 +1,12 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 
-abstract class TreeGen extends macros.TreeBuilder {
+import Flags._
+import util._
+import scala.collection.mutable.ListBuffer
+
+abstract class TreeGen {
   val global: SymbolTable
 
   import global._
@@ -11,10 +16,7 @@ abstract class TreeGen extends macros.TreeBuilder {
   def rootScalaDot(name: Name)       = Select(rootId(nme.scala_) setSymbol ScalaPackage, name)
   def scalaDot(name: Name)           = Select(Ident(nme.scala_) setSymbol ScalaPackage, name)
   def scalaAnnotationDot(name: Name) = Select(scalaDot(nme.annotation), name)
-  def scalaAnyRefConstr              = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass
-  def scalaUnitConstr                = scalaDot(tpnme.Unit) setSymbol UnitClass
-  def productConstr                  = scalaDot(tpnme.Product) setSymbol ProductRootClass
-  def serializableConstr             = scalaDot(tpnme.Serializable) setSymbol SerializableClass
+  def scalaAnyRefConstr              = scalaDot(tpnme.AnyRef) // used in ide
 
   def scalaFunctionConstr(argtpes: List[Tree], restpe: Tree, abstractFun: Boolean = false): Tree = {
     val cls = if (abstractFun)
@@ -47,10 +49,10 @@ abstract class TreeGen extends macros.TreeBuilder {
     mkMethodCall(Select(receiver, method), targs, args)
 
   def mkMethodCall(target: Tree, targs: List[Type], args: List[Tree]): Tree =
-    Apply(mkTypeApply(target, targs map TypeTree), args)
+    Apply(mkTypeApply(target, mapList(targs)(TypeTree)), args)
 
   def mkNullaryCall(method: Symbol, targs: List[Type]): Tree =
-    mkTypeApply(mkAttributedRef(method), targs map TypeTree)
+    mkTypeApply(mkAttributedRef(method), mapList(targs)(TypeTree))
 
   /** Builds a reference to value whose type is given stable prefix.
    *  The type must be suitable for this.  For example, it
@@ -93,7 +95,7 @@ abstract class TreeGen extends macros.TreeBuilder {
       case ConstantType(value) =>
         Literal(value) setType tpe
 
-      case AnnotatedType(_, atp, _) =>
+      case AnnotatedType(_, atp) =>
         mkAttributedQualifier(atp)
 
       case RefinedType(parents, _) =>
@@ -116,7 +118,7 @@ abstract class TreeGen extends macros.TreeBuilder {
   }
 
   /** Builds a reference to given symbol with given stable prefix. */
-  def mkAttributedRef(pre: Type, sym: Symbol): Tree = {
+  def mkAttributedRef(pre: Type, sym: Symbol): RefTree = {
     val qual = mkAttributedQualifier(pre)
     qual match {
       case EmptyTree                                  => mkAttributedIdent(sym)
@@ -126,33 +128,33 @@ abstract class TreeGen extends macros.TreeBuilder {
   }
 
   /** Builds a reference to given symbol. */
-  def mkAttributedRef(sym: Symbol): Tree =
+  def mkAttributedRef(sym: Symbol): RefTree =
     if (sym.owner.isClass) mkAttributedRef(sym.owner.thisType, sym)
     else mkAttributedIdent(sym)
 
-  /** Builds an untyped reference to given symbol. */
-  def mkUnattributedRef(sym: Symbol): Tree =
-    if (sym.owner.isClass) Select(This(sym.owner), sym)
-    else Ident(sym)
+  def mkUnattributedRef(sym: Symbol): RefTree = mkUnattributedRef(sym.fullNameAsName('.'))
+
+  def mkUnattributedRef(fullName: Name): RefTree = {
+    val hd :: tl = nme.segments(fullName.toString, assumeTerm = fullName.isTermName)
+    tl.foldLeft(Ident(hd): RefTree)(Select(_,_))
+  }
 
   /** Replaces tree type with a stable type if possible */
-  def stabilize(tree: Tree): Tree = {
-    for(tp <- stableTypeFor(tree)) tree.tpe = tp
-    tree
+  def stabilize(tree: Tree): Tree = stableTypeFor(tree) match {
+    case NoType => tree
+    case tp     => tree setType tp
   }
 
   /** Computes stable type for a tree if possible */
-  def stableTypeFor(tree: Tree): Option[Type] = tree match {
-    case This(_) if tree.symbol != null && !tree.symbol.isError =>
-      Some(ThisType(tree.symbol))
-    case Ident(_) if tree.symbol.isStable =>
-      Some(singleType(tree.symbol.owner.thisType, tree.symbol))
-    case Select(qual, _) if ((tree.symbol ne null) && (qual.tpe ne null)) && // turned assert into guard for #4064
-                            tree.symbol.isStable && qual.tpe.isStable =>
-      Some(singleType(qual.tpe, tree.symbol))
-    case _ =>
-      None
-  }
+  def stableTypeFor(tree: Tree): Type = (
+    if (!treeInfo.admitsTypeSelection(tree)) NoType
+    else tree match {
+      case This(_)         => ThisType(tree.symbol)
+      case Ident(_)        => singleType(tree.symbol.owner.thisType, tree.symbol)
+      case Select(qual, _) => singleType(qual.tpe, tree.symbol)
+      case _               => NoType
+    }
+  )
 
   /** Builds a reference with stable type to given symbol */
   def mkAttributedStableRef(pre: Type, sym: Symbol): Tree =
@@ -161,13 +163,13 @@ abstract class TreeGen extends macros.TreeBuilder {
   def mkAttributedStableRef(sym: Symbol): Tree =
     stabilize(mkAttributedRef(sym))
 
-  def mkAttributedThis(sym: Symbol): Tree =
+  def mkAttributedThis(sym: Symbol): This =
     This(sym.name.toTypeName) setSymbol sym setType sym.thisType
 
-  def mkAttributedIdent(sym: Symbol): Tree =
-    Ident(sym.name) setSymbol sym setType sym.tpe
+  def mkAttributedIdent(sym: Symbol): RefTree =
+    Ident(sym.name) setSymbol sym setType sym.tpeHK
 
-  def mkAttributedSelect(qual: Tree, sym: Symbol): Tree = {
+  def mkAttributedSelect(qual: Tree, sym: Symbol): RefTree = {
     // Tests involving the repl fail without the .isEmptyPackage condition.
     if (qual.symbol != null && (qual.symbol.isEffectiveRoot || qual.symbol.isEmptyPackage))
       mkAttributedIdent(sym)
@@ -184,16 +186,12 @@ abstract class TreeGen extends macros.TreeBuilder {
       )
       val needsPackageQualifier = (
            (sym ne null)
-        && qualsym.isPackage
-        && !sym.isDefinedInPackage
+        && qualsym.hasPackageFlag
+        && !(sym.isDefinedInPackage || sym.moduleClass.isDefinedInPackage) // SI-7817 work around strangeness in post-flatten `Symbol#owner`
       )
       val pkgQualifier =
         if (needsPackageQualifier) {
-          // The owner of a symbol which requires package qualification may be the
-          // package object iself, but it also could be any superclass of the package
-          // object.  In the latter case, we must go through the qualifier's info
-          // to obtain the right symbol.
-          val packageObject = if (sym.owner.isModuleClass) sym.owner.sourceModule else qual.tpe member nme.PACKAGE
+          val packageObject = rootMirror.getPackageObjectWithMember(qual.tpe, sym)
           Select(qual, nme.PACKAGE) setSymbol packageObject setType singleType(qual.tpe, packageObject)
         }
         else qual
@@ -207,13 +205,13 @@ abstract class TreeGen extends macros.TreeBuilder {
   /** Builds a type application node if args.nonEmpty, returns fun otherwise. */
   def mkTypeApply(fun: Tree, targs: List[Tree]): Tree =
     if (targs.isEmpty) fun else TypeApply(fun, targs)
-  def mkTypeApply(target: Tree, method: Symbol, targs: List[Type]): Tree =
-    mkTypeApply(Select(target, method), targs map TypeTree)
+  def mkAppliedTypeTree(fun: Tree, targs: List[Tree]): Tree =
+    if (targs.isEmpty) fun else AppliedTypeTree(fun, targs)
   def mkAttributedTypeApply(target: Tree, method: Symbol, targs: List[Type]): Tree =
     mkTypeApply(mkAttributedSelect(target, method), targs map TypeTree)
 
   private def mkSingleTypeApply(value: Tree, tpe: Type, what: Symbol, wrapInApply: Boolean) = {
-    val tapp = mkAttributedTypeApply(value, what, tpe.normalize :: Nil)
+    val tapp = mkAttributedTypeApply(value, what, tpe.dealias :: Nil)
     if (wrapInApply) Apply(tapp, Nil) else tapp
   }
   private def typeTestSymbol(any: Boolean) = if (any) Any_isInstanceOf else Object_isInstanceOf
@@ -229,7 +227,7 @@ abstract class TreeGen extends macros.TreeBuilder {
 
   /** Cast `tree` to `pt`, unless tpe is a subtype of pt, or pt is Unit.  */
   def maybeMkAsInstanceOf(tree: Tree, pt: Type, tpe: Type, beforeRefChecks: Boolean = false): Tree =
-    if ((pt == UnitClass.tpe) || (tpe <:< pt)) tree
+    if ((pt == UnitTpe) || (tpe <:< pt)) tree
     else atPos(tree.pos)(mkAsInstanceOf(tree, pt, any = true, wrapInApply = !beforeRefChecks))
 
   /** Apparently we smuggle a Type around as a Literal(Constant(tp))
@@ -248,10 +246,6 @@ abstract class TreeGen extends macros.TreeBuilder {
     Literal(Constant(tp)) setType ConstantType(Constant(tp))
 
   /** Builds a list with given head and tail. */
-  def mkNewCons(head: Tree, tail: Tree): Tree =
-    New(Apply(mkAttributedRef(ConsClass), List(head, tail)))
-
-  /** Builds a list with given head and tail. */
   def mkNil: Tree = mkAttributedRef(NilModule)
 
   /** Builds a tree representing an undefined local, as in
@@ -259,7 +253,7 @@ abstract class TreeGen extends macros.TreeBuilder {
    *  which is appropriate to the given Type.
    */
   def mkZero(tp: Type): Tree = tp.typeSymbol match {
-    case NothingClass => mkMethodCall(Predef_???, Nil) setType NothingClass.tpe
+    case NothingClass => mkMethodCall(Predef_???, Nil) setType NothingTpe
     case _            => Literal(mkConstantZero(tp)) setType tp
   }
 
@@ -276,12 +270,28 @@ abstract class TreeGen extends macros.TreeBuilder {
     case _            => Constant(null)
   }
 
+  /** Wrap an expression in a named argument. */
+  def mkNamedArg(name: Name, tree: Tree): Tree = mkNamedArg(Ident(name), tree)
+  def mkNamedArg(lhs: Tree, rhs: Tree): Tree = atPos(rhs.pos)(AssignOrNamedArg(lhs, rhs))
+
   /** Builds a tuple */
-  def mkTuple(elems: List[Tree]): Tree =
-    if (elems.isEmpty) Literal(Constant())
-    else Apply(
-      Select(mkAttributedRef(TupleClass(elems.length).caseModule), nme.apply),
-      elems)
+  def mkTuple(elems: List[Tree], flattenUnary: Boolean = true): Tree = elems match {
+    case Nil =>
+      Literal(Constant(()))
+    case tree :: Nil if flattenUnary =>
+      tree
+    case _ =>
+      Apply(scalaDot(TupleClass(elems.length).name.toTermName), elems)
+  }
+
+  def mkTupleType(elems: List[Tree], flattenUnary: Boolean = true): Tree = elems match {
+    case Nil =>
+      scalaDot(tpnme.Unit)
+    case List(tree) if flattenUnary =>
+      tree
+    case _ =>
+      AppliedTypeTree(scalaDot(TupleClass(elems.length).name), elems)
+  }
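
A sketch of the tree shapes one would expect from these helpers, with `a` and `b` standing for arbitrary term trees (illustrative only):

    //   mkTuple(Nil)                            ->  Literal(Constant(()))
    //   mkTuple(List(a))                        ->  a                       (unary case flattened)
    //   mkTuple(List(a), flattenUnary = false)  ->  Apply(scala.Tuple1, a)
    //   mkTuple(List(a, b))                     ->  Apply(scala.Tuple2, a, b)
    //   mkTupleType(Nil)                        ->  scala.Unit
    //   mkTupleType(List(a, b))                 ->  AppliedTypeTree(scala.Tuple2, a, b)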
 
   // tree1 AND tree2
   def mkAnd(tree1: Tree, tree2: Tree): Tree =
@@ -295,4 +305,595 @@ abstract class TreeGen extends macros.TreeBuilder {
     assert(ReflectRuntimeUniverse != NoSymbol)
     mkAttributedRef(ReflectRuntimeUniverse) setType singleType(ReflectRuntimeUniverse.owner.thisPrefix, ReflectRuntimeUniverse)
   }
+
+  def mkSeqApply(arg: Tree): Apply = {
+    val factory = Select(mkAttributedRef(SeqModule), nme.apply)
+    Apply(factory, List(arg))
+  }
+
+  def mkSuperInitCall: Select = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
+
+  /** Generates a template with constructor corresponding to
+   *
+   *  constrmods (vparams_1) ... (vparams_n) preSuper { presupers }
+   *  extends superclass(args_1) ... (args_n) with mixins { self => body }
+   *
+   *  This gets translated to
+   *
+   *  extends superclass with mixins { self =>
+   *    presupers' // presupers without rhs
+   *    vparamss   // abstract fields corresponding to value parameters
+   *    def <init>(vparamss) {
+   *      presupers
+   *      super.<init>(args)
+   *    }
+   *    body
+   *  }
+   */
+  def mkTemplate(parents: List[Tree], self: ValDef, constrMods: Modifiers,
+                 vparamss: List[List[ValDef]], body: List[Tree], superPos: Position = NoPosition): Template = {
+    /* Add constructor to template */
+
+    // create parameters for <init> as synthetic trees.
+    var vparamss1 = mmap(vparamss) { vd =>
+      val param = atPos(vd.pos.makeTransparent) {
+        val mods = Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM) | PARAM | PARAMACCESSOR)
+        ValDef(mods withAnnotations vd.mods.annotations, vd.name, vd.tpt.duplicate, duplicateAndKeepPositions(vd.rhs))
+      }
+      param
+    }
+
+    val (edefs, rest) = body span treeInfo.isEarlyDef
+    val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
+    val gvdefs = evdefs map {
+      case vdef @ ValDef(_, _, tpt, _) =>
+        copyValDef(vdef)(
+        // atPos for the new tpt is necessary, since the original tpt might have no position
+        // (when missing type annotation for ValDef for example), so even though setOriginal modifies the
+        // position of TypeTree, it would still be NoPosition. That's what the author meant.
+        tpt = atPos(vdef.pos.focus)(TypeTree() setOriginal tpt setPos tpt.pos.focus),
+        rhs = EmptyTree
+      )
+    }
+    val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods | PRESUPER) }
+
+    val constr = {
+      if (constrMods.isTrait) {
+        if (body forall treeInfo.isInterfaceMember) None
+        else Some(
+          atPos(wrappingPos(superPos, lvdefs)) (
+            DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant())))))
+      }
+      else {
+        // convert (implicit ... ) to ()(implicit ... ) if it's the only parameter section
+        if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
+          vparamss1 = List() :: vparamss1
+        val superCall = pendingSuperCall // we can't know in advance which of the parents will end up as a superclass
+                                         // this requires knowing which of the parents is a type macro and which is not
+                                         // and that's something that cannot be found out before typer
+                                         // (the type macros aren't in the trunk yet, but there is a plan for them to land there soon)
+                                         // this means that we don't know what will be the arguments of the super call
+                                         // therefore here we emit a dummy which gets populated when the template is named and typechecked
+        Some(
+          atPos(wrappingPos(superPos, lvdefs ::: vparamss1.flatten).makeTransparent) (
+            DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
+      }
+    }
+    constr foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus = false))
+    // Field definitions for the class - remove defaults.
+
+    val fieldDefs = vparamss.flatten map (vd => {
+      val field = copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree)
+      // Prevent overlapping of `field` end's position with default argument's start position.
+      // This is needed for `Positions.Locator(pos).traverse` to return the correct tree when
+      // the `pos` is a point position with all its values equal to `vd.rhs.pos.start`.
+      if(field.pos.isRange && vd.rhs.pos.isRange) field.pos = field.pos.withEnd(vd.rhs.pos.start - 1)
+      field
+    })
+
+    global.Template(parents, self, gvdefs ::: fieldDefs ::: constr ++: etdefs ::: rest)
+  }
+
+  def mkParents(ownerMods: Modifiers, parents: List[Tree], parentPos: Position = NoPosition) =
+    if (ownerMods.isCase) parents ::: List(scalaDot(tpnme.Product), scalaDot(tpnme.Serializable))
+    else if (parents.isEmpty) atPos(parentPos)(scalaAnyRefConstr) :: Nil
+    else parents
+
+  def mkClassDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], templ: Template): ClassDef = {
+    val isInterface = mods.isTrait && (templ.body forall treeInfo.isInterfaceMember)
+    val mods1 = if (isInterface) (mods | Flags.INTERFACE) else mods
+    ClassDef(mods1, name, tparams, templ)
+  }
+
+  /** Create a positioned tree representing an object creation <new parents { stats }>
+   *  @param npos  the position of the new
+   *  @param cpos  the position of the anonymous class starting with parents
+   */
+  def mkNew(parents: List[Tree], self: ValDef, stats: List[Tree],
+            npos: Position, cpos: Position): Tree =
+    if (parents.isEmpty)
+      mkNew(List(scalaAnyRefConstr), self, stats, npos, cpos)
+    else if (parents.tail.isEmpty && stats.isEmpty) {
+      // `Parsers.template` no longer differentiates tpts and their argss
+      // e.g. `C()` will be represented as a single tree Apply(Ident(C), Nil)
+      // instead of parents = Ident(C), argss = Nil as before
+      // this change works great for things that are actually templates
+      // but in this degenerate case we need to perform postprocessing
+      val app = treeInfo.dissectApplied(parents.head)
+      atPos(npos union cpos) { New(app.callee, app.argss) }
+    } else {
+      val x = tpnme.ANON_CLASS_NAME
+      atPos(npos union cpos) {
+        Block(
+          List(
+            atPos(cpos) {
+              ClassDef(
+                Modifiers(FINAL), x, Nil,
+                mkTemplate(parents, self, NoMods, ListOfNil, stats, cpos.focus))
+            }),
+          atPos(npos) {
+            New(
+              Ident(x) setPos npos.focus,
+              Nil)
+          }
+        )
+      }
+    }
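+  // For illustration (a rough sketch): a plain `new C(arg)` takes the single-parent,
+  // no-stats branch, so the parsed Apply is dissected and rebuilt via New(app.callee, app.argss),
+  // whereas `new C { defs }` is wrapped into an anonymous class, roughly
+  // `{ final class $anon extends C { defs }; new $anon }`.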
+
+  /** Create a tree representing the function type (argtpes) => restpe */
+  def mkFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
+    AppliedTypeTree(rootScalaDot(newTypeName("Function" + argtpes.length)), argtpes ::: List(restpe))
+
+  /** Create a literal unit tree that is inserted by the compiler but not
+   *  written by end user. It's important to distinguish the two so that
+   *  quasiquotes can strip synthetic ones away.
+   */
+  def mkSyntheticUnit() = Literal(Constant(())).updateAttachment(SyntheticUnitAttachment)
+
+  /** Create block of statements `stats`  */
+  def mkBlock(stats: List[Tree]): Tree =
+    if (stats.isEmpty) mkSyntheticUnit()
+    else if (!stats.last.isTerm) Block(stats, mkSyntheticUnit())
+    else if (stats.length == 1) stats.head
+    else Block(stats.init, stats.last)
+
+  /** Create a block that wraps multiple statements but don't
+   *  do any wrapping if there is just one statement. Used by
+   *  quasiquotes, macro c.parse api and toolbox.
+   */
+  def mkTreeOrBlock(stats: List[Tree]) = stats match {
+    case Nil         => EmptyTree
+    case head :: Nil => head
+    case _           => mkBlock(stats)
+  }
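+  // For example (illustrative): mkBlock(Nil) yields the synthetic unit literal,
+  // mkBlock(List(q"42")) yields just q"42", a trailing definition such as q"val x = 1"
+  // gets padded with a synthetic unit so the block stays a term, and
+  // mkTreeOrBlock(Nil) yields EmptyTree instead.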
+
+  /** Create a tree representing an assignment <lhs = rhs> */
+  def mkAssign(lhs: Tree, rhs: Tree): Tree = lhs match {
+    case Apply(fn, args) => Apply(atPos(fn.pos)(Select(fn, nme.update)), args :+ rhs)
+    case _               => Assign(lhs, rhs)
+  }
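+  // Illustrative example: mkAssign(q"arr(i)", q"v") produces q"arr.update(i, v)",
+  // while a simple lhs such as q"x" just becomes Assign(q"x", q"v").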
+
+  def mkPackageObject(defn: ModuleDef, pidPos: Position = NoPosition, pkgPos: Position = NoPosition) = {
+    val module = copyModuleDef(defn)(name = nme.PACKAGEkw)
+    val pid    = atPos(pidPos)(Ident(defn.name))
+    atPos(pkgPos)(PackageDef(pid, module :: Nil))
+  }
+
+  // The following objects represent the encoding of for-loop enumerators
+  // into regular trees. Such representations are used:
+  //
+  //   - as the intermediate value of enumerators inside the parser,
+  //     right before the mkFor desugaring is called
+  //
+  //   - as the intermediate value of enumerators obtained after
+  //     re-sugaring for loops through build.SyntacticFor
+  //     and build.SyntacticForYield (which are used by quasiquotes)
+  //
+  // The encoding uses regular trees carrying a ForAttachment, which helps
+  // to reliably differentiate them from normal trees that can have a
+  // similar shape. For example, fq"$pat <- $rhs" is represented in
+  // the same way as q"`<-`($pat, $rhs)" but with the attachment added to
+  // the `<-` identifier.
+  //
+  // The primary rationale for such a representation, in favor of
+  // simple case classes, is the wish to reuse the same representation
+  // between quasiquotes and the parser without exposing compiler internals.
+  // An opaque tree encoding can be changed/adapted at any time without
+  // breaking end users' code.
+
+  /** Encode/decode fq"$pat <- $rhs" enumerator as q"`<-`($pat, $rhs)" */
+  object ValFrom {
+    def apply(pat: Tree, rhs: Tree): Tree =
+      Apply(Ident(nme.LARROWkw).updateAttachment(ForAttachment),
+        List(pat, rhs))
+
+    def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
+      case Apply(id @ Ident(nme.LARROWkw), List(pat, rhs))
+        if id.hasAttachment[ForAttachment.type] =>
+        Some((pat, rhs))
+      case _ => None
+    }
+  }
+
+  /** Encode/decode fq"$pat = $rhs" enumerator as q"$pat = $rhs" */
+  object ValEq {
+    def apply(pat: Tree, rhs: Tree): Tree =
+      Assign(pat, rhs).updateAttachment(ForAttachment)
+
+    def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
+      case Assign(pat, rhs)
+        if tree.hasAttachment[ForAttachment.type] =>
+        Some((pat, rhs))
+      case _ => None
+    }
+  }
+
+  /** Encode/decode fq"if $cond" enumerator as q"`if`($cond)" */
+  object Filter {
+    def apply(tree: Tree) =
+      Apply(Ident(nme.IFkw).updateAttachment(ForAttachment), List(tree))
+
+    def unapply(tree: Tree): Option[Tree] = tree match {
+      case Apply(id @ Ident(nme.IFkw), List(cond))
+        if id.hasAttachment[ForAttachment.type] =>
+        Some((cond))
+      case _ => None
+    }
+  }
+
+  /** Encode/decode body of for yield loop as q"`yield`($tree)" */
+  object Yield {
+    def apply(tree: Tree): Tree =
+      Apply(Ident(nme.YIELDkw).updateAttachment(ForAttachment), List(tree))
+
+    def unapply(tree: Tree): Option[Tree] = tree match {
+      case Apply(id @ Ident(nme.YIELDkw), List(tree))
+        if id.hasAttachment[ForAttachment.type] =>
+        Some(tree)
+      case _  => None
+    }
+  }
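+  // Sketch of how the parser feeds these encodings into mkFor (names are illustrative):
+  // `for (x <- xs if p(x)) yield f(x)` arrives roughly as
+  //   enums     = List(ValFrom(q"x", q"xs"), Filter(q"p(x)"))
+  //   sugarBody = Yield(q"f(x)")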
+
+  /** Create tree for for-comprehension <for (enums) do body> or
+  *   <for (enums) yield body> where mapName and flatMapName are chosen
+  *  corresponding to whether this is a for-do or a for-yield.
+  *  The creation performs the following rewrite rules:
+  *
+  *  1.
+  *
+  *    for (P <- G) E   ==>   G.foreach (P => E)
+  *
+  *     Here and in the following (P => E) is interpreted as the function (P => E)
+  *     if P is a variable pattern and as the partial function { case P => E } otherwise.
+  *
+  *  2.
+  *
+  *    for (P <- G) yield E  ==>  G.map (P => E)
+  *
+  *  3.
+  *
+  *    for (P_1 <- G_1; P_2 <- G_2; ...) ...
+  *      ==>
+  *    G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...)
+  *
+  *  4.
+  *
+  *    for (P <- G; E; ...) ...
+  *      ==>
+  *    for (P <- G.filter (P => E); ...) ...
+  *
+  *  5. For N < MaxTupleArity:
+  *
+  *    for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...)
+  *      ==>
+  *    for (TupleN(P_1, P_2, ... P_N) <-
+  *      for (x_1 @ P_1 <- G) yield {
+  *        val x_2 @ P_2 = E_2
+  *        ...
+  *        val x_N @ P_N = E_N
+  *        TupleN(x_1, ..., x_N)
+  *      } ...)
+  *
+  *    If any of the P_i are variable patterns, the corresponding `x_i @ P_i' is not generated
+  *    and the variable constituting P_i is used instead of x_i
+  *
+  *  @param enums        The enumerators in the for expression
+  *  @param sugarBody    The body of the for expression; when wrapped in Yield,
+  *                      map/flatMap are used for the desugaring, otherwise foreach
+  */
+  def mkFor(enums: List[Tree], sugarBody: Tree)(implicit fresh: FreshNameCreator): Tree = {
+    val (mapName, flatMapName, body) = sugarBody match {
+      case Yield(tree) => (nme.map, nme.flatMap, tree)
+      case _           => (nme.foreach, nme.foreach, sugarBody)
+    }
+
+    /* make a closure pat => body.
+     * The closure is assigned a transparent position with the point at pos.point and
+     * the limits given by pat and body.
+     */
+    def makeClosure(pos: Position, pat: Tree, body: Tree): Tree = {
+      def wrapped  = wrappingPos(List(pat, body))
+      def splitpos = (if (pos != NoPosition) wrapped.withPoint(pos.point) else pos).makeTransparent
+      matchVarPattern(pat) match {
+        case Some((name, tpt)) =>
+          Function(
+            List(atPos(pat.pos) { ValDef(Modifiers(PARAM), name.toTermName, tpt, EmptyTree) }),
+            body) setPos splitpos
+        case None =>
+          atPos(splitpos) {
+            mkVisitor(List(CaseDef(pat, EmptyTree, body)), checkExhaustive = false)
+          }
+      }
+    }
+
+    /* Make an application  qual.meth(pat => body) positioned at `pos`.
+     */
+    def makeCombination(pos: Position, meth: TermName, qual: Tree, pat: Tree, body: Tree): Tree =
+      // ForAttachment on the method selection is used to differentiate
+      // result of for desugaring from a regular method call
+      Apply(Select(qual, meth) setPos qual.pos updateAttachment ForAttachment,
+        List(makeClosure(pos, pat, body))) setPos pos
+
+    /* If `pat` is not yet a `Bind` wrap it in one with a fresh name */
+    def makeBind(pat: Tree): Tree = pat match {
+      case Bind(_, _) => pat
+      case _ => Bind(freshTermName(), pat) setPos pat.pos
+    }
+
+    /* A reference to the name bound in Bind `pat`. */
+    def makeValue(pat: Tree): Tree = pat match {
+      case Bind(name, _) => Ident(name) setPos pat.pos.focus
+    }
+
+    /* The position of the closure that starts with generator at position `genpos`. */
+    def closurePos(genpos: Position) =
+      if (genpos == NoPosition) NoPosition
+      else {
+        val end = body.pos match {
+          case NoPosition => genpos.point
+          case bodypos => bodypos.end
+        }
+        rangePos(genpos.source, genpos.start, genpos.point, end)
+      }
+
+    enums match {
+      case (t @ ValFrom(pat, rhs)) :: Nil =>
+        makeCombination(closurePos(t.pos), mapName, rhs, pat, body)
+      case (t @ ValFrom(pat, rhs)) :: (rest @ (ValFrom(_, _) :: _)) =>
+        makeCombination(closurePos(t.pos), flatMapName, rhs, pat,
+                        mkFor(rest, sugarBody))
+      case (t @ ValFrom(pat, rhs)) :: Filter(test) :: rest =>
+        mkFor(ValFrom(pat, makeCombination(rhs.pos union test.pos, nme.withFilter, rhs, pat.duplicate, test)).setPos(t.pos) :: rest, sugarBody)
+      case (t @ ValFrom(pat, rhs)) :: rest =>
+        val valeqs = rest.take(definitions.MaxTupleArity - 1).takeWhile { ValEq.unapply(_).nonEmpty }
+        assert(!valeqs.isEmpty)
+        val rest1 = rest.drop(valeqs.length)
+        val pats = valeqs map { case ValEq(pat, _) => pat }
+        val rhss = valeqs map { case ValEq(_, rhs) => rhs }
+        val defpat1 = makeBind(pat)
+        val defpats = pats map makeBind
+        val pdefs = (defpats, rhss).zipped flatMap mkPatDef
+        val ids = (defpat1 :: defpats) map makeValue
+        val rhs1 = mkFor(
+          List(ValFrom(defpat1, rhs).setPos(t.pos)),
+          Yield(Block(pdefs, atPos(wrappingPos(ids)) { mkTuple(ids) }) setPos wrappingPos(pdefs)))
+        val allpats = (pat :: pats) map (_.duplicate)
+        val pos1 =
+          if (t.pos == NoPosition) NoPosition
+          else rangePos(t.pos.source, t.pos.start, t.pos.point, rhs1.pos.end)
+        val vfrom1 = ValFrom(atPos(wrappingPos(allpats)) { mkTuple(allpats) }, rhs1).setPos(pos1)
+        mkFor(vfrom1 :: rest1, sugarBody)
+      case _ =>
+        EmptyTree //may happen for erroneous input
+
+    }
+  }
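+  // Worked example of the rewrite rules above (roughly; positions, attachments and
+  // pattern binders omitted):
+  //   for (x <- xs; if x > 0; y <- ys) yield x + y
+  // becomes
+  //   xs.withFilter(x => x > 0).flatMap(x => ys.map(y => x + y))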
+
+  /** Create tree for pattern definition <val pat0 = rhs> */
+  def mkPatDef(pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator): List[ValDef] =
+    mkPatDef(Modifiers(0), pat, rhs)
+
+  /** Create tree for pattern definition <mods val pat0 = rhs> */
+  def mkPatDef(mods: Modifiers, pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator): List[ValDef] = matchVarPattern(pat) match {
+    case Some((name, tpt)) =>
+      List(atPos(pat.pos union rhs.pos) {
+        ValDef(mods, name.toTermName, tpt, rhs)
+      })
+
+    case None =>
+      //  in case there is exactly one variable x_1 in pattern
+      //  val/var p = e  ==>  val/var x_1 = e.match (case p => (x_1))
+      //
+      //  in case there are zero or more than one variables in pattern
+      //  val/var p = e  ==>  private synthetic val t$ = e.match (case p => (x_1, ..., x_N))
+      //                  val/var x_1 = t$._1
+      //                  ...
+      //                  val/var x_N = t$._N
+
+      val rhsUnchecked = mkUnchecked(rhs)
+
+      // TODO: clean this up -- there is too much information packed into mkPatDef's `pat` argument
+      // when it's a simple identifier (case Some((name, tpt)) -- above),
+      // pat should have the type ascription that was specified by the user
+      // however, in `case None` (here), we must be careful not to generate illegal pattern trees (such as `(a, b): Tuple2[Int, String]`)
+      // i.e., this must hold: pat1 match { case Typed(expr, tp) => assert(expr.isInstanceOf[Ident]) case _ => }
+      // if we encounter such an erroneous pattern, we strip off the type ascription from pat and propagate the type information to rhs
+      val (pat1, rhs1) = patvarTransformer.transform(pat) match {
+        // move the Typed ascription to the rhs
+        case Typed(expr, tpt) if !expr.isInstanceOf[Ident] =>
+          val rhsTypedUnchecked =
+            if (tpt.isEmpty) rhsUnchecked
+            else Typed(rhsUnchecked, tpt) setPos (rhs.pos union tpt.pos)
+          (expr, rhsTypedUnchecked)
+        case ok =>
+          (ok, rhsUnchecked)
+      }
+      val vars = getVariables(pat1)
+      val matchExpr = atPos((pat1.pos union rhs.pos).makeTransparent) {
+        Match(
+          rhs1,
+          List(
+            atPos(pat1.pos) {
+              CaseDef(pat1, EmptyTree, mkTuple(vars map (_._1) map Ident.apply))
+            }
+          ))
+      }
+      vars match {
+        case List((vname, tpt, pos)) =>
+          List(atPos(pat.pos union pos union rhs.pos) {
+            ValDef(mods, vname.toTermName, tpt, matchExpr)
+          })
+        case _ =>
+          val tmp = freshTermName()
+          val firstDef =
+            atPos(matchExpr.pos) {
+              ValDef(Modifiers(PrivateLocal | SYNTHETIC | ARTIFACT | (mods.flags & LAZY)),
+                     tmp, TypeTree(), matchExpr)
+            }
+          var cnt = 0
+          val restDefs = for ((vname, tpt, pos) <- vars) yield atPos(pos) {
+            cnt += 1
+            ValDef(mods, vname.toTermName, tpt, Select(Ident(tmp), newTermName("_" + cnt)))
+          }
+          firstDef :: restDefs
+      }
+  }
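+  // Illustrative expansion (fresh names like x$1 are compiler-generated):
+  //   val (a, b) = e
+  // becomes, roughly,
+  //   <synthetic> val x$1 = (e: @unchecked) match { case (a, b) => (a, b) }
+  //   val a = x$1._1
+  //   val b = x$1._2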
+
+  /** Create tree for for-comprehension generator <val pat0 <- rhs0> */
+  def mkGenerator(pos: Position, pat: Tree, valeq: Boolean, rhs: Tree)(implicit fresh: FreshNameCreator): Tree = {
+    val pat1 = patvarTransformer.transform(pat)
+    if (valeq) ValEq(pat1, rhs).setPos(pos)
+    else ValFrom(pat1, mkCheckIfRefutable(pat1, rhs)).setPos(pos)
+  }
+
+  def mkCheckIfRefutable(pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator) =
+    if (treeInfo.isVarPatternDeep(pat)) rhs
+    else {
+      val cases = List(
+        CaseDef(pat.duplicate, EmptyTree, Literal(Constant(true))),
+        CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
+      )
+      val visitor = mkVisitor(cases, checkExhaustive = false, nme.CHECK_IF_REFUTABLE_STRING)
+      atPos(rhs.pos)(Apply(Select(rhs, nme.withFilter), visitor :: Nil))
+    }
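+  // For example (sketch): in `for (Some(x) <- opts) ...` the pattern is refutable, so the
+  // generator's rhs becomes roughly
+  //   opts.withFilter { case Some(x) => true; case _ => false }
+  // which silently drops elements that do not match the pattern.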
+
+  /** If tree is a variable pattern, return Some("its name and type").
+   *  Otherwise return none */
+  private def matchVarPattern(tree: Tree): Option[(Name, Tree)] = {
+    def wildType(t: Tree): Option[Tree] = t match {
+      case Ident(x) if x.toTermName == nme.WILDCARD             => Some(TypeTree())
+      case Typed(Ident(x), tpt) if x.toTermName == nme.WILDCARD => Some(tpt)
+      case _                                                    => None
+    }
+    tree match {
+      case Ident(name)             => Some((name, TypeTree()))
+      case Bind(name, body)        => wildType(body) map (x => (name, x))
+      case Typed(Ident(name), tpt) => Some((name, tpt))
+      case _                       => None
+    }
+  }
+
+  /** Create visitor <x => x match cases> */
+  def mkVisitor(cases: List[CaseDef], checkExhaustive: Boolean, prefix: String = "x$")(implicit fresh: FreshNameCreator): Tree = {
+    val x   = freshTermName(prefix)
+    val id  = Ident(x)
+    val sel = if (checkExhaustive) id else mkUnchecked(id)
+    Function(List(mkSyntheticParam(x)), Match(sel, cases))
+  }
+
+  /** Traverse pattern and collect all variable names with their types in buffer
+   *  The variables keep their positions; whereas the pattern is converted to be
+   *  synthetic for all nodes that contain a variable position.
+   */
+  class GetVarTraverser extends Traverser {
+    val buf = new ListBuffer[(Name, Tree, Position)]
+
+    def namePos(tree: Tree, name: Name): Position =
+      if (!tree.pos.isRange || name.containsName(nme.raw.DOLLAR)) tree.pos.focus
+      else {
+        val start = tree.pos.start
+        val end = start + name.decode.length
+        rangePos(tree.pos.source, start, start, end)
+      }
+
+    override def traverse(tree: Tree): Unit = {
+      def seenName(name: Name)     = buf exists (_._1 == name)
+      def add(name: Name, t: Tree) = if (!seenName(name)) buf += ((name, t, namePos(tree, name)))
+      val bl = buf.length
+
+      tree match {
+        case Bind(nme.WILDCARD, _)          =>
+          super.traverse(tree)
+
+        case Bind(name, Typed(tree1, tpt))  =>
+          val newTree = if (treeInfo.mayBeTypePat(tpt)) TypeTree() else tpt.duplicate
+          add(name, newTree)
+          traverse(tree1)
+
+        case Bind(name, tree1)              =>
+          // can assume only name range as position, as otherwise might overlap
+          // with binds embedded in pattern tree1
+          add(name, TypeTree())
+          traverse(tree1)
+
+        case _ =>
+          super.traverse(tree)
+      }
+      if (buf.length > bl)
+        tree setPos tree.pos.makeTransparent
+    }
+    def apply(tree: Tree) = {
+      traverse(tree)
+      buf.toList
+    }
+  }
+
+  /** Returns list of all pattern variables, possibly with their types,
+   *  without duplicates
+   */
+  private def getVariables(tree: Tree): List[(Name, Tree, Position)] =
+    new GetVarTraverser apply tree
+
+  /** Convert all occurrences of (lower-case) variables in a pattern as follows:
+   *    x                  becomes      x @ _
+   *    x: T               becomes      x @ (_: T)
+   */
+  object patvarTransformer extends Transformer {
+    override def transform(tree: Tree): Tree = tree match {
+      case Ident(name) if (treeInfo.isVarPattern(tree) && name != nme.WILDCARD) =>
+        atPos(tree.pos)(Bind(name, atPos(tree.pos.focus) (Ident(nme.WILDCARD))))
+      case Typed(id @ Ident(name), tpt) if (treeInfo.isVarPattern(id) && name != nme.WILDCARD) =>
+        atPos(tree.pos.withPoint(id.pos.point)) {
+          Bind(name, atPos(tree.pos.withStart(tree.pos.point)) {
+            Typed(Ident(nme.WILDCARD), tpt)
+          })
+        }
+      case Apply(fn @ Apply(_, _), args) =>
+        treeCopy.Apply(tree, transform(fn), transformTrees(args))
+      case Apply(fn, args) =>
+        treeCopy.Apply(tree, fn, transformTrees(args))
+      case Typed(expr, tpt) =>
+        treeCopy.Typed(tree, transform(expr), tpt)
+      case Bind(name, body) =>
+        treeCopy.Bind(tree, name, transform(body))
+      case Alternative(_) | Star(_) =>
+        super.transform(tree)
+      case _ =>
+        tree
+    }
+  }
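+  // Illustrative effect on a nested pattern: `Some(x: Int)` is rewritten to
+  // `Some(x @ (_: Int))`, while backquoted or upper-case identifiers are left untouched.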
+
+  // annotate the expression with @unchecked
+  def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) {
+    // This can't be "Annotated(New(UncheckedClass), expr)" because annotations
+    // are very picky about things and it crashes the compiler with "unexpected new".
+    Annotated(New(scalaDot(tpnme.unchecked), Nil), expr)
+  }
+
+  def mkSyntheticParam(pname: TermName) =
+    ValDef(Modifiers(PARAM | SYNTHETIC), pname, TypeTree(), EmptyTree)
+
+  def mkCast(tree: Tree, pt: Type): Tree =
+    atPos(tree.pos)(mkAsInstanceOf(tree, pt, any = true, wrapInApply = false))
 }
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index fa4441e..7cf749c 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import Flags._
@@ -17,7 +18,7 @@ abstract class TreeInfo {
   val global: SymbolTable
 
   import global._
-  import definitions.{ isTupleSymbol, isVarArgsList, isCastSymbol, ThrowableClass, TupleClass, MacroContextClass, MacroContextPrefixType }
+  import definitions.{ isTupleSymbol, isVarArgsList, isCastSymbol, ThrowableClass, TupleClass, uncheckedStableClass, isBlackboxMacroBundleType, isWhiteboxContextType }
 
   /* Does not seem to be used. Not sure what it does anyway.
   def isOwnerDefinition(tree: Tree): Boolean = tree match {
@@ -65,6 +66,90 @@ abstract class TreeInfo {
       false
   }
 
+  /** Is `tree` a path, defined as follows? (Spec: 3.1 Paths)
+   *
+   * - The empty path ε (which cannot be written explicitly in user programs).
+   * - C.this, where C references a class.
+   * - p.x where p is a path and x is a stable member of p.
+   * - C.super.x or C.super[M].x where C references a class
+   *   and x references a stable member of the super class or designated parent class M of C.
+   *
+   * NOTE: Trees with errors are (mostly) excluded.
+   *
+   * Path ::= StableId | [id ‘.’] this
+   *
+   */
+  def isPath(tree: Tree, allowVolatile: Boolean): Boolean =
+    tree match {
+      // Super is not technically a path.
+      // However, syntactically, it can only occur nested in a Select.
+      // This gives a nicer definition of isStableIdentifier that's equivalent to the spec's.
+      // must consider Literal(_) a path for typedSingletonTypeTree
+      case EmptyTree | Literal(_) => true
+      case This(_) | Super(_, _)  => symOk(tree.symbol)
+      case _                      => isStableIdentifier(tree, allowVolatile)
+    }
+
+  /** Is `tree` a stable identifier, a path which ends in an identifier?
+   *
+   * StableId ::= id
+   *           | Path ‘.’ id
+   *           | [id ’.’] ‘super’ [‘[’ id ‘]’] ‘.’ id
+   */
+  def isStableIdentifier(tree: Tree, allowVolatile: Boolean): Boolean =
+    tree match {
+      case i @ Ident(_)    => isStableIdent(i, allowVolatile)
+      case Select(qual, _) => isStableMemberOf(tree.symbol, qual, allowVolatile) && isPath(qual, allowVolatile)
+      case Apply(Select(free @ Ident(_), nme.apply), _) if free.symbol.name endsWith nme.REIFY_FREE_VALUE_SUFFIX =>
+        // see a detailed explanation of this trick in `GenSymbols.reifyFreeTerm`
+        free.symbol.hasStableFlag && isPath(free, allowVolatile)
+      case _               => false
+    }
+
+  private def symOk(sym: Symbol) = sym != null && !sym.isError && sym != NoSymbol
+  private def typeOk(tp: Type)   =  tp != null && ! tp.isError
+
+  /** Assuming `sym` is a member of `tree`, is it a "stable member"?
+   *
+   * Stable members are packages or members introduced
+   * by object definitions or by value definitions of non-volatile types (§3.6).
+   */
+  def isStableMemberOf(sym: Symbol, tree: Tree, allowVolatile: Boolean): Boolean = (
+    symOk(sym)       && (!sym.isTerm   || (sym.isStable && (allowVolatile || !sym.hasVolatileType))) &&
+    typeOk(tree.tpe) && (allowVolatile || !hasVolatileType(tree)) && !definitions.isByNameParamType(tree.tpe)
+  )
+
+  private def isStableIdent(tree: Ident, allowVolatile: Boolean): Boolean = (
+       symOk(tree.symbol)
+    && tree.symbol.isStable
+    && !definitions.isByNameParamType(tree.tpe)
+    && (allowVolatile || !tree.symbol.hasVolatileType) // TODO SPEC: not required by spec
+  )
+
+  /** Is `tree`'s type volatile? (Ignored if its symbol has the @uncheckedStable annotation.)
+   */
+  def hasVolatileType(tree: Tree): Boolean =
+    symOk(tree.symbol) && tree.tpe.isVolatile && !tree.symbol.hasAnnotation(uncheckedStableClass)
+
+  /** Is `tree` either a non-volatile type,
+   *  or a path that does not include any of:
+   *   - a reference to a mutable variable/field
+   *   - a reference to a by-name parameter
+   *   - a member selection on a volatile type (Spec: 3.6 Volatile Types)?
+   *
+   * Such a tree is a suitable target for type selection.
+   */
+  def admitsTypeSelection(tree: Tree): Boolean = isPath(tree, allowVolatile = false)
+
+  /** Is `tree` admissible as a stable identifier pattern (8.1.5 Stable Identifier Patterns)?
+   *
+   * We disregard volatility, as it's irrelevant in patterns (SI-6815)
+   */
+  def isStableIdentifierPattern(tree: Tree): Boolean = isStableIdentifier(tree, allowVolatile = true)
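+  // Background example (illustrative): in a pattern, a stable identifier such as Nil or a
+  // backquoted `lo` is compared against the scrutinee with ==, whereas a plain lower-case
+  // identifier binds a fresh variable instead.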
+
+  // TODO SI-5304 tighten this up so we don't elide side effect in module loads
+  def isQualifierSafeToElide(tree: Tree): Boolean = isExprSafeToInline(tree)
+
   /** Is tree an expression which can be inlined without affecting program semantics?
    *
    *  Note that this is not called "isExprPure" since purity (lack of side-effects)
@@ -108,80 +193,70 @@ abstract class TreeInfo {
       false
   }
 
-  @deprecated("Use isExprSafeToInline instead", "2.10.0")
-  def isPureExpr(tree: Tree) = isExprSafeToInline(tree)
+  /** As if the name of the method didn't give it away,
+   *  this logic is designed around issuing helpful
+   *  warnings and minimizing spurious ones.  That means
+   *  don't reuse it for important matters like inlining
+   *  decisions.
+   */
+  def isPureExprForWarningPurposes(tree: Tree): Boolean = tree match {
+    case Typed(expr, _)                    => isPureExprForWarningPurposes(expr)
+    case EmptyTree | Literal(Constant(())) => false
+    case _                                 =>
+      def isWarnableRefTree = tree match {
+        case t: RefTree => isExprSafeToInline(t.qualifier) && t.symbol != null && t.symbol.isAccessor
+        case _          => false
+      }
+      def isWarnableSymbol = {
+        val sym = tree.symbol
+        (sym == null) || !(sym.isModule || sym.isLazy || definitions.isByNameParamType(sym.tpe_*)) || {
+          debuglog("'Pure' but side-effecting expression in statement position: " + tree)
+          false
+        }
+      }
 
-  def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] =
-    mapMethodParamsAndArgs(params, args)((param, arg) => ((param, arg)))
+      (    !tree.isErrorTyped
+        && (isExprSafeToInline(tree) || isWarnableRefTree)
+        && isWarnableSymbol
+      )
+  }
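+  // Rough examples: a bare literal such as 42 in statement position is flagged by this
+  // predicate, whereas Literal(Constant(())) and references to modules, lazy vals or
+  // by-name parameters are deliberately not warned about.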
 
   def mapMethodParamsAndArgs[R](params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => R): List[R] = {
     val b = List.newBuilder[R]
     foreachMethodParamAndArg(params, args)((param, arg) => b += f(param, arg))
-    b.result
+    b.result()
   }
   def foreachMethodParamAndArg(params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => Unit): Boolean = {
     val plen   = params.length
     val alen   = args.length
     def fail() = {
-      global.debugwarn(
-        "Mismatch trying to zip method parameters and argument list:\n" +
-        "  params = " + params + "\n" +
-        "    args = " + args + "\n"
-      )
+      global.devWarning(
+        s"""|Mismatch trying to zip method parameters and argument list:
+            |  params = $params
+            |    args = $args""".stripMargin)
       false
     }
 
     if (plen == alen) foreach2(params, args)(f)
-    else if (params.isEmpty) return fail
+    else if (params.isEmpty) return fail()
     else if (isVarArgsList(params)) {
       val plenInit = plen - 1
       if (alen == plenInit) {
         if (alen == 0) Nil        // avoid calling mismatched zip
         else foreach2(params.init, args)(f)
       }
-      else if (alen < plenInit) return fail
+      else if (alen < plenInit) return fail()
       else {
         foreach2(params.init, args take plenInit)(f)
         val remainingArgs = args drop plenInit
         foreach2(List.fill(remainingArgs.size)(params.last), remainingArgs)(f)
       }
     }
-    else return fail
+    else return fail()
 
     true
   }
 
-  /**
-   * Selects the correct parameter list when there are nested applications.
-   * Given Apply(fn, args), args might correspond to any of fn.symbol's parameter
-   * lists.  To choose the correct one before uncurry, we have to unwrap any
-   * applies: for instance Apply(fn @ Apply(Apply(_, _), _), args) implies args
-   * correspond to the third parameter list.
-   *
-   * The argument fn is the function part of the apply node being considered.
-   *
-   * Also accounts for varargs.
-   */
-  private def applyMethodParameters(fn: Tree): List[Symbol] = {
-    val depth  = dissectApplied(fn).applyDepth
-    // There could be applies which go beyond the parameter list(s),
-    // being applied to the result of the method call.
-    // !!! Note that this still doesn't seem correct, although it should
-    // be closer than what it replaced.
-    if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth)
-    else if (fn.symbol.paramss.isEmpty) Nil
-    else fn.symbol.paramss.last
-  }
-
-  def zipMethodParamsAndArgs(t: Tree): List[(Symbol, Tree)] = t match {
-    case Apply(fn, args) => zipMethodParamsAndArgs(applyMethodParameters(fn), args)
-    case _               => Nil
-  }
-  def foreachMethodParamAndArg(t: Tree)(f: (Symbol, Tree) => Unit): Unit = t match {
-    case Apply(fn, args) => foreachMethodParamAndArg(applyMethodParameters(fn), args)(f)
-    case _               =>
-  }
-
   /** Is symbol potentially a getter of a variable?
    */
   def mayBeVarGetter(sym: Symbol): Boolean = sym.info match {
@@ -196,7 +271,7 @@ abstract class TreeInfo {
   def isVariableOrGetter(tree: Tree) = {
     def sym       = tree.symbol
     def isVar     = sym.isVariable
-    def isGetter  = mayBeVarGetter(sym) && sym.owner.info.member(nme.getterToSetter(sym.name.toTermName)) != NoSymbol
+    def isGetter  = mayBeVarGetter(sym) && sym.owner.info.member(sym.setterName) != NoSymbol
 
     tree match {
       case Ident(_)                               => isVar
@@ -206,20 +281,24 @@ abstract class TreeInfo {
     }
   }
 
+  def isDefaultGetter(tree: Tree) = {
+    tree.symbol != null && tree.symbol.isDefaultGetter
+  }
+
   /** Is tree a self constructor call this(...)? I.e. a call to a constructor of the
    *  same object?
    */
-  def isSelfConstrCall(tree: Tree): Boolean = tree match {
-    case Applied(Ident(nme.CONSTRUCTOR), _, _) => true
-    case Applied(Select(This(_), nme.CONSTRUCTOR), _, _) => true
-    case _ => false
+  def isSelfConstrCall(tree: Tree): Boolean = dissectApplied(tree).core match {
+    case Ident(nme.CONSTRUCTOR)           => true
+    case Select(This(_), nme.CONSTRUCTOR) => true
+    case _                                => false
   }
 
   /** Is tree a super constructor call?
    */
-  def isSuperConstrCall(tree: Tree): Boolean = tree match {
-    case Applied(Select(Super(_, _), nme.CONSTRUCTOR), _, _) => true
-    case _ => false
+  def isSuperConstrCall(tree: Tree): Boolean = dissectApplied(tree).core match {
+    case Select(Super(_, _), nme.CONSTRUCTOR) => true
+    case _                                    => false
   }
 
   /**
@@ -325,9 +404,66 @@ abstract class TreeInfo {
     case x: Ident           => !x.isBackquoted && nme.isVariableName(x.name)
     case _                  => false
   }
-  def isDeprecatedIdentifier(tree: Tree): Boolean = tree match {
-    case x: Ident           => !x.isBackquoted && nme.isDeprecatedIdentifierName(x.name)
-    case _                  => false
+
+  /** Does the tree have a structure similar to typechecked trees? */
+  private[internal] def detectTypecheckedTree(tree: Tree) =
+    tree.hasExistingSymbol || tree.exists {
+      case dd: DefDef => dd.mods.hasAccessorFlag || dd.mods.isSynthetic // for untypechecked trees
+      case md: MemberDef => md.hasExistingSymbol
+      case _ => false
+    }
+
+  /** Recover template body to parsed state */
+  private[internal] def untypecheckedTemplBody(templ: Template) =
+    untypecheckedTreeBody(templ, templ.body)
+
+  /** Recover block body to parsed state */
+  private[internal] def untypecheckedBlockBody(block: Block) =
+    untypecheckedTreeBody(block, block.stats)
+
+  /** Recover tree body to parsed state */
+  private[internal] def untypecheckedTreeBody(tree: Tree, tbody: List[Tree]) = {
+    def filterBody(body: List[Tree]) = body filter {
+      case _: ValDef | _: TypeDef => true
+      // keep valdef or getter for val/var
+      case dd: DefDef if dd.mods.hasAccessorFlag => !nme.isSetterName(dd.name) && !tbody.exists {
+        case vd: ValDef => dd.name == vd.name.dropLocal
+        case _ => false
+      }
+      case md: MemberDef => !md.mods.isSynthetic
+      case tree => true
+    }
+
+    def lazyValDefRhs(body: Tree) =
+      body match {
+        case Block(List(Assign(_, rhs)), _) => rhs
+        case _ => body
+      }
+
+    def recoverBody(body: List[Tree]) = body map {
+      case vd @ ValDef(vmods, vname, _, vrhs) if nme.isLocalName(vname) =>
+        tbody find {
+          case dd: DefDef => dd.name == vname.dropLocal
+          case _ => false
+        } map { dd =>
+          val DefDef(dmods, dname, _, _, _, drhs) = dd
+          // get access flags from DefDef
+          val vdMods = (vmods &~ Flags.AccessFlags) | (dmods & Flags.AccessFlags).flags
+          // for most cases lazy body should be taken from accessor DefDef
+          val vdRhs = if (vmods.isLazy) lazyValDefRhs(drhs) else vrhs
+          copyValDef(vd)(mods = vdMods, name = dname, rhs = vdRhs)
+        } getOrElse (vd)
+      // for abstract and some lazy val/vars
+      case dd @ DefDef(mods, name, _, _, tpt, rhs) if mods.hasAccessorFlag =>
+        // transform getter mods to field
+        val vdMods = (if (!mods.hasStableFlag) mods | Flags.MUTABLE else mods &~ Flags.STABLE) &~ Flags.ACCESSOR
+        ValDef(vdMods, name, tpt, rhs)
+      case tr => tr
+    }
+
+    if (detectTypecheckedTree(tree)) {
+      recoverBody(filterBody(tbody))
+    } else tbody
   }
 
   /** The first constructor definitions in `stats` */
@@ -346,6 +482,9 @@ abstract class TreeInfo {
   def preSuperFields(stats: List[Tree]): List[ValDef] =
     stats collect { case vd: ValDef if isEarlyValDef(vd) => vd }
 
+  def hasUntypedPreSuperFields(stats: List[Tree]): Boolean =
+    preSuperFields(stats) exists (_.tpt.isEmpty)
+
   def isEarlyDef(tree: Tree) = tree match {
     case TypeDef(mods, _, _, _) => mods hasFlag PRESUPER
     case ValDef(mods, _, _, _) => mods hasFlag PRESUPER
@@ -357,11 +496,6 @@ abstract class TreeInfo {
     case _ => false
   }
 
-  def isEarlyTypeDef(tree: Tree) = tree match {
-    case TypeDef(mods, _, _, _) => mods hasFlag PRESUPER
-    case _ => false
-  }
-
   /** Is tpt a vararg type of the form T* ? */
   def isRepeatedParamType(tpt: Tree) = tpt match {
     case TypeTree()                                                          => definitions.isRepeatedParamType(tpt.tpe)
@@ -384,15 +518,17 @@ abstract class TreeInfo {
     case _                                                          => false
   }
 
+  /** Translates an Assign(_, _) node to AssignOrNamedArg(_, _) if
+   *  the lhs is a simple ident. Otherwise returns unchanged.
+   */
+  def assignmentToMaybeNamedArg(tree: Tree) = tree match {
+    case t @ Assign(id: Ident, rhs) => atPos(t.pos)(AssignOrNamedArg(id, rhs))
+    case t                          => t
+  }
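+  // For instance (illustrative): inside an argument list, `f(x = 1)` is parsed as
+  // Assign(Ident(x), Literal(Constant(1))); rewriting it to AssignOrNamedArg lets the typer
+  // decide later whether it is a named argument or an assignment to a variable `x` in scope.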
+
   /** Is name a left-associative operator? */
   def isLeftAssoc(operator: Name) = operator.nonEmpty && (operator.endChar != ':')
 
-  /** Is tree a `this` node which belongs to `enclClass`? */
-  def isSelf(tree: Tree, enclClass: Symbol): Boolean = tree match {
-    case This(_) => tree.symbol == enclClass
-    case _ => false
-  }
-
   /** a Match(Typed(_, tpt), _) must be translated into a switch if isSwitchAnnotation(tpt.tpe) */
   def isSwitchAnnotation(tpe: Type) = tpe hasAnnotation definitions.SwitchClass
 
@@ -408,8 +544,15 @@ abstract class TreeInfo {
   /** Is this argument node of the form <expr> : _* ?
    */
   def isWildcardStarArg(tree: Tree): Boolean = tree match {
-    case Typed(_, Ident(tpnme.WILDCARD_STAR)) => true
-    case _                                  => false
+    case WildcardStarArg(_) => true
+    case _                  => false
+  }
+
+  object WildcardStarArg {
+    def unapply(tree: Tree): Option[Tree] = tree match {
+      case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => Some(expr)
+      case _                                       => None
+    }
   }
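+  // Illustrative example: for the call `f(xs: _*)` the argument tree is
+  // Typed(Ident(xs), Ident(tpnme.WILDCARD_STAR)), so WildcardStarArg extracts Ident(xs).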
 
   /** If this tree has type parameters, those.  Otherwise Nil.
@@ -445,6 +588,8 @@ abstract class TreeInfo {
     case _                          => false
   }
 
+  private def hasNoSymbol(t: Tree) = t.symbol == null || t.symbol == NoSymbol
+
   /** Is this pattern node a synthetic catch-all case, added during PartialFunction synthesis before we know
     * whether the user provided cases are exhaustive. */
   def isSyntheticDefaultCase(cdef: CaseDef) = cdef match {
@@ -453,14 +598,19 @@ abstract class TreeInfo {
   }
 
   /** Does this CaseDef catch Throwable? */
-  def catchesThrowable(cdef: CaseDef) = catchesAllOf(cdef, ThrowableClass.tpe)
+  def catchesThrowable(cdef: CaseDef) = (
+    cdef.guard.isEmpty && (unbind(cdef.pat) match {
+      case Ident(nme.WILDCARD) => true
+      case i@Ident(name)       => hasNoSymbol(i)
+      case _                   => false
+    })
+  )
 
-  /** Does this CaseDef catch everything of a certain Type? */
-  def catchesAllOf(cdef: CaseDef, threshold: Type) =
-    isDefaultCase(cdef) || (cdef.guard.isEmpty && (unbind(cdef.pat) match {
-      case Typed(Ident(nme.WILDCARD), tpt)  => (tpt.tpe != null) && (threshold <:< tpt.tpe)
-      case _                                => false
-    }))
+  /** Is this CaseDef synthetically generated, e.g. by `MatchTranslation.translateTry`? */
+  def isSyntheticCase(cdef: CaseDef) = cdef.pat.exists {
+    case dt: DefTree => dt.symbol.isSynthetic
+    case _           => false
+  }
 
   /** Is this pattern node a catch-all or type-test pattern? */
   def isCatchCase(cdef: CaseDef) = cdef match {
@@ -485,7 +635,7 @@ abstract class TreeInfo {
 
       tp match {
         case TypeRef(pre, sym, args) =>
-          args.isEmpty && (sym.owner.isPackageClass || isSimple(pre))
+          args.isEmpty && (sym.isTopLevel || isSimple(pre))
         case NoPrefix =>
           true
         case _ =>
@@ -524,11 +674,12 @@ abstract class TreeInfo {
    * case Extractor(a @ (b, c)) => 2
    * }}}
    */
-  def effectivePatternArity(args: List[Tree]): Int = (args.map(unbind) match {
-    case Apply(fun, xs) :: Nil if isTupleSymbol(fun.symbol) => xs
-    case xs                                                 => xs
-  }).length
+  def effectivePatternArity(args: List[Tree]): Int = flattenedPatternArgs(args).length
 
+  def flattenedPatternArgs(args: List[Tree]): List[Tree] = args map unbind match {
+    case build.SyntacticTuple(xs) :: Nil => xs
+    case xs                              => xs
+  }
 
   // used in the symbols for labeldefs and valdefs emitted by the pattern matcher
   // tailcalls, cps,... use this flag combination to detect translated matches
@@ -538,6 +689,10 @@ abstract class TreeInfo {
   def isSynthCaseSymbol(sym: Symbol) = sym hasAllFlags SYNTH_CASE_FLAGS
   def hasSynthCaseSymbol(t: Tree)    = t.symbol != null && isSynthCaseSymbol(t.symbol)
 
+  def isTraitRef(tree: Tree): Boolean = {
+    val sym = if (tree.tpe != null) tree.tpe.typeSymbol else null
+    ((sym ne null) && sym.initialize.isTrait)
+  }
 
   /** Applications in Scala can have one of the following shapes:
    *
@@ -620,19 +775,6 @@ abstract class TreeInfo {
       }
       loop(tree)
     }
-
-    /** The depth of the nested applies: e.g. Apply(Apply(Apply(_, _), _), _)
-     *  has depth 3.  Continues through type applications (without counting them.)
-     */
-    def applyDepth: Int = {
-      def loop(tree: Tree): Int = tree match {
-        case Apply(fn, _)           => 1 + loop(fn)
-        case TypeApply(fn, _)       => loop(fn)
-        case AppliedTypeTree(fn, _) => loop(fn)
-        case _                      => 0
-      }
-      loop(tree)
-    }
   }
 
   /** Returns a wrapper that knows how to destructure and analyze applications.
@@ -649,6 +791,8 @@ abstract class TreeInfo {
    *  For advanced use, call `dissectApplied` explicitly and use its methods instead of pattern matching.
    */
   object Applied {
+    def apply(tree: Tree): Applied = new Applied(tree)
+
     def unapply(applied: Applied): Option[(Tree, List[Tree], List[List[Tree]])] =
       Some((applied.core, applied.targs, applied.argss))
 
@@ -660,13 +804,26 @@ abstract class TreeInfo {
    *  a class of module with given name (ignoring imports)
    */
   def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match {
-      case Import(_, _) :: xs               => firstDefinesClassOrObject(xs, name)
-      case Annotated(_, tree1) :: Nil       => firstDefinesClassOrObject(List(tree1), name)
-      case ModuleDef(_, `name`, _) :: Nil   => true
-      case ClassDef(_, `name`, _, _) :: Nil => true
-      case _                                => false
-    }
+    case Import(_, _) :: xs             => firstDefinesClassOrObject(xs, name)
+    case Annotated(_, tree1) :: _       => firstDefinesClassOrObject(List(tree1), name)
+    case ModuleDef(_, `name`, _) :: _   => true
+    case ClassDef(_, `name`, _, _) :: _ => true
+    case _                              => false
+  }
 
+  /** Locates the synthetic Apply node corresponding to an extractor's call to
+   *  unapply (unwrapping nested Applies) and returns the fun part of that Apply.
+   */
+  object Unapplied {
+    // Duplicated with `spliceApply`
+    def unapply(tree: Tree): Option[Tree] = tree match {
+      // SI-7868 Admit Select() to account for numeric widening, e.g. <unapplySelector>.toInt
+      case Apply(fun, (Ident(nme.SELECTOR_DUMMY) | Select(Ident(nme.SELECTOR_DUMMY), _)) :: Nil)
+                         => Some(fun)
+      case Apply(fun, _) => unapply(fun)
+      case _             => None
+    }
+  }
 
   /** Is this file the body of a compilation unit which should not
    *  have Predef imported?
@@ -721,12 +878,6 @@ abstract class TreeInfo {
       case _                        => false
     }
   }
-  object IsIf extends SeeThroughBlocks[Option[(Tree, Tree, Tree)]] {
-    protected def unapplyImpl(x: Tree) = x match {
-      case If(cond, thenp, elsep) => Some((cond, thenp, elsep))
-      case _                      => None
-    }
-  }
 
   def isApplyDynamicName(name: Name) = (name == nme.updateDynamic) || (name == nme.selectDynamic) || (name == nme.applyDynamic) || (name == nme.applyDynamicNamed)
 
@@ -750,8 +901,24 @@ abstract class TreeInfo {
     }
 
     def unapply(tree: Tree) = refPart(tree) match {
-      case ref: RefTree => Some((ref.qualifier.symbol, ref.symbol, dissectApplied(tree).targs))
-      case _            => None
+      case ref: RefTree => {
+        val qual = ref.qualifier
+        val isBundle = definitions.isMacroBundleType(qual.tpe)
+        val isBlackbox =
+          if (isBundle) isBlackboxMacroBundleType(qual.tpe)
+          else ref.symbol.paramss match {
+            case (c :: Nil) :: _ if isWhiteboxContextType(c.info) => false
+            case _ => true
+          }
+        val owner =
+          if (isBundle) qual.tpe.typeSymbol
+          else {
+            val qualSym = if (qual.hasSymbolField) qual.symbol else NoSymbol
+            if (qualSym.isModule) qualSym.moduleClass else qualSym
+          }
+        Some((isBundle, isBlackbox, owner, ref.symbol, dissectApplied(tree).targs))
+      }
+      case _  => None
     }
   }
 
@@ -761,4 +928,14 @@ abstract class TreeInfo {
       case tree: RefTree => true
       case _ => false
     })
+
+  def isMacroApplication(tree: Tree): Boolean = !tree.isDef && {
+    val sym = tree.symbol
+    sym != null && sym.isTermMacro && !sym.isErroneous
+  }
+
+  def isMacroApplicationOrBlock(tree: Tree): Boolean = tree match {
+    case Block(_, expr) => isMacroApplicationOrBlock(expr)
+    case tree => isMacroApplication(tree)
+  }
 }
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 53b9b1d..9dc4bae 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -3,17 +3,37 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import Flags._
-import scala.collection.mutable.{ListBuffer, LinkedHashSet}
+import pickling.PickleFormat._
+import scala.collection.{ mutable, immutable }
 import util.Statistics
 
-trait Trees extends api.Trees { self: SymbolTable =>
+trait Trees extends api.Trees {
+  self: SymbolTable =>
 
   private[scala] var nodeCount = 0
 
+  protected def treeLine(t: Tree): String =
+    if (t.pos.isDefined && t.pos.isRange) t.pos.lineContent.drop(t.pos.column - 1).take(t.pos.end - t.pos.start + 1)
+    else t.summaryString
+
+  protected def treeStatus(t: Tree, enclosingTree: Tree = null) = {
+    val parent = if (enclosingTree eq null) "        " else " P#%5s".format(enclosingTree.id)
+
+    "[L%4s%8s] #%-6s %-15s %-10s // %s".format(t.pos.line, parent, t.id, t.pos.show, t.shortClass, treeLine(t))
+  }
+  protected def treeSymStatus(t: Tree) = {
+    val line = if (t.pos.isDefined) "line %-4s".format(t.pos.line) else "         "
+    "#%-5s %s %-10s // %s".format(t.id, line, t.shortClass,
+      if (t.symbol ne NoSymbol) "(" + t.symbol.fullLocationString + ")"
+      else treeLine(t)
+    )
+  }
+
   abstract class Tree extends TreeContextApiImpl with Attachable with Product {
     val id = nodeCount // TODO: add to attachment?
     nodeCount += 1
@@ -24,18 +44,24 @@ trait Trees extends api.Trees { self: SymbolTable =>
 
     private[this] var rawtpe: Type = _
     final def tpe = rawtpe
-    def tpe_=(t: Type) = rawtpe = t
+    @deprecated("Use setType", "2.11.0") def tpe_=(t: Type): Unit = setType(t)
+
+    def clearType(): this.type = this setType null
     def setType(tp: Type): this.type = { rawtpe = tp; this }
     def defineType(tp: Type): this.type = setType(tp)
 
     def symbol: Symbol = null //!!!OPT!!! symbol is about 3% of hot compile times -- megamorphic dispatch?
     def symbol_=(sym: Symbol) { throw new UnsupportedOperationException("symbol_= inapplicable for " + this) }
     def setSymbol(sym: Symbol): this.type = { symbol = sym; this }
-    def hasSymbol = false
+    def hasSymbolField = false
+    @deprecated("Use hasSymbolField", "2.11.0") def hasSymbol = hasSymbolField
 
     def isDef = false
 
     def isEmpty = false
+    def nonEmpty = !isEmpty
+
+    def canHaveAttrs = true
 
     /** The canonical way to test if a Tree represents a term.
      */
@@ -62,7 +88,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
     private[scala] def copyAttrs(tree: Tree): this.type = {
       rawatt = tree.rawatt
       tpe = tree.tpe
-      if (hasSymbol) symbol = tree.symbol
+      if (hasSymbolField) symbol = tree.symbol
       this
     }
 
@@ -73,7 +99,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
       (duplicator transform this).asInstanceOf[this.type]
   }
 
-  abstract class TreeContextApiImpl extends TreeContextApi { this: Tree =>
+  abstract class TreeContextApiImpl extends TreeApi { this: Tree =>
 
     override def orElse(alt: => Tree) = if (!isEmpty) this else alt
 
@@ -132,11 +158,11 @@ trait Trees extends api.Trees { self: SymbolTable =>
       productIterator.toList flatMap subtrees
     }
 
-    override def freeTerms: List[FreeTermSymbol] = freeSyms[FreeTermSymbol](_.isFreeTerm, _.termSymbol)
-    override def freeTypes: List[FreeTypeSymbol] = freeSyms[FreeTypeSymbol](_.isFreeType, _.typeSymbol)
+    def freeTerms: List[FreeTermSymbol] = freeSyms[FreeTermSymbol](_.isFreeTerm, _.termSymbol)
+    def freeTypes: List[FreeTypeSymbol] = freeSyms[FreeTypeSymbol](_.isFreeType, _.typeSymbol)
 
     private def freeSyms[S <: Symbol](isFree: Symbol => Boolean, symOfType: Type => Symbol): List[S] = {
-      val s = scala.collection.mutable.LinkedHashSet[S]()
+      val s = mutable.LinkedHashSet[S]()
       def addIfFree(sym: Symbol): Unit = if (sym != null && isFree(sym)) s += sym.asInstanceOf[S]
       for (t <- this) {
         addIfFree(t.symbol)
@@ -149,17 +175,17 @@ trait Trees extends api.Trees { self: SymbolTable =>
       s.toList
     }
 
-    override def substituteSymbols(from: List[Symbol], to: List[Symbol]): Tree =
+    def substituteSymbols(from: List[Symbol], to: List[Symbol]): Tree =
       new TreeSymSubstituter(from, to)(this)
 
-    override def substituteTypes(from: List[Symbol], to: List[Type]): Tree =
+    def substituteTypes(from: List[Symbol], to: List[Type]): Tree =
       new TreeTypeSubstituter(from, to)(this)
 
-    override def substituteThis(clazz: Symbol, to: Tree): Tree =
+    def substituteThis(clazz: Symbol, to: Tree): Tree =
       new ThisSubstituter(clazz, to) transform this
 
-    def hasSymbolWhich(f: Symbol => Boolean) =
-      (symbol ne null) && (symbol ne NoSymbol) && f(symbol)
+    def hasExistingSymbol = (symbol ne null) && (symbol ne NoSymbol)
+    def hasSymbolWhich(f: Symbol => Boolean) = hasExistingSymbol && f(symbol)
 
     def isErroneous = (tpe ne null) && tpe.isErroneous
     def isTyped     = (tpe ne null) && !tpe.isErroneous
@@ -209,13 +235,16 @@ trait Trees extends api.Trees { self: SymbolTable =>
 
   trait TypTree extends Tree with TypTreeApi
 
-  abstract class SymTree extends Tree with SymTreeContextApi {
-    override def hasSymbol = true
+  abstract class SymTree extends Tree with SymTreeApi {
+    override def hasSymbolField = true
     override var symbol: Symbol = NoSymbol
   }
 
   trait NameTree extends Tree with NameTreeApi {
     def name: Name
+    def getterName: TermName = name.getterName
+    def setterName: TermName = name.setterName
+    def localName: TermName = name.localName
   }
 
   trait RefTree extends SymTree with NameTree with RefTreeApi {
@@ -223,19 +252,24 @@ trait Trees extends api.Trees { self: SymbolTable =>
     def name: Name
   }
 
+  object RefTree extends RefTreeExtractor {
+    def apply(qualifier: Tree, name: Name): RefTree = qualifier match {
+      case EmptyTree =>
+        Ident(name)
+      case qual if qual.isTerm =>
+        Select(qual, name)
+      case qual if qual.isType =>
+        assert(name.isTypeName, s"qual = $qual, name = $name")
+        SelectFromTypeTree(qual, name.toTypeName)
+    }
+    def unapply(refTree: RefTree): Option[(Tree, Name)] = Some((refTree.qualifier, refTree.name))
+  }
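+  // Usage sketch: RefTree(EmptyTree, TermName("x")) yields Ident(x),
+  // RefTree(q"a.b", TermName("c")) yields Select(a.b, c), and a type qualifier paired
+  // with a TypeName yields a SelectFromTypeTree.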
+
   abstract class DefTree extends SymTree with NameTree with DefTreeApi {
     def name: Name
     override def isDef = true
   }
 
-  case object EmptyTree extends TermTree {
-    val asList = List(this)
-    super.tpe_=(NoType)
-    override def tpe_=(t: Type) =
-      if (t != NoType) throw new UnsupportedOperationException("tpe_=("+t+") inapplicable for <empty>")
-    override def isEmpty = true
-  }
-
   abstract class MemberDef extends DefTree with MemberDefApi {
     def mods: Modifiers
     def keyword: String = this match {
@@ -262,37 +296,91 @@ trait Trees extends api.Trees { self: SymbolTable =>
 
   case class ClassDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], impl: Template)
        extends ImplDef with ClassDefApi
-  object ClassDef extends ClassDefExtractor
+  object ClassDef extends ClassDefExtractor {
+    /** @param sym       the class symbol
+     *  @param impl      the implementation template
+     *  @return          the class definition
+     */
+    def apply(sym: Symbol, impl: Template): ClassDef =
+      atPos(sym.pos) {
+        ClassDef(Modifiers(sym.flags),
+                 sym.name.toTypeName,
+                 sym.typeParams map TypeDef.apply,
+                 impl) setSymbol sym
+      }
+
+    /** @param sym       the class symbol
+     *  @param body      trees that constitute the body of the class
+     *  @return          the class definition
+     */
+    def apply(sym: Symbol, body: List[Tree]): ClassDef =
+      ClassDef(sym, Template(sym, body))
+  }
 
   case class ModuleDef(mods: Modifiers, name: TermName, impl: Template)
         extends ImplDef with ModuleDefApi
-  object ModuleDef extends ModuleDefExtractor
+  object ModuleDef extends ModuleDefExtractor {
+    /**
+     *  @param sym       the class symbol
+     *  @param impl      the implementation template
+     */
+    def apply(sym: Symbol, impl: Template): ModuleDef =
+      atPos(sym.pos) {
+        ModuleDef(Modifiers(sym.flags), sym.name.toTermName, impl) setSymbol sym
+      }
+  }
 
   abstract class ValOrDefDef extends MemberDef with ValOrDefDefApi {
-    def name: Name
+    def name: TermName
     def tpt: Tree
     def rhs: Tree
   }
 
+  object ValOrDefDef {
+    def unapply(tree: Tree): Option[(Modifiers, TermName, Tree, Tree)] = tree match {
+      case ValDef(mods, name, tpt, rhs)       => Some((mods, name, tpt, rhs))
+      case DefDef(mods, name, _, _, tpt, rhs) => Some((mods, name, tpt, rhs))
+      case _                                  => None
+    }
+  }
+
   case class ValDef(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree) extends ValOrDefDef with ValDefApi
-  object ValDef extends ValDefExtractor
+  object ValDef extends ValDefExtractor {
+    def apply(sym: Symbol): ValDef            = newValDef(sym, EmptyTree)()
+    def apply(sym: Symbol, rhs: Tree): ValDef = newValDef(sym, rhs)()
+  }
 
-  case class DefDef(mods: Modifiers, name: Name, tparams: List[TypeDef],
+  case class DefDef(mods: Modifiers, name: TermName, tparams: List[TypeDef],
                     vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) extends ValOrDefDef with DefDefApi
-  object DefDef extends DefDefExtractor
+  object DefDef extends DefDefExtractor {
+    def apply(sym: Symbol, rhs: Tree): DefDef                                                = newDefDef(sym, rhs)()
+    def apply(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef                  = newDefDef(sym, rhs)(vparamss = vparamss)
+    def apply(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef                               = newDefDef(sym, rhs)(mods = mods)
+    def apply(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef = newDefDef(sym, rhs)(mods = mods, vparamss = vparamss)
+    def apply(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef                          = newDefDef(sym, rhs(sym.info.paramss))()
+  }
 
   case class TypeDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], rhs: Tree)
        extends MemberDef with TypeDefApi
-  object TypeDef extends TypeDefExtractor
+  object TypeDef extends TypeDefExtractor {
+    /** A TypeDef node which defines abstract type or type parameter for given `sym` */
+    def apply(sym: Symbol): TypeDef            = newTypeDef(sym, TypeBoundsTree(sym))()
+    def apply(sym: Symbol, rhs: Tree): TypeDef = newTypeDef(sym, rhs)()
+  }
 
   case class LabelDef(name: TermName, params: List[Ident], rhs: Tree)
        extends DefTree with TermTree with LabelDefApi
-  object LabelDef extends LabelDefExtractor
+  object LabelDef extends LabelDefExtractor {
+    def apply(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef =
+      atPos(sym.pos) {
+        LabelDef(sym.name.toTermName, params map Ident, rhs) setSymbol sym
+      }
+  }
 
   case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int) extends ImportSelectorApi
   object ImportSelector extends ImportSelectorExtractor {
     val wild     = ImportSelector(nme.WILDCARD, -1, null, -1)
-    val wildList = List(wild)
+    val wildList = List(wild) // OPT This list is shared for performance.
   }
 
   case class Import(expr: Tree, selectors: List[ImportSelector])
@@ -416,6 +504,16 @@ trait Trees extends api.Trees { self: SymbolTable =>
 
   def ApplyConstructor(tpt: Tree, args: List[Tree]) = Apply(Select(New(tpt), nme.CONSTRUCTOR), args)
 
+  // Creates a constructor call from the constructor symbol.  This is
+  // to avoid winding up with an OverloadedType for the constructor call.
+  def NewFromConstructor(constructor: Symbol, args: Tree*) = {
+    assert(constructor.isConstructor, constructor)
+    val instance = New(TypeTree(constructor.owner.tpe))
+    val init     = Select(instance, nme.CONSTRUCTOR) setSymbol constructor
+
+    Apply(init, args.toList)
+  }
+
   case class ApplyDynamic(qual: Tree, args: List[Tree]) extends SymTree with TermTree
 
   case class Super(qual: Tree, mix: TypeName) extends TermTree with SuperApi {
@@ -436,9 +534,9 @@ trait Trees extends api.Trees { self: SymbolTable =>
   }
   object Select extends SelectExtractor
 
-  case class Ident(name: Name) extends RefTree with IdentContextApi {
+  case class Ident(name: Name) extends RefTree with IdentApi {
     def qualifier: Tree = EmptyTree
-    def isBackquoted = this.attachments.get[BackquotedIdentifierAttachment.type].isDefined
+    def isBackquoted = this.hasAttachment[BackquotedIdentifierAttachment.type]
   }
   object Ident extends IdentExtractor
 
@@ -489,11 +587,11 @@ trait Trees extends api.Trees { self: SymbolTable =>
        extends TypTree with TypeBoundsTreeApi
   object TypeBoundsTree extends TypeBoundsTreeExtractor
 
-  case class ExistentialTypeTree(tpt: Tree, whereClauses: List[Tree])
+  case class ExistentialTypeTree(tpt: Tree, whereClauses: List[MemberDef])
        extends TypTree with ExistentialTypeTreeApi
   object ExistentialTypeTree extends ExistentialTypeTreeExtractor
 
-  case class TypeTree() extends TypTree with TypeTreeContextApi {
+  case class TypeTree() extends TypTree with TypeTreeApi {
     private var orig: Tree = null
     /** Was this type tree originally empty? That is, does it now contain
       * an inferred type that must be forgotten in `resetAttrs` to
@@ -511,7 +609,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
         case t => t
       }
 
-      orig = followOriginal(tree); setPos(tree.pos);
+      orig = followOriginal(tree); setPos(tree.pos)
       this
     }
 
@@ -536,8 +634,16 @@ trait Trees extends api.Trees { self: SymbolTable =>
   object TypeTree extends TypeTreeExtractor
 
   def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
+  private def TypeTreeMemberType(sym: Symbol): TypeTree = {
+    // Needed for pos/t4970*.scala. See SI-7853
+    val resType = (if (sym.isLocalToBlock) sym.tpe else (sym.owner.thisType memberType sym)).finalResultType
+    atPos(sym.pos.focus)(TypeTree(resType))
+  }
 
-  override type TreeCopier <: InternalTreeCopierOps
+  def TypeBoundsTree(bounds: TypeBounds): TypeBoundsTree = TypeBoundsTree(TypeTree(bounds.lo), TypeTree(bounds.hi))
+  def TypeBoundsTree(sym: Symbol): TypeBoundsTree        = atPos(sym.pos)(TypeBoundsTree(sym.info.bounds))
+
+  override type TreeCopier >: Null <: InternalTreeCopierOps
   abstract class InternalTreeCopierOps extends TreeCopierOps {
     def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]): ApplyDynamic
     def ArrayValue(tree: Tree, elemtpt: Tree, trees: List[Tree]): ArrayValue
@@ -603,6 +709,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
         case _: ApplyToImplicitArgs => new ApplyToImplicitArgs(fun, args)
         case _: ApplyImplicitView => new ApplyImplicitView(fun, args)
         // TODO: ApplyConstructor ???
+        case self.pendingSuperCall => self.pendingSuperCall
         case _ => new Apply(fun, args)
       }).copyAttrs(tree)
     def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]) =
@@ -615,6 +722,8 @@ trait Trees extends api.Trees { self: SymbolTable =>
       new Select(qualifier, selector).copyAttrs(tree)
     def Ident(tree: Tree, name: Name) =
       new Ident(name) copyAttrs tree
+    def RefTree(tree: Tree, qualifier: Tree, selector: Name) =
+      self.RefTree(qualifier, selector) copyAttrs tree
     def ReferenceToBoxed(tree: Tree, idt: Ident) =
       new ReferenceToBoxed(idt).copyAttrs(tree)
     def Literal(tree: Tree, value: Constant) =
@@ -633,7 +742,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
       new AppliedTypeTree(tpt, args).copyAttrs(tree)
     def TypeBoundsTree(tree: Tree, lo: Tree, hi: Tree) =
       new TypeBoundsTree(lo, hi).copyAttrs(tree)
-    def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]) =
+    def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[MemberDef]) =
       new ExistentialTypeTree(tpt, whereClauses).copyAttrs(tree)
   }
 
@@ -805,6 +914,11 @@ trait Trees extends api.Trees { self: SymbolTable =>
       if name0 == name => t
       case _ => treeCopy.Ident(tree, name)
     }
+    def RefTree(tree: Tree, qualifier: Tree, selector: Name) = tree match {
+      case t @ Select(qualifier0, selector0)
+      if (qualifier0 == qualifier) && (selector0 == selector) => t
+      case _ => treeCopy.RefTree(tree, qualifier, selector)
+    }
     def ReferenceToBoxed(tree: Tree, idt: Ident) = tree match {
       case t @ ReferenceToBoxed(idt0)
       if (idt0 == idt) => t
@@ -849,7 +963,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
       if (lo0 == lo) && (hi0 == hi) => t
       case _ => treeCopy.TypeBoundsTree(tree, lo, hi)
     }
-    def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]) = tree match {
+    def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[MemberDef]) = tree match {
       case t @ ExistentialTypeTree(tpt0, whereClauses0)
       if (tpt0 == tpt) && (whereClauses0 == whereClauses) => t
       case _ => treeCopy.ExistentialTypeTree(tree, tpt, whereClauses)
@@ -866,7 +980,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
   /** Is the tree Predef, scala.Predef, or _root_.scala.Predef?
    */
   def isReferenceToPredef(t: Tree) = isReferenceToScalaMember(t, nme.Predef)
-  def isReferenceToAnyVal(t: Tree) = isReferenceToScalaMember(t, tpnme.AnyVal)
 
   // --- modifiers implementation ---------------------------------------
 
@@ -912,6 +1025,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
       if (flags1 == flags) this
       else Modifiers(flags1, privateWithin, annotations) setPositions positions
     }
+    def | (flag: Int): Modifiers = this | flag.toLong
     def | (flag: Long): Modifiers = {
       val flags1 = flags | flag
       if (flags1 == flags) this
@@ -924,102 +1038,83 @@ trait Trees extends api.Trees { self: SymbolTable =>
     def withPosition(flag: Long, position: Position) =
       copy() setPositions positions + (flag -> position)
 
-    override def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers =
-      Modifiers(flags, privateWithin, f(annotations)) setPositions positions
+    override def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers = {
+      val newAnns = f(annotations)
+      if (annotations == newAnns) this
+      else Modifiers(flags, privateWithin, newAnns) setPositions positions
+    }
 
     override def toString = "Modifiers(%s, %s, %s)".format(flagString, annotations mkString ", ", positions)
   }
 
-  object Modifiers extends ModifiersCreator
+  object Modifiers extends ModifiersExtractor
 
   implicit val ModifiersTag = ClassTag[Modifiers](classOf[Modifiers])
 
   // ---- values and creators ---------------------------------------
 
-  /** @param sym       the class symbol
-   *  @return          the implementation template
-   */
-  def ClassDef(sym: Symbol, impl: Template): ClassDef =
-    atPos(sym.pos) {
-      ClassDef(Modifiers(sym.flags),
-               sym.name.toTypeName,
-               sym.typeParams map TypeDef,
-               impl) setSymbol sym
-    }
-
-  /**
-   *  @param sym       the class symbol
-   *  @param impl      the implementation template
+  /** @param sym       the template's symbol
+   *  @param body      trees that constitute the body of the template
+   *  @return          the template
    */
-  def ModuleDef(sym: Symbol, impl: Template): ModuleDef =
-    atPos(sym.pos) {
-      ModuleDef(Modifiers(sym.flags), sym.name.toTermName, impl) setSymbol sym
-    }
-
-  def ValDef(sym: Symbol, rhs: Tree): ValDef =
+  def Template(sym: Symbol, body: List[Tree]): Template = {
     atPos(sym.pos) {
-      ValDef(Modifiers(sym.flags), sym.name.toTermName,
-             TypeTree(sym.tpe) setPos sym.pos.focus,
-             rhs) setSymbol sym
+      Template(sym.info.parents map TypeTree,
+               if (sym.thisSym == sym) noSelfType else ValDef(sym),
+               body)
     }
-
-  def ValDef(sym: Symbol): ValDef = ValDef(sym, EmptyTree)
-
-  object emptyValDef extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) {
-    override def isEmpty = true
-    super.setPos(NoPosition)
-    override def setPos(pos: Position) = { assert(false); this }
   }
 
-  def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
-    atPos(sym.pos) {
-      assert(sym != NoSymbol)
-      DefDef(mods,
-             sym.name.toTermName,
-             sym.typeParams map TypeDef,
-             vparamss,
-             TypeTree(sym.tpe.finalResultType) setPos sym.pos.focus,
-             rhs) setSymbol sym
-    }
-
-  def DefDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
-    DefDef(sym, Modifiers(sym.flags), vparamss, rhs)
-
-  def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef =
-    DefDef(sym, mods, mapParamss(sym)(ValDef), rhs)
-
-  /** A DefDef with original trees attached to the TypeTree of each parameter */
-  def DefDef(sym: Symbol, mods: Modifiers, originalParamTpts: Symbol => Tree, rhs: Tree): DefDef = {
-    val paramms = mapParamss(sym){ sym =>
-      val vd = ValDef(sym, EmptyTree)
-      (vd.tpt : @unchecked) match {
-        case tt: TypeTree => tt setOriginal (originalParamTpts(sym) setPos sym.pos.focus)
+  trait CannotHaveAttrs extends Tree {
+    super.setPos(NoPosition)
+    super.setType(NoType)
+
+    override def canHaveAttrs = false
+    override def setPos(pos: Position) = { requireLegal(pos, NoPosition, "pos"); this }
+    override def pos_=(pos: Position) = setPos(pos)
+    override def setType(t: Type) = { requireLegal(t, NoType, "tpe"); this }
+    override def tpe_=(t: Type) = setType(t)
+
+    private def requireLegal(value: Any, allowed: Any, what: String) = (
+      if (value != allowed) {
+        log(s"can't set $what for $self to value other than $allowed")
+        if (settings.debug && settings.developer)
+          (new Throwable).printStackTrace
       }
-      vd
-    }
-    DefDef(sym, mods, paramms, rhs)
+    )
   }
 
-  def DefDef(sym: Symbol, rhs: Tree): DefDef =
-    DefDef(sym, Modifiers(sym.flags), rhs)
-
-  def DefDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef =
-    DefDef(sym, rhs(sym.info.paramss))
-
-  /** A TypeDef node which defines given `sym` with given tight hand side `rhs`. */
-  def TypeDef(sym: Symbol, rhs: Tree): TypeDef =
-    atPos(sym.pos) {
-      TypeDef(Modifiers(sym.flags), sym.name.toTypeName, sym.typeParams map TypeDef, rhs) setSymbol sym
-    }
-
-  /** A TypeDef node which defines abstract type or type parameter for given `sym` */
-  def TypeDef(sym: Symbol): TypeDef =
-    TypeDef(sym, TypeBoundsTree(TypeTree(sym.info.bounds.lo), TypeTree(sym.info.bounds.hi)))
-
-  def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef =
-    atPos(sym.pos) {
-      LabelDef(sym.name.toTermName, params map Ident, rhs) setSymbol sym
-    }
+  case object EmptyTree extends TermTree with CannotHaveAttrs { override def isEmpty = true; val asList = List(this) }
+  object noSelfType extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) with CannotHaveAttrs
+  object pendingSuperCall extends Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List()) with CannotHaveAttrs
+
+  @deprecated("Use `noSelfType` instead", "2.11.0") lazy val emptyValDef = noSelfType
+
+  def newValDef(sym: Symbol, rhs: Tree)(
+    mods: Modifiers = Modifiers(sym.flags),
+    name: TermName  = sym.name.toTermName,
+    tpt: Tree       = TypeTreeMemberType(sym)
+  ): ValDef = (
+    atPos(sym.pos)(ValDef(mods, name, tpt, rhs)) setSymbol sym
+  )
+
+  def newDefDef(sym: Symbol, rhs: Tree)(
+    mods: Modifiers              = Modifiers(sym.flags),
+    name: TermName               = sym.name.toTermName,
+    tparams: List[TypeDef]       = sym.typeParams map TypeDef.apply,
+    vparamss: List[List[ValDef]] = mapParamss(sym)(ValDef.apply),
+    tpt: Tree                    = TypeTreeMemberType(sym)
+  ): DefDef = (
+    atPos(sym.pos)(DefDef(mods, name, tparams, vparamss, tpt, rhs)) setSymbol sym
+  )
+
+  def newTypeDef(sym: Symbol, rhs: Tree)(
+    mods: Modifiers        = Modifiers(sym.flags),
+    name: TypeName         = sym.name.toTypeName,
+    tparams: List[TypeDef] = sym.typeParams map TypeDef.apply
+  ): TypeDef = (
+    atPos(sym.pos)(TypeDef(mods, name, tparams, rhs)) setSymbol sym
+  )
 
   /** casedef shorthand */
   def CaseDef(pat: Tree, body: Tree): CaseDef =
@@ -1050,6 +1145,9 @@ trait Trees extends api.Trees { self: SymbolTable =>
   def New(tpe: Type, args: Tree*): Tree =
     ApplyConstructor(TypeTree(tpe), args.toList)
 
+  def New(tpe: Type, argss: List[List[Tree]]): Tree =
+    New(TypeTree(tpe), argss)
+
   def New(sym: Symbol, args: Tree*): Tree =
     New(sym.tpe, args: _*)
 
@@ -1094,113 +1192,136 @@ trait Trees extends api.Trees { self: SymbolTable =>
 
   override protected def itraverse(traverser: Traverser, tree: Tree): Unit = {
     import traverser._
-    tree match {
-      case EmptyTree =>
-        ;
-      case PackageDef(pid, stats) =>
-        traverse(pid)
-        atOwner(mclass(tree.symbol)) {
-          traverseTrees(stats)
-        }
-      case ClassDef(mods, name, tparams, impl) =>
-        atOwner(tree.symbol) {
-          traverseTrees(mods.annotations); traverseTrees(tparams); traverse(impl)
-        }
-      case ModuleDef(mods, name, impl) =>
-        atOwner(mclass(tree.symbol)) {
-          traverseTrees(mods.annotations); traverse(impl)
-        }
-      case ValDef(mods, name, tpt, rhs) =>
-        atOwner(tree.symbol) {
-          traverseTrees(mods.annotations); traverse(tpt); traverse(rhs)
-        }
-      case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
-        atOwner(tree.symbol) {
-          traverseTrees(mods.annotations); traverseTrees(tparams); traverseTreess(vparamss); traverse(tpt); traverse(rhs)
-        }
-      case TypeDef(mods, name, tparams, rhs) =>
-        atOwner(tree.symbol) {
-          traverseTrees(mods.annotations); traverseTrees(tparams); traverse(rhs)
-        }
+
+    def traverseMemberDef(md: MemberDef, owner: Symbol): Unit = atOwner(owner) {
+      traverseModifiers(md.mods)
+      traverseName(md.name)
+      md match {
+        case ClassDef(_, _, tparams, impl)             => traverseParams(tparams) ; traverse(impl)
+        case ModuleDef(_, _, impl)                     => traverse(impl)
+        case ValDef(_, _, tpt, rhs)                    => traverseTypeAscription(tpt) ; traverse(rhs)
+        case TypeDef(_, _, tparams, rhs)               => traverseParams(tparams) ; traverse(rhs)
+        case DefDef(_, _, tparams, vparamss, tpt, rhs) =>
+          traverseParams(tparams)
+          traverseParamss(vparamss)
+          traverseTypeAscription(tpt)
+          traverse(rhs)
+      }
+    }
+    def traverseComponents(): Unit = tree match {
       case LabelDef(name, params, rhs) =>
-        traverseTrees(params); traverse(rhs)
+        traverseName(name)
+        traverseParams(params)
+        traverse(rhs)
       case Import(expr, selectors) =>
         traverse(expr)
+        selectors foreach traverseImportSelector
       case Annotated(annot, arg) =>
-        traverse(annot); traverse(arg)
+        traverse(annot)
+        traverse(arg)
       case Template(parents, self, body) =>
-        traverseTrees(parents)
-        if (!self.isEmpty) traverse(self)
+        traverseParents(parents)
+        traverseSelfType(self)
         traverseStats(body, tree.symbol)
       case Block(stats, expr) =>
-        traverseTrees(stats); traverse(expr)
+        traverseTrees(stats)
+        traverse(expr)
       case CaseDef(pat, guard, body) =>
-        traverse(pat); traverse(guard); traverse(body)
+        traversePattern(pat)
+        traverseGuard(guard)
+        traverse(body)
       case Alternative(trees) =>
         traverseTrees(trees)
       case Star(elem) =>
         traverse(elem)
       case Bind(name, body) =>
+        traverseName(name)
         traverse(body)
       case UnApply(fun, args) =>
-        traverse(fun); traverseTrees(args)
+        traverse(fun)
+        traverseTrees(args)
       case ArrayValue(elemtpt, trees) =>
-        traverse(elemtpt); traverseTrees(trees)
-      case Function(vparams, body) =>
-        atOwner(tree.symbol) {
-          traverseTrees(vparams); traverse(body)
-        }
+        traverse(elemtpt)
+        traverseTrees(trees)
       case Assign(lhs, rhs) =>
-        traverse(lhs); traverse(rhs)
+        traverse(lhs)
+        traverse(rhs)
       case AssignOrNamedArg(lhs, rhs) =>
-        traverse(lhs); traverse(rhs)
+        traverse(lhs)
+        traverse(rhs)
       case If(cond, thenp, elsep) =>
-        traverse(cond); traverse(thenp); traverse(elsep)
+        traverse(cond)
+        traverse(thenp)
+        traverse(elsep)
       case Match(selector, cases) =>
-        traverse(selector); traverseTrees(cases)
+        traverse(selector)
+        traverseCases(cases)
       case Return(expr) =>
         traverse(expr)
       case Try(block, catches, finalizer) =>
-        traverse(block); traverseTrees(catches); traverse(finalizer)
+        traverse(block)
+        traverseCases(catches)
+        traverse(finalizer)
       case Throw(expr) =>
         traverse(expr)
       case New(tpt) =>
         traverse(tpt)
       case Typed(expr, tpt) =>
-        traverse(expr); traverse(tpt)
+        traverse(expr)
+        traverseTypeAscription(tpt)
       case TypeApply(fun, args) =>
-        traverse(fun); traverseTrees(args)
+        traverse(fun)
+        traverseTypeArgs(args)
       case Apply(fun, args) =>
-        traverse(fun); traverseTrees(args)
+        traverse(fun)
+        traverseTrees(args)
       case ApplyDynamic(qual, args) =>
-        traverse(qual); traverseTrees(args)
-      case Super(qual, _) =>
         traverse(qual)
-      case This(_) =>
-        ;
+        traverseTrees(args)
+      case Super(qual, mix) =>
+        traverse(qual)
+        traverseName(mix)
+      case This(qual) =>
+        traverseName(qual)
       case Select(qualifier, selector) =>
         traverse(qualifier)
-      case Ident(_) =>
-        ;
+        traverseName(selector)
+      case Ident(name) =>
+        traverseName(name)
       case ReferenceToBoxed(idt) =>
         traverse(idt)
-      case Literal(_) =>
-        ;
+      case Literal(const) =>
+        traverseConstant(const)
       case TypeTree() =>
         ;
       case SingletonTypeTree(ref) =>
         traverse(ref)
       case SelectFromTypeTree(qualifier, selector) =>
         traverse(qualifier)
+        traverseName(selector)
       case CompoundTypeTree(templ) =>
         traverse(templ)
       case AppliedTypeTree(tpt, args) =>
-        traverse(tpt); traverseTrees(args)
+        traverse(tpt)
+        traverseTypeArgs(args)
       case TypeBoundsTree(lo, hi) =>
-        traverse(lo); traverse(hi)
+        traverse(lo)
+        traverse(hi)
       case ExistentialTypeTree(tpt, whereClauses) =>
-        traverse(tpt); traverseTrees(whereClauses)
-      case _ => xtraverse(traverser, tree)
+        traverse(tpt)
+        traverseTrees(whereClauses)
+      case _ =>
+        xtraverse(traverser, tree)
+    }
+
+    if (tree.canHaveAttrs) {
+      tree match {
+        case PackageDef(pid, stats)  => traverse(pid) ; traverseStats(stats, mclass(tree.symbol))
+        case md: ModuleDef           => traverseMemberDef(md, mclass(tree.symbol))
+        case md: MemberDef           => traverseMemberDef(md, tree.symbol)
+        case Function(vparams, body) => atOwner(tree.symbol) { traverseParams(vparams) ; traverse(body) }
+        case _                       => traverseComponents()
+      }
     }
   }
 
@@ -1307,7 +1428,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
       case CompoundTypeTree(templ) =>
         treeCopy.CompoundTypeTree(tree, transformTemplate(templ))
       case ExistentialTypeTree(tpt, whereClauses) =>
-        treeCopy.ExistentialTypeTree(tree, transform(tpt), transformTrees(whereClauses))
+        treeCopy.ExistentialTypeTree(tree, transform(tpt), transformMemberDefs(whereClauses))
       case Return(expr) =>
         treeCopy.Return(tree, transform(expr))
       case Alternative(trees) =>
@@ -1315,7 +1436,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
       case Star(elem) =>
         treeCopy.Star(tree, transform(elem))
       case UnApply(fun, args) =>
-        treeCopy.UnApply(tree, fun, transformTrees(args)) // bq: see test/.../unapplyContexts2.scala
+        treeCopy.UnApply(tree, transform(fun), transformTrees(args)) // bq: see test/.../unapplyContexts2.scala
       case ArrayValue(elemtpt, trees) =>
         treeCopy.ArrayValue(tree, transform(elemtpt), transformTrees(trees))
       case ApplyDynamic(qual, args) =>
@@ -1331,9 +1452,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
 
   // --- specific traversers and transformers
 
-  @deprecated("Moved to tree.duplicate", "2.10.0")
-  protected[scala] def duplicateTree(tree: Tree): Tree = tree.duplicate
-
   class ForeachPartialTreeTraverser(pf: PartialFunction[Tree, Tree]) extends Traverser {
     override def traverse(tree: Tree) {
       val t = if (pf isDefinedAt tree) pf(tree) else tree
@@ -1372,6 +1490,16 @@ trait Trees extends api.Trees { self: SymbolTable =>
       if (tree eq orig) super.transform(tree)
       else tree
   }
+
+  /** A transformer that replaces tree `from` with tree `to` in a given tree */
+  class TreeReplacer(from: Tree, to: Tree, positionAware: Boolean) extends Transformer {
+    override def transform(t: Tree): Tree = {
+      if (t == from) to
+      else if (!positionAware || (t.pos includes from.pos) || t.pos.isTransparent) super.transform(t)
+      else t
+    }
+  }
+
   // Create a readable string describing a substitution.
   private def substituterString(fromStr: String, toStr: String, from: List[Any], to: List[Any]): String = {
     "subst[%s, %s](%s)".format(fromStr, toStr, (from, to).zipped map (_ + " -> " + _) mkString ", ")
@@ -1387,7 +1515,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
         def subst(from: List[Symbol], to: List[Tree]): Tree =
           if (from.isEmpty) tree
           else if (tree.symbol == from.head) to.head.shallowDuplicate // TODO: does it ever make sense *not* to perform a shallowDuplicate on `to.head`?
-          else subst(from.tail, to.tail);
+          else subst(from.tail, to.tail)
         subst(from, to)
       case _ =>
         super.transform(tree)
@@ -1400,7 +1528,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
   class ThisSubstituter(clazz: Symbol, to: => Tree) extends Transformer {
     val newtpe = to.tpe
     override def transform(tree: Tree) = {
-      if (tree.tpe ne null) tree.tpe = tree.tpe.substThis(clazz, newtpe)
+      tree modifyType (_.substThis(clazz, newtpe))
       tree match {
         case This(_) if tree.symbol == clazz => to
         case _ => super.transform(tree)
@@ -1410,8 +1538,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
 
   class TypeMapTreeSubstituter(val typeMap: TypeMap) extends Traverser {
     override def traverse(tree: Tree) {
-      if (tree.tpe ne null)
-        tree.tpe = typeMap(tree.tpe)
+      tree modifyType typeMap
       if (tree.isDef)
         tree.symbol modifyInfo typeMap
 
@@ -1448,9 +1575,9 @@ trait Trees extends api.Trees { self: SymbolTable =>
           if (tree.symbol == from.head) tree setSymbol to.head
           else subst(from.tail, to.tail)
       }
+      tree modifyType symSubst
 
-      if (tree.tpe ne null) tree.tpe = symSubst(tree.tpe)
-      if (tree.hasSymbol) {
+      if (tree.hasSymbolField) {
         subst(from, to)
         tree match {
           case _: DefTree =>
@@ -1492,7 +1619,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
   }
 
   class FilterTreeTraverser(p: Tree => Boolean) extends Traverser {
-    val hits = new ListBuffer[Tree]
+    val hits = mutable.ListBuffer[Tree]()
     override def traverse(t: Tree) {
       if (p(t)) hits += t
       super.traverse(t)
@@ -1500,7 +1627,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
   }
 
   class CollectTreeTraverser[T](pf: PartialFunction[Tree, T]) extends Traverser {
-    val results = new ListBuffer[T]
+    val results = mutable.ListBuffer[T]()
     override def traverse(t: Tree) {
       if (pf.isDefinedAt(t)) results += pf(t)
       super.traverse(t)
@@ -1517,6 +1644,15 @@ trait Trees extends api.Trees { self: SymbolTable =>
     }
   }
 
+  private lazy val duplicator = new Duplicator(focusPositions = true)
+  private class Duplicator(focusPositions: Boolean) extends Transformer {
+    override val treeCopy = newStrictTreeCopier
+    override def transform(t: Tree) = {
+      val t1 = super.transform(t)
+      if ((t1 ne t) && t1.pos.isRange && focusPositions) t1 setPos t.pos.focus
+      t1
+    }
+  }
   trait TreeStackTraverser extends Traverser {
     import collection.mutable
     val path: mutable.Stack[Tree] = mutable.Stack()
@@ -1526,18 +1662,38 @@ trait Trees extends api.Trees { self: SymbolTable =>
     }
   }
 
-  private lazy val duplicator = new Duplicator(focusPositions = true)
-  private class Duplicator(focusPositions: Boolean) extends Transformer {
-    override val treeCopy = newStrictTreeCopier
-    override def transform(t: Tree) = {
-      val t1 = super.transform(t)
-      if ((t1 ne t) && t1.pos.isRange && focusPositions) t1 setPos t.pos.focus
-      t1
+  /** Tracks the classes currently under construction during a transform */
+  trait UnderConstructionTransformer extends Transformer {
+    import collection.mutable
+
+    protected def isUnderConstruction(clazz: Symbol) = selfOrSuperCalls contains clazz
+
+    /** The stack of class symbols in which a call to this(), a call to the super
+      * constructor, or an early definition is active */
+    private val selfOrSuperCalls = mutable.Stack[Symbol]()
+
+    abstract override def transform(tree: Tree) = {
+      if ((treeInfo isSelfOrSuperConstrCall tree) || (treeInfo isEarlyDef tree)) {
+        selfOrSuperCalls push currentOwner.owner
+        try super.transform(tree)
+        finally selfOrSuperCalls.pop()
+      } else super.transform(tree)
     }
   }
 
   def duplicateAndKeepPositions(tree: Tree) = new Duplicator(focusPositions = false) transform tree
 
+  // this is necessary to avoid crashes like https://github.com/scalamacros/paradise/issues/1
+  // when someone tries to c.typecheck a naked MemberDef
+  def wrappingIntoTerm(tree0: Tree)(op: Tree => Tree): Tree = {
+    val neededWrapping = !tree0.isTerm
+    val tree1 = build.SyntacticBlock(tree0 :: Nil)
+    op(tree1) match {
+      case Block(tree2 :: Nil, Literal(Constant(()))) if neededWrapping => tree2
+      case tree2 => tree2
+    }
+  }
+
   // ------ copiers -------------------------------------------
 
   def copyDefDef(tree: Tree)(
@@ -1576,6 +1732,22 @@ trait Trees extends api.Trees { self: SymbolTable =>
     case t =>
       sys.error("Not a ValDef: " + t + "/" + t.getClass)
   }
+  def copyTypeDef(tree: Tree)(
+    mods: Modifiers        = null,
+    name: Name             = null,
+    tparams: List[TypeDef] = null,
+    rhs: Tree              = null
+  ): TypeDef = tree match {
+    case TypeDef(mods0, name0, tparams0, rhs0) =>
+      treeCopy.TypeDef(tree,
+        if (mods eq null) mods0 else mods,
+        if (name eq null) name0 else name,
+        if (tparams eq null) tparams0 else tparams,
+        if (rhs eq null) rhs0 else rhs
+      )
+    case t =>
+      sys.error("Not a TypeDef: " + t + "/" + t.getClass)
+  }
   def copyClassDef(tree: Tree)(
     mods: Modifiers        = null,
     name: Name             = null,
@@ -1593,6 +1765,21 @@ trait Trees extends api.Trees { self: SymbolTable =>
       sys.error("Not a ClassDef: " + t + "/" + t.getClass)
   }
 
+  def copyModuleDef(tree: Tree)(
+    mods: Modifiers        = null,
+    name: Name             = null,
+    impl: Template         = null
+  ): ModuleDef = tree match {
+    case ModuleDef(mods0, name0, impl0) =>
+      treeCopy.ModuleDef(tree,
+        if (mods eq null) mods0 else mods,
+        if (name eq null) name0 else name,
+        if (impl eq null) impl0 else impl
+      )
+    case t =>
+      sys.error("Not a ModuleDef: " + t + "/" + t.getClass)
+  }
+
   def deriveDefDef(ddef: Tree)(applyToRhs: Tree => Tree): DefDef = ddef match {
     case DefDef(mods0, name0, tparams0, vparamss0, tpt0, rhs0) =>
       treeCopy.DefDef(ddef, mods0, name0, tparams0, vparamss0, tpt0, applyToRhs(rhs0))
@@ -1635,62 +1822,68 @@ trait Trees extends api.Trees { self: SymbolTable =>
     case t =>
       sys.error("Not a LabelDef: " + t + "/" + t.getClass)
   }
+  def deriveFunction(func: Tree)(applyToRhs: Tree => Tree): Function = func match {
+    case Function(params0, rhs0) =>
+      treeCopy.Function(func, params0, applyToRhs(rhs0))
+    case t =>
+      sys.error("Not a Function: " + t + "/" + t.getClass)
+  }
 
 // -------------- Classtags --------------------------------------------------------
 
-  implicit val TreeTag = ClassTag[Tree](classOf[Tree])
-  implicit val TermTreeTag = ClassTag[TermTree](classOf[TermTree])
-  implicit val TypTreeTag = ClassTag[TypTree](classOf[TypTree])
-  implicit val SymTreeTag = ClassTag[SymTree](classOf[SymTree])
-  implicit val NameTreeTag = ClassTag[NameTree](classOf[NameTree])
-  implicit val RefTreeTag = ClassTag[RefTree](classOf[RefTree])
-  implicit val DefTreeTag = ClassTag[DefTree](classOf[DefTree])
-  implicit val MemberDefTag = ClassTag[MemberDef](classOf[MemberDef])
-  implicit val PackageDefTag = ClassTag[PackageDef](classOf[PackageDef])
-  implicit val ImplDefTag = ClassTag[ImplDef](classOf[ImplDef])
-  implicit val ClassDefTag = ClassTag[ClassDef](classOf[ClassDef])
-  implicit val ModuleDefTag = ClassTag[ModuleDef](classOf[ModuleDef])
-  implicit val ValOrDefDefTag = ClassTag[ValOrDefDef](classOf[ValOrDefDef])
-  implicit val ValDefTag = ClassTag[ValDef](classOf[ValDef])
-  implicit val DefDefTag = ClassTag[DefDef](classOf[DefDef])
-  implicit val TypeDefTag = ClassTag[TypeDef](classOf[TypeDef])
-  implicit val LabelDefTag = ClassTag[LabelDef](classOf[LabelDef])
-  implicit val ImportSelectorTag = ClassTag[ImportSelector](classOf[ImportSelector])
-  implicit val ImportTag = ClassTag[Import](classOf[Import])
-  implicit val TemplateTag = ClassTag[Template](classOf[Template])
-  implicit val BlockTag = ClassTag[Block](classOf[Block])
-  implicit val CaseDefTag = ClassTag[CaseDef](classOf[CaseDef])
-  implicit val AlternativeTag = ClassTag[Alternative](classOf[Alternative])
-  implicit val StarTag = ClassTag[Star](classOf[Star])
-  implicit val BindTag = ClassTag[Bind](classOf[Bind])
-  implicit val UnApplyTag = ClassTag[UnApply](classOf[UnApply])
-  implicit val FunctionTag = ClassTag[Function](classOf[Function])
-  implicit val AssignTag = ClassTag[Assign](classOf[Assign])
-  implicit val AssignOrNamedArgTag = ClassTag[AssignOrNamedArg](classOf[AssignOrNamedArg])
-  implicit val IfTag = ClassTag[If](classOf[If])
-  implicit val MatchTag = ClassTag[Match](classOf[Match])
-  implicit val ReturnTag = ClassTag[Return](classOf[Return])
-  implicit val TryTag = ClassTag[Try](classOf[Try])
-  implicit val ThrowTag = ClassTag[Throw](classOf[Throw])
-  implicit val NewTag = ClassTag[New](classOf[New])
-  implicit val TypedTag = ClassTag[Typed](classOf[Typed])
-  implicit val GenericApplyTag = ClassTag[GenericApply](classOf[GenericApply])
-  implicit val TypeApplyTag = ClassTag[TypeApply](classOf[TypeApply])
-  implicit val ApplyTag = ClassTag[Apply](classOf[Apply])
-  implicit val SuperTag = ClassTag[Super](classOf[Super])
-  implicit val ThisTag = ClassTag[This](classOf[This])
-  implicit val SelectTag = ClassTag[Select](classOf[Select])
-  implicit val IdentTag = ClassTag[Ident](classOf[Ident])
-  implicit val ReferenceToBoxedTag = ClassTag[ReferenceToBoxed](classOf[ReferenceToBoxed])
-  implicit val LiteralTag = ClassTag[Literal](classOf[Literal])
-  implicit val AnnotatedTag = ClassTag[Annotated](classOf[Annotated])
-  implicit val SingletonTypeTreeTag = ClassTag[SingletonTypeTree](classOf[SingletonTypeTree])
-  implicit val SelectFromTypeTreeTag = ClassTag[SelectFromTypeTree](classOf[SelectFromTypeTree])
-  implicit val CompoundTypeTreeTag = ClassTag[CompoundTypeTree](classOf[CompoundTypeTree])
-  implicit val AppliedTypeTreeTag = ClassTag[AppliedTypeTree](classOf[AppliedTypeTree])
-  implicit val TypeBoundsTreeTag = ClassTag[TypeBoundsTree](classOf[TypeBoundsTree])
+  implicit val AlternativeTag         = ClassTag[Alternative](classOf[Alternative])
+  implicit val AnnotatedTag           = ClassTag[Annotated](classOf[Annotated])
+  implicit val AppliedTypeTreeTag     = ClassTag[AppliedTypeTree](classOf[AppliedTypeTree])
+  implicit val ApplyTag               = ClassTag[Apply](classOf[Apply])
+  implicit val AssignOrNamedArgTag    = ClassTag[AssignOrNamedArg](classOf[AssignOrNamedArg])
+  implicit val AssignTag              = ClassTag[Assign](classOf[Assign])
+  implicit val BindTag                = ClassTag[Bind](classOf[Bind])
+  implicit val BlockTag               = ClassTag[Block](classOf[Block])
+  implicit val CaseDefTag             = ClassTag[CaseDef](classOf[CaseDef])
+  implicit val ClassDefTag            = ClassTag[ClassDef](classOf[ClassDef])
+  implicit val CompoundTypeTreeTag    = ClassTag[CompoundTypeTree](classOf[CompoundTypeTree])
+  implicit val DefDefTag              = ClassTag[DefDef](classOf[DefDef])
+  implicit val DefTreeTag             = ClassTag[DefTree](classOf[DefTree])
   implicit val ExistentialTypeTreeTag = ClassTag[ExistentialTypeTree](classOf[ExistentialTypeTree])
-  implicit val TypeTreeTag = ClassTag[TypeTree](classOf[TypeTree])
+  implicit val FunctionTag            = ClassTag[Function](classOf[Function])
+  implicit val GenericApplyTag        = ClassTag[GenericApply](classOf[GenericApply])
+  implicit val IdentTag               = ClassTag[Ident](classOf[Ident])
+  implicit val IfTag                  = ClassTag[If](classOf[If])
+  implicit val ImplDefTag             = ClassTag[ImplDef](classOf[ImplDef])
+  implicit val ImportSelectorTag      = ClassTag[ImportSelector](classOf[ImportSelector])
+  implicit val ImportTag              = ClassTag[Import](classOf[Import])
+  implicit val LabelDefTag            = ClassTag[LabelDef](classOf[LabelDef])
+  implicit val LiteralTag             = ClassTag[Literal](classOf[Literal])
+  implicit val MatchTag               = ClassTag[Match](classOf[Match])
+  implicit val MemberDefTag           = ClassTag[MemberDef](classOf[MemberDef])
+  implicit val ModuleDefTag           = ClassTag[ModuleDef](classOf[ModuleDef])
+  implicit val NameTreeTag            = ClassTag[NameTree](classOf[NameTree])
+  implicit val NewTag                 = ClassTag[New](classOf[New])
+  implicit val PackageDefTag          = ClassTag[PackageDef](classOf[PackageDef])
+  implicit val ReferenceToBoxedTag    = ClassTag[ReferenceToBoxed](classOf[ReferenceToBoxed])
+  implicit val RefTreeTag             = ClassTag[RefTree](classOf[RefTree])
+  implicit val ReturnTag              = ClassTag[Return](classOf[Return])
+  implicit val SelectFromTypeTreeTag  = ClassTag[SelectFromTypeTree](classOf[SelectFromTypeTree])
+  implicit val SelectTag              = ClassTag[Select](classOf[Select])
+  implicit val SingletonTypeTreeTag   = ClassTag[SingletonTypeTree](classOf[SingletonTypeTree])
+  implicit val StarTag                = ClassTag[Star](classOf[Star])
+  implicit val SuperTag               = ClassTag[Super](classOf[Super])
+  implicit val SymTreeTag             = ClassTag[SymTree](classOf[SymTree])
+  implicit val TemplateTag            = ClassTag[Template](classOf[Template])
+  implicit val TermTreeTag            = ClassTag[TermTree](classOf[TermTree])
+  implicit val ThisTag                = ClassTag[This](classOf[This])
+  implicit val ThrowTag               = ClassTag[Throw](classOf[Throw])
+  implicit val TreeTag                = ClassTag[Tree](classOf[Tree])
+  implicit val TryTag                 = ClassTag[Try](classOf[Try])
+  implicit val TypTreeTag             = ClassTag[TypTree](classOf[TypTree])
+  implicit val TypeApplyTag           = ClassTag[TypeApply](classOf[TypeApply])
+  implicit val TypeBoundsTreeTag      = ClassTag[TypeBoundsTree](classOf[TypeBoundsTree])
+  implicit val TypeDefTag             = ClassTag[TypeDef](classOf[TypeDef])
+  implicit val TypeTreeTag            = ClassTag[TypeTree](classOf[TypeTree])
+  implicit val TypedTag               = ClassTag[Typed](classOf[Typed])
+  implicit val UnApplyTag             = ClassTag[UnApply](classOf[UnApply])
+  implicit val ValDefTag              = ClassTag[ValDef](classOf[ValDef])
+  implicit val ValOrDefDefTag         = ClassTag[ValOrDefDef](classOf[ValOrDefDef])
 
   val treeNodeCount = Statistics.newView("#created tree nodes")(nodeCount)
 }
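[Editor's note, not part of the patch] The Trees.scala hunks above replace the old free-standing symbol-based factories (ClassDef(sym, impl), DefDef(sym, ...), TypeDef(sym), ...) with apply methods on the companion objects, all delegating to newValDef/newDefDef/newTypeDef, which derive every field from the symbol via named default arguments. The following is a minimal, standalone sketch of that "builder with named defaults" pattern under simplified, hypothetical types (Sym, Def); it is not the compiler's Symbol/DefDef API, only an illustration of the idiom.

    object BuilderSketch {
      // Hypothetical stand-ins for a symbol and a definition tree.
      final case class Sym(name: String, flags: Long, tparams: List[String])
      final case class Def(flags: Long, name: String, tparams: List[String], rhs: String)

      // One primary builder derives every field from the symbol by default;
      // callers override only what differs, as DefDef.apply does with newDefDef.
      def newDef(sym: Sym, rhs: String)(
        flags: Long           = sym.flags,
        name: String          = sym.name,
        tparams: List[String] = sym.tparams
      ): Def = Def(flags, name, tparams, rhs)

      def main(args: Array[String]): Unit = {
        val sym = Sym("foo", 0x1L, List("A"))
        println(newDef(sym, "rhs")())             // every field defaulted from sym
        println(newDef(sym, "rhs")(flags = 0x2L)) // override a single field
      }
    }

The design choice visible in the diff is the same: one general constructor per tree shape, with the convenience overloads expressed as thin calls that pass only the non-default pieces.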
diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala
index 68b4fa6..63f897c 100644
--- a/src/reflect/scala/reflect/internal/TypeDebugging.scala
+++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala
@@ -3,16 +3,77 @@
  * @author  Paul Phillips
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
+import util._
+
 trait TypeDebugging {
   self: SymbolTable =>
 
   import definitions._
 
-  // @M toString that is safe during debugging (does not normalize, ...)
+  /** There's a whole lot of implementation detail which is nothing but noise when
+   *  you are trying to see what's going on. This is my attempt to filter it out.
+   */
+  object noPrint extends (Tree => Boolean) {
+    def skipScalaName(name: Name) = name match {
+      case tpnme.Any | tpnme.Nothing | tpnme.AnyRef => true
+      case _                                        => false
+    }
+    def skipRefTree(t: RefTree) = t match {
+      case Select(Select(Ident(nme.ROOTPKG), nme.scala_), name) if skipScalaName(name) => true
+      case Select(sel, name) if sel.symbol == ScalaPackage && skipScalaName(name)      => true
+      case Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)              => true
+      case Ident(nme.ROOTPKG)                                                          => true
+      case _                                                                           => skipSym(t.symbol)
+    }
+    def skipSym(sym: Symbol): Boolean = sym match {
+      case null                    => false
+      case NothingClass | AnyClass => true
+      case PredefModule            => true
+      case ObjectClass             => true
+      case _                       => sym.hasPackageFlag
+    }
+    def skipType(tpe: Type): Boolean = (tpe eq null) || skipSym(tpe.typeSymbolDirect)
+
+    def skip(t: Tree): Boolean = t match {
+      case EmptyTree                                          => true
+      case PackageDef(_, _)                                   => true
+      case t: RefTree                                         => skipRefTree(t)
+      case TypeBoundsTree(lo, hi)                             => skip(lo) && skip(hi)
+      case Block(Nil, expr)                                   => skip(expr)
+      case Apply(fn, Nil)                                     => skip(fn)
+      case Block(stmt :: Nil, expr)                           => skip(stmt) && skip(expr)
+      case DefDef(_, nme.CONSTRUCTOR, Nil, ListOfNil, _, rhs) => skip(rhs)
+      case Literal(Constant(()))                              => true
+      case tt @ TypeTree()                                    => skipType(tt.tpe)
+      case _                                                  => skipSym(t.symbol)
+    }
+    def apply(t: Tree) = skip(t)
+  }
+
+  /** Light color wrappers.
+   */
   object typeDebug {
+    import scala.Console._
+
+    private val colorsOk = sys.props contains "scala.color"
+    private def inColor(s: String, color: String) = if (colorsOk && s != "") color +        s + RESET else s
+    private def inBold(s: String, color: String)  = if (colorsOk && s != "") color + BOLD + s + RESET else s
+
+    def inLightRed(s: String)          = inColor(s, RED)
+    def inLightGreen(s: String)        = inColor(s, GREEN)
+    def inLightMagenta(s: String)      = inColor(s, MAGENTA)
+    def inLightCyan(s: String): String = inColor(s, CYAN)
+    def inGreen(s: String): String     = inBold(s, GREEN)
+    def inRed(s: String): String       = inBold(s, RED)
+    def inBlue(s: String): String      = inBold(s, BLUE)
+    def inCyan(s: String): String      = inBold(s, CYAN)
+    def inMagenta(s: String)           = inBold(s, MAGENTA)
+    def resetColor(s: String): String  = if (colorsOk) s + RESET else s
+
     private def to_s(x: Any): String = x match {
       // otherwise case classes are caught looking like products
       case _: Tree | _: Type     => "" + x
@@ -20,7 +81,6 @@ trait TypeDebugging {
       case x: Product            => x.productIterator mkString ("(", ", ", ")")
       case _                     => "" + x
     }
-    def ptIndent(x: Any) = ("" + x).replaceAll("\\n", "  ")
     def ptBlock(label: String, pairs: (String, Any)*): String = {
       if (pairs.isEmpty) label + "{ }"
       else {
@@ -31,16 +91,32 @@ trait TypeDebugging {
         strs.mkString(label + " {\n  ", "\n  ", "\n}")
       }
     }
-    def ptLine(label: String, pairs: (String, Any)*): String = {
-      val strs = pairs map { case (k, v) => k + "=" + to_s(v) }
-      strs.mkString(label + ": ", ", ", "")
+    def ptLine(pairs: (String, Any)*): String = (
+      pairs
+              map { case (k,  v) => (k, to_s(v)) }
+        filterNot { case (_,  v) => v == "" }
+              map { case ("", v) => v ; case (k, v) => s"$k=$v" }
+        mkString ", "
+    )
+    def ptTree(t: Tree): String = t match {
+      case PackageDef(pid, _)                                                      => s"package $pid"
+      case ModuleDef(_, name, _)                                                   => s"object $name"
+      case DefDef(_, name, tparams, _, _, _)                                       => "def " + name + ptTypeParams(tparams)
+      case ClassDef(_, name, Nil, _) if t.symbol != null && t.symbol.isModuleClass => s"module class $name"
+      case ClassDef(_, name, tparams, _)                                           => "class " + name + ptTypeParams(tparams)
+      case td: TypeDef                                                             => ptTypeParam(td)
+      case TypeBoundsTree(lo, hi)                                                  =>
+        val lo_s = if (noPrint(lo)) "" else " >: " + ptTree(lo)
+        val hi_s = if (noPrint(hi)) "" else " <: " + ptTree(hi)
+        lo_s + hi_s
+      case _ if (t.symbol eq null) || (t.symbol eq NoSymbol) => to_s(t)
+      case _                                                 => "" + t.symbol.tpe
     }
-    def ptTree(t: Tree) = t match {
-      case PackageDef(pid, _)            => "package " + pid
-      case ModuleDef(_, name, _)         => "object " + name
-      case ClassDef(_, name, tparams, _) => "class " + name + str.brackets(tparams)
-      case _                             => to_s(t)
+    def ptTypeParam(td: TypeDef): String = {
+      val TypeDef(_, name, tparams, rhs) = td
+      name + ptTypeParams(tparams) + ptTree(rhs)
     }
+    def ptTypeParams(tparams: List[TypeDef]): String = str brackets (tparams map ptTypeParam)
 
     object str {
       def parentheses(xs: List[_]): String     = xs.mkString("(", ", ", ")")
@@ -48,24 +124,28 @@ trait TypeDebugging {
       def tparams(tparams: List[Type]): String = brackets(tparams map debug)
       def parents(ps: List[Type]): String      = (ps map debug).mkString(" with ")
       def refine(defs: Scope): String          = defs.toList.mkString("{", " ;\n ", "}")
+      def bounds(lo: Type, hi: Type): String   = {
+        val lo_s = if (typeIsNothing(lo)) "" else s" >: $lo"
+        val hi_s = if (typeIsAny(hi)) "" else s" <: $hi"
+        lo_s + hi_s
+      }
     }
-
+    import str._
     private def debug(tp: Type): String = tp match {
-      case TypeRef(pre, sym, args)             => debug(pre) + "." + sym.nameString + str.tparams(args)
-      case ThisType(sym)                       => sym.nameString + ".this"
-      case SingleType(pre, sym)                => debug(pre) +"."+ sym.nameString +".type"
-      case RefinedType(parents, defs)          => str.parents(parents) + str.refine(defs)
-      case ClassInfoType(parents, defs, clazz) => "class "+ clazz.nameString + str.parents(parents) + str.refine(defs)
-      case PolyType(tparams, result)           => str.brackets(tparams) + " " + debug(result)
-      case TypeBounds(lo, hi)                  => ">: "+ debug(lo) +" <: "+ debug(hi)
-      case tv @ TypeVar(_, _)                  => tv.toString
-      case ExistentialType(tparams, qtpe)      => "forSome "+ str.brackets(tparams) + " " + debug(qtpe)
-      case _                                   => "?"+tp.getClass.getName+"?"//tp.toString might produce cyclic error...
+      case TypeRef(pre, sym, args)         => s"${debug(pre)}.${sym.nameString}.${tparams(args)}"
+      case ThisType(sym)                   => s"${sym.nameString}.this"
+      case SingleType(pre, sym)            => s"${debug(pre)}.${sym.nameString}.type"
+      case RefinedType(ps, decls)          => s"${parents(ps)} ${refine(decls)}"
+      case ClassInfoType(ps, decls, clazz) => s"class ${clazz.nameString} ${parents(ps)} ${refine(decls)}"
+      case PolyType(tparams, result)       => s"${brackets(tparams)}${debug(result)}"
+      case TypeBounds(lo, hi)              => bounds(lo, hi)
+      case tv @ TypeVar(_, _)              => "" + tv
+      case ExistentialType(tparams, qtpe)  => s"forSome ${brackets(tparams)} ${debug(qtpe)}"
+      case _                               => s"?${shortClassOfInstance(tp)}?" // tp.toString might produce cyclic error...
     }
     def debugString(tp: Type) = debug(tp)
   }
   def paramString(tp: Type)      = typeDebug.str parentheses (tp.params map (_.defString))
   def typeParamsString(tp: Type) = typeDebug.str brackets (tp.typeParams map (_.defString))
-  def typeArgsString(tp: Type)   = typeDebug.str brackets (tp.typeArgs map (_.safeToString))
   def debugString(tp: Type)      = typeDebug debugString tp
 }
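[Editor's note, not part of the patch] The TypeDebugging.scala hunks above add ANSI color helpers to typeDebug, gated on the "scala.color" system property. The snippet below is a self-contained sketch of that gating idea; scala.Console's RED/RESET constants and sys.props are real standard-library APIs, while the object and method names are simplified stand-ins rather than the compiler's.

    object ColorSketch {
      import scala.Console._

      // Only emit escape codes when the user opted in via -Dscala.color.
      private val colorsOk = sys.props contains "scala.color"

      private def inColor(s: String, color: String): String =
        if (colorsOk && s.nonEmpty) color + s + RESET else s

      def inLightRed(s: String): String = inColor(s, RED)

      def main(args: Array[String]): Unit =
        // Run with -Dscala.color to see the coloring applied.
        println(inLightRed("only colored when -Dscala.color is set"))
    }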
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 2f49995..eb56f4b 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -3,19 +3,21 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 
 import scala.collection.{ mutable, immutable, generic }
-import generic.Clearable
 import scala.ref.WeakReference
 import mutable.ListBuffer
 import Flags._
 import scala.util.control.ControlThrowable
 import scala.annotation.tailrec
 import util.Statistics
-import scala.runtime.ObjectRef
 import util.ThreeValues._
+import Variance._
+import Depth._
+import TypeConstants._
 
 /* A standard type pattern match:
   case ErrorType =>
@@ -41,7 +43,7 @@ import util.ThreeValues._
     // parent1 with ... with parentn { defs }
   case ExistentialType(tparams, result) =>
     // result forSome { tparams }
-  case AnnotatedType(annots, tp, selfsym) =>
+  case AnnotatedType(annots, tp) =>
     // tp @annots
 
   // the following are non-value types; you cannot write them down in Scala source.
@@ -68,147 +70,63 @@ import util.ThreeValues._
     // a type variable
     // Replace occurrences of type parameters with type vars, where
     // inst is the instantiation and constr is a list of bounds.
-  case DeBruijnIndex(level, index, args)
-    // for dependent method types: a type referring to a method parameter.
-  case ErasedValueType(tref)
+  case ErasedValueType(clazz, underlying)
     // only used during erasure of derived value classes.
 */
 
-trait Types extends api.Types { self: SymbolTable =>
+trait Types
+  extends api.Types
+  with tpe.TypeComparers
+  with tpe.TypeToStrings
+  with tpe.CommonOwners
+  with tpe.GlbLubs
+  with tpe.TypeMaps
+  with tpe.TypeConstraints
+  with tpe.FindMembers
+  with util.Collections { self: SymbolTable =>
+
   import definitions._
   import TypesStats._
 
   private var explainSwitch = false
   private final val emptySymbolSet = immutable.Set.empty[Symbol]
 
-  private final val LogPendingSubTypesThreshold = 50
-  private final val LogPendingBaseTypesThreshold = 50
-  private final val LogVolatileThreshold = 50
-
-  /** A don't care value for the depth parameter in lubs/glbs and related operations. */
-  private final val AnyDepth = -3
-
-  /** Decrement depth unless it is a don't care. */
-  private final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
-
-  private final val printLubs = sys.props contains "scalac.debug.lub"
   private final val traceTypeVars = sys.props contains "scalac.debug.tvar"
-  /** In case anyone wants to turn off lub verification without reverting anything. */
-  private final val verifyLubs = true
-  /** In case anyone wants to turn off type parameter bounds being used
+  private final val breakCycles = settings.breakCycles.value
+  /** In case anyone wants to turn on type parameter bounds being used
    *  to seed type constraints.
    */
   private final val propagateParameterBoundsToTypeVars = sys.props contains "scalac.debug.prop-constraints"
+  private final val sharperSkolems = sys.props contains "scalac.experimental.sharper-skolems"
 
   protected val enableTypeVarExperimentals = settings.Xexperimental.value
 
-  /** Empty immutable maps to avoid allocations. */
-  private val emptySymMap   = immutable.Map[Symbol, Symbol]()
-  private val emptySymCount = immutable.Map[Symbol, Int]()
-
-  /** The current skolemization level, needed for the algorithms
-   *  in isSameType, isSubType that do constraint solving under a prefix.
-   */
-  var skolemizationLevel = 0
-
-  /** A log of type variable with their original constraints. Used in order
-   *  to undo constraints in the case of isSubType/isSameType failure.
-   */
-  lazy val undoLog = newUndoLog
-
-  protected def newUndoLog = new UndoLog
-
-  class UndoLog extends Clearable {
-    private type UndoPairs = List[(TypeVar, TypeConstraint)]
-    //OPT this method is public so we can do `manual inlining`
-    var log: UndoPairs = List()
-
-    /*
-     * These two methods provide explicit locking mechanism that is overridden in SynchronizedUndoLog.
-     *
-     * The idea behind explicit locking mechanism is that all public methods that access mutable state
-     * will have to obtain the lock for their entire execution so both reads and writes can be kept in
-     * right order. Originally, that was achieved by overriding those public methods in
-     * `SynchronizedUndoLog` which was fine but expensive. The reason is that those public methods take
-     * thunk as argument and if we keep them non-final there's no way to make them inlined so thunks
-     * can go away.
-     *
-     * By using explicit locking we can achieve inlining.
-     *
-     * NOTE: They are made public for now so we can apply 'manual inlining' (copy&pasting into hot
-     * places implementation of `undo` or `undoUnless`). This should be changed back to protected
-     * once inliner is fixed.
-     */
-    def lock(): Unit = ()
-    def unlock(): Unit = ()
-
-    // register with the auto-clearing cache manager
-    perRunCaches.recordCache(this)
-
-    /** Undo all changes to constraints to type variables upto `limit`. */
-    //OPT this method is public so we can do `manual inlining`
-    def undoTo(limit: UndoPairs) {
-      assertCorrectThread()
-      while ((log ne limit) && log.nonEmpty) {
-        val (tv, constr) = log.head
-        tv.constr = constr
-        log = log.tail
-      }
-    }
-
-    /** No sync necessary, because record should only
-     *  be called from within a undo or undoUnless block,
-     *  which is already synchronized.
-     */
-    private[reflect] def record(tv: TypeVar) = {
-      log ::= ((tv, tv.constr.cloneInternal))
-    }
-
-    def clear() {
-      lock()
-      try {
-        if (settings.debug.value)
-          self.log("Clearing " + log.size + " entries from the undoLog.")
-        log = Nil
-      } finally unlock()
-    }
-    def size = {
-      lock()
-      try log.size finally unlock()
-    }
-
-    // `block` should not affect constraints on typevars
-    def undo[T](block: => T): T = {
-      lock()
-      try {
-        val before = log
-
-        try block
-        finally undoTo(before)
-      } finally unlock()
-    }
-
-    // if `block` evaluates to false, it should not affect constraints on typevars
-    def undoUnless(block: => Boolean): Boolean = {
-      lock()
-      try {
-        val before = log
-        var result = false
+  /** Caching the most recent map has a 75-90% hit rate. */
+  private object substTypeMapCache {
+    private[this] var cached: SubstTypeMap = new SubstTypeMap(Nil, Nil)
 
-        try result = block
-        finally if (!result) undoTo(before)
+    def apply(from: List[Symbol], to: List[Type]): SubstTypeMap = {
+      if ((cached.from ne from) || (cached.to ne to))
+        cached = new SubstTypeMap(from, to)
 
-        result
-      } finally unlock()
+      cached
     }
   }
 
+  /** The current skolemization level, needed for the algorithms
+   *  in isSameType, isSubType that do constraint solving under a prefix.
+   */
+  private var _skolemizationLevel = 0
+  def skolemizationLevel = _skolemizationLevel
+  def skolemizationLevel_=(value: Int) = _skolemizationLevel = value
+
   /** A map from lists to compound types that have the given list as parents.
    *  This is used to avoid duplication in the computation of base type sequences and baseClasses.
    *  It makes use of the fact that these two operations depend only on the parents,
    *  not on the refinement.
    */
-  val intersectionWitness = perRunCaches.newWeakMap[List[Type], WeakReference[Type]]()
+  private val _intersectionWitness = perRunCaches.newWeakMap[List[Type], WeakReference[Type]]()
+  def intersectionWitness = _intersectionWitness
 
   /** A proxy for a type (identified by field `underlying`) that forwards most
    *  operations to it (for exceptions, see WrappingProxy, which forwards even more operations).
@@ -223,12 +141,8 @@ trait Types extends api.Types { self: SymbolTable =>
     override def isTrivial = underlying.isTrivial
     override def isHigherKinded: Boolean = underlying.isHigherKinded
     override def typeConstructor: Type = underlying.typeConstructor
-    override def isNotNull = underlying.isNotNull
     override def isError = underlying.isError
     override def isErroneous = underlying.isErroneous
-    override def isStable: Boolean = underlying.isStable
-    override def isVolatile = underlying.isVolatile
-    override def finalResultType = underlying.finalResultType
     override def paramSectionCount = underlying.paramSectionCount
     override def paramss = underlying.paramss
     override def params = underlying.params
@@ -256,7 +170,14 @@ trait Types extends api.Types { self: SymbolTable =>
    *  forwarded here. Some operations are rewrapped again.
    */
   trait RewrappingTypeProxy extends SimpleTypeProxy {
-    protected def maybeRewrap(newtp: Type) = if (newtp eq underlying) this else rewrap(newtp)
+    protected def maybeRewrap(newtp: Type) = (
+      if (newtp eq underlying) this
+      // BoundedWildcardTypes reach here during erroneous compilation: neg/t6258
+      // Higher-kinded exclusion is because [x]CC[x] compares =:= to CC: pos/t3800
+      // Otherwise, if newtp =:= underlying, don't rewrap it.
+      else if (!newtp.isWildcard && !newtp.isHigherKinded && (newtp =:= underlying)) this
+      else rewrap(newtp)
+    )
     protected def rewrap(newtp: Type): Type
 
     // the following are all operations in class Type that are overridden in some subclass
@@ -266,16 +187,15 @@ trait Types extends api.Types { self: SymbolTable =>
     override def deconst = maybeRewrap(underlying.deconst)
     override def resultType = maybeRewrap(underlying.resultType)
     override def resultType(actuals: List[Type]) = maybeRewrap(underlying.resultType(actuals))
-    override def finalResultType = maybeRewrap(underlying.finalResultType)
     override def paramSectionCount = 0
     override def paramss: List[List[Symbol]] = List()
     override def params: List[Symbol] = List()
     override def paramTypes: List[Type] = List()
     override def typeArgs = underlying.typeArgs
-    override def notNull = maybeRewrap(underlying.notNull)
     override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = underlying.instantiateTypeParams(formals, actuals)
     override def skolemizeExistential(owner: Symbol, origin: AnyRef) = underlying.skolemizeExistential(owner, origin)
     override def normalize = maybeRewrap(underlying.normalize)
+    override def etaExpand = maybeRewrap(underlying.etaExpand)
     override def dealias = maybeRewrap(underlying.dealias)
     override def cloneInfo(owner: Symbol) = maybeRewrap(underlying.cloneInfo(owner))
     override def atOwner(owner: Symbol) = maybeRewrap(underlying.atOwner(owner))
@@ -297,7 +217,6 @@ trait Types extends api.Types { self: SymbolTable =>
 
   abstract class TypeApiImpl extends TypeApi { this: Type =>
     def declaration(name: Name): Symbol = decl(name)
-    def nonPrivateDeclaration(name: Name): Symbol = nonPrivateDecl(name)
     def declarations = decls
     def typeArguments = typeArgs
     def erasure = this match {
@@ -325,18 +244,16 @@ trait Types extends api.Types { self: SymbolTable =>
     def isSpliceable = {
       this.isInstanceOf[TypeRef] && typeSymbol.isAbstractType && !typeSymbol.isExistential
     }
-  }
 
-  /** Same as a call to narrow unless existentials are visible
-   *  after widening the type. In that case, narrow from the widened
-   *  type instead of the proxy. This gives buried existentials a
-   *  chance to make peace with the other types. See SI-5330.
-   */
-  private def narrowForFindMember(tp: Type): Type = {
-    val w = tp.widen
-    // Only narrow on widened type when we have to -- narrow is expensive unless the target is a singleton type.
-    if ((tp ne w) && containsExistential(w)) w.narrow
-    else tp.narrow
+    def companion = {
+      val sym = typeSymbolDirect
+      if (sym.isModule && !sym.isPackage) sym.companionSymbol.tpe
+      else if (sym.isModuleClass && !sym.isPackageClass) sym.sourceModule.companionSymbol.tpe
+      else if (sym.isClass && !sym.isModuleClass && !sym.isPackageClass) sym.companionSymbol.info
+      else NoType
+    }
+
+    def paramLists: List[List[Symbol]] = paramss
   }
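// Illustrative sketch (editor's addition, not part of the upstream diff): the
// `companion` and `paramLists` members added above surface through the public
// reflection API of 2.11 (Type.companion, and paramLists on method symbols).
// Standalone snippet; the object name `CompanionDemo` is made up for the demo.
import scala.reflect.runtime.universe._

object CompanionDemo extends App {
  val opt = typeOf[Option[Int]]
  // The companion of a class type is the type of its companion object.
  println(opt.companion)                       // Option.type
  // paramLists groups a method's value parameters by parameter list.
  val map = opt.member(TermName("map")).asMethod
  println(map.paramLists.map(_.map(_.name)))   // List(List(f))
}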
 
   /** The base class for all types */
@@ -351,7 +268,7 @@ trait Types extends api.Types { self: SymbolTable =>
     def takesTypeArgs: Boolean = this.isHigherKinded
 
     /** Does this type denote a stable reference (i.e. singleton type)? */
-    def isStable: Boolean = false
+    final def isStable: Boolean = definitions isStable this
 
     /** Is this type dangerous (i.e. it might contain conflicting
      *  type information when empty, so that it can be constructed
@@ -359,10 +276,7 @@ trait Types extends api.Types { self: SymbolTable =>
      *  type of the form T_1 with T_n { decls }, where one of the
      *  T_i (i > 1) is an abstract type.
      */
-    def isVolatile: Boolean = false
-
-    /** Is this type guaranteed not to have `null` as a value? */
-    def isNotNull: Boolean = false
+    final def isVolatile: Boolean = definitions isVolatile this
 
     /** Is this type a structural refinement type (it ''refines'' members that have not been inherited) */
     def isStructuralRefinement: Boolean = false
@@ -384,15 +298,11 @@ trait Types extends api.Types { self: SymbolTable =>
     /** Is this type produced as a repair for an error? */
     def isErroneous: Boolean = ErroneousCollector.collect(this)
 
-    /** Does this type denote a reference type which can be null? */
-    // def isNullable: Boolean = false
-
     /** Can this type only be subtyped by bottom types?
      *  This is assessed to be the case if the class is final,
      *  and all type parameters (if any) are invariant.
      */
-    def isFinalType: Boolean =
-      typeSymbol.isFinal && (typeSymbol.typeParams forall symbolIsNonVariant) && prefix.isStable
+    def isFinalType = typeSymbol.hasOnlyBottomSubclasses && prefix.isStable
 
     /** Is this type completed (i.e. not a lazy type)? */
     def isComplete: Boolean = true
@@ -489,7 +399,7 @@ trait Types extends api.Types { self: SymbolTable =>
     /** For a class with nonEmpty parents, the first parent.
      *  Otherwise some specific fixed top type.
      */
-    def firstParent = if (parents.nonEmpty) parents.head else ObjectClass.tpe
+    def firstParent = if (parents.nonEmpty) parents.head else ObjectTpe
 
     /** For a typeref or single-type, the prefix of the normalized type (@see normalize).
      *  NoType for all other types. */
@@ -524,14 +434,9 @@ trait Types extends api.Types { self: SymbolTable =>
     /** Only used for dependent method types. */
     def resultApprox: Type = ApproximateDependentMap(resultType)
 
-    /** If this is a TypeRef `clazz`[`T`], return the argument `T`
-     *  otherwise return this type
-     */
-    def remove(clazz: Symbol): Type = this
-
     /** For a curried/nullary method or poly type its non-method result type,
      *  the type itself for all other types */
-    def finalResultType: Type = this
+    final def finalResultType: Type = definitions finalResultType this
 
     /** For a method type, the number of its value parameter sections,
      *  0 for all other types */
@@ -557,13 +462,6 @@ trait Types extends api.Types { self: SymbolTable =>
      *  the empty list for all other types */
     def boundSyms: immutable.Set[Symbol] = emptySymbolSet
 
-    /** Mixin a NotNull trait unless type already has one
-     *  ...if the option is given, since it is causing typing bugs.
-     */
-    def notNull: Type =
-      if (!settings.Ynotnull.value || isNotNull || phase.erasedTypes) this
-      else NotNullType(this)
-
     /** Replace formal type parameter symbols with actual type arguments.
      *
      * Amounts to substitution except for higher-kinded types. (See overridden method in TypeRef) -- @M
@@ -589,15 +487,39 @@ trait Types extends api.Types { self: SymbolTable =>
      *  Example: (in the below, `<List>` is the type constructor of List)
      *    TypeRef(pre, `<List>`, List()) is replaced by
      *    PolyType(X, TypeRef(pre, `<List>`, List(X)))
+     *
+     *  Discussion: normalize is NOT usually what you want to be calling.
+     *  The (very real) danger with normalize is that it will force types
+     *  which would not otherwise have been forced, leading to mysterious
+     *  behavioral differences, cycles, and other mysteries.
+     *  Under most conditions the method you should be calling is `dealiasWiden`
+     *  (see that method for more info.)
+     *
+     *  Here are a few of the side-effect-trail-leaving methods called
+     *  by various implementations of normalize:
+     *
+     *   - sym.info
+     *   - tpe.etaExpand
+     *   - tpe.betaReduce
+     *   - tpe.memberType
+     *   - sym.nextOverriddenSymbol
+     *   - constraint.inst
+     *
+     *  If you've been around the compiler a while that list must fill
+     *  your heart with fear.
      */
     def normalize = this // @MAT
 
+    def etaExpand = this
+
     /** Expands type aliases. */
     def dealias = this
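// Illustrative sketch (editor's addition, not part of the upstream diff): the
// advice above -- prefer `dealias`/`dealiasWiden` over `normalize` -- is visible
// through the public reflection API, where `dealias` expands an alias without the
// extra forcing described above. `Aliases` and `DealiasDemo` are made-up names.
import scala.reflect.runtime.universe._

object Aliases { type Ints = List[Int] }

object DealiasDemo extends App {
  val tpe = typeOf[Aliases.Ints]
  println(tpe)                                 // Aliases.Ints (the alias, unexpanded)
  println(tpe.dealias)                         // List[Int]    (alias expanded)
  println(tpe.dealias =:= typeOf[List[Int]])   // true
}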
 
     /** Repeatedly apply widen and dealias until they have no effect.
      *  This compensates for the fact that type aliases can hide beneath
      *  singleton types and singleton types can hide inside type aliases.
+     *  !!! - and yet it is still inadequate, because aliases and singletons
+     *  might lurk in the upper bounds of an abstract type. See SI-7051.
      */
     def dealiasWiden: Type = (
       if (this ne widen) widen.dealiasWiden
@@ -615,8 +537,6 @@ trait Types extends api.Types { self: SymbolTable =>
       else Nil
     )
 
-    def etaExpand: Type = this
-
     /** Performs a single step of beta-reduction on types.
      *  Given:
      *
@@ -686,16 +606,6 @@ trait Types extends api.Types { self: SymbolTable =>
     def nonPrivateMember(name: Name): Symbol =
       memberBasedOnName(name, BridgeAndPrivateFlags)
 
-    /** All members with the given flags, excluding bridges.
-     */
-    def membersWithFlags(requiredFlags: Long): Scope =
-      membersBasedOnFlags(BridgeFlags, requiredFlags)
-
-    /** All non-private members with the given flags, excluding bridges.
-     */
-    def nonPrivateMembersWithFlags(requiredFlags: Long): Scope =
-      membersBasedOnFlags(BridgeAndPrivateFlags, requiredFlags)
-
     /** The non-private member with given name, admitting members with given flags `admit`.
      *  "Admitting" refers to the fact that members with a PRIVATE, BRIDGE, or VBRIDGE
      *  flag are usually excluded from findMember results, but supplying any of those flags
@@ -716,10 +626,9 @@ trait Types extends api.Types { self: SymbolTable =>
      */
     def membersBasedOnFlags(excludedFlags: Long, requiredFlags: Long): Scope =
       findMembers(excludedFlags, requiredFlags)
-//      findMember(nme.ANYNAME, excludedFlags, requiredFlags, false).alternatives
 
     def memberBasedOnName(name: Name, excludedFlags: Long): Symbol =
-      findMember(name, excludedFlags, 0, false)
+      findMember(name, excludedFlags, 0, stableOnly = false)
 
     /** The least type instance of given class which is a supertype
      *  of this type.  Example:
@@ -750,7 +659,7 @@ trait Types extends api.Types { self: SymbolTable =>
         )
         if (trivial) this
         else {
-          val m     = new AsSeenFromMap(pre.normalize, clazz)
+          val m     = newAsSeenFromMap(pre.normalize, clazz)
           val tp    = m(this)
           val tp1   = existentialAbstraction(m.capturedParams, tp)
 
@@ -770,6 +679,7 @@ trait Types extends api.Types { self: SymbolTable =>
      *  }}}
      */
     def memberInfo(sym: Symbol): Type = {
+      require(sym ne NoSymbol, this)
       sym.info.asSeenFrom(this, sym.owner)
     }
 
@@ -785,15 +695,14 @@ trait Types extends api.Types { self: SymbolTable =>
       case OverloadedType(_, alts) =>
         OverloadedType(this, alts)
       case tp =>
-        tp.asSeenFrom(this, sym.owner)
+        if (sym eq NoSymbol) NoType else tp.asSeenFrom(this, sym.owner)
     }
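// Illustrative sketch (editor's addition, not part of the upstream diff): the
// public `Symbol.typeSignatureIn` is answered by `memberType`/`asSeenFrom` above.
// Standalone snippet; `MemberTypeDemo` is a made-up name.
import scala.reflect.runtime.universe._

object MemberTypeDemo extends App {
  val listOfInt = typeOf[List[Int]]
  val head      = listOfInt.member(TermName("head"))
  println(head.typeSignature)                // => A   (as declared, in terms of the type parameter)
  println(head.typeSignatureIn(listOfInt))   // => Int (as seen from List[Int])
}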
 
     /** Substitute types `to` for occurrences of references to
      *  symbols `from` in this type.
      */
     def subst(from: List[Symbol], to: List[Type]): Type =
-      if (from.isEmpty) this
-      else new SubstTypeMap(from, to) apply this
+      if (from.isEmpty) this else substTypeMapCache(from, to)(this)
 
     /** Substitute symbols `to` for occurrences of symbols `from` in this type.
      *
@@ -830,7 +739,6 @@ trait Types extends api.Types { self: SymbolTable =>
       else substThis(from, to).substSym(symsFrom, symsTo)
 
     /** Returns all parts of this type which satisfy predicate `p` */
-    def filter(p: Type => Boolean): List[Type] = new FilterTypeCollector(p) collect this
     def withFilter(p: Type => Boolean) = new FilterMapForeach(p)
 
     class FilterMapForeach(p: Type => Boolean) extends FilterTypeCollector(p){
@@ -838,6 +746,8 @@ trait Types extends api.Types { self: SymbolTable =>
       def map[T](f: Type => T): List[T]  = collect(Type.this) map f
     }
 
+    @inline final def orElse(alt: => Type): Type = if (this ne NoType) this else alt
+
     /** Returns optionally first type (in a preorder traversal) which satisfies predicate `p`,
      *  or None if none exists.
      */
@@ -860,45 +770,46 @@ trait Types extends api.Types { self: SymbolTable =>
     /** Does this type contain a reference to this symbol? */
     def contains(sym: Symbol): Boolean = new ContainsCollector(sym).collect(this)
 
-    /** Does this type contain a reference to this type */
-    def containsTp(tp: Type): Boolean = new ContainsTypeCollector(tp).collect(this)
-
     /** Is this type a subtype of that type? */
     def <:<(that: Type): Boolean = {
       if (Statistics.canEnable) stat_<:<(that)
       else {
         (this eq that) ||
-        (if (explainSwitch) explain("<:", isSubType, this, that)
-         else isSubType(this, that, AnyDepth))
+        (if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that)
+         else isSubType(this, that))
       }
     }
 
     /** Is this type a subtype of that type in a pattern context?
-     *  Any type arguments on the right hand side are replaced with
+     *  Dummy type arguments on the right hand side are replaced with
      *  fresh existentials, except for Arrays.
      *
      *  See bug1434.scala for an example of code which would fail
      *  if only a <:< test were applied.
      */
-    def matchesPattern(that: Type): Boolean = {
-      (this <:< that) || ((this, that) match {
-        case (TypeRef(_, ArrayClass, List(arg1)), TypeRef(_, ArrayClass, List(arg2))) if arg2.typeSymbol.typeParams.nonEmpty =>
-          arg1 matchesPattern arg2
-        case (_, TypeRef(_, _, args)) =>
-          val newtp = existentialAbstraction(args map (_.typeSymbol), that)
-          !(that =:= newtp) && (this <:< newtp)
-        case _ =>
-          false
-      })
-    }
+    def matchesPattern(that: Type): Boolean = (this <:< that) || (that match {
+      case ArrayTypeRef(elem2) if elem2.typeConstructor.isHigherKinded =>
+        this match {
+          case ArrayTypeRef(elem1) => elem1 matchesPattern elem2
+          case _                   => false
+        }
+      case TypeRef(_, sym, args) =>
+        val that1 = existentialAbstraction(args map (_.typeSymbol), that)
+        (that ne that1) && (this <:< that1) && {
+          debuglog(s"$this.matchesPattern($that) depended on discarding args and testing <:< $that1")
+          true
+        }
+      case _ =>
+        false
+    })
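// Illustrative sketch (editor's addition, not part of the upstream diff): the
// user-visible effect of matchesPattern. Generic type arguments are unchecked at
// runtime (hence the existential abstraction above), while array element types are
// checked. `PatternDemo` is a made-up name.
object PatternDemo extends App {
  val xs: Any = List(1, 2, 3)
  // Compiles with an "unchecked" warning and matches, since the String argument
  // cannot be verified after erasure.
  println(xs match { case _: List[String] => true; case _ => false })     // true
  // Arrays carry their element type at runtime, so no existential trick applies.
  val arr: Any = Array(1, 2, 3)
  println(arr match { case _: Array[String] => true; case _ => false })   // false
}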
 
     def stat_<:<(that: Type): Boolean = {
       if (Statistics.canEnable) Statistics.incCounter(subtypeCount)
       val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, subtypeNanos) else null
       val result =
         (this eq that) ||
-        (if (explainSwitch) explain("<:", isSubType, this, that)
-         else isSubType(this, that, AnyDepth))
+        (if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that)
+         else isSubType(this, that))
       if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
       result
     }
@@ -921,12 +832,7 @@ trait Types extends api.Types { self: SymbolTable =>
       (this eq that) ||
       (if (explainSwitch) explain("=", isSameType, this, that)
        else isSameType(this, that))
-    );
-
-    /** Does this type implement symbol `sym` with same or stronger type? */
-    def specializes(sym: Symbol): Boolean =
-      if (explainSwitch) explain("specializes", specializesSym, this, sym)
-      else specializesSym(this, sym)
+    )
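// Illustrative sketch (editor's addition, not part of the upstream diff): <:< and
// =:= are exposed unchanged in the public reflection API. `SubtypeDemo` is a
// made-up name.
import scala.reflect.runtime.universe._

object SubtypeDemo extends App {
  println(typeOf[List[Int]] <:< typeOf[Seq[Int]])    // true  (List[Int] is a subtype of Seq[Int])
  println(typeOf[List[Int]] =:= typeOf[List[Int]])   // true  (same type)
  println(typeOf[List[Int]] =:= typeOf[Seq[Int]])    // false (subtyping is not sameness)
}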
 
     /** Is this type close enough to that type so that members
      *  with the two types would override each other?
@@ -943,7 +849,7 @@ trait Types extends api.Types { self: SymbolTable =>
     def matches(that: Type): Boolean = matchesType(this, that, !phase.erasedTypes)
 
     /** Same as matches, except that non-method types are always assumed to match. */
-    def looselyMatches(that: Type): Boolean = matchesType(this, that, true)
+    def looselyMatches(that: Type): Boolean = matchesType(this, that, alwaysMatchSimple = true)
 
     /** The shortest sorted upwards closed array of types that contains
      *  this type as first element.
@@ -968,10 +874,10 @@ trait Types extends api.Types { self: SymbolTable =>
     /** The maximum depth (@see typeDepth)
      *  of each type in the BaseTypeSeq of this type except the first.
      */
-    def baseTypeSeqDepth: Int = 1
+    def baseTypeSeqDepth: Depth = Depth(1)
 
     /** The list of all baseclasses of this type (including its own typeSymbol)
-     *  in reverse linearization order, starting with the class itself and ending
+     *  in linearization order, starting with the class itself and ending
      *  in class Any.
      */
     def baseClasses: List[Symbol] = List()
@@ -991,12 +897,12 @@ trait Types extends api.Types { self: SymbolTable =>
         if (sym == btssym) return mid
         else if (sym isLess btssym) hi = mid - 1
         else if (btssym isLess sym) lo = mid + 1
-        else abort()
+        else abort("sym is neither `sym == btssym`, `sym isLess btssym` nor `btssym isLess sym`")
       }
       -1
     }
 
-    /** If this is a poly- or methodtype, a copy with cloned type / value parameters
+    /** If this is an ExistentialType, PolyType or MethodType, a copy with cloned type / value parameters
      *  owned by `owner`. Identity for all other types.
      */
     def cloneInfo(owner: Symbol) = this
@@ -1015,7 +921,11 @@ trait Types extends api.Types { self: SymbolTable =>
      *  after `maxTostringRecursions` recursion levels. Uses `safeToString`
      *  to produce a string on each level.
      */
-    override def toString: String = typeToString(this)
+    override final def toString: String = {
+      // see comments to internal#Symbol.typeSignature for an explanation why this initializes
+      if (!isCompilerUniverse) fullyInitializeType(this)
+      typeToString(this)
+    }
 
     /** Method to be implemented in subclasses.
      *  Converts this type to a string in calling toString for its parts.
@@ -1029,7 +939,9 @@ trait Types extends api.Types { self: SymbolTable =>
       else if ((str endsWith ".type") && !typeSymbol.isModuleClass)
         widen match {
           case RefinedType(_, _)                      => "" + widen
-          case _                                      => s"$str (with underlying type $widen)"
+          case _                                      =>
+            if (widen.toString.trim == "") str
+            else s"$str (with underlying type $widen)"
         }
       else str
     }
@@ -1058,7 +970,7 @@ trait Types extends api.Types { self: SymbolTable =>
       var sym: Symbol = NoSymbol
       var e: ScopeEntry = decls.lookupEntry(name)
       while (e ne null) {
-        if (!e.sym.hasFlag(excludedFlags)) {
+        if (!e.sym.hasFlag(excludedFlags.toLong)) {
           if (sym == NoSymbol) sym = e.sym
           else {
             if (alts.isEmpty) alts = sym :: Nil
@@ -1071,187 +983,38 @@ trait Types extends api.Types { self: SymbolTable =>
       else (baseClasses.head.newOverloaded(this, alts))
     }
 
+    /** Find all members meeting the flag requirements.
+     *
+     * If you require a DEFERRED member, you will get it if it exists -- even if there's an overriding concrete member.
+     * If you exclude DEFERRED members, or don't specify any requirements,
+     *    you won't get deferred members (whether they have an overriding concrete member or not)
+     *
+     * Thus, findMember requiring DEFERRED flags yields deferred members,
+     * while `findMember(excludedFlags = 0, requiredFlags = 0).filter(_.isDeferred)` may not (if there's a corresponding concrete member)
+     *
+     * Requirements take precedence over exclusions, so requiring and excluding DEFERRED will yield a DEFERRED member (if there is one).
+     *
+     */
     def findMembers(excludedFlags: Long, requiredFlags: Long): Scope = {
-      // if this type contains type variables, put them to sleep for a while -- don't just wipe them out by
-      // replacing them by the corresponding type parameter, as that messes up (e.g.) type variables in type refinements
-      // without this, the matchesType call would lead to type variables on both sides
-      // of a subtyping/equality judgement, which can lead to recursive types being constructed.
-      // See (t0851) for a situation where this happens.
-      val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
-
-      if (Statistics.canEnable) Statistics.incCounter(findMembersCount)
-      val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMembersNanos) else null
-
-      //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
-      var members: Scope = null
-      var required = requiredFlags
-      var excluded = excludedFlags | DEFERRED
-      var continue = true
-      var self: Type = null
-      while (continue) {
-        continue = false
-        val bcs0 = baseClasses
-        var bcs = bcs0
-        while (!bcs.isEmpty) {
-          val decls = bcs.head.info.decls
-          var entry = decls.elems
-          while (entry ne null) {
-            val sym = entry.sym
-            val flags = sym.flags
-            if ((flags & required) == required) {
-              val excl = flags & excluded
-              if (excl == 0L &&
-                  (// omit PRIVATE LOCALS unless selector class is contained in class owning the def.
-                   (bcs eq bcs0) ||
-                   (flags & PrivateLocal) != PrivateLocal ||
-                   (bcs0.head.hasTransOwner(bcs.head)))) {
-                if (members eq null) members = newFindMemberScope
-                var others: ScopeEntry = members.lookupEntry(sym.name)
-                var symtpe: Type = null
-                while ((others ne null) && {
-                         val other = others.sym
-                         (other ne sym) &&
-                         ((other.owner eq sym.owner) ||
-                          (flags & PRIVATE) != 0 || {
-                             if (self eq null) self = narrowForFindMember(this)
-                             if (symtpe eq null) symtpe = self.memberType(sym)
-                             !(self.memberType(other) matches symtpe)
-                          })}) {
-                  others = members lookupNextEntry others
-                }
-                if (others eq null) members enter sym
-              } else if (excl == DEFERRED) {
-                continue = true
-              }
-            }
-            entry = entry.next
-          } // while (entry ne null)
-          // excluded = excluded | LOCAL
-          bcs = bcs.tail
-        } // while (!bcs.isEmpty)
-        required |= DEFERRED
-        excluded &= ~(DEFERRED.toLong)
-      } // while (continue)
-      if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
-      if (suspension ne null) suspension foreach (_.suspended = false)
-      if (members eq null) EmptyScope else members
+      def findMembersInternal = new FindMembers(this, excludedFlags, requiredFlags).apply()
+      if (this.isGround) findMembersInternal
+      else suspendingTypeVars(typeVarsInType(this))(findMembersInternal)
     }
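// Illustrative sketch (editor's addition, not part of the upstream diff): the
// DEFERRED handling described above is why an abstract member that has a concrete
// override shows up only once in `members`. `A`, `B` and `MembersDemo` are made-up
// names for the demo.
import scala.reflect.runtime.universe._

trait A { def f: Int }
trait B extends A { def f = 1 }

object MembersDemo extends App {
  println(typeOf[B].members.count(_.name == TermName("f")))   // 1: only the concrete B.f
  println(typeOf[B].member(TermName("f")).owner)               // trait B
}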
 
     /**
      *  Find member(s) in this type. If several members matching criteria are found, they are
      *  returned in an OverloadedSymbol
      *
-     *  @param name           The member's name, where nme.ANYNAME means `unspecified`
+     *  @param name           The member's name
      *  @param excludedFlags  Returned members do not have these flags
      *  @param requiredFlags  Returned members do have these flags
      *  @param stableOnly     If set, return only members that are types or stable values
      */
-    //TODO: use narrow only for modules? (correct? efficiency gain?)
     def findMember(name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean): Symbol = {
-      // if this type contains type variables, put them to sleep for a while -- don't just wipe them out by
-      // replacing them by the corresponding type parameter, as that messes up (e.g.) type variables in type refinements
-      // without this, the matchesType call would lead to type variables on both sides
-      // of a subtyping/equality judgement, which can lead to recursive types being constructed.
-      // See (t0851) for a situation where this happens.
-      val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
-
-      if (Statistics.canEnable) Statistics.incCounter(findMemberCount)
-      val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMemberNanos) else null
-
-      //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
-      var member: Symbol = NoSymbol
-      var members: List[Symbol] = null
-      var lastM: ::[Symbol] = null
-      var membertpe: Type = null
-      var required = requiredFlags
-      var excluded = excludedFlags | DEFERRED
-      var continue = true
-      var self: Type = null
-
-      while (continue) {
-        continue = false
-        val bcs0 = baseClasses
-        var bcs = bcs0
-        // omit PRIVATE LOCALS unless selector class is contained in class owning the def.
-        def admitPrivateLocal(owner: Symbol): Boolean = {
-          val selectorClass = this match {
-            case tt: ThisType => tt.sym // SI-7507 the first base class is not necessarily the selector class.
-            case _            => bcs0.head
-          }
-          selectorClass.hasTransOwner(owner)
-        }
-        while (!bcs.isEmpty) {
-          val decls = bcs.head.info.decls
-          var entry = decls.lookupEntry(name)
-          while (entry ne null) {
-            val sym = entry.sym
-            val flags = sym.flags
-            if ((flags & required) == required) {
-              val excl = flags & excluded
-              if (excl == 0L &&
-                    (
-                  (bcs eq bcs0) ||
-                  (flags & PrivateLocal) != PrivateLocal ||
-                  admitPrivateLocal(bcs.head))) {
-                if (name.isTypeName || stableOnly && sym.isStable) {
-                  if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
-                  if (suspension ne null) suspension foreach (_.suspended = false)
-                  return sym
-                } else if (member eq NoSymbol) {
-                  member = sym
-                } else if (members eq null) {
-                  if ((member ne sym) &&
-                    ((member.owner eq sym.owner) ||
-                      (flags & PRIVATE) != 0 || {
-                        if (self eq null) self = narrowForFindMember(this)
-                        if (membertpe eq null) membertpe = self.memberType(member)
-                        !(membertpe matches self.memberType(sym))
-                      })) {
-                    lastM = new ::(sym, null)
-                    members = member :: lastM
-                  }
-                } else {
-                  var others: List[Symbol] = members
-                  var symtpe: Type = null
-                  while ((others ne null) && {
-                    val other = others.head
-                    (other ne sym) &&
-                      ((other.owner eq sym.owner) ||
-                        (flags & PRIVATE) != 0 || {
-                          if (self eq null) self = narrowForFindMember(this)
-                          if (symtpe eq null) symtpe = self.memberType(sym)
-                          !(self.memberType(other) matches symtpe)
-                             })}) {
-                    others = others.tail
-                  }
-                  if (others eq null) {
-                    val lastM1 = new ::(sym, null)
-                    lastM.tl = lastM1
-                    lastM = lastM1
-                  }
-                }
-              } else if (excl == DEFERRED) {
-                continue = true
-              }
-            }
-            entry = decls lookupNextEntry entry
-          } // while (entry ne null)
-          // excluded = excluded | LOCAL
-          bcs = if (name == nme.CONSTRUCTOR) Nil else bcs.tail
-        } // while (!bcs.isEmpty)
-        required |= DEFERRED
-        excluded &= ~(DEFERRED.toLong)
-      } // while (continue)
-      if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
-      if (suspension ne null) suspension foreach (_.suspended = false)
-      if (members eq null) {
-        if (member == NoSymbol) if (Statistics.canEnable) Statistics.incCounter(noMemberCount)
-        member
-      } else {
-        if (Statistics.canEnable) Statistics.incCounter(multMemberCount)
-        lastM.tl = Nil
-        baseClasses.head.newOverloaded(this, members)
-      }
+      def findMemberInternal = new FindMember(this, name, excludedFlags, requiredFlags, stableOnly).apply()
+
+      if (this.isGround) findMemberInternal
+      else suspendingTypeVars(typeVarsInType(this))(findMemberInternal)
     }
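// Illustrative sketch (editor's addition, not part of the upstream diff): when
// several members share the requested name, findMember answers with an overloaded
// symbol whose alternatives are the individual signatures. `Calc` and
// `OverloadDemo` are made-up names.
import scala.reflect.runtime.universe._

class Calc {
  def add(x: Int): Int         = x
  def add(x: Int, y: Int): Int = x + y
}

object OverloadDemo extends App {
  val add = typeOf[Calc].member(TermName("add")).asTerm
  println(add.isOverloaded)                       // true
  println(add.alternatives.map(_.typeSignature))  // List((x: Int)Int, (x: Int, y: Int)Int)
}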
 
     /** The (existential or otherwise) skolems and existentially quantified variables which are free in this type */
@@ -1280,17 +1043,6 @@ trait Types extends api.Types { self: SymbolTable =>
     def setAnnotations(annots: List[AnnotationInfo]): Type  = annotatedType(annots, this)
     def withAnnotations(annots: List[AnnotationInfo]): Type = annotatedType(annots, this)
 
-    /** Remove any annotations from this type and from any
-     *  types embedded in this type. */
-    def stripAnnotations = StripAnnotationsMap(this)
-
-    /** Set the self symbol of an annotated type, or do nothing
-     *  otherwise.  */
-    def withSelfsym(sym: Symbol) = this
-
-    /** The selfsym of an annotated type, or NoSymbol of anything else */
-    def selfsym: Symbol = NoSymbol
-
     /** The kind of this type; used for debugging */
     def kind: String = "unknown type of class "+getClass()
   }
@@ -1314,29 +1066,16 @@ trait Types extends api.Types { self: SymbolTable =>
     override def decls: Scope = supertype.decls
     override def baseType(clazz: Symbol): Type = supertype.baseType(clazz)
     override def baseTypeSeq: BaseTypeSeq = supertype.baseTypeSeq
-    override def baseTypeSeqDepth: Int = supertype.baseTypeSeqDepth
+    override def baseTypeSeqDepth: Depth = supertype.baseTypeSeqDepth
     override def baseClasses: List[Symbol] = supertype.baseClasses
-    override def isNotNull = supertype.isNotNull
-  }
-
-  case class NotNullType(override val underlying: Type) extends SubType with RewrappingTypeProxy {
-    def supertype = underlying
-    protected def rewrap(newtp: Type): Type = NotNullType(newtp)
-    override def isNotNull: Boolean = true
-    override def notNull = this
-    override def deconst: Type = underlying //todo: needed?
-    override def safeToString: String = underlying.toString + " with NotNull"
-    override def kind = "NotNullType"
   }
 
   /** A base class for types that represent a single value
    *  (single-types and this-types).
    */
-  abstract class SingletonType extends SubType with SimpleTypeProxy {
+  abstract class SingletonType extends SubType with SimpleTypeProxy with SingletonTypeApi {
     def supertype = underlying
     override def isTrivial = false
-    override def isStable = true
-    override def isVolatile = underlying.isVolatile
     override def widen: Type = underlying.widen
     override def baseTypeSeq: BaseTypeSeq = {
       if (Statistics.canEnable) Statistics.incCounter(singletonBaseTypeSeqCount)
@@ -1350,7 +1089,6 @@ trait Types extends api.Types { self: SymbolTable =>
       if (pre.isOmittablePrefix) pre.fullName + ".type"
       else prefixString + "type"
     }
-
 /*
     override def typeOfThis: Type = typeSymbol.typeOfThis
     override def bounds: TypeBounds = TypeBounds(this, this)
@@ -1376,7 +1114,6 @@ trait Types extends api.Types { self: SymbolTable =>
     override def baseType(clazz: Symbol): Type = this
     override def safeToString: String = "<error>"
     override def narrow: Type = this
-    // override def isNullable: Boolean = true
     override def kind = "ErrorType"
   }
 
@@ -1386,7 +1123,6 @@ trait Types extends api.Types { self: SymbolTable =>
   case object WildcardType extends Type {
     override def isWildcard = true
     override def safeToString: String = "?"
-    // override def isNullable: Boolean = true
     override def kind = "WildcardType"
   }
   /** BoundedWildcardTypes, used only during type inference, are created in
@@ -1411,38 +1147,32 @@ trait Types extends api.Types { self: SymbolTable =>
   case object NoType extends Type {
     override def isTrivial: Boolean = true
     override def safeToString: String = "<notype>"
-    // override def isNullable: Boolean = true
     override def kind = "NoType"
   }
 
   /** An object representing a non-existing prefix */
   case object NoPrefix extends Type {
     override def isTrivial: Boolean = true
-    override def isStable: Boolean = true
     override def prefixString = ""
     override def safeToString: String = "<noprefix>"
-    // override def isNullable: Boolean = true
     override def kind = "NoPrefixType"
   }
 
   /** A class for this-types of the form <sym>.this.type
    */
   abstract case class ThisType(sym: Symbol) extends SingletonType with ThisTypeApi {
-    if (!sym.isClass) {
+    if (!sym.isClass && !sym.isFreeType) {
       // SI-6640 allow StubSymbols to reveal what's missing from the classpath before we trip the assertion.
       sym.failIfStub()
       abort(s"ThisType($sym) for sym which is not a class")
     }
 
-    //assert(sym.isClass && !sym.isModuleClass || sym.isRoot, sym)
     override def isTrivial: Boolean = sym.isPackageClass
-    override def isNotNull = true
     override def typeSymbol = sym
     override def underlying: Type = sym.typeOfThis
-    override def isVolatile = false
     override def isHigherKinded = sym.isRefinementClass && underlying.isHigherKinded
     override def prefixString =
-      if (settings.debug.value) sym.nameString + ".this."
+      if (settings.debug) sym.nameString + ".this."
       else if (sym.isAnonOrRefinementClass) "this."
       else if (sym.isOmittablePrefix) ""
       else if (sym.isModuleClass) sym.fullNameString + "."
@@ -1460,7 +1190,7 @@ trait Types extends api.Types { self: SymbolTable =>
     def apply(sym: Symbol): Type = (
       if (!phase.erasedTypes) unique(new UniqueThisType(sym))
       else if (sym.isImplClass) sym.typeOfThis
-      else sym.tpe
+      else sym.tpe_*
     )
   }
 
@@ -1475,8 +1205,6 @@ trait Types extends api.Types { self: SymbolTable =>
     }
     override def isGround = sym.isPackageClass || pre.isGround
 
-    // override def isNullable = underlying.isNullable
-    override def isNotNull = underlying.isNotNull
     private[reflect] var underlyingCache: Type = NoType
     private[reflect] var underlyingPeriod = NoPeriod
     override def underlying: Type = {
@@ -1490,8 +1218,6 @@ trait Types extends api.Types { self: SymbolTable =>
 
     // more precise conceptually, but causes cyclic errors:    (paramss exists (_ contains sym))
     override def isImmediatelyDependent = (sym ne NoSymbol) && (sym.owner.isMethod && sym.isValueParameter)
-
-    override def isVolatile : Boolean = underlying.isVolatile && !sym.isStable
 /*
     override def narrow: Type = {
       if (phase.erasedTypes) this
@@ -1531,7 +1257,7 @@ trait Types extends api.Types { self: SymbolTable =>
       tpe.underlyingPeriod = currentPeriod
       if (!isValid(period)) {
         // [Eugene to Paul] needs review
-        tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(rootMirror.RootClass) else tpe.pre.memberType(tpe.sym).resultType;
+        tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(rootMirror.RootClass) else tpe.pre.memberType(tpe.sym).resultType
         assert(tpe.underlyingCache ne tpe, tpe)
       }
     }
@@ -1543,7 +1269,6 @@ trait Types extends api.Types { self: SymbolTable =>
       if (trivial == UNKNOWN) trivial = fromBoolean(thistpe.isTrivial && supertpe.isTrivial)
       toBoolean(trivial)
     }
-    override def isNotNull = true;
     override def typeSymbol = thistpe.typeSymbol
     override def underlying = supertpe
     override def prefix: Type = supertpe.prefix
@@ -1571,31 +1296,52 @@ trait Types extends api.Types { self: SymbolTable =>
       case TypeBounds(_, _) => that <:< this
       case _                => lo <:< that && that <:< hi
     }
-    private def lowerString = if (emptyLowerBound) "" else " >: " + lo
-    private def upperString = if (emptyUpperBound) "" else " <: " + hi
-    private def emptyLowerBound = typeIsNothing(lo)
-    private def emptyUpperBound = typeIsAny(hi)
+    private def emptyLowerBound = typeIsNothing(lo) || lo.isWildcard
+    private def emptyUpperBound = typeIsAny(hi) || hi.isWildcard
     def isEmptyBounds = emptyLowerBound && emptyUpperBound
 
-    // override def isNullable: Boolean = NullClass.tpe <:< lo;
-    override def safeToString = lowerString + upperString
+    override def safeToString = scalaNotation(_.toString)
+
+    /** Bounds notation used in Scala syntax.
+      * For example +This <: scala.collection.generic.Sorted[K,This].
+      */
+    private[internal] def scalaNotation(typeString: Type => String): String = {
+      (if (emptyLowerBound) "" else " >: " + typeString(lo)) +
+      (if (emptyUpperBound) "" else " <: " + typeString(hi))
+    }
+    /** Bounds notation used in http://adriaanm.github.com/files/higher.pdf.
+      * For example *(scala.collection.generic.Sorted[K,This]).
+      */
+    private[internal] def starNotation(typeString: Type => String): String = {
+      if (emptyLowerBound && emptyUpperBound) ""
+      else if (emptyLowerBound) "(" + typeString(hi) + ")"
+      else "(%s, %s)" format (typeString(lo), typeString(hi))
+    }
     override def kind = "TypeBoundsType"
   }
 
   final class UniqueTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi)
 
   object TypeBounds extends TypeBoundsExtractor {
-    def empty: TypeBounds           = apply(NothingClass.tpe, AnyClass.tpe)
-    def upper(hi: Type): TypeBounds = apply(NothingClass.tpe, hi)
-    def lower(lo: Type): TypeBounds = apply(lo, AnyClass.tpe)
+    def empty: TypeBounds           = apply(NothingTpe, AnyTpe)
+    def upper(hi: Type): TypeBounds = apply(NothingTpe, hi)
+    def lower(lo: Type): TypeBounds = apply(lo, AnyTpe)
     def apply(lo: Type, hi: Type): TypeBounds = {
       unique(new UniqueTypeBounds(lo, hi)).asInstanceOf[TypeBounds]
     }
   }
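// Illustrative sketch (editor's addition, not part of the upstream diff): type
// parameter bounds surface as TypeBounds and print in the scalaNotation form above,
// with empty bounds suppressed. `Box` and `BoundsDemo` are made-up names.
import scala.reflect.runtime.universe._

class Box[T <: AnyVal]

object BoundsDemo extends App {
  val tparam = typeOf[Box[Int]].typeSymbol.asClass.typeParams.head
  tparam.typeSignature match {
    case TypeBounds(lo, hi) => println(s"lo = $lo, hi = $hi")   // lo = Nothing, hi = AnyVal
    case other              => println(other)
  }
  println(tparam.typeSignature)   // " <: AnyVal": the Nothing lower bound is omitted
}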
 
+  object CompoundType {
+    def unapply(tp: Type): Option[(List[Type], Scope, Symbol)] = tp match {
+      case ClassInfoType(parents, decls, clazz) => Some((parents, decls, clazz))
+      case RefinedType(parents, decls)          => Some((parents, decls, tp.typeSymbol))
+      case _                                    => None
+    }
+  }
+
   /** A common base class for intersection types and class types
    */
-  abstract class CompoundType extends Type {
+  abstract class CompoundType extends Type with CompoundTypeApi {
 
     private[reflect] var baseTypeSeqCache: BaseTypeSeq = _
     private[reflect] var baseTypeSeqPeriod = NoPeriod
@@ -1615,7 +1361,7 @@ trait Types extends api.Types { self: SymbolTable =>
       }
     }
 
-    override def baseTypeSeqDepth: Int = baseTypeSeq.maxDepth
+    override def baseTypeSeqDepth: Depth = baseTypeSeq.maxDepth
 
     override def baseClasses: List[Symbol] = {
       val cached = baseClassesCache
@@ -1657,18 +1403,46 @@ trait Types extends api.Types { self: SymbolTable =>
     }
 
     override def narrow: Type = typeSymbol.thisType
-    override def isNotNull: Boolean = parents exists typeIsNotNull
 
     override def isStructuralRefinement: Boolean =
       typeSymbol.isAnonOrRefinementClass && (decls exists symbolIsPossibleInRefinement)
 
-    // override def isNullable: Boolean =
-    // parents forall (p => p.isNullable && !p.typeSymbol.isAbstractType);
+    protected def shouldForceScope = settings.debug || parents.isEmpty || !decls.isEmpty
+    protected def initDecls        = fullyInitializeScope(decls)
+    protected def scopeString      = if (shouldForceScope) initDecls.mkString("{", "; ", "}") else ""
+    override def safeToString      = parentsString(parents) + scopeString
+  }
 
-    override def safeToString: String = parentsString(parents) + (
-      (if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
-        fullyInitializeScope(decls).mkString("{", "; ", "}") else "")
+  protected def computeBaseClasses(tpe: Type): List[Symbol] = {
+    val parents = tpe.parents // adriaan says tpe.parents does real work sometimes, so call it only once
+    val baseTail = (
+      if (parents.isEmpty || parents.head.isInstanceOf[PackageTypeRef]) Nil
+      else {
+        //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG
+        // optimized, since this seems to be performance critical
+        val superclazz = parents.head // parents.isEmpty was already excluded
+        var mixins     = parents.tail
+        val sbcs       = superclazz.baseClasses
+        var bcs        = sbcs
+        def isNew(clazz: Symbol): Boolean = (
+          superclazz.baseTypeIndex(clazz) < 0 &&
+          { var p = bcs
+            while ((p ne sbcs) && (p.head != clazz)) p = p.tail
+            p eq sbcs
+          }
+        )
+        while (!mixins.isEmpty) {
+          def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] =
+            if (mbcs.isEmpty) bcs
+            else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail)
+            else addMixinBaseClasses(mbcs.tail)
+          bcs = addMixinBaseClasses(mixins.head.baseClasses)
+          mixins = mixins.tail
+        }
+        bcs
+      }
     )
+    tpe.typeSymbol :: baseTail
   }
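// Illustrative sketch (editor's addition, not part of the upstream diff): the
// result of computeBaseClasses is the familiar linearization, observable through
// the public API. `Logging`, `Persistence`, `Service` and `LinearizationDemo` are
// made-up names.
import scala.reflect.runtime.universe._

trait Logging
trait Persistence
class Service extends Persistence with Logging

object LinearizationDemo extends App {
  // The class itself first, then mixins from right to left, ending in Any.
  println(typeOf[Service].baseClasses.map(_.name))
  // roughly: List(Service, Logging, Persistence, Object, Any)
}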
 
   protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) = {
@@ -1690,7 +1464,7 @@ trait Types extends api.Types { self: SymbolTable =>
           val paramToVarMap = varToParamMap map (_.swap)
           val varToParam = new TypeMap {
             def apply(tp: Type) = varToParamMap get tp match {
-              case Some(sym) => sym.tpe
+              case Some(sym) => sym.tpe_*
               case _ => mapOver(tp)
             }
           }
@@ -1709,7 +1483,7 @@ trait Types extends api.Types { self: SymbolTable =>
             tpe.baseTypeSeqCache = undetBaseTypeSeq
             tpe.baseTypeSeqCache =
               if (tpe.typeSymbol.isRefinementClass)
-                tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
+                tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe_*)
               else
                 compoundBaseTypeSeq(tpe)
           } finally {
@@ -1731,41 +1505,61 @@ trait Types extends api.Types { self: SymbolTable =>
       throw new TypeError("illegal cyclic inheritance involving " + tpe.typeSymbol)
   }
 
-  protected def defineBaseClassesOfCompoundType(tpe: CompoundType) = {
-    def computeBaseClasses: List[Symbol] =
-      if (tpe.parents.isEmpty) List(tpe.typeSymbol)
-      else {
-        //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG
-        // optimized, since this seems to be performance critical
-        val superclazz = tpe.firstParent
-        var mixins = tpe.parents.tail
-        val sbcs = superclazz.baseClasses
-        var bcs = sbcs
-        def isNew(clazz: Symbol): Boolean =
-          superclazz.baseTypeIndex(clazz) < 0 &&
-          { var p = bcs;
-            while ((p ne sbcs) && (p.head != clazz)) p = p.tail;
-            p eq sbcs
-          }
-        while (!mixins.isEmpty) {
-          def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] =
-            if (mbcs.isEmpty) bcs
-            else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail)
-            else addMixinBaseClasses(mbcs.tail)
-          bcs = addMixinBaseClasses(mixins.head.baseClasses)
-          mixins = mixins.tail
+  object baseClassesCycleMonitor {
+    private var open: List[Symbol] = Nil
+    @inline private def cycleLog(msg: => String) {
+      if (settings.debug)
+        Console.err.println(msg)
+    }
+    def size = open.size
+    def push(clazz: Symbol) {
+      cycleLog("+ " + ("  " * size) + clazz.fullNameString)
+      open ::= clazz
+    }
+    def pop(clazz: Symbol) {
+      assert(open.head eq clazz, (clazz, open))
+      open = open.tail
+    }
+    def isOpen(clazz: Symbol) = open contains clazz
+  }
+
+  protected def defineBaseClassesOfCompoundType(tpe: CompoundType) {
+    def define() = defineBaseClassesOfCompoundType(tpe, force = false)
+    if (!breakCycles || isPastTyper) define()
+    else tpe match {
+      // non-empty parents helpfully excludes all package classes
+      case tpe @ ClassInfoType(_ :: _, _, clazz) if !clazz.isAnonOrRefinementClass =>
+        // Cycle: force update
+        if (baseClassesCycleMonitor isOpen clazz)
+          defineBaseClassesOfCompoundType(tpe, force = true)
+        else {
+          baseClassesCycleMonitor push clazz
+          try define()
+          finally baseClassesCycleMonitor pop clazz
         }
-        tpe.typeSymbol :: bcs
-      }
+      case _ =>
+        define()
+    }
+  }
+  private def defineBaseClassesOfCompoundType(tpe: CompoundType, force: Boolean) {
     val period = tpe.baseClassesPeriod
-    if (period != currentPeriod) {
+    if (period == currentPeriod) {
+      if (force && breakCycles) {
+        def what = tpe.typeSymbol + " in " + tpe.typeSymbol.owner.fullNameString
+        val bcs  = computeBaseClasses(tpe)
+        tpe.baseClassesCache = bcs
+        warning(s"Breaking cycle in base class computation of $what ($bcs)")
+      }
+    }
+    else {
       tpe.baseClassesPeriod = currentPeriod
       if (!isValidForBaseClasses(period)) {
         val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseClassesNanos) else null
         try {
           tpe.baseClassesCache = null
-          tpe.baseClassesCache = tpe.memo(computeBaseClasses)(tpe.typeSymbol :: _.baseClasses.tail)
-        } finally {
+          tpe.baseClassesCache = tpe.memo(computeBaseClasses(tpe))(tpe.typeSymbol :: _.baseClasses.tail)
+        }
+        finally {
           if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
         }
       }
@@ -1813,47 +1607,27 @@ trait Types extends api.Types { self: SymbolTable =>
       } else if (flattened != parents) {
         refinedType(flattened, if (typeSymbol eq NoSymbol) NoSymbol else typeSymbol.owner, decls, NoPosition)
       } else if (isHigherKinded) {
-        // MO to AM: This is probably not correct
-        // If they are several higher-kinded parents with different bounds we need
-        // to take the intersection of their bounds
-        typeFun(
-          typeParams,
-          RefinedType(
-            parents map {
-              case TypeRef(pre, sym, List()) => TypeRef(pre, sym, dummyArgs)
-              case p => p
-            },
-            decls,
-            typeSymbol))
+        etaExpand
       } else super.normalize
     }
 
-    /** A refined type P1 with ... with Pn { decls } is volatile if
-     *  one of the parent types Pi is an abstract type, and
-     *  either i > 1, or decls or a following parent Pj, j > 1, contributes
-     *  an abstract member.
-     *  A type contributes an abstract member if it has an abstract member which
-     *  is also a member of the whole refined type. A scope `decls` contributes
-     *  an abstract member if it has an abstract definition which is also
-     *  a member of the whole type.
-     */
-    override def isVolatile = {
-      def isVisible(m: Symbol) =
-        this.nonPrivateMember(m.name).alternatives contains m
-      def contributesAbstractMembers(p: Type) =
-        p.deferredMembers exists isVisible
-
-      ((parents exists (_.isVolatile))
-       ||
-       (parents dropWhile (! _.typeSymbol.isAbstractType) match {
-         case ps @ (_ :: ps1) =>
-           (ps ne parents) ||
-           (ps1 exists contributesAbstractMembers) ||
-           (decls.iterator exists (m => m.isDeferred && isVisible(m)))
-         case _ =>
-           false
-       }))
+    final override def etaExpand: Type = {
+      // MO to AM: This is probably not correct
+      // If they are several higher-kinded parents with different bounds we need
+      // to take the intersection of their bounds
+      // !!! inconsistent with TypeRef.etaExpand that uses initializedTypeParams
+      if (!isHigherKinded) this
+      else typeFun(
+        typeParams,
+        RefinedType(
+          parents map {
+            case TypeRef(pre, sym, List()) => TypeRef(pre, sym, dummyArgs)
+            case p => p
+          },
+          decls,
+          typeSymbol))
     }
+
     override def kind = "RefinedType"
   }
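// Illustrative sketch (editor's addition, not part of the upstream diff): what eta
// expansion produces, shown on the simpler TypeRef case via the public 2.11 API;
// the refined-type case above follows the same shape. `EtaExpandDemo` is a made-up
// name.
import scala.reflect.runtime.universe._

object EtaExpandDemo extends App {
  val listCons = typeOf[List[Int]].typeConstructor
  println(listCons)                // List
  println(listCons.takesTypeArgs)  // true: it is higher-kinded
  println(listCons.etaExpand)      // [+A]List[A] -- wrapped in an explicit type function
}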
 
@@ -1960,8 +1734,8 @@ trait Types extends api.Types { self: SymbolTable =>
         tp match {
           case tr @ TypeRef(_, sym, args) if args.nonEmpty =>
             val tparams = tr.initializedTypeParams
-            if (settings.debug.value && !sameLength(tparams, args))
-              debugwarn("Mismatched zip in computeRefs(): " + sym.info.typeParams + ", " + args)
+            if (settings.debug && !sameLength(tparams, args))
+              devWarning(s"Mismatched zip in computeRefs(): ${sym.info.typeParams}, $args")
 
             foreach2(tparams, args) { (tparam1, arg) =>
               if (arg contains tparam) {
@@ -2004,7 +1778,7 @@ trait Types extends api.Types { self: SymbolTable =>
       var change = false
       for ((from, targets) <- refs(NonExpansive).iterator)
         for (target <- targets) {
-          var thatInfo = classInfo(target)
+          val thatInfo = classInfo(target)
           if (thatInfo.state != Initialized)
             change = change | thatInfo.propagate()
           addRefs(NonExpansive, from, thatInfo.getRefs(NonExpansive, target))
@@ -2012,7 +1786,7 @@ trait Types extends api.Types { self: SymbolTable =>
         }
       for ((from, targets) <- refs(Expansive).iterator)
         for (target <- targets) {
-          var thatInfo = classInfo(target)
+          val thatInfo = classInfo(target)
           if (thatInfo.state != Initialized)
             change = change | thatInfo.propagate()
           addRefs(Expansive, from, thatInfo.getRefs(NonExpansive, target))
@@ -2023,27 +1797,13 @@ trait Types extends api.Types { self: SymbolTable =>
       change
     }
 
-    // override def isNullable: Boolean =
-    // symbol == AnyClass ||
-    // symbol != NothingClass && (symbol isSubClass ObjectClass) && !(symbol isSubClass NonNullClass);
-
-    // override def isNonNull: Boolean = symbol == NonNullClass || super.isNonNull;
     override def kind = "ClassInfoType"
-
-    override def safeToString =
-      if (settings.debug.value || decls.size > 1)
-        formattedToString
-      else
-        super.safeToString
-
     /** A nicely formatted string with newlines and such.
      */
-    def formattedToString: String =
-      parents.mkString("\n        with ") + (
-        if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
-         fullyInitializeScope(decls).mkString(" {\n  ", "\n  ", "\n}")
-        else ""
-      )
+    def formattedToString = parents.mkString("\n        with ") + scopeString
+    override protected def shouldForceScope = settings.debug || decls.size > 1
+    override protected def scopeString      = initDecls.mkString(" {\n  ", "\n  ", "\n}")
+    override def safeToString               = if (shouldForceScope) formattedToString else super.safeToString
   }
 
   object ClassInfoType extends ClassInfoTypeExtractor
@@ -2052,19 +1812,14 @@ trait Types extends api.Types { self: SymbolTable =>
   extends ClassInfoType(List(), decls, clazz)
 
   /** A class representing a constant type.
-   *
-   *  @param value ...
    */
   abstract case class ConstantType(value: Constant) extends SingletonType with ConstantTypeApi {
     override def underlying: Type = value.tpe
     assert(underlying.typeSymbol != UnitClass)
     override def isTrivial: Boolean = true
-    override def isNotNull = value.value != null
-    override def deconst: Type = underlying
+    override def deconst: Type = underlying.deconst
     override def safeToString: String =
       underlying.toString + "(" + value.escapedStringValue + ")"
-    // override def isNullable: Boolean = value.value eq null
-    // override def isNonNull: Boolean = value.value ne null
     override def kind = "ConstantType"
   }
 
@@ -2082,7 +1837,7 @@ trait Types extends api.Types { self: SymbolTable =>
   private val pendingVolatiles = new mutable.HashSet[Symbol]
 
   class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) {
-    require(args0.nonEmpty, this)
+    require(args0 ne Nil, this)
 
     /** No unapplied type params, since it has (or should have) exactly as many args as type params. */
     override def isHigherKinded = false
@@ -2093,10 +1848,39 @@ trait Types extends api.Types { self: SymbolTable =>
       // too little information is known to determine its kind, and
       // it later turns out not to have kind *. See SI-4070.  Only
       // logging it for now.
-      if (sym.typeParams.size != args.size)
-        log("!!! %s.transform(%s), but tparams.isEmpty and args=".format(this, tp, args))
-
-      asSeenFromOwner(tp).instantiateTypeParams(sym.typeParams, args)
+      val tparams = sym.typeParams
+      if (tparams.size != args.size)
+        devWarning(s"$this.transform($tp) with mismatched tparams=$tparams and args=$args")
+      def asSeenFromInstantiated(tp: Type) =
+        asSeenFromOwner(tp).instantiateTypeParams(tparams, args)
+      // If we're called with a poly type, and we were to run the `asSeenFrom`, over the entire
+      // type, we can end up with new symbols for the type parameters (clones from TypeMap).
+      // The subsequent substitution of type arguments would fail. This problem showed up during
+      // the fix for SI-8046, however the solution taken there wasn't quite right, and led to
+      // SI-8170.
+      //
+      // Now, we detect the PolyType before both the ASF *and* the substitution, and just operate
+      // on the result type.
+      //
+      // TODO: Revisit this and explore the questions raised:
+      //
+      //  AM: I like this better than the old code, but is there any way the tparams would need the ASF treatment as well?
+      //  JZ: I think it's largely irrelevant, as they are no longer referred to in the result type.
+      //      In fact, you can get away with returning a type of kind * here and the sky doesn't fall:
+      //        `case PolyType(`tparams`, result) => asSeenFromInstantiated(result)`
+      //      But I thought it was better to retain the kind.
+      //  AM: I've been experimenting with apply-type-args-then-ASF, but running into cycles.
+      //      In general, it seems iffy; if the tparams can never occur in the result,
+      //      then we might as well represent the type as a no-arg typeref.
+      //  AM: I've also been trying to track down uses of transform (pretty generic name for something that
+      //      does not seem that widely applicable).
+      //      It's kind of a helper for computing baseType (since it tries to propagate our type args to some
+      //      other type, which has to be related to this type for that to make sense).
+      //
+      tp match {
+        case PolyType(`tparams`, result) => PolyType(tparams, asSeenFromInstantiated(result))
+        case _                           => asSeenFromInstantiated(tp)
+      }
     }
 
     // note: does not go through typeRef. There's no need to because
@@ -2108,14 +1892,12 @@ trait Types extends api.Types { self: SymbolTable =>
   class ModuleTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) with ClassTypeRef {
     require(sym.isModuleClass, sym)
     private[this] var narrowedCache: Type = _
-    override def isStable = true
     override def narrow = {
       if (narrowedCache eq null)
         narrowedCache = singleType(pre, sym.sourceModule)
 
       narrowedCache
     }
-    final override def isNotNull = true
     override protected def finishPrefix(rest: String) = objectPrefix + rest
     override def directObjectString = super.safeToString
     override def toLongString = toString
@@ -2140,7 +1922,7 @@ trait Types extends api.Types { self: SymbolTable =>
     // to a java or scala symbol, but it does matter whether it occurs in java or scala code.
     // TypeRefs w/o type params that occur in java signatures/code are considered raw types, and are
     // represented as existential types.
-    override def isHigherKinded = typeParams.nonEmpty
+    override def isHigherKinded = (typeParams ne Nil)
     override def typeParams     = if (isDefinitionsInitialized) sym.typeParams else sym.unsafeTypeParams
     private def isRaw           = !phase.erasedTypes && isRawIfWithoutArgs(sym)
 
@@ -2230,8 +2012,6 @@ trait Types extends api.Types { self: SymbolTable =>
     require(sym.isAliasType, sym)
 
     override def dealias    = if (typeParamsMatchArgs) betaReduce.dealias else super.dealias
-    override def isStable   = normalize.isStable
-    override def isVolatile = normalize.isVolatile
     override def narrow     = normalize.narrow
     override def thisInfo   = normalize
     override def prefix     = if (this ne normalize) normalize.prefix else pre
@@ -2252,7 +2032,7 @@ trait Types extends api.Types { self: SymbolTable =>
         else ErrorType
       }
 
-    // isHKSubType0 introduces synthetic type params so that
+    // isHKSubType introduces synthetic type params so that
     // betaReduce can first apply sym.info to typeArgs before calling
     // asSeenFrom.  asSeenFrom then skips synthetic type params, which
     // are used to reduce HO subtyping to first-order subtyping, but
@@ -2262,22 +2042,51 @@ trait Types extends api.Types { self: SymbolTable =>
     // appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner)
     override def betaReduce = transform(sym.info.resultType)
 
-    // #3731: return sym1 for which holds: pre bound sym.name to sym and
-    // pre1 now binds sym.name to sym1, conceptually exactly the same
-    // symbol as sym.  The selection of sym on pre must be updated to the
-    // selection of sym1 on pre1, since sym's info was probably updated
-    // by the TypeMap to yield a new symbol, sym1 with transformed info.
-    // @returns sym1
-    override def coevolveSym(pre1: Type): Symbol =
-      if (pre eq pre1) sym else (pre, pre1) match {
-        // don't look at parents -- it would be an error to override alias types anyway
-        case (RefinedType(_, _), RefinedType(_, decls1)) => decls1 lookup sym.name
-        // TODO: is there another way a typeref's symbol can refer to a symbol defined in its pre?
-        case _                                           => sym
-      }
+    /** SI-3731, SI-8177: when prefix is changed to `newPre`, maintain consistency of prefix and sym
+     *  (where the symbol refers to a declaration "embedded" in the prefix).
+     *
+     *  @return newSym so that `newPre` binds `sym.name` to `newSym`,
+     *                  to remain consistent with `pre` previously binding `sym.name` to `sym`.
+     *
+     *  `newSym` and `sym` are conceptually the same symbols, but some change to our `prefix`
+     *  got them out of whack. (Usually triggered by substitution or `asSeenFrom`.)
+     *  The only kind of "binds" we consider is where `prefix` (or its underlying type)
+     *  is a refined type that declares `sym` (since the old prefix was discarded,
+     *  the old symbol is now stale and we should update it, like in `def rebind`,
+     *  except this is not for overriding symbols -- a vertical move -- but a "lateral" change.)
+     *
+     *  The reason for this hack is that substitution and asSeenFrom clone RefinedTypes and
+     *  their members, without updating the potential references to those members -- here, we aim to patch
+     *  this up, so that: when changing a TypeRef(pre, sym, args) to a TypeRef(pre', sym', args'), and pre
+     *  embeds a symbol sym (pre is a RefinedType(_, Scope(..., sym,...)) or a SingleType with such an
+     *  underlying type), make sure that we update sym' to compensate for the change of pre -> pre' (which may
+     *  have created a new symbol for the one the original sym referred to)
+     */
+    override def coevolveSym(newPre: Type): Symbol =
+      if ((pre ne newPre) && embeddedSymbol(pre, sym.name) == sym) {
+        val newSym = embeddedSymbol(newPre, sym.name)
+        debuglog(s"co-evolve: ${pre} -> ${newPre}, $sym : ${sym.info} -> $newSym : ${newSym.info}")
+        // To deal with an erroneous `newPre`, fall back via `orElse sym`, in case `newPre` does not have a decl named `sym.name`.
+        newSym orElse sym
+      } else sym
+
     override def kind = "AliasTypeRef"
   }
 
+  // Return the symbol named `name` that's "embedded" in tp
+  // This is the case if `tp` is a `T{...; type/val $name ; ...}`,
+  // or a singleton type with such an underlying type.
+  private def embeddedSymbol(tp: Type, name: Name): Symbol =
+    // normalize to flatten nested RefinedTypes
+    // don't check whether tp is a RefinedType -- it may be a ThisType of one, for example
+    // TODO: check the resulting symbol is owned by the refinement class? likely an invariant...
+    if (tp.typeSymbol.isRefinementClass) tp.normalize.decls lookup name
+    else {
+      debuglog(s"no embedded symbol $name found in ${showRaw(tp)} --> ${tp.normalize.decls lookup name}")
+      NoSymbol
+    }
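
For orientation, a hedged sketch of the kind of type this co-evolution is about: a TypeRef whose
prefix is (a singleton over) a refinement that itself declares the referenced member. All names
below are invented for illustration.

    import scala.language.reflectiveCalls

    // `arg.T` is a TypeRef whose prefix embeds the declaration of `T`. When substitution or
    // asSeenFrom clones the refinement (e.g. while typing a call site), the stale `T` symbol
    // must be re-bound to the matching declaration in the cloned prefix, which is what
    // coevolveSym/embeddedSymbol arrange above.
    def first(arg: { type T; def get: T }): arg.T = arg.get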
+
+
   trait AbstractTypeRef extends NonClassTypeRef {
     require(sym.isAbstractType, sym)
 
@@ -2286,30 +2095,6 @@ trait Types extends api.Types { self: SymbolTable =>
     private var symInfoCache: Type = _
     private var thisInfoCache: Type = _
 
-    override def isVolatile = {
-      // need to be careful not to fall into an infinite recursion here
-      // because volatile checking is done before all cycles are detected.
-      // the case to avoid is an abstract type directly or
-      // indirectly upper-bounded by itself. See #2918
-      try {
-        volatileRecursions += 1
-        if (volatileRecursions < LogVolatileThreshold)
-          bounds.hi.isVolatile
-        else if (pendingVolatiles(sym))
-          true // we can return true here, because a cycle will be detected
-               // here afterwards and an error will result anyway.
-        else
-          try {
-            pendingVolatiles += sym
-            bounds.hi.isVolatile
-          } finally {
-            pendingVolatiles -= sym
-          }
-      } finally {
-        volatileRecursions -= 1
-      }
-    }
-
     override def thisInfo   = {
       val symInfo = sym.info
       if (thisInfoCache == null || (symInfo ne symInfoCache)) {
@@ -2324,9 +2109,7 @@ trait Types extends api.Types { self: SymbolTable =>
       }
       thisInfoCache
     }
-    override def isStable = bounds.hi.typeSymbol isSubClass SingletonClass
     override def bounds   = thisInfo.bounds
-    // def transformInfo(tp: Type): Type = appliedType(tp.asSeenFrom(pre, sym.owner), typeArgsOrDummies)
     override protected[Types] def baseTypeSeqImpl: BaseTypeSeq = transform(bounds.hi).baseTypeSeq prepend this
     override def kind = "AbstractTypeRef"
   }
@@ -2345,6 +2128,10 @@ trait Types extends api.Types { self: SymbolTable =>
         trivial = fromBoolean(!sym.isTypeParameter && pre.isTrivial && areTrivialTypes(args))
       toBoolean(trivial)
     }
+    private[scala] def invalidateCaches(): Unit = {
+      parentsPeriod = NoPeriod
+      baseTypeSeqPeriod = NoPeriod
+    }
     private[reflect] var parentsCache: List[Type]      = _
     private[reflect] var parentsPeriod                 = NoPeriod
     private[reflect] var baseTypeSeqCache: BaseTypeSeq = _
@@ -2354,12 +2141,12 @@ trait Types extends api.Types { self: SymbolTable =>
     //OPT specialize hashCode
     override final def computeHashCode = {
       import scala.util.hashing.MurmurHash3._
-      val hasArgs = args.nonEmpty
+      val hasArgs = args ne Nil
       var h = productSeed
       h = mix(h, pre.hashCode)
       h = mix(h, sym.hashCode)
       if (hasArgs)
-        finalizeHash(mix(h, args.hashCode), 3)
+        finalizeHash(mix(h, args.hashCode()), 3)
       else
         finalizeHash(h, 2)
     }
@@ -2390,7 +2177,7 @@ trait Types extends api.Types { self: SymbolTable =>
       || pre.isGround && args.forall(_.isGround)
     )
 
-    override def etaExpand: Type = {
+    final override def etaExpand: Type = {
       // must initialise symbol, see test/files/pos/ticket0137.scala
       val tpars = initializedTypeParams
       if (tpars.isEmpty) this
@@ -2411,7 +2198,6 @@ trait Types extends api.Types { self: SymbolTable =>
 
     override def baseClasses      = thisInfo.baseClasses
     override def baseTypeSeqDepth = baseTypeSeq.maxDepth
-    override def isStable         = (sym eq NothingClass) || (sym eq SingletonClass)
     override def prefix           = pre
     override def termSymbol       = super.termSymbol
     override def termSymbolDirect = super.termSymbol
@@ -2420,9 +2206,6 @@ trait Types extends api.Types { self: SymbolTable =>
     override def typeSymbol       = sym
     override def typeSymbolDirect = sym
 
-    override def isNotNull =
-      sym.isModuleClass || sym == NothingClass || (sym isNonBottomSubClass NotNullClass) || super.isNotNull
-
     override def parents: List[Type] = {
       val cache = parentsCache
       if (parentsPeriod == currentPeriod && cache != null) cache
@@ -2440,8 +2223,14 @@ trait Types extends api.Types { self: SymbolTable =>
       }
       thisInfo.decls
     }
-
-    protected[Types] def baseTypeSeqImpl: BaseTypeSeq = sym.info.baseTypeSeq map transform
+    protected[Types] def baseTypeSeqImpl: BaseTypeSeq =
+      if (sym.info.baseTypeSeq exists (_.typeSymbolDirect.isAbstractType))
+        // SI-8046: the base type sequence might have more elements in a subclass, so we can't map it element-wise.
+        transform(sym.info).baseTypeSeq
+      else
+        // Optimization: no abstract types, we can compute the BTS of this TypeRef as an element-wise map
+        //               of the BTS of the referenced symbol.
+        sym.info.baseTypeSeq map transform
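
As a hedged reconstruction of the SI-8046 shape (illustrative, not the exact test sources): the
base type sequence of the abstract `Op` declared in `One` is shorter than that of the concrete
`Op` seen from `Three`, so an element-wise map over it would lose entries such as Function1.

    trait One {
      type Op[A]
      type Alias[A] = Op[A]
    }
    class Three extends One {
      trait Op[A] extends (A => A)
      // Seen from Three, Op[Int] has (Int => Int) in its base type sequence,
      // even though the abstract Op declared in One does not.
      def f(x: Alias[Int]) = x(5)
    }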
 
     override def baseTypeSeq: BaseTypeSeq = {
       val cache = baseTypeSeqCache
@@ -2455,66 +2244,69 @@ trait Types extends api.Types { self: SymbolTable =>
         baseTypeSeqCache
       }
     }
-
     // ensure that symbol is not a local copy with a name coincidence
     private def needsPreString = (
-         settings.debug.value
+         settings.debug
       || !shorthands(sym.fullName)
       || (sym.ownersIterator exists (s => !s.isClass))
     )
     private def preString  = if (needsPreString) pre.prefixString else ""
     private def argsString = if (args.isEmpty) "" else args.mkString("[", ",", "]")
 
-    def refinementString = (
-      if (sym.isStructuralRefinement) (
-        fullyInitializeScope(decls) filter (sym => sym.isPossibleInRefinement && sym.isPublic)
-          map (_.defString)
-          mkString("{", "; ", "}")
-      )
+    private def refinementDecls = fullyInitializeScope(decls) filter (sym => sym.isPossibleInRefinement && sym.isPublic)
+    private def refinementString = (
+      if (sym.isStructuralRefinement)
+        refinementDecls map (_.defString) mkString("{", "; ", "}")
       else ""
     )
-
     protected def finishPrefix(rest: String) = (
       if (sym.isInitialized && sym.isAnonymousClass && !phase.erasedTypes)
         parentsString(thisInfo.parents) + refinementString
       else rest
     )
+    private def noArgsString = finishPrefix(preString + sym.nameString)
+    private def tupleTypeString: String = args match {
+      case Nil        => noArgsString
+      case arg :: Nil => s"($arg,)"
+      case _          => args.mkString("(", ", ", ")")
+    }
     private def customToString = sym match {
-      case RepeatedParamClass => args.head + "*"
+      case RepeatedParamClass | JavaRepeatedParamClass => args.head + "*"
       case ByNameParamClass   => "=> " + args.head
       case _                  =>
-        def targs = normalize.typeArgs
-
-        if (isFunctionType(this)) {
+        if (isFunctionTypeDirect(this)) {
           // Aesthetics: printing Function1 as T => R rather than (T) => R
           // ...but only if it's not a tuple, so ((T1, T2)) => R is distinguishable
           // from (T1, T2) => R.
-          targs match {
-            case in :: out :: Nil if !isTupleType(in) =>
+          unspecializedTypeArgs(this) match {
+            // See neg/t588 for an example which arrives here - printing
+            // the type of a Function1 after erasure.
+            case Nil => noArgsString
+            case in :: out :: Nil if !isTupleTypeDirect(in) =>
               // A => B => C should be (A => B) => C or A => (B => C).
               // Also if A is byname, then we want (=> A) => B because => is right associative and => A => B
               // would mean => (A => B) which is a different type
-              val in_s  = if (isFunctionType(in) || isByNameParamType(in)) "(" + in + ")" else "" + in
-              val out_s = if (isFunctionType(out)) "(" + out + ")" else "" + out
+              val in_s  = if (isFunctionTypeDirect(in) || isByNameParamType(in)) "(" + in + ")" else "" + in
+              val out_s = if (isFunctionTypeDirect(out)) "(" + out + ")" else "" + out
               in_s + " => " + out_s
             case xs =>
               xs.init.mkString("(", ", ", ")") + " => " + xs.last
           }
         }
-        else if (isTupleType(this))
-          targs.mkString("(", ", ", if (hasLength(targs, 1)) ",)" else ")")
-        else if (sym.isAliasType && prefixChain.exists(_.termSymbol.isSynthetic) && (this ne this.normalize))
-          "" + normalize
+        else if (isTupleTypeDirect(this))
+          tupleTypeString
+        else if (sym.isAliasType && prefixChain.exists(_.termSymbol.isSynthetic) && (this ne dealias))
+          "" + dealias
         else
           ""
     }
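
A hedged illustration of the printing aesthetics encoded above, as such types would typically
render (for example in the REPL):

    val f: Int => String = _.toString   // prints as  Int => String
    val g: ((Int, Int)) => Int = _._1   // prints as  ((Int, Int)) => Int  (tupled argument stays distinguishable)
    val h: (Int, Int) => Int = _ + _    // prints as  (Int, Int) => Int
    val t: (Int, String) = (1, "a")     // prints as  (Int, String)
    val t1: Tuple1[Int] = Tuple1(1)     // prints as  (Int,)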
     override def safeToString = {
-      val custom = if (settings.debug.value) "" else customToString
+      val custom = if (settings.debug) "" else customToString
       if (custom != "") custom
       else finishPrefix(preString + sym.nameString + argsString)
     }
     override def prefixString = "" + (
-      if (settings.debug.value)
+      if (settings.debug)
         super.prefixString
       else if (sym.isOmittablePrefix)
         ""
@@ -2525,23 +2317,33 @@ trait Types extends api.Types { self: SymbolTable =>
       else
         super.prefixString
     )
+    // Suppressing case class copy method which risks subverting our single point of creation.
+    private def copy = null
     override def kind = "TypeRef"
   }
 
+  // No longer defined as anonymous classes in `object TypeRef` to avoid an unnecessary outer pointer.
+  private final class AliasArgsTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends ArgsTypeRef(pre, sym, args) with AliasTypeRef
+  private final class AbstractArgsTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends ArgsTypeRef(pre, sym, args) with AbstractTypeRef
+  private final class ClassArgsTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends ArgsTypeRef(pre, sym, args) with ClassTypeRef
+  private final class AliasNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) with AliasTypeRef
+  private final class AbstractNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) with AbstractTypeRef
+  private final class ClassNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) with ClassTypeRef
+
   object TypeRef extends TypeRefExtractor {
     def apply(pre: Type, sym: Symbol, args: List[Type]): Type = unique({
-      if (args.nonEmpty) {
-        if (sym.isAliasType)              new ArgsTypeRef(pre, sym, args) with AliasTypeRef
-        else if (sym.isAbstractType)      new ArgsTypeRef(pre, sym, args) with AbstractTypeRef
-        else                              new ArgsTypeRef(pre, sym, args) with ClassTypeRef
+      if (args ne Nil) {
+        if (sym.isAliasType)              new AliasArgsTypeRef(pre, sym, args)
+        else if (sym.isAbstractType)      new AbstractArgsTypeRef(pre, sym, args)
+        else                              new ClassArgsTypeRef(pre, sym, args)
       }
       else {
-        if (sym.isAliasType)              new NoArgsTypeRef(pre, sym) with AliasTypeRef
-        else if (sym.isAbstractType)      new NoArgsTypeRef(pre, sym) with AbstractTypeRef
+        if (sym.isAliasType)              new AliasNoArgsTypeRef(pre, sym)
+        else if (sym.isAbstractType)      new AbstractNoArgsTypeRef(pre, sym)
         else if (sym.isRefinementClass)   new RefinementTypeRef(pre, sym)
         else if (sym.isPackageClass)      new PackageTypeRef(pre, sym)
         else if (sym.isModuleClass)       new ModuleTypeRef(pre, sym)
-        else                              new NoArgsTypeRef(pre, sym) with ClassTypeRef
+        else                              new ClassNoArgsTypeRef(pre, sym)
       }
     })
   }
@@ -2553,7 +2355,7 @@ trait Types extends api.Types { self: SymbolTable =>
       if (!isValidForBaseClasses(period)) {
         tpe.parentsCache = tpe.thisInfo.parents map tpe.transform
       } else if (tpe.parentsCache == null) { // seems this can happen if things are corrupted enough, see #2641
-        tpe.parentsCache = List(AnyClass.tpe)
+        tpe.parentsCache = List(AnyTpe)
       }
     }
   }
@@ -2603,7 +2405,7 @@ trait Types extends api.Types { self: SymbolTable =>
         true
     }
 
-    def isImplicit = params.nonEmpty && params.head.isImplicit
+    def isImplicit = (params ne Nil) && params.head.isImplicit
     def isJava = false // can we do something like for implicits? I.e. do Java methods without parameters need to be recognized?
 
     //assert(paramTypes forall (pt => !pt.typeSymbol.isImplClass))//DEBUG
@@ -2611,7 +2413,7 @@ trait Types extends api.Types { self: SymbolTable =>
 
     override def paramss: List[List[Symbol]] = params :: resultType.paramss
 
-    override def paramTypes = params map (_.tpe)
+    override def paramTypes = mapList(params)(symTpe) // OPT use mapList rather than .map
 
     override def boundSyms = resultType.boundSyms ++ params
 
@@ -2634,8 +2436,6 @@ trait Types extends api.Types { self: SymbolTable =>
     //TODO this may be generalised so that the only constraint is dependencies are acyclic
     def approximate: MethodType = MethodType(params, resultApprox)
 
-    override def finalResultType: Type = resultType.finalResultType
-
     override def safeToString = paramString(this) + resultType
 
     override def cloneInfo(owner: Symbol) = {
@@ -2662,17 +2462,15 @@ trait Types extends api.Types { self: SymbolTable =>
     override def isTrivial = resultType.isTrivial && (resultType eq resultType.withoutAnnotations)
     override def prefix: Type = resultType.prefix
     override def narrow: Type = resultType.narrow
-    override def finalResultType: Type = resultType.finalResultType
     override def termSymbol: Symbol = resultType.termSymbol
     override def typeSymbol: Symbol = resultType.typeSymbol
     override def parents: List[Type] = resultType.parents
     override def decls: Scope = resultType.decls
     override def baseTypeSeq: BaseTypeSeq = resultType.baseTypeSeq
-    override def baseTypeSeqDepth: Int = resultType.baseTypeSeqDepth
+    override def baseTypeSeqDepth: Depth = resultType.baseTypeSeqDepth
     override def baseClasses: List[Symbol] = resultType.baseClasses
     override def baseType(clazz: Symbol): Type = resultType.baseType(clazz)
     override def boundSyms = resultType.boundSyms
-    override def isVolatile = resultType.isVolatile
     override def safeToString: String = "=> "+ resultType
     override def kind = "NullaryMethodType"
   }
@@ -2707,12 +2505,10 @@ trait Types extends api.Types { self: SymbolTable =>
     override def boundSyms = immutable.Set[Symbol](typeParams ++ resultType.boundSyms: _*)
     override def prefix: Type = resultType.prefix
     override def baseTypeSeq: BaseTypeSeq = resultType.baseTypeSeq
-    override def baseTypeSeqDepth: Int = resultType.baseTypeSeqDepth
+    override def baseTypeSeqDepth: Depth = resultType.baseTypeSeqDepth
     override def baseClasses: List[Symbol] = resultType.baseClasses
     override def baseType(clazz: Symbol): Type = resultType.baseType(clazz)
     override def narrow: Type = resultType.narrow
-    override def isVolatile = resultType.isVolatile
-    override def finalResultType: Type = resultType.finalResultType
 
     /** @M: typeDefSig wraps a TypeBounds in a PolyType
      *  to represent a higher-kinded type parameter
@@ -2757,7 +2553,6 @@ trait Types extends api.Types { self: SymbolTable =>
     override protected def rewrap(newtp: Type) = existentialAbstraction(quantified, newtp)
 
     override def isTrivial = false
-    override def isStable: Boolean = false
     override def bounds = TypeBounds(maybeRewrap(underlying.bounds.lo), maybeRewrap(underlying.bounds.hi))
     override def parents = underlying.parents map maybeRewrap
     override def boundSyms = quantified.toSet
@@ -2781,8 +2576,59 @@ trait Types extends api.Types { self: SymbolTable =>
     override def baseTypeSeq = underlying.baseTypeSeq map maybeRewrap
     override def isHigherKinded = false
 
-    override def skolemizeExistential(owner: Symbol, origin: AnyRef) =
-      deriveType(quantified, tparam => (owner orElse tparam.owner).newExistentialSkolem(tparam, origin))(underlying)
+    // TODO: check invariant that all quantifiers have the same (existing) owner
+    private def quantifierOwner = quantified collectFirst { case q if q.owner.exists => q.owner } getOrElse NoSymbol
+
+    // Is this existential of the form: T[Q1, ..., QN] forSome { type Q1 >: L1 <: U1, ..., QN >: LN <: UN}
+    private def isStraightApplication = (quantified corresponds underlying.typeArgs){ (q, a) => q.tpe =:= a }
+
+    /** [SI-6169, SI-8197 -- companion to SI-1786]
+     *
+     * Approximation to improve the bounds of a Java-defined existential type,
+     * based on the bounds of the type parameters of the quantified type.
+     * In Scala syntax, given a Java-defined class C[T <: String], the existential type C[_]
+     * is improved to C[_ <: String] before skolemization, which captures (get it?) what Java does:
+     * enter the type parameters' bounds into the context when checking subtyping/type equality of existential types.
+     *
+     * Also tried doing this once during class file parsing or when creating the existential type,
+     * but that causes cyclic errors because it happens too early.
+     *
+     * NOTE: we're only modifying the skolems to avoid leaking the sharper bounds to `quantified` (SI-8283)
+     *
+     * TODO: figure out how to do this earlier without running into cycles, so this can subsume the fix for SI-1786
+     */
+    override def skolemizeExistential(owner0: Symbol, origin: AnyRef) = {
+      val owner = owner0 orElse quantifierOwner
+
+      // do this here because it's quite close to what Java does:
+      // when checking subtyping/type equality, enter constraints
+      // derived from the existentially quantified type into the typing environment
+      // (aka \Gamma, which tracks types for variables and constraints/kinds for types)
+      // as a nice bonus, delaying this until we need it avoids cyclic errors
+      def tpars = underlying.typeSymbol.initialize.typeParams
+
+      def newSkolem(quant: Symbol) = owner.newExistentialSkolem(quant, origin)
+      def newSharpenedSkolem(quant: Symbol, tparam: Symbol): Symbol = {
+        def emptyBounds(sym: Symbol) = sym.info.bounds.isEmptyBounds
+
+        // avoid creating cycles [pos/t2940] that arise when an existential quantifier is
+        // bounded by an existential type that unhygienically has that quantifier as its own quantifier
+        // (TODO: clone latter existential with fresh quantifiers -- not covering this case for now)
+        val canSharpen = (
+             emptyBounds(quant) && !emptyBounds(tparam)
+          && (existentialsInType(tparam.info) intersect quantified).isEmpty
+        )
+
+        val skolemInfo = if (!canSharpen) quant.info else tparam.info.substSym(tpars, quantified)
+
+        owner.newExistentialSkolem(quant.name.toTypeName, skolemInfo, quant.flags, quant.pos, origin)
+      }
+
+      val canSharpenBounds = (underlying.typeSymbol.isJavaDefined || sharperSkolems) && isStraightApplication
+
+      if (canSharpenBounds) deriveType2(quantified, tpars, newSharpenedSkolem)(underlying)
+      else deriveType(quantified, newSkolem)(underlying)
+    }
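
A hedged sketch directly mirroring the comment above; the Java class is assumed for illustration
and is not part of this patch:

    // Assume a Java-defined class:  public class C<T extends String> {}
    // As described above, the Scala existential C[_] is skolemized as if it were C[_ <: String],
    // so the bound declared on T participates in the subtype check:
    def use(c: C[_]): C[_ <: String] = c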
 
     private def wildcardArgsString(qset: Set[Symbol], args: List[Type]): List[String] = args map {
       case TypeRef(_, sym, _) if (qset contains sym) =>
@@ -2820,10 +2666,10 @@ trait Types extends api.Types { self: SymbolTable =>
     override def safeToString: String = {
       def clauses = {
         val str = quantified map (_.existentialToString) mkString (" forSome { ", "; ", " }")
-        if (settings.explaintypes.value) "(" + str + ")" else str
+        if (settings.explaintypes) "(" + str + ")" else str
       }
       underlying match {
-        case TypeRef(pre, sym, args) if !settings.debug.value && isRepresentableWithWildcards =>
+        case TypeRef(pre, sym, args) if !settings.debug && isRepresentableWithWildcards =>
           "" + TypeRef(pre, sym, Nil) + wildcardArgsString(quantified.toSet, args).mkString("[", ", ", "]")
         case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) =>
           "(" + underlying + ")" + clauses
@@ -2842,13 +2688,13 @@ trait Types extends api.Types { self: SymbolTable =>
 
     def withTypeVars(op: Type => Boolean): Boolean = withTypeVars(op, AnyDepth)
 
-    def withTypeVars(op: Type => Boolean, depth: Int): Boolean = {
+    def withTypeVars(op: Type => Boolean, depth: Depth): Boolean = {
       val quantifiedFresh = cloneSymbols(quantified)
       val tvars = quantifiedFresh map (tparam => TypeVar(tparam))
       val underlying1 = underlying.instantiateTypeParams(quantified, tvars) // fuse subst quantified -> quantifiedFresh -> tvars
       op(underlying1) && {
-        solve(tvars, quantifiedFresh, quantifiedFresh map (x => 0), false, depth) &&
-        isWithinBounds(NoPrefix, NoSymbol, quantifiedFresh, tvars map (_.constr.inst))
+        solve(tvars, quantifiedFresh, quantifiedFresh map (_ => Invariant), upper = false, depth) &&
+        isWithinBounds(NoPrefix, NoSymbol, quantifiedFresh, tvars map (_.inst))
       }
     }
   }
@@ -2865,9 +2711,17 @@ trait Types extends api.Types { self: SymbolTable =>
     override def kind = "OverloadedType"
   }
 
-  def overloadedType(pre: Type, alternatives: List[Symbol]): Type =
-    if (alternatives.tail.isEmpty) pre memberType alternatives.head
-    else OverloadedType(pre, alternatives)
+  /** The canonical creator for OverloadedTypes.
+   */
+  def overloadedType(pre: Type, alternatives: List[Symbol]): Type = alternatives match {
+    case Nil        => NoType
+    case alt :: Nil => pre memberType alt
+    case _          => OverloadedType(pre, alternatives)
+  }
+
+  case class ImportType(expr: Tree) extends Type {
+    override def safeToString = "ImportType("+expr+")"
+  }
 
   /** A class remembering a type instantiation for a set of overloaded
    *  polymorphic symbols.
@@ -2875,23 +2729,12 @@ trait Types extends api.Types { self: SymbolTable =>
    */
   case class AntiPolyType(pre: Type, targs: List[Type]) extends Type {
     override def safeToString =
-      pre.toString + targs.mkString("(with type arguments ", ", ", ")");
+      pre.toString + targs.mkString("(with type arguments ", ", ", ")")
+
     override def memberType(sym: Symbol) = appliedType(pre.memberType(sym), targs)
-//     override def memberType(sym: Symbol) = pre.memberType(sym) match {
-//       case PolyType(tparams, restp) =>
-//         restp.subst(tparams, targs)
-// /* I don't think this is needed, as existential types close only over value types
-//       case ExistentialType(tparams, qtpe) =>
-//         existentialAbstraction(tparams, qtpe.memberType(sym))
-// */
-//       case ErrorType =>
-//         ErrorType
-//     }
     override def kind = "AntiPolyType"
   }
 
-  //private var tidCount = 0  //DEBUG
-
   object HasTypeMember {
     def apply(name: TypeName, tp: Type): Type = {
       val bound = refinedType(List(WildcardType), NoSymbol)
@@ -2906,13 +2749,10 @@ trait Types extends api.Types { self: SymbolTable =>
     }
   }
 
-  // Not used yet.
-  object HasTypeParams {
-    def unapply(tp: Type): Option[(List[Symbol], Type)] = tp match {
-      case AnnotatedType(_, tp, _)        => unapply(tp)
-      case ExistentialType(tparams, qtpe) => Some((tparams, qtpe))
-      case PolyType(tparams, restpe)      => Some((tparams, restpe))
-      case _                              => None
+  object ArrayTypeRef {
+    def unapply(tp: Type) = tp match {
+      case TypeRef(_, ArrayClass, arg :: Nil) => Some(arg)
+      case _                                  => None
     }
   }
 
@@ -2941,10 +2781,10 @@ trait Types extends api.Types { self: SymbolTable =>
        *  See SI-5359.
        */
       val bounds  = tparam.info.bounds
-      /** We can seed the type constraint with the type parameter
-       *  bounds as long as the types are concrete.  This should lower
-       *  the complexity of the search even if it doesn't improve
-       *  any results.
+      /* We can seed the type constraint with the type parameter
+       * bounds as long as the types are concrete.  This should lower
+       * the complexity of the search even if it doesn't improve
+       * any results.
        */
       if (propagateParameterBoundsToTypeVars) {
         val exclude = bounds.isEmptyBounds || (bounds exists typeIsNonClassType)
@@ -2985,20 +2825,6 @@ trait Types extends api.Types { self: SymbolTable =>
       createTypeVar(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams, untouchable)
   }
 
-  /** Repack existential types, otherwise they sometimes get unpacked in the
-   *  wrong location (type inference comes up with an unexpected skolem)
-   */
-  def repackExistential(tp: Type): Type = (
-    if (tp == NoType) tp
-    else existentialAbstraction(existentialsInType(tp), tp)
-  )
-
-  def containsExistential(tpe: Type) =
-    tpe exists typeIsExistentiallyBound
-
-  def existentialsInType(tpe: Type) =
-    tpe withFilter typeIsExistentiallyBound map (_.typeSymbol)
-
   /** Precondition: params.nonEmpty.  (args.nonEmpty enforced structurally.)
    */
   class HKTypeVar(
@@ -3009,7 +2835,6 @@ trait Types extends api.Types { self: SymbolTable =>
 
     require(params.nonEmpty, this)
     override def isHigherKinded          = true
-    override protected def typeVarString = params.map(_.name).mkString("[", ", ", "]=>" + originName)
   }
 
   /** Precondition: zipped params/args nonEmpty.  (Size equivalence enforced structurally.)
@@ -3025,9 +2850,7 @@ trait Types extends api.Types { self: SymbolTable =>
     override def params: List[Symbol] = zippedArgs map (_._1)
     override def typeArgs: List[Type] = zippedArgs map (_._2)
 
-    override protected def typeVarString = (
-      zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]")
-    )
+    override def safeToString: String = super.safeToString + typeArgs.map(_.safeToString).mkString("[", ", ", "]")
   }
 
   trait UntouchableTypeVar extends TypeVar {
@@ -3057,7 +2880,7 @@ trait Types extends api.Types { self: SymbolTable =>
    *  Precondition for this class, enforced structurally: args.isEmpty && params.isEmpty.
    */
   abstract case class TypeVar(
-    val origin: Type,
+    origin: Type,
     var constr: TypeConstraint
   ) extends Type {
 
@@ -3077,9 +2900,10 @@ trait Types extends api.Types { self: SymbolTable =>
      *  in operations that are exposed from types. Hence, no syncing of `constr`
      *  or `encounteredHigherLevel` or `suspended` accesses should be necessary.
      */
-//    var constr = constr0
     def instValid = constr.instValid
-    override def isGround = instValid && constr.inst.isGround
+    def inst = constr.inst
+    def instWithinBounds = constr.instWithinBounds
+    override def isGround = instValid && inst.isGround
 
     /** The variable's skolemization level */
     val level = skolemizationLevel
@@ -3098,6 +2922,7 @@ trait Types extends api.Types { self: SymbolTable =>
         this
       else if (newArgs.size == params.size) {
         val tv = TypeVar(origin, constr, newArgs, params)
+        tv.linkSuspended(this)
         TypeVar.trace("applyArgs", "In " + originLocation + ", apply args " + newArgs.mkString(", ") + " to " + originName)(tv)
       }
       else
@@ -3120,8 +2945,7 @@ trait Types extends api.Types { self: SymbolTable =>
     // When comparing to types containing skolems, remember the highest level
     // of skolemization. If that highest level is higher than our initial
     // skolemizationLevel, we can't re-use those skolems as the solution of this
-    // typevar, which means we'll need to repack our constr.inst into a fresh
-    // existential.
+    // typevar, which means we'll need to repack our inst into a fresh existential.
     // were we compared to skolems at a higher skolemizationLevel?
     // EXPERIMENTAL: value will not be considered unless enableTypeVarExperimentals is true
     // see SI-5729 for why this is still experimental
@@ -3132,7 +2956,10 @@ trait Types extends api.Types { self: SymbolTable =>
     // invariant: before mutating constr, save old state in undoLog
     // (undoLog is used to reset constraints to avoid piling up unrelated ones)
     def setInst(tp: Type): this.type = {
-//      assert(!(tp containsTp this), this)
+      if (tp eq this) {
+        log(s"TypeVar cycle: called setInst passing $this to itself.")
+        return this
+      }
       undoLog record this
       // if we were compared against later typeskolems, repack the existential,
       // because skolems are only compatible if they were created at the same level
@@ -3144,20 +2971,43 @@ trait Types extends api.Types { self: SymbolTable =>
     def addLoBound(tp: Type, isNumericBound: Boolean = false) {
       assert(tp != this, tp) // implies there is a cycle somewhere (?)
       //println("addLoBound: "+(safeToString, debugString(tp))) //DEBUG
-      undoLog record this
-      constr.addLoBound(tp, isNumericBound)
+      if (!sharesConstraints(tp)) {
+        undoLog record this
+        constr.addLoBound(tp, isNumericBound)
+      }
     }
 
     def addHiBound(tp: Type, isNumericBound: Boolean = false) {
       // assert(tp != this)
       //println("addHiBound: "+(safeToString, debugString(tp))) //DEBUG
-      undoLog record this
-      constr.addHiBound(tp, isNumericBound)
+      if (!sharesConstraints(tp)) {
+        undoLog record this
+        constr.addHiBound(tp, isNumericBound)
+      }
     }
     // </region>
 
     // ignore subtyping&equality checks while true -- see findMember
-    private[Types] var suspended = false
+    // OPT: This could be Either[TypeVar, Boolean], but this encoding was chosen instead to save allocations.
+    private var _suspended: Type = ConstantFalse
+    private[Types] def suspended: Boolean = (_suspended: @unchecked) match {
+      case ConstantFalse => false
+      case ConstantTrue  => true
+      case tv: TypeVar   => tv.suspended
+    }
+
+    /** An `AppliedTypeVar` shares its `TypeConstraint` with the `HKTypeVar` it was spawned from.
+     *   A type inference session can also have more than one ATV.
+     *   If we don't detect that, we end up with a "cyclic constraint" when we try to instantiate type parameters
+     *   after solving, as in pos/t8237.
+     */
+    protected final def sharesConstraints(other: Type): Boolean = other match {
+      case other: TypeVar => constr == other.constr // SI-8237 avoid cycles. Details in pos/t8237.scala
+      case _ => false
+    }
+    private[Types] def suspended_=(b: Boolean): Unit = _suspended = if (b) ConstantTrue else ConstantFalse
+    // SI-7785 Link the suspended attribute of a TypeVar created in, say, a TypeMap (e.g. AsSeenFrom) to its originator
+    private[Types] def linkSuspended(origin: TypeVar): Unit = _suspended = origin
 
     /** Called when a TypeVar is involved in a subtyping check.  Result is whether
      *  this TypeVar could plausibly be a [super/sub]type of argument `tp` and if so,
@@ -3187,7 +3037,7 @@ trait Types extends api.Types { self: SymbolTable =>
         else lhs <:< rhs
       }
 
-      /** Simple case: type arguments can be ignored, because either this typevar has
+      /*  Simple case: type arguments can be ignored, because either this typevar has
        *  no type parameters, or we are comparing to Any/Nothing.
        *
        *  The latter condition is needed because HK unification is limited to constraints of the shape
@@ -3214,7 +3064,7 @@ trait Types extends api.Types { self: SymbolTable =>
         } else false
       }
 
-      /** Full case: involving a check of the form
+      /*  Full case: involving a check of the form
        *  {{{
        *    TC1[T1,..., TN] <: TC2[T'1,...,T'N]
        *  }}}
@@ -3263,8 +3113,8 @@ trait Types extends api.Types { self: SymbolTable =>
       // AM: I think we could use the `suspended` flag to avoid side-effecting during unification
       if (suspended)         // constraint accumulation is disabled
         checkSubtype(tp, origin)
-      else if (constr.instValid)  // type var is already set
-        checkSubtype(tp, constr.inst)
+      else if (instValid)  // type var is already set
+        checkSubtype(tp, inst)
       else isRelatable(tp) && {
         unifySimple || unifyFull(tp) || (
           // only look harder if our gaze is oriented toward Any
@@ -3280,17 +3130,20 @@ trait Types extends api.Types { self: SymbolTable =>
     }
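
As a hedged, source-level illustration of the "full case" unification described above (all names
invented): inferring a higher-kinded type argument requires relating the applied type variable
?F[?A] to the concrete application List[Int].

    trait Functor[F[_]] { def map[A, B](fa: F[A])(f: A => B): F[B] }
    implicit val listFunctor: Functor[List] = new Functor[List] {
      def map[A, B](fa: List[A])(f: A => B): List[B] = fa map f
    }
    def liftMap[F[_], A, B](fa: F[A])(f: A => B)(implicit F: Functor[F]): F[B] = F.map(fa)(f)
    liftMap(List(1, 2, 3))(_ + 1)   // solves F = List, A = Int from List[Int]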
 
     def registerTypeEquality(tp: Type, typeVarLHS: Boolean): Boolean = {
-//      println("regTypeEq: "+(safeToString, debugString(tp), tp.getClass, if (typeVarLHS) "in LHS" else "in RHS", if (suspended) "ZZ" else if (constr.instValid) "IV" else "")) //@MDEBUG
-//      println("constr: "+ constr)
-      def checkIsSameType(tp: Type) =
-        if(typeVarLHS) constr.inst =:= tp
-        else           tp          =:= constr.inst
+//      println("regTypeEq: "+(safeToString, debugString(tp), tp.getClass, if (typeVarLHS) "in LHS" else "in RHS", if (suspended) "ZZ" else if (instValid) "IV" else "")) //@MDEBUG
+      def checkIsSameType(tp: Type) = (
+        if (typeVarLHS) inst =:= tp
+        else            tp   =:= inst
+      )
 
       if (suspended) tp =:= origin
-      else if (constr.instValid) checkIsSameType(tp)
+      else if (instValid) checkIsSameType(tp)
       else isRelatable(tp) && {
         val newInst = wildcardToTypeVarMap(tp)
-        (constr isWithinBounds newInst) && { setInst(tp); true }
+        (constr isWithinBounds newInst) && {
+          setInst(newInst)
+          true
+        }
       }
     }
 
@@ -3301,7 +3154,7 @@ trait Types extends api.Types { self: SymbolTable =>
      * (`T` corresponds to @param sym)
      */
     def registerTypeSelection(sym: Symbol, tp: Type): Boolean = {
-      registerBound(HasTypeMember(sym.name.toTypeName, tp), false)
+      registerBound(HasTypeMember(sym.name.toTypeName, tp), isLowerBound = false)
     }
 
     private def isSkolemAboveLevel(tp: Type) = tp.typeSymbol match {
@@ -3323,15 +3176,17 @@ trait Types extends api.Types { self: SymbolTable =>
     )
 
     override def normalize: Type = (
-      if (constr.instValid) constr.inst
+      if (instValid) inst
       // get here when checking higher-order subtyping of the typevar by itself
       // TODO: check whether this ever happens?
-      else if (isHigherKinded) logResult("Normalizing HK $this")(typeFun(params, applyArgs(params map (_.typeConstructor))))
+      else if (isHigherKinded) etaExpand
       else super.normalize
     )
+    override def etaExpand: Type = (
+      if (!isHigherKinded) this
+      else logResult(s"Normalizing HK $this")(typeFun(params, applyArgs(params map (_.typeConstructor))))
+    )
     override def typeSymbol = origin.typeSymbol
-    override def isStable = origin.isStable
-    override def isVolatile = origin.isVolatile
 
     private def tparamsOfSym(sym: Symbol) = sym.info match {
       case PolyType(tparams, _) if tparams.nonEmpty =>
@@ -3352,13 +3207,13 @@ trait Types extends api.Types { self: SymbolTable =>
         if (sym.owner.isTerm && (sym.owner != encl)) Some(sym.owner) else None
       ).flatten map (s => s.decodedName + tparamsOfSym(s)) mkString "#"
     }
-    private def levelString = if (settings.explaintypes.value) level else ""
-    protected def typeVarString = originName
+    private def levelString = if (settings.explaintypes) level else ""
     override def safeToString = (
-      if ((constr eq null) || (constr.inst eq null)) "TVar<" + originName + "=null>"
-      else if (constr.inst ne NoType) "=?" + constr.inst
+      if ((constr eq null) || (inst eq null)) "TVar<" + originName + "=null>"
+      else if (inst ne NoType) "=?" + inst
       else (if(untouchable) "!?" else "?") + levelString + originName
     )
+    def originString = s"$originName in $originLocation"
     override def kind = "TypeVar"
 
     def cloneInternal = {
@@ -3376,13 +3231,9 @@ trait Types extends api.Types { self: SymbolTable =>
    *
    *  @param annotations the list of annotations on the type
    *  @param underlying the type without the annotation
-   *  @param selfsym a "self" symbol with type `underlying`;
-   *    only available if -Yself-in-annots is turned on. Can be `NoSymbol`
-   *    if it is not used.
    */
   case class AnnotatedType(override val annotations: List[AnnotationInfo],
-                           override val underlying: Type,
-                           override val selfsym: Symbol)
+                           override val underlying: Type)
   extends RewrappingTypeProxy with AnnotatedTypeApi {
 
     assert(!annotations.isEmpty, "" + underlying)
@@ -3415,9 +3266,6 @@ trait Types extends api.Types { self: SymbolTable =>
      */
     override def withoutAnnotations = underlying.withoutAnnotations
 
-    /** Set the self symbol */
-    override def withSelfsym(sym: Symbol) = copy(selfsym = sym)
-
     /** Drop the annotations on the bounds, unless the low and high
      *  bounds are exactly tp.
      */
@@ -3432,7 +3280,7 @@ trait Types extends api.Types { self: SymbolTable =>
           formals, actuals), info.args, info.assocs).setPos(info.pos))
       val underlying1 = underlying.instantiateTypeParams(formals, actuals)
       if ((annotations1 eq annotations) && (underlying1 eq underlying)) this
-      else AnnotatedType(annotations1, underlying1, selfsym)
+      else AnnotatedType(annotations1, underlying1)
     }
 
     /** Return the base type sequence of tp, dropping the annotations, unless the base type sequence of tp
@@ -3451,12 +3299,19 @@ trait Types extends api.Types { self: SymbolTable =>
   /** Creator for AnnotatedTypes.  It returns the underlying type if annotations.isEmpty
    *  rather than walking into the assertion.
    */
-  def annotatedType(annots: List[AnnotationInfo], underlying: Type, selfsym: Symbol = NoSymbol): Type =
+  def annotatedType(annots: List[AnnotationInfo], underlying: Type): Type =
     if (annots.isEmpty) underlying
-    else AnnotatedType(annots, underlying, selfsym)
+    else AnnotatedType(annots, underlying)
 
   object AnnotatedType extends AnnotatedTypeExtractor
 
+  object StaticallyAnnotatedType {
+    def unapply(tp: Type): Option[(List[AnnotationInfo], Type)] = tp.staticAnnotations match {
+      case Nil    => None
+      case annots => Some((annots, tp.withoutAnnotations))
+    }
+  }
+
   /** A class representing types with a name. When an application uses
    *  named arguments, the named argument types for calling isApplicable
    *  are represented as NamedType.
@@ -3464,39 +3319,36 @@ trait Types extends api.Types { self: SymbolTable =>
   case class NamedType(name: Name, tp: Type) extends Type {
     override def safeToString: String = name.toString +": "+ tp
   }
-
-  /** A De Bruijn index referring to a previous type argument. Only used
-   *  as a serialization format.
-   */
-  case class DeBruijnIndex(level: Int, idx: Int, args: List[Type]) extends Type {
-    override def safeToString: String = "De Bruijn index("+level+","+idx+")"
-  }
-
-  /** A binder defining data associated with De Bruijn indices. Only used
-   *  as a serialization format.
+  /** As with NamedType, used only when calling isApplicable.
+   *  Records that the application has a wildcard star (aka _*)
+   *  at the end of it.
    */
-  case class DeBruijnBinder(pnames: List[Name], ptypes: List[Type], restpe: Type) extends Type {
-    override def safeToString = {
-      val kind = if (pnames.head.isTypeName) "poly" else "method"
-      "De Bruijn "+kind+"("+(pnames mkString ",")+";"+(ptypes mkString ",")+";"+restpe+")"
-    }
+  case class RepeatedType(tp: Type) extends Type {
+    override def safeToString: String = tp + ": _*"
   }
 
   /** A temporary type representing the erasure of a user-defined value type.
    *  Created during phase erasure, eliminated again in posterasure.
    *
-   *  @param   original  The underlying type before erasure
+   *  SI-6385 Erasure's creation of bridges considers method signatures `exitingErasure`,
+   *          which contain `ErasedValueType`-s. In order to correctly consider the overriding
+   *          and overridden signatures as equivalent in `run/t6385.scala`, it is critical that
+   *          this type contains the erasure of the wrapped type, rather than the unerased type
+   *          of the value class itself, as was originally done.
+   *
+   *  @param   valueClazz        The value class symbol
+   *  @param   erasedUnderlying  The erased type of the unboxed value
    */
-  abstract case class ErasedValueType(original: TypeRef) extends UniqueType {
-    override def safeToString = "ErasedValueType("+original+")"
+  abstract case class ErasedValueType(valueClazz: Symbol, erasedUnderlying: Type) extends UniqueType {
+    override def safeToString = s"ErasedValueType($valueClazz, $erasedUnderlying)"
   }
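
A hedged sketch of the kind of overriding pair the note above concerns (reconstructed, not the
actual run/t6385.scala sources): the value-class member overrides a universal-trait member,
erasure emits a bridge, and the two signatures must compare equal exitingErasure.

    trait Greeting extends Any { def msg: String }
    class Name(val s: String) extends AnyVal with Greeting {
      def msg = s   // the bridge generated for this override mentions the erased underlying type
    }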
 
-  final class UniqueErasedValueType(original: TypeRef) extends ErasedValueType(original)
+  final class UniqueErasedValueType(valueClazz: Symbol, erasedUnderlying: Type) extends ErasedValueType(valueClazz, erasedUnderlying)
 
   object ErasedValueType {
-    def apply(original: TypeRef): Type = {
-      assert(original.sym ne NoSymbol, "ErasedValueType over NoSymbol")
-      unique(new UniqueErasedValueType(original))
+    def apply(valueClazz: Symbol, erasedUnderlying: Type): Type = {
+      assert(valueClazz ne NoSymbol, "ErasedValueType over NoSymbol")
+      unique(new UniqueErasedValueType(valueClazz, erasedUnderlying))
     }
   }
 
@@ -3524,17 +3376,19 @@ trait Types extends api.Types { self: SymbolTable =>
       (if (typeParams.isEmpty) "" else typeParamsString(this)) + super.safeToString
   }
 
-  // def mkLazyType(tparams: Symbol*)(f: Symbol => Unit): LazyType = (
-  //   if (tparams.isEmpty) new LazyType { override def complete(sym: Symbol) = f(sym) }
-  //   else new LazyPolyType(tparams.toList) { override def complete(sym: Symbol) = f(sym) }
-  // )
-
 // Creators ---------------------------------------------------------------
 
   /** Rebind symbol `sym` to an overriding member in type `pre`. */
   private def rebind(pre: Type, sym: Symbol): Symbol = {
     if (!sym.isOverridableMember || sym.owner == pre.typeSymbol) sym
-    else pre.nonPrivateMember(sym.name).suchThat(sym => sym.isType || sym.isStable) orElse sym
+    else pre.nonPrivateMember(sym.name).suchThat { sym =>
+      // SI-7928 `isModuleNotMethod` is here to avoid crashing with spuriously "overloaded" module accessor and module symbols.
+      //         These appear after refchecks eliminates ModuleDefs that implement an interface.
+      //         Here, we exclude the module symbol, which allows us to bind to the accessor.
+      // SI-8054 We must only do this after refchecks, otherwise we exclude the module symbol which does not yet have an accessor!
+      val isModuleWithAccessor = phase.refChecked && sym.isModuleNotMethod
+      sym.isType || (!isModuleWithAccessor && sym.isStable && !sym.hasVolatileType)
+    } orElse sym
   }
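
A hedged sketch (invented names) of the SI-7928 situation referenced above: after refchecks, a
module that implements an abstract member leaves behind both a module symbol and a module
accessor, and rebinding must select the accessor rather than the module symbol.

    trait Holder { def instance: AnyRef }
    class Impl extends Holder { object instance }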
 
   /** Convert a `super` prefix to a this-type if `sym` is abstract or final. */
@@ -3563,7 +3417,7 @@ trait Types extends api.Types { self: SymbolTable =>
   /** the canonical creator for a refined type with a given scope */
   def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type = {
     if (phase.erasedTypes)
-      if (parents.isEmpty) ObjectClass.tpe else parents.head
+      if (parents.isEmpty) ObjectTpe else parents.head
     else {
       val clazz = owner.newRefinementClass(pos)
       val result = RefinedType(parents, decls, clazz)
@@ -3573,10 +3427,6 @@ trait Types extends api.Types { self: SymbolTable =>
   }
 
   /** The canonical creator for a refined type with an initially empty scope.
-   *
-   *  @param parents ...
-   *  @param owner   ...
-   *  @return        ...
    */
   def refinedType(parents: List[Type], owner: Symbol): Type =
     refinedType(parents, owner, newScope, owner.pos)
@@ -3584,7 +3434,7 @@ trait Types extends api.Types { self: SymbolTable =>
   def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope) =
     if ((parents eq original.parents) && (decls eq original.decls)) original
     else {
-      val owner = if (original.typeSymbol == NoSymbol) NoSymbol else original.typeSymbol.owner
+      val owner = original.typeSymbol.owner
       val result = refinedType(parents, owner)
       val syms1 = decls.toList
       for (sym <- syms1)
@@ -3685,7 +3535,7 @@ trait Types extends api.Types { self: SymbolTable =>
     if (args.isEmpty)
       return tycon //@M! `if (args.isEmpty) tycon' is crucial (otherwise we create new types in phases after typer and then they don't get adapted (??))
 
-    /** Disabled - causes cycles in tcpoly tests. */
+    /* Disabled - causes cycles in tcpoly tests. */
     if (false && isDefinitionsInitialized) {
       assert(isUseableAsTypeArgs(args), {
         val tapp_s = s"""$tycon[${args mkString ", "}]"""
@@ -3696,43 +3546,30 @@ trait Types extends api.Types { self: SymbolTable =>
 
     tycon match {
       case TypeRef(pre, sym @ (NothingClass|AnyClass), _) => copyTypeRef(tycon, pre, sym, Nil)   //@M drop type args to Any/Nothing
-      case TypeRef(pre, sym, _)                           => copyTypeRef(tycon, pre, sym, args)
+      case TypeRef(pre, sym, Nil)                         => copyTypeRef(tycon, pre, sym, args)
+      case TypeRef(pre, sym, bogons)                      => devWarning(s"Dropping $bogons from $tycon in appliedType.") ; copyTypeRef(tycon, pre, sym, args)
       case PolyType(tparams, restpe)                      => restpe.instantiateTypeParams(tparams, args)
       case ExistentialType(tparams, restpe)               => newExistentialType(tparams, appliedType(restpe, args))
       case st: SingletonType                              => appliedType(st.widen, args) // @M TODO: what to do? see bug1
-      case RefinedType(parents, decls)                    => RefinedType(parents map (appliedType(_, args)), decls) // MO to AM: please check
-      case TypeBounds(lo, hi)                             => TypeBounds(appliedType(lo, args), appliedType(hi, args))
+      case RefinedType(parents, decls)                    => RefinedType(parents map (appliedType(_, args)), decls)   // @PP: Can this be right?
+      case TypeBounds(lo, hi)                             => TypeBounds(appliedType(lo, args), appliedType(hi, args)) // @PP: Can this be right?
      case tv @ TypeVar(_, _)                             => tv.applyArgs(args)
-      case AnnotatedType(annots, underlying, self)        => AnnotatedType(annots, appliedType(underlying, args), self)
-      case ErrorType                                      => tycon
-      case WildcardType                                   => tycon // needed for neg/t0226
+      case AnnotatedType(annots, underlying)              => AnnotatedType(annots, appliedType(underlying, args))
+      case ErrorType | WildcardType                       => tycon
       case _                                              => abort(debugString(tycon))
     }
   }
 
+  def appliedType(tycon: Type, args: Type*): Type =
+    appliedType(tycon, args.toList)
+
+  def appliedType(tyconSym: Symbol, args: List[Type]): Type =
+    appliedType(tyconSym.typeConstructor, args)
+
   /** Very convenient. */
   def appliedType(tyconSym: Symbol, args: Type*): Type =
     appliedType(tyconSym.typeConstructor, args.toList)
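
For orientation, a hedged example of the corresponding creator as exposed through runtime
reflection in 2.11 (assuming the standard scala-reflect API):

    import scala.reflect.runtime.universe._
    val listOfInt = appliedType(typeOf[List[_]].typeConstructor, typeOf[Int] :: Nil)
    assert(listOfInt =:= typeOf[List[Int]])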
 
-  /** A creator for existential types where the type arguments,
-   *  rather than being applied directly, are interpreted as the
-   *  upper bounds of unknown types.  For instance if the type argument
-   *  list given is List(AnyRefClass), the resulting type would be
-   *  e.g. Set[_ <: AnyRef] rather than Set[AnyRef] .
-   */
-  def appliedTypeAsUpperBounds(tycon: Type, args: List[Type]): Type = {
-    tycon match {
-      case TypeRef(pre, sym, _) if sameLength(sym.typeParams, args) =>
-        val eparams  = typeParamsToExistentials(sym)
-        val bounds   = args map (TypeBounds upper _)
-        foreach2(eparams, bounds)(_ setInfo _)
-
-        newExistentialType(eparams, typeRef(pre, sym, eparams map (_.tpe)))
-      case _ =>
-        appliedType(tycon, args)
-    }
-  }
-
   /** A creator and extractor for type parameterizations that strips empty type parameter lists.
    *  Use this factory method to indicate the type has kind * (it's a polymorphic value)
    *  until we start tracking explicit kinds equivalent to typeFun (except that the latter requires tparams nonEmpty).
@@ -3759,7 +3596,7 @@ trait Types extends api.Types { self: SymbolTable =>
   }
   def genPolyType(params: List[Symbol], tpe: Type): Type = GenPolyType(params, tpe)
 
-  @deprecated("use genPolyType(...) instead", "2.10.0")
+  @deprecated("use genPolyType(...) instead", "2.10.0") // Used in reflection API
   def polyType(params: List[Symbol], tpe: Type): Type = GenPolyType(params, tpe)
 
   /** A creator for anonymous type functions, where the symbol for the type function still needs to be created.
@@ -3806,131 +3643,7 @@ trait Types extends api.Types { self: SymbolTable =>
       newExistentialType(tparams1, tpe1)
     }
 
-  /** Normalize any type aliases within this type (@see Type#normalize).
-   *  Note that this depends very much on the call to "normalize", not "dealias",
-   *  so it is no longer carries the too-stealthy name "deAlias".
-   */
-  object normalizeAliases extends TypeMap {
-    def apply(tp: Type): Type = tp match {
-      case TypeRef(_, sym, _) if sym.isAliasType =>
-        def msg = if (tp.isHigherKinded) s"Normalizing type alias function $tp" else s"Dealiasing type alias $tp"
-        mapOver(logResult(msg)(tp.normalize))
-      case _                                     => mapOver(tp)
-    }
-  }
-
-  /** Remove any occurrence of type <singleton> from this type and its parents */
-  object dropSingletonType extends TypeMap {
-    def apply(tp: Type): Type = {
-      tp match {
-        case TypeRef(_, SingletonClass, _) =>
-          AnyClass.tpe
-        case tp1 @ RefinedType(parents, decls) =>
-          parents filter (_.typeSymbol != SingletonClass) match {
-            case Nil                       => AnyClass.tpe
-            case p :: Nil if decls.isEmpty => mapOver(p)
-            case ps                        => mapOver(copyRefinedType(tp1, ps, decls))
-          }
-        case tp1 =>
-          mapOver(tp1)
-      }
-    }
-  }
-
-  /** Substitutes the empty scope for any non-empty decls in the type. */
-  object dropAllRefinements extends TypeMap {
-    def apply(tp: Type): Type = tp match {
-      case rt @ RefinedType(parents, decls) if !decls.isEmpty =>
-        mapOver(copyRefinedType(rt, parents, EmptyScope))
-      case ClassInfoType(parents, decls, clazz) if !decls.isEmpty =>
-        mapOver(ClassInfoType(parents, EmptyScope, clazz))
-      case _ =>
-        mapOver(tp)
-    }
-  }
-
-  /** Type with all top-level occurrences of abstract types replaced by their bounds */
-  def abstractTypesToBounds(tp: Type): Type = tp match { // @M don't normalize here (compiler loops on pos/bug1090.scala )
-    case TypeRef(_, sym, _) if sym.isAbstractType =>
-      abstractTypesToBounds(tp.bounds.hi)
-    case TypeRef(_, sym, _) if sym.isAliasType =>
-      abstractTypesToBounds(tp.normalize)
-    case rtp @ RefinedType(parents, decls) =>
-      copyRefinedType(rtp, parents mapConserve abstractTypesToBounds, decls)
-    case AnnotatedType(_, underlying, _) =>
-      abstractTypesToBounds(underlying)
-    case _ =>
-      tp
-  }
-
-  // Set to true for A* => Seq[A]
-  //   (And it will only rewrite A* in method result types.)
-  //   This is the pre-existing behavior.
-  // Or false for Seq[A] => Seq[A]
-  //   (It will rewrite A* everywhere but method parameters.)
-  //   This is the specified behavior.
-  protected def etaExpandKeepsStar = false
-
-  /** Turn any T* types into Seq[T] except when
-   *  in method parameter position.
-   */
-  object dropRepeatedParamType extends TypeMap {
-    def apply(tp: Type): Type = tp match {
-      case MethodType(params, restpe) =>
-        // Not mapping over params
-        val restpe1 = apply(restpe)
-        if (restpe eq restpe1) tp
-        else MethodType(params, restpe1)
-      case TypeRef(_, RepeatedParamClass, arg :: Nil) =>
-        seqType(arg)
-      case _ =>
-        if (etaExpandKeepsStar) tp else mapOver(tp)
-    }
-  }
-
-  object toDeBruijn extends TypeMap {
-    private var paramStack: List[List[Symbol]] = Nil
-    def mkDebruijnBinder(params: List[Symbol], restpe: Type) = {
-      paramStack = params :: paramStack
-      try {
-        DeBruijnBinder(params map (_.name), params map (p => this(p.info)), this(restpe))
-      } finally paramStack = paramStack.tail
-    }
-    def apply(tp: Type): Type = tp match {
-      case PolyType(tparams, restpe) =>
-        mkDebruijnBinder(tparams, restpe)
-      case MethodType(params, restpe) =>
-        mkDebruijnBinder(params, restpe)
-      case TypeRef(NoPrefix, sym, args) =>
-        val level = paramStack indexWhere (_ contains sym)
-        if (level < 0) mapOver(tp)
-        else DeBruijnIndex(level, paramStack(level) indexOf sym, args mapConserve this)
-      case _ =>
-        mapOver(tp)
-    }
-  }
 
-  def fromDeBruijn(owner: Symbol) = new TypeMap {
-    private var paramStack: List[List[Symbol]] = Nil
-    def apply(tp: Type): Type = tp match {
-      case DeBruijnBinder(pnames, ptypes, restpe) =>
-        val isType = pnames.head.isTypeName
-        val newParams = for (name <- pnames) yield
-          if (isType) owner.newTypeParameter(name.toTypeName)
-          else owner.newValueParameter(name.toTermName)
-        paramStack = newParams :: paramStack
-        try {
-          foreach2(newParams, ptypes)((p, t) => p setInfo this(t))
-          val restpe1 = this(restpe)
-          if (isType) PolyType(newParams, restpe1)
-          else MethodType(newParams, restpe1)
-        } finally paramStack = paramStack.tail
-      case DeBruijnIndex(level, idx, args) =>
-        TypeRef(NoPrefix, paramStack(level)(idx), args map this)
-      case _ =>
-        mapOver(tp)
-    }
-  }
 
 // Hash consing --------------------------------------------------------------
 
@@ -3942,7 +3655,11 @@ trait Types extends api.Types { self: SymbolTable =>
     if (Statistics.canEnable) Statistics.incCounter(rawTypeCount)
     if (uniqueRunId != currentRunId) {
       uniques = util.WeakHashSet[Type](initialUniquesCapacity)
-      perRunCaches.recordCache(uniques)
+      // JZ: We used to register this as a perRunCache so it would be cleared eagerly at
+      // the end of the compilation run. But, that facility didn't actually clear this map (SI-8129)!
+      // When I fixed that bug, run/tpeCache-tyconCache.scala started failing. Why was that?
+      // I've removed the registration for now. I don't think it's particularly harmful anymore
+      // as a) this is now a weak set, and b) it is discarded completely before the next run.
       uniqueRunId = currentRunId
     }
     (uniques findEntryOrUpdate tp).asInstanceOf[T]
@@ -3950,124 +3667,9 @@ trait Types extends api.Types { self: SymbolTable =>
 
 // Helper Classes ---------------------------------------------------------
 
-  /** @PP: Unable to see why these apparently constant types should need vals
-   *  in every TypeConstraint, I lifted them out.
-   */
-  private lazy val numericLoBound = IntClass.tpe
-  private lazy val numericHiBound = intersectionType(List(ByteClass.tpe, CharClass.tpe), ScalaPackageClass)
-
-  /** A class expressing upper and lower bounds constraints of type variables,
-   * as well as their instantiations.
-   */
-  class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type, avoidWidening0: Boolean = false) {
-    def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
-    def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi))
-    def this() = this(List(), List())
-
-    /*  Syncnote: Type constraints are assumed to be used from only one
-     *  thread. They are not exposed in api.Types and are used only locally
-     *  in operations that are exposed from types. Hence, no syncing of any
-     *  variables should be necessary.
-     */
-
-    /** Guard these lists against AnyClass and NothingClass appearing,
-     *  else loBounds.isEmpty will have different results for an empty
-     *  constraint and one with Nothing as a lower bound.  [Actually
-     *  guarding addLoBound/addHiBound somehow broke raw types so it
-     *  only guards against being created with them.]
-     */
-    private var lobounds = lo0 filterNot typeIsNothing
-    private var hibounds = hi0 filterNot typeIsAny
-    private var numlo = numlo0
-    private var numhi = numhi0
-    private var avoidWidening = avoidWidening0
-
-    def loBounds: List[Type] = if (numlo == NoType) lobounds else numlo :: lobounds
-    def hiBounds: List[Type] = if (numhi == NoType) hibounds else numhi :: hibounds
-    def avoidWiden: Boolean = avoidWidening
-
-    def addLoBound(tp: Type, isNumericBound: Boolean = false) {
-      // For some reason which is still a bit fuzzy, we must let Nothing through as
-      // a lower bound despite the fact that Nothing is always a lower bound.  My current
-      // supposition is that the side-effecting type constraint accumulation mechanism
-      // depends on these subtype tests being performed to make forward progress when
-      // there are mutually recursive type vars.
-      // See pos/t6367 and pos/t6499 for the competing test cases.
-      val mustConsider = tp.typeSymbol match {
-        case NothingClass => true
-        case _            => !(lobounds contains tp)
-      }
-      if (mustConsider) {
-        if (isNumericBound && isNumericValueType(tp)) {
-          if (numlo == NoType || isNumericSubType(numlo, tp))
-            numlo = tp
-          else if (!isNumericSubType(tp, numlo))
-            numlo = numericLoBound
-        }
-        else lobounds ::= tp
-      }
-    }
-
-    def checkWidening(tp: Type) {
-      if(tp.isStable) avoidWidening = true
-      else tp match {
-        case HasTypeMember(_, _) => avoidWidening = true
-        case _ =>
-      }
-    }
-
-    def addHiBound(tp: Type, isNumericBound: Boolean = false) {
-      // My current test case only demonstrates the need to let Nothing through as
-      // a lower bound, but I suspect the situation is symmetrical.
-      val mustConsider = tp.typeSymbol match {
-        case AnyClass => true
-        case _        => !(hibounds contains tp)
-      }
-      if (mustConsider) {
-        checkWidening(tp)
-        if (isNumericBound && isNumericValueType(tp)) {
-          if (numhi == NoType || isNumericSubType(tp, numhi))
-            numhi = tp
-          else if (!isNumericSubType(numhi, tp))
-            numhi = numericHiBound
-        }
-        else hibounds ::= tp
-      }
-    }
-
-    def isWithinBounds(tp: Type): Boolean =
-      lobounds.forall(_ <:< tp) &&
-      hibounds.forall(tp <:< _) &&
-      (numlo == NoType || (numlo weak_<:< tp)) &&
-      (numhi == NoType || (tp weak_<:< numhi))
-
-    var inst: Type = NoType // @M reduce visibility?
-
-    def instValid = (inst ne null) && (inst ne NoType)
-
-    def cloneInternal = {
-      val tc = new TypeConstraint(lobounds, hibounds, numlo, numhi, avoidWidening)
-      tc.inst = inst
-      tc
-    }
-
-    override def toString = {
-      val boundsStr = {
-        val lo    = loBounds filterNot typeIsNothing
-        val hi    = hiBounds filterNot typeIsAny
-        val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")"))
-        val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")"))
-
-        lostr ++ histr mkString ("[", " | ", "]")
-      }
-      if (inst eq NoType) boundsStr
-      else boundsStr + " _= " + inst.safeToString
-    }
-  }
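At the language level these constraints are what accumulate during type inference; a minimal sketch (the pick method is illustrative only):

    def pick[A](x: A, y: A): A = x
    // The type variable for A collects the lower bounds Int and String,
    // so A is instantiated to their least upper bound, Any:
    val r = pick(1, "one")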
-
   class TypeUnwrapper(poly: Boolean, existential: Boolean, annotated: Boolean, nullary: Boolean) extends (Type => Type) {
     def apply(tp: Type): Type = tp match {
-      case AnnotatedType(_, underlying, _) if annotated   => apply(underlying)
+      case AnnotatedType(_, underlying) if annotated      => apply(underlying)
       case ExistentialType(_, underlying) if existential  => apply(underlying)
       case PolyType(_, underlying) if poly                => apply(underlying)
       case NullaryMethodType(underlying) if nullary       => apply(underlying)
@@ -4075,309 +3677,69 @@ trait Types extends api.Types { self: SymbolTable =>
     }
   }
   class ClassUnwrapper(existential: Boolean) extends TypeUnwrapper(poly = true, existential, annotated = true, nullary = false) {
-    override def apply(tp: Type) = super.apply(tp.normalize)
+    override def apply(tp: Type) = super.apply(tp.normalize) // normalize is required here
   }
 
   object        unwrapToClass extends ClassUnwrapper(existential = true) { }
   object  unwrapToStableClass extends ClassUnwrapper(existential = false) { }
   object   unwrapWrapperTypes extends  TypeUnwrapper(true, true, true, true) { }
 
-  trait AnnotationFilter extends TypeMap {
-    def keepAnnotation(annot: AnnotationInfo): Boolean
-
-    override def mapOver(annot: AnnotationInfo) =
-      if (keepAnnotation(annot)) super.mapOver(annot)
-      else UnmappableAnnotation
-  }
-
-  trait KeepOnlyTypeConstraints extends AnnotationFilter {
-    // filter keeps only type constraint annotations
-    def keepAnnotation(annot: AnnotationInfo) = annot matches TypeConstraintClass
-  }
-
-  trait VariantTypeMap extends TypeMap {
-    private[this] var _variance = 1
-
-    override def variance = _variance
-    def variance_=(x: Int) = _variance = x
-
-    override protected def noChangeToSymbols(origSyms: List[Symbol]) =
-      //OPT inline from forall to save on #closures
-      origSyms match {
-        case sym :: rest =>
-          val v = variance
-          if (sym.isAliasType) variance = 0
-          val result = this(sym.info)
-          variance = v
-          (result eq sym.info) && noChangeToSymbols(rest)
-        case _ =>
-          true
-      }
-
-    override protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
-      map2Conserve(args, tparams) { (arg, tparam) =>
-        val v = variance
-        if (tparam.isContravariant) variance = -variance
-        else if (!tparam.isCovariant) variance = 0
-        val arg1 = this(arg)
-        variance = v
-        arg1
-      }
-
-    /** Map this function over given type */
-    override def mapOver(tp: Type): Type = tp match {
-      case MethodType(params, result) =>
-        variance = -variance
-        val params1 = mapOver(params)
-        variance = -variance
-        val result1 = this(result)
-        if ((params1 eq params) && (result1 eq result)) tp
-        else copyMethodType(tp, params1, result1.substSym(params, params1))
-      case PolyType(tparams, result) =>
-        variance = -variance
-        val tparams1 = mapOver(tparams)
-        variance = -variance
-        var result1 = this(result)
-        if ((tparams1 eq tparams) && (result1 eq result)) tp
-        else PolyType(tparams1, result1.substSym(tparams, tparams1))
-      case TypeBounds(lo, hi) =>
-        variance = -variance
-        val lo1 = this(lo)
-        variance = -variance
-        val hi1 = this(hi)
-        if ((lo1 eq lo) && (hi1 eq hi)) tp
-        else TypeBounds(lo1, hi1)
-      case tr @ TypeRef(pre, sym, args) =>
-        val pre1 = this(pre)
-        val args1 =
-          if (args.isEmpty)
-            args
-          else if (variance == 0) // fast & safe path: don't need to look at typeparams
-            args mapConserve this
-          else {
-            val tparams = sym.typeParams
-            if (tparams.isEmpty) args
-            else mapOverArgs(args, tparams)
-          }
-        if ((pre1 eq pre) && (args1 eq args)) tp
-        else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
-      case _ =>
-        super.mapOver(tp)
+  def elementExtract(container: Symbol, tp: Type): Type = {
+    assert(!container.isAliasType, container)
+    unwrapWrapperTypes(tp baseType container).dealiasWiden match {
+      case TypeRef(_, `container`, arg :: Nil)  => arg
+      case _                                    => NoType
     }
   }
-
-  // todo. move these into scala.reflect.api
-
-  /** A prototype for mapping a function over all possible types
-   */
-  abstract class TypeMap extends (Type => Type) {
-    def apply(tp: Type): Type
-
-    /** Mix in VariantTypeMap if you want variances to be significant.
-     */
-    def variance = 0
-
-    /** Map this function over given type */
-    def mapOver(tp: Type): Type = tp match {
-      case tr @ TypeRef(pre, sym, args) =>
-        val pre1 = this(pre)
-        val args1 = args mapConserve this
-        if ((pre1 eq pre) && (args1 eq args)) tp
-        else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
-      case ThisType(_) => tp
-      case SingleType(pre, sym) =>
-        if (sym.isPackageClass) tp // short path
-        else {
-          val pre1 = this(pre)
-          if (pre1 eq pre) tp
-          else singleType(pre1, sym)
-        }
-      case MethodType(params, result) =>
-        val params1 = mapOver(params)
-        val result1 = this(result)
-        if ((params1 eq params) && (result1 eq result)) tp
-        else copyMethodType(tp, params1, result1.substSym(params, params1))
-      case PolyType(tparams, result) =>
-        val tparams1 = mapOver(tparams)
-        var result1 = this(result)
-        if ((tparams1 eq tparams) && (result1 eq result)) tp
-        else PolyType(tparams1, result1.substSym(tparams, tparams1))
-      case NullaryMethodType(result) =>
-        val result1 = this(result)
-        if (result1 eq result) tp
-        else NullaryMethodType(result1)
-      case ConstantType(_) => tp
-      case SuperType(thistp, supertp) =>
-        val thistp1 = this(thistp)
-        val supertp1 = this(supertp)
-        if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp
-        else SuperType(thistp1, supertp1)
-      case TypeBounds(lo, hi) =>
-        val lo1 = this(lo)
-        val hi1 = this(hi)
-        if ((lo1 eq lo) && (hi1 eq hi)) tp
-        else TypeBounds(lo1, hi1)
-      case BoundedWildcardType(bounds) =>
-        val bounds1 = this(bounds)
-        if (bounds1 eq bounds) tp
-        else BoundedWildcardType(bounds1.asInstanceOf[TypeBounds])
-      case rtp @ RefinedType(parents, decls) =>
-        val parents1 = parents mapConserve this
-        val decls1 = mapOver(decls)
-        //if ((parents1 eq parents) && (decls1 eq decls)) tp
-        //else refinementOfClass(tp.typeSymbol, parents1, decls1)
-        copyRefinedType(rtp, parents1, decls1)
-      case ExistentialType(tparams, result) =>
-        val tparams1 = mapOver(tparams)
-        var result1 = this(result)
-        if ((tparams1 eq tparams) && (result1 eq result)) tp
-        else newExistentialType(tparams1, result1.substSym(tparams, tparams1))
-      case OverloadedType(pre, alts) =>
-        val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre)
-        if (pre1 eq pre) tp
-        else OverloadedType(pre1, alts)
-      case AntiPolyType(pre, args) =>
-        val pre1 = this(pre)
-        val args1 = args mapConserve (this)
-        if ((pre1 eq pre) && (args1 eq args)) tp
-        else AntiPolyType(pre1, args1)
-      case tv @ TypeVar(_, constr) =>
-        if (constr.instValid) this(constr.inst)
-        else tv.applyArgs(mapOverArgs(tv.typeArgs, tv.params))  //@M !args.isEmpty implies !typeParams.isEmpty
-      case NotNullType(tp) =>
-        val tp1 = this(tp)
-        if (tp1 eq tp) tp
-        else NotNullType(tp1)
-      case AnnotatedType(annots, atp, selfsym) =>
-        val annots1 = mapOverAnnotations(annots)
-        val atp1 = this(atp)
-        if ((annots1 eq annots) && (atp1 eq atp)) tp
-        else if (annots1.isEmpty) atp1
-        else AnnotatedType(annots1, atp1, selfsym)
-      case DeBruijnIndex(shift, idx, args) =>
-        val args1 = args mapConserve this
-        if (args1 eq args) tp
-        else DeBruijnIndex(shift, idx, args1)
-/*
-      case ErrorType => tp
-      case WildcardType => tp
-      case NoType => tp
-      case NoPrefix => tp
-      case ErasedSingleType(sym) => tp
-*/
-      case _ =>
-        tp
-        // throw new Error("mapOver inapplicable for " + tp);
-    }
-
-    protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
-      args mapConserve this
-
-    /** Called by mapOver to determine whether the original symbols can
-     *  be returned, or whether they must be cloned.  Overridden in VariantTypeMap.
-     */
-    protected def noChangeToSymbols(origSyms: List[Symbol]) =
-      origSyms forall (sym => sym.info eq this(sym.info))
-
-    /** Map this function over given scope */
-    def mapOver(scope: Scope): Scope = {
-      val elems = scope.toList
-      val elems1 = mapOver(elems)
-      if (elems1 eq elems) scope
-      else newScopeWith(elems1: _*)
-    }
-
-    /** Map this function over given list of symbols */
-    def mapOver(origSyms: List[Symbol]): List[Symbol] = {
-      // fast path in case nothing changes due to map
-      if (noChangeToSymbols(origSyms)) origSyms
-      // map is not the identity --> do cloning properly
-      else cloneSymbolsAndModify(origSyms, TypeMap.this)
-    }
-
-    def mapOver(annot: AnnotationInfo): AnnotationInfo = {
-      val AnnotationInfo(atp, args, assocs) = annot
-      val atp1  = mapOver(atp)
-      val args1 = mapOverAnnotArgs(args)
-      // there is no need to rewrite assocs, as they are constants
-
-      if ((args eq args1) && (atp eq atp1)) annot
-      else if (args1.isEmpty && args.nonEmpty) UnmappableAnnotation  // some annotation arg was unmappable
-      else AnnotationInfo(atp1, args1, assocs) setPos annot.pos
+  def elementExtractOption(container: Symbol, tp: Type): Option[Type] = {
+    elementExtract(container, tp) match {
+      case NoType => None
+      case tp => Some(tp)
     }
-
-    def mapOverAnnotations(annots: List[AnnotationInfo]): List[AnnotationInfo] = {
-      val annots1 = annots mapConserve mapOver
-      if (annots1 eq annots) annots
-      else annots1 filterNot (_ eq UnmappableAnnotation)
+  }
+  def elementTest(container: Symbol, tp: Type)(f: Type => Boolean): Boolean = {
+    elementExtract(container, tp) match {
+      case NoType => false
+      case tp => f(tp)
     }
-
-    /** Map over a set of annotation arguments.  If any
-     *  of the arguments cannot be mapped, then return Nil.  */
-    def mapOverAnnotArgs(args: List[Tree]): List[Tree] = {
-      val args1 = args mapConserve mapOver
-      if (args1 contains UnmappableTree) Nil
-      else args1
+  }
+  def elementTransform(container: Symbol, tp: Type)(f: Type => Type): Type = {
+    elementExtract(container, tp) match {
+      case NoType => NoType
+      case tp => f(tp)
     }
+  }
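A hedged analogue of elementExtract written against the public runtime-reflection API (not the internal definition above), showing the same baseType-plus-TypeRef pattern:

    import scala.reflect.runtime.universe._

    // Recover the single type argument of an application of `container`.
    def elementOf(container: Symbol, tp: Type): Option[Type] =
      tp.baseType(container) match {
        case TypeRef(_, `container`, arg :: Nil) => Some(arg)
        case _                                   => None
      }

    elementOf(typeOf[Option[Any]].typeSymbol, typeOf[Option[String]])  // Some(String)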
 
-    def mapOver(tree: Tree): Tree =
-      mapOver(tree, () => return UnmappableTree)
-
-    /** Map a tree that is part of an annotation argument.
-     *  If the tree cannot be mapped, then invoke giveup().
-     *  The default is to transform the tree with
-     *  TypeMapTransformer.
-     */
-    def mapOver(tree: Tree, giveup: ()=>Nothing): Tree =
-      (new TypeMapTransformer).transform(tree)
-
-    /** This transformer leaves the tree alone except to remap
-     *  its types. */
-    class TypeMapTransformer extends Transformer {
-      override def transform(tree: Tree) = {
-        val tree1 = super.transform(tree)
-        val tpe1 = TypeMap.this(tree1.tpe)
-        if ((tree eq tree1) && (tree.tpe eq tpe1))
-          tree
-        else
-          tree1.shallowDuplicate.setType(tpe1)
-      }
+  def transparentShallowTransform(container: Symbol, tp: Type)(f: Type => Type): Type = {
+    def loop(tp: Type): Type = tp match {
+      case tp @ AnnotatedType(_, underlying)        => tp.copy(underlying = loop(underlying))
+      case tp @ ExistentialType(_, underlying)      => tp.copy(underlying = loop(underlying))
+      case tp @ PolyType(_, resultType)             => tp.copy(resultType = loop(resultType))
+      case tp @ NullaryMethodType(resultType)       => tp.copy(resultType = loop(resultType))
+      case tp                                       => elementTransform(container, tp)(el => appliedType(container, f(el))).orElse(f(tp))
     }
+    loop(tp)
   }
 
-  abstract class TypeTraverser extends TypeMap {
-    def traverse(tp: Type): Unit
-    def apply(tp: Type): Type = { traverse(tp); tp }
-  }
+  /** Repack existential types, otherwise they sometimes get unpacked in the
+   *  wrong location (type inference comes up with an unexpected skolem)
+   */
+  def repackExistential(tp: Type): Type = (
+    if (tp == NoType) tp
+    else existentialAbstraction(existentialsInType(tp), tp)
+  )
 
-  abstract class TypeTraverserWithResult[T] extends TypeTraverser {
-    def result: T
-    def clear(): Unit
-  }
+  def containsExistential(tpe: Type) = tpe exists typeIsExistentiallyBound
+  def existentialsInType(tpe: Type) = tpe withFilter typeIsExistentiallyBound map (_.typeSymbol)
 
-  abstract class TypeCollector[T](initial: T) extends TypeTraverser {
-    var result: T = _
-    def collect(tp: Type) = {
-      result = initial
-      traverse(tp)
-      result
-    }
+  private def isDummyOf(tpe: Type)(targ: Type) = {
+    val sym = targ.typeSymbol
+    sym.isTypeParameter && sym.owner == tpe.typeSymbol
+  }
+  def isDummyAppliedType(tp: Type) = tp.dealias match {
+    case tr @ TypeRef(_, _, args) => args exists isDummyOf(tr)
+    case _                        => false
   }
-
-  /** A collector that tests for existential types appearing at given variance in a type
-   *  @PP: Commenting out due to not being used anywhere.
-   */
-  // class ContainsVariantExistentialCollector(v: Int) extends TypeCollector(false) with VariantTypeMap {
-  //   variance = v
-  //
-  //   def traverse(tp: Type) = tp match {
-  //     case ExistentialType(_, _) if (variance == v) => result = true
-  //     case _ => mapOver(tp)
-  //   }
-  // }
-  //
-  // val containsCovariantExistentialCollector = new ContainsVariantExistentialCollector(1)
-  // val containsContravariantExistentialCollector = new ContainsVariantExistentialCollector(-1)
 
   def typeParamsToExistentials(clazz: Symbol, tparams: List[Symbol]): List[Symbol] = {
     val eparams = mapWithIndex(tparams)((tparam, i) =>
@@ -4388,954 +3750,140 @@ trait Types extends api.Types { self: SymbolTable =>
   def typeParamsToExistentials(clazz: Symbol): List[Symbol] =
     typeParamsToExistentials(clazz, clazz.typeParams)
 
+  def isRawIfWithoutArgs(sym: Symbol) = sym.isClass && sym.typeParams.nonEmpty && sym.isJavaDefined
+  /** Is type tp a ''raw type''? */
   //  note: it's important to write the two tests in this order,
   //  as only typeParams forces the classfile to be read. See #400
-  private def isRawIfWithoutArgs(sym: Symbol) =
-    sym.isClass && sym.typeParams.nonEmpty && sym.isJavaDefined
+  def isRawType(tp: Type) = !phase.erasedTypes && (tp match {
+    case TypeRef(_, sym, Nil) => isRawIfWithoutArgs(sym)
+    case _                    => false
+  })
 
-  def isRaw(sym: Symbol, args: List[Type]) =
-    !phase.erasedTypes && isRawIfWithoutArgs(sym) && args.isEmpty
+  @deprecated("Use isRawType", "2.10.1") // presently used by sbt
+  def isRaw(sym: Symbol, args: List[Type]) = (
+       !phase.erasedTypes
+    && args.isEmpty
+    && isRawIfWithoutArgs(sym)
+  )
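For reference, the language-level effect of the raw-type handling: a parameterized Java class used without type arguments is seen from Scala as an existential, e.g. the JDK's raw-typed Collections.EMPTY_LIST:

    // Collections.EMPTY_LIST is declared with the raw type java.util.List;
    // Scala completes it to the existential java.util.List[_]:
    val xs: java.util.List[_] = java.util.Collections.EMPTY_LIST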
 
-  /** Is type tp a ''raw type''? */
-  def isRawType(tp: Type) = tp match {
-    case TypeRef(_, sym, args) => isRaw(sym, args)
-    case _ => false
-  }
+  def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe)))
 
-  /** The raw to existential map converts a ''raw type'' to an existential type.
-   *  It is necessary because we might have read a raw type of a
-   *  parameterized Java class from a class file. At the time we read the type
-   *  the corresponding class file might still not be read, so we do not
-   *  know what the type parameters of the type are. Therefore
-   *  the conversion of raw types to existential types might not have taken place
-   *  in ClassfileParser.sigToType (where it is usually done).
+  /**
+   * A more persistent version of `Type#memberType` which does not require
+   * that the symbol is a direct member of the prefix.
+   *
+   * For instance:
+   *
+   * {{{
+   * class C[T] {
+   *   sealed trait F[A]
+   *   object X {
+   *     object S1 extends F[T]
+   *   }
+   *   class S2 extends F[T]
+   * }
+   * object O extends C[Int] {
+   *   def foo(f: F[Int]) = f match {...} // need to enumerate sealed subtypes of the scrutinee here.
+   * }
+   * class S3 extends O.F[String]
+   *
+   * nestedMemberType(<S1>, <O.type>, <C>) = O.X.S1.type
+   * nestedMemberType(<S2>, <O.type>, <C>) = O.S2.type
+   * nestedMemberType(<S3>, <O.type>, <C>) = S3.type
+   * }}}
+   *
+   * @param sym    The symbol of the subtype
+   * @param pre    The prefix from which the symbol is seen
+   * @param owner
    */
-  def rawToExistential = new TypeMap {
-    private var expanded = immutable.Set[Symbol]()
-    def apply(tp: Type): Type = tp match {
-      case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) =>
-        if (expanded contains sym) AnyRefClass.tpe
-        else try {
-          expanded += sym
-          val eparams = mapOver(typeParamsToExistentials(sym))
-          existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))
-        } finally {
-          expanded -= sym
+  def nestedMemberType(sym: Symbol, pre: Type, owner: Symbol): Type = {
+    def loop(tp: Type): Type =
+      if (tp.isTrivial) tp
+      else if (tp.prefix.typeSymbol isNonBottomSubClass owner) {
+        val widened = tp match {
+          case _: ConstantType => tp // Java enum constants: don't widen to the enum type!
+          case _               => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect.
         }
-      case _ =>
-        mapOver(tp)
-    }
-  }
-
-  /** Used by existentialAbstraction.
-   */
-  class ExistentialExtrapolation(tparams: List[Symbol]) extends VariantTypeMap {
-    private val occurCount = mutable.HashMap[Symbol, Int]()
-    private def countOccs(tp: Type) = {
-      tp foreach {
-        case TypeRef(_, sym, _) =>
-          if (tparams contains sym)
-            occurCount(sym) += 1
-        case _ => ()
-      }
-    }
-    def extrapolate(tpe: Type): Type = {
-      tparams foreach (t => occurCount(t) = 0)
-      countOccs(tpe)
-      for (tparam <- tparams)
-        countOccs(tparam.info)
-
-      apply(tpe)
-    }
-
-    def apply(tp: Type): Type = {
-      val tp1 = mapOver(tp)
-      if (variance == 0) tp1
-      else tp1 match {
-        case TypeRef(pre, sym, args) if tparams contains sym =>
-          val repl = if (variance == 1) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo
-          //println("eliminate "+sym+"/"+repl+"/"+occurCount(sym)+"/"+(tparams exists (repl.contains)))//DEBUG
-          if (!repl.typeSymbol.isBottomClass && occurCount(sym) == 1 && !(tparams exists (repl.contains)))
-            repl
-          else tp1
-        case _ =>
-          tp1
+        val memType = widened asSeenFrom (pre, tp.typeSymbol.owner)
+        if (tp eq widened) memType else memType.narrow
       }
-    }
-    override def mapOver(tp: Type): Type = tp match {
-      case SingleType(pre, sym) =>
-        if (sym.isPackageClass) tp // short path
-        else {
-          val pre1 = this(pre)
-          if ((pre1 eq pre) || !pre1.isStable) tp
-          else singleType(pre1, sym)
-        }
-      case _ => super.mapOver(tp)
-    }
+      else loop(tp.prefix) memberType tp.typeSymbol
 
-    // Do not discard the types of existential ident's. The
-    // symbol of the Ident itself cannot be listed in the
-    // existential's parameters, so the resulting existential
-    // type would be ill-formed.
-    override def mapOver(tree: Tree) = tree match {
-      case Ident(_) if tree.tpe.isStable => tree
-      case _                             => super.mapOver(tree)
-    }
+    val result = loop(sym.tpeHK)
+    assert(sym.isTerm || result.typeSymbol == sym, s"($result).typeSymbol = ${result.typeSymbol}; expected ${sym}")
+    result
   }
 
-  def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe)))
+  class MissingAliasControl extends ControlThrowable
+  val missingAliasException = new MissingAliasControl
+  class MissingTypeControl extends ControlThrowable
 
-  /** Might the given symbol be important when calculating the prefix
-   *  of a type? When tp.asSeenFrom(pre, clazz) is called on `tp`,
-   *  the result will be `tp` unchanged if `pre` is trivial and `clazz`
-   *  is a symbol such that isPossiblePrefix(clazz) == false.
-   */
-  def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass
+// Helper Methods  -------------------------------------------------------------
 
-  private def skipPrefixOf(pre: Type, clazz: Symbol) = (
-    (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
+  /** The maximum allowable depth of lubs or glbs over types `ts`.
+    */
+  def lubDepth(ts: List[Type]): Depth = {
+    val td = typeDepth(ts)
+    val bd = baseTypeSeqDepth(ts)
+    lubDepthAdjust(td, td max bd)
+  }
+
+  /** The maximum allowable depth of lubs or glbs over given types,
+   *  as a function over the maximum depth `td` of these types, and
+   *  the maximum depth `bd` of all types in the base type sequences of these types.
+   */
+  private def lubDepthAdjust(td: Depth, bd: Depth): Depth = (
+    if (settings.XfullLubs) bd
+    else if (bd <= Depth(3)) bd
+    else if (bd <= Depth(5)) td max bd.decr
+    else if (bd <= Depth(7)) td max (bd decr 2)
+    else td.decr max (bd decr 3)
   )
 
-  /** A map to compute the asSeenFrom method  */
-  class AsSeenFromMap(pre: Type, clazz: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
-    var capturedSkolems: List[Symbol] = List()
-    var capturedParams: List[Symbol] = List()
-
-    override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
-      object annotationArgRewriter extends TypeMapTransformer {
-        private def canRewriteThis(sym: Symbol) = (
-             (sym isNonBottomSubClass clazz)
-          && (pre.widen.typeSymbol isNonBottomSubClass sym)
-          && (pre.isStable || giveup())
+  private def symTypeDepth(syms: List[Symbol]): Depth  = typeDepth(syms map (_.info))
+  private def typeDepth(tps: List[Type]): Depth        = maxDepth(tps)
+  private def baseTypeSeqDepth(tps: List[Type]): Depth = maxbaseTypeSeqDepth(tps)
+
+  /** Is intersection of given types populated? That is,
+   *  for all types tp1, tp2 in intersection
+   *    for all common base classes bc of tp1 and tp2
+   *      let bt1, bt2 be the base types of tp1, tp2 relative to class bc
+   *      Then:
+   *        bt1 and bt2 have the same prefix, and
+   *        any corresponding non-variant type arguments of bt1 and bt2 are the same
+   */
+  def isPopulated(tp1: Type, tp2: Type): Boolean = {
+    def isConsistent(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
+      case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
+        assert(sym1 == sym2, (sym1, sym2))
+        (    pre1 =:= pre2
+          && forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) =>
+               // if left-hand argument is a typevar, make it compatible with variance
+               // this is for more precise pattern matching
+               // todo: work this in the spec of this method
+               // also: think what happens if there are embedded typevars?
+               if (tparam.variance.isInvariant)
+                 arg1 =:= arg2
+               else !arg1.isInstanceOf[TypeVar] || {
+                 if (tparam.variance.isContravariant) arg1 <:< arg2
+                 else arg2 <:< arg1
+               }
+             }
         )
-        // what symbol should really be used?
-        private def newTermSym() = {
-          val p = pre.typeSymbol
-          p.owner.newValue(p.name.toTermName, p.pos) setInfo pre
-        }
-        /** Rewrite `This` trees in annotation argument trees */
-        override def transform(tree: Tree): Tree = super.transform(tree) match {
-          case This(_) if canRewriteThis(tree.symbol) => gen.mkAttributedQualifier(pre, newTermSym())
-          case tree                                   => tree
-        }
-      }
-      annotationArgRewriter.transform(tree)
+      case (et: ExistentialType, _) =>
+        et.withTypeVars(isConsistent(_, tp2))
+      case (_, et: ExistentialType) =>
+        et.withTypeVars(isConsistent(tp1, _))
     }
 
-    def stabilize(pre: Type, clazz: Symbol): Type = {
-      capturedParams find (_.owner == clazz) match {
-        case Some(qvar) => qvar.tpe
-        case _          =>
-          val qvar = clazz freshExistential nme.SINGLETON_SUFFIX setInfo singletonBounds(pre)
-          capturedParams ::= qvar
-          qvar.tpe
-      }
-    }
+    def check(tp1: Type, tp2: Type) = (
+      if (tp1.typeSymbol.isClass && tp1.typeSymbol.hasFlag(FINAL))
+        tp1 <:< tp2 || isNumericValueClass(tp1.typeSymbol) && isNumericValueClass(tp2.typeSymbol)
+      else tp1.baseClasses forall (bc =>
+        tp2.baseTypeIndex(bc) < 0 || isConsistent(tp1.baseType(bc), tp2.baseType(bc)))
+    )
 
-    def apply(tp: Type): Type =
-      tp match {
-        case ThisType(sym) =>
-          def toPrefix(pre: Type, clazz: Symbol): Type =
-            if (skipPrefixOf(pre, clazz)) tp
-            else if ((sym isNonBottomSubClass clazz) &&
-                     (pre.widen.typeSymbol isNonBottomSubClass sym)) {
-              val pre1 = pre match {
-                case SuperType(thistp, _) => thistp
-                case _ => pre
-              }
-              if (!(pre1.isStable ||
-                    pre1.typeSymbol.isPackageClass ||
-                    pre1.typeSymbol.isModuleClass && pre1.typeSymbol.isStatic)) {
-                stabilize(pre1, sym)
-              } else {
-                pre1
-              }
-            } else {
-              toPrefix(pre.baseType(clazz).prefix, clazz.owner)
-            }
-          toPrefix(pre, clazz)
-        case SingleType(pre, sym) =>
-          if (sym.isPackageClass) tp // short path
-          else {
-            val pre1 = this(pre)
-            if (pre1 eq pre) tp
-            else if (pre1.isStable) singleType(pre1, sym)
-            else pre1.memberType(sym).resultType //todo: this should be rolled into existential abstraction
-          }
-        // AM: Martin, is this description accurate?
-        // walk the owner chain of `clazz` (the original argument to asSeenFrom) until we find the type param's owner (while rewriting pre as we crawl up the owner chain)
-        // once we're at the owner, extract the information that pre encodes about the type param,
-        // by minimally subsuming pre to the type instance of the class that owns the type param,
-        // the type we're looking for is the type instance's type argument at the position corresponding to the type parameter
-        // optimisation: skip this type parameter if it's not owned by a class, as those params are not influenced by the prefix through which they are seen
-        // (concretely: type params of anonymous type functions, which currently can only arise from normalising type aliases, are owned by the type alias of which they are the eta-expansion)
-        // (skolems also aren't affected: they are ruled out by the isTypeParameter check)
-        case TypeRef(prefix, sym, args) if (sym.isTypeParameter && sym.owner.isClass) =>
-          def toInstance(pre: Type, clazz: Symbol): Type =
-            if (skipPrefixOf(pre, clazz)) mapOver(tp)
-            //@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary
-            else {
-              def throwError = abort("" + tp + sym.locationString + " cannot be instantiated from " + pre.widen)
-
-              val symclazz = sym.owner
-              if (symclazz == clazz && !pre.widen.isInstanceOf[TypeVar] && (pre.widen.typeSymbol isNonBottomSubClass symclazz)) {
-                // have to deconst because it may be a Class[T].
-                pre.baseType(symclazz).deconst match {
-                  case TypeRef(_, basesym, baseargs) =>
-
-                   def instParam(ps: List[Symbol], as: List[Type]): Type =
-                      if (ps.isEmpty) {
-                        if (forInteractive) {
-                          val saved = settings.uniqid.value
-                          try {
-                            settings.uniqid.value = true
-                            println("*** stale type parameter: " + tp + sym.locationString + " cannot be instantiated from " + pre.widen)
-                            println("*** confused with params: " + sym + " in " + sym.owner + " not in " + ps + " of " + basesym)
-                            println("*** stacktrace = ")
-                            new Error().printStackTrace()
-                          } finally settings.uniqid.value = saved
-                          instParamRelaxed(basesym.typeParams, baseargs)
-                        } else throwError
-                      } else if (sym eq ps.head)
-                        // @M! don't just replace the whole thing, might be followed by type application
-                        appliedType(as.head, args mapConserve (this)) // @M: was as.head
-                      else instParam(ps.tail, as.tail)
-
-                    /** Relaxed version of instParams which matches on names not symbols.
-                     *  This is a last fallback in interactive mode because races in calls
-                     *  from the IDE to the compiler may in rare cases lead to symbols referring
-                     *  to type parameters that are no longer current.
-                     */
-                    def instParamRelaxed(ps: List[Symbol], as: List[Type]): Type =
-                      if (ps.isEmpty) throwError
-                      else if (sym.name == ps.head.name)
-                        // @M! don't just replace the whole thing, might be followed by type application
-                        appliedType(as.head, args mapConserve (this)) // @M: was as.head
-                      else instParamRelaxed(ps.tail, as.tail)
-
-                    //Console.println("instantiating " + sym + " from " + basesym + " with " + basesym.typeParams + " and " + baseargs+", pre = "+pre+", symclazz = "+symclazz);//DEBUG
-                    if (sameLength(basesym.typeParams, baseargs))
-                      instParam(basesym.typeParams, baseargs)
-                    else
-                      if (symclazz.tpe.parents exists typeIsErroneous)
-                        ErrorType // don't be too overzealous with throwing exceptions, see #2641
-                      else
-                        throw new Error(
-                          "something is wrong (wrong class file?): "+basesym+
-                          " with type parameters "+
-                          basesym.typeParams.map(_.name).mkString("[",",","]")+
-                          " gets applied to arguments "+baseargs.mkString("[",",","]")+", phase = "+phase)
-                  case ExistentialType(tparams, qtpe) =>
-                    capturedSkolems = capturedSkolems union tparams
-                    toInstance(qtpe, clazz)
-                  case t =>
-                    throwError
-                }
-              } else toInstance(pre.baseType(clazz).prefix, clazz.owner)
-            }
-          toInstance(pre, clazz)
-        case _ =>
-          mapOver(tp)
-      }
-  }
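The effect of asSeenFrom is also observable through the public reflection API; a hedged analogue (not the map above itself) using runtime reflection:

    import scala.reflect.runtime.universe._

    // head is declared with an abstract element type; viewing its signature
    // from the prefix List[Int] instantiates that type parameter to Int.
    val headTpe = typeOf[List[Int]].member(TermName("head")).typeSignatureIn(typeOf[List[Int]])
    headTpe.resultType =:= typeOf[Int]   // true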
-
-  /** A base class to compute all substitutions */
-  abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap {
-    assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to)
-
-    /** Are `sym` and `sym1` the same? Can be tuned by subclasses. */
-    protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1
-
-    /** Map target to type, can be tuned by subclasses */
-    protected def toType(fromtp: Type, tp: T): Type
-
-    protected def renameBoundSyms(tp: Type): Type = tp match {
-      case MethodType(ps, restp) =>
-        createFromClonedSymbols(ps, restp)((ps1, tp1) => copyMethodType(tp, ps1, renameBoundSyms(tp1)))
-      case PolyType(bs, restp) =>
-        createFromClonedSymbols(bs, restp)((ps1, tp1) => PolyType(ps1, renameBoundSyms(tp1)))
-      case ExistentialType(bs, restp) =>
-        createFromClonedSymbols(bs, restp)(newExistentialType)
-      case _ =>
-        tp
-    }
-
-    def apply(tp0: Type): Type = if (from.isEmpty) tp0 else {
-      @tailrec def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type =
-        if (from.isEmpty) tp
-        // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from))
-        else if (matches(from.head, sym)) toType(tp, to.head)
-        else subst(tp, sym, from.tail, to.tail)
-
-      val boundSyms = tp0.boundSyms
-      val tp1 = if (boundSyms.nonEmpty && (boundSyms exists from.contains)) renameBoundSyms(tp0) else tp0
-      val tp = mapOver(tp1)
-
-      tp match {
-        // @M
-        // 1) arguments must also be substituted (even when the "head" of the
-        // applied type has already been substituted)
-        // example: (subst RBound[RT] from [type RT,type RBound] to
-        // [type RT&,type RBound&]) = RBound&[RT&]
-        // 2) avoid loops (which occur because alpha-conversion is
-        // not performed properly imo)
-        // e.g. if in class Iterable[a] there is a new Iterable[(a,b)],
-        // we must replace the a in Iterable[a] by (a,b)
-        // (must not recurse --> loops)
-        // 3) replacing m by List in m[Int] should yield List[Int], not just List
-        case TypeRef(NoPrefix, sym, args) =>
-          appliedType(subst(tp, sym, from, to), args) // if args.isEmpty, appliedType is the identity
-        case SingleType(NoPrefix, sym) =>
-          subst(tp, sym, from, to)
-        case _ =>
-          tp
-      }
-    }
-  }
-
-  /** A map to implement the `substSym` method. */
-  class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
-    def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2))
-
-    protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
-      case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
-      case SingleType(pre, _) => singleType(pre, sym)
-    }
-    override def apply(tp: Type): Type = if (from.isEmpty) tp else {
-      @tailrec def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol =
-        if (from.isEmpty) sym
-        // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from))
-        else if (matches(from.head, sym)) to.head
-        else subst(sym, from.tail, to.tail)
-      tp match {
-        case TypeRef(pre, sym, args) if pre ne NoPrefix =>
-          val newSym = subst(sym, from, to)
-          // mapOver takes care of subst'ing in args
-          mapOver ( if (sym eq newSym) tp else copyTypeRef(tp, pre, newSym, args) )
-          // assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams))
-        case SingleType(pre, sym) if pre ne NoPrefix =>
-          val newSym = subst(sym, from, to)
-          mapOver( if (sym eq newSym) tp else singleType(pre, newSym) )
-        case _ =>
-          super.apply(tp)
-      }
-    }
-
-    object mapTreeSymbols extends TypeMapTransformer {
-      val strictCopy = newStrictTreeCopier
-
-      def termMapsTo(sym: Symbol) = from indexOf sym match {
-        case -1   => None
-        case idx  => Some(to(idx))
-      }
-
-      // if tree.symbol is mapped to another symbol, passes the new symbol into the
-      // constructor `trans` and sets the symbol and the type on the resulting tree.
-      def transformIfMapped(tree: Tree)(trans: Symbol => Tree) = termMapsTo(tree.symbol) match {
-        case Some(toSym) => trans(toSym) setSymbol toSym setType tree.tpe
-        case None => tree
-      }
-
-      // changes trees which refer to one of the mapped symbols. trees are copied before attributes are modified.
-      override def transform(tree: Tree) = {
-        // super.transform maps symbol references in the types of `tree`. it also copies trees where necessary.
-        super.transform(tree) match {
-          case id @ Ident(_) =>
-            transformIfMapped(id)(toSym =>
-              strictCopy.Ident(id, toSym.name))
-
-          case sel @ Select(qual, name) =>
-            transformIfMapped(sel)(toSym =>
-              strictCopy.Select(sel, qual, toSym.name))
-
-          case tree => tree
-        }
-      }
-    }
-    override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
-      mapTreeSymbols.transform(tree)
-    }
-  }
-
-  /** A map to implement the `subst` method. */
-  class SubstTypeMap(from: List[Symbol], to: List[Type])
-  extends SubstMap(from, to) {
-    protected def toType(fromtp: Type, tp: Type) = tp
-
-    override def mapOver(tree: Tree, giveup: () => Nothing): Tree = {
-      object trans extends TypeMapTransformer {
-        override def transform(tree: Tree) = tree match {
-          case Ident(name) =>
-            from indexOf tree.symbol match {
-              case -1   => super.transform(tree)
-              case idx  =>
-                val totpe = to(idx)
-                if (totpe.isStable) tree.duplicate setType totpe
-                else giveup()
-            }
-          case _ =>
-            super.transform(tree)
-        }
-      }
-      trans.transform(tree)
-    }
-  }
-
-  /** A map to implement the `substThis` method. */
-  class SubstThisMap(from: Symbol, to: Type) extends TypeMap {
-    def apply(tp: Type): Type = tp match {
-      case ThisType(sym) if (sym == from) => to
-      case _ => mapOver(tp)
-    }
-  }
-
-  class SubstWildcardMap(from: List[Symbol]) extends TypeMap {
-    def apply(tp: Type): Type = try {
-      tp match {
-        case TypeRef(_, sym, _) if from contains sym =>
-          BoundedWildcardType(sym.info.bounds)
-        case _ =>
-          mapOver(tp)
-      }
-    } catch {
-      case ex: MalformedType =>
-        WildcardType
-    }
-  }
-
-// dependent method types
-  object IsDependentCollector extends TypeCollector(false) {
-    def traverse(tp: Type) {
-      if (tp.isImmediatelyDependent) result = true
-      else if (!result) mapOver(tp.dealias)
-    }
-  }
-
-  object ApproximateDependentMap extends TypeMap {
-    def apply(tp: Type): Type =
-      if (tp.isImmediatelyDependent) WildcardType
-      else mapOver(tp)
-  }
-
-  /** Note: This map is needed even for non-dependent method types, despite what the name might imply.
-   */
-  class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
-    private val actuals      = actuals0.toIndexedSeq
-    private val existentials = new Array[Symbol](actuals.size)
-    def existentialsNeeded: List[Symbol] = existentials.filter(_ ne null).toList
-
-    private object StableArg {
-      def unapply(param: Symbol) = Arg unapply param map actuals filter (tp =>
-        tp.isStable && (tp.typeSymbol != NothingClass)
-      )
-    }
-    private object Arg {
-      def unapply(param: Symbol) = Some(params indexOf param) filter (_ >= 0)
-    }
-
-    def apply(tp: Type): Type = mapOver(tp) match {
-      // unsound to replace args by unstable actual #3873
-      case SingleType(NoPrefix, StableArg(arg)) => arg
-      // (soundly) expand type alias selections on implicit arguments,
-      // see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit`
-      case tp1 @ TypeRef(SingleType(NoPrefix, Arg(pid)), sym, targs) =>
-        val arg = actuals(pid)
-        val res = typeRef(arg, sym, targs)
-        if (res.typeSymbolDirect.isAliasType) res.dealias else tp1
-      // don't return the original `tp`, which may be different from `tp1`,
-      // due to dropping annotations
-      case tp1 => tp1
-    }
-
-    /* Return the type symbol for referencing a parameter inside the existential quantifier.
-     * (Only needed if the actual is unstable.)
-     */
-    private def existentialFor(pid: Int) = {
-      if (existentials(pid) eq null) {
-        val param = params(pid)
-        existentials(pid) = (
-          param.owner.newExistential(param.name.toTypeName append nme.SINGLETON_SUFFIX, param.pos, param.flags)
-            setInfo singletonBounds(actuals(pid))
-        )
-      }
-      existentials(pid)
-    }
-
-    //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon)
-    override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
-      // TODO: this should be simplified; in the stable case, one can
-      // probably just use an Ident to the tree.symbol.
-      //
-      // @PP: That leads to failure here, where stuff no longer has type
-      // 'String @Annot("stuff")' but 'String @Annot(x)'.
-      //
-      //   def m(x: String): String @Annot(x) = x
-      //   val stuff = m("stuff")
-      //
-      // (TODO cont.) Why an existential in the non-stable case?
-      //
-      // @PP: In the following:
-      //
-      //   def m = { val x = "three" ; val y: String @Annot(x) = x; y }
-      //
-      // m is typed as 'String @Annot(x) forSome { val x: String }'.
-      //
-      // Both examples are from run/constrained-types.scala.
-      object treeTrans extends Transformer {
-        override def transform(tree: Tree): Tree = tree.symbol match {
-          case StableArg(actual) =>
-            gen.mkAttributedQualifier(actual, tree.symbol)
-          case Arg(pid) =>
-            val sym = existentialFor(pid)
-            Ident(sym) copyAttrs tree setType typeRef(NoPrefix, sym, Nil)
-          case _ =>
-            super.transform(tree)
-        }
-      }
-      treeTrans transform arg
-    }
-  }
-
-  object StripAnnotationsMap extends TypeMap {
-    def apply(tp: Type): Type = tp match {
-      case AnnotatedType(_, atp, _) =>
-        mapOver(atp)
-      case tp =>
-        mapOver(tp)
-    }
-  }
-
-  /** A map to convert every occurrence of a wildcard type to a fresh
-   *  type variable */
-  object wildcardToTypeVarMap extends TypeMap {
-    def apply(tp: Type): Type = tp match {
-      case WildcardType =>
-        TypeVar(tp, new TypeConstraint)
-      case BoundedWildcardType(bounds) =>
-        TypeVar(tp, new TypeConstraint(bounds))
-      case _ =>
-        mapOver(tp)
-    }
-  }
-
-  /** A map to convert every occurrence of a type variable to a wildcard type. */
-  object typeVarToOriginMap extends TypeMap {
-    def apply(tp: Type): Type = tp match {
-      case TypeVar(origin, _) => origin
-      case _ => mapOver(tp)
-    }
-  }
-
-  /** A map to implement the `contains` method. */
-  class ContainsCollector(sym: Symbol) extends TypeCollector(false) {
-    def traverse(tp: Type) {
-      if (!result) {
-        tp.normalize match {
-          case TypeRef(_, sym1, _) if (sym == sym1) => result = true
-          case SingleType(_, sym1) if (sym == sym1) => result = true
-          case _ => mapOver(tp)
-        }
-      }
-    }
-
-    override def mapOver(arg: Tree) = {
-      for (t <- arg) {
-        traverse(t.tpe)
-        if (t.symbol == sym)
-          result = true
-      }
-      arg
-    }
-  }
-
-  /** A map to implement the `contains` method. */
-  class ContainsTypeCollector(t: Type) extends TypeCollector(false) {
-    def traverse(tp: Type) {
-      if (!result) {
-        if (tp eq t) result = true
-        else mapOver(tp)
-      }
-    }
-    override def mapOver(arg: Tree) = {
-      for (t <- arg)
-        traverse(t.tpe)
-
-      arg
-    }
-  }
-
-  /** A map to implement the `filter` method. */
-  class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) {
-    def withFilter(q: Type => Boolean) = new FilterTypeCollector(tp => p(tp) && q(tp))
-
-    override def collect(tp: Type) = super.collect(tp).reverse
-
-    def traverse(tp: Type) {
-      if (p(tp)) result ::= tp
-      mapOver(tp)
-    }
-  }
-
-  /** A map to implement the `collect` method. */
-  class CollectTypeCollector[T](pf: PartialFunction[Type, T]) extends TypeCollector[List[T]](Nil) {
-    override def collect(tp: Type) = super.collect(tp).reverse
-
-    def traverse(tp: Type) {
-      if (pf.isDefinedAt(tp)) result ::= pf(tp)
-      mapOver(tp)
-    }
-  }
-
-  class ForEachTypeTraverser(f: Type => Unit) extends TypeTraverser {
-    def traverse(tp: Type) {
-      f(tp)
-      mapOver(tp)
-    }
-  }
-
-  /** A map to implement the `filter` method. */
-  class FindTypeCollector(p: Type => Boolean) extends TypeCollector[Option[Type]](None) {
-    def traverse(tp: Type) {
-      if (result.isEmpty) {
-        if (p(tp)) result = Some(tp)
-        mapOver(tp)
-      }
-    }
-  }
-
-  /** A map to implement the `contains` method. */
-  object ErroneousCollector extends TypeCollector(false) {
-    def traverse(tp: Type) {
-      if (!result) {
-        result = tp.isError
-        mapOver(tp)
-      }
-    }
-  }
-
-  /**
-   * A more persistent version of `Type#memberType` which does not require
-   * that the symbol is a direct member of the prefix.
-   *
-   * For instance:
-   *
-   * {{{
-   * class C[T] {
-   *   sealed trait F[A]
-   *   object X {
-   *     object S1 extends F[T]
-   *   }
-   *   class S2 extends F[T]
-   * }
-   * object O extends C[Int] {
-   *   def foo(f: F[Int]) = f match {...} // need to enumerate sealed subtypes of the scrutinee here.
-   * }
-   * class S3 extends O.F[String]
-   *
-   * nestedMemberType(<S1>, <O.type>, <C>) = O.X.S1.type
-   * nestedMemberType(<S2>, <O.type>, <C>) = O.S2.type
-   * nestedMemberType(<S3>, <O.type>, <C>) = S3.type
-   * }}}
-   *
-   * @param sym    The symbol of the subtype
-   * @param pre    The prefix from which the symbol is seen
-   * @param owner
-   */
-  def nestedMemberType(sym: Symbol, pre: Type, owner: Symbol): Type = {
-    def loop(tp: Type): Type =
-      if (tp.isTrivial) tp
-      else if (tp.prefix.typeSymbol isNonBottomSubClass owner) {
-        val widened = tp match {
-          case _: ConstantType => tp // Java enum constants: don't widen to the enum type!
-          case _               => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect.
-        }
-        val memType = widened asSeenFrom (pre, tp.typeSymbol.owner)
-        if (tp eq widened) memType else memType.narrow
-      }
-      else loop(tp.prefix) memberType tp.typeSymbol
-
-    val result = loop(sym.tpeHK)
-    assert(sym.isTerm || result.typeSymbol == sym, s"($result).typeSymbol = ${result.typeSymbol}; expected ${sym}")
-    result
-  }
-
-  /** The most deeply nested owner that contains all the symbols
-   *  of thistype or prefixless typerefs/singletype occurrences in given type.
-   */
-  private def commonOwner(t: Type): Symbol = commonOwner(t :: Nil)
-
-  /** The most deeply nested owner that contains all the symbols
-   *  of thistype or prefixless typerefs/singletype occurrences in given list
-   *  of types.
-   */
-  private def commonOwner(tps: List[Type]): Symbol = {
-    if (tps.isEmpty) NoSymbol
-    else {
-      commonOwnerMap.clear()
-      tps foreach (commonOwnerMap traverse _)
-      if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol
-    }
-  }
-
-  protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj
-
-  protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] {
-    var result: Symbol = _
-
-    def clear() { result = null }
-
-    private def register(sym: Symbol) {
-      // First considered type is the trivial result.
-      if ((result eq null) || (sym eq NoSymbol))
-        result = sym
-      else
-        while ((result ne NoSymbol) && (result ne sym) && !(sym isNestedIn result))
-          result = result.owner
-    }
-    def traverse(tp: Type) = tp.normalize match {
-      case ThisType(sym)                => register(sym)
-      case TypeRef(NoPrefix, sym, args) => register(sym.owner) ; args foreach traverse
-      case SingleType(NoPrefix, sym)    => register(sym.owner)
-      case _                            => mapOver(tp)
-    }
-  }
-
-  private lazy val commonOwnerMapObj = new CommonOwnerMap
-
-  class MissingAliasControl extends ControlThrowable
-  val missingAliasException = new MissingAliasControl
-  class MissingTypeControl extends ControlThrowable
-
-  object adaptToNewRunMap extends TypeMap {
-
-    private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
-      if (phase.flatClasses || sym.isRootSymbol || (pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass)
-        sym
-      else if (sym.isModuleClass) {
-        val sourceModule1 = adaptToNewRun(pre, sym.sourceModule)
-
-        sourceModule1.moduleClass orElse sourceModule1.initialize.moduleClass orElse {
-          val msg = "Cannot adapt module class; sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s"
-          debuglog(msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass))
-          sym
-        }
-      }
-      else {
-        var rebind0 = pre.findMember(sym.name, BRIDGE, 0, true) orElse {
-          if (sym.isAliasType) throw missingAliasException
-          debugwarn(pre+"."+sym+" does no longer exist, phase = "+phase)
-          throw new MissingTypeControl // For build manager and presentation compiler purposes
-        }
-        /** The two symbols have the same fully qualified name */
-        def corresponds(sym1: Symbol, sym2: Symbol): Boolean =
-          sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner))
-        if (!corresponds(sym.owner, rebind0.owner)) {
-          debuglog("ADAPT1 pre = "+pre+", sym = "+sym.fullLocationString+", rebind = "+rebind0.fullLocationString)
-          val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner));
-          if (bcs.isEmpty)
-            assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinementclass it might be a structural type => OK to leave it in.
-          else
-            rebind0 = pre.baseType(bcs.head).member(sym.name)
-          debuglog(
-            "ADAPT2 pre = " + pre +
-            ", bcs.head = " + bcs.head +
-            ", sym = " + sym.fullLocationString +
-            ", rebind = " + rebind0.fullLocationString
-          )
-        }
-        rebind0.suchThat(sym => sym.isType || sym.isStable) orElse {
-          debuglog("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
-          throw new MalformedType(pre, sym.nameString)
-        }
-      }
-    }
-    def apply(tp: Type): Type = tp match {
-      case ThisType(sym) =>
-        try {
-          val sym1 = adaptToNewRun(sym.owner.thisType, sym)
-          if (sym1 == sym) tp else ThisType(sym1)
-        } catch {
-          case ex: MissingTypeControl =>
-            tp
-        }
-      case SingleType(pre, sym) =>
-        if (sym.isPackage) tp
-        else {
-          val pre1 = this(pre)
-          try {
-            val sym1 = adaptToNewRun(pre1, sym)
-            if ((pre1 eq pre) && (sym1 eq sym)) tp
-            else singleType(pre1, sym1)
-          } catch {
-            case _: MissingTypeControl =>
-              tp
-          }
-        }
-      case TypeRef(pre, sym, args) =>
-        if (sym.isPackageClass) tp
-        else {
-          val pre1 = this(pre)
-          val args1 = args mapConserve (this)
-          try {
-            val sym1 = adaptToNewRun(pre1, sym)
-            if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) {
-              tp
-            } else if (sym1 == NoSymbol) {
-              debugwarn("adapt fail: "+pre+" "+pre1+" "+sym)
-              tp
-            } else {
-              copyTypeRef(tp, pre1, sym1, args1)
-            }
-          } catch {
-            case ex: MissingAliasControl =>
-              apply(tp.dealias)
-            case _: MissingTypeControl =>
-              tp
-          }
-        }
-      case MethodType(params, restp) =>
-        val restp1 = this(restp)
-        if (restp1 eq restp) tp
-        else copyMethodType(tp, params, restp1)
-      case NullaryMethodType(restp) =>
-        val restp1 = this(restp)
-        if (restp1 eq restp) tp
-        else NullaryMethodType(restp1)
-      case PolyType(tparams, restp) =>
-        val restp1 = this(restp)
-        if (restp1 eq restp) tp
-        else PolyType(tparams, restp1)
-
-      // Lukas: we need to check (together) whether we should also include parameter types
-      // of PolyType and MethodType in adaptToNewRun
-
-      case ClassInfoType(parents, decls, clazz) =>
-        if (clazz.isPackageClass) tp
-        else {
-          val parents1 = parents mapConserve (this)
-          if (parents1 eq parents) tp
-          else ClassInfoType(parents1, decls, clazz)
-        }
-      case RefinedType(parents, decls) =>
-        val parents1 = parents mapConserve (this)
-        if (parents1 eq parents) tp
-        else refinedType(parents1, tp.typeSymbol.owner, decls, tp.typeSymbol.owner.pos)
-      case SuperType(_, _) => mapOver(tp)
-      case TypeBounds(_, _) => mapOver(tp)
-      case TypeVar(_, _) => mapOver(tp)
-      case AnnotatedType(_,_,_) => mapOver(tp)
-      case NotNullType(_) => mapOver(tp)
-      case ExistentialType(_, _) => mapOver(tp)
-      case _ => tp
-    }
-  }
-
-  final case class SubTypePair(tp1: Type, tp2: Type) {
-    // SI-8146 we used to implement equality here in terms of pairwise =:=.
-    //         But, this was inconsistent with hashCode, which was based on the
-    //         Type#hashCode, based on the structure of types, not the meaning.
-    //         Now, we use `Type#{equals,hashCode}` as the (consistent) basis for
-    //         detecting cycles (aka keeping subtyping decidable.)
-    //
-    //         I added tests to show that we detect the cycle: neg/t8146-no-finitary*
-
-    override def toString = tp1+" <:<? "+tp2
-  }
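The consistency requirement called out in the SI-8146 comment is what makes HashSet-based cycle detection dependable. A small self-contained sketch (BadPair is a hypothetical stand-in, not compiler code) of how an equals/hashCode mismatch would defeat it:

    import scala.collection.mutable

    // equals is case-insensitive but hashCode is not: the two disagree.
    class BadPair(val a: String, val b: String) {
      override def equals(other: Any): Boolean = other match {
        case p: BadPair => a.equalsIgnoreCase(p.a) && b.equalsIgnoreCase(p.b)
        case _          => false
      }
      override def hashCode: Int = (a, b).hashCode
    }

    val pending = mutable.HashSet(new BadPair("Foo", "Bar"))
    val probe   = new BadPair("foo", "bar")
    println(probe == pending.head)   // true: equals says "already seen"
    println(pending contains probe)  // false: wrong hash bucket, so the cycle goes undetected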
-
-// Helper Methods  -------------------------------------------------------------
-
-  /** The maximum allowable depth of lubs or glbs over types `ts`.
-    */
-  def lubDepth(ts: List[Type]): Int = {
-    val td = typeDepth(ts)
-    val bd = baseTypeSeqDepth(ts)
-    lubDepthAdjust(td, td max bd)
-  }
-
-  /** The maximum allowable depth of lubs or glbs over given types,
-   *  as a function over the maximum depth `td` of these types, and
-   *  the maximum depth `bd` of all types in the base type sequences of these types.
-   */
-  private def lubDepthAdjust(td: Int, bd: Int): Int =
-    if (settings.XfullLubs.value) bd
-    else if (bd <= 3) bd
-    else if (bd <= 5) td max (bd - 1)
-    else if (bd <= 7) td max (bd - 2)
-    else (td - 1) max (bd - 3)
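A standalone restatement of the adjustment, with the -XfullLubs branch omitted and one worked row, makes the clamping visible; the function body mirrors lubDepthAdjust above and the printed list is just its output:

    def lubDepthAdjust(td: Int, bd: Int): Int =
      if (bd <= 3) bd
      else if (bd <= 5) td max (bd - 1)
      else if (bd <= 7) td max (bd - 2)
      else (td - 1) max (bd - 3)

    // For types of depth td = 4, the allowed depth grows much more slowly than bd:
    println((1 to 10).map(bd => bd -> lubDepthAdjust(4, bd)).toList)
    // List((1,1), (2,2), (3,3), (4,4), (5,4), (6,4), (7,5), (8,5), (9,6), (10,7))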
-
-  /** The maximum depth of type `tp` */
-  def typeDepth(tp: Type): Int = tp match {
-    case TypeRef(pre, sym, args) =>
-      typeDepth(pre) max typeDepth(args) + 1
-    case RefinedType(parents, decls) =>
-      typeDepth(parents) max typeDepth(decls.toList.map(_.info)) + 1
-    case TypeBounds(lo, hi) =>
-      typeDepth(lo) max typeDepth(hi)
-    case MethodType(paramtypes, result) =>
-      typeDepth(result)
-    case NullaryMethodType(result) =>
-      typeDepth(result)
-    case PolyType(tparams, result) =>
-      typeDepth(result) max typeDepth(tparams map (_.info)) + 1
-    case ExistentialType(tparams, result) =>
-      typeDepth(result) max typeDepth(tparams map (_.info)) + 1
-    case _ =>
-      1
-  }
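The recursion is easiest to follow on a toy model: each level of type application adds one to the depth, while bounds only take the maximum of their two sides. Toy, Ref and Bounds below are illustrative stand-ins, not compiler types:

    sealed trait Toy
    final case class Ref(name: String, args: List[Toy]) extends Toy
    final case class Bounds(lo: Toy, hi: Toy)            extends Toy

    def depth(t: Toy): Int = t match {
      case Ref(_, args)   => args.map(depth).foldLeft(0)(_ max _) + 1
      case Bounds(lo, hi) => depth(lo) max depth(hi)
    }

    // List[Option[Int]]: Int has depth 1, Option[Int] has 2, List[Option[Int]] has 3.
    println(depth(Ref("List", List(Ref("Option", List(Ref("Int", Nil)))))))  // 3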
-
-  private def maxDepth(tps: List[Type], by: Type => Int): Int = {
-    //OPT replaced with a tail-recursive function to save on #closures
-    // was:
-    //    var d = 0
-    //    for (tp <- tps) d = d max by(tp) //!!!OPT!!!
-    //    d
-    def loop(tps: List[Type], acc: Int): Int = tps match {
-      case tp :: rest => loop(rest, acc max by(tp))
-      case _ => acc
-    }
-    loop(tps, 0)
-  }
-
-  private def typeDepth(tps: List[Type]): Int = maxDepth(tps, typeDepth)
-  private def baseTypeSeqDepth(tps: List[Type]): Int = maxDepth(tps, _.baseTypeSeqDepth)
-
-  /** Is intersection of given types populated? That is,
-   *  for all types tp1, tp2 in intersection
-   *    for all common base classes bc of tp1 and tp2
-   *      let bt1, bt2 be the base types of tp1, tp2 relative to class bc
-   *      Then:
-   *        bt1 and bt2 have the same prefix, and
-   *        any corresponding non-variant type arguments of bt1 and bt2 are the same
-   */
-  def isPopulated(tp1: Type, tp2: Type): Boolean = {
-    def isConsistent(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
-      case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
-        assert(sym1 == sym2)
-        pre1 =:= pre2 &&
-        forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) =>
-            //if (tparam.variance == 0 && !(arg1 =:= arg2)) Console.println("inconsistent: "+arg1+"!="+arg2)//DEBUG
-          if (tparam.variance == 0) arg1 =:= arg2
-          else if (arg1.isInstanceOf[TypeVar])
-            // if left-hand argument is a typevar, make it compatible with variance
-            // this is for more precise pattern matching
-            // todo: work this in the spec of this method
-            // also: think what happens if there are embedded typevars?
-            if (tparam.variance < 0) arg1 <:< arg2 else arg2 <:< arg1
-          else true
-        }
-      case (et: ExistentialType, _) =>
-        et.withTypeVars(isConsistent(_, tp2))
-      case (_, et: ExistentialType) =>
-        et.withTypeVars(isConsistent(tp1, _))
-    }
-
-    def check(tp1: Type, tp2: Type) =
-      if (tp1.typeSymbol.isClass && tp1.typeSymbol.hasFlag(FINAL))
-        tp1 <:< tp2 || isNumericValueClass(tp1.typeSymbol) && isNumericValueClass(tp2.typeSymbol)
-      else tp1.baseClasses forall (bc =>
-        tp2.baseTypeIndex(bc) < 0 || isConsistent(tp1.baseType(bc), tp2.baseType(bc)))
-
-    check(tp1, tp2)/* && check(tp2, tp1)*/ // need to investigate why this can't be made symmetric -- neg/gadts1 fails, and run/existentials also.
+    check(tp1, tp2) && check(tp2, tp1)
   }
 
   /** Does a pattern of type `patType` need an outer test when executed against
@@ -5378,85 +3926,14 @@ trait Types extends api.Types { self: SymbolTable =>
     }
   }
 
-  private var subsametypeRecursions: Int = 0
-
-  private def isUnifiable(pre1: Type, pre2: Type) =
-    (beginsWithTypeVarOrIsRefined(pre1) || beginsWithTypeVarOrIsRefined(pre2)) && (pre1 =:= pre2)
-
-  /** Returns true iff we are past phase specialize,
-   *  sym1 and sym2 are two existential skolems with equal names and bounds,
-   *  and pre1 and pre2 are equal prefixes
-   */
-  private def isSameSpecializedSkolem(sym1: Symbol, sym2: Symbol, pre1: Type, pre2: Type) = {
-    sym1.isExistentialSkolem && sym2.isExistentialSkolem &&
-    sym1.name == sym2.name &&
-    phase.specialized &&
-    sym1.info =:= sym2.info &&
-    pre1 =:= pre2
-  }
-
-  private def isSubPre(pre1: Type, pre2: Type, sym: Symbol) =
-    if ((pre1 ne pre2) && (pre1 ne NoPrefix) && (pre2 ne NoPrefix) && pre1 <:< pre2) {
-      if (settings.debug.value) println(s"new isSubPre $sym: $pre1 <:< $pre2")
-      true
-    } else
-      false
-
-  private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean =
-    if (sym1 == sym2) sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
-    else (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
-
-  /** Do `tp1` and `tp2` denote equivalent types? */
-  def isSameType(tp1: Type, tp2: Type): Boolean = try {
-    if (Statistics.canEnable) Statistics.incCounter(sametypeCount)
-    subsametypeRecursions += 1
-    //OPT cutdown on Function0 allocation
-    //was:
-//    undoLog undoUnless {
-//      isSameType1(tp1, tp2)
-//    }
-
-    undoLog.lock()
-    try {
-      val before = undoLog.log
-      var result = false
-
-      try result = {
-        isSameType1(tp1, tp2)
-      } finally if (!result) undoLog.undoTo(before)
-      result
-    } finally undoLog.unlock()
-  } finally {
-    subsametypeRecursions -= 1
-    // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
-    // it doesn't help to keep separate recursion counts for the three methods that now share it
-    // if (subsametypeRecursions == 0) undoLog.clear()
-  }
-
-  def isDifferentType(tp1: Type, tp2: Type): Boolean = try {
-    subsametypeRecursions += 1
-    undoLog undo { // undo type constraints that arise from operations in this block
-      !isSameType1(tp1, tp2)
-    }
-  } finally {
-    subsametypeRecursions -= 1
-    // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
-    // it doesn't help to keep separate recursion counts for the three methods that now share it
-    // if (subsametypeRecursions == 0) undoLog.clear()
-  }
-
-  def isDifferentTypeConstructor(tp1: Type, tp2: Type): Boolean = tp1 match {
-    case TypeRef(pre1, sym1, _) =>
-      tp2 match {
-        case TypeRef(pre2, sym2, _) => sym1 != sym2 || isDifferentType(pre1, pre2)
-        case _ => true
-      }
-    case _ => true
-  }
-
-  def normalizePlus(tp: Type) =
+  def normalizePlus(tp: Type) = (
     if (isRawType(tp)) rawToExistential(tp)
-    else tp.normalize
+    else tp.normalize match {
+      // Unify the two representations of module classes
+      case st @ SingleType(_, sym) if sym.isModule => st.underlying.normalize
+      case _                                       => tp.normalize
+    }
+  )
 
   /*
   todo: change to:
@@ -5468,288 +3945,7 @@ trait Types extends api.Types { self: SymbolTable =>
     case _ => tp.normalize
   }
   */
-/*
-  private def isSameType0(tp1: Type, tp2: Type): Boolean = {
-    if (tp1 eq tp2) return true
-    ((tp1, tp2) match {
-      case (ErrorType, _) => true
-      case (WildcardType, _) => true
-      case (_, ErrorType) => true
-      case (_, WildcardType) => true
-
-      case (NoType, _) => false
-      case (NoPrefix, _) => tp2.typeSymbol.isPackageClass
-      case (_, NoType) => false
-      case (_, NoPrefix) => tp1.typeSymbol.isPackageClass
-
-      case (ThisType(sym1), ThisType(sym2))
-      if (sym1 == sym2) =>
-        true
-      case (SingleType(pre1, sym1), SingleType(pre2, sym2))
-      if (equalSymsAndPrefixes(sym1, pre1, sym2, pre2)) =>
-        true
-/*
-      case (SingleType(pre1, sym1), ThisType(sym2))
-      if (sym1.isModule &&
-          sym1.moduleClass == sym2 &&
-          pre1 =:= sym2.owner.thisType) =>
-        true
-      case (ThisType(sym1), SingleType(pre2, sym2))
-      if (sym2.isModule &&
-          sym2.moduleClass == sym1 &&
-          pre2 =:= sym1.owner.thisType) =>
-        true
-*/
-      case (ConstantType(value1), ConstantType(value2)) =>
-        value1 == value2
-      case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
-        equalSymsAndPrefixes(sym1, pre1, sym2, pre2) &&
-        ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
-         isSameTypes(args1, args2))
-         // @M! normalize reduces higher-kinded case to PolyType's
-      case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) =>
-        def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
-          sym2 =>
-            var e1 = s1.lookupEntry(sym2.name)
-            (e1 ne null) && {
-              val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner.thisType)
-              var isEqual = false
-              while (!isEqual && (e1 ne null)) {
-                isEqual = e1.sym.info =:= substSym
-                e1 = s1.lookupNextEntry(e1)
-              }
-              isEqual
-            }
-        }
-        //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
-        isSameTypes(parents1, parents2) && isSubScope(ref1, ref2) && isSubScope(ref2, ref1)
-      case (MethodType(params1, res1), MethodType(params2, res2)) =>
-        // new dependent types: probably fix this, use substSym as done for PolyType
-        (isSameTypes(tp1.paramTypes, tp2.paramTypes) &&
-         res1 =:= res2 &&
-         tp1.isImplicit == tp2.isImplicit)
-      case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
-        // assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
-        (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210
-          res1 =:= res2.substSym(tparams2, tparams1)
-      case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
-        (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210
-          res1 =:= res2.substSym(tparams2, tparams1)
-      case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
-        lo1 =:= lo2 && hi1 =:= hi2
-      case (BoundedWildcardType(bounds), _) =>
-        bounds containsType tp2
-      case (_, BoundedWildcardType(bounds)) =>
-        bounds containsType tp1
-      case (tv @ TypeVar(_,_), tp) =>
-        tv.registerTypeEquality(tp, true)
-      case (tp, tv @ TypeVar(_,_)) =>
-        tv.registerTypeEquality(tp, false)
-      case (AnnotatedType(_,_,_), _) =>
-        annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
-      case (_, AnnotatedType(_,_,_)) =>
-        annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
-      case (_: SingletonType, _: SingletonType) =>
-        var origin1 = tp1
-        while (origin1.underlying.isInstanceOf[SingletonType]) {
-          assert(origin1 ne origin1.underlying, origin1)
-          origin1 = origin1.underlying
-        }
-        var origin2 = tp2
-        while (origin2.underlying.isInstanceOf[SingletonType]) {
-          assert(origin2 ne origin2.underlying, origin2)
-          origin2 = origin2.underlying
-        }
-        ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
-      case _ =>
-        false
-    }) || {
-      val tp1n = normalizePlus(tp1)
-      val tp2n = normalizePlus(tp2)
-      ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
-    }
-  }
-*/
-  private def isSameType1(tp1: Type, tp2: Type): Boolean = {
-    if ((tp1 eq tp2) ||
-        (tp1 eq ErrorType) || (tp1 eq WildcardType) ||
-        (tp2 eq ErrorType) || (tp2 eq WildcardType))
-      true
-    else if ((tp1 eq NoType) || (tp2 eq NoType))
-      false
-    else if (tp1 eq NoPrefix) // !! I do not see how this would be warranted by the spec
-      tp2.typeSymbol.isPackageClass
-    else if (tp2 eq NoPrefix) // !! I do not see how this would be warranted by the spec
-      tp1.typeSymbol.isPackageClass
-    else {
-      isSameType2(tp1, tp2) || {
-        val tp1n = normalizePlus(tp1)
-        val tp2n = normalizePlus(tp2)
-        ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
-      }
-    }
-  }
 
-  def isSameType2(tp1: Type, tp2: Type): Boolean = {
-    tp1 match {
-      case tr1: TypeRef =>
-        tp2 match {
-          case tr2: TypeRef =>
-            return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) &&
-              ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
-               isSameTypes(tr1.args, tr2.args))) ||
-               ((tr1.pre, tr2.pre) match {
-                 case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2)
-                 case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1)
-                 case _ => false
-               })
-          case _: SingleType =>
-            return isSameType2(tp2, tp1)  // put singleton type on the left, caught below
-          case _ =>
-        }
-      case tt1: ThisType =>
-        tp2 match {
-          case tt2: ThisType =>
-            if (tt1.sym == tt2.sym) return true
-          case _ =>
-        }
-      case st1: SingleType =>
-        tp2 match {
-          case st2: SingleType =>
-            if (equalSymsAndPrefixes(st1.sym, st1.pre, st2.sym, st2.pre)) return true
-          case TypeRef(pre2, sym2, Nil) =>
-            if (sym2.isModuleClass && equalSymsAndPrefixes(st1.sym, st1.pre, sym2.sourceModule, pre2)) return true
-          case _ =>
-        }
-      case ct1: ConstantType =>
-        tp2 match {
-          case ct2: ConstantType =>
-            return (ct1.value == ct2.value)
-          case _ =>
-        }
-      case rt1: RefinedType =>
-        tp2 match {
-          case rt2: RefinedType => //
-            def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
-              sym2 =>
-                var e1 = s1.lookupEntry(sym2.name)
-                (e1 ne null) && {
-                  val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner)
-                  var isEqual = false
-                  while (!isEqual && (e1 ne null)) {
-                    isEqual = e1.sym.info =:= substSym
-                    e1 = s1.lookupNextEntry(e1)
-                  }
-                  isEqual
-                }
-            }
-            //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
-            return isSameTypes(rt1.parents, rt2.parents) && {
-              val decls1 = rt1.decls
-              val decls2 = rt2.decls
-              isSubScope(decls1, decls2) && isSubScope(decls2, decls1)
-            }
-          case _ =>
-        }
-      case mt1: MethodType =>
-        tp2 match {
-          case mt2: MethodType =>
-            return isSameTypes(mt1.paramTypes, mt2.paramTypes) &&
-              mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params) &&
-              mt1.isImplicit == mt2.isImplicit
-          // note: no case NullaryMethodType(restpe) => return mt1.params.isEmpty && mt1.resultType =:= restpe
-          case _ =>
-        }
-      case NullaryMethodType(restpe1) =>
-        tp2 match {
-          // note: no case mt2: MethodType => return mt2.params.isEmpty && restpe  =:= mt2.resultType
-          case NullaryMethodType(restpe2) =>
-            return restpe1 =:= restpe2
-          case _ =>
-        }
-      case PolyType(tparams1, res1) =>
-        tp2 match {
-          case PolyType(tparams2, res2) =>
-//            assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
-              // @M looks like it might suffer from same problem as #2210
-              return (
-                (sameLength(tparams1, tparams2)) && // corresponds does not check length of two sequences before checking the predicate
-                (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
-                res1 =:= res2.substSym(tparams2, tparams1)
-              )
-          case _ =>
-        }
-      case ExistentialType(tparams1, res1) =>
-        tp2 match {
-          case ExistentialType(tparams2, res2) =>
-            // @M looks like it might suffer from same problem as #2210
-            return (
-              // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956
-              sameLength(tparams1, tparams2) &&
-              (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
-              res1 =:= res2.substSym(tparams2, tparams1)
-            )
-          case _ =>
-        }
-      case TypeBounds(lo1, hi1) =>
-        tp2 match {
-          case TypeBounds(lo2, hi2) =>
-            return lo1 =:= lo2 && hi1 =:= hi2
-          case _ =>
-        }
-      case BoundedWildcardType(bounds) =>
-        return bounds containsType tp2
-      case _ =>
-    }
-    tp2 match {
-      case BoundedWildcardType(bounds) =>
-        return bounds containsType tp1
-      case _ =>
-    }
-    tp1 match {
-      case tv @ TypeVar(_,_) =>
-        return tv.registerTypeEquality(tp2, true)
-      case _ =>
-    }
-    tp2 match {
-      case tv @ TypeVar(_,_) =>
-        return tv.registerTypeEquality(tp1, false)
-      case _ =>
-    }
-    tp1 match {
-      case _: AnnotatedType =>
-        return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
-      case _ =>
-    }
-    tp2 match {
-      case _: AnnotatedType =>
-        return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
-      case _ =>
-    }
-    tp1 match {
-      case _: SingletonType =>
-        tp2 match {
-          case _: SingletonType =>
-            def chaseDealiasedUnderlying(tp: Type): Type = {
-              var origin = tp
-              var next = origin.underlying.dealias
-              while (next.isInstanceOf[SingletonType]) {
-                assert(origin ne next, origin)
-                origin = next
-                next = origin.underlying.dealias
-              }
-              origin
-            }
-            val origin1 = chaseDealiasedUnderlying(tp1)
-            val origin2 = chaseDealiasedUnderlying(tp2)
-            ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
-          case _ =>
-            false
-        }
-      case _ =>
-        false
-    }
-  }
 
   /** Are `tps1` and `tps2` lists of pairwise equivalent types? */
   def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ =:= _)
@@ -5767,103 +3963,49 @@ trait Types extends api.Types { self: SymbolTable =>
    */
   final def hasLength(xs: List[_], len: Int) = xs.lengthCompare(len) == 0
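lengthCompare is preferred over length here because it stops traversing once the answer is known, so the check stays cheap even for long or lazily unbounded sequences; a quick illustration:

    println(List(1, 2, 3).lengthCompare(3) == 0)   // true: exactly three elements
    println(Stream.from(0).lengthCompare(2) > 0)   // true: only the first few cells are forced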
 
-  private val pendingSubTypes = new mutable.HashSet[SubTypePair]
-  private var basetypeRecursions: Int = 0
-  private val pendingBaseTypes = new mutable.HashSet[Type]
-
-  def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth)
-
-  def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean = try {
-    subsametypeRecursions += 1
-
-    //OPT cutdown on Function0 allocation
-    //was:
-//    undoLog undoUnless { // if subtype test fails, it should not affect constraints on typevars
-//      if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
-//        val p = new SubTypePair(tp1, tp2)
-//        if (pendingSubTypes(p))
-//          false
-//        else
-//          try {
-//            pendingSubTypes += p
-//            isSubType2(tp1, tp2, depth)
-//          } finally {
-//            pendingSubTypes -= p
-//          }
-//      } else {
-//        isSubType2(tp1, tp2, depth)
-//      }
-//    }
-
-    undoLog.lock()
-    try {
-      val before = undoLog.log
-      var result = false
-
-      try result = { // if subtype test fails, it should not affect constraints on typevars
-        if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
-          val p = new SubTypePair(tp1, tp2)
-          if (pendingSubTypes(p))
-            false // see neg/t8146-no-finitary*
-          else
-            try {
-              pendingSubTypes += p
-              isSubType2(tp1, tp2, depth)
-            } finally {
-              pendingSubTypes -= p
-            }
-        } else {
-          isSubType2(tp1, tp2, depth)
-        }
-      } finally if (!result) undoLog.undoTo(before)
+  private var _basetypeRecursions: Int = 0
+  def basetypeRecursions = _basetypeRecursions
+  def basetypeRecursions_=(value: Int) = _basetypeRecursions = value
 
-      result
-    } finally undoLog.unlock()
-  } finally {
-    subsametypeRecursions -= 1
-    // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
-    // it doesn't help to keep separate recursion counts for the three methods that now share it
-    // if (subsametypeRecursions == 0) undoLog.clear()
-  }
+  private val _pendingBaseTypes = new mutable.HashSet[Type]
+  def pendingBaseTypes = _pendingBaseTypes
 
   /** Does this type have a prefix that begins with a type variable,
    *  or is it a refinement type? For type prefixes that fulfil this condition,
    *  type selections with the same name are considered equal in regard to `=:=`
    *  whenever their prefixes are equal (as determined by `=:=`).
    */
-  def beginsWithTypeVarOrIsRefined(tp: Type): Boolean = tp match {
-    case SingleType(pre, sym) =>
-      !(sym hasFlag PACKAGE) && beginsWithTypeVarOrIsRefined(pre)
-    case tv@TypeVar(_, constr) =>
-      !tv.instValid || beginsWithTypeVarOrIsRefined(constr.inst)
-    case RefinedType(_, _) =>
-      true
-    case _ =>
-      false
-  }
-
-  @deprecated("The compiler doesn't use this so you shouldn't either - it will be removed", "2.10.0")
-  def instTypeVar(tp: Type): Type = tp match {
-    case TypeRef(pre, sym, args) =>
-      copyTypeRef(tp, instTypeVar(pre), sym, args)
-    case SingleType(pre, sym) =>
-      singleType(instTypeVar(pre), sym)
-    case TypeVar(_, constr) =>
-      instTypeVar(constr.inst)
-    case _ =>
-      tp
+  def isEligibleForPrefixUnification(tp: Type): Boolean = tp match {
+    case SingleType(pre, sym)  => !(sym hasFlag PACKAGE) && isEligibleForPrefixUnification(pre)
+    case tv@TypeVar(_, constr) => !tv.instValid || isEligibleForPrefixUnification(constr.inst)
+    case RefinedType(_, _)     => true
+    case _                     => false
   }
 
   def isErrorOrWildcard(tp: Type) = (tp eq ErrorType) || (tp eq WildcardType)
 
+  /** This appears to be equivalent to tp.isInstanceOf[SingletonType],
+   *  except it excludes ConstantTypes.
+   */
   def isSingleType(tp: Type) = tp match {
     case ThisType(_) | SuperType(_, _) | SingleType(_, _) => true
-    case _ => false
+    case _                                                => false
   }
 
   def isConstantType(tp: Type) = tp match {
     case ConstantType(_) => true
-    case _ => false
+    case _               => false
+  }
+
+  def isExistentialType(tp: Type): Boolean = tp match {
+    case _: ExistentialType           => true
+    case tp: Type if tp.dealias ne tp => isExistentialType(tp.dealias)
+    case _                            => false
+  }
+
+  def isImplicitMethodType(tp: Type) = tp match {
+    case mt: MethodType => mt.isImplicit
+    case _              => false
   }
 
   /** This is defined and named as it is because the goal is to exclude source
@@ -5891,10 +4033,11 @@ trait Types extends api.Types { self: SymbolTable =>
    *  types which are used internally in type applications and
    *  types which are not.
    */
+  /**** Not used right now, but kept around to document which Types
+   *    land in which bucket.
   private def isInternalTypeNotUsedAsTypeArg(tp: Type): Boolean = tp match {
     case AntiPolyType(pre, targs)            => true
     case ClassInfoType(parents, defs, clazz) => true
-    case DeBruijnIndex(level, index, args)   => true
     case ErasedValueType(tref)               => true
     case NoPrefix                            => true
     case NoType                              => true
@@ -5902,6 +4045,7 @@ trait Types extends api.Types { self: SymbolTable =>
     case TypeBounds(lo, hi)                  => true
     case _                                   => false
   }
+  ****/
   private def isInternalTypeUsedAsTypeArg(tp: Type): Boolean = tp match {
     case WildcardType           => true
     case BoundedWildcardType(_) => true
@@ -5929,7 +4073,7 @@ trait Types extends api.Types { self: SymbolTable =>
   private def isValueElseNonValue(tp: Type): Boolean = tp match {
     case tp if isAlwaysValueType(tp)           => true
     case tp if isAlwaysNonValueType(tp)        => false
-    case AnnotatedType(_, underlying, _)       => isValueElseNonValue(underlying)
+    case AnnotatedType(_, underlying)          => isValueElseNonValue(underlying)
     case SingleType(_, sym)                    => sym.isValue           // excludes packages and statics
     case TypeRef(_, _, _) if tp.isHigherKinded => false                 // excludes type constructors
     case ThisType(sym)                         => !sym.isPackageClass   // excludes packages
@@ -5947,7 +4091,7 @@ trait Types extends api.Types { self: SymbolTable =>
    *  useful as documentation; it is likely that !isNonValueType(tp)
    *  will serve better than isValueType(tp).
    */
-  def isValueType(tp: Type) = isValueElseNonValue(tp)
+  /** def isValueType(tp: Type) = isValueElseNonValue(tp) */
 
   /** SLS 3.3, Non-Value Types
    *  Is the given type definitely a non-value type, as defined in SLS 3.3?
@@ -5958,7 +4102,7 @@ trait Types extends api.Types { self: SymbolTable =>
    *  not designated non-value types because there is code which depends on using
    *  them as type arguments, but their precise status is unclear.
    */
-  def isNonValueType(tp: Type) = !isValueElseNonValue(tp)
+  /** def isNonValueType(tp: Type) = !isValueElseNonValue(tp) */
 
   def isNonRefinementClassType(tpe: Type) = tpe match {
     case SingleType(_, sym) => sym.isModuleClass
@@ -5967,294 +4111,70 @@ trait Types extends api.Types { self: SymbolTable =>
     case _                  => false
   }
 
-  // @assume tp1.isHigherKinded || tp2.isHigherKinded
-  def isHKSubType0(tp1: Type, tp2: Type, depth: Int): Boolean = (
-    tp1.typeSymbol == NothingClass
-    ||
-    tp2.typeSymbol == AnyClass // @M Any and Nothing are super-type resp. subtype of every well-kinded type
-    || // @M! normalize reduces higher-kinded case to PolyType's
-    ((tp1.normalize.withoutAnnotations , tp2.normalize.withoutAnnotations) match {
-      case (PolyType(tparams1, res1), PolyType(tparams2, res2)) => // @assume tp1.isHigherKinded && tp2.isHigherKinded (as they were both normalized to PolyType)
-        sameLength(tparams1, tparams2) && {
-          if (tparams1.head.owner.isMethod) {  // fast-path: polymorphic method type -- type params cannot be captured
-            (tparams1 corresponds tparams2)((p1, p2) => p2.info.substSym(tparams2, tparams1) <:< p1.info) &&
-            res1 <:< res2.substSym(tparams2, tparams1)
-          } else { // normalized higher-kinded type
-            //@M for an example of why we need to generate fresh symbols, see neg/tcpoly_ticket2101.scala
-            val tpsFresh = cloneSymbols(tparams1)
-
-            (tparams1 corresponds tparams2)((p1, p2) =>
-              p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh)) &&
-            res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh)
-
-            //@M the forall in the previous test could be optimised to the following,
-            // but not worth the extra complexity since it only shaves 1s from quick.comp
-            //   (List.forall2(tpsFresh/*optimisation*/, tparams2)((p1, p2) =>
-            //   p2.info.substSym(tparams2, tpsFresh) <:< p1.info /*optimisation, == (p1 from tparams1).info.substSym(tparams1, tpsFresh)*/) &&
-            // this optimisation holds because inlining cloneSymbols in `val tpsFresh = cloneSymbols(tparams1)` gives:
-            // val tpsFresh = tparams1 map (_.cloneSymbol)
-            // for (tpFresh <- tpsFresh) tpFresh.setInfo(tpFresh.info.substSym(tparams1, tpsFresh))
-        }
-      } && annotationsConform(tp1.normalize, tp2.normalize)
-      case (_, _) => false // @assume !tp1.isHigherKinded || !tp2.isHigherKinded
-      // --> thus, cannot be subtypes (Any/Nothing has already been checked)
-    }))
-
-  def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol], depth: Int): Boolean = {
-    def isSubArg(t1: Type, t2: Type, variance: Int) =
-      (variance > 0 || isSubType(t2, t1, depth)) &&
-      (variance < 0 || isSubType(t1, t2, depth))
-    corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg)
-  }
-
-  def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1
-
-  /** Does type `tp1` conform to `tp2`? */
-  private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
-    if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true
-    if ((tp1 eq NoType) || (tp2 eq NoType)) return false
-    if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec
-    if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass
-    if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2
-    if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType0(tp1, tp2, depth)
-
-    /** First try, on the right:
-     *   - unwrap Annotated types, BoundedWildcardTypes,
-     *   - bind TypeVars  on the right, if lhs is not Annotated nor BoundedWildcard
-     *   - handle common cases for first-kind TypeRefs on both sides as a fast path.
-     */
-    def firstTry = tp2 match {
-      // fast path: two typerefs, none of them HK
-      case tr2: TypeRef =>
-        tp1 match {
-          case tr1: TypeRef =>
-            val sym1 = tr1.sym
-            val sym2 = tr2.sym
-            val pre1 = tr1.pre
-            val pre2 = tr2.pre
-            (((if (sym1 == sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth)
-               else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass &&
-                     (isUnifiable(pre1, pre2) ||
-                      isSameSpecializedSkolem(sym1, sym2, pre1, pre2) ||
-                      sym2.isAbstractType && isSubPre(pre1, pre2, sym2)))) &&
-                    isSubArgs(tr1.args, tr2.args, sym1.typeParams, depth))
-             ||
-             sym2.isClass && {
-               val base = tr1 baseType sym2
-               (base ne tr1) && isSubType(base, tr2, depth)
-             }
-             ||
-             thirdTryRef(tr1, tr2))
-          case _ =>
-            secondTry
-        }
-      case AnnotatedType(_, _, _) =>
-        isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
-        annotationsConform(tp1, tp2)
-      case BoundedWildcardType(bounds) =>
-        isSubType(tp1, bounds.hi, depth)
-      case tv2 @ TypeVar(_, constr2) =>
-        tp1 match {
-          case AnnotatedType(_, _, _) | BoundedWildcardType(_) =>
-            secondTry
-          case _ =>
-            tv2.registerBound(tp1, true)
-        }
-      case _ =>
-        secondTry
-    }
-
-    /** Second try, on the left:
-     *   - unwrap AnnotatedTypes, BoundedWildcardTypes,
-     *   - bind typevars,
-     *   - handle existential types by skolemization.
-     */
-    def secondTry = tp1 match {
-      case AnnotatedType(_, _, _) =>
-        isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
-        annotationsConform(tp1, tp2)
-      case BoundedWildcardType(bounds) =>
-        isSubType(tp1.bounds.lo, tp2, depth)
-      case tv @ TypeVar(_,_) =>
-        tv.registerBound(tp2, false)
-      case ExistentialType(_, _) =>
-        try {
-          skolemizationLevel += 1
-          isSubType(tp1.skolemizeExistential, tp2, depth)
-        } finally {
-          skolemizationLevel -= 1
-        }
-      case _ =>
-        thirdTry
-    }
-
-    def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = {
-      val sym2 = tp2.sym
-      sym2 match {
-        case NotNullClass => tp1.isNotNull
-        case SingletonClass => tp1.isStable || fourthTry
-        case _: ClassSymbol =>
-          if (isRaw(sym2, tp2.args))
-            isSubType(tp1, rawToExistential(tp2), depth)
-          else if (sym2.name == tpnme.REFINE_CLASS_NAME)
-            isSubType(tp1, sym2.info, depth)
-          else
-            fourthTry
-        case _: TypeSymbol =>
-          if (sym2 hasFlag DEFERRED) {
-            val tp2a = tp2.bounds.lo
-            isDifferentTypeConstructor(tp2, tp2a) &&
-            isSubType(tp1, tp2a, depth) ||
-            fourthTry
-          } else {
-            isSubType(tp1.normalize, tp2.normalize, depth)
-          }
-        case _ =>
-          fourthTry
-      }
-    }
+  def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol], depth: Depth): Boolean = {
+    def isSubArg(t1: Type, t2: Type, variance: Variance) = (
+         (variance.isContravariant || isSubType(t1, t2, depth))
+      && (variance.isCovariant || isSubType(t2, t1, depth))
+    )
 
-    /** Third try, on the right:
-     *   - decompose refined types.
-     *   - handle typerefs, existentials, and notnull types.
-     *   - handle left+right method types, polytypes, typebounds
-     */
-    def thirdTry = tp2 match {
-      case tr2: TypeRef =>
-        thirdTryRef(tp1, tr2)
-      case rt2: RefinedType =>
-        (rt2.parents forall (isSubType(tp1, _, depth))) &&
-        (rt2.decls forall (specializesSym(tp1, _, depth)))
-      case et2: ExistentialType =>
-        et2.withTypeVars(isSubType(tp1, _, depth), depth) || fourthTry
-      case nn2: NotNullType =>
-        tp1.isNotNull && isSubType(tp1, nn2.underlying, depth)
-      case mt2: MethodType =>
-        tp1 match {
-          case mt1 @ MethodType(params1, res1) =>
-            val params2 = mt2.params
-            val res2 = mt2.resultType
-            (sameLength(params1, params2) &&
-             mt1.isImplicit == mt2.isImplicit &&
-             matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
-             isSubType(res1.substSym(params1, params2), res2, depth))
-          // TODO: if mt1.params.isEmpty, consider NullaryMethodType?
-          case _ =>
-            false
-        }
-      case pt2 @ NullaryMethodType(_) =>
-        tp1 match {
-          // TODO: consider MethodType mt for which mt.params.isEmpty??
-          case pt1 @ NullaryMethodType(_) =>
-            isSubType(pt1.resultType, pt2.resultType, depth)
-          case _ =>
-            false
-        }
-      case TypeBounds(lo2, hi2) =>
-        tp1 match {
-          case TypeBounds(lo1, hi1) =>
-            isSubType(lo2, lo1, depth) && isSubType(hi1, hi2, depth)
-          case _ =>
-            false
-        }
-      case _ =>
-        fourthTry
-    }
+    corresponds3(tps1, tps2, mapList(tparams)(_.variance))(isSubArg)
+  }
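The rule isSubArg encodes is the usual variance rule for applied types: covariant parameters compare arguments left to right, contravariant parameters flip the direction, and invariant parameters effectively require equivalence. The same rule can be checked against library types directly:

    implicitly[List[String] <:< List[Any]]          // compiles: List is covariant in its element
    implicitly[(Any => Int) <:< (String => Int)]    // compiles: Function1 is contravariant in its argument
    // implicitly[Array[String] <:< Array[Any]]     // does not compile: Array is invariant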
 
-    /** Fourth try, on the left:
-     *   - handle typerefs, refined types, notnull and singleton types.
-     */
-    def fourthTry = tp1 match {
-      case tr1 @ TypeRef(pre1, sym1, _) =>
-        sym1 match {
-          case NothingClass => true
-          case NullClass =>
-            tp2 match {
-              case TypeRef(_, sym2, _) =>
-                containsNull(sym2)
-              case _ =>
-                isSingleType(tp2) && isSubType(tp1, tp2.widen, depth)
-            }
-          case _: ClassSymbol =>
-            if (isRaw(sym1, tr1.args))
-              isSubType(rawToExistential(tp1), tp2, depth)
-            else if (sym1.isModuleClass) tp2 match {
-              case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1.sourceModule, pre1, sym2, pre2)
-              case _                      => false
-            }
-            else if (sym1.isRefinementClass)
-              isSubType(sym1.info, tp2, depth)
-            else false
-
-          case _: TypeSymbol =>
-            if (sym1 hasFlag DEFERRED) {
-              val tp1a = tp1.bounds.hi
-              isDifferentTypeConstructor(tp1, tp1a) && isSubType(tp1a, tp2, depth)
-            } else {
-              isSubType(tp1.normalize, tp2.normalize, depth)
-            }
-          case _ =>
-            false
-        }
-      case RefinedType(parents1, _) =>
-        parents1 exists (isSubType(_, tp2, depth))
-      case _: SingletonType | _: NotNullType =>
-        isSubType(tp1.underlying, tp2, depth)
-      case _ =>
-        false
-    }
+  def specializesSym(tp: Type, sym: Symbol, depth: Depth): Boolean = {
+    def directlySpecializedBy(member: Symbol): Boolean = (
+         member == sym
+      || specializesSym(tp.narrow, member, sym.owner.thisType, sym, depth)
+    )
+    // Closure reduction, else this would be simply `member exists directlySpecializedBy`
+    def specializedBy(member: Symbol): Boolean = (
+      if (member eq NoSymbol) false
+      else if (member.isOverloaded) member.alternatives exists directlySpecializedBy
+      else directlySpecializedBy(member)
+    )
 
-    firstTry
+    (    (tp.typeSymbol isBottomSubClass sym.owner)
+      || specializedBy(tp nonPrivateMember sym.name)
+    )
   }
 
-  private def containsNull(sym: Symbol): Boolean =
-    sym.isClass && sym != NothingClass &&
-    !(sym isNonBottomSubClass AnyValClass) &&
-    !(sym isNonBottomSubClass NotNullClass)
-
-  /** Are `tps1` and `tps2` lists of equal length such that all elements
-   *  of `tps1` conform to corresponding elements of `tps2`?
+  /** Does member `symLo` of `tpLo` have a stronger type
+   *  than member `symHi` of `tpHi`?
    */
-  def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ <:< _)
+  protected[internal] def specializesSym(preLo: Type, symLo: Symbol, preHi: Type, symHi: Symbol, depth: Depth): Boolean =
+    (symHi.isAliasType || symHi.isTerm || symHi.isAbstractType) && {
+      // only now that we know symHi is a viable candidate ^^^^^^^, do the expensive checks: ----V
+      require((symLo ne NoSymbol) && (symHi ne NoSymbol), ((preLo, symLo, preHi, symHi, depth)))
+
+      val tpHi = preHi.memberInfo(symHi).substThis(preHi.typeSymbol, preLo)
+
+      // Should we use memberType or memberInfo?
+      // memberType transforms (using `asSeenFrom`) `sym.tpe`,
+      // whereas memberInfo performs the same transform on `sym.info`.
+      // For term symbols, this ends up being the same thing (`sym.tpe == sym.info`).
+      // For type symbols, however, the `.info` of an abstract type member
+      // is defined by its bounds, whereas its `.tpe` is a `TypeRef` to that type symbol,
+      // so that `sym.tpe <:< sym.info`, but not the other way around.
+      //
+      // Thus, for the strongest (correct) result,
+      // we should use `memberType` on the low side.
+      //
+      // On the high side, we should use the result appropriate
+      // for the right side of the `<:<` above (`memberInfo`).
+      val tpLo = preLo.memberType(symLo)
 
-  /** Does type `tp` implement symbol `sym` with same or
-   *  stronger type? Exact only if `sym` is a member of some
-   *  refinement type, otherwise we might return false negatives.
-   */
-  def specializesSym(tp: Type, sym: Symbol): Boolean =
-    specializesSym(tp, sym, AnyDepth)
-
-  def specializesSym(tp: Type, sym: Symbol, depth: Int): Boolean =
-    tp.typeSymbol == NothingClass ||
-    tp.typeSymbol == NullClass && containsNull(sym.owner) || {
-      def specializedBy(membr: Symbol): Boolean =
-        membr == sym || specializesSym(tp.narrow, membr, sym.owner.thisType, sym, depth)
-      val member = tp.nonPrivateMember(sym.name)
-      if (member eq NoSymbol) false
-      else if (member.isOverloaded) member.alternatives exists specializedBy
-      else specializedBy(member)
-      // was
-      // (tp.nonPrivateMember(sym.name).alternatives exists
-      //   (alt => sym == alt || specializesSym(tp.narrow, alt, sym.owner.thisType, sym, depth)))
-    }
+      debuglog(s"specializesSymHi: $preHi . $symHi : $tpHi")
+      debuglog(s"specializesSymLo: $preLo . $symLo : $tpLo")
 
-  /** Does member `sym1` of `tp1` have a stronger type
-   *  than member `sym2` of `tp2`?
-   */
-  private def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol, depth: Int): Boolean = {
-    val info1 = tp1.memberInfo(sym1)
-    val info2 = tp2.memberInfo(sym2).substThis(tp2.typeSymbol, tp1)
-    //System.out.println("specializes "+tp1+"."+sym1+":"+info1+sym1.locationString+" AND "+tp2+"."+sym2+":"+info2)//DEBUG
-    (    sym2.isTerm && isSubType(info1, info2, depth) && (!sym2.isStable || sym1.isStable)
-      || sym2.isAbstractType && {
-            val memberTp1 = tp1.memberType(sym1)
-            // println("kinds conform? "+(memberTp1, tp1, sym2, kindsConform(List(sym2), List(memberTp1), tp2, sym2.owner)))
-            info2.bounds.containsType(memberTp1) &&
-            kindsConform(List(sym2), List(memberTp1), tp1, sym1.owner)
-        }
-      || sym2.isAliasType && tp2.memberType(sym2).substThis(tp2.typeSymbol, tp1) =:= tp1.memberType(sym1) //@MAT ok
-    )
-  }
+      if (symHi.isTerm)
+        (isSubType(tpLo, tpHi, depth)        &&
+         (!symHi.isStable || symLo.isStable) &&                                // sub-member must remain stable
+         (!symLo.hasVolatileType || symHi.hasVolatileType || tpHi.isWildcard)) // sub-member must not introduce volatility
+      else if (symHi.isAbstractType)
+        ((tpHi.bounds containsType tpLo) &&
+         kindsConform(symHi :: Nil, tpLo :: Nil, preLo, symLo.owner))
+      else // we know `symHi.isAliasType` (see above)
+        tpLo =:= tpHi
+    }
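The memberType-versus-memberInfo distinction discussed above can also be observed through the public reflection API; Box and its abstract member T below are only an illustration of the general point (the symbol's info gives the bounds, while its own type is a reference to the member):

    import scala.reflect.runtime.universe._

    trait Box { type T <: AnyVal }

    val t = typeOf[Box].member(TypeName("T"))
    println(t.typeSignature)   // the declared bounds, roughly " <: AnyVal"
    println(t.asType.toType)   // a type reference to the member itself, e.g. "Box.T" or "T"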
 
   /** A function implementing `tp1` matches `tp2`. */
   final def matchesType(tp1: Type, tp2: Type, alwaysMatchSimple: Boolean): Boolean = {
@@ -6265,7 +4185,7 @@ trait Types extends api.Types { self: SymbolTable =>
     def lastTry =
       tp2 match {
         case ExistentialType(_, res2) if alwaysMatchSimple =>
-          matchesType(tp1, res2, true)
+          matchesType(tp1, res2, alwaysMatchSimple = true)
         case MethodType(_, _) =>
           false
         case PolyType(_, _) =>
@@ -6285,7 +4205,7 @@ trait Types extends api.Types { self: SymbolTable =>
             if (params1.isEmpty) matchesType(res1, res2, alwaysMatchSimple)
             else matchesType(tp1, res2, alwaysMatchSimple)
           case ExistentialType(_, res2) =>
-            alwaysMatchSimple && matchesType(tp1, res2, true)
+            alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true)
           case TypeRef(_, sym, Nil) =>
             params1.isEmpty && sym.isModuleClass && matchesType(res1, tp2, alwaysMatchSimple)
           case _ =>
@@ -6298,7 +4218,7 @@ trait Types extends api.Types { self: SymbolTable =>
           case NullaryMethodType(res2) =>
             matchesType(res1, res2, alwaysMatchSimple)
           case ExistentialType(_, res2) =>
-            alwaysMatchSimple && matchesType(tp1, res2, true)
+            alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true)
           case TypeRef(_, sym, Nil) if sym.isModuleClass =>
             matchesType(res1, tp2, alwaysMatchSimple)
           case _ =>
@@ -6312,7 +4232,7 @@ trait Types extends api.Types { self: SymbolTable =>
             else
               matchesQuantified(tparams1, tparams2, res1, res2)
           case ExistentialType(_, res2) =>
-            alwaysMatchSimple && matchesType(tp1, res2, true)
+            alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true)
           case _ =>
             false // remember that tparams1.nonEmpty is now an invariant of PolyType
         }
@@ -6321,7 +4241,7 @@ trait Types extends api.Types { self: SymbolTable =>
           case ExistentialType(tparams2, res2) =>
             matchesQuantified(tparams1, tparams2, res1, res2)
           case _ =>
-            if (alwaysMatchSimple) matchesType(res1, tp2, true)
+            if (alwaysMatchSimple) matchesType(res1, tp2, alwaysMatchSimple = true)
             else lastTry
         }
       case TypeRef(_, sym, Nil) if sym.isModuleClass =>
@@ -6374,7 +4294,7 @@ trait Types extends api.Types { self: SymbolTable =>
 */
 
   /** Are `syms1` and `syms2` parameter lists with pairwise equivalent types? */
-  private def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match {
+  protected[internal] def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match {
     case Nil =>
       syms2.isEmpty
     case sym1 :: rest1 =>
@@ -6391,99 +4311,6 @@ trait Types extends api.Types { self: SymbolTable =>
       }
   }
 
-  /** like map2, but returns list `xs` itself - instead of a copy - if function
-   *  `f` maps all elements to themselves.
-   */
-  def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
-    if (xs.isEmpty) xs
-    else {
-      val x1 = f(xs.head, ys.head)
-      val xs1 = map2Conserve(xs.tail, ys.tail)(f)
-      if ((x1 eq xs.head) && (xs1 eq xs.tail)) xs
-      else x1 :: xs1
-    }
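A self-contained copy of the helper, plus a quick check that an identity-preserving function really does hand back the original list by reference, which is the point of the Conserve variant:

    def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
      if (xs.isEmpty) xs
      else {
        val x1  = f(xs.head, ys.head)
        val xs1 = map2Conserve(xs.tail, ys.tail)(f)
        if ((x1 eq xs.head) && (xs1 eq xs.tail)) xs else x1 :: xs1
      }

    val names   = List("a", "b", "c")
    val same    = map2Conserve(names, List(1, 2, 3))((s, _) => s)
    val changed = map2Conserve(names, List(1, 2, 3))((s, n) => if (n == 2) s + "!" else s)
    println(same eq names)   // true: no copy was allocated
    println(changed)         // List(a, b!, c)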
-
-  /** Solve constraint collected in types `tvars`.
-   *
-   *  @param tvars      All type variables to be instantiated.
-   *  @param tparams    The type parameters corresponding to `tvars`
-   *  @param variances  The variances of type parameters; need to reverse
-   *                    solution direction for all contravariant variables.
-   *  @param upper      When `true` search for max solution else min.
-   */
-  def solve(tvars: List[TypeVar], tparams: List[Symbol],
-            variances: List[Int], upper: Boolean): Boolean =
-     solve(tvars, tparams, variances, upper, AnyDepth)
-
-  def solve(tvars: List[TypeVar], tparams: List[Symbol],
-            variances: List[Int], upper: Boolean, depth: Int): Boolean = {
-
-    def solveOne(tvar: TypeVar, tparam: Symbol, variance: Int) {
-      if (tvar.constr.inst == NoType) {
-        val up = if (variance != CONTRAVARIANT) upper else !upper
-        tvar.constr.inst = null
-        val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo
-        //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound))
-        var cyclic = bound contains tparam
-        foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => {
-          val ok = (tparam2 != tparam) && (
-               (bound contains tparam2)
-            ||  up && (tparam2.info.bounds.lo =:= tparam.tpeHK)
-            || !up && (tparam2.info.bounds.hi =:= tparam.tpeHK)
-          )
-          if (ok) {
-            if (tvar2.constr.inst eq null) cyclic = true
-            solveOne(tvar2, tparam2, variance2)
-          }
-        })
-        if (!cyclic) {
-          if (up) {
-            if (bound.typeSymbol != AnyClass) {
-              log(s"$tvar addHiBound $bound.instantiateTypeParams($tparams, $tvars)")
-              tvar addHiBound bound.instantiateTypeParams(tparams, tvars)
-            }
-            for (tparam2 <- tparams)
-              tparam2.info.bounds.lo.dealias match {
-                case TypeRef(_, `tparam`, _) =>
-                  log(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
-                  tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
-                case _ =>
-              }
-          } else {
-            if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) {
-              log(s"$tvar addLoBound $bound.instantiateTypeParams($tparams, $tvars)")
-              tvar addLoBound bound.instantiateTypeParams(tparams, tvars)
-            }
-            for (tparam2 <- tparams)
-              tparam2.info.bounds.hi.dealias match {
-                case TypeRef(_, `tparam`, _) =>
-                  log(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
-                  tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
-                case _ =>
-              }
-          }
-        }
-        tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar
-
-        //println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen)))
-        val newInst = (
-          if (up) {
-            if (depth != AnyDepth) glb(tvar.constr.hiBounds, depth) else glb(tvar.constr.hiBounds)
-          } else {
-            if (depth != AnyDepth) lub(tvar.constr.loBounds, depth) else lub(tvar.constr.loBounds)
-          }
-        )
-        log(s"$tvar setInst $newInst")
-        tvar setInst newInst
-        //Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG
-      }
-    }
-
-    // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
-    foreach3(tvars, tparams, variances)(solveOne)
-    tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst))
-  }
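Setting aside the cyclic-bound handling, the solution direction is the easy part to illustrate: an upper (maximal) solution takes the greatest value below every upper bound, a lower (minimal) one takes the least value above every lower bound. A toy sketch over integer intervals (Interval is illustrative only; the real code computes glbs and lubs of types):

    final case class Interval(los: List[Int], his: List[Int]) {
      def solve(upper: Boolean): Int =
        if (upper) his.min    // analogous to glb(hiBounds)
        else       los.max    // analogous to lub(loBounds)
    }

    val constraint = Interval(los = List(1, 3), his = List(7, 9))
    println(constraint.solve(upper = true))   // 7
    println(constraint.solve(upper = false))  // 3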
-
   /** Do type arguments `targs` conform to formal parameters `tparams`?
    */
   def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = {
@@ -6494,167 +4321,7 @@ trait Types extends api.Types { self: SymbolTable =>
   }
 
   def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] =
-    tparams map (_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds)
-
-// Lubs and Glbs ---------------------------------------------------------
-
-  private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Int) {
-    import util.TableDef
-    import TableDef.Column
-    def str(tp: Type) = {
-      if (tp == NoType) ""
-      else {
-        val s = ("" + tp).replaceAll("""[\w.]+\.(\w+)""", "$1")
-        if (s.length < 60) s
-        else (s take 57) + "..."
-      }
-    }
-
-    val sorted       = btsMap.toList.sortWith((x, y) => x._1.typeSymbol isLess y._1.typeSymbol)
-    val maxSeqLength = sorted.map(_._2.size).max
-    val padded       = sorted map (_._2.padTo(maxSeqLength, NoType))
-    val transposed   = padded.transpose
-
-    val columns: List[Column[List[Type]]] = mapWithIndex(sorted) {
-      case ((k, v), idx) =>
-        Column(str(k), (xs: List[Type]) => str(xs(idx)), true)
-    }
-
-    val tableDef = TableDef(columns: _*)
-    val formatted = tableDef.table(transposed)
-    println("** Depth is " + depth + "\n" + formatted)
-  }
-
-  /** From a list of types, find any which take type parameters
-   *  where the type parameter bounds contain references to any other
-   *  types in the list (including the type itself).
-   *
-   *  @return List of symbol pairs holding the recursive type
-   *    parameter and the parameter which references it.
-   */
-  def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = {
-    if (ts.isEmpty) Nil
-    else {
-      val sym = ts.head.typeSymbol
-      require(ts.tail forall (_.typeSymbol == sym), ts)
-      for (p <- sym.typeParams ; in <- sym.typeParams ; if in.info.bounds contains p) yield
-        p -> in
-    }
-  }
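A recursive (F-bounded) parameter of the kind this method reports can also be spotted with the public runtime-reflection API; Rec below is a hypothetical example, while the real method works on the compiler's internal symbols:

    import scala.reflect.runtime.universe._

    trait Rec[A <: Rec[A]]

    val params = typeOf[Rec[_]].typeSymbol.asType.typeParams
    val pairs  = for (p <- params; in <- params if in.typeSignature.contains(p)) yield (p, in)
    println(pairs)   // List((type A,type A)): A occurs in its own bound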
-
-  /** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix),
-   * compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types:
-   *
-   *    xs <= ys   iff   forall y in ys exists x in xs such that x <: y
-   *
-   *  @arg tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor)
-   *                (these type parameters may be referred to by type arguments in the BTS column of those types,
-   *                and must be interpreted as bound variables; i.e., under a type lambda that wraps the types that refer to these type params)
-   *  @arg tsBts    a matrix whose columns are basetype sequences
-   *                the first row is the original list of types for which we're computing the lub
-   *                  (except that type constructors have been applied to their dummyArgs)
-   *  @See baseTypeSeq  for a definition of sorted and upwards closed.
-   */
-  private def lubList(ts: List[Type], depth: Int): List[Type] = {
-    // Matching the type params of one of the initial types means dummies.
-    val initialTypeParams = ts map (_.typeParams)
-    def isHotForTs(xs: List[Type]) = initialTypeParams contains (xs map (_.typeSymbol))
-
-    def elimHigherOrderTypeParam(tp: Type) = tp match {
-      case TypeRef(pre, sym, args) if args.nonEmpty && isHotForTs(args) => tp.typeConstructor
-      case _                                                            => tp
-    }
-    var lubListDepth = 0
-    def loop(tsBts: List[List[Type]]): List[Type] = {
-      lubListDepth += 1
-
-      if (tsBts.isEmpty || (tsBts exists typeListIsEmpty)) Nil
-      else if (tsBts.tail.isEmpty) tsBts.head
-      else {
-        // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts.
-        // Invariant: all symbols "under" (closer to the first row) the frontier
-        // are smaller (according to _.isLess) than the ones "on and beyond" the frontier
-        val ts0  = tsBts map (_.head)
-
-        // Is the frontier made up of types with the same symbol?
-        val isUniformFrontier = (ts0: @unchecked) match {
-          case t :: ts  => ts forall (_.typeSymbol == t.typeSymbol)
-        }
-
-        // Produce a single type for this frontier by merging the prefixes and arguments of those
-        // typerefs that share the same symbol: that symbol is the current maximal symbol for which
-        // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before
-        // merging, strip targs that refer to bound tparams (when we're computing the lub of type
-        // constructors.) Also filter out all types that are a subtype of some other type.
-        if (isUniformFrontier) {
-          if (settings.debug.value || printLubs) {
-            val fbounds = findRecursiveBounds(ts0)
-            if (fbounds.nonEmpty) {
-              println("Encountered " + fbounds.size + " recursive bounds while lubbing " + ts0.size + " types.")
-              for ((p0, p1) <- fbounds) {
-                val desc = if (p0 == p1) "its own bounds" else "the bounds of " + p1
-
-                println("  " + p0.fullLocationString + " appears in " + desc)
-                println("    " + p1 + " " + p1.info.bounds)
-              }
-              println("")
-            }
-          }
-          val tails = tsBts map (_.tail)
-          mergePrefixAndArgs(elimSub(ts0 map elimHigherOrderTypeParam, depth), 1, depth) match {
-            case Some(tp) => tp :: loop(tails)
-            case _        => loop(tails)
-          }
-        }
-        else {
-          // frontier is not uniform yet, move it beyond the current minimal symbol;
-          // lather, rinSe, repeat
-          val sym    = minSym(ts0)
-          val newtps = tsBts map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts)
-          if (printLubs) {
-            val str = (newtps.zipWithIndex map { case (tps, idx) =>
-              tps.map("        " + _ + "\n").mkString("   (" + idx + ")\n", "", "\n")
-            }).mkString("")
-
-            println("Frontier(\n" + str + ")")
-            printLubMatrix((ts zip tsBts).toMap, lubListDepth)
-          }
-
-          loop(newtps)
-        }
-      }
-    }
-
-    val initialBTSes = ts map (_.baseTypeSeq.toList)
-    if (printLubs)
-      printLubMatrix((ts zip initialBTSes).toMap, depth)
-
-    loop(initialBTSes)
-  }
-
-  /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). */
-  private def minSym(tps: List[Type]): Symbol =
-    (tps.head.typeSymbol /: tps.tail) {
-      (sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1
-    }
-
-  /** A minimal type list which has a given list of types as its base type sequence */
-  def spanningTypes(ts: List[Type]): List[Type] = ts match {
-    case List() => List()
-    case first :: rest =>
-      first :: spanningTypes(
-        rest filter (t => !first.typeSymbol.isSubClass(t.typeSymbol)))
-  }
-
-  /** Eliminate from list of types all elements which are a supertype
-   *  of some other element of the list. */
-  private def elimSuper(ts: List[Type]): List[Type] = ts match {
-    case List() => List()
-    case List(t) => List(t)
-    case t :: ts1 =>
-      val rest = elimSuper(ts1 filter (t1 => !(t <:< t1)))
-      if (rest exists (t1 => t1 <:< t)) rest else t :: rest
-  }
+    mapList(tparams)(_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds)
 
   def elimAnonymousClass(t: Type) = t match {
     case TypeRef(pre, clazz, Nil) if clazz.isAnonymousClass =>
@@ -6662,397 +4329,6 @@ trait Types extends api.Types { self: SymbolTable =>
     case _ =>
       t
   }
-  def elimRefinement(t: Type) = t match {
-    case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
-    case _                                             => t
-  }
-
-  /** Eliminate from list of types all elements which are a subtype
-   *  of some other element of the list. */
-  private def elimSub(ts: List[Type], depth: Int): List[Type] = {
-    def elimSub0(ts: List[Type]): List[Type] = ts match {
-      case List() => List()
-      case List(t) => List(t)
-      case t :: ts1 =>
-        val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, decr(depth))))
-        if (rest exists (t1 => isSubType(t, t1, decr(depth)))) rest else t :: rest
-    }
-    val ts0 = elimSub0(ts)
-    if (ts0.isEmpty || ts0.tail.isEmpty) ts0
-    else {
-      val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden))
-      if (ts1 eq ts0) ts0
-      else elimSub(ts1, depth)
-    }
-  }
-
-  private def stripExistentialsAndTypeVars(ts: List[Type]): (List[Type], List[Symbol]) = {
-    val quantified = ts flatMap {
-      case ExistentialType(qs, _) => qs
-      case t => List()
-    }
-    def stripType(tp: Type): Type = tp match {
-      case ExistentialType(_, res) =>
-        res
-      case tv@TypeVar(_, constr) =>
-        if (tv.instValid) stripType(constr.inst)
-        else if (tv.untouchable) tv
-        else abort("trying to do lub/glb of typevar "+tp)
-      case t => t
-    }
-    val strippedTypes = ts mapConserve stripType
-    (strippedTypes, quantified)
-  }
-
-  def weakLub(ts: List[Type]) =
-    if (ts.nonEmpty && (ts forall isNumericValueType)) (numericLub(ts), true)
-    else if (ts exists typeHasAnnotations)
-      (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true)
-    else (lub(ts), false)
-
-  def weakGlb(ts: List[Type]) = {
-    if (ts.nonEmpty && (ts forall isNumericValueType)) {
-      val nglb = numericGlb(ts)
-      if (nglb != NoType) (nglb, true)
-      else (glb(ts), false)
-    } else if (ts exists typeHasAnnotations) {
-      (annotationsGlb(glb(ts map (_.withoutAnnotations)), ts), true)
-    } else (glb(ts), false)
-  }
-
-  def numericLub(ts: List[Type]) =
-    ts reduceLeft ((t1, t2) =>
-      if (isNumericSubType(t1, t2)) t2
-      else if (isNumericSubType(t2, t1)) t1
-      else IntClass.tpe)
-
-  def numericGlb(ts: List[Type]) =
-    ts reduceLeft ((t1, t2) =>
-      if (isNumericSubType(t1, t2)) t1
-      else if (isNumericSubType(t2, t1)) t2
-      else NoType)
-
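
The removed numericLub/numericGlb are plain reduceLeft folds over the numeric widening order, with Int as the fallback when the two types are unrelated (for example Byte and Char). The following self-contained sketch models the same fold over type names; the widening table is an assumption of the sketch, not the compiler's isNumericSubType:

    object NumericLubSketch {
      // which types each numeric type weakly conforms to (reflexive)
      val widens: Map[String, Set[String]] = Map(
        "Byte"   -> Set("Byte", "Short", "Int", "Long", "Float", "Double"),
        "Short"  -> Set("Short", "Int", "Long", "Float", "Double"),
        "Char"   -> Set("Char", "Int", "Long", "Float", "Double"),
        "Int"    -> Set("Int", "Long", "Float", "Double"),
        "Long"   -> Set("Long", "Float", "Double"),
        "Float"  -> Set("Float", "Double"),
        "Double" -> Set("Double")
      )
      def isNumericSub(t1: String, t2: String): Boolean = widens(t1)(t2)

      // same reduceLeft shape as the removed numericLub: keep the wider type,
      // or fall back to Int when neither widens to the other
      def numericLub(ts: List[String]): String =
        ts.reduceLeft { (t1, t2) =>
          if (isNumericSub(t1, t2)) t2
          else if (isNumericSub(t2, t1)) t1
          else "Int"
        }

      def main(args: Array[String]): Unit = {
        println(numericLub(List("Byte", "Short", "Int")))  // Int
        println(numericLub(List("Byte", "Char")))          // Int (unrelated pair)
        println(numericLub(List("Int", "Long")))           // Long
      }
    }
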
-  def isWeakSubType(tp1: Type, tp2: Type) =
-    tp1.deconst.normalize match {
-      case TypeRef(_, sym1, _) if isNumericValueClass(sym1) =>
-        tp2.deconst.normalize match {
-          case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
-            isNumericSubClass(sym1, sym2)
-          case tv2 @ TypeVar(_, _) =>
-            tv2.registerBound(tp1, isLowerBound = true, isNumericBound = true)
-          case _ =>
-            isSubType(tp1, tp2)
-        }
-      case tv1 @ TypeVar(_, _) =>
-        tp2.deconst.normalize match {
-          case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
-            tv1.registerBound(tp2, isLowerBound = false, isNumericBound = true)
-          case _ =>
-            isSubType(tp1, tp2)
-        }
-      case _ =>
-        isSubType(tp1, tp2)
-    }
-
-  /** The isNumericValueType tests appear redundant, but without them
-   *  test/continuations-neg/function3.scala goes into an infinite loop.
-   *  (Even if the calls are to typeSymbolDirect.)
-   */
-  def isNumericSubType(tp1: Type, tp2: Type): Boolean = (
-       isNumericValueType(tp1)
-    && isNumericValueType(tp2)
-    && isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol)
-  )
-
-  private val lubResults = new mutable.HashMap[(Int, List[Type]), Type]
-  private val glbResults = new mutable.HashMap[(Int, List[Type]), Type]
-
-  def lub(ts: List[Type]): Type = ts match {
-    case List() => NothingClass.tpe
-    case List(t) => t
-    case _ =>
-      if (Statistics.canEnable) Statistics.incCounter(lubCount)
-      val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
-      try {
-         lub(ts, lubDepth(ts))
-      } finally {
-        lubResults.clear()
-        glbResults.clear()
-        if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
-      }
-  }
-
-  /** The least upper bound wrt <:< of a list of types */
-  private def lub(ts: List[Type], depth: Int): Type = {
-    def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match {
-      case List() => NothingClass.tpe
-      case List(t) => t
-      case ts @ PolyType(tparams, _) :: _ =>
-        val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
-          tparam.cloneSymbol.setInfo(glb(bounds, depth)))
-        PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
-      case ts @ (mt @ MethodType(params, _)) :: rest =>
-        MethodType(params, lub0(matchingRestypes(ts, mt.paramTypes)))
-      case ts @ NullaryMethodType(_) :: rest =>
-        NullaryMethodType(lub0(matchingRestypes(ts, Nil)))
-      case ts @ TypeBounds(_, _) :: rest =>
-        TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
-      case ts @ AnnotatedType(annots, tpe, _) :: rest =>
-        annotationsLub(lub0(ts map (_.withoutAnnotations)), ts)
-      case ts =>
-        lubResults get (depth, ts) match {
-          case Some(lubType) =>
-            lubType
-          case None =>
-            lubResults((depth, ts)) = AnyClass.tpe
-            val res = if (depth < 0) AnyClass.tpe else lub1(ts)
-            lubResults((depth, ts)) = res
-            res
-        }
-    }
-    def lub1(ts0: List[Type]): Type = {
-      val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
-      val lubBaseTypes: List[Type] = lubList(ts, depth)
-      val lubParents = spanningTypes(lubBaseTypes)
-      val lubOwner = commonOwner(ts)
-      val lubBase = intersectionType(lubParents, lubOwner)
-      val lubType =
-        if (phase.erasedTypes || depth == 0) lubBase
-        else {
-          val lubRefined  = refinedType(lubParents, lubOwner)
-          val lubThisType = lubRefined.typeSymbol.thisType
-          val narrowts    = ts map (_.narrow)
-          def excludeFromLub(sym: Symbol) = (
-               sym.isClass
-            || sym.isConstructor
-            || !sym.isPublic
-            || isGetClass(sym)
-            || narrowts.exists(t => !refines(t, sym))
-          )
-          def lubsym(proto: Symbol): Symbol = {
-            val prototp = lubThisType.memberInfo(proto)
-            val syms = narrowts map (t =>
-              t.nonPrivateMember(proto.name).suchThat(sym =>
-                sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t)))
-            if (syms contains NoSymbol) NoSymbol
-            else {
-              val symtypes =
-                map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
-              if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
-                proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth)))
-              else if (symtypes.tail forall (symtypes.head =:= _))
-                proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head)
-              else {
-                def lubBounds(bnds: List[TypeBounds]): TypeBounds =
-                  TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
-                lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos)
-                  .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds)))
-              }
-            }
-          }
-          def refines(tp: Type, sym: Symbol): Boolean = {
-            val syms = tp.nonPrivateMember(sym.name).alternatives;
-            !syms.isEmpty && (syms forall (alt =>
-              // todo alt != sym is strictly speaking not correct, but without it we lose
-              // efficiency.
-              alt != sym && !specializesSym(lubThisType, sym, tp, alt, depth)))
-          }
-          // add a refinement symbol for all non-class members of lubBase
-          // which are refined by every type in ts.
-          for (sym <- lubBase.nonPrivateMembers ; if !excludeFromLub(sym)) {
-            try {
-              val lsym = lubsym(sym)
-              if (lsym != NoSymbol) addMember(lubThisType, lubRefined, lsym, depth)
-            } catch {
-              case ex: NoCommonType =>
-            }
-          }
-          if (lubRefined.decls.isEmpty) lubBase
-          else if (!verifyLubs) lubRefined
-          else {
-            // Verify that every given type conforms to the calculated lub.
-            // In theory this should not be necessary, but higher-order type
-            // parameters are not handled correctly.
-            val ok = ts forall { t =>
-              isSubType(t, lubRefined, depth) || {
-                if (settings.debug.value || printLubs) {
-                  Console.println(
-                    "Malformed lub: " + lubRefined + "\n" +
-                    "Argument " + t + " does not conform.  Falling back to " + lubBase
-                  )
-                }
-                false
-              }
-            }
-            // If not, fall back on the more conservative calculation.
-            if (ok) lubRefined
-            else lubBase
-          }
-        }
-      // dropRepeatedParamType is a localized fix for SI-6897. We should probably
-      // integrate that transformation at a lower level in master, but lubs are
-      // the likely and maybe only spot they escape, so fixing here for 2.10.1.
-      existentialAbstraction(tparams, dropRepeatedParamType(lubType))
-    }
-    if (printLubs) {
-      println(indent + "lub of " + ts + " at depth "+depth)//debug
-      indent = indent + "  "
-      assert(indent.length <= 100)
-    }
-    if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
-    val res = lub0(ts)
-    if (printLubs) {
-      indent = indent stripSuffix "  "
-      println(indent + "lub of " + ts + " is " + res)//debug
-    }
-    if (ts forall typeIsNotNull) res.notNull else res
-  }
-
-  val GlbFailure = new Throwable
-
-  /** A global counter for glb calls in the `specializes` query connected to the `addMembers`
-   *  call in `glb`. There's a possible infinite recursion when `specializes` calls
-   *  memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb.
-   *  The counter breaks this recursion after two calls.
-   *  If the recursion is broken, no member is added to the glb.
-   */
-  private var globalGlbDepth = 0
-  private final val globalGlbLimit = 2
-
-  /** The greatest lower bound of a list of types (as determined by `<:<`). */
-  def glb(ts: List[Type]): Type = elimSuper(ts) match {
-    case List() => AnyClass.tpe
-    case List(t) => t
-    case ts0 =>
-      if (Statistics.canEnable) Statistics.incCounter(lubCount)
-      val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
-      try {
-        glbNorm(ts0, lubDepth(ts0))
-      } finally {
-        lubResults.clear()
-        glbResults.clear()
-        if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
-     }
-  }
-
-  private def glb(ts: List[Type], depth: Int): Type = elimSuper(ts) match {
-    case List() => AnyClass.tpe
-    case List(t) => t
-    case ts0 => glbNorm(ts0, depth)
-  }
-
-  /** The greatest lower bound of a list of types (as determined by `<:<`), which have been normalized
-   *  with regard to `elimSuper`. */
-  protected def glbNorm(ts: List[Type], depth: Int): Type = {
-    def glb0(ts0: List[Type]): Type = ts0 match {
-      case List() => AnyClass.tpe
-      case List(t) => t
-      case ts @ PolyType(tparams, _) :: _ =>
-        val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
-          tparam.cloneSymbol.setInfo(lub(bounds, depth)))
-        PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth))
-      case ts @ (mt @ MethodType(params, _)) :: rest =>
-        MethodType(params, glbNorm(matchingRestypes(ts, mt.paramTypes), depth))
-      case ts @ NullaryMethodType(_) :: rest =>
-        NullaryMethodType(glbNorm(matchingRestypes(ts, Nil), depth))
-      case ts @ TypeBounds(_, _) :: rest =>
-        TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
-      case ts =>
-        glbResults get (depth, ts) match {
-          case Some(glbType) =>
-            glbType
-          case _ =>
-            glbResults((depth, ts)) = NothingClass.tpe
-            val res = if (depth < 0) NothingClass.tpe else glb1(ts)
-            glbResults((depth, ts)) = res
-            res
-        }
-    }
-    def glb1(ts0: List[Type]): Type = {
-      try {
-        val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
-        val glbOwner = commonOwner(ts)
-        def refinedToParents(t: Type): List[Type] = t match {
-          case RefinedType(ps, _) => ps flatMap refinedToParents
-          case _ => List(t)
-        }
-        def refinedToDecls(t: Type): List[Scope] = t match {
-          case RefinedType(ps, decls) =>
-            val dss = ps flatMap refinedToDecls
-            if (decls.isEmpty) dss else decls :: dss
-          case _ => List()
-        }
-        val ts1 = ts flatMap refinedToParents
-        val glbBase = intersectionType(ts1, glbOwner)
-        val glbType =
-          if (phase.erasedTypes || depth == 0) glbBase
-          else {
-            val glbRefined = refinedType(ts1, glbOwner)
-            val glbThisType = glbRefined.typeSymbol.thisType
-            def glbsym(proto: Symbol): Symbol = {
-              val prototp = glbThisType.memberInfo(proto)
-              val syms = for (t <- ts;
-                    alt <- (t.nonPrivateMember(proto.name).alternatives);
-                if glbThisType.memberInfo(alt) matches prototp
-              ) yield alt
-              val symtypes = syms map glbThisType.memberInfo
-              assert(!symtypes.isEmpty)
-              proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted(
-                if (proto.isTerm) glb(symtypes, decr(depth))
-                else {
-                  def isTypeBound(tp: Type) = tp match {
-                    case TypeBounds(_, _) => true
-                    case _ => false
-                  }
-                  def glbBounds(bnds: List[Type]): TypeBounds = {
-                    val lo = lub(bnds map (_.bounds.lo), decr(depth))
-                    val hi = glb(bnds map (_.bounds.hi), decr(depth))
-                    if (lo <:< hi) TypeBounds(lo, hi)
-                    else throw GlbFailure
-                  }
-                  val symbounds = symtypes filter isTypeBound
-                  var result: Type =
-                    if (symbounds.isEmpty)
-                      TypeBounds.empty
-                    else glbBounds(symbounds)
-                  for (t <- symtypes if !isTypeBound(t))
-                    if (result.bounds containsType t) result = t
-                    else throw GlbFailure
-                  result
-                })
-            }
-            if (globalGlbDepth < globalGlbLimit)
-              try {
-                globalGlbDepth += 1
-                val dss = ts flatMap refinedToDecls
-                for (ds <- dss; sym <- ds.iterator)
-                  if (globalGlbDepth < globalGlbLimit && !specializesSym(glbThisType, sym, depth))
-                    try {
-                      addMember(glbThisType, glbRefined, glbsym(sym), depth)
-                    } catch {
-                      case ex: NoCommonType =>
-                    }
-              } finally {
-                globalGlbDepth -= 1
-              }
-            if (glbRefined.decls.isEmpty) glbBase else glbRefined
-          }
-        existentialAbstraction(tparams, glbType)
-      } catch {
-        case GlbFailure =>
-          if (ts forall (t => NullClass.tpe <:< t)) NullClass.tpe
-          else NothingClass.tpe
-      }
-    }
-    // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + "  " } //DEBUG
-
-    if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
-    val res = glb0(ts)
-
-    // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
-
-    if (ts exists typeIsNotNull) res.notNull else res
-  }
 
   /** A list of the typevars in a type. */
   def typeVarsInType(tp: Type): List[TypeVar] = {
@@ -7063,29 +4339,32 @@ trait Types extends api.Types { self: SymbolTable =>
     }
     tvs.reverse
   }
-  /** Make each type var in this type use its original type for comparisons instead
-   * of collecting constraints.
-   */
-  def suspendTypeVarsInType(tp: Type): List[TypeVar] = {
-    val tvs = typeVarsInType(tp)
-    // !!! Is it somehow guaranteed that this will not break under nesting?
-    // In general one has to save and restore the contents of the field...
+
+  // If this type contains type variables, put them to sleep for a while.
+  // Don't just wipe them out by replacing them by the corresponding type
+  // parameter, as that messes up (e.g.) type variables in type refinements.
+  // Without this, the matchesType call would lead to type variables on both
+  // sides of a subtyping/equality judgement, which can lead to recursive types
+  // being constructed. See pos/t0851 for a situation where this happens.
+  @inline final def suspendingTypeVars[T](tvs: List[TypeVar])(op: => T): T = {
+    val saved = tvs map (_.suspended)
     tvs foreach (_.suspended = true)
-    tvs
+
+    try op
+    finally foreach2(tvs, saved)(_.suspended = _)
   }
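
Unlike the removed suspendTypeVarsInType, which handed back the list of suspended variables and left restoration to the caller, the new suspendingTypeVars records each variable's previous flag and restores exactly those values, so nested suspensions compose. A self-contained sketch of that save/set/restore discipline with a toy TV class (hypothetical, not the compiler's TypeVar):

    object SuspendSketch {
      final class TV(var suspended: Boolean = false)

      def suspending[T](tvs: List[TV])(op: => T): T = {
        val saved = tvs.map(_.suspended)        // remember the old flags
        tvs.foreach(_.suspended = true)
        try op
        finally (tvs zip saved).foreach { case (tv, s) => tv.suspended = s }  // restore, not reset
      }

      def main(args: Array[String]): Unit = {
        val a, b = new TV
        suspending(List(a, b)) {
          suspending(List(b)) { assert(b.suspended) }
          // a naive "set back to false" would have un-suspended b here
          assert(a.suspended && b.suspended)
        }
        assert(!a.suspended && !b.suspended)
        println("nesting preserved the flags")
      }
    }
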
 
-  /** Compute lub (if `variance == 1`) or glb (if `variance == -1`) of given list
+  /** Compute lub (if `variance == Covariant`) or glb (if `variance == Contravariant`) of given list
    *  of types `tps`. All types in `tps` are typerefs or singletypes
    *  with the same symbol.
-   *  Return `Some(x)` if the computation succeeds with result `x`.
-   *  Return `None` if the computation fails.
+   *  Return `x` if the computation succeeds with result `x`.
+   *  Return `NoType` if the computation fails.
    */
-  def mergePrefixAndArgs(tps: List[Type], variance: Int, depth: Int): Option[Type] = tps match {
-    case List(tp) =>
-      Some(tp)
+  def mergePrefixAndArgs(tps: List[Type], variance: Variance, depth: Depth): Type = tps match {
+    case tp :: Nil => tp
     case TypeRef(_, sym, _) :: rest =>
       val pres = tps map (_.prefix) // prefix normalizes automatically
-      val pre = if (variance == 1) lub(pres, depth) else glb(pres, depth)
+      val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth)
       val argss = tps map (_.normalize.typeArgs) // symbol equality (of the tp in tps) was checked using typeSymbol, which normalizes, so should normalize before retrieving arguments
       val capturedParams = new ListBuffer[Symbol]
       try {
@@ -7094,12 +4373,13 @@ trait Types extends api.Types { self: SymbolTable =>
           // if argss contain one value type and some other type, the lub is Object
           // if argss contain several reference types, the lub is an array over lub of argtypes
           if (argss exists typeListIsEmpty) {
-            None  // something is wrong: an array without a type arg.
-          } else {
+            NoType  // something is wrong: an array without a type arg.
+          }
+          else {
             val args = argss map (_.head)
-            if (args.tail forall (_ =:= args.head)) Some(typeRef(pre, sym, List(args.head)))
-            else if (args exists (arg => isPrimitiveValueClass(arg.typeSymbol))) Some(ObjectClass.tpe)
-            else Some(typeRef(pre, sym, List(lub(args))))
+            if (args.tail forall (_ =:= args.head)) typeRef(pre, sym, List(args.head))
+            else if (args exists (arg => isPrimitiveValueClass(arg.typeSymbol))) ObjectTpe
+            else typeRef(pre, sym, List(lub(args)))
           }
         }
         else transposeSafe(argss) match {
@@ -7107,30 +4387,28 @@ trait Types extends api.Types { self: SymbolTable =>
             // transpose freaked out because of irregular argss
             // catching just in case (shouldn't happen, but also doesn't cost us)
             // [JZ] It happens: see SI-5683.
-            debuglog("transposed irregular matrix!?" +(tps, argss))
-            None
+            debuglog(s"transposed irregular matrix!? tps=$tps argss=$argss")
+            NoType
           case Some(argsst) =>
-            val args = map2(sym.typeParams, argsst) { (tparam, as) =>
-              if (depth == 0) {
-                if (tparam.variance == variance) {
-                  // Take the intersection of the upper bounds of the type parameters
-                  // rather than falling all the way back to "Any", otherwise we end up not
-                  // conforming to bounds.
-                  val bounds0 = sym.typeParams map (_.info.bounds.hi) filterNot (_.typeSymbol == AnyClass)
-                  if (bounds0.isEmpty) AnyClass.tpe
-                  else intersectionType(bounds0 map (b => b.asSeenFrom(tps.head, sym)))
-                }
-                else if (tparam.variance == -variance) NothingClass.tpe
-                else NoType
+            val args = map2(sym.typeParams, argsst) { (tparam, as0) =>
+              val as = as0.distinct
+              if (as.size == 1) as.head
+              else if (depth.isZero) {
+                log("Giving up merging args: can't unify %s under %s".format(as.mkString(", "), tparam.fullLocationString))
+                // Don't return "Any" (or "Nothing") when we have to give up due to
+                // recursion depth. Return NoType, which prevents us from poisoning
+                // lublist's results. It can recognize the recursion and deal with it, but
+                // only if we aren't returning invalid types.
+                NoType
               }
               else {
-                if (tparam.variance == variance) lub(as, decr(depth))
-                else if (tparam.variance == -variance) glb(as, decr(depth))
+                if (tparam.variance == variance) lub(as, depth.decr)
+                else if (tparam.variance == variance.flip) glb(as, depth.decr)
                 else {
-                  val l = lub(as, decr(depth))
-                  val g = glb(as, decr(depth))
+                  val l = lub(as, depth.decr)
+                  val g = glb(as, depth.decr)
                   if (l <:< g) l
-                else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
+                  else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
                        // just err on the conservative side, i.e. with a bound that is too high.
                        // if(!(tparam.info.bounds contains tparam))   //@M can't deal with f-bounds, see #2251
 
@@ -7141,22 +4419,22 @@ trait Types extends api.Types { self: SymbolTable =>
                 }
               }
             }
-            if (args contains NoType) None
-            else Some(existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args)))
+            if (args contains NoType) NoType
+            else existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args))
         }
       } catch {
-        case ex: MalformedType => None
+        case ex: MalformedType => NoType
       }
     case SingleType(_, sym) :: rest =>
       val pres = tps map (_.prefix)
-      val pre = if (variance == 1) lub(pres, depth) else glb(pres, depth)
-      try {
-        Some(singleType(pre, sym))
-      } catch {
-        case ex: MalformedType => None
-      }
+      val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth)
+      try singleType(pre, sym)
+      catch { case ex: MalformedType => NoType }
     case ExistentialType(tparams, quantified) :: rest =>
-      mergePrefixAndArgs(quantified :: rest, variance, depth) map (existentialAbstraction(tparams, _))
+      mergePrefixAndArgs(quantified :: rest, variance, depth) match {
+        case NoType => NoType
+        case tpe    => existentialAbstraction(tparams, tpe)
+      }
     case _ =>
       abort(s"mergePrefixAndArgs($tps, $variance, $depth): unsupported tps")
   }
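
The return type change from Option[Type] to Type with NoType as the failure sentinel avoids an Option allocation per merge and lets callers pattern-match on the result directly, as the rewritten ExistentialType case above does. A hypothetical sketch of the same Option-to-sentinel style with toy types (not the compiler's API):

    object SentinelSketch {
      sealed trait Ty
      case object NoTy extends Ty                             // plays the role of NoType
      final case class Ref(name: String) extends Ty

      // before: def merge(ts: List[Ty]): Option[Ty]
      // after : failure is signalled with the NoTy sentinel, no Option boxing
      def merge(ts: List[Ty]): Ty = ts match {
        case t :: Nil                        => t
        case Ref(a) :: Ref(b) :: _ if a == b => Ref(a)
        case _                               => NoTy
      }

      def main(args: Array[String]): Unit =
        merge(List(Ref("A"), Ref("B"))) match {
          case NoTy => println("no merge")                    // callers match on the sentinel
          case tpe  => println(s"merged to $tpe")
        }
    }
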
@@ -7166,14 +4444,14 @@ trait Types extends api.Types { self: SymbolTable =>
   /** Make symbol `sym` a member of scope `tp.decls`
    *  where `thistp` is the narrowed owner type of the scope.
    */
-  def addMember(thistp: Type, tp: Type, sym: Symbol, depth: Int) {
+  def addMember(thistp: Type, tp: Type, sym: Symbol, depth: Depth) {
     assert(sym != NoSymbol)
     // debuglog("add member " + sym+":"+sym.info+" to "+thistp) //DEBUG
     if (!specializesSym(thistp, sym, depth)) {
       if (sym.isTerm)
         for (alt <- tp.nonPrivateDecl(sym.name).alternatives)
           if (specializesSym(thistp, sym, thistp, alt, depth))
-            tp.decls unlink alt;
+            tp.decls unlink alt
       tp.decls enter sym
     }
   }
@@ -7186,51 +4464,6 @@ trait Types extends api.Types { self: SymbolTable =>
   def inheritsJavaVarArgsMethod(clazz: Symbol) =
     clazz.thisType.baseClasses exists isJavaVarargsAncestor
 
-  /** All types in list must be polytypes with type parameter lists of
-   *  same length as tparams.
-   *  Returns list of list of bounds infos, where corresponding type
-   *  parameters are renamed to tparams.
-   */
-  private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] = {
-    def getBounds(tp: Type): List[Type] = tp match {
-      case PolyType(tparams1, _) if sameLength(tparams1, tparams) =>
-        tparams1 map (tparam => tparam.info.substSym(tparams1, tparams))
-      case tp =>
-        if (tp ne tp.normalize) getBounds(tp.normalize)
-        else throw new NoCommonType(tps)
-    }
-    tps map getBounds
-  }
-
-  /** All types in list must be polytypes with type parameter lists of
-   *  same length as tparams.
-   *  Returns list of instance types, where corresponding type
-   *  parameters are renamed to tparams.
-   */
-  private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] = {
-    def transformResultType(tp: Type): Type = tp match {
-      case PolyType(tparams1, restpe) if sameLength(tparams1, tparams) =>
-        restpe.substSym(tparams1, tparams)
-      case tp =>
-        if (tp ne tp.normalize) transformResultType(tp.normalize)
-        else throw new NoCommonType(tps)
-    }
-    tps map transformResultType
-  }
-
-  /** All types in list must be method types with equal parameter types.
-   *  Returns list of their result types.
-   */
-  private def matchingRestypes(tps: List[Type], pts: List[Type]): List[Type] =
-    tps map {
-      case mt @ MethodType(params1, res) if isSameTypes(mt.paramTypes, pts) =>
-        res
-      case NullaryMethodType(res) if pts.isEmpty =>
-        res
-      case _ =>
-        throw new NoCommonType(tps)
-    }
-
 // Errors and Diagnostics -----------------------------------------------------
 
   /** A throwable signalling a type error */
@@ -7243,7 +4476,7 @@ trait Types extends api.Types { self: SymbolTable =>
   /** An exception for cyclic references from which we can recover */
   case class RecoverableCyclicReference(sym: Symbol)
     extends TypeError("illegal cyclic reference involving " + sym) {
-    if (settings.debug.value) printStackTrace()
+    if (settings.debug) printStackTrace()
   }
 
   class NoCommonType(tps: List[Type]) extends Throwable(
@@ -7255,26 +4488,28 @@ trait Types extends api.Types { self: SymbolTable =>
   }
 
   /** The current indentation string for traces */
-  private var indent: String = ""
+  private var _indent: String = ""
+  protected def indent = _indent
+  protected def indent_=(value: String) = _indent = value
 
   /** Perform operation `p` on arguments `tp1`, `arg2` and print trace of computation. */
   protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
-    Console.println(indent + tp1 + " " + op + " " + arg2 + "?" /* + "("+tp1.getClass+","+arg2.getClass+")"*/)
+    inform(indent + tp1 + " " + op + " " + arg2 + "?" /* + "("+tp1.getClass+","+arg2.getClass+")"*/)
     indent = indent + "  "
     val result = p(tp1, arg2)
     indent = indent stripSuffix "  "
-    Console.println(indent + result)
+    inform(indent + result)
     result
   }
 
   /** If option `explaintypes` is set, print a subtype trace for `found <:< required`. */
   def explainTypes(found: Type, required: Type) {
-    if (settings.explaintypes.value) withTypesExplained(found <:< required)
+    if (settings.explaintypes) withTypesExplained(found <:< required)
   }
 
   /** If option `explaintypes` is set, print a subtype trace for `op(found, required)`. */
   def explainTypes(op: (Type, Type) => Any, found: Type, required: Type) {
-    if (settings.explaintypes.value) withTypesExplained(op(found, required))
+    if (settings.explaintypes) withTypesExplained(op(found, required))
   }
 
   /** Execute `op` while printing a trace of the operations on types executed. */
@@ -7284,18 +4519,18 @@ trait Types extends api.Types { self: SymbolTable =>
   }
 
   def isUnboundedGeneric(tp: Type) = tp match {
-    case t @ TypeRef(_, sym, _) => sym.isAbstractType && !(t <:< AnyRefClass.tpe)
+    case t @ TypeRef(_, sym, _) => sym.isAbstractType && !(t <:< AnyRefTpe)
     case _                      => false
   }
   def isBoundedGeneric(tp: Type) = tp match {
-    case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefClass.tpe)
+    case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefTpe)
     case TypeRef(_, sym, _)                       => !isPrimitiveValueClass(sym)
     case _                                        => false
   }
   // Add serializable to a list of parents, unless one of them already is
   def addSerializable(ps: Type*): List[Type] = (
     if (ps exists typeIsSubTypeOfSerializable) ps.toList
-    else (ps :+ SerializableClass.tpe).toList
+    else (ps :+ SerializableTpe).toList
   )
 
   /** Adds the @uncheckedBound annotation if the given `tp` has type arguments */
@@ -7307,11 +4542,14 @@ trait Types extends api.Types { self: SymbolTable =>
   /** Members of the given class, other than those inherited
    *  from Any or AnyRef.
    */
-  def nonTrivialMembers(clazz: Symbol): Iterable[Symbol] =
-    clazz.info.members filterNot (sym => sym.owner == ObjectClass || sym.owner == AnyClass)
+  def nonTrivialMembers(clazz: Symbol): Scope = clazz.info.members filterNot isUniversalMember
+
+  /** Members which can be imported into other scopes.
+   */
+  def importableMembers(pre: Type): Scope = pre.members filter isImportable
 
   def objToAny(tp: Type): Type =
-    if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyClass.tpe
+    if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyTpe
     else tp
 
   val shorthands = Set(
@@ -7324,46 +4562,57 @@ trait Types extends api.Types { self: SymbolTable =>
     "scala.collection.IndexedSeq",
     "scala.collection.Iterator")
 
-
-  /** The maximum number of recursions allowed in toString
-   */
-  final val maxTostringRecursions = 50
-
-  private var tostringRecursions = 0
-
-  protected def typeToString(tpe: Type): String =
-    if (tostringRecursions >= maxTostringRecursions) {
-      debugwarn("Exceeded recursion depth attempting to print type.")
-      if (settings.debug.value)
-        (new Throwable).printStackTrace
-
-      "..."
-    }
-    else
-      try {
-        tostringRecursions += 1
-        tpe.safeToString
-      } finally {
-        tostringRecursions -= 1
-      }
-
 // ----- Hoisted closures and convenience methods, for compile time reductions -------
 
-  private[scala] val typeIsNotNull = (tp: Type) => tp.isNotNull
   private[scala] val isTypeVar = (tp: Type) => tp.isInstanceOf[TypeVar]
   private[scala] val typeContainsTypeVar = (tp: Type) => tp exists isTypeVar
   private[scala] val typeIsNonClassType = (tp: Type) => tp.typeSymbolDirect.isNonClassType
   private[scala] val typeIsExistentiallyBound = (tp: Type) => tp.typeSymbol.isExistentiallyBound
   private[scala] val typeIsErroneous = (tp: Type) => tp.isErroneous
-  private[scala] val typeIsError = (tp: Type) => tp.isError
-  private[scala] val typeHasAnnotations = (tp: Type) => tp.annotations.nonEmpty
+  private[scala] val symTypeIsError = (sym: Symbol) => sym.tpe.isError
+  private[scala] val treeTpe = (t: Tree) => t.tpe
+  private[scala] val symTpe = (sym: Symbol) => sym.tpe
+  private[scala] val symInfo = (sym: Symbol) => sym.info
+  private[scala] val typeHasAnnotations = (tp: Type) => tp.annotations ne Nil
   private[scala] val boundsContainType = (bounds: TypeBounds, tp: Type) => bounds containsType tp
   private[scala] val typeListIsEmpty = (ts: List[Type]) => ts.isEmpty
-  private[scala] val typeIsSubTypeOfSerializable = (tp: Type) => tp <:< SerializableClass.tpe
+  private[scala] val typeIsSubTypeOfSerializable = (tp: Type) => tp <:< SerializableTpe
   private[scala] val typeIsNothing = (tp: Type) => tp.typeSymbolDirect eq NothingClass
   private[scala] val typeIsAny = (tp: Type) => tp.typeSymbolDirect eq AnyClass
   private[scala] val typeIsHigherKinded = (tp: Type) => tp.isHigherKinded
 
+  /** The maximum depth of type `tp` */
+  def typeDepth(tp: Type): Depth = tp match {
+    case TypeRef(pre, sym, args)          => typeDepth(pre) max typeDepth(args).incr
+    case RefinedType(parents, decls)      => typeDepth(parents) max symTypeDepth(decls.toList).incr
+    case TypeBounds(lo, hi)               => typeDepth(lo) max typeDepth(hi)
+    case MethodType(paramtypes, result)   => typeDepth(result)
+    case NullaryMethodType(result)        => typeDepth(result)
+    case PolyType(tparams, result)        => typeDepth(result) max symTypeDepth(tparams).incr
+    case ExistentialType(tparams, result) => typeDepth(result) max symTypeDepth(tparams).incr
+    case _                                => Depth(1)
+  }
+
+  //OPT replaced with tailrecursive function to save on #closures
+  // was:
+  //    var d = 0
+  //    for (tp <- tps) d = d max by(tp) //!!!OPT!!!
+  //    d
+  private[scala] def maxDepth(tps: List[Type]): Depth = {
+    @tailrec def loop(tps: List[Type], acc: Depth): Depth = tps match {
+      case tp :: rest => loop(rest, acc max typeDepth(tp))
+      case _          => acc
+    }
+    loop(tps, Depth.Zero)
+  }
+  private[scala] def maxbaseTypeSeqDepth(tps: List[Type]): Depth = {
+    @tailrec def loop(tps: List[Type], acc: Depth): Depth = tps match {
+      case tp :: rest => loop(rest, acc max tp.baseTypeSeqDepth)
+      case _          => acc
+    }
+    loop(tps, Depth.Zero)
+  }
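
The //OPT note above refers to replacing a for-loop over a captured mutable local (which compiles to one closure per call) with an accumulator threaded through a @tailrec helper. The same transformation over plain Ints, as a standalone sketch rather than the compiler's Depth type:

    import scala.annotation.tailrec

    object MaxDepthSketch {
      // was: var d = 0; for (x <- xs) d = d max x; d   -- allocates a closure for the body
      // now: an accumulator threaded through a @tailrec loop, no closure
      def maxDepth(xs: List[Int]): Int = {
        @tailrec def loop(rest: List[Int], acc: Int): Int = rest match {
          case x :: tail => loop(tail, acc max x)
          case Nil       => acc
        }
        loop(xs, 0)
      }

      def main(args: Array[String]): Unit =
        println(maxDepth(List(3, 1, 4, 1, 5)))   // 5
    }
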
+
   @tailrec private def typesContain(tps: List[Type], sym: Symbol): Boolean = tps match {
     case tp :: rest => (tp contains sym) || typesContain(rest, sym)
     case _ => false
@@ -7400,10 +4649,15 @@ trait Types extends api.Types { self: SymbolTable =>
 
 }
 
+object TypeConstants {
+  final val DefaultLogThreshhold         = 50
+  final val LogPendingBaseTypesThreshold = DefaultLogThreshhold
+  final val LogVolatileThreshold         = DefaultLogThreshhold
+}
+
 object TypesStats {
   import BaseTypeSeqsStats._
   val rawTypeCount        = Statistics.newCounter   ("#raw type creations")
-  val asSeenFromCount     = Statistics.newCounter   ("#asSeenFrom ops")
   val subtypeCount        = Statistics.newCounter   ("#subtype ops")
   val sametypeCount       = Statistics.newCounter   ("#sametype ops")
   val lubCount            = Statistics.newCounter   ("#toplevel lubs/glbs")
@@ -7425,7 +4679,7 @@ object TypesStats {
   val singletonBaseTypeSeqCount = Statistics.newSubCounter("  of which for singletons", baseTypeSeqCount)
   val typeOpsStack = Statistics.newTimerStack()
 
-  /** Commented out, because right now this does not inline, so creates a closure which will distort statistics
+  /* Commented out, because right now this does not inline, so creates a closure which will distort statistics
   @inline final def timedTypeOp[T](c: Statistics.StackableTimer)(op: => T): T = {
     val start = Statistics.pushTimer(typeOpsStack, c)
     try op
diff --git a/src/reflect/scala/reflect/internal/Variance.scala b/src/reflect/scala/reflect/internal/Variance.scala
new file mode 100644
index 0000000..ecc5d99
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Variance.scala
@@ -0,0 +1,90 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala
+package reflect
+package internal
+
+import Variance._
+
+/** Variances form a lattice:
+ *
+ *            - Covariant -
+ *           /             \
+ *  Invariant               Bivariant
+ *           \             /
+ *            Contravariant
+ *
+ *  The variance of a symbol within a type is calculated based on variance
+ *  annotations, e.g. +A or -A, and the positions of the types in which the
+ *  symbol appears. The actual mechanics are beyond the scope of this
+ *  comment, but the essential operations on a Variance are:
+ *
+ *  '&'  - like bitwise AND. Unless all inputs have compatible variance,
+ *  folding them across & will be invariant.
+ *  '*'  - like multiplication across { -1, 0, 1 } with contravariance as -1.
+ *  flip - if contravariant or covariant, flip to the other; otherwise leave unchanged.
+ *  cut  - if bivariant, remain bivariant; otherwise become invariant.
+ *
+ *  There is an important distinction between "isPositive" and "isCovariant".
+ *  The former is true for both Covariant and Bivariant, but the latter is true
+ *  only for Covariant.
+ */
+final class Variance private (val flags: Int) extends AnyVal {
+  def isBivariant     = flags == 2
+  def isCovariant     = flags == 1    // excludes bivariant
+  def isInvariant     = flags == 0
+  def isContravariant = flags == -1   // excludes bivariant
+  def isPositive      = flags > 0     // covariant or bivariant
+
+  def &(other: Variance): Variance = (
+    if (this == other) this
+    else if (this.isBivariant) other
+    else if (other.isBivariant) this
+    else Invariant
+  )
+
+  def *(other: Variance): Variance = (
+    if (other.isPositive) this
+    else if (other.isContravariant) this.flip
+    else this.cut
+  )
+
+  /** Flip between covariant and contravariant. I chose not to use unary_- because it doesn't stand out enough. */
+  def flip = if (isCovariant) Contravariant else if (isContravariant) Covariant else this
+
+  /** Map everything below bivariant to invariant. */
+  def cut  = if (isBivariant) this else Invariant
+
+  /** The symbolic annotation used to indicate the given kind of variance. */
+  def symbolicString = (
+    if (isCovariant) "+"
+    else if (isContravariant) "-"
+    else ""
+  )
+
+  override def toString = (
+    if (isContravariant) "contravariant"
+    else if (isCovariant) "covariant"
+    else if (isInvariant) "invariant"
+    else "" // noisy to print bivariant on everything without type parameters
+  )
+}
+
+object Variance {
+  implicit class SbtCompat(val v: Variance) {
+    def < (other: Int) = v.flags < other
+    def > (other: Int) = v.flags > other
+  }
+
+  def fold(variances: List[Variance]): Variance = (
+    if (variances.isEmpty) Bivariant
+    else variances reduceLeft (_ & _)
+  )
+  val Bivariant     = new Variance(2)
+  val Covariant     = new Variance(1)
+  val Contravariant = new Variance(-1)
+  val Invariant     = new Variance(0)
+}
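
A few identities follow from the definitions above: `&` treats Bivariant as its identity and collapses incompatible inputs to Invariant, while `*` flips its left operand under a contravariant right operand. The sketch below mirrors the operations in a plain standalone object so it compiles outside the compiler; it is a model of the new value class, not the class itself:

    object VarianceSketch {
      sealed abstract class V(val flags: Int) {
        def isBivariant     = flags == 2
        def isCovariant     = flags == 1
        def isContravariant = flags == -1
        def isPositive      = flags > 0
        def &(other: V): V =
          if (this == other) this
          else if (this.isBivariant) other
          else if (other.isBivariant) this
          else Invariant
        def flip: V = if (isCovariant) Contravariant else if (isContravariant) Covariant else this
        def cut: V  = if (isBivariant) this else Invariant
        def *(other: V): V =
          if (other.isPositive) this
          else if (other.isContravariant) this.flip
          else this.cut
      }
      case object Bivariant     extends V(2)
      case object Covariant     extends V(1)
      case object Invariant     extends V(0)
      case object Contravariant extends V(-1)

      def fold(vs: List[V]): V = if (vs.isEmpty) Bivariant else vs reduceLeft (_ & _)

      def main(args: Array[String]): Unit = {
        assert((Covariant & Contravariant) == Invariant)      // incompatible inputs meet at Invariant
        assert((Bivariant & Covariant) == Covariant)          // Bivariant is the identity of &
        assert((Covariant * Contravariant) == Contravariant)  // * flips under a contravariant position
        assert(fold(Nil) == Bivariant)                        // empty fold: no constraints at all
        println("variance lattice identities hold")
      }
    }
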
diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala
new file mode 100644
index 0000000..cfe2ad8
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Variances.scala
@@ -0,0 +1,218 @@
+/* NSC -- new scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala
+package reflect
+package internal
+
+import Variance._
+import scala.collection.{ mutable, immutable }
+import scala.annotation.tailrec
+
+/** See comments at scala.reflect.internal.Variance.
+ */
+trait Variances {
+  self: SymbolTable =>
+
+  /** Used in Refchecks.
+   *  TODO - eliminate duplication with varianceInType
+   */
+  class VarianceValidator extends Traverser {
+    private val escapedLocals = mutable.HashSet[Symbol]()
+    // A flag for when we're in a refinement, meaning method parameter types
+    // need to be checked.
+    private var inRefinement = false
+    @inline private def withinRefinement(body: => Type): Type = {
+      val saved = inRefinement
+      inRefinement = true
+      try body finally inRefinement = saved
+    }
+
+    /** Is every symbol in the owner chain between `site` and the owner of `sym`
+     *  either a term symbol or private[this]? If not, add `sym` to the set of
+     *  escaped locals.
+     *  @pre  sym.isLocalToThis
+     */
+    @tailrec final def checkForEscape(sym: Symbol, site: Symbol) {
+      if (site == sym.owner || site == sym.owner.moduleClass || site.hasPackageFlag) () // done
+      else if (site.isTerm || site.isPrivateLocal) checkForEscape(sym, site.owner) // ok - recurse to owner
+      else escapedLocals += sym
+    }
+
+    protected def issueVarianceError(base: Symbol, sym: Symbol, required: Variance): Unit = ()
+
+    // Flip occurrences of type parameters and parameters, unless
+    //  - it's a constructor, or case class factory or extractor
+    //  - it's a type parameter of tvar's owner.
+    def shouldFlip(sym: Symbol, tvar: Symbol) = (
+         sym.isParameter
+      && !(tvar.isTypeParameterOrSkolem && sym.isTypeParameterOrSkolem && tvar.owner == sym.owner)
+    )
+    // return Bivariant if `sym` is local to a term
+    // or is private[this] or protected[this]
+    def isLocalOnly(sym: Symbol) = !sym.owner.isClass || (
+         sym.isTerm // ?? shouldn't this be sym.owner.isTerm according to the comments above?
+      && (sym.isLocalToThis || sym.isSuperAccessor) // super accessors are implicitly local #4345
+      && !escapedLocals(sym)
+    )
+
+    private object ValidateVarianceMap extends TypeMap(trackVariance = true) {
+      private var base: Symbol = _
+
+      /** The variance of a symbol occurrence of `tvar` seen at the level of the definition of `base`.
+       *  The search proceeds from `base` to the owner of `tvar`.
+       *  Initially the state is covariant, but it might change along the search.
+       *
+       *  A local alias type is treated as Bivariant;
+       *  this is OK because we always expand aliases for variance checking.
+       *  However, for an alias which might be externally visible, we must assume Invariant,
+       *  because there may be references to the type parameter that are not checked,
+       *  leading to unsoundness (see SI-6566).
+       */
+      def relativeVariance(tvar: Symbol): Variance = {
+        def nextVariance(sym: Symbol, v: Variance): Variance = (
+          if (shouldFlip(sym, tvar)) v.flip
+          else if (isLocalOnly(sym)) Bivariant
+          else if (sym.isAliasType) (
+            // Unsound pre-2.11 behavior preserved under -Xsource:2.10
+            if (settings.isScala211 || sym.isOverridingSymbol) Invariant
+            else {
+              deprecationWarning(sym.pos, s"Construct depends on unsound variance analysis and will not compile in scala 2.11 and beyond")
+              Bivariant
+            }
+          )
+          else v
+        )
+        def loop(sym: Symbol, v: Variance): Variance = (
+          if (sym == tvar.owner || v.isBivariant) v
+          else loop(sym.owner, nextVariance(sym, v))
+        )
+        loop(base, Covariant)
+      }
+      def isUncheckedVariance(tp: Type) = tp match {
+        case AnnotatedType(annots, _)    => annots exists (_ matches definitions.uncheckedVarianceClass)
+        case _                           => false
+      }
+
+      private def checkVarianceOfSymbol(sym: Symbol) {
+        val relative = relativeVariance(sym)
+        val required = relative * variance
+        if (!relative.isBivariant) {
+          def sym_s  = s"$sym (${sym.variance}${sym.locationString})"
+          def base_s = s"$base in ${base.owner}" + (if (base.owner.isClass) "" else " in " + base.owner.enclClass)
+          log(s"verifying $sym_s is $required at $base_s")
+          if (sym.variance != required)
+            issueVarianceError(base, sym, required)
+        }
+      }
+      override def mapOver(decls: Scope): Scope = {
+        decls foreach (sym => withVariance(if (sym.isAliasType) Invariant else variance)(this(sym.info)))
+        decls
+      }
+      private def resultTypeOnly(tp: Type) = tp match {
+        case mt: MethodType => !inRefinement
+        case pt: PolyType   => true
+        case _              => false
+      }
+
+      /** For PolyTypes, type parameters are skipped because they are defined
+       *  explicitly (their TypeDefs will be passed here.) For MethodTypes, the
+       *  same is true of the parameters (ValDefs) unless we are inside a
+       *  refinement, in which case they are checked from here.
+       */
+      def apply(tp: Type): Type = tp match {
+        case _ if isUncheckedVariance(tp)                    => tp
+        case _ if resultTypeOnly(tp)                         => this(tp.resultType)
+        case TypeRef(_, sym, _) if sym.isAliasType           => this(tp.normalize)
+        case TypeRef(_, sym, _) if !sym.variance.isInvariant => checkVarianceOfSymbol(sym) ; mapOver(tp)
+        case RefinedType(_, _)                               => withinRefinement(mapOver(tp))
+        case ClassInfoType(parents, _, _)                    => parents foreach this ; tp
+        case mt @ MethodType(_, result)                      => flipped(mt.paramTypes foreach this) ; this(result)
+        case _                                               => mapOver(tp)
+      }
+      def validateDefinition(base: Symbol) {
+        val saved = this.base
+        this.base = base
+        try apply(base.info)
+        finally this.base = saved
+      }
+    }
+
+    /** Validate variance of info of symbol `base` */
+    private def validateVariance(base: Symbol) {
+      ValidateVarianceMap validateDefinition base
+    }
+
+    override def traverse(tree: Tree) {
+      def sym = tree.symbol
+      // No variance check for object-private/protected methods/values.
+      // Or constructors, or case class factory or extractor.
+      def skip = (
+           sym == NoSymbol
+        || sym.isLocalToThis
+        || sym.owner.isConstructor
+        || sym.owner.isCaseApplyOrUnapply
+      )
+      tree match {
+        case defn: MemberDef if skip =>
+          debuglog(s"Skipping variance check of ${sym.defString}")
+        case ClassDef(_, _, _, _) | TypeDef(_, _, _, _) =>
+          validateVariance(sym)
+          super.traverse(tree)
+        // ModuleDefs need not be considered because they have been eliminated already
+        case ValDef(_, _, _, _) =>
+          validateVariance(sym)
+        case DefDef(_, _, tparams, vparamss, _, _) =>
+          validateVariance(sym)
+          traverseTrees(tparams)
+          traverseTreess(vparamss)
+        case Template(_, _, _) =>
+          super.traverse(tree)
+        case CompoundTypeTree(templ) =>
+          super.traverse(tree)
+
+        // SI-7872 These two cases make sure we don't miss variance exploits
+        // in originals, e.g. in `foo[({type l[+a] = List[a]})#l]`
+        case tt @ TypeTree() if tt.original != null =>
+          super.traverse(tt.original)
+        case tt : TypTree =>
+          super.traverse(tt)
+
+        case _ =>
+      }
+    }
+  }
+
+  /** Compute variance of type parameter `tparam` in all types `tps`. */
+  def varianceInTypes(tps: List[Type])(tparam: Symbol): Variance =
+    fold(tps map (tp => varianceInType(tp)(tparam)))
+
+  /** Compute variance of type parameter `tparam` in type `tp`. */
+  def varianceInType(tp: Type)(tparam: Symbol): Variance = {
+    def inArgs(sym: Symbol, args: List[Type]): Variance = fold(map2(args, sym.typeParams)((a, p) => inType(a) * p.variance))
+    def inSyms(syms: List[Symbol]): Variance            = fold(syms map inSym)
+    def inTypes(tps: List[Type]): Variance              = fold(tps map inType)
+
+    def inSym(sym: Symbol): Variance = if (sym.isAliasType) inType(sym.info).cut else inType(sym.info)
+    def inType(tp: Type): Variance   = tp match {
+      case ErrorType | WildcardType | NoType | NoPrefix => Bivariant
+      case ThisType(_) | ConstantType(_)                => Bivariant
+      case TypeRef(_, `tparam`, _)                      => Covariant
+      case BoundedWildcardType(bounds)                  => inType(bounds)
+      case NullaryMethodType(restpe)                    => inType(restpe)
+      case SingleType(pre, sym)                         => inType(pre)
+      case TypeRef(pre, _, _) if tp.isHigherKinded      => inType(pre)                 // a type constructor cannot occur in tp's args
+      case TypeRef(pre, sym, args)                      => inType(pre)                 & inArgs(sym, args)
+      case TypeBounds(lo, hi)                           => inType(lo).flip             & inType(hi)
+      case RefinedType(parents, defs)                   => inTypes(parents)            & inSyms(defs.toList)
+      case MethodType(params, restpe)                   => inSyms(params).flip         & inType(restpe)
+      case PolyType(tparams, restpe)                    => inSyms(tparams).flip        & inType(restpe)
+      case ExistentialType(tparams, restpe)             => inSyms(tparams)             & inType(restpe)
+      case AnnotatedType(annots, tp)                    => inTypes(annots map (_.atp)) & inType(tp)
+    }
+
+    inType(tp)
+  }
+}
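
varianceInType folds occurrences structurally: positions under method parameters and lower bounds are flipped, and the per-position results are combined with `&`. As a worked example (hypothetical, not from this patch), in `A => List[A]` the parameter occurrence of A contributes Covariant.flip = Contravariant, the result occurrence contributes Covariant, and Contravariant & Covariant = Invariant, which is why a parameter used in both positions must be declared invariant, as in the sketch below:

    // Hypothetical sketch of the worked example above.
    // class Bad[+A] { def use(x: A): List[A] = List(x) }  // rejected: covariant A in a contravariant position
    class Ok[A] { def use(x: A): List[A] = List(x) }        // invariant A is accepted
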
diff --git a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
deleted file mode 100644
index 058ff61..0000000
--- a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package scala.reflect
-package internal
-package annotations
-
-import scala.annotation.meta._
-
-/**
- * An annotation that designates a member should not be referred to after
- * type checking (which includes macro expansion); it must only be used in
- * the arguments of some other macro that will eliminate it from the AST.
- *
- * Later on, this annotation should be removed and implemented with domain-specific macros.
- * If a certain method `inner` mustn't be called outside the context of a given macro `outer`,
- * then it should itself be declared as a macro.
- *
- * Approach #1. Expansion of `inner` checks whether its enclosures contain `outer` and
- * report an error if `outer` is not detected. In principle, we could use this approach right now,
- * but currently enclosures are broken, because contexts aren't exactly famous for keeping precise
- * track of the stack of the trees being typechecked.
- *
- * Approach #2. Default implementation of `inner` is just an invocation of `c.abort`.
- * `outer` is an untyped macro, which expands into a block, which contains a redefinition of `inner`
- * and a call to itself. The redefined `inner` could either be a stub like `Expr.splice` or  carry out
- * domain-specific logic.
- *
- * @param  message the error message to print during compilation if a reference remains
- *                 after type checking
- * @since  2.10.1
- */
- at getter @setter @beanGetter @beanSetter
-final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
diff --git a/src/reflect/scala/reflect/internal/annotations/package.scala b/src/reflect/scala/reflect/internal/annotations/package.scala
new file mode 100644
index 0000000..ef299a6
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/annotations/package.scala
@@ -0,0 +1,6 @@
+package scala.reflect.internal
+
+package object annotations {
+  @deprecated("Use scala.annotation.compileTimeOnly instead", "2.11.0")
+  type compileTimeOnly = scala.annotation.compileTimeOnly
+}
\ No newline at end of file
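
The deprecated alias above forwards to the annotation that replaced it in the standard
library. A minimal usage sketch of `scala.annotation.compileTimeOnly` (the `Macros` and
`placeholder` names are invented for illustration): any reference to the annotated
member that survives type checking, i.e. is not rewritten away by an enclosing macro,
is reported as an error with the given message.

    import scala.annotation.compileTimeOnly

    object Macros {
      @compileTimeOnly("`placeholder` may only be used inside a `transform { ... }` block")
      def placeholder: Int = sys.error("unreachable: rewritten away by the enclosing macro")
    }
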
diff --git a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
index 367a3b8..8615e34 100644
--- a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
+++ b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
@@ -5,7 +5,8 @@
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
 \*                                                                      */
-package scala.reflect.internal.pickling
+package scala
+package reflect.internal.pickling
 
 object ByteCodecs {
 
@@ -127,7 +128,7 @@ object ByteCodecs {
     var j = 0
     val dstlen = (srclen * 7 + 7) / 8
     while (i + 7 < srclen) {
-      var out: Int = src(i)
+      var out: Int = src(i).toInt
       var in: Byte = src(i + 1)
       src(j) = (out | (in & 0x01) << 7).toByte
       out = in >>> 1
@@ -152,7 +153,7 @@ object ByteCodecs {
       j += 7
     }
     if (i < srclen) {
-      var out: Int = src(i)
+      var out: Int = src(i).toInt
       if (i + 1 < srclen) {
         var in: Byte = src(i + 1)
         src(j) = (out | (in & 0x01) << 7).toByte; j += 1
@@ -195,10 +196,10 @@ object ByteCodecs {
    *
    * Sometimes returns (length+1) of the decoded array. Example:
    *
-   *   scala> val enc = reflect.generic.ByteCodecs.encode(Array(1,2,3))
+   *   scala> val enc = scala.reflect.generic.ByteCodecs.encode(Array(1,2,3))
    *   enc: Array[Byte] = Array(2, 5, 13, 1)
    *
-   *   scala> reflect.generic.ByteCodecs.decode(enc)
+   *   scala> scala.reflect.generic.ByteCodecs.decode(enc)
    *   res43: Int = 4
    *
    *   scala> enc
@@ -211,11 +212,3 @@ object ByteCodecs {
     decode7to8(xs, len)
   }
 }
-
-
-
-
-
-
-
-
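
The `.toInt` insertions above do not change behaviour, since Byte-to-Int widening
sign-extends either way; they just make the widening explicit (presumably to keep the
numeric-widening lint quiet). A small standalone illustration of the widening and of
the masking idiom used throughout this file:

    object ByteWidening {
      def main(args: Array[String]): Unit = {
        val b: Byte = -1                   // 0xff as a signed byte
        val widenedImplicitly: Int = b     // implicit numeric widening, sign-extends to -1
        val widenedExplicitly = b.toInt    // same value, but the widening is spelled out
        val unsigned = b & 0xff            // masking recovers the unsigned value, 255
        println((widenedImplicitly, widenedExplicitly, unsigned))   // (-1,-1,255)
      }
    }
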
diff --git a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
index 6170fcb..a814256 100644
--- a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
+++ b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 package pickling
 
@@ -62,11 +63,8 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
     writeByte((x & 0x7f).toInt)
   }
 
-  /** Write a natural number <code>x</code> at position <code>pos</code>.
+  /** Write a natural number `x` at position `pos`.
    *  If number is more than one byte, shift rest of array to make space.
-   *
-   *  @param pos ...
-   *  @param x   ...
    */
   def patchNat(pos: Int, x: Int) {
     def patchNatPrefix(x: Int) {
@@ -81,7 +79,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
     if (y != 0) patchNatPrefix(y)
   }
 
-  /** Write a long number <code>x</code> in signed big endian format, base 256.
+  /** Write a long number `x` in signed big endian format, base 256.
    *
    *  @param x The long number to be written.
    */
@@ -94,12 +92,9 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
 
   // -- Basic input routines --------------------------------------------
 
-  /** Peek at the current byte without moving the read index */
-  def peekByte(): Int = bytes(readIndex)
-
   /** Read a byte */
   def readByte(): Int = {
-    val x = bytes(readIndex); readIndex += 1; x
+    val x = bytes(readIndex).toInt; readIndex += 1; x
   }
 
   /** Read a natural number in big endian format, base 128.
@@ -110,9 +105,9 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
     var b = 0L
     var x = 0L
     do {
-      b = readByte()
+      b = readByte().toLong
       x = (x << 7) + (b & 0x7f)
-    } while ((b & 0x80) != 0L);
+    } while ((b & 0x80) != 0L)
     x
   }
 
@@ -151,18 +146,14 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
     result.toIndexedSeq
   }
 
-  /** Perform operation <code>op</code> until the condition
-   *  <code>readIndex == end</code> is satisfied.
+  /** Perform operation `op` until the condition
+   *  `readIndex == end` is satisfied.
    *  Concatenate results into a list.
-   *
-   *  @param end ...
-   *  @param op  ...
-   *  @return    ...
    */
   def until[T](end: Int, op: () => T): List[T] =
-    if (readIndex == end) List() else op() :: until(end, op);
+    if (readIndex == end) List() else op() :: until(end, op)
 
-  /** Perform operation <code>op</code> the number of
+  /** Perform operation `op` the number of
    *  times specified.  Concatenate the results into a list.
    */
   def times[T](n: Int, op: ()=>T): List[T] =
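
For orientation, `readNat`/`writeNat` use a big-endian base-128 encoding: seven payload
bits per byte, with the high bit set on every byte except the last. A self-contained
sketch of the same scheme (not the PickleBuffer code itself):

    import scala.collection.mutable.ArrayBuffer

    object NatCodecSketch {
      // Write a non-negative Long in big-endian base 128.
      def writeNat(x: Long, out: ArrayBuffer[Byte]): Unit = {
        def prefix(y: Long): Unit =
          if (y != 0) { prefix(y >>> 7); out += ((y & 0x7f) | 0x80).toByte }
        prefix(x >>> 7)
        out += (x & 0x7f).toByte
      }

      // Read it back: accumulate 7 bits per byte until a byte without the continuation bit.
      def readNat(bytes: Array[Byte], start: Int): (Long, Int) = {
        var i = start; var x = 0L; var b = 0L
        do { b = bytes(i).toLong; i += 1; x = (x << 7) + (b & 0x7f) } while ((b & 0x80) != 0L)
        (x, i)
      }

      def main(args: Array[String]): Unit = {
        val buf = ArrayBuffer.empty[Byte]
        writeNat(300L, buf)                 // encodes as bytes 0x82, 0x2c
        println(readNat(buf.toArray, 0))    // (300,2)
      }
    }
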
diff --git a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala
index 16747af..ce0ceec 100644
--- a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala
+++ b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 package pickling
 
@@ -56,7 +57,7 @@ object PickleFormat {
  *                  | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref}
  *                  | 43 ANNOTINFO len_Nat AnnotInfoBody
  *                  | 44 ANNOTARGARRAY len_Nat {constAnnotArg_Ref}
- *                  | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat
+ *                  | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat /* no longer needed */
  *                  | 48 EXISTENTIALtpe len_Nat type_Ref {symbol_Ref}
  *                  | 49 TREE len_Nat 1 EMPTYtree
  *                  | 49 TREE len_Nat 2 PACKAGEtree type_Ref sym_Ref mods_Ref name_Ref {tree_Ref}
@@ -115,7 +116,6 @@ object PickleFormat {
  */
   val MajorVersion = 5
   val MinorVersion = 0
-  def VersionString = "V" + MajorVersion + "." + MinorVersion
 
   final val TERMname = 1
   final val TYPEname = 2
@@ -161,7 +161,7 @@ object PickleFormat {
   final val ANNOTARGARRAY = 44
 
   final val SUPERtpe = 46
-  final val DEBRUIJNINDEXtpe = 47
+  final val DEBRUIJNINDEXtpe = 47   // no longer generated
   final val EXISTENTIALtpe = 48
 
   final val TREE = 49      // prefix code that means a tree is coming
diff --git a/src/reflect/scala/reflect/internal/pickling/Translations.scala b/src/reflect/scala/reflect/internal/pickling/Translations.scala
new file mode 100644
index 0000000..e56cf79
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/pickling/Translations.scala
@@ -0,0 +1,128 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala
+package reflect
+package internal
+package pickling
+
+import PickleFormat._
+import util.shortClassOfInstance
+
+trait Translations {
+  self: SymbolTable =>
+
+  def isTreeSymbolPickled(code: Int): Boolean = (code: @annotation.switch) match {
+    case PACKAGEtree | CLASStree | MODULEtree | VALDEFtree | DEFDEFtree | TYPEDEFtree | LABELtree => true
+    case IMPORTtree | TEMPLATEtree | BINDtree | FUNCTIONtree | RETURNtree                         => true
+    case APPLYDYNAMICtree | SUPERtree | THIStree | SELECTtree | IDENTtree                         => true
+    case _                                                                                        => false
+  }
+  /** This method should be equivalent to tree.hasSymbolField, but that method
+   *  doesn't do us any good when we're unpickling because we need to know based
+   *  on the Int tag - the tree doesn't exist yet. Thus, this method is documentation only.
+   */
+  def isTreeSymbolPickled(tree: Tree): Boolean = isTreeSymbolPickled(picklerSubTag(tree))
+
+  // The ad hoc pattern matching of tuples out of AnyRefs is a
+  // truly terrible idea. It reaches the height of its powers in
+  // combination with scala's insistence on helpfully tupling
+  // multiple arguments passed to a single-arg AnyRef.
+  def picklerTag(ref: AnyRef): Int = ref match {
+    case tp: Type                       => picklerTag(tp)
+    case sym: Symbol                    => picklerTag(sym)
+    case const: Constant                => LITERAL + const.tag
+    case _: Tree                        => TREE           // its sub tag more precisely identifies it
+    case _: TermName                    => TERMname
+    case _: TypeName                    => TYPEname
+    case _: ArrayAnnotArg               => ANNOTARGARRAY  // an array of annotation arguments
+    case _: AnnotationInfo              => ANNOTINFO      // annotations on types (not linked to a symbol)
+    case (_: Symbol, _: AnnotationInfo) => SYMANNOT       // symbol annotations, i.e. on terms
+    case (_: Symbol, _: List[_])        => CHILDREN       // the direct subclasses of a sealed symbol
+    case _: Modifiers                   => MODIFIERS
+    case _                              => sys.error(s"unpicklable entry ${shortClassOfInstance(ref)} $ref")
+  }
+
+  /** Local symbols only. The assessment of locality depends
+   *  on convoluted conditions which depend in part on the root
+   *  symbol being pickled, so it cannot be reproduced here.
+   *  The pickler tags at stake are EXTMODCLASSref and EXTref.
+   *  Those tags are never produced here - such symbols must be
+   *  excluded prior to calling this method.
+   */
+  def picklerTag(sym: Symbol): Int = sym match {
+    case NoSymbol                            => NONEsym
+    case _: ClassSymbol                      => CLASSsym
+    case _: TypeSymbol if sym.isAbstractType => TYPEsym
+    case _: TypeSymbol                       => ALIASsym
+    case _: TermSymbol if sym.isModule       => MODULEsym
+    case _: TermSymbol                       => VALsym
+  }
+
+  def picklerTag(tpe: Type): Int = tpe match {
+    case NoType               => NOtpe
+    case NoPrefix             => NOPREFIXtpe
+    case _: ThisType          => THIStpe
+    case _: SingleType        => SINGLEtpe
+    case _: SuperType         => SUPERtpe
+    case _: ConstantType      => CONSTANTtpe
+    case _: TypeBounds        => TYPEBOUNDStpe
+    case _: TypeRef           => TYPEREFtpe
+    case _: RefinedType       => REFINEDtpe
+    case _: ClassInfoType     => CLASSINFOtpe
+    case _: MethodType        => METHODtpe
+    case _: PolyType          => POLYtpe
+    case _: NullaryMethodType => POLYtpe  // bad juju, distinct ints are not at a premium!
+    case _: ExistentialType   => EXISTENTIALtpe
+    case _: AnnotatedType     => ANNOTATEDtpe
+  }
+
+  def picklerSubTag(tree: Tree): Int = tree match {
+    case EmptyTree              => EMPTYtree
+    case _: PackageDef          => PACKAGEtree
+    case _: ClassDef            => CLASStree
+    case _: ModuleDef           => MODULEtree
+    case _: ValDef              => VALDEFtree
+    case _: DefDef              => DEFDEFtree
+    case _: TypeDef             => TYPEDEFtree
+    case _: LabelDef            => LABELtree
+    case _: Import              => IMPORTtree
+    // case _: DocDef              => DOCDEFtree
+    case _: Template            => TEMPLATEtree
+    case _: Block               => BLOCKtree
+    case _: CaseDef             => CASEtree
+    case _: Alternative         => ALTERNATIVEtree
+    case _: Star                => STARtree
+    case _: Bind                => BINDtree
+    case _: UnApply             => UNAPPLYtree
+    case _: ArrayValue          => ARRAYVALUEtree
+    case _: Function            => FUNCTIONtree
+    case _: Assign              => ASSIGNtree
+    case _: If                  => IFtree
+    case _: Match               => MATCHtree
+    case _: Return              => RETURNtree
+    case _: Try                 => TREtree     // TREtree?
+    case _: Throw               => THROWtree
+    case _: New                 => NEWtree
+    case _: Typed               => TYPEDtree
+    case _: TypeApply           => TYPEAPPLYtree
+    case _: Apply               => APPLYtree
+    case _: ApplyDynamic        => APPLYDYNAMICtree
+    case _: Super               => SUPERtree
+    case _: This                => THIStree
+    case _: Select              => SELECTtree
+    case _: Ident               => IDENTtree
+    case _: Literal             => LITERALtree
+    case _: TypeTree            => TYPEtree
+    case _: Annotated           => ANNOTATEDtree
+    case _: SingletonTypeTree   => SINGLETONTYPEtree
+    case _: SelectFromTypeTree  => SELECTFROMTYPEtree
+    case _: CompoundTypeTree    => COMPOUNDTYPEtree
+    case _: AppliedTypeTree     => APPLIEDTYPEtree
+    case _: TypeBoundsTree      => TYPEBOUNDStree
+    case _: ExistentialTypeTree => EXISTENTIALTYPEtree
+  }
+}
+
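
The tag methods above return plain Ints so that unpicklers can dispatch on them with
`@switch`. A toy, self-contained version of that idiom (the constants and names are
invented for illustration):

    import scala.annotation.switch

    object SwitchSketch {
      final val LITERALtree = 1
      final val IDENTtree   = 2
      final val APPLYtree   = 3

      // `@switch` asks scalac to compile this match down to a JVM switch
      // (and to warn if it cannot), which is why the tags are constant Ints.
      def hasSymbol(code: Int): Boolean = (code: @switch) match {
        case IDENTtree | APPLYtree => true
        case _                     => false
      }

      def main(args: Array[String]): Unit =
        println(hasSymbol(IDENTtree))   // true
    }
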
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 603fff4..64a1a44 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -3,7 +3,8 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal
 package pickling
 
@@ -21,15 +22,15 @@ import scala.annotation.switch
  *  @version 1.0
  */
 abstract class UnPickler {
-  val global: SymbolTable
-  import global._
+  val symbolTable: SymbolTable
+  import symbolTable._
 
   /** Unpickle symbol table information descending from a class and/or module root
    *  from an array of bytes.
    *  @param bytes      bytearray from which we unpickle
    *  @param offset     offset from which unpickling starts
-   *  @param classroot  the top-level class which is unpickled, or NoSymbol if inapplicable
-   *  @param moduleroot the top-level module which is unpickled, or NoSymbol if inapplicable
+   *  @param classRoot  the top-level class which is unpickled, or NoSymbol if inapplicable
+   *  @param moduleRoot the top-level module which is unpickled, or NoSymbol if inapplicable
    *  @param filename   filename associated with bytearray, only used for error messages
    */
   def unpickle(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) {
@@ -64,36 +65,38 @@ abstract class UnPickler {
     /** A map from symbols to their associated `decls` scopes */
     private val symScopes = mutable.HashMap[Symbol, Scope]()
 
+    private def expect(expected: Int, msg: => String) {
+      val tag = readByte()
+      if (tag != expected)
+        errorBadSignature(s"$msg ($tag)")
+    }
+
     //println("unpickled " + classRoot + ":" + classRoot.rawInfo + ", " + moduleRoot + ":" + moduleRoot.rawInfo);//debug
 
+    @inline private def runAtIndex[T](i: Int)(body: => T): T = {
+      val saved = readIndex
+      readIndex = index(i)
+      try body finally readIndex = saved
+    }
+
     // Laboriously unrolled for performance.
     def run() {
       var i = 0
       while (i < index.length) {
-        if (entries(i) == null && isSymbolEntry(i)) {
-          val savedIndex = readIndex
-          readIndex = index(i)
-          entries(i) = readSymbol()
-          readIndex = savedIndex
-        }
+        if (entries(i) == null && isSymbolEntry(i))
+          runAtIndex(i)(entries(i) = readSymbol())
+
         i += 1
       }
+
       // read children last, fix for #3951
       i = 0
       while (i < index.length) {
         if (entries(i) == null) {
-          if (isSymbolAnnotationEntry(i)) {
-            val savedIndex = readIndex
-            readIndex = index(i)
-            readSymbolAnnotation()
-            readIndex = savedIndex
-          }
-          else if (isChildrenEntry(i)) {
-            val savedIndex = readIndex
-            readIndex = index(i)
-            readChildren()
-            readIndex = savedIndex
-          }
+          if (isSymbolAnnotationEntry(i))
+            runAtIndex(i)(readSymbolAnnotation())
+          else if (isChildrenEntry(i))
+            runAtIndex(i)(readChildren())
         }
         i += 1
       }
@@ -144,6 +147,11 @@ abstract class UnPickler {
       tag == CHILDREN
     }
 
+    private def maybeReadSymbol(): Either[Int, Symbol] = readNat() match {
+      case index if isSymbolRef(index) => Right(at(index, readSymbol))
+      case index                       => Left(index)
+    }
+
     /** Does entry represent a refinement symbol?
      *  pre: Entry is a class symbol
      */
@@ -159,9 +167,9 @@ abstract class UnPickler {
       result
     }
 
-    /** If entry at <code>i</code> is undefined, define it by performing
-     *  operation <code>op</code> with <code>readIndex at start of i'th
-     *  entry. Restore <code>readIndex</code> afterwards.
+    /** If entry at `i` is undefined, define it by performing
+     *  operation `op` with `readIndex` at the start of the i'th
+     *  entry. Restore `readIndex` afterwards.
      */
     protected def at[T <: AnyRef](i: Int, op: () => T): T = {
       var r = entries(i)
@@ -186,13 +194,12 @@ abstract class UnPickler {
         case _ => errorBadSignature("bad name tag: " + tag)
       }
     }
-    protected def readTermName(): TermName = readName().toTermName
-    protected def readTypeName(): TypeName = readName().toTypeName
+    private def readEnd() = readNat() + readIndex
 
     /** Read a symbol */
     protected def readSymbol(): Symbol = {
       val tag   = readByte()
-      val end   = readNat() + readIndex
+      val end   = readEnd()
       def atEnd = readIndex == end
 
       def readExtSymbol(): Symbol = {
@@ -222,6 +229,20 @@ abstract class UnPickler {
           NoSymbol
         }
 
+        def moduleAdvice(missing: String): String = {
+          val module =
+            if      (missing.startsWith("scala.xml"))                Some(("org.scala-lang.modules", "scala-xml"))
+            else if (missing.startsWith("scala.util.parsing"))       Some(("org.scala-lang.modules", "scala-parser-combinators"))
+            else if (missing.startsWith("scala.swing"))              Some(("org.scala-lang.modules", "scala-swing"))
+            else if (missing.startsWith("scala.util.continuations")) Some(("org.scala-lang.plugins", "scala-continuations-library"))
+            else None
+
+          (module map { case (group, art) =>
+            s"""\n(NOTE: It looks like the $art module is missing; try adding a dependency on "$group" : "$art".
+               |       See http://docs.scala-lang.org/overviews/core/scala-2.11.html for more information.)""".stripMargin
+           } getOrElse "")
+        }
+
         // (1) Try name.
         fromName(name) orElse {
           // (2) Try with expanded name.  Can happen if references to private
@@ -233,11 +254,12 @@ abstract class UnPickler {
               // (4) Call the mirror's "missing" hook.
               adjust(mirrorThatLoaded(owner).missingHook(owner, name)) orElse {
                 // (5) Create a stub symbol to defer hard failure a little longer.
+                val fullName = s"${owner.fullName}.$name"
                 val missingMessage =
-                  s"""|bad symbolic reference. A signature in $filename refers to ${name.longString}
-                      |in ${owner.kindString} ${owner.fullName} which is not available.
-                      |It may be completely missing from the current classpath, or the version on
-                      |the classpath might be incompatible with the version used when compiling $filename.""".stripMargin
+                  s"""|bad symbolic reference to $fullName encountered in class file '$filename'.
+                      |Cannot access ${name.longString} in ${owner.kindString} ${owner.fullName}. The current classpath may be
+                      |missing a definition for $fullName, or $filename may have been compiled against a version that's
+                      |incompatible with the one found on the current classpath.${moduleAdvice(fullName)}""".stripMargin
                 owner.newStubSymbol(name, missingMessage)
               }
             }
@@ -256,14 +278,11 @@ abstract class UnPickler {
       val name         = at(nameref, readName)
       val owner        = readSymbolRef()
       val flags        = pickledToRawFlags(readLongNat())
-      var inforef      = readNat()
-      val privateWithin =
-        if (!isSymbolRef(inforef)) NoSymbol
-        else {
-          val pw = at(inforef, readSymbol)
-          inforef = readNat()
-          pw
-        }
+
+      val (privateWithin, inforef) = maybeReadSymbol() match {
+        case Left(index) => NoSymbol -> index
+        case Right(sym)  => sym -> readNat()
+      }
 
       def isModuleFlag = (flags & MODULE) != 0L
       def isClassRoot  = (name == classRoot.name) && (owner == classRoot.owner)
@@ -271,6 +290,7 @@ abstract class UnPickler {
       def pflags       = flags & PickledFlags
 
       def finishSym(sym: Symbol): Symbol = {
+        markFlagsCompleted(sym)(mask = AllFlags)
         sym.privateWithin = privateWithin
         sym.info = (
           if (atEnd) {
@@ -305,7 +325,7 @@ abstract class UnPickler {
 
           sym
         case MODULEsym =>
-          val clazz = at(inforef, () => readType()).typeSymbol // after the NMT_TRANSITION period, we can leave off the () => ... ()
+          val clazz = at(inforef, () => readType()).typeSymbol // after NMT_TRANSITION, we can leave off the () => ... ()
           if (isModuleRoot) moduleRoot setFlag pflags
           else owner.newLinkedModule(clazz, pflags)
         case VALsym =>
@@ -317,84 +337,48 @@ abstract class UnPickler {
       })
     }
 
-    /** Read a type
-     *
-     * @param forceProperType is used to ease the transition to NullaryMethodTypes (commentmarker: NMT_TRANSITION)
-     *        the flag say that a type of kind * is expected, so that PolyType(tps, restpe) can be disambiguated to PolyType(tps, NullaryMethodType(restpe))
-     *        (if restpe is not a ClassInfoType, a MethodType or a NullaryMethodType, which leaves TypeRef/SingletonType -- the latter would make the polytype a type constructor)
-     */
     protected def readType(forceProperType: Boolean = false): Type = {
       val tag = readByte()
-      val end = readNat() + readIndex
+      val end = readEnd()
+      @inline def all[T](body: => T): List[T] = until(end, () => body)
+
+      def readTypes()   = all(readTypeRef)
+      def readSymbols() = all(readSymbolRef)
+      def readAnnots()  = all(readAnnotationRef)
+
+      // if the method is overloaded, the params cannot be determined (see readSymbol) => return NoType.
+      // This only happens for trees; "case Apply" in readTree() takes care of selecting the correct
+      // alternative after parsing the arguments.
+      def MethodTypeRef(restpe: Type, params: List[Symbol]): Type = (
+        if (restpe == NoType || (params contains NoSymbol)) NoType
+        else MethodType(params, restpe)
+      )
+      def PolyOrNullaryType(restpe: Type, tparams: List[Symbol]): Type = tparams match {
+        case Nil => NullaryMethodType(restpe)
+        case _   => PolyType(tparams, restpe)
+      }
+      def CompoundType(clazz: Symbol, parents: List[Type]): Type = tag match {
+        case REFINEDtpe   => RefinedType(parents, symScope(clazz), clazz)
+        case CLASSINFOtpe => ClassInfoType(parents, symScope(clazz), clazz)
+      }
+
+      // We're stuck with the order types are pickled in, but with judicious use
+      // of named parameters we can recapture a declarative flavor in a few cases.
+      // But it's still a rat's nest of adhockery.
       (tag: @switch) match {
-        case NOtpe =>
-          NoType
-        case NOPREFIXtpe =>
-          NoPrefix
-        case THIStpe =>
-          ThisType(readSymbolRef())
-        case SINGLEtpe =>
-          SingleType(readTypeRef(), readSymbolRef()) // !!! was singleType
-        case SUPERtpe =>
-          val thistpe = readTypeRef()
-          val supertpe = readTypeRef()
-          SuperType(thistpe, supertpe)
-        case CONSTANTtpe =>
-          ConstantType(readConstantRef())
-        case TYPEREFtpe =>
-          val pre = readTypeRef()
-          val sym = readSymbolRef()
-          var args = until(end, readTypeRef)
-          TypeRef(pre, sym, args)
-        case TYPEBOUNDStpe =>
-          TypeBounds(readTypeRef(), readTypeRef())
-        case REFINEDtpe =>
-          val clazz = readSymbolRef()
-          RefinedType(until(end, readTypeRef), symScope(clazz), clazz)
-        case CLASSINFOtpe =>
-          val clazz = readSymbolRef()
-          ClassInfoType(until(end, readTypeRef), symScope(clazz), clazz)
-        case METHODtpe | IMPLICITMETHODtpe =>
-          val restpe = readTypeRef()
-          val params = until(end, readSymbolRef)
-          // if the method is overloaded, the params cannot be determined (see readSymbol) => return NoType.
-          // Only happen for trees, "case Apply" in readTree() takes care of selecting the correct
-          // alternative after parsing the arguments.
-          if (params.contains(NoSymbol) || restpe == NoType) NoType
-          else MethodType(params, restpe)
-        case POLYtpe =>
-          val restpe = readTypeRef()
-          val typeParams = until(end, readSymbolRef)
-          if (typeParams.nonEmpty) {
-            // NMT_TRANSITION: old class files denoted a polymorphic nullary method as PolyType(tps, restpe), we now require PolyType(tps, NullaryMethodType(restpe))
-            // when a type of kind * is expected (forceProperType is true), we know restpe should be wrapped in a NullaryMethodType (if it wasn't suitably wrapped yet)
-            def transitionNMT(restpe: Type) = {
-              val resTpeCls = restpe.getClass.toString // what's uglier than isInstanceOf? right! -- isInstanceOf does not work since the concrete types are defined in the compiler (not in scope here)
-              if(forceProperType /*&& pickleformat < 2.9 */ && !(resTpeCls.endsWith("MethodType"))) { assert(!resTpeCls.contains("ClassInfoType"))
-                  NullaryMethodType(restpe) }
-                else restpe
-            }
-            PolyType(typeParams, transitionNMT(restpe))
-          }
-          else
-            NullaryMethodType(restpe)
-        case EXISTENTIALtpe =>
-          val restpe  = readTypeRef()
-          newExistentialType(until(end, readSymbolRef), restpe)
-
-        case ANNOTATEDtpe =>
-          var typeRef = readNat()
-          val selfsym = if (isSymbolRef(typeRef)) {
-            val s = at(typeRef, readSymbol)
-            typeRef = readNat()
-            s
-          } else NoSymbol // selfsym can go.
-          val tp = at(typeRef, () => readType(forceProperType)) // NMT_TRANSITION
-          val annots = until(end, readAnnotationRef)
-          if (selfsym == NoSymbol) AnnotatedType(annots, tp, selfsym)
-          else tp
-        case _ =>
-          noSuchTypeTag(tag, end)
+        case NOtpe                     => NoType
+        case NOPREFIXtpe               => NoPrefix
+        case THIStpe                   => ThisType(readSymbolRef())
+        case SINGLEtpe                 => SingleType(readTypeRef(), readSymbolRef())
+        case SUPERtpe                  => SuperType(readTypeRef(), readTypeRef())
+        case CONSTANTtpe               => ConstantType(readConstantRef())
+        case TYPEREFtpe                => TypeRef(readTypeRef(), readSymbolRef(), readTypes())
+        case TYPEBOUNDStpe             => TypeBounds(readTypeRef(), readTypeRef())
+        case REFINEDtpe | CLASSINFOtpe => CompoundType(readSymbolRef(), readTypes())
+        case METHODtpe                 => MethodTypeRef(readTypeRef(), readSymbols())
+        case POLYtpe                   => PolyOrNullaryType(readTypeRef(), readSymbols())
+        case EXISTENTIALtpe            => ExistentialType(underlying = readTypeRef(), quantified = readSymbols())
+        case ANNOTATEDtpe              => AnnotatedType(underlying = readTypeRef(), annotations = readAnnots())
       }
     }
 
@@ -431,7 +415,7 @@ abstract class UnPickler {
     protected def readChildren() {
       val tag = readByte()
       assert(tag == CHILDREN)
-      val end = readNat() + readIndex
+      val end = readEnd()
       val target = readSymbolRef()
       while (readIndex != end) target addChild readSymbolRef()
     }
@@ -450,7 +434,7 @@ abstract class UnPickler {
      */
     private def readArrayAnnot() = {
       readByte() // skip the `annotargarray` tag
-      val end = readNat() + readIndex
+      val end = readEnd()
       until(end, () => readClassfileAnnotArg(readNat())).toArray(JavaArgumentTag)
     }
     protected def readClassfileAnnotArg(i: Int): ClassfileAnnotArg = bytes(index(i)) match {
@@ -483,10 +467,8 @@ abstract class UnPickler {
      *  the symbol it requests. Called at top-level, for all
      *  (symbol, annotInfo) entries. */
     protected def readSymbolAnnotation() {
-      val tag = readByte()
-      if (tag != SYMANNOT)
-        errorBadSignature("symbol annotation expected ("+ tag +")")
-      val end = readNat() + readIndex
+      expect(SYMANNOT, "symbol annotation expected")
+      val end = readEnd()
       val target = readSymbolRef()
       target.addAnnotation(readAnnotationInfo(end))
     }
@@ -497,264 +479,105 @@ abstract class UnPickler {
       val tag = readByte()
       if (tag != ANNOTINFO)
         errorBadSignature("annotation expected (" + tag + ")")
-      val end = readNat() + readIndex
+      val end = readEnd()
       readAnnotationInfo(end)
     }
 
-    /* Read an abstract syntax tree */
-    protected def readTree(): Tree = {
-      val outerTag = readByte()
-      if (outerTag != TREE)
-        errorBadSignature("tree expected (" + outerTag + ")")
-      val end = readNat() + readIndex
-      val tag = readByte()
-      val tpe = if (tag == EMPTYtree) NoType else readTypeRef()
-
-      // Set by the three functions to follow.  If symbol is non-null
-      // after the new tree 't' has been created, t has its Symbol
-      // set to symbol; and it always has its Type set to tpe.
-      var symbol: Symbol = null
-      var mods: Modifiers = null
-      var name: Name = null
-
-      /** Read a Symbol, Modifiers, and a Name */
-      def setSymModsName() {
-        symbol = readSymbolRef()
-        mods = readModifiersRef()
-        name = readNameRef()
+    private def readNonEmptyTree(tag: Int, end: Int): Tree = {
+      @inline def all[T](body: => T): List[T] = until(end, () => body)
+      @inline def rep[T](body: => T): List[T] = times(readNat(), () => body)
+
+      // !!! What is this doing here?
+      def fixApply(tree: Apply, tpe: Type): Apply = {
+        val Apply(fun, args) = tree
+        if (fun.symbol.isOverloaded) {
+          fun setType fun.symbol.info
+          inferMethodAlternative(fun, args map (_.tpe), tpe)
+        }
+        tree
       }
-      /** Read a Symbol and a Name */
-      def setSymName() {
-        symbol = readSymbolRef()
-        name = readNameRef()
+      def ref()         = readTreeRef()
+      def caseRef()     = readCaseDefRef()
+      def modsRef()     = readModifiersRef()
+      def implRef()     = readTemplateRef()
+      def nameRef()     = readNameRef()
+      def tparamRef()   = readTypeDefRef()
+      def vparamRef()   = readValDefRef()
+      def memberRef()   = readMemberDefRef()
+      def constRef()    = readConstantRef()
+      def idRef()       = readIdentRef()
+      def termNameRef() = readNameRef().toTermName
+      def typeNameRef() = readNameRef().toTypeName
+      def refTreeRef()  = ref() match {
+        case t: RefTree => t
+        case t          => errorBadSignature("RefTree expected, found " + t.shortClass)
       }
-      /** Read a Symbol */
-      def setSym() {
-        symbol = readSymbolRef()
+      def selectorsRef() = all(ImportSelector(nameRef(), -1, nameRef(), -1))
+
+      /** A few of the most popular trees have been pulled to the top for
+       *  switch efficiency purposes.
+       */
+      def readTree(tpe: Type): Tree = (tag: @switch) match {
+        case IDENTtree           => Ident(nameRef)
+        case SELECTtree          => Select(ref, nameRef)
+        case APPLYtree           => fixApply(Apply(ref, all(ref)), tpe) // !!!
+        case BINDtree            => Bind(nameRef, ref)
+        case BLOCKtree           => all(ref) match { case stats :+ expr => Block(stats, expr) }
+        case IFtree              => If(ref, ref, ref)
+        case LITERALtree         => Literal(constRef)
+        case TYPEAPPLYtree       => TypeApply(ref, all(ref))
+        case TYPEDtree           => Typed(ref, ref)
+        case ALTERNATIVEtree     => Alternative(all(ref))
+        case ANNOTATEDtree       => Annotated(ref, ref)
+        case APPLIEDTYPEtree     => AppliedTypeTree(ref, all(ref))
+        case APPLYDYNAMICtree    => ApplyDynamic(ref, all(ref))
+        case ARRAYVALUEtree      => ArrayValue(ref, all(ref))
+        case ASSIGNtree          => Assign(ref, ref)
+        case CASEtree            => CaseDef(ref, ref, ref)
+        case CLASStree           => ClassDef(modsRef, typeNameRef, rep(tparamRef), implRef)
+        case COMPOUNDTYPEtree    => CompoundTypeTree(implRef)
+        case DEFDEFtree          => DefDef(modsRef, termNameRef, rep(tparamRef), rep(rep(vparamRef)), ref, ref)
+        case EXISTENTIALTYPEtree => ExistentialTypeTree(ref, all(memberRef))
+        case FUNCTIONtree        => Function(rep(vparamRef), ref)
+        case IMPORTtree          => Import(ref, selectorsRef)
+        case LABELtree           => LabelDef(termNameRef, rep(idRef), ref)
+        case MATCHtree           => Match(ref, all(caseRef))
+        case MODULEtree          => ModuleDef(modsRef, termNameRef, implRef)
+        case NEWtree             => New(ref)
+        case PACKAGEtree         => PackageDef(refTreeRef, all(ref))
+        case RETURNtree          => Return(ref)
+        case SELECTFROMTYPEtree  => SelectFromTypeTree(ref, typeNameRef)
+        case SINGLETONTYPEtree   => SingletonTypeTree(ref)
+        case STARtree            => Star(ref)
+        case SUPERtree           => Super(ref, typeNameRef)
+        case TEMPLATEtree        => Template(rep(ref), vparamRef, all(ref))
+        case THIStree            => This(typeNameRef)
+        case THROWtree           => Throw(ref)
+        case TREtree             => Try(ref, rep(caseRef), ref)
+        case TYPEBOUNDStree      => TypeBoundsTree(ref, ref)
+        case TYPEDEFtree         => TypeDef(modsRef, typeNameRef, rep(tparamRef), ref)
+        case TYPEtree            => TypeTree()
+        case UNAPPLYtree         => UnApply(ref, all(ref))
+        case VALDEFtree          => ValDef(modsRef, termNameRef, ref, ref)
+        case _                   => noSuchTreeTag(tag, end)
       }
 
-      val t = tag match {
-        case EMPTYtree =>
-          EmptyTree
-
-        case PACKAGEtree =>
-          setSym()
-          val pid = readTreeRef().asInstanceOf[RefTree]
-          val stats = until(end, readTreeRef)
-          PackageDef(pid, stats)
-
-        case CLASStree =>
-          setSymModsName()
-          val impl = readTemplateRef()
-          val tparams = until(end, readTypeDefRef)
-          ClassDef(mods, name.toTypeName, tparams, impl)
-
-        case MODULEtree =>
-          setSymModsName()
-          ModuleDef(mods, name.toTermName, readTemplateRef())
-
-        case VALDEFtree =>
-          setSymModsName()
-          val tpt = readTreeRef()
-          val rhs = readTreeRef()
-          ValDef(mods, name.toTermName, tpt, rhs)
-
-        case DEFDEFtree =>
-          setSymModsName()
-          val tparams = times(readNat(), readTypeDefRef)
-          val vparamss = times(readNat(), () => times(readNat(), readValDefRef))
-          val tpt = readTreeRef()
-          val rhs = readTreeRef()
-          DefDef(mods, name.toTermName, tparams, vparamss, tpt, rhs)
-
-        case TYPEDEFtree =>
-          setSymModsName()
-          val rhs = readTreeRef()
-          val tparams = until(end, readTypeDefRef)
-          TypeDef(mods, name.toTypeName, tparams, rhs)
-
-        case LABELtree =>
-          setSymName()
-          val rhs = readTreeRef()
-          val params = until(end, readIdentRef)
-          LabelDef(name.toTermName, params, rhs)
-
-        case IMPORTtree =>
-          setSym()
-          val expr = readTreeRef()
-          val selectors = until(end, () => {
-            val from = readNameRef()
-            val to = readNameRef()
-            ImportSelector(from, -1, to, -1)
-          })
-
-          Import(expr, selectors)
-
-        case TEMPLATEtree =>
-          setSym()
-          val parents = times(readNat(), readTreeRef)
-          val self = readValDefRef()
-          val body = until(end, readTreeRef)
-
-          Template(parents, self, body)
-
-        case BLOCKtree =>
-          val expr = readTreeRef()
-          val stats = until(end, readTreeRef)
-          Block(stats, expr)
-
-        case CASEtree =>
-          val pat = readTreeRef()
-          val guard = readTreeRef()
-          val body = readTreeRef()
-          CaseDef(pat, guard, body)
-
-        case ALTERNATIVEtree =>
-          Alternative(until(end, readTreeRef))
-
-        case STARtree =>
-          Star(readTreeRef())
-
-        case BINDtree =>
-          setSymName()
-          Bind(name, readTreeRef())
-
-        case UNAPPLYtree =>
-          val fun = readTreeRef()
-          val args = until(end, readTreeRef)
-          UnApply(fun, args)
-
-        case ARRAYVALUEtree =>
-          val elemtpt = readTreeRef()
-          val trees = until(end, readTreeRef)
-          ArrayValue(elemtpt, trees)
-
-        case FUNCTIONtree =>
-          setSym()
-          val body = readTreeRef()
-          val vparams = until(end, readValDefRef)
-          Function(vparams, body)
-
-        case ASSIGNtree =>
-          val lhs = readTreeRef()
-          val rhs = readTreeRef()
-          Assign(lhs, rhs)
-
-        case IFtree =>
-          val cond = readTreeRef()
-          val thenp = readTreeRef()
-          val elsep = readTreeRef()
-          If(cond, thenp, elsep)
-
-        case MATCHtree =>
-          val selector = readTreeRef()
-          val cases = until(end, readCaseDefRef)
-          Match(selector, cases)
-
-        case RETURNtree =>
-          setSym()
-          Return(readTreeRef())
-
-        case TREtree =>
-          val block = readTreeRef()
-          val finalizer = readTreeRef()
-          val catches = until(end, readCaseDefRef)
-          Try(block, catches, finalizer)
-
-        case THROWtree =>
-          Throw(readTreeRef())
-
-        case NEWtree =>
-          New(readTreeRef())
-
-        case TYPEDtree =>
-          val expr = readTreeRef()
-          val tpt = readTreeRef()
-          Typed(expr, tpt)
-
-        case TYPEAPPLYtree =>
-          val fun = readTreeRef()
-          val args = until(end, readTreeRef)
-          TypeApply(fun, args)
-
-        case APPLYtree =>
-          val fun = readTreeRef()
-          val args = until(end, readTreeRef)
-          if (fun.symbol.isOverloaded) {
-            fun.setType(fun.symbol.info)
-            inferMethodAlternative(fun, args map (_.tpe), tpe)
-          }
-          Apply(fun, args)
-
-        case APPLYDYNAMICtree =>
-          setSym()
-          val qual = readTreeRef()
-          val args = until(end, readTreeRef)
-          ApplyDynamic(qual, args)
-
-        case SUPERtree =>
-          setSym()
-          val qual = readTreeRef()
-          val mix = readTypeNameRef()
-          Super(qual, mix)
-
-        case THIStree =>
-          setSym()
-          This(readTypeNameRef())
-
-        case SELECTtree =>
-          setSym()
-          val qualifier = readTreeRef()
-          val selector = readNameRef()
-          Select(qualifier, selector)
-
-        case IDENTtree =>
-          setSymName()
-          Ident(name)
-
-        case LITERALtree =>
-          Literal(readConstantRef())
-
-        case TYPEtree =>
-          TypeTree()
-
-        case ANNOTATEDtree =>
-          val annot = readTreeRef()
-          val arg = readTreeRef()
-          Annotated(annot, arg)
-
-        case SINGLETONTYPEtree =>
-          SingletonTypeTree(readTreeRef())
-
-        case SELECTFROMTYPEtree =>
-          val qualifier = readTreeRef()
-          val selector = readTypeNameRef()
-          SelectFromTypeTree(qualifier, selector)
-
-        case COMPOUNDTYPEtree =>
-          CompoundTypeTree(readTemplateRef())
-
-        case APPLIEDTYPEtree =>
-          val tpt = readTreeRef()
-          val args = until(end, readTreeRef)
-          AppliedTypeTree(tpt, args)
-
-        case TYPEBOUNDStree =>
-          val lo = readTreeRef()
-          val hi = readTreeRef()
-          TypeBoundsTree(lo, hi)
-
-        case EXISTENTIALTYPEtree =>
-          val tpt = readTreeRef()
-          val whereClauses = until(end, readTreeRef)
-          ExistentialTypeTree(tpt, whereClauses)
+      val tpe    = readTypeRef()
+      val sym    = if (isTreeSymbolPickled(tag)) readSymbolRef() else null
+      val result = readTree(tpe)
 
-        case _ =>
-          noSuchTreeTag(tag, end)
-      }
+      if (sym ne null) result setSymbol sym
+      result setType tpe
+    }
 
-      if (symbol == null) t setType tpe
-      else t setSymbol symbol setType tpe
+    /* Read an abstract syntax tree */
+    protected def readTree(): Tree = {
+      expect(TREE, "tree expected")
+      val end = readEnd()
+      readByte() match {
+        case EMPTYtree => EmptyTree
+        case tag       => readNonEmptyTree(tag, end)
+      }
     }
 
     def noSuchTreeTag(tag: Int, end: Int) =
@@ -764,7 +587,8 @@ abstract class UnPickler {
       val tag = readNat()
       if (tag != MODIFIERS)
         errorBadSignature("expected a modifiers tag (" + tag + ")")
-      val end = readNat() + readIndex
+
+      readEnd()
       val pflagsHi = readNat()
       val pflagsLo = readNat()
       val pflags = (pflagsHi.toLong << 32) + pflagsLo
@@ -796,7 +620,6 @@ abstract class UnPickler {
     protected def readTreeRef(): Tree                 = at(readNat(), readTree)
 
     protected def readTypeNameRef(): TypeName         = readNameRef().toTypeName
-    protected def readTermNameRef(): TermName         = readNameRef().toTermName
 
     protected def readTemplateRef(): Template =
       readTreeRef() match {
@@ -828,6 +651,12 @@ abstract class UnPickler {
         case other =>
           errorBadSignature("expected an TypeDef (" + other + ")")
       }
+    protected def readMemberDefRef(): MemberDef =
+      readTreeRef() match {
+        case tree: MemberDef => tree
+        case other =>
+          errorBadSignature("expected a MemberDef (" + other + ")")
+      }
 
     protected def errorBadSignature(msg: String) =
       throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg)
@@ -843,7 +672,6 @@ abstract class UnPickler {
      *  error reporting, so we rely on the typechecker to report the error).
      */
     def toTypeError(e: MissingRequirementError) = {
-      // e.printStackTrace()
       new TypeError(e.msg)
     }
 
@@ -851,15 +679,20 @@ abstract class UnPickler {
     private class LazyTypeRef(i: Int) extends LazyType with FlagAgnosticCompleter {
       private val definedAtRunId = currentRunId
       private val p = phase
-      override def complete(sym: Symbol) : Unit = try {
+      protected def completeInternal(sym: Symbol) : Unit = try {
         val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType`
-        atPhase(p) (sym setInfo tp)
+        if (p ne null)
+          slowButSafeEnteringPhase(p) (sym setInfo tp)
         if (currentRunId != definedAtRunId)
           sym.setInfo(adaptToNewRunMap(tp))
       }
       catch {
         case e: MissingRequirementError => throw toTypeError(e)
       }
+      override def complete(sym: Symbol) : Unit = {
+        completeInternal(sym)
+        if (!isCompilerUniverse) markAllCompleted(sym)
+      }
       override def load(sym: Symbol) { complete(sym) }
     }
 
@@ -867,11 +700,12 @@ abstract class UnPickler {
      *  of completed symbol to symbol at index `j`.
      */
     private class LazyTypeRefAndAlias(i: Int, j: Int) extends LazyTypeRef(i) {
-      override def complete(sym: Symbol) = try {
-        super.complete(sym)
+      override def completeInternal(sym: Symbol) = try {
+        super.completeInternal(sym)
+
         var alias = at(j, readSymbol)
         if (alias.isOverloaded)
-          alias = atPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))))
+          alias = slowButSafeEnteringPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))))
 
         sym.asInstanceOf[TermSymbol].setAlias(alias)
       }
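
One small pattern worth calling out from the refactor above: `runAtIndex` (like `at`)
jumps the read cursor to a pickled entry, runs a reader, and restores the cursor in a
`finally`. A self-contained sketch of that save/restore idiom on a toy reader (not the
UnPickler API itself):

    object CursorSketch {
      final class Reader(data: Array[Byte]) {
        var pos = 0
        def readByte(): Int = { val b = data(pos).toInt; pos += 1; b }

        // Read "out of line" at `index`, then put the cursor back where it was.
        def at[T](index: Int)(body: => T): T = {
          val saved = pos
          pos = index
          try body finally pos = saved   // restored even if `body` throws
        }
      }

      def main(args: Array[String]): Unit = {
        val r = new Reader(Array[Byte](10, 20, 30))
        r.readByte()                     // pos is now 1
        println(r.at(2)(r.readByte()))   // 30, read out of line
        println(r.pos)                   // 1, cursor restored
      }
    }
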
diff --git a/src/reflect/scala/reflect/internal/settings/AbsSettings.scala b/src/reflect/scala/reflect/internal/settings/AbsSettings.scala
index a6fb418..859f703 100644
--- a/src/reflect/scala/reflect/internal/settings/AbsSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/AbsSettings.scala
@@ -3,7 +3,8 @@
  * @author  Paul Phillips
  */
 
-package scala.reflect.internal
+package scala
+package reflect.internal
 package settings
 
 /** A Settings abstraction boiled out of the original highly mutable Settings
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
index 81368df..a494c7f 100644
--- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -4,7 +4,8 @@
  */
 // $Id$
 
-package scala.reflect.internal
+package scala
+package reflect.internal
 package settings
 
 /** A mutable Settings object.
@@ -32,19 +33,32 @@ abstract class MutableSettings extends AbsSettings {
     }
   }
 
-  def overrideObjects: BooleanSetting
-  def printtypes: BooleanSetting
+  def Xexperimental: BooleanSetting
+  def XfullLubs: BooleanSetting
+  def XnoPatmatAnalysis: BooleanSetting
+  def Xprintpos: BooleanSetting
+  def strictInference: BooleanSetting
+  def Yposdebug: BooleanSetting
+  def Yrangepos: BooleanSetting
+  def Yshowsymowners: BooleanSetting
+  def Yshowsymkinds: BooleanSetting
+  def breakCycles: BooleanSetting
   def debug: BooleanSetting
-  def Ynotnull: BooleanSetting
+  def developer: BooleanSetting
   def explaintypes: BooleanSetting
-  def verbose: BooleanSetting
+  def overrideObjects: BooleanSetting
+  def printtypes: BooleanSetting
   def uniqid: BooleanSetting
-  def Yshowsymkinds: BooleanSetting
-  def Xprintpos: BooleanSetting
+  def verbose: BooleanSetting
+
   def Yrecursion: IntSetting
   def maxClassfileName: IntSetting
-  def Xexperimental: BooleanSetting
-  def XoldPatmat: BooleanSetting
-  def XnoPatmatAnalysis: BooleanSetting
-  def XfullLubs: BooleanSetting
+
+  def isScala211: Boolean
+}
+
+object MutableSettings {
+  import scala.language.implicitConversions
+  /** Support the common use case, `if (settings.debug) println("Hello, martin.")` */
+  @inline implicit def reflectSettingToBoolean(s: MutableSettings#BooleanSetting): Boolean = s.value
 }
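
The new implicit conversion is what makes the quoted use case compile: a
`BooleanSetting` written in a condition is converted to its `value`. A standalone
sketch of the same trick with invented setting types:

    object SettingsSketch {
      import scala.language.implicitConversions

      final class BooleanSetting(val name: String, var value: Boolean = false)

      // Mirrors the idea of reflectSettingToBoolean: call sites can test a setting directly.
      implicit def settingToBoolean(s: BooleanSetting): Boolean = s.value

      object settings { val debug = new BooleanSetting("-Ydebug") }

      def main(args: Array[String]): Unit = {
        settings.debug.value = true
        if (settings.debug) println("Hello, martin.")
      }
    }
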
diff --git a/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala
new file mode 100644
index 0000000..f879960
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala
@@ -0,0 +1,51 @@
+package scala
+package reflect
+package internal
+package tpe
+
+private[internal] trait CommonOwners {
+  self: SymbolTable =>
+
+  /** The most deeply nested owner that contains all the symbols
+    *  of thistype or prefixless typerefs/singletype occurrences in the given type.
+    */
+  protected[internal] def commonOwner(t: Type): Symbol = commonOwner(t :: Nil)
+
+  /** The most deeply nested owner that contains all the symbols
+    *  of thistype or prefixless typerefs/singletype occurrences in the given list
+    *  of types.
+    */
+  protected[internal] def commonOwner(tps: List[Type]): Symbol = {
+    if (tps.isEmpty) NoSymbol
+    else {
+      commonOwnerMap.clear()
+      tps foreach (commonOwnerMap traverse _)
+      if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol
+    }
+  }
+
+  protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj
+
+  protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] {
+    var result: Symbol = _
+
+    def clear() { result = null }
+
+    private def register(sym: Symbol) {
+      // First considered type is the trivial result.
+      if ((result eq null) || (sym eq NoSymbol))
+        result = sym
+      else
+        while ((result ne NoSymbol) && (result ne sym) && !(sym isNestedIn result))
+          result = result.owner
+    }
+    def traverse(tp: Type) = tp.normalize match {
+      case ThisType(sym)                => register(sym)
+      case TypeRef(NoPrefix, sym, args) => register(sym.owner) ; args foreach traverse
+      case SingleType(NoPrefix, sym)    => register(sym.owner)
+      case _                            => mapOver(tp)
+    }
+  }
+
+  private lazy val commonOwnerMapObj = new CommonOwnerMap
+}
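
The `register` step above walks the current result up its owner chain until it
encloses the next symbol. A toy model of that walk with an invented `Sym` class (the
real code also special-cases `NoSymbol` and reuses a traverser across calls):

    object CommonOwnerSketch {
      final case class Sym(name: String, owner: Sym) {
        def isNestedIn(other: Sym): Boolean =
          (this == other) || (owner != null && owner.isNestedIn(other))
      }

      def commonOwner(syms: List[Sym]): Sym =
        syms.reduceLeft { (result, sym) =>
          var r = result
          while (r != null && !(sym isNestedIn r)) r = r.owner   // climb until r encloses sym
          r
        }

      def main(args: Array[String]): Unit = {
        val root   = Sym("root", null)
        val pkg    = Sym("pkg", root)
        val classA = Sym("A", pkg)
        val classB = Sym("B", pkg)
        val inner  = Sym("Inner", classA)
        println(commonOwner(List(inner, classB)).name)   // pkg
      }
    }
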
diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala
new file mode 100644
index 0000000..de54f37
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala
@@ -0,0 +1,288 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author  Jason Zaugg
+ */
+package scala.reflect.internal
+package tpe
+
+import Flags._
+import util.Statistics
+import TypesStats._
+
+trait FindMembers {
+  this: SymbolTable =>
+
+  /** Implementation of `Type#{findMember, findMembers}` */
+  private[internal] abstract class FindMemberBase[T](tpe: Type, name: Name, excludedFlags: Long, requiredFlags: Long) {
+    protected val initBaseClasses: List[Symbol] = tpe.baseClasses
+
+    // The first base class, or the symbol of the ThisType
+    // e.g in:
+    // trait T { self: C => }
+    //
+    // The selector class of `T.this.type` is `T`, and *not* the first base class, `C`.
+    private[this] var _selectorClass: Symbol = null
+    private def selectorClass: Symbol = {
+      if (_selectorClass eq null) {
+        _selectorClass = tpe match {
+          case tt: ThisType => tt.sym // SI-7507 the first base class is not necessarily the selector class.
+          case _            => initBaseClasses.head
+        }
+      }
+      _selectorClass
+    }
+
+    // Cache for the narrowed type of `tp` (in `tp.findMember`).
+    // This is needed to avoid the mismatched existential types reported in SI-5330.
+    private[this] var _self: Type = null
+    protected def self: Type = {
+      // TODO: use narrow only for modules? (correct? efficiency gain?) (<-- Note: this comment predates SI-5330)
+      if (_self eq null) _self = narrowForFindMember(tpe)
+      _self
+    }
+
+    // Main entry point
+    def apply(): T = {
+      if (Statistics.canEnable) Statistics.incCounter(findMemberCount)
+      val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMemberNanos) else null
+      try searchConcreteThenDeferred
+      finally if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+    }
+
+    protected def result: T
+
+    // SLS 5.1.3 First, a concrete definition always overrides an abstract definition
+    private def searchConcreteThenDeferred: T = {
+      val deferredSeen = walkBaseClasses(requiredFlags, excludedFlags | DEFERRED)
+      if (deferredSeen) // OPT: the `if` avoids a second pass if the first pass didn't spot any candidates.
+        walkBaseClasses(requiredFlags | DEFERRED, excludedFlags & ~(DEFERRED.toLong))
+      result
+    }
+
+    /*
+     * Walk up through the decls of each base class.
+     *
+     * Called in two passes: first excluding deferred, then mandating it.
+     *
+     * @return true if a potential deferred member was seen on the first pass (calling for a second pass)
+     *         and `excluded & DEFERRED != 0L`
+     */
+    private def walkBaseClasses(required: Long, excluded: Long): Boolean = {
+      var bcs = initBaseClasses
+
+      // Have we seen a candidate deferred member?
+      var deferredSeen = false
+
+      // All direct parents of refinement classes in the base class sequence
+      // from the current `walkBaseClasses`
+      var refinementParents: List[Symbol] = Nil
+
+      // Has the current `walkBaseClasses` encountered a non-refinement class?
+      var seenFirstNonRefinementClass = false
+
+      val findAll = name == nme.ANYname
+
+      while (!bcs.isEmpty) {
+        val currentBaseClass = bcs.head
+        val decls = currentBaseClass.info.decls
+        var entry = if (findAll) decls.elems else decls.lookupEntry(name)
+        while (entry ne null) {
+          val sym = entry.sym
+          val flags = sym.flags
+          val meetsRequirements = (flags & required) == required
+          if (meetsRequirements) {
+            val excl: Long = flags & excluded
+            val isExcluded: Boolean = excl != 0L
+            if (!isExcluded && isPotentialMember(sym, flags, currentBaseClass, seenFirstNonRefinementClass, refinementParents)) {
+              if (shortCircuit(sym)) return false
+              else addMemberIfNew(sym)
+            } else if (excl == DEFERRED) {
+              deferredSeen = true
+            }
+          }
+          entry = if (findAll) entry.next else decls lookupNextEntry entry
+        }
+
+        // SLS 5.2 The private modifier can be used with any definition or declaration in a template.
+        //         They are not inherited by subclasses [...]
+        if (currentBaseClass.isRefinementClass)
+          // SLS 3.2.7 A compound type T1 with . . . with Tn {R } represents objects with members as given in
+          //           the component types T1, ..., Tn and the refinement {R }
+          //
+          //           => private members should be included from T1, ... Tn. (SI-7475)
+          refinementParents :::= currentBaseClass.parentSymbols
+        else if (currentBaseClass.isClass)
+          seenFirstNonRefinementClass = true // only inherit privates of refinement parents after this point
+
+        bcs = bcs.tail
+      }
+      deferredSeen
+    }
+
+    /* Should this symbol be returned immediately as the sole result? */
+    protected def shortCircuit(sym: Symbol): Boolean
+
+    /* Add this member to the final result, unless an already-found member matches it. */
+    protected def addMemberIfNew(sym: Symbol): Unit
+
+    // Is `sym` potentially a member of `baseClass`?
+    //
+    // Q. When does a potential member fail to be an actual member?
+    // A. If it is subsumed by a member in a subclass.
+    private def isPotentialMember(sym: Symbol, flags: Long, owner: Symbol,
+                                  seenFirstNonRefinementClass: Boolean, refinementParents: List[Symbol]): Boolean = {
+      // conservatively (performance-wise) doing this with flag masks rather than `sym.isPrivate`
+      // to avoid multiple calls to `Symbol#flags`.
+      val isPrivate      = (flags & PRIVATE) == PRIVATE
+      val isPrivateLocal = (flags & PrivateLocal) == PrivateLocal
+
+      // TODO Is the special handling of `private[this]` vs `private` backed up by the spec?
+      def admitPrivate(sym: Symbol): Boolean =
+        (selectorClass == owner) || (
+             !isPrivateLocal // private[this] is only a member from within the selector class. (Optimization only? Does the spec back this up?)
+          && (
+                  !seenFirstNonRefinementClass
+               || refinementParents.contains(owner)
+             )
+        )
+
+      (!isPrivate || admitPrivate(sym)) && (sym.name != nme.CONSTRUCTOR || owner == initBaseClasses.head)
+    }
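+
+    // Illustrative sketch: the constructor guard above means that a lookup of
+    // nme.CONSTRUCTOR on a type only yields the constructor(s) declared in the first
+    // base class (the class of the type itself), never inherited constructors.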
+
+    // True unless the already-found `member` (with member type given by `memberTypeLow`) matches the candidate symbol `other`.
+    protected def isNewMember(member: Symbol, other: Symbol): Boolean =
+      (    (other ne member)
+        && (    (member.owner eq other.owner)                         // same owner, therefore overload
+             || (member.flags & PRIVATE) != 0                         // (unqualified) private members never participate in overriding
+             || (other.flags & PRIVATE) != 0                          // ... as overrider or overridee.
+             || !(memberTypeLow(member) matches memberTypeHi(other))  // do the member types match? If so, it's an override. Otherwise it's an overload.
+           )
+      )
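+
+    // Illustrative sketch (hypothetical A/B/f, member types written informally): given
+    //   trait A { def f(x: Int): Int }
+    //   trait B extends A { def f(x: String): Int }
+    // the two `f`s have different owners and member types that do not match, so the
+    // candidate counts as a new member (an overload) and both are kept; an overriding
+    //   def f(x: Int): Int
+    // in B would match the inherited member type and therefore not be added again.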
+
+    // Cache for the member type of a candidate member when comparing against multiple, already-found existing members
+    //
+    // TODO this cache is probably unnecessary, `tp.memberType(sym: MethodSymbol)` is already cached internally.
+    private[this] var _memberTypeHiCache: Type = null
+    private[this] var _memberTypeHiCacheSym: Symbol = null
+
+    protected def memberTypeHi(sym: Symbol): Type = {
+      if (_memberTypeHiCacheSym ne sym) {
+        _memberTypeHiCache = self.memberType(sym)
+        _memberTypeHiCacheSym = sym
+      }
+      _memberTypeHiCache
+    }
+
+    // member type of the LHS of `matches` call. This is an extension point to enable a cache in
+    // FindMember.
+    protected def memberTypeLow(sym: Symbol): Type = self.memberType(sym)
+
+    /** Same as a call to narrow unless existentials are visible
+     *  after widening the type. In that case, narrow from the widened
+     *  type instead of the proxy. This gives buried existentials a
+     *  chance to make peace with the other types. See SI-5330.
+     */
+    private def narrowForFindMember(tp: Type): Type = {
+      val w = tp.widen
+      // Only narrow on widened type when we have to -- narrow is expensive unless the target is a singleton type.
+      if ((tp ne w) && containsExistential(w)) w.narrow
+      else tp.narrow
+    }
+  }
+
+  private[reflect] final class FindMembers(tpe: Type, excludedFlags: Long, requiredFlags: Long)
+    extends FindMemberBase[Scope](tpe, nme.ANYname, excludedFlags, requiredFlags) {
+    private[this] var _membersScope: Scope   = null
+    private def membersScope: Scope = {
+      if (_membersScope eq null) _membersScope = newFindMemberScope
+      _membersScope
+    }
+
+    protected def shortCircuit(sym: Symbol): Boolean = false
+    protected def result: Scope = membersScope
+
+    protected def addMemberIfNew(sym: Symbol): Unit = {
+      val members = membersScope
+      var others = members.lookupEntry(sym.name)
+      var isNew = true
+      while ((others ne null) && isNew) {
+        val member = others.sym
+        if (!isNewMember(member, sym))
+          isNew = false
+        others = members lookupNextEntry others // next existing member with the same name.
+      }
+      if (isNew) members.enter(sym)
+    }
+  }
+
+  private[reflect] final class FindMember(tpe: Type, name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean)
+    extends FindMemberBase[Symbol](tpe, name, excludedFlags, requiredFlags) {
+    // Gathering the results into a hand-rolled ListBuffer
+    // TODO Try just using a ListBuffer to see if this low-level-ness is worth it.
+    private[this] var member0: Symbol       = NoSymbol
+    private[this] var members: List[Symbol] = null
+    private[this] var lastM: ::[Symbol]     = null
+
+    private def clearAndAddResult(sym: Symbol): Unit = {
+      member0 = sym
+      members = null
+      lastM = null
+    }
+
+    protected def shortCircuit(sym: Symbol): Boolean = (name.isTypeName || (stableOnly && sym.isStable && !sym.hasVolatileType)) && {
+      clearAndAddResult(sym)
+      true
+    }
+
+    protected def addMemberIfNew(sym: Symbol): Unit =
+      if (member0 eq NoSymbol) {
+        member0 = sym // The first found member
+      } else if (members eq null) {
+        // We've found exactly one member so far...
+        if (isNewMember(member0, sym)) {
+          // ... make that two.
+          lastM = new ::(sym, null)
+          members = member0 :: lastM
+        }
+      } else {
+        // Already found 2 or more members
+        var ms: List[Symbol] = members
+
+        var isNew = true
+        while ((ms ne null) && isNew) {
+          val member = ms.head
+          if (!isNewMember(member, sym))
+            isNew = false
+          ms = ms.tail
+        }
+        if (isNew) {
+          val lastM1 = new ::(sym, null)
+          lastM.tl = lastM1
+          lastM = lastM1
+        }
+      }
+
+    // Cache for the member type of the first member we find.
+    private[this] var _member0Tpe: Type = null
+    private[this] def member0Tpe: Type = {
+      assert(member0 != null)
+      if (_member0Tpe eq null) _member0Tpe = self.memberType(member0)
+      _member0Tpe
+    }
+
+    override protected def memberTypeLow(sym: Symbol): Type =
+      if (sym eq member0) member0Tpe else super.memberTypeLow(sym)
+
+    // Assemble the result from the hand-rolled ListBuffer
+    protected def result: Symbol = if (members eq null) {
+      if (member0 == NoSymbol) {
+        if (Statistics.canEnable) Statistics.incCounter(noMemberCount)
+        NoSymbol
+      } else member0
+    } else {
+      if (Statistics.canEnable) Statistics.incCounter(multMemberCount)
+      lastM.tl = Nil
+      initBaseClasses.head.newOverloaded(tpe, members)
+    }
+  }
+}
diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
new file mode 100644
index 0000000..876685e
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
@@ -0,0 +1,611 @@
+package scala
+package reflect
+package internal
+package tpe
+
+import scala.collection.mutable
+import scala.annotation.tailrec
+import util.Statistics
+import Variance._
+
+private[internal] trait GlbLubs {
+  self: SymbolTable =>
+  import definitions._
+  import TypesStats._
+
+  private final val printLubs = scala.sys.props contains "scalac.debug.lub"
+  private final val strictInference = settings.strictInference
+
+  /** In case anyone wants to turn off lub verification without reverting anything. */
+  private final val verifyLubs = true
+
+  private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Depth) {
+    import util.TableDef
+    import TableDef.Column
+    def str(tp: Type) = {
+      if (tp == NoType) ""
+      else {
+        val s = ("" + tp).replaceAll("""[\w.]+\.(\w+)""", "$1")
+        if (s.length < 60) s
+        else (s take 57) + "..."
+      }
+    }
+
+    val sorted       = btsMap.toList.sortWith((x, y) => x._1.typeSymbol isLess y._1.typeSymbol)
+    val maxSeqLength = sorted.map(_._2.size).max
+    val padded       = sorted map (_._2.padTo(maxSeqLength, NoType))
+    val transposed   = padded.transpose
+
+    val columns: List[Column[List[Type]]] = mapWithIndex(sorted) {
+      case ((k, v), idx) =>
+        Column(str(k), (xs: List[Type]) => str(xs(idx)), left = true)
+    }
+
+    val tableDef = TableDef(columns: _*)
+    val formatted = tableDef.table(transposed)
+    println("** Depth is " + depth + "\n" + formatted)
+  }
+
+  /** From a list of types, find any which take type parameters
+    *  where the type parameter bounds contain references to other
+    *  types in the list (including the type itself).
+    *
+    *  @return List of symbol pairs holding the recursive type
+    *    parameter and the parameter which references it.
+    */
+  def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = {
+    if (ts.isEmpty) Nil
+    else {
+      val sym = ts.head.typeSymbol
+      require(ts.tail forall (_.typeSymbol == sym), ts)
+      for (p <- sym.typeParams ; in <- sym.typeParams ; if in.info.bounds contains p) yield
+        p -> in
+    }
+  }
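+
+  // Illustrative sketch (hypothetical Rec/X/Y): for an F-bounded type constructor such as
+  //   trait Rec[A <: Rec[A]]
+  // the bound of `A` refers back to `A`, so findRecursiveBounds(List(Rec[X], Rec[Y]))
+  // would yield the single pair (A, A).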
+
+  // only called when strictInference
+  private def willViolateRecursiveBounds(tp: Type, ts: List[Type], tsElimSub: List[Type]) = {
+    val typeSym     = ts.head.typeSymbol // we're uniform, the `.head` is as good as any.
+    def fbounds     = findRecursiveBounds(ts) map (_._2)
+    def isRecursive = typeSym.typeParams exists fbounds.contains
+
+    isRecursive && (transposeSafe(tsElimSub map (_.normalize.typeArgs)) match {
+      case Some(arggsTransposed) =>
+        val mergedTypeArgs = (tp match { case et: ExistentialType => et.underlying; case _ => tp}).typeArgs
+        exists3(typeSym.typeParams, mergedTypeArgs, arggsTransposed) {
+          (param, arg, lubbedArgs) =>
+            val isExistential = arg.typeSymbol.isExistentiallyBound
+            val isInFBound    = fbounds contains param
+            val wasLubbed     = !lubbedArgs.exists(_ =:= arg)
+            (!isExistential && isInFBound && wasLubbed)
+        }
+      case None => false
+    })
+  }
+
+  /** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix),
+    * compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types:
+    *
+    *    xs <= ys   iff   forall y in ys exists x in xs such that x <: y
+    *
+    *  @arg tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor)
+    *                (these type parameters may be referred to by type arguments in the BTS column of those types,
+    *                and must be interpreted as bound variables; i.e., under a type lambda that wraps the types that refer to these type params)
+    *  @arg tsBts    a matrix whose columns are basetype sequences
+    *                the first row is the original list of types for which we're computing the lub
+    *                  (except that type constructors have been applied to their dummyArgs)
+    *  @See baseTypeSeq  for a definition of sorted and upwards closed.
+    */
+  def lubList(ts: List[Type], depth: Depth): List[Type] = {
+    var lubListDepth = Depth.Zero
+    // This catches some recursive situations which would otherwise
+    // befuddle us, e.g. pos/hklub0.scala
+    def isHotForTs(xs: List[Type]) = ts exists (_.typeParams == xs.map(_.typeSymbol))
+
+    def elimHigherOrderTypeParam(tp: Type) = tp match {
+      case TypeRef(_, _, args) if args.nonEmpty && isHotForTs(args) =>
+        logResult("Retracting dummies from " + tp + " in lublist")(tp.typeConstructor)
+      case _ => tp
+    }
+    // pretypes is a tail-recursion-preserving accumulator.
+    @tailrec
+    def loop(pretypes: List[Type], tsBts: List[List[Type]]): List[Type] = {
+      lubListDepth = lubListDepth.incr
+
+      if (tsBts.isEmpty || (tsBts exists typeListIsEmpty)) pretypes.reverse
+      else if (tsBts.tail.isEmpty) pretypes.reverse ++ tsBts.head
+      else {
+        // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts.
+        // Invariant: all symbols "under" (closer to the first row) the frontier
+        // are smaller (according to _.isLess) than the ones "on and beyond" the frontier
+        val ts0     = tsBts map (_.head)
+
+        // Is the frontier made up of types with the same symbol?
+        val isUniformFrontier = (ts0: @unchecked) match {
+          case t :: ts  => ts forall (_.typeSymbol == t.typeSymbol)
+        }
+
+        // Produce a single type for this frontier by merging the prefixes and arguments of those
+        // typerefs that share the same symbol: that symbol is the current maximal symbol for which
+        // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before
+        // merging, strip targs that refer to bound tparams (when we're computing the lub of type
+        // constructors.) Also filter out all types that are a subtype of some other type.
+        if (isUniformFrontier) {
+          val tails = tsBts map (_.tail)
+          val ts1   = elimSub(ts0, depth) map elimHigherOrderTypeParam
+          mergePrefixAndArgs(ts1, Covariant, depth) match {
+            case NoType => loop(pretypes, tails)
+            case tp if strictInference && willViolateRecursiveBounds(tp, ts0, ts1) =>
+              log(s"Breaking recursion in lublist, advancing frontier and discaring merged prefix/args from $tp")
+              loop(pretypes, tails)
+            case tp =>
+              loop(tp :: pretypes, tails)
+          }
+        } else {
+          // frontier is not uniform yet, move it beyond the current minimal symbol;
+          // lather, rinse, repeat
+          val sym    = minSym(ts0)
+          val newtps = tsBts map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts)
+          if (printLubs) {
+            val str = (newtps.zipWithIndex map { case (tps, idx) =>
+              tps.map("        " + _ + "\n").mkString("   (" + idx + ")\n", "", "\n")
+            }).mkString("")
+
+            println("Frontier(\n" + str + ")")
+            printLubMatrix((ts zip tsBts).toMap, lubListDepth)
+          }
+
+          loop(pretypes, newtps)
+        }
+      }
+    }
+
+    val initialBTSes = ts map (_.baseTypeSeq.toList)
+    if (printLubs)
+      printLubMatrix((ts zip initialBTSes).toMap, depth)
+
+    loop(Nil, initialBTSes)
+  }
+
+  /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). */
+  private def minSym(tps: List[Type]): Symbol =
+    (tps.head.typeSymbol /: tps.tail) {
+      (sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1
+    }
+
+  /** A minimal type list which has a given list of types as its base type sequence */
+  def spanningTypes(ts: List[Type]): List[Type] = ts match {
+    case List() => List()
+    case first :: rest =>
+      first :: spanningTypes(
+        rest filter (t => !first.typeSymbol.isSubClass(t.typeSymbol)))
+  }
+
+  /** Eliminate from list of types all elements which are a supertype
+    *  of some other element of the list. */
+  private def elimSuper(ts: List[Type]): List[Type] = ts match {
+    case List() => List()
+    case List(t) => List(t)
+    case t :: ts1 =>
+      val rest = elimSuper(ts1 filter (t1 => !(t <:< t1)))
+      if (rest exists (t1 => t1 <:< t)) rest else t :: rest
+  }
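+
+  // Illustrative sketch: since String <:< AnyRef <:< Any, the supertypes are eliminated and
+  // elimSuper(List(typeOf[Any], typeOf[AnyRef], typeOf[String])) keeps only String.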
+
+  /** Eliminate from list of types all elements which are a subtype
+    *  of some other element of the list. */
+  private def elimSub(ts: List[Type], depth: Depth): List[Type] = {
+    def elimSub0(ts: List[Type]): List[Type] = ts match {
+      case List() => List()
+      case List(t) => List(t)
+      case t :: ts1 =>
+        val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, depth.decr)))
+        if (rest exists (t1 => isSubType(t, t1, depth.decr))) rest else t :: rest
+    }
+    val ts0 = elimSub0(ts)
+    if (ts0.isEmpty || ts0.tail.isEmpty) ts0
+    else {
+      val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden))
+      if (ts1 eq ts0) ts0
+      else elimSub(ts1, depth)
+    }
+  }
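+
+  // Illustrative sketch (the dual of elimSuper): elimSub(List(typeOf[AnyRef], typeOf[String]), depth)
+  // drops the subtype String and keeps only AnyRef.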
+
+  private def stripExistentialsAndTypeVars(ts: List[Type]): (List[Type], List[Symbol]) = {
+    val quantified = ts flatMap {
+      case ExistentialType(qs, _) => qs
+      case t => List()
+    }
+    def stripType(tp: Type): Type = tp match {
+      case ExistentialType(_, res) =>
+        res
+      case tv @ TypeVar(_, constr) =>
+        if (tv.instValid) stripType(constr.inst)
+        else if (tv.untouchable) tv
+        else abort("trying to do lub/glb of typevar "+tp)
+      case t => t
+    }
+    val strippedTypes = ts mapConserve stripType
+    (strippedTypes, quantified)
+  }
+
+  /** Does this set of types have the same weak lub as
+   *  it does regular lub? This is exposed so lub callers
+   *  can discover whether the trees they are typing
+   *  may require further adaptation. It may return false
+   *  negatives, but it will not return false positives.
+   */
+  def sameWeakLubAsLub(tps: List[Type]) = tps match {
+    case Nil       => true
+    case tp :: Nil => !typeHasAnnotations(tp)
+    case tps       => !(tps exists typeHasAnnotations) && !(tps forall isNumericValueType)
+  }
+
+  /** If the arguments are all numeric value types, the numeric
+   *  lub according to the weak conformance spec. If any argument
+   *  has type annotations, take the lub of the unannotated type
+   *  and call the analyzerPlugin method annotationsLub so it can
+   *  be further altered. Otherwise, the regular lub.
+   */
+  def weakLub(tps: List[Type]): Type = (
+    if (tps.isEmpty)
+      NothingTpe
+    else if (tps forall isNumericValueType)
+      numericLub(tps)
+    else if (tps exists typeHasAnnotations)
+      annotationsLub(lub(tps map (_.withoutAnnotations)), tps)
+    else
+      lub(tps)
+  )
+
+  def numericLub(ts: List[Type]) =
+    ts reduceLeft ((t1, t2) =>
+      if (isNumericSubType(t1, t2)) t2
+      else if (isNumericSubType(t2, t1)) t1
+      else IntTpe)
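+
+  // Illustrative sketch: numericLub(List(IntTpe, LongTpe)) is LongTpe because Int is a
+  // numeric subtype of Long, while numericLub(List(ByteTpe, CharTpe)) falls back to IntTpe
+  // because neither is a numeric subtype of the other.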
+
+  private val _lubResults = new mutable.HashMap[(Depth, List[Type]), Type]
+  def lubResults = _lubResults
+
+  private val _glbResults = new mutable.HashMap[(Depth, List[Type]), Type]
+  def glbResults = _glbResults
+
+  def lub(ts: List[Type]): Type = ts match {
+    case Nil      => NothingTpe
+    case t :: Nil => t
+    case _        =>
+      if (Statistics.canEnable) Statistics.incCounter(lubCount)
+      val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
+      try {
+        val res = lub(ts, lubDepth(ts))
+        // If the number of unapplied type parameters in all incoming
+        // types is consistent, and the lub does not match that, return
+        // the type constructor of the calculated lub instead.  This
+        // is because lubbing type constructors tends to result in types
+        // which have been applied to dummies or Nothing.
+        ts.map(_.typeParams.size).distinct match {
+          case x :: Nil if res.typeParams.size != x =>
+            logResult(s"Stripping type args from lub because $res is not consistent with $ts")(res.typeConstructor)
+          case _                                    =>
+            res
+        }
+      }
+      finally {
+        lubResults.clear()
+        glbResults.clear()
+        if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+      }
+  }
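+
+  // Illustrative sketch: a typical use is unifying the branches of an expression, e.g.
+  // lub(List(typeOf[Some[Int]], typeOf[None.type])) is (roughly) Option[Int], modulo the
+  // refinement construction and verification performed by lub1 below.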
+
+  /** The least upper bound wrt <:< of a list of types */
+  protected[internal] def lub(ts: List[Type], depth: Depth): Type = {
+    def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match {
+      case List() => NothingTpe
+      case List(t) => t
+      case ts @ PolyType(tparams, _) :: _ =>
+        val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
+          tparam.cloneSymbol.setInfo(glb(bounds, depth)))
+        PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
+      case ts @ (mt @ MethodType(params, _)) :: rest =>
+        MethodType(params, lub0(matchingRestypes(ts, mt.paramTypes)))
+      case ts @ NullaryMethodType(_) :: rest =>
+        NullaryMethodType(lub0(matchingRestypes(ts, Nil)))
+      case ts @ TypeBounds(_, _) :: rest =>
+        TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
+      case ts @ AnnotatedType(annots, tpe) :: rest =>
+        annotationsLub(lub0(ts map (_.withoutAnnotations)), ts)
+      case ts =>
+        lubResults get ((depth, ts)) match {
+          case Some(lubType) =>
+            lubType
+          case None =>
+            lubResults((depth, ts)) = AnyTpe
+            val res = if (depth.isNegative) AnyTpe else lub1(ts)
+            lubResults((depth, ts)) = res
+            res
+        }
+    }
+    def lub1(ts0: List[Type]): Type = {
+      val (ts, tparams)            = stripExistentialsAndTypeVars(ts0)
+      val lubBaseTypes: List[Type] = lubList(ts, depth)
+      val lubParents               = spanningTypes(lubBaseTypes)
+      val lubOwner                 = commonOwner(ts)
+      val lubBase                  = intersectionType(lubParents, lubOwner)
+      val lubType =
+        if (phase.erasedTypes || depth.isZero ) lubBase
+        else {
+          val lubRefined  = refinedType(lubParents, lubOwner)
+          val lubThisType = lubRefined.typeSymbol.thisType
+          val narrowts    = ts map (_.narrow)
+          def excludeFromLub(sym: Symbol) = (
+            sym.isClass
+              || sym.isConstructor
+              || !sym.isPublic
+              || isGetClass(sym)
+              || sym.isFinal
+              || narrowts.exists(t => !refines(t, sym))
+            )
+          def lubsym(proto: Symbol): Symbol = {
+            val prototp = lubThisType.memberInfo(proto)
+            val syms = narrowts map (t =>
+              t.nonPrivateMember(proto.name).suchThat(sym =>
+                sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t)))
+
+            if (syms contains NoSymbol) NoSymbol
+            else {
+              val symtypes =
+                map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
+              if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
+                proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, depth.decr))
+              else if (symtypes.tail forall (symtypes.head =:= _))
+                proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head)
+              else {
+                def lubBounds(bnds: List[TypeBounds]): TypeBounds =
+                  TypeBounds(glb(bnds map (_.lo), depth.decr), lub(bnds map (_.hi), depth.decr))
+                lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos)
+                  .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds)))
+              }
+            }
+          }
+          def refines(tp: Type, sym: Symbol): Boolean = {
+            val syms = tp.nonPrivateMember(sym.name).alternatives
+            !syms.isEmpty && (syms forall (alt =>
+            // todo alt != sym is strictly speaking not correct, but without it we lose
+            // efficiency.
+              alt != sym && !specializesSym(lubThisType, sym, tp, alt, depth)))
+          }
+          // add a refinement symbol for all non-class members of lubBase
+          // which are refined by every type in ts.
+          for (sym <- lubBase.nonPrivateMembers ; if !excludeFromLub(sym)) {
+            try lubsym(sym) andAlso (addMember(lubThisType, lubRefined, _, depth))
+            catch {
+              case ex: NoCommonType =>
+            }
+          }
+          if (lubRefined.decls.isEmpty) lubBase
+          else if (!verifyLubs) lubRefined
+          else {
+            // Verify that every given type conforms to the calculated lub.
+            // In theory this should not be necessary, but higher-order type
+            // parameters are not handled correctly.
+            val ok = ts forall { t =>
+              isSubType(t, lubRefined, depth) || {
+                if (settings.debug || printLubs) {
+                  Console.println(
+                    "Malformed lub: " + lubRefined + "\n" +
+                      "Argument " + t + " does not conform.  Falling back to " + lubBase
+                  )
+                }
+                false
+              }
+            }
+            // If not, fall back on the more conservative calculation.
+            if (ok) lubRefined
+            else lubBase
+          }
+        }
+      // dropIllegalStarTypes is a localized fix for SI-6897. We should probably
+      // integrate that transformation at a lower level in master, but lubs are
+      // the likely and maybe only spot they escape, so fixing here for 2.10.1.
+      existentialAbstraction(tparams, dropIllegalStarTypes(lubType))
+    }
+    if (printLubs) {
+      println(indent + "lub of " + ts + " at depth "+depth)//debug
+      indent = indent + "  "
+      assert(indent.length <= 100)
+    }
+    if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
+    val res = lub0(ts)
+    if (printLubs) {
+      indent = indent stripSuffix "  "
+      println(indent + "lub of " + ts + " is " + res)//debug
+    }
+    res
+  }
+
+  val GlbFailure = new Throwable
+
+  /** A global counter for glb calls in the `specializes` query connected to the `addMember`
+    *  call in `glb`. There's a possible infinite recursion when `specializes` calls
+    *  memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb.
+    *  The counter breaks this recursion after two calls.
+    *  If the recursion is broken, no member is added to the glb.
+    */
+  private var globalGlbDepth = Depth.Zero
+  private final val globalGlbLimit = Depth(2)
+
+  /** The greatest lower bound of a list of types (as determined by `<:<`). */
+  def glb(ts: List[Type]): Type = elimSuper(ts) match {
+    case List() => AnyTpe
+    case List(t) => t
+    case ts0 =>
+      if (Statistics.canEnable) Statistics.incCounter(lubCount)
+      val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
+      try {
+        glbNorm(ts0, lubDepth(ts0))
+      } finally {
+        lubResults.clear()
+        glbResults.clear()
+        if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+      }
+  }
+
+  protected[internal] def glb(ts: List[Type], depth: Depth): Type = elimSuper(ts) match {
+    case List() => AnyTpe
+    case List(t) => t
+    case ts0 => glbNorm(ts0, depth)
+  }
+
+  /** The greatest lower bound of a list of types (as determined by `<:<`), which have been normalized
+    *  with regard to `elimSuper`. */
+  protected def glbNorm(ts: List[Type], depth: Depth): Type = {
+    def glb0(ts0: List[Type]): Type = ts0 match {
+      case List() => AnyTpe
+      case List(t) => t
+      case ts @ PolyType(tparams, _) :: _ =>
+        val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
+          tparam.cloneSymbol.setInfo(lub(bounds, depth)))
+        PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth))
+      case ts @ (mt @ MethodType(params, _)) :: rest =>
+        MethodType(params, glbNorm(matchingRestypes(ts, mt.paramTypes), depth))
+      case ts @ NullaryMethodType(_) :: rest =>
+        NullaryMethodType(glbNorm(matchingRestypes(ts, Nil), depth))
+      case ts @ TypeBounds(_, _) :: rest =>
+        TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
+      case ts =>
+        glbResults get ((depth, ts)) match {
+          case Some(glbType) =>
+            glbType
+          case _ =>
+            glbResults((depth, ts)) = NothingTpe
+            val res = if (depth.isNegative) NothingTpe else glb1(ts)
+            glbResults((depth, ts)) = res
+            res
+        }
+    }
+    def glb1(ts0: List[Type]): Type = {
+      try {
+        val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
+        val glbOwner = commonOwner(ts)
+        def refinedToParents(t: Type): List[Type] = t match {
+          case RefinedType(ps, _) => ps flatMap refinedToParents
+          case _ => List(t)
+        }
+        def refinedToDecls(t: Type): List[Scope] = t match {
+          case RefinedType(ps, decls) =>
+            val dss = ps flatMap refinedToDecls
+            if (decls.isEmpty) dss else decls :: dss
+          case _ => List()
+        }
+        val ts1 = ts flatMap refinedToParents
+        val glbBase = intersectionType(ts1, glbOwner)
+        val glbType =
+          if (phase.erasedTypes || depth.isZero) glbBase
+          else {
+            val glbRefined = refinedType(ts1, glbOwner)
+            val glbThisType = glbRefined.typeSymbol.thisType
+            def glbsym(proto: Symbol): Symbol = {
+              val prototp = glbThisType.memberInfo(proto)
+              val syms = for (t <- ts;
+                              alt <- (t.nonPrivateMember(proto.name).alternatives)
+                              if glbThisType.memberInfo(alt) matches prototp
+              ) yield alt
+              val symtypes = syms map glbThisType.memberInfo
+              assert(!symtypes.isEmpty)
+              proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted(
+                if (proto.isTerm) glb(symtypes, depth.decr)
+                else {
+                  def isTypeBound(tp: Type) = tp match {
+                    case TypeBounds(_, _) => true
+                    case _ => false
+                  }
+                  def glbBounds(bnds: List[Type]): TypeBounds = {
+                    val lo = lub(bnds map (_.bounds.lo), depth.decr)
+                    val hi = glb(bnds map (_.bounds.hi), depth.decr)
+                    if (lo <:< hi) TypeBounds(lo, hi)
+                    else throw GlbFailure
+                  }
+                  val symbounds = symtypes filter isTypeBound
+                  var result: Type =
+                    if (symbounds.isEmpty)
+                      TypeBounds.empty
+                    else glbBounds(symbounds)
+                  for (t <- symtypes if !isTypeBound(t))
+                    if (result.bounds containsType t) result = t
+                    else throw GlbFailure
+                  result
+                })
+            }
+            if (globalGlbDepth < globalGlbLimit)
+              try {
+                globalGlbDepth = globalGlbDepth.incr
+                val dss = ts flatMap refinedToDecls
+                for (ds <- dss; sym <- ds.iterator)
+                  if (globalGlbDepth < globalGlbLimit && !specializesSym(glbThisType, sym, depth))
+                    try {
+                      addMember(glbThisType, glbRefined, glbsym(sym), depth)
+                    } catch {
+                      case ex: NoCommonType =>
+                    }
+              } finally {
+                globalGlbDepth = globalGlbDepth.decr
+              }
+            if (glbRefined.decls.isEmpty) glbBase else glbRefined
+          }
+        existentialAbstraction(tparams, glbType)
+      } catch {
+        case GlbFailure =>
+          if (ts forall (t => NullTpe <:< t)) NullTpe
+          else NothingTpe
+      }
+    }
+    // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + "  " } //DEBUG
+    if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
+    glb0(ts)
+    // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
+  }
+
+  /** All types in list must be polytypes with type parameter lists of
+    *  same length as tparams.
+    *  Returns list of list of bounds infos, where corresponding type
+    *  parameters are renamed to tparams.
+    */
+  private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] = {
+    def getBounds(tp: Type): List[Type] = tp match {
+      case PolyType(tparams1, _) if sameLength(tparams1, tparams) =>
+        tparams1 map (tparam => tparam.info.substSym(tparams1, tparams))
+      case tp =>
+        if (tp ne tp.normalize) getBounds(tp.normalize)
+        else throw new NoCommonType(tps)
+    }
+    tps map getBounds
+  }
+
+  /** All types in list must be polytypes with type parameter lists of
+    *  same length as tparams.
+    *  Returns list of instance types, where corresponding type
+    *  parameters are renamed to tparams.
+    */
+  private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] = {
+    def transformResultType(tp: Type): Type = tp match {
+      case PolyType(tparams1, restpe) if sameLength(tparams1, tparams) =>
+        restpe.substSym(tparams1, tparams)
+      case tp =>
+        if (tp ne tp.normalize) transformResultType(tp.normalize)
+        else throw new NoCommonType(tps)
+    }
+    tps map transformResultType
+  }
+
+  /** All types in list must be method types with equal parameter types.
+    *  Returns list of their result types.
+    */
+  private def matchingRestypes(tps: List[Type], pts: List[Type]): List[Type] =
+    tps map {
+      case mt @ MethodType(params1, res) if isSameTypes(mt.paramTypes, pts) =>
+        res
+      case NullaryMethodType(res) if pts.isEmpty =>
+        res
+      case _ =>
+        throw new NoCommonType(tps)
+    }
+}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
new file mode 100644
index 0000000..f9b10c9
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -0,0 +1,592 @@
+package scala
+package reflect
+package internal
+package tpe
+
+import scala.collection.{ mutable }
+import util.{ Statistics, TriState }
+import scala.annotation.tailrec
+
+trait TypeComparers {
+  self: SymbolTable =>
+  import definitions._
+  import TypesStats._
+
+  private final val LogPendingSubTypesThreshold = TypeConstants.DefaultLogThreshhold
+
+  private val _pendingSubTypes = new mutable.HashSet[SubTypePair]
+  def pendingSubTypes = _pendingSubTypes
+
+  final case class SubTypePair(tp1: Type, tp2: Type) {
+    // SI-8146 we used to implement equality here in terms of pairwise =:=.
+    //         But, this was inconsistent with hashCode, which was based on the
+    //         Type#hashCode, based on the structure of types, not the meaning.
+    //         Now, we use `Type#{equals,hashCode}` as the (consistent) basis for
+    //         detecting cycles (aka keeping subtyping decidable.)
+    //
+    //         I added tests to show that we detect the cycle: neg/t8146-no-finitary*
+
+    override def toString = tp1+" <:<? "+tp2
+  }
+
+  private var _subsametypeRecursions: Int = 0
+  def subsametypeRecursions = _subsametypeRecursions
+  def subsametypeRecursions_=(value: Int) = _subsametypeRecursions = value
+
+  private def isUnifiable(pre1: Type, pre2: Type) = (
+       (isEligibleForPrefixUnification(pre1) || isEligibleForPrefixUnification(pre2))
+    && (pre1 =:= pre2)
+  )
+
+  /** Returns true iff we are past phase specialize,
+    *  sym1 and sym2 are two existential skolems with equal names and bounds,
+    *  and pre1 and pre2 are equal prefixes
+    */
+  private def isSameSpecializedSkolem(sym1: Symbol, sym2: Symbol, pre1: Type, pre2: Type) = {
+    sym1.isExistentialSkolem && sym2.isExistentialSkolem &&
+      sym1.name == sym2.name &&
+      phase.specialized &&
+      sym1.info =:= sym2.info &&
+      pre1 =:= pre2
+  }
+
+  private def isSubPre(pre1: Type, pre2: Type, sym: Symbol) =
+    if ((pre1 ne pre2) && (pre1 ne NoPrefix) && (pre2 ne NoPrefix) && pre1 <:< pre2) {
+      if (settings.debug) println(s"new isSubPre $sym: $pre1 <:< $pre2")
+      true
+    } else
+      false
+
+  private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean = (
+    if (sym1 == sym2)
+      sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
+    else
+      (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
+  )
+
+  def isDifferentType(tp1: Type, tp2: Type): Boolean = try {
+    subsametypeRecursions += 1
+    undoLog undo { // undo type constraints that arise from operations in this block
+      !isSameType1(tp1, tp2)
+    }
+  } finally {
+    subsametypeRecursions -= 1
+    // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+    // it doesn't help to keep separate recursion counts for the three methods that now share it
+    // if (subsametypeRecursions == 0) undoLog.clear()
+  }
+
+  def isDifferentTypeConstructor(tp1: Type, tp2: Type) = !isSameTypeConstructor(tp1, tp2)
+
+  private def isSameTypeConstructor(tr1: TypeRef, tr2: TypeRef): Boolean = (
+       (tr1.sym == tr2.sym)
+    && !isDifferentType(tr1.pre, tr2.pre)
+  )
+  private def isSameTypeConstructor(tp1: Type, tp2: Type): Boolean = (
+       tp1.isInstanceOf[TypeRef]
+    && tp2.isInstanceOf[TypeRef]
+    && isSameTypeConstructor(tp1.asInstanceOf[TypeRef], tp2.asInstanceOf[TypeRef])
+  )
+
+  /** Do `tp1` and `tp2` denote equivalent types? */
+  def isSameType(tp1: Type, tp2: Type): Boolean = try {
+    if (Statistics.canEnable) Statistics.incCounter(sametypeCount)
+    subsametypeRecursions += 1
+    //OPT cutdown on Function0 allocation
+    //was:
+    //    undoLog undoUnless {
+    //      isSameType1(tp1, tp2)
+    //    }
+
+    val before = undoLog.log
+    var result = false
+    try {
+      result = isSameType1(tp1, tp2)
+    }
+    finally if (!result) undoLog.undoTo(before)
+    result
+  }
+  finally {
+    subsametypeRecursions -= 1
+    // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+    // it doesn't help to keep separate recursion counts for the three methods that now share it
+    // if (subsametypeRecursions == 0) undoLog.clear()
+  }
+
+  // @pre: at least one argument has annotations
+  private def sameAnnotatedTypes(tp1: Type, tp2: Type) = (
+       annotationsConform(tp1, tp2)
+    && annotationsConform(tp2, tp1)
+    && (tp1.withoutAnnotations =:= tp2.withoutAnnotations)
+  )
+  // We flush out any AnnotatedTypes before calling isSameType2 because
+  // unlike most other subclasses of Type, we have to allow for equivalence of any
+  // combination of { tp1, tp2 } { is, is not } an AnnotatedType - this is because the
+  // logic of "annotationsConform" is arbitrary and unknown.
+  private def isSameType1(tp1: Type, tp2: Type): Boolean = typeRelationPreCheck(tp1, tp2) match {
+    case state if state.isKnown                                  => state.booleanValue
+    case _ if typeHasAnnotations(tp1) || typeHasAnnotations(tp2) => sameAnnotatedTypes(tp1, tp2)
+    case _                                                       => isSameType2(tp1, tp2)
+  }
+
+  private def isSameHKTypes(tp1: Type, tp2: Type) = (
+       tp1.isHigherKinded
+    && tp2.isHigherKinded
+    && (tp1.normalize =:= tp2.normalize)
+  )
+  private def isSameTypeRef(tr1: TypeRef, tr2: TypeRef) = (
+       equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre)
+    && (isSameHKTypes(tr1, tr2) || isSameTypes(tr1.args, tr2.args))
+  )
+
+  private def isSameSingletonType(tp1: SingletonType, tp2: SingletonType): Boolean = {
+    // We don't use dealiasWiden here because we are looking for the SAME type,
+    // and widening leads to a less specific type. The logic is along the lines of
+    // dealiasAndFollowUnderlyingAsLongAsTheTypeIsEquivalent. This method is only
+    // called after a surface comparison has failed, so if chaseDealiasedUnderlying
+    // does not produce a type other than tp1 and tp2, return false.
+    @tailrec def chaseDealiasedUnderlying(tp: Type): Type = tp.underlying.dealias match {
+      case next: SingletonType if tp ne next => chaseDealiasedUnderlying(next)
+      case _                                 => tp
+    }
+    val origin1 = chaseDealiasedUnderlying(tp1)
+    val origin2 = chaseDealiasedUnderlying(tp2)
+    ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
+  }
+
+  private def isSameMethodType(mt1: MethodType, mt2: MethodType) = (
+       isSameTypes(mt1.paramTypes, mt2.paramTypes)
+    && (mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params))
+    && (mt1.isImplicit == mt2.isImplicit)
+  )
+
+  private def equalTypeParamsAndResult(tparams1: List[Symbol], res1: Type, tparams2: List[Symbol], res2: Type) = {
+    def subst(info: Type) = info.substSym(tparams2, tparams1)
+    // corresponds does not check length of two sequences before checking the predicate,
+    // but SubstMap assumes it has been checked (SI-2956)
+    (     sameLength(tparams1, tparams2)
+      && (tparams1 corresponds tparams2)((p1, p2) => methodHigherOrderTypeParamsSameVariance(p1, p2) && p1.info =:= subst(p2.info))
+      && (res1 =:= subst(res2))
+    )
+  }
+
+  // SI-2066 This prevents overrides with incompatible variance in higher order type parameters.
+  private def methodHigherOrderTypeParamsSameVariance(sym1: Symbol, sym2: Symbol) = {
+    def ignoreVariance(sym: Symbol) = !(sym.isHigherOrderTypeParameter && sym.logicallyEnclosingMember.isMethod)
+    !settings.isScala211 || ignoreVariance(sym1) || ignoreVariance(sym2) || sym1.variance == sym2.variance
+  }
+
+  private def methodHigherOrderTypeParamsSubVariance(low: Symbol, high: Symbol) =
+    !settings.isScala211 || methodHigherOrderTypeParamsSameVariance(low, high) || low.variance.isInvariant
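+
+  // Illustrative sketch (SI-2066, hypothetical A/B/m/F): with 2.11 source level in effect,
+  // an override such as
+  //   trait A { def m[F[+_]]: Unit }
+  //   trait B extends A { override def m[F[-_]]: Unit }
+  // no longer type-checks, because the higher-order type parameter F flips variance
+  // between the two signatures.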
+
+  def isSameType2(tp1: Type, tp2: Type): Boolean = {
+    def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSameType(lhs, rhs)
+
+    /*  Here we highlight those unfortunate type-like constructs which
+     *  are hidden bundles of mutable state, cruising the type system picking
+     *  up any type constraints naive enough to get into their hot rods.
+     */
+    def mutateNonTypeConstructs(lhs: Type, rhs: Type) = lhs match {
+      case BoundedWildcardType(bounds)         => bounds containsType rhs
+      case tv @ TypeVar(_, _)                  => tv.registerTypeEquality(rhs, typeVarLHS = lhs eq tp1)
+      case TypeRef(tv @ TypeVar(_, _), sym, _) => tv.registerTypeSelection(sym, rhs)
+      case _                                   => false
+    }
+    /*  SingletonType receives this additional scrutiny because there are
+     *  a variety of Types which must be treated as equivalent even if they
+     *  arrive in different guises. For instance, object Foo in the following
+     *  might appear in (at least) the four given below.
+     *
+     *    package pkg { object Foo ; type Bar = Foo.type }
+     *
+     *  ModuleClassTypeRef(pkg.type, Foo: ModuleClassSymbol, Nil)
+     *  ThisType(Foo: ModuleClassSymbol)
+     *  SingleType(pkg.type, Foo: ModuleSymbol)
+     *  AliasTypeRef(NoPrefix, sym: AliasSymbol, Nil) where sym.info is one of the above
+     */
+    def sameSingletonType = tp1 match {
+      case tp1: SingletonType => tp2 match {
+        case tp2: SingletonType => isSameSingletonType(tp1, tp2)
+        case _                  => false
+      }
+      case _ => false
+    }
+
+    /*  Those false cases certainly are ugly. There's a proposed SIP to deuglify it.
+     *    https://docs.google.com/a/improving.org/document/d/1onPrzSqyDpHScc9PS_hpxJwa3FlPtthxw-bAuuEe8uA
+     */
+    def sameTypeAndSameCaseClass = tp1 match {
+      case tp1: TypeRef               => tp2 match { case tp2: TypeRef               => isSameTypeRef(tp1, tp2)                              ; case _ => false }
+      case tp1: MethodType            => tp2 match { case tp2: MethodType            => isSameMethodType(tp1, tp2)                           ; case _ => false }
+      case RefinedType(ps1, decls1)   => tp2 match { case RefinedType(ps2, decls2)   => isSameTypes(ps1, ps2) && (decls1 isSameScope decls2) ; case _ => false }
+      case SingleType(pre1, sym1)     => tp2 match { case SingleType(pre2, sym2)     => equalSymsAndPrefixes(sym1, pre1, sym2, pre2)         ; case _ => false }
+      case PolyType(ps1, res1)        => tp2 match { case PolyType(ps2, res2)        => equalTypeParamsAndResult(ps1, res1, ps2, res2)       ; case _ => false }
+      case ExistentialType(qs1, res1) => tp2 match { case ExistentialType(qs2, res2) => equalTypeParamsAndResult(qs1, res1, qs2, res2)       ; case _ => false }
+      case ThisType(sym1)             => tp2 match { case ThisType(sym2)             => sym1 == sym2                                         ; case _ => false }
+      case ConstantType(c1)           => tp2 match { case ConstantType(c2)           => c1 == c2                                             ; case _ => false }
+      case NullaryMethodType(res1)    => tp2 match { case NullaryMethodType(res2)    => res1 =:= res2                                        ; case _ => false }
+      case TypeBounds(lo1, hi1)       => tp2 match { case TypeBounds(lo2, hi2)       => lo1 =:= lo2 && hi1 =:= hi2                           ; case _ => false }
+      case _                          => false
+    }
+
+    (    sameTypeAndSameCaseClass
+      || sameSingletonType
+      || mutateNonTypeConstructs(tp1, tp2)
+      || mutateNonTypeConstructs(tp2, tp1)
+      || retry(normalizePlus(tp1), normalizePlus(tp2))
+    )
+  }
+
+  def isSubType(tp1: Type, tp2: Type, depth: Depth = Depth.AnyDepth): Boolean = try {
+    subsametypeRecursions += 1
+
+    //OPT cutdown on Function0 allocation
+    //was:
+    //    undoLog undoUnless { // if subtype test fails, it should not affect constraints on typevars
+    //      if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
+    //        val p = new SubTypePair(tp1, tp2)
+    //        if (pendingSubTypes(p))
+    //          false
+    //        else
+    //          try {
+    //            pendingSubTypes += p
+    //            isSubType2(tp1, tp2, depth)
+    //          } finally {
+    //            pendingSubTypes -= p
+    //          }
+    //      } else {
+    //        isSubType2(tp1, tp2, depth)
+    //      }
+    //    }
+
+    val before = undoLog.log
+    var result = false
+
+    try result = { // if subtype test fails, it should not affect constraints on typevars
+      if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
+        val p = new SubTypePair(tp1, tp2)
+        if (pendingSubTypes(p))
+          false // see neg/t8146-no-finitary*
+        else
+          try {
+            pendingSubTypes += p
+            isSubType1(tp1, tp2, depth)
+          } finally {
+            pendingSubTypes -= p
+          }
+      } else {
+        isSubType1(tp1, tp2, depth)
+      }
+    } finally if (!result) undoLog.undoTo(before)
+
+    result
+  } finally {
+    subsametypeRecursions -= 1
+    // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+    // it doesn't help to keep separate recursion counts for the three methods that now share it
+    // if (subsametypeRecursions == 0) undoLog.clear()
+  }
+
+  /** Check whether the subtype or type equivalence relationship
+   *  between the arguments is predetermined. Returns a tri-state
+   *  value: True means the arguments are always sub/same types,
+   *  False means they never are, and Unknown means the caller
+   *  will have to figure things out.
+   */
+  private def typeRelationPreCheck(tp1: Type, tp2: Type): TriState = {
+    def isTrue = (
+         (tp1 eq tp2)
+      || isErrorOrWildcard(tp1)
+      || isErrorOrWildcard(tp2)
+      || (tp1 eq NoPrefix) && tp2.typeSymbol.isPackageClass // !! I do not see how this would be warranted by the spec
+      || (tp2 eq NoPrefix) && tp1.typeSymbol.isPackageClass // !! I do not see how this would be warranted by the spec
+    )
+    // isFalse, assuming !isTrue
+    def isFalse = (
+         (tp1 eq NoType)
+      || (tp2 eq NoType)
+      || (tp1 eq NoPrefix)
+      || (tp2 eq NoPrefix)
+    )
+
+    if (isTrue) TriState.True
+    else if (isFalse) TriState.False
+    else TriState.Unknown
+  }
+
+  private def isSubType1(tp1: Type, tp2: Type, depth: Depth): Boolean = typeRelationPreCheck(tp1, tp2) match {
+    case state if state.isKnown                                  => state.booleanValue
+    case _ if typeHasAnnotations(tp1) || typeHasAnnotations(tp2) => annotationsConform(tp1, tp2) && (tp1.withoutAnnotations <:< tp2.withoutAnnotations)
+    case _                                                       => isSubType2(tp1, tp2, depth)
+  }
+
+  private def isPolySubType(tp1: PolyType, tp2: PolyType): Boolean = {
+    val PolyType(tparams1, res1) = tp1
+    val PolyType(tparams2, res2) = tp2
+
+    sameLength(tparams1, tparams2) && {
+      // fast-path: polymorphic method type -- type params cannot be captured
+      val isMethod = tparams1.head.owner.isMethod
+      //@M for an example of why we need to generate fresh symbols otherwise, see neg/tcpoly_ticket2101.scala
+      val substitutes = if (isMethod) tparams1 else cloneSymbols(tparams1)
+      def sub1(tp: Type) = if (isMethod) tp else tp.substSym(tparams1, substitutes)
+      def sub2(tp: Type) = tp.substSym(tparams2, substitutes)
+      def cmp(p1: Symbol, p2: Symbol) = (
+            methodHigherOrderTypeParamsSubVariance(p2, p1)
+         && sub2(p2.info) <:< sub1(p1.info)
+      )
+
+      (tparams1 corresponds tparams2)(cmp) && (sub1(res1) <:< sub2(res2))
+    }
+  }
+  // This is looking for situations such as B.this.x.type <:< B.super.x.type.
+  // If it's a ThisType on the lhs and a SuperType on the right, and they originate
+  // in the same class, and the 'x' in the ThisType has in its override chain
+  // the 'x' in the SuperType, then the types conform.
+  private def isThisAndSuperSubtype(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
+    case (SingleType(ThisType(lpre), v1), SingleType(SuperType(ThisType(rpre), _), v2)) => (lpre == rpre) && (v1.overrideChain contains v2)
+    case _                                                                              => false
+  }
+
+  // @assume tp1.isHigherKinded || tp2.isHigherKinded
+  def isHKSubType(tp1: Type, tp2: Type, depth: Depth): Boolean = {
+    def isSub(ntp1: Type, ntp2: Type) = (ntp1.withoutAnnotations, ntp2.withoutAnnotations) match {
+      case (TypeRef(_, AnyClass, _), _)                                     => false                    // avoid some warnings when Nothing/Any are on the other side
+      case (_, TypeRef(_, NothingClass, _))                                 => false
+      case (pt1: PolyType, pt2: PolyType)                                   => isPolySubType(pt1, pt2)  // @assume both .isHigherKinded (both normalized to PolyType)
+      case (_: PolyType, MethodType(ps, _)) if ps exists (_.tpe.isWildcard) => false                    // don't warn on HasMethodMatching on right hand side
+      case _                                                                =>                          // @assume !(both .isHigherKinded) thus cannot be subtypes
+        def tp_s(tp: Type): String = f"$tp%-20s ${util.shortClassOfInstance(tp)}%s"
+        devWarning(s"HK subtype check on $tp1 and $tp2, but both don't normalize to polytypes:\n  tp1=${tp_s(ntp1)}\n  tp2=${tp_s(ntp2)}")
+        false
+    }
+
+    (    tp1.typeSymbol == NothingClass       // @M Nothing is subtype of every well-kinded type
+      || tp2.typeSymbol == AnyClass           // @M Any is supertype of every well-kinded type (@PP: is it? What about continuations plugin?)
+      || isSub(tp1.normalize, tp2.normalize) && annotationsConform(tp1, tp2)  // @M! normalize reduces higher-kinded case to PolyType's
+    )
+  }
+
+  /** Does type `tp1` conform to `tp2`? */
+  private def isSubType2(tp1: Type, tp2: Type, depth: Depth): Boolean = {
+    def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSubType(lhs, rhs, depth)
+
+    if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2))
+      return (tp1 =:= tp2) || isThisAndSuperSubtype(tp1, tp2) || retry(tp1.underlying, tp2)
+
+    if (tp1.isHigherKinded || tp2.isHigherKinded)
+      return isHKSubType(tp1, tp2, depth)
+
+    /* First try, on the right:
+     *   - unwrap Annotated types, BoundedWildcardTypes,
+     *   - bind TypeVars on the right, if the lhs is neither Annotated nor BoundedWildcard,
+     *   - handle common cases for first-kind TypeRefs on both sides as a fast path.
+     */
+    def firstTry = tp2 match {
+      // fast path: two typerefs, none of them HK
+      case tr2: TypeRef =>
+        tp1 match {
+          case tr1: TypeRef =>
+            // TODO - dedicate a method to TypeRef/TypeRef subtyping.
+            // These typerefs are pattern matched up and down far more
+            // than is necessary.
+            val sym1 = tr1.sym
+            val sym2 = tr2.sym
+            val pre1 = tr1.pre
+            val pre2 = tr2.pre
+            (((if (sym1 == sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth)
+            else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass &&
+              (isUnifiable(pre1, pre2) ||
+                isSameSpecializedSkolem(sym1, sym2, pre1, pre2) ||
+                sym2.isAbstractType && isSubPre(pre1, pre2, sym2)))) &&
+              isSubArgs(tr1.args, tr2.args, sym1.typeParams, depth))
+              ||
+              sym2.isClass && {
+                val base = tr1 baseType sym2
+                (base ne tr1) && isSubType(base, tr2, depth)
+              }
+              ||
+              thirdTryRef(tr1, tr2))
+          case _ =>
+            secondTry
+        }
+      case AnnotatedType(_, _) =>
+        isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
+          annotationsConform(tp1, tp2)
+      case BoundedWildcardType(bounds) =>
+        isSubType(tp1, bounds.hi, depth)
+      case tv2 @ TypeVar(_, constr2) =>
+        tp1 match {
+          case AnnotatedType(_, _) | BoundedWildcardType(_) =>
+            secondTry
+          case _ =>
+            tv2.registerBound(tp1, isLowerBound = true)
+        }
+      case _ =>
+        secondTry
+    }
+
+    /* Second try, on the left:
+     *   - unwrap AnnotatedTypes, BoundedWildcardTypes,
+     *   - bind typevars,
+     *   - handle existential types by skolemization.
+     */
+    def secondTry = tp1 match {
+      case AnnotatedType(_, _) =>
+        isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
+          annotationsConform(tp1, tp2)
+      case BoundedWildcardType(bounds) =>
+        isSubType(tp1.bounds.lo, tp2, depth)
+      case tv @ TypeVar(_,_) =>
+        tv.registerBound(tp2, isLowerBound = false)
+      case ExistentialType(_, _) =>
+        try {
+          skolemizationLevel += 1
+          isSubType(tp1.skolemizeExistential, tp2, depth)
+        } finally {
+          skolemizationLevel -= 1
+        }
+      case _ =>
+        thirdTry
+    }
+
+    def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = {
+      val sym2 = tp2.sym
+      def retry(lhs: Type, rhs: Type)   = isSubType(lhs, rhs, depth)
+      def abstractTypeOnRight(lo: Type) = isDifferentTypeConstructor(tp2, lo) && retry(tp1, lo)
+      def classOnRight                  = (
+        if (isRawType(tp2)) retry(tp1, rawToExistential(tp2))
+        else if (sym2.isRefinementClass) retry(tp1, sym2.info)
+        else fourthTry
+      )
+      sym2 match {
+        case SingletonClass                   => tp1.isStable || fourthTry
+        case _: ClassSymbol                   => classOnRight
+        case _: TypeSymbol if sym2.isDeferred => abstractTypeOnRight(tp2.bounds.lo) || fourthTry
+        case _: TypeSymbol                    => retry(tp1.normalize, tp2.normalize)
+        case _                                => fourthTry
+      }
+    }
+
+    /* Third try, on the right:
+     *   - decompose refined types.
+     *   - handle typerefs and existentials.
+     *   - handle left+right method types, polytypes, typebounds
+     */
+    def thirdTry = tp2 match {
+      case tr2: TypeRef =>
+        thirdTryRef(tp1, tr2)
+      case rt2: RefinedType =>
+        (rt2.parents forall (isSubType(tp1, _, depth))) &&
+          (rt2.decls forall (specializesSym(tp1, _, depth)))
+      case et2: ExistentialType =>
+        et2.withTypeVars(isSubType(tp1, _, depth), depth) || fourthTry
+      case mt2: MethodType =>
+        tp1 match {
+          case mt1 @ MethodType(params1, res1) =>
+            val params2 = mt2.params
+            val res2 = mt2.resultType
+            (sameLength(params1, params2) &&
+              mt1.isImplicit == mt2.isImplicit &&
+              matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
+              isSubType(res1.substSym(params1, params2), res2, depth))
+          // TODO: if mt1.params.isEmpty, consider NullaryMethodType?
+          case _ =>
+            false
+        }
+      case pt2 @ NullaryMethodType(_) =>
+        tp1 match {
+          // TODO: consider MethodType mt for which mt.params.isEmpty??
+          case pt1 @ NullaryMethodType(_) =>
+            isSubType(pt1.resultType, pt2.resultType, depth)
+          case _ =>
+            false
+        }
+      case TypeBounds(lo2, hi2) =>
+        tp1 match {
+          case TypeBounds(lo1, hi1) =>
+            isSubType(lo2, lo1, depth) && isSubType(hi1, hi2, depth)
+          case _ =>
+            false
+        }
+      case _ =>
+        fourthTry
+    }
+
+    /* Fourth try, on the left:
+     *   - handle typerefs, refined types, and singleton types.
+     */
+    def fourthTry = {
+      def retry(lhs: Type, rhs: Type)  = isSubType(lhs, rhs, depth)
+      def abstractTypeOnLeft(hi: Type) = isDifferentTypeConstructor(tp1, hi) && retry(hi, tp2)
+
+      tp1 match {
+        case tr1 @ TypeRef(pre1, sym1, _) =>
+          def nullOnLeft = tp2 match {
+            case TypeRef(_, sym2, _) => sym1 isBottomSubClass sym2
+            case _                   => isSingleType(tp2) && retry(tp1, tp2.widen)
+          }
+          def moduleOnLeft = tp2 match {
+            case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1.sourceModule, pre1, sym2, pre2)
+            case _                      => false
+          }
+          def classOnLeft = (
+            if (isRawType(tp1)) retry(rawToExistential(tp1), tp2)
+            else if (sym1.isModuleClass) moduleOnLeft
+            else sym1.isRefinementClass && retry(sym1.info, tp2)
+          )
+          sym1 match {
+            case NothingClass                     => true
+            case NullClass                        => nullOnLeft
+            case _: ClassSymbol                   => classOnLeft
+            case _: TypeSymbol if sym1.isDeferred => abstractTypeOnLeft(tp1.bounds.hi)
+            case _: TypeSymbol                    => retry(tp1.normalize, tp2.normalize)
+            case _                                => false
+          }
+        case RefinedType(parents, _) => parents exists (retry(_, tp2))
+        case _: SingletonType        => retry(tp1.underlying, tp2)
+        case _                       => false
+      }
+    }
+
+    firstTry
+  }
+
+
+  def isWeakSubType(tp1: Type, tp2: Type) =
+    tp1.dealiasWiden match {
+      case TypeRef(_, sym1, _) if isNumericValueClass(sym1) =>
+        tp2.deconst.dealias match {
+          case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
+            isNumericSubClass(sym1, sym2)
+          case tv2 @ TypeVar(_, _) =>
+            tv2.registerBound(tp1, isLowerBound = true, isNumericBound = true)
+          case _ =>
+            isSubType(tp1, tp2)
+        }
+      case tv1 @ TypeVar(_, _) =>
+        tp2.deconst.dealias match {
+          case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
+            tv1.registerBound(tp2, isLowerBound = false, isNumericBound = true)
+          case _ =>
+            isSubType(tp1, tp2)
+        }
+      case _ =>
+        isSubType(tp1, tp2)
+    }
+
+  def isNumericSubType(tp1: Type, tp2: Type) = (
+    isNumericSubClass(primitiveBaseClass(tp1.dealiasWiden), primitiveBaseClass(tp2.dealias))
+   )
+
+  /** If the given type has a primitive class among its base classes,
+   *  the symbol of that class. Otherwise, NoSymbol.
+   */
+  private def primitiveBaseClass(tp: Type): Symbol = {
+    @tailrec def loop(bases: List[Symbol]): Symbol = bases match {
+      case Nil     => NoSymbol
+      case x :: xs => if (isPrimitiveValueClass(x)) x else loop(xs)
+    }
+    loop(tp.baseClasses)
+  }
+}
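
The weak conformance relation implemented by isWeakSubType above is observable through the public reflection API as weak_<:<. A minimal sketch, assuming scala-reflect is on the classpath (the object name is illustrative):

    import scala.reflect.runtime.universe._

    object WeakConformanceDemo extends App {
      // Int is not a subtype of Long, but it weakly conforms via numeric widening.
      println(typeOf[Int] <:< typeOf[Long])            // false
      println(typeOf[Int] weak_<:< typeOf[Long])       // true
      // For non-numeric types, weak conformance falls back to ordinary subtyping.
      println(typeOf[Nothing] weak_<:< typeOf[String]) // true
    }
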
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
new file mode 100644
index 0000000..564cbb1
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
@@ -0,0 +1,268 @@
+package scala
+package reflect
+package internal
+package tpe
+
+import scala.collection.{ generic }
+import generic.Clearable
+
+private[internal] trait TypeConstraints {
+  self: SymbolTable =>
+  import definitions._
+
+  /** A log of type variables with their original constraints. Used in order
+    *  to undo constraints in the case of isSubType/isSameType failure.
+    */
+  private lazy val _undoLog = new UndoLog
+  def undoLog = _undoLog
+
+  class UndoLog extends Clearable {
+    private type UndoPairs = List[(TypeVar, TypeConstraint)]
+    //OPT this method is public so we can do `manual inlining`
+    var log: UndoPairs = List()
+
+    // register with the auto-clearing cache manager
+    perRunCaches.recordCache(this)
+
+    /** Undo all changes to constraints on type variables up to `limit`. */
+    //OPT this method is public so we can do `manual inlining`
+    def undoTo(limit: UndoPairs) {
+      assertCorrectThread()
+      while ((log ne limit) && log.nonEmpty) {
+        val (tv, constr) = log.head
+        tv.constr = constr
+        log = log.tail
+      }
+    }
+
+    /** No sync necessary, because record should only
+      *  be called from within an undo or undoUnless block,
+      *  which is already synchronized.
+      */
+    private[reflect] def record(tv: TypeVar) = {
+      log ::= ((tv, tv.constr.cloneInternal))
+    }
+
+    def clear() {
+      if (settings.debug)
+        self.log("Clearing " + log.size + " entries from the undoLog.")
+      log = Nil
+    }
+
+    // `block` should not affect constraints on typevars
+    def undo[T](block: => T): T = {
+      val before = log
+      try block
+      finally undoTo(before)
+    }
+  }
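
The record/undoTo/undo protocol above is easiest to see on a plain data structure. The following is an illustrative analogue only, not the compiler's UndoLog: it records previous values, runs a speculative block, and rolls the state back afterwards.

    import scala.collection.mutable

    object UndoPatternDemo extends App {
      var log: List[(String, Int)] = Nil                // (key, previous value)
      val state = mutable.Map("x" -> 0)

      def record(key: String): Unit = log ::= ((key, state(key)))

      def undoTo(limit: List[(String, Int)]): Unit =
        while ((log ne limit) && log.nonEmpty) {
          val (k, v) = log.head
          state(k) = v
          log = log.tail
        }

      def undo[T](block: => T): T = {
        val before = log
        try block finally undoTo(before)                // roll back regardless of outcome
      }

      undo { record("x"); state("x") = 42 }             // speculative change...
      println(state("x"))                               // ...is rolled back: prints 0
    }
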
+
+  /** @PP: Unable to see why these apparently constant types should need vals
+    *  in every TypeConstraint, so I lifted them out.
+    */
+  private lazy val numericLoBound = IntTpe
+  private lazy val numericHiBound = intersectionType(List(ByteTpe, CharTpe), ScalaPackageClass)
+
+  /** A class expressing upper and lower bounds constraints of type variables,
+    * as well as their instantiations.
+    */
+  class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type, avoidWidening0: Boolean = false) {
+    def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
+    def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi))
+    def this() = this(List(), List())
+
+    /*  Syncnote: Type constraints are assumed to be used from only one
+     *  thread. They are not exposed in api.Types and are used only locally
+     *  in operations that are exposed from types. Hence, no syncing of any
+     *  variables should be necessary.
+     */
+
+    /** Guard these lists against AnyClass and NothingClass appearing,
+      *  else loBounds.isEmpty will have different results for an empty
+      *  constraint and one with Nothing as a lower bound.  [Actually
+      *  guarding addLoBound/addHiBound somehow broke raw types so it
+      *  only guards against being created with them.]
+      */
+    private var lobounds = lo0 filterNot typeIsNothing
+    private var hibounds = hi0 filterNot typeIsAny
+    private var numlo = numlo0
+    private var numhi = numhi0
+    private var avoidWidening = avoidWidening0
+
+    def loBounds: List[Type] = if (numlo == NoType) lobounds else numlo :: lobounds
+    def hiBounds: List[Type] = if (numhi == NoType) hibounds else numhi :: hibounds
+    def avoidWiden: Boolean = avoidWidening
+
+    def addLoBound(tp: Type, isNumericBound: Boolean = false) {
+      // For some reason which is still a bit fuzzy, we must let Nothing through as
+      // a lower bound despite the fact that Nothing is always a lower bound.  My current
+      // supposition is that the side-effecting type constraint accumulation mechanism
+      // depends on these subtype tests being performed to make forward progress when
+      // there are mutually recursive type vars.
+      // See pos/t6367 and pos/t6499 for the competing test cases.
+      val mustConsider = tp.typeSymbol match {
+        case NothingClass => true
+        case _            => !(lobounds contains tp)
+      }
+      if (mustConsider) {
+        if (isNumericBound && isNumericValueType(tp)) {
+          if (numlo == NoType || isNumericSubType(numlo, tp))
+            numlo = tp
+          else if (!isNumericSubType(tp, numlo))
+            numlo = numericLoBound
+        }
+        else lobounds ::= tp
+      }
+    }
+
+    def checkWidening(tp: Type) {
+      if (tp.isStable) avoidWidening = true
+      else tp match {
+        case HasTypeMember(_, _) => avoidWidening = true
+        case _ =>
+      }
+    }
+
+    def addHiBound(tp: Type, isNumericBound: Boolean = false) {
+      // My current test case only demonstrates the need to let Nothing through as
+      // a lower bound, but I suspect the situation is symmetrical.
+      val mustConsider = tp.typeSymbol match {
+        case AnyClass => true
+        case _        => !(hibounds contains tp)
+      }
+      if (mustConsider) {
+        checkWidening(tp)
+        if (isNumericBound && isNumericValueType(tp)) {
+          if (numhi == NoType || isNumericSubType(tp, numhi))
+            numhi = tp
+          else if (!isNumericSubType(numhi, tp))
+            numhi = numericHiBound
+        }
+        else hibounds ::= tp
+      }
+    }
+
+    def instWithinBounds = instValid && isWithinBounds(inst)
+
+    def isWithinBounds(tp: Type): Boolean = (
+         lobounds.forall(_ <:< tp)
+      && hibounds.forall(tp <:< _)
+      && (numlo == NoType || (numlo weak_<:< tp))
+      && (numhi == NoType || (tp weak_<:< numhi))
+    )
+
+    var inst: Type = NoType // @M reduce visibility?
+
+    def instValid = (inst ne null) && (inst ne NoType)
+
+    def cloneInternal = {
+      val tc = new TypeConstraint(lobounds, hibounds, numlo, numhi, avoidWidening)
+      tc.inst = inst
+      tc
+    }
+
+    override def toString = {
+      val boundsStr = {
+        val lo = loBounds filterNot typeIsNothing match {
+          case Nil       => ""
+          case tp :: Nil => " >: " + tp
+          case tps       => tps.mkString(" >: (", ", ", ")")
+        }
+        val hi = hiBounds filterNot typeIsAny match {
+          case Nil       => ""
+          case tp :: Nil => " <: " + tp
+          case tps       => tps.mkString(" <: (", ", ", ")")
+        }
+        lo + hi
+      }
+      if (inst eq NoType) boundsStr
+      else boundsStr + " _= " + inst.safeToString
+    }
+  }
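
As a rough sketch of how these bounds accumulate, the following would only compile inside a SymbolTable cake (for example a scala.tools.nsc.Global instance), where TypeConstraint and the IntTpe/LongTpe/DoubleTpe definitions are in scope; it is not standalone code.

    // Sketch only: TypeConstraint is compiler-internal, assumed to be used
    // from within a SymbolTable cake with definitions._ imported.
    val constr = new TypeConstraint()
    constr.addLoBound(IntTpe,  isNumericBound = true)   // numeric lower bound: Int
    constr.addHiBound(LongTpe, isNumericBound = true)   // numeric upper bound: Long
    constr.isWithinBounds(IntTpe)     // true:  Int weak_<:< Int and Int weak_<:< Long
    constr.isWithinBounds(DoubleTpe)  // false: Double is not weak_<:< Long
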
+
+  /** Solve the constraints collected in the type variables `tvars`.
+    *
+    *  @param tvars      All type variables to be instantiated.
+    *  @param tparams    The type parameters corresponding to `tvars`
+    *  @param variances  The variances of type parameters; need to reverse
+    *                    solution direction for all contravariant variables.
+    *  @param upper      When `true`, search for the max solution, else the min.
+    */
+  def solve(tvars: List[TypeVar], tparams: List[Symbol], variances: List[Variance], upper: Boolean, depth: Depth): Boolean = {
+
+    def solveOne(tvar: TypeVar, tparam: Symbol, variance: Variance) {
+      if (tvar.constr.inst == NoType) {
+        val up = if (variance.isContravariant) !upper else upper
+        tvar.constr.inst = null
+        val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo
+        //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound))
+        var cyclic = bound contains tparam
+        foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => {
+          val ok = (tparam2 != tparam) && (
+            (bound contains tparam2)
+              ||  up && (tparam2.info.bounds.lo =:= tparam.tpeHK)
+              || !up && (tparam2.info.bounds.hi =:= tparam.tpeHK)
+            )
+          if (ok) {
+            if (tvar2.constr.inst eq null) cyclic = true
+            solveOne(tvar2, tparam2, variance2)
+          }
+        })
+        if (!cyclic) {
+          if (up) {
+            if (bound.typeSymbol != AnyClass) {
+              debuglog(s"$tvar addHiBound $bound.instantiateTypeParams($tparams, $tvars)")
+              tvar addHiBound bound.instantiateTypeParams(tparams, tvars)
+            }
+            for (tparam2 <- tparams)
+              tparam2.info.bounds.lo.dealias match {
+                case TypeRef(_, `tparam`, _) =>
+                  debuglog(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
+                  tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
+                case _ =>
+              }
+          } else {
+            if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) {
+              debuglog(s"$tvar addLoBound $bound.instantiateTypeParams($tparams, $tvars)")
+              tvar addLoBound bound.instantiateTypeParams(tparams, tvars)
+            }
+            for (tparam2 <- tparams)
+              tparam2.info.bounds.hi.dealias match {
+                case TypeRef(_, `tparam`, _) =>
+                  debuglog(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
+                  tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
+                case _ =>
+              }
+          }
+        }
+        tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar
+
+        //println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen)))
+        val newInst = (
+          if (up) {
+            if (depth.isAnyDepth) glb(tvar.constr.hiBounds)
+            else glb(tvar.constr.hiBounds, depth)
+          }
+          else {
+            if (depth.isAnyDepth) lub(tvar.constr.loBounds)
+            else lub(tvar.constr.loBounds, depth)
+          }
+          )
+
+        debuglog(s"$tvar setInst $newInst")
+        tvar setInst newInst
+        //Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG
+      }
+    }
+
+    // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
+    foreach3(tvars, tparams, variances)(solveOne)
+
+    def logBounds(tv: TypeVar) = log {
+      val what = if (!tv.instValid) "is invalid" else s"does not conform to bounds: ${tv.constr}"
+      s"Inferred type for ${tv.originString} (${tv.inst}) $what"
+    }
+
+    tvars forall (tv => tv.instWithinBounds || util.andFalse(logBounds(tv)))
+  }
+}
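
The user-visible effect of solve is ordinary type-argument inference: a type variable is instantiated to the lub of its lower bounds (or the glb of its upper bounds when solving upward). A small self-contained illustration (names are illustrative):

    object SolveEffectDemo extends App {
      def pair[A](x: A, y: A): List[A] = List(x, y)
      val xs: List[Any] = pair(1, "one")   // A has lower bounds Int and String; solved to their lub, Any
      def id[A <: AnyVal](x: A): A = x
      val n: Int = id(42)                  // A is bounded above by AnyVal and solved to Int
      println((xs, n))
    }
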
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
new file mode 100644
index 0000000..f06420d
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -0,0 +1,1173 @@
+package scala
+package reflect
+package internal
+package tpe
+
+import scala.collection.{ mutable, immutable }
+import Flags._
+import scala.annotation.tailrec
+import Variance._
+
+private[internal] trait TypeMaps {
+  self: SymbolTable =>
+  import definitions._
+
+  /** Normalize any type aliases within this type (@see Type#normalize).
+    *  Note that this depends very much on the call to "normalize", not "dealias",
+    *  so it no longer carries the too-stealthy name "deAlias".
+    */
+  object normalizeAliases extends TypeMap {
+    def apply(tp: Type): Type = mapOver(tp match {
+      case TypeRef(_, sym, _) if sym.isAliasType && tp.isHigherKinded => logResult(s"Normalized type alias function $tp")(tp.normalize)
+      case TypeRef(_, sym, _) if sym.isAliasType                      => tp.normalize
+      case tp                                                         => tp
+    })
+  }
+
+  /** Remove any occurrence of type <singleton> from this type and its parents */
+  object dropSingletonType extends TypeMap {
+    def apply(tp: Type): Type = {
+      tp match {
+        case TypeRef(_, SingletonClass, _) =>
+          AnyTpe
+        case tp1 @ RefinedType(parents, decls) =>
+          parents filter (_.typeSymbol != SingletonClass) match {
+            case Nil                       => AnyTpe
+            case p :: Nil if decls.isEmpty => mapOver(p)
+            case ps                        => mapOver(copyRefinedType(tp1, ps, decls))
+          }
+        case tp1 =>
+          mapOver(tp1)
+      }
+    }
+  }
+
+  /** Type with all top-level occurrences of abstract types replaced by their bounds */
+  object abstractTypesToBounds extends TypeMap {
+    def apply(tp: Type): Type = tp match {
+      case TypeRef(_, sym, _) if sym.isAliasType    => apply(tp.dealias)
+      case TypeRef(_, sym, _) if sym.isAbstractType => apply(tp.bounds.hi)
+      case rtp @ RefinedType(parents, decls)        => copyRefinedType(rtp, parents mapConserve this, decls)
+      case AnnotatedType(_, _)                      => mapOver(tp)
+      case _                                        => tp             // no recursion - top level only
+    }
+  }
+
+  // Set to true for A* => Seq[A]
+  //   (And it will only rewrite A* in method result types.)
+  //   This is the pre-existing behavior.
+  // Or false for Seq[A] => Seq[A]
+  //   (It will rewrite A* everywhere but method parameters.)
+  //   This is the specified behavior.
+  protected def etaExpandKeepsStar = false
+
+  /** Turn any T* types into Seq[T] except when
+    *  in method parameter position.
+    */
+  object dropIllegalStarTypes extends TypeMap {
+    def apply(tp: Type): Type = tp match {
+      case MethodType(params, restpe) =>
+        // Not mapping over params
+        val restpe1 = apply(restpe)
+        if (restpe eq restpe1) tp
+        else MethodType(params, restpe1)
+      case TypeRef(_, RepeatedParamClass, arg :: Nil) =>
+        seqType(arg)
+      case _ =>
+        if (etaExpandKeepsStar) tp else mapOver(tp)
+    }
+  }
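
At the source level, the T* to Seq[T] rewriting corresponds to what eta-expansion does with a repeated parameter: the expanded function takes a Seq. A small sketch of the observable behavior (names are illustrative):

    object StarTypeDemo extends App {
      def sum(xs: Int*): Int = xs.sum
      // Eta-expansion rewrites the repeated parameter type Int* to Seq[Int]:
      val f: Seq[Int] => Int = sum _
      println(f(Seq(1, 2, 3)))   // 6
    }
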
+
+  trait AnnotationFilter extends TypeMap {
+    def keepAnnotation(annot: AnnotationInfo): Boolean
+
+    override def mapOver(annot: AnnotationInfo) =
+      if (keepAnnotation(annot)) super.mapOver(annot)
+      else UnmappableAnnotation
+  }
+
+  trait KeepOnlyTypeConstraints extends AnnotationFilter {
+    // filter keeps only type constraint annotations
+    def keepAnnotation(annot: AnnotationInfo) = annot matches TypeConstraintClass
+  }
+
+  // todo. move these into scala.reflect.api
+
+  /** A prototype for mapping a function over all possible types
+    */
+  abstract class TypeMap(trackVariance: Boolean) extends (Type => Type) {
+    def this() = this(trackVariance = false)
+    def apply(tp: Type): Type
+
+    private[this] var _variance: Variance = if (trackVariance) Covariant else Invariant
+
+    def variance_=(x: Variance) = { assert(trackVariance, this) ; _variance = x }
+    def variance = _variance
+
+    /** Map this function over given type */
+    def mapOver(tp: Type): Type = tp match {
+      case tr @ TypeRef(pre, sym, args) =>
+        val pre1 = this(pre)
+        val args1 = (
+          if (trackVariance && args.nonEmpty && !variance.isInvariant && sym.typeParams.nonEmpty)
+            mapOverArgs(args, sym.typeParams)
+          else
+            args mapConserve this
+          )
+        if ((pre1 eq pre) && (args1 eq args)) tp
+        else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
+      case ThisType(_) => tp
+      case SingleType(pre, sym) =>
+        if (sym.isPackageClass) tp // short path
+        else {
+          val pre1 = this(pre)
+          if (pre1 eq pre) tp
+          else singleType(pre1, sym)
+        }
+      case MethodType(params, result) =>
+        val params1 = flipped(mapOver(params))
+        val result1 = this(result)
+        if ((params1 eq params) && (result1 eq result)) tp
+        else copyMethodType(tp, params1, result1.substSym(params, params1))
+      case PolyType(tparams, result) =>
+        val tparams1 = flipped(mapOver(tparams))
+        val result1 = this(result)
+        if ((tparams1 eq tparams) && (result1 eq result)) tp
+        else PolyType(tparams1, result1.substSym(tparams, tparams1))
+      case NullaryMethodType(result) =>
+        val result1 = this(result)
+        if (result1 eq result) tp
+        else NullaryMethodType(result1)
+      case ConstantType(_) => tp
+      case SuperType(thistp, supertp) =>
+        val thistp1 = this(thistp)
+        val supertp1 = this(supertp)
+        if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp
+        else SuperType(thistp1, supertp1)
+      case TypeBounds(lo, hi) =>
+        val lo1 = flipped(this(lo))
+        val hi1 = this(hi)
+        if ((lo1 eq lo) && (hi1 eq hi)) tp
+        else TypeBounds(lo1, hi1)
+      case BoundedWildcardType(bounds) =>
+        val bounds1 = this(bounds)
+        if (bounds1 eq bounds) tp
+        else BoundedWildcardType(bounds1.asInstanceOf[TypeBounds])
+      case rtp @ RefinedType(parents, decls) =>
+        val parents1 = parents mapConserve this
+        val decls1 = mapOver(decls)
+        copyRefinedType(rtp, parents1, decls1)
+      case ExistentialType(tparams, result) =>
+        val tparams1 = mapOver(tparams)
+        val result1 = this(result)
+        if ((tparams1 eq tparams) && (result1 eq result)) tp
+        else newExistentialType(tparams1, result1.substSym(tparams, tparams1))
+      case OverloadedType(pre, alts) =>
+        val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre)
+        if (pre1 eq pre) tp
+        else OverloadedType(pre1, alts)
+      case AntiPolyType(pre, args) =>
+        val pre1 = this(pre)
+        val args1 = args mapConserve this
+        if ((pre1 eq pre) && (args1 eq args)) tp
+        else AntiPolyType(pre1, args1)
+      case tv @ TypeVar(_, constr) =>
+        if (constr.instValid) this(constr.inst)
+        else tv.applyArgs(mapOverArgs(tv.typeArgs, tv.params))  //@M !args.isEmpty implies !typeParams.isEmpty
+      case AnnotatedType(annots, atp) =>
+        val annots1 = mapOverAnnotations(annots)
+        val atp1 = this(atp)
+        if ((annots1 eq annots) && (atp1 eq atp)) tp
+        else if (annots1.isEmpty) atp1
+        else AnnotatedType(annots1, atp1)
+      /*
+            case ErrorType => tp
+            case WildcardType => tp
+            case NoType => tp
+            case NoPrefix => tp
+            case ErasedSingleType(sym) => tp
+      */
+      case _ =>
+        tp
+      // throw new Error("mapOver inapplicable for " + tp);
+    }
+
+    def withVariance[T](v: Variance)(body: => T): T = {
+      val saved = variance
+      variance = v
+      try body finally variance = saved
+    }
+    @inline final def flipped[T](body: => T): T = {
+      if (trackVariance) variance = variance.flip
+      try body
+      finally if (trackVariance) variance = variance.flip
+    }
+    protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] = (
+      if (trackVariance)
+        map2Conserve(args, tparams)((arg, tparam) => withVariance(variance * tparam.variance)(this(arg)))
+      else
+        args mapConserve this
+      )
+    /** Applies this map to the symbol's info, setting variance = Invariant
+      *  if necessary when the symbol is an alias.
+      */
+    private def applyToSymbolInfo(sym: Symbol): Type = {
+      if (trackVariance && !variance.isInvariant && sym.isAliasType)
+        withVariance(Invariant)(this(sym.info))
+      else
+        this(sym.info)
+    }
+
+    /** Called by mapOver to determine whether the original symbols can
+      *  be returned, or whether they must be cloned.
+      */
+    protected def noChangeToSymbols(origSyms: List[Symbol]): Boolean = {
+      @tailrec def loop(syms: List[Symbol]): Boolean = syms match {
+        case Nil     => true
+        case x :: xs => (x.info eq applyToSymbolInfo(x)) && loop(xs)
+      }
+      loop(origSyms)
+    }
+
+    /** Map this function over given scope */
+    def mapOver(scope: Scope): Scope = {
+      val elems = scope.toList
+      val elems1 = mapOver(elems)
+      if (elems1 eq elems) scope
+      else newScopeWith(elems1: _*)
+    }
+
+    /** Map this function over given list of symbols */
+    def mapOver(origSyms: List[Symbol]): List[Symbol] = {
+      // fast path in case nothing changes due to map
+      if (noChangeToSymbols(origSyms)) origSyms
+      // map is not the identity --> do cloning properly
+      else cloneSymbolsAndModify(origSyms, TypeMap.this)
+    }
+
+    def mapOver(annot: AnnotationInfo): AnnotationInfo = {
+      val AnnotationInfo(atp, args, assocs) = annot
+      val atp1  = mapOver(atp)
+      val args1 = mapOverAnnotArgs(args)
+      // there is no need to rewrite assocs, as they are constants
+
+      if ((args eq args1) && (atp eq atp1)) annot
+      else if (args1.isEmpty && args.nonEmpty) UnmappableAnnotation  // some annotation arg was unmappable
+      else AnnotationInfo(atp1, args1, assocs) setPos annot.pos
+    }
+
+    def mapOverAnnotations(annots: List[AnnotationInfo]): List[AnnotationInfo] = {
+      val annots1 = annots mapConserve mapOver
+      if (annots1 eq annots) annots
+      else annots1 filterNot (_ eq UnmappableAnnotation)
+    }
+
+    /** Map over a set of annotation arguments.  If any
+      *  of the arguments cannot be mapped, then return Nil.  */
+    def mapOverAnnotArgs(args: List[Tree]): List[Tree] = {
+      val args1 = args mapConserve mapOver
+      if (args1 contains UnmappableTree) Nil
+      else args1
+    }
+
+    def mapOver(tree: Tree): Tree =
+      mapOver(tree, () => return UnmappableTree)
+
+    /** Map a tree that is part of an annotation argument.
+      *  If the tree cannot be mapped, then invoke giveup().
+      *  The default is to transform the tree with
+      *  TypeMapTransformer.
+      */
+    def mapOver(tree: Tree, giveup: ()=>Nothing): Tree =
+      (new TypeMapTransformer).transform(tree)
+
+    /** This transformer leaves the tree alone except to remap
+      *  its types. */
+    class TypeMapTransformer extends Transformer {
+      override def transform(tree: Tree) = {
+        val tree1 = super.transform(tree)
+        val tpe1 = TypeMap.this(tree1.tpe)
+        if ((tree eq tree1) && (tree.tpe eq tpe1))
+          tree
+        else
+          tree1.shallowDuplicate.setType(tpe1)
+      }
+    }
+  }
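
The public Type#map in scala.reflect.api exposes roughly this mapOver traversal to user code. A small sketch, assuming scala-reflect on the classpath (names are illustrative):

    import scala.reflect.runtime.universe._

    object TypeMapDemo extends App {
      val tp = typeOf[List[Option[Int]]]
      // Rewrite every occurrence of Int to Long; map recurses through the
      // parts of the type much like mapOver does.
      val rewritten = tp.map {
        case t if t =:= typeOf[Int] => typeOf[Long]
        case t                      => t
      }
      println(rewritten)   // List[Option[Long]]
    }
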
+
+  abstract class TypeTraverser extends TypeMap {
+    def traverse(tp: Type): Unit
+    def apply(tp: Type): Type = { traverse(tp); tp }
+  }
+
+  abstract class TypeTraverserWithResult[T] extends TypeTraverser {
+    def result: T
+    def clear(): Unit
+  }
+
+  abstract class TypeCollector[T](initial: T) extends TypeTraverser {
+    var result: T = _
+    def collect(tp: Type) = {
+      result = initial
+      traverse(tp)
+      result
+    }
+  }
+
+  /** The raw to existential map converts a ''raw type'' to an existential type.
+    *  It is necessary because we might have read a raw type of a
+    *  parameterized Java class from a class file. At the time we read the type
+    *  the corresponding class file might still not be read, so we do not
+    *  know what the type parameters of the type are. Therefore
+    *  the conversion of raw types to existential types might not have taken place
+    *  in ClassfileParser.sigToType (where it is usually done).
+    */
+  def rawToExistential = new TypeMap {
+    private var expanded = immutable.Set[Symbol]()
+    def apply(tp: Type): Type = tp match {
+      case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) =>
+        if (expanded contains sym) AnyRefTpe
+        else try {
+          expanded += sym
+          val eparams = mapOver(typeParamsToExistentials(sym))
+          existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))
+        } finally {
+          expanded -= sym
+        }
+      case _ =>
+        mapOver(tp)
+    }
+  }
+  /***
+    *@M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible
+    object rawToExistentialInJava extends TypeMap {
+      def apply(tp: Type): Type = tp match {
+        // any symbol that occurs in a java sig, not just java symbols
+        // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14
+        case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty =>
+          val eparams = typeParamsToExistentials(sym, sym.typeParams)
+          existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe)))
+        case _ =>
+          mapOver(tp)
+      }
+    }
+    */
+
+  /** Used by existentialAbstraction.
+    */
+  class ExistentialExtrapolation(tparams: List[Symbol]) extends TypeMap(trackVariance = true) {
+    private val occurCount = mutable.HashMap[Symbol, Int]()
+    private def countOccs(tp: Type) = {
+      tp foreach {
+        case TypeRef(_, sym, _) =>
+          if (tparams contains sym)
+            occurCount(sym) += 1
+        case _ => ()
+      }
+    }
+    def extrapolate(tpe: Type): Type = {
+      tparams foreach (t => occurCount(t) = 0)
+      countOccs(tpe)
+      for (tparam <- tparams)
+        countOccs(tparam.info)
+
+      apply(tpe)
+    }
+
+    /** If these conditions all hold:
+      *   1) we are in covariant (or contravariant) position
+      *   2) this type occurs exactly once in the existential scope
+      *   3) the widened upper (or lower) bound of this type contains no references to tparams
+      *  Then we replace this lone occurrence of the type with the widened upper (or lower) bound.
+      *  All other types pass through unchanged.
+      */
+    def apply(tp: Type): Type = {
+      val tp1 = mapOver(tp)
+      if (variance.isInvariant) tp1
+      else tp1 match {
+        case TypeRef(pre, sym, args) if tparams contains sym =>
+          val repl = if (variance.isPositive) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo
+          val count = occurCount(sym)
+          val containsTypeParam = tparams exists (repl contains _)
+          def msg = {
+            val word = if (variance.isPositive) "upper" else "lower"
+            s"Widened lone occurrence of $tp1 inside existential to $word bound"
+          }
+          if (!repl.typeSymbol.isBottomClass && count == 1 && !containsTypeParam)
+            debuglogResult(msg)(repl)
+          else
+            tp1
+        case _ =>
+          tp1
+      }
+    }
+    override def mapOver(tp: Type): Type = tp match {
+      case SingleType(pre, sym) =>
+        if (sym.isPackageClass) tp // short path
+        else {
+          val pre1 = this(pre)
+          if ((pre1 eq pre) || !pre1.isStable) tp
+          else singleType(pre1, sym)
+        }
+      case _ => super.mapOver(tp)
+    }
+
+    // Do not discard the types of existential ident's. The
+    // symbol of the Ident itself cannot be listed in the
+    // existential's parameters, so the resulting existential
+    // type would be ill-formed.
+    override def mapOver(tree: Tree) = tree match {
+      case Ident(_) if tree.tpe.isStable => tree
+      case _                             => super.mapOver(tree)
+    }
+  }
+
+  /** Might the given symbol be important when calculating the prefix
+    *  of a type? When tp.asSeenFrom(pre, clazz) is called on `tp`,
+    *  the result will be `tp` unchanged if `pre` is trivial and `clazz`
+    *  is a symbol such that isPossiblePrefix(clazz) == false.
+    */
+  def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass
+
+  protected[internal] def skipPrefixOf(pre: Type, clazz: Symbol) = (
+    (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
+    )
+
+  def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
+    new AsSeenFromMap(pre, clazz)
+
+  /** A map to compute the asSeenFrom method.
+    */
+  class AsSeenFromMap(seenFromPrefix: Type, seenFromClass: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
+    // Some example source constructs relevant in asSeenFrom:
+    //
+    // object CaptureThis {
+    //   trait X[A] { def f: this.type = this }
+    //   class Y[A] { def f: this.type = this }
+    //   // Created new existential to represent This(CaptureThis.X) seen from CaptureThis.X[B]: type _1.type <: CaptureThis.X[B] with Singleton
+    //   def f1[B] = new X[B] { }
+    //   // TODO - why is the behavior different when it's a class?
+    //   def f2[B] = new Y[B] { }
+    // }
+    // class CaptureVal[T] {
+    //   val f: java.util.List[_ <: T] = null
+    //   // Captured existential skolem for type _$1 seen from CaptureVal.this.f.type: type _$1
+    //   def g = f get 0
+    // }
+    // class ClassParam[T] {
+    //   // AsSeenFromMap(Inner.this.type, class Inner)/classParameterAsSeen(T)#loop(ClassParam.this.type, class ClassParam)
+    //   class Inner(lhs: T) { def f = lhs }
+    // }
+    def capturedParams: List[Symbol]  = _capturedParams
+    def capturedSkolems: List[Symbol] = _capturedSkolems
+
+    def apply(tp: Type): Type = tp match {
+      case tp @ ThisType(_)                                            => thisTypeAsSeen(tp)
+      case tp @ SingleType(_, sym)                                     => if (sym.isPackageClass) tp else singleTypeAsSeen(tp)
+      case tp @ TypeRef(_, sym, _) if isTypeParamOfEnclosingClass(sym) => classParameterAsSeen(tp)
+      case _                                                           => mapOver(tp)
+    }
+
+    private var _capturedSkolems: List[Symbol] = Nil
+    private var _capturedParams: List[Symbol]  = Nil
+    private val isStablePrefix = seenFromPrefix.isStable
+
+    // isBaseClassOfEnclosingClassOrInfoIsNotYetComplete would be a more accurate
+    // but less succinct name.
+    private def isBaseClassOfEnclosingClass(base: Symbol) = {
+      def loop(encl: Symbol): Boolean = (
+        isPossiblePrefix(encl)
+          && ((encl isSubClass base) || loop(encl.owner.enclClass))
+        )
+      // The hasCompleteInfo guard is necessary to avoid cycles during the typing
+      // of certain classes, notably ones defined inside package objects.
+      !base.hasCompleteInfo || loop(seenFromClass)
+    }
+
+    /** Is the symbol a class type parameter from one of the enclosing
+      *  classes, or a base class of one of them?
+      */
+    private def isTypeParamOfEnclosingClass(sym: Symbol): Boolean = (
+      sym.isTypeParameter
+        && sym.owner.isClass
+        && isBaseClassOfEnclosingClass(sym.owner)
+      )
+
+    /** Creates an existential representing a type parameter which appears
+      *  in the prefix of a ThisType.
+      */
+    protected def captureThis(pre: Type, clazz: Symbol): Type = {
+      capturedParams find (_.owner == clazz) match {
+        case Some(p) => p.tpe
+        case _       =>
+          val qvar = clazz freshExistential nme.SINGLETON_SUFFIX setInfo singletonBounds(pre)
+          _capturedParams ::= qvar
+          debuglog(s"Captured This(${clazz.fullNameString}) seen from $seenFromPrefix: ${qvar.defString}")
+          qvar.tpe
+      }
+    }
+    protected def captureSkolems(skolems: List[Symbol]) {
+      for (p <- skolems; if !(capturedSkolems contains p)) {
+        debuglog(s"Captured $p seen from $seenFromPrefix")
+        _capturedSkolems ::= p
+      }
+    }
+
+    /** Find the type argument in an applied type which corresponds to a type parameter.
+      *  The arguments are required to be related as follows, through intermediary `clazz`.
+      *  An exception will be thrown if this is violated.
+      *
+      *  @param   lhs    its symbol is a type parameter of `clazz`
+      *  @param   rhs    a type application constructed from `clazz`
+      */
+    private def correspondingTypeArgument(lhs: Type, rhs: Type): Type = {
+      val TypeRef(_, lhsSym, lhsArgs) = lhs
+      val TypeRef(_, rhsSym, rhsArgs) = rhs
+      require(lhsSym.owner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym")
+
+      // Find the type parameter position; we'll use the corresponding argument.
+      // Why are we checking by name rather than by equality? Because for
+      // reasons which aren't yet fully clear, we can arrive here holding a type
+      // parameter whose owner is rhsSym, and which shares the name of an actual
+      // type parameter of rhsSym, but which is not among the type parameters of
+      // rhsSym. One can see examples of it at SI-4365.
+      val argIndex = rhsSym.typeParams indexWhere (lhsSym.name == _.name)
+      // don't be too zealous with the exceptions, see #2641
+      if (argIndex < 0 && rhs.parents.exists(typeIsErroneous))
+        ErrorType
+      else {
+        // It's easy to get here when working on hardcore type machinery (not to
+        // mention when not doing so, see above) so let's provide a standout error.
+        def own_s(s: Symbol) = s.nameString + " in " + s.owner.nameString
+        def explain =
+          sm"""|   sought  ${own_s(lhsSym)}
+               | classSym  ${own_s(rhsSym)}
+               |  tparams  ${rhsSym.typeParams map own_s mkString ", "}
+               |"""
+
+        if (argIndex < 0)
+          abort(s"Something is wrong: cannot find $lhs in applied type $rhs\n" + explain)
+        else {
+          val targ   = rhsArgs(argIndex)
+          // @M! don't just replace the whole thing, might be followed by type application
+          val result = appliedType(targ, lhsArgs mapConserve this)
+          def msg = s"Created $result, though could not find ${own_s(lhsSym)} among tparams of ${own_s(rhsSym)}"
+          if (!rhsSym.typeParams.contains(lhsSym))
+            devWarning(s"Inconsistent tparam/owner views: had to fall back on names\n$msg\n$explain")
+
+          result
+        }
+      }
+    }
+
+    // 0) @pre: `classParam` is a class type parameter
+    // 1) Walk the owner chain of `seenFromClass` until we find the class which owns `classParam`
+    // 2) Take the base type of the prefix at that point with respect to the owning class
+    // 3) Solve for the type parameters through correspondence with the type args of the base type
+    //
+    // Only class type parameters (and not skolems) are considered, because other type parameters
+    // are not influenced by the prefix through which they are seen. Note that type params of
+    // anonymous type functions, which currently can only arise from normalising type aliases, are
+    // owned by the type alias of which they are the eta-expansion.
+    private def classParameterAsSeen(classParam: Type): Type = {
+      val TypeRef(_, tparam, _) = classParam
+
+      def loop(pre: Type, clazz: Symbol): Type = {
+        // have to deconst because it may be a Class[T]
+        def nextBase = (pre baseType clazz).deconst
+        //@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary
+        if (skipPrefixOf(pre, clazz))
+          mapOver(classParam)
+        else if (!matchesPrefixAndClass(pre, clazz)(tparam.owner))
+          loop(nextBase.prefix, clazz.owner)
+        else nextBase match {
+          case NoType                         => loop(NoType, clazz.owner) // backstop for SI-2797, must remove `SingletonType#isHigherKinded` and run pos/t2797.scala to get here.
+          case applied @ TypeRef(_, _, _)     => correspondingTypeArgument(classParam, applied)
+          case ExistentialType(eparams, qtpe) => captureSkolems(eparams) ; loop(qtpe, clazz)
+          case t                              => abort(s"$tparam in ${tparam.owner} cannot be instantiated from ${seenFromPrefix.widen}")
+        }
+      }
+      loop(seenFromPrefix, seenFromClass)
+    }
+
+    // Does the candidate symbol match the given prefix and class?
+    // Since pre may be something like ThisType(A) where trait A { self: B => },
+    // we have to test the typeSymbol of the widened type, not pre.typeSymbol, or
+    // B will not be considered.
+    private def matchesPrefixAndClass(pre: Type, clazz: Symbol)(candidate: Symbol) =
+      (clazz == candidate) && (pre.widen.typeSymbol isSubClass clazz)
+
+    // Whether the annotation tree currently being mapped over has had a This(_) node rewritten.
+    private[this] var wroteAnnotation = false
+    private object annotationArgRewriter extends TypeMapTransformer {
+      private def matchesThis(thiz: Symbol) = matchesPrefixAndClass(seenFromPrefix, seenFromClass)(thiz)
+
+      // what symbol should really be used?
+      private def newThis(): Tree = {
+        wroteAnnotation = true
+        val presym      = seenFromPrefix.widen.typeSymbol
+        val thisSym     = presym.owner.newValue(presym.name.toTermName, presym.pos) setInfo seenFromPrefix
+        gen.mkAttributedQualifier(seenFromPrefix, thisSym)
+      }
+
+      /** Rewrite `This` trees in annotation argument trees */
+      override def transform(tree: Tree): Tree = super.transform(tree) match {
+        case This(_) if matchesThis(tree.symbol) => newThis()
+        case tree                                => tree
+      }
+    }
+
+    // This becomes considerably cheaper if we optimize for the common cases:
+    // where the prefix is stable and where no This nodes are rewritten. If
+    // either is true, then we don't need to worry about calling giveup. So if
+    // the prefix is unstable, use a stack variable to indicate whether the tree
+    // was touched. This takes us to one allocation per AsSeenFromMap rather
+    // than an allocation on every call to mapOver, and no extra work when the
+    // tree only has its types remapped.
+    override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
+      if (isStablePrefix)
+        annotationArgRewriter transform tree
+      else {
+        val saved = wroteAnnotation
+        wroteAnnotation = false
+        try annotationArgRewriter transform tree
+        finally if (wroteAnnotation) giveup() else wroteAnnotation = saved
+      }
+    }
+
+    private def thisTypeAsSeen(tp: ThisType): Type = {
+      def loop(pre: Type, clazz: Symbol): Type = {
+        val pre1 = pre match {
+          case SuperType(thistpe, _) => thistpe
+          case _                     => pre
+        }
+        if (skipPrefixOf(pre, clazz))
+          mapOver(tp) // TODO - is mapOver necessary here?
+        else if (!matchesPrefixAndClass(pre, clazz)(tp.sym))
+          loop((pre baseType clazz).prefix, clazz.owner)
+        else if (pre1.isStable)
+          pre1
+        else
+          captureThis(pre1, clazz)
+      }
+      loop(seenFromPrefix, seenFromClass)
+    }
+
+    private def singleTypeAsSeen(tp: SingleType): Type = {
+      val SingleType(pre, sym) = tp
+
+      val pre1 = this(pre)
+      if (pre1 eq pre) tp
+      else if (pre1.isStable) singleType(pre1, sym)
+      else pre1.memberType(sym).resultType //todo: this should be rolled into existential abstraction
+    }
+
+    override def toString = s"AsSeenFromMap($seenFromPrefix, $seenFromClass)"
+  }
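
AsSeenFromMap backs the public Type#asSeenFrom. A small sketch of the typical use, assuming scala-reflect on the classpath (Container and the object name are illustrative):

    import scala.reflect.runtime.universe._

    object AsSeenFromDemo extends App {
      trait Container[A] { def head: A }

      val container  = typeOf[Container[Int]]
      val headReturn = container.member(TermName("head")).asMethod.returnType  // the abstract A
      // asSeenFrom rewrites the class type parameter A relative to the prefix Container[Int]:
      println(headReturn.asSeenFrom(container, container.typeSymbol.asClass))  // Int
    }
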
+
+  /** A base class to compute all substitutions */
+  abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap {
+    // OPT this check was 2-3% of some profiles, demoted to -Xdev
+    if (isDeveloper) assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to)
+
+    /** Are `sym` and `sym1` the same? Can be tuned by subclasses. */
+    protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1
+
+    /** Map target to type, can be tuned by subclasses */
+    protected def toType(fromtp: Type, tp: T): Type
+
+    protected def renameBoundSyms(tp: Type): Type = tp match {
+      case MethodType(ps, restp) =>
+        createFromClonedSymbols(ps, restp)((ps1, tp1) => copyMethodType(tp, ps1, renameBoundSyms(tp1)))
+      case PolyType(bs, restp) =>
+        createFromClonedSymbols(bs, restp)((ps1, tp1) => PolyType(ps1, renameBoundSyms(tp1)))
+      case ExistentialType(bs, restp) =>
+        createFromClonedSymbols(bs, restp)(newExistentialType)
+      case _ =>
+        tp
+    }
+
+    @tailrec private def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type = (
+      if (from.isEmpty) tp
+      // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from))
+      else if (matches(from.head, sym)) toType(tp, to.head)
+      else subst(tp, sym, from.tail, to.tail)
+      )
+
+    def apply(tp0: Type): Type = if (from.isEmpty) tp0 else {
+      val boundSyms             = tp0.boundSyms
+      val tp1                   = if (boundSyms.nonEmpty && (boundSyms exists from.contains)) renameBoundSyms(tp0) else tp0
+      val tp                    = mapOver(tp1)
+      def substFor(sym: Symbol) = subst(tp, sym, from, to)
+
+      tp match {
+        // @M
+        // 1) arguments must also be substituted (even when the "head" of the
+        // applied type has already been substituted)
+        // example: (subst RBound[RT] from [type RT,type RBound] to
+        // [type RT&,type RBound&]) = RBound&[RT&]
+        // 2) avoid loops (which occur because alpha-conversion is
+        // not performed properly imo)
+        // e.g. if in class Iterable[a] there is a new Iterable[(a,b)],
+        // we must replace the a in Iterable[a] by (a,b)
+        // (must not recurse --> loops)
+        // 3) replacing m by List in m[Int] should yield List[Int], not just List
+        case TypeRef(NoPrefix, sym, args) =>
+          val tcon = substFor(sym)
+          if ((tp eq tcon) || args.isEmpty) tcon
+          else appliedType(tcon.typeConstructor, args)
+        case SingleType(NoPrefix, sym) =>
+          substFor(sym)
+        case ClassInfoType(parents, decls, sym) =>
+          val parents1 = parents mapConserve this
+          // We don't touch decls here; they will be touched when an enclosing TreeSubstitutor
+          // transforms the tree that defines them.
+          if (parents1 eq parents) tp
+          else ClassInfoType(parents1, decls, sym)
+        case _ =>
+          tp
+      }
+    }
+  }
+
+  /** A map to implement the `substSym` method. */
+  class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
+    def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2))
+
+    protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
+      case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
+      case SingleType(pre, _) => singleType(pre, sym)
+    }
+    @tailrec private def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol = (
+      if (from.isEmpty) sym
+      // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from))
+      else if (matches(from.head, sym)) to.head
+      else subst(sym, from.tail, to.tail)
+      )
+    private def substFor(sym: Symbol) = subst(sym, from, to)
+
+    override def apply(tp: Type): Type = (
+      if (from.isEmpty) tp
+      else tp match {
+        case TypeRef(pre, sym, args) if pre ne NoPrefix =>
+          val newSym = substFor(sym)
+          // mapOver takes care of subst'ing in args
+          mapOver ( if (sym eq newSym) tp else copyTypeRef(tp, pre, newSym, args) )
+        // assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams))
+        case SingleType(pre, sym) if pre ne NoPrefix =>
+          val newSym = substFor(sym)
+          mapOver( if (sym eq newSym) tp else singleType(pre, newSym) )
+        case _ =>
+          super.apply(tp)
+      }
+      )
+
+    object mapTreeSymbols extends TypeMapTransformer {
+      val strictCopy = newStrictTreeCopier
+
+      def termMapsTo(sym: Symbol) = from indexOf sym match {
+        case -1   => None
+        case idx  => Some(to(idx))
+      }
+
+      // if tree.symbol is mapped to another symbol, passes the new symbol into the
+      // constructor `trans` and sets the symbol and the type on the resulting tree.
+      def transformIfMapped(tree: Tree)(trans: Symbol => Tree) = termMapsTo(tree.symbol) match {
+        case Some(toSym) => trans(toSym) setSymbol toSym setType tree.tpe
+        case None => tree
+      }
+
+      // changes trees which refer to one of the mapped symbols. trees are copied before attributes are modified.
+      override def transform(tree: Tree) = {
+        // super.transform maps symbol references in the types of `tree`. it also copies trees where necessary.
+        super.transform(tree) match {
+          case id @ Ident(_) =>
+            transformIfMapped(id)(toSym =>
+              strictCopy.Ident(id, toSym.name))
+
+          case sel @ Select(qual, name) =>
+            transformIfMapped(sel)(toSym =>
+              strictCopy.Select(sel, qual, toSym.name))
+
+          case tree => tree
+        }
+      }
+    }
+    override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
+      mapTreeSymbols.transform(tree)
+    }
+  }
+
+  /** A map to implement the `subst` method. */
+  class SubstTypeMap(val from: List[Symbol], val to: List[Type]) extends SubstMap(from, to) {
+    protected def toType(fromtp: Type, tp: Type) = tp
+
+    override def mapOver(tree: Tree, giveup: () => Nothing): Tree = {
+      object trans extends TypeMapTransformer {
+        override def transform(tree: Tree) = tree match {
+          case Ident(name) =>
+            from indexOf tree.symbol match {
+              case -1   => super.transform(tree)
+              case idx  =>
+                val totpe = to(idx)
+                if (totpe.isStable) tree.duplicate setType totpe
+                else giveup()
+            }
+          case _ =>
+            super.transform(tree)
+        }
+      }
+      trans.transform(tree)
+    }
+  }
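
SubstTypeMap is the machinery behind the public Type#substituteTypes. A small sketch, assuming scala-reflect on the classpath (Box and the object name are illustrative):

    import scala.reflect.runtime.universe._

    object SubstDemo extends App {
      trait Box[A] { def value: A }

      val boxClass = typeOf[Box[_]].typeSymbol.asClass
      val A        = boxClass.typeParams.head                                     // Box's type parameter
      val declared = boxClass.info.member(TermName("value")).asMethod.returnType  // the abstract A
      // substituteTypes is the public counterpart of SubstTypeMap:
      println(declared.substituteTypes(List(A), List(typeOf[String])))            // String
    }
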
+
+  /** A map to implement the `substThis` method. */
+  class SubstThisMap(from: Symbol, to: Type) extends TypeMap {
+    def apply(tp: Type): Type = tp match {
+      case ThisType(sym) if (sym == from) => to
+      case _ => mapOver(tp)
+    }
+  }
+
+  class SubstWildcardMap(from: List[Symbol]) extends TypeMap {
+    def apply(tp: Type): Type = try {
+      tp match {
+        case TypeRef(_, sym, _) if from contains sym =>
+          BoundedWildcardType(sym.info.bounds)
+        case _ =>
+          mapOver(tp)
+      }
+    } catch {
+      case ex: MalformedType =>
+        WildcardType
+    }
+  }
+
+  // dependent method types
+  object IsDependentCollector extends TypeCollector(false) {
+    def traverse(tp: Type) {
+      if (tp.isImmediatelyDependent) result = true
+      else if (!result) mapOver(tp.dealias)
+    }
+  }
+
+  object ApproximateDependentMap extends TypeMap {
+    def apply(tp: Type): Type =
+      if (tp.isImmediatelyDependent) WildcardType
+      else mapOver(tp)
+  }
+
+  /** Note: This map is needed even for non-dependent method types, despite what the name might imply.
+    */
+  class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
+    private val actuals      = actuals0.toIndexedSeq
+    private val existentials = new Array[Symbol](actuals.size)
+    def existentialsNeeded: List[Symbol] = existentials.iterator.filter(_ ne null).toList
+
+    private object StableArgTp {
+      // type of actual arg corresponding to param -- if the type is stable
+      def unapply(param: Symbol): Option[Type] = (params indexOf param) match {
+        case -1  => None
+        case pid =>
+          val tp = actuals(pid)
+          if (tp.isStable && (tp.typeSymbol != NothingClass)) Some(tp)
+          else None
+      }
+    }
+
+    /** Return the type symbol for referencing a parameter that's instantiated to an unstable actual argument.
+     *
+     * To soundly abstract over an unstable value (x: T) while retaining the most type information,
+     * use `x.type forSome { type x.type <: T with Singleton }`, i.e.
+     * `typeOf[T].narrowExistentially(symbolOf[x])`.
+     *
+     * See also: captureThis in AsSeenFromMap.
+     */
+    private def existentialFor(pid: Int) = {
+      if (existentials(pid) eq null) {
+        val param = params(pid)
+        existentials(pid) = (
+          param.owner.newExistential(param.name.toTypeName append nme.SINGLETON_SUFFIX, param.pos, param.flags)
+            setInfo singletonBounds(actuals(pid))
+          )
+      }
+      existentials(pid)
+    }
+
+    private object UnstableArgTp {
+      // existential quantifier and type of corresponding actual arg with unstable type
+      def unapply(param: Symbol): Option[(Symbol, Type)] = (params indexOf param) match {
+        case -1  => None
+        case pid =>
+          val sym = existentialFor(pid)
+          Some((sym, sym.tpe_*)) // refers to an actual value, must be kind-*
+      }
+    }
+
+    private object StabilizedArgTp {
+      def unapply(param: Symbol): Option[Type] =
+        param match {
+          case StableArgTp(tp)      => Some(tp)  // (1)
+          case UnstableArgTp(_, tp) => Some(tp)  // (2)
+          case _ => None
+        }
+    }
+
+    /** instantiate `param.type` to the (sound approximation of the) type `T`
+     * of the actual argument `arg` that was passed in for `param`
+     *
+     * (1) If `T` is stable, we can just use that.
+     *
+     * (2) SI-3873: it'd be unsound to instantiate `param.type` to an unstable `T`,
+     * so we approximate to `X forSome {type X <: T with Singleton}` -- we can't soundly say more.
+     */
+    def apply(tp: Type): Type = tp match {
+      case SingleType(NoPrefix, StabilizedArgTp(tp)) => tp
+      case _                                         => mapOver(tp)
+    }
+
+    //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon)
+    override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
+      // TODO: this should be simplified; in the stable case, one can
+      // probably just use an Ident to the tree.symbol.
+      //
+      // @PP: That leads to failure here, where stuff no longer has type
+      // 'String @Annot("stuff")' but 'String @Annot(x)'.
+      //
+      //   def m(x: String): String @Annot(x) = x
+      //   val stuff = m("stuff")
+      //
+      // (TODO cont.) Why an existential in the non-stable case?
+      //
+      // @PP: In the following:
+      //
+      //   def m = { val x = "three" ; val y: String @Annot(x) = x; y }
+      //
+      // m is typed as 'String @Annot(x) forSome { val x: String }'.
+      //
+      // Both examples are from run/constrained-types.scala.
+      object treeTrans extends Transformer {
+        override def transform(tree: Tree): Tree = tree.symbol match {
+          case StableArgTp(tp)          => gen.mkAttributedQualifier(tp, tree.symbol)
+          case UnstableArgTp(quant, tp) => Ident(quant) copyAttrs tree setType tp
+          case _                        => super.transform(tree)
+        }
+      }
+      treeTrans transform arg
+    }
+  }
+
+  /** A map to convert every occurrence of a wildcard type to a fresh
+    *  type variable */
+  object wildcardToTypeVarMap extends TypeMap {
+    def apply(tp: Type): Type = tp match {
+      case WildcardType =>
+        TypeVar(tp, new TypeConstraint)
+      case BoundedWildcardType(bounds) =>
+        TypeVar(tp, new TypeConstraint(bounds))
+      case _ =>
+        mapOver(tp)
+    }
+  }
+
+  /** A map to convert each occurrence of a type variable to its origin. */
+  object typeVarToOriginMap extends TypeMap {
+    def apply(tp: Type): Type = tp match {
+      case TypeVar(origin, _) => origin
+      case _ => mapOver(tp)
+    }
+  }
+
+  /** A map to implement the `contains` method. */
+  class ContainsCollector(sym: Symbol) extends TypeCollector(false) {
+    def traverse(tp: Type) {
+      if (!result) {
+        tp.normalize match {
+          case TypeRef(_, sym1, _) if (sym == sym1) => result = true
+          case SingleType(_, sym1) if (sym == sym1) => result = true
+          case _ => mapOver(tp)
+        }
+      }
+    }
+
+    override def mapOver(arg: Tree) = {
+      for (t <- arg) {
+        traverse(t.tpe)
+        if (t.symbol == sym)
+          result = true
+      }
+      arg
+    }
+  }
+
+  /** A map to implement the `filter` method. */
+  class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) {
+    override def collect(tp: Type) = super.collect(tp).reverse
+
+    def traverse(tp: Type) {
+      if (p(tp)) result ::= tp
+      mapOver(tp)
+    }
+  }
+
+  /** A map to implement the `collect` method. */
+  class CollectTypeCollector[T](pf: PartialFunction[Type, T]) extends TypeCollector[List[T]](Nil) {
+    override def collect(tp: Type) = super.collect(tp).reverse
+
+    def traverse(tp: Type) {
+      if (pf.isDefinedAt(tp)) result ::= pf(tp)
+      mapOver(tp)
+    }
+  }
+
+  class ForEachTypeTraverser(f: Type => Unit) extends TypeTraverser {
+    def traverse(tp: Type) {
+      f(tp)
+      mapOver(tp)
+    }
+  }
+
+  /** A map to implement the `find` method. */
+  class FindTypeCollector(p: Type => Boolean) extends TypeCollector[Option[Type]](None) {
+    def traverse(tp: Type) {
+      if (result.isEmpty) {
+        if (p(tp)) result = Some(tp)
+        mapOver(tp)
+      }
+    }
+  }
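
ContainsCollector and FindTypeCollector back the public Type#contains and Type#find (with exists defined in terms of find). A small sketch, assuming scala-reflect on the classpath:

    import scala.reflect.runtime.universe._

    object CollectorsDemo extends App {
      val tp = typeOf[Map[String, List[Option[Int]]]]
      println(tp.contains(typeOf[Int].typeSymbol))   // true
      println(tp.find(_ =:= typeOf[Option[Int]]))    // Some(Option[Int])
      println(tp.exists(_ =:= typeOf[Double]))       // false
    }
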
+
+  /** A map to implement the `isErroneous` method. */
+  object ErroneousCollector extends TypeCollector(false) {
+    def traverse(tp: Type) {
+      if (!result) {
+        result = tp.isError
+        mapOver(tp)
+      }
+    }
+  }
+
+  object adaptToNewRunMap extends TypeMap {
+
+    private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
+      if (phase.flatClasses || sym.isRootSymbol || (pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass)
+        sym
+      else if (sym.isModuleClass) {
+        val sourceModule1 = adaptToNewRun(pre, sym.sourceModule)
+
+        sourceModule1.moduleClass orElse sourceModule1.initialize.moduleClass orElse {
+          val msg = "Cannot adapt module class; sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s"
+          debuglog(msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass))
+          sym
+        }
+      }
+      else {
+        var rebind0 = pre.findMember(sym.name, BRIDGE, 0, stableOnly = true) orElse {
+          if (sym.isAliasType) throw missingAliasException
+          devWarning(s"$pre.$sym no longer exist at phase $phase")
+          throw new MissingTypeControl // For build manager and presentation compiler purposes
+        }
+        /* The two symbols have the same fully qualified name */
+        def corresponds(sym1: Symbol, sym2: Symbol): Boolean =
+          sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner))
+        if (!corresponds(sym.owner, rebind0.owner)) {
+          debuglog("ADAPT1 pre = "+pre+", sym = "+sym.fullLocationString+", rebind = "+rebind0.fullLocationString)
+          val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner))
+          if (bcs.isEmpty)
+            assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinement class it might be a structural type => OK to leave it in.
+          else
+            rebind0 = pre.baseType(bcs.head).member(sym.name)
+          debuglog(
+            "ADAPT2 pre = " + pre +
+              ", bcs.head = " + bcs.head +
+              ", sym = " + sym.fullLocationString +
+              ", rebind = " + rebind0.fullLocationString
+          )
+        }
+        rebind0.suchThat(sym => sym.isType || sym.isStable) orElse {
+          debuglog("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
+          throw new MalformedType(pre, sym.nameString)
+        }
+      }
+    }
+    def apply(tp: Type): Type = tp match {
+      case ThisType(sym) =>
+        try {
+          val sym1 = adaptToNewRun(sym.owner.thisType, sym)
+          if (sym1 == sym) tp else ThisType(sym1)
+        } catch {
+          case ex: MissingTypeControl =>
+            tp
+        }
+      case SingleType(pre, sym) =>
+        if (sym.hasPackageFlag) tp
+        else {
+          val pre1 = this(pre)
+          try {
+            val sym1 = adaptToNewRun(pre1, sym)
+            if ((pre1 eq pre) && (sym1 eq sym)) tp
+            else singleType(pre1, sym1)
+          } catch {
+            case _: MissingTypeControl =>
+              tp
+          }
+        }
+      case TypeRef(pre, sym, args) =>
+        if (sym.isPackageClass) tp
+        else {
+          val pre1 = this(pre)
+          val args1 = args mapConserve (this)
+          try {
+            val sym1 = adaptToNewRun(pre1, sym)
+            if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) {
+              tp
+            } else if (sym1 == NoSymbol) {
+              devWarning(s"adapt to new run failed: pre=$pre pre1=$pre1 sym=$sym")
+              tp
+            } else {
+              copyTypeRef(tp, pre1, sym1, args1)
+            }
+          } catch {
+            case ex: MissingAliasControl =>
+              apply(tp.dealias)
+            case _: MissingTypeControl =>
+              tp
+          }
+        }
+      case MethodType(params, restp) =>
+        val restp1 = this(restp)
+        if (restp1 eq restp) tp
+        else copyMethodType(tp, params, restp1)
+      case NullaryMethodType(restp) =>
+        val restp1 = this(restp)
+        if (restp1 eq restp) tp
+        else NullaryMethodType(restp1)
+      case PolyType(tparams, restp) =>
+        val restp1 = this(restp)
+        if (restp1 eq restp) tp
+        else PolyType(tparams, restp1)
+
+      // Lukas: we need to check (together) whether we should also include parameter types
+      // of PolyType and MethodType in adaptToNewRun
+
+      case ClassInfoType(parents, decls, clazz) =>
+        if (clazz.isPackageClass) tp
+        else {
+          val parents1 = parents mapConserve (this)
+          if (parents1 eq parents) tp
+          else ClassInfoType(parents1, decls, clazz)
+        }
+      case RefinedType(parents, decls) =>
+        val parents1 = parents mapConserve (this)
+        if (parents1 eq parents) tp
+        else refinedType(parents1, tp.typeSymbol.owner, decls, tp.typeSymbol.owner.pos)
+      case SuperType(_, _) => mapOver(tp)
+      case TypeBounds(_, _) => mapOver(tp)
+      case TypeVar(_, _) => mapOver(tp)
+      case AnnotatedType(_, _) => mapOver(tp)
+      case ExistentialType(_, _) => mapOver(tp)
+      case _ => tp
+    }
+  }
+
+}
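adaptToNewRunMap leans heavily on the "conserve" idiom: rebuild a node only if some child actually changed, otherwise return the original reference so callers can rely on cheap eq checks. A minimal sketch of the idiom on a toy expression type (invented names, illustration only):

    object ConserveDemo {
      sealed trait Expr
      final case class Lit(n: Int) extends Expr
      final case class Add(l: Expr, r: Expr) extends Expr

      def rewrite(e: Expr): Expr = e match {
        case Add(l, r) =>
          val l1 = rewrite(l); val r1 = rewrite(r)
          if ((l1 eq l) && (r1 eq r)) e else Add(l1, r1)  // share the original node if unchanged
        case _ => e
      }
    }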
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
new file mode 100644
index 0000000..a062fc8
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
@@ -0,0 +1,48 @@
+package scala
+package reflect
+package internal
+package tpe
+
+import scala.collection.mutable.HashSet
+
+private[internal] trait TypeToStrings {
+  self: SymbolTable =>
+
+  /** The maximum number of recursions allowed in toString
+    */
+  final val maxToStringRecursions = 50
+
+  private var _toStringRecursions = 0
+  def toStringRecursions = _toStringRecursions
+  def toStringRecursions_=(value: Int) = _toStringRecursions = value
+
+  private var _toStringSubjects = HashSet[Type]()
+  def toStringSubjects = _toStringSubjects
+
+  protected def typeToString(tpe: Type): String =
+    // if (toStringSubjects contains tpe) {
+    //   // handles self-referential anonymous classes and who knows what else
+    //   "..."
+    // }
+    // else
+    if (toStringRecursions >= maxToStringRecursions) {
+      devWarning("Exceeded recursion depth attempting to print " + util.shortClassOfInstance(tpe))
+      if (settings.debug)
+        (new Throwable).printStackTrace
+
+      "..."
+    }
+    else
+      try {
+        toStringRecursions += 1
+        // TODO: study performance impact of this cache
+        // to quote Jason:
+        //   I'm a little uneasy with the performance impact of the fail-safe. We end up calling Type#toString
+        //   when we generate error messages, including, importantly, errors issued during silent mode that are never issued.
+        // toStringSubjects += tpe
+        tpe.safeToString
+      } finally {
+        // toStringSubjects -= tpe
+        toStringRecursions -= 1
+      }
+}
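The guard in typeToString is just a depth counter that is bumped before rendering and always restored in a finally block, degrading to "..." past the limit. A tiny generic sketch of the same discipline (invented names, not the compiler API):

    object GuardedShow {
      final val MaxDepth = 50
      private var depth = 0
      def show[A](a: A)(render: A => String): String =
        if (depth >= MaxDepth) "..."                       // give up rather than recurse forever
        else try { depth += 1; render(a) } finally depth -= 1
    }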
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index 52d1657..d5b5967 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 package transform
 
@@ -16,7 +17,7 @@ trait Erasure {
     /** Is `tp` an unbounded generic type (i.e. which could be instantiated
      *  with primitive as well as class types)?.
      */
-    private def genericCore(tp: Type): Type = tp.normalize match {
+    private def genericCore(tp: Type): Type = tp.dealiasWiden match {
       /* A Java Array<T> is erased to Array[Object] (T can only be a reference type), where as a Scala Array[T] is
        * erased to Object. However, there is only symbol for the Array class. So to make the distinction between
        * a Java and a Scala array, we check if the owner of T comes from a Java class.
@@ -36,7 +37,7 @@ trait Erasure {
      *  then Some((N, T)) where N is the number of Array constructors enclosing `T`,
      *  otherwise None. Existentials on any level are ignored.
      */
-    def unapply(tp: Type): Option[(Int, Type)] = tp.normalize match {
+    def unapply(tp: Type): Option[(Int, Type)] = tp.dealiasWiden match {
       case TypeRef(_, ArrayClass, List(arg)) =>
         genericCore(arg) match {
           case NoType =>
@@ -54,9 +55,13 @@ trait Erasure {
     }
   }
 
+  /** Arrays, despite their finality, may turn up as refined type parents,
+   *  e.g. with "tagged types" like Array[Int] with T.
+   */
   protected def unboundedGenericArrayLevel(tp: Type): Int = tp match {
-    case GenericArray(level, core) if !(core <:< AnyRefClass.tpe) => level
-    case _ => 0
+    case GenericArray(level, core) if !(core <:< AnyRefTpe) => level
+    case RefinedType(ps, _) if ps.nonEmpty                  => logResult(s"Unbounded generic level for $tp is")((ps map unboundedGenericArrayLevel).max)
+    case _                                                  => 0
   }
 
   // @M #2585 when generating a java generic signature that includes
@@ -68,17 +73,8 @@ trait Erasure {
   // included (use pre.baseType(cls.owner)).
   //
   // This requires that cls.isClass.
-  protected def rebindInnerClass(pre: Type, cls: Symbol): Type = {
-    if (cls.owner.isClass) cls.owner.tpe else pre // why not cls.isNestedClass?
-  }
-
-  def unboxDerivedValueClassMethod(clazz: Symbol): Symbol =
-    (clazz.info.decl(nme.unbox)) orElse
-    (clazz.info.decls.find(_ hasAllFlags PARAMACCESSOR | METHOD) getOrElse
-     NoSymbol)
-
-  def underlyingOfValueClass(clazz: Symbol): Type =
-    clazz.derivedValueClassUnbox.tpe.resultType
+  protected def rebindInnerClass(pre: Type, cls: Symbol): Type =
+    if (cls.isTopLevel || cls.isLocalToBlock) pre else cls.owner.tpe_*
 
   /** The type of the argument of a value class reference after erasure
    *  This method needs to be called at a phase no later than erasurephase
@@ -101,36 +97,40 @@ trait Erasure {
   def valueClassIsParametric(clazz: Symbol): Boolean = {
     assert(!phase.erasedTypes)
     clazz.typeParams contains
-      clazz.derivedValueClassUnbox.tpe.resultType.normalize.typeSymbol
+      clazz.derivedValueClassUnbox.tpe.resultType.typeSymbol
   }
 
   abstract class ErasureMap extends TypeMap {
-    private lazy val ObjectArray  = arrayType(ObjectClass.tpe)
-    private lazy val ErasedObject = erasedTypeRef(ObjectClass)
-
     def mergeParents(parents: List[Type]): Type
 
-    def eraseNormalClassRef(pre: Type, clazz: Symbol): Type =
-      typeRef(apply(rebindInnerClass(pre, clazz)), clazz, List()) // #2585
+    def eraseNormalClassRef(tref: TypeRef): Type = {
+      val TypeRef(pre, clazz, args) = tref
+      val pre1 = apply(rebindInnerClass(pre, clazz))
+      val args1 = Nil
+      if ((pre eq pre1) && (args eq args1)) tref // OPT
+      else typeRef(pre1, clazz, args1) // #2585
+    }
 
     protected def eraseDerivedValueClassRef(tref: TypeRef): Type = erasedValueClassArg(tref)
 
     def apply(tp: Type): Type = tp match {
       case ConstantType(_) =>
         tp
+      case st: ThisType if st.sym.isPackageClass =>
+        tp
       case st: SubType =>
         apply(st.supertype)
       case tref @ TypeRef(pre, sym, args) =>
         if (sym == ArrayClass)
-          if (unboundedGenericArrayLevel(tp) == 1) ObjectClass.tpe
-          else if (args.head.typeSymbol.isBottomClass) ObjectArray
+          if (unboundedGenericArrayLevel(tp) == 1) ObjectTpe
+          else if (args.head.typeSymbol.isBottomClass) arrayType(ObjectTpe)
           else typeRef(apply(pre), sym, args map applyInArray)
-        else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass || sym == NotNullClass) ErasedObject
-        else if (sym == UnitClass) erasedTypeRef(BoxedUnitClass)
+        else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass) ObjectTpe
+        else if (sym == UnitClass) BoxedUnitTpe
         else if (sym.isRefinementClass) apply(mergeParents(tp.parents))
         else if (sym.isDerivedValueClass) eraseDerivedValueClassRef(tref)
-        else if (sym.isClass) eraseNormalClassRef(pre, sym)
-        else apply(sym.info) // alias type or abstract type
+        else if (sym.isClass) eraseNormalClassRef(tref)
+        else apply(sym.info asSeenFrom (pre, sym.owner)) // alias type or abstract type
       case PolyType(tparams, restpe) =>
         apply(restpe)
       case ExistentialType(tparams, restpe) =>
@@ -138,18 +138,18 @@ trait Erasure {
       case mt @ MethodType(params, restpe) =>
         MethodType(
           cloneSymbolsAndModify(params, ErasureMap.this),
-          if (restpe.typeSymbol == UnitClass) erasedTypeRef(UnitClass)
+          if (restpe.typeSymbol == UnitClass) UnitTpe
           // this replaces each typeref that refers to an argument
           // by the type `p.tpe` of the actual argument p (p in params)
           else apply(mt.resultType(mt.paramTypes)))
       case RefinedType(parents, decls) =>
         apply(mergeParents(parents))
-      case AnnotatedType(_, atp, _) =>
+      case AnnotatedType(_, atp) =>
         apply(atp)
       case ClassInfoType(parents, decls, clazz) =>
         ClassInfoType(
           if (clazz == ObjectClass || isPrimitiveValueClass(clazz)) Nil
-          else if (clazz == ArrayClass) List(ErasedObject)
+          else if (clazz == ArrayClass) ObjectTpe :: Nil
           else removeLaterObjects(parents map this),
           decls, clazz)
       case _ =>
@@ -157,7 +157,7 @@ trait Erasure {
     }
 
     def applyInArray(tp: Type): Type = tp match {
-      case TypeRef(pre, sym, args) if (sym.isDerivedValueClass) => eraseNormalClassRef(pre, sym)
+      case tref @ TypeRef(_, sym, _) if sym.isDerivedValueClass => eraseNormalClassRef(tref)
       case _ => apply(tp)
     }
   }
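The behaviour these ErasureMap changes encode can be observed from ordinary user code: an unbounded generic Array[T] erases to Object, while an Array[Int] keeps its primitive array type. A small reflection-based check (method names invented; the commented results are my expectation, not compiler output):

    object ErasureDemo extends App {
      def generic[T](xs: Array[T]): Array[T]   = xs   // expected to erase to (Object)Object
      def concrete(xs: Array[Int]): Array[Int] = xs   // expected to stay (int[])int[]

      getClass.getDeclaredMethods
        .filter(m => Set("generic", "concrete")(m.getName))
        .foreach(m => println(s"${m.getName}: ${m.getParameterTypes.toList} -> ${m.getReturnType}"))
    }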
@@ -214,9 +214,6 @@ trait Erasure {
         specialConstructorErasure(clazz, restpe)
       case ExistentialType(tparams, restpe) =>
         specialConstructorErasure(clazz, restpe)
-      case RefinedType(parents, decls) =>
-        specialConstructorErasure(
-          clazz, specialScalaErasure.mergeParents(parents))
       case mt @ MethodType(params, restpe) =>
         MethodType(
           cloneSymbolsAndModify(params, specialScalaErasure),
@@ -225,15 +222,7 @@ trait Erasure {
         typeRef(pre, clazz, List())
       case tp =>
         if (!(clazz == ArrayClass || tp.isError))
-          // See SI-6556. It seems in some cases the result constructor
-          // type of an anonymous class is a different version of the class.
-          // This has nothing to do with value classes per se.
-          // We simply used a less discriminating transform before, that
-          // did not look at the cases in detail.
-          // It seems there is a deeper problem here, which needs
-          // following up to. But we will not risk regressions
-          // in 2.10 because of it.
-          log(s"!!! unexpected constructor erasure $tp for $clazz")
+          assert(clazz == ArrayClass || tp.isError, s"!!! unexpected constructor erasure $tp for $clazz")
         specialScalaErasure(tp)
     }
   }
@@ -263,7 +252,7 @@ trait Erasure {
      *  An intersection such as `Object with Trait` erases to Object.
      */
     def mergeParents(parents: List[Type]): Type =
-      if (parents.isEmpty) ObjectClass.tpe
+      if (parents.isEmpty) ObjectTpe
       else parents.head
   }
 
@@ -271,11 +260,11 @@ trait Erasure {
 
   /** This is used as the Scala erasure during the erasure phase itself
    *  It differs from normal erasure in that value classes are erased to ErasedValueTypes which
-   *  are then later converted to the underlying parameter type in phase posterasure.
+   *  are then later unwrapped to the underlying parameter type in phase posterasure.
    */
   object specialScalaErasure extends ScalaErasureMap {
     override def eraseDerivedValueClassRef(tref: TypeRef): Type =
-      ErasedValueType(tref)
+      ErasedValueType(tref.sym, erasedValueClassArg(tref))
   }
 
   object javaErasure extends JavaErasureMap
@@ -291,11 +280,11 @@ trait Erasure {
   }
 
   object boxingErasure extends ScalaErasureMap {
-    override def eraseNormalClassRef(pre: Type, clazz: Symbol) =
-      if (isPrimitiveValueClass(clazz)) boxedClass(clazz).tpe
-      else super.eraseNormalClassRef(pre, clazz)
+    override def eraseNormalClassRef(tref: TypeRef) =
+      if (isPrimitiveValueClass(tref.sym)) boxedClass(tref.sym).tpe
+      else super.eraseNormalClassRef(tref)
     override def eraseDerivedValueClassRef(tref: TypeRef) =
-      super.eraseNormalClassRef(tref.pre, tref.sym)
+      super.eraseNormalClassRef(tref)
   }
 
   /** The intersection dominator (SLS 3.7) of a list of types is computed as follows.
@@ -311,7 +300,7 @@ trait Erasure {
    *  - Otherwise, the dominator is the first element of the span.
    */
   def intersectionDominator(parents: List[Type]): Type = {
-    if (parents.isEmpty) ObjectClass.tpe
+    if (parents.isEmpty) ObjectTpe
     else {
       val psyms = parents map (_.typeSymbol)
       if (psyms contains ArrayClass) {
@@ -333,10 +322,6 @@ trait Erasure {
     }
   }
 
-  /** Type reference after erasure */
-  def erasedTypeRef(sym: Symbol): Type =
-    typeRef(erasure(sym)(sym.owner.tpe), sym, Nil)
-
   /**  The symbol's erased info. This is the type's erasure, except for the following symbols:
    *
    *   - For $asInstanceOf      : [T]T
@@ -364,8 +349,7 @@ trait Erasure {
       else if (sym.name == nme.update)
         (tp: @unchecked) match {
           case MethodType(List(index, tvar), restpe) =>
-            MethodType(List(index.cloneSymbol.setInfo(specialErasure(sym)(index.tpe)), tvar),
-                       erasedTypeRef(UnitClass))
+            MethodType(List(index.cloneSymbol.setInfo(specialErasure(sym)(index.tpe)), tvar), UnitTpe)
         }
       else specialErasure(sym)(tp)
     } else if (
diff --git a/src/reflect/scala/reflect/internal/transform/PostErasure.scala b/src/reflect/scala/reflect/internal/transform/PostErasure.scala
new file mode 100644
index 0000000..f0c7d0f
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/transform/PostErasure.scala
@@ -0,0 +1,19 @@
+package scala.reflect
+package internal
+package transform
+
+trait PostErasure {
+  val global: SymbolTable
+  import global._
+  import definitions._
+
+  object elimErasedValueType extends TypeMap {
+    def apply(tp: Type) = tp match {
+      case ConstantType(Constant(tp: Type)) => ConstantType(Constant(apply(tp)))
+      case ErasedValueType(_, underlying)   => underlying
+      case _                                => mapOver(tp)
+    }
+  }
+
+  def transformInfo(sym: Symbol, tp: Type) = elimErasedValueType(tp)
+}
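elimErasedValueType is the step that finally replaces the ErasedValueType wrapper with the value class's underlying type, so a method taking a value class compiles down to a method taking the underlying primitive. A hedged sketch of that user-visible effect (class and method names invented; the commented output is my expectation):

    object ValueClassDemo extends App {
      final class Meters(val value: Double) extends AnyVal
      def twice(m: Meters): Meters = new Meters(m.value * 2)

      getClass.getDeclaredMethods.filter(_.getName == "twice")
        .foreach(m => println(s"${m.getParameterTypes.toList} -> ${m.getReturnType}"))  // expect List(double) -> double
    }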
diff --git a/src/reflect/scala/reflect/internal/transform/RefChecks.scala b/src/reflect/scala/reflect/internal/transform/RefChecks.scala
index d6108ab..4ca114e 100644
--- a/src/reflect/scala/reflect/internal/transform/RefChecks.scala
+++ b/src/reflect/scala/reflect/internal/transform/RefChecks.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 package transform
 
@@ -10,4 +11,4 @@ trait RefChecks {
   def transformInfo(sym: Symbol, tp: Type): Type =
     if (sym.isModule && !sym.isStatic) NullaryMethodType(tp)
     else tp
-}
\ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/transform/Transforms.scala b/src/reflect/scala/reflect/internal/transform/Transforms.scala
index 71cc808..296ccde 100644
--- a/src/reflect/scala/reflect/internal/transform/Transforms.scala
+++ b/src/reflect/scala/reflect/internal/transform/Transforms.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 package transform
 
@@ -25,17 +26,20 @@ trait Transforms { self: SymbolTable =>
   private val refChecksLazy = new Lazy(new { val global: Transforms.this.type = self } with RefChecks)
   private val uncurryLazy = new Lazy(new { val global: Transforms.this.type = self } with UnCurry)
   private val erasureLazy = new Lazy(new { val global: Transforms.this.type = self } with Erasure)
+  private val postErasureLazy = new Lazy(new { val global: Transforms.this.type = self } with PostErasure)
 
   def refChecks = refChecksLazy.force
   def uncurry = uncurryLazy.force
   def erasure = erasureLazy.force
+  def postErasure = postErasureLazy.force
 
   def transformedType(sym: Symbol) =
-    erasure.transformInfo(sym,
-      uncurry.transformInfo(sym,
-        refChecks.transformInfo(sym, sym.info)))
+    postErasure.transformInfo(sym,
+      erasure.transformInfo(sym,
+        uncurry.transformInfo(sym,
+          refChecks.transformInfo(sym, sym.info))))
 
   def transformedType(tpe: Type) =
-    erasure.scalaErasure(uncurry.uncurry(tpe))
+    postErasure.elimErasedValueType(erasure.scalaErasure(uncurry.uncurry(tpe)))
 
 }
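transformedType is plain function composition: refChecks runs first, then uncurry, erasure, and now postErasure. A generic sketch of the same wiring (names invented, not the real phase objects):

    object PipelineDemo {
      type Rewrite[A] = A => A
      def pipeline[A](steps: Rewrite[A]*): Rewrite[A] =
        steps.foldLeft(identity[A] _)((acc, step) => acc andThen step)  // earlier steps run first

      // e.g. pipeline(refChecksStep, uncurryStep, erasureStep, postErasureStep)
    }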
diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
index 00c7c3d..abea8be 100644
--- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala
+++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 package transform
 
@@ -10,6 +11,14 @@ trait UnCurry {
   import global._
   import definitions._
 
+  /** Note: changing tp.normalize to tp.dealias in this method leads to a single
+   *  test failure: run/t5688.scala, where instead of the expected output
+   *    Vector(ta, tb, tab)
+   *  we get
+   *    Vector(tab, tb, tab)
+   *  I think that difference is not the product of sentience but of randomness.
+   *  Let us figure out why it is and then change this method.
+   */
   private def expandAlias(tp: Type): Type = if (!tp.isHigherKinded) tp.normalize else tp
 
   val uncurry: TypeMap = new TypeMap {
@@ -37,7 +46,7 @@ trait UnCurry {
           apply(seqType(arg))
         case TypeRef(pre, JavaRepeatedParamClass, arg :: Nil) =>
           apply(arrayType(
-            if (isUnboundedGeneric(arg)) ObjectClass.tpe else arg))
+            if (isUnboundedGeneric(arg)) ObjectTpe else arg))
         case _ =>
           expandAlias(mapOver(tp))
       }
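For reference, the uncurry transform above rewrites repeated parameters: a Scala T* becomes Seq[T], while a Java T... becomes Array[T] (Array[Object] when T is unbounded). That is a restatement of the cases in the hunk, not new behaviour.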
diff --git a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
new file mode 100644
index 0000000..10a8b4c
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
@@ -0,0 +1,122 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ */
+
+package scala
+package reflect.internal.util
+
+import scala.reflect.io.AbstractFile
+import java.security.cert.Certificate
+import java.security.{ ProtectionDomain, CodeSource }
+import java.net.{ URL, URLConnection, URLStreamHandler }
+import scala.collection.{ mutable, immutable }
+
+/**
+ * A class loader that loads files from a {@link scala.reflect.io.AbstractFile}.
+ *
+ * @author Lex Spoon
+ */
+class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
+    extends ClassLoader(parent)
+    with ScalaClassLoader
+{
+  protected def classNameToPath(name: String): String =
+    if (name endsWith ".class") name
+    else name.replace('.', '/') + ".class"
+
+  protected def findAbstractFile(name: String): AbstractFile = {
+    var file: AbstractFile = root
+    val pathParts          = name split '/'
+
+    for (dirPart <- pathParts.init) {
+      file = file.lookupName(dirPart, directory = true)
+      if (file == null)
+        return null
+    }
+
+    file.lookupName(pathParts.last, directory = false) match {
+      case null   => null
+      case file   => file
+    }
+  }
+
+  protected def dirNameToPath(name: String): String =
+    name.replace('.', '/')
+
+  protected def findAbstractDir(name: String): AbstractFile = {
+    var file: AbstractFile = root
+    val pathParts          = dirNameToPath(name) split '/'
+
+    for (dirPart <- pathParts) {
+      file = file.lookupName(dirPart, directory = true)
+      if (file == null)
+        return null
+    }
+
+    file
+  }
+
+  // parent delegation in JCL uses getResource; so either add parent.getResAsStream
+  // or implement findResource, which we do here as a study in scarlet (my complexion
+  // after looking at CLs and URLs)
+  override def findResource(name: String): URL = findAbstractFile(name) match {
+    case null => null
+    case file => new URL(null, "repldir:" + file.path, new URLStreamHandler {
+      override def openConnection(url: URL): URLConnection = new URLConnection(url) {
+        override def connect() { }
+        override def getInputStream = file.input
+      }
+    })
+  }
+
+  // this inverts delegation order: super.getResAsStr calls parent.getRes if we fail
+  override def getResourceAsStream(name: String) = findAbstractFile(name) match {
+    case null => super.getResourceAsStream(name)
+    case file => file.input
+  }
+  // ScalaClassLoader.classBytes uses getResAsStream, so we'll try again before delegating
+  override def classBytes(name: String): Array[Byte] = findAbstractFile(classNameToPath(name)) match {
+    case null => super.classBytes(name)
+    case file => file.toByteArray
+  }
+  override def findClass(name: String): Class[_] = {
+    val bytes = classBytes(name)
+    if (bytes.length == 0)
+      throw new ClassNotFoundException(name)
+    else
+      defineClass(name, bytes, 0, bytes.length, protectionDomain)
+  }
+
+  lazy val protectionDomain = {
+    val cl = Thread.currentThread().getContextClassLoader()
+    val resource = cl.getResource("scala/runtime/package.class")
+    if (resource == null || resource.getProtocol != "jar") null else {
+      val s = resource.getPath
+      val n = s.lastIndexOf('!')
+      if (n < 0) null else {
+        val path = s.substring(0, n)
+        new ProtectionDomain(new CodeSource(new URL(path), null.asInstanceOf[Array[Certificate]]), null, this, null)
+      }
+    }
+  }
+
+  private val packages = mutable.Map[String, Package]()
+
+  override def definePackage(name: String, specTitle: String, specVersion: String, specVendor: String, implTitle: String, implVersion: String, implVendor: String, sealBase: URL): Package = {
+    throw new UnsupportedOperationException()
+  }
+
+  override def getPackage(name: String): Package = {
+    findAbstractDir(name) match {
+      case null => super.getPackage(name)
+      case file => packages.getOrElseUpdate(name, {
+        val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader])
+        ctor.setAccessible(true)
+        ctor.newInstance(name, null, null, null, null, null, null, null, this)
+      })
+    }
+  }
+
+  override def getPackages(): Array[Package] =
+    root.iterator.filter(_.isDirectory).map(dir => getPackage(dir.name)).toArray
+}
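A hedged usage sketch for the class above: wrap an in-memory VirtualDirectory (the kind of AbstractFile the REPL compiles into) and hand it to the loader. Loading only succeeds once .class files have actually been written into the directory; the names below are invented:

    import scala.reflect.io.VirtualDirectory
    import scala.reflect.internal.util.AbstractFileClassLoader

    object LoaderDemo {
      val out    = new VirtualDirectory("(memory)", None)
      val loader = new AbstractFileClassLoader(out, getClass.getClassLoader)
      // loader.findClass("pkg.Compiled") once the compiler has written pkg/Compiled.class into `out`
    }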
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala
index 2ba15e0..d128521 100644
--- a/src/reflect/scala/reflect/internal/util/Collections.scala
+++ b/src/reflect/scala/reflect/internal/util/Collections.scala
@@ -3,7 +3,8 @@
  * @author  Paul Phillips
  */
 
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
 
 import scala.collection.{ mutable, immutable }
 import scala.annotation.tailrec
@@ -33,15 +34,42 @@ trait Collections {
     xss forall (_ forall p)
   final def mmap[A, B](xss: List[List[A]])(f: A => B) =
     xss map (_ map f)
-  final def mforeach[A](xss: List[List[A]])(f: A => Unit) =
-    xss foreach (_ foreach f)
   final def mfind[A](xss: List[List[A]])(p: A => Boolean): Option[A] = {
     var res: Option[A] = null
     mforeach(xss)(x => if ((res eq null) && p(x)) res = Some(x))
     if (res eq null) None else res
   }
-  final def mfilter[A](xss: List[List[A]])(p: A => Boolean) =
-    for (xs <- xss; x <- xs; if p(x)) yield x
+
+  /** These are all written in terms of List because we're trying to wring out all
+   *  the performance we can, and List is used almost exclusively in the compiler;
+   *  but people are branching out in their collections, so here's an overload.
+   */
+  final def mforeach[A](xss: List[List[A]])(f: A => Unit) = xss foreach (_ foreach f)
+  final def mforeach[A](xss: Traversable[Traversable[A]])(f: A => Unit) = xss foreach (_ foreach f)
+
+  /** A version of map specialized for List, optimized to avoid allocation if `as` is empty */
+  final def mapList[A, B](as: List[A])(f: A => B): List[B] = if (as eq Nil) Nil else {
+    val head = new ::[B](f(as.head), Nil)
+    var tail: ::[B] = head
+    var rest = as.tail
+    while (rest ne Nil) {
+      val next = new ::(f(rest.head), Nil)
+      tail.tl = next
+      tail = next
+      rest = rest.tail
+    }
+    head
+  }
+
+  final def collectFirst[A, B](as: List[A])(pf: PartialFunction[A, B]): Option[B] = {
+    @tailrec
+    def loop(rest: List[A]): Option[B] = rest match {
+      case Nil => None
+      case a :: as if pf.isDefinedAt(a) => Some(pf(a))
+      case a :: as => loop(as)
+    }
+    loop(as)
+  }
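For orientation, mapList and collectFirst behave like the standard map and collectFirst; the hand-rolled versions above only exist to cut allocation. A stand-alone check with stand-in definitions (the real ones live in this Collections trait):

    object CollectionsDemo extends App {
      def mapList[A, B](as: List[A])(f: A => B): List[B] = as map f
      def collectFirst[A, B](as: List[A])(pf: PartialFunction[A, B]): Option[B] = as collectFirst pf

      println(mapList(List(1, 2, 3))(_ * 2))                         // List(2, 4, 6)
      println(collectFirst(List(1, 2, 3)) { case x if x > 1 => x })  // Some(2)
    }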
 
   final def map2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => C): List[C] = {
     val lb = new ListBuffer[C]
@@ -54,20 +82,60 @@ trait Collections {
     }
     lb.toList
   }
+
+  /** Like map2, but returns the list `xs` itself - instead of a copy - if function
+   *  `f` maps all elements to themselves.
+   */
+  final def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] = {
+    // Note to developers: this function duplicates the logic of `List#mapConserve`.
+    // If any successful optimizations or other changes are made here, please replicate them there too.
+    @tailrec
+    def loop(mapped: ListBuffer[A], unchanged: List[A], pending0: List[A], pending1: List[B]): List[A] = {
+      if (pending0.isEmpty || pending1.isEmpty) {
+        if (mapped eq null) unchanged
+        else mapped.prependToList(unchanged)
+      } else {
+        val head00 = pending0.head
+        val head01 = pending1.head
+        val head1  = f(head00, head01)
+
+        if ((head1 eq head00.asInstanceOf[AnyRef])) {
+          loop(mapped, unchanged, pending0.tail, pending1.tail)
+        } else {
+          val b = if (mapped eq null) new ListBuffer[A] else mapped
+          var xc = unchanged
+          while ((xc ne pending0) && (xc ne pending1)) {
+            b += xc.head
+            xc = xc.tail
+          }
+          b += head1
+          val tail0 = pending0.tail
+          val tail1 = pending1.tail
+          loop(b, tail0, tail0, tail1)
+        }
+      }
+    }
+    loop(null, xs, xs, ys)
+  }
+
   final def map3[A, B, C, D](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => D): List[D] = {
     if (xs1.isEmpty || xs2.isEmpty || xs3.isEmpty) Nil
     else f(xs1.head, xs2.head, xs3.head) :: map3(xs1.tail, xs2.tail, xs3.tail)(f)
   }
   final def flatMap2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => List[C]): List[C] = {
-    val lb = new ListBuffer[C]
+    var lb: ListBuffer[C] = null
     var ys1 = xs1
     var ys2 = xs2
     while (!ys1.isEmpty && !ys2.isEmpty) {
-      lb ++= f(ys1.head, ys2.head)
+      val cs = f(ys1.head, ys2.head)
+      if (cs ne Nil) {
+        if (lb eq null) lb = new ListBuffer[C]
+        lb ++= cs
+      }
       ys1 = ys1.tail
       ys2 = ys2.tail
     }
-    lb.toList
+    if (lb eq null) Nil else lb.result
   }
 
   final def flatCollect[A, B](elems: List[A])(pf: PartialFunction[A, Traversable[B]]): List[B] = {
@@ -141,7 +209,7 @@ trait Collections {
       ys1 = ys1.tail
       ys2 = ys2.tail
     }
-    buf.result
+    buf.result()
   }
   final def foreach2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Unit): Unit = {
     var ys1 = xs1
@@ -189,18 +257,6 @@ trait Collections {
     }
     false
   }
-  final def forall2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = {
-    var ys1 = xs1
-    var ys2 = xs2
-    while (!ys1.isEmpty && !ys2.isEmpty) {
-      if (!f(ys1.head, ys2.head))
-        return false
-
-      ys1 = ys1.tail
-      ys2 = ys2.tail
-    }
-    true
-  }
   final def forall3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Boolean): Boolean = {
     var ys1 = xs1
     var ys2 = xs2
@@ -216,6 +272,11 @@ trait Collections {
     true
   }
 
+  final def sequence[A](as: List[Option[A]]): Option[List[A]] = {
+    if (as.exists (_.isEmpty)) None
+    else Some(as.flatten)
+  }
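sequence turns a list of Options into an Option of a list, failing if any element is None. A tiny stand-alone check (stand-in copy of the one-liner above):

    object SequenceDemo extends App {
      def sequence[A](as: List[Option[A]]): Option[List[A]] =
        if (as exists (_.isEmpty)) None else Some(as.flatten)

      println(sequence(List(Some(1), Some(2))))  // Some(List(1, 2))
      println(sequence(List(Some(1), None)))     // None
    }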
+
   final def transposeSafe[A](ass: List[List[A]]): Option[List[List[A]]] = try {
     Some(ass.transpose)
   } catch {
@@ -223,5 +284,4 @@ trait Collections {
   }
 }
 
-object Collections extends Collections { }
-
+object Collections extends Collections
diff --git a/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala b/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala
new file mode 100644
index 0000000..8442c10
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala
@@ -0,0 +1,28 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala.reflect.internal
+package util
+
+import java.util.concurrent.ConcurrentHashMap
+import java.util.concurrent.atomic.AtomicLong
+import scala.collection.mutable
+import scala.reflect.NameTransformer
+
+class FreshNameCreator(creatorPrefix: String = "") {
+  protected val counters = new ConcurrentHashMap[String, AtomicLong]()
+
+  /**
+   * Create a fresh name with the given prefix. It is guaranteed
+   * that the returned name has never been returned by a previous
+   * call to this function (provided the prefix does not end in a digit).
+   */
+  def newName(prefix: String): String = {
+    val safePrefix = NameTransformer.encode(prefix)
+    counters.putIfAbsent(safePrefix, new AtomicLong(0))
+    val idx = counters.get(safePrefix).incrementAndGet()
+    s"$creatorPrefix$safePrefix$idx"
+  }
+}
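Usage of the new FreshNameCreator is straightforward: one counter per encoded prefix, with the creator-wide prefix prepended to every result. A small sketch (prefix strings invented):

    object FreshNameDemo extends App {
      val fresh = new scala.reflect.internal.util.FreshNameCreator("qual$")
      println(fresh.newName("x"))   // qual$x1
      println(fresh.newName("x"))   // qual$x2
      println(fresh.newName("ev"))  // qual$ev1 -- independent counter per prefix
    }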
diff --git a/src/reflect/scala/reflect/internal/util/HashSet.scala b/src/reflect/scala/reflect/internal/util/HashSet.scala
index 4135f3c..b4178e0 100644
--- a/src/reflect/scala/reflect/internal/util/HashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/HashSet.scala
@@ -3,11 +3,11 @@
  * @author  Martin Odersky
  */
 
-package scala.reflect.internal.util
+package scala
+package reflect
+package internal.util
 
 object HashSet {
-  def apply[T >: Null <: AnyRef](): HashSet[T] = this(16)
-  def apply[T >: Null <: AnyRef](label: String): HashSet[T] = this(label, 16)
   def apply[T >: Null <: AnyRef](initialCapacity: Int): HashSet[T] = this("No Label", initialCapacity)
   def apply[T >: Null <: AnyRef](label: String, initialCapacity: Int): HashSet[T] =
     new HashSet[T](label, initialCapacity)
diff --git a/src/reflect/scala/reflect/internal/util/Origins.scala b/src/reflect/scala/reflect/internal/util/Origins.scala
index 3259a12..2eb4fa2 100644
--- a/src/reflect/scala/reflect/internal/util/Origins.scala
+++ b/src/reflect/scala/reflect/internal/util/Origins.scala
@@ -3,12 +3,11 @@
  * @author Paul Phillips
  */
 
-package scala.reflect
+package scala
+package reflect
 package internal.util
 
-import NameTransformer._
 import scala.collection.{ mutable, immutable }
-import Origins._
 
 /** A debugging class for logging from whence a method is being called.
  *  Say you wanted to discover who was calling phase_= in SymbolTable.
diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala
index 8f287a1..0192d31 100644
--- a/src/reflect/scala/reflect/internal/util/Position.scala
+++ b/src/reflect/scala/reflect/internal/util/Position.scala
@@ -1,248 +1,210 @@
 /* NSC -- new Scala compiler
  * Copyright 2005-2013 LAMP/EPFL
  * @author  Martin Odersky
- *
  */
 
-package scala.reflect.internal.util
+package scala
+package reflect
+package internal
+package util
 
-import scala.reflect.ClassTag
-import scala.reflect.internal.FatalError
-import scala.reflect.macros.Attachments
+/** @inheritdoc */
+class Position extends scala.reflect.api.Position with InternalPositionImpl with DeprecatedPosition {
+  type Pos = Position
+  def pos: Position = this
+  def withPos(newPos: Position): macros.Attachments { type Pos = Position.this.Pos } = newPos
+
+  protected def fail(what: String) = throw new UnsupportedOperationException(s"Position.$what on $this")
+
+  // If scala-refactoring extends Position directly it seems I have no
+  // choice but to offer all the concrete methods.
+  def isDefined          = false
+  def isRange            = false
+  def source: SourceFile = NoSourceFile
+  def start: Int         = fail("start")
+  def point: Int         = fail("point")
+  def end: Int           = fail("end")
+}
 
 object Position {
   val tabInc = 8
 
+  private def validate[T <: Position](pos: T): T = {
+    if (pos.isRange)
+      assert(pos.start <= pos.end, s"bad position: ${pos.show}")
+
+    pos
+  }
+
   /** Prints the message with the given position indication. */
   def formatMessage(posIn: Position, msg: String, shortenFile: Boolean): String = {
-    val pos = (
-      if (posIn eq null) NoPosition
-      else if (posIn.isDefined) posIn.inUltimateSource(posIn.source)
-      else posIn
-    )
-    def file   = pos.source.file
-    def prefix = if (shortenFile) file.name else file.path
-
-    pos match {
-      case FakePos(fmsg) => fmsg+" "+msg
-      case NoPosition    => msg
-      case _             =>
-        List(
-          "%s:%s: %s".format(prefix, pos.line, msg),
-          pos.lineContent.stripLineEnd,
-          " " * (pos.column - 1) + "^"
-        ) mkString "\n"
+    val pos    = if (posIn eq null) NoPosition else posIn
+    val prefix = pos.source match {
+      case NoSourceFile     => ""
+      case s if shortenFile => s.file.name + ":"
+      case s                => s.file.path + ":"
     }
+    prefix + (pos showError msg)
   }
-}
-
-/** The Position class and its subclasses represent positions of ASTs and symbols.
- *  Except for NoPosition and FakePos, every position refers to a SourceFile
- *  and to an offset in the sourcefile (its `point`). For batch compilation,
- *  that's all. For interactive IDE's there are also RangePositions
- *  and TransparentPositions. A RangePosition indicates a start and an end
- *  in addition to its point. TransparentPositions are a subclass of RangePositions.
- *  Range positions that are not transparent are called opaque.
- *  Trees with RangePositions need to satisfy the following invariants.
- *
- *  INV1: A tree with an offset position never contains a child
- *        with a range position
- *  INV2: If the child of a tree with a range position also has a range position,
- *        then the child's range is contained in the parent's range.
- *  INV3: Opaque range positions of children of the same node are non-overlapping
- *        (this means their overlap is at most a single point).
- *
- *  The following tests are useful on positions:
- *
- *  pos.isDefined     true if position is not a NoPosition nor a FakePosition
- *  pos.isRange       true if position is a range
- *  pos.isOpaqueRange true if position is an opaque range
- *
- *  The following accessor methods are provided:
- *
- *  pos.source        The source file of the position, which must be defined
- *  pos.point         The offset of the position's point, which must be defined
- *  pos.start         The start of the position, which must be a range
- *  pos.end           The end of the position, which must be a range
- *
- *  There are also convenience methods, such as
- *
- *  pos.startOrPoint
- *  pos.endOrPoint
- *  pos.pointOrElse(default)
- *
- *  These are less strict about the kind of position on which they can be applied.
- *
- *  The following conversion methods are often used:
- *
- *  pos.focus           converts a range position to an offset position, keeping its point;
- *                      returns all other positions unchanged.
- *  pos.makeTransparent converts an opaque range position into a transparent one.
- *                      returns all other positions unchanged.
- */
-abstract class Position extends scala.reflect.api.Position { self =>
-
-  type Pos = Position
-
-  def pos: Position = this
-
-  def withPos(newPos: Position): Attachments { type Pos = self.Pos } = newPos
-
-  /** An optional value containing the source file referred to by this position, or
-   *  None if not defined.
-   */
-  def source: SourceFile = throw new UnsupportedOperationException(s"Position.source on ${this.getClass}")
-
-  /** Is this position neither a NoPosition nor a FakePosition?
-   *  If isDefined is true, offset and source are both defined.
-   */
-  def isDefined: Boolean = false
-
-  /** Is this position a transparent position? */
-  def isTransparent: Boolean = false
-
-  /** Is this position a range position? */
-  def isRange: Boolean = false
-
-  /** Is this position a non-transparent range position? */
-  def isOpaqueRange: Boolean = false
 
-  /** if opaque range, make this position transparent */
-  def makeTransparent: Position = this
-
-  /** The start of the position's range, error if not a range position */
-  def start: Int = throw new UnsupportedOperationException(s"Position.start on ${this.getClass}")
-
-  /** The start of the position's range, or point if not a range position */
-  def startOrPoint: Int = point
-
-  /**  The point (where the ^ is) of the position */
-  def point: Int = throw new UnsupportedOperationException(s"Position.point on ${this.getClass}")
-
-  /**  The point (where the ^ is) of the position, or else `default` if undefined */
-  def pointOrElse(default: Int): Int = default
-
-  /** The end of the position's range, error if not a range position */
-  def end: Int = throw new UnsupportedOperationException(s"Position.end on ${this.getClass}")
-
-  /** The end of the position's range, or point if not a range position */
-  def endOrPoint: Int = point
-
-  @deprecated("use point instead", "2.9.0")
-  def offset: Option[Int] = if (isDefined) Some(point) else None
-
-  /** The same position with a different start value (if a range) */
-  def withStart(off: Int): Position = this
-
-  /** The same position with a different end value (if a range) */
-  def withEnd(off: Int): Position = this
-
-  /** The same position with a different point value (if a range or offset) */
-  def withPoint(off: Int): Position = this
-
-  /** The same position with a different source value, and its values shifted by given offset */
-  def withSource(source: SourceFile, shift: Int): Position = this
-
-  /** If this is a range, the union with the other range, with the point of this position.
-   *  Otherwise, this position
-   */
-  def union(pos: Position): Position = this
+  def offset(source: SourceFile, point: Int): Position                            = validate(new OffsetPosition(source, point))
+  def range(source: SourceFile, start: Int, point: Int, end: Int): Position       = validate(new RangePosition(source, start, point, end))
+  def transparent(source: SourceFile, start: Int, point: Int, end: Int): Position = validate(new TransparentPosition(source, start, point, end))
+}
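The new factories replace direct construction of OffsetPosition/RangePosition. A hedged usage sketch (source text and offsets invented; the commented results are what I would expect from the code above):

    import scala.reflect.internal.util.{ BatchSourceFile, Position }

    object PositionDemo extends App {
      val src = new BatchSourceFile("<demo>", "val x = 1\nval y = 2\n")
      val pos = Position.range(src, start = 4, point = 4, end = 5)
      println(pos.line)         // 1
      println(pos.lineContent)  // val x = 1
      println(pos.show)         // [4:5]
    }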
 
-  /** If this is a range position, the offset position of its start.
-   *  Otherwise the position itself
-   */
-  def focusStart: Position = this
+class OffsetPosition(sourceIn: SourceFile, pointIn: Int) extends DefinedPosition {
+  override def isRange = false
+  override def source  = sourceIn
+  override def point   = pointIn
+  override def start   = point
+  override def end     = point
+}
+class RangePosition(sourceIn: SourceFile, startIn: Int, pointIn: Int, endIn: Int) extends OffsetPosition(sourceIn, pointIn) {
+  override def isRange = true
+  override def start   = startIn
+  override def end     = endIn
+}
+class TransparentPosition(sourceIn: SourceFile, startIn: Int, pointIn: Int, endIn: Int) extends RangePosition(sourceIn, startIn, pointIn, endIn) {
+  override def isTransparent = true
+}
+case object NoPosition extends UndefinedPosition
+case class FakePos(msg: String) extends UndefinedPosition {
+  override def toString = msg
+}
 
-  /** If this is a range position, the offset position of its point.
-   *  Otherwise the position itself
-   */
-  def focus: Position = this
+sealed abstract class DefinedPosition extends Position {
+  final override def isDefined = true
+  override def equals(that: Any) = that match {
+    case that: DefinedPosition => source.file == that.source.file && start == that.start && point == that.point && end == that.end
+    case _                     => false
+  }
+  override def hashCode = Seq[Any](source.file, start, point, end).##
+  override def toString = (
+    if (isRange) s"RangePosition($canonicalPath, $start, $point, $end)"
+    else s"source-$canonicalPath,line-$line,$pointMessage$point"
+  )
+  private def pointMessage  = if (point > source.length) "out-of-bounds-" else "offset="
+  private def canonicalPath = source.file.canonicalPath
+}
 
-  /** If this is a range position, the offset position of its end.
-   *  Otherwise the position itself
-   */
-  def focusEnd: Position = this
+sealed abstract class UndefinedPosition extends Position {
+  final override def isDefined = false
+  override def isRange         = false
+  override def source          = NoSourceFile
+  override def start           = fail("start")
+  override def point           = fail("point")
+  override def end             = fail("end")
+}
 
-  /** Does this position include the given position `pos`.
-   *  This holds if `this` is a range position and its range [start..end]
-   *  is the same or covers the range of the given position, which may or may not be a range position.
-   */
-  def includes(pos: Position): Boolean = false
+private[util] trait InternalPositionImpl {
+  self: Position =>
 
-  /** Does this position properly include the given position `pos` ("properly" meaning their
-   *  ranges are not the same)?
-   */
-  def properlyIncludes(pos: Position): Boolean =
-    includes(pos) && (start < pos.startOrPoint || pos.endOrPoint < end)
+  // The methods which would be abstract in Position if it were
+  // possible to change Position.
+  def isDefined: Boolean
+  def isRange: Boolean
+  def source: SourceFile
+  def start: Int
+  def point: Int
+  def end: Int
 
-  /** Does this position precede that position?
-   *  This holds if both positions are defined and the end point of this position
-   *  is not larger than the start point of the given position.
+  /** Map this position to its position in the original source file
+   *  (which may be this position unchanged).
    */
-  def precedes(pos: Position): Boolean =
-    isDefined && pos.isDefined && endOrPoint <= pos.startOrPoint
+  def finalPosition: Pos = source positionInUltimateSource this
 
-  /** Does this position properly precede the given position `pos` ("properly" meaning their ranges
-   *  do not share a common point).
-   */
-  def properlyPrecedes(pos: Position): Boolean =
-    isDefined && pos.isDefined && endOrPoint < pos.startOrPoint
+  def isTransparent              = false
+  def isOffset                   = isDefined && !isRange
+  def isOpaqueRange              = isRange && !isTransparent
+  def pointOrElse(alt: Int): Int = if (isDefined) point else alt
+  def makeTransparent: Position  = if (isOpaqueRange) Position.transparent(source, start, point, end) else this
 
-  /** Does this position overlap with that position?
-   *  This holds if both positions are ranges and there is an interval of
-   *  non-zero length that is shared by both position ranges.
+  /** Copy a range position with a changed value.
    */
-  def overlaps(pos: Position): Boolean =
-    isRange && pos.isRange &&
-    ((pos.start < end && start < pos.end) || (start < pos.end && pos.start < end))
+  def withStart(start: Int): Position          = copyRange(start = start)
+  def withPoint(point: Int): Position          = if (isRange) copyRange(point = point) else Position.offset(source, point)
+  def withEnd(end: Int): Position              = copyRange(end = end)
+  def withSource(source: SourceFile): Position = copyRange(source = source)
+  def withShift(shift: Int): Position          = Position.range(source, start + shift, point + shift, end + shift)
 
-  /** Does this position cover the same range as that position?
-   *  Holds only if both position are ranges
+  /** Convert a range position to a simple offset.
    */
-  def sameRange(pos: Position): Boolean =
-    isRange && pos.isRange && start == pos.start && end == pos.end
-
-  def line: Int = throw new UnsupportedOperationException("Position.line")
-
-  def column: Int = throw new UnsupportedOperationException("Position.column")
-
-  /** Convert this to a position around `point` that spans a single source line */
-  def toSingleLine: Position = this
-
-  def lineContent: String =
-    if (isDefined) source.lineToString(line - 1)
-    else "NO_LINE"
-
-  /** Map this position to a position in an original source
-   * file.  If the SourceFile is a normal SourceFile, simply
-   * return this.
+  def focusStart: Position = if (this.isRange) asOffset(start) else this
+  def focus: Position      = if (this.isRange) asOffset(point) else this
+  def focusEnd: Position   = if (this.isRange) asOffset(end) else this
+
+  /** If you have it in for punctuation you might not like these methods.
+   *  However I think they're aptly named.
+   *
+   *    |   means union
+   *    ^   means "the point" (look, it's a caret)
+   *    |^  means union, taking the point of the rhs
+   *    ^|  means union, taking the point of the lhs
    */
-  def inUltimateSource(source : SourceFile): Position =
-    if (source == null) this else source.positionInUltimateSource(this)
-
-  def dbgString: String = toString
-  def safeLine: Int = try line catch { case _: UnsupportedOperationException => -1 }
-
-  def show: String = "["+toString+"]"
-}
-
-case object NoPosition extends Position {
-  override def dbgString = toString
-}
-
-case class FakePos(msg: String) extends Position {
-  override def toString = msg
-}
-
-class OffsetPosition(override val source: SourceFile, override val point: Int) extends Position {
-  override def isDefined = true
-  override def pointOrElse(default: Int): Int = point
-  override def withPoint(off: Int) = new OffsetPosition(source, off)
-  override def withSource(source: SourceFile, shift: Int) = new OffsetPosition(source, point + shift)
-
-  override def line: Int = source.offsetToLine(point) + 1
-
-  override def column: Int = {
+  def |(that: Position, poses: Position*): Position = poses.foldLeft(this | that)(_ | _)
+  def |(that: Position): Position                   = this union that
+  def ^(point: Int): Position                       = this withPoint point
+  def |^(that: Position): Position                  = (this | that) ^ that.point
+  def ^|(that: Position): Position                  = (this | that) ^ this.point
+
+  def union(pos: Position): Position = (
+    if (!pos.isRange) this
+    else if (this.isRange) copyRange(start = start min pos.start, end = end max pos.end)
+    else pos
+  )
+
+  def includes(pos: Position): Boolean         = isRange && pos.isDefined && start <= pos.start && pos.end <= end
+  def properlyIncludes(pos: Position): Boolean = includes(pos) && (start < pos.start || pos.end < end)
+  def precedes(pos: Position): Boolean         = bothDefined(pos) && end <= pos.start
+  def properlyPrecedes(pos: Position): Boolean = bothDefined(pos) && end < pos.start
+  def sameRange(pos: Position): Boolean        = bothRanges(pos) && start == pos.start && end == pos.end
+  // This works because it's a range position invariant that S1 < E1 and S2 < E2.
+  // So if S1 < E2 and S2 < E1, then both starts precede both ends, which is the
+  // necessary condition to establish that there is overlap.
+  def overlaps(pos: Position): Boolean         = bothRanges(pos) && start < pos.end && pos.start < end
+
+  def line: Int           = if (hasSource) source.offsetToLine(point) + 1 else 0
+  def column: Int         = if (hasSource) calculateColumn() else 0
+  def lineContent: String = if (hasSource) source.lineToString(line - 1) else ""
+  def lineCaret: String   = if (hasSource) " " * (column - 1) + "^" else ""
+  @deprecated("use `lineCaret`", since="2.11.0")
+  def lineCarat: String   = lineCaret
+
+  def showError(msg: String): String = {
+    def escaped(s: String) = {
+      def u(c: Int) = f"\\u$c%04x"
+      def uable(c: Int) = (c < 0x20 && c != '\t') || c == 0x7F
+      if (s exists (c => uable(c))) {
+        val sb = new StringBuilder
+        s foreach (c => sb append (if (uable(c)) u(c) else c))
+        sb.toString
+      } else s
+    }
+    def errorAt(p: Pos) = {
+      def where     = p.line
+      def content   = escaped(p.lineContent)
+      def indicator = p.lineCaret
+      f"$where: $msg%n$content%n$indicator"
+    }
+    finalPosition match {
+      case FakePos(fmsg) => s"$fmsg $msg"
+      case NoPosition    => msg
+      case pos           => errorAt(pos)
+    }
+  }
+  def showDebug: String = toString
+  def show = (
+    if (isOpaqueRange) s"[$start:$end]"
+    else if (isTransparent) s"<$start:$end>"
+    else if (isDefined) s"[$point]"
+    else "[NoPosition]"
+  )
+
+  private def asOffset(point: Int): Position = Position.offset(source, point)
+  private def copyRange(source: SourceFile = source, start: Int = start, point: Int = point, end: Int = end): Position =
+    Position.range(source, start, point, end)
+
+  private def calculateColumn(): Int = {
     var idx = source.lineToOffset(source.offsetToLine(point))
     var col = 0
     while (idx != point) {
@@ -251,61 +213,39 @@ class OffsetPosition(override val source: SourceFile, override val point: Int) e
     }
     col + 1
   }
+  private def hasSource                      = source ne NoSourceFile
+  private def bothRanges(that: Position)     = isRange && that.isRange
+  private def bothDefined(that: Position)    = isDefined && that.isDefined
+}
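The punctuation operators defined above compose naturally. A short hedged sketch (offsets invented; expected results in comments):

    import scala.reflect.internal.util.{ BatchSourceFile, Position }

    object PositionOpsDemo extends App {
      val src = new BatchSourceFile("<demo>", "abcdefghij")
      val a   = Position.range(src, 0, 1, 3)
      val b   = Position.range(src, 5, 6, 8)
      println((a | b).show)    // [0:8]  -- union of the two ranges
      println((a |^ b).point)  // 6      -- union, point taken from the right-hand side
    }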
 
-  override def union(pos: Position) = if (pos.isRange) pos else this
+/** Holding cell for methods unused and/or unnecessary. */
+private[util] trait DeprecatedPosition {
+  self: Position =>
 
-  override def equals(that : Any) = that match {
-    case that : OffsetPosition => point == that.point && source.file == that.source.file
-    case that => false
-  }
-  override def hashCode = point * 37 + source.file.hashCode
+  @deprecated("use `point`", "2.9.0") // Used in SBT 0.12.4
+  def offset: Option[Int] = if (isDefined) Some(point) else None
 
-  override def toString = {
-    val pointmsg = if (point > source.length) "out-of-bounds-" else "offset="
-    "source-%s,line-%s,%s%s".format(source.file.canonicalPath, line, pointmsg, point)
-  }
-  override def show = "["+point+"]"
-}
+  @deprecated("use `focus`", "2.11.0")
+  def toSingleLine: Position = this
 
-/** new for position ranges */
-class RangePosition(source: SourceFile, override val start: Int, point: Int, override val end: Int)
-extends OffsetPosition(source, point) {
-  if (start > end) sys.error("bad position: "+show)
-  override def isRange: Boolean = true
-  override def isOpaqueRange: Boolean = true
-  override def startOrPoint: Int = start
-  override def endOrPoint: Int = end
-  override def withStart(off: Int) = new RangePosition(source, off, point, end)
-  override def withEnd(off: Int) = new RangePosition(source, start, point, off)
-  override def withPoint(off: Int) = new RangePosition(source, start, off, end)
-  override def withSource(source: SourceFile, shift: Int) = new RangePosition(source, start + shift, point + shift, end + shift)
-  override def focusStart = new OffsetPosition(source, start)
-  override def focus = {
-    if (focusCache eq NoPosition) focusCache = new OffsetPosition(source, point)
-    focusCache
-  }
-  override def focusEnd = new OffsetPosition(source, end)
-  override def makeTransparent = new TransparentPosition(source, start, point, end)
-  override def includes(pos: Position) = pos.isDefined && start <= pos.startOrPoint && pos.endOrPoint <= end
-  override def union(pos: Position): Position =
-    if (pos.isRange) new RangePosition(source, start min pos.start, point, end max pos.end) else this
-
-  override def toSingleLine: Position = source match {
-    case bs: BatchSourceFile
-    if end > 0 && bs.offsetToLine(start) < bs.offsetToLine(end - 1) =>
-      val pointLine = bs.offsetToLine(point)
-      new RangePosition(source, bs.lineToOffset(pointLine), point, bs.lineToOffset(pointLine + 1))
-    case _ => this
-  }
+  @deprecated("use `line`", "2.11.0")
+  def safeLine: Int = line
 
-  override def toString = "RangePosition("+source.file.canonicalPath+", "+start+", "+point+", "+end+")"
-  override def show = "["+start+":"+end+"]"
-  private var focusCache: Position = NoPosition
-}
+  @deprecated("use `showDebug`", "2.11.0")
+  def dbgString: String = showDebug
 
-class TransparentPosition(source: SourceFile, start: Int, point: Int, end: Int) extends RangePosition(source, start, point, end) {
-  override def isOpaqueRange: Boolean = false
-  override def isTransparent = true
-  override def makeTransparent = this
-  override def show = "<"+start+":"+end+">"
+  @deprecated("use `finalPosition`", "2.11.0")
+  def inUltimateSource(source: SourceFile): Position = source positionInUltimateSource this
+
+  @deprecated("use `lineCaret`", since="2.11.0")
+  def lineWithCarat(maxWidth: Int): (String, String) = ("", "")
+
+  @deprecated("Use `withSource(source)` and `withShift`", "2.11.0")
+  def withSource(source: SourceFile, shift: Int): Position = this withSource source withShift shift
+
+  @deprecated("Use `start` instead", "2.11.0")
+  def startOrPoint: Int = if (isRange) start else point
+
+  @deprecated("Use `end` instead", "2.11.0")
+  def endOrPoint: Int = if (isRange) end else point
 }
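
The DeprecatedPosition trait above only forwards to the surviving accessors, so the 2.11 replacements are one-to-one. A minimal sketch of the new calls, assuming a scala-reflect 2.11 jar on the classpath (the file name and offsets below are arbitrary):

    import scala.reflect.internal.util.{ BatchSourceFile, Position }

    object PositionMigrationSketch extends App {
      val src = new BatchSourceFile("Sketch.scala", "object O { def f = 42 }")
      val pos = Position.range(src, 11, 15, 21)   // start, point, end

      // Replacements for the deprecated accessors kept alive above:
      println((pos.start, pos.point, pos.end))    // instead of startOrPoint / offset / endOrPoint
      println(pos.focus.show)                     // instead of toSingleLine; prints "[15]"
      println(pos.line)                           // instead of safeLine
    }
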
diff --git a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
new file mode 100644
index 0000000..63ea6e2
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
@@ -0,0 +1,124 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala
+package reflect.internal.util
+
+import java.lang.{ ClassLoader => JClassLoader }
+import java.lang.reflect.{ Constructor, Modifier, Method }
+import java.io.{ File => JFile }
+import java.net.{ URLClassLoader => JURLClassLoader }
+import java.net.URL
+import scala.reflect.runtime.ReflectionUtils.unwrapHandler
+import ScalaClassLoader._
+import scala.util.control.Exception.{ catching }
+import scala.language.implicitConversions
+import scala.reflect.{ ClassTag, classTag }
+
+trait HasClassPath {
+  def classPathURLs: Seq[URL]
+}
+
+/** A wrapper around java.lang.ClassLoader to lower the annoyance
+ *  of java reflection.
+ */
+trait ScalaClassLoader extends JClassLoader {
+  /** Execute an action with this classloader as the context classloader. */
+  def asContext[T](action: => T): T = {
+    val saved = contextLoader
+    try { setContext(this) ; action }
+    finally setContext(saved)
+  }
+  def setAsContext() { setContext(this) }
+
+  /** Load and link a class with this classloader */
+  def tryToLoadClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, initialize = false)
+  /** Load, link and initialize a class with this classloader */
+  def tryToInitializeClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, initialize = true)
+
+  private def tryClass[T <: AnyRef](path: String, initialize: Boolean): Option[Class[T]] =
+    catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt
+      Class.forName(path, initialize, this).asInstanceOf[Class[T]]
+
+  /** Create an instance of a class with this classloader */
+  def create(path: String): AnyRef =
+    tryToInitializeClass[AnyRef](path).map(_.newInstance()).orNull
+
+  /** The actual bytes for a class file, or an empty array if it can't be found. */
+  def classBytes(className: String): Array[Byte] = classAsStream(className) match {
+    case null   => Array()
+    case stream => scala.reflect.io.Streamable.bytes(stream)
+  }
+
+  /** An InputStream representing the given class name, or null if not found. */
+  def classAsStream(className: String) =
+    getResourceAsStream(className.replaceAll("""\.""", "/") + ".class")
+
+  /** Run the main method of a class to be loaded by this classloader */
+  def run(objectName: String, arguments: Seq[String]) {
+    val clsToRun = tryToInitializeClass(objectName) getOrElse (
+      throw new ClassNotFoundException(objectName)
+    )
+    val method = clsToRun.getMethod("main", classOf[Array[String]])
+    if (!Modifier.isStatic(method.getModifiers))
+      throw new NoSuchMethodException(objectName + ".main is not static")
+
+    try asContext(method.invoke(null, Array(arguments.toArray: AnyRef): _*)) // !!! : AnyRef shouldn't be necessary
+    catch unwrapHandler({ case ex => throw ex })
+  }
+}
+
+/** Methods for obtaining various classloaders.
+ *      appLoader: the application classloader.  (Also called the java system classloader.)
+ *      extLoader: the extension classloader.
+ *     bootLoader: the boot classloader.
+ *  contextLoader: the context classloader.
+ */
+object ScalaClassLoader {
+  /** Returns loaders which are already ScalaClassLoaders unaltered,
+   *  and translates java.net.URLClassLoaders into scala URLClassLoaders.
+   *  Otherwise creates a new wrapper.
+   */
+  implicit def apply(cl: JClassLoader): ScalaClassLoader = cl match {
+    case cl: ScalaClassLoader => cl
+    case cl: JURLClassLoader  => new URLClassLoader(cl.getURLs.toSeq, cl.getParent)
+    case _                    => new JClassLoader(cl) with ScalaClassLoader
+  }
+  def contextLoader = apply(Thread.currentThread.getContextClassLoader)
+  def appLoader     = apply(JClassLoader.getSystemClassLoader)
+  def setContext(cl: JClassLoader) =
+    Thread.currentThread.setContextClassLoader(cl)
+  def savingContextLoader[T](body: => T): T = {
+    val saved = contextLoader
+    try body
+    finally setContext(saved)
+  }
+
+  class URLClassLoader(urls: Seq[URL], parent: JClassLoader)
+      extends JURLClassLoader(urls.toArray, parent)
+         with ScalaClassLoader
+         with HasClassPath {
+
+    private var classloaderURLs: Seq[URL] = urls
+    def classPathURLs: Seq[URL] = classloaderURLs
+
+    /** Override to widen to public */
+    override def addURL(url: URL) = {
+      classloaderURLs :+= url
+      super.addURL(url)
+    }
+  }
+
+  def fromURLs(urls: Seq[URL], parent: ClassLoader = null): URLClassLoader =
+    new URLClassLoader(urls, parent)
+
+  /** True if supplied class exists in supplied path */
+  def classExists(urls: Seq[URL], name: String): Boolean =
+    (fromURLs(urls) tryToLoadClass name).isDefined
+
+  /** Finding what jar a clazz or instance came from */
+  def originOfClass(x: Class[_]): Option[URL] =
+    Option(x.getProtectionDomain.getCodeSource) flatMap (x => Option(x.getLocation))
+}
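
For reference, a minimal sketch of the helpers this new file provides (the jar path and class name are placeholders, not part of the patch):

    import java.io.File
    import scala.reflect.internal.util.ScalaClassLoader

    object ClassLoaderSketch extends App {
      // Hypothetical jar; point this at any jar that actually exists.
      val urls   = Seq(new File("lib/extra.jar").toURI.toURL)
      val loader = ScalaClassLoader.fromURLs(urls)

      // Load and link without initializing; None when the class is absent.
      println(loader.tryToLoadClass[AnyRef]("com.example.Plugin").isDefined)

      // Which jar did a class we already hold come from?
      println(ScalaClassLoader.originOfClass(classOf[scala.collection.immutable.List[_]]))
    }
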
diff --git a/src/reflect/scala/reflect/internal/util/Set.scala b/src/reflect/scala/reflect/internal/util/Set.scala
index 36bdb81..635bfb0 100644
--- a/src/reflect/scala/reflect/internal/util/Set.scala
+++ b/src/reflect/scala/reflect/internal/util/Set.scala
@@ -2,7 +2,8 @@
  * Copyright 2005-2013 LAMP/EPFL
  * @author  Martin Odersky
  */
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
 
 /** A common class for lightweight sets.
  */
@@ -18,11 +19,8 @@ abstract class Set[T <: AnyRef] {
 
   def apply(x: T): Boolean = contains(x)
 
-  @deprecated("use `iterator` instead", "2.9.0") def elements = iterator
-
   def contains(x: T): Boolean =
     findEntry(x) ne null
 
   def toList = iterator.toList
-
 }
diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala
index bc2d0ee..4fccad7 100644
--- a/src/reflect/scala/reflect/internal/util/SourceFile.scala
+++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala
@@ -4,7 +4,8 @@
  */
 
 
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
 
 import scala.reflect.io.{ AbstractFile, VirtualFile }
 import scala.collection.mutable.ArrayBuffer
@@ -15,16 +16,16 @@ import scala.reflect.internal.Chars._
 
 /** abstract base class of a source file used in the compiler */
 abstract class SourceFile {
-  def content : Array[Char]         // normalized, must end in SU
-  def file    : AbstractFile
-  def isLineBreak(idx : Int) : Boolean
+  def content: Array[Char]         // normalized, must end in SU
+  def file   : AbstractFile
+  def isLineBreak(idx: Int): Boolean
+  def isEndOfLine(idx: Int): Boolean
   def isSelfContained: Boolean
   def length : Int
-  def position(offset: Int) : Position = {
+  def position(offset: Int): Position = {
     assert(offset < length, file + ": " + offset + " >= " + length)
-    new OffsetPosition(this, offset)
+    Position.offset(this, offset)
   }
-  def position(line: Int, column: Int) : Position = new OffsetPosition(this, lineToOffset(line) + column)
 
   def offsetToLine(offset: Int): Int
   def lineToOffset(index : Int): Int
@@ -34,14 +35,14 @@ abstract class SourceFile {
    */
   def positionInUltimateSource(position: Position) = position
   override def toString() = file.name
-  def dbg(offset: Int) = (new OffsetPosition(this, offset)).dbgString
   def path = file.path
 
-  def beginsWith(offset: Int, text: String): Boolean =
-    (content drop offset) startsWith text
-
-  def lineToString(index: Int): String =
-    content drop lineToOffset(index) takeWhile (c => !isLineBreakChar(c.toChar)) mkString ""
+  def lineToString(index: Int): String = {
+    val start = lineToOffset(index)
+    var end = start
+    while (!isEndOfLine(end) && end <= length) end += 1
+    new String(content, start, end - start)
+  }
 
   @tailrec
   final def skipWhitespace(offset: Int): Int =
@@ -56,6 +57,7 @@ object NoSourceFile extends SourceFile {
   def content                   = Array()
   def file                      = NoFile
   def isLineBreak(idx: Int)     = false
+  def isEndOfLine(idx: Int)     = false
   def isSelfContained           = true
   def length                    = -1
   def offsetToLine(offset: Int) = -1
@@ -81,7 +83,6 @@ object ScriptSourceFile {
     }
     else 0
   }
-  def stripHeader(cs: Array[Char]): Array[Char] = cs drop headerLength(cs)
 
   def apply(file: AbstractFile, content: Array[Char]) = {
     val underlying = new BatchSourceFile(file, content)
@@ -90,19 +91,23 @@ object ScriptSourceFile {
 
     stripped
   }
+
+  def apply(underlying: BatchSourceFile) = {
+    val headerLen = headerLength(underlying.content)
+    new ScriptSourceFile(underlying, underlying.content drop headerLen, headerLen)
+  }
 }
-import ScriptSourceFile._
 
 class ScriptSourceFile(underlying: BatchSourceFile, content: Array[Char], override val start: Int) extends BatchSourceFile(underlying.file, content) {
   override def isSelfContained = false
 
   override def positionInUltimateSource(pos: Position) =
     if (!pos.isDefined) super.positionInUltimateSource(pos)
-    else pos.withSource(underlying, start)
+    else pos withSource underlying withShift start
 }
 
 /** a file whose contents do not change over time */
-class BatchSourceFile(val file : AbstractFile, val content0: Array[Char]) extends SourceFile {
+class BatchSourceFile(val file : AbstractFile, content0: Array[Char]) extends SourceFile {
   def this(_file: AbstractFile)                 = this(_file, _file.toCharArray)
   def this(sourceName: String, cs: Seq[Char])   = this(new VirtualFile(sourceName), cs.toArray)
   def this(file: AbstractFile, cs: Seq[Char])   = this(file, cs.toArray)
@@ -129,18 +134,30 @@ class BatchSourceFile(val file : AbstractFile, val content0: Array[Char]) extend
       super.identifier(pos)
     }
 
-  def isLineBreak(idx: Int) =
-    if (idx >= length) false else {
-      val ch = content(idx)
-      // don't identify the CR in CR LF as a line break, since LF will do.
-      if (ch == CR) (idx + 1 == length) || (content(idx + 1) != LF)
-      else isLineBreakChar(ch)
-    }
+  private def charAtIsEOL(idx: Int)(p: Char => Boolean) = {
+    // don't identify the CR in CR LF as a line break, since LF will do.
+    def notCRLF0 = content(idx) != CR || !content.isDefinedAt(idx + 1) || content(idx + 1) != LF
+
+    idx < length && notCRLF0 && p(content(idx))
+  }
+
+  def isLineBreak(idx: Int) = charAtIsEOL(idx)(isLineBreakChar)
+
+  /** True if the index is included by an EOL sequence. */
+  def isEndOfLine(idx: Int) = (content isDefinedAt idx) && PartialFunction.cond(content(idx)) {
+    case CR | LF => true
+  }
+
+  /** True if the index is the end of an EOL sequence. */
+  def isAtEndOfLine(idx: Int) = charAtIsEOL(idx) {
+    case CR | LF => true
+    case _       => false
+  }
 
   def calculateLineIndices(cs: Array[Char]) = {
     val buf = new ArrayBuffer[Int]
     buf += 0
-    for (i <- 0 until cs.length) if (isLineBreak(i)) buf += i + 1
+    for (i <- 0 until cs.length) if (isAtEndOfLine(i)) buf += i + 1
     buf += cs.length // sentinel, so that findLine below works smoother
     buf.toArray
   }
@@ -150,15 +167,17 @@ class BatchSourceFile(val file : AbstractFile, val content0: Array[Char]) extend
 
   private var lastLine = 0
 
-  /** Convert offset to line in this source file
-   *  Lines are numbered from 0
+  /** Convert offset to line in this source file.
+   *  Lines are numbered from 0.
    */
   def offsetToLine(offset: Int): Int = {
     val lines = lineIndices
-    def findLine(lo: Int, hi: Int, mid: Int): Int =
-      if (offset < lines(mid)) findLine(lo, mid - 1, (lo + mid - 1) / 2)
+    def findLine(lo: Int, hi: Int, mid: Int): Int = (
+      if (mid < lo || hi < mid) mid // minimal sanity check - as written this easily went into infinite loopyland
+      else if (offset < lines(mid)) findLine(lo, mid - 1, (lo + mid - 1) / 2)
       else if (offset >= lines(mid + 1)) findLine(mid + 1, hi, (mid + 1 + hi) / 2)
       else mid
+    )
     lastLine = findLine(0, lines.length, lastLine)
     lastLine
   }
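
The net effect of the new isEndOfLine/isAtEndOfLine split is that a CR LF pair is indexed as a single line break. A small sketch of the observable behaviour:

    import scala.reflect.internal.util.BatchSourceFile

    object LineIndexSketch extends App {
      val text = "line zero\nline one\r\nline two"
      val src  = new BatchSourceFile("Sketch.scala", text)

      println(src.offsetToLine(0))                    // 0
      println(src.offsetToLine(text indexOf "two"))   // 2: the CR LF pair is one break
      println(src.lineToString(1))                    // "line one", terminator excluded
    }
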
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index cbd27b0..905f1bf 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -1,4 +1,5 @@
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
 
 import scala.collection.mutable
 
@@ -102,8 +103,8 @@ quant)
     for ((_, q) <- qs if q.underlying == q;
          r <- q :: q.children.toList if r.prefix.nonEmpty) yield r
 
-  private def showPercent(x: Double, base: Double) =
-    if (base == 0) "" else f" (${x / base * 100}%2.1f%%)"
+  private def showPercent(x: Long, base: Long) =
+    if (base == 0) "" else f" (${x.toDouble / base.toDouble * 100}%2.1f%%)"
 
   /** The base trait for quantities.
    *  Quantities with non-empty prefix are printed in the statistics info.
@@ -132,6 +133,12 @@ quant)
       if (this.value < that.value) -1
       else if (this.value > that.value) 1
       else 0
+    override def equals(that: Any): Boolean =
+      that match {
+        case that: Counter => (this compare that) == 0
+        case _ => false
+      }
+    override def hashCode = value
     override def toString = value.toString
   }
 
@@ -155,7 +162,7 @@ quant)
       value = value0 + underlying.value - uvalue0
     }
     override def toString =
-      value + showPercent(value, underlying.value)
+      value + showPercent(value.toLong, underlying.value.toLong)
   }
 
   class Timer(val prefix: String, val phases: Seq[String]) extends Quantity {
@@ -183,6 +190,12 @@ quant)
       if (this.specificNanos < that.specificNanos) -1
       else if (this.specificNanos > that.specificNanos) 1
       else 0
+    override def equals(that: Any): Boolean =
+      that match {
+        case that: StackableTimer => (this compare that) == 0
+        case _ => false
+      }
+    override def hashCode = specificNanos.##
     override def toString = s"${super.toString} aggregate, ${show(specificNanos)} specific"
   }
 
@@ -257,7 +270,6 @@ quant)
   def enabled = _enabled
   def enabled_=(cond: Boolean) = {
     if (cond && !_enabled) {
-      val test = new Timer("", Nil)
       val start = System.nanoTime()
       var total = 0L
       for (i <- 1 to 10000) {
diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala
index 8f6c409..efb8126 100644
--- a/src/reflect/scala/reflect/internal/util/StringOps.scala
+++ b/src/reflect/scala/reflect/internal/util/StringOps.scala
@@ -6,8 +6,12 @@
 **                          |/                                          **
 \*                                                                      */
 
+package scala
+package reflect
+package internal
+package util
 
-package scala.reflect.internal.util
+import scala.compat.Platform.EOL
 
 /** This object provides utility methods to extract elements
  *  from Strings.
@@ -16,24 +20,32 @@ package scala.reflect.internal.util
  *  @version 1.0
  */
 trait StringOps {
-  def onull(s: String)                            = if (s == null) "" else s
-  def oempty(xs: String*)                         = xs filterNot (x => x == null || x == "")
-  def ojoin(xs: String*): String                  = oempty(xs: _*) mkString " "
-  def ojoin(xs: Seq[String], sep: String): String = oempty(xs: _*) mkString sep
-  def ojoinOr(xs: Seq[String], sep: String, orElse: String) = {
-    val ys = oempty(xs: _*)
-    if (ys.isEmpty) orElse else ys mkString sep
+  def oempty(xs: String*)        = xs filterNot (x => x == null || x == "")
+  def ojoin(xs: String*): String = oempty(xs: _*) mkString " "
+  def longestCommonPrefix(xs: List[String]): String = xs match {
+    case Nil      => ""
+    case w :: Nil => w
+    case _        =>
+      def lcp(ss: List[String]): String = {
+        val w :: ws = ss
+        if (w == "") ""
+        else if (ws exists (s => s == "" || (s charAt 0) != (w charAt 0))) ""
+        else w.substring(0, 1) + lcp(ss map (_ substring 1))
+      }
+      lcp(xs)
   }
-  def trimTrailingSpace(s: String) = {
-    if (s.length == 0 || !s.charAt(s.length - 1).isWhitespace) s
-    else {
-      var idx = s.length - 1
-      while (idx >= 0 && s.charAt(idx).isWhitespace)
-        idx -= 1
+  /** Like String#trim, but trailing whitespace only.
+   */
+  def trimTrailingSpace(s: String): String = {
+    var end = s.length
+    while (end > 0 && s.charAt(end - 1).isWhitespace)
+      end -= 1
 
-      s.substring(0, idx + 1)
-    }
+    if (end == s.length) s
+    else s.substring(0, end)
   }
+  /** Breaks the string into lines and strips trailing whitespace from each line before reassembling. */
+  def trimAllTrailingSpace(s: String): String = s.lines map trimTrailingSpace mkString EOL
 
   def decompose(str: String, sep: Char): List[String] = {
     def ws(start: Int): List[String] =
@@ -49,14 +61,6 @@ trait StringOps {
 
   def words(str: String): List[String] = decompose(str, ' ')
 
-  def stripPrefixOpt(str: String, prefix: String): Option[String] =
-    if (str startsWith prefix) Some(str drop prefix.length)
-    else None
-
-  def stripSuffixOpt(str: String, suffix: String): Option[String] =
-    if (str endsWith suffix) Some(str dropRight suffix.length)
-    else None
-
   def splitWhere(str: String, f: Char => Boolean, doDropIndex: Boolean = false): Option[(String, String)] =
     splitAt(str, str indexWhere f, doDropIndex)
 
@@ -65,10 +69,6 @@ trait StringOps {
     else Some((str take idx, str drop (if (doDropIndex) idx + 1 else idx)))
 
   /** Returns a string meaning "n elements".
-   *
-   *  @param n        ...
-   *  @param elements ...
-   *  @return         ...
    */
   def countElementsAsString(n: Int, elements: String): String =
     n match {
@@ -81,9 +81,6 @@ trait StringOps {
     }
 
   /** Turns a count into a friendly English description if n<=4.
-   *
-   *  @param n        ...
-   *  @return         ...
    */
   def countAsString(n: Int): String =
     n match {
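
A quick sketch of the two new helpers, going through the StringOps companion that the util package object also imports from:

    import scala.reflect.internal.util.StringOps

    object StringOpsSketch extends App {
      // Longest prefix shared by every word; an empty list yields "".
      println(StringOps.longestCommonPrefix(List("interpolate", "internal", "interface")))  // inter

      // Only trailing whitespace is removed; leading indentation is preserved.
      println("[" + StringOps.trimTrailingSpace("  keep indent   ") + "]")                  // [  keep indent]
    }
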
diff --git a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
index e757922..e622e78 100644
--- a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
+++ b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 package util
 
@@ -6,7 +7,7 @@ trait StripMarginInterpolator {
   def stringContext: StringContext
 
   /**
-   * A safe combination of `[[scala.collection.immutable.StringLike#stripMargin]]
+   * A safe combination of [[scala.collection.immutable.StringLike#stripMargin]]
    * and [[scala.StringContext#raw]].
    *
    * The margin of each line is defined by whitespace leading up to a '|' character.
diff --git a/src/reflect/scala/reflect/internal/util/TableDef.scala b/src/reflect/scala/reflect/internal/util/TableDef.scala
index 8e2bcc2..8708442 100644
--- a/src/reflect/scala/reflect/internal/util/TableDef.scala
+++ b/src/reflect/scala/reflect/internal/util/TableDef.scala
@@ -1,38 +1,27 @@
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
 
 import TableDef._
 import scala.language.postfixOps
 
 /** A class for representing tabular data in a way that preserves
- *  its inner beauty.  See Exceptional for an example usage.
+ *  its inner beauty.
  *  One creates an instance of TableDef by defining the columns of
  *  the table, then uses that to create an instance of Table by
  *  passing in a sequence of rows.
  */
 class TableDef[T](_cols: Column[T]*) {
-  /** These operators are about all there is to it.
-   *
-   *  ~   appends a column to the table
-   *  >>  creates a right-justified column and appends it
-   *  <<  creates a left-justified column and appends it
-   *  >+  specifies a string to separate the previous column from the next.
-   *      if none is specified, a space is used.
-   */
+  // These operators are about all there is to it.
+  /** Appends a column to the table. */
   def ~(next: Column[T])            = retThis(cols :+= next)
-  def >>(pair: (String, T => Any))  = this ~ Column(pair._1, pair._2, false)
-  def <<(pair: (String, T => Any))  = this ~ Column(pair._1, pair._2, true)
-  def >+(sep: String)               = retThis(separators += ((cols.size - 1, sep)))
 
-  /** Below this point should all be considered private/internal.
-   */
+  // Below this point should all be considered private/internal.
   private var cols: List[Column[T]] = _cols.toList
-  private var separators: Map[Int, String] = Map()
 
-  def defaultSep(index: Int) = if (index > (cols.size - 2)) "" else " "
-  def sepAfter(i: Int): String = separators.getOrElse(i, defaultSep(i))
-  def sepWidths = cols.indices map (i => sepAfter(i).length)
+  def defaultSep(index: Int)   = if (index > (cols.size - 2)) "" else " "
+  def sepAfter(i: Int): String = defaultSep(i)
+  def sepWidths                = cols.indices map (i => sepAfter(i).length)
 
-  def columns = cols
   def colNames = cols map (_.name)
   def colFunctions = cols map (_.f)
   def colApply(el: T) = colFunctions map (f => f(el))
@@ -59,20 +48,12 @@ class TableDef[T](_cols: Column[T]*) {
     def mkFormatString(sepf: Int => String): String =
       specs.zipWithIndex map { case (c, i) => c + sepf(i) } mkString
 
-    def pp(): Unit = allToSeq foreach println
-
     def toFormattedSeq = argLists map (xs => rowFormat.format(xs: _*))
     def allToSeq = headers ++ toFormattedSeq
 
     override def toString = allToSeq mkString "\n"
   }
 
-  def formatterFor(rows: Seq[T]): T => String = {
-    val formatStr = new Table(rows).rowFormat
-
-    x => formatStr.format(colApply(x) : _*)
-  }
-
   def table(rows: Seq[T]) = new Table(rows)
 
   override def toString = cols.mkString("TableDef(", ", ", ")")
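
With the >>, << and >+ helpers gone, tables are built from explicit Column values. A minimal sketch; the Column(name, extractor, leftJustified) shape is inferred from the removed helpers and is an assumption here:

    import scala.reflect.internal.util.TableDef
    import scala.reflect.internal.util.TableDef.Column

    object TableSketch extends App {
      final case class Row(phase: String, id: Int)

      val layout = new TableDef[Row](
        Column[Row]("phase", _.phase, true),   // left-justified
        Column[Row]("id",    _.id,    false)   // right-justified
      )
      println(layout.table(Seq(Row("parser", 1), Row("typer", 4))))
    }
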
diff --git a/src/reflect/scala/reflect/internal/util/ThreeValues.scala b/src/reflect/scala/reflect/internal/util/ThreeValues.scala
index f89bd9e..1841051 100644
--- a/src/reflect/scala/reflect/internal/util/ThreeValues.scala
+++ b/src/reflect/scala/reflect/internal/util/ThreeValues.scala
@@ -1,4 +1,5 @@
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
 
 /** A simple three value type for booleans with an unknown value */
 object ThreeValues {
diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
index fa83f70..e4a6503 100644
--- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
+++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
@@ -1,4 +1,5 @@
-package scala.reflect.internal
+package scala
+package reflect.internal
 package util
 
 import scala.collection.{ mutable, immutable }
@@ -12,13 +13,9 @@ trait TraceSymbolActivity {
   if (enabled && global.isCompilerUniverse)
     scala.sys addShutdownHook showAllSymbols()
 
-  private type Set[T] = scala.collection.immutable.Set[T]
-  private val Set = scala.collection.immutable.Set
-
   val allSymbols  = mutable.Map[Int, Symbol]()
   val allChildren = mutable.Map[Int, List[Int]]() withDefaultValue Nil
   val prevOwners  = mutable.Map[Int, List[(Int, Phase)]]() withDefaultValue Nil
-  val symsCaused  = mutable.Map[Int, Int]() withDefaultValue 0
   val allTrees    = mutable.Set[Tree]()
 
   def recordSymbolsInTree(tree: Tree) {
@@ -44,39 +41,8 @@ trait TraceSymbolActivity {
     }
   }
 
-  /** TODO.
-   */
-  private def reachableDirectlyFromSymbol(sym: Symbol): List[Symbol] = (
-       List(sym.owner, sym.alias, sym.thisSym)
-    ++ sym.children
-    ++ sym.info.parents.map(_.typeSymbol)
-    ++ sym.typeParams
-    ++ sym.paramss.flatten
-  )
-  private def reachable[T](inputs: Traversable[T], mkSymbol: T => Symbol): Set[Symbol] = {
-    def loop(seen: Set[Symbol], remaining: List[Symbol]): Set[Symbol] = {
-      remaining match {
-        case Nil          => seen
-        case head :: rest =>
-          if ((head eq null) || (head eq NoSymbol) || seen(head)) loop(seen, rest)
-          else loop(seen + head, rest ++ reachableDirectlyFromSymbol(head).filterNot(seen))
-      }
-    }
-    loop(immutable.Set(), inputs.toList map mkSymbol filterNot (_ eq null) distinct)
-  }
-  private def treeList(t: Tree) = {
-    val buf = mutable.ListBuffer[Tree]()
-    t foreach (buf += _)
-    buf.toList
-  }
-
-  private def reachableFromSymbol(root: Symbol): Set[Symbol] =
-    reachable[Symbol](List(root, root.info.typeSymbol), x => x)
-
-  private def reachableFromTree(tree: Tree): Set[Symbol] =
-    reachable[Tree](treeList(tree), _.symbol)
-
-  private def signature(id: Int) = runBeforeErasure(allSymbols(id).defString)
+  private lazy val erasurePhase = findPhaseWithName("erasure")
+  private def signature(id: Int) = enteringPhase(erasurePhase)(allSymbols(id).defString)
 
   private def dashes(s: Any): String = ("" + s) map (_ => '-')
   private def show(s1: Any, ss: Any*) {
@@ -119,17 +85,9 @@ trait TraceSymbolActivity {
     }
     println("\n")
   }
-  private def showFreq[T, U](xs: Traversable[T])(groupFn: T => U, showFn: U => String = (x: U) => "" + x) = {
+  private def showFreq[T, U](xs: Traversable[T])(groupFn: T => U, showFn: U => String) = {
     showMapFreq(xs.toList groupBy groupFn)(showFn)
   }
-  private lazy val findErasurePhase: Phase = {
-    var ph = phase
-    while (ph != NoPhase && ph.name != "erasure") {
-      ph = ph.prev
-    }
-    ph
-  }
-  private def runBeforeErasure[T](body: => T): T = atPhase(findErasurePhase)(body)
 
   def showAllSymbols() {
     if (!enabled) return
diff --git a/src/reflect/scala/reflect/internal/util/TriState.scala b/src/reflect/scala/reflect/internal/util/TriState.scala
new file mode 100644
index 0000000..4074d97
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/TriState.scala
@@ -0,0 +1,28 @@
+package scala
+package reflect
+package internal
+package util
+
+import scala.language.implicitConversions
+
+import TriState._
+
+/** A simple true/false/unknown value, for those days when
+ *  true and false don't quite partition the space.
+ */
+final class TriState private (val value: Int) extends AnyVal {
+  def isKnown = this != Unknown
+  def booleanValue = this match {
+    case True  => true
+    case False => false
+    case _     => sys.error("Not a Boolean value")
+  }
+}
+
+object TriState {
+  implicit def booleanToTriState(b: Boolean): TriState = if (b) True else False
+
+  val Unknown = new TriState(-1)
+  val False   = new TriState(0)
+  val True    = new TriState(1)
+}
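
TriState is a value class over Int, and the implicit conversion lets plain Booleans flow in wherever a TriState is expected. A tiny sketch:

    import scala.reflect.internal.util.TriState
    import scala.reflect.internal.util.TriState._

    object TriStateSketch extends App {
      val answers: List[TriState] = List(True, Unknown, false)   // the Boolean lifts to False
      answers foreach { t =>
        if (t.isKnown) println(t.booleanValue)   // only True/False ever reach booleanValue
        else println("unknown")
      }
    }
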
diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
index fc12e31..a9a7c77 100644
--- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
@@ -4,7 +4,7 @@ package reflect.internal.util
 import java.lang.ref.{WeakReference, ReferenceQueue}
 import scala.annotation.tailrec
 import scala.collection.generic.Clearable
-import scala.collection.mutable.{Set => mSet}
+import scala.collection.mutable.{Set => MSet}
 
 /**
  * A HashSet where the elements are stored weakly. Elements in this set are eligible for GC if no other
@@ -16,8 +16,8 @@ import scala.collection.mutable.{Set => mSet}
  * This set implementation is not in general thread safe without external concurrency control. However, it behaves
  * properly when GC concurrently collects elements in this set.
  */
-final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with mSet[A] {
-  
+final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with MSet[A] {
+
   import WeakHashSet._
 
   def this() = this(initialCapacity = WeakHashSet.defaultInitialCapacity, loadFactor = WeakHashSet.defaultLoadFactor)
@@ -47,7 +47,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
       candidate  *= 2
     }
     candidate
-  } 
+  }
 
   /**
    * the underlying table of entries which is an array of Entry linked lists
@@ -65,7 +65,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
    * find the bucket associated with an elements's hash code
    */
   private[this] def bucketFor(hash: Int): Int = {
-    // spread the bits around to try to avoid accidental collisions using the 
+    // spread the bits around to try to avoid accidental collisions using the
     // same algorithm as java.util.HashMap
     var h = hash
     h ^= h >>> 20 ^ h >>> 12
@@ -98,7 +98,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
     def poll(): Entry[A] = queue.poll().asInstanceOf[Entry[A]]
 
     @tailrec
-    def queueLoop {
+    def queueLoop(): Unit = {
       val stale = poll()
       if (stale != null) {
         val bucket = bucketFor(stale.hash)
@@ -109,11 +109,11 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
 
         linkedListLoop(null, table(bucket))
 
-        queueLoop
+        queueLoop()
       }
     }
-    
-    queueLoop
+
+    queueLoop()
   }
 
   /**
@@ -123,7 +123,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
     val oldTable = table
     table = new Array[Entry[A]](oldTable.size * 2)
     threshhold = computeThreshHold
-    
+
     @tailrec
     def tableLoop(oldBucket: Int): Unit = if (oldBucket < oldTable.size) {
       @tailrec
@@ -225,7 +225,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
   def +=(elem: A) = this + elem
 
   // from scala.reflect.internal.Set
-  override def addEntry(x: A) { this += x }  
+  override def addEntry(x: A) { this += x }
 
   // remove an element from this set and return this set
   override def -(elem: A): this.type = elem match {
@@ -274,6 +274,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
 
   override def foreach[U](f: A => U): Unit = iterator foreach f
 
+  // toList keeps its `()` because the iterator it builds runs `removeStaleEntries()`
   override def toList(): List[A] = iterator.toList
 
   // Iterator over all the elements in this set in no particular order
@@ -292,7 +293,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
        */
       private[this] var entry: Entry[A] = null
 
-      /** 
+      /**
        * the element that will be the result of the next call to next()
        */
       private[this] var lookaheadelement: A = null.asInstanceOf[A]
@@ -339,7 +340,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
      * the entries must be stable. If any are garbage collected during validation
      * then an assertion may inappropriately fire.
      */
-    def fullyValidate {
+    def fullyValidate: Unit = {
       var computedCount = 0
       var bucket = 0
       while (bucket < table.size) {
@@ -383,34 +384,9 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
      *  Number of buckets in the table
      */
     def bucketsCount: Int = table.size
-
-    /**
-     * Number of buckets that don't hold anything
-     */
-    def emptyBucketsCount = bucketsCount - fullBucketsCount
-
-    /**
-     * Number of elements that are in collision. Useful for diagnosing performance issues.
-     */
-    def collisionsCount = size - (fullBucketsCount - collisionBucketsCount)
-
-    /**
-     * A map from a count of elements to the number of buckets with that count
-     */
-    def elementCountDistribution = table map linkedListSize groupBy identity map {case (size, list) => (size, list.size)}
-
-    private def linkedListSize(entry: Entry[A]) = {
-      var e = entry
-      var count = 0
-      while (e != null) {
-        count += 1
-        e = e.tail
-      }
-      count
-    }  
   }
 
-  private[util] def diagnostics = new Diagnostics 
+  private[util] def diagnostics = new Diagnostics
 }
 
 /**
@@ -427,4 +403,4 @@ object WeakHashSet {
   val defaultLoadFactor = .75
 
   def apply[A <: AnyRef](initialCapacity: Int = WeakHashSet.defaultInitialCapacity, loadFactor: Double = WeakHashSet.defaultLoadFactor) = new WeakHashSet[A](initialCapacity, loadFactor)
-}
+}
\ No newline at end of file
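
The weak semantics are unchanged by the cleanup: an element survives only while something outside the set still references it strongly. A sketch (GC is only hinted at, so the second entry may or may not still be present when printed):

    import scala.reflect.internal.util.WeakHashSet

    object WeakSetSketch extends App {
      val cache  = WeakHashSet[String]()
      val pinned = new String("pinned")     // strong reference held by this val
      cache += pinned
      cache += new String("collectable")    // held by the set alone, weakly

      System.gc()                           // a request, not a guarantee
      println(cache.iterator.toList)        // "pinned" must survive; the other may not
    }
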
diff --git a/src/reflect/scala/reflect/internal/util/package.scala b/src/reflect/scala/reflect/internal/util/package.scala
index 6d77235..3618c15 100644
--- a/src/reflect/scala/reflect/internal/util/package.scala
+++ b/src/reflect/scala/reflect/internal/util/package.scala
@@ -1,7 +1,42 @@
-package scala.reflect
+package scala
+package reflect
 package internal
 
+import scala.language.existentials // SI-6541
+
 package object util {
+  import StringOps.longestCommonPrefix
+
+  // An allocation-avoiding reusable instance of the so-common List(Nil).
+  val ListOfNil: List[List[Nothing]] = Nil :: Nil
+
+  def andFalse(body: Unit): Boolean = false
+
+  // Shorten a name like Symbols$FooSymbol to FooSymbol.
+  private def shortenName(name: String): String = {
+    if (name == "") return ""
+    val segments = (name split '$').toList
+    val last     = segments.last
+
+    if (last.length == 0)
+      segments takeRight 2 mkString "$"
+    else
+      last
+  }
+
+  def shortClassOfInstance(x: AnyRef): String = shortClass(x.getClass)
+  def shortClass(clazz: Class[_]): String = {
+    val name: String = (clazz.getName split '.').last
+    def isModule     = name endsWith "$"                        // object
+    def isAnon       = (name split '$').last forall (_.isDigit) // anonymous class
+
+    if (isModule)
+      (name split '$' filterNot (_ == "")).last + "$"
+    else if (isAnon)
+      clazz.getSuperclass :: clazz.getInterfaces.toList map (c => shortClass(c)) mkString " with "
+    else
+      shortenName(name)
+  }
   /**
    * Adds the `sm` String interpolator to a [[scala.StringContext]].
    */
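
shortClass and shortClassOfInstance boil a JVM class name down to its last meaningful segment, which is what the compiler's debugging output wants. For example:

    import scala.reflect.internal.util.shortClassOfInstance

    object ShortClassSketch extends App {
      println(shortClassOfInstance("hello"))                          // String
      println(shortClassOfInstance(Nil))                              // Nil$ -- module classes keep the trailing $
      println(shortClassOfInstance(new Runnable { def run() = () }))  // Object with Runnable (anonymous class)
    }
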
diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala
index 15befb6..ac1159b 100644
--- a/src/reflect/scala/reflect/io/AbstractFile.scala
+++ b/src/reflect/scala/reflect/io/AbstractFile.scala
@@ -4,19 +4,21 @@
  */
 
 
-package scala.reflect
+package scala
+package reflect
 package io
 
-import java.io.{ FileOutputStream, IOException, InputStream, OutputStream, BufferedOutputStream }
+import java.io.{ FileOutputStream, IOException, InputStream, OutputStream, BufferedOutputStream, ByteArrayOutputStream }
 import java.io.{ File => JFile }
 import java.net.URL
 import scala.collection.mutable.ArrayBuffer
+import scala.reflect.internal.util.Statistics
 
 /**
  * An abstraction over files for use in the reflection/compiler libraries.
- * 
+ *
  * ''Note:  This library is considered experimental and should not be used unless you know what you are doing.''
- *  
+ *
  * @author Philippe Altherr
  * @version 1.0, 23/03/2004
  */
@@ -27,7 +29,7 @@ object AbstractFile {
 
   /**
    * If the specified File exists and is a regular file, returns an
-   * abstract regular file backed by it. Otherwise, returns <code>null</code>.
+   * abstract regular file backed by it. Otherwise, returns `null`.
    */
   def getFile(file: File): AbstractFile =
     if (file.isFile) new PlainFile(file) else null
@@ -38,10 +40,7 @@ object AbstractFile {
   /**
    * If the specified File exists and is either a directory or a
    * readable zip or jar archive, returns an abstract directory
-   * backed by it. Otherwise, returns <code>null</code>.
-   *
-   * @param file ...
-   * @return     ...
+   * backed by it. Otherwise, returns `null`.
    */
   def getDirectory(file: File): AbstractFile =
     if (file.isDirectory) new PlainFile(file)
@@ -51,15 +50,14 @@ object AbstractFile {
   /**
    * If the specified URL exists and is a readable zip or jar archive,
    * returns an abstract directory backed by it. Otherwise, returns
-   * <code>null</code>.
-   *
-   * @param file ...
-   * @return     ...
+   * `null`.
    */
   def getURL(url: URL): AbstractFile = {
     if (url == null || !Path.isExtensionJarOrZip(url.getPath)) null
     else ZipArchive fromURL url
   }
+
+  def getResources(url: URL): AbstractFile = ZipArchive fromManifestURL url
 }
 
 /**
@@ -80,12 +78,12 @@ object AbstractFile {
  * </p>
  * <p>
 *   The interface does <b>not</b> allow access to the content.
- *   The class <code>symtab.classfile.AbstractFileReader</code> accesses
+ *   The class `symtab.classfile.AbstractFileReader` accesses
  *   bytes, knowing that the character set of classfiles is UTF-8. For
- *   all other cases, the class <code>SourceFile</code> is used, which honors
- *   <code>global.settings.encoding.value</code>.
+ *   all other cases, the class `SourceFile` is used, which honors
+ *   `global.settings.encoding.value`.
  * </p>
- * 
+ *
  * ''Note:  This library is considered experimental and should not be used unless you know what you are doing.''
  */
 abstract class AbstractFile extends Iterable[AbstractFile] {
@@ -116,7 +114,10 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
   def underlyingSource: Option[AbstractFile] = None
 
   /** Does this abstract file denote an existing file? */
-  def exists: Boolean = (file eq null) || file.exists
+  def exists: Boolean = {
+    if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount)
+    (file eq null) || file.exists
+  }
 
   /** Does this abstract file represent something which can contain classfiles? */
   def isClassContainer = isDirectory || (file != null && (extension == "jar" || extension == "zip"))
@@ -130,6 +131,9 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
   /** Is this abstract file a directory? */
   def isDirectory: Boolean
 
+  /** Does this abstract file correspond to something on-disk? */
+  def isVirtual: Boolean = false
+
   /** Returns the time that this abstract file was last modified. */
   def lastModified: Long
 
@@ -148,7 +152,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
   def toURL: URL = if (file == null) null else file.toURI.toURL
 
   /** Returns contents of file (if applicable) in a Char array.
-   *  warning: use <code>Global.getSourceFile()</code> to use the proper
+   *  warning: use `Global.getSourceFile()` to use the proper
    *  encoding when converting to the char array.
    */
   @throws(classOf[IOException])
@@ -159,24 +163,36 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
   @throws(classOf[IOException])
   def toByteArray: Array[Byte] = {
     val in = input
-    var rest = sizeOption.getOrElse(0)
-    val arr = new Array[Byte](rest)
-    while (rest > 0) {
-      val res = in.read(arr, arr.length - rest, rest)
-      if (res == -1)
-        throw new IOException("read error")
-      rest -= res
+    sizeOption match {
+      case Some(size) =>
+        var rest = size
+        val arr = new Array[Byte](rest)
+        while (rest > 0) {
+          val res = in.read(arr, arr.length - rest, rest)
+          if (res == -1)
+            throw new IOException("read error")
+          rest -= res
+        }
+        in.close()
+        arr
+      case None =>
+        val out = new ByteArrayOutputStream()
+        var c = in.read()
+        while(c != -1) {
+          out.write(c)
+          c = in.read()
+        }
+        in.close()
+        out.toByteArray()
     }
-    in.close()
-    arr
   }
 
   /** Returns all abstract subfiles of this abstract directory. */
   def iterator: Iterator[AbstractFile]
 
   /** Returns the abstract file in this abstract directory with the specified
-   *  name. If there is no such file, returns <code>null</code>. The argument
-   *  <code>directory</code> tells whether to look for a directory or
+   *  name. If there is no such file, returns `null`. The argument
+   *  `directory` tells whether to look for a directory or
    *  a regular file.
    */
   def lookupName(name: String, directory: Boolean): AbstractFile
@@ -186,19 +202,6 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
    */
   def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile
 
-  /** Returns the abstract file in this abstract directory with the specified
-   *  path relative to it, If there is no such file, returns null. The argument
-   *  <code>directory</code> tells whether to look for a directory or a regular
-   *  file.
-   *
-   *  @param path      ...
-   *  @param directory ...
-   *  @return          ...
-   */
-  def lookupPath(path: String, directory: Boolean): AbstractFile = {
-    lookup((f, p, dir) => f.lookupName(p, dir), path, directory)
-  }
-
   /** Return an abstract file that does not check that `path` denotes
    *  an existing file.
    */
@@ -243,7 +246,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
    */
   def fileNamed(name: String): AbstractFile = {
     assert(isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
-    fileOrSubdirectoryNamed(name, false)
+    fileOrSubdirectoryNamed(name, isDir = false)
   }
 
   /**
@@ -252,7 +255,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
    */
   def subdirectoryNamed(name: String): AbstractFile = {
     assert (isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
-    fileOrSubdirectoryNamed(name, true)
+    fileOrSubdirectoryNamed(name, isDir = true)
   }
 
   protected def unsupported(): Nothing = unsupported(null)
diff --git a/src/reflect/scala/reflect/io/Directory.scala b/src/reflect/scala/reflect/io/Directory.scala
index c040d1e..2b965e6 100644
--- a/src/reflect/scala/reflect/io/Directory.scala
+++ b/src/reflect/scala/reflect/io/Directory.scala
@@ -6,7 +6,8 @@
 **                          |/                                          **
 \*                                                                      */
 
-package scala.reflect
+package scala
+package reflect
 package io
 
 import java.io.{ File => JFile }
@@ -14,12 +15,10 @@ import java.io.{ File => JFile }
  * ''Note:  This library is considered experimental and should not be used unless you know what you are doing.''
  */
 object Directory {
-  import scala.util.Properties.{ tmpDir, userHome, userDir }
+  import scala.util.Properties.userDir
 
   private def normalizePath(s: String) = Some(apply(Path(s).normalize))
   def Current: Option[Directory]  = if (userDir == "") None else normalizePath(userDir)
-  def Home: Option[Directory]     = if (userHome == "") None else normalizePath(userHome)
-  def TmpDir: Option[Directory]   = if (tmpDir == "") None else normalizePath(tmpDir)
 
   def apply(path: Path): Directory = path.toDirectory
 
@@ -30,20 +29,18 @@ object Directory {
     path.createDirectory()
   }
 }
-import Path._
 
 /** An abstraction for directories.
  *
  *  @author  Paul Phillips
  *  @since   2.8
- *  
+ *
  *  ''Note:  This is library is considered experimental and should not be used unless you know what you are doing.''
  */
 class Directory(jfile: JFile) extends Path(jfile) {
   override def toAbsolute: Directory = if (isAbsolute) this else super.toAbsolute.toDirectory
   override def toDirectory: Directory = this
   override def toFile: File = new File(jfile)
-  override def isValid = jfile.isDirectory() || !jfile.exists()
   override def normalize: Directory = super.normalize.toDirectory
 
   /** An iterator over the contents of this directory.
@@ -60,7 +57,6 @@ class Directory(jfile: JFile) extends Path(jfile) {
   override def walkFilter(cond: Path => Boolean): Iterator[Path] =
     list filter cond flatMap (_ walkFilter cond)
 
-  def deepDirs: Iterator[Directory] = Path.onlyDirs(deepList())
   def deepFiles: Iterator[File] = Path.onlyFiles(deepList())
 
   /** If optional depth argument is not given, will recurse
@@ -70,10 +66,4 @@ class Directory(jfile: JFile) extends Path(jfile) {
     if (depth < 0) list ++ (dirs flatMap (_ deepList (depth)))
     else if (depth == 0) Iterator.empty
     else list ++ (dirs flatMap (_ deepList (depth - 1)))
-
-  /** An iterator over the directories underneath this directory,
-   *  to the (optionally) given depth.
-   */
-  def subdirs(depth: Int = 1): Iterator[Directory] =
-    deepList(depth) collect { case x: Directory => x }
 }
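
Directory keeps the recursive listing but loses Home, TmpDir, deepDirs and subdirs. A minimal walk; the path is a placeholder:

    import scala.reflect.io.{ Directory, Path }

    object DirectoryWalkSketch extends App {
      val dir = Directory(Path("src"))           // hypothetical directory
      if (dir.exists)
        dir.deepFiles take 5 foreach println     // recurses without a depth bound by default
    }
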
diff --git a/src/reflect/scala/reflect/io/File.scala b/src/reflect/scala/reflect/io/File.scala
index 736ba5d..a9c6807 100644
--- a/src/reflect/scala/reflect/io/File.scala
+++ b/src/reflect/scala/reflect/io/File.scala
@@ -7,13 +7,16 @@
 \*                                                                      */
 
 
-package scala.reflect
+package scala
+package reflect
 package io
 
 import java.io.{
   FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
-  BufferedInputStream, BufferedOutputStream, IOException, PrintStream, PrintWriter, Closeable => JCloseable }
-import java.io.{ File => JFile }
+  BufferedInputStream, BufferedOutputStream, IOException, PrintStream, PrintWriter, Closeable => JCloseable,
+  File => JFile
+}
+
 import java.nio.channels.{ Channel, FileChannel }
 import scala.io.Codec
 import scala.language.{reflectiveCalls, implicitConversions}
@@ -22,8 +25,7 @@ import scala.language.{reflectiveCalls, implicitConversions}
  */
 object File {
   def pathSeparator = java.io.File.pathSeparator
-  def separator = java.io.File.separator
-
+  def separator     = java.io.File.separator
   def apply(path: Path)(implicit codec: Codec) = new File(path.jfile)(codec)
 
   // Create a temporary file, which will be deleted upon jvm exit.
@@ -32,41 +34,7 @@ object File {
     jfile.deleteOnExit()
     apply(jfile)
   }
-
-  type HasClose = { def close(): Unit }
-
-  def closeQuietly(target: HasClose) {
-    try target.close() catch { case e: IOException => }
-  }
-  def closeQuietly(target: JCloseable) {
-    try target.close() catch { case e: IOException => }
-  }
-
-  // this is a workaround for http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6503430
-  // we are using a static initializer to statically initialize a java class so we don't
-  // trigger java.lang.InternalErrors later when using it concurrently.  We ignore all
-  // the exceptions so as not to cause spurious failures when no write access is available,
-  // e.g. google app engine.
-  //
-  // XXX need to put this behind a setting.
-  //
-  // try {
-  //   import Streamable.closing
-  //   val tmp = java.io.File.createTempFile("bug6503430", null, null)
-  //   try closing(new FileInputStream(tmp)) { in =>
-  //     val inc = in.getChannel()
-  //     closing(new FileOutputStream(tmp, true)) { out =>
-  //       out.getChannel().transferFrom(inc, 0, 0)
-  //     }
-  //   }
-  //   finally tmp.delete()
-  // }
-  // catch {
-  //   case _: IllegalArgumentException | _: IllegalStateException | _: IOException | _: SecurityException => ()
-  // }
 }
-import File._
-import Path._
 
 /** An abstraction for files.  For character data, a Codec
  *  can be supplied at either creation time or when a method
@@ -76,19 +44,17 @@ import Path._
  *
  *  @author  Paul Phillips
  *  @since   2.8
- *  
+ *
 *  ''Note:  This library is considered experimental and should not be used unless you know what you are doing.''
  */
 class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) with Streamable.Chars {
   override val creationCodec = constructorCodec
-  def withCodec(codec: Codec): File = new File(jfile)(codec)
 
   override def addExtension(ext: String): File = super.addExtension(ext).toFile
   override def toAbsolute: File = if (isAbsolute) this else super.toAbsolute.toFile
   override def toDirectory: Directory = new Directory(jfile)
   override def toFile: File = this
   override def normalize: File = super.normalize.toFile
-  override def isValid = jfile.isFile() || !jfile.exists()
   override def length = super[Path].length
   override def walkFilter(cond: Path => Boolean): Iterator[Path] =
     if (cond(this)) Iterator.single(this) else Iterator.empty
@@ -99,26 +65,22 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w
   /** Obtains a OutputStream. */
   def outputStream(append: Boolean = false) = new FileOutputStream(jfile, append)
   def bufferedOutput(append: Boolean = false) = new BufferedOutputStream(outputStream(append))
-  def printStream(append: Boolean = false) = new PrintStream(outputStream(append), true)
 
   /** Obtains an OutputStreamWriter wrapped around a FileOutputStream.
    *  This should behave like a less broken version of java.io.FileWriter,
    *  in that unlike the java version you can specify the encoding.
    */
-  def writer(): OutputStreamWriter = writer(false)
-  def writer(append: Boolean): OutputStreamWriter = writer(append, creationCodec)
   def writer(append: Boolean, codec: Codec): OutputStreamWriter =
     new OutputStreamWriter(outputStream(append), codec.charSet)
 
   /** Wraps a BufferedWriter around the result of writer().
    */
-  def bufferedWriter(): BufferedWriter = bufferedWriter(false)
+  def bufferedWriter(): BufferedWriter = bufferedWriter(append = false)
   def bufferedWriter(append: Boolean): BufferedWriter = bufferedWriter(append, creationCodec)
   def bufferedWriter(append: Boolean, codec: Codec): BufferedWriter =
     new BufferedWriter(writer(append, codec))
 
   def printWriter(): PrintWriter = new PrintWriter(bufferedWriter(), true)
-  def printWriter(append: Boolean): PrintWriter = new PrintWriter(bufferedWriter(append), true)
 
   /** Creates a new file and writes all the Strings to it. */
   def writeAll(strings: String*): Unit = {
@@ -127,12 +89,6 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w
     finally out.close()
   }
 
-  def writeBytes(bytes: Array[Byte]): Unit = {
-    val out = bufferedOutput()
-    try out write bytes
-    finally out.close()
-  }
-
   def appendAll(strings: String*): Unit = {
     val out = bufferedWriter(append = true)
     try strings foreach (out write _)
@@ -150,39 +106,6 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w
     try Some(slurp())
     catch { case _: IOException => None }
 
-  def copyTo(destPath: Path, preserveFileDate: Boolean = false): Boolean = {
-    val CHUNK = 1024 * 1024 * 16  // 16 MB
-    val dest = destPath.toFile
-    if (!isValid) fail("Source %s is not a valid file." format name)
-    if (this.normalize == dest.normalize) fail("Source and destination are the same.")
-    if (!dest.parent.exists) fail("Destination cannot be created.")
-    if (dest.exists && !dest.canWrite) fail("Destination exists but is not writable.")
-    if (dest.isDirectory) fail("Destination exists but is a directory.")
-
-    lazy val in_s = inputStream()
-    lazy val out_s = dest.outputStream()
-    lazy val in = in_s.getChannel()
-    lazy val out = out_s.getChannel()
-
-    try {
-      val size = in.size()
-      var pos, count = 0L
-      while (pos < size) {
-        count = (size - pos) min CHUNK
-        pos += out.transferFrom(in, pos, count)
-      }
-    }
-    finally List[HasClose](out, out_s, in, in_s) foreach closeQuietly
-
-    if (this.length != dest.length)
-      fail("Failed to completely copy %s to %s".format(name, dest.name))
-
-    if (preserveFileDate)
-      dest.lastModified = this.lastModified
-
-    true
-  }
-
   /** Reflection since we're into the java 6+ API.
    */
   def setExecutable(executable: Boolean, ownerOnly: Boolean = true): Boolean = {
diff --git a/src/reflect/scala/reflect/io/FileOperationException.scala b/src/reflect/scala/reflect/io/FileOperationException.scala
index 13a1322..fdfe023 100644
--- a/src/reflect/scala/reflect/io/FileOperationException.scala
+++ b/src/reflect/scala/reflect/io/FileOperationException.scala
@@ -7,7 +7,8 @@
 \*                                                                      */
 
 
-package scala.reflect
+package scala
+package reflect
 package io
 /** ''Note:  This library is considered experimental and should not be used unless you know what you are doing.'' */
 case class FileOperationException(msg: String) extends RuntimeException(msg)
diff --git a/src/reflect/scala/reflect/io/IOStats.scala b/src/reflect/scala/reflect/io/IOStats.scala
new file mode 100644
index 0000000..71f8be3
--- /dev/null
+++ b/src/reflect/scala/reflect/io/IOStats.scala
@@ -0,0 +1,32 @@
+package scala
+package reflect.io
+
+import scala.reflect.internal.util.Statistics
+
+// Due to limitations in the Statistics machinery, these are only
+// reported if this patch is applied.
+//
+// --- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+// +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
+// @@ -109,7 +109,7 @@ quant)
+//     *  Quantities with non-empty prefix are printed in the statistics info.
+//     */
+//    trait Quantity {
+// -    if (enabled && prefix.nonEmpty) {
+// +    if (prefix.nonEmpty) {
+//        val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix"
+//        qs(key) = this
+//      }
+// @@ -243,7 +243,7 @@ quant)
+//     *
+//     *  to remove all Statistics code from build
+//     */
+// -  final val canEnable = _enabled
+// +  final val canEnable = true // _enabled
+//
+// We can commit this change as the first diff reverts a fix for an IDE memory leak.
+private[io] object IOStats {
+  val fileExistsCount      = Statistics.newCounter("# File.exists calls")
+  val fileIsDirectoryCount = Statistics.newCounter("# File.isDirectory calls")
+  val fileIsFileCount      = Statistics.newCounter("# File.isFile calls")
+}
diff --git a/src/reflect/scala/reflect/io/NoAbstractFile.scala b/src/reflect/scala/reflect/io/NoAbstractFile.scala
index 8c88d3a..18eca76 100644
--- a/src/reflect/scala/reflect/io/NoAbstractFile.scala
+++ b/src/reflect/scala/reflect/io/NoAbstractFile.scala
@@ -3,15 +3,15 @@
  * @author  Paul Phillips
  */
 
-package scala.reflect
+package scala
+package reflect
 package io
 
 import java.io.InputStream
-import java.io.{ File => JFile }
 
 /** A distinguished object so you can avoid both null
  *  and Option.
- *  
+ *
  *  ''Note:  This library is considered experimental and should not be used unless you know what you are doing.''
  */
 object NoAbstractFile extends AbstractFile {
@@ -19,9 +19,10 @@ object NoAbstractFile extends AbstractFile {
   def container: AbstractFile = this
   def create(): Unit = ???
   def delete(): Unit = ???
-  def file: JFile = null
+  def file: java.io.File = null
   def input: InputStream = null
   def isDirectory: Boolean = false
+  override def isVirtual: Boolean = true
   def iterator: Iterator[AbstractFile] = Iterator.empty
   def lastModified: Long = 0L
   def lookupName(name: String, directory: Boolean): AbstractFile = null
@@ -30,4 +31,5 @@ object NoAbstractFile extends AbstractFile {
   def output: java.io.OutputStream = null
   def path: String = ""
   override def toByteArray = Array[Byte]()
+  override def toString = "<no file>"
 }
diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala
index 36fdc04..15fce95 100644
--- a/src/reflect/scala/reflect/io/Path.scala
+++ b/src/reflect/scala/reflect/io/Path.scala
@@ -3,16 +3,17 @@
  * @author Paul Phillips
  */
 
-package scala.reflect
+package scala
+package reflect
 package io
 
 import java.io.{
   FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
-  BufferedInputStream, BufferedOutputStream, RandomAccessFile }
-import java.io.{ File => JFile }
+  BufferedInputStream, BufferedOutputStream, RandomAccessFile, File => JFile }
 import java.net.{ URI, URL }
 import scala.util.Random.alphanumeric
 import scala.language.implicitConversions
+import scala.reflect.internal.util.Statistics
 
 /** An abstraction for filesystem paths.  The differences between
  *  Path, File, and Directory are primarily to communicate intent.
@@ -27,7 +28,7 @@ import scala.language.implicitConversions
  *
  *  @author  Paul Phillips
  *  @since   2.8
- *  
+ *
  *  ''Note:  This library is considered experimental and should not be used unless you know what you are doing.''
  */
 object Path {
@@ -49,32 +50,28 @@ object Path {
   implicit def string2path(s: String): Path = apply(s)
   implicit def jfile2path(jfile: JFile): Path = apply(jfile)
 
-  // java 7 style, we don't use it yet
-  // object AccessMode extends Enumeration {
-  //   val EXECUTE, READ, WRITE = Value
-  // }
-  // def checkAccess(modes: AccessMode*): Boolean = {
-  //   modes foreach {
-  //     case EXECUTE  => throw new Exception("Unsupported") // can't check in java 5
-  //     case READ     => if (!jfile.canRead()) return false
-  //     case WRITE    => if (!jfile.canWrite()) return false
-  //   }
-  //   true
-  // }
-
   def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs filter (_.isDirectory) map (_.toDirectory)
   def onlyDirs(xs: List[Path]): List[Directory] = xs filter (_.isDirectory) map (_.toDirectory)
   def onlyFiles(xs: Iterator[Path]): Iterator[File] = xs filter (_.isFile) map (_.toFile)
-  def onlyFiles(xs: List[Path]): List[File] = xs filter (_.isFile) map (_.toFile)
 
   def roots: List[Path] = java.io.File.listRoots().toList map Path.apply
 
-  def apply(segments: Seq[String]): Path = apply(segments mkString java.io.File.separator)
   def apply(path: String): Path = apply(new JFile(path))
-  def apply(jfile: JFile): Path =
-    if (jfile.isFile) new File(jfile)
-    else if (jfile.isDirectory) new Directory(jfile)
+  def apply(jfile: JFile): Path = try {
+    def isFile = {
+      if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount)
+      jfile.isFile
+    }
+
+    def isDirectory = {
+      if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount)
+      jfile.isDirectory
+    }
+
+    if (isFile) new File(jfile)
+    else if (isDirectory) new Directory(jfile)
     else new Path(jfile)
+  } catch { case ex: SecurityException => new Path(jfile) }
 
   /** Avoiding any shell/path issues by only using alphanumerics. */
   private[io] def randomPrefix = alphanumeric take 6 mkString ""
@@ -84,19 +81,13 @@ import Path._
 
 /** The Path constructor is private so we can enforce some
  *  semantics regarding how a Path might relate to the world.
- *  
+ *
  *  ''Note:  This library is considered experimental and should not be used unless you know what you are doing.''
  */
 class Path private[io] (val jfile: JFile) {
   val separator = java.io.File.separatorChar
   val separatorStr = java.io.File.separator
 
-  // Validation: this verifies that the type of this object and the
-  // contents of the filesystem are in agreement.  All objects are
-  // valid except File objects whose path points to a directory and
-  // Directory objects whose path points to a file.
-  def isValid: Boolean = true
-
   // conversions
   def toFile: File = new File(jfile)
   def toDirectory: Directory = new Directory(jfile)
@@ -104,6 +95,7 @@ class Path private[io] (val jfile: JFile) {
   def toCanonical: Path = Path(jfile.getCanonicalPath())
   def toURI: URI = jfile.toURI()
   def toURL: URL = toURI.toURL()
+
   /** If this path is absolute, returns it: otherwise, returns an absolute
    *  path made up of root / this.
    */
@@ -136,7 +128,6 @@ class Path private[io] (val jfile: JFile) {
   def name: String = jfile.getName()
   def path: String = jfile.getPath()
   def normalize: Path = Path(jfile.getAbsolutePath())
-  def isRootPath: Boolean = roots exists (_ isSame this)
 
   def resolve(other: Path) = if (other.isAbsolute || isEmpty) other else /(other)
   def relativize(other: Path) = {
@@ -152,9 +143,8 @@ class Path private[io] (val jfile: JFile) {
     Path(createRelativePath(segments, other.segments))
   }
 
-  // derived from identity
-  def root: Option[Path] = roots find (this startsWith _)
   def segments: List[String] = (path split separator).toList filterNot (_.length == 0)
+
   /**
    * @return The path of the parent directory, or root if path is already root
    */
@@ -185,10 +175,6 @@ class Path private[io] (val jfile: JFile) {
     if (i < 0) ""
     else name.substring(i + 1)
   }
-  // def extension: String = (name lastIndexOf '.') match {
-  //   case -1   => ""
-  //   case idx  => name drop (idx + 1)
-  // }
   // compares against extensions in a CASE INSENSITIVE way.
   def hasExtension(ext: String, exts: String*) = {
     val lower = extension.toLowerCase
@@ -212,23 +198,28 @@ class Path private[io] (val jfile: JFile) {
   // Boolean tests
   def canRead = jfile.canRead()
   def canWrite = jfile.canWrite()
-  def exists = jfile.exists()
-  def notExists = try !jfile.exists() catch { case ex: SecurityException => false }
+  def exists = {
+    if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount)
+    try jfile.exists() catch { case ex: SecurityException => false }
+  }
 
-  def isFile = jfile.isFile()
-  def isDirectory = jfile.isDirectory()
+  def isFile = {
+    if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount)
+    try jfile.isFile() catch { case ex: SecurityException => false }
+  }
+  def isDirectory = {
+    if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount)
+    try jfile.isDirectory() catch { case ex: SecurityException => jfile.getPath == "." }
+  }
   def isAbsolute = jfile.isAbsolute()
-  def isHidden = jfile.isHidden()
   def isEmpty = path.length == 0
 
   // Information
   def lastModified = jfile.lastModified()
-  def lastModified_=(time: Long) = jfile setLastModified time // should use setXXX function?
   def length = jfile.length()
 
   // Boolean path comparisons
   def endsWith(other: Path) = segments endsWith other.segments
-  def startsWith(other: Path) = segments startsWith other.segments
   def isSame(other: Path) = toCanonical == other.toCanonical
   def isFresher(other: Path) = lastModified > other.lastModified
 
@@ -248,7 +239,6 @@ class Path private[io] (val jfile: JFile) {
 
   // deletions
   def delete() = jfile.delete()
-  def deleteIfExists() = if (jfile.exists()) delete() else false
 
   /** Deletes the path recursively. Returns false on failure.
    *  Use with caution!
@@ -270,16 +260,6 @@ class Path private[io] (val jfile: JFile) {
       length == 0
     }
 
-  def touch(modTime: Long = System.currentTimeMillis) = {
-    createFile()
-    if (isFile)
-      lastModified = modTime
-  }
-
-  // todo
-  // def copyTo(target: Path, options ...): Boolean
-  // def moveTo(target: Path, options ...): Boolean
-
   override def toString() = path
   override def equals(other: Any) = other match {
     case x: Path  => path == x.path
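
With the changes above, `Path.apply` and the Boolean tests route through the IOStats counters and catch `SecurityException` instead of propagating it, while little-used members (`touch`, `deleteIfExists`, `isRootPath`, `startsWith`, ...) are dropped. A minimal usage sketch, assuming scala-reflect is on the classpath; the paths are placeholders:

    import scala.reflect.io.{ Directory, Path }

    object PathUsageSketch {
      def main(args: Array[String]): Unit = {
        val p: Path = Path("/tmp/example.txt")   // resolves to a File, Directory or plain Path
        println(p.exists)                        // false rather than an exception under a SecurityManager
        println(p.hasExtension("txt", "md"))     // extension comparison is case-insensitive
        val d: Directory = Path("/tmp").toDirectory
        println(d.isDirectory)
      }
    }
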
diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala
index 82b0568..8f24d84 100644
--- a/src/reflect/scala/reflect/io/PlainFile.scala
+++ b/src/reflect/scala/reflect/io/PlainFile.scala
@@ -3,23 +3,12 @@
  * @author  Martin Odersky
  */
 
-
-package scala.reflect
+package scala
+package reflect
 package io
 
 import java.io.{ FileInputStream, FileOutputStream, IOException }
-import PartialFunction._
-/** ''Note:  This library is considered experimental and should not be used unless you know what you are doing.'' */
-object PlainFile {
-  /**
-   * If the specified File exists, returns an abstract file backed
-   * by it. Otherwise, returns null.
-   */
-  def fromPath(file: Path): PlainFile =
-    if (file.isDirectory) new PlainDirectory(file.toDirectory)
-    else if (file.isFile) new PlainFile(file)
-    else null
-}
+
 /** ''Note:  This library is considered experimental and should not be used unless you know what you are doing.'' */
 class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) {
   override def isDirectory = true
@@ -28,7 +17,7 @@ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) {
 }
 
 /** This class implements an abstract file backed by a File.
- * 
+ *
  * ''Note:  This library is considered experimental and should not be used unless you know what you are doing.''
  */
 class PlainFile(val givenPath: Path) extends AbstractFile {
@@ -54,7 +43,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
   override def sizeOption = Some(givenPath.length.toInt)
 
   override def toString = path
-  override def hashCode(): Int = fpath.hashCode
+  override def hashCode(): Int = fpath.hashCode()
   override def equals(that: Any): Boolean = that match {
     case x: PlainFile => fpath == x.fpath
     case _            => false
@@ -68,8 +57,14 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
 
   /** Returns all abstract subfiles of this abstract directory. */
   def iterator: Iterator[AbstractFile] = {
+    // Optimization: Assume that the file was not deleted and did not have permissions changed
+    // between the call to `list` and the iteration. This saves a call to `exists`.
+    def existsFast(path: Path) = path match {
+      case (_: Directory | _: io.File) => true
+      case _                           => path.exists
+    }
     if (!isDirectory) Iterator.empty
-    else givenPath.toDirectory.list filter (_.exists) map (new PlainFile(_))
+    else givenPath.toDirectory.list filter existsFast map (new PlainFile(_))
   }
 
   /**
@@ -77,10 +72,6 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
    * specified name. If there is no such file, returns null. The
    * argument "directory" tells whether to look for a directory or
    * or a regular file.
-   *
-   * @param name      ...
-   * @param directory ...
-   * @return          ...
    */
   def lookupName(name: String, directory: Boolean): AbstractFile = {
     val child = givenPath / name
diff --git a/src/reflect/scala/reflect/io/Streamable.scala b/src/reflect/scala/reflect/io/Streamable.scala
index 61ec8a4..aa47947 100644
--- a/src/reflect/scala/reflect/io/Streamable.scala
+++ b/src/reflect/scala/reflect/io/Streamable.scala
@@ -3,7 +3,8 @@
  * @author Paul Phillips
  */
 
-package scala.reflect
+package scala
+package reflect
 package io
 
 import java.net.{ URI, URL }
@@ -17,14 +18,14 @@ import Path.fail
  *
  *  @author Paul Phillips
  *  @since  2.8
- *  
+ *
  *  ''Note:  This library is considered experimental and should not be used unless you know what you are doing.''
  */
 object Streamable {
   /** Traits which can be viewed as a sequence of bytes.  Source types
    *  which know their length should override def length: Long for more
    *  efficient method implementations.
-   *  
+   *
    *  ''Note:  This library is considered experimental and should not be used unless you know what you are doing.''
    */
   trait Bytes {
@@ -69,7 +70,7 @@ object Streamable {
   }
 
   /** For objects which can be viewed as Chars.
-   * 
+   *
    * ''Note:  This library is considered experimental and should not be used unless you know what you are doing.''
    */
   trait Chars extends Bytes {
@@ -81,7 +82,6 @@ object Streamable {
      */
     def creationCodec: Codec = implicitly[Codec]
 
-    def chars(): BufferedSource = chars(creationCodec)
     def chars(codec: Codec): BufferedSource = Source.fromInputStream(inputStream())(codec)
 
     def lines(): Iterator[String] = lines(creationCodec)
@@ -89,8 +89,7 @@ object Streamable {
 
     /** Obtains an InputStreamReader wrapped around a FileInputStream.
      */
-    def reader(): InputStreamReader = reader(creationCodec)
-    def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream, codec.charSet)
+    def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream(), codec.charSet)
 
     /** Wraps a BufferedReader around the result of reader().
      */
@@ -108,7 +107,10 @@ object Streamable {
     /** Convenience function to import entire file into a String.
      */
     def slurp(): String = slurp(creationCodec)
-    def slurp(codec: Codec) = chars(codec).mkString
+    def slurp(codec: Codec) = {
+      val src = chars(codec)
+      try src.mkString finally src.close()  // Always Be Closing
+    }
   }
 
   /** Call a function on something Closeable, finally closing it. */
@@ -117,7 +119,9 @@ object Streamable {
     finally stream.close()
 
   def bytes(is: => InputStream): Array[Byte] =
-    (new Bytes { def inputStream() = is }).toByteArray
+    (new Bytes {
+      def inputStream() = is
+    }).toByteArray()
 
   def slurp(is: => InputStream)(implicit codec: Codec): String =
     new Chars { def inputStream() = is } slurp codec
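
`slurp` now closes its source and the codec-less `chars()`/`reader()` overloads are gone, so callers provide a `Codec` explicitly. A minimal sketch; the file path is a placeholder:

    import java.io.FileInputStream
    import scala.io.Codec
    import scala.reflect.io.Streamable

    object StreamableUsageSketch {
      def main(args: Array[String]): Unit = {
        implicit val codec: Codec = Codec.UTF8
        // Reads the whole stream into a String; the source is now closed even on failure.
        val text = Streamable.slurp(new FileInputStream("/tmp/example.txt"))
        println(text.length)
      }
    }
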
diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala
index 78713c2..aa6ceaa 100644
--- a/src/reflect/scala/reflect/io/VirtualDirectory.scala
+++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala
@@ -2,7 +2,8 @@
  * Copyright 2005-2013 LAMP/EPFL
  */
 
-package scala.reflect
+package scala
+package reflect
 package io
 
 import scala.collection.mutable
@@ -11,7 +12,7 @@ import scala.collection.mutable
  * An in-memory directory.
  *
  * @author Lex Spoon
- * 
+ *
  * ''Note:  This library is considered experimental and should not be used unless you know what you are doing.''
  */
 class VirtualDirectory(val name: String, maybeContainer: Option[VirtualDirectory])
@@ -26,22 +27,23 @@ extends AbstractFile {
 
   def container = maybeContainer.get
   def isDirectory = true
-  var lastModified: Long = System.currentTimeMillis
+  override def isVirtual = true
+  val lastModified: Long = System.currentTimeMillis
 
   override def file = null
   override def input = sys.error("directories cannot be read")
   override def output = sys.error("directories cannot be written")
 
   /** Does this abstract file denote an existing file? */
-  def create() { unsupported }
+  def create() { unsupported() }
 
   /** Delete the underlying file or directory (recursively). */
-  def delete() { unsupported }
+  def delete() { unsupported() }
 
   /** Returns an abstract file with the given name. It does not
    *  check that it exists.
    */
-  def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported
+  def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported()
 
   private val files = mutable.Map.empty[String, AbstractFile]
 
@@ -53,20 +55,20 @@ extends AbstractFile {
     (files get name filter (_.isDirectory == directory)).orNull
 
   override def fileNamed(name: String): AbstractFile =
-    Option(lookupName(name, false)) getOrElse {
+    Option(lookupName(name, directory = false)) getOrElse {
       val newFile = new VirtualFile(name, path+'/'+name)
       files(name) = newFile
       newFile
     }
 
   override def subdirectoryNamed(name: String): AbstractFile =
-    Option(lookupName(name, true)) getOrElse {
+    Option(lookupName(name, directory = true)) getOrElse {
       val dir = new VirtualDirectory(name, Some(this))
       files(name) = dir
       dir
     }
 
   def clear() {
-    files.clear();
+    files.clear()
   }
 }
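
In `VirtualDirectory`, `lastModified` becomes a `val`, the directory reports `isVirtual`, and lookups use named boolean arguments. A minimal in-memory sketch; the names are illustrative:

    import scala.reflect.io.VirtualDirectory

    object VirtualDirectoryUsageSketch {
      def main(args: Array[String]): Unit = {
        val root = new VirtualDirectory("(memory)", None)
        val pkg  = root.subdirectoryNamed("com")   // created on first request, reused afterwards
        val src  = pkg.fileNamed("Example.scala")
        println(src.path)                          // (memory)/com/Example.scala
        println(root.isVirtual)                    // true after this change
      }
    }
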
diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala
index 95f4429..45f38db 100644
--- a/src/reflect/scala/reflect/io/VirtualFile.scala
+++ b/src/reflect/scala/reflect/io/VirtualFile.scala
@@ -3,18 +3,17 @@
  * @author  Martin Odersky
  */
 
-
-package scala.reflect
+package scala
+package reflect
 package io
 
-import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream }
-import java.io.{ File => JFile }
+import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream, File => JFile }
 
 /** This class implements an in-memory file.
  *
  *  @author  Philippe Altherr
  *  @version 1.0, 23/03/2004
- *  
+ *
  *  ''Note:  This library is considered experimental and should not be used unless you know what you are doing.''
  */
 class VirtualFile(val name: String, override val path: String) extends AbstractFile {
@@ -33,20 +32,16 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
     case _              => false
   }
 
-  //########################################################################
-  // Private data
   private var content = Array.emptyByteArray
 
-  //########################################################################
-  // Public Methods
   def absolute = this
 
   /** Returns null. */
-  final def file: JFile = null
+  def file: JFile = null
 
-  override def sizeOption: Option[Int] = Some(content.size)
+  override def sizeOption: Option[Int] = Some(content.length)
 
-  def input : InputStream = new ByteArrayInputStream(content);
+  def input : InputStream = new ByteArrayInputStream(content)
 
   override def output: OutputStream = {
     new ByteArrayOutputStream() {
@@ -62,10 +57,16 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
   /** Is this abstract file a directory? */
   def isDirectory: Boolean = false
 
+  /** @inheritdoc */
+  override def isVirtual: Boolean = true
+
+  // private var _lastModified: Long = 0
+  // _lastModified
+
   /** Returns the time that this abstract file was last modified. */
-  private var _lastModified: Long = 0
-  def lastModified: Long = _lastModified
-  def lastModified_=(x: Long) = _lastModified = x
+  // !!! Except it doesn't - it's private and never set - so I replaced it
+  // with constant 0 to save the field.
+  def lastModified: Long = 0
 
   /** Returns all abstract subfiles of this abstract directory. */
   def iterator: Iterator[AbstractFile] = {
@@ -74,20 +75,16 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
   }
 
   /** Does this abstract file denote an existing file? */
-  def create() { unsupported }
+  def create() { unsupported() }
 
   /** Delete the underlying file or directory (recursively). */
-  def delete() { unsupported }
+  def delete() { unsupported() }
 
   /**
    * Returns the abstract file in this abstract directory with the
    * specified name. If there is no such file, returns null. The
    * argument "directory" tells whether to look for a directory or
    * or a regular file.
-   *
-   * @param name      ...
-   * @param directory ...
-   * @return          ...
    */
   def lookupName(name: String, directory: Boolean): AbstractFile = {
     assert(isDirectory, "not a directory '" + this + "'")
@@ -97,7 +94,5 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
   /** Returns an abstract file with the given name. It does not
    *  check that it exists.
    */
-  def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
-
-  //########################################################################
+  def lookupNameUnchecked(name: String, directory: Boolean) = unsupported()
 }
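
`VirtualFile` now reports `isVirtual`, sizes itself from `content.length`, and returns a constant 0 for `lastModified`. A minimal sketch:

    import scala.reflect.io.VirtualFile

    object VirtualFileUsageSketch {
      def main(args: Array[String]): Unit = {
        val vf  = new VirtualFile("hello.txt", "/virtual/hello.txt")
        val out = vf.output                        // a ByteArrayOutputStream that stores on close()
        out.write("hello".getBytes("UTF-8"))
        out.close()
        println(vf.sizeOption)                     // Some(5)
        println(vf.lastModified)                   // always 0 after this change
      }
    }
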
diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala
index 3b57721..8260189 100644
--- a/src/reflect/scala/reflect/io/ZipArchive.scala
+++ b/src/reflect/scala/reflect/io/ZipArchive.scala
@@ -3,14 +3,17 @@
  * @author  Paul Phillips
  */
 
-package scala.reflect
+package scala
+package reflect
 package io
 
 import java.net.URL
-import java.io.{ IOException, InputStream, ByteArrayInputStream }
+import java.io.{ IOException, InputStream, ByteArrayInputStream, FilterInputStream }
 import java.io.{ File => JFile }
 import java.util.zip.{ ZipEntry, ZipFile, ZipInputStream }
+import java.util.jar.Manifest
 import scala.collection.{ immutable, mutable }
+import scala.collection.convert.WrapAsScala.asScalaIterator
 import scala.annotation.tailrec
 
 /** An abstraction for zip files and streams.  Everything is written the way
@@ -20,13 +23,10 @@ import scala.annotation.tailrec
  *  @author  Philippe Altherr (original version)
  *  @author  Paul Phillips (this one)
  *  @version 2.0,
- *  
+ *
  *  ''Note:  This library is considered experimental and should not be used unless you know what you are doing.''
  */
 object ZipArchive {
-  def fromPath(path: String): FileZipArchive = fromFile(new JFile(path))
-  def fromPath(path: Path): FileZipArchive = fromFile(path.toFile)
-
   /**
    * @param   file  a File
    * @return  A ZipArchive if `file` is a readable zip file, otherwise null.
@@ -41,10 +41,11 @@ object ZipArchive {
    * @return  A ZipArchive backed by the given url.
    */
   def fromURL(url: URL): URLZipArchive = new URLZipArchive(url)
-  def fromURL(url: String): URLZipArchive = fromURL(new URL(url))
 
-  private def dirName(path: String)  = splitPath(path, true)
-  private def baseName(path: String) = splitPath(path, false)
+  def fromManifestURL(url: URL): AbstractFile = new ManifestResources(url)
+
+  private def dirName(path: String)  = splitPath(path, front = true)
+  private def baseName(path: String) = splitPath(path, front = false)
   private def splitPath(path0: String, front: Boolean): String = {
     val isDir = path0.charAt(path0.length - 1) == '/'
     val path  = if (isDir) path0.substring(0, path0.length - 1) else path0
@@ -65,13 +66,13 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
 
   override def underlyingSource = Some(this)
   def isDirectory = true
-  def lookupName(name: String, directory: Boolean) = unsupported
-  def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
-  def create()  = unsupported
-  def delete()  = unsupported
-  def output    = unsupported
-  def container = unsupported
-  def absolute  = unsupported
+  def lookupName(name: String, directory: Boolean) = unsupported()
+  def lookupNameUnchecked(name: String, directory: Boolean) = unsupported()
+  def create()  = unsupported()
+  def delete()  = unsupported()
+  def output    = unsupported()
+  def container = unsupported()
+  def absolute  = unsupported()
 
   private def walkIterator(its: Iterator[AbstractFile]): Iterator[AbstractFile] = {
     its flatMap { f =>
@@ -79,7 +80,6 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
       else Iterator(f)
     }
   }
-  def deepIterator = walkIterator(iterator)
   /** ''Note:  This library is considered experimental and should not be used unless you know what you are doing.'' */
   sealed abstract class Entry(path: String) extends VirtualFile(baseName(path), path) {
     // have to keep this name for compat with sbt's compiler-interface
@@ -126,7 +126,11 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
 /** ''Note:  This library is considered experimental and should not be used unless you know what you are doing.'' */
 final class FileZipArchive(file: JFile) extends ZipArchive(file) {
   def iterator: Iterator[Entry] = {
-    val zipFile = new ZipFile(file)
+    val zipFile = try {
+      new ZipFile(file)
+    } catch {
+      case ioe: IOException => throw new IOException("Error accessing " + file.getPath, ioe)
+    }
     val root    = new DirEntry("/")
     val dirs    = mutable.HashMap[String, DirEntry]("/" -> root)
     val enum    = zipFile.entries()
@@ -232,3 +236,63 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) {
     case _                => false
   }
 }
+
+final class ManifestResources(val url: URL) extends ZipArchive(null) {
+  def iterator = {
+    val root     = new DirEntry("/")
+    val dirs     = mutable.HashMap[String, DirEntry]("/" -> root)
+    val manifest = new Manifest(input)
+    val iter     = manifest.getEntries().keySet().iterator().filter(_.endsWith(".class")).map(new ZipEntry(_))
+
+    while (iter.hasNext) {
+      val zipEntry = iter.next()
+      val dir = getDir(dirs, zipEntry)
+      if (zipEntry.isDirectory) dir
+      else {
+        class FileEntry() extends Entry(zipEntry.getName) {
+          override def lastModified = zipEntry.getTime()
+          override def input        = resourceInputStream(path)
+          override def sizeOption   = None
+        }
+        val f = new FileEntry()
+        dir.entries(f.name) = f
+      }
+    }
+
+    try root.iterator
+    finally dirs.clear()
+  }
+
+  def name  = path
+  def path: String = {
+    val s = url.getPath
+    val n = s.lastIndexOf('!')
+    s.substring(0, n)
+  }
+  def input = url.openStream()
+  def lastModified =
+    try url.openConnection().getLastModified()
+    catch { case _: IOException => 0 }
+
+  override def canEqual(other: Any) = other.isInstanceOf[ManifestResources]
+  override def hashCode() = url.hashCode
+  override def equals(that: Any) = that match {
+    case x: ManifestResources => url == x.url
+    case _                => false
+  }
+
+  private def resourceInputStream(path: String): InputStream = {
+    new FilterInputStream(null) {
+      override def read(): Int = {
+        if(in == null) in = Thread.currentThread().getContextClassLoader().getResourceAsStream(path);
+        if(in == null) throw new RuntimeException(path + " not found")
+        super.read();
+      }
+
+      override def close(): Unit = {
+        super.close();
+        in = null;
+      }
+    }
+  }
+}
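
Alongside the existing `FileZipArchive`, the new `ManifestResources` exposes `.class` entries listed in a jar manifest, and `deepIterator` is gone. A minimal sketch of walking an archive by hand, with the jar path as a placeholder:

    import java.io.File
    import scala.reflect.io.{ AbstractFile, FileZipArchive }

    object ZipArchiveUsageSketch {
      // Depth-first walk over directory entries (roughly what deepIterator used to do).
      def walk(f: AbstractFile): Iterator[AbstractFile] =
        if (f.isDirectory) f.iterator.flatMap(walk) else Iterator(f)

      def main(args: Array[String]): Unit = {
        val archive = new FileZipArchive(new File("/tmp/example.jar"))  // IOException now names the path
        val classes = archive.iterator.flatMap(walk).count(_.name.endsWith(".class"))
        println(classes)
      }
    }
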
diff --git a/src/reflect/scala/reflect/macros/Aliases.scala b/src/reflect/scala/reflect/macros/Aliases.scala
index 92d76f4..64819a8 100644
--- a/src/reflect/scala/reflect/macros/Aliases.scala
+++ b/src/reflect/scala/reflect/macros/Aliases.scala
@@ -1,14 +1,15 @@
-package scala.reflect
+package scala
+package reflect
 package macros
 
 /**
  * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
- *  A slice of [[scala.reflect.macros.Context the Scala macros context]] that defines shorthands for the
+ *  A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that defines shorthands for the
  *  most frequently used types and functions of the underlying compiler universe.
  */
 trait Aliases {
-  self: Context =>
+  self: blackbox.Context =>
 
   /** The type of symbols representing declarations. */
   type Symbol = universe.Symbol
@@ -39,10 +40,16 @@ trait Aliases {
   /** The type of tree modifiers. */
   type Modifiers = universe.Modifiers
 
-  /** The type of compilation runs. */
+  /** The type of compilation runs.
+   *  @see [[scala.reflect.macros.Enclosures]]
+   */
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
   type Run = universe.Run
 
-  /** The type of compilation units. */
+  /** The type of compilation units.
+   *  @see [[scala.reflect.macros.Enclosures]]
+   */
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
   type CompilationUnit = universe.CompilationUnit
 
   /** Expr wraps an abstract syntax tree and tags it with its type. */
@@ -109,4 +116,9 @@ trait Aliases {
    * Shortcut for `implicitly[TypeTag[T]].tpe`
    */
   def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
+
+  /**
+   * Type symbol of `x` as derived from a type tag.
+   */
+  def symbolOf[T: WeakTypeTag]: universe.TypeSymbol = universe.symbolOf[T]
 }
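
The aliases gain `symbolOf[T]` as a shorthand for the universe method of the same name. A minimal, hypothetical blackbox macro sketch (macro definition and call sites must be compiled separately):

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox

    object AliasesUsageSketch {
      // Hypothetical macro: yields the full name of T's type symbol at compile time.
      def nameOf[T]: String = macro nameOfImpl[T]

      def nameOfImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[String] = {
        import c.universe._
        val sym = c.symbolOf[T]                    // new shorthand from Aliases
        c.Expr[String](Literal(Constant(sym.fullName)))
      }
    }
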
diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala
index 007df3b..5ccdc15 100644
--- a/src/reflect/scala/reflect/macros/Attachments.scala
+++ b/src/reflect/scala/reflect/macros/Attachments.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package macros
 
 /**
@@ -40,6 +41,10 @@ abstract class Attachments { self =>
   def get[T: ClassTag]: Option[T] =
     (all filter matchesTag[T]).headOption.asInstanceOf[Option[T]]
 
+  /** Check underlying payload contains an instance of type `T`. */
+  def contains[T: ClassTag]: Boolean =
+    !isEmpty && (all exists matchesTag[T])
+
   /** Creates a copy of this attachment with the payload slot of T added/updated with the provided value.
    *  Replaces an existing payload of the same type, if exists.
    */
@@ -52,13 +57,14 @@ abstract class Attachments { self =>
     if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }]
     else new NonemptyAttachments[Pos](this.pos, newAll)
   }
+
+  def isEmpty: Boolean = true
 }
 
 // SI-7018: This used to be an inner class of `Attachments`, but that led to a memory leak in the
 // IDE via $outer pointers.
-// Forward compatibility note: This class used to be Attachments$NonemptyAttachments.
-// However it's private, therefore it transcends the compatibility policy for 2.10.x.
 private final class NonemptyAttachments[P >: Null](override val pos: P, override val all: Set[Any]) extends Attachments {
   type Pos = P
   def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
+  override def isEmpty: Boolean = false
 }
diff --git a/src/reflect/scala/reflect/macros/Context.scala b/src/reflect/scala/reflect/macros/Context.scala
deleted file mode 100644
index aa1c1db..0000000
--- a/src/reflect/scala/reflect/macros/Context.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-package scala.reflect
-package macros
-
-// todo. introduce context hierarchy
-// the most lightweight context should just expose the stuff from the SIP
-// the full context should include all traits from scala.reflect.macros (and probably reside in scala-compiler.jar)
-
-/**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
- *
- *  The Scala macros context.
- *
- *  See [[scala.reflect.macros.package the overview page]] for a description of how macros work. This documentation
- *  entry provides information on the API available to macro writers.
- *
- *  A macro context wraps a compiler universe exposed in `universe` and having type [[scala.reflect.macros.Universe]].
- *  This type is a refinement over the generic reflection API provided in [[scala.reflect.api.Universe]]. The
- *  extended Universe provides mutability for reflection artifacts (e.g. macros can change types of compiler trees,
- *  add annotation to symbols representing definitions, etc) and exposes some internal compiler functionality
- *  such as `Symbol.deSkolemize` or `Tree.attachments`.
- *
- *  Another fundamental part of a macro context is `macroApplication`, which provides access to the tree undergoing
- *  macro expansion. Parts of this tree can be found in arguments of the corresponding macro implementations and
- *  in `prefix`, but `macroApplication` gives the full picture.
- *
- *  Other than that, macro contexts provide facilities for typechecking, exploring the compiler's symbol table and
- *  enclosing trees and compilation units, evaluating trees, logging warnings/errors and much more.
- *  Refer to the documentation of top-level traits in this package to learn the details.
- */
-trait Context extends Aliases
-                 with Enclosures
-                 with Names
-                 with Reifiers
-                 with FrontEnds
-                 with Infrastructure
-                 with Typers
-                 with Parsers
-                 with Evals
-                 with ExprUtils {
-
-  /** The compile-time universe. */
-  val universe: Universe
-
-  /** The mirror of the compile-time universe. */
-  val mirror: universe.Mirror
-
-  /** The type of the prefix tree from which the macro is selected.
-   *  See the documentation entry for `prefix` for an example.
-   */
-  type PrefixType
-
-  /** The prefix tree from which the macro is selected.
-   *
-   *  For a example, for a macro `filter` defined as an instance method on a collection `Coll`,
-   *  `prefix` represents an equivalent of `this` for normal instance methods:
-   *
-   *  {{{
-   *  scala> class Coll[T] {
-   *       | def filter(p: T => Boolean): Coll[T] = macro M.filter[T]
-   *       | }; object M {
-   *       | def filter[T](c: Context { type PrefixType = Coll[T] })
-   *       |              (p: c.Expr[T => Boolean]): c.Expr[Coll[T]] =
-   *       |   {
-   *       |     println(c.prefix.tree)
-   *       |     c.prefix
-   *       |   }
-   *       | }
-   *  defined class Coll
-   *  defined module Macros
-   *
-   *  scala> new Coll[Int]().filter(_ % 2 == 0)
-   *  new Coll[Int]()
-   *  res0: Coll[Int] = ...
-   *
-   *  scala> val x = new Coll[String]()
-   *  x: Coll[String] = ...
-   *
-   *  scala> x.filter(_ != "")
-   *  \$line11.\$read.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.x
-   *  res1 @ 35563b4b: x.type = ...
-   *  }}}
-   *
-   *  Note how the value of `prefix` changes depending on the qualifier of the macro call
-   *  (i.e. the expression that is at the left-hand side of the dot).
-   *
-   *  Another noteworthy thing about the snippet above is the `Context { type PrefixType = Coll[T] }`
-   *  type that is used to stress that the macro implementation works with prefixes of type `Coll[T]`.
-   */
-  val prefix: Expr[PrefixType]
-}
diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala
index a4ad71c..69ede42 100644
--- a/src/reflect/scala/reflect/macros/Enclosures.scala
+++ b/src/reflect/scala/reflect/macros/Enclosures.scala
@@ -1,66 +1,114 @@
-package scala.reflect
+package scala
+package reflect
 package macros
 
+import scala.language.existentials // SI-6541
+
 /**
  * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
- *  A slice of [[scala.reflect.macros.Context the Scala macros context]] that exposes
- *  enclosing trees (method, class, compilation unit and currently compiled application),
+ *  A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that exposes
+ *  enclosing trees (method, class, compilation unit and currently compiled macro application),
  *  the enclosing position of the macro expansion, as well as macros and implicits
  *  that are currently in-flight.
+ *
+ *  Starting from Scala 2.11.0, the APIs to get the trees enclosing by the current macro application are deprecated,
+ *  and the reasons for that are two-fold. Firstly, we would like to move towards the philosophy of locally-expanded macros,
+ *  as it has proven to be important for understanding of code. Secondly, within the current architecture of scalac,
+ *  we are unable to have c.enclosingTree-style APIs working robustly. Required changes to the typechecker would greatly
+ *  exceed the effort that we would like to expend on this feature given the existence of more pressing concerns at the moment.
+ *  This is somewhat aligned with the overall evolution of macros during the 2.11 development cycle, where we played with
+ *  `c.introduceTopLevel` and `c.introduceMember`, but at the end of the day decided to reject them.
+ *
+ *  If you're relying on the now deprecated APIs, consider using the new [[c.internal.enclosingOwner]] method that can be used to obtain
+ *  the names of enclosing definitions. Alternatively try reformulating your macros in terms of completely local expansion
+ *  and/or joining a discussion of a somewhat related potential language feature at [[https://groups.google.com/forum/#!topic/scala-debate/f4CLmYShX6Q]].
+ *  We also welcome questions and suggestions on our mailing lists, where we would be happy to further discuss this matter.
  */
 trait Enclosures {
-  self: Context =>
+  self: blackbox.Context =>
 
   /** The tree that undergoes macro expansion.
    *  Can be useful to get an offset or a range position of the entire tree being processed.
    */
-  val macroApplication: Tree
+  def macroApplication: Tree
 
   /** Contexts that represent macros in-flight, including the current one. Very much like a stack trace, but for macros only.
    *  Can be useful for interoperating with other macros and for imposing compiler-friendly limits on macro expansion.
    *
    *  Is also priceless for emitting sane error messages for macros that are called by other macros on synthetic (i.e. position-less) trees.
-   *  In that dire case navigate the ``enclosingMacros'' stack, and it will most likely contain at least one macro with a position-ful macro application.
-   *  See ``enclosingPosition'' for a default implementation of this logic.
+   *  In that dire case navigate the `enclosingMacros` stack, and it will most likely contain at least one macro with a position-ful macro application.
+   *  See `enclosingPosition` for a default implementation of this logic.
    *
    *  Unlike `openMacros`, this is a val, which means that it gets initialized when the context is created
    *  and always stays the same regardless of whatever happens during macro expansion.
    */
-  val enclosingMacros: List[Context]
-
-  /** Information about one of the currently considered implicit candidates.
-   *  Candidates are used in plural form, because implicit parameters may themselves have implicit parameters,
-   *  hence implicit searches can recursively trigger other implicit searches.
-   *
-   *  Can be useful to get information about an application with an implicit parameter that is materialized during current macro expansion.
-   *  If we're in an implicit macro being expanded, it's included in this list.
-   *
-   *  Unlike `openImplicits`, this is a val, which means that it gets initialized when the context is created
-   *  and always stays the same regardless of whatever happens during macro expansion.
-   */
-  val enclosingImplicits: List[(Type, Tree)]
+  def enclosingMacros: List[blackbox.Context]
 
   /** Tries to guess a position for the enclosing application.
-   *  But that is simple, right? Just dereference ``pos'' of ``macroApplication''? Not really.
+   *  But that is simple, right? Just dereference `pos` of `macroApplication`? Not really.
    *  If we're in a synthetic macro expansion (no positions), we must do our best to infer the position of something that triggerd this expansion.
-   *  Surprisingly, quite often we can do this by navigation the ``enclosingMacros'' stack.
+   *  Surprisingly, quite often we can do this by navigation the `enclosingMacros` stack.
    */
-  val enclosingPosition: Position
+  def enclosingPosition: Position
 
   /** Tree that corresponds to the enclosing method, or EmptyTree if not applicable.
+   *  @see [[scala.reflect.macros.Enclosures]]
    */
-  val enclosingMethod: Tree
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
+  def enclosingMethod: Tree
 
   /** Tree that corresponds to the enclosing class, or EmptyTree if not applicable.
+   *  @see [[scala.reflect.macros.Enclosures]]
+   */
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
+  def enclosingClass: Tree
+
+  /** Tree that corresponds to the enclosing DefDef tree.
+   *  Throws `EnclosureException` if there's no such enclosing tree.
+   *  @see [[scala.reflect.macros.Enclosures]]
+   */
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
+  def enclosingDef: universe.DefDef
+
+  /** Tree that corresponds to the enclosing Template tree.
+   *  Throws `EnclosureException` if there's no such enclosing tree.
+   *  @see [[scala.reflect.macros.Enclosures]]
+   */
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
+  def enclosingTemplate: universe.Template
+
+  /** Tree that corresponds to the enclosing ImplDef tree (i.e. either ClassDef or ModuleDef).
+   *  Throws `EnclosureException` if there's no such enclosing tree.
+   *  @see [[scala.reflect.macros.Enclosures]]
    */
-  val enclosingClass: Tree
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
+  def enclosingImpl: universe.ImplDef
+
+  /** Tree that corresponds to the enclosing PackageDef tree.
+   *  Throws `EnclosureException` if there's no such enclosing tree.
+   *  @see [[scala.reflect.macros.Enclosures]]
+   */
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
+  def enclosingPackage: universe.PackageDef
 
   /** Compilation unit that contains this macro application.
+   *  @see [[scala.reflect.macros.Enclosures]]
    */
-  val enclosingUnit: CompilationUnit
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
+  def enclosingUnit: CompilationUnit
 
   /** Compilation run that contains this macro application.
+   *  @see [[scala.reflect.macros.Enclosures]]
+   */
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
+  def enclosingRun: Run
+
+  /** Indicates than one of the enclosure methods failed to find a tree
+   *  of required type among enclosing trees.
+   *  @see [[scala.reflect.macros.Enclosures]]
    */
-  val enclosingRun: Run
-}
\ No newline at end of file
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
+  case class EnclosureException(expected: Class[_], enclosingTrees: List[Tree])
+  extends Exception(s"Couldn't find a tree of type $expected among enclosing trees $enclosingTrees")
+}
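
As the deprecation messages suggest, `c.internal.enclosingOwner` covers the common "what definition encloses this expansion?" question without the enclosingTree-style APIs. A minimal, hypothetical sketch:

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox

    object EnclosuresUsageSketch {
      // Hypothetical macro: expands to the full name of the definition enclosing the call site.
      def whereAmI: String = macro whereAmIImpl

      def whereAmIImpl(c: blackbox.Context): c.Expr[String] = {
        import c.universe._
        val owner = c.internal.enclosingOwner      // not deprecated, unlike c.enclosingMethod
        c.Expr[String](Literal(Constant(owner.fullName)))
      }
    }
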
diff --git a/src/reflect/scala/reflect/macros/Evals.scala b/src/reflect/scala/reflect/macros/Evals.scala
index 37680c2..68e07dd 100644
--- a/src/reflect/scala/reflect/macros/Evals.scala
+++ b/src/reflect/scala/reflect/macros/Evals.scala
@@ -1,14 +1,15 @@
-package scala.reflect
+package scala
+package reflect
 package macros
 
 /**
  * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
- *  A slice of [[scala.reflect.macros.Context the Scala macros context]] that provides
+ *  A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that provides
  *  a facility to evaluate trees.
  */
 trait Evals {
-  self: Context =>
+  self: blackbox.Context =>
 
   /** Takes a typed wrapper for a tree of type `T` and evaluates it to a value of type `T`.
    *
@@ -16,13 +17,13 @@ trait Evals {
    *  permitted by the shape of the arguments.
    *
    *  Known issues: because of [[https://issues.scala-lang.org/browse/SI-5748 https://issues.scala-lang.org/browse/SI-5748]]
-   *  trees being evaluated first need to undergo `resetAllAttrs`. Resetting symbols and types
+   *  trees being evaluated first need to undergo `untypecheck`. Resetting symbols and types
    *  mutates the tree in place, therefore the conventional approach is to `duplicate` the tree first.
    *
    *  {{{
    *  scala> def impl(c: Context)(x: c.Expr[String]) = {
-   *       | val x1 = c.Expr[String](c.resetAllAttrs(x.tree.duplicate))
-   *       | println(s"compile-time value is: \${c.eval(x1)}")
+   *       | val x1 = c.Expr[String](c.untypecheck(x.tree.duplicate))
+   *       | println(s"compile-time value is: ${c.eval(x1)}")
    *       | x
    *       | }
    *  impl: (c: Context)(x: c.Expr[String])c.Expr[String]
@@ -54,4 +55,4 @@ trait Evals {
    *  refers to a runtime value `x`, which is unknown at compile time.
    */
   def eval[T](expr: Expr[T]): T
-}
\ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/macros/ExprUtils.scala b/src/reflect/scala/reflect/macros/ExprUtils.scala
index 458cde9..c438653 100644
--- a/src/reflect/scala/reflect/macros/ExprUtils.scala
+++ b/src/reflect/scala/reflect/macros/ExprUtils.scala
@@ -1,51 +1,65 @@
-package scala.reflect
+package scala
+package reflect
 package macros
 
 /**
  * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
- * A slice of [[scala.reflect.macros.Context the Scala macros context]] that defines shorthands for the
+ * A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that defines shorthands for the
  *  most common `Expr`-creating functions.
  */
 trait ExprUtils {
-  self: Context =>
+  self: blackbox.Context =>
 
   /** Shorthand for `Literal(Constant(null))` in the underlying `universe`. */
+  @deprecated("Use quasiquotes instead", "2.11.0")
   def literalNull: Expr[Null]
 
   /** Shorthand for `Literal(Constant(()))` in the underlying `universe`. */
+  @deprecated("Use quasiquotes instead", "2.11.0")
   def literalUnit: Expr[Unit]
 
   /** Shorthand for `Literal(Constant(true))` in the underlying `universe`. */
+  @deprecated("Use quasiquotes instead", "2.11.0")
   def literalTrue: Expr[Boolean]
 
   /** Shorthand for `Literal(Constant(false))` in the underlying `universe`. */
+  @deprecated("Use quasiquotes instead", "2.11.0")
   def literalFalse: Expr[Boolean]
 
   /** Shorthand for `Literal(Constant(x: Boolean))` in the underlying `universe`. */
+  @deprecated("Use quasiquotes instead", "2.11.0")
   def literal(x: Boolean): Expr[Boolean]
 
   /** Shorthand for `Literal(Constant(x: Byte))` in the underlying `universe`. */
+  @deprecated("Use quasiquotes instead", "2.11.0")
   def literal(x: Byte): Expr[Byte]
 
   /** Shorthand for `Literal(Constant(x: Short))` in the underlying `universe`. */
+  @deprecated("Use quasiquotes instead", "2.11.0")
   def literal(x: Short): Expr[Short]
 
   /** Shorthand for `Literal(Constant(x: Int))` in the underlying `universe`. */
+  @deprecated("Use quasiquotes instead", "2.11.0")
   def literal(x: Int): Expr[Int]
 
   /** Shorthand for `Literal(Constant(x: Long))` in the underlying `universe`. */
+  @deprecated("Use quasiquotes instead", "2.11.0")
   def literal(x: Long): Expr[Long]
 
   /** Shorthand for `Literal(Constant(x: Float))` in the underlying `universe`. */
+  @deprecated("Use quasiquotes instead", "2.11.0")
   def literal(x: Float): Expr[Float]
 
   /** Shorthand for `Literal(Constant(x: Double))` in the underlying `universe`. */
+  @deprecated("Use quasiquotes instead", "2.11.0")
   def literal(x: Double): Expr[Double]
 
   /** Shorthand for `Literal(Constant(x: String))` in the underlying `universe`. */
+  @deprecated("Use quasiquotes instead", "2.11.0")
   def literal(x: String): Expr[String]
 
   /** Shorthand for `Literal(Constant(x: Char))` in the underlying `universe`. */
+  @deprecated("Use quasiquotes instead", "2.11.0")
   def literal(x: Char): Expr[Char]
 }
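
All of the `literal*` shorthands are deprecated in favor of quasiquotes. A minimal, hypothetical sketch of the replacement:

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox

    object ExprUtilsUsageSketch {
      // Hypothetical macro that expands to the constant 42.
      def fortyTwo: Int = macro fortyTwoImpl

      def fortyTwoImpl(c: blackbox.Context): c.Expr[Int] = {
        import c.universe._
        c.Expr[Int](q"42")                         // instead of the deprecated c.literal(42)
      }
    }
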
diff --git a/src/reflect/scala/reflect/macros/FrontEnds.scala b/src/reflect/scala/reflect/macros/FrontEnds.scala
index 67b2408..a770f32 100644
--- a/src/reflect/scala/reflect/macros/FrontEnds.scala
+++ b/src/reflect/scala/reflect/macros/FrontEnds.scala
@@ -1,15 +1,16 @@
-package scala.reflect
+package scala
+package reflect
 package macros
 
 /**
  * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
- *  A slice of [[scala.reflect.macros.Context the Scala macros context]] that
+ *  A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that
  *  provides facilities to communicate with the compiler's front end
  *  (emit warnings, errors and other sorts of messages).
  */
 trait FrontEnds {
-  self: Context =>
+  self: blackbox.Context =>
 
   /** For sending a message which should not be labeled as a warning/error,
    *  but also shouldn't require -verbose to be visible.
@@ -44,4 +45,4 @@ trait FrontEnds {
    *  Use `enclosingPosition` if you're in doubt what position to pass to `pos`.
    */
   def abort(pos: Position, msg: String): Nothing
-}
\ No newline at end of file
+}
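
A minimal, hypothetical sketch of reporting through the front end, aborting with a position taken from the offending tree:

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox

    object FrontEndsUsageSketch {
      // Hypothetical macro that only accepts an integer literal argument.
      def requireLiteral(x: Int): Int = macro requireLiteralImpl

      def requireLiteralImpl(c: blackbox.Context)(x: c.Expr[Int]): c.Expr[Int] = {
        import c.universe._
        x.tree match {
          case Literal(Constant(_: Int)) => x
          case other                     => c.abort(other.pos, "expected an integer literal")
        }
      }
    }
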
diff --git a/src/reflect/scala/reflect/macros/Infrastructure.scala b/src/reflect/scala/reflect/macros/Infrastructure.scala
index 99706e8..0f2d9ce 100644
--- a/src/reflect/scala/reflect/macros/Infrastructure.scala
+++ b/src/reflect/scala/reflect/macros/Infrastructure.scala
@@ -1,14 +1,15 @@
-package scala.reflect
+package scala
+package reflect
 package macros
 
 /**
  * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
- *  A slice of [[scala.reflect.macros.Context the Scala macros context]] that
+ *  A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that
  *  provides facilities to communicate with the compiler's infrastructure.
  */
 trait Infrastructure {
-  self: Context =>
+  self: blackbox.Context =>
 
   /** Exposes macro-specific settings as a list of strings.
    *  These settings are passed to the compiler via the "-Xmacro-settings:setting1,setting2...,settingN" command-line option.
@@ -22,4 +23,4 @@ trait Infrastructure {
 
   /** Exposes current classpath. */
   def classPath: List[java.net.URL]
-}
\ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/macros/Internals.scala b/src/reflect/scala/reflect/macros/Internals.scala
new file mode 100644
index 0000000..7516434
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Internals.scala
@@ -0,0 +1,79 @@
+package scala
+package reflect
+package macros
+
+/**
+ *  <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *  @see [[scala.reflect.api.Internals]]
+ */
+trait Internals {
+  self: blackbox.Context =>
+
+  /** @see [[scala.reflect.api.Internals]] */
+  val internal: ContextInternalApi
+
+  /** @see [[scala.reflect.api.Internals]] */
+  trait ContextInternalApi extends universe.MacroInternalApi {
+    /** Symbol associated with the innermost enclosing lexical context.
+     *  Walking the owner chain of this symbol will reveal information about more and more enclosing contexts.
+     */
+    def enclosingOwner: Symbol
+
+    /** Functions that are available during [[transform]].
+     *  @see [[transform]]
+     */
+    trait TransformApi {
+      /** Calls the current transformer on the given tree.
+       *  Current transformer = argument to the `transform` call.
+       */
+      def recur(tree: Tree): Tree
+
+      /** Calls the default transformer on the given tree.
+       *  Default transformer = recur into tree's children and assemble the results.
+       */
+      def default(tree: Tree): Tree
+    }
+
+    /** Transforms a given tree using the provided function.
+     *  @see [[TransformApi]]
+     */
+    // TODO: explore a more concise notation that Denys and I discussed today
+    // when transformer is PartialFunction[Tree, Tree]] and TransformApi is passed magically
+    // also cf. https://github.com/dsl-paradise/dsl-paradise
+    def transform(tree: Tree)(transformer: (Tree, TransformApi) => Tree): Tree
+
+    /** Functions that are available during [[typingTransform]].
+     *  @see [[typingTransform]]
+     */
+    trait TypingTransformApi extends TransformApi {
+      /** Temporarily pushes the given symbol onto the owner stack, creating a new local typer,
+       *  invoke the given operation and then rollback the changes to the owner stack.
+       */
+      def atOwner[T](owner: Symbol)(op: => T): T
+
+      /** Temporarily pushes the given tree onto the recursion stack, and then calls `atOwner(symbol)(trans)`.
+       */
+      def atOwner[T](tree: Tree, owner: Symbol)(op: => T): T
+
+      /** Returns the symbol currently on the top of the owner stack.
+       *  If we're not inside any `atOwner` call, then macro application's context owner will be used.
+       */
+      def currentOwner: Symbol
+
+      /** Typechecks the given tree using the local typer currently on the top of the owner stack.
+       *  If we're not inside any `atOwner` call, then macro application's callsite typer will be used.
+       */
+      def typecheck(tree: Tree): Tree
+    }
+
+    /** Transforms a given tree using the provided function.
+     *  @see [[TypingTransformApi]]
+     */
+    def typingTransform(tree: Tree)(transformer: (Tree, TypingTransformApi) => Tree): Tree
+
+    /** Transforms a given tree at a given owner using the provided function.
+     *  @see [[TypingTransformApi]]
+     */
+    def typingTransform(tree: Tree, owner: Symbol)(transformer: (Tree, TypingTransformApi) => Tree): Tree
+  }
+}
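
A minimal, hypothetical sketch of the new `c.internal.transform`, using the `TransformApi` callback to recurse into children:

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox

    object InternalsUsageSketch {
      // Hypothetical macro: counts the Int literals inside its argument at compile time.
      def countIntLiterals(expr: Any): Int = macro countImpl

      def countImpl(c: blackbox.Context)(expr: c.Expr[Any]): c.Expr[Int] = {
        import c.universe._
        var n = 0
        c.internal.transform(expr.tree) { (tree, api) =>
          tree match {
            case Literal(Constant(_: Int)) => n += 1; tree      // keep the node as-is
            case _                         => api.default(tree) // recurse into children
          }
        }
        c.Expr[Int](Literal(Constant(n)))
      }
    }
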
diff --git a/src/reflect/scala/reflect/macros/Names.scala b/src/reflect/scala/reflect/macros/Names.scala
index 8bbaa5f..4f3448e 100644
--- a/src/reflect/scala/reflect/macros/Names.scala
+++ b/src/reflect/scala/reflect/macros/Names.scala
@@ -1,23 +1,56 @@
-package scala.reflect
+package scala
+package reflect
 package macros
 
 /**
  * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
- *  A slice of [[scala.reflect.macros.Context the Scala macros context]] that
- *  provides functions that generate unique names.
+ *  A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that
+ *  provides functions that generate fresh names.
+ *
+ *  In the current implementation, fresh names are more or less unique in the sense that
+ *  within the same compilation run they are guaranteed not to clash with:
+ *    1) Results of past and future invocations of functions of `freshName` family
+ *    2) User-defined or macro-generated names that don't contain dollar symbols
+ *    3) Macro-generated names that are created by concatenating names from the first, second and third categories
+ *
+ *  Uniqueness of fresh names across compilation runs is not guaranteed, but that's something
+ *  that we would like to improve upon in future releases. See [[https://issues.scala-lang.org/browse/SI-6879]] for more information.
+ *
+ *  @define freshNameNoParams
+ *  Creates a string that represents a more or less unique name.
+ *  Consult [[scala.reflect.macros.Names]] for more information on uniqueness of such names.
+ *
+ *  @define freshNameStringParam
+ *  Creates a string that represents a more or less unique name having a given prefix.
+ *  Consult [[scala.reflect.macros.Names]] for more information on uniqueness of such names.
+ *
+ *  @define freshNameNameParam
+ *  Creates a more or less unique name having a given name as a prefix and
+ *  having the same flavor (term name or type name) as the given name.
+ *  Consult [[scala.reflect.macros.Names]] for more information on uniqueness of such names.
  */
 trait Names {
-  self: Context =>
+  self: blackbox.Context =>
 
-  /** Creates a unique string. */
+  /** $freshNameNoParams */
+  @deprecated("Use freshName instead", "2.11.0")
   def fresh(): String
 
-  /** Creates a unique string having a given prefix. */
+  /** $freshNameStringParam */
+  @deprecated("Use freshName instead", "2.11.0")
   def fresh(name: String): String
 
-  /** Creates a unique name having a given name as a prefix and
-   *  having the same flavor (term name or type name) as the given name.
-   */
+  /** $freshNameNameParam */
+  @deprecated("Use freshName instead", "2.11.0")
   def fresh[NameType <: Name](name: NameType): NameType
+
+  /** $freshNameNoParams */
+  def freshName(): String
+
+  /** $freshNameStringParam */
+  def freshName(name: String): String
+
+  /** $freshNameNameParam */
+  def freshName[NameType <: Name](name: NameType): NameType
 }
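
`fresh` is deprecated in favor of `freshName`. A minimal, hypothetical sketch that binds a macro argument once to a freshly named temporary:

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox

    object NamesUsageSketch {
      // Hypothetical macro: evaluates its argument exactly once via a freshly named val.
      def once[T](expr: T): T = macro onceImpl[T]

      def onceImpl[T: c.WeakTypeTag](c: blackbox.Context)(expr: c.Expr[T]): c.Expr[T] = {
        import c.universe._
        val tmp = TermName(c.freshName("once$"))   // replaces the deprecated c.fresh("once$")
        c.Expr[T](q"{ val $tmp = ${expr.tree}; $tmp }")
      }
    }
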
diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala
index 93a7637..720b754 100644
--- a/src/reflect/scala/reflect/macros/Parsers.scala
+++ b/src/reflect/scala/reflect/macros/Parsers.scala
@@ -1,14 +1,15 @@
-package scala.reflect
+package scala
+package reflect
 package macros
 
 /**
  * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
- *  A slice of [[scala.reflect.macros.Context the Scala macros context]] that
+ *  A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that
  *  exposes functions to parse strings with Scala code into trees.
  */
 trait Parsers {
-  self: Context =>
+  self: blackbox.Context =>
 
   /** Parses a string with a Scala expression into an abstract syntax tree.
    *  Only works for expressions, i.e. parsing a package declaration will fail.
@@ -19,4 +20,4 @@ trait Parsers {
 
 /** Indicates an error during [[scala.reflect.macros.Parsers#parse]].
  */
-case class ParseException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg)
+case class ParseException(pos: scala.reflect.api.Position, msg: String) extends Exception(msg)
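
`ParseException` drops its redundant `val` modifiers. A minimal, hypothetical sketch of `c.parse`, which throws that exception on malformed input:

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox

    object ParsersUsageSketch {
      // Hypothetical macro: parses a string literal as Scala code and inlines the result.
      def inlineCode(code: String): Any = macro inlineCodeImpl

      def inlineCodeImpl(c: blackbox.Context)(code: c.Expr[String]): c.Expr[Any] = {
        import c.universe._
        code.tree match {
          case Literal(Constant(src: String)) => c.Expr[Any](c.parse(src))   // may throw ParseException
          case _ => c.abort(c.enclosingPosition, "inlineCode expects a string literal")
        }
      }
    }
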
diff --git a/src/reflect/scala/reflect/macros/Reifiers.scala b/src/reflect/scala/reflect/macros/Reifiers.scala
index 3db7b9a..e35a5c8 100644
--- a/src/reflect/scala/reflect/macros/Reifiers.scala
+++ b/src/reflect/scala/reflect/macros/Reifiers.scala
@@ -1,26 +1,27 @@
-package scala.reflect
+package scala
+package reflect
 package macros
 
 /**
  * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
- * A slice of [[scala.reflect.macros.Context the Scala macros context]] that
+ * A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that
  *  exposes functions to save reflection artifacts for runtime.
  */
 trait Reifiers {
-  self: Context =>
+  self: blackbox.Context =>
 
   /** Given a tree, generate a tree that when compiled and executed produces the original tree.
-   *  For more information and examples see the documentation for ``Universe.reify''.
+   *  For more information and examples see the documentation for `Universe.reify`.
    *
-   *  The produced tree will be bound to the specified ``universe'' and ``mirror''.
-   *  Possible values for ``universe'' include ``universe.treeBuild.mkRuntimeUniverseRef''.
-   *  Possible values for ``mirror'' include ``EmptyTree'' (in that case the reifier will automatically pick an appropriate mirror).
+   *  The produced tree will be bound to the specified `universe` and `mirror`.
+   *  Possible values for `universe` include `universe.internal.gen.mkRuntimeUniverseRef`.
+   *  Possible values for `mirror` include `EmptyTree` (in that case the reifier will automatically pick an appropriate mirror).
    *
-   *  This function is deeply connected to ``Universe.reify'', a macro that reifies arbitrary expressions into runtime trees.
-   *  They do very similar things (``Universe.reify'' calls ``Context.reifyTree'' to implement itself), but they operate on different metalevels (see below).
+   *  This function is deeply connected to `Universe.reify`, a macro that reifies arbitrary expressions into runtime trees.
+   *  They do very similar things (`Universe.reify` calls `Context.reifyTree` to implement itself), but they operate on different metalevels (see below).
    *
-   *  Let's study the differences between ``Context.reifyTree'' and ``Universe.reify'' on an example of using them inside a ``fooMacro'' macro:
+   *  Let's study the differences between `Context.reifyTree` and `Universe.reify` on an example of using them inside a `fooMacro` macro:
    *
    *    * Since reify itself is a macro, it will be executed when fooMacro is being compiled (metalevel -1)
    *      and will produce a tree that when evaluated during macro expansion of fooMacro (metalevel 0) will recreate the input tree.
@@ -39,7 +40,7 @@ trait Reifiers {
    *    * The result of compiling and running the result of reify will be bound to the Universe that called reify.
    *      This is possible because it's a macro, so it can generate whatever code it wishes.
    *
-   *    * The result of compiling and running the result of reifyTree will be the ``prefix'' that needs to be passed explicitly.
+   *    * The result of compiling and running the result of reifyTree will be the `prefix` that needs to be passed explicitly.
   *      This happens because the Universe of the evaluated result is from a different metalevel than the Context that called reify.
    *
    *  Typical usage of this function is to retain some of the trees received/created by a macro
@@ -48,13 +49,13 @@ trait Reifiers {
   def reifyTree(universe: Tree, mirror: Tree, tree: Tree): Tree
 
   /** Given a type, generate a tree that when compiled and executed produces the original type.
-   *  The produced tree will be bound to the specified ``universe'' and ``mirror''.
-   *  For more information and examples see the documentation for ``Context.reifyTree'' and ``Universe.reify''.
+   *  The produced tree will be bound to the specified `universe` and `mirror`.
+   *  For more information and examples see the documentation for `Context.reifyTree` and `Universe.reify`.
    */
   def reifyType(universe: Tree, mirror: Tree, tpe: Type, concrete: Boolean = false): Tree
 
   /** Given a type, generate a tree that when compiled and executed produces the runtime class of the original type.
-   *  If ``concrete'' is true, then this function will bail on types, who refer to abstract types (like `ClassTag` does).
+   *  If `concrete` is true, then this function will bail on types that refer to abstract types (like `ClassTag` does).
    */
   def reifyRuntimeClass(tpe: Type, concrete: Boolean = true): Tree
 
@@ -86,10 +87,10 @@ trait Reifiers {
  *  Such errors represent one of the standard ways for reification to go wrong, e.g.
  *  an attempt to create a `TypeTag` from a weak type.
  */
-case class ReificationException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg)
+case class ReificationException(pos: scala.reflect.api.Position, msg: String) extends Exception(msg)
 
 /** Indicates an unexpected error during one of the `reifyXXX` methods in [[scala.reflect.macros.Reifiers]].
  *  Such errors wrap random crashes in reification logic and are distinguished from expected [[scala.reflect.macros.ReificationException]]s
  *  so that the latter can be reported as compilation errors, while the former manifest themselves as compiler crashes.
  */
-case class UnexpectedReificationException(val pos: scala.reflect.api.Position, val msg: String, val cause: Throwable = null) extends Exception(msg, cause)
+case class UnexpectedReificationException(pos: scala.reflect.api.Position, msg: String, cause: Throwable = null) extends Exception(msg, cause)
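
As a small, hedged illustration of the reification entry points documented above, the sketch below uses `reifyRuntimeClass` to turn a macro's type argument into a tree that evaluates to its runtime class; the names are invented for the example:

    import scala.reflect.macros.blackbox.Context

    object ReifyExample {
      // Produces a tree that, when compiled and executed, yields the runtime class of T.
      def runtimeClassImpl[T](c: Context)(implicit tt: c.WeakTypeTag[T]): c.Tree =
        c.reifyRuntimeClass(tt.tpe, concrete = false)
    }
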
diff --git a/src/reflect/scala/reflect/macros/TreeBuilder.scala b/src/reflect/scala/reflect/macros/TreeBuilder.scala
deleted file mode 100644
index 204dc40..0000000
--- a/src/reflect/scala/reflect/macros/TreeBuilder.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-package scala.reflect
-package macros
-
-/**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
- *
- *  A helper available in [[scala.reflect.macros.Universe]] that defines shorthands for the
- *  most common tree-creating functions.
- */
-abstract class TreeBuilder {
-  val global: Universe
-
-  import global._
-  import definitions._
-
-  /** Builds a reference to value whose type is given stable prefix.
-   *  The type must be suitable for this.  For example, it
-   *  must not be a TypeRef pointing to an abstract type variable.
-   */
-  def mkAttributedQualifier(tpe: Type): Tree
-
-  /** Builds a reference to value whose type is given stable prefix.
-   *  If the type is unsuitable, e.g. it is a TypeRef for an
-   *  abstract type variable, then an Ident will be made using
-   *  termSym as the Ident's symbol.  In that case, termSym must
-   *  not be NoSymbol.
-   */
-  def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree
-
-  /** Builds a typed reference to given symbol with given stable prefix. */
-  def mkAttributedRef(pre: Type, sym: Symbol): Tree
-
-  /** Builds a typed reference to given symbol. */
-  def mkAttributedRef(sym: Symbol): Tree
-
-  /** Builds a typed This reference to given symbol. */
-  def mkAttributedThis(sym: Symbol): Tree
-
-  /** Builds a typed Ident with an underlying symbol. */
-  def mkAttributedIdent(sym: Symbol): Tree
-
-  /** Builds a typed Select with an underlying symbol. */
-  def mkAttributedSelect(qual: Tree, sym: Symbol): Tree
-
-  /** A creator for method calls, e.g. fn[T1, T2, ...](v1, v2, ...)
-   *  There are a number of variations.
-   *
-   *  @param    receiver    symbol of the method receiver
-   *  @param    methodName  name of the method to call
-   *  @param    targs       type arguments (if Nil, no TypeApply node will be generated)
-   *  @param    args        value arguments
-   *  @return               the newly created trees.
-   */
-  def mkMethodCall(receiver: Symbol, methodName: Name, targs: List[Type], args: List[Tree]): Tree
-
-  def mkMethodCall(method: Symbol, targs: List[Type], args: List[Tree]): Tree
-
-  def mkMethodCall(method: Symbol, args: List[Tree]): Tree
-
-  def mkMethodCall(target: Tree, args: List[Tree]): Tree
-
-  def mkMethodCall(receiver: Symbol, methodName: Name, args: List[Tree]): Tree
-
-  def mkMethodCall(receiver: Tree, method: Symbol, targs: List[Type], args: List[Tree]): Tree
-
-  def mkMethodCall(target: Tree, targs: List[Type], args: List[Tree]): Tree
-
-  def mkNullaryCall(method: Symbol, targs: List[Type]): Tree
-
-  /** A tree that refers to the runtime reflexive universe, ``scala.reflect.runtime.universe''. */
-  def mkRuntimeUniverseRef: Tree
-}
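
Since `TreeBuilder` is removed here, macros that used `c.universe.treeBuild` need to migrate to the equivalent `internal.gen` factory introduced further down in this patch. A hedged before/after sketch (the helper name is made up):

    import scala.reflect.macros.blackbox.Context

    object TreeGenMigration {
      def predefRef(c: Context): c.Tree = {
        import c.universe._
        // 2.10.x style, now deprecated (the TreeBuilder class itself is removed):
        //   treeBuild.mkAttributedRef(definitions.PredefModule)
        // 2.11.x replacement:
        internal.gen.mkAttributedRef(definitions.PredefModule)
      }
    }
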
diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala
index d36636a..d0dccb4 100644
--- a/src/reflect/scala/reflect/macros/Typers.scala
+++ b/src/reflect/scala/reflect/macros/Typers.scala
@@ -1,16 +1,17 @@
-package scala.reflect
+package scala
+package reflect
 package macros
 
+import scala.reflect.internal.{Mode => InternalMode}
+
 /**
  * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
- *  A slice of [[scala.reflect.macros.Context the Scala macros context]] that
+ *  A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that
  *  partially exposes the type checker to macro writers.
  */
 trait Typers {
-  self: Context =>
-
-  import universe._
+  self: blackbox.Context =>
 
   /** Contexts that represent macros in-flight, including the current one. Very much like a stack trace, but for macros only.
    *  Can be useful for interoperating with other macros and for imposing compiler-friendly limits on macro expansion.
@@ -22,21 +23,45 @@ trait Typers {
    *  Unlike `enclosingMacros`, this is a def, which means that it gets recalculated on every invocation,
    *  so it might change depending on what is going on during macro expansion.
    */
-  def openMacros: List[Context]
+  def openMacros: List[blackbox.Context]
 
-  /** Information about one of the currently considered implicit candidates.
-   *  Candidates are used in plural form, because implicit parameters may themselves have implicit parameters,
-   *  hence implicit searches can recursively trigger other implicit searches.
-   *
-   *  Can be useful to get information about an application with an implicit parameter that is materialized during current macro expansion.
-   *  If we're in an implicit macro being expanded, it's included in this list.
-   *
-   *  Unlike `enclosingImplicits`, this is a def, which means that it gets recalculated on every invocation,
-   *  so it might change depending on what is going on during macro expansion.
+  /** Represents the mode of operation of the typechecker underlying `c.typecheck` calls.
+   *  It is necessary because the shape of the tree to be typechecked is not, by itself, enough to tell how it should be typechecked.
+   *  Can be TERMmode (typecheck as a term), TYPEmode (typecheck as a type) or PATTERNmode (typecheck as a pattern).
+   */
+  // I'd very much like to make use of https://github.com/dsl-paradise/dsl-paradise here!
+  type TypecheckMode
+
+  /** Indicates that an argument to `c.typecheck` should be typechecked as a term.
+   *  This is the default typechecking mode in Scala 2.11 and the only one supported in Scala 2.10.
+   */
+  val TERMmode: TypecheckMode
+
+  /** Indicates that an argument to `c.typecheck` should be typechecked as a type.
+   */
+  val TYPEmode: TypecheckMode
+
+  /** Indicates that an argument to `c.typecheck` should be typechecked as a pattern.
+   */
+  val PATTERNmode: TypecheckMode
+
+  /** @see `scala.reflect.macros.TypecheckException`
+   */
+  type TypecheckException = scala.reflect.macros.TypecheckException
+
+  /** @see `scala.reflect.macros.TypecheckException`
    */
-  def openImplicits: List[(Type, Tree)]
+  val TypecheckException = scala.reflect.macros.TypecheckException
 
-  /** Typechecks the provided tree against the expected type `pt` in the macro callsite context.
+  /** @see `Typers.typecheck`
+   */
+  @deprecated("Use `c.typecheck` instead", "2.11.0")
+  def typeCheck(tree: Tree, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree =
+    typecheck(tree, TERMmode, pt, silent, withImplicitViewsDisabled, withMacrosDisabled)
+
+  /** Typechecks the provided tree against the expected type `pt` in the macro callsite context
+   *  under the typechecking mode specified in `mode`, with [[TERMmode]] being the default.
+   *  This populates symbols and types of the tree and possibly transforms it to reflect certain desugarings.
    *
    *  If `silent` is false, `TypecheckException` will be thrown in case of a typecheck error.
    *  If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs.
@@ -49,7 +74,7 @@ trait Typers {
    *
    *  @throws [[scala.reflect.macros.TypecheckException]]
    */
-  def typeCheck(tree: Tree, pt: Type = WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree
+  def typecheck(tree: Tree, mode: TypecheckMode = TERMmode, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree
 
   /** Infers an implicit value of the expected type `pt` in the macro callsite context.
    *  Optional `pos` parameter provides a position that will be associated with the implicit search.
@@ -57,7 +82,7 @@ trait Typers {
    *  If `silent` is false, `TypecheckException` will be thrown in case of an inference error.
    *  If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs.
    *  Such errors don't vanish and can be inspected by turning on -Xlog-implicits.
-   *  Unlike in `typeCheck`, `silent` is true by default.
+   *  Unlike in `typecheck`, `silent` is true by default.
    *
    *  @throws [[scala.reflect.macros.TypecheckException]]
    */
@@ -69,27 +94,35 @@ trait Typers {
    *  If `silent` is false, `TypecheckException` will be thrown in case of an inference error.
    *  If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs.
    *  Such errors don't vanish and can be inspected by turning on -Xlog-implicits.
-   *  Unlike in `typeCheck`, `silent` is true by default.
+   *  Unlike in `typecheck`, `silent` is true by default.
    *
    *  @throws [[scala.reflect.macros.TypecheckException]]
    */
   def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree
 
-  /** Recursively resets symbols and types in a given tree.
-   *
-   *  Note that this does not revert the tree to its pre-typer shape.
-   *  For more info, read up https://issues.scala-lang.org/browse/SI-5464.
+  /** Recursively resets locally defined symbols and types in a given tree.
+   *  WARNING: Don't use this API, go for [[untypecheck]] instead.
    */
-  def resetAllAttrs(tree: Tree): Tree
+  @deprecated("Use `c.untypecheck` instead", "2.11.0")
+  def resetLocalAttrs(tree: Tree): Tree
 
-  /** Recursively resets locally defined symbols and types in a given tree.
+  /** In the current implementation of Scala's reflection API, untyped trees (also known as parser trees or unattributed trees)
+   *  are observationally different from typed trees (also known as typer trees, typechecked trees or attributed trees).
    *
-   *  Note that this does not revert the tree to its pre-typer shape.
-   *  For more info, read up https://issues.scala-lang.org/browse/SI-5464.
+   *  Usually, if some compiler API takes a tree, then both untyped and typed trees will do. However, in some cases
+   *  only untyped or only typed trees are appropriate. For example, [[eval]] only accepts untyped trees, and one can only splice
+   *  typed trees inside typed trees. Therefore the current reflection API needs functions
+   *  that go back and forth between untyped and typed trees. For this we have [[typecheck]] and `untypecheck`.
+   *
+   *  Note that `untypecheck` is currently afflicted by https://issues.scala-lang.org/browse/SI-5464,
+   *  which can sometimes make it corrupt trees so that they no longer make sense. Unfortunately, there's no workaround for that.
+   *  We plan to fix this issue soon, but for now please keep it in mind.
+   *
+   *  @see [[http://stackoverflow.com/questions/20936509/scala-macros-what-is-the-difference-between-typed-aka-typechecked-an-untyped]]
    */
-  def resetLocalAttrs(tree: Tree): Tree
+  def untypecheck(tree: Tree): Tree
 }
 
 /** Indicates an error during one of the methods in [[scala.reflect.macros.Typers]].
  */
-case class TypecheckException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg)
+case class TypecheckException(pos: scala.reflect.api.Position, msg: String) extends Exception(msg)
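
To make the renamed and extended typechecking API above concrete, here is a hedged sketch showing the new `mode` parameter and `untypecheck`; everything below is illustrative and not part of the patch:

    import scala.reflect.macros.blackbox.Context

    object TypecheckExample {
      def demo(c: Context): Unit = {
        import c.universe._
        val term = c.typecheck(q"1 + 1")                  // TERMmode is the default
        val tpt  = c.typecheck(tq"List[Int]", c.TYPEmode) // typecheck as a type
        val raw  = c.untypecheck(term)                    // strip symbols and types again
        c.echo(c.enclosingPosition, s"typed: $term / $tpt / $raw")
      }
    }
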
diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala
index 4e76f7c..1eb6721 100644
--- a/src/reflect/scala/reflect/macros/Universe.scala
+++ b/src/reflect/scala/reflect/macros/Universe.scala
@@ -1,6 +1,10 @@
-package scala.reflect
+package scala
+package reflect
 package macros
 
+import scala.language.implicitConversions
+import scala.language.higherKinds
+
 /**
  * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
  *
@@ -16,111 +20,96 @@ package macros
  */
 abstract class Universe extends scala.reflect.api.Universe {
 
-  /** A factory that encapsulates common tree-building functions.
-   *  @group Macros
-   */
-  val treeBuild: TreeBuilder { val global: Universe.this.type }
+  /** @inheritdoc */
+  override type Internal <: MacroInternalApi
 
-  /** The API of reflection artifacts that support [[scala.reflect.macros.Attachments]].
-   *  These artifacts are trees and symbols.
-   *  @group Macros
-   */
-  trait AttachableApi {
-    /** The attachment of the reflection artifact. */
-    def attachments: Attachments { type Pos = Position }
+  /** @inheritdoc */
+  trait MacroInternalApi extends InternalApi { internal =>
 
-    /** Updates the attachment with the payload slot of T added/updated with the provided value.
-     *  Replaces an existing payload of the same type, if exists.
-     *  Returns the reflection artifact itself.
+    /** Adds a given symbol to the given scope.
      */
-    def updateAttachment[T: ClassTag](attachment: T): AttachableApi.this.type
+    def enter(scope: Scope, sym: Symbol): scope.type
 
-    /** Update the attachment with the payload of the given class type `T` removed.
-     *  Returns the reflection artifact itself.
+    /** Removes a given symbol from the given scope.
      */
-    def removeAttachment[T: ClassTag]: AttachableApi.this.type
-  }
+    def unlink(scope: Scope, sym: Symbol): scope.type
 
-  // Symbol extensions ---------------------------------------------------------------
+    /** Collects all the symbols defined by subtrees of `tree` that are owned by `prev`,
+     *  and then changes their owner to point to `next`.
+     *
+     *  This is an essential tool to battle owner chain corruption when moving trees
+     *  from one lexical context to another. Whenever you take an attributed tree that
+     *  has been typechecked under the Context owned by some symbol (let's call it `x`)
+     *  and splice it elsewhere, into the Context owned by another symbol (let's call it `y`),
+     *  it is imperative that you either call `untypecheck` or do `changeOwner(tree, x, y)`.
+     *
+     *  Since at the moment `untypecheck` has a fundamental problem that can sometimes lead to tree corruption,
+     *  `changeOwner` becomes an indispensable tool for building 100% robust macros.
+     *  Future versions of the reflection API might obviate the need to take care of
+     *  these low-level details, but at the moment this is what we've got.
+     */
+    def changeOwner(tree: Tree, prev: Symbol, next: Symbol): tree.type
 
-  /**  The `Symbol` API is extended for macros: See [[SymbolContextApi]] for details.
-   *
-   *  @group Macros
-   */
-  override type Symbol >: Null <: SymbolContextApi
+    /** Advanced tree factories */
+    val gen: TreeGen
 
-  /** The extended API of symbols that's supported in macro context universes
-   *  @group API
-   */
-  trait SymbolContextApi extends SymbolApi with AttachableApi { self: Symbol =>
+    /** The attachment of the symbol. */
+    def attachments(symbol: Symbol): Attachments { type Pos = Position }
 
-    /** If this symbol is a skolem, its corresponding type parameter, otherwise the symbol itself.
-     *
-     *  [[https://groups.google.com/forum/#!msg/scala-internals/0j8laVNTQsI/kRXMF_c8bGsJ To quote Martin Odersky]],
-     *  skolems are synthetic type "constants" that are copies of existentially bound or universally
-     *  bound type variables. E.g. if one is inside the right-hand side of a method:
-     *
-     *  {{{
-     *  def foo[T](x: T) = ... foo[List[T]]....
-     *  }}}
-     *
-     *  the skolem named `T` refers to the unknown type instance of `T` when `foo` is called. It needs to be different
-     *  from the type parameter because in a recursive call as in the `foo[List[T]]` above the type parameter gets
-     *  substituted with `List[T]`, but the ''type skolem'' stays what it is.
-     *
-     *  The other form of skolem is an ''existential skolem''. Say one has a function
-     *
-     *  {{{
-     *  def bar(xs: List[T] forSome { type T }) = xs.head
-     *  }}}
-     *
-     *  then each occurrence of `xs` on the right will have type `List[T']` where `T'` is a fresh copy of `T`.
+    /** Updates the attachment with the payload slot of T added/updated with the provided value.
+     *  Replaces an existing payload of the same type, if one exists.
+     *  Returns the symbol itself.
      */
-    def deSkolemize: Symbol
+    def updateAttachment[T: ClassTag](symbol: Symbol, attachment: T): symbol.type
 
-    /** The position of this symbol. */
-    def pos: Position
+    /** Update the attachment with the payload of the given class type `T` removed.
+     *  Returns the symbol itself.
+     */
+    def removeAttachment[T: ClassTag](symbol: Symbol): symbol.type
+
+    /** Sets the `owner` of the symbol. */
+    def setOwner(symbol: Symbol, newowner: Symbol): symbol.type
 
-    /** Sets the `typeSignature` of the symbol. */
-    def setTypeSignature(tpe: Type): Symbol
+    /** Sets the `info` of the symbol. */
+    def setInfo(symbol: Symbol, tpe: Type): symbol.type
 
     /** Sets the `annotations` of the symbol. */
-    def setAnnotations(annots: Annotation*): Symbol
+    def setAnnotations(symbol: Symbol, annots: Annotation*): symbol.type
 
     /** Sets the `name` of the symbol. */
-    def setName(name: Name): Symbol
+    def setName(symbol: Symbol, name: Name): symbol.type
 
     /** Sets the `privateWithin` of the symbol. */
-    def setPrivateWithin(sym: Symbol): Symbol
-  }
+    def setPrivateWithin(symbol: Symbol, sym: Symbol): symbol.type
 
-  // Tree extensions ---------------------------------------------------------------
+    /** Enables `flags` on the symbol. */
+    def setFlag(symbol: Symbol, flags: FlagSet): symbol.type
 
-  /**  The `Tree` API is extended for macros: See [[TreeContextApi]] for details.
-   *
-   *  @group Macros
-   */
-  override type Tree >: Null <: TreeContextApi
+    /** Disables `flags` on the symbol. */
+    def resetFlag(symbol: Symbol, flags: FlagSet): symbol.type
 
-  /** The extended API of trees that's supported in macro context universes
-   *  @group API
-   */
-  trait TreeContextApi extends TreeApi with AttachableApi { self: Tree =>
+    /** The attachment of the tree. */
+    def attachments(tree: Tree): Attachments { type Pos = Position }
 
-    /** Sets the `pos` of the tree. Returns `Unit`. */
-    def pos_=(pos: Position): Unit
+    /** Updates the attachment with the payload slot of T added/updated with the provided value.
+     *  Replaces an existing payload of the same type, if one exists.
+     *  Returns the tree itself.
+     */
+    def updateAttachment[T: ClassTag](tree: Tree, attachment: T): tree.type
 
-    /** Sets the `pos` of the tree. Returns the tree itself. */
-    def setPos(newpos: Position): Tree
+    /** Update the attachment with the payload of the given class type `T` removed.
+     *  Returns the tree itself.
+     */
+    def removeAttachment[T: ClassTag](tree: Tree): tree.type
 
-    /** Sets the `tpe` of the tree. Returns `Unit`. */
-    def tpe_=(t: Type): Unit
+    /** Sets the `pos` of the tree. Returns the tree itself. */
+    def setPos(tree: Tree, newpos: Position): tree.type
 
     /** Sets the `tpe` of the tree. Returns the tree itself. */
-    def setType(tp: Type): Tree
+    def setType(tree: Tree, tp: Type): tree.type
 
     /** Like `setType`, but if this is a previously empty TypeTree that
-     *  fact is remembered so that resetAllAttrs will snap back.
+     *  fact is remembered so that `untypecheck` will snap back.
      *
      *  \@PP: Attempting to elaborate on the above, I find: If defineType
      *  is called on a TypeTree whose type field is null or NoType,
@@ -128,7 +117,8 @@ abstract class Universe extends scala.reflect.api.Universe {
      *  ResetAttrsTraverser, which nulls out the type field of TypeTrees
      *  for which wasEmpty is true, leaving the others alone.
      *
-     *  resetAllAttrs is used in situations where some speculative
+     *  `untypecheck` (or `resetAttrs` in compiler parlance) is used
+     *  in situations where some speculative
      *  typing of a tree takes place, fails, and the tree needs to be
      *  returned to its former state to try again. So according to me:
      *  using `defineType` instead of `setType` is how you communicate
@@ -136,93 +126,355 @@ abstract class Universe extends scala.reflect.api.Universe {
      *  and therefore should be abandoned if the current line of type
      *  inquiry doesn't work out.
      */
-    def defineType(tp: Type): Tree
-
-    /** Sets the `symbol` of the tree. Returns `Unit`. */
-    def symbol_=(sym: Symbol): Unit
+    def defineType(tree: Tree, tp: Type): tree.type
 
     /** Sets the `symbol` of the tree. Returns the tree itself. */
-    def setSymbol(sym: Symbol): Tree
-  }
+    def setSymbol(tree: Tree, sym: Symbol): tree.type
 
-  /** @inheritdoc */
-  override type SymTree >: Null <: Tree with SymTreeContextApi
+    /** Sets the `original` field of the type tree. */
+    def setOriginal(tt: TypeTree, original: Tree): TypeTree
 
-  /** The extended API of sym trees that's supported in macro context universes
-   *  @group API
-   */
-  trait SymTreeContextApi extends SymTreeApi { this: SymTree =>
-    /** Sets the `symbol` field of the sym tree. */
-    var symbol: Symbol
-  }
+    /** Mark a variable as captured; i.e. force boxing in a *Ref type.
+     *  @group Macros
+     */
+    def captureVariable(vble: Symbol): Unit
 
-  /** @inheritdoc */
-  override type TypeTree >: Null <: TypTree with TypeTreeContextApi
+    /** Mark given identifier as a reference to a captured variable itself
+     *  suppressing dereferencing with the `elem` field.
+     *  @group Macros
+     */
+    def referenceCapturedVariable(vble: Symbol): Tree
 
-  /** The extended API of sym trees that's supported in macro context universes
-   *  @group API
-   */
-  trait TypeTreeContextApi extends TypeTreeApi { this: TypeTree =>
-    /** Sets the `original` field of the type tree. */
-    def setOriginal(tree: Tree): this.type
+    /** Convert type of a captured variable to *Ref type.
+     *  @group Macros
+     */
+    def capturedVariableType(vble: Symbol): Type
+
+    /** Retrieves the untyped list of subpatterns attached to selector dummy of an UnApply node.
+     *  Useful in writing quasiquoting macros that do pattern matching.
+     */
+    def subpatterns(tree: Tree): Option[List[Tree]]
+
+    /** @inheritdoc */
+    override type Decorators <: MacroDecoratorApi
+
+    /** @inheritdoc */
+    trait MacroDecoratorApi extends DecoratorApi {
+      /** Extension methods for scopes */
+      type ScopeDecorator[T <: Scope] <: MacroScopeDecoratorApi[T]
+
+      /** @see [[ScopeDecorator]] */
+      implicit def scopeDecorator[T <: Scope](tree: T): ScopeDecorator[T]
+
+      /** @see [[ScopeDecorator]] */
+      class MacroScopeDecoratorApi[T <: Scope](val scope: T) {
+        /** @see [[internal.enter]] */
+        def enter(sym: Symbol): T = internal.enter(scope, sym)
+
+        /** @see [[internal.unlink]] */
+        def unlink(sym: Symbol): T = internal.unlink(scope, sym)
+      }
+
+      /** @inheritdoc */
+      override type TreeDecorator[T <: Tree] <: MacroTreeDecoratorApi[T]
+
+      /** @see [[TreeDecorator]] */
+      class MacroTreeDecoratorApi[T <: Tree](override val tree: T) extends TreeDecoratorApi[T](tree) {
+        /** @see [[internal.changeOwner]] */
+        def changeOwner(prev: Symbol, next: Symbol): tree.type = internal.changeOwner(tree, prev, next)
+
+        /** @see [[internal.attachments]] */
+        def attachments: Attachments { type Pos = Position } = internal.attachments(tree)
+
+        /** @see [[internal.updateAttachment]] */
+        def updateAttachment[A: ClassTag](attachment: A): tree.type = internal.updateAttachment(tree, attachment)
+
+        /** @see [[internal.removeAttachment]] */
+        def removeAttachment[A: ClassTag]: T = internal.removeAttachment[A](tree)
+
+        /** @see [[internal.setPos]] */
+        def setPos(newpos: Position): T = internal.setPos(tree, newpos)
+
+        /** @see [[internal.setType]] */
+        def setType(tp: Type): T = internal.setType(tree, tp)
+
+        /** @see [[internal.defineType]] */
+        def defineType(tp: Type): T = internal.defineType(tree, tp)
+
+        /** @see [[internal.setSymbol]] */
+        def setSymbol(sym: Symbol): T = internal.setSymbol(tree, sym)
+      }
+
+      /** Extension methods for typetrees */
+      type TypeTreeDecorator[T <: TypeTree] <: MacroTypeTreeDecoratorApi[T]
+
+      /** @see [[TypeTreeDecorator]] */
+      implicit def typeTreeDecorator[T <: TypeTree](tt: T): TypeTreeDecorator[T]
+
+      /** @see [[TypeTreeDecorator]] */
+      class MacroTypeTreeDecoratorApi[T <: TypeTree](val tt: T) {
+        /** @see [[internal.setOriginal]] */
+        def setOriginal(tree: Tree): TypeTree = internal.setOriginal(tt, tree)
+      }
+
+      /** @inheritdoc */
+      override type SymbolDecorator[T <: Symbol] <: MacroSymbolDecoratorApi[T]
+
+      /** @see [[SymbolDecorator]] */
+      class MacroSymbolDecoratorApi[T <: Symbol](override val symbol: T) extends SymbolDecoratorApi[T](symbol) {
+        /** @see [[internal.attachments]] */
+        def attachments: Attachments { type Pos = Position } = internal.attachments(symbol)
+
+        /** @see [[internal.updateAttachment]] */
+        def updateAttachment[A: ClassTag](attachment: A): T = internal.updateAttachment(symbol, attachment)
+
+        /** @see [[internal.removeAttachment]] */
+        def removeAttachment[A: ClassTag]: T = internal.removeAttachment[A](symbol)
+
+        /** @see [[internal.setOwner]] */
+        def setOwner(newowner: Symbol): T = internal.setOwner(symbol, newowner)
+
+        /** @see [[internal.setInfo]] */
+        def setInfo(tpe: Type): T = internal.setInfo(symbol, tpe)
+
+        /** @see [[internal.setAnnotations]] */
+        def setAnnotations(annots: Annotation*): T = internal.setAnnotations(symbol, annots: _*)
+
+        /** @see [[internal.setName]] */
+        def setName(name: Name): T = internal.setName(symbol, name)
+
+        /** @see [[internal.setPrivateWithin]] */
+        def setPrivateWithin(sym: Symbol): T = internal.setPrivateWithin(symbol, sym)
+
+        /** @see [[internal.setFlag]] */
+        def setFlag(flags: FlagSet): T = internal.setFlag(symbol, flags)
+
+        /** @see [[internal.resetFlag]] */
+        def resetFlag(flags: FlagSet): T = internal.resetFlag(symbol, flags)
+      }
+    }
   }
 
-  /** @inheritdoc */
-  override type Ident >: Null <: RefTree with IdentContextApi
+  /** @group Internal */
+  trait TreeGen {
+    /** Builds a reference to a value whose type is the given stable prefix.
+     *  The type must be suitable for this.  For example, it
+     *  must not be a TypeRef pointing to an abstract type variable.
+     */
+    def mkAttributedQualifier(tpe: Type): Tree
 
-  /** The extended API of idents that's supported in macro context universes
-   *  @group API
-   */
-  trait IdentContextApi extends IdentApi { this: Ident =>
-    /** Was this ident created from a backquoted identifier? */
-    def isBackquoted: Boolean
+    /** Builds a reference to a value whose type is the given stable prefix.
+     *  If the type is unsuitable, e.g. it is a TypeRef for an
+     *  abstract type variable, then an Ident will be made using
+     *  termSym as the Ident's symbol.  In that case, termSym must
+     *  not be NoSymbol.
+     */
+    def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree
+
+    /** Builds a typed reference to given symbol with given stable prefix. */
+    def mkAttributedRef(pre: Type, sym: Symbol): RefTree
+
+    /** Builds a typed reference to given symbol. */
+    def mkAttributedRef(sym: Symbol): RefTree
+
+    def stabilize(tree: Tree): Tree
+
+    def mkAttributedStableRef(pre: Type, sym: Symbol): Tree
+
+    def mkAttributedStableRef(sym: Symbol): Tree
+
+    /** Builds an untyped reference to given symbol. Requires the symbol to be static. */
+    def mkUnattributedRef(sym: Symbol): RefTree
+
+    /** Builds an untyped reference to symbol with given name. Requires the symbol to be static. */
+    def mkUnattributedRef(fullName: Name): RefTree
+
+    /** Builds a typed This reference to given symbol. */
+    def mkAttributedThis(sym: Symbol): This
+
+    /** Builds a typed Ident with an underlying symbol. */
+    def mkAttributedIdent(sym: Symbol): RefTree
+
+    /** Builds a typed Select with an underlying symbol. */
+    def mkAttributedSelect(qual: Tree, sym: Symbol): RefTree
+
+    /** A creator for method calls, e.g. fn[T1, T2, ...](v1, v2, ...)
+     *  There are a number of variations.
+     *
+     *  @param    receiver    symbol of the method receiver
+     *  @param    methodName  name of the method to call
+     *  @param    targs       type arguments (if Nil, no TypeApply node will be generated)
+     *  @param    args        value arguments
+     *  @return               the newly created trees.
+     */
+    def mkMethodCall(receiver: Symbol, methodName: Name, targs: List[Type], args: List[Tree]): Tree
+
+    def mkMethodCall(method: Symbol, targs: List[Type], args: List[Tree]): Tree
+
+    def mkMethodCall(method: Symbol, args: List[Tree]): Tree
+
+    def mkMethodCall(target: Tree, args: List[Tree]): Tree
+
+    def mkMethodCall(receiver: Symbol, methodName: Name, args: List[Tree]): Tree
+
+    def mkMethodCall(receiver: Tree, method: Symbol, targs: List[Type], args: List[Tree]): Tree
+
+    def mkMethodCall(target: Tree, targs: List[Type], args: List[Tree]): Tree
+
+    def mkNullaryCall(method: Symbol, targs: List[Type]): Tree
+
+    /** A tree that refers to the runtime reflection universe, `scala.reflect.runtime.universe`. */
+    def mkRuntimeUniverseRef: Tree
+
+    def mkZero(tp: Type): Tree
+
+    def mkCast(tree: Tree, pt: Type): Tree
   }
 
-  /** Mark a variable as captured; i.e. force boxing in a *Ref type.
-   *  @group Macros
-   */
-  def captureVariable(vble: Symbol): Unit
+  /** @see [[internal.gen]] */
+  @deprecated("Use `internal.gen` instead", "2.11.0")
+  val treeBuild: TreeGen
 
-  /** Mark given identifier as a reference to a captured variable itself
-   *  suppressing dereferencing with the `elem` field.
-   *  @group Macros
-   */
-  def referenceCapturedVariable(vble: Symbol): Tree
+  /** @inheritdoc */
+  type Compat <: MacroCompatApi
 
-  /** Convert type of a captured variable to *Ref type.
-   *  @group Macros
+  /** @see [[compat]]
+   *  @group Internal
    */
-  def capturedVariableType(vble: Symbol): Type
+  trait MacroCompatApi extends CompatApi {
+    /** Scala 2.10 compatibility enrichments for Symbol. */
+    implicit class MacroCompatibleSymbol(symbol: Symbol) {
+      /** @see [[InternalMacroApi.attachments]] */
+      @deprecated("Use `internal.attachments` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def attachments: Attachments { type Pos = Position } = internal.attachments(symbol)
+
+      /** @see [[InternalMacroApi.updateAttachment]] */
+      @deprecated("Use `internal.updateAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def updateAttachment[T: ClassTag](attachment: T): Symbol = internal.updateAttachment[T](symbol, attachment)
+
+      /** @see [[InternalMacroApi.removeAttachment]] */
+      @deprecated("Use `internal.removeAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def removeAttachment[T: ClassTag]: Symbol = internal.removeAttachment[T](symbol)
+
+      /** @see [[InternalMacroApi.setInfo]] */
+      @deprecated("Use `internal.setInfo` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def setTypeSignature(tpe: Type): Symbol = internal.setInfo(symbol, tpe)
+
+      /** @see [[InternalMacroApi.setAnnotations]] */
+      @deprecated("Use `internal.setAnnotations` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def setAnnotations(annots: Annotation*): Symbol = internal.setAnnotations(symbol, annots: _*)
+
+      /** @see [[InternalMacroApi.setName]] */
+      @deprecated("Use `internal.setName` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def setName(name: Name): Symbol = internal.setName(symbol, name)
+
+      /** @see [[InternalMacroApi.setPrivateWithin]] */
+      @deprecated("Use `internal.setPrivateWithin` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def setPrivateWithin(sym: Symbol): Symbol = internal.setPrivateWithin(symbol, sym)
+    }
+
+    /** Scala 2.10 compatibility enrichments for Tree. */
+    implicit class MacroCompatibleTree(tree: Tree) {
+      /** @see [[InternalMacroApi.attachments]] */
+      @deprecated("Use `internal.attachments` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def attachments: Attachments { type Pos = Position } = internal.attachments(tree)
+
+      /** @see [[InternalMacroApi.updateAttachment]] */
+      @deprecated("Use `internal.updateAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def updateAttachment[T: ClassTag](attachment: T): Tree = internal.updateAttachment[T](tree, attachment)
+
+      /** @see [[InternalMacroApi.removeAttachment]] */
+      @deprecated("Use `internal.removeAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def removeAttachment[T: ClassTag]: Tree = internal.removeAttachment[T](tree)
+
+      /** @see [[InternalMacroApi.setPos]] */
+      @deprecated("Use `internal.setPos` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def pos_=(pos: Position): Unit = internal.setPos(tree, pos)
+
+      /** @see [[InternalMacroApi.setPos]] */
+      @deprecated("Use `internal.setPos` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def setPos(newpos: Position): Tree = internal.setPos(tree, newpos)
+
+      /** @see [[InternalMacroApi.setType]] */
+      @deprecated("Use `internal.setType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def tpe_=(t: Type): Unit = internal.setType(tree, t)
+
+      /** @see [[InternalMacroApi.setType]] */
+      @deprecated("Use `internal.setType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def setType(tp: Type): Tree = internal.setType(tree, tp)
+
+      /** @see [[InternalMacroApi.defineType]] */
+      @deprecated("Use `internal.defineType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def defineType(tp: Type): Tree = internal.defineType(tree, tp)
+
+      /** @see [[InternalMacroApi.setSymbol]] */
+      @deprecated("Use `internal.setSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def symbol_=(sym: Symbol): Unit = internal.setSymbol(tree, sym)
+
+      /** @see [[InternalMacroApi.setSymbol]] */
+      @deprecated("Use `internal.setSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def setSymbol(sym: Symbol): Tree = internal.setSymbol(tree, sym)
+    }
+
+    /** Scala 2.10 compatibility enrichments for TypeTree. */
+    implicit class CompatibleTypeTree(tt: TypeTree) {
+      /** @see [[InternalMacroApi.setOriginal]] */
+      @deprecated("Use `internal.setOriginal` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+      def setOriginal(tree: Tree): TypeTree = internal.setOriginal(tt, tree)
+    }
+
+    /** @see [[InternalMacroApi.captureVariable]] */
+    @deprecated("Use `internal.captureVariable` instead", "2.11.0")
+    def captureVariable(vble: Symbol): Unit = internal.captureVariable(vble)
+
+    /** @see [[InternalMacroApi.referenceCapturedVariable]] */
+    @deprecated("Use `internal.referenceCapturedVariable` instead", "2.11.0")
+    def referenceCapturedVariable(vble: Symbol): Tree = internal.referenceCapturedVariable(vble)
+
+    /** @see [[InternalMacroApi.capturedVariableType]] */
+    @deprecated("Use `internal.capturedVariableType` instead", "2.11.0")
+    def capturedVariableType(vble: Symbol): Type = internal.capturedVariableType(vble)
+  }
 
   /** The type of compilation runs.
+   *  @see [[scala.reflect.macros.Enclosures]]
    *  @template
    *  @group Macros
    */
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
   type Run <: RunContextApi
 
   /** Compilation run uniquely identifies current invocation of the compiler
    *  (e.g. can be used to implement per-run caches for macros) and provides access to units of work
    *  of the invocation (currently processed unit of work and the list of all units).
+   *  @see [[scala.reflect.macros.Enclosures]]
    *  @group API
    */
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
   trait RunContextApi {
     /** Currently processed unit of work (a real or a virtual file). */
+    @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
     def currentUnit: CompilationUnit
 
     /** All units of work comprising this compilation run. */
+    @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
     def units: Iterator[CompilationUnit]
   }
 
   /** The type of compilation units.
+   *  @see [[scala.reflect.macros.Enclosures]]
    *  @template
    *  @group Macros
    */
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
   type CompilationUnit <: CompilationUnitContextApi
 
   /** Compilation unit describes a unit of work of the compilation run.
    *  It provides such information as file name, textual representation of the unit and the underlying AST.
+   *  @see [[scala.reflect.macros.Enclosures]]
    *  @group API
    */
+  @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
   trait CompilationUnitContextApi {
     /** Source file corresponding to this compilation unit.
      *
@@ -233,9 +485,11 @@ abstract class Universe extends scala.reflect.api.Universe {
      *  It should not be used unless you know what you are doing. In subsequent releases, this API will be refined
      *  and exposed as a part of scala.reflect.api.
      */
+    @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
     def source: scala.reflect.internal.util.SourceFile
 
     /** The AST that corresponds to this compilation unit. */
+    @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.11.0")
     def body: Tree
   }
-}
\ No newline at end of file
+}
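
A short, hedged sketch of the reworked `internal` API above, showing both the explicit calls and the optional infix decorators; the helper and value names are invented:

    import scala.reflect.macros.blackbox.Context

    object InternalApiExample {
      def retype(c: Context)(tree: c.Tree): c.Tree = {
        import c.universe._
        // explicit form:
        internal.setType(tree, typeOf[Int])
        // equivalent infix form, enabled by the decorators:
        import internal.decorators._
        tree.setType(typeOf[Int])
      }
    }
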
diff --git a/src/reflect/scala/reflect/macros/blackbox/Context.scala b/src/reflect/scala/reflect/macros/blackbox/Context.scala
new file mode 100644
index 0000000..2f9c512
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/blackbox/Context.scala
@@ -0,0 +1,97 @@
+package scala
+package reflect
+package macros
+package blackbox
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ *  The blackbox Scala macros context.
+ *
+ *  See [[scala.reflect.macros.package the overview page]] for a description of how macros work. This documentation
+ *  entry provides information on the API available to macro writers.
+ *
+ *  A macro context wraps a compiler universe exposed in `universe` and having type [[scala.reflect.macros.Universe]].
+ *  This type is a refinement over the generic reflection API provided in [[scala.reflect.api.Universe]]. The
+ *  extended Universe provides mutability for reflection artifacts (e.g. macros can change types of compiler trees,
+ *  add annotations to symbols representing definitions, etc) and exposes some internal compiler functionality
+ *  such as `Symbol.deSkolemize` or `Tree.attachments`.
+ *
+ *  Another fundamental part of a macro context is `macroApplication`, which provides access to the tree undergoing
+ *  macro expansion. Parts of this tree can be found in arguments of the corresponding macro implementations and
+ *  in `prefix`, but `macroApplication` gives the full picture.
+ *
+ *  Other than that, macro contexts provide facilities for typechecking, exploring the compiler's symbol table and
+ *  enclosing trees and compilation units, evaluating trees, logging warnings/errors and much more.
+ *  Refer to the documentation of top-level traits in this package to learn the details.
+ *
+ *  If a macro def refers to a macro impl that uses `blackbox.Context`, then this macro def becomes a blackbox macro,
+ *  which means that its expansion will be upcast to its return type, enforcing faithfulness of that macro to its
+ *  type signature. Whitebox macros, i.e. the ones defined with `whitebox.Context`, aren't bound by this restriction,
+ *  which enables a number of important use cases, but they are also going to enjoy less support than blackbox macros,
+ *  so choose wisely. See the [[http://docs.scala-lang.org/overviews/macros.html Macros Guide]] for more information.
+ *
+ *  @see `scala.reflect.macros.whitebox.Context`
+ */
+trait Context extends Aliases
+                 with Enclosures
+                 with Names
+                 with Reifiers
+                 with FrontEnds
+                 with Infrastructure
+                 with Typers
+                 with Parsers
+                 with Evals
+                 with ExprUtils
+                 with Internals {
+
+  /** The compile-time universe. */
+  val universe: Universe
+
+  /** The mirror of the compile-time universe. */
+  val mirror: universe.Mirror
+
+  /** The type of the prefix tree from which the macro is selected.
+   *  See the documentation entry for `prefix` for an example.
+   */
+  type PrefixType
+
+  /** The prefix tree from which the macro is selected.
+   *
+   *  For example, for a macro `filter` defined as an instance method on a collection `Coll`,
+   *  `prefix` represents an equivalent of `this` for normal instance methods:
+   *
+   *  {{{
+   *  scala> class Coll[T] {
+   *       | def filter(p: T => Boolean): Coll[T] = macro M.filter[T]
+   *       | }; object M {
+   *       | def filter[T](c: Context { type PrefixType = Coll[T] })
+   *       |              (p: c.Expr[T => Boolean]): c.Expr[Coll[T]] =
+   *       |   {
+   *       |     println(c.prefix.tree)
+   *       |     c.prefix
+   *       |   }
+   *       | }
+   *  defined class Coll
+   *  defined object M
+   *
+   *  scala> new Coll[Int]().filter(_ % 2 == 0)
+   *  new Coll[Int]()
+   *  res0: Coll[Int] = ...
+   *
+   *  scala> val x = new Coll[String]()
+   *  x: Coll[String] = ...
+   *
+   *  scala> x.filter(_ != "")
+   *  \$line11.\$read.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.x
+   *  res1 @ 35563b4b: x.type = ...
+   *  }}}
+   *
+   *  Note how the value of `prefix` changes depending on the qualifier of the macro call
+   *  (i.e. the expression that is at the left-hand side of the dot).
+   *
+   *  Another noteworthy thing about the snippet above is the `Context { type PrefixType = Coll[T] }`
+   *  type that is used to stress that the macro implementation works with prefixes of type `Coll[T]`.
+   */
+  val prefix: Expr[PrefixType]
+}
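
To ground the description above, a hedged sketch of a complete blackbox macro pair (definition plus implementation); the names and the behavior are made up for the example:

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object Debug {
      // Renders the argument expression as source code at compile time.
      def show(x: Any): String = macro showImpl

      def showImpl(c: Context)(x: c.Expr[Any]): c.Expr[String] = {
        import c.universe._
        c.Expr[String](Literal(Constant(showCode(x.tree))))
      }
    }
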
diff --git a/src/reflect/scala/reflect/macros/package.scala b/src/reflect/scala/reflect/macros/package.scala
index 21d189b..cc7111d 100644
--- a/src/reflect/scala/reflect/macros/package.scala
+++ b/src/reflect/scala/reflect/macros/package.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 
 /**
  * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
@@ -6,10 +7,22 @@ package scala.reflect
  *  The base package for Scala macros.
  *
  *  Macros are functions that are called by the compiler during compilation.
- *  Within these functions the programmer has access to compiler APIs exposed in [[scala.reflect.macros.Context]].
+ *  Within these functions the programmer has access to compiler APIs.
  *  For example, it is possible to generate, analyze and typecheck code.
  *
  *  See the [[http://docs.scala-lang.org/overviews/macros.html Macros Guide]] on how to get started with Scala macros.
  */
 package object macros {
+  /** The Scala macros context.
+   *
+   *  In Scala 2.11, macros, which used to come in a single flavor, are split into blackbox and whitebox macros,
+   *  with the former being better supported and the latter being more powerful. You can read about
+   *  the details of the split and the associated trade-offs in the [[http://docs.scala-lang.org/overviews/macros.html Macros Guide]].
+   *
+   *  `scala.reflect.macros.Context` follows this tendency and turns into `scala.reflect.macros.blackbox.Context`
+   *  and `scala.reflect.macros.whitebox.Context`. The original `Context` is left in place for compatibility reasons,
+   *  but it is now deprecated, nudging users to choose between blackbox and whitebox macros.
+   */
+  @deprecated("Use blackbox.Context or whitebox.Context instead", "2.11.0")
+  type Context = whitebox.Context
 }
\ No newline at end of file
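
For code that still compiles against the deprecated alias, migration is usually just a change of the imported `Context`; a hedged sketch (the implementation name is made up):

    import scala.reflect.macros.blackbox

    object Migrated {
      // 2.10 style, now deprecated and resolving to whitebox.Context:
      //   def impl(c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Int] = x
      // 2.11 style, explicitly opting into the better-supported blackbox flavor:
      def impl(c: blackbox.Context)(x: c.Expr[Int]): c.Expr[Int] = x
    }
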
diff --git a/src/reflect/scala/reflect/macros/whitebox/Context.scala b/src/reflect/scala/reflect/macros/whitebox/Context.scala
new file mode 100644
index 0000000..bd48df4
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/whitebox/Context.scala
@@ -0,0 +1,77 @@
+package scala
+package reflect
+package macros
+package whitebox
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ *  The whitebox Scala macros context.
+ *
+ *  See [[scala.reflect.macros.package the overview page]] for a description of how macros work. This documentation
+ *  entry provides information on the API available to macro writers.
+ *
+ *  A macro context wraps a compiler universe exposed in `universe` and having type [[scala.reflect.macros.Universe]].
+ *  This type is a refinement over the generic reflection API provided in [[scala.reflect.api.Universe]]. The
+ *  extended Universe provides mutability for reflection artifacts (e.g. macros can change types of compiler trees,
+ *  add annotations to symbols representing definitions, etc) and exposes some internal compiler functionality
+ *  such as `Symbol.deSkolemize` or `Tree.attachments`.
+ *
+ *  Another fundamental part of a macro context is `macroApplication`, which provides access to the tree undergoing
+ *  macro expansion. Parts of this tree can be found in arguments of the corresponding macro implementations and
+ *  in `prefix`, but `macroApplication` gives the full picture.
+ *
+ *  Other than that, macro contexts provide facilities for typechecking, exploring the compiler's symbol table and
+ *  enclosing trees and compilation units, evaluating trees, logging warnings/errors and much more.
+ *  Refer to the documentation of top-level traits in this package to learn the details.
+ *
+ *  If a macro def refers to a macro impl that uses `whitebox.Context`, then this macro def becomes a whitebox macro,
+ *  gaining the ability to refine the type of its expansion beyond its official return type, which enables a number of important use cases.
+ *  Blackbox macros, i.e. the ones defined with `blackbox.Context`, can't do that, so they are less powerful.
+ *  However, blackbox macros are also going to enjoy better support than whitebox macros, so choose wisely.
+ *  See the [[http://docs.scala-lang.org/overviews/macros.html Macros Guide]] for more information.
+ *
+ *  @see `scala.reflect.macros.blackbox.Context`
+ */
+trait Context extends blackbox.Context {
+  /** @inheritdoc
+   */
+  def openMacros: List[Context]
+
+  /** @inheritdoc
+   */
+  def enclosingMacros: List[Context]
+
+  /** Information about one of the currently considered implicit candidates.
+   *  Candidates are used in plural form, because implicit parameters may themselves have implicit parameters,
+   *  hence implicit searches can recursively trigger other implicit searches.
+   *
+   *  `pre` and `sym` provide information about the candidate itself.
+   *  `pt` and `tree` store the parameters of the implicit search the candidate is participating in.
+   */
+  case class ImplicitCandidate(pre: Type, sym: Symbol, pt: Type, tree: Tree)
+
+  /** Information about one of the currently considered implicit candidates.
+   *  Candidates are used in plural form, because implicit parameters may themselves have implicit parameters,
+   *  hence implicit searches can recursively trigger other implicit searches.
+   *
+   *  Can be useful to get information about an application with an implicit parameter that is materialized during current macro expansion.
+   *  If we're in an implicit macro being expanded, it's included in this list.
+   *
+   *  Unlike `enclosingImplicits`, this is a def, which means that it gets recalculated on every invocation,
+   *  so it might change depending on what is going on during macro expansion.
+   */
+  def openImplicits: List[ImplicitCandidate]
+
+  /** Information about one of the currently considered implicit candidates.
+   *  Candidates are used in plural form, because implicit parameters may themselves have implicit parameters,
+   *  hence implicit searches can recursively trigger other implicit searches.
+   *
+   *  Can be useful to get information about an application with an implicit parameter that is materialized during current macro expansion.
+   *  If we're in an implicit macro being expanded, it's included in this list.
+   *
+   *  Unlike `openImplicits`, this is a val, which means that it gets initialized when the context is created
+   *  and always stays the same regardless of whatever happens during macro expansion.
+   */
+  def enclosingImplicits: List[ImplicitCandidate]
+}
\ No newline at end of file
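
A hedged sketch of what whiteboxity buys in practice: the static return type of the macro def below is `Any`, but because the impl uses `whitebox.Context`, each expansion keeps the precise type of its argument at the call site. Names are illustrative:

    import scala.language.experimental.macros
    import scala.reflect.macros.whitebox.Context

    object Exactly {
      def apply(x: Any): Any = macro impl

      // The expansion is just the argument tree, so `Exactly(42)` is typed as Int,
      // not as the declared Any, thanks to whitebox refinement.
      def impl(c: Context)(x: c.Tree): c.Tree = x
    }
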
diff --git a/src/reflect/scala/reflect/runtime/Gil.scala b/src/reflect/scala/reflect/runtime/Gil.scala
new file mode 100644
index 0000000..0edb1e5
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/Gil.scala
@@ -0,0 +1,25 @@
+package scala.reflect
+package runtime
+
+private[reflect] trait Gil {
+  self: SymbolTable =>
+
+  // fixme... please...
+  // there are the following avenues of optimization we discussed with Roland:
+  // 1) replace PackageScope locks with ConcurrentHashMap, because PackageScope materializers seem to be idempotent
+  // 2) unlock unpickling completers by verifying that they are idempotent or moving non-idempotent parts
+  // 3) remove the necessity in global state for isSubType
+  private lazy val gil = new java.util.concurrent.locks.ReentrantLock
+
+  @inline final def gilSynchronized[T](body: => T): T = {
+    if (isCompilerUniverse) body
+    else {
+      try {
+        gil.lock()
+        body
+      } finally {
+        gil.unlock()
+      }
+    }
+  }
+}
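
The new `gilSynchronized` helper is internal to the reflection runtime, but the guard pattern it uses is easy to see in isolation; below is a hedged, stand-alone sketch of the same idea (note that the canonical form acquires the lock before entering the `try`, a slight variation on the code above):

    import java.util.concurrent.locks.ReentrantLock

    object GilLikeGuard {
      private val lock = new ReentrantLock

      // Runs `body` while holding the lock, always releasing it afterwards.
      def guarded[T](body: => T): T = {
        lock.lock()
        try body
        finally lock.unlock()
      }
    }
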
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 6fdb238..f5bddb1 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package runtime
 
 import scala.ref.WeakReference
@@ -6,25 +7,22 @@ import scala.collection.mutable.WeakHashMap
 
 import java.lang.{Class => jClass, Package => jPackage}
 import java.lang.reflect.{
-  Method => jMethod, Constructor => jConstructor, Modifier => jModifier, Field => jField,
+  Method => jMethod, Constructor => jConstructor, Field => jField,
   Member => jMember, Type => jType, TypeVariable => jTypeVariable, Array => jArray,
+  AccessibleObject => jAccessibleObject,
   GenericDeclaration, GenericArrayType, ParameterizedType, WildcardType, AnnotatedElement }
 import java.lang.annotation.{Annotation => jAnnotation}
 import java.io.IOException
-import internal.MissingRequirementError
+import scala.reflect.internal.{ MissingRequirementError, JavaAccFlags, JMethodOrConstructor }
 import internal.pickling.ByteCodecs
-import internal.ClassfileConstants._
 import internal.pickling.UnPickler
-import scala.collection.mutable.{ HashMap, ListBuffer }
+import scala.collection.mutable.{ HashMap, ListBuffer, ArrayBuffer }
 import internal.Flags._
-//import scala.tools.nsc.util.ScalaClassLoader
-//import scala.tools.nsc.util.ScalaClassLoader._
-import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance, scalacShouldntLoadClass}
+import ReflectionUtils._
 import scala.language.existentials
 import scala.runtime.{ScalaRunTime, BoxesRunTime}
-import scala.reflect.internal.util.Collections._
 
-private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { thisUniverse: SymbolTable =>
+private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse with TwoWayCaches { thisUniverse: SymbolTable =>
 
   private lazy val mirrors = new WeakHashMap[ClassLoader, WeakReference[JavaMirror]]()
 
@@ -35,41 +33,39 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
     jm
   }
 
-  override type RuntimeClass = java.lang.Class[_]
-
   override type Mirror = JavaMirror
+  implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[JavaMirror])
 
   override lazy val rootMirror: Mirror = createMirror(NoSymbol, rootClassLoader)
 
   // overriden by ReflectGlobal
   def rootClassLoader: ClassLoader = this.getClass.getClassLoader
 
-  trait JavaClassCompleter extends FlagAssigningCompleter
-
-  def init() = {
-    definitions.AnyValClass // force it.
-
-    // establish root association to avoid cyclic dependency errors later
-    rootMirror.classToScala(classOf[java.lang.Object]).initialize
+  trait JavaClassCompleter
 
-    // println("initializing definitions")
-    definitions.init()
-  }
-
-  def runtimeMirror(cl: ClassLoader): Mirror = mirrors get cl match {
-    case Some(WeakReference(m)) => m
-    case _ => createMirror(rootMirror.RootClass, cl)
+  def runtimeMirror(cl: ClassLoader): Mirror = gilSynchronized {
+    mirrors get cl match {
+      case Some(WeakReference(m)) => m
+      case _ => createMirror(rootMirror.RootClass, cl)
+    }
   }
 
   /** The API of a mirror for a reflective universe */
   class JavaMirror(owner: Symbol,
-    /** Class loader that is a mastermind behind the reflexive mirror */
+    /* Class loader that is a mastermind behind the reflexive mirror */
     val classLoader: ClassLoader
   ) extends Roots(owner) with super.JavaMirror { thisMirror =>
 
     val universe: thisUniverse.type = thisUniverse
 
     import definitions._
+    private[reflect] lazy val runDefinitions = new definitions.RunDefinitions // only one "run" in the reflection universe
+    import runDefinitions._
+
+    override lazy val RootPackage = (new RootPackage with SynchronizedTermSymbol).markFlagsCompleted(mask = AllFlags)
+    override lazy val RootClass = (new RootClass with SynchronizedModuleClassSymbol).markFlagsCompleted(mask = AllFlags)
+    override lazy val EmptyPackage = (new EmptyPackage with SynchronizedTermSymbol).markFlagsCompleted(mask = AllFlags)
+    override lazy val EmptyPackageClass = (new EmptyPackageClass with SynchronizedModuleClassSymbol).markFlagsCompleted(mask = AllFlags)
 
     /** The lazy type for root.
      */
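
For reference, the public entry point that ends up in this per-classloader cache looks like the following from user code (this snippet is illustrative only and not part of the patch):

    import scala.reflect.runtime.{universe => ru}

    object MirrorUsage {
      def main(args: Array[String]): Unit = {
        val mirror  = ru.runtimeMirror(getClass.getClassLoader)
        val mirror2 = ru.runtimeMirror(getClass.getClassLoader)
        // mirrors are cached per classloader (weakly), so while the first mirror is
        // strongly reachable the second lookup returns the same instance
        println(mirror eq mirror2)                      // normally true
        println(mirror.staticClass("java.lang.String")) // class String
      }
    }
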
@@ -84,19 +80,16 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
     // the same thing is done by the `missingHook` below
     override def staticPackage(fullname: String): ModuleSymbol =
       try super.staticPackage(fullname)
-      catch {
-        case _: MissingRequirementError =>
-          makeScalaPackage(fullname)
-      }
+      catch { case _: ScalaReflectionException => makeScalaPackage(fullname) }
 
 // ----------- Caching ------------------------------------------------------------------
 
-    private val classCache = new TwoWayCache[jClass[_], ClassSymbol]
-    private val packageCache = new TwoWayCache[Package, ModuleSymbol]
-    private val methodCache = new TwoWayCache[jMethod, MethodSymbol]
+    private val classCache       = new TwoWayCache[jClass[_], ClassSymbol]
+    private val packageCache     = new TwoWayCache[Package, ModuleSymbol]
+    private val methodCache      = new TwoWayCache[jMethod, MethodSymbol]
     private val constructorCache = new TwoWayCache[jConstructor[_], MethodSymbol]
-    private val fieldCache = new TwoWayCache[jField, TermSymbol]
-    private val tparamCache = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol]
+    private val fieldCache       = new TwoWayCache[jField, TermSymbol]
+    private val tparamCache      = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol]
 
     private[runtime] def toScala[J: HasJavaClass, S](cache: TwoWayCache[J, S], key: J)(body: (JavaMirror, J) => S): S =
       cache.toScala(key){
@@ -104,39 +97,36 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
         body(mirrorDefining(jclazz), key)
       }
 
-    private implicit val classHasJavaClass: HasJavaClass[jClass[_]] =
-      new HasJavaClass(identity)
-    private implicit val methHasJavaClass: HasJavaClass[jMethod]
-      = new HasJavaClass(_.getDeclaringClass)
-    private implicit val fieldHasJavaClass: HasJavaClass[jField] =
-      new HasJavaClass(_.getDeclaringClass)
-    private implicit val constrHasJavaClass: HasJavaClass[jConstructor[_]] =
-      new HasJavaClass(_.getDeclaringClass)
+    private implicit val classHasJavaClass: HasJavaClass[jClass[_]]        = new HasJavaClass(identity)
+    private implicit val methHasJavaClass: HasJavaClass[jMethod]           = new HasJavaClass(_.getDeclaringClass)
+    private implicit val fieldHasJavaClass: HasJavaClass[jField]           = new HasJavaClass(_.getDeclaringClass)
+    private implicit val constrHasJavaClass: HasJavaClass[jConstructor[_]] = new HasJavaClass(_.getDeclaringClass)
     private implicit val tparamHasJavaClass: HasJavaClass[jTypeVariable[_ <: GenericDeclaration]] =
       new HasJavaClass ( (tparam: jTypeVariable[_ <: GenericDeclaration]) => {
         tparam.getGenericDeclaration match {
-          case jclazz: jClass[_] => jclazz
-          case jmeth: jMethod => jmeth.getDeclaringClass
+          case jclazz: jClass[_]        => jclazz
+          case jmeth: jMethod           => jmeth.getDeclaringClass
           case jconstr: jConstructor[_] => jconstr.getDeclaringClass
         }
       })
 
 // ----------- Implementations of mirror operations and classes  -------------------
 
-    private def ErrorInnerClass(sym: Symbol) = throw new ScalaReflectionException(s"$sym is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror")
-    private def ErrorInnerModule(sym: Symbol) = throw new ScalaReflectionException(s"$sym is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror")
-    private def ErrorStaticClass(sym: Symbol) = throw new ScalaReflectionException(s"$sym is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror")
-    private def ErrorStaticModule(sym: Symbol) = throw new ScalaReflectionException(s"$sym is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror")
-    private def ErrorNotMember(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a member of $owner, you provided ${sym.kindString} ${sym.fullName}")
-    private def ErrorNotField(sym: Symbol) = throw new ScalaReflectionException(s"expected a field or an accessor method symbol, you provided $sym")
-    private def ErrorNonExistentField(sym: Symbol) = throw new ScalaReflectionException(
+    private def abort(msg: String) = throw new ScalaReflectionException(msg)
+
+    private def ErrorInnerClass(sym: Symbol)                      = abort(s"$sym is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror")
+    private def ErrorInnerModule(sym: Symbol)                     = abort(s"$sym is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror")
+    private def ErrorStaticClass(sym: Symbol)                     = abort(s"$sym is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror")
+    private def ErrorStaticModule(sym: Symbol)                    = abort(s"$sym is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror")
+    private def ErrorNotMember(sym: Symbol, owner: Symbol)        = abort(s"expected a member of $owner, you provided ${sym.kindString} ${sym.fullName}")
+    private def ErrorNotField(sym: Symbol)                        = abort(s"expected a field or an accessor method symbol, you provided $sym")
+    private def ErrorNotConstructor(sym: Symbol, owner: Symbol)   = abort(s"expected a constructor of $owner, you provided $sym")
+    private def ErrorArrayConstructor(sym: Symbol, owner: Symbol) = abort(s"Cannot instantiate arrays with mirrors. Consider using `scala.reflect.ClassTag(<class of element>).newArray(<length>)` instead")
+    private def ErrorFree(member: Symbol, freeType: Symbol)       = abort(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}")
+    private def ErrorNonExistentField(sym: Symbol)                = abort(
       sm"""Scala field ${sym.name} isn't represented as a Java field, neither it has a Java accessor method
           |note that private parameters of class constructors don't get mapped onto fields and/or accessors,
           |unless they are used outside of their declaring constructors.""")
-    @deprecated("corresponding check has been removed from FieldMirror.set, this method is also being phased out", "2.11.0")
-    private def ErrorSetImmutableField(sym: Symbol) = throw new ScalaReflectionException(s"cannot set an immutable field ${sym.name}")
-    private def ErrorNotConstructor(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a constructor of $owner, you provided $sym")
-    private def ErrorFree(member: Symbol, freeType: Symbol) = throw new ScalaReflectionException(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}")
 
     /** Helper functions for extracting typed values from a (Class[_], Any)
      *  representing an annotation argument.
@@ -152,7 +142,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       object ConstantArg {
         def enumToSymbol(enum: Enum[_]): Symbol = {
           val staticPartOfEnum = classToScala(enum.getClass).companionSymbol
-          staticPartOfEnum.typeSignature.declaration(enum.name: TermName)
+          staticPartOfEnum.info.declaration(enum.name: TermName)
         }
 
         def unapply(schemaAndValue: (jClass[_], Any)): Option[Any] = schemaAndValue match {
@@ -214,7 +204,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
         case _ => body
       }
     }
-
     private def checkMemberOf(sym: Symbol, owner: ClassSymbol) {
       if (sym.owner == AnyClass || sym.owner == AnyRefClass || sym.owner == ObjectClass) {
         // do nothing
@@ -229,6 +218,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
 
     private def checkConstructorOf(sym: Symbol, owner: ClassSymbol) {
       if (!sym.isClassConstructor) ErrorNotConstructor(sym, owner)
+      if (owner == ArrayClass) ErrorArrayConstructor(sym, owner)
       ensuringNotFree(sym) {
         if (!owner.info.decls.toList.contains(sym)) ErrorNotConstructor(sym, owner)
       }
@@ -240,16 +230,12 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       if (staticClazz.isPrimitive) staticClazz else dynamicClazz
     }
 
-    private class JavaInstanceMirror[T: ClassTag](val instance: T)
-            extends InstanceMirror {
+    private class JavaInstanceMirror[T: ClassTag](val instance: T) extends InstanceMirror {
       def symbol = thisMirror.classSymbol(preciseClass(instance))
       def reflectField(field: TermSymbol): FieldMirror = {
         checkMemberOf(field, symbol)
         if ((field.isMethod && !field.isAccessor) || field.isModule) ErrorNotField(field)
-        val name =
-          if (field.isGetter) nme.getterToLocal(field.name)
-          else if (field.isSetter) nme.getterToLocal(nme.setterToGetter(field.name))
-          else field.name
+        val name = if (field.isAccessor) field.localName else field.name
         val field1 = (field.owner.info decl name).asTerm
         try fieldToJava(field1)
         catch {
@@ -259,7 +245,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       }
       def reflectMethod(method: MethodSymbol): MethodMirror = {
         checkMemberOf(method, symbol)
-        mkJavaMethodMirror(instance, method)
+        mkMethodMirror(instance, method)
       }
       def reflectClass(cls: ClassSymbol): ClassMirror = {
         if (cls.isStatic) ErrorStaticClass(cls)
@@ -274,105 +260,189 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       override def toString = s"instance mirror for $instance"
     }
 
-    private class JavaFieldMirror(val receiver: Any, val symbol: TermSymbol)
+    // caches value class metadata, so that we minimize the work that needs to be done during Mirror.apply
+    private class DerivedValueClassMetadata(info: Type) {
+      val symbol = info.typeSymbol
+      val isDerivedValueClass = symbol.isDerivedValueClass
+      lazy val boxer = runtimeClass(symbol.toType).getDeclaredConstructors().head
+      lazy val unboxer = {
+        val fields @ (field :: _) = symbol.toType.decls.collect{ case ts: TermSymbol if ts.isParamAccessor && ts.isMethod => ts }.toList
+        assert(fields.length == 1, s"$symbol: $fields")
+        runtimeClass(symbol.asClass).getDeclaredMethod(field.name.toString)
+      }
+    }
+
+    private class JavaFieldMirror(val receiver: Any, val symbol: TermSymbol, metadata: DerivedValueClassMetadata)
             extends FieldMirror {
-      lazy val jfield = {
-        val jfield = fieldToJava(symbol)
-        if (!jfield.isAccessible) jfield.setAccessible(true)
-        jfield
+      def this(receiver: Any, symbol: TermSymbol) = this(receiver, symbol, new DerivedValueClassMetadata(symbol.info))
+      def bind(newReceiver: Any) = new JavaFieldMirror(newReceiver, symbol, metadata)
+      import metadata._
+
+      lazy val jfield = ensureAccessible(fieldToJava(symbol))
+      def get = {
+        val value = jfield get receiver
+        if (isDerivedValueClass) boxer.newInstance(value) else value
       }
-      def get = jfield.get(receiver)
       def set(value: Any) = {
         // it appears useful to be able to set values of vals, therefore I'm disabling this check
         // if (!symbol.isMutable) ErrorSetImmutableField(symbol)
-        jfield.set(receiver, value)
-      }
-      // this dummy method is necessary to prevent the optimizer from stripping off ErrorSetImmutableField
-      // which would break binary compatibility with 2.10.0
-      private def dummy(symbol: Symbol) = ErrorSetImmutableField(symbol)
-      override def toString = s"field mirror for ${symbol.fullName} (bound to $receiver)"
-    }
-
-    private def showMethodSig(symbol: MethodSymbol): String = {
-      var sig = s"${symbol.fullName}"
-      if (symbol.typeParams.nonEmpty) {
-        def showTparam(tparam: Symbol) =
-          tparam.typeSignature match {
-            case tpe @ TypeBounds(_, _) => s"${tparam.name}$tpe"
-            case _ => tparam.name
-          }
-        def showTparams(tparams: List[Symbol]) = "[" + (tparams map showTparam mkString ", ") + "]"
-        sig += showTparams(symbol.typeParams)
-      }
-      if (symbol.paramss.nonEmpty) {
-        def showParam(param: Symbol) = s"${param.name}: ${param.typeSignature}"
-        def showParams(params: List[Symbol]) = {
-          val s_mods = if (params.nonEmpty && params(0).hasFlag(IMPLICIT)) "implicit " else ""
-          val s_params = params map showParam mkString ", "
-          "(" + s_mods + s_params + ")"
-        }
-        def showParamss(paramss: List[List[Symbol]]) = paramss map showParams mkString ""
-        sig += showParamss(symbol.paramss)
+        jfield.set(receiver, if (isDerivedValueClass) unboxer.invoke(value) else value)
       }
-      sig += s": ${symbol.returnType}"
-      sig
+
+      override def toString = s"field mirror for ${showDecl(symbol)} (bound to $receiver)"
     }
 
     // the "symbol == Any_getClass || symbol == Object_getClass" test doesn't cut it
     // because both AnyVal and its primitive descendants define their own getClass methods
-    private def isGetClass(meth: MethodSymbol) = meth.name.toString == "getClass" && meth.paramss.flatten.isEmpty
+    private def isGetClass(meth: MethodSymbol) = (meth.name string_== "getClass") && meth.paramss.flatten.isEmpty
     private def isStringConcat(meth: MethodSymbol) = meth == String_+ || (meth.owner.isPrimitiveValueClass && meth.returnType =:= StringClass.toType)
     lazy val bytecodelessMethodOwners = Set[Symbol](AnyClass, AnyValClass, AnyRefClass, ObjectClass, ArrayClass) ++ ScalaPrimitiveValueClasses
     lazy val bytecodefulObjectMethods = Set[Symbol](Object_clone, Object_equals, Object_finalize, Object_hashCode, Object_toString,
                                         Object_notify, Object_notifyAll) ++ ObjectClass.info.member(nme.wait_).asTerm.alternatives.map(_.asMethod)
     private def isBytecodelessMethod(meth: MethodSymbol): Boolean = {
-      if (isGetClass(meth) || isStringConcat(meth) || meth.owner.isPrimitiveValueClass || meth == Predef_classOf || meth.isTermMacro) return true
+      if (isGetClass(meth) || isStringConcat(meth) || meth.owner.isPrimitiveValueClass || meth == runDefinitions.Predef_classOf || meth.isMacro) return true
       bytecodelessMethodOwners(meth.owner) && !bytecodefulObjectMethods(meth)
     }
 
+    private def isByNameParam(p: Type) = isByNameParamType(p)
+    private def isValueClassParam(p: Type) = p.typeSymbol.isDerivedValueClass
+
     // unlike other mirrors, method mirrors are created by a factory
     // that's because we want to have decent performance
     // therefore we move special cases into separate subclasses
     // rather than have them on a hot path in a unified implementation of the `apply` method
-    private def mkJavaMethodMirror[T: ClassTag](receiver: T, symbol: MethodSymbol): JavaMethodMirror = {
-      if (isBytecodelessMethod(symbol)) new JavaBytecodelessMethodMirror(receiver, symbol)
-      else if (symbol.paramss.flatten exists (p => isByNameParamType(p.info))) new JavaByNameMethodMirror(receiver, symbol)
-      else new JavaVanillaMethodMirror(receiver, symbol)
+    private def mkMethodMirror[T: ClassTag](receiver: T, symbol: MethodSymbol): MethodMirror = {
+      def existsParam(pred: Type => Boolean) = symbol.paramss.flatten.map(_.info).exists(pred)
+      if (isBytecodelessMethod(symbol)) new BytecodelessMethodMirror(receiver, symbol)
+      else if (existsParam(isByNameParam) || existsParam(isValueClassParam)) new JavaTransformingMethodMirror(receiver, symbol)
+      else {
+        symbol.paramss.flatten.length match {
+          case 0 => new JavaVanillaMethodMirror0(receiver, symbol)
+          case 1 => new JavaVanillaMethodMirror1(receiver, symbol)
+          case 2 => new JavaVanillaMethodMirror2(receiver, symbol)
+          case 3 => new JavaVanillaMethodMirror3(receiver, symbol)
+          case 4 => new JavaVanillaMethodMirror4(receiver, symbol)
+          case _ => new JavaVanillaMethodMirror(receiver, symbol)
+        }
+      }
     }
 
-    private abstract class JavaMethodMirror(val symbol: MethodSymbol)
-            extends MethodMirror {
-      lazy val jmeth = {
-        val jmeth = methodToJava(symbol)
-        if (!jmeth.isAccessible) jmeth.setAccessible(true)
-        jmeth
+    private abstract class JavaMethodMirror(val symbol: MethodSymbol, protected val ret: DerivedValueClassMetadata) extends MethodMirror {
+      lazy val jmeth = ensureAccessible(methodToJava(symbol))
+      lazy val jconstr = ensureAccessible(constructorToJava(symbol))
+
+      def jinvokeraw(args: Seq[Any]) =
+        if (!symbol.isConstructor) jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*)
+        else if (receiver == null) jconstr.newInstance(args.asInstanceOf[Seq[AnyRef]]: _*)
+        else jconstr.newInstance((receiver +: args).asInstanceOf[Seq[AnyRef]]: _*)
+      def jinvoke(args: Seq[Any]): Any = {
+        val result = jinvokeraw(args)
+        if (!symbol.isConstructor && jmeth.getReturnType == java.lang.Void.TYPE) ()
+        else if (!symbol.isConstructor && ret.isDerivedValueClass) ret.boxer.newInstance(result.asInstanceOf[AnyRef])
+        else result
       }
 
-      def jinvoke(jmeth: jMethod, receiver: Any, args: Seq[Any]): Any = {
-        val result = jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*)
-        if (jmeth.getReturnType == java.lang.Void.TYPE) ()
-        else result
+      override def toString = {
+        val what = if (symbol.isConstructor) "constructor mirror" else "method mirror"
+        s"$what for ${showDecl(symbol)} (bound to $receiver)"
       }
+    }
+
+    private class JavaVanillaMethodMirror(val receiver: Any, symbol: MethodSymbol, ret: DerivedValueClassMetadata)
+            extends JavaMethodMirror(symbol, ret) {
+      def this(receiver: Any, symbol: MethodSymbol) = this(receiver, symbol, new DerivedValueClassMetadata(symbol.returnType))
+      def bind(newReceiver: Any) = new JavaVanillaMethodMirror(newReceiver, symbol, ret)
+      def apply(args: Any*): Any = jinvoke(args)
+    }
+
+    private class JavaVanillaMethodMirror0(receiver: Any, symbol: MethodSymbol, ret: DerivedValueClassMetadata)
+            extends JavaVanillaMethodMirror(receiver, symbol, ret) {
+      def this(receiver: Any, symbol: MethodSymbol) = this(receiver, symbol, new DerivedValueClassMetadata(symbol.returnType))
+      override def bind(newReceiver: Any) = new JavaVanillaMethodMirror0(newReceiver, symbol, ret)
+      override def jinvokeraw(args: Seq[Any]) =
+        if (!symbol.isConstructor) jmeth.invoke(receiver)
+        else if (receiver == null) jconstr.newInstance()
+        else jconstr.newInstance(receiver.asInstanceOf[AnyRef])
+    }
+
+    private class JavaVanillaMethodMirror1(receiver: Any, symbol: MethodSymbol, ret: DerivedValueClassMetadata)
+            extends JavaVanillaMethodMirror(receiver, symbol, ret) {
+      def this(receiver: Any, symbol: MethodSymbol) = this(receiver, symbol, new DerivedValueClassMetadata(symbol.returnType))
+      override def bind(newReceiver: Any) = new JavaVanillaMethodMirror1(newReceiver, symbol, ret)
+      override def jinvokeraw(args: Seq[Any]) =
+        if (!symbol.isConstructor) jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef])
+        else if (receiver == null) jconstr.newInstance(args(0).asInstanceOf[AnyRef])
+        else jconstr.newInstance(receiver.asInstanceOf[AnyRef], args(0).asInstanceOf[AnyRef])
+    }
+
+    private class JavaVanillaMethodMirror2(receiver: Any, symbol: MethodSymbol, ret: DerivedValueClassMetadata)
+            extends JavaVanillaMethodMirror(receiver, symbol, ret) {
+      def this(receiver: Any, symbol: MethodSymbol) = this(receiver, symbol, new DerivedValueClassMetadata(symbol.returnType))
+      override def bind(newReceiver: Any) = new JavaVanillaMethodMirror2(newReceiver, symbol, ret)
+      override def jinvokeraw(args: Seq[Any]) =
+        if (!symbol.isConstructor) jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef])
+        else if (receiver == null) jconstr.newInstance(args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef])
+        else jconstr.newInstance(receiver.asInstanceOf[AnyRef], args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef])
+    }
+
+    private class JavaVanillaMethodMirror3(receiver: Any, symbol: MethodSymbol, ret: DerivedValueClassMetadata)
+            extends JavaVanillaMethodMirror(receiver, symbol, ret) {
+      def this(receiver: Any, symbol: MethodSymbol) = this(receiver, symbol, new DerivedValueClassMetadata(symbol.returnType))
+      override def bind(newReceiver: Any) = new JavaVanillaMethodMirror3(newReceiver, symbol, ret)
+      override def jinvokeraw(args: Seq[Any]) =
+        if (!symbol.isConstructor) jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef])
+        else if (receiver == null) jconstr.newInstance(args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef])
+        else jconstr.newInstance(receiver.asInstanceOf[AnyRef], args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef])
+    }
 
-      override def toString = s"method mirror for ${showMethodSig(symbol)} (bound to $receiver)"
+    private class JavaVanillaMethodMirror4(receiver: Any, symbol: MethodSymbol, ret: DerivedValueClassMetadata)
+            extends JavaVanillaMethodMirror(receiver, symbol, ret) {
+      def this(receiver: Any, symbol: MethodSymbol) = this(receiver, symbol, new DerivedValueClassMetadata(symbol.returnType))
+      override def bind(newReceiver: Any) = new JavaVanillaMethodMirror4(newReceiver, symbol, ret)
+      override def jinvokeraw(args: Seq[Any]) =
+        if (!symbol.isConstructor) jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef], args(3).asInstanceOf[AnyRef])
+        else if (receiver == null) jconstr.newInstance(args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef], args(3).asInstanceOf[AnyRef])
+        else jconstr.newInstance(receiver.asInstanceOf[AnyRef], args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef], args(3).asInstanceOf[AnyRef])
     }
 
-    private class JavaVanillaMethodMirror(val receiver: Any, symbol: MethodSymbol)
-            extends JavaMethodMirror(symbol) {
-      def apply(args: Any*): Any = jinvoke(jmeth, receiver, args)
+    // caches MethodSymbol metadata, so that we minimize the work that needs to be done during Mirror.apply
+    // TODO: vararg is only supported in the last parameter list (SI-6182), so we don't need to worry about the rest for now
+    private class MethodMetadata(symbol: MethodSymbol) {
+      private val params = symbol.paramss.flatten.toArray
+      private val vcMetadata = params.map(p => new DerivedValueClassMetadata(p.info))
+      val isByName = params.map(p => isByNameParam(p.info))
+      def isDerivedValueClass(i: Int) = vcMetadata(i).isDerivedValueClass
+      def paramUnboxers(i: Int) = vcMetadata(i).unboxer
+      val paramCount = params.length
+      val ret = new DerivedValueClassMetadata(symbol.returnType)
     }
 
-    private class JavaByNameMethodMirror(val receiver: Any, symbol: MethodSymbol)
-            extends JavaMethodMirror(symbol) {
+    private class JavaTransformingMethodMirror(val receiver: Any, symbol: MethodSymbol, metadata: MethodMetadata)
+            extends JavaMethodMirror(symbol, metadata.ret) {
+      def this(receiver: Any, symbol: MethodSymbol) = this(receiver, symbol, new MethodMetadata(symbol))
+      override def bind(newReceiver: Any) = new JavaTransformingMethodMirror(newReceiver, symbol, metadata)
+      import metadata._
+
       def apply(args: Any*): Any = {
-        val transformed = map2(args.toList, symbol.paramss.flatten)((arg, param) => if (isByNameParamType(param.info)) () => arg else arg)
-        jinvoke(jmeth, receiver, transformed)
+        val args1 = new Array[Any](args.length)
+        var i = 0
+        while (i < args1.length) {
+          val arg = args(i)
+          if (i >= paramCount) args1(i) = arg // don't transform varargs
+          else if (isByName(i)) args1(i) = () => arg // don't transform by-name value class params
+          else if (isDerivedValueClass(i)) args1(i) = paramUnboxers(i).invoke(arg)
+          i += 1
+        }
+        jinvoke(args1)
       }
     }
 
-    private class JavaBytecodelessMethodMirror[T: ClassTag](val receiver: T, symbol: MethodSymbol)
-            extends JavaMethodMirror(symbol) {
-       def apply(args: Any*): Any = {
+    private class BytecodelessMethodMirror[T: ClassTag](val receiver: T, val symbol: MethodSymbol)
+            extends MethodMirror {
+      def bind(newReceiver: Any) = new BytecodelessMethodMirror(newReceiver.asInstanceOf[T], symbol)
+      override def toString = s"bytecodeless method mirror for ${showDecl(symbol)} (bound to $receiver)"
+
+      def apply(args: Any*): Any = {
         // checking type conformance is too much of a hassle, so we don't do it here
         // actually it's not even necessary, because we manually dispatch arguments below
         val params = symbol.paramss.flatten
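
To make the value-class handling above concrete, here is a hedged usage sketch (the `Meters` value class and `Probe` are invented for the example): reflectively invoking a method whose parameter and result are derived value classes goes through the transforming mirror, which unboxes the argument and re-boxes the result.

    import scala.reflect.runtime.{universe => ru}

    // Hypothetical value class and host, used only to exercise the boxing paths.
    class Meters(val value: Double) extends AnyVal
    class Probe { def twice(m: Meters): Meters = new Meters(m.value * 2) }

    object MirrorInvoke {
      def main(args: Array[String]): Unit = {
        val m        = ru.runtimeMirror(getClass.getClassLoader)
        val im       = m.reflect(new Probe)
        val twiceSym = ru.typeOf[Probe].decl(ru.TermName("twice")).asMethod
        // the erased Java method takes and returns Double; the mirror unboxes the
        // Meters argument before the call and boxes the Double result back up
        val result = im.reflectMethod(twiceSym)(new Meters(3.0))
        println(result.asInstanceOf[Meters].value) // 6.0
      }
    }
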
@@ -383,19 +453,22 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
         val varargMatch = args.length >= params.length - 1 && isVarArgsList(params)
         if (!perfectMatch && !varargMatch) {
           val n_arguments = if (isVarArgsList(params)) s"${params.length - 1} or more" else s"${params.length}"
-          var s_arguments = if (params.length == 1 && !isVarArgsList(params)) "argument" else "arguments"
-          throw new ScalaReflectionException(s"${showMethodSig(symbol)} takes $n_arguments $s_arguments")
+          val s_arguments = if (params.length == 1 && !isVarArgsList(params)) "argument" else "arguments"
+          abort(s"${showDecl(symbol)} takes $n_arguments $s_arguments")
         }
 
         def objReceiver       = receiver.asInstanceOf[AnyRef]
         def objArg0           = args(0).asInstanceOf[AnyRef]
         def objArgs           = args.asInstanceOf[Seq[AnyRef]]
-        def fail(msg: String) = throw new ScalaReflectionException(msg + ", it cannot be invoked with mirrors")
+        def fail(msg: String) = abort(msg + ", it cannot be invoked with mirrors")
 
         def invokePrimitiveMethod = {
           val jmeths = classOf[BoxesRunTime].getDeclaredMethods.filter(_.getName == nme.primitiveMethodName(symbol.name).toString)
           assert(jmeths.length == 1, jmeths.toList)
-          jinvoke(jmeths.head, null, objReceiver +: objArgs)
+          val jmeth = jmeths.head
+          val result = jmeth.invoke(null, (objReceiver +: objArgs).asInstanceOf[Seq[AnyRef]]: _*)
+          if (jmeth.getReturnType == java.lang.Void.TYPE) ()
+          else result
         }
 
         symbol match {
@@ -411,8 +484,8 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
           case sym if isGetClass(sym)                 => preciseClass(receiver)
           case Any_asInstanceOf                       => fail("Any.asInstanceOf requires a type argument")
           case Any_isInstanceOf                       => fail("Any.isInstanceOf requires a type argument")
-          case Object_asInstanceOf                    => fail("AnyRef.$asInstanceOf is an internal method")
-          case Object_isInstanceOf                    => fail("AnyRef.$isInstanceOf is an internal method")
+          case Object_asInstanceOf                    => fail("AnyRef.%s is an internal method" format symbol.name)
+          case Object_isInstanceOf                    => fail("AnyRef.%s is an internal method" format symbol.name)
           case Array_length                           => ScalaRunTime.array_length(objReceiver)
           case Array_apply                            => ScalaRunTime.array_apply(objReceiver, args(0).asInstanceOf[Int])
           case Array_update                           => ScalaRunTime.array_update(objReceiver, args(0).asInstanceOf[Int], args(1))
@@ -420,37 +493,16 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
           case sym if isStringConcat(sym)             => receiver.toString + objArg0
           case sym if sym.owner.isPrimitiveValueClass => invokePrimitiveMethod
           case sym if sym == Predef_classOf           => fail("Predef.classOf is a compile-time function")
-          case sym if sym.isTermMacro                 => fail(s"${symbol.fullName} is a macro, i.e. a compile-time function")
+          case sym if sym.isMacro                     => fail(s"${symbol.fullName} is a macro, i.e. a compile-time function")
           case _                                      => abort(s"unsupported symbol $symbol when invoking $this")
         }
       }
     }
 
-    private class JavaConstructorMirror(val outer: AnyRef, val symbol: MethodSymbol)
-            extends MethodMirror {
-      override val receiver = outer
-      lazy val jconstr = {
-        val jconstr = constructorToJava(symbol)
-        if (!jconstr.isAccessible) jconstr.setAccessible(true)
-        jconstr
-      }
-      def apply(args: Any*): Any = {
-        if (symbol.owner == ArrayClass)
-          throw new ScalaReflectionException("Cannot instantiate arrays with mirrors. Consider using `scala.reflect.ClassTag(<class of element>).newArray(<length>)` instead")
-
-        val effectiveArgs =
-          if (outer == null) args.asInstanceOf[Seq[AnyRef]]
-          else outer +: args.asInstanceOf[Seq[AnyRef]]
-        jconstr.newInstance(effectiveArgs: _*)
-      }
-      override def toString = s"constructor mirror for ${showMethodSig(symbol)} (bound to $outer)"
-    }
-
     private abstract class JavaTemplateMirror
             extends TemplateMirror {
       def outer: AnyRef
       def erasure: ClassSymbol
-      lazy val signature = typeToScala(classToJava(erasure))
     }
 
     private class JavaClassMirror(val outer: AnyRef, val symbol: ClassSymbol)
@@ -459,7 +511,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       def isStatic = false
       def reflectConstructor(constructor: MethodSymbol) = {
         checkConstructorOf(constructor, symbol)
-        new JavaConstructorMirror(outer, constructor)
+        mkMethodMirror(outer, constructor)
       }
       override def toString = s"class mirror for ${symbol.fullName} (bound to $outer)"
     }
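
Since constructor mirrors now also come out of `mkMethodMirror`, the user-facing round trip is unchanged; a small illustrative example (the `Point` class is made up):

    import scala.reflect.runtime.{universe => ru}

    case class Point(x: Int, y: Int) // hypothetical class for the example

    object ConstructorMirrorUsage {
      def main(args: Array[String]): Unit = {
        val m        = ru.runtimeMirror(getClass.getClassLoader)
        val classSym = ru.typeOf[Point].typeSymbol.asClass
        val ctorSym  = ru.typeOf[Point].decl(ru.termNames.CONSTRUCTOR).asMethod
        // reflectConstructor now returns one of the mkMethodMirror implementations
        val point = m.reflectClass(classSym).reflectConstructor(ctorSym)(1, 2)
        println(point) // Point(1,2)
      }
    }
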
@@ -469,11 +521,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       def erasure = symbol.moduleClass.asClass
       def isStatic = true
       def instance = {
-        if (symbol.owner.isPackageClass)
+        if (symbol.isTopLevel)
           staticSingletonInstance(classLoader, symbol.fullName)
         else
           if (outer == null) staticSingletonInstance(classToJava(symbol.moduleClass.asClass))
-          else innerSingletonInstance(outer, symbol.name)
+          else innerSingletonInstance(outer, symbol.name.toString)
       }
       override def toString = s"module mirror for ${symbol.fullName} (bound to $outer)"
     }
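
And the corresponding module side, again purely illustrative (the `Config` object is made up): a top-level object takes the `staticSingletonInstance` branch above.

    import scala.reflect.runtime.{universe => ru}

    object Config { val answer = 42 } // hypothetical top-level object

    object ModuleMirrorUsage {
      def main(args: Array[String]): Unit = {
        val m         = ru.runtimeMirror(getClass.getClassLoader)
        val moduleSym = ru.typeOf[Config.type].termSymbol.asModule
        // for a top-level object, `instance` resolves via staticSingletonInstance
        println(m.reflectModule(moduleSym).instance.asInstanceOf[Config.type].answer) // 42
      }
    }
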
@@ -497,16 +549,13 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
     }
 
     def javaClass(path: String): jClass[_] =
-      Class.forName(path, true, classLoader)
+      jClass.forName(path, true, classLoader)
 
     /** Does `path` correspond to a Java class with that fully qualified name in the current class loader? */
-    def tryJavaClass(path: String): Option[jClass[_]] =
-      try {
-        Some(javaClass(path))
-      } catch {
-        case (_: ClassNotFoundException) | (_: NoClassDefFoundError) | (_: IncompatibleClassChangeError) =>
-          None
-      }
+    def tryJavaClass(path: String): Option[jClass[_]] = (
+      try Some(javaClass(path))
+      catch { case ex @ (_: LinkageError | _: ClassNotFoundException) => None } // TODO - log
+    )
 
     /** The mirror that corresponds to the classloader that originally defined the given Java class */
     def mirrorDefining(jclazz: jClass[_]): JavaMirror = {
@@ -515,7 +564,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
     }
 
     private object unpickler extends UnPickler {
-      val global: thisUniverse.type = thisUniverse
+      val symbolTable: thisUniverse.type = thisUniverse
     }
 
     /** how connected????
@@ -530,7 +579,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       def markAbsent(tpe: Type) = setAllInfos(clazz, module, tpe)
       def handleError(ex: Exception) = {
         markAbsent(ErrorType)
-        if (settings.debug.value) ex.printStackTrace()
+        if (settings.debug) ex.printStackTrace()
         val msg = ex.getMessage()
         MissingRequirementError.signal(
           (if (msg eq null) "reflection error while loading " + clazz.name
@@ -562,7 +611,9 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
             info(s"unpickling Scala $clazz and $module, owner = ${clazz.owner}")
             val bytes = ssig.getBytes
             val len = ByteCodecs.decode(bytes)
+            assignAssociatedFile(clazz, module, jclazz)
             unpickler.unpickle(bytes take len, 0, clazz, module, jclazz.getName)
+            markAllCompleted(clazz, module)
           case None =>
             loadBytes[Array[String]]("scala.reflect.ScalaLongSignature") match {
               case Some(slsig) =>
@@ -570,11 +621,13 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
                 val encoded = slsig flatMap (_.getBytes)
                 val len = ByteCodecs.decode(encoded)
                 val decoded = encoded.take(len)
+                assignAssociatedFile(clazz, module, jclazz)
                 unpickler.unpickle(decoded, 0, clazz, module, jclazz.getName)
+                markAllCompleted(clazz, module)
               case None =>
                 // class does not have a Scala signature; it's a Java class
                 info("translating reflection info for Java " + jclazz) //debug
-                initClassModule(clazz, module, new FromJavaClassCompleter(clazz, module, jclazz))
+                initClassAndModule(clazz, module, new FromJavaClassCompleter(clazz, module, jclazz))
             }
         }
       } catch {
@@ -593,6 +646,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
     private def createTypeParameter(jtvar: jTypeVariable[_ <: GenericDeclaration]): TypeSymbol = {
       val tparam = sOwner(jtvar).newTypeParameter(newTypeName(jtvar.getName))
         .setInfo(new TypeParamCompleter(jtvar))
+      markFlagsCompleted(tparam)(mask = AllFlags)
       tparamCache enter (jtvar, tparam)
       tparam
     }
@@ -605,9 +659,16 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       override def load(sym: Symbol) = complete(sym)
       override def complete(sym: Symbol) = {
         sym setInfo TypeBounds.upper(glb(jtvar.getBounds.toList map typeToScala map objToAny))
+        markAllCompleted(sym)
       }
     }
 
+    private def assignAssociatedFile(clazz: Symbol, module: Symbol, jclazz: jClass[_]): Unit = {
+      val associatedFile = ReflectionUtils.associatedFile(jclazz)
+      clazz.associatedFile = associatedFile
+      if (module != NoSymbol) module.associatedFile = associatedFile
+    }
+
     /**
      * Copy all annotations of Java annotated element `jann` over to Scala symbol `sym`.
      * Also creates `@throws` annotations if necessary.
@@ -618,13 +679,22 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       sym setAnnotations (jann.getAnnotations map JavaAnnotationProxy).toList
       // SI-7065: we're not using getGenericExceptionTypes here to be consistent with ClassfileParser
       val jexTpes = jann match {
-        case jm: jMethod => jm.getExceptionTypes.toList
+        case jm: jMethod              => jm.getExceptionTypes.toList
         case jconstr: jConstructor[_] => jconstr.getExceptionTypes.toList
-        case _ => Nil
+        case _                        => Nil
       }
       jexTpes foreach (jexTpe => sym.addThrowsAnnotation(classSymbol(jexTpe)))
     }
 
+    private implicit class jClassOps(val clazz: jClass[_]) {
+      def javaFlags: JavaAccFlags = JavaAccFlags(clazz)
+      def scalaFlags: Long        = javaFlags.toScalaFlags
+    }
+    private implicit class jMemberOps(val member: jMember) {
+      def javaFlags: JavaAccFlags = JavaAccFlags(member)
+      def scalaFlags: Long        = javaFlags.toScalaFlags
+    }
+
     /**
      * A completer that fills in the types of a Scala class and its companion object
      *  by copying corresponding type info from a Java class. This completer is used
@@ -634,7 +704,16 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
      *  @param   module  The Scala companion object for which info is copied
      *  @param   jclazz  The Java class
      */
-    private class FromJavaClassCompleter(clazz: Symbol, module: Symbol, jclazz: jClass[_]) extends LazyType with JavaClassCompleter with FlagAssigningCompleter {
+    private class FromJavaClassCompleter(clazz: Symbol, module: Symbol, jclazz: jClass[_]) extends LazyType with JavaClassCompleter with FlagAgnosticCompleter {
+      // one doesn't need to do non-trivial computations to assign flags for Java-based reflection artifacts
+      // therefore I'm moving flag-assigning logic from completion to construction
+      val flags = jclazz.scalaFlags
+      clazz setFlag (flags | JAVA)
+      if (module != NoSymbol) {
+        module setFlag (flags & PRIVATE | JAVA)
+        module.moduleClass setFlag (flags & PRIVATE | JAVA)
+      }
+      markFlagsCompleted(clazz, module)(mask = AllFlags)
 
       /** used to avoid cycles while initializing classes */
       private var parentsLevel = 0
@@ -644,14 +723,9 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       override def load(sym: Symbol): Unit = {
         debugInfo("completing from Java " + sym + "/" + clazz.fullName)//debug
         assert(sym == clazz || (module != NoSymbol && (sym == module || sym == module.moduleClass)), sym)
-        val flags = toScalaClassFlags(jclazz.getModifiers)
-        clazz setFlag (flags | JAVA)
-        if (module != NoSymbol) {
-          module setFlag (flags & PRIVATE | JAVA)
-          module.moduleClass setFlag (flags & PRIVATE | JAVA)
-        }
 
-        relatedSymbols foreach (importPrivateWithinFromJavaFlags(_, jclazz.getModifiers))
+        assignAssociatedFile(clazz, module, jclazz)
+        propagatePackageBoundary(jclazz, relatedSymbols: _*)
         copyAnnotations(clazz, jclazz)
         // to do: annotations to set also for module?
 
@@ -665,18 +739,19 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       override def complete(sym: Symbol): Unit = {
         load(sym)
         completeRest()
+        markAllCompleted(clazz, module)
       }
 
-      def completeRest(): Unit = thisUniverse.synchronized {
+      def completeRest(): Unit = gilSynchronized {
         val tparams = clazz.rawInfo.typeParams
 
         val parents = try {
           parentsLevel += 1
           val jsuperclazz = jclazz.getGenericSuperclass
           val ifaces = jclazz.getGenericInterfaces.toList map typeToScala
-          val isAnnotation = (jclazz.getModifiers & JAVA_ACC_ANNOTATION) != 0
+          val isAnnotation = JavaAccFlags(jclazz).isAnnotation
           if (isAnnotation) AnnotationClass.tpe :: ClassfileAnnotationClass.tpe :: ifaces
-          else (if (jsuperclazz == null) AnyClass.tpe else typeToScala(jsuperclazz)) :: ifaces
+          else (if (jsuperclazz == null) AnyTpe else typeToScala(jsuperclazz)) :: ifaces
         } finally {
           parentsLevel -= 1
         }
@@ -685,8 +760,8 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
           module.moduleClass setInfo new ClassInfoType(List(), newScope, module.moduleClass)
         }
 
-        def enter(sym: Symbol, mods: Int) =
-          (if (jModifier.isStatic(mods)) module.moduleClass else clazz).info.decls enter sym
+        def enter(sym: Symbol, mods: JavaAccFlags) =
+          ( if (mods.isStatic) module.moduleClass else clazz ).info.decls enter sym
 
         def enterEmptyCtorIfNecessary(): Unit = {
           if (jclazz.getConstructors.isEmpty)
@@ -698,23 +773,15 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
                                 // no need to call enter explicitly
         }
 
-        pendingLoadActions = { () =>
-
-          for (jfield <- jclazz.getDeclaredFields)
-            enter(jfieldAsScala(jfield), jfield.getModifiers)
-
-          for (jmeth <- jclazz.getDeclaredMethods)
-            enter(jmethodAsScala(jmeth), jmeth.getModifiers)
-
-          for (jconstr <- jclazz.getConstructors)
-            enter(jconstrAsScala(jconstr), jconstr.getModifiers)
-
+        pendingLoadActions ::= { () =>
+          jclazz.getDeclaredFields  foreach (f => enter(jfieldAsScala(f),  f.javaFlags))
+          jclazz.getDeclaredMethods foreach (m => enter(jmethodAsScala(m), m.javaFlags))
+          jclazz.getConstructors    foreach (c => enter(jconstrAsScala(c), c.javaFlags))
           enterEmptyCtorIfNecessary()
-
-        } :: pendingLoadActions
+        }
 
         if (parentsLevel == 0) {
-          while (!pendingLoadActions.isEmpty) {
+          while (pendingLoadActions.nonEmpty) {
             val item = pendingLoadActions.head
             pendingLoadActions = pendingLoadActions.tail
             item()
@@ -725,6 +792,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       class LazyPolyType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
         override def complete(sym: Symbol) {
           completeRest()
+          markAllCompleted(clazz, module)
         }
       }
     }
@@ -733,8 +801,8 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
      * If Java modifiers `mods` contain STATIC, return the module class
      *  of the companion module of `clazz`, otherwise the class `clazz` itself.
      */
-    private def followStatic(clazz: Symbol, mods: Int) =
-      if (jModifier.isStatic(mods)) clazz.companionModule.moduleClass else clazz
+    private def followStatic(clazz: Symbol, mods: JavaAccFlags) =
+      if (mods.isStatic) clazz.companionModule.moduleClass else clazz
 
   /** Methods which need to be treated with care
    *  because they either are getSimpleName or call getSimpleName:
@@ -766,39 +834,25 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
     /**
      * The Scala owner of the Scala class corresponding to the Java class `jclazz`
      */
-    private def sOwner(jclazz: jClass[_]): Symbol =
-      if (jclazz.isMemberClass) {
-        val jEnclosingClass = jclazz.getEnclosingClass
-        val sEnclosingClass = classToScala(jEnclosingClass)
-        followStatic(sEnclosingClass, jclazz.getModifiers)
-      } else if (jclazz.isLocalClass0) {
-        val jEnclosingMethod = jclazz.getEnclosingMethod
-        if (jEnclosingMethod != null) {
-          methodToScala(jEnclosingMethod)
-        } else {
-          val jEnclosingConstructor = jclazz.getEnclosingConstructor
-          constructorToScala(jEnclosingConstructor)
-        }
-      } else if (jclazz.isPrimitive || jclazz.isArray) {
-        ScalaPackageClass
-      } else if (jclazz.getPackage != null) {
-        val jPackage = jclazz.getPackage
-        packageToScala(jPackage).moduleClass
-      } else {
-        // @eb: a weird classloader might return a null package for something with a non-empty package name
-        // for example, http://groups.google.com/group/scala-internals/browse_thread/thread/7be09ff8f67a1e5c
-        // in that case we could invoke packageNameToScala(jPackageName) and, probably, be okay
-        // however, I think, it's better to blow up, since weirdness of the class loader might bite us elsewhere
-        // [martin] I think it's better to be forgiving here. Restoring packageNameToScala.
-        val jPackageName = jclazz.getName take jclazz.getName.lastIndexOf('.')
-        packageNameToScala(jPackageName).moduleClass
-      }
+    // @eb: a weird classloader might return a null package for something with a non-empty package name
+    // for example, http://groups.google.com/group/scala-internals/browse_thread/thread/7be09ff8f67a1e5c
+    // in that case we could invoke packageNameToScala(jPackageName) and, probably, be okay
+    // however, I think, it's better to blow up, since weirdness of the class loader might bite us elsewhere
+    // [martin] I think it's better to be forgiving here. Restoring packageNameToScala.
+    private def sOwner(jclazz: jClass[_]): Symbol = jclazz match {
+      case PrimitiveOrArray()            => ScalaPackageClass
+      case EnclosedInMethod(jowner)      => methodToScala(jowner)
+      case EnclosedInConstructor(jowner) => constructorToScala(jowner)
+      case EnclosedInClass(jowner)       => followStatic(classToScala(jowner), jclazz.javaFlags)
+      case EnclosedInPackage(jowner)     => packageToScala(jowner).moduleClass
+      case _                             => packageNameToScala(jclazz.getName take jclazz.getName.lastIndexOf('.')).moduleClass
+    }
 
     /**
      * The Scala owner of the Scala symbol corresponding to the Java member `jmember`
      */
     private def sOwner(jmember: jMember): Symbol = {
-      followStatic(classToScala(jmember.getDeclaringClass), jmember.getModifiers)
+      followStatic(classToScala(jmember.getDeclaringClass), jmember.javaFlags)
     }
 
     /**
@@ -814,9 +868,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
      *  that start with the given name are searched instead.
      */
     private def lookup(clazz: Symbol, jname: String): Symbol = {
-      def approximateMatch(sym: Symbol, jstr: String): Boolean =
-        (sym.name.toString == jstr) ||
-        sym.isPrivate && nme.expandedName(sym.name.toTermName, sym.owner).toString == jstr
+      def approximateMatch(sym: Symbol, jstr: String): Boolean = (
+           (sym.name string_== jstr)
+        || sym.isPrivate && (nme.expandedName(sym.name.toTermName, sym.owner) string_== jstr)
+      )
 
       clazz.info.decl(newTermName(jname)) orElse {
         (clazz.info.decls.iterator filter (approximateMatch(_, jname))).toList match {
@@ -838,7 +893,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
     private def methodToScala1(jmeth: jMethod): MethodSymbol = {
       val jOwner = jmeth.getDeclaringClass
       val preOwner = classToScala(jOwner)
-      val owner = followStatic(preOwner, jmeth.getModifiers)
+      val owner = followStatic(preOwner, jmeth.javaFlags)
       (lookup(owner, jmeth.getName) suchThat (erasesTo(_, jmeth)) orElse jmethodAsScala(jmeth))
         .asMethod
     }
@@ -852,26 +907,12 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       toScala(constructorCache, jconstr)(_ constructorToScala1 _)
 
     private def constructorToScala1(jconstr: jConstructor[_]): MethodSymbol = {
-      val owner = followStatic(classToScala(jconstr.getDeclaringClass), jconstr.getModifiers)
+      val owner = followStatic(classToScala(jconstr.getDeclaringClass), jconstr.javaFlags)
       (lookup(owner, jconstr.getName) suchThat (erasesTo(_, jconstr)) orElse jconstrAsScala(jconstr))
         .asMethod
     }
 
     /**
-     * The Scala field corresponding to given Java field.
-     *  @param  jfield  The Java field
-     *  @return A Scala field object that corresponds to `jfield`.
-     *  // ??? should we return the getter instead?
-     */
-    def fieldToScala(jfield: jField): TermSymbol =
-      toScala(fieldCache, jfield)(_ fieldToScala1 _)
-
-    private def fieldToScala1(jfield: jField): TermSymbol = {
-      val owner = followStatic(classToScala(jfield.getDeclaringClass), jfield.getModifiers)
-      (lookup(owner, jfield.getName) suchThat (!_.isMethod) orElse jfieldAsScala(jfield)).asTerm
-    }
-
-    /**
      * The Scala package corresponding to given Java package
      */
     def packageToScala(jpkg: jPackage): ModuleSymbol = packageCache.toScala(jpkg) {
@@ -893,19 +934,20 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
      * The Scala package with given fully qualified name. Unlike `packageNameToScala`,
      *  this one bypasses the cache.
      */
-    private[JavaMirrors] def makeScalaPackage(fullname: String): ModuleSymbol = {
+    private[JavaMirrors] def makeScalaPackage(fullname: String): ModuleSymbol = gilSynchronized {
       val split = fullname lastIndexOf '.'
       val ownerModule: ModuleSymbol =
         if (split > 0) packageNameToScala(fullname take split) else this.RootPackage
       val owner = ownerModule.moduleClass
       val name = (fullname: TermName) drop split + 1
       val opkg = owner.info decl name
-      if (opkg.isPackage)
+      if (opkg.hasPackageFlag)
         opkg.asModule
       else if (opkg == NoSymbol) {
         val pkg = owner.newPackage(name)
         pkg.moduleClass setInfo new LazyPackageType
         pkg setInfoAndEnter pkg.moduleClass.tpe
+        markFlagsCompleted(pkg)(mask = AllFlags)
         info("made Scala "+pkg)
         pkg
       } else
@@ -974,8 +1016,8 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
             javaTypeToValueClass(jclazz) orElse lookupClass
 
         assert (cls.isType,
-          sm"""${if (cls == NoSymbol) "not a type: symbol" else "no symbol could be"}
-              | loaded from $jclazz in $owner with name $simpleName and classloader $classLoader""")
+          (if (cls != NoSymbol) s"not a type: symbol $cls" else "no symbol could be") +
+          s" loaded from $jclazz in $owner with name $simpleName and classloader $classLoader")
 
         cls.asClass
       }
@@ -992,7 +1034,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
     private def typeParamToScala1(jparam: jTypeVariable[_ <: GenericDeclaration]): TypeSymbol = {
       val owner = genericDeclarationToScala(jparam.getGenericDeclaration)
       owner.info match {
-        case PolyType(tparams, _) => tparams.find(_.name.toString == jparam.getName).get.asType
+        case PolyType(tparams, _) => tparams.find(_.name string_== jparam.getName).get.asType
       }
     }
 
@@ -1004,6 +1046,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       case jmeth: jMethod           => methodToScala(jmeth)
       case jconstr: jConstructor[_] => constructorToScala(jconstr)
     }
+    def reflectMemberToScala(m: jMember): Symbol = m match {
+      case x: GenericDeclaration => genericDeclarationToScala(x)
+      case x: jField             => jfieldAsScala(x)
+    }
 
     /**
      * Given some Java type arguments, a corresponding list of Scala types, plus potentially
@@ -1056,18 +1102,15 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
      *  @param jclazz  The Java class
      *  @return A Scala class symbol that wraps all reflection info of `jclazz`
      */
-    private def jclassAsScala(jclazz: jClass[_]): Symbol = {
-      val clazz = sOwner(jclazz) // sOwner called outside of closure for binary compatibility
-      toScala(classCache, jclazz){ (mirror, jclazz) =>
-        mirror.jclassAsScala(jclazz, clazz)
-      }
-    }
+    private def jclassAsScala(jclazz: jClass[_]): ClassSymbol =
+      toScala(classCache, jclazz)(_ jclassAsScala1 _)
+
+    private def jclassAsScala1(jclazz: jClass[_]): ClassSymbol = {
+      val owner = sOwner(jclazz)
+      val name = scalaSimpleName(jclazz)
+      val completer = (clazz: Symbol, module: Symbol) => new FromJavaClassCompleter(clazz, module, jclazz)
 
-    private def jclassAsScala(jclazz: jClass[_], owner: Symbol): ClassSymbol = {
-      val name       = scalaSimpleName(jclazz)
-      val completer  = (clazz: Symbol, module: Symbol) => new FromJavaClassCompleter(clazz, module, jclazz)
-      val (clazz, _) = createClassModule(owner, name, completer)
-      clazz
+      initAndEnterClassAndModule(owner, name, completer)._1
     }
 
     /**
@@ -1081,11 +1124,13 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
 
     private def jfieldAsScala1(jfield: jField): TermSymbol = {
       val field = sOwner(jfield)
-          .newValue(newTermName(jfield.getName), NoPosition, toScalaFieldFlags(jfield.getModifiers))
+          .newValue(newTermName(jfield.getName), NoPosition, jfield.scalaFlags)
           .setInfo(typeToScala(jfield.getGenericType))
-      fieldCache enter (jfield, field)
-      importPrivateWithinFromJavaFlags(field, jfield.getModifiers)
+
+      fieldCache.enter(jfield, field)
+      propagatePackageBoundary(jfield, field)
       copyAnnotations(field, jfield)
+      markAllCompleted(field)
       field
     }
 
@@ -1104,15 +1149,16 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
 
     private def jmethodAsScala1(jmeth: jMethod): MethodSymbol = {
       val clazz = sOwner(jmeth)
-      val meth = clazz.newMethod(newTermName(jmeth.getName), NoPosition, toScalaMethodFlags(jmeth.getModifiers))
+      val meth = clazz.newMethod(newTermName(jmeth.getName), NoPosition, jmeth.scalaFlags)
       methodCache enter (jmeth, meth)
       val tparams = jmeth.getTypeParameters.toList map createTypeParameter
       val paramtpes = jmeth.getGenericParameterTypes.toList map typeToScala
       val resulttpe = typeToScala(jmeth.getGenericReturnType)
       setMethType(meth, tparams, paramtpes, resulttpe)
-      importPrivateWithinFromJavaFlags(meth, jmeth.getModifiers)
+      propagatePackageBoundary(jmeth.javaFlags, meth)
       copyAnnotations(meth, jmeth)
-      if ((jmeth.getModifiers & JAVA_ACC_VARARGS) != 0) meth.setInfo(arrayToRepeated(meth.info))
+      if (jmeth.javaFlags.isVarargs) meth modifyInfo arrayToRepeated
+      markAllCompleted(meth)
       meth
     }
 
@@ -1128,26 +1174,20 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
     private def jconstrAsScala1(jconstr: jConstructor[_]): MethodSymbol = {
       // [Martin] Note: I know there's a lot of duplication wrt jmethodAsScala, but don't think it's worth it to factor this out.
       val clazz = sOwner(jconstr)
-      val constr = clazz.newConstructor(NoPosition, toScalaMethodFlags(jconstr.getModifiers))
+      val constr = clazz.newConstructor(NoPosition, jconstr.scalaFlags)
       constructorCache enter (jconstr, constr)
       val tparams = jconstr.getTypeParameters.toList map createTypeParameter
       val paramtpes = jconstr.getGenericParameterTypes.toList map typeToScala
-      setMethType(constr, tparams, paramtpes, clazz.tpe)
+      setMethType(constr, tparams, paramtpes, clazz.tpe_*)
       constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe))
-      importPrivateWithinFromJavaFlags(constr, jconstr.getModifiers)
+      propagatePackageBoundary(jconstr.javaFlags, constr)
       copyAnnotations(constr, jconstr)
+      markAllCompleted(constr)
       constr
     }
 
 // -------------------- Scala to Java  -----------------------------------
 
-    /** Optionally, the Java package corresponding to a given Scala package, or None if no such Java package exists.
-     *  @param   pkg The Scala package
-     */
-    def packageToJavaOption(pkg: ModuleSymbol): Option[jPackage] = packageCache.toJavaOption(pkg) {
-      Option(jPackage.getPackage(pkg.fullName.toString))
-    }
-
     /** The Java class corresponding to given Scala class.
      *  Note: This only works for
      *   - top-level classes
@@ -1163,16 +1203,18 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
         valueClassToJavaType(clazz)
       else if (clazz == ArrayClass)
         noClass
-      else if (clazz.owner.isPackageClass)
+      else if (clazz.isTopLevel)
         javaClass(clazz.javaClassName)
       else if (clazz.owner.isClass) {
-        val childOfClass = !clazz.owner.isModuleClass
-        val childOfTopLevel = clazz.owner.owner.isPackageClass
+        val childOfClass          = !clazz.owner.isModuleClass
+        val childOfTopLevel       = clazz.owner.isTopLevel
         val childOfTopLevelObject = clazz.owner.isModuleClass && childOfTopLevel
 
         // suggested in https://issues.scala-lang.org/browse/SI-4023?focusedCommentId=54759#comment-54759
         var ownerClazz = classToJava(clazz.owner.asClass)
-        if (childOfTopLevelObject) ownerClazz = Class.forName(ownerClazz.getName stripSuffix "$", true, ownerClazz.getClassLoader)
+        if (childOfTopLevelObject)
+          ownerClazz = jClass.forName(ownerClazz.getName stripSuffix "$", true, ownerClazz.getClassLoader)
+
         val ownerChildren = ownerClazz.getDeclaredClasses
 
         var fullNameOfJavaClass = ownerClazz.getName
@@ -1205,11 +1247,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       else sym.name.toString
 
     /** The Java field corresponding to a given Scala field.
-     *  @param   meth The Scala field.
+     *  @param   fld The Scala field.
      */
     def fieldToJava(fld: TermSymbol): jField = fieldCache.toJava(fld) {
       val jclazz = classToJava(fld.owner.asClass)
-      val jname = nme.dropLocalSuffix(fld.name).toString
+      val jname = fld.name.dropLocal.toString
       try jclazz getDeclaredField jname
       catch {
         case ex: NoSuchFieldException => jclazz getDeclaredField expandedName(fld)
@@ -1222,7 +1264,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
     def methodToJava(meth: MethodSymbol): jMethod = methodCache.toJava(meth) {
       val jclazz = classToJava(meth.owner.asClass)
       val paramClasses = transformedType(meth).paramTypes map typeToJavaClass
-      val jname = nme.dropLocalSuffix(meth.name).toString
+      val jname = meth.name.dropLocal.toString
       try jclazz getDeclaredMethod (jname, paramClasses: _*)
       catch {
         case ex: NoSuchMethodException =>
@@ -1239,7 +1281,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
       val effectiveParamClasses =
         if (!constr.owner.owner.isStaticOwner) jclazz.getEnclosingClass +: paramClasses
         else paramClasses
-      jclazz getConstructor (effectiveParamClasses: _*)
+      jclazz getDeclaredConstructor (effectiveParamClasses: _*)
     }
 
     private def jArrayClass(elemClazz: jClass[_]): jClass[_] = {
@@ -1250,12 +1292,12 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
      *  Pre: Scala type is already transformed to Java level.
      */
     def typeToJavaClass(tpe: Type): jClass[_] = tpe match {
-      case ExistentialType(_, rtpe) => typeToJavaClass(rtpe)
-      case TypeRef(_, ArrayClass, List(elemtpe)) => jArrayClass(typeToJavaClass(elemtpe))
-      case TypeRef(_, sym: ClassSymbol, _) => classToJava(sym.asClass)
+      case ExistentialType(_, rtpe)                  => typeToJavaClass(rtpe)
+      case TypeRef(_, ArrayClass, List(elemtpe))     => jArrayClass(typeToJavaClass(elemtpe))
+      case TypeRef(_, sym: ClassSymbol, _)           => classToJava(sym.asClass)
       case tpe @ TypeRef(_, sym: AliasTypeSymbol, _) => typeToJavaClass(tpe.dealias)
-      case SingleType(_, sym: ModuleSymbol) => classToJava(sym.moduleClass.asClass)
-      case _ => throw new NoClassDefFoundError("no Java class corresponding to "+tpe+" found")
+      case SingleType(_, sym: ModuleSymbol)          => classToJava(sym.moduleClass.asClass)
+      case _                                         => throw new NoClassDefFoundError("no Java class corresponding to "+tpe+" found")
     }
   }
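
For context, typeToJavaClass and classToJava above back the public runtimeClass/classSymbol operations on a runtime mirror. An illustrative sketch (not from this commit) of that surface, using only the standard 2.11 reflection API:

    import scala.reflect.runtime.{universe => ru}

    object RuntimeClassSketch extends App {
      val mirror = ru.runtimeMirror(getClass.getClassLoader)
      // Type -> java.lang.Class, routed through typeToJavaClass (note the erasure):
      println(mirror.runtimeClass(ru.typeOf[List[Int]]))   // the erased class of List
      println(mirror.runtimeClass(ru.typeOf[Array[Int]]))  // a primitive int array class
      // java.lang.Class -> ClassSymbol, the opposite direction (classToJava's inverse):
      println(mirror.classSymbol(classOf[java.util.ArrayList[_]]).fullName)
    }
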
 
@@ -1274,11 +1316,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
     case _ => abort(s"${sym}.enclosingRootClass = ${sym.enclosingRootClass}, which is not a RootSymbol")
   }
 
-  private lazy val syntheticCoreClasses: Map[(String, Name), Symbol] = {
-    def mapEntry(sym: Symbol): ((String, Name), Symbol) = (sym.owner.fullName, sym.name) -> sym
-    Map() ++ (definitions.syntheticCoreClasses map mapEntry)
-  }
-
   /** 1. If `owner` is a package class (but not the empty package) and `name` is a term name, make a new package
    *  <owner>.<name>, otherwise return NoSymbol.
    *  Exception: If owner is root and a java class with given name exists, create symbol in empty package instead
@@ -1288,22 +1325,20 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
   override def missingHook(owner: Symbol, name: Name): Symbol = {
     if (owner.hasPackageFlag) {
       val mirror = mirrorThatLoaded(owner)
-      // todo. this makes toolbox tests pass, but it's a mere workaround for SI-5865
-//      assert((owner.info decl name) == NoSymbol, s"already exists: $owner . $name")
       if (owner.isRootSymbol && mirror.tryJavaClass(name.toString).isDefined)
         return mirror.EmptyPackageClass.info decl name
       if (name.isTermName && !owner.isEmptyPackageClass)
         return mirror.makeScalaPackage(
           if (owner.isRootSymbol) name.toString else owner.fullName+"."+name)
-      syntheticCoreClasses get (owner.fullName, name) match {
-        case Some(tsym) =>
-          // synthetic core classes are only present in root mirrors
-          // because Definitions.scala, which initializes and enters them, only affects rootMirror
-          // therefore we need to enter them manually for non-root mirrors
-          if (mirror ne thisUniverse.rootMirror) owner.info.decls enter tsym
-          return tsym
-        case None =>
-      }
+      if (name == tpnme.AnyRef && owner.owner.isRoot && owner.name == tpnme.scala_)
+        // when we synthesize the scala.AnyRef symbol, we need to add it to the scope of the scala package
+        // the problem is that adding to the scope implies doing something like `owner.info.decls enter anyRef`
+        // which entails running a completer for the scala package
+        // which will try to unpickle the stuff in scala/package.class
+        // which will transitively load scala.AnyRef
+        // which doesn't exist yet, because it hasn't been added to the scope yet
+        // this missing hook ties the knot without introducing synchronization problems like before
+        return definitions.AnyRefClass
     }
     info("*** missing: "+name+"/"+name.isTermName+"/"+owner+"/"+owner.hasPackageFlag+"/"+owner.info.decls.getClass)
     super.missingHook(owner, name)
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index 1b69ca4..b544669 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -1,7 +1,11 @@
-package scala.reflect
+package scala
+package reflect
 package runtime
 
-import internal.{SomePhase, NoPhase, Phase, TreeGen}
+import scala.reflect.internal.{TreeInfo, SomePhase}
+import scala.reflect.internal.{SymbolTable => InternalSymbolTable}
+import scala.reflect.runtime.{SymbolTable => RuntimeSymbolTable}
+import scala.reflect.api.{TreeCreator, TypeCreator, Universe}
 
 /** An implementation of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders.
  *
@@ -9,20 +13,125 @@ import internal.{SomePhase, NoPhase, Phase, TreeGen}
  *
  *  @contentDiagram hideNodes "*Api" "*Extractor"
  */
-class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.SymbolTable { self =>
+class JavaUniverse extends InternalSymbolTable with JavaUniverseForce with ReflectSetup with RuntimeSymbolTable { self =>
 
+  override def inform(msg: String): Unit = log(msg)
   def picklerPhase = SomePhase
-
+  def erasurePhase = SomePhase
   lazy val settings = new Settings
-  def forInteractive = false
-  def forScaladoc = false
+  private val isLogging = sys.props contains "scala.debug.reflect"
 
-  def log(msg: => AnyRef): Unit = if (settings.debug.value) println(" [] "+msg)
+  def log(msg: => AnyRef): Unit = if (isLogging) Console.err.println("[reflect] " + msg)
 
   type TreeCopier = InternalTreeCopierOps
+  implicit val TreeCopierTag: ClassTag[TreeCopier] = ClassTag[TreeCopier](classOf[TreeCopier])
   def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
   def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
 
+  def currentFreshNameCreator = globalFreshNameCreator
+
+  override lazy val internal: Internal = new SymbolTableInternal {
+    override def typeTagToManifest[T: ClassTag](mirror0: Any, tag: Universe # TypeTag[T]): Manifest[T] = {
+      // SI-6239: make this conversion more precise
+      val mirror = mirror0.asInstanceOf[Mirror]
+      val runtimeClass = mirror.runtimeClass(tag.in(mirror).tpe)
+      Manifest.classType(runtimeClass).asInstanceOf[Manifest[T]]
+    }
+    override def manifestToTypeTag[T](mirror0: Any, manifest: Manifest[T]): Universe # TypeTag[T] =
+      TypeTag(mirror0.asInstanceOf[Mirror], new TypeCreator {
+        def apply[U <: Universe with Singleton](mirror: scala.reflect.api.Mirror[U]): U # Type = {
+          mirror.universe match {
+            case ju: JavaUniverse =>
+              val jm = mirror.asInstanceOf[ju.Mirror]
+              val sym = jm.classSymbol(manifest.runtimeClass)
+              val tpe =
+                if (manifest.typeArguments.isEmpty) sym.toType
+                else {
+                  val tags = manifest.typeArguments map (targ => ju.internal.manifestToTypeTag(jm, targ))
+                  ju.appliedType(sym.toTypeConstructor, tags map (_.in(jm).tpe))
+                }
+              tpe.asInstanceOf[U # Type]
+            case u =>
+              u.internal.manifestToTypeTag(mirror.asInstanceOf[u.Mirror], manifest).in(mirror).tpe
+          }
+        }
+      })
+  }
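
The overrides above implement the universe's Manifest/TypeTag interop: manifestToTypeTag converts type arguments recursively, while typeTagToManifest is precise only up to the erased runtime class (per the SI-6239 note). A rough usage sketch, assuming these two methods are reachable through universe.internal as declared in the 2.11 api.Internals:

    import scala.reflect.runtime.{universe => ru}

    object TagManifestSketch extends App {
      val mirror = ru.runtimeMirror(getClass.getClassLoader)
      // Manifest -> TypeTag: the type arguments of Map[String, Int] are converted recursively
      val tag = ru.internal.manifestToTypeTag(mirror, manifest[Map[String, Int]])
      println(tag)
      // TypeTag -> Manifest: precise only up to the runtime class
      val man = ru.internal.typeTagToManifest(mirror, ru.typeTag[List[Int]])
      println(man.runtimeClass)
    }
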
+
+  // can't put this in runtime.Trees since that's mixed with Global in ReflectGlobal, which has the definition from internal.Trees
+  object treeInfo extends {
+    val global: JavaUniverse.this.type = JavaUniverse.this
+  } with TreeInfo
+
   init()
-}
 
+  // ======= Initialization of runtime reflection =======
+  //
+  // This doc describes the carefully laid out sequence of actions used to initialize reflective universes.
+  //
+  // Before reading the text below, read the section on Mirrors in the reflection pre-SIP
+  // https://docs.google.com/document/d/1nAwSw4TmMplsIlzh2shYLUJ5mVh3wndDa1Zm1H6an9A/edit.
+  // Take an especially good look at Figure 2, because it illustrates fundamental principles underlying runtime reflection:
+  //   1) For each universe we have one mirror per classloader
+  //   2) Package symbols are per-mirror
+  //   3) Other symbols are per-universe, which means that a symbol (e.g. Seq in the figure) might be shared between multiple owners
+  //
+  // Main challenges that runtime reflection presents wrt initialization are:
+  //   1) Extravagant completion scheme that enters package members on-demand rather than as a result of scanning a directory with class files.
+  //      (That's a direct consequence of the fact that in the general case we can't enumerate all classes in a classloader.
+  //      As Paul rightfully mentioned, we could special-case classloaders that point to filesystems, but that is left for future work).
+  //   2) Presence of synthetic symbols that aren't loaded by normal means (from classfiles) but are synthesized on-the-fly,
+  //      and the necessity to propagate these synthetic symbols from rootMirror to other mirrors,
+  //      complicated by the fact that such symbols depend on normal symbols (e.g. AnyRef depends on Object).
+  //   3) Necessity to remain thread-safe, which limits our options related to lazy initialization
+  //      (E.g. we cannot use missingHook to enter synthetic symbols, because that's thread-unsafe).
+  //
+  // Directly addressing challenge #3, we create all synthetic symbols fully in advance during init().
+  // However, it's not as simple as just calling definitions.symbolsNotPresentInBytecode.
+  // Before doing that, we need to first initialize ObjectClass, then ScalaPackageClass, and only then deal with synthetics.
+  // Below you can find a detailed explanation for that.
+  //
+  // ### Why ScalaPackageClass? ###
+  //
+  // Forcing ScalaPackageClass first thing during startup is important, because syntheticCoreClasses such as AnyRefClass
+  // need to be entered into ScalaPackageClass, which entails calling ScalaPackageClass.info.decls.enter.
+  // If ScalaPackageClass isn't initialized by that moment, the following will happen for runtime reflection:
+  //   1) Initialization of ScalaPackageClass will trigger unpickling.
+  //   2) Unpickling will need to load some auxiliary types such as, for example, String.
+  //   3) To load String, runtime reflection will call mirrorDefining(classOf[String]).
+  //   4) This, in turn, will call runtimeMirror(classOf[String].getClassLoader).
+  //   5) For some classloader configurations, the resulting mirror will be different from rootMirror.
+  //   6) In that case, initialization of the resulting mirror will try to import definitions.syntheticCoreClasses into the mirror.
+  //   7) This will force all the lazy vals corresponding to syntheticCoreClasses.
+  //   8) By that time, the completer of ScalaPackageClass will have already called setInfo on ScalaPackageClass, so there won't be any stack overflow.
+  //
+  // So far so good, no crashes, no problems, right? Not quite.
+  // If forcing of ScalaPackageClass was called by a syntheticCoreClasses lazy val,
+  // then this lazy val will be entered twice: once during step 7 and once when returning from the original call.
+  // To avoid this we need to initialize ScalaPackageClass prior to other synthetics.
+  //
+  // ### Why ObjectClass? ###
+  //
+  // 1) As explained in JavaMirrors.missingHook, initialization of ScalaPackageClass critically depends on AnyRefClass.
+  // 2) AnyRefClass is defined as "lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe)",
+  //    which means that initialization of AnyRefClass depends on ObjectClass.
+  // 3) ObjectClass is defined as "lazy val ObjectClass = getRequiredClass(sn.Object.toString)",
+  //    which means that under some classloader configurations (see JavaMirrors.missingHook for more details)
+  //    dereferencing ObjectClass might trigger an avalanche of initializations calling back into AnyRefClass
+  //    while another AnyRefClass initializer is still on stack.
+  // 4) That will lead to AnyRefClass being entered twice (once when the recursive call returns and once when the original one returns)
+  // 5) That will crash PackageScope.enter, which helpfully detects double-enters.
+  //
+  // Therefore, before initializing ScalaPackageClass, we must pre-initialize ObjectClass
+  def init() {
+    definitions.init()
+
+    // workaround for http://groups.google.com/group/scala-internals/browse_thread/thread/97840ba4fd37b52e
+    // constructors are by definition single-threaded, so we initialize all lazy vals (and local object) in advance
+    // in order to avoid deadlocks later (e.g. one thread holds a global reflection lock and waits for definitions.Something to initialize,
+    // whereas another thread holds a definitions.Something initialization lock and needs a global reflection lock to complete the initialization)
+
+    // TODO Convert this into a macro
+    force()
+  }
+}
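
The eagerly-forced initialization above exists so that user code can hit the universe from several threads without racing on lazy initialization. A small sketch (not from this commit) of the concurrent usage pattern it is meant to support:

    import scala.reflect.runtime.{universe => ru}

    object ConcurrentReflectionSketch extends App {
      // Each thread forces type construction and symbol lookup concurrently;
      // with the pre-initialized universe this is expected to be safe.
      val threads = (1 to 8).map { i =>
        new Thread(new Runnable {
          def run(): Unit = {
            val tpe = ru.typeOf[Either[String, Int]]
            println(s"thread $i: ${tpe.typeSymbol.fullName}")
          }
        })
      }
      threads.foreach(_.start())
      threads.foreach(_.join())
    }
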
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
new file mode 100644
index 0000000..dcd262c
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
@@ -0,0 +1,448 @@
+// Generated Code, validated by run/t6240-universe-code-gen.scala
+package scala.reflect
+package runtime
+
+trait JavaUniverseForce { self: runtime.JavaUniverse  =>
+  def force() {
+    Literal(Constant(42)).duplicate
+    nme.flattenedName()
+    nme.raw
+    WeakTypeTag
+    TypeTag
+    TypeTag.Byte.tpe
+    TypeTag.Short.tpe
+    TypeTag.Char.tpe
+    TypeTag.Int.tpe
+    TypeTag.Long.tpe
+    TypeTag.Float.tpe
+    TypeTag.Double.tpe
+    TypeTag.Boolean.tpe
+    TypeTag.Unit.tpe
+    TypeTag.Any.tpe
+    TypeTag.AnyVal.tpe
+    TypeTag.AnyRef.tpe
+    TypeTag.Object.tpe
+    TypeTag.Nothing.tpe
+    TypeTag.Null.tpe
+
+    this.settings
+    this.internal
+    this.treeInfo
+    this.rootMirror
+    this.traceSymbols
+    this.perRunCaches
+    this.compat
+    this.treeBuild
+    this.FreshNameExtractor
+    this.FixedMirrorTreeCreator
+    this.FixedMirrorTypeCreator
+    this.CompoundTypeTreeOriginalAttachment
+    this.BackquotedIdentifierAttachment
+    this.ForAttachment
+    this.SyntheticUnitAttachment
+    this.SubpatternsAttachment
+    this.noPrint
+    this.typeDebug
+    this.Range
+    // inaccessible: this.posAssigner
+    this.ConsoleWriter
+    this.RefTree
+    this.PackageDef
+    this.ClassDef
+    this.ModuleDef
+    this.ValOrDefDef
+    this.ValDef
+    this.DefDef
+    this.TypeDef
+    this.LabelDef
+    this.ImportSelector
+    this.Import
+    this.Template
+    this.Block
+    this.CaseDef
+    this.Alternative
+    this.Star
+    this.Bind
+    this.UnApply
+    this.ArrayValue
+    this.Function
+    this.Assign
+    this.AssignOrNamedArg
+    this.If
+    this.Match
+    this.Return
+    this.Try
+    this.Throw
+    this.New
+    this.Typed
+    this.TypeApply
+    this.Apply
+    this.ApplyDynamic
+    this.Super
+    this.This
+    this.Select
+    this.Ident
+    this.ReferenceToBoxed
+    this.Literal
+    this.Annotated
+    this.SingletonTypeTree
+    this.SelectFromTypeTree
+    this.CompoundTypeTree
+    this.AppliedTypeTree
+    this.TypeBoundsTree
+    this.ExistentialTypeTree
+    this.TypeTree
+    this.Modifiers
+    this.EmptyTree
+    this.noSelfType
+    this.pendingSuperCall
+    this.emptyValDef
+    this.EmptyTreeTypeSubstituter
+    this.UnmappableAnnotArg
+    this.LiteralAnnotArg
+    this.ArrayAnnotArg
+    this.NestedAnnotArg
+    this.ScalaSigBytes
+    this.AnnotationInfo
+    this.Annotation
+    this.UnmappableAnnotation
+    this.ErroneousAnnotation
+    this.ThrownException
+    this.typeNames
+    this.tpnme
+    this.fulltpnme
+    this.binarynme
+    this.termNames
+    this.nme
+    this.sn
+    this.Constant
+    this.definitions
+    this.LookupSucceeded
+    this.LookupAmbiguous
+    this.LookupInaccessible
+    this.LookupNotFound
+    this.Scope
+    this.EmptyScope
+    this.Flag
+    this.KindErrors
+    this.Kind
+    this.ProperTypeKind
+    this.TypeConKind
+    this.inferKind
+    this.UnmappableTree
+    this.ErrorType
+    this.WildcardType
+    this.BoundedWildcardType
+    this.NoType
+    this.NoPrefix
+    this.ThisType
+    this.SingleType
+    this.SuperType
+    this.TypeBounds
+    this.CompoundType
+    this.baseClassesCycleMonitor
+    this.RefinedType
+    this.ClassInfoType
+    this.ConstantType
+    this.TypeRef
+    this.MethodType
+    this.NullaryMethodType
+    this.PolyType
+    this.ExistentialType
+    this.OverloadedType
+    this.ImportType
+    this.AntiPolyType
+    this.HasTypeMember
+    this.ArrayTypeRef
+    this.TypeVar
+    this.AnnotatedType
+    this.StaticallyAnnotatedType
+    this.NamedType
+    this.RepeatedType
+    this.ErasedValueType
+    this.GenPolyType
+    this.unwrapToClass
+    this.unwrapToStableClass
+    this.unwrapWrapperTypes
+    this.RecoverableCyclicReference
+    this.TypeConstraint
+    this.normalizeAliases
+    this.dropSingletonType
+    this.abstractTypesToBounds
+    this.dropIllegalStarTypes
+    this.IsDependentCollector
+    this.ApproximateDependentMap
+    this.wildcardToTypeVarMap
+    this.typeVarToOriginMap
+    this.ErroneousCollector
+    this.adaptToNewRunMap
+    this.SubTypePair
+    this.SymbolKind
+    this.NoSymbol
+    this.CyclicReference
+    this.SymbolOps
+    this.TermName
+    this.TypeName
+    this.Liftable
+    this.Unliftable
+    this.BooleanFlag
+    this.WeakTypeTag
+    this.TypeTag
+    this.Expr
+    this.NoMods
+    definitions.JavaLangPackage
+    definitions.JavaLangPackageClass
+    definitions.ScalaPackage
+    definitions.ScalaPackageClass
+    definitions.RuntimePackage
+    definitions.RuntimePackageClass
+    definitions.AnyClass
+    definitions.AnyRefClass
+    definitions.ObjectClass
+    definitions.AnyRefTpe
+    definitions.AnyTpe
+    definitions.AnyValTpe
+    definitions.BoxedUnitTpe
+    definitions.NothingTpe
+    definitions.NullTpe
+    definitions.ObjectTpe
+    definitions.SerializableTpe
+    definitions.StringTpe
+    definitions.ThrowableTpe
+    definitions.ConstantTrue
+    definitions.ConstantFalse
+    definitions.ConstantNull
+    definitions.AnyValClass
+    definitions.RuntimeNothingClass
+    definitions.RuntimeNullClass
+    definitions.NothingClass
+    definitions.NullClass
+    definitions.ClassCastExceptionClass
+    definitions.IndexOutOfBoundsExceptionClass
+    definitions.InvocationTargetExceptionClass
+    definitions.MatchErrorClass
+    definitions.NonLocalReturnControlClass
+    definitions.NullPointerExceptionClass
+    definitions.ThrowableClass
+    definitions.UninitializedErrorClass
+    definitions.UninitializedFieldConstructor
+    definitions.PartialFunctionClass
+    definitions.AbstractPartialFunctionClass
+    definitions.SymbolClass
+    definitions.StringClass
+    definitions.StringModule
+    definitions.ClassClass
+    definitions.DynamicClass
+    definitions.SysPackage
+    definitions.UnqualifiedModules
+    definitions.UnqualifiedOwners
+    definitions.PredefModule
+    definitions.SpecializableModule
+    definitions.ScalaRunTimeModule
+    definitions.SymbolModule
+    definitions.StringAddClass
+    definitions.ScalaNumberClass
+    definitions.TraitSetterAnnotationClass
+    definitions.DelayedInitClass
+    definitions.TypeConstraintClass
+    definitions.SingletonClass
+    definitions.SerializableClass
+    definitions.JavaSerializableClass
+    definitions.ComparableClass
+    definitions.JavaCloneableClass
+    definitions.JavaNumberClass
+    definitions.JavaEnumClass
+    definitions.RemoteInterfaceClass
+    definitions.RemoteExceptionClass
+    definitions.ByNameParamClass
+    definitions.JavaRepeatedParamClass
+    definitions.RepeatedParamClass
+    definitions.ConsClass
+    definitions.IteratorClass
+    definitions.IterableClass
+    definitions.ListClass
+    definitions.SeqClass
+    definitions.StringBuilderClass
+    definitions.TraversableClass
+    definitions.ListModule
+    definitions.NilModule
+    definitions.SeqModule
+    definitions.ArrayModule
+    definitions.ArrayModule_overloadedApply
+    definitions.ArrayClass
+    definitions.Array_apply
+    definitions.Array_update
+    definitions.Array_length
+    definitions.Array_clone
+    definitions.SoftReferenceClass
+    definitions.MethodClass
+    definitions.EmptyMethodCacheClass
+    definitions.MethodCacheClass
+    definitions.ScalaXmlTopScope
+    definitions.ScalaXmlPackage
+    definitions.ReflectPackage
+    definitions.ReflectApiPackage
+    definitions.ReflectRuntimePackage
+    definitions.UniverseClass
+    definitions.PartialManifestModule
+    definitions.FullManifestClass
+    definitions.FullManifestModule
+    definitions.OptManifestClass
+    definitions.NoManifest
+    definitions.TreesClass
+    definitions.ExprsClass
+    definitions.ClassTagModule
+    definitions.ClassTagClass
+    definitions.TypeTagsClass
+    definitions.ApiUniverseClass
+    definitions.JavaUniverseClass
+    definitions.MirrorClass
+    definitions.TypeCreatorClass
+    definitions.TreeCreatorClass
+    definitions.BlackboxContextClass
+    definitions.WhiteboxContextClass
+    definitions.MacroImplAnnotation
+    definitions.StringContextClass
+    definitions.QuasiquoteClass
+    definitions.QuasiquoteClass_api
+    definitions.QuasiquoteClass_api_apply
+    definitions.QuasiquoteClass_api_unapply
+    definitions.ScalaSignatureAnnotation
+    definitions.ScalaLongSignatureAnnotation
+    definitions.OptionClass
+    definitions.OptionModule
+    definitions.SomeClass
+    definitions.NoneModule
+    definitions.SomeModule
+    definitions.VarArityClass
+    definitions.ProductClass
+    definitions.TupleClass
+    definitions.FunctionClass
+    definitions.AbstractFunctionClass
+    definitions.MacroContextType
+    definitions.ProductRootClass
+    definitions.Any_$eq$eq
+    definitions.Any_$bang$eq
+    definitions.Any_equals
+    definitions.Any_hashCode
+    definitions.Any_toString
+    definitions.Any_$hash$hash
+    definitions.Any_getClass
+    definitions.Any_isInstanceOf
+    definitions.Any_asInstanceOf
+    definitions.primitiveGetClassMethods
+    definitions.getClassMethods
+    definitions.Object_$hash$hash
+    definitions.Object_$eq$eq
+    definitions.Object_$bang$eq
+    definitions.Object_eq
+    definitions.Object_ne
+    definitions.Object_isInstanceOf
+    definitions.Object_asInstanceOf
+    definitions.Object_synchronized
+    definitions.String_$plus
+    definitions.ObjectRefClass
+    definitions.VolatileObjectRefClass
+    definitions.RuntimeStaticsModule
+    definitions.BoxesRunTimeModule
+    definitions.BoxesRunTimeClass
+    definitions.BoxedNumberClass
+    definitions.BoxedCharacterClass
+    definitions.BoxedBooleanClass
+    definitions.BoxedByteClass
+    definitions.BoxedShortClass
+    definitions.BoxedIntClass
+    definitions.BoxedLongClass
+    definitions.BoxedFloatClass
+    definitions.BoxedDoubleClass
+    definitions.BoxedUnitClass
+    definitions.BoxedUnitModule
+    definitions.AnnotationClass
+    definitions.ClassfileAnnotationClass
+    definitions.StaticAnnotationClass
+    definitions.BridgeClass
+    definitions.ElidableMethodClass
+    definitions.ImplicitNotFoundClass
+    definitions.MigrationAnnotationClass
+    definitions.ScalaStrictFPAttr
+    definitions.SwitchClass
+    definitions.TailrecClass
+    definitions.VarargsClass
+    definitions.uncheckedStableClass
+    definitions.uncheckedVarianceClass
+    definitions.BeanPropertyAttr
+    definitions.BooleanBeanPropertyAttr
+    definitions.CompileTimeOnlyAttr
+    definitions.DeprecatedAttr
+    definitions.DeprecatedNameAttr
+    definitions.DeprecatedInheritanceAttr
+    definitions.DeprecatedOverridingAttr
+    definitions.NativeAttr
+    definitions.RemoteAttr
+    definitions.ScalaInlineClass
+    definitions.ScalaNoInlineClass
+    definitions.SerialVersionUIDAttr
+    definitions.SerialVersionUIDAnnotation
+    definitions.SpecializedClass
+    definitions.ThrowsClass
+    definitions.TransientAttr
+    definitions.UncheckedClass
+    definitions.UncheckedBoundsClass
+    definitions.UnspecializedClass
+    definitions.VolatileAttr
+    definitions.BeanGetterTargetClass
+    definitions.BeanSetterTargetClass
+    definitions.FieldTargetClass
+    definitions.GetterTargetClass
+    definitions.ParamTargetClass
+    definitions.SetterTargetClass
+    definitions.ObjectTargetClass
+    definitions.ClassTargetClass
+    definitions.MethodTargetClass
+    definitions.LanguageFeatureAnnot
+    definitions.languageFeatureModule
+    definitions.metaAnnotations
+    definitions.AnnotationDefaultAttr
+    definitions.isPhantomClass
+    definitions.syntheticCoreClasses
+    definitions.syntheticCoreMethods
+    definitions.hijackedCoreClasses
+    definitions.symbolsNotPresentInBytecode
+    definitions.isPossibleSyntheticParent
+    definitions.abbrvTag
+    definitions.numericWeight
+    definitions.boxedModule
+    definitions.boxedClass
+    definitions.refClass
+    definitions.volatileRefClass
+    definitions.UnitClass
+    definitions.ByteClass
+    definitions.ShortClass
+    definitions.CharClass
+    definitions.IntClass
+    definitions.LongClass
+    definitions.FloatClass
+    definitions.DoubleClass
+    definitions.BooleanClass
+    definitions.UnitTpe
+    definitions.ByteTpe
+    definitions.ShortTpe
+    definitions.CharTpe
+    definitions.IntTpe
+    definitions.LongTpe
+    definitions.FloatTpe
+    definitions.DoubleTpe
+    definitions.BooleanTpe
+    definitions.ScalaNumericValueClasses
+    definitions.ScalaValueClassesNoUnit
+    definitions.ScalaValueClasses
+
+
+    erasure.GenericArray
+    erasure.scalaErasure
+    erasure.specialScalaErasure
+    erasure.javaErasure
+    erasure.verifiedJavaErasure
+    erasure.boxingErasure
+  }
+}
diff --git a/src/reflect/scala/reflect/runtime/ReflectSetup.scala b/src/reflect/scala/reflect/runtime/ReflectSetup.scala
index 6e28fc8..6a364ff 100644
--- a/src/reflect/scala/reflect/runtime/ReflectSetup.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectSetup.scala
@@ -1,7 +1,8 @@
-package scala.reflect
+package scala
+package reflect
 package runtime
 
-import internal.{SomePhase, NoPhase, Phase, TreeGen}
+import internal.{SomePhase, NoPhase, Phase}
 
 /** A helper trait to initialize things that need to be set before JavaMirrors and other
  *  reflect specific traits are initialized */
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index ffed3cc..a4bd698 100644
--- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -3,14 +3,18 @@
  * @author  Paul Phillips
  */
 
-package scala.reflect.runtime
+package scala
+package reflect.runtime
 
 import java.lang.{Class => jClass}
 import java.lang.reflect.{ Method, InvocationTargetException, UndeclaredThrowableException }
+import scala.reflect.internal.util.AbstractFileClassLoader
+import scala.reflect.io._
+import java.io.{File => JFile}
 
 /** A few java-reflection oriented utility functions useful during reflection bootstrapping.
  */
-private[scala] object ReflectionUtils {
+object ReflectionUtils {
   // Unwraps some chained exceptions which arise during reflective calls.
   def unwrapThrowable(x: Throwable): Throwable = x match {
     case  _: InvocationTargetException |      // thrown by reflectively invoked method or constructor
@@ -33,8 +37,8 @@ private[scala] object ReflectionUtils {
 
     def isAbstractFileClassLoader(clazz: Class[_]): Boolean = {
       if (clazz == null) return false
-      if (clazz.getName == "scala.tools.nsc.interpreter.AbstractFileClassLoader") return true
-      return isAbstractFileClassLoader(clazz.getSuperclass)
+      if (clazz == classOf[AbstractFileClassLoader]) return true
+      isAbstractFileClassLoader(clazz.getSuperclass)
     }
     def inferClasspath(cl: ClassLoader): String = cl match {
       case cl: java.net.URLClassLoader =>
@@ -82,4 +86,87 @@ private[scala] object ReflectionUtils {
   def scalacShouldntLoadClassfile(fileName: String) = isTraitImplementation(fileName)
 
   def scalacShouldntLoadClass(name: scala.reflect.internal.SymbolTable#Name) = scalacShouldntLoadClassfile(name + ".class")
+
+  object PrimitiveOrArray {
+    def unapply(jclazz: jClass[_]) = jclazz.isPrimitive || jclazz.isArray
+  }
+
+  class EnclosedIn[T](enclosure: jClass[_] => T) {
+    def unapply(jclazz: jClass[_]): Option[T] = if (enclosure(jclazz) != null) Some(enclosure(jclazz)) else None
+  }
+
+  object EnclosedInMethod extends EnclosedIn(_.getEnclosingMethod)
+  object EnclosedInConstructor extends EnclosedIn(_.getEnclosingConstructor)
+  object EnclosedInClass extends EnclosedIn(_.getEnclosingClass)
+  object EnclosedInPackage extends EnclosedIn(_.getPackage)
+
+  def associatedFile(clazz: Class[_]): AbstractFile = {
+    // TODO: I agree with Jason - this implementation isn't something that we'd like to support
+    // therefore I'm leaving it commented out, and this function will now return NoAbstractFile
+    // I think we can keep the source code though, because it can be useful to others
+    //
+    // def inferAssociatedFile(clazz: Class[_]): AbstractFile = {
+    //   // http://stackoverflow.com/questions/227486/find-where-java-class-is-loaded-from
+    //   try {
+    //     var cl = clazz.getClassLoader()
+    //     if (cl == null) {
+    //       cl = ClassLoader.getSystemClassLoader()
+    //       while (cl != null && cl.getParent != null) cl = cl.getParent
+    //     }
+    //     var result: AbstractFile = null
+    //     if (cl != null) {
+    //       val name = clazz.getCanonicalName()
+    //       val resource = cl.getResource(name.replace(".", "/") + ".class")
+    //       if (resource != null) {
+    //         def fromFile(file: String) = AbstractFile.getFile(file)
+    //         def fromJarEntry(jarfile: String, entrypath: String) = {
+    //           val jar = fromFile(jarfile)
+    //           new VirtualFile(clazz.getName, entrypath) {
+    //             lazy val impl: AbstractFile = {
+    //               def loop(root: AbstractFile, path: List[String]): AbstractFile = {
+    //                 def find(name: String) = root.iterator.find(_.name == name).getOrElse(NoAbstractFile)
+    //                 path match {
+    //                   case step :: Nil => find(step)
+    //                   case step :: rest => loop(find(step), rest)
+    //                   case Nil => NoAbstractFile
+    //                 }
+    //               }
+    //               loop(ZipArchive.fromFile(new JFile(jarfile)), entrypath.split("/").toList)
+    //             }
+    //             override def container        = impl.container
+    //             override def lastModified     = impl.lastModified
+    //             override def input            = impl.input
+    //             override def sizeOption       = impl.sizeOption
+    //             override def underlyingSource = Some(jar)
+    //             override def toString         = jarfile + "(" + entrypath + ")"
+    //           }
+    //         }
+    //         def fallback() = new VirtualFile(clazz.getName, resource.toString)
+    //         result = resource.getProtocol match {
+    //           case "file" =>
+    //             fromFile(resource.getFile)
+    //           case "jar" =>
+    //             val intrajarUrl = new java.net.URL(resource.getFile)
+    //             intrajarUrl.getProtocol match {
+    //               case "file" =>
+    //                 val file = intrajarUrl.getFile()
+    //                 val expectedSuffix = "!/" + name.replace(".", "/") + ".class"
+    //                 if (file.endsWith(expectedSuffix)) fromJarEntry(file.stripSuffix(expectedSuffix), expectedSuffix.substring(2))
+    //                 else fallback()
+    //               case _ => fallback()
+    //             }
+    //           case _ =>
+    //             fallback()
+    //         }
+    //       }
+    //     }
+    //     if (result != null) result else NoAbstractFile
+    //   } catch {
+    //     case _: Exception => NoAbstractFile
+    //   }
+    // }
+    // inferAssociatedFile(clazz)
+    NoAbstractFile
+  }
 }
+
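
A hypothetical sketch of how the EnclosedIn extractors above could be used to classify a java.lang.Class by its lexical enclosure (describe is an illustrative name, not an existing helper):

    import scala.reflect.runtime.ReflectionUtils._

    object EnclosureSketch {
      def describe(jclazz: Class[_]): String = jclazz match {
        case PrimitiveOrArray()       => "primitive or array"
        case EnclosedInMethod(m)      => s"local class inside method $m"
        case EnclosedInConstructor(c) => s"local class inside constructor $c"
        case EnclosedInClass(outer)   => s"member class of ${outer.getName}"
        case EnclosedInPackage(pkg)   => s"top-level class in package ${pkg.getName}"
        case _                        => "top-level class in the default package"
      }
    }
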
diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala
index 0e0cf3f..27d574b 100644
--- a/src/reflect/scala/reflect/runtime/Settings.scala
+++ b/src/reflect/scala/reflect/runtime/Settings.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package runtime
 
 import scala.reflect.internal.settings.MutableSettings
@@ -32,12 +33,15 @@ private[reflect] class Settings extends MutableSettings {
   val Xexperimental     = new BooleanSetting(false)
   val XfullLubs         = new BooleanSetting(false)
   val XnoPatmatAnalysis = new BooleanSetting(false)
-  val XoldPatmat        = new BooleanSetting(false)
+  val strictInference   = new BooleanSetting(false)
   val Xprintpos         = new BooleanSetting(false)
-  val Ynotnull          = new BooleanSetting(false)
+  val Yposdebug         = new BooleanSetting(false)
+  val Yrangepos         = new BooleanSetting(false)
+  val Yshowsymowners    = new BooleanSetting(false)
   val Yshowsymkinds     = new BooleanSetting(false)
+  val breakCycles       = new BooleanSetting(false)
   val debug             = new BooleanSetting(false)
-  val deepCloning       = new BooleanSetting(false)
+  val developer         = new BooleanSetting(false)
   val explaintypes      = new BooleanSetting(false)
   val overrideObjects   = new BooleanSetting(false)
   val printtypes        = new BooleanSetting(false)
@@ -46,4 +50,5 @@ private[reflect] class Settings extends MutableSettings {
 
   val Yrecursion        = new IntSetting(0)
   val maxClassfileName  = new IntSetting(255)
+  def isScala211        = true
 }
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index b895092..c56bc28 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -1,10 +1,12 @@
-package scala.reflect
+package scala
+package reflect
 package runtime
 
 import internal.Flags
 import java.lang.{Class => jClass, Package => jPackage}
 import scala.collection.mutable
 import scala.reflect.runtime.ReflectionUtils.scalacShouldntLoadClass
+import scala.reflect.internal.Flags._
 
 private[reflect] trait SymbolLoaders { self: SymbolTable =>
 
@@ -16,37 +18,16 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
    *  is found, a package is created instead.
    */
   class TopClassCompleter(clazz: Symbol, module: Symbol) extends SymLoader with FlagAssigningCompleter {
-//    def makePackage() {
-//      println("wrong guess; making package "+clazz)
-//      val ptpe = newPackageType(module.moduleClass)
-//      for (sym <- List(clazz, module, module.moduleClass)) {
-//        sym setFlag Flags.PACKAGE
-//        sym setInfo ptpe
-//      }
-//    }
-
+    markFlagsCompleted(clazz, module)(mask = ~TopLevelPickledFlags)
     override def complete(sym: Symbol) = {
       debugInfo("completing "+sym+"/"+clazz.fullName)
       assert(sym == clazz || sym == module || sym == module.moduleClass)
-//      try {
-      atPhaseNotLaterThan(picklerPhase) {
+      slowButSafeEnteringPhaseNotLaterThan(picklerPhase) {
         val loadingMirror = mirrorThatLoaded(sym)
         val javaClass = loadingMirror.javaClass(clazz.javaClassName)
         loadingMirror.unpickleClass(clazz, module, javaClass)
-//      } catch {
-//        case ex: ClassNotFoundException => makePackage()
-//        case ex: NoClassDefFoundError => makePackage()
-          // Note: We catch NoClassDefFoundError because there are situations
-          // where a package and a class have the same name except for capitalization.
-          // It seems in this case the class is loaded even if capitalization differs
-          // but then a NoClassDefFound error is issued with a ("wrong name: ...")
-          // reason. (I guess this is a concession to Windows).
-          // The present behavior is a bit too forgiving, in that it masks
-          // all class load errors, not just wrong name errors. We should try
-          // to be more discriminating. To get on the right track simply delete
-          // the clause above and load a collection class such as collection.Iterable.
-          // You'll see an error that class `parallel` has the wrong name.
-//      }
+        // NOTE: can't mark as thread-safe here, because unpickleClass might decide to delegate to FromJavaClassCompleter
+        // if (!isCompilerUniverse) markAllCompleted(clazz, module)
       }
     }
     override def load(sym: Symbol) = complete(sym)
@@ -58,7 +39,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
    *  @param name    The simple name of the newly created class
    *  @param completer  The completer to be used to set the info of the class and the module
    */
-  protected def createClassModule(owner: Symbol, name: TypeName, completer: (Symbol, Symbol) => LazyType) = {
+  protected def initAndEnterClassAndModule(owner: Symbol, name: TypeName, completer: (Symbol, Symbol) => LazyType) = {
     assert(!(name.toString endsWith "[]"), name)
     val clazz = owner.newClass(name)
     val module = owner.newModule(name.toTermName)
@@ -68,7 +49,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
       owner.info.decls enter clazz
       owner.info.decls enter module
     }
-    initClassModule(clazz, module, completer(clazz, module))
+    initClassAndModule(clazz, module, completer(clazz, module))
     (clazz, module)
   }
 
@@ -76,7 +57,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
     List(clazz, module, module.moduleClass) foreach (_ setInfo info)
   }
 
-  protected def initClassModule(clazz: Symbol, module: Symbol, completer: LazyType) =
+  protected def initClassAndModule(clazz: Symbol, module: Symbol, completer: LazyType) =
     setAllInfos(clazz, module, completer)
 
   /** The type completer for packages.
@@ -87,15 +68,56 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
       sym setInfo new ClassInfoType(List(), new PackageScope(sym), sym)
         // override def safeToString = pkgClass.toString
       openPackageModule(sym)
+      markAllCompleted(sym)
     }
   }
 
+
+  // Since runtime reflection doesn't have a luxury of enumerating all classes
+  // on the classpath, it has to materialize symbols for top-level definitions
+  // (packages, classes, objects) on demand.
+  //
+  // Someone asks us for a class named `foo.Bar`? Easy. Let's speculatively create
+  // a package named `foo` and then look up `newTypeName("bar")` in its decls.
+  // This lookup, implemented in `SymbolLoaders.PackageScope`, tests the waters by
+  // trying `Class.forName("foo.Bar")` and then creates a ClassSymbol upon
+  // success (the whole story is a bit longer, but the rest is irrelevant here).
+  //
+  // That's all neat, but these non-deterministic mutations of the global symbol
+  // table give a lot of trouble in a multi-threaded setting. One of the most common
+  // reflection crashes happens when multiple threads trigger symbol
+  // materialization multiple times for the same symbol, making subsequent
+  // reflective operations stumble upon outrageous stuff like overloaded packages.
+  //
+  // Short of significantly changing SymbolLoaders I see no other way than just
+  // to slap a global lock on materialization in runtime reflection.
   class PackageScope(pkgClass: Symbol) extends Scope(initFingerPrints = -1L) // disable fingerprinting as we do not know entries beforehand
       with SynchronizedScope {
     assert(pkgClass.isType)
-    // disable fingerprinting as we do not know entries beforehand
-    private val negatives = mutable.Set[Name]() // Syncnote: Performance only, so need not be protected.
-    override def lookupEntry(name: Name): ScopeEntry = {
+
+    // materializing multiple copies of the same symbol in PackageScope is a very common bug
+    // this override does its best to guard against it
+    override def enter[T <: Symbol](sym: T): T = {
+      // workaround for SI-7728
+      if (isCompilerUniverse) super.enter(sym)
+      else {
+        val existing = super.lookupEntry(sym.name)
+        assert(existing == null || existing.sym.isMethod, s"pkgClass = $pkgClass, sym = $sym, existing = $existing")
+        super.enter(sym)
+      }
+    }
+
+    override def enterIfNew[T <: Symbol](sym: T): T = {
+      val existing = super.lookupEntry(sym.name)
+      if (existing == null) enter(sym)
+      else existing.sym.asInstanceOf[T]
+    }
+
+    // package scopes need to synchronize on the GIL
+    // because lookupEntry might cause changes to the global symbol table
+    override def syncLockSynchronized[T](body: => T): T = gilSynchronized(body)
+    private val negatives = new mutable.HashSet[Name]
+    override def lookupEntry(name: Name): ScopeEntry = syncLockSynchronized {
       val e = super.lookupEntry(name)
       if (e != null)
         e
@@ -109,17 +131,30 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
         currentMirror.tryJavaClass(path) match {
           case Some(cls) =>
             val loadingMirror = currentMirror.mirrorDefining(cls)
-            val (clazz, module) =
+            val (_, module) =
               if (loadingMirror eq currentMirror) {
-                createClassModule(pkgClass, name.toTypeName, new TopClassCompleter(_, _))
+                initAndEnterClassAndModule(pkgClass, name.toTypeName, new TopClassCompleter(_, _))
               } else {
                 val origOwner = loadingMirror.packageNameToScala(pkgClass.fullName)
                 val clazz = origOwner.info decl name.toTypeName
                 val module = origOwner.info decl name.toTermName
                 assert(clazz != NoSymbol)
                 assert(module != NoSymbol)
-                pkgClass.info.decls enter clazz
-                pkgClass.info.decls enter module
+                // currentMirror.mirrorDefining(cls) can have the side effect of entering symbols into pkgClass.info.decls
+                // therefore, even though super.lookupEntry(name) returned null at the beginning of this method,
+                // entering clazz/module now will result in a double-enter assertion in PackageScope.enter
+                // here's how it might happen
+                // 1) we are the rootMirror
+                // 2) cls.getClassLoader is different from our classloader
+                // 3) mirrorDefining(cls) looks up a mirror corresponding to that classloader and cannot find it
+                // 4) mirrorDefining creates a new mirror
+                // 5) that triggers Mirror.init() of the new mirror
+                // 6) that triggers definitions.syntheticCoreClasses
+                // 7) that might materialize symbols and enter them into our scope (because syntheticCoreClasses live in rootMirror)
+                // 8) now we come back here and try to enter one of the now entered symbols => BAM!
+                // therefore we use enterIfNew rather than just enter
+                enterIfNew(clazz)
+                enterIfNew(module)
                 (clazz, module)
               }
             debugInfo(s"created $module/${module.moduleClass} in $pkgClass")
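
For context, the on-demand materialization described above is what a plain staticClass/staticModule lookup goes through: the owning packages are created speculatively and PackageScope.lookupEntry falls back to tryJavaClass. A minimal sketch (not from this commit) of that entry point:

    import scala.reflect.runtime.{universe => ru}

    object LookupSketch extends App {
      val mirror = ru.runtimeMirror(getClass.getClassLoader)
      // First use materializes the scala.collection.immutable package chain and the
      // List class/module symbols through the PackageScope lookup shown above.
      println(mirror.staticClass("scala.collection.immutable.List").fullName)
      println(mirror.staticModule("scala.collection.immutable.List").fullName)
    }
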
diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala
index 5c08e9a..0215557 100644
--- a/src/reflect/scala/reflect/runtime/SymbolTable.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package runtime
 
 import scala.reflect.internal.Flags._
@@ -8,13 +9,13 @@ import scala.reflect.internal.Flags._
  *  It can be used either from a reflexive universe (class scala.reflect.runtime.JavaUniverse), or else from
  *  a runtime compiler that uses reflection to get a class information (class scala.tools.reflect.ReflectGlobal)
  */
-private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoaders with SynchronizedOps {
+private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoaders with SynchronizedOps with Gil with ThreadLocalStorage {
 
   def info(msg: => String) =
-    if (settings.verbose.value) println("[reflect-compiler] "+msg)
+    if (settings.verbose) println("[reflect-compiler] "+msg)
 
   def debugInfo(msg: => String) =
-    if (settings.debug.value) info(msg)
+    if (settings.debug) info(msg)
 
   /** Declares that this is a runtime reflection universe.
    *
@@ -27,19 +28,4 @@ private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors w
    *  in order to prevent memory leaks: http://groups.google.com/group/scala-internals/browse_thread/thread/eabcf3d406dab8b2.
    */
   override def isCompilerUniverse = false
-
-  /** Unlike compiler universes, reflective universes can auto-initialize symbols on flag requests.
-   *
-   *  scalac wasn't designed with such auto-initialization in mind, and quite often it makes assumptions
-   *  that flag requests won't cause initialization. Therefore enabling auto-init leads to cyclic errors.
-   *  We could probably fix those, but at the moment it's too risky.
-   *
-   *  Reflective universes share codebase with scalac, but their surface is much smaller, which means less assumptions.
-   *  These assumptions are taken care of in this overriden `shouldTriggerCompleter` method.
-   */
-  override protected def shouldTriggerCompleter(symbol: Symbol, completer: Type, isFlagRelated: Boolean, mask: Long) =
-    completer match {
-      case _: TopClassCompleter | _: JavaClassCompleter => !isFlagRelated || (mask & TopLevelPickledFlags) != 0
-      case _ => super.shouldTriggerCompleter(symbol, completer, isFlagRelated, mask)
-    }
 }
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
index 7b280e5..c909014 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package runtime
 
 // SI-6240: test thread-safety, make trees synchronized as well
@@ -8,28 +9,30 @@ private[reflect] trait SynchronizedOps extends internal.SymbolTable
 
 // Names
 
-  private lazy val nameLock = new Object
-
-  override def newTermName(s: String): TermName = nameLock.synchronized { super.newTermName(s) }
-  override def newTypeName(s: String): TypeName = nameLock.synchronized { super.newTypeName(s) }
+  override protected def synchronizeNames = true
 
 // BaseTypeSeqs
 
   override protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
-    new BaseTypeSeq(parents, elems) with SynchronizedBaseTypeSeq
+    // only need to synchronize BaseTypeSeqs if they contain refined types
+    if (elems.filter(_.isInstanceOf[RefinedType]).nonEmpty) new BaseTypeSeq(parents, elems) with SynchronizedBaseTypeSeq
+    else new BaseTypeSeq(parents, elems)
 
   trait SynchronizedBaseTypeSeq extends BaseTypeSeq {
-    override def apply(i: Int): Type = synchronized { super.apply(i) }
-    override def rawElem(i: Int) = synchronized { super.rawElem(i) }
-    override def typeSymbol(i: Int): Symbol = synchronized { super.typeSymbol(i) }
-    override def toList: List[Type] = synchronized { super.toList }
-    override def copy(head: Type, offset: Int): BaseTypeSeq = synchronized { super.copy(head, offset) }
-    override def map(f: Type => Type): BaseTypeSeq = synchronized { super.map(f) }
-    override def exists(p: Type => Boolean): Boolean = synchronized { super.exists(p) }
-    override lazy val maxDepth = synchronized { maxDepthOfElems }
-    override def toString = synchronized { super.toString }
-
-    override def lateMap(f: Type => Type): BaseTypeSeq = new MappedBaseTypeSeq(this, f) with SynchronizedBaseTypeSeq
+    override def apply(i: Int): Type = gilSynchronized { super.apply(i) }
+    override def rawElem(i: Int) = gilSynchronized { super.rawElem(i) }
+    override def typeSymbol(i: Int): Symbol = gilSynchronized { super.typeSymbol(i) }
+    override def toList: List[Type] = gilSynchronized { super.toList }
+    override def copy(head: Type, offset: Int): BaseTypeSeq = gilSynchronized { super.copy(head, offset) }
+    override def map(f: Type => Type): BaseTypeSeq = gilSynchronized { super.map(f) }
+    override def exists(p: Type => Boolean): Boolean = gilSynchronized { super.exists(p) }
+    override lazy val maxDepth = gilSynchronized { maxDepthOfElems }
+    override def toString = gilSynchronized { super.toString }
+
+    override def lateMap(f: Type => Type): BaseTypeSeq =
+      // only need to synchronize BaseTypeSeqs if they contain refined types
+      if (map(f).toList.filter(_.isInstanceOf[RefinedType]).nonEmpty) new MappedBaseTypeSeq(this, f) with SynchronizedBaseTypeSeq
+      else new MappedBaseTypeSeq(this, f)
   }
 
 // Scopes
@@ -38,15 +41,19 @@ private[reflect] trait SynchronizedOps extends internal.SymbolTable
   override def newNestedScope(outer: Scope): Scope = new Scope(outer) with SynchronizedScope
 
   trait SynchronizedScope extends Scope {
-    override def isEmpty: Boolean = synchronized { super.isEmpty }
-    override def size: Int = synchronized { super.size }
-    override def enter[T <: Symbol](sym: T): T = synchronized { super.enter(sym) }
-    override def rehash(sym: Symbol, newname: Name) = synchronized { super.rehash(sym, newname) }
-    override def unlink(e: ScopeEntry) = synchronized { super.unlink(e) }
-    override def unlink(sym: Symbol) = synchronized { super.unlink(sym) }
-    override def lookupAll(name: Name) = synchronized { super.lookupAll(name) }
-    override def lookupEntry(name: Name) = synchronized { super.lookupEntry(name) }
-    override def lookupNextEntry(entry: ScopeEntry) = synchronized { super.lookupNextEntry(entry) }
-    override def toList: List[Symbol] = synchronized { super.toList }
+    // we can keep this lock fine-grained, because methods of Scope don't do anything extraordinary, which makes deadlocks impossible
+    // fancy subclasses of internal.Scopes#Scope should do synchronization themselves (e.g. see PackageScope for an example)
+    private lazy val syncLock = new Object
+    def syncLockSynchronized[T](body: => T): T = if (isCompilerUniverse) body else syncLock.synchronized { body }
+    override def isEmpty: Boolean = syncLockSynchronized { super.isEmpty }
+    override def size: Int = syncLockSynchronized { super.size }
+    override def enter[T <: Symbol](sym: T): T = syncLockSynchronized { super.enter(sym) }
+    override def rehash(sym: Symbol, newname: Name) = syncLockSynchronized { super.rehash(sym, newname) }
+    override def unlink(e: ScopeEntry) = syncLockSynchronized { super.unlink(e) }
+    override def unlink(sym: Symbol) = syncLockSynchronized { super.unlink(sym) }
+    override def lookupAll(name: Name) = syncLockSynchronized { super.lookupAll(name) }
+    override def lookupEntry(name: Name) = syncLockSynchronized { super.lookupEntry(name) }
+    override def lookupNextEntry(entry: ScopeEntry) = syncLockSynchronized { super.lookupNextEntry(entry) }
+    override def toList: List[Symbol] = syncLockSynchronized { super.toList }
   }
 }
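
Throughout these changes, per-object `synchronized` blocks give way to `gilSynchronized`, a single global lock over the whole reflective symbol table that is skipped entirely inside the compiler universe. A minimal sketch of the pattern, assuming a plain ReentrantLock underneath (the names below are illustrative, not the exact scala.reflect internals):

    trait GilExample {
      def isCompilerUniverse: Boolean
      // Coarse-grained "GIL": one lock guards all thread-unsafe reflection state;
      // the compiler universe synchronizes by other means, so it bypasses the lock.
      private lazy val gil = new java.util.concurrent.locks.ReentrantLock
      @inline final def gilSynchronized[T](body: => T): T =
        if (isCompilerUniverse) body
        else {
          gil.lock()
          try body
          finally gil.unlock()
        }
    }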
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
index 00f6952..f5e16c6 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -1,18 +1,26 @@
-package scala.reflect
+package scala
+package reflect
 package runtime
 
 import scala.reflect.io.AbstractFile
+import scala.collection.{ immutable, mutable }
+import scala.reflect.internal.Flags._
 
 private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
 
-  override protected def nextId() = synchronized { super.nextId() }
+  private lazy val atomicIds = new java.util.concurrent.atomic.AtomicInteger(0)
+  override protected def nextId() = atomicIds.incrementAndGet()
 
-  override protected def freshExistentialName(suffix: String) =
-    synchronized { super.freshExistentialName(suffix) }
+  private lazy val atomicExistentialIds = new java.util.concurrent.atomic.AtomicInteger(0)
+  override protected def nextExistentialId() = atomicExistentialIds.incrementAndGet()
+
+  private lazy val _recursionTable = mkThreadLocalStorage(immutable.Map.empty[Symbol, Int])
+  override def recursionTable = _recursionTable.get
+  override def recursionTable_=(value: immutable.Map[Symbol, Int]) = _recursionTable.set(value)
 
   // Set the fields which point companions at one another.  Returns the module.
   override def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol =
-    synchronized { super.connectModuleToClass(m, moduleClass) }
+    gilSynchronized { super.connectModuleToClass(m, moduleClass) }
 
   override def newFreeTermSymbol(name: TermName, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
     new FreeTermSymbol(name, value, origin) with SynchronizedTermSymbol initFlags flags
@@ -24,35 +32,127 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
 
   trait SynchronizedSymbol extends Symbol {
 
-    override def rawflags = synchronized { super.rawflags }
-    override def rawflags_=(x: Long) = synchronized { super.rawflags_=(x) }
-
-    override def rawowner = synchronized { super.rawowner }
-    override def owner_=(owner: Symbol) = synchronized { super.owner_=(owner) }
-
-    override def validTo = synchronized { super.validTo }
-    override def validTo_=(x: Period) = synchronized { super.validTo_=(x) }
-
-    override def pos = synchronized { super.pos }
-    override def setPos(pos: Position): this.type = { synchronized { super.setPos(pos) }; this }
-
-    override def privateWithin = synchronized { super.privateWithin }
-    override def privateWithin_=(sym: Symbol) = synchronized { super.privateWithin_=(sym) }
-
-    override def info = synchronized { super.info }
-    override def info_=(info: Type) = synchronized { super.info_=(info) }
-    override def updateInfo(info: Type): Symbol = synchronized { super.updateInfo(info) }
-    override def rawInfo: Type = synchronized { super.rawInfo }
-
-    override def typeParams: List[Symbol] = synchronized { super.typeParams }
-
-    override def reset(completer: Type): this.type = synchronized { super.reset(completer) }
-
-    override def infosString: String = synchronized { super.infosString }
-
-    override def annotations: List[AnnotationInfo] = synchronized { super.annotations }
-    override def setAnnotations(annots: List[AnnotationInfo]): this.type = { synchronized { super.setAnnotations(annots) }; this }
-
+    /** (Things written in this comment only applies to runtime reflection. Compile-time reflection,
+     *  especially across phases and runs, is somewhat more complicated, but we won't be touching it,
+     *  because at the moment we only care about synchronizing runtime reflection).
+     *
+     *  As it has been noted on multiple occasions, generally speaking, reflection artifacts aren't thread-safe.
+     *  Reasons for that differ from artifact to artifact. In some cases it's quite bad (e.g. types use a number
+     *  of non-concurrent compiler caches, so we need to serialize certain operations on types in order to make
+     *  sure that things stay deterministic). However, in the case of symbols there's hope, because it's only during
+     *  initialization that symbols are thread-unsafe. After everything's set up, symbols become immutable
+     *  (sans a few deterministic caches that can be populated simultaneously by multiple threads) and therefore thread-safe.
+     *
+     *  Note that by saying "symbols become immutable" I mean literally that. In a very common case of PackageClassSymbol's,
+     *  even when a symbol finishes its initialization and becomes immutable, its info forever remains mutable.
+     *  Therefore even if we no longer need to synchronize a PackageClassSymbol after it's initialized, we still have to take
+     *  care of its ClassInfoType (or, more precisely, of the underlying Scope), but that's done elsewhere, and
+     *  here we don't need to worry about that.
+     *
+     *  Okay, so now we simply check `Symbol.isInitialized` and if it's true, then everything's fine? Haha, nope!
+     *  The thing is that some completers call sym.setInfo when still in-flight and then proceed with initialization
+     *  (e.g. see LazyPackageType). Consequently, setInfo sets _validTo to current period, which means that after
+     *  a call to setInfo isInitialized will start returning true. Unfortunately, this doesn't mean that info becomes
+     *  ready to be used, because subsequent initialization might change the info.
+     *
+     *  Therefore we need to somehow distinguish between initialized and really initialized symbol states.
+     *  Okay, let's do it on per-completer basis. We have seven kinds of completers to worry about:
+     *    1) LazyPackageType that initializes packages and their underlying package classes
+     *    2) TopClassCompleter that initializes top-level Scala-based class-module companion pairs of static definitions
+     *    3) LazyTypeRef and LazyTypeRefAndAlias set up by TopClassCompleter that initialize (transitive) members of top-level classes/modules
+     *    4) FromJavaClassCompleter that does the same for both top-level and non-toplevel Java-based classes/modules
+     *    5) Fully-initialized signatures of non-class/module Java-based reflection artifacts
+     *    6) Importing completer that transfers metadata from one universe to another
+     *    7) Signatures of special symbols such as roots and symbolsNotPresentInBytecode
+     *
+     *  The mechanisms underlying completion are quite complex, and it'd be only natural to suppose that over time we're going to overlook something.
+     *  Wrt isThreadsafe we could have two wrong situations: false positives (isThreadsafe = true, but the symbol isn't actually threadsafe)
+     *  and false negatives (isThreadsafe = false, but the symbol is actually threadsafe). However, even though both are wrong, only the former
+     *  is actively malicious. Indeed, false positives might lead to races, inconsistent state and crashes, while the latter would only cause
+     *  `initialize` to be called and a gil to be taken on every potentially auto-initializable operation. Unpleasant yes, but still robust.
+     *
+     *  What makes me hopeful is that:
+     *    1) By default (e.g. if some new completion mechanism gets introduced for a special flavor of symbols and we forget to call markCompleted)
+     *       isThreadsafe is always in false negative state, which is unpleasant but safe.
+     *    2) Calls to `markCompleted`, which are the only potential source of erroneous behavior, are few and relatively easy to place:
+     *       just put them right before your completer's `complete` returns, and you should be fine.
+     *
+     *  upd. Actually, there's another problem of not keeping initialization mask up-to-date. If we're not careful enough,
+     *  then it might so happen that getting a certain flag that the compiler assumes to be definitively set will spuriously
+     *  return isThreadsafe(purpose = FlagsOp(<flag>)) = false and that will lead to spurious auto-initialization,
+     *  which will cause an SO or a cyclic reference or some other crash. I've done my best to go through all possible completers
+     *  and call `markFlagsCompleted` where appropriate, but again over time something might be overlooked, so to guard against that
+     *  I'm only considering TopLevelPickledFlags to be sources of potential initialization. This ensures that such system flags as
+     *  isMethod, isModule or isPackage are never going to auto-initialize.
+     */
+    override def isThreadsafe(purpose: SymbolOps) = {
+      if (isCompilerUniverse) false
+      else if (_initialized) true
+      else purpose.isFlagRelated && (_initializationMask & purpose.mask & TopLevelPickledFlags) == 0
+    }
+
+    /** Communicates with completers declared in scala.reflect.runtime.SymbolLoaders
+     *  about the status of initialization of the underlying symbol.
+     *
+     *  Unfortunately, it's not as easy as just introducing the `markThreadsafe` method that would be called
+     *  by the completers when they are really done (as opposed to `setInfo` that, as mentioned above, doesn't mean anything).
+     *
+     *  Since we also want to auto-initialize symbols when certain methods are being called (`Symbol.hasFlag` for example),
+     *  we need to track the identity of the initializer, so as to block until initialization is complete if the caller
+     *  comes from a different thread, but to skip auto-initialization if we're the initializing thread.
+     *
+     *  Just a volatile var is fine, because:
+     *    1) Status can only be changed in a single-threaded fashion (this is enforced by gilSynchronized
+     *       that effectively guards `Symbol.initialize`), which means that there can't be update conflicts.
+     *    2) If someone reads a stale value of status, then the worst thing that might happen is that this someone
+     *       is going to spuriously call `initialize`, which is either a gil-protected operation (if the symbol isn't inited yet)
+     *       or a no-op (if the symbol is already inited), and that is fine in both cases.
+     *
+     *  upd. It looks like we also need to keep track of a mask of initialized flags to make sure
+     *  that normal symbol initialization routines don't trigger auto-init in Symbol.flags-related routines (e.g. Symbol.getFlag).
+     *  Due to the same reasoning as above, a single volatile var is enough to store the mask.
+     */
+    @volatile private[this] var _initialized = false
+    @volatile private[this] var _initializationMask = TopLevelPickledFlags
+    override def markFlagsCompleted(mask: Long): this.type = { _initializationMask = _initializationMask & ~mask; this }
+    override def markAllCompleted(): this.type = { _initializationMask = 0L; _initialized = true; this }
+
+    def gilSynchronizedIfNotThreadsafe[T](body: => T): T = {
+      // TODO: debug and fix the race that doesn't allow us uncomment this optimization
+      // if (isCompilerUniverse || isThreadsafe(purpose = AllOps)) body
+      // else gilSynchronized { body }
+      gilSynchronized { body }
+    }
+
+    override def validTo = gilSynchronizedIfNotThreadsafe { super.validTo }
+    override def info = gilSynchronizedIfNotThreadsafe { super.info }
+    override def rawInfo: Type = gilSynchronizedIfNotThreadsafe { super.rawInfo }
+    override def typeSignature: Type = gilSynchronizedIfNotThreadsafe { super.typeSignature }
+    override def typeSignatureIn(site: Type): Type = gilSynchronizedIfNotThreadsafe { super.typeSignatureIn(site) }
+
+    override def typeParams: List[Symbol] = gilSynchronizedIfNotThreadsafe {
+      if (isCompilerUniverse) super.typeParams
+      else {
+        if (isMonomorphicType) Nil
+        else {
+          // analogously to the "info" getter, here we allow for two completions:
+          //   one: sourceCompleter to LazyType, two: LazyType to completed type
+          if (validTo == NoPeriod)
+            rawInfo load this
+          if (validTo == NoPeriod)
+            rawInfo load this
+
+          rawInfo.typeParams
+        }
+      }
+    }
+    override def unsafeTypeParams: List[Symbol] = gilSynchronizedIfNotThreadsafe {
+      if (isCompilerUniverse) super.unsafeTypeParams
+      else {
+        if (isMonomorphicType) Nil
+        else rawInfo.typeParams
+      }
+    }
 
 // ------ creators -------------------------------------------------------------------
 
@@ -83,58 +183,44 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
     override protected def createPackageObjectClassSymbol(pos: Position, newFlags: Long): PackageObjectClassSymbol =
       new PackageObjectClassSymbol(this, pos) with SynchronizedClassSymbol initFlags newFlags
 
-    override protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol =
-      new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
-
     override protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol =
       new MethodSymbol(this, pos, name) with SynchronizedMethodSymbol initFlags newFlags
 
     override protected def createModuleSymbol(name: TermName, pos: Position, newFlags: Long): ModuleSymbol =
       new ModuleSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
 
-    override protected def createPackageSymbol(name: TermName, pos: Position, newFlags: Long): ModuleSymbol = createModuleSymbol(name, pos, newFlags)
+    override protected def createPackageSymbol(name: TermName, pos: Position, newFlags: Long): ModuleSymbol =
+      createModuleSymbol(name, pos, newFlags)
+
+    override protected def createValueParameterSymbol(name: TermName, pos: Position, newFlags: Long) =
+      new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
 
-    // TODO
-    // override protected def createValueParameterSymbol(name: TermName, pos: Position, newFlags: Long)
-    // override protected def createValueMemberSymbol(name: TermName, pos: Position, newFlags: Long)
+    override protected def createValueMemberSymbol(name: TermName, pos: Position, newFlags: Long) =
+      new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
   }
 
 // ------- subclasses ---------------------------------------------------------------------
 
-  trait SynchronizedTermSymbol extends TermSymbol with SynchronizedSymbol {
-    override def name_=(x: Name) = synchronized { super.name_=(x) }
-    override def rawname = synchronized { super.rawname }
-    override def referenced: Symbol = synchronized { super.referenced }
-    override def referenced_=(x: Symbol) = synchronized { super.referenced_=(x) }
-  }
+  trait SynchronizedTermSymbol extends SynchronizedSymbol
 
   trait SynchronizedMethodSymbol extends MethodSymbol with SynchronizedTermSymbol {
-    override def typeAsMemberOf(pre: Type): Type = synchronized { super.typeAsMemberOf(pre) }
-    override def paramss: List[List[Symbol]] = synchronized { super.paramss }
-    override def returnType: Type = synchronized { super.returnType }
+    // we can keep this lock fine-grained, because it's just a cache over asSeenFrom, which makes deadlocks impossible
+    // unfortunately we cannot elide this lock, because the cache depends on `pre`
+    private lazy val typeAsMemberOfLock = new Object
+    override def typeAsMemberOf(pre: Type): Type = gilSynchronizedIfNotThreadsafe { typeAsMemberOfLock.synchronized { super.typeAsMemberOf(pre) } }
   }
 
+  trait SynchronizedModuleSymbol extends ModuleSymbol with SynchronizedTermSymbol
+
   trait SynchronizedTypeSymbol extends TypeSymbol with SynchronizedSymbol {
-    override def name_=(x: Name) = synchronized { super.name_=(x) }
-    override def rawname = synchronized { super.rawname }
-    override def typeConstructor: Type = synchronized { super.typeConstructor }
-    override def tpe: Type = synchronized { super.tpe }
+    // unlike with typeConstructor, a lock is necessary here, because tpe calculation relies on
+    // temporarily assigning NoType to tpeCache to detect cyclic reference errors
+    private lazy val tpeLock = new Object
+    override def tpe_* : Type = gilSynchronizedIfNotThreadsafe { tpeLock.synchronized { super.tpe_* } }
   }
 
-  trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol {
-    override def associatedFile = synchronized { super.associatedFile }
-    override def associatedFile_=(f: AbstractFile) = synchronized { super.associatedFile_=(f) }
-    override def thisSym: Symbol = synchronized { super.thisSym }
-    override def thisType: Type = synchronized { super.thisType }
-    override def typeOfThis: Type = synchronized { super.typeOfThis }
-    override def typeOfThis_=(tp: Type) = synchronized { super.typeOfThis_=(tp) }
-    override def children = synchronized { super.children }
-    override def addChild(sym: Symbol) = synchronized { super.addChild(sym) }
-  }
+  trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol
 
-  trait SynchronizedModuleClassSymbol extends ModuleClassSymbol with SynchronizedClassSymbol {
-    override def sourceModule = synchronized { super.sourceModule }
-    override def implicitMembers: Scope = synchronized { super.implicitMembers }
-  }
+  trait SynchronizedModuleClassSymbol extends ModuleClassSymbol with SynchronizedClassSymbol
 }
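
The long comment above boils down to a simple rule for completer authors: call `markAllCompleted()` (or `markFlagsCompleted`) as the very last step of `complete`. A hedged sketch of a completer following that rule; `computeInfoFor` is a hypothetical helper, while `setInfo` and `markAllCompleted` are the real hooks shown in the diff:

    val exampleCompleter = new LazyType {
      override def complete(sym: Symbol): Unit = {
        // setInfo may run while completion is still "in flight", so it alone
        // must not be taken to mean the symbol is ready for concurrent use.
        sym setInfo computeInfoFor(sym)
        // ... any remaining initialization ...
        sym.markAllCompleted()   // only now may isThreadsafe start returning true
      }
    }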
 
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
index a3e7c28..9bcf85d 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
@@ -1,8 +1,11 @@
-package scala.reflect
+package scala
+package reflect
 package runtime
 
-import scala.collection.mutable.WeakHashMap
-import java.lang.ref.WeakReference
+import scala.collection.mutable
+import java.lang.ref.{WeakReference => jWeakRef}
+import scala.ref.{WeakReference => sWeakRef}
+import scala.reflect.internal.Depth
 
 /** This trait overrides methods in reflect.internal, bracketing
  *  them in synchronized { ... } to make them thread-safe
@@ -12,9 +15,10 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
   // No sharing of map objects:
   override protected def commonOwnerMap = new CommonOwnerMap
 
-  private object uniqueLock
-
-  private val uniques = WeakHashMap[Type, WeakReference[Type]]()
+  // we can keep this lock fine-grained, because super.unique just updates the cache
+  // and, in particular, doesn't call any reflection APIs which makes deadlocks impossible
+  private lazy val uniqueLock = new Object
+  private val uniques = mutable.WeakHashMap[Type, jWeakRef[Type]]()
   override def unique[T <: Type](tp: T): T = uniqueLock.synchronized {
     // we need to have weak uniques for runtime reflection
     // because unlike the normal compiler universe, reflective universe isn't organized in runs
@@ -28,7 +32,7 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
       val result = if (inCache.isDefined) inCache.get.get else null
       if (result ne null) result.asInstanceOf[T]
       else {
-        uniques(tp) = new WeakReference(tp)
+        uniques(tp) = new jWeakRef(tp)
         tp
       }
     } else {
@@ -36,47 +40,46 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
     }
   }
 
-  class SynchronizedUndoLog extends UndoLog {
-    private val actualLock = new java.util.concurrent.locks.ReentrantLock
-
-    final override def lock(): Unit = actualLock.lock()
-    final override def unlock(): Unit = actualLock.unlock()
-  }
-
-  override protected def newUndoLog = new SynchronizedUndoLog
-
-  override protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) =
-    synchronized { super.baseTypeOfNonClassTypeRef(tpe, clazz) }
+  private lazy val _skolemizationLevel = mkThreadLocalStorage(0)
+  override def skolemizationLevel = _skolemizationLevel.get
+  override def skolemizationLevel_=(value: Int) = _skolemizationLevel.set(value)
 
-  private object subsametypeLock
+  private lazy val _undoLog = mkThreadLocalStorage(new UndoLog)
+  override def undoLog = _undoLog.get
 
-  override def isSameType(tp1: Type, tp2: Type): Boolean =
-    subsametypeLock.synchronized { super.isSameType(tp1, tp2) }
+  private lazy val _intersectionWitness = mkThreadLocalStorage(perRunCaches.newWeakMap[List[Type], sWeakRef[Type]]())
+  override def intersectionWitness = _intersectionWitness.get
 
-  override def isDifferentType(tp1: Type, tp2: Type): Boolean =
-    subsametypeLock.synchronized { super.isDifferentType(tp1, tp2) }
+  private lazy val _subsametypeRecursions = mkThreadLocalStorage(0)
+  override def subsametypeRecursions = _subsametypeRecursions.get
+  override def subsametypeRecursions_=(value: Int) = _subsametypeRecursions.set(value)
 
-  override def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean =
-    subsametypeLock.synchronized { super.isSubType(tp1, tp2, depth) }
+  private lazy val _pendingSubTypes = mkThreadLocalStorage(new mutable.HashSet[SubTypePair])
+  override def pendingSubTypes = _pendingSubTypes.get
 
-  private object lubglbLock
+  private lazy val _basetypeRecursions = mkThreadLocalStorage(0)
+  override def basetypeRecursions = _basetypeRecursions.get
+  override def basetypeRecursions_=(value: Int) = _basetypeRecursions.set(value)
 
-  override def glb(ts: List[Type]): Type =
-    lubglbLock.synchronized { super.glb(ts) }
+  private lazy val _pendingBaseTypes = mkThreadLocalStorage(new mutable.HashSet[Type])
+  override def pendingBaseTypes = _pendingBaseTypes.get
 
-  override def lub(ts: List[Type]): Type =
-    lubglbLock.synchronized { super.lub(ts) }
+  private lazy val _lubResults = mkThreadLocalStorage(new mutable.HashMap[(Depth, List[Type]), Type])
+  override def lubResults = _lubResults.get
 
-  private object indentLock
+  private lazy val _glbResults = mkThreadLocalStorage(new mutable.HashMap[(Depth, List[Type]), Type])
+  override def glbResults = _glbResults.get
 
-  override protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
-    indentLock.synchronized { super.explain(op, p, tp1, arg2) }
-  }
+  private lazy val _indent = mkThreadLocalStorage("")
+  override def indent = _indent.get
+  override def indent_=(value: String) = _indent.set(value)
 
-  private object toStringLock
+  private lazy val _toStringRecursions = mkThreadLocalStorage(0)
+  override def toStringRecursions = _toStringRecursions.get
+  override def toStringRecursions_=(value: Int) = _toStringRecursions.set(value)
 
-  override protected def typeToString(tpe: Type): String =
-    toStringLock.synchronized(super.typeToString(tpe))
+  private lazy val _toStringSubjects = mkThreadLocalStorage(new mutable.HashSet[Type])
+  override def toStringSubjects = _toStringSubjects.get
 
   /* The idea of caches is as follows.
    * When in reflective mode, a cache is either null, or one sentinel
@@ -89,18 +92,18 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
    */
 
   override protected def defineUnderlyingOfSingleType(tpe: SingleType) =
-    tpe.synchronized { super.defineUnderlyingOfSingleType(tpe) }
+    gilSynchronized { super.defineUnderlyingOfSingleType(tpe) }
 
   override protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) =
-    tpe.synchronized { super.defineBaseTypeSeqOfCompoundType(tpe) }
+    gilSynchronized { super.defineBaseTypeSeqOfCompoundType(tpe) }
 
   override protected def defineBaseClassesOfCompoundType(tpe: CompoundType) =
-    tpe.synchronized { super.defineBaseClassesOfCompoundType(tpe) }
+    gilSynchronized { super.defineBaseClassesOfCompoundType(tpe) }
 
   override protected def defineParentsOfTypeRef(tpe: TypeRef) =
-    tpe.synchronized { super.defineParentsOfTypeRef(tpe) }
+    gilSynchronized { super.defineParentsOfTypeRef(tpe) }
 
   override protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) =
-    tpe.synchronized { super.defineBaseTypeSeqOfTypeRef(tpe) }
+    gilSynchronized { super.defineBaseTypeSeqOfTypeRef(tpe) }
 
 }
diff --git a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
new file mode 100644
index 0000000..5edc051
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
@@ -0,0 +1,28 @@
+package scala.reflect
+package runtime
+
+import java.lang.Thread._
+
+private[reflect] trait ThreadLocalStorage {
+  self: SymbolTable =>
+
+  // see a discussion at scala-internals for more information:
+  // http://groups.google.com/group/scala-internals/browse_thread/thread/337ce68aa5e51f79
+  trait ThreadLocalStorage[T] { def get: T; def set(newValue: T): Unit }
+  private class MyThreadLocalStorage[T](initialValue: => T) extends ThreadLocalStorage[T] {
+    // TODO: how do we use org.cliffc.high_scale_lib.NonBlockingHashMap here?
+    val values = new java.util.concurrent.ConcurrentHashMap[Thread, T]()
+    def get: T = {
+      if (values containsKey currentThread) values.get(currentThread)
+      else {
+        val value = initialValue
+        values.putIfAbsent(currentThread, value)
+        value
+      }
+    }
+    def set(newValue: T): Unit = {
+      values.put(currentThread, newValue)
+    }
+  }
+  @inline final def mkThreadLocalStorage[T](x: => T): ThreadLocalStorage[T] = new MyThreadLocalStorage(x)
+}
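
Instead of `java.lang.ThreadLocal`, per-thread state is kept in a `ConcurrentHashMap` keyed by the current thread (the linked scala-internals thread discusses the trade-off). Usage is a plain get/set, e.g. (illustrative only):

    val counter = mkThreadLocalStorage(0)   // every thread starts from the initial value
    counter.set(counter.get + 1)            // the update is visible to the current thread only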
diff --git a/src/reflect/scala/reflect/runtime/TwoWayCache.scala b/src/reflect/scala/reflect/runtime/TwoWayCache.scala
index 05debcb..d0fc3da 100644
--- a/src/reflect/scala/reflect/runtime/TwoWayCache.scala
+++ b/src/reflect/scala/reflect/runtime/TwoWayCache.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 package runtime
 
 import scala.collection.mutable.WeakHashMap
@@ -51,16 +52,4 @@ private[runtime] class TwoWayCache[J, S] {
         result
     }
   }
-
-  def toJavaOption(key: S)(body: => Option[J]): Option[J] = synchronized {
-    toJavaMap get key match {
-      case SomeRef(v) =>
-        Some(v)
-      case _ =>
-        val result = body
-        for (value <- result) enter(value, key)
-        result
-    }
-  }
 }
-
diff --git a/src/reflect/scala/reflect/runtime/TwoWayCaches.scala b/src/reflect/scala/reflect/runtime/TwoWayCaches.scala
new file mode 100644
index 0000000..6e2890e
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/TwoWayCaches.scala
@@ -0,0 +1,68 @@
+package scala.reflect
+package runtime
+
+import scala.collection.mutable.WeakHashMap
+import java.lang.ref.WeakReference
+
+/** A cache that maintains a bijection between Java reflection type `J`
+ *  and Scala reflection type `S`.
+ *
+ *  The cache is two-way weak (i.e. is powered by weak references),
+ *  so that neither Java artifacts prevent Scala artifacts from being garbage collected,
+ *  nor the other way around.
+ */
+private[runtime] trait TwoWayCaches { self: SymbolTable =>
+  class TwoWayCache[J, S] {
+
+    private val toScalaMap = new WeakHashMap[J, WeakReference[S]]
+    private val toJavaMap = new WeakHashMap[S, WeakReference[J]]
+
+    def enter(j: J, s: S) = gilSynchronized {
+      // debugInfo("cached: "+j+"/"+s)
+      toScalaMap(j) = new WeakReference(s)
+      toJavaMap(s) = new WeakReference(j)
+    }
+
+    private object SomeRef {
+      def unapply[T](optRef: Option[WeakReference[T]]): Option[T] =
+        if (optRef.nonEmpty) {
+          val result = optRef.get.get
+          if (result != null) Some(result) else None
+        } else None
+    }
+
+    def toScala(key: J)(body: => S): S = gilSynchronized {
+      toScalaMap get key match {
+        case SomeRef(v) =>
+          v
+        case _ =>
+          val result = body
+          enter(key, result)
+          result
+      }
+    }
+
+    def toJava(key: S)(body: => J): J = gilSynchronized {
+      toJavaMap get key match {
+        case SomeRef(v) =>
+          v
+        case _ =>
+          val result = body
+          enter(result, key)
+          result
+      }
+    }
+
+    def toJavaOption(key: S)(body: => Option[J]): Option[J] = gilSynchronized {
+      toJavaMap get key match {
+        case SomeRef(v) =>
+          Some(v)
+        case _ =>
+          val result = body
+          for (value <- result) enter(value, key)
+          result
+      }
+    }
+  }
+}
+
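A typical use of the cache is memoizing the mapping between Java and Scala reflection artifacts in both directions, computing the conversion only on a miss. A sketch assuming some `createClassSymbol` conversion exists (that helper name is hypothetical; `toScala` and `enter` are the real entry points):

    val classCache = new TwoWayCache[java.lang.Class[_], ClassSymbol]
    def classToScala(jclazz: java.lang.Class[_]): ClassSymbol =
      classCache.toScala(jclazz) {
        createClassSymbol(jclazz)   // expensive conversion, run once per live key
      }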
diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala
index b97913d..3c9bbcc 100644
--- a/src/reflect/scala/reflect/runtime/package.scala
+++ b/src/reflect/scala/reflect/runtime/package.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
 
 /** Entry points into runtime reflection.
  *  See [[scala.reflect.api.package the overview page]] for details on how to use them.
@@ -6,7 +7,7 @@ package scala.reflect
 package object runtime {
 
   /** The entry point into Scala runtime reflection.
-   * 
+   *
    * To use Scala runtime reflection, simply use or import `scala.reflect.runtime.universe._`
    *
    * See [[scala.reflect.api.Universe]] or the
@@ -20,12 +21,12 @@ package object runtime {
    */
   // implementation hardwired to the `currentMirror` method below
   // using the mechanism implemented in `scala.tools.reflect.FastTrack`
-  def currentMirror: universe.Mirror = ??? // macro
+  def currentMirror: universe.Mirror = macro ???
 }
 
 package runtime {
   private[scala] object Macros {
-    def currentMirror(c: scala.reflect.macros.Context): c.Expr[universe.Mirror] = {
+    def currentMirror(c: scala.reflect.macros.blackbox.Context): c.Expr[universe.Mirror] = {
       import c.universe._
       val runtimeClass = c.reifyEnclosingRuntimeClass
       if (runtimeClass.isEmpty) c.abort(c.enclosingPosition, "call site does not have an enclosing class")
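
From a user's perspective nothing changes: `currentMirror` still yields a runtime mirror for the call site (the macro reifies the enclosing runtime class, as shown above), so code along these lines keeps working:

    import scala.reflect.runtime.{ currentMirror => cm }
    val listSymbol = cm staticClass "scala.collection.immutable.List"   // resolve a class visible at the call site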
diff --git a/src/compiler/scala/tools/nsc/Interpreter.scala b/src/repl/scala/tools/nsc/Interpreter.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/Interpreter.scala
rename to src/repl/scala/tools/nsc/Interpreter.scala
diff --git a/src/compiler/scala/tools/nsc/InterpreterLoop.scala b/src/repl/scala/tools/nsc/InterpreterLoop.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/InterpreterLoop.scala
rename to src/repl/scala/tools/nsc/InterpreterLoop.scala
diff --git a/src/repl/scala/tools/nsc/MainGenericRunner.scala b/src/repl/scala/tools/nsc/MainGenericRunner.scala
new file mode 100644
index 0000000..43f0ea1
--- /dev/null
+++ b/src/repl/scala/tools/nsc/MainGenericRunner.scala
@@ -0,0 +1,106 @@
+/* NSC -- new Scala compiler
+ * Copyright 2006-2013 LAMP/EPFL
+ * @author  Lex Spoon
+ */
+
+package scala
+package tools.nsc
+
+import io.{ File }
+import util.{ ClassPath, ScalaClassLoader }
+import Properties.{ versionString, copyrightString }
+import GenericRunnerCommand._
+
+object JarRunner extends CommonRunner {
+  def runJar(settings: GenericRunnerSettings, jarPath: String, arguments: Seq[String]): Either[Throwable, Boolean] = {
+    val jar       = new io.Jar(jarPath)
+    val mainClass = jar.mainClass getOrElse sys.error("Cannot find main class for jar: " + jarPath)
+    val jarURLs   = ClassPath expandManifestPath jarPath
+    val urls      = if (jarURLs.isEmpty) File(jarPath).toURL +: settings.classpathURLs else jarURLs
+
+    if (settings.Ylogcp) {
+      Console.err.println("Running jar with these URLs as the classpath:")
+      urls foreach println
+    }
+
+    runAndCatch(urls, mainClass, arguments)
+  }
+}
+
+/** An object that runs Scala code.  It has three possible
+  * sources for the code to run: pre-compiled code, a script file,
+  * or interactive entry.
+  */
+class MainGenericRunner {
+  def errorFn(ex: Throwable): Boolean = {
+    ex.printStackTrace()
+    false
+  }
+  def errorFn(str: String): Boolean = {
+    Console.err println str
+    false
+  }
+
+  def process(args: Array[String]): Boolean = {
+    val command = new GenericRunnerCommand(args.toList, (x: String) => errorFn(x))
+    import command.{ settings, howToRun, thingToRun }
+    def sampleCompiler = new Global(settings)   // def so it's not created unless needed
+
+    if (!command.ok)                      return errorFn("\n" + command.shortUsageMsg)
+    else if (settings.version)            return errorFn("Scala code runner %s -- %s".format(versionString, copyrightString))
+    else if (command.shouldStopWithInfo)  return errorFn(command getInfoMessage sampleCompiler)
+
+    def isE   = !settings.execute.isDefault
+    def dashe = settings.execute.value
+
+    def isI   = !settings.loadfiles.isDefault
+    def dashi = settings.loadfiles.value
+
+    // Deadlocks on startup under -i unless we disable async.
+    if (isI)
+      settings.Yreplsync.value = true
+
+    def combinedCode  = {
+      val files   = if (isI) dashi map (file => File(file).slurp()) else Nil
+      val str     = if (isE) List(dashe) else Nil
+
+      files ++ str mkString "\n\n"
+    }
+
+    def runTarget(): Either[Throwable, Boolean] = howToRun match {
+      case AsObject =>
+        ObjectRunner.runAndCatch(settings.classpathURLs, thingToRun, command.arguments)
+      case AsScript =>
+        ScriptRunner.runScriptAndCatch(settings, thingToRun, command.arguments)
+      case AsJar    =>
+        JarRunner.runJar(settings, thingToRun, command.arguments)
+      case Error =>
+        Right(false)
+      case _  =>
+        // We start the repl when no arguments are given.
+        Right(new interpreter.ILoop process settings)
+    }
+
+    /** If -e and -i were both given, we want to execute the -e code after the
+     *  -i files have been included, so they are read into strings and prepended to
+     *  the code given in -e.  The -i option is documented to only make sense
+     *  interactively so this is a pretty reasonable assumption.
+     *
+     *  This all needs a rewrite though.
+     */
+    if (isE) {
+      ScriptRunner.runCommand(settings, combinedCode, thingToRun +: command.arguments)
+    }
+    else runTarget() match {
+      case Left(ex) => errorFn(ex)
+      case Right(b) => b
+    }
+  }
+}
+
+object MainGenericRunner extends MainGenericRunner {
+  def main(args: Array[String]) {
+    if (!process(args))
+      sys.exit(1)
+  }
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
new file mode 100644
index 0000000..7122195
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
@@ -0,0 +1,7 @@
+package scala.tools.nsc
+package interpreter
+
+import scala.reflect.io.AbstractFile
+
+@deprecated("Use `scala.tools.nsc.util.AbstractFileClassLoader`", "2.11.0")
+class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader) extends util.AbstractFileClassLoader(root, parent)
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
rename to src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
diff --git a/src/repl/scala/tools/nsc/interpreter/CommandLine.scala b/src/repl/scala/tools/nsc/interpreter/CommandLine.scala
new file mode 100644
index 0000000..0ab92ab
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/CommandLine.scala
@@ -0,0 +1,13 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Lex Spoon
+ */
+
+package scala.tools.nsc
+package interpreter
+
+/** A command line for the interpreter.
+ */
+class CommandLine(arguments: List[String], error: String => Unit) extends CompilerCommand(arguments, error) {
+  override def cmdName = "scala"
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/Completion.scala b/src/repl/scala/tools/nsc/interpreter/Completion.scala
new file mode 100644
index 0000000..9ad7f95
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/Completion.scala
@@ -0,0 +1,47 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import Completion._
+
+/** An implementation-agnostic completion interface which makes no
+ *  reference to the jline classes.
+ */
+trait Completion {
+  def resetVerbosity(): Unit
+  def completer(): ScalaCompleter
+}
+object NoCompletion extends Completion {
+  def resetVerbosity() = ()
+  def completer() = NullCompleter
+}
+
+object Completion {
+  case class Candidates(cursor: Int, candidates: List[String]) { }
+  val NoCandidates = Candidates(-1, Nil)
+
+  object NullCompleter extends ScalaCompleter {
+    def complete(buffer: String, cursor: Int): Candidates = NoCandidates
+  }
+  trait ScalaCompleter {
+    def complete(buffer: String, cursor: Int): Candidates
+  }
+
+  def looksLikeInvocation(code: String) = (
+        (code != null)
+    &&  (code startsWith ".")
+    && !(code == ".")
+    && !(code startsWith "./")
+    && !(code startsWith "..")
+  )
+  object Forwarder {
+    def apply(forwardTo: () => Option[CompletionAware]): CompletionAware = new CompletionAware {
+      def completions(verbosity: Int) = forwardTo() map (_ completions verbosity) getOrElse Nil
+      override def follow(s: String) = forwardTo() flatMap (_ follow s)
+    }
+  }
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/CompletionAware.scala b/src/repl/scala/tools/nsc/interpreter/CompletionAware.scala
new file mode 100644
index 0000000..3dd5d93
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/CompletionAware.scala
@@ -0,0 +1,53 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+/** An interface for objects which are aware of tab completion and
+ *  will supply their own candidates and resolve their own paths.
+ */
+trait CompletionAware {
+  /** The complete list of unqualified Strings to which this
+   *  object will complete.
+   */
+  def completions(verbosity: Int): List[String]
+
+  /** The next completor in the chain.
+   */
+  def follow(id: String): Option[CompletionAware] = None
+
+  /** A list of useful information regarding a specific uniquely
+   *  identified completion.  This is specifically written for the
+   *  following situation, but should be useful elsewhere too:
+   *
+   *    x.y.z.methodName<tab>
+   *
+   *  If "methodName" is among z's completions, and verbosity > 0
+   *  indicating tab has been pressed twice consecutively, then we
+   *  call alternativesFor and show a list of overloaded method
+   *  signatures.
+   */
+  def alternativesFor(id: String): List[String] = Nil
+
+  /** Given string 'buf', return a list of all the strings
+   *  to which it can complete.  This may involve delegating
+   *  to other CompletionAware objects.
+   */
+  def completionsFor(parsed: Parsed): List[String] = {
+    import parsed.{ buffer, verbosity }
+    val comps = completions(verbosity) filter (_ startsWith buffer)
+    val exact = comps contains buffer
+
+    val results =
+      if (parsed.isEmpty) comps
+      else if (parsed.isUnqualified && !parsed.isLastDelimiter)
+        if (verbosity > 0 && exact) alternativesFor(buffer)
+        else comps
+      else follow(parsed.bufferHead) map (_ completionsFor parsed.bufferTail) getOrElse Nil
+
+    results.sorted
+  }
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala
new file mode 100644
index 0000000..d24ad60
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala
@@ -0,0 +1,85 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+/** This has a lot of duplication with other methods in Symbols and Types,
+ *  but repl completion utility is very sensitive to precise output.  Best
+ *  thing would be to abstract an interface for how such things are printed,
+ *  as is also in progress with error messages.
+ */
+trait CompletionOutput {
+  val global: Global
+
+  import global._
+  import definitions.{ isTupleType, isFunctionType, isRepeatedParamType }
+
+  /** Reducing fully qualified noise for some common packages.
+   */
+  val typeTransforms = List(
+    "java.lang." -> "",
+    "scala.collection.immutable." -> "immutable.",
+    "scala.collection.mutable." -> "mutable.",
+    "scala.collection.generic." -> "generic."
+  )
+
+  def quietString(tp: String): String =
+    typeTransforms.foldLeft(tp) {
+      case (str, (prefix, replacement)) =>
+        if (str startsWith prefix) replacement + (str stripPrefix prefix)
+        else str
+    }
+
+  class MethodSymbolOutput(method: Symbol) {
+    val pkg       = method.ownerChain find (_.isPackageClass) map (_.fullName) getOrElse ""
+
+    def relativize(str: String): String = quietString(str stripPrefix (pkg + "."))
+    def relativize(tp: Type): String    = relativize(tp.dealiasWiden.toString)
+
+    def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]")
+    def parenList(params: List[Any])  = params.mkString("(", ", ", ")")
+
+    def methodTypeToString(mt: MethodType) =
+      (mt.paramss map paramsString mkString "") + ": " + relativize(mt.finalResultType)
+
+    def typeToString(tp: Type): String = relativize(
+      tp match {
+        case x if isFunctionType(x)      => functionString(x)
+        case x if isTupleType(x)         => tupleString(x)
+        case x if isRepeatedParamType(x) => typeToString(x.typeArgs.head) + "*"
+        case mt @ MethodType(_, _)       => methodTypeToString(mt)
+        case x                           => x.toString
+      }
+    )
+
+    def tupleString(tp: Type) = parenList(tp.dealiasWiden.typeArgs map relativize)
+    def functionString(tp: Type) = tp.dealiasWiden.typeArgs match {
+      case List(t, r) => t + " => " + r
+      case xs         => parenList(xs.init) + " => " + xs.last
+    }
+
+    def tparamsString(tparams: List[Symbol])  = braceList(tparams map (_.defString))
+    def paramsString(params: List[Symbol])    = {
+      def paramNameString(sym: Symbol)  = if (sym.isSynthetic) "" else sym.nameString + ": "
+      def paramString(sym: Symbol)      = paramNameString(sym) + typeToString(sym.info.dealiasWiden)
+
+      val isImplicit = params.nonEmpty && params.head.isImplicit
+      val strs = (params map paramString) match {
+        case x :: xs if isImplicit  => ("implicit " + x) :: xs
+        case xs                     => xs
+      }
+      parenList(strs)
+    }
+
+    def methodString() =
+      method.keyString + " " + method.nameString + (method.info.dealiasWiden match {
+        case NullaryMethodType(resType)         => ": " + typeToString(resType)
+        case PolyType(tparams, resType)         => tparamsString(tparams) + typeToString(resType)
+        case mt @ MethodType(_, _)              => methodTypeToString(mt)
+        case x                                  => x.toString
+      })
+  }
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
new file mode 100644
index 0000000..d8efcda
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
@@ -0,0 +1,160 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import jline.console.{ ConsoleReader, CursorBuffer }
+
+trait ConsoleReaderHelper { _: ConsoleReader with Tabulator =>
+  def isAcross: Boolean
+
+  def terminal    = getTerminal()
+  def width       = terminal.getWidth()
+  def height      = terminal.getHeight()
+
+  def readOneKey(prompt: String): Int
+  def eraseLine(): Unit
+
+  val marginSize = 3
+
+  private def morePrompt = "--More--"
+  private def emulateMore(): Int = {
+    val key = readOneKey(morePrompt)
+    try key match {
+      case '\r' | '\n'  => 1
+      case 'q'          => -1
+      case _            => height - 1
+    }
+    finally {
+      eraseLine()
+      // TODO: still not quite managing to erase --More-- and get
+      // back to a scala prompt without another keypress.
+      if (key == 'q') {
+        putString(getPrompt())
+        redrawLine()
+        flush()
+      }
+    }
+  }
+
+  override def printColumns(items: JCollection[_ <: CharSequence]): Unit =
+    printColumns_(items: List[String])
+
+  private def printColumns_(items: List[String]): Unit = if (items exists (_ != "")) {
+    val grouped = tabulate(items)
+    var linesLeft  = if (isPaginationEnabled()) height - 1 else Int.MaxValue
+    grouped foreach { xs =>
+      println(xs.mkString)
+      linesLeft -= 1
+      if (linesLeft <= 0) {
+        linesLeft = emulateMore()
+        if (linesLeft < 0)
+          return
+      }
+    }
+  }
+}
+
+trait Tabulator {
+  def isAcross: Boolean
+  def width: Int
+  def marginSize: Int
+
+  protected def fits(items: Seq[String], width: Int): Boolean = (
+    (items map (_.length)).sum + (items.length - 1) * marginSize < width
+  )
+  def tabulate(items: Seq[String]): Seq[Seq[String]] = (
+    if (fits(items, width)) Seq(Seq(items mkString " " * marginSize))
+    else printMultiLineColumns(items)
+  )
+  protected def columnize(ss: Seq[String]): Seq[Seq[String]] = ss map (s => Seq(s))
+  protected def printMultiLineColumns(items: Seq[String]): Seq[Seq[String]] = {
+    import SimpleMath._
+    val longest     = (items map (_.length)).max
+    val columnWidth = longest + marginSize
+    val maxcols = (
+      if (columnWidth >= width) 1
+      else 1 max (width / columnWidth)   // make sure it doesn't divide to 0
+    )
+    val nrows       = items.size /% maxcols
+    val ncols       = items.size /% nrows
+    val groupSize   = ncols
+    val padded      = items map (s"%-${columnWidth}s" format _)
+    val xwise       = isAcross || ncols >= items.length
+    val grouped: Seq[Seq[String]]    =
+      if (groupSize == 1) columnize(items)
+      else if (xwise) (padded grouped groupSize).toSeq
+      else {
+        val h       = 1 max padded.size /% groupSize
+        val cols    = (padded grouped h).toList
+        for (i <- 0 until h) yield
+          for (j <- 0 until groupSize) yield
+            if (i < cols(j).size) cols(j)(i) else ""
+      }
+    grouped
+  }
+}
+
+/** Adjust the column width and number of columns to minimize the row count. */
+trait VariColumnTabulator extends Tabulator {
+  override protected def printMultiLineColumns(items: Seq[String]): Seq[Seq[String]] = {
+    import SimpleMath._
+    val longest  = (items map (_.length)).max
+    val shortest = (items map (_.length)).min
+    val fattest  = longest + marginSize
+    val skinny   = shortest + marginSize
+
+    // given ncols, calculate nrows and a list of column widths, or none if not possible
+    // if ncols > items.size, then columnWidths.size == items.size
+    def layout(ncols: Int): Option[(Int, Seq[Int], Seq[Seq[String]])] = {
+      val nrows = items.size /% ncols
+      val xwise = isAcross || ncols >= items.length
+      def maxima(sss: Seq[Seq[String]]) =
+        (0 until (ncols min items.size)) map (i => (sss map (ss => ss(i).length)).max)
+      def resulting(rows: Seq[Seq[String]]) = {
+        val columnWidths = maxima(rows) map (_ + marginSize)
+        val linelen      = columnWidths.sum
+        if (linelen <= width) Some((nrows, columnWidths, rows))
+        else None
+      }
+      if (ncols == 1) resulting(columnize(items))
+      else if (xwise) resulting((items grouped ncols).toSeq)
+      else {
+        val cols = (items grouped nrows).toList
+        val rows = for (i <- 0 until nrows) yield
+          for (j <- 0 until ncols) yield
+            if (j < cols.size && i < cols(j).size) cols(j)(i) else ""
+        resulting(rows)
+      }
+    }
+
+    if (fattest >= width) {
+      columnize(items)
+    } else {
+      // if every col is widest, we have at least this many cols
+      val mincols = 1 max (width / fattest)
+      // if every other col is skinniest, we have at most this many cols
+      val maxcols = 1 + ((width - fattest) / skinny)
+      val possibles = (mincols to maxcols).map(n => layout(n)).flatten
+      val minrows = (possibles map (_._1)).min
+
+      // select the min ncols that results in minrows
+      val (_, columnWidths, sss) = (possibles find (_._1 == minrows)).get
+
+      // format to column width
+      sss map (ss => ss.zipWithIndex map {
+        case (s, i) => s"%-${columnWidths(i)}s" format s
+      })
+    }
+  }
+}
+
+private[interpreter] object SimpleMath {
+  implicit class DivRem(private val i: Int) extends AnyVal {
+    /** i/n + if (i % n != 0) 1 else 0 */
+    def /%(n: Int): Int = (i + n - 1) / n
+  }
+}
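
`/%` is ceiling division, used above to turn an item count into a row or column count; a couple of worked values:

    import SimpleMath._
    10 /% 3   // == 4: ten items need four rows of three
    9  /% 3   // == 3: divides evenly, no extra row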
diff --git a/src/repl/scala/tools/nsc/interpreter/Delimited.scala b/src/repl/scala/tools/nsc/interpreter/Delimited.scala
new file mode 100644
index 0000000..b7f06f1
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/Delimited.scala
@@ -0,0 +1,41 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList }
+
+class JLineDelimiter extends ArgumentDelimiter {
+  def toJLine(args: List[String], cursor: Int) = args match {
+    case Nil    => new ArgumentList(new Array[String](0), 0, 0, cursor)
+    case xs     => new ArgumentList(xs.toArray, xs.size - 1, xs.last.length, cursor)
+  }
+
+  def delimit(buffer: CharSequence, cursor: Int) = {
+    val p = Parsed(buffer.toString, cursor)
+    toJLine(p.args, cursor)
+  }
+  def isDelimiter(buffer: CharSequence, cursor: Int) = Parsed(buffer.toString, cursor).isDelimiter
+}
+
+trait Delimited {
+  self: Parsed =>
+
+  def delimited: Char => Boolean
+  def escapeChars: List[Char] = List('\\')
+
+  /** Break String into args based on delimiting function.
+   */
+  protected def toArgs(s: String): List[String] =
+    if (s == "") Nil
+    else (s indexWhere isDelimiterChar) match {
+      case -1   => List(s)
+      case idx  => (s take idx) :: toArgs(s drop (idx + 1))
+    }
+
+  def isDelimiterChar(ch: Char) = delimited(ch)
+  def isEscapeChar(ch: Char): Boolean = escapeChars contains ch
+}
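
`toArgs` splits the buffer into shell-style words at every delimiter character. A standalone sketch of the same recursion, assuming whitespace as the delimiting function:

    def splitArgs(s: String): List[String] =
      if (s == "") Nil
      else s.indexWhere(_.isWhitespace) match {
        case -1  => List(s)
        case idx => s.take(idx) :: splitArgs(s.drop(idx + 1))
      }
    // splitArgs("load foo.scala") == List("load", "foo.scala")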
diff --git a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
new file mode 100644
index 0000000..8a6a405
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -0,0 +1,86 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import scala.tools.nsc.ast.parser.Tokens.EOF
+
+trait ExprTyper {
+  val repl: IMain
+
+  import repl._
+  import global.{ reporter => _, Import => _, _ }
+  import naming.freshInternalVarName
+
+  def symbolOfLine(code: String): Symbol = {
+    def asExpr(): Symbol = {
+      val name  = freshInternalVarName()
+      // Typing it with a lazy val would give us the right type, but runs
+      // into compiler bugs with things like existentials, so we compile it
+      // behind a def and strip the NullaryMethodType which wraps the expr.
+      val line = "def " + name + " = " + code
+
+      interpretSynthetic(line) match {
+        case IR.Success =>
+          val sym0 = symbolOfTerm(name)
+          // drop NullaryMethodType
+          sym0.cloneSymbol setInfo exitingTyper(sym0.tpe_*.finalResultType)
+        case _          => NoSymbol
+      }
+    }
+    def asDefn(): Symbol = {
+      val old = repl.definedSymbolList.toSet
+
+      interpretSynthetic(code) match {
+        case IR.Success =>
+          repl.definedSymbolList filterNot old match {
+            case Nil        => NoSymbol
+            case sym :: Nil => sym
+            case syms       => NoSymbol.newOverloaded(NoPrefix, syms)
+          }
+        case _ => NoSymbol
+      }
+    }
+    def asError(): Symbol = {
+      interpretSynthetic(code)
+      NoSymbol
+    }
+    beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError()
+  }
+
+  private var typeOfExpressionDepth = 0
+  def typeOfExpression(expr: String, silent: Boolean = true): Type = {
+    if (typeOfExpressionDepth > 2) {
+      repldbg("Terminating typeOfExpression recursion for expression: " + expr)
+      return NoType
+    }
+    typeOfExpressionDepth += 1
+    // Don't presently have a good way to suppress undesirable success output
+    // while letting errors through, so we first try it silently: if there
+    // is an error, and errors are desired, then we re-evaluate non-silently
+    // to induce the error message.
+    try beSilentDuring(symbolOfLine(expr).tpe) match {
+      case NoType if !silent => symbolOfLine(expr).tpe // generate error
+      case tpe               => tpe
+    }
+    finally typeOfExpressionDepth -= 1
+  }
+
+  // This only works for proper types.
+  def typeOfTypeString(typeString: String): Type = {
+    def asProperType(): Option[Type] = {
+      val name = freshInternalVarName()
+      val line = "def %s: %s = ???" format (name, typeString)
+      interpretSynthetic(line) match {
+        case IR.Success =>
+          val sym0 = symbolOfTerm(name)
+          Some(sym0.asMethod.returnType)
+        case _          => None
+      }
+    }
+    beSilentDuring(asProperType()) getOrElse NoType
+  }
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Formatting.scala b/src/repl/scala/tools/nsc/interpreter/Formatting.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/interpreter/Formatting.scala
rename to src/repl/scala/tools/nsc/interpreter/Formatting.scala
diff --git a/src/repl/scala/tools/nsc/interpreter/IBindings.java b/src/repl/scala/tools/nsc/interpreter/IBindings.java
new file mode 100644
index 0000000..b4cee4b
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/IBindings.java
@@ -0,0 +1,45 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Raphael Jolly
+ */
+
+package scala.tools.nsc.interpreter;
+
+import java.util.Map;
+import java.util.AbstractMap;
+import java.util.Set;
+import java.util.AbstractSet;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import javax.script.Bindings;
+
+abstract class IBindings extends AbstractMap<String, Object> implements Bindings {
+    public Set<Map.Entry<String, Object>> entrySet() {
+        return new AbstractSet<Map.Entry<String, Object>>() {
+            public int size() {
+                return 0;
+            }
+
+            public Iterator<Map.Entry<String, Object>> iterator() {
+                return new Iterator<Map.Entry<String, Object>>() {
+                    public boolean hasNext() {
+                        return false;
+                    }
+
+                    public Map.Entry<String, Object> next() {
+                        throw new NoSuchElementException();
+                    }
+
+                    public void remove() {
+                        throw new UnsupportedOperationException();
+                    }
+                };
+            }
+
+            public boolean add(Map.Entry<String, Object> e) {
+                IBindings.this.put(e.getKey(), e.getValue());
+                return true;
+            }
+        };
+    }
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
new file mode 100644
index 0000000..a96bed4
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -0,0 +1,929 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Alexander Spoon
+ */
+
+package scala
+package tools.nsc
+package interpreter
+
+import scala.language.{ implicitConversions, existentials }
+import scala.annotation.tailrec
+import Predef.{ println => _, _ }
+import interpreter.session._
+import StdReplTags._
+import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName }
+import scala.tools.nsc.util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
+import scala.reflect.classTag
+import scala.reflect.internal.util.{ BatchSourceFile, ScalaClassLoader }
+import ScalaClassLoader._
+import scala.reflect.io.{ File, Directory }
+import scala.tools.util._
+import scala.collection.generic.Clearable
+import scala.concurrent.{ ExecutionContext, Await, Future, future }
+import ExecutionContext.Implicits._
+import java.io.{ BufferedReader, FileReader }
+
+/** The Scala interactive shell.  It provides a read-eval-print loop
+ *  around the Interpreter class.
+ *  After instantiation, clients should call the main() method.
+ *
+ *  If no in0 is specified, then input will come from the console, and
+ *  the class will attempt to provide input editing features such as
+ *  input history.
+ *
+ *  @author Moez A. Abdel-Gawad
+ *  @author  Lex Spoon
+ *  @version 1.2
+ */
+class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
+                extends AnyRef
+                   with LoopCommands
+{
+  def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out)
+  def this() = this(None, new JPrintWriter(Console.out, true))
+
+  @deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp
+  @deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i
+
+  var in: InteractiveReader = _   // the input stream from which commands come
+  var settings: Settings = _
+  var intp: IMain = _
+
+  private var globalFuture: Future[Boolean] = _
+
+  /** Print a welcome message */
+  def printWelcome() {
+    echo(s"""
+      |Welcome to Scala $versionString ($javaVmName, Java $javaVersion).
+      |Type in expressions to have them evaluated.
+      |Type :help for more information.""".trim.stripMargin
+    )
+    replinfo("[info] started at " + new java.util.Date)
+  }
+
+  protected def asyncMessage(msg: String) {
+    if (isReplInfo || isReplPower)
+      echoAndRefresh(msg)
+  }
+
+  override def echoCommandMessage(msg: String) {
+    intp.reporter printUntruncatedMessage msg
+  }
+
+  lazy val power = new Power(intp, new StdReplVals(this))(tagOfStdReplVals, classTag[StdReplVals])
+  def history = in.history
+
+  // classpath entries added via :cp
+  var addedClasspath: String = ""
+
+  /** A reverse list of commands to replay if the user requests a :replay */
+  var replayCommandStack: List[String] = Nil
+
+  /** A list of commands to replay if the user requests a :replay */
+  def replayCommands = replayCommandStack.reverse
+
+  /** Record a command for replay should the user request a :replay */
+  def addReplay(cmd: String) = replayCommandStack ::= cmd
+
+  def savingReplayStack[T](body: => T): T = {
+    val saved = replayCommandStack
+    try body
+    finally replayCommandStack = saved
+  }
+  def savingReader[T](body: => T): T = {
+    val saved = in
+    try body
+    finally in = saved
+  }
+
+  /** Close the interpreter and set the var to null. */
+  def closeInterpreter() {
+    if (intp ne null) {
+      intp.close()
+      intp = null
+    }
+  }
+
+  class ILoopInterpreter extends IMain(settings, out) {
+    outer =>
+
+    override lazy val formatting = new Formatting {
+      def prompt = ILoop.this.prompt
+    }
+    override protected def parentClassLoader =
+      settings.explicitParentLoader.getOrElse( classOf[ILoop].getClassLoader )
+  }
+
+  /** Create a new interpreter. */
+  def createInterpreter() {
+    if (addedClasspath != "")
+      settings.classpath append addedClasspath
+
+    intp = new ILoopInterpreter
+  }
+
+  /** print a friendly help message */
+  def helpCommand(line: String): Result = {
+    if (line == "") helpSummary()
+    else uniqueCommand(line) match {
+      case Some(lc) => echo("\n" + lc.help)
+      case _        => ambiguousError(line)
+    }
+  }
+  private def helpSummary() = {
+    val usageWidth  = commands map (_.usageMsg.length) max
+    val formatStr   = "%-" + usageWidth + "s %s"
+
+    echo("All commands can be abbreviated, e.g. :he instead of :help.")
+
+    commands foreach { cmd =>
+      echo(formatStr.format(cmd.usageMsg, cmd.help))
+    }
+  }
+  private def ambiguousError(cmd: String): Result = {
+    matchingCommands(cmd) match {
+      case Nil  => echo(cmd + ": no such command.  Type :help for help.")
+      case xs   => echo(cmd + " is ambiguous: did you mean " + xs.map(":" + _.name).mkString(" or ") + "?")
+    }
+    Result(keepRunning = true, None)
+  }
+  private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd)
+  private def uniqueCommand(cmd: String): Option[LoopCommand] = {
+    // this lets us add commands willy-nilly and only requires enough of the command to disambiguate
+    matchingCommands(cmd) match {
+      case List(x)  => Some(x)
+      // exact match OK even if otherwise appears ambiguous
+      case xs       => xs find (_.name == cmd)
+    }
+  }
+
+  /** Show the history */
+  lazy val historyCommand = new LoopCommand("history", "show the history (optional num is commands to show)") {
+    override def usage = "[num]"
+    def defaultLines = 20
+
+    def apply(line: String): Result = {
+      if (history eq NoHistory)
+        return "No history available."
+
+      val xs      = words(line)
+      val current = history.index
+      val count   = try xs.head.toInt catch { case _: Exception => defaultLines }
+      val lines   = history.asStrings takeRight count
+      val offset  = current - lines.size + 1
+
+      for ((line, index) <- lines.zipWithIndex)
+        echo("%3d  %s".format(index + offset, line))
+    }
+  }
+
+  // When you know you are most likely breaking into the middle
+  // of a line being typed.  This softens the blow.
+  protected def echoAndRefresh(msg: String) = {
+    echo("\n" + msg)
+    in.redrawLine()
+  }
+  protected def echo(msg: String) = {
+    out println msg
+    out.flush()
+  }
+
+  /** Search the history */
+  def searchHistory(_cmdline: String) {
+    val cmdline = _cmdline.toLowerCase
+    val offset  = history.index - history.size + 1
+
+    for ((line, index) <- history.asStrings.zipWithIndex ; if line.toLowerCase contains cmdline)
+      echo("%d %s".format(index + offset, line))
+  }
+
+  private val currentPrompt = Properties.shellPromptString
+
+  /** Prompt to print when awaiting input */
+  def prompt = currentPrompt
+
+  import LoopCommand.{ cmd, nullary }
+
+  /** Standard commands **/
+  lazy val standardCommands = List(
+    cmd("cp", "<path>", "add a jar or directory to the classpath", addClasspath),
+    cmd("edit", "<id>|<line>", "edit history", editCommand),
+    cmd("help", "[command]", "print this summary or command-specific help", helpCommand),
+    historyCommand,
+    cmd("h?", "<string>", "search the history", searchHistory),
+    cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand),
+    cmd("implicits", "[-v]", "show the implicits in scope", intp.implicitsCommand),
+    cmd("javap", "<path|class>", "disassemble a file or class name", javapCommand),
+    cmd("line", "<id>|<line>", "place line(s) at the end of history", lineCommand),
+    cmd("load", "<path>", "interpret lines in a file", loadCommand),
+    cmd("paste", "[-raw] [path]", "enter paste mode or paste a file", pasteCommand),
+    nullary("power", "enable power user mode", powerCmd),
+    nullary("quit", "exit the interpreter", () => Result(keepRunning = false, None)),
+    nullary("replay", "reset execution and replay all previous commands", replay),
+    nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand),
+    cmd("save", "<path>", "save replayable session to a file", saveCommand),
+    shCommand,
+    cmd("settings", "[+|-]<options>", "+enable/-disable flags, set compiler options", changeSettings),
+    nullary("silent", "disable/enable automatic printing of results", verbosity),
+    cmd("type", "[-v] <expr>", "display the type of an expression without evaluating it", typeCommand),
+    cmd("kind", "[-v] <expr>", "display the kind of expression's type", kindCommand),
+    nullary("warnings", "show the suppressed warnings from the most recent line which had any", warningsCommand)
+  )
+
+  /** Power user commands */
+  lazy val powerCommands: List[LoopCommand] = List(
+    cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand)
+  )
+
+  private def importsCommand(line: String): Result = {
+    val tokens    = words(line)
+    val handlers  = intp.languageWildcardHandlers ++ intp.importHandlers
+
+    handlers.filterNot(_.importedSymbols.isEmpty).zipWithIndex foreach {
+      case (handler, idx) =>
+        val (types, terms) = handler.importedSymbols partition (_.name.isTypeName)
+        val imps           = handler.implicitSymbols
+        val found          = tokens filter (handler importsSymbolNamed _)
+        val typeMsg        = if (types.isEmpty) "" else types.size + " types"
+        val termMsg        = if (terms.isEmpty) "" else terms.size + " terms"
+        val implicitMsg    = if (imps.isEmpty) "" else imps.size + " are implicit"
+        val foundMsg       = if (found.isEmpty) "" else found.mkString(" // imports: ", ", ", "")
+        val statsMsg       = List(typeMsg, termMsg, implicitMsg) filterNot (_ == "") mkString ("(", ", ", ")")
+
+        intp.reporter.printMessage("%2d) %-30s %s%s".format(
+          idx + 1,
+          handler.importString,
+          statsMsg,
+          foundMsg
+        ))
+    }
+  }
+
+  private def findToolsJar() = PathResolver.SupplementalLocations.platformTools
+
+  private def addToolsJarToLoader() = {
+    val cl = findToolsJar() match {
+      case Some(tools) => ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader)
+      case _           => intp.classLoader
+    }
+    if (Javap.isAvailable(cl)) {
+      repldbg(":javap available.")
+      cl
+    }
+    else {
+      repldbg(":javap unavailable: no tools.jar at " + jdkHome)
+      intp.classLoader
+    }
+  }
+
+  protected def newJavap() =
+    JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp), Some(intp))
+
+  private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap())
+
+  // Still todo: modules.
+  private def typeCommand(line0: String): Result = {
+    line0.trim match {
+      case "" => ":type [-v] <expression>"
+      case s  => intp.typeCommandInternal(s stripPrefix "-v " trim, verbose = s startsWith "-v ")
+    }
+  }
+
+  private def kindCommand(expr: String): Result = {
+    expr.trim match {
+      case "" => ":kind [-v] <expression>"
+      case s  => intp.kindCommandInternal(s stripPrefix "-v " trim, verbose = s startsWith "-v ")
+    }
+  }
+
+  private def warningsCommand(): Result = {
+    if (intp.lastWarnings.isEmpty)
+      "Can't find any cached warnings."
+    else
+      intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) }
+  }
+
+  private def changeSettings(args: String): Result = {
+    def showSettings() = {
+      for (s <- settings.userSetSettings.toSeq.sorted) echo(s.toString)
+    }
+    def updateSettings() = {
+      // put aside +flag options
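+      // e.g. ":settings +deprecation" enables -deprecation, ":settings -deprecation" disables it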
+      val (pluses, rest) = (args split "\\s+").toList partition (_.startsWith("+"))
+      val tmps = new Settings
+      val (ok, leftover) = tmps.processArguments(rest, processAll = true)
+      if (!ok) echo("Bad settings request.")
+      else if (leftover.nonEmpty) echo("Unprocessed settings.")
+      else {
+        // boolean flags set-by-user on tmp copy should be off, not on
+        val offs = tmps.userSetSettings filter (_.isInstanceOf[Settings#BooleanSetting])
+        val (minuses, nonbools) = rest partition (arg => offs exists (_ respondsTo arg))
+        // update non-flags
+        settings.processArguments(nonbools, processAll = true)
+        // also snag multi-value options for clearing, e.g. -Ylog: and -language:
+        for {
+          s <- settings.userSetSettings
+          if s.isInstanceOf[Settings#MultiStringSetting] || s.isInstanceOf[Settings#PhasesSetting]
+          if nonbools exists (arg => arg.head == '-' && arg.last == ':' && (s respondsTo arg.init))
+        } s match {
+          case c: Clearable => c.clear()
+          case _ =>
+        }
+        def update(bs: Seq[String], name: String=>String, setter: Settings#Setting=>Unit) = {
+          for (b <- bs)
+            settings.lookupSetting(name(b)) match {
+              case Some(s) =>
+                if (s.isInstanceOf[Settings#BooleanSetting]) setter(s)
+                else echo(s"Not a boolean flag: $b")
+              case _ =>
+                echo(s"Not an option: $b")
+            }
+        }
+        update(minuses, identity, _.tryToSetFromPropertyValue("false"))  // turn off
+        update(pluses, "-" + _.drop(1), _.tryToSet(Nil))                 // turn on
+      }
+    }
+    if (args.isEmpty) showSettings() else updateSettings()
+  }
+
+  private def javapCommand(line: String): Result = {
+    if (javap == null)
+      ":javap unavailable, no tools.jar at %s.  Set JDK_HOME.".format(jdkHome)
+    else if (line == "")
+      ":javap [-lcsvp] [path1 path2 ...]"
+    else
+      javap(words(line)) foreach { res =>
+        if (res.isError) return "Failed: " + res.value
+        else res.show()
+      }
+  }
+
+  private def pathToPhaseWrapper = intp.originalPath("$r") + ".phased.atCurrent"
+
+  private def phaseCommand(name: String): Result = {
+    val phased: Phased = power.phased
+    import phased.NoPhaseName
+
+    if (name == "clear") {
+      phased.set(NoPhaseName)
+      intp.clearExecutionWrapper()
+      "Cleared active phase."
+    }
+    else if (name == "") phased.get match {
+      case NoPhaseName => "Usage: :phase <expr> (e.g. typer, erasure.next, erasure+3)"
+      case ph          => "Active phase is '%s'.  (To clear, :phase clear)".format(phased.get)
+    }
+    else {
+      val what = phased.parse(name)
+      if (what.isEmpty || !phased.set(what))
+        "'" + name + "' does not appear to represent a valid phase."
+      else {
+        intp.setExecutionWrapper(pathToPhaseWrapper)
+        val activeMessage =
+          if (what.toString.length == name.length) "" + what
+          else "%s (%s)".format(what, name)
+
+        "Active phase is now: " + activeMessage
+      }
+    }
+  }
+
+  /** Available commands */
+  def commands: List[LoopCommand] = standardCommands ++ (
+    if (isReplPower) powerCommands else Nil
+  )
+
+  val replayQuestionMessage =
+    """|That entry seems to have slain the compiler.  Shall I replay
+       |your session? I can re-run each line except the last one.
+       |[y/n]
+    """.trim.stripMargin
+
+  private val crashRecovery: PartialFunction[Throwable, Boolean] = {
+    case ex: Throwable =>
+      echo(intp.global.throwableAsString(ex))
+
+      ex match {
+        case _: NoSuchMethodError | _: NoClassDefFoundError =>
+          echo("\nUnrecoverable error.")
+          throw ex
+        case _  =>
+          def fn(): Boolean =
+            try in.readYesOrNo(replayQuestionMessage, { echo("\nYou must enter y or n.") ; fn() })
+            catch { case _: RuntimeException => false }
+
+          if (fn()) replay()
+          else echo("\nAbandoning crashed session.")
+      }
+      true
+  }
+
+  // return false if repl should exit
+  def processLine(line: String): Boolean = {
+    import scala.concurrent.duration._
+    Await.ready(globalFuture, 60.seconds)
+
+    (line ne null) && (command(line) match {
+      case Result(false, _)      => false
+      case Result(_, Some(line)) => addReplay(line) ; true
+      case _                     => true
+    })
+  }
+
+  private def readOneLine() = {
+    out.flush()
+    in readLine prompt
+  }
+
+  /** The main read-eval-print loop for the repl.  It calls
+   *  command() for each line of input, and stops when
+   *  command() returns false.
+   */
+  @tailrec final def loop() {
+    if ( try processLine(readOneLine()) catch crashRecovery )
+      loop()
+  }
+
+  /** interpret all lines from a specified file */
+  def interpretAllFrom(file: File) {
+    savingReader {
+      savingReplayStack {
+        file applyReader { reader =>
+          in = SimpleReader(reader, out, interactive = false)
+          echo("Loading " + file + "...")
+          loop()
+        }
+      }
+    }
+  }
+
+  /** create a new interpreter and replay the given commands */
+  def replay() {
+    reset()
+    if (replayCommandStack.isEmpty)
+      echo("Nothing to replay.")
+    else for (cmd <- replayCommands) {
+      echo("Replaying: " + cmd)  // flush because maybe cmd will have its own output
+      command(cmd)
+      echo("")
+    }
+  }
+  def resetCommand() {
+    echo("Resetting interpreter state.")
+    if (replayCommandStack.nonEmpty) {
+      echo("Forgetting this session history:\n")
+      replayCommands foreach echo
+      echo("")
+      replayCommandStack = Nil
+    }
+    if (intp.namedDefinedTerms.nonEmpty)
+      echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", "))
+    if (intp.definedTypes.nonEmpty)
+      echo("Forgetting defined types: " + intp.definedTypes.mkString(", "))
+
+    reset()
+  }
+  def reset() {
+    intp.reset()
+    unleashAndSetPhase()
+  }
+
+  def lineCommand(what: String): Result = editCommand(what, None)
+
+  // :edit id or :edit line
+  def editCommand(what: String): Result = editCommand(what, Properties.envOrNone("EDITOR"))
+
+  def editCommand(what: String, editor: Option[String]): Result = {
+    def diagnose(code: String) = {
+      echo("The edited code is incomplete!\n")
+      val errless = intp compileSources new BatchSourceFile("<pastie>", s"object pastel {\n$code\n}")
+      if (errless) echo("The compiler reports no errors.")
+    }
+    def historicize(text: String) = history match {
+      case jlh: JLineHistory => text.lines foreach jlh.add ; jlh.moveToEnd() ; true
+      case _ => false
+    }
+    def edit(text: String): Result = editor match {
+      case Some(ed) =>
+        val tmp = File.makeTemp()
+        tmp.writeAll(text)
+        try {
+          val pr = new ProcessResult(s"$ed ${tmp.path}")
+          pr.exitCode match {
+            case 0 =>
+              tmp.safeSlurp() match {
+                case Some(edited) if edited.trim.isEmpty => echo("Edited text is empty.")
+                case Some(edited) =>
+                  echo(edited.lines map ("+" + _) mkString "\n")
+                  val res = intp interpret edited
+                  if (res == IR.Incomplete) diagnose(edited)
+                  else {
+                    historicize(edited)
+                    Result(lineToRecord = Some(edited), keepRunning = true)
+                  }
+                case None => echo("Can't read edited text. Did you delete it?")
+              }
+            case x => echo(s"Error exit from $ed ($x), ignoring")
+          }
+        } finally {
+          tmp.delete()
+        }
+      case None =>
+        if (historicize(text)) echo("Placing text in recent history.")
+        else echo(f"No EDITOR defined and you can't change history, echoing your text:%n$text")
+    }
+
+    // if what is a number, use it as a line number or range in history
+    def isNum = what forall (c => c.isDigit || c == '-' || c == '+')
+    // except that "-" means last value
+    def isLast = (what == "-")
+    if (isLast || !isNum) {
+      val name = if (isLast) intp.mostRecentVar else what
+      val sym = intp.symbolOfIdent(name)
+      intp.prevRequestList collectFirst { case r if r.defines contains sym => r } match {
+        case Some(req) => edit(req.line)
+        case None      => echo(s"No symbol in scope: $what")
+      }
+    } else try {
+      val s = what
+      // line 123, 120+3, -3, 120-123, 120-, note -3 is not 0-3 but (cur-3,cur)
+      val (start, len) =
+        if ((s indexOf '+') > 0) {
+          val (a,b) = s splitAt (s indexOf '+')
+          (a.toInt, b.drop(1).toInt)
+        } else {
+          (s indexOf '-') match {
+            case -1 => (s.toInt, 1)
+            case 0  => val n = s.drop(1).toInt ; (history.index - n, n)
+            case _ if s.last == '-' => val n = s.init.toInt ; (n, history.index - n)
+            case i  => val n = s.take(i).toInt ; (n, s.drop(i+1).toInt - n)
+          }
+        }
+      import scala.collection.JavaConverters._
+      val index = (start - 1) max 0
+      val text = history match {
+        case jlh: JLineHistory => jlh.entries(index).asScala.take(len) map (_.value) mkString "\n"
+        case _ => history.asStrings.slice(index, index + len) mkString "\n"
+      }
+      edit(text)
+    } catch {
+      case _: NumberFormatException => echo(s"Bad range '$what'")
+        echo("Use line 123, 120+3, -3, 120-123, 120-, note -3 is not 0-3 but (cur-3,cur)")
+    }
+  }
+
+  /** fork a shell and run a command */
+  lazy val shCommand = new LoopCommand("sh", "run a shell command (result is implicitly => List[String])") {
+    override def usage = "<command line>"
+    def apply(line: String): Result = line match {
+      case ""   => showUsage()
+      case _    =>
+        val toRun = s"new ${classOf[ProcessResult].getName}(${string2codeQuoted(line)})"
+        intp interpret toRun
+        ()
+    }
+  }
+
+  def withFile[A](filename: String)(action: File => A): Option[A] = {
+    val res = Some(File(filename)) filter (_.exists) map action
+    if (res.isEmpty) echo("That file does not exist")  // courtesy side-effect
+    res
+  }
+
+  def loadCommand(arg: String) = {
+    var shouldReplay: Option[String] = None
+    withFile(arg)(f => {
+      interpretAllFrom(f)
+      shouldReplay = Some(":load " + arg)
+    })
+    Result(keepRunning = true, shouldReplay)
+  }
+
+  def saveCommand(filename: String): Result = (
+    if (filename.isEmpty) echo("File name is required.")
+    else if (replayCommandStack.isEmpty) echo("No replay commands in session")
+    else File(filename).printlnAll(replayCommands: _*)
+  )
+
+  def addClasspath(arg: String): Unit = {
+    val f = File(arg).normalize
+    if (f.exists) {
+      addedClasspath = ClassPath.join(addedClasspath, f.path)
+      val totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath)
+      echo("Added '%s'.  Your new classpath is:\n\"%s\"".format(f.path, totalClasspath))
+      replay()
+    }
+    else echo("The path '" + f + "' doesn't seem to exist.")
+  }
+
+  def powerCmd(): Result = {
+    if (isReplPower) "Already in power mode."
+    else enablePowerMode(isDuringInit = false)
+  }
+  def enablePowerMode(isDuringInit: Boolean) = {
+    replProps.power setValue true
+    unleashAndSetPhase()
+    asyncEcho(isDuringInit, power.banner)
+  }
+  private def unleashAndSetPhase() {
+    if (isReplPower) {
+      power.unleash()
+      // Set the phase to "typer"
+      intp beSilentDuring phaseCommand("typer")
+    }
+  }
+
+  def asyncEcho(async: Boolean, msg: => String) {
+    if (async) asyncMessage(msg)
+    else echo(msg)
+  }
+
+  def verbosity() = {
+    val old = intp.printResults
+    intp.printResults = !old
+    echo("Switched " + (if (old) "off" else "on") + " result printing.")
+  }
+
+  /** Run one command submitted by the user.  Two values are returned:
+    * (1) whether to keep running, (2) the line to record for replay,
+    * if any. */
+  def command(line: String): Result = {
+    if (line startsWith ":") {
+      val cmd = line.tail takeWhile (x => !x.isWhitespace)
+      uniqueCommand(cmd) match {
+        case Some(lc) => lc(line.tail stripPrefix cmd dropWhile (_.isWhitespace))
+        case _        => ambiguousError(cmd)
+      }
+    }
+    else if (intp.global == null) Result(keepRunning = false, None)  // Notice failure to create compiler
+    else Result(keepRunning = true, interpretStartingWith(line))
+  }
+
+  private def readWhile(cond: String => Boolean) = {
+    Iterator continually in.readLine("") takeWhile (x => x != null && cond(x))
+  }
+
+  def pasteCommand(arg: String): Result = {
+    var shouldReplay: Option[String] = None
+    def result = Result(keepRunning = true, shouldReplay)
+    val (raw, file) =
+      if (arg.isEmpty) (false, None)
+      else {
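+        // e.g. ":paste -raw /tmp/snip.scala" parses as flag = "-raw", name = "/tmp/snip.scala"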
+        val r = """(-raw)?(\s+)?([^\-]\S*)?""".r
+        arg match {
+          case r(flag, sep, name) =>
+            if (flag != null && name != null && sep == null)
+              echo(s"""I assume you mean "$flag $name"?""")
+            (flag != null, Option(name))
+          case _ =>
+            echo("usage: :paste -raw file")
+            return result
+        }
+      }
+    val code = file match {
+      case Some(name) =>
+        withFile(name)(f => {
+          shouldReplay = Some(s":paste $arg")
+          val s = f.slurp.trim
+          if (s.isEmpty) echo(s"File contains no code: $f")
+          else echo(s"Pasting file $f...")
+          s
+        }) getOrElse ""
+      case None =>
+        echo("// Entering paste mode (ctrl-D to finish)\n")
+        val text = (readWhile(_ => true) mkString "\n").trim
+        if (text.isEmpty) echo("\n// Nothing pasted, nothing gained.\n")
+        else echo("\n// Exiting paste mode, now interpreting.\n")
+        text
+    }
+    def interpretCode() = {
+      val res = intp interpret code
+      // if input is incomplete, let the compiler try to say why
+      if (res == IR.Incomplete) {
+        echo("The pasted code is incomplete!\n")
+        // Remembrance of Things Pasted in an object
+        val errless = intp compileSources new BatchSourceFile("<pastie>", s"object pastel {\n$code\n}")
+        if (errless) echo("...but compilation found no error? Good luck with that.")
+      }
+    }
+    def compileCode() = {
+      val errless = intp compileSources new BatchSourceFile("<pastie>", code)
+      if (!errless) echo("There were compilation errors!")
+    }
+    if (code.nonEmpty) {
+      if (raw) compileCode() else interpretCode()
+    }
+    result
+  }
+
+  private object paste extends Pasted {
+    val ContinueString = "     | "
+    val PromptString   = "scala> "
+
+    def interpret(line: String): Unit = {
+      echo(line.trim)
+      intp interpret line
+      echo("")
+    }
+
+    def transcript(start: String) = {
+      echo("\n// Detected repl transcript paste: ctrl-D to finish.\n")
+      apply(Iterator(start) ++ readWhile(_.trim != PromptString.trim))
+    }
+  }
+  import paste.{ ContinueString, PromptString }
+
+  /** Interpret expressions starting with the first line.
+    * Read lines until a complete compilation unit is available
+    * or until a syntax error has been seen.  If a full unit is
+    * read, go ahead and interpret it.  Return the full string
+    * to be recorded for replay, if any.
+    */
+  def interpretStartingWith(code: String): Option[String] = {
+    // signal to the completion object that non-completion input has been received
+    in.completion.resetVerbosity()
+
+    def reallyInterpret = {
+      val reallyResult = intp.interpret(code)
+      (reallyResult, reallyResult match {
+        case IR.Error       => None
+        case IR.Success     => Some(code)
+        case IR.Incomplete  =>
+          if (in.interactive && code.endsWith("\n\n")) {
+            echo("You typed two blank lines.  Starting a new command.")
+            None
+          }
+          else in.readLine(ContinueString) match {
+            case null =>
+              // we know compilation is going to fail since we're at EOF and the
+              // parser thinks the input is still incomplete, but since this is
+              // a file being read non-interactively we want to fail.  So we send
+              // it straight to the compiler for the nice error message.
+              intp.compileString(code)
+              None
+
+            case line => interpretStartingWith(code + "\n" + line)
+          }
+      })
+    }
+
+    /** Here we place ourselves between the user and the interpreter and examine
+     *  the input they are ostensibly submitting.  We intervene in several cases:
+     *
+     *  1) If the line starts with "scala> " it is assumed to be an interpreter paste.
+     *  2) If the line starts with "." (but not ".." or "./") it is treated as an invocation
+     *     on the previous result.
+     *  3) If the Completion object's execute returns Some(_), we inject that value
+     *     and avoid the interpreter, as it's likely not valid scala code.
+     */
+    if (code == "") None
+    else if (!paste.running && code.trim.startsWith(PromptString)) {
+      paste.transcript(code)
+      None
+    }
+    else if (Completion.looksLikeInvocation(code) && intp.mostRecentVar != "") {
+      interpretStartingWith(intp.mostRecentVar + code)
+    }
+    else if (code.trim startsWith "//") {
+      // line comment, do nothing
+      None
+    }
+    else
+      reallyInterpret._2
+  }
+
+  // runs :load `file` on any files passed via -i
+  def loadFiles(settings: Settings) = settings match {
+    case settings: GenericRunnerSettings =>
+      for (filename <- settings.loadfiles.value) {
+        val cmd = ":load " + filename
+        command(cmd)
+        addReplay(cmd)
+        echo("")
+      }
+    case _ =>
+  }
+
+  /** Tries to create a JLineReader, falling back to SimpleReader,
+   *  unless settings or properties dictate that it should start
+   *  with SimpleReader directly.
+   */
+  def chooseReader(settings: Settings): InteractiveReader = {
+    if (settings.Xnojline || Properties.isEmacsShell)
+      SimpleReader()
+    else try new JLineReader(
+      if (settings.noCompletion) NoCompletion
+      else new JLineCompletion(intp)
+    )
+    catch {
+      case ex @ (_: Exception | _: NoClassDefFoundError) =>
+        echo("Failed to created JLineReader: " + ex + "\nFalling back to SimpleReader.")
+        SimpleReader()
+    }
+  }
+
+  private def loopPostInit() {
+    // Bind intp somewhere out of the regular namespace where
+    // we can get at it in generated code.
+    intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain]))
+    // Auto-run code via some setting.
+    ( replProps.replAutorunCode.option
+        flatMap (f => io.File(f).safeSlurp())
+        foreach (intp quietRun _)
+    )
+    // classloader and power mode setup
+    intp.setContextClassLoader()
+    if (isReplPower) {
+      replProps.power setValue true
+      unleashAndSetPhase()
+      asyncMessage(power.banner)
+    }
+    // SI-7418 Now, and only now, can we enable TAB completion.
+    in match {
+      case x: JLineReader => x.consoleReader.postInit
+      case _              =>
+    }
+  }
+  def process(settings: Settings): Boolean = savingContextLoader {
+    this.settings = settings
+    createInterpreter()
+
+    // sets in to some kind of reader depending on environmental cues
+    in = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, interactive = true))
+    globalFuture = future {
+      intp.initializeSynchronous()
+      loopPostInit()
+      !intp.reporter.hasErrors
+    }
+    loadFiles(settings)
+    printWelcome()
+
+    try loop()
+    catch AbstractOrMissingHandler()
+    finally closeInterpreter()
+
+    true
+  }
+
+  @deprecated("Use `process` instead", "2.9.0")
+  def main(settings: Settings): Unit = process(settings) //used by sbt
+}
+
+object ILoop {
+  implicit def loopToInterpreter(repl: ILoop): IMain = repl.intp
+
+  // Designed primarily for use by test code: takes a String with a
+  // bunch of code, and prints out a transcript of what it would look
+  // like if you'd just typed it into the repl.
+  def runForTranscript(code: String, settings: Settings): String = {
+    import java.io.{ BufferedReader, StringReader, OutputStreamWriter }
+
+    stringFromStream { ostream =>
+      Console.withOut(ostream) {
+        val output = new JPrintWriter(new OutputStreamWriter(ostream), true) {
+          override def write(str: String) = {
+            // completely skip continuation lines
+            if (str forall (ch => ch.isWhitespace || ch == '|')) ()
+            else super.write(str)
+          }
+        }
+        val input = new BufferedReader(new StringReader(code.trim + "\n")) {
+          override def readLine(): String = {
+            val s = super.readLine()
+            // helping out by printing the line being interpreted.
+            if (s != null)
+              output.println(s)
+            s
+          }
+        }
+        val repl = new ILoop(input, output)
+        if (settings.classpath.isDefault)
+          settings.classpath.value = sys.props("java.class.path")
+
+        repl process settings
+      }
+    }
+  }
+
+  /** Creates an interpreter loop with default settings and feeds
+   *  the given code to it as input.
+   */
+  def run(code: String, sets: Settings = new Settings): String = {
+    import java.io.{ BufferedReader, StringReader, OutputStreamWriter }
+
+    stringFromStream { ostream =>
+      Console.withOut(ostream) {
+        val input    = new BufferedReader(new StringReader(code))
+        val output   = new JPrintWriter(new OutputStreamWriter(ostream), true)
+        val repl     = new ILoop(input, output)
+
+        if (sets.classpath.isDefault)
+          sets.classpath.value = sys.props("java.class.path")
+
+        repl process sets
+      }
+    }
+  }
+  def run(lines: List[String]): String = run(lines map (_ + "\n") mkString)
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
new file mode 100644
index 0000000..9c853fb
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -0,0 +1,1302 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package interpreter
+
+import PartialFunction.cond
+import scala.language.implicitConversions
+import scala.beans.BeanProperty
+import scala.collection.mutable
+import scala.concurrent.{ Future, ExecutionContext }
+import scala.reflect.runtime.{ universe => ru }
+import scala.reflect.{ ClassTag, classTag }
+import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
+import scala.tools.util.PathResolver
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
+import scala.tools.nsc.util.{ ScalaClassLoader, stringFromReader, stringFromWriter, StackTraceOps }
+import scala.tools.nsc.util.Exceptional.unwrap
+import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
+
+/** An interpreter for Scala code.
+ *
+ *  The main public entry points are compile(), interpret(), and bind().
+ *  The compile() method loads a complete Scala file.  The interpret() method
+ *  executes one line of Scala code at the request of the user.  The bind()
+ *  method binds an object to a variable that can then be used by later
+ *  interpreted code.
+ *
+ *  The overall approach is based on compiling the requested code and then
+ *  using a Java classloader and Java reflection to run the code
+ *  and access its results.
+ *
+ *  In more detail, a single compiler instance is used
+ *  to accumulate all successfully compiled or interpreted Scala code.  To
+ *  "interpret" a line of code, the compiler generates a fresh object that
+ *  includes the line of code and which has public member(s) to export
+ *  all variables defined by that code.  To extract the result of an
+ *  interpreted line to show the user, a second "result object" is created
+ *  which imports the variables exported by the above object and then
+ *  exports members called "$eval" and "$print". To accommodate user expressions
+ *  that read from variables or methods defined in previous statements, "import"
+ *  statements are used.
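+ *  For instance, interpreting `val x = 42` compiles a wrapper object exporting `x`;
+ *  a later line such as `x + 1` imports that member before being evaluated.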
+ *
+ *  This interpreter shares the strengths and weaknesses of using the
+ *  full compiler-to-Java.  The main strength is that interpreted code
+ *  behaves exactly as does compiled code, including running at full speed.
+ *  The main weakness is that redefining classes and methods is not handled
+ *  properly, because rebinding at the Java level is technically difficult.
+ *
+ *  @author Moez A. Abdel-Gawad
+ *  @author Lex Spoon
+ */
+class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Settings, protected val out: JPrintWriter) extends AbstractScriptEngine with Compilable with Imports {
+  imain =>
+
+  setBindings(createBindings, ScriptContext.ENGINE_SCOPE)
+  object replOutput extends ReplOutput(settings.Yreploutdir) { }
+
+  @deprecated("Use replOutput.dir instead", "2.11.0")
+  def virtualDirectory = replOutput.dir
+  // Used in a test case.
+  def showDirectory() = replOutput.show(out)
+
+  private[nsc] var printResults               = true      // whether to print result lines
+  private[nsc] var totalSilence               = false     // whether to print anything
+  private var _initializeComplete             = false     // compiler is initialized
+  private var _isInitialized: Future[Boolean] = null      // set up initialization future
+  private var bindExceptions                  = true      // whether to bind the lastException variable
+  private var _executionWrapper               = ""        // code to be wrapped around all lines
+
+  /** We're going to go to some trouble to initialize the compiler asynchronously.
+   *  It's critical that nothing call into it until it's been initialized or we will
+   *  run into unrecoverable issues, but the perceived repl startup time goes
+   *  through the roof if we wait for it.  So we initialize it with a future and
+   *  use a lazy val to ensure that any attempt to use the compiler object waits
+   *  on the future.
+   */
+  private var _classLoader: util.AbstractFileClassLoader = null                              // active classloader
+  private val _compiler: ReplGlobal                 = newCompiler(settings, reporter)   // our private compiler
+
+  def compilerClasspath: Seq[java.net.URL] = (
+    if (isInitializeComplete) global.classPath.asURLs
+    else new PathResolver(settings).result.asURLs  // the compiler's classpath
+  )
+  def settings = initialSettings
+  // Run the code body with the given boolean settings flipped to true.
+  def withoutWarnings[T](body: => T): T = beQuietDuring {
+    val saved = settings.nowarn.value
+    if (!saved)
+      settings.nowarn.value = true
+
+    try body
+    finally if (!saved) settings.nowarn.value = false
+  }
+
+  /** construct an interpreter that reports to Console */
+  def this(settings: Settings, out: JPrintWriter) = this(null, settings, out)
+  def this(factory: ScriptEngineFactory, settings: Settings) = this(factory, settings, new NewLinePrintWriter(new ConsoleWriter, true))
+  def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
+  def this(factory: ScriptEngineFactory) = this(factory, new Settings())
+  def this() = this(new Settings())
+
+  lazy val formatting: Formatting = new Formatting {
+    val prompt = Properties.shellPromptString
+  }
+  lazy val reporter: ReplReporter = new ReplReporter(this)
+
+  import formatting._
+  import reporter.{ printMessage, withoutTruncating }
+
+  // This exists mostly because using the reporter too early leads to deadlock.
+  private def echo(msg: String) { Console println msg }
+  private def _initSources = List(new BatchSourceFile("<init>", "class $repl_$init { }"))
+  private def _initialize() = {
+    try {
+      // todo. if this crashes, REPL will hang
+      new _compiler.Run() compileSources _initSources
+      _initializeComplete = true
+      true
+    }
+    catch AbstractOrMissingHandler()
+  }
+  private def tquoted(s: String) = "\"\"\"" + s + "\"\"\""
+  private val logScope = scala.sys.props contains "scala.repl.scope"
+  private def scopelog(msg: String) = if (logScope) Console.err.println(msg)
+
+  // argument is a thunk to execute after init is done
+  def initialize(postInitSignal: => Unit) {
+    synchronized {
+      if (_isInitialized == null) {
+        _isInitialized =
+          Future(try _initialize() finally postInitSignal)(ExecutionContext.global)
+      }
+    }
+  }
+  def initializeSynchronous(): Unit = {
+    if (!isInitializeComplete) {
+      _initialize()
+      assert(global != null, global)
+    }
+  }
+  def isInitializeComplete = _initializeComplete
+
+  lazy val global: Global = {
+    if (!isInitializeComplete) _initialize()
+    _compiler
+  }
+
+  import global._
+  import definitions.{ ObjectClass, termMember, dropNullaryMethod}
+
+  lazy val runtimeMirror = ru.runtimeMirror(classLoader)
+
+  private def noFatal(body: => Symbol): Symbol = try body catch { case _: FatalError => NoSymbol }
+
+  def getClassIfDefined(path: String)  = (
+           noFatal(runtimeMirror staticClass path)
+    orElse noFatal(rootMirror staticClass path)
+  )
+  def getModuleIfDefined(path: String) = (
+           noFatal(runtimeMirror staticModule path)
+    orElse noFatal(rootMirror staticModule path)
+  )
+
+  implicit class ReplTypeOps(tp: Type) {
+    def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp)
+  }
+
+  // TODO: If we try to make naming a lazy val, we run into big time
+  // scalac unhappiness with what look like cycles.  It has not been easy to
+  // reduce, but name resolution clearly takes different paths.
+  object naming extends {
+    val global: imain.global.type = imain.global
+  } with Naming {
+    // make sure we don't overwrite their unwisely named res3 etc.
+    def freshUserTermName(): TermName = {
+      val name = newTermName(freshUserVarName())
+      if (replScope containsName name) freshUserTermName()
+      else name
+    }
+    def isInternalTermName(name: Name) = isInternalVarName("" + name)
+  }
+  import naming._
+
+  object deconstruct extends {
+    val global: imain.global.type = imain.global
+  } with StructuredTypeStrings
+
+  lazy val memberHandlers = new {
+    val intp: imain.type = imain
+  } with MemberHandlers
+  import memberHandlers._
+
+  /** Temporarily be quiet */
+  def beQuietDuring[T](body: => T): T = {
+    val saved = printResults
+    printResults = false
+    try body
+    finally printResults = saved
+  }
+  def beSilentDuring[T](operation: => T): T = {
+    val saved = totalSilence
+    totalSilence = true
+    try operation
+    finally totalSilence = saved
+  }
+
+  def quietRun[T](code: String) = beQuietDuring(interpret(code))
+
+  /** takes AnyRef because it may be binding a Throwable or an Exceptional */
+  private def withLastExceptionLock[T](body: => T, alt: => T): T = {
+    assert(bindExceptions, "withLastExceptionLock called incorrectly.")
+    bindExceptions = false
+
+    try     beQuietDuring(body)
+    catch   logAndDiscard("withLastExceptionLock", alt)
+    finally bindExceptions = true
+  }
+
+  def executionWrapper = _executionWrapper
+  def setExecutionWrapper(code: String) = _executionWrapper = code
+  def clearExecutionWrapper() = _executionWrapper = ""
+
+  /** interpreter settings */
+  lazy val isettings = new ISettings(this)
+
+  /** Instantiate a compiler.  Overridable. */
+  protected def newCompiler(settings: Settings, reporter: reporters.Reporter): ReplGlobal = {
+    settings.outputDirs setSingleOutput replOutput.dir
+    settings.exposeEmptyPackage.value = true
+    new Global(settings, reporter) with ReplGlobal { override def toString: String = "<global>" }
+  }
+
+  /** Parent classloader.  Overridable. */
+  protected def parentClassLoader: ClassLoader =
+    settings.explicitParentLoader.getOrElse( this.getClass.getClassLoader() )
+
+  /* A single class loader is used for all commands interpreted by this Interpreter.
+     It would also be possible to create a new class loader for each command
+     to interpret.  The advantages of the current approach are:
+
+       - Expressions are only evaluated one time.  This is especially
+         significant for I/O, e.g. "val x = Console.readLine"
+
+     The main disadvantage is:
+
+       - Objects, classes, and methods cannot be rebound.  Instead, definitions
+         shadow the old ones, and old code objects refer to the old
+         definitions.
+  */
+  def resetClassLoader() = {
+    repldbg("Setting new classloader: was " + _classLoader)
+    _classLoader = null
+    ensureClassLoader()
+  }
+  final def ensureClassLoader() {
+    if (_classLoader == null)
+      _classLoader = makeClassLoader()
+  }
+  def classLoader: util.AbstractFileClassLoader = {
+    ensureClassLoader()
+    _classLoader
+  }
+
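+  // e.g. backticked("scala.type.Foo") yields "scala.`type`.Foo": keyword path segments are quoted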
+  def backticked(s: String): String = (
+    (s split '.').toList map {
+      case "_"                               => "_"
+      case s if nme.keywords(newTermName(s)) => s"`$s`"
+      case s                                 => s
+    } mkString "."
+  )
+  def readRootPath(readPath: String) = getModuleIfDefined(readPath)
+
+  abstract class PhaseDependentOps {
+    def shift[T](op: => T): T
+
+    def path(name: => Name): String = shift(path(symbolOfName(name)))
+    def path(sym: Symbol): String = backticked(shift(sym.fullName))
+    def sig(sym: Symbol): String  = shift(sym.defString)
+  }
+  object typerOp extends PhaseDependentOps {
+    def shift[T](op: => T): T = exitingTyper(op)
+  }
+  object flatOp extends PhaseDependentOps {
+    def shift[T](op: => T): T = exitingFlatten(op)
+  }
+
+  def originalPath(name: String): String = originalPath(name: TermName)
+  def originalPath(name: Name): String   = typerOp path name
+  def originalPath(sym: Symbol): String  = typerOp path sym
+  def flatPath(sym: Symbol): String      = flatOp shift sym.javaClassName
+  def translatePath(path: String) = {
+    val sym = if (path endsWith "$") symbolOfTerm(path.init) else symbolOfIdent(path)
+    sym.toOption map flatPath
+  }
+  def translateEnclosingClass(n: String) = symbolOfTerm(n).enclClass.toOption map flatPath
+
+  private class TranslatingClassLoader(parent: ClassLoader) extends util.AbstractFileClassLoader(replOutput.dir, parent) {
+    /** Overridden here to try translating a simple name to the generated
+     *  class name if the original attempt fails.  This method is used by
+     *  getResourceAsStream as well as findClass.
+     */
+    override protected def findAbstractFile(name: String): AbstractFile =
+      super.findAbstractFile(name) match {
+        case null if _initializeComplete => translatePath(name) map (super.findAbstractFile(_)) orNull
+        case file => file
+      }
+  }
+  private def makeClassLoader(): util.AbstractFileClassLoader =
+    new TranslatingClassLoader(parentClassLoader match {
+      case null   => ScalaClassLoader fromURLs compilerClasspath
+      case p      => new ScalaClassLoader.URLClassLoader(compilerClasspath, p)
+    })
+
+  // Set the current Java "context" class loader to this interpreter's class loader
+  def setContextClassLoader() = classLoader.setAsContext()
+
+  def allDefinedNames: List[Name]  = exitingTyper(replScope.toList.map(_.name).sorted)
+  def unqualifiedIds: List[String] = allDefinedNames map (_.decode) sorted
+
+  /** Most recent tree handled which wasn't wholly synthetic. */
+  private def mostRecentlyHandledTree: Option[Tree] = {
+    prevRequests.reverse foreach { req =>
+      req.handlers.reverse foreach {
+        case x: MemberDefHandler if x.definesValue && !isInternalTermName(x.name) => return Some(x.member)
+        case _ => ()
+      }
+    }
+    None
+  }
+
+  private def updateReplScope(sym: Symbol, isDefined: Boolean) {
+    def log(what: String) {
+      val mark = if (sym.isType) "t " else "v "
+      val name = exitingTyper(sym.nameString)
+      val info = cleanTypeAfterTyper(sym)
+      val defn = sym defStringSeenAs info
+
+      scopelog(f"[$mark$what%6s] $name%-25s $defn%s")
+    }
+    if (ObjectClass isSubClass sym.owner) return
+    // unlink previous
+    replScope lookupAll sym.name foreach { sym =>
+      log("unlink")
+      replScope unlink sym
+    }
+    val what = if (isDefined) "define" else "import"
+    log(what)
+    replScope enter sym
+  }
+
+  def recordRequest(req: Request) {
+    if (req == null)
+      return
+
+    prevRequests += req
+
+    // warning about serially defining companions.  It'd be easy
+    // enough to just redefine them together but that may not always
+    // be what people want so I'm waiting until I can do it better.
+    exitingTyper {
+      req.defines filterNot (s => req.defines contains s.companionSymbol) foreach { newSym =>
+        val oldSym = replScope lookup newSym.name.companionName
+        if (Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule }) {
+          replwarn(s"warning: previously defined $oldSym is not a companion to $newSym.")
+          replwarn("Companions must be defined together; you may wish to use :paste mode for this.")
+        }
+      }
+    }
+    exitingTyper {
+      req.imports foreach (sym => updateReplScope(sym, isDefined = false))
+      req.defines foreach (sym => updateReplScope(sym, isDefined = true))
+    }
+  }
+
+  private[nsc] def replwarn(msg: => String) {
+    if (!settings.nowarnings)
+      printMessage(msg)
+  }
+
+  def compileSourcesKeepingRun(sources: SourceFile*) = {
+    val run = new Run()
+    reporter.reset()
+    run compileSources sources.toList
+    (!reporter.hasErrors, run)
+  }
+
+  /** Compile an nsc SourceFile.  Returns true if there are
+   *  no compilation errors, or false otherwise.
+   */
+  def compileSources(sources: SourceFile*): Boolean =
+    compileSourcesKeepingRun(sources: _*)._1
+
+  /** Compile a string.  Returns true if there are no
+   *  compilation errors, or false otherwise.
+   */
+  def compileString(code: String): Boolean =
+    compileSources(new BatchSourceFile("<script>", code))
+
+  /** Build a request from the user. `trees` is `line` after being parsed.
+   */
+  private def buildRequest(line: String, trees: List[Tree]): Request = {
+    executingRequest = new Request(line, trees)
+    executingRequest
+  }
+
+  private def safePos(t: Tree, alt: Int): Int =
+    try t.pos.start
+    catch { case _: UnsupportedOperationException => alt }
+
+  // Given an expression like 10 * 10 * 10 we receive the parent tree positioned
+  // at a '*'.  So look at each subtree and find the earliest of all positions.
+  private def earliestPosition(tree: Tree): Int = {
+    var pos = Int.MaxValue
+    tree foreach { t =>
+      pos = math.min(pos, safePos(t, Int.MaxValue))
+    }
+    pos
+  }
+
+  private def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
+    val content = indentCode(line)
+    val trees = parse(content) match {
+      case parse.Incomplete     => return Left(IR.Incomplete)
+      case parse.Error          => return Left(IR.Error)
+      case parse.Success(trees) => trees
+    }
+    repltrace(
+      trees map (t => {
+        // [Eugene to Paul] previously it just said `t map ...`
+        // because there was an implicit conversion from Tree to a list of Trees
+        // however Martin and I have removed the conversion
+        // (it was conflicting with the new reflection API),
+        // so I had to rewrite this a bit
+        val subs = t collect { case sub => sub }
+        subs map (t0 =>
+          "  " + safePos(t0, -1) + ": " + t0.shortClass + "\n"
+        ) mkString ""
+      }) mkString "\n"
+    )
+    // If the last tree is a bare expression, pinpoint where it begins using the
+    // AST node position and snap the line off there.  Rewrite the code embodied
+    // by the last tree as a ValDef instead, so we can access the value.
+    val last = trees.lastOption.getOrElse(EmptyTree)
+    last match {
+      case _:Assign                        => // we don't want to include assignments
+      case _:TermTree | _:Ident | _:Select => // ... but do want other unnamed terms.
+        val varName  = if (synthetic) freshInternalVarName() else freshUserVarName()
+        val rewrittenLine = (
+          // In theory this would come out the same without the 1-specific test, but
+          // it's a cushion against any more sneaky parse-tree position vs. code mismatches:
+          // this way such issues will only arise on multiple-statement repl input lines,
+          // which most people don't use.
+          if (trees.size == 1) "val " + varName + " =\n" + content
+          else {
+            // The position of the last tree
+            val lastpos0 = earliestPosition(last)
+            // Oh boy, the parser throws away parens so "(2+2)" is mispositioned,
+            // with increasingly hard to decipher positions as we move on to "() => 5",
+            // (x: Int) => x + 1, and more.  So I abandon attempts to finesse and just
+            // look for semicolons and newlines, which I'm sure is also buggy.
+            val (raw1, raw2) = content splitAt lastpos0
+            repldbg("[raw] " + raw1 + "   <--->   " + raw2)
+
+            val adjustment = (raw1.reverse takeWhile (ch => (ch != ';') && (ch != '\n'))).size
+            val lastpos = lastpos0 - adjustment
+
+            // the source code split at the laboriously determined position.
+            val (l1, l2) = content splitAt lastpos
+            repldbg("[adj] " + l1 + "   <--->   " + l2)
+
+            val prefix   = if (l1.trim == "") "" else l1 + ";\n"
+            // Note to self: val source needs to have this precise structure so that
+            // error messages print the user-submitted part without the "val res0 = " part.
+            val combined   = prefix + "val " + varName + " =\n" + l2
+
+            repldbg(List(
+              "    line" -> line,
+              " content" -> content,
+              "     was" -> l2,
+              "combined" -> combined) map {
+                case (label, s) => label + ": '" + s + "'"
+              } mkString "\n"
+            )
+            combined
+          }
+        )
+        // Rewriting    "foo ; bar ; 123"
+        // to           "foo ; bar ; val resXX = 123"
+        requestFromLine(rewrittenLine, synthetic) match {
+          case Right(req) => return Right(req withOriginalLine line)
+          case x          => return x
+        }
+      case _ =>
+    }
+    Right(buildRequest(line, trees))
+  }
+
+  // dealias non-public types so we don't see protected aliases like Self
+  def dealiasNonPublic(tp: Type) = tp match {
+    case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.dealias
+    case _                                                      => tp
+  }
+
+  /**
+   *  Interpret one line of input. All feedback, including parse errors
+   *  and evaluation results, is printed via the supplied compiler's
+   *  reporter. Values defined are available for future interpreted strings.
+   *
+   *  The return value is whether the line was interpreted successfully,
+   *  e.g. that there were no parse errors.
+   */
+  def interpret(line: String): IR.Result = interpret(line, synthetic = false)
+  def interpretSynthetic(line: String): IR.Result = interpret(line, synthetic = true)
+  def interpret(line: String, synthetic: Boolean): IR.Result = compile(line, synthetic) match {
+    case Left(result) => result
+    case Right(req)   => new WrappedRequest(req).loadAndRunReq
+  }
+
+  private def compile(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
+    if (global == null) Left(IR.Error)
+    else requestFromLine(line, synthetic) match {
+      case Left(result) => Left(result)
+      case Right(req)   =>
+       // null indicates a disallowed statement type; otherwise compile and
+       // fail if false (implying e.g. a type error)
+       if (req == null || !req.compile) Left(IR.Error) else Right(req)
+    }
+  }
+
+  var code = ""
+  var bound = false
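+  // Incomplete input is accumulated in `code` across calls, so e.g. compiled("class C {")
+  // returns a no-op script and a later call sees the pending text prepended.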
+  def compiled(script: String): CompiledScript = {
+    if (!bound) {
+      quietBind("engine" -> this.asInstanceOf[ScriptEngine])
+      bound = true
+    }
+    val cat = code + script
+    compile(cat, false) match {
+      case Left(result) => result match {
+        case IR.Incomplete => {
+          code = cat + "\n"
+          new CompiledScript {
+            def eval(context: ScriptContext): Object = null
+            def getEngine: ScriptEngine = IMain.this
+          }
+        }
+        case _ => {
+          code = ""
+          throw new ScriptException("compile-time error")
+        }
+      }
+      case Right(req)   => {
+        code = ""
+        new WrappedRequest(req)
+      }
+    }
+  }
+
+  private class WrappedRequest(val req: Request) extends CompiledScript {
+    var recorded = false
+
+    /** In Java we would have to wrap any checked exception in the declared
+     *  ScriptException. Runtime exceptions and errors would be ok and would
+     *  not need to be caught. So let us do the same in Scala: catch and
+     *  wrap any checked exception, and let runtime exceptions and errors
+     *  escape. We could have wrapped runtime exceptions in ScriptException
+     *  just like other exceptions; this is a design choice.
+     */
+    @throws[ScriptException]
+    def eval(context: ScriptContext): Object = {
+      val result = req.lineRep.evalEither match {
+        case Left(e: RuntimeException) => throw e
+        case Left(e: Exception) => throw new ScriptException(e)
+        case Left(e) => throw e
+        case Right(result) => result.asInstanceOf[Object]
+      }
+      if (!recorded) {
+        recordRequest(req)
+        recorded = true
+      }
+      result
+    }
+
+    def loadAndRunReq = classLoader.asContext {
+      val (result, succeeded) = req.loadAndRun
+
+      /** To our displeasure, ConsoleReporter offers only printMessage,
+       *  which tacks a newline on the end.  Since that breaks all the
+       *  output checking, we have to take one off to balance.
+       */
+      if (succeeded) {
+        if (printResults && result != "")
+          printMessage(result stripSuffix "\n")
+        else if (isReplDebug) // show quiet-mode activity
+          printMessage(result.trim.lines map ("[quiet] " + _) mkString "\n")
+
+        // Book-keeping.  Have to record synthetic requests too,
+        // as they may have been issued for information, e.g. :type
+        recordRequest(req)
+        IR.Success
+      }
+      else {
+        // don't truncate stack traces
+        withoutTruncating(printMessage(result))
+        IR.Error
+      }
+    }
+
+    def getEngine: ScriptEngine = IMain.this
+  }
+
+  /** Bind a specified name to a specified value.  The name may
+   *  later be used by expressions passed to interpret.
+   *
+   *  @param name      the variable name to bind
+   *  @param boundType the type of the variable, as a string
+   *  @param value     the object value to bind to it
+   *  @param modifiers modifiers (e.g. "implicit") prepended to the generated val
+   *  @return          an indication of whether the binding succeeded
+   */
+  def bind(name: String, boundType: String, value: Any, modifiers: List[String] = Nil): IR.Result = {
+    val bindRep = new ReadEvalPrint()
+    bindRep.compile("""
+        |object %s {
+        |  var value: %s = _
+        |  def set(x: Any) = value = x.asInstanceOf[%s]
+        |}
+      """.stripMargin.format(bindRep.evalName, boundType, boundType)
+      )
+    bindRep.callEither("set", value) match {
+      case Left(ex) =>
+        repldbg("Set failed in bind(%s, %s, %s)".format(name, boundType, value))
+        repldbg(util.stackTraceString(ex))
+        IR.Error
+
+      case Right(_) =>
+        val line = "%sval %s = %s.value".format(modifiers map (_ + " ") mkString, name, bindRep.evalPath)
+        repldbg("Interpreting: " + line)
+        interpret(line)
+    }
+  }
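+
+  // Illustrative use of bind (assumed REPL session, not from upstream): after
+  //   intp.bind("ten", "Int", 10)
+  // a later intp.interpret("ten * 2") can refer to `ten` and prints "res0: Int = 20".
+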
+  def directBind(name: String, boundType: String, value: Any): IR.Result = {
+    val result = bind(name, boundType, value)
+    if (result == IR.Success)
+      directlyBoundNames += newTermName(name)
+    result
+  }
+  def directBind(p: NamedParam): IR.Result                                    = directBind(p.name, p.tpe, p.value)
+  def directBind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = directBind((name, value))
+
+  def rebind(p: NamedParam): IR.Result = {
+    val name     = p.name
+    val newType  = p.tpe
+    val tempName = freshInternalVarName()
+
+    quietRun("val %s = %s".format(tempName, name))
+    quietRun("val %s = %s.asInstanceOf[%s]".format(name, tempName, newType))
+  }
+  def quietBind(p: NamedParam): IR.Result                               = beQuietDuring(bind(p))
+  def bind(p: NamedParam): IR.Result                                    = bind(p.name, p.tpe, p.value)
+  def bind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = bind((name, value))
+
+  /** Reset this interpreter, forgetting all user-specified requests. */
+  def reset() {
+    clearExecutionWrapper()
+    resetClassLoader()
+    resetAllCreators()
+    prevRequests.clear()
+    resetReplScope()
+    replOutput.dir.clear()
+  }
+
+  /** This instance is no longer needed, so release any resources
+   *  it is using.  The reporter's output gets flushed.
+   */
+  def close() {
+    reporter.flush()
+  }
+
+  /** Here is where we:
+   *
+   *  1) Read some source code, and put it in the "read" object.
+   *  2) Evaluate the read object, and put the result in the "eval" object.
+   *  3) Create a String for human consumption, and put it in the "print" object.
+   *
+   *  Read! Eval! Print! Some of that is not yet centralized here.
+   */
+  class ReadEvalPrint(val lineId: Int) {
+    def this() = this(freshLineId())
+
+    val packageName = sessionNames.line + lineId
+    val readName    = sessionNames.read
+    val evalName    = sessionNames.eval
+    val printName   = sessionNames.print
+    val resultName  = sessionNames.result
+
+    def bindError(t: Throwable) = {
+      if (!bindExceptions) // avoid looping if already binding
+        throw t
+
+      val unwrapped = unwrap(t)
+
+      // Example input: $line3.$read$$iw$$iw$
+      val classNameRegex = (naming.lineRegex + ".*").r
+      def isWrapperInit(x: StackTraceElement) = cond(x.getClassName) {
+        case classNameRegex() if x.getMethodName == nme.CONSTRUCTOR.decoded => true
+      }
+      val stackTrace = unwrapped stackTracePrefixString (!isWrapperInit(_))
+
+      withLastExceptionLock[String]({
+        directBind[Throwable]("lastException", unwrapped)(StdReplTags.tagOfThrowable, classTag[Throwable])
+        stackTrace
+      }, stackTrace)
+    }
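+
+    // Illustrative (not from upstream): because the unwrapped throwable is bound to
+    // `lastException`, the next interpreted line can inspect it, e.g.
+    //   scala> lastException.printStackTrace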
+
+    // TODO: split it out into a package object and a regular
+    // object and we can do that much less wrapping.
+    def packageDecl = "package " + packageName
+
+    def pathTo(name: String)   = packageName + "." + name
+    def packaged(code: String) = packageDecl + "\n\n" + code
+
+    def readPath  = pathTo(readName)
+    def evalPath  = pathTo(evalName)
+
+    def call(name: String, args: Any*): AnyRef = {
+      val m = evalMethod(name)
+      repldbg("Invoking: " + m)
+      if (args.nonEmpty)
+        repldbg("  with args: " + args.mkString(", "))
+
+      m.invoke(evalClass, args.map(_.asInstanceOf[AnyRef]): _*)
+    }
+
+    def callEither(name: String, args: Any*): Either[Throwable, AnyRef] =
+      try Right(call(name, args: _*))
+      catch { case ex: Throwable => Left(ex) }
+
+    class EvalException(msg: String, cause: Throwable) extends RuntimeException(msg, cause) { }
+
+    private def evalError(path: String, ex: Throwable) =
+      throw new EvalException("Failed to load '" + path + "': " + ex.getMessage, ex)
+
+    private def load(path: String): Class[_] = {
+      try Class.forName(path, true, classLoader)
+      catch { case ex: Throwable => evalError(path, unwrap(ex)) }
+    }
+
+    lazy val evalClass = load(evalPath)
+
+    def evalEither = callEither(resultName) match {
+      case Left(ex) => ex match {
+          case ex: NullPointerException => Right(null)
+          case ex => Left(unwrap(ex))
+      }
+      case Right(result) => Right(result)
+    }
+
+    def compile(source: String): Boolean = compileAndSaveRun("<console>", source)
+
+    /** The innermost object inside the wrapper, found by
+      * following accessPath into the outer one.
+      */
+    def resolvePathToSymbol(accessPath: String): Symbol = {
+      val readRoot = readRootPath(readPath) // the outermost wrapper
+      (accessPath split '.').foldLeft(readRoot: Symbol) {
+        case (sym, "")    => sym
+        case (sym, name)  => exitingTyper(termMember(sym, name))
+      }
+    }
+    /** We get a bunch of repeated warnings for reasons I haven't
+     *  entirely figured out yet.  For now, squash.
+     */
+    private def updateRecentWarnings(run: Run) {
+      def loop(xs: List[(Position, String)]): List[(Position, String)] = xs match {
+        case Nil                  => Nil
+        case ((pos, msg)) :: rest =>
+          val filtered = rest filter { case (pos0, msg0) =>
+            (msg != msg0) || (pos.lineContent.trim != pos0.lineContent.trim) || {
+              // same messages and same line content after whitespace removal
+              // but we want to let through multiple warnings on the same line
+              // from the same run.  The untrimmed line will be the same since
+              // there's no whitespace indenting to throw it off.
+              (pos.lineContent == pos0.lineContent)
+            }
+          }
+          ((pos, msg)) :: loop(filtered)
+      }
+      val warnings = loop(run.allConditionalWarnings flatMap (_.warnings))
+      if (warnings.nonEmpty)
+        mostRecentWarnings = warnings
+    }
+    private def evalMethod(name: String) = evalClass.getMethods filter (_.getName == name) match {
+      case Array()       => null
+      case Array(method) => method
+      case xs            => sys.error("Internal error: eval object " + evalClass + ", " + xs.mkString("\n", "\n", ""))
+    }
+    private def compileAndSaveRun(label: String, code: String) = {
+      showCodeIfDebugging(code)
+      val (success, run) = compileSourcesKeepingRun(new BatchSourceFile(label, packaged(code)))
+      updateRecentWarnings(run)
+      success
+    }
+  }
+
+  /** One line of code submitted by the user for interpretation */
+  class Request(val line: String, val trees: List[Tree]) {
+    def defines    = defHandlers flatMap (_.definedSymbols)
+    def imports    = importedSymbols
+    def value      = Some(handlers.last) filter (h => h.definesValue) map (h => definedSymbols(h.definesTerm.get)) getOrElse NoSymbol
+
+    val lineRep = new ReadEvalPrint()
+
+    private var _originalLine: String = null
+    def withOriginalLine(s: String): this.type = { _originalLine = s ; this }
+    def originalLine = if (_originalLine == null) line else _originalLine
+
+    /** handlers for each tree in this request */
+    val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _)
+    def defHandlers = handlers collect { case x: MemberDefHandler => x }
+
+    /** list of names used by this expression */
+    val referencedNames: List[Name] = handlers flatMap (_.referencedNames)
+
+    /** def and val names */
+    def termNames = handlers flatMap (_.definesTerm)
+    def typeNames = handlers flatMap (_.definesType)
+    def importedSymbols = handlers flatMap {
+      case x: ImportHandler => x.importedSymbols
+      case _                => Nil
+    }
+
+    /** Code to import bound names from previous lines - accessPath is code to
+      * append to objectName to access anything bound by request.
+      */
+    lazy val ComputedImports(importsPreamble, importsTrailer, accessPath) =
+      exitingTyper(importsCode(referencedNames.toSet, ObjectSourceCode))
+
+    /** the line of code to compute */
+    def toCompute = line
+
+    /** The path of the value that contains the user code. */
+    def fullAccessPath = s"${lineRep.readPath}$accessPath"
+
+    /** The path of the given member of the wrapping instance. */
+    def fullPath(vname: String) = s"$fullAccessPath.`$vname`"
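+
+    // Illustrative values (assumed, not from upstream): for the third request with an
+    // object-based wrapper and two import wrappers, readPath is "$line3.$read" and
+    // accessPath is ".$iw.$iw", so fullPath("x") yields "$line3.$read.$iw.$iw.`x`".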
+
+    /** generate the source code for the object that computes this request */
+    abstract class Wrapper extends IMain.CodeAssembler[MemberHandler] {
+      def path = originalPath("$intp")
+      def envLines = {
+        if (!isReplPower) Nil // power mode only for now
+        else List("def %s = %s".format("$line", tquoted(originalLine)), "def %s = Nil".format("$trees"))
+      }
+      def preamble = s"""
+        |$preambleHeader
+        |%s%s%s
+      """.stripMargin.format(lineRep.readName, envLines.map("  " + _ + ";\n").mkString,
+        importsPreamble, indentCode(toCompute))
+
+      val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
+
+      /** A format string with %s for $read, specifying the wrapper definition. */
+      def preambleHeader: String
+
+      /** Like preambleHeader for an import wrapper. */
+      def prewrap: String = preambleHeader + "\n"
+
+      /** Like postamble for an import wrapper. */
+      def postwrap: String
+    }
+
+    private class ObjectBasedWrapper extends Wrapper {
+      def preambleHeader = "object %s {"
+
+      def postamble = importsTrailer + "\n}"
+
+      def postwrap = "}\n"
+    }
+
+    private class ClassBasedWrapper extends Wrapper {
+      def preambleHeader = "class %s extends Serializable {"
+
+      /** Adds an object that instantiates the outer wrapping class. */
+      def postamble  = s"""$importsTrailer
+                          |}
+                          |object ${lineRep.readName} extends ${lineRep.readName}
+                          |""".stripMargin
+
+      import nme.{ INTERPRETER_IMPORT_WRAPPER => iw }
+
+      /** Adds a val that instantiates the wrapping class. */
+      def postwrap = s"}\nval $iw = new $iw\n"
+    }
+
+    private lazy val ObjectSourceCode: Wrapper =
+      if (settings.Yreplclassbased) new ClassBasedWrapper else new ObjectBasedWrapper
+
+    private object ResultObjectSourceCode extends IMain.CodeAssembler[MemberHandler] {
+      /** We only want to generate this code when the result
+       *  is a value which can be referred to as-is.
+       */
+      val evalResult = Request.this.value match {
+        case NoSymbol => ""
+        case sym      => "lazy val %s = %s".format(lineRep.resultName, originalPath(sym))
+      }
+      // first line evaluates object to make sure constructor is run
+      // initial "" so the result-extraction code that follows can uniformly begin with "+ ..."
+      val preamble = """
+      |object %s {
+      |  %s
+      |  lazy val %s: String = %s {
+      |    %s
+      |    (""
+      """.stripMargin.format(
+        lineRep.evalName, evalResult, lineRep.printName,
+        executionWrapper, fullAccessPath
+      )
+
+      val postamble = """
+      |    )
+      |  }
+      |}
+      """.stripMargin
+      val generate = (m: MemberHandler) => m resultExtractionCode Request.this
+    }
+
+    /** Compile the object file.  Returns whether the compilation succeeded.
+     *  If all goes well, the "types" map is computed. */
+    lazy val compile: Boolean = {
+      // error counting is wrong, hence interpreter may overlook failure - so we reset
+      reporter.reset()
+
+      // compile the object containing the user's code
+      lineRep.compile(ObjectSourceCode(handlers)) && {
+        // extract and remember types
+        typeOf
+        typesOfDefinedTerms
+
+        // Assign symbols to the original trees
+        // TODO - just use the new trees.
+        defHandlers foreach { dh =>
+          val name = dh.member.name
+          definedSymbols get name foreach { sym =>
+            dh.member setSymbol sym
+            repldbg("Set symbol of " + name + " to " + symbolDefString(sym))
+          }
+        }
+
+        // compile the result-extraction object
+        val handls = if (printResults) handlers else Nil
+        withoutWarnings(lineRep compile ResultObjectSourceCode(handls))
+      }
+    }
+
+    lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath)
+    def applyToResultMember[T](name: Name, f: Symbol => T) = exitingTyper(f(resultSymbol.info.nonPrivateDecl(name)))
+
+    /* typeOf lookup with encoding */
+    def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString)))
+
+    private def typeMap[T](f: Type => T) =
+      mapFrom[Name, Name, T](termNames ++ typeNames)(x => f(cleanMemberDecl(resultSymbol, x)))
+
+    /** Types of variables defined by this request. */
+    lazy val compilerTypeOf = typeMap[Type](x => x) withDefaultValue NoType
+    /** String representations of same. */
+    lazy val typeOf         = typeMap[String](tp => exitingTyper(tp.toString))
+
+    lazy val definedSymbols = (
+      termNames.map(x => x -> applyToResultMember(x, x => x)) ++
+      typeNames.map(x => x -> compilerTypeOf(x).typeSymbolDirect)
+    ).toMap[Name, Symbol] withDefaultValue NoSymbol
+
+    lazy val typesOfDefinedTerms = mapFrom[Name, Name, Type](termNames)(x => applyToResultMember(x, _.tpe))
+
+    /** load and run the code using reflection */
+    def loadAndRun: (String, Boolean) = {
+      try   { ("" + (lineRep call sessionNames.print), true) }
+      catch { case ex: Throwable => (lineRep.bindError(ex), false) }
+    }
+
+    override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
+  }
+
+  def createBindings: Bindings = new IBindings {
+    override def put(name: String, value: Object): Object = {
+      val n = name.indexOf(":")
+      val p: NamedParam = if (n < 0) (name, value) else {
+        val nme = name.substring(0, n).trim
+        val tpe = name.substring(n + 1).trim
+        NamedParamClass(nme, tpe, value)
+      }
+      if (!p.name.startsWith("javax.script")) bind(p)
+      null
+    }
+  }
+
+  @throws[ScriptException]
+  def compile(script: String): CompiledScript = eval("new javax.script.CompiledScript { def eval(context: javax.script.ScriptContext): Object = { " + script + " }.asInstanceOf[Object]; def getEngine: javax.script.ScriptEngine = engine }").asInstanceOf[CompiledScript]
+
+  @throws[ScriptException]
+  def compile(reader: java.io.Reader): CompiledScript = compile(stringFromReader(reader))
+
+  @throws[ScriptException]
+  def eval(script: String, context: ScriptContext): Object = compiled(script).eval(context)
+
+  @throws[ScriptException]
+  def eval(reader: java.io.Reader, context: ScriptContext): Object = eval(stringFromReader(reader), context)
+
+  override def finalize = close
+
+  /** Returns the name of the most recent interpreter result.
+   *  Mostly this exists so you can conveniently invoke methods on
+   *  the previous result.
+   */
+  def mostRecentVar: String =
+    if (mostRecentlyHandledTree.isEmpty) ""
+    else "" + (mostRecentlyHandledTree.get match {
+      case x: ValOrDefDef           => x.name
+      case Assign(Ident(name), _)   => name
+      case ModuleDef(_, name, _)    => name
+      case _                        => naming.mostRecentVar
+    })
+
+  private var mostRecentWarnings: List[(global.Position, String)] = Nil
+  def lastWarnings = mostRecentWarnings
+
+  private lazy val importToGlobal  = global mkImporter ru
+  private lazy val importToRuntime = ru.internal createImporter global
+  private lazy val javaMirror = ru.rootMirror match {
+    case x: ru.JavaMirror => x
+    case _                => null
+  }
+  private implicit def importFromRu(sym: ru.Symbol): Symbol = importToGlobal importSymbol sym
+  private implicit def importToRu(sym: Symbol): ru.Symbol   = importToRuntime importSymbol sym
+
+  def classOfTerm(id: String): Option[JClass] = symbolOfTerm(id) match {
+    case NoSymbol => None
+    case sym      => Some(javaMirror runtimeClass importToRu(sym).asClass)
+  }
+
+  def typeOfTerm(id: String): Type = symbolOfTerm(id).tpe
+
+  def valueOfTerm(id: String): Option[Any] = exitingTyper {
+    def value() = {
+      val sym0    = symbolOfTerm(id)
+      val sym     = (importToRuntime importSymbol sym0).asTerm
+      val module  = runtimeMirror.reflectModule(sym.owner.companionSymbol.asModule).instance
+      val module1 = runtimeMirror.reflect(module)
+      val invoker = module1.reflectField(sym)
+
+      invoker.get
+    }
+
+    try Some(value()) catch { case _: Exception => None }
+  }
+
+  /** It's a bit of a shotgun approach, but for now we will gain in
+   *  robustness. Try a symbol-producing operation at phase typer, and
+   *  if that is NoSymbol, try again at phase flatten. I'll be able to
+   *  lose this and run only from exitingTyper as soon as I figure out
+   *  exactly where a flat name is sneaking in when calculating imports.
+   */
+  def tryTwice(op: => Symbol): Symbol = exitingTyper(op) orElse exitingFlatten(op)
+
+  def symbolOfIdent(id: String): Symbol  = symbolOfType(id) orElse symbolOfTerm(id)
+  def symbolOfType(id: String): Symbol   = tryTwice(replScope lookup (id: TypeName))
+  def symbolOfTerm(id: String): Symbol   = tryTwice(replScope lookup (id: TermName))
+  def symbolOfName(id: Name): Symbol     = replScope lookup id
+
+  def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = {
+    classOfTerm(id) flatMap { clazz =>
+      clazz.supers find (!_.isScalaAnonymous) map { nonAnon =>
+        (nonAnon, runtimeTypeOfTerm(id))
+      }
+    }
+  }
+
+  def runtimeTypeOfTerm(id: String): Type = {
+    typeOfTerm(id) andAlso { tpe =>
+      val clazz      = classOfTerm(id) getOrElse { return NoType }
+      val staticSym  = tpe.typeSymbol
+      val runtimeSym = getClassIfDefined(clazz.getName)
+
+      if ((runtimeSym != NoSymbol) && (runtimeSym != staticSym) && (runtimeSym isSubClass staticSym))
+        runtimeSym.info
+      else NoType
+    }
+  }
+
+  def cleanTypeAfterTyper(sym: => Symbol): Type = {
+    exitingTyper(
+      dealiasNonPublic(
+        dropNullaryMethod(
+          sym.tpe_*
+        )
+      )
+    )
+  }
+  def cleanMemberDecl(owner: Symbol, member: Name): Type =
+    cleanTypeAfterTyper(owner.info nonPrivateDecl member)
+
+  object exprTyper extends {
+    val repl: IMain.this.type = imain
+  } with ExprTyper { }
+
+  /** Parse a line and return the parsing result (error, incomplete, or success with a list of trees). */
+  object parse {
+    abstract sealed class Result
+    case object Error extends Result
+    case object Incomplete extends Result
+    case class Success(trees: List[Tree]) extends Result
+
+    def apply(line: String): Result = debugging(s"""parse("$line")""")  {
+      var isIncomplete = false
+      reporter.withIncompleteHandler((_, _) => isIncomplete = true) {
+        reporter.reset()
+        val trees = newUnitParser(line).parseStats()
+        if (reporter.hasErrors) Error
+        else if (isIncomplete) Incomplete
+        else Success(trees)
+      }
+    }
+  }
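+
+  // Illustrative results (not from upstream): parse("val x = 1") yields Success with a
+  // single tree, parse("val x = (") yields Incomplete, and input the parser rejects
+  // outright (e.g. "val = 1") yields Error.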
+
+  def symbolOfLine(code: String): Symbol =
+    exprTyper.symbolOfLine(code)
+
+  def typeOfExpression(expr: String, silent: Boolean = true): Type =
+    exprTyper.typeOfExpression(expr, silent)
+
+  protected def onlyTerms(xs: List[Name]): List[TermName] = xs collect { case x: TermName => x }
+  protected def onlyTypes(xs: List[Name]): List[TypeName] = xs collect { case x: TypeName => x }
+
+  def definedTerms      = onlyTerms(allDefinedNames) filterNot isInternalTermName
+  def definedTypes      = onlyTypes(allDefinedNames)
+  def definedSymbolList = prevRequestList flatMap (_.defines) filterNot (s => isInternalTermName(s.name))
+
+  // Terms with user-given names (i.e. not res0 and not synthetic)
+  def namedDefinedTerms = definedTerms filterNot (x => isUserVarName("" + x) || directlyBoundNames(x))
+
+  private var _replScope: Scope = _
+  private def resetReplScope() {
+    _replScope = newScope
+  }
+  def replScope = {
+    if (_replScope eq null)
+      _replScope = newScope
+
+    _replScope
+  }
+
+  private var executingRequest: Request = _
+  private val prevRequests       = mutable.ListBuffer[Request]()
+  private val directlyBoundNames = mutable.Set[Name]()
+
+  def allHandlers     = prevRequestList flatMap (_.handlers)
+  def lastRequest     = if (prevRequests.isEmpty) null else prevRequests.last
+  def prevRequestList = prevRequests.toList
+  def importHandlers  = allHandlers collect { case x: ImportHandler => x }
+
+  def withoutUnwrapping(op: => Unit): Unit = {
+    val saved = isettings.unwrapStrings
+    isettings.unwrapStrings = false
+    try op
+    finally isettings.unwrapStrings = saved
+  }
+
+  def symbolDefString(sym: Symbol) = {
+    TypeStrings.quieter(
+      exitingTyper(sym.defString),
+      sym.owner.name + ".this.",
+      sym.owner.fullName + "."
+    )
+  }
+
+  def showCodeIfDebugging(code: String) {
+    /** Secret bookcase entrance for repl debuggers: end the line
+     *  with "// show" and see what's going on.
+     */
+    def isShow = code.lines exists (_.trim endsWith "// show")
+    if (isReplDebug || isShow) {
+      beSilentDuring(parse(code)) match {
+        case parse.Success(ts) =>
+          ts foreach { t =>
+            withoutUnwrapping(echo(asCompactString(t)))
+          }
+        case _ =>
+      }
+    }
+  }
+
+  // debugging
+  def debugging[T](msg: String)(res: T) = {
+    repldbg(msg + " " + res)
+    res
+  }
+}
+
+/** Utility methods for the Interpreter. */
+object IMain {
+  import java.util.Arrays.{ asList => asJavaList }
+
+  class Factory extends ScriptEngineFactory {
+    @BeanProperty
+    val engineName = "Scala Interpreter"
+
+    @BeanProperty
+    val engineVersion = "1.0"
+
+    @BeanProperty
+    val extensions: JList[String] = asJavaList("scala")
+
+    @BeanProperty
+    val languageName = "Scala"
+
+    @BeanProperty
+    val languageVersion = scala.util.Properties.versionString
+
+    def getMethodCallSyntax(obj: String, m: String, args: String*): String = null
+
+    @BeanProperty
+    val mimeTypes: JList[String] = asJavaList("application/x-scala")
+
+    @BeanProperty
+    val names: JList[String] = asJavaList("scala")
+
+    def getOutputStatement(toDisplay: String): String = null
+
+    def getParameter(key: String): Object = key match {
+      case ScriptEngine.ENGINE => engineName
+      case ScriptEngine.ENGINE_VERSION => engineVersion
+      case ScriptEngine.LANGUAGE => languageName
+      case ScriptEngine.LANGUAGE_VERSION => languageVersion
+      case ScriptEngine.NAME => names.get(0)
+      case _ => null
+    }
+
+    def getProgram(statements: String*): String = null
+
+    def getScriptEngine: ScriptEngine = new IMain(this, new Settings() {
+      usemanifestcp.value = true
+    })
+  }
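+
+  // Illustrative JSR-223 usage (assumed client code, not from upstream): provided the
+  // factory is registered via the standard ServiceLoader mechanism, a Scala engine can
+  // be obtained with
+  //   new javax.script.ScriptEngineManager().getEngineByName("scala")
+  // and driven through its eval(...) / compile(...) methods.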
+
+  // The two name forms this is catching are the two sides of this assignment:
+  //
+  // $line3.$read.$iw.$iw.Bippy =
+  //   $line3.$read$$iw$$iw$Bippy at 4a6a00ca
+  private def removeLineWrapper(s: String) = s.replaceAll("""\$line\d+[./]\$(read|eval|print)[$.]""", "")
+  private def removeIWPackages(s: String)  = s.replaceAll("""\$(iw|read|eval|print)[$.]""", "")
+  def stripString(s: String)               = removeIWPackages(removeLineWrapper(s))
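+
+  // Illustrative (not from upstream):
+  //   stripString("$line3.$read.$iw.$iw.Bippy")          == "Bippy"
+  //   stripString("$line3.$read$$iw$$iw$Bippy@4a6a00ca") == "Bippy@4a6a00ca"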
+
+  trait CodeAssembler[T] {
+    def preamble: String
+    def generate: T => String
+    def postamble: String
+
+    def apply(contributors: List[T]): String = stringFromWriter { code =>
+      code println preamble
+      contributors map generate foreach (code println _)
+      code println postamble
+    }
+  }
+
+  trait StrippingWriter {
+    def isStripping: Boolean
+    def stripImpl(str: String): String
+    def strip(str: String): String = if (isStripping) stripImpl(str) else str
+  }
+  trait TruncatingWriter {
+    def maxStringLength: Int
+    def isTruncating: Boolean
+    def truncate(str: String): String = {
+      if (isTruncating && (maxStringLength != 0 && str.length > maxStringLength))
+        (str take maxStringLength - 3) + "..."
+      else str
+    }
+  }
+  abstract class StrippingTruncatingWriter(out: JPrintWriter)
+          extends JPrintWriter(out)
+             with StrippingWriter
+             with TruncatingWriter {
+    self =>
+
+    def clean(str: String): String = truncate(strip(str))
+    override def write(str: String) = super.write(clean(str))
+  }
+  class ReplStrippingWriter(intp: IMain) extends StrippingTruncatingWriter(intp.out) {
+    import intp._
+    def maxStringLength    = isettings.maxPrintString
+    def isStripping        = isettings.unwrapStrings
+    def isTruncating       = reporter.truncationOK
+
+    def stripImpl(str: String): String = naming.unmangle(str)
+  }
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/ISettings.scala b/src/repl/scala/tools/nsc/interpreter/ISettings.scala
new file mode 100644
index 0000000..9541d08
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/ISettings.scala
@@ -0,0 +1,54 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Alexander Spoon
+ */
+
+package scala.tools.nsc
+package interpreter
+
+/** Settings for the interpreter
+ *
+ * @version 1.0
+ * @author Lex Spoon, 2007/3/24
+ **/
+class ISettings(intp: IMain) {
+  /** The maximum length of toString to use when printing the result
+   *  of an evaluation.  0 means no maximum.  If a printout requires
+   *  more than this number of characters, then the printout is
+   *  truncated.
+   */
+  var maxPrintString = replProps.maxPrintString.option.getOrElse(800)
+
+  /** The maximum number of completion candidates to print for tab
+   *  completion without requiring confirmation.
+   */
+  var maxAutoprintCompletion = 250
+
+  /** String unwrapping can be disabled if it is causing issues.
+   *  Setting this to false means you will see Strings like "$iw.$iw.".
+   */
+  var unwrapStrings = true
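+
+  // Illustrative (assumed access to the IMain instance, e.g. in power mode; not from
+  // upstream): these vars can be adjusted at runtime, e.g.
+  //   intp.isettings.maxPrintString = 0   // disable output truncation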
+
+  def deprecation_=(x: Boolean) = {
+    val old = intp.settings.deprecation.value
+    intp.settings.deprecation.value = x
+    if (!old && x) println("Enabled -deprecation output.")
+    else if (old && !x) println("Disabled -deprecation output.")
+  }
+  def deprecation: Boolean = intp.settings.deprecation.value
+
+  def allSettings = Map[String, Any](
+    "maxPrintString" -> maxPrintString,
+    "maxAutoprintCompletion" -> maxAutoprintCompletion,
+    "unwrapStrings" -> unwrapStrings,
+    "deprecation" -> deprecation
+  )
+
+  private def allSettingsString =
+    allSettings.toList sortBy (_._1) map { case (k, v) => "  " + k + " = " + v + "\n" } mkString
+
+  override def toString = """
+    | ISettings {
+    | %s
+    | }""".stripMargin.format(allSettingsString)
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala
new file mode 100644
index 0000000..5244858
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala
@@ -0,0 +1,183 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import scala.collection.{ mutable, immutable }
+
+trait Imports {
+  self: IMain =>
+
+  import global._
+  import definitions.{ ObjectClass, ScalaPackage, JavaLangPackage, PredefModule }
+  import memberHandlers._
+
+  /** Synthetic import handlers for the language defined imports. */
+  private def makeWildcardImportHandler(sym: Symbol): ImportHandler = {
+    val hd :: tl = sym.fullName.split('.').toList map newTermName
+    val tree = Import(
+      tl.foldLeft(Ident(hd): Tree)((x, y) => Select(x, y)),
+      ImportSelector.wildList
+    )
+    tree setSymbol sym
+    new ImportHandler(tree)
+  }
+
+  /** Symbols whose contents are language-defined to be imported. */
+  def languageWildcardSyms: List[Symbol] = List(JavaLangPackage, ScalaPackage, PredefModule)
+  def languageWildcardHandlers = languageWildcardSyms map makeWildcardImportHandler
+
+  def allImportedNames = importHandlers flatMap (_.importedNames)
+
+  /** Types which have been wildcard imported, such as:
+   *    val x = "abc" ; import x._  // type java.lang.String
+   *    import java.lang.String._   // object java.lang.String
+   *
+   *  Used by tab completion.
+   *
+   *  XXX right now this gets import x._ and import java.lang.String._,
+   *  but doesn't figure out import String._.  There's a lot of ad hoc
+   *  scope twiddling which should be swept away in favor of digging
+   *  into the compiler scopes.
+   */
+  def sessionWildcards: List[Type] = {
+    importHandlers filter (_.importsWildcard) map (_.targetType) distinct
+  }
+
+  def languageSymbols        = languageWildcardSyms flatMap membersAtPickler
+  def sessionImportedSymbols = importHandlers flatMap (_.importedSymbols)
+  def importedSymbols        = languageSymbols ++ sessionImportedSymbols
+  def importedTermSymbols    = importedSymbols collect { case x: TermSymbol => x }
+
+  /** Tuples of (source, imported symbols) in the order they were imported.
+   */
+  def importedSymbolsBySource: List[(Symbol, List[Symbol])] = {
+    val lang    = languageWildcardSyms map (sym => (sym, membersAtPickler(sym)))
+    val session = importHandlers filter (_.targetType != NoType) map { mh =>
+      (mh.targetType.typeSymbol, mh.importedSymbols)
+    }
+
+    lang ++ session
+  }
+  def implicitSymbolsBySource: List[(Symbol, List[Symbol])] = {
+    importedSymbolsBySource map {
+      case (k, vs) => (k, vs filter (_.isImplicit))
+    } filterNot (_._2.isEmpty)
+  }
+
+  /** Compute imports that allow definitions from previous
+   *  requests to be visible in a new request.  Returns
+   *  three pieces of related code:
+   *
+   *  1. An initial code fragment that should go before
+   *  the code of the new request.
+   *
+   *  2. A code fragment that should go after the code
+   *  of the new request.
+   *
+   *  3. An access path which can be traversed to access
+   *  any bindings inside code wrapped by #1 and #2.
+   *
+   * The argument is a set of Names that need to be imported.
+   *
+   * Limitations: This method is not as precise as it could be.
+   * (1) It does not process wildcard imports to see what exactly
+   * they import.
+   * (2) If it imports any names from a request, it imports all
+   * of them, which is not really necessary.
+   * (3) It imports multiple same-named implicits, but only the
+   * last one imported is actually usable.
+   */
+  case class ComputedImports(prepend: String, append: String, access: String)
+  protected def importsCode(wanted: Set[Name], wrapper: Request#Wrapper): ComputedImports = {
+    /** Narrow down the list of requests from which imports
+     *  should be taken.  Removes requests which cannot contribute
+     *  useful imports for the specified set of wanted names.
+     */
+    case class ReqAndHandler(req: Request, handler: MemberHandler) { }
+
+    def reqsToUse: List[ReqAndHandler] = {
+      /** Loop through a list of MemberHandlers and select which ones to keep.
+       *  'wanted' is the set of names that need to be imported.
+       */
+      def select(reqs: List[ReqAndHandler], wanted: Set[Name]): List[ReqAndHandler] = {
+        // Single symbol imports might be implicits! See bug #1752.  Rather than
+        // try to finesse this, we will mimic all imports for now.
+        def keepHandler(handler: MemberHandler) = handler match {
+          case _: ImportHandler => true
+          case x                => x.definesImplicit || (x.definedNames exists wanted)
+        }
+
+        reqs match {
+          case Nil                                    => Nil
+          case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted)
+          case rh :: rest                             =>
+            import rh.handler._
+            val newWanted = wanted ++ referencedNames -- definedNames -- importedNames
+            rh :: select(rest, newWanted)
+        }
+      }
+
+      /** Flatten the handlers out and pair each with the original request */
+      select(allReqAndHandlers reverseMap { case (r, h) => ReqAndHandler(r, h) }, wanted).reverse
+    }
+
+    val code, trailingBraces, accessPath = new StringBuilder
+    val currentImps = mutable.HashSet[Name]()
+
+    // add code for a new object to hold some imports
+    def addWrapper() {
+      import nme.{ INTERPRETER_IMPORT_WRAPPER => iw }
+      code append (wrapper.prewrap format iw)
+      trailingBraces append wrapper.postwrap
+      accessPath append s".$iw"
+      currentImps.clear()
+    }
+
+    def maybeWrap(names: Name*) = if (names exists currentImps) addWrapper()
+
+    def wrapBeforeAndAfter[T](op: => T): T = {
+      addWrapper()
+      try op finally addWrapper()
+    }
+
+    // loop through previous requests, adding imports for each one
+    wrapBeforeAndAfter {
+      for (ReqAndHandler(req, handler) <- reqsToUse) {
+        handler match {
+          // If the user entered an import, then just use it; add an import wrapping
+          // level if the import might conflict with some other import
+          case x: ImportHandler if x.importsWildcard =>
+            wrapBeforeAndAfter(code append (x.member + "\n"))
+          case x: ImportHandler =>
+            maybeWrap(x.importedNames: _*)
+            code append (x.member + "\n")
+            currentImps ++= x.importedNames
+
+          // For other requests, import each defined name.
+          // import them explicitly instead of with _, so that
+          // ambiguity errors will not be generated. Also, quote
+          // the name of the variable, so that we don't need to
+          // handle quoting keywords separately.
+          case x =>
+            for (sym <- x.definedSymbols) {
+              maybeWrap(sym.name)
+              code append s"import ${x.path}\n"
+              currentImps += sym.name
+            }
+        }
+      }
+    }
+
+    ComputedImports(code.toString, trailingBraces.toString, accessPath.toString)
+  }
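+
+  // Illustrative shape of the result (assumed example, not from upstream): with the
+  // object-based wrapper, importing a previously defined `x` might produce
+  //   prepend = "object $iw {\nimport $line2.$read.$iw.$iw.x\nobject $iw {\n"
+  //   append  = "}\n}\n"
+  //   access  = ".$iw.$iw"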
+
+  private def allReqAndHandlers =
+    prevRequestList flatMap (req => req.handlers map (req -> _))
+
+  private def membersAtPickler(sym: Symbol): List[Symbol] =
+    enteringPickler(sym.info.nonPrivateMembers.toList)
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
new file mode 100644
index 0000000..28ddf29
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
@@ -0,0 +1,49 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Stepan Koltsov
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import java.io.IOException
+import session.History
+import InteractiveReader._
+import Properties.isMac
+
+/** Reads lines from an input stream */
+trait InteractiveReader {
+  val interactive: Boolean
+
+  def reset(): Unit
+  def history: History
+  def completion: Completion
+  def redrawLine(): Unit
+
+  def readYesOrNo(prompt: String, alt: => Boolean): Boolean = readOneKey(prompt) match {
+    case 'y'  => true
+    case 'n'  => false
+    case _    => alt
+  }
+
+  protected def readOneLine(prompt: String): String
+  protected def readOneKey(prompt: String): Int
+
+  def readLine(prompt: String): String =
+    // hack necessary for OSX jvm suspension because read calls are not restarted after SIGTSTP
+    if (isMac) restartSysCalls(readOneLine(prompt), reset())
+    else readOneLine(prompt)
+}
+
+object InteractiveReader {
+  val msgEINTR = "Interrupted system call"
+  def restartSysCalls[R](body: => R, reset: => Unit): R =
+    try body catch {
+      case e: IOException if e.getMessage == msgEINTR => reset ; body
+    }
+
+  def apply(): InteractiveReader = SimpleReader()
+  @deprecated("Use `apply` instead.", "2.9.0")
+  def createDefault(): InteractiveReader = apply() // used by sbt
+}
+
diff --git a/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
new file mode 100644
index 0000000..c1122d4
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -0,0 +1,350 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import Completion._
+import scala.collection.mutable.ListBuffer
+import scala.reflect.internal.util.StringOps.longestCommonPrefix
+
+// REPL completor - queries supplied interpreter for valid
+// completions based on current contents of buffer.
+class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput {
+  val global: intp.global.type = intp.global
+  import global._
+  import definitions._
+  import rootMirror.{ RootClass, getModuleIfDefined }
+  import intp.{ debugging }
+
+  // verbosity goes up with consecutive tabs
+  private var verbosity: Int = 0
+  def resetVerbosity() = verbosity = 0
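+
+  // Illustrative (not from upstream): the first <tab> completes at verbosity 0 with
+  // filtered candidates; a second <tab> on the same buffer raises verbosity, so e.g.
+  // Predef members are only offered on the repeated tab (see predef.completions below).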
+
+  def getSymbol(name: String, isModule: Boolean) = (
+    if (isModule) getModuleIfDefined(name)
+    else getModuleIfDefined(name)
+  )
+
+  trait CompilerCompletion {
+    def tp: Type
+    def effectiveTp = tp match {
+      case MethodType(Nil, resType)   => resType
+      case NullaryMethodType(resType) => resType
+      case _                          => tp
+    }
+
+    // for some reason any's members don't show up in subclasses, which
+    // we need so 5.<tab> offers asInstanceOf etc.
+    private def anyMembers = AnyTpe.nonPrivateMembers
+    def anyRefMethodsToShow = Set("isInstanceOf", "asInstanceOf", "toString")
+
+    def tos(sym: Symbol): String = sym.decodedName
+    def memberNamed(s: String) = exitingTyper(effectiveTp member newTermName(s))
+
+    // XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the
+    // compiler to crash for reasons not yet known.
+    def members     = exitingTyper((effectiveTp.nonPrivateMembers.toList ++ anyMembers) filter (_.isPublic))
+    def methods     = members.toList filter (_.isMethod)
+    def packages    = members.toList filter (_.hasPackageFlag)
+    def aliases     = members.toList filter (_.isAliasType)
+
+    def memberNames   = members map tos
+    def methodNames   = methods map tos
+    def packageNames  = packages map tos
+    def aliasNames    = aliases map tos
+  }
+
+  object NoTypeCompletion extends TypeMemberCompletion(NoType) {
+    override def memberNamed(s: String) = NoSymbol
+    override def members = Nil
+    override def follow(s: String) = None
+    override def alternativesFor(id: String) = Nil
+  }
+
+  object TypeMemberCompletion {
+    def apply(tp: Type, runtimeType: Type, param: NamedParam): TypeMemberCompletion = {
+      new TypeMemberCompletion(tp) {
+        var upgraded = false
+        lazy val upgrade = {
+          intp rebind param
+          intp.reporter.printMessage("\nRebinding stable value %s from %s to %s".format(param.name, tp, param.tpe))
+          upgraded = true
+          new TypeMemberCompletion(runtimeType)
+        }
+        override def completions(verbosity: Int) = {
+          super.completions(verbosity) ++ (
+            if (verbosity == 0) Nil
+            else upgrade.completions(verbosity)
+          )
+        }
+        override def follow(s: String) = super.follow(s) orElse {
+          if (upgraded) upgrade.follow(s)
+          else None
+        }
+        override def alternativesFor(id: String) = super.alternativesFor(id) ++ (
+          if (upgraded) upgrade.alternativesFor(id)
+          else Nil
+        ) distinct
+      }
+    }
+    def apply(tp: Type): TypeMemberCompletion = {
+      if (tp eq NoType) NoTypeCompletion
+      else if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
+      else new TypeMemberCompletion(tp)
+    }
+    def imported(tp: Type) = new ImportCompletion(tp)
+  }
+
+  class TypeMemberCompletion(val tp: Type) extends CompletionAware
+                                              with CompilerCompletion {
+    def excludeEndsWith: List[String] = Nil
+    def excludeStartsWith: List[String] = List("<") // <byname>, <repeated>, etc.
+    def excludeNames: List[String] = (anyref.methodNames filterNot anyRefMethodsToShow) :+ "_root_"
+
+    def methodSignatureString(sym: Symbol) = {
+      IMain stripString exitingTyper(new MethodSymbolOutput(sym).methodString())
+    }
+
+    def exclude(name: String): Boolean = (
+      (name contains "$") ||
+      (excludeNames contains name) ||
+      (excludeEndsWith exists (name endsWith _)) ||
+      (excludeStartsWith exists (name startsWith _))
+    )
+    def filtered(xs: List[String]) = xs filterNot exclude distinct
+
+    def completions(verbosity: Int) =
+      debugging(tp + " completions ==> ")(filtered(memberNames))
+
+    override def follow(s: String): Option[CompletionAware] =
+      debugging(tp + " -> '" + s + "' ==> ")(Some(TypeMemberCompletion(memberNamed(s).tpe)) filterNot (_ eq NoTypeCompletion))
+
+    override def alternativesFor(id: String): List[String] =
+      debugging(id + " alternatives ==> ") {
+        val alts = members filter (x => x.isMethod && tos(x) == id) map methodSignatureString
+
+        if (alts.nonEmpty) "" :: alts else Nil
+      }
+
+    override def toString = "%s (%d members)".format(tp, members.size)
+  }
+
+  class PackageCompletion(tp: Type) extends TypeMemberCompletion(tp) {
+    override def excludeNames = anyref.methodNames
+  }
+
+  class LiteralCompletion(lit: Literal) extends TypeMemberCompletion(lit.value.tpe) {
+    override def completions(verbosity: Int) = verbosity match {
+      case 0    => filtered(memberNames)
+      case _    => memberNames
+    }
+  }
+
+  class ImportCompletion(tp: Type) extends TypeMemberCompletion(tp) {
+    override def completions(verbosity: Int) = verbosity match {
+      case 0    => filtered(members filterNot (_.isSetter) map tos)
+      case _    => super.completions(verbosity)
+    }
+  }
+
+  // not for completion but for excluding
+  object anyref extends TypeMemberCompletion(AnyRefTpe) { }
+
+  // the unqualified vals/defs/etc visible in the repl
+  object ids extends CompletionAware {
+    override def completions(verbosity: Int) = intp.unqualifiedIds ++ List("classOf") //, "_root_")
+    // now we use the compiler for everything.
+    override def follow(id: String): Option[CompletionAware] = {
+      if (!completions(0).contains(id))
+        return None
+
+      val tpe = intp typeOfExpression id
+      if (tpe == NoType)
+        return None
+
+      def default = Some(TypeMemberCompletion(tpe))
+
+      // only rebinding vals in power mode for now.
+      if (!isReplPower) default
+      else intp runtimeClassAndTypeOfTerm id match {
+        case Some((clazz, runtimeType)) =>
+          val sym = intp.symbolOfTerm(id)
+          if (sym.isStable) {
+            val param = new NamedParam.Untyped(id, intp valueOfTerm id getOrElse null)
+            Some(TypeMemberCompletion(tpe, runtimeType, param))
+          }
+          else default
+        case _        =>
+          default
+      }
+    }
+    override def toString = "<repl ids> (%s)".format(completions(0).size)
+  }
+
+  // user-issued wildcard imports like "import global._" or "import String._"
+  private def imported = intp.sessionWildcards map TypeMemberCompletion.imported
+
+  // literal Ints, Strings, etc.
+  object literals extends CompletionAware {
+    def simpleParse(code: String): Option[Tree] = newUnitParser(code).parseStats().lastOption
+    def completions(verbosity: Int) = Nil
+
+    override def follow(id: String) = simpleParse(id).flatMap {
+      case x: Literal   => Some(new LiteralCompletion(x))
+      case _            => None
+    }
+  }
+
+  // top level packages
+  object rootClass extends TypeMemberCompletion(RootClass.tpe) {
+    override def completions(verbosity: Int) = super.completions(verbosity) :+ "_root_"
+    override def follow(id: String) = id match {
+      case "_root_" => Some(this)
+      case _        => super.follow(id)
+    }
+  }
+  // members of Predef
+  object predef extends TypeMemberCompletion(PredefModule.tpe) {
+    override def excludeEndsWith    = super.excludeEndsWith ++ List("Wrapper", "ArrayOps")
+    override def excludeStartsWith  = super.excludeStartsWith ++ List("wrap")
+    override def excludeNames       = anyref.methodNames
+
+    override def exclude(name: String) = super.exclude(name) || (
+      (name contains "2")
+    )
+
+    override def completions(verbosity: Int) = verbosity match {
+      case 0    => Nil
+      case _    => super.completions(verbosity)
+    }
+  }
+  // members of scala.*
+  object scalalang extends PackageCompletion(ScalaPackage.tpe) {
+    def arityClasses = List("Product", "Tuple", "Function")
+    def skipArity(name: String) = arityClasses exists (x => name != x && (name startsWith x))
+    override def exclude(name: String) = super.exclude(name) || (
+      skipArity(name)
+    )
+
+    override def completions(verbosity: Int) = verbosity match {
+      case 0    => filtered(packageNames ++ aliasNames)
+      case _    => super.completions(verbosity)
+    }
+  }
+  // members of java.lang.*
+  object javalang extends PackageCompletion(JavaLangPackage.tpe) {
+    override lazy val excludeEndsWith   = super.excludeEndsWith ++ List("Exception", "Error")
+    override lazy val excludeStartsWith = super.excludeStartsWith ++ List("CharacterData")
+
+    override def completions(verbosity: Int) = verbosity match {
+      case 0    => filtered(packageNames)
+      case _    => super.completions(verbosity)
+    }
+  }
+
+  // The list of completion-aware objects which should be consulted.
+  // For top level unqualified ids, it's too noisy to let much in.
+  lazy val topLevelBase: List[CompletionAware] = List(ids, rootClass, predef, scalalang, javalang, literals)
+  def topLevel = topLevelBase ++ imported
+  def topLevelThreshold = 50
+
+  // the first tier of top level objects (doesn't include file completion)
+  def topLevelFor(parsed: Parsed): List[String] = {
+    val buf = new ListBuffer[String]
+    topLevel foreach { ca =>
+      buf ++= (ca completionsFor parsed)
+
+      if (buf.size > topLevelThreshold)
+        return buf.toList.sorted
+    }
+    buf.toList
+  }
+
+  // the most recent result
+  def lastResult = Forwarder(() => ids follow intp.mostRecentVar)
+
+  def lastResultFor(parsed: Parsed) = {
+    /** The logic is a little tortured right now because normally '.' is
+     *  ignored as a delimiter, but on .<tab> it needs to be propagated.
+     */
+    val xs = lastResult completionsFor parsed
+    if (parsed.isEmpty) xs map ("." + _) else xs
+  }
+
+  def completer(): ScalaCompleter = new JLineTabCompletion
+
+  /** This gets a little bit hairy.  It's no small feat delegating everything
+   *  and also keeping track of exactly where the cursor is and where it's supposed
+   *  to end up.  The alternatives mechanism is a little hacky: if there is an empty
+   *  string in the list of completions, that means we are expanding a unique
+   *  completion, so don't update the "last" buffer because it'll be wrong.
+   */
+  class JLineTabCompletion extends ScalaCompleter {
+    // For recording the buffer on the last tab hit
+    private var lastBuf: String = ""
+    private var lastCursor: Int = -1
+
+    // Does this represent two consecutive tabs?
+    def isConsecutiveTabs(buf: String, cursor: Int) =
+      cursor == lastCursor && buf == lastBuf
+
+    // This is jline's entry point for completion.
+    override def complete(buf: String, cursor: Int): Candidates = {
+      verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
+      repldbg(f"%ncomplete($buf, $cursor%d) last = ($lastBuf, $lastCursor%d), verbosity: $verbosity")
+
+      // we don't try lower priority completions unless higher ones return no results.
+      def tryCompletion(p: Parsed, completionFunction: Parsed => List[String]): Option[Candidates] = {
+        val winners = completionFunction(p)
+        if (winners.isEmpty)
+          return None
+        val newCursor =
+          if (winners contains "") p.cursor
+          else {
+            val advance = longestCommonPrefix(winners)
+            lastCursor = p.position + advance.length
+            lastBuf = (buf take p.position) + advance
+            repldbg(s"tryCompletion($p, _) lastBuf = $lastBuf, lastCursor = $lastCursor, p.position = ${p.position}")
+            p.position
+          }
+
+        Some(Candidates(newCursor, winners))
+      }
+
+      def mkDotted = Parsed.dotted(buf, cursor) withVerbosity verbosity
+
+      // a single dot is special cased to completion on the previous result
+      def lastResultCompletion =
+        if (!looksLikeInvocation(buf)) None
+        else tryCompletion(Parsed.dotted(buf drop 1, cursor), lastResultFor)
+
+      def tryAll = (
+                  lastResultCompletion
+           orElse tryCompletion(mkDotted, topLevelFor)
+        getOrElse Candidates(cursor, Nil)
+      )
+
+      /**
+       *  This is the kickoff point for all manner of theoretically
+       *  possible compiler unhappiness. The fault may be here or
+       *  elsewhere, but we don't want to crash the repl regardless.
+       *  The compiler makes it impossible to avoid catching Throwable
+       *  with its unfortunate tendency to throw java.lang.Errors and
+       *  AssertionErrors as the hats drop. We take two swings at it
+       *  because there are some spots which like to throw an assertion
+       *  once, then work after that. Yeah, what can I say.
+       */
+      try tryAll
+      catch { case ex: Throwable =>
+        repldbg("Error: complete(%s, %s) provoked".format(buf, cursor) + ex)
+        Candidates(cursor,
+          if (isReplDebug) List("<error:" + ex + ">")
+          else Nil
+        )
+      }
+    }
+  }
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/JLineReader.scala b/src/repl/scala/tools/nsc/interpreter/JLineReader.scala
new file mode 100644
index 0000000..b6e834a
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/JLineReader.scala
@@ -0,0 +1,75 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Stepan Koltsov
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import jline.console.ConsoleReader
+import jline.console.completer._
+import session._
+import Completion._
+
+/**
+ *  Reads from the console using JLine.
+ */
+class JLineReader(_completion: => Completion) extends InteractiveReader {
+  val interactive = true
+  val consoleReader = new JLineConsoleReader()
+
+  lazy val completion = _completion
+  lazy val history: JLineHistory = JLineHistory()
+
+  private def term = consoleReader.getTerminal()
+  def reset() = term.reset()
+
+  def scalaToJline(tc: ScalaCompleter): Completer = new Completer {
+    def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = {
+      val buf   = if (_buf == null) "" else _buf
+      val Candidates(newCursor, newCandidates) = tc.complete(buf, cursor)
+      newCandidates foreach (candidates add _)
+      newCursor
+    }
+  }
+
+  class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper with VariColumnTabulator {
+    val isAcross = interpreter.`package`.isAcross
+
+    this setPaginationEnabled interpreter.`package`.isPaged
+
+    // ASAP
+    this setExpandEvents false
+
+    // working around protected/trait/java insufficiencies.
+    def goBack(num: Int): Unit = back(num)
+    if ((history: History) ne NoHistory)
+      this setHistory history
+
+    def readOneKey(prompt: String) = {
+      this.print(prompt)
+      this.flush()
+      this.readCharacter()
+    }
+    def eraseLine() = consoleReader.resetPromptLine("", "", 0)
+    def redrawLineAndFlush(): Unit = { flush() ; drawLine() ; flush() }
+
+    // A hook for running code after the repl is done initializing.
+    lazy val postInit: Unit = {
+      this setBellEnabled false
+
+      if (completion ne NoCompletion) {
+        val argCompletor: ArgumentCompleter =
+          new ArgumentCompleter(new JLineDelimiter, scalaToJline(completion.completer()))
+        argCompletor setStrict false
+
+        this addCompleter argCompletor
+        this setAutoprintThreshold 400 // max completion candidates without warning
+      }
+    }
+  }
+
+  def redrawLine() = consoleReader.redrawLineAndFlush()
+  def readOneLine(prompt: String) = consoleReader readLine prompt
+  def readOneKey(prompt: String)  = consoleReader readOneKey prompt
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
new file mode 100644
index 0000000..915fd57
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
@@ -0,0 +1,742 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala
+package tools.nsc
+package interpreter
+
+import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable }
+import scala.tools.nsc.util.ScalaClassLoader
+import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, PrintWriter, Writer }
+import java.util.{ Locale }
+import java.util.concurrent.ConcurrentLinkedQueue
+import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener,
+                     ForwardingJavaFileManager, JavaFileManager, JavaFileObject,
+                     SimpleJavaFileObject, StandardLocation }
+import scala.reflect.io.{ AbstractFile, Directory, File, Path }
+import scala.io.Source
+import scala.util.{ Try, Success, Failure }
+import scala.util.Properties.lineSeparator
+import scala.util.matching.Regex
+import scala.collection.JavaConverters
+import scala.collection.generic.Clearable
+import java.net.URL
+import scala.language.reflectiveCalls
+import Javap._
+
+class JavapClass(
+  val loader: ScalaClassLoader,
+  val printWriter: PrintWriter,
+  intp: Option[IMain] = None
+) extends scala.tools.util.Javap {
+  import JavapTool.ToolArgs
+  import JavapClass._
+
+  lazy val tool = JavapTool()
+
+  /** Run the tool. Option args start with "-".
+   *  The default options are "-protected -verbose".
+   *  Byte data for filename args is retrieved with findBytes.
+   */
+  def apply(args: Seq[String]): List[JpResult] = {
+    val (options, claases) = args partition (s => (s startsWith "-") && s.length > 1)
+    val (flags, upgraded) = upgrade(options)
+    import flags.{ app, fun, help, raw }
+    val targets = if (fun && !help) FunFinder(loader, intp).funs(claases) else claases
+    if (help || claases.isEmpty)
+      List(JpResult(JavapTool.helper(printWriter)))
+    else if (targets.isEmpty)
+      List(JpResult("No anonfuns found."))
+    else
+      tool(raw, upgraded)(targets map (claas => targeted(claas, app)))
+  }
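+
+  // Illustrative usage (editorial sketch, not part of this commit), assuming an IMain
+  // instance named repl is available:
+  //   val javap = JavapClass(intp = Some(repl))
+  //   javap(Seq("-v", "scala.Option")) foreach (_.show())
+  // runs the tool with -verbose against scala.Option and prints each JpResult.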
+
+  /** Cull our tool options. */
+  private def upgrade(options: Seq[String]): (ToolArgs, Seq[String]) =
+    ToolArgs fromArgs options match {
+      case (t, s) if s.nonEmpty => (t, s)
+      case (t, s)               => (t, JavapTool.DefaultOptions)
+    }
+
+  /** Associate the requested path with a possibly failed or empty array of bytes. */
+  private def targeted(path: String, app: Boolean): (String, Try[Array[Byte]]) =
+    bytesFor(path, app) match {
+      case Success((target, bytes)) => (target, Try(bytes))
+      case f: Failure[_]            => (path,   Failure(f.exception))
+    }
+
+  /** Find bytes. Handle "-", "-app", "Foo#bar" (by ignoring member), "#bar" (by taking "bar"). */
+  private def bytesFor(path: String, app: Boolean) = Try {
+    def last = intp.get.mostRecentVar  // fail if no intp
+    def req = path match {
+      case "-" => last
+      case HashSplit(prefix, member) =>
+        if (prefix != null) prefix
+        else if (member != null) member
+        else "#"
+    }
+    val targetedBytes = if (app) findAppBody(req) else (req, findBytes(req))
+    if (targetedBytes._2.isEmpty) throw new FileNotFoundException(s"Could not find class bytes for '$path'")
+    targetedBytes
+  }
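+
+  // Editorial note (not in upstream): examples of the path forms handled above:
+  //   "-"        the most recent REPL result (requires intp)
+  //   "Foo#bar"  class Foo; the member is ignored when locating bytes
+  //   "#bar"     a bare member, looked up as the name "bar"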
+
+  private def findAppBody(path: String): (String, Array[Byte]) = {
+    // is this new style delayedEndpoint? then find it.
+    // the name test is naive. could add $mangled path.
+    // assumes only the first match is of interest (because only one endpoint is generated).
+    def findNewStyle(bytes: Array[Byte]) = {
+      import scala.tools.asm.ClassReader
+      import scala.tools.asm.tree.ClassNode
+      import PartialFunction.cond
+      import JavaConverters._
+      val rdr = new ClassReader(bytes)
+      val nod = new ClassNode
+      rdr.accept(nod, 0)
+      //foo/Bar.delayedEndpoint$foo$Bar$1
+      val endpoint = "delayedEndpoint".r.unanchored
+      def isEndPoint(s: String) = (s contains '$') && cond(s) { case endpoint() => true }
+      nod.methods.asScala collectFirst { case m if isEndPoint(m.name) => m.name }
+    }
+    // try new style, and add foo#delayedEndpoint$bar$1 to filter on the endpoint
+    def asNewStyle(bytes: Array[Byte]) = Some(bytes) filter (_.nonEmpty) flatMap { bs =>
+      findNewStyle(bs) map (n => (s"$path#$n", bs))
+    }
+    // use old style, and add foo# to filter on apply method
+    def asOldStyle = {
+      def asAppBody(s: String) = {
+        val (cls, fix) = s.splitSuffix
+        s"${cls}$$delayedInit$$body${fix}"
+      }
+      val oldStyle = asAppBody(path)
+      val oldBytes = findBytes(oldStyle)
+      if (oldBytes.nonEmpty) (s"$oldStyle#", oldBytes)
+      else (path, oldBytes)
+    }
+
+    val pathBytes = findBytes(path)
+    asNewStyle(pathBytes) getOrElse asOldStyle
+  }
+
+  def findBytes(path: String): Array[Byte] = tryFile(path) getOrElse tryClass(path)
+
+  /** Assume the string is a path and try to find the classfile
+   *  it represents.
+   */
+  def tryFile(path: String): Option[Array[Byte]] =
+    (Try (File(path.asClassResource)) filter (_.exists) map (_.toByteArray())).toOption
+
+  /** Assume the string is a fully qualified class name and try to
+   *  find the class object it represents.
+   *  There are other symbols of interest, too:
+   *  - a definition that is wrapped in an enclosing class
+   *  - a synthetic that is not in scope but its associated class is
+   */
+  def tryClass(path: String): Array[Byte] = {
+    def load(name: String) = loader classBytes name
+    def loadable(name: String) = loader resourceable name
+    // if path has an interior dollar, take it as a synthetic
+    // if the prefix up to the dollar is a symbol in scope,
+    // result is the translated prefix + suffix
+    def desynthesize(s: String) = {
+      val i = s indexOf '$'
+      if (0 until s.length - 1 contains i) {
+        val name = s substring (0, i)
+        val sufx = s substring i
+        val tran = intp flatMap (_ translatePath name)
+        def loadableOrNone(strip: Boolean) = {
+          def suffix(strip: Boolean)(x: String) =
+            (if (strip && (x endsWith "$")) x.init else x) + sufx
+          val res = tran map (suffix(strip) _)
+          if (res.isDefined && loadable(res.get)) res else None
+        }
+        // try loading translated+suffix
+        val res = loadableOrNone(strip = false)
+        // some synthetics lack a dollar (e.g., suffix = delayedInit$body),
+        // so as a hack, if prefix$$suffix fails, also try prefix$suffix
+        if (res.isDefined) res else loadableOrNone(strip = true)
+      } else None
+    }
+    val p = path.asClassName   // scrub any suffix
+    // if repl, translate the name to something replish
+    // (for translate, would be nicer to get the sym and ask .isClass,
+    // instead of translatePath and then asking did I get a class back)
+    val q = if (intp.isEmpty) p else (
+      // only simple names get the scope treatment
+      Some(p) filter (_ contains '.')
+      // take path as a Name in scope
+      orElse (intp flatMap (_ translatePath p) filter loadable)
+      // take path as a Name in scope and find its enclosing class
+      orElse (intp flatMap (_ translateEnclosingClass p) filter loadable)
+      // take path as a synthetic derived from some Name in scope
+      orElse desynthesize(p)
+      // just try it plain
+      getOrElse p
+    )
+    load(q)
+  }
+
+  /** Base class for javap tool adapters for java 6 and 7. */
+  abstract class JavapTool {
+    type ByteAry = Array[Byte]
+    type Input = Tuple2[String, Try[ByteAry]]
+
+    /** Run the tool. */
+    def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult]
+
+    // Since the tool is loaded by reflection, check for catastrophic failure.
+    protected def failed: Boolean
+    implicit protected class Failer[A](a: =>A) {
+      def orFailed[B >: A](b: =>B) = if (failed) b else a
+    }
+    protected def noToolError = new JpError(s"No javap tool available: ${getClass.getName} failed to initialize.")
+
+    // output filtering support
+    val writer = new CharArrayWriter
+    def written = {
+      writer.flush()
+      val w = writer.toString
+      writer.reset()
+      w
+    }
+
+    /** Create a Showable with output massage.
+     *  @param raw show ugly repl names
+     *  @param target attempt to filter output to show region of interest
+     *  @param preamble other messages to output
+     */
+    def showWithPreamble(raw: Boolean, target: String, preamble: String = ""): Showable = new Showable {
+      // ReplStrippingWriter clips and scrubs on write(String)
+      // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping
+      def show() =
+        if (raw && intp.isDefined) intp.get withoutUnwrapping { writeLines() }
+        else writeLines()
+      private def writeLines() {
+        // take Foo# as Foo#apply for purposes of filtering. Useful for -fun Foo#;
+        // if apply is added here, it's for other than -fun: javap Foo#, perhaps m#?
+        val filterOn = target.splitHashMember._2 map { s => if (s.isEmpty) "apply" else s }
+        var filtering = false   // true if in region matching filter
+        // true to output
+        def checkFilter(line: String) = if (filterOn.isEmpty) true else {
+          // cheap heuristic, todo maybe parse for the java sig.
+          // method sigs end in paren semi
+          def isAnyMethod = line.endsWith(");")
+          def isOurMethod = {
+            val lparen = line.lastIndexOf('(')
+            val blank = line.lastIndexOf(' ', lparen)
+            (blank >= 0 && line.substring(blank+1, lparen) == filterOn.get)
+          }
+          filtering = if (filtering) {
+            // next blank line terminates section
+            // for -public, next line is next method, more or less
+            line.trim.nonEmpty && !isAnyMethod
+          } else {
+            isAnyMethod && isOurMethod
+          }
+          filtering
+        }
+        for (line <- Source.fromString(preamble + written).getLines(); if checkFilter(line))
+          printWriter write line+lineSeparator
+        printWriter.flush()
+      }
+    }
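+
+    // Editorial illustration (not part of this commit): with target "Foo#bar" the filter
+    // above starts emitting at a signature line such as
+    //   public int bar(int);
+    // and stops at the next blank line or signature, so `:javap Foo#bar` shows one member.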
+  }
+
+  class JavapTool6 extends JavapTool {
+    import JavapTool._
+    val EnvClass     = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
+    val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull
+    override protected def failed = (EnvClass eq null) || (PrinterClass eq null)
+
+    val PrinterCtr = PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass) orFailed null
+    val printWrapper = new PrintWriter(writer)
+    def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter =
+      PrinterCtr.newInstance(in, printWrapper, env) orFailed null
+    def showable(raw: Boolean, target: String, fp: FakePrinter): Showable = {
+      fp.asInstanceOf[{ def print(): Unit }].print()      // run tool and flush to buffer
+      printWrapper.flush()  // just in case
+      showWithPreamble(raw, target)
+    }
+
+    lazy val parser = new JpOptions
+    def newEnv(opts: Seq[String]): FakeEnvironment = {
+      def result = {
+        val env: FakeEnvironment = EnvClass.newInstance()
+        parser(opts) foreach { case (name, value) =>
+          val field = EnvClass getDeclaredField name
+          field setAccessible true
+          field.set(env, value.asInstanceOf[AnyRef])
+        }
+        env
+      }
+      result orFailed null
+    }
+
+    override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] =
+      (inputs map {
+        case (claas, Success(ba)) => JpResult(showable(raw, claas, newPrinter(new ByteArrayInputStream(ba), newEnv(options))))
+        case (_, Failure(e))      => JpResult(e.toString)
+      }).toList orFailed List(noToolError)
+  }
+
+  class JavapTool7 extends JavapTool {
+    type Task = {
+      def call(): Boolean                             // true = ok
+      //def run(args: Array[String]): Int             // all args
+      //def handleOptions(args: Array[String]): Unit  // options, then run() or call()
+    }
+    // result of Task.run
+    //object TaskResult extends Enumeration {
+    //  val Ok, Error, CmdErr, SysErr, Abnormal = Value
+    //}
+    val TaskClaas = loader.tryToInitializeClass[Task](JavapTool.Tool).orNull
+    override protected def failed = TaskClaas eq null
+
+    val TaskCtor  = TaskClaas.getConstructor(
+      classOf[Writer],
+      classOf[JavaFileManager],
+      classOf[DiagnosticListener[_]],
+      classOf[JIterable[String]],
+      classOf[JIterable[String]]
+    ) orFailed null
+
+    class JavaReporter extends DiagnosticListener[JavaFileObject] with Clearable {
+      import scala.collection.mutable.{ ArrayBuffer, SynchronizedBuffer }
+      type D = Diagnostic[_ <: JavaFileObject]
+      val diagnostics = new ConcurrentLinkedQueue[D]
+      override def report(d: Diagnostic[_ <: JavaFileObject]) {
+        diagnostics add d
+      }
+      override def clear() = diagnostics.clear()
+      /** All diagnostic messages.
+       *  @param locale Locale for diagnostic messages, null by default.
+       */
+      def messages(implicit locale: Locale = null) = {
+        import JavaConverters._
+        diagnostics.asScala.map(_ getMessage locale).toList
+      }
+
+      def reportable(raw: Boolean): String = {
+        // don't filter this message if raw, since the names are likely to differ
+        val container = "Binary file .* contains .*".r
+        val m = if (raw) messages
+                else messages filter (_ match { case container() => false case _ => true })
+        clear()
+        if (m.nonEmpty) m mkString ("", lineSeparator, lineSeparator)
+        else ""
+      }
+    }
+    val reporter = new JavaReporter
+
+    // DisassemblerTool.getStandardFileManager(reporter,locale,charset)
+    val defaultFileManager: JavaFileManager =
+      (loader.tryToLoadClass[JavaFileManager]("com.sun.tools.javap.JavapFileManager").get getMethod (
+        "create",
+        classOf[DiagnosticListener[_]],
+        classOf[PrintWriter]
+      ) invoke (null, reporter, new PrintWriter(System.err, true))).asInstanceOf[JavaFileManager] orFailed null
+
+    // manages named arrays of bytes, which might have failed to load
+    class JavapFileManager(val managed: Seq[Input])(delegate: JavaFileManager = defaultFileManager)
+      extends ForwardingJavaFileManager[JavaFileManager](delegate) {
+      import JavaFileObject.Kind
+      import Kind._
+      import StandardLocation._
+      import JavaFileManager.Location
+      import java.net.URI
+      def uri(name: String): URI = new URI(name) // new URI("jfo:" + name)
+
+      def inputNamed(name: String): Try[ByteAry] = (managed find (_._1 == name)).get._2
+      def managedFile(name: String, kind: Kind) = kind match {
+        case CLASS  => fileObjectForInput(name, inputNamed(name), kind)
+        case _      => null
+      }
+      // todo: just wrap it as scala abstractfile and adapt it uniformly
+      def fileObjectForInput(name: String, bytes: Try[ByteAry], kind: Kind): JavaFileObject =
+        new SimpleJavaFileObject(uri(name), kind) {
+          override def openInputStream(): InputStream = new ByteArrayInputStream(bytes.get)
+          // if non-null, ClassWriter wrongly requires scheme non-null
+          override def toUri: URI = null
+          override def getName: String = name
+          // suppress
+          override def getLastModified: Long = -1L
+        }
+      override def getJavaFileForInput(location: Location, className: String, kind: Kind): JavaFileObject =
+        location match {
+          case CLASS_PATH => managedFile(className, kind)
+          case _          => null
+        }
+      override def hasLocation(location: Location): Boolean =
+        location match {
+          case CLASS_PATH => true
+          case _          => false
+        }
+    }
+    def fileManager(inputs: Seq[Input]) = new JavapFileManager(inputs)()
+
+    // show tool messages and tool output, with output massage
+    def showable(raw: Boolean, target: String): Showable = showWithPreamble(raw, target, reporter.reportable(raw))
+
+    // eventually, use the tool interface
+    def task(options: Seq[String], claases: Seq[String], inputs: Seq[Input]): Task = {
+      //ServiceLoader.load(classOf[javax.tools.DisassemblerTool]).
+      //getTask(writer, fileManager, reporter, options.asJava, claases.asJava)
+      import JavaConverters.asJavaIterableConverter
+      TaskCtor.newInstance(writer, fileManager(inputs), reporter, options.asJava, claases.asJava)
+        .orFailed (throw new IllegalStateException)
+    }
+    // a result per input
+    private def applyOne(raw: Boolean, options: Seq[String], claas: String, inputs: Seq[Input]): Try[JpResult] =
+      Try {
+        task(options, Seq(claas), inputs).call()
+      } map {
+        case true => JpResult(showable(raw, claas))
+        case _    => JpResult(reporter.reportable(raw))
+      } recoverWith {
+        case e: java.lang.reflect.InvocationTargetException => e.getCause match {
+          case t: IllegalArgumentException => Success(JpResult(t.getMessage)) // bad option
+          case x => Failure(x)
+        }
+      } lastly {
+        reporter.clear()
+      }
+    override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map {
+      case (claas, Success(_))  => applyOne(raw, options, claas, inputs).get
+      case (_, Failure(e))      => JpResult(e.toString)
+    }).toList orFailed List(noToolError)
+  }
+
+  object JavapTool {
+    // >= 1.7
+    val Tool    = "com.sun.tools.javap.JavapTask"
+
+    // < 1.7
+    val Env     = "sun.tools.javap.JavapEnvironment"
+    val Printer = "sun.tools.javap.JavapPrinter"
+    // "documentation"
+    type FakeEnvironment = AnyRef
+    type FakePrinter = AnyRef
+
+    // support JavapEnvironment
+    class JpOptions {
+      private object Access {
+        final val PRIVATE = 0
+        final val PROTECTED = 1
+        final val PACKAGE = 2
+        final val PUBLIC = 3
+      }
+      private val envActionMap: Map[String, (String, Any)] = {
+        val map = Map(
+          "-l"         -> (("showLineAndLocal", true)),
+          "-c"         -> (("showDisassembled", true)),
+          "-s"         -> (("showInternalSigs", true)),
+          "-verbose"   -> (("showVerbose", true)),
+          "-private"   -> (("showAccess", Access.PRIVATE)),
+          "-package"   -> (("showAccess", Access.PACKAGE)),
+          "-protected" -> (("showAccess", Access.PROTECTED)),
+          "-public"    -> (("showAccess", Access.PUBLIC)),
+          "-all"       -> (("showallAttr", true))
+        )
+        map ++ List(
+          "-v" -> map("-verbose"),
+          "-p" -> map("-private")
+        )
+      }
+      def apply(opts: Seq[String]): Seq[(String, Any)] = {
+        opts flatMap { opt =>
+          envActionMap get opt match {
+            case Some(pair) => List(pair)
+            case _          =>
+              val charOpts = opt.tail.toSeq map ("-" + _)
+              if (charOpts forall (envActionMap contains _))
+                charOpts map envActionMap
+              else Nil
+          }
+        }
+      }
+    }
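+
+    // Editorial examples (not in upstream) of the option translation above:
+    //   apply(Seq("-private"))  == Seq(("showAccess", Access.PRIVATE))
+    //   apply(Seq("-lp"))       == Seq(("showLineAndLocal", true), ("showAccess", Access.PRIVATE))
+    //   apply(Seq("-bogus"))    == Seq()   // any unknown char drops the whole option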
+
+    case class ToolArgs(raw: Boolean = false, help: Boolean = false, app: Boolean = false, fun: Boolean = false)
+
+    object ToolArgs {
+      def fromArgs(args: Seq[String]): (ToolArgs, Seq[String]) = ((ToolArgs(), Seq[String]()) /: (args flatMap massage)) {
+        case ((t,others), s) => s match {
+          case "-fun"   => (t copy (fun=true), others)
+          case "-app"   => (t copy (app=true), others)
+          case "-help"  => (t copy (help=true), others)
+          case "-raw"   => (t copy (raw=true), others)
+          case _        => (t, others :+ s)
+        }
+      }
+    }
+
+    val helps = List(
+      "usage"       -> ":javap [opts] [path or class or -]...",
+      "-help"       -> "Prints this help message",
+      "-raw"        -> "Don't unmangle REPL names",
+      "-app"        -> "Show the DelayedInit body of Apps",
+      "-fun"        -> "Show anonfuns for class or Class#method",
+      "-verbose/-v" -> "Stack size, number of locals, method args",
+      "-private/-p" -> "Private classes and members",
+      "-package"    -> "Package-private classes and members",
+      "-protected"  -> "Protected classes and members",
+      "-public"     -> "Public classes and members",
+      "-l"          -> "Line and local variable tables",
+      "-c"          -> "Disassembled code",
+      "-s"          -> "Internal type signatures",
+      "-sysinfo"    -> "System info of class",
+      "-constants"  -> "Static final constants"
+    )
+
+    // match prefixes and unpack opts, or -help on failure
+    def massage(arg: String): Seq[String] = {
+      require(arg startsWith "-")
+      // arg matches opt "-foo/-f" if prefix of -foo or exactly -f
+      val r = """(-[^/]*)(/(-.))?""".r
+      def maybe(opt: String, s: String): Option[String] = opt match {
+        // disambiguate by preferring short form
+        case r(lf,_,sf) if s == sf          => Some(sf)
+        case r(lf,_,sf) if lf startsWith s  => Some(lf)
+        case _ => None
+      }
+      def candidates(s: String) = (helps map (h => maybe(h._1, s))).flatten
+      // one candidate or one single-char candidate
+      def uniqueOf(maybes: Seq[String]) = {
+        def single(s: String) = s.length == 2
+        if (maybes.length == 1) maybes
+        else if ((maybes count single) == 1) maybes filter single
+        else Nil
+      }
+      // each optchar must decode to exactly one option
+      def unpacked(s: String): Try[Seq[String]] = {
+        val ones = (s drop 1) map { c =>
+          val maybes = uniqueOf(candidates(s"-$c"))
+          if (maybes.length == 1) Some(maybes.head) else None
+        }
+        Try(ones) filter (_ forall (_.isDefined)) map (_.flatten)
+      }
+      val res = uniqueOf(candidates(arg))
+      if (res.nonEmpty) res
+      else (unpacked(arg)
+        getOrElse (Seq("-help"))) // or else someone needs help
+    }
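+
+    // Editorial examples (not in upstream) of the prefix matching and unpacking above:
+    //   massage("-prot") == Seq("-protected")   // unique long-form prefix
+    //   massage("-cv")   == Seq("-c", "-v")     // packed single-char options
+    //   massage("-junk") == Seq("-help")        // unresolvable, so ask for help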
+
+    def helper(pw: PrintWriter) = new Showable {
+      def show() = helps foreach (p => pw write "%-12.12s%s%n".format(p._1,p._2))
+    }
+
+    val DefaultOptions = List("-protected", "-verbose")
+
+    def isAvailable = Seq(Env, Tool) exists (cn => hasClass(loader, cn))
+
+    private def hasClass(cl: ScalaClassLoader, cn: String) = cl.tryToInitializeClass[AnyRef](cn).isDefined
+
+    private def isTaskable(cl: ScalaClassLoader) = hasClass(cl, Tool)
+
+    def apply() = if (isTaskable(loader)) new JavapTool7 else new JavapTool6
+  }
+}
+
+object JavapClass {
+  def apply(
+    loader: ScalaClassLoader = ScalaClassLoader.appLoader,
+    printWriter: PrintWriter = new PrintWriter(System.out, true),
+    intp: Option[IMain] = None
+  ) = new JavapClass(loader, printWriter, intp)
+
+  val HashSplit = "(.*?)(?:#([^#]*))?".r
+  // We enjoy flexibility in specifying either a fully-qualified class name com.acme.Widget
+  // or a resource path com/acme/Widget.class; but not widget.out
+  implicit class MaybeClassLike(val s: String) extends AnyVal {
+    /* private[this] final val suffix = ".class" */
+    private def suffix = ".class"
+    def asClassName = (s stripSuffix suffix).replace('/', '.')
+    def asClassResource = if (s endsWith suffix) s else s.replace('.', '/') + suffix
+    def splitSuffix: (String, String) = if (s endsWith suffix) (s dropRight suffix.length, suffix) else (s, "")
+    def strippingSuffix(f: String => String): String =
+      if (s endsWith suffix) f(s dropRight suffix.length) else s
+    // e.g. Foo#bar. Foo# yields zero-length member part.
+    def splitHashMember: (String, Option[String]) = {
+      val i = s lastIndexOf '#'
+      if (i < 0) (s, None)
+      //else if (i >= s.length - 1) (s.init, None)
+      else (s take i, Some(s drop i+1))
+    }
+  }
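+  // Editorial examples (not in upstream) of the conversions defined above:
+  //   "com/acme/Widget.class".asClassName == "com.acme.Widget"
+  //   "com.acme.Widget".asClassResource   == "com/acme/Widget.class"
+  //   "Foo#bar".splitHashMember           == ("Foo", Some("bar"))
+  //   "Foo#".splitHashMember              == ("Foo", Some(""))
+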
+  implicit class ClassLoaderOps(val cl: ClassLoader) extends AnyVal {
+    private def parentsOf(x: ClassLoader): List[ClassLoader] = if (x == null) Nil else x :: parentsOf(x.getParent)
+    def parents: List[ClassLoader] = parentsOf(cl)
+    /* all file locations */
+    def locations = {
+      def alldirs = parents flatMap (_ match {
+        case ucl: ScalaClassLoader.URLClassLoader => ucl.classPathURLs
+        case jcl: java.net.URLClassLoader         => jcl.getURLs
+        case _ => Nil
+      })
+      val dirs = for (d <- alldirs; if d.getProtocol == "file") yield Path(new JFile(d.toURI))
+      dirs
+    }
+    /* only the file location from which the given class is loaded */
+    def locate(k: String): Option[Path] = {
+      Try {
+        val claas = try cl loadClass k catch {
+          case _: NoClassDefFoundError => null    // let it snow
+        }
+        // cf ScalaClassLoader.originOfClass
+        claas.getProtectionDomain.getCodeSource.getLocation
+      } match {
+        case Success(null)              => None
+        case Success(loc) if loc.isFile => Some(Path(new JFile(loc.toURI)))
+        case _                          => None
+      }
+    }
+    /* would classBytes succeed with a nonempty array */
+    def resourceable(className: String): Boolean = cl.getResource(className.asClassResource) != null
+  }
+  implicit class PathOps(val p: Path) extends AnyVal {
+    import scala.tools.nsc.io.Jar
+    def isJar = Jar isJarOrZip p
+  }
+  implicit class URLOps(val url: URL) extends AnyVal {
+    def isFile: Boolean = url.getProtocol == "file"
+  }
+  object FunFinder {
+    def apply(loader: ScalaClassLoader, intp: Option[IMain]) = new FunFinder(loader, intp)
+  }
+  class FunFinder(loader: ScalaClassLoader, intp: Option[IMain]) {
+
+    // class k, candidate f without prefix
+    def isFunOfClass(k: String, f: String) = {
+      val p = (s"${Regex quote k}\\$$+anonfun").r
+      (p findPrefixOf f).nonEmpty
+    }
+    // class k, candidate f without prefix, method m
+    def isFunOfMethod(k: String, m: String, f: String) = {
+      val p = (s"${Regex quote k}\\$$+anonfun\\$$${Regex quote m}\\$$").r
+      (p findPrefixOf f).nonEmpty
+    }
+    def isFunOfTarget(k: String, m: Option[String], f: String) =
+      if (m.isEmpty) isFunOfClass(k, f)
+      else isFunOfMethod(k, m.get, f)
+    def listFunsInAbsFile(k: String, m: Option[String], d: AbstractFile) = {
+      for (f <- d; if !f.isDirectory && isFunOfTarget(k, m, f.name)) yield f.name
+    }
+    // path prefix p, class k, dir d
+    def listFunsInDir(p: String, k: String, m: Option[String])(d: Directory) = {
+      val subdir  = Path(p)
+      for (f <- (d / subdir).toDirectory.list; if f.isFile && isFunOfTarget(k, m, f.name))
+        yield f.name
+    }
+    // path prefix p, class k, jar file f
+    def listFunsInJar(p: String, k: String, m: Option[String])(f: File) = {
+      import java.util.jar.JarEntry
+      import scala.tools.nsc.io.Jar
+      def maybe(e: JarEntry) = {
+        val (path, name) = {
+          val parts = e.getName split "/"
+          if (parts.length < 2) ("", e.getName)
+          else (parts.init mkString "/", parts.last)
+        }
+        if (path == p && isFunOfTarget(k, m, name)) Some(name) else None
+      }
+      (new Jar(f) map maybe).flatten
+    }
+    def loadable(name: String) = loader resourceable name
+    // translated class, optional member, opt member to filter on, whether it is repl output
+    def translate(s: String): (String, Option[String], Option[String], Boolean) = {
+      val (k0, m0) = s.splitHashMember
+      val k = k0.asClassName
+      val member = m0 filter (_.nonEmpty)  // take Foo# as no member, not ""
+      val filter = m0 flatMap { case "" => Some("apply") case _ => None }   // take Foo# as filter on apply
+      // class is either something replish or available to loader
+      // $line.$read$$etc$Foo#member
+      ((intp flatMap (_ translatePath k) filter (loadable) map ((_, member, filter, true)))
+      // s = "f" and $line.$read$$etc$#f is what we're after,
+      // ignoring any #member (except take # as filter on #apply)
+      orElse (intp flatMap (_ translateEnclosingClass k) map ((_, Some(k), filter, true)))
+      getOrElse ((k, member, filter, false)))
+    }
+    /** Find the classnames of anonfuns associated with k,
+     *  where k may be an available class or a symbol in scope.
+     */
+    def funsOf(k0: String): Seq[String] = {
+      // class is either something replish or available to loader
+      val (k, member, filter, isReplish) = translate(k0)
+      val splat   = k split "\\."
+      val name    = splat.last
+      val prefix  = if (splat.length > 1) splat.init mkString "/" else ""
+      val pkg     = if (splat.length > 1) splat.init mkString "." else ""
+      // reconstitute an anonfun with a package
+      // if filtered, add the hash back, e.g. pkg.Foo#bar, pkg.Foo$anon$1#apply
+      def packaged(s: String) = {
+        val p = if (pkg.isEmpty) s else s"$pkg.$s"
+        val pm = filter map (p + "#" + _)
+        pm getOrElse p
+      }
+      // is this translated path in (usually virtual) repl outdir? or loadable from filesystem?
+      val fs = if (isReplish) {
+        def outed(d: AbstractFile, p: Seq[String]): Option[AbstractFile] = {
+          if (p.isEmpty) Option(d)
+          else Option(d.lookupName(p.head, directory = true)) flatMap (f => outed(f, p.tail))
+        }
+        outed(intp.get.replOutput.dir, splat.init) map { d =>
+          listFunsInAbsFile(name, member, d) map packaged
+        }
+      } else {
+        loader locate k map { w =>
+          if (w.isDirectory) listFunsInDir(prefix, name, member)(w.toDirectory) map packaged
+          else if (w.isJar) listFunsInJar(prefix, name, member)(w.toFile) map packaged
+          else Nil
+        }
+      }
+      fs match {
+        case Some(xs) => xs.to[Seq]     // maybe empty
+        case None     => Seq()          // nothing found, e.g., junk input
+      }
+    }
+    def funs(ks: Seq[String]) = ks flatMap funsOf _
+  }
+}
+
+object Javap {
+  def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = JavapClass(cl).JavapTool.isAvailable
+
+  def apply(path: String): Unit      = apply(Seq(path))
+  def apply(args: Seq[String]): Unit = JavapClass() apply args foreach (_.show())
+
+  trait Showable {
+    def show(): Unit
+  }
+
+  sealed trait JpResult extends scala.tools.util.JpResult {
+    type ResultType
+    def isError: Boolean
+    def value: ResultType
+    def show(): Unit
+    // todo
+    // def header(): String
+    // def fields(): List[String]
+    // def methods(): List[String]
+    // def signatures(): List[String]
+  }
+  object JpResult {
+    def apply(msg: String)    = new JpError(msg)
+    def apply(res: Showable)  = new JpSuccess(res)
+  }
+  class JpError(msg: String) extends JpResult {
+    type ResultType = String
+    def isError = true
+    def value = msg
+    def show() = println(msg)   // makes sense for :javap, less for -Ygen-javap
+  }
+  class JpSuccess(val value: Showable) extends JpResult {
+    type ResultType = AnyRef
+    def isError = false
+    def show() = value.show()   // output to tool's PrintWriter
+  }
+  implicit class Lastly[A](val t: Try[A]) extends AnyVal {
+    private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t }
+    def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _)
+  }
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/Logger.scala b/src/repl/scala/tools/nsc/interpreter/Logger.scala
new file mode 100644
index 0000000..7407daf
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/Logger.scala
@@ -0,0 +1,14 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+trait Logger {
+  def isInfo: Boolean
+  def isDebug: Boolean
+  def isTrace: Boolean
+  def out: JPrintWriter
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
new file mode 100644
index 0000000..12d6ee5
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
@@ -0,0 +1,88 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala
+package tools
+package nsc
+package interpreter
+
+import scala.collection.{ mutable, immutable }
+import mutable.ListBuffer
+import scala.language.implicitConversions
+
+class ProcessResult(val line: String) {
+  import scala.sys.process._
+  private val buffer = new ListBuffer[String]
+
+  val builder  = Process(line)
+  val logger   = ProcessLogger(buffer += _)
+  val exitCode = builder ! logger
+  def lines    = buffer.toList
+
+  override def toString = "`%s` (%d lines, exit %d)".format(line, buffer.size, exitCode)
+}
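+
+// Illustrative use (editorial note, not part of this commit): shelling out from a REPL
+// command, e.g. new ProcessResult("ls -1"), exposes the captured output via `lines`
+// and the process status via `exitCode`.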
+
+trait LoopCommands {
+  protected def out: JPrintWriter
+
+  // So outputs can be suppressed.
+  def echoCommandMessage(msg: String): Unit = out println msg
+
+  // a single interpreter command
+  abstract class LoopCommand(val name: String, val help: String) extends (String => Result) {
+    def usage: String = ""
+    def usageMsg: String = ":" + name + (
+      if (usage == "") "" else " " + usage
+    )
+    def apply(line: String): Result
+
+    // called if no args are given
+    def showUsage(): Result = {
+      "usage is " + usageMsg
+      Result(keepRunning = true, None)
+    }
+  }
+  object LoopCommand {
+    def nullary(name: String, help: String, f: () => Result): LoopCommand =
+      new NullaryCmd(name, help, _ => f())
+
+    def cmd(name: String, usage: String, help: String, f: String => Result): LoopCommand =
+      if (usage == "") new NullaryCmd(name, help, f)
+      else new LineCmd(name, usage, help, f)
+  }
+
+  class NullaryCmd(name: String, help: String, f: String => Result) extends LoopCommand(name, help) {
+    def apply(line: String): Result = f(line)
+  }
+
+  class LineCmd(name: String, argWord: String, help: String, f: String => Result) extends LoopCommand(name, help) {
+    override def usage = argWord
+    def apply(line: String): Result = f(line)
+  }
+
+  class VarArgsCmd(name: String, argWord: String, help: String, f: List[String] => Result)
+            extends LoopCommand(name, help) {
+    override def usage = argWord
+    def apply(line: String): Result = apply(words(line))
+    def apply(args: List[String]) = f(args)
+  }
+
+  // the result of a single command
+  case class Result(keepRunning: Boolean, lineToRecord: Option[String])
+
+  object Result {
+    // the default result means "keep running, and don't record that line"
+    val default = Result(keepRunning = true, None)
+
+    // most commands do not want to micromanage the Result, but they might want
+    // to print something to the console, so we accommodate Unit and String returns.
+    implicit def resultFromUnit(x: Unit): Result = default
+    implicit def resultFromString(msg: String): Result = {
+      echoCommandMessage(msg)
+      default
+    }
+  }
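+
+  // Editorial note (not in upstream): thanks to the implicits above, a command body may
+  // simply return a String, e.g.
+  //   def apply(line: String): Result = "unknown option: " + line
+  // which echoes the message and keeps the loop running with the default Result.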
+}
+
diff --git a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
new file mode 100644
index 0000000..f4cbcb5
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -0,0 +1,220 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import scala.collection.{ mutable, immutable }
+import scala.language.implicitConversions
+
+trait MemberHandlers {
+  val intp: IMain
+
+  import intp.{ Request, global, naming }
+  import global._
+  import naming._
+
+  private def codegenln(leadingPlus: Boolean, xs: String*): String = codegen(leadingPlus, (xs ++ Array("\n")): _*)
+  private def codegenln(xs: String*): String = codegenln(true, xs: _*)
+  private def codegen(leadingPlus: Boolean, xs: String*): String = {
+    val front = if (leadingPlus) "+ " else ""
+    front + (xs map string2codeQuoted mkString " + ")
+  }
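+  // Editorial note (not in upstream): codegenln quotes its arguments as string literals for
+  // splicing into generated extraction code, e.g. codegenln("defined object ", "Foo") yields
+  // roughly  + "defined object " + "Foo" + "\n"  (assuming string2codeQuoted wraps each
+  // piece in escaped double quotes).
+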
+  private implicit def name2string(name: Name) = name.toString
+
+  /** A traverser that finds all mentioned identifiers, i.e. things
+   *  that need to be imported.  It might return extra names.
+   */
+  private class ImportVarsTraverser extends Traverser {
+    val importVars = new mutable.HashSet[Name]()
+
+    override def traverse(ast: Tree) = ast match {
+      case Ident(name) =>
+        // XXX this is obviously inadequate but it's going to require some effort
+        // to get right.
+        if (name.toString startsWith "x$") ()
+        else importVars += name
+      case _        => super.traverse(ast)
+    }
+  }
+  private object ImportVarsTraverser {
+    def apply(member: Tree) = {
+      val ivt = new ImportVarsTraverser()
+      ivt traverse member
+      ivt.importVars.toList
+    }
+  }
+
+  private def isTermMacro(ddef: DefDef): Boolean = ddef.mods.isMacro
+
+  def chooseHandler(member: Tree): MemberHandler = member match {
+    case member: DefDef if isTermMacro(member) => new TermMacroHandler(member)
+    case member: DefDef                        => new DefHandler(member)
+    case member: ValDef                        => new ValHandler(member)
+    case member: ModuleDef                     => new ModuleHandler(member)
+    case member: ClassDef                      => new ClassHandler(member)
+    case member: TypeDef                       => new TypeAliasHandler(member)
+    case member: Assign                        => new AssignHandler(member)
+    case member: Import                        => new ImportHandler(member)
+    case DocDef(_, documented)                 => chooseHandler(documented)
+    case member                                => new GenericHandler(member)
+  }
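+
+  // Editorial examples (not in upstream) of the dispatch above:
+  //   "val x = 1"            -> ValHandler
+  //   "def f(i: Int) = i"    -> DefHandler
+  //   "import scala.util._"  -> ImportHandler
+  //   "x = 2"                -> AssignHandler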
+
+  sealed abstract class MemberDefHandler(override val member: MemberDef) extends MemberHandler(member) {
+    override def name: Name = member.name
+    def mods: Modifiers     = member.mods
+    def keyword             = member.keyword
+    def prettyName          = name.decode
+
+    override def definesImplicit = member.mods.isImplicit
+    override def definesTerm: Option[TermName] = Some(name.toTermName) filter (_ => name.isTermName)
+    override def definesType: Option[TypeName] = Some(name.toTypeName) filter (_ => name.isTypeName)
+    override def definedSymbols = if (symbol.exists) symbol :: Nil else Nil
+  }
+
+  /** Class to handle one member among all the members included
+   *  in a single interpreter request.
+   */
+  sealed abstract class MemberHandler(val member: Tree) {
+    def name: Name      = nme.NO_NAME
+    def path            = intp.originalPath(symbol)
+    def symbol          = if (member.symbol eq null) NoSymbol else member.symbol
+    def definesImplicit = false
+    def definesValue    = false
+
+    def definesTerm     = Option.empty[TermName]
+    def definesType     = Option.empty[TypeName]
+
+    private lazy val _referencedNames = ImportVarsTraverser(member)
+    def referencedNames = _referencedNames
+    def importedNames   = List[Name]()
+    def definedNames    = definesTerm.toList ++ definesType.toList
+    def definedSymbols  = List[Symbol]()
+
+    def extraCodeToEvaluate(req: Request): String = ""
+    def resultExtractionCode(req: Request): String = ""
+
+    private def shortName = this.getClass.toString split '.' last
+    override def toString = shortName + referencedNames.mkString(" (refs: ", ", ", ")")
+  }
+
+  class GenericHandler(member: Tree) extends MemberHandler(member)
+
+  class ValHandler(member: ValDef) extends MemberDefHandler(member) {
+    val maxStringElements = 1000  // no need to mkString billions of elements
+    override def definesValue = true
+
+    override def resultExtractionCode(req: Request): String = {
+      val isInternal = isUserVarName(name) && req.lookupTypeOf(name) == "Unit"
+      if (!mods.isPublic || isInternal) ""
+      else {
+        // if this is a lazy val we avoid evaluating it here
+        val resultString =
+          if (mods.isLazy) codegenln(false, "<lazy>")
+          else any2stringOf(path, maxStringElements)
+
+        val vidString =
+          if (replProps.vids) s"""" + " @ " + "%%8x".format(System.identityHashCode($path)) + " """.trim
+          else ""
+
+        """ + "%s%s: %s = " + %s""".format(string2code(prettyName), vidString, string2code(req typeOf name), resultString)
+      }
+    }
+  }
+
+  class DefHandler(member: DefDef) extends MemberDefHandler(member) {
+    override def definesValue = flattensToEmpty(member.vparamss) // true if 0-arity
+    override def resultExtractionCode(req: Request) =
+      if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
+  }
+
+  abstract class MacroHandler(member: DefDef) extends MemberDefHandler(member) {
+    override def referencedNames = super.referencedNames.flatMap(name => List(name.toTermName, name.toTypeName))
+    override def definesValue = false
+    override def definesTerm: Option[TermName] = Some(name.toTermName)
+    override def definesType: Option[TypeName] = None
+    override def resultExtractionCode(req: Request) = if (mods.isPublic) codegenln(notification(req)) else ""
+    def notification(req: Request): String
+  }
+
+  class TermMacroHandler(member: DefDef) extends MacroHandler(member) {
+    def notification(req: Request) = s"defined term macro $name: ${req.typeOf(name)}"
+  }
+
+  class AssignHandler(member: Assign) extends MemberHandler(member) {
+    val Assign(lhs, rhs) = member
+    override lazy val name = newTermName(freshInternalVarName())
+
+    override def definesTerm = Some(name)
+    override def definesValue = true
+    override def extraCodeToEvaluate(req: Request) =
+      """val %s = %s""".format(name, lhs)
+
+    /** Print out lhs instead of the generated varName */
+    override def resultExtractionCode(req: Request) = {
+      val lhsType = string2code(req lookupTypeOf name)
+      val res     = string2code(req fullPath name)
+      """ + "%s: %s = " + %s + "\n" """.format(string2code(lhs.toString), lhsType, res) + "\n"
+    }
+  }
+
+  class ModuleHandler(module: ModuleDef) extends MemberDefHandler(module) {
+    override def definesTerm = Some(name.toTermName)
+    override def definesValue = true
+
+    override def resultExtractionCode(req: Request) = codegenln("defined object ", name)
+  }
+
+  class ClassHandler(member: ClassDef) extends MemberDefHandler(member) {
+    override def definedSymbols = List(symbol, symbol.companionSymbol) filterNot (_ == NoSymbol)
+    override def definesType = Some(name.toTypeName)
+    override def definesTerm = Some(name.toTermName) filter (_ => mods.isCase)
+
+    override def resultExtractionCode(req: Request) =
+      codegenln("defined %s %s".format(keyword, name))
+  }
+
+  class TypeAliasHandler(member: TypeDef) extends MemberDefHandler(member) {
+    private def isAlias = mods.isPublic && treeInfo.isAliasTypeDef(member)
+    override def definesType = Some(name.toTypeName) filter (_ => isAlias)
+
+    override def resultExtractionCode(req: Request) =
+      codegenln("defined type alias ", name) + "\n"
+  }
+
+  class ImportHandler(imp: Import) extends MemberHandler(imp) {
+    val Import(expr, selectors) = imp
+    def targetType = intp.global.rootMirror.getModuleIfDefined("" + expr) match {
+      case NoSymbol => intp.typeOfExpression("" + expr)
+      case sym      => sym.thisType
+    }
+    private def importableTargetMembers = importableMembers(targetType).toList
+    // wildcard imports, e.g. import foo._
+    private def selectorWild    = selectors filter (_.name == nme.USCOREkw)
+    // renamed imports, e.g. import foo.{ bar => baz }
+    private def selectorRenames = selectors map (_.rename) filterNot (_ == null)
+
+    /** Whether this import includes a wildcard import */
+    val importsWildcard = selectorWild.nonEmpty
+
+    def implicitSymbols = importedSymbols filter (_.isImplicit)
+    def importedSymbols = individualSymbols ++ wildcardSymbols
+
+    private val selectorNames = selectorRenames filterNot (_ == nme.USCOREkw) flatMap (_.bothNames) toSet
+    lazy val individualSymbols: List[Symbol] = exitingTyper(importableTargetMembers filter (m => selectorNames(m.name)))
+    lazy val wildcardSymbols: List[Symbol]   = exitingTyper(if (importsWildcard) importableTargetMembers else Nil)
+
+    /** Complete list of names imported by a wildcard */
+    lazy val wildcardNames: List[Name]   = wildcardSymbols map (_.name)
+    lazy val individualNames: List[Name] = individualSymbols map (_.name)
+
+    /** The names imported by this statement */
+    override lazy val importedNames: List[Name] = wildcardNames ++ individualNames
+    lazy val importsSymbolNamed: Set[String] = importedNames map (_.toString) toSet
+
+    def importString = imp.toString
+    override def resultExtractionCode(req: Request) = codegenln(importString) + "\n"
+  }
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/NamedParam.scala b/src/repl/scala/tools/nsc/interpreter/NamedParam.scala
new file mode 100644
index 0000000..a0af729
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/NamedParam.scala
@@ -0,0 +1,46 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import NamedParam._
+import scala.language.implicitConversions
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.{ClassTag, classTag}
+import scala.tools.nsc.typechecker.{ TypeStrings }
+
+trait NamedParamCreator {
+  protected def freshName: () => String
+
+  def apply[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = new Typed[T](name, x)
+  def apply[T: ru.TypeTag : ClassTag](x: T): NamedParam = apply(freshName(), x)
+  def clazz(name: String, x: Any): NamedParam = new Untyped(name, x)
+
+  implicit def tuple[T: ru.TypeTag : ClassTag](pair: (String, T)): NamedParam       = apply(pair._1, pair._2)
+}
+
+object NamedParam extends NamedParamCreator {
+  class Typed[T: ru.TypeTag : ClassTag](val name: String, val value: T) extends NamedParam {
+    val tpe = TypeStrings.fromTag[T]
+  }
+  class Untyped(val name: String, val value: Any) extends NamedParam {
+    val tpe = TypeStrings.fromValue(value)
+  }
+
+  protected val freshName = {
+    var counter = 0
+    () => { counter += 1; "p" + counter }
+  }
+}
+
+case class NamedParamClass(name: String, tpe: String, value: Any) extends NamedParam { }
+
+trait NamedParam {
+  def name: String
+  def tpe: String
+  def value: Any
+  override def toString = name + ": " + tpe
+}
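+
+// Illustrative use (editorial sketch, not part of this commit), assuming an IMain
+// instance named repl:
+//   repl.bind(NamedParam("xs", List(1, 2, 3)))   // binds xs: List[Int] in the session
+//   val p: NamedParam = "n" -> 42                // via the tuple implicit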
diff --git a/src/repl/scala/tools/nsc/interpreter/Naming.scala b/src/repl/scala/tools/nsc/interpreter/Naming.scala
new file mode 100644
index 0000000..e09c6f3
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/Naming.scala
@@ -0,0 +1,105 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala
+package tools.nsc
+package interpreter
+
+import scala.util.Properties.lineSeparator
+import scala.util.matching.Regex
+
+/** This is for name logic which is independent of the compiler (notice there's no Global.)
+ *  That includes at least generating, metaquoting, mangling, and unmangling.
+ */
+trait Naming {
+  def unmangle(str: String): String = {
+    val ESC = '\u001b'
+    val cleaned = removeIWPackages(removeLineWrapper(str))
+    // Looking to exclude binary data which hoses the terminal, but
+    // let through the subset of it we need, like whitespace and also
+    // <ESC> for ansi codes.
+    val binaryChars = cleaned count (ch => ch < 32 && !ch.isWhitespace && ch != ESC)
+    // Lots of binary chars - translate all supposed whitespace into spaces
+    // except supposed line endings, otherwise scrubbed lines run together
+    if (binaryChars > 5)  // more than one can count while holding a hamburger
+      cleaned map {
+        case c if lineSeparator contains c  => c
+        case c if c.isWhitespace            => ' '
+        case c if c < 32                    => '?'
+        case c                              => c
+      }
+    // Not lots - preserve whitespace and ESC
+    else
+      cleaned map (ch => if (ch.isWhitespace || ch == ESC) ch else if (ch < 32) '?' else ch)
+  }
+
+  // The two name forms this is catching are the two sides of this assignment:
+  //
+  // $line3.$read.$iw.$iw.Bippy =
+  //   $line3.$read$$iw$$iw$Bippy at 4a6a00ca
+  lazy val lineRegex = {
+    val sn = sessionNames
+    val members = List(sn.read, sn.eval, sn.print) map Regex.quote mkString ("(?:", "|", ")")
+    debugging("lineRegex")(Regex.quote(sn.line) + """\d+[./]""" + members + """[$.]""")
+  }
+
+  private def removeLineWrapper(s: String) = s.replaceAll(lineRegex, "")
+  private def removeIWPackages(s: String)  = s.replaceAll("""\$iw[$.]""", "")
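+
+  // Editorial example (not in upstream), assuming the default session names:
+  //   unmangle("$line3.$read$$iw$$iw$Bippy@4a6a00ca")   // => "Bippy@4a6a00ca"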
+
+  trait SessionNames {
+    // All values are configurable by passing e.g. -Dscala.repl.name.read=XXX
+    final def propOr(name: String): String = propOr(name, "$" + name)
+    final def propOr(name: String, default: String): String =
+      sys.props.getOrElse("scala.repl.name." + name, default)
+
+    // Prefixes used in repl machinery.  Default to $line, $read, etc.
+    def line   = propOr("line")
+    def read   = propOr("read")
+    def eval   = propOr("eval")
+    def print  = propOr("print")
+    def result = propOr("result")
+
+    // The prefix for unnamed results: by default res0, res1, etc.
+    def res   = propOr("res", "res")  // INTERPRETER_VAR_PREFIX
+    // Internal ones
+    def ires  = propOr("ires")
+  }
+  lazy val sessionNames: SessionNames = new SessionNames { }
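+
+  // Editorial note (not in upstream): e.g. starting the REPL with -Dscala.repl.name.res=x
+  // makes unnamed results print as x0, x1, ... instead of res0, res1, ...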
+
+  /** Generates names pre0, pre1, etc. via calls to the apply method. */
+  class NameCreator(pre: String) {
+    private var x = -1
+    var mostRecent: String = ""
+
+    def apply(): String = {
+      x += 1
+      mostRecent = pre + x
+      mostRecent
+    }
+    def reset(): Unit = x = -1
+    def didGenerate(name: String) =
+      (name startsWith pre) && ((name drop pre.length) forall (_.isDigit))
+  }
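+
+  // Editorial example (not in upstream):
+  //   val nc = new NameCreator("res")
+  //   nc()                    // "res0"
+  //   nc()                    // "res1"
+  //   nc didGenerate "res1"   // true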
+
+  private lazy val userVar     = new NameCreator(sessionNames.res)  // var name, like res0
+  private lazy val internalVar = new NameCreator(sessionNames.ires) // internal var name, like $ires0
+
+  def isUserVarName(name: String)     = userVar didGenerate name
+  def isInternalVarName(name: String) = internalVar didGenerate name
+
+  val freshLineId            = {
+    var x = 0
+    () => { x += 1 ; x }
+  }
+  def freshUserVarName() = userVar()
+  def freshInternalVarName() = internalVar()
+
+  def resetAllCreators() {
+    userVar.reset()
+    internalVar.reset()
+  }
+
+  def mostRecentVar = userVar.mostRecent
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/Parsed.scala b/src/repl/scala/tools/nsc/interpreter/Parsed.scala
new file mode 100644
index 0000000..672a6fd
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/Parsed.scala
@@ -0,0 +1,60 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import util.returning
+
+/** One instance of a command buffer.
+ */
+class Parsed private (
+  val buffer: String,
+  val cursor: Int,
+  val delimited: Char => Boolean
+) extends Delimited {
+  def isEmpty       = args.isEmpty
+  def isUnqualified = args.size == 1
+  def isAtStart     = cursor <= 0
+
+  private var _verbosity = 0
+
+  def verbosity = _verbosity
+  def withVerbosity(v: Int): this.type = returning[this.type](this)(_ => _verbosity = v)
+
+  def args = toArgs(buffer take cursor).toList
+  def bufferHead = args.head
+  def headLength = bufferHead.length + 1
+  def bufferTail = new Parsed(buffer drop headLength, cursor - headLength, delimited) withVerbosity verbosity
+
+  def prev = new Parsed(buffer, cursor - 1, delimited) withVerbosity verbosity
+  def currentChar = buffer(cursor)
+  def currentArg = args.last
+  def position =
+    if (isEmpty) 0
+    else if (isLastDelimiter) cursor
+    else cursor - currentArg.length
+
+  def isFirstDelimiter  = !isEmpty && isDelimiterChar(buffer.head)
+  def isLastDelimiter   = !isEmpty && isDelimiterChar(buffer.last)
+
+  def isQuoted = false // TODO
+  def isEscaped = !isAtStart && isEscapeChar(currentChar) && !isEscapeChar(prev.currentChar)
+  def isDelimiter = !isQuoted && !isEscaped && isDelimiterChar(currentChar)
+
+  override def toString = "Parsed(%s / %d)".format(buffer, cursor)
+}
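+
+// Editorial note (not in upstream): e.g. Parsed.dotted("scala.collection.mu", 19) treats '.'
+// as the delimiter, so (assuming Delimited.toArgs splits the buffer on the delimiter) args
+// would be List("scala", "collection", "mu") and currentArg "mu", the token being completed.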
+
+object Parsed {
+  val DefaultDelimiters = "[]{},`; \t".toSet
+
+  private def onull(s: String) = if (s == null) "" else s
+
+  def apply(s: String, cursor: Int): Parsed = apply(onull(s), cursor, DefaultDelimiters)
+  def apply(s: String, cursor: Int, delimited: Char => Boolean): Parsed =
+    new Parsed(onull(s), cursor, delimited)
+
+  def dotted(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ == '.')
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Pasted.scala b/src/repl/scala/tools/nsc/interpreter/Pasted.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/interpreter/Pasted.scala
rename to src/repl/scala/tools/nsc/interpreter/Pasted.scala
diff --git a/src/repl/scala/tools/nsc/interpreter/Phased.scala b/src/repl/scala/tools/nsc/interpreter/Phased.scala
new file mode 100644
index 0000000..1cdbd65
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/Phased.scala
@@ -0,0 +1,144 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import scala.collection.immutable
+import scala.language.implicitConversions
+
+/** Mix this into an object and use it as a phasing
+ *  Swiss army knife.
+ */
+trait Phased {
+  val global: Global
+  import global._
+
+  private var active: PhaseName = NoPhaseName
+  private var multi: Seq[PhaseName] = Nil
+
+  def get = active
+  def set(phase: PhaseName): Boolean = phase match {
+    case NoPhaseName  => false
+    case name         => active = name ; true
+  }
+  def setMulti(phases: Seq[PhaseName]): Boolean = {
+    if (phases contains NoPhaseName) false
+    else {
+      multi = phases
+      true
+    }
+  }
+
+  private def parsePhaseChange(str: String): Option[Int] = {
+    if (str == "") Some(0)
+    else if (str startsWith ".prev") parsePhaseChange(str drop 5) map (_ - 1)
+    else if (str startsWith ".next") parsePhaseChange(str drop 5) map (_ + 1)
+    else str.head match {
+      case '+' | '-' =>
+        val (num, rest) = str.tail.span(_.isDigit)
+        val diff = if (str.head == '+') num.toInt else -num.toInt
+        parsePhaseChange(rest) map (_ + diff)
+      case _ =>
+        None
+    }
+  }
+
+  /** Takes a string like 4, typer+2, typer.next, etc.
+   *  and turns it into a PhaseName instance.
+   */
+  private def parseInternal(str: String): PhaseName = {
+    if (str == "") NoPhaseName
+    else if (str forall (_.isDigit)) PhaseName(str.toInt)
+    else {
+      val (name, rest) = str.toLowerCase span (_.isLetter)
+      val start        = PhaseName(name)
+      val change       = parsePhaseChange(rest)
+
+      if (start.isEmpty || change.isEmpty) NoPhaseName
+      else PhaseName(start.id + change.get)
+    }
+  }
+  def parse(str: String): PhaseName =
+    try parseInternal(str)
+    catch { case _: Exception => NoPhaseName }
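+
+  // Editorial examples (not in upstream), assuming the current run defines these phases:
+  //   parse("typer")        == Typer
+  //   parse("typer+2")      == PhaseName(Typer.id + 2)
+  //   parse("erasure.prev") == PhaseName(Erasure.id - 1)
+  //   parse("14")           == PhaseName(14)
+  //   parse("bogus")        == NoPhaseName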
+
+  def atCurrent[T](body: => T): T = enteringPhase(get)(body)
+  def multi[T](body: => T): Seq[T] = multi map (ph => at(ph)(body))
+
+  def at[T](ph: PhaseName)(body: => T): T = {
+    val saved = get
+    set(ph)
+    try atCurrent(body)
+    finally set(saved)
+  }
+  def atMulti[T](phs: Seq[PhaseName])(body: => T): Seq[T] = {
+    val saved = multi
+    setMulti(phs)
+    try multi(body)
+    finally setMulti(saved)
+  }
+
+  def atMap[T](phs: Seq[PhaseName])(body: => T): Seq[(PhaseName, T)] =
+    phs zip atMulti(phs)(body)
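+
+  // Editorial example (not in upstream): inspecting some symbol sym at several phases,
+  //   atMap(List(Typer, Erasure))(sym.info)   // Seq of (PhaseName, Type) pairs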
+
+  object PhaseName {
+    implicit lazy val phaseNameOrdering: Ordering[PhaseName] = Ordering[Int] on (_.id)
+
+    lazy val all = List(
+      Parser, Namer, Packageobjects, Typer, Superaccessors, Pickler, Refchecks,
+      Selectiveanf, Liftcode, Selectivecps, Uncurry, Tailcalls, Specialize,
+      Explicitouter, Erasure, Lazyvals, Lambdalift, Constructors, Flatten, Mixin,
+      Cleanup, Delambdafy, Icode, Inliner, Closelim, Dce, Jvm, Terminal
+    )
+    lazy val nameMap = all.map(x => x.name -> x).toMap withDefaultValue NoPhaseName
+    multi = all
+
+    def apply(id: Int): PhaseName = all find (_.id == id) getOrElse NoPhaseName
+    implicit def apply(s: String): PhaseName = nameMap(s)
+  }
+  sealed abstract class PhaseName {
+    lazy val id   = phase.id
+    lazy val name = toString.toLowerCase
+    def phase     = currentRun.phaseNamed(name)
+    def isEmpty   = this eq NoPhaseName
+  }
+
+  case object Parser extends PhaseName
+  case object Namer extends PhaseName
+  case object Packageobjects extends PhaseName
+  case object Typer extends PhaseName
+  case object Superaccessors extends PhaseName
+  case object Pickler extends PhaseName
+  case object Refchecks extends PhaseName
+  case object Selectiveanf extends PhaseName
+  case object Liftcode extends PhaseName
+  case object Selectivecps extends PhaseName
+  case object Uncurry extends PhaseName
+  case object Tailcalls extends PhaseName
+  case object Specialize extends PhaseName
+  case object Explicitouter extends PhaseName
+  case object Erasure extends PhaseName
+  case object Lazyvals extends PhaseName
+  case object Lambdalift extends PhaseName
+  case object Constructors extends PhaseName
+  case object Flatten extends PhaseName
+  case object Mixin extends PhaseName
+  case object Cleanup extends PhaseName
+  case object Delambdafy extends PhaseName
+  case object Icode extends PhaseName
+  case object Inliner extends PhaseName
+  case object Closelim extends PhaseName
+  case object Dce extends PhaseName
+  case object Jvm extends PhaseName
+  case object Terminal extends PhaseName
+  case object NoPhaseName extends PhaseName {
+    override lazy val id   = -1
+    override lazy val name = phase.name
+    override def phase     = NoPhase
+  }
+
+  implicit def phaseEnumToPhase(name: PhaseName): Phase = name.phase
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/Power.scala b/src/repl/scala/tools/nsc/interpreter/Power.scala
new file mode 100644
index 0000000..f69a5b4
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/Power.scala
@@ -0,0 +1,326 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import scala.collection.{ mutable, immutable }
+import scala.util.matching.Regex
+import scala.io.Codec
+import java.net.{ URL, MalformedURLException }
+import io.{ Path }
+import scala.language.implicitConversions
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.{ClassTag, classTag}
+
+/** Collecting some power mode examples.
+
+scala> trait F[@specialized(Int) T] { def f: T = ??? }
+defined trait F
+
+scala> trait G[@specialized(Long, Int) T] extends F[T] { override def f: T = super.f }
+defined trait G
+
+scala> changesAfterEachPhase(intp("G").info.members filter (_.name.toString contains "super")) >
+Gained after  1/parser {
+  method super$f
+}
+
+Gained after 12/specialize {
+  method super$f$mcJ$sp
+  method super$f$mcI$sp
+}
+
+Lost after 18/flatten {
+  method super$f$mcJ$sp
+  method super$f$mcI$sp
+  method super$f
+}
+*/
+
+/** A class for methods to be injected into the intp in power mode.
+ */
+class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, replVals: ReplValsImpl) {
+  import intp.{ beQuietDuring, typeOfExpression, interpret, parse }
+  import intp.global._
+  import definitions.{ compilerTypeFromTag, compilerSymbolFromTag}
+
+  abstract class SymSlurper {
+    def isKeep(sym: Symbol): Boolean
+    def isIgnore(sym: Symbol): Boolean
+    def isRecur(sym: Symbol): Boolean
+    def isFinished(): Boolean
+
+    val keep = mutable.HashSet[Symbol]()
+    val seen = mutable.HashSet[Symbol]()
+    def processed = keep.size + seen.size
+    def discarded = seen.size - keep.size
+
+    def members(x: Symbol): List[Symbol] =
+      if (x.rawInfo.isComplete) x.info.members.toList
+      else Nil
+
+    var lastCount = -1
+    var pass = 0
+    val unseenHistory = new mutable.ListBuffer[Int]
+
+    def loop(todo: Set[Symbol]): Set[Symbol] = {
+      pass += 1
+      val (repeats, unseen) = todo partition seen
+      unseenHistory += unseen.size
+      if (settings.verbose) {
+        println("%3d  %s accumulated, %s discarded.  This pass: %s unseen, %s repeats".format(
+          pass, keep.size, discarded, unseen.size, repeats.size))
+      }
+      if (lastCount == processed || unseen.isEmpty || isFinished())
+        return keep.toSet
+
+      lastCount = processed
+      keep ++= (unseen filter isKeep filterNot isIgnore)
+      seen ++= unseen
+      loop(unseen filter isRecur flatMap members)
+    }
+
+    def apply(sym: Symbol): Set[Symbol] = {
+      keep.clear()
+      seen.clear()
+      loop(Set(sym))
+    }
+  }
+
+  class PackageSlurper(packageClass: Symbol) extends SymSlurper {
+    /** Looking for diminishing returns */
+    def droppedEnough() = unseenHistory.size >= 4 && {
+      unseenHistory takeRight 4 sliding 2 forall { it =>
+        val List(a, b) = it.toList
+        a > b
+      }
+    }
+
+    def isRecur(sym: Symbol)  = true
+    def isIgnore(sym: Symbol) = sym.isAnonOrRefinementClass || (sym.name.toString contains "$mc")
+    def isKeep(sym: Symbol)   = sym.hasTransOwner(packageClass)
+    def isFinished()          = droppedEnough()
+    def slurp()               = {
+      if (packageClass.isPackageClass)
+        apply(packageClass)
+      else {
+        repldbg("Not a package class! " + packageClass)
+        Set()
+      }
+    }
+  }
+
+  private def customBanner = replProps.powerBanner.option flatMap (f => io.File(f).safeSlurp())
+  private def customInit   = replProps.powerInitCode.option flatMap (f => io.File(f).safeSlurp())
+
+  def banner = customBanner getOrElse """
+    |** Power User mode enabled - BEEP WHIR GYVE **
+    |** :phase has been set to 'typer'.          **
+    |** scala.tools.nsc._ has been imported      **
+    |** global._, definitions._ also imported    **
+    |** Try  :help, :vals, power.<tab>           **
+  """.stripMargin.trim
+
+  private def initImports = List(
+    "scala.tools.nsc._",
+    "scala.collection.JavaConverters._",
+    "intp.global.{ error => _, _ }",
+    "definitions.{ getClass => _, _ }",
+    "power.rutil._",
+    "replImplicits._",
+    "treedsl.CODE._"
+  )
+
+  def init = customInit match {
+    case Some(x)  => x
+    case _        => initImports.mkString("import ", ", ", "")
+  }
+
+  /** Starts up power mode and runs whatever is in init.
+   */
+  def unleash(): Unit = beQuietDuring {
+    // First we create the ReplVals instance and bind it to $r
+    intp.bind("$r", replVals)
+    // Then we import everything from $r.
+    intp interpret ("import " + intp.originalPath("$r") + "._")
+    // And whatever else there is to do.
+    init.lines foreach (intp interpret _)
+  }
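+  // Usage sketch: entering power mode from the REPL (e.g. via the :power command)
+  // creates a Power instance and calls unleash(), which binds the ReplVals as $r,
+  // imports its members, and then runs `init` -- by default a single statement of
+  // the form
+  //   import scala.tools.nsc._, scala.collection.JavaConverters._, intp.global.{ error => _, _ }, ...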
+
+  trait LowPriorityInternalInfo {
+    implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfo[T] = new InternalInfo[T](None)
+  }
+  object InternalInfo extends LowPriorityInternalInfo { }
+
+  /** Addresses the problem of accidentally calling a method on Type when
+   *  you're holding a Symbol: the Symbol would be converted to the type of
+   *  Symbol rather than to the type of the thing the symbol represents.
+   *  Only one method, "?", is installed implicitly; the rest of the
+   *  conveniences live on that wrapper.
+   */
+  trait LowPriorityInternalInfoWrapper { }
+  class InternalInfoWrapper[T: ru.TypeTag : ClassTag](value: Option[T] = None) {
+    def ? : InternalInfo[T] = new InternalInfo[T](value)
+  }
+
+  /** Todos...
+   *    translate tag type arguments into applied types
+   *    customizable symbol filter (had to hardcode no-spec to reduce noise)
+   */
+  class InternalInfo[T](value: Option[T] = None)(implicit typeEvidence: ru.TypeTag[T], runtimeClassEvidence: ClassTag[T]) {
+    private def isSpecialized(s: Symbol) = s.name.toString contains "$mc"
+    private def isImplClass(s: Symbol)   = s.name.toString endsWith "$class"
+
+    /** Standard noise reduction filter. */
+    def excludeMember(s: Symbol) = (
+         isSpecialized(s)
+      || isImplClass(s)
+      || s.isAnonOrRefinementClass
+      || s.isAnonymousFunction
+    )
+    def symbol            = compilerSymbolFromTag(tag)
+    def tpe               = compilerTypeFromTag(tag)
+    def members           = membersUnabridged filterNot excludeMember
+    def membersUnabridged = tpe.members.toList
+    def pkg               = symbol.enclosingPackage
+    def tag               = typeEvidence
+    def runtimeClass      = runtimeClassEvidence.runtimeClass
+    def shortClass        = runtimeClass.getName split "[$.]" last
+    def baseClasses       = tpe.baseClasses
+
+    override def toString = value match {
+      case Some(x)  => "%s (%s)".format(x, shortClass)
+      case _        => runtimeClass.getName
+    }
+  }
+
+  trait LowPriorityPrettifier {
+    implicit object AnyPrettifier extends Prettifier[Any] {
+      def show(x: Any): Unit = prettify(x) foreach println
+      def prettify(x: Any): TraversableOnce[String] = x match {
+        case x: Name                => List(x.decode)
+        case Tuple2(k, v)           => List(prettify(k).toIterator ++ Iterator("->") ++ prettify(v) mkString " ")
+        case xs: Array[_]           => xs.iterator flatMap prettify
+        case xs: TraversableOnce[_] => xs flatMap prettify
+        case x                      => List(Prettifier.stringOf(x))
+      }
+    }
+  }
+  object StringPrettifier extends Prettifier[String] {
+    def show(x: String) = println(x)
+    def prettify(x: String) = List(Prettifier stringOf x)
+  }
+  object Prettifier extends LowPriorityPrettifier {
+    def stringOf(x: Any): String = scala.runtime.ScalaRunTime.stringOf(x)
+    def default[T] = new Prettifier[T] {
+      def prettify(x: T): TraversableOnce[String] = AnyPrettifier prettify x
+      def show(x: T): Unit = AnyPrettifier show x
+    }
+  }
+  trait Prettifier[T] {
+    def show(x: T): Unit
+    def prettify(x: T): TraversableOnce[String]
+
+    def prettify(xs: TraversableOnce[T]): TraversableOnce[String] = xs flatMap (x => prettify(x))
+  }
+
+  abstract class PrettifierClass[T: Prettifier]() {
+    val pretty = implicitly[Prettifier[T]]
+    def value: Seq[T]
+
+    def pp(f: Seq[T] => Seq[T]): Unit =
+      pretty prettify f(value) foreach (StringPrettifier show _)
+
+    def freq[U](p: T => U) = (value.toSeq groupBy p mapValues (_.size)).toList sortBy (-_._2) map (_.swap)
+
+    def >>(implicit ord: Ordering[T]): Unit      = pp(_.sorted)
+    def >!(): Unit                               = pp(_.distinct)
+    def >(): Unit                                = pp(identity)
+  }
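+  // Usage sketch (values are lifted into these classes by the implicits in
+  // Implicits1/Implicits2 below):
+  //   List(3, 1, 2, 1).>()     // pretty-print each element on its own line
+  //   List(3, 1, 2, 1).>>      // sort, then print
+  //   List(3, 1, 2, 1).>!()    // drop duplicates, then print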
+
+  class MultiPrettifierClass[T: Prettifier](val value: Seq[T]) extends PrettifierClass[T]() { }
+  class SinglePrettifierClass[T: Prettifier](single: T) extends PrettifierClass[T]() {
+    val value = List(single)
+  }
+
+  class RichReplString(s: String) {
+    // make a URL out of the string
+    def u: URL = (
+      if (s contains ":") new URL(s)
+      else if (new JFile(s) exists) new JFile(s).toURI.toURL
+      else new URL("http://" + s)
+    )
+  }
+  class RichInputStream(in: InputStream)(implicit codec: Codec) {
+    def bytes(): Array[Byte]  = io.Streamable.bytes(in)
+    def slurp(): String       = io.Streamable.slurp(in)
+    def <<(): String          = slurp()
+  }
+  class RichReplURL(url: URL)(implicit codec: Codec) {
+    def slurp(): String = io.Streamable.slurp(url)
+  }
+
+  trait Implicits1 {
+    // fallback
+    implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) =
+      new SinglePrettifierClass[T](x)
+  }
+  trait Implicits2 extends Implicits1 {
+    class RichSymbol(sym: Symbol) {
+      // convenient type application
+      def apply(targs: Type*): Type = typeRef(NoPrefix, sym, targs.toList)
+    }
+    object symbolSubtypeOrdering extends Ordering[Symbol] {
+      def compare(s1: Symbol, s2: Symbol) =
+        if (s1 eq s2) 0
+        else if (s1 isLess s2) -1
+        else 1
+    }
+    implicit lazy val powerSymbolOrdering: Ordering[Symbol] = Ordering[Name] on (_.name)
+    implicit lazy val powerTypeOrdering: Ordering[Type]     = Ordering[Symbol] on (_.typeSymbol)
+
+    implicit def replInternalInfo[T: ru.TypeTag : ClassTag](x: T): InternalInfoWrapper[T] = new InternalInfoWrapper[T](Some(x))
+    implicit def replEnhancedStrings(s: String): RichReplString = new RichReplString(s)
+    implicit def replMultiPrinting[T: Prettifier](xs: TraversableOnce[T]): MultiPrettifierClass[T] =
+      new MultiPrettifierClass[T](xs.toSeq)
+    implicit def replPrettifier[T] : Prettifier[T] = Prettifier.default[T]
+    implicit def replTypeApplication(sym: Symbol): RichSymbol = new RichSymbol(sym)
+
+    implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in)
+    implicit def replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec)
+  }
+
+  trait ReplUtilities {
+    def module[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isPackage)
+    def clazz[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isClass)
+    def info[T: ru.TypeTag : ClassTag] = InternalInfo[T]
+    def ?[T: ru.TypeTag : ClassTag] = InternalInfo[T]
+    def sanitize(s: String): String = sanitize(s.getBytes())
+    def sanitize(s: Array[Byte]): String = (s map {
+      case x if x.toChar.isControl  => '?'
+      case x                        => x.toChar
+    }).mkString
+
+    def strings(s: Seq[Byte]): List[String] = {
+      if (s.length == 0) Nil
+      else s dropWhile (_.toChar.isControl) span (x => !x.toChar.isControl) match {
+        case (next, rest) => next.map(_.toChar).mkString :: strings(rest)
+      }
+    }
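+    // For example, given the bytes of "ab", a NUL byte, then "cd":
+    //   strings(...)  == List("ab", "cd")   -- control bytes split the input
+    //   sanitize(...) == "ab?cd"            -- control bytes become '?'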
+  }
+
+  lazy val rutil: ReplUtilities = new ReplUtilities { }
+  lazy val phased: Phased       = new { val global: intp.global.type = intp.global } with Phased { }
+
+  def unit(code: String)    = newCompilationUnit(code)
+  def trees(code: String)   = parse(code) match { case parse.Success(trees) => trees; case _ => Nil }
+
+  override def toString = s"""
+    |** Power mode status **
+    |Default phase: ${phased.get}
+    |Names: ${intp.unqualifiedIds mkString " "}
+  """.stripMargin
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplConfig.scala b/src/repl/scala/tools/nsc/interpreter/ReplConfig.scala
new file mode 100644
index 0000000..046d6ec
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/ReplConfig.scala
@@ -0,0 +1,53 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import scala.util.control.ControlThrowable
+import util.Exceptional.unwrap
+import util.stackTraceString
+
+trait ReplConfig {
+  lazy val replProps = new ReplProps
+
+  class TapMaker[T](x: T) {
+    def tapDebug(msg: => String): T = tap(x => repldbg(parens(x)))
+    def tap[U](f: T => U): T = {
+      f(x)
+      x
+    }
+  }
+
+  private def parens(x: Any) = "(" + x + ")"
+  private def echo(msg: => String) =
+    try Console println msg
+    catch { case x: AssertionError => Console.println("Assertion error printing debugging output: " + x) }
+
+  private[nsc] def repldbg(msg: => String)    = if (isReplDebug) echo(msg)
+  private[nsc] def repltrace(msg: => String)  = if (isReplTrace) echo(msg)
+  private[nsc] def replinfo(msg: => String)   = if (isReplInfo)  echo(msg)
+
+  private[nsc] def logAndDiscard[T](label: String, alt: => T): PartialFunction[Throwable, T] = {
+    case t: ControlThrowable => throw t
+    case t: Throwable        =>
+      repldbg(label + ": " + unwrap(t))
+      repltrace(stackTraceString(unwrap(t)))
+      alt
+  }
+  private[nsc] def substituteAndLog[T](label: String, alt: => T)(body: => T): T = {
+    try body
+    catch logAndDiscard(label, alt)
+  }
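+  // Usage sketch (`riskyOp` is a hypothetical computation):
+  //   substituteAndLog("risky", Nil)(riskyOp())
+  // evaluates riskyOp(); if it throws a non-control exception, the exception is
+  // logged at debug/trace level and the fallback Nil is returned instead
+  // (ControlThrowables are rethrown).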
+
+  def isReplTrace: Boolean = replProps.trace
+  def isReplDebug: Boolean = replProps.debug || isReplTrace
+  def isReplInfo: Boolean  = replProps.info || isReplDebug
+  def isReplPower: Boolean = replProps.power
+
+  private def csv(p: String, v: String) = p split "," contains v
+  def isPaged: Boolean     = replProps.format.isSet && csv(replProps.format.get, "paged")
+  def isAcross: Boolean    = replProps.format.isSet && csv(replProps.format.get, "across")
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplDir.scala b/src/repl/scala/tools/nsc/interpreter/ReplDir.scala
new file mode 100644
index 0000000..5d386b4
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/ReplDir.scala
@@ -0,0 +1,48 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import io.VirtualDirectory
+import settings.MutableSettings
+import scala.reflect.io.{ AbstractFile, PlainDirectory, Directory }
+import scala.collection.generic.Clearable
+
+/** Directory to save .class files to. */
+trait ReplDir extends AbstractFile with Clearable { }
+
+private class ReplVirtualDir() extends VirtualDirectory("(memory)", None) with ReplDir { }
+private class ReplRealDir(dir: Directory) extends PlainDirectory(dir) with ReplDir {
+  def clear() = {
+    dir.deleteRecursively()
+    dir.createDirectory()
+  }
+}
+
+class ReplOutput(val dirSetting: MutableSettings#StringSetting) {
+  // outdir for generated classfiles - may be in-memory (the default),
+  // a generated temporary directory, or a specified outdir.
+  val dir: ReplDir = (
+    if (dirSetting.isDefault)
+      new ReplVirtualDir()
+    else if (dirSetting.value == "")
+      new ReplRealDir(Directory.makeTemp("repl"))
+    else
+      new ReplRealDir(Directory(dirSetting.value))
+  )
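+  // In other words (the setting is typically -Yrepl-outdir):
+  //   setting left at its default        -> in-memory virtual directory
+  //   setting supplied but empty ("")    -> freshly created temp directory
+  //   setting supplied with a path       -> that directory on disk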
+
+  // print the contents hierarchically
+  def show(out: JPrintWriter) = {
+    def pp(root: AbstractFile, indentLevel: Int) {
+      val label = root.name
+      val spaces = "    " * indentLevel
+      out.println(spaces + label)
+      if (root.isDirectory)
+        root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1))
+    }
+    pp(dir, 0)
+  }
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala
new file mode 100644
index 0000000..51fab30
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala
@@ -0,0 +1,64 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import typechecker.Analyzer
+
+/** A layer on top of Global so I can guarantee some extra
+ *  functionality for the repl.  It doesn't do much yet.
+ */
+trait ReplGlobal extends Global {
+  // This exists mostly because using the reporter too early leads to deadlock.
+  private def echo(msg: String) { Console println msg }
+
+  override def abort(msg: String): Nothing = {
+    echo("ReplGlobal.abort: " + msg)
+    super.abort(msg)
+  }
+
+  override lazy val analyzer = new {
+    val global: ReplGlobal.this.type = ReplGlobal.this
+  } with Analyzer {
+
+    override protected def findMacroClassLoader(): ClassLoader = {
+      val loader = super.findMacroClassLoader
+      macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(global.classPath.asURLs))
+      val virtualDirectory = globalSettings.outputDirs.getSingleOutput.get
+      new util.AbstractFileClassLoader(virtualDirectory, loader) {}
+    }
+
+    override def newTyper(context: Context): Typer = new Typer(context) {
+      override def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
+        val res = super.typed(tree, mode, pt)
+        tree match {
+          case Ident(name) if !tree.symbol.hasPackageFlag && !name.toString.startsWith("$") =>
+            repldbg("typed %s: %s".format(name, res.tpe))
+          case _ =>
+        }
+        res
+      }
+    }
+  }
+
+  object replPhase extends SubComponent {
+    val global: ReplGlobal.this.type = ReplGlobal.this
+    val phaseName = "repl"
+    val runsAfter = List[String]("typer")
+    val runsRightAfter = None
+    def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) {
+      def apply(unit: CompilationUnit) {
+        repldbg("Running replPhase on " + unit.body)
+        // newNamer(rootContext(unit)).enterSym(unit.body)
+      }
+    }
+  }
+
+  override protected def computePhaseDescriptors: List[SubComponent] = {
+    addToPhasesSet(replPhase, "repl")
+    super.computePhaseDescriptors
+  }
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
new file mode 100644
index 0000000..36e6dbb
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
@@ -0,0 +1,34 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import scala.sys._
+import Prop._
+
+class ReplProps {
+  private def bool(name: String) = BooleanProp.keyExists(name)
+  private def int(name: String) = IntProp(name)
+
+  val info  = bool("scala.repl.info")
+  val debug = bool("scala.repl.debug")
+  val trace = bool("scala.repl.trace")
+  val power = bool("scala.repl.power")
+
+  /** CSV whose values may include "paged" (enable pagination) and "across"
+   *  (`-x` style columns, printed across rather than down the column).  Since
+   *  pagination turns off columnar output, these flags are
+   *  currently mutually exclusive.
+   */
+  val format = Prop[String]("scala.repl.format")
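+  // For example (illustrative JVM options): -Dscala.repl.format=paged
+  // or -Dscala.repl.format=across; ReplConfig.isPaged/isAcross read this value.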
+
+  val replAutorunCode = Prop[JFile]("scala.repl.autoruncode")
+  val powerInitCode   = Prop[JFile]("scala.repl.power.initcode")
+  val powerBanner     = Prop[JFile]("scala.repl.power.banner")
+
+  val vids = bool("scala.repl.vids")
+  val maxPrintString = int("scala.repl.maxprintstring")
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
rename to src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
new file mode 100644
index 0000000..43da5c6
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
@@ -0,0 +1,32 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import scala.reflect.internal.Chars
+
+trait ReplStrings {
+  /** Convert a string into code that can recreate the string.
+   *  This requires replacing all special characters by escape
+   *  codes. It does not add the surrounding " marks.  */
+  def string2code(str: String): String = {
+    val res = new StringBuilder
+    for (c <- str) c match {
+      case '"' | '\'' | '\\'  => res += '\\' ; res += c
+      case _ if c.isControl   => res ++= Chars.char2uescape(c)
+      case _                  => res += c
+    }
+    res.toString
+  }
+
+  def string2codeQuoted(str: String) =
+    "\"" + string2code(str) + "\""
+
+  def any2stringOf(x: Any, maxlen: Int) =
+    "scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen)
+
+  def words(s: String) = (s.trim split "\\s+" filterNot (_ == "")).toList
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplVals.scala b/src/repl/scala/tools/nsc/interpreter/ReplVals.scala
new file mode 100644
index 0000000..9346b05
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/ReplVals.scala
@@ -0,0 +1,82 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import scala.language.implicitConversions
+import scala.reflect.api.{Universe => ApiUniverse}
+import scala.reflect.runtime.{universe => ru}
+
+/** A class which the repl utilizes to expose predefined objects.
+ *  The base implementation is empty; the standard repl implementation
+ *  is StdReplVals.
+ */
+abstract class ReplVals { }
+
+class StdReplVals(final val r: ILoop) extends ReplVals {
+  final lazy val repl                     = r
+  final lazy val intp                     = r.intp
+  final lazy val power                    = r.power
+  final lazy val reader                   = r.in
+  final lazy val vals                     = this
+  final lazy val global: intp.global.type = intp.global
+  final lazy val isettings                = intp.isettings
+  final lazy val completion               = reader.completion
+  final lazy val history                  = reader.history
+  final lazy val phased                   = power.phased
+  final lazy val analyzer                 = global.analyzer
+
+  object treedsl extends { val global: intp.global.type = intp.global } with ast.TreeDSL { }
+
+  final lazy val typer = analyzer.newTyper(
+    analyzer.rootContext(
+      power.unit("").asInstanceOf[analyzer.global.CompilationUnit]
+    )
+  )
+  def lastRequest = intp.lastRequest
+
+  class ReplImplicits extends power.Implicits2 {
+    import intp.global.Symbol
+
+    private val tagFn = ReplVals.mkCompilerTypeFromTag[intp.global.type](global)
+    implicit def mkCompilerTypeFromTag(sym: Symbol) = tagFn(sym)
+  }
+
+  final lazy val replImplicits = new ReplImplicits
+
+  def typed[T <: analyzer.global.Tree](tree: T): T = typer.typed(tree).asInstanceOf[T]
+}
+
+object ReplVals {
+  /** Latest attempt to work around the challenge of foo.global.Type
+   *  not being seen as the same type as bar.global.Type even though
+   *  the globals are the same.  Dependent method types to the rescue.
+   */
+  def mkCompilerTypeFromTag[T <: Global](global: T) = {
+    import global._
+
+    /** We can't use definitions.compilerTypeFromTag directly because we're passing
+     *  it to map and the compiler refuses to perform eta expansion on a method
+     *  with a dependent return type.  (Can this be relaxed?) To get around this
+     *  I have this forwarder which widens the type and then casts the result back
+     *  to the dependent type.
+     */
+    def compilerTypeFromTag(t: ApiUniverse # WeakTypeTag[_]): Global#Type =
+      definitions.compilerTypeFromTag(t)
+
+    class AppliedTypeFromTags(sym: Symbol) {
+      def apply[M](implicit m1: ru.TypeTag[M]): Type =
+        if (sym eq NoSymbol) NoType
+        else appliedType(sym, compilerTypeFromTag(m1).asInstanceOf[Type])
+
+      def apply[M1, M2](implicit m1: ru.TypeTag[M1], m2: ru.TypeTag[M2]): Type =
+        if (sym eq NoSymbol) NoType
+        else appliedType(sym, compilerTypeFromTag(m1).asInstanceOf[Type], compilerTypeFromTag(m2).asInstanceOf[Type])
+    }
+
+    (sym: Symbol) => new AppliedTypeFromTags(sym)
+  }
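+  // Illustrative use (a sketch, assuming a Global instance `g` is in scope):
+  //   val fromTag = ReplVals.mkCompilerTypeFromTag[g.type](g)
+  //   fromTag(g.definitions.ListClass).apply[Int]   // a g.Type representing List[Int]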
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/Results.scala b/src/repl/scala/tools/nsc/interpreter/Results.scala
new file mode 100644
index 0000000..a4e1e25
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/Results.scala
@@ -0,0 +1,22 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala.tools.nsc
+package interpreter
+
+object Results {
+  /** A result from the Interpreter interpreting one line of input. */
+  abstract sealed class Result
+
+  /** The line was interpreted successfully. */
+  case object Success extends Result
+
+  /** The line was erroneous in some way. */
+  case object Error extends Result
+
+  /** The input was incomplete.  The caller should request more input.
+   */
+  case object Incomplete extends Result
+}
\ No newline at end of file
diff --git a/src/repl/scala/tools/nsc/interpreter/RichClass.scala b/src/repl/scala/tools/nsc/interpreter/RichClass.scala
new file mode 100644
index 0000000..36cdf65
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/RichClass.scala
@@ -0,0 +1,36 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import scala.reflect.{ ClassTag, classTag }
+
+class RichClass[T](val clazz: Class[T]) {
+  def toTag: ClassTag[T] = ClassTag[T](clazz)
+
+  // Sadly isAnonymousClass does not return true for Scala anonymous
+  // classes, because our naming scheme does not hold up well against the
+  // JVM's many assumptions.
+  def isScalaAnonymous = (
+    try clazz.isAnonymousClass || (clazz.getName contains "$anon$")
+    catch { case _: java.lang.InternalError => false }  // good ol' "Malformed class name"
+  )
+
+  def supertags: List[ClassTag[_]] = supers map (_.toTag)
+  def superNames: List[String]    = supers map (_.getName)
+  def interfaces: List[JClass]    = supers filter (_.isInterface)
+
+  def hasAncestorName(f: String => Boolean) = superNames exists f
+  def hasAncestor(f: JClass => Boolean) = supers exists f
+
+  def supers: List[JClass] = {
+    def loop(x: JClass): List[JClass] = x.getSuperclass match {
+      case null   => List(x)
+      case sc     => x :: (x.getInterfaces.toList flatMap loop) ++ loop(sc)
+    }
+    loop(clazz).distinct
+  }
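+  // For instance, new RichClass(classOf[java.util.ArrayList[_]]).superNames would
+  // include "java.util.AbstractList", "java.util.List" and "java.lang.Object"
+  // (interfaces first at each level, then the superclass chain, duplicates removed).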
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
new file mode 100644
index 0000000..6634dc6
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
@@ -0,0 +1,41 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Stepan Koltsov
+ */
+
+package scala
+package tools.nsc
+package interpreter
+
+import java.io.{ BufferedReader }
+import session.NoHistory
+
+/** Reads using standard JDK API */
+class SimpleReader(
+  in: BufferedReader,
+  out: JPrintWriter,
+  val interactive: Boolean)
+extends InteractiveReader
+{
+  val history = NoHistory
+  val completion = NoCompletion
+
+  def reset() = ()
+  def redrawLine() = ()
+  def readOneLine(prompt: String): String = {
+    if (interactive) {
+      out.print(prompt)
+      out.flush()
+    }
+    in.readLine()
+  }
+  def readOneKey(prompt: String)  = sys.error("No char-based input in SimpleReader")
+}
+
+object SimpleReader {
+  def defaultIn  = Console.in
+  def defaultOut = new JPrintWriter(Console.out)
+
+  def apply(in: BufferedReader = defaultIn, out: JPrintWriter = defaultOut, interactive: Boolean = true): SimpleReader =
+    new SimpleReader(in, out, interactive)
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala b/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala
new file mode 100644
index 0000000..ebbb397
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala
@@ -0,0 +1,15 @@
+package scala.tools.nsc
+package interpreter
+
+import scala.tools.reflect.StdTags
+import scala.reflect.runtime.{ universe => ru }
+
+trait StdReplTags extends StdTags {
+  lazy val tagOfStdReplVals = tagOfStaticClass[StdReplVals]
+  lazy val tagOfIMain = tagOfStaticClass[IMain]
+}
+
+object StdReplTags extends StdTags with StdReplTags {
+  val u: ru.type = ru
+  val m = u.runtimeMirror(getClass.getClassLoader)
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala
new file mode 100644
index 0000000..079097d
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/package.scala
@@ -0,0 +1,199 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+
+import scala.language.implicitConversions
+import scala.reflect.{ classTag, ClassTag }
+import scala.reflect.runtime.{ universe => ru }
+import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
+import scala.util.control.Exception.catching
+
+/** The main REPL related classes and values are as follows.
+ *  In addition to standard compiler classes Global and Settings, there are:
+ *
+ *  History: an interface for session history.
+ *  Completion: an interface for tab completion.
+ *  ILoop (formerly InterpreterLoop): the umbrella class for a session.
+ *  IMain (formerly Interpreter): manages the evolving state of the session,
+ *    submits code to the compiler, and handles the output.
+ *  InteractiveReader: how ILoop obtains input.
+ *  Power: a repository for more advanced/experimental features.
+ *
+ *  ILoop contains { in: InteractiveReader, intp: IMain, settings: Settings, power: Power }
+ *  InteractiveReader contains { history: History, completion: Completion }
+ *  IMain contains { global: Global }
+ */
+package object interpreter extends ReplConfig with ReplStrings {
+  type JFile          = java.io.File
+  type JClass         = java.lang.Class[_]
+  type JList[T]       = java.util.List[T]
+  type JCollection[T] = java.util.Collection[T]
+  type JPrintWriter   = java.io.PrintWriter
+  type InputStream    = java.io.InputStream
+  type OutputStream   = java.io.OutputStream
+
+  val IR = Results
+
+  implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
+
+  private[interpreter] implicit def javaCharSeqCollectionToScala(xs: JCollection[_ <: CharSequence]): List[String] = {
+    import scala.collection.JavaConverters._
+    xs.asScala.toList map ("" + _)
+  }
+
+  private[nsc] implicit def enrichClass[T](clazz: Class[T]) = new RichClass[T](clazz)
+  private[nsc] implicit def enrichAnyRefWithTap[T](x: T) = new TapMaker(x)
+  private[nsc] def debugging[T](msg: String)(x: T) = x.tapDebug(msg)
+
+  private val ourClassloader = getClass.getClassLoader
+
+  def staticTypeTag[T: ClassTag]: ru.TypeTag[T] = ru.TypeTag[T](
+    ru.runtimeMirror(ourClassloader),
+    new TypeCreator {
+      def apply[U <: ApiUniverse with Singleton](m: Mirror[U]): U # Type =
+        m.staticClass(classTag[T].runtimeClass.getName).toTypeConstructor.asInstanceOf[U # Type]
+  })
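+  // e.g. staticTypeTag[java.util.Date] builds a ru.TypeTag[java.util.Date] from the
+  // runtime class alone, so callers only need a ClassTag rather than a compile-time TypeTag.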
+
+  /** This class serves to trick the compiler into treating a var
+   *  (intp, in ILoop) as a stable identifier.
+   */
+  implicit class IMainOps(val intp: IMain) {
+    import intp._
+    import global.{ reporter => _, _ }
+    import definitions._
+
+    protected def echo(msg: String) = {
+      Console.out println msg
+      Console.out.flush()
+    }
+
+    def implicitsCommand(line: String): String = {
+      def p(x: Any) = intp.reporter.printMessage("" + x)
+
+      // If an argument is given, only show sources whose name contains it
+      // somewhere (passing -v shows all sources).
+      val args     = line split "\\s+"
+      val filtered = intp.implicitSymbolsBySource filter {
+        case (source, syms) =>
+          (args contains "-v") || {
+            if (line == "") (source.fullName.toString != "scala.Predef")
+            else (args exists (source.name.toString contains _))
+          }
+      }
+
+      if (filtered.isEmpty)
+        return "No implicits have been imported other than those in Predef."
+
+      filtered foreach {
+        case (source, syms) =>
+          p("/* " + syms.size + " implicit members imported from " + source.fullName + " */")
+
+          // This groups the members by where the symbol is defined
+          val byOwner = syms groupBy (_.owner)
+          val sortedOwners = byOwner.toList sortBy { case (owner, _) => exitingTyper(source.info.baseClasses indexOf owner) }
+
+          sortedOwners foreach {
+            case (owner, members) =>
+              // Within each owner, we cluster results based on the final result type
+              // if there are more than a couple, and sort each cluster based on name.
+              // This is really just trying to make the 100 or so implicits imported
+              // by default into something readable.
+              val memberGroups: List[List[Symbol]] = {
+                val groups = members groupBy (_.tpe.finalResultType) toList
+                val (big, small) = groups partition (_._2.size > 3)
+                val xss = (
+                  (big sortBy (_._1.toString) map (_._2)) :+
+                  (small flatMap (_._2))
+                )
+
+                xss map (xs => xs sortBy (_.name.toString))
+              }
+
+              val ownerMessage = if (owner == source) " defined in " else " inherited from "
+              p("  /* " + members.size + ownerMessage + owner.fullName + " */")
+
+              memberGroups foreach { group =>
+                group foreach (s => p("  " + intp.symbolDefString(s)))
+                p("")
+              }
+          }
+          p("")
+      }
+      ""
+    }
+
+    def kindCommandInternal(expr: String, verbose: Boolean): Unit = {
+      val catcher = catching(classOf[MissingRequirementError],
+                             classOf[ScalaReflectionException])
+      def typeFromTypeString: Option[ClassSymbol] = catcher opt {
+        exprTyper.typeOfTypeString(expr).typeSymbol.asClass
+      }
+      def typeFromNameTreatedAsTerm: Option[ClassSymbol] = catcher opt {
+        val moduleClass = exprTyper.typeOfExpression(expr).typeSymbol
+        moduleClass.linkedClassOfClass.asClass
+      }
+      def typeFromFullName: Option[ClassSymbol] = catcher opt {
+        intp.global.rootMirror.staticClass(expr)
+      }
+      def typeOfTerm: Option[TypeSymbol] = replInfo(symbolOfLine(expr)).typeSymbol match {
+        case sym: TypeSymbol => Some(sym)
+        case _ => None
+      }
+      (typeFromTypeString orElse typeFromNameTreatedAsTerm orElse typeFromFullName orElse typeOfTerm) foreach { sym =>
+        val (kind, tpe) = exitingTyper {
+          val tpe = sym.tpeHK
+          (intp.global.inferKind(NoPrefix)(tpe, sym.owner), tpe)
+        }
+        echoKind(tpe, kind, verbose)
+      }
+    }
+
+    def echoKind(tpe: Type, kind: Kind, verbose: Boolean) {
+      def typeString(tpe: Type): String = {
+        tpe match {
+          case TypeRef(_, sym, _) => typeString(sym.info)
+          case RefinedType(_, _)  => tpe.toString
+          case _                  => tpe.typeSymbol.fullName
+        }
+      }
+      printAfterTyper(typeString(tpe) + "'s kind is " + kind.scalaNotation)
+      if (verbose) {
+        echo(kind.starNotation)
+        echo(kind.description)
+      }
+    }
+
+    /** TODO -
+     *  -n normalize
+     *  -l label with case class parameter names
+     *  -c complete - leave nothing out
+     */
+    def typeCommandInternal(expr: String, verbose: Boolean): Unit =
+      symbolOfLine(expr) andAlso (echoTypeSignature(_, verbose))
+
+    def printAfterTyper(msg: => String) =
+      reporter printUntruncatedMessage exitingTyper(msg)
+
+    private def replInfo(sym: Symbol) =
+      if (sym.isAccessor) dropNullaryMethod(sym.info) else sym.info
+
+    def echoTypeStructure(sym: Symbol) =
+      printAfterTyper("" + deconstruct.show(replInfo(sym)))
+
+    def echoTypeSignature(sym: Symbol, verbose: Boolean) = {
+      if (verbose) echo("// Type signature")
+      printAfterTyper("" + replInfo(sym))
+
+      if (verbose) {
+        echo("\n// Internal Type structure")
+        echoTypeStructure(sym)
+      }
+    }
+  }
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
rename to src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
diff --git a/src/repl/scala/tools/nsc/interpreter/session/History.scala b/src/repl/scala/tools/nsc/interpreter/session/History.scala
new file mode 100644
index 0000000..794d41a
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/session/History.scala
@@ -0,0 +1,22 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+package session
+
+/** An implementation-agnostic history interface which makes no
+ *  reference to the jline classes.  Very sparse right now.
+ */
+trait History {
+  def asStrings: List[String]
+  def index: Int
+  def size: Int
+}
+object NoHistory extends History {
+  def asStrings       = Nil
+  def index           = 0
+  def size            = 0
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala
rename to src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala
diff --git a/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
new file mode 100644
index 0000000..7c49b91
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
@@ -0,0 +1,63 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+package session
+
+import scala.collection.mutable.{ Buffer, ListBuffer }
+import scala.collection.JavaConverters._
+
+class SimpleHistory extends JLineHistory {
+  private var _index: Int = 0
+  private val buf: Buffer[String] = new ListBuffer[String]
+  private def toEntries(): Seq[JEntry] = buf.zipWithIndex map { case (x, i) => Entry(i, x) }
+  private def setTo(num: Int)          = { _index = num ; true }
+  private def minusOne                 = { _index -= 1 ; true }
+  private def plusOne                  = { _index += 1 ; true }
+  private def lastIndex                = size - 1
+  private def fail(msg: String): String = {
+    repldbg("Internal error in history(size %d, index %d): %s".format(
+      size, index, msg)
+    )
+    ""
+  }
+
+  case class Entry(index: Int, value: CharSequence) extends JEntry {
+    override def toString = value
+  }
+
+  def maxSize: Int = 2500
+  def last = if (isEmpty) fail("last") else buf.last
+
+  def size = buf.size
+  def index = _index
+  def isEmpty = buf.isEmpty
+  def clear() = buf.clear()
+  def get(idx: Int): CharSequence = buf(idx)
+  def add(item: CharSequence): Unit = buf += item
+  def replace(item: CharSequence): Unit = {
+    buf trimEnd 1
+    add(item)
+  }
+  def entries(idx: Int): JListIterator[JEntry] = toEntries().asJava.listIterator(idx)
+  def entries(): JListIterator[JEntry]         = toEntries().asJava.listIterator()
+  def iterator: JIterator[JEntry]              = toEntries().iterator.asJava
+
+  def remove(idx: Int): CharSequence        = buf remove idx
+  def removeFirst(): CharSequence           = buf remove 0
+  def removeLast(): CharSequence            = buf remove lastIndex
+  def set(idx: Int, to: CharSequence): Unit = buf(idx) = to
+
+  def current()         = if (index >= 0 && index < buf.size) buf(index) else fail("current()")
+  def previous()        = (index > 0) && minusOne
+  def next()            = (index <= lastIndex) && plusOne
+  def moveToFirst()     = (size > 0) && (index != 0) && setTo(0)
+  def moveToLast()      = (size > 0) && (index < lastIndex) && setTo(lastIndex)
+  def moveTo(idx: Int)  = (idx > 0) && (idx <= lastIndex) && setTo(idx)
+  def moveToEnd(): Unit = setTo(size)
+
+  def asStrings = buf.toList
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/session/package.scala b/src/repl/scala/tools/nsc/interpreter/session/package.scala
new file mode 100644
index 0000000..a3d7312
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/session/package.scala
@@ -0,0 +1,23 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+import scala.language.implicitConversions
+
+/** Files having to do with the state of a repl session:
+ *  lines of text entered, types and terms defined, etc.
+ */
+package object session {
+  type JIterator[T]       = java.util.Iterator[T]
+  type JListIterator[T]   = java.util.ListIterator[T]
+
+  type JEntry             = jline.console.history.History.Entry
+  type JHistory           = jline.console.history.History
+  type JMemoryHistory     = jline.console.history.MemoryHistory
+  type JPersistentHistory = jline.console.history.PersistentHistory
+
+  private[interpreter] implicit def charSequenceFix(x: CharSequence): String = x.toString
+}
diff --git a/src/scalacheck/org/scalacheck/Arbitrary.scala b/src/scalacheck/org/scalacheck/Arbitrary.scala
deleted file mode 100644
index 8c43cda..0000000
--- a/src/scalacheck/org/scalacheck/Arbitrary.scala
+++ /dev/null
@@ -1,426 +0,0 @@
-/*-------------------------------------------------------------------------*\
-**  ScalaCheck                                                             **
-**  Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved.          **
-**  http://www.scalacheck.org                                              **
-**                                                                         **
-**  This software is released under the terms of the Revised BSD License.  **
-**  There is NO WARRANTY. See the file LICENSE for the full text.          **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.{FreqMap,Buildable}
-import scala.reflect.ClassTag
-
-sealed abstract class Arbitrary[T] {
-  val arbitrary: Gen[T]
-}
-
-/** Defines implicit <code>Arbitrary</code> instances for common types.
- *  <p>
- *  ScalaCheck
- *  uses implicit <code>Arbitrary</code> instances when creating properties
- *  out of functions with the <code>Prop.property</code> method, and when
- *  the <code>Arbitrary.arbitrary</code> method is used. For example, the
- *  following code requires that there exists an implicit
- *  <code>Arbitrary[MyClass]</code> instance:
- *  </p>
- *
- *  <p>
- *  <code>
- *    val myProp = Prop.forAll { myClass: MyClass =><br />
- *      ...<br />
- *    }<br />
- *
- *    val myGen = Arbitrary.arbitrary[MyClass]
- *  </code>
- *  </p>
- *
- *  <p>
- *  The required implicit definition could look like this:
- *  </p>
- *
- *  <p>
- *  <code>
- *    implicit val arbMyClass: Arbitrary[MyClass] = Arbitrary(...)
- *  </code>
- *  </p>
- *
- *  <p>
- *  The factory method <code>Arbitrary(...)</code> takes a generator of type
- *  <code>Gen[T]</code> and returns an instance of <code>Arbitrary[T]</code>.
- *  </p>
- *
- *  <p>
- *  The <code>Arbitrary</code> module defines implicit <code>Arbitrary</code>
- *  instances for common types, for convenient use in your properties and
- *  generators.
- *  </p>
- */
-object Arbitrary {
-
-  import Gen.{value, choose, sized, listOf, listOf1,
-    frequency, oneOf, containerOf, resize}
-  import util.StdRand
-  import scala.collection.{immutable, mutable}
-  import java.util.Date
-
-  /** Creates an Arbitrary instance */
-  def apply[T](g: => Gen[T]): Arbitrary[T] = new Arbitrary[T] {
-    lazy val arbitrary = g
-  }
-
-  /** Returns an arbitrary generator for the type T. */
-  def arbitrary[T](implicit a: Arbitrary[T]): Gen[T] = a.arbitrary
-
-  /**** Arbitrary instances for each AnyVal ****/
-
-  /** Arbitrary AnyVal */
-  implicit lazy val arbAnyVal: Arbitrary[AnyVal] = Arbitrary(oneOf(
-    arbitrary[Unit], arbitrary[Boolean], arbitrary[Char], arbitrary[Byte],
-    arbitrary[Short], arbitrary[Int], arbitrary[Long], arbitrary[Float],
-    arbitrary[Double]
-  ))
-
-  /** Arbitrary instance of Boolean */
-  implicit lazy val arbBool: Arbitrary[Boolean] =
-    Arbitrary(oneOf(true, false))
-
-  /** Arbitrary instance of Int */
-  implicit lazy val arbInt: Arbitrary[Int] = Arbitrary(
-    Gen.chooseNum(Int.MinValue, Int.MaxValue)
-  )
-
-  /** Arbitrary instance of Long */
-  implicit lazy val arbLong: Arbitrary[Long] = Arbitrary(
-    Gen.chooseNum(Long.MinValue / 2, Long.MaxValue / 2)
-  )
-
-  /** Arbitrary instance of Float */
-  implicit lazy val arbFloat: Arbitrary[Float] = Arbitrary(
-    Gen.chooseNum(
-      Float.MinValue, Float.MaxValue
-      // I find that including these by default is a little TOO testy.
-      // Float.Epsilon, Float.NaN, Float.PositiveInfinity, Float.NegativeInfinity
-    )
-  )
-
-  /** Arbitrary instance of Double */
-  implicit lazy val arbDouble: Arbitrary[Double] = Arbitrary(
-    Gen.chooseNum(
-      Double.MinValue / 2, Double.MaxValue / 2
-      // As above.  Perhaps behind some option?
-      // Double.Epsilon, Double.NaN, Double.PositiveInfinity, Double.NegativeInfinity
-    )
-  )
-
-  /** Arbitrary instance of Char */
-  implicit lazy val arbChar: Arbitrary[Char] = Arbitrary(
-    Gen.frequency(
-      (0xD800-Char.MinValue, Gen.choose(Char.MinValue,0xD800-1)),
-      (Char.MaxValue-0xDFFF, Gen.choose(0xDFFF+1,Char.MaxValue))
-    )
-  )
-
-  /** Arbitrary instance of Byte */
-  implicit lazy val arbByte: Arbitrary[Byte] = Arbitrary(
-    Gen.chooseNum(Byte.MinValue, Byte.MaxValue)
-  )
-
-  /** Arbitrary instance of Short */
-  implicit lazy val arbShort: Arbitrary[Short] = Arbitrary(
-    Gen.chooseNum(Short.MinValue, Short.MaxValue)
-  )
-
-  /** Absolutely, totally, 100% arbitrarily chosen Unit. */
-  implicit lazy val arbUnit: Arbitrary[Unit] = Arbitrary(value(()))
-
-  /**** Arbitrary instances of other common types ****/
-
-  /** Arbitrary instance of String */
-  implicit lazy val arbString: Arbitrary[String] =
-    Arbitrary(arbitrary[List[Char]] map (_.mkString))
-
-  /** Arbitrary instance of Date */
-  implicit lazy val arbDate: Arbitrary[Date] = Arbitrary(for {
-    l <- arbitrary[Long]
-    d = new Date
-  } yield new Date(d.getTime + l))
-
-  /** Arbitrary instance of Throwable */
-  implicit lazy val arbThrowable: Arbitrary[Throwable] =
-    Arbitrary(value(new Exception))
-
-  /** Arbitrary BigInt */
-  implicit lazy val arbBigInt: Arbitrary[BigInt] = {
-    def chooseBigInt: Gen[BigInt] = sized((s: Int) => choose(-s, s)) map (x => BigInt(x))
-    def chooseReallyBigInt = chooseBigInt.combine(choose(32, 128))((x, y) => Some(x.get << y.get))
-
-    Arbitrary(
-      frequency(
-        (5, chooseBigInt),
-        (10, chooseReallyBigInt),
-        (1, BigInt(0)),
-        (1, BigInt(1)),
-        (1, BigInt(-1)),
-        (1, BigInt(Int.MaxValue) + 1),
-        (1, BigInt(Int.MinValue) - 1),
-        (1, BigInt(Long.MaxValue)),
-        (1, BigInt(Long.MinValue)),
-        (1, BigInt(Long.MaxValue) + 1),
-        (1, BigInt(Long.MinValue) - 1)
-      )
-    )
-  }
-
-  /** Arbitrary BigDecimal */
-  implicit lazy val arbBigDecimal: Arbitrary[BigDecimal] = {
-    import java.math.MathContext._
-    val mcGen = oneOf(UNLIMITED, DECIMAL32, DECIMAL64, DECIMAL128)
-    val bdGen = for {
-      x <- arbBigInt.arbitrary
-      mc <- mcGen
-      limit <- value(if(mc == UNLIMITED) 0 else math.max(x.abs.toString.length - mc.getPrecision, 0))
-      scale <- Gen.chooseNum(Int.MinValue + limit , Int.MaxValue)
-    } yield BigDecimal(x, scale, mc)
-    Arbitrary(bdGen)
-  }
-
-  /** Arbitrary java.lang.Number */
-  implicit lazy val arbNumber: Arbitrary[Number] = {
-    val gen = Gen.oneOf(
-      arbitrary[Byte], arbitrary[Short], arbitrary[Int], arbitrary[Long],
-      arbitrary[Float], arbitrary[Double]
-    )
-    Arbitrary(gen map (_.asInstanceOf[Number]))
-    // XXX TODO - restore BigInt and BigDecimal
-    // Arbitrary(oneOf(arbBigInt.arbitrary :: (arbs map (_.arbitrary) map toNumber) : _*))
-  }
-
-  /** Generates an arbitrary property */
-  implicit lazy val arbProp: Arbitrary[Prop] = {
-    import Prop._
-    val undecidedOrPassed = forAll { b: Boolean =>
-      b ==> true
-    }
-    Arbitrary(frequency(
-      (4, falsified),
-      (4, passed),
-      (3, proved),
-      (3, undecidedOrPassed),
-      (2, undecided),
-      (1, exception(null))
-    ))
-  }
-
-  /** Arbitrary instance of test params */
-  implicit lazy val arbTestParams: Arbitrary[Test.Params] =
-    Arbitrary(for {
-      minSuccTests <- choose(10,200)
-      maxDiscardRatio <- choose(0.2f,10f)
-      minSize <- choose(0,500)
-      sizeDiff <- choose(0,500)
-      maxSize <- choose(minSize, minSize + sizeDiff)
-      ws <- choose(1,4)
-    } yield Test.Params(
-      minSuccessfulTests = minSuccTests,
-      maxDiscardRatio = maxDiscardRatio,
-      minSize = minSize,
-      maxSize = maxSize,
-      workers = ws
-    ))
-
-  /** Arbitrary instance of gen params */
-  implicit lazy val arbGenParams: Arbitrary[Gen.Params] =
-    Arbitrary(for {
-      size <- arbitrary[Int] suchThat (_ >= 0)
-    } yield Gen.Params(size, StdRand))
-
-  /** Arbitrary instance of prop params */
-  implicit lazy val arbPropParams: Arbitrary[Prop.Params] =
-    Arbitrary(for {
-      genPrms <- arbitrary[Gen.Params]
-    } yield Prop.Params(genPrms, FreqMap.empty[immutable.Set[Any]]))
-
-
-  // Higher-order types //
-
-  /** Arbitrary instance of Gen */
-  implicit def arbGen[T](implicit a: Arbitrary[T]): Arbitrary[Gen[T]] =
-    Arbitrary(frequency(
-      (5, arbitrary[T] map (value(_))),
-      (1, Gen.fail)
-    ))
-
-  /** Arbitrary instance of option type */
-  implicit def arbOption[T](implicit a: Arbitrary[T]): Arbitrary[Option[T]] =
-    Arbitrary(sized(n => if(n == 0) value(None) else resize(n - 1, arbitrary[T]).map(Some(_))))
-
-  implicit def arbEither[T, U](implicit at: Arbitrary[T], au: Arbitrary[U]): Arbitrary[Either[T, U]] =
-    Arbitrary(oneOf(arbitrary[T].map(Left(_)), arbitrary[U].map(Right(_))))
-
-  /** Arbitrary instance of immutable map */
-  implicit def arbImmutableMap[T,U](implicit at: Arbitrary[T], au: Arbitrary[U]
-  ): Arbitrary[immutable.Map[T,U]] = Arbitrary(
-    for(seq <- arbitrary[Stream[(T,U)]]) yield immutable.Map(seq: _*)
-  )
-
-  /** Arbitrary instance of mutable map */
-  implicit def arbMutableMap[T,U](implicit at: Arbitrary[T], au: Arbitrary[U]
-  ): Arbitrary[mutable.Map[T,U]] = Arbitrary(
-    for(seq <- arbitrary[Stream[(T,U)]]) yield mutable.Map(seq: _*)
-  )
-
-  /** Arbitrary instance of any buildable container (such as lists, arrays,
-   *  streams, etc). The maximum size of the container depends on the size
-   *  generation parameter. */
-  implicit def arbContainer[C[_],T](implicit a: Arbitrary[T], b: Buildable[T,C]
-  ): Arbitrary[C[T]] = Arbitrary(containerOf[C,T](arbitrary[T]))
-
-  /** Arbitrary instance of any array. */
-  implicit def arbArray[T](implicit a: Arbitrary[T], c: ClassTag[T]
-  ): Arbitrary[Array[T]] = Arbitrary(containerOf[Array,T](arbitrary[T]))
-
-
-  // Functions //
-
-  /** Arbitrary instance of Function1 */
-  implicit def arbFunction1[T1,R](implicit a: Arbitrary[R]
-  ): Arbitrary[T1 => R] = Arbitrary(
-    for(r <- arbitrary[R]) yield (t1: T1) => r
-  )
-
-  /** Arbitrary instance of Function2 */
-  implicit def arbFunction2[T1,T2,R](implicit a: Arbitrary[R]
-  ): Arbitrary[(T1,T2) => R] = Arbitrary(
-    for(r <- arbitrary[R]) yield (t1: T1, t2: T2) => r
-  )
-
-  /** Arbitrary instance of Function3 */
-  implicit def arbFunction3[T1,T2,T3,R](implicit a: Arbitrary[R]
-  ): Arbitrary[(T1,T2,T3) => R] = Arbitrary(
-    for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3) => r
-  )
-
-  /** Arbitrary instance of Function4 */
-  implicit def arbFunction4[T1,T2,T3,T4,R](implicit a: Arbitrary[R]
-  ): Arbitrary[(T1,T2,T3,T4) => R] = Arbitrary(
-    for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3, t4: T4) => r
-  )
-
-  /** Arbitrary instance of Function5 */
-  implicit def arbFunction5[T1,T2,T3,T4,T5,R](implicit a: Arbitrary[R]
-  ): Arbitrary[(T1,T2,T3,T4,T5) => R] = Arbitrary(
-    for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5) => r
-  )
-
-
-  // Tuples //
-
-  /** Arbitrary instance of 2-tuple */
-  implicit def arbTuple2[T1,T2](implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2]
-  ): Arbitrary[(T1,T2)] =
-    Arbitrary(for {
-      t1 <- arbitrary[T1]
-      t2 <- arbitrary[T2]
-    } yield (t1,t2))
-
-  /** Arbitrary instance of 3-tuple */
-  implicit def arbTuple3[T1,T2,T3](implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3]
-  ): Arbitrary[(T1,T2,T3)] =
-    Arbitrary(for {
-      t1 <- arbitrary[T1]
-      t2 <- arbitrary[T2]
-      t3 <- arbitrary[T3]
-    } yield (t1,t2,t3))
-
-  /** Arbitrary instance of 4-tuple */
-  implicit def arbTuple4[T1,T2,T3,T4](implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4]
-  ): Arbitrary[(T1,T2,T3,T4)] =
-    Arbitrary(for {
-      t1 <- arbitrary[T1]
-      t2 <- arbitrary[T2]
-      t3 <- arbitrary[T3]
-      t4 <- arbitrary[T4]
-    } yield (t1,t2,t3,t4))
-
-  /** Arbitrary instance of 5-tuple */
-  implicit def arbTuple5[T1,T2,T3,T4,T5](implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
-    a5: Arbitrary[T5]
-  ): Arbitrary[(T1,T2,T3,T4,T5)] =
-    Arbitrary(for {
-      t1 <- arbitrary[T1]
-      t2 <- arbitrary[T2]
-      t3 <- arbitrary[T3]
-      t4 <- arbitrary[T4]
-      t5 <- arbitrary[T5]
-    } yield (t1,t2,t3,t4,t5))
-
-  /** Arbitrary instance of 6-tuple */
-  implicit def arbTuple6[T1,T2,T3,T4,T5,T6](implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
-    a5: Arbitrary[T5], a6: Arbitrary[T6]
-  ): Arbitrary[(T1,T2,T3,T4,T5,T6)] =
-    Arbitrary(for {
-      t1 <- arbitrary[T1]
-      t2 <- arbitrary[T2]
-      t3 <- arbitrary[T3]
-      t4 <- arbitrary[T4]
-      t5 <- arbitrary[T5]
-      t6 <- arbitrary[T6]
-    } yield (t1,t2,t3,t4,t5,t6))
-
-  /** Arbitrary instance of 7-tuple */
-  implicit def arbTuple7[T1,T2,T3,T4,T5,T6,T7](implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
-    a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7]
-  ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7)] =
-    Arbitrary(for {
-      t1 <- arbitrary[T1]
-      t2 <- arbitrary[T2]
-      t3 <- arbitrary[T3]
-      t4 <- arbitrary[T4]
-      t5 <- arbitrary[T5]
-      t6 <- arbitrary[T6]
-      t7 <- arbitrary[T7]
-    } yield (t1,t2,t3,t4,t5,t6,t7))
-
-  /** Arbitrary instance of 8-tuple */
-  implicit def arbTuple8[T1,T2,T3,T4,T5,T6,T7,T8](implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
-    a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8]
-  ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7,T8)] =
-    Arbitrary(for {
-      t1 <- arbitrary[T1]
-      t2 <- arbitrary[T2]
-      t3 <- arbitrary[T3]
-      t4 <- arbitrary[T4]
-      t5 <- arbitrary[T5]
-      t6 <- arbitrary[T6]
-      t7 <- arbitrary[T7]
-      t8 <- arbitrary[T8]
-    } yield (t1,t2,t3,t4,t5,t6,t7,t8))
-
-  /** Arbitrary instance of 9-tuple */
-  implicit def arbTuple9[T1,T2,T3,T4,T5,T6,T7,T8,T9](implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
-    a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8],
-    a9: Arbitrary[T9]
-  ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] =
-    Arbitrary(for {
-      t1 <- arbitrary[T1]
-      t2 <- arbitrary[T2]
-      t3 <- arbitrary[T3]
-      t4 <- arbitrary[T4]
-      t5 <- arbitrary[T5]
-      t6 <- arbitrary[T6]
-      t7 <- arbitrary[T7]
-      t8 <- arbitrary[T8]
-      t9 <- arbitrary[T9]
-    } yield (t1,t2,t3,t4,t5,t6,t7,t8,t9))
-
-}
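The function and tuple instances above are resolved implicitly whenever a property or generator asks for them. A minimal sketch of that, written against this ScalaCheck snapshot's API and not part of the diff:

    import org.scalacheck.Arbitrary.arbitrary
    import org.scalacheck.Prop.{forAll, propBoolean}

    // arbTuple2 supplies the Arbitrary[(Int, String)] instance implicitly.
    val swapTwice = forAll { (p: (Int, String)) => p.swap.swap == p }

    // arbFunction1 yields constant functions, exactly as defined above.
    val fnSample: Option[Int => Boolean] = arbitrary[Int => Boolean].sample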
diff --git a/src/scalacheck/org/scalacheck/Arg.scala b/src/scalacheck/org/scalacheck/Arg.scala
deleted file mode 100644
index 8959211..0000000
--- a/src/scalacheck/org/scalacheck/Arg.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/*-------------------------------------------------------------------------*\
-**  ScalaCheck                                                             **
-**  Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved.          **
-**  http://www.scalacheck.org                                              **
-**                                                                         **
-**  This software is released under the terms of the Revised BSD License.  **
-**  There is NO WARRANTY. See the file LICENSE for the full text.          **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-case class Arg[+T](
-  label: String,
-  arg: T,
-  shrinks: Int,
-  origArg: T
-)(implicit prettyPrinter: T => Pretty) {
-  lazy val prettyArg: Pretty = prettyPrinter(arg)
-  lazy val prettyOrigArg: Pretty = prettyPrinter(origArg)
-}
diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala
deleted file mode 100644
index 88ef8ae..0000000
--- a/src/scalacheck/org/scalacheck/Commands.scala
+++ /dev/null
@@ -1,148 +0,0 @@
-/*-------------------------------------------------------------------------*\
-**  ScalaCheck                                                             **
-**  Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved.          **
-**  http://www.scalacheck.org                                              **
-**                                                                         **
-**  This software is released under the terms of the Revised BSD License.  **
-**  There is NO WARRANTY. See the file LICENSE for the full text.          **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import Gen._
-import Prop._
-import Shrink._
-
-/** See User Guide for usage examples */
-trait Commands extends Prop {
-
-  /** The abstract state data type. The abstract state should model all the
-   *  features we need from the real state of the system under test, and
-   *  leave out every detail that isn't needed for specifying our pre- and
-   *  postconditions. The state type must be called State and be immutable. */
-  type State <: AnyRef
-
-  class Binding(private val key: State) {
-    def get: Any = bindings.find(_._1 eq key) match {
-      case None => sys.error("No value bound")
-      case Some(x) => x._2
-    }
-  }
-
-  /** Abstract commands are defined as subtypes of the traits Command or SetCommand.
-   *  Each command must have a run method and a method that returns the new abstract
-   *  state, as it should look after the command has been run.
-   *  A command can also define a precondition that states how the current
-   *  abstract state must look if the command should be allowed to run.
-   *  Finally, we can also define a postcondition which verifies that the
-   *  system under test is in a correct state after the command execution. */
-  trait Command {
-
-    /** Used internally. */
-    protected[Commands] def run_(s: State) = run(s)
-
-    def run(s: State): Any
-    def nextState(s: State): State
-
-    /** Returns all preconditions merged into a single function */
-    def preCondition: (State => Boolean) = s => preConditions.toList.forall(_.apply(s))
-
-    /** A precondition is a function that
-     *  takes the current abstract state as a parameter and returns a boolean
-     *  that says if the precondition is fulfilled or not. You can add several
-     *  conditions to the precondition list */
-    val preConditions = new scala.collection.mutable.ListBuffer[State => Boolean]
-
-    /** Returns all postconditions merged into a single function */
-    def postCondition: (State,State,Any) => Prop = (s0,s1,r) => all(postConditions.map(_.apply(s0,s1,r)): _*)
-
-    /** A postcondition is a function that
-     *  takes three parameters, s0, s1 and r. s0 is the abstract state before
-     *  the command was run, s1 is the abstract state after the command was
-     *  run, and r is the result from the command's run
-     *  method. The postcondition function should return a Boolean (or
-     *  a Prop instance) that says if the condition holds or not. You can add several
-     *  conditions to the postConditions list. */
-    val postConditions = new scala.collection.mutable.ListBuffer[(State,State,Any) => Prop]
-  }
-
-  /** A command that binds its result for later use */
-  trait SetCommand extends Command {
-    /** Used internally. */
-    protected[Commands] final override def run_(s: State) = {
-      val r = run(s)
-      bindings += ((s,r))
-      r
-    }
-
-    final def nextState(s: State) = nextState(s, new Binding(s))
-    def nextState(s: State, b: Binding): State
-  }
-
-  private case class Cmds(cs: List[Command], ss: List[State]) {
-    override def toString = cs.map(_.toString).mkString(", ")
-  }
-
-  private val bindings = new scala.collection.mutable.ListBuffer[(State,Any)]
-
-  private def initState() = {
-    bindings.clear()
-    initialState()
-  }
-
-  private def genCmds: Gen[Cmds] = {
-    def sizedCmds(s: State)(sz: Int): Gen[Cmds] =
-      if(sz <= 0) value(Cmds(Nil, Nil)) else for {
-        c <- genCommand(s) suchThat (_.preCondition(s))
-        Cmds(cs,ss) <- sizedCmds(c.nextState(s))(sz-1)
-      } yield Cmds(c::cs, s::ss)
-
-    for {
-      s0 <- wrap(value(initialState()))
-      cmds <- sized(sizedCmds(s0))
-    } yield cmds
-  }
-
-  private def validCmds(s: State, cs: List[Command]): Option[Cmds] =
-    cs match {
-      case Nil => Some(Cmds(Nil, s::Nil))
-      case c::_ if !c.preCondition(s) => None
-      case c::cmds => for {
-        Cmds(_, ss) <- validCmds(c.nextState(s), cmds)
-      } yield Cmds(cs, s::ss)
-    }
-
-  private def runCommands(cmds: Cmds): Prop = cmds match {
-    case Cmds(Nil, _) => proved
-    case Cmds(c::cs, s::ss) =>
-      c.postCondition(s,c.nextState(s),c.run_(s)) && runCommands(Cmds(cs,ss))
-    case _ => sys.error("Should not be here")
-  }
-
-  private def commandsProp: Prop = {
-    def shrinkCmds(cmds: Cmds) = cmds match { case Cmds(cs,_) =>
-      shrink(cs)(shrinkContainer).flatMap(cs => validCmds(initialState(), cs).toList)
-    }
-
-    forAllShrink(genCmds label "COMMANDS", shrinkCmds)(runCommands _)
-  }
-
-  def apply(p: Prop.Params) = commandsProp(p)
-
-  /** initialState should reset the system under test to a well-defined
-   *  initial state, and return the abstract version of that state. */
-  def initialState(): State
-
-  /** The command generator. Given an abstract state, the generator
-   *  should return a command that is allowed to run in that state. Note that
-   *  it is still necessary to define preconditions on the commands if there
-   *  are any. The generator only gives a hint of which commands are
-   *  suitable for a given state; the preconditions will still be checked before
-   *  a command runs. Sometimes you may want to adjust the distribution of
-   *  your command generator according to the state, or do other calculations
-   *  based on the state. */
-  def genCommand(s: State): Gen[Command]
-
-}
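As a rough illustration of the trait deleted above, here is a hedged sketch of a Commands specification for a trivial counter; the mutable `sut` variable and the `Inc` command are invented for this example only:

    import org.scalacheck.{Commands, Gen, Prop}

    object CounterSpec extends Commands {
      private var sut = 0                      // stands in for the system under test

      case class CounterState(count: Int)
      type State = CounterState

      def initialState() = { sut = 0; CounterState(0) }

      case object Inc extends Command {
        def run(s: State) = { sut += 1; sut }
        def nextState(s: State) = s.copy(count = s.count + 1)
        // Postcondition: run's return value matches the new abstract count.
        postConditions += { (s0, s1, r) => Prop.propBoolean(r == s1.count) }
      }

      def genCommand(s: State): Gen[Command] = Gen.value(Inc)
    }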
diff --git a/src/scalacheck/org/scalacheck/ConsoleReporter.scala b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
deleted file mode 100644
index 93f1dc2..0000000
--- a/src/scalacheck/org/scalacheck/ConsoleReporter.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/*-------------------------------------------------------------------------*\
-**  ScalaCheck                                                             **
-**  Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved.          **
-**  http://www.scalacheck.org                                              **
-**                                                                         **
-**  This software is released under the terms of the Revised BSD License.  **
-**  There is NO WARRANTY. See the file LICENSE for the full text.          **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import Pretty._
-import util.FreqMap
-
-class ConsoleReporter(val verbosity: Int) extends Test.TestCallback {
-
-  private val prettyPrms = Params(verbosity)
-
-  override def onTestResult(name: String, res: Test.Result) = {
-    if(verbosity > 0) {
-      if(name == "") {
-        val s = (if(res.passed) "+ " else "! ") + pretty(res, prettyPrms)
-        printf("\r%s\n", format(s, "", "", 75))
-      } else {
-        val s = (if(res.passed) "+ " else "! ") + name + ": " +
-          pretty(res, prettyPrms)
-        printf("\r%s\n", format(s, "", "", 75))
-      }
-    }
-  }
-
-}
-
-object ConsoleReporter {
-
-  /** Factory method, creates a ConsoleReporter with
-   *  the given verbosity */
-  def apply(verbosity: Int = 0) = new ConsoleReporter(verbosity)
-
-  def testStatsEx(msg: String, res: Test.Result) = {
-    lazy val m = if(msg.length == 0) "" else msg + ": "
-    res.status match {
-      case Test.Proved(_) => {}
-      case Test.Passed => {}
-      case f @ Test.Failed(_, _) => sys.error(m + f)
-      case Test.Exhausted => {}
-      case f @ Test.GenException(_) => sys.error(m + f)
-      case f @ Test.PropException(_, _, _) => sys.error(m + f)
-    }
-  }
-
-}
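For orientation, the reporter is normally wired into the test parameters; a sketch against this snapshot's API, assuming Test.Params exposes the testCallback field that Prop.check relies on:

    import org.scalacheck.{Prop, Test, ConsoleReporter}

    val prop   = Prop.forAll { (n: Int) => n + 0 == n }
    val params = Test.Params(testCallback = ConsoleReporter(verbosity = 2))
    val result = Test.check(params, prop)
    ConsoleReporter.testStatsEx("identity law", result)  // calls sys.error on failure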
diff --git a/src/scalacheck/org/scalacheck/Gen.scala b/src/scalacheck/org/scalacheck/Gen.scala
deleted file mode 100644
index 64bb61c..0000000
--- a/src/scalacheck/org/scalacheck/Gen.scala
+++ /dev/null
@@ -1,531 +0,0 @@
-/*-------------------------------------------------------------------------*\
-**  ScalaCheck                                                             **
-**  Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved.          **
-**  http://www.scalacheck.org                                              **
-**                                                                         **
-**  This software is released under the terms of the Revised BSD License.  **
-**  There is NO WARRANTY. See the file LICENSE for the full text.          **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import scala.collection.mutable.ListBuffer
-import util.Buildable
-import Prop._
-import Arbitrary._
-
-trait Choose[T] {
-  def choose(min: T, max: T): Gen[T]
-}
-
-object Choose {
-  import Gen.{fail, parameterized, value}
-
-  implicit val chooseLong: Choose[Long] = new Choose[Long] {
-    def choose(low: Long, high: Long) =
-      if(low > high || (high-low < 0)) fail
-      else parameterized(prms => value(prms.choose(low,high)))
-  }
-
-  implicit val chooseDouble: Choose[Double] = new Choose[Double] {
-    def choose(low: Double, high: Double) =
-      if (low > high || (high-low > Double.MaxValue)) fail
-      else parameterized(prms => value(prms.choose(low,high)))
-  }
-
-  implicit val chooseInt: Choose[Int] = new Choose[Int] {
-    def choose(low: Int, high: Int) =
-      chooseLong.choose(low, high).map(_.toInt)
-  }
-
-  implicit val chooseByte: Choose[Byte] = new Choose[Byte] {
-    def choose(low: Byte, high: Byte) =
-      chooseLong.choose(low, high).map(_.toByte)
-  }
-
-  implicit val chooseShort: Choose[Short] = new Choose[Short] {
-    def choose(low: Short, high: Short) =
-      chooseLong.choose(low, high).map(_.toShort)
-  }
-
-  implicit val chooseChar: Choose[Char] = new Choose[Char] {
-    def choose(low: Char, high: Char) =
-      chooseLong.choose(low, high).map(_.toChar)
-  }
-
-  implicit val chooseFloat: Choose[Float] = new Choose[Float] {
-    def choose(low: Float, high: Float) =
-      chooseDouble.choose(low, high).map(_.toFloat)
-  }
-}
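Gen.choose, defined further down, simply delegates to whichever of these Choose instances matches the element type:

    import org.scalacheck.Gen

    val digit  = Gen.choose(0, 9)        // resolved via chooseInt
    val letter = Gen.choose('a', 'z')    // resolved via chooseChar
    val ratio  = Gen.choose(0.0, 1.0)    // resolved via chooseDouble
    val sample = digit.sample            // Option[Int]; None only for an invalid range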
-
-case class FiniteGenRes[+T](
-  r: T
-)
-
-sealed trait FiniteGen[+T] extends Gen[FiniteGenRes[T]]
-
-
-/** Class that represents a generator. */
-sealed trait Gen[+T] {
-
-  import Gen.choose
-
-  var label = "" // TODO: Ugly mutable field
-
-  /** Put a label on the generator to make test reports clearer */
-  def label(l: String): Gen[T] = {
-    label = l
-    this
-  }
-
-  /** Put a label on the generator to make test reports clearer */
-  def :|(l: String) = label(l)
-
-  /** Put a label on the generator to make test reports clearer */
-  def |:(l: String) = label(l)
-
-  /** Put a label on the generator to make test reports clearer */
-  def :|(l: Symbol) = label(l.toString.drop(1))
-
-  /** Put a label on the generator to make test reports clearer */
-  def |:(l: Symbol) = label(l.toString.drop(1))
-
-  def apply(prms: Gen.Params): Option[T]
-
-  def map[U](f: T => U): Gen[U] = Gen(prms => this(prms).map(f)).label(label)
-
-  def map2[U, V](g: Gen[U])(f: (T, U) => V) =
-    combine(g)((t, u) => t.flatMap(t => u.flatMap(u => Some(f(t, u)))))
-
-  def map3[U, V, W](gu: Gen[U], gv: Gen[V])(f: (T, U, V) => W) =
-    combine3(gu, gv)((t, u, v) => t.flatMap(t => u.flatMap(u => v.flatMap(v => Some(f(t, u, v))))))
-
-  def map4[U, V, W, X](gu: Gen[U], gv: Gen[V], gw: Gen[W])(f: (T, U, V, W) => X) =
-    combine4(gu, gv, gw)((t, u, v, w) => t.flatMap(t => u.flatMap(u => v.flatMap(v => w.flatMap(w => Some(f(t, u, v, w)))))))
-
-  def map5[U, V, W, X, Y](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X])(f: (T, U, V, W, X) => Y) =
-    combine5(gu, gv, gw, gx)((t, u, v, w, x) => t.flatMap(t => u.flatMap(u => v.flatMap(v => w.flatMap(w => x.flatMap(x => Some(f(t, u, v, w, x))))))))
-
-  def map6[U, V, W, X, Y, Z](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X], gy: Gen[Y])(f: (T, U, V, W, X, Y) => Z) =
-    combine6(gu, gv, gw, gx, gy)((t, u, v, w, x, y) => t.flatMap(t => u.flatMap(u => v.flatMap(v => w.flatMap(w => x.flatMap(x => y.flatMap(y => Some(f(t, u, v, w, x, y)))))))))
-
-  def flatMap[U](f: T => Gen[U]): Gen[U] = Gen(prms => for {
-    t <- this(prms)
-    u <- f(t)(prms)
-  } yield u)
-
-  def filter(p: T => Boolean): Gen[T] = Gen(prms => for {
-    t <- this(prms)
-    u <- if (p(t)) Some(t) else None
-  } yield u).label(label)
-
-  def withFilter(p: T => Boolean) = new GenWithFilter[T](this, p)
-
-  final class GenWithFilter[+A](self: Gen[A], p: A => Boolean) {
-    def map[B](f: A => B): Gen[B] = self filter p map f
-    def flatMap[B](f: A => Gen[B]): Gen[B] = self filter p flatMap f
-    def withFilter(q: A => Boolean): GenWithFilter[A] = new GenWithFilter[A](self, x => p(x) && q(x))
-  }
-
-  def suchThat(p: T => Boolean): Gen[T] = filter(p)
-
-  def combine[U,V](g: Gen[U])(f: (Option[T],Option[U]) => Option[V]): Gen[V] =
-    Gen(prms => f(this(prms), g(prms)))
-
-  def combine3[U, V, W](gu: Gen[U], gv: Gen[V])
-      (f: (Option[T], Option[U], Option[V]) => Option[W]) =
-    Gen(prms => f(this(prms), gu(prms), gv(prms)))
-
-  def combine4[U, V, W, X](gu: Gen[U], gv: Gen[V], gw: Gen[W])
-      (f: (Option[T], Option[U], Option[V], Option[W]) => Option[X]) =
-    Gen(prms => f(this(prms), gu(prms), gv(prms), gw(prms)))
-
-  def combine5[U, V, W, X, Y](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X])
-      (f: (Option[T], Option[U], Option[V], Option[W], Option[X]) => Option[Y]) =
-    Gen(prms => f(this(prms), gu(prms), gv(prms), gw(prms), gx(prms)))
-
-  def combine6[U, V, W, X, Y, Z](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X], gy: Gen[Y])
-      (f: (Option[T], Option[U], Option[V], Option[W], Option[X], Option[Y]) => Option[Z]) =
-        Gen(prms => f(this(prms), gu(prms), gv(prms), gw(prms), gx(prms), gy(prms)))
-
-  def ap[U](g: Gen[T => U]) = flatMap(t => g.flatMap(u => Gen(p => Some(u(t)))))
-
-  override def toString =
-    if(label.length == 0) "Gen()" else "Gen(\"" + label + "\")"
-
-  /** Returns a new property that holds if and only if both this
-   *  and the given generator generate the same result, or both
-   *  generators generate no result.  */
-  def ==[U](g: Gen[U]) = Prop(prms =>
-    (this(prms.genPrms), g(prms.genPrms)) match {
-      case (None,None) => proved(prms)
-      case (Some(r1),Some(r2)) if r1 == r2 => proved(prms)
-      case _ => falsified(prms)
-    }
-  )
-
-  def !=[U](g: Gen[U]) = forAll(this)(r => forAll(g)(_ != r))
-
-  def !==[U](g: Gen[U]) = Prop(prms =>
-    (this(prms.genPrms), g(prms.genPrms)) match {
-      case (None,None) => falsified(prms)
-      case (Some(r1),Some(r2)) if r1 == r2 => falsified(prms)
-      case _ => proved(prms)
-    }
-  )
-
-  private var freq = 1
-  def |[U >: T](g: Gen[U]): Gen[U] = {
-    val h = Gen.frequency((freq, this), (1, g))
-    h.freq = freq+1
-    h
-  }
-
-  /** Generates a sample value by using default parameters */
-  def sample: Option[T] = apply(Gen.Params())
-
-}
-
-
-/** Contains combinators for building generators. */
-object Gen {
-
-  import Arbitrary._
-  import Shrink._
-
-  /** Record that encapsulates all parameters required for data generation */
-  case class Params(
-    size: Int = 100,
-    rng: java.util.Random = util.StdRand
-  ) {
-    def resize(newSize: Int) = this.copy(size = newSize)
-
-    /** @throws IllegalArgumentException if l is greater than h, or if
-     *  the range between l and h doesn't fit in a Long. */
-    def choose(l: Long, h: Long): Long = {
-      val d = h-l
-      if (d < 0) throw new IllegalArgumentException("Invalid range")
-      else l + math.abs(rng.nextLong % (d+1))
-    }
-
-    /** @throws IllegalArgumentException if l is greater than h, or if
-     *  the range between l and h doesn't fit in a Double. */
-    def choose(l: Double, h: Double) = {
-      val d = h-l
-      if (d < 0 || d > Double.MaxValue)
-        throw new IllegalArgumentException("Invalid range")
-      else if (d == 0) l
-      else rng.nextDouble * (h-l) + l
-    }
-  }
-
-  /* Generator factory method */
-  def apply[T](g: Gen.Params => Option[T]) = new Gen[T] {
-    def apply(p: Gen.Params) = g(p)
-  }
-
-  /* Convenience method for using the <code>frequency</code> method like this:
-   * <code>frequency((1, "foo"), (3, "bar"))</code> */
-  implicit def freqTuple[T](t: (Int, T)): (Int, Gen[T]) = (t._1, value(t._2))
-
-
-  //// Various Generator Combinators ////
-
-  /** Sequences generators. If any of the given generators fails, the
-   *  resulting generator will also fail. */
-  def sequence[C[_],T](gs: Iterable[Gen[T]])(implicit b: Buildable[T,C]): Gen[C[T]] = Gen(prms => {
-    val builder = b.builder
-    var none = false
-    val xs = gs.iterator
-    while(xs.hasNext && !none) xs.next.apply(prms) match {
-      case None => none = true
-      case Some(x) => builder += x
-    }
-    if(none) None else Some(builder.result())
-  })
-
-  /** Wraps a generator lazily. The given parameter is only evaluated once,
-   *  and not until the wrapper generator is evaluated. */
-  def lzy[T](g: => Gen[T]) = new Gen[T] {
-    lazy val h = g
-    def apply(prms: Params) = h(prms)
-  }
-
-  /** Wraps a generator for later evaluation. The given parameter is
-   *  evaluated each time the wrapper generator is evaluated. */
-  def wrap[T](g: => Gen[T]) = Gen(p => g(p))
-
-  /** A generator that always generates the given value */
-  implicit def value[T](x: T) = Gen(p => Some(x))
-
-  /** A generator that never generates a value */
-  def fail[T]: Gen[T] = Gen(p => None)
-
-  /** A generator that generates a random value in the given (inclusive)
-   *  range. If the range is invalid, the generator will not generate any value.
-   */
-  def choose[T](min: T, max: T)(implicit c: Choose[T]): Gen[T] = {
-    c.choose(min, max)
-  }
-
-  /** Creates a generator that can access its generation parameters */
-  def parameterized[T](f: Params => Gen[T]): Gen[T] = Gen(prms => f(prms)(prms))
-
-  /** Creates a generator that can access its generation size */
-  def sized[T](f: Int => Gen[T]) = parameterized(prms => f(prms.size))
-
-  /** Creates a resized version of a generator */
-  def resize[T](s: Int, g: Gen[T]) = Gen(prms => g(prms.resize(s)))
-
-  /** Chooses one of the given generators with a weighted random distribution */
-  def frequency[T](gs: (Int,Gen[T])*): Gen[T] = {
-    lazy val tot = (gs.map(_._1) :\ 0) (_+_)
-
-    def pick(n: Int, l: List[(Int,Gen[T])]): Gen[T] = l match {
-      case Nil => fail
-      case (k,g)::gs => if(n <= k) g else pick(n-k, gs)
-    }
-
-    for {
-      n <- choose(1,tot)
-      x <- pick(n,gs.toList)
-    } yield x
-  }
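A short sketch of the weighted choice above; the 3:1 weighting makes vowels roughly three times as likely as an arbitrary lower-case letter:

    import org.scalacheck.Gen

    val vowelHeavy: Gen[Char] = Gen.frequency(
      (3, Gen.oneOf("aeiou".toSeq)),   // chosen with weight 3
      (1, Gen.alphaLowerChar)          // chosen with weight 1
    )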
-
-  /** Picks a random value from a list */
-  def oneOf[T](xs: Seq[T]): Gen[T] = if(xs.isEmpty) fail else for {
-    i <- choose(0, xs.size-1)
-  } yield xs(i)
-
-  /** Picks a random generator from a list */
-  def oneOf[T](g1: Gen[T], g2: Gen[T], gs: Gen[T]*) = for {
-    i <- choose(0, gs.length+1)
-    x <- if(i == 0) g1 else if(i == 1) g2 else gs(i-2)
-  } yield x
-
-
-  //// List Generators ////
-
-  /** Generates a container of any type for which there exists an implicit
-   *  <code>Buildable</code> instance. The elements in the container will
-   *  be generated by the given generator. The size of the generated container
-   *  is given by <code>n</code>. */
-  def containerOfN[C[_],T](n: Int, g: Gen[T])(implicit b: Buildable[T,C]
-  ): Gen[C[T]] = sequence[C,T](new Iterable[Gen[T]] {
-    def iterator = new Iterator[Gen[T]] {
-      var i = 0
-      def hasNext = i < n
-      def next = { i += 1; g }
-    }
-  })
-
-  /** Generates a container of any type for which there exists an implicit
-   *  <code>Buildable</code> instance. The elements in the container will
-   *  be generated by the given generator. The size of the container is
-   *  bounded by the size parameter used when generating values. */
-  def containerOf[C[_],T](g: Gen[T])(implicit b: Buildable[T,C]): Gen[C[T]] =
-    sized(size => for(n <- choose(0,size); c <- containerOfN[C,T](n,g)) yield c)
-
-  /** Generates a non-empty container of any type for which there exists an
-   *  implicit <code>Buildable</code> instance. The elements in the container
-   *  will be generated by the given generator. The size of the container is
-   *  bounded by the size parameter used when generating values. */
-  def containerOf1[C[_],T](g: Gen[T])(implicit b: Buildable[T,C]): Gen[C[T]] =
-    sized(size => for(n <- choose(1,size); c <- containerOfN[C,T](n,g)) yield c)
-
-  /** Generates a list of random length. The maximum length depends on the
-   *  size parameter. This method is equivalent to calling
-   *  <code>containerOf[List,T](g)</code>. */
-  def listOf[T](g: => Gen[T]) = containerOf[List,T](g)
-
-  /** Generates a non-empty list of random length. The maximum length depends
-   *  on the size parameter. This method is equivalent to calling
-   *  <code>containerOf1[List,T](g)</code>. */
-  def listOf1[T](g: => Gen[T]) = containerOf1[List,T](g)
-
-  /** Generates a list of the given length. This method is equivalent to calling
-   *  <code>containerOfN[List,T](n,g)</code>. */
-  def listOfN[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
-
-  /** A generator that picks a random number of elements from a list */
-  def someOf[T](l: Iterable[T]) = choose(0,l.size) flatMap (pick(_,l))
-
-  /** A generator that picks a random number of elements from a list */
-  def someOf[T](g1: Gen[T], g2: Gen[T], gs: Gen[T]*) = for {
-    n <- choose(0, gs.length+2)
-    x <- pick(n, g1, g2, gs: _*)
-  } yield x
-
-  /** A generator that picks a given number of elements from a list, randomly */
-  def pick[T](n: Int, l: Iterable[T]): Gen[Seq[T]] =
-    if(n > l.size || n < 0) fail
-    else Gen(prms => {
-      val buf = new ListBuffer[T]
-      buf ++= l
-      while(buf.length > n) {
-        val g = choose(0, buf.length-1)
-        buf.remove(g(prms).get)
-      }
-      Some(buf)
-    })
-
-  /** A generator that picks a given number of elements from a list, randomly */
-  def pick[T](n: Int, g1: Gen[T], g2: Gen[T], gs: Gen[T]*): Gen[Seq[T]] = for {
-    is <- pick(n, 0 until (gs.size+2))
-    allGs = gs ++ (g1::g2::Nil)
-    xs <- sequence[List,T](is.toList.map(allGs(_)))
-  } yield xs
-
-
-  //// Character Generators ////
-
-  /* Generates a numerical character */
-  def numChar: Gen[Char] = choose(48,57) map (_.toChar)
-
-  /* Generates an upper-case alpha character */
-  def alphaUpperChar: Gen[Char] = choose(65,90) map (_.toChar)
-
-  /* Generates a lower-case alpha character */
-  def alphaLowerChar: Gen[Char] = choose(97,122) map (_.toChar)
-
-  /* Generates an alpha character */
-  def alphaChar = frequency((1,alphaUpperChar), (9,alphaLowerChar))
-
-  /* Generates an alphanumerical character */
-  def alphaNumChar = frequency((1,numChar), (9,alphaChar))
-
-  //// String Generators ////
-
-  /* Generates a string that starts with a lower-case alpha character,
-   * and only contains alphanumerical characters */
-  def identifier: Gen[String] = for {
-    c <- alphaLowerChar
-    cs <- listOf(alphaNumChar)
-  } yield (c::cs).mkString
-
-  /* Generates a string of alpha characters */
-  def alphaStr: Gen[String] = for(cs <- listOf(Gen.alphaChar)) yield cs.mkString
-
-  /* Generates a string of digits */
-  def numStr: Gen[String] = for(cs <- listOf(Gen.numChar)) yield cs.mkString
-
-  //// Number Generators ////
-
-  /** Generates uniformly distributed positive numbers, with an
-   *  upper bound given by the generation size parameter. */
-  def posNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
-    import num._
-    sized(max => c.choose(one, fromInt(max)))
-  }
-
-  /** Generates uniformly distributed negative numbers, with a
-   *  lower bound given by the negated generation size parameter. */
-  def negNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
-    import num._
-    sized(max => c.choose(-fromInt(max), -one))
-  }
-
-  /** Generates numbers within the given inclusive range, with
-   *  extra weight on zero, +/- unity, both extremities, and any special
-   *  numbers provided. The special numbers must lie within the given range,
-   *  otherwise they won't be included. */
-  def chooseNum[T](minT: T, maxT: T, specials: T*)(
-    implicit num: Numeric[T], c: Choose[T]
-  ): Gen[T] = {
-    import num._
-    val basics = List(minT, maxT, zero, one, -one)
-    val basicsAndSpecials = for {
-      t <- specials ++ basics if t >= minT && t <= maxT
-    } yield (1, value(t))
-    val allGens = basicsAndSpecials ++ List(
-      (basicsAndSpecials.length, c.choose(minT, maxT))
-    )
-    frequency(allGens: _*)
-  }
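For example, the following generator leans on the bounds, zero, plus and minus one, and the special value 42, while still sampling the rest of the range:

    import org.scalacheck.Gen

    val weighted: Gen[Int] = Gen.chooseNum(-1000, 1000, 42)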
-
-  /** Takes a function and returns a generator that generates arbitrary
-   *  results of that function by feeding it with arbitrarily generated input
-   *  parameters. */
-  def resultOf[T,R](f: T => R)(implicit a: Arbitrary[T]): Gen[R] =
-    arbitrary[T] map f
-
-  /** Takes a function and returns a generator that generates arbitrary
-   *  results of that function by feeding it with arbitrarily generated input
-   *  parameters. */
-  def resultOf[T1,T2,R](f: (T1,T2) => R)(implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2]
-  ): Gen[R] = arbitrary[T1] flatMap { t => resultOf(f(t, _:T2)) }
-
-  /** Takes a function and returns a generator that generates arbitrary
-   *  results of that function by feeding it with arbitrarily generated input
-   *  parameters. */
-  def resultOf[T1,T2,T3,R](f: (T1,T2,T3) => R)(implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3]
-  ): Gen[R] = arbitrary[T1] flatMap { t => resultOf(f(t, _:T2, _:T3)) }
-
-  /** Takes a function and returns a generator that generates arbitrary
-   *  results of that function by feeding it with arbitrarily generated input
-   *  parameters. */
-  def resultOf[T1,T2,T3,T4,R](f: (T1,T2,T3,T4) => R)(implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4]
-  ): Gen[R] = arbitrary[T1] flatMap {
-    t => resultOf(f(t, _:T2, _:T3, _:T4))
-  }
-
-  /** Takes a function and returns a generator that generates arbitrary
-   *  results of that function by feeding it with arbitrarily generated input
-   *  parameters. */
-  def resultOf[T1,T2,T3,T4,T5,R](f: (T1,T2,T3,T4,T5) => R)(implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
-    a5: Arbitrary[T5]
-  ): Gen[R] = arbitrary[T1] flatMap {
-    t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5))
-  }
-
-  /** Takes a function and returns a generator that generates arbitrary
-   *  results of that function by feeding it with arbitrarily generated input
-   *  parameters. */
-  def resultOf[T1,T2,T3,T4,T5,T6,R](
-    f: (T1,T2,T3,T4,T5,T6) => R)(implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3],
-    a4: Arbitrary[T4], a5: Arbitrary[T5], a6: Arbitrary[T6]
-  ): Gen[R] = arbitrary[T1] flatMap {
-    t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6))
-  }
-
-  /** Takes a function and returns a generator that generates arbitrary
-   *  results of that function by feeding it with arbitrarily generated input
-   *  parameters. */
-  def resultOf[T1,T2,T3,T4,T5,T6,T7,R](
-    f: (T1,T2,T3,T4,T5,T6,T7) => R)(implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3],
-    a4: Arbitrary[T4], a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7]
-  ): Gen[R] = arbitrary[T1] flatMap {
-    t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7))
-  }
-
-  /** Takes a function and returns a generator that generates arbitrary
-   *  results of that function by feeding it with arbitrarily generated input
-   *  parameters. */
-  def resultOf[T1,T2,T3,T4,T5,T6,T7,T8,R](
-    f: (T1,T2,T3,T4,T5,T6,T7,T8) => R)(implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
-    a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8]
-  ): Gen[R] = arbitrary[T1] flatMap {
-    t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8))
-  }
-
-  /** Takes a function and returns a generator that generates arbitrary
-   *  results of that function by feeding it with arbitrarily generated input
-   *  parameters. */
-  def resultOf[T1,T2,T3,T4,T5,T6,T7,T8,T9,R](
-    f: (T1,T2,T3,T4,T5,T6,T7,T8,T9) => R)(implicit
-    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
-    a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8],
-    a9: Arbitrary[T9]
-  ): Gen[R] = arbitrary[T1] flatMap {
-    t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8, _:T9))
-  }
-
-}
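Pulling a few of the combinators above together, a sketch of a composite generator; the Person case class is invented for illustration:

    import org.scalacheck.Gen
    import org.scalacheck.Gen.{identifier, choose, listOf}

    case class Person(name: String, age: Int)

    val genPerson: Gen[Person] = for {
      name <- identifier
      age  <- choose(0, 120)
    } yield Person(name, age)

    val genPeople: Gen[List[Person]] = listOf(genPerson)

    // A similar generator via resultOf (arbitrary name and age, not range-limited):
    val genPerson2: Gen[Person] = Gen.resultOf(Person.apply _)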
diff --git a/src/scalacheck/org/scalacheck/Pretty.scala b/src/scalacheck/org/scalacheck/Pretty.scala
deleted file mode 100644
index eeb5936..0000000
--- a/src/scalacheck/org/scalacheck/Pretty.scala
+++ /dev/null
@@ -1,127 +0,0 @@
-/*-------------------------------------------------------------------------*\
-**  ScalaCheck                                                             **
-**  Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved.          **
-**  http://www.scalacheck.org                                              **
-**                                                                         **
-**  This software is released under the terms of the Revised BSD License.  **
-**  There is NO WARRANTY. See the file LICENSE for the full text.          **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import math.round
-
-
-sealed trait Pretty {
-  def apply(prms: Pretty.Params): String
-
-  def map(f: String => String) = Pretty(prms => f(Pretty.this(prms)))
-
-  def flatMap(f: String => Pretty) = Pretty(prms => f(Pretty.this(prms))(prms))
-}
-
-object Pretty {
-
-  case class Params(verbosity: Int)
-
-  val defaultParams = Params(0)
-
-  def apply(f: Params => String) = new Pretty { def apply(p: Params) = f(p) }
-
-  def pretty[T <% Pretty](t: T, prms: Params): String = t(prms)
-
-  def pretty[T <% Pretty](t: T): String = t(defaultParams)
-
-  implicit def strBreak(s1: String) = new {
-    def /(s2: String) = if(s2 == "") s1 else s1+"\n"+s2
-  }
-
-  def pad(s: String, c: Char, length: Int) =
-    if(s.length >= length) s
-    else s + List.fill(length-s.length)(c).mkString
-
-  def break(s: String, lead: String, length: Int): String =
-    if(s.length <= length) s
-    else s.substring(0, length) / break(lead+s.substring(length), lead, length)
-
-  def format(s: String, lead: String, trail: String, width: Int) =
-    s.lines.map(l => break(lead+l+trail, "  ", width)).mkString("\n")
-
-  implicit def prettyAny(t: Any) = Pretty { p => t.toString }
-
-  implicit def prettyString(t: String) = Pretty { p => "\""++t++"\"" }
-
-  implicit def prettyList(l: List[Any]) = Pretty { p =>
-    l.map("\""+_+"\"").mkString("List(", ", ", ")")
-  }
-
-  implicit def prettyThrowable(e: Throwable) = Pretty { prms =>
-    val strs = e.getStackTrace.map { st =>
-      import st._
-      getClassName+"."+getMethodName + "("+getFileName+":"+getLineNumber+")"
-    }
-
-    val strs2 =
-      if(prms.verbosity <= 0) Array[String]()
-      else if(prms.verbosity <= 1) strs.take(5)
-      else strs
-
-    e.getClass.getName + ": " + e.getMessage / strs2.mkString("\n")
-  }
-
-  implicit def prettyArgs(args: List[Arg[Any]]): Pretty = Pretty { prms =>
-    if(args.isEmpty) "" else {
-      for((a,i) <- args.zipWithIndex) yield {
-        val l = if(a.label == "") "ARG_"+i else a.label
-        val s =
-          if(a.shrinks == 0 || prms.verbosity <= 1) ""
-          else " (orig arg: "+a.prettyOrigArg(prms)+")"
-
-        "> "+l+": "+a.prettyArg(prms)+""+s
-      }
-    }.mkString("\n")
-  }
-
-  implicit def prettyFreqMap(fm: Prop.FM) = Pretty { prms =>
-    if(fm.total == 0) ""
-    else {
-      "> Collected test data: " / {
-        for {
-          (xs,r) <- fm.getRatios
-          ys = xs - ()
-          if !ys.isEmpty
-        } yield round(r*100)+"% " + ys.mkString(", ")
-      }.mkString("\n")
-    }
-  }
-
-  implicit def prettyTestRes(res: Test.Result) = Pretty { prms =>
-    def labels(ls: scala.collection.immutable.Set[String]) =
-      if(ls.isEmpty) ""
-      else "> Labels of failing property: " / ls.mkString("\n")
-    val s = res.status match {
-      case Test.Proved(args) => "OK, proved property."/pretty(args,prms)
-      case Test.Passed => "OK, passed "+res.succeeded+" tests."
-      case Test.Failed(args, l) =>
-        "Falsified after "+res.succeeded+" passed tests."/labels(l)/pretty(args,prms)
-      case Test.Exhausted =>
-        "Gave up after only "+res.succeeded+" passed tests. " +
-        res.discarded+" tests were discarded."
-      case Test.PropException(args,e,l) =>
-        "Exception raised on property evaluation."/labels(l)/pretty(args,prms)/
-        "> Exception: "+pretty(e,prms)
-      case Test.GenException(e) =>
-        "Exception raised on argument generation."/
-        "> Exception: "+pretty(e,prms)
-    }
-    val t = if(prms.verbosity <= 1) "" else "Elapsed time: "+prettyTime(res.time)
-    s/t/pretty(res.freqMap,prms)
-  }
-
-  def prettyTime(millis: Long): String = {
-    val min = millis/(60*1000)
-    val sec = (millis-(60*1000*min)) / 1000d
-    if(min <= 0) "%.3f sec ".format(sec)
-    else "%d min %.3f sec ".format(min, sec)
-  }
-}
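The formatting helpers above are what the console reporter leans on; a small sketch:

    import org.scalacheck.Pretty

    println(Pretty.prettyTime(125000))                               // "2 min 5.000 sec "
    println(Pretty.format("some long failure message", "> ", "", 25))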
diff --git a/src/scalacheck/org/scalacheck/Prop.scala b/src/scalacheck/org/scalacheck/Prop.scala
deleted file mode 100644
index dfd85a8..0000000
--- a/src/scalacheck/org/scalacheck/Prop.scala
+++ /dev/null
@@ -1,763 +0,0 @@
-/*-------------------------------------------------------------------------*\
-**  ScalaCheck                                                             **
-**  Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved.          **
-**  http://www.scalacheck.org                                              **
-**                                                                         **
-**  This software is released under the terms of the Revised BSD License.  **
-**  There is NO WARRANTY. See the file LICENSE for the full text.          **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.{FreqMap,Buildable}
-import scala.collection._
-import scala.annotation.tailrec
-
-/** A property is a generator that generates a property result */
-trait Prop {
-
-  import Prop.{Result,Params,Proof,True,False,Exception,Undecided,provedToTrue}
-  import Test.cmdLineParser.{Success, NoSuccess}
-  import Result.merge
-
-  def apply(prms: Params): Result
-
-  def map(f: Result => Result): Prop = Prop(prms => f(this(prms)))
-
-  def flatMap(f: Result => Prop): Prop = Prop(prms => f(this(prms))(prms))
-
-  def combine(p: Prop)(f: (Result, Result) => Result) =
-    for(r1 <- this; r2 <- p) yield f(r1,r2)
-
-  /** Convenience method that checks this property with the given parameters
-   *  and reports the result on the console. If you need to get the results
-   *  from the test use the <code>check</code> methods in <code>Test</code>
-   *  instead. */
-  def check(prms: Test.Params): Unit = Test.check(
-    prms copy (testCallback = ConsoleReporter(1) chain prms.testCallback), this
-  )
-
-  /** Convenience method that checks this property and reports the
-   *  result on the console. If you need to get the results from the test use
-   *  the <code>check</code> methods in <code>Test</code> instead. */
-  def check: Unit = check(Test.Params())
-
-  /** The logic for main, separated out to make it easier to
-   *  avoid System.exit calls.  Returns exit code.
-   */
-  def mainRunner(args: Array[String]): Int = {
-    Test.cmdLineParser.parseParams(args) match {
-      case Success(params, _) =>
-        if (Test.check(params, this).passed) 0
-        else 1
-      case e: NoSuccess =>
-        println("Incorrect options:"+"\n"+e+"\n")
-        Test.cmdLineParser.printHelp
-        -1
-    }
-  }
-
-  /** Whether main should call System.exit with an exit code.
-   *  Defaults to false; override to change.
-   */
-  def mainCallsExit = false
-
-  /** Convenience method that makes it possible to use this property
-   *  as an application that checks itself on execution */
-  def main(args: Array[String]): Unit = {
-    val code = mainRunner(args)
-    if (mainCallsExit)
-      System exit code
-  }
-
-  /** Returns a new property that holds if and only if both this
-   *  and the given property hold. If one of the properties doesn't
-   *  generate a result, the new property will generate false.  */
-  def &&(p: Prop) = combine(p)(_ && _)
-
-  /** Returns a new property that holds if either this
-   *  or the given property (or both) hold.  */
-  def ||(p: Prop) = combine(p)(_ || _)
-
-  /** Returns a new property that holds if and only if both this
-   *  and the given property hold. If one of the properties doesn't
-   *  generate a result, the new property will generate the same result
-   *  as the other property.  */
-  def ++(p: Prop): Prop = combine(p)(_ ++ _)
-
-  /** Combines two properties through implication */
-  def ==>(p: => Prop): Prop = flatMap { r1 =>
-    if(r1.proved) p map { r2 => merge(r1,r2,r2.status) }
-    else if(r1.success) p map { r2 => provedToTrue(merge(r1,r2,r2.status)) }
-    else Prop(r1.copy(status = Undecided))
-  }
-
-  /** Returns a new property that holds if and only if both this
-   *  and the given property generate a result with the exact
-   *  same status. Note that this means that if one of the properties is
-   *  proved, and the other one passed, then the resulting property
-   *  will fail. */
-  def ==(p: Prop) = this.flatMap { r1 =>
-    p.map { r2 =>
-      Result.merge(r1, r2, if(r1.status == r2.status) True else False)
-    }
-  }
-
-  override def toString = "Prop"
-
-  /** Put a label on the property to make test reports clearer */
-  def label(l: String) = map(_.label(l))
-
-  /** Put a label on the property to make test reports clearer */
-  def :|(l: String) = label(l)
-
-  /** Put a label on the property to make test reports clearer */
-  def |:(l: String) = label(l)
-
-  /** Put a label on the property to make test reports clearer */
-  def :|(l: Symbol) = label(l.toString.drop(1))
-
-  /** Put a label on the property to make test reports clearer */
-  def |:(l: Symbol) = label(l.toString.drop(1))
-
-}
-
-object Prop {
-
-  import Gen.{value, fail, frequency, oneOf}
-  import Arbitrary._
-  import Shrink._
-
-
-  // Types
-
-  type Args = List[Arg[Any]]
-  type FM = FreqMap[immutable.Set[Any]]
-
-  /** Property parameters */
-  case class Params(val genPrms: Gen.Params, val freqMap: FM)
-
-  object Result {
-    def apply(st: Status) = new Result(
-      st,
-      Nil,
-      immutable.Set.empty[Any],
-      immutable.Set.empty[String]
-    )
-
-    def merge(x: Result, y: Result, status: Status) = new Result(
-      status,
-      x.args ++ y.args,
-      (x.collected.asInstanceOf[Set[AnyRef]] ++ y.collected).asInstanceOf[immutable.Set[Any]],
-      x.labels ++ y.labels
-    )
-  }
-
-  /** The result of evaluating a property */
-  case class Result(
-    status: Status,
-    args: Args,
-    collected: immutable.Set[Any],
-    labels: immutable.Set[String]
-  ) {
-    def success = status match {
-      case True => true
-      case Proof => true
-      case _ => false
-    }
-
-    def failure = status match {
-      case False => true
-      case Exception(_) => true
-      case _ => false
-    }
-
-    def proved = status == Proof
-
-    def addArg(a: Arg[Any]) = copy(args = a::args)
-
-    def collect(x: Any) = copy(collected = collected+x)
-
-    def label(l: String) = copy(labels = labels+l)
-
-    import Result.merge
-
-    def &&(r: Result) = (this.status, r.status) match {
-      case (Exception(_),_) => this
-      case (_,Exception(_)) => r
-
-      case (False,_) => this
-      case (_,False) => r
-
-      case (Undecided,_) => this
-      case (_,Undecided) => r
-
-      case (_,Proof) => merge(this, r, this.status)
-      case (Proof,_) => merge(this, r, r.status)
-
-      case (True,True) => merge(this, r, True)
-    }
-
-    def ||(r: Result) = (this.status, r.status) match {
-      case (Exception(_),_) => this
-      case (_,Exception(_)) => r
-
-      case (False,False) => merge(this, r, False)
-      case (False,_) => r
-      case (_,False) => this
-
-      case (Proof,_) => this
-      case (_,Proof) => r
-
-      case (True,_) => this
-      case (_,True) => r
-
-      case (Undecided,Undecided) => merge(this, r, Undecided)
-    }
-
-    def ++(r: Result) = (this.status, r.status) match {
-      case (Exception(_),_) => this
-      case (_,Exception(_)) => r
-
-      case (_, Undecided) => this
-      case (Undecided, _) => r
-
-      case (_, Proof) => this
-      case (Proof, _) => r
-
-      case (_, True) => this
-      case (True, _) => r
-
-      case (False, _) => this
-      case (_, False) => r
-    }
-
-    def ==>(r: Result) = (this.status, r.status) match {
-      case (Exception(_),_) => this
-      case (_,Exception(_)) => r
-
-      case (False,_) => merge(this, r, Undecided)
-
-      case (Undecided,_) => this
-
-      case (Proof,_) => merge(this, r, r.status)
-      case (True,_) => merge(this, r, r.status)
-    }
-
-  }
-
-  sealed trait Status
-
-  /** The property was proved */
-  case object Proof extends Status
-
-  /** The property was true */
-  case object True extends Status
-
-  /** The property was false */
-  case object False extends Status
-
-  /** The property could not be falsified or proved */
-  case object Undecided extends Status
-
-  /** Evaluating the property raised an exception */
-  sealed case class Exception(e: Throwable) extends Status {
-    override def equals(o: Any) = o match {
-      case Exception(_) => true
-      case _ => false
-    }
-  }
-
-  def apply(f: Params => Result): Prop = new Prop {
-    def apply(prms: Params) = f(prms)
-  }
-
-  def apply(r: Result): Prop = Prop(prms => r)
-
-
-  // Implicit defs
-
-  class ExtendedAny[T <% Pretty](x: => T) {
-    def imply(f: PartialFunction[T,Prop]) = Prop.imply(x,f)
-    def iff(f: PartialFunction[T,Prop]) = Prop.iff(x,f)
-    def throws[U <: Throwable](c: Class[U]) = Prop.throws(x, c)
-    def ?=(y: T) = Prop.?=(x, y)
-    def =?(y: T) = Prop.=?(x, y)
-  }
-
-  implicit def extendedAny[T <% Pretty](x: => T) = new ExtendedAny[T](x)
-
-  implicit def propBoolean(b: Boolean): Prop = if(b) proved else falsified
-
-
-  // Private support functions
-
-  private def provedToTrue(r: Result) = r.status match {
-    case Proof => new Result(True, r.args, r.collected, r.labels)
-    case _ => r
-  }
-
-
-  // Property combinators
-
-  /** A property that never is proved or falsified */
-  lazy val undecided = Prop(Result(Undecided))
-
-  /** A property that always is false */
-  lazy val falsified = Prop(Result(False))
-
-  /** A property that always is proved */
-  lazy val proved = Prop(Result(Proof))
-
-  /** A property that always is passed */
-  lazy val passed = Prop(Result(True))
-
-  /** A property that denotes an exception */
-  def exception(e: Throwable): Prop = Prop(Result(Exception(e)))
-
-  /** A property that denotes an exception */
-  lazy val exception: Prop = exception(null)
-
-  def ?=[T](x: T, y: T)(implicit pp: T => Pretty): Prop =
-    if(x == y) proved else falsified :| {
-      val exp = Pretty.pretty[T](y, Pretty.Params(0))
-      val act = Pretty.pretty[T](x, Pretty.Params(0))
-      "Expected "+exp+" but got "+act
-    }
-
-  def =?[T](x: T, y: T)(implicit pp: T => Pretty): Prop = ?=(y, x)
-
-  /** A property that depends on the generator size */
-  def sizedProp(f: Int => Prop): Prop = Prop { prms =>
-    // provedToTrue since if the property is proved for
-    // one size, it shouldn't be regarded as proved for
-    // all sizes.
-    provedToTrue(f(prms.genPrms.size)(prms))
-  }
-
-  /** Implication with several conditions */
-  def imply[T](x: T, f: PartialFunction[T,Prop]): Prop =
-    secure(if(f.isDefinedAt(x)) f(x) else undecided)
-
-  /** The property holds only if the given partial function is defined at
-   *  <code>x</code> and the property it returns holds */
-  def iff[T](x: T, f: PartialFunction[T,Prop]): Prop =
-    secure(if(f.isDefinedAt(x)) f(x) else falsified)
-
-  /** Combines properties into one, which is true if and only if all the
-   *  properties are true */
-  def all(ps: Prop*) = if(ps.isEmpty) proved else Prop(prms =>
-    ps.map(p => p(prms)).reduceLeft(_ && _)
-  )
-
-  /** Combines properties into one, which is true if at least one of the
-   *  properties is true */
-  def atLeastOne(ps: Prop*) = if(ps.isEmpty) falsified else Prop(prms =>
-    ps.map(p => p(prms)).reduceLeft(_ || _)
-  )
-
-  /** A property that holds if at least one of the given generators
-   *  fails generating a value */
-  def someFailing[T](gs: Seq[Gen[T]]) = atLeastOne(gs.map(_ == fail):_*)
-
-  /** A property that holds iff none of the given generators
-   *  fails generating a value */
-  def noneFailing[T](gs: Seq[Gen[T]]) = all(gs.map(_ !== fail):_*)
-
-  /** A property that holds if the given statement throws an exception
-   *  of the specified type */
-  def throws[T <: Throwable](x: => Any, c: Class[T]) =
-    try { x; falsified } catch { case e if c.isInstance(e) => proved }
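A one-line sketch of the combinator above:

    import org.scalacheck.Prop

    val zero = 0
    val divByZero = Prop.throws(1 / zero, classOf[ArithmeticException])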
-
-  /** Collect data for presentation in test report */
-  def collect[T, P <% Prop](f: T => P): T => Prop = t => Prop { prms =>
-    val prop = f(t)
-    prop(prms).collect(t)
-  }
-
-  /** Collect data for presentation in test report */
-  def collect[T](t: T)(prop: Prop) = Prop { prms =>
-    prop(prms).collect(t)
-  }
-
-  /** Collect data for presentation in test report */
-  def classify(c: => Boolean, ifTrue: Any)(prop: Prop): Prop =
-    if(c) collect(ifTrue)(prop) else collect(())(prop)
-
-  /** Collect data for presentation in test report */
-  def classify(c: => Boolean, ifTrue: Any, ifFalse: Any)(prop: Prop): Prop =
-    if(c) collect(ifTrue)(prop) else collect(ifFalse)(prop)
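A sketch of tagging test cases so the report shows how the generated inputs were distributed:

    import org.scalacheck.Prop.{forAll, classify, propBoolean}

    val tagged = forAll { (n: Int) =>
      classify(n < 0, "negative", "non-negative") {
        n + 0 == n
      }
    }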
-
-  /** Wraps and protects a property */
-  def secure[P <% Prop](p: => P): Prop =
-    try { p: Prop } catch { case e => exception(e) }
-
-  /** Existential quantifier, using the Arbitrary instance for the argument type. */
-  def exists[A,P](f: A => P)(implicit
-    pv: P => Prop,
-    pp: A => Pretty,
-    aa: Arbitrary[A]
-  ): Prop = exists(aa.arbitrary)(f)
-
-  /** Existential quantifier for an explicit generator. */
-  def exists[A,P](g: Gen[A])(f: A => P)(implicit
-    pv: P => Prop,
-    pp: A => Pretty
-  ): Prop = Prop { prms =>
-    g(prms.genPrms) match {
-      case None => undecided(prms)
-      case Some(x) =>
-        val p = secure(f(x))
-        val r = p(prms).addArg(Arg(g.label,x,0,x))
-        r.status match {
-          case True => new Result(Proof, r.args, r.collected, r.labels)
-          case False => new Result(Undecided, r.args, r.collected, r.labels)
-          case _ => r
-        }
-    }
-  }
-
-  /** Universal quantifier for an explicit generator. Does not shrink failed
-   *  test cases. */
-  def forAllNoShrink[T1,P](
-    g1: Gen[T1])(
-    f: T1 => P)(implicit
-    pv: P => Prop,
-    pp1: T1 => Pretty
-  ): Prop = Prop { prms =>
-    g1(prms.genPrms) match {
-      case None => undecided(prms)
-      case Some(x) =>
-        val p = secure(f(x))
-        provedToTrue(p(prms)).addArg(Arg(g1.label,x,0,x))
-    }
-  }
-
-  /** Universal quantifier for two explicit generators.
-   *  Does not shrink failed test cases. */
-  def forAllNoShrink[T1,T2,P](
-    g1: Gen[T1], g2: Gen[T2])(
-    f: (T1,T2) => P)(implicit
-    p: P => Prop,
-    pp1: T1 => Pretty,
-    pp2: T2 => Pretty
-  ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2)(f(t, _:T2)))
-
-  /** Universal quantifier for three explicit generators.
-   *  Does not shrink failed test cases. */
-  def forAllNoShrink[T1,T2,T3,P](
-    g1: Gen[T1], g2: Gen[T2], g3: Gen[T3])(
-    f: (T1,T2,T3) => P)(implicit
-    p: P => Prop,
-    pp1: T1 => Pretty,
-    pp2: T2 => Pretty,
-    pp3: T3 => Pretty
-  ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3)(f(t, _:T2, _:T3)))
-
-  /** Universal quantifier for four explicit generators.
-   *  Does not shrink failed test cases. */
-  def forAllNoShrink[T1,T2,T3,T4,P](
-    g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4])(
-    f: (T1,T2,T3,T4) => P)(implicit
-    p: P => Prop,
-    pp1: T1 => Pretty,
-    pp2: T2 => Pretty,
-    pp3: T3 => Pretty,
-    pp4: T4 => Pretty
-  ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4)(f(t, _:T2, _:T3, _:T4)))
-
-  /** Universal quantifier for five explicit generators.
-   *  Does not shrink failed test cases. */
-  def forAllNoShrink[T1,T2,T3,T4,T5,P](
-    g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5])(
-    f: (T1,T2,T3,T4,T5) => P)(implicit
-    p: P => Prop,
-    pp1: T1 => Pretty,
-    pp2: T2 => Pretty,
-    pp3: T3 => Pretty,
-    pp4: T4 => Pretty,
-    pp5: T5 => Pretty
-  ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5)(f(t, _:T2, _:T3, _:T4, _:T5)))
-
-  /** Universal quantifier for six explicit generators.
-   *  Does not shrink failed test cases. */
-  def forAllNoShrink[T1,T2,T3,T4,T5,T6,P](
-    g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6])(
-    f: (T1,T2,T3,T4,T5,T6) => P)(implicit
-    p: P => Prop,
-    pp1: T1 => Pretty,
-    pp2: T2 => Pretty,
-    pp3: T3 => Pretty,
-    pp4: T4 => Pretty,
-    pp5: T5 => Pretty,
-    pp6: T6 => Pretty
-  ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6)))
-
-  /** Universal quantifier for seven explicit generators.
-   *  Does not shrink failed test cases. */
-  def forAllNoShrink[T1,T2,T3,T4,T5,T6,T7,P](
-    g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7])(
-    f: (T1,T2,T3,T4,T5,T6,T7) => P)(implicit
-    p: P => Prop,
-    pp1: T1 => Pretty,
-    pp2: T2 => Pretty,
-    pp3: T3 => Pretty,
-    pp4: T4 => Pretty,
-    pp5: T5 => Pretty,
-    pp6: T6 => Pretty,
-    pp7: T7 => Pretty
-  ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6,g7)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7)))
-
-  /** Universal quantifier for eight explicit generators.
-   *  Does not shrink failed test cases. */
-  def forAllNoShrink[T1,T2,T3,T4,T5,T6,T7,T8,P](
-    g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8])(
-    f: (T1,T2,T3,T4,T5,T6,T7,T8) => P)(implicit
-    p: P => Prop,
-    pp1: T1 => Pretty,
-    pp2: T2 => Pretty,
-    pp3: T3 => Pretty,
-    pp4: T4 => Pretty,
-    pp5: T5 => Pretty,
-    pp6: T6 => Pretty,
-    pp7: T7 => Pretty,
-    pp8: T8 => Pretty
-  ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6,g7,g8)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8)))
-
-  /** Universal quantifier for an explicit generator. Shrinks failed arguments
-   *  with the given shrink function */
-  def forAllShrink[T <% Pretty, P <% Prop](g: Gen[T],
-    shrink: T => Stream[T])(f: T => P
-  ): Prop = Prop { prms =>
-
-    /** Returns the first failed result in Left or success in Right */
-    def getFirstFailure(xs: Stream[T]): Either[(T,Result),(T,Result)] = {
-      assert(!xs.isEmpty, "Stream cannot be empty")
-      val results = xs.map { x =>
-        val p = secure(f(x))
-        (x, provedToTrue(p(prms)))
-      }
-      results.dropWhile(!_._2.failure).headOption match {
-        case None => Right(results.head)
-        case Some(xr) => Left(xr)
-      }
-    }
-
-    def shrinker(x: T, r: Result, shrinks: Int, orig: T): Result = {
-      val xs = shrink(x)
-      val res = r.addArg(Arg(g.label,x,shrinks,orig))
-      if(xs.isEmpty) res else getFirstFailure(xs) match {
-        case Right(_) => res
-        case Left((x2,r2)) => shrinker(x2, r2, shrinks+1, orig)
-      }
-    }
-
-    g(prms.genPrms) match {
-      case None => undecided(prms)
-      case Some(x) => getFirstFailure(Stream.cons(x, Stream.empty)) match {
-        case Right((x,r)) => r.addArg(Arg(g.label,x,0,x))
-        case Left((x,r)) => shrinker(x,r,0,x)
-      }
-    }
-
-  }
-
-  /** Universal quantifier for an explicit generator. Shrinks failed arguments
-   *  with the default shrink function for the type */
-  def forAll[T1,P](
-    g1: Gen[T1])(
-    f: T1 => P)(implicit
-    p: P => Prop,
-    s1: Shrink[T1],
-    pp1: T1 => Pretty
-  ): Prop = forAllShrink(g1, shrink[T1])(f)
-
-  /** Universal quantifier for two explicit generators. Shrinks failed arguments
-   *  with the default shrink function for the type */
-  def forAll[T1,T2,P](
-    g1: Gen[T1], g2: Gen[T2])(
-    f: (T1,T2) => P)(implicit
-    p: P => Prop,
-    s1: Shrink[T1], pp1: T1 => Pretty,
-    s2: Shrink[T2], pp2: T2 => Pretty
-  ): Prop = forAll(g1)(t => forAll(g2)(f(t, _:T2)))
-
-  /** Universal quantifier for three explicit generators. Shrinks failed arguments
-   *  with the default shrink function for the type */
-  def forAll[T1,T2,T3,P](
-    g1: Gen[T1], g2: Gen[T2], g3: Gen[T3])(
-    f: (T1,T2,T3) => P)(implicit
-    p: P => Prop,
-    s1: Shrink[T1], pp1: T1 => Pretty,
-    s2: Shrink[T2], pp2: T2 => Pretty,
-    s3: Shrink[T3], pp3: T3 => Pretty
-  ): Prop = forAll(g1)(t => forAll(g2,g3)(f(t, _:T2, _:T3)))
-
-  /** Universal quantifier for four explicit generators. Shrinks failed arguments
-   *  with the default shrink function for the type */
-  def forAll[T1,T2,T3,T4,P](
-    g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4])(
-    f: (T1,T2,T3,T4) => P)(implicit
-    p: P => Prop,
-    s1: Shrink[T1], pp1: T1 => Pretty,
-    s2: Shrink[T2], pp2: T2 => Pretty,
-    s3: Shrink[T3], pp3: T3 => Pretty,
-    s4: Shrink[T4], pp4: T4 => Pretty
-  ): Prop = forAll(g1)(t => forAll(g2,g3,g4)(f(t, _:T2, _:T3, _:T4)))
-
-  /** Universal quantifier for five explicit generators. Shrinks failed arguments
-   *  with the default shrink function for the type */
-  def forAll[T1,T2,T3,T4,T5,P](
-    g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5])(
-    f: (T1,T2,T3,T4,T5) => P)(implicit
-    p: P => Prop,
-    s1: Shrink[T1], pp1: T1 => Pretty,
-    s2: Shrink[T2], pp2: T2 => Pretty,
-    s3: Shrink[T3], pp3: T3 => Pretty,
-    s4: Shrink[T4], pp4: T4 => Pretty,
-    s5: Shrink[T5], pp5: T5 => Pretty
-  ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5)(f(t, _:T2, _:T3, _:T4, _:T5)))
-
-  /** Universal quantifier for six explicit generators. Shrinks failed arguments
-   *  with the default shrink function for the type */
-  def forAll[T1,T2,T3,T4,T5,T6,P](
-    g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6])(
-    f: (T1,T2,T3,T4,T5,T6) => P)(implicit
-    p: P => Prop,
-    s1: Shrink[T1], pp1: T1 => Pretty,
-    s2: Shrink[T2], pp2: T2 => Pretty,
-    s3: Shrink[T3], pp3: T3 => Pretty,
-    s4: Shrink[T4], pp4: T4 => Pretty,
-    s5: Shrink[T5], pp5: T5 => Pretty,
-    s6: Shrink[T6], pp6: T6 => Pretty
-  ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6)))
-
-  /** Universal quantifier for seven explicit generators. Shrinks failed arguments
-   *  with the default shrink function for the type */
-  def forAll[T1,T2,T3,T4,T5,T6,T7,P](
-    g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7])(
-    f: (T1,T2,T3,T4,T5,T6,T7) => P)(implicit
-    p: P => Prop,
-    s1: Shrink[T1], pp1: T1 => Pretty,
-    s2: Shrink[T2], pp2: T2 => Pretty,
-    s3: Shrink[T3], pp3: T3 => Pretty,
-    s4: Shrink[T4], pp4: T4 => Pretty,
-    s5: Shrink[T5], pp5: T5 => Pretty,
-    s6: Shrink[T6], pp6: T6 => Pretty,
-    s7: Shrink[T7], pp7: T7 => Pretty
-  ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6,g7)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7)))
-
-  /** Universal quantifier for eight explicit generators. Shrinks failed arguments
-   *  with the default shrink function for the type */
-  def forAll[T1,T2,T3,T4,T5,T6,T7,T8,P](
-    g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8])(
-    f: (T1,T2,T3,T4,T5,T6,T7,T8) => P)(implicit
-    p: P => Prop,
-    s1: Shrink[T1], pp1: T1 => Pretty,
-    s2: Shrink[T2], pp2: T2 => Pretty,
-    s3: Shrink[T3], pp3: T3 => Pretty,
-    s4: Shrink[T4], pp4: T4 => Pretty,
-    s5: Shrink[T5], pp5: T5 => Pretty,
-    s6: Shrink[T6], pp6: T6 => Pretty,
-    s7: Shrink[T7], pp7: T7 => Pretty,
-    s8: Shrink[T8], pp8: T8 => Pretty
-  ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6,g7,g8)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8)))
-
-  /** Converts a function into a universally quantified property */
-  def forAll[A1,P] (
-    f: A1 => P)(implicit
-    p: P => Prop,
-    a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty
-  ): Prop = forAllShrink(arbitrary[A1],shrink[A1])(f andThen p)
-
-  /** Converts a function into a universally quantified property */
-  def forAll[A1,A2,P] (
-    f: (A1,A2) => P)(implicit
-    p: P => Prop,
-    a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
-    a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty
-  ): Prop = forAll((a: A1) => forAll(f(a, _:A2)))
-
-  /** Converts a function into a universally quantified property */
-  def forAll[A1,A2,A3,P] (
-    f: (A1,A2,A3) => P)(implicit
-    p: P => Prop,
-    a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
-    a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
-    a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty
-  ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3)))
-
-  /** Converts a function into a universally quantified property */
-  def forAll[A1,A2,A3,A4,P] (
-    f: (A1,A2,A3,A4) => P)(implicit
-    p: P => Prop,
-    a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
-    a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
-    a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
-    a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty
-  ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4)))
-
-  /** Converts a function into a universally quantified property */
-  def forAll[A1,A2,A3,A4,A5,P] (
-    f: (A1,A2,A3,A4,A5) => P)(implicit
-    p: P => Prop,
-    a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
-    a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
-    a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
-    a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
-    a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty
-  ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5)))
-
-  /** Converts a function into a universally quantified property */
-  def forAll[A1,A2,A3,A4,A5,A6,P] (
-    f: (A1,A2,A3,A4,A5,A6) => P)(implicit
-    p: P => Prop,
-    a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
-    a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
-    a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
-    a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
-    a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
-    a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty
-  ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6)))
-
-  /** Converts a function into a universally quantified property */
-  def forAll[A1,A2,A3,A4,A5,A6,A7,P] (
-    f: (A1,A2,A3,A4,A5,A6,A7) => P)(implicit
-    p: P => Prop,
-    a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
-    a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
-    a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
-    a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
-    a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
-    a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty,
-    a7: Arbitrary[A7], s7: Shrink[A7], pp7: A7 => Pretty
-  ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7)))
-
-  /** Converts a function into a universally quantified property */
-  def forAll[A1,A2,A3,A4,A5,A6,A7,A8,P] (
-    f: (A1,A2,A3,A4,A5,A6,A7,A8) => P)(implicit
-    p: P => Prop,
-    a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
-    a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
-    a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
-    a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
-    a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
-    a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty,
-    a7: Arbitrary[A7], s7: Shrink[A7], pp7: A7 => Pretty,
-    a8: Arbitrary[A8], s8: Shrink[A8], pp8: A8 => Pretty
-  ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7, _:A8)))
-
-  /** Ensures that the property expression passed in completes within the given space of time. */
-  def within(maximumMs: Long)(wrappedProp: => Prop): Prop = new Prop {
-    @tailrec private def attempt(prms: Params, endTime: Long): Result = {
-      val result = wrappedProp.apply(prms)
-      if (System.currentTimeMillis > endTime) {
-        (if (result.failure) result else Result(False)).label("Timeout")
-      } else {
-        if (result.success) result
-        else attempt(prms, endTime)
-      }
-    }
-    def apply(prms: Params) = attempt(prms, System.currentTimeMillis + maximumMs)
-  }
-}
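
For orientation, a minimal sketch of how the Prop combinators deleted above were used by client code. The object and property names are illustrative, and Gen.choose is assumed from the bundled Gen module, which is not part of this hunk:

    import org.scalacheck.{Gen, Prop}

    object PropSketch {
      // forAll with an explicit generator; failed arguments are shrunk by default
      val nonNegative: Prop =
        Prop.forAll(Gen.choose(0, 100)) { n => n >= 0 }

      // forAll deriving its generator from the implicit Arbitrary[Int]
      val commutative: Prop =
        Prop.forAll { (m: Int, n: Int) => m + n == n + m }

      // within wraps a property so it must finish inside the given number of milliseconds
      val bounded: Prop = Prop.within(1000)(commutative)
    }
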
diff --git a/src/scalacheck/org/scalacheck/Properties.scala b/src/scalacheck/org/scalacheck/Properties.scala
deleted file mode 100644
index 2605923..0000000
--- a/src/scalacheck/org/scalacheck/Properties.scala
+++ /dev/null
@@ -1,82 +0,0 @@
-/*-------------------------------------------------------------------------*\
-**  ScalaCheck                                                             **
-**  Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved.          **
-**  http://www.scalacheck.org                                              **
-**                                                                         **
-**  This software is released under the terms of the Revised BSD License.  **
-**  There is NO WARRANTY. See the file LICENSE for the full text.          **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-/** Represents a collection of properties, with convenient methods
- *  for checking all properties at once. This class is itself a property, which
- *  holds if and only if all of the contained properties hold.
- *  <p>Properties are added in the following way:</p>
- *
- *  <p>
- *  <code>
- *  object MyProps extends Properties("MyProps") {
- *    property("myProp1") = forAll { (n:Int, m:Int) =>
- *      n+m == m+n
- *    }
- *
- *    property("myProp2") = ((0/1) throws classOf[ArithmeticException])
- *  }
- */
-class Properties(val name: String) extends Prop {
-
-  import Test.cmdLineParser.{Success, NoSuccess}
-
-  private val props = new scala.collection.mutable.ListBuffer[(String,Prop)]
-
-  /** Returns one property which holds if and only if all of the
-   *  properties in this property collection hold */
-  private def oneProperty: Prop = Prop.all((properties map (_._2)):_*)
-
-  /** Returns all properties of this collection in a list of name/property
-   *  pairs.  */
-  def properties: Seq[(String,Prop)] = props
-
-  def apply(p: Prop.Params) = oneProperty(p)
-
-  /** Convenience method that checks the properties with the given parameters
-   *  and reports the result on the console. If you need to get the results
-   *  from the test use the <code>check</code> methods in <code>Test</code>
-   *  instead. */
-  override def check(prms: Test.Params): Unit = Test.checkProperties(
-    prms copy (testCallback = ConsoleReporter(1) chain prms.testCallback), this
-  )
-
-  /** Convenience method that checks the properties and reports the
-   *  result on the console. If you need to get the results from the test use
-   *  the <code>check</code> methods in <code>Test</code> instead. */
-  override def check: Unit = check(Test.Params())
-
-  /** The logic for main, separated out to make it easier to
-   *  avoid System.exit calls.  Returns exit code.
-   */
-  override def mainRunner(args: Array[String]): Int = {
-    Test.cmdLineParser.parseParams(args) match {
-      case Success(params, _) =>
-        val res = Test.checkProperties(params, this)
-        val failed = res.filter(!_._2.passed).size
-        failed
-      case e: NoSuccess =>
-        println("Incorrect options:"+"\n"+e+"\n")
-        Test.cmdLineParser.printHelp
-        -1
-    }
-  }
-
-  /** Adds all properties from another property collection to this one. */
-  def include(ps: Properties) = for((n,p) <- ps.properties) property(n) = p
-
-  /** Used for specifying properties. Usage:
-   *  <code>property("myProp") = ...</code> */
-  class PropertySpecifier() {
-    def update(propName: String, p: Prop) = props += ((name+"."+propName, p))
-  }
-
-  lazy val property = new PropertySpecifier()
-}
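
A small sketch of how a property collection based on the Properties class above is assembled and run; the object names here are made up for illustration:

    import org.scalacheck.{Prop, Properties}

    object IntSpec extends Properties("Int") {
      property("commutative") = Prop.forAll { (m: Int, n: Int) => m + n == n + m }
    }

    object AllSpecs extends Properties("All") {
      include(IntSpec)  // pulls in every property defined by IntSpec
    }

    // AllSpecs.check runs all included properties and reports to the console
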
diff --git a/src/scalacheck/org/scalacheck/Shrink.scala b/src/scalacheck/org/scalacheck/Shrink.scala
deleted file mode 100644
index ae15bd9..0000000
--- a/src/scalacheck/org/scalacheck/Shrink.scala
+++ /dev/null
@@ -1,208 +0,0 @@
-/*-------------------------------------------------------------------------*\
-**  ScalaCheck                                                             **
-**  Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved.          **
-**  http://www.scalacheck.org                                              **
-**                                                                         **
-**  This software is released under the terms of the Revised BSD License.  **
-**  There is NO WARRANTY. See the file LICENSE for the full text.          **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.Buildable
-import scala.collection.{ JavaConversions => jcl }
-
-sealed abstract class Shrink[T] {
-  def shrink(x: T): Stream[T]
-}
-
-object Shrink {
-
-  import Stream.{cons, empty}
-  import scala.collection._
-  import java.util.ArrayList
-
-  /** Interleaves two streams */
-  private def interleave[T](xs: Stream[T], ys: Stream[T]): Stream[T] =
-    if(xs.isEmpty) ys
-    else if(ys.isEmpty) xs
-    else Stream(xs.head, ys.head) append interleave(xs.tail, ys.tail)
-
-  /** Shrink instance factory */
-  def apply[T](s: T => Stream[T]): Shrink[T] = new Shrink[T] {
-    override def shrink(x: T) = s(x)
-  }
-
-  /** Shrink a value */
-  def shrink[T](x: T)(implicit s: Shrink[T]): Stream[T] = s.shrink(x)
-
-  /** Default shrink instance */
-  implicit def shrinkAny[T]: Shrink[T] = Shrink(x => empty)
-
-  /** Shrink instance of container */
-  implicit def shrinkContainer[C[_],T](implicit v: C[T] => Traversable[T], s: Shrink[T],
-    b: Buildable[T,C]
-  ): Shrink[C[T]] = Shrink { xs: C[T] =>
-
-    def removeChunks(n: Int, xs: Stream[T]): Stream[Stream[T]] =
-      if(xs.isEmpty) empty
-      else if(xs.tail.isEmpty) cons(empty, empty)
-      else {
-        val n1 = n / 2
-        val n2 = n - n1
-        lazy val xs1 = xs.take(n1)
-        lazy val xs2 = xs.drop(n1)
-        lazy val xs3 =
-          for(ys1 <- removeChunks(n1,xs1) if !ys1.isEmpty) yield ys1 append xs2
-        lazy val xs4 =
-          for(ys2 <- removeChunks(n2,xs2) if !ys2.isEmpty) yield xs1 append ys2
-
-        cons(xs1, cons(xs2, interleave(xs3,xs4)))
-      }
-
-    def shrinkOne(zs: Stream[T]): Stream[Stream[T]] =
-      if(zs.isEmpty) empty
-      else {
-        val x = zs.head
-        val xs = zs.tail
-        (for(y <- shrink(x)) yield cons(y,xs)) append
-        (for(ys <- shrinkOne(xs)) yield cons(x,ys))
-      }
-
-    val ys = v(xs)
-    val zs = ys.toStream
-    removeChunks(ys.size,zs).append(shrinkOne(zs)).map(b.fromIterable)
-
-  }
-
-  /** Shrink instance of integer */
-  implicit lazy val shrinkInt: Shrink[Int] = Shrink { n =>
-
-    def halfs(n: Int): Stream[Int] =
-      if(n == 0) empty else cons(n, halfs(n/2))
-
-    if(n == 0) empty else {
-      val ns = halfs(n/2).map(n - _)
-      cons(0, interleave(ns, ns.map(-1 * _)))
-    }
-  }
-
-  /** Shrink instance of String */
-  implicit lazy val shrinkString: Shrink[String] = Shrink { s =>
-    shrinkContainer[List,Char].shrink(s.toList).map(_.mkString)
-  }
-
-  /** Shrink instance of Option */
-  implicit def shrinkOption[T](implicit s: Shrink[T]): Shrink[Option[T]] =
-    Shrink {
-      case None    => empty
-      case Some(x) => cons(None, for(y <- shrink(x)) yield Some(y))
-    }
-
-  /** Shrink instance of 2-tuple */
-  implicit def shrinkTuple2[T1,T2](implicit
-    s1: Shrink[T1], s2: Shrink[T2]
-  ): Shrink[(T1,T2)] =
-    Shrink { case (t1,t2) =>
-      (for(x1 <- shrink(t1)) yield (x1, t2)) append
-      (for(x2 <- shrink(t2)) yield (t1, x2))
-    }
-
-  /** Shrink instance of 3-tuple */
-  implicit def shrinkTuple3[T1,T2,T3](implicit
-    s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3]
-  ): Shrink[(T1,T2,T3)] =
-    Shrink { case (t1,t2,t3) =>
-      (for(x1 <- shrink(t1)) yield (x1, t2, t3)) append
-      (for(x2 <- shrink(t2)) yield (t1, x2, t3)) append
-      (for(x3 <- shrink(t3)) yield (t1, t2, x3))
-    }
-
-  /** Shrink instance of 4-tuple */
-  implicit def shrinkTuple4[T1,T2,T3,T4](implicit
-    s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4]
-  ): Shrink[(T1,T2,T3,T4)] =
-    Shrink { case (t1,t2,t3,t4) =>
-      (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4)) append
-      (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4)) append
-      (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4)) append
-      (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4))
-    }
-
-  /** Shrink instance of 5-tuple */
-  implicit def shrinkTuple5[T1,T2,T3,T4,T5](implicit
-    s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
-    s5: Shrink[T5]
-  ): Shrink[(T1,T2,T3,T4,T5)] =
-    Shrink { case (t1,t2,t3,t4,t5) =>
-      (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5)) append
-      (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5)) append
-      (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5)) append
-      (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5)) append
-      (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5))
-    }
-
-  /** Shrink instance of 6-tuple */
-  implicit def shrinkTuple6[T1,T2,T3,T4,T5,T6](implicit
-    s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
-    s5: Shrink[T5], s6: Shrink[T6]
-  ): Shrink[(T1,T2,T3,T4,T5,T6)] =
-    Shrink { case (t1,t2,t3,t4,t5,t6) =>
-      (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6)) append
-      (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6)) append
-      (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6)) append
-      (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6)) append
-      (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6)) append
-      (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6))
-    }
-
-  /** Shrink instance of 7-tuple */
-  implicit def shrinkTuple7[T1,T2,T3,T4,T5,T6,T7](implicit
-    s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
-    s5: Shrink[T5], s6: Shrink[T6], s7: Shrink[T7]
-  ): Shrink[(T1,T2,T3,T4,T5,T6,T7)] =
-    Shrink { case (t1,t2,t3,t4,t5,t6,t7) =>
-      (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6, t7)) append
-      (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6, t7)) append
-      (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6, t7)) append
-      (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6, t7)) append
-      (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6, t7)) append
-      (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6, t7)) append
-      (for(x7 <- shrink(t7)) yield (t1, t2, t3, t4, t5, t6, x7))
-    }
-
-  /** Shrink instance of 8-tuple */
-  implicit def shrinkTuple8[T1,T2,T3,T4,T5,T6,T7,T8](implicit
-    s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
-    s5: Shrink[T5], s6: Shrink[T6], s7: Shrink[T7], s8: Shrink[T8]
-  ): Shrink[(T1,T2,T3,T4,T5,T6,T7,T8)] =
-    Shrink { case (t1,t2,t3,t4,t5,t6,t7,t8) =>
-      (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6, t7, t8)) append
-      (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6, t7, t8)) append
-      (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6, t7, t8)) append
-      (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6, t7, t8)) append
-      (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6, t7, t8)) append
-      (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6, t7, t8)) append
-      (for(x7 <- shrink(t7)) yield (t1, t2, t3, t4, t5, t6, x7, t8)) append
-      (for(x8 <- shrink(t8)) yield (t1, t2, t3, t4, t5, t6, t7, x8))
-    }
-
-  /** Shrink instance of 9-tuple */
-  implicit def shrinkTuple9[T1,T2,T3,T4,T5,T6,T7,T8,T9](implicit
-    s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
-    s5: Shrink[T5], s6: Shrink[T6], s7: Shrink[T7], s8: Shrink[T8],
-    s9: Shrink[T9]
-  ): Shrink[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] =
-    Shrink { case (t1,t2,t3,t4,t5,t6,t7,t8,t9) =>
-      (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6, t7, t8, t9)) append
-      (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6, t7, t8, t9)) append
-      (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6, t7, t8, t9)) append
-      (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6, t7, t8, t9)) append
-      (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6, t7, t8, t9)) append
-      (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6, t7, t8, t9)) append
-      (for(x7 <- shrink(t7)) yield (t1, t2, t3, t4, t5, t6, x7, t8, t9)) append
-      (for(x8 <- shrink(t8)) yield (t1, t2, t3, t4, t5, t6, t7, x8, t9)) append
-      (for(x9 <- shrink(t9)) yield (t1, t2, t3, t4, t5, t6, t7, t8, x9))
-    }
-
-}
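
The Shrink instances above are consulted implicitly by forAll and can also be used directly; a brief sketch, where the Age type is hypothetical:

    import org.scalacheck.Shrink
    import org.scalacheck.Shrink.shrink

    object ShrinkSketch {
      // The built-in Int instance shrinks toward zero
      val smallerInts: Stream[Int] = shrink(100)

      // A custom instance built with the Shrink factory, reusing the Int instance
      case class Age(years: Int)
      implicit val shrinkAge: Shrink[Age] =
        Shrink { a => shrink(a.years).map(Age(_)) }
    }
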
diff --git a/src/scalacheck/org/scalacheck/Test.scala b/src/scalacheck/org/scalacheck/Test.scala
deleted file mode 100644
index 4368184..0000000
--- a/src/scalacheck/org/scalacheck/Test.scala
+++ /dev/null
@@ -1,269 +0,0 @@
-/*-------------------------------------------------------------------------*\
-**  ScalaCheck                                                             **
-**  Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved.          **
-**  http://www.scalacheck.org                                              **
-**                                                                         **
-**  This software is released under the terms of the Revised BSD License.  **
-**  There is NO WARRANTY. See the file LICENSE for the full text.          **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-object Test {
-
-  import util.FreqMap
-  import scala.collection.immutable
-  import Prop.FM
-  import util.CmdLineParser
-
-  /** Test parameters */
-  case class Params(
-    minSuccessfulTests: Int = 100,
-
-    /** @deprecated Use maxDiscardRatio instead. */
-    @deprecated("Use maxDiscardRatio instead.", "1.10")
-    maxDiscardedTests: Int = -1,
-
-    minSize: Int = 0,
-    maxSize: Int = Gen.Params().size,
-    rng: java.util.Random = Gen.Params().rng,
-    workers: Int = 1,
-    testCallback: TestCallback = new TestCallback {},
-    maxDiscardRatio: Float = 5,
-    customClassLoader: Option[ClassLoader] = None
-  )
-
-  /** Test statistics */
-  case class Result(status: Status, succeeded: Int, discarded: Int, freqMap: FM, time: Long = 0) {
-    def passed = status match {
-      case Passed => true
-      case Proved(_) => true
-      case _ => false
-    }
-  }
-
-  /** Test status */
-  sealed trait Status
-
-  /** ScalaCheck found enough cases for which the property holds, so the
-   *  property is considered correct. (It is not proved correct, though). */
-  case object Passed extends Status
-
-  /** ScalaCheck managed to prove the property correct */
-  sealed case class Proved(args: Prop.Args) extends Status
-
-  /** The property was proved wrong with the given concrete arguments.  */
-  sealed case class Failed(args: Prop.Args, labels: Set[String]) extends Status
-
-  /** The property test was exhausted; it wasn't possible to generate enough

-   *  concrete arguments satisfying the preconditions to get enough passing
-   *  property evaluations. */
-  case object Exhausted extends Status
-
-  /** An exception was raised when trying to evaluate the property with the
-   *  given concrete arguments. */
-  sealed case class PropException(args: Prop.Args, e: Throwable,
-    labels: Set[String]) extends Status
-
-  /** An exception was raised when trying to generate concrete arguments
-   *  for evaluating the property. */
-  sealed case class GenException(e: Throwable) extends Status
-
-  trait TestCallback { self =>
-    /** Called each time a property is evaluated */
-    def onPropEval(name: String, threadIdx: Int, succeeded: Int,
-      discarded: Int): Unit = ()
-
-    /** Called whenever a property has finished testing */
-    def onTestResult(name: String, result: Result): Unit = ()
-
-    def chain(testCallback: TestCallback) = new TestCallback {
-      override def onPropEval(name: String, threadIdx: Int,
-        succeeded: Int, discarded: Int
-      ): Unit = {
-        self.onPropEval(name,threadIdx,succeeded,discarded)
-        testCallback.onPropEval(name,threadIdx,succeeded,discarded)
-      }
-
-      override def onTestResult(name: String, result: Result): Unit = {
-        self.onTestResult(name,result)
-        testCallback.onTestResult(name,result)
-      }
-    }
-  }
-
-  private def assertParams(prms: Params) = {
-    import prms._
-    if(
-      minSuccessfulTests <= 0 ||
-      maxDiscardRatio <= 0 ||
-      minSize < 0 ||
-      maxSize < minSize ||
-      workers <= 0
-    ) throw new IllegalArgumentException("Invalid test parameters")
-  }
-
-  private def secure[T](x: => T): Either[T,Throwable] =
-    try { Left(x) } catch { case e => Right(e) }
-
-  private[scalacheck] lazy val cmdLineParser = new CmdLineParser {
-    object OptMinSuccess extends IntOpt {
-      val default = Test.Params().minSuccessfulTests
-      val names = Set("minSuccessfulTests", "s")
-      val help = "Number of tests that must succeed in order to pass a property"
-    }
-    object OptMaxDiscardRatio extends FloatOpt {
-      val default = Test.Params().maxDiscardRatio
-      val names = Set("maxDiscardRatio", "r")
-      val help =
-        "The maximum ratio between discarded and succeeded tests " +
-        "allowed before ScalaCheck stops testing a property. At " +
-        "least minSuccessfulTests will always be tested, though."
-    }
-    object OptMinSize extends IntOpt {
-      val default = Test.Params().minSize
-      val names = Set("minSize", "n")
-      val help = "Minimum data generation size"
-    }
-    object OptMaxSize extends IntOpt {
-      val default = Test.Params().maxSize
-      val names = Set("maxSize", "x")
-      val help = "Maximum data generation size"
-    }
-    object OptWorkers extends IntOpt {
-      val default = Test.Params().workers
-      val names = Set("workers", "w")
-      val help = "Number of threads to execute in parallel for testing"
-    }
-    object OptVerbosity extends IntOpt {
-      val default = 1
-      val names = Set("verbosity", "v")
-      val help = "Verbosity level"
-    }
-
-    val opts = Set[Opt[_]](
-      OptMinSuccess, OptMaxDiscardRatio, OptMinSize,
-      OptMaxSize, OptWorkers, OptVerbosity
-    )
-
-    def parseParams(args: Array[String]) = parseArgs(args) {
-      optMap => Test.Params(
-        minSuccessfulTests = optMap(OptMinSuccess),
-        maxDiscardRatio = optMap(OptMaxDiscardRatio),
-        minSize = optMap(OptMinSize),
-        maxSize = optMap(OptMaxSize),
-        rng = Test.Params().rng,
-        workers = optMap(OptWorkers),
-        testCallback = ConsoleReporter(optMap(OptVerbosity))
-      )
-    }
-  }
-
-  /** Tests a property with the given testing parameters, and returns
-   *  the test results. */
-  def check(params: Params, p: Prop): Result = {
-
-    // maxDiscardedTests is deprecated, but if someone
-    // uses it let it override maxDiscardRatio
-    val mdr =
-      if(params.maxDiscardedTests < 0) params.maxDiscardRatio
-      else (params.maxDiscardedTests: Float)/(params.minSuccessfulTests: Float)
-    val prms = params.copy( maxDiscardRatio = mdr)
-
-    import prms._
-    import scala.actors.Futures.future
-
-    assertParams(prms)
-    if(workers > 1)
-      assert(!p.isInstanceOf[Commands], "Commands cannot be checked multi-threaded")
-
-    val iterations = math.ceil(minSuccessfulTests / (workers: Double))
-    val sizeStep = (maxSize-minSize) / (iterations*workers)
-    var stop = false
-
-    def worker(workerIdx: Int) = future {
-      params.customClassLoader.map(Thread.currentThread.setContextClassLoader(_))
-      var n = 0  // passed tests
-      var d = 0  // discarded tests
-      var res: Result = null
-      var fm = FreqMap.empty[immutable.Set[Any]]
-      while(!stop && res == null && n < iterations) {
-        val size = (minSize: Double) + (sizeStep * (workerIdx + (workers*(n+d))))
-        val propPrms = Prop.Params(Gen.Params(size.round.toInt, prms.rng), fm)
-        secure(p(propPrms)) match {
-          case Right(e) => res =
-            Result(GenException(e), n, d, FreqMap.empty[immutable.Set[Any]])
-          case Left(propRes) =>
-            fm =
-              if(propRes.collected.isEmpty) fm
-              else fm + propRes.collected
-            propRes.status match {
-              case Prop.Undecided =>
-                d += 1
-                testCallback.onPropEval("", workerIdx, n, d)
-                // The below condition is kind of hacky. We have to have
-                // some margin, otherwise workers might stop testing too
-                // early because they have been exhausted, but the overall
-                // test has not.
-                if (n+d > minSuccessfulTests && 1+workers*maxDiscardRatio*n < d)
-                  res = Result(Exhausted, n, d, fm)
-              case Prop.True =>
-                n += 1
-                testCallback.onPropEval("", workerIdx, n, d)
-              case Prop.Proof =>
-                n += 1
-                res = Result(Proved(propRes.args), n, d, fm)
-                stop = true
-              case Prop.False =>
-                res = Result(Failed(propRes.args,propRes.labels), n, d, fm)
-                stop = true
-              case Prop.Exception(e) =>
-                res = Result(PropException(propRes.args,e,propRes.labels), n, d, fm)
-                stop = true
-            }
-        }
-      }
-      if (res == null) {
-        if (maxDiscardRatio*n > d) Result(Passed, n, d, fm)
-        else Result(Exhausted, n, d, fm)
-      } else res
-    }
-
-    def mergeResults(r1: () => Result, r2: () => Result) = {
-      val Result(st1, s1, d1, fm1, _) = r1()
-      val Result(st2, s2, d2, fm2, _) = r2()
-      if (st1 != Passed && st1 != Exhausted)
-        () => Result(st1, s1+s2, d1+d2, fm1++fm2, 0)
-      else if (st2 != Passed && st2 != Exhausted)
-        () => Result(st2, s1+s2, d1+d2, fm1++fm2, 0)
-      else {
-        if (s1+s2 >= minSuccessfulTests && maxDiscardRatio*(s1+s2) >= (d1+d2))
-          () => Result(Passed, s1+s2, d1+d2, fm1++fm2, 0)
-        else
-          () => Result(Exhausted, s1+s2, d1+d2, fm1++fm2, 0)
-      }
-    }
-
-    val start = System.currentTimeMillis
-    val results = for(i <- 0 until workers) yield worker(i)
-    val r = results.reduceLeft(mergeResults)()
-    stop = true
-    results foreach (_.apply())
-    val timedRes = r.copy(time = System.currentTimeMillis-start)
-    prms.testCallback.onTestResult("", timedRes)
-    timedRes
-  }
-
-  def checkProperties(prms: Params, ps: Properties): Seq[(String,Result)] =
-    ps.properties.map { case (name,p) =>
-      val testCallback = new TestCallback {
-        override def onPropEval(n: String, t: Int, s: Int, d: Int) =
-          prms.testCallback.onPropEval(name,t,s,d)
-        override def onTestResult(n: String, r: Result) =
-          prms.testCallback.onTestResult(name,r)
-      }
-      val res = check(prms copy (testCallback = testCallback), p)
-      (name,res)
-    }
-
-}
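
The runner above is driven through Test.Params; a minimal sketch of a programmatic check, with illustrative parameter values:

    import org.scalacheck.{Prop, Test}

    object TestRunSketch {
      val prop: Prop = Prop.forAll { (n: Int) => n + 0 == n }

      // Require 500 passing evaluations instead of the default 100
      val result: Test.Result = Test.check(Test.Params(minSuccessfulTests = 500), prop)

      val ok: Boolean = result.passed
    }
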
diff --git a/src/scalacheck/org/scalacheck/util/Buildable.scala b/src/scalacheck/org/scalacheck/util/Buildable.scala
deleted file mode 100644
index 221b8a6..0000000
--- a/src/scalacheck/org/scalacheck/util/Buildable.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/*-------------------------------------------------------------------------*\
-**  ScalaCheck                                                             **
-**  Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved.          **
-**  http://www.scalacheck.org                                              **
-**                                                                         **
-**  This software is released under the terms of the Revised BSD License.  **
-**  There is NO WARRANTY. See the file LICENSE for the full text.          **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-import scala.collection._
-import scala.reflect.ClassTag
-
-trait Buildable[T,C[_]] {
-  def builder: mutable.Builder[T,C[T]]
-  def fromIterable(it: Traversable[T]): C[T] = {
-    val b = builder
-    b ++= it
-    b.result()
-  }
-}
-
-object Buildable {
-
-  implicit def buildableList[T] = new Buildable[T,List] {
-    def builder = new mutable.ListBuffer[T]
-  }
-
-  implicit def buildableStream[T] = new Buildable[T,Stream] {
-    def builder = (new mutable.ListBuffer[T]).mapResult(_.toStream)
-  }
-
-  implicit def buildableArray[T](implicit cm: ClassTag[T]) =
-    new Buildable[T,Array] {
-      def builder = mutable.ArrayBuilder.make[T]
-    }
-
-  implicit def buildableMutableSet[T] = new Buildable[T,mutable.Set] {
-    def builder = new mutable.SetBuilder(mutable.Set.empty[T])
-  }
-
-  implicit def buildableImmutableSet[T] = new Buildable[T,immutable.Set] {
-    def builder = new mutable.SetBuilder(immutable.Set.empty[T])
-  }
-
-  implicit def buildableSet[T] = new Buildable[T,Set] {
-    def builder = new mutable.SetBuilder(Set.empty[T])
-  }
-
-  import java.util.ArrayList
-  implicit def buildableArrayList[T] = new Buildable[T,ArrayList] {
-    def builder = new mutable.Builder[T,ArrayList[T]] {
-      val al = new ArrayList[T]
-      def +=(x: T) = {
-        al.add(x)
-        this
-      }
-      def clear() = al.clear()
-      def result() = al
-    }
-  }
-
-}
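
Buildable is the factory abstraction that shrinkContainer earlier in this diff relies on to rebuild a collection from a stream of elements; used on its own it looks like this (sketch only):

    import org.scalacheck.util.Buildable

    object BuildableSketch {
      // Rebuild a List[Int] from any Traversable via the implicit List instance
      val listInstance = implicitly[Buildable[Int, List]]
      val xs: List[Int] = listInstance.fromIterable(Seq(1, 2, 3))
    }
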
diff --git a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
deleted file mode 100644
index 4683c34..0000000
--- a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-/*-------------------------------------------------------------------------*\
-**  ScalaCheck                                                             **
-**  Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved.          **
-**  http://www.scalacheck.org                                              **
-**                                                                         **
-**  This software is released under the terms of the Revised BSD License.  **
-**  There is NO WARRANTY. See the file LICENSE for the full text.          **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-import scala.util.parsing.combinator.Parsers
-import scala.util.parsing.input.Reader
-import scala.util.parsing.input.Position
-import scala.collection.Set
-import org.scalacheck.Test
-
-trait CmdLineParser extends Parsers {
-
-  type Elem = String
-
-  trait Opt[+T] {
-    val default: T
-    val names: Set[String]
-    val help: String
-  }
-  trait Flag extends Opt[Unit]
-  trait IntOpt extends Opt[Int]
-  trait FloatOpt extends Opt[Float]
-  trait StrOpt extends Opt[String]
-
-  class OptMap {
-    private val opts = new scala.collection.mutable.HashMap[Opt[_], Any]
-    def apply(flag: Flag): Boolean = opts.contains(flag)
-    def apply[T](opt: Opt[T]): T = opts.get(opt) match {
-      case None => opt.default
-      case Some(v) => v.asInstanceOf[T]
-    }
-    def update[T](opt: Opt[T], optVal: T) = opts.update(opt, optVal)
-  }
-
-  val opts: Set[Opt[_]]
-
-  private class ArgsReader(args: Array[String], i: Int) extends Reader[String] {
-    val pos = new Position {
-      val column = (args take i).foldLeft(1)(_ + _.length + 1)
-      val line = 1
-      val lineContents = args.mkString(" ")
-    }
-    val atEnd = i >= args.length
-    def first = if(atEnd) null else args(i)
-    def rest = if(atEnd) this else new ArgsReader(args, i+1)
-  }
-
-  private def getOpt(s: String) = {
-    if(s == null || s.length == 0 || s.charAt(0) != '-') None
-    else opts.find(_.names.contains(s.drop(1)))
-  }
-
-  private val opt: Parser[Opt[Any]] = accept("option name", {
-    case s if getOpt(s).isDefined => getOpt(s).get
-  })
-
-  private val strVal: Parser[String] = accept("string", {
-    case s if s != null => s
-  })
-
-  private val intVal: Parser[Int] = accept("integer", {
-    case s if s != null && s.length > 0 && s.forall(_.isDigit) => s.toInt
-  })
-
-  private val floatVal: Parser[Float] = accept("float", {
-    case s if s != null && s.matches("[0987654321]+\\.?[0987654321]*")
-      => s.toFloat
-  })
-
-  private case class OptVal[T](o: Opt[T], v: T)
-
-  private val optVal: Parser[OptVal[Any]] = opt into {
-    case o: Flag => success(OptVal(o, ()))
-    case o: IntOpt => intVal ^^ (v => OptVal(o, v))
-    case o: FloatOpt => floatVal ^^ (v => OptVal(o, v))
-    case o: StrOpt => strVal ^^ (v => OptVal(o, v))
-  }
-
-  val options: Parser[OptMap] = rep(optVal) ^^ { xs =>
-    val map = new OptMap
-    xs.foreach { case OptVal(o,v) => map(o) = v }
-    map
-  }
-
-  def printHelp = {
-    println("Available options:")
-    opts.foreach { opt =>
-      println("  " + opt.names.map("-"+_).mkString(", ") + ": " + opt.help)
-    }
-  }
-
-  def parseArgs[T](args: Array[String])(f: OptMap => T) =
-    phrase(options map f)(new ArgsReader(args,0))
-}
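
CmdLineParser is designed to be extended with concrete option objects, as the Test module above does with its cmdLineParser; a standalone sketch, with a made-up option:

    import org.scalacheck.util.CmdLineParser

    object SketchParser extends CmdLineParser {
      object OptSeed extends IntOpt {
        val default = 0
        val names = Set("seed", "d")
        val help = "Initial seed for the random generator"
      }
      val opts = Set[Opt[_]](OptSeed)
    }

    // SketchParser.parseArgs(Array("-d", "42"))(optMap => optMap(OptSeed)) parses the value 42 on success
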
diff --git a/src/scalacheck/org/scalacheck/util/FreqMap.scala b/src/scalacheck/org/scalacheck/util/FreqMap.scala
deleted file mode 100644
index c7474d3..0000000
--- a/src/scalacheck/org/scalacheck/util/FreqMap.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-/*-------------------------------------------------------------------------*\
-**  ScalaCheck                                                             **
-**  Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved.          **
-**  http://www.scalacheck.org                                              **
-**                                                                         **
-**  This software is released under the terms of the Revised BSD License.  **
-**  There is NO WARRANTY. See the file LICENSE for the full text.          **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-trait FreqMap[T] {
-  protected val underlying: scala.collection.immutable.Map[T,Int]
-  val total: Int
-
-  def +(t: T) = new FreqMap[T] {
-    private val n = FreqMap.this.underlying.get(t) match {
-      case None => 1
-      case Some(n) => n+1
-    }
-    val underlying = FreqMap.this.underlying + (t -> n)
-    val total = FreqMap.this.total + 1
-  }
-
-  def -(t: T) = new FreqMap[T] {
-    val underlying = FreqMap.this.underlying.get(t) match {
-      case None => FreqMap.this.underlying
-      case Some(n) => FreqMap.this.underlying + (t -> (n-1))
-    }
-    val total = FreqMap.this.total + 1
-  }
-
-  def ++(fm: FreqMap[T]) = new FreqMap[T] {
-    private val keys = FreqMap.this.underlying.keySet ++ fm.underlying.keySet
-    private val mappings = keys.toStream.map { x =>
-      (x, fm.getCount(x).getOrElse(0) + FreqMap.this.getCount(x).getOrElse(0))
-    }
-    val underlying = scala.collection.immutable.Map(mappings: _*)
-    val total = FreqMap.this.total + fm.total
-  }
-
-  def --(fm: FreqMap[T]) = new FreqMap[T] {
-    val underlying = FreqMap.this.underlying transform {
-      case (x,n) => n - fm.getCount(x).getOrElse(0)
-    }
-    lazy val total = (0 /: underlying.valuesIterator) (_ + _)
-  }
-
-  def getCount(t: T) = underlying.get(t)
-
-  def getCounts: List[(T,Int)] = underlying.toList.sortBy(-_._2)
-
-  def getRatio(t: T) = for(c <- getCount(t)) yield (c: Float)/total
-
-  def getRatios = for((t,c) <- getCounts) yield (t, (c: Float)/total)
-
-  override def toString = underlying.toString
-}
-
-object FreqMap {
-  def empty[T] = new FreqMap[T] {
-    val underlying = scala.collection.immutable.Map.empty[T,Int]
-    val total = 0
-  }
-}
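
FreqMap is the frequency table into which the test runner above merges classification data; in isolation it behaves as sketched here, with illustrative values:

    import org.scalacheck.util.FreqMap

    object FreqMapSketch {
      val fm = FreqMap.empty[String] + "small" + "small" + "large"

      val counts: List[(String, Int)] = fm.getCounts        // List(("small", 2), ("large", 1))
      val ratio: Option[Float]        = fm.getRatio("small") // roughly Some(0.667)
    }
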
diff --git a/src/scalacheck/org/scalacheck/util/StdRand.scala b/src/scalacheck/org/scalacheck/util/StdRand.scala
deleted file mode 100644
index 317b0cc..0000000
--- a/src/scalacheck/org/scalacheck/util/StdRand.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-/*-------------------------------------------------------------------------*\
-**  ScalaCheck                                                             **
-**  Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved.          **
-**  http://www.scalacheck.org                                              **
-**                                                                         **
-**  This software is released under the terms of the Revised BSD License.  **
-**  There is NO WARRANTY. See the file LICENSE for the full text.          **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-object StdRand extends java.util.Random
diff --git a/src/scaladoc/scala/tools/ant/Scaladoc.scala b/src/scaladoc/scala/tools/ant/Scaladoc.scala
new file mode 100644
index 0000000..36a1405
--- /dev/null
+++ b/src/scaladoc/scala/tools/ant/Scaladoc.scala
@@ -0,0 +1,695 @@
+/*                     __                                               *\
+**     ________ ___   / /  ___     Scala Ant Tasks                      **
+**    / __/ __// _ | / /  / _ |    (c) 2005-2013, LAMP/EPFL             **
+**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
+** /____/\___/_/ |_/____/_/ | |                                         **
+**                          |/                                          **
+\*                                                                      */
+
+
+package scala.tools.ant
+
+import java.io.File
+
+import org.apache.tools.ant.Project
+import org.apache.tools.ant.types.{Path, Reference}
+import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper}
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.doc.Settings
+import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
+
+/** An Ant task to document Scala code.
+ *
+ *  This task can take the following parameters as attributes:
+ *  - `srcdir` (mandatory),
+ *  - `srcref`,
+ *  - `destdir`,
+ *  - `classpath`,
+ *  - `classpathref`,
+ *  - `sourcepath`,
+ *  - `sourcepathref`,
+ *  - `bootclasspath`,
+ *  - `bootclasspathref`,
+ *  - `extdirs`,
+ *  - `extdirsref`,
+ *  - `encoding`,
+ *  - `doctitle`,
+ *  - `header`,
+ *  - `footer`,
+ *  - `top`,
+ *  - `bottom`,
+ *  - `addparams`,
+ *  - `deprecation`,
+ *  - `docgenerator`,
+ *  - `docrootcontent`,
+ *  - `unchecked`,
+ *  - `nofail`,
+ *  - `skipPackages`.
+ *
+ *  It also takes the following parameters as nested elements:
+ *  - `src` (for srcdir),
+ *  - `classpath`,
+ *  - `sourcepath`,
+ *  - `bootclasspath`,
+ *  - `extdirs`.
+ *
+ *  @author Gilles Dubochet, Stephane Micheloud
+ */
+class Scaladoc extends ScalaMatchingTask {
+
+  /** The unique Ant file utilities instance to use in this task. */
+  private val fileUtils = FileUtils.getFileUtils()
+
+/*============================================================================*\
+**                             Ant user-properties                            **
+\*============================================================================*/
+
+  abstract class PermissibleValue {
+    val values: List[String]
+    def isPermissible(value: String): Boolean =
+      (value == "") || values.exists(_.startsWith(value))
+  }
+
+  /** Defines valid values for the `deprecation` and
+   *  `unchecked` properties.
+   */
+  object Flag extends PermissibleValue {
+    val values = List("yes", "no", "on", "off")
+    def getBooleanValue(value: String, flagName: String): Boolean =
+      if (Flag.isPermissible(value))
+        ("yes".equals(value) || "on".equals(value))
+      else
+        buildError("Unknown " + flagName + " flag '" + value + "'")
+  }
+
+  /** The directories that contain source files to compile. */
+  private var origin: Option[Path] = None
+  /** The directory to put the compiled files in. */
+  private var destination: Option[File] = None
+
+  /** The class path to use for this compilation. */
+  private var classpath: Option[Path] = None
+  /** The source path to use for this compilation. */
+  private var sourcepath: Option[Path] = None
+  /** The boot class path to use for this compilation. */
+  private var bootclasspath: Option[Path] = None
+  /** The external extensions path to use for this compilation. */
+  private var extdirs: Option[Path] = None
+
+  /** The character encoding of the files to compile. */
+  private var encoding: Option[String] = None
+
+  /** The fully qualified name of a doclet class, which will be used to generate the documentation. */
+  private var docgenerator: Option[String] = None
+
+  /** The file from which the documentation content of the root package will be taken */
+  private var docrootcontent: Option[File] = None
+
+  /** The document title of the generated HTML documentation. */
+  private var doctitle: Option[String] = None
+
+  /** The document footer of the generated HTML documentation. */
+  private var docfooter: Option[String] = None
+
+  /** The document version, to be added to the title. */
+  private var docversion: Option[String] = None
+
+  /** Instruct the compiler to generate links to sources */
+  private var docsourceurl: Option[String] = None
+
+  /** Point scaladoc at uncompilable sources. */
+  private var docUncompilable: Option[String] = None
+
+  /** Instruct the compiler to use additional parameters */
+  private var addParams: String = ""
+
+  /** Instruct the compiler to generate deprecation information. */
+  private var deprecation: Boolean = false
+
+  /** Instruct the compiler to generate unchecked information. */
+  private var unchecked: Boolean = false
+
+  /** Instruct the ant task not to fail in the event of errors */
+  private var nofail: Boolean = false
+
+  /** Instruct the scaladoc tool to document implicit conversions */
+  private var docImplicits: Boolean = false
+
+  /** Instruct the scaladoc tool to document all (including impossible) implicit conversions */
+  private var docImplicitsShowAll: Boolean = false
+
+  /** Instruct the scaladoc tool to output implicits debugging information */
+  private var docImplicitsDebug: Boolean = false
+
+  /** Instruct the scaladoc tool to create diagrams */
+  private var docDiagrams: Boolean = false
+
+  /** Instruct the scaladoc tool to output diagram creation debugging information */
+  private var docDiagramsDebug: Boolean = false
+
+  /** Instruct the scaladoc tool to use the binary given to create diagrams */
+  private var docDiagramsDotPath: Option[String] = None
+
+  /** Instruct the scaladoc to produce textual output from html pages, for easy diff-ing */
+  private var docRawOutput: Boolean = false
+
+  /** Instruct the scaladoc not to generate prefixes */
+  private var docNoPrefixes: Boolean = false
+
+  /** Instruct the scaladoc tool to group similar functions together */
+  private var docGroups: Boolean = false
+
+  /** Instruct the scaladoc tool to skip certain packages */
+  private var docSkipPackages: String = ""
+
+/*============================================================================*\
+**                             Properties setters                             **
+\*============================================================================*/
+
+  /** Sets the `srcdir` attribute. Used by [[http://ant.apache.org Ant]].
+   *
+   *  @param input The value of `origin`.
+   */
+  def setSrcdir(input: Path) {
+    if (origin.isEmpty) origin = Some(input)
+    else origin.get.append(input)
+  }
+
+  /** Sets the `origin` as a nested src Ant parameter.
+   *
+   *  @return An origin path to be configured.
+   */
+  def createSrc(): Path = {
+    if (origin.isEmpty) origin = Some(new Path(getProject))
+    origin.get.createPath()
+  }
+
+  /** Sets the `origin` as an external reference Ant parameter.
+   *
+   *  @param input A reference to an origin path.
+   */
+  def setSrcref(input: Reference) {
+    createSrc().setRefid(input)
+  }
+
+  /** Sets the `destdir` attribute. Used by [[http://ant.apache.org Ant]].
+   *
+   *  @param input The value of `destination`.
+   */
+  def setDestdir(input: File) {
+    destination = Some(input)
+  }
+
+  /** Sets the `classpath` attribute. Used by [[http://ant.apache.org Ant]].
+   *
+   *  @param input The value of `classpath`.
+   */
+  def setClasspath(input: Path) {
+    if (classpath.isEmpty) classpath = Some(input)
+    else classpath.get.append(input)
+  }
+
+  /** Sets the `classpath` as a nested classpath Ant parameter.
+   *
+   *  @return A class path to be configured.
+   */
+  def createClasspath(): Path = {
+    if (classpath.isEmpty) classpath = Some(new Path(getProject))
+    classpath.get.createPath()
+  }
+
+  /** Sets the `classpath` as an external reference Ant parameter.
+   *
+   *  @param input A reference to a class path.
+   */
+  def setClasspathref(input: Reference) =
+    createClasspath().setRefid(input)
+
+  /** Sets the `sourcepath` attribute. Used by [[http://ant.apache.org Ant]].
+   *
+   *  @param input The value of `sourcepath`.
+   */
+  def setSourcepath(input: Path) =
+    if (sourcepath.isEmpty) sourcepath = Some(input)
+    else sourcepath.get.append(input)
+
+  /** Sets the `sourcepath` as a nested sourcepath Ant parameter.
+   *
+   *  @return A source path to be configured.
+   */
+  def createSourcepath(): Path = {
+    if (sourcepath.isEmpty) sourcepath = Some(new Path(getProject))
+    sourcepath.get.createPath()
+  }
+
+  /** Sets the `sourcepath` as an external reference Ant parameter.
+   *
+   *  @param input A reference to a source path.
+   */
+  def setSourcepathref(input: Reference) =
+    createSourcepath().setRefid(input)
+
+  /** Sets the `bootclasspath` attribute. Used by [[http://ant.apache.org Ant]].
+   *
+   *  @param input The value of `bootclasspath`.
+   */
+  def setBootclasspath(input: Path) =
+    if (bootclasspath.isEmpty) bootclasspath = Some(input)
+    else bootclasspath.get.append(input)
+
+  /** Sets the `bootclasspath` as a nested `sourcepath` Ant parameter.
+   *
+   *  @return A source path to be configured.
+   */
+  def createBootclasspath(): Path = {
+    if (bootclasspath.isEmpty) bootclasspath = Some(new Path(getProject))
+    bootclasspath.get.createPath()
+  }
+
+  /** Sets the `bootclasspath` as an external reference Ant parameter.
+   *
+   *  @param input A reference to a source path.
+   */
+  def setBootclasspathref(input: Reference) {
+    createBootclasspath().setRefid(input)
+  }
+
+  /** Sets the external extensions path attribute. Used by [[http://ant.apache.org Ant]].
+   *
+   *  @param input The value of `extdirs`.
+   */
+  def setExtdirs(input: Path) {
+    if (extdirs.isEmpty) extdirs = Some(input)
+    else extdirs.get.append(input)
+  }
+
+  /** Sets the `extdirs` as a nested sourcepath Ant parameter.
+   *
+   *  @return An extensions path to be configured.
+   */
+  def createExtdirs(): Path = {
+    if (extdirs.isEmpty) extdirs = Some(new Path(getProject))
+    extdirs.get.createPath()
+  }
+
+  /** Sets the `extdirs` as an external reference Ant parameter.
+   *
+   *  @param input A reference to an extensions path.
+   */
+  def setExtdirsref(input: Reference) {
+    createExtdirs().setRefid(input)
+  }
+
+  /** Sets the `encoding` attribute. Used by Ant.
+   *
+   *  @param input The value of `encoding`.
+   */
+  def setEncoding(input: String) {
+    encoding = Some(input)
+  }
+
+  /** Sets the `docgenerator` attribute.
+   *
+   *  @param input A fully qualified class name of a doclet.
+   */
+  def setDocgenerator(input: String) {
+    docgenerator = Some(input)
+  }
+
+  /**
+   * Sets the `docrootcontent` attribute.
+   *
+   * @param input The file from which the documentation content of the root
+   * package will be taken.
+   */
+  def setDocrootcontent(input : File) {
+    docrootcontent = Some(input)
+  }
+
+  /** Sets the `docversion` attribute.
+   *
+   *  @param input The value of `docversion`.
+   */
+  def setDocversion(input: String) {
+    docversion = Some(input)
+  }
+
+  /** Sets the `docsourceurl` attribute.
+   *
+   *  @param input The value of `docsourceurl`.
+   */
+  def setDocsourceurl(input: String) {
+    docsourceurl = Some(input)
+  }
+
+  /** Sets the `doctitle` attribute.
+   *
+   *  @param input The value of `doctitle`.
+   */
+  def setDoctitle(input: String) {
+    doctitle = Some(input)
+  }
+
+  /** Sets the `docfooter` attribute.
+   *
+   *  @param input The value of `docfooter`.
+   */
+  def setDocfooter(input: String) {
+    docfooter = Some(input)
+  }
+
+  /** Set the `addparams` info attribute.
+   *
+   *  @param input The value for `addparams`.
+   */
+  def setAddparams(input: String) {
+    addParams = input
+  }
+
+  /** Set the `deprecation` info attribute.
+   *
+   *  @param input One of the flags `yes/no` or `on/off`.
+   */
+  def setDeprecation(input: String) {
+    if (Flag.isPermissible(input))
+      deprecation = "yes".equals(input) || "on".equals(input)
+    else
+      buildError("Unknown deprecation flag '" + input + "'")
+  }
+
+  /** Set the `unchecked` info attribute.
+   *
+   *  @param input One of the flags `yes/no` or `on/off`.
+   */
+  def setUnchecked(input: String) {
+    if (Flag.isPermissible(input))
+      unchecked = "yes".equals(input) || "on".equals(input)
+    else
+      buildError("Unknown unchecked flag '" + input + "'")
+  }
+
+  def setDocUncompilable(input: String) {
+    docUncompilable = Some(input)
+  }
+
+  /** Set the `nofail` info attribute.
+   *
+   *  @param input One of the flags `yes/no` or `on/off`. Default is no/off.
+   */
+  def setNoFail(input: String) =
+      nofail = Flag.getBooleanValue(input, "nofail")
+
+  /** Set the `implicits` info attribute.
+   *  @param input One of the flags `yes/no` or `on/off`. Default is no/off. */
+  def setImplicits(input: String) =
+    docImplicits = Flag.getBooleanValue(input, "implicits")
+
+  /** Set the `implicitsShowAll` info attribute to enable scaladoc to show all implicits, including those impossible to
+   *  convert to from the default scope
+   *  @param input One of the flags `yes/no` or `on/off`. Default is no/off. */
+  def setImplicitsShowAll(input: String) =
+    docImplicitsShowAll = Flag.getBooleanValue(input, "implicitsShowAll")
+
+  /** Set the `implicitsDebug` info attribute so scaladoc outputs implicit conversion debug information
+   *  @param input One of the flags `yes/no` or `on/off`. Default is no/off. */
+  def setImplicitsDebug(input: String) =
+    docImplicitsDebug = Flag.getBooleanValue(input, "implicitsDebug")
+
+  /** Set the `diagrams` bit so Scaladoc adds diagrams to the documentation
+   *  @param input One of the flags `yes/no` or `on/off`. Default is no/off. */
+  def setDiagrams(input: String) =
+    docDiagrams = Flag.getBooleanValue(input, "diagrams")
+
+  /** Set the `diagramsDebug` bit so Scaladoc outputs diagram building debug information
+   *  @param input One of the flags `yes/no` or `on/off`. Default is no/off. */
+  def setDiagramsDebug(input: String) =
+    docDiagramsDebug = Flag.getBooleanValue(input, "diagramsDebug")
+
+  /** Set the `diagramsDotPath` attribute to the path where graphviz dot can be found (including the binary file name,
+   *  eg: /usr/bin/dot) */
+  def setDiagramsDotPath(input: String) =
+    docDiagramsDotPath = Some(input)
+
+  /** Set the `rawOutput` bit so Scaladoc also outputs text from each html file
+   *  @param input One of the flags `yes/no` or `on/off`. Default is no/off. */
+  def setRawOutput(input: String) =
+    docRawOutput = Flag.getBooleanValue(input, "rawOutput")
+
+  /** Set the `noPrefixes` bit to prevent Scaladoc from generating prefixes in
+   *  front of types -- may lead to confusion, but significantly speeds up the generation.
+   *  @param input One of the flags `yes/no` or `on/off`. Default is no/off. */
+  def setNoPrefixes(input: String) =
+    docNoPrefixes = Flag.getBooleanValue(input, "noPrefixes")
+
+  /** Instruct the scaladoc tool to group similar functions together */
+  def setGroups(input: String) =
+    docGroups = Flag.getBooleanValue(input, "groups")
+
+  /** Instruct the scaladoc tool to skip certain packages.
+   *  @param input A colon-delimited list of fully qualified package names that will be skipped from scaladoc.
+   */
+  def setSkipPackages(input: String) =
+    docSkipPackages = input
+
+/*============================================================================*\
+**                             Properties getters                             **
+\*============================================================================*/
+
+  /** Gets the value of the `classpath` attribute in a
+   *  Scala-friendly form.
+   *
+   *  @return The class path as a list of files.
+   */
+  private def getClasspath: List[File] =
+    if (classpath.isEmpty) buildError("Member 'classpath' is empty.")
+    else classpath.get.list().toList map nameToFile
+
+  /** Gets the value of the `origin` attribute in a Scala-friendly
+   *  form.
+   *
+   *  @return The origin path as a list of files.
+   */
+  private def getOrigin: List[File] =
+    if (origin.isEmpty) buildError("Member 'origin' is empty.")
+    else origin.get.list().toList map nameToFile
+
+  /** Gets the value of the `destination` attribute in a
+   *  Scala-friendly form.
+   *
+   *  @return The destination as a file.
+   */
+  private def getDestination: File =
+    if (destination.isEmpty) buildError("Member 'destination' is empty.")
+    else existing(getProject resolveFile destination.get.toString)
+
+  /** Gets the value of the `sourcepath` attribute in a
+   *  Scala-friendly form.
+   *
+   *  @return The source path as a list of files.
+   */
+  private def getSourcepath: List[File] =
+    if (sourcepath.isEmpty) buildError("Member 'sourcepath' is empty.")
+    else sourcepath.get.list().toList map nameToFile
+
+  /** Gets the value of the `bootclasspath` attribute in a
+   *  Scala-friendly form.
+   *
+   *  @return The boot class path as a list of files.
+   */
+  private def getBootclasspath: List[File] =
+    if (bootclasspath.isEmpty) buildError("Member 'bootclasspath' is empty.")
+    else bootclasspath.get.list().toList map nameToFile
+
+  /** Gets the value of the `extdirs` attribute in a
+   *  Scala-friendly form.
+   *
+   *  @return The extensions path as a list of files.
+   */
+  private def getExtdirs: List[File] =
+    if (extdirs.isEmpty) buildError("Member 'extdirs' is empty.")
+    else extdirs.get.list().toList map nameToFile
+
+/*============================================================================*\
+**                       Compilation and support methods                      **
+\*============================================================================*/
+
+  /** This is a forwarding method to circumvent bug #281 in Scala 2. Remove when
+   *  the bug has been corrected.
+   */
+  override protected def getDirectoryScanner(baseDir: java.io.File) =
+    super.getDirectoryScanner(baseDir)
+
+  /** Transforms a string name into a file relative to the provided base
+   *  directory.
+   *
+   *  @param base A file pointing to the location relative to which the name
+   *              will be resolved.
+   *  @param name A relative or absolute path to the file as a string.
+   *  @return     A file created from the name and the base file.
+   */
+  private def nameToFile(base: File)(name: String): File =
+    existing(fileUtils.resolveFile(base, name))
+
+  /** Transforms a string name into a file relative to the build root
+   *  directory.
+   *
+   *  @param name A relative or absolute path to the file as a string.
+   *  @return     A file created from the name.
+   */
+  private def nameToFile(name: String): File =
+    existing(getProject resolveFile name)
+
+  /** Tests if a file exists and prints a warning in case it doesn't. Always
+   *  returns the file, even if it doesn't exist.
+   *
+   *  @param file A file to test for existence.
+   *  @return     The same file.
+   */
+  private def existing(file: File): File = {
+    if (!file.exists())
+      log("Element '" + file.toString + "' does not exist.",
+          Project.MSG_WARN)
+    file
+  }
+
+  /** Transforms a path into a Scalac-readable string.
+   *
+   *  @param path A path to convert.
+   *  @return     A string-representation of the path like `a.jar:b.jar`.
+   */
+  private def asString(path: List[File]): String =
+    path.map(asString).mkString("", File.pathSeparator, "")
+
+  /** Transforms a file into a Scalac-readable string.
+   *
+   *  @param file A file to convert.
+   *  @return     A string-representation of the file like `/x/k/a.scala`.
+   */
+  private def asString(file: File): String =
+    file.getAbsolutePath()
+
+/*============================================================================*\
+**                           The big execute method                           **
+\*============================================================================*/
+
+  /** Initializes settings and source files */
+  protected def initialize: Tuple2[Settings, List[File]] = {
+    // Tests if all mandatory attributes are set and valid.
+    if (origin.isEmpty) buildError("Attribute 'srcdir' is not set.")
+    if (getOrigin.isEmpty) buildError("Attribute 'srcdir' is not set.")
+    if (!destination.isEmpty && !destination.get.isDirectory())
+      buildError("Attribute 'destdir' does not refer to an existing directory.")
+    if (destination.isEmpty) destination = Some(getOrigin.head)
+
+    val mapper = new GlobPatternMapper()
+    mapper setTo "*.html"
+    mapper setFrom "*.scala"
+
+    // Scans source directories to build up the compile list.
+    // If force is false, only files where the .class file in the destination is
+    // older than the .scala file will be used.
+    val sourceFiles: List[File] =
+      for {
+        originDir <- getOrigin
+        originFile <- {
+          val includedFiles =
+            getDirectoryScanner(originDir).getIncludedFiles()
+          val list = includedFiles.toList
+          if (list.length > 0)
+            log(
+              "Documenting " + list.length + " source file" +
+              (if (list.length > 1) "s" else "") +
+              (" to " + getDestination.toString)
+            )
+          else
+            log("No files selected for documentation", Project.MSG_VERBOSE)
+
+          list
+        }
+      } yield {
+        log(originFile, Project.MSG_DEBUG)
+        nameToFile(originDir)(originFile)
+      }
+
+    def decodeEscapes(s: String): String = {
+      // In an Ant script the characters '<' and '>' must be encoded when
+      // used in attribute values, e.g. for the attributes "doctitle", "header", ..
+      // in the Scaladoc task you may write:
+      //   doctitle="&lt;div&gt;Scala&lt;/div&gt;"
+      // so we have to decode them here.
+      s.replaceAll("&lt;", "<").replaceAll("&gt;", ">")
+       .replaceAll("&amp;", "&").replaceAll("&quot;", "\"")
+    }
+
+    // Builds-up the compilation settings for Scalac with the existing Ant
+    // parameters.
+    val docSettings = new Settings(buildError)
+    docSettings.outdir.value = asString(destination.get)
+    if (!classpath.isEmpty)
+      docSettings.classpath.value = asString(getClasspath)
+    if (!sourcepath.isEmpty)
+      docSettings.sourcepath.value = asString(getSourcepath)
+    /*else if (origin.get.size() > 0)
+      settings.sourcepath.value = origin.get.list()(0)*/
+    if (!bootclasspath.isEmpty)
+      docSettings.bootclasspath.value = asString(getBootclasspath)
+    if (!extdirs.isEmpty) docSettings.extdirs.value = asString(getExtdirs)
+    if (!encoding.isEmpty) docSettings.encoding.value = encoding.get
+    if (!doctitle.isEmpty) docSettings.doctitle.value = decodeEscapes(doctitle.get)
+    if (!docfooter.isEmpty) docSettings.docfooter.value = decodeEscapes(docfooter.get)
+    if (!docversion.isEmpty) docSettings.docversion.value = decodeEscapes(docversion.get)
+    if (!docsourceurl.isEmpty) docSettings.docsourceurl.value = decodeEscapes(docsourceurl.get)
+    if (!docUncompilable.isEmpty) docSettings.docUncompilable.value = decodeEscapes(docUncompilable.get)
+
+    docSettings.deprecation.value = deprecation
+    docSettings.unchecked.value = unchecked
+    docSettings.docImplicits.value = docImplicits
+    docSettings.docImplicitsDebug.value = docImplicitsDebug
+    docSettings.docImplicitsShowAll.value = docImplicitsShowAll
+    docSettings.docDiagrams.value = docDiagrams
+    docSettings.docDiagramsDebug.value = docDiagramsDebug
+    docSettings.docRawOutput.value = docRawOutput
+    docSettings.docNoPrefixes.value = docNoPrefixes
+    docSettings.docGroups.value = docGroups
+    docSettings.docSkipPackages.value = docSkipPackages
+    if(!docDiagramsDotPath.isEmpty) docSettings.docDiagramsDotPath.value = docDiagramsDotPath.get
+
+    if (!docgenerator.isEmpty) docSettings.docgenerator.value = docgenerator.get
+    if (!docrootcontent.isEmpty) docSettings.docRootContent.value = docrootcontent.get.getAbsolutePath()
+    log("Scaladoc params = '" + addParams + "'", Project.MSG_DEBUG)
+
+    docSettings processArgumentString addParams
+    (docSettings, sourceFiles)
+  }
+
+  def safeBuildError(message: String): Unit = if (nofail) log(message) else buildError(message)
+
+  /** Performs the compilation. */
+  override def execute() = {
+    val (docSettings, sourceFiles) = initialize
+    val reporter = new ConsoleReporter(docSettings)
+    try {
+      val docProcessor = new scala.tools.nsc.doc.DocFactory(reporter, docSettings)
+      docProcessor.document(sourceFiles.map (_.toString))
+      if (reporter.ERROR.count > 0)
+        safeBuildError(
+          "Document failed with " +
+          reporter.ERROR.count + " error" +
+          (if (reporter.ERROR.count > 1) "s" else "") +
+          "; see the documenter error output for details.")
+      else if (reporter.WARNING.count > 0)
+        log(
+          "Document succeeded with " +
+          reporter.WARNING.count + " warning" +
+          (if (reporter.WARNING.count > 1) "s" else "") +
+          "; see the documenter output for details.")
+      reporter.printSummary()
+    } catch {
+      case exception: Throwable =>
+        exception.printStackTrace()
+        val msg = Option(exception.getMessage) getOrElse "no error message provided"
+        safeBuildError(s"Document failed because of an internal documenter error ($msg); see the error output for details.")
+    }
+  }
+}
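
A minimal sketch of the entity decoding the task performs in `decodeEscapes`:
attribute values written with XML entities in a build.xml are turned back into
literal characters before being handed to the Scaladoc settings. The object
name and the sample input below are illustrative, not part of the task itself.

    object DecodeEscapesDemo {
      // Same substitutions as decodeEscapes above.
      private def decodeEscapes(s: String): String =
        s.replaceAll("&lt;", "<").replaceAll("&gt;", ">")
         .replaceAll("&amp;", "&").replaceAll("&quot;", "\"")

      def main(args: Array[String]): Unit =
        // e.g. doctitle="&lt;div&gt;Scala &amp; Scaladoc&lt;/div&gt;" in build.xml
        println(decodeEscapes("&lt;div&gt;Scala &amp; Scaladoc&lt;/div&gt;"))
    }
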
diff --git a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
new file mode 100644
index 0000000..52a0c20
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
@@ -0,0 +1,72 @@
+/* scaladoc, a documentation generator for Scala
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Martin Odersky
+ * @author  Geoffrey Washburn
+ */
+
+package scala.tools.nsc
+
+import java.io.File.pathSeparator
+import scala.tools.nsc.doc.DocFactory
+import scala.tools.nsc.reporters.ConsoleReporter
+import scala.reflect.internal.util.FakePos
+
+/** The main class for scaladoc, a front-end for the Scala compiler
+ *  that generates documentation from source files.
+ */
+class ScalaDoc {
+  val versionMsg = "Scaladoc %s -- %s".format(Properties.versionString, Properties.copyrightString)
+
+  def process(args: Array[String]): Boolean = {
+    var reporter: ConsoleReporter = null
+    val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\n  scaladoc -help  gives more information"),
+                                       msg => reporter.printMessage(msg))
+    reporter = new ConsoleReporter(docSettings) {
+      // need to do this so that the Global instance doesn't trash all the
+      // symbols just because there was an error
+      override def hasErrors = false
+    }
+    val command = new ScalaDoc.Command(args.toList, docSettings)
+    def hasFiles = command.files.nonEmpty || docSettings.uncompilableFiles.nonEmpty
+
+    if (docSettings.version.value)
+      reporter.echo(versionMsg)
+    else if (docSettings.Xhelp.value)
+      reporter.echo(command.xusageMsg)
+    else if (docSettings.Yhelp.value)
+      reporter.echo(command.yusageMsg)
+    else if (docSettings.showPlugins.value)
+      reporter.warning(null, "Plugins are not available when using Scaladoc")
+    else if (docSettings.showPhases.value)
+      reporter.warning(null, "Phases are restricted when using Scaladoc")
+    else if (docSettings.help.value || !hasFiles)
+      reporter.echo(command.usageMsg)
+    else
+      try { new DocFactory(reporter, docSettings) document command.files }
+    catch {
+      case ex @ FatalError(msg) =>
+        if (docSettings.debug.value) ex.printStackTrace()
+        reporter.error(null, "fatal error: " + msg)
+    }
+    finally reporter.printSummary()
+
+    // not much point in returning !reporter.hasErrors when it has
+    // been overridden with constant false.
+    true
+  }
+}
+
+object ScalaDoc extends ScalaDoc {
+  class Command(arguments: List[String], settings: doc.Settings) extends CompilerCommand(arguments, settings) {
+    override def cmdName = "scaladoc"
+    override def usageMsg = (
+      createUsageMsg("where possible scaladoc", shouldExplain = false, x => x.isStandard && settings.isScaladocSpecific(x.name)) +
+      "\n\nStandard scalac options also available:" +
+      createUsageMsg(x => x.isStandard && !settings.isScaladocSpecific(x.name))
+    )
+  }
+
+  def main(args: Array[String]): Unit = sys exit {
+    if (process(args)) 0 else 1
+  }
+}
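
The entry point above can also be driven programmatically instead of through
the `scaladoc` launcher. A sketch, assuming scala-compiler and scala-reflect
are on the classpath; the output directory and source file below are
placeholders:

    object RunScaladoc {
      def main(args: Array[String]): Unit = {
        // -usejavacp lets the documenter resolve the Scala library from the JVM classpath.
        val ok = scala.tools.nsc.ScalaDoc.process(
          Array("-usejavacp", "-d", "target/api", "src/main/scala/Example.scala"))
        sys.exit(if (ok) 0 else 1)
      }
    }
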
diff --git a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
new file mode 100644
index 0000000..dce52af
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
@@ -0,0 +1,132 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  David Bernard, Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+
+import scala.util.control.ControlThrowable
+import reporters.Reporter
+import scala.reflect.internal.util.BatchSourceFile
+
+/** A documentation processor controls the process of generating Scala
+  * documentation, which is as follows.
+  *
+  * * A simplified compiler instance (with only the front-end phases enabled)
+  *   is created, and additional `sourceless` comments are registered.
+  * * Documentable files are compiled, thereby filling the compiler's symbol table.
+  * * A documentation model is extracted from the post-compilation symbol table.
+  * * A generator is used to transform the model into the correct final format (HTML).
+  *
+  * A processor contains a single compiler instantiated from the processor's
+  * `settings`. Each call to `document` uses the same compiler instance with
+  * the same symbol table. In particular, this implies that the scaladoc site
+  * obtained from a call to `run` will contain documentation about files compiled
+  * during previous calls to the same processor's `run` method.
+  *
+  * @param reporter The reporter to which both documentation and compilation errors will be reported.
+  * @param settings The settings to be used by the documenter and compiler for generating documentation.
+  *
+  * @author Gilles Dubochet */
+class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor =>
+  /** The unique compiler instance used by this processor and constructed from its `settings`. */
+  object compiler extends ScaladocGlobal(settings, reporter)
+
+  /** Creates a scaladoc site for all symbols defined in this call's `source`,
+    * as well as those defined in `sources` of previous calls to the same processor.
+    * @param source The list of paths (relative to the compiler's source path,
+    *        or absolute) of files to document or the source code. */
+  def makeUniverse(source: Either[List[String], String]): Option[Universe] = {
+    assert(settings.docformat.value == "html")
+    source match {
+      case Left(files) =>
+        new compiler.Run() compile files
+      case Right(sourceCode) =>
+        new compiler.Run() compileSources List(new BatchSourceFile("newSource", sourceCode))
+    }
+
+    if (reporter.hasErrors)
+      return None
+
+    val extraTemplatesToDocument: Set[compiler.Symbol] = {
+      if (settings.docUncompilable.isDefault) Set()
+      else {
+        val uncompilable = new {
+          val global: compiler.type = compiler
+          val settings = processor.settings
+        } with Uncompilable { }
+
+        compiler.docComments ++= uncompilable.comments
+        docdbg("" + uncompilable)
+
+        uncompilable.templates
+      }
+    }
+
+    val modelFactory = (
+      new { override val global: compiler.type = compiler }
+        with model.ModelFactory(compiler, settings)
+        with model.ModelFactoryImplicitSupport
+        with model.ModelFactoryTypeSupport
+        with model.diagram.DiagramFactory
+        with model.CommentFactory
+        with model.TreeFactory
+        with model.MemberLookup {
+          override def templateShouldDocument(sym: compiler.Symbol, inTpl: DocTemplateImpl) =
+            extraTemplatesToDocument(sym) || super.templateShouldDocument(sym, inTpl)
+        }
+    )
+
+    modelFactory.makeModel match {
+      case Some(madeModel) =>
+        if (!settings.scaladocQuietRun)
+          println("model contains " + modelFactory.templatesCount + " documentable templates")
+        Some(madeModel)
+      case None =>
+        if (!settings.scaladocQuietRun)
+          println("no documentable class found in compilation units")
+        None
+    }
+  }
+
+  object NoCompilerRunException extends ControlThrowable { }
+
+  val documentError: PartialFunction[Throwable, Unit] = {
+    case NoCompilerRunException =>
+      reporter.info(null, "No documentation generated with unsuccessful compiler run", force = false)
+    case _: ClassNotFoundException =>
+      ()
+  }
+
+  /** Generate document(s) for all `files` containing scaladoc documentation.
+    * @param files The list of paths (relative to the compiler's source path, or absolute) of files to document. */
+  def document(files: List[String]) {
+    def generate() = {
+      import doclet._
+      val docletClass    = Class.forName(settings.docgenerator.value) // default is html.Doclet
+      val docletInstance = docletClass.newInstance().asInstanceOf[Generator]
+
+      docletInstance match {
+        case universer: Universer =>
+          val universe = makeUniverse(Left(files)) getOrElse { throw NoCompilerRunException }
+          universer setUniverse universe
+
+          docletInstance match {
+            case indexer: Indexer => indexer setIndex model.IndexModelFactory.makeIndex(universe)
+            case _                => ()
+          }
+        case _ => ()
+      }
+      docletInstance.generate()
+    }
+
+    try generate()
+    catch documentError
+  }
+
+  private[doc] def docdbg(msg: String) {
+    if (settings.Ydocdebug)
+      println(msg)
+  }
+}
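
Because `makeUniverse` also accepts a `Right(sourceCode)`, a documentation
model can be built from an in-memory string. A sketch under the assumption
that -usejavacp is enough to resolve the standard library; the error handler
and the source snippet are illustrative:

    import scala.tools.nsc.doc.{ DocFactory, Settings }
    import scala.tools.nsc.reporters.ConsoleReporter

    object UniverseFromString {
      def main(args: Array[String]): Unit = {
        val settings = new Settings(msg => Console.err.println(msg))
        settings.usejavacp.value = true  // resolve scala-library from the running JVM's classpath
        val reporter = new ConsoleReporter(settings)
        val factory  = new DocFactory(reporter, settings)
        // None means the compiler run reported errors.
        factory.makeUniverse(Right("/** A documented class. */ class Documented")) foreach { universe =>
          println("documented templates: " + universe.rootPackage.members.map(_.name).mkString(", "))
        }
      }
    }
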
diff --git a/src/scaladoc/scala/tools/nsc/doc/DocParser.scala b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
new file mode 100644
index 0000000..6dc3e5a
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
@@ -0,0 +1,69 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala.tools
+package nsc
+package doc
+
+import reporters._
+import scala.reflect.internal.util._
+import DocParser.Parsed
+
+/** A very minimal global customized for extracting `DocDefs`.  It stops
+ *  right after parsing so it can read `DocDefs` from source code which would
+ *  otherwise cause the compiler to go haywire.
+ */
+class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) {
+  def this(settings: Settings) = this(settings, new ConsoleReporter(settings))
+  def this() = this(new Settings(Console println _))
+
+  // the usual global initialization
+  locally { new Run() }
+
+  override protected def computeInternalPhases() {
+    phasesSet += syntaxAnalyzer
+  }
+
+  /** Returns a list of `DocParser.Parsed` instances, which hold the DocDefs found
+   *  in the given code along with the surrounding trees.
+   */
+  def docDefs(code: String) = {
+    def loop(enclosing: List[Tree], tree: Tree): List[Parsed] = tree match {
+      case x: PackageDef => x.stats flatMap (t => loop(enclosing :+ x, t))
+      case x: DocDef     => new Parsed(enclosing, x) :: loop(enclosing :+ x.definition, x.definition)
+      case x             => x.children flatMap (t => loop(enclosing, t))
+    }
+    loop(Nil, docUnit(code))
+  }
+
+  /** A compilation unit containing parsed source.
+   */
+  def docUnit(code: String) = {
+    val unit    = new CompilationUnit(new BatchSourceFile("<console>", code))
+    val scanner = newUnitParser(unit)
+
+    scanner.compilationUnit()
+  }
+}
+
+/** Since the DocParser's whole reason for existing involves trashing a
+ *  global, it is designed to bottle up general `Global#Tree` types rather
+ *  than path dependent ones.  The recipient will have to deal.
+ */
+object DocParser {
+  type Tree    = Global#Tree
+  type DefTree = Global#DefTree
+  type DocDef  = Global#DocDef
+  type Name    = Global#Name
+
+  class Parsed(val enclosing: List[Tree], val docDef: DocDef) {
+    def nameChain: List[Name] = (enclosing :+ docDef.definition) collect { case x: DefTree => x.name }
+    def raw: String           = docDef.comment.raw
+
+    override def toString = (
+      nameChain.init.map(x => if (x.isTypeName) x + "#" else x + ".").mkString + nameChain.last
+    )
+  }
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/Index.scala b/src/scaladoc/scala/tools/nsc/doc/Index.scala
new file mode 100644
index 0000000..84545e9
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/Index.scala
@@ -0,0 +1,14 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala.tools.nsc.doc
+
+import scala.collection._
+
+trait Index {
+  type SymbolMap = SortedMap[String, SortedSet[model.MemberEntity]]
+
+  def firstLetterIndex: Map[Char, SymbolMap]
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
new file mode 100644
index 0000000..e5c64c6
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
@@ -0,0 +1,261 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala.tools.nsc
+package doc
+
+import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
+import typechecker.Analyzer
+import scala.reflect.internal.Chars._
+import scala.reflect.internal.util.{ BatchSourceFile, Position }
+import scala.tools.nsc.doc.base.{ CommentFactoryBase, MemberLookupBase, LinkTo, LinkToExternal }
+
+trait ScaladocAnalyzer extends Analyzer {
+  val global : Global // generally, a ScaladocGlobal
+  import global._
+
+  override def newTyper(context: Context): ScaladocTyper = new Typer(context) with ScaladocTyper
+
+  trait ScaladocTyper extends Typer {
+    private def unit = context.unit
+
+    override def canAdaptConstantTypeToLiteral = false
+
+    override protected def macroImplementationNotFoundMessage(name: Name): String = (
+        super.macroImplementationNotFoundMessage(name)
+      + "\nWhen generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
+    )
+
+    override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = {
+      val sym = docDef.symbol
+
+      if ((sym ne null) && (sym ne NoSymbol)) {
+        val comment = docDef.comment
+        docComments(sym) = comment
+        comment.defineVariables(sym)
+        val typer1 = newTyper(context.makeNewScope(docDef, context.owner))
+        for (useCase <- comment.useCases) {
+          typer1.silent(_.asInstanceOf[ScaladocTyper].defineUseCases(useCase)) match {
+            case SilentTypeError(err) =>
+              unit.warning(useCase.pos, err.errMsg)
+            case _ =>
+          }
+          for (useCaseSym <- useCase.defined) {
+            if (sym.name != useCaseSym.name)
+              unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
+          }
+        }
+      }
+
+      super.typedDocDef(docDef, mode, pt)
+    }
+
+    def defineUseCases(useCase: UseCase): List[Symbol] = {
+      def stringParser(str: String): syntaxAnalyzer.Parser = {
+        val file = new BatchSourceFile(context.unit.source.file, str) {
+          override def positionInUltimateSource(pos: Position) = {
+            pos withSource context.unit.source withShift useCase.pos.start
+          }
+        }
+        newUnitParser(new CompilationUnit(file))
+      }
+
+      val trees = stringParser(useCase.body+";").nonLocalDefOrDcl
+      val enclClass = context.enclClass.owner
+
+      def defineAlias(name: Name) = (
+        if (context.scope.lookup(name) == NoSymbol) {
+          lookupVariable(name.toString.substring(1), enclClass) foreach { repl =>
+            silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt =>
+              val alias = enclClass.newAliasType(name.toTypeName, useCase.pos)
+              val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias)
+              val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe)))
+              alias setInfo newInfo
+              context.scope.enter(alias)
+            }
+          }
+        }
+      )
+
+      for (tree <- trees; t <- tree)
+        t match {
+          case Ident(name) if name startsWith '$' => defineAlias(name)
+          case _ =>
+        }
+
+      useCase.aliases = context.scope.toList
+      namer.enterSyms(trees)
+      typedStats(trees, NoSymbol)
+      useCase.defined = context.scope.toList filterNot (useCase.aliases contains _)
+
+      if (settings.debug)
+        useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe)))
+
+      useCase.defined
+    }
+  }
+}
+
+abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends SyntaxAnalyzer {
+  import global._
+
+  class ScaladocJavaUnitParser(unit: CompilationUnit) extends {
+    override val in = new ScaladocJavaUnitScanner(unit)
+  } with JavaUnitParser(unit) { }
+
+  class ScaladocJavaUnitScanner(unit: CompilationUnit) extends JavaUnitScanner(unit) {
+    /** buffer for the documentation comment
+     */
+    var docBuffer: StringBuilder = null
+
+    /** add the given character to the documentation buffer
+     */
+    protected def putDocChar(c: Char) {
+      if (docBuffer ne null) docBuffer.append(c)
+    }
+
+    override protected def skipComment(): Boolean = {
+      if (in.ch == '/') {
+        do {
+          in.next
+        } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU))
+        true
+      } else if (in.ch == '*') {
+        docBuffer = null
+        in.next
+        val scalaDoc = ("/**", "*/")
+        if (in.ch == '*')
+          docBuffer = new StringBuilder(scalaDoc._1)
+        do {
+          do {
+            if (in.ch != '*' && in.ch != SU) {
+              in.next; putDocChar(in.ch)
+            }
+          } while (in.ch != '*' && in.ch != SU)
+          while (in.ch == '*') {
+            in.next; putDocChar(in.ch)
+          }
+        } while (in.ch != '/' && in.ch != SU)
+        if (in.ch == '/') in.next
+        else incompleteInputError("unclosed comment")
+        true
+      } else {
+        false
+      }
+    }
+  }
+
+  class ScaladocUnitScanner(unit0: CompilationUnit, patches0: List[BracePatch]) extends UnitScanner(unit0, patches0) {
+
+    private var docBuffer: StringBuilder = null        // buffer for comments (non-null while scanning)
+    private var inDocComment             = false       // if buffer contains double-star doc comment
+    private var lastDoc: DocComment      = null        // last comment if it was double-star doc
+
+    private object unmooredParser extends {                // minimalist comment parser
+      val global: Global = ScaladocSyntaxAnalyzer.this.global
+    }
+    with CommentFactoryBase with MemberLookupBase {
+      import global.{ settings, Symbol }
+      def parseComment(comment: DocComment) = {
+        val nowarnings = settings.nowarn.value
+        settings.nowarn.value = true
+        try parseAtSymbol(comment.raw, comment.raw, comment.pos)
+        finally settings.nowarn.value = nowarnings
+      }
+
+      override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] = None
+      override def chooseLink(links: List[LinkTo]): LinkTo = links.headOption.orNull
+      override def toString(link: LinkTo): String = "No link"
+      override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = None
+      override def warnNoLink: Boolean = false
+    }
+
+    /**
+     * Warn when discarding buffered doc at the end of a block.
+     * This mechanism doesn't warn about arbitrary unmoored doc.
+     * Also warn under -Xlint, but otherwise only warn in the presence of suspicious
+     * tags that appear to be documenting API.  Warnings are suppressed while parsing
+     * the local comment so that comments of the form `[at] Martin` will not trigger a warning.
+     * By omission, tags for `see`, `todo`, `note` and `example` are ignored.
+     */
+    override def discardDocBuffer() = {
+      import scala.tools.nsc.doc.base.comment.Comment
+      val doc = flushDoc
+      // tags that make a local double-star comment look unclean, as though it were API
+      def unclean(comment: Comment): Boolean = {
+        import comment._
+        authors.nonEmpty || result.nonEmpty || throws.nonEmpty || valueParams.nonEmpty ||
+        typeParams.nonEmpty || version.nonEmpty || since.nonEmpty
+      }
+      def isDirty = unclean(unmooredParser parseComment doc)
+      if ((doc ne null) && (settings.lint || isDirty))
+        unit.warning(doc.pos, "discarding unmoored doc comment")
+    }
+
+    override def flushDoc(): DocComment = (try lastDoc finally lastDoc = null)
+
+    override protected def putCommentChar() {
+      if (inDocComment)
+        docBuffer append ch
+
+      nextChar()
+    }
+    override def skipDocComment(): Unit = {
+      inDocComment = true
+      docBuffer = new StringBuilder("/**")
+      super.skipDocComment()
+    }
+    override def skipBlockComment(): Unit = {
+      inDocComment = false
+      docBuffer = new StringBuilder("/*")
+      super.skipBlockComment()
+    }
+    override def skipComment(): Boolean = {
+      // emit a block comment; if it's double-star, make Doc at this pos
+      def foundStarComment(start: Int, end: Int) = try {
+        val str = docBuffer.toString
+        val pos = Position.range(unit.source, start, start, end)
+        unit.comment(pos, str)
+        if (inDocComment)
+          lastDoc = DocComment(str, pos)
+        true
+      } finally {
+        docBuffer    = null
+        inDocComment = false
+      }
+      super.skipComment() && ((docBuffer eq null) || foundStarComment(offset, charOffset - 2))
+    }
+  }
+  class ScaladocUnitParser(unit: CompilationUnit, patches: List[BracePatch]) extends UnitParser(unit, patches) {
+    override def newScanner() = new ScaladocUnitScanner(unit, patches)
+    override def withPatches(patches: List[BracePatch]) = new ScaladocUnitParser(unit, patches)
+
+    override def joinComment(trees: => List[Tree]): List[Tree] = {
+      val doc = in.flushDoc
+      if ((doc ne null) && doc.raw.length > 0) {
+        log(s"joinComment(doc=$doc)")
+        val joined = trees map {
+          t =>
+            DocDef(doc, t) setPos {
+              if (t.pos.isDefined) {
+                val pos = doc.pos.withEnd(t.pos.end)
+                // always make the position transparent
+                pos.makeTransparent
+              } else {
+                t.pos
+              }
+            }
+        }
+        joined.find(_.pos.isOpaqueRange) foreach {
+          main =>
+            val mains = List(main)
+            joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) }
+        }
+        joined
+      }
+      else trees
+    }
+  }
+}
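
For reference, the `@usecase` machinery handled by `defineUseCases` above is
fed by doc comments of the following shape; the trait and method names here
are illustrative only:

    trait Demo[+A] {
      /** Applies `f` to every element of this collection.
       *
       *  @usecase def foreach(f: A => Unit): Unit
       *    Runs `f` on each element, discarding its result.
       */
      def foreach[U](f: A => U): Unit
    }
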
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
new file mode 100644
index 0000000..2ea3a0e
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
@@ -0,0 +1,49 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package scala.tools.nsc
+package doc
+
+import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
+import reporters.Reporter
+import typechecker.Analyzer
+import scala.reflect.internal.util.{ BatchSourceFile, RangePosition }
+
+trait ScaladocGlobalTrait extends Global {
+  outer =>
+
+  override val useOffsetPositions = false
+  override def newUnitParser(unit: CompilationUnit) = new syntaxAnalyzer.ScaladocUnitParser(unit, Nil)
+
+  override lazy val syntaxAnalyzer = new ScaladocSyntaxAnalyzer[outer.type](outer) {
+    val runsAfter = List[String]()
+    val runsRightAfter = None
+  }
+
+  override lazy val loaders = new {
+    val global: outer.type = outer
+    val platform: outer.platform.type = outer.platform
+  } with GlobalSymbolLoaders {
+    // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented;
+    // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
+    // that are not in their correct place (see bug for details)
+    override protected def signalError(root: Symbol, ex: Throwable) {
+      log(s"Suppressing error involving $root: $ex")
+    }
+  }
+}
+
+class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends Global(settings, reporter) with ScaladocGlobalTrait {
+  override protected def computeInternalPhases() {
+    phasesSet += syntaxAnalyzer
+    phasesSet += analyzer.namerFactory
+    phasesSet += analyzer.packageObjects
+    phasesSet += analyzer.typerFactory
+  }
+  override def forScaladoc = true
+  override lazy val analyzer = new {
+    val global: ScaladocGlobal.this.type = ScaladocGlobal.this
+  } with ScaladocAnalyzer
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
new file mode 100644
index 0000000..67529f4
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
@@ -0,0 +1,350 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala.tools.nsc
+package doc
+
+import java.io.File
+import scala.language.postfixOps
+
+/** An extended version of compiler settings, with additional Scaladoc-specific options.
+  * @param error A function that prints a string to the appropriate error stream
+  * @param printMsg A function that prints the string, without any extra error boilerplate */
+class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) extends scala.tools.nsc.Settings(error) {
+
+  /** A setting that defines in which format the documentation is output. ''Note:'' this setting is currently always
+    * `html`. */
+  val docformat = ChoiceSetting (
+    "-doc-format",
+    "format",
+    "Selects in which format documentation is rendered",
+    List("html"),
+    "html"
+  )
+
+  /** A setting that defines the overall title of the documentation, typically the name of the library being
+    * documented. ''Note:'' This setting is currently not used. */
+  val doctitle = StringSetting (
+    "-doc-title",
+    "title",
+    "The overall name of the Scaladoc site",
+    ""
+  )
+
+  /** A setting that defines the overall version number of the documentation, typically the version of the library being
+    * documented. ''Note:'' This setting is currently not used. */
+  val docversion = StringSetting (
+    "-doc-version",
+    "version",
+    "An optional version number, to be appended to the title",
+    ""
+  )
+
+  val docfooter = StringSetting (
+    "-doc-footer",
+    "footer",
+    "A footer on every ScalaDoc page, by default the EPFL/Typesafe copyright notice. Can be overridden with a custom footer.",
+    ""
+  )
+
+  val docUncompilable = StringSetting (
+    "-doc-no-compile",
+    "path",
+    "A directory containing sources which should be parsed, no more (e.g. AnyRef.scala)",
+    ""
+  )
+
+  lazy val uncompilableFiles = docUncompilable.value match {
+    case ""     => Nil
+    case path   => io.Directory(path).deepFiles filter (_ hasExtension "scala") toList
+  }
+
+  /** A setting that defines a URL to be concatenated with source locations and shows a link to source files.
+   * If needed, the sourcepath option can be used to exclude an undesired initial part of the link to sources */
+  val docsourceurl = StringSetting (
+    "-doc-source-url",
+    "url",
+    "A URL pattern used to build links to template sources; use variables, for example: ?{TPL_NAME} ('Seq'), ?{TPL_OWNER} ('scala.collection'), ?{FILE_PATH} ('scala/collection/Seq')",
+    ""
+  )
+
+  val docExternalDoc = MultiStringSetting (
+    "-doc-external-doc",
+    "external-doc",
+    "comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies."
+  )
+
+  val useStupidTypes = BooleanSetting (
+    "-Yuse-stupid-types",
+    "Print the types of inherited members as seen from their original definition context. Hint: you don't want to do that!"
+  )
+
+  val docgenerator = StringSetting (
+    "-doc-generator",
+    "class-name",
+    "The fully qualified name of a doclet class, which will be used to generate the documentation",
+    "scala.tools.nsc.doc.html.Doclet"
+  )
+
+  val docRootContent = PathSetting (
+    "-doc-root-content",
+    "The file from which the root package documentation should be imported.",
+    ""
+  )
+
+  val docImplicits = BooleanSetting (
+    "-implicits",
+    "Document members inherited by implicit conversions."
+  )
+
+  val docImplicitsDebug = BooleanSetting (
+    "-implicits-debug",
+    "Show debugging information for members inherited by implicit conversions."
+  )
+
+  val docImplicitsShowAll = BooleanSetting (
+    "-implicits-show-all",
+    "Show members inherited by implicit conversions that are impossible in the default scope. " +
+    "(for example conversions that require Numeric[String] to be in scope)"
+  )
+
+  val docImplicitsSoundShadowing = BooleanSetting (
+    "-implicits-sound-shadowing",
+    "Use a sound implicit shadowing calculation. Note: this interacts badly with usecases, so " +
+    "only use it if you haven't defined usecase for implicitly inherited members."
+  )
+
+  val docImplicitsHide = MultiStringSetting (
+    "-implicits-hide",
+    "implicit(s)",
+    "Hide the members inherited by the given comma-separated, fully qualified implicit conversions. Add dot (.) to include default conversions."
+  )
+
+  val docAuthor = BooleanSetting (
+    "-author",
+    "Include authors."
+  )
+
+  val docDiagrams = BooleanSetting (
+    "-diagrams",
+    "Create inheritance diagrams for classes, traits and packages."
+  )
+
+  val docDiagramsDebug = BooleanSetting (
+    "-diagrams-debug",
+    "Show debugging information for the diagram creation process."
+  )
+
+  val docDiagramsDotPath = PathSetting (
+    "-diagrams-dot-path",
+    "The path to the dot executable used to generate the inheritance diagrams. Eg: /usr/bin/dot",
+    "dot" // by default, just pick up the system-wide dot
+  )
+
+  /** The maximum number of normal classes to show in the diagram */
+  val docDiagramsMaxNormalClasses = IntSetting(
+    "-diagrams-max-classes",
+    "The maximum number of superclasses or subclasses to show in a diagram",
+    15,
+    None,
+    _ => None
+  )
+
+  /** The maximum number of implicit classes to show in the diagram */
+  val docDiagramsMaxImplicitClasses = IntSetting(
+    "-diagrams-max-implicits",
+    "The maximum number of implicitly converted classes to show in a diagram",
+    10,
+    None,
+    _ => None
+  )
+
+  val docDiagramsDotTimeout = IntSetting(
+    "-diagrams-dot-timeout",
+    "The timeout before the graphviz dot util is forcefully closed, in seconds (default: 10)",
+    10,
+    None,
+    _ => None
+  )
+
+  val docDiagramsDotRestart = IntSetting(
+    "-diagrams-dot-restart",
+    "The number of times to restart a malfunctioning dot process before disabling diagrams (default: 5)",
+    5,
+    None,
+    _ => None
+  )
+
+  val docRawOutput = BooleanSetting (
+    "-raw-output",
+    "For each html file, create another .html.raw file containing only the text. (can be used for quickly diffing two scaladoc outputs)"
+  )
+
+  val docNoPrefixes = BooleanSetting (
+    "-no-prefixes",
+    "Prevents generating prefixes in types, possibly creating ambiguous references, but significantly speeding up scaladoc."
+  )
+
+  val docNoLinkWarnings = BooleanSetting (
+    "-no-link-warnings",
+    "Avoid warnings for ambiguous and incorrect links."
+  )
+
+  val docSkipPackages = StringSetting (
+    "-skip-packages",
+    "<package1>:...:<packageN>",
+    "A colon-delimited list of fully qualified package names that will be skipped from scaladoc.",
+    ""
+  )
+
+  val docExpandAllTypes = BooleanSetting (
+    "-expand-all-types",
+    "Expand all type aliases and abstract types into full template pages. (locally this can be done with the @template annotation)"
+  )
+
+  val docGroups = BooleanSetting (
+    "-groups",
+    "Group similar functions together (based on the @group annotation)"
+  )
+
+  // For improved help output.
+  def scaladocSpecific = Set[Settings#Setting](
+    docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
+    docAuthor, docDiagrams, docDiagramsDebug, docDiagramsDotPath,
+    docDiagramsDotTimeout, docDiagramsDotRestart,
+    docImplicits, docImplicitsDebug, docImplicitsShowAll, docImplicitsHide,
+    docDiagramsMaxNormalClasses, docDiagramsMaxImplicitClasses,
+    docNoPrefixes, docNoLinkWarnings, docRawOutput, docSkipPackages,
+    docExpandAllTypes, docGroups
+  )
+  val isScaladocSpecific: String => Boolean = scaladocSpecific map (_.name)
+
+  override def isScaladoc = true
+
+  // set by the testsuite, when checking test output
+  var scaladocQuietRun = false
+
+  lazy val skipPackageNames =
+    if (docSkipPackages.value == "")
+      Set[String]()
+    else
+      docSkipPackages.value.toLowerCase.split(':').toSet
+
+  def skipPackage(qname: String) =
+    skipPackageNames(qname.toLowerCase)
+
+  lazy val hiddenImplicits: Set[String] = {
+    if (docImplicitsHide.value.isEmpty) hardcoded.commonConversionTargets
+    else docImplicitsHide.value.toSet flatMap { name: String =>
+      if(name == ".") hardcoded.commonConversionTargets
+      else Set(name)
+    }
+  }
+
+  def appendIndex(url: String): String = url.stripSuffix("index.html").stripSuffix("/") + "/index.html"
+
+  lazy val extUrlMapping: Map[String, String] = docExternalDoc.value flatMap { s =>
+    val idx = s.indexOf("#")
+    if (idx > 0) {
+      val (first, last) = s.splitAt(idx)
+      Some(new File(first).getAbsolutePath -> appendIndex(last.substring(1)))
+    } else {
+      error(s"Illegal -doc-external-doc option; expected a pair with '#' separator, found: '$s'")
+      None
+    }
+  } toMap
+
+  /**
+   *  This is the hardcoded area of Scaladoc. This is where "undesirable" stuff gets eliminated. I know it's not pretty,
+   *  but ultimately scaladoc has to be useful. :)
+   */
+  object hardcoded {
+
+    /** The common context bounds and some human-readable explanations. Feel free to add more explanations.
+     *  `<root>.scala.package.Numeric` is the type class
+     *  `tparam` is the name of the type parameter it gets (this only describes type classes with 1 type param)
+     *  the function result should be a human-understandable description of the type class
+     */
+    val knownTypeClasses: Map[String, String => String] = Map() +
+      ("scala.math.Numeric"                     -> ((tparam: String) => tparam + " is a numeric class, such as Int, Long, Float or Double")) +
+      ("scala.math.Integral"                    -> ((tparam: String) => tparam + " is an integral numeric class, such as Int or Long")) +
+      ("scala.math.Fractional"                  -> ((tparam: String) => tparam + " is a fractional numeric class, such as Float or Double")) +
+      ("scala.reflect.Manifest"                 -> ((tparam: String) => tparam + " is accompanied by a Manifest, which is a runtime representation of its type that survives erasure")) +
+      ("scala.reflect.ClassManifest"            -> ((tparam: String) => tparam + " is accompanied by a ClassManifest, which is a runtime representation of its type that survives erasure")) +
+      ("scala.reflect.OptManifest"              -> ((tparam: String) => tparam + " is accompanied by an OptManifest, which can be either a runtime representation of its type or the NoManifest, which means the runtime type is not available")) +
+      ("scala.reflect.ClassTag"                 -> ((tparam: String) => tparam + " is accompanied by a ClassTag, which is a runtime representation of its type that survives erasure")) +
+      ("scala.reflect.api.TypeTags.WeakTypeTag" -> ((tparam: String) => tparam + " is accompanied by an WeakTypeTag, which is a runtime representation of its type that survives erasure")) +
+      ("scala.reflect.api.TypeTags.TypeTag"     -> ((tparam: String) => tparam + " is accompanied by a TypeTag, which is a runtime representation of its type that survives erasure"))
+
+    /**
+     * Set of classes to exclude from index and diagrams
+     * TODO: Should be configurable
+     */
+    def isExcluded(qname: String) = {
+      ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") ||
+         qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction")
+       ) && !(
+        qname == "scala.Tuple1" || qname == "scala.Tuple2" ||
+        qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" ||
+        qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" ||
+        qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" ||
+        qname == "scala.runtime.AbstractFunction2"
+      )
+     )
+    }
+
+    /** Common conversion targets that affect any class in Scala */
+    val commonConversionTargets = Set(
+      "scala.Predef.StringFormat",
+      "scala.Predef.any2stringadd",
+      "scala.Predef.ArrowAssoc",
+      "scala.Predef.Ensuring",
+      "scala.collection.TraversableOnce.alternateImplicit")
+
+    /** There's a reason all these are specialized by hand, but documenting each of them is beside the point */
+    val arraySkipConversions = List(
+      "scala.Predef.refArrayOps",
+      "scala.Predef.intArrayOps",
+      "scala.Predef.doubleArrayOps",
+      "scala.Predef.longArrayOps",
+      "scala.Predef.floatArrayOps",
+      "scala.Predef.charArrayOps",
+      "scala.Predef.byteArrayOps",
+      "scala.Predef.shortArrayOps",
+      "scala.Predef.booleanArrayOps",
+      "scala.Predef.unitArrayOps",
+      "scala.LowPriorityImplicits.wrapRefArray",
+      "scala.LowPriorityImplicits.wrapIntArray",
+      "scala.LowPriorityImplicits.wrapDoubleArray",
+      "scala.LowPriorityImplicits.wrapLongArray",
+      "scala.LowPriorityImplicits.wrapFloatArray",
+      "scala.LowPriorityImplicits.wrapCharArray",
+      "scala.LowPriorityImplicits.wrapByteArray",
+      "scala.LowPriorityImplicits.wrapShortArray",
+      "scala.LowPriorityImplicits.wrapBooleanArray",
+      "scala.LowPriorityImplicits.wrapUnitArray",
+      "scala.LowPriorityImplicits.genericWrapArray")
+
+    // included as names, as here we don't have access to a Global with Definitions :(
+    def valueClassList = List("unit", "boolean", "byte", "short", "char", "int", "long", "float", "double")
+    def valueClassFilterPrefixes = List("scala.LowPriorityImplicits", "scala.Predef")
+
+    /** Dirty, dirty, dirty hack: the value params conversions can all kick in -- and they are disambiguated by priority
+     *  but showing priority in scaladoc would make no sense -- so we have to manually remove the conversions that we
+     *  know will never get a chance to kick in. Anyway, DIRTY DIRTY DIRTY! */
+    def valueClassFilter(value: String, conversionName: String): Boolean = {
+      val valueName = value.toLowerCase
+      val otherValues = valueClassList.filterNot(_ == valueName)
+
+      for (prefix <- valueClassFilterPrefixes)
+        if (conversionName.startsWith(prefix))
+          for (otherValue <- otherValues)
+            if (conversionName.startsWith(prefix + "." + otherValue))
+              return false
+
+      true
+    }
+  }
+}
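
A small sketch of how the scaladoc-specific settings behave once command-line
arguments are parsed; the option values below are illustrative:

    import scala.tools.nsc.doc.Settings

    object ScaladocSettingsDemo {
      def main(args: Array[String]): Unit = {
        val s = new Settings(msg => Console.err.println(msg))
        // processArgumentString is inherited from the compiler's Settings.
        s.processArgumentString("-doc-title Demo -diagrams -skip-packages demo.internal:demo.impl")
        println(s.doctitle.value)                   // Demo
        println(s.docDiagrams.value)                // true
        println(s.skipPackage("demo.internal"))     // true
        println(s.isScaladocSpecific("-diagrams"))  // true
      }
    }
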
diff --git a/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala
new file mode 100644
index 0000000..ea45ca1
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala
@@ -0,0 +1,51 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package doc
+import scala.language.implicitConversions
+import scala.language.postfixOps
+
+/** Some glue between DocParser (which reads source files which can't be compiled)
+ *  and the scaladoc model.
+ */
+trait Uncompilable {
+  val global: Global
+  val settings: Settings
+
+  import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, DocComment, NoSymbol }
+  import global.definitions.AnyRefClass
+  import global.rootMirror.RootClass
+
+  private implicit def translateName(name: Global#Name) =
+    if (name.isTypeName) newTypeName("" + name) else newTermName("" + name)
+
+  def docSymbol(p: DocParser.Parsed) = p.nameChain.foldLeft(RootClass: Symbol)(_.tpe member _)
+  def docDefs(code: String)          = new DocParser(settings, reporter) docDefs code
+  def docPairs(code: String)         = docDefs(code) map (p => (docSymbol(p), new DocComment(p.raw)))
+
+  lazy val pairs = files flatMap { f =>
+    val comments = docPairs(f.slurp())
+    if (settings.verbose)
+      inform("Found %d doc comments in parse-only file %s: %s".format(comments.size, f, comments.map(_._1).mkString(", ")))
+
+    comments
+  }
+  def files     = settings.uncompilableFiles
+  def symbols   = pairs map (_._1)
+  def templates = symbols filter (x => x.isClass || x.isTrait || x == AnyRefClass/* which is now a type alias */) toSet
+  def comments = {
+    if (settings.debug || settings.verbose)
+      inform("Found %d uncompilable files: %s".format(files.size, files mkString ", "))
+
+    if (pairs.isEmpty)
+      warning("no doc comments read from " + settings.docUncompilable.value)
+
+    pairs
+  }
+  override def toString = pairs.size + " uncompilable symbols:\n" + (
+    symbols filterNot (_ == NoSymbol) map (x => "  " + x.owner.fullName + " " + x.defString) mkString "\n"
+  )
+}
diff --git a/src/compiler/scala/tools/nsc/doc/Universe.scala b/src/scaladoc/scala/tools/nsc/doc/Universe.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/Universe.scala
rename to src/scaladoc/scala/tools/nsc/doc/Universe.scala
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
new file mode 100755
index 0000000..a933c35
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
@@ -0,0 +1,932 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package base
+
+import base.comment._
+import scala.collection._
+import scala.util.matching.Regex
+import scala.reflect.internal.util.Position
+import scala.language.postfixOps
+
+/** The comment parser transforms raw comment strings into `Comment` objects.
+  * Call `parse` to run the parser. Note that the parser is stateless and
+  * should only be built once for a given Scaladoc run.
+  *
+  * @author Manohar Jonnalagedda
+  * @author Gilles Dubochet */
+trait CommentFactoryBase { this: MemberLookupBase =>
+
+  val global: Global
+  import global.{ reporter, Symbol, NoSymbol }
+
+  /* Creates comments with necessary arguments */
+  def createComment (
+    body0:           Option[Body]     = None,
+    authors0:        List[Body]       = List.empty,
+    see0:            List[Body]       = List.empty,
+    result0:         Option[Body]     = None,
+    throws0:         Map[String,Body] = Map.empty,
+    valueParams0:    Map[String,Body] = Map.empty,
+    typeParams0:     Map[String,Body] = Map.empty,
+    version0:        Option[Body]     = None,
+    since0:          Option[Body]     = None,
+    todo0:           List[Body]       = List.empty,
+    deprecated0:     Option[Body]     = None,
+    note0:           List[Body]       = List.empty,
+    example0:        List[Body]       = List.empty,
+    constructor0:    Option[Body]     = None,
+    source0:         Option[String]   = None,
+    inheritDiagram0: List[String]     = List.empty,
+    contentDiagram0: List[String]     = List.empty,
+    group0:          Option[Body]     = None,
+    groupDesc0:      Map[String,Body] = Map.empty,
+    groupNames0:     Map[String,Body] = Map.empty,
+    groupPrio0:      Map[String,Body] = Map.empty
+  ) : Comment = new Comment{
+    val body           = if(body0 isDefined) body0.get else Body(Seq.empty)
+    val authors        = authors0
+    val see            = see0
+    val result         = result0
+    val throws         = throws0
+    val valueParams    = valueParams0
+    val typeParams     = typeParams0
+    val version        = version0
+    val since          = since0
+    val todo           = todo0
+    val deprecated     = deprecated0
+    val note           = note0
+    val example        = example0
+    val constructor    = constructor0
+    val inheritDiagram = inheritDiagram0
+    val contentDiagram = contentDiagram0
+    val groupDesc      = groupDesc0
+    val group          =
+      group0 match {
+        case Some(Body(List(Paragraph(Chain(List(Summary(Text(groupId)))))))) => Some(groupId.toString.trim)
+        case _                                                                => None
+      }
+    val groupPrio      = groupPrio0 flatMap {
+      case (group, body) =>
+        try {
+          body match {
+            case Body(List(Paragraph(Chain(List(Summary(Text(prio))))))) => List(group -> prio.trim.toInt)
+            case _                                                       => List()
+          }
+        } catch {
+          case _: java.lang.NumberFormatException => List()
+        }
+    }
+    val groupNames     = groupNames0 flatMap {
+      case (group, body) =>
+        try {
+          body match {
+            case Body(List(Paragraph(Chain(List(Summary(Text(name))))))) if (!name.trim.contains("\n")) => List(group -> (name.trim))
+            case _                                                       => List()
+          }
+        } catch {
+          case _: java.lang.NumberFormatException => List()
+        }
+    }
+
+  }
+
+  private val endOfText = '\u0003'
+  private val endOfLine = '\u000A'
+
+  /** Something that should not have happened, happened, and Scaladoc should exit. */
+  private def oops(msg: String): Nothing =
+    throw FatalError("program logic: " + msg)
+
+  /** The body of a line, dropping the (optional) start star-marker,
+    * one leading whitespace and all trailing whitespace. */
+  private val CleanCommentLine =
+    new Regex("""(?:\s*\*\s?)?(.*)""")
+
+  /** Dangerous HTML tags that should be replaced by something safer,
+    * such as wiki syntax, or that should be dropped. */
+  private val DangerousTags =
+    new Regex("""<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>|<!--.*-->""")
+
+  /** Maps a dangerous HTML tag to a safe wiki replacement, or an empty string
+    * if it cannot be salvaged. */
+  private def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match {
+    case "p" | "div" => "\n\n"
+    case "h1"  => "\n= "
+    case "/h1" => " =\n"
+    case "h2"  => "\n== "
+    case "/h2" => " ==\n"
+    case "h3"  => "\n=== "
+    case "/h3" => " ===\n"
+    case "h4" | "h5" | "h6" => "\n==== "
+    case "/h4" | "/h5" | "/h6" => " ====\n"
+    case "li" => "\n *  - "
+    case _ => ""
+  }
+
+  /** Javadoc tags that should be replaced by something useful, such as wiki
+    * syntax, or that should be dropped. */
+  private val JavadocTags =
+    new Regex("""\{\@(code|docRoot|inheritDoc|link|linkplain|literal|value)([^}]*)\}""")
+
+  /** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */
+  private def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match {
+    case "code" => "`" + mtch.group(2) + "`"
+    case "docRoot"  => ""
+    case "inheritDoc" => ""
+    case "link"  => "`" + mtch.group(2) + "`"
+    case "linkplain" => "`" + mtch.group(2) + "`"
+    case "literal"  => mtch.group(2)
+    case "value" => "`" + mtch.group(2) + "`"
+    case _ => ""
+  }
+
+  /** Safe HTML tags that can be kept. */
+  private val SafeTags =
+    new Regex("""((&\w+;)|(&#\d+;)|(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|cite|code|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>))""")
+
+  private val safeTagMarker = '\u000E'
+
+  /** A Scaladoc tag not linked to a symbol and not followed by text */
+  private val SingleTagRegex =
+    new Regex("""\s*@(\S+)\s*""")
+
+  /** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */
+  private val SimpleTagRegex =
+    new Regex("""\s*@(\S+)\s+(.*)""")
+
+  /** A Scaladoc tag linked to a symbol. Returns the name of the tag, the name
+    * of the symbol, and the rest of the line. */
+  private val SymbolTagRegex =
+    new Regex("""\s*@(param|tparam|throws|groupdesc|groupname|groupprio)\s+(\S*)\s*(.*)""")
+
+  /** The start of a scaladoc code block */
+  private val CodeBlockStartRegex =
+    new Regex("""(.*?)((?:\{\{\{)|(?:\u000E<pre(?: [^>]*)?>\u000E))(.*)""")
+
+  /** The end of a scaladoc code block */
+  private val CodeBlockEndRegex =
+    new Regex("""(.*?)((?:\}\}\})|(?:\u000E</pre>\u000E))(.*)""")
+
+  /** A key used for a tag map. The key is built from the name of the tag and
+    * from the linked symbol if the tag has one.
+    * Equality on tag keys is structural. */
+  private sealed abstract class TagKey {
+    def name: String
+  }
+
+  private final case class SimpleTagKey(name: String) extends TagKey
+  private final case class SymbolTagKey(name: String, symbol: String) extends TagKey
+
+  /** Parses a raw comment string into a `Comment` object.
+    * @param comment The expanded comment string (including start and end markers) to be parsed.
+    * @param src     The raw comment source string.
+    * @param pos     The position of the comment in source. */
+  protected def parseAtSymbol(comment: String, src: String, pos: Position, site: Symbol = NoSymbol): Comment = {
+    /** The cleaned raw comment as a list of lines. Cleaning removes comment
+      * start and end markers, line start markers, and unnecessary whitespace. */
+    def clean(comment: String): List[String] = {
+      def cleanLine(line: String): String = {
+        //replaceAll removes trailing whitespaces
+        line.replaceAll("""\s+$""", "") match {
+          case CleanCommentLine(ctl) => ctl
+          case tl => tl
+        }
+      }
+      val strippedComment = comment.trim.stripPrefix("/*").stripSuffix("*/")
+      val safeComment = DangerousTags.replaceAllIn(strippedComment, { htmlReplacement(_) })
+      val javadoclessComment = JavadocTags.replaceAllIn(safeComment, { javadocReplacement(_) })
+      val markedTagComment =
+        SafeTags.replaceAllIn(javadoclessComment, { mtch =>
+          java.util.regex.Matcher.quoteReplacement(safeTagMarker + mtch.matched + safeTagMarker)
+        })
+      markedTagComment.lines.toList map (cleanLine(_))
+    }
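+    // Illustrative note, not part of the upstream source: clean() strips the comment
+    // delimiters ("/**" and "*/") and the per-line " * " prefixes, rewrites dangerous
+    // HTML and javadoc tags via the replacements above, and wraps every safe HTML tag
+    // in safeTagMarker characters so the wiki parser can treat it as an opaque token.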
+
+    /** Parses a comment (in the form of a list of lines) to a `Comment`
+      * instance, recursively on lines. To do so, it splits the whole comment
+      * into main body and tag bodies, then runs the `WikiParser` on each body
+      * before creating the comment instance.
+      *
+      * @param docBody     The body of the comment parsed until now.
+      * @param tags        All tags parsed until now.
+      * @param lastTagKey  The last parsed tag, or `None` if the tag section hasn't started. Lines that are not tagged
+      *                    are part of the previous tag or, if none exists, of the body.
+      * @param remaining   The lines that must still recursively be parsed.
+      * @param inCodeBlock Whether the next line is part of a code block (in which no tags must be read). */
+    def parse0 (
+      docBody: StringBuilder,
+      tags: Map[TagKey, List[String]],
+      lastTagKey: Option[TagKey],
+      remaining: List[String],
+      inCodeBlock: Boolean
+    ): Comment = remaining match {
+
+      case CodeBlockStartRegex(before, marker, after) :: ls if (!inCodeBlock) =>
+        if (!before.trim.isEmpty && !after.trim.isEmpty)
+          parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = false)
+        else if (!before.trim.isEmpty)
+          parse0(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = false)
+        else if (!after.trim.isEmpty)
+          parse0(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = true)
+        else lastTagKey match {
+          case Some(key) =>
+            val value =
+              ((tags get key): @unchecked) match {
+                case Some(b :: bs) => (b + endOfLine + marker) :: bs
+                case None => oops("lastTagKey set when no tag exists for key")
+              }
+            parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = true)
+          case None =>
+            parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = true)
+        }
+
+      case CodeBlockEndRegex(before, marker, after) :: ls =>
+        if (!before.trim.isEmpty && !after.trim.isEmpty)
+          parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = true)
+        else if (!before.trim.isEmpty)
+          parse0(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = true)
+        else if (!after.trim.isEmpty)
+          parse0(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = false)
+        else lastTagKey match {
+          case Some(key) =>
+            val value =
+              ((tags get key): @unchecked) match {
+                case Some(b :: bs) => (b + endOfLine + marker) :: bs
+                case None => oops("lastTagKey set when no tag exists for key")
+              }
+            parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = false)
+          case None =>
+            parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = false)
+        }
+
+      case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) =>
+        val key = SymbolTagKey(name, sym)
+        val value = body :: tags.getOrElse(key, Nil)
+        parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+
+      case SimpleTagRegex(name, body) :: ls if (!inCodeBlock) =>
+        val key = SimpleTagKey(name)
+        val value = body :: tags.getOrElse(key, Nil)
+        parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+
+      case SingleTagRegex(name) :: ls if (!inCodeBlock) =>
+        val key = SimpleTagKey(name)
+        val value = "" :: tags.getOrElse(key, Nil)
+        parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+
+      case line :: ls if (lastTagKey.isDefined) =>
+        val key = lastTagKey.get
+        val value =
+          ((tags get key): @unchecked) match {
+            case Some(b :: bs) => (b + endOfLine + line) :: bs
+            case None => oops("lastTagKey set when no tag exists for key")
+          }
+        parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock)
+
+      case line :: ls =>
+        if (docBody.length > 0) docBody append endOfLine
+        docBody append line
+        parse0(docBody, tags, lastTagKey, ls, inCodeBlock)
+
+      case Nil =>
+        // Take the {inheritance, content} diagram keys aside, as they don't need any parsing
+        val inheritDiagramTag = SimpleTagKey("inheritanceDiagram")
+        val contentDiagramTag = SimpleTagKey("contentDiagram")
+
+        val inheritDiagramText: List[String] = tags.get(inheritDiagramTag) match {
+          case Some(list) => list
+          case None => List.empty
+        }
+
+        val contentDiagramText: List[String] = tags.get(contentDiagramTag) match {
+          case Some(list) => list
+          case None => List.empty
+        }
+
+        val stripTags = List(inheritDiagramTag, contentDiagramTag, SimpleTagKey("template"), SimpleTagKey("documentable"))
+        val tagsWithoutDiagram = tags.filterNot(pair => stripTags.contains(pair._1))
+
+        val bodyTags: mutable.Map[TagKey, List[Body]] =
+          mutable.Map(tagsWithoutDiagram mapValues {tag => tag map (parseWikiAtSymbol(_, pos, site))} toSeq: _*)
+
+        def oneTag(key: SimpleTagKey): Option[Body] =
+          ((bodyTags remove key): @unchecked) match {
+            case Some(r :: rs) =>
+              if (!rs.isEmpty) reporter.warning(pos, "Only one '@" + key.name + "' tag is allowed")
+              Some(r)
+            case None => None
+          }
+
+        def allTags(key: SimpleTagKey): List[Body] =
+          (bodyTags remove key) getOrElse Nil
+
+        def allSymsOneTag(key: TagKey): Map[String, Body] = {
+          val keys: Seq[SymbolTagKey] =
+            bodyTags.keys.toSeq flatMap {
+              case stk: SymbolTagKey if (stk.name == key.name) => Some(stk)
+              case stk: SimpleTagKey if (stk.name == key.name) =>
+                reporter.warning(pos, "Tag '@" + stk.name + "' must be followed by a symbol name")
+                None
+              case _ => None
+            }
+          val pairs: Seq[(String, Body)] =
+            for (key <- keys) yield {
+              val bs = (bodyTags remove key).get
+              if (bs.length > 1)
+                reporter.warning(pos, "Only one '@" + key.name + "' tag for symbol " + key.symbol + " is allowed")
+              (key.symbol, bs.head)
+            }
+          Map.empty[String, Body] ++ pairs
+        }
+
+        val com = createComment (
+          body0           = Some(parseWikiAtSymbol(docBody.toString, pos, site)),
+          authors0        = allTags(SimpleTagKey("author")),
+          see0            = allTags(SimpleTagKey("see")),
+          result0         = oneTag(SimpleTagKey("return")),
+          throws0         = allSymsOneTag(SimpleTagKey("throws")),
+          valueParams0    = allSymsOneTag(SimpleTagKey("param")),
+          typeParams0     = allSymsOneTag(SimpleTagKey("tparam")),
+          version0        = oneTag(SimpleTagKey("version")),
+          since0          = oneTag(SimpleTagKey("since")),
+          todo0           = allTags(SimpleTagKey("todo")),
+          deprecated0     = oneTag(SimpleTagKey("deprecated")),
+          note0           = allTags(SimpleTagKey("note")),
+          example0        = allTags(SimpleTagKey("example")),
+          constructor0    = oneTag(SimpleTagKey("constructor")),
+          source0         = Some(clean(src).mkString("\n")),
+          inheritDiagram0 = inheritDiagramText,
+          contentDiagram0 = contentDiagramText,
+          group0          = oneTag(SimpleTagKey("group")),
+          groupDesc0      = allSymsOneTag(SimpleTagKey("groupdesc")),
+          groupNames0     = allSymsOneTag(SimpleTagKey("groupname")),
+          groupPrio0      = allSymsOneTag(SimpleTagKey("groupprio"))
+        )
+
+        for ((key, _) <- bodyTags)
+          reporter.warning(pos, "Tag '@" + key.name + "' is not recognised")
+
+        com
+
+    }
+
+    parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), inCodeBlock = false)
+
+  }
+
+  /** Parses a string containing wiki syntax into a `Body` object.
+    * Note that the string is assumed to be clean:
+    *  - Removed Scaladoc start and end markers.
+    *  - Removed start-of-line star and one whitespace afterwards (if present).
+    *  - Removed all end-of-line whitespace.
+    *  - Only `endOfLine` is used to mark line endings. */
+  def parseWikiAtSymbol(string: String, pos: Position, site: Symbol): Body = new WikiParser(string, pos, site).document()
+
+  /** TODO
+    *
+    * @author Ingo Maier
+    * @author Manohar Jonnalagedda
+    * @author Gilles Dubochet */
+  protected final class WikiParser(val buffer: String, pos: Position, site: Symbol) extends CharReader(buffer) { wiki =>
+    var summaryParsed = false
+
+    def document(): Body = {
+      val blocks = new mutable.ListBuffer[Block]
+      while (char != endOfText)
+        blocks += block()
+      Body(blocks.toList)
+    }
+
+    /* BLOCKS */
+
+    /** {{{ block ::= code | title | hrule | para }}} */
+    def block(): Block = {
+      if (checkSkipInitWhitespace("{{{"))
+        code()
+      else if (checkSkipInitWhitespace('='))
+        title()
+      else if (checkSkipInitWhitespace("----"))
+        hrule()
+      else if (checkList)
+        listBlock
+      else {
+        para()
+      }
+    }
+
+    /** listStyle ::= '-' spc | '1.' spc | 'I.' spc | 'i.' spc | 'A.' spc | 'a.' spc
+      * Characters used to build lists and their constructors */
+    protected val listStyles = Map[String, (Seq[Block] => Block)]( // TODO Should this be defined at some list companion?
+      "- "  -> ( UnorderedList(_) ),
+      "1. " -> ( OrderedList(_,"decimal") ),
+      "I. " -> ( OrderedList(_,"upperRoman") ),
+      "i. " -> ( OrderedList(_,"lowerRoman") ),
+      "A. " -> ( OrderedList(_,"upperAlpha") ),
+      "a. " -> ( OrderedList(_,"lowerAlpha") )
+    )
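+    // Illustrative example, not part of the upstream source: the wiki lines
+    //   " - first item"
+    //   " - second item"
+    // are parsed by listBlock into UnorderedList(Seq(Paragraph(...), Paragraph(...))).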
+
+    /** Checks whether the current line starts with at least one space followed by one of the listStyles */
+    def checkList =
+      (countWhitespace > 0) && (listStyles.keys exists { checkSkipInitWhitespace(_) })
+
+    /** {{{
+      * nListBlock ::= nLine { mListBlock }
+      *      nLine ::= nSpc listStyle para '\n'
+      * }}}
+      * Where n and m stand for the number of spaces. When `m > n`, a new list is nested. */
+    def listBlock: Block = {
+
+      /** Consumes one list item block and returns it, or None if the next block
+        * is not a list item or belongs to a different list. */
+      def listLine(indent: Int, style: String): Option[Block] =
+        if (countWhitespace > indent && checkList)
+          Some(listBlock)
+        else if (countWhitespace != indent || !checkSkipInitWhitespace(style))
+          None
+        else {
+          jumpWhitespace()
+          jump(style)
+          val p = Paragraph(inline(isInlineEnd = false))
+          blockEnded("end of list line ")
+          Some(p)
+        }
+
+      /** Consumes all list item blocks (possibly with nested lists) of the
+        * same list and returns the list block. */
+      def listLevel(indent: Int, style: String): Block = {
+        val lines = mutable.ListBuffer.empty[Block]
+        var line: Option[Block] = listLine(indent, style)
+        while (line.isDefined) {
+          lines += line.get
+          line = listLine(indent, style)
+        }
+        val constructor = listStyles(style)
+        constructor(lines)
+      }
+
+      val indent = countWhitespace
+      val style = (listStyles.keys find { checkSkipInitWhitespace(_) }).getOrElse(listStyles.keys.head)
+      listLevel(indent, style)
+    }
+
+    def code(): Block = {
+      jumpWhitespace()
+      jump("{{{")
+      val str = readUntil("}}}")
+      if (char == endOfText)
+        reportError(pos, "unclosed code block")
+      else
+        jump("}}}")
+      blockEnded("code block")
+      Code(normalizeIndentation(str))
+    }
+
+    /** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) '\n' }}} */
+    def title(): Block = {
+      jumpWhitespace()
+      val inLevel = repeatJump('=')
+      val text = inline(check("=" * inLevel))
+      val outLevel = repeatJump('=', inLevel)
+      if (inLevel != outLevel)
+        reportError(pos, "unbalanced or unclosed heading")
+      blockEnded("heading")
+      Title(text, inLevel)
+    }
+
+    /** {{{ hrule ::= "----" { '-' } '\n' }}} */
+    def hrule(): Block = {
+      jumpWhitespace()
+      repeatJump('-')
+      blockEnded("horizontal rule")
+      HorizontalRule()
+    }
+
+    /** {{{ para ::= inline '\n' }}} */
+    def para(): Block = {
+      val p =
+        if (summaryParsed)
+          Paragraph(inline(isInlineEnd = false))
+        else {
+          val s = summary()
+          val r =
+            if (checkParaEnded()) List(s) else List(s, inline(isInlineEnd = false))
+          summaryParsed = true
+          Paragraph(Chain(r))
+        }
+      while (char == endOfLine && char != endOfText)
+        nextChar()
+      p
+    }
+
+    /* INLINES */
+
+    val OPEN_TAG = "^<([A-Za-z]+)( [^>]*)?(/?)>$".r
+    val CLOSE_TAG = "^</([A-Za-z]+)>$".r
+    private def readHTMLFrom(begin: HtmlTag): String = {
+      val list = mutable.ListBuffer.empty[String]
+      val stack = mutable.ListBuffer.empty[String]
+
+      begin.close match {
+        case Some(HtmlTag(CLOSE_TAG(s))) =>
+          stack += s
+        case _ =>
+          return ""
+      }
+
+      do {
+        val str = readUntil { char == safeTagMarker || char == endOfText }
+        nextChar()
+
+        list += str
+
+        str match {
+          case OPEN_TAG(s, _, standalone) => {
+            if (standalone != "/") {
+              stack += s
+            }
+          }
+          case CLOSE_TAG(s) => {
+            if (s == stack.last) {
+              stack.remove(stack.length-1)
+            }
+          }
+          case _ => ;
+        }
+      } while (stack.length > 0 && char != endOfText)
+
+      list mkString ""
+    }
+
+    def inline(isInlineEnd: => Boolean): Inline = {
+
+      def inline0(): Inline = {
+        if (char == safeTagMarker) {
+          val tag = htmlTag()
+          HtmlTag(tag.data + readHTMLFrom(tag))
+        }
+        else if (check("'''")) bold()
+        else if (check("''")) italic()
+        else if (check("`"))  monospace()
+        else if (check("__")) underline()
+        else if (check("^"))  superscript()
+        else if (check(",,")) subscript()
+        else if (check("[[")) link()
+        else {
+          val str = readUntil { char == safeTagMarker || check("''") || char == '`' || check("__") || char == '^' || check(",,") || check("[[") || isInlineEnd || checkParaEnded || char == endOfLine }
+          Text(str)
+        }
+      }
+
+      val inlines: List[Inline] = {
+        val iss = mutable.ListBuffer.empty[Inline]
+        iss += inline0()
+        while (!isInlineEnd && !checkParaEnded) {
+          val skipEndOfLine = if (char == endOfLine) {
+            nextChar()
+            true
+          } else {
+            false
+          }
+
+          val current = inline0()
+          (iss.last, current) match {
+            case (Text(t1), Text(t2)) if skipEndOfLine =>
+              iss.update(iss.length - 1, Text(t1 + endOfLine + t2))
+            case (i1, i2) if skipEndOfLine =>
+              iss ++= List(Text(endOfLine.toString), i2)
+            case _ => iss += current
+          }
+        }
+        iss.toList
+      }
+
+      inlines match {
+        case Nil => Text("")
+        case i :: Nil => i
+        case is => Chain(is)
+      }
+
+    }
+
+    def htmlTag(): HtmlTag = {
+      jump(safeTagMarker)
+      val read = readUntil(safeTagMarker)
+      if (char != endOfText) jump(safeTagMarker)
+      HtmlTag(read)
+    }
+
+    def bold(): Inline = {
+      jump("'''")
+      val i = inline(check("'''"))
+      jump("'''")
+      Bold(i)
+    }
+
+    def italic(): Inline = {
+      jump("''")
+      val i = inline(check("''"))
+      jump("''")
+      Italic(i)
+    }
+
+    def monospace(): Inline = {
+      jump("`")
+      val i = inline(check("`"))
+      jump("`")
+      Monospace(i)
+    }
+
+    def underline(): Inline = {
+      jump("__")
+      val i = inline(check("__"))
+      jump("__")
+      Underline(i)
+    }
+
+    def superscript(): Inline = {
+      jump("^")
+      val i = inline(check("^"))
+      if (jump("^")) {
+        Superscript(i)
+      } else {
+        Chain(Seq(Text("^"), i))
+      }
+    }
+
+    def subscript(): Inline = {
+      jump(",,")
+      val i = inline(check(",,"))
+      jump(",,")
+      Subscript(i)
+    }
+
+    def summary(): Inline = {
+      val i = inline(check("."))
+      Summary(
+        if (jump("."))
+          Chain(List(i, Text(".")))
+        else
+          i
+      )
+    }
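+    // Illustrative example, not part of the upstream source: for the input
+    // "Does X. More detail." a call to summary() yields
+    // Summary(Chain(List(Text("Does X"), Text(".")))) and leaves " More detail."
+    // to be read by the enclosing paragraph.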
+
+    def link(): Inline = {
+      val SchemeUri = """([a-z]+:.*)""".r
+      jump("[[")
+      val parens = 2 + repeatJump('[')
+      val stop  = "]" * parens
+      //println("link with " + parens + " matching parens")
+      val target = readUntil { check(stop) || check(" ") }
+      val title =
+        if (!check(stop)) Some({
+          jump(" ")
+          inline(check(stop))
+        })
+        else None
+      jump(stop)
+
+      (target, title) match {
+        case (SchemeUri(uri), optTitle) =>
+          Link(uri, optTitle getOrElse Text(uri))
+        case (qualName, optTitle) =>
+          makeEntityLink(optTitle getOrElse Text(target), pos, target, site)
+      }
+    }
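+    // Illustrative example, not part of the upstream source:
+    //   [[http://scala-lang.org Scala]]   parses to Link("http://scala-lang.org", Text("Scala")),
+    //   [[scala.Option the Option class]] goes through makeEntityLink for symbol lookup.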
+
+    /* UTILITY */
+
+    /** {{{ eol ::= { whitespace } '\n' }}} */
+    def blockEnded(blockType: String): Unit = {
+      if (char != endOfLine && char != endOfText) {
+        reportError(pos, "no additional content on same line after " + blockType)
+        jumpUntil(endOfLine)
+      }
+      while (char == endOfLine)
+        nextChar()
+    }
+
+    /**
+     *  Eliminates the common leading spaces in all lines after the first (the first line is trimmed separately).
+     *  For indented pieces of code, it reduces the indent to the least whitespace prefix:
+     *    {{{
+     *       indented example
+     *       another indented line
+     *       if (condition)
+     *         then do something;
+     *       ^ this is the least whitespace prefix
+     *    }}}
+     */
+    def normalizeIndentation(_code: String): String = {
+
+      val code = _code.trim
+      var maxSkip = Integer.MAX_VALUE
+      var crtSkip = 0
+      var wsArea = true
+      var index = 0
+      var firstLine = true
+      var emptyLine = true
+
+      while (index < code.length) {
+        code(index) match {
+          case ' ' =>
+            if (wsArea)
+              crtSkip += 1
+          case c =>
+            wsArea = (c == '\n')
+            maxSkip = if (firstLine || emptyLine) maxSkip else if (maxSkip <= crtSkip) maxSkip else crtSkip
+            crtSkip = if (c == '\n') 0 else crtSkip
+            firstLine = if (c == '\n') false else firstLine
+            emptyLine = if (c == '\n') true else false
+        }
+        index += 1
+      }
+
+      if (maxSkip == 0)
+        code
+      else {
+        index = 0
+        val builder = new StringBuilder
+        while (index < code.length) {
+          builder.append(code(index))
+          if (code(index) == '\n') {
+            // skip as many spaces as are available; if there are fewer (as on empty
+            // lines), do not over-consume them
+            index += 1
+            val limit = index + maxSkip
+            while ((index < code.length) && (code(index) == ' ') && index < limit)
+              index += 1
+          }
+          else
+            index += 1
+        }
+        builder.toString
+      }
+    }
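+    // Illustrative example, not part of the upstream source: for the already-trimmed
+    // block "val x = 1\n  val y = x + 1" the least whitespace prefix of the later
+    // lines is two spaces, so the result is "val x = 1\nval y = x + 1".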
+
+    def checkParaEnded(): Boolean = {
+      (char == endOfText) ||
+      ((char == endOfLine) && {
+        val poff = offset
+        nextChar() // read EOL
+        val ok = {
+          checkSkipInitWhitespace(endOfLine) ||
+          checkSkipInitWhitespace('=') ||
+          checkSkipInitWhitespace("{{{") ||
+          checkList ||
+          checkSkipInitWhitespace('\u003D')
+        }
+        offset = poff
+        ok
+      })
+    }
+
+    def reportError(pos: Position, message: String) {
+      reporter.warning(pos, message)
+    }
+  }
+
+  protected sealed class CharReader(buffer: String) { reader =>
+
+    var offset: Int = 0
+    def char: Char =
+      if (offset >= buffer.length) endOfText else buffer charAt offset
+
+    final def nextChar() {
+      offset += 1
+    }
+
+    final def check(chars: String): Boolean = {
+      val poff = offset
+      val ok = jump(chars)
+      offset = poff
+      ok
+    }
+
+    def checkSkipInitWhitespace(c: Char): Boolean = {
+      val poff = offset
+      jumpWhitespace()
+      val ok = jump(c)
+      offset = poff
+      ok
+    }
+
+    def checkSkipInitWhitespace(chars: String): Boolean = {
+      val poff = offset
+      jumpWhitespace()
+      val (ok0, chars0) =
+        if (chars.charAt(0) == ' ')
+          (offset > poff, chars substring 1)
+        else
+          (true, chars)
+      val ok = ok0 && jump(chars0)
+      offset = poff
+      ok
+    }
+
+    def countWhitespace: Int = {
+      var count = 0
+      val poff = offset
+      while (isWhitespace(char) && char != endOfText) {
+        nextChar()
+        count += 1
+      }
+      offset = poff
+      count
+    }
+
+    /* JUMPERS */
+
+    /** jumps a character and consumes it
+      * @return true only if the correct character has been jumped */
+    final def jump(ch: Char): Boolean = {
+      if (char == ch) {
+        nextChar()
+        true
+      }
+      else false
+    }
+
+    /** jumps all the characters in chars, consuming them in the process.
+      * @return true only if the correct characters have been jumped */
+    final def jump(chars: String): Boolean = {
+      var index = 0
+      while (index < chars.length && char == chars.charAt(index) && char != endOfText) {
+        nextChar()
+        index += 1
+      }
+      index == chars.length
+    }
+
+    final def repeatJump(c: Char, max: Int = Int.MaxValue): Int = {
+      var count = 0
+      while (jump(c) && count < max)
+        count += 1
+      count
+    }
+
+    final def jumpUntil(ch: Char): Int = {
+      var count = 0
+      while (char != ch && char != endOfText) {
+        nextChar()
+        count += 1
+      }
+      count
+    }
+
+    final def jumpUntil(pred: => Boolean): Int = {
+      var count = 0
+      while (!pred && char != endOfText) {
+        nextChar()
+        count += 1
+      }
+      count
+    }
+
+    def jumpWhitespace() = jumpUntil(!isWhitespace(char))
+
+    /* READERS */
+
+    final def readUntil(c: Char): String = {
+      withRead {
+        while (char != c && char != endOfText) {
+          nextChar()
+        }
+      }
+    }
+
+    final def readUntil(chars: String): String = {
+      assert(chars.length > 0)
+      withRead {
+        val c = chars.charAt(0)
+        while (!check(chars) && char != endOfText) {
+          nextChar()
+          while (char != c && char != endOfText)
+            nextChar()
+        }
+      }
+    }
+
+    final def readUntil(pred: => Boolean): String = {
+      withRead {
+        while (char != endOfText && !pred) {
+          nextChar()
+        }
+      }
+    }
+
+    private def withRead(read: => Unit): String = {
+      val start = offset
+      read
+      buffer.substring(start, offset)
+    }
+
+    /* CHARS CLASSES */
+
+    def isWhitespace(c: Char) = c == ' ' || c == '\t'
+  }
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala b/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala
new file mode 100755
index 0000000..3d80f9d
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala
@@ -0,0 +1,13 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ */
+
+package scala.tools.nsc
+package doc
+package base
+
+sealed trait LinkTo
+final case class LinkToMember[Mbr, Tpl](mbr: Mbr, tpl: Tpl) extends LinkTo
+final case class LinkToTpl[Tpl](tpl: Tpl) extends LinkTo
+final case class LinkToExternal(name: String, url: String) extends LinkTo
+final case class Tooltip(name: String) extends LinkTo
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
new file mode 100755
index 0000000..cc217d2
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
@@ -0,0 +1,202 @@
+package scala.tools.nsc
+package doc
+package base
+
+import comment._
+
+/** This trait extracts all required information for documentation from compilation units.
+ *  The base trait has been extracted to allow getting light-weight documentation
+ *  for a particular symbol in the IDE. */
+trait MemberLookupBase {
+
+  val global: Global
+  import global._
+
+  def internalLink(sym: Symbol, site: Symbol): Option[LinkTo]
+  def chooseLink(links: List[LinkTo]): LinkTo
+  def toString(link: LinkTo): String
+  def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal]
+  def warnNoLink: Boolean
+
+  import global._
+  import rootMirror.{RootPackage, EmptyPackage}
+
+  private def isRoot(s: Symbol) = (s eq NoSymbol) || s.isRootSymbol || s.isEmptyPackage || s.isEmptyPackageClass
+
+  def makeEntityLink(title: Inline, pos: Position, query: String, site: Symbol) =
+    new EntityLink(title) { lazy val link = memberLookup(pos, query, site) }
+
+  private var showExplanation = true
+  private def explanation: String =
+    if (showExplanation) {
+      showExplanation = false
+      """
+      |Quick crash course on using Scaladoc links
+      |==========================================
+      |Disambiguating terms and types: Prefix terms with '$' and types with '!' in case both names are in use:
+      | - [[scala.collection.immutable.List!.apply class List's apply method]] and
+      | - [[scala.collection.immutable.List$.apply object List's apply method]]
+      |Disambiguating overloaded members: If a term is overloaded, you can indicate the first part of its signature followed by *:
+      | - [[[scala.collection.immutable.List$.fill[A](Int)(⇒A):List[A]* Fill with a single parameter]]]
+      | - [[[scala.collection.immutable.List$.fill[A](Int,Int)(⇒A):List[List[A]]* Fill with two parameters]]]
+      |Notes:
+      | - you can use any number of matching square brackets to avoid interference with the signature
+      | - you can use \\. to escape dots in prefixes (don't forget to use * at the end to match the signature!)
+      | - you can use \\# to escape hashes, otherwise they will be considered as delimiters, like dots.""".stripMargin
+    } else ""
+
+  def memberLookup(pos: Position, query: String, site: Symbol): LinkTo = {
+    val members = breakMembers(query)
+
+    // (1) First look in the root package, as most of the links are qualified
+    val fromRoot = lookupInRootPackage(pos, members)
+
+    // (2) Or recursively go into each containing template.
+    val fromParents = Stream.iterate(site)(_.owner) takeWhile (!isRoot(_)) map (lookupInTemplate(pos, members, _))
+
+    val syms = (fromRoot +: fromParents) find (!_.isEmpty) getOrElse Nil
+
+    val links = syms flatMap { case (sym, site) => internalLink(sym, site) } match {
+      case Nil =>
+        // (3) Look at external links
+        syms.flatMap { case (sym, owner) =>
+          // reconstruct the original link
+          def linkName(sym: Symbol) = {
+            def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.isPackage) "$" else "")
+            val packageSuffix = if (sym.isPackage) ".package" else ""
+
+            sym.ownerChain.reverse.filterNot(isRoot(_)).map(nameString(_)).mkString(".") + packageSuffix
+          }
+
+          if (sym.isClass || sym.isModule || sym.isTrait || sym.isPackage)
+            findExternalLink(sym, linkName(sym))
+          else if (owner.isClass || owner.isModule || owner.isTrait || owner.isPackage)
+            findExternalLink(sym, linkName(owner) + "@" + externalSignature(sym))
+          else
+            None
+        }
+      case links => links
+    }
+    links match {
+      case Nil =>
+        if (warnNoLink)
+          reporter.warning(pos, "Could not find any member to link for \"" + query + "\".")
+        // (4) if we still haven't found anything, create a tooltip
+        Tooltip(query)
+      case List(l) => l
+      case links =>
+        val chosen = chooseLink(links)
+        def linkToString(link: LinkTo) = {
+          val chosenInfo =
+            if (link == chosen) " [chosen]" else ""
+          toString(link) + chosenInfo + "\n"
+        }
+        if (warnNoLink) {
+          val allLinks = links.map(linkToString).mkString
+          reporter.warning(pos,
+            s"""The link target \"$query\" is ambiguous. Several members fit the target:
+            |$allLinks
+            |$explanation""".stripMargin)
+        }
+        chosen
+    }
+  }
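+  // Illustrative note, not part of the upstream source: a qualified link such as
+  // [[scala.collection.immutable.List]] is normally resolved by step (1) in the root
+  // package; an unqualified [[List]] written inside scala.collection falls back to
+  // step (2), walking the owner chain of the call site; if no symbol matches, step (3)
+  // tries external links and step (4) degrades to a plain Tooltip.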
+
+  private sealed trait SearchStrategy
+  private case object BothTypeAndTerm extends SearchStrategy
+  private case object OnlyType extends SearchStrategy
+  private case object OnlyTerm extends SearchStrategy
+
+  private def lookupInRootPackage(pos: Position, members: List[String]) =
+    lookupInTemplate(pos, members, EmptyPackage) ::: lookupInTemplate(pos, members, RootPackage)
+
+  private def lookupInTemplate(pos: Position, members: List[String], container: Symbol): List[(Symbol, Symbol)] = {
+    // Maintaining compatibility with previous links is a bit tricky here:
+    // we have a preference for term names for all terms except for the last, where we prefer a class:
+    // How to do this:
+    //  - at each step we do a DFS search with the preferred strategy
+    //  - if the search doesn't return any members, we backtrack on the last decision
+    //     * we look for terms with the last member's name
+    //     * we look for types with the same name, all the way up
+    val result = members match {
+      case Nil => Nil
+      case mbrName::Nil =>
+        var syms = lookupInTemplate(pos, mbrName, container, OnlyType) map ((_, container))
+        if (syms.isEmpty)
+          syms = lookupInTemplate(pos, mbrName, container, OnlyTerm) map ((_, container))
+        syms
+
+      case tplName::rest =>
+        def completeSearch(syms: List[Symbol]) =
+          syms flatMap (lookupInTemplate(pos, rest, _))
+
+        completeSearch(lookupInTemplate(pos, tplName, container, OnlyTerm)) match {
+          case Nil => completeSearch(lookupInTemplate(pos, tplName, container, OnlyType))
+          case syms => syms
+        }
+    }
+    //println("lookupInTemplate(" + members + ", " + container + ") => " + result)
+    result
+  }
+
+  private def lookupInTemplate(pos: Position, member: String, container: Symbol, strategy: SearchStrategy): List[Symbol] = {
+    val name = member.stripSuffix("$").stripSuffix("!").stripSuffix("*")
+    def signatureMatch(sym: Symbol): Boolean = externalSignature(sym).startsWith(name)
+
+    // We need to clean up the bogus classes created by the .class file parser. For example, [[scala.Predef]] resolves
+    // to (bogus) class scala.Predef loaded by the class loader -- which we need to eliminate by looking at the info
+    // and removing NoType classes
+    def cleanupBogusClasses(syms: List[Symbol]) = { syms.filter(_.info != NoType) }
+
+    def syms(name: Name) = container.info.nonPrivateMember(name.encodedName).alternatives
+    def termSyms = cleanupBogusClasses(syms(newTermName(name)))
+    def typeSyms = cleanupBogusClasses(syms(newTypeName(name)))
+
+    val result = if (member.endsWith("$"))
+      termSyms
+    else if (member.endsWith("!"))
+      typeSyms
+    else if (member.endsWith("*"))
+      cleanupBogusClasses(container.info.nonPrivateDecls) filter signatureMatch
+    else
+      strategy match {
+        case BothTypeAndTerm => termSyms ::: typeSyms
+        case OnlyType => typeSyms
+        case OnlyTerm => termSyms
+      }
+
+    //println("lookupInTemplate(" + member + ", " + container + ") => " + result)
+    result
+  }
+
+  private def breakMembers(query: String): List[String] = {
+    // How does this work? We split on unescaped '.' and '#', but not on the escaped forms '\.' and '\#',
+    // roughly: query.split("""(?<=[^\\])[\.#]""").map(_.replaceAll("""\\([#\.])""", "$1"))
+    // The same code, just faster:
+    var members = List[String]()
+    var index = 0
+    var last_index = 0
+    val length = query.length
+    while (index < length) {
+      if ((query.charAt(index) == '.' || query.charAt(index) == '#') &&
+          ((index == 0) || (query.charAt(index-1) != '\\'))) {
+
+        val member = query.substring(last_index, index).replaceAll("\\\\([#\\.])", "$1")
+        // we want to allow javadoc-style links [[#member]] -- which requires us to remove empty members from the first
+        // element in the list
+        if ((member != "") || (!members.isEmpty))
+          members ::= member
+        last_index = index + 1
+      }
+      index += 1
+    }
+    if (last_index < length)
+      members ::= query.substring(last_index, length).replaceAll("\\\\\\.", ".")
+    members.reverse
+  }
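+  // Illustrative example, not part of the upstream source:
+  //   breakMembers("scala.collection.Seq#apply") == List("scala", "collection", "Seq", "apply")
+  // while an escaped dot, as in "scala.Predef\.any2stringadd", keeps
+  // "Predef.any2stringadd" as a single member.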
+
+  def externalSignature(sym: Symbol) = {
+    sym.info // force it, otherwise we see lazy types
+    (sym.nameString + sym.signatureString).replaceAll("\\s", "")
+  }
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala
new file mode 100755
index 0000000..ac5fec8
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala
@@ -0,0 +1,93 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package base
+package comment
+
+import scala.collection._
+
+/** A body of text. A comment has a single body, which is composed of
+  * at least one block. Inside every body is exactly one summary (see
+  * [[scala.tools.nsc.doc.base.comment.Summary]]). */
+final case class Body(blocks: Seq[Block]) {
+
+  /** The summary text of the comment body. */
+  lazy val summary: Option[Inline] = {
+    def summaryInBlock(block: Block): Seq[Inline] = block match {
+      case Title(text, _)        => summaryInInline(text)
+      case Paragraph(text)       => summaryInInline(text)
+      case UnorderedList(items)  => items flatMap summaryInBlock
+      case OrderedList(items, _) => items flatMap summaryInBlock
+      case DefinitionList(items) => items.values.toSeq flatMap summaryInBlock
+      case _                     => Nil
+    }
+    def summaryInInline(text: Inline): Seq[Inline] = text match {
+      case Summary(text)     => List(text)
+      case Chain(items)      => items flatMap summaryInInline
+      case Italic(text)      => summaryInInline(text)
+      case Bold(text)        => summaryInInline(text)
+      case Underline(text)   => summaryInInline(text)
+      case Superscript(text) => summaryInInline(text)
+      case Subscript(text)   => summaryInInline(text)
+      case Link(_, title)    => summaryInInline(title)
+      case _                 => Nil
+    }
+    (blocks flatMap { summaryInBlock(_) }).toList match {
+      case Nil => None
+      case inline :: Nil => Some(inline)
+      case inlines => Some(Chain(inlines))
+    }
+  }
+}
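+// Illustrative example, not part of the upstream source: for
+//   Body(Seq(Paragraph(Chain(Seq(Summary(Text("Adds two numbers")), Text(" More detail."))))))
+// the summary value is Some(Text("Adds two numbers")).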
+
+/** A block-level element of text, such as a paragraph or code block. */
+sealed abstract class Block
+
+final case class Title(text: Inline, level: Int) extends Block
+final case class Paragraph(text: Inline) extends Block
+final case class Code(data: String) extends Block
+final case class UnorderedList(items: Seq[Block]) extends Block
+final case class OrderedList(items: Seq[Block], style: String) extends Block
+final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block
+final case class HorizontalRule() extends Block
+
+/** A section of text inside a block, possibly with formatting. */
+sealed abstract class Inline
+
+final case class Chain(items: Seq[Inline]) extends Inline
+final case class Italic(text: Inline) extends Inline
+final case class Bold(text: Inline) extends Inline
+final case class Underline(text: Inline) extends Inline
+final case class Superscript(text: Inline) extends Inline
+final case class Subscript(text: Inline) extends Inline
+final case class Link(target: String, title: Inline) extends Inline
+final case class Monospace(text: Inline) extends Inline
+final case class Text(text: String) extends Inline
+abstract class EntityLink(val title: Inline) extends Inline { def link: LinkTo }
+object EntityLink {
+  def apply(title: Inline, linkTo: LinkTo) = new EntityLink(title) { def link: LinkTo = linkTo }
+  def unapply(el: EntityLink): Option[(Inline, LinkTo)] = Some((el.title, el.link))
+}
+final case class HtmlTag(data: String) extends Inline {
+  private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r
+  private val (isEnd, tagName) = data match {
+    case Pattern(s1, s2) =>
+      (! s1.isEmpty, Some(s2.toLowerCase))
+    case _ =>
+      (false, None)
+  }
+
+  def canClose(open: HtmlTag) = {
+    isEnd && tagName == open.tagName
+  }
+
+  private val TagsNotToClose = Set("br", "img")
+  def close = tagName collect { case name if !TagsNotToClose(name) => HtmlTag(s"</$name>") }
+}
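+// Illustrative example, not part of the upstream source: HtmlTag("<b>").close is
+// Some(HtmlTag("</b>")), HtmlTag("<br>").close is None (since "br" is in TagsNotToClose),
+// and HtmlTag("</b>").canClose(HtmlTag("<b>")) is true.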
+
+/** The summary of a comment, usually its first sentence. There must be exactly one summary per body. */
+final case class Summary(text: Inline) extends Inline
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala
new file mode 100644
index 0000000..a3d05ae
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala
@@ -0,0 +1,131 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package base
+package comment
+
+import scala.collection._
+
+/** A Scaladoc comment and all its tags.
+  *
+  * '''Note:''' the only instantiation site of this class is in [[CommentFactory]].
+  *
+  * @author Manohar Jonnalagedda
+  * @author Gilles Dubochet */
+abstract class Comment {
+
+  /** The main body of the comment that describes what the entity does and is.  */
+  def body: Body
+
+  private def closeHtmlTags(inline: Inline) = {
+    val stack = mutable.ListBuffer.empty[HtmlTag]
+    def scan(i: Inline) {
+      i match {
+        case Chain(list) =>
+          list foreach scan
+        case tag: HtmlTag => {
+          if (stack.length > 0 && tag.canClose(stack.last)) {
+            stack.remove(stack.length-1)
+          } else {
+            tag.close match {
+              case Some(t) =>
+                stack += t
+              case None =>
+                ;
+            }
+          }
+        }
+        case _ =>
+          ;
+      }
+    }
+    scan(inline)
+    Chain(List(inline) ++ stack.reverse)
+  }
+
+  /** A shorter version of the body. Usually, this is the first sentence of the body. */
+  def short: Inline = {
+    body.summary match {
+      case Some(s) =>
+        closeHtmlTags(s)
+      case _ =>
+        Text("")
+    }
+  }
+
+  /** A list of authors. The empty list is used when no author is defined. */
+  def authors: List[Body]
+
+  /** A list of other resources to see, including links to other entities or
+    * to external documentation. The empty list is used when no other resource
+    * is mentioned. */
+  def see: List[Body]
+
+  /** A description of the result of the entity. Typically, this provides additional
+    * information on the domain of the result, contractual post-conditions, etc. */
+  def result: Option[Body]
+
+  /** A map of exceptions that the entity can throw when accessed, and a
+    * description of what they mean. */
+  def throws: Map[String, Body]
+
+  /** A map of value parameters, and a description of what they are. Typically,
+    * this provides additional information on the domain of the parameters,
+    * contractual pre-conditions, etc. */
+  def valueParams: Map[String, Body]
+
+  /** A map of type parameters, and a description of what they are. Typically,
+    * this provides additional information on the domain of the parameters. */
+  def typeParams: Map[String, Body]
+
+  /** The version number of the entity. There is no formatting or further
+    * meaning attached to this value. */
+  def version: Option[Body]
+
+  /** A version number of a containing entity where this member-entity was introduced. */
+  def since: Option[Body]
+
+  /** An annotation as to expected changes on this entity. */
+  def todo: List[Body]
+
+  /** Whether the entity is deprecated. Using the `@deprecated` Scala attribute
+    * is preferable to using this Scaladoc tag. */
+  def deprecated: Option[Body]
+
+  /** An additional note concerning the contract of the entity. */
+  def note: List[Body]
+
+  /** A usage example related to the entity. */
+  def example: List[Body]
+
+  /** A description for the primary constructor */
+  def constructor: Option[Body]
+
+  /** A set of diagram directives for the inheritance diagram */
+  def inheritDiagram: List[String]
+
+  /** A set of diagram directives for the content diagram */
+  def contentDiagram: List[String]
+
+  /** The group this member is part of */
+  def group: Option[String]
+
+  /** Member group descriptions */
+  def groupDesc: Map[String,Body]
+
+  /** Member group names (overriding the short tag) */
+  def groupNames: Map[String,String]
+
+  /** Member group priorities */
+  def groupPrio: Map[String,Int]
+
+  override def toString =
+    body.toString + "\n" +
+    (authors map ("@author " + _.toString)).mkString("\n") +
+    (result map ("@return " + _.toString)).mkString("\n") +
+    (version map ("@version " + _.toString)).mkString
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala
new file mode 100644
index 0000000..42b56aa
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala
@@ -0,0 +1,30 @@
+package scala.tools.nsc.doc
+package doclet
+
+import scala.collection._
+
+/** Custom Scaladoc generators must implement the `Generator` class. A custom generator can be selected in Scaladoc
+  * using the `-doc-generator` command line option.
+  * The `Generator` class does not provide data about the documented code. A number of data provider traits can be used
+  * to configure what data is actually available to the generator:
+  *  - A `Universer` provides a `Universe` data structure representing the interfaces and comments of the documented
+  *    program.
+  *  - An `Indexer` provides precalculated indexing information about a universe.
+  * Implementing this class only requires defining the method `generateImpl`. */
+abstract class Generator {
+
+  /** A series of tests that must be true before generation can be done. This is used by data provider traits to
+    * confirm that they have been correctly initialised before allowing generation to proceed. */
+  protected val checks: mutable.Set[()=>Boolean] =
+    mutable.Set.empty[()=>Boolean]
+
+  /** Outputs documentation (as a side effect). */
+  def generate(): Unit = {
+    assert(checks forall { check => check() })
+    generateImpl()
+  }
+
+  /** Outputs documentation (as a side effect). This method is called only if all `checks` are true. */
+  protected def generateImpl(): Unit
+
+}
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala
rename to src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Universer.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/doclet/Universer.scala
rename to src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala
new file mode 100644
index 0000000..21c5f6b
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala
@@ -0,0 +1,19 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  David Bernard, Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc.doc
+package html
+
+import doclet._
+
+/** The default doclet used by the scaladoc command line tool
+  * when the user does not specify a custom doclet. */
+class Doclet extends Generator with Universer with Indexer {
+
+  def generateImpl() {
+    new html.HtmlFactory(universe, index).generate()
+  }
+
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
new file mode 100644
index 0000000..d721a96
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -0,0 +1,152 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  David Bernard, Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package html
+
+import model._
+import java.io.{ File => JFile }
+import io.{ Streamable, Directory }
+import scala.collection._
+import page.diagram._
+
+import html.page.diagram.DiagramGenerator
+
+/** A class that can generate Scaladoc sites to some fixed root folder.
+  * @author David Bernard
+  * @author Gilles Dubochet */
+class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
+
+  /** The character encoding to be used for generated Scaladoc sites.
+    * This value is currently always UTF-8. */
+  def encoding: String = "UTF-8"
+
+  def siteRoot: JFile = new JFile(universe.settings.outdir.value)
+
+  def libResources = List(
+    "index.js",
+    "jquery-ui.js",
+    "jquery.js",
+    "jquery.layout.js",
+    "scheduler.js",
+    "diagrams.js",
+    "template.js",
+    "tools.tooltip.js",
+    "modernizr.custom.js",
+
+    "index.css",
+    "ref-index.css",
+    "template.css",
+    "diagrams.css",
+
+    "class.png",
+    "class_big.png",
+    "class_diagram.png",
+    "object.png",
+    "object_big.png",
+    "object_diagram.png",
+    "package.png",
+    "package_big.png",
+    "trait.png",
+    "trait_big.png",
+    "trait_diagram.png",
+    "type.png",
+    "type_big.png",
+    "type_diagram.png",
+
+    "class_to_object_big.png",
+    "object_to_class_big.png",
+    "trait_to_object_big.png",
+    "object_to_trait_big.png",
+    "type_to_object_big.png",
+    "object_to_type_big.png",
+
+    "arrow-down.png",
+    "arrow-right.png",
+    "filter_box_left.png",
+    "filter_box_left2.gif",
+    "filter_box_right.png",
+    "filterbg.gif",
+    "filterboxbarbg.gif",
+    "filterboxbg.gif",
+
+    "constructorsbg.gif",
+    "defbg-blue.gif",
+    "defbg-green.gif",
+    "filterboxbarbg.png",
+    "fullcommenttopbg.gif",
+    "ownderbg2.gif",
+    "ownerbg.gif",
+    "ownerbg2.gif",
+    "packagesbg.gif",
+    "signaturebg.gif",
+    "signaturebg2.gif",
+    "typebg.gif",
+    "conversionbg.gif",
+    "valuemembersbg.gif",
+
+    "navigation-li-a.png",
+    "navigation-li.png",
+    "remove.png",
+    "selected-right.png",
+    "selected.png",
+    "selected2-right.png",
+    "selected2.png",
+    "selected-right-implicits.png",
+    "selected-implicits.png",
+    "unselected.png"
+  )
+
+  /** Generates the Scaladoc site for a model into the site root.
+    * A scaladoc site is a set of HTML and related files
+    * that document a model extracted from a compiler run.
+    */
+  def generate() {
+
+    def copyResource(subPath: String) {
+      val bytes = new Streamable.Bytes {
+        val p = "/scala/tools/nsc/doc/html/resource/" + subPath
+        val inputStream = getClass.getResourceAsStream(p)
+        assert(inputStream != null, p)
+      }.toByteArray()
+      val dest = Directory(siteRoot) / subPath
+      dest.parent.createDirectory()
+      val out = dest.toFile.bufferedOutput()
+      try out.write(bytes, 0, bytes.length)
+      finally out.close()
+    }
+
+    DiagramGenerator.initialize(universe.settings)
+
+    libResources foreach (s => copyResource("lib/" + s))
+
+    new page.Index(universe, index) writeFor this
+    new page.IndexScript(universe, index) writeFor this
+
+    writeTemplates(_ writeFor this)
+
+    for (letter <- index.firstLetterIndex) {
+      new html.page.ReferenceIndex(letter._1, index, universe) writeFor this
+    }
+
+    DiagramGenerator.cleanup()
+  }
+
+  def writeTemplates(writeForThis: HtmlPage => Unit) {
+    val written = mutable.HashSet.empty[DocTemplateEntity]
+    val diagramGenerator: DiagramGenerator = new DotDiagramGenerator(universe.settings)
+
+    def writeTemplate(tpl: DocTemplateEntity) {
+      if (!(written contains tpl)) {
+        writeForThis(new page.Template(universe, diagramGenerator, tpl))
+        written += tpl
+        tpl.templates collect { case d: DocTemplateEntity => d } map writeTemplate
+      }
+    }
+
+    writeTemplate(universe.rootPackage)
+  }
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
new file mode 100644
index 0000000..f6373e9
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -0,0 +1,222 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  David Bernard, Manohar Jonnalagedda
+ */
+
+package scala
+package tools
+package nsc
+package doc
+package html
+
+import base._
+import base.comment._
+import model._
+
+import scala.xml.NodeSeq
+import scala.xml.dtd.{DocType, PublicID}
+import scala.collection._
+import java.io.Writer
+
+/** An html page that is part of a Scaladoc site.
+  * @author David Bernard
+  * @author Gilles Dubochet */
+abstract class HtmlPage extends Page { thisPage =>
+  /** The title of this page. */
+  protected def title: String
+
+  /** The page description */
+  protected def description: String =
+    // unless overridden, displays the title in a spaced format, keeping '-' and '.'
+    title.replaceAll("[^a-zA-Z0-9\\.\\-]+", " ").replaceAll("\\-+", " - ").replaceAll(" +", " ")
+
+  /** The page keywords */
+  protected def keywords: String =
+    // unless overridden, same as the description, minus the " - "
+    description.replaceAll(" - ", " ")
+
+  /** Additional header elements (links, scripts, meta tags, etc.) required for this page. */
+  protected def headers: NodeSeq
+
+  /** The body of this page. */
+  def body: NodeSeq
+
+  def writeFor(site: HtmlFactory) {
+    val doctype = DocType("html")
+    val html =
+      <html>
+        <head>
+          <title>{ title }</title>
+          <meta name="description" content={ description }/>
+          <meta name="keywords" content={ keywords }/>
+          <meta http-equiv="content-type" content={ "text/html; charset=" + site.encoding }/>
+          { headers }
+        </head>
+        { body }
+      </html>
+
+    writeFile(site) { (w: Writer) =>
+      w.write(doctype.toString + "\n")
+      w.write(xml.Xhtml.toXhtml(html))
+    }
+
+    if (site.universe.settings.docRawOutput)
+      writeFile(site, ".raw") {
+        // we're only interested in the body, as this will go into the diff
+        _.write(body.text)
+      }
+  }
+
+  /** Transforms an optional comment into a styled HTML tree representing its body if it is defined, or into an empty
+    * node sequence if it is not. */
+  def commentToHtml(comment: Option[Comment]): NodeSeq =
+    (comment map (commentToHtml(_))) getOrElse NodeSeq.Empty
+
+  /** Transforms a comment into a styled HTML tree representing its body. */
+  def commentToHtml(comment: Comment): NodeSeq =
+    bodyToHtml(comment.body)
+
+  def bodyToHtml(body: Body): NodeSeq =
+    body.blocks flatMap (blockToHtml(_))
+
+  def blockToHtml(block: Block): NodeSeq = block match {
+    case Title(in, 1) => <h3>{ inlineToHtml(in) }</h3>
+    case Title(in, 2) => <h4>{ inlineToHtml(in) }</h4>
+    case Title(in, 3) => <h5>{ inlineToHtml(in) }</h5>
+    case Title(in, _) => <h6>{ inlineToHtml(in) }</h6>
+    case Paragraph(in) => <p>{ inlineToHtml(in) }</p>
+    case Code(data) =>
+      <pre>{ SyntaxHigh(data) }</pre> //<pre>{ scala.xml.Text(data) }</pre>
+    case UnorderedList(items) =>
+      <ul>{ listItemsToHtml(items) }</ul>
+    case OrderedList(items, listStyle) =>
+      <ol class={ listStyle }>{ listItemsToHtml(items) }</ol>
+    case DefinitionList(items) =>
+      <dl>{items map { case (t, d) => <dt>{ inlineToHtml(t) }</dt><dd>{ blockToHtml(d) }</dd> } }</dl>
+    case HorizontalRule() =>
+      <hr/>
+  }
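+  // Illustrative note, not part of the upstream source: blockToHtml(Title(Text("Usage"), 1))
+  // yields <h3>Usage</h3>, blockToHtml(Paragraph(Text("hi"))) yields <p>hi</p>, and code
+  // blocks are rendered as <pre> elements run through the SyntaxHigh highlighter.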
+
+  def listItemsToHtml(items: Seq[Block]) =
+    items.foldLeft(xml.NodeSeq.Empty){ (xmlList, item) =>
+      item match {
+        case OrderedList(_, _) | UnorderedList(_) =>  // html requires sub ULs to be put into the last LI
+          xmlList.init ++ <li>{ xmlList.last.child ++ blockToHtml(item) }</li>
+        case Paragraph(inline) =>
+          xmlList :+ <li>{ inlineToHtml(inline) }</li>  // LIs are blocks, no need to use Ps
+        case block =>
+          xmlList :+ <li>{ blockToHtml(block) }</li>
+      }
+  }
+
+  def inlineToHtml(inl: Inline): NodeSeq = inl match {
+    case Chain(items) => items flatMap (inlineToHtml(_))
+    case Italic(in) => <i>{ inlineToHtml(in) }</i>
+    case Bold(in) => <b>{ inlineToHtml(in) }</b>
+    case Underline(in) => <u>{ inlineToHtml(in) }</u>
+    case Superscript(in) => <sup>{ inlineToHtml(in) }</sup>
+    case Subscript(in) => <sub>{ inlineToHtml(in) }</sub>
+    case Link(raw, title) => <a href={ raw } target="_blank">{ inlineToHtml(title) }</a>
+    case Monospace(in) => <code>{ inlineToHtml(in) }</code>
+    case Text(text) => scala.xml.Text(text)
+    case Summary(in) => inlineToHtml(in)
+    case HtmlTag(tag) => scala.xml.Unparsed(tag)
+    case EntityLink(target, link) => linkToHtml(target, link, hasLinks = true)
+  }
+
+  def linkToHtml(text: Inline, link: LinkTo, hasLinks: Boolean) = link match {
+    case LinkToTpl(dtpl: TemplateEntity) =>
+      if (hasLinks)
+        <a href={ relativeLinkTo(dtpl) } class="extype" name={ dtpl.qualifiedName }>{ inlineToHtml(text) }</a>
+      else
+        <span class="extype" name={ dtpl.qualifiedName }>{ inlineToHtml(text) }</span>
+    case LinkToMember(mbr: MemberEntity, inTpl: TemplateEntity) =>
+      if (hasLinks)
+        <a href={ relativeLinkTo(inTpl) + "#" + mbr.signature } class="extmbr" name={ mbr.qualifiedName }>{ inlineToHtml(text) }</a>
+      else
+        <span class="extmbr" name={ mbr.qualifiedName }>{ inlineToHtml(text) }</span>
+    case Tooltip(tooltip) =>
+      <span class="extype" name={ tooltip }>{ inlineToHtml(text) }</span>
+    case LinkToExternal(name, url) =>
+      <a href={ url } class="extype" target="_top">{ inlineToHtml(text) }</a>
+    case _ =>
+      inlineToHtml(text)
+  }
+
+  def typeToHtml(tpes: List[model.TypeEntity], hasLinks: Boolean): NodeSeq = tpes match {
+    case Nil =>
+      NodeSeq.Empty
+    case List(tpe) =>
+      typeToHtml(tpe, hasLinks)
+    case tpe :: rest =>
+      typeToHtml(tpe, hasLinks) ++ scala.xml.Text(" with ") ++ typeToHtml(rest, hasLinks)
+  }
+
+  def typeToHtml(tpe: model.TypeEntity, hasLinks: Boolean): NodeSeq = {
+    val string = tpe.name
+    def toLinksOut(inPos: Int, starts: List[Int]): NodeSeq = {
+      if (starts.isEmpty && (inPos == string.length))
+        NodeSeq.Empty
+      else if (starts.isEmpty)
+        scala.xml.Text(string.slice(inPos, string.length))
+      else if (inPos == starts.head)
+        toLinksIn(inPos, starts)
+      else {
+        scala.xml.Text(string.slice(inPos, starts.head)) ++ toLinksIn(starts.head, starts)
+      }
+    }
+    def toLinksIn(inPos: Int, starts: List[Int]): NodeSeq = {
+      val (link, width) = tpe.refEntity(inPos)
+      val text = comment.Text(string.slice(inPos, inPos + width))
+      linkToHtml(text, link, hasLinks) ++ toLinksOut(inPos + width, starts.tail)
+    }
+    if (hasLinks)
+      toLinksOut(0, tpe.refEntity.keySet.toList)
+    else
+      scala.xml.Text(string)
+  }
+
+  def typesToHtml(tpess: List[model.TypeEntity], hasLinks: Boolean, sep: NodeSeq): NodeSeq = tpess match {
+    case Nil         => NodeSeq.Empty
+    case tpe :: Nil  => typeToHtml(tpe, hasLinks)
+    case tpe :: tpes => typeToHtml(tpe, hasLinks) ++ sep ++ typesToHtml(tpes, hasLinks, sep)
+  }
+
+  def hasPage(e: DocTemplateEntity) = {
+    e.isPackage || e.isTrait || e.isClass || e.isObject || e.isCaseClass
+  }
+
+  /** Returns the HTML code that represents the template in `tpl` as a hyperlinked name. */
+  def templateToHtml(tpl: TemplateEntity, name: String = null) = tpl match {
+    case dTpl: DocTemplateEntity =>
+      if (hasPage(dTpl)) {
+        <a href={ relativeLinkTo(dTpl) } class="extype" name={ dTpl.qualifiedName }>{ if (name eq null) dTpl.name else name }</a>
+      } else {
+        scala.xml.Text(if (name eq null) dTpl.name else name)
+      }
+    case ndTpl: NoDocTemplate =>
+      scala.xml.Text(if (name eq null) ndTpl.name else name)
+  }
+
+  /** Returns the HTML code that represents the templates in `tpls` as a list of hyperlinked names. */
+  def templatesToHtml(tplss: List[TemplateEntity], sep: NodeSeq): NodeSeq = tplss match {
+    case Nil         => NodeSeq.Empty
+    case tpl :: Nil  => templateToHtml(tpl)
+    case tpl :: tpls => templateToHtml(tpl) ++ sep ++ templatesToHtml(tpls, sep)
+  }
+
+  /** Returns the name of the "..._big.png" image (the upper-left icon) corresponding to the given DocTemplateEntity. */
+  def docEntityKindToBigImage(ety: DocTemplateEntity) =
+    if (ety.isTrait && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "trait_to_object_big.png"
+    else if (ety.isTrait) "trait_big.png"
+    else if (ety.isClass && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "class_to_object_big.png"
+    else if (ety.isClass) "class_big.png"
+    else if ((ety.isAbstractType || ety.isAliasType) && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "type_to_object_big.png"
+    else if ((ety.isAbstractType || ety.isAliasType)) "type_big.png"
+    else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isClass) "object_to_class_big.png"
+    else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isTrait) "object_to_trait_big.png"
+    else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && (ety.companion.get.isAbstractType || ety.companion.get.isAliasType)) "object_to_trait_big.png"
+    else if (ety.isObject) "object_big.png"
+    else if (ety.isPackage) "package_big.png"
+    else "class_big.png"  // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
+}
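
The list-handling fold above is the subtle part of block rendering: HTML requires a nested list to live inside the preceding <li>, so the fold rewrites its own last element. A minimal, self-contained sketch of the same idea, using simplified stand-in types (Para/UList are hypothetical, not the scaladoc comment model):

import scala.xml.NodeSeq

object ListNestingSketch extends App {
  sealed trait Block                                  // simplified stand-in for comment.Block
  case class Para(text: String) extends Block
  case class UList(items: Seq[Block]) extends Block

  def blockToHtml(b: Block): NodeSeq = b match {
    case Para(t)      => <p>{ t }</p>
    case UList(items) => <ul>{ listItemsToHtml(items) }</ul>
  }

  // same shape as the fold above: a sub-list is folded into the last <li>
  def listItemsToHtml(items: Seq[Block]): NodeSeq =
    items.foldLeft(NodeSeq.Empty) { (acc, item) =>
      item match {
        case UList(_) => acc.init ++ <li>{ acc.last.child ++ blockToHtml(item) }</li>
        case other    => acc :+ <li>{ blockToHtml(other) }</li>
      }
    }

  println(listItemsToHtml(Seq(Para("outer"), UList(Seq(Para("inner"))))))
  // prints: <li><p>outer</p><ul><li><p>inner</p></li></ul></li>
}
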
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/Page.scala b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
new file mode 100644
index 0000000..93950fd
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
@@ -0,0 +1,103 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  David Bernard, Manohar Jonnalagedda
+ */
+
+package scala
+package tools.nsc.doc.html
+
+import scala.tools.nsc.doc.model._
+import java.io.{FileOutputStream, File}
+import scala.reflect.NameTransformer
+import java.nio.channels.Channels
+import java.io.Writer
+
+abstract class Page {
+  thisPage =>
+
+  /** The path of this page, relative to the API site. `path.tail` is a list
+    * of folder names leading to this page (from closest package to
+    * one-above-root package), `path.head` is the file name of this page.
+    * Note that `path` has a length of at least one. */
+  def path: List[String]
+
+  def absoluteLinkTo(path: List[String]) = path.reverse.mkString("/")
+
+  def createFileOutputStream(site: HtmlFactory, suffix: String = "") = {
+    val file = new File(site.siteRoot, absoluteLinkTo(thisPage.path) + suffix)
+    val folder = file.getParentFile
+    if (! folder.exists) {
+      folder.mkdirs
+    }
+    new FileOutputStream(file.getPath)
+  }
+
+  def writeFile(site: HtmlFactory, suffix: String = "")(fn: Writer => Unit) = {
+    val fos = createFileOutputStream(site, suffix)
+    val w = Channels.newWriter(fos.getChannel, site.encoding)
+    try {
+      fn(w)
+    }
+    finally {
+      w.close()
+      fos.close()
+    }
+  }
+
+  /** Writes this page as a file. The file's location is relative to the
+    * generator's site root, and the encoding is also defined by the generator.
+    * @param site The generator that is writing this page. */
+  def writeFor(site: HtmlFactory): Unit
+
+  def kindToString(mbr: MemberEntity) =
+    mbr match {
+      case c: Class => if (c.isCaseClass) "case class" else "class"
+      case _: Trait => "trait"
+      case _: Package => "package"
+      case _: Object => "object"
+      case _: AbstractType => "type"
+      case _: AliasType => "type"
+      case _: Constructor => "new"
+      case v: Def => "def"
+      case v: Val if (v.isLazyVal) => "lazy val"
+      case v: Val if (v.isVal) => "val"
+      case v: Val if (v.isVar) => "var"
+      case _ => sys.error("Cannot create kind for: " + mbr + " of class " + mbr.getClass)
+    }
+
+  def templateToPath(tpl: TemplateEntity): List[String] = {
+    def doName(tpl: TemplateEntity): String =
+      (if (tpl.inPackageObject) "package$$" else "") + NameTransformer.encode(tpl.name) + (if (tpl.isObject) "$" else "")
+    def downPacks(pack: Package): List[String] =
+      if (pack.isRootPackage) Nil else (doName(pack) :: downPacks(pack.inTemplate))
+    def downInner(nme: String, tpl: TemplateEntity): (String, Package) = {
+      tpl.inTemplate match {
+        case inPkg: Package => (nme + ".html", inPkg)
+        case inTpl => downInner(doName(inTpl) + "$" + nme, inTpl)
+      }
+    }
+    val (file, pack) =
+      tpl match {
+        case p: Package => ("package.html", p)
+        case _ => downInner(doName(tpl), tpl)
+      }
+    file :: downPacks(pack)
+  }
+
+  /** A relative link from this page to some destination class entity.
+    * @param destClass The class or object entity that the link will point to. */
+  def relativeLinkTo(destClass: TemplateEntity): String =
+    relativeLinkTo(templateToPath(destClass))
+
+  /** A relative link from this page to some destination path.
+    * @param destPath The path that the link will point to. */
+  def relativeLinkTo(destPath: List[String]): String = {
+    def relativize(from: List[String], to: List[String]): List[String] = (from, to) match {
+      case (f :: fs, t :: ts) if (f == t) => // both paths are identical to that point
+        relativize(fs, ts)
+      case (fss, tss) =>
+        List.fill(fss.length - 1)("..") ::: tss
+    }
+    relativize(thisPage.path.reverse, destPath.reverse).mkString("/")
+  }
+}
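
relativeLinkTo works by dropping the common prefix of the two root-first paths and replacing what remains of the source path (minus its file-name component) with ".." segments. A standalone sketch of that relativization, on hypothetical example paths:

object RelativizeSketch extends App {
  // same logic as Page.relativeLinkTo, but taking root-first paths directly
  def relativize(from: List[String], to: List[String]): List[String] = (from, to) match {
    case (f :: fs, t :: ts) if f == t => relativize(fs, ts)              // drop the common prefix
    case (fss, tss)                   => List.fill(fss.length - 1)("..") ::: tss
  }

  // from scala/collection/Seq.html to scala/util/Try.html
  println(relativize(List("scala", "collection", "Seq.html"),
                     List("scala", "util", "Try.html")).mkString("/"))
  // prints: ../util/Try.html
}
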
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
new file mode 100644
index 0000000..9101485
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
@@ -0,0 +1,287 @@
+/* NSC -- new Scala compiler
+ * Copyright 2010-2013 LAMP/EPFL
+ * @author  Stephane Micheloud
+ */
+
+package scala
+package tools.nsc.doc.html
+
+import scala.xml.NodeSeq
+import scala.annotation.tailrec
+
+/** Highlight the syntax of Scala code appearing in a `{{{` wiki block
+  * (see method `HtmlPage.blockToHtml`).
+  *
+  * @author Stephane Micheloud
+  * @version 1.0
+  */
+private[html] object SyntaxHigh {
+
+  /** Reserved words, sorted alphabetically
+    * (see [[scala.reflect.internal.StdNames]]) */
+  val reserved = Array(
+    "abstract", "case", "catch", "class", "def",
+    "do", "else", "extends", "false", "final", "finally",
+    "for", "if", "implicit", "import", "lazy", "match",
+    "new", "null", "object", "override", "package",
+    "private", "protected", "return", "sealed", "super",
+    "this", "throw", "trait", "true", "try", "type",
+    "val", "var", "while", "with", "yield")
+
+  /** Annotations, sorted alphabetically */
+  val annotations = Array(
+    "BeanProperty", "SerialVersionUID",
+    "beanGetter", "beanSetter", "bridge",
+    "deprecated", "deprecatedName", "deprecatedOverriding", "deprecatedInheritance",
+    "elidable", "field", "getter", "inline",
+    "migration", "native", "noinline", "param",
+    "remote", "setter", "specialized", "strictfp", "switch",
+    "tailrec", "throws", "transient",
+    "unchecked", "uncheckedStable", "uncheckedVariance",
+    "varargs", "volatile")
+
+  /** Standard library classes/objects, sorted alphabetically */
+  val standards = Array (
+    "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Array",
+    "Boolean", "Byte", "Char", "Class", "ClassTag", "ClassManifest",
+    "Console", "Double", "Enumeration", "Float", "Function", "Int",
+    "List", "Long", "Manifest", "Map",
+    "NoManifest", "None", "Nothing", "Null", "Object", "Option", "OptManifest",
+    "Pair", "Predef",
+    "Seq", "Set", "Short", "Some", "String", "Symbol",
+    "Triple", "TypeTag", "Unit")
+
+  def apply(data: String): NodeSeq = {
+    val buf = data.getBytes
+    val out = new StringBuilder
+
+    def compare(offset: Int, key: String): Int = {
+      var i = offset
+      var j = 0
+      val l = key.length
+      while (i < buf.length && j < l) {
+        val bch = buf(i).toChar
+        val kch = key charAt j
+        if (bch < kch) return -1
+        else if (bch > kch) return 1
+        i += 1
+        j += 1
+      }
+      if (j < l) -1
+      else if (i < buf.length &&
+               ('A' <= buf(i) && buf(i) <= 'Z' ||
+                'a' <= buf(i) && buf(i) <= 'z' ||
+                '0' <= buf(i) && buf(i) <= '9' ||
+                buf(i) == '_')) 1
+      else 0
+    }
+
+    def lookup(a: Array[String], i: Int): Int = {
+      var lo = 0
+      var hi = a.length - 1
+      while (lo <= hi) {
+        val m = (hi + lo) / 2
+        val d = compare(i, a(m))
+        if (d < 0) hi = m - 1
+        else if (d > 0) lo = m + 1
+        else return m
+      }
+      -1
+    }
+
+    def comment(i: Int): String = {
+      val out = new StringBuilder("/")
+      def line(i: Int): Int =
+        if (i == buf.length || buf(i) == '\n') i
+        else {
+          out append buf(i).toChar
+          line(i+1)
+        }
+      var level = 0
+      def multiline(i: Int, star: Boolean): Int = {
+        if (i == buf.length) return i
+        val ch = buf(i).toChar
+        out append ch
+        ch match {
+          case '*' =>
+            if (star) level += 1
+            multiline(i+1, !star)
+          case '/' =>
+            if (star) {
+              if (level > 0) level -= 1
+              if (level == 0) i else multiline(i+1, star = true)
+            } else
+              multiline(i+1, star = false)
+          case _ =>
+            multiline(i+1, star = false)
+        }
+      }
+      if (buf(i) == '/') line(i) else multiline(i, star = true)
+      out.toString
+    }
+
+    /* e.g. `val endOfLine = '\u000A'`*/
+    def charlit(j: Int): String = {
+      val out = new StringBuilder("'")
+      def charlit0(i: Int, bslash: Boolean): Int = {
+        if (i == buf.length) i
+        else if (i > j+6) { out setLength 0; j }
+        else {
+          val ch = buf(i).toChar
+          out append ch
+          ch match {
+            case '\\' =>
+              charlit0(i+1, bslash = true)
+            case '\'' if !bslash =>
+              i
+            case _ =>
+              if (bslash && '0' <= ch && ch <= '9') charlit0(i+1, bslash = true)
+              else charlit0(i+1, bslash = false)
+          }
+        }
+      }
+      charlit0(j, bslash = false)
+      out.toString
+    }
+
+    def strlit(i: Int): String = {
+      val out = new StringBuilder("\"")
+      def strlit0(i: Int, bslash: Boolean): Int = {
+        if (i == buf.length) return i
+        val ch = buf(i).toChar
+        out append ch
+        ch match {
+          case '\\' =>
+            strlit0(i+1, bslash = true)
+          case '"' if !bslash =>
+            i
+          case _ =>
+            strlit0(i+1, bslash = false)
+        }
+      }
+      strlit0(i, bslash = false)
+      out.toString
+    }
+
+    def numlit(i: Int): String = {
+      val out = new StringBuilder
+      def intg(i: Int): Int = {
+        if (i == buf.length) return i
+        val ch = buf(i).toChar
+        ch match {
+          case '.' =>
+            out append ch
+            frac(i+1)
+          case _ =>
+            if (Character.isDigit(ch)) {
+              out append ch
+              intg(i+1)
+            } else i
+        }
+      }
+      def frac(i: Int): Int = {
+        if (i == buf.length) return i
+        val ch = buf(i).toChar
+        ch match {
+          case 'e' | 'E' =>
+            out append ch
+            expo(i+1, signed = false)
+          case _ =>
+            if (Character.isDigit(ch)) {
+              out append ch
+              frac(i+1)
+            } else i
+        }
+      }
+      def expo(i: Int, signed: Boolean): Int = {
+        if (i == buf.length) return i
+        val ch = buf(i).toChar
+        ch match {
+          case '+' | '-' if !signed =>
+            out append ch
+            expo(i+1, signed = true)
+          case _ =>
+            if (Character.isDigit(ch)) {
+              out append ch
+              expo(i+1, signed)
+            } else i
+        }
+      }
+      intg(i)
+      out.toString
+    }
+
+    @tailrec def parse(pre: String, i: Int): Unit = {
+      out append pre
+      if (i == buf.length) return
+      buf(i) match {
+        case '\n' =>
+          parse("\n", i+1)
+        case ' ' =>
+          parse(" ", i+1)
+        case '&' =>
+          parse("&", i+1)
+        case '<' if i+1 < buf.length =>
+          val ch = buf(i+1).toChar
+          if (ch == '-' || ch == ':' || ch == '%')
+            parse("<span class=\"kw\"><"+ch+"</span>", i+2)
+          else
+            parse("<", i+1)
+        case '>' =>
+          if (i+1 < buf.length && buf(i+1) == ':')
+            parse("<span class=\"kw\">>:</span>", i+2)
+          else
+            parse(">", i+1)
+        case '=' =>
+          if (i+1 < buf.length && buf(i+1) == '>')
+            parse("<span class=\"kw\">=></span>", i+2)
+          else
+            parse(buf(i).toChar.toString, i+1)
+        case '/' =>
+          if (i+1 < buf.length && (buf(i+1) == '/' || buf(i+1) == '*')) {
+            val c = comment(i+1)
+            parse("<span class=\"cmt\">"+c+"</span>", i+c.length)
+          } else
+            parse(buf(i).toChar.toString, i+1)
+        case '\'' =>
+          val s = charlit(i+1)
+          if (s.length > 0)
+            parse("<span class=\"lit\">"+s+"</span>", i+s.length)
+          else
+            parse(buf(i).toChar.toString, i+1)
+        case '"' =>
+          val s = strlit(i+1)
+          parse("<span class=\"lit\">"+s+"</span>", i+s.length)
+        case '@' =>
+          val k = lookup(annotations, i+1)
+          if (k >= 0)
+            parse("<span class=\"ano\">@"+annotations(k)+"</span>", i+annotations(k).length+1)
+          else
+            parse(buf(i).toChar.toString, i+1)
+        case _ =>
+          if (i == 0 || (i >= 1 && !Character.isJavaIdentifierPart(buf(i-1).toChar))) {
+            if (Character.isDigit(buf(i).toInt) ||
+                (buf(i) == '.' && i + 1 < buf.length && Character.isDigit(buf(i+1).toInt))) {
+              val s = numlit(i)
+              parse("<span class=\"num\">"+s+"</span>", i+s.length)
+            } else {
+              val k = lookup(reserved, i)
+              if (k >= 0)
+                parse("<span class=\"kw\">"+reserved(k)+"</span>", i+reserved(k).length)
+              else {
+                val k = lookup(standards, i)
+                if (k >= 0)
+                  parse("<span class=\"std\">"+standards(k)+"</span>", i+standards(k).length)
+                else
+                  parse(buf(i).toChar.toString, i+1)
+              }
+            }
+          } else
+            parse(buf(i).toChar.toString, i+1)
+      }
+    }
+
+    parse("", 0)
+    scala.xml.Unparsed(out.toString)
+  }
+}
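
The parser above classifies tokens with a binary search (lookup) over the sorted keyword arrays; compare matches a keyword against the buffer at a given offset and rejects the match when it continues into an identifier character. A self-contained sketch of the same lookup, using a Char buffer and isJavaIdentifierPart as an approximation of the original character check:

object KeywordLookupSketch extends App {
  // must stay sorted for the binary search
  val reserved = Array("case", "class", "def", "val", "var")

  // compare the keyword against buf starting at offset; a match that continues
  // into an identifier character does not count (so "value" is not "val")
  def compare(buf: Array[Char], offset: Int, key: String): Int = {
    var i = offset
    var j = 0
    while (i < buf.length && j < key.length) {
      if (buf(i) < key(j)) return -1
      if (buf(i) > key(j)) return 1
      i += 1
      j += 1
    }
    if (j < key.length) -1
    else if (i < buf.length && Character.isJavaIdentifierPart(buf(i))) 1
    else 0
  }

  def lookup(buf: Array[Char], a: Array[String], i: Int): Int = {
    var lo = 0
    var hi = a.length - 1
    while (lo <= hi) {
      val m = (lo + hi) / 2
      val d = compare(buf, i, a(m))
      if (d < 0) hi = m - 1
      else if (d > 0) lo = m + 1
      else return m
    }
    -1
  }

  val src = "val value = 1".toCharArray
  println(lookup(src, reserved, 0))  // 3  ("val" is a keyword)
  println(lookup(src, reserved, 4))  // -1 ("value" is an identifier, not "val")
}
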
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
new file mode 100644
index 0000000..ce3a5eb
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
@@ -0,0 +1,133 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  David Bernard, Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package html
+package page
+
+import model._
+import scala.collection._
+import scala.xml._
+
+class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
+
+  def path = List("index.html")
+
+  def title = {
+    val s = universe.settings
+    ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) +
+    ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
+  }
+
+  val headers =
+    <xml:group>
+      <link href={ relativeLinkTo{List("index.css", "lib")} }  media="screen" type="text/css" rel="stylesheet"/>
+      <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
+      <script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
+      <script type="text/javascript" src={ relativeLinkTo{List("jquery.layout.js", "lib")} }></script>
+      <script type="text/javascript" src={ relativeLinkTo{List("index.js", "lib")} }></script>
+      <script type="text/javascript" src={ relativeLinkTo{List("scheduler.js", "lib")} }></script>
+    </xml:group>
+
+  val body =
+    <body>
+      <div id="library">
+        <img class='class icon' alt='class icon' src={ relativeLinkTo{List("class.png", "lib")} }/>
+        <img class='trait icon' alt='trait icon' src={ relativeLinkTo{List("trait.png", "lib")} }/>
+        <img class='object icon' alt='object icon' src={ relativeLinkTo{List("object.png", "lib")} }/>
+        <img class='package icon' alt='package icon' src={ relativeLinkTo{List("package.png", "lib")} }/>
+      </div>
+      { browser }
+      <div id="content" class="ui-layout-center">
+        <iframe id="template" name="template" src={ relativeLinkTo{List("package.html")} }/>
+      </div>
+    </body>
+
+  def letters: NodeSeq =
+    '_' +: ('a' to 'z') map {
+      char => {
+        val label = if (char == '_') '#' else char.toUpper
+
+        index.firstLetterIndex.get(char) match {
+          case Some(_) =>
+            <a target="template" href={ "index/index-" + char + ".html" }>{
+              label
+            }</a>
+          case None => <span>{ label }</span>
+        }
+      }
+    }
+
+  def browser =
+    <div id="browser" class="ui-layout-west">
+      <div class="ui-west-center">
+      <div id="filter">
+          <div id="textfilter"></div>
+          <div id="letters">{ letters }</div>
+      </div>
+      <div class="pack" id="tpl">{
+        def packageElem(pack: model.Package): NodeSeq = {
+          <xml:group>
+            { if (!pack.isRootPackage)
+                <a class="tplshow" href={ relativeLinkTo(pack) } target="template">{ pack.qualifiedName }</a>
+              else NodeSeq.Empty
+            }
+            <ol class="templates">{
+              val tpls: Map[String, Seq[DocTemplateEntity]] =
+                (pack.templates collect {
+                  case t: DocTemplateEntity if !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName) => t
+                }) groupBy (_.name)
+
+              val placeholderSeq: NodeSeq = <div class="placeholder"></div>
+
+              def createLink(entity: DocTemplateEntity, includePlaceholder: Boolean, includeText: Boolean) = {
+                val entityType = kindToString(entity)
+                val linkContent = (
+                  { if (includePlaceholder) placeholderSeq else NodeSeq.Empty }
+                  ++
+                  { if (includeText) <span class="tplLink">{ Text(packageQualifiedName(entity)) }</span> else NodeSeq.Empty }
+                )
+                <a class="tplshow" href={ relativeLinkTo(entity) } target="template"><span class={ entityType }>({ Text(entityType) })</span>{ linkContent }</a>
+              }
+
+              for (tn <- tpls.keySet.toSeq sortBy (_.toLowerCase)) yield {
+                val entities = tpls(tn)
+                val row = (entities find (e => e.isPackage || e.isObject), entities find (e => e.isTrait || e.isClass))
+
+                val itemContents = row match {
+                  case (Some(obj), None) => createLink(obj, includePlaceholder = true, includeText = true)
+
+                  case (maybeObj, Some(template)) =>
+                    val firstLink = maybeObj match {
+                      case Some(obj) => createLink(obj, includePlaceholder = false, includeText = false)
+                      case None => placeholderSeq
+                    }
+
+                    firstLink ++ createLink(template, includePlaceholder = false, includeText = true)
+
+                  case _ => // FIXME: this default case should not be necessary. For some reason AnyRef is not a package, object, trait, or class
+                    val entry = entities.head
+                    placeholderSeq ++ createLink(entry, includePlaceholder = false, includeText = true)
+                }
+
+                <li title={ entities.head.qualifiedName }>{ itemContents }</li>
+              }
+            }</ol>
+            <ol class="packages"> {
+              for (sp <- pack.packages sortBy (_.name.toLowerCase)) yield
+                <li class="pack" title={ sp.qualifiedName }>{ packageElem(sp) }</li>
+            }</ol>
+          </xml:group>
+        }
+        packageElem(universe.rootPackage)
+      }</div></div><script src="index.js"></script>
+    </div>
+
+  def packageQualifiedName(ety: DocTemplateEntity): String =
+    if (ety.inTemplate.isPackage) ety.name
+    else (packageQualifiedName(ety.inTemplate) + "." + ety.name)
+
+}
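
The letters row above links only the initials that actually occur in the index, and displays '_' (symbolic names) as '#'. A quick sketch of that mapping, with a hypothetical two-entry index:

object LettersSketch extends App {
  // hypothetical index: only '_' and 's' have entries
  val firstLetterIndex = Map('_' -> Seq("::"), 's' -> Seq("Seq"))

  val links = ('_' +: ('a' to 'z')) map { char =>
    val label = if (char == '_') '#' else char.toUpper
    if (firstLetterIndex contains char) s"""<a href="index/index-$char.html">$label</a>"""
    else s"<span>$label</span>"
  }

  println(links take 3 mkString " ")
  // <a href="index/index-_.html">#</a> <span>A</span> <span>B</span>
}
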
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala
new file mode 100644
index 0000000..e3c9450
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala
@@ -0,0 +1,69 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  David Bernard, Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc.doc.html.page
+
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.model.{Package, DocTemplateEntity}
+import scala.tools.nsc.doc.html.{Page, HtmlFactory}
+import scala.util.parsing.json.{JSONObject, JSONArray}
+
+class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
+  def path = List("index.js")
+
+  override def writeFor(site: HtmlFactory) {
+    writeFile(site) {
+      _.write("Index.PACKAGES = " + packages.toString() + ";")
+    }
+  }
+
+  val packages = {
+    val pairs = allPackagesWithTemplates.toIterable.map(_ match {
+      case (pack, templates) => {
+        val merged = mergeByQualifiedName(templates)
+
+        val ary = merged.keys.toList.sortBy(_.toLowerCase).map(key => {
+          val pairs = merged(key).map(
+            t => kindToString(t) -> relativeLinkTo(t)
+          ) :+ ("name" -> key)
+
+          JSONObject(scala.collection.immutable.Map(pairs : _*))
+        })
+
+        pack.qualifiedName -> JSONArray(ary)
+      }
+    }).toSeq
+
+    JSONObject(scala.collection.immutable.Map(pairs : _*))
+  }
+
+  def mergeByQualifiedName(source: List[DocTemplateEntity]) = {
+    var result = Map[String, List[DocTemplateEntity]]()
+
+    for (t <- source) {
+      val k = t.qualifiedName
+      result += k -> (result.getOrElse(k, List()) :+ t)
+    }
+
+    result
+  }
+
+  def allPackages = {
+    def f(parent: Package): List[Package] = {
+      parent.packages.flatMap(
+        p => f(p) :+ p
+      )
+    }
+    f(universe.rootPackage).sortBy(_.toString)
+  }
+
+  def allPackagesWithTemplates = {
+    Map(allPackages.map((key) => {
+      key -> key.templates.collect {
+        case t: DocTemplateEntity if !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName) => t
+      }
+    }) : _*)
+  }
+}
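
The script writes a single assignment to index.js: a JSON object keyed by package qualified name, whose values are arrays of records mapping a kind ("class", "object", ...) to a relative link, plus a "name" entry. A rough sketch of the emitted shape, built with the same scala.util.parsing.json classes used above (the entry shown is hypothetical):

import scala.util.parsing.json.{JSONArray, JSONObject}

object IndexScriptSketch extends App {
  val entry    = JSONObject(Map("class" -> "scala/util/Try.html", "name" -> "scala.util.Try"))
  val packages = JSONObject(Map("scala.util" -> JSONArray(List(entry))))
  println("Index.PACKAGES = " + packages.toString() + ";")
  // roughly: Index.PACKAGES = {"scala.util" : [{"class" : "scala/util/Try.html", "name" : "scala.util.Try"}]};
}
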
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
new file mode 100755
index 0000000..84ee82f
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
@@ -0,0 +1,61 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  Pedro Furlanetto
+ */
+
+package scala
+package tools
+package nsc
+package doc
+package html
+package page
+
+import doc.model._
+
+class ReferenceIndex(letter: Char, index: doc.Index, universe: Universe) extends HtmlPage {
+
+  def path = List("index-"+letter+".html", "index")
+
+  def title = {
+    val s = universe.settings
+    ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) +
+    ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
+  }
+
+  def headers =
+    <xml:group>
+      <link href={ relativeLinkTo(List("ref-index.css", "lib")) }  media="screen" type="text/css" rel="stylesheet"/>
+      <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
+    </xml:group>
+
+
+  private def entry(name: String, methods: Iterable[MemberEntity]) = {
+    val occurrences = methods.map(method => {
+      val html = templateToHtml(method.inDefinitionTemplates.head)
+      if (method.deprecation.isDefined) {
+        <strike>{ html }</strike>
+      } else {
+        html
+      }
+    }).toList.distinct
+
+    <div class="entry">
+      <div class="name">{
+        if (methods.find { ! _.deprecation.isDefined } != None)
+          name
+        else
+          <strike>{ name }</strike>
+      }</div>
+      <div class="occurrences">{
+        for (owner <- occurrences) yield owner ++ scala.xml.Text(" ")
+      }</div>
+    </div>
+  }
+
+  def body =
+    <body>{
+      for(groups <- index.firstLetterIndex(letter)) yield
+        entry(groups._1, groups._2.view)
+    }</body>
+
+}
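
Note the rule in entry: a name is struck through in the index only when every definition of it is deprecated; a single non-deprecated occurrence keeps it plain. A small sketch of that rule on hypothetical members:

object DeprecationEntrySketch extends App {
  case class Member(owner: String, deprecation: Option[String])   // hypothetical stand-in

  def renderName(name: String, methods: Iterable[Member]): String =
    if (methods.exists(_.deprecation.isEmpty)) name
    else s"<strike>$name</strike>"

  println(renderName("size", Seq(Member("List", None), Member("OldColl", Some("use length")))))
  // size
  println(renderName("elements", Seq(Member("Iterator", Some("use iterator")))))
  // <strike>elements</strike>
}
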
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala
new file mode 100644
index 0000000..3714575
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala
@@ -0,0 +1,127 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  David Bernard, Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package html
+package page
+
+import scala.xml.NodeSeq
+import java.io.File
+
+class Source(sourceFile: File) extends HtmlPage {
+
+  val path = List("source.html")
+
+  val title = "Scaladoc: page source"
+
+  val headers =
+    NodeSeq.Empty
+
+  val body =
+    <body>
+      <h1>Page source is not implemented yet</h1>
+    </body>
+
+    /*
+
+
+    def readTextFromSrcDir(subPath: String) :Option[String] = {
+      readTextFromFile(new File(sourceDir, subPath))
+    }
+
+    def readTextFromFile(f : File) :Option[String] = {
+      if (f.exists) {
+        Some(Source.fromFile(f)(Codec.default).getLines().mkString(""))
+      } else {
+        None
+      }
+    }
+
+
+    def writeTextToFile(f : File, txt : String, header: Option[String], footer: Option[String]) {
+      val out = new FileOutputStream(f)
+      try {
+        val enc = "UTF-8"
+        header.foreach(s => out.write(s.getBytes(enc)))
+        out.write(txt.getBytes(enc))
+        footer.foreach(s => out.write(s.getBytes(enc)))
+      } finally {
+        try {
+          out.close()
+        } catch {
+          case _ => //ignore
+        }
+      }
+    }
+
+    trait SourceHtmlizer {
+      def scalaToHtml(src :File) : Option[File]
+    }
+
+    lazy val sourceHtmlizer : SourceHtmlizer = {
+      if (cfg.htmlizeSource) {
+        new SourceHtmlizer {
+
+          val inDir: File = cfg.sourcedir
+          val outDir: File = cfg.outputdir
+
+          private def relativize(uri: URI, from: URI) = linkHelper.relativize(uri, from).getOrElse("__notFound__" + uri.getPath)
+
+          def header(dest: URI) = Some("""
+          <html>
+          <head>
+            <link href='""" + relativize(new URI("site:/_highlighter/SyntaxHighlighter.css"), dest) + """' rel='stylesheet' type='text/css'/>
+            <script language='javascript' src='""" + relativize(new URI("site:/_highlighter/shAll.js"), dest) + """'></script>
+          </head>
+          <body>
+            <pre name="code" class="scala" style="width:100%">
+        """)
+
+          def footer(dest: URI) = Some("""</pre>
+            <script language='javascript'>
+              dp.SyntaxHighlighter.ClipboardSwf = '""" + relativize(new URI("site:/_highlighter/clipboard.swf"), dest) + """';
+              dp.SyntaxHighlighter.HighlightAll('code');
+            </script>
+          </body>
+          </html>
+        """)
+
+          //TODO: escape the source code
+          def scalaToHtml(src :File) = {
+            val dest = new File(outDir, fileHelper.relativePathUnderDir(src, inDir) + ".html")
+            if (!dest.exists || dest.lastModified < src.lastModified) {
+
+              //we need to verify whether the directory we are trying to write to has already been created or not
+              if(!dest.getParentFile.exists) dest.getParentFile.mkdirs
+
+              val uri = linkHelper.uriFor(dest).get
+              var txt = fileHelper.readTextFromFile(src).getOrElse("")
+              txt = txt.replace("<", "<")
+              fileHelper.writeTextToFile(dest, txt, header(uri), footer(uri))
+            }
+            Some(dest)
+          }
+
+          def copyResources() {
+            val loader = this.getClass().getClassLoader()
+            val buf = new Array[Byte](1024)
+            def copyResource(name: String) = fileHelper.copyResource("/scala/tools/nsc/doc/html/resource/", name, outDir, loader, buf)
+            copyResource("_highlighter/clipboard.swf")
+            copyResource("_highlighter/shAll.js")
+            copyResource("_highlighter/SyntaxHighlighter.css")
+          }
+
+          copyResources()
+        }
+      } else {
+        new SourceHtmlizer {
+          def scalaToHtml(src :File) = None
+        }
+      }
+    }
+    */
+
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
new file mode 100644
index 0000000..26ee005
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -0,0 +1,988 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  David Bernard, Manohar Jonnalagedda
+ */
+
+package scala
+package tools
+package nsc
+package doc
+package html
+package page
+
+import base._
+import base.comment._
+
+import model._
+import model.diagram._
+import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
+import scala.language.postfixOps
+import scala.collection.mutable. { Set, HashSet }
+
+import model._
+import model.diagram._
+import diagram._
+
+class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemplateEntity) extends HtmlPage {
+
+  val path =
+    templateToPath(tpl)
+
+  def title = {
+    val s = universe.settings
+
+    tpl.name +
+    ( if (!s.doctitle.isDefault) " - " + s.doctitle.value else "" ) +
+    ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" ) +
+    " - " + tpl.qualifiedName
+  }
+
+  val headers =
+    <xml:group>
+      <link href={ relativeLinkTo{List("template.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
+      <link href={ relativeLinkTo{List("diagrams.css", "lib")} } media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
+      <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} } id="jquery-js"></script>
+      <script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
+      <script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
+      <script type="text/javascript" src={ relativeLinkTo{List("tools.tooltip.js", "lib")} }></script>
+      { if (universe.settings.docDiagrams.value) {
+      <script type="text/javascript" src={ relativeLinkTo{List("modernizr.custom.js", "lib")} }></script>
+      <script type="text/javascript" src={ relativeLinkTo{List("diagrams.js", "lib")} } id="diagrams-js"></script>
+      } else NodeSeq.Empty }
+      <script type="text/javascript">
+         if(top === self) {{
+            var url = '{ val p = templateToPath(tpl); "../" * (p.size - 1) + "index.html" }';
+            var hash = '{ val p = templateToPath(tpl); (p.tail.reverse ::: List(p.head.replace(".html", ""))).mkString(".") }';
+            var anchor = window.location.hash;
+            var anchor_opt = '';
+            if (anchor.length { scala.xml.Unparsed(">=") /* unless we use Unparsed, it gets escaped and crashes the script */ } 1)
+              anchor_opt = '@' + anchor.substring(1);
+            window.location.href = url + '#' + hash + anchor_opt;
+         }}
+   	  </script>
+    </xml:group>
+
+  val valueMembers =
+    tpl.methods ++ tpl.values ++ tpl.templates.filter(x => x.isObject || x.isPackage) sorted
+
+  val (absValueMembers, nonAbsValueMembers) =
+    valueMembers partition (_.isAbstract)
+
+  val (deprValueMembers, nonDeprValueMembers) =
+    nonAbsValueMembers partition (_.deprecation.isDefined)
+
+  val (concValueMembers, shadowedImplicitMembers) =
+    nonDeprValueMembers partition (!_.isShadowedOrAmbiguousImplicit)
+
+  val typeMembers =
+    tpl.abstractTypes ++ tpl.aliasTypes ++ tpl.templates.filter(x => x.isTrait || x.isClass) sorted (implicitly[Ordering[MemberEntity]])
+
+  val constructors = (tpl match {
+    case cls: Class => (cls.constructors: List[MemberEntity]).sorted
+    case _ => Nil
+  })
+
+  /* For `body` there is a special case for AnyRef: without it, AnyRef would
+   * appear like a package/object. This should be fixed properly; the
+   * implementation below is just a patch. */
+  val body = {
+    val templateName = if (tpl.isRootPackage) "root package" else tpl.name
+    val displayName = tpl.companion match {
+      case Some(companion) if (companion.visibility.isPublic && companion.inSource != None) =>
+        <a href={relativeLinkTo(companion)} title="Go to companion">{ templateName }</a>
+      case _ =>
+        templateName
+    }
+    val owner = {
+      if (tpl.isRootPackage || tpl.inTemplate.isRootPackage)
+        NodeSeq.Empty
+      else
+        <p id="owner">{ templatesToHtml(tpl.inTemplate.toRoot.reverse.tail, scala.xml.Text(".")) }</p>
+    }
+
+    <body class={ if (tpl.isType) "type" else "value" }>
+      <div id="definition">
+        {
+          tpl.companion match {
+            case Some(companion) if (companion.visibility.isPublic && companion.inSource != None) =>
+              <a href={relativeLinkTo(companion)} title="Go to companion"><img src={ relativeLinkTo(List(docEntityKindToBigImage(tpl), "lib")) }/></a>
+            case _ =>
+              <img src={ relativeLinkTo(List(docEntityKindToBigImage(tpl), "lib")) }/>
+        }}
+        { owner }
+        <h1>{ displayName }</h1>
+      </div>
+
+      { signature(tpl, isSelf = true) }
+      { memberToCommentHtml(tpl, tpl.inTemplate, isSelf = true) }
+
+      <div id="mbrsel">
+        <div id='textfilter'><span class='pre'/><span class='input'><input id='mbrsel-input' type='text' accesskey='/'/></span><span class='post'/></div>
+        { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty && (!universe.settings.docGroups.value || (tpl.members.map(_.group).distinct.length == 1)))
+            NodeSeq.Empty
+          else
+            <div id="order">
+              <span class="filtertype">Ordering</span>
+              <ol>
+                {
+                  if (!universe.settings.docGroups.value || (tpl.members.map(_.group).distinct.length == 1))
+                    NodeSeq.Empty
+                  else
+                    <li class="group out"><span>Grouped</span></li>
+                }
+                <li class="alpha in"><span>Alphabetic</span></li>
+                {
+                  if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty)
+                    NodeSeq.Empty
+                  else
+                    <li class="inherit out"><span>By inheritance</span></li>
+                }
+              </ol>
+            </div>
+        }
+        { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else
+          {
+            if (!tpl.linearizationTemplates.isEmpty)
+              <div id="ancestors">
+                <span class="filtertype">Inherited<br/>
+                </span>
+                <ol id="linearization">
+                  { (tpl :: tpl.linearizationTemplates).map(wte => <li class="in" name={ wte.qualifiedName }><span>{ wte.name }</span></li>) }
+                </ol>
+              </div>
+            else NodeSeq.Empty
+          } ++ {
+            if (!tpl.conversions.isEmpty)
+              <div id="ancestors">
+                <span class="filtertype">Implicitly<br/>
+                </span>
+                <ol id="implicits"> {
+                  tpl.conversions.map { conv =>
+                    val name = conv.conversionQualifiedName
+                    val hide = universe.settings.hiddenImplicits(name)
+                    <li class="in" name={ name } data-hidden={ hide.toString }><span>{ "by " + conv.conversionShortName }</span></li>
+                  }
+                }
+                </ol>
+              </div>
+            else NodeSeq.Empty
+          } ++
+          <div id="ancestors">
+            <span class="filtertype"></span>
+            <ol>
+              <li class="hideall out"><span>Hide All</span></li>
+              <li class="showall in"><span>Show all</span></li>
+            </ol>
+            <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#members" target="_blank">Learn more about member selection</a>
+          </div>
+        }
+        {
+          <div id="visbl">
+            <span class="filtertype">Visibility</span>
+            <ol><li class="public in"><span>Public</span></li><li class="all out"><span>All</span></li></ol>
+          </div>
+        }
+      </div>
+
+      <div id="template">
+        <div id="allMembers">
+        { if (constructors.isEmpty) NodeSeq.Empty else
+            <div id="constructors" class="members">
+              <h3>Instance Constructors</h3>
+              <ol>{ constructors map (memberToHtml(_, tpl)) }</ol>
+            </div>
+        }
+
+        { if (typeMembers.isEmpty) NodeSeq.Empty else
+            <div id="types" class="types members">
+              <h3>Type Members</h3>
+              <ol>{ typeMembers map (memberToHtml(_, tpl)) }</ol>
+            </div>
+        }
+
+        { if (absValueMembers.isEmpty) NodeSeq.Empty else
+            <div id="values" class="values members">
+              <h3>Abstract Value Members</h3>
+              <ol>{ absValueMembers map (memberToHtml(_, tpl)) }</ol>
+            </div>
+        }
+
+        { if (concValueMembers.isEmpty) NodeSeq.Empty else
+            <div id="values" class="values members">
+              <h3>{ if (absValueMembers.isEmpty) "Value Members" else "Concrete Value Members" }</h3>
+              <ol>{ concValueMembers map (memberToHtml(_, tpl)) }</ol>
+            </div>
+        }
+
+        { if (shadowedImplicitMembers.isEmpty) NodeSeq.Empty else
+            <div id="values" class="values members">
+              <h3>Shadowed Implicit Value Members</h3>
+              <ol>{ shadowedImplicitMembers map (memberToHtml(_, tpl)) }</ol>
+            </div>
+        }
+
+        { if (deprValueMembers.isEmpty) NodeSeq.Empty else
+            <div id="values" class="values members">
+              <h3>Deprecated Value Members</h3>
+              <ol>{ deprValueMembers map (memberToHtml(_, tpl)) }</ol>
+            </div>
+        }
+        </div>
+
+        <div id="inheritedMembers">
+        {
+          // linearization
+          NodeSeq fromSeq (for ((superTpl, superType) <- (tpl.linearizationTemplates zip tpl.linearizationTypes)) yield
+            <div class="parent" name={ superTpl.qualifiedName }>
+              <h3>Inherited from {
+                typeToHtmlWithStupidTypes(tpl, superTpl, superType)
+              }</h3>
+            </div>
+          )
+        }
+        {
+          // implicitly inherited
+          NodeSeq fromSeq (for (conversion <- (tpl.conversions)) yield
+            <div class="conversion" name={ conversion.conversionQualifiedName }>
+              <h3>Inherited by implicit conversion { conversion.conversionShortName } from
+                { typeToHtml(tpl.resultType, hasLinks = true) } to { typeToHtml(conversion.targetType, hasLinks = true) }
+              </h3>
+            </div>
+          )
+        }
+        </div>
+
+        <div id="groupedMembers">
+        {
+          val allGroups = tpl.members.map(_.group).distinct
+          val orderedGroups = allGroups.map(group => (tpl.groupPriority(group), group)).sorted.map(_._2)
+          // linearization
+          NodeSeq fromSeq (for (group <- orderedGroups) yield
+            <div class="group" name={ group }>
+              <h3>{ tpl.groupName(group) }</h3>
+              {
+                tpl.groupDescription(group) match {
+                  case Some(body) => <div class="comment cmt">{ bodyToHtml(body) }</div>
+                  case _ => NodeSeq.Empty
+                }
+              }
+            </div>
+          )
+        }
+        </div>
+
+      </div>
+
+      <div id="tooltip" ></div>
+
+      {
+        if (Set("epfl", "EPFL").contains(tpl.universe.settings.docfooter.value))
+          <div id="footer">Scala programming documentation. Copyright (c) 2003-2013 <a href="http://www.epfl.ch" target="_top">EPFL</a>, with contributions from <a href="http://typesafe.com" target="_top">Typesafe</a>.</div>
+        else
+          <div id="footer"> { tpl.universe.settings.docfooter.value } </div>
+      }
+
+
+    </body>
+  }
+
+  def memberToHtml(mbr: MemberEntity, inTpl: DocTemplateEntity): NodeSeq = {
+    val memberComment = memberToCommentHtml(mbr, inTpl, isSelf = false)
+    <li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
+      data-isabs={ mbr.isAbstract.toString }
+      fullComment={ if(memberComment.filter(_.label=="div").isEmpty) "no" else "yes" }
+      group={ mbr.group }>
+      <a id={ mbr.signature }/>
+      <a id={ mbr.signatureCompat }/>
+      { signature(mbr, isSelf = false) }
+      { memberComment }
+    </li>
+  }
+
+  def memberToCommentHtml(mbr: MemberEntity, inTpl: DocTemplateEntity, isSelf: Boolean): NodeSeq = {
+    mbr match {
+      case dte: DocTemplateEntity if isSelf =>
+        // comment of class itself
+        <xml:group>
+          <div id="comment" class="fullcommenttop">{ memberToCommentBodyHtml(mbr, inTpl, isSelf = true) }</div>
+        </xml:group>
+      case dte: DocTemplateEntity if mbr.comment.isDefined =>
+        // comment of inner, documented class (only short comment, full comment is on the class' own page)
+        memberToInlineCommentHtml(mbr, isSelf)
+      case _ =>
+        // comment of a non-class member or a non-documented inner class
+        val commentBody = memberToCommentBodyHtml(mbr, inTpl, isSelf = false)
+        if (commentBody.isEmpty)
+          NodeSeq.Empty
+        else {
+          val shortComment = memberToShortCommentHtml(mbr, isSelf)
+          val longComment = memberToUseCaseCommentHtml(mbr, isSelf) ++ memberToCommentBodyHtml(mbr, inTpl, isSelf)
+
+          val includedLongComment = if (shortComment.text.trim == longComment.text.trim)
+            NodeSeq.Empty
+          else
+            <div class="fullcomment">{ longComment }</div>
+
+          shortComment ++ includedLongComment
+        }
+    }
+  }
+
+  def memberToUseCaseCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq = {
+    mbr match {
+      case nte: NonTemplateMemberEntity if nte.isUseCase =>
+        inlineToHtml(comment.Text("[use case] "))
+      case _ => NodeSeq.Empty
+    }
+  }
+
+  def memberToShortCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq =
+    mbr.comment.fold(NodeSeq.Empty) { comment =>
+      <p class="shortcomment cmt">{ memberToUseCaseCommentHtml(mbr, isSelf) }{ inlineToHtml(comment.short) }</p>
+    }
+
+  def memberToInlineCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq =
+    <p class="comment cmt">{ inlineToHtml(mbr.comment.get.short) }</p>
+
+  def memberToCommentBodyHtml(mbr: MemberEntity, inTpl: DocTemplateEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
+    val s = universe.settings
+
+    val memberComment =
+      if (mbr.comment.isEmpty) NodeSeq.Empty
+      else <div class="comment cmt">{ commentToHtml(mbr.comment) }</div>
+
+    val authorComment =
+      if (! s.docAuthor || mbr.comment.isEmpty ||
+        mbr.comment.isDefined && mbr.comment.get.authors.isEmpty) NodeSeq.Empty
+      else <div class="comment cmt">
+        {if (mbr.comment.get.authors.size > 1) <h6>Authors:</h6> else <h6>Author:</h6>}
+        { mbr.comment.get.authors map bodyToHtml}
+      </div>
+
+    val paramComments = {
+      val prs: List[ParameterEntity] = mbr match {
+        case cls: Class => cls.typeParams ::: cls.valueParams.flatten
+        case trt: Trait => trt.typeParams
+        case dfe: Def => dfe.typeParams ::: dfe.valueParams.flatten
+        case ctr: Constructor => ctr.valueParams.flatten
+        case _ => Nil
+      }
+
+      def paramCommentToHtml(prs: List[ParameterEntity], comment: Comment): NodeSeq = prs match {
+
+        case (tp: TypeParam) :: rest =>
+          val paramEntry: NodeSeq = {
+            <dt class="tparam">{ tp.name }</dt><dd class="cmt">{ bodyToHtml(comment.typeParams(tp.name)) }</dd>
+          }
+          paramEntry ++ paramCommentToHtml(rest, comment)
+
+        case (vp: ValueParam) :: rest  =>
+          val paramEntry: NodeSeq = {
+            <dt class="param">{ vp.name }</dt><dd class="cmt">{ bodyToHtml(comment.valueParams(vp.name)) }</dd>
+          }
+          paramEntry ++ paramCommentToHtml(rest, comment)
+
+        case _ =>
+          NodeSeq.Empty
+      }
+
+      mbr.comment.fold(NodeSeq.Empty) { comment =>
+        val cmtedPrs = prs filter {
+          case tp: TypeParam => comment.typeParams isDefinedAt tp.name
+          case vp: ValueParam => comment.valueParams isDefinedAt vp.name
+        }
+        if (cmtedPrs.isEmpty && comment.result.isEmpty) NodeSeq.Empty
+        else {
+          <dl class="paramcmts block">{
+            paramCommentToHtml(cmtedPrs, comment) ++ (
+            comment.result match {
+              case None => NodeSeq.Empty
+              case Some(cmt) =>
+                <dt>returns</dt><dd class="cmt">{ bodyToHtml(cmt) }</dd>
+            })
+          }</dl>
+        }
+      }
+    }
+
+    val implicitInformation = mbr.byConversion match {
+      case Some(conv) =>
+        <dt class="implicit">Implicit information</dt> ++
+        {
+          val targetType = typeToHtml(conv.targetType, hasLinks = true)
+          val conversionMethod = conv.convertorMethod match {
+            case Left(member) => Text(member.name)
+            case Right(name)  => Text(name)
+          }
+
+          // strip off the package object endings, they make things harder to follow
+          val conversionOwnerQualifiedName = conv.convertorOwner.qualifiedName.stripSuffix(".package")
+          val conversionOwner = templateToHtml(conv.convertorOwner, conversionOwnerQualifiedName)
+
+          val constraintText = conv.constraints match {
+            case Nil =>
+              NodeSeq.Empty
+            case List(constraint) =>
+              scala.xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint) ++ scala.xml.Text(".")
+            case List(constraint1, constraint2) =>
+              scala.xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint1) ++
+                scala.xml.Text(" and at the same time ") ++ constraintToHtml(constraint2) ++ scala.xml.Text(".")
+            case constraints =>
+              <br/> ++ "This conversion will take place only if all of the following constraints are met:" ++ <br/> ++ {
+                var index = 0
+                constraints map { constraint => scala.xml.Text({ index += 1; index } + ". ") ++ constraintToHtml(constraint) ++ <br/> }
+              }
+          }
+
+          <dd>
+            This member is added by an implicit conversion from { typeToHtml(inTpl.resultType, hasLinks = true) } to
+            { targetType } performed by method { conversionMethod } in { conversionOwner }.
+            { constraintText }
+          </dd>
+        } ++ {
+          if (mbr.isShadowedOrAmbiguousImplicit) {
+            // These are the members that shadow the current implicit or make it ambiguous;
+            // see the ImplicitMemberShadowing trait for more information.
+            val shadowingSuggestion = {
+              val params = mbr match {
+                case d: Def => d.valueParams map (_ map (_ name) mkString("(", ", ", ")")) mkString
+                case _      => "" // no parameters
+              }
+              <br/> ++ scala.xml.Text("To access this member you can use a ") ++
+              <a href="http://stackoverflow.com/questions/2087250/what-is-the-purpose-of-type-ascription-in-scala"
+                target="_blank">type ascription</a> ++ scala.xml.Text(":") ++
+              <br/> ++ <div class="cmt"><pre>{"(" + Template.lowerFirstLetter(tpl.name) + ": " + conv.targetType.name + ")." + mbr.name + params }</pre></div>
+            }
+
+            val shadowingWarning: NodeSeq =
+              if (mbr.isShadowedImplicit)
+                  scala.xml.Text("This implicitly inherited member is shadowed by one or more members in this " +
+                  "class.") ++ shadowingSuggestion
+              else if (mbr.isAmbiguousImplicit)
+                  scala.xml.Text("This implicitly inherited member is ambiguous. One or more implicitly " +
+                  "inherited members have similar signatures, so calling this member may produce an ambiguous " +
+                  "implicit conversion compiler error.") ++ shadowingSuggestion
+              else NodeSeq.Empty
+
+            <dt class="implicit">Shadowing</dt> ++
+            <dd>{ shadowingWarning }</dd>
+
+          } else NodeSeq.Empty
+        }
+      case _ =>
+        NodeSeq.Empty
+    }
+
+    // --- start attributes block vals
+    val attributes: NodeSeq = {
+      val fvs: List[comment.Paragraph] = visibility(mbr).toList
+      if (fvs.isEmpty || isReduced) NodeSeq.Empty
+      else {
+        <dt>Attributes</dt>
+        <dd>{ fvs map { fv => { inlineToHtml(fv.text) ++ scala.xml.Text(" ") } } }</dd>
+      }
+    }
+
+    val definitionClasses: NodeSeq = {
+      val inDefTpls = mbr.inDefinitionTemplates
+      if ((inDefTpls.tail.isEmpty && (inDefTpls.head == inTpl)) || isReduced) NodeSeq.Empty
+      else {
+        <dt>Definition Classes</dt>
+        <dd>{ templatesToHtml(inDefTpls, scala.xml.Text(" → ")) }</dd>
+      }
+    }
+
+    val fullSignature: NodeSeq = {
+      mbr match {
+        case nte: NonTemplateMemberEntity if nte.isUseCase =>
+          <div class="full-signature-block toggleContainer">
+            <span class="toggle">Full Signature</span>
+            <div class="hiddenContent full-signature-usecase">{ signature(nte.useCaseOf.get,isSelf = true) }</div>
+          </div>
+        case _ => NodeSeq.Empty
+      }
+    }
+
+    val selfType: NodeSeq = mbr match {
+      case dtpl: DocTemplateEntity if (isSelf && !dtpl.selfType.isEmpty && !isReduced) =>
+        <dt>Self Type</dt>
+        <dd>{ typeToHtml(dtpl.selfType.get, hasLinks = true) }</dd>
+      case _ => NodeSeq.Empty
+    }
+
+    val annotations: NodeSeq = {
+      // A list of annotations which don't show their arguments, e.g. because they are shown separately.
+      val annotationsWithHiddenArguments = List("deprecated", "Deprecated", "migration")
+
+      def showArguments(annotation: Annotation) =
+        !(annotationsWithHiddenArguments.contains(annotation.qualifiedName))
+
+      if (!mbr.annotations.isEmpty) {
+        <dt>Annotations</dt>
+        <dd>{
+            mbr.annotations.map { annot =>
+              <xml:group>
+                <span class="name">@{ templateToHtml(annot.annotationClass) }</span>{
+                  if (showArguments(annot)) argumentsToHtml(annot.arguments) else NodeSeq.Empty
+                }
+              </xml:group>
+            }
+          }
+        </dd>
+      } else NodeSeq.Empty
+    }
+
+    val sourceLink: NodeSeq = mbr match {
+      case dtpl: DocTemplateEntity if (isSelf && dtpl.sourceUrl.isDefined && dtpl.inSource.isDefined && !isReduced) =>
+        val (absFile, _) = dtpl.inSource.get
+        <dt>Source</dt>
+        <dd>{ <a href={ dtpl.sourceUrl.get.toString } target="_blank">{ Text(absFile.file.getName) }</a> }</dd>
+      case _ => NodeSeq.Empty
+    }
+
+    val deprecation: NodeSeq =
+      mbr.deprecation match {
+        case Some(deprecation) if !isReduced =>
+          <dt>Deprecated</dt>
+          <dd class="cmt">{ bodyToHtml(deprecation) }</dd>
+        case _ => NodeSeq.Empty
+      }
+
+    val migration: NodeSeq =
+      mbr.migration match {
+        case Some(migration) if !isReduced =>
+          <dt>Migration</dt>
+          <dd class="cmt">{ bodyToHtml(migration) }</dd>
+        case _ => NodeSeq.Empty
+      }
+
+    val mainComment: NodeSeq = mbr.comment match {
+      case Some(comment) if (! isReduced) =>
+        def orEmpty[T](it: Iterable[T])(gen: => NodeSeq): NodeSeq =
+          if (it.isEmpty) NodeSeq.Empty else gen
+
+        val example =
+          orEmpty(comment.example) {
+            <div class="block">Example{ if (comment.example.length > 1) "s" else ""}:
+               <ol>{
+                 val exampleXml: List[NodeSeq] = for (ex <- comment.example) yield
+                   <li class="cmt">{ bodyToHtml(ex) }</li>
+                 exampleXml.reduceLeft(_ ++ Text(", ") ++ _)
+              }</ol>
+            </div>
+	  }
+
+        val version: NodeSeq =
+          orEmpty(comment.version) {
+            <dt>Version</dt>
+            <dd>{ for(body <- comment.version.toList) yield bodyToHtml(body) }</dd>
+          }
+
+        val sinceVersion: NodeSeq =
+          orEmpty(comment.since) {
+            <dt>Since</dt>
+            <dd>{ for(body <- comment.since.toList) yield bodyToHtml(body) }</dd>
+          }
+
+        val note: NodeSeq =
+          orEmpty(comment.note) {
+            <dt>Note</dt>
+            <dd>{
+              val noteXml: List[NodeSeq] =  for(note <- comment.note ) yield <span class="cmt">{bodyToHtml(note)}</span>
+              noteXml.reduceLeft(_ ++ Text(", ") ++ _)
+            }</dd>
+          }
+
+        val seeAlso: NodeSeq =
+          orEmpty(comment.see) {
+            <dt>See also</dt>
+            <dd>{
+              val seeXml: List[NodeSeq] = for(see <- comment.see ) yield <span class="cmt">{bodyToHtml(see)}</span>
+              seeXml.reduceLeft(_ ++ _)
+            }</dd>
+          }
+
+        val exceptions: NodeSeq =
+          orEmpty(comment.throws) {
+            <dt>Exceptions thrown</dt>
+            <dd>{
+              val exceptionsXml: List[NodeSeq] =
+                for((name, body) <- comment.throws.toList.sortBy(_._1) ) yield
+                  <span class="cmt">{Text(name) ++ bodyToHtml(body)}</span>
+              exceptionsXml.reduceLeft(_ ++ Text("") ++ _)
+            }</dd>
+          }
+
+        val todo: NodeSeq =
+          orEmpty(comment.todo) {
+            <dt>To do</dt>
+            <dd>{
+              val todoXml: List[NodeSeq] = (for(todo <- comment.todo ) yield <span class="cmt">{bodyToHtml(todo)}</span> )
+              todoXml.reduceLeft(_ ++ Text(", ") ++ _)
+            }</dd>
+          }
+
+        example ++ version ++ sinceVersion ++ exceptions ++ todo ++ note ++ seeAlso
+
+      case _ => NodeSeq.Empty
+    }
+    // end attributes block vals ---
+
+    val attributesInfo = implicitInformation ++ attributes ++ definitionClasses ++ fullSignature ++ selfType ++ annotations ++ deprecation ++ migration ++ sourceLink ++ mainComment
+    val attributesBlock =
+      if (attributesInfo.isEmpty)
+        NodeSeq.Empty
+      else
+        <dl class="attributes block"> { attributesInfo }</dl>
+
+    val linearization = mbr match {
+      case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.linearizationTemplates.nonEmpty =>
+        <div class="toggleContainer block">
+          <span class="toggle">Linear Supertypes</span>
+          <div class="superTypes hiddenContent">{
+            typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = scala.xml.Text(", "))
+          }</div>
+        </div>
+      case _ => NodeSeq.Empty
+    }
+
+    val subclasses = mbr match {
+      case dtpl: DocTemplateEntity if isSelf && !isReduced =>
+        val subs: Set[DocTemplateEntity] = HashSet.empty
+        def transitive(dtpl: DocTemplateEntity) {
+          for (sub <- dtpl.directSubClasses if !(subs contains sub)) {
+            subs add sub
+            transitive(sub)
+          }
+        }
+        transitive(dtpl)
+        if (subs.nonEmpty)
+          <div class="toggleContainer block">
+            <span class="toggle">Known Subclasses</span>
+            <div class="subClasses hiddenContent">{
+              templatesToHtml(subs.toList.sortBy(_.name), scala.xml.Text(", "))
+            }</div>
+          </div>
+        else NodeSeq.Empty
+      case _ => NodeSeq.Empty
+    }
+
+    def createDiagram(f: DocTemplateEntity => Option[Diagram], description: String, id: String): NodeSeq =
+      if (s.docDiagrams.value) mbr match {
+        case dtpl: DocTemplateEntity if isSelf && !isReduced =>
+          val diagram = f(dtpl)
+          if (diagram.isDefined) {
+            val diagramSvg = generator.generate(diagram.get, tpl, this)
+            if (diagramSvg != NodeSeq.Empty) {
+              <div class="toggleContainer block diagram-container" id={ id + "-container"}>
+                <span class="toggle diagram-link">{ description }</span>
+                <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#diagrams" target="_blank" class="diagram-help">Learn more about scaladoc diagrams</a>
+                <div class="diagram" id={ id }>{
+                  diagramSvg
+                }</div>
+              </div>
+            } else NodeSeq.Empty
+          } else NodeSeq.Empty
+        case _ => NodeSeq.Empty
+      } else NodeSeq.Empty // diagrams not generated
+
+    val typeHierarchy = createDiagram(_.inheritanceDiagram, "Type Hierarchy", "inheritance-diagram")
+    val contentHierarchy = createDiagram(_.contentDiagram, "Content Hierarchy", "content-diagram")
+
+    memberComment ++ authorComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses ++ typeHierarchy ++ contentHierarchy
+  }
+
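+  /** Renders optional type bounds; e.g. with lo = Some(Null) and hi = Some(AnyRef) this yields " >: Null <: AnyRef". */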
+  def boundsToHtml(hi: Option[TypeEntity], lo: Option[TypeEntity], hasLinks: Boolean): NodeSeq = {
+    def bound0(bnd: Option[TypeEntity], pre: String): NodeSeq = bnd match {
+      case None => NodeSeq.Empty
+      case Some(tpe) => scala.xml.Text(pre) ++ typeToHtml(tpe, hasLinks)
+    }
+    bound0(lo, " >: ") ++ bound0(hi, " <: ")
+  }
+
+  def visibility(mbr: MemberEntity): Option[comment.Paragraph] = {
+    import comment._
+    import comment.{ Text => CText }
+    mbr.visibility match {
+      case PrivateInInstance() =>
+        Some(Paragraph(CText("private[this]")))
+      case PrivateInTemplate(owner) if (owner == mbr.inTemplate) =>
+        Some(Paragraph(CText("private")))
+      case PrivateInTemplate(owner) =>
+        Some(Paragraph(Chain(List(CText("private["), EntityLink(comment.Text(owner.qualifiedName), LinkToTpl(owner)), CText("]")))))
+      case ProtectedInInstance() =>
+        Some(Paragraph(CText("protected[this]")))
+      case ProtectedInTemplate(owner) if (owner == mbr.inTemplate) =>
+        Some(Paragraph(CText("protected")))
+      case ProtectedInTemplate(owner) =>
+        Some(Paragraph(Chain(List(CText("protected["), EntityLink(comment.Text(owner.qualifiedName), LinkToTpl(owner)), CText("]")))))
+      case Public() =>
+        None
+    }
+  }
+
+  /** Renders a member's signature: name, type parameters, value parameters and result type. */
+  def signature(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
+    def inside(hasLinks: Boolean, nameLink: String = ""): NodeSeq =
+      <xml:group>
+      <span class="modifier_kind">
+        <span class="modifier">{ mbr.flags.map(flag => inlineToHtml(flag.text) ++ scala.xml.Text(" ")) }</span>
+        <span class="kind">{ kindToString(mbr) }</span>
+      </span>
+      <span class="symbol">
+        {
+          val nameClass =
+            if (mbr.isImplicitlyInherited)
+              if (mbr.isShadowedOrAmbiguousImplicit)
+                "implicit shadowed"
+              else
+                "implicit"
+            else
+              "name"
+
+          val nameHtml = {
+            val value = if (mbr.isConstructor) tpl.name else mbr.name
+            val span = if (mbr.deprecation.isDefined)
+              <span class={ nameClass + " deprecated"} title={"Deprecated: "+bodyToStr(mbr.deprecation.get)}>{ value }</span>
+            else
+              <span class={ nameClass }>{ value }</span>
+            val encoded = scala.reflect.NameTransformer.encode(value)
+            if (encoded != value) {
+              span % new UnprefixedAttribute("title",
+                                             "gt4s: " + encoded +
+                                             span.attribute("title").map(
+                                               node => ". " + node
+                                             ).getOrElse(""),
+                                             scala.xml.Null)
+            } else {
+              span
+            }
+          }
+          if (!nameLink.isEmpty)
+            <a href={nameLink}>{nameHtml}</a>
+          else nameHtml
+        }{
+          def tparamsToHtml(mbr: Any): NodeSeq = mbr match {
+            case hk: HigherKinded =>
+              val tpss = hk.typeParams
+              if (tpss.isEmpty) NodeSeq.Empty else {
+                def tparam0(tp: TypeParam): NodeSeq =
+                  <span name={ tp.name }>{ tp.variance + tp.name }{ tparamsToHtml(tp) }{ boundsToHtml(tp.hi, tp.lo, hasLinks)}</span>
+                def tparams0(tpss: List[TypeParam]): NodeSeq = (tpss: @unchecked) match {
+                  case tp :: Nil => tparam0(tp)
+                  case tp :: tps => tparam0(tp) ++ Text(", ") ++ tparams0(tps)
+                }
+                <span class="tparams">[{ tparams0(tpss) }]</span>
+              }
+            case _ => NodeSeq.Empty
+          }
+          tparamsToHtml(mbr)
+        }{
+          if (isReduced) NodeSeq.Empty else {
+            def paramsToHtml(vlsss: List[List[ValueParam]]): NodeSeq = {
+              def param0(vl: ValueParam): NodeSeq =
+                // note the }{ on the following lines: they are necessary to avoid undesired whitespace in the output
+                <span name={ vl.name }>{
+                  Text(vl.name)
+                }{ Text(": ") ++ typeToHtml(vl.resultType, hasLinks) }{
+                  vl.defaultValue match {
+                    case Some(v) => Text(" = ") ++ treeToHtml(v)
+                    case None => NodeSeq.Empty
+                  }
+                }</span>
+
+              def params0(vlss: List[ValueParam]): NodeSeq = vlss match {
+                case Nil => NodeSeq.Empty
+                case vl :: Nil => param0(vl)
+                case vl :: vls => param0(vl) ++ Text(", ") ++ params0(vls)
+              }
+              def implicitCheck(vlss: List[ValueParam]): NodeSeq = vlss match {
+                case vl :: vls => if(vl.isImplicit) { <span class="implicit">implicit </span> } else Text("")
+                case _ => Text("")
+              }
+              vlsss map { vlss => <span class="params">({implicitCheck(vlss) ++ params0(vlss) })</span> }
+            }
+            mbr match {
+              case cls: Class => paramsToHtml(cls.valueParams)
+              case ctr: Constructor => paramsToHtml(ctr.valueParams)
+              case dfe: Def => paramsToHtml(dfe.valueParams)
+              case _ => NodeSeq.Empty
+            }
+          }
+        }{ if (isReduced) NodeSeq.Empty else {
+          mbr match {
+            case tme: MemberEntity if (tme.isDef || tme.isVal || tme.isLazyVal || tme.isVar) =>
+              <span class="result">: { typeToHtml(tme.resultType, hasLinks) }</span>
+
+            case abt: MemberEntity with AbstractType =>
+              val b2s = boundsToHtml(abt.hi, abt.lo, hasLinks)
+              if (b2s != NodeSeq.Empty)
+                <span class="result">{ b2s }</span>
+              else NodeSeq.Empty
+
+            case alt: MemberEntity with AliasType =>
+              <span class="result"> = { typeToHtml(alt.alias, hasLinks) }</span>
+
+            case tpl: MemberTemplateEntity if !tpl.parentTypes.isEmpty =>
+              <span class="result"> extends { typeToHtml(tpl.parentTypes.map(_._2), hasLinks) }</span>
+
+            case _ => NodeSeq.Empty
+          }
+        }}
+      </span>
+      </xml:group>
+    mbr match {
+      case dte: DocTemplateEntity if !isSelf =>
+        <h4 class="signature">{ inside(hasLinks = true, nameLink = relativeLinkTo(dte)) }</h4>
+      case _ if isSelf =>
+        <h4 id="signature" class="signature">{ inside(hasLinks = true) }</h4>
+      case _ =>
+        <h4 class="signature">{ inside(hasLinks = true) }</h4>
+    }
+
+  }
+
+  /** Renders a tree entity (e.g. a default value expression) as HTML, linking referenced entities where possible. */
+  def treeToHtml(tree: TreeEntity): NodeSeq = {
+
+    /** Makes the text readable in the HTML page: newlines and basic indentation.
+     * Change this function if you want to improve the pretty-printing of default values.
+     */
+    def codeStringToXml(text: String): NodeSeq = {
+      var goodLookingXml: NodeSeq = NodeSeq.Empty
+      var indent = 0
+      for (c <- text) c match {
+        case '{' => indent+=1
+          goodLookingXml ++= Text("{")
+        case '}' => indent-=1
+          goodLookingXml ++= Text("}")
+        case '\n' =>
+          goodLookingXml++= <br/> ++ indentation
+        case _ => goodLookingXml ++= Text(c.toString)
+      }
+      def indentation:NodeSeq = {
+        var indentXml = NodeSeq.Empty
+        for (x <- 1 to indent) indentXml ++= Text("  ")
+        indentXml
+      }
+      goodLookingXml
+    }
+
+    var index = 0
+    val str = tree.expression
+    val length = str.length
+    var myXml: NodeSeq = NodeSeq.Empty
+    for ((from, (member, to)) <- tree.refEntity.toSeq) {
+      if (index < from) {
+        myXml ++= codeStringToXml(str.substring(index,from))
+        index = from
+      }
+      if (index == from) {
+        member match {
+          case mbr: DocTemplateEntity =>
+            val link = relativeLinkTo(mbr)
+            myXml ++= <span class="name"><a href={link}>{str.substring(from, to)}</a></span>
+          case mbr: MemberEntity =>
+            val anchor = "#" + mbr.signature
+            val link = relativeLinkTo(mbr.inTemplate)
+            myXml ++= <span class="name"><a href={link ++ anchor}>{str.substring(from, to)}</a></span>
+        }
+        index = to
+      }
+    }
+
+    if (index <= length-1)
+      myXml ++= codeStringToXml(str.substring(index, length ))
+
+    if (length < 36)
+      <span class="symbol">{ myXml }</span>
+    else
+      <span class="defval" name={ myXml }>{ "..." }</span>
+  }
+
+  private def argumentsToHtml(argss: List[ValueArgument]): NodeSeq = {
+    def argumentsToHtml0(argss: List[ValueArgument]): NodeSeq = argss match {
+      case Nil         => NodeSeq.Empty
+      case arg :: Nil  => argumentToHtml(arg)
+      case arg :: args => argumentToHtml(arg) ++ scala.xml.Text(", ") ++ argumentsToHtml0(args)
+    }
+    <span class="args">({ argumentsToHtml0(argss) })</span>
+  }
+
+  private def argumentToHtml(arg: ValueArgument): NodeSeq = {
+    <span>
+      {
+        arg.parameter match {
+          case Some(param) => Text(param.name + " = ")
+          case None => NodeSeq.Empty
+        }
+      }
+      { treeToHtml(arg.value) }
+    </span>
+  }
+
+  private def bodyToStr(body: comment.Body): String =
+    body.blocks flatMap (blockToStr(_)) mkString ""
+
+  private def blockToStr(block: comment.Block): String = block match {
+    case comment.Paragraph(in) => inlineToStr(in)
+    case _ => block.toString
+  }
+
+  private def inlineToStr(inl: comment.Inline): String = inl match {
+    case comment.Chain(items) => items flatMap (inlineToStr(_)) mkString ""
+    case comment.Italic(in) => inlineToStr(in)
+    case comment.Bold(in) => inlineToStr(in)
+    case comment.Underline(in) => inlineToStr(in)
+    case comment.Monospace(in) => inlineToStr(in)
+    case comment.Text(text) => text
+    case comment.Summary(in) => inlineToStr(in)
+    case _ => inl.toString
+  }
+
+  private def typeToHtmlWithStupidTypes(tpl: TemplateEntity, superTpl: TemplateEntity, superType: TypeEntity): NodeSeq =
+    if (tpl.universe.settings.useStupidTypes.value)
+      superTpl match {
+        case dtpl: DocTemplateEntity =>
+          val sig = signature(dtpl, isSelf = false, isReduced = true) \ "_"
+          sig
+        case tpl: TemplateEntity =>
+          Text(tpl.name)
+      }
+  else
+    typeToHtml(superType, hasLinks = true)
+
+  private def constraintToHtml(constraint: Constraint): NodeSeq = constraint match {
+    case ktcc: KnownTypeClassConstraint =>
+      scala.xml.Text(ktcc.typeExplanation(ktcc.typeParamName) + " (" + ktcc.typeParamName + ": ") ++
+        templateToHtml(ktcc.typeClassEntity) ++ scala.xml.Text(")")
+    case tcc: TypeClassConstraint =>
+      scala.xml.Text(tcc.typeParamName + " is ") ++
+        <a href="http://stackoverflow.com/questions/2982276/what-is-a-context-bound-in-scala" target="_blank">
+        context-bounded</a> ++ scala.xml.Text(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++
+        templateToHtml(tcc.typeClassEntity) ++ scala.xml.Text(")")
+    case impl: ImplicitInScopeConstraint =>
+      scala.xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, hasLinks = true) ++ scala.xml.Text(" is in scope")
+    case eq: EqualTypeParamConstraint =>
+      scala.xml.Text(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++
+        typeToHtml(eq.rhs, hasLinks = true) ++ scala.xml.Text(")")
+    case bt: BoundedTypeParamConstraint =>
+      scala.xml.Text(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " +
+        bt.upperBound.name + " (" + bt.typeParamName + " >: ") ++
+        typeToHtml(bt.lowerBound, hasLinks = true) ++ scala.xml.Text(" <: ") ++
+        typeToHtml(bt.upperBound, hasLinks = true) ++ scala.xml.Text(")")
+    case lb: LowerBoundedTypeParamConstraint =>
+      scala.xml.Text(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++
+        typeToHtml(lb.lowerBound, hasLinks = true) ++ scala.xml.Text(")")
+    case ub: UpperBoundedTypeParamConstraint =>
+      scala.xml.Text(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++
+        typeToHtml(ub.upperBound, hasLinks = true) ++ scala.xml.Text(")")
+  }
+}
+
+object Template {
+  /* Vlad: Lesson learned the hard way: don't put any stateful code that references the model here,
+   * it won't be garbage collected and you'll end up filling the heap with garbage */
+
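+  // e.g. lowerFirstLetter("Graph") == "graph"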
+  def lowerFirstLetter(s: String) = if (s.length >= 1) s.substring(0,1).toLowerCase() + s.substring(1) else s
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
rename to src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
new file mode 100644
index 0000000..ab8e9e2
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
@@ -0,0 +1,66 @@
+/**
+ * @author Vlad Ureche
+ */
+package scala.tools.nsc.doc
+package html.page.diagram
+
+object DiagramStats {
+
+  class TimeTracker(title: String) {
+    var totalTime: Long = 0L
+    var maxTime: Long = 0L
+    var instances: Int = 0
+
+    def addTime(ms: Long) = {
+      if (maxTime < ms)
+        maxTime = ms
+      totalTime += ms
+      instances += 1
+    }
+
+    def printStats(print: String => Unit) = {
+      if (instances == 0)
+        print(title + ": no stats gathered")
+      else {
+        print("  " + title)
+        print("  " + "=" * title.length)
+        print("    count:        " + instances + " items")
+        print("    total time:   " + totalTime + " ms")
+        print("    average time: " + (totalTime/instances) + " ms")
+        print("    maximum time: " + maxTime + " ms")
+        print("")
+      }
+    }
+  }
+
+  private[this] val filterTrack = new TimeTracker("diagrams model filtering")
+  private[this] val modelTrack = new TimeTracker("diagrams model generation")
+  private[this] val dotGenTrack = new TimeTracker("dot diagram generation")
+  private[this] val dotRunTrack = new TimeTracker("dot process running")
+  private[this] val svgTrack = new TimeTracker("svg processing")
+  private[this] var brokenImages = 0
+  private[this] var fixedImages = 0
+
+  def printStats(settings: Settings) = {
+    if (settings.docDiagramsDebug) {
+      settings.printMsg("\nDiagram generation running time breakdown:\n")
+      filterTrack.printStats(settings.printMsg)
+      modelTrack.printStats(settings.printMsg)
+      dotGenTrack.printStats(settings.printMsg)
+      dotRunTrack.printStats(settings.printMsg)
+      svgTrack.printStats(settings.printMsg)
+      println("  Broken images: " + brokenImages)
+      println("  Fixed images: " + fixedImages)
+      println("")
+    }
+  }
+
+  def addFilterTime(ms: Long) = filterTrack.addTime(ms)
+  def addModelTime(ms: Long) = modelTrack.addTime(ms)
+  def addDotGenerationTime(ms: Long) = dotGenTrack.addTime(ms)
+  def addDotRunningTime(ms: Long) = dotRunTrack.addTime(ms)
+  def addSvgTime(ms: Long) = svgTrack.addTime(ms)
+
+  def addBrokenImage(): Unit = brokenImages += 1
+  def addFixedImage(): Unit = fixedImages += 1
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
new file mode 100644
index 0000000..4ff436b
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
@@ -0,0 +1,508 @@
+/**
+ * @author Damien Obrist
+ * @author Vlad Ureche
+ */
+package scala
+package tools
+package nsc
+package doc
+package html
+package page
+package diagram
+
+import scala.xml.{NodeSeq, XML, PrefixedAttribute, Elem, MetaData, Null, UnprefixedAttribute}
+import scala.collection.immutable._
+import model._
+import model.diagram._
+
+class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
+
+  // the page where the diagram will be embedded
+  private var page: HtmlPage = null
+  // path to the "lib" folder relative to the page
+  private var pathToLib: String = null
+  // maps nodes to unique indices
+  private var node2Index: Map[Node, Int] = null
+  // true if the current diagram is a class diagram
+  private var isInheritanceDiagram = false
+  // incoming implicit nodes (needed for determining the CSS class of a node)
+  private var incomingImplicitNodes: List[Node] = List()
+  // the suffix used when there are too many classes to show
+  private final val MultiSuffix = " classes/traits"
+  // used to generate unique node and edge ids (i.e. avoid conflicts with multiple diagrams)
+  private var counter = 0
+
+  def generate(diagram: Diagram, template: DocTemplateEntity, page: HtmlPage):NodeSeq = {
+    counter = counter + 1
+    this.page = page
+    pathToLib = "../" * (page.templateToPath(template).size - 1) + "lib/"
+    val dot = generateDot(diagram)
+    val result = generateSVG(dot, template)
+    // clean things up a bit, so we don't leave garbage on the heap
+    this.page = null
+    node2Index = null
+    incomingImplicitNodes = List()
+    result
+  }
+
+  /**
+   * Generates a dot string for a given diagram.
+   */
+  private def generateDot(d: Diagram) = {
+    // inheritance nodes (all nodes except thisNode and implicit nodes)
+    var nodes: List[Node] = null
+    // inheritance edges (all edges except implicit edges)
+    var edges: List[(Node, List[Node])] = null
+
+    // timing
+    var tDot = -System.currentTimeMillis
+
+    // variables specific to class diagrams:
+    // current node of a class diagram
+    var thisNode:Node = null
+    var subClasses = List[Node]()
+    var superClasses = List[Node]()
+    var incomingImplicits = List[Node]()
+    var outgoingImplicits = List[Node]()
+    isInheritanceDiagram = false
+
+    d match {
+      case InheritanceDiagram(_thisNode, _superClasses, _subClasses, _incomingImplicits, _outgoingImplicits) =>
+
+        def textTypeEntity(text: String) =
+          new TypeEntity {
+            val name = text
+            def refEntity: SortedMap[Int, (base.LinkTo, Int)] = SortedMap()
+          }
+
+        // it seems dot chokes on node names over 8000 chars, so let's limit the size of the string
+        // conservatively, we'll limit at 4000, to be sure:
+        def limitSize(str: String) = if (str.length > 4000) str.substring(0, 3996) + " ..." else str
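+        // e.g. a 5000-character tooltip is cut to its first 3996 characters followed by " ..."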
+
+        // avoid overcrowding the diagram:
+        //   if there are too many super / sub / implicit nodes, represent
+        //   them by one node with a corresponding tooltip
+        superClasses = if (_superClasses.length > settings.docDiagramsMaxNormalClasses.value) {
+          val superClassesTooltip = Some(limitSize(_superClasses.map(_.tpe.name).mkString(", ")))
+          List(NormalNode(textTypeEntity(_superClasses.length + MultiSuffix), None)(superClassesTooltip))
+        } else _superClasses
+
+        subClasses = if (_subClasses.length > settings.docDiagramsMaxNormalClasses.value) {
+          val subClassesTooltip = Some(limitSize(_subClasses.map(_.tpe.name).mkString(", ")))
+          List(NormalNode(textTypeEntity(_subClasses.length + MultiSuffix), None)(subClassesTooltip))
+        } else _subClasses
+
+        incomingImplicits = if (_incomingImplicits.length > settings.docDiagramsMaxImplicitClasses.value) {
+          val incomingImplicitsTooltip = Some(limitSize(_incomingImplicits.map(_.tpe.name).mkString(", ")))
+          List(ImplicitNode(textTypeEntity(_incomingImplicits.length + MultiSuffix), None)(incomingImplicitsTooltip))
+        } else _incomingImplicits
+
+        outgoingImplicits = if (_outgoingImplicits.length > settings.docDiagramsMaxImplicitClasses.value) {
+          val outgoingImplicitsTooltip = Some(limitSize(_outgoingImplicits.map(_.tpe.name).mkString(", ")))
+          List(ImplicitNode(textTypeEntity(_outgoingImplicits.length + MultiSuffix), None)(outgoingImplicitsTooltip))
+        } else _outgoingImplicits
+
+        thisNode = _thisNode
+        nodes = List()
+        edges = (thisNode -> superClasses) :: subClasses.map(_ -> List(thisNode))
+        node2Index = (thisNode::subClasses:::superClasses:::incomingImplicits:::outgoingImplicits).zipWithIndex.toMap
+        isInheritanceDiagram = true
+        incomingImplicitNodes = incomingImplicits
+      case _ =>
+        nodes = d.nodes
+        edges = d.edges
+        node2Index = d.nodes.zipWithIndex.toMap
+        incomingImplicitNodes = List()
+    }
+
+    val implicitsDot = {
+      if (!isInheritanceDiagram) ""
+      else {
+        // dot cluster containing thisNode
+        val thisCluster = "subgraph clusterThis {\n" +
+          "style=\"invis\"\n" +
+          node2Dot(thisNode) +
+        "}"
+        // dot cluster containing incoming implicit nodes, if any
+        val incomingCluster = {
+          if(incomingImplicits.isEmpty) ""
+          else "subgraph clusterIncoming {\n" +
+            "style=\"invis\"\n" +
+            incomingImplicits.reverse.map(n => node2Dot(n)).mkString +
+            (if (incomingImplicits.size > 1)
+              incomingImplicits.map(n => "node" + node2Index(n)).mkString(" -> ") +
+              " [constraint=\"false\", style=\"invis\", minlen=\"0.0\"];\n"
+            else "") +
+          "}"
+        }
+        // dot cluster containing outgoing implicit nodes, if any
+        val outgoingCluster = {
+          if(outgoingImplicits.isEmpty) ""
+          else "subgraph clusterOutgoing {\n" +
+            "style=\"invis\"\n" +
+            outgoingImplicits.reverse.map(n => node2Dot(n)).mkString +
+            (if (outgoingImplicits.size > 1)
+              outgoingImplicits.map(n => "node" + node2Index(n)).mkString(" -> ") +
+              " [constraint=\"false\", style=\"invis\", minlen=\"0.0\"];\n"
+            else "") +
+          "}"
+        }
+
+        // assemble clusters into another cluster
+        val incomingTooltip = incomingImplicits.map(_.name).mkString(", ") + " can be implicitly converted to " + thisNode.name
+        val outgoingTooltip =  thisNode.name + " can be implicitly converted to " + outgoingImplicits.map(_.name).mkString(", ")
+        "subgraph clusterAll {\n" +
+      	"style=\"invis\"\n" +
+          outgoingCluster + "\n" +
+      	  thisCluster + "\n" +
+      	  incomingCluster + "\n" +
+      	  // incoming implicit edge
+      	  (if (!incomingImplicits.isEmpty) {
+      	    val n = incomingImplicits.last
+      	    "node" + node2Index(n) +" -> node" + node2Index(thisNode) +
+      	    " [id=\"" + cssClass(n, thisNode) + "|" + node2Index(n) + "_" + node2Index(thisNode) + "\", tooltip=\"" + incomingTooltip + "\"" +
+      	    ", constraint=\"false\", minlen=\"2\", ltail=\"clusterIncoming\", lhead=\"clusterThis\", label=\"implicitly\"];\n"
+      	  } else "") +
+      	  // outgoing implicit edge
+      	  (if (!outgoingImplicits.isEmpty) {
+      	    val n = outgoingImplicits.head
+      	    "node" + node2Index(thisNode) + " -> node" + node2Index(n) +
+      	    " [id=\"" + cssClass(thisNode, n) + "|" + node2Index(thisNode) + "_" + node2Index(n) + "\", tooltip=\"" + outgoingTooltip + "\"" +
+      	    ", constraint=\"false\", minlen=\"2\", ltail=\"clusterThis\", lhead=\"clusterOutgoing\", label=\"implicitly\"];\n"
+      	  } else "") +
+        "}"
+      }
+    }
+
+    // assemble graph
+    val graph = "digraph G {\n" +
+      // graph / node / edge attributes
+      graphAttributesStr +
+      "node [" + nodeAttributesStr + "];\n" +
+      "edge [" + edgeAttributesStr + "];\n" +
+      implicitsDot + "\n" +
+      // inheritance nodes
+      nodes.map(n => node2Dot(n)).mkString +
+      subClasses.map(n => node2Dot(n)).mkString +
+      superClasses.map(n => node2Dot(n)).mkString +
+      // inheritance edges
+      edges.map{ case (from, tos) => tos.map(to => {
+        val id = "graph" + counter + "_" + node2Index(to) + "_" + node2Index(from)
+        // the X -> Y edge is inverted twice to keep the diagram flowing the right way
+        // that is, an edge from node X to Y will result in a dot instruction nodeY -> nodeX [dir="back"]
+        "node" + node2Index(to) + " -> node" + node2Index(from) +
+        " [id=\"" + cssClass(to, from) + "|" + id + "\", " +
+        "tooltip=\"" + from.name + (if (from.name.endsWith(MultiSuffix)) " are subtypes of " else " is a subtype of ") +
+          to.name + "\", dir=\"back\", arrowtail=\"empty\"];\n"
+      }).mkString}.mkString +
+    "}"
+
+    tDot += System.currentTimeMillis
+    DiagramStats.addDotGenerationTime(tDot)
+
+    graph
+  }
+
+  /**
+   * Generates the dot string of a given node.
+   */
+  private def node2Dot(node: Node) = {
+
+    // escape HTML characters in node names
+    def escape(name: String) = name.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
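+    // e.g. escape("Map[A <: AnyRef]") == "Map[A &lt;: AnyRef]" (the label below is an HTML-like dot label, so entities must be escaped)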
+
+    // assemble node attributes in a map
+    val attr = scala.collection.mutable.Map[String, String]()
+
+    // link
+    node.doctpl match {
+      case Some(tpl) => attr += "URL" -> (page.relativeLinkTo(tpl) + "#inheritance-diagram")
+      case _ =>
+    }
+
+    // tooltip
+    node.tooltip match {
+      case Some(text) => attr += "tooltip" -> text
+      // show full name where available (instead of TraversableOps[A] show scala.collection.parallel.TraversableOps[A])
+      case None if node.tpl.isDefined => attr += "tooltip" -> node.tpl.get.qualifiedName
+      case _ =>
+    }
+
+    // styles
+    if(node.isImplicitNode)
+      attr ++= implicitStyle
+    else if(node.isOutsideNode)
+      attr ++= outsideStyle
+    else if(node.isTraitNode)
+      attr ++= traitStyle
+    else if(node.isClassNode)
+      attr ++= classStyle
+    else if(node.isObjectNode)
+      attr ++= objectStyle
+    else if(node.isTypeNode)
+      attr ++= typeStyle
+    else
+      attr ++= defaultStyle
+
+    // HTML label
+    var name = escape(node.name)
+    var img = ""
+    if(node.isTraitNode)
+      img = "trait_diagram.png"
+    else if(node.isClassNode)
+      img = "class_diagram.png"
+    else if(node.isObjectNode)
+      img = "object_diagram.png"
+    else if(node.isTypeNode)
+      img = "type_diagram.png"
+
+    if(!img.equals("")) {
+      img = "<TD><IMG SCALE=\"TRUE\" SRC=\"" + settings.outdir.value + "/lib/" + img + "\" /></TD>"
+      name = name + " "
+    }
+    val label = "<<TABLE BORDER=\"0\" CELLBORDER=\"0\">" +
+    		       "<TR>" + img + "<TD VALIGN=\"MIDDLE\">" + name + "</TD></TR>" +
+    		    "</TABLE>>"
+
+    // dot does not allow specifying a CSS class, so we
+    // set the id to "{class}|{id}", which is decoded in
+    // the transform method
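+    // e.g. a plain trait node with index 3 in the second generated diagram gets id "trait|graph2_3"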
+    val id = "graph" + counter + "_" + node2Index(node)
+    attr += ("id" -> (cssClass(node) + "|" + id))
+
+    // return dot string
+    "node" + node2Index(node) + " [label=" + label + "," + flatten(attr.toMap) + "];\n"
+  }
+
+  /**
+   * Returns the CSS class for an edge connecting node1 and node2.
+   */
+  private def cssClass(node1: Node, node2: Node): String = {
+    if (node1.isImplicitNode && node2.isThisNode)
+      "implicit-incoming"
+    else if (node1.isThisNode && node2.isImplicitNode)
+      "implicit-outgoing"
+    else
+      "inheritance"
+  }
+
+  /**
+   * Returns the CSS class for a node.
+   */
+  private def cssClass(node: Node): String =
+    if (node.isImplicitNode && incomingImplicitNodes.contains(node))
+      "implicit-incoming" + cssBaseClass(node, "", " ")
+    else if (node.isImplicitNode)
+      "implicit-outgoing" + cssBaseClass(node, "", " ")
+    else if (node.isThisNode)
+      "this" + cssBaseClass(node, "", " ")
+    else if (node.isOutsideNode)
+      "outside" + cssBaseClass(node, "", " ")
+    else
+      cssBaseClass(node, "default", "")
+
+  private def cssBaseClass(node: Node, default: String, space: String) =
+    if (node.isClassNode)
+      space + "class"
+    else if (node.isTraitNode)
+      space + "trait"
+    else if (node.isObjectNode)
+      space + "object"
+    else if (node.isTypeNode)
+      space + "type"
+    else
+      default
+
+  /**
+   * Calls dot with a given dot string and returns the SVG output.
+   */
+  private def generateSVG(dotInput: String, template: DocTemplateEntity) = {
+    val dotOutput = DiagramGenerator.getDotRunner().feedToDot(dotInput, template)
+    var tSVG = -System.currentTimeMillis
+
+    val result = if (dotOutput != null) {
+      val src = scala.io.Source.fromString(dotOutput)
+      try {
+        val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, preserveWS = false)
+        val doc = cpa.document()
+        if (doc != null)
+          transform(doc.docElem)
+        else
+          NodeSeq.Empty
+      } catch {
+        case exc: Exception =>
+          if (settings.docDiagramsDebug) {
+            settings.printMsg("\n\n**********************************************************************")
+            settings.printMsg("Encountered an error while generating page for " + template.qualifiedName)
+            settings.printMsg(dotInput.toString.split("\n").mkString("\nDot input:\n\t","\n\t",""))
+            settings.printMsg(dotOutput.toString.split("\n").mkString("\nDot output:\n\t","\n\t",""))
+            settings.printMsg(exc.getStackTrace.mkString("\nException: " + exc.toString + ":\n\tat ", "\n\tat ",""))
+            settings.printMsg("\n\n**********************************************************************")
+          } else {
+            settings.printMsg("\nThe diagram for " + template.qualifiedName + " could not be created due to an internal error.")
+            settings.printMsg("Use " + settings.docDiagramsDebug.name + " for more information and please file this as a bug.")
+          }
+          NodeSeq.Empty
+      }
+    } else
+      NodeSeq.Empty
+
+    tSVG += System.currentTimeMillis
+    DiagramStats.addSvgTime(tSVG)
+
+    result
+  }
+
+  /**
+   * Transforms the SVG generated by dot:
+   * - adds a class attribute to the SVG element
+   * - changes the path of the node images from absolute to relative
+   * - assigns id and class attributes to nodes and edges
+   * - removes title elements
+   */
+  private def transform(e:scala.xml.Node): scala.xml.Node = e match {
+    // add an id and class attribute to the SVG element
+    case Elem(prefix, "svg", attribs, scope, child @ _*) => {
+      val klass = if (isInheritanceDiagram) "class-diagram" else "package-diagram"
+      Elem(prefix, "svg", attribs, scope, child map(x => transform(x)) : _*) %
+      new UnprefixedAttribute("id", "graph" + counter, Null) %
+      new UnprefixedAttribute("class", klass, Null)
+    }
+    // change the path of the node images from absolute to relative
+    case img @ <image></image> => {
+      val href = (img \ "@{http://www.w3.org/1999/xlink}href").toString
+      val file = href.substring(href.lastIndexOf("/") + 1, href.size)
+      img.asInstanceOf[Elem] %
+      new PrefixedAttribute("xlink", "href", pathToLib + file, Null)
+    }
+    // assign id and class attributes to edges and nodes:
+    // the id attribute generated by dot has the format: "{class}|{id}"
+    case g @ Elem(prefix, "g", attribs, scope, children @ _*) if (List("edge", "node").contains((g \ "@class").toString)) => {
+      var res = new Elem(prefix, "g", attribs, scope, (children map(x => transform(x))): _*)
+      val dotId = (g \ "@id").toString
+      if (dotId.count(_ == '|') == 1) {
+        val Array(klass, id) = dotId.toString.split("\\|")
+        /* Sometimes dot "forgets" to add the image -- that's very annoying, but it seems pretty random, and simple
+         * tests like executing 20K times and diffing the output don't trigger the bug -- so it's up to us to place the image
+         * back in the node */
+        val kind = getKind(klass)
+        if (kind != "")
+          if (((g \ "a" \ "image").isEmpty)) {
+            DiagramStats.addBrokenImage()
+            val xposition = getPosition(g, "x", -22)
+            val yposition = getPosition(g, "y", -11.3334)
+            if (xposition.isDefined && yposition.isDefined) {
+              val imageNode = <image xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href={ ("./lib/" + kind + "_diagram.png") } width="16px" height="16px" preserveAspectRatio="xMinYMin meet" x={ xposition.get.toString } y={ yposition.get.toString }/>
+              val anchorNode = (g \ "a") match {
+                case Seq(Elem(prefix, "a", attribs, scope, children @ _*)) =>
+                  transform(new Elem(prefix, "a", attribs, scope, (children ++ imageNode): _*))
+                case _ =>
+                  g \ "a"
+              }
+              res = new Elem(prefix, "g", attribs, scope, anchorNode: _*)
+              DiagramStats.addFixedImage()
+            }
+          }
+        res % new UnprefixedAttribute("id", id, Null) %
+        new UnprefixedAttribute("class", (g \ "@class").toString + " " + klass, Null)
+      }
+      else res
+    }
+    // remove titles
+    case <title>{ _* }</title> =>
+      scala.xml.Text("")
+    // apply recursively
+    case Elem(prefix, label, attribs, scope, child @ _*) =>
+      Elem(prefix, label, attribs, scope, child map(x => transform(x)) : _*)
+    case x => x
+  }
+
+  def getKind(klass: String): String =
+    if (klass.contains("class")) "class"
+    else if (klass.contains("trait")) "trait"
+    else if (klass.contains("object")) "object"
+    else ""
+
+  def getPosition(g: scala.xml.Node, axis: String, offset: Double): Option[Double] = {
+    val node = g \ "a" \ "text" \ ("@" + axis)
+    if (node.isEmpty)
+      None
+    else
+      Some(node.toString.toDouble + offset)
+  }
+
+  /* graph / node / edge attributes */
+
+  private val graphAttributes: Map[String, String] = Map(
+      "compound" -> "true",
+      "rankdir" -> "TB"
+  )
+
+  private val nodeAttributes = Map(
+    "shape" -> "rectangle",
+    "style" -> "filled",
+    "penwidth" -> "1",
+    "margin" -> "0.08,0.01",
+    "width" -> "0.0",
+    "height" -> "0.0",
+    "fontname" -> "Arial",
+    "fontsize" -> "10.00"
+  )
+
+  private val edgeAttributes = Map(
+    "color" -> "#d4d4d4",
+    "arrowsize" -> "0.5",
+    "fontcolor" -> "#aaaaaa",
+    "fontsize" -> "10.00",
+    "fontname" -> "Arial"
+  )
+
+  private val defaultStyle = Map(
+    "color" -> "#ababab",
+    "fillcolor" -> "#e1e1e1",
+    "fontcolor" -> "#7d7d7d",
+    "margin" -> "0.1,0.04"
+  )
+
+  private val implicitStyle = Map(
+    "color" -> "#ababab",
+    "fillcolor" -> "#e1e1e1",
+    "fontcolor" -> "#7d7d7d"
+  )
+
+  private val outsideStyle = Map(
+    "color" -> "#ababab",
+    "fillcolor" -> "#e1e1e1",
+    "fontcolor" -> "#7d7d7d"
+  )
+
+  private val traitStyle = Map(
+    "color" -> "#37657D",
+    "fillcolor" -> "#498AAD",
+    "fontcolor" -> "#ffffff"
+  )
+
+  private val classStyle = Map(
+    "color" -> "#115F3B",
+    "fillcolor" -> "#0A955B",
+    "fontcolor" -> "#ffffff"
+  )
+
+  private val objectStyle = Map(
+    "color" -> "#102966",
+    "fillcolor" -> "#3556a7",
+    "fontcolor" -> "#ffffff"
+  )
+
+  private val typeStyle = Map(
+    "color" -> "#115F3B",
+    "fillcolor" -> "#0A955B",
+    "fontcolor" -> "#ffffff"
+  )
+
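+  // renders an attribute map as a dot attribute list, e.g. flatten(Map("shape" -> "box")) == "shape=\"box\""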
+  private def flatten(attributes: Map[String, String]) = attributes.map{ case (key, value) => key + "=\"" + value + "\"" }.mkString(", ")
+
+  private val graphAttributesStr = graphAttributes.map{ case (key, value) => key + "=\"" + value + "\";\n" }.mkString
+  private val nodeAttributesStr = flatten(nodeAttributes)
+  private val edgeAttributesStr = flatten(edgeAttributes)
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
new file mode 100644
index 0000000..4bed106
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
@@ -0,0 +1,225 @@
+package scala.tools.nsc
+package doc
+package html
+package page
+package diagram
+
+import java.io.InputStream
+import java.io.OutputStream
+import java.io.InputStreamReader
+import java.io.OutputStreamWriter
+import java.io.BufferedWriter
+import java.io.BufferedReader
+import scala.sys.process._
+import scala.concurrent.SyncVar
+
+import model._
+
+/** This class takes care of running the graphviz dot utility */
+class DotRunner(settings: doc.Settings) {
+
+  private[this] var dotRestarts = 0
+  private[this] var dotProcess: DotProcess  = null
+
+  def feedToDot(dotInput: String, template: DocTemplateEntity): String = {
+
+    if (dotProcess == null) {
+      if (dotRestarts < settings.docDiagramsDotRestart.value) {
+        if (dotRestarts != 0)
+          settings.printMsg("Graphviz will be restarted...\n")
+        dotRestarts += 1
+        dotProcess = new DotProcess(settings)
+      } else
+        return null
+    }
+
+    val tStart = System.currentTimeMillis
+    val result = dotProcess.feedToDot(dotInput, template.qualifiedName)
+    val tFinish = System.currentTimeMillis
+    DiagramStats.addDotRunningTime(tFinish - tStart)
+
+    if (result == null) {
+      dotProcess.cleanup()
+      dotProcess = null
+      if (dotRestarts == settings.docDiagramsDotRestart.value) {
+        settings.printMsg("\n")
+        settings.printMsg("**********************************************************************")
+        settings.printMsg("Diagrams will be disabled for this run because the graphviz dot tool")
+        settings.printMsg("has malfunctioned too many times. These scaladoc flags may help:")
+        settings.printMsg("")
+        val baseList = List(settings.docDiagramsDebug,
+                            settings.docDiagramsDotPath,
+                            settings.docDiagramsDotRestart,
+                            settings.docDiagramsDotTimeout)
+        val width    = (baseList map (_.helpSyntax.length)).max
+        def helpStr(s: doc.Settings#Setting) = ("%-" + width + "s") format (s.helpSyntax) + "  " + s.helpDescription
+        baseList.foreach((sett: doc.Settings#Setting) => settings.printMsg(helpStr(sett)))
+        settings.printMsg("\nPlease note that graphviz package version 2.26 or above is required.")
+        settings.printMsg("**********************************************************************\n\n")
+
+      }
+    }
+
+    result
+  }
+
+  def cleanup() =
+    if (dotProcess != null)
+      dotProcess.cleanup()
+}
+
+class DotProcess(settings: doc.Settings) {
+
+  @volatile var error: Boolean = false           // signal an error
+  val inputString = new SyncVar[String]                 // used for the dot process input
+  val outputString = new SyncVar[String]                // used for the dot process output
+  val errorBuffer: StringBuffer = new StringBuffer() // buffer used for both dot process error console AND logging
+
+  // set in only one place, in the main thread
+  var process: Process = null
+  var templateName: String = ""
+  var templateInput: String = ""
+
+  def feedToDot(input: String, template: String): String = {
+
+    templateName = template
+    templateInput = input
+
+    try {
+
+      // process creation
+      if (process == null) {
+        val procIO = new ProcessIO(inputFn(_), outputFn(_), errorFn(_))
+        val processBuilder: ProcessBuilder = Seq(settings.docDiagramsDotPath.value, "-Tsvg")
+        process = processBuilder.run(procIO)
+      }
+
+      // pass the input and wait for the output
+      assert(!inputString.isSet)
+      assert(!outputString.isSet)
+      inputString.put(input)
+      var result = outputString.take(settings.docDiagramsDotTimeout.value * 1000L)
+      if (error) result = null
+
+      result
+
+    } catch {
+      case exc: Throwable =>
+        errorBuffer.append("  Main thread in " + templateName + ": " +
+          (if (exc.isInstanceOf[NoSuchElementException]) "Timeout" else "Exception: " + exc))
+        error = true
+        return null
+    }
+  }
+
+  def cleanup(): Unit = {
+
+    // we'll need to know if there was any error for reporting
+    val _error = error
+
+    if (process != null) {
+      // if there's no error, this should exit cleanly
+      if (!error) feedToDot("<finish>", "<finishing>")
+
+      // just in case there's any thread hanging, this will take it out of the loop
+      error = true
+      process.destroy()
+      // we'll need to unblock the input again
+      if (!inputString.isSet) inputString.put("")
+      if (outputString.isSet) outputString.take()
+    }
+
+    if (_error) {
+      if (settings.docDiagramsDebug.value) {
+        settings.printMsg("\n**********************************************************************")
+        settings.printMsg("The graphviz dot diagram tool has malfunctioned and will be restarted.")
+        settings.printMsg("\nThe following is the log of the failure:")
+        settings.printMsg(errorBuffer.toString)
+        settings.printMsg("  Cleanup: Last template: " + templateName)
+        settings.printMsg("  Cleanup: Last dot input: \n    " + templateInput.replaceAll("\n","\n    ") + "\n")
+        settings.printMsg("  Cleanup: Dot path: " + settings.docDiagramsDotPath.value)
+        if (process != null)
+          settings.printMsg("  Cleanup: Dot exit code: " + process.exitValue)
+        settings.printMsg("**********************************************************************")
+      } else {
+        // we shouldn't just sit there for 50s not reporting anything, no?
+        settings.printMsg("Graphviz dot encountered an error when generating the diagram for:")
+        settings.printMsg(templateName)
+        settings.printMsg("These are usually spurious errors, but if you notice a persistent error on")
+        settings.printMsg("a diagram, please use the " + settings.docDiagramsDebug.name + " flag and report a bug with the output.")
+      }
+    }
+  }
+
+  /* The standard input passing function */
+  private[this] def inputFn(stdin: OutputStream): Unit = {
+    val writer = new BufferedWriter(new OutputStreamWriter(stdin))
+    try {
+      var input = inputString.take()
+
+      while (!error) {
+        if (input == "<finish>") {
+          // "<finish>" => close stdin so the dot process can exit
+          stdin.close()
+          return
+        } else {
+          // send output to dot
+          writer.write(input + "\n\n")
+          writer.flush()
+        }
+
+        if (!error) input = inputString.take()
+      }
+      stdin.close()
+    } catch {
+      case exc: Throwable =>
+        error = true
+        stdin.close()
+        errorBuffer.append("  Input thread in " + templateName + ": Exception: " + exc + "\n")
+    }
+  }
+
+  private[this] def outputFn(stdOut: InputStream): Unit = {
+    val reader = new BufferedReader(new InputStreamReader(stdOut))
+    val buffer: StringBuilder = new StringBuilder()
+    try {
+      var line = reader.readLine
+      while (!error && line != null) {
+        buffer.append(line + "\n")
+        // signal the last element in the svg (only for output)
+        if (line == "</svg>") {
+          outputString.put(buffer.toString)
+          buffer.setLength(0)
+        }
+        if (error) { stdOut.close(); return }
+        line = reader.readLine
+      }
+      assert(!outputString.isSet)
+      outputString.put(buffer.toString)
+      stdOut.close()
+    } catch {
+      case exc: Throwable =>
+        error = true
+        stdOut.close()
+        errorBuffer.append("  Output thread in " + templateName + ": Exception: " + exc + "\n")
+    }
+  }
+
+  private[this] def errorFn(stdErr: InputStream): Unit = {
+    val reader = new BufferedReader(new InputStreamReader(stdErr))
+    try {
+      var line = reader.readLine
+      while (line != null) {
+        errorBuffer.append("  DOT <error console>: " + line + "\n")
+        error = true
+        line = reader.readLine
+      }
+      stdErr.close()
+    } catch {
+      case exc: Throwable =>
+        error = true
+        stdErr.close()
+        errorBuffer.append("  Error thread in " + templateName + ": Exception: " + exc + "\n")
+    }
+  }
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
new file mode 100644
index 0000000..c201b32
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -0,0 +1,541 @@
+// © 2009–2010 EPFL/LAMP
+// code by Gilles Dubochet with contributions by Johannes Rudolph and "spiros"
+
+var topLevelTemplates = undefined;
+var topLevelPackages = undefined;
+
+var scheduler = undefined;
+
+var kindFilterState = undefined;
+var focusFilterState = undefined;
+
+var title = $(document).attr('title');
+
+var lastFragment = "";
+
+$(document).ready(function() {
+    $('body').layout({
+        west__size: '20%',
+        center__maskContents: true
+    });
+    $('#browser').layout({
+        center__paneSelector: ".ui-west-center"
+        //,center__initClosed:true
+        ,north__paneSelector: ".ui-west-north"
+    });
+    $('iframe').bind("load", function(){
+        var subtitle = $(this).contents().find('title').text();
+        $(document).attr('title', (title ? title + " - " : "") + subtitle);
+
+        setUrlFragmentFromFrameSrc();
+    });
+
+    // workaround for IE's iframe sizing lack of smartness
+    if($.browser.msie) {
+        function fixIFrame() {
+            $('iframe').height($(window).height() )
+        }
+        $('iframe').bind("load",fixIFrame)
+        $('iframe').bind("resize",fixIFrame)
+    }
+
+    scheduler = new Scheduler();
+    scheduler.addLabel("init", 1);
+    scheduler.addLabel("focus", 2);
+    scheduler.addLabel("filter", 4);
+
+    prepareEntityList();
+
+    configureTextFilter();
+    configureKindFilter();
+    configureEntityList();
+
+    setFrameSrcFromUrlFragment();
+
+    // If the url fragment changes, adjust the src of iframe "template".
+    $(window).bind('hashchange', function() {
+      if(lastFragment != window.location.hash) {
+        lastFragment = window.location.hash;
+        setFrameSrcFromUrlFragment();
+      }
+    });
+});
+
+// Set the iframe's src according to the fragment of the current url.
+// fragment = "#scala.Either" => iframe url = "scala/Either.html"
+// fragment = "#scala.Either at isRight:Boolean" => iframe url = "scala/Either.html#isRight:Boolean"
+function setFrameSrcFromUrlFragment() {
+  var fragment = location.hash.slice(1);
+  if(fragment) {
+    var loc = fragment.split("@")[0].replace(/\./g, "/");
+    if(loc.indexOf(".html") < 0) loc += ".html";
+    if(fragment.indexOf('@') > 0) loc += ("#" + fragment.split("@", 2)[1]);
+    frames["template"].location.replace(loc);
+  }
+  else
+    frames["template"].location.replace("package.html");
+}
+
+// Set the url fragment according to the src of the iframe "template".
+// iframe url = "scala/Either.html"  =>  url fragment = "#scala.Either"
+// iframe url = "scala/Either.html#isRight:Boolean"  =>  url fragment = "#scala.Either at isRight:Boolean"
+function setUrlFragmentFromFrameSrc() {
+  try {
+    var commonLength = location.pathname.lastIndexOf("/");
+    var frameLocation = frames["template"].location;
+    var relativePath = frameLocation.pathname.slice(commonLength + 1);
+
+    if(!relativePath || frameLocation.pathname.indexOf("/") < 0)
+      return;
+
+    // Add #, remove ".html" and replace "/" with "."
+    fragment = "#" + relativePath.replace(/\.html$/, "").replace(/\//g, ".");
+
+    // Add the frame's hash after an @
+    if(frameLocation.hash) fragment += ("@" + frameLocation.hash.slice(1));
+
+    // Use replace to not add history items
+    lastFragment = fragment;
+    location.replace(fragment);
+  }
+  catch(e) {
+    // Chrome doesn't allow reading the iframe's location when
+    // used on the local file system.
+  }
+}
+
+var Index = {};
+
+(function (ns) {
+    function openLink(t, type) {
+        var href;
+        if (type == 'object') {
+            href = t['object'];
+        } else {
+            href = t['class'] || t['trait'] || t['case class'] || t['type'];
+        }
+        return [
+            '<a class="tplshow" target="template" href="',
+            href,
+            '"><img width="13" height="13" class="',
+            type,
+            ' icon" src="lib/',
+            type,
+            '.png" />'
+        ].join('');
+    }
+
+    function createPackageHeader(pack) {
+        return [
+            '<li class="pack">',
+            '<a class="packfocus">focus</a><a class="packhide">hide</a>',
+            '<a class="tplshow" target="template" href="',
+            pack.replace(/\./g, '/'),
+            '/package.html">',
+            pack,
+            '</a></li>'
+        ].join('');
+    };
+
+    function createListItem(template) {
+        var inner = '';
+
+
+        if (template.object) {
+            inner += openLink(template, 'object');
+        }
+
+        if (template['class'] || template['trait'] || template['case class'] || template['type']) {
+            inner += (inner == '') ?
+                '<div class="placeholder" />' : '</a>';
+            inner += openLink(template, template['trait'] ? 'trait' : template['type'] ? 'type' : 'class');
+        } else {
+            inner += '<div class="placeholder"/>';
+        }
+
+        return [
+            '<li>',
+            inner,
+            '<span class="tplLink">',
+            template.name.replace(/^.*\./, ''),
+            '</span></a></li>'
+        ].join('');
+    }
+
+
+    ns.createPackageTree = function (pack, matched, focused) {
+        var html = $.map(matched, function (child, i) {
+            return createListItem(child);
+        }).join('');
+
+        var header;
+        if (focused && pack == focused) {
+            header = '';
+        } else {
+            header = createPackageHeader(pack);
+        }
+
+        return [
+            '<ol class="packages">',
+            header,
+            '<ol class="templates">',
+            html,
+            '</ol></ol>'
+        ].join('');
+    }
+
+    ns.keys = function (obj) {
+        var result = [];
+        var key;
+        for (key in obj) {
+            result.push(key);
+        }
+        return result;
+    }
+
+    var hiddenPackages = {};
+
+    function subPackages(pack) {
+        return $.grep($('#tpl ol.packages'), function (element, index) {
+            var subPack = $('li.pack > .tplshow', element).text();
+            return subPack.indexOf(pack + '.') == 0;
+        });
+    }
+
+    ns.hidePackage = function (ol) {
+        var selected = $('li.pack > .tplshow', ol).text();
+        hiddenPackages[selected] = true;
+
+        $('ol.templates', ol).hide();
+
+        $.each(subPackages(selected), function (index, element) {
+            $(element).hide();
+        });
+    }
+
+    ns.showPackage = function (ol, state) {
+        var selected = $('li.pack > .tplshow', ol).text();
+        hiddenPackages[selected] = false;
+
+        $('ol.templates', ol).show();
+
+        $.each(subPackages(selected), function (index, element) {
+            $(element).show();
+
+            // When the filter is in "packs" state,
+            // we don't want to show the `.templates`
+            var key = $('li.pack > .tplshow', element).text();
+            if (hiddenPackages[key] || state == 'packs') {
+                $('ol.templates', element).hide();
+            }
+        });
+    }
+
+})(Index);
+
+function configureEntityList() {
+    kindFilterSync();
+    configureHideFilter();
+    configureFocusFilter();
+    textFilter();
+}
+
+/* Updates the list of entities (i.e. the content of the #tpl element) from the raw form generated by Scaladoc to a
+   form suitable for display. In particular, it adds class and object etc. icons, and it configures links to open in
+   the right frame. Furthermore, it sets the two reference top-level entities lists (topLevelTemplates and
+   topLevelPackages) to serve as reference for resetting the list when needed.
+   Be advised: this function should only be called once, on page load. */
+function prepareEntityList() {
+    var classIcon = $("#library > img.class");
+    var traitIcon = $("#library > img.trait");
+    var typeIcon = $("#library > img.type");
+    var objectIcon = $("#library > img.object");
+    var packageIcon = $("#library > img.package");
+
+    $('#tpl li.pack > a.tplshow').attr("target", "template");
+    $('#tpl li.pack').each(function () {
+        $("span.class", this).each(function() { $(this).replaceWith(classIcon.clone()); });
+        $("span.trait", this).each(function() { $(this).replaceWith(traitIcon.clone()); });
+        $("span.type", this).each(function() { $(this).replaceWith(typeIcon.clone()); });
+        $("span.object", this).each(function() { $(this).replaceWith(objectIcon.clone()); });
+        $("span.package", this).each(function() { $(this).replaceWith(packageIcon.clone()); });
+    });
+    $('#tpl li.pack')
+        .prepend("<a class='packhide'>hide</a>")
+        .prepend("<a class='packfocus'>focus</a>");
+}
+
+/* Handles all key presses while scrolling around with keyboard shortcuts in left panel */
+function keyboardScrolldownLeftPane() {
+    scheduler.add("init", function() {
+        $("#textfilter input").blur();
+        var $items = $("#tpl li");
+        $items.first().addClass('selected');
+
+        $(window).bind("keydown", function(e) {
+            var $old = $items.filter('.selected'),
+                $new;
+
+            switch ( e.keyCode ) {
+
+            case 9: // tab
+                $old.removeClass('selected');
+                break;
+
+            case 13: // enter
+                $old.removeClass('selected');
+                var $url = $old.children().filter('a:last').attr('href');
+                $("#template").attr("src",$url);
+                break;
+
+            case 27: // escape
+                $old.removeClass('selected');
+                $(window).unbind(e);
+                $("#textfilter input").focus();
+
+                break;
+
+            case 38: // up
+                $new = $old.prev();
+
+                if (!$new.length) {
+                    $new = $old.parent().prev();
+                }
+
+                if ($new.is('ol') && $new.children(':last').is('ol')) {
+                    $new = $new.children().children(':last');
+                } else if ($new.is('ol')) {
+                    $new = $new.children(':last');
+                }
+
+                break;
+
+            case 40: // down
+                $new = $old.next();
+                if (!$new.length) {
+                    $new = $old.parent().parent().next();
+                }
+                if ($new.is('ol')) {
+                    $new = $new.children(':first');
+                }
+                break;
+            }
+
+            if ($new.is('li')) {
+                $old.removeClass('selected');
+                $new.addClass('selected');
+            } else if (e.keyCode == 38) {
+                $(window).unbind(e);
+                $("#textfilter input").focus();
+            }
+        });
+    });
+}
+
+/* Configures the text filter  */
+function configureTextFilter() {
+    scheduler.add("init", function() {
+        $("#textfilter").append("<span class='pre'/><span class='input'><input id='index-input' type='text' accesskey='/'/></span><span class='post'/>");
+        var input = $("#textfilter input");
+        resizeFilterBlock();
+        input.bind('keyup', function(event) {
+            if (event.keyCode == 27) { // escape
+                input.attr("value", "");
+            }
+            if (event.keyCode == 40) { // down arrow
+                $(window).unbind("keydown");
+                keyboardScrolldownLeftPane();
+                return false;
+            }
+            textFilter();
+        });
+        input.bind('keydown', function(event) {
+            if (event.keyCode == 9) { // tab
+                $("#template").contents().find("#mbrsel-input").focus();
+                input.attr("value", "");
+                return false;
+            }
+            textFilter();
+        });
+        input.focus(function(event) { input.select(); });
+    });
+    scheduler.add("init", function() {
+        $("#textfilter > .post").click(function(){
+            $("#textfilter input").attr("value", "");
+            textFilter();
+        });
+    });
+}
+
+function compilePattern(query) {
+    var escaped = query.replace(/([\.\*\+\?\|\(\)\[\]\\])/g, '\\$1');
+
+    if (query.toLowerCase() != query) {
+        // Regexp that matches CamelCase subbits: "BiSe" is
+        // "[a-z]*Bi[a-z]*Se" and matches "BitSet", "ABitSet", ...
+        return new RegExp(escaped.replace(/([A-Z])/g,"[a-z]*$1"));
+    }
+    else { // if query is all lower case make a normal case insensitive search
+        return new RegExp(escaped, "i");
+    }
+}
+
+// Filters all focused templates and packages. This function should be made less-blocking.
+//   (the query string is read from the #textfilter input field)
+function textFilter() {
+    var query = $("#textfilter input").attr("value") || '';
+    var queryRegExp = compilePattern(query);
+
+    if ((typeof textFilter.lastQuery === "undefined") || (textFilter.lastQuery !== query)) {
+
+        textFilter.lastQuery = query;
+
+        scheduler.clear("filter");
+
+        $('#tpl').html('');
+
+        var index = 0;
+
+        var searchLoop = function () {
+            var packages = Index.keys(Index.PACKAGES).sort();
+
+            while (packages[index]) {
+                var pack = packages[index];
+                var children = Index.PACKAGES[pack];
+                index++;
+
+                if (focusFilterState) {
+                    if (pack == focusFilterState ||
+                        pack.indexOf(focusFilterState + '.') == 0) {
+                        ;
+                    } else {
+                        continue;
+                    }
+                }
+
+                var matched = $.grep(children, function (child, i) {
+                    return queryRegExp.test(child.name);
+                });
+
+                if (matched.length > 0) {
+                    $('#tpl').append(Index.createPackageTree(pack, matched,
+                                                             focusFilterState));
+                    scheduler.add('filter', searchLoop);
+                    return;
+                }
+            }
+
+            $('#tpl a.packfocus').click(function () {
+                focusFilter($(this).parent().parent());
+            });
+            configureHideFilter();
+        };
+
+        scheduler.add('filter', searchLoop);
+    }
+}
+
+/* Configures the hide tool by adding the hide link to all packages. */
+function configureHideFilter() {
+    $('#tpl li.pack a.packhide').click(function () {
+        var packhide = $(this)
+        var action = packhide.text();
+
+        var ol = $(this).parent().parent();
+
+        if (action == "hide") {
+            Index.hidePackage(ol);
+            packhide.text("show");
+        }
+        else {
+            Index.showPackage(ol, kindFilterState);
+            packhide.text("hide");
+        }
+        return false;
+    });
+}
+
+/* Configures the focus tool by adding the focus bar in the filter box (initially hidden), and by adding the focus
+   link to all packages. */
+function configureFocusFilter() {
+    scheduler.add("init", function() {
+        focusFilterState = null;
+        if ($("#focusfilter").length == 0) {
+            $("#filter").append("<div id='focusfilter'>focused on <span class='focuscoll'></span> <a class='focusremove'><img class='icon' src='lib/remove.png'/></a></div>");
+            $("#focusfilter > .focusremove").click(function(event) {
+                textFilter();
+
+                $("#focusfilter").hide();
+                $("#kindfilter").show();
+                resizeFilterBlock();
+                focusFilterState = null;
+            });
+            $("#focusfilter").hide();
+            resizeFilterBlock();
+        }
+    });
+    scheduler.add("init", function() {
+        $('#tpl li.pack a.packfocus').click(function () {
+            focusFilter($(this).parent());
+            return false;
+        });
+    });
+}
+
+/* Focuses the entity index on a specific package. To do so, it will copy the sub-templates and sub-packages of the
+   focused package into the top-level templates and packages position of the index. The original top-level
+     @param package The <li> element that corresponds to the package in the entity index */
+function focusFilter(package) {
+    scheduler.clear("filter");
+
+    var currentFocus = $('li.pack > .tplshow', package).text();
+    $("#focusfilter > .focuscoll").empty();
+    $("#focusfilter > .focuscoll").append(currentFocus);
+
+    $("#focusfilter").show();
+    $("#kindfilter").hide();
+    resizeFilterBlock();
+    focusFilterState = currentFocus;
+    kindFilterSync();
+
+    textFilter();
+}
+
+function configureKindFilter() {
+    scheduler.add("init", function() {
+        kindFilterState = "all";
+        $("#filter").append("<div id='kindfilter'><a>display packages only</a></div>");
+        $("#kindfilter > a").click(function(event) { kindFilter("packs"); });
+        resizeFilterBlock();
+    });
+}
+
+function kindFilter(kind) {
+    if (kind == "packs") {
+        kindFilterState = "packs";
+        kindFilterSync();
+        $("#kindfilter > a").replaceWith("<a>display all entities</a>");
+        $("#kindfilter > a").click(function(event) { kindFilter("all"); });
+    }
+    else {
+        kindFilterState = "all";
+        kindFilterSync();
+        $("#kindfilter > a").replaceWith("<a>display packages only</a>");
+        $("#kindfilter > a").click(function(event) { kindFilter("packs"); });
+    }
+}
+
+/* Applies the kind filter. */
+function kindFilterSync() {
+    if (kindFilterState == "all" || focusFilterState != null) {
+        $("#tpl a.packhide").text('hide');
+        $("#tpl ol.templates").show();
+    } else {
+        $("#tpl a.packhide").text('show');
+        $("#tpl ol.templates").hide();
+    }
+}
+
+function resizeFilterBlock() {
+    $("#tpl").css("top", $("#filter").outerHeight(true));
+}
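
The two helpers above, setFrameSrcFromUrlFragment and setUrlFragmentFromFrameSrc, boil down to a pair of string transformations between the page's URL fragment and the template iframe's relative path. The following is a minimal sketch of that mapping in Scala, assuming the same '@'-separated fragment convention; the object and method names are illustrative only and not part of these sources.

    object FragmentMapping {
      // "scala.Either"                 -> "scala/Either.html"
      // "scala.Either@isRight:Boolean" -> "scala/Either.html#isRight:Boolean"
      def fragmentToFramePath(fragment: String): String = {
        val parts = fragment.split("@", 2)
        val path  = parts(0).replace('.', '/')
        val page  = if (path.contains(".html")) path else path + ".html"
        if (parts.length > 1) page + "#" + parts(1) else page
      }

      // The inverse direction, used when the iframe navigates on its own:
      // ("scala/Either.html", "#isRight:Boolean") -> "scala.Either@isRight:Boolean"
      def framePathToFragment(relativePath: String, hash: String): String = {
        val base = relativePath.stripSuffix(".html").replace('/', '.')
        if (hash.nonEmpty) base + "@" + hash.stripPrefix("#") else base
      }
    }

For example, fragmentToFramePath("scala.Either@isRight:Boolean") yields "scala/Either.html#isRight:Boolean", matching the examples in the comments above setFrameSrcFromUrlFragment.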
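
compilePattern above builds one of two regular expressions: a camel-case abbreviation matcher when the query contains upper-case letters, or a plain case-insensitive pattern otherwise. Below is a hedged re-statement of the same construction in Scala, useful for seeing what a query such as "BiSe" expands to; TextFilterPattern is a hypothetical name, not part of the sources.

    import scala.util.matching.Regex

    object TextFilterPattern {
      // Mirrors compilePattern in index.js: "BiSe" becomes "[a-z]*Bi[a-z]*Se",
      // which matches "BitSet", "ABitSet", ...; an all-lower-case query falls
      // back to a plain case-insensitive search.
      def compile(query: String): Regex = {
        val escaped = query.replaceAll("""([.\*\+\?\|\(\)\[\]\\])""", """\\$1""")
        if (query != query.toLowerCase)
          escaped.replaceAll("([A-Z])", "[a-z]*$1").r
        else
          ("(?i)" + escaped).r
      }
    }

With this sketch, TextFilterPattern.compile("BiSe").findFirstIn("BitSet").isDefined evaluates to true, while the same query does not match "bitset", because the camel-case branch is case sensitive by design.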
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt
rename to src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala
new file mode 100644
index 0000000..fe157c1
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala
@@ -0,0 +1,98 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package model
+
+import base.comment._
+
+import scala.collection._
+import scala.reflect.internal.util.Position
+
+/** The comment parser transforms raw comment strings into `Comment` objects.
+  * Call `parse` to run the parser. Note that the parser is stateless and
+  * should only be built once for a given Scaladoc run.
+  *
+  * @author Manohar Jonnalagedda
+  * @author Gilles Dubochet */
+trait CommentFactory extends base.CommentFactoryBase {
+  thisFactory: ModelFactory with CommentFactory with MemberLookup =>
+
+  val global: Global
+  import global.{ Symbol, NoSymbol }
+
+  protected val commentCache = mutable.HashMap.empty[(Symbol, DocTemplateImpl), Option[Comment]]
+
+  def comment(sym: Symbol, linkTarget: DocTemplateImpl, inTpl: DocTemplateImpl): Option[Comment] =
+    commentCache.getOrElseUpdate((sym, inTpl), {
+      defineComment(sym, linkTarget, inTpl)
+    })
+
+  /** A comment is usually created by the parser, however for some special
+    * cases we have to give some `inTpl` comments (parent class for example)
+    * to the comment of the symbol.
+    * This function manages some of those cases: param accessor and primary constructor. */
+  def defineComment(sym: Symbol, linkTarget: DocTemplateImpl, inTpl: DocTemplateImpl): Option[Comment] = {
+
+    //param accessor case
+    // We just need the @param argument, we put it into the body
+    if( sym.isParamAccessor &&
+        inTpl.comment.isDefined &&
+        inTpl.comment.get.valueParams.isDefinedAt(sym.encodedName)) {
+      val comContent = Some(inTpl.comment.get.valueParams(sym.encodedName))
+      Some(createComment(body0 = comContent))
+    }
+
+    // Primary constructor case
+    // We need some content of the class definition: @constructor for the body,
+    // @param and @deprecated, we can add some more if necessary
+    else if (sym.isPrimaryConstructor && inTpl.comment.isDefined ) {
+      val tplComment = inTpl.comment.get
+      // If there is nothing to put into the comment there is no need to create it
+      if(tplComment.constructor.isDefined ||
+        tplComment.throws != Map.empty ||
+        tplComment.valueParams != Map.empty ||
+        tplComment.typeParams != Map.empty ||
+        tplComment.deprecated.isDefined
+        )
+        Some(createComment( body0 = tplComment.constructor,
+                            throws0 = tplComment.throws,
+                            valueParams0 = tplComment.valueParams,
+                            typeParams0 = tplComment.typeParams,
+                            deprecated0 = tplComment.deprecated
+                            ))
+      else None
+    }
+
+    //other comment cases
+    // parse function will make the comment
+    else {
+      val rawComment = global.expandedDocComment(sym, inTpl.sym).trim
+      if (rawComment != "") {
+        val c = parse(rawComment, global.rawDocComment(sym), global.docCommentPos(sym), linkTarget)
+        Some(c)
+      }
+      else None
+    }
+
+  }
+
+  protected def parse(comment: String, src: String, pos: Position, linkTarget: DocTemplateImpl): Comment = {
+    val sym = if (linkTarget eq null) NoSymbol else linkTarget.sym
+    parseAtSymbol(comment, src, pos, sym)
+  }
+
+  /** Parses a string containing wiki syntax into a `Comment` object.
+    * Note that the string is assumed to be clean:
+    *  - Removed Scaladoc start and end markers.
+    *  - Removed start-of-line star and one whitespace afterwards (if present).
+    *  - Removed all end-of-line whitespace.
+    *  - Only `endOfLine` is used to mark line endings. */
+  def parseWiki(string: String, pos: Position, inTpl: DocTemplateImpl): Body = {
+    val sym = if (inTpl eq null) NoSymbol else inTpl.sym
+    parseWikiAtSymbol(string, pos, sym)
+  }
+}
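
The comment method above memoizes defineComment in a mutable map keyed on the (symbol, template) pair, so each doc comment is parsed at most once per Scaladoc run. Here is a self-contained sketch of that caching pattern with deliberately simplified stand-in types; Sym, Tpl and the String payload are illustrative and not the compiler's own types.

    import scala.collection.mutable

    object CommentCacheSketch {
      final case class Sym(name: String)
      final case class Tpl(name: String)

      private val cache = mutable.HashMap.empty[(Sym, Tpl), Option[String]]

      // Stands in for the expensive parse in defineComment; thanks to
      // getOrElseUpdate below it runs at most once per (symbol, template) pair.
      private def defineComment(sym: Sym, inTpl: Tpl): Option[String] = {
        println(s"parsing comment for ${sym.name} in ${inTpl.name}")
        Some(s"doc for ${sym.name}")
      }

      def comment(sym: Sym, inTpl: Tpl): Option[String] =
        cache.getOrElseUpdate((sym, inTpl), defineComment(sym, inTpl))
    }

Calling CommentCacheSketch.comment(Sym("map"), Tpl("List")) twice prints the "parsing" line only once; the second call is answered from the cache, which is the behaviour the real factory relies on across a documentation run.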
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
new file mode 100644
index 0000000..6932f01
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
@@ -0,0 +1,597 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Manohar Jonnalagedda
+ * @author Gilles Dubochet
+ */
+
+package scala.tools.nsc
+package doc
+package model
+
+import scala.collection._
+import base.comment._
+import diagram._
+
+/** An entity in a Scaladoc universe. Entities are declarations in the program and correspond to symbols in the
+  * compiler. Entities model the following Scala concepts:
+  *  - classes and traits;
+  *  - objects and packages;
+  *  - constructors;
+  *  - methods;
+  *  - values, lazy values, and variables;
+  *  - abstract type members and type aliases;
+  *  - type and value parameters;
+  *  - annotations. */
+trait Entity {
+  /** The name of the entity. Note that the name does not qualify this entity uniquely; use its `qualifiedName`
+    * instead. */
+  def name : String
+
+  /** The qualified name of the entity. This is this entity's name preceded by the qualified name of the template
+    * of which this entity is a member. The qualified name is unique to this entity. */
+  def qualifiedName: String
+
+  /** The template of which this entity is a member. */
+  def inTemplate: TemplateEntity
+
+  /** The list of entities such that each is a member of the entity that follows it; the first entity is always this
+    * entity, the last the root package entity. */
+  def toRoot: List[Entity]
+
+  /** The qualified name of this entity. */
+  override def toString = qualifiedName
+
+  /** The Scaladoc universe of which this entity is a member. */
+  def universe: Universe
+
+  /** The annotations attached to this entity, if any. */
+  def annotations: List[Annotation]
+
+  /** The kind of the entity */
+  def kind: String
+
+  /** Whether or not the template was defined in a package object */
+  def inPackageObject: Boolean
+
+  /** Indicates whether this entity lives in the types namespace (classes, traits, abstract/alias types) */
+  def isType: Boolean
+}
+
+object Entity {
+  private def isDeprecated(x: Entity) = x match {
+    case x: MemberEntity  => x.deprecation.isDefined
+    case _                => false
+  }
+  /** Ordering deprecated things last. */
+  implicit lazy val EntityOrdering: Ordering[Entity] =
+    Ordering[(Boolean, String)] on (x => (isDeprecated(x), x.name))
+}
+
+/** A template, which is either a class, trait, object or package. Depending on whether documentation is available
+  * or not, the template will be modeled as a [scala.tools.nsc.doc.model.NoDocTemplate] or a
+  * [scala.tools.nsc.doc.model.DocTemplateEntity]. */
+trait TemplateEntity extends Entity {
+
+  /** Whether this template is a package (including the root package). */
+  def isPackage: Boolean
+
+  /** Whether this template is the root package. */
+  def isRootPackage: Boolean
+
+  /** Whether this template is a trait. */
+  def isTrait: Boolean
+
+  /** Whether this template is a class. */
+  def isClass: Boolean
+
+  /** Whether this template is an object. */
+  def isObject: Boolean
+
+  /** Whether documentation is available for this template. */
+  def isDocTemplate: Boolean
+
+  /** Whether this template is a case class. */
+  def isCaseClass: Boolean
+
+  /** The self-type of this template, if it differs from the template type. */
+  def selfType : Option[TypeEntity]
+}
+
+
+/** An entity that is a member of a template. All entities, including templates, are members of another entity
+  * except for parameters and annotations. Note that all members of a template are modelled, including those that are
+  * inherited and not declared locally. */
+trait MemberEntity extends Entity {
+
+  /** The comment attached to this member, if any. */
+  def comment: Option[Comment]
+
+  /** The group this member is from */
+  def group: String
+
+  /** The template of which this entity is a member. */
+  def inTemplate: DocTemplateEntity
+
+  /** The list of entities such that each is a member of the entity that follows it; the first entity is always this
+    * member, the last the root package entity. */
+  def toRoot: List[MemberEntity]
+
+  /** The templates in which this member has been declared. The first element of the list is the template that contains
+    * the currently active declaration of this member, subsequent elements are declarations that have been overridden. If
+    * the first element is equal to `inTemplate`, the member is declared locally, if not, it has been inherited. All
+    * elements of this list are in the linearization of `inTemplate`. */
+  def inDefinitionTemplates: List[TemplateEntity]
+
+  /** The qualified name of the member in its currently active declaration template. */
+  def definitionName: String
+
+  /** The visibility of this member. Note that members with restricted visibility may not be modeled in some
+    * universes. */
+  def visibility: Visibility
+
+  /** The flags that have been set for this entity. The following flags are supported: `implicit`, `sealed`, `abstract`,
+    * and `final`. */
+  def flags: List[Paragraph]
+
+  /** Some deprecation message if this member is deprecated, or none otherwise. */
+  def deprecation: Option[Body]
+
+  /** Some migration warning if this member has a migration annotation, or none otherwise. */
+  def migration: Option[Body]
+
+  /** For members representing values: the type of the value returned by this member; for members
+    * representing types: the type itself. */
+  def resultType: TypeEntity
+
+  /** Whether this member is a method. */
+  def isDef: Boolean
+
+  /** Whether this member is a value (this excludes lazy values). */
+  def isVal: Boolean
+
+  /** Whether this member is a lazy value. */
+  def isLazyVal: Boolean
+
+  /** Whether this member is a variable. */
+  def isVar: Boolean
+
+  /** Whether this member is a constructor. */
+  def isConstructor: Boolean
+
+  /** Whether this member is an alias type. */
+  def isAliasType: Boolean
+
+  /** Whether this member is an abstract type. */
+  def isAbstractType: Boolean
+
+  /** Whether this member is abstract. */
+  def isAbstract: Boolean
+
+  /** If this symbol is a use case, the useCaseOf will contain the member it was derived from, containing the full
+    * signature and the complete parameter descriptions. */
+  def useCaseOf: Option[MemberEntity]
+
+  /** If this member originates from an implicit conversion, we set the implicit information to the correct origin */
+  def byConversion: Option[ImplicitConversion]
+
+  /** The identity of this member, used for linking */
+  def signature: String
+
+  /** Compatibility signature, will be removed from future versions */
+  def signatureCompat: String
+
+  /** Indicates whether the member is inherited by implicit conversion */
+  def isImplicitlyInherited: Boolean
+
+  /** Indicates whether there is another member with the same name in the template that will take precedence */
+  def isShadowedImplicit: Boolean
+
+  /** Indicates whether there are other implicitly inherited members that have similar signatures (and thus they all
+   *  become ambiguous) */
+  def isAmbiguousImplicit: Boolean
+
+  /** Indicates whether the implicitly inherited member is shadowed or ambiguous in its template */
+  def isShadowedOrAmbiguousImplicit: Boolean
+}
+
+object MemberEntity {
+  // Oh contravariance, contravariance, wherefore art thou contravariance?
+  // Note: the above works for both the commonly misunderstood meaning of the line and the real one.
+  implicit lazy val MemberEntityOrdering: Ordering[MemberEntity] = Entity.EntityOrdering on (x => x)
+}
+
+/** An entity that is parameterized by types */
+trait HigherKinded {
+
+  /** The type parameters of this entity. */
+  def typeParams: List[TypeParam]
+}
+
+
+/** A template (class, trait, object or package) which is referenced in the universe, but for which no further
+  * documentation is available. Only templates for which a source file is given are documented by Scaladoc. */
+trait NoDocTemplate extends TemplateEntity {
+  def kind =
+    if (isClass) "class"
+    else if (isTrait) "trait"
+    else if (isObject) "object"
+    else ""
+}
+
+/** An inherited template that was not documented in its original owner - example:
+ *  in classpath:  trait T { class C } -- T (and implicitly C) are not documented
+ *  in the source: trait U extends T -- C appears in U as a MemberTemplateImpl
+ *    -- that is, U has a member for it but C doesn't get its own page */
+trait MemberTemplateEntity extends TemplateEntity with MemberEntity with HigherKinded {
+
+  /** The value parameters of this case class, or an empty list if this class is not a case class. As case class value
+    * parameters cannot be curried, the outer list has exactly one element. */
+  def valueParams: List[List[ValueParam]]
+
+  /** The direct super-types of this template,
+      e.g. {{{class A extends B[C[Int]] with D[E]}}} will have two direct parents: class B and D
+      NOTE: we are dropping the refinement here! */
+  def parentTypes: List[(TemplateEntity, TypeEntity)]
+}
+
+/** A template (class, trait, object or package) for which documentation is available. Only templates for which
+  * a source file is given are documented by Scaladoc. */
+trait DocTemplateEntity extends MemberTemplateEntity {
+
+  /** The list of templates such that each is a member of the template that follows it; the first template is always
+    * this template, the last the root package entity. */
+  def toRoot: List[DocTemplateEntity]
+
+  /** The source file in which the current template is defined and the line where the definition starts, if they exist.
+    * A source file exists for all templates, except for those that are generated synthetically by Scaladoc. */
+  def inSource: Option[(io.AbstractFile, Int)]
+
+  /** An HTTP address at which the source of this template is available, if it is available. An address is available
+    * only if the `docsourceurl` setting has been set. */
+  def sourceUrl: Option[java.net.URL]
+
+  /** All class, trait and object templates which are part of this template's linearization, in linearization order.
+    * This template's linearization contains all of its direct and indirect super-classes and super-traits. */
+  def linearizationTemplates: List[TemplateEntity]
+
+  /** All instantiated types which are part of this template's linearization, in linearization order.
+    * This template's linearization contains all of its direct and indirect super-types. */
+  def linearizationTypes: List[TypeEntity]
+
+  /** All class, trait and object templates for which this template is a *direct* super-class or super-trait.
+   *  Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
+  def directSubClasses: List[DocTemplateEntity]
+
+  /** All members of this template. If this template is a package, only templates for which documentation is available
+    * in the universe (`DocTemplateEntity`) are listed. */
+  def members: List[MemberEntity]
+
+  /** All templates that are members of this template. If this template is a package, only templates for which
+    * documentation is available  in the universe (`DocTemplateEntity`) are listed. */
+  def templates: List[TemplateEntity with MemberEntity]
+
+  /** All methods that are members of this template. */
+  def methods: List[Def]
+
+  /** All values, lazy values and variables that are members of this template. */
+  def values: List[Val]
+
+  /** All abstract types that are members of this template. */
+  def abstractTypes: List[AbstractType]
+
+  /** All type aliases that are members of this template. */
+  def aliasTypes: List[AliasType]
+
+  /** The primary constructor of this class, if it has been defined. */
+  def primaryConstructor: Option[Constructor]
+
+  /** All constructors of this class, including the primary constructor. */
+  def constructors: List[Constructor]
+
+  /** The companion of this template, or none. If a class and an object are defined as a pair of the same name, the
+    * other entity of the pair is the companion. */
+  def companion: Option[DocTemplateEntity]
+
+  /** The implicit conversions of this template (classes and traits only; objects and packages are not affected) */
+  def conversions: List[ImplicitConversion]
+
+  /** The shadowing information for the implicitly added members */
+  def implicitsShadowing: Map[MemberEntity, ImplicitMemberShadowing]
+
+  /** Classes that can be implicitly converted to this class */
+  def incomingImplicitlyConvertedClasses: List[(DocTemplateEntity, ImplicitConversion)]
+
+  /** Classes to which this class can be implicitly converted
+      NOTE: Some classes might not be included in the scaladoc run so they will be NoDocTemplateEntities */
+  def outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity, ImplicitConversion)]
+
+  /** If this template takes place in inheritance and implicit conversion relations, it will be shown in this diagram */
+  def inheritanceDiagram: Option[Diagram]
+
+  /** If this template contains other templates, such as classes and traits, they will be shown in this diagram */
+  def contentDiagram: Option[Diagram]
+
+  /** Returns the group description taken either from this template or its linearizationTypes */
+  def groupDescription(group: String): Option[Body]
+
+  /** Returns the group priority taken either from this template or its linearizationTypes */
+  def groupPriority(group: String): Int
+
+  /** Returns the group name taken either from this template or its linearizationTypes */
+  def groupName(group: String): String
+}
+
+/** A trait template. */
+trait Trait extends MemberTemplateEntity {
+  def kind = "trait"
+}
+
+/** A class template. */
+trait Class extends MemberTemplateEntity {
+  override def kind = "class"
+}
+
+/** An object template. */
+trait Object extends MemberTemplateEntity {
+  def kind = "object"
+}
+
+/** A package template. A package is in the universe if it is declared as a package object, or if it
+  * contains at least one template. */
+trait Package extends DocTemplateEntity {
+
+  /** The package of which this package is a member. */
+  def inTemplate: Package
+
+  /** The list of packages such that each is a member of the package that follows it; the first package is always this
+    * package, the last the root package. */
+  def toRoot: List[Package]
+
+  /** All packages that are members of this package. */
+  def packages: List[Package]
+
+  override def kind = "package"
+}
+
+
+/** The root package, which contains directly or indirectly all members in the universe. A universe
+  * contains exactly one root package. */
+trait RootPackage extends Package
+
+
+/** A non-template member (method, value, lazy value, variable, constructor, alias type, and abstract type). */
+trait NonTemplateMemberEntity extends MemberEntity {
+  /** Whether this member is a use case. A use case is a member which does not exist in the documented code.
+    * It corresponds to a real member, and provides a simplified, yet compatible signature for that member. */
+  def isUseCase: Boolean
+}
+
+
+/** A method (`def`) of a template. */
+trait Def extends NonTemplateMemberEntity with HigherKinded {
+
+  /** The value parameters of this method. Each parameter block of a curried method is an element of the list.
+    * Each parameter block is a list of value parameters. */
+  def valueParams : List[List[ValueParam]]
+
+  def kind = "method"
+}
+
+
+/** A constructor of a class. */
+trait Constructor extends NonTemplateMemberEntity {
+
+  /** Whether this is the primary constructor of a class. The primary constructor is defined syntactically as part of
+    * the declaration of the class. */
+  def isPrimary: Boolean
+
+  /** The value parameters of this constructor. As constructors cannot be curried, the outer list has exactly one
+    * element. */
+  def valueParams : List[List[ValueParam]]
+
+  def kind = "constructor"
+}
+
+
+/** A value (`val`), lazy val (`lazy val`) or variable (`var`) of a template. */
+trait Val extends NonTemplateMemberEntity {
+  def kind = "[lazy] value/variable"
+}
+
+
+/** An abstract type member of a template. */
+trait AbstractType extends MemberTemplateEntity with HigherKinded {
+
+  /** The lower bound for this abstract type, if it has been defined. */
+  def lo: Option[TypeEntity]
+
+  /** The upper bound for this abstract type, if it has been defined. */
+  def hi: Option[TypeEntity]
+
+  def kind = "abstract type"
+}
+
+
+/** A type alias of a template. */
+trait AliasType extends MemberTemplateEntity with HigherKinded {
+
+  /** The type aliased by this type alias. */
+  def alias: TypeEntity
+
+  def kind = "type alias"
+}
+
+
+/** A parameter to an entity. */
+trait ParameterEntity {
+
+  def name: String
+}
+
+
+/** A type parameter to a class, trait, or method. */
+trait TypeParam extends ParameterEntity with HigherKinded {
+
+  /** The variance of this type parameter. Valid values are "+", "-", and the empty string. */
+  def variance: String
+
+  /** The lower bound for this type parameter, if it has been defined. */
+  def lo: Option[TypeEntity]
+
+  /** The upper bound for this type parameter, if it has been defined. */
+  def hi: Option[TypeEntity]
+}
+
+
+/** A value parameter to a constructor or method. */
+trait ValueParam extends ParameterEntity {
+
+  /** The type of this value parameter. */
+  def resultType: TypeEntity
+
+  /** The default value of this value parameter, if it has been defined. */
+  def defaultValue: Option[TreeEntity]
+
+  /** Whether this value parameter is implicit. */
+  def isImplicit: Boolean
+}
+
+
+/** An annotation to an entity. */
+trait Annotation extends Entity {
+
+  /** The class of this annotation. */
+  def annotationClass: TemplateEntity
+
+  /** The arguments passed to the constructor of the annotation class. */
+  def arguments: List[ValueArgument]
+
+  def kind = "annotation"
+}
+
+/** A trait that signals that the member results from an implicit conversion */
+trait ImplicitConversion {
+
+  /** The source of the implicit conversion */
+  def source: DocTemplateEntity
+
+  /** The result type after the conversion */
+  def targetType: TypeEntity
+
+  /** The components of the implicit conversion type parents */
+  def targetTypeComponents: List[(TemplateEntity, TypeEntity)]
+
+  /** The entity for the method that performed the conversion, if it's documented (or just its name, otherwise) */
+  def convertorMethod: Either[MemberEntity, String]
+
+  /** A short name of the conversion */
+  def conversionShortName: String
+
+  /** A qualified name uniquely identifying the conversion (currently: the conversion method's qualified name) */
+  def conversionQualifiedName: String
+
+  /** The entity that performed the conversion */
+  def convertorOwner: TemplateEntity
+
+  /** The constraints that the transformation puts on the type parameters */
+  def constraints: List[Constraint]
+
+  /** The members inherited by this implicit conversion */
+  def members: List[MemberEntity]
+
+  /** Is this a hidden implicit conversion (as specified in the settings) */
+  def isHiddenConversion: Boolean
+}
+
+/** Shadowing captures the information that the member is shadowed by some other members
+ *  There are two cases of implicitly added member shadowing:
+ *  1) shadowing from an original class member (the class already has that member)
+ *     in this case, it won't be possible to call the member directly, the type checker will fail attempting to adapt
+ *     the call arguments (or if they fit it will call the original class' method)
+ *  2) shadowing from other possible implicit conversions ()
+ *     this will result in an ambiguous implicit conversion error
+ */
+trait ImplicitMemberShadowing {
+  /** The members that shadow the current entry; use .inTemplate to get to the template name */
+  def shadowingMembers: List[MemberEntity]
+
+  /** The members that make this implicit conversion ambiguous.
+   *  Note: ambiguatingMembers satisfy the following invariant:
+   *  assert(ambiguatingMembers.forall(_.byConversion.isDefined)) */
+  def ambiguatingMembers: List[MemberEntity]
+
+  def isShadowed: Boolean = !shadowingMembers.isEmpty
+  def isAmbiguous: Boolean = !ambiguatingMembers.isEmpty
+}
+
+/** A trait that encapsulates a constraint necessary for implicit conversion */
+trait Constraint
+
+/** A constraint involving a type parameter which must be in scope */
+trait ImplicitInScopeConstraint extends Constraint {
+  /** The type of the implicit value required */
+  def implicitType: TypeEntity
+
+  /** toString for debugging */
+  override def toString = "an implicit _: " + implicitType.name + " must be in scope"
+}
+
+trait TypeClassConstraint extends ImplicitInScopeConstraint with TypeParamConstraint {
+  /** Type class name */
+  def typeClassEntity: TemplateEntity
+
+  /** toString for debugging */
+  override def toString = typeParamName + " is a class of type " + typeClassEntity.qualifiedName + " (" +
+    typeParamName + ": " + typeClassEntity.name + ")"
+}
+
+trait KnownTypeClassConstraint extends TypeClassConstraint {
+  /** Type explanation, takes the type parameter name and generates the explanation */
+  def typeExplanation: (String) => String
+
+  /** toString for debugging */
+  override def toString = typeExplanation(typeParamName) + " (" + typeParamName + ": " + typeClassEntity.name + ")"
+}
+
+/** A constraint involving a type parameter */
+trait TypeParamConstraint extends Constraint {
+  /** The type parameter involved */
+  def typeParamName: String
+}
+
+trait EqualTypeParamConstraint extends TypeParamConstraint {
+  /** The rhs */
+  def rhs: TypeEntity
+  /** toString for debugging */
+  override def toString = typeParamName + " is " + rhs.name + " (" + typeParamName + " =:= " + rhs.name + ")"
+}
+
+trait BoundedTypeParamConstraint extends TypeParamConstraint {
+  /** The lower bound */
+  def lowerBound: TypeEntity
+
+  /** The upper bound */
+  def upperBound: TypeEntity
+
+  /** toString for debugging */
+  override def toString = typeParamName + " is a superclass of " + lowerBound.name + " and a subclass of " +
+    upperBound.name + " (" + typeParamName + " >: " + lowerBound.name + " <: " + upperBound.name + ")"
+}
+
+trait LowerBoundedTypeParamConstraint extends TypeParamConstraint {
+  /** The lower bound */
+  def lowerBound: TypeEntity
+
+  /** toString for debugging */
+  override def toString = typeParamName + " is a superclass of " + lowerBound.name + " (" + typeParamName + " >: " +
+    lowerBound.name + ")"
+}
+
+trait UpperBoundedTypeParamConstraint extends TypeParamConstraint {
+  /** The upper bound */
+  def upperBound: TypeEntity
+
+  /** toString for debugging */
+  override def toString = typeParamName + " is a subclass of " + upperBound.name + " (" + typeParamName + " <: " +
+    upperBound.name + ")"
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
new file mode 100755
index 0000000..643a089
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -0,0 +1,56 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  Pedro Furlanetto
+ */
+
+package scala
+package tools.nsc
+package doc
+package model
+
+import scala.collection._
+
+object IndexModelFactory {
+
+  def makeIndex(universe: Universe): Index = new Index {
+
+    lazy val firstLetterIndex: Map[Char, SymbolMap] = {
+
+      object result extends mutable.HashMap[Char,SymbolMap] {
+
+        /* symbol name ordering */
+        implicit def orderingMap = math.Ordering.String
+
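+        /* Bucket a member first by the lower-cased initial character of its name ('_' for anything
+         * that is not a letter or digit), then by its full name; members sharing a name are kept in
+         * a set sorted by their string representation. */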
+        def addMember(d: MemberEntity) = {
+          val firstLetter = {
+            val ch = d.name.head.toLower
+            if(ch.isLetterOrDigit) ch else '_'
+          }
+          val letter = this.get(firstLetter).getOrElse {
+            immutable.SortedMap[String, SortedSet[MemberEntity]]()
+          }
+          val members = letter.get(d.name).getOrElse {
+            SortedSet.empty[MemberEntity](Ordering.by { _.toString })
+          } + d
+          this(firstLetter) = letter + (d.name -> members)
+        }
+      }
+
+      //@scala.annotation.tailrec // TODO
+      def gather(owner: DocTemplateEntity): Unit =
+        for(m <- owner.members if m.inDefinitionTemplates.isEmpty || m.inDefinitionTemplates.head == owner)
+          m match {
+            case tpl: DocTemplateEntity =>
+              result.addMember(tpl)
+              gather(tpl)
+            case non: MemberEntity if !non.isConstructor =>
+              result.addMember(non)
+            case x @ _ =>
+          }
+
+      gather(universe.rootPackage)
+
+      result.toMap
+    }
+  }
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
new file mode 100644
index 0000000..339129b
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
@@ -0,0 +1,56 @@
+package scala.tools.nsc
+package doc
+package model
+
+import base._
+
+/** This trait extracts all required information for documentation from compilation units */
+trait MemberLookup extends base.MemberLookupBase {
+  thisFactory: ModelFactory =>
+
+  import global._
+  import definitions.{ NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass }
+
+  override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] =
+    findTemplateMaybe(sym) match {
+      case Some(tpl) => Some(LinkToTpl(tpl))
+      case None =>
+        findTemplateMaybe(site) flatMap { inTpl =>
+          inTpl.members find (_.asInstanceOf[EntityImpl].sym == sym) map (LinkToMember(_, inTpl))
+        }
+    }
+
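+  /* Prefer links to members over links to templates; among member links, pick the one whose
+   * member sorts first by the MemberEntity ordering. */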
+  override def chooseLink(links: List[LinkTo]): LinkTo = {
+    val mbrs = links.collect {
+      case lm @ LinkToMember(mbr: MemberEntity, _) => (mbr, lm)
+    }
+    if (mbrs.isEmpty)
+      links.head
+    else
+      mbrs.min(Ordering[MemberEntity].on[(MemberEntity, LinkTo)](_._1))._2
+  }
+
+  override def toString(link: LinkTo) = link match {
+    case LinkToTpl(tpl: EntityImpl) => tpl.sym.toString
+    case LinkToMember(mbr: EntityImpl, inTpl: EntityImpl) =>
+      mbr.sym.signatureString + " in " + inTpl.sym.toString
+    case _ => link.toString
+  }
+
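+  /* Resolve a link to external documentation: map the symbol's source file to a base URL via
+   * settings.extUrlMapping and append the member name as the fragment. Any/AnyRef/AnyVal/Nothing
+   * are redirected to ListClass (presumably because they have no associated source file of their
+   * own), and packages are looked up through their package object, which does have an associatedFile. */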
+  override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = {
+    val sym1 =
+      if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass
+      else if (sym.isPackage) 
+        /* Get package object which has associatedFile ne null */
+        sym.info.member(newTermName("package"))
+      else sym
+    Option(sym1.associatedFile) flatMap (_.underlyingSource) flatMap { src =>
+      val path = src.path
+      settings.extUrlMapping get path map { url =>
+        LinkToExternal(name, url + "#" + name)
+      }
+    }
+  }
+
+  override def warnNoLink = !settings.docNoLinkWarnings.value
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
new file mode 100644
index 0000000..ef84ac4
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -0,0 +1,1024 @@
+/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */
+
+package scala
+package tools.nsc
+package doc
+package model
+
+import base.comment._
+import diagram._
+
+import scala.collection._
+import scala.util.matching.Regex
+import scala.reflect.macros.internal.macroImpl
+import symtab.Flags
+
+import io._
+import model.{ RootPackage => RootPackageEntity }
+
+/** This trait extracts all required information for documentation from compilation units */
+class ModelFactory(val global: Global, val settings: doc.Settings) {
+  thisFactory: ModelFactory
+               with ModelFactoryImplicitSupport
+               with ModelFactoryTypeSupport
+               with DiagramFactory
+               with CommentFactory
+               with TreeFactory
+               with MemberLookup =>
+
+  import global._
+  import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass }
+  import rootMirror.{ RootPackage, RootClass, EmptyPackage }
+
+  // Defaults for member grouping, that may be overridden by the template
+  val defaultGroup = "Ungrouped"
+  val defaultGroupName = "Ungrouped"
+  val defaultGroupDesc = None
+  val defaultGroupPriority = 1000
+
+  def templatesCount = docTemplatesCache.count(_._2.isDocTemplate) - droppedPackages.size
+
+  private var _modelFinished = false
+  def modelFinished: Boolean = _modelFinished
+  private var universe: Universe = null
+
+  def makeModel: Option[Universe] = {
+    val universe = new Universe { thisUniverse =>
+      thisFactory.universe = thisUniverse
+      val settings = thisFactory.settings
+      val rootPackage = modelCreation.createRootPackage
+    }
+    _modelFinished = true
+    // complete the links between model entities, everything that couldn't have been done before
+    universe.rootPackage.completeModel()
+
+    Some(universe) filter (_.rootPackage != null)
+  }
+
+  // state:
+  var ids = 0
+  private val droppedPackages = mutable.Set[PackageImpl]()
+  protected val docTemplatesCache = new mutable.LinkedHashMap[Symbol, DocTemplateImpl]
+  protected val noDocTemplatesCache = new mutable.LinkedHashMap[Symbol, NoDocTemplateImpl]
+  def packageDropped(tpl: DocTemplateImpl) = tpl match {
+    case p: PackageImpl => droppedPackages(p)
+    case _ => false
+  }
+
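+  /* Intern short strings: the model creates many identical name strings, so interning the short
+   * ones keeps the memory footprint down. */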
+  def optimize(str: String): String =
+    if (str.length < 16) str.intern else str
+
+  /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
+
+  abstract class EntityImpl(val sym: Symbol, val inTpl: TemplateImpl) extends Entity {
+    val name = optimize(sym.nameString)
+    val universe = thisFactory.universe
+
+    // Debugging:
+    // assert(id != 36, sym + "  " + sym.getClass)
+    //println("Creating entity #" + id + " [" + kind + " " + qualifiedName + "] for sym " + sym.kindString + " " + sym.ownerChain.reverse.map(_.name).mkString("."))
+
+    def inTemplate: TemplateImpl = inTpl
+    def toRoot: List[EntityImpl] = this :: inTpl.toRoot
+    def qualifiedName = name
+    def annotations = sym.annotations.filterNot(_.tpe =:= typeOf[macroImpl]).map(makeAnnotation)
+    def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject
+    def isType = sym.name.isTypeName
+  }
+
+  trait TemplateImpl extends EntityImpl with TemplateEntity {
+    override def qualifiedName: String =
+      if (inTemplate == null || inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
+    def isPackage = sym.isPackage
+    def isTrait = sym.isTrait
+    def isClass = sym.isClass && !sym.isTrait
+    def isObject = sym.isModule && !sym.isPackage
+    def isCaseClass = sym.isCaseClass
+    def isRootPackage = false
+    def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this))
+  }
+
+  abstract class MemberImpl(sym: Symbol, inTpl: DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
+    // If the current tpl is a DocTemplate, we consider it the root for resolving link targets (instead of the
+    // package the class is in) -- so people can refer to methods directly [[foo]], instead of using [[MyClass.foo]]
+    // in the doc comment of MyClass
+    def linkTarget: DocTemplateImpl = inTpl
+
+    lazy val comment = {
+      val documented = if (sym.hasAccessorFlag) sym.accessed else sym
+      thisFactory.comment(documented, linkTarget, inTpl)
+    }
+    def group = comment flatMap (_.group) getOrElse defaultGroup
+    override def inTemplate = inTpl
+    override def toRoot: List[MemberImpl] = this :: inTpl.toRoot
+    def inDefinitionTemplates =
+        if (inTpl == null)
+          docTemplatesCache(RootPackage) :: Nil
+        else
+          makeTemplate(sym.owner)::(sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
+    def visibility = {
+      if (sym.isPrivateLocal) PrivateInInstance()
+      else if (sym.isProtectedLocal) ProtectedInInstance()
+      else {
+        val qual =
+          if (sym.hasAccessBoundary)
+            Some(makeTemplate(sym.privateWithin))
+          else None
+        if (sym.isPrivate) PrivateInTemplate(inTpl)
+        else if (sym.isProtected) ProtectedInTemplate(qual getOrElse inTpl)
+        else qual match {
+          case Some(q) => PrivateInTemplate(q)
+          case None => Public()
+        }
+      }
+    }
+    def flags = {
+      val fgs = mutable.ListBuffer.empty[Paragraph]
+      if (sym.isImplicit) fgs += Paragraph(Text("implicit"))
+      if (sym.isSealed) fgs += Paragraph(Text("sealed"))
+      if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract"))
+      /* Resetting the DEFERRED flag is a little trick here for refined types: (example from scala.collection)
+       * {{{
+       *     implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] {
+       *       def isParallel = ...
+       * }}}
+       * the type the method returns is TraversableOps, which has all-abstract symbols. But in reality, it couldn't have
+       * any abstract terms, otherwise it would fail compilation. So we reset the DEFERRED flag. */
+      if (!sym.isTrait && (sym hasFlag Flags.DEFERRED) && (!isImplicitlyInherited)) fgs += Paragraph(Text("abstract"))
+      if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final"))
+      if (sym.isMacro) fgs += Paragraph(Text("macro"))
+      fgs.toList
+    }
+    def deprecation =
+      if (sym.isDeprecated)
+        Some((sym.deprecationMessage, sym.deprecationVersion) match {
+          case (Some(msg), Some(ver)) => parseWiki("''(Since version " + ver + ")'' " + msg, NoPosition, inTpl)
+          case (Some(msg), None) => parseWiki(msg, NoPosition, inTpl)
+          case (None, Some(ver)) =>  parseWiki("''(Since version " + ver + ")''", NoPosition, inTpl)
+          case (None, None) => Body(Nil)
+        })
+      else
+        comment flatMap { _.deprecated }
+    def migration =
+      if(sym.hasMigrationAnnotation)
+        Some((sym.migrationMessage, sym.migrationVersion) match {
+          case (Some(msg), Some(ver)) => parseWiki("''(Changed in version " + ver + ")'' " + msg, NoPosition, inTpl)
+          case (Some(msg), None) => parseWiki(msg, NoPosition, inTpl)
+          case (None, Some(ver)) =>  parseWiki("''(Changed in version " + ver + ")''", NoPosition, inTpl)
+          case (None, None) => Body(Nil)
+        })
+      else
+        None
+
+    def resultType = {
+      def resultTpe(tpe: Type): Type = tpe match { // similar to finalResultType, except that it leaves singleton types alone
+        case PolyType(_, res) => resultTpe(res)
+        case MethodType(_, res) => resultTpe(res)
+        case NullaryMethodType(res) => resultTpe(res)
+        case _ => tpe
+      }
+      val tpe = byConversion.fold(sym.tpe) (_.toType memberInfo sym)
+      makeTypeInTemplateContext(resultTpe(tpe), inTemplate, sym)
+    }
+    def isDef = false
+    def isVal = false
+    def isLazyVal = false
+    def isVar = false
+    def isConstructor = false
+    def isAliasType = false
+    def isAbstractType = false
+    def isAbstract =
+      // for the explanation of conversion == null see comment on flags
+      ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED)) && (!isImplicitlyInherited)) ||
+      sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic
+
+    def signature = externalSignature(sym)
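+    // Compact signature used in links: name, type parameters (with bounds), value parameter types
+    // and result type, with all whitespace stripped.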
+    lazy val signatureCompat = {
+
+      def defParams(mbr: Any): String = mbr match {
+        case d: MemberEntity with Def =>
+          val paramLists: List[String] =
+            if (d.valueParams.isEmpty) Nil
+            else d.valueParams map (ps => ps map (_.resultType.name) mkString ("(",",",")"))
+          paramLists.mkString
+        case _ => ""
+      }
+
+      def tParams(mbr: Any): String = mbr match {
+        case hk: HigherKinded if !hk.typeParams.isEmpty =>
+          def boundsToString(hi: Option[TypeEntity], lo: Option[TypeEntity]): String = {
+            def bound0(bnd: Option[TypeEntity], pre: String): String = bnd match {
+              case None => ""
+              case Some(tpe) => pre ++ tpe.toString
+            }
+            bound0(hi, "<:") ++ bound0(lo, ">:")
+          }
+          "[" + hk.typeParams.map(tp => tp.variance + tp.name + tParams(tp) + boundsToString(tp.hi, tp.lo)).mkString(", ") + "]"
+        case _ => ""
+      }
+
+      (name + tParams(this) + defParams(this) +":"+ resultType.name).replaceAll("\\s","") // no spaces allowed, they break links
+    }
+    // these only apply for NonTemplateMemberEntities
+    def useCaseOf: Option[MemberImpl] = None
+    def byConversion: Option[ImplicitConversionImpl] = None
+    def isImplicitlyInherited = false
+    def isShadowedImplicit    = false
+    def isAmbiguousImplicit   = false
+    def isShadowedOrAmbiguousImplicit = false
+  }
+
+  /** A template that is not documented at all. The class is instantiated during lookups, to indicate that the class
+   *  exists, but should not be documented (either it's not included in the source or it's not visible)
+   */
+  class NoDocTemplateImpl(sym: Symbol, inTpl: TemplateImpl) extends EntityImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with NoDocTemplate {
+    assert(modelFinished, this)
+    assert(!(noDocTemplatesCache isDefinedAt sym), (sym, noDocTemplatesCache(sym)))
+    noDocTemplatesCache += (sym -> this)
+    def isDocTemplate = false
+  }
+
+  /** An inherited template that was not documented in its original owner - example:
+   *  in classpath:  trait T { class C } -- T (and implicitly C) are not documented
+   *  in the source: trait U extends T -- C appears in U as a MemberTemplateImpl -- that is, U has a member for it
+   *  but C doesn't get its own page
+   */
+  abstract class MemberTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with MemberTemplateEntity {
+    // no templates cache for this class, each owner gets its own instance
+    def isDocTemplate = false
+    lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
+    def valueParams: List[List[ValueParam]] = Nil // TODO: these are now only computed for DocTemplates
+
+    def parentTypes =
+      if (sym.isPackage || sym == AnyClass) List() else {
+        val tps = (this match {
+          case a: AliasType => sym.tpe.dealias.parents
+          case a: AbstractType => sym.info.bounds match {
+            case TypeBounds(lo, RefinedType(parents, decls)) => parents
+            case TypeBounds(lo, hi) => hi :: Nil
+            case _ => Nil
+          }
+          case _ => sym.tpe.parents
+        }) map { _.asSeenFrom(sym.thisType, sym) }
+        makeParentTypes(RefinedType(tps, EmptyScope), Some(this), inTpl)
+      }
+  }
+
+   /** The instantiation of `TemplateImpl` triggers the creation of the following entities:
+    *  All ancestors of the template and all non-package members.
+    */
+  abstract class DocTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberTemplateImpl(sym, inTpl) with DocTemplateEntity {
+    assert(!modelFinished, (sym, inTpl))
+    assert(!(docTemplatesCache isDefinedAt sym), sym)
+    docTemplatesCache += (sym -> this)
+
+    if (settings.verbose)
+      inform("Creating doc template for " + sym)
+
+    override def linkTarget: DocTemplateImpl = this
+    override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
+
+    protected def reprSymbol: Symbol = sym
+
+    def inSource =
+      if (reprSymbol.sourceFile != null && ! reprSymbol.isSynthetic)
+        Some((reprSymbol.sourceFile, reprSymbol.pos.line))
+      else
+        None
+
+    def sourceUrl = {
+      def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/")
+      val assumedSourceRoot  = fixPath(settings.sourcepath.value) stripSuffix "/"
+
+      if (!settings.docsourceurl.isDefault)
+        inSource map { case (file, _) =>
+          val filePath = fixPath(file.path).replaceFirst("^" + assumedSourceRoot, "").stripSuffix(".scala")
+          val tplOwner = this.inTemplate.qualifiedName
+          val tplName = this.name
+          val patches = new Regex("""€\{(FILE_PATH|TPL_OWNER|TPL_NAME)\}""")
+          def substitute(name: String): String = name match {
+            case "FILE_PATH" => filePath
+            case "TPL_OWNER" => tplOwner
+            case "TPL_NAME" => tplName
+          }
+          val patchedString = patches.replaceAllIn(settings.docsourceurl.value, m => java.util.regex.Matcher.quoteReplacement(substitute(m.group(1))) )
+          new java.net.URL(patchedString)
+        }
+      else None
+    }
+
+    private def templateAndType(ancestor: Symbol): (TemplateImpl, TypeEntity) = (makeTemplate(ancestor), makeType(reprSymbol.info.baseType(ancestor), this))
+    lazy val (linearizationTemplates, linearizationTypes) =
+      (reprSymbol.ancestors map templateAndType).unzip
+
+    /* Subclass cache */
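+    // AnyRef is special-cased: its cache stays null (presumably because every class would be a subclass),
+    // so registerSubClass ignores it and directSubClasses reports none.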
+    private lazy val subClassesCache = (
+      if (sym == AnyRefClass) null
+      else mutable.ListBuffer[DocTemplateEntity]()
+    )
+    def registerSubClass(sc: DocTemplateEntity): Unit = {
+      if (subClassesCache != null)
+        subClassesCache += sc
+    }
+    def directSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList
+
+    /* Implicitly convertible class cache */
+    private var implicitlyConvertibleClassesCache: mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)] = null
+    def registerImplicitlyConvertibleClass(dtpl: DocTemplateImpl, conv: ImplicitConversionImpl): Unit = {
+      if (implicitlyConvertibleClassesCache == null)
+        implicitlyConvertibleClassesCache = mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)]()
+      implicitlyConvertibleClassesCache += ((dtpl, conv))
+    }
+
+    def incomingImplicitlyConvertedClasses: List[(DocTemplateImpl, ImplicitConversionImpl)] =
+      if (implicitlyConvertibleClassesCache == null)
+        List()
+      else
+        implicitlyConvertibleClassesCache.toList
+
+    // the implicit conversions are generated eagerly, but the members generated by implicit conversions are added
+    // lazily, on completeModel
+    val conversions: List[ImplicitConversionImpl] =
+      if (settings.docImplicits) makeImplicitConversions(sym, this) else Nil
+
+    // members as given by the compiler
+    lazy val memberSyms      = sym.info.members.filter(s => membersShouldDocument(s, this)).toList
+
+    // the inherited templates (classes, traits or objects)
+    val memberSymsLazy  = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this))
+    // the direct members (methods, values, vars, types and directly contained templates)
+    val memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_))
+    // the members generated by the symbols in memberSymsEager
+    val ownMembers      = (memberSymsEager.flatMap(makeMember(_, None, this)))
+
+    // all the members that are documented PLUS the members inherited by implicit conversions
+    var members: List[MemberImpl] = ownMembers
+
+    def templates       = members collect { case c: TemplateEntity with MemberEntity => c }
+    def methods         = members collect { case d: Def => d }
+    def values          = members collect { case v: Val => v }
+    def abstractTypes   = members collect { case t: AbstractType => t }
+    def aliasTypes      = members collect { case t: AliasType => t }
+
+    /**
+     * This is the final point in the core model creation: no DocTemplates are created after the model has finished, but
+     * inherited templates and implicit members are added to the members at this point.
+     */
+    def completeModel(): Unit = {
+      // DFS completion
+      // since alias types and abstract types have no members of their own, there's no reason for them to call completeModel
+      if (!sym.isAliasType && !sym.isAbstractType)
+        for (member <- members)
+          member match {
+            case d: DocTemplateImpl => d.completeModel()
+            case _ =>
+          }
+
+      members :::= memberSymsLazy.map(modelCreation.createLazyTemplateMember(_, this))
+
+      outgoingImplicitlyConvertedClasses
+
+      for (pt <- sym.info.parents; parentTemplate <- findTemplateMaybe(pt.typeSymbol)) parentTemplate registerSubClass this
+
+      // the members generated by the symbols in memberSymsEager PLUS the members from the usecases
+      val allMembers = ownMembers ::: ownMembers.flatMap(_.useCaseOf).distinct
+      implicitsShadowing = makeShadowingTable(allMembers, conversions, this)
+      // finally, add the members generated by implicit conversions
+      members :::= conversions.flatMap(_.memberImpls)
+    }
+
+    var implicitsShadowing = Map[MemberEntity, ImplicitMemberShadowing]()
+
+    lazy val outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity, ImplicitConversionImpl)] =
+      conversions flatMap (conv =>
+        if (!implicitExcluded(conv.conversionQualifiedName))
+          conv.targetTypeComponents map {
+            case (template, tpe) =>
+              template match {
+                case d: DocTemplateImpl if (d != this) => d.registerImplicitlyConvertibleClass(this, conv)
+                case _ => // nothing
+              }
+              (template, tpe, conv)
+          }
+        else List()
+      )
+
+    override def isDocTemplate = true
+    private[this] lazy val companionSymbol =
+      if (sym.isAliasType || sym.isAbstractType) {
+        inTpl.sym.info.member(sym.name.toTermName) match {
+          case NoSymbol => NoSymbol
+          case s =>
+            s.info match {
+              case ot: OverloadedType =>
+                NoSymbol
+              case _ =>
+                // that's to navigate from val Foo: FooExtractor to FooExtractor :)
+                s.info.resultType.typeSymbol
+            }
+        }
+      }
+      else
+        sym.companionSymbol
+
+    def companion =
+      companionSymbol match {
+        case NoSymbol => None
+        case comSym if !isEmptyJavaObject(comSym) && (comSym.isClass || comSym.isModule) =>
+          makeTemplate(comSym) match {
+            case d: DocTemplateImpl => Some(d)
+            case _ => None
+          }
+        case _ => None
+      }
+
+    def constructors: List[MemberImpl with Constructor] = if (isClass) members collect { case d: Constructor => d } else Nil
+    def primaryConstructor: Option[MemberImpl with Constructor] = if (isClass) constructors find { _.isPrimary } else None
+    override def valueParams =
+      // we don't want params on a class (non case class) signature
+      if (isCaseClass) primaryConstructor match {
+        case Some(const) => const.sym.paramss map (_ map (makeValueParam(_, this)))
+        case None => List()
+      }
+      else List.empty
+
+    // These are generated on-demand, make sure you don't call them more than once
+    def inheritanceDiagram = makeInheritanceDiagram(this)
+    def contentDiagram = makeContentDiagram(this)
+
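+    /* Look up group metadata (name, description, priority) first in this template's comment, then in
+     * the comments of its linearization, and finally in the enclosing templates. */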
+    def groupSearch[T](extractor: Comment => Option[T]): Option[T] = {
+      val comments = comment +: linearizationTemplates.collect { case dtpl: DocTemplateImpl => dtpl.comment }
+      comments.flatten.map(extractor).flatten.headOption orElse {
+        Option(inTpl) flatMap (_.groupSearch(extractor))
+      }
+    }
+
+    def groupDescription(group: String): Option[Body] = groupSearch(_.groupDesc.get(group)) orElse { if (group == defaultGroup) defaultGroupDesc else None }
+    def groupPriority(group: String): Int = groupSearch(_.groupPrio.get(group)) getOrElse { if (group == defaultGroup) defaultGroupPriority else 0 }
+    def groupName(group: String): String = groupSearch(_.groupNames.get(group)) getOrElse { if (group == defaultGroup) defaultGroupName else group }
+  }
+
+  abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
+    override def inTemplate = inTpl
+    override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
+    override def reprSymbol = sym.info.members.find (_.isPackageObject) getOrElse sym
+
+    def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p }
+  }
+
+  abstract class RootPackageImpl(sym: Symbol) extends PackageImpl(sym, null) with RootPackageEntity
+
+  abstract class NonTemplateMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl],
+                                       override val useCaseOf: Option[MemberImpl], inTpl: DocTemplateImpl)
+           extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity {
+    override lazy val comment = {
+      def nonRootTemplate(sym: Symbol): Option[DocTemplateImpl] =
+        if (sym eq RootPackage) None else findTemplateMaybe(sym)
+      /* Variable precedence order for implicitly added members: take the variable definitions from ...
+       * 1. the target of the implicit conversion
+       * 2. the definition template (owner)
+       * 3. the current template
+       */
+      val inRealTpl = conversion.flatMap { conv =>
+        nonRootTemplate(conv.toType.typeSymbol)
+      } orElse nonRootTemplate(sym.owner) orElse Option(inTpl)
+      inRealTpl flatMap { tpl =>
+        thisFactory.comment(sym, tpl, tpl)
+      }
+    }
+
+    override def inDefinitionTemplates = useCaseOf.fold(super.inDefinitionTemplates)(_.inDefinitionTemplates)
+
+    override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name)
+    lazy val definitionName = {
+      val qualifiedName = conversion.fold(inDefinitionTemplates.head.qualifiedName)(_.conversionQualifiedName)
+      optimize(qualifiedName + "#" + name)
+    }
+    def isUseCase = useCaseOf.isDefined
+    override def byConversion: Option[ImplicitConversionImpl] = conversion
+    override def isImplicitlyInherited = { assert(modelFinished); conversion.isDefined }
+    override def isShadowedImplicit    = isImplicitlyInherited && inTpl.implicitsShadowing.get(this).map(_.isShadowed).getOrElse(false)
+    override def isAmbiguousImplicit   = isImplicitlyInherited && inTpl.implicitsShadowing.get(this).map(_.isAmbiguous).getOrElse(false)
+    override def isShadowedOrAmbiguousImplicit = isShadowedImplicit || isAmbiguousImplicit
+  }
+
+  abstract class NonTemplateParamMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl],
+                                            useCaseOf: Option[MemberImpl], inTpl: DocTemplateImpl)
+           extends NonTemplateMemberImpl(sym, conversion, useCaseOf, inTpl) {
+    def valueParams = {
+      val info = conversion.fold(sym.info)(_.toType memberInfo sym)
+      info.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
+        if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl)
+      }}
+    }
+  }
+
+  abstract class ParameterImpl(val sym: Symbol, val inTpl: TemplateImpl) extends ParameterEntity {
+    val name = optimize(sym.nameString)
+  }
+
+  private trait AliasImpl {
+    def sym: Symbol
+    def inTpl: TemplateImpl
+    def alias = makeTypeInTemplateContext(sym.tpe.dealias, inTpl, sym)
+  }
+
+  private trait TypeBoundsImpl {
+    def sym: Symbol
+    def inTpl: TemplateImpl
+    def lo = sym.info.bounds match {
+      case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass =>
+        Some(makeTypeInTemplateContext(appliedType(lo, sym.info.typeParams map {_.tpe}), inTpl, sym))
+      case _ => None
+    }
+    def hi = sym.info.bounds match {
+      case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass =>
+        Some(makeTypeInTemplateContext(appliedType(hi, sym.info.typeParams map {_.tpe}), inTpl, sym))
+      case _ => None
+    }
+  }
+
+  trait HigherKindedImpl extends HigherKinded {
+    def sym: Symbol
+    def inTpl: TemplateImpl
+    def typeParams =
+      sym.typeParams map (makeTypeParam(_, inTpl))
+  }
+  /* ============== MAKER METHODS ============== */
+
+  /** This method makes it easier to work with the different kinds of symbols created by scalac by stripping down the
+   * package object abstraction and placing members directly in the package.
+   *
+   * Here's the explanation of what we do. The code:
+   *
+   * package foo {
+   *   object `package` {
+   *     class Bar
+   *   }
+   * }
+   *
+   * will yield this Symbol structure:
+   *                                       +---------+ (2)
+   *                                       |         |
+   * +---------------+         +---------- v ------- | ---+                              +--------+ (2)
+   * | package foo#1 <---(1)---- module class foo#2  |    |                              |        |
+   * +---------------+         | +------------------ | -+ |         +------------------- v ---+   |
+   *                           | | package object foo#3 <-----(1)---- module class package#4  |   |
+   *                           | +----------------------+ |         | +---------------------+ |   |
+   *                           +--------------------------+         | | class package$Bar#5 | |   |
+   *                                                                | +----------------- | -+ |   |
+   *                                                                +------------------- | ---+   |
+   *                                                                                     |        |
+   *                                                                                     +--------+
+   * (1) sourceModule
+   * (2) you get out of owners with .owner
+   *
+   * and normalizeTemplate(Bar.owner) will get us the package, instead of the module class of the package object.
+   */
+  def normalizeTemplate(aSym: Symbol): Symbol = aSym match {
+    case null | rootMirror.EmptyPackage | NoSymbol =>
+      normalizeTemplate(RootPackage)
+    case ObjectClass =>
+      normalizeTemplate(AnyRefClass)
+    case _ if aSym.isPackageObject =>
+      normalizeTemplate(aSym.owner)
+    case _ if aSym.isModuleClass =>
+      normalizeTemplate(aSym.sourceModule)
+    case _ =>
+      aSym
+  }
+
+  /**
+   * These are all model construction methods. Please do not use them directly: they call each other recursively,
+   * starting from makeModel. On the other hand, makeTemplate, makeAnnotation, makeMember and makeType should only be
+   * used after the model has been created (modelFinished = true), otherwise assertions will start failing.
+   */
+  object modelCreation {
+
+    def createRootPackage: PackageImpl = docTemplatesCache.get(RootPackage) match {
+      case Some(root: PackageImpl) => root
+      case _ => modelCreation.createTemplate(RootPackage, null) match {
+        case Some(root: PackageImpl) => root
+        case _ => sys.error("Scaladoc: Unable to create root package!")
+      }
+    }
+
+    /**
+     *  Create a template, either a package, class, trait or object
+     */
+    def createTemplate(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = {
+      // don't call this after the model finished!
+      assert(!modelFinished, (aSym, inTpl))
+
+      def createRootPackageComment: Option[Comment] =
+        if(settings.docRootContent.isDefault) None
+        else {
+          import Streamable._
+          Path(settings.docRootContent.value) match {
+            case f : File => {
+              val rootComment = closing(f.inputStream())(is => parse(slurp(is), "", NoPosition, inTpl))
+              Some(rootComment)
+            }
+            case _ => None
+          }
+        }
+
+      def createDocTemplate(bSym: Symbol, inTpl: DocTemplateImpl): DocTemplateImpl = {
+        assert(!modelFinished, (bSym, inTpl)) // only created BEFORE the model is finished
+        if (bSym.isAliasType && bSym != AnyRefClass)
+          new DocTemplateImpl(bSym, inTpl) with AliasImpl with AliasType { override def isAliasType = true }
+        else if (bSym.isAbstractType)
+          new DocTemplateImpl(bSym, inTpl) with TypeBoundsImpl with AbstractType { override def isAbstractType = true }
+        else if (bSym.isModule)
+          new DocTemplateImpl(bSym, inTpl) with Object {}
+        else if (bSym.isTrait)
+          new DocTemplateImpl(bSym, inTpl) with Trait {}
+        else if (bSym.isClass || bSym == AnyRefClass)
+          new DocTemplateImpl(bSym, inTpl) with Class {}
+        else
+          sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a documentable template.")
+      }
+
+      val bSym = normalizeTemplate(aSym)
+      if (docTemplatesCache isDefinedAt bSym)
+        return Some(docTemplatesCache(bSym))
+
+      /* Three cases of templates:
+       * (1) root package -- special cased for bootstrapping
+       * (2) package
+       * (3) class/object/trait
+       */
+      if (bSym == RootPackage) // (1)
+        Some(new RootPackageImpl(bSym) {
+          override lazy val comment = createRootPackageComment
+          override val name = "root"
+          override def inTemplate = this
+          override def toRoot = this :: Nil
+          override def qualifiedName = "_root_"
+          override def isRootPackage = true
+          override lazy val memberSyms =
+            (bSym.info.members ++ EmptyPackage.info.members).toList filter { s =>
+              s != EmptyPackage && s != RootPackage
+            }
+        })
+      else if (bSym.isPackage) // (2)
+        if (settings.skipPackage(makeQualifiedName(bSym)))
+          None
+        else
+          inTpl match {
+            case inPkg: PackageImpl =>
+              val pack = new PackageImpl(bSym, inPkg) {}
+              // Used to check package pruning works:
+              //println(pack.qualifiedName)
+              if (pack.templates.filter(_.isDocTemplate).isEmpty && pack.memberSymsLazy.isEmpty) {
+                droppedPackages += pack
+                None
+              } else
+                Some(pack)
+            case _ =>
+              sys.error("'" + bSym + "' must be in a package")
+          }
+      else {
+        // no class inheritance at this point
+        assert(inOriginalOwner(bSym, inTpl), bSym + " in " + inTpl)
+        Some(createDocTemplate(bSym, inTpl))
+      }
+    }
+
+    /**
+     *  After the model is completed, no more DocTemplateEntities are created.
+     *  Therefore any symbol that still appears is:
+     *   - MemberTemplateEntity (created here)
+     *   - NoDocTemplateEntity (created in makeTemplate)
+     */
+    def createLazyTemplateMember(aSym: Symbol, inTpl: DocTemplateImpl): MemberImpl = {
+
+      // Code is duplicated because the anonymous classes are created statically
+      def createNoDocMemberTemplate(bSym: Symbol, inTpl: DocTemplateImpl): MemberTemplateImpl = {
+        assert(modelFinished) // only created AFTER the model is finished
+        if (bSym.isModule || (bSym.isAliasType && bSym.tpe.typeSymbol.isModule))
+          new MemberTemplateImpl(bSym, inTpl) with Object {}
+        else if (bSym.isTrait || (bSym.isAliasType && bSym.tpe.typeSymbol.isTrait))
+          new MemberTemplateImpl(bSym, inTpl) with Trait {}
+        else if (bSym.isClass || (bSym.isAliasType && bSym.tpe.typeSymbol.isClass))
+          new MemberTemplateImpl(bSym, inTpl) with Class {}
+        else
+          sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a member template.")
+      }
+
+      assert(modelFinished)
+      val bSym = normalizeTemplate(aSym)
+
+      if (docTemplatesCache isDefinedAt bSym)
+        docTemplatesCache(bSym)
+      else
+        docTemplatesCache.get(bSym.owner) match {
+          case Some(inTpl) =>
+            val mbrs = inTpl.members.collect({ case mbr: MemberImpl if mbr.sym == bSym => mbr })
+            assert(mbrs.length == 1)
+            mbrs.head
+          case _ =>
+            // move the class completely to the new location
+            createNoDocMemberTemplate(bSym, inTpl)
+        }
+    }
+  }
+
+  // TODO: Should be able to override the type
+  def makeMember(aSym: Symbol, conversion: Option[ImplicitConversionImpl], inTpl: DocTemplateImpl): List[MemberImpl] = {
+
+    def makeMember0(bSym: Symbol, useCaseOf: Option[MemberImpl]): Option[MemberImpl] = {
+      if (bSym.isGetter && bSym.isLazy)
+          Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val {
+            override def isLazyVal = true
+          })
+      else if (bSym.isGetter && bSym.accessed.isMutable)
+        Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val {
+          override def isVar = true
+        })
+      else if (bSym.isMethod && !bSym.hasAccessorFlag && !bSym.isConstructor && !bSym.isModule) {
+        val cSym = { // This unsightly hack closes issue #4086.
+          if (bSym == definitions.Object_synchronized) {
+            val cSymInfo = (bSym.info: @unchecked) match {
+              case PolyType(ts, MethodType(List(bp), mt)) =>
+                val cp = bp.cloneSymbol.setPos(bp.pos).setInfo(definitions.byNameType(bp.info))
+                PolyType(ts, MethodType(List(cp), mt))
+            }
+            bSym.cloneSymbol.setPos(bSym.pos).setInfo(cSymInfo)
+          }
+          else bSym
+        }
+        Some(new NonTemplateParamMemberImpl(cSym, conversion, useCaseOf, inTpl) with HigherKindedImpl with Def {
+          override def isDef = true
+        })
+      }
+      else if (bSym.isConstructor)
+        if (conversion.isDefined)
+          None // don't list constructors inherited by implicit conversion
+        else
+          Some(new NonTemplateParamMemberImpl(bSym, conversion, useCaseOf, inTpl) with Constructor {
+            override def isConstructor = true
+            def isPrimary = sym.isPrimaryConstructor
+          })
+      else if (bSym.isGetter) // Scala field accessor or Java field
+        Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val {
+          override def isVal = true
+        })
+      else if (bSym.isAbstractType && !typeShouldDocument(bSym, inTpl))
+        Some(new MemberTemplateImpl(bSym, inTpl) with TypeBoundsImpl with AbstractType {
+          override def isAbstractType = true
+        })
+      else if (bSym.isAliasType && !typeShouldDocument(bSym, inTpl))
+        Some(new MemberTemplateImpl(bSym, inTpl) with AliasImpl with AliasType {
+          override def isAliasType = true
+        })
+      else if (!modelFinished && (bSym.isPackage || templateShouldDocument(bSym, inTpl)))
+        modelCreation.createTemplate(bSym, inTpl)
+      else
+        None
+    }
+
+    if (!localShouldDocument(aSym) || aSym.isModuleClass || aSym.isPackageObject || aSym.isMixinConstructor)
+      Nil
+    else {
+      val allSyms = useCases(aSym, inTpl.sym) map { case (bSym, bComment, bPos) =>
+        docComments.put(bSym, DocComment(bComment, bPos)) // put the comment in the list, don't parse it yet, closes SI-4898
+        bSym
+      }
+
+      val member = makeMember0(aSym, None)
+      if (allSyms.isEmpty)
+        member.toList
+      else
+        // Use cases replace the original definitions - SI-5054
+        allSyms flatMap { makeMember0(_, member) }
+    }
+  }
+
+  def findMember(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = {
+    normalizeTemplate(aSym.owner)
+    inTpl.members.find(_.sym == aSym)
+  }
+
+  def findTemplateMaybe(aSym: Symbol): Option[DocTemplateImpl] = {
+    assert(modelFinished)
+    docTemplatesCache.get(normalizeTemplate(aSym)).filterNot(packageDropped(_))
+  }
+
+  def makeTemplate(aSym: Symbol): TemplateImpl = makeTemplate(aSym, None)
+
+  def makeTemplate(aSym: Symbol, inTpl: Option[TemplateImpl]): TemplateImpl = {
+    assert(modelFinished)
+
+    def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl =
+      noDocTemplatesCache getOrElse (aSym, new NoDocTemplateImpl(aSym, inTpl))
+
+    findTemplateMaybe(aSym) getOrElse {
+      val bSym = normalizeTemplate(aSym)
+      makeNoDocTemplate(bSym, inTpl getOrElse makeTemplate(bSym.owner))
+    }
+  }
+
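+  /* Build an Annotation entity: resolve the annotation class to a template and, when that template is a
+   * documented class, pair the constructor's value parameters with the annotation's argument trees. */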
+  def makeAnnotation(annot: AnnotationInfo): scala.tools.nsc.doc.model.Annotation = {
+    val aSym = annot.symbol
+    new EntityImpl(aSym, makeTemplate(aSym.owner)) with scala.tools.nsc.doc.model.Annotation {
+      lazy val annotationClass =
+        makeTemplate(annot.symbol)
+      val arguments = {
+        val paramsOpt: Option[List[ValueParam]] = annotationClass match {
+          case aClass: DocTemplateEntity with Class =>
+            val constr = aClass.constructors collectFirst {
+              case c: MemberImpl if c.sym == annot.original.symbol => c
+            }
+            constr flatMap (_.valueParams.headOption)
+          case _ => None
+        }
+        val argTrees = annot.args map makeTree
+        paramsOpt match {
+          case Some (params) =>
+            params zip argTrees map { case (param, tree) =>
+              new ValueArgument {
+                def parameter = Some(param)
+                def value = tree
+              }
+            }
+          case None =>
+            argTrees map { tree =>
+              new ValueArgument {
+                def parameter = None
+                def value = tree
+              }
+            }
+        }
+      }
+    }
+  }
+
+  /** */
+  def makeTypeParam(aSym: Symbol, inTpl: TemplateImpl): TypeParam =
+    new ParameterImpl(aSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with TypeParam {
+      def variance: String = {
+        if (sym hasFlag Flags.COVARIANT) "+"
+        else if (sym hasFlag Flags.CONTRAVARIANT) "-"
+        else ""
+      }
+    }
+
+  /** */
+  def makeValueParam(aSym: Symbol, inTpl: DocTemplateImpl): ValueParam = {
+    makeValueParam(aSym, inTpl, aSym.nameString)
+  }
+
+
+  /** */
+  def makeValueParam(aSym: Symbol, inTpl: DocTemplateImpl, newName: String): ValueParam =
+    new ParameterImpl(aSym, inTpl) with ValueParam {
+      override val name = newName
+      def defaultValue =
+        if (aSym.hasDefault) {
+          // units.filter should return only one element
+          (currentRun.units filter (_.source.file == aSym.sourceFile)).toList match {
+            case List(unit) =>
+              // SI-4922 `sym == aSym` is insufficient if `aSym` is a clone of the symbol
+              //         of the parameter in the tree, as can happen with type parametric methods.
+              def isCorrespondingParam(sym: Symbol) = (
+                sym != null &&
+                sym != NoSymbol &&
+                sym.owner == aSym.owner &&
+                sym.name == aSym.name &&
+                sym.isParamWithDefault
+              )
+              unit.body find (t => isCorrespondingParam(t.symbol)) collect {
+                case ValDef(_,_,_,rhs) if rhs ne EmptyTree  => makeTree(rhs)
+              }
+            case _ => None
+          }
+        }
+        else None
+      def resultType =
+        makeTypeInTemplateContext(aSym.tpe, inTpl, aSym)
+      def isImplicit = aSym.isImplicit
+    }
+
+  /** */
+  def makeTypeInTemplateContext(aType: Type, inTpl: TemplateImpl, dclSym: Symbol): TypeEntity = {
+    def ownerTpl(sym: Symbol): Symbol =
+      if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
+    val tpe =
+      if (thisFactory.settings.useStupidTypes) aType else {
+        def ownerTpl(sym: Symbol): Symbol =
+          if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
+        val fixedSym = if (inTpl.sym.isModule) inTpl.sym.moduleClass else inTpl.sym
+        aType.asSeenFrom(fixedSym.thisType, ownerTpl(dclSym))
+      }
+    makeType(tpe, inTpl)
+  }
+
+  /** Get the types of the parents of the current class, ignoring the refinements */
+  def makeParentTypes(aType: Type, tpl: Option[MemberTemplateImpl], inTpl: TemplateImpl): List[(TemplateEntity, TypeEntity)] = aType match {
+    case RefinedType(parents, defs) =>
+      val ignoreParents = Set[Symbol](AnyClass, AnyRefClass, ObjectClass)
+      val filtParents =
+        // we don't want to expose too many links to AnyRef; that would just be redundant information
+        tpl match {
+          case Some(tpl) if (!tpl.sym.isModule && parents.length < 2) || (tpl.sym == AnyValClass) || (tpl.sym == AnyRefClass) || (tpl.sym == AnyClass) => parents
+          case _ => parents.filterNot((p: Type) => ignoreParents(p.typeSymbol))
+        }
+
+      /** Returns:
+       *   - a DocTemplate if the type's symbol is documented
+       *   - a NoDocTemplateMember if the type's symbol is not documented in its parent but in another template
+       *   - a NoDocTemplate if the type's symbol is not documented at all */
+      def makeTemplateOrMemberTemplate(parent: Type): TemplateImpl = {
+        def noDocTemplate = makeTemplate(parent.typeSymbol)
+        findTemplateMaybe(parent.typeSymbol) match {
+          case Some(tpl) => tpl
+          case None => parent match {
+            case TypeRef(pre, sym, args) =>
+              findTemplateMaybe(pre.typeSymbol) match {
+                case Some(tpl) => findMember(parent.typeSymbol, tpl).collect({case t: TemplateImpl => t}).getOrElse(noDocTemplate)
+                case None => noDocTemplate
+              }
+            case _ => noDocTemplate
+          }
+        }
+      }
+
+      filtParents.map(parent => {
+        val templateEntity = makeTemplateOrMemberTemplate(parent)
+        val typeEntity = makeType(parent, inTpl)
+        (templateEntity, typeEntity)
+      })
+    case _ =>
+      List((makeTemplate(aType.typeSymbol), makeType(aType, inTpl)))
+  }
+
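+  /* Build the dotted qualified name of a symbol by walking its owner chain, skipping package objects
+   * and the root package, and stopping once an owner of `relativeTo` is reached. */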
+  def makeQualifiedName(sym: Symbol, relativeTo: Option[Symbol] = None): String = {
+    val stop = relativeTo map (_.ownerChain.toSet) getOrElse Set[Symbol]()
+    var sym1 = sym
+    val path = new StringBuilder()
+    // var path = List[Symbol]()
+
+    while ((sym1 != NoSymbol) && (path.isEmpty || !stop(sym1))) {
+      val sym1Norm = normalizeTemplate(sym1)
+      if (!sym1.sourceModule.isPackageObject && sym1Norm != RootPackage) {
+        if (path.length != 0)
+          path.insert(0, ".")
+        path.insert(0, sym1Norm.nameString)
+        // path::= sym1Norm
+      }
+      sym1 = sym1.owner
+    }
+
+    optimize(path.toString)
+    //path.mkString(".")
+  }
+
+  def inOriginalOwner(aSym: Symbol, inTpl: TemplateImpl): Boolean =
+    normalizeTemplate(aSym.owner) == normalizeTemplate(inTpl.sym)
+
+  def templateShouldDocument(aSym: Symbol, inTpl: DocTemplateImpl): Boolean =
+    (aSym.isTrait || aSym.isClass || aSym.isModule || typeShouldDocument(aSym, inTpl)) &&
+    localShouldDocument(aSym) &&
+    !isEmptyJavaObject(aSym) &&
+    // either it's inside the original owner or we can document it later:
+    (!inOriginalOwner(aSym, inTpl) || (aSym.isPackageClass || (aSym.sourceFile != null)))
+
+  def membersShouldDocument(sym: Symbol, inTpl: TemplateImpl) = {
+    // pruning modules that shouldn't be documented
+    // Why Symbol.isInitialized? Well, because we need to avoid exploring all the space available to scaladoc
+    // from the classpath -- scaladoc is a hog, it will explore everything starting from the root package unless we
+    // somehow prune the tree. And isInitialized is a good heuristic for pruning -- if the package was not explored
+    // during typer and refchecks, it's not necessary for the current application and there's no need to explore it.
+    (!sym.isModule || sym.moduleClass.isInitialized) &&
+    // documenting only public and protected members
+    localShouldDocument(sym) &&
+    // Only this class's constructors are part of its members, inherited constructors are not.
+    (!sym.isConstructor || sym.owner == inTpl.sym) &&
+    // If the @bridge annotation overrides a normal member, show it
+    !isPureBridge(sym)
+  }
+
+  def isEmptyJavaObject(aSym: Symbol): Boolean =
+    aSym.isModule && aSym.isJavaDefined &&
+    aSym.info.members.exists(s => localShouldDocument(s) && (!s.isConstructor || s.owner == aSym))
+
+  def localShouldDocument(aSym: Symbol): Boolean =
+    !aSym.isPrivate && (aSym.isProtected || aSym.privateWithin == NoSymbol) && !aSym.isSynthetic
+
+  /** Filter '@bridge' methods only if *they don't override non-bridge methods*. See SI-5373 for details */
+  def isPureBridge(sym: Symbol) = sym.isBridge && sym.allOverriddenSymbols.forall(_.isBridge)
+
+  // the classes that are excluded from the index should also be excluded from the diagrams
+  def classExcluded(clazz: TemplateEntity): Boolean = settings.hardcoded.isExcluded(clazz.qualifiedName)
+
+  // the implicit conversions that are excluded from the pages should not appear in the diagram
+  def implicitExcluded(convertorMethod: String): Boolean = settings.hiddenImplicits(convertorMethod)
+
+  // whether or not to create a page for an {abstract,alias} type
+  def typeShouldDocument(bSym: Symbol, inTpl: DocTemplateImpl) =
+    (settings.docExpandAllTypes && (bSym.sourceFile != null)) ||
+    (bSym.isAliasType || bSym.isAbstractType) &&
+    { val rawComment = global.expandedDocComment(bSym, inTpl.sym)
+      rawComment.contains("@template") || rawComment.contains("@documentable") }
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
new file mode 100644
index 0000000..f984b45
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -0,0 +1,575 @@
+/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL
+ *
+ * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
+ *
+ * @author Vlad Ureche
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc
+package doc
+package model
+
+import scala.collection._
+import symtab.Flags
+
+/**
+ * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
+ *
+ * Let's take this as an example:
+ * {{{
+ *    object Test {
+ *      class A
+ *
+ *      class B {
+ *        def foo = 1
+ *      }
+ *
+ *      class C extends B {
+ *        def bar = 2
+ *        class D
+ *      }
+ *
+ *      implicit def conv(a: A) = new C
+ *    }
+ * }}}
+ *
+ * Overview:
+ * - scaladoc-ing the above classes, `A` will get two more methods: foo and bar, over its default methods
+ * - the nested classes (specifically `D` above), abstract types, type aliases and constructor members are not added to
+ * `A` (see makeMember0 in ModelFactory, last 3 cases)
+ * - the members added by implicit conversion are always listed under the implicit conversion, not under the class they
+ * actually come from (`foo` will be listed as coming from the implicit conversion to `C` instead of `B`) - see
+ * `definitionName` in MemberImpl
+ *
+ * Internals:
+ * TODO: Give an overview here
+ */
+trait ModelFactoryImplicitSupport {
+  thisFactory: ModelFactory with ModelFactoryTypeSupport with CommentFactory with TreeFactory =>
+
+  import global._
+  import global.analyzer._
+  import global.definitions._
+  import settings.hardcoded
+
+  // debugging:
+  val DEBUG: Boolean = settings.docImplicitsDebug.value
+  val ERROR: Boolean = true // currently we show all errors
+  @inline final def debug(msg: => String) = if (DEBUG) settings.printMsg(msg)
+  @inline final def error(msg: => String) = if (ERROR) settings.printMsg(msg)
+
+  /** This is a flag that indicates whether to eliminate implicits that cannot be satisfied within the current scope.
+   * For example, if an implicit conversion requires that there is a Numeric[T] in scope:
+   *  {{{
+   *     class A[T]
+   *     class B extends A[Int]
+   *     class C extends A[String]
+   *     implicit def enrichA[T: Numeric](a: A[T]): D
+   *  }}}
+   *  For B, no constraints are generated as Numeric[Int] is already in the default scope. On the other hand, for the
+   *  conversion from C to D, depending on -implicits-show-all, the conversion can:
+   *   - not be generated at all, since there's no Numeric[String] in scope (if run without -implicits-show-all)
+   *   - be generated with a *weird* constraint, Numeric[String], as the user might add it by hand (if the flag is enabled)
+   */
+  class ImplicitNotFound(tpe: Type) extends Exception("No implicit of type " + tpe + " found in scope.")
+
+  /* ============== MAKER METHODS ============== */
+
+  /**
+   *  Make the implicit conversion objects
+   *
+   *  A word about the scope of the implicit conversions: currently we look at a very basic context composed of the
+   *  default Scala imports (Predef._ for example) and the companion object of the current class, if one exists. In the
+   *  future we might want to extend this to more complex scopes.
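+   *
+   *  A sketch (hypothetical names, for illustration only) of a conversion that this scope covers, because it is
+   *  defined in the companion object of the documented class:
+   *  {{{
+   *     class Foo
+   *     object Foo {
+   *       implicit def fooToRichFoo(f: Foo): RichFoo = new RichFoo(f)
+   *     }
+   *     class RichFoo(f: Foo) { def twice: RichFoo = this }
+   *  }}}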
+   */
+  def makeImplicitConversions(sym: Symbol, inTpl: DocTemplateImpl): List[ImplicitConversionImpl] =
+    // Nothing and Null are somewhat special -- they can be transformed by any implicit conversion available in scope.
+    // But we don't want that, so we'll simply refuse to find implicit conversions for Nothing and Null
+    if (!(sym.isClass || sym.isTrait || sym == AnyRefClass) || sym == NothingClass || sym == NullClass) Nil
+    else {
+      val context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit)
+
+      val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams)
+      var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl))
+      // also keep empty conversions, so they appear in diagrams
+      // conversions = conversions.filter(!_.members.isEmpty)
+
+      // Filter out specialized conversions from array
+      if (sym == ArrayClass)
+        conversions = conversions.filterNot((conv: ImplicitConversionImpl) =>
+          hardcoded.arraySkipConversions.contains(conv.conversionQualifiedName))
+
+      // Filter out non-sensical conversions from value types
+      if (isPrimitiveValueType(sym.tpe_*))
+        conversions = conversions.filter((ic: ImplicitConversionImpl) =>
+          hardcoded.valueClassFilter(sym.nameString, ic.conversionQualifiedName))
+
+      // Put the visible conversions in front
+      val (ownConversions, commonConversions) =
+        conversions.partition(!_.isHiddenConversion)
+
+      ownConversions ::: commonConversions
+    }
+
+  /** makeImplicitConversion performs the heavier lifting to get the implicit listing:
+   * - for each possible conversion function (also called view)
+   *    * figures out the final result of the view (to what is our class transformed?)
+   *    * figures out the necessary constraints on the type parameters (such as T <: Int) and the context (such as Numeric[T])
+   *    * lists all inherited members
+   *
+   * What, in detail:
+   *  - say we start from a class A[T1, T2, T3, T4]
+   *  - we have an implicit function (view) in scope:
+   *     def enrichA[T3 <: Long, T4](a: A[Int, Foo[Bar[X]], T3, T4])(implicit ev1: TypeTag[T4], ev2: Numeric[T4]): EnrichedA
+   *  - A is converted to EnrichedA ONLY if a couple of constraints are satisfied:
+   *     * T1 must be equal to Int
+   *     * T2 must be equal to Foo[Bar[X]]
+   *     * T3 must be upper bounded by Long
+   *     * there must be evidence of Numeric[T4] and a TypeTag[T4] within scope
+   *  - the final type is EnrichedA and A therefore inherits a couple of members from enrichA
+   *
+   * How?
+   * some notes:
+   *  - Scala's type inference will want to solve all type parameters down to actual types, but we only want constraints
+   * to maintain generality
+   *  - therefore, allViewsFrom wraps type parameters into "untouchable" type variables that only gather constraints,
+   * but are never solved down to a type
+   *  - these must be reverted back to the type parameters and the constraints must be extracted and simplified (this is
+   * done by uniteConstraints and makeBoundedConstraints -- be sure to check them out)
+   *  - we also need to transform implicit parameters in the view's signature into constraints, such that Numeric[T4]
+   * appears as a constraint
+   */
+  def makeImplicitConversion(sym: Symbol, result: SearchResult, constrs: List[TypeConstraint], context: Context, inTpl: DocTemplateImpl): List[ImplicitConversionImpl] =
+    if (result.tree == EmptyTree) Nil
+    else {
+      // `result` will contain the type of the view (= implicit conversion method)
+      // the search introduces untouchable type variables, but we want to get back to type parameters
+      val viewFullType = result.tree.tpe
+      // set the previously implicit parameters to being explicit
+
+      val (viewSimplifiedType, viewImplicitTypes) = removeImplicitParameters(viewFullType)
+
+      // TODO: Isolate this corner case :) - Predef.<%< and put it in the testsuite
+      if (viewSimplifiedType.params.length != 1) {
+        // This is known to be caused by the `<%<` object in Predef:
+        // {{{
+        //    sealed abstract class <%<[-From, +To] extends (From => To) with Serializable
+        //    object <%< {
+        //      implicit def conformsOrViewsAs[A <% B, B]: A <%< B = new (A <%< B) {def apply(x: A) = x}
+        //    }
+        // }}}
+        // so we just won't generate an implicit conversion for implicit methods that only take implicit parameters
+        return Nil
+      }
+
+      // type the view application so we get the exact type of the result (not the formal type)
+      val viewTree = result.tree.setType(viewSimplifiedType)
+      val appliedTree = new ApplyImplicitView(viewTree, List(Ident("<argument>") setType viewTree.tpe.paramTypes.head))
+      val appliedTreeTyped: Tree = {
+        val newContext = context.makeImplicit(context.ambiguousErrors)
+        newContext.macrosEnabled = false
+        val newTyper = global.analyzer.newTyper(newContext)
+        newTyper.silent(_.typed(appliedTree), reportAmbiguousErrors = false) match {
+          case global.analyzer.SilentResultValue(t: Tree) => t
+          case global.analyzer.SilentTypeError(err) =>
+            global.reporter.warning(sym.pos, err.toString)
+            return Nil
+        }
+      }
+
+      // now we have the final type:
+      val toType = wildcardToNothing(typeVarToOriginOrWildcard(appliedTreeTyped.tpe.finalResultType))
+
+      try {
+        // Transform bound constraints into scaladoc constraints
+        val implParamConstraints = makeImplicitConstraints(viewImplicitTypes, sym, context, inTpl)
+        val boundsConstraints = makeBoundedConstraints(sym.typeParams, constrs, inTpl)
+        // TODO: no substitution constraints appear in the library and compiler scaladoc. Maybe they can be removed?
+        val substConstraints = makeSubstitutionConstraints(result.subst, inTpl)
+        val constraints = implParamConstraints ::: boundsConstraints ::: substConstraints
+
+        List(new ImplicitConversionImpl(sym, result.tree.symbol, toType, constraints, inTpl))
+      } catch {
+        case i: ImplicitNotFound =>
+          //println("  Eliminating: " + toType)
+          Nil
+      }
+    }
+
+  def makeImplicitConstraints(types: List[Type], sym: Symbol, context: Context, inTpl: DocTemplateImpl): List[Constraint] =
+    types.flatMap((tpe:Type) => {
+      // TODO: Before creating constraints, map typeVarToOriginOrWildcard on the implicitTypes
+      val implType = typeVarToOriginOrWildcard(tpe)
+      val qualifiedName = makeQualifiedName(implType.typeSymbol)
+
+      var available: Option[Boolean] = None
+
+      // see: https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/gm_fr0RKzC4
+      //
+      // println(implType + " => " + implType.isTrivial)
+      // var tpes: List[Type] = List(implType)
+      // while (!tpes.isEmpty) {
+      //   val tpe = tpes.head
+      //   tpes = tpes.tail
+      //   tpe match {
+      //     case TypeRef(pre, sym, args) =>
+      //       tpes = pre :: args ::: tpes
+      //       println(tpe + " => " + tpe.isTrivial)
+      //     case _ =>
+      //       println(tpe + " (of type" + tpe.getClass + ") => " + tpe.isTrivial)
+      //   }
+      // }
+      // println("\n")
+
+      // look for type variables in the type. If there are none, we can decide if the implicit is there or not
+      if (implType.isTrivial) {
+        try {
+          // TODO: Not sure if `owner = sym.owner` is the right thing to do -- seems similar to what scalac should be doing
+          val silentContext = context.make(owner = sym.owner).makeSilent(reportAmbiguousErrors = false)
+          val search = inferImplicit(EmptyTree, tpe, false, false, silentContext, false)
+          available = Some(search.tree != EmptyTree)
+        } catch {
+          case _: TypeError =>
+        }
+      }
+
+      available match {
+        case Some(true) =>
+          Nil
+        case Some(false) if !settings.docImplicitsShowAll =>
+          // if -implicits-show-all is not set, we get rid of impossible conversions (such as Numeric[String])
+          throw new ImplicitNotFound(implType)
+        case _ =>
+          val typeParamNames = sym.typeParams.map(_.name)
+
+          // TODO: This is maybe the worst hack I ever did - it's as dirty as hell, but it seems to work, so until I
+          // learn more about symbols, it'll have to do.
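+          // Roughly: an implicit parameter of shape C[T], where T is one of the documented class's type parameters,
+          // becomes a type-class constraint (with a canned explanation when C is a known type class such as Numeric),
+          // while any other implicit type becomes a plain "an implicit value of type X is in scope" constraint.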
+          implType match {
+            case TypeRef(pre, sym, List(TypeRef(NoPrefix, targ, Nil))) if (typeParamNames contains targ.name) =>
+              hardcoded.knownTypeClasses.get(qualifiedName) match {
+                case Some(explanation) =>
+                  List(new KnownTypeClassConstraint {
+                    val typeParamName = targ.nameString
+                    lazy val typeExplanation = explanation
+                    lazy val typeClassEntity = makeTemplate(sym)
+                    lazy val implicitType: TypeEntity = makeType(implType, inTpl)
+                  })
+                case None =>
+                  List(new TypeClassConstraint {
+                    val typeParamName = targ.nameString
+                    lazy val typeClassEntity = makeTemplate(sym)
+                    lazy val implicitType: TypeEntity = makeType(implType, inTpl)
+                  })
+              }
+            case _ =>
+              List(new ImplicitInScopeConstraint{
+                lazy val implicitType: TypeEntity = makeType(implType, inTpl)
+              })
+          }
+      }
+    })
+
+  def makeSubstitutionConstraints(subst: TreeTypeSubstituter, inTpl: DocTemplateImpl): List[Constraint] =
+    (subst.from zip subst.to) map {
+      case (from, to) =>
+        new EqualTypeParamConstraint {
+          error("Scaladoc implicits: Unexpected type substitution constraint from: " + from + " to: " + to)
+          val typeParamName = from.toString
+          val rhs = makeType(to, inTpl)
+        }
+    }
+
+  def makeBoundedConstraints(tparams: List[Symbol], constrs: List[TypeConstraint], inTpl: DocTemplateImpl): List[Constraint] =
+    (tparams zip constrs) flatMap {
+      case (tparam, constr) => {
+        uniteConstraints(constr) match {
+          case (loBounds, upBounds) => (loBounds filter (_ != NothingTpe), upBounds filter (_ != AnyTpe)) match {
+            case (Nil, Nil) =>
+              Nil
+            case (List(lo), List(up)) if (lo == up) =>
+              List(new EqualTypeParamConstraint {
+                val typeParamName = tparam.nameString
+                lazy val rhs = makeType(lo, inTpl)
+              })
+            case (List(lo), List(up)) =>
+              List(new BoundedTypeParamConstraint {
+                val typeParamName = tparam.nameString
+                lazy val lowerBound = makeType(lo, inTpl)
+                lazy val upperBound = makeType(up, inTpl)
+              })
+            case (List(lo), Nil) =>
+              List(new LowerBoundedTypeParamConstraint {
+                val typeParamName = tparam.nameString
+                lazy val lowerBound = makeType(lo, inTpl)
+              })
+            case (Nil, List(up)) =>
+              List(new UpperBoundedTypeParamConstraint {
+                val typeParamName = tparam.nameString
+                lazy val upperBound = makeType(up, inTpl)
+              })
+            case other =>
+              // this is likely an error on the lub/glb side
+              error("Scaladoc implicits: Error computing lub/glb for: " + ((tparam, constr)) + ":\n" + other)
+              Nil
+          }
+        }
+      }
+    }
+
+  /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
+
+  class ImplicitConversionImpl(
+    val sym: Symbol,
+    val convSym: Symbol,
+    val toType: Type,
+    val constrs: List[Constraint],
+    inTpl: DocTemplateImpl)
+      extends ImplicitConversion {
+
+    def source: DocTemplateEntity = inTpl
+
+    def targetType: TypeEntity = makeType(toType, inTpl)
+
+    def convertorOwner: TemplateEntity = {
+      if (convSym eq NoSymbol)
+        error("Scaladoc implicits: " + toString + " = NoSymbol!")
+
+      makeTemplate(convSym.owner)
+    }
+
+    def targetTypeComponents: List[(TemplateEntity, TypeEntity)] = makeParentTypes(toType, None, inTpl)
+
+    def convertorMethod: Either[MemberEntity, String] = {
+      var convertor: MemberEntity = null
+
+      convertorOwner match {
+        case doc: DocTemplateImpl =>
+          val convertors = members.collect { case m: MemberImpl if m.sym == convSym => m }
+          if (convertors.length == 1)
+            convertor = convertors.head
+        case _ =>
+      }
+      if (convertor ne null)
+        Left(convertor)
+      else
+        Right(convSym.nameString)
+    }
+
+    def conversionShortName = convSym.nameString
+
+    def conversionQualifiedName = makeQualifiedName(convSym)
+
+    lazy val constraints: List[Constraint] = constrs
+
+    lazy val memberImpls: List[MemberImpl] = {
+      // Obtain the members inherited by the implicit conversion
+      val memberSyms = toType.members.filter(implicitShouldDocument(_)).toList
+
+      // Debugging part :)
+      debug(sym.nameString + "\n" + "=" * sym.nameString.length())
+      debug(" * conversion " + convSym + " from " + sym.tpe + " to " + toType)
+
+      debug("   -> full type: " + toType)
+      if (constraints.length != 0) {
+        debug("   -> constraints: ")
+        constraints foreach { constr => debug("      - " + constr) }
+      }
+      debug("   -> members:")
+      memberSyms foreach (sym => debug("      - "+ sym.decodedName +" : " + sym.info))
+      debug("")
+
+      memberSyms.flatMap({ aSym =>
+        // we can't just pick up nodes from the original template, although that would be very convenient:
+        // they need the byConversion field to be attached to themselves and the types to be transformed by
+        // asSeenFrom
+
+        // at the same time, the member itself is in the inTpl, not in the new template -- but should pick up
+        // variables from the old template. Ugly huh? We'll always create the member inTpl, but it will change
+        // the template when expanding variables in the comment :)
+        makeMember(aSym, Some(this), inTpl)
+      })
+    }
+
+    lazy val members: List[MemberEntity] = memberImpls
+
+    def isHiddenConversion = settings.hiddenImplicits(conversionQualifiedName)
+
+    override def toString = "Implicit conversion from " + sym.tpe + " to " + toType + " done by " + convSym
+  }
+
+  /* ========================= HELPER METHODS ========================== */
+  /**
+   *  Computes the shadowing table for all the members in the implicit conversions
+   *  @param members All template's members, including usecases and full signature members
+   *  @param convs All the conversions the template takes part in
+   *  @param inTpl the usual :)
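+   *
+   *  A sketch of the situation recorded here (hypothetical names):
+   *  {{{
+   *     class A { def length: Int = 0 }
+   *     class RichA(a: A) { def length: Int = 1 }
+   *     implicit def enrich(a: A): RichA = new RichA(a)
+   *  }}}
+   *  the `length` brought in by `enrich` is shadowed by `A`'s own `length`, so the converted member gets an entry
+   *  in the returned table.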
+   */
+  def makeShadowingTable(members: List[MemberImpl],
+                         convs: List[ImplicitConversionImpl],
+                         inTpl: DocTemplateImpl): Map[MemberEntity, ImplicitMemberShadowing] = {
+    assert(modelFinished)
+
+    val shadowingTable = mutable.Map[MemberEntity, ImplicitMemberShadowing]()
+    val membersByName: Map[Name, List[MemberImpl]] = members.groupBy(_.sym.name)
+    val convsByMember = (Map.empty[MemberImpl, ImplicitConversionImpl] /: convs) {
+      case (map, conv) => map ++ conv.memberImpls.map (_ -> conv)
+    }
+
+    for (conv <- convs) {
+      val otherConvMembers: Map[Name, List[MemberImpl]] = convs filterNot (_ == conv) flatMap (_.memberImpls) groupBy (_.sym.name)
+
+      for (member <- conv.memberImpls) {
+        val sym1 = member.sym
+        val tpe1 = conv.toType.memberInfo(sym1)
+
+        // check if it's shadowed by a member in the original class.
+        val shadowed = membersByName.get(sym1.name).toList.flatten filter { other =>
+          !settings.docImplicitsSoundShadowing.value || !isDistinguishableFrom(tpe1, inTpl.sym.info.memberInfo(other.sym))
+        }
+
+        // check if it's shadowed by another conversion.
+        val ambiguous = otherConvMembers.get(sym1.name).toList.flatten filter { other =>
+          val tpe2 = convsByMember(other).toType.memberInfo(other.sym)
+          !isDistinguishableFrom(tpe1, tpe2) || !isDistinguishableFrom(tpe2, tpe1)
+        }
+
+        // we finally have the shadowing info
+        if (!shadowed.isEmpty || !ambiguous.isEmpty) {
+          val shadowing = new ImplicitMemberShadowing {
+            def shadowingMembers: List[MemberEntity] = shadowed
+            def ambiguatingMembers: List[MemberEntity] = ambiguous
+          }
+
+          shadowingTable += (member -> shadowing)
+        }
+      }
+    }
+
+    shadowingTable.toMap
+  }
+
+
+  /**
+   * uniteConstraints takes a TypeConstraint instance and simplifies the constraints inside
+   *
+   * Normally TypeConstraint contains multiple lower and upper bounds, and we want to reduce this to a lower and an
+   * upper bound. Here are a couple of catches we need to be aware of:
+   *  - before finding a view (implicit method in scope that maps class A[T1,T2,.. Tn] to something else) the type
+   * parameters are transformed into "untouchable" type variables so that type inference does not attempt to
+   * fully solve them down to a type but rather constrains them on both sides just enough for the view to be
+   * applicable -- now, we want to transform those type variables back to the original type parameters
+   *  - some of the bounds fail type inference and therefore refer to Nothing => when performing unification (lub, glb)
+   * they start looking ugly => we (unsoundly) transform Nothing to WildcardType so we fool the unification algorithms
+   * into thinking there's nothing there
+   *  - we don't want the wildcard types surviving the unification so we replace them back to Nothings
+   */
+  def uniteConstraints(constr: TypeConstraint): (List[Type], List[Type]) =
+    try {
+      (List(wildcardToNothing(lub(constr.loBounds map typeVarToOriginOrWildcard))),
+       List(wildcardToNothing(glb(constr.hiBounds map typeVarToOriginOrWildcard))))
+    } catch {
+      // does this actually ever happen? (probably when type vars occur in the bounds)
+      case x: Throwable => (constr.loBounds.distinct, constr.hiBounds.distinct)
+    }
+
+  /**
+   *  Make implicits explicit - Not used currently
+   */
+  // object implicitToExplicit extends TypeMap {
+  //   def apply(tp: Type): Type = mapOver(tp) match {
+  //     case MethodType(params, resultType) =>
+  //       MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType)
+  //     case other =>
+  //       other
+  //   }
+  // }
+
+  /**
+   * removeImplicitParameters transforms implicit parameters from the view result type into constraints and
+   * returns the simplified type of the view
+   *
+   * for the example view:
+   *   implicit def enrichMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): EnrichedMyClass[T]
+   * the implicit view result type is:
+   *   (a: MyClass[T])(implicit ev: Numeric[T]): EnrichedMyClass[T]
+   * and the simplified type will be:
+   *   MyClass[T] => EnrichedMyClass[T]
+   */
+  def removeImplicitParameters(viewType: Type): (Type, List[Type]) = {
+
+    val params = viewType.paramss.flatten
+    val (normalParams, implParams) = params.partition(!_.isImplicit)
+    val simplifiedType = MethodType(normalParams, viewType.finalResultType)
+    val implicitTypes = implParams.map(_.tpe)
+
+    (simplifiedType, implicitTypes)
+  }
+
+  /**
+   * typeVarToOriginOrWildcard transforms the "untouchable" type variables into either their origins (the original
+   * type parameters) or into wildcard types if nothing matches
+   */
+  object typeVarToOriginOrWildcard extends TypeMap {
+    def apply(tp: Type): Type = mapOver(tp) match {
+      case tv: TypeVar =>
+        if (tv.constr.inst.typeSymbol == NothingClass)
+          WildcardType
+        else
+          tv.origin //appliedType(tv.origin.typeConstructor, tv.typeArgs map this)
+      case other =>
+        if (other.typeSymbol == NothingClass)
+          WildcardType
+        else
+          other
+    }
+  }
+
+  /**
+   * wildcardToNothing transforms wildcard types back to Nothing
+   */
+  object wildcardToNothing extends TypeMap {
+    def apply(tp: Type): Type = mapOver(tp) match {
+      case WildcardType =>
+        NothingTpe
+      case other =>
+        other
+    }
+  }
+
+  /** implicitShouldDocument decides whether a member inherited by implicit conversion should be documented */
+  def implicitShouldDocument(aSym: Symbol): Boolean = {
+    // We shouldn't document:
+    // - constructors
+    // - common methods (in Any, AnyRef, Object) as they are automatically removed
+    // - private and protected members (not accessible following an implicit conversion)
+    // - members starting with _ (usually reserved for internal stuff)
+    localShouldDocument(aSym) && (!aSym.isConstructor) && (aSym.owner != AnyValClass) &&
+    (aSym.owner != AnyClass) && (aSym.owner != ObjectClass) &&
+    (!aSym.isProtected) && (!aSym.isPrivate) && (!aSym.name.startsWith("_")) &&
+    (aSym.isMethod || aSym.isGetter || aSym.isSetter) &&
+    (aSym.nameString != "getClass")
+  }
+
+  /* To put it very bluntly: checks if you can call the implicitly added member of type t1 when a member of type t2
+   * is already there in the class. We assume the names of the two members coincide.
+   *
+   * The trick here is that the result type does not matter - the condition for removal is that the paramss have the
+   * same structure (A => B => C may not override (A, B) => C) and that all the parameter types of the implicit
+   * conversion's member are subtypes of the corresponding parameter types of the parent member */
+  def isDistinguishableFrom(t1: Type, t2: Type): Boolean = {
+    // Vlad: I tried using matches but it's not exactly what we need:
+    // (p: AnyRef)AnyRef matches ((t: String)AnyRef returns false -- but we want that to be true
+    // !(t1 matches t2)
+    if (t1.paramss.map(_.length) == t2.paramss.map(_.length)) {
+      for ((t1p, t2p) <- t1.paramss.flatten zip t2.paramss.flatten)
+       if (!isSubType(t1 memberInfo t1p, t2 memberInfo t2p))
+         return true // if on the corresponding parameter you give a type that is in t1 but not in t2
+                     // def foo(a: Either[Int, Double]): Int = 3
+                     // def foo(b: Left[T1]): Int = 6
+                     // a.foo(Right(4.5d)) prints out 3 :)
+      false
+    } else true // the member structure is different foo(3, 5) vs foo(3)(5)
+  }
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
new file mode 100644
index 0000000..2b7e250
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
@@ -0,0 +1,315 @@
+/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */
+
+package scala.tools.nsc
+package doc
+package model
+
+import base._
+import diagram._
+
+import scala.collection._
+
+/** This trait extracts all required information for documentation from compilation units */
+trait ModelFactoryTypeSupport {
+  thisFactory: ModelFactory
+               with ModelFactoryImplicitSupport
+               with ModelFactoryTypeSupport
+               with DiagramFactory
+               with CommentFactory
+               with TreeFactory
+               with MemberLookup =>
+
+  import global._
+  import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass }
+
+  protected val typeCache = new mutable.LinkedHashMap[Type, TypeEntity]
+
+  /** */
+  def makeType(aType: Type, inTpl: TemplateImpl): TypeEntity = {
+    def createTypeEntity = new TypeEntity {
+      private var nameBuffer = new StringBuilder
+      private var refBuffer = new immutable.TreeMap[Int, (LinkTo, Int)]
+      private def appendTypes0(types: List[Type], sep: String): Unit = types match {
+        case Nil =>
+        case tp :: Nil =>
+          appendType0(tp)
+        case tp :: tps =>
+          appendType0(tp)
+          nameBuffer append sep
+          appendTypes0(tps, sep)
+      }
+
+      private def appendType0(tpe: Type): Unit = tpe match {
+        /* Type refs */
+        case tp: TypeRef if definitions.isFunctionTypeDirect(tp) =>
+          val args = tp.typeArgs
+          nameBuffer append '('
+          appendTypes0(args.init, ", ")
+          nameBuffer append ") ⇒ "
+          appendType0(args.last)
+        case tp: TypeRef if definitions.isScalaRepeatedParamType(tp) =>
+          appendType0(tp.args.head)
+          nameBuffer append '*'
+        case tp: TypeRef if definitions.isByNameParamType(tp) =>
+          nameBuffer append "⇒ "
+          appendType0(tp.args.head)
+        case tp: TypeRef if definitions.isTupleTypeDirect(tp) =>
+          val args = tp.typeArgs
+          nameBuffer append '('
+          appendTypes0(args, ", ")
+          nameBuffer append ')'
+        case TypeRef(pre, aSym, targs) =>
+          val preSym = pre.widen.typeSymbol
+
+          // SI-3314/SI-4888: Classes, Traits and Types can be inherited from a template to another:
+          // class Enum { abstract class Value }
+          // class Day extends Enum { object Mon extends Value /*...*/ }
+          // ===> in such cases we have several options:
+          // (0) if there's no inheritance taking place (Enum#Value) we can link to the template directly
+          // (1) if we generate the doc template for Day, we can link to the correct member
+          // (2) If the symbol comes from an external library for which we know the documentation URL, point to it.
+          // (3) if we don't generate the doc template, we should at least indicate the correct prefix in the tooltip
+          val bSym = normalizeTemplate(aSym)
+          val owner =
+            if ((preSym != NoSymbol) &&                  /* it needs a prefix */
+                (preSym != bSym.owner) &&                /* prefix is different from owner */
+                (aSym == bSym))                          /* normalization doesn't play tricks on us */
+              preSym
+            else
+              bSym.owner
+
+          val link =
+            findTemplateMaybe(bSym) match {
+              case Some(bTpl) if owner == bSym.owner =>
+                // (0) the owner's class is linked AND has a template - lovely
+                bTpl match {
+                  case dtpl: DocTemplateEntity => new LinkToTpl(dtpl)
+                  case _ => new Tooltip(bTpl.qualifiedName)
+                }
+              case _ =>
+                val oTpl = findTemplateMaybe(owner)
+                (oTpl, oTpl flatMap (findMember(bSym, _))) match {
+                  case (Some(oTpl), Some(bMbr)) =>
+                    // (1) the owner has a doc template and the member is found in it - link to the member
+                    LinkToMember(bMbr, oTpl)
+                  case _ =>
+                    val name = makeQualifiedName(bSym)
+                    if (!bSym.owner.isPackage)
+                      Tooltip(name)
+                    else
+                      findExternalLink(bSym, name).getOrElse (
+                      // (3) if we could find neither the owner nor an external URL to link to, show a tooltip with the qualified name
+                        Tooltip(name)
+                      )
+                }
+            }
+
+          // SI-4360 Showing prefixes when necessary
+          // We check whether there's any directly accessible type with the same name in the current template OR if the
+          // type is inherited from one template to another. There may be multiple symbols with the same name in scope,
+          // but we won't show the prefix if our symbol is among them, only if *it's not* -- that's equal to showing
+          // the prefix only for ambiguous references, not for overloaded ones.
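+          // For example (hypothetical): if the documented template defines its own Node and the type printed here
+          // refers to some.other.pkg.Node, the reference is ambiguous and is shown with the "some.other.pkg." prefix;
+          // a reference to the locally accessible Node is shown without it.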
+          def needsPrefix: Boolean = {
+            if ((owner != bSym.owner || preSym.isRefinementClass) && (normalizeTemplate(owner) != inTpl.sym))
+              return true
+            // don't get tricked into prefixing method type params and existentials:
+            // I tried several tricks BUT adding the method for which I'm creating the type => that simply won't scale,
+            // as ValueParams are independent of their parent member, and I really don't want to add this information to
+            // all terms, as we're already over the allowed memory footprint
+            if (aSym.isTypeParameterOrSkolem || aSym.isExistentiallyBound /* existential or existential skolem */)
+              return false
+
+            for (tpl <- inTpl.sym.ownerChain) {
+              tpl.info.member(bSym.name) match {
+                case NoSymbol =>
+                  // No syms with that name, look further inside the owner chain
+                case sym =>
+                  // Symbol found -- either the correct symbol, another one OR an overloaded alternative
+                  if (sym == bSym)
+                    return false
+                  else sym.info match {
+                    case OverloadedType(owner, alternatives) =>
+                      return alternatives.contains(bSym)
+                    case _ =>
+                      return true
+                  }
+              }
+            }
+            // if it's not found in the owner chain, we can safely leave out the prefix
+            false
+          }
+
+          val prefix =
+            if (!settings.docNoPrefixes && needsPrefix && (bSym != AnyRefClass /* which we normalize */)) {
+              if (!owner.isRefinementClass) {
+                val qName = makeQualifiedName(owner, Some(inTpl.sym))
+                if (qName != "") qName + "." else ""
+              }
+              else {
+                nameBuffer append "("
+                appendType0(pre)
+                nameBuffer append ")#"
+                "" // we already appended the prefix
+              }
+            } else ""
+
+          //DEBUGGING:
+          //if (makeQualifiedName(bSym) == "pack1.A") println("needsPrefix(" + bSym + ", " + owner + ", " + inTpl.qualifiedName + ") => " + needsPrefix + "  and prefix=" + prefix)
+
+          val name = prefix + bSym.nameString
+          val pos0 = nameBuffer.length
+          refBuffer += pos0 -> ((link, name.length))
+          nameBuffer append name
+
+          if (!targs.isEmpty) {
+            nameBuffer append '['
+            appendTypes0(targs, ", ")
+            nameBuffer append ']'
+          }
+        /* Refined types */
+        case RefinedType(parents, defs) =>
+          val ignoreParents = Set[Symbol](AnyClass, ObjectClass)
+          val filtParents = parents filterNot (x => ignoreParents(x.typeSymbol)) match {
+            case Nil    => parents
+            case ps     => ps
+          }
+          appendTypes0(filtParents, " with ")
+          // XXX Still todo: properly printing refinements.
+          // Since I didn't know how to go about displaying a multi-line type, I went with
+          // printing single method refinements (which should be the most common) and printing
+          // the number of members if there are more.
+          defs.toList match {
+            case Nil      => ()
+            case x :: Nil => nameBuffer append (" { " + x.defString + " }")
+            case xs       => nameBuffer append (" { ... /* %d definitions in type refinement */ }" format xs.size)
+          }
+        /* Eval-by-name types */
+        case NullaryMethodType(result) =>
+          nameBuffer append '⇒'
+          appendType0(result)
+
+        /* Polymorphic types */
+        case PolyType(tparams, result) => assert(tparams.nonEmpty)
+          def typeParamsToString(tps: List[Symbol]): String = if (tps.isEmpty) "" else
+            tps.map{tparam =>
+              tparam.varianceString + tparam.name + typeParamsToString(tparam.typeParams)
+            }.mkString("[", ", ", "]")
+          nameBuffer append typeParamsToString(tparams)
+          appendType0(result)
+
+        case et @ ExistentialType(quantified, underlying) =>
+
+          def appendInfoStringReduced(sym: Symbol, tp: Type): Unit = {
+            if (sym.isType && !sym.isAliasType && !sym.isClass) {
+                tp match {
+                  case PolyType(tparams, _) =>
+                    nameBuffer append "["
+                    appendTypes0(tparams.map(_.tpe), ", ")
+                    nameBuffer append "]"
+                  case _ =>
+                }
+                tp.resultType match {
+                  case rt @ TypeBounds(_, _) =>
+                    appendType0(rt)
+                  case rt                    =>
+                    nameBuffer append " <: "
+                    appendType0(rt)
+                }
+            } else {
+              // fallback to the Symbol infoString
+              nameBuffer append sym.infoString(tp)
+            }
+          }
+
+          def appendClauses = {
+            nameBuffer append " forSome {"
+            var first = true
+            for (sym <- quantified) {
+              if (!first) { nameBuffer append ", " } else first = false
+              if (sym.isSingletonExistential) {
+                nameBuffer append "val "
+                nameBuffer append tpnme.dropSingletonName(sym.name)
+                nameBuffer append ": "
+                appendType0(dropSingletonType(sym.info.bounds.hi))
+              } else {
+                if (sym.flagString != "") nameBuffer append (sym.flagString + " ")
+                if (sym.keyString != "") nameBuffer append (sym.keyString + " ")
+                nameBuffer append sym.varianceString
+                nameBuffer append sym.nameString
+                appendInfoStringReduced(sym, sym.info)
+              }
+            }
+            nameBuffer append "}"
+          }
+
+          underlying match {
+            case TypeRef(pre, sym, args) if et.isRepresentableWithWildcards =>
+              appendType0(typeRef(pre, sym, Nil))
+              nameBuffer append "["
+              var first = true
+              val qset = quantified.toSet
+              for (arg <- args) {
+                if (!first) { nameBuffer append ", " } else first = false
+                arg match {
+                  case TypeRef(_, sym, _) if (qset contains sym) =>
+                    nameBuffer append "_"
+                    appendInfoStringReduced(sym, sym.info)
+                  case arg =>
+                    appendType0(arg)
+                }
+              }
+              nameBuffer append "]"
+            case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) =>
+              nameBuffer append "("
+              appendType0(underlying)
+              nameBuffer append ")"
+              appendClauses
+            case _ =>
+              appendType0(underlying)
+              appendClauses
+          }
+
+        case tb @ TypeBounds(lo, hi) =>
+          if (tb.lo != TypeBounds.empty.lo) {
+            nameBuffer append " >: "
+            appendType0(lo)
+          }
+          if (tb.hi != TypeBounds.empty.hi) {
+            nameBuffer append " <: "
+            appendType0(hi)
+          }
+        // case tpen: ThisType | SingleType | SuperType =>
+        //   if (tpen.isInstanceOf[ThisType] && tpen.asInstanceOf[ThisType].sym.isEffectiveRoot) {
+        //     appendType0 typeRef(NoPrefix, sym, Nil)
+        //   } else {
+        //     val underlying =
+        //     val pre = underlying.typeSymbol.skipPackageObject
+        //     if (pre.isOmittablePrefix) pre.fullName + ".type"
+        //     else prefixString + "type"
+        case tpen @ ThisType(sym) =>
+          appendType0(typeRef(NoPrefix, sym, Nil))
+          nameBuffer append ".this"
+          if (!tpen.underlying.typeSymbol.skipPackageObject.isOmittablePrefix) nameBuffer append ".type"
+        case tpen @ SuperType(thistpe, supertpe) =>
+          nameBuffer append "super["
+          appendType0(supertpe)
+          nameBuffer append "]"
+        case tpen @ SingleType(pre, sym) =>
+          appendType0(typeRef(pre, sym, Nil))
+          if (!tpen.underlying.typeSymbol.skipPackageObject.isOmittablePrefix) nameBuffer append ".type"
+        case tpen =>
+          nameBuffer append tpen.toString
+      }
+      appendType0(aType)
+      val refEntity = refBuffer
+      val name = optimize(nameBuffer.toString)
+      nameBuffer = null
+    }
+
+    // SI-4360: Entity caching depends on both the type AND the template it's in, as the prefixes might change for the
+    // same type based on the template the type is shown in.
+    if (settings.docNoPrefixes)
+      typeCache.getOrElseUpdate(aType, createTypeEntity)
+    else createTypeEntity
+  }
+}
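
A self-contained sketch (hypothetical declarations, not part of the patch) of the kinds of types the printer above
has branches for; the comments indicate, roughly, the rendering each branch is meant to produce:

    trait PrinterExamples {
      def f: (Int, String) => Boolean     // function type, rendered roughly as "(Int, String) ⇒ Boolean"
      def g(x: => Int): Unit              // by-name parameter type, rendered with a leading "⇒ "
      def h(xs: Int*): Unit               // repeated parameter type, rendered with a trailing "*"
      def t: (Int, Double)                // tuple type, rendered as "(Int, Double)"
      def r: Runnable { def run(): Unit } // refinement, rendered with its single definition inline
      def e: List[_ <: Number]            // existential type representable with wildcards
    }
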
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala
rename to src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
new file mode 100755
index 0000000..86a7a67
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
@@ -0,0 +1,95 @@
+package scala.tools.nsc
+package doc
+package model
+
+import scala.collection._
+import scala.reflect.internal.util.{RangePosition, OffsetPosition, SourceFile}
+
+/** The goal of this trait is, using makeTree, to browse a tree in order to:
+  * 1. obtain the String of the complete tree (tree.expression)
+  * 2. fill in the references used to create hyperlinks later in html.pageTemplate
+  *
+  * It is applied in ModelFactory => makeTree
+  */
+
+trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
+
+  val global: Global
+  import global._
+
+  def makeTree(rhs: Tree): TreeEntity = {
+
+    val expr = new StringBuilder
+    var refs = new immutable.TreeMap[Int, (Entity, Int)] // start, (Entity to be linked to , end)
+
+    rhs.pos match {
+      case pos: RangePosition => {
+        val source: SourceFile = pos.source
+        val firstIndex = pos.start
+        val lastIndex = pos.end
+
+        assert(firstIndex < lastIndex, "Invalid position indices for tree " + rhs + " (" + firstIndex + ", " + lastIndex + ")")
+        expr.appendAll(source.content, firstIndex, lastIndex - firstIndex)
+
+        val traverser = new Traverser {
+
+          /** Finds the Entity on which we will later create a link
+           * and stores it in refs with its position
+           */
+          def makeLink(rhs: Tree) {
+            val start = pos.start - firstIndex
+            val end = pos.end - firstIndex
+            if(start != end) {
+              var asym = rhs.symbol
+              if (asym.isClass) makeTemplate(asym) match{
+                case docTmpl: DocTemplateImpl =>
+                  refs += ((start, (docTmpl,end)))
+                case _ =>
+              }
+              else if (asym.isTerm && asym.owner.isClass){
+                if (asym.isSetter) asym = asym.getter(asym.owner)
+                makeTemplate(asym.owner) match {
+                  case docTmpl: DocTemplateImpl =>
+                    val mbrs: Option[MemberImpl] = findMember(asym, docTmpl)
+                    mbrs foreach { mbr => refs += ((start, (mbr,end))) }
+                  case _ =>
+                }
+              }
+            }
+          }
+          /**
+           * Goes through the tree and makes links when a Select occurs.
+           * The case of New(_) is ignored because the object we want to link to
+           * will be reached by recursion and we don't want a link on the "new" keyword.
+           * If a link is not created, its case is probably not handled here.
+           */
+          override def traverse(tree: Tree) = tree match {
+            case Select(qualifier, name) =>
+              qualifier match {
+                case New(_) =>
+                case _ => makeLink(tree)
+              }
+              traverse(qualifier)
+            case Ident(_) => makeLink(tree)
+            case _ =>
+              super.traverse(tree)
+          }
+        }
+
+        traverser.traverse(rhs)
+
+        new TreeEntity {
+          val expression = expr.toString
+          val refEntity = refs
+        }
+      }
+      case _ =>
+        new TreeEntity {
+          val expression = rhs.toString
+          val refEntity = new immutable.TreeMap[Int, (Entity, Int)]
+        }
+    }
+  }
+}
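
A sketch (hypothetical names, not part of the patch) of a right-hand side that makeTree above could be asked to
render, for instance as the default value of a parameter: the tree for `Config.defaultTimeout * 2` carries a
RangePosition, so expression would be, roughly, the source text "Config.defaultTimeout * 2" and refEntity would hold
the offsets at which links to the referenced members should be created:

    object Config { val defaultTimeout: Int = 30 }
    class Client(timeout: Int = Config.defaultTimeout * 2)
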
diff --git a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala b/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala
rename to src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala b/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala
rename to src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/Visibility.scala b/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala
similarity index 100%
rename from src/compiler/scala/tools/nsc/doc/model/Visibility.scala
rename to src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala
new file mode 100644
index 0000000..1846f37
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala
@@ -0,0 +1,137 @@
+package scala.tools.nsc.doc
+package model
+package diagram
+
+import model._
+
+/**
+ *  The diagram base classes
+ *
+ *  @author Damien Obrist
+ *  @author Vlad Ureche
+ */
+sealed abstract class Diagram {
+  def nodes: List[Node]
+  def edges: List[(Node, List[Node])]
+  def isContentDiagram = false     // Implemented by ContentDiagram
+  def isInheritanceDiagram = false // Implemented by InheritanceDiagram
+  def depthInfo: DepthInfo
+}
+
+case class ContentDiagram(nodes:List[/*Class*/Node], edges:List[(Node, List[Node])]) extends Diagram {
+  override def isContentDiagram = true
+  lazy val depthInfo = new ContentDiagramDepth(this)
+}
+
+/** A class diagram */
+case class InheritanceDiagram(thisNode: ThisNode,
+                        superClasses: List[/*Class*/Node],
+                        subClasses: List[/*Class*/Node],
+                        incomingImplicits: List[ImplicitNode],
+                        outgoingImplicits: List[ImplicitNode]) extends Diagram {
+  def nodes = thisNode :: superClasses ::: subClasses ::: incomingImplicits ::: outgoingImplicits
+  def edges = (thisNode -> (superClasses ::: outgoingImplicits)) ::
+              (subClasses ::: incomingImplicits).map(_ -> List(thisNode))
+
+  override def isInheritanceDiagram = true
+  lazy val depthInfo = new DepthInfo {
+    def maxDepth = 3
+  }
+}
+
+trait DepthInfo {
+  /** Gives the maximum depth */
+  def maxDepth: Int
+}
+
+sealed abstract class Node {
+  def name = tpe.name
+  def tpe: TypeEntity
+  def tpl: Option[TemplateEntity]
+  /** shortcut to get a DocTemplateEntity */
+  def doctpl: Option[DocTemplateEntity] = tpl match {
+    case Some(tpl) => tpl match {
+      case d: DocTemplateEntity => Some(d)
+      case _ => None
+    }
+    case _ => None
+  }
+  /* shortcuts to find the node type without matching */
+  def isThisNode = false
+  def isNormalNode = false
+  def isClassNode = if (tpl.isDefined) (tpl.get.isClass || tpl.get.qualifiedName == "scala.AnyRef") else false
+  def isTraitNode = if (tpl.isDefined) tpl.get.isTrait else false
+  def isObjectNode = if (tpl.isDefined) tpl.get.isObject else false
+  def isTypeNode  = if (doctpl.isDefined) doctpl.get.isAbstractType || doctpl.get.isAliasType else false
+  def isOtherNode = !(isClassNode || isTraitNode || isObjectNode || isTypeNode)
+  def isImplicitNode = false
+  def isOutsideNode = false
+  def tooltip: Option[String]
+}
+
+// different matchers, allowing you to use the pattern matcher against any node
+// NOTE: A ThisNode or ImplicitNode can at the same time be ClassNode/TraitNode/OtherNode, not exactly according to
+// case class specification -- thus a complete match would be:
+//   node match {
+//     case ThisNode(tpe, _) =>     /* case for this node, you can still use .isClass, .isTrait and .isOther */
+//     case ImplicitNode(tpe, _) => /* case for an implicit node, you can still use .isClass, .isTrait and .isOther */
+//     case _ => node match {
+//       case ClassNode(tpe, _) =>  /* case for a non-this, non-implicit Class node */
+//       case TraitNode(tpe, _) =>  /* case for a non-this, non-implicit Trait node */
+//       case OtherNode(tpe, _) =>  /* case for a non-this, non-implicit Other node */
+//     }
+//   }
+object Node        { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = Some((n.tpe, n.tpl)) }
+object ClassNode   { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isClassNode)   Some((n.tpe, n.tpl)) else None }
+object TraitNode   { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isTraitNode)   Some((n.tpe, n.tpl)) else None }
+object TypeNode    { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isTypeNode)    Some((n.tpe, n.tpl)) else None }
+object ObjectNode  { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isObjectNode)  Some((n.tpe, n.tpl)) else None }
+object OutsideNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOutsideNode) Some((n.tpe, n.tpl)) else None }
+object OtherNode   { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOtherNode)   Some((n.tpe, n.tpl)) else None }
+
+
+
+/** The node for the current class */
+case class ThisNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isThisNode = true }
+
+/** The usual node */
+case class NormalNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isNormalNode = true }
+
+/** A class or trait the this node can be converted to by an implicit conversion
+ *  TODO: I think it makes more sense to use the tpe links to templates instead of the TemplateEntity for implicit nodes
+ *  since some implicit conversions convert the class to complex types that cannot be represented as a single template
+ */
+case class ImplicitNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isImplicitNode = true }
+
+/** An outside node is shown in packages when a class from a different package makes it to the package diagram due to
+ * its relation to a class in the template (see @contentDiagram hideInheritedNodes annotation) */
+case class OutsideNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isOutsideNode = true }
+
+
+// Computing and offering node depth information
+class ContentDiagramDepth(pack: ContentDiagram) extends DepthInfo {
+  private[this] var _maxDepth = 0
+  private[this] var _nodeDepth = Map[Node, Int]()
+  private[this] var seedNodes = Set[Node]()
+  private[this] val invertedEdges: Map[Node, List[Node]] =
+    pack.edges.flatMap({case (node: Node, outgoing: List[Node]) => outgoing.map((_, node))}).groupBy(_._1).map({case (k, values) => (k, values.map(_._2))}).withDefaultValue(Nil)
+  private[this] val directEdges: Map[Node, List[Node]] = pack.edges.toMap.withDefaultValue(Nil)
+
+  // seed base nodes, to minimize noise - they can't all have parents, else there would only be cycles
+  seedNodes ++= pack.nodes.filter(directEdges(_).isEmpty)
+
+  while (!seedNodes.isEmpty) {
+    var newSeedNodes = Set[Node]()
+    for (node <- seedNodes) {
+      val depth = 1 + (-1 :: directEdges(node).map(_nodeDepth.getOrElse(_, -1))).max
+      if (depth != _nodeDepth.getOrElse(node, -1)) {
+        _nodeDepth += (node -> depth)
+        newSeedNodes ++= invertedEdges(node)
+        if (depth > _maxDepth) _maxDepth = depth
+      }
+    }
+    seedNodes = newSeedNodes
+  }
+
+  val maxDepth = _maxDepth
+}
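
The fixed-point depth computation in ContentDiagramDepth above can be hard to follow inline; the same idea, sketched
over plain strings (illustrative only, assuming `edges` maps each node to the nodes it points to):

    object DepthSketch {
      def maxDepth(nodes: List[String], edges: Map[String, List[String]]): Int = {
        val direct = edges.withDefaultValue(Nil)
        val inverted = edges.toList
          .flatMap { case (n, out) => out.map(_ -> n) }
          .groupBy(_._1).map { case (k, vs) => k -> vs.map(_._2) }
          .withDefaultValue(Nil)
        var depth = Map.empty[String, Int]
        var max = 0
        // seed with nodes that have no outgoing edges, then propagate depths along incoming edges until stable
        var seeds = nodes.filter(direct(_).isEmpty).toSet
        while (seeds.nonEmpty) {
          var next = Set.empty[String]
          for (n <- seeds) {
            val d = 1 + (-1 :: direct(n).map(depth.getOrElse(_, -1))).max
            if (d != depth.getOrElse(n, -1)) {
              depth += n -> d
              next ++= inverted(n)
              if (d > max) max = d
            }
          }
          seeds = next
        }
        max
      }
    }
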
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
new file mode 100644
index 0000000..44d8886
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
@@ -0,0 +1,257 @@
+package scala.tools.nsc.doc
+package model
+package diagram
+
+import model._
+import java.util.regex.{Pattern, Matcher}
+import scala.util.matching.Regex
+
+/**
+ *  This trait takes care of parsing @{inheritance, content}Diagram annotations
+ *
+ *  @author Damien Obrist
+ *  @author Vlad Ureche
+ */
+trait DiagramDirectiveParser {
+  this: ModelFactory with DiagramFactory with CommentFactory with TreeFactory =>
+
+  import this.global.definitions.AnyRefClass
+
+  ///// DIAGRAM FILTERS //////////////////////////////////////////////////////////////////////////////////////////////
+
+  /**
+   *  The DiagramFilter trait directs the diagram engine about the way the diagram should be displayed
+   *
+   *  Vlad: Here is an explanation I owe to people who use diagrams and cannot find a way to hide a specific class from
+   *  all diagrams at once. Why did I choose to let you control the diagrams only at class level? The reason is that
+   *  anything else would break separate scaladoc compilation:
+   *  if you have an "@diagram hideMyClass" annotation in class A and you run scaladoc on it along with its subclass B,
+   *  A will not appear in B's diagram. But if you scaladoc only on B, A's comment will not be parsed and the
+   *  instructions to hide class A from all diagrams will not be available. Thus I prefer to force you to control the
+   *  diagrams of each class locally. The problem does not appear with scalac, as scalac stores all its necessary
+   *  information (like scala signatures) serialized in the .class file. But we couldn't store doc comments in the class
+   *  file, could we? (Turns out we could, but that's another story)
+   *
+   *  Any flaming for this decision should go to scala-internals at googlegroups.com
+   */
+  trait DiagramFilter {
+    /** A flag to hide the diagram completely */
+    def hideDiagram: Boolean
+    /** Hide incoming implicit conversions (for type hierarchy diagrams) */
+    def hideIncomingImplicits: Boolean
+    /** Hide outgoing implicit conversions (for type hierarchy diagrams) */
+    def hideOutgoingImplicits: Boolean
+    /** Hide superclasses (for type hierarchy diagrams) */
+    def hideSuperclasses: Boolean
+    /** Hide subclasses (for type hierarchy diagrams) */
+    def hideSubclasses: Boolean
+    /** Hide nodes inherited from other objects/traits/packages (for content diagrams) */
+    def hideInheritedNodes: Boolean
+    /** Hide a node from the diagram */
+    def hideNode(clazz: Node): Boolean
+    /** Hide an edge from the diagram */
+    def hideEdge(clazz1: Node, clazz2: Node): Boolean
+  }
+
+  /** Main entry point into this trait: generate the filter for inheritance diagrams */
+  def makeInheritanceDiagramFilter(template: DocTemplateImpl): DiagramFilter = {
+
+    val defaultFilter =
+      if (template.isClass || template.isTrait || template.sym == AnyRefClass)
+        FullDiagram
+      else
+        NoDiagramAtAll
+
+    if (template.comment.isDefined)
+      makeDiagramFilter(template, template.comment.get.inheritDiagram, defaultFilter, isInheritanceDiagram = true)
+    else
+      defaultFilter
+  }
+
+  /** Main entry point into this trait: generate the filter for content diagrams */
+  def makeContentDiagramFilter(template: DocTemplateImpl): DiagramFilter = {
+    val defaultFilter = if (template.isPackage || template.isObject) FullDiagram else NoDiagramAtAll
+    if (template.comment.isDefined)
+      makeDiagramFilter(template, template.comment.get.contentDiagram, defaultFilter, isInheritanceDiagram = false)
+    else
+      defaultFilter
+  }
+
+  protected var tFilter = 0l
+  protected var tModel = 0l
+
+  /** Show the entire diagram, no filtering */
+  case object FullDiagram extends DiagramFilter {
+    val hideDiagram: Boolean = false
+    val hideIncomingImplicits: Boolean = false
+    val hideOutgoingImplicits: Boolean = false
+    val hideSuperclasses: Boolean = false
+    val hideSubclasses: Boolean = false
+    val hideInheritedNodes: Boolean = false
+    def hideNode(clazz: Node): Boolean = false
+    def hideEdge(clazz1: Node, clazz2: Node): Boolean = false
+  }
+
+  /** Hide the diagram completely, no need for special filtering */
+  case object NoDiagramAtAll extends DiagramFilter {
+    val hideDiagram: Boolean = true
+    val hideIncomingImplicits: Boolean = true
+    val hideOutgoingImplicits: Boolean = true
+    val hideSuperclasses: Boolean = true
+    val hideSubclasses: Boolean = true
+    val hideInheritedNodes: Boolean = true
+    def hideNode(clazz: Node): Boolean = true
+    def hideEdge(clazz1: Node, clazz2: Node): Boolean = true
+  }
+
+  /** The AnnotationDiagramFilter trait directs the diagram engine according to an annotation
+   *  TODO: Should document the annotation, for now see parseDiagramAnnotation in ModelFactory.scala */
+  case class AnnotationDiagramFilter(hideDiagram: Boolean,
+                                             hideIncomingImplicits: Boolean,
+                                             hideOutgoingImplicits: Boolean,
+                                             hideSuperclasses: Boolean,
+                                             hideSubclasses: Boolean,
+                                             hideInheritedNodes: Boolean,
+                                             hideNodesFilter: List[Pattern],
+                                             hideEdgesFilter: List[(Pattern, Pattern)]) extends DiagramFilter {
+
+    private[this] def getName(n: Node): String =
+      if (n.tpl.isDefined)
+        n.tpl.get.qualifiedName
+      else
+        n.name
+
+    def hideNode(clazz: Node): Boolean = {
+      val qualifiedName = getName(clazz)
+      for (hideFilter <- hideNodesFilter)
+        if (hideFilter.matcher(qualifiedName).matches) {
+          // println(hideFilter + ".matcher(" + qualifiedName + ").matches = " + hideFilter.matcher(qualifiedName).matches)
+          return true
+        }
+      false
+    }
+
+    def hideEdge(clazz1: Node, clazz2: Node): Boolean = {
+      val clazz1Name = getName(clazz1)
+      val clazz2Name = getName(clazz2)
+      for ((clazz1Filter, clazz2Filter) <- hideEdgesFilter) {
+        if (clazz1Filter.matcher(clazz1Name).matches &&
+            clazz2Filter.matcher(clazz2Name).matches) {
+          // println(clazz1Filter + ".matcher(" + clazz1Name + ").matches = " + clazz1Filter.matcher(clazz1Name).matches)
+          // println(clazz2Filter + ".matcher(" + clazz2Name + ").matches = " + clazz2Filter.matcher(clazz2Name).matches)
+          return true
+        }
+      }
+      false
+    }
+  }
+
+  // TODO: This could certainly be improved -- right now the only regex is *, but there's no way to match a single identifier
+  private val NodeSpecRegex = "\\\"[A-Za-z\\*][A-Za-z\\.\\*]*\\\""
+  private val NodeSpecPattern = Pattern.compile(NodeSpecRegex)
+  private val EdgeSpecRegex = "\\(" + NodeSpecRegex + "\\s*\\->\\s*" + NodeSpecRegex + "\\)"
+  // And the composed regexes:
+  private val HideNodesRegex = new Regex("^hideNodes(\\s*" + NodeSpecRegex + ")+$")
+  private val HideEdgesRegex = new Regex("^hideEdges(\\s*" + EdgeSpecRegex + ")+$")
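+  // For illustration (hypothetical patterns), entries the composed regexes above are meant to accept look like:
+  //   hideNodes "scala.Predef" "*Impl"
+  //   hideEdges ("*Node" -> "Node")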
+
+  private def makeDiagramFilter(template: DocTemplateImpl,
+                                directives: List[String],
+                                defaultFilter: DiagramFilter,
+                                isInheritanceDiagram: Boolean): DiagramFilter = directives match {
+
+    // if there are no specific diagram directives, return the default filter (either FullDiagram or NoDiagramAtAll)
+    case Nil =>
+      defaultFilter
+
+    // compute the exact filters. By including the annotation, the diagram is automatically added
+    case _ =>
+      tFilter -= System.currentTimeMillis
+      var hideDiagram0: Boolean = false
+      var hideIncomingImplicits0: Boolean = false
+      var hideOutgoingImplicits0: Boolean = false
+      var hideSuperclasses0: Boolean = false
+      var hideSubclasses0: Boolean = false
+      var hideInheritedNodes0: Boolean = false
+      var hideNodesFilter0: List[Pattern] = Nil
+      var hideEdgesFilter0: List[(Pattern, Pattern)] = Nil
+
+      def warning(message: String) = {
+        // we need the position from the package object (ideally from its comment)
+        val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym
+        assert((sym != global.NoSymbol) || (sym == global.rootMirror.RootPackage))
+        global.reporter.warning(sym.pos, message)
+      }
+
+      def preparePattern(className: String) =
+        "^" + className.stripPrefix("\"").stripSuffix("\"").replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*") + "$"
+
+      // separate entries:
+      val entries = directives.foldRight("")(_ + " " + _).split(",").map(_.trim)
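+      // e.g. a single directive "hideSuperclasses, hideNodes \"scala.*\"" is split into
+      // the entries "hideSuperclasses" and "hideNodes \"scala.*\"" (illustrative example)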
+      for (entry <- entries)
+        entry match {
+          case "hideDiagram" =>
+              hideDiagram0 = true
+          case "hideIncomingImplicits" if isInheritanceDiagram =>
+              hideIncomingImplicits0 = true
+          case "hideOutgoingImplicits" if isInheritanceDiagram  =>
+              hideOutgoingImplicits0 = true
+          case "hideSuperclasses" if isInheritanceDiagram =>
+              hideSuperclasses0 = true
+          case "hideSubclasses" if isInheritanceDiagram =>
+              hideSubclasses0 = true
+          case "hideInheritedNodes" if !isInheritanceDiagram =>
+              hideInheritedNodes0 = true
+          case HideNodesRegex(last) =>
+            val matcher = NodeSpecPattern.matcher(entry)
+            while (matcher.find()) {
+              val classPattern = Pattern.compile(preparePattern(matcher.group()))
+              hideNodesFilter0 ::= classPattern
+            }
+          case HideEdgesRegex(last) =>
+            val matcher = NodeSpecPattern.matcher(entry)
+            while (matcher.find()) {
+              val class1Pattern = Pattern.compile(preparePattern(matcher.group()))
+              assert(matcher.find()) // it's got to be there, just matched it!
+              val class2Pattern = Pattern.compile(preparePattern(matcher.group()))
+              hideEdgesFilter0 ::= ((class1Pattern, class2Pattern))
+            }
+          case "" =>
+            // don't need to do anything about it
+          case _ =>
+            warning("Could not understand diagram annotation in " + template.kind + " " + template.qualifiedName +
+              ": unmatched entry \"" + entry + "\".\n" +
+              "  This could be because:\n" +
+              "   - you forgot to separate entries by commas\n" +
+              "   - you used a tag that is not allowed in the current context (like @contentDiagram hideSuperclasses)\n"+
+              "   - you did not use one of the allowed tags (see docs.scala-lang.org for scaladoc annotations)")
+        }
+      val result =
+        if  (hideDiagram0)
+          NoDiagramAtAll
+        else if ((hideNodesFilter0.isEmpty) &&
+                 (hideEdgesFilter0.isEmpty) &&
+                 (hideIncomingImplicits0 == false) &&
+                 (hideOutgoingImplicits0 == false) &&
+                 (hideSuperclasses0 == false) &&
+                 (hideSubclasses0 == false) &&
+                 (hideInheritedNodes0 == false) &&
+                 (hideDiagram0 == false))
+          FullDiagram
+        else
+          AnnotationDiagramFilter(
+            hideDiagram = hideDiagram0,
+            hideIncomingImplicits = hideIncomingImplicits0,
+            hideOutgoingImplicits = hideOutgoingImplicits0,
+            hideSuperclasses = hideSuperclasses0,
+            hideSubclasses = hideSubclasses0,
+            hideInheritedNodes = hideInheritedNodes0,
+            hideNodesFilter = hideNodesFilter0,
+            hideEdgesFilter = hideEdgesFilter0)
+
+      if (settings.docDiagramsDebug && result != NoDiagramAtAll && result != FullDiagram)
+        settings.printMsg(template.kind + " " + template.qualifiedName + " filter: " + result)
+      tFilter += System.currentTimeMillis
+
+      result
+  }
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
new file mode 100644
index 0000000..87d7ece
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
@@ -0,0 +1,270 @@
+package scala.tools.nsc.doc
+package model
+package diagram
+
+import model._
+
+// statistics
+import  html.page.diagram.DiagramStats
+
+import scala.collection.immutable.SortedMap
+
+/**
+ *  This trait takes care of generating the diagram for classes and packages
+ *
+ *  @author Damien Obrist
+ *  @author Vlad Ureche
+ */
+trait DiagramFactory extends DiagramDirectiveParser {
+  this: ModelFactory with ModelFactoryTypeSupport with DiagramFactory with CommentFactory with TreeFactory =>
+
+  import this.global.definitions._
+  import this.global._
+
+  // the following can be used for hardcoding different relations into the diagram, for bootstrapping purposes
+  def aggregationNode(text: String) =
+    NormalNode(new TypeEntity { val name = text; val refEntity = SortedMap[Int, (base.LinkTo, Int)]() }, None)()
+
+  /** Create the inheritance diagram for this template */
+  def makeInheritanceDiagram(tpl: DocTemplateImpl): Option[Diagram] = {
+
+    tFilter = 0
+    tModel = -System.currentTimeMillis
+
+    // the diagram filter
+    val diagramFilter = makeInheritanceDiagramFilter(tpl)
+
+    def implicitTooltip(from: DocTemplateEntity, to: TemplateEntity, conv: ImplicitConversion) =
+      Some(from.qualifiedName + " can be implicitly converted to " + conv.targetType + " by the implicit method "
+        + conv.conversionShortName + " in " + conv.convertorOwner.kind + " " + conv.convertorOwner.qualifiedName)
+
+    val result =
+      if (diagramFilter == NoDiagramAtAll)
+        None
+      else {
+        // the main node
+        val thisNode = ThisNode(tpl.resultType, Some(tpl))(Some(tpl.qualifiedName + " (this " + tpl.kind + ")"))
+
+        // superclasses
+        val superclasses: List[Node] =
+          tpl.parentTypes.collect {
+            case p: (TemplateEntity, TypeEntity) if !classExcluded(p._1) => NormalNode(p._2, Some(p._1))()
+          }.reverse
+
+        // incoming implicit conversions
+        lazy val incomingImplicitNodes = tpl.incomingImplicitlyConvertedClasses.map {
+          case (incomingTpl, conv) =>
+            ImplicitNode(makeType(incomingTpl.sym.tpe, tpl), Some(incomingTpl))(implicitTooltip(from=incomingTpl, to=tpl, conv=conv))
+        }
+
+        // subclasses
+        var subclasses: List[Node] =
+          tpl.directSubClasses.collect {
+            case d: TemplateImpl if !classExcluded(d) => NormalNode(makeType(d.sym.tpe, tpl), Some(d))()
+          }.sortBy(_.tpl.get.name)(implicitly[Ordering[String]].reverse)
+
+        // outgoing implicit conversions
+        lazy val outgoingImplicitNodes = tpl.outgoingImplicitlyConvertedClasses.map {
+          case (outgoingTpl, outgoingType, conv) =>
+            ImplicitNode(outgoingType, Some(outgoingTpl))(implicitTooltip(from=tpl, to=tpl, conv=conv))
+        }
+
+        // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams.
+        // Currently, it's possible to leave nodes and edges out, but there's no way to create new nodes and edges
+        // The implementation would need to add the annotations and the logic to select nodes (or create new ones)
+        // and add edges to the diagram -- I bet it wouldn't take too long for someone to do it (one or two days
+        // at most) and it would be a great add to the diagrams.
+        if (tpl.sym == AnyRefClass)
+          subclasses = List(aggregationNode("All user-defined classes and traits"))
+
+        val filteredSuperclasses = if (diagramFilter.hideSuperclasses) Nil else superclasses
+        val filteredIncomingImplicits = if (diagramFilter.hideIncomingImplicits) Nil else incomingImplicitNodes
+        val filteredSubclasses = if (diagramFilter.hideSubclasses) Nil else subclasses
+        val filteredImplicitOutgoingNodes = if (diagramFilter.hideOutgoingImplicits) Nil else outgoingImplicitNodes
+
+        // final diagram filter
+        filterDiagram(InheritanceDiagram(thisNode, filteredSuperclasses.reverse, filteredSubclasses.reverse, filteredIncomingImplicits, filteredImplicitOutgoingNodes), diagramFilter)
+      }
+
+    tModel += System.currentTimeMillis
+    DiagramStats.addFilterTime(tFilter)
+    DiagramStats.addModelTime(tModel-tFilter)
+
+    result
+  }
+
+  /** Create the content diagram for this template */
+  def makeContentDiagram(pack: DocTemplateImpl): Option[Diagram] = {
+
+    tFilter = 0
+    tModel = -System.currentTimeMillis
+
+    // the diagram filter
+    val diagramFilter = makeContentDiagramFilter(pack)
+
+    val result =
+      if (diagramFilter == NoDiagramAtAll)
+        None
+      else {
+        var mapNodes = Map[TemplateEntity, Node]()
+        var nodesShown = Set[TemplateEntity]()
+        var edgesAll = List[(TemplateEntity, List[TemplateEntity])]()
+
+        // nodesAll is the entire set of classes and traits in the package; it is the superset of nodes in the diagram.
+        // We collect classes, traits and objects without a companion, which are usually used as values (e.g. scala.None).
+        val nodesAll = pack.members collect {
+          case d: TemplateEntity if ((!diagramFilter.hideInheritedNodes) || (d.inTemplate == pack)) => d
+        }
+
+        def listSuperClasses(member: MemberTemplateImpl) = {
+          // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to add nodes to diagrams.
+          (pack.sym, member.sym) match {
+            case (ScalaPackage, NullClass) =>
+              List(makeTemplate(AnyRefClass))
+            case (ScalaPackage, NothingClass) =>
+              (List(NullClass) ::: ScalaValueClasses) map { makeTemplate(_) }
+            case _ =>
+              member.parentTypes map {
+                case (template, tpe) => template
+              } filter {
+                nodesAll.contains(_)
+              }
+          }
+        }
+
+        // for each node, add the edges to its superclasses
+        for (node <- nodesAll if !classExcluded(node)) {
+          node match {
+            case dnode: MemberTemplateImpl =>
+              val superClasses = listSuperClasses(dnode)
+
+              if (!superClasses.isEmpty) {
+                nodesShown += dnode
+                nodesShown ++= superClasses
+              }
+              edgesAll ::= dnode -> superClasses
+            case _ =>
+          }
+
+          mapNodes += node -> (
+            if (node.inTemplate == pack && (node.isDocTemplate || node.isAbstractType || node.isAliasType))
+              NormalNode(node.resultType, Some(node))()
+            else
+              OutsideNode(node.resultType, Some(node))()
+          )
+        }
+
+        if (nodesShown.isEmpty)
+          None
+        else {
+          val nodes = nodesAll.filter(nodesShown.contains(_)).flatMap(mapNodes.get(_))
+          val edges = edgesAll.map {
+            case (entity, superClasses) => {
+              (mapNodes(entity), superClasses flatMap { mapNodes.get(_) })
+            }
+          } filterNot {
+            case (node, superClassNodes) => superClassNodes.isEmpty
+          }
+
+          val diagram =
+            // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams.
+            if (pack.sym == ScalaPackage) {
+              // Tried it, but it doesn't look good:
+              // var anyRefSubtypes: List[Node] = List(mapNodes(makeTemplate(AnyRefClass)))
+              // var dirty = true
+              // do {
+              //   val length = anyRefSubtypes.length
+              //   anyRefSubtypes :::= edges.collect { case p: (Node, List[Node]) if p._2.exists(anyRefSubtypes.contains(_)) => p._1 }
+              //   anyRefSubtypes = anyRefSubtypes.distinct
+              //   dirty = (anyRefSubtypes.length != length)
+              // } while (dirty)
+              // println(anyRefSubtypes)
+              val anyRefSubtypes = Nil
+              val allAnyRefTypes = aggregationNode("All AnyRef subtypes")
+              val nullTemplate = makeTemplate(NullClass)
+              if (nullTemplate.isDocTemplate)
+                ContentDiagram(allAnyRefTypes::nodes, (mapNodes(nullTemplate), allAnyRefTypes::anyRefSubtypes)::edges.filterNot(_._1.tpl == Some(nullTemplate)))
+              else
+                ContentDiagram(nodes, edges)
+            } else
+              ContentDiagram(nodes, edges)
+
+          filterDiagram(diagram, diagramFilter)
+        }
+      }
+
+    tModel += System.currentTimeMillis
+    DiagramStats.addFilterTime(tFilter)
+    DiagramStats.addModelTime(tModel-tFilter)
+
+    result
+  }
+
+  /** Diagram filtering logic */
+  private def filterDiagram(diagram: Diagram, diagramFilter: DiagramFilter): Option[Diagram] = {
+    tFilter -= System.currentTimeMillis
+
+    val result =
+      if (diagramFilter == FullDiagram)
+        Some(diagram)
+      else if (diagramFilter == NoDiagramAtAll)
+        None
+      else {
+        // Final diagram, with the filtered nodes and edges
+        diagram match {
+          case InheritanceDiagram(thisNode, _, _, _, _) if diagramFilter.hideNode(thisNode) =>
+            None
+
+          case InheritanceDiagram(thisNode, superClasses, subClasses, incomingImplicits, outgoingImplicits) =>
+
+            def hideIncoming(node: Node): Boolean =
+              diagramFilter.hideNode(node) || diagramFilter.hideEdge(node, thisNode)
+
+            def hideOutgoing(node: Node): Boolean =
+              diagramFilter.hideNode(node) || diagramFilter.hideEdge(thisNode, node)
+
+            // println(thisNode)
+            // println(superClasses.map(cl => "super: " + cl + "  " + hideOutgoing(cl)).mkString("\n"))
+            // println(subClasses.map(cl => "sub: " + cl + "  " + hideIncoming(cl)).mkString("\n"))
+            Some(InheritanceDiagram(thisNode,
+                             superClasses.filterNot(hideOutgoing(_)),
+                             subClasses.filterNot(hideIncoming(_)),
+                             incomingImplicits.filterNot(hideIncoming(_)),
+                             outgoingImplicits.filterNot(hideOutgoing(_))))
+
+          case ContentDiagram(nodes0, edges0) =>
+            // Filter out all edges that:
+            // (1) start from a hidden node
+            // (2) are manually hidden by the user
+            // (3) end at a hidden node
+            val edges: List[(Node, List[Node])] =
+              diagram.edges.flatMap({
+                case (source, dests) if !diagramFilter.hideNode(source) =>
+                  val dests2 = dests.collect({ case dest if (!(diagramFilter.hideEdge(source, dest) || diagramFilter.hideNode(dest))) => dest })
+                  if (dests2 != Nil)
+                    List((source, dests2))
+                  else
+                    Nil
+                case _ => Nil
+              })
+
+            // Only show the non-isolated nodes
+            // TODO: Decide if we really want to hide package members, I'm not sure that's a good idea (!!!)
+            // TODO: Does .distinct cause any stability issues?
+            val sourceNodes = edges.map(_._1)
+            val sinkNodes = edges.map(_._2).flatten
+            val nodes = (sourceNodes ::: sinkNodes).distinct
+            Some(ContentDiagram(nodes, edges))
+        }
+      }
+
+    tFilter += System.currentTimeMillis
+
+    // eliminate all empty diagrams
+    if (result.isDefined && result.get.edges.forall(_._2.isEmpty))
+      None
+    else
+      result
+  }
+
+}
diff --git a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
new file mode 100644
index 0000000..70423cc
--- /dev/null
+++ b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
@@ -0,0 +1,203 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Vlad Ureche
+ */
+
+package scala.tools.partest
+
+import scala.tools.nsc
+import scala.tools.nsc._
+import scala.tools.cmd.CommandLineParser
+import scala.tools.nsc.doc.{ DocFactory, Universe }
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.nsc.doc.base.comment._
+import scala.tools.nsc.reporters.ConsoleReporter
+
+/** A class for testing scaladoc model generation
+ *   - you need to specify the code in the `code` method
+ *   - you need to override the testModel method to test the model
+ *   - you may specify extra parameters to send to scaladoc in `scaladocSettings`
+ * {{{
+      import scala.tools.nsc.doc.model._
+      import scala.tools.partest.ScaladocModelTest
+
+      object Test extends ScaladocModelTest {
+
+        override def code = """ ... """ // or override def resourceFile = "<file>.scala" (from test/scaladoc/resources)
+        def scaladocSettings = " ... "
+        def testModel(rootPackage: Package) = {
+          // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), _object(s), _method(s), _value(s))
+          import access._
+
+          // just need to check the member exists, access methods will throw an error if there's a problem
+          rootPackage._package("scala")._package("test")._class("C")._method("foo")
+        }
+      }
+ * }}}
+ */
+abstract class ScaladocModelTest extends DirectTest {
+
+  /** Override this to give scaladoc command line parameters */
+  def scaladocSettings: String
+
+  /** Override this to test the model */
+  def testModel(root: Package): Unit
+
+  /** Override to feed a file in resources to scaladoc */
+  def resourceFile: String = null
+
+  /** Override to feed code into scaladoc */
+  override def code =
+    if (resourceFile ne null)
+      io.File(resourcePath + "/" + resourceFile).slurp()
+    else
+      sys.error("Scaladoc Model Test: You need to give a file or some code to feed to scaladoc!")
+
+  def resourcePath = io.Directory(sys.props("partest.cwd") + "/../resources")
+
+  // Implementation follows:
+  override def extraSettings: String = "-usejavacp"
+
+  override def show(): Unit = {
+    // redirect err to out, for logging
+    val prevErr = System.err
+    System.setErr(System.out)
+
+    try {
+      // 1 - compile with scaladoc and get the model out
+      val universe = model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")})
+      // 2 - check the model generated
+      testModel(universe.rootPackage)
+      println("Done.")
+    } catch {
+      case e: Exception =>
+        println(e)
+        e.printStackTrace
+    }
+    // set err back to the real err handler
+    System.setErr(prevErr)
+  }
+
+  private[this] var settings: doc.Settings = null
+
+  // create a new scaladoc compiler
+  private[this] def newDocFactory: DocFactory = {
+    settings = new doc.Settings(_ => ())
+    settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"!
+    val args = extraSettings + " " + scaladocSettings
+    new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) // side-effecting, I think
+    val docFact = new DocFactory(new ConsoleReporter(settings), settings)
+    docFact
+  }
+
+  // compile with scaladoc and output the result
+  def model: Option[Universe] = newDocFactory.makeUniverse(Right(code))
+
+  // so we don't get the newSettings warning
+  override def isDebug = false
+
+  // finally, enable easy navigation inside the entities
+  object access {
+
+    implicit class TemplateAccess(tpl: DocTemplateEntity) {
+      def _class(name: String): DocTemplateEntity = getTheFirst(_classes(name), tpl.qualifiedName + ".class(" + name + ")")
+      def _classes(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case c: DocTemplateEntity with Class => c})
+
+      def _classMbr(name: String): MemberTemplateEntity = getTheFirst(_classesMbr(name), tpl.qualifiedName + ".classMember(" + name + ")")
+      def _classesMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case c: MemberTemplateEntity if c.isClass => c})
+
+      def _trait(name: String): DocTemplateEntity = getTheFirst(_traits(name), tpl.qualifiedName + ".trait(" + name + ")")
+      def _traits(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: DocTemplateEntity with Trait => t})
+
+      def _traitMbr(name: String): MemberTemplateEntity = getTheFirst(_traitsMbr(name), tpl.qualifiedName + ".traitMember(" + name + ")")
+      def _traitsMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: MemberTemplateEntity if t.isTrait => t})
+
+      def _object(name: String): DocTemplateEntity = getTheFirst(_objects(name), tpl.qualifiedName + ".object(" + name + ")")
+      def _objects(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case o: DocTemplateEntity with Object => o})
+
+      def _objectMbr(name: String): MemberTemplateEntity = getTheFirst(_objectsMbr(name), tpl.qualifiedName + ".objectMember(" + name + ")")
+      def _objectsMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case o: MemberTemplateEntity if o.isObject => o})
+
+      def _method(name: String): Def = getTheFirst(_methods(name), tpl.qualifiedName + ".method(" + name + ")")
+      def _methods(name: String): List[Def] = tpl.methods.filter(_.name == name)
+
+      def _value(name: String): Val = getTheFirst(_values(name), tpl.qualifiedName + ".value(" + name + ")")
+      def _values(name: String): List[Val] = tpl.values.filter(_.name == name)
+
+      def _conversion(name: String): ImplicitConversion = getTheFirst(_conversions(name), tpl.qualifiedName + ".conversion(" + name + ")")
+      def _conversions(name: String): List[ImplicitConversion] = tpl.conversions.filter(_.conversionQualifiedName == name)
+
+      def _absType(name: String): MemberEntity = getTheFirst(_absTypes(name), tpl.qualifiedName + ".abstractType(" + name + ")")
+      def _absTypes(name: String): List[MemberEntity] = tpl.members.filter(mbr => mbr.name == name && mbr.isAbstractType)
+
+      def _absTypeTpl(name: String): DocTemplateEntity = getTheFirst(_absTypeTpls(name), tpl.qualifiedName + ".abstractType(" + name + ")")
+      def _absTypeTpls(name: String): List[DocTemplateEntity] = tpl.members.collect({ case dtpl: DocTemplateEntity with AbstractType if dtpl.name == name => dtpl })
+
+      def _aliasType(name: String): MemberEntity = getTheFirst(_aliasTypes(name), tpl.qualifiedName + ".aliasType(" + name + ")")
+      def _aliasTypes(name: String): List[MemberEntity] = tpl.members.filter(mbr => mbr.name == name && mbr.isAliasType)
+
+      def _aliasTypeTpl(name: String): DocTemplateEntity = getTheFirst(_aliasTypeTpls(name), tpl.qualifiedName + ".aliasType(" + name + ")")
+      def _aliasTypeTpls(name: String): List[DocTemplateEntity] = tpl.members.collect({ case dtpl: DocTemplateEntity with AliasType if dtpl.name == name => dtpl })
+    }
+
+    trait WithMembers {
+      def members: List[MemberEntity]
+      def _member(name: String): MemberEntity = getTheFirst(_members(name), this.toString + ".member(" + name + ")")
+      def _members(name: String): List[MemberEntity] = members.filter(_.name == name)
+    }
+    implicit class PackageAccess(pack: Package) extends TemplateAccess(pack) {
+      def _package(name: String): Package = getTheFirst(_packages(name), pack.qualifiedName + ".package(" + name + ")")
+      def _packages(name: String): List[Package] = pack.packages.filter(_.name == name)
+    }
+    implicit class DocTemplateEntityMembers(val underlying: DocTemplateEntity) extends WithMembers {
+      def members = underlying.members
+    }
+    implicit class ImplicitConversionMembers(val underlying: ImplicitConversion) extends WithMembers {
+      def members = underlying.members
+    }
+
+    def getTheFirst[T](list: List[T], expl: String): T = list.length match {
+      case 1 => list.head
+      case 0 => sys.error("Error getting " + expl + ": No such element.")
+      case _ => sys.error("Error getting " + expl + ": " + list.length + " elements with this name. " +
+                  "All elements in list: [" + list.map({
+                    case ent: Entity => ent.kind + " " + ent.qualifiedName
+                    case other => other.toString
+                  }).mkString(", ") + "]")
+    }
+
+    def extractCommentText(c: Any) = {
+      def extractText(body: Any): String = body match {
+        case s: String  => s
+        case s: Seq[_]  => s.toList.map(extractText(_)).mkString
+        case p: Product => p.productIterator.toList.map(extractText(_)).mkString
+        case _          => ""
+      }
+      c match {
+        case c: Comment =>
+          extractText(c.body)
+        case b: Body =>
+          extractText(b)
+      }
+    }
+
+    def countLinks(c: Comment, p: EntityLink => Boolean) = {
+      def countLinks(body: Any): Int = body match {
+        case el: EntityLink if p(el) => 1
+        case s: Seq[_]  => s.toList.map(countLinks(_)).sum
+        case p: Product => p.productIterator.toList.map(countLinks(_)).sum
+        case _          => 0
+      }
+      countLinks(c.body)
+    }
+
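+    /** Hypothetical usage sketch (assumes the tested sources define a class `C` whose
+     *  DocTemplateEntity exposes an inheritanceDiagram with two nodes and one edge):
+     *  {{{
+     *  val c = rootPackage._class("C")
+     *  testDiagram(c, c.inheritanceDiagram, nodes = 2, edges = 1)
+     *  }}}
+     */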
+    def testDiagram(doc: DocTemplateEntity, diag: Option[Diagram], nodes: Int, edges: Int) = {
+      assert(diag.isDefined, doc.qualifiedName + " diagram missing")
+      assert(diag.get.nodes.length == nodes,
+             doc.qualifiedName + "'s diagram: node count " + diag.get.nodes.length + " == " + nodes)
+      assert(diag.get.edges.map(_._2.length).sum == edges,
+             doc.qualifiedName + "'s diagram: edge count " + diag.get.edges.length + " == " + edges)
+    }
+  }
+}
diff --git a/src/scalap/decoder.properties b/src/scalap/decoder.properties
index 5fcbfaf..961c60f 100644
--- a/src/scalap/decoder.properties
+++ b/src/scalap/decoder.properties
@@ -1,2 +1,2 @@
 version.number=2.0.1
-copyright.string=(c) 2002-2011 LAMP/EPFL
+copyright.string=(c) 2002-2013 LAMP/EPFL
diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala
index a151e30..c375a5b 100644
--- a/src/scalap/scala/tools/scalap/Arguments.scala
+++ b/src/scalap/scala/tools/scalap/Arguments.scala
@@ -47,8 +47,8 @@ object Arguments {
     }
 
     def parseBinding(str: String, separator: Char): (String, String) = (str indexOf separator) match {
-      case -1   => argumentError("missing '" + separator + "' in binding '" + str + "'") ; Pair("", "")
-      case idx  => Pair((str take idx).trim, (str drop (idx + 1)).trim)
+      case -1   => argumentError("missing '" + separator + "' in binding '" + str + "'") ; ("", "")
+      case idx  => ((str take idx).trim, (str drop (idx + 1)).trim)
     }
 
     def parse(args: Array[String]): Arguments = {
@@ -87,7 +87,7 @@ object Arguments {
               i += 2
             }
           } else {
-            var iter = prefixes.iterator
+            val iter = prefixes.iterator
             val j = i
             while ((i == j) && iter.hasNext) {
               val prefix = iter.next
@@ -142,7 +142,7 @@ class Arguments {
     if (key.length > 0)
       bindings.getOrElseUpdate(tag, new mutable.HashMap)(key) = value
 
-  def addBinding(tag: String, binding: Pair[String, String]): Unit =
+  def addBinding(tag: String, binding: Tuple2[String, String]): Unit =
     addBinding(tag, binding._1, binding._2)
 
   def addOther(arg: String): Unit = others += arg
@@ -164,5 +164,4 @@ class Arguments {
     bindings get option flatMap (_ get key)
 
   def getOthers: List[String] = others.toList
-
 }
diff --git a/src/scalap/scala/tools/scalap/ByteArrayReader.scala b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
index bb00162..cf16087 100644
--- a/src/scalap/scala/tools/scalap/ByteArrayReader.scala
+++ b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
@@ -6,11 +6,11 @@
 */
 
 
-package scala.tools.scalap
+package scala
+package tools.scalap
 
 
 class ByteArrayReader(content: Array[Byte]) {
-  import java.io._
 
   /** the buffer containing the file
    */
diff --git a/src/scalap/scala/tools/scalap/Classfile.scala b/src/scalap/scala/tools/scalap/Classfile.scala
index 8082b6b..f62df28 100644
--- a/src/scalap/scala/tools/scalap/Classfile.scala
+++ b/src/scalap/scala/tools/scalap/Classfile.scala
@@ -32,7 +32,7 @@ class Classfile(in: ByteArrayReader) {
     var attribs: List[Attribute] = Nil
     var i = 0
     while (i < n) {
-      attribs = Attribute(in.nextChar, in.nextBytes(in.nextInt)) :: attribs
+      attribs = Attribute(in.nextChar.toInt, in.nextBytes(in.nextInt)) :: attribs
       i = i + 1
     }
     attribs
@@ -43,7 +43,7 @@ class Classfile(in: ByteArrayReader) {
     var members: List[Member] = Nil
     var i = 0
     while (i < n) {
-      members = Member(field, in.nextChar, in.nextChar, in.nextChar, readAttribs) :: members
+      members = Member(field, in.nextChar.toInt, in.nextChar.toInt, in.nextChar.toInt, readAttribs) :: members
       i = i + 1
     }
     members
@@ -54,7 +54,7 @@ class Classfile(in: ByteArrayReader) {
     var intfs: List[Int] = Nil
     var i = 0
     while (i < n) {
-      intfs = in.nextChar :: intfs
+      intfs = in.nextChar.toInt :: intfs
       i = i + 1
     }
     intfs
@@ -81,7 +81,7 @@ class Classfile(in: ByteArrayReader) {
     case object Empty extends PoolEntry(0) { }
 
     val entries = {
-      val pool = new Array[PoolEntry](in.nextChar)
+      val pool = new Array[PoolEntry](in.nextChar.toInt)
       var i = 1
       while (i < pool.length) {
         val tag = in.nextByte
@@ -92,7 +92,7 @@ class Classfile(in: ByteArrayReader) {
           pool(i) = Empty
         }
         else pool(i) = tag match {
-          case CONSTANT_UTF8            => UTF8(in.nextUTF8(in.nextChar))
+          case CONSTANT_UTF8            => UTF8(in.nextUTF8(in.nextChar.toInt))
           case CONSTANT_UNICODE         => in.skip(in.nextChar) ; Empty
           case CONSTANT_CLASS           => ClassRef(in.nextChar)
           case CONSTANT_STRING          => StringConst(in.nextChar)
diff --git a/src/scalap/scala/tools/scalap/CodeWriter.scala b/src/scalap/scala/tools/scalap/CodeWriter.scala
index 8254c2d..1680500 100644
--- a/src/scalap/scala/tools/scalap/CodeWriter.scala
+++ b/src/scalap/scala/tools/scalap/CodeWriter.scala
@@ -6,7 +6,8 @@
 */
 
 
-package scala.tools.scalap
+package scala
+package tools.scalap
 
 import java.io._
 
@@ -97,9 +98,9 @@ class CodeWriter(writer: Writer) {
 
   def print(value: Boolean): CodeWriter = print(String.valueOf(value))
 
-  def print(value: Byte): CodeWriter = print(String.valueOf(value))
+  def print(value: Byte): CodeWriter = print(String.valueOf(value.toInt))
 
-  def print(value: Short): CodeWriter = print(String.valueOf(value))
+  def print(value: Short): CodeWriter = print(String.valueOf(value.toInt))
 
   def print(value: Char): CodeWriter = print(String.valueOf(value))
 
diff --git a/src/scalap/scala/tools/scalap/JavaWriter.scala b/src/scalap/scala/tools/scalap/JavaWriter.scala
index d64c54a..772cf6e 100644
--- a/src/scalap/scala/tools/scalap/JavaWriter.scala
+++ b/src/scalap/scala/tools/scalap/JavaWriter.scala
@@ -17,16 +17,15 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer
 
   def flagsToStr(clazz: Boolean, flags: Int): String = {
     val buffer = new StringBuffer()
-    var x: StringBuffer = buffer
     if (((flags & 0x0007) == 0) &&
       ((flags & 0x0002) != 0))
-      x = buffer.append("private ")
+      buffer.append("private ")
     if ((flags & 0x0004) != 0)
-      x = buffer.append("protected ")
+      buffer.append("protected ")
     if ((flags & 0x0010) != 0)
-      x = buffer.append("final ")
+      buffer.append("final ")
     if ((flags & 0x0400) != 0)
-      x = if (clazz) buffer.append("abstract ")
+      if (clazz) buffer.append("abstract ")
           else buffer.append("/*deferred*/ ")
     buffer.toString()
   }
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 90f8cb8..c72f416 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -5,7 +5,8 @@
 **
 */
 
-package scala.tools.scalap
+package scala
+package tools.scalap
 
 import java.io.{ PrintStream, OutputStreamWriter, ByteArrayOutputStream }
 import scala.reflect.NameTransformer
@@ -13,7 +14,7 @@ import scalax.rules.scalasig._
 import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
 import scala.tools.util.PathResolver
 import ClassPath.DefaultJavaContext
-import scala.tools.nsc.io.{ PlainFile, AbstractFile }
+import scala.tools.nsc.io.AbstractFile
 
 /**The main object used to execute scalap on the command-line.
  *
diff --git a/src/scalap/scala/tools/scalap/MetaParser.scala b/src/scalap/scala/tools/scalap/MetaParser.scala
index 00678ab..1ebf862 100644
--- a/src/scalap/scala/tools/scalap/MetaParser.scala
+++ b/src/scalap/scala/tools/scalap/MetaParser.scala
@@ -6,9 +6,9 @@
 */
 
 
-package scala.tools.scalap
+package scala
+package tools.scalap
 
-import java.io._
 import java.util._
 
 
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Arrows.scala b/src/scalap/scala/tools/scalap/scalax/rules/Arrows.scala
deleted file mode 100644
index f8761ca..0000000
--- a/src/scalap/scala/tools/scalap/scalax/rules/Arrows.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-package scala.tools.scalap
-package scalax
-package rules
-
-trait Arrows extends UnitFunctors  {
-  type Arr[-A, +B] <: Arrow[A, B]
-  type M[+B] = Arr[Nothing, B]
-
-  def arrow[A, B](f : A => B) : Arr[A, B]
-  def diag[A] = arrow[A, (A, A)] { a => (a, a) }
-
-  override def unit[B](b : => B) : M[B] = arrow { any : Any => b }
-
-  trait Arrow[-A, +B] extends Functor[B] { this : Arr[A, B] =>
-
-    def map[C](f : B => C) = comp(arrow(f))
-    def comp[C](bc : => Arr[B, C]) : Arr[A, C]
-    def fst[C] : Arr[(A, C), (B, C)]
-  }
-}
-
-trait ApplicativeArrows extends Arrows {
-  type Arr[-A, +B] <: ApplicativeArrow[A, B]
-
-  def app[A, B] : Arr[(Arr[A, B], A), B]
-
-  trait ApplicativeArrow[-A, +B] extends Arrow[A, B] { self : Arr[A, B] =>
-    def flatMap[SubA <: A, C](f : B => Arr[SubA, C]) : Arr[SubA, C] =
-      diag[SubA].comp(map(f).fst[SubA]).comp(app[SubA, C])
-  }
-}
-
-trait ArrowMonads extends ApplicativeArrows with Monads {
-  type Arr[-A, +B] <: ApplicativeArrow[A, B] with Monad[B]
-
-  override def unit[A](a : => A) : M[A] = arrow[Unit, A](Unit => a)
-}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Functors.scala b/src/scalap/scala/tools/scalap/scalax/rules/Functors.scala
deleted file mode 100644
index aa852c1..0000000
--- a/src/scalap/scala/tools/scalap/scalax/rules/Functors.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-// -----------------------------------------------------------------------------
-//
-//  Scalax - The Scala Community Library
-//  Copyright (c) 2005-8 The Scalax Project. All rights reserved.
-//
-//  The primary distribution site is http://scalax.scalaforge.org/
-//
-//  This software is released under the terms of the Revised BSD License.
-//  There is NO WARRANTY.  See the file LICENSE for the full text.
-//
-// -----------------------------------------------------------------------------
-
-package scala.tools.scalap
-package scalax
-package rules
-
-trait Functor[+A] {
-  type M[+A] <: Functor[A]
-  def map[B](f : A => B) : M[B]
-}
-
-trait Filter[+A] {
-  type M[+A] <: Filter[A]
-  def filter(f : A => Boolean) : M[A]
-}
-
-trait Plus[+A] {
-  type M[+A] <: Plus[A]
-  def plus[B >: A](other : => M[B]) : M[B]
-}
-
-trait OrElse[+A] {
-  type M[+A] <: OrElse[A]
-  def orElse[B >: A](other : => M[B]) : M[B]
-}
-
-trait Units {
-  type M[+A]
-  def unit : M[Unit]
-  def unit[A](a : => A) : M[A]
-}
-
-trait Zero {
-  type M[+A]
-  def zero : M[Nothing]
-}
-
-trait Functors {
-  type M[+A] <: Functor[A]
-
-  trait Functor[+A] extends rules.Functor[A] { this : M[A] =>
-    type M[+A] = Functors.this.M[A]
-  }
-
-  trait ZeroFunctor extends Functor[Nothing] { this : M[Nothing] =>
-    override def map[B](f : Nothing => B) : M[B] = this
-    def filter(f : Nothing => Boolean) : M[Nothing] = this
-    def plus[B](other : => M[B]) : M[B] = other
-    def orElse[B](other : => M[B]) : M[B] = other
-  }
-}
-
-/** One of the 'unit' definitions must be overridden in concrete subclasses */
-trait UnitFunctors extends Units with Functors {
-  def unit : M[Unit] = unit(())
-  def unit[A](a : => A) : M[A] = unit map { Unit => a }
-}
-
-
-trait Monoidals extends UnitFunctors {
-  type M[+A] <: Monoidal[A]
-
-  implicit def app[A, B](fab : M[A => B]) = (fa : M[A]) => fa applyTo fab
-  implicit def appUnit[A, B](a2b : A => B) = app(unit(a2b))
-
-  /** One of 'and' and 'applyTo' definitions must be overridden in concrete subclasses */
-  trait Monoidal[+A] extends Functor[A] { self : M[A] =>
-    def and[B](fb : => M[B]) : M[(A, B)] = ((a : A) => (b : B) => (a, b))(this)(fb)
-    def applyTo[B](fab : M[A => B]) : M[B] = fab and this map { case (f, a) => f(a) }
-  }
-}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Input.scala b/src/scalap/scala/tools/scalap/scalax/rules/Input.scala
deleted file mode 100644
index 370eb0d..0000000
--- a/src/scalap/scala/tools/scalap/scalax/rules/Input.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-// -----------------------------------------------------------------------------
-//
-//  Scalax - The Scala Community Library
-//  Copyright (c) 2005-8 The Scalax Project. All rights reserved.
-//
-//  The primary distribution site is http://scalax.scalaforge.org/
-//
-//  This software is released under the terms of the Revised BSD License.
-//  There is NO WARRANTY.  See the file LICENSE for the full text.
-//
-// -----------------------------------------------------------------------------
-
-package scala.tools.scalap
-package scalax
-package rules
-
-trait Input[+A] extends Iterable[A] {
-
-  def next : Result[Input[A], A, Nothing]
-  def index : Int
-
-  def iterator = new Iterator[A] {
-    private var input : Input[A] = Input.this
-    private var result = input.next
-
-    def hasNext = result != Failure
-    def next = {
-      val Success(input, value) = result
-      this.input = input
-      this.result = input.next
-      value
-    }
-  }
-}
-
-
-class ArrayInput[A](val array : Array[A], val index : Int) extends Input[A] {
-  def this(array : Array[A]) = this(array, 0)
-
-  lazy val next : Result[ArrayInput[A], A, Nothing] = if (index >= array.length) Failure
-      else Success(new ArrayInput[A](array, index + 1), array(index))
-
-  override lazy val toString = this.iterator.mkString("\"", "", "\"")
-}
-
-
-class IterableInput[A](iterator : Iterator[A], val index : Int) extends Input[A] {
-  def this(iterable : Iterable[A]) = this(iterable.iterator, 0)
-
-  lazy val next : Result[IterableInput[A], A, Nothing] = if (!iterator.hasNext) Failure
-      else Success(new IterableInput(iterator, index + 1), iterator.next)
-
-  override lazy val toString = this.iterator.mkString("\"", "", "\"")
-}
-
-
-/** View one type of input as another based on a transformation rule */
-class View[A, B](
-    transform : Input[A] => Result[Input[A], B, Nothing],
-    val input : Input[A],
-    val index : Int)
-    extends Input[B] {
-
-  def next : Result[Input[B], B, Nothing] = transform(input) match {
-    case Success(context, b) => Success(new View(transform, context, index + 1), b)
-    case _ => Failure
-  }
-}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala b/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala
index b4ce8ca..bdd1761 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala
@@ -17,19 +17,19 @@ package rules
 import scala.collection.mutable
 
 trait MemoisableRules extends Rules {
-  def memo[In <: Memoisable, Out, A, X](key : AnyRef)(toRule : => In => Result[Out, A, X]) = {
+  def memo[In <: Memoisable, Out, A, X](key: AnyRef)(toRule: => In => Result[Out, A, X]) = {
     lazy val rule = toRule
     from[In] { in => in.memo(key, rule(in)) }
   }
 
-  override def ruleWithName[In, Out, A, X](name : String, f : In => rules.Result[Out, A, X]) = super.ruleWithName(name, (in : In) => in match {
-      case s : Memoisable => s.memo(name, f(in))
+  override def ruleWithName[In, Out, A, X](name: String, f: In => rules.Result[Out, A, X]) = super.ruleWithName(name, (in: In) => in match {
+      case s: Memoisable => s.memo(name, f(in))
       case _ => f(in)
     })
 }
 
 trait Memoisable {
-  def memo[A](key : AnyRef, a : => A) : A
+  def memo[A](key: AnyRef, a: => A): A
 }
 
 
@@ -40,18 +40,18 @@ object DefaultMemoisable {
 trait DefaultMemoisable extends Memoisable {
   protected val map = new mutable.HashMap[AnyRef, Any]
 
-  def memo[A](key : AnyRef, a : => A) = {
+  def memo[A](key: AnyRef, a: => A) = {
     map.getOrElseUpdate(key, compute(key, a)).asInstanceOf[A]
   }
 
-  protected def compute[A](key : AnyRef, a : => A): Any = a match {
-    case success : Success[_, _] => onSuccess(key, success); success
+  protected def compute[A](key: AnyRef, a: => A): Any = a match {
+    case success: Success[_, _] => onSuccess(key, success); success
     case other =>
       if(DefaultMemoisable.debug) println(key + " -> " + other)
       other
   }
 
-  protected def onSuccess[S, T](key : AnyRef,  result : Success[S, T])  {
+  protected def onSuccess[S, T](key: AnyRef,  result: Success[S, T])  {
     val Success(out, t) = result
     if(DefaultMemoisable.debug) println(key + " -> " + t + " (" + out + ")")
   }
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Monad.scala b/src/scalap/scala/tools/scalap/scalax/rules/Monad.scala
deleted file mode 100644
index 639c414..0000000
--- a/src/scalap/scala/tools/scalap/scalax/rules/Monad.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-// -----------------------------------------------------------------------------
-//
-//  Scalax - The Scala Community Library
-//  Copyright (c) 2005-8 The Scalax Project. All rights reserved.
-//
-//  The primary distribution site is http://scalax.scalaforge.org/
-//
-//  This software is released under the terms of the Revised BSD License.
-//  There is NO WARRANTY.  See the file LICENSE for the full text.
-//
-// -----------------------------------------------------------------------------
-
-package scala.tools.scalap
-package scalax
-package rules
-
-trait Monad[+A] extends Functor[A] {
-  type M[+A] <: Monad[A]
-  def flatMap[B](f : A => M[B]) : M[B]
-}
-
-trait Monads extends UnitFunctors {
-  type M[+A] <: Monad[A]
-
-  trait Monad[+A] extends Functor[A] with rules.Monad[A] { this : M[A] =>
-    def map[B](f : A => B) = flatMap { a => unit(f(a)) }
-  }
-
-  trait ZeroMonad extends Monad[Nothing] with ZeroFunctor { this : M[Nothing] =>
-    def flatMap[B](f : Nothing => M[B]) : M[B] = this
-  }
-}
-
-
-trait StateReader extends Monads {
-  type S
-
-  def get : M[S]
-  def read[A](f : S => A) : M[A]
-  def set(s : => S) : M[S]
-  def update(f : S => S) : M[S]
-}
-
-
-
-
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Result.scala b/src/scalap/scala/tools/scalap/scalax/rules/Result.scala
index 17ad4bd..f37340e 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Result.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Result.scala
@@ -18,35 +18,35 @@ package rules;
  *
  * @see the Scala parser combinator
  */
-case class ~[+A, +B](_1 : A, _2 : B) {
+case class ~[+A, +B](_1: A, _2: B) {
   override def toString = "(" + _1 + " ~ " + _2 + ")"
 }
 
 
 sealed abstract class Result[+Out, +A, +X] {
-  def out : Out
-  def value : A
-  def error : X
+  def out: Out
+  def value: A
+  def error: X
 
-  implicit def toOption : Option[A]
+  implicit def toOption: Option[A]
 
-  def map[B](f : A => B) : Result[Out, B, X]
-  def mapOut[Out2](f : Out => Out2) : Result[Out2, A, X]
-  def map[Out2, B](f : (Out, A) => (Out2, B)) : Result[Out2, B, X]
-  def flatMap[Out2, B](f : (Out, A) => Result[Out2, B, Nothing]) : Result[Out2, B, X]
-  def orElse[Out2 >: Out, B >: A](other : => Result[Out2, B, Nothing]) : Result[Out2, B, X]
+  def map[B](f: A => B): Result[Out, B, X]
+  def mapOut[Out2](f: Out => Out2): Result[Out2, A, X]
+  def map[Out2, B](f: (Out, A) => (Out2, B)): Result[Out2, B, X]
+  def flatMap[Out2, B](f: (Out, A) => Result[Out2, B, Nothing]): Result[Out2, B, X]
+  def orElse[Out2 >: Out, B >: A](other: => Result[Out2, B, Nothing]): Result[Out2, B, X]
 }
 
-case class Success[+Out, +A](out : Out, value : A) extends Result[Out, A, Nothing] {
+case class Success[+Out, +A](out: Out, value: A) extends Result[Out, A, Nothing] {
   def error = throw new ScalaSigParserError("No error")
 
   def toOption = Some(value)
 
-  def map[B](f : A => B) : Result[Out, B, Nothing] = Success(out, f(value))
-  def mapOut[Out2](f : Out => Out2) : Result[Out2, A, Nothing] = Success(f(out), value)
-  def map[Out2, B](f : (Out, A) => (Out2, B)) : Success[Out2, B] = f(out, value) match { case (out2, b) => Success(out2, b) }
-  def flatMap[Out2, B](f : (Out, A) => Result[Out2, B, Nothing]) : Result[Out2, B, Nothing]= f(out, value)
-  def orElse[Out2 >: Out, B >: A](other : => Result[Out2, B, Nothing]) : Result[Out2, B, Nothing] = this
+  def map[B](f: A => B): Result[Out, B, Nothing] = Success(out, f(value))
+  def mapOut[Out2](f: Out => Out2): Result[Out2, A, Nothing] = Success(f(out), value)
+  def map[Out2, B](f: (Out, A) => (Out2, B)): Success[Out2, B] = f(out, value) match { case (out2, b) => Success(out2, b) }
+  def flatMap[Out2, B](f: (Out, A) => Result[Out2, B, Nothing]): Result[Out2, B, Nothing]= f(out, value)
+  def orElse[Out2 >: Out, B >: A](other: => Result[Out2, B, Nothing]): Result[Out2, B, Nothing] = this
 }
 
 sealed abstract class NoSuccess[+X] extends Result[Nothing, Nothing, X] {
@@ -55,11 +55,11 @@ sealed abstract class NoSuccess[+X] extends Result[Nothing, Nothing, X] {
 
   def toOption = None
 
-  def map[B](f : Nothing => B) = this
-  def mapOut[Out2](f : Nothing => Out2) = this
-  def map[Out2, B](f : (Nothing, Nothing) => (Out2, B)) = this
-  def flatMap[Out2, B](f : (Nothing, Nothing) => Result[Out2, B, Nothing]) = this
-  def orElse[Out2, B](other : => Result[Out2, B, Nothing]) = other
+  def map[B](f: Nothing => B) = this
+  def mapOut[Out2](f: Nothing => Out2) = this
+  def map[Out2, B](f: (Nothing, Nothing) => (Out2, B)) = this
+  def flatMap[Out2, B](f: (Nothing, Nothing) => Result[Out2, B, Nothing]) = this
+  def orElse[Out2, B](other: => Result[Out2, B, Nothing]) = other
 }
 
 case object Failure extends NoSuccess[Nothing] {
@@ -68,5 +68,4 @@ case object Failure extends NoSuccess[Nothing] {
 
 case class ScalaSigParserError(msg: String) extends RuntimeException(msg)
 
-case class Error[+X](error : X) extends NoSuccess[X] {
-}
+case class Error[+X](error: X) extends NoSuccess[X]
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala
index 1500b81..307458f 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala
@@ -26,138 +26,138 @@ package rules
   * Inspired by the Scala parser combinator.
   */
 trait Rule[-In, +Out, +A, +X] extends (In => Result[Out, A, X]) {
-  val factory : Rules
+  val factory: Rules
   import factory._
 
-  def as(name : String) = ruleWithName(name, this)
+  def as(name: String) = ruleWithName(name, this)
 
-  def flatMap[Out2, B, X2 >: X](fa2ruleb : A => Out => Result[Out2, B, X2]) = mapResult {
+  def flatMap[Out2, B, X2 >: X](fa2ruleb: A => Out => Result[Out2, B, X2]) = mapResult {
       case Success(out, a) => fa2ruleb(a)(out)
       case Failure => Failure
       case err @ Error(_) => err
   }
 
-  def map[B](fa2b : A => B) = flatMap { a => out => Success(out, fa2b(a)) }
+  def map[B](fa2b: A => B) = flatMap { a => out => Success(out, fa2b(a)) }
 
-  def filter(f : A => Boolean) = flatMap { a => out => if(f(a)) Success(out, a) else Failure }
+  def filter(f: A => Boolean) = flatMap { a => out => if(f(a)) Success(out, a) else Failure }
 
-  def mapResult[Out2, B, Y](f : Result[Out, A, X] => Result[Out2, B, Y]) = rule {
-    in : In => f(apply(in))
+  def mapResult[Out2, B, Y](f: Result[Out, A, X] => Result[Out2, B, Y]) = rule {
+    in: In => f(apply(in))
   }
 
-  def orElse[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other : => Rule[In2, Out2, A2, X2]) : Rule[In2, Out2, A2, X2] = new Choice[In2, Out2, A2, X2] {
+  def orElse[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other: => Rule[In2, Out2, A2, X2]): Rule[In2, Out2, A2, X2] = new Choice[In2, Out2, A2, X2] {
     val factory = Rule.this.factory
     lazy val choices = Rule.this :: other :: Nil
   }
 
-  def orError[In2 <: In] = this orElse(error[In2])
+  def orError[In2 <: In] = this orElse error[Any]
 
-  def |[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other : => Rule[In2, Out2, A2, X2]) = orElse(other)
+  def |[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other: => Rule[In2, Out2, A2, X2]) = orElse(other)
 
-  def ^^[B](fa2b : A => B) = map(fa2b)
+  def ^^[B](fa2b: A => B) = map(fa2b)
 
-  def ^^?[B](pf : PartialFunction[A, B]) = filter (pf.isDefinedAt(_)) ^^ pf
+  def ^^?[B](pf: PartialFunction[A, B]) = filter (pf.isDefinedAt(_)) ^^ pf
 
-  def ??(pf : PartialFunction[A, Any]) = filter (pf.isDefinedAt(_))
+  def ??(pf: PartialFunction[A, Any]) = filter (pf.isDefinedAt(_))
 
-  def -^[B](b : B) = map { any => b }
+  def -^[B](b: B) = map { any => b }
 
   /** Maps an Error */
-  def !^[Y](fx2y : X => Y) = mapResult {
+  def !^[Y](fx2y: X => Y) = mapResult {
     case s @ Success(_, _) => s
     case Failure => Failure
     case Error(x) => Error(fx2y(x))
   }
 
-  def >>[Out2, B, X2 >: X](fa2ruleb : A => Out => Result[Out2, B, X2]) = flatMap(fa2ruleb)
+  def >>[Out2, B, X2 >: X](fa2ruleb: A => Out => Result[Out2, B, X2]) = flatMap(fa2ruleb)
 
-  def >->[Out2, B, X2 >: X](fa2resultb : A => Result[Out2, B, X2]) = flatMap { a => any => fa2resultb(a) }
+  def >->[Out2, B, X2 >: X](fa2resultb: A => Result[Out2, B, X2]) = flatMap { a => any => fa2resultb(a) }
 
-  def >>?[Out2, B, X2 >: X](pf : PartialFunction[A, Rule[Out, Out2, B, X2]]) = filter(pf isDefinedAt _) flatMap pf
+  def >>?[Out2, B, X2 >: X](pf: PartialFunction[A, Rule[Out, Out2, B, X2]]) = filter(pf isDefinedAt _) flatMap pf
 
-  def >>&[B, X2 >: X](fa2ruleb : A => Out => Result[Any, B, X2]) = flatMap { a => out => fa2ruleb(a)(out) mapOut { any => out } }
+  def >>&[B, X2 >: X](fa2ruleb: A => Out => Result[Any, B, X2]) = flatMap { a => out => fa2ruleb(a)(out) mapOut { any => out } }
 
-  def ~[Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield new ~(a, b)
+  def ~[Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield new ~(a, b)
 
-  def ~-[Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield a
+  def ~-[Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield a
 
-  def -~[Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield b
+  def -~[Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield b
 
-  def ~++[Out2, B >: A, X2 >: X](next : => Rule[Out, Out2, Seq[B], X2]) = for (a <- this; b <- next) yield a :: b.toList
+  def ~++[Out2, B >: A, X2 >: X](next: => Rule[Out, Out2, Seq[B], X2]) = for (a <- this; b <- next) yield a :: b.toList
 
   /** Apply the result of this rule to the function returned by the next rule */
-  def ~>[Out2, B, X2 >: X](next : => Rule[Out, Out2, A => B, X2]) = for (a <- this; fa2b <- next) yield fa2b(a)
+  def ~>[Out2, B, X2 >: X](next: => Rule[Out, Out2, A => B, X2]) = for (a <- this; fa2b <- next) yield fa2b(a)
 
   /** Apply the result of this rule to the function returned by the previous rule */
-  def <~:[InPrev, B, X2 >: X](prev : => Rule[InPrev, In, A => B, X2]) = for (fa2b <- prev; a <- this) yield fa2b(a)
+  def <~:[InPrev, B, X2 >: X](prev: => Rule[InPrev, In, A => B, X2]) = for (fa2b <- prev; a <- this) yield fa2b(a)
 
-  def ~![Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next orError) yield new ~(a, b)
+  def ~![Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next.orError) yield new ~(a, b)
 
-  def ~-![Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next orError) yield a
+  def ~-![Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next.orError) yield a
 
-  def -~![Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next orError) yield b
+  def -~![Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next.orError) yield b
 
-  def -[In2 <: In](exclude : => Rule[In2, Any, Any, Any]) = !exclude -~ this
+  def -[In2 <: In](exclude: => Rule[In2, Any, Any, Any]) = !exclude -~ this
 
   /** ^~^(f) is equivalent to ^^ { case b1 ~ b2 => f(b1, b2) }
    */
-  def ^~^[B1, B2, B >: A <% B1 ~ B2, C](f : (B1, B2) => C) = map { a =>
-    (a : B1 ~ B2) match { case b1 ~ b2 => f(b1, b2) }
+  def ^~^[B1, B2, B >: A <% B1 ~ B2, C](f: (B1, B2) => C) = map { a =>
+    (a: B1 ~ B2) match { case b1 ~ b2 => f(b1, b2) }
   }
 
   /** ^~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 => f(b1, b2, b3) }
    */
-  def ^~~^[B1, B2, B3, B >: A <% B1 ~ B2 ~ B3, C](f : (B1, B2, B3) => C) = map { a =>
-    (a : B1 ~ B2 ~ B3) match { case b1 ~ b2 ~ b3 => f(b1, b2, b3) }
+  def ^~~^[B1, B2, B3, B >: A <% B1 ~ B2 ~ B3, C](f: (B1, B2, B3) => C) = map { a =>
+    (a: B1 ~ B2 ~ B3) match { case b1 ~ b2 ~ b3 => f(b1, b2, b3) }
   }
 
   /** ^~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 => f(b1, b2, b3, b4) }
    */
-  def ^~~~^[B1, B2, B3, B4, B >: A <% B1 ~ B2 ~ B3 ~ B4, C](f : (B1, B2, B3, B4) => C) = map { a =>
-    (a : B1 ~ B2 ~ B3 ~ B4) match { case b1 ~ b2 ~ b3 ~ b4 => f(b1, b2, b3, b4) }
+  def ^~~~^[B1, B2, B3, B4, B >: A <% B1 ~ B2 ~ B3 ~ B4, C](f: (B1, B2, B3, B4) => C) = map { a =>
+    (a: B1 ~ B2 ~ B3 ~ B4) match { case b1 ~ b2 ~ b3 ~ b4 => f(b1, b2, b3, b4) }
   }
 
   /** ^~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 => f(b1, b2, b3, b4, b5) }
    */
-  def ^~~~~^[B1, B2, B3, B4, B5, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5, C](f : (B1, B2, B3, B4, B5) => C) = map { a =>
-    (a : B1 ~ B2 ~ B3 ~ B4 ~ B5) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 => f(b1, b2, b3, b4, b5) }
+  def ^~~~~^[B1, B2, B3, B4, B5, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5, C](f: (B1, B2, B3, B4, B5) => C) = map { a =>
+    (a: B1 ~ B2 ~ B3 ~ B4 ~ B5) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 => f(b1, b2, b3, b4, b5) }
   }
 
   /** ^~~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
    */
-  def ^~~~~~^[B1, B2, B3, B4, B5, B6, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6, C](f : (B1, B2, B3, B4, B5, B6) => C) = map { a =>
-    (a : B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
+  def ^~~~~~^[B1, B2, B3, B4, B5, B6, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6, C](f: (B1, B2, B3, B4, B5, B6) => C) = map { a =>
+    (a: B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
   }
 
   /** ^~~~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
    */
-  def ^~~~~~~^[B1, B2, B3, B4, B5, B6, B7, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7, C](f : (B1, B2, B3, B4, B5, B6, B7) => C) = map { a =>
-    (a : B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 ~b7 => f(b1, b2, b3, b4, b5, b6, b7) }
+  def ^~~~~~~^[B1, B2, B3, B4, B5, B6, B7, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7, C](f: (B1, B2, B3, B4, B5, B6, B7) => C) = map { a =>
+    (a: B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 ~b7 => f(b1, b2, b3, b4, b5, b6, b7) }
   }
 
   /** >~>(f) is equivalent to >> { case b1 ~ b2 => f(b1, b2) }
    */
-  def >~>[Out2, B1, B2, B >: A <% B1 ~ B2, C, X2 >: X](f : (B1, B2) => Out => Result[Out2, C, X2]) = flatMap { a =>
-    (a : B1 ~ B2) match { case b1 ~ b2 => f(b1, b2) }
+  def >~>[Out2, B1, B2, B >: A <% B1 ~ B2, C, X2 >: X](f: (B1, B2) => Out => Result[Out2, C, X2]) = flatMap { a =>
+    (a: B1 ~ B2) match { case b1 ~ b2 => f(b1, b2) }
   }
 
   /** ^-^(f) is equivalent to ^^ { b2 => b1 => f(b1, b2) }
    */
-  def ^-^ [B1, B2 >: A, C](f : (B1, B2) => C) = map { b2 : B2 => b1 : B1 => f(b1, b2) }
+  def ^-^ [B1, B2 >: A, C](f: (B1, B2) => C) = map { b2: B2 => b1: B1 => f(b1, b2) }
 
  /** ^~>~^(f) is equivalent to ^^ { case b2 ~ b3 => b1 => f(b1, b2, b3) }
   */
- def ^~>~^ [B1, B2, B3, B >: A <% B2 ~ B3, C](f : (B1, B2, B3) => C) = map { a =>
-   (a : B2 ~ B3) match { case b2 ~ b3 => b1 : B1 => f(b1, b2, b3) }
+ def ^~>~^ [B1, B2, B3, B >: A <% B2 ~ B3, C](f: (B1, B2, B3) => C) = map { a =>
+   (a: B2 ~ B3) match { case b2 ~ b3 => b1: B1 => f(b1, b2, b3) }
  }
 }
 
 
 trait Choice[-In, +Out, +A, +X] extends Rule[In, Out, A, X] {
-  def choices : List[Rule[In, Out, A, X]]
+  def choices: List[Rule[In, Out, A, X]]
 
-  def apply(in : In) = {
-    def oneOf(list : List[Rule[In, Out, A, X]]) : Result[Out, A, X] = list match {
+  def apply(in: In) = {
+    def oneOf(list: List[Rule[In, Out, A, X]]): Result[Out, A, X] = list match {
       case Nil => Failure
       case first :: rest => first(in) match {
         case Failure => oneOf(rest)
@@ -167,7 +167,7 @@ trait Choice[-In, +Out, +A, +X] extends Rule[In, Out, A, X] {
     oneOf(choices)
   }
 
-  override def orElse[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other : => Rule[In2, Out2, A2, X2]) : Rule[In2, Out2, A2, X2] = new Choice[In2, Out2, A2, X2] {
+  override def orElse[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other: => Rule[In2, Out2, A2, X2]): Rule[In2, Out2, A2, X2] = new Choice[In2, Out2, A2, X2] {
     val factory = Choice.this.factory
     lazy val choices = Choice.this.choices ::: other :: Nil
   }
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
index 7092620..dd17c46 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
@@ -14,8 +14,10 @@ package scala.tools.scalap
 package scalax
 package rules
 
+import language.postfixOps
+
 trait Name {
-  def name : String
+  def name: String
   override def toString = name
 }
 
@@ -26,13 +28,18 @@ trait Name {
   * Inspired by the Scala parser combinator.
   */
 trait Rules {
-  implicit def rule[In, Out, A, X](f : In => Result[Out, A, X]) : Rule[In, Out, A, X] = new DefaultRule(f)
 
-  implicit def inRule[In, Out, A, X](rule : Rule[In, Out, A, X]) : InRule[In, Out, A, X] = new InRule(rule)
-  implicit def seqRule[In, A, X](rule : Rule[In, In, A, X]) : SeqRule[In, A, X] = new SeqRule(rule)
+  import scala.language.implicitConversions
+  implicit def rule[In, Out, A, X](f: In => Result[Out, A, X]): Rule[In, Out, A, X] = new DefaultRule(f)
+  implicit def inRule[In, Out, A, X](rule: Rule[In, Out, A, X]): InRule[In, Out, A, X] = new InRule(rule)
+  implicit def seqRule[In, A, X](rule: Rule[In, In, A, X]): SeqRule[In, A, X] = new SeqRule(rule)
+
+  trait FromRule[In] {
+    def apply[Out, A, X](f: In => Result[Out, A, X]): Rule[In, Out, A, X]
+  }
 
-  def from[In] = new {
-    def apply[Out, A, X](f : In => Result[Out, A, X]) = rule(f)
+  def from[In] = new FromRule[In] {
+    def apply[Out, A, X](f: In => Result[Out, A, X]) = rule(f)
   }
 
   def state[s] = new StateRules {
@@ -40,30 +47,30 @@ trait Rules {
     val factory = Rules.this
   }
 
-  def success[Out, A](out : Out, a : A) = rule { in : Any => Success(out, a) }
+  def success[Out, A](out: Out, a: A) = rule { in: Any => Success(out, a) }
 
-  def failure = rule { in : Any => Failure }
+  def failure = rule { in: Any => Failure }
 
-  def error[In] = rule { in : In => Error(in) }
-  def error[X](err : X) = rule { in : Any => Error(err) }
+  def error[In] = rule { in: In => Error(in) }
+  def error[X](err: X) = rule { in: Any => Error(err) }
 
-  def oneOf[In, Out, A, X](rules : Rule[In, Out, A, X] *) : Rule[In, Out, A, X] = new Choice[In, Out, A, X] {
+  def oneOf[In, Out, A, X](rules: Rule[In, Out, A, X] *): Rule[In, Out, A, X] = new Choice[In, Out, A, X] {
     val factory = Rules.this
     val choices = rules.toList
   }
 
-  def ruleWithName[In, Out, A, X](_name : String, f : In => Result[Out, A, X]) : Rule[In, Out, A, X] with Name =
+  def ruleWithName[In, Out, A, X](_name: String, f: In => Result[Out, A, X]): Rule[In, Out, A, X] with Name =
     new DefaultRule(f) with Name {
       val name = _name
     }
 
-  class DefaultRule[In, Out, A, X](f : In => Result[Out, A, X]) extends Rule[In, Out, A, X] {
+  class DefaultRule[In, Out, A, X](f: In => Result[Out, A, X]) extends Rule[In, Out, A, X] {
     val factory = Rules.this
-    def apply(in : In) = f(in)
+    def apply(in: In) = f(in)
   }
 
  /** Converts a rule into a function that throws an Exception on failure. */
-  def expect[In, Out, A, Any](rule : Rule[In, Out, A, Any]) : In => A = (in) => rule(in) match {
+  def expect[In, Out, A, Any](rule: Rule[In, Out, A, Any]): In => A = (in) => rule(in) match {
     case Success(_, a) => a
     case Failure => throw new ScalaSigParserError("Unexpected failure")
     case Error(x) => throw new ScalaSigParserError("Unexpected error: " + x)
@@ -82,30 +89,30 @@ trait StateRules {
   type S
   type Rule[+A, +X] = rules.Rule[S, S, A, X]
 
-  val factory : Rules
+  val factory: Rules
   import factory._
 
-  def apply[A, X](f : S => Result[S, A, X]) = rule(f)
+  def apply[A, X](f: S => Result[S, A, X]) = rule(f)
 
-  def unit[A](a : => A) = apply { s => Success(s, a) }
-  def read[A](f : S => A) = apply { s => Success(s, f(s)) }
+  def unit[A](a: => A) = apply { s => Success(s, a) }
+  def read[A](f: S => A) = apply { s => Success(s, f(s)) }
 
   def get = apply { s => Success(s, s) }
-  def set(s : => S) = apply { oldS => Success(s, oldS) }
+  def set(s: => S) = apply { oldS => Success(s, oldS) }
 
-  def update(f : S => S) = apply { s => Success(s, f(s)) }
+  def update(f: S => S) = apply { s => Success(s, f(s)) }
 
   def nil = unit(Nil)
   def none = unit(None)
 
   /** Create a rule that identities if f(in) is true. */
-  def cond(f : S => Boolean) = get filter f
+  def cond(f: S => Boolean) = get filter f
 
   /** Create a rule that succeeds if all of the given rules succeed.
       @param rules the rules to apply in sequence.
   */
-  def allOf[A, X](rules : Seq[Rule[A, X]]) = {
-    def rep(in : S, rules : List[Rule[A, X]], results : List[A]) : Result[S, List[A], X] = {
+  def allOf[A, X](rules: Seq[Rule[A, X]]) = {
+    def rep(in: S, rules: List[Rule[A, X]], results: List[A]): Result[S, List[A], X] = {
       rules match {
         case Nil => Success(in, results.reverse)
         case rule::tl => rule(in) match {
@@ -115,19 +122,19 @@ trait StateRules {
         }
       }
     }
-    in : S => rep(in, rules.toList, Nil)
+    in: S => rep(in, rules.toList, Nil)
   }
 
 
   /** Create a rule that succeeds with a list of all the provided rules that succeed.
       @param rules the rules to apply in sequence.
   */
-  def anyOf[A, X](rules : Seq[Rule[A, X]]) = allOf(rules.map(_ ?)) ^^ { opts => opts.flatMap(x => x) }
+  def anyOf[A, X](rules: Seq[Rule[A, X]]) = allOf(rules.map(_ ?)) ^^ { opts => opts.flatMap(x => x) }
 
   /** Repeatedly apply a rule from initial value until finished condition is met. */
-  def repeatUntil[T, X](rule : Rule[T => T, X])(finished : T => Boolean)(initial : T) = apply {
+  def repeatUntil[T, X](rule: Rule[T => T, X])(finished: T => Boolean)(initial: T) = apply {
     // more compact using HoF but written this way so it's tail-recursive
-    def rep(in : S, t : T) : Result[S, T, X] = {
+    def rep(in: S, t: T): Result[S, T, X] = {
       if (finished(t)) Success(in, t)
       else rule(in) match {
         case Success(out, f) => rep(out, f(t)) // SI-5189 f.asInstanceOf[T => T]
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
index 51a789e..f3c0235 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
@@ -14,37 +14,39 @@ package scala.tools.scalap
 package scalax
 package rules
 
+import language.postfixOps
+
 /**
  * A workaround for the difficulties of dealing with
  * a contravariant 'In' parameter type...
  */
-class InRule[In, +Out, +A, +X](rule : Rule[In, Out, A, X]) {
+class InRule[In, +Out, +A, +X](rule: Rule[In, Out, A, X]) {
 
-  def mapRule[Out2, B, Y](f : Result[Out, A, X] => In => Result[Out2, B, Y]) : Rule[In, Out2, B, Y] = rule.factory.rule {
-    in : In => f(rule(in))(in)
+  def mapRule[Out2, B, Y](f: Result[Out, A, X] => In => Result[Out2, B, Y]): Rule[In, Out2, B, Y] = rule.factory.rule {
+    in: In => f(rule(in))(in)
   }
 
   /** Creates a rule that succeeds only if the original rule would fail on the given context. */
   def unary_! : Rule[In, In, Unit, Nothing] = mapRule {
-    case Success(_, _) => in : In => Failure
-    case _ => in : In => Success(in, ())
+    case Success(_, _) => in: In => Failure
+    case _ => in: In => Success(in, ())
   }
 
   /** Creates a rule that succeeds if the original rule succeeds, but returns the original input. */
   def & : Rule[In, In, A, X] = mapRule {
-    case Success(_, a) => in : In => Success(in, a)
-    case Failure => in : In => Failure
-    case Error(x) => in : In => Error(x)
+    case Success(_, a) => in: In => Success(in, a)
+    case Failure => in: In => Failure
+    case Error(x) => in: In => Error(x)
   }
 }
 
-class SeqRule[S, +A, +X](rule : Rule[S, S, A, X]) {
+class SeqRule[S, +A, +X](rule: Rule[S, S, A, X]) {
   import rule.factory._
 
   def ? = rule mapRule {
-    case Success(out, a) => in : S => Success(out, Some(a))
-    case Failure => in : S => Success(in, None)
-    case Error(x) => in : S => Error(x)
+    case Success(out, a) => in: S => Success(out, Some(a))
+    case Failure => in: S => Success(in, None)
+    case Error(x) => in: S => Error(x)
   }
 
   /** Creates a rule that always succeeds with a Boolean value.
@@ -53,38 +55,38 @@ class SeqRule[S, +A, +X](rule : Rule[S, S, A, X]) {
 
   def * = from[S] {
     // tail-recursive function with reverse list accumulator
-    def rep(in : S, acc : List[A]) : Result[S, List[A], X] = rule(in) match {
+    def rep(in: S, acc: List[A]): Result[S, List[A], X] = rule(in) match {
        case Success(out, a) => rep(out, a :: acc)
        case Failure => Success(in, acc.reverse)
-       case err : Error[_] => err
+       case err: Error[_] => err
     }
     in => rep(in, Nil)
   }
 
   def + = rule ~++ *
 
-  def ~>?[B >: A, X2 >: X](f : => Rule[S, S, B => B, X2]) = for (a <- rule; fs <- f?) yield fs.foldLeft[B](a) { (b, f) => f(b) }
+  def ~>?[B >: A, X2 >: X](f: => Rule[S, S, B => B, X2]) = for (a <- rule; fs <- f?) yield fs.foldLeft[B](a) { (b, f) => f(b) }
 
-  def ~>*[B >: A, X2 >: X](f : => Rule[S, S, B => B, X2]) = for (a <- rule; fs <- f*) yield fs.foldLeft[B](a) { (b, f) => f(b) }
+  def ~>*[B >: A, X2 >: X](f: => Rule[S, S, B => B, X2]) = for (a <- rule; fs <- f*) yield fs.foldLeft[B](a) { (b, f) => f(b) }
 
-  def ~*~[B >: A, X2 >: X](join : => Rule[S, S, (B, B) => B, X2]) = {
-    this ~>* (for (f <- join; a <- rule) yield f(_ : B, a))
+  def ~*~[B >: A, X2 >: X](join: => Rule[S, S, (B, B) => B, X2]) = {
+    this ~>* (for (f <- join; a <- rule) yield f(_: B, a))
   }
 
   /** Repeats this rule one or more times with a separator (which is discarded) */
-  def +/[X2 >: X](sep : => Rule[S, S, Any, X2]) = rule ~++ (sep -~ rule *)
+  def +/[X2 >: X](sep: => Rule[S, S, Any, X2]) = rule ~++ (sep -~ rule *)
 
   /** Repeats this rule zero or more times with a separator (which is discarded) */
-  def */[X2 >: X](sep : => Rule[S, S, Any, X2]) = +/(sep) | state[S].nil
+  def */[X2 >: X](sep: => Rule[S, S, Any, X2]) = +/(sep) | state[S].nil
 
-  def *~-[Out, X2 >: X](end : => Rule[S, Out, Any, X2]) = (rule - end *) ~- end
-  def +~-[Out, X2 >: X](end : => Rule[S, Out, Any, X2]) = (rule - end +) ~- end
+  def *~-[Out, X2 >: X](end: => Rule[S, Out, Any, X2]) = (rule - end *) ~- end
+  def +~-[Out, X2 >: X](end: => Rule[S, Out, Any, X2]) = (rule - end +) ~- end
 
   /** Repeats this rule num times */
-  def times(num : Int) : Rule[S, S, Seq[A], X] = from[S] {
+  def times(num: Int): Rule[S, S, Seq[A], X] = from[S] {
     val result = new scala.collection.mutable.ArraySeq[A](num)
     // more compact using HoF but written this way so it's tail-recursive
-    def rep(i : Int, in : S) : Result[S, Seq[A], X] = {
+    def rep(i: Int, in: S): Result[S, Seq[A], X] = {
       if (i == num) Success(in, result)
       else rule(in) match {
        case Success(out, a) => {
@@ -92,7 +94,7 @@ class SeqRule[S, +A, +X](rule : Rule[S, S, A, X]) {
          rep(i + 1, out)
        }
        case Failure => Failure
-       case err : Error[_] => err
+       case err: Error[_] => err
       }
     }
     in => rep(0, in)
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/package.scala b/src/scalap/scala/tools/scalap/scalax/rules/package.scala
deleted file mode 100644
index b1cc18f..0000000
--- a/src/scalap/scala/tools/scalap/scalax/rules/package.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package scala.tools.scalap
-package scalax
-
-package object rules {
-  implicit lazy val higherKinds         = scala.language.higherKinds
-  implicit lazy val postfixOps          = scala.language.postfixOps
-  implicit lazy val implicitConversions = scala.language.implicitConversions
-  implicit lazy val reflectiveCalls     = scala.language.reflectiveCalls
-}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
index 1a4b345..cfd7500 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
@@ -3,16 +3,14 @@ package scalax
 package rules
 package scalasig
 
+import language.postfixOps
 
 import java.io.IOException
 
-import scala._
-import scala.Predef._
-
 object ByteCode {
-  def apply(bytes : Array[Byte]) = new ByteCode(bytes, 0, bytes.length)
+  def apply(bytes: Array[Byte]) = new ByteCode(bytes, 0, bytes.length)
 
-  def forClass(clazz : Class[_]) = {
+  def forClass(clazz: Class[_]) = {
     val name = clazz.getName
     val subPath = name.substring(name.lastIndexOf('.') + 1) + ".class"
     val in = clazz.getResourceAsStream(subPath)
@@ -35,17 +33,17 @@ object ByteCode {
 
 /** Represents a chunk of raw bytecode.  Used as input for the parsers
  */
-class ByteCode(val bytes : Array[Byte], val pos : Int, val length : Int) {
+class ByteCode(val bytes: Array[Byte], val pos: Int, val length: Int) {
 
   assert(pos >= 0 && length >= 0 && pos + length <= bytes.length)
 
   def nextByte = if (length == 0) Failure else Success(drop(1), bytes(pos))
-  def next(n : Int) = if (length >= n) Success(drop(n), take(n)) else Failure
+  def next(n: Int) = if (length >= n) Success(drop(n), take(n)) else Failure
 
-  def take(n : Int) = new ByteCode(bytes, pos, n)
-  def drop(n : Int) = new ByteCode(bytes, pos + n, length - n)
+  def take(n: Int) = new ByteCode(bytes, pos, n)
+  def drop(n: Int) = new ByteCode(bytes, pos + n, length - n)
 
-  def fold[X](x : X)(f : (X, Byte) => X) : X = {
+  def fold[X](x: X)(f: (X, Byte) => X): X = {
     var result = x
     var i = pos
     while (i < pos + length) {
@@ -72,7 +70,7 @@ class ByteCode(val bytes : Array[Byte], val pos : Int, val length : Int) {
     StringBytesPair(str, chunk)
   }
 
-  def byte(i : Int) = bytes(pos) & 0xFF
+  def byte(i: Int) = bytes(pos) & 0xFF
 }
 
 /**
@@ -92,11 +90,11 @@ trait ByteCodeReader extends RulesWithState {
   val u2 = bytes(2) ^^ (_ toInt)
   val u4 = bytes(4) ^^ (_ toInt) // should map to Long??
 
-  def bytes(n : Int) = apply(_ next n)
+  def bytes(n: Int) = apply(_ next n)
 }
 
 object ClassFileParser extends ByteCodeReader {
-  def parse(byteCode : ByteCode) = expect(classFile)(byteCode)
+  def parse(byteCode: ByteCode) = expect(classFile)(byteCode)
   def parseAnnotations(byteCode: ByteCode) = expect(annotations)(byteCode)
 
   val magicNumber = (u4 filter (_ == 0xCAFEBABE)) | error("Not a valid class file")
@@ -169,19 +167,19 @@ object ClassFileParser extends ByteCodeReader {
   val classFile = header ~ fields ~ methods ~ attributes ~- !u1 ^~~~^ ClassFile
 
   // TODO create a useful object, not just a string
-  def memberRef(description : String) = u2 ~ u2 ^^ add1 {
+  def memberRef(description: String) = u2 ~ u2 ^^ add1 {
     case classRef ~ nameAndTypeRef => pool => description + ": " + pool(classRef) + ", " + pool(nameAndTypeRef)
   }
 
-  def add1[T](f : T => ConstantPool => Any)(raw : T)(pool : ConstantPool) = pool add f(raw)
-  def add2[T](f : T => ConstantPool => Any)(raw : T)(pool : ConstantPool) = pool add f(raw) add { pool => "<empty>" }
+  def add1[T](f: T => ConstantPool => Any)(raw: T)(pool: ConstantPool) = pool add f(raw)
+  def add2[T](f: T => ConstantPool => Any)(raw: T)(pool: ConstantPool) = pool add f(raw) add { pool => "<empty>" }
 }
 
 case class ClassFile(
-    header : ClassFileHeader,
-    fields : Seq[Field],
-    methods : Seq[Method],
-    attributes : Seq[Attribute]) {
+    header: ClassFileHeader,
+    fields: Seq[Field],
+    methods: Seq[Method],
+    attributes: Seq[Attribute]) {
 
   def majorVersion = header.major
   def minorVersion = header.minor
@@ -190,14 +188,14 @@ case class ClassFile(
   def superClass = constant(header.superClassIndex)
   def interfaces = header.interfaces.map(constant)
 
-  def constant(index : Int) = header.constants(index) match {
+  def constant(index: Int) = header.constants(index) match {
     case StringBytesPair(str, _) => str
     case z => z
   }
 
   def constantWrapped(index: Int) = header.constants(index)
 
-  def attribute(name : String) = attributes.find {attrib => constant(attrib.nameIndex) == name }
+  def attribute(name: String) = attributes.find {attrib => constant(attrib.nameIndex) == name }
 
   val RUNTIME_VISIBLE_ANNOTATIONS = "RuntimeVisibleAnnotations"
   def annotations = (attributes.find(attr => constant(attr.nameIndex) == RUNTIME_VISIBLE_ANNOTATIONS)
@@ -206,23 +204,23 @@ case class ClassFile(
   def annotation(name: String) = annotations.flatMap(seq => seq.find(annot => constant(annot.typeIndex) == name))
 }
 
-case class Attribute(nameIndex : Int, byteCode : ByteCode)
-case class Field(flags : Int, nameIndex : Int, descriptorIndex : Int, attributes : Seq[Attribute])
-case class Method(flags : Int, nameIndex : Int, descriptorIndex : Int, attributes : Seq[Attribute])
+case class Attribute(nameIndex: Int, byteCode: ByteCode)
+case class Field(flags: Int, nameIndex: Int, descriptorIndex: Int, attributes: Seq[Attribute])
+case class Method(flags: Int, nameIndex: Int, descriptorIndex: Int, attributes: Seq[Attribute])
 
 case class ClassFileHeader(
-    minor : Int,
-    major : Int,
-    constants : ConstantPool,
-    flags : Int,
-    classIndex : Int,
-    superClassIndex : Int,
-    interfaces : Seq[Int]) {
-
-  def constant(index : Int) = constants(index)
+    minor: Int,
+    major: Int,
+    constants: ConstantPool,
+    flags: Int,
+    classIndex: Int,
+    superClassIndex: Int,
+    interfaces: Seq[Int]) {
+
+  def constant(index: Int) = constants(index)
 }
 
-case class ConstantPool(len : Int) {
+case class ConstantPool(len: Int) {
   val size = len - 1
 
   private val buffer = new scala.collection.mutable.ArrayBuffer[ConstantPool => Any]
@@ -230,7 +228,7 @@ case class ConstantPool(len : Int) {
 
   def isFull = buffer.length >= size
 
-  def apply(index : Int) = {
+  def apply(index: Int) = {
     // Note constant pool indices are 1-based
     val i = index - 1
     values(i) getOrElse {
@@ -241,7 +239,7 @@ case class ConstantPool(len : Int) {
     }
   }
 
-  def add(f : ConstantPool => Any) = {
+  def add(f: ConstantPool => Any) = {
     buffer += f
     this
   }
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala
index 218639e..050317c 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala
@@ -4,7 +4,7 @@ package rules
 package scalasig
 
 trait Flags {
-  def hasFlag(flag : Long) : Boolean
+  def hasFlag(flag: Long): Boolean
 
   def isImplicit = hasFlag(0x00000001)
   def isFinal = hasFlag(0x00000002)
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
index aa5acbb..e307632 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
@@ -11,6 +11,9 @@ package scalax
 package rules
 package scalasig
 
+import scala.language.postfixOps
+import scala.language.implicitConversions
+
 import ClassFileParser.{ ConstValueIndex, Annotation }
 import scala.reflect.internal.pickling.ByteCodecs
 
@@ -31,7 +34,7 @@ object ScalaSigParser {
     }
   }
 
-  def scalaSigFromAttribute(classFile: ClassFile) : Option[ScalaSig] =
+  def scalaSigFromAttribute(classFile: ClassFile): Option[ScalaSig] =
     classFile.attribute(SCALA_SIG).map(_.byteCode).map(ScalaSigAttributeParsers.parse)
 
   def parse(classFile: ClassFile): Option[ScalaSig] = {
@@ -45,7 +48,7 @@ object ScalaSigParser {
     }
   }
 
-  def parse(clazz : Class[_]): Option[ScalaSig] = {
+  def parse(clazz: Class[_]): Option[ScalaSig] = {
     val byteCode  = ByteCode.forClass(clazz)
     val classFile = ClassFileParser.parse(byteCode)
 
@@ -54,10 +57,10 @@ object ScalaSigParser {
 }
 
 object ScalaSigAttributeParsers extends ByteCodeReader  {
-  def parse(byteCode : ByteCode) = expect(scalaSig)(byteCode)
+  def parse(byteCode: ByteCode) = expect(scalaSig)(byteCode)
 
   val nat = apply {
-    def natN(in : ByteCode, x : Int) : Result[ByteCode, Int, Nothing] = in.nextByte match {
+    def natN(in: ByteCode, x: Int): Result[ByteCode, Int, Nothing] = in.nextByte match {
       case Success(out, b) => {
         val y = (x << 7) + (b & 0x7f)
         if ((b & 0x80) == 0) Success(out, y) else natN(out, y)
@@ -76,33 +79,33 @@ object ScalaSigAttributeParsers extends ByteCodeReader  {
   val longValue = read(_ toLong)
 }
 
-case class ScalaSig(majorVersion : Int, minorVersion : Int, table : Seq[Int ~ ByteCode]) extends DefaultMemoisable {
+case class ScalaSig(majorVersion: Int, minorVersion: Int, table: Seq[Int ~ ByteCode]) extends DefaultMemoisable {
 
-  case class Entry(index : Int, entryType : Int, byteCode : ByteCode) extends DefaultMemoisable {
+  case class Entry(index: Int, entryType: Int, byteCode: ByteCode) extends DefaultMemoisable {
     def scalaSig = ScalaSig.this
 
-    def setByteCode(byteCode : ByteCode) = Entry(index, entryType, byteCode)
+    def setByteCode(byteCode: ByteCode) = Entry(index, entryType, byteCode)
   }
 
-  def hasEntry(index : Int) = table isDefinedAt index
+  def hasEntry(index: Int) = table isDefinedAt index
 
-  def getEntry(index : Int) = {
+  def getEntry(index: Int) = {
     val entryType ~ byteCode = table(index)
     Entry(index, entryType, byteCode)
   }
 
-  def parseEntry(index : Int) = applyRule(ScalaSigParsers.parseEntry(ScalaSigEntryParsers.entry)(index))
+  def parseEntry(index: Int) = applyRule(ScalaSigParsers.parseEntry(ScalaSigEntryParsers.entry)(index))
 
-  implicit def applyRule[A](parser : ScalaSigParsers.Parser[A]) = ScalaSigParsers.expect(parser)(this)
+  implicit def applyRule[A](parser: ScalaSigParsers.Parser[A]) = ScalaSigParsers.expect(parser)(this)
 
   override def toString = "ScalaSig version " + majorVersion + "." + minorVersion + {
     for (i <- 0 until table.size) yield i + ":\t" + parseEntry(i) // + "\n\t" + getEntry(i)
   }.mkString("\n", "\n", "")
 
-  lazy val symbols : Seq[Symbol] = ScalaSigParsers.symbols
+  lazy val symbols: Seq[Symbol] = ScalaSigParsers.symbols
 
-  lazy val topLevelClasses : List[ClassSymbol] = ScalaSigParsers.topLevelClasses
-  lazy val topLevelObjects : List[ObjectSymbol] = ScalaSigParsers.topLevelObjects
+  lazy val topLevelClasses: List[ClassSymbol] = ScalaSigParsers.topLevelClasses
+  lazy val topLevelObjects: List[ObjectSymbol] = ScalaSigParsers.topLevelObjects
 }
 
 object ScalaSigParsers extends RulesWithState with MemoisableRules {
@@ -112,14 +115,14 @@ object ScalaSigParsers extends RulesWithState with MemoisableRules {
   val symTab = read(_.table)
   val size = symTab ^^ (_.size)
 
-  def entry(index : Int) = memo(("entry", index)) {
+  def entry(index: Int) = memo(("entry", index)) {
     cond(_ hasEntry index) -~ read(_ getEntry index) >-> { entry => Success(entry, entry.entryType) }
   }
 
-  def parseEntry[A](parser : ScalaSigEntryParsers.EntryParser[A])(index : Int) : Parser[A] =
+  def parseEntry[A](parser: ScalaSigEntryParsers.EntryParser[A])(index: Int): Parser[A] =
     entry(index) -~ parser >> { a => entry => Success(entry.scalaSig, a) }
 
-  def allEntries[A](f : ScalaSigEntryParsers.EntryParser[A]) = size >> { n => anyOf((0 until n) map parseEntry(f)) }
+  def allEntries[A](f: ScalaSigEntryParsers.EntryParser[A]) = size >> { n => anyOf((0 until n) map parseEntry(f)) }
 
   lazy val entries = allEntries(ScalaSigEntryParsers.entry) as "entries"
   lazy val symbols = allEntries(ScalaSigEntryParsers.symbol) as "symbols"
@@ -136,20 +139,20 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
   type S = ScalaSig#Entry
   type EntryParser[A] = Rule[A, String]
 
-  implicit def byteCodeEntryParser[A](rule : ScalaSigAttributeParsers.Parser[A]) : EntryParser[A] = apply { entry =>
+  implicit def byteCodeEntryParser[A](rule: ScalaSigAttributeParsers.Parser[A]): EntryParser[A] = apply { entry =>
     rule(entry.byteCode) mapOut (entry setByteCode _)
   }
 
-  def toEntry[A](index : Int) = apply { sigEntry => ScalaSigParsers.entry(index)(sigEntry.scalaSig) }
+  def toEntry[A](index: Int) = apply { sigEntry => ScalaSigParsers.entry(index)(sigEntry.scalaSig) }
 
-  def parseEntry[A](parser : EntryParser[A])(index : Int) = (toEntry(index) -~ parser)
+  def parseEntry[A](parser: EntryParser[A])(index: Int) = (toEntry(index) -~ parser)
 
-  implicit def entryType(code : Int) = key filter (_ == code)
+  implicit def entryType(code: Int) = key filter (_ == code)
 
   val index = read(_.index)
   val key = read(_.entryType)
 
-  lazy val entry : EntryParser[Any] = symbol | typeEntry | literal | name | attributeInfo | annotInfo | children | get
+  lazy val entry: EntryParser[Any] = symbol | typeEntry | literal | name | attributeInfo | annotInfo | children | get
 
   val ref = byteCodeEntryParser(nat)
 
@@ -158,7 +161,7 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
 
   val name = termName | typeName as "name"
 
-  def refTo[A](rule : EntryParser[A]) : EntryParser[A] = ref >>& parseEntry(rule)
+  def refTo[A](rule: EntryParser[A]): EntryParser[A] = ref >>& parseEntry(rule)
 
   lazy val nameRef = refTo(name)
   lazy val symbolRef = refTo(symbol)
@@ -167,57 +170,10 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
 
   val symbolInfo = nameRef ~ symbolRef ~ nat ~ (symbolRef?) ~ ref ~ get ^~~~~~^ SymbolInfo
 
-  def symHeader(key: Int) = (key -~ none | (key + 64) -~ nat)
+  def symHeader(key: Int): EntryParser[Any] = (key -~ none | (key + 64) -~ nat)
 
-  def symbolEntry(key : Int) = symHeader(key) -~ symbolInfo
+  def symbolEntry(key: Int) = symHeader(key) -~ symbolInfo
 
-  /***************************************************
-   * Symbol table attribute format:
-   *   Symtab         = nentries_Nat {Entry}
-   *   Entry          = 1 TERMNAME len_Nat NameInfo
-   *                  | 2 TYPENAME len_Nat NameInfo
-   *                  | 3 NONEsym len_Nat
-   *                  | 4 TYPEsym len_Nat SymbolInfo
-   *                  | 5 ALIASsym len_Nat SymbolInfo
-   *                  | 6 CLASSsym len_Nat SymbolInfo [thistype_Ref]
-   *                  | 7 MODULEsym len_Nat SymbolInfo
-   *                  | 8 VALsym len_Nat [defaultGetter_Ref /* no longer needed*/] SymbolInfo [alias_Ref]
-   *                  | 9 EXTref len_Nat name_Ref [owner_Ref]
-   *                  | 10 EXTMODCLASSref len_Nat name_Ref [owner_Ref]
-   *                  | 11 NOtpe len_Nat
-   *                  | 12 NOPREFIXtpe len_Nat
-   *                  | 13 THIStpe len_Nat sym_Ref
-   *                  | 14 SINGLEtpe len_Nat type_Ref sym_Ref
-   *                  | 15 CONSTANTtpe len_Nat constant_Ref
-   *                  | 16 TYPEREFtpe len_Nat type_Ref sym_Ref {targ_Ref}
-   *                  | 17 TYPEBOUNDStpe len_Nat tpe_Ref tpe_Ref
-   *                  | 18 REFINEDtpe len_Nat classsym_Ref {tpe_Ref}
-   *                  | 19 CLASSINFOtpe len_Nat classsym_Ref {tpe_Ref}
-   *                  | 20 METHODtpe len_Nat tpe_Ref {sym_Ref}
-   *                  | 21 POLYTtpe len_Nat tpe_Ref {sym_Ref}
-   *                  | 22 IMPLICITMETHODtpe len_Nat tpe_Ref {sym_Ref} /* no longer needed */
-   *                  | 52 SUPERtpe len_Nat tpe_Ref tpe_Ref
-   *                  | 24 LITERALunit len_Nat
-   *                  | 25 LITERALboolean len_Nat value_Long
-   *                  | 26 LITERALbyte len_Nat value_Long
-   *                  | 27 LITERALshort len_Nat value_Long
-   *                  | 28 LITERALchar len_Nat value_Long
-   *                  | 29 LITERALint len_Nat value_Long
-   *                  | 30 LITERALlong len_Nat value_Long
-   *                  | 31 LITERALfloat len_Nat value_Long
-   *                  | 32 LITERALdouble len_Nat value_Long
-   *                  | 33 LITERALstring len_Nat name_Ref
-   *                  | 34 LITERALnull len_Nat
-   *                  | 35 LITERALclass len_Nat tpe_Ref
-   *                  | 36 LITERALenum len_Nat sym_Ref
-   *                  | 40 SYMANNOT len_Nat sym_Ref AnnotInfoBody
-   *                  | 41 CHILDREN len_Nat sym_Ref {sym_Ref}
-   *                  | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref}
-   *                  | 43 ANNOTINFO len_Nat AnnotInfoBody
-   *                  | 44 ANNOTARGARRAY len_Nat {constAnnotArg_Ref}
-   *                  | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat
-   *                  | 48 EXISTENTIALtpe len_Nat type_Ref {symbol_Ref}
-   */
   val noSymbol = 3 -^ NoSymbol
   val typeSymbol = symbolEntry(4) ^^ TypeSymbol as "typeSymbol"
   val aliasSymbol = symbolEntry(5) ^^ AliasSymbol as "alias"
@@ -227,7 +183,7 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
   val extRef = 9 -~ nameRef ~ (symbolRef?) ~ get ^~~^ ExternalSymbol as "extRef"
   val extModClassRef = 10 -~ nameRef ~ (symbolRef?) ~ get ^~~^ ExternalSymbol as "extModClassRef"
 
-  lazy val symbol : EntryParser[Symbol] = oneOf(
+  lazy val symbol: EntryParser[Symbol] = oneOf(
       noSymbol,
       typeSymbol,
       aliasSymbol,
@@ -242,7 +198,7 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
   val typeLevel = nat
   val typeIndex = nat
 
-  lazy val typeEntry : EntryParser[Type] = oneOf(
+  lazy val typeEntry: EntryParser[Type] = oneOf(
       11 -^ NoType,
       12 -^ NoPrefixType,
       13 -~ symbolRef ^^ ThisType,
@@ -260,10 +216,9 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
       22 -~ typeRef ~ (symbolRef*) ^~^ MethodType,
       42 -~ typeRef ~ (attribTreeRef*) ^~^ AnnotatedType,
       51 -~ typeRef ~ symbolRef ~ (attribTreeRef*) ^~~^ AnnotatedWithSelfType,
-      47 -~ typeLevel ~ typeIndex ^~^ DeBruijnIndexType,
       48 -~ typeRef ~ (symbolRef*) ^~^ ExistentialType) as "type"
 
-  lazy val literal = oneOf(
+  lazy val literal: EntryParser[Any] = oneOf(
       24 -^ (()),
       25 -~ longValue ^^ (_ != 0L),
       26 -~ longValue ^^ (_.toByte),
@@ -284,17 +239,17 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
   lazy val topLevelClass = classSymbol filter isTopLevelClass
   lazy val topLevelObject = objectSymbol filter isTopLevel
 
-  def isTopLevel(symbol : Symbol) = symbol.parent match {
-    case Some(ext : ExternalSymbol) => true
+  def isTopLevel(symbol: Symbol) = symbol.parent match {
+    case Some(ext: ExternalSymbol) => true
     case _ => false
   }
-  def isTopLevelClass (symbol : Symbol) = !symbol.isModule && isTopLevel(symbol)
+  def isTopLevelClass (symbol: Symbol) = !symbol.isModule && isTopLevel(symbol)
 }
 
-  case class AttributeInfo(symbol : Symbol, typeRef : Type, value : Option[Any], values : Seq[String ~ Any]) // sym_Ref info_Ref {constant_Ref} {nameRef constantRef}
-  case class Children(symbolRefs : Seq[Int]) //sym_Ref {sym_Ref}
+case class AttributeInfo(symbol: Symbol, typeRef: Type, value: Option[Any], values: Seq[String ~ Any]) // sym_Ref info_Ref {constant_Ref} {nameRef constantRef}
+case class Children(symbolRefs: Seq[Int]) //sym_Ref {sym_Ref}
 
-  case class AnnotInfo(refs : Seq[Int]) // attarg_Ref {constant_Ref attarg_Ref}
+case class AnnotInfo(refs: Seq[Int]) // attarg_Ref {constant_Ref attarg_Ref}
 
   /***************************************************
    *                  | 49 TREE len_Nat 1 EMPTYtree
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
index cfe615a..dd17c39 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
@@ -11,6 +11,8 @@ package scalax
 package rules
 package scalasig
 
+import language.postfixOps
+
 import java.io.{PrintStream, ByteArrayOutputStream}
 import java.util.regex.Pattern
 import scala.tools.scalap.scalax.util.StringUtil
@@ -70,7 +72,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
   }
 
   def isCaseClassObject(o: ObjectSymbol): Boolean = {
-    val TypeRefType(prefix, classSymbol: ClassSymbol, typeArgs) = o.infoType
+    val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType
     o.isFinal && (classSymbol.children.find(x => x.isCase && x.isInstanceOf[MethodSymbol]) match {
       case Some(_) => true
       case None => false
@@ -136,7 +138,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
       print(" {")
       //Print class selftype
       c.selfType match {
-        case Some(t: Type) => print("\n"); print(" this : " + toString(t) + " =>")
+        case Some(t: Type) => print("\n"); print(" this: " + toString(t) + " =>")
         case None =>
       }
       print("\n")
@@ -167,7 +169,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
     print("object ")
     val poName = o.symbolInfo.owner.name
     print(processName(poName))
-    val TypeRefType(prefix, classSymbol: ClassSymbol, typeArgs) = o.infoType
+    val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType
     printType(classSymbol)
     print(" {\n")
     printChildren(level, classSymbol)
@@ -179,29 +181,19 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
     printModifiers(o)
     print("object ")
     print(processName(o.name))
-    val TypeRefType(prefix, classSymbol: ClassSymbol, typeArgs) = o.infoType
+    val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType
     printType(classSymbol)
     print(" {\n")
     printChildren(level, classSymbol)
     printWithIndent(level, "}\n")
   }
 
-  def genParamNames(t: {def paramTypes: Seq[Type]}): List[String] = t.paramTypes.toList.map(x => {
-    var str = toString(x)
-    val j = str.indexOf("[")
-    if (j > 0) str = str.substring(0, j)
-    str = StringUtil.trimStart(str, "=> ")
-    var i = str.lastIndexOf(".")
-    val res = if (i > 0) str.substring(i + 1) else str
-    if (res.length > 1) StringUtil.decapitalize(res.substring(0, 1)) else res.toLowerCase
-  })
-
   def printMethodType(t: Type, printResult: Boolean)(cont: => Unit): Unit = {
 
-    def _pmt(mt: Type {def resultType: Type; def paramSymbols: Seq[Symbol]}) = {
+    def _pmt(mt: MethodType) = {
 
       val paramEntries = mt.paramSymbols.map({
-        case ms: MethodSymbol => ms.name + " : " + toString(ms.infoType)(TypeFlags(true))
+        case ms: MethodSymbol => ms.name + ": " + toString(ms.infoType)(TypeFlags(true))
         case _ => "^___^"
       })
       val implicitWord = mt.paramSymbols.headOption match {
@@ -216,21 +208,21 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
       mt.resultType match {
         case mt: MethodType => printMethodType(mt, printResult)({})
         case x => if (printResult) {
-          print(" : ");
+          print(": ");
           printType(x)
         }
       }
     }
 
     t match {
-      case NullaryMethodType(resType) => if (printResult) { print(" : "); printType(resType) }
+      case NullaryMethodType(resType) => if (printResult) { print(": "); printType(resType) }
       case mt @ MethodType(resType, paramSymbols) => _pmt(mt)
       case pt @ PolyType(mt, typeParams) => {
         print(typeParamString(typeParams))
         printMethodType(mt, printResult)({})
       }
       //todo consider another method types
-      case x => print(" : "); printType(x)
+      case x => print(": "); printType(x)
     }
 
     // Print rest of the symbol output
@@ -381,7 +373,6 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
         toString(typeRef, sep)
       }
       case AnnotatedWithSelfType(typeRef, symbol, attribTreeRefs) => toString(typeRef, sep)
-      //case DeBruijnIndexType(typeLevel, typeIndex) =>
       case ExistentialType(typeRef, symbols) => {
         val refs = symbols.map(toString _).filter(!_.startsWith("_")).map("type " + _)
         toString(typeRef, sep) + (if (refs.size > 0) refs.mkString(" forSome {", "; ", "}") else "")
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala
index dee1cf8..6c38687 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala
@@ -6,39 +6,39 @@ package scalasig
 import ScalaSigEntryParsers._
 
 trait Symbol extends Flags {
-  def name : String
-  def parent : Option[Symbol]
-  def children : Seq[Symbol]
+  def name: String
+  def parent: Option[Symbol]
+  def children: Seq[Symbol]
 
-  def path : String = parent.map(_.path + ".").getOrElse("") + name
+  def path: String = parent.map(_.path + ".").getOrElse("") + name
 }
 
 case object NoSymbol extends Symbol {
   def name = "<no symbol>"
   def parent = None
-  def hasFlag(flag : Long) = false
+  def hasFlag(flag: Long) = false
   def children = Nil
 }
 
 abstract class ScalaSigSymbol extends Symbol {
-  def applyRule[A](rule : EntryParser[A]) : A = expect(rule)(entry)
-  def applyScalaSigRule[A](rule : ScalaSigParsers.Parser[A]) = ScalaSigParsers.expect(rule)(entry.scalaSig)
+  def applyRule[A](rule: EntryParser[A]): A = expect(rule)(entry)
+  def applyScalaSigRule[A](rule: ScalaSigParsers.Parser[A]) = ScalaSigParsers.expect(rule)(entry.scalaSig)
 
-  def entry : ScalaSig#Entry
+  def entry: ScalaSig#Entry
   def index = entry.index
 
-  lazy val children : Seq[Symbol] = applyScalaSigRule(ScalaSigParsers.symbols) filter (_.parent == Some(this))
-  lazy val attributes : Seq[AttributeInfo] = applyScalaSigRule(ScalaSigParsers.attributes) filter (_.symbol == this)
+  lazy val children: Seq[Symbol] = applyScalaSigRule(ScalaSigParsers.symbols) filter (_.parent == Some(this))
+  lazy val attributes: Seq[AttributeInfo] = applyScalaSigRule(ScalaSigParsers.attributes) filter (_.symbol == this)
 }
 
-case class ExternalSymbol(name : String, parent : Option[Symbol], entry : ScalaSig#Entry) extends ScalaSigSymbol {
+case class ExternalSymbol(name: String, parent: Option[Symbol], entry: ScalaSig#Entry) extends ScalaSigSymbol {
   override def toString = path
-  def hasFlag(flag : Long) = false
+  def hasFlag(flag: Long) = false
 }
 
-case class SymbolInfo(name : String, owner : Symbol, flags : Int, privateWithin : Option[AnyRef], info : Int, entry : ScalaSig#Entry) {
-  def symbolString(any : AnyRef) = any match {
-    case sym : SymbolInfoSymbol => sym.index.toString
+case class SymbolInfo(name: String, owner: Symbol, flags: Int, privateWithin: Option[AnyRef], info: Int, entry: ScalaSig#Entry) {
+  def symbolString(any: AnyRef) = any match {
+    case sym: SymbolInfoSymbol => sym.index.toString
     case other => other.toString
   }
 
@@ -49,25 +49,25 @@ case class SymbolInfo(name : String, owner : Symbol, flags : Int, privateWithin
 }
 
 abstract class SymbolInfoSymbol extends ScalaSigSymbol {
-  def symbolInfo : SymbolInfo
+  def symbolInfo: SymbolInfo
 
   def entry = symbolInfo.entry
   def name = symbolInfo.name
   def parent = Some(symbolInfo.owner)
-  def hasFlag(flag : Long) = (symbolInfo.flags & flag) != 0L
+  def hasFlag(flag: Long) = (symbolInfo.flags & flag) != 0L
 
   lazy val infoType = applyRule(parseEntry(typeEntry)(symbolInfo.info))
 }
 
-case class TypeSymbol(symbolInfo : SymbolInfo) extends SymbolInfoSymbol{
+case class TypeSymbol(symbolInfo: SymbolInfo) extends SymbolInfoSymbol{
   override def path = name
 }
 
-case class AliasSymbol(symbolInfo : SymbolInfo) extends SymbolInfoSymbol{
+case class AliasSymbol(symbolInfo: SymbolInfo) extends SymbolInfoSymbol{
   override def path = name
 }
-case class ClassSymbol(symbolInfo : SymbolInfo, thisTypeRef : Option[Int]) extends SymbolInfoSymbol {
+case class ClassSymbol(symbolInfo: SymbolInfo, thisTypeRef: Option[Int]) extends SymbolInfoSymbol {
   lazy val selfType = thisTypeRef.map{(x: Int) => applyRule(parseEntry(typeEntry)(x))}
 }
-case class ObjectSymbol(symbolInfo : SymbolInfo) extends SymbolInfoSymbol
-case class MethodSymbol(symbolInfo : SymbolInfo, aliasRef : Option[Int]) extends SymbolInfoSymbol
+case class ObjectSymbol(symbolInfo: SymbolInfo) extends SymbolInfoSymbol
+case class MethodSymbol(symbolInfo: SymbolInfo, aliasRef: Option[Int]) extends SymbolInfoSymbol
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
index 543ddbe..22d9032 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
@@ -8,19 +8,18 @@ abstract class Type
 case object NoType extends Type
 case object NoPrefixType extends Type
 
-case class ThisType(symbol : Symbol) extends Type
-case class SingleType(typeRef : Type, symbol : Symbol) extends Type
-case class ConstantType(constant : Any) extends Type
-case class TypeRefType(prefix : Type, symbol : Symbol, typeArgs : Seq[Type]) extends Type
-case class TypeBoundsType(lower : Type, upper : Type) extends Type
-case class RefinedType(classSym : Symbol, typeRefs : List[Type]) extends Type
-case class ClassInfoType(symbol : Symbol, typeRefs : Seq[Type]) extends Type
-case class ClassInfoTypeWithCons(symbol : Symbol, typeRefs : Seq[Type], cons: String) extends Type
-case class MethodType(resultType : Type, paramSymbols : Seq[Symbol]) extends Type
-case class NullaryMethodType(resultType : Type) extends Type
-case class PolyType(typeRef : Type, symbols : Seq[TypeSymbol]) extends Type
-case class PolyTypeWithCons(typeRef : Type, symbols : Seq[TypeSymbol], cons: String) extends Type
-case class AnnotatedType(typeRef : Type, attribTreeRefs : List[Int]) extends Type
-case class AnnotatedWithSelfType(typeRef : Type, symbol : Symbol, attribTreeRefs : List[Int]) extends Type
-case class DeBruijnIndexType(typeLevel : Int, typeIndex : Int) extends Type
-case class ExistentialType(typeRef : Type, symbols : Seq[Symbol]) extends Type
+case class ThisType(symbol: Symbol) extends Type
+case class SingleType(typeRef: Type, symbol: Symbol) extends Type
+case class ConstantType(constant: Any) extends Type
+case class TypeRefType(prefix: Type, symbol: Symbol, typeArgs: Seq[Type]) extends Type
+case class TypeBoundsType(lower: Type, upper: Type) extends Type
+case class RefinedType(classSym: Symbol, typeRefs: List[Type]) extends Type
+case class ClassInfoType(symbol: Symbol, typeRefs: Seq[Type]) extends Type
+case class ClassInfoTypeWithCons(symbol: Symbol, typeRefs: Seq[Type], cons: String) extends Type
+case class MethodType(resultType: Type, paramSymbols: Seq[Symbol]) extends Type
+case class NullaryMethodType(resultType: Type) extends Type
+case class PolyType(typeRef: Type, symbols: Seq[TypeSymbol]) extends Type
+case class PolyTypeWithCons(typeRef: Type, symbols: Seq[TypeSymbol], cons: String) extends Type
+case class AnnotatedType(typeRef: Type, attribTreeRefs: List[Int]) extends Type
+case class AnnotatedWithSelfType(typeRef: Type, symbol: Symbol, attribTreeRefs: List[Int]) extends Type
+case class ExistentialType(typeRef: Type, symbols: Seq[Symbol]) extends Type
diff --git a/src/swing/doc/README b/src/swing/doc/README
deleted file mode 100644
index cdfee01..0000000
--- a/src/swing/doc/README
+++ /dev/null
@@ -1,39 +0,0 @@
-scala.swing BETA
-
-This is a UI library that will wrap most of Java Swing for Scala in a straightforward manner. 
-The widget class hierarchy loosely resembles that of Java Swing. The main differences are:
-
-  In Java Swing all components are containers per default. This doesn't make much sense for 
-  a number of components, like TextField, CheckBox, RadioButton, and so on. Our guess is that 
-  this architecture was chosen because Java lacks multiple inheritance. 
-  In scala.swing, components that can have child components extend the Container trait.
-  
-  Layout managers and panels are coupled. There is no way to exchange the layout manager 
-  of a panel. As a result, the layout constraints for widgets can be typed. 
-  (Note that you gain more type-safety and don't loose much flexibility here. Besides 
-  being not a common operation, exchanging the layout manager of a panel in Java 
-  Swing almost always leads to exchanging the layout constraints for every of the panel's 
-  child component. In the end, it is not more work to move all children to a newly created 
-  panel.)
-   
-  The event system. TODO
-  
-
-The library comprises three main packages:
-
-  scala.swing
-    All widget classes and traits.
-    
-  scala.swing.event
-    The event hierarchy.
-
-  scala.swing.test
-    A set of demos.
-    
-
-Notes:
-
-Visual appearance of combo boxes using the GTK LaF is broken on JDKs < 1.7b30. 
-This is a Java Swing problem.
-
-To download the latest version, go to http://lamp.epfl.ch/~imaier or use sbaz.
diff --git a/src/swing/doc/build.xml b/src/swing/doc/build.xml
deleted file mode 100644
index 20e1411..0000000
--- a/src/swing/doc/build.xml
+++ /dev/null
@@ -1,83 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="swing" default="build">
-
-  <property name="scala.home" value="${user.home}/apps/scala-recent"/>
-
-  <property file="swing/swing.version.properties"/>
-  <property name="version" value="${version.major}.${version.minor}"/>
-	
-  <taskdef
-      resource="scala/tools/ant/antlib.xml">
-    <classpath>
-      <pathelement location="${scala.home}/lib/scala-compiler.jar"/>
-      <pathelement location="${scala.home}/lib/scala-library.jar"/>
-      <pathelement location="${scala.home}/lib/junit.jar"/>
-    </classpath>
-  </taskdef>
-
-
-  <target name="build">
-    <mkdir dir="build/build.main"/>
-    <scalac srcdir="swing"
-            destdir="build/build.main">
-      <classpath>
-        <pathelement location="${scala.home}/lib/scala-library.jar"/>
-        <pathelement location="${scala.home}/lib/scala-compiler.jar"/>
-      </classpath>
-      <include name="**/*.scala"/>
-    </scalac>
-
-    <jar destfile="build/scala-swing.jar">
-    	<fileset dir="build/build.main"/>
-    	<fileset file="swing.version.properties"/>
-    </jar>
-    <jar destfile="build/scala-swing-src.jar"
-      basedir="swing"
-      includes="**/*.scala"
-    />
-  </target>
-	
-  <!--<target name="build.tests" depends="build">
-    <mkdir dir="build/build.tests"/>
-    <scalac srcdir="tests" destdir="build/build.tests">
-        <classpath>
-          <pathelement location="${scala.home}/lib/scala-library.jar"/>
-          <pathelement location="${scala.home}/lib/scala-compiler.jar"/>
-          <pathelement location="${scala.home}/lib/junit.jar"/>
-          <pathelement location="build/swing.jar"/>
-        </classpath>
-    </scalac>
-  	
-    <jar destfile="build/swing-tests.jar" basedir="build/build.tests"/>
-  </target>-->
-
-  <target name="dist" depends="build">
-    <mkdir dir="build"/>
-    <sbaz file="build/scala-swing-${version}.sbp"
-          adfile="build/scala-swing-${version}.advert"
-          name="scala-swing"
-          version="${version}"
-	  depends="scala"
-          desc="A Scala Swing library"
-	  link="http://scala.epfl.ch/downloads/packages/scala-swing-${version}.sbp">
-      <libset dir="build" includes="scala-swing.jar"/>
-      <libset dir="build" includes="scala-swing-tests.jar"/>
-      <srcset dir="build" includes="scala-swing-src.jar"/>
-      <docset dir="swing/doc" includes="Manual.txt"/>
-    </sbaz>
-  </target>
-
-  <target name="install" depends="dist">
-    <exec executable="sbaz">
-      <arg line="-v install -f build/scala-swing-${version}.sbp"/>
-    </exec>
-  </target>
-
-  <target name="clean">
-    <delete dir="build"
-            includeemptydirs="yes"
-            quiet="yes"
-            failonerror="no"/>
-  </target>
-</project>
diff --git a/src/swing/scala/swing/AbstractButton.scala b/src/swing/scala/swing/AbstractButton.scala
deleted file mode 100644
index fd84d6f..0000000
--- a/src/swing/scala/swing/AbstractButton.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-import javax.swing.{AbstractButton => JAbstractButton, Icon}
-
-/**
- * Base class of all button-like widgets, such as push buttons,
- * check boxes, and radio buttons.
- *
- * @see javax.swing.AbstractButton
- */
-abstract class AbstractButton extends Component with Action.Trigger.Wrapper with Publisher {
-  override lazy val peer: JAbstractButton = new JAbstractButton with SuperMixin {}
-
-  def text: String = peer.getText
-  def text_=(s: String) = peer.setText(s)
-
-  def icon: Icon = peer.getIcon
-  def icon_=(i: Icon) = peer.setIcon(i)
-  def pressedIcon: Icon = peer.getPressedIcon
-  def pressedIcon_=(i: Icon) = peer.setPressedIcon(i)
-  def selectedIcon: Icon = peer.getSelectedIcon
-  def selectedIcon_=(i: Icon) = peer.setSelectedIcon(i)
-  def disabledIcon: Icon = peer.getDisabledIcon
-  def disabledIcon_=(i: Icon) = peer.setDisabledIcon(i)
-  def disabledSelectedIcon: Icon = peer.getDisabledSelectedIcon
-  def disabledSelectedIcon_=(i: Icon) = peer.setDisabledSelectedIcon(i)
-  def rolloverIcon: Icon = peer.getRolloverIcon
-  def rolloverIcon_=(b: Icon) = peer.setRolloverIcon(b)
-  def rolloverSelectedIcon: Icon = peer.getRolloverSelectedIcon
-  def rolloverSelectedIcon_=(b: Icon) = peer.setRolloverSelectedIcon(b)
-
-  peer.addActionListener(Swing.ActionListener { e =>
-    publish(ButtonClicked(AbstractButton.this))
-  })
-
-  def selected: Boolean = peer.isSelected
-  def selected_=(b: Boolean) = peer.setSelected(b)
-
-  def contentAreaFilled: Boolean = peer.isContentAreaFilled
-  def contentAreaFilled_=(b: Boolean) { peer.setContentAreaFilled(b) }
-
-  def borderPainted: Boolean = peer.isBorderPainted
-  def borderPainted_=(b: Boolean) { peer.setBorderPainted(b) }
-  def focusPainted: Boolean = peer.isFocusPainted
-  def focusPainted_=(b: Boolean) { peer.setFocusPainted(b) }
-
-  def rolloverEnabled: Boolean = peer.isRolloverEnabled
-  def rolloverEnabled_=(b: Boolean) = peer.setRolloverEnabled(b)
-
-  def verticalTextPosition: Alignment.Value = Alignment(peer.getVerticalTextPosition)
-  def verticalTextPosition_=(a: Alignment.Value) { peer.setVerticalTextPosition(a.id) }
-  def verticalAlignment: Alignment.Value = Alignment(peer.getVerticalAlignment)
-  def verticalAlignment_=(a: Alignment.Value) { peer.setVerticalAlignment(a.id) }
-
-  def horizontalTextPosition: Alignment.Value = Alignment(peer.getHorizontalTextPosition)
-  def horizontalTextPosition_=(a: Alignment.Value) { peer.setHorizontalTextPosition(a.id) }
-  def horizontalAlignment: Alignment.Value = Alignment(peer.getHorizontalAlignment)
-  def horizontalAlignment_=(a: Alignment.Value) { peer.setHorizontalAlignment(a.id) }
-
-  def iconTextGap: Int = peer.getIconTextGap
-  def iconTextGap_=(x: Int) { peer.setIconTextGap(x) }
-
-  def mnemonic: Key.Value = Key(peer.getMnemonic)
-  def mnemonic_=(k: Key.Value) { peer.setMnemonic(k.id) }
-  def displayedMnemonicIndex: Int = peer.getDisplayedMnemonicIndex
-  def displayedMnemonicIndex_=(n: Int) { peer.setDisplayedMnemonicIndex(n) }
-
-  def multiClickThreshold: Long = peer.getMultiClickThreshhold
-  def multiClickThreshold_=(n: Long) { peer.setMultiClickThreshhold(n) }
-
-  def doClick() { peer.doClick() }
-  def doClick(times: Int) { peer.doClick(times) }
-
-  def margin: Insets = peer.getMargin
-  def margin_=(i: Insets) { peer.setMargin(i) }
-}
diff --git a/src/swing/scala/swing/Action.scala b/src/swing/scala/swing/Action.scala
deleted file mode 100644
index 8740f63..0000000
--- a/src/swing/scala/swing/Action.scala
+++ /dev/null
@@ -1,157 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import javax.swing.{KeyStroke, Icon}
-import java.awt.event.ActionListener
-
-object Action {
-  /**
-   * Special action that has an empty title and all default properties and does nothing.
-   * Use this as a "null action", i.e., to tell components that they do not have any
-   * associated action. A component may then obtain its properties from its direct members
-   * instead of from its action.
-   * In Java Swing, one would use `null` instead of a designated action.
-   */
-  case object NoAction extends Action("") { def apply() {} }
-
-  object Trigger {
-    trait Wrapper extends Action.Trigger {
-      def peer: javax.swing.JComponent {
-        def addActionListener(a: ActionListener)
-        def removeActionListener(a: ActionListener)
-        def setAction(a: javax.swing.Action)
-        def getAction(): javax.swing.Action
-      }
-
-      // TODO: we need an action cache
-      private var _action: Action = Action.NoAction
-      def action: Action = _action
-      def action_=(a: Action) { _action = a; peer.setAction(a.peer) }
-
-      //1.6: def hideActionText: Boolean = peer.getHideActionText
-      //def hideActionText_=(b: Boolean) = peer.setHideActionText(b)
-    }
-  }
-
-  /**
-   * Something that triggers an action.
-   */
-  trait Trigger {
-    def action: Action
-    def action_=(a: Action)
-
-    //1.6: def hideActionText: Boolean
-    //def hideActionText_=(b: Boolean)
-  }
-
-  /**
-   * Convenience method to create an action with a given title and body to run.
-   */
-  def apply(title: String)(body: =>Unit) = new Action(title) {
-    def apply() { body }
-  }
-}
-
-/**
- * An abstract action to be performed in reaction to user input.
- *
- * Not every action component will honor every property of its action.
- * An action itself can generally be configured so that certain properties
- * should be ignored and instead taken from the component directly. In the
- * end, it is up to a component which property it uses in which way.
- *
- * @see javax.swing.Action
- */
-abstract class Action(title0: String) {
-  import Swing._
-
-  lazy val peer: javax.swing.Action = new javax.swing.AbstractAction(title0) {
-    def actionPerformed(a: java.awt.event.ActionEvent) = apply()
-  }
-
-  /**
-   * Title is not optional.
-   */
-  def title: String = ifNull(peer.getValue(javax.swing.Action.NAME),"")
-  def title_=(t: String) { peer.putValue(javax.swing.Action.NAME, t) }
-
-  /**
-   * None if large icon and small icon are not equal.
-   */
-  def icon: Icon = smallIcon //if(largeIcon == smallIcon) largeIcon else None
-  def icon_=(i: Icon) { /*largeIcon = i;*/ smallIcon = i }
-  // 1.6: def largeIcon: Icon = toNoIcon(peer.getValue(javax.swing.Action.LARGE_ICON_KEY).asInstanceOf[Icon])
-  // def largeIcon_=(i: Icon) { peer.putValue(javax.swing.Action.LARGE_ICON_KEY, toNullIcon(i)) }
-  def smallIcon: Icon = toNoIcon(peer.getValue(javax.swing.Action.SMALL_ICON).asInstanceOf[Icon])
-  def smallIcon_=(i: Icon) { peer.putValue(javax.swing.Action.SMALL_ICON, toNullIcon(i)) }
-
-  /**
-   * For all components.
-   */
-  def toolTip: String =
-    ifNull(peer.getValue(javax.swing.Action.SHORT_DESCRIPTION), "")
-  def toolTip_=(t: String) {
-    peer.putValue(javax.swing.Action.SHORT_DESCRIPTION, t)
-  }
-  /**
-   * Can be used for status bars, for example.
-   */
-  def longDescription: String =
-    ifNull(peer.getValue(javax.swing.Action.LONG_DESCRIPTION), "")
-  def longDescription_=(t: String) {
-    peer.putValue(javax.swing.Action.LONG_DESCRIPTION, t)
-  }
-
-  /**
-   * Default: java.awt.event.KeyEvent.VK_UNDEFINED, i.e., no mnemonic key.
-   * For all buttons and thus menu items.
-   */
-  def mnemonic: Int = ifNull(peer.getValue(javax.swing.Action.MNEMONIC_KEY),
-                             java.awt.event.KeyEvent.VK_UNDEFINED)
-  def mnemonic_=(m: Int) { peer.putValue(javax.swing.Action.MNEMONIC_KEY, m) }
-
-  /*/**
-   * Indicates which character of the title should be underlined to indicate the mnemonic key.
-   * Ignored if out of bounds of the title string. Default: -1, i.e., ignored.
-   * For all buttons and thus menu items.
-   */
-   1.6: def mnemonicIndex: Int =
-   ifNull(peer.getValue(javax.swing.Action.DISPLAYED_MNEMONIC_INDEX_KEY), -1)
-   def mnemonicIndex_=(n: Int) { peer.putValue(javax.swing.Action.DISPLAYED_MNEMONIC_INDEX_KEY, n) }
-  */
-
-  /**
-   * For menus.
-   */
-  def accelerator: Option[KeyStroke] =
-    toOption(peer.getValue(javax.swing.Action.ACCELERATOR_KEY))
-  def accelerator_=(k: Option[KeyStroke]) {
-    peer.putValue(javax.swing.Action.ACCELERATOR_KEY, k.orNull)
-  }
-
-  /**
-   * For all components.
-   */
-  def enabled: Boolean = peer.isEnabled
-  def enabled_=(b: Boolean) { peer.setEnabled(b) }
-
-  /*/**
-   * Only honored if not <code>None</code>. For various buttons.
-   */
-   1.6: def selected: Option[Boolean] = Option(peer.getValue(javax.swing.Action.SELECTED_KEY))
-   def selected_=(b: Option[Boolean]) {
-   peer.putValue(javax.swing.Action.SELECTED_KEY,
-                 if (b == None) null else new java.lang.Boolean(b.get))
-  }*/
-
-  def apply()
-}
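
    For context, a minimal usage sketch of the Action API removed above: the
    companion's apply factory builds an action from a title and a body, and a
    Button can adopt it. The names ActionDemo and greet are illustrative, not
    part of this patch.

    import scala.swing._

    object ActionDemo extends SimpleSwingApplication {
      // Build an action from a title and a click handler.
      val greet = Action("Greet") { println("Hello from an Action") }

      def top = new MainFrame {
        title    = "Action demo"
        contents = new Button(greet)   // the button adopts the action's title and behaviour
      }
    }
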
diff --git a/src/swing/scala/swing/Adjustable.scala b/src/swing/scala/swing/Adjustable.scala
deleted file mode 100644
index 590153f..0000000
--- a/src/swing/scala/swing/Adjustable.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-package scala.swing
-
-import java.awt.{Adjustable => JAdjustable}
-
-object Adjustable {
-  trait Wrapper extends Oriented.Wrapper with Adjustable {
-    def peer: JAdjustable with OrientedMixin
-
-    def unitIncrement = peer.getUnitIncrement
-    def unitIncrement_=(i: Int) = peer.setUnitIncrement(i)
-    def blockIncrement = peer.getBlockIncrement
-    def blockIncrement_=(i: Int) = peer.setBlockIncrement(i)
-
-    def value = peer.getValue
-    def value_=(v: Int) = peer.setValue(v)
-
-    def visibleAmount = peer.getVisibleAmount
-    def visibleAmount_=(v: Int) = peer.setVisibleAmount(v)
-
-    def minimum = peer.getMinimum
-    def minimum_=(m: Int) = peer.setMinimum(m)
-    def maximum = peer.getMaximum
-    def maximum_=(m: Int) = peer.setMaximum(m)
-  }
-}
-
-trait Adjustable extends Oriented {
-  def unitIncrement: Int
-  def unitIncrement_=(i: Int)
-  def blockIncrement: Int
-  def blockIncrement_=(i: Int)
-
-  def value: Int
-  def value_=(v : Int)
-
-  def visibleAmount: Int
-  def visibleAmount_=(v: Int)
-
-  def minimum: Int
-  def minimum_=(m: Int)
-  def maximum: Int
-  def maximum_=(m: Int)
-
-// Needs implementation of AdjustmentEvent
-//
-//    val adjustments: Publisher = new Publisher {
-//		peer.addAdjustmentListener(new AdjustmentListener {
-//			def adjustmentValueChanged(e: java.awt.event.AdjustmentEvent) {
-//				publish(new AdjustmentEvent(e))
-//			}
-//		})
-//   	}
-}
diff --git a/src/swing/scala/swing/Alignment.scala b/src/swing/scala/swing/Alignment.scala
deleted file mode 100644
index b49e89d..0000000
--- a/src/swing/scala/swing/Alignment.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import javax.swing.SwingConstants._
-
-/**
- * Horizontal and vertical alignments. We sacrifice a bit of type-safety
- * for simplicity here.
- *
- * @see javax.swing.SwingConstants
- */
-object Alignment extends Enumeration {
-  val Left = Value(LEFT)
-  val Right = Value(RIGHT)
-  val Center = Value(CENTER)
-  val Top = Value(TOP)
-  val Bottom = Value(BOTTOM)
-  //1.6: val Baseline = Value(BASELINE)
-
-  val Leading = Value(LEADING)
-  val Trailing = Value(TRAILING)
-}
-
diff --git a/src/swing/scala/swing/Applet.scala b/src/swing/scala/swing/Applet.scala
deleted file mode 100644
index b8ba4ea..0000000
--- a/src/swing/scala/swing/Applet.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import javax.swing.JApplet
-
-/** <p>
- *    Clients should implement the ui field. See the <code>SimpleApplet</code>
- *    demo for an example.
- *  </p>
- *  <p>
- *    <b>Note</b>: <code>Applet</code> extends <code>javax.swing.JApplet</code>
- *    to satisfy Java's applet loading mechanism. The usual component wrapping
- *    scheme doesn't work here.
- *  </p>
- *
- *  @see javax.swing.JApplet
- */
-abstract class Applet extends JApplet { outer =>
-  val ui: UI
-
-  override def init() { ui.init() }
-  override def start() { ui.start() }
-  override def stop() { ui.stop() }
-
-  abstract class UI extends RootPanel {
-    def peer = outer
-    override def contents_=(c: Component) {
-      super.contents_=(c)
-      peer.validate()
-    }
-
-    def init()
-    def start() {}
-    def stop() {}
-  }
-}
-
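
    A minimal sketch of the pattern the Applet doc comment describes (clients
    implement only the ui field); HelloApplet is a hypothetical name:

    import scala.swing._

    class HelloApplet extends Applet {
      object ui extends UI {
        def init() { contents = new Label("Hello from scala.swing") }
      }
    }
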
diff --git a/src/swing/scala/swing/BorderPanel.scala b/src/swing/scala/swing/BorderPanel.scala
deleted file mode 100644
index 75bb721..0000000
--- a/src/swing/scala/swing/BorderPanel.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import java.awt.BorderLayout
-
-object BorderPanel {
-  /**
-   * The position of a component in a <code>BorderPanel</code>
-   */
-  object Position extends Enumeration {
-    val North = Value(BorderLayout.NORTH)
-    val South = Value(BorderLayout.SOUTH)
-    val West = Value(BorderLayout.WEST)
-    val East = Value(BorderLayout.EAST)
-    val Center = Value(BorderLayout.CENTER)
-  }
-  private[swing] def wrapPosition(s: String): Position.Value = s match {
-    case BorderLayout.NORTH => Position.North
-    case BorderLayout.SOUTH => Position.South
-    case BorderLayout.WEST => Position.West
-    case BorderLayout.EAST => Position.East
-    case BorderLayout.CENTER => Position.Center
-  }
-}
-
-/**
- * A container that arranges its children around a central component that
- * takes most of the space. The other children are placed on one of four
- * borders: north, east, south, west.
- *
- * @see javax.swing.BorderLayout
- */
-class BorderPanel extends Panel with LayoutContainer {
-  import BorderPanel._
-  def layoutManager = peer.getLayout.asInstanceOf[BorderLayout]
-  override lazy val peer = new javax.swing.JPanel(new BorderLayout) with SuperMixin
-
-  type Constraints = Position.Value
-
-  protected def constraintsFor(comp: Component) =
-    wrapPosition(layoutManager.getConstraints(comp.peer).asInstanceOf[String])
-
-  protected def areValid(c: Constraints): (Boolean, String) = (true, "")
-  protected def add(c: Component, l: Constraints) {
-    // we need to remove previous components with the same constraints as the new one,
-    // otherwise the layout manager loses track of the old one
-    val old = layoutManager.getLayoutComponent(l.toString)
-    if(old != null) peer.remove(old)
-    peer.add(c.peer, l.toString)
-  }
-}
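
    Illustrative use of the removed BorderPanel: components are added through
    the layout map inherited from LayoutContainer, keyed by Position values.

    import scala.swing._
    import BorderPanel.Position._

    val panel = new BorderPanel {
      layout(new Label("Header")) = North    // one component per border position
      layout(new Button("OK"))    = Center   // the center takes the remaining space
    }
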
diff --git a/src/swing/scala/swing/BoxPanel.scala b/src/swing/scala/swing/BoxPanel.scala
deleted file mode 100644
index f5859a8..0000000
--- a/src/swing/scala/swing/BoxPanel.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-/**
- * A panel that lays out its contents one after the other,
- * either horizontally or vertically.
- *
- * @see javax.swing.BoxLayout
- */
-class BoxPanel(orientation: Orientation.Value) extends Panel with SequentialContainer.Wrapper {
-  override lazy val peer = {
-    val p = new javax.swing.JPanel with SuperMixin
-    val l = new javax.swing.BoxLayout(p, orientation.id)
-    p.setLayout(l)
-    p
-  }
-}
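
    A short sketch of BoxPanel usage: contents are laid out one after the
    other along the chosen orientation.

    import scala.swing._

    val column = new BoxPanel(Orientation.Vertical) {
      contents += new Button("First")    // stacked top to bottom
      contents += new Button("Second")
    }
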
diff --git a/src/swing/scala/swing/BufferWrapper.scala b/src/swing/scala/swing/BufferWrapper.scala
deleted file mode 100644
index 38230ba..0000000
--- a/src/swing/scala/swing/BufferWrapper.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.swing
-
-import scala.collection.mutable.Buffer
-import scala.collection.Iterator
-
-/**
- * Default partial implementation for buffer adapters.
- */
-protected[swing] abstract class BufferWrapper[A] extends Buffer[A] { outer =>
-  def clear() { for (i <- 0 until length) remove(0) }
-  def update(n: Int, a: A) {
-    remove(n)
-    insertAt(n, a)
-  }
-  def insertAll(n: Int, elems: Traversable[A]) {
-    var i = n
-    for (el <- elems) {
-      insertAt(i, el)
-      i += 1
-    }
-  }
-  protected def insertAt(n: Int, a: A)
-
-  def +=:(a: A): this.type = { insertAt(0, a); this }
-  def iterator = Iterator.range(0,length).map(apply(_))
-}
diff --git a/src/swing/scala/swing/Button.scala b/src/swing/scala/swing/Button.scala
deleted file mode 100644
index f10d49d..0000000
--- a/src/swing/scala/swing/Button.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-import javax.swing._
-
-object Button {
-  def apply(text0: String)(op: => Unit) = new Button(Action(text0)(op))
-}
-
-/**
- * A button that can be clicked, usually to perform some action.
- *
- * @see javax.swing.JButton
- */
-class Button(text0: String) extends AbstractButton with Publisher {
-  override lazy val peer: JButton = new JButton(text0) with SuperMixin
-  def this() = this("")
-  def this(a: Action) = {
-    this("")
-    action = a
-  }
-
-  def defaultButton: Boolean = peer.isDefaultButton
-
-  def defaultCapable: Boolean = peer.isDefaultCapable
-  def defaultCapable_=(capable: Boolean) { peer.setDefaultCapable(capable) }
-}
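
    The Button companion's apply wires a title and a click handler in one
    call; a tiny sketch:

    import scala.swing._

    // Equivalent to new Button(Action("Say hello") { ... })
    val hello = Button("Say hello") { println("hello") }
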
diff --git a/src/swing/scala/swing/ButtonGroup.scala b/src/swing/scala/swing/ButtonGroup.scala
deleted file mode 100644
index 2075df7..0000000
--- a/src/swing/scala/swing/ButtonGroup.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.swing
-
-import event._
-import javax.swing.{AbstractButton => JAbstractButton,Icon}
-import scala.collection.{ mutable, immutable }
-
-/**
- * A button mutex. At most one of its associated buttons is selected
- * at a time.
- *
- * @see javax.swing.ButtonGroup
- */
-class ButtonGroup(initialButtons: AbstractButton*) {
-  val peer: javax.swing.ButtonGroup = new javax.swing.ButtonGroup
-
-  val buttons: mutable.Set[AbstractButton] = new mutable.Set[AbstractButton] {
-    def -=(b: AbstractButton): this.type = { peer.remove(b.peer); this }
-    def +=(b: AbstractButton): this.type = { peer.add(b.peer); this }
-    def contains(b: AbstractButton) = this.iterator.contains(b)
-    override def size = peer.getButtonCount
-    def iterator: Iterator[AbstractButton] = new Iterator[AbstractButton] {
-      val enum = peer.getElements
-      def next = UIElement.cachedWrapper[AbstractButton](enum.nextElement)
-      def hasNext = enum.hasMoreElements
-    }
-  }
-  buttons ++= initialButtons
-
-  //1.6: def deselectAll() { peer.clearSelection }
-  def selected: Option[AbstractButton] = buttons.find(_.selected)
-  def select(b: AbstractButton) { peer.setSelected(b.peer.getModel, true) }
-}
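
    Sketch of the button-mutex behaviour described above, using three
    RadioButtons (the names are illustrative):

    import scala.swing._

    val small  = new RadioButton("Small")
    val medium = new RadioButton("Medium")
    val large  = new RadioButton("Large")

    val sizes = new ButtonGroup(small, medium, large)   // at most one stays selected
    sizes.select(medium)
    println(sizes.selected.map(_.text))                 // prints Some(Medium)
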
diff --git a/src/swing/scala/swing/CheckBox.scala b/src/swing/scala/swing/CheckBox.scala
deleted file mode 100644
index 7287c95..0000000
--- a/src/swing/scala/swing/CheckBox.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import javax.swing._
-
-/**
- * A two-state button that can either be checked or unchecked.
- *
- * @see javax.swing.JCheckBox
- */
-class CheckBox(text: String) extends ToggleButton {
-  override lazy val peer: JCheckBox = new JCheckBox(text) with SuperMixin
-  def this() = this("")
-
-  def borderPaintedFlat: Boolean = peer.isBorderPaintedFlat
-  def borderPaintedFlat_=(flat: Boolean) { peer.setBorderPaintedFlat(flat) }
-}
diff --git a/src/swing/scala/swing/ComboBox.scala b/src/swing/scala/swing/ComboBox.scala
deleted file mode 100644
index 5b70f6f..0000000
--- a/src/swing/scala/swing/ComboBox.scala
+++ /dev/null
@@ -1,208 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.swing
-
-import event._
-import javax.swing.{JList, JComponent, JComboBox, JTextField, ComboBoxModel, AbstractListModel, ListCellRenderer}
-import java.awt.event.ActionListener
-
-object ComboBox {
-  /**
-   * An editor for a combo box. Lets you edit the currently selected item.
-   * It is highly recommended to use the BuiltInEditor class. For anything
-   * else, one cannot guarantee that it integrates nicely with the current
-   * LookAndFeel.
-   *
-   * Publishes action events.
-   */
-  trait Editor[A] extends Publisher {
-    lazy val comboBoxPeer: javax.swing.ComboBoxEditor = new javax.swing.ComboBoxEditor with Publisher {
-      def addActionListener(l: ActionListener) {
-        this match {
-          // TODO case w: Action.Trigger.Wrapper =>
-          //  w.peer.addActionListener(l)
-          case _ =>
-            this.subscribe(new Reactions.Wrapper(l) ({
-               case ActionEvent(c) => l.actionPerformed(new java.awt.event.ActionEvent(c.peer, 0, ""))
-            }))
-         }
-      }
-      def removeActionListener(l: ActionListener) {
-        this match {
-          // TODO case w: Action.Trigger.Wrapper =>
-          //  w.peer.removeActionListener(l)
-          case _ =>
-            this.unsubscribe(new Reactions.Wrapper(l)({ case _ => }))
-        }
-      }
-      def getEditorComponent: JComponent = Editor.this.component.peer
-      def getItem(): AnyRef = item.asInstanceOf[AnyRef]
-      def selectAll() { startEditing() }
-      def setItem(a: Any) { item = a.asInstanceOf[A] }
-    }
-    def component: Component
-    def item: A
-    def item_=(a: A)
-    def startEditing()
-  }
-
-  /**
-   * Use this editor if you want to reuse the built-in editor supplied by the current
-   * Look and Feel. This is restricted to a text field as the editor widget. The
-   * conversion from and to a string is done by the supplied functions.
-   *
-   * It's okay if string2A throws exceptions. They are caught by an input verifier.
-   */
-  class BuiltInEditor[A](comboBox: ComboBox[A])(string2A: String => A,
-                         a2String: A => String) extends ComboBox.Editor[A] {
-    protected[swing] class DelegatedEditor(editor: javax.swing.ComboBoxEditor) extends javax.swing.ComboBoxEditor {
-      var value: A = {
-        val v = comboBox.peer.getSelectedItem
-        try {
-          v match {
-            case s: String => string2A(s)
-            case _ => v.asInstanceOf[A]
-          }
-        } catch {
-          case _: Exception =>
-            throw new IllegalArgumentException("ComboBox not initialized with a proper value, was '" + v + "'.")
-        }
-      }
-      def addActionListener(l: ActionListener) {
-        editor.addActionListener(l)
-      }
-      def removeActionListener(l: ActionListener) {
-       editor.removeActionListener(l)
-      }
-
-      def getEditorComponent: JComponent = editor.getEditorComponent.asInstanceOf[JComponent]
-      def selectAll() { editor.selectAll() }
-      def getItem(): AnyRef = { verifier.verify(getEditorComponent); value.asInstanceOf[AnyRef] }
-      def setItem(a: Any) { editor.setItem(a) }
-
-      val verifier = new javax.swing.InputVerifier {
-        // TODO: should chain with potentially existing verifier in editor
-        def verify(c: JComponent) = try {
-          value = string2A(c.asInstanceOf[JTextField].getText)
-          true
-  	    }
-  	    catch {
-          case e: Exception => false
-        }
-      }
-
-      def textEditor = getEditorComponent.asInstanceOf[JTextField]
-      textEditor.setInputVerifier(verifier)
-      textEditor.addActionListener(Swing.ActionListener{ a =>
-        getItem() // make sure our value is updated
-        textEditor.setText(a2String(value))
-      })
-    }
-
-    override lazy val comboBoxPeer: javax.swing.ComboBoxEditor = new DelegatedEditor(comboBox.peer.getEditor)
-
-    def component = Component.wrap(comboBoxPeer.getEditorComponent.asInstanceOf[JComponent])
-    def item: A = { comboBoxPeer.asInstanceOf[DelegatedEditor].value }
-    def item_=(a: A) { comboBoxPeer.setItem(a2String(a)) }
-    def startEditing() { comboBoxPeer.selectAll() }
-  }
-
-  implicit def stringEditor(c: ComboBox[String]): Editor[String] = new BuiltInEditor(c)(s => s, s => s)
-  implicit def intEditor(c: ComboBox[Int]): Editor[Int] = new BuiltInEditor(c)(s => s.toInt, s => s.toString)
-  implicit def floatEditor(c: ComboBox[Float]): Editor[Float] = new BuiltInEditor(c)(s => s.toFloat, s => s.toString)
-  implicit def doubleEditor(c: ComboBox[Double]): Editor[Double] = new BuiltInEditor(c)(s => s.toDouble, s => s.toString)
-
-  def newConstantModel[A](items: Seq[A]): ComboBoxModel = {
-    new AbstractListModel with ComboBoxModel {
-      private var selected: A = if (items.isEmpty) null.asInstanceOf[A] else items(0)
-      def getSelectedItem: AnyRef = selected.asInstanceOf[AnyRef]
-      def setSelectedItem(a: Any) {
-        if ((selected != null && selected != a) ||
-            selected == null && a != null) {
-          selected = a.asInstanceOf[A]
-          fireContentsChanged(this, -1, -1)
-        }
-      }
-      def getElementAt(n: Int) = items(n).asInstanceOf[AnyRef]
-      def getSize = items.size
-    }
-  }
-
-  /*def newMutableModel[A, Self](items: Seq[A] with scala.collection.mutable.Publisher[scala.collection.mutable.Message[A], Self]): ComboBoxModel = {
-    new AbstractListModel with ComboBoxModel {
-      private var selected = items(0)
-      def getSelectedItem: AnyRef = selected.asInstanceOf[AnyRef]
-      def setSelectedItem(a: Any) { selected = a.asInstanceOf[A] }
-      def getElementAt(n: Int) = items(n).asInstanceOf[AnyRef]
-      def getSize = items.size
-    }
-  }
-
-  def newConstantModel[A](items: Seq[A]): ComboBoxModel = items match {
-    case items: Seq[A] with scala.collection.mutable.Publisher[scala.collection.mutable.Message[A], Self] => newMutableModel
-    case _ => newConstantModel(items)
-  }*/
-}
-
-/**
- * Lets the user make a selection from a list of predefined items. Visually,
- * this is implemented as a button-like component with a pull-down menu.
- *
- * @see javax.swing.JComboBox
- */
-class ComboBox[A](items: Seq[A]) extends Component with Publisher {
-  override lazy val peer: JComboBox = new JComboBox(ComboBox.newConstantModel(items)) with SuperMixin
-
-  object selection extends Publisher {
-    def index: Int = peer.getSelectedIndex
-    def index_=(n: Int) { peer.setSelectedIndex(n) }
-    def item: A = peer.getSelectedItem.asInstanceOf[A]
-    def item_=(a: A) { peer.setSelectedItem(a) }
-
-    peer.addActionListener(Swing.ActionListener { e =>
-      publish(event.SelectionChanged(ComboBox.this))
-    })
-  }
-
-  /**
-   * Sets the renderer for this combo box's items. Index -1 is
-   * passed to the renderer for the selected item (not in the pull-down menu).
-   *
-   * The underlying combo box renders all items in a <code>ListView</code>
-   * (both in the pull-down menu and in the box itself), hence the
-   * <code>ListView.Renderer</code>.
-   *
-   * Note that the UI peer of a combo box usually changes the colors
-   * of the component to its own defaults _after_ the renderer has been
-   * configured. That's Swing's principle of most surprise.
-   */
-  def renderer: ListView.Renderer[A] = ListView.Renderer.wrap(peer.getRenderer)
-  def renderer_=(r: ListView.Renderer[A]) { peer.setRenderer(r.peer) }
-
-  /* XXX: currently not safe to expose:
-  def editor: ComboBox.Editor[A] =
-  def editor_=(r: ComboBox.Editor[A]) { peer.setEditor(r.comboBoxPeer) }
-  */
-  def editable: Boolean = peer.isEditable
-
-  /**
-   * Makes this combo box editable. In order to do so, this combo needs an
-   * editor which is supplied by the implicit argument. For default
-   * editors, see the ComboBox companion object.
-   */
-  def makeEditable()(implicit editor: ComboBox[A] => ComboBox.Editor[A]) {
-    peer.setEditable(true)
-    peer.setEditor(editor(this).comboBoxPeer)
-  }
-
-  def prototypeDisplayValue: Option[A] = toOption[A](peer.getPrototypeDisplayValue)
-  def prototypeDisplayValue_=(v: Option[A]) {
-    peer.setPrototypeDisplayValue((v map toAnyRef).orNull)
-  }
-}
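
    A sketch of how the editable combo box is meant to be used: makeEditable()
    picks up one of the implicit editors defined in the companion object (here
    intEditor, since the element type is Int).

    import scala.swing._

    val numbers = new ComboBox(Seq(1, 2, 3, 5, 8))
    numbers.makeEditable()          // resolves ComboBox.intEditor implicitly
    numbers.selection.item = 5      // programmatic selection
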
diff --git a/src/swing/scala/swing/Component.scala b/src/swing/scala/swing/Component.scala
deleted file mode 100644
index b7dd856..0000000
--- a/src/swing/scala/swing/Component.scala
+++ /dev/null
@@ -1,295 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.swing
-
-import event._
-
-import java.awt.Graphics
-import java.awt.event._
-import javax.swing.JComponent
-import javax.swing.border.Border
-
-/**
- * Utility methods, mostly for wrapping components.
- */
-object Component {
-  /**
-   * Wraps a given Java Swing Component into a new wrapper.
-   */
-  def wrap(c: JComponent): Component = {
-    val w = UIElement.cachedWrapper[Component](c)
-    if (w != null) w
-    else new Component { override lazy val peer = c }
-  }
-}
-
-/**
- * Base class for all UI elements that can be displayed in a window.
- * Components are publishers that fire the following event classes:
- * ComponentEvent, FocusEvent, FontChanged, ForegroundChanged, BackgroundChanged.
- *
- * @note [Java Swing] Unlike in Java Swing, not all components are also containers.
- *
- * @see javax.swing.JComponent
- * @see http://java.sun.com/products/jfc/tsc/articles/painting/ for the component
- * painting mechanism
- */
-abstract class Component extends UIElement {
-  override lazy val peer: javax.swing.JComponent = new javax.swing.JComponent with SuperMixin {}
-  var initP: JComponent = null
-
-  /**
-   * This trait is used to redirect certain calls from the peer to the wrapper
-   * and back. Useful to expose methods that can be customized by overriding.
-   */
-  protected trait SuperMixin extends JComponent {
-    override def paintComponent(g: Graphics) {
-      Component.this.paintComponent(g.asInstanceOf[Graphics2D])
-    }
-    def __super__paintComponent(g: Graphics) {
-      super.paintComponent(g)
-    }
-    override def paintBorder(g: Graphics) {
-      Component.this.paintBorder(g.asInstanceOf[Graphics2D])
-    }
-    def __super__paintBorder(g: Graphics) {
-      super.paintBorder(g)
-    }
-    override def paintChildren(g: Graphics) {
-      Component.this.paintChildren(g.asInstanceOf[Graphics2D])
-    }
-    def __super__paintChildren(g: Graphics) {
-      super.paintChildren(g)
-    }
-
-    override def paint(g: Graphics) {
-      Component.this.paint(g.asInstanceOf[Graphics2D])
-    }
-    def __super__paint(g: Graphics) {
-      super.paint(g)
-    }
-  }
-
-  def name: String = peer.getName
-  def name_=(s: String) = peer.setName(s)
-
-  /**
-   * Used by certain layout managers, e.g., BoxLayout or OverlayLayout to
-   * align components relative to each other.
-   */
-  def xLayoutAlignment: Double = peer.getAlignmentX
-  def xLayoutAlignment_=(x: Double) = peer.setAlignmentX(x.toFloat)
-  def yLayoutAlignment: Double = peer.getAlignmentY
-  def yLayoutAlignment_=(y: Double) = peer.setAlignmentY(y.toFloat)
-
-  def border: Border = peer.getBorder
-  def border_=(b: Border) { peer.setBorder(b) }
-
-  def opaque: Boolean = peer.isOpaque
-  def opaque_=(b: Boolean) = peer.setOpaque(b)
-
-  def enabled: Boolean = peer.isEnabled
-  def enabled_=(b: Boolean) = peer.setEnabled(b)
-
-  def tooltip: String = peer.getToolTipText
-  def tooltip_=(t: String) = peer.setToolTipText(t)
-
-  def inputVerifier: Component => Boolean = { a =>
-    Option(peer.getInputVerifier) forall (_ verify a.peer)
-  }
-  def inputVerifier_=(v: Component => Boolean) {
-    peer.setInputVerifier(new javax.swing.InputVerifier {
-      def verify(c: javax.swing.JComponent) = v(UIElement.cachedWrapper[Component](c))
-    })
-  }
-
-  /*def verifyOnTraversal: (Component, Component) => Boolean = { a =>
-    peer.getInputVerifier().verify(a.peer)
-  }
-  def verifyOnTraversal_=(v: (Component, Component) => Boolean) {
-    peer.setInputVerifier(new javax.swing.InputVerifier {
-      def verify(c: javax.swing.JComponent) = v(UIElement.cachedWrapper[Component](c))
-    })
-  }*/
-  /**
-   * Contains publishers for various mouse events. They are separated for
-   * efficiency reasons.
-   */
-  object mouse {
-    /**
-     * Publishes clicks, presses and releases.
-     */
-    val clicks: Publisher = new LazyPublisher {
-      lazy val l = new MouseListener {
-        def mouseEntered(e: java.awt.event.MouseEvent) {}
-        def mouseExited(e: java.awt.event.MouseEvent) {}
-        def mouseClicked(e: java.awt.event.MouseEvent) {
-          publish(new MouseClicked(e))
-        }
-        def mousePressed(e: java.awt.event.MouseEvent) {
-          publish(new MousePressed(e))
-        }
-        def mouseReleased(e: java.awt.event.MouseEvent) {
-          publish(new MouseReleased(e))
-        }
-      }
-
-      def onFirstSubscribe() = peer.addMouseListener(l)
-      def onLastUnsubscribe() = peer.removeMouseListener(l)
-    }
-    /**
-     * Publishes enters, exits, moves, and drags.
-     */
-    val moves: Publisher = new LazyPublisher {
-      lazy val mouseListener = new MouseListener {
-        def mouseEntered(e: java.awt.event.MouseEvent) {
-          publish(new MouseEntered(e))
-        }
-        def mouseExited(e: java.awt.event.MouseEvent) {
-          publish(new MouseExited(e))
-        }
-        def mouseClicked(e: java.awt.event.MouseEvent) {}
-        def mousePressed(e: java.awt.event.MouseEvent) {}
-        def mouseReleased(e: java.awt.event.MouseEvent) {}
-      }
-
-      lazy val mouseMotionListener = new MouseMotionListener {
-        def mouseMoved(e: java.awt.event.MouseEvent) {
-          publish(new MouseMoved(e))
-        }
-        def mouseDragged(e: java.awt.event.MouseEvent) {
-          publish(new MouseDragged(e))
-        }
-      }
-      def onFirstSubscribe() {
-        peer.addMouseListener(mouseListener)
-        peer.addMouseMotionListener(mouseMotionListener)
-      }
-      def onLastUnsubscribe() {
-        peer.removeMouseListener(mouseListener)
-        peer.removeMouseMotionListener(mouseMotionListener)
-      }
-    }
-    /**
-     * Publishes mouse wheel moves.
-     */
-    val wheel: Publisher = new LazyPublisher {
-      // We need to subscribe lazily and unsubscribe, since components in scroll panes capture
-      // mouse wheel events if there is a listener installed. See ticket #1442.
-      lazy val l = new MouseWheelListener {
-        def mouseWheelMoved(e: java.awt.event.MouseWheelEvent) {
-          publish(new MouseWheelMoved(e))
-        }
-      }
-      def onFirstSubscribe() = peer.addMouseWheelListener(l)
-      def onLastUnsubscribe() = peer.removeMouseWheelListener(l)
-    }
-  }
-
-  object keys extends Publisher {
-    peer.addKeyListener(new KeyListener {
-      def keyPressed(e: java.awt.event.KeyEvent) { publish(new KeyPressed(e)) }
-      def keyReleased(e: java.awt.event.KeyEvent) { publish(new KeyReleased(e)) }
-      def keyTyped(e: java.awt.event.KeyEvent) { publish(new KeyTyped(e)) }
-    })
-  }
-
-  def focusable: Boolean = peer.isFocusable
-  def focusable_=(b: Boolean) = peer.setFocusable(b)
-  def requestFocus() = peer.requestFocus()
-  def requestFocusInWindow() = peer.requestFocusInWindow()
-  def hasFocus: Boolean = peer.isFocusOwner
-
-  protected override def onFirstSubscribe() {
-    super.onFirstSubscribe
-    // TODO: deprecated, remove after 2.8
-    peer.addComponentListener(new java.awt.event.ComponentListener {
-      def componentHidden(e: java.awt.event.ComponentEvent) {
-        publish(UIElementHidden(Component.this))
-      }
-      def componentShown(e: java.awt.event.ComponentEvent) {
-        publish(UIElementShown(Component.this))
-      }
-      def componentMoved(e: java.awt.event.ComponentEvent) {
-        publish(UIElementMoved(Component.this))
-      }
-      def componentResized(e: java.awt.event.ComponentEvent) {
-        publish(UIElementResized(Component.this))
-      }
-    })
-
-    peer.addFocusListener(new java.awt.event.FocusListener {
-      def other(e: java.awt.event.FocusEvent) = e.getOppositeComponent match {
-        case c: JComponent => Some(UIElement.cachedWrapper[Component](c))
-        case _ => None
-      }
-
-      def focusGained(e: java.awt.event.FocusEvent) {
-        publish(FocusGained(Component.this, other(e), e.isTemporary))
-      }
-      def focusLost(e: java.awt.event.FocusEvent) {
-        publish(FocusLost(Component.this, other(e), e.isTemporary))
-      }
-    })
-
-    peer.addPropertyChangeListener(new java.beans.PropertyChangeListener {
-      def propertyChange(e: java.beans.PropertyChangeEvent) {
-        e.getPropertyName match {
-          case "font" => publish(FontChanged(Component.this))
-          case "background" => publish(BackgroundChanged(Component.this))
-          case "foreground" => publish(ForegroundChanged(Component.this))
-          case _ =>
-          /*case "focusable" =>
-          case "focusTraversalKeysEnabled" =>
-          case "forwardFocusTraversalKeys" =>
-          case "backwardFocusTraversalKeys" =>
-          case "upCycleFocusTraversalKeys" =>
-          case "downCycleFocusTraversalKeys" =>
-          case "focusTraversalPolicy" =>
-          case "focusCycleRoot" =>*/
-        }
-      }
-    })
-  }
-
-  def revalidate() { peer.revalidate() }
-
-  /**
-   * For custom painting, users should usually override this method.
-   */
-  protected def paintComponent(g: Graphics2D) {
-    peer match {
-      case peer: SuperMixin => peer.__super__paintComponent(g)
-      case _ =>
-    }
-  }
-
-  protected def paintBorder(g: Graphics2D) {
-    peer match {
-      case peer: SuperMixin => peer.__super__paintBorder(g)
-      case _ =>
-    }
-  }
-
-  protected def paintChildren(g: Graphics2D) {
-    peer match {
-      case peer: SuperMixin => peer.__super__paintChildren(g)
-      case _ =>
-    }
-  }
-
-  def paint(g: Graphics2D) {
-    peer match {
-      case peer: SuperMixin => peer.__super__paint(g)
-      case _ => peer.paint(g)
-    }
-  }
-
-  override def toString = "scala.swing wrapper " + peer.toString
-}
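
    Sketch of the customization hook the SuperMixin machinery above enables:
    override paintComponent in a Component subclass and call super to keep the
    default painting. Crosshair is a hypothetical example class.

    import scala.swing._
    import java.awt.Color

    class Crosshair extends Component {
      preferredSize = new Dimension(100, 100)

      override protected def paintComponent(g: Graphics2D) {
        super.paintComponent(g)                                     // keep default painting
        g.setColor(Color.red)
        g.drawLine(0, size.height / 2, size.width, size.height / 2) // horizontal line
        g.drawLine(size.width / 2, 0, size.width / 2, size.height)  // vertical line
      }
    }
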
diff --git a/src/swing/scala/swing/Container.scala b/src/swing/scala/swing/Container.scala
deleted file mode 100644
index 24889f0..0000000
--- a/src/swing/scala/swing/Container.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-import scala.collection.mutable.Buffer
-
-object Container {
-  /**
-   * Utility trait for wrapping containers. Provides an immutable
-   * implementation of the contents member.
-   */
-  trait Wrapper extends Container with Publisher {
-    override def peer: javax.swing.JComponent
-
-    protected val _contents = new Content
-    def contents: Seq[Component] = _contents
-
-    protected class Content extends BufferWrapper[Component] {
-      override def clear() { peer.removeAll() }
-      override def remove(n: Int): Component = {
-        val c = peer.getComponent(n)
-        peer.remove(n)
-        UIElement.cachedWrapper[Component](c)
-      }
-      protected def insertAt(n: Int, c: Component) { peer.add(c.peer, n) }
-      def +=(c: Component): this.type = { peer.add(c.peer) ; this }
-      def length = peer.getComponentCount
-      def apply(n: Int) = UIElement.cachedWrapper[Component](peer.getComponent(n))
-    }
-
-    peer.addContainerListener(new java.awt.event.ContainerListener {
-      def componentAdded(e: java.awt.event.ContainerEvent) {
-        publish(ComponentAdded(Wrapper.this,
-          UIElement.cachedWrapper[Component](e.getChild.asInstanceOf[javax.swing.JComponent])))
-      }
-      def componentRemoved(e: java.awt.event.ContainerEvent) {
-        publish(ComponentRemoved(Wrapper.this,
-          UIElement.cachedWrapper[Component](e.getChild.asInstanceOf[javax.swing.JComponent])))
-      }
-    })
-  }
-}
-
-/**
- * The base trait for UI elements that can contain <code>Component</code>s.
- *
- * @note [Java Swing] This is not the wrapper for java.awt.Container but a trait
- * that extracts a common interface for components, menus, and windows.
- */
-trait Container extends UIElement {
-  /**
-   * The child components of this container.
-   */
-  def contents: Seq[Component]
-}
diff --git a/src/swing/scala/swing/EditorPane.scala b/src/swing/scala/swing/EditorPane.scala
deleted file mode 100644
index b8c506d..0000000
--- a/src/swing/scala/swing/EditorPane.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.swing
-
-import event._
-import javax.swing._
-import javax.swing.text._
-import java.awt.event._
-
-/**
- * A text component that allows multiline text input and display.
- *
- * @see javax.swing.JEditorPane
- */
-class EditorPane(contentType0: String, text0: String) extends TextComponent {
-	override lazy val peer: JEditorPane = new JEditorPane(contentType0, text0) with SuperMixin
-	def this() = this("text/plain", "")
-
-	def contentType: String = peer.getContentType
-	def contentType_=(t: String) = peer.setContentType(t)
-
-	def editorKit: EditorKit = peer.getEditorKit
-	def editorKit_=(k: EditorKit) = peer.setEditorKit(k)
-}
diff --git a/src/swing/scala/swing/FileChooser.scala b/src/swing/scala/swing/FileChooser.scala
deleted file mode 100644
index e731c67..0000000
--- a/src/swing/scala/swing/FileChooser.scala
+++ /dev/null
@@ -1,111 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.swing
-
-import java.io.File
-import javax.swing._
-import javax.swing.filechooser._
-
-object FileChooser {
-  /**
-   * The result of a file dialog. The precise meaning of the `Approve`
-   * result depends on the specific dialog type. Could be `"save"` or
-   * `"open"` for instance.
-   */
-  object Result extends Enumeration {
-    val Cancel = Value(JFileChooser.CANCEL_OPTION)
-    val Approve = Value(JFileChooser.APPROVE_OPTION)
-    val Error = Value(JFileChooser.ERROR_OPTION)
-  }
-
-  /**
-   * The kind of elements a user can select in a file dialog.
-   */
-  object SelectionMode extends Enumeration {
-    val FilesOnly = Value(JFileChooser.FILES_ONLY)
-    val DirectoriesOnly = Value(JFileChooser.DIRECTORIES_ONLY)
-    val FilesAndDirectories = Value(JFileChooser.FILES_AND_DIRECTORIES)
-  }
-}
-
-/**
- * Used to open file dialogs.
- *
- * @see [[javax.swing.JFileChooser]]
- */
-class FileChooser(dir: File) {
-  import FileChooser._
-  lazy val peer: JFileChooser = new JFileChooser(dir)
-
-  def this() = this(null)
-
-  import Swing._
-  def showOpenDialog(over: Component): Result.Value = Result(peer.showOpenDialog(nullPeer(over)))
-  def showSaveDialog(over: Component): Result.Value = Result(peer.showSaveDialog(nullPeer(over)))
-  def showDialog(over: Component, approveText: String): Result.Value = Result(peer.showDialog(nullPeer(over), approveText))
-
-  def controlButtonsAreShown: Boolean = peer.getControlButtonsAreShown
-  def controlButtonsAreShown_=(b: Boolean) { peer.setControlButtonsAreShown(b) }
-
-  def title: String = peer.getDialogTitle
-  def title_=(t: String) { peer.setDialogTitle(t) }
-
-  def accessory: Component = UIElement.cachedWrapper[Component](peer.getAccessory)
-  def accessory_=(c: Component) { peer.setAccessory(c.peer) }
-
-  def fileHidingEnabled: Boolean = peer.isFileHidingEnabled
-  def fileHidingEnabled_=(b: Boolean) { peer.setFileHidingEnabled(b) }
-  def fileSelectionMode: SelectionMode.Value = SelectionMode(peer.getFileSelectionMode)
-  def fileSelectionMode_=(s: SelectionMode.Value) { peer.setFileSelectionMode(s.id) }
-  def fileFilter: FileFilter = peer.getFileFilter
-  def fileFilter_=(f: FileFilter) { peer setFileFilter f }
-
-  def selectedFile: File = peer.getSelectedFile
-  def selectedFile_=(file: File) { peer.setSelectedFile(file) }
-  def selectedFiles: Seq[File] = peer.getSelectedFiles
-  def selectedFiles_=(files: File*) { peer.setSelectedFiles(files.toArray) }
-
-  def multiSelectionEnabled: Boolean = peer.isMultiSelectionEnabled
-  def multiSelectionEnabled_=(b: Boolean) { peer.setMultiSelectionEnabled(b) }
-
-  def iconFor(f: File) = peer.getIcon(f)
-  def descriptionFor(f: File) = peer.getDescription(f)
-  def nameFor(f: File) = peer.getName(f)
-  def typeDescriptionFor(f: File) = peer.getTypeDescription(f)
-  def traversable(f: File) = peer.isTraversable(f)
-
-  def acceptAllFileFilter = peer.getAcceptAllFileFilter
-
-  /*peer.addPropertyChangeListener(new java.beans.PropertyChangeListener {
-    def propertyChange(e: java.beans.PropertyChangeEvent) {
-      import JFileChooser._
-      e.getPropertyName match {
-        case APPROVE_BUTTON_TEXT_CHANGED_PROPERTY =>
-        case ACCESSORY_CHANGED_PROPERTY =>
-        case APPROVE_BUTTON_MNEMONIC_CHANGED_PROPERTY =>
-        case APPROVE_BUTTON_TEXT_CHANGED_PROPERTY =>
-        case APPROVE_BUTTON_TOOL_TIP_TEXT_CHANGED_PROPERTY =>
-        case CHOOSABLE_FILE_FILTER_CHANGED_PROPERTY =>
-        case CONTROL_BUTTONS_ARE_SHOWN_CHANGED_PROPERTY =>
-        case DIALOG_TITLE_CHANGED_PROPERTY =>
-        case DIALOG_TYPE_CHANGED_PROPERTY =>
-        case DIRECTORY_CHANGED_PROPERTY =>
-        case FILE_FILTER_CHANGED_PROPERTY =>
-        case FILE_HIDING_CHANGED_PROPERTY =>
-        case FILE_SELECTION_MODE_CHANGED_PROPERTY =>
-        case FILE_SYSTEM_VIEW_CHANGED_PROPERTY =>
-        case FILE_VIEW_CHANGED_PROPERTY =>
-        case MULTI_SELECTION_ENABLED_CHANGED_PROPERTY =>
-        case SELECTED_FILE_CHANGED_PROPERTY =>
-        case SELECTED_FILES_CHANGED_PROPERTY =>
-        case _ =>
-      }
-    }
-  })*/
-}
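
    Illustrative use of the removed FileChooser wrapper; passing null as the
    parent component is a shortcut for a dialog without an owner.

    import scala.swing._
    import FileChooser.Result

    val chooser = new FileChooser(new java.io.File("."))
    chooser.title = "Pick a file"
    if (chooser.showOpenDialog(null) == Result.Approve)   // null: no parent component
      println(chooser.selectedFile)
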
diff --git a/src/swing/scala/swing/FlowPanel.scala b/src/swing/scala/swing/FlowPanel.scala
deleted file mode 100644
index feeb3d4..0000000
--- a/src/swing/scala/swing/FlowPanel.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import java.awt.FlowLayout
-import javax.swing.JPanel
-
-object FlowPanel {
-  object Alignment extends Enumeration {
-    val Leading = Value(FlowLayout.LEADING)
-    val Trailing = Value(FlowLayout.TRAILING)
-    val Left = Value(FlowLayout.LEFT)
-    val Right = Value(FlowLayout.RIGHT)
-    val Center = Value(FlowLayout.CENTER)
-  }
-}
-
-/**
- * A panel that arranges its contents horizontally, one after the other.
- * If they don't fit, this panel will try to insert line breaks.
- *
- * @see java.awt.FlowLayout
- */
-class FlowPanel(alignment: FlowPanel.Alignment.Value)(contents0: Component*) extends Panel with SequentialContainer.Wrapper {
-  override lazy val peer: JPanel =
-    new JPanel(new java.awt.FlowLayout(alignment.id)) with SuperMixin
-  def this(contents0: Component*) = this(FlowPanel.Alignment.Center)(contents0: _*)
-  def this() = this(FlowPanel.Alignment.Center)()
-
-  contents ++= contents0
-
-  private def layoutManager = peer.getLayout.asInstanceOf[java.awt.FlowLayout]
-
-  def vGap: Int = layoutManager.getVgap
-  def vGap_=(n: Int) { layoutManager.setVgap(n) }
-  def hGap: Int = layoutManager.getHgap
-  def hGap_=(n: Int) { layoutManager.setHgap(n) }
-}
diff --git a/src/swing/scala/swing/Font.scala.disabled b/src/swing/scala/swing/Font.scala.disabled
deleted file mode 100644
index 9e21eb8..0000000
--- a/src/swing/scala/swing/Font.scala.disabled
+++ /dev/null
@@ -1,70 +0,0 @@
-package scala.swing
-
-/*object Font {
-  def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
-  def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
-  def decode(str: String) = java.awt.Font.decode(str)
-
-  /* TODO: finish implementation
-  /**
-   * See [java.awt.Font.getFont].
-   */
-  def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
-    java.awt.Font.getFont(ImmutableMapWrapper(attributes))
-
-  import java.{util => ju}
-  private case class ImmutableMapWrapper[A, B](underlying : Map[A, B])(t : ClassTag[A]) extends ju.AbstractMap[A, B] {
-    self =>
-    override def size = underlying.size
-
-    override def put(k : A, v : B) =
-      throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
-    override def remove(k : AnyRef) =
-      throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
-
-    override def entrySet : ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
-      def size = self.size
-
-      def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
-        val ui = underlying.iterator
-        var prev : Option[A] = None
-
-        def hasNext = ui.hasNext
-
-        def next = {
-          val (k, v) = ui.next
-          prev = Some(k)
-          new ju.Map.Entry[A, B] {
-            def getKey = k
-            def getValue = v
-            def setValue(v1 : B) = self.put(k, v1)
-            override def equals(other : Any) = other match {
-              case e : ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
-              case _ => false
-            }
-          }
-        }
-
-        def remove = prev match {
-          case Some(k) => val v = self.remove(k.asInstanceOf[AnyRef]) ; prev = None ; v
-          case _ => throw new IllegalStateException("next must be called at least once before remove")
-        }
-      }
-    }
-  }
-  */
-
-  /**
-   * See [java.awt.Font.getFont].
-   */
-  def get(nm: String) = java.awt.Font.getFont(nm)
-  /**
-   * See [java.awt.Font.getFont].
-   */
-  def get(nm: String, font: Font) = java.awt.Font.getFont(nm, font)
-
-  def Insets(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
-  def Rectangle(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
-  def Point(x: Int, y: Int) = new Point(x, y)
-  def Dimension(x: Int, y: Int) = new Dimension(x, y)
-}*/
\ No newline at end of file
diff --git a/src/swing/scala/swing/FormattedTextField.scala b/src/swing/scala/swing/FormattedTextField.scala
deleted file mode 100644
index 311ff42..0000000
--- a/src/swing/scala/swing/FormattedTextField.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-import javax.swing._
-import java.awt.event._
-
-object FormattedTextField {
-  /**
-   * The behavior of a formatted text field when it loses its focus.
-   */
-  object FocusLostBehavior extends Enumeration {
-    val Commit = Value(JFormattedTextField.COMMIT)
-    val CommitOrRevert = Value(JFormattedTextField.COMMIT_OR_REVERT)
-    val Persist = Value(JFormattedTextField.PERSIST)
-    val Revert = Value(JFormattedTextField.REVERT)
-  }
-}
-
-/**
- * A text field with formatted input.
- *
- * @see javax.swing.JFormattedTextField
- */
-class FormattedTextField(format: java.text.Format) extends TextComponent {
-  override lazy val peer: JFormattedTextField = new JFormattedTextField(format) with SuperMixin
-
-  import FormattedTextField._
-
-  def commitEdit() { peer.commitEdit() }
-  def editValid: Boolean = peer.isEditValid
-
-  def focusLostBehavior: FocusLostBehavior.Value = FocusLostBehavior(peer.getFocusLostBehavior)
-  def focusLostBehavior_=(b: FocusLostBehavior.Value) { peer.setFocusLostBehavior(b.id) }
-}
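
    A small sketch assuming a java.text.NumberFormat: invalid edits are
    reverted when the field loses focus.

    import scala.swing._
    import java.text.NumberFormat

    val amount = new FormattedTextField(NumberFormat.getIntegerInstance)
    amount.focusLostBehavior = FormattedTextField.FocusLostBehavior.Revert
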
diff --git a/src/swing/scala/swing/GridBagPanel.scala b/src/swing/scala/swing/GridBagPanel.scala
deleted file mode 100644
index 7d181af..0000000
--- a/src/swing/scala/swing/GridBagPanel.scala
+++ /dev/null
@@ -1,113 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.swing
-
-import java.awt.{GridBagConstraints, GridBagLayout}
-
-object GridBagPanel {
-  object Fill extends Enumeration {
-    val None = Value(GridBagConstraints.NONE)
-    val Horizontal = Value(GridBagConstraints.HORIZONTAL)
-    val Vertical = Value(GridBagConstraints.VERTICAL)
-    val Both = Value(GridBagConstraints.BOTH)
-  }
-  object Anchor extends Enumeration {
-    val North = Value(GridBagConstraints.NORTH)
-    val NorthEast = Value(GridBagConstraints.NORTHEAST)
-    val East = Value(GridBagConstraints.EAST)
-    val SouthEast = Value(GridBagConstraints.SOUTHEAST)
-    val South = Value(GridBagConstraints.SOUTH)
-    val SouthWest = Value(GridBagConstraints.SOUTHWEST)
-    val West = Value(GridBagConstraints.WEST)
-    val NorthWest = Value(GridBagConstraints.NORTHWEST)
-    val Center = Value(GridBagConstraints.CENTER)
-
-    val PageStart = Value(GridBagConstraints.PAGE_START)
-    val PageEnd = Value(GridBagConstraints.PAGE_END)
-    val LineStart = Value(GridBagConstraints.LINE_START)
-    val LineEnd = Value(GridBagConstraints.LINE_END)
-    val FirstLineStart = Value(GridBagConstraints.FIRST_LINE_START)
-    val FirstLineEnd = Value(GridBagConstraints.FIRST_LINE_END)
-    val LastLineStart = Value(GridBagConstraints.LAST_LINE_START)
-    val LastLineEnd = Value(GridBagConstraints.LAST_LINE_END)
-  }
-}
-
-/**
- * A panel that arranges its children in a grid. Layout details can be
- * given for each cell of the grid.
- *
- * @see java.awt.GridBagLayout
- */
-class GridBagPanel extends Panel with LayoutContainer {
-  override lazy val peer = new javax.swing.JPanel(new GridBagLayout) with SuperMixin
-  import GridBagPanel._
-
-  private def layoutManager = peer.getLayout.asInstanceOf[GridBagLayout]
-
-  /**
-   * Convenient conversion from xy-coords given as pairs to
-   * grid bag constraints.
-   */
-  implicit def pair2Constraints(p: (Int, Int)): Constraints = {
-    val c = new Constraints
-    c.gridx = p._1
-    c.gridy = p._2
-    c
-  }
-
-  class Constraints(val peer: GridBagConstraints) extends Proxy {
-    def self = peer
-    def this(gridx: Int, gridy: Int,
-             gridwidth: Int, gridheight: Int,
-             weightx: Double, weighty: Double,
-             anchor: Int, fill: Int, insets: Insets,
-             ipadx: Int, ipady: Int) =
-      this(new GridBagConstraints(gridx, gridy,
-                                  gridwidth, gridheight,
-                                  weightx, weighty,
-                                  anchor, fill, insets,
-                                  ipadx, ipady))
-    def this() = this(new GridBagConstraints())
-    def gridx: Int = peer.gridx
-    def gridx_=(x: Int) { peer.gridx = x }
-    def gridy: Int = peer.gridy
-    def gridy_=(y: Int) { peer.gridy = y }
-    def grid: (Int, Int) = (gridx, gridy)
-    def grid_=(c: (Int, Int)) = {
-      gridx = c._1
-      gridy = c._2
-    }
-
-    def gridwidth: Int = peer.gridwidth
-    def gridwidth_=(w: Int) { peer.gridwidth = w }
-    def gridheight: Int = peer.gridheight
-    def gridheight_=(h: Int) { peer.gridheight = h }
-    def weightx: Double = peer.weightx
-    def weightx_=(x: Double) { peer.weightx = x }
-    def weighty: Double = peer.weighty
-    def weighty_=(y: Double) { peer.weighty = y }
-    def anchor: Anchor.Value = Anchor(peer.anchor)
-    def anchor_=(a: Anchor.Value) { peer.anchor = a.id }
-    def fill: Fill.Value = Fill(peer.fill)
-    def fill_=(f: Fill.Value) { peer.fill = f.id }
-    def insets: Insets = peer.insets
-    def insets_=(i: Insets) { peer.insets = i }
-    def ipadx: Int = peer.ipadx
-    def ipadx_=(x: Int) { peer.ipadx = x }
-    def ipady: Int = peer.ipady
-    def ipady_=(y: Int) { peer.ipady = y }
-  }
-
-  protected def constraintsFor(comp: Component) =
-    new Constraints(layoutManager.getConstraints(comp.peer))
-
-  protected def areValid(c: Constraints): (Boolean, String) = (true, "")
-  protected def add(c: Component, l: Constraints) { peer.add(c.peer, l.peer) }
-}
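
    Sketch of the pair2Constraints convenience defined above: inside a
    GridBagPanel, plain (gridx, gridy) pairs can be used directly as
    constraints.

    import scala.swing._

    val form = new GridBagPanel {
      layout(new Label("Name:")) = (0, 0)   // (gridx, gridy)
      layout(new TextField(20))  = (1, 0)
      layout(new Button("Save")) = (1, 1)
    }
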
diff --git a/src/swing/scala/swing/GridPanel.scala b/src/swing/scala/swing/GridPanel.scala
deleted file mode 100644
index d41f9e1..0000000
--- a/src/swing/scala/swing/GridPanel.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-object GridPanel {
-  val Adapt = 0
-}
-
-/**
- * A panel that lays out its contents in a uniform grid.
- *
- * @see java.awt.GridLayout
- */
-class GridPanel(rows0: Int, cols0: Int) extends Panel with SequentialContainer.Wrapper {
-  override lazy val peer =
-    new javax.swing.JPanel(new java.awt.GridLayout(rows0, cols0)) with SuperMixin
-
-  /*type Constraints = (Int, Int)
-
-  protected def constraintsFor(comp: Component) = {
-    assert(peer.getComponentOrientation.isHorizontal)
-    val idx = contents.indexOf(comp)
-    val (r, c) = (((idx-1)/columns)+1, ((idx-1)%columns)+1)
-    if (peer.getComponentOrientation.isLeftToRight) (r, c)
-    else (r, columns-c+1)
-  }
-
-  protected def add(c: Component, l: Constraints) { peer.add(c.peer, (l._1-1)*columns+l._2) }
-  protected def areValid(c: Constraints): (Boolean, String) =
-    ((c._1 > 0 && c._2 > 0), "Grid coordinates (row,col) must be >= 1 but were " + c)*/
-
-  private def layoutManager = peer.getLayout.asInstanceOf[java.awt.GridLayout]
-
-  def rows: Int = layoutManager.getRows
-  def rows_=(n: Int) { layoutManager.setRows(n) }
-  def columns: Int = layoutManager.getColumns
-  def columns_=(n: Int) { layoutManager.setColumns(n) }
-
-  def vGap: Int = layoutManager.getVgap
-  def vGap_=(n: Int) { layoutManager.setVgap(n) }
-  def hGap: Int = layoutManager.getHgap
-  def hGap_=(n: Int) { layoutManager.setHgap(n) }
-}
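
A short sketch of the uniform grid above (illustrative only, not from the patch), filling a 2x2 grid with 5-pixel gaps:

    import scala.swing._

    object GridSketch {
      val keypad = new GridPanel(2, 2) {
        hGap = 5
        vGap = 5
        contents ++= (1 to 4).map(n => new Button(n.toString))
      }
    }
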
diff --git a/src/swing/scala/swing/Label.scala b/src/swing/scala/swing/Label.scala
deleted file mode 100644
index 65d43cb..0000000
--- a/src/swing/scala/swing/Label.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import javax.swing._
-import scala.swing.Swing._
-
-/**
- * A label component that displays either text, an icon, or both.
- *
- * @see javax.swing.JLabel
- */
-class Label(text0: String, icon0: Icon, align: Alignment.Value) extends Component {
-  override lazy val peer: JLabel =
-    new JLabel(text0, toNullIcon(icon0), align.id) with SuperMixin
-
-  def this() = this("", EmptyIcon, Alignment.Center)
-  def this(s: String) = this(s, EmptyIcon, Alignment.Center)
-  def text: String = peer.getText
-  def text_=(s: String) = peer.setText(s)
-  def icon: Icon = peer.getIcon
-  def icon_=(i: Icon) = peer.setIcon(i)
-
-  /**
-   * The alignment of the label's contents relative to its bounding box.
-   */
-  def xAlignment: Alignment.Value = Alignment(peer.getHorizontalAlignment)
-  def xAlignment_=(x: Alignment.Value) { peer.setHorizontalAlignment(x.id) }
-  def yAlignment: Alignment.Value = Alignment(peer.getVerticalAlignment)
-  def yAlignment_=(x: Alignment.Value) { peer.setVerticalAlignment(x.id) }
-
-  /** @see javax.swing.JLabel#getHorizontalAlignment() */
-  def horizontalAlignment: Alignment.Value = Alignment(peer.getHorizontalAlignment)
-  /** @see javax.swing.JLabel#setHorizontalAlignment() */
-  def horizontalAlignment_=(x: Alignment.Value) { peer.setHorizontalAlignment(x.id) }
-
-  def verticalAlignment: Alignment.Value = Alignment(peer.getVerticalAlignment)
-  def verticalAlignment_=(x: Alignment.Value) { peer.setVerticalAlignment(x.id) }
-
-  def horizontalTextPosition: Alignment.Value = Alignment(peer.getHorizontalTextPosition)
-  def horizontalTextPosition_=(x: Alignment.Value) { peer.setHorizontalTextPosition(x.id) }
-
-  def verticalTextPosition: Alignment.Value = Alignment(peer.getVerticalTextPosition)
-  def verticalTextPosition_=(x: Alignment.Value) { peer.setVerticalTextPosition(x.id) }
-
-  def disabledIcon: Icon = peer.getDisabledIcon
-  def disabledIcon_=(icon: Icon) { peer.setDisabledIcon(icon) }
-
-  def iconTextGap: Int = peer.getIconTextGap
-  def iconTextGap_=(gap: Int) { peer.setIconTextGap(gap) }
-
-  def displayedMnemonicIndex: Int = peer.getDisplayedMnemonicIndex
-  def displayedMnemonicIndex_=(index: Int) { peer.setDisplayedMnemonicIndex(index) }
-}
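
A small sketch of the Label properties documented above (not from the patch):

    import scala.swing._

    object LabelSketch {
      val status = new Label("Ready") {
        xAlignment = Alignment.Left                // contents left-aligned within the bounds
        horizontalTextPosition = Alignment.Right   // text placed to the right of an icon, if any
        iconTextGap = 8
      }
    }
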
diff --git a/src/swing/scala/swing/LayoutContainer.scala b/src/swing/scala/swing/LayoutContainer.scala
deleted file mode 100644
index 37d3514..0000000
--- a/src/swing/scala/swing/LayoutContainer.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.swing
-
-import javax.swing.JComponent
-import scala.collection.mutable
-
-/** A container that associates layout constraints of member type
- *  `Constraints` with its children.
- *
- *  See `GridBagPanel` for an example container with custom constraints.
- *
- *  @note [Java Swing] In scala.swing, panels and layout managers are
- *  combined into subclasses of this base class. This approach allows for
- *  typed component constraints.
- */
-trait LayoutContainer extends Container.Wrapper {
-  /**
-   * The type of component constraints for this container.
-   */
-  type Constraints <: AnyRef
-
-  /**
-   * Obtains the constraints for the given component from the underlying
-   * Swing layout manager.
-   */
-  protected def constraintsFor(c: Component): Constraints
-  /**
-   * Checks whether the given constraints are valid. Additionally returns
-   * an error string that is only used if the constraints aren't valid.
-   */
-  protected def areValid(c: Constraints): (Boolean, String)
-  /**
-   * Adds a component with the given constraints to the underlying layout
-   * manager and the component peer. This method needs to interact properly
-   * with method `constraintsFor`, i.e., it might need to remove previously
-   * held components in order to maintain layout consistency. See `BorderPanel`
-   * for an example.
-   */
-  protected def add(comp: Component, c: Constraints)
-
-  /**
-   * A map of components to the associated layout constraints.
-   * Any element in this map is automatically added to the contents of this
-   * panel. Therefore, specifying the layout of a component via
-   *
-   * layout(myComponent) = myConstraints
-   *
-   * also ensures that myComponent is properly added to this container.
-   */
-  def layout: mutable.Map[Component, Constraints] = new mutable.Map[Component, Constraints] {
-    def -= (c: Component): this.type = { _contents -= c; this }
-    def += (cl: (Component, Constraints)): this.type = { update(cl._1, cl._2); this }
-    override def update (c: Component, l: Constraints) {
-      val (v, msg) = areValid(l)
-      if (!v) throw new IllegalArgumentException(msg)
-      add(c, l)
-    }
-    def get(c: Component) = Option(constraintsFor(c))
-    override def size = peer.getComponentCount
-    def iterator: Iterator[(Component, Constraints)] =
-      peer.getComponents.iterator.map { c =>
-        val comp = UIElement.cachedWrapper[Component](c.asInstanceOf[JComponent])
-        (comp, constraintsFor(comp))
-      }
-  }
-}
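
A sketch of the layout map described above (not from the patch); BorderPanel is the usual example of a LayoutContainer with typed constraints:

    import scala.swing._

    object LayoutMapSketch {
      val panel = new BorderPanel {
        // Writing into the layout map sets the constraints and adds the child in one step.
        layout(new Label("Toolbar")) = BorderPanel.Position.North
        layout(new Button("Run"))    = BorderPanel.Position.Center
      }
      // Constraints can be read back with panel.layout.get(child): Option[Constraints].
    }
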
diff --git a/src/swing/scala/swing/ListView.scala b/src/swing/scala/swing/ListView.scala
deleted file mode 100644
index 40639aa..0000000
--- a/src/swing/scala/swing/ListView.scala
+++ /dev/null
@@ -1,248 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-import javax.swing._
-import javax.swing.event._
-
-object ListView {
-  /**
-   * The supported modes of user selections.
-   */
-  object IntervalMode extends Enumeration {
-    val Single = Value(ListSelectionModel.SINGLE_SELECTION)
-    val SingleInterval = Value(ListSelectionModel.SINGLE_INTERVAL_SELECTION)
-    val MultiInterval = Value(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION)
-  }
-
-  def wrap[A](c: JList) = new ListView[A] {
-    override lazy val peer = c
-  }
-
-  object Renderer {
-    def wrap[A](r: ListCellRenderer): Renderer[A] = new Wrapped[A](r)
-
-    /**
-     * Wrapper for <code>javax.swing.ListCellRenderer</code>s
-     */
-    class Wrapped[A](override val peer: ListCellRenderer) extends Renderer[A] {
-      def componentFor(list: ListView[_], isSelected: Boolean, focused: Boolean, a: A, index: Int) = {
-        Component.wrap(peer.getListCellRendererComponent(list.peer, a, index, isSelected, focused).asInstanceOf[JComponent])
-      }
-    }
-
-    /**
-     * Returns a renderer for items of type <code>A</code>. The given function
-     * converts items of type <code>A</code> to items of type <code>B</code>
-     * for which a renderer is implicitly given. This allows chaining of
-     * renderers, e.g.:
-     *
-     * <code>
-     * case class Person(name: String, email: String)
-     * val persons = List(Person("John", "j.doe at a.com"), Person("Mary", "m.jane at b.com"))
-     * new ListView(persons) {
-     *   renderer = ListView.Renderer(_.name)
-     * }
-     * </code>
-     */
-    def apply[A,B](f: A => B)(implicit renderer: Renderer[B]): Renderer[A] = new Renderer[A] {
-      def componentFor(list: ListView[_], isSelected: Boolean, focused: Boolean, a: A, index: Int): Component =
-        renderer.componentFor(list, isSelected, focused, f(a), index)
-    }
-  }
-
-  /**
-   * Item renderer for a list view. This is contravariant on the type of the
-   * items, so a more general renderer can be used in place of a more specific
-   * one. For instance, an <code>Any</code> renderer can be used for a list view
-   * of strings.
-   *
-   * @see javax.swing.ListCellRenderer
-   */
-  abstract class Renderer[-A] {
-    def peer: ListCellRenderer = new ListCellRenderer {
-      def getListCellRendererComponent(list: JList, a: Any, index: Int, isSelected: Boolean, focused: Boolean) =
-        componentFor(ListView.wrap[A](list), isSelected, focused, a.asInstanceOf[A], index).peer
-    }
-    def componentFor(list: ListView[_], isSelected: Boolean, focused: Boolean, a: A, index: Int): Component
-  }
-
-  /**
-   * A default renderer that maintains a single component for item rendering
-   * and preconfigures it to sensible defaults. It is polymorphic on the
-   * component's type so clients can easily use component specific attributes
-   * during configuration.
-   */
-  abstract class AbstractRenderer[-A, C<:Component](protected val component: C) extends Renderer[A] {
-    // The renderer component is responsible for painting selection
-    // backgrounds. Hence, make sure it is opaque to let it draw
-    // the background.
-    component.opaque = true
-
-    /**
-     * Standard preconfiguration that is commonly done for any component.
-     * This includes foreground and background colors, as well as colors
-     * of item selections.
-     */
-    def preConfigure(list: ListView[_], isSelected: Boolean, focused: Boolean, a: A, index: Int) {
-      if (isSelected) {
-        component.background = list.selectionBackground
-        component.foreground = list.selectionForeground
-      } else {
-        component.background = list.background
-        component.foreground = list.foreground
-      }
-    }
-    /**
-     * Configuration that is specific to the component and this renderer.
-     */
-    def configure(list: ListView[_], isSelected: Boolean, focused: Boolean, a: A, index: Int)
-
-    /**
-     * Configures the component before returning it.
-     */
-    def componentFor(list: ListView[_], isSelected: Boolean, focused: Boolean, a: A, index: Int): Component = {
-      preConfigure(list, isSelected, focused, a, index)
-      configure(list, isSelected, focused, a, index)
-      component
-    }
-  }
-
-  /**
-   * A generic renderer that uses Swing's built-in renderers. If there is no
-   * specific renderer for a type, this renderer falls back to a renderer
-   * that renders the string returned from an item's <code>toString</code>.
-   */
-  implicit object GenericRenderer extends Renderer[Any] {
-    override lazy val peer: ListCellRenderer = new DefaultListCellRenderer
-    def componentFor(list: ListView[_], isSelected: Boolean, focused: Boolean, a: Any, index: Int): Component = {
-      val c = peer.getListCellRendererComponent(list.peer, a, index, isSelected, focused).asInstanceOf[JComponent]
-      Component.wrap(c)
-    }
-  }
-}
-
-/**
- * A component that displays a number of elements in a list. A list view does
- * not support inline editing of items. If you need it, use a table view instead.
- *
- * Named <code>ListView</code> to avoid a clash with the frequently used
- * <code>scala.List</code>
- *
- * @see javax.swing.JList
- */
-class ListView[A] extends Component {
-  import ListView._
-  override lazy val peer: JList = new JList with SuperMixin
-
-  def this(items: Seq[A]) = {
-    this()
-    listData = items
-  }
-
-  protected class ModelWrapper(val items: Seq[A]) extends AbstractListModel {
-    def getElementAt(n: Int) = items(n).asInstanceOf[AnyRef]
-    def getSize = items.size
-  }
-
-  def listData: Seq[A] = peer.getModel match {
-    case model: ModelWrapper => model.items
-    case model @ _ => new Seq[A] { selfSeq =>
-     def length = model.getSize
-     def iterator = new Iterator[A] {
-       var idx = 0
-       def next = { idx += 1; apply(idx-1) }
-       def hasNext = idx < selfSeq.length
-     }
-     def apply(n: Int) = model.getElementAt(n).asInstanceOf[A]
-    }
-  }
-
-  def listData_=(items: Seq[A]) {
-    peer.setModel(new AbstractListModel {
-      def getElementAt(n: Int) = items(n).asInstanceOf[AnyRef]
-      def getSize = items.size
-    })
-  }
-
-  /**
-   * The current item selection.
-   */
-  object selection extends Publisher {
-    protected abstract class Indices[A](a: =>Seq[A]) extends scala.collection.mutable.Set[A] {
-      def -=(n: A): this.type
-      def +=(n: A): this.type
-      def contains(n: A) = a.contains(n)
-      override def size = a.length
-      def iterator = a.iterator
-    }
-
-    def leadIndex: Int = peer.getSelectionModel.getLeadSelectionIndex
-    def anchorIndex: Int = peer.getSelectionModel.getAnchorSelectionIndex
-
-    /**
-     * The indices of the currently selected items.
-     */
-    object indices extends Indices(peer.getSelectedIndices) {
-      def -=(n: Int): this.type = { peer.removeSelectionInterval(n,n); this }
-      def +=(n: Int): this.type = { peer.addSelectionInterval(n,n); this }
-    }
-
-    /**
-     * The currently selected items.
-     */
-    object items extends scala.collection.SeqProxy[A] {
-      def self = peer.getSelectedValues.map(_.asInstanceOf[A])
-    }
-
-    def intervalMode: IntervalMode.Value = IntervalMode(peer.getSelectionModel.getSelectionMode)
-    def intervalMode_=(m: IntervalMode.Value) { peer.getSelectionModel.setSelectionMode(m.id) }
-
-    peer.getSelectionModel.addListSelectionListener(new ListSelectionListener {
-      def valueChanged(e: javax.swing.event.ListSelectionEvent) {
-        publish(new ListSelectionChanged(ListView.this, e.getFirstIndex to e.getLastIndex, e.getValueIsAdjusting))
-      }
-    })
-
-    def adjusting = peer.getSelectionModel.getValueIsAdjusting
-  }
-
-  def renderer: ListView.Renderer[A] = ListView.Renderer.wrap(peer.getCellRenderer)
-  def renderer_=(r: ListView.Renderer[A]) { peer.setCellRenderer(r.peer) }
-
-  def fixedCellWidth = peer.getFixedCellWidth
-  def fixedCellWidth_=(x: Int) = peer.setFixedCellWidth(x)
-
-  def fixedCellHeight = peer.getFixedCellHeight
-  def fixedCellHeight_=(x: Int) = peer.setFixedCellHeight(x)
-
-  def prototypeCellValue: A = peer.getPrototypeCellValue.asInstanceOf[A]
-  def prototypeCellValue_=(a: A) { peer.setPrototypeCellValue(a) }
-
-  def visibleRowCount = peer.getVisibleRowCount
-  def visibleRowCount_=(n: Int) = peer.setVisibleRowCount(n)
-
-  def ensureIndexIsVisible(idx: Int) = peer.ensureIndexIsVisible(idx)
-
-  def selectionForeground: Color = peer.getSelectionForeground
-  def selectionForeground_=(c: Color) = peer.setSelectionForeground(c)
-  def selectionBackground: Color = peer.getSelectionBackground
-  def selectionBackground_=(c: Color) = peer.setSelectionBackground(c)
-
-  def selectIndices(ind: Int*) = peer.setSelectedIndices(ind.toArray)
-
-  peer.getModel.addListDataListener(new ListDataListener {
-    def contentsChanged(e: ListDataEvent) { publish(ListChanged(ListView.this)) }
-    def intervalRemoved(e: ListDataEvent) { publish(ListElementsRemoved(ListView.this, e.getIndex0 to e.getIndex1)) }
-    def intervalAdded(e: ListDataEvent) { publish(ListElementsAdded(ListView.this, e.getIndex0 to e.getIndex1)) }
-  })
-}
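
A sketch of the renderer and selection APIs above (not from the patch; Person and the sample data are illustrative):

    import scala.swing._

    object ListViewSketch {
      case class Person(name: String, email: String)

      val people = Seq(Person("John", "j.doe@a.com"), Person("Mary", "m.jane@b.com"))

      val view = new ListView(people) {
        renderer = ListView.Renderer(_.name)                   // render each item by name only
        selection.intervalMode = ListView.IntervalMode.Single  // at most one selected row
      }

      def selectedNames: Seq[String] = view.selection.items.map(_.name)
    }
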
diff --git a/src/swing/scala/swing/MainFrame.scala b/src/swing/scala/swing/MainFrame.scala
deleted file mode 100644
index 85ce075..0000000
--- a/src/swing/scala/swing/MainFrame.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-
-/**
- * A frame that can be used for main application windows. Shuts down the
- * framework and quits the application when closed.
- */
-class MainFrame(gc: java.awt.GraphicsConfiguration = null) extends Frame(gc) {
-  override def closeOperation() { sys.exit(0) }
-}
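
A sketch of MainFrame as an application's top-level window (not from the patch); SimpleSwingApplication, removed further down in this diff, provides the startup wiring:

    import scala.swing._

    object HelloApp extends SimpleSwingApplication {
      def top = new MainFrame {
        title = "Hello"
        contents = new Label("Closing this window exits the application.")
      }
    }
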
diff --git a/src/swing/scala/swing/Menu.scala b/src/swing/scala/swing/Menu.scala
deleted file mode 100644
index 38b1787..0000000
--- a/src/swing/scala/swing/Menu.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.swing
-
-import scala.collection.mutable
-import javax.swing._
-
-object MenuBar {
-  case object NoMenuBar extends MenuBar
-}
-
-/**
- * A menu bar. Each window can contain at most one. Contains a number of menus.
- *
- * @see javax.swing.JMenuBar
- */
-class MenuBar extends Component with SequentialContainer.Wrapper {
-  override lazy val peer: JMenuBar = new JMenuBar with SuperMixin
-
-  def menus: mutable.Seq[Menu] = contents.filter(_.isInstanceOf[Menu]).map(_.asInstanceOf[Menu])
-
-  // Not implemented by Swing
-  //def helpMenu: Menu = UIElement.cachedWrapper(peer.getHelpMenu)
-  //def helpMenu_=(m: Menu) { peer.setHelpMenu(m.peer) }
-}
-
-/**
- * A menu item that can be used in a menu.
- *
- * @see javax.swing.JMenuItem
- */
-class MenuItem(title0: String) extends AbstractButton {
-  override lazy val peer: JMenuItem = new JMenuItem(title0)
-  def this(a: Action) = {
-    this("")
-    action = a
-  }
-}
-
-/**
- * A menu. Contains menu items. Being a menu item itself, menus can be nested.
- *
- * @see javax.swing.JMenu
- */
-class Menu(title0: String) extends MenuItem(title0) with SequentialContainer.Wrapper { self: Menu =>
-  override lazy val peer: JMenu = new JMenu(title0)
-}
-
-/**
- * A menu item with a radio button.
- *
- * @see javax.swing.JRadioButtonMenuItem
- */
-class RadioMenuItem(title0: String) extends MenuItem(title0) {
-  override lazy val peer: JRadioButtonMenuItem = new JRadioButtonMenuItem(title0)
-}
-/**
- * A menu item with a check box.
- *
- * @see javax.swing.JCheckBoxMenuItem
- */
-class CheckMenuItem(title0: String) extends MenuItem(title0) {
-  override lazy val peer: JCheckBoxMenuItem = new JCheckBoxMenuItem(title0)
-}
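
A sketch assembling the menu classes above into a menu bar (not from the patch; titles and the println are illustrative):

    import scala.swing._

    object MenuSketch {
      val bar = new MenuBar {
        contents += new Menu("File") {
          contents += new MenuItem(Action("Open...") { println("open") })
          contents += new Separator
          contents += new CheckMenuItem("Autosave")
        }
      }
      // Attach to a window with: frame.menuBar = bar
    }
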
diff --git a/src/swing/scala/swing/Orientable.scala b/src/swing/scala/swing/Orientable.scala
deleted file mode 100644
index a73bafb..0000000
--- a/src/swing/scala/swing/Orientable.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-object Orientable {
-  trait Wrapper extends Oriented.Wrapper with Orientable {
-    def orientation_=(o: Orientation.Value) { peer.setOrientation(o.id) }
-  }
-}
-
-/**
- * An <code>Oriented</code> whose orientation can be changed.
- */
-trait Orientable extends Oriented {
-  def orientation_=(o: Orientation.Value)
-}
diff --git a/src/swing/scala/swing/Orientation.scala b/src/swing/scala/swing/Orientation.scala
deleted file mode 100644
index ad616ec..0000000
--- a/src/swing/scala/swing/Orientation.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import java.awt.Adjustable._
-
-object Orientation extends Enumeration {
-  val Horizontal = Value(HORIZONTAL)
-  val Vertical = Value(VERTICAL)
-  val NoOrientation = Value(NO_ORIENTATION)
-}
diff --git a/src/swing/scala/swing/Oriented.scala b/src/swing/scala/swing/Oriented.scala
deleted file mode 100644
index 7996d21..0000000
--- a/src/swing/scala/swing/Oriented.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-object Oriented {
-  trait Wrapper extends Oriented {
-    def peer: OrientedMixin
-
-    /*
-     * Need to revert to structural type, since scroll bars are oriented
-     * and these are created by scroll panes. Shouldn't be a bottleneck.
-     */
-    protected type OrientedMixin = {
-      def getOrientation(): Int
-      def setOrientation(n: Int)
-    }
-    def orientation: Orientation.Value = Orientation(peer.getOrientation)
-  }
-}
-
-/**
- * Something that can have an orientation.
- */
-trait Oriented {
-  def orientation: Orientation.Value
-}
diff --git a/src/swing/scala/swing/Panel.scala b/src/swing/scala/swing/Panel.scala
deleted file mode 100644
index 89ad4d3..0000000
--- a/src/swing/scala/swing/Panel.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-/**
- * A component that can contain other components.
- *
- * @see javax.swing.JPanel
- */
-abstract class Panel extends Component with Container.Wrapper {
-  override lazy val peer: javax.swing.JPanel = new javax.swing.JPanel with SuperMixin
-}
diff --git a/src/swing/scala/swing/PasswordField.scala b/src/swing/scala/swing/PasswordField.scala
deleted file mode 100644
index d2fdd0d..0000000
--- a/src/swing/scala/swing/PasswordField.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-import javax.swing._
-import java.awt.event._
-
-/**
- * A password field that displays a replacement character for each character in the password.
- *
- * @see javax.swing.JPasswordField
- */
-class PasswordField(text0: String, columns0: Int) extends TextField(text0, columns0) {
-  override lazy val peer: JPasswordField = new JPasswordField(text0, columns0) with SuperMixin
-  def this(text: String) = this(text, 0)
-  def this(columns: Int) = this("", columns)
-  def this() = this("")
-
-  def echoChar: Char = peer.getEchoChar
-  def echoChar_=(c: Char) = peer.setEchoChar(c)
-
-  /**
-   * The text property should not be used on a password field for
-   * security reasons.
-   */
-  override def text: String = ""
-  override def text_=(s: String) {}
-  def password: Array[Char] = peer.getPassword
-}
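
A sketch of reading input through the char-array accessor above, since the text property is deliberately blank (not from the patch; the expected value is supplied by the caller):

    import scala.swing._

    object PasswordSketch {
      val field = new PasswordField(12) { echoChar = '*' }

      def matches(expected: Array[Char]): Boolean = {
        val typed = field.password                       // Array[Char]; field.text is always ""
        try java.util.Arrays.equals(typed, expected)
        finally java.util.Arrays.fill(typed, '\u0000')   // wipe the copy after use
      }
    }
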
diff --git a/src/swing/scala/swing/ProgressBar.scala b/src/swing/scala/swing/ProgressBar.scala
deleted file mode 100644
index 33dd716..0000000
--- a/src/swing/scala/swing/ProgressBar.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-
-/**
- * A bar indicating progress of some action. Can be in indeterminate mode,
- * in which it indicates that the action is in progress (usually by some
- * animation) but does not indicate the amount of work done or to be done.
- *
- * @see javax.swing.JProgressBar
- */
-class ProgressBar extends Component with Orientable.Wrapper {
-  override lazy val peer: javax.swing.JProgressBar =
-    new javax.swing.JProgressBar with SuperMixin
-
-  def min: Int = peer.getMinimum
-  def min_=(v: Int) { peer.setMinimum(v) }
-  def max: Int = peer.getMaximum
-  def max_=(v: Int) { peer.setMaximum(v) }
-  def value: Int = peer.getValue
-  def value_=(v: Int) { peer.setValue(v) }
-
-  def labelPainted: Boolean = peer.isStringPainted
-  def labelPainted_=(v: Boolean) { peer.setStringPainted(v) }
-
-  def label: String = peer.getString
-  def label_=(v: String) = peer.setString(v)
-
-  def indeterminate: Boolean = peer.isIndeterminate
-  def indeterminate_=(v: Boolean) { peer.setIndeterminate(v) }
-
-  def paintBorder: Boolean = peer.isBorderPainted
-  def paintBorder(v: Boolean) { peer.setBorderPainted(v) }
-}
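
A sketch of driving the ProgressBar above (not from the patch); Swing.onEDT is used because the bar, like any Swing component, should only be updated on the event dispatch thread:

    import scala.swing._

    object ProgressSketch {
      val bar = new ProgressBar {
        min = 0
        max = 100
        labelPainted = true
      }

      def update(done: Int, total: Int): Unit = Swing.onEDT {
        bar.indeterminate = total <= 0          // unknown total: animate instead of filling
        if (total > 0) {
          bar.value = done * 100 / total
          bar.label = done + " / " + total
        }
      }
    }
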
diff --git a/src/swing/scala/swing/Publisher.scala b/src/swing/scala/swing/Publisher.scala
deleted file mode 100644
index 578ef71..0000000
--- a/src/swing/scala/swing/Publisher.scala
+++ /dev/null
@@ -1,174 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.swing
-
-import scala.collection.mutable
-import mutable.Buffer
-import event.Event
-
-/** <p>
- *    Notifies registered reactions when an event is published. Publishers are
- *    also reactors and listen to themselves by default as a convenience.
- *  </p>
- *  <p>
- *    In order to reduce memory leaks, reactions are weakly referenced by default,
- *    unless they implement <code>Reactions.StronglyReferenced</code>. That way,
- *    the lifetime of a reaction is more easily bound to the registering object,
- *    which in common client code is a reactor holding strong references to its
- *    reactions. As a result, reactors can be garbage collected even though they
- *    still have reactions registered at some publisher, but not vice versa
- *    since reactors (strongly) reference publishers they are interested in.
- *  </p>
- */
-trait Publisher extends Reactor {
-  import Reactions._
-
-  protected val listeners = new RefSet[Reaction] {
-    import scala.ref._
-    val underlying = new mutable.HashSet[Reference[Reaction]]
-    protected def Ref(a: Reaction) = a match {
-      case a: StronglyReferenced => new StrongReference[Reaction](a) with super.Ref[Reaction]
-      case _ => new WeakReference[Reaction](a, referenceQueue) with super.Ref[Reaction]
-    }
-  }
-
-  private[swing] def subscribe(listener: Reaction) { listeners += listener }
-  private[swing] def unsubscribe(listener: Reaction) { listeners -= listener }
-
-  /**
-   * Notify all registered reactions.
-   */
-  def publish(e: Event) { for (l <- listeners) if (l.isDefinedAt(e)) l(e) }
-
-  listenTo(this)
-}
-
-/**
- * A publisher that subscribes itself to an underlying event source only when the first
- * reaction is installed, and can unsubscribe itself again when the last reaction is uninstalled.
- */
-private[swing] trait LazyPublisher extends Publisher {
-  import Reactions._
-
-  protected def onFirstSubscribe()
-  protected def onLastUnsubscribe()
-
-  override def subscribe(listener: Reaction) {
-    if(listeners.size == 1) onFirstSubscribe()
-    super.subscribe(listener)
-  }
-  override def unsubscribe(listener: Reaction) {
-    super.unsubscribe(listener)
-    if(listeners.size == 1) onLastUnsubscribe()
-  }
-}
-
-
-
-import scala.ref._
-
-private[swing] trait SingleRefCollection[+A <: AnyRef] extends Iterable[A] { self =>
-
-  trait Ref[+A <: AnyRef] extends Reference[A] {
-    override def hashCode() = get match {
-      case Some(x)  => x.##
-      case _        => 0
-    }
-    override def equals(that: Any) = that match {
-      case that: ReferenceWrapper[_] =>
-        val v1 = this.get
-        val v2 = that.get
-        v1 == v2
-      case _ => false
-    }
-  }
-
-  //type Ref <: Reference[A] // TODO: could use higher kinded types, but currently crashes
-  protected[this] def Ref(a: A): Ref[A]
-  protected[this] val referenceQueue = new ReferenceQueue[A]
-
-  protected val underlying: Iterable[Reference[A]]
-
-  def purgeReferences() {
-    var ref = referenceQueue.poll
-    while (ref != None) {
-      removeReference(ref.get.asInstanceOf[Reference[A]])
-      ref = referenceQueue.poll
-    }
-  }
-
-  protected[this] def removeReference(ref: Reference[A])
-
-  def iterator = new Iterator[A] {
-    private val elems = self.underlying.iterator
-    private var hd: A = _
-    private var ahead: Boolean = false
-    private def skip(): Unit =
-      while (!ahead && elems.hasNext) {
-        // make sure we have a reference to the next element,
-        // otherwise it might be garbage collected
-        val next = elems.next.get
-        ahead = next != None
-        if (ahead) hd = next.get
-      }
-    def hasNext: Boolean = { skip; ahead }
-    def next(): A =
-      if (hasNext) { ahead = false; hd }
-      else throw new NoSuchElementException("next on empty iterator")
-  }
-}
-
-private[swing] class StrongReference[+T <: AnyRef](value: T) extends Reference[T] {
-  private[this] var ref: Option[T] = Some(value)
-  def isValid: Boolean = ref != None
-  def apply(): T = ref.get
-  def get: Option[T] = ref
-  override def toString = get.map(_.toString).getOrElse("<deleted>")
-  def clear() { ref = None }
-  def enqueue(): Boolean = false
-  def isEnqueued(): Boolean = false
-}
-
-abstract class RefBuffer[A <: AnyRef] extends Buffer[A] with SingleRefCollection[A] { self =>
-  protected val underlying: Buffer[Reference[A]]
-
-  def +=(el: A): this.type = { purgeReferences(); underlying += Ref(el); this }
-  def +=:(el: A) = { purgeReferences(); Ref(el) +=: underlying; this }
-  def remove(el: A) { underlying -= Ref(el); purgeReferences(); }
-  def remove(n: Int) = { val el = apply(n); remove(el); el }
-  def insertAll(n: Int, iter: Iterable[A]) {
-    purgeReferences()
-    underlying.insertAll(n, iter.view.map(Ref(_)))
-  }
-  def update(n: Int, el: A) { purgeReferences(); underlying(n) = Ref(el) }
-  def apply(n: Int) = {
-    purgeReferences()
-    var el = underlying(n).get
-    while (el == None) {
-      purgeReferences(); el = underlying(n).get
-    }
-    el.get
-  }
-
-  def length = { purgeReferences(); underlying.length }
-  def clear() { underlying.clear(); purgeReferences() }
-
-  protected[this] def removeReference(ref: Reference[A]) { underlying -= ref }
-}
-
-private[swing] abstract class RefSet[A <: AnyRef] extends mutable.Set[A] with SingleRefCollection[A] { self =>
-  protected val underlying: mutable.Set[Reference[A]]
-
-  def -=(el: A): this.type = { underlying -= Ref(el); purgeReferences(); this }
-  def +=(el: A): this.type = { purgeReferences(); underlying += Ref(el); this }
-  def contains(el: A): Boolean = { purgeReferences(); underlying.contains(Ref(el)) }
-  override def size = { purgeReferences(); underlying.size }
-
-  protected[this] def removeReference(ref: Reference[A]) { underlying -= ref }
-}
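
A sketch of a custom publisher firing its own event type, as described above (not from the patch; Thermometer and TemperatureChanged are illustrative):

    import scala.swing._
    import scala.swing.event.Event

    object Thermometer extends Publisher {
      case class TemperatureChanged(celsius: Double) extends Event

      def report(celsius: Double): Unit = publish(TemperatureChanged(celsius))
    }
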
diff --git a/src/swing/scala/swing/RadioButton.scala b/src/swing/scala/swing/RadioButton.scala
deleted file mode 100644
index 64f8b23..0000000
--- a/src/swing/scala/swing/RadioButton.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import javax.swing._
-
-/**
- * A two state button that is usually used in a <code>ButtonGroup</code>
- * together with other <code>RadioButton</code>s, in order to indicate
- * that at most one of them can be selected.
- *
- * @see javax.swing.JRadioButton
- */
-class RadioButton(text0: String) extends ToggleButton {
-  override lazy val peer: JRadioButton = new JRadioButton(text0) with SuperMixin
-  def this() = this("")
-}
diff --git a/src/swing/scala/swing/Reactions.scala b/src/swing/scala/swing/Reactions.scala
deleted file mode 100644
index d8a62aa..0000000
--- a/src/swing/scala/swing/Reactions.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event.Event
-import scala.collection.mutable.{Buffer, ListBuffer}
-
-object Reactions {
-  import scala.ref._
-
-  class Impl extends Reactions {
-    private val parts: Buffer[Reaction] = new ListBuffer[Reaction]
-    def isDefinedAt(e: Event) = parts.exists(_ isDefinedAt e)
-    def += (r: Reaction): this.type = { parts += r; this }
-    def -= (r: Reaction): this.type = { parts -= r; this }
-    def apply(e: Event) {
-      for (p <- parts) if (p isDefinedAt e) p(e)
-    }
-  }
-
-  type Reaction = PartialFunction[Event, Unit]
-
-  /**
-   * A Reaction implementing this trait is strongly referenced in the reaction list
-   */
-  trait StronglyReferenced
-
-  class Wrapper(listener: Any)(r: Reaction) extends Reaction with StronglyReferenced with Proxy {
-    def self = listener
-    def isDefinedAt(e: Event) = r.isDefinedAt(e)
-    def apply(e: Event) { r(e) }
-  }
-}
-
-/**
- * Used by reactors to let clients register custom event reactions.
- */
-abstract class Reactions extends Reactions.Reaction {
-  /**
-   * Add a reaction.
-   */
-  def += (r: Reactions.Reaction): this.type
-
-  /**
-   * Remove the given reaction.
-   */
-  def -= (r: Reactions.Reaction): this.type
-}
diff --git a/src/swing/scala/swing/Reactor.scala b/src/swing/scala/swing/Reactor.scala
deleted file mode 100644
index 8f74831..0000000
--- a/src/swing/scala/swing/Reactor.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-/**
- * The counterpart to publishers. Listens to events from registered publishers.
- */
-trait Reactor {
-  /**
-   * All reactions of this reactor.
-   */
-  val reactions: Reactions = new Reactions.Impl
-  /**
-   * Listen to the given publishers as long as <code>deafTo</code> isn't called for
-   * them.
-   */
-  def listenTo(ps: Publisher*) = for (p <- ps) p.subscribe(reactions)
-  /**
-   * Installed reactions won't receive events from the given publishers any longer.
-   */
-  def deafTo(ps: Publisher*) = for (p <- ps) p.unsubscribe(reactions)
-}
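
A sketch of a reactor wiring listenTo and reactions to a component's events (not from the patch; ClickCounter is illustrative):

    import scala.swing._
    import scala.swing.event.ButtonClicked

    object ClickCounter extends Reactor {
      val button = new Button("Click me")
      private var clicks = 0

      listenTo(button)
      reactions += {
        case ButtonClicked(`button`) =>
          clicks += 1
          println("clicks so far: " + clicks)
      }
      // deafTo(button) would stop these reactions from receiving further events.
    }
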
diff --git a/src/swing/scala/swing/RichWindow.scala b/src/swing/scala/swing/RichWindow.scala
deleted file mode 100644
index a60cdd3..0000000
--- a/src/swing/scala/swing/RichWindow.scala
+++ /dev/null
@@ -1,195 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.swing
-
-import java.awt.{ Window => AWTWindow, Frame => AWTFrame }
-import javax.swing._
-import Swing._
-
-object RichWindow {
-  /**
-   * Mixin this trait if you want an undecorated window.
-   */
-  trait Undecorated extends RichWindow {
-    // we do a mixin here, since setUndecorated is only allowed to be called
-    // when the component is not displayable.
-    peer.setUndecorated(true)
-  }
-}
-
-/**
- * A window that adds some functionality to the plain Window class and serves as
- * the common base class for frames and dialogs.
- *
- * Implementation note: this class is sealed since we need to know that a rich
- * window is either a dialog or a frame at some point.
- */
-sealed trait RichWindow extends Window {
-  def peer: AWTWindow with InterfaceMixin
-
-  trait InterfaceMixin extends super.InterfaceMixin {
-    def getJMenuBar(): JMenuBar
-    def setJMenuBar(b: JMenuBar)
-    def setUndecorated(b: Boolean)
-    def setTitle(s: String)
-    def getTitle(): String
-    def setResizable(b: Boolean)
-    def isResizable(): Boolean
-  }
-
-  def title: String = peer.getTitle
-  def title_=(s: String) = peer.setTitle(s)
-
-  /**
-   * The menu bar of this frame or `NoMenuBar` if no menu bar is set.
-   */
-  def menuBar: MenuBar = {
-    val m = UIElement.cachedWrapper[MenuBar](peer.getJMenuBar)
-    if (m != null) m else MenuBar.NoMenuBar
-  }
-  /**
-   * Set the current menu bar of this frame. Pass `NoMenuBar` if this frame
-   * should not show a menu bar.
-   */
-  def menuBar_=(m: MenuBar) =
-    peer.setJMenuBar(if (m == MenuBar.NoMenuBar) null else m.peer)
-
-  def resizable_=(b: Boolean) { peer.setResizable(b) }
-  def resizable = peer.isResizable
-}
-
-/**
- * A window with decoration such as a title, border, and action buttons.
- *
- * An AWT window cannot be wrapped dynamically with this class, i.e., you cannot
- * write something like new Window { def peer = myAWTWindow }
- *
- * @see javax.swing.JFrame
- */
-class Frame(gc: java.awt.GraphicsConfiguration = null) extends RichWindow {
-  override lazy val peer: JFrame with InterfaceMixin = new JFrame(gc) with InterfaceMixin with SuperMixin
-
-  def iconify() { peer.setExtendedState(peer.getExtendedState | AWTFrame.ICONIFIED) }
-  def uniconify() { peer.setExtendedState(peer.getExtendedState & ~AWTFrame.ICONIFIED) }
-  def iconified: Boolean = (peer.getExtendedState & AWTFrame.ICONIFIED) != 0
-  def maximize() { peer.setExtendedState(peer.getExtendedState | AWTFrame.MAXIMIZED_BOTH) }
-  def unmaximize() { peer.setExtendedState(peer.getExtendedState & ~AWTFrame.MAXIMIZED_BOTH) }
-  def maximized: Boolean = (peer.getExtendedState & AWTFrame.MAXIMIZED_BOTH) != 0
-
-  def iconImage: Image = peer.getIconImage
-  def iconImage_=(i: Image) { peer.setIconImage(i) }
-}
-
-/**
- * Simple predefined dialogs.
- *
- * @see javax.swing.JOptionPane
- */
-object Dialog {
-  /**
-   * The message type of a dialog.
-   */
-  object Message extends Enumeration {
-    val Error = Value(JOptionPane.ERROR_MESSAGE)
-    val Info = Value(JOptionPane.INFORMATION_MESSAGE)
-    val Warning = Value(JOptionPane.WARNING_MESSAGE)
-    val Question = Value(JOptionPane.QUESTION_MESSAGE)
-    val Plain = Value(JOptionPane.PLAIN_MESSAGE)
-  }
-
-  /**
-   * The possible answers a user can select.
-   */
-  object Options extends Enumeration {
-    val Default = Value(JOptionPane.DEFAULT_OPTION)
-    val YesNo = Value(JOptionPane.YES_NO_OPTION)
-    val YesNoCancel = Value(JOptionPane.YES_NO_CANCEL_OPTION)
-    val OkCancel = Value(JOptionPane.OK_CANCEL_OPTION)
-  }
-
-  /**
-   * The result selected in a dialog.
-   */
-  object Result extends Enumeration {
-    val Yes = Value(JOptionPane.YES_OPTION)
-    val Ok = Yes
-    val No = Value(JOptionPane.NO_OPTION)
-    val Cancel = Value(JOptionPane.CANCEL_OPTION)
-    val Closed = Value(JOptionPane.CLOSED_OPTION)
-  }
-
-  private def uiString(txt: String) = UIManager.getString(txt)
-
-  def showConfirmation(parent: Component = null,
-                       message: Any,
-                       title: String = uiString("OptionPane.titleText"),
-                       optionType: Options.Value = Options.YesNo,
-                       messageType: Message.Value = Message.Question,
-                       icon: Icon = EmptyIcon): Result.Value =
-    Result(JOptionPane.showConfirmDialog(nullPeer(parent), message, title,
-      optionType.id, messageType.id, Swing.wrapIcon(icon)))
-
-  def showOptions(parent: Component = null,
-                  message: Any,
-                  title: String = uiString("OptionPane.titleText"),
-                  optionType: Options.Value = Options.YesNo,
-                  messageType: Message.Value = Message.Question,
-                  icon: Icon = EmptyIcon,
-                  entries: Seq[Any],
-                  initial: Int): Result.Value = {
-    val r = JOptionPane.showOptionDialog(nullPeer(parent), message, title,
-      optionType.id, messageType.id, Swing.wrapIcon(icon),
-      (entries map toAnyRef).toArray, entries(initial))
-    Result(r)
-  }
-
-  def showInput[A](parent: Component = null,
-                   message: Any,
-                   title: String = uiString("OptionPane.inputDialogTitle"),
-                   messageType: Message.Value = Message.Question,
-                   icon: Icon = EmptyIcon,
-                   entries: Seq[A] = Nil,
-                   initial: A): Option[A] = {
-    val e = if (entries.isEmpty) null
-    else (entries map toAnyRef).toArray
-    val r = JOptionPane.showInputDialog(nullPeer(parent), message, title,
-      messageType.id, Swing.wrapIcon(icon),
-      e, initial)
-
-    toOption[A](r)
-  }
-  def showMessage(parent: Component = null,
-                  message: Any,
-                  title: String = uiString("OptionPane.messageDialogTitle"),
-                  messageType: Message.Value = Message.Info,
-                  icon: Icon = EmptyIcon) {
-    JOptionPane.showMessageDialog(nullPeer(parent), message, title,
-      messageType.id, Swing.wrapIcon(icon))
-  }
-}
-
-/**
- * A dialog window.
- *
- * @see javax.swing.JDialog
- */
-class Dialog(owner: Window, gc: java.awt.GraphicsConfiguration = null) extends RichWindow {
-  override lazy val peer: JDialog with InterfaceMixin =
-    if (owner == null) new JDialog with InterfaceMixin with SuperMixin
-    else owner match {
-      case f: Frame => new JDialog(f.peer, "", false, gc) with InterfaceMixin with SuperMixin
-      case d: Dialog => new JDialog(d.peer, "", false, gc) with InterfaceMixin with SuperMixin
-    }
-
-  def this() = this(null)
-
-  def modal_=(b: Boolean) { peer.setModal(b) }
-  def modal = peer.isModal
-}
-
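
A sketch of the predefined dialogs above (not from the patch; the prompts are illustrative):

    import scala.swing._

    object DialogSketch {
      def confirmQuit(parent: Component): Boolean =
        Dialog.showConfirmation(parent, "Really quit?",
          optionType = Dialog.Options.YesNo) == Dialog.Result.Yes

      def askName(parent: Component): Option[String] =
        Dialog.showInput(parent, "Your name:", initial = "")
    }
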
diff --git a/src/swing/scala/swing/RootPanel.scala b/src/swing/scala/swing/RootPanel.scala
deleted file mode 100644
index 7e4882d..0000000
--- a/src/swing/scala/swing/RootPanel.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-/**
- * The root of a component hierarchy. Contains at most one component.
- *
- * @see javax.swing.RootPaneContainer
- */
-trait RootPanel extends Container {
-  def peer: java.awt.Component with javax.swing.RootPaneContainer
-
-  /**
-   * At most one component.
-   */
-  def contents: Seq[Component] =
-    if (peer.getContentPane.getComponentCount == 0) Nil
-    else {
-      val c = peer.getContentPane.getComponent(0).asInstanceOf[javax.swing.JComponent]
-      List(UIElement.cachedWrapper[Component](c))
-    }
-
-  def contents_=(c: Component) {
-    if (peer.getContentPane.getComponentCount > 0) {
-      val old = peer.getContentPane.getComponent(0)
-      peer.getContentPane.remove(old)
-    }
-    peer.getContentPane.add(c.peer)
-  }
-}
diff --git a/src/swing/scala/swing/ScrollBar.scala b/src/swing/scala/swing/ScrollBar.scala
deleted file mode 100644
index 6a1acdc..0000000
--- a/src/swing/scala/swing/ScrollBar.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import javax.swing.{JScrollBar, BoundedRangeModel}
-import java.awt.event.{AdjustmentListener}
-
-object ScrollBar {
-  def wrap(c: JScrollBar): ScrollBar = {
-    val w = UIElement.cachedWrapper[ScrollBar](c)
-    if (w != null) w
-    else new ScrollBar { override lazy val peer = c }
-  }
-}
-
-class ScrollBar extends Component with Orientable.Wrapper with Adjustable.Wrapper {
-  override lazy val peer: JScrollBar = new JScrollBar with SuperMixin
-
-  def valueIsAjusting = peer.getValueIsAdjusting
-  def valueIsAjusting_=(b: Boolean) = peer.setValueIsAdjusting(b)
-
-  // TODO: can we find a better interface?
-  //def setValues(value: Int = this.value, visible: Int = visibleAmount,
-  //              min: Int = minimum, max: Int = maximum) =
-  //  peer.setValues(value, visible, min, max)
-
-  // Not currently needed, requires wrapper for BoundedRangeModel
-  //
-  //def model = peer.getModel
-  //def model_=(m: BoundedRangeModel) = peer.setModel(m)
-}
diff --git a/src/swing/scala/swing/ScrollPane.scala b/src/swing/scala/swing/ScrollPane.scala
deleted file mode 100644
index afd6cf2..0000000
--- a/src/swing/scala/swing/ScrollPane.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import javax.swing.{JScrollPane, ScrollPaneConstants}
-
-object ScrollPane {
-  object BarPolicy extends Enumeration {
-    import ScrollPaneConstants._
-    val AsNeeded = new Value(HORIZONTAL_SCROLLBAR_AS_NEEDED,
-                             VERTICAL_SCROLLBAR_AS_NEEDED)
-    val Never = new Value(HORIZONTAL_SCROLLBAR_NEVER,
-                          VERTICAL_SCROLLBAR_NEVER)
-    val Always = new Value(HORIZONTAL_SCROLLBAR_ALWAYS,
-                           VERTICAL_SCROLLBAR_ALWAYS)
-
-    def wrap(id: Int) = id match {
-      case HORIZONTAL_SCROLLBAR_AS_NEEDED | VERTICAL_SCROLLBAR_AS_NEEDED => AsNeeded
-      case HORIZONTAL_SCROLLBAR_NEVER | VERTICAL_SCROLLBAR_NEVER => Never
-      case HORIZONTAL_SCROLLBAR_ALWAYS | VERTICAL_SCROLLBAR_ALWAYS => Always
-    }
-    class Value(val horizontalPeer: Int, val verticalPeer: Int) extends super.Val {
-      override def id = horizontalPeer
-    }
-  }
-}
-
-/**
- * Can have at most a single child component, which will be put inside a canvas (the viewport)
- * that can be scrolled.
- *
- * @see javax.swing.JScrollPane
- */
-class ScrollPane extends Component with Container {
-  import ScrollPane._
-
-  override lazy val peer: JScrollPane = new JScrollPane with SuperMixin
-  def this(c: Component) = {
-    this()
-    contents = c
-  }
-  def contents: Seq[Component] =
-    List(UIElement.cachedWrapper[Component](peer.getViewport.getView.asInstanceOf[javax.swing.JComponent]))
-
-  /**
-   * Sets the single child.
-   */
-  def contents_=(c: Component) { peer.setViewportView(c.peer) }
-
-  /**
-   * The component being displayed in this pane's row header.
-   *
-   * If you want to create a row header for lists or tables, you probably
-   * want to let the row header be a list view with the same row height as
-   * the viewport component.
-   */
-  def rowHeaderView: Option[Component] =
-    Option(peer.getRowHeader.getView) map UIElement.cachedWrapper[Component]
-  def rowHeaderView_=(c: Component) = peer.setRowHeaderView(c.peer)
-  def rowHeaderView_=(c: Option[Component]) = peer.setRowHeaderView(c.map(_.peer).orNull)
-
-  def columnHeaderView: Option[Component] =
-    Option(peer.getColumnHeader.getView) map UIElement.cachedWrapper[Component]
-  def columnHeaderView_=(c: Component) = peer.setColumnHeaderView(c.peer)
-  def columnHeaderView_=(c: Option[Component]) = peer.setColumnHeaderView(c.map(_.peer).orNull)
-
-  def viewportView: Option[Component] =
-    Option(peer.getViewport.getView) map UIElement.cachedWrapper[Component]
-  def viewportView_=(c: Component) = peer.setViewportView(c.peer)
-  def viewportView_=(c: Option[Component]) = peer.setViewportView(c.map(_.peer).orNull)
-
-  def verticalScrollBarPolicy = BarPolicy.wrap(peer.getVerticalScrollBarPolicy)
-  def verticalScrollBarPolicy_=(p: BarPolicy.Value) = peer.setVerticalScrollBarPolicy(p.verticalPeer)
-
-  def horizontalScrollBarPolicy = BarPolicy.wrap(peer.getHorizontalScrollBarPolicy)
-  def horizontalScrollBarPolicy_=(p: BarPolicy.Value) = peer.setHorizontalScrollBarPolicy(p.horizontalPeer)
-
-  def horizontalScrollBar = ScrollBar.wrap(peer.getHorizontalScrollBar)
-  def verticalScrollBar = ScrollBar.wrap(peer.getVerticalScrollBar)
-}
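
A sketch wrapping a long ListView in the ScrollPane above and pinning the scroll bar policies (not from the patch):

    import scala.swing._

    object ScrollSketch {
      val rows = new ListView((1 to 500).map(i => "row " + i))

      val pane = new ScrollPane(rows) {
        verticalScrollBarPolicy   = ScrollPane.BarPolicy.Always
        horizontalScrollBarPolicy = ScrollPane.BarPolicy.Never
      }
    }
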
diff --git a/src/swing/scala/swing/Scrollable.scala b/src/swing/scala/swing/Scrollable.scala
deleted file mode 100644
index 1253ac8..0000000
--- a/src/swing/scala/swing/Scrollable.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-object Scrollable {
-  trait Wrapper extends Scrollable {
-    protected def scrollablePeer: javax.swing.Scrollable
-    def preferredViewportSize = scrollablePeer.getPreferredScrollableViewportSize
-
-    def tracksViewportHeight: Boolean = scrollablePeer.getScrollableTracksViewportHeight
-    def tracksViewportWidth: Boolean = scrollablePeer.getScrollableTracksViewportWidth
-
-    def blockIncrement(visibleRect: Rectangle, orientation: Orientation.Value, direction: Int): Int =
-      scrollablePeer.getScrollableBlockIncrement(visibleRect, orientation.id, direction)
-
-    def unitIncrement(visibleRect: Rectangle, orientation: Orientation.Value, direction: Int): Int =
-      scrollablePeer.getScrollableUnitIncrement(visibleRect, orientation.id, direction)
-  }
-}
-
-/**
- * A component that is especially suitable for being placed inside a
- * <code>ScrollPane</code>.
- *
- * @see javax.swing.Scrollable
- */
-trait Scrollable extends Component {
-  def preferredViewportSize: Dimension
-
-  def tracksViewportHeight: Boolean
-  def tracksViewportWidth: Boolean
-
-  def blockIncrement(visibleRect: Rectangle, orientation: Orientation.Value, direction: Int): Int
-  def unitIncrement(visibleRect: Rectangle, orientation: Orientation.Value, direction: Int): Int
-}
diff --git a/src/swing/scala/swing/Separator.scala b/src/swing/scala/swing/Separator.scala
deleted file mode 100644
index 32d209d..0000000
--- a/src/swing/scala/swing/Separator.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import javax.swing._
-
-/**
- * A bar that can be used as a separator, most commonly in menus.
- *
- * @see javax.swing.JSeparator
- */
-class Separator(o: Orientation.Value) extends Component with Oriented.Wrapper {
-  override lazy val peer: JSeparator = new JSeparator(o.id) with SuperMixin
-  def this() = this(Orientation.Horizontal)
-}
diff --git a/src/swing/scala/swing/SequentialContainer.scala b/src/swing/scala/swing/SequentialContainer.scala
deleted file mode 100644
index 5f32b08..0000000
--- a/src/swing/scala/swing/SequentialContainer.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import scala.collection.mutable.Buffer
-
-object SequentialContainer {
-  /**
-   * Utility trait for wrapping sequential containers.
-   */
-  trait Wrapper extends SequentialContainer with Container.Wrapper {
-    override val contents: Buffer[Component] = new Content
-    //def contents_=(c: Component*)  { contents.clear(); contents ++= c }
-  }
-}
-
-/**
- * A container for which a sequential order of children makes sense, such as
- * flow panels or menus. Its contents are mutable.
- */
-trait SequentialContainer extends Container {
-  /**
-   * The mutable child components of this container. The order matters and
-   * usually indicates the layout of the children.
-   */
-  override def contents: Buffer[Component]
-  //def contents_=(c: Component*)
-}
diff --git a/src/swing/scala/swing/SimpleSwingApplication.scala b/src/swing/scala/swing/SimpleSwingApplication.scala
deleted file mode 100644
index cd0f2be..0000000
--- a/src/swing/scala/swing/SimpleSwingApplication.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-package scala.swing
-
-/**
- * Extend this class for simple UI applications. Clients need to
- * implement the `top` method. Framework initialization is done by this class.
- *
- * In order to conform to Swing's threading policy, never implement `top` or any
- * additional member that creates Swing components as a strict value unless
- * component creation happens on the EDT (see `Swing.onEDT` and `Swing.onEDTWait`).
- * Lazy values are fine as long as they are always initialized
- * on the EDT.
- */
-abstract class SimpleSwingApplication extends SwingApplication {
-
-  /**
-   * A GUI application's version of the main method. Called by the default
-   * main method implementation provided by this class.
-   * Implement to return the top-level frame of this application.
-   */
-  def top: Frame
-
-  /**
-   * Calls `top`, packs the frame, and displays it.
-   */
-  override def startup(args: Array[String]) {
-    val t = top
-    if (t.size == new Dimension(0,0)) t.pack()
-    t.visible = true
-  }
-
-  def resourceFromClassloader(path: String): java.net.URL =
-    this.getClass.getResource(path)
-
-  def resourceFromUserDirectory(path: String): java.io.File =
-    new java.io.File(util.Properties.userDir, path)
-}
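
A minimal application built on the class removed above. The object name is illustrative; MainFrame and Label are other members of this package, outside this hunk.

    import scala.swing._

    object HelloApp extends SimpleSwingApplication {
      // `top` is the only member a client has to provide; the inherited
      // `startup` packs the frame (if it has no size yet) and shows it.
      def top = new MainFrame {
        title = "Hello"
        contents = new Label("Hello, scala.swing")
      }
    }
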
diff --git a/src/swing/scala/swing/Slider.scala b/src/swing/scala/swing/Slider.scala
deleted file mode 100644
index e329c31..0000000
--- a/src/swing/scala/swing/Slider.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import javax.swing.{JSlider, JLabel}
-import event._
-
-/**
- * Lets users select a value from a given range. Visually, this is represented
- * as a draggable knob on a horizontal or vertical bar.
- *
- * Fires a ValueChanged event whenever the slider's value changes and
- * when the knob is released.
- *
- * @see javax.swing.JSlider
- */
-class Slider extends Component with Orientable.Wrapper with Publisher {
-  override lazy val peer: JSlider = new JSlider with SuperMixin
-
-  def min: Int = peer.getMinimum
-  def min_=(v: Int) { peer.setMinimum(v) }
-  def max: Int = peer.getMaximum
-  def max_=(v: Int) { peer.setMaximum(v) }
-  def value: Int = peer.getValue
-  def value_=(v: Int) { peer.setValue(v) }
-  def extent: Int = peer.getExtent
-  def extent_=(v: Int) { peer.setExtent(v) }
-
-  def paintLabels: Boolean = peer.getPaintLabels
-  def paintLabels_=(v: Boolean) { peer.setPaintLabels(v) }
-  def paintTicks: Boolean = peer.getPaintTicks
-  def paintTicks_=(v: Boolean) { peer.setPaintTicks(v) }
-  def paintTrack: Boolean = peer.getPaintTrack
-  def paintTrack_=(v: Boolean) { peer.setPaintTrack(v) }
-
-  def snapToTicks: Boolean = peer.getSnapToTicks
-  def snapToTicks_=(v: Boolean) { peer.setSnapToTicks(v) }
-
-  def minorTickSpacing: Int = peer.getMinorTickSpacing
-  def minorTickSpacing_=(v: Int) { peer.setMinorTickSpacing(v) }
-  def majorTickSpacing: Int = peer.getMajorTickSpacing
-  def majorTickSpacing_=(v: Int) { peer.setMajorTickSpacing(v) }
-
-  def adjusting = peer.getValueIsAdjusting
-
-  def labels: scala.collection.Map[Int, Label] = {
-    val labelTable = peer.getLabelTable.asInstanceOf[java.util.Hashtable[Int, JLabel]]
-    new scala.collection.JavaConversions.JMapWrapper(labelTable)
-      .mapValues(v => UIElement.cachedWrapper[Label](v))
-  }
-  def labels_=(l: scala.collection.Map[Int, Label]) {
-    // TODO: do some lazy wrapping
-    val table = new java.util.Hashtable[Any, Any]
-    for ((k,v) <- l) table.put(k, v.peer)
-    peer.setLabelTable(table)
-  }
-
-  peer.addChangeListener(new javax.swing.event.ChangeListener {
-    def stateChanged(e: javax.swing.event.ChangeEvent) {
-      publish(new ValueChanged(Slider.this))
-    }
-  })
-}
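
A sketch of the Slider wrapper removed above, showing how a Reactor (defined elsewhere in this package) can tell dragging apart from the committed value via `adjusting`. Names are illustrative.

    import scala.swing._
    import scala.swing.event.ValueChanged

    object SliderSketch {
      val slider = new Slider {
        min = 0; max = 100; value = 50
        majorTickSpacing = 25
        paintTicks = true
      }
      val watcher = new Reactor {
        listenTo(slider)
        reactions += {
          // Ignore intermediate drag values; act only on the released knob.
          case ValueChanged(`slider`) if !slider.adjusting =>
            println("committed value: " + slider.value)
        }
      }
    }
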
diff --git a/src/swing/scala/swing/SplitPane.scala b/src/swing/scala/swing/SplitPane.scala
deleted file mode 100644
index dd4f290..0000000
--- a/src/swing/scala/swing/SplitPane.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-import Swing._
-
-/**
- * A container with exactly two children. Arranges them side by side, either
- * horizontally or vertically. Displays a draggable divider component between
- * them that lets the user adjust the size ratio of the children.
- *
- * @see javax.swing.JSplitPane
- */
-class SplitPane(o: Orientation.Value, left: Component, right: Component) extends Component with Container with Orientable.Wrapper {
-  override lazy val peer: javax.swing.JSplitPane =
-    new javax.swing.JSplitPane(o.id, left.peer, right.peer) with SuperMixin
-  def this(o: Orientation.Value) = this(o, new Component {}, new Component {})
-  def this() = this(Orientation.Horizontal)
-
-  def contents: Seq[Component] = List(leftComponent, rightComponent)
-  def contents_=(left: Component, right: Component) {
-    peer.setLeftComponent(nullPeer(left))
-    peer.setRightComponent(nullPeer(right))
-  }
-
-  def topComponent: Component =
-    UIElement.cachedWrapper[Component](peer.getTopComponent.asInstanceOf[javax.swing.JComponent])
-  def topComponent_=(c: Component) { peer.setTopComponent(nullPeer(c)) }
-  def bottomComponent: Component =
-    UIElement.cachedWrapper[Component](peer.getBottomComponent.asInstanceOf[javax.swing.JComponent])
-  def bottomComponent_=(c: Component) { peer.setBottomComponent(nullPeer(c)) }
-
-  def leftComponent: Component = topComponent
-  def leftComponent_=(c: Component) { topComponent = c }
-  def rightComponent: Component = bottomComponent
-  def rightComponent_=(c: Component) { bottomComponent = c }
-
-  def dividerLocation: Int = peer.getDividerLocation
-  def dividerLocation_=(n: Int) { peer.setDividerLocation(n) }
-
-  /*def proportionalDividerLocation: Double =
-    if (orientation == Orientation.Vertical) dividerLocation / (size.height - dividerSize)
-    else dividerLocation / (size.width - dividerSize)*/
-  def dividerLocation_=(f: Double) { peer.setDividerLocation(f) }
-
-  def dividerSize: Int = peer.getDividerSize
-  def dividerSize_=(n: Int) { peer.setDividerSize(n) }
-  def resizeWeight: Double = peer.getResizeWeight
-  def resizeWeight_=(n: Double) { peer.setResizeWeight(n) }
-
-  def resetToPreferredSizes() { peer.resetToPreferredSizes() }
-
-  def oneTouchExpandable: Boolean = peer.isOneTouchExpandable
-  def oneTouchExpandable_=(b: Boolean) { peer.setOneTouchExpandable(b) }
-  def continuousLayout: Boolean = peer.isContinuousLayout
-  def continuousLayout_=(b: Boolean) { peer.setContinuousLayout(b) }
-}
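
A usage sketch for the SplitPane wrapper removed above. Orientation, ScrollPane and TextArea are other members of this package; the orientation semantics follow JSplitPane's constants.

    import scala.swing._

    object SplitPaneSketch {
      val split = new SplitPane(Orientation.Vertical,
                                new ScrollPane(new TextArea(10, 30)),
                                new ScrollPane(new TextArea(10, 30))) {
        dividerLocation    = 200   // pixels; assigning a Double sets a proportional location
        oneTouchExpandable = true
        continuousLayout   = true
      }
    }
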
diff --git a/src/swing/scala/swing/Swing.scala b/src/swing/scala/swing/Swing.scala
deleted file mode 100644
index cd5bbf2..0000000
--- a/src/swing/scala/swing/Swing.scala
+++ /dev/null
@@ -1,140 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import java.awt.event._
-import javax.swing.event._
-import javax.swing.border._
-import javax.swing.{JComponent, Icon, BorderFactory, SwingUtilities}
-
-
-/**
- * Helpers for this package.
- */
-object Swing {
-  protected[swing] def toNoIcon(i: Icon): Icon = if(i == null) EmptyIcon else i
-  protected[swing] def toNullIcon(i: Icon): Icon = if(i == EmptyIcon) null else i
-  protected[swing] def nullPeer(c: Component) = if (c != null) c.peer else null
-
-  implicit def pair2Dimension(p: (Int, Int)): Dimension = new Dimension(p._1, p._2)
-  implicit def pair2Point(p: (Int, Int)): Point = new Point(p._1, p._2)
-  implicit def pair2Point(p: (Int, Int, Int, Int)): Rectangle = new Rectangle(p._1, p._2, p._3, p._4)
-
-  @inline final def Runnable(@inline block: =>Unit) = new Runnable {
-    def run = block
-  }
-  final def ChangeListener(f: ChangeEvent => Unit) = new ChangeListener {
-    def stateChanged(e: ChangeEvent) { f(e) }
-  }
-  final def ActionListener(f: ActionEvent => Unit) = new ActionListener {
-    def actionPerformed(e: ActionEvent) { f(e) }
-  }
-
-  def Box(min: Dimension, pref: Dimension, max: Dimension) = new Component {
-    override lazy val peer = new javax.swing.Box.Filler(min, pref, max)
-  }
-  def HGlue = new Component {
-    override lazy val peer = javax.swing.Box.createHorizontalGlue.asInstanceOf[JComponent]
-  }
-  def VGlue = new Component {
-    override lazy val peer = javax.swing.Box.createVerticalGlue.asInstanceOf[JComponent]
-  }
-  def Glue = new Component {
-    override lazy val peer = javax.swing.Box.createGlue.asInstanceOf[JComponent]
-  }
-  def RigidBox(dim: Dimension) = new Component {
-    override lazy val peer = javax.swing.Box.createRigidArea(dim).asInstanceOf[JComponent]
-  }
-  def HStrut(width: Int) = new Component {
-    override lazy val peer = javax.swing.Box.createHorizontalStrut(width).asInstanceOf[JComponent]
-  }
-  def VStrut(height: Int) = new Component {
-    override lazy val peer = javax.swing.Box.createVerticalStrut(height).asInstanceOf[JComponent]
-  }
-
-  def Icon(image: java.awt.Image) = new javax.swing.ImageIcon(image)
-  def Icon(filename: String) = new javax.swing.ImageIcon(filename)
-  def Icon(url: java.net.URL) = new javax.swing.ImageIcon(url)
-
-  /**
-   * The empty icon. Use this icon instead of <code>null</code> to indicate
-   * that you don't want an icon.
-   */
-  case object EmptyIcon extends Icon {
-    def getIconHeight: Int = 0
-    def getIconWidth: Int = 0
-    def paintIcon(c: java.awt.Component, g: java.awt.Graphics, x: Int, y: Int) {}
-  }
-
-  def unwrapIcon(icon: Icon): Icon = if (icon == null) EmptyIcon else icon
-  def wrapIcon(icon: Icon): Icon = if (icon == EmptyIcon) null else icon
-
-  def EmptyBorder = BorderFactory.createEmptyBorder()
-  def EmptyBorder(weight: Int) =
-    BorderFactory.createEmptyBorder(weight, weight, weight, weight)
-  def EmptyBorder(top: Int, left: Int, bottom: Int, right: Int) =
-    BorderFactory.createEmptyBorder(top, left, bottom, right)
-
-  def LineBorder(c: Color) = BorderFactory.createLineBorder(c)
-  def LineBorder(c: Color, weight: Int) = BorderFactory.createLineBorder(c, weight)
-
-  def BeveledBorder(kind: Embossing) = BorderFactory.createBevelBorder(kind.bevelPeer)
-  def BeveledBorder(kind: Embossing, highlight: Color, shadow: Color) =
-    BorderFactory.createBevelBorder(kind.bevelPeer, highlight, shadow)
-  def BeveledBorder(kind: Embossing,
-              highlightOuter: Color, highlightInner: Color,
-              shadowOuter: Color, shadowInner: Color) =
-    BorderFactory.createBevelBorder(kind.bevelPeer,
-          highlightOuter, highlightInner,
-          shadowOuter, shadowInner)
-
-  sealed abstract class Embossing {
-    def bevelPeer: Int
-    def etchPeer: Int
-  }
-  case object Lowered extends Embossing {
-    def bevelPeer = BevelBorder.LOWERED
-    def etchPeer = javax.swing.border.EtchedBorder.LOWERED
-  }
-  case object Raised extends Embossing {
-    def bevelPeer = BevelBorder.RAISED
-    def etchPeer = javax.swing.border.EtchedBorder.RAISED
-  }
-
-  def EtchedBorder = BorderFactory.createEtchedBorder()
-  def EtchedBorder(kind: Embossing) =
-    BorderFactory.createEtchedBorder(kind.etchPeer)
-  def EtchedBorder(kind: Embossing, highlight: Color, shadow: Color) =
-    BorderFactory.createEtchedBorder(kind.etchPeer, highlight, shadow)
-
-  def MatteBorder(top: Int, left: Int, bottom: Int, right: Int, color: Color) =
-    BorderFactory.createMatteBorder(top, left, bottom, right, color)
-  def MatteBorder(top: Int, left: Int, bottom: Int, right: Int, icon: Icon) =
-    BorderFactory.createMatteBorder(top, left, bottom, right, icon)
-
-  def CompoundBorder(outside: Border, inside: Border) =
-    BorderFactory.createCompoundBorder(outside, inside)
-
-  def TitledBorder(border: Border, title: String) =
-    BorderFactory.createTitledBorder(border, title)
-
-  /**
-   * Schedule the given code to be executed on the Swing event dispatching
-   * thread (EDT). Returns immediately.
-   */
-  @inline final def onEDT(op: =>Unit) = SwingUtilities invokeLater Runnable(op)
-
-  /**
-   * Schedule the given code to be executed on the Swing event dispatching
-   * thread (EDT). Blocks until after the code has been run.
-   */
-  @inline final def onEDTWait(op: =>Unit) = SwingUtilities invokeAndWait Runnable(op)
-}
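
A short sketch of the helpers removed above: `onEDT` schedules work on the event dispatch thread, and the border factories wrap javax.swing.BorderFactory. Label and its `border`/`text` properties come from elsewhere in this package; names are illustrative.

    import scala.swing._
    import scala.swing.Swing._

    object SwingHelpersSketch {
      val status = new Label("working...")
      onEDT {
        status.border = CompoundBorder(EtchedBorder, EmptyBorder(4))
        status.text   = "ready"
      }
    }
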
diff --git a/src/swing/scala/swing/SwingActor.scala b/src/swing/scala/swing/SwingActor.scala
deleted file mode 100644
index 035e979..0000000
--- a/src/swing/scala/swing/SwingActor.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-// Dummy to keep ant from recompiling on every run.
- at deprecated("Will be removed in 2.11.0", "2.10.1")
-trait SwingActor { }
diff --git a/src/swing/scala/swing/SwingApplication.scala b/src/swing/scala/swing/SwingApplication.scala
deleted file mode 100644
index 214001f..0000000
--- a/src/swing/scala/swing/SwingApplication.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package scala.swing
-
-/** Convenience class with utility methods for GUI applications. */
-abstract class SwingApplication extends Reactor {
-
-  /** Initializes the application and runs the given program. */
-  def main(args: Array[String]) = Swing.onEDT { startup(args) }
-
-  /** Called before the GUI is created. Override to customize. */
-  def startup(args: Array[String])
-
-  /** Finalizes the application by calling `shutdown` and exits.*/
-  def quit() { shutdown(); sys.exit(0) }
-
-  /** Called before the application is exited. Override to customize. */
-  def shutdown() {}
-}
diff --git a/src/swing/scala/swing/SwingWorker.scala b/src/swing/scala/swing/SwingWorker.scala
deleted file mode 100644
index f4eeb58..0000000
--- a/src/swing/scala/swing/SwingWorker.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package scala.swing
-
-import scala.actors._
-
- at deprecated("Will be removed in 2.11.0", "2.10.1")
-object SwingWorker {
-
-}
-
- at deprecated("Depends on the deprecated package scala.actors. Will be removed in 2.11.0", "2.10.1")
-abstract class SwingWorker extends Actor {
-  def queue() {
-
-  }
-
-  def done() {
-
-  }
-
-  private var _cancelled = false
-  def cancelled: Boolean = _cancelled
-  def cancelled_=(b: Boolean) { _cancelled = b }
-}
diff --git a/src/swing/scala/swing/TabbedPane.scala b/src/swing/scala/swing/TabbedPane.scala
deleted file mode 100644
index 3380505..0000000
--- a/src/swing/scala/swing/TabbedPane.scala
+++ /dev/null
@@ -1,134 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-import scala.collection.mutable.Buffer
-import javax.swing.{JTabbedPane, JComponent}
-
-
-object TabbedPane {
-  object Layout extends Enumeration {
-    val Wrap = Value(JTabbedPane.WRAP_TAB_LAYOUT)
-    val Scroll = Value(JTabbedPane.SCROLL_TAB_LAYOUT)
-  }
-
-  class Page protected[TabbedPane](parent0: TabbedPane, title0: String, content0: Component, tip0: String) extends Proxy {
-    def self = content0
-
-    def this(title0: String, content0: Component, tip0: String) =
-      this(null, title0, content0, tip0)
-    def this(title0: String, content0: Component) =
-      this(title0, content0, "")
-    content = content0 // first add component, *then* set other things
-    title = title0
-    tip = tip0
-
-    protected[TabbedPane] var parent: TabbedPane = parent0
-
-    protected var _title = title0
-    def title: String = _title
-    def title_=(t: String) {
-      // beware: keep this order, since index depends on the _old_ title
-      if (parent != null) parent.peer.setTitleAt(index, t)
-      _title = t
-    }
-    protected var _content = content0
-    def content: Component = _content//UIElement.cachedWrapper(peer.getComponentAt(index).asInstanceOf[JComponent])
-    def content_=(c: Component) { _content = c; if (parent != null) parent.peer.setComponentAt(index, c.peer) }
-    protected var _tip = tip0
-    def tip: String = _tip//peer.getToolTipTextAt(index)
-    def tip_=(t: String) { _tip = t; if (parent != null) parent.peer.setToolTipTextAt(index, t) }
-    protected var _enabled = true
-    def enabled: Boolean = _enabled//peer.isEnabledAt(index)
-    def enabled_=(b: Boolean) { _enabled = b; if (parent != null) parent.peer.setEnabledAt(index, b) }
-    protected var _mnemonic = -1
-    def mnemonic: Int = _mnemonic//peer.getMnemonicAt(index)
-    def mnemonic_=(k: Int) { _mnemonic = k; if (parent != null) parent.peer.setMnemonicAt(index, k)}
-    protected var _foreground: Color = null
-    def foreground: Color = _foreground//peer.getForegroundAt(index)
-    def foreground_=(c: Color) { _foreground = c; if (parent != null) parent.peer.setForegroundAt(index, c)}
-    protected var _background: Color = null
-    def background: Color = _background //peer.getBackgroundAt(index)
-    def background_=(c: Color) { _background = c; if (parent != null) parent.peer.setBackgroundAt(index, c)}
-    def bounds: Rectangle = parent.peer.getBoundsAt(index)
-
-    // TODO: icon, disabledIcon
-
-    def index = if(parent != null) parent.peer.indexOfTab(title) else 0//_index
-    //protected[TabbedPane] var _index: Int = index0
-  }
-}
-
-/**
- * Displays the contents of one of several pages at a time. For each page a tab is
- * visible at all times. The user can click on one of these tabs to move the
- * corresponding page to the front.
- *
- * @see javax.swing.JTabbedPane
- */
-class TabbedPane extends Component with Publisher {
-  override lazy val peer: JTabbedPane = new JTabbedPane with SuperMixin
-  import TabbedPane._
-
-  object pages extends BufferWrapper[Page] {
-    def runCount: Int = peer.getTabRunCount
-
-    def remove(n: Int): Page = {
-      val t = apply(n)
-      peer.removeTabAt(n)
-      t.parent = null
-      //for(i <- n to length) apply(i)._index -= 1
-      t
-    }
-    protected def insertAt(n: Int, t: Page) {
-      //for(i <- n to length) apply(i)._index += 1
-      t.parent = TabbedPane.this
-      peer.insertTab(t.title, null, t.content.peer, t.tip, n)
-    }
-
-    def +=(t: Page): this.type = { t.parent = TabbedPane.this; peer.addTab(t.title, null, t.content.peer, t.tip); this }
-    def length = peer.getTabCount
-    def apply(n: Int) = new Page(TabbedPane.this, peer.getTitleAt(n),
-      UIElement.cachedWrapper[Component](peer.getComponentAt(n).asInstanceOf[javax.swing.JComponent]),
-      peer.getToolTipTextAt(n))
-  }
-
-  def tabLayoutPolicy: Layout.Value = Layout(peer.getTabLayoutPolicy)
-  def tabLayoutPolicy_=(p: Layout.Value) { peer.setTabLayoutPolicy(p.id) }
-
-
-  def tabPlacement: Alignment.Value = Alignment(peer.getTabPlacement)
-  /**
-   * Possible values are Left, Right, Top, Bottom.
-   */
-  def tabPlacement_=(b: Alignment.Value) { peer.setTabPlacement(b.id) }
-
-  @deprecated("Use tabPlacement_=() instead.", "2.9.1")
-  def tabPlacement(b: Alignment.Value) { peer.setTabPlacement(b.id) }
-
-  /**
-   * The current page selection
-   */
-  object selection extends Publisher {
-    def page: Page = pages(index)
-    def page_=(p: Page) { index = p.index }
-
-    def index: Int = peer.getSelectedIndex
-    def index_=(n: Int) { peer.setSelectedIndex(n) }
-
-    peer.addChangeListener(new javax.swing.event.ChangeListener {
-      def stateChanged(e: javax.swing.event.ChangeEvent) {
-        publish(SelectionChanged(TabbedPane.this))
-      }
-    })
-  }
-}
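
A usage sketch of the TabbedPane wrapper removed above. TextArea and Label are other members of this package; the object name is illustrative.

    import scala.swing._
    import scala.swing.TabbedPane.Page

    object TabbedPaneSketch {
      val tabs = new TabbedPane {
        pages += new Page("Editor",  new TextArea(10, 40))
        pages += new Page("Preview", new Label("nothing to preview"), "rendered output")
        tabLayoutPolicy = TabbedPane.Layout.Scroll
      }
      // `selection.page` / `selection.index` track the tab currently in front.
      tabs.selection.index = 0
    }
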
diff --git a/src/swing/scala/swing/Table.scala b/src/swing/scala/swing/Table.scala
deleted file mode 100644
index 45053f0..0000000
--- a/src/swing/scala/swing/Table.scala
+++ /dev/null
@@ -1,320 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-import javax.swing._
-import javax.swing.table._
-import javax.swing.event._
-import scala.collection.mutable
-
-object Table {
-  object AutoResizeMode extends Enumeration {
-    import JTable._
-    val Off = Value(AUTO_RESIZE_OFF, "Off")
-    val NextColumn = Value(AUTO_RESIZE_NEXT_COLUMN, "NextColumn")
-    val SubsequentColumns = Value(AUTO_RESIZE_SUBSEQUENT_COLUMNS, "SubsequentColumns")
-    val LastColumn = Value(AUTO_RESIZE_LAST_COLUMN, "LastColumn")
-    val AllColumns = Value(AUTO_RESIZE_ALL_COLUMNS, "AllColumns")
-  }
-
-  object IntervalMode extends Enumeration {
-    val Single = Value(ListSelectionModel.SINGLE_SELECTION)
-    val SingleInterval = Value(ListSelectionModel.SINGLE_INTERVAL_SELECTION)
-    val MultiInterval = Value(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION)
-  }
-  object ElementMode extends Enumeration {
-    val Row, Column, Cell, None = Value
-  }
-
-  /**
-   * A table item renderer.
-   *
-   * @see javax.swing.table.TableCellRenderer
-   */
-  abstract class Renderer[-A] {
-    def peer: TableCellRenderer = new TableCellRenderer {
-      def getTableCellRendererComponent(table: JTable, value: AnyRef, isSelected: Boolean, hasFocus: Boolean, row: Int, column: Int) = {
-        componentFor(table match {
-          case t: JTableMixin => t.tableWrapper
-          case _ => assert(false); null
-        }, isSelected, hasFocus, value.asInstanceOf[A], row, column).peer
-      }
-    }
-    def componentFor(table: Table, isSelected: Boolean, hasFocus: Boolean, a: A, row: Int, column: Int): Component
-  }
-
-  abstract class AbstractRenderer[-A, C<:Component](val component: C) extends Renderer[A] {
-    // The renderer component is responsible for painting selection
-    // backgrounds. Hence, make sure it is opaque to let it draw
-    // the background.
-    component.opaque = true
-
-    /**
-     * Standard preconfiguration that is commonly done for any component.
-     */
-    def preConfigure(table: Table, isSelected: Boolean, hasFocus: Boolean, a: A, row: Int, column: Int) {
-      if (isSelected) {
-        component.background = table.selectionBackground
-        component.foreground = table.selectionForeground
-      } else {
-        component.background = table.background
-        component.foreground = table.foreground
-      }
-    }
-    /**
-     * Configuration that is specific to the component and this renderer.
-     */
-    def configure(table: Table, isSelected: Boolean, hasFocus: Boolean, a: A, row: Int, column: Int)
-
-    /**
-     * Configures the component before returning it.
-     */
-    def componentFor(table: Table, isSelected: Boolean, hasFocus: Boolean, a: A, row: Int, column: Int): Component = {
-      preConfigure(table, isSelected, hasFocus, a, row, column)
-      configure(table, isSelected, hasFocus, a, row, column)
-      component
-    }
-  }
-
-  class LabelRenderer[A](convert: A => (Icon, String)) extends AbstractRenderer[A, Label](new Label) {
-    def this() {
-      this{ a => (null, a.toString) }
-    }
-
-    def configure(table: Table, isSelected: Boolean, hasFocus: Boolean, a: A, row: Int, column: Int) {
-      val (icon, text) = convert(a)
-      component.icon = icon
-      component.text = text
-    }
-  }
-
-  private[swing] trait JTableMixin { def tableWrapper: Table }
-}
-
-/**
- * Displays a matrix of items.
- *
- * To obtain a scrollable table or row and column headers,
- * wrap the table in a scroll pane.
- *
- * @see javax.swing.JTable
- */
-class Table extends Component with Scrollable.Wrapper {
-  override lazy val peer: JTable = new JTable with Table.JTableMixin with SuperMixin {
-    def tableWrapper = Table.this
-    override def getCellRenderer(r: Int, c: Int) = new TableCellRenderer {
-      def getTableCellRendererComponent(table: JTable, value: AnyRef, isSelected: Boolean, hasFocus: Boolean, row: Int, column: Int) =
-        Table.this.rendererComponent(isSelected, hasFocus, row, column).peer
-    }
-    override def getCellEditor(r: Int, c: Int) = editor(r, c)
-    override def getValueAt(r: Int, c: Int) = Table.this.apply(r,c).asInstanceOf[AnyRef]
-  }
-  import Table._
-
-  // TODO: use IndexedSeq[_ <: IndexedSeq[Any]], see ticket #2005
-  def this(rowData: Array[Array[Any]], columnNames: Seq[_]) = {
-    this()
-    model = new AbstractTableModel {
-      override def getColumnName(column: Int) = columnNames(column).toString
-      def getRowCount() = rowData.length
-      def getColumnCount() = columnNames.length
-      def getValueAt(row: Int, col: Int): AnyRef = rowData(row)(col).asInstanceOf[AnyRef]
-      override def isCellEditable(row: Int, column: Int) = true
-      override def setValueAt(value: Any, row: Int, col: Int) {
-        rowData(row)(col) = value
-        fireTableCellUpdated(row, col)
-      }
-    }
-  }
-  def this(rows: Int, columns: Int) = {
-    this()
-    model = new DefaultTableModel(rows, columns) {
-      override def setValueAt(value: Any, row: Int, col: Int) {
-        super.setValueAt(value, row, col)
-      }
-    }
-  }
-
-  protected def scrollablePeer = peer
-
-  def rowHeight = peer.getRowHeight
-  def rowHeight_=(x: Int) = peer.setRowHeight(x)
-
-  def rowCount = peer.getRowCount
-
-  def model = peer.getModel()
-  def model_=(x: TableModel) = {
-    peer.setModel(x)
-    model.removeTableModelListener(modelListener)
-    model.addTableModelListener(modelListener)
-  }
-
-  def autoResizeMode: AutoResizeMode.Value = AutoResizeMode(peer.getAutoResizeMode)
-  def autoResizeMode_=(x: Table.AutoResizeMode.Value) = peer.setAutoResizeMode(x.id)
-
-  def showGrid = peer.getShowHorizontalLines && peer.getShowVerticalLines
-  def showGrid_=(grid: Boolean) = peer.setShowGrid(grid)
-
-  def gridColor = peer.getGridColor
-  def gridColor_=(color: Color) = peer.setGridColor(color)
-
-  def preferredViewportSize_=(dim: Dimension) = peer.setPreferredScrollableViewportSize(dim)
-  //1.6: def fillsViewportHeight: Boolean = peer.getFillsViewportHeight
-  //def fillsViewportHeight_=(b: Boolean) = peer.setFillsViewportHeight(b)
-
-  object selection extends Publisher {
-    // TODO: could be a sorted set
-    protected abstract class SelectionSet[A](a: =>Seq[A]) extends mutable.Set[A] {
-      def -=(n: A): this.type
-      def +=(n: A): this.type
-      def contains(n: A) = a.contains(n)
-      override def size = a.length
-      def iterator = a.iterator
-    }
-
-    object rows extends SelectionSet(peer.getSelectedRows) {
-      def -=(n: Int) = { peer.removeRowSelectionInterval(n,n); this }
-      def +=(n: Int) = { peer.addRowSelectionInterval(n,n); this }
-
-      def leadIndex: Int = peer.getSelectionModel.getLeadSelectionIndex
-      def anchorIndex: Int = peer.getSelectionModel.getAnchorSelectionIndex
-    }
-
-    object columns extends SelectionSet(peer.getSelectedColumns) {
-      def -=(n: Int) = { peer.removeColumnSelectionInterval(n,n); this }
-      def +=(n: Int) = { peer.addColumnSelectionInterval(n,n); this }
-
-      def leadIndex: Int = peer.getColumnModel.getSelectionModel.getLeadSelectionIndex
-      def anchorIndex: Int = peer.getColumnModel.getSelectionModel.getAnchorSelectionIndex
-    }
-
-    def cells: mutable.Set[(Int, Int)] =
-      new SelectionSet[(Int, Int)]((for(r <- selection.rows; c <- selection.columns) yield (r,c)).toSeq) { outer =>
-        def -=(n: (Int, Int)) = {
-          peer.removeRowSelectionInterval(n._1,n._1)
-          peer.removeColumnSelectionInterval(n._2,n._2)
-          this
-        }
-        def +=(n: (Int, Int)) = {
-          peer.addRowSelectionInterval(n._1,n._1)
-          peer.addColumnSelectionInterval(n._2,n._2)
-          this
-        }
-        override def size = peer.getSelectedRowCount * peer.getSelectedColumnCount
-      }
-
-    /**
-     * From the JTable Swing tutorial:
-     * You can specify selection by cell in multiple interval selection mode,
-     * but the result is a table that does not produce useful selections.
-     */
-    def intervalMode: IntervalMode.Value = IntervalMode(peer.getSelectionModel.getSelectionMode)
-    def intervalMode_=(m: IntervalMode.Value) { peer.setSelectionMode(m.id) }
-    def elementMode: ElementMode.Value =
-      if(peer.getColumnSelectionAllowed && peer.getRowSelectionAllowed) ElementMode.Cell
-      else if(peer.getColumnSelectionAllowed) ElementMode.Column
-      else if(peer.getRowSelectionAllowed) ElementMode.Row
-      else ElementMode.None
-    def elementMode_=(m: ElementMode.Value) {
-      m match {
-        case ElementMode.Cell => peer.setCellSelectionEnabled(true)
-        case ElementMode.Column => peer.setRowSelectionAllowed(false); peer.setColumnSelectionAllowed(true)
-        case ElementMode.Row => peer.setRowSelectionAllowed(true); peer.setColumnSelectionAllowed(false)
-        case ElementMode.None => peer.setRowSelectionAllowed(false); peer.setColumnSelectionAllowed(false)
-      }
-    }
-
-    peer.getColumnModel.getSelectionModel.addListSelectionListener(new ListSelectionListener {
-      def valueChanged(e: javax.swing.event.ListSelectionEvent) {
-        publish(TableColumnsSelected(Table.this, e.getFirstIndex to e.getLastIndex, e.getValueIsAdjusting))
-      }
-    })
-    peer.getSelectionModel.addListSelectionListener(new ListSelectionListener {
-      def valueChanged(e: javax.swing.event.ListSelectionEvent) {
-        publish(TableRowsSelected(Table.this, e.getFirstIndex to e.getLastIndex, e.getValueIsAdjusting))
-      }
-    })
-  }
-
-  /**
-   * Supplies a renderer component for a given cell.
-   */
-  protected def rendererComponent(isSelected: Boolean, focused: Boolean, row: Int, column: Int): Component =
-    new Component {
-      override lazy val peer = {
-        val v = apply(row, column).asInstanceOf[AnyRef]
-        if (v != null)
-          Table.this.peer.getDefaultRenderer(v.getClass).getTableCellRendererComponent(Table.this.peer,
-                 v, isSelected, focused, row, column).asInstanceOf[JComponent]
-        else Table.this.peer.getDefaultRenderer(classOf[Object]).getTableCellRendererComponent(Table.this.peer,
-                 v, isSelected, focused, row, column).asInstanceOf[JComponent]
-      }
-    }
-
-  // TODO: a public API for setting editors
-  protected def editor(row: Int, column: Int) = {
-    val v = apply(row, column).asInstanceOf[AnyRef]
-    if (v != null)
-      Table.this.peer.getDefaultEditor(v.getClass)
-    else
-      Table.this.peer.getDefaultEditor(classOf[Object])
-  }
-
-  /**
-   * Get the current value of the given cell.
-   * The given cell coordinates are in view coordinates and thus not
-   * necessarily the same as for the model.
-   */
-  def apply(row: Int, column: Int): Any = model.getValueAt(row, viewToModelColumn(column))
-
-  // TODO: this is Java 6 stuff
-  // def apply(row: Int, column: Int): Any = model.getValueAt(viewToModelRow(row), viewToModelColumn(column))
-  //def viewToModelRow(idx: Int) = peer.convertRowIndexToModel(idx)
-  //def modelToViewRow(idx: Int) = peer.convertRowIndexToView(idx)
-
-  def viewToModelColumn(idx: Int) = peer.convertColumnIndexToModel(idx)
-  def modelToViewColumn(idx: Int) = peer.convertColumnIndexToView(idx)
-
-
-  /**
-   * Change the value of the given cell.
-   */
-  def update(row: Int, column: Int, value: Any) { model.setValueAt(value, row, viewToModelColumn(column)) }
-
-  /**
-   * Visually update the given cell.
-   */
-  def updateCell(row: Int, column: Int) = update(row, column, apply(row, column))
-
-  def selectionForeground: Color = peer.getSelectionForeground
-  def selectionForeground_=(c: Color) = peer.setSelectionForeground(c)
-  def selectionBackground: Color = peer.getSelectionBackground
-  def selectionBackground_=(c: Color) = peer.setSelectionBackground(c)
-
-  protected val modelListener = new TableModelListener {
-    def tableChanged(e: TableModelEvent) = publish(
-      e.getType match {
-        case TableModelEvent.UPDATE =>
-          if (e.getFirstRow == 0 && e.getLastRow == Int.MaxValue && e.getColumn == TableModelEvent.ALL_COLUMNS)
-            TableChanged(Table.this)
-          else if (e.getFirstRow == TableModelEvent.HEADER_ROW)
-            TableStructureChanged(Table.this)
-          else
-            TableUpdated(Table.this, e.getFirstRow to e.getLastRow, e.getColumn)
-        case TableModelEvent.INSERT =>
-          TableRowsAdded(Table.this, e.getFirstRow to e.getLastRow)
-        case TableModelEvent.DELETE =>
-          TableRowsRemoved(Table.this, e.getFirstRow to e.getLastRow)
-      }
-    )
-  }
-}
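
A usage sketch for the Table wrapper removed above, following its class comment: headers and scrolling come from wrapping the table in a ScrollPane. The data and object names are illustrative.

    import scala.swing._

    object TableSketch {
      val data    = Array[Array[Any]](Array("ant", 1), Array("bee", 2))
      val headers = Seq("name", "count")

      val table = new Table(data, headers) {
        autoResizeMode        = Table.AutoResizeMode.AllColumns
        selection.elementMode = Table.ElementMode.Row
      }
      val view = new ScrollPane(table)
    }
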
diff --git a/src/swing/scala/swing/TextArea.scala b/src/swing/scala/swing/TextArea.scala
deleted file mode 100644
index 01bf115..0000000
--- a/src/swing/scala/swing/TextArea.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-import javax.swing._
-import java.awt.event._
-
-/**
- * A text component that allows multiline text input and display.
- *
- * @see javax.swing.JTextArea
- */
-class TextArea(text0: String, rows0: Int, columns0: Int) extends TextComponent
-    with TextComponent.HasColumns with TextComponent.HasRows {
-  override lazy val peer: JTextArea = new JTextArea(text0, rows0, columns0) with SuperMixin
-  def this(text: String) = this(text, 0, 0)
-  def this(rows: Int, columns: Int) = this("", rows, columns)
-  def this() = this("", 0, 0)
-
-  // TODO: we could make contents StringBuilder-like
-  def append(t: String) { peer.append(t) }
-
-  def rows: Int = peer.getRows
-  def rows_=(n: Int) = peer.setRows(n)
-  def columns: Int = peer.getColumns
-  def columns_=(n: Int) = peer.setColumns(n)
-
-  def tabSize: Int = peer.getTabSize
-  def tabSize_=(n: Int) = peer.setTabSize(n)
-  def lineCount: Int = peer.getLineCount
-
-  def lineWrap: Boolean = peer.getLineWrap
-  def lineWrap_=(w: Boolean) = peer.setLineWrap(w)
-  def wordWrap: Boolean = peer.getWrapStyleWord
-  def wordWrap_=(w: Boolean) = peer.setWrapStyleWord(w)
-  def charWrap: Boolean = !peer.getWrapStyleWord
-  def charWrap_=(w: Boolean) = peer.setWrapStyleWord(!w)
-}
diff --git a/src/swing/scala/swing/TextComponent.scala b/src/swing/scala/swing/TextComponent.scala
deleted file mode 100644
index 48c03a5..0000000
--- a/src/swing/scala/swing/TextComponent.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-import javax.swing._
-import javax.swing.text._
-import javax.swing.event._
-
-object TextComponent {
-  trait HasColumns extends TextComponent {
-    def columns: Int
-    def columns_=(n: Int)
-  }
-  trait HasRows extends TextComponent {
-    def rows: Int
-    def rows_=(n: Int)
-  }
-}
-
-/**
- * A component that allows some kind of text input and display.
- *
- * @see javax.swing.JTextComponent
- */
-class TextComponent extends Component with Publisher {
-  override lazy val peer: JTextComponent = new JTextComponent with SuperMixin {}
-  def text: String = peer.getText
-  def text_=(t: String) = peer.setText(t)
-
-  class Caret extends Publisher {
-    def dot: Int = peer.getCaret.getDot
-    def dot_=(n: Int) { peer.getCaret.setDot(n) }
-    def mark: Int = peer.getCaret.getMark
-    def moveDot(n: Int) { peer.getCaret.moveDot(n) }
-    def visible: Boolean = peer.getCaret.isVisible
-    def visible_=(b: Boolean) { peer.getCaret.setVisible(b) }
-    def selectionVisible: Boolean = peer.getCaret.isSelectionVisible
-    def selectionVisible_=(b: Boolean) { peer.getCaret.setSelectionVisible(b) }
-    def blinkRate: Int = peer.getCaret.getBlinkRate
-    def blinkRate_=(n: Int) { peer.getCaret.setBlinkRate(n) }
-    def color: Color = peer.getCaretColor
-    def color_=(c: Color) = peer.setCaretColor(c)
-    def position: Int = peer.getCaretPosition
-    def position_=(p: Int) = peer.setCaretPosition(p)
-
-    peer.addCaretListener {
-      new CaretListener {
-        def caretUpdate(e: CaretEvent) { publish(CaretUpdate(TextComponent.this)) }
-      }
-    }
-  }
-
-  object caret extends Caret
-
-  def editable: Boolean = peer.isEditable
-  def editable_=(x: Boolean) = peer.setEditable(x)
-  def cut() { peer.cut() }
-  def copy() { peer.copy() }
-  def paste() { peer.paste() }
-  def selected: String = peer.getSelectedText
-
-  def selectAll() { peer.selectAll() }
-
-  peer.getDocument.addDocumentListener(new DocumentListener {
-    def changedUpdate(e:DocumentEvent) { publish(new ValueChanged(TextComponent.this)) }
-    def insertUpdate(e:DocumentEvent) { publish(new ValueChanged(TextComponent.this)) }
-    def removeUpdate(e:DocumentEvent) { publish(new ValueChanged(TextComponent.this)) }
-  })
-}
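
A sketch of the TextComponent behaviour removed above: document edits are re-published as ValueChanged events, so a Reactor (defined elsewhere in this package) can observe typing without touching javax.swing.event directly. TextArea is the concrete subclass used here; names are illustrative.

    import scala.swing._
    import scala.swing.event.ValueChanged

    object TextComponentSketch {
      val area = new TextArea(5, 40)
      area.caret.blinkRate = 300
      val listener = new Reactor {
        listenTo(area)
        reactions += {
          case ValueChanged(`area`) => println("length = " + area.text.length)
        }
      }
    }
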
diff --git a/src/swing/scala/swing/TextField.scala b/src/swing/scala/swing/TextField.scala
deleted file mode 100644
index a28e8f8..0000000
--- a/src/swing/scala/swing/TextField.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-import javax.swing._
-import java.awt.event._
-
-
-/*object TextField {
-  object FocusLostBehavior extends Enumeration {
-    val Revert = Value(JFormattedTextField.REVERT)
-    val Commit = Value(JFormattedTextField.COMMIT)
-    val CommitOrRevert = Value(JFormattedTextField.COMMIT_OR_REVERT)
-    val Persist = Value(JFormattedTextField.PERSIST)
-  }
-}*/
-
-/**
- * A text component that allows single line text input and display.
- *
- * @see javax.swing.JTextField
- */
-class TextField(text0: String, columns0: Int) extends TextComponent with TextComponent.HasColumns with Action.Trigger.Wrapper {
-  override lazy val peer: JTextField = new JTextField(text0, columns0) with SuperMixin
-  def this(text: String) = this(text, 0)
-  def this(columns: Int) = this("", columns)
-  def this() = this("")
-
-  def columns: Int = peer.getColumns
-  def columns_=(n: Int) = peer.setColumns(n)
-
-  /** @see javax.swing.JTextField#getHorizontalAlignment() */
-  def horizontalAlignment: Alignment.Value = Alignment(peer.getHorizontalAlignment)
-  /** @see javax.swing.JTextField#setHorizontalAlignment() */
-  def horizontalAlignment_=(x: Alignment.Value) { peer.setHorizontalAlignment(x.id) }
-
-  private lazy val actionListener = Swing.ActionListener { e =>
-    publish(EditDone(TextField.this))
-  }
-
-  protected override def onFirstSubscribe() {
-    super.onFirstSubscribe
-    peer.addActionListener(actionListener)
-    peer.addFocusListener(new FocusAdapter {
-      override def focusLost(e: java.awt.event.FocusEvent) { publish(EditDone(TextField.this)) }
-    })
-  }
-
-  protected override def onLastUnsubscribe() {
-    super.onLastUnsubscribe
-    peer.removeActionListener(actionListener)
-  }
-
-  def verifier: String => Boolean = s => Option(peer.getInputVerifier) forall (_ verify peer)
-  def verifier_=(v: String => Boolean) {
-    peer.setInputVerifier(new InputVerifier {
-      private val old = Option(peer.getInputVerifier)
-      def verify(c: JComponent) = v(text)
-      override def shouldYieldFocus(c: JComponent) = old forall (_ shouldYieldFocus c)
-    })
-  }
-  def shouldYieldFocus: String => Boolean = s => Option(peer.getInputVerifier) forall (_ shouldYieldFocus peer)
-  def shouldYieldFocus_=(y: String=>Boolean) {
-    peer.setInputVerifier(new InputVerifier {
-      private val old = peer.getInputVerifier
-      def verify(c: JComponent) = old.verify(c)
-      override def shouldYieldFocus(c: JComponent) = y(text)
-    })
-  }
-}
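
A sketch of the TextField wrapper removed above: `verifier` registers an input verifier built from a String predicate, and EditDone is published on Enter and on focus loss (per `onFirstSubscribe`). Reactor comes from elsewhere in this package; names are illustrative.

    import scala.swing._
    import scala.swing.event.EditDone

    object TextFieldSketch {
      val field = new TextField(20)
      field.verifier = s => s.forall(_.isDigit)

      val watcher = new Reactor {
        listenTo(field)
        reactions += {
          case EditDone(`field`) => println("entered: " + field.text)
        }
      }
    }
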
diff --git a/src/swing/scala/swing/ToggleButton.scala b/src/swing/scala/swing/ToggleButton.scala
deleted file mode 100644
index 3d3d0b9..0000000
--- a/src/swing/scala/swing/ToggleButton.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import event._
-import javax.swing._
-
-/**
- * A two-state button with a push-button-like user interface.
- * Usually used in tool bars.
- *
- * @see javax.swing.JToggleButton
- */
-class ToggleButton(text0: String) extends AbstractButton {
-  override lazy val peer: JToggleButton = new JToggleButton(text0) with SuperMixin
-  def this() = this("")
-}
diff --git a/src/swing/scala/swing/UIElement.scala b/src/swing/scala/swing/UIElement.scala
deleted file mode 100644
index 16b8738..0000000
--- a/src/swing/scala/swing/UIElement.scala
+++ /dev/null
@@ -1,133 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-package scala.swing
-
-import java.awt.Cursor
-import event._
-import scala.ref._
-import java.util.WeakHashMap
-
-object UIElement {
-  private val ClientKey = "scala.swingWrapper"
-  private[this] val wrapperCache = new WeakHashMap[java.awt.Component, WeakReference[UIElement]]
-
-  private def cache(e: UIElement) = e.peer match {
-    case p: javax.swing.JComponent => p.putClientProperty(ClientKey, e)
-    case _ => wrapperCache.put(e.peer, new WeakReference(e))
-  }
-
-  /**
-   * Looks up the internal component cache for a wrapper of the given
-   * Java Swing peer. If this method finds one of the given type `C`,
-   * it will return that wrapper. Otherwise it returns `null`. This
-   * method never throws an exception.
-   *
-   * Clients should be extremely careful with type parameter `C` and
-   * its interaction with type inference. Better err on the side of caution
-   * and explicitly specify `C`.
-   */
-  private[swing] def cachedWrapper[C>:Null<:UIElement](c: java.awt.Component): C = {
-    val w = c match {
-      case c: javax.swing.JComponent => c.getClientProperty(ClientKey)
-      case _ => wrapperCache.get(c)
-    }
-    try { w.asInstanceOf[C] } catch { case _: Exception => null }
-  }
-
-  /**
-   * Returns a wrapper for a given Java Swing peer. If there is a
-   * compatible wrapper in use, this method will return it.
-   *
-   * `wrap` methods in companion objects of subclasses of `UIElement` have
- * the same behavior, except that they return more specific wrappers.
-   */
-  def wrap(c: java.awt.Component): UIElement = {
-    val w = cachedWrapper[UIElement](c)
-    if (w != null) w
-    else new UIElement { def peer = c }
-  }
-}
-
-/**
- * The base trait of all user interface elements. Subclasses belong to one
- * of two groups: top-level elements such as windows and dialogs, or
- * `Component`s.
- *
- * @note [Java Swing] This trait does not have an exact counterpart in
- * Java Swing. The peer is of type java.awt.Component since this is the
- * least common upper bound of possible underlying peers.
- *
- * @note [Implementation] A UIElement automatically adds itself to the
- * component cache on creation.
- *
- * @see java.awt.Component
- */
-trait UIElement extends Proxy with LazyPublisher {
-  /**
-   * The underlying Swing peer.
-   */
-  def peer: java.awt.Component
-  def self = peer
-
-  UIElement.cache(this)
-
-  def foreground: Color = peer.getForeground
-  def foreground_=(c: Color) = peer setForeground c
-  def background: Color = peer.getBackground
-  def background_=(c: Color) = peer setBackground c
-
-  def minimumSize = peer.getMinimumSize
-  def minimumSize_=(x: Dimension) = peer setMinimumSize x
-  def maximumSize = peer.getMaximumSize
-  def maximumSize_=(x: Dimension) = peer setMaximumSize x
-  def preferredSize = peer.getPreferredSize
-  def preferredSize_=(x: Dimension) = peer setPreferredSize x
-
-  def font: Font = peer.getFont
-  def font_=(f: Font) = peer setFont f
-
-  def locationOnScreen = peer.getLocationOnScreen
-  def location = peer.getLocation
-  def bounds = peer.getBounds
-  def size = peer.getSize
-
-  def locale = peer.getLocale
-  def toolkit = peer.getToolkit
-
-  def cursor: Cursor = peer.getCursor
-  def cursor_=(c: Cursor) { peer.setCursor(c) }
-
-  def visible: Boolean = peer.isVisible
-  def visible_=(b: Boolean) { peer.setVisible(b) }
-  def showing: Boolean = peer.isShowing
-  def displayable: Boolean = peer.isDisplayable
-
-  def repaint() { peer.repaint }
-  def repaint(rect: Rectangle) { peer.repaint(rect.x, rect.y, rect.width, rect.height) }
-  def ignoreRepaint: Boolean = peer.getIgnoreRepaint
-  def ignoreRepaint_=(b: Boolean) { peer.setIgnoreRepaint(b) }
-
-  protected def onFirstSubscribe() {
-    peer.addComponentListener(new java.awt.event.ComponentListener {
-      def componentHidden(e: java.awt.event.ComponentEvent) {
-        publish(UIElementHidden(UIElement.this))
-      }
-      def componentShown(e: java.awt.event.ComponentEvent) {
-        publish(UIElementShown(UIElement.this))
-      }
-      def componentMoved(e: java.awt.event.ComponentEvent) {
-        publish(UIElementMoved(UIElement.this))
-      }
-      def componentResized(e: java.awt.event.ComponentEvent) {
-        publish(UIElementResized(UIElement.this))
-      }
-    })
-  }
-  protected def onLastUnsubscribe() {}
-}
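
A sketch of the public `wrap` entry point removed above: it recovers a cached scala.swing wrapper for a raw peer, or creates a bare UIElement when none exists. The JLabel and names used here are illustrative.

    import scala.swing._

    object UIElementSketch {
      val raw: java.awt.Component = new javax.swing.JLabel("raw peer")
      val wrapped: UIElement = UIElement.wrap(raw)
      println(wrapped.size)
    }
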
diff --git a/src/swing/scala/swing/Window.scala b/src/swing/scala/swing/Window.scala
deleted file mode 100644
index 5bdb50e..0000000
--- a/src/swing/scala/swing/Window.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-
-import java.awt.{Window => AWTWindow}
-import event._
-import javax.swing._
-
-/**
- * A window with decoration such as a title, border, and action buttons.
- *
- * An AWT window cannot be wrapped dynamically with this class, i.e., you cannot
- * write something like new Window { def peer = myAWTWindow }
- *
- * @see javax.swing.JFrame
- */
-abstract class Window extends UIElement with RootPanel with Publisher { outer =>
-  def peer: AWTWindow with InterfaceMixin
-
-  protected trait InterfaceMixin extends javax.swing.RootPaneContainer
-
-  protected trait SuperMixin extends AWTWindow {
-    override protected def processWindowEvent(e: java.awt.event.WindowEvent) {
-      super.processWindowEvent(e)
-      if (e.getID() == java.awt.event.WindowEvent.WINDOW_CLOSING)
-        closeOperation()
-    }
-  }
-
-  /**
-   * This method is called when the window is closing, after all other window
-   * event listeners have been processed.
-   */
-  def closeOperation() {}
-
-  override def contents_=(c: Component) {
-    super.contents_=(c)
-    peer.pack() // pack also validates, which is generally required after an add
-  }
-  def defaultButton: Option[Button] =
-    toOption(peer.getRootPane.getDefaultButton) map UIElement.cachedWrapper[Button]
-  def defaultButton_=(b: Button) {
-    peer.getRootPane.setDefaultButton(b.peer)
-  }
-  def defaultButton_=(b: Option[Button]) {
-    peer.getRootPane.setDefaultButton(b.map(_.peer).orNull)
-  }
-
-  def dispose() { peer.dispose() }
-
-  def pack(): this.type = { peer.pack(); this }
-
-  def setLocationRelativeTo(c: UIElement) { peer.setLocationRelativeTo(c.peer) }
-  def centerOnScreen() { peer.setLocationRelativeTo(null) }
-  def location_=(p: Point) { peer.setLocation(p) }
-  def size_=(size: Dimension) { peer.setSize(size) }
-  def bounds_=(rect: Rectangle) { peer.setBounds(rect) }
-
-  def owner: Window = UIElement.cachedWrapper[Window](peer.getOwner)
-
-  def open() { peer setVisible true }
-  def close() { peer setVisible false }
-
-  peer.addWindowListener(new java.awt.event.WindowListener {
-    def windowActivated(e: java.awt.event.WindowEvent) { publish(WindowActivated(outer)) }
-    def windowClosed(e: java.awt.event.WindowEvent) { publish(WindowClosed(outer)) }
-    def windowClosing(e: java.awt.event.WindowEvent) { publish(WindowClosing(outer)) }
-    def windowDeactivated(e: java.awt.event.WindowEvent) { publish(WindowDeactivated(outer)) }
-    def windowDeiconified(e: java.awt.event.WindowEvent) { publish(WindowDeiconified(outer)) }
-    def windowIconified(e: java.awt.event.WindowEvent) { publish(WindowIconified(outer)) }
-    def windowOpened(e: java.awt.event.WindowEvent) { publish(WindowOpened(outer)) }
-  })
-}
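
A sketch of the Window API removed above, using the concrete Frame and Button classes defined elsewhere in this package. `closeOperation` is the hook invoked on WINDOW_CLOSING; names are illustrative.

    import scala.swing._

    object WindowSketch {
      val frame = new Frame {
        title = "confirm close"
        val ok = new Button("OK")
        contents = ok
        defaultButton = ok
        // Invoked after all other window listeners when the window is closing.
        override def closeOperation() { dispose() }
      }
      frame.centerOnScreen()
      frame.open()
    }
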
diff --git a/src/swing/scala/swing/event/ActionEvent.scala b/src/swing/scala/swing/event/ActionEvent.scala
deleted file mode 100644
index 7b2de43..0000000
--- a/src/swing/scala/swing/event/ActionEvent.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-object ActionEvent {
-  def unapply(a: ActionEvent): Option[Component] = Some(a.source)
-}
-
-class ActionEvent(override val source: Component) extends ComponentEvent
diff --git a/src/swing/scala/swing/event/AdjustingEvent.scala b/src/swing/scala/swing/event/AdjustingEvent.scala
deleted file mode 100644
index a4b7d29..0000000
--- a/src/swing/scala/swing/event/AdjustingEvent.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-/** An event that indicates an editing operation that may still be
- *  in progress.
- *
- *  Example: dragging a slider creates a number of `AdjustingEvent`s
- *  with `adjusting == '''true'''` until the user finally releases the
- *  mouse button.
- */
-trait AdjustingEvent extends ComponentEvent {
-  def adjusting: Boolean
-  def committed: Boolean = !adjusting
-}
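
    A hedged sketch of the intended pattern (names illustrative): react only once an
    adjusting gesture has finished, using the committed helper defined above.

        import scala.swing._
        import scala.swing.event._

        object CommittedOnly {
          // `publisher` is any Publisher assumed to emit AdjustingEvents,
          // e.g. the table selection events further down in this diff.
          def listen(publisher: Publisher): Reactor = new Reactor {
            listenTo(publisher)
            reactions += {
              case e: AdjustingEvent if e.committed =>  // committed == !adjusting
                println("gesture finished on " + e.source)
            }
          }
        }
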
diff --git a/src/swing/scala/swing/event/BackgroundChanged.scala b/src/swing/scala/swing/event/BackgroundChanged.scala
deleted file mode 100644
index bdd67f9..0000000
--- a/src/swing/scala/swing/event/BackgroundChanged.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-case class BackgroundChanged(override val source: Component) extends ComponentEvent
diff --git a/src/swing/scala/swing/event/ButtonClicked.scala b/src/swing/scala/swing/event/ButtonClicked.scala
deleted file mode 100644
index d022019..0000000
--- a/src/swing/scala/swing/event/ButtonClicked.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-case class ButtonClicked(override val source: AbstractButton) extends ActionEvent(source)
-
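
    Illustrative only (names assumed, not part of the removed sources): the canonical
    consumer of this event, a frame reacting to a button click.

        import scala.swing._
        import scala.swing.event.ButtonClicked

        object ClickDemo extends SimpleSwingApplication {
          val button = new Button("Press me")
          def top = new MainFrame {
            contents = button
            listenTo(button)
            reactions += {
              case ButtonClicked(`button`) => println("pressed")
            }
          }
        }
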
diff --git a/src/swing/scala/swing/event/CaretUpdate.scala b/src/swing/scala/swing/event/CaretUpdate.scala
deleted file mode 100644
index 2821175..0000000
--- a/src/swing/scala/swing/event/CaretUpdate.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-case class CaretUpdate(override val source: TextComponent) extends ComponentEvent
diff --git a/src/swing/scala/swing/event/ComponentEvent.scala b/src/swing/scala/swing/event/ComponentEvent.scala
deleted file mode 100644
index 701b962..0000000
--- a/src/swing/scala/swing/event/ComponentEvent.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-trait ComponentEvent extends UIEvent {
-  val source: Component
-}
diff --git a/src/swing/scala/swing/event/ContainerEvent.scala b/src/swing/scala/swing/event/ContainerEvent.scala
deleted file mode 100644
index 46f3768..0000000
--- a/src/swing/scala/swing/event/ContainerEvent.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-abstract class ContainerEvent(override val source: Container) extends UIEvent
-
-case class ComponentAdded(override val source: Container, child: Component) extends ContainerEvent(source)
-case class ComponentRemoved(override val source: Container, child: Component) extends ContainerEvent(source)
diff --git a/src/swing/scala/swing/event/EditDone.scala b/src/swing/scala/swing/event/EditDone.scala
deleted file mode 100644
index 9d38234..0000000
--- a/src/swing/scala/swing/event/EditDone.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-case class EditDone(override val source: TextField) extends ValueChanged(source)
diff --git a/src/swing/scala/swing/event/Event.scala b/src/swing/scala/swing/event/Event.scala
deleted file mode 100644
index fd11356..0000000
--- a/src/swing/scala/swing/event/Event.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-trait Event
diff --git a/src/swing/scala/swing/event/FocusEvent.scala b/src/swing/scala/swing/event/FocusEvent.scala
deleted file mode 100644
index 5c29d8f..0000000
--- a/src/swing/scala/swing/event/FocusEvent.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-/**
- * The other component is `None` if it is a non-Swing (i.e., AWT or native) component.
- */
-abstract class FocusEvent(override val source: Component, val other: Option[Component], val temporary: Boolean) extends ComponentEvent
-
-case class FocusGained(override val source: Component, override val other: Option[Component], override val temporary: Boolean)
-           extends FocusEvent(source, other, temporary)
-
-case class FocusLost(override val source: Component, override val other: Option[Component], override val temporary: Boolean)
-           extends FocusEvent(source, other, temporary)
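
    A sketch of how these events are typically consumed, assuming scala-swing's
    Component publishes FocusGained/FocusLost on itself (class and field names
    here are illustrative).

        import scala.swing._
        import scala.swing.event.{FocusGained, FocusLost}

        class FocusTracker(a: TextField, b: TextField) extends Reactor {
          listenTo(a, b)
          reactions += {
            case FocusGained(source, other, temporary) =>
              // `other` is None when focus came from a non-Swing component
              println(source + " gained focus from " + other + " (temporary=" + temporary + ")")
            case FocusLost(source, other, _) =>
              println(source + " lost focus to " + other)
          }
        }
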
diff --git a/src/swing/scala/swing/event/FontChanged.scala b/src/swing/scala/swing/event/FontChanged.scala
deleted file mode 100644
index ca936e1..0000000
--- a/src/swing/scala/swing/event/FontChanged.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-case class FontChanged(override val source: Component) extends ComponentEvent
diff --git a/src/swing/scala/swing/event/ForegroundChanged.scala b/src/swing/scala/swing/event/ForegroundChanged.scala
deleted file mode 100644
index 42b45aa..0000000
--- a/src/swing/scala/swing/event/ForegroundChanged.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-case class ForegroundChanged(override val source: Component) extends ComponentEvent
diff --git a/src/swing/scala/swing/event/InputEvent.scala b/src/swing/scala/swing/event/InputEvent.scala
deleted file mode 100644
index b515b01..0000000
--- a/src/swing/scala/swing/event/InputEvent.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-trait InputEvent extends ComponentEvent {
-  def peer: java.awt.event.InputEvent
-  def when: Long = peer.getWhen
-  def modifiers: Key.Modifiers
-  def consume() { peer.consume() }
-  def consumed: Boolean = peer.isConsumed
-}
diff --git a/src/swing/scala/swing/event/Key.scala b/src/swing/scala/swing/event/Key.scala
deleted file mode 100644
index 5e9e0cb..0000000
--- a/src/swing/scala/swing/event/Key.scala
+++ /dev/null
@@ -1,232 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-/**
- * Enumeration of key codes used by key events.
- */
-object Key extends Enumeration {
-  import java.awt.event.KeyEvent._
-
-  object Location extends Enumeration {
-    val Left = Value(java.awt.event.KeyEvent.KEY_LOCATION_LEFT)
-    val Right = Value(java.awt.event.KeyEvent.KEY_LOCATION_RIGHT)
-    val Numpad = Value(java.awt.event.KeyEvent.KEY_LOCATION_NUMPAD)
-    val Standard = Value(java.awt.event.KeyEvent.KEY_LOCATION_STANDARD)
-    val Unknown = Value(java.awt.event.KeyEvent.KEY_LOCATION_UNKNOWN)
-  }
-
-  type Modifiers = Int
-
-  object Modifier {
-    import java.awt.event.InputEvent._
-    val Shift = SHIFT_DOWN_MASK
-    val Control = CTRL_DOWN_MASK
-    val Alt = ALT_DOWN_MASK
-    val AltGraph = ALT_GRAPH_DOWN_MASK
-    val Meta = META_DOWN_MASK
-    def text(mods: Int) = java.awt.event.KeyEvent.getKeyModifiersText(mods)
-  }
-
-  //def text(k: Value) = java.awt.event.KeyEvent.getKeyText(k.id)
-
-  val Shift = Value(VK_SHIFT, getKeyText(VK_SHIFT))
-  val Control = Value(VK_CONTROL, getKeyText(VK_CONTROL))
-  val Alt = Value(VK_ALT, getKeyText(VK_ALT))
-  val AltGraph = Value(VK_ALT_GRAPH, getKeyText(VK_ALT_GRAPH))
-  val Meta = Value(VK_META, getKeyText(VK_META))
-
-  val Enter = Value(VK_ENTER, getKeyText(VK_ENTER))
-  val BackSpace = Value(VK_BACK_SPACE, getKeyText(VK_BACK_SPACE))
-  val Tab = Value(VK_TAB, getKeyText(VK_TAB))
-  val Cancel = Value(VK_CANCEL, getKeyText(VK_CANCEL))
-  val Clear = Value(VK_CLEAR, getKeyText(VK_CLEAR))
-
-  val Pause = Value(VK_PAUSE, getKeyText(VK_PAUSE))
-  val CapsLock = Value(VK_CAPS_LOCK, getKeyText(VK_CAPS_LOCK))
-  val Escape = Value(VK_ESCAPE, getKeyText(VK_ESCAPE))
-  val Space = Value(VK_SPACE, getKeyText(VK_SPACE))
-  val PageUp = Value(VK_PAGE_UP, getKeyText(VK_PAGE_UP))
-  val PageDown = Value(VK_PAGE_DOWN, getKeyText(VK_PAGE_DOWN))
-  val End = Value(VK_END, getKeyText(VK_END))
-  val Home = Value(VK_HOME, getKeyText(VK_HOME))
-  val Left = Value(VK_LEFT, getKeyText(VK_LEFT))
-  val Up = Value(VK_UP, getKeyText(VK_UP))
-  val Right = Value(VK_RIGHT, getKeyText(VK_RIGHT))
-  val Down = Value(VK_DOWN, getKeyText(VK_DOWN))
-  val Comma = Value(VK_COMMA, getKeyText(VK_COMMA))
-  val Minus = Value(VK_MINUS, getKeyText(VK_MINUS))
-  val Period = Value(VK_PERIOD, getKeyText(VK_PERIOD))
-  val Slash = Value(VK_SLASH, getKeyText(VK_SLASH))
-  val Key0 = Value(VK_0, getKeyText(VK_0))
-  val Key1 = Value(VK_1, getKeyText(VK_1))
-  val Key2 = Value(VK_2, getKeyText(VK_2))
-  val Key3 = Value(VK_3, getKeyText(VK_3))
-  val Key4 = Value(VK_4, getKeyText(VK_4))
-  val Key5 = Value(VK_5, getKeyText(VK_5))
-  val Key6 = Value(VK_6, getKeyText(VK_6))
-  val Key7 = Value(VK_7, getKeyText(VK_7))
-  val Key8 = Value(VK_8, getKeyText(VK_8))
-  val Key9 = Value(VK_9, getKeyText(VK_9))
-  val Semicolon = Value(VK_SEMICOLON, getKeyText(VK_SEMICOLON))
-  val Equals = Value(VK_EQUALS, getKeyText(VK_EQUALS))
-  val A = Value(VK_A, getKeyText(VK_A))
-  val B = Value(VK_B, getKeyText(VK_B))
-  val C = Value(VK_C, getKeyText(VK_C))
-  val D = Value(VK_D, getKeyText(VK_D))
-  val E = Value(VK_E, getKeyText(VK_E))
-  val F = Value(VK_F, getKeyText(VK_F))
-  val G = Value(VK_G, getKeyText(VK_G))
-  val H = Value(VK_H, getKeyText(VK_H))
-  val I = Value(VK_I, getKeyText(VK_I))
-  val J = Value(VK_J, getKeyText(VK_J))
-  val K = Value(VK_K, getKeyText(VK_K))
-  val L = Value(VK_L, getKeyText(VK_L))
-  val M = Value(VK_M, getKeyText(VK_M))
-  val N = Value(VK_N, getKeyText(VK_N))
-  val O = Value(VK_O, getKeyText(VK_O))
-  val P = Value(VK_P, getKeyText(VK_P))
-  val Q = Value(VK_Q, getKeyText(VK_Q))
-  val R = Value(VK_R, getKeyText(VK_R))
-  val S = Value(VK_S, getKeyText(VK_S))
-  val T = Value(VK_T, getKeyText(VK_T))
-  val U = Value(VK_U, getKeyText(VK_U))
-  val V = Value(VK_V, getKeyText(VK_V))
-  val W = Value(VK_W, getKeyText(VK_W))
-  val X = Value(VK_X, getKeyText(VK_X))
-  val Y = Value(VK_Y, getKeyText(VK_Y))
-  val Z = Value(VK_Z, getKeyText(VK_Z))
-  val OpenBracket = Value(VK_OPEN_BRACKET, getKeyText(VK_OPEN_BRACKET))
-  val BackSlash = Value(VK_BACK_SLASH, getKeyText(VK_BACK_SLASH))
-  val CloseBracket = Value(VK_CLOSE_BRACKET, getKeyText(VK_CLOSE_BRACKET))
-  val Numpad0 = Value(VK_NUMPAD0, getKeyText(VK_NUMPAD0))
-  val Numpad1 = Value(VK_NUMPAD1, getKeyText(VK_NUMPAD1))
-  val Numpad2 = Value(VK_NUMPAD2, getKeyText(VK_NUMPAD2))
-  val Numpad3 = Value(VK_NUMPAD3, getKeyText(VK_NUMPAD3))
-  val Numpad4 = Value(VK_NUMPAD4, getKeyText(VK_NUMPAD4))
-  val Numpad5 = Value(VK_NUMPAD5, getKeyText(VK_NUMPAD5))
-  val Numpad6 = Value(VK_NUMPAD6, getKeyText(VK_NUMPAD6))
-  val Numpad7 = Value(VK_NUMPAD7, getKeyText(VK_NUMPAD7))
-  val Numpad8 = Value(VK_NUMPAD8, getKeyText(VK_NUMPAD8))
-  val Numpad9 = Value(VK_NUMPAD9, getKeyText(VK_NUMPAD9))
-  val Multiply = Value(VK_MULTIPLY, getKeyText(VK_MULTIPLY))
-  val Add = Value(VK_ADD, getKeyText(VK_ADD))
-  val Separator = Value(VK_SEPARATOR, getKeyText(VK_SEPARATOR))
-  val Subtract = Value(VK_SUBTRACT, getKeyText(VK_SUBTRACT))
-  val Decimal = Value(VK_DECIMAL, getKeyText(VK_DECIMAL))
-  val Divide = Value(VK_DIVIDE, getKeyText(VK_DIVIDE))
-  val Delete = Value(VK_DELETE, getKeyText(VK_DELETE))
-  val NumLock = Value(VK_NUM_LOCK, getKeyText(VK_NUM_LOCK))
-  val ScrollLock = Value(VK_SCROLL_LOCK, getKeyText(VK_SCROLL_LOCK))
-  val F1 = Value(VK_F1, getKeyText(VK_F1))
-  val F2 = Value(VK_F2, getKeyText(VK_F2))
-  val F3 = Value(VK_F3, getKeyText(VK_F3))
-  val F4 = Value(VK_F4, getKeyText(VK_F4))
-  val F5 = Value(VK_F5, getKeyText(VK_F5))
-  val F6 = Value(VK_F6, getKeyText(VK_F6))
-  val F7 = Value(VK_F7, getKeyText(VK_F7))
-  val F8 = Value(VK_F8, getKeyText(VK_F8))
-  val F9 = Value(VK_F9, getKeyText(VK_F9))
-  val F10 = Value(VK_F10, getKeyText(VK_F10))
-  val F11 = Value(VK_F11, getKeyText(VK_F11))
-  val F12 = Value(VK_F12, getKeyText(VK_F12))
-  val F13 = Value(VK_F13, getKeyText(VK_F13))
-  val F14 = Value(VK_F14, getKeyText(VK_F14))
-  val F15 = Value(VK_F15, getKeyText(VK_F15))
-  val F16 = Value(VK_F16, getKeyText(VK_F16))
-  val F17 = Value(VK_F17, getKeyText(VK_F17))
-  val F18 = Value(VK_F18, getKeyText(VK_F18))
-  val F19 = Value(VK_F19, getKeyText(VK_F19))
-  val F20 = Value(VK_F20, getKeyText(VK_F20))
-  val F21 = Value(VK_F21, getKeyText(VK_F21))
-  val F22 = Value(VK_F22, getKeyText(VK_F22))
-  val F23 = Value(VK_F23, getKeyText(VK_F23))
-  val F24 = Value(VK_F24, getKeyText(VK_F24))
-  val Printscreen = Value(VK_PRINTSCREEN, getKeyText(VK_PRINTSCREEN))
-  val Insert = Value(VK_INSERT, getKeyText(VK_INSERT))
-  val Help = Value(VK_HELP, getKeyText(VK_HELP))
-  val BackQuote = Value(VK_BACK_QUOTE, getKeyText(VK_BACK_QUOTE))
-  val Quote = Value(VK_QUOTE, getKeyText(VK_QUOTE))
-  val KpUp = Value(VK_KP_UP, getKeyText(VK_KP_UP))
-  val KpDown = Value(VK_KP_DOWN, getKeyText(VK_KP_DOWN))
-  val KpLeft = Value(VK_KP_LEFT, getKeyText(VK_KP_LEFT))
-  val KpRight = Value(VK_KP_RIGHT, getKeyText(VK_KP_RIGHT))
-  val DeadGrave = Value(VK_DEAD_GRAVE, getKeyText(VK_DEAD_GRAVE))
-  val DeadAcute = Value(VK_DEAD_ACUTE, getKeyText(VK_DEAD_ACUTE))
-  val DeadCircumflex = Value(VK_DEAD_CIRCUMFLEX, getKeyText(VK_DEAD_CIRCUMFLEX))
-  val DeadTilde = Value(VK_DEAD_TILDE, getKeyText(VK_DEAD_TILDE))
-  val DeadMacron = Value(VK_DEAD_MACRON, getKeyText(VK_DEAD_MACRON))
-  val DeadBreve = Value(VK_DEAD_BREVE, getKeyText(VK_DEAD_BREVE))
-  val DeadAbovedot = Value(VK_DEAD_ABOVEDOT, getKeyText(VK_DEAD_ABOVEDOT))
-  val DeadDiaeresis = Value(VK_DEAD_DIAERESIS, getKeyText(VK_DEAD_DIAERESIS))
-  val DeadAbovering = Value(VK_DEAD_ABOVERING, getKeyText(VK_DEAD_ABOVERING))
-  val DeadDoubleacute = Value(VK_DEAD_DOUBLEACUTE, getKeyText(VK_DEAD_DOUBLEACUTE))
-  val DeadCaron = Value(VK_DEAD_CARON, getKeyText(VK_DEAD_CARON))
-  val DeadCedilla = Value(VK_DEAD_CEDILLA, getKeyText(VK_DEAD_CEDILLA))
-  val DeadOgonek = Value(VK_DEAD_OGONEK, getKeyText(VK_DEAD_OGONEK))
-  val DeadIota = Value(VK_DEAD_IOTA, getKeyText(VK_DEAD_IOTA))
-  val DeadVoicedSound = Value(VK_DEAD_VOICED_SOUND, getKeyText(VK_DEAD_VOICED_SOUND))
-  val DeadSemivoicedSound = Value(VK_DEAD_SEMIVOICED_SOUND, getKeyText(VK_DEAD_SEMIVOICED_SOUND))
-  val Ampersand = Value(VK_AMPERSAND, getKeyText(VK_AMPERSAND))
-  val Asterisk = Value(VK_ASTERISK, getKeyText(VK_ASTERISK))
-  val Quotedbl = Value(VK_QUOTEDBL, getKeyText(VK_QUOTEDBL))
-  val Less = Value(VK_LESS, getKeyText(VK_LESS))
-  val Greater = Value(VK_GREATER, getKeyText(VK_GREATER))
-  val Braceleft = Value(VK_BRACELEFT, getKeyText(VK_BRACELEFT))
-  val Braceright = Value(VK_BRACERIGHT, getKeyText(VK_BRACERIGHT))
-  val At = Value(VK_AT, getKeyText(VK_AT))
-  val Colon = Value(VK_COLON, getKeyText(VK_COLON))
-  val Circumflex = Value(VK_CIRCUMFLEX, getKeyText(VK_CIRCUMFLEX))
-  val Dollar = Value(VK_DOLLAR, getKeyText(VK_DOLLAR))
-  val EuroSign = Value(VK_EURO_SIGN, getKeyText(VK_EURO_SIGN))
-  val ExclamationMark = Value(VK_EXCLAMATION_MARK, getKeyText(VK_EXCLAMATION_MARK))
-  val InvertedExclamationMark = Value(VK_INVERTED_EXCLAMATION_MARK, getKeyText(VK_INVERTED_EXCLAMATION_MARK))
-  val LeftParenthesis = Value(VK_LEFT_PARENTHESIS, getKeyText(VK_LEFT_PARENTHESIS))
-  val NumberSign = Value(VK_NUMBER_SIGN, getKeyText(VK_NUMBER_SIGN))
-  val Plus = Value(VK_PLUS, getKeyText(VK_PLUS))
-  val RightParenthesis = Value(VK_RIGHT_PARENTHESIS, getKeyText(VK_RIGHT_PARENTHESIS))
-  val Underscore = Value(VK_UNDERSCORE, getKeyText(VK_UNDERSCORE))
-  val Windows = Value(VK_WINDOWS, getKeyText(VK_WINDOWS))
-  val ContextMenu = Value(VK_CONTEXT_MENU, getKeyText(VK_CONTEXT_MENU))
-  val Final = Value(VK_FINAL, getKeyText(VK_FINAL))
-  val Convert = Value(VK_CONVERT, getKeyText(VK_CONVERT))
-  val Nonconvert = Value(VK_NONCONVERT, getKeyText(VK_NONCONVERT))
-  val Accept = Value(VK_ACCEPT, getKeyText(VK_ACCEPT))
-  val Modechange = Value(VK_MODECHANGE, getKeyText(VK_MODECHANGE))
-  val Kana = Value(VK_KANA, getKeyText(VK_KANA))
-  val Kanji = Value(VK_KANJI, getKeyText(VK_KANJI))
-  val Alphanumeric = Value(VK_ALPHANUMERIC, getKeyText(VK_ALPHANUMERIC))
-  val Katakana = Value(VK_KATAKANA, getKeyText(VK_KATAKANA))
-  val Hiragana = Value(VK_HIRAGANA, getKeyText(VK_HIRAGANA))
-  val FullWidth = Value(VK_FULL_WIDTH, getKeyText(VK_FULL_WIDTH))
-  val HalfWidth = Value(VK_HALF_WIDTH, getKeyText(VK_HALF_WIDTH))
-  val RomanCharacters = Value(VK_ROMAN_CHARACTERS, getKeyText(VK_ROMAN_CHARACTERS))
-  val AllCandidates = Value(VK_ALL_CANDIDATES, getKeyText(VK_ALL_CANDIDATES))
-  val PreviousCandidate = Value(VK_PREVIOUS_CANDIDATE, getKeyText(VK_PREVIOUS_CANDIDATE))
-  val CodeInput = Value(VK_CODE_INPUT, getKeyText(VK_CODE_INPUT))
-  val JapaneseKatakana = Value(VK_JAPANESE_KATAKANA, getKeyText(VK_JAPANESE_KATAKANA))
-  val JapaneseHiragana = Value(VK_JAPANESE_HIRAGANA, getKeyText(VK_JAPANESE_HIRAGANA))
-  val JapaneseRoman = Value(VK_JAPANESE_ROMAN, getKeyText(VK_JAPANESE_ROMAN))
-  val KanaLock = Value(VK_KANA_LOCK, getKeyText(VK_KANA_LOCK))
-  val InputMethodOnOff = Value(VK_INPUT_METHOD_ON_OFF, getKeyText(VK_INPUT_METHOD_ON_OFF))
-  val Cut = Value(VK_CUT, getKeyText(VK_CUT))
-  val Copy = Value(VK_COPY, getKeyText(VK_COPY))
-  val Paste = Value(VK_PASTE, getKeyText(VK_PASTE))
-  val Undo = Value(VK_UNDO, getKeyText(VK_UNDO))
-  val Again = Value(VK_AGAIN, getKeyText(VK_AGAIN))
-  val Find = Value(VK_FIND, getKeyText(VK_FIND))
-  val Props = Value(VK_PROPS, getKeyText(VK_PROPS))
-  val Stop = Value(VK_STOP, getKeyText(VK_STOP))
-  val Compose = Value(VK_COMPOSE, getKeyText(VK_COMPOSE))
-  val Begin = Value(VK_BEGIN, getKeyText(VK_BEGIN))
-  val Undefined = Value(VK_UNDEFINED, getKeyText(VK_UNDEFINED))
-}
diff --git a/src/swing/scala/swing/event/KeyEvent.scala b/src/swing/scala/swing/event/KeyEvent.scala
deleted file mode 100644
index 1345c77..0000000
--- a/src/swing/scala/swing/event/KeyEvent.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-import javax.swing.JComponent
-
-sealed abstract class KeyEvent extends InputEvent {
-  def peer: java.awt.event.KeyEvent
-}
-
-case class KeyTyped(val source: Component, char: Char, val modifiers: Key.Modifiers,
-                    location: Key.Location.Value)
-                   (val peer: java.awt.event.KeyEvent) extends KeyEvent {
-  def this(e: java.awt.event.KeyEvent) =
-    this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
-        e.getKeyChar, e.getModifiersEx,
-        Key.Location(e.getKeyLocation))(e)
-}
-
-case class KeyPressed(val source: Component, key: Key.Value, val modifiers: Key.Modifiers,
-                    location: Key.Location.Value)
-                   (val peer: java.awt.event.KeyEvent) extends KeyEvent {
-  def this(e: java.awt.event.KeyEvent) =
-    this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
-        Key(e.getKeyCode), e.getModifiersEx, Key.Location(e.getKeyLocation))(e)
-}
-
-case class KeyReleased(val source: Component, key: Key.Value, val modifiers: Key.Modifiers,
-                    location: Key.Location.Value)
-                   (val peer: java.awt.event.KeyEvent) extends KeyEvent {
-  def this(e: java.awt.event.KeyEvent) =
-    this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
-        Key(e.getKeyCode), e.getModifiersEx, Key.Location(e.getKeyLocation))(e)
-}
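
    A usage sketch, assuming the `keys` publisher that scala-swing's Component exposes
    (not shown in this hunk); the names below are illustrative.

        import scala.swing._
        import scala.swing.event._

        class EnterHandler(text: TextArea) extends Reactor {
          listenTo(text.keys)
          reactions += {
            case KeyPressed(_, Key.Enter, mods, _) if (mods & Key.Modifier.Control) != 0 =>
              println("Ctrl+Enter pressed")
            case KeyTyped(_, c, _, _) =>
              println("typed: " + c)
          }
        }
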
diff --git a/src/swing/scala/swing/event/ListEvent.scala b/src/swing/scala/swing/event/ListEvent.scala
deleted file mode 100644
index bdb769d..0000000
--- a/src/swing/scala/swing/event/ListEvent.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-trait ListEvent[A] extends ComponentEvent {
-  override val source: ListView[A]
-}
-
-//case class ElementSelected[A](override val source: ListView[A], range: Range, live: Boolean)
-//           extends ListEvent[A] with AdjustingEvent with ListSelectionEvent
-
-abstract class ListChange[A](override val source: ListView[A]) extends ListEvent[A]
-
-object ListChanged {
-  def unapply[A](e: ListChanged[A]) = Some(e.source)
-  def apply[A](source: ListView[A]) = new ListChanged(source)
-}
-
-class ListChanged[A](override val source: ListView[A]) extends ListChange(source)
-
-object ListElementsAdded {
-  def unapply[A](e: ListElementsAdded[A]) = Some((e.source, e.range))
-  def apply[A](source: ListView[A], range: Range) = new ListElementsAdded(source, range)
-}
-
-class ListElementsAdded[A](override val source: ListView[A], val range: Range)
-           extends ListChange(source)
-
-object ListElementsRemoved {
-  def unapply[A](e: ListElementsRemoved[A]) = Some((e.source, e.range))
-  def apply[A](source: ListView[A], range: Range) = new ListElementsRemoved(source, range)
-}
-class ListElementsRemoved[A](override val source: ListView[A], val range: Range)
-           extends ListChange(source)
diff --git a/src/swing/scala/swing/event/MouseEvent.scala b/src/swing/scala/swing/event/MouseEvent.scala
deleted file mode 100644
index 8629d71..0000000
--- a/src/swing/scala/swing/event/MouseEvent.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-import java.awt.Point
-import javax.swing.JComponent
-
-sealed abstract class MouseEvent extends InputEvent {
-  def peer: java.awt.event.MouseEvent
-  def point: Point
-}
-
-sealed abstract class MouseButtonEvent extends MouseEvent {
-  def clicks: Int
-  def triggersPopup: Boolean
-}
-case class MouseClicked(val source: Component, point: Point, val modifiers: Key.Modifiers,
-                     clicks: Int, triggersPopup: Boolean)(val peer: java.awt.event.MouseEvent)
-           extends MouseButtonEvent {
-  def this(e: java.awt.event.MouseEvent) =
-    this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
-        e.getPoint, e.getModifiersEx, e.getClickCount, e.isPopupTrigger)(e)
-}
-case class MousePressed(val source: Component, point: Point, val modifiers: Key.Modifiers,
-                        clicks: Int, triggersPopup: Boolean)(val peer: java.awt.event.MouseEvent)
-           extends MouseButtonEvent {
-  def this(e: java.awt.event.MouseEvent) =
-    this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
-        e.getPoint, e.getModifiersEx, e.getClickCount, e.isPopupTrigger)(e)
-}
-case class MouseReleased(val source: Component, point: Point, val modifiers: Key.Modifiers,
-                        clicks: Int, triggersPopup: Boolean)(val peer: java.awt.event.MouseEvent)
-           extends MouseButtonEvent {
-  def this(e: java.awt.event.MouseEvent) =
-    this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
-        e.getPoint, e.getModifiersEx, e.getClickCount, e.isPopupTrigger)(e)
-}
-
-sealed abstract class MouseMotionEvent extends MouseEvent
-case class MouseMoved(val source: Component, point: Point, val modifiers: Key.Modifiers)(val peer: java.awt.event.MouseEvent)
-           extends MouseMotionEvent {
-  def this(e: java.awt.event.MouseEvent) =
-    this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
-        e.getPoint, e.getModifiersEx)(e)
-}
-case class MouseDragged(val source: Component, point: Point, val modifiers: Key.Modifiers)(val peer: java.awt.event.MouseEvent)
-           extends MouseMotionEvent {
-  def this(e: java.awt.event.MouseEvent) =
-    this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
-        e.getPoint, e.getModifiersEx)(e)
-}
-case class MouseEntered(val source: Component, point: Point, val modifiers: Key.Modifiers)(val peer: java.awt.event.MouseEvent)
-           extends MouseMotionEvent {
-  def this(e: java.awt.event.MouseEvent) =
-    this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
-        e.getPoint, e.getModifiersEx)(e)
-}
-case class MouseExited(val source: Component, point: Point, val modifiers: Key.Modifiers)(val peer: java.awt.event.MouseEvent)
-           extends MouseMotionEvent {
-  def this(e: java.awt.event.MouseEvent) =
-      this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
-          e.getPoint, e.getModifiersEx)(e)
-}
-
-case class MouseWheelMoved(val source: Component, point: Point, val modifiers: Key.Modifiers, rotation: Int)(val peer: java.awt.event.MouseEvent)
-           extends MouseEvent {
-  def this(e: java.awt.event.MouseWheelEvent) =
-    this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
-        e.getPoint, e.getModifiersEx, e.getWheelRotation)(e)
-}
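
    A usage sketch, assuming the `mouse.clicks` and `mouse.wheel` publishers on
    scala-swing's Component (names illustrative).

        import scala.swing._
        import scala.swing.event._

        class MouseLogger(view: Label) extends Reactor {
          listenTo(view.mouse.clicks, view.mouse.wheel)
          reactions += {
            case MouseClicked(_, point, _, 2, _) =>   // clicks == 2: double click
              println("double-click at " + point)
            case MouseWheelMoved(_, _, _, rotation) =>
              println("wheel rotated by " + rotation)
          }
        }
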
diff --git a/src/swing/scala/swing/event/SelectionEvent.scala b/src/swing/scala/swing/event/SelectionEvent.scala
deleted file mode 100644
index 39d6a13..0000000
--- a/src/swing/scala/swing/event/SelectionEvent.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-/**
- * An event that indicates a change in a selection such as in a list view or a table.
- */
-trait SelectionEvent
-
-/**
- * An event that indicates a selection of a range of indices.
- */
-trait ListSelectionEvent extends SelectionEvent {
-  def range: Range
-}
-
-case class SelectionChanged(override val source: Component) extends ComponentEvent with SelectionEvent
-
-object ListSelectionChanged {
-  def unapply[A](e: ListSelectionChanged[A]): Option[(ListView[A], Range, Boolean)] =
-    Some((e.source, e.range, e.live))
-}
-
-class ListSelectionChanged[A](override val source: ListView[A], val range: Range, val live: Boolean)
-  extends SelectionChanged(source) with ListEvent[A]
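
    A sketch of the usual consumer, assuming ListView's `selection` object publishes
    ListSelectionChanged (names illustrative).

        import scala.swing._
        import scala.swing.event.ListSelectionChanged

        class ListWatcher(list: ListView[String]) extends Reactor {
          listenTo(list.selection)
          reactions += {
            case ListSelectionChanged(_, range, live) if !live =>  // skip in-progress updates
              println("selected indices: " + range)
          }
        }
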
diff --git a/src/swing/scala/swing/event/TableEvent.scala b/src/swing/scala/swing/event/TableEvent.scala
deleted file mode 100644
index c420ea2..0000000
--- a/src/swing/scala/swing/event/TableEvent.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-abstract class TableEvent(override val source: Table) extends ComponentEvent
-
-abstract class TableChange(override val source: Table) extends TableEvent(source)
-
-/**
- * The most general table change. The table might have changed completely,
- * i.e., columns might have been reordered, rows added or removed, etc.
- * No other event indicates that the structure might have changed.
- */
-case class TableStructureChanged(override val source: Table) extends TableChange(source)
-/**
- * The table structure, i.e., the column order, names, and types, stays the same,
- * but anything else might have changed.
- */
-case class TableChanged(override val source: Table) extends TableChange(source)
-/**
- * The size of the table stays the same, but the given range of rows might
- * have changed, though only in the given column. A value of -1 for the column
- * denotes all columns.
- */
-case class TableUpdated(override val source: Table, range: Range, column: Int)
-           extends TableChange(source)
-/**
- * Any change that caused the table to change its size.
- */
-class TableResized(override val source: Table) extends TableChange(source)
-case class TableRowsAdded(override val source: Table, range: Range) extends TableResized(source)
-case class TableRowsRemoved(override val source: Table, range: Range) extends TableResized(source)
-
-case class TableColumnsSelected(override val source: Table, range: Range, adjusting: Boolean)
-           extends TableEvent(source) with AdjustingEvent with ListSelectionEvent
-case class TableRowsSelected(override val source: Table, range: Range, adjusting: Boolean)
-           extends TableEvent(source) with AdjustingEvent with ListSelectionEvent
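
    A hedged sketch (names illustrative), assuming scala-swing's Table republishes its
    model events as the case classes above and its `selection` object publishes the
    selection events.

        import scala.swing._
        import scala.swing.event._

        class TableWatcher(table: Table) extends Reactor {
          listenTo(table, table.selection)
          reactions += {
            case TableStructureChanged(_)      => println("columns reordered, added or removed")
            case TableUpdated(_, rows, column) => println("rows " + rows + " changed in column " + column)
            case e: TableRowsSelected if e.committed =>
              println("rows selected: " + e.range)
          }
        }
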
diff --git a/src/swing/scala/swing/event/UIEvent.scala b/src/swing/scala/swing/event/UIEvent.scala
deleted file mode 100644
index a4644b0..0000000
--- a/src/swing/scala/swing/event/UIEvent.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-trait UIEvent extends Event {
-  val source: UIElement
-}
-
-case class UIElementMoved(source: UIElement) extends UIEvent
-case class UIElementResized(source: UIElement) extends UIEvent
-case class UIElementShown(source: UIElement) extends UIEvent
-case class UIElementHidden(source: UIElement) extends UIEvent
diff --git a/src/swing/scala/swing/event/ValueChanged.scala b/src/swing/scala/swing/event/ValueChanged.scala
deleted file mode 100644
index ef08085..0000000
--- a/src/swing/scala/swing/event/ValueChanged.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-object ValueChanged {
-  def unapply(a: ValueChanged): Option[Component] = Some(a.source)
-}
-
-class ValueChanged(override val source: Component) extends ComponentEvent
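
    A sketch of the distinction between live edits and committed edits in a TextField
    (names illustrative); since EditDone extends ValueChanged, it must be matched first.

        import scala.swing._
        import scala.swing.event.{EditDone, ValueChanged}

        class FieldWatcher(field: TextField) extends Reactor {
          listenTo(field)
          reactions += {
            case EditDone(`field`)     => println("committed: " + field.text)
            case ValueChanged(`field`) => println("text is now: " + field.text)
          }
        }
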
diff --git a/src/swing/scala/swing/event/WindowActivated.scala b/src/swing/scala/swing/event/WindowActivated.scala
deleted file mode 100644
index 1473242..0000000
--- a/src/swing/scala/swing/event/WindowActivated.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-case class WindowActivated(override val source: Window) extends WindowEvent(source)
diff --git a/src/swing/scala/swing/event/WindowClosed.scala b/src/swing/scala/swing/event/WindowClosed.scala
deleted file mode 100644
index 80afe1a..0000000
--- a/src/swing/scala/swing/event/WindowClosed.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-package scala.swing
-package event
-
-case class WindowClosed(override val source: Window) extends WindowEvent(source)
diff --git a/src/swing/scala/swing/event/WindowClosing.scala b/src/swing/scala/swing/event/WindowClosing.scala
deleted file mode 100644
index 3c64aeb..0000000
--- a/src/swing/scala/swing/event/WindowClosing.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-case class WindowClosing(override val source: Window) extends WindowEvent(source)
diff --git a/src/swing/scala/swing/event/WindowDeactivated.scala b/src/swing/scala/swing/event/WindowDeactivated.scala
deleted file mode 100644
index f0eec57..0000000
--- a/src/swing/scala/swing/event/WindowDeactivated.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-case class WindowDeactivated(override val source: Window) extends WindowEvent(source)
diff --git a/src/swing/scala/swing/event/WindowDeiconified.scala b/src/swing/scala/swing/event/WindowDeiconified.scala
deleted file mode 100644
index 6e07f85..0000000
--- a/src/swing/scala/swing/event/WindowDeiconified.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-case class WindowDeiconified(override val source: Window) extends WindowEvent(source)
diff --git a/src/swing/scala/swing/event/WindowEvent.scala b/src/swing/scala/swing/event/WindowEvent.scala
deleted file mode 100644
index b8ca329..0000000
--- a/src/swing/scala/swing/event/WindowEvent.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-abstract class WindowEvent(override val source: Window) extends UIEvent
diff --git a/src/swing/scala/swing/event/WindowIconified.scala b/src/swing/scala/swing/event/WindowIconified.scala
deleted file mode 100644
index 3b5139f..0000000
--- a/src/swing/scala/swing/event/WindowIconified.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-case class WindowIconified(override val source: Window) extends WindowEvent(source)
diff --git a/src/swing/scala/swing/event/WindowOpened.scala b/src/swing/scala/swing/event/WindowOpened.scala
deleted file mode 100644
index f5854ed..0000000
--- a/src/swing/scala/swing/event/WindowOpened.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-
-package scala.swing
-package event
-
-case class WindowOpened(override val source: Window) extends WindowEvent(source)
diff --git a/src/swing/scala/swing/model/Matrix.scala b/src/swing/scala/swing/model/Matrix.scala
deleted file mode 100644
index 664d44a..0000000
--- a/src/swing/scala/swing/model/Matrix.scala
+++ /dev/null
@@ -1,121 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.swing
-package model
-
-// Dummy to keep ant from recompiling on every run.
-trait Matrix { }
-
-/*trait Matrix[A] extends Function2[Int, Int, A] {
-
-  val width: Int
-  val height: Int
-
-  assert(width > 0 && height > 0)
-
-  private val delegate = new Array[A](width * height)
-
-  override def apply(col: Int, row: Int): A =
-    delegate(col * height + row)
-
-  def apply(coord: (Int, Int)): A =
-    apply(coord._1, coord._2)
-
-  def col(index: Int): Matrix.FlatSeq[A] =
-    new Matrix.SubArray[A](delegate, index * height, height)
-
-  def row(index: Int): Matrix.FlatSeq[A] =
-    new Matrix.SparseArray[A](delegate, index, height)
-
-  def update(xpos: Int, ypos: Int, elem: A) {
-    delegate(xpos % width * height + ypos % height) = elem
-  }
-
-  def update(coord: (Int, Int), elem: A) {
-    update(coord._1, coord._2, elem)
-  }
-
-  def initializeWith(f: (Int, Int) => A): this.type = {
-    for (index <- 0 until (width * height))
-      delegate(index) = f(index / height, index % height)
-    this
-  }
-
-  def initializeTo(v: => A): this.type = {
-    for (index <- 0 until (width * height))
-      delegate(index) = v
-    this
-  }
-
-  def size: (Int, Int) = (width, height)
-
-  /** A flattened view of the matrix. The flattening is done on columns, i.e.,
-    * the first values of the flattened sequence are the cells of the first
-    * column. As this is a view of the matrix, any change to the matrix will
-    * also be visible in the flattened array, and vice-versa. */
-  def flat: Array[A] = delegate
-
-}
-
-object Matrix {
-
-  def apply[A](columns: Int, rows: Int) = new Matrix[A] {
-    val width = columns
-    val height = rows
-  }
-
-  def apply[A](default: (Int, Int) => A, columns: Int, rows: Int) = new Matrix[A] {
-    val width = columns
-    val height = rows
-    initializeWith(default)
-  }
-
-  def apply[A](default: => A, columns: Int, rows: Int) = new Matrix[A] {
-    val width = columns
-    val height = rows
-    initializeTo(default)
-  }
-
-  trait FlatSeq[A] extends RandomAccessSeq[A] {
-    def        update (index: Int, elem: A): Unit
-  }
-
-  private class SubArray[A](delegate: Array[A], start: Int, val length: Int) extends FlatSeq[A] {
-    def apply(index: Int): A =
-      if (index < length)
-        delegate(index + start)
-      else throw new IndexOutOfBoundsException
-    def update(index: Int, elem: A): Unit =
-      if (index < length)
-        delegate(index + start) = elem
-      else throw new IndexOutOfBoundsException
-  }
-
-  private class SparseArray[A](delegate: Array[A], start: Int, span: Int) extends FlatSeq[A] {
-    def apply(index: Int): A = {
-      if (index < length)
-        delegate((index * span) + start)
-      else throw new IndexOutOfBoundsException
-    }
-    def length: Int = delegate.length / span
-    def update(index: Int, elem: A): Unit =
-      if (index < length)
-        delegate((index * span) + start) = elem
-      else throw new IndexOutOfBoundsException
-  }
-
-  implicit def MatrixToSeqs[A](matrix: Matrix[A]): Seq[Seq[A]] = {
-    val result = new Array[SubArray[A]](matrix.width)
-    for (col <- 0 until matrix.width)
-      result(col) = new SubArray[A](matrix.delegate, col * matrix.height, matrix.height)
-    result
-  }
-
-}*/
diff --git a/src/swing/scala/swing/package.scala b/src/swing/scala/swing/package.scala
deleted file mode 100644
index 4549766..0000000
--- a/src/swing/scala/swing/package.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package scala
-
-/**
- * Useful imports that don't have wrappers.
- */
-package object swing {
-  type Point = java.awt.Point
-  type Dimension = java.awt.Dimension
-  type Rectangle = java.awt.Rectangle
-  type Insets = java.awt.Insets
-
-  type Graphics2D = java.awt.Graphics2D
-  type Color = java.awt.Color
-  type Image = java.awt.Image
-  type Font = java.awt.Font
-
-  implicit lazy val reflectiveCalls     = scala.language.reflectiveCalls
-  implicit lazy val implicitConversions = scala.language.implicitConversions
-
-  private[swing] def ifNull[A](o: Object, a: A): A = if(o eq null) a else o.asInstanceOf[A]
-  private[swing] def toOption[A](o: Object): Option[A] = if(o eq null) None else Some(o.asInstanceOf[A])
-  private[swing] def toAnyRef(x: Any): AnyRef = x.asInstanceOf[AnyRef]
-}
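
    A small sketch of what these aliases provide (names illustrative): AWT geometry
    types are usable after a single scala.swing import.

        import scala.swing._

        object GeometryDemo {
          def main(args: Array[String]) {
            Swing.onEDT {
              val size   = new Dimension(640, 480)
              val origin = new Point(10, 10)
              val frame  = new Frame { title = "Geometry demo" }
              frame.bounds = new Rectangle(origin, size)
              frame.open()
            }
          }
        }
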
diff --git a/src/swing/swing.version.properties b/src/swing/swing.version.properties
deleted file mode 100644
index ba3675f..0000000
--- a/src/swing/swing.version.properties
+++ /dev/null
@@ -1,2 +0,0 @@
-version.major=0
-version.minor=4
diff --git a/starr.number b/starr.number
deleted file mode 100644
index 8f1f615..0000000
--- a/starr.number
+++ /dev/null
@@ -1,2 +0,0 @@
-starr.version=2.10.3
-starr.use.released=1
\ No newline at end of file
diff --git a/test/attic/files/cli/test1/Main.check.j9vm5 b/test/attic/files/cli/test1/Main.check.j9vm5
deleted file mode 100644
index de454ef..0000000
--- a/test/attic/files/cli/test1/Main.check.j9vm5
+++ /dev/null
@@ -1,4 +0,0 @@
-env: -cpp: No such file or directory
-env: test1.Main: No such file or directory
-env: -cp: No such file or directory
-1: test 3 passed
diff --git a/test/attic/files/cli/test1/Main.check.java b/test/attic/files/cli/test1/Main.check.java
deleted file mode 100644
index 64410de..0000000
--- a/test/attic/files/cli/test1/Main.check.java
+++ /dev/null
@@ -1,6 +0,0 @@
-Unrecognized option: -cpp
-Could not create the Java virtual machine.
-1: test 1 passed (1)
-1: test 2 passed (1)
-1: test 3 passed (1)
-1: test 4 passed (2)
diff --git a/test/attic/files/cli/test1/Main.check.java5 b/test/attic/files/cli/test1/Main.check.java5
deleted file mode 100644
index 64410de..0000000
--- a/test/attic/files/cli/test1/Main.check.java5
+++ /dev/null
@@ -1,6 +0,0 @@
-Unrecognized option: -cpp
-Could not create the Java virtual machine.
-1: test 1 passed (1)
-1: test 2 passed (1)
-1: test 3 passed (1)
-1: test 4 passed (2)
diff --git a/test/attic/files/cli/test1/Main.check.java5_api b/test/attic/files/cli/test1/Main.check.java5_api
deleted file mode 100644
index 8693a5d..0000000
--- a/test/attic/files/cli/test1/Main.check.java5_api
+++ /dev/null
@@ -1,19 +0,0 @@
-|-- allclasses-frame.html
-|-- allclasses-noframe.html
-|-- constant-values.html
-|-- deprecated-list.html
-|-- help-doc.html
-|-- index-all.html
-|-- index.html
-|-- overview-tree.html
-|-- package-list
-|-- resources
-|   `-- inherit.gif
-|-- stylesheet.css
-`-- test1
-    |-- Main.html
-    |-- package-frame.html
-    |-- package-summary.html
-    `-- package-tree.html
-
-2 directories, 15 files
diff --git a/test/attic/files/cli/test1/Main.check.java5_j9 b/test/attic/files/cli/test1/Main.check.java5_j9
deleted file mode 100644
index de454ef..0000000
--- a/test/attic/files/cli/test1/Main.check.java5_j9
+++ /dev/null
@@ -1,4 +0,0 @@
-env: -cpp: No such file or directory
-env: test1.Main: No such file or directory
-env: -cp: No such file or directory
-1: test 3 passed
diff --git a/test/attic/files/cli/test1/Main.check.javac b/test/attic/files/cli/test1/Main.check.javac
deleted file mode 100644
index ba25d9b..0000000
--- a/test/attic/files/cli/test1/Main.check.javac
+++ /dev/null
@@ -1,19 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
-  -g                        Generate all debugging info
-  -g:none                   Generate no debugging info
-  -g:{lines,vars,source}    Generate only some debugging info
-  -nowarn                   Generate no warnings
-  -verbose                  Output messages about what the compiler is doing
-  -deprecation              Output source locations where deprecated APIs are used
-  -classpath <path>         Specify where to find user class files
-  -sourcepath <path>        Specify where to find input source files
-  -bootclasspath <path>     Override location of bootstrap class files
-  -extdirs <dirs>           Override location of installed extensions
-  -d <directory>            Specify where to place generated class files
-  -encoding <encoding>      Specify character encoding used by source files
-  -source <release>         Provide source compatibility with specified release
-  -target <release>         Generate class files for specific VM version
-  -help                     Print a synopsis of standard options
-
diff --git a/test/attic/files/cli/test1/Main.check.javac5 b/test/attic/files/cli/test1/Main.check.javac5
deleted file mode 100644
index 0cb29d3..0000000
--- a/test/attic/files/cli/test1/Main.check.javac5
+++ /dev/null
@@ -1,24 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
-  -g                         Generate all debugging info
-  -g:none                    Generate no debugging info
-  -g:{lines,vars,source}     Generate only some debugging info
-  -nowarn                    Generate no warnings
-  -verbose                   Output messages about what the compiler is doing
-  -deprecation               Output source locations where deprecated APIs are used
-  -classpath <path>          Specify where to find user class files
-  -cp <path>                 Specify where to find user class files
-  -sourcepath <path>         Specify where to find input source files
-  -bootclasspath <path>      Override location of bootstrap class files
-  -extdirs <dirs>            Override location of installed extensions
-  -endorseddirs <dirs>       Override location of endorsed standards path
-  -d <directory>             Specify where to place generated class files
-  -encoding <encoding>       Specify character encoding used by source files
-  -source <release>          Provide source compatibility with specified release
-  -target <release>          Generate class files for specific VM version
-  -version                   Version information
-  -help                      Print a synopsis of standard options
-  -X                         Print a synopsis of nonstandard options
-  -J<flag>                   Pass <flag> directly to the runtime system
-
diff --git a/test/attic/files/cli/test1/Main.check.javac6 b/test/attic/files/cli/test1/Main.check.javac6
deleted file mode 100644
index 8f37a05..0000000
--- a/test/attic/files/cli/test1/Main.check.javac6
+++ /dev/null
@@ -1,29 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
-  -g                         Generate all debugging info
-  -g:none                    Generate no debugging info
-  -g:{lines,vars,source}     Generate only some debugging info
-  -nowarn                    Generate no warnings
-  -verbose                   Output messages about what the compiler is doing
-  -deprecation               Output source locations where deprecated APIs are used
-  -classpath <path>          Specify where to find user class files and annotation processors
-  -cp <path>                 Specify where to find user class files and annotation processors
-  -sourcepath <path>         Specify where to find input source files
-  -bootclasspath <path>      Override location of bootstrap class files
-  -extdirs <dirs>            Override location of installed extensions
-  -endorseddirs <dirs>       Override location of endorsed standards path
-  -proc:{none, only}         Control whether annotation processing and/or compilation is done.
-  -processor <class>         Name of the annotation processor to run; bypasses default discovery process
-  -processorpath <path>      Specify where to find annotation processors
-  -d <directory>             Specify where to place generated class files
-  -s <directory>             Specify where to place generated source files
-  -encoding <encoding>       Specify character encoding used by source files
-  -source <release>          Provide source compatibility with specified release
-  -target <release>          Generate class files for specific VM version
-  -version                   Version information
-  -help                      Print a synopsis of standard options
-  -A[key[=value]]            Options to pass to annotation processors
-  -X                         Print a synopsis of nonstandard options
-  -J<flag>                   Pass <flag> directly to the runtime system
-
diff --git a/test/attic/files/cli/test1/Main.check.jikes b/test/attic/files/cli/test1/Main.check.jikes
deleted file mode 100644
index cd89168..0000000
--- a/test/attic/files/cli/test1/Main.check.jikes
+++ /dev/null
@@ -1,3 +0,0 @@
-Error: "-dd" is an invalid option.
-use: jikes [options] [@files] file.java...
-For more help, try -help or -version.
diff --git a/test/attic/files/cli/test1/Main.check.jikes5 b/test/attic/files/cli/test1/Main.check.jikes5
deleted file mode 100644
index cd89168..0000000
--- a/test/attic/files/cli/test1/Main.check.jikes5
+++ /dev/null
@@ -1,3 +0,0 @@
-Error: "-dd" is an invalid option.
-use: jikes [options] [@files] file.java...
-For more help, try -help or -version.
diff --git a/test/attic/files/cli/test1/Main.check.scala b/test/attic/files/cli/test1/Main.check.scala
deleted file mode 100644
index 43b200a..0000000
--- a/test/attic/files/cli/test1/Main.check.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-unknown option: '-cpp'
-scala [ <option> ]... [<torun> <arguments>]
-
-All options to scalac are allowed.  See scalac -help.
-
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
-
-Option -howtorun allows explicitly specifying how to run <torun>:
-    script: it is a script file
-    object: it is an object name
-    guess: (the default) try to guess
-
-Option -savecompiled requests that the compiled script be saved
-for future use.
-
-Option -nocompdaemon requests that the fsc offline compiler not be used.
-
-Option -Dproperty=value sets a Java system property.
-
-1: test 1 passed (1)
-1: test 2 passed (1)
-1: test 3 passed (1)
-1: test 4 passed (2)
diff --git a/test/attic/files/cli/test1/Main.check.scala_api b/test/attic/files/cli/test1/Main.check.scala_api
deleted file mode 100644
index 6fac39d..0000000
--- a/test/attic/files/cli/test1/Main.check.scala_api
+++ /dev/null
@@ -1,33 +0,0 @@
-|-- all-classes.html
-|-- index.html
-|-- modules.html
-|-- nav-classes.html
-|-- root-content.html
-|-- scala
-|   |-- Any.html
-|   |-- AnyRef.html
-|   |-- AnyVal.html
-|   |-- Boolean.html
-|   |-- Byte.html
-|   |-- Char.html
-|   |-- Double.html
-|   |-- Float.html
-|   |-- Int.html
-|   |-- Long.html
-|   |-- Nothing.html
-|   |-- Null.html
-|   |-- Short.html
-|   |-- Unit.html
-|   `-- runtime
-|       |-- BoxedFloat.html
-|       |-- BoxedInt.html
-|       |-- BoxedLong.html
-|       `-- BoxedNumber.html
-|-- script.js
-|-- style.css
-|-- test1
-|   `-- Main$object.html
-|-- test1$content.html
-`-- test1$package.html
-
-3 directories, 28 files
diff --git a/test/attic/files/cli/test1/Main.check.scala_j9 b/test/attic/files/cli/test1/Main.check.scala_j9
deleted file mode 100644
index 65d5dda..0000000
--- a/test/attic/files/cli/test1/Main.check.scala_j9
+++ /dev/null
@@ -1,15 +0,0 @@
-unknown option: '-cpp'
-scala [ <compiler-option> | -howtorun:how ]... [<torun> <arguments>]
-
-<compiler-option>'s are as for scalac; see scalac -help.
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
--howtorun allows explicitly specifying how to run <torun>:
-    script: it is a script file
-    object: it is an object name
-    guess: (the default) try to guess
-
-1: test 1 passed (1)
-1: test 2 passed (1)
-1: test 3 passed (1)
-1: test 4 passed (2)
diff --git a/test/attic/files/cli/test1/Main.check.scalac b/test/attic/files/cli/test1/Main.check.scalac
deleted file mode 100644
index 8465810..0000000
--- a/test/attic/files/cli/test1/Main.check.scalac
+++ /dev/null
@@ -1,63 +0,0 @@
-scalac error: bad option: '-dd'
-  scalac -help  gives more information
-Usage: scalac <options | source files>
-where possible options include: 
-  -doc                            Generate documentation
-  -g:<g>                          Generate debugging info (none,source,line,vars,notc)
-  -nowarn                         Generate no warnings
-  -noassert                       Generate no assertions and assumptions
-  -verbose                        Output messages about what the compiler is doing
-  -classpath <path>               Specify where to find user class files
-  -sourcepath <path>              Specify where to find input source files
-  -bootclasspath <path>           Override location of bootstrap class files
-  -extdirs <dirs>                 Override location of installed extensions
-  -d <directory>                  Specify where to place generated class files
-  -encoding <encoding>            Specify character encoding used by source files
-  -windowtitle <windowtitle>      Specify window title of generated HTML documentation
-  -documenttitle <documenttitle>  Specify document title of generated HTML documentation
-  -target:<target>                Specify which backend to use (jvm-1.5,msil)
-  -migrate                        Assist in migrating from Scala version 1.0
-  -o <file>                       Name of the output assembly (only relevant with -target:msil)
-  -r <path>                       List of assemblies referenced by the program (only relevant with -target:msil)
-  -debug                          Output debugging messages
-  -deprecation                    enable detailed deprecation warnings
-  -unchecked                      enable detailed unchecked warnings
-  -statistics                     Print compiler statistics
-  -explaintypes                   Explain type errors in more detail
-  -resident                       Compiler stays resident, files to compile are read from standard input
-  -uniqid                         Print identifiers with unique names (debugging option)
-  -printtypes                     Print tree types (debugging option)
-  -prompt                         Display a prompt after each error (debugging option)
-  -noimports                      Compile without any implicit imports
-  -nopredefs                      Compile without any implicit predefined values
-  -skip:<phase>                   Skip <phase>
-  -check:<phase>                  Check the tree at start of <phase>
-  -print:<phase>                  Print out program after <phase>
-  -printer:<printer>              Printer to use (text,html)
-  -printfile <file>               Specify file in which to print trees
-  -graph:<phase>                  Graph the program after <phase>
-  -browse:<phase>                 Browse the abstract syntax tree after <phase>
-  -stop:<phase>                   Stop after phase <phase>
-  -log:<phase>                    Log operations in <phase>
-  -logall                         Log all operations
-  -version                        Print product version and exit
-  -help                           Print a synopsis of standard options
-  -nouescape                      disables handling of \u unicode escapes
-  -Xinline                        Perform inlining when possible
-  -XO                             Optimize. implies -Xinline, -Xcloselim and -Xdce
-  -Xcloselim                      Perform closure elimination
-  -Xdce                           Perform dead code elimination
-  -Xwarndeadcode                  Emit warnings for dead code
-  -XbytecodeRead                  Enable bytecode reader.
-  -Xdetach                        Perform detaching of remote closures
-  -Xshowcls <class>               Show class info
-  -Xshowobj <object>              Show object info
-  -Xlinearizer:<Xlinearizer>      Linearizer to use (normal,dfs,rpo,dump)
-  -Xgenerics                      Use generic Java types
-  -Xprintpos                      Print tree positions (as offsets)
-  -Xscript                        compile script file
-  -Xexperimental                  enable experimental extensions
-  -Xplugtypes                     parse but ignore annotations in more locations
-  -Xkilloption                    optimizes option types
-
-one error found
diff --git a/test/attic/files/cli/test1/Main.check.scalaint b/test/attic/files/cli/test1/Main.check.scalaint
deleted file mode 100644
index 88345d1..0000000
--- a/test/attic/files/cli/test1/Main.check.scalaint
+++ /dev/null
@@ -1,45 +0,0 @@
-unknown option: '-cpp'
-scala [ <option> ]... [<torun> <arguments>]
-
-All options to scalac are allowed.  See scalac -help.
-
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
-
-Option -howtorun allows explicitly specifying how to run <torun>:
-    script: it is a script file
-    object: it is an object name
-    guess: (the default) try to guess
-
-Option -savecompiled requests that the compiled script be saved
-for future use.
-
-Option -nocompdaemon requests that the fsc offline compiler not be used.
-
-Option -Dproperty=value sets a Java system property.
-
-
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 1: test 1 passed (1)
-unnamed0: scala.Unit = ()
-
-scala> 
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 1: test 2 passed (1)
-unnamed0: scala.Unit = ()
-
-scala> 
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 1: test 3 passed (1)
-unnamed0: scala.Unit = ()
-
-scala> 
diff --git a/test/attic/files/cli/test1/Main.java b/test/attic/files/cli/test1/Main.java
deleted file mode 100644
index 8850b87..0000000
--- a/test/attic/files/cli/test1/Main.java
+++ /dev/null
@@ -1,8 +0,0 @@
-// @info no dependency
-package test1;
-public class Main {
-  public static void main(String args[]) {
-    String arg = (args.length > 0) ? args[0] : "?";
-    System.out.println("1: test " + arg + " passed (" + args.length + ")");
-  }
-}
diff --git a/test/attic/files/cli/test1/Main.scala b/test/attic/files/cli/test1/Main.scala
deleted file mode 100644
index f7dd8a0..0000000
--- a/test/attic/files/cli/test1/Main.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-// @info no dependency
-package test1
-object Main {
-  def main(args: Array[String]) = {
-    val arg = if (args != null && args.length > 0) args(0) else "?"
-    Console.println("1: test " + arg + " passed (" + args.length + ")")
-  }
-}
diff --git a/test/attic/files/cli/test2/Main.check.j9vm5 b/test/attic/files/cli/test2/Main.check.j9vm5
deleted file mode 100644
index 8f4fdf8..0000000
--- a/test/attic/files/cli/test2/Main.check.j9vm5
+++ /dev/null
@@ -1,4 +0,0 @@
-env: -cpp: No such file or directory
-env: test2.Main: No such file or directory
-env: -cp: No such file or directory
-2: 1: test 3 passed
diff --git a/test/attic/files/cli/test2/Main.check.java b/test/attic/files/cli/test2/Main.check.java
deleted file mode 100644
index aca383d..0000000
--- a/test/attic/files/cli/test2/Main.check.java
+++ /dev/null
@@ -1,6 +0,0 @@
-Unrecognized option: -cpp
-Could not create the Java virtual machine.
-2: 1: test 1 passed (1)
-2: 1: test 2 passed (1)
-2: 1: test 3 passed (1)
-2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test2/Main.check.java5 b/test/attic/files/cli/test2/Main.check.java5
deleted file mode 100644
index aca383d..0000000
--- a/test/attic/files/cli/test2/Main.check.java5
+++ /dev/null
@@ -1,6 +0,0 @@
-Unrecognized option: -cpp
-Could not create the Java virtual machine.
-2: 1: test 1 passed (1)
-2: 1: test 2 passed (1)
-2: 1: test 3 passed (1)
-2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test2/Main.check.java5_api b/test/attic/files/cli/test2/Main.check.java5_api
deleted file mode 100644
index 4ff775c..0000000
--- a/test/attic/files/cli/test2/Main.check.java5_api
+++ /dev/null
@@ -1,24 +0,0 @@
-|-- allclasses-frame.html
-|-- allclasses-noframe.html
-|-- constant-values.html
-|-- deprecated-list.html
-|-- help-doc.html
-|-- index-all.html
-|-- index.html
-|-- overview-tree.html
-|-- package-list
-|-- resources
-|   `-- inherit.gif
-|-- stylesheet.css
-|-- test1
-|   |-- Main.html
-|   |-- package-frame.html
-|   |-- package-summary.html
-|   `-- package-tree.html
-`-- test2
-    |-- Main.html
-    |-- package-frame.html
-    |-- package-summary.html
-    `-- package-tree.html
-
-3 directories, 19 files
diff --git a/test/attic/files/cli/test2/Main.check.java5_j9 b/test/attic/files/cli/test2/Main.check.java5_j9
deleted file mode 100644
index 2dcb6e8..0000000
--- a/test/attic/files/cli/test2/Main.check.java5_j9
+++ /dev/null
@@ -1,36 +0,0 @@
-JVMJ9VM007E Command-line option unrecognised: -cpp
-Could not create the Java virtual machine.
-
-Usage: java [-options] class [args...]
-           (to execute a class)
-   or  java [-jar] [-options] jarfile [args...]
-           (to execute a jar file)
-
-where options include:
-    -cp -classpath <directories and zip/jar files separated by :>
-              set search path for application classes and resources
-    -D<name>=<value>
-              set a system property
-    -verbose[:class|gc|jni]
-              enable verbose output
-    -version  print product version
-    -version:<value>
-        require the specified version to run
-    -showversion  print product version and continue
-    -jre-restrict-search | -no-jre-restrict-search
-              include/exclude user private JREs in the version search
-    -agentlib:<libname>[=<options>]
-              load native agent library <libname>, e.g. -agentlib:hprof
-              see also, -agentlib:jdwp=help and -agentlib:hprof=help
-    -agentpath:<pathname>[=<options>]
-              load native agent library by full pathname
-    -javaagent:<jarpath>[=<options>]
-              load Java programming language agent, see java.lang.instrument
-    -? -help  print this help message
-    -X        print help on non-standard options
-    -assert   print help on assert options
-
-The java class is not found:  test2.Main
-The java class is not found:  test2.Main
-The java class is not found:  test2.Main
-The java class is not found:  test2.Main
diff --git a/test/attic/files/cli/test2/Main.check.javac b/test/attic/files/cli/test2/Main.check.javac
deleted file mode 100644
index c40c0a7..0000000
--- a/test/attic/files/cli/test2/Main.check.javac
+++ /dev/null
@@ -1,27 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
-  -g                        Generate all debugging info
-  -g:none                   Generate no debugging info
-  -g:{lines,vars,source}    Generate only some debugging info
-  -nowarn                   Generate no warnings
-  -verbose                  Output messages about what the compiler is doing
-  -deprecation              Output source locations where deprecated APIs are used
-  -classpath <path>         Specify where to find user class files
-  -sourcepath <path>        Specify where to find input source files
-  -bootclasspath <path>     Override location of bootstrap class files
-  -extdirs <dirs>           Override location of installed extensions
-  -d <directory>            Specify where to place generated class files
-  -encoding <encoding>      Specify character encoding used by source files
-  -source <release>         Provide source compatibility with specified release
-  -target <release>         Generate class files for specific VM version
-  -help                     Print a synopsis of standard options
-
-files/cli/test2/Main.java:6: package test1 does not exist
-    test1.Main.main(args);
-         ^
-1 error
-files/cli/test2/Main.java:6: package test1 does not exist
-    test1.Main.main(args);
-         ^
-1 error
diff --git a/test/attic/files/cli/test2/Main.check.javac5 b/test/attic/files/cli/test2/Main.check.javac5
deleted file mode 100644
index 0ac32b0..0000000
--- a/test/attic/files/cli/test2/Main.check.javac5
+++ /dev/null
@@ -1,28 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
-  -g                         Generate all debugging info
-  -g:none                    Generate no debugging info
-  -g:{lines,vars,source}     Generate only some debugging info
-  -nowarn                    Generate no warnings
-  -verbose                   Output messages about what the compiler is doing
-  -deprecation               Output source locations where deprecated APIs are used
-  -classpath <path>          Specify where to find user class files
-  -cp <path>                 Specify where to find user class files
-  -sourcepath <path>         Specify where to find input source files
-  -bootclasspath <path>      Override location of bootstrap class files
-  -extdirs <dirs>            Override location of installed extensions
-  -endorseddirs <dirs>       Override location of endorsed standards path
-  -d <directory>             Specify where to place generated class files
-  -encoding <encoding>       Specify character encoding used by source files
-  -source <release>          Provide source compatibility with specified release
-  -target <release>          Generate class files for specific VM version
-  -version                   Version information
-  -help                      Print a synopsis of standard options
-  -X                         Print a synopsis of nonstandard options
-  -J<flag>                   Pass <flag> directly to the runtime system
-
-files/cli/test2/Main.java:6: package test1 does not exist
-    test1.Main.main(args);
-         ^
-1 error
diff --git a/test/attic/files/cli/test2/Main.check.javac6 b/test/attic/files/cli/test2/Main.check.javac6
deleted file mode 100644
index 350d325..0000000
--- a/test/attic/files/cli/test2/Main.check.javac6
+++ /dev/null
@@ -1,33 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
-  -g                         Generate all debugging info
-  -g:none                    Generate no debugging info
-  -g:{lines,vars,source}     Generate only some debugging info
-  -nowarn                    Generate no warnings
-  -verbose                   Output messages about what the compiler is doing
-  -deprecation               Output source locations where deprecated APIs are used
-  -classpath <path>          Specify where to find user class files and annotation processors
-  -cp <path>                 Specify where to find user class files and annotation processors
-  -sourcepath <path>         Specify where to find input source files
-  -bootclasspath <path>      Override location of bootstrap class files
-  -extdirs <dirs>            Override location of installed extensions
-  -endorseddirs <dirs>       Override location of endorsed standards path
-  -proc:{none, only}         Control whether annotation processing and/or compilation is done.
-  -processor <class>         Name of the annotation processor to run; bypasses default discovery process
-  -processorpath <path>      Specify where to find annotation processors
-  -d <directory>             Specify where to place generated class files
-  -s <directory>             Specify where to place generated source files
-  -encoding <encoding>       Specify character encoding used by source files
-  -source <release>          Provide source compatibility with specified release
-  -target <release>          Generate class files for specific VM version
-  -version                   Version information
-  -help                      Print a synopsis of standard options
-  -A[key[=value]]            Options to pass to annotation processors
-  -X                         Print a synopsis of nonstandard options
-  -J<flag>                   Pass <flag> directly to the runtime system
-
-files/cli/test2/Main.java:5: package test1 does not exist
-    test1.Main.main(args);
-         ^
-1 error
diff --git a/test/attic/files/cli/test2/Main.check.jikes b/test/attic/files/cli/test2/Main.check.jikes
deleted file mode 100644
index 97943e8..0000000
--- a/test/attic/files/cli/test2/Main.check.jikes
+++ /dev/null
@@ -1,9 +0,0 @@
-Error: "-dd" is an invalid option.
-use: jikes [options] [@files] file.java...
-For more help, try -help or -version.
-
-Found 1 semantic error compiling "files/cli/test2/Main.java":
-
-     6.     test1.Main.main(args);
-            ^---^
-*** Semantic Error: No accessible field named "test1" was found in type "test2.Main".
diff --git a/test/attic/files/cli/test2/Main.check.jikes5 b/test/attic/files/cli/test2/Main.check.jikes5
deleted file mode 100644
index 97943e8..0000000
--- a/test/attic/files/cli/test2/Main.check.jikes5
+++ /dev/null
@@ -1,9 +0,0 @@
-Error: "-dd" is an invalid option.
-use: jikes [options] [@files] file.java...
-For more help, try -help or -version.
-
-Found 1 semantic error compiling "files/cli/test2/Main.java":
-
-     6.     test1.Main.main(args);
-            ^---^
-*** Semantic Error: No accessible field named "test1" was found in type "test2.Main".
diff --git a/test/attic/files/cli/test2/Main.check.scala b/test/attic/files/cli/test2/Main.check.scala
deleted file mode 100644
index 7e5f176..0000000
--- a/test/attic/files/cli/test2/Main.check.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-unknown option: '-cpp'
-scala [ <option> ]... [<torun> <arguments>]
-
-All options to scalac are allowed.  See scalac -help.
-
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
-
-Option -howtorun allows explicitly specifying how to run <torun>:
-    script: it is a script file
-    object: it is an object name
-    guess: (the default) try to guess
-
-Option -savecompiled requests that the compiled script be saved
-for future use.
-
-Option -nocompdaemon requests that the fsc offline compiler not be used.
-
-Option -Dproperty=value sets a Java system property.
-
-2: 1: test 1 passed (1)
-2: 1: test 2 passed (1)
-2: 1: test 3 passed (1)
-2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test2/Main.check.scala_api b/test/attic/files/cli/test2/Main.check.scala_api
deleted file mode 100644
index bcb0f0c..0000000
--- a/test/attic/files/cli/test2/Main.check.scala_api
+++ /dev/null
@@ -1,37 +0,0 @@
-|-- all-classes.html
-|-- index.html
-|-- modules.html
-|-- nav-classes.html
-|-- root-content.html
-|-- scala
-|   |-- Any.html
-|   |-- AnyRef.html
-|   |-- AnyVal.html
-|   |-- Boolean.html
-|   |-- Byte.html
-|   |-- Char.html
-|   |-- Double.html
-|   |-- Float.html
-|   |-- Int.html
-|   |-- Long.html
-|   |-- Nothing.html
-|   |-- Null.html
-|   |-- Short.html
-|   |-- Unit.html
-|   `-- runtime
-|       |-- BoxedFloat.html
-|       |-- BoxedInt.html
-|       |-- BoxedLong.html
-|       `-- BoxedNumber.html
-|-- script.js
-|-- style.css
-|-- test1
-|   `-- Main$object.html
-|-- test1$content.html
-|-- test1$package.html
-|-- test2
-|   `-- Main$object.html
-|-- test2$content.html
-`-- test2$package.html
-
-4 directories, 31 files
diff --git a/test/attic/files/cli/test2/Main.check.scala_j9 b/test/attic/files/cli/test2/Main.check.scala_j9
deleted file mode 100644
index 80cbb50..0000000
--- a/test/attic/files/cli/test2/Main.check.scala_j9
+++ /dev/null
@@ -1,15 +0,0 @@
-unknown option: '-cpp'
-scala [ <compiler-option> | -howtorun:how ]... [<torun> <arguments>]
-
-<compiler-option>'s are as for scalac; see scalac -help.
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
--howtorun allows explicitly specifying how to run <torun>:
-    script: it is a script file
-    object: it is an object name
-    guess: (the default) try to guess
-
-2: 1: test 1 passed (1)
-2: 1: test 2 passed (1)
-2: 1: test 3 passed (1)
-2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test2/Main.check.scalac b/test/attic/files/cli/test2/Main.check.scalac
deleted file mode 100644
index 8465810..0000000
--- a/test/attic/files/cli/test2/Main.check.scalac
+++ /dev/null
@@ -1,63 +0,0 @@
-scalac error: bad option: '-dd'
-  scalac -help  gives more information
-Usage: scalac <options | source files>
-where possible options include: 
-  -doc                            Generate documentation
-  -g:<g>                          Generate debugging info (none,source,line,vars,notc)
-  -nowarn                         Generate no warnings
-  -noassert                       Generate no assertions and assumptions
-  -verbose                        Output messages about what the compiler is doing
-  -classpath <path>               Specify where to find user class files
-  -sourcepath <path>              Specify where to find input source files
-  -bootclasspath <path>           Override location of bootstrap class files
-  -extdirs <dirs>                 Override location of installed extensions
-  -d <directory>                  Specify where to place generated class files
-  -encoding <encoding>            Specify character encoding used by source files
-  -windowtitle <windowtitle>      Specify window title of generated HTML documentation
-  -documenttitle <documenttitle>  Specify document title of generated HTML documentation
-  -target:<target>                Specify which backend to use (jvm-1.5,msil)
-  -migrate                        Assist in migrating from Scala version 1.0
-  -o <file>                       Name of the output assembly (only relevant with -target:msil)
-  -r <path>                       List of assemblies referenced by the program (only relevant with -target:msil)
-  -debug                          Output debugging messages
-  -deprecation                    enable detailed deprecation warnings
-  -unchecked                      enable detailed unchecked warnings
-  -statistics                     Print compiler statistics
-  -explaintypes                   Explain type errors in more detail
-  -resident                       Compiler stays resident, files to compile are read from standard input
-  -uniqid                         Print identifiers with unique names (debugging option)
-  -printtypes                     Print tree types (debugging option)
-  -prompt                         Display a prompt after each error (debugging option)
-  -noimports                      Compile without any implicit imports
-  -nopredefs                      Compile without any implicit predefined values
-  -skip:<phase>                   Skip <phase>
-  -check:<phase>                  Check the tree at start of <phase>
-  -print:<phase>                  Print out program after <phase>
-  -printer:<printer>              Printer to use (text,html)
-  -printfile <file>               Specify file in which to print trees
-  -graph:<phase>                  Graph the program after <phase>
-  -browse:<phase>                 Browse the abstract syntax tree after <phase>
-  -stop:<phase>                   Stop after phase <phase>
-  -log:<phase>                    Log operations in <phase>
-  -logall                         Log all operations
-  -version                        Print product version and exit
-  -help                           Print a synopsis of standard options
-  -nouescape                      disables handling of \u unicode escapes
-  -Xinline                        Perform inlining when possible
-  -XO                             Optimize. implies -Xinline, -Xcloselim and -Xdce
-  -Xcloselim                      Perform closure elimination
-  -Xdce                           Perform dead code elimination
-  -Xwarndeadcode                  Emit warnings for dead code
-  -XbytecodeRead                  Enable bytecode reader.
-  -Xdetach                        Perform detaching of remote closures
-  -Xshowcls <class>               Show class info
-  -Xshowobj <object>              Show object info
-  -Xlinearizer:<Xlinearizer>      Linearizer to use (normal,dfs,rpo,dump)
-  -Xgenerics                      Use generic Java types
-  -Xprintpos                      Print tree positions (as offsets)
-  -Xscript                        compile script file
-  -Xexperimental                  enable experimental extensions
-  -Xplugtypes                     parse but ignore annotations in more locations
-  -Xkilloption                    optimizes option types
-
-one error found
diff --git a/test/attic/files/cli/test2/Main.check.scalaint b/test/attic/files/cli/test2/Main.check.scalaint
deleted file mode 100644
index 89b6766..0000000
--- a/test/attic/files/cli/test2/Main.check.scalaint
+++ /dev/null
@@ -1,45 +0,0 @@
-unknown option: '-cpp'
-scala [ <option> ]... [<torun> <arguments>]
-
-All options to scalac are allowed.  See scalac -help.
-
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
-
-Option -howtorun allows explicitly specifying how to run <torun>:
-    script: it is a script file
-    object: it is an object name
-    guess: (the default) try to guess
-
-Option -savecompiled requests that the compiled script be saved
-for future use.
-
-Option -nocompdaemon requests that the fsc offline compiler not be used.
-
-Option -Dproperty=value sets a Java system property.
-
-
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 2: 1: test 1 passed (1)
-unnamed0: scala.Unit = ()
-
-scala> 
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 2: 1: test 2 passed (1)
-unnamed0: scala.Unit = ()
-
-scala> 
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 2: 1: test 3 passed (1)
-unnamed0: scala.Unit = ()
-
-scala> 
diff --git a/test/attic/files/cli/test2/Main.java b/test/attic/files/cli/test2/Main.java
deleted file mode 100644
index f679763..0000000
--- a/test/attic/files/cli/test2/Main.java
+++ /dev/null
@@ -1,8 +0,0 @@
-// @info 1 dependency
-package test2;
-public class Main {
-  public static void main(String args[]) {
-    System.out.print("2: ");
-    test1.Main.main(args);
-  }
-}
diff --git a/test/attic/files/cli/test2/Main.scala b/test/attic/files/cli/test2/Main.scala
deleted file mode 100644
index 11c878b..0000000
--- a/test/attic/files/cli/test2/Main.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-// @info 1 dependency
-package test2 
-object Main {
-  def main(args: Array[String]) = {
-    Console.print("2: ")
-    test1.Main.main(args)
-  }
-}
diff --git a/test/attic/files/cli/test3/Main.check.j9vm5 b/test/attic/files/cli/test3/Main.check.j9vm5
deleted file mode 100644
index a094dc8..0000000
--- a/test/attic/files/cli/test3/Main.check.j9vm5
+++ /dev/null
@@ -1,5 +0,0 @@
-env: -cpp: No such file or directory
-env: test3.Main: No such file or directory
-env: -cp: No such file or directory
-3: 1: test 3 passed
-3: 2: 1: test 3 passed
diff --git a/test/attic/files/cli/test3/Main.check.java b/test/attic/files/cli/test3/Main.check.java
deleted file mode 100644
index de3eb7b..0000000
--- a/test/attic/files/cli/test3/Main.check.java
+++ /dev/null
@@ -1,10 +0,0 @@
-Unrecognized option: -cpp
-Could not create the Java virtual machine.
-3: 1: test 1 passed (1)
-3: 2: 1: test 1 passed (1)
-3: 1: test 2 passed (1)
-3: 2: 1: test 2 passed (1)
-3: 1: test 3 passed (1)
-3: 2: 1: test 3 passed (1)
-3: 1: test 4 passed (2)
-3: 2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test3/Main.check.java5 b/test/attic/files/cli/test3/Main.check.java5
deleted file mode 100644
index de3eb7b..0000000
--- a/test/attic/files/cli/test3/Main.check.java5
+++ /dev/null
@@ -1,10 +0,0 @@
-Unrecognized option: -cpp
-Could not create the Java virtual machine.
-3: 1: test 1 passed (1)
-3: 2: 1: test 1 passed (1)
-3: 1: test 2 passed (1)
-3: 2: 1: test 2 passed (1)
-3: 1: test 3 passed (1)
-3: 2: 1: test 3 passed (1)
-3: 1: test 4 passed (2)
-3: 2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test3/Main.check.java5_api b/test/attic/files/cli/test3/Main.check.java5_api
deleted file mode 100644
index f611221..0000000
--- a/test/attic/files/cli/test3/Main.check.java5_api
+++ /dev/null
@@ -1,29 +0,0 @@
-|-- allclasses-frame.html
-|-- allclasses-noframe.html
-|-- constant-values.html
-|-- deprecated-list.html
-|-- help-doc.html
-|-- index-all.html
-|-- index.html
-|-- overview-tree.html
-|-- package-list
-|-- resources
-|   `-- inherit.gif
-|-- stylesheet.css
-|-- test1
-|   |-- Main.html
-|   |-- package-frame.html
-|   |-- package-summary.html
-|   `-- package-tree.html
-|-- test2
-|   |-- Main.html
-|   |-- package-frame.html
-|   |-- package-summary.html
-|   `-- package-tree.html
-`-- test3
-    |-- Main.html
-    |-- package-frame.html
-    |-- package-summary.html
-    `-- package-tree.html
-
-4 directories, 23 files
diff --git a/test/attic/files/cli/test3/Main.check.java5_j9 b/test/attic/files/cli/test3/Main.check.java5_j9
deleted file mode 100644
index 9e228d7..0000000
--- a/test/attic/files/cli/test3/Main.check.java5_j9
+++ /dev/null
@@ -1,36 +0,0 @@
-JVMJ9VM007E Command-line option unrecognised: -cpp
-Could not create the Java virtual machine.
-
-Usage: java [-options] class [args...]
-           (to execute a class)
-   or  java [-jar] [-options] jarfile [args...]
-           (to execute a jar file)
-
-where options include:
-    -cp -classpath <directories and zip/jar files separated by :>
-              set search path for application classes and resources
-    -D<name>=<value>
-              set a system property
-    -verbose[:class|gc|jni]
-              enable verbose output
-    -version  print product version
-    -version:<value>
-        require the specified version to run
-    -showversion  print product version and continue
-    -jre-restrict-search | -no-jre-restrict-search
-              include/exclude user private JREs in the version search
-    -agentlib:<libname>[=<options>]
-              load native agent library <libname>, e.g. -agentlib:hprof
-              see also, -agentlib:jdwp=help and -agentlib:hprof=help
-    -agentpath:<pathname>[=<options>]
-              load native agent library by full pathname
-    -javaagent:<jarpath>[=<options>]
-              load Java programming language agent, see java.lang.instrument
-    -? -help  print this help message
-    -X        print help on non-standard options
-    -assert   print help on assert options
-
-The java class is not found:  test3.Main
-The java class is not found:  test3.Main
-The java class is not found:  test3.Main
-The java class is not found:  test3.Main
diff --git a/test/attic/files/cli/test3/Main.check.javac b/test/attic/files/cli/test3/Main.check.javac
deleted file mode 100644
index 8d235b6..0000000
--- a/test/attic/files/cli/test3/Main.check.javac
+++ /dev/null
@@ -1,33 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
-  -g                        Generate all debugging info
-  -g:none                   Generate no debugging info
-  -g:{lines,vars,source}    Generate only some debugging info
-  -nowarn                   Generate no warnings
-  -verbose                  Output messages about what the compiler is doing
-  -deprecation              Output source locations where deprecated APIs are used
-  -classpath <path>         Specify where to find user class files
-  -sourcepath <path>        Specify where to find input source files
-  -bootclasspath <path>     Override location of bootstrap class files
-  -extdirs <dirs>           Override location of installed extensions
-  -d <directory>            Specify where to place generated class files
-  -encoding <encoding>      Specify character encoding used by source files
-  -source <release>         Provide source compatibility with specified release
-  -target <release>         Generate class files for specific VM version
-  -help                     Print a synopsis of standard options
-
-files/cli/test3/Main.java:6: package test1 does not exist
-    test1.Main.main(args);
-         ^
-files/cli/test3/Main.java:8: package test2 does not exist
-    test2.Main.main(args);
-         ^
-2 errors
-files/cli/test3/Main.java:6: package test1 does not exist
-    test1.Main.main(args);
-         ^
-files/cli/test3/Main.java:8: package test2 does not exist
-    test2.Main.main(args);
-         ^
-2 errors
diff --git a/test/attic/files/cli/test3/Main.check.javac5 b/test/attic/files/cli/test3/Main.check.javac5
deleted file mode 100644
index 3a48fa0..0000000
--- a/test/attic/files/cli/test3/Main.check.javac5
+++ /dev/null
@@ -1,31 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
-  -g                         Generate all debugging info
-  -g:none                    Generate no debugging info
-  -g:{lines,vars,source}     Generate only some debugging info
-  -nowarn                    Generate no warnings
-  -verbose                   Output messages about what the compiler is doing
-  -deprecation               Output source locations where deprecated APIs are used
-  -classpath <path>          Specify where to find user class files
-  -cp <path>                 Specify where to find user class files
-  -sourcepath <path>         Specify where to find input source files
-  -bootclasspath <path>      Override location of bootstrap class files
-  -extdirs <dirs>            Override location of installed extensions
-  -endorseddirs <dirs>       Override location of endorsed standards path
-  -d <directory>             Specify where to place generated class files
-  -encoding <encoding>       Specify character encoding used by source files
-  -source <release>          Provide source compatibility with specified release
-  -target <release>          Generate class files for specific VM version
-  -version                   Version information
-  -help                      Print a synopsis of standard options
-  -X                         Print a synopsis of nonstandard options
-  -J<flag>                   Pass <flag> directly to the runtime system
-
-files/cli/test3/Main.java:6: package test1 does not exist
-    test1.Main.main(args);
-         ^
-files/cli/test3/Main.java:8: package test2 does not exist
-    test2.Main.main(args);
-         ^
-2 errors
diff --git a/test/attic/files/cli/test3/Main.check.javac6 b/test/attic/files/cli/test3/Main.check.javac6
deleted file mode 100644
index 677b950..0000000
--- a/test/attic/files/cli/test3/Main.check.javac6
+++ /dev/null
@@ -1,36 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
-  -g                         Generate all debugging info
-  -g:none                    Generate no debugging info
-  -g:{lines,vars,source}     Generate only some debugging info
-  -nowarn                    Generate no warnings
-  -verbose                   Output messages about what the compiler is doing
-  -deprecation               Output source locations where deprecated APIs are used
-  -classpath <path>          Specify where to find user class files and annotation processors
-  -cp <path>                 Specify where to find user class files and annotation processors
-  -sourcepath <path>         Specify where to find input source files
-  -bootclasspath <path>      Override location of bootstrap class files
-  -extdirs <dirs>            Override location of installed extensions
-  -endorseddirs <dirs>       Override location of endorsed standards path
-  -proc:{none, only}         Control whether annotation processing and/or compilation is done.
-  -processor <class>         Name of the annotation processor to run; bypasses default discovery process
-  -processorpath <path>      Specify where to find annotation processors
-  -d <directory>             Specify where to place generated class files
-  -s <directory>             Specify where to place generated source files
-  -encoding <encoding>       Specify character encoding used by source files
-  -source <release>          Provide source compatibility with specified release
-  -target <release>          Generate class files for specific VM version
-  -version                   Version information
-  -help                      Print a synopsis of standard options
-  -A[key[=value]]            Options to pass to annotation processors
-  -X                         Print a synopsis of nonstandard options
-  -J<flag>                   Pass <flag> directly to the runtime system
-
-files/cli/test3/Main.java:5: package test1 does not exist
-    test1.Main.main(args);
-         ^
-files/cli/test3/Main.java:7: package test2 does not exist
-    test2.Main.main(args);
-         ^
-2 errors
diff --git a/test/attic/files/cli/test3/Main.check.jikes b/test/attic/files/cli/test3/Main.check.jikes
deleted file mode 100644
index 604333e..0000000
--- a/test/attic/files/cli/test3/Main.check.jikes
+++ /dev/null
@@ -1,14 +0,0 @@
-Error: "-dd" is an invalid option.
-use: jikes [options] [@files] file.java...
-For more help, try -help or -version.
-
-Found 2 semantic errors compiling "files/cli/test3/Main.java":
-
-     6.     test1.Main.main(args);
-            ^---^
-*** Semantic Error: No accessible field named "test1" was found in type "test3.Main".
-
-
-     8.     test2.Main.main(args);
-            ^---^
-*** Semantic Error: No accessible field named "test2" was found in type "test3.Main".
diff --git a/test/attic/files/cli/test3/Main.check.jikes5 b/test/attic/files/cli/test3/Main.check.jikes5
deleted file mode 100644
index 604333e..0000000
--- a/test/attic/files/cli/test3/Main.check.jikes5
+++ /dev/null
@@ -1,14 +0,0 @@
-Error: "-dd" is an invalid option.
-use: jikes [options] [@files] file.java...
-For more help, try -help or -version.
-
-Found 2 semantic errors compiling "files/cli/test3/Main.java":
-
-     6.     test1.Main.main(args);
-            ^---^
-*** Semantic Error: No accessible field named "test1" was found in type "test3.Main".
-
-
-     8.     test2.Main.main(args);
-            ^---^
-*** Semantic Error: No accessible field named "test2" was found in type "test3.Main".
diff --git a/test/attic/files/cli/test3/Main.check.scala b/test/attic/files/cli/test3/Main.check.scala
deleted file mode 100644
index f78729b..0000000
--- a/test/attic/files/cli/test3/Main.check.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-unknown option: '-cpp'
-scala [ <option> ]... [<torun> <arguments>]
-
-All options to scalac are allowed.  See scalac -help.
-
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
-
-Option -howtorun allows explicitly specifying how to run <torun>:
-    script: it is a script file
-    object: it is an object name
-    guess: (the default) try to guess
-
-Option -savecompiled requests that the compiled script be saved
-for future use.
-
-Option -nocompdaemon requests that the fsc offline compiler not be used.
-
-Option -Dproperty=value sets a Java system property.
-
-3: 1: test 1 passed (1)
-3: 2: 1: test 1 passed (1)
-3: 1: test 2 passed (1)
-3: 2: 1: test 2 passed (1)
-3: 1: test 3 passed (1)
-3: 2: 1: test 3 passed (1)
-3: 1: test 4 passed (2)
-3: 2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test3/Main.check.scala_api b/test/attic/files/cli/test3/Main.check.scala_api
deleted file mode 100644
index 4552819..0000000
--- a/test/attic/files/cli/test3/Main.check.scala_api
+++ /dev/null
@@ -1,41 +0,0 @@
-|-- all-classes.html
-|-- index.html
-|-- modules.html
-|-- nav-classes.html
-|-- root-content.html
-|-- scala
-|   |-- Any.html
-|   |-- AnyRef.html
-|   |-- AnyVal.html
-|   |-- Boolean.html
-|   |-- Byte.html
-|   |-- Char.html
-|   |-- Double.html
-|   |-- Float.html
-|   |-- Int.html
-|   |-- Long.html
-|   |-- Nothing.html
-|   |-- Null.html
-|   |-- Short.html
-|   |-- Unit.html
-|   `-- runtime
-|       |-- BoxedFloat.html
-|       |-- BoxedInt.html
-|       |-- BoxedLong.html
-|       `-- BoxedNumber.html
-|-- script.js
-|-- style.css
-|-- test1
-|   `-- Main$object.html
-|-- test1$content.html
-|-- test1$package.html
-|-- test2
-|   `-- Main$object.html
-|-- test2$content.html
-|-- test2$package.html
-|-- test3
-|   `-- Main$object.html
-|-- test3$content.html
-`-- test3$package.html
-
-5 directories, 34 files
diff --git a/test/attic/files/cli/test3/Main.check.scala_j9 b/test/attic/files/cli/test3/Main.check.scala_j9
deleted file mode 100644
index 3804c17..0000000
--- a/test/attic/files/cli/test3/Main.check.scala_j9
+++ /dev/null
@@ -1,19 +0,0 @@
-unknown option: '-cpp'
-scala [ <compiler-option> | -howtorun:how ]... [<torun> <arguments>]
-
-<compiler-option>'s are as for scalac; see scalac -help.
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
--howtorun allows explicitly specifying how to run <torun>:
-    script: it is a script file
-    object: it is an object name
-    guess: (the default) try to guess
-
-3: 1: test 1 passed (1)
-3: 2: 1: test 1 passed (1)
-3: 1: test 2 passed (1)
-3: 2: 1: test 2 passed (1)
-3: 1: test 3 passed (1)
-3: 2: 1: test 3 passed (1)
-3: 1: test 4 passed (2)
-3: 2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test3/Main.check.scalac b/test/attic/files/cli/test3/Main.check.scalac
deleted file mode 100644
index 8465810..0000000
--- a/test/attic/files/cli/test3/Main.check.scalac
+++ /dev/null
@@ -1,63 +0,0 @@
-scalac error: bad option: '-dd'
-  scalac -help  gives more information
-Usage: scalac <options | source files>
-where possible options include: 
-  -doc                            Generate documentation
-  -g:<g>                          Generate debugging info (none,source,line,vars,notc)
-  -nowarn                         Generate no warnings
-  -noassert                       Generate no assertions and assumptions
-  -verbose                        Output messages about what the compiler is doing
-  -classpath <path>               Specify where to find user class files
-  -sourcepath <path>              Specify where to find input source files
-  -bootclasspath <path>           Override location of bootstrap class files
-  -extdirs <dirs>                 Override location of installed extensions
-  -d <directory>                  Specify where to place generated class files
-  -encoding <encoding>            Specify character encoding used by source files
-  -windowtitle <windowtitle>      Specify window title of generated HTML documentation
-  -documenttitle <documenttitle>  Specify document title of generated HTML documentation
-  -target:<target>                Specify which backend to use (jvm-1.5,msil)
-  -migrate                        Assist in migrating from Scala version 1.0
-  -o <file>                       Name of the output assembly (only relevant with -target:msil)
-  -r <path>                       List of assemblies referenced by the program (only relevant with -target:msil)
-  -debug                          Output debugging messages
-  -deprecation                    enable detailed deprecation warnings
-  -unchecked                      enable detailed unchecked warnings
-  -statistics                     Print compiler statistics
-  -explaintypes                   Explain type errors in more detail
-  -resident                       Compiler stays resident, files to compile are read from standard input
-  -uniqid                         Print identifiers with unique names (debugging option)
-  -printtypes                     Print tree types (debugging option)
-  -prompt                         Display a prompt after each error (debugging option)
-  -noimports                      Compile without any implicit imports
-  -nopredefs                      Compile without any implicit predefined values
-  -skip:<phase>                   Skip <phase>
-  -check:<phase>                  Check the tree at start of <phase>
-  -print:<phase>                  Print out program after <phase>
-  -printer:<printer>              Printer to use (text,html)
-  -printfile <file>               Specify file in which to print trees
-  -graph:<phase>                  Graph the program after <phase>
-  -browse:<phase>                 Browse the abstract syntax tree after <phase>
-  -stop:<phase>                   Stop after phase <phase>
-  -log:<phase>                    Log operations in <phase>
-  -logall                         Log all operations
-  -version                        Print product version and exit
-  -help                           Print a synopsis of standard options
-  -nouescape                      disables handling of \u unicode escapes
-  -Xinline                        Perform inlining when possible
-  -XO                             Optimize. implies -Xinline, -Xcloselim and -Xdce
-  -Xcloselim                      Perform closure elimination
-  -Xdce                           Perform dead code elimination
-  -Xwarndeadcode                  Emit warnings for dead code
-  -XbytecodeRead                  Enable bytecode reader.
-  -Xdetach                        Perform detaching of remote closures
-  -Xshowcls <class>               Show class info
-  -Xshowobj <object>              Show object info
-  -Xlinearizer:<Xlinearizer>      Linearizer to use (normal,dfs,rpo,dump)
-  -Xgenerics                      Use generic Java types
-  -Xprintpos                      Print tree positions (as offsets)
-  -Xscript                        compile script file
-  -Xexperimental                  enable experimental extensions
-  -Xplugtypes                     parse but ignore annotations in more locations
-  -Xkilloption                    optimizes option types
-
-one error found
diff --git a/test/attic/files/cli/test3/Main.check.scalaint b/test/attic/files/cli/test3/Main.check.scalaint
deleted file mode 100644
index cffa02c..0000000
--- a/test/attic/files/cli/test3/Main.check.scalaint
+++ /dev/null
@@ -1,48 +0,0 @@
-unknown option: '-cpp'
-scala [ <option> ]... [<torun> <arguments>]
-
-All options to scalac are allowed.  See scalac -help.
-
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
-
-Option -howtorun allows explicitly specifying how to run <torun>:
-    script: it is a script file
-    object: it is an object name
-    guess: (the default) try to guess
-
-Option -savecompiled requests that the compiled script be saved
-for future use.
-
-Option -nocompdaemon requests that the fsc offline compiler not be used.
-
-Option -Dproperty=value sets a Java system property.
-
-
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 3: 1: test 1 passed (1)
-3: 2: 1: test 1 passed (1)
-unnamed0: scala.Unit = ()
-
-scala> 
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 3: 1: test 2 passed (1)
-3: 2: 1: test 2 passed (1)
-unnamed0: scala.Unit = ()
-
-scala> 
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 3: 1: test 3 passed (1)
-3: 2: 1: test 3 passed (1)
-unnamed0: scala.Unit = ()
-
-scala> 
diff --git a/test/attic/files/cli/test3/Main.java b/test/attic/files/cli/test3/Main.java
deleted file mode 100644
index 208863d..0000000
--- a/test/attic/files/cli/test3/Main.java
+++ /dev/null
@@ -1,10 +0,0 @@
-// @info 2 dependency
-package test3;
-public class Main {
-  public static void main(String args[]) {
-    System.out.print("3: ");
-    test1.Main.main(args);
-    System.out.print("3: ");
-    test2.Main.main(args);
-  }
-}
diff --git a/test/attic/files/cli/test3/Main.scala b/test/attic/files/cli/test3/Main.scala
deleted file mode 100644
index 63fc11b..0000000
--- a/test/attic/files/cli/test3/Main.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-// @info 2 dependencies
-package test3
-object Main {
-  def main(args: Array[String]) = {
-    Console.print("3: ")
-    test1.Main.main(args)
-    Console.print("3: ")
-    test2.Main.main(args)
-  }
-}
diff --git a/test/files/disabled/A.scala b/test/disabled/buildmanager/overloaded_1/A.scala
similarity index 100%
rename from test/files/disabled/A.scala
rename to test/disabled/buildmanager/overloaded_1/A.scala
diff --git a/test/files/disabled/overloaded_1.check b/test/disabled/buildmanager/overloaded_1/overloaded_1.check
similarity index 100%
rename from test/files/disabled/overloaded_1.check
rename to test/disabled/buildmanager/overloaded_1/overloaded_1.check
diff --git a/test/files/disabled/overloaded_1.test b/test/disabled/buildmanager/overloaded_1/overloaded_1.test
similarity index 100%
rename from test/files/disabled/overloaded_1.test
rename to test/disabled/buildmanager/overloaded_1/overloaded_1.test
diff --git a/test/files/disabled/t4245/A.scala b/test/disabled/buildmanager/t4245/A.scala
similarity index 100%
rename from test/files/disabled/t4245/A.scala
rename to test/disabled/buildmanager/t4245/A.scala
diff --git a/test/files/disabled/t4245/t4245.check b/test/disabled/buildmanager/t4245/t4245.check
similarity index 100%
rename from test/files/disabled/t4245/t4245.check
rename to test/disabled/buildmanager/t4245/t4245.check
diff --git a/test/files/disabled/t4245/t4245.test b/test/disabled/buildmanager/t4245/t4245.test
similarity index 100%
rename from test/files/disabled/t4245/t4245.test
rename to test/disabled/buildmanager/t4245/t4245.test
diff --git a/test/disabled/continuations-neg/infer0.check b/test/disabled/continuations-neg/infer0.check
deleted file mode 100644
index 1dd072e..0000000
--- a/test/disabled/continuations-neg/infer0.check
+++ /dev/null
@@ -1,4 +0,0 @@
-infer0.scala:11: error: cannot cps-transform expression 8: type arguments [Int(8),String,Int] do not conform to method shiftUnit's type parameter bounds [A,B,C >: B]
-    test(8)
-         ^
-one error found
diff --git a/test/disabled/continuations-neg/infer0.scala b/test/disabled/continuations-neg/infer0.scala
deleted file mode 100644
index 6d97d75..0000000
--- a/test/disabled/continuations-neg/infer0.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
-  def test(x: => Int @cpsParam[String,Int]) = 7
-
-  def main(args: Array[String]) {
-    test(8)
-  }
-}
diff --git a/test/disabled/pos/spec-List.scala b/test/disabled/pos/spec-List.scala
index 81e55f4..b31e035 100644
--- a/test/disabled/pos/spec-List.scala
+++ b/test/disabled/pos/spec-List.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/test/disabled/presentation/akka.flags b/test/disabled/presentation/akka.flags
index 56d026a..9bf2878 100644
--- a/test/disabled/presentation/akka.flags
+++ b/test/disabled/presentation/akka.flags
@@ -12,7 +12,7 @@
 # running partest from. Run it from the root scala checkout for these files to resolve correctly
 # (by default when running 'ant test', or 'test/partest'). Paths use Unix separators, the test
 # framework translates them to the platform dependent representation.
-# -bootclasspath lib/scala-compiler.jar:lib/scala-library.jar:lib/fjbg.jar
+# -bootclasspath lib/scala-compiler.jar:lib/scala-library.jar
 
 # the following line would test using the quick compiler
-# -bootclasspath build/quick/classes/compiler:build/quick/classes/library:lib/fjbg.jar
+# -bootclasspath build/quick/classes/compiler:build/quick/classes/library
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala b/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala
index 7dd1bf6..a567d0b 100644
--- a/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala
+++ b/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala
@@ -89,7 +89,7 @@ object Dispatchers {
     new ThreadBasedDispatcher(actor, mailboxCapacity, pushTimeOut)
 
   /**
-   * Creates a executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
+   * Creates an executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
    * <p/>
    * Has a fluent builder interface for configuring its semantics.
    */
@@ -97,7 +97,7 @@ object Dispatchers {
     ThreadPoolConfigDispatcherBuilder(config => new ExecutorBasedEventDrivenDispatcher(name, config), ThreadPoolConfig())
 
   /**
-   * Creates a executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
+   * Creates an executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
    * <p/>
    * Has a fluent builder interface for configuring its semantics.
    */
@@ -106,7 +106,7 @@ object Dispatchers {
       new ExecutorBasedEventDrivenDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType, config), ThreadPoolConfig())
 
   /**
-   * Creates a executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
+   * Creates an executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
    * <p/>
    * Has a fluent builder interface for configuring its semantics.
    */
@@ -115,7 +115,7 @@ object Dispatchers {
       new ExecutorBasedEventDrivenDispatcher(name, throughput, throughputDeadlineMs, mailboxType, config), ThreadPoolConfig())
 
   /**
-   * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+   * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
    * <p/>
    * Has a fluent builder interface for configuring its semantics.
    */
@@ -123,7 +123,7 @@ object Dispatchers {
     ThreadPoolConfigDispatcherBuilder(config => new ExecutorBasedEventDrivenWorkStealingDispatcher(name, config), ThreadPoolConfig())
 
   /**
-   * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+   * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
    * <p/>
    * Has a fluent builder interface for configuring its semantics.
    */
@@ -132,7 +132,7 @@ object Dispatchers {
       new ExecutorBasedEventDrivenWorkStealingDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, MAILBOX_TYPE, config), ThreadPoolConfig())
 
   /**
-   * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+   * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
    * <p/>
    * Has a fluent builder interface for configuring its semantics.
    */
@@ -141,7 +141,7 @@ object Dispatchers {
       new ExecutorBasedEventDrivenWorkStealingDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType, config), ThreadPoolConfig())
 
   /**
-   * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+   * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
    * <p/>
    * Has a fluent builder interface for configuring its semantics.
    */
@@ -224,4 +224,4 @@ class ExecutorBasedEventDrivenWorkStealingDispatcherConfigurator extends Message
       mailboxType(config),
       threadPoolConfig)).build
   }
-}
\ No newline at end of file
+}
diff --git a/test/disabled/presentation/doc/doc.scala b/test/disabled/presentation/doc/doc.scala
index 371b825..f2233f1 100755
--- a/test/disabled/presentation/doc/doc.scala
+++ b/test/disabled/presentation/doc/doc.scala
@@ -1,9 +1,9 @@
+import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
 import scala.tools.nsc.doc
 import scala.tools.nsc.doc.base._
 import scala.tools.nsc.doc.base.comment._
 import scala.tools.nsc.interactive._
 import scala.tools.nsc.interactive.tests._
-import scala.tools.nsc.util._
 
 object Test extends InteractiveTest {
   val tags = Seq(
@@ -37,12 +37,20 @@ object Test extends InteractiveTest {
     prepre + docComment(nTags) + prepost + post
   }
 
-
-
   override lazy val compiler = {
     prepareSettings(settings)
-    new Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase {
+    new Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase with doc.ScaladocGlobalTrait {
+      outer =>
+
       val global: this.type = this
+
+      override lazy val analyzer = new {
+        val global: outer.type = outer
+      } with doc.ScaladocAnalyzer with InteractiveAnalyzer {
+        override def newTyper(context: Context): InteractiveTyper with ScaladocTyper =
+          new Typer(context) with InteractiveTyper with ScaladocTyper
+      }
+
       def chooseLink(links: List[LinkTo]): LinkTo = links.head
       def internalLink(sym: Symbol, site: Symbol) = None
       def toString(link: LinkTo) = link.toString
@@ -59,7 +67,7 @@ object Test extends InteractiveTest {
             if (expanded.isEmpty)
               None
             else
-              Some(ask { () => parseAtSymbol(expanded, raw, pos, Some(sym.owner)) })
+              Some(ask { () => parseAtSymbol(expanded, raw, pos, sym.owner) })
         }
       }
     }
@@ -88,12 +96,11 @@ object Test extends InteractiveTest {
               println("Couldn't parse")
             case Some(_) =>
               val sym = compiler.ask { () =>
-                val toplevel = definitions.EmptyPackage.info.decl(newTypeName(name))
+                val toplevel = compiler.rootMirror.EmptyPackage.info.decl(TypeName(name))
                 if (toplevel eq NoSymbol) {
-                  val clazz = definitions.EmptyPackage.info.decl(newTypeName(className))
-
-                  val term = clazz.info.decl(newTermName(name))
-                  if (term eq NoSymbol) clazz.info.decl(newTypeName(name)) else
+                  val clazz = compiler.rootMirror.EmptyPackage.info.decl(TypeName(className))
+                  val term = clazz.info.decl(TermName(name))
+                  if (term eq NoSymbol) clazz.info.decl(TypeName(name)) else
                     if (term.isAccessor) term.accessed else term
                 } else toplevel
               }
@@ -115,16 +122,17 @@ object Test extends InteractiveTest {
     val baseSource = findSource("Base.scala")
     val derivedSource = findSource("Derived.scala")
     def existsText(where: Any, text: String): Boolean = where match {
-      case `text` => true
+      case s: String => s contains text
       case s: Seq[_] => s exists (existsText(_, text))
       case p: Product => p.productIterator exists (existsText(_, text))
+      case c: Comment => existsText(c.body, text)
     }
-    val (derived, base) = compiler.ask { () => 
-      val derived = definitions.RootPackage.info.decl(newTermName("p")).info.decl(newTypeName("Derived"))
+    val (derived, base) = compiler.ask { () =>
+      val derived = compiler.rootMirror.RootPackage.info.decl(newTermName("p")).info.decl(newTypeName("Derived"))
       (derived, derived.ancestors(0))
     }
     val cmt1 = getComment(derived, derivedSource, (base, baseSource)::(derived, derivedSource)::Nil)
-    if (!existsText(cmt1, "Derived comment."))
+    if (!existsText(cmt1, "This is Derived comment"))
       println("Unexpected Derived class comment:"+cmt1)
 
     val (fooDerived, fooBase) = compiler.ask { () =>
@@ -133,7 +141,7 @@ object Test extends InteractiveTest {
     }
 
     val cmt2 = getComment(fooDerived, derivedSource, (fooBase, baseSource)::(fooDerived, derivedSource)::Nil)
-    if (!existsText(cmt2, "Base method has documentation."))
+    if (!existsText(cmt2, "Base method has documentation"))
       println("Unexpected foo method comment:"+cmt2)
   }
 }
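
For context on the API migration visible in the hunks above (definitions/newTypeName/newTermName giving way to rootMirror and the TypeName/TermName constructors), here is a minimal sketch of the 2.11-style name and symbol lookup. It is an illustration only: it uses the public scala.reflect.runtime API rather than the compiler-internal Global the test works against, and the object name is hypothetical.

    import scala.reflect.runtime.{universe => ru}

    object NameLookupSketch {
      def main(args: Array[String]): Unit = {
        // 2.11 name constructors replace the old newTypeName/newTermName factories
        val tpeName: ru.TypeName = ru.TypeName("Option")
        val mthName: ru.TermName = ru.TermName("isEmpty")
        // top-level symbols are looked up through a mirror rather than `definitions`
        val mirror      = ru.runtimeMirror(getClass.getClassLoader)
        val optionClass = mirror.staticClass("scala.Option")
        println(optionClass.name == tpeName)                       // true
        println(optionClass.toType.member(mthName) != ru.NoSymbol) // true
      }
    }
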
diff --git a/test/disabled/presentation/doc/src/p/Base.scala b/test/disabled/presentation/doc/src/p/Base.scala
index 9031de3..d91632b 100755
--- a/test/disabled/presentation/doc/src/p/Base.scala
+++ b/test/disabled/presentation/doc/src/p/Base.scala
@@ -1,7 +1,7 @@
 package p
 
 /**
- * @define BaseComment $BaseVar comment.
+ * @define BaseComment This is $BaseVar comment.
  */
 trait Base {
   /**
diff --git a/test/disabled/presentation/simple-tests.check b/test/disabled/presentation/simple-tests.check
index cdb80ed..0f72cb5 100644
--- a/test/disabled/presentation/simple-tests.check
+++ b/test/disabled/presentation/simple-tests.check
@@ -187,8 +187,6 @@ TypeMember(value Xshowobj,Tester.this.settings.StringSetting,false,true,<none>)
 TypeMember(value Xshowtrees,Tester.this.settings.BooleanSetting,false,true,<none>)
 TypeMember(value Xwarnfatal,Tester.this.settings.BooleanSetting,false,true,<none>)
 TypeMember(value Xwarninit,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Ybuilderdebug,Tester.this.settings.ChoiceSetting,false,true,<none>)
-TypeMember(value Ybuildmanagerdebug,Tester.this.settings.BooleanSetting,false,true,<none>)
 TypeMember(value Ycompacttrees,Tester.this.settings.BooleanSetting,false,true,<none>)
 TypeMember(value Ycompletion,Tester.this.settings.BooleanSetting,false,true,<none>)
 TypeMember(value YdepMethTpes,Tester.this.settings.BooleanSetting,false,true,<none>)
diff --git a/test/disabled/presentation/simple-tests.opts b/test/disabled/presentation/simple-tests.opts
index 8529bbf..d651316 100644
--- a/test/disabled/presentation/simple-tests.opts
+++ b/test/disabled/presentation/simple-tests.opts
@@ -12,7 +12,7 @@
 # running partest from. Run it from the root scala checkout for these files to resolve correctly
 # (by default when running 'ant test', or 'test/partest'). Paths use Unix separators, the test
 # framework translates them to the platform dependent representation.
--bootclasspath lib/scala-compiler.jar:lib/scala-library.jar:lib/fjbg.jar
+-bootclasspath lib/scala-compiler.jar:lib/scala-library.jar
 
 # the following line would test using the quick compiler
-# -bootclasspath build/quick/classes/compiler:build/quick/classes/library:lib/fjbg.jar
+# -bootclasspath build/quick/classes/compiler:build/quick/classes/library
diff --git a/test/disabled/run/lisp.scala b/test/disabled/run/lisp.scala
index 06e68f5..73f24da 100644
--- a/test/disabled/run/lisp.scala
+++ b/test/disabled/run/lisp.scala
@@ -12,11 +12,11 @@ class LispTokenizer(s: String) extends Iterator[String] {
     while (i < s.length() && s.charAt(i) <= ' ') i += 1
     i < s.length()
   }
-  def next: String = 
+  def next: String =
     if (hasNext) {
       val start = i
       if (isDelimiter(s charAt i)) i += 1
-      else 
+      else
         do i = i + 1
         while (!isDelimiter(s charAt i))
       s.substring(start, i)
@@ -190,10 +190,10 @@ object LispCaseClasses extends Lisp {
 
     def extendEnv(env: Environment,
                   ps: List[String], args: List[Data]): Environment =
-      Pair(ps, args) match {
-        case Pair(List(), List()) =>
+      (ps, args) match {
+        case (List(), List()) =>
           env
-        case Pair(p :: ps1, arg :: args1) =>
+        case (p :: ps1, arg :: args1) =>
           extendEnv(env.extend(p, arg), ps1, args1)
         case _ =>
           lispError("wrong number of arguments")
@@ -381,10 +381,10 @@ object LispAny extends Lisp {
 
     def extendEnv(env: Environment,
                   ps: List[String], args: List[Data]): Environment =
-      Pair(ps, args) match {
-        case Pair(List(), List()) =>
+      (ps, args) match {
+        case (List(), List()) =>
           env
-        case Pair(p :: ps1, arg :: args1) =>
+        case (p :: ps1, arg :: args1) =>
           extendEnv(env.extend(p, arg), ps1, args1)
         case _ =>
           lispError("wrong number of arguments")
diff --git a/test/disabled/run/t4146.scala b/test/disabled/run/t4146.scala
new file mode 100644
index 0000000..a17de50
--- /dev/null
+++ b/test/disabled/run/t4146.scala
@@ -0,0 +1,7 @@
+object bob extends App {
+  var name = "Bob"
+}
+
+object Test extends App {
+  assert(bob.name == "Bob")
+}
diff --git a/test/disabled/run/t4602.scala b/test/disabled/run/t4602.scala
new file mode 100644
index 0000000..655c350
--- /dev/null
+++ b/test/disabled/run/t4602.scala
@@ -0,0 +1,57 @@
+import java.io.{File, FileOutputStream, BufferedOutputStream, FileWriter, ByteArrayOutputStream, PrintStream}
+import tools.nsc.{CompileClient, CompileServer}
+import java.util.concurrent.{CountDownLatch, TimeUnit}
+
+object Test extends App {
+  val startupLatch = new CountDownLatch(1)
+  // we have to explicitly launch our server because when the client launches a server it uses
+  // the "scala" shell command meaning whatever version of scala (and whatever version of libraries)
+  // happens to be in the path gets used
+  val t = new Thread(new Runnable {
+    def run() = {
+      CompileServer.execute(() => startupLatch.countDown(), Array[String]())
+    }
+  })
+  t setDaemon true
+  t.start()
+  if (!startupLatch.await(2, TimeUnit.MINUTES))
+    sys error "Timeout waiting for server to start"
+
+  val baos = new ByteArrayOutputStream()
+  val ps = new PrintStream(baos)
+
+  val outdir = scala.reflect.io.Directory(sys.props("partest.output"))
+
+  val dirNameAndPath = (1 to 2).toList map {number =>
+    val name = s"Hello${number}"
+    val dir = outdir / number.toString
+    (dir, name, dir / s"${name}.scala")
+  }
+
+  dirNameAndPath foreach {case (dir, name, path) =>
+    dir.createDirectory()
+    val file = path.jfile
+    val out = new FileWriter(file)
+    try
+      out.write(s"object ${name}\n")
+    finally
+      out.close
+  }
+
+  val success = (scala.Console withOut ps) {
+    dirNameAndPath foreach {case (path, name, _) =>
+      CompileClient.process(Array("-verbose", "-current-dir", path.toString, s"${name}.scala"))
+    }
+
+    CompileClient.process(Array("-shutdown"))
+  }
+
+  // now make sure we got success and the correct normalized paths
+  val msg = baos.toString()
+
+  assert(success, s"got a failure. Full results were: \n${msg}")
+  dirNameAndPath foreach {case (_, _, path) =>
+    val expected = s"Input files after normalizing paths: ${path}"
+    assert(msg contains expected, s"could not find '${expected}' in output. Full results were: \n${msg}")
+  }
+}
diff --git a/test/disabled/run/t6026.check b/test/disabled/run/t6026.check
deleted file mode 100644
index 779bb3a..0000000
--- a/test/disabled/run/t6026.check
+++ /dev/null
@@ -1,9 +0,0 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> class Foo
-defined class Foo
-
-scala> :javap Foo
-Compiled from "<console>"public class Foo extends java.lang.Object{    public Foo();}
-scala> 
diff --git a/test/disabled/run/t6026.scala b/test/disabled/run/t6026.scala
deleted file mode 100644
index bee27bc..0000000
--- a/test/disabled/run/t6026.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-
-import scala.tools.partest.ReplTest
-
-object Test extends ReplTest {
-  override def code =
-"""|class Foo
-   |:javap Foo
-   |""".stripMargin
-}
diff --git a/test/files/ant/README b/test/files/ant/README
deleted file mode 100644
index 8cd8745..0000000
--- a/test/files/ant/README
+++ /dev/null
@@ -1,42 +0,0 @@
-README
-======
-
-Test cases in directory test/files/ant/ are executed by invoking an
-Ant script whose name ends with "build.xml" (eg. "fsc001-build.xml").
-
-The Scala Ant tasks fsc/scalac/scaladoc are instantiated from various
-binaries (quick/pack/latest/installed) and are executed with different
-combinations of Ant attributes/elements:
-
-            +---------------------------+--------------------------+
-            |       Attributes          |      Nested elements     |
-------------+---------------------------+--------------------------+
-fsc001      | srcdir,classpath      (1) | compilerarg              |
-fsc002      | srcref,classpathref   (1) | compilerarg              |
-fsc003      |                       (2) | compilerarg,src,include  |
-------------+---------------------------+--------------------------+
-scalac001   | srcdir,classpath      (1) |                          |
-scalac002   | srcref,classpathref   (1) |                          |
-scalac003   |                       (2) | src,include              |
-scalac004   | deprecation,unchecked (3) |                          |
-------------+---------------------------+--------------------------+
-scaladoc    | srcdir,classpathref       |                          |
-------------+---------------------------+--------------------------+
-
-Other attributes:
-(1) includes,destdir
-(2) destdir,classpathref
-(3) srcdir,includes,destdir,classpath
-
-
-The above test cases can also be run from the command prompt using one of
-the following shell commands:
-
-1) For quick/pack/latest binaries (-Dbinary=quick|pack|latest)
-
-$ ant -Dbinary=quick -Dproject.dir=$HOME/workspace/scala -f scalac001-build.xml
-
-2) For installed binaries (-Dbinary=installed)
-
-$ ant -Dbinary=installed -Dinstalled.dir=/opt/scala -f scalac001-build.xml
-
diff --git a/test/files/ant/fsc001-build.check b/test/files/ant/fsc001-build.check
deleted file mode 100644
index b5141f5..0000000
--- a/test/files/ant/fsc001-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
-    [mkdir] Created dir: [...]/files/ant/fsc001-ant.obj
-      [fsc] Compiling 1 source file to [...]/files/ant/fsc001-ant.obj
diff --git a/test/files/ant/fsc001-build.xml b/test/files/ant/fsc001-build.xml
deleted file mode 100644
index 0130f36..0000000
--- a/test/files/ant/fsc001-build.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="fsc001" default="run">
-
-  <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
-  <target name="build" depends="init">
-    <echo level="verbose" message="build.dir=${build.dir}"/>
-    <mkdir dir="${build.dir}"/>
-    <pathconvert property="classpath" refid="build.classpath"/>
-    <fsc
-      srcdir="${source.dir}"
-      includes="**/${ant.project.name}*.scala"
-      destdir="${build.dir}"
-      classpath="${classpath}">
-    </fsc>
-    <echo level="verbose" message="log.file=${log.file}"/>
-    <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
-  </target>
-
-</project>
-
diff --git a/test/files/ant/fsc001.scala b/test/files/ant/fsc001.scala
deleted file mode 100644
index 6ede598..0000000
--- a/test/files/ant/fsc001.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
-  def main(args: Array[String]) {
-    println(args mkString " ")
-  }
-}
diff --git a/test/files/ant/fsc002-build.check b/test/files/ant/fsc002-build.check
deleted file mode 100644
index 0c9c30d..0000000
--- a/test/files/ant/fsc002-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
-    [mkdir] Created dir: [...]/files/ant/fsc002-ant.obj
-      [fsc] Compiling 1 source file to [...]/files/ant/fsc002-ant.obj
diff --git a/test/files/ant/fsc002-build.xml b/test/files/ant/fsc002-build.xml
deleted file mode 100644
index db91070..0000000
--- a/test/files/ant/fsc002-build.xml
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="fsc002" default="run">
-
-  <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
-  <target name="build" depends="init">
-    <echo level="verbose" message="build.dir=${build.dir}"/>
-    <mkdir dir="${build.dir}"/>
-    <path id="source.ref">
-      <pathelement location="${source.dir}"/>
-    </path>
-    <fsc
-      srcref="source.ref"
-      includes="**/${ant.project.name}*.scala"
-      destdir="${build.dir}"
-      classpathref="build.classpath">
-    </fsc>
-    <echo level="verbose" message="log.file=${log.file}"/>
-    <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
-  </target>
-
-</project>
-
diff --git a/test/files/ant/fsc002.scala b/test/files/ant/fsc002.scala
deleted file mode 100644
index 47131da..0000000
--- a/test/files/ant/fsc002.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package test
-
-object Main {
-  def main(args: Array[String]): Unit =
-    Console.println(args.toList)
-}
diff --git a/test/files/ant/fsc003-build.check b/test/files/ant/fsc003-build.check
deleted file mode 100644
index c8c9ed8..0000000
--- a/test/files/ant/fsc003-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
-    [mkdir] Created dir: [...]/files/ant/fsc003-ant.obj
-      [fsc] Compiling 1 source file to [...]/files/ant/fsc003-ant.obj
diff --git a/test/files/ant/fsc003-build.xml b/test/files/ant/fsc003-build.xml
deleted file mode 100644
index 5f71770..0000000
--- a/test/files/ant/fsc003-build.xml
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="fsc003" default="run">
-
-  <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
-  <target name="build" depends="init">
-    <echo level="verbose" message="build.dir=${build.dir}"/>
-    <mkdir dir="${build.dir}"/>
-    <fsc
-      destdir="${build.dir}"
-      classpathref="build.classpath">
-      <src path="${source.dir}"/>
-      <include name="**/${ant.project.name}*.scala"/>
-    </fsc>
-    <echo level="verbose" message="log.file=${log.file}"/>
-    <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
-  </target>
-
-</project>
-
diff --git a/test/files/ant/fsc003.scala b/test/files/ant/fsc003.scala
deleted file mode 100644
index 6ede598..0000000
--- a/test/files/ant/fsc003.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
-  def main(args: Array[String]) {
-    println(args mkString " ")
-  }
-}
diff --git a/test/files/ant/imported.xml b/test/files/ant/imported.xml
deleted file mode 100644
index 5a4dfc3..0000000
--- a/test/files/ant/imported.xml
+++ /dev/null
@@ -1,155 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="imported">
-
-  <!-- This file is imported by the main Ant script. -->
-
-  <!-- Prevents system classpath from being used -->
-  <property name="build.sysclasspath" value="ignore"/>
-
-<!-- ===========================================================================
-PROPERTIES
-============================================================================ -->
-
-  <property name="source.dir" value="${basedir}"/>
-
-  <property file="${basedir}/build.properties"/>
-
-  <property name="build.dir" location="${source.dir}/${ant.project.name}-ant.obj"/>
-  <property name="log.dir" location="${source.dir}"/>
-  <property name="log.file" value="${log.dir}/${ant.project.name}-build-ant.log"/>
-  <property name="project.dir" value="../../.."/>
-
-  <condition property="quick.binary">
-    <equals arg1="${binary}" arg2="quick"/>
-  </condition>
-  <condition property="pack.binary">
-    <equals arg1="${binary}" arg2="pack"/>
-  </condition>
-  <condition property="latest.binary">
-    <equals arg1="${binary}" arg2="latest"/>
-  </condition>
-  <condition property="installed.binary">
-    <equals arg1="${binary}" arg2="installed"/>
-  </condition>
-
-  <fail message="Property 'binary' must be set to either 'quick', 'pack', 'latest' or 'installed'.">
-    <condition><not><or>
-      <isset property="quick.binary"/>
-      <isset property="pack.binary"/>
-      <isset property="latest.binary"/>
-      <isset property="installed.binary"/>
-    </or></not></condition>
-  </fail>
-  <echo level="verbose" message="binary=${binary}"/>
-  <echo level="verbose" message="build.dir=${build.dir}"/>
-
-<!-- ===========================================================================
-INITIALISATION
-============================================================================ -->
-
-  <target name="quick.init" if="quick.binary">
-    <property name="quick.dir" value="${project.dir}/build/quick"/>
-    <fail message="Quick build could not be found.">
-      <condition><not><available file="${quick.dir}"/></not></condition>
-    </fail>
-    <property name="scala.dir" value="${quick.dir}"/>
-    <property name="scala-library.lib" value="${scala.dir}/classes/library/"/>
-    <property name="scala-compiler.lib" value="${scala.dir}/classes/compiler/"/>
-    <property name="fjbg.lib" value="${project.dir}/lib/fjbg.jar"/>
-  </target>
-
-  <target name="pack.init" if="pack.binary">
-    <property name="pack.dir" value="${project.dir}/build/pack"/>
-    <fail message="Pack build could not be found.">
-      <condition><not><available file="${pack.dir}"/></not></condition>
-    </fail>
-    <property name="scala.dir" value="${pack.dir}"/>
-    <property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
-    <property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
-    <property name="fjbg.lib" value=""/>
-  </target>
-
-  <target name="latest.init" if="latest.binary">
-    <property name="latest.dir" value="${project.dir}/dists/latest"/>
-    <fail message="Latest build could not be found.">
-      <condition><not><available file="${latest.dir}"/></not></condition>
-    </fail>
-    <property name="scala.dir" value="${latest.dir}"/>
-    <property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
-    <property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
-    <property name="fjbg.lib" value=""/>
-  </target>
-
-  <target name="installed.init" if="installed.binary">
-    <property name="installed.dir" value="/opt/scala"/>
-    <fail message="Installed distribution could not be found.">
-      <condition><not><available file="${installed.dir}"/></not></condition>
-    </fail>
-    <property name="scala.dir" value="${installed.dir}"/>
-    <property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
-    <property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
-    <property name="fjbg.lib" value=""/>
-  </target>
-
-  <target name="init" depends="quick.init, pack.init, latest.init, installed.init">
-    <echo level="verbose" message="scala.dir=${scala.dir}"/>
-
-    <path id="scala.classpath">
-      <pathelement location="${scala-library.lib}"/>
-      <pathelement location="${scala-compiler.lib}"/>
-      <pathelement location="${fjbg.lib}"/> <!-- only present for 'quick' -->
-    </path>
-
-    <fail message="Scala library '${scala-library.lib}' or '${scala-compiler.lib}' is missing/broken">
-      <condition><not><and>
-        <available classname="scala.Predef"
-                   classpathref="scala.classpath"/>
-        <available classname="scala.Option"
-                   classpathref="scala.classpath"/>
-        <available classname="scala.runtime.ObjectRef"
-                   classpathref="scala.classpath"/>
-        <available classname="scala.tools.ant.Scalac"
-                   classpathref="scala.classpath"/>
-        <available classname="scala.tools.nsc.Main"
-                   classpathref="scala.classpath"/>
-        <available classname="scala.tools.util.StringOps"
-                   classpathref="scala.classpath"/>
-      </and></not></condition>
-    </fail>
-    <taskdef resource="scala/tools/ant/antlib.xml" classpathref="scala.classpath"/>
-
-    <path id="build.classpath">
-      <!--<pathelement location="${scala-actors.lib}"/>-->
-      <pathelement location="${scala-library.lib}"/>
-      <pathelement location="${build.dir}"/>
-    </path>
-
-    <!-- make sure the log file exists when the Ant build scripts -->
-    <!-- are run manually from the command prompt -->
-    <touch file="${log.file}"/>
-  </target>
-
-<!-- ===========================================================================
-RUN
-============================================================================ -->
-
-  <target name="run" depends="build, clean"/>
-
-<!-- ===========================================================================
-CLEAN
-============================================================================ -->
-
-  <macrodef name="remove">
-    <attribute name="dir"/>
-    <sequential>
-      <delete dir="@{dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
-    </sequential>
-  </macrodef>
-
-  <target name="clean">
-    <remove dir="${build.dir}"/>
-  </target>
-
-</project>
-
diff --git a/test/files/ant/scalac001-build.check b/test/files/ant/scalac001-build.check
deleted file mode 100644
index 05a43ba..0000000
--- a/test/files/ant/scalac001-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
-    [mkdir] Created dir: [...]/files/ant/scalac001-ant.obj
-   [scalac] Compiling 1 source file to [...]/files/ant/scalac001-ant.obj
diff --git a/test/files/ant/scalac001-build.xml b/test/files/ant/scalac001-build.xml
deleted file mode 100644
index 4ec7fc8..0000000
--- a/test/files/ant/scalac001-build.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scalac001" default="run">
-
-  <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
-  <target name="build" depends="init">
-    <echo level="verbose" message="build.dir=${build.dir}"/>
-    <mkdir dir="${build.dir}"/>
-    <pathconvert property="classpath" refid="build.classpath"/>
-    <scalac
-      srcdir="${source.dir}"
-      includes="**/${ant.project.name}*.scala"
-      destdir="${build.dir}"
-      classpath="${classpath}"
-    />
-    <echo level="verbose" message="log.file=${log.file}"/>
-    <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
-  </target>
-
-</project>
-
diff --git a/test/files/ant/scalac001.scala b/test/files/ant/scalac001.scala
deleted file mode 100644
index 47131da..0000000
--- a/test/files/ant/scalac001.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package test
-
-object Main {
-  def main(args: Array[String]): Unit =
-    Console.println(args.toList)
-}
diff --git a/test/files/ant/scalac002-build.check b/test/files/ant/scalac002-build.check
deleted file mode 100644
index e7b3670..0000000
--- a/test/files/ant/scalac002-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
-    [mkdir] Created dir: [...]/files/ant/scalac002-ant.obj
-   [scalac] Compiling 1 source file to [...]/files/ant/scalac002-ant.obj
diff --git a/test/files/ant/scalac002-build.xml b/test/files/ant/scalac002-build.xml
deleted file mode 100644
index 07628af..0000000
--- a/test/files/ant/scalac002-build.xml
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scalac002" default="run">
-
-  <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
-  <target name="build" depends="init">
-    <echo level="verbose" message="build.dir=${build.dir}"/>
-    <mkdir dir="${build.dir}"/>
-    <path id="source.ref">
-      <pathelement location="${source.dir}"/>
-    </path>
-    <scalac
-      srcref="source.ref"
-      includes="**/${ant.project.name}*.scala"
-      destdir="${build.dir}"
-      classpathref="build.classpath"
-    />
-    <echo level="verbose" message="log.file=${log.file}"/>
-    <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
-  </target>
-
-</project>
-
diff --git a/test/files/ant/scalac002.scala b/test/files/ant/scalac002.scala
deleted file mode 100644
index 6ede598..0000000
--- a/test/files/ant/scalac002.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
-  def main(args: Array[String]) {
-    println(args mkString " ")
-  }
-}
diff --git a/test/files/ant/scalac003-build.check b/test/files/ant/scalac003-build.check
deleted file mode 100644
index 7b0d336..0000000
--- a/test/files/ant/scalac003-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
-    [mkdir] Created dir: [...]/files/ant/scalac003-ant.obj
-   [scalac] Compiling 1 source file to [...]/files/ant/scalac003-ant.obj
diff --git a/test/files/ant/scalac003-build.xml b/test/files/ant/scalac003-build.xml
deleted file mode 100644
index 1d70aa1..0000000
--- a/test/files/ant/scalac003-build.xml
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scalac003" default="run">
-
-  <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
-  <target name="build" depends="init">
-    <echo level="verbose" message="build.dir=${build.dir}"/>
-    <mkdir dir="${build.dir}"/>
-    <scalac
-      destdir="${build.dir}"
-      classpathref="build.classpath">
-      <src path="${source.dir}"/>
-      <include name="**/${ant.project.name}*.scala"/>
-    </scalac>
-    <echo level="verbose" message="log.file=${log.file}"/>
-    <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
-  </target>
-
-</project>
-
diff --git a/test/files/ant/scalac003.scala b/test/files/ant/scalac003.scala
deleted file mode 100644
index 6ede598..0000000
--- a/test/files/ant/scalac003.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
-  def main(args: Array[String]) {
-    println(args mkString " ")
-  }
-}
diff --git a/test/files/ant/scalac004-build.check b/test/files/ant/scalac004-build.check
deleted file mode 100644
index ffe9e8c..0000000
--- a/test/files/ant/scalac004-build.check
+++ /dev/null
@@ -1,24 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
-    [mkdir] Created dir: [...]/files/ant/scalac004-ant.obj
-   [scalac] Compiling 1 source file to [...]/files/ant/scalac004-ant.obj
-   [scalac] [...]/files/ant/scalac004.scala:9: warning: method exit in object Predef is deprecated: Use sys.exit(status) instead
-   [scalac]     Predef.exit(0) //deprecated in 2.9.0
-   [scalac]            ^
-   [scalac] [...]/files/ant/scalac004.scala:6: warning: match is not exhaustive!
-   [scalac] missing combination            Nil
-   [scalac] 
-   [scalac]     xs match { //(xs: @unchecked) match {
-   [scalac]     ^
-   [scalac] two warnings found
-   [scalac] Compile succeeded with 2 warnings; see the compiler output for details.
diff --git a/test/files/ant/scalac004-build.xml b/test/files/ant/scalac004-build.xml
deleted file mode 100644
index 66c19a3..0000000
--- a/test/files/ant/scalac004-build.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scalac004" default="run">
-
-  <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
-  <target name="build" depends="init">
-    <echo level="verbose" message="build.dir=${build.dir}"/>
-    <mkdir dir="${build.dir}"/>
-    <scalac
-      deprecation="yes" unchecked="yes"
-      srcdir="${source.dir}"
-      includes="**/${ant.project.name}*.scala"
-      destdir="${build.dir}"
-      classpathref="build.classpath"
-    />
-    <echo level="verbose" message="log.file=${log.file}"/>
-    <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
-  </target>
-
-</project>
-
diff --git a/test/files/ant/scalac004.scala b/test/files/ant/scalac004.scala
deleted file mode 100644
index 66b2ba7..0000000
--- a/test/files/ant/scalac004.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package test
-
-object Main {
-  def main(args: Array[String]) {
-    val xs = List(1, 2, 3, 4)
-    xs match { //(xs: @unchecked) match {
-      case x::xs => println(x)
-    }
-    Predef.exit(0) //deprecated in 2.9.0
-  }
-}
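
The expected output above flags two issues in the deleted source: Predef.exit is deprecated in favour of sys.exit, and the match on xs is not exhaustive. A minimal sketch of the same program without either warning (hypothetical object name, not part of the test suite):

    object Scalac004Sketch {
      def main(args: Array[String]): Unit = {
        val xs = List(1, 2, 3, 4)
        xs match {
          case x :: _ => println(x)
          case Nil    => println("empty")   // covering Nil makes the match exhaustive
        }
        sys.exit(0)                         // sys.exit replaces the deprecated Predef.exit
      }
    }
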
diff --git a/test/files/ant/scaladoc-build.check b/test/files/ant/scaladoc-build.check
deleted file mode 100644
index 1c82456..0000000
--- a/test/files/ant/scaladoc-build.check
+++ /dev/null
@@ -1,15 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
-    [mkdir] Created dir: [...]/files/ant/scaladoc-ant.obj
- [scaladoc] Documenting 1 source file to [...]/files/ant/scaladoc-ant.obj
- [scaladoc] model contains 3 documentable templates
diff --git a/test/files/ant/scaladoc-build.xml b/test/files/ant/scaladoc-build.xml
deleted file mode 100644
index fb4dc6f..0000000
--- a/test/files/ant/scaladoc-build.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scaladoc" default="run">
-
-  <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
-  <target name="build" depends="init">
-    <echo level="verbose" message="build.dir=${build.dir}"/>
-    <mkdir dir="${build.dir}"/>
-    <scaladoc
-      srcdir="${source.dir}"
-      includes="**/${ant.project.name}*.scala"
-      deprecation="yes" unchecked="yes"
-      destdir="${build.dir}"
-      classpathref="build.classpath"
-    />
-    <echo level="verbose" message="log.file=${log.file}"/>
-    <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
-  </target>
-
-</project>
-
diff --git a/test/files/ant/scaladoc.scala b/test/files/ant/scaladoc.scala
deleted file mode 100644
index 6ede598..0000000
--- a/test/files/ant/scaladoc.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
-  def main(args: Array[String]) {
-    println(args mkString " ")
-  }
-}
diff --git a/test/files/bench/equality/eqeq.eqlog b/test/files/bench/equality/eqeq.eqlog
index d1e27ac..55a5eb4 100644
--- a/test/files/bench/equality/eqeq.eqlog
+++ b/test/files/bench/equality/eqeq.eqlog
@@ -1,42 +1,42 @@
-Banchmark results for testing equality operations:
-eq.scala: Base case, use eq equality only
-eqeq.scala: Test case, use == instead of eq.
-All tests run on Thinkpad T400, 1.6.0_12 client VM.
-Test command: java eq 5 5
-              java eqeq 5 5
-eq.scala, no -optimise
-eq$		109		78		79		63		63
-eq$		94		63		63		78		78
-eq$		94		62		62		62		78
-eq$		94		78		78		78		78
-eq$		94		78		78		78		78
-eq.scala, with -optimise
-eq$		421		63		62		47		63
-eq$		406		62		62		63		62
-eq$		407		62		62		78		63
-eq$		406		63		63		62		62
-eq$		407		62		62		63		47
-eqeq.scala with version of BoxesRuntime as of Nov 13th, no -optimise
-eqeq$		562		516		516		516		515
-eqeq$		547		515		515		531		532
-eqeq$		532		516		516		515		516
-eqeq$		547		531		531		516		531
-eqeq$		547		515		515		516		516
-eqeq.scala with version of BoxesRuntime as of Nov 13th, with -optimise
-eqeq$		1031		390		391		391		391
-eqeq$		1031		391		391		391		390
-eqeq$		1031		390		390		391		391
-eqeq$		1031		406		407		391		390
-eqeq$		1031		390		390		391		391
-eqeq.scala with 1st optimized of Nov 14th, no -optimise
-eqeq$		484		421		438		438		437
-eqeq$		484		438		437		437		438
-eqeq$		469		437		453		454		438
-eqeq$		468		437		438		468		438
-eqeq$		485		437		437		422		438
-eqeq.scala with 1st optimized of Nov 14th, with -optimise
-eqeq$		1016		375		391		375		375
-eqeq$		1016		375		391		390		375
-eqeq$		1016		390		391		375		375
-eqeq$		1015		375		391		390		375
-eqeq$		1016		390		375		375		375
+Benchmark results for testing equality operations:
+eq.scala: Base case, use eq equality only
+eqeq.scala: Test case, use == instead of eq.
+All tests run on Thinkpad T400, 1.6.0_12 client VM.
+Test command: java eq 5 5
+              java eqeq 5 5
+eq.scala, no -optimise
+eq$		109		78		79		63		63
+eq$		94		63		63		78		78
+eq$		94		62		62		62		78
+eq$		94		78		78		78		78
+eq$		94		78		78		78		78
+eq.scala, with -optimise
+eq$		421		63		62		47		63
+eq$		406		62		62		63		62
+eq$		407		62		62		78		63
+eq$		406		63		63		62		62
+eq$		407		62		62		63		47
+eqeq.scala with version of BoxesRuntime as of Nov 13th, no -optimise
+eqeq$		562		516		516		516		515
+eqeq$		547		515		515		531		532
+eqeq$		532		516		516		515		516
+eqeq$		547		531		531		516		531
+eqeq$		547		515		515		516		516
+eqeq.scala with version of BoxesRuntime as of Nov 13th, with -optimise
+eqeq$		1031		390		391		391		391
+eqeq$		1031		391		391		391		390
+eqeq$		1031		390		390		391		391
+eqeq$		1031		406		407		391		390
+eqeq$		1031		390		390		391		391
+eqeq.scala with 1st optimized of Nov 14th, no -optimise
+eqeq$		484		421		438		438		437
+eqeq$		484		438		437		437		438
+eqeq$		469		437		453		454		438
+eqeq$		468		437		438		468		438
+eqeq$		485		437		437		422		438
+eqeq.scala with 1st optimized of Nov 14th, with -optimise
+eqeq$		1016		375		391		375		375
+eqeq$		1016		375		391		390		375
+eqeq$		1016		390		391		375		375
+eqeq$		1015		375		391		390		375
+eqeq$		1016		390		375		375		375
diff --git a/test/files/buildmanager/annotated/A.scala b/test/files/buildmanager/annotated/A.scala
deleted file mode 100644
index 4130cf2..0000000
--- a/test/files/buildmanager/annotated/A.scala
+++ /dev/null
@@ -1 +0,0 @@
-case class A[T](x: String, y: T)
diff --git a/test/files/buildmanager/annotated/annotated.check b/test/files/buildmanager/annotated/annotated.check
deleted file mode 100644
index ce92c9a..0000000
--- a/test/files/buildmanager/annotated/annotated.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(), object A -> List())
diff --git a/test/files/buildmanager/annotated/annotated.test b/test/files/buildmanager/annotated/annotated.test
deleted file mode 100644
index 392e0d3..0000000
--- a/test/files/buildmanager/annotated/annotated.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile A.scala
->>compile A.scala
diff --git a/test/files/buildmanager/freshnames/A.scala b/test/files/buildmanager/freshnames/A.scala
deleted file mode 100644
index e8ab26c..0000000
--- a/test/files/buildmanager/freshnames/A.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-abstract class A {
-
-    var t: List[B]
-    
-    def foo(n: String): Option[B] = {
-        t.reverse find (_.names contains n)
-    }
-    
-    def bar(n: Int): Option[B] = {
-        t.reverse find (_.names contains n)
-    }
-}
-
-//class A
-case class B(names: List[String])
-
diff --git a/test/files/buildmanager/freshnames/B.scala b/test/files/buildmanager/freshnames/B.scala
deleted file mode 100644
index d700225..0000000
--- a/test/files/buildmanager/freshnames/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-abstract class C extends A {
-    def test(n: Int) = bar(n)
-}
-
diff --git a/test/files/buildmanager/freshnames/freshnames.check b/test/files/buildmanager/freshnames/freshnames.check
deleted file mode 100644
index 9f05fb8..0000000
--- a/test/files/buildmanager/freshnames/freshnames.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > B.scala A.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(), class B -> List(), object B -> List())
diff --git a/test/files/buildmanager/freshnames/freshnames.test b/test/files/buildmanager/freshnames/freshnames.test
deleted file mode 100644
index 20b2029..0000000
--- a/test/files/buildmanager/freshnames/freshnames.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile B.scala A.scala
->>compile A.scala
diff --git a/test/files/buildmanager/infer/A.scala b/test/files/buildmanager/infer/A.scala
deleted file mode 100644
index 46b5391..0000000
--- a/test/files/buildmanager/infer/A.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-class Foo(flag: Boolean) {
-    val classpath = 
-        if (flag)
-            new AClasspath
-        else
-            new BClasspath
-}
-
-class AClasspath extends MergedClasspath[A]
-
-class BClasspath extends MergedClasspath[B]
-
-abstract class MergedClasspath[T]
-
-class A
-class B
diff --git a/test/files/buildmanager/infer/infer.check b/test/files/buildmanager/infer/infer.check
deleted file mode 100644
index 1f73697..0000000
--- a/test/files/buildmanager/infer/infer.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(), class AClasspath -> List(), class B -> List(), class BClasspath -> List(), class Foo -> List(), class MergedClasspath -> List())
diff --git a/test/files/buildmanager/infer/infer.test b/test/files/buildmanager/infer/infer.test
deleted file mode 100644
index 392e0d3..0000000
--- a/test/files/buildmanager/infer/infer.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile A.scala
->>compile A.scala
diff --git a/test/files/buildmanager/namesdefaults/defparam-use.scala b/test/files/buildmanager/namesdefaults/defparam-use.scala
deleted file mode 100644
index 5b5bbb3..0000000
--- a/test/files/buildmanager/namesdefaults/defparam-use.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-
-object Test extends App {
-  val outer = new Outer
-  new outer.Inner
-}
diff --git a/test/files/buildmanager/namesdefaults/defparam.scala b/test/files/buildmanager/namesdefaults/defparam.scala
deleted file mode 100644
index d817c71..0000000
--- a/test/files/buildmanager/namesdefaults/defparam.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-class Outer {
-
-  class Inner(val x: List[Int] = Nil)
-
-//  lazy val Inner = "abc"
-}
-
diff --git a/test/files/buildmanager/namesdefaults/namesdefaults.check b/test/files/buildmanager/namesdefaults/namesdefaults.check
deleted file mode 100644
index 4a94d1f..0000000
--- a/test/files/buildmanager/namesdefaults/namesdefaults.check
+++ /dev/null
@@ -1,9 +0,0 @@
-builder > defparam.scala defparam-use.scala
-compiling Set(defparam-use.scala, defparam.scala)
-Changes: Map()
-builder > defparam-use.scala
-compiling Set(defparam-use.scala)
-Changes: Map(class Test$delayedInit$body -> List(), object Test -> List())
-builder > defparam-use.scala
-compiling Set(defparam-use.scala)
-Changes: Map(class Test$delayedInit$body -> List(), object Test -> List())
diff --git a/test/files/buildmanager/namesdefaults/namesdefaults.test b/test/files/buildmanager/namesdefaults/namesdefaults.test
deleted file mode 100644
index 84ccc36..0000000
--- a/test/files/buildmanager/namesdefaults/namesdefaults.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile defparam.scala defparam-use.scala
->>compile defparam-use.scala
->>compile defparam-use.scala
diff --git a/test/files/buildmanager/simpletest/A.scala b/test/files/buildmanager/simpletest/A.scala
deleted file mode 100644
index ef70470..0000000
--- a/test/files/buildmanager/simpletest/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class A {
-  def foo = 2
-}
diff --git a/test/files/buildmanager/simpletest/B.scala b/test/files/buildmanager/simpletest/B.scala
deleted file mode 100644
index 364dc6e..0000000
--- a/test/files/buildmanager/simpletest/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class B extends A {
-  override def foo = 2
-}
diff --git a/test/files/buildmanager/simpletest/simpletest.changes/A1.scala b/test/files/buildmanager/simpletest/simpletest.changes/A1.scala
deleted file mode 100644
index 83d15dc..0000000
--- a/test/files/buildmanager/simpletest/simpletest.changes/A1.scala
+++ /dev/null
@@ -1 +0,0 @@
-class A
diff --git a/test/files/buildmanager/simpletest/simpletest.check b/test/files/buildmanager/simpletest/simpletest.check
deleted file mode 100644
index 95ea2c4..0000000
--- a/test/files/buildmanager/simpletest/simpletest.check
+++ /dev/null
@@ -1,11 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(Removed(Definition(A.foo))))
-invalidate B.scala because inherited method removed [Removed(Definition(A.foo))]
-compiling Set(B.scala)
-B.scala:2: error: method foo overrides nothing
-  override def foo = 2
-               ^
diff --git a/test/files/buildmanager/simpletest/simpletest.test b/test/files/buildmanager/simpletest/simpletest.test
deleted file mode 100644
index 2c0be15..0000000
--- a/test/files/buildmanager/simpletest/simpletest.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A1.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2280/A.scala b/test/files/buildmanager/t2280/A.scala
deleted file mode 100644
index 5febade..0000000
--- a/test/files/buildmanager/t2280/A.scala
+++ /dev/null
@@ -1 +0,0 @@
-class A extends B
diff --git a/test/files/buildmanager/t2280/B.java b/test/files/buildmanager/t2280/B.java
deleted file mode 100644
index aef8e10..0000000
--- a/test/files/buildmanager/t2280/B.java
+++ /dev/null
@@ -1,2 +0,0 @@
-public class B {}
-
diff --git a/test/files/buildmanager/t2280/t2280.check b/test/files/buildmanager/t2280/t2280.check
deleted file mode 100644
index 7ea7511..0000000
--- a/test/files/buildmanager/t2280/t2280.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala B.java
-compiling Set(A.scala, B.java)
-Changes: Map()
-builder > B.java
-compiling Set(B.java)
-Changes: Map(class B -> List())
diff --git a/test/files/buildmanager/t2280/t2280.test b/test/files/buildmanager/t2280/t2280.test
deleted file mode 100644
index 2eda777..0000000
--- a/test/files/buildmanager/t2280/t2280.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile A.scala B.java
->>compile B.java
diff --git a/test/files/buildmanager/t2556_1/A.scala b/test/files/buildmanager/t2556_1/A.scala
deleted file mode 100644
index c6e200b..0000000
--- a/test/files/buildmanager/t2556_1/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class A {
-  def x(i: Int) = i+"3"
-}
diff --git a/test/files/buildmanager/t2556_1/B.scala b/test/files/buildmanager/t2556_1/B.scala
deleted file mode 100644
index 8529587..0000000
--- a/test/files/buildmanager/t2556_1/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class B extends A {
-  def x(s: String) = s+"5"
-}
diff --git a/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala b/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala
deleted file mode 100644
index 4ac1045..0000000
--- a/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class A {
-  def x(i: String) = i+"3"
-}
-
diff --git a/test/files/buildmanager/t2556_1/t2556_1.check b/test/files/buildmanager/t2556_1/t2556_1.check
deleted file mode 100644
index 2e501c8..0000000
--- a/test/files/buildmanager/t2556_1/t2556_1.check
+++ /dev/null
@@ -1,12 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]]
-compiling Set(B.scala)
-B.scala:2: error: overriding method x in class A of type (i: String)String;
- method x needs `override' modifier
-  def x(s: String) = s+"5"
-      ^
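
The expected output above ends with the compiler demanding an `override' modifier once A.x takes a String and B.x now matches it. A minimal sketch of the accepted form, using hypothetical class names rather than the test's A and B:

    class A2 {
      def x(i: String) = i + "3"
    }
    class B2 extends A2 {
      override def x(s: String) = s + "5"  // `override` is required: A2.x now has a matching signature
    }

    object OverrideSketch extends App {
      println(new B2().x("4"))             // prints 45
    }
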
diff --git a/test/files/buildmanager/t2556_1/t2556_1.test b/test/files/buildmanager/t2556_1/t2556_1.test
deleted file mode 100644
index 6f3bd03..0000000
--- a/test/files/buildmanager/t2556_1/t2556_1.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2556_2/A.scala b/test/files/buildmanager/t2556_2/A.scala
deleted file mode 100644
index b8da5c8..0000000
--- a/test/files/buildmanager/t2556_2/A.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class A {
-  def x(i: Int) = i+"3"
-}
-
diff --git a/test/files/buildmanager/t2556_2/B.scala b/test/files/buildmanager/t2556_2/B.scala
deleted file mode 100644
index 80ff25d..0000000
--- a/test/files/buildmanager/t2556_2/B.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-class B extends A
-
diff --git a/test/files/buildmanager/t2556_2/C.scala b/test/files/buildmanager/t2556_2/C.scala
deleted file mode 100644
index 0ab13e3..0000000
--- a/test/files/buildmanager/t2556_2/C.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class C extends B {
-  def x(s: String) = s+"5"
-}
-
diff --git a/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala b/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala
deleted file mode 100644
index 4ac1045..0000000
--- a/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class A {
-  def x(i: String) = i+"3"
-}
-
diff --git a/test/files/buildmanager/t2556_2/t2556_2.check b/test/files/buildmanager/t2556_2/t2556_2.check
deleted file mode 100644
index cae4f72..0000000
--- a/test/files/buildmanager/t2556_2/t2556_2.check
+++ /dev/null
@@ -1,13 +0,0 @@
-builder > A.scala B.scala C.scala
-compiling Set(A.scala, B.scala, C.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]]
-invalidate C.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]]
-compiling Set(B.scala, C.scala)
-C.scala:2: error: overriding method x in class A of type (i: String)String;
- method x needs `override' modifier
-  def x(s: String) = s+"5"
-      ^
diff --git a/test/files/buildmanager/t2556_2/t2556_2.test b/test/files/buildmanager/t2556_2/t2556_2.test
deleted file mode 100644
index 9f31bb6..0000000
--- a/test/files/buildmanager/t2556_2/t2556_2.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala C.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2556_3/A.scala b/test/files/buildmanager/t2556_3/A.scala
deleted file mode 100644
index 089a05f..0000000
--- a/test/files/buildmanager/t2556_3/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-class A {
-  def x = 3
-}
-class B extends A
-
diff --git a/test/files/buildmanager/t2556_3/B.scala b/test/files/buildmanager/t2556_3/B.scala
deleted file mode 100644
index 0ec5ae4..0000000
--- a/test/files/buildmanager/t2556_3/B.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object E {
-  def main(args: Array[String]) =
-    println( (new C).x )
-}
-
diff --git a/test/files/buildmanager/t2556_3/C.scala b/test/files/buildmanager/t2556_3/C.scala
deleted file mode 100644
index 403df84..0000000
--- a/test/files/buildmanager/t2556_3/C.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-class C extends B
-
diff --git a/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala b/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala
deleted file mode 100644
index 21cb277..0000000
--- a/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-class A {
-  def x = 3
-}
-class B
-
diff --git a/test/files/buildmanager/t2556_3/t2556_3.check b/test/files/buildmanager/t2556_3/t2556_3.check
deleted file mode 100644
index 34f90f7..0000000
--- a/test/files/buildmanager/t2556_3/t2556_3.check
+++ /dev/null
@@ -1,18 +0,0 @@
-builder > A.scala B.scala C.scala
-compiling Set(A.scala, B.scala, C.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(), class B -> List(Changed(Class(B))[List((A,Object))]))
-invalidate C.scala because parents have changed [Changed(Class(B))[List((A,Object))]]
-invalidate B.scala because it references invalid (no longer inherited) definition [ParentChanged(Class(C))]
-compiling Set(B.scala, C.scala)
-B.scala:3: error: type mismatch;
- found   : C
- required: ?{def x: ?}
-Note that implicit conversions are not applicable because they are ambiguous:
- both method any2Ensuring in object Predef of type [A](x: A)Ensuring[A]
- and method any2ArrowAssoc in object Predef of type [A](x: A)ArrowAssoc[A]
- are possible conversion functions from C to ?{def x: ?}
-    println( (new C).x )
-              ^
diff --git a/test/files/buildmanager/t2556_3/t2556_3.test b/test/files/buildmanager/t2556_3/t2556_3.test
deleted file mode 100644
index 9f31bb6..0000000
--- a/test/files/buildmanager/t2556_3/t2556_3.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala C.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2557/A.scala b/test/files/buildmanager/t2557/A.scala
deleted file mode 100644
index 3be55f1..0000000
--- a/test/files/buildmanager/t2557/A.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait A {
-    def x = 3
-}
-
diff --git a/test/files/buildmanager/t2557/B.scala b/test/files/buildmanager/t2557/B.scala
deleted file mode 100644
index ea86a90..0000000
--- a/test/files/buildmanager/t2557/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait B extends A {
-    override def x = super.x * 2
-}
-
diff --git a/test/files/buildmanager/t2557/C.scala b/test/files/buildmanager/t2557/C.scala
deleted file mode 100644
index dd575ac..0000000
--- a/test/files/buildmanager/t2557/C.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait C extends A {
-    override def x = super.x + 5
-}
diff --git a/test/files/buildmanager/t2557/D.scala b/test/files/buildmanager/t2557/D.scala
deleted file mode 100644
index 4e662a8..0000000
--- a/test/files/buildmanager/t2557/D.scala
+++ /dev/null
@@ -1 +0,0 @@
-trait D extends C with B
diff --git a/test/files/buildmanager/t2557/E.scala b/test/files/buildmanager/t2557/E.scala
deleted file mode 100644
index 2aee552..0000000
--- a/test/files/buildmanager/t2557/E.scala
+++ /dev/null
@@ -1 +0,0 @@
-trait E extends D
diff --git a/test/files/buildmanager/t2557/F.scala b/test/files/buildmanager/t2557/F.scala
deleted file mode 100644
index e199670..0000000
--- a/test/files/buildmanager/t2557/F.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object F extends E {
-    def main(args: Array[String]) =
-        println(x)
-}
diff --git a/test/files/buildmanager/t2557/t2557.changes/D2.scala b/test/files/buildmanager/t2557/t2557.changes/D2.scala
deleted file mode 100644
index 67295f8..0000000
--- a/test/files/buildmanager/t2557/t2557.changes/D2.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-trait D extends B with C
-
diff --git a/test/files/buildmanager/t2557/t2557.check b/test/files/buildmanager/t2557/t2557.check
deleted file mode 100644
index 736ef36..0000000
--- a/test/files/buildmanager/t2557/t2557.check
+++ /dev/null
@@ -1,10 +0,0 @@
-builder > A.scala B.scala C.scala D.scala E.scala F.scala
-compiling Set(A.scala, B.scala, C.scala, D.scala, E.scala, F.scala)
-Changes: Map()
-builder > D.scala
-compiling Set(D.scala)
-Changes: Map(trait D -> List(Changed(Class(D))[List((Object,Object), (C,B), (B,C))]))
-invalidate E.scala because parents have changed [Changed(Class(D))[List((Object,Object), (C,B), (B,C))]]
-invalidate F.scala because parents have changed [Changed(Class(D))[List((Object,Object), (C,B), (B,C))]]
-compiling Set(E.scala, F.scala)
-Changes: Map(object F -> List(), trait E -> List())
diff --git a/test/files/buildmanager/t2557/t2557.test b/test/files/buildmanager/t2557/t2557.test
deleted file mode 100644
index 6b01030..0000000
--- a/test/files/buildmanager/t2557/t2557.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala C.scala D.scala E.scala F.scala
->>update D.scala=>D2.scala
->>compile D.scala
diff --git a/test/files/buildmanager/t2559/A.scala b/test/files/buildmanager/t2559/A.scala
deleted file mode 100644
index fb4f6e3..0000000
--- a/test/files/buildmanager/t2559/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-sealed trait A
-class B extends A
-class C extends A
-//class E extends A
-
diff --git a/test/files/buildmanager/t2559/D.scala b/test/files/buildmanager/t2559/D.scala
deleted file mode 100644
index 62dc542..0000000
--- a/test/files/buildmanager/t2559/D.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object D {
-  def x(a: A) = if (a.isInstanceOf[B] || a.isInstanceOf[C]) ()
-}
-
diff --git a/test/files/buildmanager/t2559/t2559.changes/A2.scala b/test/files/buildmanager/t2559/t2559.changes/A2.scala
deleted file mode 100644
index 8e90594..0000000
--- a/test/files/buildmanager/t2559/t2559.changes/A2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-sealed trait A
-class B extends A
-class C extends A
-class E extends A
-
diff --git a/test/files/buildmanager/t2559/t2559.check b/test/files/buildmanager/t2559/t2559.check
deleted file mode 100644
index 4d43838..0000000
--- a/test/files/buildmanager/t2559/t2559.check
+++ /dev/null
@@ -1,9 +0,0 @@
-builder > A.scala D.scala
-compiling Set(A.scala, D.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class B -> List(), class C -> List(), class E -> List(Changed(Class(A))[class E extends a sealed trait A]), trait A -> List())
-invalidate D.scala because it references changed class [Changed(Class(A))[class E extends a sealed trait A]]
-compiling Set(D.scala)
-Changes: Map(object D -> List())
diff --git a/test/files/buildmanager/t2559/t2559.test b/test/files/buildmanager/t2559/t2559.test
deleted file mode 100644
index b787c5b..0000000
--- a/test/files/buildmanager/t2559/t2559.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala D.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2562/A.scala b/test/files/buildmanager/t2562/A.scala
deleted file mode 100644
index 740cd1e..0000000
--- a/test/files/buildmanager/t2562/A.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object A
-{
-  def x0 = B.x0                                                                   
-  def x1 = B.x1
-  def x2 = B.x2
-  def x3 = 3
-}
diff --git a/test/files/buildmanager/t2562/B.scala b/test/files/buildmanager/t2562/B.scala
deleted file mode 100644
index a524e5c..0000000
--- a/test/files/buildmanager/t2562/B.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object B
-{
-  def x0 = A.x1                                                                   
-  def x1 = A.x2
-  def x2 = A.x3
-}
-
-
diff --git a/test/files/buildmanager/t2562/t2562.changes/A2.scala b/test/files/buildmanager/t2562/t2562.changes/A2.scala
deleted file mode 100644
index c560e1e..0000000
--- a/test/files/buildmanager/t2562/t2562.changes/A2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object A
-{
-  def x0 = B.x0                                                                   
-  def x1 = B.x1
-  def x2 = B.x2
-  def x3 = "3"
-}
-
diff --git a/test/files/buildmanager/t2562/t2562.check b/test/files/buildmanager/t2562/t2562.check
deleted file mode 100644
index 74575f2..0000000
--- a/test/files/buildmanager/t2562/t2562.check
+++ /dev/null
@@ -1,12 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(object A -> List(Changed(Definition(A.x3))[method x3 changed from ()Int to ()String flags: <method> <triedcooking>]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.x3))[method x3 changed from ()Int to ()String flags: <method> <triedcooking>]]
-compiling Set(B.scala)
-Changes: Map(object B -> List(Changed(Definition(B.x2))[method x2 changed from ()Int to ()String flags: <method> <triedcooking>]))
-invalidate A.scala because it references changed definition [Changed(Definition(B.x2))[method x2 changed from ()Int to ()String flags: <method> <triedcooking>]]
-compiling Set(A.scala, B.scala)
-Changes: Map(object A -> List(Changed(Definition(A.x0))[method x0 changed from ()Int to ()String flags: <method>], Changed(Definition(A.x1))[method x1 changed from ()Int to ()String flags: <method> <triedcooking>], Changed(Definition(A.x2))[method x2 changed from ()Int to ()String flags: <method> <triedcooking>]), object B -> List(Changed(Definition(B.x0))[method x0 changed from ()Int to ()String flags: <method>], Changed(Definition(B.x1))[method x1 changed from ()Int to ()String flags:  [...]
diff --git a/test/files/buildmanager/t2562/t2562.test b/test/files/buildmanager/t2562/t2562.test
deleted file mode 100644
index 6f3bd03..0000000
--- a/test/files/buildmanager/t2562/t2562.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2649/A.scala b/test/files/buildmanager/t2649/A.scala
deleted file mode 100644
index 86cc3f2..0000000
--- a/test/files/buildmanager/t2649/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object A {
-  def x(zz: Int, yy: Int) = yy - zz
-}
diff --git a/test/files/buildmanager/t2649/B.scala b/test/files/buildmanager/t2649/B.scala
deleted file mode 100644
index 26c8951..0000000
--- a/test/files/buildmanager/t2649/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object B {
-  def main(args: Array[String]): Unit =
-    println( A.x(zz = 3, yy = 4) )
-}
diff --git a/test/files/buildmanager/t2649/t2649.changes/A2.scala b/test/files/buildmanager/t2649/t2649.changes/A2.scala
deleted file mode 100644
index 9a6309f..0000000
--- a/test/files/buildmanager/t2649/t2649.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object A {
-  def x(yy: Int, zz: Int) = yy - zz
-}
-
diff --git a/test/files/buildmanager/t2649/t2649.check b/test/files/buildmanager/t2649/t2649.check
deleted file mode 100644
index d0f41f3..0000000
--- a/test/files/buildmanager/t2649/t2649.check
+++ /dev/null
@@ -1,9 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (zz: Int, yy: Int)Int to (yy: Int, zz: Int)Int flags: <method> <triedcooking>]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (zz: Int, yy: Int)Int to (yy: Int, zz: Int)Int flags: <method> <triedcooking>]]
-compiling Set(B.scala)
-Changes: Map(object B -> List())
diff --git a/test/files/buildmanager/t2649/t2649.test b/test/files/buildmanager/t2649/t2649.test
deleted file mode 100644
index 6f3bd03..0000000
--- a/test/files/buildmanager/t2649/t2649.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2650_1/A.scala b/test/files/buildmanager/t2650_1/A.scala
deleted file mode 100644
index 74714a3..0000000
--- a/test/files/buildmanager/t2650_1/A.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait A {
-    type S[_]
-}
-
diff --git a/test/files/buildmanager/t2650_1/B.scala b/test/files/buildmanager/t2650_1/B.scala
deleted file mode 100644
index 80f0e30..0000000
--- a/test/files/buildmanager/t2650_1/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait B extends A {
-    type F = S[Int]
-}
diff --git a/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala b/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala
deleted file mode 100644
index 2b8ead4..0000000
--- a/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait A {
-    type S
-}
diff --git a/test/files/buildmanager/t2650_1/t2650_1.check b/test/files/buildmanager/t2650_1/t2650_1.check
deleted file mode 100644
index 2f9dd12..0000000
--- a/test/files/buildmanager/t2650_1/t2650_1.check
+++ /dev/null
@@ -1,12 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-warning: there were 1 feature warning(s); re-run with -feature for details
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.S))[type S changed from A.this.S[_] to A.this.S flags: <deferred>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.S))[type S changed from A.this.S[_] to A.this.S flags: <deferred>]]
-compiling Set(B.scala)
-B.scala:2: error: B.this.S does not take type parameters
-    type F = S[Int]
-             ^
diff --git a/test/files/buildmanager/t2650_1/t2650_1.test b/test/files/buildmanager/t2650_1/t2650_1.test
deleted file mode 100644
index 6f3bd03..0000000
--- a/test/files/buildmanager/t2650_1/t2650_1.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2650_2/A.scala b/test/files/buildmanager/t2650_2/A.scala
deleted file mode 100644
index bcea634..0000000
--- a/test/files/buildmanager/t2650_2/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait A {
-  type S = Int
-}
diff --git a/test/files/buildmanager/t2650_2/B.scala b/test/files/buildmanager/t2650_2/B.scala
deleted file mode 100644
index 22a3a9a..0000000
--- a/test/files/buildmanager/t2650_2/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait B extends A {
-  def x: S
-  def y: Int = x
-}
diff --git a/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala b/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala
deleted file mode 100644
index 8274c1b..0000000
--- a/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait A {
-  type S = Long
-}
-
diff --git a/test/files/buildmanager/t2650_2/t2650_2.check b/test/files/buildmanager/t2650_2/t2650_2.check
deleted file mode 100644
index 53a0287..0000000
--- a/test/files/buildmanager/t2650_2/t2650_2.check
+++ /dev/null
@@ -1,14 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.S))[type S changed from A.this.S to A.this.S flags: ]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.S))[type S changed from A.this.S to A.this.S flags: ]]
-compiling Set(B.scala)
-B.scala:3: error: type mismatch;
- found   : B.this.S
-    (which expands to)  Long
- required: Int
-  def y: Int = x
-               ^
diff --git a/test/files/buildmanager/t2650_2/t2650_2.test b/test/files/buildmanager/t2650_2/t2650_2.test
deleted file mode 100644
index 6f3bd03..0000000
--- a/test/files/buildmanager/t2650_2/t2650_2.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2650_3/A.scala b/test/files/buildmanager/t2650_3/A.scala
deleted file mode 100644
index cd13843..0000000
--- a/test/files/buildmanager/t2650_3/A.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait A {
-  type T = Int
-  def x: T
-}
diff --git a/test/files/buildmanager/t2650_3/B.scala b/test/files/buildmanager/t2650_3/B.scala
deleted file mode 100644
index 46a8cf2..0000000
--- a/test/files/buildmanager/t2650_3/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object B {
-  def x(a: A): Int = a.x
-}
diff --git a/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala b/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala
deleted file mode 100644
index e5667b2..0000000
--- a/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait A {
-  type T = Long
-  def x: T
-}
diff --git a/test/files/buildmanager/t2650_3/t2650_3.check b/test/files/buildmanager/t2650_3/t2650_3.check
deleted file mode 100644
index 5c6326d..0000000
--- a/test/files/buildmanager/t2650_3/t2650_3.check
+++ /dev/null
@@ -1,14 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]]
-compiling Set(B.scala)
-B.scala:2: error: type mismatch;
- found   : a.T
-    (which expands to)  Long
- required: Int
-  def x(a: A): Int = a.x
-                       ^
diff --git a/test/files/buildmanager/t2650_3/t2650_3.test b/test/files/buildmanager/t2650_3/t2650_3.test
deleted file mode 100644
index 6f3bd03..0000000
--- a/test/files/buildmanager/t2650_3/t2650_3.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2650_4/A.scala b/test/files/buildmanager/t2650_4/A.scala
deleted file mode 100644
index b9a519e..0000000
--- a/test/files/buildmanager/t2650_4/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-trait A {
-  type T = Int
-  type T2 = T
-  def x: T2
-}
diff --git a/test/files/buildmanager/t2650_4/B.scala b/test/files/buildmanager/t2650_4/B.scala
deleted file mode 100644
index 46a8cf2..0000000
--- a/test/files/buildmanager/t2650_4/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object B {
-  def x(a: A): Int = a.x
-}
diff --git a/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala b/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala
deleted file mode 100644
index 0220e7b..0000000
--- a/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-trait A {
-  type T = Long
-  type T2 = T
-  def x: T2
-}
diff --git a/test/files/buildmanager/t2650_4/t2650_4.check b/test/files/buildmanager/t2650_4/t2650_4.check
deleted file mode 100644
index a4aeadd..0000000
--- a/test/files/buildmanager/t2650_4/t2650_4.check
+++ /dev/null
@@ -1,14 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]]
-compiling Set(B.scala)
-B.scala:2: error: type mismatch;
- found   : a.T2
-    (which expands to)  Long
- required: Int
-  def x(a: A): Int = a.x
-                       ^
diff --git a/test/files/buildmanager/t2650_4/t2650_4.test b/test/files/buildmanager/t2650_4/t2650_4.test
deleted file mode 100644
index 6f3bd03..0000000
--- a/test/files/buildmanager/t2650_4/t2650_4.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2651_2/A.scala b/test/files/buildmanager/t2651_2/A.scala
deleted file mode 100644
index d712f6f..0000000
--- a/test/files/buildmanager/t2651_2/A.scala
+++ /dev/null
@@ -1 +0,0 @@
-trait A[T]
diff --git a/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala b/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala
deleted file mode 100644
index 7fb573e..0000000
--- a/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala
+++ /dev/null
@@ -1 +0,0 @@
-trait A[S]
diff --git a/test/files/buildmanager/t2651_2/t2651_2.check b/test/files/buildmanager/t2651_2/t2651_2.check
deleted file mode 100644
index dd789b7..0000000
--- a/test/files/buildmanager/t2651_2/t2651_2.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List())
diff --git a/test/files/buildmanager/t2651_2/t2651_2.test b/test/files/buildmanager/t2651_2/t2651_2.test
deleted file mode 100644
index d061447..0000000
--- a/test/files/buildmanager/t2651_2/t2651_2.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2651_3/A.scala b/test/files/buildmanager/t2651_3/A.scala
deleted file mode 100644
index 14f9e46..0000000
--- a/test/files/buildmanager/t2651_3/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait A[T, S] {
-    def x: T
-}
diff --git a/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala b/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala
deleted file mode 100644
index 51bf27d..0000000
--- a/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait A[T, S] {
-    def x: S
-}
diff --git a/test/files/buildmanager/t2651_3/t2651_3.check b/test/files/buildmanager/t2651_3/t2651_3.check
deleted file mode 100644
index 2a60e3d..0000000
--- a/test/files/buildmanager/t2651_3/t2651_3.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()S flags: <method> <deferred>]))
diff --git a/test/files/buildmanager/t2651_3/t2651_3.test b/test/files/buildmanager/t2651_3/t2651_3.test
deleted file mode 100644
index d061447..0000000
--- a/test/files/buildmanager/t2651_3/t2651_3.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2651_4/A.scala b/test/files/buildmanager/t2651_4/A.scala
deleted file mode 100644
index 63f2a16..0000000
--- a/test/files/buildmanager/t2651_4/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-trait A[T, S] {
-  def x: T
-  def y(a: T)
-  def z[B <: T]
-}
diff --git a/test/files/buildmanager/t2651_4/B.scala b/test/files/buildmanager/t2651_4/B.scala
deleted file mode 100644
index b33dbde..0000000
--- a/test/files/buildmanager/t2651_4/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait B extends A[Int, String] {
-  def x = 3
-}
diff --git a/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala b/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala
deleted file mode 100644
index f155129..0000000
--- a/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-trait A[S, T] {
-  def x: T
-  def y(a: T)
-  def z[B <: T]
-}
diff --git a/test/files/buildmanager/t2651_4/t2651_4.check b/test/files/buildmanager/t2651_4/t2651_4.check
deleted file mode 100644
index 74e5d8f..0000000
--- a/test/files/buildmanager/t2651_4/t2651_4.check
+++ /dev/null
@@ -1,13 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()T flags: <method> <deferred> <triedcooking>], Changed(Definition(A.y))[method y changed from (a: T)Unit to (a: T)Unit flags: <method> <deferred>], Changed(Definition(A.z))[method z changed from [B <: T]()Unit to [B <: T]()Unit flags: <method> <deferred>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from ()T to ()T flags: <method> <deferred> <triedcooking>]]
-compiling Set(B.scala)
-B.scala:2: error: type mismatch;
- found   : Int(3)
- required: String
-  def x = 3
-          ^
diff --git a/test/files/buildmanager/t2651_4/t2651_4.test b/test/files/buildmanager/t2651_4/t2651_4.test
deleted file mode 100644
index 6f3bd03..0000000
--- a/test/files/buildmanager/t2651_4/t2651_4.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2653/A.scala b/test/files/buildmanager/t2653/A.scala
deleted file mode 100644
index fb17a15..0000000
--- a/test/files/buildmanager/t2653/A.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-class A[+T]
-
diff --git a/test/files/buildmanager/t2653/B.scala b/test/files/buildmanager/t2653/B.scala
deleted file mode 100644
index 8f55a88..0000000
--- a/test/files/buildmanager/t2653/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object B {
-  val a: A[Any] = new A[Int]
-}
diff --git a/test/files/buildmanager/t2653/t2653.changes/A2.scala b/test/files/buildmanager/t2653/t2653.changes/A2.scala
deleted file mode 100644
index 51d13cc..0000000
--- a/test/files/buildmanager/t2653/t2653.changes/A2.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-class A[T]
-
diff --git a/test/files/buildmanager/t2653/t2653.check b/test/files/buildmanager/t2653/t2653.check
deleted file mode 100644
index 3678152..0000000
--- a/test/files/buildmanager/t2653/t2653.check
+++ /dev/null
@@ -1,15 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Class(A))[ tparams: List((type T,type T))], Changed(Definition(A.<init>))[constructor A changed from ()A[T] to ()A[T] flags: <method> <triedcooking>]))
-invalidate B.scala because it references changed class [Changed(Class(A))[ tparams: List((type T,type T))]]
-compiling Set(B.scala)
-B.scala:2: error: type mismatch;
- found   : A[Int]
- required: A[Any]
-Note: Int <: Any, but class A is invariant in type T.
-You may wish to define T as +T instead. (SLS 4.5)
-  val a: A[Any] = new A[Int]
-                  ^
diff --git a/test/files/buildmanager/t2653/t2653.test b/test/files/buildmanager/t2653/t2653.test
deleted file mode 100644
index 6f3bd03..0000000
--- a/test/files/buildmanager/t2653/t2653.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2654/A.scala b/test/files/buildmanager/t2654/A.scala
deleted file mode 100644
index 75f396d..0000000
--- a/test/files/buildmanager/t2654/A.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-class A
-
diff --git a/test/files/buildmanager/t2654/B.scala b/test/files/buildmanager/t2654/B.scala
deleted file mode 100644
index a18aec3..0000000
--- a/test/files/buildmanager/t2654/B.scala
+++ /dev/null
@@ -1 +0,0 @@
-class B extends A
diff --git a/test/files/buildmanager/t2654/t2654.changes/A2.scala b/test/files/buildmanager/t2654/t2654.changes/A2.scala
deleted file mode 100644
index c302edb..0000000
--- a/test/files/buildmanager/t2654/t2654.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class A {
-  private def x = 5
-}
-
diff --git a/test/files/buildmanager/t2654/t2654.check b/test/files/buildmanager/t2654/t2654.check
deleted file mode 100644
index 68f6e8e..0000000
--- a/test/files/buildmanager/t2654/t2654.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List())
diff --git a/test/files/buildmanager/t2654/t2654.test b/test/files/buildmanager/t2654/t2654.test
deleted file mode 100644
index 6f3bd03..0000000
--- a/test/files/buildmanager/t2654/t2654.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2655/A.scala b/test/files/buildmanager/t2655/A.scala
deleted file mode 100644
index b2c54ac..0000000
--- a/test/files/buildmanager/t2655/A.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object A {
-  def x(i: => String) = ()
-}
-
diff --git a/test/files/buildmanager/t2655/B.scala b/test/files/buildmanager/t2655/B.scala
deleted file mode 100644
index 6c1918c..0000000
--- a/test/files/buildmanager/t2655/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object B {
-  val x = A.x("3")
-}
diff --git a/test/files/buildmanager/t2655/t2655.changes/A2.scala b/test/files/buildmanager/t2655/t2655.changes/A2.scala
deleted file mode 100644
index 0d6a7c6..0000000
--- a/test/files/buildmanager/t2655/t2655.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object A {
-  def x(i: Function0[String]) = ()
-}
-
diff --git a/test/files/buildmanager/t2655/t2655.check b/test/files/buildmanager/t2655/t2655.check
deleted file mode 100644
index 41ce65a..0000000
--- a/test/files/buildmanager/t2655/t2655.check
+++ /dev/null
@@ -1,13 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: <method> <triedcooking>]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: <method> <triedcooking>]]
-compiling Set(B.scala)
-B.scala:2: error: type mismatch;
- found   : String("3")
- required: () => String
-  val x = A.x("3")
-              ^
diff --git a/test/files/buildmanager/t2655/t2655.test b/test/files/buildmanager/t2655/t2655.test
deleted file mode 100644
index 6f3bd03..0000000
--- a/test/files/buildmanager/t2655/t2655.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2657/A.scala b/test/files/buildmanager/t2657/A.scala
deleted file mode 100644
index 2a6c62d..0000000
--- a/test/files/buildmanager/t2657/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class A {
-  implicit def y(i: Int): String = i.toString
-}
diff --git a/test/files/buildmanager/t2657/B.scala b/test/files/buildmanager/t2657/B.scala
deleted file mode 100644
index 7786989..0000000
--- a/test/files/buildmanager/t2657/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object B extends A {
-  val x: String = 3
-}
-
diff --git a/test/files/buildmanager/t2657/t2657.changes/A2.scala b/test/files/buildmanager/t2657/t2657.changes/A2.scala
deleted file mode 100644
index 7dc99d4..0000000
--- a/test/files/buildmanager/t2657/t2657.changes/A2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class A {
-  def y(i: Int): String = i.toString
-}
diff --git a/test/files/buildmanager/t2657/t2657.check b/test/files/buildmanager/t2657/t2657.check
deleted file mode 100644
index 7bff078..0000000
--- a/test/files/buildmanager/t2657/t2657.check
+++ /dev/null
@@ -1,14 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-warning: there were 1 feature warning(s); re-run with -feature for details
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: <method> implicit <triedcooking>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: <method> implicit <triedcooking>]]
-compiling Set(B.scala)
-B.scala:2: error: type mismatch;
- found   : Int(3)
- required: String
-  val x: String = 3
-                  ^
diff --git a/test/files/buildmanager/t2657/t2657.test b/test/files/buildmanager/t2657/t2657.test
deleted file mode 100644
index 6f3bd03..0000000
--- a/test/files/buildmanager/t2657/t2657.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2789/A.scala b/test/files/buildmanager/t2789/A.scala
deleted file mode 100644
index 08d5bc8..0000000
--- a/test/files/buildmanager/t2789/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-class A {
-  implicit def e: E = new E
-  def x(i: Int)(implicit y: E): String = ""
-}
-class E
diff --git a/test/files/buildmanager/t2789/B.scala b/test/files/buildmanager/t2789/B.scala
deleted file mode 100644
index dcefbee..0000000
--- a/test/files/buildmanager/t2789/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object B extends A {
-  val y = x(3)
-}
diff --git a/test/files/buildmanager/t2789/t2789.changes/A2.scala b/test/files/buildmanager/t2789/t2789.changes/A2.scala
deleted file mode 100644
index 4ba3814..0000000
--- a/test/files/buildmanager/t2789/t2789.changes/A2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-class A {
-  def e: E = new E
-  def x(i: Int)(implicit y: E): String = ""
-}
-class E
diff --git a/test/files/buildmanager/t2789/t2789.check b/test/files/buildmanager/t2789/t2789.check
deleted file mode 100644
index 066561a..0000000
--- a/test/files/buildmanager/t2789/t2789.check
+++ /dev/null
@@ -1,11 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Definition(A.e))[method e changed from ()E to ()E flags: <method> implicit <triedcooking>]), class E -> List())
-invalidate B.scala because inherited method changed [Changed(Definition(A.e))[method e changed from ()E to ()E flags: <method> implicit <triedcooking>]]
-compiling Set(B.scala)
-B.scala:2: error: could not find implicit value for parameter y: E
-  val y = x(3)
-           ^
diff --git a/test/files/buildmanager/t2789/t2789.test b/test/files/buildmanager/t2789/t2789.test
deleted file mode 100644
index 6f3bd03..0000000
--- a/test/files/buildmanager/t2789/t2789.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2790/A.scala b/test/files/buildmanager/t2790/A.scala
deleted file mode 100644
index 6e9c1a9..0000000
--- a/test/files/buildmanager/t2790/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object A {
-  def x(f: String, g: Int): Int = g
-  def x(f: Int, g: Int = 3): Int = g
-}
-
diff --git a/test/files/buildmanager/t2790/B.scala b/test/files/buildmanager/t2790/B.scala
deleted file mode 100644
index 441055c..0000000
--- a/test/files/buildmanager/t2790/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object B {
-  val y = A.x(5)
-}
-
diff --git a/test/files/buildmanager/t2790/t2790.changes/A2.scala b/test/files/buildmanager/t2790/t2790.changes/A2.scala
deleted file mode 100644
index 704ef4e..0000000
--- a/test/files/buildmanager/t2790/t2790.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object A {
-  def x(f: String, g: Int = 3): Int = g
-  def x(f: Int, g: Int): Int = g
-}
diff --git a/test/files/buildmanager/t2790/t2790.check b/test/files/buildmanager/t2790/t2790.check
deleted file mode 100644
index 13d61da..0000000
--- a/test/files/buildmanager/t2790/t2790.check
+++ /dev/null
@@ -1,13 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(object A -> List(Added(Definition(A.x)), Changed(Definition(A.x))[value x changed from (f: String, g: Int)Int to (f: String, g: Int)Int <and> (f: Int, g: Int)Int flags: <method>]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.x))[value x changed from (f: String, g: Int)Int to (f: String, g: Int)Int <and> (f: Int, g: Int)Int flags: <method>]]
-compiling Set(B.scala)
-B.scala:2: error: type mismatch;
- found   : Int(5)
- required: String
-  val y = A.x(5)
-              ^
diff --git a/test/files/buildmanager/t2790/t2790.test b/test/files/buildmanager/t2790/t2790.test
deleted file mode 100644
index 6f3bd03..0000000
--- a/test/files/buildmanager/t2790/t2790.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2792/A1.scala b/test/files/buildmanager/t2792/A1.scala
deleted file mode 100644
index 96dc0ef..0000000
--- a/test/files/buildmanager/t2792/A1.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object A {
-  val x = new C
-}
diff --git a/test/files/buildmanager/t2792/A2.scala b/test/files/buildmanager/t2792/A2.scala
deleted file mode 100644
index e55e681..0000000
--- a/test/files/buildmanager/t2792/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object B {
-  import A.x.y
-  val z = y
-}
diff --git a/test/files/buildmanager/t2792/A3.scala b/test/files/buildmanager/t2792/A3.scala
deleted file mode 100644
index cd083cd..0000000
--- a/test/files/buildmanager/t2792/A3.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class C {
-  val y = 4
-}
diff --git a/test/files/buildmanager/t2792/t2792.changes/A1_1.scala b/test/files/buildmanager/t2792/t2792.changes/A1_1.scala
deleted file mode 100644
index 00ee05f..0000000
--- a/test/files/buildmanager/t2792/t2792.changes/A1_1.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object A {
-  var x = new C
-}
diff --git a/test/files/buildmanager/t2792/t2792.check b/test/files/buildmanager/t2792/t2792.check
deleted file mode 100644
index 00a2b83..0000000
--- a/test/files/buildmanager/t2792/t2792.check
+++ /dev/null
@@ -1,14 +0,0 @@
-builder > A1.scala A2.scala A3.scala
-compiling Set(A1.scala, A2.scala, A3.scala)
-Changes: Map()
-builder > A1.scala
-compiling Set(A1.scala)
-Changes: Map(object A -> List(Added(Definition(A.x_$eq)), Changed(Definition(A.x))[value x changed to variable x]))
-invalidate A2.scala because it references changed definition [Changed(Definition(A.x))[value x changed to variable x]]
-compiling Set(A2.scala)
-A2.scala:2: error: stable identifier required, but A.x found.
-  import A.x.y
-           ^
-A2.scala:3: error: not found: value y
-  val z = y
-          ^
diff --git a/test/files/buildmanager/t2792/t2792.test b/test/files/buildmanager/t2792/t2792.test
deleted file mode 100644
index f199950..0000000
--- a/test/files/buildmanager/t2792/t2792.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A1.scala A2.scala A3.scala
->>update A1.scala=>A1_1.scala
->>compile A1.scala
diff --git a/test/files/buildmanager/t3045/A.java b/test/files/buildmanager/t3045/A.java
deleted file mode 100644
index d1acb00..0000000
--- a/test/files/buildmanager/t3045/A.java
+++ /dev/null
@@ -1,7 +0,0 @@
-public interface A {
-  public class C implements A {}
-}
-
-class B {
-  static class C {}
-}
diff --git a/test/files/buildmanager/t3045/t3045.check b/test/files/buildmanager/t3045/t3045.check
deleted file mode 100644
index 5e4e71e..0000000
--- a/test/files/buildmanager/t3045/t3045.check
+++ /dev/null
@@ -1,3 +0,0 @@
-builder > A.java
-compiling Set(A.java)
-Changes: Map()
diff --git a/test/files/buildmanager/t3045/t3045.test b/test/files/buildmanager/t3045/t3045.test
deleted file mode 100644
index 6cf7e35..0000000
--- a/test/files/buildmanager/t3045/t3045.test
+++ /dev/null
@@ -1 +0,0 @@
->>compile A.java
diff --git a/test/files/buildmanager/t3054/bar/Bar.java b/test/files/buildmanager/t3054/bar/Bar.java
deleted file mode 100644
index e1b056d..0000000
--- a/test/files/buildmanager/t3054/bar/Bar.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package bar;
-import foo.Foo$;
-
-
-public class Bar {
-  void bar() { Foo$.MODULE$.foo(); }
-}
diff --git a/test/files/buildmanager/t3054/foo/Foo.scala b/test/files/buildmanager/t3054/foo/Foo.scala
deleted file mode 100644
index c0fcd97..0000000
--- a/test/files/buildmanager/t3054/foo/Foo.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package foo
-
-class Foo {
-  def foo() = println("foo")
-}
diff --git a/test/files/buildmanager/t3054/t3054.check b/test/files/buildmanager/t3054/t3054.check
deleted file mode 100644
index 97cca88..0000000
--- a/test/files/buildmanager/t3054/t3054.check
+++ /dev/null
@@ -1,3 +0,0 @@
-builder > bar/Bar.java foo/Foo.scala
-compiling Set(bar/Bar.java, foo/Foo.scala)
-Changes: Map()
diff --git a/test/files/buildmanager/t3054/t3054.test b/test/files/buildmanager/t3054/t3054.test
deleted file mode 100644
index 903df24..0000000
--- a/test/files/buildmanager/t3054/t3054.test
+++ /dev/null
@@ -1 +0,0 @@
->>compile bar/Bar.java foo/Foo.scala
diff --git a/test/files/buildmanager/t3059/A.scala b/test/files/buildmanager/t3059/A.scala
deleted file mode 100644
index 0dd25f6..0000000
--- a/test/files/buildmanager/t3059/A.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class A extends B {
-    private def getBar = List(1,2,3)
-    lazy val bar: List[Int] = getBar
-}
diff --git a/test/files/buildmanager/t3059/B.scala b/test/files/buildmanager/t3059/B.scala
deleted file mode 100644
index 4659687..0000000
--- a/test/files/buildmanager/t3059/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-abstract class B {
-    private def getFoo = 12
-    lazy val foo: Int = getFoo
-}
diff --git a/test/files/buildmanager/t3059/t3059.check b/test/files/buildmanager/t3059/t3059.check
deleted file mode 100644
index 4a8076a..0000000
--- a/test/files/buildmanager/t3059/t3059.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List())
\ No newline at end of file
diff --git a/test/files/buildmanager/t3059/t3059.test b/test/files/buildmanager/t3059/t3059.test
deleted file mode 100644
index 6f3749d..0000000
--- a/test/files/buildmanager/t3059/t3059.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile A.scala B.scala
->>compile A.scala
\ No newline at end of file
diff --git a/test/files/buildmanager/t3133/A.java b/test/files/buildmanager/t3133/A.java
deleted file mode 100644
index c4e7f3a..0000000
--- a/test/files/buildmanager/t3133/A.java
+++ /dev/null
@@ -1,7 +0,0 @@
-public class A {
-  class Foo {} 
-    
-  public A(Foo a) {}
-    
-  private void bar(Foo z) {}
-}
diff --git a/test/files/buildmanager/t3133/t3133.check b/test/files/buildmanager/t3133/t3133.check
deleted file mode 100644
index 5e4e71e..0000000
--- a/test/files/buildmanager/t3133/t3133.check
+++ /dev/null
@@ -1,3 +0,0 @@
-builder > A.java
-compiling Set(A.java)
-Changes: Map()
diff --git a/test/files/buildmanager/t3133/t3133.test b/test/files/buildmanager/t3133/t3133.test
deleted file mode 100644
index 6cf7e35..0000000
--- a/test/files/buildmanager/t3133/t3133.test
+++ /dev/null
@@ -1 +0,0 @@
->>compile A.java
diff --git a/test/files/buildmanager/t3140/A.scala b/test/files/buildmanager/t3140/A.scala
deleted file mode 100644
index f776804..0000000
--- a/test/files/buildmanager/t3140/A.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-class As {
-  trait A {
-      def foo(parents: String): A = {
-        (() => parents)
-        null
-    }
-  }
-}
diff --git a/test/files/buildmanager/t3140/t3140.check b/test/files/buildmanager/t3140/t3140.check
deleted file mode 100644
index 008d5a9..0000000
--- a/test/files/buildmanager/t3140/t3140.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class As -> List(), object As$A$class -> List(), trait As$A -> List())
diff --git a/test/files/buildmanager/t3140/t3140.test b/test/files/buildmanager/t3140/t3140.test
deleted file mode 100644
index 392e0d3..0000000
--- a/test/files/buildmanager/t3140/t3140.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile A.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t4215/A.scala b/test/files/buildmanager/t4215/A.scala
deleted file mode 100644
index 9db40b0..0000000
--- a/test/files/buildmanager/t4215/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-class A {
-  def B() {
-    object C
-  }
-}
diff --git a/test/files/buildmanager/t4215/t4215.check b/test/files/buildmanager/t4215/t4215.check
deleted file mode 100644
index d9ec9a7..0000000
--- a/test/files/buildmanager/t4215/t4215.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(), object A$C$2 -> List())
diff --git a/test/files/buildmanager/t4215/t4215.test b/test/files/buildmanager/t4215/t4215.test
deleted file mode 100644
index 392e0d3..0000000
--- a/test/files/buildmanager/t4215/t4215.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile A.scala
->>compile A.scala
diff --git a/test/files/continuations-neg/function0.check b/test/files/continuations-neg/function0.check
deleted file mode 100644
index 0a66763..0000000
--- a/test/files/continuations-neg/function0.check
+++ /dev/null
@@ -1,6 +0,0 @@
-function0.scala:11: error: type mismatch;
- found   : () => Int @scala.util.continuations.cpsParam[Int,Int]
- required: () => Int
-    val g: () => Int = f
-                       ^
-one error found
diff --git a/test/files/continuations-neg/function0.scala b/test/files/continuations-neg/function0.scala
deleted file mode 100644
index 6ef0d98..0000000
--- a/test/files/continuations-neg/function0.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def main(args: Array[String]): Any = {
-    
-    val f = () => shift { k: (Int=>Int) => k(7) }
-    val g: () => Int = f
-    
-    println(reset(g()))
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-neg/function2.check b/test/files/continuations-neg/function2.check
deleted file mode 100644
index 4b1a622..0000000
--- a/test/files/continuations-neg/function2.check
+++ /dev/null
@@ -1,6 +0,0 @@
-function2.scala:11: error: type mismatch;
- found   : () => Int
- required: () => Int @scala.util.continuations.cpsParam[Int,Int]
-    val g: () => Int @cps[Int] = f
-                                 ^
-one error found
diff --git a/test/files/continuations-neg/function2.scala b/test/files/continuations-neg/function2.scala
deleted file mode 100644
index 402c6dc..0000000
--- a/test/files/continuations-neg/function2.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def main(args: Array[String]): Any = {
-    
-    val f = () => 7
-    val g: () => Int @cps[Int] = f
-    
-    println(reset(g()))
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-neg/function3.check b/test/files/continuations-neg/function3.check
deleted file mode 100644
index 4705ad9..0000000
--- a/test/files/continuations-neg/function3.check
+++ /dev/null
@@ -1,6 +0,0 @@
-function3.scala:10: error: type mismatch;
- found   : Int @scala.util.continuations.cpsParam[Int,Int]
- required: Int
-    val g: () => Int = () => shift { k: (Int=>Int) => k(7) }
-                                   ^
-one error found
diff --git a/test/files/continuations-neg/function3.scala b/test/files/continuations-neg/function3.scala
deleted file mode 100644
index c4acc4c..0000000
--- a/test/files/continuations-neg/function3.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def main(args: Array[String]): Any = {
-    
-    val g: () => Int = () => shift { k: (Int=>Int) => k(7) }
-    
-    println(reset(g()))
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-neg/infer2.check b/test/files/continuations-neg/infer2.check
deleted file mode 100644
index 59eb670..0000000
--- a/test/files/continuations-neg/infer2.check
+++ /dev/null
@@ -1,4 +0,0 @@
-infer2.scala:14: error: illegal answer type modification: scala.util.continuations.cpsParam[String,Int] andThen scala.util.continuations.cpsParam[String,Int]
-    test { sym(); sym() }    
-         ^
-one error found
diff --git a/test/files/continuations-neg/infer2.scala b/test/files/continuations-neg/infer2.scala
deleted file mode 100644
index eaffbc1..0000000
--- a/test/files/continuations-neg/infer2.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def test(x: => Int @cpsParam[String,Int]) = 7
-  
-  def sym() = shift { k: (Int => String) => 9 }
-  
-  
-  def main(args: Array[String]): Any = {
-    test { sym(); sym() }    
-  }
-  
-}
-
-
diff --git a/test/files/continuations-neg/lazy.check b/test/files/continuations-neg/lazy.check
deleted file mode 100644
index 3c46054..0000000
--- a/test/files/continuations-neg/lazy.check
+++ /dev/null
@@ -1,4 +0,0 @@
-lazy.scala:6: error: implementation restriction: cps annotations not allowed on lazy value definitions
-    lazy val x = shift((k:Unit=>Unit)=>k())
-             ^
-one error found
diff --git a/test/files/continuations-neg/lazy.scala b/test/files/continuations-neg/lazy.scala
deleted file mode 100644
index a2fad83..0000000
--- a/test/files/continuations-neg/lazy.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-import scala.util.continuations._ 
- 
-object Test { 
-
-  def foo() = {
-    lazy val x = shift((k:Unit=>Unit)=>k())
-    println(x)
-  }
- 
-  def main(args: Array[String]) { 
-    reset {
-      foo()
-    }
-  } 
-	
-}
\ No newline at end of file
diff --git a/test/files/continuations-neg/t1929.check b/test/files/continuations-neg/t1929.check
deleted file mode 100644
index b04a5b9..0000000
--- a/test/files/continuations-neg/t1929.check
+++ /dev/null
@@ -1,6 +0,0 @@
-t1929.scala:8: error: type mismatch;
- found   : Int @scala.util.continuations.cpsParam[String,String] @scala.util.continuations.cpsSynth
- required: Int @scala.util.continuations.cpsParam[Int,String]
-    reset {
-          ^
-one error found
diff --git a/test/files/continuations-neg/t1929.scala b/test/files/continuations-neg/t1929.scala
deleted file mode 100644
index 02eda91..0000000
--- a/test/files/continuations-neg/t1929.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
-  def main(args : Array[String]) {
-    reset {
-      println("up")
-      val x = shift((k:Int=>String) => k(8) + k(2))
-      println("down " + x)
-      val y = shift((k:Int=>String) => k(3))
-      println("down2 " + y)
-      y + x
-  	}
-  }
-}
\ No newline at end of file
diff --git a/test/files/continuations-neg/t2285.check b/test/files/continuations-neg/t2285.check
deleted file mode 100644
index d5dff6a..0000000
--- a/test/files/continuations-neg/t2285.check
+++ /dev/null
@@ -1,6 +0,0 @@
-t2285.scala:9: error: type mismatch;
- found   : Int @scala.util.continuations.cpsParam[String,String] @scala.util.continuations.cpsSynth
- required: Int @scala.util.continuations.cpsParam[Int,String]
-  def foo() = reset { bar(); 7 }
-                    ^
-one error found
diff --git a/test/files/continuations-neg/t2285.scala b/test/files/continuations-neg/t2285.scala
deleted file mode 100644
index b906dc4..0000000
--- a/test/files/continuations-neg/t2285.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-object Test {
-
-  def bar() = shift { k: (String => String) => k("1") }
- 
-  def foo() = reset { bar(); 7 }
-        
-}
diff --git a/test/files/continuations-neg/t2949.check b/test/files/continuations-neg/t2949.check
deleted file mode 100644
index dd97688..0000000
--- a/test/files/continuations-neg/t2949.check
+++ /dev/null
@@ -1,6 +0,0 @@
-t2949.scala:13: error: type mismatch;
- found   : Int
- required: ? @scala.util.continuations.cpsParam[List[?],Any]
-    x * y
-      ^
-one error found
diff --git a/test/files/continuations-neg/t2949.scala b/test/files/continuations-neg/t2949.scala
deleted file mode 100644
index 2d426a4..0000000
--- a/test/files/continuations-neg/t2949.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-object Test {
-
-  def reflect[A,B](xs : List[A]) = shift{ xs.flatMap[B, List[B]] }
-  def reify[A, B](x : A @cpsParam[List[A], B]) = reset{ List(x) }
-
-  def main(args: Array[String]): Unit = println(reify {
-    val x = reflect[Int, Int](List(1,2,3)) 
-    val y = reflect[Int, Int](List(2,4,8))
-    x * y
-  })
-}
diff --git a/test/files/continuations-neg/t3628.check b/test/files/continuations-neg/t3628.check
deleted file mode 100644
index 6e39055..0000000
--- a/test/files/continuations-neg/t3628.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t3628.scala:4: error: not found: type Actor
-  val impl: Actor = actor {
-            ^
-one error found
diff --git a/test/files/continuations-neg/t3628.scala b/test/files/continuations-neg/t3628.scala
deleted file mode 100644
index 3fdd32a..0000000
--- a/test/files/continuations-neg/t3628.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.actors.Actor._
-
-object Test {
-  val impl: Actor = actor {
-    loop {
-      react { 
-        case 1 => impl ! 2
-      }
-    }
-  }
-}
diff --git a/test/files/continuations-neg/t3718.check b/test/files/continuations-neg/t3718.check
deleted file mode 100644
index 659104c..0000000
--- a/test/files/continuations-neg/t3718.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t3718.scala:2: error: cannot cps-transform malformed (possibly in shift/reset placement) expression
-  scala.util.continuations.reset((_: Any).##)
-                                          ^
-one error found
diff --git a/test/files/continuations-neg/t3718.scala b/test/files/continuations-neg/t3718.scala
deleted file mode 100644
index a0fcb9d..0000000
--- a/test/files/continuations-neg/t3718.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test {
-  scala.util.continuations.reset((_: Any).##)
-}
diff --git a/test/files/continuations-neg/t5314-missing-result-type.check b/test/files/continuations-neg/t5314-missing-result-type.check
deleted file mode 100644
index 341e580..0000000
--- a/test/files/continuations-neg/t5314-missing-result-type.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t5314-missing-result-type.scala:6: error: method bar has return statement; needs result type
-  def bar(x:Int) = return foo(x)
-                   ^
-one error found
diff --git a/test/files/continuations-neg/t5314-missing-result-type.scala b/test/files/continuations-neg/t5314-missing-result-type.scala
deleted file mode 100644
index d7c5043..0000000
--- a/test/files/continuations-neg/t5314-missing-result-type.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.util.continuations._
-
-object Test extends App {
-  def foo(x:Int): Int @cps[Int] = x
-
-  def bar(x:Int) = return foo(x)
-
-  reset {
-    val res = bar(8)
-    println(res)
-    res
-  }
-}
diff --git a/test/files/continuations-neg/t5314-npe.check b/test/files/continuations-neg/t5314-npe.check
deleted file mode 100644
index b5f024a..0000000
--- a/test/files/continuations-neg/t5314-npe.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t5314-npe.scala:2: error: method bar has return statement; needs result type
-  def bar(x:Int) = { return x; x }  // NPE
-                     ^
-one error found
diff --git a/test/files/continuations-neg/t5314-npe.scala b/test/files/continuations-neg/t5314-npe.scala
deleted file mode 100644
index 2b5966e..0000000
--- a/test/files/continuations-neg/t5314-npe.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends App {
-  def bar(x:Int) = { return x; x }  // NPE
-}
diff --git a/test/files/continuations-neg/t5314-return-reset.check b/test/files/continuations-neg/t5314-return-reset.check
deleted file mode 100644
index 4c817ae..0000000
--- a/test/files/continuations-neg/t5314-return-reset.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t5314-return-reset.scala:14: error: return expression not allowed, since method calls CPS method
-      if (rnd.nextInt(100) > 50) return 5 // not allowed, since method is calling `reset`
-                                 ^
-one error found
diff --git a/test/files/continuations-neg/t5314-return-reset.scala b/test/files/continuations-neg/t5314-return-reset.scala
deleted file mode 100644
index df9d58e..0000000
--- a/test/files/continuations-neg/t5314-return-reset.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-import scala.util.continuations._
-import scala.util.Random
-
-object Test extends App {
-  val rnd = new Random
-
-  def foo(x: Int): Int @cps[Int] = shift { k => k(x) }
-
-  def bar(x: Int): Int @cps[Int] = return foo(x)
-
-  def caller(): Int = {
-    val v: Int = reset {
-      val res: Int = bar(8)
-      if (rnd.nextInt(100) > 50) return 5 // not allowed, since method is calling `reset`
-      42
-    }
-    v
-  }
-
-  caller()
-}
diff --git a/test/files/continuations-neg/t5314-type-error.check b/test/files/continuations-neg/t5314-type-error.check
deleted file mode 100644
index e66c9d8..0000000
--- a/test/files/continuations-neg/t5314-type-error.check
+++ /dev/null
@@ -1,6 +0,0 @@
-t5314-type-error.scala:7: error: type mismatch;
- found   : Int @scala.util.continuations.cpsParam[Int,Int]
- required: Int @scala.util.continuations.cpsParam[String,String]
-  def bar(x:Int): Int @cps[String] = return foo(x)
-                                               ^
-one error found
diff --git a/test/files/continuations-neg/t5314-type-error.scala b/test/files/continuations-neg/t5314-type-error.scala
deleted file mode 100644
index e36ce6c..0000000
--- a/test/files/continuations-neg/t5314-type-error.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.util.continuations._
-
-object Test extends App {
-  def foo(x:Int): Int @cps[Int] = shift { k => k(x) }
-
-  // should be a type error
-  def bar(x:Int): Int @cps[String] = return foo(x)
-
-  def caller(): Unit = {
-    val v: String = reset {
-      val res: Int = bar(8)
-      "hello"
-    }
-  }
-
-  caller()
-}
diff --git a/test/files/continuations-neg/t5445.check b/test/files/continuations-neg/t5445.check
deleted file mode 100644
index eb2943b..0000000
--- a/test/files/continuations-neg/t5445.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t5445.scala:4: error: cps annotations not allowed on by-value parameters or value definitions
-  def foo(block: Unit @suspendable ): Unit @suspendable = {}
-          ^
-one error found
diff --git a/test/files/continuations-neg/t5445.scala b/test/files/continuations-neg/t5445.scala
deleted file mode 100644
index cb6f8f6..0000000
--- a/test/files/continuations-neg/t5445.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.util.continuations._
-
-object Test {
-  def foo(block: Unit @suspendable ): Unit @suspendable = {}
-}
diff --git a/test/files/continuations-neg/trycatch2.check b/test/files/continuations-neg/trycatch2.check
deleted file mode 100644
index 5ff2838..0000000
--- a/test/files/continuations-neg/trycatch2.check
+++ /dev/null
@@ -1,7 +0,0 @@
-trycatch2.scala:11: error: only simple cps types allowed in try/catch blocks (found: Int @scala.util.continuations.cpsParam[String,Int])
-  def foo1 = try {
-             ^
-trycatch2.scala:19: error: only simple cps types allowed in try/catch blocks (found: Int @scala.util.continuations.cpsParam[String,Int])
-  def foo2 = try {
-             ^
-two errors found
diff --git a/test/files/continuations-neg/trycatch2.scala b/test/files/continuations-neg/trycatch2.scala
deleted file mode 100644
index d329a3b..0000000
--- a/test/files/continuations-neg/trycatch2.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-object Test {
-
-  def fatal[T]: T = throw new Exception
-  def cpsIntStringInt = shift { k:(Int=>String) => k(3); 7 }
-  def cpsIntIntString = shift { k:(Int=>Int) => k(3); "7" }
-  
-  def foo1 = try {
-    fatal[Int]
-    cpsIntStringInt
-  } catch {
-    case ex: Throwable =>
-      cpsIntStringInt
-  }
-
-  def foo2 = try {
-    fatal[Int]
-    cpsIntStringInt
-  } catch {
-    case ex: Throwable =>
-      cpsIntStringInt
-  }
-
-
-  def main(args: Array[String]): Unit = {
-    println(reset { foo1; "3" })
-    println(reset { foo2; "3" })
-  }
-
-}
diff --git a/test/files/continuations-run/basics.check b/test/files/continuations-run/basics.check
deleted file mode 100755
index 54c059f..0000000
--- a/test/files/continuations-run/basics.check
+++ /dev/null
@@ -1,2 +0,0 @@
-28
-28
\ No newline at end of file
diff --git a/test/files/continuations-run/basics.scala b/test/files/continuations-run/basics.scala
deleted file mode 100755
index b63710b..0000000
--- a/test/files/continuations-run/basics.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
-
-  def m0() = {
-    shift((k:Int => Int) => k(k(7))) * 2
-  }
-
-  def m1() = {
-    2 * shift((k:Int => Int) => k(k(7)))
-  }
-
-  def main(args: Array[String]) = {
-    
-    println(reset(m0()))
-    println(reset(m1()))
-    
-  }
-  
-}
diff --git a/test/files/continuations-run/function1.scala b/test/files/continuations-run/function1.scala
deleted file mode 100644
index fbd413e..0000000
--- a/test/files/continuations-run/function1.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def main(args: Array[String]): Any = {
-    
-    val f = () => shift { k: (Int=>Int) => k(7) }
-    val g: () => Int @cps[Int] = f
-    
-    println(reset(g()))
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/function4.check b/test/files/continuations-run/function4.check
deleted file mode 100644
index c793025..0000000
--- a/test/files/continuations-run/function4.check
+++ /dev/null
@@ -1 +0,0 @@
-7
\ No newline at end of file
diff --git a/test/files/continuations-run/function4.scala b/test/files/continuations-run/function4.scala
deleted file mode 100644
index 2ccd0b4..0000000
--- a/test/files/continuations-run/function4.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def main(args: Array[String]): Any = {
-    
-    val g: () => Int @cps[Int] = () => shift { k: (Int=>Int) => k(7) }
-    
-    println(reset(g()))
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/function5.check b/test/files/continuations-run/function5.check
deleted file mode 100644
index c793025..0000000
--- a/test/files/continuations-run/function5.check
+++ /dev/null
@@ -1 +0,0 @@
-7
\ No newline at end of file
diff --git a/test/files/continuations-run/function5.scala b/test/files/continuations-run/function5.scala
deleted file mode 100644
index fe528e1..0000000
--- a/test/files/continuations-run/function5.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def main(args: Array[String]): Any = {
-    
-    val g: () => Int @cps[Int] = () => 7
-    
-    println(reset(g()))
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/function6.check b/test/files/continuations-run/function6.check
deleted file mode 100644
index c793025..0000000
--- a/test/files/continuations-run/function6.check
+++ /dev/null
@@ -1 +0,0 @@
-7
\ No newline at end of file
diff --git a/test/files/continuations-run/function6.scala b/test/files/continuations-run/function6.scala
deleted file mode 100644
index 54a6ffc..0000000
--- a/test/files/continuations-run/function6.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def main(args: Array[String]): Any = {
-    
-    val g: PartialFunction[Int, Int @cps[Int]] = { case x => 7 }
-    
-    println(reset(g(2)))
-    
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/ifelse0.check b/test/files/continuations-run/ifelse0.check
deleted file mode 100644
index f8bc798..0000000
--- a/test/files/continuations-run/ifelse0.check
+++ /dev/null
@@ -1,2 +0,0 @@
-10
-9
\ No newline at end of file
diff --git a/test/files/continuations-run/ifelse0.scala b/test/files/continuations-run/ifelse0.scala
deleted file mode 100644
index 2facab4..0000000
--- a/test/files/continuations-run/ifelse0.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def test(x:Int) = if (x <= 7)
-    shift { k: (Int=>Int) => k(k(k(x))) }
-  else
-    shift { k: (Int=>Int) => k(x) }
-  
-  def main(args: Array[String]): Any = {
-    println(reset(1 + test(7)))
-    println(reset(1 + test(8)))
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/ifelse1.check b/test/files/continuations-run/ifelse1.check
deleted file mode 100644
index 86a3fbc..0000000
--- a/test/files/continuations-run/ifelse1.check
+++ /dev/null
@@ -1,4 +0,0 @@
-10
-9
-8
-11
\ No newline at end of file
diff --git a/test/files/continuations-run/ifelse1.scala b/test/files/continuations-run/ifelse1.scala
deleted file mode 100644
index c624b84..0000000
--- a/test/files/continuations-run/ifelse1.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def test1(x:Int) = if (x <= 7)
-    shift { k: (Int=>Int) => k(k(k(x))) }
-  else
-    x
-  
-  def test2(x:Int) = if (x <= 7)
-    x
-  else
-    shift { k: (Int=>Int) => k(k(k(x))) }
-
-  def main(args: Array[String]): Any = {
-    println(reset(1 + test1(7)))
-    println(reset(1 + test1(8)))
-    println(reset(1 + test2(7)))
-    println(reset(1 + test2(8)))
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/ifelse2.check b/test/files/continuations-run/ifelse2.check
deleted file mode 100644
index f97a95b..0000000
--- a/test/files/continuations-run/ifelse2.check
+++ /dev/null
@@ -1,4 +0,0 @@
-abort
-()
-alive
-()
diff --git a/test/files/continuations-run/ifelse2.scala b/test/files/continuations-run/ifelse2.scala
deleted file mode 100644
index 506acc4..0000000
--- a/test/files/continuations-run/ifelse2.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def test(x:Int) = if (x <= 7)
-    shift { k: (Unit=>Unit) => println("abort") }
-  
-  def main(args: Array[String]): Any = {
-    println(reset{ test(7); println("alive") })
-    println(reset{ test(8); println("alive") })
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/ifelse3.check b/test/files/continuations-run/ifelse3.check
deleted file mode 100644
index 95b562c..0000000
--- a/test/files/continuations-run/ifelse3.check
+++ /dev/null
@@ -1,2 +0,0 @@
-6
-9
diff --git a/test/files/continuations-run/ifelse3.scala b/test/files/continuations-run/ifelse3.scala
deleted file mode 100644
index 54566a4..0000000
--- a/test/files/continuations-run/ifelse3.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def util(x: Boolean) = shift { k: (Boolean=>Int) => k(x) }
- 
-  def test(x:Int) = if (util(x <= 7))
-    x - 1
-  else
-    x + 1
-    
-  
-  def main(args: Array[String]): Any = {
-    println(reset(test(7)))
-    println(reset(test(8)))
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/ifelse4.check b/test/files/continuations-run/ifelse4.check
deleted file mode 100644
index 2545dd4..0000000
--- a/test/files/continuations-run/ifelse4.check
+++ /dev/null
@@ -1,4 +0,0 @@
-10
-10
-10
-10
diff --git a/test/files/continuations-run/ifelse4.scala b/test/files/continuations-run/ifelse4.scala
deleted file mode 100644
index 8360375..0000000
--- a/test/files/continuations-run/ifelse4.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-import scala.util.continuations._
-
-object Test {
-  def sh(x1:Int) = shift( (k: Int => Int) => k(k(k(x1))))
-  
-  def testA(x1: Int): Int @cps[Int] = {
-      sh(x1)
-      if (x1==42) x1 else sh(x1)
-  }
-
-  def testB(x1: Int): Int @cps[Int] = {
-      if (sh(x1)==43) x1 else x1
-  }
-  
-  def testC(x1: Int): Int @cps[Int] = {
-      sh(x1)
-      if (sh(x1)==44) x1 else x1
-  }
-
-  def testD(x1: Int): Int @cps[Int] = {
-      sh(x1)
-      if (sh(x1)==45) x1 else sh(x1)
-  }
-
-  def main(args: Array[String]): Any = {
-    println(reset(1 + testA(7)))
-    println(reset(1 + testB(9)))
-    println(reset(1 + testC(9)))
-    println(reset(1 + testD(7)))
-  }
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/infer1.scala b/test/files/continuations-run/infer1.scala
deleted file mode 100644
index 1082250..0000000
--- a/test/files/continuations-run/infer1.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def test(x: => Int @cpsParam[String,Int]) = 7
-  
-  def test2() = {
-    val x = shift { k: (Int => String) => 9 }
-    x
-  }
-
-  def test3(x: => Int @cpsParam[Int,Int]) = 7
-
-  
-  def util() = shift { k: (String => String) => "7" }
-  
-  def main(args: Array[String]): Any = {
-    test { shift { k: (Int => String) => 9 } }
-    test { shift { k: (Int => String) => 9 }; 2 }
-//    test { shift { k: (Int => String) => 9 }; util() }  <-- doesn't work
-    test { shift { k: (Int => String) => 9 }; util(); 2 }
-
-
-    test { shift { k: (Int => String) => 9 }; { test3(0); 2 } }
-
-    test3 { { test3(0); 2 } }
-
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/match0.check b/test/files/continuations-run/match0.check
deleted file mode 100644
index f8bc798..0000000
--- a/test/files/continuations-run/match0.check
+++ /dev/null
@@ -1,2 +0,0 @@
-10
-9
\ No newline at end of file
diff --git a/test/files/continuations-run/match0.scala b/test/files/continuations-run/match0.scala
deleted file mode 100644
index b65d343..0000000
--- a/test/files/continuations-run/match0.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def test(x:Int) = x match {
-    case 7 => shift { k: (Int=>Int) => k(k(k(x))) }
-    case 8 => shift { k: (Int=>Int) => k(x) }
-  }
-  
-  def main(args: Array[String]): Any = {
-    println(reset(1 + test(7)))
-    println(reset(1 + test(8)))
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/match1.check b/test/files/continuations-run/match1.check
deleted file mode 100644
index 73053d3..0000000
--- a/test/files/continuations-run/match1.check
+++ /dev/null
@@ -1,2 +0,0 @@
-10
-9
diff --git a/test/files/continuations-run/match1.scala b/test/files/continuations-run/match1.scala
deleted file mode 100644
index 20671f2..0000000
--- a/test/files/continuations-run/match1.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def test(x:Int) = x match {
-    case 7 => shift { k: (Int=>Int) => k(k(k(x))) }
-    case _ => x
-  }
-  
-  def main(args: Array[String]): Any = {
-    println(reset(1 + test(7)))
-    println(reset(1 + test(8)))
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/match2.check b/test/files/continuations-run/match2.check
deleted file mode 100644
index cbf9134..0000000
--- a/test/files/continuations-run/match2.check
+++ /dev/null
@@ -1,2 +0,0 @@
-B
-B
diff --git a/test/files/continuations-run/match2.scala b/test/files/continuations-run/match2.scala
deleted file mode 100644
index 5092ce3..0000000
--- a/test/files/continuations-run/match2.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def test1() = {
-    val (a, b) = shift { k: (((String,String)) => String) => k("A","B") }
-    b
-  }
-
-  case class Elem[T,U](a: T, b: U)
-  
-  def test2() = {
-    val Elem(a,b) = shift { k: (Elem[String,String] => String) => k(Elem("A","B")) }
-    b
-  }
-
-  
-  def main(args: Array[String]): Unit = {
-    println(reset(test1()))
-    println(reset(test2()))
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/patvirt.check b/test/files/continuations-run/patvirt.check
deleted file mode 100644
index b5fa014..0000000
--- a/test/files/continuations-run/patvirt.check
+++ /dev/null
@@ -1,2 +0,0 @@
-10
-11
diff --git a/test/files/continuations-run/patvirt.scala b/test/files/continuations-run/patvirt.scala
deleted file mode 100644
index 5b4d312..0000000
--- a/test/files/continuations-run/patvirt.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-import scala.util.continuations._
-
-object Test {
-  def sh(x1:Int) = shift( (k: Int => Int) => k(k(k(x1))))
-  
-  def test(x1: Int) = {
-    val o7 = {
-      val o6 = {
-        val o3 = 
-          if (7 == x1) Some(x1)
-          else None
-
-        if (o3.isEmpty) None
-        else Some(sh(x1))
-      }
-      if (o6.isEmpty) {
-        val o5 =
-          if (8 == x1) Some(x1)
-          else None
-
-        if (o5.isEmpty) None
-        else Some(sh(x1))
-      } else o6
-    }
-    o7.get
-  }
-
-  def main(args: Array[String]): Any = {
-    println(reset(1 + test(7)))
-    println(reset(1 + test(8)))
-  }
-}
diff --git a/test/files/continuations-run/shift-pct.check b/test/files/continuations-run/shift-pct.check
deleted file mode 100644
index fb190e7..0000000
--- a/test/files/continuations-run/shift-pct.check
+++ /dev/null
@@ -1,25 +0,0 @@
-d = 1, d2 = 1.0, pct = 1.000
-d = 2, d2 = 4.0, pct = 0.500
-d = 3, d2 = 9.0, pct = 0.333
-d = 4, d2 = 16.0, pct = 0.250
-d = 5, d2 = 25.0, pct = 0.200
-d = 6, d2 = 36.0, pct = 0.167
-d = 7, d2 = 49.0, pct = 0.143
-d = 8, d2 = 64.0, pct = 0.125
-d = 9, d2 = 81.0, pct = 0.111
-d = 10, d2 = 100.0, pct = 0.100
-d = 11, d2 = 121.0, pct = 0.091
-d = 12, d2 = 144.0, pct = 0.083
-d = 13, d2 = 169.0, pct = 0.077
-d = 14, d2 = 196.0, pct = 0.071
-d = 15, d2 = 225.0, pct = 0.067
-d = 16, d2 = 256.0, pct = 0.063
-d = 17, d2 = 289.0, pct = 0.059
-d = 18, d2 = 324.0, pct = 0.056
-d = 19, d2 = 361.0, pct = 0.053
-d = 20, d2 = 400.0, pct = 0.050
-d = 21, d2 = 441.0, pct = 0.048
-d = 22, d2 = 484.0, pct = 0.045
-d = 23, d2 = 529.0, pct = 0.043
-d = 24, d2 = 576.0, pct = 0.042
-d = 25, d2 = 625.0, pct = 0.040
diff --git a/test/files/continuations-run/shift-pct.scala b/test/files/continuations-run/shift-pct.scala
deleted file mode 100644
index 7ef9922..0000000
--- a/test/files/continuations-run/shift-pct.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-import scala.util.continuations._
-
-object Test {
-  abstract class IfReturnRepro {
-    def s1: Double @cpsParam[Any, Unit]
-    def s2: Double @cpsParam[Any, Unit]
-
-    def p(i: Int): Double @cpsParam[Unit, Any] = {
-      val px = s1
-      val pct = if (px > 100) px else px / s2
-      println("pct = %.3f".format(pct))
-      pct
-    }
-  }
-
-  def main(args: Array[String]) : Unit = {
-    var d: Double = 0d
-    def d2 = d * d
-
-    val irr = new IfReturnRepro {
-      def s1 = shift(f => f(d))
-      def s2 = shift(f => f(d2))
-    }
-    1 to 25 foreach { i =>
-      d = i
-      print("d = " + i + ", d2 = " + d2 + ", ")
-      run(irr p i)
-    }
-  }
-}
diff --git a/test/files/continuations-run/t1807.check b/test/files/continuations-run/t1807.check
deleted file mode 100644
index 56a6051..0000000
--- a/test/files/continuations-run/t1807.check
+++ /dev/null
@@ -1 +0,0 @@
-1
\ No newline at end of file
diff --git a/test/files/continuations-run/t1807.scala b/test/files/continuations-run/t1807.scala
deleted file mode 100644
index 278b3a9..0000000
--- a/test/files/continuations-run/t1807.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
-  def main(args: Array[String]): Unit = {
-    val z = reset {
-      val f: (() => Int @cps[Int]) = () => 1
-      f()
-    }
-    println(z)
-  }
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/t1808.scala b/test/files/continuations-run/t1808.scala
deleted file mode 100644
index 125c7c1..0000000
--- a/test/files/continuations-run/t1808.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
-  def main(args: Array[String]): Unit = {
-    reset0 { 0 }
-  }
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/t1820.scala b/test/files/continuations-run/t1820.scala
deleted file mode 100644
index 893ddab..0000000
--- a/test/files/continuations-run/t1820.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
-  def shifted: Unit @suspendable = shift { (k: Unit => Unit) => () }
-  def test1(b: => Boolean) = {
-    reset {
-      if (b) shifted
-    }
-  }
-  def main(args: Array[String]) = test1(true)
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/t1821.check b/test/files/continuations-run/t1821.check
deleted file mode 100644
index f7b7611..0000000
--- a/test/files/continuations-run/t1821.check
+++ /dev/null
@@ -1,4 +0,0 @@
-()
-()
-()
-()
\ No newline at end of file
diff --git a/test/files/continuations-run/t1821.scala b/test/files/continuations-run/t1821.scala
deleted file mode 100644
index 0d5fb55..0000000
--- a/test/files/continuations-run/t1821.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
-  def suspended[A](x: A): A @suspendable = x
-  def test1[A](x: A): A @suspendable = suspended(x) match { case x => x }
-  def test2[A](x: List[A]): A @suspendable = suspended(x) match { case List(x) => x }
-
-  def test3[A](x: A): A @suspendable = x match { case x => x }
-  def test4[A](x: List[A]): A @suspendable = x match { case List(x) => x }
-
-  def main(args: Array[String]) = {
-    println(reset(test1()))
-    println(reset(test2(List(()))))
-    println(reset(test3()))
-    println(reset(test4(List(()))))
-  }
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/t2864.check b/test/files/continuations-run/t2864.check
deleted file mode 100644
index d411bb7..0000000
--- a/test/files/continuations-run/t2864.check
+++ /dev/null
@@ -1 +0,0 @@
-400
diff --git a/test/files/continuations-run/t2864.scala b/test/files/continuations-run/t2864.scala
deleted file mode 100644
index 7a2579e..0000000
--- a/test/files/continuations-run/t2864.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-import scala.util.continuations._
-object Test {
-
-  def double[B](n : Int)(k : Int => B) : B = k(n * 2)
-
-  def main(args : Array[String]) {
-     reset {
-       val result1 = shift(double[Unit](100))
-       val result2 = shift(double[Unit](result1))
-       println(result2)
-     }
-  }
-
-  def foo: Int @cps[Int] = {
-    val a0 = shift((k:Int=>Int) => k(0))
-    val x0 = 2
-    val a1 = shift((k:Int=>Int) => x0)
-    0
-  }
-
-/*
-  def bar: ControlContext[Int,Int,Int] = {
-    shiftR((k:Int=>Int) => k(0)).flatMap { a0 =>
-    val x0 = 2
-    shiftR((k:Int=>Int) => x0).map { a1 =>
-    0
-    }}
-  }
-*/
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/t2934.check b/test/files/continuations-run/t2934.check
deleted file mode 100644
index a925865..0000000
--- a/test/files/continuations-run/t2934.check
+++ /dev/null
@@ -1 +0,0 @@
-List(3, 4, 5)
diff --git a/test/files/continuations-run/t2934.scala b/test/files/continuations-run/t2934.scala
deleted file mode 100644
index a1b8ca9..0000000
--- a/test/files/continuations-run/t2934.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.util.continuations._
-
-object Test {
-  def main(args : Array[String]) {
-   println(reset {
-     val x = shift(List(1,2,3).flatMap[Int, List[Int]])
-     List(x + 2)
-   })
-  }
-}
diff --git a/test/files/continuations-run/t3199.check b/test/files/continuations-run/t3199.check
deleted file mode 100644
index a065247..0000000
--- a/test/files/continuations-run/t3199.check
+++ /dev/null
@@ -1 +0,0 @@
-Right(7)
diff --git a/test/files/continuations-run/t3199.scala b/test/files/continuations-run/t3199.scala
deleted file mode 100644
index 7b54793..0000000
--- a/test/files/continuations-run/t3199.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-import _root_.scala.collection.Seq 
-import _root_.scala.util.control.Exception 
-import _root_.scala.util.continuations._ 
- 
-object Test { 
- 
-  trait AbstractResource[+R <: AnyRef]  { 
-    def reflect[B] : R @cpsParam[B,Either[Throwable, B]] = shift(acquireFor) 
-    def acquireFor[B](f :  R => B) : Either[Throwable, B] = { 
-	      import Exception._ 
-	      catching(List(classOf[Throwable]) : _*) either (f(null.asInstanceOf[R])) 
-	    } 
-	  }   
-	 
-	  def main(args: Array[String]) : Unit = { 
-	     val x = new AbstractResource[String] { } 
-	     val result = x.acquireFor( x =>  7 ) 
-	     println(result) 
-	  } 
-	} 
diff --git a/test/files/continuations-run/t3199b.scala b/test/files/continuations-run/t3199b.scala
deleted file mode 100644
index 2122c96..0000000
--- a/test/files/continuations-run/t3199b.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object Test {
-  
-  def test() = {
-    java.util.Arrays.asList(Array(1,2,3):_*)
-  }
-  
-  def main(args: Array[String]) = {
-    println(test())
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/t3223.check b/test/files/continuations-run/t3223.check
deleted file mode 100644
index ec63514..0000000
--- a/test/files/continuations-run/t3223.check
+++ /dev/null
@@ -1 +0,0 @@
-9
diff --git a/test/files/continuations-run/t3223.scala b/test/files/continuations-run/t3223.scala
deleted file mode 100644
index efed1ff..0000000
--- a/test/files/continuations-run/t3223.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import scala.util.continuations._
-object Test {
-
-  def foo(x:Int) = {
-    try { 
-      throw new Exception
-      shiftUnit0[Int,Int](7)
-    } catch {
-      case ex =>  
-        val g = (a:Int)=>a
-        9
-    }
-  }
-  
-  def main(args: Array[String]) {
-    println(reset(foo(0)))
-  }
-
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/t3225.check b/test/files/continuations-run/t3225.check
deleted file mode 100644
index df1a8a9..0000000
--- a/test/files/continuations-run/t3225.check
+++ /dev/null
@@ -1,12 +0,0 @@
-8
-8
-9
-9
-8
-9
-8
-8
-9
-9
-8
-9
diff --git a/test/files/continuations-run/t3225.scala b/test/files/continuations-run/t3225.scala
deleted file mode 100644
index 5b6259c..0000000
--- a/test/files/continuations-run/t3225.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
-
-  class Bla {
-    val x = 8
-    def y[T] = 9
-  }
-
-/*
-  def bla[A] = shift { k:(Bla=>A) => k(new Bla) }
-*/
-
-  def bla1 = shift { k:(Bla=>Bla) => k(new Bla) }
-  def bla2 = shift { k:(Bla=>Int) => k(new Bla) }
-
-  def fooA = bla2.x
-  def fooB[T] = bla2.y[T]
-
-  def testMono() = {
-    println(reset(bla1).x)
-    println(reset(bla2.x))
-    println(reset(bla2.y[Int]))
-    println(reset(bla2.y))
-    println(reset(fooA))
-    println(reset(fooB))
-    0
-  }
-
-  def blaX[A] = shift { k:(Bla=>A) => k(new Bla) }
-  
-  def fooX[A] = blaX[A].x
-  def fooY[A] = blaX[A].y[A]
-  
-  def testPoly() = {
-    println(reset(blaX[Bla]).x)
-    println(reset(blaX[Int].x))
-    println(reset(blaX[Int].y[Int]))
-    println(reset(blaX[Int].y))
-    println(reset(fooX[Int]))
-    println(reset(fooY[Int]))
-    0
-  }
-
-
-  // TODO: check whether this also applies to a::shift { k => ... }
-
-  def main(args: Array[String]) = {
-    testMono()
-    testPoly()
-  }
-  
-}
diff --git a/test/files/continuations-run/t3501.check b/test/files/continuations-run/t3501.check
deleted file mode 100644
index 08adcfe..0000000
--- a/test/files/continuations-run/t3501.check
+++ /dev/null
@@ -1,5 +0,0 @@
-42
-42
-42
-42
-42
diff --git a/test/files/continuations-run/t3501.scala b/test/files/continuations-run/t3501.scala
deleted file mode 100644
index c43b332..0000000
--- a/test/files/continuations-run/t3501.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-import scala.util.continuations._
-
-object Test {
-  def capture(): Int @suspendable = 42
-
-  def main(args: Array[String]): Unit = reset {
-    var i = 0
-    while (i < 5) {
-      i += 1
-      val y = capture()
-      val s = y
-      println(s)
-    }
-  }
-}
diff --git a/test/files/continuations-run/t5314-2.check b/test/files/continuations-run/t5314-2.check
deleted file mode 100644
index 35b3c93..0000000
--- a/test/files/continuations-run/t5314-2.check
+++ /dev/null
@@ -1,5 +0,0 @@
-8
-hi
-8
-from try
-8
diff --git a/test/files/continuations-run/t5314-2.scala b/test/files/continuations-run/t5314-2.scala
deleted file mode 100644
index 8a896de..0000000
--- a/test/files/continuations-run/t5314-2.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-import scala.util.continuations._
-
-class ReturnRepro { 
-  def s1: Int @cps[Any] = shift { k => k(5) } 
-  def caller = reset { println(p(3)) }
-  def caller2 = reset { println(p2(3)) }
-  def caller3 = reset { println(p3(3)) }
-
-  def p(i: Int): Int @cps[Any] = { 
-    val v= s1 + 3 
-    return v 
-  } 
-
-  def p2(i: Int): Int @cps[Any] = {
-    val v = s1 + 3
-    if (v > 0) {
-      println("hi")
-      return v
-    } else {
-      println("hi")
-      return 8
-    }
-  }
-
-  def p3(i: Int): Int @cps[Any] = {
-    val v = s1 + 3
-    try {
-      println("from try")
-      return v
-    } catch {
-      case e: Exception =>
-        println("from catch")
-        return 7
-    }
-  }
-
-}
-
-object Test extends App {
-  val repro = new ReturnRepro
-  repro.caller
-  repro.caller2
-  repro.caller3
-}
diff --git a/test/files/continuations-run/t5314-3.check b/test/files/continuations-run/t5314-3.check
deleted file mode 100644
index 71489f0..0000000
--- a/test/files/continuations-run/t5314-3.check
+++ /dev/null
@@ -1,4 +0,0 @@
-enter return expr
-8
-hi
-8
diff --git a/test/files/continuations-run/t5314-3.scala b/test/files/continuations-run/t5314-3.scala
deleted file mode 100644
index 62c547f..0000000
--- a/test/files/continuations-run/t5314-3.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-import scala.util.continuations._
-
-class ReturnRepro { 
-  def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) } 
-  def caller = reset { println(p(3)) }
-  def caller2 = reset { println(p2(3)) }
-
-  def p(i: Int): Int @cpsParam[Unit, Any] = { 
-    val v= s1 + 3 
-    return { println("enter return expr"); v }
-  } 
-
-  def p2(i: Int): Int @cpsParam[Unit, Any] = {
-    val v = s1 + 3
-    if (v > 0) {
-      return { println("hi"); v }
-    } else {
-      return { println("hi"); 8 }
-    }
-  }
-}
-
-object Test extends App {
-  val repro = new ReturnRepro
-  repro.caller
-  repro.caller2
-}
diff --git a/test/files/continuations-run/t5314-with-if.check b/test/files/continuations-run/t5314-with-if.check
deleted file mode 100644
index 7f8f011..0000000
--- a/test/files/continuations-run/t5314-with-if.check
+++ /dev/null
@@ -1 +0,0 @@
-7
diff --git a/test/files/continuations-run/t5314-with-if.scala b/test/files/continuations-run/t5314-with-if.scala
deleted file mode 100644
index 5840199..0000000
--- a/test/files/continuations-run/t5314-with-if.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.util.continuations._
-
-object Test extends App {
-
-  def foo(x:Int): Int @cps[Int] = 7
-
-  def bar(x:Int): Int @cps[Int] = {
-    val v = foo(x)
-    if (v > 0)
-      return v
-    else
-      return 10
-  }
-
-  println(reset { bar(10) })
-
-}
diff --git a/test/files/continuations-run/t5314.check b/test/files/continuations-run/t5314.check
deleted file mode 100644
index 4b35d8e..0000000
--- a/test/files/continuations-run/t5314.check
+++ /dev/null
@@ -1,8 +0,0 @@
-7
-7
-7
-8
-8
-hi
-8
-8
diff --git a/test/files/continuations-run/t5314.scala b/test/files/continuations-run/t5314.scala
deleted file mode 100644
index d611016..0000000
--- a/test/files/continuations-run/t5314.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-import scala.util.continuations._
-
-class ReturnRepro { 
-  def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) } 
-  def caller = reset { println(p(3)) }
-  def caller2 = reset { println(p2(3)) }
-
-  def p(i: Int): Int @cpsParam[Unit, Any] = { 
-    val v= s1 + 3 
-    return v 
-  } 
-
-  def p2(i: Int): Int @cpsParam[Unit, Any] = {
-    val v = s1 + 3
-    if (v > 0) {
-      println("hi")
-      return v
-    } else {
-      println("hi")
-      return 8
-    }
-  }
-}
-
-object Test extends App {
-  def foo(x:Int): Int @cps[Int] = shift { k => k(x) }
-
-  def bar(x:Int): Int @cps[Int] = return foo(x)
-
-  def nocps(x: Int): Int = { return x; x }
-
-  def foo2(x:Int): Int @cps[Int] = 7
-  def bar2(x:Int): Int @cps[Int] = { foo2(x); return 7 }
-  def bar3(x:Int): Int @cps[Int] = { foo2(x); if (x == 7) return 7 else return foo2(x) }
-  def bar4(x:Int): Int @cps[Int] = { foo2(x); if (x == 7) return 7 else foo2(x) }
-  def bar5(x:Int): Int @cps[Int] = { foo2(x); if (x == 7) return 7 else 8 }
-  println(reset { bar2(10) })
-  println(reset { bar3(10) })
-  println(reset { bar4(10) })
-  println(reset { bar5(10) })
-  
-  /* original test case */
-  val repro = new ReturnRepro
-  repro.caller
-  repro.caller2
-
-  reset {
-    val res = bar(8)
-    println(res)
-    res
-  }
-}
diff --git a/test/files/continuations-run/t5472.check b/test/files/continuations-run/t5472.check
deleted file mode 100644
index d42e80c..0000000
--- a/test/files/continuations-run/t5472.check
+++ /dev/null
@@ -1 +0,0 @@
-List(23, 23)
diff --git a/test/files/continuations-run/t5472.scala b/test/files/continuations-run/t5472.scala
deleted file mode 100644
index 3e3c76b..0000000
--- a/test/files/continuations-run/t5472.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-import scala.annotation._
-import scala.util.continuations._
-import java.util.concurrent.atomic._
-
-object Test {
-  def main(args: Array[String]) {
-    val map = Map("foo" -> 1, "bar" -> 2)
-    reset {
-      val mapped =
-        for {
-          (location, accessors) <- new ContinuationizedParallelIterable(map)
-        } yield {
-          shiftUnit0[Int, Unit](23)
-        }
-      println(mapped.toList)
-    }
-  }
-}
-
-final class ContinuationizedParallelIterable[+A](protected val underline: Iterable[A]) {
-  def toList = underline.toList.sortBy(_.toString)
-
-  final def filter(p: A => Boolean @suspendable): ContinuationizedParallelIterable[A] @suspendable =
-    shift(
-      new AtomicInteger(1) with ((ContinuationizedParallelIterable[A] => Unit) => Unit) {
-        private val results = new AtomicReference[List[A]](Nil)
-
-        @tailrec
-        private def add(element: A) {
-          val old = results.get
-          if (!results.compareAndSet(old, element :: old)) {
-            add(element)
-          }
-        }
-
-        override final def apply(continue: ContinuationizedParallelIterable[A] => Unit) {
-          for (element <- underline) {
-            super.incrementAndGet()
-            reset {
-              val pass = p(element)
-              if (pass) {
-                add(element)
-              }
-              if (super.decrementAndGet() == 0) {
-                continue(new ContinuationizedParallelIterable(results.get))
-              }
-            }
-          }
-          if (super.decrementAndGet() == 0) {
-            continue(new ContinuationizedParallelIterable(results.get))
-          }
-        }
-      })
-
-  final def foreach[U](f: A => U @suspendable): Unit @suspendable =
-    shift(
-      new AtomicInteger(1) with ((Unit => Unit) => Unit) {
-        override final def apply(continue: Unit => Unit) {
-          for (element <- underline) {
-            super.incrementAndGet()
-            reset {
-              f(element)
-              if (super.decrementAndGet() == 0) {
-                continue()
-              }
-            }
-          }
-          if (super.decrementAndGet() == 0) {
-            continue()
-          }
-        }
-      })
-
-  final def map[B: Manifest](f: A => B @suspendable): ContinuationizedParallelIterable[B] @suspendable =
-    shift(
-      new AtomicInteger(underline.size) with ((ContinuationizedParallelIterable[B] => Unit) => Unit) {
-        override final def apply(continue: ContinuationizedParallelIterable[B] => Unit) {
-          val results = new Array[B](super.get)
-          for ((element, i) <- underline.view zipWithIndex) {
-            reset {
-              val result = f(element)
-              results(i) = result
-              if (super.decrementAndGet() == 0) {
-                continue(new ContinuationizedParallelIterable(results))
-              }
-            }
-          }
-        }
-      })
-}
diff --git a/test/files/continuations-run/t5506.check b/test/files/continuations-run/t5506.check
deleted file mode 100644
index 38b76c6..0000000
--- a/test/files/continuations-run/t5506.check
+++ /dev/null
@@ -1,7 +0,0 @@
-List(1, 2, 3)
-List(1, 2, 3)
-List(1, 2, 3)
-List(1, 2, 3)
-List(1, 2, 3)
-List(1, 2, 3)
-List(1, 2, 3)
diff --git a/test/files/continuations-run/t5506.scala b/test/files/continuations-run/t5506.scala
deleted file mode 100644
index 2b5c111..0000000
--- a/test/files/continuations-run/t5506.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-import scala.util.continuations._
-
-object Test {
-
-def g: List[Int] @suspendable = List(1,2,3)
-
-def fp10: List[Int] @suspendable = {  
-g.map(x => x)
-}
-
-def fp11: List[Int] @suspendable = {  
-val z = g.map(x => x)
-z
-}
-
-
-def fp12: List[Int] @suspendable = {
-val z = List(1,2,3)
-z.map(x => x)
-}
-
-
-
-def fp20: List[Int] @suspendable = {
-g.map[Int,List[Int]](x => x)
-}
-
-
-def fp21: List[Int] @suspendable = {
-val z = g.map[Int,List[Int]](x => x)
-z
-}
-
-def fp22: List[Int] @suspendable = {
-val z = g.map[Int,List[Int]](x => x)(List.canBuildFrom[Int])
-z
-}
-
-def fp23: List[Int] @suspendable = {
-val z = g.map(x => x)(List.canBuildFrom[Int])
-z
-}
-
-
-def main(args: Array[String]) = {
-  reset {
-    println(fp10)
-    println(fp11)
-    println(fp12)
-    
-    println(fp20)
-    println(fp21)
-    println(fp22)
-    println(fp23)
-  }
-}
-
-}
diff --git a/test/files/continuations-run/t5538.check b/test/files/continuations-run/t5538.check
deleted file mode 100644
index 457721d..0000000
--- a/test/files/continuations-run/t5538.check
+++ /dev/null
@@ -1 +0,0 @@
-Future(Future(Future(Future(Future(List(1, 2, 3, 4, 5))))))
diff --git a/test/files/continuations-run/t5538.scala b/test/files/continuations-run/t5538.scala
deleted file mode 100644
index 42f8163..0000000
--- a/test/files/continuations-run/t5538.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-import scala.util.continuations._
-import scala.collection.generic.CanBuildFrom
-
-object Test {
-
-  class ExecutionContext
-
-  implicit def defaultExecutionContext = new ExecutionContext
-
-  case class Future[+T](x:T) {
-    final def map[A](f: T => A): Future[A] = new Future[A](f(x))
-    final def flatMap[A](f: T => Future[A]): Future[A] = f(x)
-  }
-
-  class PromiseStream[A] {
-    override def toString = xs.toString
-    
-    var xs: List[A] = Nil
-  
-    final def +=(elem: A): this.type = { xs :+= elem; this }
-
-    final def ++=(elem: Traversable[A]): this.type = { xs ++= elem; this }
-  
-    final def <<(elem: Future[A]): PromiseStream[A] @cps[Future[Any]] =
-      shift { cont: (PromiseStream[A] => Future[Any]) => elem map (a => cont(this += a)) }
-
-    final def <<(elem1: Future[A], elem2: Future[A], elems: Future[A]*): PromiseStream[A] @cps[Future[Any]] =
-      shift { cont: (PromiseStream[A] => Future[Any]) => Future.flow(this << elem1 << elem2 <<< Future.sequence(elems.toSeq)) map cont }
-
-    final def <<<(elems: Traversable[A]): PromiseStream[A] @cps[Future[Any]] =
-      shift { cont: (PromiseStream[A] => Future[Any]) => cont(this ++= elems) }
-
-    final def <<<(elems: Future[Traversable[A]]): PromiseStream[A] @cps[Future[Any]] =
-      shift { cont: (PromiseStream[A] => Future[Any]) => elems map (as => cont(this ++= as)) }
-  }
-
-  object Future {
-
-    def sequence[A, M[_] <: Traversable[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] =
-      new Future(in.asInstanceOf[Traversable[Future[A]]].map((f:Future[A])=>f.x)(cbf.asInstanceOf[CanBuildFrom[Traversable[Future[A]], A, M[A]]]))
-    
-    def flow[A](body: => A @cps[Future[Any]])(implicit executor: ExecutionContext): Future[A] = reset(Future(body)).asInstanceOf[Future[A]]
-
-  }
-
-  def main(args: Array[String]) = {
-    val p = new PromiseStream[Int]
-    println(Future.flow(p << (Future(1), Future(2), Future(3), Future(4), Future(5))))
-  }
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/trycatch0.check b/test/files/continuations-run/trycatch0.check
deleted file mode 100644
index 3680690..0000000
--- a/test/files/continuations-run/trycatch0.check
+++ /dev/null
@@ -1,2 +0,0 @@
-10
-10
\ No newline at end of file
diff --git a/test/files/continuations-run/trycatch0.scala b/test/files/continuations-run/trycatch0.scala
deleted file mode 100644
index ec39863..0000000
--- a/test/files/continuations-run/trycatch0.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-object Test {
-  
-  def foo = try {
-    shift((k: Int=>Int) => k(7))
-  } catch {
-    case ex =>
-      9
-  }
-
-  def bar = try {
-    7
-  } catch {
-    case ex =>
-    shiftUnit0[Int,Int](9)
-  }
-  
-  def main(args: Array[String]): Unit = {
-    println(reset { foo + 3 })
-    println(reset { bar + 3 })
-  }
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/trycatch1.check b/test/files/continuations-run/trycatch1.check
deleted file mode 100644
index a028d2b..0000000
--- a/test/files/continuations-run/trycatch1.check
+++ /dev/null
@@ -1,4 +0,0 @@
-12
-12
-12
-12
\ No newline at end of file
diff --git a/test/files/continuations-run/trycatch1.scala b/test/files/continuations-run/trycatch1.scala
deleted file mode 100644
index 10dfd30..0000000
--- a/test/files/continuations-run/trycatch1.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-object Test {
-
-  def fatal: Int = throw new Exception()
-  
-  def foo1 = try {
-    fatal
-    shift((k: Int=>Int) => k(7))
-  } catch {
-    case ex =>
-      9
-  }
-
-  def foo2 = try {
-    shift((k: Int=>Int) => k(7))
-    fatal
-  } catch {
-    case ex =>
-      9
-  }
-
-  def bar1 = try {
-    fatal
-    7
-  } catch {
-    case ex =>
-      shiftUnit0[Int,Int](9) // regular shift causes no-symbol doesn't have owner
-  }
-
-  def bar2 = try {
-    7
-    fatal
-  } catch {
-    case ex =>
-      shiftUnit0[Int,Int](9) // regular shift causes no-symbol doesn't have owner
-  }
-
-  def main(args: Array[String]): Unit = {
-    println(reset { foo1 + 3 })
-    println(reset { foo2 + 3 })
-    println(reset { bar1 + 3 })
-    println(reset { bar2 + 3 })
-  }
-
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/while0.check b/test/files/continuations-run/while0.check
deleted file mode 100644
index d58c55a..0000000
--- a/test/files/continuations-run/while0.check
+++ /dev/null
@@ -1 +0,0 @@
-9000
diff --git a/test/files/continuations-run/while0.scala b/test/files/continuations-run/while0.scala
deleted file mode 100644
index 46005a4..0000000
--- a/test/files/continuations-run/while0.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def foo(): Int @cps[Unit] = 2
-  
-  def test(): Unit @cps[Unit] = {
-    var x = 0
-    while (x < 9000) { // pick number large enough to require tail-call opt
-      x += foo()
-    }
-    println(x)
-  }
-
-  def main(args: Array[String]): Any = {
-    reset(test())
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/while1.check b/test/files/continuations-run/while1.check
deleted file mode 100644
index 3d5f0b9..0000000
--- a/test/files/continuations-run/while1.check
+++ /dev/null
@@ -1,11 +0,0 @@
-up
-up
-up
-up
-up
-10
-down
-down
-down
-down
-down
diff --git a/test/files/continuations-run/while1.scala b/test/files/continuations-run/while1.scala
deleted file mode 100644
index fd41ab3..0000000
--- a/test/files/continuations-run/while1.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def foo(): Int @cps[Unit] = shift { k => println("up"); k(2); println("down") }
-  
-  def test(): Unit @cps[Unit] = {
-    var x = 0
-    while (x < 9) {
-      x += foo()
-    }
-    println(x)
-  }
-
-  def main(args: Array[String]): Any = {
-    reset(test())
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/while2.check b/test/files/continuations-run/while2.check
deleted file mode 100644
index 9fe5151..0000000
--- a/test/files/continuations-run/while2.check
+++ /dev/null
@@ -1,19 +0,0 @@
-up
-up
-up
-up
-up
-up
-up
-up
-up
-9000
-down
-down
-down
-down
-down
-down
-down
-down
-down
diff --git a/test/files/continuations-run/while2.scala b/test/files/continuations-run/while2.scala
deleted file mode 100644
index 63f9cb9..0000000
--- a/test/files/continuations-run/while2.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
- 
-  def foo1(): Int @cps[Unit] = 2
-  def foo2(): Int @cps[Unit] = shift { k => println("up"); k(2); println("down") }
-  
-  def test(): Unit @cps[Unit] = {
-    var x = 0
-    while (x < 9000) { // pick number large enough to require tail-call opt
-      x += (if (x % 1000 != 0) foo1() else foo2())
-    }
-    println(x)
-  }
-
-  def main(args: Array[String]): Any = {
-    reset(test())
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/continuations-run/z1673.scala b/test/files/continuations-run/z1673.scala
deleted file mode 100644
index 716b374..0000000
--- a/test/files/continuations-run/z1673.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-import scala.util.continuations._
-
-class MatchRepro {
-  def s: String @cps[Any] = shift { k => k("foo") }
-
-  def p = {
-    val k = s
-    s match { case lit0 => }
-  }
-
-  def q = {
-    val k = s
-    k match { case lit1 => }
-  }
-
-  def r = {
-    s match { case "FOO" => }
-  }
-
-  def t = {
-    val k = s
-    k match { case "FOO" => }
-  }
-}
-
-object Test {
-  def main(args: Array[String]): Unit = {
-    val m = new MatchRepro
-    ()
-  }
-}
diff --git a/test/files/detach-neg/det_bar.check b/test/files/detach-neg/det_bar.check
deleted file mode 100644
index 70b4758..0000000
--- a/test/files/detach-neg/det_bar.check
+++ /dev/null
@@ -1,4 +0,0 @@
-det_bar.scala:7: error: detach inapplicable for method bar
-  detach(bar)
-         ^
-one error found
diff --git a/test/files/detach-neg/det_bar.scala b/test/files/detach-neg/det_bar.scala
deleted file mode 100644
index 862afb1..0000000
--- a/test/files/detach-neg/det_bar.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.remoting._
-class A(y: Int) {
-  var z = 2
-  var bar = (x: Int) => x + y + z
-  def foo(x: Int): Int = x + y + z
-  bar = (x: Int) => x * y
-  detach(bar)
-}
-
-object test extends App {
-  val a = new A(1)
-  println(a.bar(2))
-}
diff --git a/test/files/detach-run/actor-run.check b/test/files/detach-run/actor-run.check
deleted file mode 100644
index 9448ddd..0000000
--- a/test/files/detach-run/actor-run.check
+++ /dev/null
@@ -1,5 +0,0 @@
-Server.main 8889
-Client.main 127.0.0.1 8889
-yInstVal = 10
-zLocVal = 1000
-result received: 11111
diff --git a/test/files/detach-run/actor/Client.scala b/test/files/detach-run/actor/Client.scala
deleted file mode 100644
index 12573e2..0000000
--- a/test/files/detach-run/actor/Client.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- *  @author Stephane Micheloud
- */
-
-import scala.actors.Actor._, ClientHelper._
-import scala.actors.remote._, RemoteActor._
-import scala.remoting._, Debug._
-
-object Foo {
-  def trace(msg: String) { info("[Foo.trace] "+msg)}
-}
-object Client {
-  val yInstVal: Int = 10
-  var yInstVar: Int = 99
-  object Bar {
-    def trace(msg: String) { info("[Bar.trace] "+msg) }
-  }
-  def main(args: Array[String]) {
-    init(args)
-    actor {
-      val server = select(Node(host, port), 'Server)
-      val zLocVal: Int = 1000
-      var zLocVar: Int = 9998
-      server ! detach(
-        (x: Int) => {
-          println("yInstVal = "+yInstVal)
-          this.trace("yInstVar = "+yInstVar)
-          Bar.trace("zLocVal = "+zLocVal)
-          Foo.trace("zLocVar = "+zLocVar)
-          zLocVar += 2
-          System.out.println("zLocVal = "+zLocVal)
-          Debug.info("zLocVar = "+zLocVar)
-          x + yInstVal + yInstVar + zLocVal + zLocVar
-        })
-      react {
-        case result: Int =>
-          println("result received: " + result)
-          Predef.exit(0)
-      }
-    }
-  }
-  private def trace(msg: String) { info("[Client.trace] "+msg) }
-}
-
-object ClientHelper {
-  private var _host = "127.0.0.1"
-  private var _port = 8888
-  def host = _host
-  def port = _port
-  def init(args: Array[String]) {
-    try { _host = args(0) } catch { case _ => }
-    try { _port = args(1).toInt } catch { case _ => }
-  }
-}
diff --git a/test/files/detach-run/actor/Server.scala b/test/files/detach-run/actor/Server.scala
deleted file mode 100644
index b56d22f..0000000
--- a/test/files/detach-run/actor/Server.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- *  @author Stephane Micheloud
- */
-
-import scala.actors.Actor._
-import scala.actors.remote.RemoteActor._
-
-object Server extends ServerConsole {
-  private def computation(f: Int => Int): Int = {
-    //some time-consuming task
-    f(2)
-  }
-  def main(args: Array[String]) {
-    actor {
-      classLoader = serverClassLoader
-      alive(args(0).toInt)
-      register('Server, self)
-      loopWhile(isRunning) {
-        react {
-          case f: (Int => Int) =>
-            val result = computation(f)
-            sender ! result
-        }
-      }
-    }
-  }
-}
diff --git a/test/files/detach-run/actor/ServerConsole.scala b/test/files/detach-run/actor/ServerConsole.scala
deleted file mode 100644
index 8ebd9d4..0000000
--- a/test/files/detach-run/actor/ServerConsole.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- *  @author Stephane Micheloud
- */
-
-import java.io.{BufferedReader, InputStreamReader}
-
-import scala.compat.Platform.currentTime
-import scala.remoting.Debug, Debug._
-
-trait ServerConsole extends Thread {
-  private val startTime = currentTime
-  actors.Debug.level = // e.g. 3 // info+warning+error
-    try { System.getProperty("scala.actors.logLevel", "0").toInt }
-    catch { case e => 0 }
-
-  start()
-
-  val serverClassLoader = {
-    import java.rmi.server.RMIClassLoader
-    val codebase = System.getProperty("java.rmi.server.codebase")
-    info("[ServerConsole] codebase="+codebase)
-    RMIClassLoader getClassLoader codebase
-  }
-
-  private var isTerminated = false
-
-  def terminate() { isTerminated = false }
-
-  def isRunning = !isTerminated
-
-  override def run() {
-    val in = new BufferedReader(new InputStreamReader(System.in))
-    var quit = false
-    while (!quit) {
-      val args = getArgs(in)
-      if (args contains "quit")
-        quit = true
-      if (args contains "cls") {
-        println(ERASE_SCREEN)
-        println(CURSOR_HOME)
-      }
-      if (args contains "warning")
-        Debug.level = Level.WARNING
-      if (args contains "info")
-        Debug.level = Level.INFO
-      if (args contains "silent")
-        Debug.level = Level.SILENT
-    }
-    terminate()
-    println("Server exited ("+mkTimeString(currentTime - startTime)+")")
-    sys.exit(0)
-  }
-
-  protected def trace(msg: String) {
-    Debug.info("[ServerConsole.trace] "+msg)
-  }
-
-  private def getArgs(in: BufferedReader): List[String] = {
-    val input = try { in.readLine() } catch { case _ => null }
-    if (input != null) (input.trim split "\\s+").toList else Nil
-  }
-
-  private def mkTimeString(time: Long): String = {
-    def twoDigits(i: Long) = (if (i < 10) "0" else "")+i
-    val sec = time / 1000
-    val min = sec / 60
-    val h = min / 60
-    twoDigits(h) +":"+
-    twoDigits(min - h * 60)+":"+
-    twoDigits(sec - min * 60)
-  }
-
-  private val ERASE_SCREEN = "\033[2J"
-  private val CURSOR_HOME  = "\033[H"
-}
diff --git a/test/files/detach-run/actor/actor.flags b/test/files/detach-run/actor/actor.flags
deleted file mode 100644
index 55eed8b..0000000
--- a/test/files/detach-run/actor/actor.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xpluginsdir ../../../../build/pack/misc/scala-devel/plugins -Xplugin-require:detach -P:detach:enable
diff --git a/test/files/detach-run/actor/actor.scala b/test/files/detach-run/actor/actor.scala
deleted file mode 100644
index 23a10d6..0000000
--- a/test/files/detach-run/actor/actor.scala
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- *  @author Stephane Micheloud
- */
-
-object Test {
-
-  val name = "actor"
-  val host = "127.0.0.1"
-  val port = 8889
-
-  def main(args: Array[String]) {
-    setenv()
-    println("Server.main "+port)
-    Server.main(Array(port.toString))
-    println("Client.main "+host+" "+port)
-    Client.main(Array(host, port.toString))
-    Server.terminate()
-  }
-
-  private def setenv() {
-    import Env._
-
-    // Java properties for server & client
-    System.setProperty("scala.actors.logLevel", actors_logLevel)
-    System.setProperty("scala.remoting.logLevel", logLevel)
-    System.setProperty("java.security.manager", "")
-    System.setProperty("java.security.policy", policyFile)
-    // Java properties for server only
-    System.setProperty("java.rmi.server.codebase", deployUrl)
-    System.setProperty("java.rmi.server.hostname", host)
-    System.setProperty("java.rmi.server.useCodebaseOnly", "true")
-
-    // application-specific classes to be deployed and accessed via URL
-    // (i.e. detached closure, proxy interfaces and proxy stubs)
-    val classNames = List(
-      "$anonfun$main$1$proxy",
-      "$anonfun$main$1$proxyImpl_Stub",
-      "Bar$proxy",
-      "Bar$proxyImpl_Stub",
-      "Client$$anonfun$main$1$$anonfun$apply$1$detach",
-      "Client$proxy",
-      "Client$proxyImpl_Stub",
-      "Foo$proxy",
-      "Foo$proxyImpl_Stub")
-
-    val proxyImplNames =
-      for (n <- classNames; i = n lastIndexOf "_Stub"; if i > 0)
-      yield n.substring(0, i)
-
-    generatePolicyFile()
-    generateRmiStubs(proxyImplNames)
-    generateJarFile(classNames)
-  }
-}
-
-object Env {
-  import java.io._, java.util.jar._
-
-  val actors_logLevel = "0"
-                   // = "3" // info+warning+error
-  val logLevel = "silent"
-            // = "info"     // debug user code only
-            // = "info,lib" // debug user & library code
-
-  // we assume an Apache server is running locally for deployment
-  private val sep = File.separator
-  val docPath = System.getProperty("user.home")+sep+"public_html"
-  val docRoot = "http://127.0.0.1/~"+System.getProperty("user.name")
-
-  private val policyTmpl =
-    System.getProperty("partest.cwd")+sep+Test.name+sep+"java.policy"
-  val outPath = System.getProperty("partest.output")
-  val libPath = System.getProperty("partest.lib")
-  val policyFile = outPath+sep+"java.policy"
-  val codebaseDir = outPath+sep+"-"
-
-  assert((new File(docPath)).isDirectory,
-         "Root directory \""+docPath+"\" not found")
-  val deployJar = docPath+sep+Test.name+"_deploy.jar"
-  val deployUrl = docRoot+"/"+Test.name+"_deploy.jar"
-
-  def generatePolicyFile() {
-    val in = new BufferedReader(new FileReader(policyTmpl))
-    val out = new PrintWriter(new BufferedWriter(new FileWriter(policyFile)))
-    var line = in.readLine()
-    while (line != null) {
-      val line1 = line.replaceAll("@PROJECT_LIB_BASE@", codebaseDir)
-      out.println(line1)
-      line = in.readLine()
-    }
-    in.close()
-    out.close()
-  }
-
-  def generateRmiStubs(classNames: List[String]) {
-    val options = List(
-      "-v1.2",
-      "-classpath "+libPath+File.pathSeparator+outPath,
-      "-d "+outPath)
-    rmic(options, classNames)
-    //ls(outPath)
-  }
-
-  def generateJarFile(classNames: List[String]) {
-    val out = new JarOutputStream(new FileOutputStream(deployJar))
-    classNames foreach (name => try {
-      val classFile = name+".class"
-      val in = new FileInputStream(outPath+sep+classFile)
-      out putNextEntry new JarEntry(classFile)
-      val buf = new Array[Byte](512)
-      var len = in read buf
-      while (len != -1) {
-        out.write(buf, 0, len)
-        len = in read buf
-      }
-      in.close()
-    } catch {
-      case e: FileNotFoundException => println(e)   
-    })
-    out.close()
-  }
-
-  private def ls(path: String) { exec("ls -al "+path) }
-
-  private def rmic(options: List[String], classNames: List[String]) {
-    val javaHome = scala.util.Properties.javaHome
-    val jdkHome =
-      if (javaHome endsWith "jre") javaHome.substring(0, javaHome.length-4)
-      else javaHome
-    val rmicExt = if (scala.util.Properties.isWin) ".exe" else ""
-    val rmicCmd = jdkHome+sep+"bin"+sep+"rmic"+rmicExt
-    val cmdLine = rmicCmd+options.mkString(" ", " ", "")+
-                          classNames.mkString(" "," ","")
-    // println(cmdLine)
-    exec(cmdLine)
-  }
-
-  private def exec(command: String) {
-    val proc = Runtime.getRuntime exec command
-    proc.waitFor()
-    val out = new BufferedReader(new InputStreamReader(proc.getInputStream))
-    var line = out.readLine()
-    while (line != null) {
-      println(line)
-      line = out.readLine()
-    }
-    out.close()
-    val err = new BufferedReader(new InputStreamReader(proc.getErrorStream))
-    line = err.readLine()
-    while (line != null) {
-      println(line)
-      line = err.readLine()
-    }
-    err.close()
-  }
-}
-
diff --git a/test/files/detach-run/actor/java.policy b/test/files/detach-run/actor/java.policy
deleted file mode 100644
index 4beb2ca..0000000
--- a/test/files/detach-run/actor/java.policy
+++ /dev/null
@@ -1,25 +0,0 @@
-// See http://java.sun.com/javase/6/docs/technotes/guides/security/permissions.html
-// See http://mindprod.com/jgloss/policyfile.html
-// The policy expands ${/} to the correct path or folder delimiter on your host platform.
-
-// Actions available with SocketPermission: accept, connect, listen, resolve
-// 1) The "resolve" action is implied when any of the other actions are present.
-// 2) The "listen" action is only meaningful when used with "localhost".
-
-grant {
-    permission java.net.SocketPermission "*:80", "connect,accept,listen";
-    permission java.net.SocketPermission "*:1024-", "connect,accept,listen";
-    permission java.util.PropertyPermission "scala.remoting.logLevel", "read";
-    permission java.util.PropertyPermission "scala.remoting.port", "read";
-};
-
-grant codeBase "@PROJECT_LIB_BASE@" {
-    permission java.lang.RuntimePermission "getClassLoader";
-    permission java.util.PropertyPermission "java.rmi.server.codebase", "read";
-    permission java.util.PropertyPermission "java.rmi.server.hostname", "read";
-    permission java.util.PropertyPermission "sun.rmi.dgc.server.gcInterval", "read,write";
-};
-
-//grant {
-//    permission java.security.AllPermission;
-//};
diff --git a/test/files/detach-run/basic-run.check b/test/files/detach-run/basic-run.check
deleted file mode 100644
index 6463d97..0000000
--- a/test/files/detach-run/basic-run.check
+++ /dev/null
@@ -1,5 +0,0 @@
-Server.main 8889
-> Client.main 127.0.0.1 8889
-yInstVal = 10
-zLocVal = 1000
-result received: 11111
diff --git a/test/files/detach-run/basic/Client.scala b/test/files/detach-run/basic/Client.scala
deleted file mode 100644
index f8eddb0..0000000
--- a/test/files/detach-run/basic/Client.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- *  @author Stephane Micheloud
- */
-
-import java.net._, Thread._, ClientHelper._
-import scala.remoting._, Debug._
-
-object Foo {
-  def trace(s: String) { info("[Foo.trace] "+s)}
-}
-object Client {
-  val yInstVal: Int = 10
-  var yInstVar: Int = 99
-  object Bar {
-    def trace(s: String) { info("[Bar.trace] "+s) }
-  }
-  def main(args: Array[String]) {
-    init(args)
-    val server = new Channel(host, port)
-    val zLocVal: Int = 1000
-    var zLocVar: Int = 9998
-    server ! detach(
-      (x: Int) => {
-        println("yInstVal = "+yInstVal)
-        this.trace("yInstVar = "+yInstVar)
-        Bar.trace("zLocVal = "+zLocVal)
-        Foo.trace("zLocVar = "+zLocVar)
-        zLocVar += 2
-        System.out.println("zLocVal = "+zLocVal)
-        Debug.info("zLocVar = "+zLocVar)
-        x + yInstVal + yInstVar + zLocVal + zLocVar
-      })
-    val result = server.receiveInt
-    println("result received: " + result)
-  }
-  private def trace(s: String) { info("[Client.trace] "+s) }
-}
-
-object ClientHelper {
-  private var _host = "127.0.0.1"
-  private var _port = 8888
-  def host = _host
-  def port = _port
-  def init(args: Array[String]) {
-    try { _host = args(0) } catch { case _ => }
-    try { _port = args(1).toInt } catch { case _ => }
-  }
-}
diff --git a/test/files/detach-run/basic/Server.scala b/test/files/detach-run/basic/Server.scala
deleted file mode 100644
index f8aa02a..0000000
--- a/test/files/detach-run/basic/Server.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- *  @author Stephane Micheloud
- */
-
-import scala.remoting.ServerChannel
-
-object Server extends ServerConsole {
-  private def computation(f: Int => Int): Int = {
-    //some time-consuming task
-    f(2)
-  }
-  def main(args: Array[String]) {
-    val server = new ServerChannel(args(0).toInt)
-    loop {
-      val client = server.accept
-      val f = client.receive[Int => Int]
-      val result = computation(f)
-      client ! result
-    }
-    server.close()
-  }
-}
diff --git a/test/files/detach-run/basic/ServerConsole.scala b/test/files/detach-run/basic/ServerConsole.scala
deleted file mode 100644
index 65b81c0..0000000
--- a/test/files/detach-run/basic/ServerConsole.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- *  @author Stephane Micheloud
- */
-
-import java.io._
-
-import scala.compat.Platform.currentTime
-import scala.remoting.Debug, Debug._
-
-trait ServerConsole extends Thread {
-  private val startTime = currentTime
-
-  start()
-
-  private var isTerminated = false
-
-  def terminate() { isTerminated = true }
-
-  protected def loop(block: => Unit) {
-    while (!isTerminated) {
-      try {
-        block
-      }
-      catch {
-        case e: ObjectStreamException =>
-          trace("Object stream error ("+e.getMessage+")")
-        case e: EOFException =>
-          trace("Connection lost")
-        case e: ClassNotFoundException =>
-          trace("Class not found")
-        case e =>
-          trace("Server error: "+e)
-      }
-    }
-  }
-
-  override def run() {
-    val in = new BufferedReader(new InputStreamReader(System.in))
-    var quit = false
-    while (!quit) {
-      val args = getArgs(in)
-      if (args contains "quit")
-        quit = true
-      if (args contains "cls") {
-        println(ERASE_SCREEN)
-        println(CURSOR_HOME)
-      }
-      if (args contains "warning")
-        Debug.level = Level.WARNING
-      if (args contains "info")
-        Debug.level = Level.INFO
-      if (args contains "silent")
-        Debug.level = Level.SILENT
-    }
-    terminate()
-    println("Server exited ("+mkTimeString(currentTime - startTime)+")")
-    exit(0)
-
-  }
-
-  protected def trace(msg: String) {
-    Debug.info("[ServerConsole.trace] "+msg)
-  }
-
-  private def getArgs(in: BufferedReader): List[String] = {
-    print("> ")
-    val input = try { in.readLine() } catch { case _ => null }
-    if (input != null) (input.trim split "\\s+").toList else Nil
-  }
-
-  private def mkTimeString(time: Long): String = {
-    def twoDigits(i: Long) = (if (i < 10) "0" else "")+i
-    val sec = time / 1000
-    val min = sec / 60
-    val h = min / 60
-    twoDigits(h) +":"+
-    twoDigits(min - h * 60)+":"+
-    twoDigits(sec - min * 60)
-  }
-
-  private val ERASE_SCREEN = "\033[2J"
-  private val CURSOR_HOME  = "\033[H"
-}
diff --git a/test/files/detach-run/basic/basic.flags b/test/files/detach-run/basic/basic.flags
deleted file mode 100644
index 55eed8b..0000000
--- a/test/files/detach-run/basic/basic.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xpluginsdir ../../../../build/pack/misc/scala-devel/plugins -Xplugin-require:detach -P:detach:enable
diff --git a/test/files/detach-run/basic/basic.scala b/test/files/detach-run/basic/basic.scala
deleted file mode 100644
index 4d0fc2d..0000000
--- a/test/files/detach-run/basic/basic.scala
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- *  @author Stephane Micheloud
- */
-
-object Test {
-
-  val name = "basic"
-  val host = "127.0.0.1"
-  val port = 8889
-
-  def main(args: Array[String]) {
-    setenv()
-    println("Server.main "+port)
-    server.start()
-    println("Client.main "+host+" "+port)
-    client.start()
-    server.terminate()
-  }
-
-  private var server = new ServerThread(port)
-  private var client = new ClientThread(host, port)
-
-  private class ServerThread(port: Int) extends Runnable {
-    private var th = new Thread(this)
-    def start() { th.start(); Thread.sleep(1000) }
-    def run() { Server.main(Array(port.toString)) }
-    def terminate() { Server.terminate(); sys.exit(0) }
-  }
-
-  private class ClientThread(host: String, port: Int) extends Runnable {
-    private var th = new Thread(this)
-    def start() { th.start(); th.join() }
-    def run() { Client.main(Array(host, port.toString)) }
-  }
-
-  private def setenv() {
-    import Env._
-
-    // Java properties for server & client
-    System.setProperty("scala.remoting.logLevel", logLevel)
-    System.setProperty("java.security.manager", "")
-    System.setProperty("java.security.policy", policyFile)
-    // Java properties for server only
-    System.setProperty("java.rmi.server.codebase", deployUrl)
-    System.setProperty("java.rmi.server.hostname", host)
-    System.setProperty("java.rmi.server.useCodebaseOnly", "true")
-
-    // application-secific classes to be deployed and accessed via URL
-    // (i.e. detached closure, proxy interfaces and proxy stubs)
-    val classNames = List(
-      "Bar$proxy",
-      "Bar$proxyImpl_Stub",
-      "Client$$anonfun$main$1$detach",
-      "Client$proxy",
-      "Client$proxyImpl_Stub",
-      "Foo$proxy",
-      "Foo$proxyImpl_Stub")
-
-    val proxyImplNames =
-      for (n <- classNames; i = n lastIndexOf "_Stub"; if i > 0)
-      yield n.substring(0, i)
-
-    generatePolicyFile()
-    generateRmiStubs(proxyImplNames)
-    generateJarFile(classNames)
-  }
-}
-
-object Env {
-  import java.io._, java.util.jar._
-
-  val actors_logLevel = "0"
-                   // = "3" // info+warning+error
-  val logLevel = "silent"
-            // = "info"     // debug user code only
-            // = "info,lib" // debug user & library code
-
-  // we assume an Apache server is running locally for deployment
-  private val sep = File.separator
-  val docPath = System.getProperty("user.home")+sep+"public_html"
-  val docRoot = "http://127.0.0.1/~"+System.getProperty("user.name")
-
-  private val policyTmpl =
-    System.getProperty("partest.cwd")+sep+Test.name+sep+"java.policy"
-  val outPath = System.getProperty("partest.output")
-  val libPath = System.getProperty("partest.lib")
-  val policyFile = outPath+sep+"java.policy"
-  val codebaseDir = outPath+sep+"-"
-
-  assert((new File(docPath)).isDirectory,
-         "Root directory \""+docPath+"\" not found")
-  val deployJar = docPath+sep+Test.name+"_deploy.jar"
-  val deployUrl = docRoot+"/"+Test.name+"_deploy.jar"
-
-  def generatePolicyFile() {
-    val in = new BufferedReader(new FileReader(policyTmpl))
-    val out = new PrintWriter(new BufferedWriter(new FileWriter(policyFile)))
-    var line = in.readLine()
-    while (line != null) {
-      val line1 = line.replaceAll("@PROJECT_LIB_BASE@", codebaseDir)
-      out.println(line1)
-      line = in.readLine()
-    }
-    in.close()
-    out.close()
-  }
-
-  def generateRmiStubs(classNames: List[String]) {
-    val options = List(
-      "-v1.2",
-      "-classpath "+libPath+File.pathSeparator+outPath,
-      "-d "+outPath)
-    rmic(options, classNames)
-    //ls(outPath)
-  }
-
-  def generateJarFile(classNames: List[String]) {
-    val out = new JarOutputStream(new FileOutputStream(deployJar))
-    classNames foreach (name => try {
-      val classFile = name+".class"
-      val in = new FileInputStream(outPath+sep+classFile)
-      out putNextEntry new JarEntry(classFile)
-      val buf = new Array[Byte](512)
-      var len = in read buf
-      while (len != -1) {
-        out.write(buf, 0, len)
-        len = in read buf
-      }
-      in.close()
-    } catch {
-      case e: FileNotFoundException => println(e)
-    })
-    out.close()
-  }
-
-  private def ls(path: String) { exec("ls -al "+path) }
-
-  private def rmic(options: List[String], classNames: List[String]) {
-    val javaHome = scala.util.Properties.javaHome
-    val jdkHome =
-      if (javaHome endsWith "jre") javaHome.substring(0, javaHome.length-4)
-      else javaHome
-    val rmicExt = if (scala.util.Properties.isWin) ".exe" else ""
-    val rmicCmd = jdkHome+sep+"bin"+sep+"rmic"+rmicExt
-    val cmdLine = rmicCmd+options.mkString(" ", " ", "")+
-                          classNames.mkString(" "," ","")
-    // println(cmdLine)
-    exec(cmdLine)
-  }
-
-  private def exec(command: String) {
-    val proc = Runtime.getRuntime exec command
-    proc.waitFor()
-    val out = new BufferedReader(new InputStreamReader(proc.getInputStream))
-    var line = out.readLine()
-    while (line != null) {
-      println(line)
-      line = out.readLine()
-    }
-    out.close()
-    val err = new BufferedReader(new InputStreamReader(proc.getErrorStream))
-    line = err.readLine()
-    while (line != null) {
-      println(line)
-      line = err.readLine()
-    }
-    err.close()
-  }
-}
diff --git a/test/files/detach-run/basic/java.policy b/test/files/detach-run/basic/java.policy
deleted file mode 100644
index 92c1045..0000000
--- a/test/files/detach-run/basic/java.policy
+++ /dev/null
@@ -1,26 +0,0 @@
-// See http://java.sun.com/javase/6/docs/technotes/guides/security/permissions.html
-// See http://mindprod.com/jgloss/policyfile.html
-// The policy expands ${/} to the correct path or folder delimiter on your host platform.
-
-// Actions available with SocketPermission: accept, connect, listen, resolve
-// 1) The "resolve" action is implied when any of the other actions are present.
-// 2) The "listen" action is only meaningful when used with "localhost".
-
-grant {
-    permission java.net.SocketPermission "*:80", "connect,accept,listen";
-    permission java.net.SocketPermission "*:1024-", "connect,accept,listen";
-    permission java.util.PropertyPermission "scala.remoting.logLevel", "read";
-    permission java.util.PropertyPermission "scala.remoting.port", "read";
-};
-
-grant codeBase "@PROJECT_LIB_BASE@" {
-    permission java.lang.RuntimePermission "getClassLoader";
-    permission java.lang.RuntimePermission "createClassLoader";
-    permission java.util.PropertyPermission "java.rmi.server.codebase", "read";
-    permission java.util.PropertyPermission "java.rmi.server.hostname", "read";
-    permission java.util.PropertyPermission "sun.rmi.dgc.server.gcInterval", "read,write";
-};
-
-//grant {
-//    permission java.security.AllPermission;
-//};
diff --git a/test/files/disabled/run/t4602.scala b/test/files/disabled/run/t4602.scala
deleted file mode 100644
index 73ba231..0000000
--- a/test/files/disabled/run/t4602.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-import java.io.{File, FileOutputStream, BufferedOutputStream, FileWriter, ByteArrayOutputStream, PrintStream}
-import tools.nsc.{CompileClient, CompileServer}
-import java.util.concurrent.{CountDownLatch, TimeUnit}
-
-object Test extends App {
-  val startupLatch = new CountDownLatch(1)
-  // we have to explicitly launch our server because when the client launches a server it uses 
-  // the "scala" shell command meaning whatever version of scala (and whatever version of libraries)
-  // happens to be in the path gets used
-  val t = new Thread(new Runnable {
-    def run() = {
-      CompileServer.execute(() => startupLatch.countDown(), Array[String]())
-    }
-  })
-  t setDaemon true
-  t.start()
-  if (!startupLatch.await(2, TimeUnit.MINUTES))
-    sys error "Timeout waiting for server to start"
-
-  val baos = new ByteArrayOutputStream()
-  val ps = new PrintStream(baos) 
-
-  val outdir = scala.reflect.io.Directory(sys.props("partest.output"))
-  
-  val dirNameAndPath = (1 to 2).toList map {number =>
-    val name = s"Hello${number}"
-    val dir = outdir / number.toString
-    (dir, name, dir / s"${name}.scala")
-  }
-
-  dirNameAndPath foreach {case (dir, name, path) =>
-    dir.createDirectory()
-    val file = path.jfile
-    val out = new FileWriter(file)
-    try 
-      out.write(s"object ${name}\n")
-    finally 
-      out.close
-  }
-
-  val success = (scala.Console withOut ps) {
-    dirNameAndPath foreach {case (path, name, _) =>
-      CompileClient.process(Array("-verbose", "-current-dir", path.toString, s"${name}.scala"))
-    }
-
-    CompileClient.process(Array("-shutdown"))
-  }
-
-  // now make sure we got success and the correct normalized paths
-  val msg = baos.toString()
-
-  assert(success, s"got a failure. Full results were: \n${msg}")
-  dirNameAndPath foreach {case (_, _, path) => 
-    val expected = s"Input files after normalizing paths: ${path}"
-    assert(msg contains expected, s"could not find '${expected}' in output. Full results were: \n${msg}")
-  }
-}
diff --git a/test/files/disabled/t7020.check b/test/files/disabled/t7020.check
deleted file mode 100644
index a869b12..0000000
--- a/test/files/disabled/t7020.check
+++ /dev/null
@@ -1,17 +0,0 @@
-t7020.scala:3: error: match may not be exhaustive.
-It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List(_, _)
-  List(5) match {
-      ^
-t7020.scala:10: error: match may not be exhaustive.
-It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List(_, _)
-  List(5) match {
-      ^
-t7020.scala:17: error: match may not be exhaustive.
-It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List(_, _)
-  List(5) match {
-      ^
-t7020.scala:24: error: match may not be exhaustive.
-It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List(_, _)
-  List(5) match {
-      ^
-four errors found
diff --git a/test/files/filters b/test/files/filters
new file mode 100644
index 0000000..51a7507
--- /dev/null
+++ b/test/files/filters
@@ -0,0 +1,8 @@
+#
+#Java HotSpot(TM) 64-Bit Server VM warning: Failed to reserve shared memory (errno = 28).
+Java HotSpot\(TM\) .* warning:
+# Hotspot receiving VM options through the $_JAVA_OPTIONS
+# env variable outputs them on stderr
+Picked up _JAVA_OPTIONS:
+# Filter out a message caused by this bug: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=8021205
+objc\[\d+\]: Class JavaLaunchHelper is implemented in both .* and .*\. One of the two will be used\. Which one is undefined\.
diff --git a/test/files/instrumented/InstrumentationTest.check b/test/files/instrumented/InstrumentationTest.check
index f0f4475..c82d16b 100644
--- a/test/files/instrumented/InstrumentationTest.check
+++ b/test/files/instrumented/InstrumentationTest.check
@@ -1,8 +1,14 @@
+#partest !avian
 true
 Method call statistics:
     1  Foo1.<init>()V
     1  Foo1.someMethod()I
     1  instrumented/Foo2.<init>()V
     1  instrumented/Foo2.someMethod()I
+    1  scala/DeprecatedConsole.<init>()V
     1  scala/Predef$.println(Ljava/lang/Object;)V
+    1  scala/io/AnsiColor$class.$init$(Lscala/io/AnsiColor;)V
     1  scala/runtime/BoxesRunTime.boxToBoolean(Z)Ljava/lang/Boolean;
+#partest avian
+!!!TEST SKIPPED!!!
+Instrumentation is not supported on Avian.
diff --git a/test/files/instrumented/InstrumentationTest.scala b/test/files/instrumented/InstrumentationTest.scala
index 0e53f80..458fd49 100644
--- a/test/files/instrumented/InstrumentationTest.scala
+++ b/test/files/instrumented/InstrumentationTest.scala
@@ -15,16 +15,21 @@ package instrumented {
 /** Tests if instrumentation itself works correctly */
 object Test {
   def main(args: Array[String]) {
-    // force predef initialization before profiling
-    Predef
-    startProfiling()
-    val foo1 = new Foo1
-    foo1.someMethod
-    val foo2 = new instrumented.Foo2
-    foo2.someMethod
-    // should box the boolean
-    println(true)
-    stopProfiling()
-    printStatistics()
+    if (scala.tools.partest.utils.Properties.isAvian) {
+      println("!!!TEST SKIPPED!!!")
+      println("Instrumentation is not supported on Avian.")
+    } else {
+      // force predef initialization before profiling
+      Predef
+      startProfiling()
+      val foo1 = new Foo1
+      foo1.someMethod
+      val foo2 = new instrumented.Foo2
+      foo2.someMethod
+      // should box the boolean
+      println(true)
+      stopProfiling()
+      printStatistics()
+    }
   }
 }
diff --git a/test/files/instrumented/inline-in-constructors.check b/test/files/instrumented/inline-in-constructors.check
index c6c9ae4..b58c1d7 100644
--- a/test/files/instrumented/inline-in-constructors.check
+++ b/test/files/instrumented/inline-in-constructors.check
@@ -1,3 +1,7 @@
+#partest !avian
 Method call statistics:
     1  instrumented/Bar.<init>(Z)V
     1  instrumented/Foo.<init>(I)V
+#partest avian
+!!!TEST SKIPPED!!!
+Instrumentation is not supported on Avian.
diff --git a/test/files/instrumented/inline-in-constructors.flags b/test/files/instrumented/inline-in-constructors.flags
index c9b68d7..068318e 100644
--- a/test/files/instrumented/inline-in-constructors.flags
+++ b/test/files/instrumented/inline-in-constructors.flags
@@ -1 +1 @@
--optimise
+-optimise -Ydelambdafy:inline
diff --git a/test/files/instrumented/inline-in-constructors/test_3.scala b/test/files/instrumented/inline-in-constructors/test_3.scala
index c4d4cc5..949e994 100644
--- a/test/files/instrumented/inline-in-constructors/test_3.scala
+++ b/test/files/instrumented/inline-in-constructors/test_3.scala
@@ -3,13 +3,18 @@ import instrumented._
 
 object Test {
   def main(args: Array[String]) {
-    // force predef initialization before profiling
-    Predef
-    MyPredef
-    startProfiling()
-    val a = new Foo(2)
-    val b = new Bar(true)
-    stopProfiling()
-    printStatistics()
+    if (scala.tools.partest.utils.Properties.isAvian) {
+      println("!!!TEST SKIPPED!!!")
+      println("Instrumentation is not supported on Avian.")
+    } else {
+      // force predef initialization before profiling
+      Predef
+      MyPredef
+      startProfiling()
+      val a = new Foo(2)
+      val b = new Bar(true)
+      stopProfiling()
+      printStatistics()
+    }
   }
 }
diff --git a/test/files/jvm/actor-exceptions.scala b/test/files/jvm/actor-exceptions.scala
index 3ee4db9..bdd983a 100644
--- a/test/files/jvm/actor-exceptions.scala
+++ b/test/files/jvm/actor-exceptions.scala
@@ -1,4 +1,7 @@
 
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors.{Actor, Exit}
 import Actor._
 
@@ -58,7 +61,6 @@ object Slave extends Actor {
 
 case object A
 
-object Test {
   def main(args: Array[String]) {
     Master.start()
   }
diff --git a/test/files/jvm/actor-executor.scala b/test/files/jvm/actor-executor.scala
index b1f9cae..0fc28b4 100644
--- a/test/files/jvm/actor-executor.scala
+++ b/test/files/jvm/actor-executor.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import java.util.concurrent.Executors
 import scala.actors.{Actor, SchedulerAdapter}
 import Actor._
@@ -50,7 +54,6 @@ object Two extends AdaptedActor {
   }
 }
 
-object Test {
   val executor =
     Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors())
 
diff --git a/test/files/jvm/actor-executor2.scala b/test/files/jvm/actor-executor2.scala
index f8fcaef..5badf2a 100644
--- a/test/files/jvm/actor-executor2.scala
+++ b/test/files/jvm/actor-executor2.scala
@@ -1,3 +1,8 @@
+
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors.{Actor, SchedulerAdapter, Exit}
 import Actor._
 import java.util.concurrent.{Executors, RejectedExecutionException}
@@ -48,7 +53,6 @@ trait AdaptedActor extends Actor {
     Test.scheduler
 }
 
-object Test {
   val NUM_MSG = 100000
 
   val executor =
diff --git a/test/files/jvm/actor-executor3.scala b/test/files/jvm/actor-executor3.scala
index 4fde2c6..f8b57d8 100644
--- a/test/files/jvm/actor-executor3.scala
+++ b/test/files/jvm/actor-executor3.scala
@@ -1,3 +1,8 @@
+
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors.Actor
 import scala.actors.scheduler.ExecutorScheduler
 import java.util.concurrent.Executors
@@ -48,7 +53,6 @@ trait AdaptedActor extends Actor {
     Test.scheduler
 }
 
-object Test {
   val NUM_MSG = 100000
 
   val executor =
diff --git a/test/files/jvm/actor-getstate.scala b/test/files/jvm/actor-getstate.scala
index a6e15a8..425efbe 100644
--- a/test/files/jvm/actor-getstate.scala
+++ b/test/files/jvm/actor-getstate.scala
@@ -1,7 +1,9 @@
-import scala.actors.{Reactor, Actor, TIMEOUT}
-import Actor._
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors.{Reactor, Actor, TIMEOUT}
+  import Actor._
 
   def assert(cond: => Boolean, hint: String) {
     if (!cond)
diff --git a/test/files/jvm/actor-link-getstate.scala b/test/files/jvm/actor-link-getstate.scala
index c24daf2..d8b8ada 100644
--- a/test/files/jvm/actor-link-getstate.scala
+++ b/test/files/jvm/actor-link-getstate.scala
@@ -1,5 +1,9 @@
-import scala.actors.{Actor, Exit}
-import scala.actors.Actor._
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
+  import scala.actors.{Actor, Exit}
+  import scala.actors.Actor._
 
 case class MyException(text: String) extends Exception(text) {
   override def fillInStackTrace() = this
@@ -39,7 +43,6 @@ object Master extends Actor {
   }
 }
 
-object Test {
 
   def main(args: Array[String]) {
     actor {
diff --git a/test/files/jvm/actor-looping.scala b/test/files/jvm/actor-looping.scala
index 475d475..7bc6f1e 100644
--- a/test/files/jvm/actor-looping.scala
+++ b/test/files/jvm/actor-looping.scala
@@ -1,7 +1,8 @@
 
-import scala.actors.Actor._
 
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors.Actor._
   case object A
 
   def main(args: Array[String]) {
diff --git a/test/files/jvm/actor-normal-exit.scala b/test/files/jvm/actor-normal-exit.scala
index 20863d5..9049586 100644
--- a/test/files/jvm/actor-normal-exit.scala
+++ b/test/files/jvm/actor-normal-exit.scala
@@ -1,7 +1,9 @@
 
-import scala.actors.{Actor, Exit}
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors.{Actor, Exit}
   object Master extends Actor {
     trapExit = true
     def act() {
diff --git a/test/files/jvm/actor-receivewithin.scala b/test/files/jvm/actor-receivewithin.scala
index a5c87c2..5982462 100644
--- a/test/files/jvm/actor-receivewithin.scala
+++ b/test/files/jvm/actor-receivewithin.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors.{Actor, TIMEOUT}
 
 object A extends Actor {
@@ -62,7 +66,6 @@ object B extends Actor {
   }
 }
 
-object Test {
   def main(args:Array[String]) {
     B.start()
   }
diff --git a/test/files/jvm/actor-sync-send-timeout.scala b/test/files/jvm/actor-sync-send-timeout.scala
index 21e624b..66a0b0a 100644
--- a/test/files/jvm/actor-sync-send-timeout.scala
+++ b/test/files/jvm/actor-sync-send-timeout.scala
@@ -1,10 +1,11 @@
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors.Actor
 
 /* This test is a regression test for SI-4759.
  */
-object Test { 
   val Runs = 5
-  
+
   def main(args: Array[String]) = {
     var i = 0
     while (i < Runs) {
@@ -14,7 +15,6 @@ object Test {
     }
     //println("done sending to A1")
   }
-}
 
 object A2 extends Actor {
   this.start()
@@ -45,3 +45,4 @@ object A1 extends Actor {
     }
   }
 }
+}
diff --git a/test/files/jvm/actor-termination.scala b/test/files/jvm/actor-termination.scala
index d8e44a2..4a6bf92 100644
--- a/test/files/jvm/actor-termination.scala
+++ b/test/files/jvm/actor-termination.scala
@@ -1,8 +1,9 @@
-import scala.actors.Actor
 
 /* Test that an actor that hasn't finished prevents termination */
 
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors.Actor
   def main(args: Array[String]) {
     Actor.actor {
       try {
diff --git a/test/files/jvm/actor-uncaught-exception.scala b/test/files/jvm/actor-uncaught-exception.scala
index 5ae66de..c28ad2f 100644
--- a/test/files/jvm/actor-uncaught-exception.scala
+++ b/test/files/jvm/actor-uncaught-exception.scala
@@ -1,63 +1,64 @@
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors.{Actor, Exit}
 
 class MyException(msg: String) extends Exception(msg) {
   override def fillInStackTrace() = this
 }
 
-object Test { 
 
-  case object StartError extends Actor { 
-    def act() { 
+  case object StartError extends Actor {
+    def act() {
       try {
-      throw new MyException("I don't want to run!") 
+      throw new MyException("I don't want to run!")
       } catch {
         case e: Throwable if (!e.isInstanceOf[scala.util.control.ControlThrowable] &&
                               !e.isInstanceOf[MyException]) =>
           e.printStackTrace()
       }
-    } 
-  } 
+    }
+  }
 
-  case object MessageError extends Actor { 
-    def act() { 
+  case object MessageError extends Actor {
+    def act() {
       try {
-      react { 
-        case _ => throw new MyException("No message for me!") 
-      } 
+      react {
+        case _ => throw new MyException("No message for me!")
+      }
       } catch {
         case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
           e.printStackTrace()
       }
-    } 
-  } 
+    }
+  }
 
-  case object Supervisor extends Actor { 
-    def act() { 
+  case object Supervisor extends Actor {
+    def act() {
       try {
-      trapExit = true 
+      trapExit = true
       link(StartError)
       link(MessageError)
-      StartError.start() 
+      StartError.start()
       MessageError.start()
 
-      Actor.loop { 
-        react { 
+      Actor.loop {
+        react {
           case Exit(actor, reason) =>
             println("OK")
             if (actor == StartError)
               MessageError ! 'ping
             else
               exit()
-        } 
-      } 
+        }
+      }
       } catch {
         case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
           e.printStackTrace()
       }
-    } 
-  } 
+    }
+  }
 
-  def main(args: Array[String]) { 
-    Supervisor.start() 
-  } 
-} 
+  def main(args: Array[String]) {
+    Supervisor.start()
+  }
+}
diff --git a/test/files/jvm/actor-uncaught-exception2.check b/test/files/jvm/actor-uncaught-exception2.check
index 870a5d3..a54f374 100644
--- a/test/files/jvm/actor-uncaught-exception2.check
+++ b/test/files/jvm/actor-uncaught-exception2.check
@@ -1,2 +1,2 @@
-UncaughtException(StartError,None,None,MyException: I don't want to run!)
-UncaughtException(MessageError,Some('ping),Some(Supervisor),MyException: No message for me!)
+UncaughtException(StartError,None,None,Test$MyException: I don't want to run!)
+UncaughtException(MessageError,Some('ping),Some(Supervisor),Test$MyException: No message for me!)
diff --git a/test/files/jvm/actor-uncaught-exception2.scala b/test/files/jvm/actor-uncaught-exception2.scala
index 0364cbe..8327b4e 100644
--- a/test/files/jvm/actor-uncaught-exception2.scala
+++ b/test/files/jvm/actor-uncaught-exception2.scala
@@ -1,63 +1,63 @@
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors.{Actor, Exit, Debug}
 
 class MyException(msg: String) extends Exception(msg) {
   override def fillInStackTrace() = this
 }
 
-object Test { 
-
-  case object StartError extends Actor { 
-    def act() { 
+  case object StartError extends Actor {
+    def act() {
       try {
-      throw new MyException("I don't want to run!") 
+      throw new MyException("I don't want to run!")
       } catch {
         case e: Throwable if (!e.isInstanceOf[scala.util.control.ControlThrowable] &&
                               !e.isInstanceOf[MyException]) =>
           e.printStackTrace()
       }
-    } 
-  } 
+    }
+  }
 
-  case object MessageError extends Actor { 
-    def act() { 
+  case object MessageError extends Actor {
+    def act() {
       try {
-      react { 
-        case _ => throw new MyException("No message for me!") 
-      } 
+      react {
+        case _ => throw new MyException("No message for me!")
+      }
       } catch {
         case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
           e.printStackTrace()
       }
-    } 
-  } 
+    }
+  }
 
-  case object Supervisor extends Actor { 
-    def act() { 
+  case object Supervisor extends Actor {
+    def act() {
       try {
-      trapExit = true 
+      trapExit = true
       link(StartError)
       link(MessageError)
-      StartError.start() 
+      StartError.start()
       MessageError.start()
 
-      Actor.loop { 
-        react { 
+      Actor.loop {
+        react {
           case Exit(actor, reason) =>
             println(reason)
             if (actor == StartError)
               MessageError ! 'ping
             else
               exit()
-        } 
-      } 
+        }
+      }
       } catch {
         case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
           e.printStackTrace()
       }
-    } 
-  } 
+    }
+  }
 
-  def main(args: Array[String]) { 
-    Supervisor.start() 
-  } 
-} 
+  def main(args: Array[String]) {
+    Supervisor.start()
+  }
+}
diff --git a/test/files/jvm/annotations.scala b/test/files/jvm/annotations.scala
index 77a45fa..c42ecee 100644
--- a/test/files/jvm/annotations.scala
+++ b/test/files/jvm/annotations.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds, reflectiveCalls }
+
 object Test1 {
   class Foo {
     @remote
diff --git a/test/files/jvm/backendBugUnapply.check b/test/files/jvm/backendBugUnapply.check
deleted file mode 100644
index 9d1e7b2..0000000
--- a/test/files/jvm/backendBugUnapply.check
+++ /dev/null
@@ -1,2 +0,0 @@
-baz
-null
diff --git a/test/files/jvm/backendBugUnapply.scala b/test/files/jvm/backendBugUnapply.scala
deleted file mode 100644
index 45ee6f7..0000000
--- a/test/files/jvm/backendBugUnapply.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-object Test { 
-  import scala.xml.{Node,UnprefixedAttribute}
-  
-  def domatch(x:Node) =
-    x match {
-      case Node("foo", UnprefixedAttribute("bar", z, _), _*) => z
-      case _ => null
-    }
-  
-  def main(args: Array[String]): Unit = {
-    println(domatch(<foo bar="baz"><hi/></foo>))
-    println(domatch(<foo bingo="donkey"><hi/></foo>))
-    // 
-    // assert(domatch(<foo bar="baz"><hi/></foo>).toString == "baz")
-    // assert(domatch(<foo bar="baz2"><hi/></foo>) == null)//, domatch(<foo bar="baz2"><hi/></foo>))
-  }
-}
diff --git a/test/files/jvm/bigints.scala b/test/files/jvm/bigints.scala
index f0d05f8..06197cb 100644
--- a/test/files/jvm/bigints.scala
+++ b/test/files/jvm/bigints.scala
@@ -31,7 +31,6 @@ object Test_BigDecimal {
 
     val xi: BigDecimal = 1
     val xd: BigDecimal = 1.0
-    val xf: BigDecimal = BigDecimal(1.0f)
     val xs: BigDecimal = BigDecimal("1.0")
     val xbi: BigDecimal = BigDecimal(scala.BigInt(1))
 
diff --git a/test/files/jvm/bytecode-test-example/Foo_1.flags b/test/files/jvm/bytecode-test-example/Foo_1.flags
new file mode 100644
index 0000000..49f2d2c
--- /dev/null
+++ b/test/files/jvm/bytecode-test-example/Foo_1.flags
@@ -0,0 +1 @@
+-Ybackend:GenASM
diff --git a/test/files/jvm/console.scala b/test/files/jvm/console.scala
index 7544941..0ac43f2 100644
--- a/test/files/jvm/console.scala
+++ b/test/files/jvm/console.scala
@@ -9,6 +9,6 @@ object Test extends App {
   flush
   println("..")
   println(1)
-  printf("Argument nr. %d has value %1.2f\n", 
+  printf("Argument nr. %d has value %1.2f\n",
          1, 10.0/3)
 }
diff --git a/test/files/jvm/constant-optimization/Foo_1.flags b/test/files/jvm/constant-optimization/Foo_1.flags
new file mode 100644
index 0000000..86f52af
--- /dev/null
+++ b/test/files/jvm/constant-optimization/Foo_1.flags
@@ -0,0 +1 @@
+-Ynooptimise -Yconst-opt
\ No newline at end of file
diff --git a/test/files/jvm/constant-optimization/Foo_1.scala b/test/files/jvm/constant-optimization/Foo_1.scala
new file mode 100644
index 0000000..cb67ad4
--- /dev/null
+++ b/test/files/jvm/constant-optimization/Foo_1.scala
@@ -0,0 +1,9 @@
+class Foo_1 {
+  def foo() {
+  	// constant optimization should eliminate all branches
+    val i = 1
+    val x = if (i != 1) null else "good"
+    val y = if (x == null) "good" else x + ""
+    println(y)
+  }
+}
\ No newline at end of file
diff --git a/test/files/jvm/constant-optimization/Test.scala b/test/files/jvm/constant-optimization/Test.scala
new file mode 100644
index 0000000..dc0f8f6
--- /dev/null
+++ b/test/files/jvm/constant-optimization/Test.scala
@@ -0,0 +1,27 @@
+
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm
+import asm.tree.InsnList
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+  val comparisons = Set(asm.Opcodes.IF_ACMPEQ, asm.Opcodes.IF_ACMPNE, asm.Opcodes.IF_ICMPEQ, asm.Opcodes.IF_ICMPGE, asm.Opcodes.IF_ICMPGT, asm.Opcodes.IF_ICMPLE,
+    asm.Opcodes.IF_ICMPLT, asm.Opcodes.IF_ICMPNE, asm.Opcodes.IFEQ, asm.Opcodes.IFGE, asm.Opcodes.IFGT, asm.Opcodes.IFLE, asm.Opcodes.IFLT,
+    asm.Opcodes.IFNE, asm.Opcodes.IFNONNULL, asm.Opcodes.IFNULL)
+
+  def show: Unit = {
+    val classNode = loadClassNode("Foo_1")
+    val methodNode = getMethod(classNode, "foo")
+    // after optimization there should be no comparisons left
+    val expected = 0
+
+    val got = countComparisons(methodNode.instructions)
+    assert(got == expected, s"expected $expected but got $got comparisons")
+  }
+
+  def countComparisons(insnList: InsnList): Int = {
+    def isComparison(node: asm.tree.AbstractInsnNode): Boolean =
+      (comparisons contains node.getOpcode)
+    insnList.iterator.asScala count isComparison
+  }
+}
\ No newline at end of file
diff --git a/test/files/jvm/daemon-actor-termination.scala b/test/files/jvm/daemon-actor-termination.scala
index 6ddfc31..9bac634 100644
--- a/test/files/jvm/daemon-actor-termination.scala
+++ b/test/files/jvm/daemon-actor-termination.scala
@@ -1,15 +1,17 @@
-import scala.actors.{Actor, DaemonActor}
 
 /* Test that a daemon Actor that hasn't finished does not prevent termination */
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
 
+  import scala.actors.{Actor, DaemonActor}
   class MyDaemon extends DaemonActor {
     def act() {
       try {
       react {
         case 'hello =>
           println("MSG1")
-          reply()
+          reply(())
           react {
             case 'bye =>
               println("done")
diff --git a/test/files/jvm/deprecation.check b/test/files/jvm/deprecation.check
new file mode 100644
index 0000000..d116778
--- /dev/null
+++ b/test/files/jvm/deprecation.check
@@ -0,0 +1,3 @@
+warning: there were 4 deprecation warning(s); re-run with -deprecation for details
+Note: deprecation/Use_2.java uses or overrides a deprecated API.
+Note: Recompile with -Xlint:deprecation for details.
diff --git a/test/files/jvm/deprecation/Test_1.scala b/test/files/jvm/deprecation/Test_1.scala
index b68a40c..0a5b607 100644
--- a/test/files/jvm/deprecation/Test_1.scala
+++ b/test/files/jvm/deprecation/Test_1.scala
@@ -7,7 +7,7 @@ class Test {
     val i = new d.Inner
     val w = i.buz()
   }
-  
+
   @deprecated("no longer!", "") class Inner {
     @deprecated("uncool", "") def f: Int = 1
     @deprecated("this one as well!", "") var g = -1
diff --git a/test/files/jvm/duration-tck.scala b/test/files/jvm/duration-tck.scala
index b257344..3bc8a2c 100644
--- a/test/files/jvm/duration-tck.scala
+++ b/test/files/jvm/duration-tck.scala
@@ -6,6 +6,8 @@ import scala.concurrent.duration._
 import scala.reflect._
 import scala.tools.partest.TestUtil.intercept
 
+import scala.language.{ postfixOps }
+
 object Test extends App {
 
   implicit class Assert(val left: Any) extends AnyVal {
diff --git a/test/files/jvm/future-alarm.scala b/test/files/jvm/future-alarm.scala
index 8ee902b..3e71fa6 100644
--- a/test/files/jvm/future-alarm.scala
+++ b/test/files/jvm/future-alarm.scala
@@ -1,6 +1,8 @@
-import scala.actors.Futures
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors.Futures
   def main(args: Array[String]) {
     try {
     for (i <- 1 to 100000) {
diff --git a/test/files/jvm/future-awaitall-zero.scala b/test/files/jvm/future-awaitall-zero.scala
index cd6ba17..56f4bab 100644
--- a/test/files/jvm/future-awaitall-zero.scala
+++ b/test/files/jvm/future-awaitall-zero.scala
@@ -1,7 +1,9 @@
-import scala.actors.Futures._
-import scala.actors.Actor._
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors.Futures._
+  import scala.actors.Actor._
   def main(args: Array[String]) {
     try {
     val ft1 = future { reactWithin(10000) {
diff --git a/test/files/jvm/future-spec.check b/test/files/jvm/future-spec.check
new file mode 100644
index 0000000..844ca54
--- /dev/null
+++ b/test/files/jvm/future-spec.check
@@ -0,0 +1 @@
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala
index ddd819c..a290af9 100644
--- a/test/files/jvm/future-spec/FutureTests.scala
+++ b/test/files/jvm/future-spec/FutureTests.scala
@@ -1,6 +1,3 @@
-
-
-
 import scala.concurrent._
 import scala.concurrent.duration._
 import scala.concurrent.duration.Duration.Inf
@@ -10,18 +7,18 @@ import scala.util.{Try,Success,Failure}
 
 
 
-object FutureTests extends MinimalScalaTest {
+class FutureTests extends MinimalScalaTest {
 
   /* some utils */
-  
+
   def testAsync(s: String)(implicit ec: ExecutionContext): Future[String] = s match {
-    case "Hello"   => future { "World" }
+    case "Hello"   => Future { "World" }
     case "Failure" => Future.failed(new RuntimeException("Expected exception; to test fault-tolerance"))
     case "NoReply" => Promise[String]().future
   }
-  
+
   val defaultTimeout = 5 seconds
-  
+
   /* future specification */
 
   "A future with custom ExecutionContext" should {
@@ -31,41 +28,41 @@ object FutureTests extends MinimalScalaTest {
         t =>
         ms += t
       })
-      
+
       class ThrowableTest(m: String) extends Throwable(m)
-      
-      val f1 = future[Any] {
+
+      val f1 = Future[Any] {
         throw new ThrowableTest("test")
       }
-      
+
       intercept[ThrowableTest] {
         Await.result(f1, defaultTimeout)
       }
-      
+
       val latch = new TestLatch
-      val f2 = future {
+      val f2 = Future {
         Await.ready(latch, 5 seconds)
         "success"
       }
       val f3 = f2 map { s => s.toUpperCase }
-      
+
       f2 foreach { _ => throw new ThrowableTest("dispatcher foreach") }
       f2 onSuccess { case _ => throw new ThrowableTest("dispatcher receive") }
-      
+
       latch.open()
-      
+
       Await.result(f2, defaultTimeout) mustBe ("success")
-      
+
       f2 foreach { _ => throw new ThrowableTest("current thread foreach") }
       f2 onSuccess { case _ => throw new ThrowableTest("current thread receive") }
-      
+
       Await.result(f3, defaultTimeout) mustBe ("SUCCESS")
-      
-      val waiting = future {
+
+      val waiting = Future {
         Thread.sleep(1000)
       }
       Await.ready(waiting, 2000 millis)
-      
+
       ms.size mustBe (4)
       //FIXME should check
     }
@@ -96,7 +93,7 @@ object FutureTests extends MinimalScalaTest {
       val logThrowable: Throwable => Unit = p.trySuccess(_)
       val ec: ExecutionContext = ExecutionContext.fromExecutor(null, logThrowable)
 
-      val t = new NotImplementedError("foo")
+      val t = new InterruptedException()
       val f = Future(throw t)(ec)
       Await.result(p.future, 2.seconds) mustBe t
     }
@@ -106,73 +103,73 @@ object FutureTests extends MinimalScalaTest {
     import ExecutionContext.Implicits._
 
     "compose with for-comprehensions" in {
-      def async(x: Int) = future { (x * 2).toString }
-      val future0 = future[Any] {
+      def async(x: Int) = Future { (x * 2).toString }
+      val future0 = Future[Any] {
         "five!".length
       }
-      
+
       val future1 = for {
         a <- future0.mapTo[Int]  // returns 5
         b <- async(a)            // returns "10"
         c <- async(7)            // returns "14"
       } yield b + "-" + c
-      
+
       val future2 = for {
         a <- future0.mapTo[Int]
-        b <- (future { (a * 2).toString }).mapTo[Int]
-        c <- future { (7 * 2).toString } 
+        b <- (Future { (a * 2).toString }).mapTo[Int]
+        c <- Future { (7 * 2).toString }
       } yield b + "-" + c
-      
+
       Await.result(future1, defaultTimeout) mustBe ("10-14")
       assert(checkType(future1, manifest[String]))
       intercept[ClassCastException] { Await.result(future2, defaultTimeout) }
     }
-    
+
     "support pattern matching within a for-comprehension" in {
       case class Req[T](req: T)
       case class Res[T](res: T)
       def async[T](req: Req[T]) = req match {
-        case Req(s: String) => future { Res(s.length) }
-        case Req(i: Int)    => future { Res((i * 2).toString) }
+        case Req(s: String) => Future { Res(s.length) }
+        case Req(i: Int)    => Future { Res((i * 2).toString) }
       }
-      
+
       val future1 = for {
         Res(a: Int) <- async(Req("Hello"))
         Res(b: String) <- async(Req(a))
         Res(c: String) <- async(Req(7))
       } yield b + "-" + c
-      
+
       val future2 = for {
         Res(a: Int) <- async(Req("Hello"))
         Res(b: Int) <- async(Req(a))
         Res(c: Int) <- async(Req(7))
       } yield b + "-" + c
-      
+
       Await.result(future1, defaultTimeout) mustBe ("10-14")
       intercept[NoSuchElementException] { Await.result(future2, defaultTimeout) }
     }
-    
+
     "recover from exceptions" in {
       val future1 = Future(5)
       val future2 = future1 map (_ / 0)
       val future3 = future2 map (_.toString)
-      
+
       val future4 = future1 recover {
         case e: ArithmeticException => 0
       } map (_.toString)
-      
+
       val future5 = future2 recover {
         case e: ArithmeticException => 0
       } map (_.toString)
-      
+
       val future6 = future2 recover {
         case e: MatchError => 0
       } map (_.toString)
-      
+
       val future7 = future3 recover {
         case e: ArithmeticException => "You got ERROR"
       }
-      
+
       val future8 = testAsync("Failure")
       val future9 = testAsync("Failure") recover {
         case e: RuntimeException => "FAIL!"
@@ -183,7 +180,7 @@ object FutureTests extends MinimalScalaTest {
       val future11 = testAsync("Failure") recover {
         case _ => "Oops!"
       }
-      
+
       Await.result(future1, defaultTimeout) mustBe (5)
       intercept[ArithmeticException] { Await.result(future2, defaultTimeout) }
       intercept[ArithmeticException] { Await.result(future3, defaultTimeout) }
@@ -196,23 +193,23 @@ object FutureTests extends MinimalScalaTest {
       Await.result(future10, defaultTimeout) mustBe ("World")
       Await.result(future11, defaultTimeout) mustBe ("Oops!")
     }
-    
+
     "recoverWith from exceptions" in {
       val o = new IllegalStateException("original")
       val r = new IllegalStateException("recovered")
-      
+
       intercept[IllegalStateException] {
         val failed = Future.failed[String](o) recoverWith {
           case _ if false == true => Future.successful("yay!")
         }
         Await.result(failed, defaultTimeout)
       } mustBe (o)
-      
+
       val recovered = Future.failed[String](o) recoverWith {
         case _ => Future.successful("yay!")
       }
       Await.result(recovered, defaultTimeout) mustBe ("yay!")
-      
+
       intercept[IllegalStateException] {
         val refailed = Future.failed[String](o) recoverWith {
           case _ => Future.failed[String](r)
@@ -220,11 +217,11 @@ object FutureTests extends MinimalScalaTest {
         Await.result(refailed, defaultTimeout)
       } mustBe (r)
     }
-    
+
     "andThen like a boss" in {
       val q = new java.util.concurrent.LinkedBlockingQueue[Int]
       for (i <- 1 to 1000) {
-        val chained = future {
+        val chained = Future {
           q.add(1); 3
         } andThen {
           case _ => q.add(2)
@@ -240,28 +237,28 @@ object FutureTests extends MinimalScalaTest {
         q.clear()
       }
     }
-    
+
     "firstCompletedOf" in {
       def futures = Vector.fill[Future[Int]](10) {
         Promise[Int]().future
       } :+ Future.successful[Int](5)
-      
+
       Await.result(Future.firstCompletedOf(futures), defaultTimeout) mustBe (5)
       Await.result(Future.firstCompletedOf(futures.iterator), defaultTimeout) mustBe (5)
     }
-    
+
     "find" in {
-      val futures = for (i <- 1 to 10) yield future {
+      val futures = for (i <- 1 to 10) yield Future {
         i
       }
-      
+
       val result = Future.find[Int](futures)(_ == 3)
       Await.result(result, defaultTimeout) mustBe (Some(3))
 
       val notFound = Future.find[Int](futures.iterator)(_ == 11)
       Await.result(notFound, defaultTimeout) mustBe (None)
     }
-    
+
     "zip" in {
       val timeout = 10000 millis
       val f = new IllegalStateException("test")
@@ -269,48 +266,48 @@ object FutureTests extends MinimalScalaTest {
         val failed = Future.failed[String](f) zip Future.successful("foo")
         Await.result(failed, timeout)
       } mustBe (f)
-      
+
       intercept[IllegalStateException] {
         val failed = Future.successful("foo") zip Future.failed[String](f)
         Await.result(failed, timeout)
       } mustBe (f)
-      
+
       intercept[IllegalStateException] {
         val failed = Future.failed[String](f) zip Future.failed[String](f)
         Await.result(failed, timeout)
       } mustBe (f)
-      
+
       val successful = Future.successful("foo") zip Future.successful("foo")
       Await.result(successful, timeout) mustBe (("foo", "foo"))
     }
-    
+
     "fold" in {
       val timeout = 10000 millis
-      def async(add: Int, wait: Int) = future {
+      def async(add: Int, wait: Int) = Future {
         Thread.sleep(wait)
         add
       }
-      
+
       val futures = (0 to 9) map {
         idx => async(idx, idx * 20)
       }
       val folded = Future.fold(futures)(0)(_ + _)
       Await.result(folded, timeout) mustBe (45)
-      
+
       val futuresit = (0 to 9) map {
         idx => async(idx, idx * 20)
       }
       val foldedit = Future.fold(futures)(0)(_ + _)
       Await.result(foldedit, timeout) mustBe (45)
     }
-    
+
     "fold by composing" in {
       val timeout = 10000 millis
-      def async(add: Int, wait: Int) = future {
+      def async(add: Int, wait: Int) = Future {
         Thread.sleep(wait)
         add
       }
-      def futures = (0 to 9) map { 
+      def futures = (0 to 9) map {
         idx => async(idx, idx * 20)
       }
       val folded = futures.foldLeft(Future(0)) {
@@ -318,10 +315,10 @@ object FutureTests extends MinimalScalaTest {
       }
       Await.result(folded, timeout) mustBe (45)
     }
-    
+
     "fold with an exception" in {
       val timeout = 10000 millis
-      def async(add: Int, wait: Int) = future {
+      def async(add: Int, wait: Int) = Future {
         Thread.sleep(wait)
         if (add == 6) throw new IllegalArgumentException("shouldFoldResultsWithException: expected")
         add
@@ -334,7 +331,7 @@ object FutureTests extends MinimalScalaTest {
         Await.result(folded, timeout)
       }.getMessage mustBe ("shouldFoldResultsWithException: expected")
     }
-    
+
     "fold mutable zeroes safely" in {
       import scala.collection.mutable.ArrayBuffer
       def test(testNumber: Int) {
@@ -344,36 +341,36 @@ object FutureTests extends MinimalScalaTest {
           case (l, _)               => l
         }
         val result = Await.result(f.mapTo[ArrayBuffer[Int]], 10000 millis).sum
-        
+
         assert(result == 250500)
       }
 
       (1 to 100) foreach test //Make sure it tries to provoke the problem
     }
-    
+
     "return zero value if folding empty list" in {
       val zero = Future.fold(List[Future[Int]]())(0)(_ + _)
       Await.result(zero, defaultTimeout) mustBe (0)
     }
-    
+
     "shouldReduceResults" in {
-      def async(idx: Int) = future {
+      def async(idx: Int) = Future {
         Thread.sleep(idx * 20)
         idx
       }
       val timeout = 10000 millis
-      
+
       val futures = (0 to 9) map { async }
       val reduced = Future.reduce(futures)(_ + _)
       Await.result(reduced, timeout) mustBe (45)
-      
+
       val futuresit = (0 to 9) map { async }
       val reducedit = Future.reduce(futuresit)(_ + _)
       Await.result(reducedit, timeout) mustBe (45)
     }
-    
+
     "shouldReduceResultsWithException" in {
-      def async(add: Int, wait: Int) = future {
+      def async(add: Int, wait: Int) = Future {
         Thread.sleep(wait)
         if (add == 6) throw new IllegalArgumentException("shouldFoldResultsWithException: expected")
         else add
@@ -387,14 +384,14 @@ object FutureTests extends MinimalScalaTest {
         Await.result(failed, timeout)
       }.getMessage mustBe ("shouldFoldResultsWithException: expected")
     }
-    
+
     "shouldReduceThrowNSEEOnEmptyInput" in {
       intercept[java.util.NoSuchElementException] {
         val emptyreduced = Future.reduce(List[Future[Int]]())(_ + _)
         Await.result(emptyreduced, defaultTimeout)
       }
     }
-    
+
     "shouldTraverseFutures" in {
       object counter {
         var count = -1
@@ -403,54 +400,54 @@ object FutureTests extends MinimalScalaTest {
           count
         }
       }
-      
-      val oddFutures = List.fill(100)(future { counter.incAndGet() }).iterator
+
+      val oddFutures = List.fill(100)(Future { counter.incAndGet() }).iterator
       val traversed = Future.sequence(oddFutures)
       Await.result(traversed, defaultTimeout).sum mustBe (10000)
-      
+
       val list = (1 to 100).toList
       val traversedList = Future.traverse(list)(x => Future(x * 2 - 1))
       Await.result(traversedList, defaultTimeout).sum mustBe (10000)
-      
+
       val iterator = (1 to 100).toList.iterator
       val traversedIterator = Future.traverse(iterator)(x => Future(x * 2 - 1))
       Await.result(traversedIterator, defaultTimeout).sum mustBe (10000)
     }
-    
+
     "shouldBlockUntilResult" in {
       val latch = new TestLatch
-      
-      val f = future {
+
+      val f = Future {
         Await.ready(latch, 5 seconds)
         5
       }
-      val f2 = future {
+      val f2 = Future {
         val res = Await.result(f, Inf)
         res + 9
       }
-      
+
       intercept[TimeoutException] {
         Await.ready(f2, 100 millis)
       }
-      
+
       latch.open()
-      
+
       Await.result(f2, defaultTimeout) mustBe (14)
-      
-      val f3 = future {
+
+      val f3 = Future {
         Thread.sleep(100)
         5
       }
-      
+
       intercept[TimeoutException] {
         Await.ready(f3, 0 millis)
       }
     }
-    
+
     "run callbacks async" in {
       val latch = Vector.fill(10)(new TestLatch)
-      
-      val f1 = future {
+
+      val f1 = Future {
         latch(0).open()
         Await.ready(latch(1), TestLatch.DefaultTimeout)
         "Hello"
@@ -462,18 +459,18 @@ object FutureTests extends MinimalScalaTest {
         s.length
       }
       for (_ <- f2) latch(4).open()
-      
+
       Await.ready(latch(0), TestLatch.DefaultTimeout)
-      
+
       f1.isCompleted mustBe (false)
       f2.isCompleted mustBe (false)
-      
+
       latch(1).open()
       Await.ready(latch(2), TestLatch.DefaultTimeout)
-      
+
       f1.isCompleted mustBe (true)
       f2.isCompleted mustBe (false)
-      
+
       val f3 = f1 map {
         s =>
         latch(5).open()
@@ -481,17 +478,17 @@ object FutureTests extends MinimalScalaTest {
         s.length * 2
       }
       for (_ <- f3) latch(3).open()
-      
+
       Await.ready(latch(5), TestLatch.DefaultTimeout)
-      
+
       f3.isCompleted mustBe (false)
-      
+
       latch(6).open()
       Await.ready(latch(4), TestLatch.DefaultTimeout)
-      
+
       f2.isCompleted mustBe (true)
       f3.isCompleted mustBe (true)
-      
+
       val p1 = Promise[String]()
       val f4 = p1.future map {
         s =>
@@ -500,34 +497,34 @@ object FutureTests extends MinimalScalaTest {
         s.length
       }
       for (_ <- f4) latch(9).open()
-      
+
       p1.future.isCompleted mustBe (false)
       f4.isCompleted mustBe (false)
-      
+
       p1 complete Success("Hello")
-      
+
       Await.ready(latch(7), TestLatch.DefaultTimeout)
-      
+
       p1.future.isCompleted mustBe (true)
       f4.isCompleted mustBe (false)
-      
+
       latch(8).open()
       Await.ready(latch(9), TestLatch.DefaultTimeout)
-      
+
       Await.ready(f4, defaultTimeout).isCompleted mustBe (true)
     }
-    
+
     "should not deadlock with nested await (ticket 1313)" in {
-      val simple = Future() map {
+      val simple = Future(()) map {
         _ =>
         val unit = Future(())
         val umap = unit map { _ => () }
         Await.result(umap, Inf)
       }
       Await.ready(simple, Inf).isCompleted mustBe (true)
-      
+
       val l1, l2 = new TestLatch
-      val complex = Future() map {
+      val complex = Future(()) map {
         _ =>
         blocking {
           val nested = Future(())
@@ -542,12 +539,10 @@ object FutureTests extends MinimalScalaTest {
 
     "should not throw when Await.ready" in {
       val expected = try Success(5 / 0) catch { case a: ArithmeticException => Failure(a) }
-      val f = future(5).map(_ / 0)
+      val f = Future(5).map(_ / 0)
       Await.ready(f, defaultTimeout).value.get.toString mustBe expected.toString
     }
-    
-  }
-  
-}
 
+  }
 
+}
diff --git a/test/files/jvm/future-spec/PromiseTests.scala b/test/files/jvm/future-spec/PromiseTests.scala
index 48f9466..12b9168 100644
--- a/test/files/jvm/future-spec/PromiseTests.scala
+++ b/test/files/jvm/future-spec/PromiseTests.scala
@@ -9,33 +9,33 @@ import scala.runtime.NonLocalReturnControl
 import scala.util.{Try,Success,Failure}
 
 
-object PromiseTests extends MinimalScalaTest {
+class PromiseTests extends MinimalScalaTest {
   import ExecutionContext.Implicits._
 
   val defaultTimeout = Inf
-  
+
   /* promise specification */
-  
+
   "An empty Promise" should {
-    
+
     "not be completed" in {
       val p = Promise()
       p.future.isCompleted mustBe (false)
       p.isCompleted mustBe (false)
     }
-    
+
     "have no value" in {
       val p = Promise()
       p.future.value mustBe (None)
       p.isCompleted mustBe (false)
     }
-    
+
     "return supplied value on timeout" in {
       val failure = Promise.failed[String](new RuntimeException("br0ken")).future
       val otherFailure = Promise.failed[String](new RuntimeException("last")).future
       val empty = Promise[String]().future
       val timedOut = Promise.successful[String]("Timedout").future
-      
+
       Await.result(failure fallbackTo timedOut, defaultTimeout) mustBe ("Timedout")
       Await.result(timedOut fallbackTo empty, defaultTimeout) mustBe ("Timedout")
       Await.result(otherFailure fallbackTo failure fallbackTo timedOut, defaultTimeout) mustBe ("Timedout")
@@ -43,47 +43,47 @@ object PromiseTests extends MinimalScalaTest {
         Await.result(failure fallbackTo otherFailure, defaultTimeout)
       }.getMessage mustBe ("br0ken")
     }
-    
+
   }
-  
+
   "A successful Promise" should {
     val result = "test value"
     val promise = Promise[String]().complete(Success(result))
     promise.isCompleted mustBe (true)
     futureWithResult(_(promise.future, result))
   }
-  
+
   "A failed Promise" should {
     val message = "Expected Exception"
     val promise = Promise[String]().complete(Failure(new RuntimeException(message)))
     promise.isCompleted mustBe (true)
     futureWithException[RuntimeException](_(promise.future, message))
   }
-  
+
   "An interrupted Promise" should {
     val message = "Boxed InterruptedException"
     val future = Promise[String]().complete(Failure(new InterruptedException(message))).future
     futureWithException[ExecutionException](_(future, message))
   }
-  
+
   "A NonLocalReturnControl failed Promise" should {
     val result = "test value"
     val future = Promise[String]().complete(Failure(new NonLocalReturnControl[String]("test", result))).future
     futureWithResult(_(future, result))
   }
-  
+
   def futureWithResult(f: ((Future[Any], Any) => Unit) => Unit) {
-    
+
     "be completed" in { f((future, _) => future.isCompleted mustBe (true)) }
-    
+
     "contain a value" in { f((future, result) => future.value mustBe (Some(Success(result)))) }
-    
+
     "return when ready with 'Await.ready'" in { f((future, result) => Await.ready(future, defaultTimeout).isCompleted mustBe (true)) }
-    
+
     "return result with 'Await.result'" in { f((future, result) => Await.result(future, defaultTimeout) mustBe (result)) }
-    
+
     "not timeout" in { f((future, _) => Await.ready(future, 0 millis)) }
-    
+
     "filter result" in {
       f {
         (future, result) =>
@@ -93,16 +93,16 @@ object PromiseTests extends MinimalScalaTest {
         }
       }
     }
-    
+
     "transform result with map" in { f((future, result) => Await.result((future map (_.toString.length)), defaultTimeout) mustBe (result.toString.length)) }
-    
+
     "compose result with flatMap" in {
       f { (future, result) =>
         val r = for (r <- future; p <- Promise.successful("foo").future) yield r.toString + p
         Await.result(r, defaultTimeout) mustBe (result.toString + "foo")
       }
     }
-    
+
     "perform action with foreach" in {
       f {
         (future, result) =>
@@ -111,7 +111,7 @@ object PromiseTests extends MinimalScalaTest {
         Await.result(p.future, defaultTimeout) mustBe (result)
       }
     }
-    
+
     "zip properly" in {
       f {
         (future, result) =>
@@ -121,9 +121,9 @@ object PromiseTests extends MinimalScalaTest {
         }.getMessage mustBe ("ohnoes")
       }
     }
-    
+
     "not recover from exception" in { f((future, result) => Await.result(future.recover({ case _ => "pigdog" }), defaultTimeout) mustBe (result)) }
-    
+
     "perform action on result" in {
       f {
         (future, result) =>
@@ -132,7 +132,7 @@ object PromiseTests extends MinimalScalaTest {
         Await.result(p.future, defaultTimeout) mustBe (result)
       }
     }
-    
+
     "not project a failure" in {
       f {
         (future, result) =>
@@ -141,34 +141,34 @@ object PromiseTests extends MinimalScalaTest {
           }.getMessage mustBe ("Future.failed not completed with a throwable.")
       }
     }
-    
+
     "cast using mapTo" in {
       f {
         (future, result) =>
         Await.result(future.mapTo[Boolean].recover({ case _: ClassCastException ⇒ false }), defaultTimeout) mustBe (false)
       }
     }
-    
+
   }
 
   def futureWithException[E <: Throwable: Manifest](f: ((Future[Any], String) => Unit) => Unit) {
-    
+
     "be completed" in {
       f((future, _) => future.isCompleted mustBe (true))
     }
-    
+
     "contain a value" in {
       f((future, message) => {
         future.value.get.failed.get.getMessage mustBe (message)
       })
     }
-    
+
     "throw not throw exception with 'Await.ready'" in {
       f {
         (future, message) => Await.ready(future, defaultTimeout).isCompleted mustBe (true)
       }
     }
-    
+
     "throw exception with 'Await.result'" in {
       f {
         (future, message) =>
@@ -177,7 +177,7 @@ object PromiseTests extends MinimalScalaTest {
         }.getMessage mustBe (message)
       }
     }
-    
+
     "retain exception with filter" in {
       f {
         (future, message) =>
@@ -185,21 +185,21 @@ object PromiseTests extends MinimalScalaTest {
         intercept[E] { Await.result(future filter (_ => false), defaultTimeout) }.getMessage mustBe (message)
       }
     }
-    
+
     "retain exception with map" in {
       f {
         (future, message) =>
         intercept[E] { Await.result(future map (_.toString.length), defaultTimeout) }.getMessage mustBe (message)
       }
     }
-    
+
     "retain exception with flatMap" in {
       f {
         (future, message) =>
         intercept[E] { Await.result(future flatMap (_ => Promise.successful("foo").future), defaultTimeout) }.getMessage mustBe (message)
       }
     }
-    
+
     "zip properly" in {
       f {
         (future, message) =>
@@ -208,18 +208,18 @@ object PromiseTests extends MinimalScalaTest {
         }.getMessage mustBe (message)
       }
     }
-    
+
     "recover from exception" in {
       f {
         (future, message) =>
         Await.result(future.recover({ case e if e.getMessage == message ⇒ "pigdog" }), defaultTimeout) mustBe ("pigdog")
       }
     }
-    
+
     "project a failure" in {
       f((future, message) => Await.result(future.failed, defaultTimeout).getMessage mustBe (message))
     }
-    
+
     "perform action on exception" in {
       f {
         (future, message) =>
@@ -228,7 +228,7 @@ object PromiseTests extends MinimalScalaTest {
         Await.result(p.future, defaultTimeout) mustBe (message)
       }
     }
-    
+
     "always cast successfully using mapTo" in {
       f {
         (future, message) =>
diff --git a/test/files/jvm/future-spec/TryTests.scala b/test/files/jvm/future-spec/TryTests.scala
index 5d1b9b8..01bb3c9 100644
--- a/test/files/jvm/future-spec/TryTests.scala
+++ b/test/files/jvm/future-spec/TryTests.scala
@@ -5,7 +5,7 @@
 
 import scala.util.{Try,Success,Failure}
 
-object TryTests extends MinimalScalaTest {
+class TryTests extends MinimalScalaTest {
   class MyException extends Exception
   val e = new Exception("this is an exception")
 
diff --git a/test/files/jvm/future-spec/main.scala b/test/files/jvm/future-spec/main.scala
index 90048cc..697d0fe 100644
--- a/test/files/jvm/future-spec/main.scala
+++ b/test/files/jvm/future-spec/main.scala
@@ -8,40 +8,46 @@ import java.util.concurrent.{ TimeoutException, CountDownLatch, TimeUnit }
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
-    FutureTests.check()
-    PromiseTests.check()
-    TryTests.check()
+    (new FutureTests).check()
+    (new PromiseTests).check()
+    (new TryTests).check()
   }
-  
+
+}
+
+trait Features {
+  implicit def implicitously = scala.language.implicitConversions
+  implicit def reflectively  = scala.language.reflectiveCalls
+  implicit def postulously   = scala.language.postfixOps
 }
 
 
 trait Output {
   val buffer = new StringBuilder
-  
+
   def bufferPrintln(a: Any) = buffer.synchronized {
     buffer.append(a.toString + "\n")
   }
 }
 
 
-trait MinimalScalaTest extends Output {
-  
+trait MinimalScalaTest extends Output with Features {
+
   val throwables = mutable.ArrayBuffer[Throwable]()
-  
+
   def check() {
     if (throwables.nonEmpty) println(buffer.toString)
   }
-  
+
   implicit def stringops(s: String) = new {
-    
+
     def should[U](snippets: =>U) = {
       bufferPrintln(s + " should:")
       snippets
     }
-    
+
     def in[U](snippet: =>U) = {
       try {
         bufferPrintln("- " + s)
@@ -54,27 +60,27 @@ trait MinimalScalaTest extends Output {
           throwables += e
       }
     }
-    
+
   }
-  
+
   implicit def objectops(obj: Any) = new {
-    
+
     def mustBe(other: Any) = assert(obj == other, obj + " is not " + other)
     def mustEqual(other: Any) = mustBe(other)
-    
+
   }
-  
+
   def intercept[T <: Throwable: Manifest](body: =>Any): T = {
     try {
       body
       throw new Exception("Exception of type %s was not thrown".format(manifest[T]))
     } catch {
       case t: Throwable =>
-        if (manifest[T].erasure != t.getClass) throw t
+        if (manifest[T].runtimeClass != t.getClass) throw t
         else t.asInstanceOf[T]
     }
   }
-  
+
   def checkType[T: Manifest, S](in: Future[T], refmanifest: Manifest[S]): Boolean = manifest[T] == refmanifest
 }
 
@@ -88,23 +94,23 @@ object TestLatch {
 
 class TestLatch(count: Int = 1) extends Awaitable[Unit] {
   private var latch = new CountDownLatch(count)
-  
+
   def countDown() = latch.countDown()
   def isOpen: Boolean = latch.getCount == 0
   def open() = while (!isOpen) countDown()
   def reset() = latch = new CountDownLatch(count)
-  
+
   @throws(classOf[TimeoutException])
   def ready(atMost: Duration)(implicit permit: CanAwait) = {
     val opened = latch.await(atMost.toNanos, TimeUnit.NANOSECONDS)
     if (!opened) throw new TimeoutException("Timeout of %s." format (atMost.toString))
     this
   }
-  
+
   @throws(classOf[Exception])
   def result(atMost: Duration)(implicit permit: CanAwait): Unit = {
     ready(atMost)
   }
-  
+
 }
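
For orientation, an illustrative sketch (not from the upstream sources) of how the should/in/mustBe mini-DSL defined in MinimalScalaTest above is consumed now that the suites are plain classes; it assumes MinimalScalaTest from test/files/jvm/future-spec/main.scala is on the compile path:

    // Hypothetical suite, for illustration only; relies on the MinimalScalaTest
    // trait shown in this patch (should/in via stringops, mustBe via objectops).
    class ExampleTests extends MinimalScalaTest {
      "A list" should {
        "know its length" in {
          List(1, 2, 3).length mustBe (3)
        }
        "fail on head of an empty list" in {
          intercept[NoSuchElementException] { Nil.head }
        }
      }
    }

    object ExampleMain {
      def main(args: Array[String]): Unit = (new ExampleTests).check()
    }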
 
diff --git a/test/files/jvm/future-termination.scala b/test/files/jvm/future-termination.scala
index f51642c..90ea336 100644
--- a/test/files/jvm/future-termination.scala
+++ b/test/files/jvm/future-termination.scala
@@ -1,19 +1,21 @@
-import scala.actors.Futures 
- 
-/* Test that unevaluated futures do not prevent program termination */ 
- 
-object Test { 
-  def main(args: Array[String]) { 
+
+/* Test that unevaluated futures do not prevent program termination */
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
+  import scala.actors.Futures
+  def main(args: Array[String]) {
     try {
-    val meaningOfLife = Futures.future { 
-      Thread.sleep(5000) // pretend this is a harder problem than it is 
-      println("I have the answer!") 
-      42 
-    } 
-    println("I can't wait that long, bye.") 
+    val meaningOfLife = Futures.future {
+      Thread.sleep(5000) // pretend this is a harder problem than it is
+      println("I have the answer!")
+      42
+    }
+    println("I can't wait that long, bye.")
     } catch {
       case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
         e.printStackTrace()
     }
-  } 
+  }
 }
diff --git a/test/files/jvm/inner.scala b/test/files/jvm/inner.scala
index 009d52e..c05e803 100644
--- a/test/files/jvm/inner.scala
+++ b/test/files/jvm/inner.scala
@@ -14,7 +14,7 @@ class A {
 
   trait Itf {
     def method1(x: Int): Int
-    
+
     trait Itf2 extends Itf {
       def method2: Unit
     }
diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check
index 96b57c7..b55ecc1 100644
--- a/test/files/jvm/interpreter.check
+++ b/test/files/jvm/interpreter.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> // basics
 
 scala> 3+4
@@ -60,7 +58,7 @@ t1513: Array[Null] = Array(null)
 
 scala> // ambiguous toString problem from #547
 
-scala> val atom = new scala.xml.Atom()
+scala> val atom = new scala.xml.Atom(())
 atom: scala.xml.Atom[Unit] = ()
 
 scala> // overriding toString problem from #1404
@@ -301,7 +299,7 @@ scala> <a>
   c="c"
   d="dd"
 /></a>
-res8: scala.xml.Elem = 
+res8: scala.xml.Elem =
 <a>
 <b c="c" d="dd"/></a>
 
@@ -324,7 +322,7 @@ scala> """
 hello
 there
 """
-res12: String = 
+res12: String =
 "
 hello
 there
@@ -357,17 +355,13 @@ defined class Term
 scala> def f(e: Exp) = e match {  // non-exhaustive warning here
   case _:Fact => 3
 }
-<console>:18: warning: match is not exhaustive!
-missing combination            Exp
-missing combination           Term
-
+<console>:18: warning: match may not be exhaustive.
+It would fail on the following inputs: Exp(), Term()
        def f(e: Exp) = e match {  // non-exhaustive warning here
                        ^
 f: (e: Exp)Int
 
 scala> 
-
-scala> 
 plusOne: (x: Int)Int
 res0: Int = 6
 res0: String = after reset
diff --git a/test/files/jvm/interpreter.scala b/test/files/jvm/interpreter.scala
index f45eb03..c68c064 100644
--- a/test/files/jvm/interpreter.scala
+++ b/test/files/jvm/interpreter.scala
@@ -2,7 +2,7 @@ import scala.tools.nsc._
 import scala.tools.partest.ReplTest
 
 object Test extends ReplTest {
-  override def extraSettings = "-deprecation -Xoldpatmat"
+  override def extraSettings = "-deprecation"
   def code = <code>
 // basics
 3+4
@@ -25,7 +25,7 @@ println("hello")
 // ticket #1513
 val t1513 = Array(null)
 // ambiguous toString problem from #547
-val atom = new scala.xml.Atom()
+val atom = new scala.xml.Atom(())
 // overriding toString problem from #1404
 class S(override val toString : String)
 val fish = new S("fish")
diff --git a/test/files/jvm/manifests-new.check b/test/files/jvm/manifests-new.check
index a1ff949..7b229ba 100644
--- a/test/files/jvm/manifests-new.check
+++ b/test/files/jvm/manifests-new.check
@@ -31,8 +31,8 @@ x=Foo, t=TypeTag[Foo[List[Int]]], k=TypeRef, s=class Foo
 x=Foo, t=TypeTag[Foo[Foo[Int]]], k=TypeRef, s=class Foo
 x=Foo, t=TypeTag[Foo[List[Foo[Int]]]], k=TypeRef, s=class Foo
 
-x=Test1$$anon$1, t=TypeTag[Bar[String]], k=RefinedType, s=<local Test1>
-x=Test1$$anon$2, t=TypeTag[Bar[String]], k=RefinedType, s=<local Test1>
+x=Test1$$anon$1, t=TypeTag[Bar[String]], k=RefinedType, s=<refinement of Bar[String]>
+x=Test1$$anon$2, t=TypeTag[Bar[String]], k=RefinedType, s=<refinement of Bar[String]>
 
 ()=()
 true=true
diff --git a/test/files/jvm/manifests-new.scala b/test/files/jvm/manifests-new.scala
index f730be6..3937fde 100644
--- a/test/files/jvm/manifests-new.scala
+++ b/test/files/jvm/manifests-new.scala
@@ -56,7 +56,7 @@ object Test1 extends TestUtil {
 }
 
 object Test2 {
-  import scala.util.Marshal._
+  import Marshal._
   println("()="+load[Unit](dump(())))
   println("true="+load[Boolean](dump(true)))
   println("a="+load[Char](dump('a')))
@@ -88,6 +88,38 @@ object Test2 {
   println()
 }
 
+object Marshal {
+  import java.io._
+  import scala.reflect.ClassTag
+
+  def dump[A](o: A)(implicit t: ClassTag[A]): Array[Byte] = {
+    val ba = new ByteArrayOutputStream(512)
+    val out = new ObjectOutputStream(ba)
+    out.writeObject(t)
+    out.writeObject(o)
+    out.close()
+    ba.toByteArray()
+  }
+
+  @throws(classOf[IOException])
+  @throws(classOf[ClassCastException])
+  @throws(classOf[ClassNotFoundException])
+  def load[A](buffer: Array[Byte])(implicit expected: ClassTag[A]): A = {
+    val in = new ObjectInputStream(new ByteArrayInputStream(buffer))
+    val found = in.readObject.asInstanceOf[ClassTag[_]]
+    try {
+      found.runtimeClass.asSubclass(expected.runtimeClass)
+      in.readObject.asInstanceOf[A]
+    } catch {
+      case _: ClassCastException =>
+        in.close()
+        throw new ClassCastException("type mismatch;"+
+          "\n found : "+found+
+          "\n required: "+expected)
+    }
+  }
+}
+
 trait TestUtil {
   import java.io._
   def write[A](o: A): Array[Byte] = {
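
The Marshal helper added above serializes a ClassTag ahead of the payload and re-checks it on deserialization. A small usage sketch (hypothetical driver, assuming the Marshal object from this patch is in scope):

    object MarshalDemo {
      def main(args: Array[String]): Unit = {
        val bytes = Marshal.dump(List(1, 2, 3))
        val back  = Marshal.load[List[Int]](bytes)  // tag check: List vs List, ok
        println(back)                               // List(1, 2, 3)
        // Asking for an unrelated type trips the tag comparison up front
        // instead of failing later with a corrupt cast:
        // Marshal.load[Vector[Int]](bytes)         // throws ClassCastException
      }
    }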
diff --git a/test/files/jvm/manifests-old.scala b/test/files/jvm/manifests-old.scala
index 241966f..bb1928f 100644
--- a/test/files/jvm/manifests-old.scala
+++ b/test/files/jvm/manifests-old.scala
@@ -55,7 +55,7 @@ object Test1 extends TestUtil {
 }
 
 object Test2 {
-  import scala.util.Marshal._
+  import Marshal._
   println("()="+load[Unit](dump(())))
   println("true="+load[Boolean](dump(true)))
   println("a="+load[Char](dump('a')))
@@ -87,6 +87,38 @@ object Test2 {
   println()
 }
 
+object Marshal {
+  import java.io._
+  import scala.reflect.ClassTag
+
+  def dump[A](o: A)(implicit t: ClassTag[A]): Array[Byte] = {
+    val ba = new ByteArrayOutputStream(512)
+    val out = new ObjectOutputStream(ba)
+    out.writeObject(t)
+    out.writeObject(o)
+    out.close()
+    ba.toByteArray()
+  }
+
+  @throws(classOf[IOException])
+  @throws(classOf[ClassCastException])
+  @throws(classOf[ClassNotFoundException])
+  def load[A](buffer: Array[Byte])(implicit expected: ClassTag[A]): A = {
+    val in = new ObjectInputStream(new ByteArrayInputStream(buffer))
+    val found = in.readObject.asInstanceOf[ClassTag[_]]
+    try {
+      found.runtimeClass.asSubclass(expected.runtimeClass)
+      in.readObject.asInstanceOf[A]
+    } catch {
+      case _: ClassCastException =>
+        in.close()
+        throw new ClassCastException("type mismatch;"+
+          "\n found : "+found+
+          "\n required: "+expected)
+    }
+  }
+}
+
 trait TestUtil {
   import java.io._
   def write[A](o: A): Array[Byte] = {
diff --git a/test/files/jvm/methvsfield.java b/test/files/jvm/methvsfield.java
index c1b2b87..dadc986 100644
--- a/test/files/jvm/methvsfield.java
+++ b/test/files/jvm/methvsfield.java
@@ -1,11 +1,11 @@
 // This should be compiled with javac and saved
 // in ../lib/methvsfield.jar .
-class MethVsField 
+class MethVsField
 {
   int three = 3;
 
-  int three() 
-  { 
+  int three()
+  {
     return 3;
   }
 }
diff --git a/test/files/jvm/named-args-in-order.check b/test/files/jvm/named-args-in-order.check
new file mode 100644
index 0000000..29a3ba5
--- /dev/null
+++ b/test/files/jvm/named-args-in-order.check
@@ -0,0 +1,3 @@
+bytecode identical
+bytecode identical
+bytecode identical
diff --git a/test/files/jvm/named-args-in-order/SameBytecode.scala b/test/files/jvm/named-args-in-order/SameBytecode.scala
new file mode 100644
index 0000000..c006417
--- /dev/null
+++ b/test/files/jvm/named-args-in-order/SameBytecode.scala
@@ -0,0 +1,9 @@
+class SameBytecode {
+  def foo(a: Int, b: String) = 0
+  def foo(a: Int, b: Any) = 0
+
+  def a = foo(0, "")
+  def b = foo(a = 0, "")
+  def c = foo(0, b = "")
+  def d = foo(a = 0, b = "")
+}
\ No newline at end of file
diff --git a/test/files/jvm/named-args-in-order/Test.scala b/test/files/jvm/named-args-in-order/Test.scala
new file mode 100644
index 0000000..36b9cbc
--- /dev/null
+++ b/test/files/jvm/named-args-in-order/Test.scala
@@ -0,0 +1,10 @@
+import scala.tools.partest.BytecodeTest
+
+object Test extends BytecodeTest {
+  def show: Unit = {
+    val classNode = loadClassNode("SameBytecode")
+    def sameAsA(meth: String) =
+      sameBytecode(getMethod(classNode, "a"), getMethod(classNode, meth))
+    Seq("b", "c", "d").foreach(sameAsA)
+  }
+}
diff --git a/test/files/jvm/natives.scala b/test/files/jvm/natives.scala
index ba868dc..14ee4e1 100644
--- a/test/files/jvm/natives.scala
+++ b/test/files/jvm/natives.scala
@@ -1,16 +1,16 @@
 object Test {
 
   //println("java.library.path=" + System.getProperty("java.library.path"))
-  
+
   val sysWordSize = System.getProperty("sun.arch.data.model", "32")
   val sysType = System.getProperty("os.name")
-  
+
   val libName =
     if (sysType == "Mac OS X")
       "natives"
     else
       "natives-" + sysWordSize
-  
+
   System.loadLibrary(libName)
 
   @native
diff --git a/test/files/jvm/non-fatal-tests.scala b/test/files/jvm/non-fatal-tests.scala
index 471a9d2..1ff7ee5 100644
--- a/test/files/jvm/non-fatal-tests.scala
+++ b/test/files/jvm/non-fatal-tests.scala
@@ -4,19 +4,19 @@ trait NonFatalTests {
 
 	//NonFatals
     val nonFatals: Seq[Throwable] =
-      Seq(new StackOverflowError,
-          new RuntimeException,
+      Seq(new RuntimeException,
           new Exception,
-          new Throwable)
-    
+          new Throwable,
+          new NotImplementedError)
+
     //Fatals
     val fatals: Seq[Throwable] =
       Seq(new InterruptedException,
+          new StackOverflowError,
           new OutOfMemoryError,
           new LinkageError,
           new VirtualMachineError {},
-          new Throwable with scala.util.control.ControlThrowable,
-          new NotImplementedError)
+          new Throwable with scala.util.control.ControlThrowable)
 
 	def testFatalsUsingApply(): Unit = {
 	   fatals foreach { t => assert(NonFatal(t) == false) }
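
The reshuffled lists above encode the 2.11 classification: StackOverflowError moves to the fatal side and NotImplementedError becomes non-fatal. A quick sketch (hypothetical driver) of what the scala.util.control.NonFatal extractor does with those, matching the assertions in this test:

    import scala.util.control.NonFatal

    object NonFatalDemo {
      def main(args: Array[String]): Unit = {
        println(NonFatal(new NotImplementedError))  // true  - caught by NonFatal(e)
        println(NonFatal(new StackOverflowError))   // false - allowed to propagate
        try throw new RuntimeException("boom")
        catch { case NonFatal(e) => println("caught " + e.getMessage) }
      }
    }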
diff --git a/test/files/jvm/nooptimise/Foo_1.flags b/test/files/jvm/nooptimise/Foo_1.flags
new file mode 100644
index 0000000..f493cf9
--- /dev/null
+++ b/test/files/jvm/nooptimise/Foo_1.flags
@@ -0,0 +1 @@
+-Ybackend:GenASM -optimise -Ynooptimise
\ No newline at end of file
diff --git a/test/files/jvm/nooptimise/Foo_1.scala b/test/files/jvm/nooptimise/Foo_1.scala
new file mode 100644
index 0000000..896d569
--- /dev/null
+++ b/test/files/jvm/nooptimise/Foo_1.scala
@@ -0,0 +1,8 @@
+class Foo_1 {
+  def foo() {
+    // optimization will remove this magic 3 from appearing in the source
+    // so -Ynooptimize should prevent that
+    val x = 3
+
+  }
+}
diff --git a/test/files/jvm/nooptimise/Test.scala b/test/files/jvm/nooptimise/Test.scala
new file mode 100644
index 0000000..7b7ecd6
--- /dev/null
+++ b/test/files/jvm/nooptimise/Test.scala
@@ -0,0 +1,23 @@
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm
+import asm.tree.InsnList
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+  def show: Unit = {
+    val classNode = loadClassNode("Foo_1")
+    val methodNode = getMethod(classNode, "foo")
+    // if optimization didn't run then
+    // there should be some useless instructions
+    // with the magic constant 3
+    val expected = 1
+    val got = countMagicThrees(methodNode.instructions)
+    assert(got == expected, s"expected $expected but got $got magic threes")
+  }
+
+  def countMagicThrees(insnList: InsnList): Int = {
+    def isMagicThree(node: asm.tree.AbstractInsnNode): Boolean =
+      (node.getOpcode == asm.Opcodes.ICONST_3)
+    insnList.iterator.asScala.count(isMagicThree)
+  }
+}
diff --git a/test/files/jvm/opt_value_class.check b/test/files/jvm/opt_value_class.check
new file mode 100644
index 0000000..a0c18c5
--- /dev/null
+++ b/test/files/jvm/opt_value_class.check
@@ -0,0 +1,2 @@
+[ok]          <init> ()V                            public
+[ok]         unapply (Ljava/lang/Object;)Ljava/lang/String; public (Ljava/lang/Object;)Ljava/lang/String;
diff --git a/test/files/jvm/opt_value_class/Value_1.scala b/test/files/jvm/opt_value_class/Value_1.scala
new file mode 100644
index 0000000..2440609
--- /dev/null
+++ b/test/files/jvm/opt_value_class/Value_1.scala
@@ -0,0 +1,28 @@
+final class Opt[+A >: Null](val value: A) extends AnyVal {
+  def get: A  = value
+  def isEmpty = value == null
+}
+object Opt {
+  final val None = new Opt[Null](null)
+  def unapply[A >: Null](x: A): Opt[A] = if (x == null) None else Opt(x)
+  def empty[A >: Null] = None
+  def apply[A >: Null](value: A): Opt[A] = if (value == null) None else new Opt[A](value)
+}
+
+class ValueExtract {
+  def unapply(x: Any): Opt[String] = x match {
+    case _: String  => Opt("String")
+    case _: List[_] => Opt("List")
+    case _: Int     => Opt("Int")
+    case _          => Opt.None
+  }
+}
+
+class Direct {
+  def unapply(x: Any): String = x match {
+    case _: String  => "String"
+    case _: List[_] => "List"
+    case _: Int     => "Int"
+    case _          => null
+  }
+}
diff --git a/test/files/jvm/opt_value_class/test.scala b/test/files/jvm/opt_value_class/test.scala
new file mode 100644
index 0000000..7aea7de
--- /dev/null
+++ b/test/files/jvm/opt_value_class/test.scala
@@ -0,0 +1,16 @@
+import scala.tools.partest.BytecodeTest
+
+// import scala.tools.nsc.util.JavaClassPath
+// import java.io.InputStream
+// import scala.tools.asm
+// import asm.ClassReader
+// import asm.tree.{ClassNode, InsnList}
+// import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+  def show: Unit = {
+    val classNode1 = loadClassNode("ValueExtract")
+    val classNode2 = loadClassNode("Direct")
+    sameMethodAndFieldDescriptors(classNode1, classNode2)
+  }
+}
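
Opt is a value class exposing isEmpty/get, so ValueExtract#unapply acts as a name-based extractor and the test above compares its bytecode with the bare Direct variant. An illustrative caller (not part of the patch, assuming Value_1.scala is compiled alongside):

    object OptDemo {
      def main(args: Array[String]): Unit = {
        val ValueExtract = new ValueExtract  // stable identifier for the pattern
        "hello" match {
          case ValueExtract(kind) => println(kind)          // prints "String"
          case other              => println("unmatched: " + other)
        }
      }
    }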
diff --git a/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala b/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala
index fa36393..b050601 100644
--- a/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala
+++ b/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala
@@ -6,7 +6,7 @@
 class SameBytecode {
   case class Foo(x: Any, y: String)
 
-  def a = 
+  def a =
     Foo(1, "a") match {
       case Foo(_: String, y) => y
     }
diff --git a/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala b/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala
index 3a594c4..1e4d564 100644
--- a/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala
+++ b/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala
@@ -3,7 +3,7 @@
 case class Foo(x: Any)
 
 class SameBytecode {
-  def a = 
+  def a =
     (Foo(1): Any) match {
       case Foo(_: String) =>
     }
diff --git a/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala b/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala
index e5db6c4..c961082 100644
--- a/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala
+++ b/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala
@@ -3,7 +3,7 @@
 class SameBytecode {
   case class Foo(x: Int, y: String)
 
-  def a = 
+  def a =
     Foo(1, "a") match {
       case Foo(_: Int, y) => y
     }
diff --git a/test/files/jvm/protectedacc.scala b/test/files/jvm/protectedacc.scala
index f213e0d..89e70b9 100644
--- a/test/files/jvm/protectedacc.scala
+++ b/test/files/jvm/protectedacc.scala
@@ -16,7 +16,7 @@ object Test {
 
     val ji = new p.b.JavaInteraction(Array('a', 'b', 'c'));
     (new ji.Inner).m;
-    
+
     (new p.b.OuterObj.Inner).m
   }
 }
@@ -36,13 +36,13 @@ package p {
 
       def getA: this.type = this;
     }
-    
+
     /** Test type members */
     trait HighlighterXXX {
       type Node;
       protected def highlight(node : Node) : Unit;
     }
-    
+
     /** Test type parameters */
     abstract class PolyA[a] {
       protected def m(x: a): Unit;
@@ -119,22 +119,22 @@ package p {
 
           val inc = meth2(1)_;
           Console.println("100 = " + inc("10"));
-          
+
           getA.x;
         }
       }
     }
-    
+
     trait ScalaAutoEditXXX extends HighlighterXXX {
-      trait NodeImpl { 
+      trait NodeImpl {
         def self : Node;
         highlight(self);
       }
     }
-    
+
     abstract class X[T] extends PolyA[T] {
 
-      trait Inner extends B { 
+      trait Inner extends B {
         def self: T;
         def self2: Node;
         def getB: Inner;
diff --git a/test/files/jvm/reactor-exceptionOnSend.scala b/test/files/jvm/reactor-exceptionOnSend.scala
index 3d9a042..6d79fc9 100644
--- a/test/files/jvm/reactor-exceptionOnSend.scala
+++ b/test/files/jvm/reactor-exceptionOnSend.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors.Reactor
 import scala.actors.Actor._
 
@@ -48,7 +52,6 @@ object B extends Reactor[Any] {
   }
 }
 
-object Test {
   def main(args: Array[String]) {
     B.start()
   }
diff --git a/test/files/jvm/reactor-producer-consumer.scala b/test/files/jvm/reactor-producer-consumer.scala
index 8a6b17c..ec34feb 100644
--- a/test/files/jvm/reactor-producer-consumer.scala
+++ b/test/files/jvm/reactor-producer-consumer.scala
@@ -1,6 +1,8 @@
-import scala.actors.Reactor
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors.Reactor
   case class Stop()
   case class Get(from: Reactor[Any])
   case class Put(x: Int)
diff --git a/test/files/jvm/reactor.scala b/test/files/jvm/reactor.scala
index dbc9a6b..91ded27 100644
--- a/test/files/jvm/reactor.scala
+++ b/test/files/jvm/reactor.scala
@@ -1,3 +1,11 @@
+/**
+ * Ping pong example for Reactor.
+ *
+ * @author  Philipp Haller
+ */
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 
 import scala.actors.Reactor
 
@@ -5,19 +13,12 @@ case class Ping(from: Reactor[Any])
 case object Pong
 case object Stop
 
-/**
- * Ping pong example for Reactor.
- *
- * @author  Philipp Haller
- */
-object Test {
   def main(args: Array[String]) {
     val pong = new PongActor
     val ping = new PingActor(100000, pong)
     ping.start
     pong.start
   }
-}
 
 class PingActor(count: Int, pong: Reactor[Any]) extends Reactor[Any] {
   def act() {
@@ -68,3 +69,4 @@ class PongActor extends Reactor[Any] {
     }
   }
 }
+}
diff --git a/test/files/jvm/replyablereactor.scala b/test/files/jvm/replyablereactor.scala
index e1fabc9..4c4e13d 100644
--- a/test/files/jvm/replyablereactor.scala
+++ b/test/files/jvm/replyablereactor.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors.ReplyReactor
 
 class MyActor extends ReplyReactor {
@@ -18,7 +22,6 @@ class MyActor extends ReplyReactor {
   }
 }
 
-object Test {
   def main(args: Array[String]) {
     val a = new MyActor
     a.start()
diff --git a/test/files/jvm/replyablereactor2.scala b/test/files/jvm/replyablereactor2.scala
index da9e0e2..21f33cc 100644
--- a/test/files/jvm/replyablereactor2.scala
+++ b/test/files/jvm/replyablereactor2.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors._
 import scala.actors.Actor._
 
@@ -19,7 +23,6 @@ class MyActor extends ReplyReactor {
   }
 }
 
-object Test {
   def main(args: Array[String]) {
     val a = new MyActor
     a.start()
diff --git a/test/files/jvm/replyablereactor3.scala b/test/files/jvm/replyablereactor3.scala
index 2c26b8a..5810ed0 100644
--- a/test/files/jvm/replyablereactor3.scala
+++ b/test/files/jvm/replyablereactor3.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors._
 import scala.actors.Actor._
 
@@ -19,7 +23,6 @@ class MyActor extends ReplyReactor {
   }
 }
 
-object Test {
   def main(args: Array[String]) {
     val a = new MyActor
     a.start()
diff --git a/test/files/jvm/replyablereactor4.scala b/test/files/jvm/replyablereactor4.scala
index 8776cf6..95d6368 100644
--- a/test/files/jvm/replyablereactor4.scala
+++ b/test/files/jvm/replyablereactor4.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors._
 import scala.actors.Actor._
 
@@ -19,7 +23,6 @@ class MyActor extends ReplyReactor {
   }
 }
 
-object Test {
   def main(args: Array[String]) {
     val a = new MyActor
     a.start()
diff --git a/test/files/jvm/replyreactor-react-sender.scala b/test/files/jvm/replyreactor-react-sender.scala
index c988429..fdcea09 100644
--- a/test/files/jvm/replyreactor-react-sender.scala
+++ b/test/files/jvm/replyreactor-react-sender.scala
@@ -1,7 +1,9 @@
-import scala.actors.ReplyReactor
-import scala.actors.Actor._
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors.ReplyReactor
+  import scala.actors.Actor._
 
   val NUM = 2000
 
diff --git a/test/files/jvm/replyreactor.scala b/test/files/jvm/replyreactor.scala
index 0cecf29..7512fb0 100644
--- a/test/files/jvm/replyreactor.scala
+++ b/test/files/jvm/replyreactor.scala
@@ -1,6 +1,8 @@
-import scala.actors.ReplyReactor
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors.ReplyReactor
   def main(args: Array[String]) {
     val a = new ReplyReactor {
       def act() {
diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala
index a306a7d..ce86d4a 100644
--- a/test/files/jvm/scala-concurrent-tck.scala
+++ b/test/files/jvm/scala-concurrent-tck.scala
@@ -8,7 +8,7 @@ import scala.concurrent.{
   CanAwait,
   Await
 }
-import scala.concurrent.{ future, promise, blocking }
+import scala.concurrent.blocking
 import scala.util.{ Try, Success, Failure }
 import scala.concurrent.duration.Duration
 import scala.reflect.{ classTag, ClassTag }
@@ -35,15 +35,15 @@ trait FutureCallbacks extends TestBase {
   def testOnSuccess(): Unit = once {
     done =>
     var x = 0
-    val f = future { x = 1 }
+    val f = Future { x = 1 }
     f onSuccess { case _ => done(x == 1) }
   }
-  
+
   def testOnSuccessWhenCompleted(): Unit = once {
     done =>
     var x = 0
-    val f = future { x = 1 }
-    f onSuccess { 
+    val f = Future { x = 1 }
+    f onSuccess {
       case _ if x == 1 =>
       x = 2
       f onSuccess {  case _ => done(x == 2) }
@@ -52,31 +52,31 @@ trait FutureCallbacks extends TestBase {
 
   def testOnSuccessWhenFailed(): Unit = once {
     done =>
-    val f = future[Unit] { throw new Exception }
+    val f = Future[Unit] { throw new Exception }
     f onSuccess { case _ => done(false) }
     f onFailure { case _ => done(true) }
   }
-  
+
   def testOnFailure(): Unit = once {
     done =>
-    val f = future[Unit] { throw new Exception }
+    val f = Future[Unit] { throw new Exception }
     f onSuccess { case _ => done(false) }
     f onFailure { case _ => done(true) }
   }
 
   def testOnFailureWhenSpecialThrowable(num: Int, cause: Throwable): Unit = once {
     done =>
-    val f = future[Unit] { throw cause }
+    val f = Future[Unit] { throw cause }
     f onSuccess { case _ => done(false) }
     f onFailure {
       case e: ExecutionException if e.getCause == cause => done(true)
       case _ => done(false)
     }
   }
-  
+
   def testOnFailureWhenTimeoutException(): Unit = once {
     done =>
-    val f = future[Unit] { throw new TimeoutException() }
+    val f = Future[Unit] { throw new TimeoutException() }
     f onSuccess { case _ => done(false) }
     f onFailure {
       case e: TimeoutException => done(true)
@@ -89,7 +89,7 @@ trait FutureCallbacks extends TestBase {
     (0 to 10000).map(Future(_)).foldLeft(promise.future)((f1, f2) => f2.flatMap(i => f1))
     promise.success(-1)
   }
-  
+
   testOnSuccess()
   testOnSuccessWhenCompleted()
   testOnSuccessWhenFailed()
@@ -108,7 +108,7 @@ trait FutureCombinators extends TestBase {
 
   def testMapSuccess(): Unit = once {
     done =>
-      val f = future { 5 }
+      val f = Future { 5 }
       val g = f map { x => "result: " + x }
       g onSuccess { case s => done(s == "result: 5") }
       g onFailure { case _ => done(false) }
@@ -116,7 +116,7 @@ trait FutureCombinators extends TestBase {
 
   def testMapFailure(): Unit = once {
     done =>
-      val f = future[Unit] { throw new Exception("exception message") }
+      val f = Future[Unit] { throw new Exception("exception message") }
       val g = f map { x => "result: " + x }
       g onSuccess { case _ => done(false) }
       g onFailure { case t => done(t.getMessage() == "exception message") }
@@ -124,7 +124,7 @@ trait FutureCombinators extends TestBase {
 
   def testMapSuccessPF(): Unit = once {
     done =>
-      val f = future { 5 }
+      val f = Future { 5 }
       val g = f map { case r => "result: " + r }
       g onSuccess { case s => done(s == "result: 5") }
       g onFailure { case _ => done(false) }
@@ -132,7 +132,7 @@ trait FutureCombinators extends TestBase {
 
   def testTransformSuccess(): Unit = once {
     done =>
-      val f = future { 5 }
+      val f = Future { 5 }
       val g = f.transform(r => "result: " + r, identity)
       g onSuccess { case s => done(s == "result: 5") }
       g onFailure { case _ => done(false) }
@@ -140,7 +140,7 @@ trait FutureCombinators extends TestBase {
 
   def testTransformSuccessPF(): Unit = once {
     done =>
-      val f = future { 5 }
+      val f = Future { 5 }
       val g = f.transform( { case r => "result: " + r }, identity)
       g onSuccess { case s => done(s == "result: 5") }
       g onFailure { case _ => done(false) }
@@ -149,7 +149,7 @@ trait FutureCombinators extends TestBase {
 def testTransformFailure(): Unit = once {
     done =>
       val transformed = new Exception("transformed")
-      val f = future { throw new Exception("expected") }
+      val f = Future { throw new Exception("expected") }
       val g = f.transform(identity, _ => transformed)
       g onSuccess { case _ => done(false) }
       g onFailure { case e => done(e eq transformed) }
@@ -159,7 +159,7 @@ def testTransformFailure(): Unit = once {
     done =>
       val e = new Exception("expected")
       val transformed = new Exception("transformed")
-      val f = future[Unit] { throw e }
+      val f = Future[Unit] { throw e }
       val g = f.transform(identity, { case `e` => transformed })
       g onSuccess { case _ => done(false) }
       g onFailure { case e => done(e eq transformed) }
@@ -167,7 +167,7 @@ def testTransformFailure(): Unit = once {
 
   def testFoldFailure(): Unit = once {
     done =>
-      val f = future[Unit] { throw new Exception("expected") }
+      val f = Future[Unit] { throw new Exception("expected") }
       val g = f.transform(r => "result: " + r, identity)
       g onSuccess { case _ => done(false) }
       g onFailure { case t => done(t.getMessage() == "expected") }
@@ -175,23 +175,23 @@ def testTransformFailure(): Unit = once {
 
   def testFlatMapSuccess(): Unit = once {
     done =>
-      val f = future { 5 }
-      val g = f flatMap { _ => future { 10 } }
+      val f = Future { 5 }
+      val g = f flatMap { _ => Future { 10 } }
       g onSuccess { case x => done(x == 10) }
       g onFailure { case _ => done(false) }
   }
 
   def testFlatMapFailure(): Unit = once {
     done =>
-      val f = future[Unit] { throw new Exception("expected") }
-      val g = f flatMap { _ => future { 10 } }
+      val f = Future[Unit] { throw new Exception("expected") }
+      val g = f flatMap { _ => Future { 10 } }
       g onSuccess { case _ => done(false) }
       g onFailure { case t => done(t.getMessage() == "expected") }
   }
 
   def testFilterSuccess(): Unit = once {
     done =>
-      val f = future { 4 }
+      val f = Future { 4 }
       val g = f filter { _ % 2 == 0 }
       g onSuccess { case x: Int => done(x == 4) }
       g onFailure { case _ => done(false) }
@@ -199,7 +199,7 @@ def testTransformFailure(): Unit = once {
 
   def testFilterFailure(): Unit = once {
     done =>
-      val f = future { 4 }
+      val f = Future { 4 }
       val g = f filter { _ % 2 == 1 }
       g onSuccess { case x: Int => done(false) }
       g onFailure {
@@ -210,7 +210,7 @@ def testTransformFailure(): Unit = once {
 
   def testCollectSuccess(): Unit = once {
     done =>
-      val f = future { -5 }
+      val f = Future { -5 }
       val g = f collect { case x if x < 0 => -x }
       g onSuccess { case x: Int => done(x == 5) }
       g onFailure { case _ => done(false) }
@@ -218,7 +218,7 @@ def testTransformFailure(): Unit = once {
 
   def testCollectFailure(): Unit = once {
     done =>
-      val f = future { -5 }
+      val f = Future { -5 }
       val g = f collect { case x if x > 0 => x * 2 }
       g onSuccess { case _ => done(false) }
       g onFailure {
@@ -232,23 +232,23 @@ def testTransformFailure(): Unit = once {
 
   def testForeachSuccess(): Unit = once {
     done =>
-      val p = promise[Int]()
-      val f = future[Int] { 5 }
+      val p = Promise[Int]()
+      val f = Future[Int] { 5 }
       f foreach { x => p.success(x * 2) }
       val g = p.future
-      
+
       g.onSuccess { case res: Int => done(res == 10) }
       g.onFailure { case _ => done(false) }
   }
 
   def testForeachFailure(): Unit = once {
     done =>
-      val p = promise[Int]()
-      val f = future[Int] { throw new Exception }
+      val p = Promise[Int]()
+      val f = Future[Int] { throw new Exception }
       f foreach { x => p.success(x * 2) }
       f onFailure { case _ => p.failure(new Exception) }
       val g = p.future
-      
+
       g.onSuccess { case _ => done(false) }
       g.onFailure { case _ => done(true) }
   }
@@ -256,7 +256,7 @@ def testTransformFailure(): Unit = once {
   def testRecoverSuccess(): Unit = once {
     done =>
     val cause = new RuntimeException
-    val f = future {
+    val f = Future {
       throw cause
     } recover {
       case re: RuntimeException =>
@@ -268,7 +268,7 @@ def testTransformFailure(): Unit = once {
   def testRecoverFailure(): Unit = once {
     done =>
     val cause = new RuntimeException
-    val f = future {
+    val f = Future {
       throw cause
     } recover {
       case te: TimeoutException => "timeout"
@@ -276,15 +276,15 @@ def testTransformFailure(): Unit = once {
     f onSuccess { case _ => done(false) }
     f onFailure { case any => done(any == cause) }
   }
-  
+
   def testRecoverWithSuccess(): Unit = once {
     done =>
     val cause = new RuntimeException
-    val f = future {
+    val f = Future {
       throw cause
     } recoverWith {
       case re: RuntimeException =>
-        future { "recovered" }
+        Future { "recovered" }
     }
     f onSuccess { case x => done(x == "recovered") }
     f onFailure { case any => done(false) }
@@ -293,20 +293,20 @@ def testTransformFailure(): Unit = once {
   def testRecoverWithFailure(): Unit = once {
     done =>
     val cause = new RuntimeException
-    val f = future {
+    val f = Future {
       throw cause
     } recoverWith {
       case te: TimeoutException =>
-        future { "timeout" }
+        Future { "timeout" }
     }
     f onSuccess { case x => done(false) }
     f onFailure { case any => done(any == cause) }
   }
- 
+
   def testZipSuccess(): Unit = once {
     done =>
-    val f = future { 5 }
-    val g = future { 6 }
+    val f = Future { 5 }
+    val g = Future { 6 }
     val h = f zip g
     h onSuccess { case (l: Int, r: Int) => done(l+r == 11) }
     h onFailure { case _ => done(false) }
@@ -315,8 +315,8 @@ def testTransformFailure(): Unit = once {
   def testZipFailureLeft(): Unit = once {
     done =>
     val cause = new Exception("expected")
-    val f = future { throw cause }
-    val g = future { 6 }
+    val f = Future { throw cause }
+    val g = Future { 6 }
     val h = f zip g
     h onSuccess { case _ => done(false) }
     h onFailure { case e: Exception => done(e.getMessage == "expected") }
@@ -325,8 +325,8 @@ def testTransformFailure(): Unit = once {
   def testZipFailureRight(): Unit = once {
     done =>
     val cause = new Exception("expected")
-    val f = future { 5 }
-    val g = future { throw cause }
+    val f = Future { 5 }
+    val g = Future { throw cause }
     val h = f zip g
     h onSuccess { case _ => done(false) }
     h onFailure { case e: Exception => done(e.getMessage == "expected") }
@@ -334,8 +334,8 @@ def testTransformFailure(): Unit = once {
 
   def testFallbackTo(): Unit = once {
     done =>
-    val f = future { sys.error("failed") }
-    val g = future { 5 }
+    val f = Future { sys.error("failed") }
+    val g = Future { 5 }
     val h = f fallbackTo g
     h onSuccess { case x: Int => done(x == 5) }
     h onFailure { case _ => done(false) }
@@ -344,8 +344,8 @@ def testTransformFailure(): Unit = once {
   def testFallbackToFailure(): Unit = once {
     done =>
     val cause = new Exception
-    val f = future { throw cause }
-    val g = future { sys.error("failed") }
+    val f = Future { throw cause }
+    val g = Future { sys.error("failed") }
     val h = f fallbackTo g
 
     h onSuccess { case _ => done(false) }
@@ -382,49 +382,49 @@ trait FutureProjections extends TestBase {
   def testFailedFailureOnComplete(): Unit = once {
     done =>
     val cause = new RuntimeException
-    val f = future { throw cause }
+    val f = Future { throw cause }
     f.failed onComplete {
       case Success(t) => done(t == cause)
       case Failure(t) => done(false)
     }
   }
-  
+
   def testFailedFailureOnSuccess(): Unit = once {
     done =>
     val cause = new RuntimeException
-    val f = future { throw cause }
+    val f = Future { throw cause }
     f.failed onSuccess { case t => done(t == cause) }
   }
-  
+
   def testFailedSuccessOnComplete(): Unit = once {
     done =>
-    val f = future { 0 }
+    val f = Future { 0 }
     f.failed onComplete {
       case Failure(_: NoSuchElementException) => done(true)
       case _ => done(false)
     }
   }
-  
+
   def testFailedSuccessOnFailure(): Unit = once {
     done =>
-    val f = future { 0 }
+    val f = Future { 0 }
     f.failed onFailure {
       case e: NoSuchElementException => done(true)
       case _ => done(false)
     }
     f.failed onSuccess { case _ => done(false) }
   }
-  
+
   def testFailedFailureAwait(): Unit = once {
     done =>
     val cause = new RuntimeException
-    val f = future { throw cause }
+    val f = Future { throw cause }
     done(Await.result(f.failed, Duration(500, "ms")) == cause)
   }
-  
+
   def testFailedSuccessAwait(): Unit = once {
     done =>
-    val f = future { 0 }
+    val f = Future { 0 }
     try {
       Await.result(f.failed, Duration(500, "ms"))
       done(false)
@@ -437,7 +437,7 @@ trait FutureProjections extends TestBase {
   def testAwaitPositiveDuration(): Unit = once { done =>
     val p = Promise[Int]()
     val f = p.future
-    future {
+    Future {
       intercept[IllegalArgumentException] { Await.ready(f, Duration.Undefined) }
       p.success(0)
       Await.ready(f, Duration.Zero)
@@ -449,14 +449,14 @@ trait FutureProjections extends TestBase {
 
   def testAwaitNegativeDuration(): Unit = once { done =>
     val f = Promise().future
-    future {
+    Future {
       intercept[TimeoutException] { Await.ready(f, Duration.Zero) }
       intercept[TimeoutException] { Await.ready(f, Duration.MinusInf) }
       intercept[TimeoutException] { Await.ready(f, Duration(-500, "ms")) }
       done(true)
     } onFailure { case x => done(throw x) }
   }
-  
+
   testFailedFailureOnComplete()
   testFailedFailureOnSuccess()
   testFailedSuccessOnComplete()
@@ -473,14 +473,14 @@ trait Blocking extends TestBase {
 
   def testAwaitSuccess(): Unit = once {
     done =>
-    val f = future { 0 }
+    val f = Future { 0 }
     done(Await.result(f, Duration(500, "ms")) == 0)
   }
-  
+
   def testAwaitFailure(): Unit = once {
     done =>
     val cause = new RuntimeException
-    val f = future { throw cause }
+    val f = Future { throw cause }
     try {
       Await.result(f, Duration(500, "ms"))
       done(false)
@@ -488,13 +488,13 @@ trait Blocking extends TestBase {
       case  t: Throwable => done(t == cause)
     }
   }
-  
+
   def testFQCNForAwaitAPI(): Unit = once {
     done =>
-    done(classOf[CanAwait].getName == "scala.concurrent.CanAwait" && 
+    done(classOf[CanAwait].getName == "scala.concurrent.CanAwait" &&
          Await.getClass.getName == "scala.concurrent.Await")
   }
-  
+
   testAwaitSuccess()
   testAwaitFailure()
   testFQCNForAwaitAPI()
@@ -562,24 +562,24 @@ trait Promises extends TestBase {
 
   def testSuccess(): Unit = once {
     done =>
-    val p = promise[Int]()
+    val p = Promise[Int]()
     val f = p.future
-    
+
     f onSuccess { case x => done(x == 5) }
     f onFailure { case any => done(false) }
-    
+
     p.success(5)
   }
 
   def testFailure(): Unit = once {
     done =>
     val e = new Exception("expected")
-    val p = promise[Int]()
+    val p = Promise[Int]()
     val f = p.future
-    
+
     f onSuccess { case x => done(false) }
     f onFailure { case any => done(any eq e) }
-    
+
     p.failure(e)
   }
 
@@ -644,7 +644,7 @@ trait CustomExecutionContext extends TestBase {
     val count = countExecs { implicit ec =>
       blocking {
         once { done =>
-          val f = future(assertNoEC())(defaultEC)
+          val f = Future(assertNoEC())(defaultEC)
           f onSuccess {
             case _ =>
               assertEC()
@@ -715,13 +715,13 @@ trait ExecutionContextPrepare extends TestBase {
   val theLocal = new ThreadLocal[String] {
     override protected def initialValue(): String = ""
   }
-  
+
   class PreparingExecutionContext extends ExecutionContext {
     def delegate = ExecutionContext.global
-    
+
     override def execute(runnable: Runnable): Unit =
       delegate.execute(runnable)
-    
+
     override def prepare(): ExecutionContext = {
       // save object stored in ThreadLocal storage
       val localData = theLocal.get
@@ -739,27 +739,27 @@ trait ExecutionContextPrepare extends TestBase {
         }
       }
     }
-    
+
     override def reportFailure(t: Throwable): Unit =
       delegate.reportFailure(t)
   }
-  
+
   implicit val ec = new PreparingExecutionContext
-  
+
   def testOnComplete(): Unit = once {
     done =>
     theLocal.set("secret")
-    val fut = future { 42 }
+    val fut = Future { 42 }
     fut onComplete { case _ => done(theLocal.get == "secret") }
   }
-  
+
   def testMap(): Unit = once {
     done =>
     theLocal.set("secret2")
-    val fut = future { 42 }
+    val fut = Future { 42 }
     fut map { x => done(theLocal.get == "secret2") }
   }
-  
+
   testOnComplete()
   testMap()
 }
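
Throughout the tck the package-level future { ... } / promise() helpers give way to the Future and Promise companion objects. A minimal sketch of that style outside the test harness (hypothetical names, standard 2.11 API):

    import scala.concurrent.{Await, Future, Promise}
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration._

    object TckStyleDemo {
      def main(args: Array[String]): Unit = {
        val p = Promise[Int]()
        val f = Future { 21 } map (_ * 2)
        f foreach p.success
        println(Await.result(p.future, 1.second))  // 42
      }
    }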
diff --git a/test/files/jvm/scheduler-adapter.scala b/test/files/jvm/scheduler-adapter.scala
index d6a8a44..1c9cfe7 100644
--- a/test/files/jvm/scheduler-adapter.scala
+++ b/test/files/jvm/scheduler-adapter.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors.{Actor, SchedulerAdapter}
 
 trait AdaptedActor extends Actor {
@@ -36,7 +40,6 @@ object Two extends AdaptedActor {
   }
 }
 
-object Test {
   val adapted =
     new SchedulerAdapter {
       def execute(block: => Unit) {
diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check
index f886cfe..47d7bfd 100644
--- a/test/files/jvm/serialization-new.check
+++ b/test/files/jvm/serialization-new.check
@@ -1,3 +1,4 @@
+warning: there were 2 deprecation warning(s); re-run with -deprecation for details
 a1 = Array[1,2,3]
 _a1 = Array[1,2,3]
 arrayEquals(a1, _a1): true
@@ -220,60 +221,6 @@ x = TrieMap(1 -> one, 2 -> two, 3 -> three)
 y = TrieMap(1 -> one, 2 -> two, 3 -> three)
 x equals y: true, y equals x: true
 
-x =  xml:src="hello"
-y =  xml:src="hello"
-x equals y: true, y equals x: true
-
-x = <title></title>
-y = <title></title>
-x equals y: true, y equals x: true
-
-x = <html><title>title</title><body></body></html>
-y = <html><title>title</title><body></body></html>
-x equals y: true, y equals x: true
-
-x = <html>
-      <body>
-        <table cellpadding="2" cellspacing="0">
-        <tr>
-          <th>Last Name</th>
-          <th>First Name</th>
-        </tr>
-        <tr>
-          <td> Tom </td>
-          <td> 20 </td>
-        </tr><tr>
-          <td> Bob </td>
-          <td> 22 </td>
-        </tr><tr>
-          <td> James </td>
-          <td> 19 </td>
-        </tr>
-      </table>
-      </body>
-      </html>
-y = <html>
-      <body>
-        <table cellpadding="2" cellspacing="0">
-        <tr>
-          <th>Last Name</th>
-          <th>First Name</th>
-        </tr>
-        <tr>
-          <td> Tom </td>
-          <td> 20 </td>
-        </tr><tr>
-          <td> Bob </td>
-          <td> 22 </td>
-        </tr><tr>
-          <td> James </td>
-          <td> 19 </td>
-        </tr>
-      </table>
-      </body>
-      </html>
-x equals y: true, y equals x: true
-
 x = Tim
 y = Tim
 x equals y: true, y equals x: true
diff --git a/test/files/jvm/serialization-new.scala b/test/files/jvm/serialization-new.scala
index 1522fc8..1b5e856 100644
--- a/test/files/jvm/serialization-new.scala
+++ b/test/files/jvm/serialization-new.scala
@@ -419,70 +419,6 @@ object Test3_mutable {
   }
 }
 
-
-//############################################################################
-// Test classes in package "scala.xml"
-
-object Test4_xml {
-  import scala.xml.{Attribute, Document, Elem, Null, PrefixedAttribute, Text}
-
-  case class Person(name: String, age: Int)
-
-  try {
-    // Attribute
-    val a1 = new PrefixedAttribute("xml", "src", Text("hello"), Null)
-    val _a1: Attribute = read(write(a1))
-    check(a1, _a1)
-
-    // Document
-    val d1 = new Document
-    d1.docElem = <title></title>
-    d1.encoding = Some("UTF-8")
-    val _d1: Document = read(write(d1))
-    check(d1, _d1)
-
-    // Elem
-    val e1 = <html><title>title</title><body></body></html>;
-    val _e1: Elem = read(write(e1))
-    check(e1, _e1)
-
-    class AddressBook(a: Person*) {
-      private val people: List[Person] = a.toList
-      def toXHTML =
-      <table cellpadding="2" cellspacing="0">
-        <tr>
-          <th>Last Name</th>
-          <th>First Name</th>
-        </tr>
-        { for (p <- people) yield
-        <tr>
-          <td> { p.name } </td>
-          <td> { p.age.toString() } </td>
-        </tr> }
-      </table>;
-    }
-
-    val people = new AddressBook(
-      Person("Tom", 20),
-      Person("Bob", 22),
-      Person("James", 19))
-
-    val e2 =
-      <html>
-      <body>
-        { people.toXHTML }
-      </body>
-      </html>;
-    val _e2: Elem = read(write(e2))
-    check(e2, _e2)
-  }
-  catch {
-    case e: Exception =>
-      println("Error in Test4_xml: " + e)
-      throw e
-  }
-}
-
 //############################################################################
 // Test user-defined classes WITHOUT nesting
 
@@ -594,11 +530,10 @@ object Test8 {
 // Test code
 
 object Test {
-  def main(args: Array[String]) {
+  def main(args: Array[String]): Unit = {
     Test1_scala
     Test2_immutable
     Test3_mutable
-    Test4_xml
     Test5
     Test6
     Test7
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index f886cfe..47d7bfd 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -1,3 +1,4 @@
+warning: there were 2 deprecation warning(s); re-run with -deprecation for details
 a1 = Array[1,2,3]
 _a1 = Array[1,2,3]
 arrayEquals(a1, _a1): true
@@ -220,60 +221,6 @@ x = TrieMap(1 -> one, 2 -> two, 3 -> three)
 y = TrieMap(1 -> one, 2 -> two, 3 -> three)
 x equals y: true, y equals x: true
 
-x =  xml:src="hello"
-y =  xml:src="hello"
-x equals y: true, y equals x: true
-
-x = <title></title>
-y = <title></title>
-x equals y: true, y equals x: true
-
-x = <html><title>title</title><body></body></html>
-y = <html><title>title</title><body></body></html>
-x equals y: true, y equals x: true
-
-x = <html>
-      <body>
-        <table cellpadding="2" cellspacing="0">
-        <tr>
-          <th>Last Name</th>
-          <th>First Name</th>
-        </tr>
-        <tr>
-          <td> Tom </td>
-          <td> 20 </td>
-        </tr><tr>
-          <td> Bob </td>
-          <td> 22 </td>
-        </tr><tr>
-          <td> James </td>
-          <td> 19 </td>
-        </tr>
-      </table>
-      </body>
-      </html>
-y = <html>
-      <body>
-        <table cellpadding="2" cellspacing="0">
-        <tr>
-          <th>Last Name</th>
-          <th>First Name</th>
-        </tr>
-        <tr>
-          <td> Tom </td>
-          <td> 20 </td>
-        </tr><tr>
-          <td> Bob </td>
-          <td> 22 </td>
-        </tr><tr>
-          <td> James </td>
-          <td> 19 </td>
-        </tr>
-      </table>
-      </body>
-      </html>
-x equals y: true, y equals x: true
-
 x = Tim
 y = Tim
 x equals y: true, y equals x: true
diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala
index 34b6493..bc61235 100644
--- a/test/files/jvm/serialization.scala
+++ b/test/files/jvm/serialization.scala
@@ -281,6 +281,7 @@ object Test2_immutable {
 //############################################################################
 // Test classes in package "scala.collection.mutable"
 
+@deprecated("Suppress warnings", since="2.11")
 object Test3_mutable {
   import scala.reflect.ClassManifest
   import scala.collection.mutable.{
@@ -296,7 +297,7 @@ object Test3_mutable {
     ab1 ++= List("one", "two")
     val _ab1: ArrayBuffer[String] = read(write(ab1))
     check(ab1, _ab1)
-    
+
     // ArrayBuilder
     val abu1 = ArrayBuilder.make[Long]
     val _abu1: ArrayBuilder[ClassManifest[Long]] = read(write(abu1))
@@ -305,12 +306,12 @@ object Test3_mutable {
     val abu2 = ArrayBuilder.make[Float]
     val _abu2: ArrayBuilder[ClassManifest[Float]] = read(write(abu2))
     check(abu2, _abu2)
-    
+
     // ArraySeq
     val aq1 = ArraySeq(1, 2, 3)
     val _aq1: ArraySeq[Int] = read(write(aq1))
     check(aq1, _aq1)
-    
+
     // ArrayStack
     val as1 = new ArrayStack[Int]
     as1 ++= List(20, 2, 3).iterator
@@ -401,12 +402,12 @@ object Test3_mutable {
     val wa1 = WrappedArray.make(Array(1, 2, 3))
     val _wa1: WrappedArray[Int] = read(write(wa1))
     check(wa1, _wa1)
-    
+
     // TreeSet
     val ts1 = TreeSet[Int]() ++= Array(1, 2, 3)
     val _ts1: TreeSet[Int] = read(write(ts1))
     check(ts1, _ts1)
-    
+
     // concurrent.TrieMap
     val ct1 = TrieMap[Int, String]() ++= Array(1 -> "one", 2 -> "two", 3 -> "three")
     val _ct1: TrieMap[Int, String] = read(write(ct1))
@@ -419,70 +420,6 @@ object Test3_mutable {
   }
 }
 
-
-//############################################################################
-// Test classes in package "scala.xml"
-
-object Test4_xml {
-  import scala.xml.{Attribute, Document, Elem, Null, PrefixedAttribute, Text}
-
-  case class Person(name: String, age: Int)
-
-  try {
-    // Attribute
-    val a1 = new PrefixedAttribute("xml", "src", Text("hello"), Null)
-    val _a1: Attribute = read(write(a1))
-    check(a1, _a1)
-
-    // Document
-    val d1 = new Document
-    d1.docElem = <title></title>
-    d1.encoding = Some("UTF-8")
-    val _d1: Document = read(write(d1))
-    check(d1, _d1)
-
-    // Elem
-    val e1 = <html><title>title</title><body></body></html>;
-    val _e1: Elem = read(write(e1))
-    check(e1, _e1)
-
-    class AddressBook(a: Person*) {
-      private val people: List[Person] = a.toList
-      def toXHTML =
-      <table cellpadding="2" cellspacing="0">
-        <tr>
-          <th>Last Name</th>
-          <th>First Name</th>
-        </tr>
-        { for (p <- people) yield
-        <tr>
-          <td> { p.name } </td>
-          <td> { p.age.toString() } </td>
-        </tr> }
-      </table>;
-    }
-
-    val people = new AddressBook(
-      Person("Tom", 20),
-      Person("Bob", 22),
-      Person("James", 19))
-
-    val e2 =
-      <html>
-      <body>
-        { people.toXHTML }
-      </body>
-      </html>;
-    val _e2: Elem = read(write(e2))
-    check(e2, _e2)
-  }
-  catch {
-    case e: Exception =>
-      println("Error in Test4_xml: " + e)
-      throw e
-  }
-}
-
 //############################################################################
 // Test user-defined classes WITHOUT nesting
 
@@ -568,7 +505,7 @@ class WithTransient extends Serializable {
   @transient private lazy val a2 = 2
   @transient object B extends Serializable
   @transient private object C extends Serializable
-    
+
   def test = {
     println(a1)
     println(a2)
@@ -593,17 +530,18 @@ object Test8 {
 //############################################################################
 // Test code
 
+@deprecated("Suppress warnings", since="2.11")
 object Test {
   def main(args: Array[String]) {
     Test1_scala
     Test2_immutable
     Test3_mutable
-    Test4_xml
     Test5
     Test6
     Test7
     Test8
     Test9_parallel
+    Test10_util
   }
 }
 
@@ -614,58 +552,72 @@ object Test {
 // Test classes in package "scala.collection.parallel" and subpackages
 object Test9_parallel {
   import scala.collection.parallel._
-  
+
   try {
     println()
-    
+
     // UnrolledBuffer
     val ub = new collection.mutable.UnrolledBuffer[String]
     ub ++= List("one", "two")
     val _ub: collection.mutable.UnrolledBuffer[String] = read(write(ub))
     check(ub, _ub)
-    
+
     // mutable.ParArray
     val pa = mutable.ParArray("abc", "def", "etc")
     val _pa: mutable.ParArray[String] = read(write(pa))
     check(pa, _pa)
-    
+
     // mutable.ParHashMap
     val mpm = mutable.ParHashMap(1 -> 2, 2 -> 4)
     val _mpm: mutable.ParHashMap[Int, Int] = read(write(mpm))
     check(mpm, _mpm)
-    
+
     // mutable.ParTrieMap
     val mpc = mutable.ParTrieMap(1 -> 2, 2 -> 4)
     val _mpc: mutable.ParTrieMap[Int, Int] = read(write(mpc))
     check(mpc, _mpc)
-    
+
     // mutable.ParHashSet
     val mps = mutable.ParHashSet(1, 2, 3)
     val _mps: mutable.ParHashSet[Int] = read(write(mps))
     check(mps, _mps)
-    
+
     // immutable.ParRange
     val pr1 = immutable.ParRange(0, 4, 1, true)
     val _pr1: immutable.ParRange = read(write(pr1))
     check(pr1, _pr1)
-    
+
     val pr2 = immutable.ParRange(0, 4, 1, false)
     val _pr2: immutable.ParRange = read(write(pr2))
     check(pr2, _pr2)
-    
+
     // immutable.ParHashMap
     val ipm = immutable.ParHashMap(5 -> 1, 10 -> 2)
     val _ipm: immutable.ParHashMap[Int, Int] = read(write(ipm))
     check(ipm, _ipm)
-    
+
     // immutable.ParHashSet
     val ips = immutable.ParHashSet("one", "two")
     val _ips: immutable.ParHashSet[String] = read(write(ips))
     check(ips, _ips)
-    
+
   } catch {
     case e: Exception =>
       println("Error in Test5_parallel: " + e)
       throw e
   }
 }
+
+//############################################################################
+// Test classes in package scala.util
+
+object Test10_util {
+  import scala.util.Random
+  def rep[A](n: Int)(f: => A) { if (n > 0) { f; rep(n-1)(f) } }
+
+  {
+    val random = new Random(345)
+    val random2: Random = read(write(random))
+    rep(5) { assert(random.nextInt == random2.nextInt) }
+  }
+}
diff --git a/test/files/jvm/si5471.scala b/test/files/jvm/si5471.scala
deleted file mode 100644
index 2c8c420..0000000
--- a/test/files/jvm/si5471.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-
-object Test {
-
-  def main(args: Array[String]) {
-    import scala.math.Numeric
-    import scala.math.Numeric.Implicits._
-    
-    val b = BigInt(Long.MaxValue) + 1
-
-    def dbl[N :Numeric](n: N) = n.toDouble
-    def flt[N :Numeric](n: N) = n.toFloat
-    
-    println(dbl(b) == b.toDouble)
-    println(flt(b) == b.toFloat)
-  }
-
-}
diff --git a/test/files/jvm/stringbuilder.scala b/test/files/jvm/stringbuilder.scala
index 54951d6..1fbf61a 100644
--- a/test/files/jvm/stringbuilder.scala
+++ b/test/files/jvm/stringbuilder.scala
@@ -2,6 +2,7 @@
  *
  *  @author Stephane Micheloud
  */
+import scala.language.{ postfixOps }
 object Test {
   def main(args: Array[String]) {
     Test1.run() //ctor, reverse
diff --git a/test/files/jvm/t0632.check b/test/files/jvm/t0632.check
deleted file mode 100755
index 681bc9d..0000000
--- a/test/files/jvm/t0632.check
+++ /dev/null
@@ -1,12 +0,0 @@
-<foo x="&"/>
-<foo x="&"/>
-<foo x="&"/>
-<foo x="&"/>
-<foo x="&amp;"/>
-<foo x="&amp;"/>
-<foo x="&amp;"/>
-<foo x="&amp;"/>
-<foo x="&&"/>
-<foo x="&&"/>
-<foo x="&&"/>
-<foo x="&&"/>
diff --git a/test/files/jvm/t0632.scala b/test/files/jvm/t0632.scala
deleted file mode 100644
index a2bb5aa..0000000
--- a/test/files/jvm/t0632.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-object Test {
-import scala.io.Source.fromString
-import scala.xml.parsing.ConstructingParser.fromSource
-import scala.xml.TopScope
-  def parse(s:String) = fromSource(fromString(s), false).element(TopScope)
-	def main(argv : Array[String]) : Unit = {
-
-                println(parse("<foo x='&'/>"))
-		println(xml.XML.loadString("<foo x='&'/>"))
-		println(<foo x="&"/>)
-		println(<foo x={ "&" }/>)
-
-		println(xml.XML.loadString("<foo x='&amp;'/>"))
-                println(parse("<foo x='&amp;'/>"))
-		println(<foo x="&amp;"/>)
-		println(<foo x={ "&" }/>)
-		println(xml.XML.loadString("<foo x='&&'/>"))
-                println(parse("<foo x='&&'/>"))
-		println(<foo x="&&"/>)
-		println(<foo x={ "&&" }/>)
-	}
-}
diff --git a/test/files/jvm/t1118.check b/test/files/jvm/t1118.check
deleted file mode 100755
index d676b41..0000000
--- a/test/files/jvm/t1118.check
+++ /dev/null
@@ -1,11 +0,0 @@
-
-<hi/> <!-- literal short -->
-<there></there> <!-- literal long -->
-<guys who="you all"></guys> <!-- literal long with attribute-->
-<hows it="going"/> <!-- literal short with attribute -->
-<this>is pretty cool</this> <!-- literal not empty -->
-
-<emptiness></emptiness> <!--programmatic long-->
-<vide/> <!--programmatic short-->
-<elem attr="value"/> <!--programmatic short with attribute-->
-<elem2 attr2="value2"></elem2> <!--programmatic long with attribute-->
diff --git a/test/files/jvm/t1118.scala b/test/files/jvm/t1118.scala
deleted file mode 100755
index 3c86547..0000000
--- a/test/files/jvm/t1118.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-import scala.xml._
-
-object Test {
-  def main(args: Array[String]) {
-    println(<xml:group>
-<hi/> <!-- literal short -->
-<there></there> <!-- literal long -->
-<guys who="you all"></guys> <!-- literal long with attribute-->
-<hows it="going"/> <!-- literal short with attribute -->
-<this>is pretty cool</this> <!-- literal not empty -->
-</xml:group>)
-
-    println(Elem(null, "emptiness", Null, TopScope, false) ++ Text(" ") ++ Comment("programmatic long"))
-
-    println(Elem(null, "vide", Null, TopScope, true) ++ Text(" ") ++ Comment("programmatic short"))
-
-    println(Elem(null, "elem", Attribute("attr", Text("value"), Null), TopScope, true) ++ Text(" ") ++ Comment ("programmatic short with attribute"))
-
-    println(Elem(null, "elem2", Attribute("attr2", Text("value2"), Null), TopScope, false) ++ Text(" ") ++ Comment ("programmatic long with attribute"))
-  }
-}
\ No newline at end of file
diff --git a/test/files/jvm/t1143-2/t1143-2.scala b/test/files/jvm/t1143-2/t1143-2.scala
index 44b1feb..13ab13b 100644
--- a/test/files/jvm/t1143-2/t1143-2.scala
+++ b/test/files/jvm/t1143-2/t1143-2.scala
@@ -16,43 +16,39 @@ object Serialize {
   }
 }
 
-@serializable
 @SerialVersionUID(1L)
-class VarModel[T]( getter: => T, setter: T => Unit )
-{
+class VarModel[T](getter: => T, setter: T => Unit) extends Serializable {
   Serialize.write(getter)
   Serialize.write(setter)
 
-  def this( getter: => T ) = this( getter, null )
+  def this(getter: => T) = this(getter, null)
 
   def getObject: AnyRef = getter.asInstanceOf[AnyRef]
 
-  def setObject( v: AnyRef ) = {
-    if( setter==null )
-      throw new RuntimeException( "Tried to set readonly model!")
-    setter( v.asInstanceOf[T] )
+  def setObject(v: AnyRef) = {
+    if(setter==null)
+      throw new RuntimeException("Tried to set readonly model!")
+    setter(v.asInstanceOf[T])
   }
 
   def detach = ()
 }
 
-@serializable
 @SerialVersionUID(1L)
-class Printer( p: VarModel[String] ) {
-  def print = println( p.getObject );
+class Printer(p: VarModel[String]) extends Serializable {
+  def print = println(p.getObject)
 }
 
 class Component extends Marker { }
 
 class Form extends Component { }
 
-@serializable
 @SerialVersionUID(1L)
-class Main {
+class Main extends Serializable {
   var pass = "pass"
-  def main(args : Array[String]) : Unit = {
+  def main(args: Array[String]): Unit = {
     val f = new Form {
-      val p = new Printer( new VarModel( pass, s => pass = s ) );
+      val p = new Printer(new VarModel(pass, s => pass = s))
       p.print
     }
     ()
diff --git a/test/files/jvm/t1143.scala b/test/files/jvm/t1143.scala
index 7dd374f..eb03c72 100644
--- a/test/files/jvm/t1143.scala
+++ b/test/files/jvm/t1143.scala
@@ -16,9 +16,8 @@ object Serialize {
   }
 }
 
-@serializable
 @SerialVersionUID(1L)
-class VarModel[T](getter: => T, setter: T => Unit) {
+class VarModel[T](getter: => T, setter: T => Unit) extends Serializable {
   Serialize.write(getter)
   Serialize.write(setter)
 
@@ -35,23 +34,20 @@ class VarModel[T](getter: => T, setter: T => Unit) {
   def detach = ()
 }
 
-@serializable
 @SerialVersionUID(1L)
-class Printer(p: VarModel[String]) {
+class Printer(p: VarModel[String]) extends Serializable {
   def print = println(p.getObject)
 }
 
-@serializable
 @SerialVersionUID(1L)
-class Component {
+class Component extends Serializable {
 }
 
 class Form extends Component {
 }
 
-@serializable
 @SerialVersionUID(1L)
-class Main {
+class Main extends Serializable {
   var pass = "pass"
   def main(args: Array[String]) {
     val f = new Form {
diff --git a/test/files/jvm/t1342/SI.scala b/test/files/jvm/t1342/SI.scala
index 8e3b753..7c37d4b 100644
--- a/test/files/jvm/t1342/SI.scala
+++ b/test/files/jvm/t1342/SI.scala
@@ -4,7 +4,7 @@ class SI extends JI {
    }
 }
 
-object Test extends Application {
+object Test extends App {
   val x: JI = new SI
   x.varArgsMethod("one", "two")
 }
diff --git a/test/files/jvm/t1449.scala b/test/files/jvm/t1449.scala
index 3822cf7..7917d6f 100644
--- a/test/files/jvm/t1449.scala
+++ b/test/files/jvm/t1449.scala
@@ -1,7 +1,10 @@
-import scala.actors.Actor._
-import scala.actors.Future
-import scala.actors.Futures._
+
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors.Actor._
+  import scala.actors.Future
+  import scala.actors.Futures._
   def main(args: Array[String]) {
     val a = actor {
       try {
diff --git a/test/files/jvm/t1461.scala b/test/files/jvm/t1461.scala
index a963ec6..f0e3cea 100644
--- a/test/files/jvm/t1461.scala
+++ b/test/files/jvm/t1461.scala
@@ -3,7 +3,7 @@ object Test {
   def main(args: Array[String]) {
     val jl = classOf[Foo].getMethod("jl", classOf[Baz[_]])
     jl.getGenericParameterTypes // works fine
-    
+
     val l = classOf[Foo].getMethod("l", classOf[Baz[_]])
     // By debugger inspection l.signature is (Ltest/Baz<J>;)V
     l.getGenericParameterTypes // throws GenericSignatureFormatError
diff --git a/test/files/jvm/t1464/MyTrait.scala b/test/files/jvm/t1464/MyTrait.scala
index 014ddf8..0b8ccc4 100644
--- a/test/files/jvm/t1464/MyTrait.scala
+++ b/test/files/jvm/t1464/MyTrait.scala
@@ -1,5 +1,5 @@
 trait MyTrait {
   type K
   def findChildByClass[T <: K with MyTrait]: Unit
-  
+
 }
diff --git a/test/files/jvm/t1600.scala b/test/files/jvm/t1600.scala
index 7e23687..5a1b290 100644
--- a/test/files/jvm/t1600.scala
+++ b/test/files/jvm/t1600.scala
@@ -4,23 +4,23 @@
  * changes on deserialization.
  */
 object Test {
-  
+
   import collection._
   def main(args: Array[String]) {
     for (i <- Seq(0, 1, 2, 10, 100)) {
       def entries = (0 until i).map(i => (new Foo, i)).toList
       def elements = entries.map(_._1)
-      
+
       val maps = Seq[Map[Foo, Int]](new mutable.HashMap, new mutable.LinkedHashMap,
           immutable.HashMap.empty).map(_ ++ entries)
       test[Map[Foo, Int]](maps, entries.size, assertMap _)
-      
+
       val sets = Seq[Set[Foo]](new mutable.HashSet, new mutable.LinkedHashSet,
           immutable.HashSet.empty).map(_ ++ elements)
       test[Set[Foo]](sets, entries.size, assertSet _)
     }
   }
-  
+
   private def test[A <: AnyRef](collections: Seq[A], expectedSize: Int, assertFunction: (A, Int) => Unit) {
     for (collection <- collections) {
       assertFunction(collection, expectedSize)
@@ -28,19 +28,19 @@ object Test {
       val bytes = toBytes(collection)
       Foo.hashCodeModifier = 1
       val deserializedCollection = toObject[A](bytes)
-      
+
       assertFunction(deserializedCollection, expectedSize)
       assert(deserializedCollection.getClass == collection.getClass,
           "collection class should remain the same after deserialization ("+deserializedCollection.getClass+" != "+collection.getClass+")")
       Foo.hashCodeModifier = 0
     }
   }
-  
+
   private def toObject[A](bytes: Array[Byte]): A = {
     val in = new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(bytes))
     in.readObject.asInstanceOf[A]
   }
-  
+
   private def toBytes(o: AnyRef): Array[Byte] = {
     val bos = new java.io.ByteArrayOutputStream
     val out = new java.io.ObjectOutputStream(bos)
@@ -48,7 +48,7 @@ object Test {
     out.close
     bos.toByteArray
   }
-  
+
   private def assertMap[A, B](map: Map[A, B], expectedSize: Int) {
     assert(expectedSize == map.size, "expected map size: " + expectedSize + ", actual size: " + map.size)
     map.foreach { case (k, v) =>
@@ -56,21 +56,20 @@ object Test {
       assert(map(k) == v)
     }
   }
-  
+
   private def assertSet[A](set: Set[A], expectedSize: Int) {
     assert(expectedSize == set.size, "expected set size: " + expectedSize + ", actual size: " + set.size)
     set.foreach { e => assert(set.contains(e), "contains should return true for element in the set, element: " + e) }
   }
-  
+
   object Foo {
-    /* Used to simulate a hashCode change caused by deserializing an instance with an 
+    /* Used to simulate a hashCode change caused by deserializing an instance with an
      * identity-based hashCode in another JVM.
      */
     var hashCodeModifier = 0
   }
-  
-  @serializable
-  class Foo {
+
+  class Foo extends Serializable {
     override def hashCode = System.identityHashCode(this) + Foo.hashCodeModifier
   }
 }
diff --git a/test/files/jvm/t1948.scala b/test/files/jvm/t1948.scala
index 084c956..95777b8 100644
--- a/test/files/jvm/t1948.scala
+++ b/test/files/jvm/t1948.scala
@@ -1,7 +1,9 @@
-import scala.actors._
-import scala.actors.Actor._
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors._
+  import scala.actors.Actor._
 
   def main (args: Array[String]) {
     val actors = (1 to 1000).toList map { x => actor {
diff --git a/test/files/jvm/t2163/t2163.java b/test/files/jvm/t2163/t2163.java
new file mode 100644
index 0000000..83bd37d
--- /dev/null
+++ b/test/files/jvm/t2163/t2163.java
@@ -0,0 +1,9 @@
+import java.util.*;
+
+public class t2163 {
+    public void test() {
+      List<Integer> array = new ArrayList<Integer>();
+      T2163Scala<List> foo = new T2163Scala<List>(array);
+      foo.bar(array);
+    }
+}
diff --git a/test/files/jvm/t2163/t2163.scala b/test/files/jvm/t2163/t2163.scala
new file mode 100644
index 0000000..fdf19c4
--- /dev/null
+++ b/test/files/jvm/t2163/t2163.scala
@@ -0,0 +1,10 @@
+
+import scala.language.{ higherKinds }
+
+class T2163Scala[CC[X]](x: CC[Int]) {
+  def bar[DD[X]](meh: DD[Int]): CC[Int] = x
+}
+
+object Test extends App {
+  new t2163().test()
+}
diff --git a/test/files/jvm/t2359.scala b/test/files/jvm/t2359.scala
index 69c69d7..76b78d4 100644
--- a/test/files/jvm/t2359.scala
+++ b/test/files/jvm/t2359.scala
@@ -1,6 +1,8 @@
-import scala.actors.Futures._
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors.Futures._
   def main(args: Array[String]) {
     val x = future {
       try {
diff --git a/test/files/jvm/t2470.cmds b/test/files/jvm/t2470.cmds
deleted file mode 100644
index b4ef0f4..0000000
--- a/test/files/jvm/t2470.cmds
+++ /dev/null
@@ -1,3 +0,0 @@
-javac Action.java Task.java
-scalac Test_1.scala
-scalac Read_Classfile_2.scala
diff --git a/test/files/jvm/t2530.scala b/test/files/jvm/t2530.scala
index c2925a9..b41661e 100644
--- a/test/files/jvm/t2530.scala
+++ b/test/files/jvm/t2530.scala
@@ -1,6 +1,8 @@
-import scala.actors.{Future, Futures}
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors.{Future, Futures}
 
   def main(args:Array[String]) : Unit = {
     //scala.actors.Debug.level = 3
@@ -29,7 +31,6 @@ object Test {
     }
     println("Test done with no deadlock. Try again, it will not occur...")
   }
-}
 
 case class Matrix(numRows: Int, numCols: Int, values: Array[Double])  {
 
@@ -94,3 +95,4 @@ case class Matrix(numRows: Int, numCols: Int, values: Array[Double])  {
   }
 
 }
+}
diff --git a/test/files/jvm/t2570/Test.scala b/test/files/jvm/t2570/Test.scala
index 7944aed..ad4d29d 100644
--- a/test/files/jvm/t2570/Test.scala
+++ b/test/files/jvm/t2570/Test.scala
@@ -1,3 +1,3 @@
 class Test2 extends Test1[Test3[Test4]]
 class Test4
-object Test extends Application {} 
\ No newline at end of file
+object Test extends App {}
\ No newline at end of file
diff --git a/test/files/jvm/t3003.cmds b/test/files/jvm/t3003.cmds
deleted file mode 100644
index c003966..0000000
--- a/test/files/jvm/t3003.cmds
+++ /dev/null
@@ -1,2 +0,0 @@
-javac Annot.java
-scalac Test_1.scala
diff --git a/test/files/jvm/t3003/Test_1.scala b/test/files/jvm/t3003/Test_1.scala
index 38af66a..8ec08be 100644
--- a/test/files/jvm/t3003/Test_1.scala
+++ b/test/files/jvm/t3003/Test_1.scala
@@ -9,7 +9,7 @@ object Test {
         . map(f => f.getAnnotations.toList)
         . filterNot (_.isEmpty) // there are extra fields under -Xcheckinit
     )
-    
+
     println(xs)
   }
 }
diff --git a/test/files/jvm/t3102.scala b/test/files/jvm/t3102.scala
index fbcf2e6..d0e0704 100644
--- a/test/files/jvm/t3102.scala
+++ b/test/files/jvm/t3102.scala
@@ -1,7 +1,10 @@
-import scala.actors.{Actor, TIMEOUT}
-import Actor._
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors.{Actor, TIMEOUT}
+  import Actor._
+
   def main(args: Array[String]) {
     val a = actor {
       try {
diff --git a/test/files/jvm/t3356.scala b/test/files/jvm/t3356.scala
index a9e83fe..53bfd73 100644
--- a/test/files/jvm/t3356.scala
+++ b/test/files/jvm/t3356.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors.{Actor, Exit, !, UncaughtException}
 import Actor._
 
@@ -10,7 +14,6 @@ case class ImageInfo(text: String) {
 case class ImageData(text: String)
 case class Download(info: ImageInfo)
 
-object Test {
 
   def scanForImageInfo(url: String): List[ImageInfo] =
     List(ImageInfo("A"), ImageInfo("B"))
diff --git a/test/files/jvm/t3365.scala b/test/files/jvm/t3365.scala
index b94e804..8321428 100644
--- a/test/files/jvm/t3365.scala
+++ b/test/files/jvm/t3365.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
 import scala.actors.{ReplyReactor, Channel, Actor, Future}
 
 case class ChannelMsg(chan: Channel[Any])
@@ -27,7 +31,6 @@ class MyActor extends Actor {
   }
 }
 
-object Test {
   def main(args: Array[String]) {
     val a = new MyActor
     a.start()
diff --git a/test/files/jvm/t3407.scala b/test/files/jvm/t3407.scala
index 6c2ce85..757fa3a 100644
--- a/test/files/jvm/t3407.scala
+++ b/test/files/jvm/t3407.scala
@@ -1,6 +1,8 @@
-import scala.actors._, scala.actors.Actor._
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors._, scala.actors.Actor._
 
   def main(args: Array[String]) {
     for (i <- 1 to 10) {
diff --git a/test/files/jvm/t3412-channel.scala b/test/files/jvm/t3412-channel.scala
index fcc439b..af319d2 100644
--- a/test/files/jvm/t3412-channel.scala
+++ b/test/files/jvm/t3412-channel.scala
@@ -1,6 +1,8 @@
-import scala.actors._, scala.actors.Actor._, scala.actors.Futures._
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors._, scala.actors.Actor._, scala.actors.Futures._
 
   def main(args: Array[String]) {
 
diff --git a/test/files/jvm/t3412.scala b/test/files/jvm/t3412.scala
index ced15ab..fde6c04 100644
--- a/test/files/jvm/t3412.scala
+++ b/test/files/jvm/t3412.scala
@@ -1,6 +1,8 @@
-import scala.actors._, scala.actors.Actor._, scala.actors.Futures._
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors._, scala.actors.Actor._, scala.actors.Futures._
 
   def main(args: Array[String]) {
 
diff --git a/test/files/jvm/t3415/HelloWorld.scala b/test/files/jvm/t3415/HelloWorld.scala
index 53bf55e..5ef0123 100644
--- a/test/files/jvm/t3415/HelloWorld.scala
+++ b/test/files/jvm/t3415/HelloWorld.scala
@@ -1,4 +1,4 @@
-object Test extends Application {
+object Test extends App {
   @Hello
   def foo() { }
 }
diff --git a/test/files/jvm/t3470.scala b/test/files/jvm/t3470.scala
index 5e4242c..bcb1d4f 100644
--- a/test/files/jvm/t3470.scala
+++ b/test/files/jvm/t3470.scala
@@ -1,6 +1,8 @@
-import scala.actors._
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors._
 
   def expectActorState(a: Reactor[T] forSome { type T }, s: Actor.State.Value) {
     var done = false
diff --git a/test/files/jvm/t3838.scala b/test/files/jvm/t3838.scala
index ba8f15f..a1a71d1 100644
--- a/test/files/jvm/t3838.scala
+++ b/test/files/jvm/t3838.scala
@@ -1,6 +1,8 @@
-import scala.actors.Actor._
 
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
+  import scala.actors.Actor._
   def main(args: Array[String]) {
     actor {
       try {
diff --git a/test/files/jvm/ticket4283/AbstractFoo.java b/test/files/jvm/t4283/AbstractFoo.java
similarity index 100%
rename from test/files/jvm/ticket4283/AbstractFoo.java
rename to test/files/jvm/t4283/AbstractFoo.java
diff --git a/test/files/jvm/ticket4283/ScalaBipp.scala b/test/files/jvm/t4283/ScalaBipp.scala
similarity index 100%
rename from test/files/jvm/ticket4283/ScalaBipp.scala
rename to test/files/jvm/t4283/ScalaBipp.scala
diff --git a/test/files/jvm/ticket4283/Test.scala b/test/files/jvm/t4283/Test.scala
similarity index 100%
rename from test/files/jvm/ticket4283/Test.scala
rename to test/files/jvm/t4283/Test.scala
diff --git a/test/files/jvm/si5471.check b/test/files/jvm/t5471.check
similarity index 100%
rename from test/files/jvm/si5471.check
rename to test/files/jvm/t5471.check
diff --git a/test/files/jvm/t5471.scala b/test/files/jvm/t5471.scala
new file mode 100644
index 0000000..2efd869
--- /dev/null
+++ b/test/files/jvm/t5471.scala
@@ -0,0 +1,17 @@
+
+object Test {
+
+  def main(args: Array[String]) {
+    import scala.math.Numeric
+    import scala.math.Numeric.Implicits._
+
+    val b = BigInt(Long.MaxValue) + 1
+
+    def dbl[N :Numeric](n: N) = n.toDouble
+    def flt[N :Numeric](n: N) = n.toFloat
+
+    println(dbl(b) == b.toDouble)
+    println(flt(b) == b.toFloat)
+  }
+
+}
diff --git a/test/files/jvm/t560bis.check b/test/files/jvm/t560bis.check
deleted file mode 100644
index 91eb4c1..0000000
--- a/test/files/jvm/t560bis.check
+++ /dev/null
@@ -1,2 +0,0 @@
-cool!
-cool!
diff --git a/test/files/jvm/t560bis.scala b/test/files/jvm/t560bis.scala
deleted file mode 100644
index b04303c..0000000
--- a/test/files/jvm/t560bis.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-object Test {
-import scala.xml._;
-
-  def bar(args: Seq[String]) = args match {
-    case Seq(a,b,c,d @ _*) => Console.println("cool!")
-    case _ => Console.println("bah")
-  }
-  def foo(args: List[String]) = 
-    Elem(null,"bla",Null, TopScope, (args map {x => Text(x)}):_*) match {
-      case Elem(_,_,_,_,Text("1"),_*) =>
-        Console.println("cool!")
-      case _ =>
-        Console.println("bah")
-    }
-  
-  def main(args: Array[String]) = {
-    val li = List("1","2","3","4")
-    bar(li)
-    foo(li)
-  }
-}
diff --git a/test/files/jvm/t6941/Analyzed_1.flags b/test/files/jvm/t6941/Analyzed_1.flags
new file mode 100644
index 0000000..ad51758
--- /dev/null
+++ b/test/files/jvm/t6941/Analyzed_1.flags
@@ -0,0 +1 @@
+-nowarn
diff --git a/test/files/jvm/t6941/Analyzed_1.scala b/test/files/jvm/t6941/Analyzed_1.scala
index 549abd5..b6951f7 100644
--- a/test/files/jvm/t6941/Analyzed_1.scala
+++ b/test/files/jvm/t6941/Analyzed_1.scala
@@ -6,6 +6,6 @@ class SameBytecode {
   }
 
   def b(xs: List[Int]) = xs match {
-    case xs: ::[Int] => xs.hd$1
+    case xs: ::[Int] => xs.head
   }
 }
\ No newline at end of file
diff --git a/test/files/jvm/t7006.check b/test/files/jvm/t7006.check
new file mode 100644
index 0000000..6294b14
--- /dev/null
+++ b/test/files/jvm/t7006.check
@@ -0,0 +1,29 @@
+[running phase parser on Foo_1.scala]
+[running phase namer on Foo_1.scala]
+[running phase packageobjects on Foo_1.scala]
+[running phase typer on Foo_1.scala]
+[running phase patmat on Foo_1.scala]
+[running phase superaccessors on Foo_1.scala]
+[running phase extmethods on Foo_1.scala]
+[running phase pickler on Foo_1.scala]
+[running phase refchecks on Foo_1.scala]
+[running phase uncurry on Foo_1.scala]
+[running phase tailcalls on Foo_1.scala]
+[running phase specialize on Foo_1.scala]
+[running phase explicitouter on Foo_1.scala]
+[running phase erasure on Foo_1.scala]
+[running phase posterasure on Foo_1.scala]
+[running phase lazyvals on Foo_1.scala]
+[running phase lambdalift on Foo_1.scala]
+[running phase constructors on Foo_1.scala]
+[running phase flatten on Foo_1.scala]
+[running phase mixin on Foo_1.scala]
+[running phase cleanup on Foo_1.scala]
+[running phase delambdafy on Foo_1.scala]
+[running phase icode on Foo_1.scala]
+[running phase inliner on Foo_1.scala]
+[running phase inlinehandlers on Foo_1.scala]
+[running phase closelim on Foo_1.scala]
+[running phase constopt on Foo_1.scala]
+[running phase dce on Foo_1.scala]
+[running phase jvm on icode]
diff --git a/test/files/jvm/t7006/Foo_1.flags b/test/files/jvm/t7006/Foo_1.flags
new file mode 100644
index 0000000..37b2116
--- /dev/null
+++ b/test/files/jvm/t7006/Foo_1.flags
@@ -0,0 +1 @@
+-optimise -Ydebug -Xfatal-warnings
diff --git a/test/files/jvm/t7006/Foo_1.scala b/test/files/jvm/t7006/Foo_1.scala
new file mode 100644
index 0000000..995619c
--- /dev/null
+++ b/test/files/jvm/t7006/Foo_1.scala
@@ -0,0 +1,10 @@
+class Foo_1 {
+  def foo {
+    try {
+      val x = 3 // this will be optimized away, leaving a useless jump only block
+    } finally {
+      print("hello")
+    }
+    while(true){} // ensure infinite loop doesn't break the algorithm
+  }
+}
diff --git a/test/files/jvm/t7006/Test.scala b/test/files/jvm/t7006/Test.scala
new file mode 100644
index 0000000..065a235
--- /dev/null
+++ b/test/files/jvm/t7006/Test.scala
@@ -0,0 +1,19 @@
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm
+import asm.tree.InsnList
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+  def show: Unit = {
+    val classNode = loadClassNode("Foo_1")
+    val methodNode = getMethod(classNode, "foo")
+    assert(count(methodNode.instructions, asm.Opcodes.NOP) == 0)
+    assert(count(methodNode.instructions, asm.Opcodes.GOTO) == 1)
+  }
+
+  def count(insnList: InsnList, opcode: Int): Int = {
+    def isNop(node: asm.tree.AbstractInsnNode): Boolean =
+      (node.getOpcode == opcode)
+    insnList.iterator.asScala.count(isNop)
+  }
+}
diff --git a/test/files/jvm/t7146.scala b/test/files/jvm/t7146.scala
index 2bd03d6..aaa3dc7 100644
--- a/test/files/jvm/t7146.scala
+++ b/test/files/jvm/t7146.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ reflectiveCalls }
 import java.util.concurrent.Executor
 import scala.concurrent._
 import scala.util.control.NoStackTrace
diff --git a/test/files/jvm/t7181/Foo_1.scala b/test/files/jvm/t7181/Foo_1.scala
new file mode 100644
index 0000000..f9dfdd4
--- /dev/null
+++ b/test/files/jvm/t7181/Foo_1.scala
@@ -0,0 +1,26 @@
+class Exception1 extends RuntimeException
+class Exception2 extends RuntimeException
+
+class Foo_1 {
+  def foo(baz: Baz) {
+    try {
+      baz.bar
+    } catch {
+      case _: Exception1 => println("exception 1")
+      case _: Exception2 => println("exception 2")
+    } finally {
+      // this should be the only copy of the magic constant 3
+      // making it easy to detect copies of this finally block
+      println(s"finally ${3}")
+    }
+    println(s"normal flow")
+  }
+}
+
+trait Baz {
+  // does it throw? who knows? This way
+  // I can ensure that no optimization that honors
+  // separate compilation could ever
+  // change the exception handling structure
+  def bar: Unit
+}
diff --git a/test/files/jvm/t7181/Test.scala b/test/files/jvm/t7181/Test.scala
new file mode 100644
index 0000000..35dba43
--- /dev/null
+++ b/test/files/jvm/t7181/Test.scala
@@ -0,0 +1,24 @@
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm
+import asm.tree.InsnList
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+  def show: Unit = {
+    val classNode = loadClassNode("Foo_1")
+    val methodNode = getMethod(classNode, "foo")
+    // there should be 2 copies of the finally block, each with the magic constant 3
+    // one for the "normal" exit
+    // one for the uncaught exception exit
+    // prior to this PR there would have been 4 since each exception handler would also get a copy
+    val expected = 2
+    val got = countMagicThrees(methodNode.instructions)
+    assert(got == expected, s"expected $expected but got $got magic threes")
+  }
+
+  def countMagicThrees(insnList: InsnList): Int = {
+    def isMagicThree(node: asm.tree.AbstractInsnNode): Boolean =
+      (node.getOpcode == asm.Opcodes.ICONST_3)
+    insnList.iterator.asScala.count(isMagicThree)
+  }
+}
diff --git a/test/files/jvm/throws-annot.scala b/test/files/jvm/throws-annot.scala
index b679b6c..90b58b9 100644
--- a/test/files/jvm/throws-annot.scala
+++ b/test/files/jvm/throws-annot.scala
@@ -43,24 +43,24 @@ object TestThrows {
 
 /** Test the top-level mirror that is has the annotations. */
 object TL {
-  
+
   @throws(classOf[IOException])
   def read(): Int = 0
-  
+
   @throws(classOf[ClassCastException])
   @throws(classOf[IOException])
   def readWith2(): Int = 0
-  
+
   @throws(classOf[IOException])
   @Deprecated
   @throws(classOf[NullPointerException])
   def readMixed(): Int = 0
-  
+
   @Deprecated
   @throws(classOf[IOException])
   @throws(classOf[NullPointerException])
   def readMixed2(): Int = 0
-  
+
   @Deprecated
   def readNoEx(): Int = 0
 }
diff --git a/test/files/jvm/ticket2163/ticket2163.java b/test/files/jvm/ticket2163/ticket2163.java
deleted file mode 100644
index b6511d2..0000000
--- a/test/files/jvm/ticket2163/ticket2163.java
+++ /dev/null
@@ -1,9 +0,0 @@
-import java.util.*;
-
-public class ticket2163 {
-    public void test() {
-      List<Integer> array = new ArrayList<Integer>();
-      Ticket2163Scala<List> foo = new Ticket2163Scala<List>(array);
-      foo.bar(array);
-    }
-}
diff --git a/test/files/jvm/ticket2163/ticket2163.scala b/test/files/jvm/ticket2163/ticket2163.scala
deleted file mode 100644
index d30bfe2..0000000
--- a/test/files/jvm/ticket2163/ticket2163.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-class Ticket2163Scala[CC[X]](x: CC[Int]) {
-  def bar[DD[X]](meh: DD[Int]): CC[Int] = x
-}
-
-object Test extends Application {}
diff --git a/test/files/jvm/try-type-tests.scala b/test/files/jvm/try-type-tests.scala
index 17811f6..962afbd 100644
--- a/test/files/jvm/try-type-tests.scala
+++ b/test/files/jvm/try-type-tests.scala
@@ -3,142 +3,143 @@ import scala.util.{Try, Success, Failure}
 // tests the basic combinators on Try
 trait TryStandard {
 
-	def testForeachSuccess(): Unit = {
-		val t = Success(1)
-		var res = 0
-		t.foreach(x => res = x * 10)
-		assert(res == 10)
-	}
-
-	def testForeachFailure(): Unit = {
-		val t = Failure(new Exception("foo"))
-		t.foreach(x => assert(false))
-	}
-
-	def testFlatMapSuccess(): Unit = {
-		val t = Success(1)
-		val n = t.flatMap(x => Try(x * 10))
-		assert(n.get == 10)
-	}
-
-	def testFlatMapFailure(): Unit = {
-		val t = Failure(new Exception("foo"))
-		val n = t.flatMap{ x => assert(false); Try() }
-	}
-
-	def testMapSuccess(): Unit = {
-		val t = Success(1)
-		val n = t.map(x => x * 10)
-		assert(n.get == 10)
-	}
-
-	def testMapFailure(): Unit = {
-		val t = Failure(new Exception("foo"))
-		val n = t.map(x => assert(false))
-	}
-
-	def testFilterSuccessTrue(): Unit = {
-		val t = Success(1)
-		val n = t.filter(x => x > 0)
-		assert(n.get == 1)
-	}
-
-	def testFilterSuccessFalse(): Unit = {
-		val t = Success(1)
-		val n = t.filter(x => x < 0)
-		n match {
-			case Success(v) => assert(false)
-			case Failure(e: NoSuchElementException) => assert(true)
-		}
-	}
-
-	def testFilterFailure(): Unit = {
-		val t = Failure(new Exception("foo"))
-		val n = t.filter{ x => assert(false); true }
-	}
-
-	def testRescueSuccess(): Unit = {
-		val t = Success(1)
-		t.recoverWith{ case x => assert(false); Try() }
-	}
-
-	def testRescueFailure(): Unit = {
-		val t = Failure(new Exception("foo"))
-		val n = t.recoverWith{ case x => Try(1) }
-		assert(n.get == 1)
-	}
-
-	def testRecoverSuccess(): Unit = {
-		val t = Success(1)
-		t.recover{ case x => assert(false); 99 }
-	}
-
-	def testRecoverFailure(): Unit = {
-		val t = Failure(new Exception("foo"))
-		val n = t.recover{ case x => 1 }
-		assert(n.get == 1)
-	}
-
-	def testFlattenSuccess(): Unit = {
-		val f = Failure(new Exception("foo"))
-		val t = Success(f)
-		assert(t.flatten == f)
-	}
-
-	def testFailedSuccess(): Unit = {
-		val t = Success(1)
-		val n = t.failed
-		n match {
-			case Failure(e: UnsupportedOperationException) => assert(true)
-			case _ => assert(false)
-		}
-	}
-
-	def testFailedFailure(): Unit = {
-		val t = Failure(new Exception("foo"))
-		val n = t.failed
-		n match {
-			case Success(e: Exception) => assert(true)
-			case _ => assert(false)
-		}
-	}
-
-	def testSuccessTransform(): Unit = {
-		val s = Success(1)
-		val succ = (x: Int) => Success(x * 10)
-		val fail = (x: Throwable) => Success(0)
-		assert(s.transform(succ, fail).get == 10)
-	}
-
-	def testFailureTransform(): Unit = {
-		val f = Failure(new Exception("foo"))
-		val succ = (x: Int) => Success(x * 10)
-		val fail = (x: Throwable) => Success(0)
-		assert(f.transform(succ, fail).get == 0)
-	}
-
-	testForeachSuccess()
-	testForeachFailure()
-	testFlatMapSuccess()
-	testFlatMapFailure()
-	testMapSuccess()
-	testMapFailure()
-	testFilterSuccessTrue()
-	testFilterSuccessFalse()
-	testFilterFailure()
-	testRescueSuccess()
-	testRescueFailure()
-	testRecoverSuccess()
-	testRecoverFailure()
-	testFlattenSuccess()
-	testFailedSuccess()
-	testFailedFailure()
-	testSuccessTransform()
-	testFailureTransform()
+  def testForeachSuccess(): Unit = {
+    val t = Success(1)
+    var res = 0
+    t.foreach(x => res = x * 10)
+    assert(res == 10)
+  }
+
+  def testForeachFailure(): Unit = {
+    val t = Failure(new Exception("foo"))
+    t.foreach(x => assert(false))
+  }
+
+  def testFlatMapSuccess(): Unit = {
+    val t = Success(1)
+    val n = t.flatMap(x => Try(x * 10))
+    assert(n.get == 10)
+  }
+
+  def testFlatMapFailure(): Unit = {
+    val t = Failure(new Exception("foo"))
+    val n = t.flatMap{ x => assert(false); Try(()) }
+  }
+
+  def testMapSuccess(): Unit = {
+    val t = Success(1)
+    val n = t.map(x => x * 10)
+    assert(n.get == 10)
+  }
+
+  def testMapFailure(): Unit = {
+    val t = Failure(new Exception("foo"))
+    val n = t.map(x => assert(false))
+  }
+
+  def testFilterSuccessTrue(): Unit = {
+    val t = Success(1)
+    val n = t.filter(x => x > 0)
+    assert(n.get == 1)
+  }
+
+  def testFilterSuccessFalse(): Unit = {
+    val t = Success(1)
+    val n = t.filter(x => x < 0)
+    n match {
+      case Success(v) => assert(false)
+      case Failure(e: NoSuchElementException) => assert(true)
+      case _          => assert(false)
+    }
+  }
+
+  def testFilterFailure(): Unit = {
+    val t = Failure(new Exception("foo"))
+    val n = t.filter{ x => assert(false); true }
+  }
+
+  def testRescueSuccess(): Unit = {
+    val t = Success(1)
+    t.recoverWith{ case x => assert(false); Try(()) }
+  }
+
+  def testRescueFailure(): Unit = {
+    val t = Failure(new Exception("foo"))
+    val n = t.recoverWith{ case x => Try(1) }
+    assert(n.get == 1)
+  }
+
+  def testRecoverSuccess(): Unit = {
+    val t = Success(1)
+    t.recover{ case x => assert(false); 99 }
+  }
+
+  def testRecoverFailure(): Unit = {
+    val t = Failure(new Exception("foo"))
+    val n = t.recover{ case x => 1 }
+    assert(n.get == 1)
+  }
+
+  def testFlattenSuccess(): Unit = {
+    val f = Failure(new Exception("foo"))
+    val t = Success(f)
+    assert(t.flatten == f)
+  }
+
+  def testFailedSuccess(): Unit = {
+    val t = Success(1)
+    val n = t.failed
+    n match {
+      case Failure(e: UnsupportedOperationException) => assert(true)
+      case _ => assert(false)
+    }
+  }
+
+  def testFailedFailure(): Unit = {
+    val t = Failure(new Exception("foo"))
+    val n = t.failed
+    n match {
+      case Success(e: Exception) => assert(true)
+      case _ => assert(false)
+    }
+  }
+
+  def testSuccessTransform(): Unit = {
+    val s = Success(1)
+    val succ = (x: Int) => Success(x * 10)
+    val fail = (x: Throwable) => Success(0)
+    assert(s.transform(succ, fail).get == 10)
+  }
+
+  def testFailureTransform(): Unit = {
+    val f = Failure(new Exception("foo"))
+    val succ = (x: Int) => Success(x * 10)
+    val fail = (x: Throwable) => Success(0)
+    assert(f.transform(succ, fail).get == 0)
+  }
+
+  testForeachSuccess()
+  testForeachFailure()
+  testFlatMapSuccess()
+  testFlatMapFailure()
+  testMapSuccess()
+  testMapFailure()
+  testFilterSuccessTrue()
+  testFilterSuccessFalse()
+  testFilterFailure()
+  testRescueSuccess()
+  testRescueFailure()
+  testRecoverSuccess()
+  testRecoverFailure()
+  testFlattenSuccess()
+  testFailedSuccess()
+  testFailedFailure()
+  testSuccessTransform()
+  testFailureTransform()
 }
 
 object Test
 extends App
 with TryStandard {
   System.exit(0)
-}
\ No newline at end of file
+}
diff --git a/test/files/jvm/typerep.scala b/test/files/jvm/typerep.scala
index 3befc7f..4f900d9 100644
--- a/test/files/jvm/typerep.scala
+++ b/test/files/jvm/typerep.scala
@@ -86,7 +86,7 @@ object testArrays {
 
 object testTuples {
   println(getType((3, "abc")))
-  println(getType(Triple('a', 'b', "c")))
+  println(getType(('a', 'b', "c")))
   println(getType(((3, "abc"), (4, "xyz"))))
   println(getType(((Some('b'), 3), (Some('a'), 4))))
   //println(getType(((Some('b'), 3), (None, 4))))
@@ -109,7 +109,7 @@ object testFuncs {
   def f5(f: Int => Int, x: Int) = f(x)
   println(getType(f5 _))
   println(getType(f5(f1, 1)))
-  println  
+  println
 }
 
 class Foo {
@@ -280,100 +280,74 @@ object TypeRep {
     override def toString = "Nothing"
   }
 
-  @serializable
   case class ClassRep[A](elemRep: TypeRep[A]) extends TypeRep[Class[A]] {
     override def toString = "Class[" + elemRep + "]"
   }
-  @serializable
   case class SomeRep[A](elemRep: TypeRep[A]) extends TypeRep[Some[A]] {
     override def toString = "Some[" + elemRep + "]"
   }
-  @serializable
   case class NoneRep[A](elemRep: TypeRep[A]) extends TypeRep[Option[A]] {
     override def toString = "None[" + elemRep + "]"
   }
-
-  @serializable
   case class ListRep[A](elemRep: TypeRep[A]) extends TypeRep[List[A]] {
     override def toString = "List[" + elemRep + "]"
   }
-
-  @serializable
   case class ArrayRep[A](elemRep: TypeRep[A]) extends TypeRep[Array[A]] {
     override def toString = "Array[" + elemRep + "]"
   }
-
-  @serializable
   case class Tuple2Rep[A1, A2](_1: TypeRep[A1], _2: TypeRep[A2]) extends TypeRep[(A1, A2)] {
     override def toString = "Tuple2[" + _1 + ", " + _2 + "]"
   }
-  @serializable
   case class Tuple3Rep[A1, A2, A3](_1: TypeRep[A1], _2: TypeRep[A2], _3: TypeRep[A3]) extends TypeRep[Tuple3[A1, A2, A3]] {
     override def toString = "Tuple3[" + _1 + ", " + _2 + ", " + _3 + "]"
   }
-  @serializable
   case class Tuple4Rep[A1, A2, A3, A4](_1: TypeRep[A1], _2: TypeRep[A2], _3: TypeRep[A3], _4: TypeRep[A4]) extends TypeRep[Tuple4[A1, A2, A3, A4]] {
     override def toString = "Tuple4[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + "]"
   }
-  @serializable
   case class Tuple5Rep[A1, A2, A3, A4, A5](_1: TypeRep[A1], _2: TypeRep[A2], _3: TypeRep[A3], _4: TypeRep[A4], _5: TypeRep[A5]) extends TypeRep[Tuple5[A1, A2, A3, A4, A5]] {
     override def toString = "Tuple5[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + "]"
   }
-  @serializable
   case class Tuple6Rep[A1, A2, A3, A4, A5, A6](val _1: TypeRep[A1], val _2: TypeRep[A2], val _3: TypeRep[A3], val _4: TypeRep[A4], val _5: TypeRep[A5], val _6: TypeRep[A6]) extends TypeRep[Tuple6[A1, A2, A3, A4, A5, A6]] {
     override def toString = "Tuple6[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + ", " + _6 + "]"
   }
-  @serializable
   case class Tuple7Rep[A1, A2, A3, A4, A5, A6, A7](val _1: TypeRep[A1], val _2: TypeRep[A2], val _3: TypeRep[A3], val _4: TypeRep[A4], val _5: TypeRep[A5], val _6: TypeRep[A6], val _7: TypeRep[A7]) extends TypeRep[Tuple7[A1, A2, A3, A4, A5, A6, A7]] {
     override def toString = "Tuple7[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + ", " + _6 + ", " + _7 + "]"
   }
-  @serializable
   case class Tuple8Rep[A1, A2, A3, A4, A5, A6, A7, A8](val _1: TypeRep[A1], val _2: TypeRep[A2], val _3: TypeRep[A3], val _4: TypeRep[A4], val _5: TypeRep[A5], val _6: TypeRep[A6], val _7: TypeRep[A7], val _8: TypeRep[A8]) extends TypeRep[Tuple8[A1, A2, A3, A4, A5, A6, A7, A8]] {
     override def toString = "Tuple8[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + ", " + _6 + ", " + _7 + ", " + _8 + "]"
   }
-  @serializable
   case class Tuple9Rep[A1, A2, A3, A4, A5, A6, A7, A8, A9](val _1: TypeRep[A1], val _2: TypeRep[A2], val _3: TypeRep[A3], val _4: TypeRep[A4], val _5: TypeRep[A5], val _6: TypeRep[A6], val _7: TypeRep[A7], val _8: TypeRep[A8], val _9: TypeRep[A9]) extends TypeRep[Tuple9[A1, A2, A3, A4, A5, A6, A7, A8, A9]] {
     override def toString = "Tuple9[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + ", " + _6 + ", " + _7 + ", " + _8 + ", " + _9 + "]"
   }
 
-  @serializable
   case class Function1Rep[A1, B](a1: TypeRep[A1], b: TypeRep[B]) extends TypeRep[Function1[A1, B]] {
     override def toString = "Function1[" + a1 + ", " + b + "]"
   }
-  @serializable
   case class Function2Rep[A1, A2, B](a1: TypeRep[A1], a2: TypeRep[A2], b: TypeRep[B]) extends TypeRep[Function2[A1, A2, B]] {
     override def toString = "Function2[" + a1 + ", " + a2 + ", " + b + "]"
   }
-  @serializable
   case class Function3Rep[A1, A2, A3, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], b: TypeRep[B]) extends TypeRep[Function3[A1, A2, A3, B]] {
     override def toString = "Function3[" + a1 + ", " + a2 + ", " + a3 + ", " + b + "]"
   }
-  @serializable
   case class Function4Rep[A1, A2, A3, A4, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], b: TypeRep[B]) extends TypeRep[Function4[A1, A2, A3, A4, B]] {
     override def toString = "Function4[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + b + "]"
   }
-  @serializable
   case class Function5Rep[A1, A2, A3, A4, A5, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], b: TypeRep[B]) extends TypeRep[Function5[A1, A2, A3, A4, A5, B]] {
     override def toString = "Function5[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + b + "]"
   }
-  @serializable
   case class Function6Rep[A1, A2, A3, A4, A5, A6, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], a6: TypeRep[A6], b: TypeRep[B]) extends TypeRep[Function6[A1, A2, A3, A4, A5, A6, B]] {
     override def toString = "Function6[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + a6 + ", " + b + "]"
   }
-  @serializable
   case class Function7Rep[A1, A2, A3, A4, A5, A6, A7, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], a6: TypeRep[A6], a7: TypeRep[A7], b: TypeRep[B]) extends TypeRep[Function7[A1, A2, A3, A4, A5, A6, A7, B]] {
     override def toString = "Function7[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + a6 + ", " + a7 + ", " + b + "]"
   }
-  @serializable
   case class Function8Rep[A1, A2, A3, A4, A5, A6, A7, A8, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], a6: TypeRep[A6], a7: TypeRep[A7], a8: TypeRep[A8], b: TypeRep[B]) extends TypeRep[Function8[A1, A2, A3, A4, A5, A6, A7, A8, B]] {
     override def toString = "Function8[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + a6 + ", " + a7 + ", " + a8 + b + "]"
   }
-  @serializable
   case class Function9Rep[A1, A2, A3, A4, A5, A6, A7, A8, A9, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], a6: TypeRep[A6], a7: TypeRep[A7], a8: TypeRep[A8], a9: TypeRep[A9], b: TypeRep[B]) extends TypeRep[Function9[A1, A2, A3, A4, A5, A6, A7, A8, A9, B]] {
     override def toString = "Function9[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + a6 + ", " + a7 + ", " + a8 + ", " + b + "]"
   }
 /*
-  @serializable
   case class ObjectRep[A](c: Class) extends TypeRep[A] {
     override def toString = c.getName
   }
diff --git a/test/files/jvm/unittest_io_Jvm.check b/test/files/jvm/unittest_io_Jvm.check
index d6e855f..bcfce8c 100644
--- a/test/files/jvm/unittest_io_Jvm.check
+++ b/test/files/jvm/unittest_io_Jvm.check
@@ -1,5 +1,5 @@
 lines.size = 5
- 
+
 This is a file
 it is split on several lines.
 
diff --git a/test/files/jvm/unittest_io_Jvm.scala b/test/files/jvm/unittest_io_Jvm.scala
index 1484774..7c8ef13 100644
--- a/test/files/jvm/unittest_io_Jvm.scala
+++ b/test/files/jvm/unittest_io_Jvm.scala
@@ -3,7 +3,7 @@ import scala.io.Source
 object Test {
   def main(args: Array[String]) {
     val lines = Source.fromString(
-      """| 
+      """|
          |This is a file
          |it is split on several lines.
          |
diff --git a/test/files/jvm/unittest_xml.scala b/test/files/jvm/unittest_xml.scala
deleted file mode 100644
index 106334e..0000000
--- a/test/files/jvm/unittest_xml.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-import scala.xml.{ MetaData, Null, Utility, PrefixedAttribute, UnprefixedAttribute }
-
-object Test {
-
-  def main(args:Array[String]) = {
-    MetaDataTest.run()
-    UtilityTest.run()
-  }
-
-  object MetaDataTest {
-
-    import scala.xml.{ TopScope, NamespaceBinding, Node, Atom, Text }
-
-    def domatch(x:Node): Node = {
-      x match {
-            case Node("foo", md @ UnprefixedAttribute(_, value, _), _*) if !value.isEmpty =>
-                 md("bar")(0)
-            case _ => new Atom(3)
-      }
-    }
-
-    def run() {
-
-      var x: MetaData         = Null
-      var s: NamespaceBinding = TopScope
-
-      // testing method def apply(uri:String, scp:NamespaceBinding, k:String): Seq[Node] 
-      //                def apply(k:String): Seq[Node] 
-
-      assert(null == x("za://foo.com", s, "bar" ), "absent element (prefixed) 1")
-      assert(null == x("bar"), "absent element (unprefix) 1")
-
-      assert(None == x.get("za://foo.com", s, "bar" ), "absent element (prefixed) 2")
-      assert(None == x.get("bar"), "absent element (unprefix) 2")
-
-      x = new PrefixedAttribute("zo","bar", new Atom(42), x)
-      s = new NamespaceBinding("zo","za://foo.com",s)
-
-      assert(new Atom(42) == x("za://foo.com", s, "bar" ), "present element (prefixed) 3")
-      assert(null == x("bar"), "present element (unprefix) 3")
-
-      assert(Some(new Atom(42)) == x.get("za://foo.com", s, "bar" ), "present element (prefixed) 4")
-      assert(None == x.get("bar"), "present element (unprefix) 4")
-
-      x = new UnprefixedAttribute("bar","meaning", x)
-
-      assert(null == x(null, s, "bar"), "present element (prefixed) 5")
-      assert(Text("meaning") == x("bar"), "present element (unprefix) 5")
-
-      assert(None == x.get(null, s, "bar" ), "present element (prefixed) 6")
-      assert(Some(Text("meaning")) == x.get("bar"), "present element (unprefix) 6")
-
-      val z =  <foo bar="gar"/>
-      val z2 = <foo/>
-
-      assert(Text("gar") == domatch(z), "attribute extractor 1") 
-      assert(new Atom(3) == domatch(z2), "attribute extractor 2") 
-
-    }
-  }
-
-  object UtilityTest {
-    def run() {
-      assert(Utility.isNameStart('b'))
-      assert(!Utility.isNameStart(':'))
-
-      val x = <foo>
-                 <toomuchws/>
-              </foo>
-
-      val y = xml.Utility.trim(x)
-
-      assert(1 == (y match { case <foo><toomuchws/></foo> => 1 }), "trim 1")
-
-      val x2 = <foo>
-                 <toomuchws>  a b  b a  </toomuchws>
-              </foo>
-
-      val y2 = xml.Utility.trim(x2)
-
-      assert(2 == (y2 match { case <foo><toomuchws>a b b a</toomuchws></foo> => 2 }), "trim 2")
-
-      val z = <bar>''</bar>
-      val z1 = z.toString
-
-      assert("<bar>''</bar>" == z1, "apos unescaped")
-
-      val q = xml.Utility.sort(<a g='3' j='2' oo='2' a='2'/>)
-      assert(" a=\"2\" g=\"3\" j=\"2\" oo=\"2\"" == xml.Utility.sort(q.attributes).toString)
-
-      val pp = new xml.PrettyPrinter(80,5)
-      assert("<a a=\"2\" g=\"3\" j=\"2\" oo=\"2\"/>" == pp.format(q))
-
-      <hi>
-        <there/>
-        <guys/>
-      </hi>.hashCode // Bug #777
-    }
-  }
-
-}
diff --git a/test/files/jvm/unreachable/Foo_1.flags b/test/files/jvm/unreachable/Foo_1.flags
new file mode 100644
index 0000000..ce6e93b
--- /dev/null
+++ b/test/files/jvm/unreachable/Foo_1.flags
@@ -0,0 +1 @@
+-Ynooptimise
\ No newline at end of file
diff --git a/test/files/jvm/unreachable/Foo_1.scala b/test/files/jvm/unreachable/Foo_1.scala
new file mode 100644
index 0000000..600b96b
--- /dev/null
+++ b/test/files/jvm/unreachable/Foo_1.scala
@@ -0,0 +1,112 @@
+import scala.sys.error
+
+class Foo_1 {
+  def unreachableNormalExit: Int = {
+    return 42
+    0
+  }
+
+  def unreachableIf: Int = {
+    return 42
+    if (util.Random.nextInt % 2 == 0)
+      0
+    else
+      1
+  }
+
+  def unreachableIfBranches: Int = {
+    if (util.Random.nextInt % 2 == 0)
+      return 42
+    else
+      return 42
+
+    return 0
+  }
+
+  def unreachableOneLegIf: Int = {
+    if (util.Random.nextInt % 2 == 0)
+      return 42
+
+    return 42
+  }
+
+  def unreachableLeftBranch: Int = {
+    val result = if (util.Random.nextInt % 2 == 0)
+      return 42
+    else
+      42
+
+    return result
+  }
+
+  def unreachableRightBranch: Int = {
+    val result = if (util.Random.nextInt % 2 == 0)
+      42
+    else
+      return 42
+
+    return result
+  }
+
+  def unreachableTryCatchFinally: Int = {
+    return 42
+    try {
+      return 0
+    } catch {
+      case x: Throwable => return 1
+    } finally {
+      return 2
+    }
+    return 3
+  }
+
+  def unreachableAfterTry: Int = {
+    try {
+      return 42
+    } catch {
+      case x: Throwable => return 2
+    }
+    return 3
+  }
+
+  def unreachableAfterCatch: Int = {
+    try {
+      error("haha")
+    } catch {
+      case x: Throwable => return 42
+    }
+    return 3
+  }
+
+  def unreachableAfterFinally: Int = {
+    try {
+      return 1
+    } catch {
+      case x: Throwable => return 2
+    } finally {
+      return 42
+    }
+    return 3
+  }
+
+  def unreachableSwitch: Int = {
+  	return 42
+    val x = util.Random.nextInt % 2
+    x match {
+      case 0 => return 0
+      case 1 => return 1
+      case _ => error("wtf")
+    }
+    2
+  }
+
+  def unreachableAfterSwitch: Int = {
+    val x = util.Random.nextInt % 2
+    x match {
+      case 0 => return 42
+      case 1 => return 41 + x
+      case _ => error("wtf")
+    }
+    2
+  }
+}
diff --git a/test/files/jvm/unreachable/Test.scala b/test/files/jvm/unreachable/Test.scala
new file mode 100644
index 0000000..3f520eb
--- /dev/null
+++ b/test/files/jvm/unreachable/Test.scala
@@ -0,0 +1,23 @@
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm
+import asm.tree.InsnList
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+  def show: Unit = {
+    val classNode = loadClassNode("Foo_1")
+    // Foo_1 is full of unreachable code which, if not eliminated,
+    // will result in NOPs as can be confirmed by adding -Ydisable-unreachable-prevention
+    // to Foo_1.flags
+    for (methodNode <- classNode.methods.asScala) {
+      val got = count(methodNode.instructions, asm.Opcodes.NOP)
+      if (got != 0) println(s"Found $got NOP(s) in ${methodNode.name}")
+    }
+  }
+
+  def count(insnList: InsnList, opcode: Int): Int = {
+    def isNop(node: asm.tree.AbstractInsnNode): Boolean =
+      (node.getOpcode == opcode)
+    insnList.iterator.asScala.count(isNop)
+  }
+}
\ No newline at end of file
diff --git a/test/files/jvm/value-class-boxing.check b/test/files/jvm/value-class-boxing.check
new file mode 100644
index 0000000..20a9fe2
--- /dev/null
+++ b/test/files/jvm/value-class-boxing.check
@@ -0,0 +1,7 @@
+a2 and a1: bytecode identical
+a3 and a1: bytecode identical
+a4 and a1: bytecode identical
+b2 and b1: bytecode identical
+b3 and b1: bytecode identical
+b4 and b1: bytecode identical
+b5 and b1: bytecode identical
diff --git a/test/files/jvm/value-class-boxing/Analyzed_1.scala b/test/files/jvm/value-class-boxing/Analyzed_1.scala
new file mode 100644
index 0000000..dec8565
--- /dev/null
+++ b/test/files/jvm/value-class-boxing/Analyzed_1.scala
@@ -0,0 +1,17 @@
+class Wrap(val x: Int) extends AnyVal {
+  def ***(other: Bip): Wrap = new Wrap(x * other.x)
+}
+class Bip(val x: Int) extends AnyVal
+
+class SameBytecode {
+  def a1(x: Int, y: Int): Int = x + y
+  def a2(x: Wrap, y: Wrap): Wrap = new Wrap(x.x + y.x)
+  def a3(x: Int, y: Wrap): Wrap = new Wrap(x + y.x)
+  def a4(x: Int, y: Wrap): Int = x + y.x
+
+  def b1(x: Wrap, y: Int): Int = (x *** new Bip(y)).x
+  def b2(x: Wrap, y: Bip): Wrap = x *** y
+  def b3(x: Wrap, y: Int): Wrap = x *** new Bip(y)
+  def b4(x: Wrap, y: Bip): Bip = new Bip((x *** y).x)
+  def b5(x: Wrap, y: Int): Bip = new Bip((x *** new Bip(y)).x)
+}
diff --git a/test/files/jvm/value-class-boxing/test.scala b/test/files/jvm/value-class-boxing/test.scala
new file mode 100644
index 0000000..cf33183
--- /dev/null
+++ b/test/files/jvm/value-class-boxing/test.scala
@@ -0,0 +1,15 @@
+import scala.tools.partest.BytecodeTest
+
+object Test extends BytecodeTest {
+  def show: Unit = {
+    val classNode = loadClassNode("SameBytecode")
+    List("a2", "a3", "a4") foreach { m =>
+      print(m + " and a1: ")
+      sameBytecode(getMethod(classNode, "a1"), getMethod(classNode, m))
+    }
+    List("b2", "b3", "b4", "b5") foreach { m =>
+      print(m + " and b1: ")
+      sameBytecode(getMethod(classNode, "b1"), getMethod(classNode, m))
+    }
+  }
+}
diff --git a/test/files/jvm/varargs/JavaClass.java b/test/files/jvm/varargs/JavaClass.java
index 536e9a3..9851e1b 100644
--- a/test/files/jvm/varargs/JavaClass.java
+++ b/test/files/jvm/varargs/JavaClass.java
@@ -4,7 +4,7 @@
 public class JavaClass {
     public static <T> void varargz(int i, T... v) {
     }
-    
+
     public static void callSomeAnnotations() {
 	VaClass va = new VaClass();
 	va.vs(4, "", "", "");
diff --git a/test/files/jvm/varargs/VaClass.scala b/test/files/jvm/varargs/VaClass.scala
index 8e9cbdb..6343f9c 100644
--- a/test/files/jvm/varargs/VaClass.scala
+++ b/test/files/jvm/varargs/VaClass.scala
@@ -5,9 +5,9 @@ import annotation.varargs
 
 
 class VaClass {
-  
+
   @varargs def vs(a: Int, b: String*) = println(a + b.length)
   @varargs def vi(a: Int, b: Int*) = println(a + b.sum)
   @varargs def vt[T](a: Int, b: T*) = println(a + b.length)
-  
+
 }
diff --git a/test/files/jvm/xml01.check b/test/files/jvm/xml01.check
deleted file mode 100755
index d78e6df..0000000
--- a/test/files/jvm/xml01.check
+++ /dev/null
@@ -1,8 +0,0 @@
-equality
-xpath \
-xpath \\ DESCENDANTS
-<book><author>Peter Buneman</author><author>Dan Suciu</author><title>Data on ze web</title></book>
--- group nodes
-<f><a/><b/><c/></f>
-<a/><f><a/><b/><c/></f><a/><b/><c/>
-attribute value normalization
diff --git a/test/files/jvm/xml01.scala b/test/files/jvm/xml01.scala
deleted file mode 100644
index 2fab650..0000000
--- a/test/files/jvm/xml01.scala
+++ /dev/null
@@ -1,181 +0,0 @@
-import java.io.StringReader
-import org.xml.sax.InputSource
-
-import scala.util.logging._
-import scala.xml._
-
-object Test extends App {
-  val e:  scala.xml.MetaData         = Null  //Node.NoAttributes
-  val sc: scala.xml.NamespaceBinding = TopScope
-
-  val xmlFile1    = "<hello><world/></hello>";
-  val isrc1       = new InputSource(new StringReader(xmlFile1))
-  val parsedxml1  = XML.load(isrc1)
-  val isrc11      = new InputSource(new StringReader(xmlFile1))
-  val parsedxml11 = XML.load(isrc11)
-
-  val c = new Node {
-    def label = "hello"
-    override def hashCode() = 
-      Utility.hashCode(prefix, label, attributes.hashCode(), scope.hashCode(), child);
-    def child = Elem(null, "world", e, sc);
-    //def attributes = e;
-    override def text = ""
-  }
-
-  println("equality")
-  assert(c == parsedxml11)
-  assert(parsedxml1 == parsedxml11)
-  assert(List(parsedxml1) sameElements List(parsedxml11))
-  assert(Array(parsedxml1).toList sameElements List(parsedxml11))
-
-  val x2 = "<book><author>Peter Buneman</author><author>Dan Suciu</author><title>Data on ze web</title></book>";
-
-  val i = new InputSource(new StringReader(x2))
-  val x2p = XML.load(i)
-
-  assert(x2p == Elem(null, "book"  , e, sc,
-                     Elem(null, "author", e, sc,Text("Peter Buneman")), 
-                     Elem(null, "author", e, sc,Text("Dan Suciu")), 
-                     Elem(null, "title" , e, sc,Text("Data on ze web"))))
-
-  val xmlFile2 = "<bib><book><author>Peter Buneman</author><author>Dan Suciu</author><title>Data on ze web</title></book><book><author>John Mitchell</author><title>Foundations of Programming Languages</title></book></bib>";
-  val isrc2 = new InputSource(new StringReader(xmlFile2))
-  val parsedxml2 = XML.load(isrc2)
-
-  println("xpath \\")
-
-  assert(parsedxml1 \ "_" sameElements List(Elem(null,"world", e, sc)))
-
-  assert(parsedxml1 \ "world"  sameElements List(Elem(null,"world", e, sc)))
-
-  assert( 
-      (parsedxml2 \ "_") sameElements List(
-        Elem(null,"book", e, sc,
-             Elem(null,"author", e, sc, Text("Peter Buneman")), 
-             Elem(null,"author", e, sc, Text("Dan Suciu")), 
-             Elem(null,"title" , e, sc, Text("Data on ze web"))),
-        Elem(null,"book",e,sc,
-             Elem(null,"author",e,sc,Text("John Mitchell")),
-             Elem(null,"title",e,sc,Text("Foundations of Programming Languages"))))
-  )
-  assert((parsedxml2 \ "author").isEmpty)
-
-  assert( 
-      (parsedxml2 \ "book") sameElements List(
-        Elem(null,"book",e,sc,
-             Elem(null,"author", e, sc, Text("Peter Buneman")), 
-             Elem(null,"author", e, sc, Text("Dan Suciu")), 
-             Elem(null,"title" , e, sc, Text("Data on ze web"))),
-        Elem(null,"book",e,sc,
-             Elem(null,"author", e, sc, Text("John Mitchell")),
-             Elem(null,"title" , e, sc, Text("Foundations of Programming Languages")))
-      )
-  )
-
-  assert( 
-    (parsedxml2 \ "_" \ "_") sameElements List(
-      Elem(null,"author", e, sc, Text("Peter Buneman")), 
-      Elem(null,"author", e, sc, Text("Dan Suciu")), 
-      Elem(null,"title" , e, sc, Text("Data on ze web")),
-      Elem(null,"author", e, sc, Text("John Mitchell")),
-      Elem(null,"title" , e, sc, Text("Foundations of Programming Languages"))
-    )
-  )
-
-  assert( 
-      (parsedxml2 \ "_" \ "author") sameElements List(
-        Elem(null,"author", e, sc, Text("Peter Buneman")), 
-        Elem(null,"author", e, sc, Text("Dan Suciu")), 
-        Elem(null,"author", e, sc, Text("John Mitchell"))
-      )
-  )
-
-  assert((parsedxml2 \ "_" \ "_" \ "author").isEmpty)
-
-  Console.println("xpath \\\\ DESCENDANTS");
-
-  assert( 
-      (parsedxml2 \\ "author") sameElements List(
-        Elem(null,"author", e, sc, Text("Peter Buneman")), 
-        Elem(null,"author", e, sc, Text("Dan Suciu")), 
-        Elem(null,"author", e, sc, Text("John Mitchell"))
-      )
- )
-
-  assert( 
-      (parsedxml2 \\ "title") sameElements List(
-        Elem(null,"title", e, sc, Text("Data on ze web")),
-        Elem(null,"title", e, sc, Text("Foundations of Programming Languages")))
-  )
-
-
-  println(
-    (parsedxml2 \\ "book" ){ n:Node => (n \ "title") xml_== "Data on ze web" }
-  )
-
-  assert( 
-      ((new NodeSeq { val theSeq = List( parsedxml2 ) }) \\ "_") sameElements List(
-        Elem(null,"bib",e,sc,
-             Elem(null,"book",e,sc,
-                  Elem(null, "author", e, sc, Text("Peter Buneman")), 
-                  Elem(null, "author", e, sc, Text("Dan Suciu")), 
-                  Elem(null, "title" , e, sc, Text("Data on ze web"))),
-             Elem(null,"book",e,sc,
-                  Elem(null,"author",e,sc,Text("John Mitchell")),
-                  Elem(null,"title",e,sc,Text("Foundations of Programming Languages")))),
-        Elem(null,"book",e,sc,
-             Elem(null,"author",e,sc,Text("Peter Buneman")), 
-             Elem(null,"author",e,sc,Text("Dan Suciu")), 
-             Elem(null,"title",e,sc,Text("Data on ze web"))),
-        Elem(null,"author",e,sc,Text("Peter Buneman")),
-        Elem(null,"author",e,sc,Text("Dan Suciu")),
-        Elem(null,"title",e,sc,Text("Data on ze web")),
-        Elem(null,"book",e,sc,
-             Elem(null,"author",e,sc,Text("John Mitchell")),
-             Elem(null,"title",e,sc,Text("Foundations of Programming Languages"))),
-        Elem(null,"author",e,sc,Text("John Mitchell")),
-        Elem(null,"title",e,sc,Text("Foundations of Programming Languages"))
-      )
-  )
-
-  // test group node
-  Console println "-- group nodes"
-  val zx1: Node = Group { <a/><b/><c/> }
-  val zy1 = <f>{zx1}</f>
-  Console println zy1.toString()
-
-  val zx2: Node = Group { List(<a/>,zy1,zx1) }
-  Console println zx2.toString()
-
-  val zz1 = <xml:group><a/><b/><c/></xml:group>
-
-  assert(zx1 xml_== zz1)
-  assert(zz1.length == 3)
-
-  // unparsed
-
-  println("attribute value normalization")
-  val xmlAttrValueNorm = "<personne id='p0003' nom='&#x015e;ahingöz' />";
-  {
-    val isrcA       = new InputSource( new StringReader(xmlAttrValueNorm) );
-    val parsedxmlA  = XML.load(isrcA);
-    val c = (parsedxmlA \ "@nom").text.charAt(0);
-    assert(c == '\u015e');
-  }
-  // buraq: if the following test fails with 'character x not allowed', it is
-  //        related to the mutable variable in a closures in MarkupParser.parsecharref
-  {
-    val isr  = scala.io.Source.fromString(xmlAttrValueNorm);
-    val pxmlB  = scala.xml.parsing.ConstructingParser.fromSource(isr,false);
-    val parsedxmlB  = pxmlB.element(TopScope);
-    val c = (parsedxmlB \ "@nom").text.charAt(0);
-    assert(c == '\u015e');
-  }
-
-  // #60 test by round trip
-
-  val p = scala.xml.parsing.ConstructingParser.fromSource(scala.io.Source.fromString("<foo bar:attr='&'/>"),true)
-  val n = p.element(new scala.xml.NamespaceBinding("bar","BAR",scala.xml.TopScope))(0)
-  assert( n.attributes.get("BAR", n, "attr").nonEmpty)
-}
diff --git a/test/files/jvm/xml02.scala b/test/files/jvm/xml02.scala
deleted file mode 100644
index b830a0e..0000000
--- a/test/files/jvm/xml02.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-object Test {
-
-  def main(args: Array[String]) {
-    XmlEx.run()
-    XmlEy.run()
-    XmlPat.run()
-    DodgyNamespace.run()
-  }
-
-  import scala.xml.{NodeSeq, Utility}
-  import NodeSeq.seqToNodeSeq
-
-  val ax = <hello foo="bar" x:foo="baz" xmlns:x="the namespace from outer space">
-             <world/>
-           </hello>
-
-  val cx = <z:hello foo="bar" xmlns:z="z" x:foo="baz" xmlns:x="the namespace from outer space">
-             crazy text world
-           </z:hello>
-
-  val bx = <hello foo="bar&x"></hello>
-
-  object XmlEx {
-
-    def run() {
-      assert((ax \ "@foo") xml_== "bar")              // uses NodeSeq.view!
-      assert((ax \ "@foo") xml_== xml.Text("bar"))    // dto.
-      assert((bx \ "@foo") xml_== "bar&x")            // dto.
-      assert((bx \ "@foo") xml_sameElements List(xml.Text("bar&x")))
-      assert("<hello foo=\"bar&x\"></hello>" == bx.toString)
-    }
-  }
-
-  object XmlEy {
-    def run() {
-      val z = ax \ "@{the namespace from outer space}foo"
-      assert((ax \ "@{the namespace from outer space}foo") xml_== "baz")
-      assert((cx \ "@{the namespace from outer space}foo") xml_== "baz")
- 
-      try {
-        ax \ "@"
-        assert(false)
-      } catch {
-        case _: IllegalArgumentException => 
-      }
-      try {
-        ax \ "@{"
-        assert(false)
-      } catch {
-        case _: IllegalArgumentException => 
-      }
-      try {
-        ax \ "@{}"
-        assert(false)
-      } catch {
-        case _: IllegalArgumentException => 
-      }
- 
-    }
-  }
-
-  object XmlPat {
-    def run() {
-      assert(<hello/> match { case <hello/> => true; case _ => false; })
-      assert(<x:ga xmlns:x="z"/> match { case <x:ga/> => true; case _ => false; });
-      assert(Utility.trim(cx) match { case n @ <hello>crazy text world</hello> if (n \ "@foo") xml_== "bar" => true; })
-      assert(Utility.trim(cx) match { case n @ <z:hello>crazy text world</z:hello> if (n \ "@foo") xml_== "bar" => true; })
-    }
-  }
-
-  object DodgyNamespace {
-    def run() {
-      val x = <flog xmlns:ee="http://ee.com"><foo xmlns:dog="http://dog.com"><dog:cat/></foo></flog>
-      assert(x.toString.matches(".*xmlns:dog=\"http://dog.com\".*"));
-    }
-  }
-
-}
diff --git a/test/files/jvm/xml03syntax.check b/test/files/jvm/xml03syntax.check
deleted file mode 100755
index edcdbdd..0000000
--- a/test/files/jvm/xml03syntax.check
+++ /dev/null
@@ -1,26 +0,0 @@
-true
-true
-true
-<hello>world</hello>
-true
-<hello>1.5</hello>
-true
-<hello>5</hello>
-true
-<hello>true</hello>
-true
-<hello>5</hello>
-true
-<hello>27</hello>
-true
-<hello>1 2 3 4</hello>
-1
-2
-3
-4
-<hello>2 4</hello>
-2
-4
-
-node=<elem key="<b>hello</b>"/>, key=Some(<b>hello</b>)
-node=<elem/>, key=None
diff --git a/test/files/jvm/xml03syntax.scala b/test/files/jvm/xml03syntax.scala
deleted file mode 100644
index 2c93f7c..0000000
--- a/test/files/jvm/xml03syntax.scala
+++ /dev/null
@@ -1,97 +0,0 @@
-import scala.xml._
-
-object Test {
-
-  private def handle[A](x: Node): A = {
-    println(x)
-    x.child(0).asInstanceOf[Atom[A]].data
-  }
-
-  def main(args: Array[String]) {
-    test1()
-    test2()
-    test3()
-  }
-
-  private def test1() {
-    val xNull = <hello>{null}</hello> // these used to be Atom(unit), changed to empty children
-
-    println(xNull.child sameElements Nil)
-
-    val x0 = <hello>{}</hello> // these used to be Atom(unit), changed to empty children
-    val x00 = <hello>{ }</hello> //  dto.
-
-    val xa = <hello>{ "world" }</hello>
-
-
-    println(x0.child sameElements Nil)
-    println(x00.child sameElements Nil)
-    println(handle[String](xa) == "world")
-
-    val xb = <hello>{ 1.5 }</hello>
-
-    println(handle[Double](xb) == 1.5)
-
-    val xc = <hello>{ 5 }</hello>
-
-    println(handle[Int](xc) == 5)
-
-    val xd = <hello>{ true }</hello>
-
-    println(handle[Boolean](xd) == true)
-
-    val xe = <hello>{ 5:Short }</hello>
-
-    println(handle[Short](xe) == (5:Short))
-
-    val xf = <hello>{ val x = 27; x }</hello>
-
-    println(handle[Int](xf) == 27)
-
-    val xg = <hello>{ List(1,2,3,4) }</hello>
-
-    println(xg)
-    for (z <- xg.child) {
-      println(z.toString() + {if (z.isInstanceOf[Text]) "(is text node ' ')" else ""})
-    }
-
-    val xh = <hello>{ for(x <- List(1,2,3,4) if x % 2 == 0) yield x }</hello>
-
-    println(xh)
-    for (z <- xh.child) {
-      println(z.toString() + {if (z.isInstanceOf[Text]) "(is text node ' ')" else ""})
-    }
-    println
-  }
-
-  /** see SVN r13821 (emir): support for <elem key={x:Option[Seq[Node]]} />,
-   *  so that Options can be used for optional attributes.
-   */
-  private def test2() {
-    val x1: Option[Seq[Node]] = Some(<b>hello</b>)
-    val n1 = <elem key={x1} />;
-    println("node="+n1+", key="+n1.attribute("key"))
-
-    val x2: Option[Seq[Node]] = None
-    val n2 = <elem key={x2} />;
-    println("node="+n2+", key="+n2.attribute("key"))
-  }
-
-  private def test3() {
-    // this demonstrates how to handle entities
-    val s = io.Source.fromString("<a> </a>")
-    object parser extends xml.parsing.ConstructingParser(s, false /*ignore ws*/) {
-      override def replacementText(entityName: String): io.Source = {
-        entityName match {
-          case "nbsp" => io.Source.fromString("\u0160");
-          case _ => super.replacementText(entityName);
-        }
-      }
-      nextch; // !!important, to initialize the parser
-    }
-    val parsed = parser.element(TopScope) // parse the source as element
-    // alternatively, we could call document()
-    parsed
-  }
-
-}
diff --git a/test/files/jvm/xml04embed.check b/test/files/jvm/xml04embed.check
deleted file mode 100644
index e71e645..0000000
--- a/test/files/jvm/xml04embed.check
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-}
-{}{}{}
diff --git a/test/files/jvm/xml04embed.scala b/test/files/jvm/xml04embed.scala
deleted file mode 100644
index fa453e4..0000000
--- a/test/files/jvm/xml04embed.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-object Test {
-  def main(args: Array[String]) {
-    val ya = <x>{{</x>
-    println(ya.text)
-    val ua = <x>}}</x>
-    println(ua.text)
-    val za = <x>{{}}{{}}{{}}</x>
-    println(za.text)
-  }
-}
diff --git a/test/files/jvm/xml05.check b/test/files/jvm/xml05.check
index 8d3e803..92ea995 100644
--- a/test/files/jvm/xml05.check
+++ b/test/files/jvm/xml05.check
@@ -1,11 +1,7 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> <city name="San José"/>
 res0: scala.xml.Elem = <city name="San José"/>
 
 scala> 
-
-scala> 
diff --git a/test/files/jvm/xmlattr.check b/test/files/jvm/xmlattr.check
deleted file mode 100644
index a87420d..0000000
--- a/test/files/jvm/xmlattr.check
+++ /dev/null
@@ -1,18 +0,0 @@
-true
-true
-true
-true
-true
-true
-removal of duplicates for unprefixed attributes in append = 1
-true
-true
-true
-true
-true
-true
-true
-true
-true
-<b x="&"/>
-<b x="&"/>
diff --git a/test/files/jvm/xmlattr.scala b/test/files/jvm/xmlattr.scala
deleted file mode 100644
index d214642..0000000
--- a/test/files/jvm/xmlattr.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-import xml.{ NodeSeq, Null, Text, UnprefixedAttribute }
-
-object Test {
-
-  def main(args: Array[String]) {
-    UnprefixedAttributeTest()
-    AttributeWithOptionTest()
-    AttributeOutputTest()
-  }
-  
-  object UnprefixedAttributeTest {
-    def apply() {
-      val x = new UnprefixedAttribute("foo","bar", Null)
-      println(Some(Text("bar")) == x.get("foo"))
-      println(Text("bar") == x("foo"))
-      println(None == x.get("no_foo"))
-      println(null == x("no_foo"))
-      
-      val y = x.remove("foo")
-      println(Null == y)
-
-      val z = new UnprefixedAttribute("foo", null:NodeSeq, x)
-      println(None == z.get("foo"))
-      
-      var appended = x append x append x append x
-      var len = 0; while (appended ne Null) {
-        appended = appended.next
-        len = len + 1
-      }
-      println("removal of duplicates for unprefixed attributes in append = " + len)
-    }
-  }
-
-  object AttributeWithOptionTest {
-    def apply() {
-      val x = new UnprefixedAttribute("foo", Some(Text("bar")), Null)
-
-      println(Some(Text("bar")) == x.get("foo"))
-      println(Text("bar") == x("foo"))
-      println(None == x.get("no_foo"))
-      println(null == x("no_foo"))
-
-      val attr1 = Some(Text("foo value"))
-      val attr2 = None
-      val y = <b foo={attr1} bar={attr2} />
-      println(Some(Text("foo value")) == y.attributes.get("foo"));
-      println(Text("foo value") == y.attributes("foo"))
-      println(None == y.attributes.get("bar"))
-      println(null == y.attributes("bar"))
-
-      val z = new UnprefixedAttribute("foo", None, x)
-      println(None == z.get("foo"))
-    }
-  }
-
-  object AttributeOutputTest {
-    def apply() {
-      println(<b x="&"/>)
-      println(<b x={"&"}/>)
-    }
-  }
-
-}
diff --git a/test/files/jvm/xmlmore.check b/test/files/jvm/xmlmore.check
deleted file mode 100644
index 29f144c..0000000
--- a/test/files/jvm/xmlmore.check
+++ /dev/null
@@ -1,10 +0,0 @@
-<!-- thissa comment -->
-<?this is a pi foo bar = && {{ ?>
-
- "Come, come again, whoever you are, come!
-Heathen, fire worshipper or idolatrous, come!
-Come even if you broke your penitence a hundred times,
-Ours is the portal of hope, come as you are."
-                              Mevlana Celaleddin Rumi
-<foo><br /></foo>
-End Test
diff --git a/test/files/jvm/xmlmore.scala b/test/files/jvm/xmlmore.scala
deleted file mode 100644
index 04d0a6c..0000000
--- a/test/files/jvm/xmlmore.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-object myBreak extends scala.xml.Unparsed("<br />")
-
-object Test extends App {
-  val com = <!-- thissa comment -->
-  val pi  = <?this is a pi foo bar = && {{ ?>
-  val crz = <![CDATA[
- "Come, come again, whoever you are, come!
-Heathen, fire worshipper or idolatrous, come!
-Come even if you broke your penitence a hundred times,
-Ours is the portal of hope, come as you are."
-                              Mevlana Celaleddin Rumi]]>
-
-  val nazim = <foo>{myBreak}</foo> // shows use of unparsed
-                                          
-  Console println com
-  Console println pi
-  Console println crz // this guy will escaped, and rightly so
-  Console println nazim
-  Console println "End Test"
-
-  <x:foo xmlns:x="gaga"/> match {
-    case scala.xml.QNode("gaga","foo",md,child @ _*) =>
-  }
-
-  <x:foo xmlns:x="gaga"/> match {
-    case scala.xml.Node("foo",md,child @ _*) =>
-  }
-
-}
diff --git a/test/files/jvm/xmlpull.scala b/test/files/jvm/xmlpull.scala
deleted file mode 100644
index 9ba7d4c..0000000
--- a/test/files/jvm/xmlpull.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-import scala.xml._
-import scala.xml.pull._
-import scala.io.Source
-
-object Test {
-
-  val src = Source.fromString("<hello><world/>!</hello>")
- 
-  def main(args: Array[String]) {
-    var er = new XMLEventReader(src)
-    er.next match {
-      case EvElemStart(_, "hello", _, _) => //println("1")
-    }
-    er.next match {
-      case EvElemStart(_, "world", _, _) => //println("2")
-    }
-    er.next match {
-      case EvElemEnd(_, "world") => //println("3")
-    }
-    er.next match {
-      case EvText("!") => //println("4")
-    }
-    er.next match {
-      case EvElemEnd(_, "hello") => //println("5")
-    }
-    // you get the picture...
-    er.stop  // allow thread to be garbage-collected
-    //println("6")
-  }
-}
- 
diff --git a/test/files/jvm/xmlstuff.check b/test/files/jvm/xmlstuff.check
deleted file mode 100644
index e122247..0000000
--- a/test/files/jvm/xmlstuff.check
+++ /dev/null
@@ -1,22 +0,0 @@
-NodeSeq
-<result>
-     <title>Blabla</title>
-     <remarks> Hallo Welt. </remarks>
-</result><result>
-     <title>Blubabla</title>
-     <remarks> Hello Blu </remarks>
-</result><result>
-     <title>Blubabla</title>
-     <remarks> rem 2 </remarks>
-</result>
-List(<book><title>Blabla</title></book>)
-<result>
-     <name>John</name>
-     <street> Elm Street</street>
-     <city>Dolphin City</city>
-     <phone where="work"> +41 21 693 68 67</phone>
-     <phone where="mobile">+41 79 602 23 23</phone>
-</result>
-namespaces
-validation - elements
-validation - attributes
diff --git a/test/files/jvm/xmlstuff.scala b/test/files/jvm/xmlstuff.scala
deleted file mode 100644
index 45234c7..0000000
--- a/test/files/jvm/xmlstuff.scala
+++ /dev/null
@@ -1,181 +0,0 @@
-import java.io.StringReader
-import org.xml.sax.InputSource
-import scala.xml.{Node, NodeSeq, Elem, Text, XML}
-
-object Test {
-
-  /** returns true if exception was thrown */
-  def catcher(att: Function1[Unit, scala.xml.MetaData]): Boolean = {
-    var ex = false
-    try {
-      att.apply({})
-    } catch {
-      case scala.xml.MalformedAttributeException(msg) =>
-        println(msg)
-        ex = true
-    }
-    ex
-  }
-
-  def main(args: Array[String]) {
-
-    println("NodeSeq")
-
-    val p = <foo>
-    <bar gt='ga' value="3"/>
-    <baz bazValue="8"/>
-    <bar value="5" gi='go'/>
-    </foo>;
-    
-    val pelems_1 = for (x <- p \ "bar"; y <- p \ "baz" ) yield {
-      Text(x.attributes("value").toString + y.attributes("bazValue").toString+ "!")
-    };
-    val pelems_2 = new NodeSeq { val theSeq = List(Text("38!"),Text("58!")) };
-    assert(pelems_1 sameElements pelems_2)
-
-    assert(Text("8") sameElements (p \\ "@bazValue"))
-
-    val books = 
-      <bks>
-    <book><title>Blabla</title></book>
-    <book><title>Blubabla</title></book>
-    <book><title>Baaaaaaalabla</title></book>
-    </bks>;
-
-    val reviews = 
-      <reviews>
-    <entry><title>Blabla</title>
-    <remarks>
-    Hallo Welt.
-    </remarks>
-    </entry>
-    <entry><title>Blubabla</title>
-    <remarks>
-    Hello Blu
-    </remarks>
-    </entry>
-    <entry><title>Blubabla</title>
-    <remarks>
-    rem 2
-    </remarks>
-    </entry>
-    </reviews>;
-
-    println( new scala.xml.PrettyPrinter(80, 5).formatNodes (
-      for (t <- books \\ "title";
-           r <- reviews \\ "entry"
-           if (r \ "title") xml_== t) yield
-             <result>
-      { t }
-      { r \ "remarks" }
-      </result>
-    ));
-
-    // example
-    println( 
-      for (t @ <book><title>Blabla</title></book> <- new NodeSeq { val theSeq = books.child }.toList)
-      yield t
-    );
-    val phoneBook =  
-      <phonebook>
-    <descr>
-    This is the <b>phonebook</b> of the 
-    <a href="http://acme.org">ACME</a> corporation.
-    </descr>
-    <entry>
-    <name>John</name> 
-    <phone where="work">  +41 21 693 68 67</phone>
-    <phone where="mobile">+41 79 602 23 23</phone>
-    </entry>
-    </phonebook>;
-
-
-    val addrBook =  
-      <addrbook>
-    <descr>
-    This is the <b>addressbook</b> of the 
-    <a href="http://acme.org">ACME</a> corporation.
-    </descr>
-    <entry>
-    <name>John</name> 
-    <street> Elm Street</street>
-    <city>Dolphin City</city>
-    </entry>
-    </addrbook>;
-
-    println( new scala.xml.PrettyPrinter(80, 5).formatNodes (
-      for (t <- addrBook \\ "entry";
-           r <- phoneBook \\ "entry"
-           if (t \ "name") xml_== (r \ "name")) yield
-             <result>
-      { t.child }
-      { r \ "phone" }
-      </result>
-    ));
-
-    
-    /* namespaces */
-    // begin tmp
-    println("namespaces")
-    val cuckoo = <cuckoo xmlns="http://cuckoo.com">
-    <foo/>
-    <bar/>
-    </cuckoo>;
-    assert(cuckoo.namespace == "http://cuckoo.com")
-    for (n <- cuckoo \ "_" ) {
-      //println("n = "+n);
-      //println("n.prefix = "+n.prefix);
-      //.println("n.scope = "+n.scope);
-      assert( n.namespace == "http://cuckoo.com")
-    }
-
-    println("validation - elements")
-    val vtor = new scala.xml.dtd.ElementValidator();
-    {
-      import scala.xml.dtd.ELEMENTS
-      import scala.xml.dtd.ContentModel._
-      vtor.setContentModel(
-	ELEMENTS( 
-	  Sequ(
-	    Letter(ElemName("bar")), 
-	    Star(Letter(ElemName("baz"))) )));
-
-    }
-    assert(vtor( <foo><bar/><baz/><baz/></foo> ))
-
-    {
-      import scala.xml.dtd.MIXED
-      import scala.xml.dtd.ContentModel._
-      
-      vtor.setContentModel(
-        MIXED(
-          Alt(Letter(ElemName("bar")), 
-              Letter(ElemName("baz")), 
-              Letter(ElemName("bal")))));
-    }
-
-    assert(vtor(<foo><bar/><baz/><baz/></foo> ))
-    assert(vtor(<foo>ab<bar/>cd<baz/>ed<baz/>gh</foo> ))
-    assert(!vtor(<foo> <ugha/> <bugha/> </foo> ))
-
-    println("validation - attributes")
-    vtor.setContentModel(null)
-    vtor.setMetaData(List())
-    assert(!vtor( <foo bar="hello"/> ))
-    
-    { 
-      import scala.xml.dtd._ 
-      vtor setMetaData List(AttrDecl("bar", "CDATA", IMPLIED))
-    }
-    assert(!vtor(<foo href="http://foo.com" bar="hello"/>))
-    assert(vtor(<foo bar="hello"/>))
-
-    { 
-      import scala.xml.dtd._
-      vtor.setMetaData(List(AttrDecl("bar","CDATA",REQUIRED)))
-    }
-    assert(!vtor( <foo href="http://foo.com" /> ))
-    assert( vtor( <foo bar="http://foo.com" /> ))
-    
-  }
-}
diff --git a/test/files/lib/jsoup-1.3.1.jar.desired.sha1 b/test/files/lib/jsoup-1.3.1.jar.desired.sha1
new file mode 100644
index 0000000..46fa3da
--- /dev/null
+++ b/test/files/lib/jsoup-1.3.1.jar.desired.sha1
@@ -0,0 +1 @@
+346d3dff4088839d6b4d163efa2892124039d216 ?jsoup-1.3.1.jar
diff --git a/test/files/lib/macro210.jar.desired.sha1 b/test/files/lib/macro210.jar.desired.sha1
new file mode 100644
index 0000000..ff87a55
--- /dev/null
+++ b/test/files/lib/macro210.jar.desired.sha1
@@ -0,0 +1 @@
+3794ec22d9b27f2b179bd34e9b46db771b934ec3 ?macro210.jar
diff --git a/test/files/lib/scalacheck.jar.desired.sha1 b/test/files/lib/scalacheck.jar.desired.sha1
deleted file mode 100644
index 2f15402..0000000
--- a/test/files/lib/scalacheck.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b6f4dbb29f0c2ec1eba682414f60d52fea84f703 *scalacheck.jar
diff --git a/test/files/neg/abstract-class-2.scala b/test/files/neg/abstract-class-2.scala
index be45a09..19f74f3 100644
--- a/test/files/neg/abstract-class-2.scala
+++ b/test/files/neg/abstract-class-2.scala
@@ -1,7 +1,7 @@
 class P {
   trait S1
   val p = new P
-  
+
   trait S2 {
     def f(x: p.S1): Int
   }
@@ -10,5 +10,5 @@ class P {
 class P2 extends P {
   object O2 extends S2 {
     def f(x: S1) = 5
-  }   
+  }
 }
diff --git a/test/files/neg/abstract-explaintypes.check b/test/files/neg/abstract-explaintypes.check
new file mode 100644
index 0000000..e303b45
--- /dev/null
+++ b/test/files/neg/abstract-explaintypes.check
@@ -0,0 +1,15 @@
+abstract-explaintypes.scala:6: error: type mismatch;
+ found   : A
+ required: A.this.T
+  def foo2: T = bar().baz();
+                         ^
+A <: A.this.T?
+false
+abstract-explaintypes.scala:9: error: type mismatch;
+ found   : A
+ required: A.this.T
+  def foo5: T = baz().baz();
+                         ^
+A <: A.this.T?
+false
+two errors found
diff --git a/test/files/neg/abstract-explaintypes.flags b/test/files/neg/abstract-explaintypes.flags
new file mode 100644
index 0000000..b36707c
--- /dev/null
+++ b/test/files/neg/abstract-explaintypes.flags
@@ -0,0 +1 @@
+-explaintypes
diff --git a/test/files/neg/abstract-explaintypes.scala b/test/files/neg/abstract-explaintypes.scala
new file mode 100644
index 0000000..f8ecae1
--- /dev/null
+++ b/test/files/neg/abstract-explaintypes.scala
@@ -0,0 +1,11 @@
+trait A {
+  type T <: A;
+  def baz(): A;
+  def bar(): T;
+  def foo1: A = bar().bar();
+  def foo2: T = bar().baz();
+  def foo3 = bar().baz();
+  def foo4: A = baz().bar();
+  def foo5: T = baz().baz();
+  def foo6 = baz().baz();
+}
diff --git a/test/files/neg/abstract-inaccessible.check b/test/files/neg/abstract-inaccessible.check
index 42b98ac..d56f569 100644
--- a/test/files/neg/abstract-inaccessible.check
+++ b/test/files/neg/abstract-inaccessible.check
@@ -1,13 +1,15 @@
-abstract-inaccessible.scala:5: error: method implementMe in trait YourTrait references private[foo] trait Bippy.
+abstract-inaccessible.scala:5: warning: method implementMe in trait YourTrait references private[foo] trait Bippy.
 Classes which cannot access Bippy may be unable to provide a concrete implementation of implementMe.
     def implementMe(f: Int => (String, Bippy)): Unit
         ^
-abstract-inaccessible.scala:6: error: method overrideMe in trait YourTrait references private[foo] trait Bippy.
+abstract-inaccessible.scala:6: warning: method overrideMe in trait YourTrait references private[foo] trait Bippy.
 Classes which cannot access Bippy may be unable to override overrideMe.
     def overrideMe[T <: Bippy](x: T): T = x
         ^
-abstract-inaccessible.scala:7: error: method overrideMeAlso in trait YourTrait references private[foo] trait Bippy.
+abstract-inaccessible.scala:7: warning: method overrideMeAlso in trait YourTrait references private[foo] trait Bippy.
 Classes which cannot access Bippy may be unable to override overrideMeAlso.
     def overrideMeAlso(x: Map[Int, Set[Bippy]]) = 5
         ^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/abstract-inaccessible.scala b/test/files/neg/abstract-inaccessible.scala
index 7eaaf2d..3c80f30 100644
--- a/test/files/neg/abstract-inaccessible.scala
+++ b/test/files/neg/abstract-inaccessible.scala
@@ -1,6 +1,6 @@
 package foo {
   private[foo] trait Bippy { }
-  
+
   trait YourTrait {
     def implementMe(f: Int => (String, Bippy)): Unit
     def overrideMe[T <: Bippy](x: T): T = x
diff --git a/test/files/neg/abstract-report.check b/test/files/neg/abstract-report.check
index bd550f3..1ffeac0 100644
--- a/test/files/neg/abstract-report.check
+++ b/test/files/neg/abstract-report.check
@@ -7,7 +7,7 @@ it has 12 unimplemented members.
   def isTraversableAgain: Boolean = ???
   def toIterator: Iterator[String] = ???
   def toStream: Stream[String] = ???
-  
+
   // Members declared in scala.collection.TraversableOnce
   def copyToArray[B >: String](xs: Array[B],start: Int,len: Int): Unit = ???
   def exists(p: String => Boolean): Boolean = ???
diff --git a/test/files/neg/abstract-report2.check b/test/files/neg/abstract-report2.check
index 35a99bd..9be3d82 100644
--- a/test/files/neg/abstract-report2.check
+++ b/test/files/neg/abstract-report2.check
@@ -61,7 +61,7 @@ it has 13 unimplemented members.
 
 class Baz[T] extends Collection[T]
       ^
-abstract-report2.scala:11: error: class Dingus needs to be abstract, since:
+abstract-report2.scala:15: error: class Dingus needs to be abstract, since:
 it has 24 unimplemented members.
 /** As seen from class Dingus, the missing signatures are as follows.
  *  For convenience, these are usable as stub implementations.
@@ -78,15 +78,12 @@ it has 24 unimplemented members.
   def retainAll(x$1: java.util.Collection[_]): Boolean = ???
   def toArray[T](x$1: Array[T with Object]): Array[T with Object] = ???
   def toArray(): Array[Object] = ???
-  
+
   // Members declared in scala.collection.GenTraversableOnce
   def isTraversableAgain: Boolean = ???
   def toIterator: Iterator[(Set[Int], String)] = ???
   def toStream: Stream[(Set[Int], String)] = ???
-  
-  // Members declared in scala.math.Ordering
-  def compare(x: List[Int],y: List[Int]): Int = ???
-  
+
   // Members declared in scala.collection.TraversableOnce
   def copyToArray[B >: (Set[Int], String)](xs: Array[B],start: Int,len: Int): Unit = ???
   def exists(p: ((Set[Int], String)) => Boolean): Boolean = ???
@@ -98,6 +95,9 @@ it has 24 unimplemented members.
   def seq: scala.collection.TraversableOnce[(Set[Int], String)] = ???
   def toTraversable: Traversable[(Set[Int], String)] = ???
 
+  // Members declared in Xyz
+  def foo(x: List[Int]): Boolean = ???
+
 class Dingus extends Bippy[String, Set[Int], List[Int]]
       ^
 four errors found
diff --git a/test/files/neg/abstract-report2.scala b/test/files/neg/abstract-report2.scala
index b6327b0..8825340 100644
--- a/test/files/neg/abstract-report2.scala
+++ b/test/files/neg/abstract-report2.scala
@@ -6,6 +6,10 @@ class Bar extends Collection[List[_ <: String]]
 
 class Baz[T] extends Collection[T]
 
-trait Bippy[T1, T2, T3] extends Collection[T1] with TraversableOnce[(T2, String)] with Ordering[T3]
+trait Xyz[T] {
+  def foo(x: T): Boolean
+}
+
+trait Bippy[T1, T2, T3] extends Collection[T1] with TraversableOnce[(T2, String)] with Xyz[T3]
 
 class Dingus extends Bippy[String, Set[Int], List[Int]]
\ No newline at end of file
diff --git a/test/files/neg/accesses.scala b/test/files/neg/accesses.scala
index 2902915..b1df6c0 100644
--- a/test/files/neg/accesses.scala
+++ b/test/files/neg/accesses.scala
@@ -6,7 +6,7 @@ abstract class A {
   private[p1] def f4(): Unit
   protected[p1] def f5(): Unit
 }
-  
+
 abstract class OK1 extends A {
   private[p1] def f2(): Unit
   protected[p2] def f3(): Unit
diff --git a/test/files/neg/accesses2.check b/test/files/neg/accesses2.check
new file mode 100644
index 0000000..66cf9a1
--- /dev/null
+++ b/test/files/neg/accesses2.check
@@ -0,0 +1,12 @@
+accesses2.scala:6: error: overriding method f2 in class A of type ()Int;
+ method f2 has weaker access privileges; it should not be private
+    private def f2(): Int = 1
+                ^
+accesses2.scala:5: error: class B1 needs to be abstract, since method f2 in class A of type ()Int is not defined
+  class B1 extends A {
+        ^
+accesses2.scala:9: error: overriding method f2 in class A of type ()Int;
+ method f2 has weaker access privileges; it should not be private
+    private def f2(): Int = 1
+                ^
+three errors found
diff --git a/test/files/neg/accesses2.scala b/test/files/neg/accesses2.scala
new file mode 100644
index 0000000..c7640f8
--- /dev/null
+++ b/test/files/neg/accesses2.scala
@@ -0,0 +1,11 @@
+package p2 {
+  abstract class A {
+    private[p2] def f2(): Int
+  }
+  class B1 extends A {
+    private def f2(): Int = 1
+  }
+  abstract class B2 extends A {
+    private def f2(): Int = 1
+  }
+}
diff --git a/test/files/neg/ambiguous-float-dots.check b/test/files/neg/ambiguous-float-dots.check
deleted file mode 100644
index 6c21056..0000000
--- a/test/files/neg/ambiguous-float-dots.check
+++ /dev/null
@@ -1,16 +0,0 @@
-ambiguous-float-dots.scala:2: error: This lexical syntax is deprecated.  From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
-  val x0 = 5.
-           ^
-ambiguous-float-dots.scala:6: error: This lexical syntax is deprecated.  From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
-  val x1 = 5.f
-           ^
-ambiguous-float-dots.scala:7: error: Treating numbers with a leading zero as octal is deprecated.
-  val y0 = 055
-           ^
-ambiguous-float-dots.scala:11: error: This lexical syntax is deprecated.  From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
-  1.+(2)
-  ^
-ambiguous-float-dots.scala:12: error: This lexical syntax is deprecated.  From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
-  1. + 2
-  ^
-5 errors found
diff --git a/test/files/neg/ambiguous-float-dots.flags b/test/files/neg/ambiguous-float-dots.flags
deleted file mode 100644
index 65faf53..0000000
--- a/test/files/neg/ambiguous-float-dots.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfatal-warnings -deprecation
\ No newline at end of file
diff --git a/test/files/neg/ambiguous-float-dots.scala b/test/files/neg/ambiguous-float-dots.scala
deleted file mode 100644
index 87e948d..0000000
--- a/test/files/neg/ambiguous-float-dots.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-class A {
-  val x0 = 5.
-}
-
-class B {
-  val x1 = 5.f
-  val y0 = 055
-}
-
-class D {
-  1.+(2)
-  1. + 2
-  1 + 2
-}
diff --git a/test/files/neg/ambiguous-float-dots2.check b/test/files/neg/ambiguous-float-dots2.check
index 8919d2c..40c9b41 100644
--- a/test/files/neg/ambiguous-float-dots2.check
+++ b/test/files/neg/ambiguous-float-dots2.check
@@ -1,10 +1,7 @@
-ambiguous-float-dots2.scala:7: error: Non-zero numbers may not have a leading zero.
-  val y0 = 055
-           ^
 ambiguous-float-dots2.scala:3: error: identifier expected but '}' found.
 }
 ^
-ambiguous-float-dots2.scala:12: error: ';' expected but integer literal found.
+ambiguous-float-dots2.scala:11: error: ';' expected but integer literal found.
   1. + 2
        ^
-three errors found
+two errors found
diff --git a/test/files/neg/ambiguous-float-dots2.scala b/test/files/neg/ambiguous-float-dots2.scala
index 87e948d..b1615c9 100644
--- a/test/files/neg/ambiguous-float-dots2.scala
+++ b/test/files/neg/ambiguous-float-dots2.scala
@@ -4,7 +4,6 @@ class A {
 
 class B {
   val x1 = 5.f
-  val y0 = 055
 }
 
 class D {
diff --git a/test/files/neg/annot-nonconst.check b/test/files/neg/annot-nonconst.check
index b43e58a..5b3da7a 100644
--- a/test/files/neg/annot-nonconst.check
+++ b/test/files/neg/annot-nonconst.check
@@ -8,7 +8,7 @@ make your annotation visible at runtime.  If that is what
 you want, you must write the annotation class in Java.
 class Ann2(value: String) extends annotation.ClassfileAnnotation
       ^
-annot-nonconst.scala:6: error: annotation argument needs to be a constant; found: n
+annot-nonconst.scala:6: error: annotation argument needs to be a constant; found: Test.this.n
   @Length(n) def foo = "foo"
           ^
 annot-nonconst.scala:7: error: annotation argument cannot be null
diff --git a/test/files/neg/any-vs-anyref.check b/test/files/neg/any-vs-anyref.check
index 63c4853..7378f04 100644
--- a/test/files/neg/any-vs-anyref.check
+++ b/test/files/neg/any-vs-anyref.check
@@ -36,12 +36,28 @@ Such types can participate in value classes, but instances
 cannot appear in singleton types or in reference comparisons.
   def foo5(x: Quux with Product)                                    = (x eq "abc") && ("abc" eq x)
                                                                          ^
+any-vs-anyref.scala:10: error: type mismatch;
+ found   : Quux with Product
+ required: AnyRef
+Note that the parents of this type (Quux, Product) extend Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+  def foo5(x: Quux with Product)                                    = (x eq "abc") && ("abc" eq x)
+                                                                                                ^
 any-vs-anyref.scala:11: error: value eq is not a member of Quux with Product{def f: Int}
 Note that the parents of this type (Quux, Product) extend Any, not AnyRef.
 Such types can participate in value classes, but instances
 cannot appear in singleton types or in reference comparisons.
   def foo6(x: Quux with Product { def f: Int })                     = (x eq "abc") && ("abc" eq x)
                                                                          ^
+any-vs-anyref.scala:11: error: type mismatch;
+ found   : Quux with Product{def f: Int}
+ required: AnyRef
+Note that the parents of this type (Quux, Product) extend Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+  def foo6(x: Quux with Product { def f: Int })                     = (x eq "abc") && ("abc" eq x)
+                                                                                                ^
 any-vs-anyref.scala:12: error: type mismatch;
  found   : Quux with Product{def eq(other: String): Boolean}
  required: AnyRef
@@ -61,4 +77,4 @@ any-vs-anyref.scala:27: error: type mismatch;
  required: Quux{def g(x: Int): Int}
   f(new Quux { def g(x: String) = x })
     ^
-9 errors found
+11 errors found
diff --git a/test/files/neg/anytrait.scala b/test/files/neg/anytrait.scala
index 1501486..e76164f 100644
--- a/test/files/neg/anytrait.scala
+++ b/test/files/neg/anytrait.scala
@@ -1,7 +1,7 @@
 trait T extends Any {
 
   var x = 1
-  
+
   { x += 1 }
 
   type T = Int
diff --git a/test/files/neg/anyval-anyref-parent.check b/test/files/neg/anyval-anyref-parent.check
index fe20e5d..8a00fb3 100644
--- a/test/files/neg/anyval-anyref-parent.check
+++ b/test/files/neg/anyval-anyref-parent.check
@@ -3,10 +3,10 @@ trait Foo2 extends AnyVal // fail
       ^
 anyval-anyref-parent.scala:5: error: Any does not have a constructor
 class Bar1 extends Any      // fail
-           ^
-anyval-anyref-parent.scala:6: error: value class needs to have exactly one public val parameter
+                   ^
+anyval-anyref-parent.scala:6: error: value class parameter must be a val and not be private[this]
 class Bar2(x: Int) extends AnyVal // fail
-      ^
+           ^
 anyval-anyref-parent.scala:10: error: illegal inheritance; superclass Any
  is not a subclass of the superclass Object
  of the mixin trait Immutable
diff --git a/test/files/neg/applydynamic_sip.check b/test/files/neg/applydynamic_sip.check
index b508583..2cb2e7f 100644
--- a/test/files/neg/applydynamic_sip.check
+++ b/test/files/neg/applydynamic_sip.check
@@ -4,9 +4,18 @@ applydynamic_sip.scala:7: error: applyDynamic does not support passing a vararg
 applydynamic_sip.scala:8: error: applyDynamicNamed does not support passing a vararg parameter
   qual.sel(arg = a, a2: _*)
        ^
+applydynamic_sip.scala:8: error: not found: value arg
+  qual.sel(arg = a, a2: _*)
+           ^
 applydynamic_sip.scala:9: error: applyDynamicNamed does not support passing a vararg parameter
   qual.sel(arg, arg2 = "a2", a2: _*)
        ^
+applydynamic_sip.scala:9: error: not found: value arg
+  qual.sel(arg, arg2 = "a2", a2: _*)
+           ^
+applydynamic_sip.scala:9: error: not found: value arg2
+  qual.sel(arg, arg2 = "a2", a2: _*)
+                ^
 applydynamic_sip.scala:18: error: type mismatch;
  found   : String("sel")
  required: Int
@@ -28,6 +37,9 @@ error after rewriting to Test.this.bad1.applyDynamicNamed("sel")
 possible cause: maybe a wrong Dynamic method signature?
   bad1.sel(a = 1)
        ^
+applydynamic_sip.scala:20: error: reassignment to val
+  bad1.sel(a = 1)
+             ^
 applydynamic_sip.scala:21: error: type mismatch;
  found   : String("sel")
  required: Int
@@ -50,9 +62,12 @@ error after rewriting to Test.this.bad2.applyDynamicNamed("sel")
 possible cause: maybe a wrong Dynamic method signature?
   bad2.sel(a = 1)
   ^
+applydynamic_sip.scala:31: error: reassignment to val
+  bad2.sel(a = 1)
+             ^
 applydynamic_sip.scala:32: error: Int does not take parameters
 error after rewriting to Test.this.bad2.updateDynamic("sel")
 possible cause: maybe a wrong Dynamic method signature?
   bad2.sel = 1
   ^
-11 errors found
+16 errors found
diff --git a/test/files/neg/array-not-seq.check b/test/files/neg/array-not-seq.check
deleted file mode 100644
index a3a639e..0000000
--- a/test/files/neg/array-not-seq.check
+++ /dev/null
@@ -1,13 +0,0 @@
-array-not-seq.scala:2: error: An Array will no longer match as Seq[_].
-  def f1(x: Any) = x.isInstanceOf[Seq[_]]
-                                 ^
-array-not-seq.scala:4: error: An Array will no longer match as Seq[_].
-    case _: Seq[_]  => true
-          ^
-array-not-seq.scala:16: error: An Array will no longer match as Seq[_].
-    case (Some(_: Seq[_]), Nil, _)        => 1
-                ^
-array-not-seq.scala:17: error: An Array will no longer match as Seq[_].
-    case (None, List(_: List[_], _), _)   => 2
-                    ^
-four errors found
diff --git a/test/files/neg/array-not-seq.flags b/test/files/neg/array-not-seq.flags
deleted file mode 100644
index 4e9f7e4..0000000
--- a/test/files/neg/array-not-seq.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xmigration -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/array-not-seq.scala b/test/files/neg/array-not-seq.scala
deleted file mode 100644
index 5f367bd..0000000
--- a/test/files/neg/array-not-seq.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-object Test {
-  def f1(x: Any) = x.isInstanceOf[Seq[_]]
-  def f2(x: Any) = x match {
-    case _: Seq[_]  => true
-    case _          => false
-  }
-
-  def f3(x: Any) = x match {
-    case _: Array[_]  => true
-    case _            => false
-  }
-  
-  def f4(x: Any) = x.isInstanceOf[Traversable[_]]
-  
-  def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match {
-    case (Some(_: Seq[_]), Nil, _)        => 1
-    case (None, List(_: List[_], _), _)   => 2
-    case _                                => 3
-  }
-
-  def main(args: Array[String]): Unit = {
-    // println(f1(Array(1)))
-    // println(f2(Array(1)))
-    // println(f3(Array(1))
-  }
-}
diff --git a/test/files/neg/bad-advice.check b/test/files/neg/bad-advice.check
new file mode 100644
index 0000000..03b3e4f
--- /dev/null
+++ b/test/files/neg/bad-advice.check
@@ -0,0 +1,6 @@
+bad-advice.scala:4: error: pattern type is incompatible with expected type;
+ found   : Bip.type
+ required: Int
+    case Bip => true
+         ^
+one error found
diff --git a/test/files/disabled/t7020.flags b/test/files/neg/bad-advice.flags
similarity index 100%
copy from test/files/disabled/t7020.flags
copy to test/files/neg/bad-advice.flags
diff --git a/test/files/neg/bad-advice.scala b/test/files/neg/bad-advice.scala
new file mode 100644
index 0000000..b195533
--- /dev/null
+++ b/test/files/neg/bad-advice.scala
@@ -0,0 +1,6 @@
+object Bip
+object Test {
+  def f(x: Int) = x match {
+    case Bip => true
+  }
+}
diff --git a/test/files/neg/case-collision.check b/test/files/neg/case-collision.check
index 4edc6f1..22cf105 100644
--- a/test/files/neg/case-collision.check
+++ b/test/files/neg/case-collision.check
@@ -1,10 +1,12 @@
-case-collision.scala:5: error: Class foo.BIPPY differs only in case from foo.Bippy. Such classes will overwrite one another on case-insensitive filesystems.
+case-collision.scala:5: warning: Class foo.BIPPY differs only in case from foo.Bippy. Such classes will overwrite one another on case-insensitive filesystems.
 class BIPPY
       ^
-case-collision.scala:11: error: Class foo.HyRaX$ differs only in case from foo.Hyrax$. Such classes will overwrite one another on case-insensitive filesystems.
+case-collision.scala:11: warning: Class foo.HyRaX$ differs only in case from foo.Hyrax$. Such classes will overwrite one another on case-insensitive filesystems.
 object HyRaX
        ^
-case-collision.scala:8: error: Class foo.DINGO$ differs only in case from foo.Dingo$. Such classes will overwrite one another on case-insensitive filesystems.
+case-collision.scala:8: warning: Class foo.DINGO$ differs only in case from foo.Dingo$. Such classes will overwrite one another on case-insensitive filesystems.
 object DINGO
        ^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/case-collision.flags b/test/files/neg/case-collision.flags
index 85d8eb2..14c1069 100644
--- a/test/files/neg/case-collision.flags
+++ b/test/files/neg/case-collision.flags
@@ -1 +1 @@
--Xfatal-warnings
+-Ybackend:GenASM -Xfatal-warnings
diff --git a/test/files/neg/case-collision2.check b/test/files/neg/case-collision2.check
new file mode 100644
index 0000000..b8481f4
--- /dev/null
+++ b/test/files/neg/case-collision2.check
@@ -0,0 +1,12 @@
+case-collision2.scala:5: warning: Class foo.BIPPY differs only in case from foo.Bippy. Such classes will overwrite one another on case-insensitive filesystems.
+class BIPPY
+      ^
+case-collision2.scala:8: warning: Class foo.DINGO$ differs only in case from foo.Dingo$. Such classes will overwrite one another on case-insensitive filesystems.
+object DINGO
+       ^
+case-collision2.scala:11: warning: Class foo.HyRaX$ differs only in case from foo.Hyrax$. Such classes will overwrite one another on case-insensitive filesystems.
+object HyRaX
+       ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/case-collision2.flags b/test/files/neg/case-collision2.flags
new file mode 100644
index 0000000..5bfa9da
--- /dev/null
+++ b/test/files/neg/case-collision2.flags
@@ -0,0 +1 @@
+-Ynooptimize -Ybackend:GenBCode -Xfatal-warnings
diff --git a/test/files/neg/case-collision2.scala b/test/files/neg/case-collision2.scala
new file mode 100644
index 0000000..924e330
--- /dev/null
+++ b/test/files/neg/case-collision2.scala
@@ -0,0 +1,12 @@
+package foo
+
+class Bippy
+
+class BIPPY
+
+object Dingo
+object DINGO
+
+case class Hyrax()
+object HyRaX
+
diff --git a/test/files/neg/catch-all.check b/test/files/neg/catch-all.check
index 62f895c..aaf5148 100644
--- a/test/files/neg/catch-all.check
+++ b/test/files/neg/catch-all.check
@@ -1,10 +1,12 @@
-catch-all.scala:2: error: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning.
+catch-all.scala:2: warning: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning.
   try { "warn" } catch { case _ => }
                               ^
-catch-all.scala:4: error: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
+catch-all.scala:4: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
   try { "warn" } catch { case x => }
                               ^
-catch-all.scala:6: error: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
+catch-all.scala:6: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
   try { "warn" } catch { case _: RuntimeException => ; case x => }
                                                             ^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/check-dead.check b/test/files/neg/check-dead.check
index 29601c1..2150a94 100644
--- a/test/files/neg/check-dead.check
+++ b/test/files/neg/check-dead.check
@@ -1,13 +1,15 @@
-check-dead.scala:7: error: dead code following this construct
+check-dead.scala:7: warning: dead code following this construct
   def z1 = y1(throw new Exception)  // should warn
               ^
-check-dead.scala:10: error: dead code following this construct
+check-dead.scala:10: warning: dead code following this construct
   def z2 = y2(throw new Exception)  // should warn
               ^
-check-dead.scala:29: error: dead code following this construct
+check-dead.scala:29: warning: dead code following this construct
     throw new Exception // should warn
     ^
-check-dead.scala:33: error: dead code following this construct
+check-dead.scala:33: warning: dead code following this construct
     throw new Exception // should warn
     ^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/check-dead.scala b/test/files/neg/check-dead.scala
index aadd788..2d5bccb 100644
--- a/test/files/neg/check-dead.scala
+++ b/test/files/neg/check-dead.scala
@@ -5,11 +5,11 @@ object Other {
 class NoDeads {
   def y1(arg: Any) = println("foo")
   def z1 = y1(throw new Exception)  // should warn
-  
+
   def y2[T](arg: T) = println("foo")
   def z2 = y2(throw new Exception)  // should warn
-  
-  def y3[T](arg: => T) = println("foo")    
+
+  def y3[T](arg: => T) = println("foo")
   def z3 = y3(throw new Exception)  // should not warn: by name arg
 
   def nowarn1 = synchronized { throw new Exception } // should not warn: synchronized should be by name
@@ -22,9 +22,9 @@ class NoDeads {
       val i = 10 + 2
       i
   }
-  
+
   def nowarn4: String = Other.oops("don't warn about me") // should not warn
-  
+
   def yeswarn1 = synchronized {
     throw new Exception // should warn
     5 * 5
@@ -32,6 +32,6 @@ class NoDeads {
   def yeswarn2: Int = synchronized {
     throw new Exception // should warn
     return 5
-  }  
+  }
 }
 
diff --git a/test/files/neg/checksensible.check b/test/files/neg/checksensible.check
index d785179..e5f1a38 100644
--- a/test/files/neg/checksensible.check
+++ b/test/files/neg/checksensible.check
@@ -1,100 +1,102 @@
-checksensible.scala:13: error: comparing a fresh object using `eq' will always yield false
+checksensible.scala:13: warning: comparing a fresh object using `eq' will always yield false
   (new AnyRef) eq (new AnyRef)
                ^
-checksensible.scala:14: error: comparing a fresh object using `ne' will always yield true
+checksensible.scala:14: warning: comparing a fresh object using `ne' will always yield true
   (new AnyRef) ne (new AnyRef)
                ^
-checksensible.scala:15: error: comparing a fresh object using `eq' will always yield false
+checksensible.scala:15: warning: comparing a fresh object using `eq' will always yield false
   Shmoopie eq (new AnyRef)
            ^
-checksensible.scala:16: error: comparing a fresh object using `eq' will always yield false
+checksensible.scala:16: warning: comparing a fresh object using `eq' will always yield false
   (Shmoopie: AnyRef) eq (new AnyRef)
                      ^
-checksensible.scala:17: error: comparing a fresh object using `eq' will always yield false
+checksensible.scala:17: warning: comparing a fresh object using `eq' will always yield false
   (new AnyRef) eq Shmoopie
                ^
-checksensible.scala:18: error: comparing a fresh object using `eq' will always yield false
+checksensible.scala:18: warning: comparing a fresh object using `eq' will always yield false
   (new AnyRef) eq null
                ^
-checksensible.scala:19: error: comparing a fresh object using `eq' will always yield false
+checksensible.scala:19: warning: comparing a fresh object using `eq' will always yield false
   null eq new AnyRef
        ^
-checksensible.scala:26: error: comparing values of types Unit and Int using `==' will always yield false
+checksensible.scala:26: warning: comparing values of types Unit and Int using `==' will always yield false
   (c = 1) == 0
           ^
-checksensible.scala:27: error: comparing values of types Int and Unit using `==' will always yield false
+checksensible.scala:27: warning: comparing values of types Int and Unit using `==' will always yield false
   0 == (c = 1)
     ^
-checksensible.scala:29: error: comparing values of types Int and String using `==' will always yield false
+checksensible.scala:29: warning: comparing values of types Int and String using `==' will always yield false
   1 == "abc"
     ^
-checksensible.scala:33: error: comparing values of types Some[Int] and Int using `==' will always yield false
+checksensible.scala:33: warning: comparing values of types Some[Int] and Int using `==' will always yield false
   Some(1) == 1      // as above
           ^
-checksensible.scala:38: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:38: warning: comparing a fresh object using `==' will always yield false
   new AnyRef == 1
              ^
-checksensible.scala:41: error: comparing values of types Int and Boolean using `==' will always yield false
+checksensible.scala:41: warning: comparing values of types Int and Boolean using `==' will always yield false
   1 == (new java.lang.Boolean(true))
     ^
-checksensible.scala:43: error: comparing values of types Int and Boolean using `!=' will always yield true
+checksensible.scala:43: warning: comparing values of types Int and Boolean using `!=' will always yield true
   1 != true
     ^
-checksensible.scala:44: error: comparing values of types Unit and Boolean using `==' will always yield false
+checksensible.scala:44: warning: comparing values of types Unit and Boolean using `==' will always yield false
   () == true
      ^
-checksensible.scala:45: error: comparing values of types Unit and Unit using `==' will always yield true
+checksensible.scala:45: warning: comparing values of types Unit and Unit using `==' will always yield true
   () == ()
      ^
-checksensible.scala:46: error: comparing values of types Unit and Unit using `==' will always yield true
+checksensible.scala:46: warning: comparing values of types Unit and Unit using `==' will always yield true
   () == println
      ^
-checksensible.scala:47: error: comparing values of types Unit and scala.runtime.BoxedUnit using `==' will always yield true
+checksensible.scala:47: warning: comparing values of types Unit and scala.runtime.BoxedUnit using `==' will always yield true
   () == scala.runtime.BoxedUnit.UNIT // these should warn for always being true/false
      ^
-checksensible.scala:48: error: comparing values of types scala.runtime.BoxedUnit and Unit using `!=' will always yield false
+checksensible.scala:48: warning: comparing values of types scala.runtime.BoxedUnit and Unit using `!=' will always yield false
   scala.runtime.BoxedUnit.UNIT != ()
                                ^
-checksensible.scala:51: error: comparing values of types Int and Unit using `!=' will always yield true
+checksensible.scala:51: warning: comparing values of types Int and Unit using `!=' will always yield true
   (1 != println)
      ^
-checksensible.scala:52: error: comparing values of types Int and Symbol using `!=' will always yield true
+checksensible.scala:52: warning: comparing values of types Int and Symbol using `!=' will always yield true
   (1 != 'sym)
      ^
-checksensible.scala:58: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:58: warning: comparing a fresh object using `==' will always yield false
   ((x: Int) => x + 1) == null
                       ^
-checksensible.scala:59: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:59: warning: comparing a fresh object using `==' will always yield false
   Bep == ((_: Int) + 1)
       ^
-checksensible.scala:61: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:61: warning: comparing a fresh object using `==' will always yield false
   new Object == new Object
              ^
-checksensible.scala:62: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:62: warning: comparing a fresh object using `==' will always yield false
   new Object == "abc"
              ^
-checksensible.scala:63: error: comparing a fresh object using `!=' will always yield true
+checksensible.scala:63: warning: comparing a fresh object using `!=' will always yield true
   new Exception() != new Exception()
                   ^
-checksensible.scala:66: error: comparing values of types Int and Null using `==' will always yield false
+checksensible.scala:66: warning: comparing values of types Int and Null using `==' will always yield false
   if (foo.length == null) "plante" else "plante pas"
                  ^
-checksensible.scala:71: error: comparing values of types Bip and Bop using `==' will always yield false
+checksensible.scala:71: warning: comparing values of types Bip and Bop using `==' will always yield false
   (x1 == x2)
       ^
-checksensible.scala:81: error: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==' will always yield false
+checksensible.scala:81: warning: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==' will always yield false
   c3 == z1
      ^
-checksensible.scala:82: error: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==' will always yield false
+checksensible.scala:82: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==' will always yield false
   z1 == c3
      ^
-checksensible.scala:83: error: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=' will always yield true
+checksensible.scala:83: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=' will always yield true
   z1 != c3
      ^
-checksensible.scala:84: error: comparing values of types EqEqRefTest.this.C3 and String using `!=' will always yield true
+checksensible.scala:84: warning: comparing values of types EqEqRefTest.this.C3 and String using `!=' will always yield true
   c3 != "abc"
      ^
-checksensible.scala:95: error: comparing values of types Unit and Int using `!=' will always yield true
+checksensible.scala:95: warning: comparing values of types Unit and Int using `!=' will always yield true
     while ((c = in.read) != -1)
                          ^
-33 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+33 warnings found
+one error found
diff --git a/test/files/neg/checksensible.scala b/test/files/neg/checksensible.scala
index 27ee908..b6083f7 100644
--- a/test/files/neg/checksensible.scala
+++ b/test/files/neg/checksensible.scala
@@ -9,7 +9,7 @@ final class Zing {
 // 7 warnings
 class RefEqTest {
   object Shmoopie
-  
+
   (new AnyRef) eq (new AnyRef)
   (new AnyRef) ne (new AnyRef)
   Shmoopie eq (new AnyRef)
@@ -22,10 +22,10 @@ class RefEqTest {
 // 13 warnings
 class EqEqValTest {
   var c = 0
-  
+
   (c = 1) == 0
   0 == (c = 1)
-  
+
   1 == "abc"
   1 == ("abc": Any) // doesn't warn because an Any may be a boxed Int
   1 == (1: Any)     // as above
@@ -34,12 +34,12 @@ class EqEqValTest {
 
   true == new java.lang.Boolean(true) // none of these should warn
   new java.lang.Boolean(true) == true
-  
+
   new AnyRef == 1
   1 == new AnyRef                 // doesn't warn because it could be...
   1 == (new java.lang.Integer(1)) // ...something like this
   1 == (new java.lang.Boolean(true))
-  
+
   1 != true
   () == true
   () == ()
@@ -47,13 +47,13 @@ class EqEqValTest {
   () == scala.runtime.BoxedUnit.UNIT // these should warn for always being true/false
   scala.runtime.BoxedUnit.UNIT != ()
   (scala.runtime.BoxedUnit.UNIT: java.io.Serializable) != () // shouldn't warn
-  
+
   (1 != println)
   (1 != 'sym)
 }
 
 // 12 warnings
-class EqEqRefTest {  
+class EqEqRefTest {
   val ref = new Bop
   ((x: Int) => x + 1) == null
   Bep == ((_: Int) + 1)
@@ -67,9 +67,9 @@ class EqEqRefTest {
 
   // final classes with default equals
   val x1 = new Bip
-  val x2 = new Bop  
+  val x2 = new Bop
   (x1 == x2)
-  
+
   class C1 { }
   class C2 extends C1 { }
   final class Z1 extends C2 { }
@@ -84,14 +84,14 @@ class EqEqRefTest {
   c3 != "abc"
   // this should warn when feeling chatty
   c3 != z1
-  
+
   // non-warners
   (null: AnyRef) == (null: AnyRef)
   (x1 <= x2)
-  
+
   def main(args: Array[String]) = {
     val in = new java.io.FileInputStream(args(0))
-    var c = 0    
+    var c = 0
     while ((c = in.read) != -1)
       print(c.toChar)
 
diff --git a/test/files/neg/choices.check b/test/files/neg/choices.check
index 3e63f99..b114394 100644
--- a/test/files/neg/choices.check
+++ b/test/files/neg/choices.check
@@ -1,2 +1,2 @@
-partest error: bad flags: -Ylinearizer
+error: bad options: -Yresolve-term-conflict
 one error found
diff --git a/test/files/neg/choices.flags b/test/files/neg/choices.flags
index 5464a18..9718467 100644
--- a/test/files/neg/choices.flags
+++ b/test/files/neg/choices.flags
@@ -1 +1 @@
--Ylinearizer
\ No newline at end of file
+-Yresolve-term-conflict
diff --git a/test/files/neg/choices.scala b/test/files/neg/choices.scala
index fe9236f..8827494 100644
--- a/test/files/neg/choices.scala
+++ b/test/files/neg/choices.scala
@@ -1,5 +1,5 @@
 object Test {
   def main(args: Array[String]): Unit = {
-    
+
   }
 }
diff --git a/test/files/neg/class-of-double-targs.check b/test/files/neg/class-of-double-targs.check
new file mode 100644
index 0000000..f7e2094
--- /dev/null
+++ b/test/files/neg/class-of-double-targs.check
@@ -0,0 +1,4 @@
+class-of-double-targs.scala:2: error: expression of type Class[Int](classOf[scala.Int]) does not take type parameters.
+  classOf[Int][Int]
+              ^
+one error found
diff --git a/test/files/neg/class-of-double-targs.scala b/test/files/neg/class-of-double-targs.scala
new file mode 100644
index 0000000..26a2fa8
--- /dev/null
+++ b/test/files/neg/class-of-double-targs.scala
@@ -0,0 +1,3 @@
+object Test {
+  classOf[Int][Int]
+}
diff --git a/test/files/neg/classmanifests_new_deprecations.check b/test/files/neg/classmanifests_new_deprecations.check
index 4ad4a12..fd1e272 100644
--- a/test/files/neg/classmanifests_new_deprecations.check
+++ b/test/files/neg/classmanifests_new_deprecations.check
@@ -1,25 +1,27 @@
-classmanifests_new_deprecations.scala:2: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:2: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
   def cm1[T: ClassManifest] = ???
            ^
-classmanifests_new_deprecations.scala:3: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:3: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
   def cm2[T](implicit evidence$1: ClassManifest[T]) = ???
                                   ^
-classmanifests_new_deprecations.scala:4: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
   val cm3: ClassManifest[Int] = null
            ^
-classmanifests_new_deprecations.scala:6: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:6: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
   def rcm1[T: scala.reflect.ClassManifest] = ???
             ^
-classmanifests_new_deprecations.scala:7: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:7: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
   def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ???
                                                  ^
-classmanifests_new_deprecations.scala:8: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
   val rcm3: scala.reflect.ClassManifest[Int] = null
                           ^
-classmanifests_new_deprecations.scala:10: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:10: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
   type CM[T] = ClassManifest[T]
                ^
-classmanifests_new_deprecations.scala:15: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:15: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
   type RCM[T] = scala.reflect.ClassManifest[T]
                               ^
-8 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+8 warnings found
+one error found
diff --git a/test/files/neg/compile-time-only-a.check b/test/files/neg/compile-time-only-a.check
new file mode 100644
index 0000000..9bc96f6
--- /dev/null
+++ b/test/files/neg/compile-time-only-a.check
@@ -0,0 +1,79 @@
+compile-time-only-a.scala:10: error: C3
+ at compileTimeOnly("C3") case class C3(x: Int)
+                                  ^
+compile-time-only-a.scala:12: error: C4
+ at compileTimeOnly("C4") case class C4(x: Int)
+                                  ^
+compile-time-only-a.scala:17: error: C5
+  implicit class C5(val x: Int) {
+                 ^
+compile-time-only-a.scala:32: error: C1
+  new C1()
+      ^
+compile-time-only-a.scala:36: error: C2
+  C2
+  ^
+compile-time-only-a.scala:38: error: C3
+  new C3(2)
+      ^
+compile-time-only-a.scala:41: error: C4
+  new C4(2)
+      ^
+compile-time-only-a.scala:45: error: C5
+  2.ext
+  ^
+compile-time-only-a.scala:46: error: C5
+  C5(2)
+  ^
+compile-time-only-a.scala:49: error: C6.x
+  val _ = c6.x
+             ^
+compile-time-only-a.scala:50: error: C6.foo
+  c6.foo
+     ^
+compile-time-only-a.scala:51: error: C6.Foo
+  type Foo = c6.Foo
+                ^
+compile-time-only-a.scala:52: error: C6.y
+  c6.y = c6.y
+     ^
+compile-time-only-a.scala:52: error: C6.y
+  c6.y = c6.y
+            ^
+compile-time-only-a.scala:54: error: C7
+  val c701: (C7, C7) = ???
+            ^
+compile-time-only-a.scala:55: error: C7
+  val c702: (C7 => C7) = ???
+                ^
+compile-time-only-a.scala:56: error: C7
+  val c703: { val x: C7 } = ???
+            ^
+compile-time-only-a.scala:57: error: C7
+  val c704: AnyRef with C7 = ???
+            ^
+compile-time-only-a.scala:60: error: C7
+  val c706: C7 Either C7 = ???
+               ^
+compile-time-only-a.scala:61: error: C7
+  val c707a: List[C7] = ???
+             ^
+compile-time-only-a.scala:63: error: C7
+  val c708a: T forSome { type T <: C7 } = ???
+               ^
+compile-time-only-a.scala:66: error: C8
+  val c709: (C8[Int], C8[C7]) = ???
+            ^
+compile-time-only-a.scala:67: error: C8
+  val c710: (C8[_] => C8[_]) = ???
+                   ^
+compile-time-only-a.scala:74: error: placebo
+class Test {
+      ^
+compile-time-only-a.scala:75: error: placebo
+  @placebo def x = (2: @placebo)
+               ^
+compile-time-only-a.scala:75: error: placebo
+  @placebo def x = (2: @placebo)
+                        ^
+26 errors found
diff --git a/test/files/neg/compile-time-only-a.scala b/test/files/neg/compile-time-only-a.scala
new file mode 100644
index 0000000..533175a
--- /dev/null
+++ b/test/files/neg/compile-time-only-a.scala
@@ -0,0 +1,76 @@
+import scala.annotation.compileTimeOnly
+import scala.language.existentials
+
+ at compileTimeOnly("C1") class C1
+object C1
+
+class C2
+ at compileTimeOnly("C2") object C2
+
+ at compileTimeOnly("C3") case class C3(x: Int)
+
+ at compileTimeOnly("C4") case class C4(x: Int)
+object C4
+
+object pkg {
+  @compileTimeOnly("C5")
+  implicit class C5(val x: Int) {
+    def ext = ???
+  }
+}
+
+class C6(@compileTimeOnly("C6.x") val x: Int) {
+  @compileTimeOnly("C6.foo") def foo = 2
+  @compileTimeOnly("C6.Foo") type Foo = Int
+  @compileTimeOnly("C6.y") var y = 3
+}
+
+ at compileTimeOnly("C7") class C7
+ at compileTimeOnly("C8") class C8[T]
+
+object Test extends App {
+  new C1()
+  C1
+
+  new C2()
+  C2
+
+  new C3(2)
+  C3(2)
+
+  new C4(2)
+  C4(2)
+
+  import pkg._
+  2.ext
+  C5(2)
+
+  val c6 = new C6(2)
+  val _ = c6.x
+  c6.foo
+  type Foo = c6.Foo
+  c6.y = c6.y
+
+  val c701: (C7, C7) = ???
+  val c702: (C7 => C7) = ???
+  val c703: { val x: C7 } = ???
+  val c704: AnyRef with C7 = ???
+  // https://groups.google.com/forum/#!topic/scala-internals/5n07TiCnBZU
+  // val c705: ({ @compileTimeOnly("C7") type C7[T] = List[T] })#C7[_] = ???
+  val c706: C7 Either C7 = ???
+  val c707a: List[C7] = ???
+  val c707b = List[C7]()
+  val c708a: T forSome { type T <: C7 } = ???
+  // https://groups.google.com/forum/#!topic/scala-internals/5n07TiCnBZU
+  // val c708b: T forSome { @compileTimeOnly("C7") type T } = ???
+  val c709: (C8[Int], C8[C7]) = ???
+  val c710: (C8[_] => C8[_]) = ???
+}
+
+ at compileTimeOnly("placebo")
+class placebo extends scala.annotation.StaticAnnotation
+
+@placebo
+class Test {
+  @placebo def x = (2: @placebo)
+}
\ No newline at end of file
diff --git a/test/files/neg/compile-time-only-b.check b/test/files/neg/compile-time-only-b.check
new file mode 100644
index 0000000..50cdf57
--- /dev/null
+++ b/test/files/neg/compile-time-only-b.check
@@ -0,0 +1,13 @@
+compile-time-only-b.scala:9: error: splice must be enclosed within a reify {} block
+  val ignored1 = expr.splice
+                      ^
+compile-time-only-b.scala:10: error: cannot use value except for signatures of macro implementations
+  val ignored2 = expr.value
+                      ^
+compile-time-only-b.scala:13: error: splice must be enclosed within a reify {} block
+  val ignored3 = reify(fortyTwo).splice
+                                 ^
+compile-time-only-b.scala:14: error: cannot use value except for signatures of macro implementations
+  val ignored4 = reify(fortyTwo).value
+                                 ^
+four errors found
diff --git a/test/files/neg/compile-time-only-b.scala b/test/files/neg/compile-time-only-b.scala
new file mode 100644
index 0000000..d5568db
--- /dev/null
+++ b/test/files/neg/compile-time-only-b.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+  // HAHA!!!
+  // no compileTimeOnly errors here, because scalac does constant folding
+  // the type of reify(42) is Expr[42.type]
+  // therefore the type of expr.splice is 42.type, which is then constfolded
+  val expr = reify(42)
+  val ignored1 = expr.splice
+  val ignored2 = expr.value
+
+  val fortyTwo = 42
+  val ignored3 = reify(fortyTwo).splice
+  val ignored4 = reify(fortyTwo).value
+}
\ No newline at end of file
diff --git a/test/files/neg/constructor-init-order.check b/test/files/neg/constructor-init-order.check
new file mode 100644
index 0000000..9ab6ac5
--- /dev/null
+++ b/test/files/neg/constructor-init-order.check
@@ -0,0 +1,9 @@
+constructor-init-order.scala:7: warning: Reference to uninitialized value baz
+  val bar1         = baz     // warn
+                     ^
+constructor-init-order.scala:17: warning: Reference to uninitialized variable baz
+  var bar1         = baz     // warn
+                     ^
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/case-collision.flags b/test/files/neg/constructor-init-order.flags
similarity index 100%
copy from test/files/neg/case-collision.flags
copy to test/files/neg/constructor-init-order.flags
diff --git a/test/files/neg/constructor-init-order.scala b/test/files/neg/constructor-init-order.scala
new file mode 100644
index 0000000..fe8fec8
--- /dev/null
+++ b/test/files/neg/constructor-init-order.scala
@@ -0,0 +1,23 @@
+trait Foo0 {
+  val quux1: String
+  val quux2 = quux1  // warning here is "future work"
+}
+
+class Foo1 extends Foo0 {
+  val bar1         = baz     // warn
+  val bar2         = lazybaz // no warn
+  val bar3         = defbaz  // no warn
+  val baz          = "oops"
+  lazy val lazybaz = "ok"
+  def defbaz       = "ok"
+  val quux1        = "oops"
+}
+
+class Foo2 {
+  var bar1         = baz     // warn
+  var bar2         = lazybaz // no warn
+  var bar3         = defbaz  // no warn
+  var baz          = "oops"
+  lazy val lazybaz = "ok"
+  def defbaz       = "ok"
+}
diff --git a/test/files/neg/cycle-bounds.check b/test/files/neg/cycle-bounds.check
new file mode 100644
index 0000000..d924838
--- /dev/null
+++ b/test/files/neg/cycle-bounds.check
@@ -0,0 +1,4 @@
+cycle-bounds.scala:5: error: illegal cyclic reference involving type T
+class NotOk[T <: Comparable[_ <: T]]
+                            ^
+one error found
diff --git a/test/files/neg/cycle-bounds.flags b/test/files/neg/cycle-bounds.flags
new file mode 100644
index 0000000..ca20f55
--- /dev/null
+++ b/test/files/neg/cycle-bounds.flags
@@ -0,0 +1 @@
+-Ybreak-cycles
diff --git a/test/files/neg/cycle-bounds.scala b/test/files/neg/cycle-bounds.scala
new file mode 100644
index 0000000..0b43bc7
--- /dev/null
+++ b/test/files/neg/cycle-bounds.scala
@@ -0,0 +1,5 @@
+// This should be allowed
+class Ok[T <: Comparable[_ >: T]]
+
+// This is (il)legitimately a cyclic reference
+class NotOk[T <: Comparable[_ <: T]]
diff --git a/test/files/neg/cyclics-import.check b/test/files/neg/cyclics-import.check
index ef355fa..be09fca 100644
--- a/test/files/neg/cyclics-import.check
+++ b/test/files/neg/cyclics-import.check
@@ -3,13 +3,4 @@ Note: this is often due in part to a class depending on a definition nested with
 If applicable, you may wish to try moving some members into another object.
 import User.UserStatus._
             ^
-cyclics-import.scala:12: error: not found: type Value
-    type UserStatus = Value
-                      ^
-cyclics-import.scala:14: error: not found: value Value
-    val Active = Value("1")
-                 ^
-cyclics-import.scala:15: error: not found: value Value
-    val Disabled = Value("2")
-                   ^
-four errors found
+one error found
diff --git a/test/files/neg/dbldef.check b/test/files/neg/dbldef.check
index 3ee6347..b896c4c 100644
--- a/test/files/neg/dbldef.check
+++ b/test/files/neg/dbldef.check
@@ -6,9 +6,7 @@ dbldef.scala:1: error: type mismatch;
  required: Int
 case class test0(x: Int, x: Float)
                  ^
-dbldef.scala:1: error: type mismatch;
- found   : Float
- required: Int
+dbldef.scala:1: error: in class test0, multiple overloaded alternatives of x define default arguments
 case class test0(x: Int, x: Float)
            ^
 three errors found
diff --git a/test/files/neg/delayed-init-ref.check b/test/files/neg/delayed-init-ref.check
index 42ccabe..90bc027 100644
--- a/test/files/neg/delayed-init-ref.check
+++ b/test/files/neg/delayed-init-ref.check
@@ -1,10 +1,16 @@
-delayed-init-ref.scala:17: error: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value
+delayed-init-ref.scala:17: warning: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value
   println(O.vall)     // warn
             ^
-delayed-init-ref.scala:19: error: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value
+delayed-init-ref.scala:19: warning: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value
   println(vall)       // warn
           ^
-delayed-init-ref.scala:40: error: Selecting value foo from trait UserContext, which extends scala.DelayedInit, is likely to yield an uninitialized value
+delayed-init-ref.scala:28: warning: trait DelayedInit in package scala is deprecated: DelayedInit semantics can be surprising. Support for `App` will continue.
+See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0-RC1
+trait Before extends DelayedInit {
+                     ^
+delayed-init-ref.scala:40: warning: Selecting value foo from trait UserContext, which extends scala.DelayedInit, is likely to yield an uninitialized value
     println({locally(()); this}.foo)  // warn (spurious, but we can't discriminate)
                                 ^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/delayed-init-ref.flags b/test/files/neg/delayed-init-ref.flags
index 7949c2a..88a3e4c 100644
--- a/test/files/neg/delayed-init-ref.flags
+++ b/test/files/neg/delayed-init-ref.flags
@@ -1 +1 @@
--Xlint -Xfatal-warnings
+-deprecation -Xlint -Xfatal-warnings
diff --git a/test/files/neg/divergent-implicit.check b/test/files/neg/divergent-implicit.check
index 5f20df1..d4a3ddf 100644
--- a/test/files/neg/divergent-implicit.check
+++ b/test/files/neg/divergent-implicit.check
@@ -4,15 +4,17 @@ divergent-implicit.scala:4: error: type mismatch;
   val x1: String = 1
                    ^
 divergent-implicit.scala:5: error: diverging implicit expansion for type Int => String
-starting with method cast in object Test1
+starting with method $conforms in object Predef
   val x2: String = cast[Int, String](1)
                                     ^
-divergent-implicit.scala:14: error: diverging implicit expansion for type Test2.Baz => Test2.Bar
-starting with method baz2bar in object Test2
+divergent-implicit.scala:14: error: type mismatch;
+ found   : Test2.Foo
+ required: Test2.Bar
   val x: Bar = new Foo
                ^
-divergent-implicit.scala:15: error: diverging implicit expansion for type Test2.Foo => Test2.Bar
-starting with method foo2bar in object Test2
+divergent-implicit.scala:15: error: type mismatch;
+ found   : Test2.Baz
+ required: Test2.Bar
   val y: Bar = new Baz
                ^
 four errors found
diff --git a/test/files/neg/dotless-targs.check b/test/files/neg/dotless-targs.check
new file mode 100644
index 0000000..4aab939
--- /dev/null
+++ b/test/files/neg/dotless-targs.check
@@ -0,0 +1,4 @@
+dotless-targs.scala:2: error: type application is not allowed for postfix operators
+  def f1 = "f1" isInstanceOf[String] // not ok
+                ^
+one error found
diff --git a/test/files/neg/dotless-targs.scala b/test/files/neg/dotless-targs.scala
new file mode 100644
index 0000000..eff63cb
--- /dev/null
+++ b/test/files/neg/dotless-targs.scala
@@ -0,0 +1,5 @@
+class A {
+  def f1 = "f1" isInstanceOf[String] // not ok
+  def f2 = "f2".isInstanceOf[String] // ok
+  def f3 = "f3" toList               // ok
+}
diff --git a/test/files/neg/eta-expand-star-deprecation.check b/test/files/neg/eta-expand-star-deprecation.check
new file mode 100644
index 0000000..a79f0df
--- /dev/null
+++ b/test/files/neg/eta-expand-star-deprecation.check
@@ -0,0 +1,4 @@
+warning: -Yeta-expand-keeps-star is deprecated: This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug.
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/eta-expand-star-deprecation.flags b/test/files/neg/eta-expand-star-deprecation.flags
new file mode 100644
index 0000000..5ac8b63
--- /dev/null
+++ b/test/files/neg/eta-expand-star-deprecation.flags
@@ -0,0 +1 @@
+-Yeta-expand-keeps-star -deprecation -Xfatal-warnings
diff --git a/test/files/neg/eta-expand-star-deprecation.scala b/test/files/neg/eta-expand-star-deprecation.scala
new file mode 100644
index 0000000..5749692
--- /dev/null
+++ b/test/files/neg/eta-expand-star-deprecation.scala
@@ -0,0 +1,8 @@
+object Test {
+  def f[T](xs: T*): Unit = ()
+  def g[T] = f[T] _
+
+  def main(args: Array[String]): Unit = {
+    g(1, 2)
+  }
+}
diff --git a/test/files/neg/exhausting.check b/test/files/neg/exhausting.check
index 0f0d13c..6198496 100644
--- a/test/files/neg/exhausting.check
+++ b/test/files/neg/exhausting.check
@@ -1,25 +1,27 @@
-exhausting.scala:21: error: match may not be exhaustive.
-It would fail on the following input: List(_, _, _)
+exhausting.scala:21: warning: match may not be exhaustive.
+It would fail on the following inputs: List(_), List(_, _, _)
   def fail1[T](xs: List[T]) = xs match {
                               ^
-exhausting.scala:27: error: match may not be exhaustive.
+exhausting.scala:27: warning: match may not be exhaustive.
 It would fail on the following input: Nil
   def fail2[T](xs: List[T]) = xs match {
                               ^
-exhausting.scala:32: error: match may not be exhaustive.
+exhausting.scala:32: warning: match may not be exhaustive.
 It would fail on the following input: List((x: Int forSome x not in (1, 2)))
   def fail3a(xs: List[Int]) = xs match {
                               ^
-exhausting.scala:39: error: match may not be exhaustive.
+exhausting.scala:39: warning: match may not be exhaustive.
 It would fail on the following input: Bar3
   def fail3[T](x: Foo[T]) = x match {
                             ^
-exhausting.scala:47: error: match may not be exhaustive.
+exhausting.scala:47: warning: match may not be exhaustive.
 It would fail on the following inputs: (Bar1, Bar2), (Bar1, Bar3), (Bar2, Bar1), (Bar2, Bar2)
   def fail4[T <: AnyRef](xx: (Foo[T], Foo[T])) = xx match {
                                                  ^
-exhausting.scala:56: error: match may not be exhaustive.
+exhausting.scala:56: warning: match may not be exhaustive.
 It would fail on the following inputs: (Bar1, Bar2), (Bar1, Bar3), (Bar2, Bar1), (Bar2, Bar2)
   def fail5[T](xx: (Foo[T], Foo[T])) = xx match {
                                        ^
-6 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/neg/exhausting.scala b/test/files/neg/exhausting.scala
index 5554ee2..c00569c 100644
--- a/test/files/neg/exhausting.scala
+++ b/test/files/neg/exhausting.scala
@@ -3,7 +3,7 @@ object Test {
   case object Bar1 extends Foo[Int]
   case object Bar2 extends Foo[String]
   case object Bar3 extends Foo[Any]
-  
+
   def ex1[T](xs: List[T]) = xs match {
     case ys: List[_]  => "ok"
   }
@@ -17,7 +17,7 @@ object Test {
     case (_: Foo[_], _: Foo[_]) => ()
   }
 
-  // fails for: ::(_, ::(_, ::(_, _)))
+  // fails for: ::(_, Nil), ::(_, ::(_, ::(_, _))), ...
   def fail1[T](xs: List[T]) = xs match {
     case Nil            => "ok"
     case x :: y :: Nil  => "ok"
diff --git a/test/files/neg/forgot-interpolator.check b/test/files/neg/forgot-interpolator.check
new file mode 100644
index 0000000..8988458
--- /dev/null
+++ b/test/files/neg/forgot-interpolator.check
@@ -0,0 +1,27 @@
+forgot-interpolator.scala:4: warning: `$bippy` looks like an interpolated identifier! Did you forget the interpolator?
+  def f = "Put the $bippy in the $bippy!" // warn 1
+          ^
+forgot-interpolator.scala:14: warning: That looks like an interpolated expression! Did you forget the interpolator?
+  def f = """Put the ${println("bippy")} in the bippy!""" // warn 2
+          ^
+forgot-interpolator.scala:30: warning: `$beppo` looks like an interpolated identifier! Did you forget the interpolator?
+      def f = "$beppo was a marx bros who saw dollars."  // warn 3
+              ^
+forgot-interpolator.scala:34: warning: `$aleppo` looks like an interpolated identifier! Did you forget the interpolator?
+    def f = "$aleppo is a pepper and a city."     // warn 4
+            ^
+forgot-interpolator.scala:47: warning: `$hippo` looks like an interpolated identifier! Did you forget the interpolator?
+    def h = "$hippo takes an implicit"  // warn 6
+            ^
+forgot-interpolator.scala:88: warning: `$groucho` looks like an interpolated identifier! Did you forget the interpolator?
+    def f2 = "I salute $groucho" // warn 7
+             ^
+forgot-interpolator.scala:89: warning: `$dingo` looks like an interpolated identifier! Did you forget the interpolator?
+    def f3 = "I even salute $dingo" // warn 8
+             ^
+forgot-interpolator.scala:90: warning: `$calico` looks like an interpolated identifier! Did you forget the interpolator?
+    def f4 = "I also salute $calico" // warn 9
+             ^
+error: No warnings can be incurred under -Xfatal-warnings.
+8 warnings found
+one error found
diff --git a/test/files/neg/delayed-init-ref.flags b/test/files/neg/forgot-interpolator.flags
similarity index 100%
copy from test/files/neg/delayed-init-ref.flags
copy to test/files/neg/forgot-interpolator.flags
diff --git a/test/files/neg/forgot-interpolator.scala b/test/files/neg/forgot-interpolator.scala
new file mode 100644
index 0000000..a53054d
--- /dev/null
+++ b/test/files/neg/forgot-interpolator.scala
@@ -0,0 +1,93 @@
+class A {
+  val bippy = 123
+
+  def f = "Put the $bippy in the $bippy!" // warn 1
+}
+
+class B {
+  val dingus = 123
+
+  def f = "Put the $bippy in the $bippy!" // no warn
+}
+
+class C {
+  def f = """Put the ${println("bippy")} in the bippy!""" // warn 2
+}
+
+package object test {
+  def aleppo = 9
+  def greppo(n: Int) = ???
+  def zappos(n: Int)(implicit ord: math.Ordering[Int]) = ???
+  def hippo(implicit n: Int) = ???
+}
+
+package test {
+  // not sure if overloading is kosher in pkg obj yet
+  class Doo {
+    def beppo(i: Int) = 8 * i
+    def beppo = 8
+    class Dah extends Doo {
+      def f = "$beppo was a marx bros who saw dollars."  // warn 3
+    }
+  }
+  class E {
+    def f = "$aleppo is a pepper and a city."     // warn 4
+    def k = s"Just an interpolation of $aleppo"   // no warn
+  }
+  class Bar {
+    private def bar = 8
+    if (bar > 8) ???       // use it to avoid extra warning
+  }
+  class Baz extends Bar {
+    def f = "$bar is private, shall we warn just in case?" // no longer a warning, private members aren't inherited!
+  }
+  class G {
+    def g = "$greppo takes an arg"  // no warn
+    def z = "$zappos takes an arg too"  // no warn
+    def h = "$hippo takes an implicit"  // warn 6
+  }
+  class J {
+    def j = 8
+    class J2 {
+      def j(i: Int) = 2 * i
+      def jj = "shadowed $j"  // no warn
+    }
+  }
+  import annotation._
+  @implicitNotFound("No Z in ${A}")   // no warn
+  class Z[A]
+}
+
+
+package inf1 {
+  import scala.annotation.implicitNotFound
+
+  @implicitNotFound(msg = "Cannot construct a collection of type ${To} with elements of type ${Elem} based on a collection of type ${From}.") // no warn
+  trait CannotBuildFrom[-From, -Elem, +To]
+}
+
+package inf2 {
+  @scala.annotation.implicitNotFound(msg = "Cannot construct a collection of type ${To} with elements of type ${Elem} based on a collection of type ${From}.") // no warn
+  trait CannotBuildFrom[-From, -Elem, +To]
+}
+
+package inf3 {
+  @scala.annotation.implicitNotFound("Cannot construct a collection of type ${To} with elements of type ${Elem} based on a collection of type ${From}.") // no warn
+  trait CannotBuildFrom[-From, -Elem, +To]
+}
+
+package curry {
+  class A {
+    def bunko()(x: Int): Int = 5
+    def groucho(): Int = 5
+    def dingo()()()()()(): Int = 5 // kind of nuts this can be evaluated with just 'dingo', but okay
+    def calico[T1, T2]()()(): Int = 5 // even nutsier
+    def palomino[T1, T2]()(y: Int = 5)(): Int = 5 // even nutsier
+
+    def f1 = "I was picked up by the $bunko squad" // no warn
+    def f2 = "I salute $groucho" // warn 7
+    def f3 = "I even salute $dingo" // warn 8
+    def f4 = "I also salute $calico" // warn 9
+    def f5 = "I draw the line at $palomino" // no warn
+  }
+}
diff --git a/test/files/neg/forward.scala b/test/files/neg/forward.scala
index 3774fa8..d5c0851 100644
--- a/test/files/neg/forward.scala
+++ b/test/files/neg/forward.scala
@@ -5,20 +5,20 @@ object Test {
   {
     def f: Int = x;
     val x: Int = f;
-  }    
+  }
   {
     def f: Int = g;
     val x: Int = f;
     def g: Int = x;
-  }    
+  }
   {
     def f: Int = g;
     var x: Int = f;
     def g: Int = x;
-  }    
+  }
   {
     def f: Int = g;
     Console.println("foo");
     def g: Int = f;
-  }    
+  }
 }
diff --git a/test/files/neg/found-req-variance.scala b/test/files/neg/found-req-variance.scala
index fd3b111..024b24c 100644
--- a/test/files/neg/found-req-variance.scala
+++ b/test/files/neg/found-req-variance.scala
@@ -27,7 +27,7 @@ object Test {
   def f7 = Set[Inv[C]]() + new Inv[A]
   def f8 = Set[Inv[C]]() + new Inv[B]
   def f9 = Set[Inv[C]]() + new Inv[C]
-  
+
   def g1 = Set[Multi[A, B, C]]() + new MultiCov[A]
   def g2 = Set[Multi[A, B, C]]() + new MultiCov[B]
   def g3 = Set[Multi[A, B, C]]() + new MultiCov[C]
@@ -43,12 +43,12 @@ object Functions {
   object Set1 {
     def f[T, R](x: FF1[T, R]) = ()
     def h[T, R] : FF1[T, R] = sys.error("")
-    
+
     def ff1 = f[B, B](h[A, A]) // fail
     def ff2 = f[B, B](h[B, A]) // fail
     def ff3 = f[B, B](h[C, A]) // fail
     def ff4 = f[B, B](h[A, B]) // suggest
-    def ff5 = f[B, B](h[B, B]) // ok 
+    def ff5 = f[B, B](h[B, B]) // ok
     def ff6 = f[B, B](h[C, B]) // suggest
     def ff7 = f[B, B](h[A, C]) // suggest
     def ff8 = f[B, B](h[B, C]) // ok
@@ -57,7 +57,7 @@ object Functions {
   object Set2 {
     def f[T, R](x: FF2[T, R]) = ()
     def h[T, R] : FF2[T, R] = sys.error("")
-    
+
     def ff1 = f[B, B](h[A, A]) // suggest
     def ff2 = f[B, B](h[B, A]) // suggest
     def ff3 = f[B, B](h[C, A]) // fail
@@ -74,7 +74,7 @@ object Functions {
 // object TypeAlias {
 //   type LL[T] = List[T]
 //   val LL = List
-//   
+//
 //   def f1 = Set[LL[B]]() + LL[A](new A)
 //   def f2 = Set[LL[B]]() + LL[C](new C)
 // }
@@ -82,12 +82,12 @@ object Functions {
 object Javas {
   def f[T](x: java.util.List[T]) = ()
   def g[T](x: java.util.Comparator[T]) = ()
-  
+
   def g1 = f[AnyRef](new java.util.ArrayList[String] { })
   def g2 = g[String](Ordering.fromLessThan[AnyRef](_.toString < _.toString))
 }
 
-object Misc {  
+object Misc {
   // original motivation
   class Data[A <: AnyVal]
   class MyData extends Data[Int] { }
diff --git a/test/files/neg/gadts1.check b/test/files/neg/gadts1.check
index a5e3e0d..9b7ea55 100644
--- a/test/files/neg/gadts1.check
+++ b/test/files/neg/gadts1.check
@@ -1,8 +1,3 @@
-gadts1.scala:15: error: type mismatch;
- found   : Test.Double
- required: a
-    case NumTerm(n) => c.x = Double(1.0) 
-                                   ^
 gadts1.scala:20: error: Test.Cell[a] does not take parameters
     case Cell[a](x: Int) => c.x = 5
                 ^
@@ -11,4 +6,4 @@ gadts1.scala:20: error: type mismatch;
  required: a
     case Cell[a](x: Int) => c.x = 5
                                   ^
-three errors found
+two errors found
diff --git a/test/files/neg/gadts1.scala b/test/files/neg/gadts1.scala
index 1fb6e57..08403e6 100644
--- a/test/files/neg/gadts1.scala
+++ b/test/files/neg/gadts1.scala
@@ -11,8 +11,8 @@ class IntTerm(n: Int) extends NumTerm(n) with Term[Int]
 
 
 def f[a](t:Term[a], c:Cell[a]): Unit = {
-  t match {  
-    case NumTerm(n) => c.x = Double(1.0) 
+  t match {
+    case NumTerm(n) => c.x = Double(1.0)
   }
   t match {
     // presently testing that this gets past the parser: eventually
diff --git a/test/files/neg/gadts2-strict.check b/test/files/neg/gadts2-strict.check
new file mode 100644
index 0000000..960b35e
--- /dev/null
+++ b/test/files/neg/gadts2-strict.check
@@ -0,0 +1,6 @@
+gadts2-strict.scala:14: error: type mismatch;
+ found   : Test.MyDouble
+ required: a
+      case NumTerm(n) => c.x = MyDouble(1.0)
+                                       ^
+one error found
diff --git a/test/files/neg/gadts2-strict.flags b/test/files/neg/gadts2-strict.flags
new file mode 100644
index 0000000..1924326
--- /dev/null
+++ b/test/files/neg/gadts2-strict.flags
@@ -0,0 +1 @@
+-Xstrict-inference
\ No newline at end of file
diff --git a/test/files/neg/gadts2-strict.scala b/test/files/neg/gadts2-strict.scala
new file mode 100644
index 0000000..54978b7
--- /dev/null
+++ b/test/files/neg/gadts2-strict.scala
@@ -0,0 +1,26 @@
+// A copy of pos/gadts2, which must fail under -Xstrict-inference.
+object Test {
+
+  abstract class Number
+  case class MyInt(n: Int) extends Number
+  case class MyDouble(d: Double) extends Number
+
+  trait Term[a]
+  case class Cell[a](var x: a) extends Term[a]
+  final case class NumTerm(val n: Number) extends Term[Number]
+
+  def f[a](t: Term[a], c: Cell[a]) {
+    t match {
+      case NumTerm(n) => c.x = MyDouble(1.0)
+    }
+  }
+
+  val x: Term[Number] = NumTerm(MyInt(5))
+
+  def main(args: Array[String]) {
+    val cell = Cell[Number](MyInt(6))
+    Console.println(cell)
+    f[Number](new NumTerm(MyInt(5)), cell)
+    Console.println(cell)
+  }
+}
diff --git a/test/files/neg/gadts2.check b/test/files/neg/gadts2.check
new file mode 100644
index 0000000..dc21f3f
--- /dev/null
+++ b/test/files/neg/gadts2.check
@@ -0,0 +1,6 @@
+gadts2.scala:7: error: type mismatch;
+ found   : String("abc")
+ required: B
+  (s1: Super[Any]) match { case Sub(f) => f("abc") }
+                                            ^
+one error found
diff --git a/test/files/neg/gadts2.flags b/test/files/neg/gadts2.flags
new file mode 100644
index 0000000..1924326
--- /dev/null
+++ b/test/files/neg/gadts2.flags
@@ -0,0 +1 @@
+-Xstrict-inference
\ No newline at end of file
diff --git a/test/files/neg/gadts2.scala b/test/files/neg/gadts2.scala
new file mode 100644
index 0000000..156944b
--- /dev/null
+++ b/test/files/neg/gadts2.scala
@@ -0,0 +1,12 @@
+trait Super[+A]
+case class Sub[B](f: B => B) extends Super[B]
+
+object Test extends App {
+  val s1 = Sub((x: Int) => x)
+
+  (s1: Super[Any]) match { case Sub(f) => f("abc") }
+}
+// java.lang.ClassCastException: java.lang.String cannot be cast to java.lang.Integer
+//   at scala.runtime.BoxesRunTime.unboxToInt(BoxesRunTime.java:105)
+//   at Test$$anonfun$1.apply(a.scala:5)
+//   at Test$.delayedEndpoint$Test$1(a.scala:7)
diff --git a/test/files/neg/implicits.scala b/test/files/neg/implicits.scala
index 878d3a7..22633a1 100644
--- a/test/files/neg/implicits.scala
+++ b/test/files/neg/implicits.scala
@@ -4,14 +4,14 @@ class Super
 
 object Super {
   implicit def pos2int(p: Pos): Int = 0
-}  
+}
 
 object Sub extends Super {
   class Plus(x: Any) {
     def +(y: String): String = x.toString + y
   }
   implicit def any2plus(x: Any): Plus = new Plus(x)
-}  
+}
 
 object Test {
   import Super._
@@ -33,7 +33,7 @@ object test2 {
   val set = HEmpty + 3 + "3"
   implicit def select[T](t: HSome[T,_]) = t.head
   implicit def selectTail[L](t: HSome[_,L]) = t.tail
-  
+
   def foo(x: Int) = 3
   foo(set)
 }
@@ -55,7 +55,7 @@ class Mxml {
 
     }
 
-} 
+}
 
 // SI-5316
 class Test3 {
diff --git a/test/files/neg/import-precedence.check b/test/files/neg/import-precedence.check
new file mode 100644
index 0000000..5f99611
--- /dev/null
+++ b/test/files/neg/import-precedence.check
@@ -0,0 +1,19 @@
+import-precedence.scala:18: error: reference to X is ambiguous;
+it is imported twice in the same scope by
+import uniq1.uniq2._
+and import uniq1.X
+    object Y { def f = X }
+                       ^
+import-precedence.scala:61: error: reference to X is ambiguous;
+it is imported twice in the same scope by
+import uniq1.uniq2._
+and import uniq1._
+  object Y { def f = X }
+                     ^
+import-precedence.scala:67: error: reference to X is ambiguous;
+it is imported twice in the same scope by
+import uniq1.uniq2.X
+and import uniq1.X
+  object Y { def f = X }
+                     ^
+three errors found
diff --git a/test/files/neg/import-precedence.scala b/test/files/neg/import-precedence.scala
new file mode 100644
index 0000000..0401635
--- /dev/null
+++ b/test/files/neg/import-precedence.scala
@@ -0,0 +1,68 @@
+package uniq1 {
+  object X
+  package uniq2 {
+    object X
+    package uniq3 {
+      object X
+      package uniq4 {
+        object X
+      }
+    }
+  }
+}
+
+package p1 {
+  import uniq1.X
+  package p2 {
+    import uniq1.uniq2._
+    object Y { def f = X }
+  }
+}
+
+package p2 {
+  import uniq1.uniq2._
+  package p2 {
+    import uniq1.X
+    object Y { def f = X }
+  }
+}
+
+package p3 {
+  import uniq1.X
+  import uniq1.uniq2._
+  object Y { def f = X }
+}
+
+package p4 {
+  import uniq1.uniq2._
+  import uniq1.X
+  object Y { def f = X }
+}
+
+package p5 {
+  import uniq1.X
+  package p6 {
+    import uniq1.uniq2.X
+    object Y { def f = X }
+  }
+}
+
+package p6 {
+  import uniq1._
+  package p5 {
+    import uniq1.uniq2._
+    object Y { def f = X }
+  }
+}
+
+package p7 {
+  import uniq1._
+  import uniq1.uniq2._
+  object Y { def f = X }
+}
+
+package p8 {
+  import uniq1.X
+  import uniq1.uniq2.X
+  object Y { def f = X }
+}
diff --git a/test/files/neg/java-access-neg/J.java b/test/files/neg/java-access-neg/J.java
index 4f20246..b6bc336 100644
--- a/test/files/neg/java-access-neg/J.java
+++ b/test/files/neg/java-access-neg/J.java
@@ -4,11 +4,11 @@ public abstract class J {
   public J() { }
   J(int x1) { }
   protected J(int x1, int x2) { }
-  
+
   abstract void packageAbstract();
   protected abstract void protectedAbstract();
   public abstract void publicAbstract();
-  
+
   void packageConcrete() { return; }
   protected void protectedConcrete() { return; }
   public void publicConcrete() { return; }
diff --git a/test/files/neg/java-access-neg/S2.scala b/test/files/neg/java-access-neg/S2.scala
index dd0af8d..b082bb7 100644
--- a/test/files/neg/java-access-neg/S2.scala
+++ b/test/files/neg/java-access-neg/S2.scala
@@ -12,7 +12,7 @@ class S1 extends J {
   override private[b] def packageAbstract() = ()      // fail
   override protected[b] def protectedAbstract() = ()
   override def publicAbstract() = ()
-  
+
   override private[b] def packageConcrete() = ()      // fail
   override protected[b] def protectedConcrete() = ()
   override def publicConcrete() = ()
@@ -32,7 +32,7 @@ class S3 extends J {
   protected[b] def packageAbstract() = ()   // fail
   protected[b] def protectedAbstract() = ()
   def publicAbstract() = ()
-  
+
   override protected[b] def packageConcrete() = ()    // fail
   override protected[b] def protectedConcrete() = ()
   override def publicConcrete() = ()
@@ -43,7 +43,7 @@ class S4 extends J {
   private[a] def packageAbstract() = ()         // fail
   protected[a] def protectedAbstract() = ()
   def publicAbstract() = ()
-  
+
   override private[a] def packageConcrete() = ()        // fail
   override protected[a] def protectedConcrete() = ()
   override def publicConcrete() = ()
@@ -54,7 +54,7 @@ class S5 extends J {
   def packageAbstract() = ()      // fail
   def protectedAbstract() = ()
   def publicAbstract() = ()
-  
+
   override def packageConcrete() = ()   // fail
   override def protectedConcrete() = ()
   override def publicConcrete() = ()
diff --git a/test/files/neg/javaConversions-2.10-ambiguity.check b/test/files/neg/javaConversions-2.10-ambiguity.check
deleted file mode 100644
index c064a22..0000000
--- a/test/files/neg/javaConversions-2.10-ambiguity.check
+++ /dev/null
@@ -1,6 +0,0 @@
-javaConversions-2.10-ambiguity.scala:8: error: type mismatch;
- found   : scala.collection.concurrent.Map[String,String]
- required: scala.collection.mutable.ConcurrentMap[String,String]
-  assertType[mutable.ConcurrentMap[String, String]](a)
-                                                    ^
-one error found
diff --git a/test/files/neg/javaConversions-2.10-ambiguity.scala b/test/files/neg/javaConversions-2.10-ambiguity.scala
deleted file mode 100644
index e856846..0000000
--- a/test/files/neg/javaConversions-2.10-ambiguity.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import collection.{JavaConversions, mutable, concurrent}
-import JavaConversions._
-import java.util.concurrent.{ConcurrentHashMap => CHM}
-
-object Bar {
-  def assertType[T](t: T) = t
-  val a = new CHM[String, String]() += (("", ""))
-  assertType[mutable.ConcurrentMap[String, String]](a)
-}
-// vim: set et:
diff --git a/test/files/neg/lazy-override.scala b/test/files/neg/lazy-override.scala
index a0f6e3b..f41d7f0 100644
--- a/test/files/neg/lazy-override.scala
+++ b/test/files/neg/lazy-override.scala
@@ -5,7 +5,7 @@
     lazy val y: Int = { print("/*A.y*/"); 2 }
   }
 
-  
+
   class B extends A {
     // lazy overrides strict val
     override lazy val x: Int = { print("/*B.x*/"); 3 }
diff --git a/test/files/neg/lazyvals.scala b/test/files/neg/lazyvals.scala
index bbc4fe1..f92534f 100644
--- a/test/files/neg/lazyvals.scala
+++ b/test/files/neg/lazyvals.scala
@@ -1,7 +1,7 @@
 
 /** Test which should fail compilation */
 class Lazy {
-  
+
   // no abstract lazy values
   lazy val t: Int
 
@@ -31,7 +31,7 @@ object T2 {
     lazy val y: Int = { print("/*A.y*/"); 2 }
   }
 
-  
+
   class B extends A {
     // lazy overrides strict val
     override lazy val x: Int = { print("/*B.x*/"); 3 }
diff --git a/test/files/neg/literate_existentials.check b/test/files/neg/literate_existentials.check
new file mode 100644
index 0000000..c98f976
--- /dev/null
+++ b/test/files/neg/literate_existentials.check
@@ -0,0 +1,4 @@
+literate_existentials.scala:189: error: Cannot prove that Int <:< M forSome { type M <: String }.
+  implicitly[Int <:< (M forSome { type M >: Nothing <: String })] // fails
+            ^
+one error found
diff --git a/test/files/neg/literate_existentials.scala b/test/files/neg/literate_existentials.scala
new file mode 100644
index 0000000..8580347
--- /dev/null
+++ b/test/files/neg/literate_existentials.scala
@@ -0,0 +1,224 @@
+
+object LiterateExistentials {
+
+//  Let's play with Scala's type system a bit.
+//
+//  From adriaanm, we have the following substitution rule, which allows us to
+//  determine whether a type is a subtype of an existential in Scala:
+//
+//
+//  T <: subst(U)    for all i: subst(Li) <: Vi /\ Vi <: subst(Hi)
+//  --------------------------------------------------------------
+//  T <: U forSome {type X1 :> L1 <: H1; ...; type Xn :> Ln <: Hn}
+//
+//  where subst(T) = T.subst(Xi, Vi) // Vi fresh type variables
+//
+//  T is a subtype of some existential if all constraints of the existential hold
+//  after substituting Vi for the existentially quantified type variables Xi,
+//  and T is a subtype of the underlying type U with the same substitution applied.
+//
+//
+//  Since we are not a formal substitution system, we will actually be using
+//  this rule 'backward' in order to determine whether it allows us to
+//  truthfully make claims; In each example, we will start with the proposition
+//  that a type is a subtype of an existential. Then, we will fit the
+//  proposition into the form on the bottom rule by creating a set of bindings
+//  which allow one to be transformed into the other. Next, we will express the
+//  top of the substitution rule in terms of a series of constraints. We will
+//  simplify those constraints until simple inspection can determine whether
+//  they are consistent. From this, we can conclude whether the type system /
+//  environment admit the top of the substitution rule (and thus, the bottom). If
+//  they do, we can say that the proposition is true.
+
+
+// In each case, we will also probe the compiler to see whether _it_ thinks that
+// the proposition holds, using an uncommented implicitly[_ <:< _] line.
+
+
+
+
+//  Proposition: Nothing :< (A forSome { type A >: String <: Any })
+//
+//
+//  Bindings:
+//  T  :=  Nothing
+//  U  := A
+//  X1 := A
+//  L1 := String
+//  H1 := Any
+//
+//  We need:
+//
+//  Nothing <: V1 // (U, which is "A", with V1 substituted for all instances of A)
+//  String <: V1
+//  V1 <: Any
+//
+//  Which simplify to:
+//  V1 >: String <: Any 
+//
+//  That's not inconsistent, so we can say that:
+//  T <: U forSome { type X1 >: L1 <: H1 }
+//  which means (under our mappings):
+//  Nothing <: A forSome { type A >: String <: Any }
+
+// Now to ask the compiler:
+  
+  implicitly[Nothing <:< (A forSome { type A >: String <: Any })]
+
+
+//  Let's try another:
+//
+//  Proposition: Int :< (M forSome { type M >: String <: Any })
+//
+//  Bindings:
+//  T := Int
+//  U := M
+//  X1 := M
+//  L1 := String
+//  H1 := Any
+//
+//  We need:
+//
+//  Int <: V1
+//  String <: V1
+//  V1 <: Any
+//
+//  Which simplify to:
+//
+//  V1 >: lub(Int, String) <: Any 
+//
+//  V1 >: Any <: Any 
+//
+//  We have demonstrated consistency! We can say that:
+//    T :< (U forSome { type U >: L1 <: H1 })
+//  Under our bindings, this is:
+//    Int :< (M forSome { type M >: String <: Any })
+  
+  implicitly[Int <:< (M forSome { type M >: String <: Any })]
+
+
+
+//  Now, let's do a more complicated one:
+//
+//  Proposition: (Nothing, List[String]) <: ((A, B) forSome { type A >: String <: AnyRef; type B >: Null <: List[A] })
+//
+//  Bindings:
+//  T  := (Nothing, List[String])
+//  U  := (A, B)
+//  X1 := A
+//  X2 := B
+//  L1 := String
+//  H1 := AnyRef
+//  L2 := Null
+//  H2 := List[A]
+//
+//  We need:
+//
+//  (Nothing, List[String]) <: (V1, V2)
+//  String <: V1
+//  V1 <: AnyRef
+//  Null <: V2
+//  V2 <: List[V1]
+//
+//  Of course, we can split the first line to make:
+//
+//  Nothing <: V1
+//  List[String] <: V2
+//  String <: V1
+//  V1 <: AnyRef
+//  Null <: V2
+//  V2 <: List[V1]
+//
+//  Which reorder to:
+//
+//  Nothing <: V1
+//  String <: V1
+//  V1 <: AnyRef
+//  List[String] <: V2
+//  Null <: V2
+//  V2 <: List[V1]
+//
+//  Which simplify to:
+//
+//  String <: V1
+//  V1 <: AnyRef
+//  List[String] <: V2
+//  V2 <: List[V1]
+//
+//  String <: V1
+//  V1 <: AnyRef
+//  String <: V1
+//
+//  V1 >: String <: AnyRef
+//
+//  Consistency demonstrated! We can say that:
+//  T <: U forSome {type X1 :> L1 <: H1; type X2 :> L2 <: H2}
+//  meaning:
+//  (Nothing, List[String]) <: ((A, B) forSome { type A >: String <: AnyRef; type B >: Null <: List[A] })
+
+  implicitly[
+    (Nothing, List[String]) <:< ((A, B) forSome { type A >: String <: AnyRef; type B >: Null <: List[A] })
+   ]
+
+
+
+//  Now let's try one that isn't true:
+//
+//  Proposition: Int :< (M forSome { type M >: Nothing <: String })
+//
+//  Bindings:
+//  T  := Int
+//  U  := M
+//  X1 := M
+//  L1 := Nothing
+//  H1 := String
+//
+//  We need:
+//
+//  Int <: V1
+//  Nothing <: V1
+//  V1 <: String
+//
+//  V1 >: Int <: String 
+//
+//  Alas! These are inconsistent! There is no supertype of Int that is a
+//  subtype of String! Our substitution rule does not allow us to claim that our
+//  proposition is true.
+//
+
+  implicitly[Int <:< (M forSome { type M >: Nothing <: String })] // fails
+// The preceding line causes the compiler to generate an error message.
+
+
+
+//  Let's look at one final example, courtesy of paulp.
+//  Proposition: String :< X forSome { type X >: Nothing <: String }
+//
+//  Bindings:
+//  T  := String
+//  U  := X
+//  X1 := X
+//  L1 := Nothing
+//  H1 := String
+//
+//  We need:
+//
+//  String <: V1
+//  Nothing <: V1
+//  V1 <: String
+//
+//  Which simplify to:
+//
+//  String <: V1
+//  V1 <: String
+//
+//  V1 >: String <: String
+//
+//  So, we can say:
+//  T <: U forSome { type X1 >: L1 <: H1 }
+//  which means:
+//  String :< X forSome { type X >: Nothing <: String }
+
+  implicitly[String <:< (X forSome { type X >: Nothing <: String })]
+
+}
diff --git a/test/files/neg/logImplicits.check b/test/files/neg/logImplicits.check
index 54afc6f..270882b 100644
--- a/test/files/neg/logImplicits.check
+++ b/test/files/neg/logImplicits.check
@@ -7,10 +7,10 @@ logImplicits.scala:7: applied implicit conversion from String("abc") to ?{def ma
 logImplicits.scala:15: inferred view from String("abc") to Int = C.this.convert:(p: String("abc"))Int
   math.max(122, x: Int)
                 ^
-logImplicits.scala:19: applied implicit conversion from Int(1) to ?{def ->: ?} = implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A]
+logImplicits.scala:19: applied implicit conversion from Int(1) to ?{def ->: ?} = implicit def ArrowAssoc[A](self: A): ArrowAssoc[A]
   def f = (1 -> 2) + "c"
            ^
-logImplicits.scala:19: applied implicit conversion from (Int, Int) to ?{def +: ?} = implicit def any2stringadd(x: Any): scala.runtime.StringAdd
+logImplicits.scala:19: applied implicit conversion from (Int, Int) to ?{def +: ?} = implicit def any2stringadd[A](self: A): any2stringadd[A]
   def f = (1 -> 2) + "c"
              ^
 logImplicits.scala:22: error: class Un needs to be abstract, since method unimplemented is not defined
diff --git a/test/files/neg/logImplicits.scala b/test/files/neg/logImplicits.scala
index fb5dd8a..caf8271 100644
--- a/test/files/neg/logImplicits.scala
+++ b/test/files/neg/logImplicits.scala
@@ -9,9 +9,9 @@ class B {
 
 object C {
   final val x = "abc"
-  
+
   implicit def convert(p: x.type): Int = 123
- 
+
   math.max(122, x: Int)
 }
 
diff --git a/test/files/neg/lubs.check b/test/files/neg/lubs.check
index 77ab201..affbd49 100644
--- a/test/files/neg/lubs.check
+++ b/test/files/neg/lubs.check
@@ -1,5 +1,10 @@
+lubs.scala:10: error: type mismatch;
+ found   : test1.A[test1.A[Object]]
+ required: test1.A[test1.A[test1.A[Any]]]
+  val x3: A[A[A[Any]]] = f
+                         ^
 lubs.scala:11: error: type mismatch;
- found   : test1.A[test1.A[test1.A[Any]]]
+ found   : test1.A[test1.A[Object]]
  required: test1.A[test1.A[test1.A[test1.A[Any]]]]
   val x4: A[A[A[A[Any]]]] = f
                             ^
@@ -13,4 +18,4 @@ lubs.scala:25: error: type mismatch;
  required: test2.A{type T >: Null <: test2.A{type T >: Null <: test2.A{type T >: Null <: test2.A}}}
   val x4: A { type T >: Null <: A { type T >: Null <: A { type T >: Null <: A } } } = f
                                                                                       ^
-three errors found
+four errors found
diff --git a/test/files/neg/lubs.scala b/test/files/neg/lubs.scala
index 639117f..3524fa4 100644
--- a/test/files/neg/lubs.scala
+++ b/test/files/neg/lubs.scala
@@ -18,7 +18,7 @@ object test2 {
   class D extends A { type T = D }
 
   def f = if (1 == 2) new C else new D
-    
+
   val x1: A { type T } = f
   val x2: A { type T >: Null <: A } = f
   val x3: A { type T >: Null <: A { type T >: Null <: A } } = f
diff --git a/test/files/neg/macro-abort/Macros_1.scala b/test/files/neg/macro-abort/Macros_1.scala
index 676c112..2077e99 100644
--- a/test/files/neg/macro-abort/Macros_1.scala
+++ b/test/files/neg/macro-abort/Macros_1.scala
@@ -1,5 +1,5 @@
 import scala.language.experimental.macros
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def impl(c: Context) = {
diff --git a/test/files/neg/macro-basic-mamdmi.check b/test/files/neg/macro-basic-mamdmi.check
index c7b58d7..61df513 100644
--- a/test/files/neg/macro-basic-mamdmi.check
+++ b/test/files/neg/macro-basic-mamdmi.check
@@ -1,4 +1,5 @@
-Impls_Macros_Test_1.scala:36: error: macro implementation not found: foo (the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
+Impls_Macros_Test_1.scala:33: error: macro implementation not found: quux
+(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
   println(foo(2) + Macros.bar(2) * new Macros().quux(4))
-             ^
+                                                    ^
 one error found
diff --git a/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala b/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala
index 908438c..325bb72 100644
--- a/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala
+++ b/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala
@@ -1,22 +1,19 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def foo(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
-    c.Expr[Int](body)
+    c.Expr[Int](q"$x + 1")
   }
 
-  def bar(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def bar(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
-    c.Expr[Int](body)
+    c.Expr[Int](q"$x + 2")
   }
 
-  def quux(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def quux(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
-    c.Expr[Int](body)
+    c.Expr[Int](q"$x + 3")
   }
 }
 
diff --git a/test/files/neg/macro-blackbox-dynamic-materialization.check b/test/files/neg/macro-blackbox-dynamic-materialization.check
new file mode 100644
index 0000000..f6c73f7
--- /dev/null
+++ b/test/files/neg/macro-blackbox-dynamic-materialization.check
@@ -0,0 +1,4 @@
+Test_2.scala:2: error: I don't like classes that contain integers
+  println(implicitly[Foo[C1]])
+                    ^
+one error found
diff --git a/test/files/neg/macro-blackbox-dynamic-materialization/Macros_1.scala b/test/files/neg/macro-blackbox-dynamic-materialization/Macros_1.scala
new file mode 100644
index 0000000..fc2907b
--- /dev/null
+++ b/test/files/neg/macro-blackbox-dynamic-materialization/Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.blackbox.Context
+import scala.language.experimental.macros
+
+trait Foo[T]
+
+class C1(val x: Int)
+class C2(val x: String)
+
+trait LowPriority {
+  implicit def lessSpecific[T]: Foo[T] = null
+}
+
+object Foo extends LowPriority {
+  implicit def moreSpecific[T]: Foo[T] = macro Macros.impl[T]
+}
+
+object Macros {
+  def impl[T: c.WeakTypeTag](c: Context) = {
+    import c.universe._
+    val tpe = weakTypeOf[T]
+    if (tpe.members.exists(_.info =:= typeOf[Int]))
+      c.abort(c.enclosingPosition, "I don't like classes that contain integers")
+    q"new Foo[$tpe]{ override def toString = ${tpe.toString} }"
+  }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-blackbox-dynamic-materialization/Test_2.scala b/test/files/neg/macro-blackbox-dynamic-materialization/Test_2.scala
new file mode 100644
index 0000000..bf19209
--- /dev/null
+++ b/test/files/neg/macro-blackbox-dynamic-materialization/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  println(implicitly[Foo[C1]])
+  println(implicitly[Foo[C2]])
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-blackbox-extractor.check b/test/files/neg/macro-blackbox-extractor.check
new file mode 100644
index 0000000..4c53ff1
--- /dev/null
+++ b/test/files/neg/macro-blackbox-extractor.check
@@ -0,0 +1,4 @@
+Test_2.scala:3: error: extractor macros can only be whitebox
+    case Extractor(x) => println(x)
+                  ^
+one error found
diff --git a/test/files/neg/macro-blackbox-extractor/Macros_1.scala b/test/files/neg/macro-blackbox-extractor/Macros_1.scala
new file mode 100644
index 0000000..64b6270
--- /dev/null
+++ b/test/files/neg/macro-blackbox-extractor/Macros_1.scala
@@ -0,0 +1,21 @@
+import scala.reflect.macros.blackbox.Context
+import language.experimental.macros
+
+object Extractor {
+  def unapply(x: Int): Any = macro Macros.unapplyImpl
+}
+
+object Macros {
+  def unapplyImpl(c: Context)(x: c.Tree) = {
+    import c.universe._
+    q"""
+      new {
+        class Match(x: Int) {
+          def isEmpty = false
+          def get = x
+        }
+        def unapply(x: Int) = new Match(x)
+      }.unapply($x)
+    """
+  }
+}
diff --git a/test/files/neg/macro-blackbox-extractor/Test_2.scala b/test/files/neg/macro-blackbox-extractor/Test_2.scala
new file mode 100644
index 0000000..41be6f9
--- /dev/null
+++ b/test/files/neg/macro-blackbox-extractor/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+  42 match {
+    case Extractor(x) => println(x)
+  }
+}
diff --git a/test/files/neg/macro-blackbox-fundep-materialization.check b/test/files/neg/macro-blackbox-fundep-materialization.check
new file mode 100644
index 0000000..3c03064
--- /dev/null
+++ b/test/files/neg/macro-blackbox-fundep-materialization.check
@@ -0,0 +1,8 @@
+Test_2.scala:7: error: type mismatch;
+ found   : Iso[Test.Foo,(Int, String, Boolean)]
+ required: Iso[Test.Foo,Nothing]
+Note: (Int, String, Boolean) >: Nothing, but trait Iso is invariant in type U.
+You may wish to define U as -U instead. (SLS 4.5)
+    val equiv = foo(Foo(23, "foo", true))
+                   ^
+one error found
diff --git a/test/files/neg/macro-blackbox-fundep-materialization.flags b/test/files/neg/macro-blackbox-fundep-materialization.flags
new file mode 100644
index 0000000..4c6cdb7
--- /dev/null
+++ b/test/files/neg/macro-blackbox-fundep-materialization.flags
@@ -0,0 +1 @@
+-Xlog-implicits
\ No newline at end of file
diff --git a/test/files/neg/macro-blackbox-fundep-materialization/Macros_1.scala b/test/files/neg/macro-blackbox-fundep-materialization/Macros_1.scala
new file mode 100644
index 0000000..8d77638
--- /dev/null
+++ b/test/files/neg/macro-blackbox-fundep-materialization/Macros_1.scala
@@ -0,0 +1,39 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+trait Iso[T, U] {
+  def to(t : T) : U
+  // def from(u : U) : T
+}
+
+object Iso {
+  implicit def materializeIso[T, U]: Iso[T, U] = macro impl[T, U]
+  def impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Context): c.Expr[Iso[T, U]] = {
+    import c.universe._
+    import definitions._
+    import Flag._
+
+    val sym = c.weakTypeOf[T].typeSymbol
+    if (!sym.isClass || !sym.asClass.isCaseClass) c.abort(c.enclosingPosition, s"$sym is not a case class")
+    val fields = sym.info.decls.toList.collect{ case x: TermSymbol if x.isVal && x.isCaseAccessor => x }
+
+    def mkTpt() = {
+      val core = Ident(TupleClass(fields.length) orElse UnitClass)
+      if (fields.length == 0) core
+      else AppliedTypeTree(core, fields map (f => TypeTree(f.info)))
+    }
+
+    def mkFrom() = {
+      if (fields.length == 0) Literal(Constant(Unit))
+      else Apply(Ident(newTermName("Tuple" + fields.length)), fields map (f => Select(Ident(newTermName("f")), newTermName(f.name.toString.trim))))
+    }
+
+    val evidenceClass = ClassDef(Modifiers(FINAL), newTypeName("$anon"), List(), Template(
+      List(AppliedTypeTree(Ident(newTypeName("Iso")), List(Ident(sym), mkTpt()))),
+      emptyValDef,
+      List(
+        DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(typeNames.EMPTY), typeNames.EMPTY), termNames.CONSTRUCTOR), List())), Literal(Constant(())))),
+        DefDef(Modifiers(), newTermName("to"), List(), List(List(ValDef(Modifiers(PARAM), newTermName("f"), Ident(sym), EmptyTree))), TypeTree(), mkFrom()))))
+    c.Expr[Iso[T, U]](Block(List(evidenceClass), Apply(Select(New(Ident(newTypeName("$anon"))), termNames.CONSTRUCTOR), List())))
+  }
+}
diff --git a/test/files/neg/macro-blackbox-fundep-materialization/Test_2.scala b/test/files/neg/macro-blackbox-fundep-materialization/Test_2.scala
new file mode 100644
index 0000000..40ca1d5
--- /dev/null
+++ b/test/files/neg/macro-blackbox-fundep-materialization/Test_2.scala
@@ -0,0 +1,12 @@
+// see the comments for macroExpand.onDelayed for an explanation of what's tested here
+object Test extends App {
+  case class Foo(i: Int, s: String, b: Boolean)
+  def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
+
+  {
+    val equiv = foo(Foo(23, "foo", true))
+    def typed[T](t: => T) {}
+    typed[(Int, String, Boolean)](equiv)
+    println(equiv)
+  }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-blackbox-structural.check b/test/files/neg/macro-blackbox-structural.check
new file mode 100644
index 0000000..86a2185
--- /dev/null
+++ b/test/files/neg/macro-blackbox-structural.check
@@ -0,0 +1,4 @@
+Test_2.scala:4: error: value x is not a member of Any
+  println(Macros.foo.x)
+                     ^
+one error found
diff --git a/test/files/neg/macro-blackbox-structural/Impls_Macros_1.scala b/test/files/neg/macro-blackbox-structural/Impls_Macros_1.scala
new file mode 100644
index 0000000..a86a26d
--- /dev/null
+++ b/test/files/neg/macro-blackbox-structural/Impls_Macros_1.scala
@@ -0,0 +1,15 @@
+import scala.language.experimental.macros
+
+object Macros {
+  def impl(c: scala.reflect.macros.blackbox.Context) = {
+    import c.universe._
+    q"""
+      trait Foo {
+        def x = 2
+      }
+      new Foo {}
+    """
+  }
+
+  def foo: Any = macro impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-blackbox-structural/Test_2.scala b/test/files/neg/macro-blackbox-structural/Test_2.scala
new file mode 100644
index 0000000..ea6a817
--- /dev/null
+++ b/test/files/neg/macro-blackbox-structural/Test_2.scala
@@ -0,0 +1,5 @@
+import Macros._
+
+object Test extends App {
+  println(Macros.foo.x)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-bundle-abstract.check b/test/files/neg/macro-bundle-abstract.check
new file mode 100644
index 0000000..1e51a00
--- /dev/null
+++ b/test/files/neg/macro-bundle-abstract.check
@@ -0,0 +1,4 @@
+macro-bundle-abstract.scala:10: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter
+  def foo = macro Bundle.impl
+                  ^
+one error found
diff --git a/test/files/neg/macro-bundle-abstract.scala b/test/files/neg/macro-bundle-abstract.scala
new file mode 100644
index 0000000..0afeaaf
--- /dev/null
+++ b/test/files/neg/macro-bundle-abstract.scala
@@ -0,0 +1,11 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+abstract class Bundle(c: Context) {
+  def deferred: Int
+  def impl = ???
+}
+
+object Macros {
+  def foo = macro Bundle.impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-bundle-ambiguous.check b/test/files/neg/macro-bundle-ambiguous.check
new file mode 100644
index 0000000..8430496
--- /dev/null
+++ b/test/files/neg/macro-bundle-ambiguous.check
@@ -0,0 +1,5 @@
+macro-bundle-ambiguous.scala:13: error: macro implementation reference is ambiguous: makes sense both as
+a macro bundle method reference and a vanilla object method reference
+  def foo: Unit = macro Macros.impl
+                               ^
+one error found
diff --git a/test/files/neg/macro-bundle-ambiguous.scala b/test/files/neg/macro-bundle-ambiguous.scala
new file mode 100644
index 0000000..92c359d
--- /dev/null
+++ b/test/files/neg/macro-bundle-ambiguous.scala
@@ -0,0 +1,14 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+class Macros(val c: Context) {
+  def impl = ???
+}
+
+object Macros {
+  def impl(c: Context) = ???
+}
+
+object Test extends App {
+  def foo: Unit = macro Macros.impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-bundle-need-qualifier.check b/test/files/neg/macro-bundle-need-qualifier.check
new file mode 100644
index 0000000..6a74ee6
--- /dev/null
+++ b/test/files/neg/macro-bundle-need-qualifier.check
@@ -0,0 +1,4 @@
+macro-bundle-need-qualifier.scala:10: error: not found: value impl
+  def foo: Any = macro impl
+                       ^
+one error found
diff --git a/test/files/neg/macro-bundle-need-qualifier.scala b/test/files/neg/macro-bundle-need-qualifier.scala
new file mode 100644
index 0000000..0d021e3
--- /dev/null
+++ b/test/files/neg/macro-bundle-need-qualifier.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+class Macros(val c: Context) {
+  import c.universe._
+  def impl = q"()"
+}
+
+object Macros {
+  def foo: Any = macro impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-bundle-noncontext.check b/test/files/neg/macro-bundle-noncontext.check
new file mode 100644
index 0000000..bb5d085
--- /dev/null
+++ b/test/files/neg/macro-bundle-noncontext.check
@@ -0,0 +1,4 @@
+macro-bundle-noncontext.scala:8: error: not found: value Bundle
+  def foo = Bundle.impl
+            ^
+one error found
diff --git a/test/files/neg/macro-bundle-noncontext.scala b/test/files/neg/macro-bundle-noncontext.scala
new file mode 100644
index 0000000..c228827
--- /dev/null
+++ b/test/files/neg/macro-bundle-noncontext.scala
@@ -0,0 +1,9 @@
+import scala.language.experimental.macros
+
+class Bundle {
+  def impl = ???
+}
+
+object Macros {
+  def foo = Bundle.impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-bundle-nonpublic-c.check b/test/files/neg/macro-bundle-nonpublic-c.check
new file mode 100644
index 0000000..1dfcee5
--- /dev/null
+++ b/test/files/neg/macro-bundle-nonpublic-c.check
@@ -0,0 +1,4 @@
+macro-bundle-nonpublic-c.scala:6: error: private value c escapes its defining scope as part of type Macros.this.c.universe.Literal
+  def impl = q"()"
+      ^
+one error found
diff --git a/test/files/neg/macro-bundle-nonpublic-c.scala b/test/files/neg/macro-bundle-nonpublic-c.scala
new file mode 100644
index 0000000..86a2039
--- /dev/null
+++ b/test/files/neg/macro-bundle-nonpublic-c.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+class Macros(c: Context) {
+  import c.universe._
+  def impl = q"()"
+}
+
+object Macros {
+  def foo: Any = macro Macros.impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-bundle-nonpublic-impl.check b/test/files/neg/macro-bundle-nonpublic-impl.check
new file mode 100644
index 0000000..7a4e151
--- /dev/null
+++ b/test/files/neg/macro-bundle-nonpublic-impl.check
@@ -0,0 +1,4 @@
+macro-bundle-nonpublic-impl.scala:10: error: bundle implementation must be public
+  def foo: Any = macro Macros.impl
+                              ^
+one error found
diff --git a/test/files/neg/macro-bundle-nonpublic-impl.scala b/test/files/neg/macro-bundle-nonpublic-impl.scala
new file mode 100644
index 0000000..5857cc6
--- /dev/null
+++ b/test/files/neg/macro-bundle-nonpublic-impl.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+class Macros(val c: Context) {
+  import c.universe._
+  private def impl = q"()"
+}
+
+object Macros {
+  def foo: Any = macro Macros.impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-bundle-nonstatic.check b/test/files/neg/macro-bundle-nonstatic.check
new file mode 100644
index 0000000..36bccc5
--- /dev/null
+++ b/test/files/neg/macro-bundle-nonstatic.check
@@ -0,0 +1,13 @@
+macro-bundle-nonstatic.scala:12: error: value Bundle is not a member of object Module
+  def foo1 = macro Module.Bundle.impl
+                          ^
+macro-bundle-nonstatic.scala:13: error: value Bundle is not a member of Module
+  def foo2 = macro new Module().Bundle.impl
+                                ^
+macro-bundle-nonstatic.scala:17: error: macro bundles must be static
+  def foo = macro Bundle.impl
+                  ^
+macro-bundle-nonstatic.scala:23: error: macro bundles must be static
+  def foo = macro Bundle.impl
+                  ^
+four errors found
diff --git a/test/files/neg/macro-bundle-nonstatic.scala b/test/files/neg/macro-bundle-nonstatic.scala
new file mode 100644
index 0000000..dfba796
--- /dev/null
+++ b/test/files/neg/macro-bundle-nonstatic.scala
@@ -0,0 +1,36 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.whitebox.Context
+
+class Module {
+  class Bundle(val c: Context) {
+    import c.universe._
+    def impl = q"()"
+  }
+}
+
+object Macros1 {
+  def foo1 = macro Module.Bundle.impl
+  def foo2 = macro new Module().Bundle.impl
+}
+
+object Macros2 extends Module {
+  def foo = macro Bundle.impl
+}
+
+object Macros3 {
+  val module = new Module
+  import module._
+  def foo = macro Bundle.impl
+}
+
+object Module {
+  class GoodBundle(val c: Context) {
+    import c.universe._
+    def impl = q"()"
+  }
+}
+
+object Macros4 {
+  import Module._
+  def foo: Unit = macro GoodBundle.impl
+}
diff --git a/test/files/neg/macro-bundle-object.check b/test/files/neg/macro-bundle-object.check
new file mode 100644
index 0000000..b880010
--- /dev/null
+++ b/test/files/neg/macro-bundle-object.check
@@ -0,0 +1,8 @@
+macro-bundle-object.scala:10: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Nothing]
+ or      : (c: scala.reflect.macros.blackbox.Context): c.Tree
+ found   : : Nothing
+number of parameter sections differ
+  def foo = macro Bundle.impl
+                         ^
+one error found
diff --git a/test/files/neg/macro-bundle-object.scala b/test/files/neg/macro-bundle-object.scala
new file mode 100644
index 0000000..6e1eec1
--- /dev/null
+++ b/test/files/neg/macro-bundle-object.scala
@@ -0,0 +1,11 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Bundle {
+  val c: Context = ???
+  def impl = ???
+}
+
+object Macros {
+  def foo = macro Bundle.impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-bundle-overloaded.check b/test/files/neg/macro-bundle-overloaded.check
new file mode 100644
index 0000000..499068a
--- /dev/null
+++ b/test/files/neg/macro-bundle-overloaded.check
@@ -0,0 +1,4 @@
+macro-bundle-overloaded.scala:11: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter
+  def foo = macro Bundle.impl
+                  ^
+one error found
diff --git a/test/files/neg/macro-bundle-overloaded.scala b/test/files/neg/macro-bundle-overloaded.scala
new file mode 100644
index 0000000..a4bc66f
--- /dev/null
+++ b/test/files/neg/macro-bundle-overloaded.scala
@@ -0,0 +1,12 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.{Context => BlackboxContext}
+import scala.reflect.macros.whitebox.{Context => WhiteboxContext}
+
+class Bundle(val c: BlackboxContext) {
+  def this(c: WhiteboxContext) = this(c: BlackboxContext)
+  def impl = ???
+}
+
+object Macros {
+  def foo = macro Bundle.impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-bundle-polymorphic.check b/test/files/neg/macro-bundle-polymorphic.check
new file mode 100644
index 0000000..60a4d59
--- /dev/null
+++ b/test/files/neg/macro-bundle-polymorphic.check
@@ -0,0 +1,19 @@
+macro-bundle-polymorphic.scala:36: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter
+  def black1: Any = macro BlackboxBundle1.impl
+                          ^
+macro-bundle-polymorphic.scala:37: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter
+  def black2: Any = macro BlackboxBundle2.impl
+                          ^
+macro-bundle-polymorphic.scala:38: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter
+  def black3: Any = macro BlackboxBundle3.impl
+                          ^
+macro-bundle-polymorphic.scala:40: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter
+  def white1: Any = macro WhiteboxBundle1.impl
+                          ^
+macro-bundle-polymorphic.scala:41: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter
+  def white2: Any = macro WhiteboxBundle2.impl
+                          ^
+macro-bundle-polymorphic.scala:42: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter
+  def white3: Any = macro WhiteboxBundle3.impl
+                          ^
+6 errors found
diff --git a/test/files/neg/macro-bundle-polymorphic.scala b/test/files/neg/macro-bundle-polymorphic.scala
new file mode 100644
index 0000000..2ba91aa
--- /dev/null
+++ b/test/files/neg/macro-bundle-polymorphic.scala
@@ -0,0 +1,43 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.{Context => BlackboxContext}
+import scala.reflect.macros.whitebox.{Context => WhiteboxContext}
+
+class BlackboxBundle1[T](val c: BlackboxContext) {
+  import c.universe._
+  def impl = q"()"
+}
+
+class BlackboxBundle2[T <: BlackboxContext](val c: T) {
+  import c.universe._
+  def impl = q"()"
+}
+
+class BlackboxBundle3[T <: BlackboxContext, U <: T](val c: U) {
+  import c.universe._
+  def impl = q"()"
+}
+
+class WhiteboxBundle1[T](val c: WhiteboxContext) {
+  import c.universe._
+  def impl = q"()"
+}
+
+class WhiteboxBundle2[T <: WhiteboxContext](val c: T) {
+  import c.universe._
+  def impl = q"()"
+}
+
+class WhiteboxBundle3[T <: WhiteboxContext, U <: T](val c: U) {
+  import c.universe._
+  def impl = q"()"
+}
+
+object Macros {
+  def black1: Any = macro BlackboxBundle1.impl
+  def black2: Any = macro BlackboxBundle2.impl
+  def black3: Any = macro BlackboxBundle3.impl
+
+  def white1: Any = macro WhiteboxBundle1.impl
+  def white2: Any = macro WhiteboxBundle2.impl
+  def white3: Any = macro WhiteboxBundle3.impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-bundle-priority-bundle.check b/test/files/neg/macro-bundle-priority-bundle.check
new file mode 100644
index 0000000..c6cea72
--- /dev/null
+++ b/test/files/neg/macro-bundle-priority-bundle.check
@@ -0,0 +1,8 @@
+macro-bundle-priority-bundle.scala:13: error: bundle implementation has incompatible shape:
+ required: : Macros.this.c.Expr[Unit]
+ or      : : Macros.this.c.Tree
+ found   : (x: Macros.this.c.Tree): Nothing
+number of parameter sections differ
+  def foo: Unit = macro Macros.impl
+                               ^
+one error found
diff --git a/test/files/neg/macro-bundle-priority-bundle.scala b/test/files/neg/macro-bundle-priority-bundle.scala
new file mode 100644
index 0000000..ce831a7
--- /dev/null
+++ b/test/files/neg/macro-bundle-priority-bundle.scala
@@ -0,0 +1,14 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+class Macros(val c: Context) {
+  def impl(x: c.Tree) = ???
+}
+
+object Macros {
+  def impl(c: Context)(x: c.Tree) = ???
+}
+
+object Test extends App {
+  def foo: Unit = macro Macros.impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-bundle-priority-nonbundle.check b/test/files/neg/macro-bundle-priority-nonbundle.check
new file mode 100644
index 0000000..0d03b50
--- /dev/null
+++ b/test/files/neg/macro-bundle-priority-nonbundle.check
@@ -0,0 +1,8 @@
+macro-bundle-priority-nonbundle.scala:13: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.whitebox.Context): c.Expr[Unit]
+ or      : (c: scala.reflect.macros.whitebox.Context): c.Tree
+ found   : (c: scala.reflect.macros.whitebox.Context)(x: c.Tree): Nothing
+number of parameter sections differ
+  def foo: Unit = macro Macros.impl
+                               ^
+one error found
diff --git a/test/files/neg/macro-bundle-priority-nonbundle.scala b/test/files/neg/macro-bundle-priority-nonbundle.scala
new file mode 100644
index 0000000..8dc00f6
--- /dev/null
+++ b/test/files/neg/macro-bundle-priority-nonbundle.scala
@@ -0,0 +1,14 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+class Macros(val c: scala.reflect.api.Universe) {
+  def impl(x: c.Tree) = ???
+}
+
+object Macros {
+  def impl(c: Context)(x: c.Tree) = ???
+}
+
+object Test extends App {
+  def foo: Unit = macro Macros.impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-bundle-trait.check b/test/files/neg/macro-bundle-trait.check
new file mode 100644
index 0000000..869c67e
--- /dev/null
+++ b/test/files/neg/macro-bundle-trait.check
@@ -0,0 +1,4 @@
+macro-bundle-trait.scala:10: error: not found: value Bundle
+  def foo = macro Bundle.impl
+                  ^
+one error found
diff --git a/test/files/neg/macro-bundle-trait.scala b/test/files/neg/macro-bundle-trait.scala
new file mode 100644
index 0000000..2aa6321
--- /dev/null
+++ b/test/files/neg/macro-bundle-trait.scala
@@ -0,0 +1,11 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+trait Bundle {
+  val c: Context = ???
+  def impl = ???
+}
+
+object Macros {
+  def foo = macro Bundle.impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-bundle-whitebox-use-raw.check b/test/files/neg/macro-bundle-whitebox-use-raw.check
new file mode 100644
index 0000000..5792e31
--- /dev/null
+++ b/test/files/neg/macro-bundle-whitebox-use-raw.check
@@ -0,0 +1,17 @@
+Test_2.scala:2: error: value x is not a member of Any
+  println(ReturnTypeRefinement.foo.x)
+                                   ^
+Test_2.scala:7: error: type mismatch;
+ found   : FundepMaterialization[Test.Foo,(Int, String, Boolean)]
+ required: FundepMaterialization[Test.Foo,Nothing]
+Note: (Int, String, Boolean) >: Nothing, but trait FundepMaterialization is invariant in type U.
+You may wish to define U as -U instead. (SLS 4.5)
+    val equiv = foo(Foo(23, "foo", true))
+                   ^
+Test_2.scala:13: error: I don't like classes that contain integers
+  println(implicitly[DynamicMaterialization[C1]])
+                    ^
+Test_2.scala:17: error: extractor macros can only be whitebox
+    case ExtractorMacro(x) => println(x)
+                       ^
+four errors found
diff --git a/test/files/neg/macro-bundle-whitebox-use-raw/Macros_1.scala b/test/files/neg/macro-bundle-whitebox-use-raw/Macros_1.scala
new file mode 100644
index 0000000..61bf73e
--- /dev/null
+++ b/test/files/neg/macro-bundle-whitebox-use-raw/Macros_1.scala
@@ -0,0 +1,108 @@
+import scala.reflect.macros.blackbox.Context
+import scala.language.experimental.macros
+
+// whitebox use case #1: return type refinement
+
+class ReturnTypeRefinementBundle(val c: Context) {
+  import c.universe._
+  def impl = {
+    q"""
+      trait Foo {
+        def x = 2
+      }
+      new Foo {}
+    """
+  }
+}
+
+object ReturnTypeRefinement {
+  def foo: Any = macro ReturnTypeRefinementBundle.impl
+}
+
+// whitebox use case #2: fundep materialization
+
+trait FundepMaterialization[T, U] {
+  def to(t : T) : U
+  // def from(u : U) : T
+}
+
+class FundepMaterializationBundle(val c: Context) {
+  import c.universe._
+  import definitions._
+  import Flag._
+
+  def impl[T: c.WeakTypeTag, U: c.WeakTypeTag]: c.Expr[FundepMaterialization[T, U]] = {
+    val sym = c.weakTypeOf[T].typeSymbol
+    if (!sym.isClass || !sym.asClass.isCaseClass) c.abort(c.enclosingPosition, s"$sym is not a case class")
+    val fields = sym.info.decls.toList.collect{ case x: TermSymbol if x.isVal && x.isCaseAccessor => x }
+
+    def mkTpt() = {
+      val core = Ident(TupleClass(fields.length) orElse UnitClass)
+      if (fields.length == 0) core
+      else AppliedTypeTree(core, fields map (f => TypeTree(f.info)))
+    }
+
+    def mkFrom() = {
+      if (fields.length == 0) Literal(Constant(Unit))
+      else Apply(Ident(newTermName("Tuple" + fields.length)), fields map (f => Select(Ident(newTermName("f")), newTermName(f.name.toString.trim))))
+    }
+
+    val evidenceClass = ClassDef(Modifiers(FINAL), newTypeName("$anon"), List(), Template(
+      List(AppliedTypeTree(Ident(newTypeName("FundepMaterialization")), List(Ident(sym), mkTpt()))),
+      emptyValDef,
+      List(
+        DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(typeNames.EMPTY), typeNames.EMPTY), termNames.CONSTRUCTOR), List())), Literal(Constant(())))),
+        DefDef(Modifiers(), newTermName("to"), List(), List(List(ValDef(Modifiers(PARAM), newTermName("f"), Ident(sym), EmptyTree))), TypeTree(), mkFrom()))))
+    c.Expr[FundepMaterialization[T, U]](Block(List(evidenceClass), Apply(Select(New(Ident(newTypeName("$anon"))), termNames.CONSTRUCTOR), List())))
+  }
+}
+
+object FundepMaterialization {
+  implicit def materializeIso[T, U]: FundepMaterialization[T, U] = macro FundepMaterializationBundle.impl[T, U]
+}
+
+// whitebox use case #3: dynamic materialization
+
+trait DynamicMaterialization[T]
+
+class C1(val x: Int)
+class C2(val x: String)
+
+trait LowPriority {
+  implicit def lessSpecific[T]: DynamicMaterialization[T] = null
+}
+
+object DynamicMaterialization extends LowPriority {
+  implicit def moreSpecific[T]: DynamicMaterialization[T] = macro DynamicMaterializationBundle.impl[T]
+}
+
+class DynamicMaterializationBundle(val c: Context) {
+  import c.universe._
+  def impl[T: c.WeakTypeTag] = {
+    val tpe = weakTypeOf[T]
+    if (tpe.members.exists(_.info =:= typeOf[Int]))
+      c.abort(c.enclosingPosition, "I don't like classes that contain integers")
+    q"new DynamicMaterialization[$tpe]{ override def toString = ${tpe.toString} }"
+  }
+}
+
+// whitebox use case #4: extractor macros
+
+object ExtractorMacro {
+  def unapply(x: Int): Any = macro ExtractorBundle.unapplyImpl
+}
+
+class ExtractorBundle(val c: Context) {
+  import c.universe._
+  def unapplyImpl(x: Tree) = {
+    q"""
+      new {
+        class Match(x: Int) {
+          def isEmpty = false
+          def get = x
+        }
+        def unapply(x: Int) = new Match(x)
+      }.unapply($x)
+    """
+  }
+}
diff --git a/test/files/neg/macro-bundle-whitebox-use-raw/Test_2.scala b/test/files/neg/macro-bundle-whitebox-use-raw/Test_2.scala
new file mode 100644
index 0000000..3a81700
--- /dev/null
+++ b/test/files/neg/macro-bundle-whitebox-use-raw/Test_2.scala
@@ -0,0 +1,19 @@
+object Test extends App {
+  println(ReturnTypeRefinement.foo.x)
+
+  case class Foo(i: Int, s: String, b: Boolean)
+  def foo[C, L](c: C)(implicit iso: FundepMaterialization[C, L]): L = iso.to(c)
+  locally {
+    val equiv = foo(Foo(23, "foo", true))
+    def typed[T](t: => T) {}
+    typed[(Int, String, Boolean)](equiv)
+    println(equiv)
+  }
+
+  println(implicitly[DynamicMaterialization[C1]])
+  println(implicitly[DynamicMaterialization[C2]])
+
+  42 match {
+    case ExtractorMacro(x) => println(x)
+  }
+}
diff --git a/test/files/neg/macro-bundle-whitebox-use-refined.check b/test/files/neg/macro-bundle-whitebox-use-refined.check
new file mode 100644
index 0000000..5792e31
--- /dev/null
+++ b/test/files/neg/macro-bundle-whitebox-use-refined.check
@@ -0,0 +1,17 @@
+Test_2.scala:2: error: value x is not a member of Any
+  println(ReturnTypeRefinement.foo.x)
+                                   ^
+Test_2.scala:7: error: type mismatch;
+ found   : FundepMaterialization[Test.Foo,(Int, String, Boolean)]
+ required: FundepMaterialization[Test.Foo,Nothing]
+Note: (Int, String, Boolean) >: Nothing, but trait FundepMaterialization is invariant in type U.
+You may wish to define U as -U instead. (SLS 4.5)
+    val equiv = foo(Foo(23, "foo", true))
+                   ^
+Test_2.scala:13: error: I don't like classes that contain integers
+  println(implicitly[DynamicMaterialization[C1]])
+                    ^
+Test_2.scala:17: error: extractor macros can only be whitebox
+    case ExtractorMacro(x) => println(x)
+                       ^
+four errors found
diff --git a/test/files/neg/macro-bundle-whitebox-use-refined/Macros_1.scala b/test/files/neg/macro-bundle-whitebox-use-refined/Macros_1.scala
new file mode 100644
index 0000000..1866044
--- /dev/null
+++ b/test/files/neg/macro-bundle-whitebox-use-refined/Macros_1.scala
@@ -0,0 +1,108 @@
+import scala.reflect.macros.blackbox.Context
+import scala.language.experimental.macros
+
+// whitebox use case #1: return type refinement
+
+class ReturnTypeRefinementBundle(val c: Context { type PrefixType = Nothing }) {
+  import c.universe._
+  def impl = {
+    q"""
+      trait Foo {
+        def x = 2
+      }
+      new Foo {}
+    """
+  }
+}
+
+object ReturnTypeRefinement {
+  def foo: Any = macro ReturnTypeRefinementBundle.impl
+}
+
+// whitebox use case #2: fundep materialization
+
+trait FundepMaterialization[T, U] {
+  def to(t : T) : U
+  // def from(u : U) : T
+}
+
+class FundepMaterializationBundle(val c: Context { type PrefixType = Nothing }) {
+  import c.universe._
+  import definitions._
+  import Flag._
+
+  def impl[T: c.WeakTypeTag, U: c.WeakTypeTag]: c.Expr[FundepMaterialization[T, U]] = {
+    val sym = c.weakTypeOf[T].typeSymbol
+    if (!sym.isClass || !sym.asClass.isCaseClass) c.abort(c.enclosingPosition, s"$sym is not a case class")
+    val fields = sym.info.decls.toList.collect{ case x: TermSymbol if x.isVal && x.isCaseAccessor => x }
+
+    def mkTpt() = {
+      val core = Ident(TupleClass(fields.length) orElse UnitClass)
+      if (fields.length == 0) core
+      else AppliedTypeTree(core, fields map (f => TypeTree(f.info)))
+    }
+
+    def mkFrom() = {
+      if (fields.length == 0) Literal(Constant(Unit))
+      else Apply(Ident(newTermName("Tuple" + fields.length)), fields map (f => Select(Ident(newTermName("f")), newTermName(f.name.toString.trim))))
+    }
+
+    val evidenceClass = ClassDef(Modifiers(FINAL), newTypeName("$anon"), List(), Template(
+      List(AppliedTypeTree(Ident(newTypeName("FundepMaterialization")), List(Ident(sym), mkTpt()))),
+      emptyValDef,
+      List(
+        DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(typeNames.EMPTY), typeNames.EMPTY), termNames.CONSTRUCTOR), List())), Literal(Constant(())))),
+        DefDef(Modifiers(), newTermName("to"), List(), List(List(ValDef(Modifiers(PARAM), newTermName("f"), Ident(sym), EmptyTree))), TypeTree(), mkFrom()))))
+    c.Expr[FundepMaterialization[T, U]](Block(List(evidenceClass), Apply(Select(New(Ident(newTypeName("$anon"))), termNames.CONSTRUCTOR), List())))
+  }
+}
+
+object FundepMaterialization {
+  implicit def materializeIso[T, U]: FundepMaterialization[T, U] = macro FundepMaterializationBundle.impl[T, U]
+}
+
+// whitebox use case #3: dynamic materialization
+
+trait DynamicMaterialization[T]
+
+class C1(val x: Int)
+class C2(val x: String)
+
+trait LowPriority {
+  implicit def lessSpecific[T]: DynamicMaterialization[T] = null
+}
+
+object DynamicMaterialization extends LowPriority {
+  implicit def moreSpecific[T]: DynamicMaterialization[T] = macro DynamicMaterializationBundle.impl[T]
+}
+
+class DynamicMaterializationBundle(val c: Context { type PrefixType = Nothing }) {
+  import c.universe._
+  def impl[T: c.WeakTypeTag] = {
+    val tpe = weakTypeOf[T]
+    if (tpe.members.exists(_.info =:= typeOf[Int]))
+      c.abort(c.enclosingPosition, "I don't like classes that contain integers")
+    q"new DynamicMaterialization[$tpe]{ override def toString = ${tpe.toString} }"
+  }
+}
+
+// whitebox use case #4: extractor macros
+
+object ExtractorMacro {
+  def unapply(x: Int): Any = macro ExtractorBundle.unapplyImpl
+}
+
+class ExtractorBundle(val c: Context { type PrefixType = Nothing }) {
+  import c.universe._
+  def unapplyImpl(x: Tree) = {
+    q"""
+      new {
+        class Match(x: Int) {
+          def isEmpty = false
+          def get = x
+        }
+        def unapply(x: Int) = new Match(x)
+      }.unapply($x)
+    """
+  }
+}
diff --git a/test/files/neg/macro-bundle-whitebox-use-refined/Test_2.scala b/test/files/neg/macro-bundle-whitebox-use-refined/Test_2.scala
new file mode 100644
index 0000000..3a81700
--- /dev/null
+++ b/test/files/neg/macro-bundle-whitebox-use-refined/Test_2.scala
@@ -0,0 +1,19 @@
+object Test extends App {
+  println(ReturnTypeRefinement.foo.x)
+
+  case class Foo(i: Int, s: String, b: Boolean)
+  def foo[C, L](c: C)(implicit iso: FundepMaterialization[C, L]): L = iso.to(c)
+  locally {
+    val equiv = foo(Foo(23, "foo", true))
+    def typed[T](t: => T) {}
+    typed[(Int, String, Boolean)](equiv)
+    println(equiv)
+  }
+
+  println(implicitly[DynamicMaterialization[C1]])
+  println(implicitly[DynamicMaterialization[C2]])
+
+  42 match {
+    case ExtractorMacro(x) => println(x)
+  }
+}
diff --git a/test/files/neg/macro-bundle-wrongcontext-a.check b/test/files/neg/macro-bundle-wrongcontext-a.check
new file mode 100644
index 0000000..10aadb0
--- /dev/null
+++ b/test/files/neg/macro-bundle-wrongcontext-a.check
@@ -0,0 +1,4 @@
+macro-bundle-wrongcontext-a.scala:12: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter
+  def foo: Any = macro Bundle.impl
+                       ^
+one error found
diff --git a/test/files/neg/macro-bundle-wrongcontext-a.scala b/test/files/neg/macro-bundle-wrongcontext-a.scala
new file mode 100644
index 0000000..ed566fd
--- /dev/null
+++ b/test/files/neg/macro-bundle-wrongcontext-a.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+abstract class MyContext extends Context
+
+class Bundle(val c: MyContext) {
+  import c.universe._
+  def impl = q"()"
+}
+
+object Macros {
+  def foo: Any = macro Bundle.impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-bundle-wrongcontext-b.check b/test/files/neg/macro-bundle-wrongcontext-b.check
new file mode 100644
index 0000000..e9700d3
--- /dev/null
+++ b/test/files/neg/macro-bundle-wrongcontext-b.check
@@ -0,0 +1,4 @@
+macro-bundle-wrongcontext-b.scala:10: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter
+  def foo: Any = macro Bundle.impl
+                       ^
+one error found
diff --git a/test/files/neg/macro-bundle-wrongcontext-b.scala b/test/files/neg/macro-bundle-wrongcontext-b.scala
new file mode 100644
index 0000000..0b4ff7e
--- /dev/null
+++ b/test/files/neg/macro-bundle-wrongcontext-b.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+class Bundle(val c: Context { type Foo <: Int }) {
+  import c.universe._
+  def impl = q"()"
+}
+
+object Macros {
+  def foo: Any = macro Bundle.impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-cyclic/Impls_Macros_1.scala b/test/files/neg/macro-cyclic/Impls_Macros_1.scala
index ac9b793..ad68901 100644
--- a/test/files/neg/macro-cyclic/Impls_Macros_1.scala
+++ b/test/files/neg/macro-cyclic/Impls_Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def impl(c: Context) = {
diff --git a/test/files/neg/macro-deprecate-idents.check b/test/files/neg/macro-deprecate-idents.check
index 22b667c..c5902ae 100644
--- a/test/files/neg/macro-deprecate-idents.check
+++ b/test/files/neg/macro-deprecate-idents.check
@@ -1,52 +1,67 @@
-macro-deprecate-idents.scala:2: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:2: error: macro is now a reserved word; usage as an identifier is disallowed
   val macro = ???
       ^
-macro-deprecate-idents.scala:6: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:6: error: macro is now a reserved word; usage as an identifier is disallowed
   var macro = ???
       ^
-macro-deprecate-idents.scala:10: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:10: error: macro is now a reserved word; usage as an identifier is disallowed
   type macro = Int
        ^
-macro-deprecate-idents.scala:14: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:14: error: macro is now a reserved word; usage as an identifier is disallowed
   class macro
         ^
-macro-deprecate-idents.scala:18: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:18: error: macro is now a reserved word; usage as an identifier is disallowed
   class macro
         ^
-macro-deprecate-idents.scala:22: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:22: error: macro is now a reserved word; usage as an identifier is disallowed
   object macro
          ^
-macro-deprecate-idents.scala:26: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:26: error: macro is now a reserved word; usage as an identifier is disallowed
   object macro
          ^
-macro-deprecate-idents.scala:30: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:30: error: macro is now a reserved word; usage as an identifier is disallowed
   trait macro
         ^
-macro-deprecate-idents.scala:34: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:34: error: macro is now a reserved word; usage as an identifier is disallowed
   trait macro
         ^
-macro-deprecate-idents.scala:37: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:37: error: macro is now a reserved word; usage as an identifier is disallowed
 package macro {
         ^
-macro-deprecate-idents.scala:38: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:38: error: macro is now a reserved word; usage as an identifier is disallowed
   package macro.bar {
           ^
-macro-deprecate-idents.scala:43: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:43: error: macro is now a reserved word; usage as an identifier is disallowed
   package macro.foo {
           ^
-macro-deprecate-idents.scala:48: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:48: error: macro is now a reserved word; usage as an identifier is disallowed
   val Some(macro) = Some(42)
            ^
-macro-deprecate-idents.scala:49: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:49: error: macro is now a reserved word; usage as an identifier is disallowed
   macro match {
   ^
-macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is disallowed
     case macro => println(macro)
          ^
-macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is disallowed
     case macro => println(macro)
                           ^
-macro-deprecate-idents.scala:55: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:55: error: macro is now a reserved word; usage as an identifier is disallowed
   def macro = 2
       ^
-17 errors found
+macro-deprecate-idents.scala:3: error: '=' expected but '}' found.
+}
+^
+macro-deprecate-idents.scala:7: error: '=' expected but '}' found.
+}
+^
+macro-deprecate-idents.scala:42: error: '{' expected but ';' found.
+package foo {
+^
+macro-deprecate-idents.scala:45: error: '{' expected but '}' found.
+}
+^
+macro-deprecate-idents.scala:52: error: ')' expected but '}' found.
+}
+^
+22 errors found
diff --git a/test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala b/test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala
index cdea310..5c04503 100644
--- a/test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala
+++ b/test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.whitebox.Context
 import language.experimental.macros
 
 trait Complex[T]
@@ -9,13 +9,13 @@ object Complex {
   def impl[T: c.WeakTypeTag](c: Context): c.Expr[Complex[T]] = {
     import c.universe._
     val tpe = weakTypeOf[T]
-    for (f <- tpe.declarations.collect{case f: TermSymbol if f.isParamAccessor && !f.isMethod => f}) {
-      val trecur = appliedType(typeOf[Complex[_]], List(f.typeSignature))
-      if (c.openImplicits.tail.exists(ic => ic._1 =:= trecur)) c.abort(c.enclosingPosition, "diverging implicit expansion. reported by a macro!")
+    for (f <- tpe.decls.collect{case f: TermSymbol if f.isParamAccessor && !f.isMethod => f}) {
+      val trecur = appliedType(typeOf[Complex[_]], List(f.info))
+      if (c.openImplicits.tail.exists(ic => ic.pt =:= trecur)) c.abort(c.enclosingPosition, "diverging implicit expansion. reported by a macro!")
       val recur = c.inferImplicitValue(trecur, silent = true)
       if (recur == EmptyTree) c.abort(c.enclosingPosition, s"couldn't synthesize $trecur")
     }
-    c.literalNull
+    c.Expr[Null](q"null")
   }
 
   implicit object ComplexString extends Complex[String]
diff --git a/test/files/neg/macro-exception.check b/test/files/neg/macro-exception.check
index cee8b32..dca97ae 100644
--- a/test/files/neg/macro-exception.check
+++ b/test/files/neg/macro-exception.check
@@ -1,4 +1,4 @@
-Test_2.scala:2: error: exception during macro expansion: 
+Test_2.scala:2: error: exception during macro expansion:
 java.lang.Exception
 	at Macros$.impl(Macros_1.scala:6)
 
diff --git a/test/files/neg/macro-exception/Macros_1.scala b/test/files/neg/macro-exception/Macros_1.scala
index 60e4020..3d6109d 100644
--- a/test/files/neg/macro-exception/Macros_1.scala
+++ b/test/files/neg/macro-exception/Macros_1.scala
@@ -1,5 +1,5 @@
 import scala.language.experimental.macros
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def impl(c: Context) = {
diff --git a/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala b/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala
index 6dc2ea1..a97dfd4 100644
--- a/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala
+++ b/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Helper {
   def unapplySeq[T](x: List[T]): Option[Seq[T]] =
diff --git a/test/files/neg/macro-incompatible-macro-engine-a.check b/test/files/neg/macro-incompatible-macro-engine-a.check
new file mode 100644
index 0000000..8ae08bd
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-a.check
@@ -0,0 +1,7 @@
+Test_3.scala:2: error: macro cannot be expanded, because it was compiled by an incompatible macro engine
+  Macros.foo
+  ^
+Test_3.scala:3: error: macro cannot be expanded, because it was compiled by an incompatible macro engine
+  Macros.foo
+  ^
+two errors found
diff --git a/test/files/neg/macro-incompatible-macro-engine-a/Macros_2.flags b/test/files/neg/macro-incompatible-macro-engine-a/Macros_2.flags
new file mode 100644
index 0000000..966df73
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-a/Macros_2.flags
@@ -0,0 +1 @@
+-Xplugin:.
\ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine-a/Macros_2.scala b/test/files/neg/macro-incompatible-macro-engine-a/Macros_2.scala
new file mode 100644
index 0000000..39708ee
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-a/Macros_2.scala
@@ -0,0 +1,7 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def impl(c: Context) = c.universe.Literal(c.universe.Constant(()))
+  def foo: Unit = macro impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine-a/Plugin_1.scala b/test/files/neg/macro-incompatible-macro-engine-a/Plugin_1.scala
new file mode 100644
index 0000000..44ed91d
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-a/Plugin_1.scala
@@ -0,0 +1,35 @@
+package incompatibleMacroEngine
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.plugins.{Plugin => NscPlugin}
+
+class Plugin(val global: Global) extends NscPlugin {
+  import global._
+  import analyzer._
+
+  val name = "incompatibleMacroEngine"
+  val description = "A sample analyzer plugin that crafts a macro impl binding with a non-standard macro engine."
+  val components = Nil
+  addMacroPlugin(MacroPlugin)
+
+  object MacroPlugin extends MacroPlugin {
+    def fixupBinding(tree: Tree) = new Transformer {
+      override def transform(tree: Tree) = {
+        tree match {
+          case Literal(const @ Constant(x)) if tree.tpe == null => tree setType ConstantType(const)
+          case _ if tree.tpe == null => tree setType NoType
+          case _ => ;
+        }
+        super.transform(tree)
+      }
+    }.transform(tree)
+
+    override def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Option[Tree] = {
+      val result = standardTypedMacroBody(typer, ddef)
+      val List(AnnotationInfo(atp, List(Apply(nucleus, _ :: others)), Nil)) = ddef.symbol.annotations
+      val updatedBinding = Apply(nucleus, Assign(Literal(Constant("macroEngine")), Literal(Constant("vxxx (implemented in the incompatibleMacroEngine plugin)"))) :: others)
+      ddef.symbol.setAnnotations(List(AnnotationInfo(atp, List(fixupBinding(updatedBinding)), Nil)))
+      Some(result)
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine-a/Test_3.scala b/test/files/neg/macro-incompatible-macro-engine-a/Test_3.scala
new file mode 100644
index 0000000..7e4fae5
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-a/Test_3.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  Macros.foo
+  Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine-a/scalac-plugin.xml b/test/files/neg/macro-incompatible-macro-engine-a/scalac-plugin.xml
new file mode 100644
index 0000000..42b9cdd
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-a/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+  <name>incompatible-macro-engine</name>
+  <classname>incompatibleMacroEngine.Plugin</classname>
+</plugin>
\ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine-b.check b/test/files/neg/macro-incompatible-macro-engine-b.check
new file mode 100644
index 0000000..2a7510c
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-b.check
@@ -0,0 +1,7 @@
+Test_3.scala:2: error: macro cannot be expanded, because it was compiled by an incompatible macro engine (internal diagnostic: expected = v7.0 (implemented in Scala 2.11.0-M8), actual = vxxx (implemented in the incompatibleMacroEngine plugin))
+  Macros.foo
+  ^
+Test_3.scala:3: error: macro cannot be expanded, because it was compiled by an incompatible macro engine (internal diagnostic: expected = v7.0 (implemented in Scala 2.11.0-M8), actual = vxxx (implemented in the incompatibleMacroEngine plugin))
+  Macros.foo
+  ^
+two errors found
diff --git a/test/files/neg/macro-incompatible-macro-engine-b.flags b/test/files/neg/macro-incompatible-macro-engine-b.flags
new file mode 100644
index 0000000..037a693
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-b.flags
@@ -0,0 +1 @@
+-Ymacro-debug-lite
\ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine-b/Macros_2.flags b/test/files/neg/macro-incompatible-macro-engine-b/Macros_2.flags
new file mode 100644
index 0000000..966df73
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-b/Macros_2.flags
@@ -0,0 +1 @@
+-Xplugin:.
\ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine-b/Macros_2.scala b/test/files/neg/macro-incompatible-macro-engine-b/Macros_2.scala
new file mode 100644
index 0000000..39708ee
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-b/Macros_2.scala
@@ -0,0 +1,7 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def impl(c: Context) = c.universe.Literal(c.universe.Constant(()))
+  def foo: Unit = macro impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine-b/Plugin_1.scala b/test/files/neg/macro-incompatible-macro-engine-b/Plugin_1.scala
new file mode 100644
index 0000000..44ed91d
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-b/Plugin_1.scala
@@ -0,0 +1,35 @@
+package incompatibleMacroEngine
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.plugins.{Plugin => NscPlugin}
+
+class Plugin(val global: Global) extends NscPlugin {
+  import global._
+  import analyzer._
+
+  val name = "incompatibleMacroEngine"
+  val description = "A sample analyzer plugin that crafts a macro impl binding with a non-standard macro engine."
+  val components = Nil
+  addMacroPlugin(MacroPlugin)
+
+  object MacroPlugin extends MacroPlugin {
+    def fixupBinding(tree: Tree) = new Transformer {
+      override def transform(tree: Tree) = {
+        tree match {
+          case Literal(const @ Constant(x)) if tree.tpe == null => tree setType ConstantType(const)
+          case _ if tree.tpe == null => tree setType NoType
+          case _ => ;
+        }
+        super.transform(tree)
+      }
+    }.transform(tree)
+
+    override def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Option[Tree] = {
+      val result = standardTypedMacroBody(typer, ddef)
+      val List(AnnotationInfo(atp, List(Apply(nucleus, _ :: others)), Nil)) = ddef.symbol.annotations
+      val updatedBinding = Apply(nucleus, Assign(Literal(Constant("macroEngine")), Literal(Constant("vxxx (implemented in the incompatibleMacroEngine plugin)"))) :: others)
+      ddef.symbol.setAnnotations(List(AnnotationInfo(atp, List(fixupBinding(updatedBinding)), Nil)))
+      Some(result)
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine-b/Test_3.scala b/test/files/neg/macro-incompatible-macro-engine-b/Test_3.scala
new file mode 100644
index 0000000..7e4fae5
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-b/Test_3.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  Macros.foo
+  Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine-b/scalac-plugin.xml b/test/files/neg/macro-incompatible-macro-engine-b/scalac-plugin.xml
new file mode 100644
index 0000000..42b9cdd
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-b/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+  <name>incompatible-macro-engine</name>
+  <classname>incompatibleMacroEngine.Plugin</classname>
+</plugin>
\ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine-c.check b/test/files/neg/macro-incompatible-macro-engine-c.check
new file mode 100644
index 0000000..fb6c59a
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-c.check
@@ -0,0 +1,4 @@
+macro-incompatible-macro-engine-c.scala:2: error: can't expand macros compiled by previous versions of Scala
+  MacroLibCompiledByScala210x.foo
+  ^
+one error found
diff --git a/test/files/neg/macro-incompatible-macro-engine-c.scala b/test/files/neg/macro-incompatible-macro-engine-c.scala
new file mode 100644
index 0000000..037ac5f
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine-c.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+  MacroLibCompiledByScala210x.foo
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-a.check b/test/files/neg/macro-invalidimpl-a.check
deleted file mode 100644
index 7f11f3b..0000000
--- a/test/files/neg/macro-invalidimpl-a.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:3: error: macro implementation must be in statically accessible object
-  def foo(x: Any) = macro impls.foo
-                                ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-a/Impls_1.scala b/test/files/neg/macro-invalidimpl-a/Impls_1.scala
deleted file mode 100644
index cfa1218..0000000
--- a/test/files/neg/macro-invalidimpl-a/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-class Impls {
-  def foo(c: Ctx)(x: c.Expr[Any]) = ???
-}
diff --git a/test/files/neg/macro-invalidimpl-a/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-a/Macros_Test_2.scala
deleted file mode 100644
index 2220dda..0000000
--- a/test/files/neg/macro-invalidimpl-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Macros {
-  val impls = new Impls
-  def foo(x: Any) = macro impls.foo
-}
-
-object Test extends App {
-  import Macros._
-  foo(42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-b.check b/test/files/neg/macro-invalidimpl-b.check
deleted file mode 100644
index 7f11f3b..0000000
--- a/test/files/neg/macro-invalidimpl-b.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:3: error: macro implementation must be in statically accessible object
-  def foo(x: Any) = macro impls.foo
-                                ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-b/Impls_1.scala b/test/files/neg/macro-invalidimpl-b/Impls_1.scala
deleted file mode 100644
index 4467021..0000000
--- a/test/files/neg/macro-invalidimpl-b/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx)(x: c.Expr[Any]) = ???
-}
diff --git a/test/files/neg/macro-invalidimpl-b/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-b/Macros_Test_2.scala
deleted file mode 100644
index 81e4083..0000000
--- a/test/files/neg/macro-invalidimpl-b/Macros_Test_2.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Macros {
-  val impls = Impls
-  def foo(x: Any) = macro impls.foo
-}
-
-object Test extends App {
-  import Macros._
-  foo(42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-c.check b/test/files/neg/macro-invalidimpl-c.check
deleted file mode 100644
index 9e0181c..0000000
--- a/test/files/neg/macro-invalidimpl-c.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Impls_Macros_1.scala:8: error: macro implementation must be in statically accessible object
-  def foo(x: Any) = macro Impls.foo
-                                ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-c/Impls_Macros_1.scala b/test/files/neg/macro-invalidimpl-c/Impls_Macros_1.scala
deleted file mode 100644
index 67a0eb3..0000000
--- a/test/files/neg/macro-invalidimpl-c/Impls_Macros_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-class Macros {
-  object Impls {
-    def foo(c: Ctx)(x: c.Expr[Any]) = ???
-  }
-
-  def foo(x: Any) = macro Impls.foo
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-c/Test_2.scala b/test/files/neg/macro-invalidimpl-c/Test_2.scala
deleted file mode 100644
index e75a8ba..0000000
--- a/test/files/neg/macro-invalidimpl-c/Test_2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends App {
-  new Macros().foo(42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-d.check b/test/files/neg/macro-invalidimpl-d.check
deleted file mode 100644
index 76a5ba9..0000000
--- a/test/files/neg/macro-invalidimpl-d.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:2: error: macro implementation must be in statically accessible object
-  def foo(x: Any) = macro Impls.foo
-                                ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-d/Impls_1.scala b/test/files/neg/macro-invalidimpl-d/Impls_1.scala
deleted file mode 100644
index e0819c9..0000000
--- a/test/files/neg/macro-invalidimpl-d/Impls_1.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-trait MacroHelpers {
-  object Impls {
-    def foo(c: Ctx)(x: c.Expr[Any]) = x
-  }
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-d/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-d/Macros_Test_2.scala
deleted file mode 100644
index 067ab1d..0000000
--- a/test/files/neg/macro-invalidimpl-d/Macros_Test_2.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-class Macros extends MacroHelpers {
-  def foo(x: Any) = macro Impls.foo
-}
-
-object Test extends App {
-  println(new Macros().foo(42))
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-e.check b/test/files/neg/macro-invalidimpl-e.check
deleted file mode 100644
index e0910b2..0000000
--- a/test/files/neg/macro-invalidimpl-e.check
+++ /dev/null
@@ -1,13 +0,0 @@
-Macros_Test_2.scala:2: error: ambiguous reference to overloaded definition,
-both method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing
-and  method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any])Nothing
-match expected type ?
-  def foo(x: Any) = macro Impls.foo
-                                ^
-Macros_Test_2.scala:3: error: ambiguous reference to overloaded definition,
-both method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing
-and  method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any])Nothing
-match expected type ?
-  def foo(x: Any, y: Any) = macro Impls.foo
-                                        ^
-two errors found
diff --git a/test/files/neg/macro-invalidimpl-e/Impls_1.scala b/test/files/neg/macro-invalidimpl-e/Impls_1.scala
deleted file mode 100644
index fd40119..0000000
--- a/test/files/neg/macro-invalidimpl-e/Impls_1.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx)(x: c.Expr[Any]) = ???
-  def foo(c: Ctx)(x: c.Expr[Any], y: c.Expr[Any]) = ???
-}
diff --git a/test/files/neg/macro-invalidimpl-e/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-e/Macros_Test_2.scala
deleted file mode 100644
index 6edde08..0000000
--- a/test/files/neg/macro-invalidimpl-e/Macros_Test_2.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Macros {
-  def foo(x: Any) = macro Impls.foo
-  def foo(x: Any, y: Any) = macro Impls.foo
-}
-
-object Test extends App {
-  import Macros._
-  foo(42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-f.check b/test/files/neg/macro-invalidimpl-f.check
deleted file mode 100644
index 8820e05..0000000
--- a/test/files/neg/macro-invalidimpl-f.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
- required: (c: scala.reflect.macros.Context)(): c.Expr[Unit]
- found   : (c: scala.reflect.macros.Context): c.Expr[Unit]
-number of parameter sections differ
-  def bar1() = macro Impls.fooNullary
-                           ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-f/Impls_1.scala b/test/files/neg/macro-invalidimpl-f/Impls_1.scala
deleted file mode 100644
index 334ee71..0000000
--- a/test/files/neg/macro-invalidimpl-f/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def fooNullary(c: Ctx) = {
-    import c.universe._
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
-    c.Expr[Unit](body)
-  }
-
-  def fooEmpty(c: Ctx)() = fooNullary(c)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-f/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-f/Macros_Test_2.scala
deleted file mode 100644
index 493edf1..0000000
--- a/test/files/neg/macro-invalidimpl-f/Macros_Test_2.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Macros {
-  def bar1() = macro Impls.fooNullary
-}
-
-object Test extends App {
-  Macros.bar1
-  Macros.bar1()
-  println("kkthxbai")
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-g.check b/test/files/neg/macro-invalidimpl-g.check
deleted file mode 100644
index c063803..0000000
--- a/test/files/neg/macro-invalidimpl-g.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Unit]
- found   : (c: scala.reflect.macros.Context)(): c.Expr[Unit]
-number of parameter sections differ
-  def foo1 = macro Impls.fooEmpty
-                         ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-g/Impls_1.scala b/test/files/neg/macro-invalidimpl-g/Impls_1.scala
deleted file mode 100644
index 334ee71..0000000
--- a/test/files/neg/macro-invalidimpl-g/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def fooNullary(c: Ctx) = {
-    import c.universe._
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
-    c.Expr[Unit](body)
-  }
-
-  def fooEmpty(c: Ctx)() = fooNullary(c)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-g/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-g/Macros_Test_2.scala
deleted file mode 100644
index 5561db9..0000000
--- a/test/files/neg/macro-invalidimpl-g/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo1 = macro Impls.fooEmpty
-}
-
-object Test extends App {
-  Macros.foo1
-  println("kkthxbai")
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-h.check b/test/files/neg/macro-invalidimpl-h.check
deleted file mode 100644
index ea76e1a..0000000
--- a/test/files/neg/macro-invalidimpl-h.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:2: error: type arguments [String] do not conform to method foo's type parameter bounds [U <: Int]
-  def foo = macro Impls.foo[String]
-                           ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-h/Impls_1.scala b/test/files/neg/macro-invalidimpl-h/Impls_1.scala
deleted file mode 100644
index 427fd3d..0000000
--- a/test/files/neg/macro-invalidimpl-h/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[U <: Int](c: Ctx) = ???
-}
diff --git a/test/files/neg/macro-invalidimpl-h/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-h/Macros_Test_2.scala
deleted file mode 100644
index 218c7ae..0000000
--- a/test/files/neg/macro-invalidimpl-h/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo = macro Impls.foo[String]
-}
-
-object Test extends App {
-  import Macros._
-  foo
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-i.check b/test/files/neg/macro-invalidimpl-i.check
deleted file mode 100644
index 846ed8d..0000000
--- a/test/files/neg/macro-invalidimpl-i.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:4: error: macro implementation must be public
-  def foo = macro Impls.impl
-                        ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-i/Impls_1.scala b/test/files/neg/macro-invalidimpl-i/Impls_1.scala
deleted file mode 100644
index c35d8ab..0000000
--- a/test/files/neg/macro-invalidimpl-i/Impls_1.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package foo
-
-import scala.reflect.macros.Context
-
-object Impls {
-  private[foo] def impl(c: Context) = ???
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-i/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-i/Macros_Test_2.scala
deleted file mode 100644
index fb129c7..0000000
--- a/test/files/neg/macro-invalidimpl-i/Macros_Test_2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package foo
-
-object Test extends App {
-  def foo = macro Impls.impl
-}
diff --git a/test/files/neg/macro-invalidimpl.check b/test/files/neg/macro-invalidimpl.check
new file mode 100644
index 0000000..ea7d71c
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl.check
@@ -0,0 +1,53 @@
+Macros_Test_2.scala:5: error: macro implementation reference has wrong shape. required:
+macro [<static object>].<method name>[[<type args>]] or
+macro [<macro bundle>].<method name>[[<type args>]]
+  def foo(x: Any) = macro impls.foo
+                                ^
+Macros_Test_2.scala:10: error: macro implementation reference has wrong shape. required:
+macro [<static object>].<method name>[[<type args>]] or
+macro [<macro bundle>].<method name>[[<type args>]]
+  def foo(x: Any) = macro impls.foo
+                                ^
+Macros_Test_2.scala:18: error: macro implementation reference has wrong shape. required:
+macro [<static object>].<method name>[[<type args>]] or
+macro [<macro bundle>].<method name>[[<type args>]]
+  def foo(x: Any) = macro Impls3.foo
+                                 ^
+Macros_Test_2.scala:22: error: macro implementation reference has wrong shape. required:
+macro [<static object>].<method name>[[<type args>]] or
+macro [<macro bundle>].<method name>[[<type args>]]
+  def foo(x: Any) = macro Impls4.foo
+                                 ^
+Macros_Test_2.scala:26: error: ambiguous reference to overloaded definition,
+both method foo in object Impls5 of type (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing
+and  method foo in object Impls5 of type (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any])Nothing
+match expected type ?
+  def foo(x: Any) = macro Impls5.foo
+                                 ^
+Macros_Test_2.scala:27: error: ambiguous reference to overloaded definition,
+both method foo in object Impls5 of type (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing
+and  method foo in object Impls5 of type (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any])Nothing
+match expected type ?
+  def foo(x: Any, y: Any) = macro Impls5.foo
+                                         ^
+Macros_Test_2.scala:31: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Unit]
+ or      : (c: scala.reflect.macros.blackbox.Context): c.Tree
+ found   : (c: scala.reflect.macros.blackbox.Context)(): c.Expr[Unit]
+number of parameter sections differ
+  def foo1 = macro Impls6.fooEmpty
+                          ^
+Macros_Test_2.scala:32: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context)(): c.Expr[Unit]
+ or      : (c: scala.reflect.macros.blackbox.Context)(): c.Tree
+ found   : (c: scala.reflect.macros.blackbox.Context): c.Expr[Unit]
+number of parameter sections differ
+  def bar1() = macro Impls6.fooNullary
+                            ^
+Macros_Test_2.scala:36: error: type arguments [String] do not conform to method foo's type parameter bounds [U <: Int]
+  def foo = macro Impls7.foo[String]
+                            ^
+Macros_Test_2.scala:53: error: macro implementation must be public
+    def foo = macro Impls8.impl
+                           ^
+10 errors found
diff --git a/test/files/neg/macro-invalidimpl-a.flags b/test/files/neg/macro-invalidimpl.flags
similarity index 100%
rename from test/files/neg/macro-invalidimpl-a.flags
rename to test/files/neg/macro-invalidimpl.flags
diff --git a/test/files/neg/macro-invalidimpl/Impls_1.scala b/test/files/neg/macro-invalidimpl/Impls_1.scala
new file mode 100644
index 0000000..a1c885a
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl/Impls_1.scala
@@ -0,0 +1,39 @@
+import scala.reflect.macros.blackbox.Context
+
+class Impls1 {
+  def foo(c: Context)(x: c.Expr[Any]) = ???
+}
+
+object Impls2 {
+  def foo(c: Context)(x: c.Expr[Any]) = ???
+}
+
+trait MacroHelpers {
+  object Impls4 {
+    def foo(c: Context)(x: c.Expr[Any]) = x
+  }
+}
+
+object Impls5 {
+  def foo(c: Context)(x: c.Expr[Any]) = ???
+  def foo(c: Context)(x: c.Expr[Any], y: c.Expr[Any]) = ???
+}
+
+object Impls6 {
+  def fooNullary(c: Context) = {
+    import c.universe._
+    c.Expr[Unit](q"""Predef.println("it works")""")
+  }
+
+  def fooEmpty(c: Context)() = fooNullary(c)
+}
+
+object Impls7 {
+  def foo[U <: Int](c: Context) = ???
+}
+
+package foo {
+  object Impls8 {
+    private[foo] def impl(c: Context) = ???
+  }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl/Macros_Test_2.scala
new file mode 100644
index 0000000..6760d99
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl/Macros_Test_2.scala
@@ -0,0 +1,55 @@
+import scala.reflect.macros.blackbox.Context
+
+object Macros1 {
+  val impls = new Impls1
+  def foo(x: Any) = macro impls.foo
+}
+
+object Macros2 {
+  val impls = Impls2
+  def foo(x: Any) = macro impls.foo
+}
+
+class Macros3 {
+  object Impls3 {
+    def foo(c: Context)(x: c.Expr[Any]) = ???
+  }
+
+  def foo(x: Any) = macro Impls3.foo
+}
+
+class Macros4 extends MacroHelpers {
+  def foo(x: Any) = macro Impls4.foo
+}
+
+object Macros5 {
+  def foo(x: Any) = macro Impls5.foo
+  def foo(x: Any, y: Any) = macro Impls5.foo
+}
+
+object Macros6 {
+  def foo1 = macro Impls6.fooEmpty
+  def bar1() = macro Impls6.fooNullary
+}
+
+object Macros7 {
+  def foo = macro Impls7.foo[String]
+}
+
+object Test extends App {
+  println(Macros1.foo(42))
+  println(Macros2.foo(42))
+  println(new Macros3().foo(42))
+  println(new Macros4().foo(42))
+  println(Macros5.foo(42))
+  println(Macros6.foo1)
+  println(Macros6.bar1)
+  println(Macros6.bar1())
+  println(Macros7.foo)
+}
+
+package foo {
+  object Test extends App {
+    def foo = macro Impls8.impl
+  }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidret-nontree.check b/test/files/neg/macro-invalidret-nontree.check
deleted file mode 100644
index 74e6f33..0000000
--- a/test/files/neg/macro-invalidret-nontree.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Any]
- found   : (c: scala.reflect.macros.Context): Int
-type mismatch for return type: Int does not conform to c.Expr[Any]
-  def foo = macro Impls.foo
-                        ^
-one error found
diff --git a/test/files/neg/macro-invalidret-nontree/Impls_1.scala b/test/files/neg/macro-invalidret-nontree/Impls_1.scala
deleted file mode 100644
index ef19b1b..0000000
--- a/test/files/neg/macro-invalidret-nontree/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = 2
-}
diff --git a/test/files/neg/macro-invalidret-nontree/Macros_Test_2.scala b/test/files/neg/macro-invalidret-nontree/Macros_Test_2.scala
deleted file mode 100644
index 96a8de2..0000000
--- a/test/files/neg/macro-invalidret-nontree/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo = macro Impls.foo
-}
-
-object Test extends App {
-  import Macros._
-  foo
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidret-nonuniversetree.check b/test/files/neg/macro-invalidret-nonuniversetree.check
deleted file mode 100644
index 81c4114..0000000
--- a/test/files/neg/macro-invalidret-nonuniversetree.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Any]
- found   : (c: scala.reflect.macros.Context): reflect.runtime.universe.Literal
-type mismatch for return type: reflect.runtime.universe.Literal does not conform to c.Expr[Any]
-  def foo = macro Impls.foo
-                        ^
-one error found
diff --git a/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala b/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
deleted file mode 100644
index f98376a..0000000
--- a/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-import scala.reflect.runtime.{universe => ru}
-
-object Impls {
-  def foo(c: Ctx) = ru.Literal(ru.Constant(42))
-}
diff --git a/test/files/neg/macro-invalidret-nonuniversetree/Macros_Test_2.scala b/test/files/neg/macro-invalidret-nonuniversetree/Macros_Test_2.scala
deleted file mode 100644
index 96a8de2..0000000
--- a/test/files/neg/macro-invalidret-nonuniversetree/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo = macro Impls.foo
-}
-
-object Test extends App {
-  import Macros._
-  foo
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidret.check b/test/files/neg/macro-invalidret.check
new file mode 100644
index 0000000..568cc7c
--- /dev/null
+++ b/test/files/neg/macro-invalidret.check
@@ -0,0 +1,35 @@
+Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Any]
+ or      : (c: scala.reflect.macros.blackbox.Context): c.Tree
+ found   : (c: scala.reflect.macros.blackbox.Context): Int
+type mismatch for return type: Int does not conform to c.Expr[Any]
+  def foo1 = macro Impls.foo1
+                         ^
+Macros_Test_2.scala:3: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Any]
+ or      : (c: scala.reflect.macros.blackbox.Context): c.Tree
+ found   : (c: scala.reflect.macros.blackbox.Context): reflect.runtime.universe.Literal
+type mismatch for return type: reflect.runtime.universe.Literal does not conform to c.Expr[Any]
+  def foo2 = macro Impls.foo2
+                         ^
+Macros_Test_2.scala:6: error: macro defs must have explicitly specified return types
+  def foo5 = macro Impls.foo5
+      ^
+Macros_Test_2.scala:7: warning: macro defs must have explicitly specified return types (inference of Int from macro impl's c.Expr[Int] is deprecated and is going to stop working in 2.12)
+  def foo6 = macro Impls.foo6
+      ^
+Macros_Test_2.scala:14: error: exception during macro expansion:
+scala.NotImplementedError: an implementation is missing
+	at scala.Predef$.$qmark$qmark$qmark(Predef.scala:225)
+	at Impls$.foo3(Impls_1.scala:7)
+
+  foo3
+  ^
+Macros_Test_2.scala:15: error: macro implementation is missing
+  foo4
+  ^
+Macros_Test_2.scala:17: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+  foo6
+  ^
+two warnings found
+5 errors found
diff --git a/test/files/neg/macro-invalidret.flags b/test/files/neg/macro-invalidret.flags
new file mode 100644
index 0000000..946c53e
--- /dev/null
+++ b/test/files/neg/macro-invalidret.flags
@@ -0,0 +1,3 @@
+-language:experimental.macros
+-Xfatal-warnings
+-deprecation
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidret/Impls_1.scala b/test/files/neg/macro-invalidret/Impls_1.scala
new file mode 100644
index 0000000..434aeef
--- /dev/null
+++ b/test/files/neg/macro-invalidret/Impls_1.scala
@@ -0,0 +1,10 @@
+import scala.reflect.macros.blackbox.Context
+import scala.reflect.runtime.{universe => ru}
+
+object Impls {
+  def foo1(c: Context) = 2
+  def foo2(c: Context) = ru.Literal(ru.Constant(42))
+  def foo3(c: Context) = ???
+  def foo5(c: Context) = c.universe.Literal(c.universe.Constant(42))
+  def foo6(c: Context) = c.Expr[Int](c.universe.Literal(c.universe.Constant(42)))
+}
diff --git a/test/files/neg/macro-invalidret/Macros_Test_2.scala b/test/files/neg/macro-invalidret/Macros_Test_2.scala
new file mode 100644
index 0000000..8840f49
--- /dev/null
+++ b/test/files/neg/macro-invalidret/Macros_Test_2.scala
@@ -0,0 +1,18 @@
+object Macros {
+  def foo1 = macro Impls.foo1
+  def foo2 = macro Impls.foo2
+  def foo3 = macro Impls.foo3
+  def foo4 = macro ???
+  def foo5 = macro Impls.foo5
+  def foo6 = macro Impls.foo6
+}
+
+object Test extends App {
+  import Macros._
+  foo1
+  foo2
+  foo3
+  foo4
+  foo5
+  foo6
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-a.check b/test/files/neg/macro-invalidshape-a.check
deleted file mode 100644
index f38a908..0000000
--- a/test/files/neg/macro-invalidshape-a.check
+++ /dev/null
@@ -1,5 +0,0 @@
-Macros_Test_2.scala:2: error: macro body has wrong shape:
- required: macro [<implementation object>].<method name>[[<type args>]]
-  def foo(x: Any) = macro 2
-      ^
-one error found
diff --git a/test/files/neg/macro-invalidshape-a/Impls_1.scala b/test/files/neg/macro-invalidshape-a/Impls_1.scala
deleted file mode 100644
index 4467021..0000000
--- a/test/files/neg/macro-invalidshape-a/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx)(x: c.Expr[Any]) = ???
-}
diff --git a/test/files/neg/macro-invalidshape-a/Macros_Test_2.scala b/test/files/neg/macro-invalidshape-a/Macros_Test_2.scala
deleted file mode 100644
index ffff17d..0000000
--- a/test/files/neg/macro-invalidshape-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo(x: Any) = macro 2
-}
-
-object Test extends App {
-  import Macros._
-  foo(42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-b.check b/test/files/neg/macro-invalidshape-b.check
deleted file mode 100644
index 976685c..0000000
--- a/test/files/neg/macro-invalidshape-b.check
+++ /dev/null
@@ -1,5 +0,0 @@
-Macros_Test_2.scala:2: error: macro body has wrong shape:
- required: macro [<implementation object>].<method name>[[<type args>]]
-  def foo(x: Any) = macro Impls.foo(null)(null)
-      ^
-one error found
diff --git a/test/files/neg/macro-invalidshape-b/Impls_1.scala b/test/files/neg/macro-invalidshape-b/Impls_1.scala
deleted file mode 100644
index 4467021..0000000
--- a/test/files/neg/macro-invalidshape-b/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx)(x: c.Expr[Any]) = ???
-}
diff --git a/test/files/neg/macro-invalidshape-b/Macros_Test_2.scala b/test/files/neg/macro-invalidshape-b/Macros_Test_2.scala
deleted file mode 100644
index b67cd32..0000000
--- a/test/files/neg/macro-invalidshape-b/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo(x: Any) = macro Impls.foo(null)(null)
-}
-
-object Test extends App {
-  import Macros._
-  foo(42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-c.check b/test/files/neg/macro-invalidshape-c.check
deleted file mode 100644
index 0b2e9cf..0000000
--- a/test/files/neg/macro-invalidshape-c.check
+++ /dev/null
@@ -1,9 +0,0 @@
-Macros_Test_2.scala:2: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
-  def foo(x: Any) = macro {2; Impls.foo}
-                           ^
-Macros_Test_2.scala:2: error: missing arguments for method foo in object Impls;
-follow this method with `_' if you want to treat it as a partially applied function
-  def foo(x: Any) = macro {2; Impls.foo}
-                                    ^
-one warning found
-one error found
diff --git a/test/files/neg/macro-invalidshape-c/Impls_1.scala b/test/files/neg/macro-invalidshape-c/Impls_1.scala
deleted file mode 100644
index 4467021..0000000
--- a/test/files/neg/macro-invalidshape-c/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx)(x: c.Expr[Any]) = ???
-}
diff --git a/test/files/neg/macro-invalidshape-c/Macros_Test_2.scala b/test/files/neg/macro-invalidshape-c/Macros_Test_2.scala
deleted file mode 100644
index 552c371..0000000
--- a/test/files/neg/macro-invalidshape-c/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo(x: Any) = macro {2; Impls.foo}
-}
-
-object Test extends App {
-  import Macros._
-  foo(42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-d.check b/test/files/neg/macro-invalidshape-d.check
deleted file mode 100644
index e43a2ca..0000000
--- a/test/files/neg/macro-invalidshape-d.check
+++ /dev/null
@@ -1,8 +0,0 @@
-Macros_Test_2.scala:2: warning: macro is now a reserved word; usage as an identifier is deprecated
-  def foo(x: Any) = {2; macro Impls.foo}
-                        ^
-Macros_Test_2.scala:2: error: ';' expected but '.' found.
-  def foo(x: Any) = {2; macro Impls.foo}
-                                   ^
-one warning found
-one error found
diff --git a/test/files/neg/macro-invalidshape-d.flags b/test/files/neg/macro-invalidshape-d.flags
deleted file mode 100644
index 83b7265..0000000
--- a/test/files/neg/macro-invalidshape-d.flags
+++ /dev/null
@@ -1 +0,0 @@
--deprecation -language:experimental.macros
diff --git a/test/files/neg/macro-invalidshape-d/Impls_1.scala b/test/files/neg/macro-invalidshape-d/Impls_1.scala
deleted file mode 100644
index 4467021..0000000
--- a/test/files/neg/macro-invalidshape-d/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx)(x: c.Expr[Any]) = ???
-}
diff --git a/test/files/neg/macro-invalidshape-d/Macros_Test_2.scala b/test/files/neg/macro-invalidshape-d/Macros_Test_2.scala
deleted file mode 100644
index bacd9a6..0000000
--- a/test/files/neg/macro-invalidshape-d/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo(x: Any) = {2; macro Impls.foo}
-}
-
-object Test extends App {
-  import Macros._
-  foo(42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape.check b/test/files/neg/macro-invalidshape.check
new file mode 100644
index 0000000..aa694df
--- /dev/null
+++ b/test/files/neg/macro-invalidshape.check
@@ -0,0 +1,20 @@
+Macros_Test_2.scala:2: error: macro implementation reference has wrong shape. required:
+macro [<static object>].<method name>[[<type args>]] or
+macro [<macro bundle>].<method name>[[<type args>]]
+  def foo1(x: Any) = macro 2
+                           ^
+Macros_Test_2.scala:3: error: macro implementation reference has wrong shape. required:
+macro [<static object>].<method name>[[<type args>]] or
+macro [<macro bundle>].<method name>[[<type args>]]
+  def foo2(x: Any) = macro Impls.foo(null)(null)
+                                 ^
+Macros_Test_2.scala:4: error: missing arguments for method foo in object Impls;
+follow this method with `_' if you want to treat it as a partially applied function
+  def foo3(x: Any) = macro {2; Impls.foo}
+                                     ^
+Macros_Test_2.scala:7: error: macro implementation reference has wrong shape. required:
+macro [<static object>].<method name>[[<type args>]] or
+macro [<macro bundle>].<method name>[[<type args>]]
+    def foo = macro impl
+                    ^
+four errors found
diff --git a/test/files/neg/macro-invalidimpl-b.flags b/test/files/neg/macro-invalidshape.flags
similarity index 100%
rename from test/files/neg/macro-invalidimpl-b.flags
rename to test/files/neg/macro-invalidshape.flags
diff --git a/test/files/neg/macro-invalidshape/Impls_1.scala b/test/files/neg/macro-invalidshape/Impls_1.scala
new file mode 100644
index 0000000..acc6b52
--- /dev/null
+++ b/test/files/neg/macro-invalidshape/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context)(x: c.Expr[Any]) = ???
+}
diff --git a/test/files/neg/macro-invalidshape/Macros_Test_2.scala b/test/files/neg/macro-invalidshape/Macros_Test_2.scala
new file mode 100644
index 0000000..160bbf5
--- /dev/null
+++ b/test/files/neg/macro-invalidshape/Macros_Test_2.scala
@@ -0,0 +1,17 @@
+object Macros {
+  def foo1(x: Any) = macro 2
+  def foo2(x: Any) = macro Impls.foo(null)(null)
+  def foo3(x: Any) = macro {2; Impls.foo}
+  {
+    def impl(c: scala.reflect.macros.blackbox.Context) = { import c.universe._; c.Expr[Unit](q"()") }
+    def foo = macro impl
+    foo
+  }
+}
+
+object Test extends App {
+  import Macros._
+  foo1(42)
+  foo2(42)
+  foo3(42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-context-bounds.check b/test/files/neg/macro-invalidsig-context-bounds.check
deleted file mode 100644
index cbb2b06..0000000
--- a/test/files/neg/macro-invalidsig-context-bounds.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_1.scala:2: error: macro implementations cannot have implicit parameters other than WeakTypeTag evidences
-  def foo[U] = macro Impls.foo[U]
-                           ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala b/test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala
deleted file mode 100644
index c066c48..0000000
--- a/test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[U: c.WeakTypeTag: Numeric](c: Ctx) = {
-    import c.universe._
-    Literal(Constant(42))
-  }
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-context-bounds/Macros_Test_1.scala b/test/files/neg/macro-invalidsig-context-bounds/Macros_Test_1.scala
deleted file mode 100644
index 5b4602f..0000000
--- a/test/files/neg/macro-invalidsig-context-bounds/Macros_Test_1.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo[U] = macro Impls.foo[U]
-}
-
-object Test extends App {
-  import Macros._
-  println(foo[String])
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-badargc.check b/test/files/neg/macro-invalidsig-ctx-badargc.check
deleted file mode 100644
index 7e8bcba..0000000
--- a/test/files/neg/macro-invalidsig-ctx-badargc.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Any]
- found   : : Nothing
-number of parameter sections differ
-  def foo = macro Impls.foo
-                        ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-ctx-badargc/Impls_1.scala b/test/files/neg/macro-invalidsig-ctx-badargc/Impls_1.scala
deleted file mode 100644
index 4d5d291..0000000
--- a/test/files/neg/macro-invalidsig-ctx-badargc/Impls_1.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Impls {
-  def foo = ???
-}
diff --git a/test/files/neg/macro-invalidsig-ctx-badargc/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-ctx-badargc/Macros_Test_2.scala
deleted file mode 100644
index 96a8de2..0000000
--- a/test/files/neg/macro-invalidsig-ctx-badargc/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo = macro Impls.foo
-}
-
-object Test extends App {
-  import Macros._
-  foo
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-badtype.check b/test/files/neg/macro-invalidsig-ctx-badtype.check
deleted file mode 100644
index 837ec3e..0000000
--- a/test/files/neg/macro-invalidsig-ctx-badtype.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Any]
- found   : (c: scala.reflect.api.Universe): Nothing
-type mismatch for parameter c: scala.reflect.macros.Context does not conform to scala.reflect.api.Universe
-  def foo = macro Impls.foo
-                        ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-ctx-badtype/Impls_1.scala b/test/files/neg/macro-invalidsig-ctx-badtype/Impls_1.scala
deleted file mode 100644
index cf1a4cf..0000000
--- a/test/files/neg/macro-invalidsig-ctx-badtype/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.api.{Universe => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = ???
-}
diff --git a/test/files/neg/macro-invalidsig-ctx-badtype/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-ctx-badtype/Macros_Test_2.scala
deleted file mode 100644
index 96a8de2..0000000
--- a/test/files/neg/macro-invalidsig-ctx-badtype/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo = macro Impls.foo
-}
-
-object Test extends App {
-  import Macros._
-  foo
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-badvarargs.check b/test/files/neg/macro-invalidsig-ctx-badvarargs.check
deleted file mode 100644
index a96421a..0000000
--- a/test/files/neg/macro-invalidsig-ctx-badvarargs.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Any]
- found   : (cs: scala.reflect.macros.Context*): Nothing
-types incompatible for parameter cs: corresponding is not a vararg parameter
-  def foo = macro Impls.foo
-                        ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-ctx-badvarargs/Impls_1.scala b/test/files/neg/macro-invalidsig-ctx-badvarargs/Impls_1.scala
deleted file mode 100644
index c4ed8be..0000000
--- a/test/files/neg/macro-invalidsig-ctx-badvarargs/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(cs: Ctx*) = ???
-}
diff --git a/test/files/neg/macro-invalidsig-ctx-badvarargs/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-ctx-badvarargs/Macros_Test_2.scala
deleted file mode 100644
index 96a8de2..0000000
--- a/test/files/neg/macro-invalidsig-ctx-badvarargs/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo = macro Impls.foo
-}
-
-object Test extends App {
-  import Macros._
-  foo
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-noctx.check b/test/files/neg/macro-invalidsig-ctx-noctx.check
deleted file mode 100644
index fd3632e..0000000
--- a/test/files/neg/macro-invalidsig-ctx-noctx.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
- required: (c: scala.reflect.macros.Context)(x: c.Expr[Any]): c.Expr[Any]
- found   : (c: scala.reflect.macros.Context): Nothing
-number of parameter sections differ
-  def foo(x: Any) = macro Impls.foo
-                                ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-ctx-noctx/Impls_1.scala b/test/files/neg/macro-invalidsig-ctx-noctx/Impls_1.scala
deleted file mode 100644
index 6904cfb..0000000
--- a/test/files/neg/macro-invalidsig-ctx-noctx/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = ???
-}
diff --git a/test/files/neg/macro-invalidsig-ctx-noctx/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-ctx-noctx/Macros_Test_2.scala
deleted file mode 100644
index e053cf9..0000000
--- a/test/files/neg/macro-invalidsig-ctx-noctx/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo(x: Any) = macro Impls.foo
-}
-
-object Test extends App {
-  import Macros._
-  foo(42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-implicit-params.check b/test/files/neg/macro-invalidsig-implicit-params.check
deleted file mode 100644
index 900098f..0000000
--- a/test/files/neg/macro-invalidsig-implicit-params.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Impls_Macros_1.scala:18: error: macro implementations cannot have implicit parameters other than WeakTypeTag evidences
-  def foo_targs[U](x: Int) = macro Impls.foo_targs[T, U]
-                                         ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala
deleted file mode 100644
index 7a72934..0000000
--- a/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo_targs[T, U: c.WeakTypeTag](c: Ctx)(implicit x: c.Expr[Int]) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val body = Block(List(
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("invoking foo_targs...")))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("type of prefix is: " + prefix.staticType)))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("U is: " + implicitly[c.WeakTypeTag[U]].tpe))))),
-      Literal(Constant(())))
-    c.Expr[Unit](body)
-  }
-}
-
-class Macros[T] {
-  def foo_targs[U](x: Int) = macro Impls.foo_targs[T, U]
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-implicit-params/Test_2.scala b/test/files/neg/macro-invalidsig-implicit-params/Test_2.scala
deleted file mode 100644
index 90e850d..0000000
--- a/test/files/neg/macro-invalidsig-implicit-params/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
-  println("foo_targs:")
-  new Macros[Int]().foo_targs[String](42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-badargc.check b/test/files/neg/macro-invalidsig-params-badargc.check
deleted file mode 100644
index bb26b24..0000000
--- a/test/files/neg/macro-invalidsig-params-badargc.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Impls_Macros_1.scala:8: error: macro implementation has incompatible shape:
- required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Any]
- found   : (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): Nothing
-parameter lists have different length, found extra parameter y: c.Expr[Int]
-  def foo(x: Int) = macro Impls.foo
-                                ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-params-badargc/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-params-badargc/Impls_Macros_1.scala
deleted file mode 100644
index ae16612..0000000
--- a/test/files/neg/macro-invalidsig-params-badargc/Impls_Macros_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int], y: c.Expr[Int]) = ???
-}
-
-object Macros {
-  def foo(x: Int) = macro Impls.foo
-}
diff --git a/test/files/neg/macro-invalidsig-params-badargc/Test_2.scala b/test/files/neg/macro-invalidsig-params-badargc/Test_2.scala
deleted file mode 100644
index cbd6232..0000000
--- a/test/files/neg/macro-invalidsig-params-badargc/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
-  import Macros._
-  foo(42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-badtype.check b/test/files/neg/macro-invalidsig-params-badtype.check
index 8227614..159754c 100644
--- a/test/files/neg/macro-invalidsig-params-badtype.check
+++ b/test/files/neg/macro-invalidsig-params-badtype.check
@@ -1,7 +1,8 @@
 Impls_Macros_1.scala:8: error: macro implementation has incompatible shape:
- required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Any]
- found   : (c: scala.reflect.macros.Context)(x: c.universe.Tree): Nothing
-type mismatch for parameter x: c.Expr[Int] does not conform to c.universe.Tree
+ required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int]): c.Expr[Nothing]
+ or      : (c: scala.reflect.macros.blackbox.Context)(x: c.Tree): c.Tree
+ found   : (c: scala.reflect.macros.blackbox.Context)(x: Int): Nothing
+type mismatch for parameter x: c.Expr[Int] does not conform to Int
   def foo(x: Int) = macro Impls.foo
                                 ^
 one error found
diff --git a/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala
index ab90b85..e549cc9 100644
--- a/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala
+++ b/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala
@@ -1,7 +1,7 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.universe.Tree) = ???
+  def foo(c: Context)(x: Int) = ???
 }
 
 object Macros {
diff --git a/test/files/neg/macro-invalidsig-params-badtype/Test_2.scala b/test/files/neg/macro-invalidsig-params-badtype/Test_2.scala
deleted file mode 100644
index cbd6232..0000000
--- a/test/files/neg/macro-invalidsig-params-badtype/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
-  import Macros._
-  foo(42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-badvarargs.check b/test/files/neg/macro-invalidsig-params-badvarargs.check
deleted file mode 100644
index cb4d2d9..0000000
--- a/test/files/neg/macro-invalidsig-params-badvarargs.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Impls_Macros_1.scala:8: error: macro implementation has incompatible shape:
- required: (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Any]
- found   : (c: scala.reflect.macros.Context)(xs: c.Expr[Int]*): Nothing
-parameter lists have different length, required extra parameter y: c.Expr[Int]
-  def foo(x: Int, y: Int) = macro Impls.foo
-                                        ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-params-badvarargs/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-params-badvarargs/Impls_Macros_1.scala
deleted file mode 100644
index b4c75ad..0000000
--- a/test/files/neg/macro-invalidsig-params-badvarargs/Impls_Macros_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx)(xs: c.Expr[Int]*) = ???
-}
-
-object Macros {
-  def foo(x: Int, y: Int) = macro Impls.foo
-}
diff --git a/test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala b/test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala
deleted file mode 100644
index fa50ac4..0000000
--- a/test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
-  import Macros._
-  foo(42, 100)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-namemismatch.check b/test/files/neg/macro-invalidsig-params-namemismatch.check
deleted file mode 100644
index 82612a9..0000000
--- a/test/files/neg/macro-invalidsig-params-namemismatch.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Impls_Macros_1.scala:8: error: macro implementation has incompatible shape:
- required: (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Any]
- found   : (c: scala.reflect.macros.Context)(y: c.Expr[Int], x: c.Expr[Int]): Nothing
-parameter names differ: x != y
-  def foo(x: Int, y: Int) = macro Impls.foo
-                                        ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-params-namemismatch/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-params-namemismatch/Impls_Macros_1.scala
deleted file mode 100644
index c7cf0b0..0000000
--- a/test/files/neg/macro-invalidsig-params-namemismatch/Impls_Macros_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx)(y: c.Expr[Int], x: c.Expr[Int]) = ???
-}
-
-object Macros {
-  def foo(x: Int, y: Int) = macro Impls.foo
-}
diff --git a/test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala b/test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala
deleted file mode 100644
index fa50ac4..0000000
--- a/test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
-  import Macros._
-  foo(42, 100)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-badtype.check b/test/files/neg/macro-invalidsig-tparams-badtype.check
deleted file mode 100644
index 273d011..0000000
--- a/test/files/neg/macro-invalidsig-tparams-badtype.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Any]
- found   : (c: scala.reflect.macros.Context)(U: c.universe.Type): Nothing
-number of parameter sections differ
-  def foo[U] = macro Impls.foo[U]
-                           ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-badtype/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-badtype/Impls_1.scala
deleted file mode 100644
index dbeca17..0000000
--- a/test/files/neg/macro-invalidsig-tparams-badtype/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[U](c: Ctx)(U: c.universe.Type) = ???
-}
diff --git a/test/files/neg/macro-invalidsig-tparams-badtype/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-badtype/Macros_Test_2.scala
deleted file mode 100644
index a82e813..0000000
--- a/test/files/neg/macro-invalidsig-tparams-badtype/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo[U] = macro Impls.foo[U]
-}
-
-object Test extends App {
-  import Macros._
-  foo[Int]
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-a.check b/test/files/neg/macro-invalidsig-tparams-bounds-a.check
deleted file mode 100644
index b6248a1..0000000
--- a/test/files/neg/macro-invalidsig-tparams-bounds-a.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:2: error: type arguments [U] do not conform to method foo's type parameter bounds [U <: String]
-  def foo[U] = macro Impls.foo[U]
-                              ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-a/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-bounds-a/Impls_1.scala
deleted file mode 100644
index 89020de..0000000
--- a/test/files/neg/macro-invalidsig-tparams-bounds-a/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[U <: String](c: Ctx) = ???
-}
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-a/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-bounds-a/Macros_Test_2.scala
deleted file mode 100644
index a82e813..0000000
--- a/test/files/neg/macro-invalidsig-tparams-bounds-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo[U] = macro Impls.foo[U]
-}
-
-object Test extends App {
-  import Macros._
-  foo[Int]
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-b.check b/test/files/neg/macro-invalidsig-tparams-bounds-b.check
deleted file mode 100644
index 74eb522..0000000
--- a/test/files/neg/macro-invalidsig-tparams-bounds-b.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:2: error: type arguments [U] do not conform to method foo's type parameter bounds [U <: String]
-  def foo[U <: Int] = macro Impls.foo[U]
-                                     ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-b/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-bounds-b/Impls_1.scala
deleted file mode 100644
index 89020de..0000000
--- a/test/files/neg/macro-invalidsig-tparams-bounds-b/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[U <: String](c: Ctx) = ???
-}
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-b/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-bounds-b/Macros_Test_2.scala
deleted file mode 100644
index eed6369..0000000
--- a/test/files/neg/macro-invalidsig-tparams-bounds-b/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo[U <: Int] = macro Impls.foo[U]
-}
-
-object Test extends App {
-  import Macros._
-  foo[Int]
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-a.check b/test/files/neg/macro-invalidsig-tparams-notparams-a.check
deleted file mode 100644
index c731259..0000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-a.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:2: error: macro implementation reference has too few type arguments for method foo: [U](c: scala.reflect.macros.Context)(implicit evidence$1: c.WeakTypeTag[U])Nothing
-  def foo = macro Impls.foo
-                        ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala
deleted file mode 100644
index f8b3c92..0000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[U: c.WeakTypeTag](c: Ctx) = ???
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-a/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-notparams-a/Macros_Test_2.scala
deleted file mode 100644
index 96a8de2..0000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo = macro Impls.foo
-}
-
-object Test extends App {
-  import Macros._
-  foo
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-b.check b/test/files/neg/macro-invalidsig-tparams-notparams-b.check
deleted file mode 100644
index e3d4505..0000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-b.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:3: error: macro implementation reference has too few type arguments for method foo: [T, U, V](c: scala.reflect.macros.Context)(implicit evidence$1: c.WeakTypeTag[T], implicit evidence$2: c.WeakTypeTag[U], implicit V: c.WeakTypeTag[V])c.Expr[Unit]
-    def foo[V] = macro Impls.foo
-                             ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala
deleted file mode 100644
index baf3aab..0000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[T: c.WeakTypeTag, U: c.WeakTypeTag, V](c: Ctx)(implicit V: c.WeakTypeTag[V]): c.Expr[Unit] = {
-    println(implicitly[c.WeakTypeTag[T]])
-    println(implicitly[c.WeakTypeTag[U]])
-    println(V)
-    c.literalUnit
-  }
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-b/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-notparams-b/Macros_Test_2.scala
deleted file mode 100644
index 7d02bf6..0000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-b/Macros_Test_2.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-class D[T] {
-  class C[U] {
-    def foo[V] = macro Impls.foo
-  }
-}
-
-object Test extends App {
-  val outer1 = new D[Int]
-  val outer2 = new outer1.C[String]
-  outer2.foo[Boolean]
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-c.check b/test/files/neg/macro-invalidsig-tparams-notparams-c.check
deleted file mode 100644
index 0be0b6f..0000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-c.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:3: error: wrong number of type parameters for method foo: [T, U, V](c: scala.reflect.macros.Context)(implicit evidence$1: c.WeakTypeTag[T], implicit evidence$2: c.WeakTypeTag[U], implicit V: c.WeakTypeTag[V])c.Expr[Unit]
-    def foo[V] = macro Impls.foo[V]
-                                ^
-one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala
deleted file mode 100644
index 44b4ed6..0000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[T: c.WeakTypeTag, U: c.WeakTypeTag, V](c: Ctx)(implicit V: c.WeakTypeTag[V]): c.Expr[Unit] = {
-    import c.universe._
-    println(implicitly[c.WeakTypeTag[T]])
-    println(implicitly[c.WeakTypeTag[U]])
-    println(V)
-    c.literalUnit
-  }
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-c/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-notparams-c/Macros_Test_2.scala
deleted file mode 100644
index 109e142..0000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-c/Macros_Test_2.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-class D[T] {
-  class C[U] {
-    def foo[V] = macro Impls.foo[V]
-  }
-}
-
-object Test extends App {
-  val outer1 = new D[Int]
-  val outer2 = new outer1.C[String]
-  outer2.foo[Boolean]
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig.check b/test/files/neg/macro-invalidsig.check
new file mode 100644
index 0000000..8898ffc
--- /dev/null
+++ b/test/files/neg/macro-invalidsig.check
@@ -0,0 +1,85 @@
+Macros_Test_2.scala:2: error: macro implementations cannot have implicit parameters other than WeakTypeTag evidences
+  def foo[U]: Int = macro Impls1.foo[U]
+                                 ^
+Macros_Test_2.scala:6: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Nothing]
+ or      : (c: scala.reflect.macros.blackbox.Context): c.Tree
+ found   : : Nothing
+number of parameter sections differ
+  def foo = macro Impls2.foo
+                         ^
+Macros_Test_2.scala:10: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Nothing]
+ or      : (c: scala.reflect.macros.blackbox.Context): c.Tree
+ found   : (c: scala.reflect.api.Universe): Nothing
+type mismatch for parameter c: scala.reflect.macros.blackbox.Context does not conform to scala.reflect.api.Universe
+  def foo = macro Impls3.foo
+                         ^
+Macros_Test_2.scala:14: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Nothing]
+ or      : (c: scala.reflect.macros.blackbox.Context): c.Tree
+ found   : (cs: scala.reflect.macros.blackbox.Context*): Nothing
+types incompatible for parameter cs: corresponding is not a vararg parameter
+  def foo = macro Impls4.foo
+                         ^
+Macros_Test_2.scala:18: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any]): c.Expr[Nothing]
+ or      : (c: scala.reflect.macros.blackbox.Context)(x: c.Tree): c.Tree
+ found   : (c: scala.reflect.macros.blackbox.Context): Nothing
+number of parameter sections differ
+  def foo(x: Any) = macro Impls5.foo
+                                 ^
+Macros_Test_2.scala:22: error: macro implementations cannot have implicit parameters other than WeakTypeTag evidences
+  def foo[U](x: Int) = macro Impls6.foo[T, U]
+                                    ^
+Macros_Test_2.scala:26: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int]): c.Expr[Nothing]
+ or      : (c: scala.reflect.macros.blackbox.Context)(x: c.Tree): c.Tree
+ found   : (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int], y: c.Expr[Int]): Nothing
+parameter lists have different length, found extra parameter y: c.Expr[Int]
+  def foo(x: Int) = macro Impls7.foo
+                                 ^
+Macros_Test_2.scala:30: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int]): c.Expr[Nothing]
+ or      : (c: scala.reflect.macros.blackbox.Context)(x: c.Tree): c.Tree
+ found   : (c: scala.reflect.macros.blackbox.Context)(x: c.universe.Symbol): Nothing
+type mismatch for parameter x: c.Expr[Int] does not conform to c.universe.Symbol
+  def foo(x: Int) = macro Impls8.foo
+                                 ^
+Macros_Test_2.scala:34: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Nothing]
+ or      : (c: scala.reflect.macros.blackbox.Context)(x: c.Tree, y: c.Tree): c.Tree
+ found   : (c: scala.reflect.macros.blackbox.Context)(xs: c.Expr[Int]*): Nothing
+parameter lists have different length, required extra parameter y: c.Expr[Int]
+  def foo(x: Int, y: Int) = macro Impls9.foo
+                                         ^
+Macros_Test_2.scala:38: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Nothing]
+ or      : (c: scala.reflect.macros.blackbox.Context)(x: c.Tree, y: c.Tree): c.Tree
+ found   : (c: scala.reflect.macros.blackbox.Context)(y: c.Expr[Int], x: c.Expr[Int]): Nothing
+parameter names differ: x != y
+  def foo(x: Int, y: Int) = macro Impls10.foo
+                                          ^
+Macros_Test_2.scala:42: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Nothing]
+ or      : (c: scala.reflect.macros.blackbox.Context): c.Tree
+ found   : (c: scala.reflect.macros.blackbox.Context)(U: c.universe.Type): Nothing
+number of parameter sections differ
+  def foo[U] = macro Impls11.foo[U]
+                             ^
+Macros_Test_2.scala:46: error: type arguments [U] do not conform to method foo's type parameter bounds [U <: String]
+  def foo[U] = macro Impls12.foo[U]
+                                ^
+Macros_Test_2.scala:50: error: type arguments [U] do not conform to method foo's type parameter bounds [U <: String]
+  def foo[U <: Int] = macro Impls13.foo[U]
+                                       ^
+Macros_Test_2.scala:54: error: macro implementation reference has too few type arguments for method foo: [U](c: scala.reflect.macros.blackbox.Context)(implicit evidence$4: c.WeakTypeTag[U])Nothing
+  def foo = macro Impls14.foo
+                          ^
+Macros_Test_2.scala:59: error: macro implementation reference has too few type arguments for method foo: [T, U, V](c: scala.reflect.macros.blackbox.Context)(implicit evidence$5: c.WeakTypeTag[T], implicit evidence$6: c.WeakTypeTag[U], implicit V: c.WeakTypeTag[V])c.Expr[Unit]
+    def foo15[V]: Unit = macro Impls15.foo
+                                       ^
+Macros_Test_2.scala:60: error: wrong number of type parameters for method foo: [T, U, V](c: scala.reflect.macros.blackbox.Context)(implicit evidence$7: c.WeakTypeTag[T], implicit evidence$8: c.WeakTypeTag[U], implicit V: c.WeakTypeTag[V])c.Expr[Unit]
+    def foo16[V]: Unit = macro Impls16.foo[V]
+                                          ^
+16 errors found
diff --git a/test/files/neg/macro-invalidimpl-c.flags b/test/files/neg/macro-invalidsig.flags
similarity index 100%
rename from test/files/neg/macro-invalidimpl-c.flags
rename to test/files/neg/macro-invalidsig.flags
diff --git a/test/files/neg/macro-invalidsig/Impls_1.scala b/test/files/neg/macro-invalidsig/Impls_1.scala
new file mode 100644
index 0000000..b0a3912
--- /dev/null
+++ b/test/files/neg/macro-invalidsig/Impls_1.scala
@@ -0,0 +1,86 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.blackbox.Context
+
+object Impls1 {
+  def foo[U: c.WeakTypeTag: Numeric](c: Context) = { import c.universe._; q"42" }
+}
+
+object Impls2 {
+  def foo = ???
+}
+
+object Impls3 {
+  def foo(c: scala.reflect.api.Universe) = ???
+}
+
+object Impls4 {
+  def foo(cs: Context*) = ???
+}
+
+object Impls5 {
+  def foo(c: Context) = ???
+}
+
+object Impls6 {
+  def foo[T, U: c.WeakTypeTag](c: Context)(implicit x: c.Expr[Int]) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    c.Expr[Unit](q"""
+      println("invoking foo_targs...")
+      println("type of prefix is: " + ${prefix.staticType.toString})
+      println("U is: " + ${implicitly[c.WeakTypeTag[U]].tpe.toString})
+    """)
+  }
+}
+
+object Impls7 {
+  def foo(c: Context)(x: c.Expr[Int], y: c.Expr[Int]) = ???
+}
+
+object Impls8 {
+  def foo(c: Context)(x: c.universe.Symbol) = ???
+}
+
+object Impls9 {
+  def foo(c: Context)(xs: c.Expr[Int]*) = ???
+}
+
+object Impls10 {
+  def foo(c: Context)(y: c.Expr[Int], x: c.Expr[Int]) = ???
+}
+
+object Impls11 {
+  def foo[U](c: Context)(U: c.universe.Type) = ???
+}
+
+object Impls12 {
+  def foo[U <: String](c: Context) = ???
+}
+
+object Impls13 {
+  def foo[U <: String](c: Context) = ???
+}
+
+object Impls14 {
+  def foo[U: c.WeakTypeTag](c: Context) = ???
+}
+
+object Impls15 {
+  def foo[T: c.WeakTypeTag, U: c.WeakTypeTag, V](c: Context)(implicit V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+    import c.universe._
+    println(implicitly[c.WeakTypeTag[T]])
+    println(implicitly[c.WeakTypeTag[U]])
+    println(V)
+    c.Expr[Unit](q"()")
+  }
+}
+
+object Impls16 {
+  def foo[T: c.WeakTypeTag, U: c.WeakTypeTag, V](c: Context)(implicit V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+    import c.universe._
+    println(implicitly[c.WeakTypeTag[T]])
+    println(implicitly[c.WeakTypeTag[U]])
+    println(V)
+    c.Expr[Unit](q"()")
+  }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig/Macros_Test_2.scala b/test/files/neg/macro-invalidsig/Macros_Test_2.scala
new file mode 100644
index 0000000..89a5302
--- /dev/null
+++ b/test/files/neg/macro-invalidsig/Macros_Test_2.scala
@@ -0,0 +1,83 @@
+object Macros1 {
+  def foo[U]: Int = macro Impls1.foo[U]
+}
+
+object Macros2 {
+  def foo = macro Impls2.foo
+}
+
+object Macros3 {
+  def foo = macro Impls3.foo
+}
+
+object Macros4 {
+  def foo = macro Impls4.foo
+}
+
+object Macros5 {
+  def foo(x: Any) = macro Impls5.foo
+}
+
+class Macros6[T] {
+  def foo[U](x: Int) = macro Impls6.foo[T, U]
+}
+
+object Macros7 {
+  def foo(x: Int) = macro Impls7.foo
+}
+
+object Macros8 {
+  def foo(x: Int) = macro Impls8.foo
+}
+
+object Macros9 {
+  def foo(x: Int, y: Int) = macro Impls9.foo
+}
+
+object Macros10 {
+  def foo(x: Int, y: Int) = macro Impls10.foo
+}
+
+object Macros11 {
+  def foo[U] = macro Impls11.foo[U]
+}
+
+object Macros12 {
+  def foo[U] = macro Impls12.foo[U]
+}
+
+object Macros13 {
+  def foo[U <: Int] = macro Impls13.foo[U]
+}
+
+object Macros14 {
+  def foo = macro Impls14.foo
+}
+
+class D[T] {
+  class C[U] {
+    def foo15[V]: Unit = macro Impls15.foo
+    def foo16[V]: Unit = macro Impls16.foo[V]
+  }
+}
+
+object Test extends App {
+  println(Macros1.foo[String])
+  println(Macros2.foo)
+  println(Macros3.foo)
+  println(Macros4.foo)
+  println(Macros5.foo(42))
+  println(new Macros6[Int]().foo[String](42))
+  println(Macros7.foo(42))
+  println(Macros8.foo)
+  println(Macros9.foo(4, 2))
+  println(Macros10.foo(4, 2))
+  println(Macros11.foo[Int])
+  println(Macros12.foo[Int])
+  println(Macros13.foo[Int])
+  println(Macros14.foo)
+  val outer1 = new D[Int]
+  val outer2 = new outer1.C[String]
+  outer2.foo15[Boolean]
+  outer2.foo16[Boolean]
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-badargs.check b/test/files/neg/macro-invalidusage-badargs.check
index 294cfd0..4c11154 100644
--- a/test/files/neg/macro-invalidusage-badargs.check
+++ b/test/files/neg/macro-invalidusage-badargs.check
@@ -1,6 +1,18 @@
-Macros_Test_2.scala:7: error: type mismatch;
+Macros_Test_2.scala:5: error: type mismatch;
  found   : String("42")
  required: Int
-  val s: String = foo("42")
-                      ^
-one error found
+  foo("42")
+      ^
+Macros_Test_2.scala:6: error: too few argument lists for macro invocation
+  foo
+  ^
+Macros_Test_2.scala:7: error: Int does not take parameters
+  foo(4)(2)
+        ^
+Macros_Test_2.scala:8: error: macro applications do not support named and/or default arguments
+  foo()
+     ^
+Macros_Test_2.scala:9: error: too many arguments for macro method foo: (x: Int)Int
+  foo(4, 2)
+     ^
+5 errors found
diff --git a/test/files/neg/macro-invalidusage-badargs/Impls_1.scala b/test/files/neg/macro-invalidusage-badargs/Impls_1.scala
index 52c9f9c..8765cfb 100644
--- a/test/files/neg/macro-invalidusage-badargs/Impls_1.scala
+++ b/test/files/neg/macro-invalidusage-badargs/Impls_1.scala
@@ -1,5 +1,5 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int]) = x
+  def foo(c: Context)(x: c.Expr[Int]) = x
 }
diff --git a/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala
index a6af1bb..cf8accf 100644
--- a/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala
+++ b/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala
@@ -1,8 +1,10 @@
-object Macros {
-  def foo(x: Int) = macro Impls.foo
-}
+object Macros { def foo(x: Int): Int = macro Impls.foo }
+import Macros._
 
 object Test extends App {
-  import Macros._
-  val s: String = foo("42")
+  foo("42")
+  foo
+  foo(4)(2)
+  foo()
+  foo(4, 2)
 }
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-badbounds-a/Impls_1.scala b/test/files/neg/macro-invalidusage-badbounds-a/Impls_1.scala
deleted file mode 100644
index 6ee71a3..0000000
--- a/test/files/neg/macro-invalidusage-badbounds-a/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[U <: String](c: Ctx) = c.literalUnit
-}
diff --git a/test/files/neg/macro-invalidusage-badbounds-a/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-badbounds-a/Macros_Test_2.scala
deleted file mode 100644
index 3139599..0000000
--- a/test/files/neg/macro-invalidusage-badbounds-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo[U <: String] = macro Impls.foo[U]
-}
-
-object Test extends App {
-  import Macros._
-  foo[Int]
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-badbounds-a.check b/test/files/neg/macro-invalidusage-badbounds.check
similarity index 100%
rename from test/files/neg/macro-invalidusage-badbounds-a.check
rename to test/files/neg/macro-invalidusage-badbounds.check
diff --git a/test/files/neg/macro-invalidimpl-d.flags b/test/files/neg/macro-invalidusage-badbounds.flags
similarity index 100%
rename from test/files/neg/macro-invalidimpl-d.flags
rename to test/files/neg/macro-invalidusage-badbounds.flags
diff --git a/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala b/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala
new file mode 100644
index 0000000..1769da9
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo[U <: String](c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
+}
diff --git a/test/files/neg/macro-invalidusage-badbounds/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-badbounds/Macros_Test_2.scala
new file mode 100644
index 0000000..7639770
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badbounds/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+  def foo[U <: String]: Unit = macro Impls.foo[U]
+}
+
+object Test extends App {
+  import Macros._
+  foo[Int]
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-badtargs.check b/test/files/neg/macro-invalidusage-badtargs.check
index 73801ab..722ec03 100644
--- a/test/files/neg/macro-invalidusage-badtargs.check
+++ b/test/files/neg/macro-invalidusage-badtargs.check
@@ -1,4 +1,18 @@
-Macros_Test_2.scala:7: error: macro method foo: (x: Int)Int does not take type parameters.
-  val s: String = foo[String](42)
-                     ^
-one error found
+Macros_Test_2.scala:13: error: macro method foo1: (x: Int)Int does not take type parameters.
+  foo1[String](42)
+      ^
+Macros_Test_2.scala:14: error: wrong number of type parameters for macro method foo2: [T](x: Int)Int
+  foo2[String, String](42)
+      ^
+Macros_Test_2.scala:15: error: wrong number of type parameters for macro method foo3: [T, U](x: Int)Int
+  foo3[String](42)
+      ^
+Macros_Test_2.scala:16: error: String takes no type parameters, expected: one
+  foo4[String](42)
+       ^
+Macros_Test_2.scala:17: error: kinds of the type arguments (List) do not conform to the expected kinds of the type parameters (type T).
+List's type parameters do not match type T's expected parameters:
+type A has no type parameters, but type U has one
+  foo5[List](42)
+      ^
+5 errors found
diff --git a/test/files/neg/macro-invalidusage-badtargs/Impls_1.scala b/test/files/neg/macro-invalidusage-badtargs/Impls_1.scala
index 52c9f9c..8765cfb 100644
--- a/test/files/neg/macro-invalidusage-badtargs/Impls_1.scala
+++ b/test/files/neg/macro-invalidusage-badtargs/Impls_1.scala
@@ -1,5 +1,5 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int]) = x
+  def foo(c: Context)(x: c.Expr[Int]) = x
 }
diff --git a/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala
index c54093b..47e51bb 100644
--- a/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala
+++ b/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala
@@ -1,8 +1,18 @@
+import scala.language.higherKinds
+
 object Macros {
-  def foo(x: Int) = macro Impls.foo
+  def foo1(x: Int): Int = macro Impls.foo
+  def foo2[T](x: Int): Int = macro Impls.foo
+  def foo3[T, U](x: Int): Int = macro Impls.foo
+  def foo4[T[_]](x: Int): Int = macro Impls.foo
+  def foo5[T[U[_]]](x: Int): Int = macro Impls.foo
 }
 
 object Test extends App {
   import Macros._
-  val s: String = foo[String](42)
+  foo1[String](42)
+  foo2[String, String](42)
+  foo3[String](42)
+  foo4[String](42)
+  foo5[List](42)
 }
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala b/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala
index 8d7fdf3..776f8bf 100644
--- a/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala
+++ b/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala
@@ -1,9 +1,8 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx) = {
+  def foo(c: Context) = {
     import c.universe._
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
-    c.Expr[Unit](body)
+    c.Expr[Unit](q"""println("it works")""")
   }
 }
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-methodvaluesyntax/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-methodvaluesyntax/Macros_Test_2.scala
index 343cec9..578aa45 100644
--- a/test/files/neg/macro-invalidusage-methodvaluesyntax/Macros_Test_2.scala
+++ b/test/files/neg/macro-invalidusage-methodvaluesyntax/Macros_Test_2.scala
@@ -1,5 +1,5 @@
 object Macros {
-  def foo = macro Impls.foo
+  def foo: Unit = macro Impls.foo
 }
 
 object Test extends App {
diff --git a/test/files/neg/macro-invalidusage-nontypeable.check b/test/files/neg/macro-invalidusage-nontypeable.check
new file mode 100644
index 0000000..88e6057
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-nontypeable.check
@@ -0,0 +1,4 @@
+Test_2.scala:2: error: not found: value IDoNotExist
+  Macros.foo
+         ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-e.flags b/test/files/neg/macro-invalidusage-nontypeable.flags
similarity index 100%
rename from test/files/neg/macro-invalidimpl-e.flags
rename to test/files/neg/macro-invalidusage-nontypeable.flags
diff --git a/test/files/neg/macro-invalidusage-nontypeable/Impls_Macros_1.scala b/test/files/neg/macro-invalidusage-nontypeable/Impls_Macros_1.scala
new file mode 100644
index 0000000..b6b9611
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-nontypeable/Impls_Macros_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.universe._
+    val body = Ident(TermName("IDoNotExist"))
+    c.Expr[Int](body)
+  }
+}
+
+object Macros {
+  def foo = macro Impls.foo
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-nontypeable/Test_2.scala b/test/files/neg/macro-invalidusage-nontypeable/Test_2.scala
new file mode 100644
index 0000000..acfddae
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-nontypeable/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+  Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-presuper.check b/test/files/neg/macro-invalidusage-presuper.check
new file mode 100644
index 0000000..c0b1ec0
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-presuper.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:3: error: only concrete field definitions allowed in early object initialization section
+class D extends { def x = macro impl } with AnyRef
+                      ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-f.flags b/test/files/neg/macro-invalidusage-presuper.flags
similarity index 100%
rename from test/files/neg/macro-invalidimpl-f.flags
rename to test/files/neg/macro-invalidusage-presuper.flags
diff --git a/test/files/neg/macro-invalidusage-presuper/Impls_1.scala b/test/files/neg/macro-invalidusage-presuper/Impls_1.scala
new file mode 100644
index 0000000..ea98f01
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-presuper/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def impl(c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-presuper/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-presuper/Macros_Test_2.scala
new file mode 100644
index 0000000..ff46a59
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-presuper/Macros_Test_2.scala
@@ -0,0 +1,3 @@
+import Impls._
+
+class D extends { def x = macro impl } with AnyRef
\ No newline at end of file
diff --git a/test/files/neg/macro-noexpand/Impls_1.scala b/test/files/neg/macro-noexpand/Impls_1.scala
index 4467021..acc6b52 100644
--- a/test/files/neg/macro-noexpand/Impls_1.scala
+++ b/test/files/neg/macro-noexpand/Impls_1.scala
@@ -1,5 +1,5 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Any]) = ???
+  def foo(c: Context)(x: c.Expr[Any]) = ???
 }
diff --git a/test/files/neg/macro-nontypeablebody/Impls_1.scala b/test/files/neg/macro-nontypeablebody/Impls_1.scala
index 4467021..acc6b52 100644
--- a/test/files/neg/macro-nontypeablebody/Impls_1.scala
+++ b/test/files/neg/macro-nontypeablebody/Impls_1.scala
@@ -1,5 +1,5 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Any]) = ???
+  def foo(c: Context)(x: c.Expr[Any]) = ???
 }
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-a.check b/test/files/neg/macro-override-macro-overrides-abstract-method-a.check
index 895e0dc..6b5d301 100644
--- a/test/files/neg/macro-override-macro-overrides-abstract-method-a.check
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-a.check
@@ -1,5 +1,5 @@
 Impls_Macros_1.scala:12: error: overriding method foo in trait Foo of type (x: Int)Int;
- macro method foo cannot override an abstract method
-  def foo(x: Int) = macro Impls.impl
+ macro method foo cannot be used here - term macros cannot override abstract methods
+  def foo(x: Int): Int = macro Impls.impl
       ^
 one error found
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-a/Impls_Macros_1.scala b/test/files/neg/macro-override-macro-overrides-abstract-method-a/Impls_Macros_1.scala
index e43264f..916b454 100644
--- a/test/files/neg/macro-override-macro-overrides-abstract-method-a/Impls_Macros_1.scala
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-a/Impls_Macros_1.scala
@@ -1,7 +1,7 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def impl(c: Ctx)(x: c.Expr[Int]) = x
+  def impl(c: Context)(x: c.Expr[Int]) = x
 }
 
 trait Foo {
@@ -9,5 +9,5 @@ trait Foo {
 }
 
 object Macros extends Foo {
-  def foo(x: Int) = macro Impls.impl
+  def foo(x: Int): Int = macro Impls.impl
 }
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-b.check b/test/files/neg/macro-override-macro-overrides-abstract-method-b.check
index cde3dbd..c733555 100644
--- a/test/files/neg/macro-override-macro-overrides-abstract-method-b.check
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-b.check
@@ -1,11 +1,11 @@
-Test_2.scala:3: error: anonymous class $anon inherits conflicting members:
+Test_2.scala:3: error: <$anon: C with A> inherits conflicting members:
   macro method t in trait C of type ()Unit  and
   method t in trait A of type ()Unit
-(Note: this can be resolved by declaring an override in anonymous class $anon.)
+(Note: this can be resolved by declaring an override in <$anon: C with A>.)
   val c2 = new C with A {}
                ^
 Test_2.scala:5: error: overriding macro method t in trait C of type ()Unit;
- method t cannot override a macro
+ method t cannot be used here - only term macros can override term macros
   val c4 = new C with A { override def t(): Unit = () }
                                        ^
 two errors found
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala b/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala
index f5b2555..17827ab 100644
--- a/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 import language.experimental.macros
 
 trait T { def t(): Unit }
diff --git a/test/files/neg/macro-override-method-overrides-macro.check b/test/files/neg/macro-override-method-overrides-macro.check
index 66dc11b..e396d65 100644
--- a/test/files/neg/macro-override-method-overrides-macro.check
+++ b/test/files/neg/macro-override-method-overrides-macro.check
@@ -1,5 +1,5 @@
 Macros_Test_2.scala:8: error: overriding macro method foo in class B of type (x: String)Unit;
- method foo cannot override a macro
-  override def foo(x: String) = println("fooDString")
+ method foo cannot be used here - only term macros can override term macros
+  override def foo(x: String): Unit = println("fooDString")
                ^
 one error found
diff --git a/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala b/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala
index ec93dd4..f3917e3 100644
--- a/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala
+++ b/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala
@@ -1,15 +1,14 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def impl(c: Ctx)(tag: String, x: c.Expr[_]) = {
+  def impl(c: Context)(tag: String, x: c.Expr[_]) = {
     import c.{prefix => prefix}
     import c.universe._
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
-    c.Expr[Unit](body)
+    c.Expr[Unit](q"println($tag, ${prefix.toString}, $x)")
   }
 
-  def fooBString(c: Ctx)(x: c.Expr[_]) = impl(c)("fooBString", x)
-  def fooBInt(c: Ctx)(x: c.Expr[_]) = impl(c)("fooBInt", x)
-  def fooDInt(c: Ctx)(x: c.Expr[_]) = impl(c)("fooDInt", x)
-  def fooZString(c: Ctx)(x: c.Expr[_]) = impl(c)("fooZString", x)
+  def fooBString(c: Context)(x: c.Expr[_]) = impl(c)("fooBString", x)
+  def fooBInt(c: Context)(x: c.Expr[_]) = impl(c)("fooBInt", x)
+  def fooDInt(c: Context)(x: c.Expr[_]) = impl(c)("fooDInt", x)
+  def fooZString(c: Context)(x: c.Expr[_]) = impl(c)("fooZString", x)
 }
\ No newline at end of file
diff --git a/test/files/neg/macro-override-method-overrides-macro/Macros_Test_2.scala b/test/files/neg/macro-override-method-overrides-macro/Macros_Test_2.scala
index 36821b0..d471577 100644
--- a/test/files/neg/macro-override-method-overrides-macro/Macros_Test_2.scala
+++ b/test/files/neg/macro-override-method-overrides-macro/Macros_Test_2.scala
@@ -1,15 +1,15 @@
 class B {
-  def foo(x: String) = macro Impls.fooBString
-  def foo(x: Int) = macro Impls.fooBInt
-  def foo(x: Boolean) = println("fooBBoolean")
+  def foo(x: String): Unit = macro Impls.fooBString
+  def foo(x: Int): Unit = macro Impls.fooBInt
+  def foo(x: Boolean): Unit = println("fooBBoolean")
 }
 
 class D extends B {
-  override def foo(x: String) = println("fooDString")
-  override def foo(x: Int) = macro Impls.fooDInt
+  override def foo(x: String): Unit = println("fooDString")
+  override def foo(x: Int): Unit = macro Impls.fooDInt
 }
 
 class Z extends D {
-  override def foo(x: String) = macro Impls.fooZString
-  override def foo(x: Boolean) = println("fooZBoolean")
+  override def foo(x: String): Unit = macro Impls.fooZString
+  override def foo(x: Boolean): Unit = println("fooZBoolean")
 }
diff --git a/test/files/neg/macro-qmarkqmarkqmark.check b/test/files/neg/macro-qmarkqmarkqmark.check
index afd49e7..bc3e25e 100644
--- a/test/files/neg/macro-qmarkqmarkqmark.check
+++ b/test/files/neg/macro-qmarkqmarkqmark.check
@@ -1,7 +1,7 @@
 macro-qmarkqmarkqmark.scala:5: error: macro implementation is missing
   foo1
   ^
-macro-qmarkqmarkqmark.scala:8: error: macros cannot be partially applied
+macro-qmarkqmarkqmark.scala:8: error: too few argument lists for macro invocation
   foo2
   ^
 macro-qmarkqmarkqmark.scala:9: error: macro implementation is missing
diff --git a/test/files/neg/macro-quasiquotes.check b/test/files/neg/macro-quasiquotes.check
new file mode 100644
index 0000000..a985aee
--- /dev/null
+++ b/test/files/neg/macro-quasiquotes.check
@@ -0,0 +1,8 @@
+Macros_1.scala:14: error: bundle implementation has incompatible shape:
+ required: (x: Impls.this.c.Expr[Int]): Impls.this.c.Expr[Unit]
+ or      : (x: Impls.this.c.Tree): Impls.this.c.Tree
+ found   : (x: Impls.this.c.universe.Block): Impls.this.c.Tree
+type mismatch for parameter x: Impls.this.c.Expr[Int] does not conform to Impls.this.c.universe.Block
+  def m3(x: Int): Unit = macro Impls.impl3
+                                     ^
+one error found
diff --git a/test/files/neg/macro-quasiquotes/Macros_1.scala b/test/files/neg/macro-quasiquotes/Macros_1.scala
new file mode 100644
index 0000000..b123c47
--- /dev/null
+++ b/test/files/neg/macro-quasiquotes/Macros_1.scala
@@ -0,0 +1,15 @@
+import language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+class Impls(val c: Context) {
+  import c.universe._
+  def impl1(x: Expr[Int]) = q"println(x)"
+  def impl2(x: Tree) = q"println(x)"
+  def impl3(x: Block) = q"println(x)"
+}
+
+object Macros {
+  def m1(x: Int): Unit = macro Impls.impl1
+  def m2(x: Int): Unit = macro Impls.impl2
+  def m3(x: Int): Unit = macro Impls.impl3
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-quasiquotes/Test_2.scala b/test/files/neg/macro-quasiquotes/Test_2.scala
new file mode 100644
index 0000000..c7b8948
--- /dev/null
+++ b/test/files/neg/macro-quasiquotes/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+  Macros.m1
+  Macros.m2
+  Macros.m3
+}
diff --git a/test/files/neg/macro-reify-splice-splice.check b/test/files/neg/macro-reify-splice-splice.check
new file mode 100644
index 0000000..bd1ea7a
--- /dev/null
+++ b/test/files/neg/macro-reify-splice-splice.check
@@ -0,0 +1,7 @@
+Macros_1.scala:8: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
+cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
+if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
+import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
+      { c.universe.reify(c.universe.reify("hello world")) }.splice.splice
+                                                                   ^
+one error found
diff --git a/test/files/run/macro-reify-splice-splice.flags b/test/files/neg/macro-reify-splice-splice.flags
similarity index 100%
rename from test/files/run/macro-reify-splice-splice.flags
rename to test/files/neg/macro-reify-splice-splice.flags
diff --git a/test/files/neg/macro-reify-splice-splice/Macros_1.scala b/test/files/neg/macro-reify-splice-splice/Macros_1.scala
new file mode 100644
index 0000000..306e78a
--- /dev/null
+++ b/test/files/neg/macro-reify-splice-splice/Macros_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def foo = macro Impls.foo
+
+  object Impls {
+    def foo(c: Context) = c.universe.reify {
+      { c.universe.reify(c.universe.reify("hello world")) }.splice.splice
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-splice-splice/Test_2.scala b/test/files/neg/macro-reify-splice-splice/Test_2.scala
similarity index 100%
rename from test/files/run/macro-reify-splice-splice/Test_2.scala
rename to test/files/neg/macro-reify-splice-splice/Test_2.scala
diff --git a/test/files/neg/macro-without-xmacros-a/Impls_1.scala b/test/files/neg/macro-without-xmacros-a/Impls_1.scala
index 8976f8e..035913f 100644
--- a/test/files/neg/macro-without-xmacros-a/Impls_1.scala
+++ b/test/files/neg/macro-without-xmacros-a/Impls_1.scala
@@ -1,18 +1,18 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def foo_impl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1)))))
+    c.Expr(q"$x + 1")
   }
 
-  def bar_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def bar_impl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2)))))
+    c.Expr(q"$x + 2")
   }
 
-  def quux_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def quux_impl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3)))))
+    c.Expr(q"$x + 3")
   }
 }
\ No newline at end of file
diff --git a/test/files/neg/macro-without-xmacros-b/Impls_1.scala b/test/files/neg/macro-without-xmacros-b/Impls_1.scala
index 8976f8e..035913f 100644
--- a/test/files/neg/macro-without-xmacros-b/Impls_1.scala
+++ b/test/files/neg/macro-without-xmacros-b/Impls_1.scala
@@ -1,18 +1,18 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def foo_impl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1)))))
+    c.Expr(q"$x + 1")
   }
 
-  def bar_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def bar_impl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2)))))
+    c.Expr(q"$x + 2")
   }
 
-  def quux_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def quux_impl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3)))))
+    c.Expr(q"$x + 3")
   }
 }
\ No newline at end of file
diff --git a/test/files/neg/main1.check b/test/files/neg/main1.check
index 1a7a13e..b745105 100644
--- a/test/files/neg/main1.check
+++ b/test/files/neg/main1.check
@@ -1,26 +1,28 @@
-main1.scala:3: error: Foo has a main method with parameter type Array[String], but foo1.Foo will not be a runnable program.
+main1.scala:3: warning: Foo has a main method with parameter type Array[String], but foo1.Foo will not be a runnable program.
   Reason: companion is a trait, which means no static forwarder can be generated.
 
   object Foo {  // companion is trait
          ^
-main1.scala:10: error: Foo has a main method with parameter type Array[String], but foo2.Foo will not be a runnable program.
+main1.scala:10: warning: Foo has a main method with parameter type Array[String], but foo2.Foo will not be a runnable program.
   Reason: companion contains its own main method, which means no static forwarder can be generated.
 
   object Foo {  // companion has its own main
          ^
-main1.scala:22: error: Foo has a main method with parameter type Array[String], but foo3.Foo will not be a runnable program.
+main1.scala:22: warning: Foo has a main method with parameter type Array[String], but foo3.Foo will not be a runnable program.
   Reason: companion contains its own main method (implementation restriction: no main is allowed, regardless of signature), which means no static forwarder can be generated.
 
   object Foo {  // Companion contains main, but not an interfering main.
          ^
-main1.scala:31: error: Foo has a main method with parameter type Array[String], but foo4.Foo will not be a runnable program.
+main1.scala:31: warning: Foo has a main method with parameter type Array[String], but foo4.Foo will not be a runnable program.
   Reason: companion contains its own main method, which means no static forwarder can be generated.
 
   object Foo extends Foo {  // Inherits main from the class
          ^
-main1.scala:39: error: Foo has a main method with parameter type Array[String], but foo5.Foo will not be a runnable program.
+main1.scala:39: warning: Foo has a main method with parameter type Array[String], but foo5.Foo will not be a runnable program.
   Reason: companion contains its own main method, which means no static forwarder can be generated.
 
   object Foo extends Foo {  // Overrides main from the class
          ^
-5 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+5 warnings found
+one error found
diff --git a/test/files/neg/migration28.check b/test/files/neg/migration28.check
index d7dfacf..afb4db6 100644
--- a/test/files/neg/migration28.check
+++ b/test/files/neg/migration28.check
@@ -1,5 +1,7 @@
-migration28.scala:4: error: method scanRight in trait TraversableLike has changed semantics in version 2.9.0:
+migration28.scala:4: warning: method scanRight in trait TraversableLike has changed semantics in version 2.9.0:
 The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.
   List(1,2,3,4,5).scanRight(0)(_+_)
                   ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
 one error found
diff --git a/test/files/neg/migration28.scala b/test/files/neg/migration28.scala
index ba73eea..facc9b3 100644
--- a/test/files/neg/migration28.scala
+++ b/test/files/neg/migration28.scala
@@ -1,9 +1,9 @@
 object Test {
   import scala.collection.mutable._
-  
+
   List(1,2,3,4,5).scanRight(0)(_+_)
-  
+
   def main(args: Array[String]): Unit = {
-    
+
   }
 }
diff --git a/test/files/neg/missing-param-type-tuple.check b/test/files/neg/missing-param-type-tuple.check
new file mode 100644
index 0000000..3a4258f
--- /dev/null
+++ b/test/files/neg/missing-param-type-tuple.check
@@ -0,0 +1,31 @@
+missing-param-type-tuple.scala:3: error: missing parameter type
+Note: The expected type requires a one-argument function accepting a 2-Tuple.
+      Consider a pattern matching anonymous function, `{ case (a, b) =>  ... }`
+  val x: ((Int, Int)) => Int = (a, b) => 0
+                                ^
+missing-param-type-tuple.scala:3: error: missing parameter type
+  val x: ((Int, Int)) => Int = (a, b) => 0
+                                   ^
+missing-param-type-tuple.scala:5: error: missing parameter type
+Note: The expected type requires a one-argument function accepting a 3-Tuple.
+      Consider a pattern matching anonymous function, `{ case (param1, ..., param3) =>  ... }`
+  val y: ((Int, Int, Int)) => Int = (a, b, !!) => 0
+                                     ^
+missing-param-type-tuple.scala:5: error: missing parameter type
+  val y: ((Int, Int, Int)) => Int = (a, b, !!) => 0
+                                        ^
+missing-param-type-tuple.scala:5: error: missing parameter type
+  val y: ((Int, Int, Int)) => Int = (a, b, !!) => 0
+                                           ^
+missing-param-type-tuple.scala:7: error: missing parameter type
+Note: The expected type requires a one-argument function accepting a 3-Tuple.
+      Consider a pattern matching anonymous function, `{ case (param1, ..., param3) =>  ... }`
+  val z: ((Int, Int, Int)) => Int = (a, NotAVariablePatternName, c) => 0
+                                     ^
+missing-param-type-tuple.scala:7: error: missing parameter type
+  val z: ((Int, Int, Int)) => Int = (a, NotAVariablePatternName, c) => 0
+                                        ^
+missing-param-type-tuple.scala:7: error: missing parameter type
+  val z: ((Int, Int, Int)) => Int = (a, NotAVariablePatternName, c) => 0
+                                                                 ^
+8 errors found
diff --git a/test/files/neg/missing-param-type-tuple.scala b/test/files/neg/missing-param-type-tuple.scala
new file mode 100644
index 0000000..72c0c82
--- /dev/null
+++ b/test/files/neg/missing-param-type-tuple.scala
@@ -0,0 +1,8 @@
+class C {
+
+  val x: ((Int, Int)) => Int = (a, b) => 0
+
+  val y: ((Int, Int, Int)) => Int = (a, b, !!) => 0
+
+  val z: ((Int, Int, Int)) => Int = (a, NotAVariablePatternName, c) => 0
+}
diff --git a/test/files/neg/name-lookup-stable.check b/test/files/neg/name-lookup-stable.check
new file mode 100644
index 0000000..751df95
--- /dev/null
+++ b/test/files/neg/name-lookup-stable.check
@@ -0,0 +1,11 @@
+name-lookup-stable.scala:15: error: reference to PrimaryKey is ambiguous;
+it is both defined in class A and imported subsequently by
+import ColumnOption._
+    (null: Any) match { case PrimaryKey => }
+                             ^
+name-lookup-stable.scala:17: error: reference to PrimaryKey is ambiguous;
+it is both defined in class A and imported subsequently by
+import ColumnOption._
+    PrimaryKey // was already ambigious in 2.10.3
+    ^
+two errors found
diff --git a/test/files/neg/name-lookup-stable.scala b/test/files/neg/name-lookup-stable.scala
new file mode 100644
index 0000000..0d862f0
--- /dev/null
+++ b/test/files/neg/name-lookup-stable.scala
@@ -0,0 +1,20 @@
+// This used to compile under 2.10.3 but the ambiguity is now noticed
+// in 2.11.x (after a70c8219). I think the new behaviour is correct;
+// we shouldn't discard names based on "expected stability" before
+// evaluating ambiguity.
+object ColumnOption {
+  object PrimaryKey
+}
+
+class A {
+  def PrimaryKey: Any = ???
+
+  {
+    import ColumnOption._
+
+    (null: Any) match { case PrimaryKey => }
+
+    PrimaryKey // was already ambigious in 2.10.3
+  }
+}
+
diff --git a/test/files/neg/names-defaults-neg-ref.check b/test/files/neg/names-defaults-neg-ref.check
index 00052c7..61d66fd 100644
--- a/test/files/neg/names-defaults-neg-ref.check
+++ b/test/files/neg/names-defaults-neg-ref.check
@@ -1,4 +1,4 @@
-names-defaults-neg-ref.scala:3: error: in anonymous class $anon, multiple overloaded alternatives of method f define default arguments.
+names-defaults-neg-ref.scala:3: error: in <$anon: A2235 with B2235>, multiple overloaded alternatives of method f define default arguments.
 The members with defaults are defined in trait B2235 and trait A2235.
   new A2235 with B2235
       ^
diff --git a/test/files/neg/names-defaults-neg-warn.check b/test/files/neg/names-defaults-neg-warn.check
index e1085ac..0f4edef 100644
--- a/test/files/neg/names-defaults-neg-warn.check
+++ b/test/files/neg/names-defaults-neg-warn.check
@@ -1,7 +1,9 @@
-names-defaults-neg-warn.scala:11: error: the parameter name s has been deprecated. Use x instead.
+names-defaults-neg-warn.scala:11: warning: the parameter name s has been deprecated. Use x instead.
   deprNam2.f(s = "dlfkj")
                ^
-names-defaults-neg-warn.scala:12: error: the parameter name x has been deprecated. Use s instead.
+names-defaults-neg-warn.scala:12: warning: the parameter name x has been deprecated. Use s instead.
   deprNam2.g(x = "dlkjf")
                ^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index ea7c323..20ddd55 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -7,6 +7,11 @@ names-defaults-neg.scala:5: error: type mismatch;
  required: Int
   test1(b = 2, a = "#")
                    ^
+names-defaults-neg.scala:5: error: type mismatch;
+ found   : Int(2)
+ required: String
+  test1(b = 2, a = "#")
+            ^
 names-defaults-neg.scala:8: error: positional after named argument.
   test1(b = "(*", 23)
                   ^
@@ -83,7 +88,7 @@ names-defaults-neg.scala:76: error: no type parameters for method test4: (x: T[T
  --- because ---
 argument expression's type is not compatible with formal parameter type;
  found   : List[Int]
- required: ?T
+ required: ?T[?T[List[?T[X forSome { type X }]]]]
 Error occurred in an application involving default arguments.
   test4()
   ^
@@ -100,7 +105,7 @@ Error occurred in an application involving default arguments.
   ^
 names-defaults-neg.scala:86: error: module extending its companion class cannot use default constructor arguments
     object C extends C()
-             ^
+                     ^
 names-defaults-neg.scala:90: error: deprecated parameter name x has to be distinct from any other parameter name (deprecated or not).
   def deprNam1(x: Int, @deprecatedName('x) y: String) = 0
                                            ^
@@ -122,12 +127,24 @@ names-defaults-neg.scala:131: error: reference to var2 is ambiguous; it is both
 names-defaults-neg.scala:134: error: missing parameter type for expanded function ((x$1) => a = x$1)
   val taf2: Int => Unit = testAnnFun(a = _, b = get("+"))
                                          ^
+names-defaults-neg.scala:134: error: not found: value a
+  val taf2: Int => Unit = testAnnFun(a = _, b = get("+"))
+                                     ^
+names-defaults-neg.scala:134: error: not found: value get
+  val taf2: Int => Unit = testAnnFun(a = _, b = get("+"))
+                                                ^
 names-defaults-neg.scala:135: error: parameter 'a' is already specified at parameter position 1
   val taf3 = testAnnFun(b = _: String, a = get(8))
                                          ^
-names-defaults-neg.scala:136: error: wrong number of parameters; expected = 2
+names-defaults-neg.scala:136: error: missing parameter type for expanded function ((x$3) => testAnnFun(x$3, ((x$4) => b = x$4)))
   val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
-                                              ^
+                                               ^
+names-defaults-neg.scala:136: error: missing parameter type for expanded function ((x$4) => b = x$4)
+  val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
+                                                      ^
+names-defaults-neg.scala:136: error: not found: value b
+  val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
+                                                  ^
 names-defaults-neg.scala:144: error: variable definition needs type because 'x' is used as a named argument in its body.
   def t3 { var x = t.f(x = 1) }
                ^
@@ -165,4 +182,4 @@ names-defaults-neg.scala:180: error: reference to x is ambiguous; it is both a m
   class u18 { var x: Int = u.f(x = 1) }
                                  ^
 four warnings found
-41 errors found
+46 errors found
diff --git a/test/files/neg/nested-annotation.check b/test/files/neg/nested-annotation.check
new file mode 100644
index 0000000..ca26394
--- /dev/null
+++ b/test/files/neg/nested-annotation.check
@@ -0,0 +1,10 @@
+nested-annotation.scala:3: warning: Implementation restriction: subclassing Classfile does not
+make your annotation visible at runtime.  If that is what
+you want, you must write the annotation class in Java.
+class ComplexAnnotation(val value: Annotation) extends ClassfileAnnotation
+      ^
+nested-annotation.scala:8: error: nested classfile annotations must be defined in java; found: inline
+  @ComplexAnnotation(new inline) def bippy(): Int = 1
+                     ^
+one warning found
+one error found
diff --git a/test/files/neg/nested-annotation.scala b/test/files/neg/nested-annotation.scala
new file mode 100644
index 0000000..35c0cd3
--- /dev/null
+++ b/test/files/neg/nested-annotation.scala
@@ -0,0 +1,9 @@
+import annotation._
+
+class ComplexAnnotation(val value: Annotation) extends ClassfileAnnotation
+
+class A {
+  // It's hard to induce this error because @ComplexAnnotation(@inline) is a parse
+  // error so it never gets out of the parser, but:
+  @ComplexAnnotation(new inline) def bippy(): Int = 1
+}
diff --git a/test/files/neg/nested-fn-print.scala b/test/files/neg/nested-fn-print.scala
index 9a4bd16..c599a23 100644
--- a/test/files/neg/nested-fn-print.scala
+++ b/test/files/neg/nested-fn-print.scala
@@ -2,7 +2,7 @@ object Test {
   var x1: Int => Float => Double = _
   var x2: (Int => Float) => Double = _
   var x3: Int => Double
-  
+
   def main(args: Array[String]): Unit = {
     x1 = "a"
     x2 = "b"
diff --git a/test/files/neg/newpat_unreachable.check b/test/files/neg/newpat_unreachable.check
index 08453ca..4463e2f 100644
--- a/test/files/neg/newpat_unreachable.check
+++ b/test/files/neg/newpat_unreachable.check
@@ -1,27 +1,35 @@
-newpat_unreachable.scala:6: error: patterns after a variable pattern cannot match (SLS 8.1.1)
+newpat_unreachable.scala:6: warning: patterns after a variable pattern cannot match (SLS 8.1.1)
 If you intended to match against parameter b of method contrivedExample, you must use backticks, like: case `b` =>
       case b => println("matched b")
            ^
-newpat_unreachable.scala:7: error: unreachable code due to variable pattern 'b' on line 6
+newpat_unreachable.scala:7: warning: unreachable code due to variable pattern 'b' on line 6
 If you intended to match against parameter c of method contrivedExample, you must use backticks, like: case `c` =>
       case c => println("matched c")
                        ^
-newpat_unreachable.scala:8: error: unreachable code due to variable pattern 'b' on line 6
+newpat_unreachable.scala:8: warning: unreachable code due to variable pattern 'b' on line 6
 If you intended to match against value d in class A, you must use backticks, like: case `d` =>
       case d => println("matched d")
                        ^
-newpat_unreachable.scala:9: error: unreachable code due to variable pattern 'b' on line 6
+newpat_unreachable.scala:9: warning: unreachable code due to variable pattern 'b' on line 6
       case _ => println("matched neither")
                        ^
-newpat_unreachable.scala:22: error: patterns after a variable pattern cannot match (SLS 8.1.1)
+newpat_unreachable.scala:7: warning: unreachable code
+      case c => println("matched c")
+                       ^
+newpat_unreachable.scala:22: warning: patterns after a variable pattern cannot match (SLS 8.1.1)
 If you intended to match against parameter b of method g, you must use backticks, like: case `b` =>
           case b => 1
                ^
-newpat_unreachable.scala:23: error: unreachable code due to variable pattern 'b' on line 22
+newpat_unreachable.scala:23: warning: unreachable code due to variable pattern 'b' on line 22
 If you intended to match against parameter c of method h, you must use backticks, like: case `c` =>
           case c => 2
                     ^
-newpat_unreachable.scala:24: error: unreachable code due to variable pattern 'b' on line 22
+newpat_unreachable.scala:24: warning: unreachable code due to variable pattern 'b' on line 22
           case _ => 3
                     ^
-7 errors found
+newpat_unreachable.scala:23: warning: unreachable code
+          case c => 2
+                    ^
+error: No warnings can be incurred under -Xfatal-warnings.
+9 warnings found
+one error found
diff --git a/test/files/neg/no-implicit-to-anyref-any-val.check b/test/files/neg/no-implicit-to-anyref-any-val.check
new file mode 100644
index 0000000..5953e1b
--- /dev/null
+++ b/test/files/neg/no-implicit-to-anyref-any-val.check
@@ -0,0 +1,34 @@
+no-implicit-to-anyref-any-val.scala:11: error: the result type of an implicit conversion must be more specific than AnyRef
+    1: AnyRef
+    ^
+no-implicit-to-anyref-any-val.scala:17: error: type mismatch;
+ found   : Any
+ required: AnyRef
+    (null: Any): AnyRef
+         ^
+no-implicit-to-anyref-any-val.scala:21: error: type mismatch;
+ found   : AnyVal
+ required: AnyRef
+    (0: AnyVal): AnyRef
+      ^
+no-implicit-to-anyref-any-val.scala:27: error: type mismatch;
+ found   : Test.AV
+ required: AnyRef
+Note that AV extends Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+    new AV(0): AnyRef
+    ^
+no-implicit-to-anyref-any-val.scala:30: error: the result type of an implicit conversion must be more specific than AnyVal
+  "": AnyVal
+  ^
+no-implicit-to-anyref-any-val.scala:32: error: type mismatch;
+ found   : Object
+ required: AnyVal
+Note that implicit conversions are not applicable because they are ambiguous:
+ both method ArrowAssoc in object Predef of type [A](self: A)ArrowAssoc[A]
+ and method Ensuring in object Predef of type [A](self: A)Ensuring[A]
+ are possible conversion functions from Object to AnyVal
+  new Object() : AnyVal
+  ^
+6 errors found
diff --git a/test/files/neg/no-implicit-to-anyref-any-val.scala b/test/files/neg/no-implicit-to-anyref-any-val.scala
new file mode 100644
index 0000000..f5daf54
--- /dev/null
+++ b/test/files/neg/no-implicit-to-anyref-any-val.scala
@@ -0,0 +1,33 @@
+// Checks that the state of standard implicits in Predef._ and scala._
+// doesn't allow us to unambiguously and implicitly convert AnyVal
+// and subtypes to AnyRef.
+//
+// In the days before value classes, this was precariously held be
+// the competing implicits Any => StringAdd and Any => StringFormat.
+// Since then, these have both become value classes, but seeing as
+// this happened simultaneously, we're still okay.
+object Test {
+  locally {
+    1: AnyRef
+  }
+
+  locally {
+    // before this test case was added and ContextErrors was tweaked, this
+    // emitted: "Note that Any extends Any, not AnyRef."
+    (null: Any): AnyRef
+  }
+
+  locally {
+    (0: AnyVal): AnyRef
+  }
+
+  class AV(val a: Int) extends AnyVal
+
+  locally {
+    new AV(0): AnyRef
+  }
+
+  "": AnyVal
+
+  new Object() : AnyVal
+}
diff --git a/test/files/neg/no-implicit-to-anyref.check b/test/files/neg/no-implicit-to-anyref.check
deleted file mode 100644
index d94b57a..0000000
--- a/test/files/neg/no-implicit-to-anyref.check
+++ /dev/null
@@ -1,28 +0,0 @@
-no-implicit-to-anyref.scala:11: error: type mismatch;
- found   : Int(1)
- required: AnyRef
-Note: an implicit exists from scala.Int => java.lang.Integer, but
-methods inherited from Object are rendered ambiguous.  This is to avoid
-a blanket implicit which would convert any scala.Int to any AnyRef.
-You may wish to use a type ascription: `x: java.lang.Integer`.
-    1: AnyRef
-    ^
-no-implicit-to-anyref.scala:17: error: type mismatch;
- found   : Any
- required: AnyRef
-    (null: Any): AnyRef
-         ^
-no-implicit-to-anyref.scala:21: error: type mismatch;
- found   : AnyVal
- required: AnyRef
-    (0: AnyVal): AnyRef
-      ^
-no-implicit-to-anyref.scala:27: error: type mismatch;
- found   : Test.AV
- required: AnyRef
-Note that AV extends Any, not AnyRef.
-Such types can participate in value classes, but instances
-cannot appear in singleton types or in reference comparisons.
-    new AV(0): AnyRef
-    ^
-four errors found
diff --git a/test/files/neg/no-implicit-to-anyref.scala b/test/files/neg/no-implicit-to-anyref.scala
deleted file mode 100644
index 3e3d373..0000000
--- a/test/files/neg/no-implicit-to-anyref.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-// Checks that the state of standard implicits in Predef._ and scala._
-// doesn't allow us to unambiguously and implicitly convert AnyVal
-// and subtypes to AnyRef.
-//
-// In the days before value classes, this was precariously held be
-// the competing implicits Any => StringAdd and Any => StringFormat.
-// Since then, these have both become value classes, but seeing as
-// this happened simultaneously, we're still okay.
-object Test {
-  locally {
-    1: AnyRef
-  }
-
-  locally {
-    // before this test case was added and ContextErrors was tweaked, this
-    // emitted: "Note that Any extends Any, not AnyRef."
-    (null: Any): AnyRef
-  }
-
-  locally {
-    (0: AnyVal): AnyRef
-  }
-
-  class AV(val a: Int) extends AnyVal
-
-  locally {
-    new AV(0): AnyRef
-  }
-}
diff --git a/test/files/neg/nonlocal-warning.check b/test/files/neg/nonlocal-warning.check
new file mode 100644
index 0000000..5202df6
--- /dev/null
+++ b/test/files/neg/nonlocal-warning.check
@@ -0,0 +1,9 @@
+nonlocal-warning.scala:4: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
+    catch { case x => 11 }
+                 ^
+nonlocal-warning.scala:2: warning: catch block may intercept non-local return from method foo
+  def foo(l: List[Int]): Int = {
+                               ^
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/pending/pos/t4649.flags b/test/files/neg/nonlocal-warning.flags
similarity index 100%
copy from test/pending/pos/t4649.flags
copy to test/files/neg/nonlocal-warning.flags
diff --git a/test/files/neg/nonlocal-warning.scala b/test/files/neg/nonlocal-warning.scala
new file mode 100644
index 0000000..f908a86
--- /dev/null
+++ b/test/files/neg/nonlocal-warning.scala
@@ -0,0 +1,18 @@
+class Foo {
+  def foo(l: List[Int]): Int = {
+    try l foreach { _ => return 5 }
+    catch { case x => 11 }
+    22
+  }
+
+  val pf: PartialFunction[Throwable, Unit] = {
+    case x if false => ()
+  }
+
+  def bar(l: List[Int]): Int = {
+    try l foreach { _ => return 5 }
+    catch pf
+    finally println()
+    22
+  }
+}
diff --git a/test/files/neg/not-a-legal-formal-parameter-tuple.check b/test/files/neg/not-a-legal-formal-parameter-tuple.check
new file mode 100644
index 0000000..2b906b8
--- /dev/null
+++ b/test/files/neg/not-a-legal-formal-parameter-tuple.check
@@ -0,0 +1,19 @@
+not-a-legal-formal-parameter-tuple.scala:2: error: not a legal formal parameter.
+Note: Tuples cannot be directly destructured in method or function parameters.
+      Either create a single parameter accepting the Tuple2,
+      or consider a pattern matching anonymous function: `{ case (a, b) => ... }
+  val x: ((Int, Int) => Int) = (((a, b)) => a)
+                                 ^
+not-a-legal-formal-parameter-tuple.scala:3: error: not a legal formal parameter.
+Note: Tuples cannot be directly destructured in method or function parameters.
+      Either create a single parameter accepting the Tuple2,
+      or consider a pattern matching anonymous function: `{ case (param1, param2) => ... }
+  val y: ((Int, Int, Int) => Int) = (((a, !!)) => a)
+                                      ^
+not-a-legal-formal-parameter-tuple.scala:4: error: not a legal formal parameter.
+Note: Tuples cannot be directly destructured in method or function parameters.
+      Either create a single parameter accepting the Tuple3,
+      or consider a pattern matching anonymous function: `{ case (param1, ..., param3) => ... }
+  val z: ((Int, Int, Int) => Int) = (((a, NotAPatternVariableName, c)) => a)
+                                      ^
+three errors found
diff --git a/test/files/neg/not-a-legal-formal-parameter-tuple.scala b/test/files/neg/not-a-legal-formal-parameter-tuple.scala
new file mode 100644
index 0000000..c7a1355
--- /dev/null
+++ b/test/files/neg/not-a-legal-formal-parameter-tuple.scala
@@ -0,0 +1,5 @@
+class C {
+  val x: ((Int, Int) => Int) = (((a, b)) => a)
+  val y: ((Int, Int, Int) => Int) = (((a, !!)) => a)
+  val z: ((Int, Int, Int) => Int) = (((a, NotAPatternVariableName, c)) => a)
+}
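
For reference, a small sketch (illustrative only) of the two compiling alternatives that the new error message recommends.

    object TupleParamSketch {
      val viaTuple2: ((Int, Int)) => Int       = p => p._1            // single parameter of type Tuple2
      val viaPatternMatch: ((Int, Int)) => Int = { case (a, b) => a } // pattern-matching anonymous function
      def main(args: Array[String]): Unit =
        println(viaTuple2((1, 2)) + viaPatternMatch((3, 4)))
    }
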
diff --git a/test/files/neg/null-unsoundness.scala b/test/files/neg/null-unsoundness.scala
index 3f7e42f..0f8ed5e 100644
--- a/test/files/neg/null-unsoundness.scala
+++ b/test/files/neg/null-unsoundness.scala
@@ -12,4 +12,3 @@ object Test extends A with App {
   type A = C
   y = 42
 }
-  
diff --git a/test/files/neg/nullary-override.check b/test/files/neg/nullary-override.check
index 6b2ded2..f032f4a 100644
--- a/test/files/neg/nullary-override.check
+++ b/test/files/neg/nullary-override.check
@@ -1,4 +1,6 @@
-nullary-override.scala:2: error: non-nullary method overrides nullary method
+nullary-override.scala:2: warning: non-nullary method overrides nullary method
 class B extends A { override def x(): Int = 4 }
                                  ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
 one error found
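
A minimal sketch (hypothetical names, illustrative only) of the shape that avoids the now-downgraded warning: the override keeps the same nullary shape as the parent definition.

    class NullaryParent { def x: Int = 3 }                                 // declared without a parameter list
    class NullaryChild extends NullaryParent { override def x: Int = 4 }   // override stays nullary
    object NullarySketch { def main(args: Array[String]): Unit = println((new NullaryChild).x) }
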
diff --git a/test/files/neg/object-not-a-value.scala b/test/files/neg/object-not-a-value.scala
index 2f894a3..207b271 100644
--- a/test/files/neg/object-not-a-value.scala
+++ b/test/files/neg/object-not-a-value.scala
@@ -1,6 +1,6 @@
 object Test {
   import java.util._
-  
+
   def main(args: Array[String]): Unit = {
     List(1) map (_ + 1)
   }
diff --git a/test/files/neg/overload-msg.scala b/test/files/neg/overload-msg.scala
index 8967222..8715c15 100644
--- a/test/files/neg/overload-msg.scala
+++ b/test/files/neg/overload-msg.scala
@@ -1,4 +1,4 @@
 // type parameter shadows actual type, massive overload error confuses.
-class A(x: Int) { 
+class A(x: Int) {
   def f[Int](y: Int) = x + y
 }
diff --git a/test/files/neg/overloaded-implicit.check b/test/files/neg/overloaded-implicit.check
index bdbe6a8..ca08707 100644
--- a/test/files/neg/overloaded-implicit.check
+++ b/test/files/neg/overloaded-implicit.check
@@ -1,7 +1,9 @@
-overloaded-implicit.scala:2: error: parameterized overloaded implicit methods are not visible as view bounds
+overloaded-implicit.scala:2: warning: parameterized overloaded implicit methods are not visible as view bounds
   implicit def imp1[T](x: List[T]): Map[T, T] = Map()
                ^
-overloaded-implicit.scala:3: error: parameterized overloaded implicit methods are not visible as view bounds
+overloaded-implicit.scala:3: warning: parameterized overloaded implicit methods are not visible as view bounds
   implicit def imp1[T](x: Set[T]): Map[T, T] = Map()
                ^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/overloaded-implicit.flags b/test/files/neg/overloaded-implicit.flags
index 7949c2a..9c1e74e 100644
--- a/test/files/neg/overloaded-implicit.flags
+++ b/test/files/neg/overloaded-implicit.flags
@@ -1 +1 @@
--Xlint -Xfatal-warnings
+-Xlint -Xfatal-warnings -Xdev
diff --git a/test/files/neg/overloaded-implicit.scala b/test/files/neg/overloaded-implicit.scala
index 9ab66cf..68b1cea 100644
--- a/test/files/neg/overloaded-implicit.scala
+++ b/test/files/neg/overloaded-implicit.scala
@@ -1,15 +1,15 @@
 object Test {
   implicit def imp1[T](x: List[T]): Map[T, T] = Map()
   implicit def imp1[T](x: Set[T]): Map[T, T] = Map()
-  
+
   def f[T <% Map[Int, Int]](x: T): Double = 1.0d
 
   // not parameterized, no warning
-  implicit def imp2(x: List[Int]): String = "a" 
+  implicit def imp2(x: List[Int]): String = "a"
   implicit def imp2(x: Set[Int]): String = "b"
-  
+
   def g[T <% String](x: T): Double = 2.0d
-  
+
   def main(args: Array[String]): Unit = {
     // println(f(List(1)))
     println(g(List(1)))
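
An illustrative sketch (hypothetical names, not from this patch): giving the parameterized implicits distinct names keeps them eligible as view bounds, which is what the downgraded warning is about.

    object DistinctImplicitsSketch {
      implicit def listToMap[T](x: List[T]): Map[T, T] = x.map(e => e -> e).toMap
      implicit def setToMap[T](x: Set[T]): Map[T, T]   = x.map(e => e -> e).toMap
      def f[T <% Map[Int, Int]](x: T): Int = (x: Map[Int, Int]).size   // uses the view bound
      def main(args: Array[String]): Unit = println(f(List(1, 2, 3)))  // resolves to listToMap
    }
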
diff --git a/test/files/neg/override-object-no.scala b/test/files/neg/override-object-no.scala
index 45961e4..745cdb2 100644
--- a/test/files/neg/override-object-no.scala
+++ b/test/files/neg/override-object-no.scala
@@ -25,7 +25,7 @@ package case1 {
   trait Quux4 extends Quux3 { override object Bar  } // err
 }
 
-// type parameter as-seen-from business 
+// type parameter as-seen-from business
 package case2 {
   // invariance (see pos for the covariant case)
   class Bar[T]
diff --git a/test/files/neg/package-ob-case.check b/test/files/neg/package-ob-case.check
deleted file mode 100644
index e6b2f85..0000000
--- a/test/files/neg/package-ob-case.check
+++ /dev/null
@@ -1,5 +0,0 @@
-package-ob-case.scala:3: error: it is not recommended to define classes/objects inside of package objects.
-If possible, define class X in package foo instead.
-    case class X(z: Int) { }
-               ^
-one error found
diff --git a/test/files/neg/package-ob-case.flags b/test/files/neg/package-ob-case.flags
deleted file mode 100644
index 6c1dd10..0000000
--- a/test/files/neg/package-ob-case.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfatal-warnings -Xlint
\ No newline at end of file
diff --git a/test/files/neg/pat_unreachable.check b/test/files/neg/pat_unreachable.check
index c5706b7..374ee4e 100644
--- a/test/files/neg/pat_unreachable.check
+++ b/test/files/neg/pat_unreachable.check
@@ -1,13 +1,17 @@
-pat_unreachable.scala:5: error: unreachable code
-    case Seq(x, y, z, w) => List(z,w) // redundant!
-                                ^
-pat_unreachable.scala:9: error: unreachable code
-    case Seq(x, y) => List(x, y)
-                          ^
-pat_unreachable.scala:23: error: unreachable code
+pat_unreachable.scala:22: warning: patterns after a variable pattern cannot match (SLS 8.1.1)
+If you intended to match against parameter b of method contrivedExample, you must use backticks, like: case `b` =>
+    case b => println("matched b")
+         ^
+pat_unreachable.scala:23: warning: unreachable code due to variable pattern 'b' on line 22
+If you intended to match against parameter c of method contrivedExample, you must use backticks, like: case `c` =>
     case c => println("matched c")
                      ^
-pat_unreachable.scala:24: error: unreachable code
+pat_unreachable.scala:24: warning: unreachable code due to variable pattern 'b' on line 22
     case _ => println("matched neither")
                      ^
-four errors found
+pat_unreachable.scala:23: warning: unreachable code
+    case c => println("matched c")
+                     ^
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/pat_unreachable.flags b/test/files/neg/pat_unreachable.flags
index cb8324a..85d8eb2 100644
--- a/test/files/neg/pat_unreachable.flags
+++ b/test/files/neg/pat_unreachable.flags
@@ -1 +1 @@
--Xoldpatmat
\ No newline at end of file
+-Xfatal-warnings
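
A small sketch (illustrative only) of the backtick form the new warning text recommends, so a case compares against the parameter's value instead of introducing a fresh variable pattern that matches everything.

    object BacktickPatternSketch {
      def pick(x: Int, b: Int, c: Int): String = x match {
        case `b` => "matched b"        // compares x against the value of parameter b
        case `c` => "matched c"
        case _   => "matched neither"
      }
      def main(args: Array[String]): Unit = println(pick(2, 1, 2))   // prints "matched c"
    }
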
diff --git a/test/files/neg/patmat-classtag-compound.check b/test/files/neg/patmat-classtag-compound.check
new file mode 100644
index 0000000..8a54c93
--- /dev/null
+++ b/test/files/neg/patmat-classtag-compound.check
@@ -0,0 +1,6 @@
+patmat-classtag-compound.scala:12: warning: abstract type pattern A is unchecked since it is eliminated by erasure
+    case b: A with Bar => true
+            ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/patmat-classtag-compound.flags b/test/files/neg/patmat-classtag-compound.flags
new file mode 100644
index 0000000..144ddac
--- /dev/null
+++ b/test/files/neg/patmat-classtag-compound.flags
@@ -0,0 +1 @@
+-unchecked -Xfatal-warnings
diff --git a/test/files/neg/patmat-classtag-compound.scala b/test/files/neg/patmat-classtag-compound.scala
new file mode 100644
index 0000000..e2d0df0
--- /dev/null
+++ b/test/files/neg/patmat-classtag-compound.scala
@@ -0,0 +1,17 @@
+object Test extends App{
+  trait Bar
+  trait Foo
+  // Failed to give an unchecked warning pre: https://github.com/scala/scala/pull/2848
+  //
+  // Features interacting:
+  //   - implicit class tags to enable type patterns on abstract types
+  //   - type tests on compound types.
+  //
+  // We could try make these work together, but an unchecked warning is okay for now.
+  def x[A: reflect.ClassTag](a: Any): Boolean = a match{
+    case b: A with Bar => true
+    case _ => false
+  }
+  println(x[Foo](new Bar{}))
+  println(x[String](""))
+}
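
For contrast, a minimal sketch (not from this patch) of the supported half of the interaction: with a ClassTag in scope, a type pattern on a bare abstract type is checked at run time without an unchecked warning; only the compound `A with Bar` form above stays unchecked.

    import scala.reflect.ClassTag
    object ClassTagPatternSketch {
      def is[A: ClassTag](a: Any): Boolean = a match {
        case _: A => true      // runtime test supplied by the implicit ClassTag[A]
        case _    => false
      }
      def main(args: Array[String]): Unit = {
        println(is[String]("hello"))   // true
        println(is[String](42))        // false
      }
    }
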
diff --git a/test/files/neg/patmat-type-check.check b/test/files/neg/patmat-type-check.check
index 721217c..fedac3b 100644
--- a/test/files/neg/patmat-type-check.check
+++ b/test/files/neg/patmat-type-check.check
@@ -1,12 +1,27 @@
 patmat-type-check.scala:11: warning: fruitless type test: a value of type Test.Bop4[T] cannot also be a Seq[A]
   def s3[T](x: Bop4[T]) = x match { case Seq('b', 'o', 'b') => true }
                                             ^
+patmat-type-check.scala:11: error: pattern type is incompatible with expected type;
+ found   : Seq[A]
+ required: Test.Bop4[T]
+  def s3[T](x: Bop4[T]) = x match { case Seq('b', 'o', 'b') => true }
+                                            ^
 patmat-type-check.scala:15: warning: fruitless type test: a value of type Test.Bop5[_$1,T1,T2] cannot also be a Seq[A]
   def s4[T1, T2](x: Bop5[_, T1, T2]) = x match { case Seq('b', 'o', 'b') => true }
                                                          ^
+patmat-type-check.scala:15: error: pattern type is incompatible with expected type;
+ found   : Seq[A]
+ required: Test.Bop5[_$1,T1,T2] where type _$1
+  def s4[T1, T2](x: Bop5[_, T1, T2]) = x match { case Seq('b', 'o', 'b') => true }
+                                                         ^
 patmat-type-check.scala:19: warning: fruitless type test: a value of type Test.Bop3[T] cannot also be a Seq[A]
   def f4[T](x: Bop3[T]) = x match { case Seq('b', 'o', 'b') => true }
                                             ^
+patmat-type-check.scala:19: error: pattern type is incompatible with expected type;
+ found   : Seq[A]
+ required: Test.Bop3[T]
+  def f4[T](x: Bop3[T]) = x match { case Seq('b', 'o', 'b') => true }
+                                            ^
 patmat-type-check.scala:22: error: scrutinee is incompatible with pattern type;
  found   : Seq[A]
  required: String
@@ -28,4 +43,4 @@ patmat-type-check.scala:30: error: scrutinee is incompatible with pattern type;
   def f4[T](x: Bop3[Char]) = x match { case Seq('b', 'o', 'b') => true } // fail
                                                ^
 three warnings found
-four errors found
+7 errors found
diff --git a/test/files/neg/patmat-type-check.scala b/test/files/neg/patmat-type-check.scala
index cc35089..26d0409 100644
--- a/test/files/neg/patmat-type-check.scala
+++ b/test/files/neg/patmat-type-check.scala
@@ -1,15 +1,15 @@
 object Test
 {
   def s1 = "bob".toList  match { case Seq('b', 'o', 'b') => true }  // list ok
-  
+
   // not final, allowed
-  class Bop 
+  class Bop
   def s2(x: Bop) = x match { case Seq('b', 'o', 'b') => true }
-  
+
   // covariance, allowed
   final class Bop4[+T]
   def s3[T](x: Bop4[T]) = x match { case Seq('b', 'o', 'b') => true }
-  
+
   // contravariance, allowed
   final class Bop5[T, U, -V]
   def s4[T1, T2](x: Bop5[_, T1, T2]) = x match { case Seq('b', 'o', 'b') => true }
@@ -17,7 +17,7 @@ object Test
   // free type parameter, allowed
   final class Bop3[T]
   def f4[T](x: Bop3[T]) = x match { case Seq('b', 'o', 'b') => true }
-  
+
   // String and Array are final/invariant, disallowed
   def f1 = "bob".reverse match { case Seq('b', 'o', 'b') => true } // fail
   def f2 = "bob".toArray match { case Seq('b', 'o', 'b') => true } // fail
@@ -25,7 +25,7 @@ object Test
   // final, no type parameters, should be disallowed
   final class Bop2
   def f3(x: Bop2) = x match { case Seq('b', 'o', 'b') => true } // fail
-  
+
   // final, invariant type parameter, should be disallowed
   def f4[T](x: Bop3[Char]) = x match { case Seq('b', 'o', 'b') => true } // fail
 }
diff --git a/test/files/neg/patmatexhaust.check b/test/files/neg/patmatexhaust.check
index 4556e66..2dad608 100644
--- a/test/files/neg/patmatexhaust.check
+++ b/test/files/neg/patmatexhaust.check
@@ -1,40 +1,42 @@
-patmatexhaust.scala:7: error: match may not be exhaustive.
+patmatexhaust.scala:7: warning: match may not be exhaustive.
 It would fail on the following input: Baz
     def ma1(x:Foo) = x match {
                      ^
-patmatexhaust.scala:11: error: match may not be exhaustive.
+patmatexhaust.scala:11: warning: match may not be exhaustive.
 It would fail on the following input: Bar(_)
     def ma2(x:Foo) = x match {
                      ^
-patmatexhaust.scala:23: error: match may not be exhaustive.
+patmatexhaust.scala:23: warning: match may not be exhaustive.
 It would fail on the following inputs: (Kult(_), Kult(_)), (Qult(), Qult())
     def ma3(x:Mult) = (x,x) match { // not exhaustive
                       ^
-patmatexhaust.scala:49: error: match may not be exhaustive.
+patmatexhaust.scala:49: warning: match may not be exhaustive.
 It would fail on the following inputs: Gp(), Gu
     def ma4(x:Deep) = x match { // missing cases: Gu, Gp
                       ^
-patmatexhaust.scala:55: error: unreachable code
-      case _ if 1 == 0 => 
+patmatexhaust.scala:55: warning: unreachable code
+      case _ if 1 == 0 =>
                        ^
-patmatexhaust.scala:53: error: match may not be exhaustive.
+patmatexhaust.scala:53: warning: match may not be exhaustive.
 It would fail on the following input: Gp()
     def ma5(x:Deep) = x match {
                       ^
-patmatexhaust.scala:75: error: match may not be exhaustive.
+patmatexhaust.scala:75: warning: match may not be exhaustive.
 It would fail on the following input: B()
   def ma9(x: B) = x match {
                   ^
-patmatexhaust.scala:100: error: match may not be exhaustive.
+patmatexhaust.scala:100: warning: match may not be exhaustive.
 It would fail on the following input: C1()
     def ma10(x: C) = x match { // not exhaustive: C1 is not sealed.
                      ^
-patmatexhaust.scala:114: error: match may not be exhaustive.
+patmatexhaust.scala:114: warning: match may not be exhaustive.
 It would fail on the following inputs: D1, D2()
     def ma10(x: C) = x match {  // not exhaustive: C1 has subclasses.
                      ^
-patmatexhaust.scala:126: error: match may not be exhaustive.
+patmatexhaust.scala:126: warning: match may not be exhaustive.
 It would fail on the following input: C1()
     def ma10(x: C) = x match { // not exhaustive: C1 is not abstract.
                      ^
-10 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+10 warnings found
+one error found
diff --git a/test/files/neg/patmatexhaust.scala b/test/files/neg/patmatexhaust.scala
index ceb960e..f937197 100644
--- a/test/files/neg/patmatexhaust.scala
+++ b/test/files/neg/patmatexhaust.scala
@@ -22,14 +22,14 @@ class TestSealedExhaustive { // compile only
 
     def ma3(x:Mult) = (x,x) match { // not exhaustive
       case (Kult(_), Qult())    => // Kult missing
-      //case Pair(Kult(_), Kult(_))    => 
+      //case (Kult(_), Kult(_))    =>
       case (Qult(), Kult(_))    => // Qult missing
-      //case Pair(Qult(), Qult())    => 
+      //case (Qult(), Qult())    =>
     }
 
     def ma3u(x:Mult) = ((x,x) : @unchecked) match { // not exhaustive, but not checked!
-      case (Kult(_), Qult())    => 
-      case (Qult(), Kult(_))    => 
+      case (Kult(_), Qult())    =>
+      case (Qult(), Kult(_))    =>
     }
 
     sealed abstract class Deep
@@ -37,22 +37,22 @@ class TestSealedExhaustive { // compile only
     case object Ga extends Deep
     sealed class Gp extends Deep
     case object Gu extends Gp
-   
+
     def zma3(x:Deep) = x match { // exhaustive!
       case _ =>
     }
     def zma4(x:Deep) = x match { // exhaustive!
-      case Ga => 
+      case Ga =>
       case _ =>
     }
 
     def ma4(x:Deep) = x match { // missing cases: Gu, Gp
-      case Ga => 
+      case Ga =>
     }
 
     def ma5(x:Deep) = x match {
       case Gu =>
-      case _ if 1 == 0 => 
+      case _ if 1 == 0 =>
       case Ga =>
     }
 
@@ -65,7 +65,7 @@ class TestSealedExhaustive { // compile only
     case 1::2::Nil =>
       case _ =>
   }
-  
+
   sealed class B
   case class B1() extends B
   case object B2 extends B
@@ -76,7 +76,7 @@ class TestSealedExhaustive { // compile only
     case B1() => true       // missing B, which is not abstract so must be included
     case B2   => true
   }
-  
+
   object ob1 {
     sealed abstract class C
     sealed abstract class C1 extends C
@@ -89,7 +89,7 @@ class TestSealedExhaustive { // compile only
       case C2 | C4  => true
     }
   }
-  
+
   object ob2 {
     sealed abstract class C
     abstract class C1 extends C
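
A minimal sketch (hypothetical names, illustrative only) of how the exhaustiveness warnings above are normally resolved: either cover every constructor of the sealed hierarchy or opt out explicitly with @unchecked.

    sealed abstract class Shade
    case object Light extends Shade
    case object Dark  extends Shade
    object ExhaustivenessSketch {
      def name(s: Shade): String = s match {                       // exhaustive, so no warning
        case Light => "light"
        case Dark  => "dark"
      }
      def lightOnly(s: Shade): String = (s: @unchecked) match {    // explicitly opted out of the check
        case Light => "light"
      }
      def main(args: Array[String]): Unit = println(name(Dark) + " " + name(Light))
    }
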
diff --git a/test/files/neg/patternalts.scala b/test/files/neg/patternalts.scala
index 56b682b..539df43 100644
--- a/test/files/neg/patternalts.scala
+++ b/test/files/neg/patternalts.scala
@@ -1,5 +1,5 @@
 object Test {
   List(1) match {
     case List(x) | List() => Console.println(x)
-  }  
+  }
 }
diff --git a/test/files/neg/permanent-blindness.check b/test/files/neg/permanent-blindness.check
index 18b4543..cdde201 100644
--- a/test/files/neg/permanent-blindness.check
+++ b/test/files/neg/permanent-blindness.check
@@ -1,10 +1,12 @@
-permanent-blindness.scala:10: error: imported `Bippy' is permanently hidden by definition of class Bippy in package bar
+permanent-blindness.scala:10: warning: imported `Bippy' is permanently hidden by definition of class Bippy in package bar
   import foo.{ Bippy, Bop, Dingus }
                ^
-permanent-blindness.scala:10: error: imported `Bop' is permanently hidden by definition of object Bop in package bar
+permanent-blindness.scala:10: warning: imported `Bop' is permanently hidden by definition of object Bop in package bar
   import foo.{ Bippy, Bop, Dingus }
                       ^
-permanent-blindness.scala:10: error: imported `Dingus' is permanently hidden by definition of object Dingus in package bar
+permanent-blindness.scala:10: warning: imported `Dingus' is permanently hidden by definition of object Dingus in package bar
   import foo.{ Bippy, Bop, Dingus }
                            ^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
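
An illustrative sketch (hypothetical names, not from this patch) of the usual remedy for the "permanently hidden" warning: rename the import so it no longer collides with the same-named local definition.

    object foolib { class Bippy { override def toString = "foolib.Bippy" } }
    object barapp {
      import foolib.{ Bippy => FooBippy }   // renamed, so the local Bippy below does not hide it
      class Bippy
      def both: (FooBippy, Bippy) = (new FooBippy, new Bippy)
    }
    object RenameImportSketch { def main(args: Array[String]): Unit = println(barapp.both._1) }
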
diff --git a/test/files/neg/protected-constructors.check b/test/files/neg/protected-constructors.check
index f137158..f44d7db 100644
--- a/test/files/neg/protected-constructors.check
+++ b/test/files/neg/protected-constructors.check
@@ -3,23 +3,20 @@ protected-constructors.scala:17: error: too many arguments for constructor Foo1:
                ^
 protected-constructors.scala:18: error: constructor Foo2 in class Foo2 cannot be accessed in object P
  Access to protected constructor Foo2 not permitted because
- enclosing object P in package hungus is not a subclass of 
+ enclosing object P in package hungus is not a subclass of
  class Foo2 in package dingus where target is defined
     val foo2 = new Foo2("abc")
                ^
 protected-constructors.scala:19: error: class Foo3 in object Ding cannot be accessed in object dingus.Ding
  Access to protected class Foo3 not permitted because
- enclosing object P in package hungus is not a subclass of 
+ enclosing object P in package hungus is not a subclass of
  object Ding in package dingus where target is defined
     val foo3 = new Ding.Foo3("abc")
                         ^
 protected-constructors.scala:15: error: class Foo3 in object Ding cannot be accessed in object dingus.Ding
  Access to protected class Foo3 not permitted because
- enclosing object P in package hungus is not a subclass of 
+ enclosing object P in package hungus is not a subclass of
  object Ding in package dingus where target is defined
     class Bar3 extends Ding.Foo3("abc")
                             ^
-protected-constructors.scala:15: error: too many arguments for constructor Object: ()Object
-    class Bar3 extends Ding.Foo3("abc")
-               ^
-5 errors found
+four errors found
diff --git a/test/files/neg/protected-constructors.scala b/test/files/neg/protected-constructors.scala
index f2662ee..2838caf 100644
--- a/test/files/neg/protected-constructors.scala
+++ b/test/files/neg/protected-constructors.scala
@@ -8,7 +8,7 @@ package dingus {
 
 package hungus {
   import dingus._
-  
+
   object P {
     class Bar1 extends Foo1("abc")
     class Bar2 extends Foo2("abc")
diff --git a/test/files/neg/protected-static-fail.check b/test/files/neg/protected-static-fail.check
index e149bc0..9f0bc92 100644
--- a/test/files/neg/protected-static-fail.check
+++ b/test/files/neg/protected-static-fail.check
@@ -3,13 +3,13 @@ S.scala:5: error: method f in object J cannot be accessed in object bippy.J
       ^
 S.scala:6: error: method f1 in object S1 cannot be accessed in object bippy.S1
  Access to protected method f1 not permitted because
- enclosing object Test in package bippy is not a subclass of 
+ enclosing object Test in package bippy is not a subclass of
  object S1 in package bippy where target is defined
     S1.f1()
        ^
 S.scala:8: error: method f2 in class S2 cannot be accessed in bippy.S2
  Access to protected method f2 not permitted because
- enclosing object Test in package bippy is not a subclass of 
+ enclosing object Test in package bippy is not a subclass of
  class S2 in package bippy where target is defined
     x.f2()
       ^
diff --git a/test/files/neg/quasiquotes-syntax-error-position.check b/test/files/neg/quasiquotes-syntax-error-position.check
new file mode 100644
index 0000000..9fd6ce0
--- /dev/null
+++ b/test/files/neg/quasiquotes-syntax-error-position.check
@@ -0,0 +1,47 @@
+quasiquotes-syntax-error-position.scala:5: error: '=' expected but identifier found.
+  q"def $a f"
+           ^
+quasiquotes-syntax-error-position.scala:6: error: illegal start of simple expression
+  q"$a("
+       ^
+quasiquotes-syntax-error-position.scala:7: error: '}' expected but end of quote found.
+  q"class $t { def foo = $a"
+                           ^
+quasiquotes-syntax-error-position.scala:8: error: '.' expected but unquotee found.
+  q"import $t $t"
+              ^
+quasiquotes-syntax-error-position.scala:9: error: '{' expected but end of quote found.
+  q"package p"
+             ^
+quasiquotes-syntax-error-position.scala:10: error: ';' expected but '@' found.
+  q"foo@$a"
+       ^
+quasiquotes-syntax-error-position.scala:11: error: case classes without a parameter list are not allowed;
+use either case objects or case classes with an explicit `()' as a parameter list.
+  q"case class A"
+                ^
+quasiquotes-syntax-error-position.scala:12: error: identifier expected but ']' found.
+  tq"$t => $t $t]"
+                ^
+quasiquotes-syntax-error-position.scala:13: error: end of quote expected but 'case' found.
+  cq"pattern => body ; case pattern2 =>"
+                       ^
+quasiquotes-syntax-error-position.scala:14: error: ')' expected but end of quote found.
+  pq"$a(bar"
+           ^
+quasiquotes-syntax-error-position.scala:15: error: ':' expected but ')' found.
+  q"def foo(x)"
+             ^
+quasiquotes-syntax-error-position.scala:16: error: illegal start of simple expression
+  q"$a(])"
+       ^
+quasiquotes-syntax-error-position.scala:17: error: in XML literal: '>' expected instead of '$'
+  q"foo bar <xml$a>"
+                ^
+quasiquotes-syntax-error-position.scala:19: error: ';' expected but '<:' found.
+  q"val $x: $x <: $x"
+               ^
+quasiquotes-syntax-error-position.scala:20: error: '=' expected but '.' found.
+  q"def f ( $x  ) . $x"
+                  ^
+15 errors found
diff --git a/test/files/neg/quasiquotes-syntax-error-position.scala b/test/files/neg/quasiquotes-syntax-error-position.scala
new file mode 100644
index 0000000..823fe9a
--- /dev/null
+++ b/test/files/neg/quasiquotes-syntax-error-position.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+object test extends App {
+  val a = TermName("a")
+  val t = TypeName("t")
+  q"def $a f"
+  q"$a("
+  q"class $t { def foo = $a"
+  q"import $t $t"
+  q"package p"
+  q"foo@$a"
+  q"case class A"
+  tq"$t => $t $t]"
+  cq"pattern => body ; case pattern2 =>"
+  pq"$a(bar"
+  q"def foo(x)"
+  q"$a(])"
+  q"foo bar <xml$a>"
+  val x = q"x"
+  q"val $x: $x <: $x"
+  q"def f ( $x  ) . $x"
+}
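
For contrast, a small sketch (illustrative only) of well-formed counterparts to a few of the deliberately broken quotes above.

    import scala.reflect.runtime.universe._
    object QuasiquoteOkSketch extends App {
      val a = TermName("a")
      val t = TypeName("T")
      println(q"def $a: Int = 1")              // the definition is complete, so it parses
      println(q"class $t { def foo = 42 }")    // braces closed inside the quote
      println(tq"$t => $t")                    // balanced function type, no stray bracket
    }
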
diff --git a/test/files/neg/quasiquotes-unliftable-not-found.check b/test/files/neg/quasiquotes-unliftable-not-found.check
new file mode 100644
index 0000000..5594aa1
--- /dev/null
+++ b/test/files/neg/quasiquotes-unliftable-not-found.check
@@ -0,0 +1,4 @@
+quasiquotes-unliftable-not-found.scala:4: error: Can't find reflect.runtime.universe.Unliftable[Test.C], consider providing it
+  val q"${c: C}" = q"()"
+      ^
+one error found
diff --git a/test/files/neg/quasiquotes-unliftable-not-found.scala b/test/files/neg/quasiquotes-unliftable-not-found.scala
new file mode 100644
index 0000000..6a5efae
--- /dev/null
+++ b/test/files/neg/quasiquotes-unliftable-not-found.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+  import scala.reflect.runtime.universe._
+  class C
+  val q"${c: C}" = q"()"
+}
\ No newline at end of file
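
A minimal sketch (hypothetical encoding, not from this patch) of what "consider providing it" means: once an Unliftable[C] instance is in implicit scope, the `${c: C}` extraction compiles.

    import scala.reflect.runtime.universe._
    object UnliftableSketch extends App {
      case class C(x: Int)
      implicit val unliftC: Unliftable[C] = Unliftable[C] {
        case Literal(Constant(x: Int)) => C(x)     // decide how a tree maps back to a C
      }
      val q"${c: C}" = q"42"
      println(c)                                   // C(42)
    }
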
diff --git a/test/files/neg/raw-types-stubs.check b/test/files/neg/raw-types-stubs.check
new file mode 100644
index 0000000..f1b26a2
--- /dev/null
+++ b/test/files/neg/raw-types-stubs.check
@@ -0,0 +1,11 @@
+S_3.scala:1: error: class Sub needs to be abstract, since:
+it has 2 unimplemented members.
+/** As seen from class Sub, the missing signatures are as follows.
+ *  For convenience, these are usable as stub implementations.
+ */
+  def raw(x$1: M_1[_ <: String]): Unit = ???
+  def raw(x$1: Any): Unit = ???
+
+class Sub extends Raw_2 { }
+      ^
+one error found
diff --git a/test/files/neg/raw-types-stubs/M_1.java b/test/files/neg/raw-types-stubs/M_1.java
new file mode 100644
index 0000000..6ea0d2e
--- /dev/null
+++ b/test/files/neg/raw-types-stubs/M_1.java
@@ -0,0 +1,3 @@
+public class M_1<K extends String> { }
+
+
diff --git a/test/files/neg/raw-types-stubs/Raw_2.java b/test/files/neg/raw-types-stubs/Raw_2.java
new file mode 100644
index 0000000..eff7df7
--- /dev/null
+++ b/test/files/neg/raw-types-stubs/Raw_2.java
@@ -0,0 +1,4 @@
+public abstract class Raw_2 {
+  public abstract void raw(Object list);
+  public abstract void raw(M_1 list);
+}
diff --git a/test/files/neg/raw-types-stubs/S_3.scala b/test/files/neg/raw-types-stubs/S_3.scala
new file mode 100644
index 0000000..618eedc
--- /dev/null
+++ b/test/files/neg/raw-types-stubs/S_3.scala
@@ -0,0 +1 @@
+class Sub extends Raw_2 { }
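
An illustrative sketch (hypothetical name; assumes it is compiled together with M_1.java and Raw_2.java above) showing the stub signatures from the error message actually implemented.

    class SubImpl extends Raw_2 {
      def raw(list: M_1[_ <: String]): Unit = println("generic overload")
      def raw(list: Any): Unit = println("raw overload")
    }
    object RawStubsSketch {
      def main(args: Array[String]): Unit = {
        val s = new SubImpl
        s.raw(new M_1[String])   // picks the more specific M_1 overload
        s.raw(42)                // Int does not conform to M_1, so the Any overload is used
      }
    }
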
diff --git a/test/files/neg/reflection-names-neg.check b/test/files/neg/reflection-names-neg.check
index a56a19e..f941ec8 100644
--- a/test/files/neg/reflection-names-neg.check
+++ b/test/files/neg/reflection-names-neg.check
@@ -7,4 +7,7 @@ Note that implicit conversions are not applicable because they are ambiguous:
  are possible conversion functions from String("abc") to reflect.runtime.universe.Name
   val x2 = ("abc": Name) drop 1         // error
             ^
-one error found
+reflection-names-neg.scala:5: error: value drop is not a member of reflect.runtime.universe.Name
+  val x2 = ("abc": Name) drop 1         // error
+                         ^
+two errors found
diff --git a/test/files/neg/run-gadts-strict.check b/test/files/neg/run-gadts-strict.check
new file mode 100644
index 0000000..b4d36c4
--- /dev/null
+++ b/test/files/neg/run-gadts-strict.check
@@ -0,0 +1,21 @@
+run-gadts-strict.scala:12: error: type mismatch;
+ found   : n.type (with underlying type Int)
+ required: T
+    case Lit(n)        => n
+                          ^
+run-gadts-strict.scala:13: error: type mismatch;
+ found   : Int
+ required: T
+    case Succ(u)       => eval(u) + 1
+                                  ^
+run-gadts-strict.scala:14: error: type mismatch;
+ found   : Boolean
+ required: T
+    case IsZero(u)     => eval(u) == 0
+                                  ^
+run-gadts-strict.scala:15: error: type mismatch;
+ found   : T(in class If)
+ required: T(in method eval)
+    case If(c, u1, u2) => eval(if (eval(c)) u1 else u2)
+                              ^
+four errors found
diff --git a/test/files/neg/run-gadts-strict.flags b/test/files/neg/run-gadts-strict.flags
new file mode 100644
index 0000000..1924326
--- /dev/null
+++ b/test/files/neg/run-gadts-strict.flags
@@ -0,0 +1 @@
+-Xstrict-inference
\ No newline at end of file
diff --git a/test/files/neg/run-gadts-strict.scala b/test/files/neg/run-gadts-strict.scala
new file mode 100644
index 0000000..041d10d
--- /dev/null
+++ b/test/files/neg/run-gadts-strict.scala
@@ -0,0 +1,18 @@
+// A copy of run/gadts.scala, which must fail under -Xstrict-inference.
+abstract class Term[T]
+case class Lit(x: Int) extends Term[Int]
+case class Succ(t: Term[Int]) extends Term[Int]
+case class IsZero(t: Term[Int]) extends Term[Boolean]
+case class If[T](c: Term[Boolean],
+                 t1: Term[T],
+                 t2: Term[T]) extends Term[T]
+
+object Test extends App {
+  def eval[T](t: Term[T]): T = t match {
+    case Lit(n)        => n
+    case Succ(u)       => eval(u) + 1
+    case IsZero(u)     => eval(u) == 0
+    case If(c, u1, u2) => eval(if (eval(c)) u1 else u2)
+  }
+  println(eval(If(IsZero(Lit(1)), Lit(41), Succ(Lit(41)))))
+}
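
For context, a minimal sketch (hypothetical names, not from this patch) of the GADT refinement that -Xstrict-inference switches off: matching a constructor refines the type parameter, so each branch can return at type T.

    sealed abstract class Expr[T]
    case class IntLit(i: Int)      extends Expr[Int]
    case class BoolLit(b: Boolean) extends Expr[Boolean]
    object GadtSketch {
      def eval[T](e: Expr[T]): T = e match {
        case IntLit(i)  => i   // here T is refined to Int
        case BoolLit(b) => b   // here T is refined to Boolean
      }
      def main(args: Array[String]): Unit = println(eval(IntLit(41)) + 1)   // 42
    }
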
diff --git a/test/files/neg/saito.scala b/test/files/neg/saito.scala
index bfabb4e..b4fcd59 100644
--- a/test/files/neg/saito.scala
+++ b/test/files/neg/saito.scala
@@ -2,13 +2,13 @@ class B {}
 class A { self: B =>
     def m(): B = {
         this
-    }   
+    }
 }
 
 object Exec{
     def main(args: Array[String]): Unit = {
         val a: A = new A; // should not be allowed
-        val b: B = a.m(); 
+        val b: B = a.m();
     }
 }
 
diff --git a/test/files/neg/sammy_restrictions.check b/test/files/neg/sammy_restrictions.check
new file mode 100644
index 0000000..8cc49f9
--- /dev/null
+++ b/test/files/neg/sammy_restrictions.check
@@ -0,0 +1,49 @@
+sammy_restrictions.scala:31: error: type mismatch;
+ found   : () => Int
+ required: NoAbstract
+  (() => 0)      : NoAbstract
+      ^
+sammy_restrictions.scala:32: error: type mismatch;
+ found   : Int => Int
+ required: TwoAbstract
+  ((x: Int) => 0): TwoAbstract
+            ^
+sammy_restrictions.scala:34: error: class type required but DerivedOneAbstract with OneAbstract found
+  ((x: Int) => 0): NonClassType                 // "class type required". I think we should avoid SAM translation here.
+            ^
+sammy_restrictions.scala:35: error: type mismatch;
+ found   : Int => Int
+ required: NoEmptyConstructor
+  ((x: Int) => 0): NoEmptyConstructor
+            ^
+sammy_restrictions.scala:37: error: type mismatch;
+ found   : Int => Int
+ required: OneEmptySecondaryConstructor
+  ((x: Int) => 0): OneEmptySecondaryConstructor // derived class must have an empty *primary* to call.
+            ^
+sammy_restrictions.scala:38: error: type mismatch;
+ found   : Int => Int
+ required: MultipleConstructorLists
+  ((x: Int) => 0): MultipleConstructorLists
+            ^
+sammy_restrictions.scala:39: error: type mismatch;
+ found   : Int => Int
+ required: MultipleMethodLists
+  ((x: Int) => 0): MultipleMethodLists
+            ^
+sammy_restrictions.scala:40: error: type mismatch;
+ found   : Int => Int
+ required: ImplicitConstructorParam
+  ((x: Int) => 0): ImplicitConstructorParam
+            ^
+sammy_restrictions.scala:41: error: type mismatch;
+ found   : Int => Int
+ required: ImplicitMethodParam
+  ((x: Int) => 0): ImplicitMethodParam
+            ^
+sammy_restrictions.scala:44: error: type mismatch;
+ found   : Int => Int
+ required: PolyMethod
+  ((x: Int) => 0): PolyMethod
+            ^
+10 errors found
diff --git a/test/files/run/interpolation.flags b/test/files/neg/sammy_restrictions.flags
similarity index 100%
copy from test/files/run/interpolation.flags
copy to test/files/neg/sammy_restrictions.flags
diff --git a/test/files/neg/sammy_restrictions.scala b/test/files/neg/sammy_restrictions.scala
new file mode 100644
index 0000000..5f1a04c
--- /dev/null
+++ b/test/files/neg/sammy_restrictions.scala
@@ -0,0 +1,45 @@
+class NoAbstract
+
+class TwoAbstract { def ap(a: Int): Int; def pa(a: Int): Int }
+
+class Base // check that the super class constructor isn't considered.
+class NoEmptyConstructor(a: Int) extends Base { def this(a: String) = this(0); def ap(a: Int): Int }
+
+class OneEmptyConstructor() { def this(a: Int) = this(); def ap(a: Int): Int }
+
+class OneEmptySecondaryConstructor(a: Int) { def this() = this(0); def ap(a: Int): Int }
+
+class MultipleConstructorLists()() { def ap(a: Int): Int }
+
+class MultipleMethodLists()() { def ap(a: Int)(): Int }
+
+class ImplicitConstructorParam()(implicit a: String) { def ap(a: Int): Int }
+
+class ImplicitMethodParam() { def ap(a: Int)(implicit b: String): Int }
+
+class PolyClass[T] { def ap(a: T): T }
+
+class PolyMethod { def ap[T](a: T): T }
+
+class OneAbstract { def ap(a: Any): Any }
+class DerivedOneAbstract extends OneAbstract
+
+object Test {
+  implicit val s: String = ""
+  type NonClassType = DerivedOneAbstract with OneAbstract
+
+  (() => 0)      : NoAbstract
+  ((x: Int) => 0): TwoAbstract
+  ((x: Int) => 0): DerivedOneAbstract           // okay
+  ((x: Int) => 0): NonClassType                 // "class type required". I think we should avoid SAM translation here.
+  ((x: Int) => 0): NoEmptyConstructor
+  ((x: Int) => 0): OneEmptyConstructor          // okay
+  ((x: Int) => 0): OneEmptySecondaryConstructor // derived class must have an empty *primary* to call.
+  ((x: Int) => 0): MultipleConstructorLists
+  ((x: Int) => 0): MultipleMethodLists
+  ((x: Int) => 0): ImplicitConstructorParam
+  ((x: Int) => 0): ImplicitMethodParam
+
+  ((x: Int) => 0): PolyClass[Int]               // okay
+  ((x: Int) => 0): PolyMethod
+}
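
For contrast, a minimal sketch (hypothetical names; assumes the experimental SAM conversion this test group exercises on 2.11) of a target type that is accepted: exactly one abstract method and no constructor obstacles.

    trait OneAbstractOk { def ap(a: Int): Int }            // single abstract method
    object SamOkSketch {
      val inc: OneAbstractOk = (x: Int) => x + 1           // converted to an OneAbstractOk instance
      def main(args: Array[String]): Unit = println(inc.ap(41))   // 42
    }
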
diff --git a/test/files/neg/sammy_wrong_arity.check b/test/files/neg/sammy_wrong_arity.check
new file mode 100644
index 0000000..af547a2
--- /dev/null
+++ b/test/files/neg/sammy_wrong_arity.check
@@ -0,0 +1,52 @@
+sammy_wrong_arity.scala:6: error: type mismatch;
+ found   : () => Int
+ required: T1
+  (() => 0): T1
+      ^
+sammy_wrong_arity.scala:7: error: type mismatch;
+ found   : Any => Int
+ required: T2
+  ((x: Any) => 0): T2
+            ^
+sammy_wrong_arity.scala:9: error: type mismatch;
+ found   : Any => Int
+ required: T0
+  ((x: Any) => 0): T0
+            ^
+sammy_wrong_arity.scala:10: error: type mismatch;
+ found   : Any => Int
+ required: T2
+  ((x: Any) => 0): T2
+            ^
+sammy_wrong_arity.scala:12: error: type mismatch;
+ found   : (Any, Any) => Int
+ required: T0
+  ((x: Any, y: Any) => 0): T0
+                    ^
+sammy_wrong_arity.scala:13: error: type mismatch;
+ found   : (Any, Any) => Int
+ required: T1
+  ((x: Any, y: Any) => 0): T1
+                    ^
+sammy_wrong_arity.scala:15: error: missing parameter type
+  ((x) => 0): T2
+    ^
+sammy_wrong_arity.scala:17: error: missing parameter type
+  ((x) => 0): T0
+    ^
+sammy_wrong_arity.scala:18: error: missing parameter type
+  ((x) => 0): T2
+    ^
+sammy_wrong_arity.scala:20: error: missing parameter type
+  ((x, y) => 0): T0
+    ^
+sammy_wrong_arity.scala:20: error: missing parameter type
+  ((x, y) => 0): T0
+       ^
+sammy_wrong_arity.scala:21: error: missing parameter type
+  ((x, y) => 0): T1
+    ^
+sammy_wrong_arity.scala:21: error: missing parameter type
+  ((x, y) => 0): T1
+       ^
+13 errors found
diff --git a/test/files/run/interpolationMultiline1.flags b/test/files/neg/sammy_wrong_arity.flags
similarity index 100%
rename from test/files/run/interpolationMultiline1.flags
rename to test/files/neg/sammy_wrong_arity.flags
diff --git a/test/files/neg/sammy_wrong_arity.scala b/test/files/neg/sammy_wrong_arity.scala
new file mode 100644
index 0000000..d03d266
--- /dev/null
+++ b/test/files/neg/sammy_wrong_arity.scala
@@ -0,0 +1,22 @@
+trait T0 { def ap(): Int }
+trait T1 { def ap(a: Any): Int }
+trait T2 { def ap(a: Any, b: Any): Int }
+
+class Test {
+  (() => 0): T1
+  ((x: Any) => 0): T2
+
+  ((x: Any) => 0): T0
+  ((x: Any) => 0): T2
+
+  ((x: Any, y: Any) => 0): T0
+  ((x: Any, y: Any) => 0): T1
+
+  ((x) => 0): T2
+
+  ((x) => 0): T0
+  ((x) => 0): T2
+
+  ((x, y) => 0): T0
+  ((x, y) => 0): T1
+}
diff --git a/test/files/neg/sealed-final-neg.check b/test/files/neg/sealed-final-neg.check
new file mode 100644
index 0000000..500d23f
--- /dev/null
+++ b/test/files/neg/sealed-final-neg.check
@@ -0,0 +1,4 @@
+sealed-final-neg.scala:41: error: expected class or object definition
+"Due to SI-6142 this emits no warnings, so we'll just break it until that's fixed."
+^
+one error found
diff --git a/test/files/neg/sealed-final-neg.flags b/test/files/neg/sealed-final-neg.flags
new file mode 100644
index 0000000..cfabf7a
--- /dev/null
+++ b/test/files/neg/sealed-final-neg.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Yinline-warnings -optimise
\ No newline at end of file
diff --git a/test/files/neg/sealed-final-neg.scala b/test/files/neg/sealed-final-neg.scala
new file mode 100644
index 0000000..bc25330
--- /dev/null
+++ b/test/files/neg/sealed-final-neg.scala
@@ -0,0 +1,41 @@
+package neg1 {
+  sealed abstract class Foo {
+    @inline def bar(x: Int) = x + 1
+  }
+  object Foo {
+    def mkFoo(): Foo = new Baz2
+  }
+
+  object Baz1 extends Foo
+  final class Baz2 extends Foo
+  final class Baz3 extends Foo {
+    override def bar(x: Int) = x - 1
+  }
+
+  object Test {
+    // bar can't be inlined - it is overridden in Baz3
+    def f = Foo.mkFoo() bar 10
+  }
+}
+
+package neg2 {
+  sealed abstract class Foo {
+    @inline def bar(x: Int) = x + 1
+  }
+  object Foo {
+    def mkFoo(): Foo = new Baz2
+  }
+
+  object Baz1 extends Foo
+  final class Baz2 extends Foo
+  class Baz3 extends Foo {
+    override def bar(x: Int) = x - 1
+  }
+
+  object Test {
+    // bar can't be inlined - Baz3 is not final
+    def f = Foo.mkFoo() bar 10
+  }
+}
+
+"Due to SI-6142 this emits no warnings, so we'll just break it until that's fixed."
diff --git a/test/files/neg/sealed-java-enums.check b/test/files/neg/sealed-java-enums.check
index 20d00c8..a3c39ec 100644
--- a/test/files/neg/sealed-java-enums.check
+++ b/test/files/neg/sealed-java-enums.check
@@ -1,5 +1,7 @@
-sealed-java-enums.scala:5: error: match may not be exhaustive.
+sealed-java-enums.scala:5: warning: match may not be exhaustive.
 It would fail on the following inputs: BLOCKED, TERMINATED, TIMED_WAITING
   def f(state: State) = state match {
                         ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
 one error found
diff --git a/test/files/neg/sensitive.scala b/test/files/neg/sensitive.scala
index fa1b940..a084a0a 100644
--- a/test/files/neg/sensitive.scala
+++ b/test/files/neg/sensitive.scala
@@ -5,8 +5,8 @@ object Admin extends Certificate;
 class SecurityViolationException extends Exception
 
 object Sensitive {
-  def makeSensitive(credentials: Certificate): Sensitive = 
-    if (credentials == Admin) new Sensitive() 
+  def makeSensitive(credentials: Certificate): Sensitive =
+    if (credentials == Admin) new Sensitive()
     else throw new SecurityViolationException
 }
 class Sensitive private () {
@@ -16,4 +16,3 @@ object Attacker {
   val x = Sensitive.makeSensitive(null)
   val y = new Sensitive()
 }
-  
diff --git a/test/files/neg/serialversionuid-not-const.check b/test/files/neg/serialversionuid-not-const.check
new file mode 100644
index 0000000..9c383d9
--- /dev/null
+++ b/test/files/neg/serialversionuid-not-const.check
@@ -0,0 +1,10 @@
+serialversionuid-not-const.scala:1: error: annotation argument needs to be a constant; found: 13L.toLong
+ at SerialVersionUID(13l.toLong) class C1 extends Serializable
+                      ^
+serialversionuid-not-const.scala:3: error: annotation argument needs to be a constant; found: 13.asInstanceOf[Long]
+ at SerialVersionUID(13.asInstanceOf[Long]) class C3 extends Serializable
+                                 ^
+serialversionuid-not-const.scala:4: error: annotation argument needs to be a constant; found: Test.bippy
+ at SerialVersionUID(Test.bippy) class C4 extends Serializable
+                       ^
+three errors found
diff --git a/test/files/neg/serialversionuid-not-const.scala b/test/files/neg/serialversionuid-not-const.scala
new file mode 100644
index 0000000..f0e3ef4
--- /dev/null
+++ b/test/files/neg/serialversionuid-not-const.scala
@@ -0,0 +1,16 @@
+ at SerialVersionUID(13l.toLong) class C1 extends Serializable
+ at SerialVersionUID(13l) class C2 extends Serializable
+ at SerialVersionUID(13.asInstanceOf[Long]) class C3 extends Serializable
+ at SerialVersionUID(Test.bippy) class C4 extends Serializable
+
+object Test {
+  val bippy = 13L
+
+  def show(c: Class[_]) = println(java.io.ObjectStreamClass.lookup(c).getSerialVersionUID)
+  def main(args: Array[String]): Unit = {
+    show(classOf[C1])
+    show(classOf[C2])
+    show(classOf[C3])
+    show(classOf[C4])
+  }
+}
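
For reference, a minimal sketch (hypothetical name, not from this patch) of the accepted form: the annotation argument is a literal Long constant, exactly like C2 above.

    @SerialVersionUID(13L)
    class ConstantSvuid extends Serializable
    object SvuidSketch {
      def main(args: Array[String]): Unit =
        println(java.io.ObjectStreamClass.lookup(classOf[ConstantSvuid]).getSerialVersionUID)   // 13
    }
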
diff --git a/test/files/neg/spec-overrides.scala b/test/files/neg/spec-overrides.scala
index f77dade..713ce27 100644
--- a/test/files/neg/spec-overrides.scala
+++ b/test/files/neg/spec-overrides.scala
@@ -15,7 +15,7 @@ class FX2 extends P {
 object Test extends App {
   val fx = new FX
   val p = new P
- 
+
   println(fx.a(3))
   println((fx: P).a(3))
   println((fx: P).a(3.0))
diff --git a/test/files/neg/specification-scopes.check b/test/files/neg/specification-scopes.check
index 7af9842..ab98613 100644
--- a/test/files/neg/specification-scopes.check
+++ b/test/files/neg/specification-scopes.check
@@ -1,5 +1,5 @@
 P_2.scala:14: error: reference to x is ambiguous;
-it is both defined in object C and imported subsequently by 
+it is both defined in object C and imported subsequently by
 import Q.X._
           println("L14: "+x)   // reference to 'x' is ambiguous here
                           ^
diff --git a/test/files/neg/stmt-expr-discard.check b/test/files/neg/stmt-expr-discard.check
index 2d6420a..1207e6d 100644
--- a/test/files/neg/stmt-expr-discard.check
+++ b/test/files/neg/stmt-expr-discard.check
@@ -1,7 +1,9 @@
-stmt-expr-discard.scala:3: error: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+stmt-expr-discard.scala:3: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
     + 2
     ^
-stmt-expr-discard.scala:4: error: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+stmt-expr-discard.scala:4: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
     - 4
       ^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/stringinterpolation_macro-neg.check b/test/files/neg/stringinterpolation_macro-neg.check
index 457f497..703846a 100644
--- a/test/files/neg/stringinterpolation_macro-neg.check
+++ b/test/files/neg/stringinterpolation_macro-neg.check
@@ -1,61 +1,61 @@
-stringinterpolation_macro-neg.scala:8: error: too few parts
+stringinterpolation_macro-neg.scala:13: error: there are no parts
   new StringContext().f()
   ^
-stringinterpolation_macro-neg.scala:9: error: too few arguments for interpolated string
+stringinterpolation_macro-neg.scala:14: error: too few arguments for interpolated string
   new StringContext("", " is ", "%2d years old").f(s)
                                                    ^
-stringinterpolation_macro-neg.scala:10: error: too many arguments for interpolated string
+stringinterpolation_macro-neg.scala:15: error: too many arguments for interpolated string
   new StringContext("", " is ", "%2d years old").f(s, d, d)
                                                          ^
-stringinterpolation_macro-neg.scala:11: error: too few arguments for interpolated string
+stringinterpolation_macro-neg.scala:16: error: too few arguments for interpolated string
   new StringContext("", "").f()
                              ^
-stringinterpolation_macro-neg.scala:14: error: type mismatch;
+stringinterpolation_macro-neg.scala:19: error: type mismatch;
  found   : String
  required: Boolean
   f"$s%b"
      ^
-stringinterpolation_macro-neg.scala:15: error: type mismatch;
+stringinterpolation_macro-neg.scala:20: error: type mismatch;
  found   : String
  required: Char
   f"$s%c"
      ^
-stringinterpolation_macro-neg.scala:16: error: type mismatch;
+stringinterpolation_macro-neg.scala:21: error: type mismatch;
  found   : Double
  required: Char
   f"$f%c"
      ^
-stringinterpolation_macro-neg.scala:17: error: type mismatch;
+stringinterpolation_macro-neg.scala:22: error: type mismatch;
  found   : String
  required: Int
   f"$s%x"
      ^
-stringinterpolation_macro-neg.scala:18: error: type mismatch;
+stringinterpolation_macro-neg.scala:23: error: type mismatch;
  found   : Boolean
  required: Int
   f"$b%d"
      ^
-stringinterpolation_macro-neg.scala:19: error: type mismatch;
+stringinterpolation_macro-neg.scala:24: error: type mismatch;
  found   : String
  required: Int
   f"$s%d"
      ^
-stringinterpolation_macro-neg.scala:20: error: type mismatch;
+stringinterpolation_macro-neg.scala:25: error: type mismatch;
  found   : Double
  required: Int
   f"$f%o"
      ^
-stringinterpolation_macro-neg.scala:21: error: type mismatch;
+stringinterpolation_macro-neg.scala:26: error: type mismatch;
  found   : String
  required: Double
   f"$s%e"
      ^
-stringinterpolation_macro-neg.scala:22: error: type mismatch;
+stringinterpolation_macro-neg.scala:27: error: type mismatch;
  found   : Boolean
  required: Double
   f"$b%f"
      ^
-stringinterpolation_macro-neg.scala:27: error: type mismatch;
+stringinterpolation_macro-neg.scala:32: error: type mismatch;
  found   : String
  required: Int
 Note that implicit conversions are not applicable because they are ambiguous:
@@ -64,7 +64,109 @@ Note that implicit conversions are not applicable because they are ambiguous:
  are possible conversion functions from String to Int
     f"$s%d"
        ^
-stringinterpolation_macro-neg.scala:30: error: illegal conversion character
+stringinterpolation_macro-neg.scala:35: error: illegal conversion character 'i'
   f"$s%i"
        ^
-15 errors found
+stringinterpolation_macro-neg.scala:38: error: Illegal flag '+'
+  f"$s%+ 0,(s"
+       ^
+stringinterpolation_macro-neg.scala:38: error: Illegal flag ' '
+  f"$s%+ 0,(s"
+        ^
+stringinterpolation_macro-neg.scala:38: error: Illegal flag '0'
+  f"$s%+ 0,(s"
+         ^
+stringinterpolation_macro-neg.scala:38: error: Illegal flag ','
+  f"$s%+ 0,(s"
+          ^
+stringinterpolation_macro-neg.scala:38: error: Illegal flag '('
+  f"$s%+ 0,(s"
+           ^
+stringinterpolation_macro-neg.scala:39: error: Only '-' allowed for c conversion
+  f"$c%#+ 0,(c"
+       ^
+stringinterpolation_macro-neg.scala:40: error: # not allowed for d conversion
+  f"$d%#d"
+       ^
+stringinterpolation_macro-neg.scala:41: error: ',' only allowed for d conversion of integral types
+  f"$d%,x"
+       ^
+stringinterpolation_macro-neg.scala:42: error: only use '+' for BigInt conversions to o, x, X
+  f"$d%+ (x"
+       ^
+stringinterpolation_macro-neg.scala:42: error: only use ' ' for BigInt conversions to o, x, X
+  f"$d%+ (x"
+        ^
+stringinterpolation_macro-neg.scala:42: error: only use '(' for BigInt conversions to o, x, X
+  f"$d%+ (x"
+         ^
+stringinterpolation_macro-neg.scala:43: error: ',' not allowed for a, A
+  f"$f%,(a"
+       ^
+stringinterpolation_macro-neg.scala:43: error: '(' not allowed for a, A
+  f"$f%,(a"
+        ^
+stringinterpolation_macro-neg.scala:44: error: Only '-' allowed for date/time conversions
+  f"$t%#+ 0,(tT"
+       ^
+stringinterpolation_macro-neg.scala:47: error: precision not allowed
+  f"$c%.2c"
+       ^
+stringinterpolation_macro-neg.scala:48: error: precision not allowed
+  f"$d%.2d"
+       ^
+stringinterpolation_macro-neg.scala:49: error: precision not allowed
+  f"%.2%"
+     ^
+stringinterpolation_macro-neg.scala:50: error: precision not allowed
+  f"%.2n"
+     ^
+stringinterpolation_macro-neg.scala:51: error: precision not allowed
+  f"$f%.2a"
+       ^
+stringinterpolation_macro-neg.scala:52: error: precision not allowed
+  f"$t%.2tT"
+       ^
+stringinterpolation_macro-neg.scala:55: error: No last arg
+  f"%<s"
+     ^
+stringinterpolation_macro-neg.scala:56: error: No last arg
+  f"%<c"
+     ^
+stringinterpolation_macro-neg.scala:57: error: No last arg
+  f"%<tT"
+     ^
+stringinterpolation_macro-neg.scala:58: error: Argument index out of range
+  f"${8}%d ${9}%d%3$$d"
+                  ^
+stringinterpolation_macro-neg.scala:59: error: Argument index out of range
+  f"${8}%d ${9}%d%0$$d"
+                  ^
+stringinterpolation_macro-neg.scala:62: warning: Index is not this arg
+  f"${8}%d ${9}%1$$d"
+                ^
+stringinterpolation_macro-neg.scala:63: warning: Argument index ignored if '<' flag is present
+  f"$s%s $s%s %1$$<s"
+               ^
+stringinterpolation_macro-neg.scala:64: warning: Index is not this arg
+  f"$s%s $s%1$$s"
+            ^
+stringinterpolation_macro-neg.scala:67: error: type mismatch;
+ found   : String
+ required: java.util.Formattable
+  f"$s%#s"
+     ^
+stringinterpolation_macro-neg.scala:70: error: 'G' doesn't seem to be a date or time conversion
+  f"$t%tG"
+        ^
+stringinterpolation_macro-neg.scala:71: error: Date/time conversion must have two characters
+  f"$t%t"
+       ^
+stringinterpolation_macro-neg.scala:72: error: Missing conversion operator in '%10.5'; use %% for literal %, %n for newline
+  f"$s%10.5"
+      ^
+stringinterpolation_macro-neg.scala:75: error: conversions must follow a splice; use %% for literal %, %n for newline
+  f"${d}random-leading-junk%d"
+                           ^
+three warnings found
+45 errors found
diff --git a/test/files/neg/stringinterpolation_macro-neg.scala b/test/files/neg/stringinterpolation_macro-neg.scala
index ac9d97d..3869d42 100644
--- a/test/files/neg/stringinterpolation_macro-neg.scala
+++ b/test/files/neg/stringinterpolation_macro-neg.scala
@@ -3,6 +3,11 @@ object Test extends App {
   val d = 8
   val b = false
   val f = 3.14159
+  val c = 'c'
+  val t = new java.util.Date
+  val x = new java.util.Formattable {
+    def formatTo(ff: java.util.Formatter, g: Int, w: Int, p: Int): Unit = ff format "xxx"
+  }
 
   // 1) number of arguments
   new StringContext().f()
@@ -28,4 +33,44 @@ object Test extends App {
   }
 
   f"$s%i"
+
+  // 3) flag mismatches
+  f"$s%+ 0,(s"
+  f"$c%#+ 0,(c"
+  f"$d%#d"
+  f"$d%,x"
+  f"$d%+ (x"
+  f"$f%,(a"
+  f"$t%#+ 0,(tT"
+
+  // 4) bad precisions
+  f"$c%.2c"
+  f"$d%.2d"
+  f"%.2%"
+  f"%.2n"
+  f"$f%.2a"
+  f"$t%.2tT"
+
+  // 5) bad indexes
+  f"%<s"
+  f"%<c"
+  f"%<tT"
+  f"${8}%d ${9}%d%3$$d"
+  f"${8}%d ${9}%d%0$$d"
+
+  // warnings
+  f"${8}%d ${9}%1$$d"
+  f"$s%s $s%s %1$$<s"
+  f"$s%s $s%1$$s"
+
+  // 6) bad arg types
+  f"$s%#s"
+
+  // 7) misunderstood conversions
+  f"$t%tG"
+  f"$t%t"
+  f"$s%10.5"
+
+  // 8) other brain failures
+  f"${d}random-leading-junk%d"
 }
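
For contrast, a small sketch (illustrative only) of well-formed f-interpolator uses corresponding to a few of the rejected ones above: conversions match the argument types, precision appears only where the format spec allows it, and a literal percent is written %%.

    object FInterpOkSketch extends App {
      val s = "Scala"; val d = 8; val f = 3.14159; val t = new java.util.Date
      println(f"$s%s is $d%2d years old")   // %s for a String, %d for an Int
      println(f"$f%.2f at $t%tT")           // precision on a floating-point conversion, two-char date/time
      println(f"progress: 100%%")           // literal percent
    }
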
diff --git a/test/files/neg/structural.scala b/test/files/neg/structural.scala
index bfca37b..d783399 100644
--- a/test/files/neg/structural.scala
+++ b/test/files/neg/structural.scala
@@ -1,54 +1,54 @@
 object Test extends App {
-  
+
   def f(x: { type D; def m: D }): Null = null
-    
+
   class Tata
-  
+
   abstract class Toto[A <: Object] {
     type B <: Object
-    
+
     def f1[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: A): Object; val x: A }) = x.m[Tata](x.x) //fail
     def f2[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: B): Object; val x: B }) = x.m[Tata](x.x) //fail
     def f3[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: C): Object; val x: C }) = x.m[Tata](x.x) //fail
     def f4[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: D): Object; val x: D }) = x.m[Tata](x.x) //fail
     def f5[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: E): Object; val x: Tata }) = x.m[Tata](x.x) //suceed
-    
+
     def f6[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): A }) = x.m[Tata](null) //suceed
     def f7[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): B }) = x.m[Tata](null) //suceed
     def f8[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): C }) = x.m[Tata](null) //suceed
     def f9[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): D }) = x.m[Tata](null) //fail
     def f0[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): E }) = x.m[Tata](null) //suceed
-    
+
   }
-  
+
   val tata = new Tata
   val toto = new Toto[Tata] {
     type B = Tata
   }
-  
+
   //toto.f1[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Tata): Object = null; val x = tata })
   //toto.f2[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Tata): Object = null; val x = tata })
   //toto.f3[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Tata): Object = null; val x = tata })
   //toto.f4[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: D): Object = null; val x = tata })
   toto.f5[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: E): Object = null; val x: Test.Tata = tata })
-  
+
   toto.f6[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Object): Tata = null })
   toto.f7[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Object): Tata = null })
   toto.f8[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Object): Tata = null })
   //toto.f9[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Object): D = null })
   toto.f0[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Object): E = null })
-  
+
   /* Bug #1246 */
   type Summable[T] = { def +(v : T) : T }
   def sum[T <: Summable[T]](xs : List[T]) = xs.reduceLeft[T](_ + _)
-  
+
   /* Bug #1004 & #967 */
   type S1 = { def f(p: this.type): Unit }
   val s1 = new { def f(p: this.type): Unit = () }
-  
+
   type S2 = { type T; def f(p: T): Unit }
   //val s2: S2 = new { type T = A; def f(p: T): Unit = () }
-  
+
   def s3[U >: Null <: Object](p: { def f(p: U): Unit; def u: U }) = ()
-  
+
 }
diff --git a/test/files/neg/switch.check b/test/files/neg/switch.check
index e4730b6..f968d3a 100644
--- a/test/files/neg/switch.check
+++ b/test/files/neg/switch.check
@@ -1,7 +1,9 @@
-switch.scala:38: error: could not emit switch for @switch annotated match
+switch.scala:38: warning: could not emit switch for @switch annotated match
   def fail2(c: Char) = (c: @switch @unchecked) match {
                                     ^
-switch.scala:45: error: could not emit switch for @switch annotated match
+switch.scala:45: warning: could not emit switch for @switch annotated match
   def fail3(c: Char) = (c: @unchecked @switch) match {
                                        ^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/switch.scala b/test/files/neg/switch.scala
index 198583f..a66ed76 100644
--- a/test/files/neg/switch.scala
+++ b/test/files/neg/switch.scala
@@ -16,44 +16,44 @@ object Main {
     case 'f' | 'g'        => true
     case _                => false
   }
-  
+
   def succ2(c: Char) = (c: @switch) match {
     case 'A' | 'B' | 'C'  => true
     case Other.C2         => true
     case Other.C4         => true
     case _                => false
   }
-  
+
   // has a guard, but since SI-5830 that's ok
   def succ_guard(c: Char) = (c: @switch) match {
     case 'A' | 'B' | 'C'  => true
     case x if x == 'A'    => true
     case _                => false
   }
-  
+
   // throwing in @unchecked on the next two to make sure
   // multiple annotations are processed correctly
-  
+
   // thinks a val in an object is constant... so naive
   def fail2(c: Char) = (c: @switch @unchecked) match {
     case 'A'        => true
     case Other.C1   => true
     case _          => false
   }
-  
+
   // more naivete
   def fail3(c: Char) = (c: @unchecked @switch) match {
     case 'A'        => true
     case Other.C3   => true
     case _          => false
   }
-  
+
   // guard case done correctly
   def succ3(c: Char) = (c: @switch) match {
     case 'A' | 'B' | 'C'  => true
     case x                => x == 'A'
   }
-  
+
   // some ints just to mix it up a bit
   def succ4(x: Int, y: Int) = ((x+y): @switch) match {
     case  1 => 5
@@ -62,5 +62,5 @@ object Main {
     case  4 => 50
     case 5|6|7|8 => 100
     case _  => -1
-  }    
+  }
 }
diff --git a/test/files/neg/t0117.scala b/test/files/neg/t0117.scala
index c73227d..4cd9ad6 100644
--- a/test/files/neg/t0117.scala
+++ b/test/files/neg/t0117.scala
@@ -3,4 +3,3 @@ trait B extends A { println(super[A].a) }
 object Test extends App {
   new B {}
 }
- 
diff --git a/test/files/neg/t0152.scala b/test/files/neg/t0152.scala
index dc77ac3..d86d59e 100644
--- a/test/files/neg/t0152.scala
+++ b/test/files/neg/t0152.scala
@@ -8,6 +8,6 @@ trait PlusOne extends Value[Int] {
 
 object Test extends App {
  object boom extends Value[java.lang.String]("foo") with PlusOne
-       
+
  println(boom.value) // class cast exception!
 }
diff --git a/test/files/neg/t0218.check b/test/files/neg/t0218.check
index 6bb7591..a22583d 100644
--- a/test/files/neg/t0218.check
+++ b/test/files/neg/t0218.check
@@ -1,4 +1,4 @@
 t0218.scala:10: error: class type required but APQ.this.P found
-    List(new PP) 
+    List(new PP)
              ^
 one error found
diff --git a/test/files/neg/t0218.scala b/test/files/neg/t0218.scala
index 3d1f52e..319be82 100644
--- a/test/files/neg/t0218.scala
+++ b/test/files/neg/t0218.scala
@@ -1,4 +1,4 @@
-trait APQ {  
+trait APQ {
   class Placement {
   }
 
@@ -7,6 +7,6 @@ trait APQ {
   type PP = P
 
   def pq(numQueens: Int, numRows: Int) : List[Placement] = {
-    List(new PP) 
+    List(new PP)
   }
 }
diff --git a/test/files/neg/t0259.check b/test/files/neg/t0259.check
index 24e35e6..8c15d98 100644
--- a/test/files/neg/t0259.check
+++ b/test/files/neg/t0259.check
@@ -1,6 +1,6 @@
 t0259.scala:4: error: double definition:
-constructor TestCase3:(groups: String*)test.TestCase3 and
-constructor TestCase3:(groups: (String, Int)*)test.TestCase3 at line 3
+constructor TestCase3: (groups: (String, Int)*)test.TestCase3 at line 3 and
+constructor TestCase3: (groups: String*)test.TestCase3 at line 4
 have same type after erasure: (groups: Seq)test.TestCase3
   def this( groups: String*) = this()
       ^
diff --git a/test/files/neg/t0418.check b/test/files/neg/t0418.check
index 4e9ad2f..b95f8e4 100644
--- a/test/files/neg/t0418.check
+++ b/test/files/neg/t0418.check
@@ -1,7 +1,4 @@
 t0418.scala:2: error: not found: value Foo12340771
   null match { case Foo12340771.Bar(x) => x }
                     ^
-t0418.scala:2: error: not found: value x
-  null match { case Foo12340771.Bar(x) => x }
-                                          ^
-two errors found
+one error found
diff --git a/test/files/neg/t0503.scala b/test/files/neg/t0503.scala
index a9b5dcb..322e1ad 100644
--- a/test/files/neg/t0503.scala
+++ b/test/files/neg/t0503.scala
@@ -1,3 +1,3 @@
 val x = new { } with { }
-trait A 
+trait A
 val y = new { } with A
diff --git a/test/files/neg/t0764.check b/test/files/neg/t0764.check
index e14c770..0c7cff1 100644
--- a/test/files/neg/t0764.check
+++ b/test/files/neg/t0764.check
@@ -1,6 +1,7 @@
 t0764.scala:13: error: type mismatch;
  found   : Node{type T = _1.type} where val _1: Node{type T = NextType}
  required: Node{type T = Main.this.AType}
-	new Main[AType]( (value: AType).prepend )
+    (which expands to)  Node{type T = Node{type T = NextType}}
+        new Main[AType]( (value: AType).prepend )
                                         ^
 one error found
diff --git a/test/files/neg/t0764.scala b/test/files/neg/t0764.scala
index 9aebe04..9f77a59 100644
--- a/test/files/neg/t0764.scala
+++ b/test/files/neg/t0764.scala
@@ -1,14 +1,45 @@
 class Top[A] {
-	type AType = A
+        type AType = A
 }
 
-trait Node extends NotNull { outer =>
-	type T <: Node
-	def prepend = new Node { type T = outer.type }
+trait Node { outer =>
+        type T <: Node
+        def prepend = new Node { type T = outer.type }
 }
 
 class Main[NextType <: Node](value: Node { type T = NextType })
-	extends Top[Node { type T = NextType }] {
-	
-	new Main[AType]( (value: AType).prepend )
+        extends Top[Node { type T = NextType }] {
+
+        new Main[AType]( (value: AType).prepend )
+}
+
+/* we've been back-and-forth on this one -- see PRs on SI-8177 for the reasoning
+I think it should compile and that the following error is due to broken =:= on existentials
+ found   : Node{type T = _1.type} where val _1: Node{type T = NextType}
+ required: Node{type T = Main.this.AType}
+    (which expands to)  Node{type T = Node{type T = NextType}}
+
+I claim (omitting the forSome for brevity, even though the premature skolemization is probably the issue)
+_1.type =:= Main.this.AType
+because
+(1) _1.type <:< Main.this.AType and (2) Main.this.AType  <:< _1.type
+(1), because:
+_1.type <:< Node{type T = NextType} (because skolemization and _1's upper bound)
+(2), because:
+Node{type T = NextType} <:< _1.type forSome val _1: Node{type T = NextType}
+because:
+Node{type T = NextType} <:< T forSome {type T <: Node{type T = NextType} with Singleton}
+because 
+Node{type T = NextType} <:< Node{type T = NextType} with Singleton
+
+hmmm.. might the with Singleton be throwing a wrench in our existential house?
+
+Behold the equivalent program which type checks without the fix for SI-8177.
+(Expand type alias, convert type member to type param;
+note the covariance to encode subtyping on type members.)
+
+class Node[+T <: Node[_]] { def prepend = new Node[this.type] }
+class Main[NextType <: Node[_]](value: Node[NextType]) {
+  new Main(value.prepend)
 }
+*/
\ No newline at end of file
diff --git a/test/files/neg/t0764b.check b/test/files/neg/t0764b.check
new file mode 100644
index 0000000..4040954
--- /dev/null
+++ b/test/files/neg/t0764b.check
@@ -0,0 +1,47 @@
+t0764b.scala:27: error: type mismatch;
+ found   : p1.t0764.Node{type T = p1.t0764.<refinement>.type}
+ required: p1.t0764.NodeAlias[p1.t0764.NodeAlias[A]]
+    (which expands to)  p1.t0764.Node{type T = p1.t0764.Node{type T = A}}
+      private[this] def f2 = new Main1[NodeAlias[A]](v.prepend)          // fail
+                                                       ^
+t0764b.scala:28: error: type mismatch;
+ found   : p1.t0764.Node{type T = p1.t0764.<refinement>.type}
+ required: p1.t0764.NodeAlias[p1.t0764.Node{type T = A}]
+    (which expands to)  p1.t0764.Node{type T = p1.t0764.Node{type T = A}}
+      private[this] def f3 = new Main1[Node { type T = A }](v.prepend)   // fail
+                                                              ^
+t0764b.scala:34: error: type mismatch;
+ found   : p1.t0764.Node{type T = p1.t0764.<refinement>.type}
+ required: p1.t0764.Node{type T = p1.t0764.NodeAlias[A]}
+    (which expands to)  p1.t0764.Node{type T = p1.t0764.Node{type T = A}}
+      private[this] def f2 = new Main2[NodeAlias[A]](v.prepend)          // fail
+                                                       ^
+t0764b.scala:35: error: type mismatch;
+ found   : p1.t0764.Node{type T = p1.t0764.<refinement>.type}
+ required: p1.t0764.Node{type T = p1.t0764.Node{type T = A}}
+      private[this] def f3 = new Main2[Node { type T = A }](v.prepend)   // fail
+                                                              ^
+t0764b.scala:51: error: type mismatch;
+ found   : p2.t0764.Node{type T = p2.t0764.<refinement>.type}
+ required: p2.t0764.NodeAlias[p2.t0764.NodeAlias[A]]
+    (which expands to)  p2.t0764.Node{type T = p2.t0764.Node{type T = A}}
+      private[this] def f2 = new Main1[NodeAlias[A]](v.prepend)          // fail
+                                                       ^
+t0764b.scala:52: error: type mismatch;
+ found   : p2.t0764.Node{type T = p2.t0764.<refinement>.type}
+ required: p2.t0764.NodeAlias[p2.t0764.Node{type T = A}]
+    (which expands to)  p2.t0764.Node{type T = p2.t0764.Node{type T = A}}
+      private[this] def f3 = new Main1[Node { type T = A }](v.prepend)   // fail
+                                                              ^
+t0764b.scala:58: error: type mismatch;
+ found   : p2.t0764.Node{type T = p2.t0764.<refinement>.type}
+ required: p2.t0764.Node{type T = p2.t0764.NodeAlias[A]}
+    (which expands to)  p2.t0764.Node{type T = p2.t0764.Node{type T = A}}
+      private[this] def f2 = new Main2[NodeAlias[A]](v.prepend)          // fail
+                                                       ^
+t0764b.scala:59: error: type mismatch;
+ found   : p2.t0764.Node{type T = p2.t0764.<refinement>.type}
+ required: p2.t0764.Node{type T = p2.t0764.Node{type T = A}}
+      private[this] def f3 = new Main2[Node { type T = A }](v.prepend)   // fail
+                                                              ^
+8 errors found
diff --git a/test/files/neg/t0764b.scala b/test/files/neg/t0764b.scala
new file mode 100644
index 0000000..14c623c
--- /dev/null
+++ b/test/files/neg/t0764b.scala
@@ -0,0 +1,63 @@
+// see neg/t0764 why this should probably be a pos/ test -- alas something's wrong with existential subtyping (?)
+
+// In all cases when calling "prepend" the receiver 'v'
+// has static type NodeAlias[A] or (equivalently) Node { type T = A }.
+// Since prepend explicitly returns the singleton type of the receiver,
+// the return type of prepend in all cases is "v.type", and so the call
+// to "new Main" can be parameterized with any of the following, in order
+// of decreasing specificity with a tie for second place:
+//
+//   new Main[v.type](v.prepend)
+//   new Main[NodeAlias[A]](v.prepend)
+//   new Main[Node { type T = A }](v.prepend)
+//   new Main(v.prepend)
+
+// the `fail` comments below denote what didn't compile before SI-8177 fixed all of them
+
+package p1 {
+  object t0764 {
+    type NodeAlias[A] = Node { type T = A }
+    trait Node { outer =>
+      type T <: Node
+      def prepend: Node { type T = outer.type } = ???
+    }
+
+    class Main1[A <: Node](v: NodeAlias[A]) {
+      private[this] def f1 = new Main1(v.prepend)                        // fail
+      private[this] def f2 = new Main1[NodeAlias[A]](v.prepend)          // fail
+      private[this] def f3 = new Main1[Node { type T = A }](v.prepend)   // fail
+      private[this] def f4 = new Main1[v.type](v.prepend)                // ok
+    }
+
+    class Main2[A <: Node](v: Node { type T = A }) {
+      private[this] def f1 = new Main2(v.prepend)                        // fail
+      private[this] def f2 = new Main2[NodeAlias[A]](v.prepend)          // fail
+      private[this] def f3 = new Main2[Node { type T = A }](v.prepend)   // fail
+      private[this] def f4 = new Main2[v.type](v.prepend)                // ok
+    }
+  }
+}
+
+package p2 {
+  object t0764 {
+    type NodeAlias[A] = Node { type T = A }
+    trait Node { outer =>
+      type T <: Node
+      def prepend: NodeAlias[outer.type] = ???
+    }
+
+    class Main1[A <: Node](v: NodeAlias[A]) {
+      private[this] def f1 = new Main1(v.prepend)                        // ok!  <<========== WOT
+      private[this] def f2 = new Main1[NodeAlias[A]](v.prepend)          // fail
+      private[this] def f3 = new Main1[Node { type T = A }](v.prepend)   // fail
+      private[this] def f4 = new Main1[v.type](v.prepend)                // ok
+    }
+
+    class Main2[A <: Node](v: Node { type T = A }) {
+      private[this] def f1 = new Main2(v.prepend)                        // fail
+      private[this] def f2 = new Main2[NodeAlias[A]](v.prepend)          // fail
+      private[this] def f3 = new Main2[Node { type T = A }](v.prepend)   // fail
+      private[this] def f4 = new Main2[v.type](v.prepend)                // ok
+    }
+  }
+}
diff --git a/test/files/neg/t0816.scala b/test/files/neg/t0816.scala
index 738a634..0128a0a 100644
--- a/test/files/neg/t0816.scala
+++ b/test/files/neg/t0816.scala
@@ -6,7 +6,7 @@ case class Ctest(override val data: String) extends Btest(data, true)
 
 class testCaseClass {
   def test(x: Atest) = x match {
-    case Ctest(data) => Console.println("C")  
+    case Ctest(data) => Console.println("C")
     case Btest(data, b) => Console.println("B")
   }
 }
diff --git a/test/files/neg/t1010.scala b/test/files/neg/t1010.scala
index 7a1e661..fd14297 100644
--- a/test/files/neg/t1010.scala
+++ b/test/files/neg/t1010.scala
@@ -6,9 +6,9 @@ class MailBox {
 abstract class Actor {
  private val in = new MailBox
 
- def send(msg: in.Message) =  error("foo")
+ def send(msg: in.Message) =  sys.error("foo")
 
- def unstable: Actor = error("foo")
+ def unstable: Actor = sys.error("foo")
 
  def dubiousSend(msg: MailBox#Message): Nothing =
    unstable.send(msg) // in.Message becomes unstable.Message, but that's ok since Message is a concrete type member
diff --git a/test/files/neg/t1011.check b/test/files/neg/t1011.check
deleted file mode 100644
index d9c8123..0000000
--- a/test/files/neg/t1011.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t1011.scala:8: error: not found: value entity
-       <dl><code>{Text(entity)}</code>
-                       ^
-one error found
diff --git a/test/files/neg/t1011.scala b/test/files/neg/t1011.scala
deleted file mode 100644
index 57a6ad7..0000000
--- a/test/files/neg/t1011.scala
+++ /dev/null
@@ -1,127 +0,0 @@
-package test;
-import scala.xml._;
-
-abstract class Test {
-  //val entity : String;
-  def primitiveHeader : NodeSeq = 
-    Group({
-       <dl><code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code>
-           <code>{Text(entity)}</code></dl>
-    } ++ // 3 seconds
-    {}++ // 5 seconds
-    {}++ // 10 seconds
-    {}++ // 20 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 5 seconds
-    {}++ // 10 seconds
-    {}++ // 20 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 5 seconds
-    {}++ // 10 seconds
-    {}++ // 20 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    {}++ // 40 seconds
-    <hr/>);
-}
diff --git a/test/files/neg/t1017.check b/test/files/neg/t1017.check
deleted file mode 100644
index 52101c7..0000000
--- a/test/files/neg/t1017.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t1017.scala:3: error: not found: value foo
-<x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x>{ foo }</x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x>
-                                                        ^
-one error found
diff --git a/test/files/neg/t1017.scala b/test/files/neg/t1017.scala
deleted file mode 100644
index e389f30..0000000
--- a/test/files/neg/t1017.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-// 'foo' is not defined
-object Test {
-<x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x>{ foo }</x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x>
-}
diff --git a/test/files/neg/t1112.check b/test/files/neg/t1112.check
index e69be3e..5e3821b 100644
--- a/test/files/neg/t1112.check
+++ b/test/files/neg/t1112.check
@@ -1,4 +1,4 @@
-t1112.scala:12: error: too many arguments for method call: (p: Int)(f: => () => Unit)Unit
+t1112.scala:12: error: too many arguments for method call: (p: Int)(f: => Test.this.Type1)Unit
     call(0,() => System.out.println("here we are"))
         ^
 one error found
diff --git a/test/files/neg/t1112.scala b/test/files/neg/t1112.scala
index b2a374c..1a88629 100644
--- a/test/files/neg/t1112.scala
+++ b/test/files/neg/t1112.scala
@@ -1,13 +1,13 @@
 // checks that error doesn't crash the compiler
-// (due to isFunctionType normalizing Type1 to a function type, 
+// (due to isFunctionType normalizing Type1 to a function type,
 //  but then the code that used that test not using the normalized type for further operations)
 class Test {
   type Type1 = () => Unit
-  
+
   def call(p: Int)(f: => Type1) = {
     f()
   }
-  
+
   def run = {
     call(0,() => System.out.println("here we are"))
   }
diff --git a/test/files/neg/t112706A.check b/test/files/neg/t112706A.check
index 30d0c3e..ad403ab 100644
--- a/test/files/neg/t112706A.check
+++ b/test/files/neg/t112706A.check
@@ -1,6 +1,6 @@
 t112706A.scala:5: error: constructor cannot be instantiated to expected type;
  found   : (T1, T2)
  required: String
-    case Tuple2(node,_) =>   
+    case Tuple2(node,_) =>
          ^
 one error found
diff --git a/test/files/neg/t112706A.scala b/test/files/neg/t112706A.scala
index b7799af..1130472 100644
--- a/test/files/neg/t112706A.scala
+++ b/test/files/neg/t112706A.scala
@@ -2,7 +2,7 @@ package test;
 trait Test {
   def foo(p : List[Tuple2[String,String]]) = {
     for (t <- p) t._1 match {
-    case Tuple2(node,_) =>   
+    case Tuple2(node,_) =>
     }
   }
 }
diff --git a/test/files/neg/t1181.check b/test/files/neg/t1181.check
index 3724752..13b73d5 100644
--- a/test/files/neg/t1181.check
+++ b/test/files/neg/t1181.check
@@ -1,8 +1,10 @@
 t1181.scala:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
      case (Nil, Nil) => map
                         ^
-t1181.scala:9: error: missing parameter type
+t1181.scala:9: error: type mismatch;
+ found   : scala.collection.immutable.Map[Symbol,Symbol]
+ required: Symbol
      _ => buildMap(map.updated(keyList.head, valueList.head), keyList.tail, valueList.tail)
-     ^
+                  ^
 one warning found
 one error found
diff --git a/test/files/neg/t1183.scala b/test/files/neg/t1183.scala
index 024c4ab..23868ab 100644
--- a/test/files/neg/t1183.scala
+++ b/test/files/neg/t1183.scala
@@ -9,7 +9,7 @@ object Test {
     case class Bar(i:Int)
   }
 
-  
+
   class Test717 {
     val foo1 = new Foo(1)
 
@@ -27,7 +27,7 @@ object Test {
     class Baz
     object Bam
     object Bar
-    
+
     def unapply(s : String) : Option[Bar] = Some(new Bar(s))
   }
 
diff --git a/test/files/neg/t1224.check b/test/files/neg/t1224.check
index fb61275..ab8a6f1 100644
--- a/test/files/neg/t1224.check
+++ b/test/files/neg/t1224.check
@@ -1,4 +1,4 @@
-t1224.scala:4: error: illegal cyclic reference involving type T
+t1224.scala:4: error: lower bound C[A.this.T] does not conform to upper bound C[C[A.this.T]]
  type T >: C[T] <: C[C[T]]
       ^
 one error found
diff --git a/test/files/neg/t1224.flags b/test/files/neg/t1224.flags
new file mode 100644
index 0000000..ca20f55
--- /dev/null
+++ b/test/files/neg/t1224.flags
@@ -0,0 +1 @@
+-Ybreak-cycles
diff --git a/test/files/neg/t1432.check b/test/files/neg/t1432.check
index 180cb05..e41f345 100644
--- a/test/files/neg/t1432.check
+++ b/test/files/neg/t1432.check
@@ -1,6 +1,8 @@
-t1432.scala:10: error: type mismatch;
- found   : (Int, Bug_NoUnique.Wrap[Bug_NoUnique.Wrap[Unit]] => Double)
- required: (Int, Unit => Double)
+t1432.scala:12: error: type mismatch;
+ found   : (Int, Bug_NoUnique.Alias2[Bug_NoUnique.Wrap[Unit]] => Double)
+    (which expands to)  (Int, Bug_NoUnique.Wrap[Bug_NoUnique.Wrap[Unit]] => Double)
+ required: Bug_NoUnique.TypeCon[Unit]
+    (which expands to)  (Int, Unit => Double)
   def test(x : TypeCon[Wrap[Unit]]) : TypeCon[Unit] = wrap(x)
                                                           ^
 one error found
diff --git a/test/files/neg/t1432.scala b/test/files/neg/t1432.scala
index 638f365..bdf2331 100644
--- a/test/files/neg/t1432.scala
+++ b/test/files/neg/t1432.scala
@@ -4,7 +4,9 @@ object Bug_NoUnique {
 
   case class Wrap[E](parent:E) {}
 
-  def wrap[E,A,Y](v : (A,E=>Y)) : (A,Wrap[E]=>Y) =
+  type Alias2[E] = Wrap[E]
+
+  def wrap[E,A,Y](v : (A,E=>Y)) : (A,Alias2[E]=>Y) =
 	throw new Error("Body here")
 
   def test(x : TypeCon[Wrap[Unit]]) : TypeCon[Unit] = wrap(x)
diff --git a/test/files/neg/t1477.scala b/test/files/neg/t1477.scala
index ab13f14..a9a6d67 100644
--- a/test/files/neg/t1477.scala
+++ b/test/files/neg/t1477.scala
@@ -8,7 +8,7 @@ object Test extends App {
     type V <: D
     val y: V#T = new B { }
   }
- 
+
   trait Middle extends C {
     type V <: (D with U)
   }
diff --git a/test/files/neg/t1503.check b/test/files/neg/t1503.check
new file mode 100644
index 0000000..7adeea2
--- /dev/null
+++ b/test/files/neg/t1503.check
@@ -0,0 +1,8 @@
+t1503.scala:7: warning: The value matched by Whatever is bound to n, which may be used under the
+unsound assumption that it has type Whatever.type, whereas we can only safely
+count on it having type Any, as the pattern is matched using `==` (see SI-1503).
+  def matchWhateverCCE(x: Any) = x match { case n @ Whatever => n }
+                                                    ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t6675.flags b/test/files/neg/t1503.flags
similarity index 100%
copy from test/files/neg/t6675.flags
copy to test/files/neg/t1503.flags
diff --git a/test/files/neg/t1503.scala b/test/files/neg/t1503.scala
new file mode 100644
index 0000000..9877f99
--- /dev/null
+++ b/test/files/neg/t1503.scala
@@ -0,0 +1,8 @@
+object Whatever {
+  override def equals(x: Any) = true
+}
+
+class Test {
+  // when left to its own devices, and not under -Xfuture, the return type is Whatever.type
+  def matchWhateverCCE(x: Any) = x match { case n @ Whatever => n }
+}
\ No newline at end of file
diff --git a/test/files/neg/t1523.scala b/test/files/neg/t1523.scala
index b8754f4..219fb0c 100644
--- a/test/files/neg/t1523.scala
+++ b/test/files/neg/t1523.scala
@@ -1,5 +1,5 @@
-object test {  
+object test {
   def bug(x: Any) = x
-  
+
   def go() = bug("a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a")
 }
diff --git a/test/files/neg/t1623.scala b/test/files/neg/t1623.scala
index d02fc2e..f5189aa 100644
--- a/test/files/neg/t1623.scala
+++ b/test/files/neg/t1623.scala
@@ -1,7 +1,7 @@
 package test
 
-trait A 
-trait B 
+trait A
+trait B
 
 class BImpl extends B {
   this: A =>
diff --git a/test/files/neg/t1705.scala b/test/files/neg/t1705.scala
index bf1fcea..fabdca0 100644
--- a/test/files/neg/t1705.scala
+++ b/test/files/neg/t1705.scala
@@ -17,7 +17,7 @@ object crashing {
     }
   }
 }
-/* 
+/*
 
 Infinite loop in Typer.addLocals. Printing all calls to it:
 
diff --git a/test/files/neg/t1845.scala b/test/files/neg/t1845.scala
index dab448b..4d39664 100644
--- a/test/files/neg/t1845.scala
+++ b/test/files/neg/t1845.scala
@@ -1,10 +1,10 @@
-import scala.util.parsing.combinator.syntactical.TokenParsers
-import scala.util.parsing.combinator.lexical.StdLexical
-import scala.util.parsing.combinator.token._
+class Tokens { abstract class Token }
+trait TokenParsers { val lexical: Tokens }
+
 
 class MyTokenParsers extends TokenParsers {
   import lexical._
-  type Tokens = StdTokens
-  type Elem = lexical.Token
-  val lexical = new StdLexical
+
+
+  val lexical = new Tokens
 }
diff --git a/test/files/neg/t1878-typer.check b/test/files/neg/t1878-typer.check
deleted file mode 100644
index e3a20d0..0000000
--- a/test/files/neg/t1878-typer.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t1878-typer.scala:4: error: _* may only come last
-    case <p> { _* } </p> =>
-                ^
-one error found
diff --git a/test/files/neg/t1878-typer.scala b/test/files/neg/t1878-typer.scala
deleted file mode 100644
index 1eb0cb7..0000000
--- a/test/files/neg/t1878-typer.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends App {
-  // illegal - bug #1764
-  null match {
-    case <p> { _* } </p> =>
-  }
-}
diff --git a/test/files/neg/t1878.check b/test/files/neg/t1878.check
index ac2071c..5814375 100644
--- a/test/files/neg/t1878.check
+++ b/test/files/neg/t1878.check
@@ -1,7 +1,7 @@
-t1878.scala:3: error: bad use of _* (a sequence pattern must be the last pattern)
+t1878.scala:3: error: bad simple pattern: bad use of _* (a sequence pattern must be the last pattern)
   val err1 = "" match { case Seq(f @ _*, ',') => f }
                                        ^
-t1878.scala:9: error: bad use of _* (a sequence pattern must be the last pattern)
+t1878.scala:9: error: bad simple pattern: bad use of _* (a sequence pattern must be the last pattern)
   val List(List(_*, arg2), _) = List(List(1,2,3), List(4,5,6))
                   ^
 two errors found
diff --git a/test/files/neg/t1878.scala b/test/files/neg/t1878.scala
index 99fee48..b29186a 100644
--- a/test/files/neg/t1878.scala
+++ b/test/files/neg/t1878.scala
@@ -1,13 +1,13 @@
 object Test extends App {
-  // illegal 
+  // illegal
   val err1 = "" match { case Seq(f @ _*, ',') => f }
-  
+
   // no error
   val List(List(arg1, _*), _) = List(List(1,2,3), List(4,5,6))
-  
+
   // illegal
   val List(List(_*, arg2), _) = List(List(1,2,3), List(4,5,6))
-  
+
   /* see t1878-typer.scala
   // illegal - bug #1764
   null match {
diff --git a/test/files/neg/t1909-object.check b/test/files/neg/t1909-object.check
new file mode 100644
index 0000000..401c1f7
--- /dev/null
+++ b/test/files/neg/t1909-object.check
@@ -0,0 +1,4 @@
+t1909-object.scala:4: error: !!! SI-1909 Unable to STATICally lift object InnerTrouble$1, which is defined in the self- or super-constructor call of class Kaboom. A VerifyError is likely.
+      object InnerTrouble
+             ^
+one error found
diff --git a/test/files/neg/t1909-object.flags b/test/files/neg/t1909-object.flags
new file mode 100644
index 0000000..eb8b406
--- /dev/null
+++ b/test/files/neg/t1909-object.flags
@@ -0,0 +1 @@
+-Xdev -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t1909-object.scala b/test/files/neg/t1909-object.scala
new file mode 100644
index 0000000..d6011ba
--- /dev/null
+++ b/test/files/neg/t1909-object.scala
@@ -0,0 +1,12 @@
+class Kaboom(a: Any) {
+  def this() = {
+    this({
+      object InnerTrouble
+      InnerTrouble
+    })
+  }
+}
+
+object Test extends App {
+  new Kaboom()
+}
\ No newline at end of file
diff --git a/test/files/neg/t1980.check b/test/files/neg/t1980.check
new file mode 100644
index 0000000..2fa27fa
--- /dev/null
+++ b/test/files/neg/t1980.check
@@ -0,0 +1,12 @@
+t1980.scala:2: warning: by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see SI-1980.
+  def op1_:(x: => Any) = ()                 // warn
+      ^
+t1980.scala:3: warning: by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see SI-1980.
+  def op2_:(x: Any, y: => Any) = ()         // warn
+      ^
+t1980.scala:4: warning: by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see SI-1980.
+  def op3_:(x: Any, y: => Any)(a: Any) = () // warn
+      ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/delayed-init-ref.flags b/test/files/neg/t1980.flags
similarity index 100%
copy from test/files/neg/delayed-init-ref.flags
copy to test/files/neg/t1980.flags
diff --git a/test/files/neg/t1980.scala b/test/files/neg/t1980.scala
new file mode 100644
index 0000000..132865e
--- /dev/null
+++ b/test/files/neg/t1980.scala
@@ -0,0 +1,9 @@
+object Test {
+  def op1_:(x: => Any) = ()                 // warn
+  def op2_:(x: Any, y: => Any) = ()         // warn
+  def op3_:(x: Any, y: => Any)(a: Any) = () // warn
+
+  def op4() = ()                            // no warn
+  def op5(x: => Any) = ()                   // no warn
+  def op6_:(x: Any)(a: => Any) = ()         // no warn
+}
diff --git a/test/files/neg/t2066.check b/test/files/neg/t2066.check
new file mode 100644
index 0000000..efade87
--- /dev/null
+++ b/test/files/neg/t2066.check
@@ -0,0 +1,21 @@
+t2066.scala:6: error: overriding method f in trait A1 of type [T[_]]=> Unit;
+ method f has incompatible type
+  override def f[T[+_]] = ()
+               ^
+t2066.scala:10: error: overriding method f in trait A1 of type [T[_]]=> Unit;
+ method f has incompatible type
+  override def f[T[-_]] = ()
+               ^
+t2066.scala:23: error: overriding method f in trait A2 of type [T[+_]]=> Unit;
+ method f has incompatible type
+  override def f[T[-_]] = ()
+               ^
+t2066.scala:45: error: overriding method f in trait A4 of type [T[X[+_]]]=> Unit;
+ method f has incompatible type
+  override def f[T[X[_]]] = ()
+               ^
+t2066.scala:53: error: overriding method f in trait A5 of type [T[X[-_]]]=> Unit;
+ method f has incompatible type
+  override def f[T[X[_]]] = ()
+               ^
+5 errors found
diff --git a/test/files/neg/t2066.scala b/test/files/neg/t2066.scala
new file mode 100644
index 0000000..7f15d39
--- /dev/null
+++ b/test/files/neg/t2066.scala
@@ -0,0 +1,70 @@
+trait A1 {
+  def f[T[_]] = ()
+}
+
+trait B1 extends A1 {
+  override def f[T[+_]] = ()
+}
+
+trait C1 extends A1 {
+  override def f[T[-_]] = ()
+}
+
+
+trait A2 {
+  def f[T[+_]] = ()
+}
+
+trait B2 extends A2 {
+  override def f[T[_]] = () // okay
+}
+
+trait C2 extends A2 {
+  override def f[T[-_]] = ()
+}
+
+
+trait A3 {
+  def f[T[-_]] = ()
+}
+
+trait B3 extends A3 {
+  override def f[T[_]] = () // okay
+}
+
+trait C3 extends A3 {
+  override def f[T[-_]] = ()
+}
+
+
+trait A4 {
+  def f[T[X[+_]]] = ()
+}
+
+trait B4 extends A4 {
+  override def f[T[X[_]]] = ()
+}
+
+trait A5 {
+  def f[T[X[-_]]] = ()
+}
+
+trait B5 extends A5 {
+  override def f[T[X[_]]] = ()
+}
+
+
+
+trait A6 {
+  def f[T[X[_]]] = ()
+}
+
+trait B6 extends A6 {
+  override def f[T[X[+_]]] = () // okay
+}
+trait C6 extends A6 {
+  override def f[T[X[_]]] = () // okay
+}
+trait D6 extends A6 {
+  override def f[T[X[-_]]] = ()
+}
diff --git a/test/files/neg/t2066b.check b/test/files/neg/t2066b.check
new file mode 100644
index 0000000..097c44f
--- /dev/null
+++ b/test/files/neg/t2066b.check
@@ -0,0 +1,5 @@
+t2066b.scala:7: error: overriding method f in trait A of type [T[_]](x: T[Int])T[Any];
+ method f has incompatible type
+	 def f[T[+_]](x : T[Int]) : T[Any] = x
+             ^
+one error found
diff --git a/test/pending/neg/t2066.scala b/test/files/neg/t2066b.scala
similarity index 100%
rename from test/pending/neg/t2066.scala
rename to test/files/neg/t2066b.scala
diff --git a/test/files/neg/t2148.check b/test/files/neg/t2148.check
index 5113b48..27b5dce 100644
--- a/test/files/neg/t2148.check
+++ b/test/files/neg/t2148.check
@@ -1,4 +1,4 @@
-t2148.scala:9: error: type A is not a stable prefix
+t2148.scala:9: error: A is not a legal prefix for a constructor
   val b = new A with A#A1
                        ^
 one error found
diff --git a/test/files/neg/t2148.scala b/test/files/neg/t2148.scala
index c0521d9..25788be 100644
--- a/test/files/neg/t2148.scala
+++ b/test/files/neg/t2148.scala
@@ -1,6 +1,6 @@
-class A { 
+class A {
   var i = 0
-  trait A1 extends A { 
+  trait A1 extends A {
     i += 1
   }
 }
diff --git a/test/files/neg/t2421b.scala b/test/files/neg/t2421b.scala
index a8d22f2..d8159a8 100644
--- a/test/files/neg/t2421b.scala
+++ b/test/files/neg/t2421b.scala
@@ -12,6 +12,6 @@ object Test {
   f
 }
 
-/* bug: 
+/* bug:
 error: type arguments [Test2.A] do not conform to method b's type parameter bounds [X <: Test2.B]
 */
\ No newline at end of file
diff --git a/test/files/neg/t2441.scala b/test/files/neg/t2441.scala
index a8c9e72..6784ebb 100644
--- a/test/files/neg/t2441.scala
+++ b/test/files/neg/t2441.scala
@@ -1,5 +1,5 @@
 trait X
-trait A { 
+trait A {
   def f: Option[X]
   def g: Option[X]
 }
@@ -8,7 +8,7 @@ object B {
 }
 class B extends A {
   private class Bippy
-  
+
   override def f = Some(new B.Y)
   override def g: Option[X] = Some(new B.Y)
 }
diff --git a/test/files/neg/t2442.check b/test/files/neg/t2442.check
index 714816f..9ff0b44 100644
--- a/test/files/neg/t2442.check
+++ b/test/files/neg/t2442.check
@@ -1,9 +1,11 @@
-t2442.scala:4: error: match may not be exhaustive.
+t2442.scala:4: warning: match may not be exhaustive.
 It would fail on the following input: THREE
   def f(e: MyEnum) = e match {
                      ^
-t2442.scala:11: error: match may not be exhaustive.
+t2442.scala:11: warning: match may not be exhaustive.
 It would fail on the following input: BLUE
   def g(e: MySecondEnum) = e match {
                            ^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t2462b.check b/test/files/neg/t2462b.check
index bc0d9aa..b3b8007 100644
--- a/test/files/neg/t2462b.check
+++ b/test/files/neg/t2462b.check
@@ -6,9 +6,6 @@ t2462b.scala:9: warning: Invalid implicitNotFound message for trait Meh2 in pack
 The type parameter Elem referenced in the message of the @implicitNotFound annotation is not defined by trait Meh2.
 trait Meh2[-From, +To]
       ^
-t2462b.scala:12: error: overriding method x in class thankyoupartest of type => Int;
- method x needs `override' modifier
-class testmustfail extends thankyoupartest { def x = 43 }
-                                                 ^
+error: No warnings can be incurred under -Xfatal-warnings.
 two warnings found
 one error found
diff --git a/test/files/neg/case-collision.flags b/test/files/neg/t2462b.flags
similarity index 100%
copy from test/files/neg/case-collision.flags
copy to test/files/neg/t2462b.flags
diff --git a/test/files/neg/t2462b.scala b/test/files/neg/t2462b.scala
index 7a1389c..576db4b 100644
--- a/test/files/neg/t2462b.scala
+++ b/test/files/neg/t2462b.scala
@@ -7,6 +7,3 @@ trait Meh[-From, +To]
 
 @implicitNotFound(msg = "Cannot construct a collection of type ${To} ${Elem}.")
 trait Meh2[-From, +To]
-
-class thankyoupartest { def x = 42 }
-class testmustfail extends thankyoupartest { def x = 43 }
diff --git a/test/files/neg/t2462c.check b/test/files/neg/t2462c.check
new file mode 100644
index 0000000..edeead5
--- /dev/null
+++ b/test/files/neg/t2462c.check
@@ -0,0 +1,7 @@
+t2462c.scala:18: error: No C of X$Y
+  f[X$Y]
+   ^
+t2462c.scala:24: error: No C of Foo[Int]
+  f[Foo[Int]]
+   ^
+two errors found
diff --git a/test/files/neg/case-collision.flags b/test/files/neg/t2462c.flags
similarity index 100%
copy from test/files/neg/case-collision.flags
copy to test/files/neg/t2462c.flags
diff --git a/test/files/neg/t2462c.scala b/test/files/neg/t2462c.scala
new file mode 100644
index 0000000..acf04af
--- /dev/null
+++ b/test/files/neg/t2462c.scala
@@ -0,0 +1,25 @@
+
+import annotation._
+
+@implicitNotFound("No C of ${ A }")
+class C[A]
+
+trait X$Y
+/* using the $$ separator for expanded names is unwise
+trait X$$Y
+trait X$$$Y
+trait X$$$$Y
+ */
+
+trait Foo[A]
+
+class Test {
+  def f[A: C] = ???
+  f[X$Y]
+/* using the $$ separator for expanded names is unwise
+  f[X$$Y]
+  f[X$$$Y]
+  f[X$$$$Y]
+ */
+  f[Foo[Int]]
+}
diff --git a/test/files/neg/t2641.check b/test/files/neg/t2641.check
index 909f4f0..a0a960f 100644
--- a/test/files/neg/t2641.check
+++ b/test/files/neg/t2641.check
@@ -1,15 +1,7 @@
 t2641.scala:18: error: wrong number of type arguments for ManagedSeq, should be 2
         with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A]]
                                                          ^
-t2641.scala:16: error: illegal inheritance;
- self-type ManagedSeq does not conform to ManagedSeqStrict[A]'s selftype ManagedSeqStrict[A]
-        extends ManagedSeqStrict[A]
-                ^
-t2641.scala:17: error: illegal inheritance;
- self-type ManagedSeq does not conform to scala.collection.TraversableView[A,ManagedSeqStrict[A]]'s selftype scala.collection.TraversableView[A,ManagedSeqStrict[A]]
-        with TraversableView[A, ManagedSeqStrict[A]]
-             ^
-t2641.scala:27: error: value managedIterator is not a member of ManagedSeq
+t2641.scala:27: error: value managedIterator is not a member of ManagedSeq[A,Coll]
     override def managedIterator = self.managedIterator slice (from, until)
                                         ^
-four errors found
+two errors found
diff --git a/test/files/neg/t2641.scala b/test/files/neg/t2641.scala
index 626d5d7..bc048e0 100644
--- a/test/files/neg/t2641.scala
+++ b/test/files/neg/t2641.scala
@@ -8,7 +8,7 @@ abstract class ManagedSeqStrict[+A]
         with GenericTraversableTemplate[A, ManagedSeqStrict]
 {
     override def companion: GenericCompanion[ManagedSeqStrict] = null
-   
+
     override def foreach[U](f: A => U): Unit = ()
 }
 
@@ -17,9 +17,9 @@ trait ManagedSeq[+A, +Coll]
         with TraversableView[A, ManagedSeqStrict[A]]
         with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A]]
 { self =>
-   
+
     override def underlying = throw new Exception("no underlying")
-   
+
   //trait Transformed[+B] extends ManagedSeq[B] with super.Transformed[B]
   trait Transformed[+B] extends ManagedSeq[B, Coll] with super.Transformed[B]
 
diff --git a/test/files/neg/t2796.check b/test/files/neg/t2796.check
index aeb1849..22ee35a 100644
--- a/test/files/neg/t2796.check
+++ b/test/files/neg/t2796.check
@@ -1,4 +1,9 @@
-t2796.scala:7: error: Implementation restriction: early definitions in traits are not initialized before the super class is initialized.
+t2796.scala:11: warning: early type members are deprecated. Move them to the regular body: the semantics are the same.
+  type X = Int                       // warn
+       ^
+t2796.scala:7: warning: Implementation restriction: early definitions in traits are not initialized before the super class is initialized.
   val abstractVal = "T1.abstractVal" // warn
       ^
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
 one error found
diff --git a/test/files/neg/t2796.flags b/test/files/neg/t2796.flags
index e8fb65d..d1b831e 100644
--- a/test/files/neg/t2796.flags
+++ b/test/files/neg/t2796.flags
@@ -1 +1 @@
--Xfatal-warnings
\ No newline at end of file
+-deprecation -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t2796.scala b/test/files/neg/t2796.scala
index 3bcc9df..fa2f235 100644
--- a/test/files/neg/t2796.scala
+++ b/test/files/neg/t2796.scala
@@ -8,10 +8,9 @@ trait T1 extends {
 } with Base
 
 trait T2 extends {
-  type X = Int                       // okay
+  type X = Int                       // warn
 } with Base
 
-
 class C1 extends {
   val abstractVal = "C1.abstractVal" // okay
 } with Base
diff --git a/test/files/neg/t284.check b/test/files/neg/t284.check
index 37801af..7c2e9be 100644
--- a/test/files/neg/t284.check
+++ b/test/files/neg/t284.check
@@ -1,8 +1,6 @@
 t284.scala:2: warning: Detected apparent refinement of Unit; are you missing an '=' sign?
-    def f1(a: T): Unit { }
-                       ^
-t284.scala:5: error: Unmatched closing brace '}' ignored here
-  }
-  ^
+  def f1(a: T): Unit { }
+                     ^
+error: No warnings can be incurred under -Xfatal-warnings.
 one warning found
 one error found
diff --git a/test/files/disabled/t7020.flags b/test/files/neg/t284.flags
similarity index 100%
copy from test/files/disabled/t7020.flags
copy to test/files/neg/t284.flags
diff --git a/test/files/neg/t284.scala b/test/files/neg/t284.scala
index a210130..f75bc3d 100644
--- a/test/files/neg/t284.scala
+++ b/test/files/neg/t284.scala
@@ -1,6 +1,5 @@
 trait B[T] {
-    def f1(a: T): Unit { }
-    def f2(a: T): Unit
-    def f3(a: T) { }
-  }
+  def f1(a: T): Unit { }
+  def f2(a: T): Unit
+  def f3(a: T) { }
 }
diff --git a/test/files/neg/t2870.scala b/test/files/neg/t2870.scala
index 59fba3e..4de1924 100755
--- a/test/files/neg/t2870.scala
+++ b/test/files/neg/t2870.scala
@@ -1,9 +1,9 @@
 class Jars(jar: Jar)
 
-object Jars {  
+object Jars {
   import scala.util.Properties.javaClassPath
 
-  val scala = fromClasspathString(javaClassPath) 
-    
+  val scala = fromClasspathString(javaClassPath)
+
   def fromClasspathString(s: String): Jars = null
 }
diff --git a/test/files/neg/t2910.scala b/test/files/neg/t2910.scala
index fa51038..d9a7810 100644
--- a/test/files/neg/t2910.scala
+++ b/test/files/neg/t2910.scala
@@ -10,7 +10,7 @@ object Junk {
     val z = 0
     lazy val s = "abc"
   }
-    
+
   def test4() {
     lazy val x = {
       x
@@ -35,5 +35,5 @@ object Test {
     lazy val f: Int = g
     var x: Int = f
     lazy val g: Int = x
-  }     
+  }
 }
diff --git a/test/files/neg/t3015.check b/test/files/neg/t3015.check
index 6948392..729db84 100644
--- a/test/files/neg/t3015.check
+++ b/test/files/neg/t3015.check
@@ -1,6 +1,6 @@
 t3015.scala:7: error: scrutinee is incompatible with pattern type;
  found   : _$1
  required: String
-  val b(foo) = "foo" 
+  val b(foo) = "foo"
        ^
 one error found
diff --git a/test/files/neg/t3015.scala b/test/files/neg/t3015.scala
index 9af8f30..adfa15b 100644
--- a/test/files/neg/t3015.scala
+++ b/test/files/neg/t3015.scala
@@ -4,5 +4,5 @@ class UnApp[P] {
 
 object Test extends App {
   val b: UnApp[_] = new UnApp[String]
-  val b(foo) = "foo" 
+  val b(foo) = "foo"
 }
diff --git a/test/files/neg/t3098.check b/test/files/neg/t3098.check
index 8582974..5343b12 100644
--- a/test/files/neg/t3098.check
+++ b/test/files/neg/t3098.check
@@ -1,5 +1,7 @@
-b.scala:3: error: match may not be exhaustive.
+b.scala:3: warning: match may not be exhaustive.
 It would fail on the following input: (_ : C)
   def f = (null: T) match {
                ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
 one error found
diff --git a/test/files/neg/t3118.scala b/test/files/neg/t3118.scala
index 75f3b8f..9be24c1 100644
--- a/test/files/neg/t3118.scala
+++ b/test/files/neg/t3118.scala
@@ -1,8 +1,8 @@
 class O1 {
   private[this] case class C()
-  
+
   val x = new O1
-  
+
   println(x.C())   // should not be accessible
   println(new x.C) // is correctly not accessible
 }
diff --git a/test/files/neg/t3160ambiguous.check b/test/files/neg/t3160ambiguous.check
new file mode 100644
index 0000000..73a0c6d
--- /dev/null
+++ b/test/files/neg/t3160ambiguous.check
@@ -0,0 +1,7 @@
+t3160ambiguous.scala:8: error: reference to List is ambiguous;
+it is imported twice in the same scope by
+import scala.collection.immutable._
+and import Bippy._
+  def f(x: List[Any]): String = ???  // ambiguous, because Bippy.List is accessible
+           ^
+one error found
diff --git a/test/files/neg/t3160ambiguous.scala b/test/files/neg/t3160ambiguous.scala
new file mode 100644
index 0000000..57745e6
--- /dev/null
+++ b/test/files/neg/t3160ambiguous.scala
@@ -0,0 +1,15 @@
+object Bippy {
+  private class List[+T]
+}
+class Bippy {
+  import Bippy._
+  import scala.collection.immutable._
+
+  def f(x: List[Any]): String = ???  // ambiguous, because Bippy.List is accessible
+}
+class Other {
+  import Bippy._
+  import scala.collection.immutable._
+
+  def f(x: List[Any]): String = ???  // unambiguous, because Bippy.List is inaccessible
+}
diff --git a/test/files/neg/t3189.check b/test/files/neg/t3189.check
index 3913c52..122af56 100644
--- a/test/files/neg/t3189.check
+++ b/test/files/neg/t3189.check
@@ -1,4 +1,4 @@
-t3189.scala:2: error: use _* to match a sequence
+t3189.scala:2: error: bad simple pattern: use _* to match a sequence
   val Array(a,b*) = ("": Any)
                 ^
 one error found
diff --git a/test/files/neg/t3224.check b/test/files/neg/t3224.check
index 29304c5..69b02c8 100644
--- a/test/files/neg/t3224.check
+++ b/test/files/neg/t3224.check
@@ -1,6 +1,26 @@
-t3224.scala:29: error: polymorphic expression cannot be instantiated to expected type;
+t3224.scala:30: error: polymorphic expression cannot be instantiated to expected type;
  found   : [T]Array[T]
  required: List[?]
-  println(Texts textL Array()); println(Texts textL Array(1)); println(Texts textL Array(1, 1))
-                           ^
-one error found
+    println(Texts textL Array())
+                             ^
+t3224.scala:34: error: type mismatch;
+ found   : List[Nothing]
+ required: Array[?]
+    println(Texts textA List())
+                            ^
+t3224.scala:35: error: type mismatch;
+ found   : List[Int]
+ required: Array[?]
+    println(Texts textA List(1))
+                            ^
+t3224.scala:36: error: type mismatch;
+ found   : List[Int]
+ required: Array[?]
+    println(Texts textA List(1, 1));
+                            ^
+t3224.scala:48: error: polymorphic expression cannot be instantiated to expected type;
+ found   : [T]Array[T]
+ required: List[?]
+    assert(size(Array()) == 0)
+                     ^
+5 errors found
diff --git a/test/files/neg/t3224.scala b/test/files/neg/t3224.scala
index 774de33..b7af8a6 100755
--- a/test/files/neg/t3224.scala
+++ b/test/files/neg/t3224.scala
@@ -1,30 +1,50 @@
 object Texts{
-  def textL[T](list: List[T]) = {     
-    list match{                        
-      case List() => "Empty"              
-      case List(_) => "One"      
+  def textL[T](list: List[T]) = {
+    list match{
+      case List() => "Empty"
+      case List(_) => "One"
       case List(_*) => "Many"
     }
   }
 
-  def textA[T](array: Array[T]) = {     
-    array match{                        
-      case Array() => "Empty"              
-      case Array(_) => "One"      
+  def textA[T](array: Array[T]) = {
+    array match{
+      case Array() => "Empty"
+      case Array(_) => "One"
       case Array(_*) => "Many"
     }
   }
 }
 
 object Test extends App {
+  {
+    implicit def array2list[T](array: Array[T]) = {
+      println(array.toList.size)
+      array.toList
+    }
+
+    println(Texts textL List())
+    println(Texts textL List(1))
+    println(Texts textL List(1, 1));
+
+    println(Texts textL Array())
+    println(Texts textL Array(1))
+    println(Texts textL Array(1, 1))
 
-  implicit def array2list[T](array: Array[T]) = {
-    println(array.toList.size)
-    array.toList
+    println(Texts textA List())
+    println(Texts textA List(1))
+    println(Texts textA List(1, 1));
+
+    println(Texts textA Array())
+    println(Texts textA Array(1))
+    println(Texts textA Array(1, 1))
   }
 
-  
-  println(Texts textL List()); println(Texts textL List(1)); println(Texts textL List(1, 1));
+  {
+    implicit def array2list[T](array: Array[T]) = array.toList
+    def size[T](list: List[T]) = list.size
 
-  println(Texts textL Array()); println(Texts textL Array(1)); println(Texts textL Array(1, 1))
+    assert(size(array2list(Array())) == 0)
+    assert(size(Array()) == 0)
+  }
 }
diff --git a/test/files/neg/t3234.check b/test/files/neg/t3234.check
index 4339950..8f0d624 100644
--- a/test/files/neg/t3234.check
+++ b/test/files/neg/t3234.check
@@ -1,2 +1,6 @@
-error: there were 1 inliner warning(s); re-run with -Yinline-warnings for details
+t3234.scala:17: warning: At the end of the day, could not inline @inline-marked method foo3
+    println(foo(42) + foo2(11) + foo3(2))
+                                     ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
 one error found
diff --git a/test/files/neg/t3234.flags b/test/files/neg/t3234.flags
index c9cefdc..cc3d9fb 100644
--- a/test/files/neg/t3234.flags
+++ b/test/files/neg/t3234.flags
@@ -1 +1 @@
--Yinline -Xfatal-warnings
\ No newline at end of file
+-Yinline -Yinline-warnings -Xfatal-warnings
diff --git a/test/files/neg/t3234.scala b/test/files/neg/t3234.scala
index 443d046..1553f1f 100644
--- a/test/files/neg/t3234.scala
+++ b/test/files/neg/t3234.scala
@@ -1,6 +1,6 @@
 trait Trait1 {
   // need more work before this one works
-  // @inline 
+  // @inline
   def foo2(n: Int) = n*n
 }
 
diff --git a/test/files/neg/t3346b.check b/test/files/neg/t3346b.check
new file mode 100644
index 0000000..bcde6d9
--- /dev/null
+++ b/test/files/neg/t3346b.check
@@ -0,0 +1,4 @@
+t3346b.scala:14: error: could not find implicit value for evidence parameter of type TC[Any]
+  val y = foo(1)
+             ^
+one error found
diff --git a/test/files/neg/t3346b.scala b/test/files/neg/t3346b.scala
new file mode 100644
index 0000000..8ea8970
--- /dev/null
+++ b/test/files/neg/t3346b.scala
@@ -0,0 +1,15 @@
+import scala.language.implicitConversions
+
+trait T[X]
+trait U[X]
+trait TC[M[_]]
+
+object Test extends App {
+  def foo[M[_]: TC, A](ma: M[A]) = ()
+  implicit val TCofT: TC[T] = new TC[T] {}
+  implicit def any2T[A](a: A): T[A] = new T[A] {}
+  implicit def any2U[A](a: A): U[A] = new U[A] {}
+
+  val x = foo[T, Int](1)
+  val y = foo(1)
+}
\ No newline at end of file
diff --git a/test/files/neg/t3346c.check b/test/files/neg/t3346c.check
new file mode 100644
index 0000000..575379d
--- /dev/null
+++ b/test/files/neg/t3346c.check
@@ -0,0 +1,4 @@
+t3346c.scala:60: error: value bar is not a member of Either[Int,String]
+  eii.bar
+      ^
+one error found
diff --git a/test/files/neg/t3346c.scala b/test/files/neg/t3346c.scala
new file mode 100644
index 0000000..a5ac166
--- /dev/null
+++ b/test/files/neg/t3346c.scala
@@ -0,0 +1,61 @@
+object Test extends App {
+  //
+  // An attempt to workaround SI-2712, foiled by SI-3346
+  //
+  trait TC[M[_]]
+
+  type EitherInt[A] = Either[Int, A]
+
+  implicit object EitherTC extends TC[EitherInt]
+
+  def foo[M[_]: TC, A](ma: M[A]) = ()
+
+  val eii: Either[Int, String] = Right("")
+
+  foo[EitherInt, String](eii)
+
+  // This one needs SI-2712 Higher Order Unification
+  //foo(eii) // not inferred
+
+  // A workaround is to provide a set of implicit conversions that take values
+  // based on type constructors of various shapes, and search for the
+  // type class instances.
+  //
+  // This is the approach taken by scalaz7.
+
+  trait TCValue[M[_], A] {
+    implicit def self: M[A]
+    def M: TC[M]
+
+    // instead of `foo(eii)`, we'll try `eii.foo`
+    def foo[M[_], A] = ()
+  }
+
+
+  implicit def ToTCValue[M[_], A](ma: M[A])(implicit M0: TC[M]) = new TCValue[M, A] {
+    implicit val M = M0
+    val self = ma
+  }
+  implicit def ToTCValueBin1[M[_, _], A, B](ma: M[A, B])(implicit M0: TC[({type λ[α]=M[A, α]})#λ]): TCValue[({type λ[α] = M[A, α]})#λ, B] = new TCValue[({type λ[α]=M[A, α]})#λ, B] {
+    implicit val M = M0
+    val self = ma
+  }
+  implicit def ToTCValueBin2[M[_, _], A, B](ma: M[A, B])(implicit M0: TC[({type λ[α]=M[α, B]})#λ]): TCValue[({type λ[α]=M[α, B]})#λ, A] = new TCValue[({type λ[α]=M[α, B]})#λ, A] {
+    implicit val M = M0
+    val self = ma
+  }
+
+
+  ToTCValueBin1(eii).foo
+
+  // as expected, could not find implicit parameter
+  // ToTCValueBin2(eii).bar
+
+  // error: implicit conversions are not applicable because they are ambiguous, both method ToTCValueBin1 ... and  method ToTCValueBin2
+  //        annoying!!
+  //        https://issues.scala-lang.org/browse/SI-3346
+  //
+  // Works if we remove ToTCValueBin2
+  //
+  eii.bar
+}
diff --git a/test/files/neg/t3346i.check b/test/files/neg/t3346i.check
new file mode 100644
index 0000000..cc17ab7
--- /dev/null
+++ b/test/files/neg/t3346i.check
@@ -0,0 +1,7 @@
+t3346i.scala:28: error: value a is not a member of Test.A[T]
+  (new A).a
+          ^
+t3346i.scala:29: error: value a is not a member of Test.A[Nothing]
+  (new A[Nothing]).a
+                   ^
+two errors found
diff --git a/test/files/neg/t3346i.scala b/test/files/neg/t3346i.scala
new file mode 100644
index 0000000..9ad2544
--- /dev/null
+++ b/test/files/neg/t3346i.scala
@@ -0,0 +1,30 @@
+import scala.language.implicitConversions
+
+// the classes involved
+case class Z[U](a: U)
+case class Intermediate[T, U](t: T, u: U)
+class Implicit1[T](b: Implicit2[T])
+class Implicit2[T](c: Implicit3[T])
+class Implicit3[T](/* and so on */)
+
+object Test extends App {
+  // the base conversion
+  implicit def convertToZ[T](a: A[T])(implicit b: Implicit1[T]): Z[A[T]] = Z(a)
+
+  // and the implicit chaining, don't you just love it? :D
+  // implicit1, with one alternative
+  implicit def implicit1[T <: Intermediate[_, _]](implicit b: Implicit2[T])                = new Implicit1[T](b)
+  // implicit2, with two alternatives
+  implicit def implicit2alt1[T <: Intermediate[_ <: String, _]](implicit c: Implicit3[T])  = new Implicit2[T](c)
+  implicit def implicit2alt2[T <: Intermediate[_ <: Double, _]](implicit c: Implicit3[T])  = new Implicit2[T](c)
+  // implicit3, with two alternatives
+  implicit def implicit3alt1[T <: Intermediate[_, _ <: Int]]                               = new Implicit3[T]()
+  implicit def implicit3alt2[T <: Intermediate[_ <: Double, _ <: AnyRef],X]                = new Implicit3[T]()
+
+  // and our targets
+  /** conversion here, with constraints */
+  class A[T]()
+
+  (new A).a
+  (new A[Nothing]).a
+}
diff --git a/test/files/neg/t3399.scala b/test/files/neg/t3399.scala
index b1fe4e5..3edaa07 100644
--- a/test/files/neg/t3399.scala
+++ b/test/files/neg/t3399.scala
@@ -10,7 +10,7 @@ object Nats {
         type FoldR[Init <: Type, Type, F <: Fold[Nat, Type]] =
           F#Apply[Succ[N], N#FoldR[Init, Type, F]]
     }
-    
+
     type Add[A <: Nat, B <: Nat] = A#FoldR[B, Nat, Inc]
     trait Fold[-Elem, Value] {
         type Apply[N <: Elem, Acc <: Value] <: Value
@@ -18,7 +18,7 @@ object Nats {
     type Inc = Fold[Any, Nat] {
         type Apply[N <: Any, Acc <: Nat] = Succ[Acc]
     }
-    
+
     type _1 = Succ[_0]
     implicitly[ Add[_1, _1] =:= _1]
 }
\ No newline at end of file
diff --git a/test/files/neg/t3403.scala b/test/files/neg/t3403.scala
index 8be6ab2..7cf0c3e 100644
--- a/test/files/neg/t3403.scala
+++ b/test/files/neg/t3403.scala
@@ -1,2 +1,2 @@
-import scala.reflect.{BeanProperty => bp}
+import scala.beans.{BeanProperty => bp}
 class Foo { @bp var bar: Int = 1 }
diff --git a/test/files/neg/t3453.scala b/test/files/neg/t3453.scala
index 0f1c6e0..090b777 100644
--- a/test/files/neg/t3453.scala
+++ b/test/files/neg/t3453.scala
@@ -25,7 +25,7 @@ object O {
 
 class T2a {
     import O._
-    
+
     def x: B = {
         val aToB = 3
         // ok: doesn't compile, because aToB method requires 'T.this.' prefix
@@ -39,7 +39,7 @@ class T2a {
 
 class T2b {
     import O.aToB
-    
+
     def x: B = {
         val aToB = 3
         // ok: doesn't compile, because aToB method requires 'T.this.' prefix
@@ -53,7 +53,7 @@ class T2b {
 
 class T3 {
     implicit def aToB(a: A): B = new B
-    
+
     def x: B = {
         val aToB = 3
         // ok: doesn't compile, because aToB method requires 'T.this.' prefix
diff --git a/test/files/neg/t3507-old.scala b/test/files/neg/t3507-old.scala
index 32688d3..9a8c7c5 100644
--- a/test/files/neg/t3507-old.scala
+++ b/test/files/neg/t3507-old.scala
@@ -8,7 +8,7 @@ class A {
 object Test {
   var a: A = new A // mutable
   val c /*: object _1.b.c forSome { val _1: A } */ = a.m // widening using existential
-  
+
   def mani[T: Manifest](x: T) = ()
   mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
   // --> _1 is not in scope here
diff --git a/test/files/neg/t3631.check b/test/files/neg/t3631.check
deleted file mode 100644
index 6d8feca..0000000
--- a/test/files/neg/t3631.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t3631.scala:3: error: Implementation restriction: case classes cannot have more than 22 parameters.
-case class X23(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int, x23: Int) { }
-           ^
-one error found
diff --git a/test/files/neg/t3653.check b/test/files/neg/t3653.check
index ac6e2ca..ad68e29 100644
--- a/test/files/neg/t3653.check
+++ b/test/files/neg/t3653.check
@@ -1,7 +1,7 @@
 t3653.scala:3: error: double definition:
-method x:(implicit x: Int)Int and
-method x:(i: Int)Int at line 2
-have same type after erasure: (x: Int)Int
+def x(i: Int): Int at line 2 and
+def x(implicit x: Int): Int at line 3
+have same type after erasure: (i: Int)Int
    def x(implicit x: Int) = 5
        ^
 one error found
diff --git a/test/files/neg/t3683a.check b/test/files/neg/t3683a.check
index 3de3ad7..6386265 100644
--- a/test/files/neg/t3683a.check
+++ b/test/files/neg/t3683a.check
@@ -1,5 +1,7 @@
-t3683a.scala:14: error: match may not be exhaustive.
+t3683a.scala:14: warning: match may not be exhaustive.
 It would fail on the following input: XX()
     w match {
     ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
 one error found
diff --git a/test/files/neg/t3683b.scala b/test/files/neg/t3683b.scala
index fbf75b6..646e418 100644
--- a/test/files/neg/t3683b.scala
+++ b/test/files/neg/t3683b.scala
@@ -8,7 +8,7 @@ case class Z[T >: Bar <: Foo](
   z1: W[T]
 ) extends W[T]
 
-object Main {  
+object Main {
   // should fail for including X()
   def f1(w: W[Bar]): Int = {
     w match {
diff --git a/test/files/neg/t3692-new.check b/test/files/neg/t3692-new.check
index 5aa991c..bb8692f 100644
--- a/test/files/neg/t3692-new.check
+++ b/test/files/neg/t3692-new.check
@@ -1,14 +1,19 @@
-t3692-new.scala:14: warning: non-variable type argument Int in type pattern Map[Int,Int] is unchecked since it is eliminated by erasure
+t3692-new.scala:14: warning: non-variable type argument Int in type pattern scala.collection.immutable.Map[Int,Int] (the underlying of Map[Int,Int]) is unchecked since it is eliminated by erasure
       case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer]
                ^
-t3692-new.scala:15: warning: non-variable type argument Int in type pattern Map[Int,V] is unchecked since it is eliminated by erasure
+t3692-new.scala:15: warning: non-variable type argument Int in type pattern scala.collection.immutable.Map[Int,V] (the underlying of Map[Int,V]) is unchecked since it is eliminated by erasure
       case m1: Map[Int, V] => new java.util.HashMap[Integer, V]
                ^
-t3692-new.scala:16: warning: non-variable type argument Int in type pattern Map[T,Int] is unchecked since it is eliminated by erasure
+t3692-new.scala:16: warning: non-variable type argument Int in type pattern scala.collection.immutable.Map[T,Int] (the underlying of Map[T,Int]) is unchecked since it is eliminated by erasure
       case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
                ^
-t3692-new.scala:16: error: unreachable code
-      case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
+t3692-new.scala:15: warning: unreachable code
+      case m1: Map[Int, V] => new java.util.HashMap[Integer, V]
                               ^
-three warnings found
+t3692-new.scala:4: warning: Tester has a main method with parameter type Array[String], but Tester will not be a runnable program.
+  Reason: main method must have exact signature (Array[String])Unit
+object Tester {
+       ^
+error: No warnings can be incurred under -Xfatal-warnings.
+5 warnings found
 one error found
diff --git a/test/files/neg/t3692-new.flags b/test/files/neg/t3692-new.flags
index cb8324a..85d8eb2 100644
--- a/test/files/neg/t3692-new.flags
+++ b/test/files/neg/t3692-new.flags
@@ -1 +1 @@
--Xoldpatmat
\ No newline at end of file
+-Xfatal-warnings
diff --git a/test/files/neg/t3692-old.check b/test/files/neg/t3692-old.check
deleted file mode 100644
index 9f3ae51..0000000
--- a/test/files/neg/t3692-old.check
+++ /dev/null
@@ -1,14 +0,0 @@
-t3692-old.scala:13: warning: non-variable type argument Int in type pattern Map[Int,Int] is unchecked since it is eliminated by erasure
-      case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer]
-               ^
-t3692-old.scala:14: warning: non-variable type argument Int in type pattern Map[Int,V] is unchecked since it is eliminated by erasure
-      case m1: Map[Int, V] => new java.util.HashMap[Integer, V]
-               ^
-t3692-old.scala:15: warning: non-variable type argument Int in type pattern Map[T,Int] is unchecked since it is eliminated by erasure
-      case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
-               ^
-t3692-old.scala:15: error: unreachable code
-      case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
-                              ^
-three warnings found
-one error found
diff --git a/test/files/neg/t3692-old.flags b/test/files/neg/t3692-old.flags
deleted file mode 100644
index cb8324a..0000000
--- a/test/files/neg/t3692-old.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xoldpatmat
\ No newline at end of file
diff --git a/test/files/neg/t3692-old.scala b/test/files/neg/t3692-old.scala
deleted file mode 100644
index 151535a..0000000
--- a/test/files/neg/t3692-old.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import java.lang.Integer
-
-object ManifestTester {
-  def main(args: Array[String]) = {
-    val map = Map("John" -> 1, "Josh" -> 2)
-    new ManifestTester().toJavaMap(map)
-  }
-}
-
-class ManifestTester {
-  private final def toJavaMap[T, V](map: Map[T, V])(implicit m1: Manifest[T], m2: Manifest[V]): java.util.Map[_, _] = {
-    map match {
-      case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer]
-      case m1: Map[Int, V] => new java.util.HashMap[Integer, V]
-      case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
-      case _ => new java.util.HashMap[T, V]
-    }
-  }
-}
\ No newline at end of file
diff --git a/test/files/neg/t3714-neg.check b/test/files/neg/t3714-neg.check
index 2db0655..4f29716 100644
--- a/test/files/neg/t3714-neg.check
+++ b/test/files/neg/t3714-neg.check
@@ -1,12 +1,12 @@
 t3714-neg.scala:17: error: value break in class BreakImpl cannot be accessed in BreakImpl
  Access to protected value break not permitted because
- enclosing object Test is not a subclass of 
+ enclosing object Test is not a subclass of
  class BreakImpl where target is defined
     case b: BreakImpl => b.break
                            ^
 t3714-neg.scala:25: error: value break in class BreakImpl cannot be accessed in BreakImpl
  Access to protected value break not permitted because
- enclosing object Test is not a subclass of 
+ enclosing object Test is not a subclass of
  class BreakImpl where target is defined
     case b: BreakImpl => b.break
                            ^
diff --git a/test/files/neg/t3714-neg.scala b/test/files/neg/t3714-neg.scala
index 4b56f93..753b367 100644
--- a/test/files/neg/t3714-neg.scala
+++ b/test/files/neg/t3714-neg.scala
@@ -29,7 +29,7 @@ object Test {
     case BreakImpl(x) => x
     case _            => -1
   }
-  
+
   def main(args: Array[String]) {
     val break = BreakImpl(22)
     assert(f1(break) == 22)
diff --git a/test/files/neg/t3736.scala b/test/files/neg/t3736.scala
index 1efa397..cf09209 100644
--- a/test/files/neg/t3736.scala
+++ b/test/files/neg/t3736.scala
@@ -7,14 +7,14 @@ object Test {
     def f5 = super.!=(new AnyRef)
     def f6 = super.##
   }
-  
+
   // Ill-advised overloads to be sure...
   class B {
     def ##(x: String) = true
     def ==(x1: String, xs: List[_]) = true
     def !=(x1: String, xs: List[_]) = true
   }
-  
+
   class C extends B {
     override def ##(x: String) = super.##(x)
     override def ==(x1: String, xs: List[_]) = super.==(x1, xs)
diff --git a/test/files/neg/t3757/B.scala b/test/files/neg/t3757/B.scala
index 68766a9..7c78fb6 100644
--- a/test/files/neg/t3757/B.scala
+++ b/test/files/neg/t3757/B.scala
@@ -1,5 +1,5 @@
 package b
 
-class B extends a.A { 
+class B extends a.A {
   override def foo = "B"
 }
\ No newline at end of file
diff --git a/test/files/neg/t3776.check b/test/files/neg/t3776.check
index 4a5284c..0dfe129 100644
--- a/test/files/neg/t3776.check
+++ b/test/files/neg/t3776.check
@@ -1,4 +1,4 @@
 t3776.scala:8: error: value someOperation is not a member of _$1
-    def parsedAs[T](v: T) = MyParser.parse(pattern, a).get someOperation v 
+    def parsedAs[T](v: T) = MyParser.parse(pattern, a).get someOperation v
                                                            ^
 one error found
diff --git a/test/files/neg/t3776.scala b/test/files/neg/t3776.scala
index 454f914..e24b2fe 100644
--- a/test/files/neg/t3776.scala
+++ b/test/files/neg/t3776.scala
@@ -1,10 +1,10 @@
-import util.parsing.combinator.{PackratParsers, RegexParsers}
-
-object MyParser extends RegexParsers with PackratParsers {
+object MyParser {
+  implicit def literal(s: String): Parser[String] = ???
+  trait Parser[+T]
+  def parse[T](p: Parser[T], in: java.lang.CharSequence): Option[T] = ???
 }
-
 object Test {
   class ParsedAs(a: String) (implicit pattern: MyParser.Parser[_]) {
-    def parsedAs[T](v: T) = MyParser.parse(pattern, a).get someOperation v 
+    def parsedAs[T](v: T) = MyParser.parse(pattern, a).get someOperation v
   }
 }
diff --git a/test/files/neg/t3816.scala b/test/files/neg/t3816.scala
index b7f3fde..31b0825 100644
--- a/test/files/neg/t3816.scala
+++ b/test/files/neg/t3816.scala
@@ -1,7 +1,7 @@
 class B {
     def ::(a: List[Int]) {
         a match {
-            case x::xs => 
+            case x::xs =>
             case _ =>
         }
     }
@@ -32,11 +32,11 @@ object Test {
     }
   }
 
-  var foo = 0   
+  var foo = 0
   def testFail2( x: Any ) = {
     x match {
       case Some( `foo` ) =>
       case _ =>
     }
-  }   
+  }
 }
diff --git a/test/files/neg/t3871.check b/test/files/neg/t3871.check
new file mode 100644
index 0000000..b920357
--- /dev/null
+++ b/test/files/neg/t3871.check
@@ -0,0 +1,7 @@
+t3871.scala:4: error: variable foo in class Sub2 cannot be accessed in Sub2
+ Access to protected method foo not permitted because
+ enclosing class Base is not a subclass of
+ class Sub2 where target is defined
+    s.foo = true
+      ^
+one error found
diff --git a/test/files/neg/t3871.scala b/test/files/neg/t3871.scala
new file mode 100644
index 0000000..fc45986
--- /dev/null
+++ b/test/files/neg/t3871.scala
@@ -0,0 +1,11 @@
+class Base {
+  def mkNew() = {
+    val s = new Sub2
+    s.foo = true
+    s
+  }
+}
+
+class Sub2 extends Base {
+  protected var foo = false
+}
diff --git a/test/files/neg/t3871b.check b/test/files/neg/t3871b.check
new file mode 100644
index 0000000..6ab5ddf
--- /dev/null
+++ b/test/files/neg/t3871b.check
@@ -0,0 +1,97 @@
+t3871b.scala:61: error: not found: value protOT
+    protOT // not allowed
+    ^
+t3871b.scala:77: error: method prot in class A cannot be accessed in E.this.A
+ Access to protected method prot not permitted because
+ prefix type E.this.A does not conform to
+ class B in class E where the access take place
+      a.prot    // not allowed, prefix type `A` does not conform to `B`
+        ^
+t3871b.scala:79: error: value protT is not a member of E.this.B
+      b.protT   // not allowed
+        ^
+t3871b.scala:80: error: value protT is not a member of E.this.C
+      c.protT   // not allowed
+        ^
+t3871b.scala:81: error: value protT is not a member of E.this.A
+      a.protT   // not allowed
+        ^
+t3871b.scala:91: error: method prot in class A cannot be accessed in E.this.A
+ Access to protected method prot not permitted because
+ prefix type E.this.A does not conform to
+ object B in class E where the access take place
+      a.prot    // not allowed
+        ^
+t3871b.scala:93: error: value protT is not a member of E.this.B
+      b.protT   // not allowed
+        ^
+t3871b.scala:94: error: value protT is not a member of E.this.C
+      c.protT   // not allowed
+        ^
+t3871b.scala:95: error: value protT is not a member of E.this.A
+      a.protT   // not allowed
+        ^
+t3871b.scala:102: error: method prot in class A cannot be accessed in E.this.B
+ Access to protected method prot not permitted because
+ enclosing class Z in class E is not a subclass of
+ class A in class E where target is defined
+      b.prot    // not allowed
+        ^
+t3871b.scala:103: error: method prot in class A cannot be accessed in E.this.C
+ Access to protected method prot not permitted because
+ enclosing class Z in class E is not a subclass of
+ class A in class E where target is defined
+      c.prot    // not allowed
+        ^
+t3871b.scala:104: error: method prot in class A cannot be accessed in E.this.A
+ Access to protected method prot not permitted because
+ enclosing class Z in class E is not a subclass of
+ class A in class E where target is defined
+      a.prot    // not allowed
+        ^
+t3871b.scala:109: error: value protT is not a member of E.this.B
+      b.protT   // not allowed
+        ^
+t3871b.scala:110: error: value protT is not a member of E.this.C
+      c.protT   // not allowed
+        ^
+t3871b.scala:111: error: value protT is not a member of E.this.A
+      a.protT   // not allowed
+        ^
+t3871b.scala:120: error: method prot in class A cannot be accessed in Other.this.e.B
+ Access to protected method prot not permitted because
+ enclosing class Other is not a subclass of
+ class A in class E where target is defined
+    b.prot    // not allowed
+      ^
+t3871b.scala:121: error: method prot in class A cannot be accessed in Other.this.e.C
+ Access to protected method prot not permitted because
+ enclosing class Other is not a subclass of
+ class A in class E where target is defined
+    c.prot    // not allowed
+      ^
+t3871b.scala:122: error: method prot in class A cannot be accessed in Other.this.e.A
+ Access to protected method prot not permitted because
+ enclosing class Other is not a subclass of
+ class A in class E where target is defined
+    a.prot    // not allowed
+      ^
+t3871b.scala:123: error: method protE in class A cannot be accessed in Other.this.e.B
+ Access to protected method protE not permitted because
+ enclosing class Other is not a subclass of
+ class A in class E where target is defined
+    b.protE   // not allowed
+      ^
+t3871b.scala:124: error: method protE in class A cannot be accessed in Other.this.e.A
+ Access to protected method protE not permitted because
+ enclosing class Other is not a subclass of
+ class A in class E where target is defined
+    a.protE   // not allowed
+      ^
+t3871b.scala:125: error: method protE in class A cannot be accessed in Other.this.e.C
+ Access to protected method protE not permitted because
+ enclosing class Other is not a subclass of
+ class A in class E where target is defined
+    c.protE   // not allowed
+      ^
+21 errors found
diff --git a/test/files/neg/t3871b.scala b/test/files/neg/t3871b.scala
new file mode 100644
index 0000000..b490b77
--- /dev/null
+++ b/test/files/neg/t3871b.scala
@@ -0,0 +1,127 @@
+/**
+
+The protected modifier applies to class member definitions. Protected members of a class can be accessed from within
+
+  0a. the companion module of any of those classes
+
+A protected identifier x may be used as a member name in a selection r.x only
+if one of the following applies:
+  1a. The access is within the template defining the member, or,
+  if a qualification C is given,
+  1b. inside the package C, or
+  1c. the class C , or its companion module, or
+  2.  r is one of the reserved words this and super, or
+  3.  r’s type conforms to a type-instance of the class which contains the access.
+
+  4. A different form of qualification is protected[this]. A member M marked with this
+     modifier is called object-protected; it can be accessed only from within the object
+     in which it is defined. That is, a selection p.M is only legal if the prefix is this
+     or O.this, for some class O enclosing the reference. In addition, the restrictions
+     for unqualified protected apply.
+*/
+
+object E {
+  val e = new E
+  import e._
+  def n(a: A, b: B, c: C) = {
+    b.protE    // 1c
+    c.protE    // 1c
+    a.protE    // 1c
+    A.protOE   // 1c
+  }
+}
+
+class E {
+  object A {
+    protected def protO = 2
+    protected[E] def protOE = 3
+    protected[this] def protOT = 3
+  }
+  class A {
+    protected def prot = 2
+    protected[E] def protE = 3
+    protected[this] def protT = 4
+
+    // 1a
+    prot; protE; protT
+    def foo = {prot; protE; protT}
+    new { prot; protE }
+    def this(a: Any) = {this(); prot; protE; protT}
+    object B extends A {
+      A.this.prot
+      A.this.protE
+      A.this.protT
+    }
+
+    import A._
+    // 0a
+    protO
+    // 3
+    protOE
+    protOT // not allowed
+  }
+
+  class B extends A {
+    // 1b
+    this.prot; this.protE;
+    super.prot; super.protE;
+
+    // 4
+    this.protT
+    // 4 !!! "or the super keyword"
+    super.protT
+
+    def n(a: A, b: B, c: C) = {
+      b.prot    // 3
+      c.prot    // 3
+      a.prot    // not allowed, prefix type `A` does not conform to `B`
+
+      b.protT   // not allowed
+      c.protT   // not allowed
+      a.protT   // not allowed
+    }
+  }
+  object B {
+    def n(a: A, b: B, c: C) = {
+      b.prot    // 3 !!!
+      c.prot    // 3 !!!
+      // Wording of 3 seems insufficient, missing:
+      // "... (if the access is from a class), or
+      // the type instance of companion class (if the access is from a module)"
+      a.prot    // not allowed
+
+      b.protT   // not allowed
+      c.protT   // not allowed
+      a.protT   // not allowed
+    }
+  }
+  class C extends B
+
+  class Z {
+    def n(a: A, b: B, c: C) = {
+      b.prot    // not allowed
+      c.prot    // not allowed
+      a.prot    // not allowed
+      b.protE   // 2
+      a.protE   // 2
+      c.protE   // 2
+
+      b.protT   // not allowed
+      c.protT   // not allowed
+      a.protT   // not allowed
+    }
+  }
+}
+
+class Other {
+  val e = new E
+  import e._
+  def n(a: A, b: B, c: C) = {
+    b.prot    // not allowed
+    c.prot    // not allowed
+    a.prot    // not allowed
+    b.protE   // not allowed
+    a.protE   // not allowed
+    c.protE   // not allowed
+  }
+}
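
Editorial aside: the comment block at the top of t3871b.scala paraphrases the spec rules the access checker enforces. As a compact, hedged sketch (Account and Savings are illustrative names), plain `protected` is accessible from a subclass only through a prefix whose type conforms to that subclass, while `protected[this]` is object-protected and reachable only through `this`:

    class Account {
      protected var balance: Int = 0        // accessible in subclasses, on a conforming prefix
      protected[this] var pin: String = "0" // object-protected: only via `this`
    }

    class Savings extends Account {
      def deposit(n: Int): Unit = { balance += n }   // ok: access through `this`
      def copyFrom(other: Savings): Unit = {
        balance = other.balance   // ok: prefix type Savings conforms to the accessing class
        // pin = other.pin        // rejected: protected[this] forbids any prefix but `this`
      }
    }
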
diff --git a/test/files/neg/t3873.check b/test/files/neg/t3873.check
index 54d6abd..f9f413a 100644
--- a/test/files/neg/t3873.check
+++ b/test/files/neg/t3873.check
@@ -1,6 +1,6 @@
 t3873.scala:11: error: type mismatch;
  found   : Test.a.B
- required: a.B
-  wrongf(new A)(a.b) // should not compile -- TODO: improve error message? the "a" is ambiguous
+ required: a.B where val a: A
+  wrongf(new A)(a.b) // should not compile
                   ^
 one error found
diff --git a/test/files/neg/t3873.scala b/test/files/neg/t3873.scala
index e7815f0..b27b4e9 100644
--- a/test/files/neg/t3873.scala
+++ b/test/files/neg/t3873.scala
@@ -8,5 +8,5 @@ object Test {
 
   val a = new A
   wrongf(a)(a.b)
-  wrongf(new A)(a.b) // should not compile -- TODO: improve error message? the "a" is ambiguous
+  wrongf(new A)(a.b) // should not compile
 }
\ No newline at end of file
diff --git a/test/files/neg/t3934.check b/test/files/neg/t3934.check
index 405ed2e..8b06799 100644
--- a/test/files/neg/t3934.check
+++ b/test/files/neg/t3934.check
@@ -1,6 +1,6 @@
 t3934.scala:15: error: method f2 in class J cannot be accessed in test.J
  Access to protected method f2 not permitted because
- enclosing class S1 in package nest is not a subclass of 
+ enclosing class S1 in package nest is not a subclass of
  class J in package test where target is defined
   def g2(x: J) = x.f2()
                    ^
diff --git a/test/files/neg/t3971.check b/test/files/neg/t3971.check
new file mode 100644
index 0000000..8685119
--- /dev/null
+++ b/test/files/neg/t3971.check
@@ -0,0 +1,21 @@
+t3971.scala:6: error: type mismatch;
+ found   : Int
+ required: String
+  f(g("abc")("def")) // g returns Int, needs String
+     ^
+t3971.scala:7: error: type mismatch;
+ found   : Int(5)
+ required: String
+  f(5)
+    ^
+t3971.scala:8: error: type mismatch;
+ found   : Int
+ required: String
+  f(h("abc"))
+     ^
+t3971.scala:11: error: type mismatch;
+ found   : Boolean
+ required: String
+  ({"ab".reverse; "ba".equals})(0): String
+                               ^
+four errors found
diff --git a/test/files/neg/t3971.scala b/test/files/neg/t3971.scala
new file mode 100644
index 0000000..35f64fd
--- /dev/null
+++ b/test/files/neg/t3971.scala
@@ -0,0 +1,12 @@
+class A {
+  def f(x: String) = x
+  def g(x: String)(y: String): Int = x.length + y.length
+  def h(x: String) = x.length
+
+  f(g("abc")("def")) // g returns Int, needs String
+  f(5)
+  f(h("abc"))
+
+  // a perverse piece of code from a perverse coder
+  ({"ab".reverse; "ba".equals})(0): String
+}
diff --git a/test/files/neg/t3977.check b/test/files/neg/t3977.check
index 9da118e..72335a0 100644
--- a/test/files/neg/t3977.check
+++ b/test/files/neg/t3977.check
@@ -1,4 +1,4 @@
 t3977.scala:12: error: could not find implicit value for parameter w: False#If[E]
-  new NotNull
+  new NoNull
   ^
 one error found
diff --git a/test/files/neg/t3977.scala b/test/files/neg/t3977.scala
index f55a832..11a8cdb 100644
--- a/test/files/neg/t3977.scala
+++ b/test/files/neg/t3977.scala
@@ -7,7 +7,7 @@ trait False extends Bool {
 }
 
 class Field[E, N <: Bool](implicit val w: N#If[E]) {
-  type NotNull = Field[E, False]
+  type NoNull = Field[E, False]
 
-  new NotNull
-}
\ No newline at end of file
+  new NoNull
+}
diff --git a/test/files/neg/t3987.scala b/test/files/neg/t3987.scala
index c97d57b..1226d80 100644
--- a/test/files/neg/t3987.scala
+++ b/test/files/neg/t3987.scala
@@ -1,7 +1,7 @@
 class Gox {
   object Zed { }
   class Zed  { }
-}          
+}
 
 object Test {
   type GoxZed = t#Zed forSome { type t <: Gox }
diff --git a/test/files/neg/t4079/t4079_1.scala b/test/files/neg/t4079/t4079_1.scala
index 8dc7355..cbae864 100644
--- a/test/files/neg/t4079/t4079_1.scala
+++ b/test/files/neg/t4079/t4079_1.scala
@@ -9,7 +9,7 @@ trait ComposeT[F[_],G[_]] {
 case class Compose[F[_],G[_]]() {
   def Functor(implicit f: Functor[F], g: Functor[G]): Functor[ComposeT[F,G]#Apply] =
     new Functor[ComposeT[F,G]#Apply] {
-      def map[A,B](c: ComposeT[F,G]#Apply[A], h: A => B) = 
+      def map[A,B](c: ComposeT[F,G]#Apply[A], h: A => B) =
         f.map(c, (x:G[A]) => g.map(x,h))
     }
 }
@@ -19,10 +19,10 @@ object Cat {
 }
 
 object Functors {
-  implicit val List = new Functor[List] { 
+  implicit val List = new Functor[List] {
     def map[A,B](fa: List[A], f: A => B): List[B] = fa map f
   }
-  implicit val Option = new Functor[Option] { 
+  implicit val Option = new Functor[Option] {
     def map[A,B](fa: Option[A], f: A => B): Option[B] = fa map f
   }
 }
diff --git a/test/files/neg/t409.check b/test/files/neg/t409.check
index 433d64d..0edc0d0 100644
--- a/test/files/neg/t409.check
+++ b/test/files/neg/t409.check
@@ -1,4 +1,4 @@
-t409.scala:6: error: traits or objects may not have parameters
+t409.scala:6: error: class Case1 needs to be a trait to be mixed in
 class Toto extends Expr with Case1(12);
-                                  ^
+                             ^
 one error found
diff --git a/test/files/neg/t4098.check b/test/files/neg/t4098.check
index 7d69cf1..232c082 100644
--- a/test/files/neg/t4098.check
+++ b/test/files/neg/t4098.check
@@ -1,5 +1,5 @@
 t4098.scala:3: error: forward reference not allowed from self constructor invocation
-    this(b)       
+    this(b)
          ^
 t4098.scala:8: error: forward reference not allowed from self constructor invocation
     this(b)
diff --git a/test/files/neg/t4098.scala b/test/files/neg/t4098.scala
index 744d619..2e6d167 100644
--- a/test/files/neg/t4098.scala
+++ b/test/files/neg/t4098.scala
@@ -1,6 +1,6 @@
-class A(a: Any) { 
-  def this() = {  
-    this(b)       
+class A(a: Any) {
+  def this() = {
+    this(b)
     def b = new {}
   }
 
diff --git a/test/files/neg/t4134.scala b/test/files/neg/t4134.scala
index 678e480..18f813d 100644
--- a/test/files/neg/t4134.scala
+++ b/test/files/neg/t4134.scala
@@ -3,16 +3,16 @@
 
 trait T1 {
   def f: String
-}                                                                               
+}
 
 trait T2 extends T1 {
   abstract override def f: String = "goo"
   def something = super.f  // So the "abstract override" is needed
-}                                                                               
+}
 
 trait Q1 {
   def f: String = "bippy"
-}                                                                               
+}
 
 //trait T3 extends Q1 with T2 {
 trait T3 extends T2 with Q1 {
diff --git a/test/files/neg/t414.scala b/test/files/neg/t414.scala
index 2bc83ee..86646d1 100644
--- a/test/files/neg/t414.scala
+++ b/test/files/neg/t414.scala
@@ -1,9 +1,9 @@
 case class Empty[a]() extends IntMap[a];
-case class Node[a](left: IntMap[a], keyVal: Pair[Int, a], right: IntMap[a]) extends IntMap[a];
+case class Node[a](left: IntMap[a], keyVal: Tuple2[Int, a], right: IntMap[a]) extends IntMap[a];
 abstract class IntMap[a] {
         def lookup(key: Int): a = this match {
                 case Empty =>
-                        error("clef inexistante")
+                        sys.error("clef inexistante")
                 case _ =>
         };
 
diff --git a/test/files/neg/t4158.check b/test/files/neg/t4158.check
index 3ee2627..af281c5 100644
--- a/test/files/neg/t4158.check
+++ b/test/files/neg/t4158.check
@@ -1,19 +1,7 @@
-t4158.scala:3: error: type mismatch;
- found   : Null(null)
- required: Int
-Note that implicit conversions are not applicable because they are ambiguous:
- both method Integer2intNullConflict in class LowPriorityImplicits of type (x: Null)Int
- and method Integer2int in object Predef of type (x: Integer)Int
- are possible conversion functions from Null(null) to Int
+t4158.scala:3: error: an expression of type Null is ineligible for implicit conversion
   var y = null: Int
           ^
-t4158.scala:2: error: type mismatch;
- found   : Null(null)
- required: Int
-Note that implicit conversions are not applicable because they are ambiguous:
- both method Integer2intNullConflict in class LowPriorityImplicits of type (x: Null)Int
- and method Integer2int in object Predef of type (x: Integer)Int
- are possible conversion functions from Null(null) to Int
+t4158.scala:2: error: an expression of type Null is ineligible for implicit conversion
   var x: Int = null
                ^
 two errors found
diff --git a/test/files/neg/t4174.scala b/test/files/neg/t4174.scala
index f524295..b4a5ab2 100644
--- a/test/files/neg/t4174.scala
+++ b/test/files/neg/t4174.scala
@@ -2,7 +2,7 @@ class C
 
 object Test {
   def foo(c: C) = 0
-  
+
   def main(args: Array[String]): Unit = {
     foo(new C { override def bar = 1 })
   }
diff --git a/test/files/neg/t418.check b/test/files/neg/t418.check
index 1489547..1b99717 100644
--- a/test/files/neg/t418.check
+++ b/test/files/neg/t418.check
@@ -1,7 +1,4 @@
 t418.scala:2: error: not found: value Foo12340771
   null match { case Foo12340771.Bar(x) => x }
                     ^
-t418.scala:2: error: not found: value x
-  null match { case Foo12340771.Bar(x) => x }
-                                          ^
-two errors found
+one error found
diff --git a/test/files/neg/t4196.scala b/test/files/neg/t4196.scala
index ac00b19..06e1f28 100644
--- a/test/files/neg/t4196.scala
+++ b/test/files/neg/t4196.scala
@@ -1,6 +1,6 @@
 object Weird {
-  { (s: String) => 
+  { (s: String) =>
       val foo = Some(s); // to illustrate that vals are printed in the error
-      foo 
+      foo
   }.apply("first param") ("spurious param")
 }
\ No newline at end of file
diff --git a/test/files/neg/t421.check b/test/files/neg/t421.check
index e81df52..dc5fa42 100644
--- a/test/files/neg/t421.check
+++ b/test/files/neg/t421.check
@@ -1,4 +1,4 @@
 t421.scala:5: error: star patterns must correspond with varargs parameters
-    case Bar("foo",_*) => error("huh?");
+    case Bar("foo",_*) => sys.error("huh?");
                     ^
 one error found
diff --git a/test/files/neg/t421.scala b/test/files/neg/t421.scala
index 43f6c9d..9a327be 100644
--- a/test/files/neg/t421.scala
+++ b/test/files/neg/t421.scala
@@ -2,7 +2,7 @@ object foo  {
   case class Bar(a:String, b:AnyRef, c:String*);
 
   Bar("foo","meets","bar") match {
-    case Bar("foo",_*) => error("huh?");
+    case Bar("foo",_*) => sys.error("huh?");
   }
 
 }
diff --git a/test/files/neg/t4217.check b/test/files/neg/t4217.check
index e8cd5fd..6c49ec3 100644
--- a/test/files/neg/t4217.check
+++ b/test/files/neg/t4217.check
@@ -1,4 +1,4 @@
 t4217.scala:2: error: 'case' expected but '}' found.
-  42 match { } 
+  42 match { }
              ^
 one error found
diff --git a/test/files/neg/t4217.scala b/test/files/neg/t4217.scala
index 9343a9b..0817df2 100644
--- a/test/files/neg/t4217.scala
+++ b/test/files/neg/t4217.scala
@@ -1,3 +1,3 @@
 object A extends App {
-  42 match { } 
+  42 match { }
 }
diff --git a/test/files/neg/t4221.scala b/test/files/neg/t4221.scala
index c979f79..0a8b8ad 100644
--- a/test/files/neg/t4221.scala
+++ b/test/files/neg/t4221.scala
@@ -1,5 +1,5 @@
 class Cl {
-        class Sub[TheSub <: Sub[TheSub]] 
+        class Sub[TheSub <: Sub[TheSub]]
 }
 
 case class Wrapper[T](v: T)
diff --git a/test/files/neg/t4271.scala b/test/files/neg/t4271.scala
index 50526c8..46ae3ad 100644
--- a/test/files/neg/t4271.scala
+++ b/test/files/neg/t4271.scala
@@ -1,11 +1,11 @@
 object foo {
   object Donotuseme
-  implicit def any2Ensuring[A](x: A) = Donotuseme
+  implicit def Ensuring[A](x: A) = Donotuseme
   implicit def doubleWrapper(x: Int) = Donotuseme
   implicit def floatWrapper(x: Int) = Donotuseme
   implicit def intWrapper(x: Int) = Donotuseme
   implicit def longWrapper(x: Int) = Donotuseme
-  implicit def any2ArrowAssoc[A](x: A) = Donotuseme
+  implicit def ArrowAssoc[A](x: A) = Donotuseme
   3 to 5
   5 ensuring true
   3 -> 5
diff --git a/test/files/neg/t4302.check b/test/files/neg/t4302.check
index 450d28b..ea48729 100644
--- a/test/files/neg/t4302.check
+++ b/test/files/neg/t4302.check
@@ -1,4 +1,6 @@
-t4302.scala:2: error: abstract type T is unchecked since it is eliminated by erasure
+t4302.scala:2: warning: abstract type T is unchecked since it is eliminated by erasure
   def hasMatch[T](x: AnyRef) = x.isInstanceOf[T]
                                              ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
 one error found
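
Editorial aside: the updated check treats the unchecked `isInstanceOf[T]` as a warning that -Xfatal-warnings then turns into an error. As a hedged aside (not the fix this test expects), the usual way to make such a test meaningful at run time is to carry the class via a ClassTag:

    import scala.reflect.ClassTag

    object ErasureDemo {
      // T is erased, so x.isInstanceOf[T] would be unchecked; a ClassTag keeps
      // the runtime class around and lets us test against it explicitly.
      def hasMatch[T](x: AnyRef)(implicit ct: ClassTag[T]): Boolean =
        ct.runtimeClass.isInstance(x)

      def main(args: Array[String]): Unit = {
        println(hasMatch[String]("hi"))   // true
        println(hasMatch[Integer]("hi"))  // false
      }
    }
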
diff --git a/test/files/neg/t4417.check b/test/files/neg/t4417.check
index 4e3f6c0..dbd0f1d 100644
--- a/test/files/neg/t4417.check
+++ b/test/files/neg/t4417.check
@@ -1,6 +1,6 @@
 t4417.scala:11: error: constructor Pixel$mcD$sp in class Pixel$mcD$sp cannot be accessed in object Pixel
  Access to protected constructor Pixel$mcD$sp not permitted because
- enclosing object Pixel is not a subclass of 
+ enclosing object Pixel is not a subclass of
  class Pixel$mcD$sp where target is defined
   def apply(v: Double): Pixel1d = new Pixel1d(v)
                                   ^
diff --git a/test/files/neg/t4417.scala b/test/files/neg/t4417.scala
index 7f104e5..3f6ddc8 100644
--- a/test/files/neg/t4417.scala
+++ b/test/files/neg/t4417.scala
@@ -2,12 +2,12 @@
 
 
 
-class Pixel[@specialized T] protected (var v: T) 
+class Pixel[@specialized T] protected (var v: T)
 
 
 object Pixel {
   type Pixel1d = Pixel[Double]
-  
+
   def apply(v: Double): Pixel1d = new Pixel1d(v)
 }
 
diff --git a/test/files/neg/t4425.check b/test/files/neg/t4425.check
index 0f2fe6f..00006c0 100644
--- a/test/files/neg/t4425.check
+++ b/test/files/neg/t4425.check
@@ -1,4 +1,13 @@
-t4425.scala:3: error: isInstanceOf cannot test if value types are references.
+t4425.scala:3: error: object X is not a case class, nor does it have an unapply/unapplySeq member
+Note: def unapply(x: Int)(y: Option[Int]): None.type exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
   42 match { case _ X _ => () }
                     ^
-one error found
+t4425.scala:8: error: object X is not a case class, nor does it have an unapply/unapplySeq member
+Note: def unapply(x: Int)(y: Int): Some[(Int, Int)] exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+  42 match { case _ X _ => () }
+                    ^
+t4425.scala:13: error: object X is not a case class, nor does it have an unapply/unapplySeq member
+Note: def unapply(x: String)(y: String): Some[(Int, Int)] exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+  "" match { case _ X _ => () }
+                    ^
+three errors found
diff --git a/test/files/neg/t4425.scala b/test/files/neg/t4425.scala
index d8cc692..1714955 100644
--- a/test/files/neg/t4425.scala
+++ b/test/files/neg/t4425.scala
@@ -2,3 +2,13 @@ object Foo {
   object X { def unapply(x : Int)(y : Option[Int] = None) = None }
   42 match { case _ X _ => () }
 }
+
+object Foo2 {
+  object X { def unapply(x : Int)(y: Int) = Some((2,2)) }
+  42 match { case _ X _ => () }
+}
+
+object Foo3 {
+  object X { def unapply(x : String)(y: String) = Some((2,2)) }
+  "" match { case _ X _ => () }
+}
\ No newline at end of file
diff --git a/test/files/neg/t4425b.check b/test/files/neg/t4425b.check
new file mode 100644
index 0000000..8418b4f
--- /dev/null
+++ b/test/files/neg/t4425b.check
@@ -0,0 +1,49 @@
+t4425b.scala:5: error: object X is not a case class, nor does it have an unapply/unapplySeq member
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+    println(      "" match { case _ X _   => "ok" ; case _ => "fail" })
+                                    ^
+t4425b.scala:6: error: object X is not a case class, nor does it have an unapply/unapplySeq member
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+    println((X: Any) match { case _ X _   => "ok" ; case _ => "fail" })
+                                    ^
+t4425b.scala:7: error: object X is not a case class, nor does it have an unapply/unapplySeq member
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+    println(      "" match { case X(_)    => "ok" ; case _ => "fail" })
+                                  ^
+t4425b.scala:8: error: object X is not a case class, nor does it have an unapply/unapplySeq member
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+    println((X: Any) match { case X(_)    => "ok" ; case _ => "fail" })
+                                  ^
+t4425b.scala:9: error: object X is not a case class, nor does it have an unapply/unapplySeq member
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+    println(      "" match { case X(_, _) => "ok" ; case _ => "fail" })
+                                  ^
+t4425b.scala:10: error: object X is not a case class, nor does it have an unapply/unapplySeq member
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+    println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+                                  ^
+t4425b.scala:18: error: too many patterns for object X: expected 1, found 2
+    println(      "" match { case _ X _   => "ok" ; case _ => "fail" })
+                                    ^
+t4425b.scala:19: error: too many patterns for object X: expected 1, found 2
+    println((X: Any) match { case _ X _   => "ok" ; case _ => "fail" })
+                                    ^
+t4425b.scala:22: error: too many patterns for object X: expected 1, found 2
+    println(      "" match { case X(_, _) => "ok" ; case _ => "fail" })
+                                  ^
+t4425b.scala:23: error: too many patterns for object X: expected 1, found 2
+    println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+                                  ^
+t4425b.scala:31: error: too many patterns for object X offering Nothing: expected 1, found 2
+    println(      "" match { case _ X _   => "ok" ; case _ => "fail" })
+                                    ^
+t4425b.scala:32: error: too many patterns for object X offering Nothing: expected 1, found 2
+    println((X: Any) match { case _ X _   => "ok" ; case _ => "fail" })
+                                    ^
+t4425b.scala:35: error: too many patterns for object X offering Nothing: expected 1, found 2
+    println(      "" match { case X(_, _) => "ok" ; case _ => "fail" })
+                                  ^
+t4425b.scala:36: error: too many patterns for object X offering Nothing: expected 1, found 2
+    println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+                                  ^
+14 errors found
diff --git a/test/files/neg/t4425b.scala b/test/files/neg/t4425b.scala
new file mode 100644
index 0000000..861e952
--- /dev/null
+++ b/test/files/neg/t4425b.scala
@@ -0,0 +1,38 @@
+object Test1 {
+  object X { def unapply(x : String)(y: String) = throw new Exception }
+
+  def f1() {
+    println(      "" match { case _ X _   => "ok" ; case _ => "fail" })
+    println((X: Any) match { case _ X _   => "ok" ; case _ => "fail" })
+    println(      "" match { case X(_)    => "ok" ; case _ => "fail" })
+    println((X: Any) match { case X(_)    => "ok" ; case _ => "fail" })
+    println(      "" match { case X(_, _) => "ok" ; case _ => "fail" })
+    println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+  }
+}
+
+object Test2 {
+  object X { def unapply(x : String) = throw new Exception }
+
+  def f1() {
+    println(      "" match { case _ X _   => "ok" ; case _ => "fail" })
+    println((X: Any) match { case _ X _   => "ok" ; case _ => "fail" })
+    println(      "" match { case X(_)    => "ok" ; case _ => "fail" })
+    println((X: Any) match { case X(_)    => "ok" ; case _ => "fail" })
+    println(      "" match { case X(_, _) => "ok" ; case _ => "fail" })
+    println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+  }
+}
+
+object Test3 {
+  object X { def unapply(x : String) = None }
+
+  def f1() {
+    println(      "" match { case _ X _   => "ok" ; case _ => "fail" })
+    println((X: Any) match { case _ X _   => "ok" ; case _ => "fail" })
+    println(      "" match { case X(_)    => "ok" ; case _ => "fail" })
+    println((X: Any) match { case X(_)    => "ok" ; case _ => "fail" })
+    println(      "" match { case X(_, _) => "ok" ; case _ => "fail" })
+    println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+  }
+}
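
Editorial aside: t4425b exercises objects whose unapply takes a second parameter list and therefore cannot serve as an extractor. For contrast, a hedged sketch of a well-formed extractor (Email is an invented name) has a single parameter list and returns an Option:

    object Email {
      // Single parameter list returning Option: usable in patterns.
      def unapply(s: String): Option[(String, String)] = s.split("@") match {
        case Array(user, host) => Some((user, host))
        case _                 => None
      }
    }

    object ExtractorDemo extends App {
      "dev@example.org" match {
        case Email(user, host) => println(s"$user at $host")
        case _                 => println("no match")
      }
    }
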
diff --git a/test/files/neg/t4431.scala b/test/files/neg/t4431.scala
index 91e4abf..5fbb239 100644
--- a/test/files/neg/t4431.scala
+++ b/test/files/neg/t4431.scala
@@ -7,7 +7,7 @@ object Test {
   // this earns a VerifyError.
   trait C { def wait (): Unit }
   class D { }
-  
+
   def main(args: Array[String]): Unit = {
     new B with A { }
     new BB
diff --git a/test/files/neg/t4440.check b/test/files/neg/t4440.check
index 2861dc3..10e7188 100644
--- a/test/files/neg/t4440.check
+++ b/test/files/neg/t4440.check
@@ -1,13 +1,15 @@
-t4440.scala:12: error: The outer reference in this type test cannot be checked at run time.
+t4440.scala:12: warning: The outer reference in this type test cannot be checked at run time.
     case _: b.Inner => println("b")
           ^
-t4440.scala:13: error: The outer reference in this type test cannot be checked at run time.
+t4440.scala:13: warning: The outer reference in this type test cannot be checked at run time.
     case _: a.Inner => println("a") // this is the case we want
           ^
-t4440.scala:16: error: The outer reference in this type test cannot be checked at run time.
+t4440.scala:16: warning: The outer reference in this type test cannot be checked at run time.
     case _: a.Inner => println("a")
           ^
-t4440.scala:17: error: The outer reference in this type test cannot be checked at run time.
+t4440.scala:17: warning: The outer reference in this type test cannot be checked at run time.
     case _: b.Inner => println("b") // this is the case we want
           ^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/t4457_1.scala b/test/files/neg/t4457_1.scala
index 91d9bc9..11f1237 100644
--- a/test/files/neg/t4457_1.scala
+++ b/test/files/neg/t4457_1.scala
@@ -22,7 +22,7 @@ object ImplicitConvAmbiguity2 {
   def aFunc[A](a: NQ[A]) = new BB[A]
 
   def bFunc[T](e1: N[T]) = {}
-  
+
   def typeMe1 {
     val x = aFunc(4F)
     bFunc(x)
diff --git a/test/files/neg/t4457_2.scala b/test/files/neg/t4457_2.scala
index b2e7505..f3a170f 100644
--- a/test/files/neg/t4457_2.scala
+++ b/test/files/neg/t4457_2.scala
@@ -22,7 +22,7 @@ object ImplicitConvAmbiguity2 {
   def aFunc[A](a: NQ[A]) = new BB[A]
 
   def bFunc[T](e1: N[T]) = {}
-  
+
   def typeMe2 {
     val x = aFunc(4F)
     bFunc(x)
diff --git a/test/files/neg/t4460a.check b/test/files/neg/t4460a.check
new file mode 100644
index 0000000..b711e7a
--- /dev/null
+++ b/test/files/neg/t4460a.check
@@ -0,0 +1,4 @@
+t4460a.scala:6: error: called constructor's definition must precede calling constructor's definition
+  def this() = this() // was binding to Predef.<init> !!
+               ^
+one error found
diff --git a/test/files/neg/t4460a.scala b/test/files/neg/t4460a.scala
new file mode 100644
index 0000000..0a7a221
--- /dev/null
+++ b/test/files/neg/t4460a.scala
@@ -0,0 +1,7 @@
+trait A
+
+class B(val x: Int) {
+  self: A =>
+
+  def this() = this() // was binding to Predef.<init> !!
+}
diff --git a/test/files/neg/t4460b.check b/test/files/neg/t4460b.check
new file mode 100644
index 0000000..f0e703f
--- /dev/null
+++ b/test/files/neg/t4460b.check
@@ -0,0 +1,4 @@
+t4460b.scala:7: error: called constructor's definition must precede calling constructor's definition
+	  def this() = this() // was binding to Predef.<init> !!
+                       ^
+one error found
diff --git a/test/files/neg/t4460b.scala b/test/files/neg/t4460b.scala
new file mode 100644
index 0000000..1233017
--- /dev/null
+++ b/test/files/neg/t4460b.scala
@@ -0,0 +1,9 @@
+trait A
+
+class Outer() {
+	class B(val x: Int) {
+	  self: A =>
+
+	  def this() = this() // was binding to Predef.<init> !!
+	}
+}
diff --git a/test/files/neg/t4460c.check b/test/files/neg/t4460c.check
new file mode 100644
index 0000000..4e96711
--- /dev/null
+++ b/test/files/neg/t4460c.check
@@ -0,0 +1,7 @@
+t4460c.scala:4: error: overloaded method constructor B with alternatives:
+  (a: String)B <and>
+  (x: Int)B
+ cannot be applied to ()
+  def this(a: String) = this()
+                        ^
+one error found
diff --git a/test/files/neg/t4460c.scala b/test/files/neg/t4460c.scala
new file mode 100644
index 0000000..1ae2585
--- /dev/null
+++ b/test/files/neg/t4460c.scala
@@ -0,0 +1,7 @@
+class B(val x: Int) {
+  self: A =>
+
+  def this(a: String) = this()
+}
+
+class A()
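
Editorial aside: t4460a-c pin down the rule that a self-invoking auxiliary constructor must call a constructor defined earlier in the class. A minimal legal ordering, as a hedged sketch (Point is an illustrative name), looks like:

    class Point(val x: Int, val y: Int) {  // primary constructor, defined first
      def this(x: Int) = this(x, 0)        // calls the primary: allowed
      def this() = this(0)                 // calls this(Int), defined just above: allowed
    }
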
diff --git a/test/files/neg/t4515.check b/test/files/neg/t4515.check
index a60d162..708fcfb 100644
--- a/test/files/neg/t4515.check
+++ b/test/files/neg/t4515.check
@@ -1,6 +1,16 @@
 t4515.scala:37: error: type mismatch;
- found   : _0(in value $anonfun) where type _0(in value $anonfun)
- required: (some other)_0(in value $anonfun)
+ found   : _$1 where type _$1
+ required: _$2
         handler.onEvent(target, ctx.getEvent, node, ctx)
                                     ^
-one error found
+t4515.scala:37: error: type mismatch;
+ found   : Main.DerivedPushNode[_$1] where type _$1
+ required: Main.PushNode[_$2]
+        handler.onEvent(target, ctx.getEvent, node, ctx)
+                                              ^
+t4515.scala:37: error: type mismatch;
+ found   : Main.PushEventContext[_$1] where type _$1
+ required: Main.PushEventContext[_$2]
+        handler.onEvent(target, ctx.getEvent, node, ctx)
+                                                    ^
+three errors found
diff --git a/test/files/neg/t4515.scala b/test/files/neg/t4515.scala
index 63049f2..4efe45f 100644
--- a/test/files/neg/t4515.scala
+++ b/test/files/neg/t4515.scala
@@ -16,7 +16,7 @@ object Main {
                 ctx: PushEventContext[EventType]): Unit
                                             }
   val handlers = new HashMap[DerivedPushNode[_], HandlerBase[_]]
-  
+
   object TimerPushService {
     private val INSTANCE: TimerPushService = new TimerPushService
     def get: TimerPushService = INSTANCE
diff --git a/test/files/neg/t4537.check b/test/files/neg/t4537.check
deleted file mode 100644
index 931bcd0..0000000
--- a/test/files/neg/t4537.check
+++ /dev/null
@@ -1,4 +0,0 @@
-c.scala:7: error: object Settings in package a cannot be accessed in package a
- println(Settings.Y)
-         ^
-one error found
diff --git a/test/files/neg/t4537/a.scala b/test/files/neg/t4537/a.scala
deleted file mode 100644
index 65e183c..0000000
--- a/test/files/neg/t4537/a.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package a
-
-private[a] object Settings {
-  val X = 0
-}
\ No newline at end of file
diff --git a/test/files/neg/t4537/b.scala b/test/files/neg/t4537/b.scala
deleted file mode 100644
index bb9dd4e..0000000
--- a/test/files/neg/t4537/b.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package b
-
-object Settings {
-  val Y = 0
-}
\ No newline at end of file
diff --git a/test/files/neg/t4537/c.scala b/test/files/neg/t4537/c.scala
deleted file mode 100644
index 3795991..0000000
--- a/test/files/neg/t4537/c.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package b
-package c
-
-import a._
-
-object Test {
- println(Settings.Y)
-}
\ No newline at end of file
diff --git a/test/files/neg/t4541b.scala b/test/files/neg/t4541b.scala
index 7a21ffc..ba406ca 100644
--- a/test/files/neg/t4541b.scala
+++ b/test/files/neg/t4541b.scala
@@ -8,7 +8,7 @@ final class SparseArray[@specialized(Int) T](private var data: Array[T]) extends
   def use(inData: Array[T]) = {
     data = inData;
   }
-  
+
   def set(that: SparseArray[T]) = {
     use(that.data.clone)
   }
diff --git a/test/files/neg/t4584.check b/test/files/neg/t4584.check
index 419f570..97d07af 100644
--- a/test/files/neg/t4584.check
+++ b/test/files/neg/t4584.check
@@ -1,7 +1,7 @@
 t4584.scala:1: error: error in unicode escape
-class A { val /u2
+class A { val \u2
                  ^
-t4584.scala:1: error: illegal character '/uffff'
-class A { val /u2
+t4584.scala:1: error: illegal character '\uffff'
+class A { val \u2
                 ^
 two errors found
diff --git a/test/files/neg/t4691_exhaust_extractor.check b/test/files/neg/t4691_exhaust_extractor.check
index cd12e56..6396944 100644
--- a/test/files/neg/t4691_exhaust_extractor.check
+++ b/test/files/neg/t4691_exhaust_extractor.check
@@ -1,13 +1,15 @@
-t4691_exhaust_extractor.scala:17: error: match may not be exhaustive.
+t4691_exhaust_extractor.scala:17: warning: match may not be exhaustive.
 It would fail on the following input: Bar3()
   def f1(x: Foo) = x match {
                    ^
-t4691_exhaust_extractor.scala:23: error: match may not be exhaustive.
+t4691_exhaust_extractor.scala:23: warning: match may not be exhaustive.
 It would fail on the following input: Bar3()
   def f2(x: Foo) = x match {
                    ^
-t4691_exhaust_extractor.scala:29: error: match may not be exhaustive.
+t4691_exhaust_extractor.scala:29: warning: match may not be exhaustive.
 It would fail on the following input: Bar3()
   def f3(x: Foo) = x match {
                    ^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/t4727.check b/test/files/neg/t4727.check
index 8a4536f..a17cdde 100644
--- a/test/files/neg/t4727.check
+++ b/test/files/neg/t4727.check
@@ -1,10 +1,4 @@
-t4727.scala:5: error: type mismatch;
- found   : Null
- required: Int
-Note that implicit conversions are not applicable because they are ambiguous:
- both method Integer2intNullConflict in class LowPriorityImplicits of type (x: Null)Int
- and method Integer2int in object Predef of type (x: Integer)Int
- are possible conversion functions from Null to Int
+t4727.scala:5: error: an expression of type Null is ineligible for implicit conversion
 Error occurred in an application involving default arguments.
     new C[Int]
     ^
diff --git a/test/files/neg/t4728.check b/test/files/neg/t4728.check
new file mode 100644
index 0000000..c6ef182
--- /dev/null
+++ b/test/files/neg/t4728.check
@@ -0,0 +1,7 @@
+t4728.scala:10: error: ambiguous reference to overloaded definition,
+both method f in object Ambiguous of type (ys: Y*)Int
+and  method f in object Ambiguous of type (x: X)Int
+match argument types (Y) and expected result type Any
+  println(Ambiguous.f(new Y))
+                    ^
+one error found
diff --git a/test/pending/run/t4728.scala b/test/files/neg/t4728.scala
similarity index 100%
rename from test/pending/run/t4728.scala
rename to test/files/neg/t4728.scala
diff --git a/test/files/neg/t4749.check b/test/files/neg/t4749.check
index 93ad393..3539140 100644
--- a/test/files/neg/t4749.check
+++ b/test/files/neg/t4749.check
@@ -1,28 +1,34 @@
-t4749.scala:2: error: Fail1 has a main method with parameter type Array[String], but bippy.Fail1 will not be a runnable program.
+t4749.scala:2: warning: Fail1 has a main method with parameter type Array[String], but bippy.Fail1 will not be a runnable program.
   Reason: main method must have exact signature (Array[String])Unit
   object Fail1 {
          ^
-t4749.scala:6: error: Fail2 has a main method with parameter type Array[String], but bippy.Fail2 will not be a runnable program.
+t4749.scala:6: warning: Fail2 has a main method with parameter type Array[String], but bippy.Fail2 will not be a runnable program.
   Reason: main methods cannot be generic.
   object Fail2 {
          ^
-t4749.scala:13: error: Fail3 has a main method with parameter type Array[String], but bippy.Fail3 will not be a runnable program.
+t4749.scala:13: warning: Fail3 has a main method with parameter type Array[String], but bippy.Fail3 will not be a runnable program.
   Reason: main methods cannot refer to type parameters or abstract types.
   object Fail3 extends Bippy[Unit] { }
          ^
-t4749.scala:16: error: Fail4 has a main method with parameter type Array[String], but bippy.Fail4 will not be a runnable program.
+t4749.scala:16: warning: Fail4 has a main method with parameter type Array[String], but bippy.Fail4 will not be a runnable program.
   Reason: companion is a trait, which means no static forwarder can be generated.
 
   object Fail4 {
          ^
-t4749.scala:21: error: Fail5 has a main method with parameter type Array[String], but bippy.Fail5 will not be a runnable program.
+t4749.scala:21: warning: Fail5 has a main method with parameter type Array[String], but bippy.Fail5 will not be a runnable program.
   Reason: companion contains its own main method, which means no static forwarder can be generated.
 
-  object Fail5 extends Fail5 { }    
+  object Fail5 extends Fail5 { }
          ^
-t4749.scala:26: error: Fail6 has a main method with parameter type Array[String], but bippy.Fail6 will not be a runnable program.
+t4749.scala:26: warning: Fail6 has a main method with parameter type Array[String], but bippy.Fail6 will not be a runnable program.
   Reason: companion contains its own main method (implementation restriction: no main is allowed, regardless of signature), which means no static forwarder can be generated.
 
   object Fail6 {
          ^
-6 errors found
+t4749.scala:42: warning: Win3 has a main method with parameter type Array[String], but bippy.Win3 will not be a runnable program.
+  Reason: main method must have exact signature (Array[String])Unit
+  object Win3 extends WinBippy[Unit] { }
+         ^
+error: No warnings can be incurred under -Xfatal-warnings.
+7 warnings found
+one error found
diff --git a/test/files/neg/t4749.scala b/test/files/neg/t4749.scala
index 0973c36..2c67e2e 100644
--- a/test/files/neg/t4749.scala
+++ b/test/files/neg/t4749.scala
@@ -18,7 +18,7 @@ package bippy {
   }
   trait Fail4 { }
 
-  object Fail5 extends Fail5 { }    
+  object Fail5 extends Fail5 { }
   class Fail5 {
     def main(args: Array[String]): Unit = ()
   }
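
Editorial aside: the rewritten t4749 check demotes the "will not be a runnable program" diagnostics to warnings, which -Xfatal-warnings then escalates. For reference, a hedged sketch of an object that does get a static forwarder (Runner is an invented name) uses the exact required signature:

    object Runner {
      // Exact signature (Array[String])Unit, non-generic, with no competing
      // companion main: eligible for a static forwarder.
      def main(args: Array[String]): Unit =
        println("runnable")
    }
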
diff --git a/test/files/neg/t4762.check b/test/files/neg/t4762.check
index 5e67f20..a0525f6 100644
--- a/test/files/neg/t4762.check
+++ b/test/files/neg/t4762.check
@@ -1,7 +1,9 @@
-t4762.scala:15: error: private[this] value x in class B shadows mutable x inherited from class A.  Changes to x will not be visible within class B - you may want to give them distinct names.
+t4762.scala:15: warning: private[this] value x in class B shadows mutable x inherited from class A.  Changes to x will not be visible within class B - you may want to give them distinct names.
     /* (99,99) */  (this.x, this.y),
                          ^
-t4762.scala:48: error: private[this] value x in class Derived shadows mutable x inherited from class Base.  Changes to x will not be visible within class Derived - you may want to give them distinct names.
+t4762.scala:48: warning: private[this] value x in class Derived shadows mutable x inherited from class Base.  Changes to x will not be visible within class Derived - you may want to give them distinct names.
   class Derived( x : Int ) extends Base( x ) { override def toString = x.toString }
                                                                        ^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t4851.check b/test/files/neg/t4851.check
index 9633fdf..132dd91 100644
--- a/test/files/neg/t4851.check
+++ b/test/files/neg/t4851.check
@@ -1,49 +1,51 @@
-S.scala:2: error: Adapting argument list by inserting (): leaky (Object-receiving) target makes this especially dangerous.
+S.scala:2: warning: Adaptation of argument list by inserting () has been deprecated: leaky (Object-receiving) target makes this especially dangerous.
         signature: J(x: Any): J
   given arguments: <none>
  after adaptation: new J((): Unit)
   val x1 = new J
            ^
-S.scala:3: error: Adapting argument list by inserting (): leaky (Object-receiving) target makes this especially dangerous.
+S.scala:3: warning: Adaptation of argument list by inserting () has been deprecated: leaky (Object-receiving) target makes this especially dangerous.
         signature: J(x: Any): J
   given arguments: <none>
  after adaptation: new J((): Unit)
   val x2 = new J()
            ^
-S.scala:4: error: Adapting argument list by creating a 5-tuple: this may not be what you want.
+S.scala:4: warning: Adapting argument list by creating a 5-tuple: this may not be what you want.
         signature: J(x: Any): J
   given arguments: 1, 2, 3, 4, 5
  after adaptation: new J((1, 2, 3, 4, 5): (Int, Int, Int, Int, Int))
   val x3 = new J(1, 2, 3, 4, 5)
            ^
-S.scala:6: error: Adapting argument list by creating a 3-tuple: this may not be what you want.
+S.scala:6: warning: Adapting argument list by creating a 3-tuple: this may not be what you want.
         signature: Some.apply[A](x: A): Some[A]
   given arguments: 1, 2, 3
  after adaptation: Some((1, 2, 3): (Int, Int, Int))
   val y1 = Some(1, 2, 3)
                ^
-S.scala:7: error: Adapting argument list by creating a 3-tuple: this may not be what you want.
+S.scala:7: warning: Adapting argument list by creating a 3-tuple: this may not be what you want.
         signature: Some(x: A): Some[A]
   given arguments: 1, 2, 3
  after adaptation: new Some((1, 2, 3): (Int, Int, Int))
   val y2 = new Some(1, 2, 3)
            ^
-S.scala:9: error: Adapting argument list by inserting (): this is unlikely to be what you want.
+S.scala:9: warning: Adaptation of argument list by inserting () has been deprecated: this is unlikely to be what you want.
         signature: J2[T](x: T): J2[T]
   given arguments: <none>
  after adaptation: new J2((): Unit)
   val z1 = new J2
            ^
-S.scala:10: error: Adapting argument list by inserting (): this is unlikely to be what you want.
+S.scala:10: warning: Adaptation of argument list by inserting () has been deprecated: this is unlikely to be what you want.
         signature: J2[T](x: T): J2[T]
   given arguments: <none>
  after adaptation: new J2((): Unit)
   val z2 = new J2()
            ^
-S.scala:14: error: Adapting argument list by creating a 3-tuple: this may not be what you want.
+S.scala:14: warning: Adapting argument list by creating a 3-tuple: this may not be what you want.
         signature: Test.anyId(a: Any): Any
   given arguments: 1, 2, 3
  after adaptation: Test.anyId((1, 2, 3): (Int, Int, Int))
   val w1 = anyId(1, 2 ,3)
                 ^
-8 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+8 warnings found
+one error found
diff --git a/test/files/neg/t4851.flags b/test/files/neg/t4851.flags
index 0545cb8..ca0d0a0 100644
--- a/test/files/neg/t4851.flags
+++ b/test/files/neg/t4851.flags
@@ -1 +1 @@
--Ywarn-adapted-args -Xfatal-warnings
+-Ywarn-adapted-args -Xfatal-warnings -deprecation
diff --git a/test/files/neg/t4851/J.java b/test/files/neg/t4851/J.java
index dbf8b82..9c35b8a 100644
--- a/test/files/neg/t4851/J.java
+++ b/test/files/neg/t4851/J.java
@@ -1,14 +1,14 @@
 public class J {
   Object x;
-  
+
   public J(Object x) {
     this.x = x;
   }
-  
+
   public J(int x1, int x2, int x3, int x4, int x5, int x6) {
     this.x = null;
   }
-  
+
   public String toString() {
     return "J:" + x.getClass();
   }
diff --git a/test/files/neg/t4851/J2.java b/test/files/neg/t4851/J2.java
index c3a7231..82954d9 100644
--- a/test/files/neg/t4851/J2.java
+++ b/test/files/neg/t4851/J2.java
@@ -1,10 +1,10 @@
 public class J2<T> {
   T x;
-  
+
   public <T> J(T x) {
     this.x = x;
   }
-  
+
   public String toString() {
     return "J2:" + x.getClass();
   }
diff --git a/test/files/neg/t4851/S.scala b/test/files/neg/t4851/S.scala
index 0a442ac..779ba37 100644
--- a/test/files/neg/t4851/S.scala
+++ b/test/files/neg/t4851/S.scala
@@ -5,7 +5,7 @@ object Test {
 
   val y1 = Some(1, 2, 3)
   val y2 = new Some(1, 2, 3)
-  
+
   val z1 = new J2
   val z2 = new J2()
   val z3 = new J2(())
@@ -18,7 +18,7 @@ object Test {
     println(x2)
     println(x3)
     println(y1)
-    
+
     println(z1)
     println(z2)
     println(z3)
diff --git a/test/files/neg/t4877.scala b/test/files/neg/t4877.scala
index 9cad156..5d97877 100644
--- a/test/files/neg/t4877.scala
+++ b/test/files/neg/t4877.scala
@@ -13,7 +13,7 @@ class B {
     def bar(x: Int): Mom
     def bippy(): List[Mom]
   }
-  
+
   val x: Bippy = new AnyRef {
     type Mom = String
     def bar(x: Int) = 55
diff --git a/test/files/neg/t4928.check b/test/files/neg/t4928.check
index 06d4f22..18a5d57 100644
--- a/test/files/neg/t4928.check
+++ b/test/files/neg/t4928.check
@@ -1,5 +1,5 @@
 t4928.scala:3: error: parameter 'a' is already specified at parameter position 1
-Note that that 'z' is not a parameter name of the invoked method.
+Note that 'z' is not a parameter name of the invoked method.
   f(z = 0, a = 1)
              ^
 one error found
diff --git a/test/files/neg/t512.check b/test/files/neg/t512.check
index 814e65e..051e5ee 100644
--- a/test/files/neg/t512.check
+++ b/test/files/neg/t512.check
@@ -1,4 +1,7 @@
 t512.scala:3: error: not found: value something
     val xxx = something ||
               ^
-one error found
+t512.scala:4: error: not found: value something_else
+        something_else;
+        ^
+two errors found
diff --git a/test/files/neg/t5120.scala b/test/files/neg/t5120.scala
index c7063b7..f28b2cf 100644
--- a/test/files/neg/t5120.scala
+++ b/test/files/neg/t5120.scala
@@ -6,7 +6,7 @@ class Cell[T](x0: T) {
 object Test {
   val str: Cell[String] = new Cell("a")
   val other: Cell[Int]  = new Cell(0)
-  
+
   def main(args: Array[String]): Unit = {
     List(str, other) foreach (_.x1 = new AnyRef)
     str.x1.length
diff --git a/test/files/neg/t5148.check b/test/files/neg/t5148.check
index 25107c4..8a667f4 100644
--- a/test/files/neg/t5148.check
+++ b/test/files/neg/t5148.check
@@ -1,9 +1,5 @@
-error: bad symbolic reference. A signature in Imports.class refers to term global
-in class scala.tools.nsc.interpreter.IMain which is not available.
-It may be completely missing from the current classpath, or the version on
-the classpath might be incompatible with the version used when compiling Imports.class.
-error: bad symbolic reference. A signature in Imports.class refers to term memberHandlers
-in class scala.tools.nsc.interpreter.IMain which is not available.
-It may be completely missing from the current classpath, or the version on
-the classpath might be incompatible with the version used when compiling Imports.class.
-two errors found
+error: bad symbolic reference to scala.tools.nsc.interpreter.IMain.Request encountered in class file 'Imports.class'.
+Cannot access type Request in class scala.tools.nsc.interpreter.IMain. The current classpath may be
+missing a definition for scala.tools.nsc.interpreter.IMain.Request, or Imports.class may have been compiled against a version that's
+incompatible with the one found on the current classpath.
+one error found
diff --git a/test/files/neg/t5152.scala b/test/files/neg/t5152.scala
index 5efc76a..56df31e 100644
--- a/test/files/neg/t5152.scala
+++ b/test/files/neg/t5152.scala
@@ -2,16 +2,16 @@ object Test {
   new C
   new C1
   new C2
-  
+
   class A[E[_]] { }
   class B[E[_]] extends A[B] { }  // B is depth 2 but A requires 1
-  class C extends B { } 
-  
+  class C extends B { }
+
   class A1[E[F[G[_]]]] { }
   class B1[E[_]] extends A1[B1]   // B1 is depth 2 but A1 requires 3
   class C1 extends B1 { }
-  
+
   class A2[E[_]] { }
   class B2[E] extends A2[B2] { }  // this one is correct
-  class C2 extends B2 { } 
+  class C2 extends B2 { }
 }
diff --git a/test/files/neg/t5182.check b/test/files/neg/t5182.check
new file mode 100644
index 0000000..3161f92
--- /dev/null
+++ b/test/files/neg/t5182.check
@@ -0,0 +1,7 @@
+t5182.scala:2: error: unknown annotation argument name: qwe
+  @java.lang.Deprecated(qwe = "wer") def ok(q:Int) = 1
+                            ^
+t5182.scala:3: error: classfile annotation arguments have to be supplied as named arguments
+  @java.lang.Deprecated("wer") def whereAmI(q:Int) = 1
+                        ^
+two errors found
diff --git a/test/files/neg/case-collision.flags b/test/files/neg/t5182.flags
similarity index 100%
copy from test/files/neg/case-collision.flags
copy to test/files/neg/t5182.flags
diff --git a/test/files/neg/t5182.scala b/test/files/neg/t5182.scala
new file mode 100644
index 0000000..0687e99
--- /dev/null
+++ b/test/files/neg/t5182.scala
@@ -0,0 +1,5 @@
+class test {
+  @java.lang.Deprecated(qwe = "wer") def ok(q:Int) = 1
+  @java.lang.Deprecated("wer") def whereAmI(q:Int) = 1
+  @java.lang.Deprecated() def bippy(q:Int) = 1
+}
diff --git a/test/files/neg/t5189.check b/test/files/neg/t5189.check
index 7762f46..4885de9 100644
--- a/test/files/neg/t5189.check
+++ b/test/files/neg/t5189.check
@@ -3,4 +3,4 @@ t5189.scala:3: error: type mismatch;
  required: Any => Any
   def f(x: Any): Any => Any = x match { case Foo(bar) => bar }
                                                          ^
-one error found
\ No newline at end of file
+one error found
diff --git a/test/files/neg/t520.scala b/test/files/neg/t520.scala
index 949a509..076aca3 100644
--- a/test/files/neg/t520.scala
+++ b/test/files/neg/t520.scala
@@ -4,6 +4,6 @@ object test {
       assert(keyword != null);
     }
 
-    def verifyKeyword(source : java.io.File, pos : Int) = 
+    def verifyKeyword(source : java.io.File, pos : Int) =
       verifyKeyword("", source, pos);
 }
diff --git a/test/files/neg/t5352.check b/test/files/neg/t5352.check
index d24b0e8..1675da9 100644
--- a/test/files/neg/t5352.check
+++ b/test/files/neg/t5352.check
@@ -6,7 +6,7 @@ t5352.scala:11: error: type mismatch;
          ^
 t5352.scala:14: error: method f in class Bar1 cannot be accessed in boop.Bar1
  Access to protected method f not permitted because
- enclosing object boop is not a subclass of 
+ enclosing object boop is not a subclass of
  class Bar1 in object boop where target is defined
   (new Bar1).f
              ^
diff --git a/test/files/neg/t5352.scala b/test/files/neg/t5352.scala
index 6ee41f5..ed74a84 100644
--- a/test/files/neg/t5352.scala
+++ b/test/files/neg/t5352.scala
@@ -2,7 +2,7 @@ object boop {
   abstract class Bar { protected def f(): Any }
   class Bar1 extends Bar { protected def f(): Int = 5 }
   class Bar2 extends Bar { protected def f(): Int = 5 }
-  
+
   val xs = List(new Bar1, new Bar2)
 
   type BarF = { def f(): Int }
diff --git a/test/files/neg/t5357.scala b/test/files/neg/t5357.scala
index 369a556..6a52283 100644
--- a/test/files/neg/t5357.scala
+++ b/test/files/neg/t5357.scala
@@ -1,7 +1,7 @@
 trait M
 
 case class N() extends M {
-  def mytest(x: M) = x match { 
+  def mytest(x: M) = x match {
     case A: N => 1
     case _    => 0
   }
diff --git a/test/files/neg/t5426.check b/test/files/neg/t5426.check
index d9e192d..98f3dda 100644
--- a/test/files/neg/t5426.check
+++ b/test/files/neg/t5426.check
@@ -1,13 +1,15 @@
-t5426.scala:2: error: comparing values of types Some[Int] and Int using `==' will always yield false
+t5426.scala:2: warning: comparing values of types Some[Int] and Int using `==' will always yield false
   def f1 = Some(5) == 5
                    ^
-t5426.scala:3: error: comparing values of types Int and Some[Int] using `==' will always yield false
+t5426.scala:3: warning: comparing values of types Int and Some[Int] using `==' will always yield false
   def f2 = 5 == Some(5)
              ^
-t5426.scala:8: error: comparing values of types Int and Some[Int] using `==' will always yield false
+t5426.scala:8: warning: comparing values of types Int and Some[Int] using `==' will always yield false
   (x1 == x2)
       ^
-t5426.scala:9: error: comparing values of types Some[Int] and Int using `==' will always yield false
+t5426.scala:9: warning: comparing values of types Some[Int] and Int using `==' will always yield false
   (x2 == x1)
       ^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/t5426.scala b/test/files/neg/t5426.scala
index f2fb5cc..af0f981 100644
--- a/test/files/neg/t5426.scala
+++ b/test/files/neg/t5426.scala
@@ -1,10 +1,10 @@
 class A {
   def f1 = Some(5) == 5
   def f2 = 5 == Some(5)
-  
+
   val x1 = 5
   val x2 = Some(5)
-  
+
   (x1 == x2)
   (x2 == x1)
 }
diff --git a/test/files/neg/t5440.check b/test/files/neg/t5440.check
index a862350..1c4592c 100644
--- a/test/files/neg/t5440.check
+++ b/test/files/neg/t5440.check
@@ -1,5 +1,7 @@
-t5440.scala:3: error: match may not be exhaustive.
+t5440.scala:3: warning: match may not be exhaustive.
 It would fail on the following inputs: (List(_), Nil), (Nil, List(_))
     (list1, list2) match {
     ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
 one error found
diff --git a/test/files/neg/t545.check b/test/files/neg/t545.check
index 8ebbf9b..aae575f 100644
--- a/test/files/neg/t545.check
+++ b/test/files/neg/t545.check
@@ -1,7 +1,4 @@
 t545.scala:4: error: value blah is not a member of Test.Foo
   val x = foo.blah match {
               ^
-t545.scala:5: error: recursive value x needs type
-    case List(x) => x
-                    ^
-two errors found
+one error found
diff --git a/test/files/neg/t5455.scala b/test/files/neg/t5455.scala
index 22d6c44..6e54335 100644
--- a/test/files/neg/t5455.scala
+++ b/test/files/neg/t5455.scala
@@ -1,13 +1,13 @@
 trait Test {
   def root: Test
-  
+
   @annotation.tailrec final lazy val bar: Thing[Int] = {
     if (this eq root)
       Thing(() => System.identityHashCode(bar))
     else
       root.bar
   }
-  
+
   def f = bar.f()
 }
 
diff --git a/test/files/neg/t5497.check b/test/files/neg/t5497.check
index fef6d38..4d6d52b 100644
--- a/test/files/neg/t5497.check
+++ b/test/files/neg/t5497.check
@@ -1,4 +1,4 @@
 t5497.scala:3: error: not found: value sq
-    case other => println(null.asInstanceOf[sq.Filter].tableName) 
+    case other => println(null.asInstanceOf[sq.Filter].tableName)
                                             ^
 one error found
diff --git a/test/files/neg/t5497.scala b/test/files/neg/t5497.scala
index 40d47de..c846b1b 100644
--- a/test/files/neg/t5497.scala
+++ b/test/files/neg/t5497.scala
@@ -1,5 +1,5 @@
 object TestQueryable extends App{
   ({
-    case other => println(null.asInstanceOf[sq.Filter].tableName) 
+    case other => println(null.asInstanceOf[sq.Filter].tableName)
   } : Any => Unit)(null)
 }
diff --git a/test/files/neg/t5529.check b/test/files/neg/t5529.check
index 5d2175f..da3f84e 100644
--- a/test/files/neg/t5529.check
+++ b/test/files/neg/t5529.check
@@ -4,7 +4,4 @@ t5529.scala:12: error: File is already defined as class File
 t5529.scala:10: error: class type required but test.Test.File found
   sealed class Dir extends File { }
                            ^
-t5529.scala:10: error: test.Test.File does not have a constructor
-  sealed class Dir extends File { }
-                   ^
-three errors found
+two errors found
diff --git a/test/files/neg/t556.check b/test/files/neg/t556.check
index c278e13..30cc296 100644
--- a/test/files/neg/t556.check
+++ b/test/files/neg/t556.check
@@ -1,4 +1,7 @@
-t556.scala:3: error: wrong number of parameters; expected = 1
+t556.scala:3: error: missing parameter type
   def g:Int = f((x,y)=>x)
-                     ^
-one error found
+                 ^
+t556.scala:3: error: missing parameter type
+  def g:Int = f((x,y)=>x)
+                   ^
+two errors found
diff --git a/test/files/neg/t5572.check b/test/files/neg/t5572.check
index 7b1e290..3c9adf4 100644
--- a/test/files/neg/t5572.check
+++ b/test/files/neg/t5572.check
@@ -3,9 +3,14 @@ t5572.scala:16: error: type mismatch;
  required: A
     Z.transf(a, b) match {
              ^
+t5572.scala:16: error: type mismatch;
+ found   : A
+ required: B
+    Z.transf(a, b) match {
+                ^
 t5572.scala:18: error: type mismatch;
  found   : A
  required: B
         run(sth, b)
                  ^
-two errors found
+three errors found
diff --git a/test/files/neg/t5572.scala b/test/files/neg/t5572.scala
index 2da1209..4169df4 100644
--- a/test/files/neg/t5572.scala
+++ b/test/files/neg/t5572.scala
@@ -16,7 +16,7 @@ class Test {
     Z.transf(a, b) match {
       case sth =>
         run(sth, b)
-    }  
+    }
   }
 
   def run(x: X, z: B): Unit = ()
diff --git a/test/files/neg/t5578.check b/test/files/neg/t5578.check
index d803adb..56123d2 100644
--- a/test/files/neg/t5578.check
+++ b/test/files/neg/t5578.check
@@ -1,4 +1,7 @@
-t5578.scala:33: error: No Manifest available for T.
+t5578.scala:33: error: type mismatch;
+ found   : NumericOpsExp.this.Plus[T]
+ required: NumericOpsExp.this.Rep[T]
+    (which expands to)  NumericOpsExp.this.Exp[T]
   def plus[T: Numeric](x: Rep[T], y: Rep[T]): Rep[T] = Plus[T](x,y)
                                                               ^
 one error found
diff --git a/test/files/neg/t558.scala b/test/files/neg/t558.scala
index 4941a06..58b0303 100644
--- a/test/files/neg/t558.scala
+++ b/test/files/neg/t558.scala
@@ -11,7 +11,7 @@ abstract class NewModel {
     val parent : SymbolURL;
     final val top = parent.top;
     final val source = top.file;
-    
+
   }
   abstract class RootURL extends SymbolURL {
     final val top   : RootURL = this;
diff --git a/test/files/neg/t5580b.check b/test/files/neg/t5580b.check
new file mode 100644
index 0000000..45fde46
--- /dev/null
+++ b/test/files/neg/t5580b.check
@@ -0,0 +1,6 @@
+t5580b.scala:11: error: polymorphic expression cannot be instantiated to expected type;
+ found   : [A]scala.collection.mutable.Set[A]
+ required: scala.collection.mutable.Map[bar,scala.collection.mutable.Set[bar]]
+    if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set())
+                                                                ^
+one error found
diff --git a/test/files/neg/t5580b.scala b/test/files/neg/t5580b.scala
new file mode 100644
index 0000000..2161da4
--- /dev/null
+++ b/test/files/neg/t5580b.scala
@@ -0,0 +1,13 @@
+import scala.collection.mutable.WeakHashMap
+import scala.collection.JavaConversions._
+
+class bar { }
+
+class foo {
+  val map = WeakHashMap[AnyRef, collection.mutable.Map[bar, collection.mutable.Set[bar]]]()
+
+  def test={
+    val tmp:bar=null
+    if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set())
+  }
+}
diff --git a/test/files/neg/t563.scala b/test/files/neg/t563.scala
index d367e2a..204ad3c 100644
--- a/test/files/neg/t563.scala
+++ b/test/files/neg/t563.scala
@@ -1,6 +1,6 @@
 object Test {
     def map[A,R](a : List[A], f : A => R) : List[R] = a.map(f);
-    
+
     def split(sn : Iterable[List[Option[Int]]]) : Unit =
         for (n <- sn)
             map(n,ptr => Option(ptr.get));
diff --git a/test/files/neg/t5663-badwarneq.check b/test/files/neg/t5663-badwarneq.check
index 242be8d..732e4f4 100644
--- a/test/files/neg/t5663-badwarneq.check
+++ b/test/files/neg/t5663-badwarneq.check
@@ -1,40 +1,42 @@
-t5663-badwarneq.scala:47: error: comparing case class values of types Some[Int] and None.type using `==' will always yield false
+t5663-badwarneq.scala:47: warning: comparing case class values of types Some[Int] and None.type using `==' will always yield false
     println(new Some(1) == None) // Should complain on type, was: spuriously complains on fresh object
                         ^
-t5663-badwarneq.scala:48: error: comparing case class values of types Some[Int] and Thing using `==' will always yield false
+t5663-badwarneq.scala:48: warning: comparing case class values of types Some[Int] and Thing using `==' will always yield false
     println(Some(1) == new Thing(1)) // Should complain on type, was: spuriously complains on fresh object
                     ^
-t5663-badwarneq.scala:56: error: ThingOne and Thingy are unrelated: they will most likely never compare equal
+t5663-badwarneq.scala:56: warning: ThingOne and Thingy are unrelated: they will most likely never compare equal
     println(t1 == t2) // true, but apparently unrelated, a compromise warning
                ^
-t5663-badwarneq.scala:57: error: ThingThree and Thingy are unrelated: they will most likely never compare equal
+t5663-badwarneq.scala:57: warning: ThingThree and Thingy are unrelated: they will most likely never compare equal
     println(t4 == t2) // true, complains because ThingThree is final and Thingy not a subclass, stronger claim than unrelated
                ^
-t5663-badwarneq.scala:60: error: comparing case class values of types ThingTwo and Some[Int] using `==' will always yield false
+t5663-badwarneq.scala:60: warning: comparing case class values of types ThingTwo and Some[Int] using `==' will always yield false
     println(t3 == Some(1)) // false, warn on different cases
                ^
-t5663-badwarneq.scala:61: error: comparing values of types ThingOne and Cousin using `==' will always yield false
+t5663-badwarneq.scala:61: warning: comparing values of types ThingOne and Cousin using `==' will always yield false
     println(t1 == c) // should warn
                ^
-t5663-badwarneq.scala:69: error: comparing case class values of types Simple and SimpleSibling.type using `==' will always yield false
+t5663-badwarneq.scala:69: warning: comparing case class values of types Simple and SimpleSibling.type using `==' will always yield false
     println(new Simple() == SimpleSibling) // like Some(1) == None, but needn't be final case
                          ^
-t5663-badwarneq.scala:72: error: ValueClass1 and Int are unrelated: they will never compare equal
+t5663-badwarneq.scala:72: warning: ValueClass1 and Int are unrelated: they will never compare equal
     println(new ValueClass1(5) == 5) // bad
                                ^
-t5663-badwarneq.scala:74: error: comparing values of types Int and ValueClass1 using `==' will always yield false
+t5663-badwarneq.scala:74: warning: comparing values of types Int and ValueClass1 using `==' will always yield false
     println(5 == new ValueClass1(5)) // bad
               ^
-t5663-badwarneq.scala:78: error: ValueClass2[String] and String are unrelated: they will never compare equal
+t5663-badwarneq.scala:78: warning: ValueClass2[String] and String are unrelated: they will never compare equal
     println(new ValueClass2("abc") == "abc") // bad
                                    ^
-t5663-badwarneq.scala:79: error: ValueClass2[Int] and ValueClass1 are unrelated: they will never compare equal
+t5663-badwarneq.scala:79: warning: ValueClass2[Int] and ValueClass1 are unrelated: they will never compare equal
     println(new ValueClass2(5) == new ValueClass1(5)) // bad - different value classes
                                ^
-t5663-badwarneq.scala:81: error: comparing values of types ValueClass3 and ValueClass2[Int] using `==' will always yield false
+t5663-badwarneq.scala:81: warning: comparing values of types ValueClass3 and ValueClass2[Int] using `==' will always yield false
     println(ValueClass3(5) == new ValueClass2(5)) // bad
                            ^
-t5663-badwarneq.scala:82: error: comparing values of types ValueClass3 and Int using `==' will always yield false
+t5663-badwarneq.scala:82: warning: comparing values of types ValueClass3 and Int using `==' will always yield false
     println(ValueClass3(5) == 5) // bad
                            ^
-13 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+13 warnings found
+one error found
diff --git a/test/files/neg/t5689.check b/test/files/neg/t5689.check
index e497e3b..7d4f7fb 100644
--- a/test/files/neg/t5689.check
+++ b/test/files/neg/t5689.check
@@ -1,6 +1,7 @@
 t5689.scala:4: error: macro implementation has incompatible shape:
- required: (c: scala.reflect.macros.Context)(i: c.Expr[Double]): c.Expr[String]
- found   : (c: scala.reflect.macros.Context)(i: c.Expr[Double]): c.Expr[Int]
+ required: (c: scala.reflect.macros.blackbox.Context)(i: c.Expr[Double]): c.Expr[String]
+ or      : (c: scala.reflect.macros.blackbox.Context)(i: c.Tree): c.Tree
+ found   : (c: scala.reflect.macros.blackbox.Context)(i: c.Expr[Double]): c.Expr[Int]
 type mismatch for return type: c.Expr[Int] does not conform to c.Expr[String]
   def returnsString(i: Double): String = macro returnsIntImpl
                                                ^
diff --git a/test/files/neg/t5689.scala b/test/files/neg/t5689.scala
index 3266039..d757a55 100644
--- a/test/files/neg/t5689.scala
+++ b/test/files/neg/t5689.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def returnsString(i: Double): String = macro returnsIntImpl
diff --git a/test/files/neg/t5696.check b/test/files/neg/t5696.check
index 72b7781..e0fb61b 100644
--- a/test/files/neg/t5696.check
+++ b/test/files/neg/t5696.check
@@ -15,5 +15,5 @@ t5696.scala:38: error: too many argument lists for constructor invocation
       ^
 t5696.scala:46: error: too many argument lists for constructor invocation
   object x extends G(1)(2) {}
-           ^
+                   ^
 6 errors found
diff --git a/test/files/neg/t5702-neg-bad-and-wild.check b/test/files/neg/t5702-neg-bad-and-wild.check
index eae81ad..a52136d 100644
--- a/test/files/neg/t5702-neg-bad-and-wild.check
+++ b/test/files/neg/t5702-neg-bad-and-wild.check
@@ -1,4 +1,4 @@
-t5702-neg-bad-and-wild.scala:10: error: bad use of _* (a sequence pattern must be the last pattern)
+t5702-neg-bad-and-wild.scala:10: error: bad simple pattern: bad use of _* (a sequence pattern must be the last pattern)
       case List(1, _*,) => // bad use of _* (a sequence pattern must be the last pattern)
                      ^
 t5702-neg-bad-and-wild.scala:10: error: illegal start of simple pattern
@@ -7,22 +7,22 @@ t5702-neg-bad-and-wild.scala:10: error: illegal start of simple pattern
 t5702-neg-bad-and-wild.scala:12: error: illegal start of simple pattern
       case List(1, _*3,) => // illegal start of simple pattern
                        ^
-t5702-neg-bad-and-wild.scala:14: error: use _* to match a sequence
+t5702-neg-bad-and-wild.scala:14: error: bad simple pattern: use _* to match a sequence
       case List(1, x*) => // use _* to match a sequence
                      ^
-t5702-neg-bad-and-wild.scala:15: error: trailing * is not a valid pattern
+t5702-neg-bad-and-wild.scala:15: error: bad simple pattern: trailing * is not a valid pattern
       case List(x*, 1) => // trailing * is not a valid pattern
                   ^
-t5702-neg-bad-and-wild.scala:16: error: trailing * is not a valid pattern
+t5702-neg-bad-and-wild.scala:16: error: bad simple pattern: trailing * is not a valid pattern
       case (1, x*) => // trailing * is not a valid pattern
                  ^
-t5702-neg-bad-and-wild.scala:17: error: bad use of _* (sequence pattern not allowed)
+t5702-neg-bad-and-wild.scala:17: error: bad simple pattern: bad use of _* (sequence pattern not allowed)
       case (1, x@_*) => // bad use of _* (sequence pattern not allowed)
                    ^
-t5702-neg-bad-and-wild.scala:23: error: bad use of _* (a sequence pattern must be the last pattern)
+t5702-neg-bad-and-wild.scala:23: error: bad simple pattern: bad use of _* (a sequence pattern must be the last pattern)
     val K(ns @ _*, x) = k // bad use of _* (a sequence pattern must be the last pattern)
                  ^
-t5702-neg-bad-and-wild.scala:24: error: bad use of _* (sequence pattern not allowed)
-    val (b, _ * ) = Pair(5,6) // bad use of _* (sequence pattern not allowed)
+t5702-neg-bad-and-wild.scala:24: error: bad simple pattern: bad use of _* (sequence pattern not allowed)
+    val (b, _ * ) = (5,6) // bad use of _* (sequence pattern not allowed)
                 ^
 9 errors found
diff --git a/test/files/neg/t5702-neg-bad-and-wild.scala b/test/files/neg/t5702-neg-bad-and-wild.scala
index 3833a00..aadda37 100644
--- a/test/files/neg/t5702-neg-bad-and-wild.scala
+++ b/test/files/neg/t5702-neg-bad-and-wild.scala
@@ -21,7 +21,7 @@ object Test {
 //gowild.scala:14: error: star patterns must correspond with varargs parameters
     val K(is @ _*) = k
     val K(ns @ _*, x) = k // bad use of _* (a sequence pattern must be the last pattern)
-    val (b, _ * ) = Pair(5,6) // bad use of _* (sequence pattern not allowed)
+    val (b, _ * ) = (5,6) // bad use of _* (sequence pattern not allowed)
 // no longer complains
 //bad-and-wild.scala:15: error: ')' expected but '}' found.
   }
diff --git a/test/files/neg/t5702-neg-bad-xbrace.check b/test/files/neg/t5702-neg-bad-xbrace.check
index d88638a..9240abe 100644
--- a/test/files/neg/t5702-neg-bad-xbrace.check
+++ b/test/files/neg/t5702-neg-bad-xbrace.check
@@ -1,7 +1,7 @@
-t5702-neg-bad-xbrace.scala:19: error: bad brace or paren after _*
+t5702-neg-bad-xbrace.scala:19: error: bad simple pattern: bad brace or paren after _*
           case <year>{_*)}</year> => y
                         ^
-t5702-neg-bad-xbrace.scala:28: error: bad brace or paren after _*
+t5702-neg-bad-xbrace.scala:28: error: bad simple pattern: bad brace or paren after _*
     val <top>{a, z@_*)}</top> = xml
                      ^
 two errors found
diff --git a/test/files/neg/t5702-neg-ugly-xbrace.check b/test/files/neg/t5702-neg-ugly-xbrace.check
index 7d80bbf..cdd2438 100644
--- a/test/files/neg/t5702-neg-ugly-xbrace.check
+++ b/test/files/neg/t5702-neg-ugly-xbrace.check
@@ -1,4 +1,4 @@
-t5702-neg-ugly-xbrace.scala:11: error: bad brace or paren after _*
+t5702-neg-ugly-xbrace.scala:11: error: bad simple pattern: bad brace or paren after _*
     val <top>{a, z@_*)</top> = xml
                      ^
 t5702-neg-ugly-xbrace.scala:12: error: Missing closing brace `}' assumed here
diff --git a/test/files/neg/t5753.check b/test/files/neg/t5753.check
index 76602de..379416c 100644
--- a/test/files/neg/t5753.check
+++ b/test/files/neg/t5753.check
@@ -1,4 +1,5 @@
-Test_2.scala:9: error: macro implementation not found: foo (the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
+Test_2.scala:9: error: macro implementation not found: foo
+(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
  println(foo(42))
             ^
 one error found
diff --git a/test/files/neg/t5753/Impls_Macros_1.scala b/test/files/neg/t5753/Impls_Macros_1.scala
index 1d9c264..9872c69 100644
--- a/test/files/neg/t5753/Impls_Macros_1.scala
+++ b/test/files/neg/t5753/Impls_Macros_1.scala
@@ -1,6 +1,6 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 trait Impls {
-def impl(c: Ctx)(x: c.Expr[Any]) = x
+  def impl(c: Context)(x: c.Expr[Any]) = x
 }
 
diff --git a/test/files/neg/t5753/Test_2.scala b/test/files/neg/t5753/Test_2.scala
index 2369b18..d52ed65 100644
--- a/test/files/neg/t5753/Test_2.scala
+++ b/test/files/neg/t5753/Test_2.scala
@@ -1,7 +1,7 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Macros extends Impls {
- def foo(x: Any) = macro impl
+ def foo(x: Any): Any = macro impl
 }
 
 object Test extends App {
diff --git a/test/files/neg/t576.scala b/test/files/neg/t576.scala
index fa7ee60..fd83217 100644
--- a/test/files/neg/t576.scala
+++ b/test/files/neg/t576.scala
@@ -4,16 +4,16 @@ abstract class BaseListXXX {
   type Node <: BaseNode;
   abstract class BaseNode {
   }
-}  
+}
 trait PriorityTreeXXX extends BaseListXXX {
 	type Node <: BasicTreeNode;
-  
+
   trait BasicTreeNode extends BaseNode {
-    def sibling: Node; 
+    def sibling: Node;
     def insert(dir : Int, node : Node) = {
       if (true) sibling.insert(node);
       //else insert(node);
-      
+
     }
     def insert(node : Node) : Unit  = {}
   }
diff --git a/test/files/neg/t5760-pkgobj-warn.check b/test/files/neg/t5760-pkgobj-warn.check
deleted file mode 100644
index a89398c..0000000
--- a/test/files/neg/t5760-pkgobj-warn.check
+++ /dev/null
@@ -1,4 +0,0 @@
-stalepkg_2.scala:6: error: Foo is already defined as class Foo in package object stalepkg
-  class Foo
-        ^
-one error found
diff --git a/test/files/neg/t5761.check b/test/files/neg/t5761.check
index 89d766f..2d66af2 100644
--- a/test/files/neg/t5761.check
+++ b/test/files/neg/t5761.check
@@ -13,4 +13,7 @@ Unspecified value parameter x.
 t5761.scala:13: error: not found: type Tread
   new Tread("sth") { }.run()
       ^
-four errors found
+t5761.scala:13: error: value run is not a member of AnyRef
+  new Tread("sth") { }.run()
+                       ^
+5 errors found
diff --git a/test/files/neg/t5762.check b/test/files/neg/t5762.check
index 1006403..2a2f121 100644
--- a/test/files/neg/t5762.check
+++ b/test/files/neg/t5762.check
@@ -1,13 +1,15 @@
-t5762.scala:6: error: non-variable type argument Int in type pattern D[Int] is unchecked since it is eliminated by erasure
+t5762.scala:6: warning: non-variable type argument Int in type pattern D[Int] is unchecked since it is eliminated by erasure
     case _: D[Int]    if bippy => 1
             ^
-t5762.scala:7: error: non-variable type argument String in type pattern D[String] is unchecked since it is eliminated by erasure
+t5762.scala:7: warning: non-variable type argument String in type pattern D[String] is unchecked since it is eliminated by erasure
     case _: D[String]          => 2
             ^
-t5762.scala:20: error: non-variable type argument D[Int] in type pattern D[D[Int]] is unchecked since it is eliminated by erasure
+t5762.scala:20: warning: non-variable type argument D[Int] in type pattern D[D[Int]] is unchecked since it is eliminated by erasure
     case _: D[D[Int]]    if bippy => 1
             ^
-t5762.scala:21: error: non-variable type argument D[String] in type pattern D[D[String]] is unchecked since it is eliminated by erasure
+t5762.scala:21: warning: non-variable type argument D[String] in type pattern D[D[String]] is unchecked since it is eliminated by erasure
     case _: D[D[String]]          => 2
             ^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/t5830.check b/test/files/neg/t5830.check
index 726fac2..58c3a1b 100644
--- a/test/files/neg/t5830.check
+++ b/test/files/neg/t5830.check
@@ -1,7 +1,9 @@
-t5830.scala:6: error: unreachable code
+t5830.scala:6: warning: unreachable code
     case 'a' => println("b") // unreachable
                        ^
-t5830.scala:4: error: could not emit switch for @switch annotated match
+t5830.scala:4: warning: could not emit switch for @switch annotated match
   def unreachable(ch: Char) = (ch: @switch) match {
                                     ^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t5845.check b/test/files/neg/t5845.check
deleted file mode 100644
index 8c6100d..0000000
--- a/test/files/neg/t5845.check
+++ /dev/null
@@ -1,7 +0,0 @@
-t5845.scala:9: error: value +++ is not a member of Int
-  println(5 +++ 5)
-            ^
-t5845.scala:15: error: value +++ is not a member of Int
-  println(5 +++ 5)
-            ^
-two errors found
diff --git a/test/files/neg/t588.check b/test/files/neg/t588.check
index f8b5516..ff08f77 100644
--- a/test/files/neg/t588.check
+++ b/test/files/neg/t588.check
@@ -1,13 +1,13 @@
 t588.scala:3: error: double definition:
-method visit:(f: Int => String)Boolean and
-method visit:(f: Int => Unit)Boolean at line 2
+def visit(f: Int => Unit): Boolean at line 2 and
+def visit(f: Int => String): Boolean at line 3
 have same type after erasure: (f: Function1)Boolean
   def visit(f: Int => String): Boolean
       ^
 t588.scala:10: error: double definition:
-method f:(brac: Test.this.TypeB)Unit and
-method f:(node: Test.this.TypeA)Unit at line 9
-have same type after erasure: (brac: Test#TraitA)Unit
+def f(node: Test.this.TypeA): Unit at line 9 and
+def f(brac: Test.this.TypeB): Unit at line 10
+have same type after erasure: (node: Test#TraitA)Unit
   def f(brac : TypeB) : Unit;
       ^
 two errors found
diff --git a/test/files/neg/t588.scala b/test/files/neg/t588.scala
index 1bc6d26..f309373 100644
--- a/test/files/neg/t588.scala
+++ b/test/files/neg/t588.scala
@@ -1,15 +1,15 @@
 abstract class Test0 {
-  def visit(f: Int => Unit): Boolean  
+  def visit(f: Int => Unit): Boolean
   def visit(f: Int => String): Boolean
 }
 trait Test {
   type TypeA <: TraitA;
   type TypeB <: TypeA with TraitB;
-  
+
   def f(node : TypeA) : Unit;
   def f(brac : TypeB) : Unit;
-  
+
   trait TraitA;
   trait TraitB;
-  
+
 }
diff --git a/test/files/neg/t5903a.check b/test/files/neg/t5903a.check
new file mode 100644
index 0000000..34003b0
--- /dev/null
+++ b/test/files/neg/t5903a.check
@@ -0,0 +1,4 @@
+Test_2.scala:4: error: too many patterns for <$anon: AnyRef> offering (SomeTree.type, SomeTree.type): expected 2, found 3
+    case nq"$x + $y + $z" => println((x, y))
+         ^
+one error found
diff --git a/test/files/neg/t5903a/Macros_1.scala b/test/files/neg/t5903a/Macros_1.scala
new file mode 100644
index 0000000..5d084ce
--- /dev/null
+++ b/test/files/neg/t5903a/Macros_1.scala
@@ -0,0 +1,28 @@
+import scala.reflect.macros.whitebox.Context
+import language.experimental.macros
+
+trait Tree
+case object SomeTree extends Tree
+
+object NewQuasiquotes {
+  implicit class QuasiquoteInterpolation(c: StringContext) {
+    object nq {
+      def unapply(t: Tree): Any = macro QuasiquoteMacros.unapplyImpl
+    }
+  }
+}
+
+object QuasiquoteMacros {
+  def unapplyImpl(c: Context)(t: c.Tree) = {
+    import c.universe._
+    q"""
+      new {
+        def isEmpty = false
+        def get = this
+        def _1 = SomeTree
+        def _2 = SomeTree
+        def unapply(t: Tree) = this
+      }.unapply($t)
+    """
+  }
+}
diff --git a/test/files/neg/t5903a/Test_2.scala b/test/files/neg/t5903a/Test_2.scala
new file mode 100644
index 0000000..4d78dfb
--- /dev/null
+++ b/test/files/neg/t5903a/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+  import NewQuasiquotes._
+  SomeTree match {
+    case nq"$x + $y + $z" => println((x, y))
+  }
+}
diff --git a/test/files/neg/t5903b.check b/test/files/neg/t5903b.check
new file mode 100644
index 0000000..e7637d3
--- /dev/null
+++ b/test/files/neg/t5903b.check
@@ -0,0 +1,6 @@
+Test_2.scala:4: error: type mismatch;
+ found   : Int
+ required: String
+    case t"$x" => println(x)
+         ^
+one error found
diff --git a/test/files/neg/t5903b/Macros_1.scala b/test/files/neg/t5903b/Macros_1.scala
new file mode 100644
index 0000000..6ce49c0
--- /dev/null
+++ b/test/files/neg/t5903b/Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.blackbox.Context
+import language.experimental.macros
+
+object Interpolation {
+  implicit class TestInterpolation(c: StringContext) {
+    object t {
+      def unapply[T](x: T): Any = macro Macros.unapplyImpl[T]
+    }
+  }
+}
+
+object Macros {
+  def unapplyImpl[T: c.WeakTypeTag](c: Context)(x: c.Tree) = {
+    import c.universe._
+    q"""
+      new {
+        def isEmpty = false
+        def get = "2"
+        def unapply(x: String) = this
+      }.unapply($x)
+    """
+  }
+}
diff --git a/test/files/neg/t5903b/Test_2.scala b/test/files/neg/t5903b/Test_2.scala
new file mode 100644
index 0000000..0f6f80d
--- /dev/null
+++ b/test/files/neg/t5903b/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+  import Interpolation._
+  2 match {
+    case t"$x" => println(x)
+  }
+}
diff --git a/test/files/neg/t5903c.check b/test/files/neg/t5903c.check
new file mode 100644
index 0000000..05bd775
--- /dev/null
+++ b/test/files/neg/t5903c.check
@@ -0,0 +1,4 @@
+Test_2.scala:4: error: String is not supported
+    case t"$x" => println(x)
+         ^
+one error found
diff --git a/test/files/neg/t5903c/Macros_1.scala b/test/files/neg/t5903c/Macros_1.scala
new file mode 100644
index 0000000..4792f00
--- /dev/null
+++ b/test/files/neg/t5903c/Macros_1.scala
@@ -0,0 +1,26 @@
+import scala.reflect.macros.blackbox.Context
+import language.experimental.macros
+
+object Interpolation {
+  implicit class TestInterpolation(c: StringContext) {
+    object t {
+      def unapply[T](x: T): Any = macro Macros.unapplyImpl[T]
+    }
+  }
+}
+
+object Macros {
+  def unapplyImpl[T: c.WeakTypeTag](c: Context)(x: c.Tree) = {
+    import c.universe._
+    if (!(c.weakTypeOf[Int] =:= c.weakTypeOf[T])) c.abort(c.enclosingPosition, s"${c.weakTypeOf[T]} is not supported")
+    else {
+      q"""
+        new {
+          def isEmpty = false
+          def get = 2
+          def unapply(x: Int) = this
+        }.unapply($x)
+      """
+    }
+  }
+}
diff --git a/test/files/neg/t5903c/Test_2.scala b/test/files/neg/t5903c/Test_2.scala
new file mode 100644
index 0000000..a1fd31d
--- /dev/null
+++ b/test/files/neg/t5903c/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+  import Interpolation._
+  "2" match {
+    case t"$x" => println(x)
+  }
+}
diff --git a/test/files/neg/t5903d.check b/test/files/neg/t5903d.check
new file mode 100644
index 0000000..54a91a7
--- /dev/null
+++ b/test/files/neg/t5903d.check
@@ -0,0 +1,4 @@
+Test_2.scala:4: error: extractor macros can only be whitebox
+    case t"$x" => println(x)
+         ^
+one error found
diff --git a/test/files/neg/t5903d/Macros_1.scala b/test/files/neg/t5903d/Macros_1.scala
new file mode 100644
index 0000000..3500c2a
--- /dev/null
+++ b/test/files/neg/t5903d/Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.blackbox.Context
+import language.experimental.macros
+
+object Interpolation {
+  implicit class TestInterpolation(c: StringContext) {
+    object t {
+      def unapply(x: Int): Any = macro Macros.unapplyImpl
+    }
+  }
+}
+
+object Macros {
+  def unapplyImpl(c: Context)(x: c.Tree) = {
+    import c.universe._
+    q"""
+      class Match(x: Int) {
+        def isEmpty = false
+        def get = x
+      }
+      new { def unapply(x: Int) = new Match(x) }.unapply($x)
+    """
+  }
+}
diff --git a/test/files/neg/t5903d/Test_2.scala b/test/files/neg/t5903d/Test_2.scala
new file mode 100644
index 0000000..95c717a
--- /dev/null
+++ b/test/files/neg/t5903d/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+  import Interpolation._
+  42 match {
+    case t"$x" => println(x)
+  }
+}
diff --git a/test/files/neg/t5903e.check b/test/files/neg/t5903e.check
new file mode 100644
index 0000000..3bdeb09
--- /dev/null
+++ b/test/files/neg/t5903e.check
@@ -0,0 +1,4 @@
+Test_2.scala:4: error: value class may not be a member of another class
+    case t"$x" => println(x)
+         ^
+one error found
diff --git a/test/files/neg/t5903e/Macros_1.scala b/test/files/neg/t5903e/Macros_1.scala
new file mode 100644
index 0000000..a64ff7e
--- /dev/null
+++ b/test/files/neg/t5903e/Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.whitebox.Context
+import language.experimental.macros
+
+object Interpolation {
+  implicit class TestInterpolation(c: StringContext) {
+    object t {
+      def unapply(x: Int): Any = macro Macros.unapplyImpl
+    }
+  }
+}
+
+object Macros {
+  def unapplyImpl(c: Context)(x: c.Tree) = {
+    import c.universe._
+    q"""
+      new {
+        class Match(x: Int) extends AnyVal {
+          def isEmpty = false
+          def get = x
+        }
+        def unapply(x: Int) = new Match(x)
+      }.unapply($x)
+    """
+  }
+}
diff --git a/test/files/neg/t5903e/Test_2.scala b/test/files/neg/t5903e/Test_2.scala
new file mode 100644
index 0000000..d69d472
--- /dev/null
+++ b/test/files/neg/t5903e/Test_2.scala
@@ -0,0 +1,6 @@
+class C {
+  import Interpolation._
+  42 match {
+    case t"$x" => println(x)
+  }
+}
diff --git a/test/files/neg/t591.scala b/test/files/neg/t591.scala
index 5f2397e..0f0b023 100644
--- a/test/files/neg/t591.scala
+++ b/test/files/neg/t591.scala
@@ -1,17 +1,17 @@
 abstract class BaseList {
   type Node <: BaseNode;
-  
-  
+
+
   abstract class BaseNode {
     protected def self : Node;
     private[BaseList] def self00 = self;
     def dirty : Unit = {}
     def replaceWith(node : Node) = {}
   }
-  
+
   implicit def baseNode2Node(bnode : BaseNode): Node = bnode.self00;
 
- 
+
 }
 
 
@@ -20,22 +20,22 @@ trait BaseFlow extends BaseList {
   type Flow <: FlowBase;
   type Output <: OutputBase;
   type  Input <:  InputBase;
-  
+
   abstract class FlowBase {
-    
+
   }
   trait OutputBase extends FlowBase {
-    
+
   }
   trait  InputBase extends FlowBase {
-    
+
   }
-  
+
   trait BFNode extends BaseNode {
     private var  input : Input  = _;
     private var output : Output = _;
-    
+
     def input_=(in : Input) = {}
-    
+
   }
 }
diff --git a/test/files/neg/t5956.check b/test/files/neg/t5956.check
index 6641dac..f5ae42c 100644
--- a/test/files/neg/t5956.check
+++ b/test/files/neg/t5956.check
@@ -1,20 +1,7 @@
-t5956.scala:1: warning: case classes without a parameter list have been deprecated;
-use either case objects or case classes with `()' as parameter list.
-object O { case class C[T]; class C }
-                          ^
-t5956.scala:2: warning: case classes without a parameter list have been deprecated;
-use either case objects or case classes with `()' as parameter list.
-object T { case class C[T]; case class C }
-                          ^
-t5956.scala:2: warning: case classes without a parameter list have been deprecated;
-use either case objects or case classes with `()' as parameter list.
-object T { case class C[T]; case class C }
-                                        ^
 t5956.scala:1: error: C is already defined as case class C
-object O { case class C[T]; class C }
-                                  ^
+object O { case class C[T](); class C() }
+                                    ^
 t5956.scala:2: error: C is already defined as case class C
-object T { case class C[T]; case class C }
-                                       ^
-three warnings found
+object T { case class C[T](); case class C() }
+                                         ^
 two errors found
diff --git a/test/files/neg/t5956.scala b/test/files/neg/t5956.scala
index d985fa9..3cc10f3 100644
--- a/test/files/neg/t5956.scala
+++ b/test/files/neg/t5956.scala
@@ -1,2 +1,2 @@
-object O { case class C[T]; class C }
-object T { case class C[T]; case class C }
+object O { case class C[T](); class C() }
+object T { case class C[T](); case class C() }
diff --git a/test/files/neg/t5969.scala b/test/files/neg/t5969.scala
index 62f87fd..d010cac 100644
--- a/test/files/neg/t5969.scala
+++ b/test/files/neg/t5969.scala
@@ -4,7 +4,7 @@ class A {
   def f(x: Any) = x
   def g(x: C1): String = "A"
   def g(x: C2): String = "B"
-  
+
   def crash() = f(List[String]() flatMap { x =>
     if (false) List(g(x)) else List[C1]() map g
   })
diff --git a/test/files/neg/t6011.check b/test/files/neg/t6011.check
index 5b5a861..cb7f189 100644
--- a/test/files/neg/t6011.check
+++ b/test/files/neg/t6011.check
@@ -1,10 +1,12 @@
-t6011.scala:4: error: unreachable code
+t6011.scala:4: warning: unreachable code
     case 'a' | 'c' => 1 // unreachable
                       ^
-t6011.scala:10: error: unreachable code
+t6011.scala:10: warning: unreachable code
     case 'b' | 'a'  => 1 // unreachable
                        ^
-t6011.scala:8: error: could not emit switch for @switch annotated match
+t6011.scala:8: warning: could not emit switch for @switch annotated match
   def f2(ch: Char): Any = (ch: @annotation.switch) match {
                                 ^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/t6048.check b/test/files/neg/t6048.check
index 5bdf2ec..f8eddf5 100644
--- a/test/files/neg/t6048.check
+++ b/test/files/neg/t6048.check
@@ -1,13 +1,18 @@
-t6048.scala:3: error: unreachable code
+t6048.scala:3: warning: unreachable code
     case _ if false => x // unreachable
                        ^
-t6048.scala:8: error: unreachable code
+t6048.scala:8: warning: unreachable code
     case _ if false => x // unreachable
                        ^
-t6048.scala:13: error: patterns after a variable pattern cannot match (SLS 8.1.1)
+t6048.scala:13: warning: patterns after a variable pattern cannot match (SLS 8.1.1)
     case _ => x
          ^
-t6048.scala:14: error: unreachable code due to variable pattern on line 13
+t6048.scala:14: warning: unreachable code due to variable pattern on line 13
     case 5 if true  => x // unreachable
                        ^
-four errors found
+t6048.scala:14: warning: unreachable code
+    case 5 if true  => x // unreachable
+                       ^
+error: No warnings can be incurred under -Xfatal-warnings.
+5 warnings found
+one error found
diff --git a/test/files/neg/t608.scala b/test/files/neg/t608.scala
index 1f12764..34dc4c0 100644
--- a/test/files/neg/t608.scala
+++ b/test/files/neg/t608.scala
@@ -2,7 +2,7 @@ trait CrashDueToTypeError {
   def id[a](x :a) :a = x
 
   trait Bifunctor {
-    type a; //   content 
+    type a; //   content
     type s <: Bifunctor
 
     // uncomment        this-vvvvvvvvvvvvvvvvvvvvvvvvvvvv, and it compiles
diff --git a/test/files/neg/t6083.check b/test/files/neg/t6083.check
new file mode 100644
index 0000000..c9b5ba0
--- /dev/null
+++ b/test/files/neg/t6083.check
@@ -0,0 +1,10 @@
+t6083.scala:6: warning: Implementation restriction: subclassing Classfile does not
+make your annotation visible at runtime.  If that is what
+you want, you must write the annotation class in Java.
+class annot(value: String) extends annotation.ClassfileAnnotation
+      ^
+t6083.scala:7: error: annotation argument needs to be a constant; found: conv.i2s(101)
+@annot(101) class C
+       ^
+one warning found
+one error found
diff --git a/test/files/neg/t6083.scala b/test/files/neg/t6083.scala
new file mode 100644
index 0000000..1de18e6
--- /dev/null
+++ b/test/files/neg/t6083.scala
@@ -0,0 +1,7 @@
+object conv {
+  implicit def i2s(i: Int): String = ""
+}
+import conv._
+
+class annot(value: String) extends annotation.ClassfileAnnotation
+@annot(101) class C
diff --git a/test/files/neg/t6120.check b/test/files/neg/t6120.check
new file mode 100644
index 0000000..a7d17e2
--- /dev/null
+++ b/test/files/neg/t6120.check
@@ -0,0 +1,20 @@
+t6120.scala:5: warning: postfix operator bippy should be enabled
+by making the implicit value scala.language.postfixOps visible.
+This can be achieved by adding the import clause 'import scala.language.postfixOps'
+or by setting the compiler option -language:postfixOps.
+See the Scala docs for value scala.language.postfixOps for a discussion
+why the feature should be explicitly enabled.
+  def f = null == null bippy
+                       ^
+t6120.scala:5: warning: method bippy in class BooleanOps is deprecated: bobo
+  def f = null == null bippy
+                       ^
+t6120.scala:5: warning: comparing values of types Null and Null using `==' will always yield true
+  def f = null == null bippy
+               ^
+t6120.scala:6: warning: method bippy in class BooleanOps is deprecated: bobo
+  def g = true.bippy
+               ^
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/t6120.flags b/test/files/neg/t6120.flags
new file mode 100644
index 0000000..04d7c7d
--- /dev/null
+++ b/test/files/neg/t6120.flags
@@ -0,0 +1 @@
+-feature -deprecation -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t6120.scala b/test/files/neg/t6120.scala
new file mode 100644
index 0000000..425f09d
--- /dev/null
+++ b/test/files/neg/t6120.scala
@@ -0,0 +1,7 @@
+class A {
+  implicit class BooleanOps(val b: Boolean) {
+    @deprecated("bobo", "2.11.0") def bippy() = 5
+  }
+  def f = null == null bippy
+  def g = true.bippy
+}
diff --git a/test/files/neg/t6123-explaintypes-macros.check b/test/files/neg/t6123-explaintypes-macros.check
new file mode 100644
index 0000000..2c86f3c
--- /dev/null
+++ b/test/files/neg/t6123-explaintypes-macros.check
@@ -0,0 +1,10 @@
+c.universe.Expr[Any]* <: c.universe.Expr[String]*?
+false
+BadMac_2.scala:6: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.blackbox.Context)(format: c.Expr[String], params: c.Expr[Any]*): c.Expr[Unit]
+ or      : (c: scala.reflect.macros.blackbox.Context)(format: c.Tree, params: Tree*): c.Tree
+ found   : (c: scala.reflect.macros.blackbox.Context)(format: c.Expr[String], params: c.Expr[String]*): c.Expr[Unit]
+type mismatch for parameter params: c.Expr[Any]* does not conform to c.Expr[String]*
+  def printf(format: String, params: Any*): Unit = macro printf_impl
+                                                         ^
+one error found
diff --git a/test/files/neg/t6123-explaintypes-macros/BadMac_2.flags b/test/files/neg/t6123-explaintypes-macros/BadMac_2.flags
new file mode 100644
index 0000000..b36707c
--- /dev/null
+++ b/test/files/neg/t6123-explaintypes-macros/BadMac_2.flags
@@ -0,0 +1 @@
+-explaintypes
diff --git a/test/files/neg/t6123-explaintypes-macros/BadMac_2.scala b/test/files/neg/t6123-explaintypes-macros/BadMac_2.scala
new file mode 100644
index 0000000..75ded4e
--- /dev/null
+++ b/test/files/neg/t6123-explaintypes-macros/BadMac_2.scala
@@ -0,0 +1,8 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+// explain some macro types to me
+object BadMac {
+  def printf(format: String, params: Any*): Unit = macro printf_impl
+  def printf_impl(c: Context)(format: c.Expr[String], params: c.Expr[String]*): c.Expr[Unit] = ???
+}
diff --git a/test/files/neg/t6123-explaintypes-macros/Macros.flags b/test/files/neg/t6123-explaintypes-macros/Macros.flags
new file mode 100644
index 0000000..b36707c
--- /dev/null
+++ b/test/files/neg/t6123-explaintypes-macros/Macros.flags
@@ -0,0 +1 @@
+-explaintypes
diff --git a/test/files/neg/t6123-explaintypes-macros/Macros.scala b/test/files/neg/t6123-explaintypes-macros/Macros.scala
new file mode 100644
index 0000000..f2238b3
--- /dev/null
+++ b/test/files/neg/t6123-explaintypes-macros/Macros.scala
@@ -0,0 +1,10 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def printf(format: String, params: Any*): Unit = macro printf_impl
+  def printf_impl(c: Context)(format: c.Expr[String], params: c.Expr[Any]*): c.Expr[Unit] = ???
+}
+
+// something trivial to run
+object Test extends App
diff --git a/test/files/neg/t6162-inheritance.check b/test/files/neg/t6162-inheritance.check
index a7d3cc3..13c7803 100644
--- a/test/files/neg/t6162-inheritance.check
+++ b/test/files/neg/t6162-inheritance.check
@@ -1,10 +1,18 @@
-t6162-inheritance.scala:6: error: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version.
+usage.scala:3: warning: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version.
 class SubFoo extends Foo
                      ^
-t6162-inheritance.scala:11: error: inheritance from trait T in package t6126 is deprecated
+usage.scala:5: warning: inheritance from trait T in package t6126 is deprecated
 object SubT extends T
                     ^
-t6162-inheritance.scala:17: error: inheritance from trait S in package t6126 is deprecated
+usage.scala:8: warning: inheritance from trait S in package t6126 is deprecated
   new S {
       ^
-three errors found
+usage.scala:3: warning: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version.
+class SubFoo extends Foo
+             ^
+usage.scala:5: warning: inheritance from trait T in package t6126 is deprecated
+object SubT extends T
+            ^
+error: No warnings can be incurred under -Xfatal-warnings.
+5 warnings found
+one error found
diff --git a/test/files/neg/t6162-inheritance.scala b/test/files/neg/t6162-inheritance.scala
deleted file mode 100644
index 7b47b92..0000000
--- a/test/files/neg/t6162-inheritance.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-package scala.t6126
-
-@deprecatedInheritance("`Foo` will be made final in a future version.", "2.10.0")
-class Foo
-
-class SubFoo extends Foo
-
-@deprecatedInheritance()
-trait T
-
-object SubT extends T
-
-@deprecatedInheritance()
-trait S
-
-object O {
-  new S {
-  }
-}
diff --git a/test/files/neg/t6162-inheritance/defn.scala b/test/files/neg/t6162-inheritance/defn.scala
new file mode 100644
index 0000000..bb582d2
--- /dev/null
+++ b/test/files/neg/t6162-inheritance/defn.scala
@@ -0,0 +1,10 @@
+package scala.t6126
+
+@deprecatedInheritance("`Foo` will be made final in a future version.", "2.10.0")
+class Foo
+
+@deprecatedInheritance()
+trait T
+
+@deprecatedInheritance()
+trait S
diff --git a/test/files/neg/t6162-inheritance/usage.scala b/test/files/neg/t6162-inheritance/usage.scala
new file mode 100644
index 0000000..097e4f5
--- /dev/null
+++ b/test/files/neg/t6162-inheritance/usage.scala
@@ -0,0 +1,10 @@
+package scala.t6126
+
+class SubFoo extends Foo
+
+object SubT extends T
+
+object O {
+  new S {
+  }
+}
diff --git a/test/files/neg/t6162-overriding.check b/test/files/neg/t6162-overriding.check
index e774888..6bff75d 100644
--- a/test/files/neg/t6162-overriding.check
+++ b/test/files/neg/t6162-overriding.check
@@ -1,7 +1,9 @@
-t6162-overriding.scala:14: error: overriding method bar in class Bar is deprecated: `bar` will be made private in a future version.
+t6162-overriding.scala:14: warning: overriding method bar in class Bar is deprecated: `bar` will be made private in a future version.
   override def bar = 43
                ^
-t6162-overriding.scala:15: error: overriding method baz in class Bar is deprecated
+t6162-overriding.scala:15: warning: overriding method baz in class Bar is deprecated
   override def baz = 43
                ^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t6231.check b/test/files/neg/t6231.check
deleted file mode 100644
index b27961d..0000000
--- a/test/files/neg/t6231.check
+++ /dev/null
@@ -1,6 +0,0 @@
-t6231.scala:4: error: Implementation restriction: local trait Bug$X$1 is unable to automatically capture the
-free variable value ev$1 on behalf of anonymous class anonfun$qux$1. You can manually assign it to a val inside the trait,
-and refer that that val in anonymous class anonfun$qux$1. For more details, see SI-6231.
-      def qux = { () => ev }
-                        ^
-one error found
diff --git a/test/files/neg/t6260-named.check b/test/files/neg/t6260-named.check
new file mode 100644
index 0000000..ed6ab5e
--- /dev/null
+++ b/test/files/neg/t6260-named.check
@@ -0,0 +1,13 @@
+t6260-named.scala:12: error: bridge generated for member method apply: (a: C[Any])C[Any] in object O
+which overrides method apply: (v1: T1)R in trait Function1
+clashes with definition of the member itself;
+both have erased type (v1: Object)Object
+    def apply(a: C[Any]) = a
+        ^
+t6260-named.scala:14: error: bridge generated for member method apply: (a: C[Any])C[Any] in class X
+which overrides method apply: (a: A)A in trait T
+clashes with definition of the member itself;
+both have erased type (a: Object)Object
+  class X extends T[C[Any]] { def apply(a: C[Any]) = a }
+                                  ^
+two errors found
diff --git a/test/files/neg/t6260-named.scala b/test/files/neg/t6260-named.scala
new file mode 100644
index 0000000..7cd9ce8
--- /dev/null
+++ b/test/files/neg/t6260-named.scala
@@ -0,0 +1,15 @@
+class C[A](private val a: Any) extends AnyVal
+trait T[A] {
+  def apply(a: A): A
+}
+
+object Test {
+  (x: C[Any]) => {println(s"f($x)"); x} // okay
+  new T[C[Any]] { def apply(a: C[Any]) = a } // okay
+
+  // we can't rename the specific apply method to avoid the clash
+  object O extends Function1[C[Any], C[Any]] {
+    def apply(a: C[Any]) = a
+  }
+  class X extends T[C[Any]] { def apply(a: C[Any]) = a }
+}
diff --git a/test/files/neg/t6260.check b/test/files/neg/t6260.check
deleted file mode 100644
index 46e9bd1..0000000
--- a/test/files/neg/t6260.check
+++ /dev/null
@@ -1,13 +0,0 @@
-t6260.scala:3: error: bridge generated for member method apply: (bx: Box[X])Box[Y] in anonymous class $anonfun
-which overrides method apply: (v1: T1)R in trait Function1
-clashes with definition of the member itself;
-both have erased type (v1: Object)Object
-    ((bx: Box[X]) => new Box(f(bx.x)))(this)
-                  ^
-t6260.scala:8: error: bridge generated for member method apply: (bx: Box[X])Box[Y] in anonymous class $anonfun
-which overrides method apply: (v1: T1)R in trait Function1
-clashes with definition of the member itself;
-both have erased type (v1: Object)Object
-    ((bx: Box[X]) => new Box(f(bx.x)))(self)
-                  ^
-two errors found
diff --git a/test/files/neg/t6260c.check b/test/files/neg/t6260c.check
new file mode 100644
index 0000000..cbbcfd1
--- /dev/null
+++ b/test/files/neg/t6260c.check
@@ -0,0 +1,7 @@
+t6260c.scala:4: error: bridge generated for member method f: ()Option[A] in class Bar1
+which overrides method f: ()A in class Foo1
+clashes with definition of the member itself;
+both have erased type ()Object
+         class Bar1[A] extends Foo1[Option[A]] { def f(): Option[A] = ??? }
+                                                     ^
+one error found
diff --git a/test/files/neg/t6260c.scala b/test/files/neg/t6260c.scala
new file mode 100644
index 0000000..02bf152
--- /dev/null
+++ b/test/files/neg/t6260c.scala
@@ -0,0 +1,4 @@
+final class Option[+A](val value: A) extends AnyVal
+
+abstract class Foo1[A]                         { def f(): A }
+         class Bar1[A] extends Foo1[Option[A]] { def f(): Option[A] = ??? }
diff --git a/test/files/neg/t6264.check b/test/files/neg/t6264.check
index 438be4c..c0975a8 100644
--- a/test/files/neg/t6264.check
+++ b/test/files/neg/t6264.check
@@ -1,4 +1,6 @@
-t6264.scala:3: error: non-variable type argument Tuple1[_] in type Tuple2[_, Tuple1[_]] is unchecked since it is eliminated by erasure
+t6264.scala:3: warning: non-variable type argument Tuple1[_] in type Tuple2[_, Tuple1[_]] is unchecked since it is eliminated by erasure
     x.isInstanceOf[Tuple2[_, Tuple1[_]]]
                   ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
 one error found
diff --git a/test/files/neg/t6276.check b/test/files/neg/t6276.check
index 0b3dfa5..f275de9 100644
--- a/test/files/neg/t6276.check
+++ b/test/files/neg/t6276.check
@@ -1,19 +1,21 @@
-t6276.scala:4: error: method a in class C does nothing other than call itself recursively
+t6276.scala:4: warning: method a in class C does nothing other than call itself recursively
       def a: Any = a // warn
                    ^
-t6276.scala:5: error: value b in class C does nothing other than call itself recursively
+t6276.scala:5: warning: value b in class C does nothing other than call itself recursively
       val b: Any = b // warn
                    ^
-t6276.scala:7: error: method c in class C does nothing other than call itself recursively
+t6276.scala:7: warning: method c in class C does nothing other than call itself recursively
       def c: Any = this.c // warn
                         ^
-t6276.scala:8: error: method d in class C does nothing other than call itself recursively
+t6276.scala:8: warning: method d in class C does nothing other than call itself recursively
       def d: Any = C.this.d // warn
                           ^
-t6276.scala:13: error: method a does nothing other than call itself recursively
+t6276.scala:13: warning: method a does nothing other than call itself recursively
       def a: Any = a // warn
                    ^
-t6276.scala:22: error: method a does nothing other than call itself recursively
+t6276.scala:22: warning: method a does nothing other than call itself recursively
       def a = a // warn
               ^
-6 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/neg/t6289.check b/test/files/neg/t6289.check
new file mode 100644
index 0000000..f6f43ca
--- /dev/null
+++ b/test/files/neg/t6289.check
@@ -0,0 +1,10 @@
+#partest java6
+t6289/J.java:2: method does not override or implement a method from a supertype
+  @Override public void foo() { }
+  ^
+1 error
+#partest java7
+t6289/J.java:2: error: method does not override or implement a method from a supertype
+  @Override public void foo() { }
+  ^
+1 error
diff --git a/test/files/neg/case-collision.flags b/test/files/neg/t6289.flags
similarity index 100%
copy from test/files/neg/case-collision.flags
copy to test/files/neg/t6289.flags
diff --git a/test/files/neg/t6289/J.java b/test/files/neg/t6289/J.java
new file mode 100644
index 0000000..83f50c9
--- /dev/null
+++ b/test/files/neg/t6289/J.java
@@ -0,0 +1,5 @@
+public class J {
+  @Override public void foo() { }
+
+  public void bar() { foo(); }
+}
diff --git a/test/files/neg/t6289/SUT_5.scala b/test/files/neg/t6289/SUT_5.scala
new file mode 100644
index 0000000..0a99635
--- /dev/null
+++ b/test/files/neg/t6289/SUT_5.scala
@@ -0,0 +1,5 @@
+
+/** The System Under Test.
+ *  We bail on the earlier round that generates the first error.
+ */
+class SUT extends J
diff --git a/test/files/neg/t6323a.check b/test/files/neg/t6323a.check
index 4d682e5..261a60e 100644
--- a/test/files/neg/t6323a.check
+++ b/test/files/neg/t6323a.check
@@ -5,8 +5,8 @@ t6323a.scala:11: materializing requested reflect.runtime.universe.type.TypeTag[T
       val value = u.typeOf[Test]
                           ^
 t6323a.scala:11: `package`.this.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe) is not a valid implicit value for reflect.runtime.universe.TypeTag[Test] because:
-failed to typecheck the materialized tag: 
-cannot create a TypeTag referring to local class Test.Test: use WeakTypeTag instead
+failed to typecheck the materialized tag:
+cannot create a TypeTag referring to class Test.Test local to the reifee: use WeakTypeTag instead
       val value = u.typeOf[Test]
                           ^
 t6323a.scala:11: error: No TypeTag available for Test
diff --git a/test/files/neg/t6355a.check b/test/files/neg/t6355a.check
new file mode 100644
index 0000000..5768d31
--- /dev/null
+++ b/test/files/neg/t6355a.check
@@ -0,0 +1,7 @@
+t6355a.scala:12: error: implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)
+  def applyDynamic(name: String)(x: Int): Int = 2
+      ^
+t6355a.scala:18: error: implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)
+  def applyDynamic[T1, T2](name: String)(x: String, y: T1, z: T2): Int = 3
+      ^
+two errors found
diff --git a/test/files/neg/t6355a.scala b/test/files/neg/t6355a.scala
new file mode 100644
index 0000000..0500ed0
--- /dev/null
+++ b/test/files/neg/t6355a.scala
@@ -0,0 +1,19 @@
+package foo
+
+import scala.language.dynamics
+
+class DoesntExtendDynamic {
+  def applyDynamic(name: String)(s: String): Int = 1
+  def applyDynamic(name: String)(x: Int): Int = 2
+}
+
+class A extends Dynamic {
+  def applyDynamic(name: String)(s: String): Int = 1
+  def applyDynamic(name: String)(x: Int): Int = 2
+}
+
+class B extends Dynamic {
+  def applyDynamic[T1](name: String)(x: T1): Int = 1
+  def applyDynamic[T1, T2](name: String)(x: T1, y: T2): Int = 2
+  def applyDynamic[T1, T2](name: String)(x: String, y: T1, z: T2): Int = 3
+}
diff --git a/test/files/neg/t6355b.check b/test/files/neg/t6355b.check
new file mode 100644
index 0000000..f827f07
--- /dev/null
+++ b/test/files/neg/t6355b.check
@@ -0,0 +1,11 @@
+t6355b.scala:14: error: value applyDynamic is not a member of A
+error after rewriting to x.<applyDynamic: error>("bippy")
+possible cause: maybe a wrong Dynamic method signature?
+    println(x.bippy(42))
+            ^
+t6355b.scala:15: error: value applyDynamic is not a member of A
+error after rewriting to x.<applyDynamic: error>("bippy")
+possible cause: maybe a wrong Dynamic method signature?
+    println(x.bippy("42"))
+            ^
+two errors found
diff --git a/test/files/neg/t6355b.scala b/test/files/neg/t6355b.scala
new file mode 100644
index 0000000..5f3c97c
--- /dev/null
+++ b/test/files/neg/t6355b.scala
@@ -0,0 +1,17 @@
+import scala.language.dynamics
+
+class A extends Dynamic {
+  def selectDynamic(method: String): B = new B(method)
+}
+class B(method: String) {
+  def apply(x: Int) = s"$method(x: Int) called with x = $x"
+  def apply(x: String) = s"""$method(x: String) called with x = "$x""""
+}
+
+object Test {
+  def main(args: Array[String]): Unit = {
+    val x = new A
+    println(x.bippy(42))
+    println(x.bippy("42"))
+  }
+}
diff --git a/test/files/neg/t6375.check b/test/files/neg/t6375.check
new file mode 100644
index 0000000..89d7d80
--- /dev/null
+++ b/test/files/neg/t6375.check
@@ -0,0 +1,27 @@
+t6375.scala:6: warning: no valid targets for annotation on value x1 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @getter)
+  @Bippy           val x1: Int    // warn
+   ^
+t6375.scala:7: warning: no valid targets for annotation on value x2 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.field @getter)
+  @(Bippy @field)  val x2: Int    // warn
+   ^
+t6375.scala:9: warning: no valid targets for annotation on value x4 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.setter @getter)
+  @(Bippy @setter) val x4: Int    // warn
+   ^
+t6375.scala:10: warning: no valid targets for annotation on value x5 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.param @getter)
+  @(Bippy @param)  val x5: Int    // warn
+   ^
+t6375.scala:20: warning: no valid targets for annotation on value q1 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.getter @field)
+  @(Bippy @getter) private[this] val q1: Int = 1 // warn
+   ^
+t6375.scala:40: warning: no valid targets for annotation on value p2 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.getter @param)
+  @(Bippy @getter) p2: Int,   // warn
+   ^
+t6375.scala:41: warning: no valid targets for annotation on value p3 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.setter @param)
+  @(Bippy @setter) p3: Int,   // warn
+   ^
+t6375.scala:42: warning: no valid targets for annotation on value p4 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.field @param)
+  @(Bippy @field) p4: Int     // warn
+   ^
+error: No warnings can be incurred under -Xfatal-warnings.
+8 warnings found
+one error found
diff --git a/test/files/neg/case-collision.flags b/test/files/neg/t6375.flags
similarity index 100%
copy from test/files/neg/case-collision.flags
copy to test/files/neg/t6375.flags
diff --git a/test/files/neg/t6375.scala b/test/files/neg/t6375.scala
new file mode 100644
index 0000000..21634df
--- /dev/null
+++ b/test/files/neg/t6375.scala
@@ -0,0 +1,67 @@
+import scala.annotation.meta._
+
+class Bippy extends scala.annotation.StaticAnnotation
+
+abstract class Foo {
+  @Bippy           val x1: Int    // warn
+  @(Bippy @field)  val x2: Int    // warn
+  @(Bippy @getter) val x3: Int    // no warn
+  @(Bippy @setter) val x4: Int    // warn
+  @(Bippy @param)  val x5: Int    // warn
+}
+
+object Bar extends Foo {
+  val x1 = 1
+  val x2 = 2
+  val x3 = 3
+  val x4 = 4
+  val x5 = 5
+
+  @(Bippy @getter) private[this] val q1: Int = 1 // warn
+  @(Bippy @getter) private val q2: Int = 1       // no warn
+
+  def f1(@(Bippy @param) x: Int): Int = 0   // no warn
+  def f2(@(Bippy @getter) x: Int): Int = 0  // warn - todo
+  def f3(@(Bippy @setter) x: Int): Int = 0  // warn - todo
+  def f4(@(Bippy @field) x: Int): Int = 0   // warn - todo
+  def f5(@Bippy x: Int): Int = 0            // no warn
+
+  @(Bippy @companionClass)  def g1(x: Int): Int = 0   // warn - todo
+  @(Bippy @companionObject) def g2(x: Int): Int = 0   // warn - todo
+  @(Bippy @companionMethod) def g3(x: Int): Int = 0   // no warn
+                     @Bippy def g4(x: Int): Int = 0   // no warn
+
+  @(Bippy @companionObject @companionMethod) def g5(x: Int): Int = 0   // no warn
+}
+
+class Dingo(
+  @Bippy p0: Int,             // no warn
+  @(Bippy @param) p1: Int,    // no warn
+  @(Bippy @getter) p2: Int,   // warn
+  @(Bippy @setter) p3: Int,   // warn
+  @(Bippy @field) p4: Int     // warn
+)
+
+class ValDingo(
+  @Bippy val p0: Int,             // no warn
+  @(Bippy @param) val p1: Int,    // no warn
+  @(Bippy @getter) val p2: Int,   // no warn
+  @(Bippy @setter) val p3: Int,   // warn - todo
+  @(Bippy @field) val p4: Int     // no warn
+)
+
+class VarDingo(
+  @Bippy var p0: Int,             // no warn
+  @(Bippy @param) var p1: Int,    // no warn
+  @(Bippy @getter) var p2: Int,   // no warn
+  @(Bippy @setter) var p3: Int,   // no warn
+  @(Bippy @field) var p4: Int     // no warn
+)
+
+case class CaseDingo(
+  @Bippy  p0: Int,            // no warn
+  @(Bippy @param) p1: Int,    // no warn
+  @(Bippy @getter) p2: Int,   // no warn
+  @(Bippy @setter) p3: Int,   // warn - todo
+  @(Bippy @field) p4: Int     // no warn
+)
diff --git a/test/files/neg/t6385.check b/test/files/neg/t6385.check
deleted file mode 100644
index 93e51e8..0000000
--- a/test/files/neg/t6385.check
+++ /dev/null
@@ -1,7 +0,0 @@
-t6385.scala:12: error: bridge generated for member method x: ()C[T] in class C
-which overrides method x: ()C[T] in trait AA
-clashes with definition of the member itself;
-both have erased type ()Object
-   def x = this
-       ^
-one error found
diff --git a/test/files/neg/t6385.scala b/test/files/neg/t6385.scala
deleted file mode 100644
index cec58ee..0000000
--- a/test/files/neg/t6385.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-object N {
-   def main(args: Array[String]) {
-      val y: AA[Int] = C(2)
-      val c: Int = y.x.y
-      println(c)
-   }
-}
-trait AA[T] extends Any {
-   def x: C[T]
-}
-case class C[T](val y: T) extends AnyVal with AA[T] {
-   def x = this
-}
diff --git a/test/files/neg/t6406-regextract.check b/test/files/neg/t6406-regextract.check
new file mode 100644
index 0000000..19425a6
--- /dev/null
+++ b/test/files/neg/t6406-regextract.check
@@ -0,0 +1,6 @@
+t6406-regextract.scala:4: warning: method unapplySeq in class Regex is deprecated: Extracting a match result from anything but a CharSequence or Match is deprecated
+  List(1) collect { case r(i) => i }
+                         ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t6406-regextract.flags b/test/files/neg/t6406-regextract.flags
new file mode 100644
index 0000000..7de3c0f
--- /dev/null
+++ b/test/files/neg/t6406-regextract.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -deprecation
diff --git a/test/files/neg/t6406-regextract.scala b/test/files/neg/t6406-regextract.scala
new file mode 100644
index 0000000..0f5dad9
--- /dev/null
+++ b/test/files/neg/t6406-regextract.scala
@@ -0,0 +1,5 @@
+
+object Test extends App {
+  val r = "(\\d+)".r
+  List(1) collect { case r(i) => i }
+}
diff --git a/test/files/neg/t6443c.check b/test/files/neg/t6443c.check
index 7cf8d23..7b7f419 100644
--- a/test/files/neg/t6443c.check
+++ b/test/files/neg/t6443c.check
@@ -1,6 +1,6 @@
 t6443c.scala:16: error: double definition:
-method foo:(d: B.D)(a: Any)(d2: d.type)Unit and
-method foo:(d: B.D)(a: Any, d2: d.type)Unit at line 11
+def foo(d: B.D)(a: Any,d2: d.type): Unit at line 11 and
+def foo(d: B.D)(a: Any)(d2: d.type): Unit at line 16
 have same type after erasure: (d: B.D, a: Object, d2: B.D)Unit
   def foo(d: D)(a: Any)(d2: d.type): Unit = ()
       ^
diff --git a/test/files/neg/t6446-additional.check b/test/files/neg/t6446-additional.check
new file mode 100755
index 0000000..a87af2f
--- /dev/null
+++ b/test/files/neg/t6446-additional.check
@@ -0,0 +1,39 @@
+    phase name  id  description
+    ----------  --  -----------
+        parser   1  parse source into ASTs, perform simple desugaring
+         namer   2  resolve names, attach symbols to named trees
+packageobjects   3  load package objects
+         typer   4  the meat and potatoes: type the trees
+        patmat   5  translate match expressions
+superaccessors   6  add super accessors in traits and nested classes
+    extmethods   7  add extension methods for inline classes
+       pickler   8  serialize symbol tables
+     refchecks   9  reference/override checking, translate nested objects
+       uncurry  10  uncurry, translate function values to anonymous classes
+     tailcalls  11  replace tail calls by jumps
+    specialize  12  @specialized-driven class and method specialization
+ explicitouter  13  this refs to outer pointers
+       erasure  14  erase types, add interfaces for traits
+   posterasure  15  clean up erased inline classes
+      lazyvals  16  allocate bitmaps, translate lazy vals into lazified defs
+    lambdalift  17  move nested functions to top level
+  constructors  18  move field definitions into constructors
+       flatten  19  eliminate inner classes
+         mixin  20  mixin composition
+       cleanup  21  platform-specific cleanups, generate reflective calls
+    delambdafy  22  remove lambdas
+         icode  23  generate portable intermediate code
+#partest -optimise
+       inliner  24  optimization: do inlining
+inlinehandlers  25  optimization: inline exception handlers
+      closelim  26  optimization: eliminate uncalled closures
+      constopt  27  optimization: optimize null and other constants
+           dce  28  optimization: eliminate dead code
+           jvm  29  generate JVM bytecode
+       ploogin  30  A sample phase that does so many things it's kind of hard...
+      terminal  31  the last phase during a compilation run
+#partest !-optimise
+           jvm  24  generate JVM bytecode
+       ploogin  25  A sample phase that does so many things it's kind of hard...
+      terminal  26  the last phase during a compilation run
+#partest
diff --git a/test/files/neg/t6446-additional/ploogin_1.scala b/test/files/neg/t6446-additional/ploogin_1.scala
new file mode 100644
index 0000000..ed6adfc
--- /dev/null
+++ b/test/files/neg/t6446-additional/ploogin_1.scala
@@ -0,0 +1,31 @@
+
+package t6446
+
+import scala.tools.nsc.{ Global, Phase }
+import scala.tools.nsc.plugins.{ Plugin, PluginComponent }
+import scala.reflect.io.Path
+import scala.reflect.io.File
+
+/** A test plugin.  */
+class Ploogin(val global: Global) extends Plugin {
+  import global._
+
+  val name = "ploogin"
+  val description = "A sample plugin for testing."
+  val components = List[PluginComponent](TestComponent)
+
+  private object TestComponent extends PluginComponent {
+    val global: Ploogin.this.global.type = Ploogin.this.global
+    //override val runsBefore = List("refchecks")
+    val runsAfter = List("jvm")
+    val phaseName = Ploogin.this.name
+    override def description = "A sample phase that does so many things it's kind of hard to describe briefly."
+    def newPhase(prev: Phase) = new TestPhase(prev)
+    class TestPhase(prev: Phase) extends StdPhase(prev) {
+      override def description = TestComponent.this.description
+      def apply(unit: CompilationUnit) {
+        // kewl kode
+      }
+    }
+  }
+}
diff --git a/test/files/neg/t6446-additional/sample_2.flags b/test/files/neg/t6446-additional/sample_2.flags
new file mode 100644
index 0000000..4d518c2
--- /dev/null
+++ b/test/files/neg/t6446-additional/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xshow-phases
diff --git a/test/files/neg/t6446-additional/sample_2.scala b/test/files/neg/t6446-additional/sample_2.scala
new file mode 100644
index 0000000..73cdc64
--- /dev/null
+++ b/test/files/neg/t6446-additional/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t6446-additional/scalac-plugin.xml b/test/files/neg/t6446-additional/scalac-plugin.xml
new file mode 100644
index 0000000..e849bb5
--- /dev/null
+++ b/test/files/neg/t6446-additional/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+<name>sample-plugin</name>
+<classname>t6446.Ploogin</classname>
+</plugin>
diff --git a/test/files/neg/t6446-list.check b/test/files/neg/t6446-list.check
new file mode 100755
index 0000000..fa5c581
--- /dev/null
+++ b/test/files/neg/t6446-list.check
@@ -0,0 +1 @@
+ploogin - A sample plugin for testing.
diff --git a/test/files/neg/t6446-list/ploogin_1.scala b/test/files/neg/t6446-list/ploogin_1.scala
new file mode 100644
index 0000000..ed6adfc
--- /dev/null
+++ b/test/files/neg/t6446-list/ploogin_1.scala
@@ -0,0 +1,31 @@
+
+package t6446
+
+import scala.tools.nsc.{ Global, Phase }
+import scala.tools.nsc.plugins.{ Plugin, PluginComponent }
+import scala.reflect.io.Path
+import scala.reflect.io.File
+
+/** A test plugin.  */
+class Ploogin(val global: Global) extends Plugin {
+  import global._
+
+  val name = "ploogin"
+  val description = "A sample plugin for testing."
+  val components = List[PluginComponent](TestComponent)
+
+  private object TestComponent extends PluginComponent {
+    val global: Ploogin.this.global.type = Ploogin.this.global
+    //override val runsBefore = List("refchecks")
+    val runsAfter = List("jvm")
+    val phaseName = Ploogin.this.name
+    override def description = "A sample phase that does so many things it's kind of hard to describe briefly."
+    def newPhase(prev: Phase) = new TestPhase(prev)
+    class TestPhase(prev: Phase) extends StdPhase(prev) {
+      override def description = TestComponent.this.description
+      def apply(unit: CompilationUnit) {
+        // kewl kode
+      }
+    }
+  }
+}
diff --git a/test/files/neg/t6446-list/sample_2.flags b/test/files/neg/t6446-list/sample_2.flags
new file mode 100644
index 0000000..9cb3232
--- /dev/null
+++ b/test/files/neg/t6446-list/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-list
diff --git a/test/files/neg/t6446-list/sample_2.scala b/test/files/neg/t6446-list/sample_2.scala
new file mode 100644
index 0000000..73cdc64
--- /dev/null
+++ b/test/files/neg/t6446-list/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t6446-list/scalac-plugin.xml b/test/files/neg/t6446-list/scalac-plugin.xml
new file mode 100644
index 0000000..e849bb5
--- /dev/null
+++ b/test/files/neg/t6446-list/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+<name>sample-plugin</name>
+<classname>t6446.Ploogin</classname>
+</plugin>
diff --git a/test/files/neg/t6446-missing.check b/test/files/neg/t6446-missing.check
new file mode 100755
index 0000000..029c805
--- /dev/null
+++ b/test/files/neg/t6446-missing.check
@@ -0,0 +1,38 @@
+Error: unable to load class: t6446.Ploogin
+    phase name  id  description
+    ----------  --  -----------
+        parser   1  parse source into ASTs, perform simple desugaring
+         namer   2  resolve names, attach symbols to named trees
+packageobjects   3  load package objects
+         typer   4  the meat and potatoes: type the trees
+        patmat   5  translate match expressions
+superaccessors   6  add super accessors in traits and nested classes
+    extmethods   7  add extension methods for inline classes
+       pickler   8  serialize symbol tables
+     refchecks   9  reference/override checking, translate nested objects
+       uncurry  10  uncurry, translate function values to anonymous classes
+     tailcalls  11  replace tail calls by jumps
+    specialize  12  @specialized-driven class and method specialization
+ explicitouter  13  this refs to outer pointers
+       erasure  14  erase types, add interfaces for traits
+   posterasure  15  clean up erased inline classes
+      lazyvals  16  allocate bitmaps, translate lazy vals into lazified defs
+    lambdalift  17  move nested functions to top level
+  constructors  18  move field definitions into constructors
+       flatten  19  eliminate inner classes
+         mixin  20  mixin composition
+       cleanup  21  platform-specific cleanups, generate reflective calls
+    delambdafy  22  remove lambdas
+         icode  23  generate portable intermediate code
+#partest !-optimise
+           jvm  24  generate JVM bytecode
+      terminal  25  the last phase during a compilation run
+#partest -optimise
+       inliner  24  optimization: do inlining
+inlinehandlers  25  optimization: inline exception handlers
+      closelim  26  optimization: eliminate uncalled closures
+      constopt  27  optimization: optimize null and other constants
+           dce  28  optimization: eliminate dead code
+           jvm  29  generate JVM bytecode
+      terminal  30  the last phase during a compilation run
+#partest
diff --git a/test/files/neg/t6446-missing/sample_2.flags b/test/files/neg/t6446-missing/sample_2.flags
new file mode 100644
index 0000000..4d518c2
--- /dev/null
+++ b/test/files/neg/t6446-missing/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xshow-phases
diff --git a/test/files/neg/t6446-missing/sample_2.scala b/test/files/neg/t6446-missing/sample_2.scala
new file mode 100644
index 0000000..73cdc64
--- /dev/null
+++ b/test/files/neg/t6446-missing/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t6446-missing/scalac-plugin.xml b/test/files/neg/t6446-missing/scalac-plugin.xml
new file mode 100644
index 0000000..9c34d63
--- /dev/null
+++ b/test/files/neg/t6446-missing/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+<name>missing-plugin</name>
+<classname>t6446.Ploogin</classname>
+</plugin>
diff --git a/test/files/neg/t6446-show-phases.check b/test/files/neg/t6446-show-phases.check
new file mode 100644
index 0000000..3ae3f96
--- /dev/null
+++ b/test/files/neg/t6446-show-phases.check
@@ -0,0 +1,37 @@
+    phase name  id  description
+    ----------  --  -----------
+        parser   1  parse source into ASTs, perform simple desugaring
+         namer   2  resolve names, attach symbols to named trees
+packageobjects   3  load package objects
+         typer   4  the meat and potatoes: type the trees
+        patmat   5  translate match expressions
+superaccessors   6  add super accessors in traits and nested classes
+    extmethods   7  add extension methods for inline classes
+       pickler   8  serialize symbol tables
+     refchecks   9  reference/override checking, translate nested objects
+       uncurry  10  uncurry, translate function values to anonymous classes
+     tailcalls  11  replace tail calls by jumps
+    specialize  12  @specialized-driven class and method specialization
+ explicitouter  13  this refs to outer pointers
+       erasure  14  erase types, add interfaces for traits
+   posterasure  15  clean up erased inline classes
+      lazyvals  16  allocate bitmaps, translate lazy vals into lazified defs
+    lambdalift  17  move nested functions to top level
+  constructors  18  move field definitions into constructors
+       flatten  19  eliminate inner classes
+         mixin  20  mixin composition
+       cleanup  21  platform-specific cleanups, generate reflective calls
+    delambdafy  22  remove lambdas
+         icode  23  generate portable intermediate code
+#partest !-optimise
+           jvm  24  generate JVM bytecode
+      terminal  25  the last phase during a compilation run
+#partest -optimise
+       inliner  24  optimization: do inlining
+inlinehandlers  25  optimization: inline exception handlers
+      closelim  26  optimization: eliminate uncalled closures
+      constopt  27  optimization: optimize null and other constants
+           dce  28  optimization: eliminate dead code
+           jvm  29  generate JVM bytecode
+      terminal  30  the last phase during a compilation run
+#partest
diff --git a/test/files/neg/t6446-show-phases.flags b/test/files/neg/t6446-show-phases.flags
new file mode 100644
index 0000000..845666e
--- /dev/null
+++ b/test/files/neg/t6446-show-phases.flags
@@ -0,0 +1 @@
+-Xshow-phases
diff --git a/test/files/neg/t6446-show-phases.scala b/test/files/neg/t6446-show-phases.scala
new file mode 100644
index 0000000..a9afb04
--- /dev/null
+++ b/test/files/neg/t6446-show-phases.scala
@@ -0,0 +1,3 @@
+
+// testing compiler flag output only
+object Test extends App
diff --git a/test/files/neg/t6455.check b/test/files/neg/t6455.check
new file mode 100644
index 0000000..8f2aad0
--- /dev/null
+++ b/test/files/neg/t6455.check
@@ -0,0 +1,4 @@
+t6455.scala:5: error: value withFilter is not a member of object O
+  O.withFilter(f => true)
+    ^
+one error found
diff --git a/test/files/neg/ambiguous-float-dots2.flags b/test/files/neg/t6455.flags
similarity index 100%
copy from test/files/neg/ambiguous-float-dots2.flags
copy to test/files/neg/t6455.flags
diff --git a/test/files/neg/t6455.scala b/test/files/neg/t6455.scala
new file mode 100644
index 0000000..ebbb37f
--- /dev/null
+++ b/test/files/neg/t6455.scala
@@ -0,0 +1,6 @@
+object O { def filter(p: Int => Boolean): O.type = this }
+
+class Test {
+  // should not compile because we no longer rewrite withFilter => filter under -Xfuture
+  O.withFilter(f => true)
+}
\ No newline at end of file
diff --git a/test/files/neg/t6534.check b/test/files/neg/t6534.check
index 52e70cf..c2e80b3 100644
--- a/test/files/neg/t6534.check
+++ b/test/files/neg/t6534.check
@@ -1,9 +1,3 @@
-t6534.scala:4: warning: Implementation of equals inherited from trait Foo overridden in class Bippy1 to enforce value class semantics
-class Bippy1(val x: Int) extends AnyVal with Foo { }  // warn
-      ^
-t6534.scala:5: warning: Implementation of hashCode inherited from trait Ding overridden in class Bippy2 to enforce value class semantics
-class Bippy2(val x: Int) extends AnyVal with Ding { } // warn
-      ^
 t6534.scala:6: error: redefinition of equals method. See SIP-15, criterion 4. is not allowed in value class
 class Bippy3(val x: Int) extends AnyVal { override def equals(x: Any) = false } // error
                                                        ^
@@ -13,5 +7,4 @@ class Bippy4(val x: Int) extends AnyVal { override def hashCode = -1 }
 t6534.scala:9: error: redefinition of equals method. See SIP-15, criterion 4. is not allowed in value class
 case class Bippy6(val x: Int) extends AnyVal { override def productPrefix = "Dingo" ; override def equals(x: Any) = false } // error
                                                                                                    ^
-two warnings found
 three errors found
diff --git a/test/files/neg/t6539.check b/test/files/neg/t6539.check
index b647636..8c94a8a 100644
--- a/test/files/neg/t6539.check
+++ b/test/files/neg/t6539.check
@@ -7,4 +7,10 @@ Test_2.scala:3: error: cto may only be used as an argument to m
 Test_2.scala:5: error: cto may only be used as an argument to m
   M.cto // error
     ^
-three errors found
+Test_2.scala:9: error: splice must be enclosed within a reify {} block
+    val splice = expr.splice
+                      ^
+Test_2.scala:10: error: cannot use value except for signatures of macro implementations
+    val value = expr.value
+                     ^
+5 errors found
diff --git a/test/files/neg/t6539/Macro_1.scala b/test/files/neg/t6539/Macro_1.scala
index 4f7d289..60db669 100644
--- a/test/files/neg/t6539/Macro_1.scala
+++ b/test/files/neg/t6539/Macro_1.scala
@@ -1,9 +1,9 @@
 import language.experimental.macros
-import reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object M {
   def m(a: Any, b: Any): Any = macro mImpl
-  def mImpl(c: Context)(a: c.Expr[Any], b: c.Expr[Any]) = a
+  def mImpl(c: Context)(a: c.Expr[Any], b: c.Expr[Any]) = c.universe.reify(println(a.splice))
 
   @reflect.internal.annotations.compileTimeOnly("cto may only be used as an argument to " + "m")
   def cto = 0
diff --git a/test/files/neg/t6566a.check b/test/files/neg/t6566a.check
new file mode 100644
index 0000000..7668f9d
--- /dev/null
+++ b/test/files/neg/t6566a.check
@@ -0,0 +1,4 @@
+t6566a.scala:2: error: covariant type T occurs in invariant position in type T of type MyType
+  class TypeCheat[+T] { type MyType = T }
+                             ^
+one error found
diff --git a/test/files/neg/t6566a.scala b/test/files/neg/t6566a.scala
new file mode 100644
index 0000000..74a0b38
--- /dev/null
+++ b/test/files/neg/t6566a.scala
@@ -0,0 +1,17 @@
+object WhatsYourTypeIsMyType {
+  class TypeCheat[+T] { type MyType = T }
+
+  class Foo {
+    val tc = new TypeCheat[Foo]
+    var x: tc.MyType = _
+    def setX() = x = new Foo
+  }
+  class Bar extends Foo {
+    override val tc = new TypeCheat[Bar]
+    def unsound = this
+
+    setX()
+    println(x.unsound)
+  }
+  def main(args: Array[String]): Unit = new Bar
+}
diff --git a/test/files/neg/t6566b.check b/test/files/neg/t6566b.check
new file mode 100644
index 0000000..fb3fe81
--- /dev/null
+++ b/test/files/neg/t6566b.check
@@ -0,0 +1,4 @@
+t6566b.scala:3: error: covariant type T occurs in invariant position in type T of type MyType
+    type MyType = T
+         ^
+one error found
diff --git a/test/files/neg/t6566b.scala b/test/files/neg/t6566b.scala
new file mode 100644
index 0000000..18ddebf
--- /dev/null
+++ b/test/files/neg/t6566b.scala
@@ -0,0 +1,19 @@
+object WhatsYourTypeIsMyType {
+  trait WithMyType[+T] {
+    type MyType = T
+  }
+
+  class Foo extends WithMyType[Foo] {
+    var x: MyType = _
+    def setX() = x = new Foo
+  }
+
+  class Bar extends Foo with WithMyType[Bar] {
+    def unsound { println("iAmABar") }
+
+    setX()
+    println(x.unsound)
+  }
+
+  def main(args: Array[String]): Unit = new Bar
+}
diff --git a/test/files/neg/t6567.check b/test/files/neg/t6567.check
index 4c513e6..a733d75 100644
--- a/test/files/neg/t6567.check
+++ b/test/files/neg/t6567.check
@@ -1,7 +1,9 @@
-t6567.scala:8: error: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
+t6567.scala:8: warning: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
   Option[B](a)
            ^
-t6567.scala:10: error: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
+t6567.scala:10: warning: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
   val b: Option[B] = Option(a)
                            ^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t6574.check b/test/files/neg/t6574.check
new file mode 100644
index 0000000..c67b4ed
--- /dev/null
+++ b/test/files/neg/t6574.check
@@ -0,0 +1,7 @@
+t6574.scala:4: error: could not optimize @tailrec annotated method notTailPos$extension: it contains a recursive call not in tail position
+    println("tail")
+    ^
+t6574.scala:8: error: could not optimize @tailrec annotated method differentTypeArgs$extension: it is called recursively with different type arguments
+    {(); new Bad[String, Unit](0)}.differentTypeArgs
+                                   ^
+two errors found
diff --git a/test/files/neg/t6574.scala b/test/files/neg/t6574.scala
new file mode 100644
index 0000000..bba97ad
--- /dev/null
+++ b/test/files/neg/t6574.scala
@@ -0,0 +1,10 @@
+class Bad[X, Y](val v: Int) extends AnyVal {
+  @annotation.tailrec final def notTailPos[Z](a: Int)(b: String) {
+    this.notTailPos[Z](a)(b)
+    println("tail")
+  }
+
+  @annotation.tailrec final def differentTypeArgs {
+    {(); new Bad[String, Unit](0)}.differentTypeArgs
+  }
+}
diff --git a/test/files/neg/t6601.check b/test/files/neg/t6601.check
new file mode 100644
index 0000000..1410e1b
--- /dev/null
+++ b/test/files/neg/t6601.check
@@ -0,0 +1,4 @@
+AccessPrivateConstructor_2.scala:2: error: constructor PrivateConstructor in class PrivateConstructor cannot be accessed in class AccessPrivateConstructor
+  new PrivateConstructor("") // Scalac should forbid accessing to the private constructor!
+  ^
+one error found
diff --git a/test/files/neg/t6601/AccessPrivateConstructor_2.scala b/test/files/neg/t6601/AccessPrivateConstructor_2.scala
new file mode 100644
index 0000000..816bc10
--- /dev/null
+++ b/test/files/neg/t6601/AccessPrivateConstructor_2.scala
@@ -0,0 +1,3 @@
+class AccessPrivateConstructor {
+  new PrivateConstructor("") // Scalac should forbid accessing to the private constructor!
+}
diff --git a/test/files/neg/t6601/PrivateConstructor_1.scala b/test/files/neg/t6601/PrivateConstructor_1.scala
new file mode 100644
index 0000000..f09d7ad
--- /dev/null
+++ b/test/files/neg/t6601/PrivateConstructor_1.scala
@@ -0,0 +1 @@
+class PrivateConstructor private(val s: String) extends AnyVal
diff --git a/test/files/neg/t663.check b/test/files/neg/t663.check
index 40161fb..633e27e 100644
--- a/test/files/neg/t663.check
+++ b/test/files/neg/t663.check
@@ -1,7 +1,7 @@
 t663.scala:11: error: name clash between defined and inherited member:
-method asMatch:(m: Test.this.Node)Any and
-method asMatch:(node: Test.this.Matchable)Any in trait MatchableImpl
-have same type after erasure: (m: test.Test#NodeImpl)Object
+def asMatch(node: Test.this.Matchable): Any in trait MatchableImpl and
+def asMatch(m: Test.this.Node): Any at line 11
+have same type after erasure: (node: test.Test#NodeImpl)Object
     def asMatch(m : Node) : Any = {
         ^
 one error found
diff --git a/test/files/neg/t6666.check b/test/files/neg/t6666.check
index 6337d4c..43c8252 100644
--- a/test/files/neg/t6666.check
+++ b/test/files/neg/t6666.check
@@ -1,28 +1,28 @@
-t6666.scala:23: error: Implementation restriction: access of method x$2 in object O1 from anonymous class 2, would require illegal premature access to object O1
+t6666.scala:23: error: Implementation restriction: access of method x$2 in object O1 from <$anon: Function0>, would require illegal premature access to object O1
   F.byname(x)
            ^
-t6666.scala:30: error: Implementation restriction: access of value x$3 in object O2 from anonymous class 3, would require illegal premature access to object O2
+t6666.scala:30: error: Implementation restriction: access of value x$3 in object O2 from <$anon: Function0>, would require illegal premature access to object O2
   F.byname(x)
            ^
-t6666.scala:37: error: Implementation restriction: access of method x$4 in object O3 from anonymous class 4, would require illegal premature access to object O3
+t6666.scala:37: error: Implementation restriction: access of method x$4 in object O3 from <$anon: Function0>, would require illegal premature access to object O3
   F.hof(() => x)
               ^
-t6666.scala:50: error: Implementation restriction: access of method x$6 in class C1 from anonymous class 7, would require illegal premature access to the unconstructed `this` of class C1
+t6666.scala:50: error: Implementation restriction: access of method x$6 in class C1 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C1
   F.byname(x)
            ^
-t6666.scala:54: error: Implementation restriction: access of value x$7 in class C2 from anonymous class 8, would require illegal premature access to the unconstructed `this` of class C2
+t6666.scala:54: error: Implementation restriction: access of value x$7 in class C2 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C2
   F.byname(x)
            ^
-t6666.scala:58: error: Implementation restriction: access of method x$8 in class C3 from anonymous class 9, would require illegal premature access to the unconstructed `this` of class C3
+t6666.scala:58: error: Implementation restriction: access of method x$8 in class C3 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C3
   F.hof(() => x)
               ^
 t6666.scala:62: error: Implementation restriction: access of method x$9 in class C4 from object Nested$4, would require illegal premature access to the unconstructed `this` of class C4
   object Nested { def xx = x}
                            ^
-t6666.scala:76: error: Implementation restriction: access of method x$11 in class C11 from anonymous class 12, would require illegal premature access to the unconstructed `this` of class C11
+t6666.scala:76: error: Implementation restriction: access of method x$11 in class C11 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C11
       F.byname(x)
                ^
-t6666.scala:95: error: Implementation restriction: access of method x$12 in class C13 from anonymous class 13, would require illegal premature access to the unconstructed `this` of class C13
+t6666.scala:95: error: Implementation restriction: access of method x$12 in class C13 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C13
       F.hof(() => x)
                   ^
 t6666.scala:104: error: Implementation restriction: access of method x$13 in class C14 from object Nested$5, would require illegal premature access to the unconstructed `this` of class C14
diff --git a/test/files/neg/t6666.flags b/test/files/neg/t6666.flags
new file mode 100644
index 0000000..2349d82
--- /dev/null
+++ b/test/files/neg/t6666.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline
diff --git a/test/files/neg/t6666.scala b/test/files/neg/t6666.scala
index 1919ea3..58c5be5 100644
--- a/test/files/neg/t6666.scala
+++ b/test/files/neg/t6666.scala
@@ -118,4 +118,4 @@ class CEarly(a: Any) extends {
     object Nested { def xx = x}
     Nested.xx
   }
-} with AnyRef 
\ No newline at end of file
+} with AnyRef
\ No newline at end of file
diff --git a/test/files/neg/t6666c.check b/test/files/neg/t6666c.check
index 8fb9f4b..384e52a 100644
--- a/test/files/neg/t6666c.check
+++ b/test/files/neg/t6666c.check
@@ -4,7 +4,7 @@ class D extends C({def x = 0; object X { x }})
 t6666c.scala:5: error: Implementation restriction: access of method x$2 in class D1 from object X$5, would require illegal premature access to the unconstructed `this` of class D1
 class D1 extends C1({def x = 0; () => {object X { x }}})
                                                   ^
-t6666c.scala:8: error: Implementation restriction: access of method x$3 from object X$6, would require illegal premature access to the unconstructed `this` of anonymous class 2
+t6666c.scala:8: error: Implementation restriction: access of method x$3 from object X$6, would require illegal premature access to the unconstructed `this` of <$anon: Function0>
 class D2 extends C2({def x = 0; object X { x }})
                                            ^
 three errors found
diff --git a/test/files/neg/t6666c.flags b/test/files/neg/t6666c.flags
new file mode 100644
index 0000000..2349d82
--- /dev/null
+++ b/test/files/neg/t6666c.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline
diff --git a/test/files/neg/t6666e.check b/test/files/neg/t6666e.check
index 9fcc3ab..3189612 100644
--- a/test/files/neg/t6666e.check
+++ b/test/files/neg/t6666e.check
@@ -1,4 +1,4 @@
-t6666e.scala:8: error: Implementation restriction: anonymous class $anonfun requires premature access to class Crash.
+t6666e.scala:8: error: Implementation restriction: <$anon: Nothing => Unit> requires premature access to class Crash.
     this(Nil.collect{case x =>})
                     ^
 one error found
diff --git a/test/files/neg/t6667.check b/test/files/neg/t6667.check
index b04251d..43313fa 100644
--- a/test/files/neg/t6667.check
+++ b/test/files/neg/t6667.check
@@ -1,5 +1,4 @@
-t6667.scala:8: error: Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667. 
-ambiguous implicit values:
+t6667.scala:8: error: ambiguous implicit values:
  both value inScope1 in object Test of type => C
  and value inScope2 in object Test of type => C
  match expected type C
diff --git a/test/files/neg/t6667.flags b/test/files/neg/t6667.flags
deleted file mode 100644
index 6c1dd10..0000000
--- a/test/files/neg/t6667.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfatal-warnings -Xlint
\ No newline at end of file
diff --git a/test/files/neg/t6667b.check b/test/files/neg/t6667b.check
index 5d56e77..99cea9a 100644
--- a/test/files/neg/t6667b.check
+++ b/test/files/neg/t6667b.check
@@ -4,8 +4,7 @@ t6667b.scala:16: error: ambiguous implicit values:
  match expected type Test.Box
       new Test()
       ^
-t6667b.scala:19: error: Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667. 
-ambiguous implicit values:
+t6667b.scala:19: error: ambiguous implicit values:
  both value a in object Test of type => Test.Box
  and value b of type Test.Box
  match expected type Test.Box
diff --git a/test/files/neg/t6667b.flags b/test/files/neg/t6667b.flags
deleted file mode 100644
index 6c1dd10..0000000
--- a/test/files/neg/t6667b.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfatal-warnings -Xlint
\ No newline at end of file
diff --git a/test/files/neg/t667.check b/test/files/neg/t667.check
index d4367bc..e68c6de 100644
--- a/test/files/neg/t667.check
+++ b/test/files/neg/t667.check
@@ -1,4 +1,4 @@
-t667.scala:8: error: class Ni inherits itself
+t667.scala:8: error: illegal cyclic reference involving class Ni
     class Ni extends super.Ni with Ni;
-                                   ^
+             ^
 one error found
diff --git a/test/files/neg/t6675-old-patmat.check b/test/files/neg/t6675-old-patmat.check
deleted file mode 100644
index bc3920d..0000000
--- a/test/files/neg/t6675-old-patmat.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t6675-old-patmat.scala:10: error: extractor pattern binds a single value to a Product3 of type (Int, Int, Int)
-  "" match { case X(b) => b } // should warn under -Xlint. Not an error because of SI-6111
-                  ^
-one error found
diff --git a/test/files/neg/t6675-old-patmat.flags b/test/files/neg/t6675-old-patmat.flags
deleted file mode 100644
index 604de64..0000000
--- a/test/files/neg/t6675-old-patmat.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xlint -Xfatal-warnings -Xoldpatmat
\ No newline at end of file
diff --git a/test/files/neg/t6675-old-patmat.scala b/test/files/neg/t6675-old-patmat.scala
deleted file mode 100644
index 4d500b7..0000000
--- a/test/files/neg/t6675-old-patmat.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-object X {
-  def unapply(s: String): Option[(Int,Int,Int)] = Some((1,2,3))
-}
-
-object Y {
-  def unapplySeq(s: String): Option[Seq[(Int,Int,Int)]] = Some(Seq((1,2,3)))
-}
-
-object Test {
-  "" match { case X(b) => b } // should warn under -Xlint. Not an error because of SI-6111
-
-  "" match { case Y(b) => b } // no warning
-}
diff --git a/test/files/neg/t6675.check b/test/files/neg/t6675.check
index 7b271de..aecf04c 100644
--- a/test/files/neg/t6675.check
+++ b/test/files/neg/t6675.check
@@ -1,4 +1,6 @@
-t6675.scala:10: error: extractor pattern binds a single value to a Product3 of type (Int, Int, Int)
+t6675.scala:10: warning: object X expects 3 patterns to hold (Int, Int, Int) but crushing into 3-tuple to fit single pattern (SI-6675)
   "" match { case X(b) => b } // should warn under -Xlint. Not an error because of SI-6111
                   ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
 one error found
diff --git a/test/files/neg/t6675.flags b/test/files/neg/t6675.flags
index e93641e..2843ea9 100644
--- a/test/files/neg/t6675.flags
+++ b/test/files/neg/t6675.flags
@@ -1 +1 @@
--Xlint -Xfatal-warnings
\ No newline at end of file
+-deprecation -Xlint -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t6675b.check b/test/files/neg/t6675b.check
new file mode 100644
index 0000000..77f6b3c
--- /dev/null
+++ b/test/files/neg/t6675b.check
@@ -0,0 +1,37 @@
+t6675b.scala:17: warning: object LeftOrRight expects 2 patterns to hold (Int, Int) but crushing into 2-tuple to fit single pattern (SI-6675)
+  def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight(a) => a  }          // warn
+                                                                       ^
+t6675b.scala:19: error: constructor cannot be instantiated to expected type;
+ found   : (T1, T2, T3)
+ required: (Int, Int)
+  def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight((a, b, c)) => a  }  // fail
+                                                                                   ^
+t6675b.scala:24: warning: object LeftOrRight expects 2 patterns to hold (A, A) but crushing into 2-tuple to fit single pattern (SI-6675)
+  def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight(a) => a  }          // warn
+                                                                        ^
+t6675b.scala:26: error: constructor cannot be instantiated to expected type;
+ found   : (T1, T2, T3)
+ required: (?A11, ?A12) where type ?A12 <: A (this is a GADT skolem), type ?A11 <: A (this is a GADT skolem)
+  def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight((a, b, c)) => a  }  // fail
+                                                                                    ^
+t6675b.scala:30: warning: object NativelyTwo expects 2 patterns to hold ((Int, Int), (Int, Int)) but crushing into 2-tuple to fit single pattern (SI-6675)
+  def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo(a) => a  }          // warn
+                                                                       ^
+t6675b.scala:32: error: constructor cannot be instantiated to expected type;
+ found   : (T1, T2, T3)
+ required: ((Int, Int), (Int, Int))
+  def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo((a, b, c)) => a  }  // fail
+                                                                                   ^
+t6675b.scala:36: warning: object NativelyTwo expects 2 patterns to hold (A, A) but crushing into 2-tuple to fit single pattern (SI-6675)
+  def f1[A](x: A) = (Left(x): Either[A, A])                match { case NativelyTwo(a) => a  }          // warn
+                                                                        ^
+t6675b.scala:37: warning: object NativelyTwo expects 2 patterns to hold ((A, A), (A, A)) but crushing into 2-tuple to fit single pattern (SI-6675)
+  def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo(a) => a  }          // warn
+                                                                        ^
+t6675b.scala:39: error: constructor cannot be instantiated to expected type;
+ found   : (T1, T2, T3)
+ required: ((?A17, ?A18), (?A19, ?A20)) where type ?A20 <: A (this is a GADT skolem), type ?A19 <: A (this is a GADT skolem), type ?A18 <: A (this is a GADT skolem), type ?A17 <: A (this is a GADT skolem)
+  def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo((a, b, c)) => a  }  // fail
+                                                                                    ^
+5 warnings found
+four errors found
diff --git a/test/files/neg/t6675b.flags b/test/files/neg/t6675b.flags
new file mode 100644
index 0000000..2fcfa0c
--- /dev/null
+++ b/test/files/neg/t6675b.flags
@@ -0,0 +1 @@
+-deprecation -Xlint
diff --git a/test/files/neg/t6675b.scala b/test/files/neg/t6675b.scala
new file mode 100644
index 0000000..c86c9c3
--- /dev/null
+++ b/test/files/neg/t6675b.scala
@@ -0,0 +1,40 @@
+object LeftOrRight {
+  def unapply[A](value: Either[A, A]): Option[A] = value match {
+    case scala.Left(x)  => Some(x)
+    case scala.Right(x) => Some(x)
+  }
+}
+
+object NativelyTwo {
+  def unapply[A](value: Either[A, A]): Option[(A, A)] = value match {
+    case scala.Left(x)  => Some(x -> x)
+    case scala.Right(x) => Some(x -> x)
+  }
+}
+
+
+class A {
+  def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight(a) => a  }          // warn
+  def f2 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight((a, b)) => a  }     // no warn
+  def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight((a, b, c)) => a  }  // fail
+}
+
+class B {
+  def f1[A](x: A) = (Left(x): Either[A, A])                match { case LeftOrRight(a) => a  }          // no warn
+  def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight(a) => a  }          // warn
+  def f3[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight((a, b)) => a  }     // no warn
+  def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight((a, b, c)) => a  }  // fail
+}
+
+class C {
+  def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo(a) => a  }          // warn
+  def f2 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo((a, b)) => a  }     // no warn
+  def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo((a, b, c)) => a  }  // fail
+}
+
+class D {
+  def f1[A](x: A) = (Left(x): Either[A, A])                match { case NativelyTwo(a) => a  }          // warn
+  def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo(a) => a  }          // warn
+  def f3[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo((a, b)) => a  }     // no warn
+  def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo((a, b, c)) => a  }  // fail
+}
diff --git a/test/files/neg/t6680a.check b/test/files/neg/t6680a.check
new file mode 100644
index 0000000..03e4df1
--- /dev/null
+++ b/test/files/neg/t6680a.check
@@ -0,0 +1,11 @@
+t6680a.scala:10: error: type mismatch;
+ found   : String("abc")
+ required: A
+    y.x = "abc"
+          ^
+t6680a.scala:17: error: type mismatch;
+ found   : String("")
+ required: A
+  case class C[A](f:A=>A);def f(x:Any)=x match { case C(f)=>f("") };f(C[Int](x=>x))
+                                                              ^
+two errors found
diff --git a/test/files/neg/t6680a.flags b/test/files/neg/t6680a.flags
new file mode 100644
index 0000000..1924326
--- /dev/null
+++ b/test/files/neg/t6680a.flags
@@ -0,0 +1 @@
+-Xstrict-inference
\ No newline at end of file
diff --git a/test/files/neg/t6680a.scala b/test/files/neg/t6680a.scala
new file mode 100644
index 0000000..93b7964
--- /dev/null
+++ b/test/files/neg/t6680a.scala
@@ -0,0 +1,18 @@
+case class Cell[A](var x: A)
+object Test {
+  def f1(x: Any)        = x match { case y @ Cell(_) => y } // Inferred type is Cell[Any]
+  def f2(x: Cell[_])    = x match { case y @ Cell(_) => y } // Inferred type is Cell[_]
+  def f3[A](x: Cell[A]) = x match { case y @ Cell(_) => y } // Inferred type is Cell[A]
+
+  def main(args: Array[String]): Unit = {
+    val x = new Cell(1)
+    val y = f1(x)
+    y.x = "abc"
+    println(x.x + 1)
+  }
+}
+
+// The tweetable variation
+object Tweet {
+  case class C[A](f:A=>A);def f(x:Any)=x match { case C(f)=>f("") };f(C[Int](x=>x))
+}
diff --git a/test/files/neg/t6815.check b/test/files/neg/t6815.check
new file mode 100644
index 0000000..fae3819
--- /dev/null
+++ b/test/files/neg/t6815.check
@@ -0,0 +1,5 @@
+t6815.scala:15: error: stable identifier required, but Test.this.u.emptyValDef found.
+ Note that value emptyValDef is not stable because its type, Test.u.ValDef, is volatile.
+    case _: u.emptyValDef.T => // and, unlike in pos/t6185.scala, we shouldn't allow this.
+              ^
+one error found
diff --git a/test/files/neg/t6815.scala b/test/files/neg/t6815.scala
new file mode 100644
index 0000000..ff973a7
--- /dev/null
+++ b/test/files/neg/t6815.scala
@@ -0,0 +1,17 @@
+trait U {
+  trait ValOrDefDefApi {
+    def name: Any
+  }
+  type ValOrDefDef <: ValOrDefDefApi
+  type ValDef <: ValOrDefDef with ValDefApi { type T }
+  trait ValDefApi extends ValOrDefDefApi { this: ValDef => }
+  val emptyValDef: ValDef // the result type is volatile
+}
+
+object Test {
+  val u: U = ???
+
+  (null: Any) match {
+    case _: u.emptyValDef.T => // and, unlike in pos/t6185.scala, we shouldn't allow this.
+  }
+}
diff --git a/test/files/neg/t6829.check b/test/files/neg/t6829.check
index 7c3c66e..914a1c9 100644
--- a/test/files/neg/t6829.check
+++ b/test/files/neg/t6829.check
@@ -20,11 +20,31 @@ t6829.scala:50: error: type mismatch;
  required: _53.State where val _53: G
         val r = rewards(agent).r(s,a,s2)
                                  ^
+t6829.scala:50: error: type mismatch;
+ found   : a.type (with underlying type Any)
+ required: _53.Action where val _53: G
+        val r = rewards(agent).r(s,a,s2)
+                                   ^
+t6829.scala:50: error: type mismatch;
+ found   : s2.type (with underlying type Any)
+ required: _53.State where val _53: G
+        val r = rewards(agent).r(s,a,s2)
+                                     ^
 t6829.scala:51: error: type mismatch;
  found   : s.type (with underlying type Any)
  required: _50.State
         agent.learn(s,a,s2,r): G#Agent
                     ^
+t6829.scala:51: error: type mismatch;
+ found   : a.type (with underlying type Any)
+ required: _50.Action
+        agent.learn(s,a,s2,r): G#Agent
+                      ^
+t6829.scala:51: error: type mismatch;
+ found   : s2.type (with underlying type Any)
+ required: _50.State
+        agent.learn(s,a,s2,r): G#Agent
+                        ^
 t6829.scala:53: error: not found: value nextState
 Error occurred in an application involving default arguments.
       copy(agents = updatedAgents, state = nextState, pastHistory = currentHistory)
@@ -33,4 +53,4 @@ t6829.scala:53: error: not found: value currentHistory
 Error occurred in an application involving default arguments.
       copy(agents = updatedAgents, state = nextState, pastHistory = currentHistory)
                                                                     ^
-9 errors found
+13 errors found
diff --git a/test/files/neg/t6844.check b/test/files/neg/t6844.check
new file mode 100644
index 0000000..1fc2485
--- /dev/null
+++ b/test/files/neg/t6844.check
@@ -0,0 +1,6 @@
+t6844.scala:4: error: type mismatch;
+ found   : reflect.runtime.universe.TermName
+ required: reflect.runtime.universe.Tree
+  q"def foo($x)"
+             ^
+one error found
diff --git a/test/files/neg/t6844.scala b/test/files/neg/t6844.scala
new file mode 100644
index 0000000..809d9d0
--- /dev/null
+++ b/test/files/neg/t6844.scala
@@ -0,0 +1,5 @@
+import scala.reflect.runtime.universe._
+object Test extends App {
+  val x = TermName("x")
+  q"def foo($x)"
+}
diff --git a/test/files/neg/t6889.check b/test/files/neg/t6889.check
new file mode 100644
index 0000000..a77e8a0
--- /dev/null
+++ b/test/files/neg/t6889.check
@@ -0,0 +1,7 @@
+t6889.scala:16: error: the result type of an implicit conversion must be more specific than AnyRef
+  def f(x: Dingo): AnyRef = x   // fail - no conversion to AnyRef
+                            ^
+t6889.scala:17: error: an expression of type Null is ineligible for implicit conversion
+  var x: Int = null             // fail - no conversion from Null
+               ^
+two errors found
diff --git a/test/files/neg/t6889.scala b/test/files/neg/t6889.scala
new file mode 100644
index 0000000..ef19636
--- /dev/null
+++ b/test/files/neg/t6889.scala
@@ -0,0 +1,18 @@
+package bippy {
+  trait Bippy[A] extends Any
+}
+package foo {
+  package object unrelated {
+    implicit def bippyDingo[A](x: bippy.Bippy[A]): AnyRef = Nil
+  }
+  package unrelated {
+    trait Unrelated
+  }
+}
+
+object Test {
+  trait Dingo extends Any with bippy.Bippy[foo.unrelated.Unrelated]
+
+  def f(x: Dingo): AnyRef = x   // fail - no conversion to AnyRef
+  var x: Int = null             // fail - no conversion from Null
+}
diff --git a/test/files/neg/t6902.check b/test/files/neg/t6902.check
index 8ad7fd3..ed0ed75 100644
--- a/test/files/neg/t6902.check
+++ b/test/files/neg/t6902.check
@@ -1,10 +1,12 @@
-t6902.scala:4: error: unreachable code
+t6902.scala:4: warning: unreachable code
     case Some(b) => 3 // no warning was emitted
                     ^
-t6902.scala:9: error: unreachable code
+t6902.scala:9: warning: unreachable code
     case Some(b) => 3 // no warning was emitted
                     ^
-t6902.scala:21: error: unreachable code
+t6902.scala:21: warning: unreachable code
     case 1 => 3 // crash
               ^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/t692.check b/test/files/neg/t692.check
index 4149366..0ca9971 100644
--- a/test/files/neg/t692.check
+++ b/test/files/neg/t692.check
@@ -11,7 +11,7 @@ t692.scala:13: error: class Foo takes type parameters
   case class BarType[T3 <: Foo](tpeT : RefType[T3]) extends ClassType[Bar[T3],Foo](FooType);
                            ^
 t692.scala:14: error: class Foo takes type parameters
-  implicit def typeOfBar[T4 <: Foo](implicit elem : RefType[T4]) : RefType[Bar[T4]] = 
+  implicit def typeOfBar[T4 <: Foo](implicit elem : RefType[T4]) : RefType[Bar[T4]] =
                                ^
 t692.scala:19: error: class Foo takes type parameters
   class Bar[A <: Foo](implicit tpeA : Type[A]) extends Foo;
diff --git a/test/files/neg/t692.scala b/test/files/neg/t692.scala
index f230a6b..24e1d2f 100644
--- a/test/files/neg/t692.scala
+++ b/test/files/neg/t692.scala
@@ -5,15 +5,15 @@ abstract class test3 {
   abstract class RefType[C <: AnyRef] extends Type[C];
   case class ObjectType() extends RefType[AnyRef];
   abstract class ClassType[C <: Z, Z <: AnyRef](zuper : RefType[Z]) extends RefType[C];
-  
+
 
   case class FooType() extends ClassType[Foo,AnyRef](ObjectType());
   implicit def typeOfFoo = FooType();
 
   case class BarType[T3 <: Foo](tpeT : RefType[T3]) extends ClassType[Bar[T3],Foo](FooType);
-  implicit def typeOfBar[T4 <: Foo](implicit elem : RefType[T4]) : RefType[Bar[T4]] = 
+  implicit def typeOfBar[T4 <: Foo](implicit elem : RefType[T4]) : RefType[Bar[T4]] =
     BarType(elem);
-  
+
 
   class Foo[A <: AnyRef];
   class Bar[A <: Foo](implicit tpeA : Type[A]) extends Foo;
diff --git a/test/files/neg/t6920.check b/test/files/neg/t6920.check
new file mode 100644
index 0000000..ee4eafb
--- /dev/null
+++ b/test/files/neg/t6920.check
@@ -0,0 +1,6 @@
+t6920.scala:9: error: too many arguments for method applyDynamicNamed: (values: Seq[(String, Any)])String
+error after rewriting to CompilerError.this.test.applyDynamicNamed("crushTheCompiler")(scala.Tuple2("a", 1), scala.Tuple2("b", 2))
+possible cause: maybe a wrong Dynamic method signature?
+  test.crushTheCompiler(a = 1, b = 2)
+                       ^
+one error found
diff --git a/test/files/neg/t6920.scala b/test/files/neg/t6920.scala
new file mode 100644
index 0000000..b79d641
--- /dev/null
+++ b/test/files/neg/t6920.scala
@@ -0,0 +1,10 @@
+import scala.language.dynamics
+
+class DynTest extends Dynamic {
+  def applyDynamicNamed(name: String)(values: Seq[(String, Any)]) = "test"
+}
+
+class CompilerError {
+  val test = new DynTest
+  test.crushTheCompiler(a = 1, b = 2)
+}
\ No newline at end of file
diff --git a/test/files/neg/t693.check b/test/files/neg/t693.check
index 62325b5..25bd141 100644
--- a/test/files/neg/t693.check
+++ b/test/files/neg/t693.check
@@ -1,4 +1,4 @@
 t693.scala:4: error: x is already defined as value x
-  val x : Int = 10; 
+  val x : Int = 10;
       ^
 one error found
diff --git a/test/files/neg/t693.scala b/test/files/neg/t693.scala
index d2074be..3a9e624 100644
--- a/test/files/neg/t693.scala
+++ b/test/files/neg/t693.scala
@@ -1,5 +1,5 @@
 abstract class test4 {
   trait Type;
   val x : Type = null;
-  val x : Int = 10; 
+  val x : Int = 10;
 }
diff --git a/test/files/neg/t6931.check b/test/files/neg/t6931.check
new file mode 100644
index 0000000..7cf804a
--- /dev/null
+++ b/test/files/neg/t6931.check
@@ -0,0 +1,10 @@
+Test_2.scala:3: error: 1
+  err"123"
+      ^
+Test_2.scala:3: error: 2
+  err"123"
+       ^
+Test_2.scala:3: error: 3
+  err"123"
+        ^
+three errors found
diff --git a/test/files/neg/t6931/Macros_1.scala b/test/files/neg/t6931/Macros_1.scala
new file mode 100644
index 0000000..56da075
--- /dev/null
+++ b/test/files/neg/t6931/Macros_1.scala
@@ -0,0 +1,15 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  implicit class Error(ctx: StringContext) {
+    def err(args: Any*): Unit = macro impl
+  }
+
+  def impl(c: Context)(args: c.Tree*): c.Tree = {
+    import c.universe._
+    val q"Macros.Error(scala.StringContext.apply($arg)).err()" = c.macroApplication
+    for (i <- 1 to 3) c.error(arg.pos.withPoint(arg.pos.point + i - 1), i.toString)
+    q"()"
+  }
+}
\ No newline at end of file
diff --git a/test/files/neg/t6931/Test_2.scala b/test/files/neg/t6931/Test_2.scala
new file mode 100644
index 0000000..6a6f645
--- /dev/null
+++ b/test/files/neg/t6931/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  import Macros._
+  err"123"
+}
\ No newline at end of file
diff --git a/test/files/neg/t696.check b/test/files/neg/t696.check
new file mode 100644
index 0000000..b7bc5cd
--- /dev/null
+++ b/test/files/neg/t696.check
@@ -0,0 +1,9 @@
+t696.scala:5: error: diverging implicit expansion for type TypeUtil0.Type[Any]
+starting with method WithType in object TypeUtil0
+  as[Any](null)
+         ^
+t696.scala:6: error: diverging implicit expansion for type TypeUtil0.Type[X]
+starting with method WithType in object TypeUtil0
+  def foo[X]() = as[X](null)
+                      ^
+two errors found
diff --git a/test/files/neg/t696b.scala b/test/files/neg/t696.scala
similarity index 100%
rename from test/files/neg/t696b.scala
rename to test/files/neg/t696.scala
diff --git a/test/files/neg/t6963a.check b/test/files/neg/t6963a.check
index 159896f..5858e77 100644
--- a/test/files/neg/t6963a.check
+++ b/test/files/neg/t6963a.check
@@ -1,5 +1,7 @@
-t6963a.scala:4: error: method scanRight in trait TraversableLike has changed semantics in version 2.9.0:
+t6963a.scala:4: warning: method scanRight in trait TraversableLike has changed semantics in version 2.9.0:
 The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.
   List(1,2,3,4,5).scanRight(0)(_+_)
                   ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
 one error found
diff --git a/test/files/neg/t6963a.scala b/test/files/neg/t6963a.scala
index b3366b2..6808e54 100644
--- a/test/files/neg/t6963a.scala
+++ b/test/files/neg/t6963a.scala
@@ -1,5 +1,5 @@
 object Test {
   import scala.collection.mutable._
-  
+
   List(1,2,3,4,5).scanRight(0)(_+_)
 }
diff --git a/test/files/neg/t6963b.check b/test/files/neg/t6963b.check
deleted file mode 100644
index 7e205a4..0000000
--- a/test/files/neg/t6963b.check
+++ /dev/null
@@ -1,13 +0,0 @@
-t6963b.scala:2: error: An Array will no longer match as Seq[_].
-  def f1(x: Any) = x.isInstanceOf[Seq[_]]
-                                 ^
-t6963b.scala:4: error: An Array will no longer match as Seq[_].
-    case _: Seq[_]  => true
-          ^
-t6963b.scala:16: error: An Array will no longer match as Seq[_].
-    case (Some(_: Seq[_]), Nil, _)        => 1
-                ^
-t6963b.scala:17: error: An Array will no longer match as Seq[_].
-    case (None, List(_: List[_], _), _)   => 2
-                    ^
-four errors found
diff --git a/test/files/neg/t6963b.flags b/test/files/neg/t6963b.flags
deleted file mode 100644
index 83caa2b..0000000
--- a/test/files/neg/t6963b.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xmigration:2.7 -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t6963b.scala b/test/files/neg/t6963b.scala
deleted file mode 100644
index 3cfa8f0..0000000
--- a/test/files/neg/t6963b.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-object Test {
-  def f1(x: Any) = x.isInstanceOf[Seq[_]]
-  def f2(x: Any) = x match {
-    case _: Seq[_]  => true
-    case _          => false
-  }
-
-  def f3(x: Any) = x match {
-    case _: Array[_]  => true
-    case _            => false
-  }
-  
-  def f4(x: Any) = x.isInstanceOf[Traversable[_]]
-  
-  def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match {
-    case (Some(_: Seq[_]), Nil, _)        => 1
-    case (None, List(_: List[_], _), _)   => 2
-    case _                                => 3
-  }
-}
diff --git a/test/files/neg/t696a.check b/test/files/neg/t696a.check
deleted file mode 100644
index 490fc1a..0000000
--- a/test/files/neg/t696a.check
+++ /dev/null
@@ -1,5 +0,0 @@
-t696a.scala:4: error: diverging implicit expansion for type TypeUtil0.Type[Any]
-starting with method WithType in object TypeUtil0
-  as[Any](null);
-         ^
-one error found
diff --git a/test/files/neg/t696a.scala b/test/files/neg/t696a.scala
deleted file mode 100644
index a06a321..0000000
--- a/test/files/neg/t696a.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object TypeUtil0 {
-  trait Type[+T];
-  implicit def WithType[S,T](implicit tpeS : Type[S], tpeT : Type[T]) : Type[S with T] = null
-  as[Any](null);
-  def as[T](x : Any)(implicit tpe : Type[T]) = null;
-}
diff --git a/test/files/neg/t696b.check b/test/files/neg/t696b.check
deleted file mode 100644
index fcdb544..0000000
--- a/test/files/neg/t696b.check
+++ /dev/null
@@ -1,9 +0,0 @@
-t696b.scala:5: error: diverging implicit expansion for type TypeUtil0.Type[Any]
-starting with method WithType in object TypeUtil0
-  as[Any](null)
-         ^
-t696b.scala:6: error: diverging implicit expansion for type TypeUtil0.Type[X]
-starting with method WithType in object TypeUtil0
-  def foo[X]() = as[X](null)
-                      ^
-two errors found
diff --git a/test/files/neg/t696b.flags b/test/files/neg/t696b.flags
deleted file mode 100644
index d564f2b..0000000
--- a/test/files/neg/t696b.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xdivergence211
\ No newline at end of file
diff --git a/test/files/neg/t7007.check b/test/files/neg/t7007.check
new file mode 100644
index 0000000..e22ecb9
--- /dev/null
+++ b/test/files/neg/t7007.check
@@ -0,0 +1,7 @@
+t7007.scala:5: error: Implementation restriction: <$anon: A => B> requires premature access to class Crash.
+  def this(a: Seq[A]) = this(a.collect{ case b: B => b}, a.collect{ case b: B => b})
+                                      ^
+t7007.scala:5: error: Implementation restriction: <$anon: A => B> requires premature access to class Crash.
+  def this(a: Seq[A]) = this(a.collect{ case b: B => b}, a.collect{ case b: B => b})
+                                                                  ^
+two errors found
diff --git a/test/files/neg/t7007.scala b/test/files/neg/t7007.scala
new file mode 100644
index 0000000..e41dccf
--- /dev/null
+++ b/test/files/neg/t7007.scala
@@ -0,0 +1,14 @@
+class A
+class B extends A
+
+class Crash(b1: Seq[B], b2: Seq[B]) {
+  def this(a: Seq[A]) = this(a.collect{ case b: B => b}, a.collect{ case b: B => b})
+}
+
+object Main extends App {
+
+  // runtime exception with either constructor
+  val c1 = new Crash(Seq(new B, new B))
+  val c2 = new Crash(Seq(new B), Seq(new B))
+
+}
diff --git a/test/files/neg/t7020.check b/test/files/neg/t7020.check
new file mode 100644
index 0000000..76390b2
--- /dev/null
+++ b/test/files/neg/t7020.check
@@ -0,0 +1,19 @@
+t7020.scala:3: warning: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(_, _)
+  List(5) match {
+      ^
+t7020.scala:10: warning: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(_, _)
+  List(5) match {
+      ^
+t7020.scala:17: warning: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(_, _)
+  List(5) match {
+      ^
+t7020.scala:24: warning: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(_, _)
+  List(5) match {
+      ^
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/disabled/t7020.flags b/test/files/neg/t7020.flags
similarity index 100%
copy from test/files/disabled/t7020.flags
copy to test/files/neg/t7020.flags
diff --git a/test/files/disabled/t7020.scala b/test/files/neg/t7020.scala
similarity index 100%
rename from test/files/disabled/t7020.scala
rename to test/files/neg/t7020.scala
diff --git a/test/files/neg/t7110.check b/test/files/neg/t7110.check
new file mode 100644
index 0000000..e484dc4
--- /dev/null
+++ b/test/files/neg/t7110.check
@@ -0,0 +1,6 @@
+t7110.scala:2: warning: A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.
+  try { ??? } // warn
+  ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/disabled/t7020.flags b/test/files/neg/t7110.flags
similarity index 100%
copy from test/files/disabled/t7020.flags
copy to test/files/neg/t7110.flags
diff --git a/test/files/neg/t7110.scala b/test/files/neg/t7110.scala
new file mode 100644
index 0000000..79ac325
--- /dev/null
+++ b/test/files/neg/t7110.scala
@@ -0,0 +1,6 @@
+object Test {
+  try { ??? } // warn
+
+  try { ??? } finally ??? // no warn
+  try { ??? } catch { case _: Throwable => } // no warn
+}
diff --git a/test/files/neg/t712.check b/test/files/neg/t712.check
index 6819dc0..831e943 100644
--- a/test/files/neg/t712.check
+++ b/test/files/neg/t712.check
@@ -1,4 +1,5 @@
 t712.scala:10: error: value self is not a member of B.this.ParentImpl
+ Note: implicit method coerce is not applicable here because it comes after the application point and it lacks an explicit result type
   implicit def coerce(p : ParentImpl) = p.self;
                                           ^
 one error found
diff --git a/test/files/neg/t715.scala b/test/files/neg/t715.scala
index a5ccd56..87b2525 100644
--- a/test/files/neg/t715.scala
+++ b/test/files/neg/t715.scala
@@ -1,4 +1,4 @@
-package test; 
+package test;
 trait B {
   type Node <: NodeImpl;
   trait NodeImpl {
diff --git a/test/files/neg/t7157.check b/test/files/neg/t7157.check
new file mode 100644
index 0000000..c6a7af9
--- /dev/null
+++ b/test/files/neg/t7157.check
@@ -0,0 +1,73 @@
+Test_2.scala:5: error: too many arguments for macro method m1_0_0: ()Unit
+  m1_0_0(1)
+        ^
+Test_2.scala:6: error: too many arguments for macro method m1_0_0: ()Unit
+  m1_0_0(1, 2)
+        ^
+Test_2.scala:7: error: too many arguments for macro method m1_0_0: ()Unit
+  m1_0_0(1, 2, 3)
+        ^
+Test_2.scala:9: error: macro applications do not support named and/or default arguments
+  m1_1_1()
+        ^
+Test_2.scala:11: error: too many arguments for macro method m1_1_1: (x: Int)Unit
+  m1_1_1(1, 2)
+        ^
+Test_2.scala:12: error: too many arguments for macro method m1_1_1: (x: Int)Unit
+  m1_1_1(1, 2, 3)
+        ^
+Test_2.scala:14: error: macro applications do not support named and/or default arguments
+  m1_2_2()
+        ^
+Test_2.scala:15: error: macro applications do not support named and/or default arguments
+  m1_2_2(1)
+        ^
+Test_2.scala:17: error: too many arguments for macro method m1_2_2: (x: Int, y: Int)Unit
+  m1_2_2(1, 2, 3)
+        ^
+Test_2.scala:24: error: macro applications do not support named and/or default arguments
+  m1_1_inf()
+          ^
+Test_2.scala:29: error: macro applications do not support named and/or default arguments
+  m1_2_inf()
+          ^
+Test_2.scala:30: error: macro applications do not support named and/or default arguments
+  m1_2_inf(1)
+          ^
+Test_2.scala:35: error: too many arguments for macro method m2_0_0: ()Unit
+  m2_0_0()(1)
+          ^
+Test_2.scala:36: error: too many arguments for macro method m2_0_0: ()Unit
+  m2_0_0()(1, 2)
+          ^
+Test_2.scala:37: error: too many arguments for macro method m2_0_0: ()Unit
+  m2_0_0()(1, 2, 3)
+          ^
+Test_2.scala:39: error: macro applications do not support named and/or default arguments
+  m2_1_1()()
+          ^
+Test_2.scala:41: error: too many arguments for macro method m2_1_1: (x: Int)Unit
+  m2_1_1()(1, 2)
+          ^
+Test_2.scala:42: error: too many arguments for macro method m2_1_1: (x: Int)Unit
+  m2_1_1()(1, 2, 3)
+          ^
+Test_2.scala:44: error: macro applications do not support named and/or default arguments
+  m2_2_2()()
+          ^
+Test_2.scala:45: error: macro applications do not support named and/or default arguments
+  m2_2_2()(1)
+          ^
+Test_2.scala:47: error: too many arguments for macro method m2_2_2: (x: Int, y: Int)Unit
+  m2_2_2()(1, 2, 3)
+          ^
+Test_2.scala:54: error: macro applications do not support named and/or default arguments
+  m2_1_inf()()
+            ^
+Test_2.scala:59: error: macro applications do not support named and/or default arguments
+  m2_2_inf()()
+            ^
+Test_2.scala:60: error: macro applications do not support named and/or default arguments
+  m2_2_inf()(1)
+            ^
+24 errors found
diff --git a/test/files/neg/t7157/Impls_Macros_1.scala b/test/files/neg/t7157/Impls_Macros_1.scala
new file mode 100644
index 0000000..31d4d78
--- /dev/null
+++ b/test/files/neg/t7157/Impls_Macros_1.scala
@@ -0,0 +1,32 @@
+import scala.reflect.macros.blackbox.Context
+import language.experimental.macros
+
+object Macros {
+  def impl1_0_0(c: Context)() = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") }
+  def impl1_1_1(c: Context)(x: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") }
+  def impl1_2_2(c: Context)(x: c.Expr[Int], y: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") }
+  def m1_0_0() = macro impl1_0_0
+  def m1_1_1(x: Int) = macro impl1_1_1
+  def m1_2_2(x: Int, y: Int) = macro impl1_2_2
+
+  def impl1_0_inf(c: Context)(x: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") }
+  def impl1_1_inf(c: Context)(x: c.Expr[Int], y: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") }
+  def impl1_2_inf(c: Context)(x: c.Expr[Int], y: c.Expr[Int], z: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") }
+  def m1_0_inf(x: Int*) = macro impl1_0_inf
+  def m1_1_inf(x: Int, y: Int*) = macro impl1_1_inf
+  def m1_2_inf(x: Int, y: Int, z: Int*) = macro impl1_2_inf
+
+  def impl2_0_0(c: Context)()() = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") }
+  def impl2_1_1(c: Context)()(x: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") }
+  def impl2_2_2(c: Context)()(x: c.Expr[Int], y: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") }
+  def m2_0_0()() = macro impl2_0_0
+  def m2_1_1()(x: Int) = macro impl2_1_1
+  def m2_2_2()(x: Int, y: Int) = macro impl2_2_2
+
+  def impl2_0_inf(c: Context)()(x: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") }
+  def impl2_1_inf(c: Context)()(x: c.Expr[Int], y: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") }
+  def impl2_2_inf(c: Context)()(x: c.Expr[Int], y: c.Expr[Int], z: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") }
+  def m2_0_inf()(x: Int*) = macro impl2_0_inf
+  def m2_1_inf()(x: Int, y: Int*) = macro impl2_1_inf
+  def m2_2_inf()(x: Int, y: Int, z: Int*) = macro impl2_2_inf
+}
\ No newline at end of file
diff --git a/test/files/neg/t7157/Test_2.scala b/test/files/neg/t7157/Test_2.scala
new file mode 100644
index 0000000..45a6026
--- /dev/null
+++ b/test/files/neg/t7157/Test_2.scala
@@ -0,0 +1,63 @@
+import Macros._
+
+object Test extends App {
+  m1_0_0()
+  m1_0_0(1)
+  m1_0_0(1, 2)
+  m1_0_0(1, 2, 3)
+
+  m1_1_1()
+  m1_1_1(1)
+  m1_1_1(1, 2)
+  m1_1_1(1, 2, 3)
+
+  m1_2_2()
+  m1_2_2(1)
+  m1_2_2(1, 2)
+  m1_2_2(1, 2, 3)
+
+  m1_0_inf()
+  m1_0_inf(1)
+  m1_0_inf(1, 2)
+  m1_0_inf(1, 2, 3)
+
+  m1_1_inf()
+  m1_1_inf(1)
+  m1_1_inf(1, 2)
+  m1_1_inf(1, 2, 3)
+
+  m1_2_inf()
+  m1_2_inf(1)
+  m1_2_inf(1, 2)
+  m1_2_inf(1, 2, 3)
+
+  m2_0_0()()
+  m2_0_0()(1)
+  m2_0_0()(1, 2)
+  m2_0_0()(1, 2, 3)
+
+  m2_1_1()()
+  m2_1_1()(1)
+  m2_1_1()(1, 2)
+  m2_1_1()(1, 2, 3)
+
+  m2_2_2()()
+  m2_2_2()(1)
+  m2_2_2()(1, 2)
+  m2_2_2()(1, 2, 3)
+
+  m2_0_inf()()
+  m2_0_inf()(1)
+  m2_0_inf()(1, 2)
+  m2_0_inf()(1, 2, 3)
+
+  m2_1_inf()()
+  m2_1_inf()(1)
+  m2_1_inf()(1, 2)
+  m2_1_inf()(1, 2, 3)
+
+  m2_2_inf()()
+  m2_2_inf()(1)
+  m2_2_inf()(1, 2)
+  m2_2_inf()(1, 2, 3)
+}
\ No newline at end of file
diff --git a/test/files/neg/t7166.check b/test/files/neg/t7166.check
deleted file mode 100644
index c87198c..0000000
--- a/test/files/neg/t7166.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Test_2.scala:2: error: silent = true does work!
-  println(implicitly[Complex[Foo]])
-                    ^
-one error found
diff --git a/test/files/neg/t7166/Impls_Macros_1.scala b/test/files/neg/t7166/Impls_Macros_1.scala
deleted file mode 100644
index 62a1565..0000000
--- a/test/files/neg/t7166/Impls_Macros_1.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-import scala.reflect.macros.Context
-import language.experimental.macros
-
-trait Complex[T]
-
-class Foo
-
-object Complex {
-  def impl[T: c.WeakTypeTag](c: Context): c.Expr[Complex[T]] = {
-    import c.universe._
-    def shout(msg: String) = {
-      val cannotShutMeUp = c.asInstanceOf[scala.reflect.macros.runtime.Context].universe.currentRun.currentUnit.error _
-      cannotShutMeUp(c.enclosingPosition.asInstanceOf[scala.reflect.internal.util.Position], msg)
-    }
-    try {
-      val complexOfT = appliedType(typeOf[Complex[_]], List(weakTypeOf[T]))
-      val infiniteRecursion = c.inferImplicitValue(complexOfT, silent = true)
-      shout("silent = true does work!")
-    } catch {
-      case ex: Exception => shout(ex.toString)
-    }
-    c.literalNull
-  }
-
-  implicit def genComplex[T]: Complex[T] = macro impl[T]
-}
diff --git a/test/files/neg/t7166/Test_2.scala b/test/files/neg/t7166/Test_2.scala
deleted file mode 100644
index dcc4593..0000000
--- a/test/files/neg/t7166/Test_2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends App {
-  println(implicitly[Complex[Foo]])
-}
\ No newline at end of file
diff --git a/test/files/neg/t7171.check b/test/files/neg/t7171.check
index 8bdf081..ecd768a 100644
--- a/test/files/neg/t7171.check
+++ b/test/files/neg/t7171.check
@@ -1,7 +1,6 @@
-t7171.scala:2: error: The outer reference in this type test cannot be checked at run time.
+t7171.scala:2: warning: The outer reference in this type test cannot be checked at run time.
   final case class A()
                    ^
-t7171.scala:9: error: The outer reference in this type test cannot be checked at run time.
-    case _: A => true; case _ => false
-          ^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t7171b.check b/test/files/neg/t7171b.check
index bd6b2bc..bf695af 100644
--- a/test/files/neg/t7171b.check
+++ b/test/files/neg/t7171b.check
@@ -1,10 +1,6 @@
-t7171b.scala:2: error: The outer reference in this type test cannot be checked at run time.
+t7171b.scala:2: warning: The outer reference in this type test cannot be checked at run time.
   final case class A()
                    ^
-t7171b.scala:8: error: The outer reference in this type test cannot be checked at run time.
-    case _: A => true; case _ => false
-          ^
-t7171b.scala:13: error: The outer reference in this type test cannot be checked at run time.
-    case _: A => true; case _ => false
-          ^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t7185.check b/test/files/neg/t7185.check
deleted file mode 100644
index 46f2cc7..0000000
--- a/test/files/neg/t7185.check
+++ /dev/null
@@ -1,7 +0,0 @@
-t7185.scala:2: error: overloaded method value apply with alternatives:
-  (f: scala.xml.Node => Boolean)scala.xml.NodeSeq <and>
-  (i: Int)scala.xml.Node
- cannot be applied to ()
-  <e></e>()
-   ^
-one error found
diff --git a/test/files/neg/t7185.scala b/test/files/neg/t7185.scala
deleted file mode 100644
index 2f9284b..0000000
--- a/test/files/neg/t7185.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test {
-  <e></e>()
-}
diff --git a/test/files/neg/t7214neg.check b/test/files/neg/t7214neg.check
new file mode 100644
index 0000000..291af04
--- /dev/null
+++ b/test/files/neg/t7214neg.check
@@ -0,0 +1,4 @@
+t7214neg.scala:28: error: not enough patterns for object Extractor offering Any: expected 1, found 0
+    case Extractor() =>
+         ^
+one error found
diff --git a/test/files/run/t7214.scala b/test/files/neg/t7214neg.scala
similarity index 100%
copy from test/files/run/t7214.scala
copy to test/files/neg/t7214neg.scala
diff --git a/test/files/neg/t7239.check b/test/files/neg/t7239.check
new file mode 100644
index 0000000..80b14f8
--- /dev/null
+++ b/test/files/neg/t7239.check
@@ -0,0 +1,4 @@
+t7239.scala:10: error: not found: value foBar
+    fooBar = foBar.toInt
+             ^
+one error found
diff --git a/test/files/neg/t7239.scala b/test/files/neg/t7239.scala
new file mode 100644
index 0000000..f62cac0
--- /dev/null
+++ b/test/files/neg/t7239.scala
@@ -0,0 +1,12 @@
+class Foo {
+  def toInt = 12
+}
+case class Bar( fooBar : Int )
+
+// spurious "erroneous or inaccessible type" error in 2.10.1
+class Test {
+  var fooBar : Foo = null
+  def build = Bar(
+    fooBar = foBar.toInt
+  )
+}
diff --git a/test/files/neg/t7285.check b/test/files/neg/t7285.check
index 108f429..a38772b 100644
--- a/test/files/neg/t7285.check
+++ b/test/files/neg/t7285.check
@@ -1,13 +1,15 @@
-t7285.scala:15: error: match may not be exhaustive.
+t7285.scala:15: warning: match may not be exhaustive.
 It would fail on the following input: (Up, Down)
       (d1, d2) match {
       ^
-t7285.scala:33: error: match may not be exhaustive.
+t7285.scala:33: warning: match may not be exhaustive.
 It would fail on the following input: Down
       (d1) match {
        ^
-t7285.scala:51: error: match may not be exhaustive.
+t7285.scala:51: warning: match may not be exhaustive.
 It would fail on the following input: (Up, Down)
     (d1, d2) match {
     ^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/t7290.check b/test/files/neg/t7290.check
index 85bedba..ad2d0e2 100644
--- a/test/files/neg/t7290.check
+++ b/test/files/neg/t7290.check
@@ -1,10 +1,12 @@
-t7290.scala:4: error: Pattern contains duplicate alternatives: 0
+t7290.scala:4: warning: Pattern contains duplicate alternatives: 0
     case 0 | 0 => 0
          ^
-t7290.scala:5: error: Pattern contains duplicate alternatives: 2, 3
+t7290.scala:5: warning: Pattern contains duplicate alternatives: 2, 3
     case 2 | 2 | 2 | 3 | 2 | 3 => 0
          ^
-t7290.scala:6: error: Pattern contains duplicate alternatives: 4
+t7290.scala:6: warning: Pattern contains duplicate alternatives: 4
     case 4 | (_ @ 4) => 0
          ^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/t7292-deprecation.check b/test/files/neg/t7292-deprecation.check
new file mode 100644
index 0000000..17f010d
--- /dev/null
+++ b/test/files/neg/t7292-deprecation.check
@@ -0,0 +1,12 @@
+t7292-deprecation.scala:2: warning: Octal escape literals are deprecated, use \u0000 instead.
+  val chr1 = '\0'
+              ^
+t7292-deprecation.scala:3: warning: Octal escape literals are deprecated, use \u0053 instead.
+  val str1 = "abc\123456"
+                 ^
+t7292-deprecation.scala:4: warning: Octal escape literals are deprecated, use \n instead.
+  val lf = '\012'
+            ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/t7292-deprecation.flags b/test/files/neg/t7292-deprecation.flags
new file mode 100644
index 0000000..7de3c0f
--- /dev/null
+++ b/test/files/neg/t7292-deprecation.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -deprecation
diff --git a/test/files/neg/t7292-deprecation.scala b/test/files/neg/t7292-deprecation.scala
new file mode 100644
index 0000000..d857f0e
--- /dev/null
+++ b/test/files/neg/t7292-deprecation.scala
@@ -0,0 +1,5 @@
+object OctalEscapes {
+  val chr1 = '\0'
+  val str1 = "abc\123456"
+  val lf = '\012'
+}
diff --git a/test/files/neg/t7292-removal.check b/test/files/neg/t7292-removal.check
new file mode 100644
index 0000000..1cd59b0
--- /dev/null
+++ b/test/files/neg/t7292-removal.check
@@ -0,0 +1,10 @@
+t7292-removal.scala:2: error: Octal escape literals are unsupported, use \u0000 instead.
+  val chr1 = '\0'
+              ^
+t7292-removal.scala:3: error: Octal escape literals are unsupported, use \u0053 instead.
+  val str1 = "abc\123456"
+                 ^
+t7292-removal.scala:4: error: Octal escape literals are unsupported, use \n instead.
+  val lf = '\012'
+            ^
+three errors found
diff --git a/test/files/neg/t7292-removal.flags b/test/files/neg/t7292-removal.flags
new file mode 100644
index 0000000..29f4ede
--- /dev/null
+++ b/test/files/neg/t7292-removal.flags
@@ -0,0 +1 @@
+-Xfuture
diff --git a/test/files/neg/t7292-removal.scala b/test/files/neg/t7292-removal.scala
new file mode 100644
index 0000000..d857f0e
--- /dev/null
+++ b/test/files/neg/t7292-removal.scala
@@ -0,0 +1,5 @@
+object OctalEscapes {
+  val chr1 = '\0'
+  val str1 = "abc\123456"
+  val lf = '\012'
+}
diff --git a/test/files/neg/t7294.check b/test/files/neg/t7294.check
new file mode 100644
index 0000000..f15289c
--- /dev/null
+++ b/test/files/neg/t7294.check
@@ -0,0 +1,6 @@
+t7294.scala:4: warning: fruitless type test: a value of type (Int, Int) cannot also be a Seq[A]
+  (1, 2) match { case Seq() => 0; case _ => 1 }
+                         ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t7294.flags b/test/files/neg/t7294.flags
new file mode 100644
index 0000000..3f3381a
--- /dev/null
+++ b/test/files/neg/t7294.flags
@@ -0,0 +1 @@
+-Xfuture -Xfatal-warnings
diff --git a/test/files/neg/t7294.scala b/test/files/neg/t7294.scala
new file mode 100644
index 0000000..335d071
--- /dev/null
+++ b/test/files/neg/t7294.scala
@@ -0,0 +1,5 @@
+object Test {
+  // Treat TupleN as final under -Xfuture for the purposes
+  // of the "fruitless type test" warning.
+  (1, 2) match { case Seq() => 0; case _ => 1 }
+}
diff --git a/test/files/neg/t7294b.check b/test/files/neg/t7294b.check
new file mode 100644
index 0000000..0033b72
--- /dev/null
+++ b/test/files/neg/t7294b.check
@@ -0,0 +1,6 @@
+t7294b.scala:1: warning: inheritance from class Tuple2 in package scala is deprecated: Tuples will be made final in a future version.
+class C extends Tuple2[Int, Int](0, 0)
+                ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t7294b.flags b/test/files/neg/t7294b.flags
new file mode 100644
index 0000000..d1b831e
--- /dev/null
+++ b/test/files/neg/t7294b.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t7294b.scala b/test/files/neg/t7294b.scala
new file mode 100644
index 0000000..2ab86a8
--- /dev/null
+++ b/test/files/neg/t7294b.scala
@@ -0,0 +1 @@
+class C extends Tuple2[Int, Int](0, 0)
\ No newline at end of file
diff --git a/test/files/neg/t7324.check b/test/files/neg/t7324.check
new file mode 100644
index 0000000..586947d
--- /dev/null
+++ b/test/files/neg/t7324.check
@@ -0,0 +1,4 @@
+t7324.scala:2: error: Platform restriction: a parameter list's length cannot exceed 254.
+class Bar(
+         ^
+one error found
diff --git a/test/files/neg/t7324.scala b/test/files/neg/t7324.scala
new file mode 100644
index 0000000..81d7674
--- /dev/null
+++ b/test/files/neg/t7324.scala
@@ -0,0 +1,57 @@
+object Bar extends App
+class Bar(
+_1: Int, _2: Int, _3: Int, _4: Int, _5: Int, _6: Int, _7: Int, _8: Int, _9: Int, _10: Int,
+_11: Int, _12: Int, _13: Int, _14: Int, _15: Int, _16: Int, _17: Int, _18: Int, _19: Int, _20: Int,
+_21: Int, _22: Int, _23: Int, _24: Int, _25: Int, _26: Int, _27: Int, _28: Int, _29: Int, _30: Int,
+_31: Int, _32: Int, _33: Int, _34: Int, _35: Int, _36: Int, _37: Int, _38: Int, _39: Int, _40: Int,
+_41: Int, _42: Int, _43: Int, _44: Int, _45: Int, _46: Int, _47: Int, _48: Int, _49: Int, _50: Int,
+_51: Int, _52: Int, _53: Int, _54: Int, _55: Int, _56: Int, _57: Int, _58: Int, _59: Int, _60: Int,
+_61: Int, _62: Int, _63: Int, _64: Int, _65: Int, _66: Int, _67: Int, _68: Int, _69: Int, _70: Int,
+_71: Int, _72: Int, _73: Int, _74: Int, _75: Int, _76: Int, _77: Int, _78: Int, _79: Int, _80: Int,
+_81: Int, _82: Int, _83: Int, _84: Int, _85: Int, _86: Int, _87: Int, _88: Int, _89: Int, _90: Int,
+_91: Int, _92: Int, _93: Int, _94: Int, _95: Int, _96: Int, _97: Int, _98: Int, _99: Int, _100: Int,
+_101: Int, _102: Int, _103: Int, _104: Int, _105: Int, _106: Int, _107: Int, _108: Int, _109: Int, _110: Int,
+_111: Int, _112: Int, _113: Int, _114: Int, _115: Int, _116: Int, _117: Int, _118: Int, _119: Int, _120: Int,
+_121: Int, _122: Int, _123: Int, _124: Int, _125: Int, _126: Int, _127: Int, _128: Int, _129: Int, _130: Int,
+_131: Int, _132: Int, _133: Int, _134: Int, _135: Int, _136: Int, _137: Int, _138: Int, _139: Int, _140: Int,
+_141: Int, _142: Int, _143: Int, _144: Int, _145: Int, _146: Int, _147: Int, _148: Int, _149: Int, _150: Int,
+_151: Int, _152: Int, _153: Int, _154: Int, _155: Int, _156: Int, _157: Int, _158: Int, _159: Int, _160: Int,
+_161: Int, _162: Int, _163: Int, _164: Int, _165: Int, _166: Int, _167: Int, _168: Int, _169: Int, _170: Int,
+_171: Int, _172: Int, _173: Int, _174: Int, _175: Int, _176: Int, _177: Int, _178: Int, _179: Int, _180: Int,
+_181: Int, _182: Int, _183: Int, _184: Int, _185: Int, _186: Int, _187: Int, _188: Int, _189: Int, _190: Int,
+_191: Int, _192: Int, _193: Int, _194: Int, _195: Int, _196: Int, _197: Int, _198: Int, _199: Int, _200: Int,
+_201: Int, _202: Int, _203: Int, _204: Int, _205: Int, _206: Int, _207: Int, _208: Int, _209: Int, _210: Int,
+_211: Int, _212: Int, _213: Int, _214: Int, _215: Int, _216: Int, _217: Int, _218: Int, _219: Int, _220: Int,
+_221: Int, _222: Int, _223: Int, _224: Int, _225: Int, _226: Int, _227: Int, _228: Int, _229: Int, _230: Int,
+_231: Int, _232: Int, _233: Int, _234: Int, _235: Int, _236: Int, _237: Int, _238: Int, _239: Int, _240: Int,
+_241: Int, _242: Int, _243: Int, _244: Int, _245: Int, _246: Int, _247: Int, _248: Int, _249: Int, _250: Int,
+_251: Int, _252: Int, _253: Int, _254: Int, _255: Int
+)
+
+class BarOK(
+_1: Int, _2: Int, _3: Int, _4: Int, _5: Int, _6: Int, _7: Int, _8: Int, _9: Int, _10: Int,
+_11: Int, _12: Int, _13: Int, _14: Int, _15: Int, _16: Int, _17: Int, _18: Int, _19: Int, _20: Int,
+_21: Int, _22: Int, _23: Int, _24: Int, _25: Int, _26: Int, _27: Int, _28: Int, _29: Int, _30: Int,
+_31: Int, _32: Int, _33: Int, _34: Int, _35: Int, _36: Int, _37: Int, _38: Int, _39: Int, _40: Int,
+_41: Int, _42: Int, _43: Int, _44: Int, _45: Int, _46: Int, _47: Int, _48: Int, _49: Int, _50: Int,
+_51: Int, _52: Int, _53: Int, _54: Int, _55: Int, _56: Int, _57: Int, _58: Int, _59: Int, _60: Int,
+_61: Int, _62: Int, _63: Int, _64: Int, _65: Int, _66: Int, _67: Int, _68: Int, _69: Int, _70: Int,
+_71: Int, _72: Int, _73: Int, _74: Int, _75: Int, _76: Int, _77: Int, _78: Int, _79: Int, _80: Int,
+_81: Int, _82: Int, _83: Int, _84: Int, _85: Int, _86: Int, _87: Int, _88: Int, _89: Int, _90: Int,
+_91: Int, _92: Int, _93: Int, _94: Int, _95: Int, _96: Int, _97: Int, _98: Int, _99: Int, _100: Int,
+_101: Int, _102: Int, _103: Int, _104: Int, _105: Int, _106: Int, _107: Int, _108: Int, _109: Int, _110: Int,
+_111: Int, _112: Int, _113: Int, _114: Int, _115: Int, _116: Int, _117: Int, _118: Int, _119: Int, _120: Int,
+_121: Int, _122: Int, _123: Int, _124: Int, _125: Int, _126: Int, _127: Int, _128: Int, _129: Int, _130: Int,
+_131: Int, _132: Int, _133: Int, _134: Int, _135: Int, _136: Int, _137: Int, _138: Int, _139: Int, _140: Int,
+_141: Int, _142: Int, _143: Int, _144: Int, _145: Int, _146: Int, _147: Int, _148: Int, _149: Int, _150: Int,
+_151: Int, _152: Int, _153: Int, _154: Int, _155: Int, _156: Int, _157: Int, _158: Int, _159: Int, _160: Int,
+_161: Int, _162: Int, _163: Int, _164: Int, _165: Int, _166: Int, _167: Int, _168: Int, _169: Int, _170: Int,
+_171: Int, _172: Int, _173: Int, _174: Int, _175: Int, _176: Int, _177: Int, _178: Int, _179: Int, _180: Int,
+_181: Int, _182: Int, _183: Int, _184: Int, _185: Int, _186: Int, _187: Int, _188: Int, _189: Int, _190: Int,
+_191: Int, _192: Int, _193: Int, _194: Int, _195: Int, _196: Int, _197: Int, _198: Int, _199: Int, _200: Int,
+_201: Int, _202: Int, _203: Int, _204: Int, _205: Int, _206: Int, _207: Int, _208: Int, _209: Int, _210: Int,
+_211: Int, _212: Int, _213: Int, _214: Int, _215: Int, _216: Int, _217: Int, _218: Int, _219: Int, _220: Int,
+_221: Int, _222: Int, _223: Int, _224: Int, _225: Int, _226: Int, _227: Int, _228: Int, _229: Int, _230: Int,
+_231: Int, _232: Int, _233: Int, _234: Int, _235: Int, _236: Int, _237: Int, _238: Int, _239: Int, _240: Int,
+_241: Int, _242: Int, _243: Int, _244: Int, _245: Int, _246: Int, _247: Int, _248: Int, _249: Int, _250: Int,
+_251: Int, _252: Int, _253: Int, _254: Int)
diff --git a/test/files/neg/t7325.check b/test/files/neg/t7325.check
index 709ab6d..61c33f9 100644
--- a/test/files/neg/t7325.check
+++ b/test/files/neg/t7325.check
@@ -1,19 +1,19 @@
-t7325.scala:2: error: percent signs not directly following splicees must be escaped
+t7325.scala:2: error: Missing conversion operator in '%'; use %% for literal %, %n for newline
   println(f"%")
             ^
-t7325.scala:4: error: percent signs not directly following splicees must be escaped
+t7325.scala:4: error: Missing conversion operator in '%'; use %% for literal %, %n for newline
   println(f"%%%")
               ^
-t7325.scala:6: error: percent signs not directly following splicees must be escaped
+t7325.scala:6: error: Missing conversion operator in '%'; use %% for literal %, %n for newline
   println(f"%%%%%")
                 ^
-t7325.scala:16: error: wrong conversion string
+t7325.scala:16: error: Missing conversion operator in '%'; use %% for literal %, %n for newline
   println(f"${0}%")
                 ^
-t7325.scala:19: error: percent signs not directly following splicees must be escaped
+t7325.scala:19: error: conversions must follow a splice; use %% for literal %, %n for newline
   println(f"${0}%%%d")
                   ^
-t7325.scala:21: error: percent signs not directly following splicees must be escaped
+t7325.scala:21: error: conversions must follow a splice; use %% for literal %, %n for newline
   println(f"${0}%%%%%d")
                     ^
 6 errors found
diff --git a/test/files/neg/t7369.check b/test/files/neg/t7369.check
index 4f101e1..a4e99f4 100644
--- a/test/files/neg/t7369.check
+++ b/test/files/neg/t7369.check
@@ -1,13 +1,15 @@
-t7369.scala:6: error: unreachable code
+t7369.scala:6: warning: unreachable code
     case Tuple1(X) => // unreachable
                    ^
-t7369.scala:13: error: unreachable code
+t7369.scala:13: warning: unreachable code
     case Tuple1(true) => // unreachable
                       ^
-t7369.scala:31: error: unreachable code
+t7369.scala:31: warning: unreachable code
     case Tuple1(X) => // unreachable
                    ^
-t7369.scala:40: error: unreachable code
+t7369.scala:40: warning: unreachable code
     case Tuple1(null) => // unreachable
                       ^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/t7475c.check b/test/files/neg/t7475c.check
new file mode 100644
index 0000000..4728081
--- /dev/null
+++ b/test/files/neg/t7475c.check
@@ -0,0 +1,7 @@
+t7475c.scala:6: error: value a is not a member of A.this.B
+    println(this.a)     // wait, what?
+                 ^
+t7475c.scala:7: error: value b is not a member of A.this.B
+    println(this.b)     // wait, what?
+                 ^
+two errors found
diff --git a/test/files/neg/t7475c.scala b/test/files/neg/t7475c.scala
new file mode 100644
index 0000000..cd4a876
--- /dev/null
+++ b/test/files/neg/t7475c.scala
@@ -0,0 +1,9 @@
+class A {
+  private val a: Int = 0
+  private[this] val b: Int = 0
+  class B extends A {
+    def foo(a: A) = a.a // okay
+    println(this.a)     // wait, what?
+    println(this.b)     // wait, what?
+  }
+}
diff --git a/test/files/neg/t7475d.check b/test/files/neg/t7475d.check
new file mode 100644
index 0000000..6bd1da0
--- /dev/null
+++ b/test/files/neg/t7475d.check
@@ -0,0 +1,7 @@
+t7475d.scala:4: error: value priv is not a member of T.this.TT
+  (??? : TT).priv
+             ^
+t7475d.scala:10: error: value priv is not a member of U.this.UU
+  (??? : UU).priv
+             ^
+two errors found
diff --git a/test/files/neg/t7475e.check b/test/files/neg/t7475e.check
new file mode 100644
index 0000000..48af2be
--- /dev/null
+++ b/test/files/neg/t7475e.check
@@ -0,0 +1,4 @@
+t7475e.scala:8: error: value priv is not a member of Base.this.TT
+  (??? : TT).priv
+             ^
+one error found
diff --git a/test/files/neg/t7475e.scala b/test/files/neg/t7475e.scala
new file mode 100644
index 0000000..e5c4877
--- /dev/null
+++ b/test/files/neg/t7475e.scala
@@ -0,0 +1,12 @@
+trait U {
+}
+
+trait Base {
+  private val priv = 0
+
+  type TT = U with T // should exclude `priv`
+  (??? : TT).priv
+}
+
+trait T extends Base {
+}
diff --git a/test/files/neg/t7475f.check b/test/files/neg/t7475f.check
new file mode 100644
index 0000000..a07a448
--- /dev/null
+++ b/test/files/neg/t7475f.check
@@ -0,0 +1,10 @@
+t7475f.scala:12: error: method c1 in class C cannot be accessed in C[T]
+  c1 // a member, but inaccessible.
+  ^
+t7475f.scala:13: error: not found: value c2
+  c2 // a member, but inaccessible.
+  ^
+t7475f.scala:26: error: value d2 is not a member of D[Any]
+    other.d2 // not a member
+          ^
+three errors found
diff --git a/test/files/neg/t7475f.scala b/test/files/neg/t7475f.scala
new file mode 100644
index 0000000..6c5fead
--- /dev/null
+++ b/test/files/neg/t7475f.scala
@@ -0,0 +1,28 @@
+class C[T] extends D[T] {
+  private def c1 = 0
+  private[this] def c2 = 0
+}
+
+trait D[T] {
+  self: C[T] =>
+
+  private def d1 = 0
+  private[this] def d2 = 0
+
+  c1 // a member, but inaccessible.
+  c2 // a member, but inaccessible.
+
+  d1 // okay
+  d2 // okay
+
+
+  class C {
+    d1
+    d2
+  }
+
+  def x(other: D[Any]) {
+    other.d1
+    other.d2 // not a member
+  }
+}
diff --git a/test/pending/neg/plugin-after-terminal.check b/test/files/neg/t7494-after-terminal.check
similarity index 100%
rename from test/pending/neg/plugin-after-terminal.check
rename to test/files/neg/t7494-after-terminal.check
diff --git a/test/files/neg/t7494-after-terminal/ThePlugin.scala b/test/files/neg/t7494-after-terminal/ThePlugin.scala
new file mode 100644
index 0000000..f3c9130
--- /dev/null
+++ b/test/files/neg/t7494-after-terminal/ThePlugin.scala
@@ -0,0 +1,31 @@
+package scala.test.plugins
+
+import scala.tools.nsc
+import nsc.Global
+import nsc.Phase
+import nsc.plugins.Plugin
+import nsc.plugins.PluginComponent
+
+class ThePlugin(val global: Global) extends Plugin {
+  import global._
+
+  val name = "afterterminal"
+  val description = "Declares one plugin that wants to be after the terminal phase"
+  val components = List[PluginComponent](thePhase)
+
+  private object thePhase extends PluginComponent {
+    val global = ThePlugin.this.global
+
+    val runsAfter = List[String]("terminal")
+
+    val phaseName = ThePlugin.this.name
+
+    def newPhase(prev: Phase) = new ThePhase(prev)
+  }
+
+  private class ThePhase(prev: Phase) extends Phase(prev) {
+    def name = ThePlugin.this.name
+    def run {}
+  }
+}
+
diff --git a/test/files/neg/t7494-after-terminal/sample_2.flags b/test/files/neg/t7494-after-terminal/sample_2.flags
new file mode 100644
index 0000000..b8a476e
--- /dev/null
+++ b/test/files/neg/t7494-after-terminal/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:afterterminal
diff --git a/test/files/neg/t7494-after-terminal/sample_2.scala b/test/files/neg/t7494-after-terminal/sample_2.scala
new file mode 100644
index 0000000..73cdc64
--- /dev/null
+++ b/test/files/neg/t7494-after-terminal/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t7494-after-terminal/scalac-plugin.xml b/test/files/neg/t7494-after-terminal/scalac-plugin.xml
new file mode 100644
index 0000000..2558d6f
--- /dev/null
+++ b/test/files/neg/t7494-after-terminal/scalac-plugin.xml
@@ -0,0 +1,5 @@
+<plugin>
+  <name>ignored</name>
+  <classname>scala.test.plugins.ThePlugin</classname>
+</plugin>
+
diff --git a/test/pending/neg/plugin-before-parser.check b/test/files/neg/t7494-before-parser.check
similarity index 100%
rename from test/pending/neg/plugin-before-parser.check
rename to test/files/neg/t7494-before-parser.check
diff --git a/test/files/neg/t7494-before-parser/ThePlugin.scala b/test/files/neg/t7494-before-parser/ThePlugin.scala
new file mode 100644
index 0000000..8714a55
--- /dev/null
+++ b/test/files/neg/t7494-before-parser/ThePlugin.scala
@@ -0,0 +1,32 @@
+package scala.test.plugins
+
+import scala.tools.nsc
+import nsc.Global
+import nsc.Phase
+import nsc.plugins.Plugin
+import nsc.plugins.PluginComponent
+
+class ThePlugin(val global: Global) extends Plugin {
+  import global._
+
+  val name = "beforeparser"
+  val description = "Declares one plugin that wants to be before the parser phase"
+  val components = List[PluginComponent](thePhase)
+
+  private object thePhase extends PluginComponent {
+    val global = ThePlugin.this.global
+
+    val runsAfter = List[String]()
+    override val runsBefore = List[String]("parser")
+
+    val phaseName = ThePlugin.this.name
+
+    def newPhase(prev: Phase) = new ThePhase(prev)
+  }
+
+  private class ThePhase(prev: Phase) extends Phase(prev) {
+    def name = ThePlugin.this.name
+    def run {}
+  }
+}
+
diff --git a/test/files/neg/t7494-before-parser/sample_2.flags b/test/files/neg/t7494-before-parser/sample_2.flags
new file mode 100644
index 0000000..0c92fc8
--- /dev/null
+++ b/test/files/neg/t7494-before-parser/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:beforeparser
diff --git a/test/files/neg/t7494-before-parser/sample_2.scala b/test/files/neg/t7494-before-parser/sample_2.scala
new file mode 100644
index 0000000..73cdc64
--- /dev/null
+++ b/test/files/neg/t7494-before-parser/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/pending/neg/plugin-after-terminal/misc/scalac-plugin.xml b/test/files/neg/t7494-before-parser/scalac-plugin.xml
similarity index 100%
rename from test/pending/neg/plugin-after-terminal/misc/scalac-plugin.xml
rename to test/files/neg/t7494-before-parser/scalac-plugin.xml
diff --git a/test/files/neg/t7494-multi-right-after.check b/test/files/neg/t7494-multi-right-after.check
new file mode 100644
index 0000000..151d177
--- /dev/null
+++ b/test/files/neg/t7494-multi-right-after.check
@@ -0,0 +1 @@
+error: Multiple phases want to run right after explicitouter; followers: erasure,multi-rafter; created phase-order.dot
diff --git a/test/files/neg/t7494-multi-right-after/ThePlugin.scala b/test/files/neg/t7494-multi-right-after/ThePlugin.scala
new file mode 100644
index 0000000..4c76151
--- /dev/null
+++ b/test/files/neg/t7494-multi-right-after/ThePlugin.scala
@@ -0,0 +1,31 @@
+package scala.test.plugins
+
+import scala.tools.nsc
+import nsc.Global
+import nsc.Phase
+import nsc.plugins.Plugin
+import nsc.plugins.PluginComponent
+
+class ThePlugin(val global: Global) extends Plugin {
+  import global._
+
+  val name = "multi-rafter"
+  val description = ""
+  val components = List[PluginComponent](thePhase)
+
+  private object thePhase extends PluginComponent {
+    val global = ThePlugin.this.global
+
+    val runsAfter = List[String]()
+    override val runsRightAfter = Some("explicitouter")
+    val phaseName = ThePlugin.this.name
+
+    def newPhase(prev: Phase) = new ThePhase(prev)
+  }
+
+  private class ThePhase(prev: Phase) extends Phase(prev) {
+    def name = ThePlugin.this.name
+    def run {}
+  }
+}
+
diff --git a/test/files/neg/t7494-multi-right-after/sample_2.flags b/test/files/neg/t7494-multi-right-after/sample_2.flags
new file mode 100644
index 0000000..9273fb9
--- /dev/null
+++ b/test/files/neg/t7494-multi-right-after/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:multi-rafter
diff --git a/test/files/neg/t7494-multi-right-after/sample_2.scala b/test/files/neg/t7494-multi-right-after/sample_2.scala
new file mode 100644
index 0000000..73cdc64
--- /dev/null
+++ b/test/files/neg/t7494-multi-right-after/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t7494-multi-right-after/scalac-plugin.xml b/test/files/neg/t7494-multi-right-after/scalac-plugin.xml
new file mode 100644
index 0000000..2558d6f
--- /dev/null
+++ b/test/files/neg/t7494-multi-right-after/scalac-plugin.xml
@@ -0,0 +1,5 @@
+<plugin>
+  <name>ignored</name>
+  <classname>scala.test.plugins.ThePlugin</classname>
+</plugin>
+
diff --git a/test/files/neg/t7494-no-options.check b/test/files/neg/t7494-no-options.check
new file mode 100644
index 0000000..e3316f5
--- /dev/null
+++ b/test/files/neg/t7494-no-options.check
@@ -0,0 +1,40 @@
+error: Error: ploogin takes no options
+    phase name  id  description
+    ----------  --  -----------
+        parser   1  parse source into ASTs, perform simple desugaring
+         namer   2  resolve names, attach symbols to named trees
+packageobjects   3  load package objects
+         typer   4  the meat and potatoes: type the trees
+        patmat   5  translate match expressions
+superaccessors   6  add super accessors in traits and nested classes
+    extmethods   7  add extension methods for inline classes
+       pickler   8  serialize symbol tables
+     refchecks   9  reference/override checking, translate nested objects
+       uncurry  10  uncurry, translate function values to anonymous classes
+     tailcalls  11  replace tail calls by jumps
+    specialize  12  @specialized-driven class and method specialization
+ explicitouter  13  this refs to outer pointers
+       erasure  14  erase types, add interfaces for traits
+   posterasure  15  clean up erased inline classes
+      lazyvals  16  allocate bitmaps, translate lazy vals into lazified defs
+    lambdalift  17  move nested functions to top level
+  constructors  18  move field definitions into constructors
+       flatten  19  eliminate inner classes
+         mixin  20  mixin composition
+       cleanup  21  platform-specific cleanups, generate reflective calls
+    delambdafy  22  remove lambdas
+         icode  23  generate portable intermediate code
+#partest !-optimise
+           jvm  24  generate JVM bytecode
+       ploogin  25  A sample phase that does so many things it's kind of hard...
+      terminal  26  the last phase during a compilation run
+#partest -optimise
+       inliner  24  optimization: do inlining
+inlinehandlers  25  optimization: inline exception handlers
+      closelim  26  optimization: eliminate uncalled closures
+      constopt  27  optimization: optimize null and other constants
+           dce  28  optimization: eliminate dead code
+           jvm  29  generate JVM bytecode
+       ploogin  30  A sample phase that does so many things it's kind of hard...
+      terminal  31  the last phase during a compilation run
+#partest
diff --git a/test/files/neg/t7494-no-options/ploogin_1.scala b/test/files/neg/t7494-no-options/ploogin_1.scala
new file mode 100644
index 0000000..ed6adfc
--- /dev/null
+++ b/test/files/neg/t7494-no-options/ploogin_1.scala
@@ -0,0 +1,31 @@
+
+package t6446
+
+import scala.tools.nsc.{ Global, Phase }
+import scala.tools.nsc.plugins.{ Plugin, PluginComponent }
+import scala.reflect.io.Path
+import scala.reflect.io.File
+
+/** A test plugin.  */
+class Ploogin(val global: Global) extends Plugin {
+  import global._
+
+  val name = "ploogin"
+  val description = "A sample plugin for testing."
+  val components = List[PluginComponent](TestComponent)
+
+  private object TestComponent extends PluginComponent {
+    val global: Ploogin.this.global.type = Ploogin.this.global
+    //override val runsBefore = List("refchecks")
+    val runsAfter = List("jvm")
+    val phaseName = Ploogin.this.name
+    override def description = "A sample phase that does so many things it's kind of hard to describe briefly."
+    def newPhase(prev: Phase) = new TestPhase(prev)
+    class TestPhase(prev: Phase) extends StdPhase(prev) {
+      override def description = TestComponent.this.description
+      def apply(unit: CompilationUnit) {
+        // kewl kode
+      }
+    }
+  }
+}
diff --git a/test/files/neg/t7494-no-options/sample_2.flags b/test/files/neg/t7494-no-options/sample_2.flags
new file mode 100644
index 0000000..7f0f7af
--- /dev/null
+++ b/test/files/neg/t7494-no-options/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xshow-phases -P:ploogin:inploog
diff --git a/test/files/neg/t7494-no-options/sample_2.scala b/test/files/neg/t7494-no-options/sample_2.scala
new file mode 100644
index 0000000..73cdc64
--- /dev/null
+++ b/test/files/neg/t7494-no-options/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t7494-no-options/scalac-plugin.xml b/test/files/neg/t7494-no-options/scalac-plugin.xml
new file mode 100644
index 0000000..e849bb5
--- /dev/null
+++ b/test/files/neg/t7494-no-options/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+<name>sample-plugin</name>
+<classname>t6446.Ploogin</classname>
+</plugin>
diff --git a/test/files/neg/t7494-right-after-before.check b/test/files/neg/t7494-right-after-before.check
new file mode 100644
index 0000000..7e83daa
--- /dev/null
+++ b/test/files/neg/t7494-right-after-before.check
@@ -0,0 +1 @@
+error: Phase erasure can't follow explicitouter, created phase-order.dot
diff --git a/test/files/neg/t7494-right-after-before/ThePlugin.scala b/test/files/neg/t7494-right-after-before/ThePlugin.scala
new file mode 100644
index 0000000..c42a914
--- /dev/null
+++ b/test/files/neg/t7494-right-after-before/ThePlugin.scala
@@ -0,0 +1,31 @@
+package scala.test.plugins
+
+import scala.tools.nsc
+import nsc.Global
+import nsc.Phase
+import nsc.plugins.Plugin
+import nsc.plugins.PluginComponent
+
+class ThePlugin(val global: Global) extends Plugin {
+  import global._
+
+  val name = "rafter-before-1"
+  val description = ""
+  val components = List[PluginComponent](thePhase1)
+
+  private object thePhase1 extends PluginComponent {
+    val global = ThePlugin.this.global
+
+    val runsAfter = List[String]("refchecks")
+    override val runsBefore = List[String]("erasure")
+    val phaseName = ThePlugin.this.name
+
+    def newPhase(prev: Phase) = new ThePhase(prev)
+  }
+
+  private class ThePhase(prev: Phase) extends Phase(prev) {
+    def name = ThePlugin.this.name
+    def run {}
+  }
+}
+
diff --git a/test/files/neg/t7494-right-after-before/sample_2.flags b/test/files/neg/t7494-right-after-before/sample_2.flags
new file mode 100644
index 0000000..97d0f5b
--- /dev/null
+++ b/test/files/neg/t7494-right-after-before/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:rafter-before-1
diff --git a/test/files/neg/t7494-right-after-before/sample_2.scala b/test/files/neg/t7494-right-after-before/sample_2.scala
new file mode 100644
index 0000000..73cdc64
--- /dev/null
+++ b/test/files/neg/t7494-right-after-before/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t7494-right-after-before/scalac-plugin.xml b/test/files/neg/t7494-right-after-before/scalac-plugin.xml
new file mode 100644
index 0000000..2558d6f
--- /dev/null
+++ b/test/files/neg/t7494-right-after-before/scalac-plugin.xml
@@ -0,0 +1,5 @@
+<plugin>
+  <name>ignored</name>
+  <classname>scala.test.plugins.ThePlugin</classname>
+</plugin>
+
diff --git a/test/pending/neg/plugin-rightafter-terminal.check b/test/files/neg/t7494-right-after-terminal.check
similarity index 100%
rename from test/pending/neg/plugin-rightafter-terminal.check
rename to test/files/neg/t7494-right-after-terminal.check
diff --git a/test/files/neg/t7494-right-after-terminal/ThePlugin.scala b/test/files/neg/t7494-right-after-terminal/ThePlugin.scala
new file mode 100644
index 0000000..47dd06e
--- /dev/null
+++ b/test/files/neg/t7494-right-after-terminal/ThePlugin.scala
@@ -0,0 +1,32 @@
+package scala.test.plugins
+
+import scala.tools.nsc
+import nsc.Global
+import nsc.Phase
+import nsc.plugins.Plugin
+import nsc.plugins.PluginComponent
+
+class ThePlugin(val global: Global) extends Plugin {
+  import global._
+
+  val name = "rightafterterminal"
+  val description = "Declares one plugin that wants to be right after the terminal phase"
+  val components = List[PluginComponent](thePhase)
+
+  private object thePhase extends PluginComponent {
+    val global = ThePlugin.this.global
+
+    val runsAfter = List[String]()
+    override val runsRightAfter = Some("terminal")
+
+    val phaseName = ThePlugin.this.name
+
+    def newPhase(prev: Phase) = new ThePhase(prev)
+  }
+
+  private class ThePhase(prev: Phase) extends Phase(prev) {
+    def name = ThePlugin.this.name
+    def run {}
+  }
+}
+
diff --git a/test/files/neg/t7494-right-after-terminal/sample_2.flags b/test/files/neg/t7494-right-after-terminal/sample_2.flags
new file mode 100644
index 0000000..da046ba
--- /dev/null
+++ b/test/files/neg/t7494-right-after-terminal/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:rightafterterminal
diff --git a/test/files/neg/t7494-right-after-terminal/sample_2.scala b/test/files/neg/t7494-right-after-terminal/sample_2.scala
new file mode 100644
index 0000000..73cdc64
--- /dev/null
+++ b/test/files/neg/t7494-right-after-terminal/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t7494-right-after-terminal/scalac-plugin.xml b/test/files/neg/t7494-right-after-terminal/scalac-plugin.xml
new file mode 100644
index 0000000..2558d6f
--- /dev/null
+++ b/test/files/neg/t7494-right-after-terminal/scalac-plugin.xml
@@ -0,0 +1,5 @@
+<plugin>
+  <name>ignored</name>
+  <classname>scala.test.plugins.ThePlugin</classname>
+</plugin>
+
diff --git a/test/files/neg/t7501.check b/test/files/neg/t7501.check
new file mode 100644
index 0000000..2ded07c
--- /dev/null
+++ b/test/files/neg/t7501.check
@@ -0,0 +1,7 @@
+t7501_2.scala:2: error: value name is not a member of A
+  def foo(a: A) = a.name
+                    ^
+t7501_2.scala:4: error: not found: type X
+  type TP = X // already failed before this fix
+            ^
+two errors found
diff --git a/test/files/neg/t7501/t7501_1.scala b/test/files/neg/t7501/t7501_1.scala
new file mode 100644
index 0000000..323c327
--- /dev/null
+++ b/test/files/neg/t7501/t7501_1.scala
@@ -0,0 +1,12 @@
+object Test2 {
+ def test[X](name: String) = 12
+}
+class strangeTest(x: Int) extends scala.annotation.StaticAnnotation
+
+trait A {
+  // When picking the type of `test`, the value parameter
+  // `x` was pickled with the owner `trait A`. On unpickling,
+  // it was taken to be a member!
+  @strangeTest(Test2.test("test"))
+  def test(x: String): Unit
+}
diff --git a/test/files/neg/t7501/t7501_2.scala b/test/files/neg/t7501/t7501_2.scala
new file mode 100644
index 0000000..044caea
--- /dev/null
+++ b/test/files/neg/t7501/t7501_2.scala
@@ -0,0 +1,5 @@
+object Test {
+  def foo(a: A) = a.name
+
+  type TP = X // already failed before this fix
+}
diff --git a/test/files/neg/t7507.check b/test/files/neg/t7507.check
index d402869..de30fc7 100644
--- a/test/files/neg/t7507.check
+++ b/test/files/neg/t7507.check
@@ -1,4 +1,4 @@
-t7507.scala:6: error: value bippy in trait Cake cannot be accessed in Cake
+t7507.scala:6: error: not found: value bippy
   locally(bippy)
           ^
 one error found
diff --git a/test/files/neg/t7519-b.check b/test/files/neg/t7519-b.check
index ad554b8..bc8500b 100644
--- a/test/files/neg/t7519-b.check
+++ b/test/files/neg/t7519-b.check
@@ -1,4 +1,6 @@
-Use_2.scala:6: error: No implicit view available from String => K.
+Use_2.scala:8: error: type mismatch;
+ found   : String
+ required: Q
   val x: Q = ex.Mac.mac("asdf")
                        ^
 one error found
diff --git a/test/files/neg/t7519-b/Use_2.scala b/test/files/neg/t7519-b/Use_2.scala
index 413e40e..0d63eee 100644
--- a/test/files/neg/t7519-b/Use_2.scala
+++ b/test/files/neg/t7519-b/Use_2.scala
@@ -1,3 +1,5 @@
+import scala.language.implicitConversions
+
 trait Q
 trait K
 
diff --git a/test/files/neg/t7519.check b/test/files/neg/t7519.check
index 164d67f..df54aba 100644
--- a/test/files/neg/t7519.check
+++ b/test/files/neg/t7519.check
@@ -1,7 +1,11 @@
-t7519.scala:5: error: could not find implicit value for parameter nada: Nothing
+t7519.scala:5: error: type mismatch;
+ found   : Int(0)
+ required: String
     locally(0 : String) // was: "value conversion is not a member of C.this.C"
             ^
-t7519.scala:15: error: could not find implicit value for parameter nada: Nothing
+t7519.scala:15: error: type mismatch;
+ found   : Int(0)
+ required: String
       locally(0 : String) // was: "value conversion is not a member of U"
               ^
 two errors found
diff --git a/test/files/neg/t7605-deprecation.check b/test/files/neg/t7605-deprecation.check
new file mode 100644
index 0000000..6db9461
--- /dev/null
+++ b/test/files/neg/t7605-deprecation.check
@@ -0,0 +1,15 @@
+t7605-deprecation.scala:2: warning: Procedure syntax is deprecated. Convert procedure `bar` to method by adding `: Unit =`.
+  def bar {}
+          ^
+t7605-deprecation.scala:3: warning: Procedure syntax is deprecated. Convert procedure `baz` to method by adding `: Unit`.
+  def baz
+         ^
+t7605-deprecation.scala:4: warning: Procedure syntax is deprecated. Convert procedure `boo` to method by adding `: Unit`.
+  def boo(i: Int, l: Long)
+                          ^
+t7605-deprecation.scala:5: warning: Procedure syntax is deprecated. Convert procedure `boz` to method by adding `: Unit =`.
+  def boz(i: Int, l: Long) {}
+                           ^
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/t7605-deprecation.flags b/test/files/neg/t7605-deprecation.flags
new file mode 100644
index 0000000..0a7cb7d
--- /dev/null
+++ b/test/files/neg/t7605-deprecation.flags
@@ -0,0 +1 @@
+-deprecation -Xfuture -Xfatal-warnings
diff --git a/test/files/neg/t7605-deprecation.scala b/test/files/neg/t7605-deprecation.scala
new file mode 100644
index 0000000..2b3362f
--- /dev/null
+++ b/test/files/neg/t7605-deprecation.scala
@@ -0,0 +1,8 @@
+abstract class Foo {
+  def bar {}
+  def baz
+  def boo(i: Int, l: Long)
+  def boz(i: Int, l: Long) {}
+  def this(i: Int) { this() } // Don't complain here!
+  def foz: Unit               // Don't complain here!
+}
diff --git a/test/files/neg/t7622-cyclic-dependency.check b/test/files/neg/t7622-cyclic-dependency.check
new file mode 100644
index 0000000..3546964
--- /dev/null
+++ b/test/files/neg/t7622-cyclic-dependency.check
@@ -0,0 +1 @@
+error: Cycle in phase dependencies detected at cyclicdependency1, created phase-cycle.dot
diff --git a/test/files/neg/t7622-cyclic-dependency/ThePlugin.scala b/test/files/neg/t7622-cyclic-dependency/ThePlugin.scala
new file mode 100644
index 0000000..35c0ff8
--- /dev/null
+++ b/test/files/neg/t7622-cyclic-dependency/ThePlugin.scala
@@ -0,0 +1,40 @@
+package scala.test.plugins
+
+import scala.tools.nsc
+import nsc.Global
+import nsc.Phase
+import nsc.plugins.Plugin
+import nsc.plugins.PluginComponent
+
+class ThePlugin(val global: Global) extends Plugin {
+  import global._
+
+  val name = "cyclicdependency"
+  val description = "Declares two phases that have a cyclic dependency"
+  val components = List[PluginComponent](thePhase1,thePhase2)
+
+  private object thePhase1 extends PluginComponent {
+    val global = ThePlugin.this.global
+
+    val runsAfter = List[String]("tailcalls","cyclicdependency2")
+
+    val phaseName = ThePlugin.this.name + "1"
+
+    def newPhase(prev: Phase) = new ThePhase(prev, phaseName)
+  }
+
+  private object thePhase2 extends PluginComponent {
+    val global = ThePlugin.this.global
+
+    val runsAfter = List[String]("dce","cyclicdependency1")
+
+    val phaseName = ThePlugin.this.name + "2"
+
+    def newPhase(prev: Phase) = new ThePhase(prev, phaseName)
+  }
+
+  private class ThePhase(prev: Phase, val name: String) extends Phase(prev) {
+    def run {}
+  }
+}
+
diff --git a/test/files/neg/t7622-cyclic-dependency/sample_2.flags b/test/files/neg/t7622-cyclic-dependency/sample_2.flags
new file mode 100644
index 0000000..db25b88
--- /dev/null
+++ b/test/files/neg/t7622-cyclic-dependency/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:cyclicdependency
diff --git a/test/files/neg/t7622-cyclic-dependency/sample_2.scala b/test/files/neg/t7622-cyclic-dependency/sample_2.scala
new file mode 100644
index 0000000..73cdc64
--- /dev/null
+++ b/test/files/neg/t7622-cyclic-dependency/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t7622-cyclic-dependency/scalac-plugin.xml b/test/files/neg/t7622-cyclic-dependency/scalac-plugin.xml
new file mode 100644
index 0000000..2558d6f
--- /dev/null
+++ b/test/files/neg/t7622-cyclic-dependency/scalac-plugin.xml
@@ -0,0 +1,5 @@
+<plugin>
+  <name>ignored</name>
+  <classname>scala.test.plugins.ThePlugin</classname>
+</plugin>
+
diff --git a/test/files/neg/t7622-missing-dependency.check b/test/files/neg/t7622-missing-dependency.check
new file mode 100644
index 0000000..a0d0e30
--- /dev/null
+++ b/test/files/neg/t7622-missing-dependency.check
@@ -0,0 +1,2 @@
+error: Phase 'myplugin' requires: List(missing)
+one error found
diff --git a/test/files/neg/t7622-missing-dependency/ThePlugin.scala b/test/files/neg/t7622-missing-dependency/ThePlugin.scala
new file mode 100644
index 0000000..fa634a6
--- /dev/null
+++ b/test/files/neg/t7622-missing-dependency/ThePlugin.scala
@@ -0,0 +1,33 @@
+package scala.test.plugins
+
+import scala.tools.nsc
+import nsc.Global
+import nsc.Phase
+import nsc.plugins.Plugin
+import nsc.plugins.PluginComponent
+
+class ThePlugin(val global: Global) extends Plugin {
+  import global._
+
+  val name = "myplugin"
+  val description = "Declares one plugin with a missing requirement"
+  val components = List[PluginComponent](thePhase)
+
+  private object thePhase extends PluginComponent {
+    val global = ThePlugin.this.global
+
+    val runsAfter = List[String]("typer")
+
+    val phaseName = ThePlugin.this.name
+
+    override val requires = List("missing")
+
+    def newPhase(prev: Phase) = new ThePhase(prev)
+  }
+
+  private class ThePhase(prev: Phase) extends Phase(prev) {
+    def name = thePhase.phaseName
+    def run {}
+  }
+}
+
diff --git a/test/files/neg/t7622-missing-dependency/sample_2.flags b/test/files/neg/t7622-missing-dependency/sample_2.flags
new file mode 100644
index 0000000..d690351
--- /dev/null
+++ b/test/files/neg/t7622-missing-dependency/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:myplugin
diff --git a/test/files/neg/t7622-missing-dependency/sample_2.scala b/test/files/neg/t7622-missing-dependency/sample_2.scala
new file mode 100644
index 0000000..73cdc64
--- /dev/null
+++ b/test/files/neg/t7622-missing-dependency/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t7622-missing-dependency/scalac-plugin.xml b/test/files/neg/t7622-missing-dependency/scalac-plugin.xml
new file mode 100644
index 0000000..3c14061
--- /dev/null
+++ b/test/files/neg/t7622-missing-dependency/scalac-plugin.xml
@@ -0,0 +1,5 @@
+<plugin>
+  <name>myplugin</name>
+  <classname>scala.test.plugins.ThePlugin</classname>
+</plugin>
+
diff --git a/test/files/neg/t7622-missing-required.check b/test/files/neg/t7622-missing-required.check
new file mode 100644
index 0000000..5982178
--- /dev/null
+++ b/test/files/neg/t7622-missing-required.check
@@ -0,0 +1,2 @@
+error: Missing required plugin: special-plugin
+one error found
diff --git a/test/files/neg/t7622-missing-required.flags b/test/files/neg/t7622-missing-required.flags
new file mode 100644
index 0000000..65deac6
--- /dev/null
+++ b/test/files/neg/t7622-missing-required.flags
@@ -0,0 +1 @@
+-Xplugin-require:special-plugin
diff --git a/test/files/neg/t7622-missing-required.scala b/test/files/neg/t7622-missing-required.scala
new file mode 100644
index 0000000..a0ba487
--- /dev/null
+++ b/test/files/neg/t7622-missing-required.scala
@@ -0,0 +1,4 @@
+
+// the amazing features of this trait
+// are unlocked by compiling with a special plugin.
+trait Amazing
diff --git a/test/files/neg/t7622-multi-followers.check b/test/files/neg/t7622-multi-followers.check
new file mode 100644
index 0000000..d123853
--- /dev/null
+++ b/test/files/neg/t7622-multi-followers.check
@@ -0,0 +1 @@
+error: Multiple phases want to run right after parser; followers: multi1,multi2; created phase-order.dot
diff --git a/test/files/neg/t7622-multi-followers/ThePlugin.scala b/test/files/neg/t7622-multi-followers/ThePlugin.scala
new file mode 100644
index 0000000..cbd28d0
--- /dev/null
+++ b/test/files/neg/t7622-multi-followers/ThePlugin.scala
@@ -0,0 +1,44 @@
+package scala.test.plugins
+
+import scala.tools.nsc
+import nsc.Global
+import nsc.Phase
+import nsc.plugins.Plugin
+import nsc.plugins.PluginComponent
+
+class ThePlugin(val global: Global) extends Plugin {
+  import global._
+
+  val name = "multi"
+  val description = "Declares two phases that both follow parser"
+  val components = List[PluginComponent](thePhase1,thePhase2)
+
+  private object thePhase1 extends PluginComponent {
+    val global = ThePlugin.this.global
+
+    val runsAfter = List[String]()
+
+    override val runsRightAfter = Some("parser")
+
+    val phaseName = ThePlugin.this.name + "1"
+
+    def newPhase(prev: Phase) = new ThePhase(prev, phaseName)
+  }
+
+  private object thePhase2 extends PluginComponent {
+    val global = ThePlugin.this.global
+
+    val runsAfter = List[String]()
+
+    override val runsRightAfter = Some("parser")
+
+    val phaseName = ThePlugin.this.name + "2"
+
+    def newPhase(prev: Phase) = new ThePhase(prev, phaseName)
+  }
+
+  private class ThePhase(prev: Phase, val name: String) extends Phase(prev) {
+    def run {}
+  }
+}
+
diff --git a/test/files/neg/t7622-multi-followers/sample_2.flags b/test/files/neg/t7622-multi-followers/sample_2.flags
new file mode 100644
index 0000000..d2e83e9
--- /dev/null
+++ b/test/files/neg/t7622-multi-followers/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:multi
diff --git a/test/files/neg/t7622-multi-followers/sample_2.scala b/test/files/neg/t7622-multi-followers/sample_2.scala
new file mode 100644
index 0000000..73cdc64
--- /dev/null
+++ b/test/files/neg/t7622-multi-followers/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t7622-multi-followers/scalac-plugin.xml b/test/files/neg/t7622-multi-followers/scalac-plugin.xml
new file mode 100644
index 0000000..2558d6f
--- /dev/null
+++ b/test/files/neg/t7622-multi-followers/scalac-plugin.xml
@@ -0,0 +1,5 @@
+<plugin>
+  <name>ignored</name>
+  <classname>scala.test.plugins.ThePlugin</classname>
+</plugin>
+
diff --git a/test/files/neg/t7629-view-bounds-deprecation.check b/test/files/neg/t7629-view-bounds-deprecation.check
new file mode 100644
index 0000000..ed77c15
--- /dev/null
+++ b/test/files/neg/t7629-view-bounds-deprecation.check
@@ -0,0 +1,11 @@
+t7629-view-bounds-deprecation.scala:2: warning: View bounds are deprecated. Use an implicit parameter instead.
+Example: Instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`.
+  def f[A <% Int](a: A) = null
+          ^
+t7629-view-bounds-deprecation.scala:3: warning: View bounds are deprecated. Use an implicit parameter instead.
+Example: Instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`.
+  def g[C, B <: C, A <% B : Numeric](a: A) = null
+                     ^
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t7629-view-bounds-deprecation.flags b/test/files/neg/t7629-view-bounds-deprecation.flags
new file mode 100644
index 0000000..43a25d4
--- /dev/null
+++ b/test/files/neg/t7629-view-bounds-deprecation.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings -Xfuture
diff --git a/test/files/neg/t7629-view-bounds-deprecation.scala b/test/files/neg/t7629-view-bounds-deprecation.scala
new file mode 100644
index 0000000..a6ede1f
--- /dev/null
+++ b/test/files/neg/t7629-view-bounds-deprecation.scala
@@ -0,0 +1,4 @@
+object Test {
+  def f[A <% Int](a: A) = null
+  def g[C, B <: C, A <% B : Numeric](a: A) = null
+}
diff --git a/test/files/neg/t7669.check b/test/files/neg/t7669.check
new file mode 100644
index 0000000..c090ed1
--- /dev/null
+++ b/test/files/neg/t7669.check
@@ -0,0 +1,7 @@
+t7669.scala:9: warning: match may not be exhaustive.
+It would fail on the following input: NotHandled(_)
+  def exhausto(expr: Expr): Unit = expr match {
+                                   ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/case-collision.flags b/test/files/neg/t7669.flags
similarity index 100%
copy from test/files/neg/case-collision.flags
copy to test/files/neg/t7669.flags
diff --git a/test/files/neg/t7669.scala b/test/files/neg/t7669.scala
new file mode 100644
index 0000000..12441ec
--- /dev/null
+++ b/test/files/neg/t7669.scala
@@ -0,0 +1,13 @@
+object Test {
+
+  sealed abstract class Expr
+  // Change type of `arg` to `Any` and the exhaustiveness warning
+  // is issued below
+  case class Op(arg: Expr) extends Expr
+  case class NotHandled(num: Double) extends Expr
+
+  def exhausto(expr: Expr): Unit = expr match {
+    case Op(Op(_)) =>
+    case Op(_) =>
+  }
+}
diff --git a/test/files/neg/t7694b.check b/test/files/neg/t7694b.check
deleted file mode 100644
index ea3d773..0000000
--- a/test/files/neg/t7694b.check
+++ /dev/null
@@ -1,7 +0,0 @@
-t7694b.scala:8: error: type arguments [_3,_4] do not conform to trait L's type parameter bounds [A2,B2 <: A2]
-  def d = if (true) (null: L[A, A]) else (null: L[B, B])
-      ^
-t7694b.scala:9: error: type arguments [_1,_2] do not conform to trait L's type parameter bounds [A2,B2 <: A2]
-  val v = if (true) (null: L[A, A]) else (null: L[B, B])
-      ^
-two errors found
diff --git a/test/files/neg/t7715.check b/test/files/neg/t7715.check
new file mode 100644
index 0000000..4ee6b6c
--- /dev/null
+++ b/test/files/neg/t7715.check
@@ -0,0 +1,13 @@
+t7715.scala:8: error: error in interpolated string: identifier or block expected
+  days map s"On the $_th day of Christmas" foreach println
+                     ^
+t7715.scala:10: error: error in interpolated string: identifier or block expected
+  val rf = (n: Int) => s"\\*{$_}"(n).r
+                              ^
+t7715.scala:17: error: unbound placeholder parameter
+  days zip days map s"${_: Int} by ${_: Int}".tupled foreach println
+                        ^
+t7715.scala:17: error: unbound placeholder parameter
+  days zip days map s"${_: Int} by ${_: Int}".tupled foreach println
+                                     ^
+four errors found
diff --git a/test/files/neg/t7715.scala b/test/files/neg/t7715.scala
new file mode 100644
index 0000000..637ab8d
--- /dev/null
+++ b/test/files/neg/t7715.scala
@@ -0,0 +1,18 @@
+
+import PartialFunction.cond
+import util._
+
+object Test extends App {
+  val days = (1 to 12).toList
+
+  days map s"On the $_th day of Christmas" foreach println
+
+  val rf = (n: Int) => s"\\*{$_}"(n).r
+  def stars(n: Int)(s: String) = {
+    val r = rf(n)
+    cond(s) { case r(_*) => true }
+  }
+  Console println stars(5)("*****")
+
+  days zip days map s"${_: Int} by ${_: Int}".tupled foreach println
+}
diff --git a/test/files/neg/t7721.check b/test/files/neg/t7721.check
new file mode 100644
index 0000000..ade1ca3
--- /dev/null
+++ b/test/files/neg/t7721.check
@@ -0,0 +1,27 @@
+t7721.scala:11: warning: abstract type pattern A.this.Foo is unchecked since it is eliminated by erasure
+    case x: Foo with Concrete => x.bippy + x.conco
+            ^
+t7721.scala:15: warning: abstract type pattern A.this.Foo is unchecked since it is eliminated by erasure
+    case x: Concrete with Foo => x.bippy + x.conco
+            ^
+t7721.scala:19: warning: abstract type pattern A.this.Foo is unchecked since it is eliminated by erasure
+    case x: Foo with Bar => x.bippy + x.barry
+            ^
+t7721.scala:19: warning: abstract type pattern A.this.Bar is unchecked since it is eliminated by erasure
+    case x: Foo with Bar => x.bippy + x.barry
+            ^
+t7721.scala:39: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure
+    case x: Foo with Concrete => x.bippy + x.dingo + x.conco
+            ^
+t7721.scala:43: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure
+    case x: Concrete with Foo => x.bippy + x.dingo + x.conco
+            ^
+t7721.scala:47: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure
+    case x: Foo with Bar with Concrete => x.bippy + x.barry + x.dingo + x.conco + x.bongo
+            ^
+t7721.scala:47: warning: abstract type pattern B.this.Bar is unchecked since it is eliminated by erasure
+    case x: Foo with Bar with Concrete => x.bippy + x.barry + x.dingo + x.conco + x.bongo
+            ^
+error: No warnings can be incurred under -Xfatal-warnings.
+8 warnings found
+one error found
diff --git a/test/files/disabled/t7020.flags b/test/files/neg/t7721.flags
similarity index 100%
copy from test/files/disabled/t7020.flags
copy to test/files/neg/t7721.flags
diff --git a/test/files/neg/t7721.scala b/test/files/neg/t7721.scala
new file mode 100644
index 0000000..27884c9
--- /dev/null
+++ b/test/files/neg/t7721.scala
@@ -0,0 +1,140 @@
+import scala.language.reflectiveCalls
+
+trait A {
+  trait Concrete { def conco: Int = 1 }
+  type Foo <: { def bippy: Int }
+  type Bar <: { def barry: Int }
+
+  implicit def barTag: scala.reflect.ClassTag[Bar]
+
+  def f1(x: Any) = x match {
+    case x: Foo with Concrete => x.bippy + x.conco
+    case _                    => -1
+  }
+  def f2(x: Any) = x match {
+    case x: Concrete with Foo => x.bippy + x.conco
+    case _                    => -1
+  }
+  def f3(x: Any) = x match {
+    case x: Foo with Bar => x.bippy + x.barry
+    case _               => -1
+  }
+  def f4(x: Any) = x match {
+    case x: (Foo @unchecked) => x.bippy  // warns, suppressed
+    case _                   => -1
+  }
+  def f5(x: Any) = x match {
+    case x: (Bar @unchecked) => x.barry // warns (but about the "outer reference"), suppressed
+    case _      => -1
+  }
+}
+
+trait B extends A {
+  type Foo <: { def bippy: Int ; def dingo: Int }
+  type Bar <: { def barry: Int ; def bongo: Int }
+
+  override implicit def barTag: scala.reflect.ClassTag[Bar]
+
+  override def f1(x: Any) = x match {
+    case x: Foo with Concrete => x.bippy + x.dingo + x.conco
+    case _                    => -1
+  }
+  override def f2(x: Any) = x match {
+    case x: Concrete with Foo => x.bippy + x.dingo + x.conco
+    case _                    => -1
+  }
+  override def f3(x: Any) = x match {
+    case x: Foo with Bar with Concrete => x.bippy + x.barry + x.dingo + x.conco + x.bongo
+    case _                             => -1
+  }
+  override def f4(x: Any) = x match {
+    case x: (Foo @unchecked) => x.bippy + x.dingo // warns, suppressed
+    case _                   => -1
+  }
+  override def f5(x: Any) = x match {
+    case x: (Bar @unchecked) => x.barry + x.bongo // warns (but about the "outer reference"), suppressed
+    case _                   => -1
+  }
+}
+
+object Test {
+  abstract class Base extends A {
+    trait Foo {
+      def bippy = 2
+      def dingo = 3
+    }
+    trait Bar {
+      def barry = 2
+      def bongo = 3
+    }
+    implicit def barTag: scala.reflect.ClassTag[Bar] = scala.reflect.ClassTag(classOf[Bar])
+
+    def run() {
+      println("f1")
+      wrap(f1(new Concrete {}))
+      wrap(f1(new Foo {}))
+      wrap(f1(new Bar {}))
+      wrap(f1(new Foo with Concrete {}))
+      wrap(f1(new Concrete with Foo {}))
+
+      println("\nf2")
+      wrap(f2(new Concrete {}))
+      wrap(f2(new Foo {}))
+      wrap(f2(new Bar {}))
+      wrap(f2(new Foo with Concrete {}))
+      wrap(f2(new Concrete with Foo {}))
+      wrap(f2(new Bar with Concrete {}))
+      wrap(f2(new Concrete with Bar {}))
+      wrap(f2(new Concrete with Foo with Bar {}))
+      wrap(f2(new Foo with Bar with Concrete {}))
+
+      println("\nf3")
+      wrap(f3(new Concrete {}))
+      wrap(f3(new Foo {}))
+      wrap(f3(new Bar {}))
+      wrap(f3(new Foo with Concrete {}))
+      wrap(f3(new Concrete with Foo {}))
+      wrap(f3(new Bar with Concrete {}))
+      wrap(f3(new Concrete with Bar {}))
+      wrap(f3(new Concrete with Foo with Bar {}))
+      wrap(f3(new Foo with Bar with Concrete {}))
+
+      println("\nf4")
+      wrap(f4(new Concrete {}))
+      wrap(f4(new Foo {}))
+      wrap(f4(new Bar {}))
+      wrap(f4(new Foo with Concrete {}))
+      wrap(f4(new Concrete with Foo {}))
+      wrap(f4(new Bar with Concrete {}))
+      wrap(f4(new Concrete with Bar {}))
+      wrap(f4(new Concrete with Foo with Bar {}))
+      wrap(f4(new Foo with Bar with Concrete {}))
+
+      println("\nf5")
+      wrap(f5(new Concrete {}))
+      wrap(f5(new Foo {}))
+      wrap(f5(new Bar {}))
+      wrap(f5(new Foo with Concrete {}))
+      wrap(f5(new Concrete with Foo {}))
+      wrap(f5(new Bar with Concrete {}))
+      wrap(f5(new Concrete with Bar {}))
+      wrap(f5(new Concrete with Foo with Bar {}))
+      wrap(f5(new Foo with Bar with Concrete {}))
+    }
+  }
+
+  object ao extends Base
+  object bo extends Base with B
+
+  private def wrap(body: => Any) {
+    try println(body)
+    catch { case ex: NoSuchMethodException => println(ex) }
+  }
+
+  def main(args: Array[String]) {
+    ao.run()
+    bo.run()
+  }
+}
+
+// java.lang.NoSuchMethodException: Test$$anon$1.bippy()
\ No newline at end of file
diff --git a/test/files/neg/t7756a.check b/test/files/neg/t7756a.check
new file mode 100644
index 0000000..8d42717
--- /dev/null
+++ b/test/files/neg/t7756a.check
@@ -0,0 +1,7 @@
+t7756a.scala:7: error: type arguments [Object] do not conform to trait TA's type parameter bounds [X <: CharSequence]
+        locally(null: TA[Object])
+        ^
+t7756a.scala:7: error: type arguments [Object] do not conform to trait TA's type parameter bounds [X <: CharSequence]
+        locally(null: TA[Object])
+                      ^
+two errors found
diff --git a/test/files/neg/t7756a.scala b/test/files/neg/t7756a.scala
new file mode 100644
index 0000000..4453e84
--- /dev/null
+++ b/test/files/neg/t7756a.scala
@@ -0,0 +1,11 @@
+object Test {
+  def test: Unit = {
+    trait TA[X <: CharSequence]
+    0 match {
+      case _ =>
+        // the bounds violation isn't reported. RefChecks seems to be too broadly disabled under virtpatmat: see 65340ed4ad2e
+        locally(null: TA[Object])
+        ()
+    }
+  }
+}
diff --git a/test/files/neg/t7756b.check b/test/files/neg/t7756b.check
new file mode 100644
index 0000000..2817a7e
--- /dev/null
+++ b/test/files/neg/t7756b.check
@@ -0,0 +1,6 @@
+t7756b.scala:3: warning: comparing values of types Int and String using `==' will always yield false
+    case _ => 0 == ""
+                ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/case-collision.flags b/test/files/neg/t7756b.flags
similarity index 100%
copy from test/files/neg/case-collision.flags
copy to test/files/neg/t7756b.flags
diff --git a/test/files/neg/t7756b.scala b/test/files/neg/t7756b.scala
new file mode 100644
index 0000000..a2de29c
--- /dev/null
+++ b/test/files/neg/t7756b.scala
@@ -0,0 +1,5 @@
+object Test {
+  0 match {
+    case _ => 0 == ""
+  }
+}
diff --git a/test/files/neg/t7757a.check b/test/files/neg/t7757a.check
new file mode 100644
index 0000000..de24e23
--- /dev/null
+++ b/test/files/neg/t7757a.check
@@ -0,0 +1,4 @@
+t7757a.scala:1: error: ';' expected but '@' found.
+trait Foo @annot
+          ^
+one error found
diff --git a/test/files/neg/t7757a.scala b/test/files/neg/t7757a.scala
new file mode 100644
index 0000000..24f6c16
--- /dev/null
+++ b/test/files/neg/t7757a.scala
@@ -0,0 +1 @@
+trait Foo @annot
\ No newline at end of file
diff --git a/test/files/neg/t7757b.check b/test/files/neg/t7757b.check
new file mode 100644
index 0000000..3e5a0f1
--- /dev/null
+++ b/test/files/neg/t7757b.check
@@ -0,0 +1,4 @@
+t7757b.scala:2: error: expected start of definition
+@annot2
+       ^
+one error found
diff --git a/test/files/neg/t7757b.scala b/test/files/neg/t7757b.scala
new file mode 100644
index 0000000..e9a537d
--- /dev/null
+++ b/test/files/neg/t7757b.scala
@@ -0,0 +1,2 @@
+trait Foo2
+@annot2
\ No newline at end of file
diff --git a/test/files/neg/t7783.check b/test/files/neg/t7783.check
index f489b3c..647cfee 100644
--- a/test/files/neg/t7783.check
+++ b/test/files/neg/t7783.check
@@ -1,16 +1,18 @@
-t7783.scala:1: error: type D in object O is deprecated: 
+t7783.scala:1: warning: type D in object O is deprecated:
 object O { class C; @deprecated("", "") type D = C; def foo: Seq[D] = Nil }
                                                              ^
-t7783.scala:11: error: type D in object O is deprecated: 
+t7783.scala:11: warning: type D in object O is deprecated:
   type T = O.D
              ^
-t7783.scala:12: error: type D in object O is deprecated: 
+t7783.scala:12: warning: type D in object O is deprecated:
   locally(null: O.D)
                   ^
-t7783.scala:13: error: type D in object O is deprecated: 
+t7783.scala:13: warning: type D in object O is deprecated:
   val x: O.D = null
            ^
-t7783.scala:14: error: type D in object O is deprecated: 
+t7783.scala:14: warning: type D in object O is deprecated:
   locally(null.asInstanceOf[O.D])
                               ^
-5 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+5 warnings found
+one error found
diff --git a/test/files/neg/t783.scala b/test/files/neg/t783.scala
index 5d20bc6..59f7c7f 100644
--- a/test/files/neg/t783.scala
+++ b/test/files/neg/t783.scala
@@ -18,7 +18,7 @@ object Main extends App {
     def globalInit : Global;
     final def globalInit0 = globalInit.asInstanceOf[global.type];
   }
-  
+
   object global0 extends Global {
     object analyzer extends Analyzer {
       type Global = global0.type;
diff --git a/test/files/neg/t7834neg.check b/test/files/neg/t7834neg.check
new file mode 100644
index 0000000..569df4b
--- /dev/null
+++ b/test/files/neg/t7834neg.check
@@ -0,0 +1,41 @@
+t7834neg.scala:48: error: type mismatch;
+ found   : C.super.q.type (with underlying type M2)
+ required: C.super.q.type
+  x1 = x2  // fail
+       ^
+t7834neg.scala:50: error: type mismatch;
+ found   : C.super.q.type (with underlying type M1)
+ required: C.super.q.type
+  x2 = x1  // fail
+       ^
+t7834neg.scala:53: error: type mismatch;
+ found   : C.super.q.type (with underlying type M1)
+ required: C.this.q.type
+  x3 = x1  // fail
+       ^
+t7834neg.scala:54: error: type mismatch;
+ found   : C.super.q.type (with underlying type M2)
+ required: C.this.q.type
+  x3 = x2  // fail
+       ^
+t7834neg.scala:69: error: type mismatch;
+ found   : C.super.q.type (with underlying type M2)
+ required: C.super.q.type
+  x1 = super[S2].q  // fail
+                 ^
+t7834neg.scala:71: error: type mismatch;
+ found   : C.super.q.type (with underlying type M1)
+ required: C.super.q.type
+  x2 = super[S1].q  // fail
+                 ^
+t7834neg.scala:74: error: type mismatch;
+ found   : C.super.q.type (with underlying type M1)
+ required: C.this.q.type
+  x3 = super[S1].q  // fail
+                 ^
+t7834neg.scala:75: error: type mismatch;
+ found   : C.super.q.type (with underlying type M2)
+ required: C.this.q.type
+  x3 = super[S2].q  // fail
+                 ^
+8 errors found
diff --git a/test/files/neg/t7834neg.scala b/test/files/neg/t7834neg.scala
new file mode 100644
index 0000000..d35a84e
--- /dev/null
+++ b/test/files/neg/t7834neg.scala
@@ -0,0 +1,76 @@
+class M1
+class M2 extends M1
+class M3 extends M2
+
+trait S1 { val q = new M1 ; val q1: q.type = q }
+trait S2 { val q = new M2 ; val q2: q.type = q }
+
+class B extends S1 with S2 {
+  override val q = new M3
+  val q3: q.type = q
+
+  var x1: B.super[S1].q1.type = null
+  var x2: B.super[S2].q2.type = null
+  var x3: B.this.q3.type      = null
+
+  x1 = x1
+  x1 = x2
+  x1 = x3
+  x2 = x1
+  x2 = x2
+  x2 = x3
+  x3 = x1
+  x3 = x2
+  x3 = x3
+
+  x1 = q1
+  x1 = q2
+  x1 = q3
+  x2 = q1
+  x2 = q2
+  x2 = q3
+  x3 = q1
+  x3 = q2
+  x3 = x3
+}
+
+class C extends S1 with S2 {
+  override val q = new M3
+  val q3: q.type = q
+
+  // x1's type and x2's type are incompatible
+  // x3's is assignable to x1 or x2, but not vice versa
+  var x1: C.super[S1].q.type = null
+  var x2: C.super[S2].q.type = null
+  var x3: C.this.q.type      = null
+
+  x1 = x1
+  x1 = x2  // fail
+  x1 = x3
+  x2 = x1  // fail
+  x2 = x2
+  x2 = x3
+  x3 = x1  // fail
+  x3 = x2  // fail
+  x3 = x3
+
+  x1 = q1
+  x1 = q2
+  x1 = q3
+  x2 = q1
+  x2 = q2
+  x2 = q3
+  x3 = q1
+  x3 = q2
+  x3 = x3
+
+  x1 = q
+  x1 = super[S1].q
+  x1 = super[S2].q  // fail
+  x2 = q
+  x2 = super[S1].q  // fail
+  x2 = super[S2].q
+  x3 = q
+  x3 = super[S1].q  // fail
+  x3 = super[S2].q  // fail
+}
diff --git a/test/files/neg/t7848-interp-warn.check b/test/files/neg/t7848-interp-warn.check
new file mode 100644
index 0000000..b7df6d8
--- /dev/null
+++ b/test/files/neg/t7848-interp-warn.check
@@ -0,0 +1,12 @@
+t7848-interp-warn.scala:8: warning: `$foo` looks like an interpolated identifier! Did you forget the interpolator?
+    "An important $foo message!"
+    ^
+t7848-interp-warn.scala:12: warning: That looks like an interpolated expression! Did you forget the interpolator?
+    "A doubly important ${foo * 2} message!"
+    ^
+t7848-interp-warn.scala:16: warning: `$bar` looks like an interpolated identifier! Did you forget the interpolator?
+  def j = s"Try using '${ "something like $bar" }' instead."  // warn
+                          ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/delayed-init-ref.flags b/test/files/neg/t7848-interp-warn.flags
similarity index 100%
copy from test/files/neg/delayed-init-ref.flags
copy to test/files/neg/t7848-interp-warn.flags
diff --git a/test/files/neg/t7848-interp-warn.scala b/test/files/neg/t7848-interp-warn.scala
new file mode 100644
index 0000000..3887aff
--- /dev/null
+++ b/test/files/neg/t7848-interp-warn.scala
@@ -0,0 +1,18 @@
+
+package test
+
+object Test {
+  def bar = "bar"
+  def f = {
+    val foo = "bar"
+    "An important $foo message!"
+  }
+  def g = {
+    val foo = "bar"
+    "A doubly important ${foo * 2} message!"
+  }
+  def h = s"Try using '$$bar' instead."  // no warn
+  def i = s"Try using '${ "$bar" }' instead."  // no warn on space test
+  def j = s"Try using '${ "something like $bar" }' instead."  // warn
+  def k = f"Try using '$bar' instead."  // no warn on other std interps
+}
diff --git a/test/files/neg/t7850.check b/test/files/neg/t7850.check
new file mode 100644
index 0000000..317be2b
--- /dev/null
+++ b/test/files/neg/t7850.check
@@ -0,0 +1,7 @@
+t7850.scala:11: error: an unapply result must have a member `def isEmpty: Boolean (found: def isEmpty: Casey)
+    val Casey(x1) = new Casey(1)
+        ^
+t7850.scala:12: error: an unapply result must have a member `def isEmpty: Boolean
+    val Dingy(x2) = new Dingy(1)
+        ^
+two errors found
diff --git a/test/files/neg/t7850.scala b/test/files/neg/t7850.scala
new file mode 100644
index 0000000..04edad8
--- /dev/null
+++ b/test/files/neg/t7850.scala
@@ -0,0 +1,16 @@
+// isEmpty returns non-boolean
+class Casey(a: Int) { def isEmpty = this; def get = this }
+object Casey { def unapply(a: Casey) = a }
+
+// no isEmpty method at all
+class Dingy(a: Int) { def get = this }
+object Dingy { def unapply(a: Dingy) = a }
+
+object Test {
+  def main(args: Array[String]) {
+    val Casey(x1) = new Casey(1)
+    val Dingy(x2) = new Dingy(1)
+    println(s"$x1 $x2")
+  }
+}
+
diff --git a/test/files/neg/t7859.check b/test/files/neg/t7859.check
new file mode 100644
index 0000000..5789e2a
--- /dev/null
+++ b/test/files/neg/t7859.check
@@ -0,0 +1,19 @@
+B_2.scala:6: error: not found: value x
+  new p1.A(x).x
+           ^
+B_2.scala:6: error: value x in class A cannot be accessed in p1.A
+  new p1.A(x).x
+              ^
+B_2.scala:7: error: not found: value x
+  new B(x).x
+        ^
+B_2.scala:7: error: value x is not a member of B
+  new B(x).x
+           ^
+B_2.scala:8: error: not found: value x
+  new C(x).x
+        ^
+B_2.scala:8: error: value x in class C cannot be accessed in C
+  new C(x).x
+           ^
+6 errors found
diff --git a/test/files/neg/t7859/A_1.scala b/test/files/neg/t7859/A_1.scala
new file mode 100644
index 0000000..e5b32d1
--- /dev/null
+++ b/test/files/neg/t7859/A_1.scala
@@ -0,0 +1,5 @@
+package p1 {
+  class A(private[p1] val x: Any) extends AnyVal
+}
+class B(private val x: Any) extends AnyVal
+
diff --git a/test/files/neg/t7859/B_2.scala b/test/files/neg/t7859/B_2.scala
new file mode 100644
index 0000000..2e0556b
--- /dev/null
+++ b/test/files/neg/t7859/B_2.scala
@@ -0,0 +1,9 @@
+class C(private val x: Any) extends AnyVal
+
+// Checking that makeNotPrivate(paramAccessor) doesn't make this visible during typer.
+// The output is identical with/without `extends AnyVal`.
+object Test {
+  new p1.A(x).x
+  new B(x).x
+  new C(x).x
+}
diff --git a/test/files/neg/t7870.check b/test/files/neg/t7870.check
new file mode 100644
index 0000000..d9db911
--- /dev/null
+++ b/test/files/neg/t7870.check
@@ -0,0 +1,4 @@
+t7870.scala:1: error: in class C, multiple overloaded alternatives of constructor C define default arguments.
+class C(a: Int = 0, b: Any) {
+      ^
+one error found
diff --git a/test/files/neg/t7870.scala b/test/files/neg/t7870.scala
new file mode 100644
index 0000000..5d48d43
--- /dev/null
+++ b/test/files/neg/t7870.scala
@@ -0,0 +1,3 @@
+class C(a: Int = 0, b: Any) {
+  def this(a: Int = 0) = this(???, ???)
+}
diff --git a/test/files/neg/t7872.check b/test/files/neg/t7872.check
new file mode 100644
index 0000000..57d9772
--- /dev/null
+++ b/test/files/neg/t7872.check
@@ -0,0 +1,10 @@
+t7872.scala:6: error: contravariant type a occurs in covariant position in type [-a]Cov[a] of type l
+  type x = {type l[-a] = Cov[a]}
+                 ^
+t7872.scala:8: error: covariant type a occurs in contravariant position in type [+a]Inv[a] of type l
+  foo[({type l[+a] = Inv[a]})#l]
+             ^
+t7872.scala:5: error: contravariant type a occurs in covariant position in type [-a]Cov[a] of type l
+  type l[-a] = Cov[a]
+       ^
+three errors found
diff --git a/test/files/neg/t7872.scala b/test/files/neg/t7872.scala
new file mode 100644
index 0000000..66d22a0
--- /dev/null
+++ b/test/files/neg/t7872.scala
@@ -0,0 +1,9 @@
+trait Cov[+A]
+trait Inv[-A]
+
+object varianceExploit {  
+  type l[-a] = Cov[a]
+  type x = {type l[-a] = Cov[a]}
+  def foo[M[_]] = ()
+  foo[({type l[+a] = Inv[a]})#l]
+}
diff --git a/test/files/neg/t7872b.check b/test/files/neg/t7872b.check
new file mode 100644
index 0000000..0dc4e76
--- /dev/null
+++ b/test/files/neg/t7872b.check
@@ -0,0 +1,7 @@
+t7872b.scala:8: error: contravariant type a occurs in covariant position in type [-a]List[a] of type l
+  def oops1 = down[({type l[-a] = List[a]})#l](List('whatever: Object)).head + "oops"
+                          ^
+t7872b.scala:19: error: covariant type a occurs in contravariant position in type [+a]coinv.Stringer[a] of type l
+  def oops2 = up[({type l[+a] = Stringer[a]})#l]("printed: " + _)
+                        ^
+two errors found
diff --git a/test/files/neg/t7872b.scala b/test/files/neg/t7872b.scala
new file mode 100644
index 0000000..307a147
--- /dev/null
+++ b/test/files/neg/t7872b.scala
@@ -0,0 +1,23 @@
+object coinv {
+  def up[F[+_]](fa: F[String]): F[Object] = fa
+  def down[F[-_]](fa: F[Object]): F[String] = fa
+ 
+  up(List("hi"))
+ 
+  // should not compile; `l' is unsound
+  def oops1 = down[({type l[-a] = List[a]})#l](List('whatever: Object)).head + "oops"
+  // scala> oops1
+  // java.lang.ClassCastException: scala.Symbol cannot be cast to java.lang.String
+  //         at com.nocandysw.coinv$.oops1(coinv.scala:12)
+ 
+  type Stringer[-A] = A => String
+  down[Stringer](_.toString)
+  // [error] type A is contravariant, but type _ is declared covariant
+  // up[Stringer]("printed: " + _)
+ 
+  // should not compile; `l' is unsound
+  def oops2 = up[({type l[+a] = Stringer[a]})#l]("printed: " + _)
+  // scala> oops2(Some(33))
+  // java.lang.ClassCastException: scala.Some cannot be cast to java.lang.String
+  //         at com.nocandysw.coinv$$anonfun$oops2$1.apply(coinv.scala:20)
+}
diff --git a/test/files/neg/t7872c.check b/test/files/neg/t7872c.check
new file mode 100644
index 0000000..469449d
--- /dev/null
+++ b/test/files/neg/t7872c.check
@@ -0,0 +1,11 @@
+t7872c.scala:7: error: inferred kinds of the type arguments (List) do not conform to the expected kinds of the type parameters (type F).
+List's type parameters do not match type F's expected parameters:
+type A is covariant, but type _ is declared contravariant
+  down(List('whatever: Object))
+  ^
+t7872c.scala:7: error: type mismatch;
+ found   : List[Object]
+ required: F[Object]
+  down(List('whatever: Object))
+           ^
+two errors found
diff --git a/test/files/neg/t7872c.scala b/test/files/neg/t7872c.scala
new file mode 100644
index 0000000..fa12a52
--- /dev/null
+++ b/test/files/neg/t7872c.scala
@@ -0,0 +1,8 @@
+object coinv {
+  def up[F[+_]](fa: F[String]): F[Object] = fa
+  def down[F[-_]](fa: F[Object]): F[String] = fa
+ 
+  up(List("hi"))
+  // [error] type A is covariant, but type _ is declared contravariant
+  down(List('whatever: Object))
+}
diff --git a/test/files/neg/t7877.check b/test/files/neg/t7877.check
new file mode 100644
index 0000000..7f7f832
--- /dev/null
+++ b/test/files/neg/t7877.check
@@ -0,0 +1,7 @@
+t7877.scala:6: error: not found: value Y
+    case Y() => ()             // not allowed
+         ^
+t7877.scala:7: error: OnNext[Any] does not take parameters
+    case OnNext[Any]() => ()   // should *not* be allowed, but was.
+                    ^
+two errors found
diff --git a/test/files/neg/t7877.scala b/test/files/neg/t7877.scala
new file mode 100644
index 0000000..52e167f
--- /dev/null
+++ b/test/files/neg/t7877.scala
@@ -0,0 +1,13 @@
+class Test {
+  val X: OnNext[Any] = null
+  def Y: OnNext[Any] = null
+  (null: Any) match {
+    case X() => ()             // allowed
+    case Y() => ()             // not allowed
+    case OnNext[Any]() => ()   // should *not* be allowed, but was.
+  }
+}
+
+class OnNext[+T] {
+  def unapply(x: Any) = false
+}
diff --git a/test/files/neg/t7895.check b/test/files/neg/t7895.check
new file mode 100644
index 0000000..1a58e24
--- /dev/null
+++ b/test/files/neg/t7895.check
@@ -0,0 +1,4 @@
+t7895.scala:4: error: not found: value Goop
+    case Goop(a, b, c) => Tuple2(a, b)
+         ^
+one error found
diff --git a/test/files/neg/t7895.scala b/test/files/neg/t7895.scala
new file mode 100644
index 0000000..87a586a
--- /dev/null
+++ b/test/files/neg/t7895.scala
@@ -0,0 +1,6 @@
+class A {
+  (null: Any) match {
+    // We don't want "symbol not found errors" for `a` and `b` in the case body.
+    case Goop(a, b, c) => Tuple2(a, b)
+  }
+}
diff --git a/test/files/neg/t7895b.check b/test/files/neg/t7895b.check
new file mode 100644
index 0000000..87ea727
--- /dev/null
+++ b/test/files/neg/t7895b.check
@@ -0,0 +1,7 @@
+t7895b.scala:4: error: not found: value a
+  foo(a, b)
+      ^
+t7895b.scala:4: error: not found: value b
+  foo(a, b)
+         ^
+two errors found
diff --git a/test/files/neg/t7895b.scala b/test/files/neg/t7895b.scala
new file mode 100644
index 0000000..1603027
--- /dev/null
+++ b/test/files/neg/t7895b.scala
@@ -0,0 +1,5 @@
+object Test {
+  def foo(a: Any*) = ()
+
+  foo(a, b)
+}
diff --git a/test/files/neg/t7895c.check b/test/files/neg/t7895c.check
new file mode 100644
index 0000000..d4745b1
--- /dev/null
+++ b/test/files/neg/t7895c.check
@@ -0,0 +1,13 @@
+t7895c.scala:2: error: not found: value bong
+  def booboo = bong + booble + bippity - bazingo
+               ^
+t7895c.scala:2: error: not found: value booble
+  def booboo = bong + booble + bippity - bazingo
+                      ^
+t7895c.scala:2: error: not found: value bippity
+  def booboo = bong + booble + bippity - bazingo
+                               ^
+t7895c.scala:2: error: not found: value bazingo
+  def booboo = bong + booble + bippity - bazingo
+                                         ^
+four errors found
diff --git a/test/files/neg/t7895c.scala b/test/files/neg/t7895c.scala
new file mode 100644
index 0000000..53d2a86
--- /dev/null
+++ b/test/files/neg/t7895c.scala
@@ -0,0 +1,3 @@
+class A {
+  def booboo = bong + booble + bippity - bazingo
+}
diff --git a/test/files/neg/t7897.check b/test/files/neg/t7897.check
new file mode 100644
index 0000000..48eff51
--- /dev/null
+++ b/test/files/neg/t7897.check
@@ -0,0 +1,4 @@
+t7897.scala:19: error: value length is not a member of p0.Single
+      case p0.Single(x) => println(s"`$x` has ${x.length} chars")
+                                                  ^
+one error found
diff --git a/test/files/neg/t7897.scala b/test/files/neg/t7897.scala
new file mode 100644
index 0000000..87c966b
--- /dev/null
+++ b/test/files/neg/t7897.scala
@@ -0,0 +1,23 @@
+package p0 {
+  class Single(val x: Any) extends AnyRef with Product1[String] {
+    private def s = "" + x
+    override def canEqual(x: Any) = this eq x.asInstanceOf[AnyRef]
+    def isEmpty = false
+    def get = this
+    def _1 = s + " only"
+
+    override def toString = s"Single(${_1})"
+  }
+
+  object Single {
+    def unapply(x: Any): Single = new Single(x)
+  }
+}
+object Test {
+  def main(args: Array[String]): Unit = {
+    "catdog" match {
+      case p0.Single(x) => println(s"`$x` has ${x.length} chars")
+      case x            => println("fail: " + x)
+    }
+  }
+}
diff --git a/test/files/neg/t7899.check b/test/files/neg/t7899.check
new file mode 100644
index 0000000..febfe76
--- /dev/null
+++ b/test/files/neg/t7899.check
@@ -0,0 +1,6 @@
+t7899.scala:5: error: type mismatch;
+ found   : Int => Int
+ required: (=> Int) => ?
+    foo(identity)()
+        ^
+one error found
diff --git a/test/files/neg/t7899.scala b/test/files/neg/t7899.scala
new file mode 100644
index 0000000..f2dea3a
--- /dev/null
+++ b/test/files/neg/t7899.scala
@@ -0,0 +1,7 @@
+object Test {
+  def foo[B](f: (=> Int) => B): () => B = () => f(0)
+
+  def main(args: Array[String]) {
+    foo(identity)()
+  }
+}
diff --git a/test/files/neg/t7967.check b/test/files/neg/t7967.check
new file mode 100644
index 0000000..cde950d
--- /dev/null
+++ b/test/files/neg/t7967.check
@@ -0,0 +1,9 @@
+t7967.scala:6: error: illegal inheritance;
+ self-type C does not conform to C's selftype C with B
+  new C {} // fails
+      ^
+t7967.scala:8: error: illegal inheritance;
+ self-type Test.CC does not conform to Test.CC's selftype Test.CC
+  new CC {} // should fail, doesn't
+      ^
+two errors found
diff --git a/test/files/neg/t7967.scala b/test/files/neg/t7967.scala
new file mode 100644
index 0000000..4f13347
--- /dev/null
+++ b/test/files/neg/t7967.scala
@@ -0,0 +1,9 @@
+
+trait B
+trait C {self: B =>}
+
+object Test {
+  new C {} // fails
+  type CC = C
+  new CC {} // should fail, doesn't
+}
diff --git a/test/files/neg/t798.scala b/test/files/neg/t798.scala
index b4a1939..a2bf66d 100644
--- a/test/files/neg/t798.scala
+++ b/test/files/neg/t798.scala
@@ -4,5 +4,5 @@ trait Test[Bracks <: Bracks] {
   class C[T]
   val bracks : Bracks;
   val singletons = f(bracks);
-  
+
 }
diff --git a/test/files/neg/t7980.check b/test/files/neg/t7980.check
new file mode 100644
index 0000000..031c23d
--- /dev/null
+++ b/test/files/neg/t7980.check
@@ -0,0 +1,4 @@
+t7980.scala:7: error: Can't unquote Nothing, bottom type values often indicate programmer mistake
+  println(q"class ${Name(X)} { }")
+                        ^
+one error found
diff --git a/test/files/neg/t7980.scala b/test/files/neg/t7980.scala
new file mode 100644
index 0000000..b21907d
--- /dev/null
+++ b/test/files/neg/t7980.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+  import scala.reflect.runtime.universe._
+  def Name[T:TypeTag](name:String): T = implicitly[TypeTag[T]] match {
+    case t => newTypeName(name).asInstanceOf[T]
+  }
+  val X = "ASDF"
+  println(q"class ${Name(X)} { }")
+}
diff --git a/test/files/neg/t7984.check b/test/files/neg/t7984.check
new file mode 100644
index 0000000..0cfd7d1
--- /dev/null
+++ b/test/files/neg/t7984.check
@@ -0,0 +1,6 @@
+t7984.scala:4: warning: non-variable type argument Int in type pattern List[Int] (the underlying of Test.this.ListInt) is unchecked since it is eliminated by erasure
+    case is: ListInt => is.head
+             ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/disabled/t7020.flags b/test/files/neg/t7984.flags
similarity index 100%
copy from test/files/disabled/t7020.flags
copy to test/files/neg/t7984.flags
diff --git a/test/files/neg/t7984.scala b/test/files/neg/t7984.scala
new file mode 100644
index 0000000..ca09a89
--- /dev/null
+++ b/test/files/neg/t7984.scala
@@ -0,0 +1,7 @@
+class Test {
+  type ListInt = List[Int]
+  List[Any]("") match {
+    case is: ListInt => is.head
+    case _ =>
+  }
+}
diff --git a/test/files/neg/t8006.check b/test/files/neg/t8006.check
new file mode 100644
index 0000000..fbac26e
--- /dev/null
+++ b/test/files/neg/t8006.check
@@ -0,0 +1,6 @@
+t8006.scala:3: error: too many arguments for method applyDynamicNamed: (value: (String, Any))String
+error after rewriting to X.this.d.applyDynamicNamed("meth")(scala.Tuple2("value1", 10), scala.Tuple2("value2", 100))
+possible cause: maybe a wrong Dynamic method signature?
+  d.meth(value1 = 10, value2 = 100) // two arguments here, but only one is allowed
+        ^
+one error found
diff --git a/test/files/neg/t8006.scala b/test/files/neg/t8006.scala
new file mode 100644
index 0000000..b2f71c1
--- /dev/null
+++ b/test/files/neg/t8006.scala
@@ -0,0 +1,8 @@
+object X {
+  val d = new D
+  d.meth(value1 = 10, value2 = 100) // two arguments here, but only one is allowed
+}
+import language.dynamics
+class D extends Dynamic {
+  def applyDynamicNamed(name: String)(value: (String, Any)) = name
+}
\ No newline at end of file
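
For contrast, a sketch of the call shape the signature above does accept, a single named argument, which the compiler rewrites to applyDynamicNamed (object and val names here are illustrative):

    import scala.language.dynamics
    class D extends Dynamic {
      def applyDynamicNamed(name: String)(value: (String, Any)) = name
    }
    object Ok {
      val d = new D
      val label = d.meth(value1 = 10)  // rewritten to d.applyDynamicNamed("meth")(("value1", 10)); yields "meth"
    }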
diff --git a/test/files/neg/t8015-ffa.check b/test/files/neg/t8015-ffa.check
new file mode 100644
index 0000000..0f28be7
--- /dev/null
+++ b/test/files/neg/t8015-ffa.check
@@ -0,0 +1,6 @@
+t8015-ffa.scala:7: error: type mismatch;
+ found   : String("3")
+ required: Int
+  val i: Int = "3"      // error line 7 (was 8)
+               ^
+one error found
diff --git a/test/files/neg/t8015-ffa.scala b/test/files/neg/t8015-ffa.scala
new file mode 100644
index 0000000..60876d9
--- /dev/null
+++ b/test/files/neg/t8015-ffa.scala
@@ -0,0 +1,8 @@
+
+package foo
+
+//------- object Next
+
+trait F {
+  val i: Int = "3"      // error line 7 (was 8)
+}
diff --git a/test/files/neg/t8015-ffb.check b/test/files/neg/t8015-ffb.check
new file mode 100644
index 0000000..9b2171e
--- /dev/null
+++ b/test/files/neg/t8015-ffb.check
@@ -0,0 +1,6 @@
+t8015-ffb.scala:10: warning: side-effecting nullary methods are discouraged: suggest defining as `def w()` instead
+  def w = { x\u000c() }       // ^L is colored blue on this screen, hardly visible
+      ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/delayed-init-ref.flags b/test/files/neg/t8015-ffb.flags
similarity index 100%
copy from test/files/neg/delayed-init-ref.flags
copy to test/files/neg/t8015-ffb.flags
diff --git a/test/files/neg/t8015-ffb.scala b/test/files/neg/t8015-ffb.scala
new file mode 100644
index 0000000..dbdd942
--- /dev/null
+++ b/test/files/neg/t8015-ffb.scala
@@ -0,0 +1,11 @@
+
+trait G {
+  val c: Char = '\u000a'   // disallowed!
+  def x\u000d\u000a = 9    // as nl
+  def y() = x
+  def z() = {
+    y()\u000a()           // was Int does not take parameters
+  }
+  def v = y()\u000c()     // was Int does not take parameters
+  def w = { x\u000c() }       // ^L is colored blue on this screen, hardly visible
+}
diff --git a/test/files/neg/t8024.check b/test/files/neg/t8024.check
new file mode 100644
index 0000000..bd551aa
--- /dev/null
+++ b/test/files/neg/t8024.check
@@ -0,0 +1,6 @@
+t8024.scala:13: error: reference to sqrt is ambiguous;
+it is both defined in package object p and imported subsequently by
+import java.lang.Math.sqrt
+  sqrt(0d)
+  ^
+one error found
diff --git a/test/files/neg/t8024.scala b/test/files/neg/t8024.scala
new file mode 100644
index 0000000..b4c2c5e
--- /dev/null
+++ b/test/files/neg/t8024.scala
@@ -0,0 +1,14 @@
+package p
+
+trait NRoot[A]
+
+object `package` {
+  final def sqrt(x: Double): Double = Math.sqrt(x)
+  final def sqrt[A](a: A)(implicit ev: NRoot[A]): A = ???
+}
+
+object FastComplex {
+  import java.lang.Math.sqrt
+
+  sqrt(0d)
+}
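
One way to sidestep the ambiguity the check file expects is to qualify the call instead of importing the member; a sketch under that assumption (object name illustrative):

    package p
    object FastComplexQualified {
      val r = java.lang.Math.sqrt(0d)  // fully qualified, so the package-object sqrt never competes
    }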
diff --git a/test/files/neg/t8024b.check b/test/files/neg/t8024b.check
new file mode 100644
index 0000000..9cd89bc
--- /dev/null
+++ b/test/files/neg/t8024b.check
@@ -0,0 +1,6 @@
+t8024b.scala:15: error: reference to sqrt is ambiguous;
+it is both defined in object FastComplex and imported subsequently by
+import java.lang.Math.sqrt
+    sqrt(0d)
+    ^
+one error found
diff --git a/test/files/neg/t8024b.scala b/test/files/neg/t8024b.scala
new file mode 100644
index 0000000..cf3d496
--- /dev/null
+++ b/test/files/neg/t8024b.scala
@@ -0,0 +1,17 @@
+package p
+
+trait NRoot[A]
+
+object FastComplex {
+  final def sqrt(x: Double): Double = Math.sqrt(x)
+  final def sqrt[A](a: A)(implicit ev: NRoot[A]): A = ???
+
+  object Inner {
+    import java.lang.Math.sqrt
+
+    // wrong message:
+    // error: reference to sqrt is ambiguous;
+    //        it is both defined in object FastComplex and imported subsequently by
+    sqrt(0d)
+  }
+}
diff --git a/test/files/neg/t8035-deprecated.check b/test/files/neg/t8035-deprecated.check
new file mode 100644
index 0000000..01f27e5
--- /dev/null
+++ b/test/files/neg/t8035-deprecated.check
@@ -0,0 +1,21 @@
+t8035-deprecated.scala:2: warning: Adaptation of argument list by inserting () has been deprecated: this is unlikely to be what you want.
+        signature: GenSetLike.apply(elem: A): Boolean
+  given arguments: <none>
+ after adaptation: GenSetLike((): Unit)
+  List(1,2,3).toSet()
+                   ^
+t8035-deprecated.scala:5: warning: Adaptation of argument list by inserting () has been deprecated: this is unlikely to be what you want.
+        signature: A(x: T): Foo.A[T]
+  given arguments: <none>
+ after adaptation: new A((): Unit)
+  new A
+  ^
+t8035-deprecated.scala:9: warning: Adaptation of argument list by inserting () has been deprecated: leaky (Object-receiving) target makes this especially dangerous.
+        signature: Format.format(x$1: Any): String
+  given arguments: <none>
+ after adaptation: Format.format((): Unit)
+  sdf.format()
+            ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/t8035-deprecated.flags b/test/files/neg/t8035-deprecated.flags
new file mode 100644
index 0000000..c6bfaf1
--- /dev/null
+++ b/test/files/neg/t8035-deprecated.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
diff --git a/test/files/neg/t8035-deprecated.scala b/test/files/neg/t8035-deprecated.scala
new file mode 100644
index 0000000..6423157
--- /dev/null
+++ b/test/files/neg/t8035-deprecated.scala
@@ -0,0 +1,10 @@
+object Foo {
+  List(1,2,3).toSet()
+
+  class A[T](val x: T)
+  new A
+
+  import java.text.SimpleDateFormat
+  val sdf = new SimpleDateFormat("yyyyMMdd-HH0000")
+  sdf.format()
+}
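
Each warning above comes from the compiler inserting () as a missing argument list; a sketch of explicit forms that avoid the adaptation (the Date argument is an assumption about the intended call):

    object FooExplicit {
      val s: Set[Int] = List(1, 2, 3).toSet   // take the Set itself; nothing is applied

      class A[T](val x: T)
      new A(())                               // pass the Unit value explicitly if that is really intended

      import java.text.SimpleDateFormat
      val sdf = new SimpleDateFormat("yyyyMMdd-HH0000")
      sdf.format(new java.util.Date)          // format an actual value instead of an inserted ()
    }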
diff --git a/test/files/neg/t8035-removed.check b/test/files/neg/t8035-removed.check
new file mode 100644
index 0000000..e24a0b4
--- /dev/null
+++ b/test/files/neg/t8035-removed.check
@@ -0,0 +1,16 @@
+t8035-removed.scala:2: error: Adaptation of argument list by inserting () has been removed.
+        signature: GenSetLike.apply(elem: A): Boolean
+  given arguments: <none>
+  List(1,2,3).toSet()
+                   ^
+t8035-removed.scala:5: error: Adaptation of argument list by inserting () has been removed.
+        signature: A(x: T): Foo.A[T]
+  given arguments: <none>
+  new A
+  ^
+t8035-removed.scala:9: error: Adaptation of argument list by inserting () has been removed.
+        signature: Format.format(x$1: Any): String
+  given arguments: <none>
+  sdf.format()
+            ^
+three errors found
diff --git a/test/files/neg/t8035-removed.flags b/test/files/neg/t8035-removed.flags
new file mode 100644
index 0000000..29f4ede
--- /dev/null
+++ b/test/files/neg/t8035-removed.flags
@@ -0,0 +1 @@
+-Xfuture
diff --git a/test/files/neg/t8035-removed.scala b/test/files/neg/t8035-removed.scala
new file mode 100644
index 0000000..6423157
--- /dev/null
+++ b/test/files/neg/t8035-removed.scala
@@ -0,0 +1,10 @@
+object Foo {
+  List(1,2,3).toSet()
+
+  class A[T](val x: T)
+  new A
+
+  import java.text.SimpleDateFormat
+  val sdf = new SimpleDateFormat("yyyyMMdd-HH0000")
+  sdf.format()
+}
diff --git a/test/files/neg/t8072.check b/test/files/neg/t8072.check
new file mode 100644
index 0000000..9267010
--- /dev/null
+++ b/test/files/neg/t8072.check
@@ -0,0 +1,4 @@
+t8072.scala:4: error: value ifParSeq is not a member of List[Int]
+  val y = x.ifParSeq[Int](throw new Exception).otherwise(0)  // Shouldn't compile
+            ^
+one error found
diff --git a/test/files/neg/t8072.scala b/test/files/neg/t8072.scala
new file mode 100644
index 0000000..2c8213e
--- /dev/null
+++ b/test/files/neg/t8072.scala
@@ -0,0 +1,6 @@
+class NoIfParSeq {
+  import collection.parallel._
+  val x = List(1,2)
+  val y = x.ifParSeq[Int](throw new Exception).otherwise(0)  // Shouldn't compile
+  val z = x.toParArray
+}
\ No newline at end of file
diff --git a/test/files/neg/t8104.check b/test/files/neg/t8104.check
new file mode 100644
index 0000000..69b3461
--- /dev/null
+++ b/test/files/neg/t8104.check
@@ -0,0 +1,4 @@
+Test_2.scala:20: error: could not find implicit value for parameter e: Generic.Aux[Test.C,(Int, Int)]
+  implicitly[Generic.Aux[C, (Int, Int)]]
+            ^
+one error found
diff --git a/test/files/neg/t8104/Macros_1.scala b/test/files/neg/t8104/Macros_1.scala
new file mode 100644
index 0000000..e135bd8
--- /dev/null
+++ b/test/files/neg/t8104/Macros_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.whitebox.Context
+
+object Macros {
+  def impl[T](c: Context)(implicit T: c.WeakTypeTag[T]) = {
+    import c.universe._
+    import definitions._
+    val fields = T.tpe.decls.toList.collect{ case x: TermSymbol if x.isVal && x.isCaseAccessor => x }
+    val Repr = appliedType(TupleClass(fields.length).asType.toType, fields.map(_.info))
+    q"new Generic[$T]{ type Repr = $Repr }"
+  }
+}
\ No newline at end of file
diff --git a/test/files/neg/t8104/Test_2.scala b/test/files/neg/t8104/Test_2.scala
new file mode 100644
index 0000000..a3bd940
--- /dev/null
+++ b/test/files/neg/t8104/Test_2.scala
@@ -0,0 +1,21 @@
+trait Generic[T] { type Repr }
+object Generic {
+  type Aux[T, Repr0] = Generic[T] { type Repr = Repr0 }
+  import scala.language.experimental.macros
+  implicit def materializeGeneric[T]: Generic[T] = macro Macros.impl[T]
+}
+
+object Test extends App {
+  case class C(x: Int, y: Int)
+
+  import scala.reflect.runtime.universe._
+  def reprify[T, Repr](x: T)(implicit generic: Generic.Aux[T, Repr], tag: WeakTypeTag[Repr]) = println(tag)
+  reprify(C(40, 2))
+
+  // this is a compilation error at the moment as explained in SI-8104
+  // because matchesPt in implicit search says that depoly(<type of materializeGeneric>) isn't a subtype of Generic.Aux[C, (Int, Int)]
+  // which is rightfully so, because depoly only replaces type parameters, not type members with wildcard types
+  // however in the future we might want to relax the matchesPt check, so this might start compiling
+  // therefore, if you've broken this test, then you should be happy, because most likely you've just enabled an interesting use case!
+  implicitly[Generic.Aux[C, (Int, Int)]]
+}
diff --git a/test/files/neg/t8104a.check b/test/files/neg/t8104a.check
deleted file mode 100644
index ef92c2e..0000000
--- a/test/files/neg/t8104a.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Test_2.scala:19: error: could not find implicit value for parameter e: Generic.Aux[Test.C,(Int, Int)]
-  implicitly[Generic.Aux[C, (Int, Int)]]
-            ^
-one error found
diff --git a/test/files/neg/t8104a/Macros_1.scala b/test/files/neg/t8104a/Macros_1.scala
deleted file mode 100644
index 688d069..0000000
--- a/test/files/neg/t8104a/Macros_1.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-import scala.reflect.macros.Context
-
-object Macros {
-  def impl[T](c: Context)(implicit T: c.WeakTypeTag[T]) = {
-    import c.universe._
-    import Flag._
-    import definitions._
-    val fields = T.tpe.declarations.toList.collect{ case x: TermSymbol if x.isVal && x.isCaseAccessor => x }
-    val Repr = appliedType(TupleClass(fields.length).asType.toType, fields.map(_.typeSignature))
-    c.Expr(Block(
-      List(ClassDef(
-        Modifiers(FINAL),
-        newTypeName("$anon"),
-        List(),
-        Template(
-          List(AppliedTypeTree(Ident(newTypeName("Generic")), List(TypeTree(T.tpe)))),
-          emptyValDef,
-          List(
-            DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(())))),
-            TypeDef(Modifiers(), newTypeName("Repr"), List(), TypeTree(Repr)))))),
-      Apply(Select(New(Ident(newTypeName("$anon"))), nme.CONSTRUCTOR), List())))
-  }
-}
\ No newline at end of file
diff --git a/test/files/neg/t8104a/Test_2.scala b/test/files/neg/t8104a/Test_2.scala
deleted file mode 100644
index f601fc3..0000000
--- a/test/files/neg/t8104a/Test_2.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-trait Generic[T] { type Repr }
-object Generic {
-  type Aux[T, Repr0] = Generic[T] { type Repr = Repr0 }
-  import scala.language.experimental.macros
-  implicit def materializeGeneric[T]: Generic[T] = macro Macros.impl[T]
-}
-
-object Test extends App {
-  case class C(x: Int, y: Int)
-
-  def reprify[T, Repr](x: T)(implicit generic: Generic.Aux[T, Repr]) = ???
-  reprify(C(40, 2))
-
-  // this is a compilation error at the moment as explained in SI-8104
-  // because matchesPt in implicit search says that depoly(<type of materializeGeneric>) isn't a subtype of Generic.Aux[C, (Int, Int)]
-  // which is rightfully so, because depoly only replaces type parameters, not type members with wildcard types
-  // however in the future we might want to relax the matchesPt check, so this might start compiling
-  // therefore, if you've broken this test, then you should be happy, because most likely you've just enabled an interesting use case!
-  implicitly[Generic.Aux[C, (Int, Int)]]
-}
diff --git a/test/files/neg/t8104b.check b/test/files/neg/t8104b.check
deleted file mode 100644
index 3214a13..0000000
--- a/test/files/neg/t8104b.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Test_2.scala:16: error: could not find implicit value for parameter generic: Generic.Aux[Test.C,Repr]
-  reprify(C(40, 2))
-         ^
-one error found
diff --git a/test/files/neg/t8104b/Macros_1.scala b/test/files/neg/t8104b/Macros_1.scala
deleted file mode 100644
index 688d069..0000000
--- a/test/files/neg/t8104b/Macros_1.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-import scala.reflect.macros.Context
-
-object Macros {
-  def impl[T](c: Context)(implicit T: c.WeakTypeTag[T]) = {
-    import c.universe._
-    import Flag._
-    import definitions._
-    val fields = T.tpe.declarations.toList.collect{ case x: TermSymbol if x.isVal && x.isCaseAccessor => x }
-    val Repr = appliedType(TupleClass(fields.length).asType.toType, fields.map(_.typeSignature))
-    c.Expr(Block(
-      List(ClassDef(
-        Modifiers(FINAL),
-        newTypeName("$anon"),
-        List(),
-        Template(
-          List(AppliedTypeTree(Ident(newTypeName("Generic")), List(TypeTree(T.tpe)))),
-          emptyValDef,
-          List(
-            DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(())))),
-            TypeDef(Modifiers(), newTypeName("Repr"), List(), TypeTree(Repr)))))),
-      Apply(Select(New(Ident(newTypeName("$anon"))), nme.CONSTRUCTOR), List())))
-  }
-}
\ No newline at end of file
diff --git a/test/files/neg/t8104b/Test_2.scala b/test/files/neg/t8104b/Test_2.scala
deleted file mode 100644
index a0d3594..0000000
--- a/test/files/neg/t8104b/Test_2.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-trait Generic[T] { type Repr }
-object Generic {
-  type Aux[T, Repr0] = Generic[T] { type Repr = Repr0 }
-  import scala.language.experimental.macros
-  implicit def materializeGeneric[T, Repr]: Generic.Aux[T, Repr] = macro Macros.impl[T]
-}
-
-object Test extends App {
-  case class C(x: Int, y: Int)
-
-  // this doesn't work because of SI-7470
-  // well, in fact SI-7470 has been fixed: https://github.com/scala/scala/pull/2499
-  // it's just that the fix hasn't been backported to 2.10.x
-  // if you've made this compile, consider taking a look at the aforementioned pull request
-  def reprify[T, Repr](x: T)(implicit generic: Generic.Aux[T, Repr]) = ???
-  reprify(C(40, 2))
-
-  // this is a compilation error at the moment as explained in SI-8104
-  // because matchesPt in implicit search says that depoly(<type of materializeGeneric>) isn't a subtype of Generic.Aux[C, (Int, Int)]
-  // which is rightfully so, because depoly only replaces type parameters, not type members with wildcard types
-  // however in the future we might want to relax the matchesPt check, so this might start compiling
-  // therefore, if you've broken this test, then you should be happy, because most likely you've just enabled an interesting use case!
-  implicitly[Generic.Aux[C, (Int, Int)]]
-}
diff --git a/test/files/neg/t8143a.check b/test/files/neg/t8143a.check
new file mode 100644
index 0000000..4e11000
--- /dev/null
+++ b/test/files/neg/t8143a.check
@@ -0,0 +1,5 @@
+t8143a.scala:2: error: overriding method f in class Foo of type => Int;
+ method f has weaker access privileges; it should not be private
+class Bar extends Foo { private def f = 10 }
+                                    ^
+one error found
diff --git a/test/files/neg/t8143a.scala b/test/files/neg/t8143a.scala
new file mode 100644
index 0000000..4ec539e
--- /dev/null
+++ b/test/files/neg/t8143a.scala
@@ -0,0 +1,15 @@
+class Foo { def f = 5 }
+class Bar extends Foo { private def f = 10 }
+
+
+class Foo1 { private def f = 5 }
+class Bar1 extends Foo1 { def f = 10 } // okay
+
+class Foo2 { private def f = 5 }
+class Bar2 extends Foo2 { private def f = 10 } // okay
+
+class Foo3 { private[this] def f = 5 }
+class Bar3 extends Foo3 { private def f = 10 } // okay
+
+class Foo4 { private def f = 5 }
+class Bar4 extends Foo4 { private[this] def f = 10 } // okay
\ No newline at end of file
diff --git a/test/files/neg/t8157.check b/test/files/neg/t8157.check
new file mode 100644
index 0000000..9a21a49
--- /dev/null
+++ b/test/files/neg/t8157.check
@@ -0,0 +1,4 @@
+t8157.scala:1: error: in object Test, multiple overloaded alternatives of method foo define default arguments.
+object Test {
+       ^
+one error found
diff --git a/test/files/neg/t8157.scala b/test/files/neg/t8157.scala
new file mode 100644
index 0000000..462d4fa
--- /dev/null
+++ b/test/files/neg/t8157.scala
@@ -0,0 +1,4 @@
+object Test {
+  def foo(printer: Any, question: => String, show: Boolean = false)(op: => Any): Any = ???
+  def foo[T](question: => String, show: Boolean)(op: => Any = ()): Any = ???
+}
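
The restriction being tested is that at most one overloaded alternative may declare default arguments; a sketch of a compiling variant keeps the defaults on a single alternative:

    object TestOneDefault {
      def foo(printer: Any, question: => String, show: Boolean = false)(op: => Any): Any = ???
      def foo[T](question: => String, show: Boolean)(op: => Any): Any = ???  // no default here
    }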
diff --git a/test/files/neg/t8158.check b/test/files/neg/t8158.check
new file mode 100644
index 0000000..fa6b744
--- /dev/null
+++ b/test/files/neg/t8158.check
@@ -0,0 +1,4 @@
+Test_2.scala:10: error: not enough patterns for <$anon: AnyRef> offering AnyRef{def isEmpty: Boolean; def get: $anon; def unapply(x: String): $anon}: expected 1, found 0
+      case X() =>
+           ^
+one error found
diff --git a/test/files/neg/t8158/Macros_1.scala b/test/files/neg/t8158/Macros_1.scala
new file mode 100644
index 0000000..b84e3ed
--- /dev/null
+++ b/test/files/neg/t8158/Macros_1.scala
@@ -0,0 +1,34 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.whitebox.Context
+
+object Max {
+  def impl(c: Context)(any: c.Expr[Any]): c.Expr[Any] = {
+    import c.universe._
+    def fail(msg: String) = c.abort(c.enclosingPosition, msg)
+    val t = c.macroApplication match {
+      case q"$_.unapply($unargs)" =>
+        /* hangs
+        */
+        q"""
+          new {
+            def isEmpty = false
+            def get = this
+            def unapply(x: String) = this
+          }.unapply($unargs)
+        """
+        /*
+        if get returns Unit or Boolean:
+        wrong number of patterns for <$anon: AnyRef> offering Unit: expected 1, found 0
+        */
+        /* straightforward
+        q"""
+          new {
+            def unapply(x: String) = true
+          }.unapply($unargs)
+        """
+        */
+      case _ => fail("bad appl")
+    }
+    c.Expr[Any](t)
+  }
+}
\ No newline at end of file
diff --git a/test/files/neg/t8158/Test_2.scala b/test/files/neg/t8158/Test_2.scala
new file mode 100644
index 0000000..f5ac661
--- /dev/null
+++ b/test/files/neg/t8158/Test_2.scala
@@ -0,0 +1,14 @@
+import scala.language.experimental.macros
+
+object X {
+  def unapply(any: Any): Any = macro Max.impl
+}
+
+class BugTest {
+  def bug(): Unit = {
+    "any" match {
+      case X() =>
+      case _ => ???
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/files/neg/t8177a.check b/test/files/neg/t8177a.check
new file mode 100644
index 0000000..0d01206
--- /dev/null
+++ b/test/files/neg/t8177a.check
@@ -0,0 +1,6 @@
+t8177a.scala:5: error: type mismatch;
+ found   : A{type Result = Int}
+ required: A{type Result = String}
+             : A { type Result = String} = x
+                                           ^
+one error found
diff --git a/test/files/neg/t8177a.scala b/test/files/neg/t8177a.scala
new file mode 100644
index 0000000..d1e47f8
--- /dev/null
+++ b/test/files/neg/t8177a.scala
@@ -0,0 +1,6 @@
+trait A { type Result }
+
+class PolyTests {
+  def wrong(x: A { type Result = Int })
+             : A { type Result = String} = x
+}
\ No newline at end of file
diff --git a/test/files/neg/t8182.check b/test/files/neg/t8182.check
new file mode 100644
index 0000000..a156d70
--- /dev/null
+++ b/test/files/neg/t8182.check
@@ -0,0 +1,22 @@
+t8182.scala:4: error: illegal start of simple pattern
+}
+^
+t8182.scala:7: error: illegal start of simple pattern
+}
+^
+t8182.scala:6: error: type application is not allowed in pattern
+  val a b[B]  // error then continue as for X
+        ^
+t8182.scala:10: error: illegal start of simple pattern
+    case a b[B] => // bumpy recovery
+                ^
+t8182.scala:10: error: type application is not allowed in pattern
+    case a b[B] => // bumpy recovery
+           ^
+t8182.scala:11: error: '=>' expected but '}' found.
+  }
+  ^
+t8182.scala:16: error: type application is not allowed in pattern
+    case a B[T] b =>
+           ^
+7 errors found
diff --git a/test/files/neg/t8182.scala b/test/files/neg/t8182.scala
new file mode 100644
index 0000000..1b3bc98
--- /dev/null
+++ b/test/files/neg/t8182.scala
@@ -0,0 +1,18 @@
+
+trait X {
+  val a b     // something missing
+}
+trait Y {
+  val a b[B]  // error then continue as for X
+}
+trait Z {
+  (null: Any) match {
+    case a b[B] => // bumpy recovery
+  }
+}
+object B { def unapply[W](a: Any) = Some((1,2)) }
+trait Z {
+  (null: Any) match {
+    case a B[T] b =>
+  }
+}
diff --git a/test/files/neg/t8207.check b/test/files/neg/t8207.check
new file mode 100644
index 0000000..59facd8
--- /dev/null
+++ b/test/files/neg/t8207.check
@@ -0,0 +1,7 @@
+t8207.scala:1: error: '.' expected but '}' found.
+class C { import C.this.toString }
+                                 ^
+t8207.scala:3: error: '.' expected but '}' found.
+class D { import D.this.toString }
+                                 ^
+two errors found
diff --git a/test/files/neg/t8207.scala b/test/files/neg/t8207.scala
new file mode 100644
index 0000000..738ce38
--- /dev/null
+++ b/test/files/neg/t8207.scala
@@ -0,0 +1,3 @@
+class C { import C.this.toString }
+
+class D { import D.this.toString }
diff --git a/test/files/neg/t8219-any-any-ref-equals.check b/test/files/neg/t8219-any-any-ref-equals.check
new file mode 100644
index 0000000..95d2536
--- /dev/null
+++ b/test/files/neg/t8219-any-any-ref-equals.check
@@ -0,0 +1,10 @@
+t8219-any-any-ref-equals.scala:5: error: method ==: (x$1: Any)Boolean does not take type parameters.
+  "".==[Int]
+       ^
+t8219-any-any-ref-equals.scala:6: error: method ==: (x$1: Any)Boolean does not take type parameters.
+  ("": AnyRef).==[Int]
+                 ^
+t8219-any-any-ref-equals.scala:7: error: method ==: (x$1: Any)Boolean does not take type parameters.
+  ("": Object).==[Int]
+                 ^
+three errors found
diff --git a/test/files/neg/t8219-any-any-ref-equals.scala b/test/files/neg/t8219-any-any-ref-equals.scala
new file mode 100644
index 0000000..f1b81fa
--- /dev/null
+++ b/test/files/neg/t8219-any-any-ref-equals.scala
@@ -0,0 +1,8 @@
+object Test {
+  // The error message tells us that AnyRef#== and Any#== are overloaded.
+  // A real class couldn't define such an overload; why do we allow AnyRef
+  // to do so?
+  "".==[Int]
+  ("": AnyRef).==[Int]
+  ("": Object).==[Int]
+}
diff --git a/test/files/neg/t8228.check b/test/files/neg/t8228.check
new file mode 100644
index 0000000..02eff4b
--- /dev/null
+++ b/test/files/neg/t8228.check
@@ -0,0 +1,4 @@
+t8228.scala:4: error: recursive value foo needs type
+    val foo = foo(null)
+                 ^
+one error found
diff --git a/test/files/neg/t8228.scala b/test/files/neg/t8228.scala
new file mode 100644
index 0000000..19d71ae
--- /dev/null
+++ b/test/files/neg/t8228.scala
@@ -0,0 +1,7 @@
+object X {
+  def bar = {
+    def foo(x: Any) = ""
+    val foo = foo(null)
+    foo(null) // cycle in isApplicableBasedOnArity
+  }
+}
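
The cycle arises because the val shadows the local method of the same name; a sketch of the usual fix, using a distinct name so the call still targets the method:

    object XNoCycle {
      def bar = {
        def foo(x: Any) = ""
        val first = foo(null)   // distinct name; this call resolves to the method
        foo(first)
      }
    }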
diff --git a/test/files/neg/t8229.check b/test/files/neg/t8229.check
new file mode 100644
index 0000000..cc504fa
--- /dev/null
+++ b/test/files/neg/t8229.check
@@ -0,0 +1,4 @@
+t8229.scala:5: error: value + is not a member of Object
+  o + ""
+    ^
+one error found
diff --git a/test/files/neg/t8229.scala b/test/files/neg/t8229.scala
new file mode 100644
index 0000000..9196631
--- /dev/null
+++ b/test/files/neg/t8229.scala
@@ -0,0 +1,6 @@
+import Predef.{any2stringadd => _, _}
+
+object Test {
+  val o = new Object()
+  o + ""
+}
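
With any2stringadd masked by the selective import, Object has no + method; a sketch of two explicit alternatives:

    object TestExplicitConcat {
      val o = new Object()
      val s1 = o.toString + ""   // make the conversion explicit
      val s2 = s"$o and more"    // or interpolate, which calls toString
    }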
diff --git a/test/files/neg/t8237-default.check b/test/files/neg/t8237-default.check
new file mode 100644
index 0000000..59fe21e
--- /dev/null
+++ b/test/files/neg/t8237-default.check
@@ -0,0 +1,13 @@
+t8237-default.scala:5: error: no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])Nothing exist so that it can be applied to arguments (List[Int])
+ --- because ---
+argument expression's type is not compatible with formal parameter type;
+ found   : List[Int]
+ required: ?T[?T[List[?T[X forSome { type X }]]]]
+  test4(test4$default$1)
+  ^
+t8237-default.scala:5: error: type mismatch;
+ found   : List[Int]
+ required: T[T[List[T[X forSome { type X }]]]]
+  test4(test4$default$1)
+        ^
+two errors found
diff --git a/test/files/neg/t8237-default.scala b/test/files/neg/t8237-default.scala
new file mode 100644
index 0000000..f695aa5
--- /dev/null
+++ b/test/files/neg/t8237-default.scala
@@ -0,0 +1,29 @@
+// This test case was extracted from `names-defaults-neg.scala`
+// It pinpoints an improvement to an error message that results from
+// a type inference failure
+object Test extends App {
+  test4(test4$default$1)
+
+  def test4[T[P]](x: T[T[List[T[X forSome { type X }]]]]) = ???
+  def test4$default$1[T[P]]: List[Int] = ???
+}
+
+/*
+OLD:
+ no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])Nothing exist so that it can be applied to arguments (List[Int])
+ --- because ---
+argument expression's type is not compatible with formal parameter type;
+ found   : List[Int]
+ required: ?T
+  test4(test4$default$1)
+  ^
+
+NEW:
+
+no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])Nothing exist so that it can be applied to arguments (List[Int])
+ --- because ---
+argument expression's type is not compatible with formal parameter type;
+ found   : List[Int]
+ required: ?T[?T[List[?T[X forSome { type X }]]]]
+  test4(test4$default$1)
+*/
diff --git a/test/files/neg/t8244.check b/test/files/neg/t8244.check
new file mode 100644
index 0000000..90b2bf6
--- /dev/null
+++ b/test/files/neg/t8244.check
@@ -0,0 +1,4 @@
+Test_2.scala:9: error: value exxx is not a member of ?0
+    raw.t.exxx // java.lang.ClassCastException: java.lang.String cannot be cast to X
+          ^
+one error found
diff --git a/test/files/neg/t8244/Raw_1.java b/test/files/neg/t8244/Raw_1.java
new file mode 100644
index 0000000..0c667f1
--- /dev/null
+++ b/test/files/neg/t8244/Raw_1.java
@@ -0,0 +1,4 @@
+public abstract class Raw_1<T>{
+        public Raw_1 raw() { return new Raw_1<String>() { public String t() { return ""; } }; }
+        public abstract T t();
+}
diff --git a/test/files/neg/t8244/Test_2.scala b/test/files/neg/t8244/Test_2.scala
new file mode 100644
index 0000000..152bb0b
--- /dev/null
+++ b/test/files/neg/t8244/Test_2.scala
@@ -0,0 +1,12 @@
+class X extends Raw_1[X] {
+  override def t = this
+  def exxx = 0
+}
+
+object Test extends App {
+  def c(s: X) = {
+    val raw = s.raw
+    raw.t.exxx // java.lang.ClassCastException: java.lang.String cannot be cast to X
+  }
+  c(new X())
+}
diff --git a/test/files/neg/t8244b.check b/test/files/neg/t8244b.check
new file mode 100644
index 0000000..f6cbf99
--- /dev/null
+++ b/test/files/neg/t8244b.check
@@ -0,0 +1,4 @@
+t8244b.scala:15: error: value exxx is not a member of _$1
+    raw.t.exxx
+          ^
+one error found
diff --git a/test/files/neg/t8244b.scala b/test/files/neg/t8244b.scala
new file mode 100644
index 0000000..2fb4f45
--- /dev/null
+++ b/test/files/neg/t8244b.scala
@@ -0,0 +1,18 @@
+class Raw_1[T]{
+  def raw(): Raw_1[_] = { new Raw_1[String] { def t() = "" } }
+  def t(): T
+}
+
+
+class X extends Raw_1[X] {
+  override def t = this
+  def exxx = 0
+}
+
+object Test extends App {
+  def c(s: X) = {
+    val raw = s.raw
+    raw.t.exxx
+  }
+  c(new X())
+}
diff --git a/test/files/neg/t8244c.check b/test/files/neg/t8244c.check
new file mode 100644
index 0000000..fd58a58
--- /dev/null
+++ b/test/files/neg/t8244c.check
@@ -0,0 +1,4 @@
+t8244c.scala:15: error: value exxx is not a member of _$1
+    raw.t.exxx
+          ^
+one error found
diff --git a/test/files/neg/t8244c.scala b/test/files/neg/t8244c.scala
new file mode 100644
index 0000000..2fb4f45
--- /dev/null
+++ b/test/files/neg/t8244c.scala
@@ -0,0 +1,18 @@
+class Raw_1[T]{
+  def raw(): Raw_1[_] = { new Raw_1[String] { def t() = "" } }
+  def t(): T
+}
+
+
+class X extends Raw_1[X] {
+  override def t = this
+  def exxx = 0
+}
+
+object Test extends App {
+  def c(s: X) = {
+    val raw = s.raw
+    raw.t.exxx
+  }
+  c(new X())
+}
diff --git a/test/files/neg/t8244e.check b/test/files/neg/t8244e.check
new file mode 100644
index 0000000..ebd7403
--- /dev/null
+++ b/test/files/neg/t8244e.check
@@ -0,0 +1,4 @@
+Test.scala:9: error: value exxx is not a member of ?0
+    raw.t.exxx // java.lang.ClassCastException: java.lang.String cannot be cast to X
+          ^
+one error found
diff --git a/test/files/neg/t8244e/Raw.java b/test/files/neg/t8244e/Raw.java
new file mode 100644
index 0000000..53202e3
--- /dev/null
+++ b/test/files/neg/t8244e/Raw.java
@@ -0,0 +1,4 @@
+public abstract class Raw<T>{
+        public Raw raw() { return new Raw<String>() { public String t() { return ""; } }; }
+        public abstract T t();
+}
diff --git a/test/files/neg/t8244e/Test.scala b/test/files/neg/t8244e/Test.scala
new file mode 100644
index 0000000..ca2a905
--- /dev/null
+++ b/test/files/neg/t8244e/Test.scala
@@ -0,0 +1,12 @@
+class X extends Raw[X] {
+  override def t = this
+  def exxx = 0
+}
+
+object Test extends App {
+  def c(s: X) = {
+    val raw = s.raw
+    raw.t.exxx // java.lang.ClassCastException: java.lang.String cannot be cast to X
+  }
+  c(new X())
+}
diff --git a/test/files/neg/t8265.check b/test/files/neg/t8265.check
new file mode 100644
index 0000000..7b1db1c
--- /dev/null
+++ b/test/files/neg/t8265.check
@@ -0,0 +1,6 @@
+t8265.scala:1: warning: Construct depends on unsound variance analysis and will not compile in scala 2.11 and beyond
+class Foo[+CC[X]] { type Coll = CC[_] }
+                         ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t8265.flags b/test/files/neg/t8265.flags
new file mode 100644
index 0000000..9d7ba7a
--- /dev/null
+++ b/test/files/neg/t8265.flags
@@ -0,0 +1 @@
+-Xsource:2.10 -deprecation -language:higherKinds -Xfatal-warnings
diff --git a/test/files/neg/t8265.scala b/test/files/neg/t8265.scala
new file mode 100644
index 0000000..a215903
--- /dev/null
+++ b/test/files/neg/t8265.scala
@@ -0,0 +1 @@
+class Foo[+CC[X]] { type Coll = CC[_] }
diff --git a/test/files/neg/t8266-invalid-interp.check b/test/files/neg/t8266-invalid-interp.check
new file mode 100644
index 0000000..70dd408
--- /dev/null
+++ b/test/files/neg/t8266-invalid-interp.check
@@ -0,0 +1,10 @@
+t8266-invalid-interp.scala:4: error: Trailing '\' escapes nothing.
+    f"a\",
+       ^
+t8266-invalid-interp.scala:5: error: invalid escape character at index 1 in "a\xc"
+    f"a\xc",
+       ^
+t8266-invalid-interp.scala:7: error: invalid escape character at index 1 in "a\vc"
+    f"a\vc"
+       ^
+three errors found
diff --git a/test/files/neg/t8266-invalid-interp.scala b/test/files/neg/t8266-invalid-interp.scala
new file mode 100644
index 0000000..4b26546
--- /dev/null
+++ b/test/files/neg/t8266-invalid-interp.scala
@@ -0,0 +1,9 @@
+
+trait X {
+  def f = Seq(
+    f"a\",
+    f"a\xc",
+    // following could suggest \u000b for vertical tab, similar for \a alert
+    f"a\vc"
+  )
+}
diff --git a/test/files/neg/t8300-overloading.check b/test/files/neg/t8300-overloading.check
new file mode 100644
index 0000000..edd34d4
--- /dev/null
+++ b/test/files/neg/t8300-overloading.check
@@ -0,0 +1,7 @@
+t8300-overloading.scala:15: error: double definition:
+def foo(name: Test.u.Name): Nothing at line 14 and
+def foo(name: Test.u.TermName): Nothing at line 15
+have same type after erasure: (name: Universe#NameApi)Nothing
+  def foo(name: TermName) = ???
+      ^
+one error found
diff --git a/test/files/neg/t8300-overloading.scala b/test/files/neg/t8300-overloading.scala
new file mode 100644
index 0000000..eb39315
--- /dev/null
+++ b/test/files/neg/t8300-overloading.scala
@@ -0,0 +1,16 @@
+// cf. pos/t8300-overloading.scala
+trait Universe {
+  type Name >: Null <: AnyRef with NameApi
+  trait NameApi
+
+  type TermName >: Null <: Name with TermNameApi
+  trait TermNameApi extends NameApi
+}
+
+object Test extends App {
+  val u: Universe = ???
+  import u._
+
+  def foo(name: Name) = ???
+  def foo(name: TermName) = ???
+}
\ No newline at end of file
diff --git a/test/files/neg/t836.scala b/test/files/neg/t836.scala
index de23cf5..3633b81 100644
--- a/test/files/neg/t836.scala
+++ b/test/files/neg/t836.scala
@@ -10,7 +10,7 @@ abstract class A {
 }
 
 class B extends A {
-  type MyObj = ObjImpl 
+  type MyObj = ObjImpl
   val myString:   S = "hello"
   val realString: String = myString   // error: type missmatch
 }
diff --git a/test/files/neg/t8372.check b/test/files/neg/t8372.check
new file mode 100644
index 0000000..6a6424a
--- /dev/null
+++ b/test/files/neg/t8372.check
@@ -0,0 +1,7 @@
+t8372.scala:7: error: No ClassTag available for T1
+  def unzip[T1, T2](a: Array[(T1, T2)]) = a.unzip
+                                            ^
+t8372.scala:9: error: No ClassTag available for T1
+  def unzip3[T1, T2, T3](a: Array[(T1, T2, T3)]): (Array[T1], Array[T2], Array[T3]) = a.unzip3
+                                                                                        ^
+two errors found
diff --git a/test/files/neg/t8372.scala b/test/files/neg/t8372.scala
new file mode 100644
index 0000000..60a674f
--- /dev/null
+++ b/test/files/neg/t8372.scala
@@ -0,0 +1,10 @@
+class t8372 {
+  // failed with "error: tpe T1 is an unresolved spliceable type"; that was caused by
+  // misguided type inference of type parameters in ArrayOps.unzip
+  // the type inference failed because the order of implicit arguments was wrong
+  // the evidence that T <: (T1, T2) came as last argument so it couldn't guide the
+  // type inference early enough
+  def unzip[T1, T2](a: Array[(T1, T2)]) = a.unzip
+  // the same as above
+  def unzip3[T1, T2, T3](a: Array[(T1, T2, T3)]): (Array[T1], Array[T2], Array[T3]) = a.unzip3
+}
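
A sketch of the shape that does compile, supplying the ClassTag evidence the errors above ask for via context bounds:

    import scala.reflect.ClassTag

    class WithEvidence {
      def unzip[T1: ClassTag, T2: ClassTag](a: Array[(T1, T2)]) = a.unzip
      def unzip3[T1: ClassTag, T2: ClassTag, T3: ClassTag](a: Array[(T1, T2, T3)]): (Array[T1], Array[T2], Array[T3]) = a.unzip3
    }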
diff --git a/test/files/neg/t8376.check b/test/files/neg/t8376.check
new file mode 100644
index 0000000..22ed942
--- /dev/null
+++ b/test/files/neg/t8376.check
@@ -0,0 +1,7 @@
+S.scala:2: error: overloaded method value m with alternatives:
+  (a: J*)Unit <and>
+  (a: String*)Unit
+ cannot be applied to (Int)
+  J.m(0)
+    ^
+one error found
diff --git a/test/files/neg/t8376/J.java b/test/files/neg/t8376/J.java
new file mode 100644
index 0000000..29aa23d
--- /dev/null
+++ b/test/files/neg/t8376/J.java
@@ -0,0 +1,4 @@
+class J {
+  public static void m(String... a) { }
+  public static void m(J... a) { }
+}
diff --git a/test/files/neg/t8376/S.scala b/test/files/neg/t8376/S.scala
new file mode 100644
index 0000000..a19f0d3
--- /dev/null
+++ b/test/files/neg/t8376/S.scala
@@ -0,0 +1,4 @@
+object S {
+  J.m(0)
+  // the error message should show `T*` in the method signatures rather than `<repeated>[T]`
+}
diff --git a/test/files/neg/t8431.check b/test/files/neg/t8431.check
new file mode 100644
index 0000000..75351a8
--- /dev/null
+++ b/test/files/neg/t8431.check
@@ -0,0 +1,27 @@
+t8431.scala:24: error: type mismatch;
+ found   : CanBuildFrom[Invariant[Nothing]]
+ required: CanBuildFrom[Invariant[G]]
+  s.combined // fail
+  ^
+t8431.scala:24: error: value combined is not a member of Invariant[Nothing]
+  s.combined // fail
+    ^
+t8431.scala:35: error: type mismatch;
+ found   : CanBuildFrom[Invariant[Nothing]]
+ required: CanBuildFrom[Invariant[G]]
+  s.combined // was okay!
+  ^
+t8431.scala:35: error: value combined is not a member of Invariant[Nothing]
+  s.combined // was okay!
+    ^
+t8431.scala:45: error: type mismatch;
+ found   : CanBuildFrom[Invariant[Nothing]]
+ required: CanBuildFrom[Invariant[G]]
+  convert2(s).combined
+          ^
+t8431.scala:48: error: type mismatch;
+ found   : CanBuildFrom[Invariant[Nothing]]
+ required: CanBuildFrom[Invariant[G]]
+  {val c1 = convert2(s); c1.combined}
+                    ^
+6 errors found
diff --git a/test/files/neg/t8431.scala b/test/files/neg/t8431.scala
new file mode 100644
index 0000000..032a1f3
--- /dev/null
+++ b/test/files/neg/t8431.scala
@@ -0,0 +1,63 @@
+trait Covariant[+A]
+trait Invariant[A] extends Covariant[A @annotation.unchecked.uncheckedVariance] 
+ 
+trait Combinable[G] {
+  def combined = 0
+}
+
+trait CanBuildFrom[+C]
+ 
+object C {
+  implicit def convert1[G, TRAVONCE[+e] <: Covariant[e]]
+    (xs: TRAVONCE[G]): Combinable[G] = ???
+ 
+  implicit def convert2[G, SET[e] <: Invariant[e]]
+    (xs: SET[_ <: G])
+    (implicit cbf: CanBuildFrom[SET[G]]): Combinable[G] = ???
+
+  implicit def cbf[A]: CanBuildFrom[Invariant[A]] = ???
+}
+// always failed
+class Test1 {
+  import C.{cbf, convert1, convert2}
+  val s: Invariant[Nothing] = ???
+  s.combined // fail
+}
+// didn't fail, now correctly fails
+class Test2 {
+  import C.{cbf, convert2, convert1}
+
+  val s: Invariant[Nothing] = ???
+
+  // Non-uniformity with Test1 was due to order of typechecking implicit candidates:
+  // the last candidate typechecked was the only one that could contribute undetermined type parameters
+  // to the enclosing context, due to mutation of `Context#undetparam` in `doTypedApply`.
+  s.combined // was okay!
+}
+
+
+class TestExplicit {
+  import C.{cbf, convert2}
+
+  val s: Invariant[Nothing] = ???
+
+  // Now the implicit tests fail uniformly, as per this explicit conversion
+  convert2(s).combined
+
+  // Breaking this expression down doesn't make it work.
+  {val c1 = convert2(s); c1.combined}
+}
+
+// These ones work before and after; inferring G=Null doesn't need to contribute an undetermined type param.
+class Test3 {
+   import C.{cbf, convert1, convert2}
+   val s: Invariant[Null] = ???
+   s.combined // okay
+}
+
+class Test4 {
+   import C.{cbf, convert2, convert1}
+
+   val s: Invariant[Null] = ???
+   s.combined // okay
+}
diff --git a/test/files/neg/t856.check b/test/files/neg/t856.check
index 02978e1..fb93f96 100644
--- a/test/files/neg/t856.check
+++ b/test/files/neg/t856.check
@@ -5,7 +5,7 @@ it has 2 unimplemented members.
  */
   // Members declared in scala.Equals
   def canEqual(that: Any): Boolean = ???
-  
+
   // Members declared in scala.Product2
   def _2: Double = ???
 
diff --git a/test/files/neg/t856.scala b/test/files/neg/t856.scala
index e50084b..fea216b 100644
--- a/test/files/neg/t856.scala
+++ b/test/files/neg/t856.scala
@@ -1,4 +1,4 @@
-trait Complex extends Product2[Double,Double] 
+trait Complex extends Product2[Double,Double]
 
 class ComplexRect(val _1:Double, _2:Double) extends Complex {
   override def toString = "ComplexRect("+_1+","+_2+")"
diff --git a/test/files/neg/t876.scala b/test/files/neg/t876.scala
index fb1461a..1f6a90d 100644
--- a/test/files/neg/t876.scala
+++ b/test/files/neg/t876.scala
@@ -11,11 +11,11 @@ object AssertionError extends AnyRef with App
     class Manager
     {
         final class B {}
-    
+
         val map = new HashMap[A, B]
     }
-    
-        
+
+
     def test[T](f: => T) { f }
 
     test {
diff --git a/test/files/neg/t877.check b/test/files/neg/t877.check
index 5f25bd4..c3d4ab6 100644
--- a/test/files/neg/t877.check
+++ b/test/files/neg/t877.check
@@ -1,7 +1,7 @@
 t877.scala:3: error: Invalid literal number
 trait Foo extends A(22A, Bug!) {}
                     ^
-t877.scala:3: error: parents of traits may not have parameters
+t877.scala:3: error: ')' expected but eof found.
 trait Foo extends A(22A, Bug!) {}
-                   ^
+                                 ^
 two errors found
diff --git a/test/files/neg/t877.scala b/test/files/neg/t877.scala
index 8cb9827..5e132a1 100644
--- a/test/files/neg/t877.scala
+++ b/test/files/neg/t877.scala
@@ -1,3 +1,3 @@
-class A 
+class A
 
 trait Foo extends A(22A, Bug!) {}
diff --git a/test/files/neg/t935.check b/test/files/neg/t935.check
index 8b73700..af634a2 100644
--- a/test/files/neg/t935.check
+++ b/test/files/neg/t935.check
@@ -4,7 +4,4 @@ t935.scala:7: error: type arguments [Test3.B] do not conform to class E's type p
 t935.scala:13: error: type arguments [Test4.B] do not conform to class E's type parameter bounds [T <: String]
   val b: String @E[B](new B) = "hi"
                  ^
-t935.scala:13: error: type arguments [Test4.B] do not conform to class E's type parameter bounds [T <: String]
-  val b: String @E[B](new B) = "hi"
-      ^
-three errors found
+two errors found
diff --git a/test/files/neg/t944.scala b/test/files/neg/t944.scala
index 352269a..dc80e5f 100644
--- a/test/files/neg/t944.scala
+++ b/test/files/neg/t944.scala
@@ -1,6 +1,6 @@
 object TooManyArgsFunction {
-  val f = (a1:Int, a2:Int, a3:Int, a4:Int, a5:Int, a6:Int, a7:Int, a8:Int, 
-           a9:Int, a10:Int, a11:Int, a12:Int, a13:Int, a14:Int, a15:Int, 
-           a16:Int, a17:Int, a18:Int, a19:Int, a20:Int, a21:Int, a22:Int, 
+  val f = (a1:Int, a2:Int, a3:Int, a4:Int, a5:Int, a6:Int, a7:Int, a8:Int,
+           a9:Int, a10:Int, a11:Int, a12:Int, a13:Int, a14:Int, a15:Int,
+           a16:Int, a17:Int, a18:Int, a19:Int, a20:Int, a21:Int, a22:Int,
            a23:Int) => 1
 }
diff --git a/test/files/neg/t997.check b/test/files/neg/t997.check
index 186095f..b118792 100644
--- a/test/files/neg/t997.check
+++ b/test/files/neg/t997.check
@@ -1,7 +1,4 @@
-t997.scala:13: error: wrong number of arguments for object Foo
+t997.scala:13: error: too many patterns for object Foo offering (String, String): expected 2, found 3
 "x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
-                    ^
-t997.scala:13: error: not found: value a
-"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
-                                                  ^
-two errors found
+                 ^
+one error found
diff --git a/test/files/neg/t997.scala b/test/files/neg/t997.scala
index e8d10f4..1198738 100644
--- a/test/files/neg/t997.scala
+++ b/test/files/neg/t997.scala
@@ -1,5 +1,5 @@
 // An extractor with 2 results
-object Foo { def unapply(x : String)  = Some(Pair(x, x)) }
+object Foo { def unapply(x : String)  = Some((x, x)) }
 
 object Test extends App {
 
diff --git a/test/files/neg/tailrec-2.check b/test/files/neg/tailrec-2.check
index d3432a7..1daad69 100644
--- a/test/files/neg/tailrec-2.check
+++ b/test/files/neg/tailrec-2.check
@@ -1,4 +1,4 @@
-tailrec-2.scala:8: error: could not optimize @tailrec annotated method f: it contains a recursive call targeting supertype Super[A]
+tailrec-2.scala:8: error: could not optimize @tailrec annotated method f: it contains a recursive call targeting a supertype
   @annotation.tailrec final def f[B >: A](mem: List[B]): List[B] = (null: Super[A]).f(mem)
                                                                                     ^
 tailrec-2.scala:9: error: @tailrec annotated method contains no recursive calls
diff --git a/test/files/neg/tailrec.scala b/test/files/neg/tailrec.scala
index 5b9018a..e0ebde9 100644
--- a/test/files/neg/tailrec.scala
+++ b/test/files/neg/tailrec.scala
@@ -9,21 +9,21 @@ object Winners {
 
   @tailrec def loopsucc1(x: Int): Int = loopsucc1(x - 1)
   @tailrec def loopsucc2[T](x: Int): Int = loopsucc2[T](x - 1)
-  
+
   def ding() {
     object dong {
       @tailrec def loopsucc3(x: Int): Int = loopsucc3(x)
     }
     ()
   }
-  
+
   def inner(q: Int) = {
     @tailrec
     def loopsucc4(x: Int): Int = loopsucc4(x + 1)
-    
+
     loopsucc4(q)
   }
-  
+
   object innerBob {
     @tailrec def loopsucc5(x: Int): Int = loopsucc5(x)
   }
@@ -45,19 +45,19 @@ object Failures {
     else n * facfail(n - 1)
 }
 
-class Failures {  
+class Failures {
   // not private, not final
   @tailrec def fail1(x: Int): Int = fail1(x)
-  
+
   // a typical between-chair-and-keyboard error
   @tailrec final def fail2[T](xs: List[T]): List[T] = xs match {
     case Nil      => Nil
     case x :: xs  => x :: fail2[T](xs)
   }
-  
+
   // unsafe
   @tailrec final def fail3[T](x: Int): Int = fail3(x - 1)
-  
+
   // unsafe
   class Tom[T](x: Int) {
     @tailrec final def fail4[U](other: Tom[U], x: Int): Int = other.fail4[U](other, x - 1)
diff --git a/test/files/neg/tcpoly_infer_ticket1162.scala b/test/files/neg/tcpoly_infer_ticket1162.scala
index b88bd35..0552b42 100644
--- a/test/files/neg/tcpoly_infer_ticket1162.scala
+++ b/test/files/neg/tcpoly_infer_ticket1162.scala
@@ -1,8 +1,8 @@
 object Test {
-  trait Expression[A,B] 
+  trait Expression[A,B]
 
   case class Lift[A,B,F[_]]() extends Expression[F[A],F[B]]
-  
+
   def simplify[A,B]: Expression[A,B] = Lift[A,B]()
 }
 
diff --git a/test/files/neg/tcpoly_ticket2101.scala b/test/files/neg/tcpoly_ticket2101.scala
index 3af07ac..68f061c 100644
--- a/test/files/neg/tcpoly_ticket2101.scala
+++ b/test/files/neg/tcpoly_ticket2101.scala
@@ -4,10 +4,10 @@ class T2[X] extends T[T2, X] // ill-typed
 // Forall Y. T2[Y] <: T[T2, X]
 
 // debugging before fix:
-// def isSubType0 -->       
+// def isSubType0 -->
 // case (PolyType(tparams1, res1), PolyType(tparams2, res2)) => println("<:<PT: "+((tparams1, res1), (tparams2, res2))) //@MDEBUG
 //   (tparams1.length == tparams2.length &&
-//    List.forall2(tparams1, tparams2) 
+//    List.forall2(tparams1, tparams2)
 //      ((p1, p2) => p2.info.substSym(tparams2, tparams1) <:< p1.info) &&
 //    res1 <:< res2.substSym(tparams2, tparams1))
 
@@ -22,7 +22,7 @@ class T2[X] extends T[T2, X] // ill-typed
 //   (tparams1.length == tparams2.length &&
 //    {
 //      val tpsFresh = cloneSymbols(tparams1) // @M cloneSymbols(tparams2) should be equivalent -- TODO: check
-//      List.forall2(tparams1, tparams2) 
+//      List.forall2(tparams1, tparams2)
 //         ((p1, p2) => p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh)) &&
-//       res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh)   
+//       res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh)
 //    })
diff --git a/test/files/neg/tcpoly_typealias.scala b/test/files/neg/tcpoly_typealias.scala
index 96e9349..6c7f80c 100644
--- a/test/files/neg/tcpoly_typealias.scala
+++ b/test/files/neg/tcpoly_typealias.scala
@@ -12,7 +12,7 @@ trait A3 {
 
 trait FooCov[+x]
 trait FooCon[-x]
-trait FooBound[+x <: String] 
+trait FooBound[+x <: String]
 
 trait BOk1 extends A {
   type m[+x] = FooCov[x]
@@ -30,8 +30,8 @@ trait BOk4 extends A3 {
   type m[+x] = FooCov[x] // weaker variance
 }
 
-// there are two aspects to check: 
- // does type alias signature (not considering RHS) correspond to abstract type member in super class 
+// there are two aspects to check:
+ // does type alias signature (not considering RHS) correspond to abstract type member in super class
  // does RHS correspond to the type alias sig
 trait BInv extends A{
   type m[x] = FooCov[x] // error: invariant x in alias def
diff --git a/test/files/neg/tcpoly_variance_enforce.scala b/test/files/neg/tcpoly_variance_enforce.scala
index 0db7b69..ddff0e9 100644
--- a/test/files/neg/tcpoly_variance_enforce.scala
+++ b/test/files/neg/tcpoly_variance_enforce.scala
@@ -6,7 +6,7 @@ trait coll3[m[x]]
 
 trait coll4[m[x <: y], y]
 
-class FooInvar[x]                 
+class FooInvar[x]
 class FooContra[-x]
 class FooCov[+x]
 class FooString[+x <: String]
@@ -15,15 +15,15 @@ object fcollok extends coll[FooCov]
 object fcollinv extends coll[FooInvar]      // error
 object fcollcon extends coll[FooContra]     // error
 object fcollwb extends coll[FooString]      // error
-                                            
+
 object fcoll2ok extends coll2[FooCov]       // error
 object fcoll2inv extends coll2[FooInvar]    // error
-object fcoll2con extends coll2[FooContra]   
+object fcoll2con extends coll2[FooContra]
 object fcoll2wb extends coll2[FooString]      // error
-                                            
-object fcoll3ok extends  coll3[FooCov]      
-object fcoll3inv extends coll3[FooInvar]    
-object fcoll3con extends coll3[FooContra]   
+
+object fcoll3ok extends  coll3[FooCov]
+object fcoll3inv extends coll3[FooInvar]
+object fcoll3con extends coll3[FooContra]
 object fcoll3wb extends  coll3[FooString]   // error
 
 object fcoll4ok extends  coll4[FooString, String]
@@ -33,7 +33,7 @@ object fcoll4_2 extends  coll4[FooString, Any] // error
 
 object test {
   var ok: coll[FooCov] = _
-   
+
   def x: coll[FooInvar] = sys.error("foo") // error
   def y: coll[FooContra] = sys.error("foo") // error
 }
diff --git a/test/files/neg/type-diagnostics.scala b/test/files/neg/type-diagnostics.scala
index de7e7ad..c417132 100644
--- a/test/files/neg/type-diagnostics.scala
+++ b/test/files/neg/type-diagnostics.scala
@@ -6,14 +6,14 @@ object SetVsSet {
 
 object TParamConfusion {
   def strings(xs: List[String]) = xs
-  
+
   def f1[a <% Ordered[a]](x: List[a]) = {
     def f2[b >: List[a] <% Ordered[b]](x: List[a], y: b): Int = {
       def f3(xs: List[a], ys: List[a]) = -1
       y match { case y1: List[a] => f3(x, y1) }
     }
   }
-  
+
   def f2[String](s: String) = strings(List(s))
 }
 
diff --git a/test/files/neg/typeerror.check b/test/files/neg/typeerror.check
index 3ce11da..f117e70 100644
--- a/test/files/neg/typeerror.check
+++ b/test/files/neg/typeerror.check
@@ -3,4 +3,9 @@ typeerror.scala:6: error: type mismatch;
  required: scala.Long
     else add2(x.head, y.head) :: add(x.tail, y.tail)
                 ^
-one error found
+typeerror.scala:6: error: type mismatch;
+ found   : Long(in method add)
+ required: scala.Long
+    else add2(x.head, y.head) :: add(x.tail, y.tail)
+                        ^
+two errors found
diff --git a/test/files/neg/unchecked-abstract.check b/test/files/neg/unchecked-abstract.check
index 6e811dc..7201908 100644
--- a/test/files/neg/unchecked-abstract.check
+++ b/test/files/neg/unchecked-abstract.check
@@ -1,25 +1,27 @@
-unchecked-abstract.scala:16: error: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:16: warning: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure
     /*   warn */ println(x.isInstanceOf[Contravariant[H]])
                                        ^
-unchecked-abstract.scala:21: error: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:21: warning: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure
     /*   warn */ println(x.isInstanceOf[Contravariant[H]])
                                        ^
-unchecked-abstract.scala:27: error: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:27: warning: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
     /*   warn */ println(x.isInstanceOf[Invariant[T]])
                                        ^
-unchecked-abstract.scala:28: error: abstract type L in type Invariant[M.this.L] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:28: warning: abstract type L in type Invariant[M.this.L] is unchecked since it is eliminated by erasure
     /*   warn */ println(x.isInstanceOf[Invariant[L]])
                                        ^
-unchecked-abstract.scala:31: error: abstract type H in type Invariant[M.this.H] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:31: warning: abstract type H in type Invariant[M.this.H] is unchecked since it is eliminated by erasure
     /*   warn */ println(x.isInstanceOf[Invariant[H]])
                                        ^
-unchecked-abstract.scala:33: error: abstract type L in type Invariant[M.this.L] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:33: warning: abstract type L in type Invariant[M.this.L] is unchecked since it is eliminated by erasure
     /*   warn */ println(x.isInstanceOf[Invariant[L]])
                                        ^
-unchecked-abstract.scala:36: error: abstract type H in type Invariant[M.this.H] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:36: warning: abstract type H in type Invariant[M.this.H] is unchecked since it is eliminated by erasure
     /*   warn */ println(x.isInstanceOf[Invariant[H]])
                                        ^
-unchecked-abstract.scala:37: error: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:37: warning: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
     /*   warn */ println(x.isInstanceOf[Invariant[T]])
                                        ^
-8 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+8 warnings found
+one error found
diff --git a/test/files/neg/unchecked-impossible.check b/test/files/neg/unchecked-impossible.check
index 0ab371d..d150a5a 100644
--- a/test/files/neg/unchecked-impossible.check
+++ b/test/files/neg/unchecked-impossible.check
@@ -1,4 +1,10 @@
-unchecked-impossible.scala:5: error: fruitless type test: a value of type T2[Int,Int] cannot also be a Seq[A]
+unchecked-impossible.scala:5: warning: fruitless type test: a value of type T2[Int,Int] cannot also be a Seq[A]
     case Seq(x) =>
             ^
+unchecked-impossible.scala:5: error: pattern type is incompatible with expected type;
+ found   : Seq[A]
+ required: T2[Int,Int]
+    case Seq(x) =>
+            ^
+one warning found
 one error found
diff --git a/test/files/neg/unchecked-knowable.check b/test/files/neg/unchecked-knowable.check
index d279427..327a5f2 100644
--- a/test/files/neg/unchecked-knowable.check
+++ b/test/files/neg/unchecked-knowable.check
@@ -1,7 +1,9 @@
-unchecked-knowable.scala:18: error: fruitless type test: a value of type Bippy cannot also be a A1
+unchecked-knowable.scala:18: warning: fruitless type test: a value of type Bippy cannot also be a A1
   /*   warn */ (new Bippy).isInstanceOf[A1]
                                        ^
-unchecked-knowable.scala:19: error: fruitless type test: a value of type Bippy cannot also be a B1
+unchecked-knowable.scala:19: warning: fruitless type test: a value of type Bippy cannot also be a B1
   /*   warn */ (new Bippy).isInstanceOf[B1]
                                        ^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/unchecked-refinement.check b/test/files/neg/unchecked-refinement.check
index d815174..e85a51f 100644
--- a/test/files/neg/unchecked-refinement.check
+++ b/test/files/neg/unchecked-refinement.check
@@ -1,13 +1,15 @@
-unchecked-refinement.scala:17: error: abstract type U in type pattern Foo[U,U,V] is unchecked since it is eliminated by erasure
+unchecked-refinement.scala:17: warning: abstract type U in type pattern Foo[U,U,V] is unchecked since it is eliminated by erasure
     /*   warn */ case _: Foo[U, U, V] if b       => ()
                          ^
-unchecked-refinement.scala:19: error: non-variable type argument Any in type pattern Foo[Any,U,V] is unchecked since it is eliminated by erasure
+unchecked-refinement.scala:19: warning: non-variable type argument Any in type pattern Foo[Any,U,V] is unchecked since it is eliminated by erasure
     /*   warn */ case _: Foo[Any, U, V] if b     => ()
                          ^
-unchecked-refinement.scala:23: error: a pattern match on a refinement type is unchecked
+unchecked-refinement.scala:23: warning: a pattern match on a refinement type is unchecked
     /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy  // this could/should do an instance check and not warn
                                 ^
-unchecked-refinement.scala:24: error: a pattern match on a refinement type is unchecked
+unchecked-refinement.scala:24: warning: a pattern match on a refinement type is unchecked
     /* nowarn - todo */ case x: AnyRef { def size: Int } if b  => x.size   // this could/should do a static conformance test and not warn
                                 ^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/unchecked-suppress.check b/test/files/neg/unchecked-suppress.check
index 2e23d21..d3dc860 100644
--- a/test/files/neg/unchecked-suppress.check
+++ b/test/files/neg/unchecked-suppress.check
@@ -1,10 +1,12 @@
-unchecked-suppress.scala:4: error: non-variable type argument Int in type pattern Set[Int] is unchecked since it is eliminated by erasure
+unchecked-suppress.scala:4: warning: non-variable type argument Int in type pattern scala.collection.immutable.Set[Int] (the underlying of Set[Int]) is unchecked since it is eliminated by erasure
     case xs: Set[Int]                              => xs.head   // unchecked
              ^
-unchecked-suppress.scala:5: error: non-variable type argument String in type pattern Map[String @unchecked,String] is unchecked since it is eliminated by erasure
+unchecked-suppress.scala:5: warning: non-variable type argument String in type pattern scala.collection.immutable.Map[String @unchecked,String] (the underlying of Map[String @unchecked,String]) is unchecked since it is eliminated by erasure
     case xs: Map[String @unchecked, String]        => xs.head   // one unchecked, one okay
              ^
-unchecked-suppress.scala:7: error: non-variable type argument Int in type pattern (Int, Int) => Int is unchecked since it is eliminated by erasure
+unchecked-suppress.scala:7: warning: non-variable type argument Int in type pattern (Int, Int) => Int is unchecked since it is eliminated by erasure
     case f: ((Int, Int) => Int)                    =>           // unchecked
                         ^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/unchecked.check b/test/files/neg/unchecked.check
index 2883b71..033cffb 100644
--- a/test/files/neg/unchecked.check
+++ b/test/files/neg/unchecked.check
@@ -1,19 +1,21 @@
-unchecked.scala:18: error: non-variable type argument String in type pattern Iterable[String] is unchecked since it is eliminated by erasure
+unchecked.scala:18: warning: non-variable type argument String in type pattern Iterable[String] (the underlying of Iterable[String]) is unchecked since it is eliminated by erasure
     case xs: Iterable[String] => xs.head // unchecked
              ^
-unchecked.scala:22: error: non-variable type argument Any in type pattern Set[Any] is unchecked since it is eliminated by erasure
+unchecked.scala:22: warning: non-variable type argument Any in type pattern scala.collection.immutable.Set[Any] (the underlying of Set[Any]) is unchecked since it is eliminated by erasure
     case xs: Set[Any] => xs.head // unchecked
              ^
-unchecked.scala:26: error: non-variable type argument Any in type pattern Map[Any,Any] is unchecked since it is eliminated by erasure
+unchecked.scala:26: warning: non-variable type argument Any in type pattern scala.collection.immutable.Map[Any,Any] (the underlying of Map[Any,Any]) is unchecked since it is eliminated by erasure
     case xs: Map[Any, Any] => xs.head // unchecked
              ^
-unchecked.scala:35: error: non-variable type argument List[Nothing] in type pattern Test.Contra[List[Nothing]] is unchecked since it is eliminated by erasure
+unchecked.scala:35: warning: non-variable type argument List[Nothing] in type pattern Test.Contra[List[Nothing]] is unchecked since it is eliminated by erasure
     case xs: Contra[List[Nothing]] => xs.head // unchecked
              ^
-unchecked.scala:50: error: non-variable type argument String in type pattern Test.Exp[String] is unchecked since it is eliminated by erasure
+unchecked.scala:50: warning: non-variable type argument String in type pattern Test.Exp[String] is unchecked since it is eliminated by erasure
     case ArrayApply(x: Exp[Array[T]], _, j: Exp[String]) => x // unchecked
                                             ^
-unchecked.scala:55: error: non-variable type argument Array[T] in type pattern Test.Exp[Array[T]] is unchecked since it is eliminated by erasure
+unchecked.scala:55: warning: non-variable type argument Array[T] in type pattern Test.Exp[Array[T]] is unchecked since it is eliminated by erasure
     case ArrayApply(x: Exp[Array[T]], _, _) => x // unchecked
                        ^
-6 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/neg/unchecked2.check b/test/files/neg/unchecked2.check
index 68fdfa8..a7b8391 100644
--- a/test/files/neg/unchecked2.check
+++ b/test/files/neg/unchecked2.check
@@ -1,43 +1,45 @@
-unchecked2.scala:4: error: fruitless type test: a value of type Some[List[Int]] cannot also be a Option[List[String]] (but still might match its erasure)
+unchecked2.scala:4: warning: fruitless type test: a value of type Some[List[Int]] cannot also be a Option[List[String]] (but still might match its erasure)
   /*   warn */ Some(List(1)).isInstanceOf[Option[List[String]]]
                                          ^
-unchecked2.scala:5: error: non-variable type argument Option[_] in type Option[Option[_]] is unchecked since it is eliminated by erasure
+unchecked2.scala:5: warning: non-variable type argument Option[_] in type Option[Option[_]] is unchecked since it is eliminated by erasure
   /*   warn */ Some(123).isInstanceOf[Option[Option[_]]]
                                      ^
-unchecked2.scala:6: error: fruitless type test: a value of type Some[Int] cannot also be a Option[String] (but still might match its erasure)
+unchecked2.scala:6: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[String] (but still might match its erasure)
   /*   warn */ Some(123).isInstanceOf[Option[String]]
                                      ^
-unchecked2.scala:7: error: fruitless type test: a value of type Some[Int] cannot also be a Option[List[String]] (but still might match its erasure)
+unchecked2.scala:7: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[List[String]] (but still might match its erasure)
   /*   warn */ Some(123).isInstanceOf[Option[List[String]]]
                                      ^
-unchecked2.scala:8: error: fruitless type test: a value of type Some[Int] cannot also be a Option[List[Int => String]] (but still might match its erasure)
+unchecked2.scala:8: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[List[Int => String]] (but still might match its erasure)
   /*   warn */ Some(123).isInstanceOf[Option[List[Int => String]]]
                                      ^
-unchecked2.scala:9: error: fruitless type test: a value of type Some[Int] cannot also be a Option[(String, Double)] (but still might match its erasure)
+unchecked2.scala:9: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[(String, Double)] (but still might match its erasure)
   /*   warn */ Some(123).isInstanceOf[Option[(String, Double)]]
                                      ^
-unchecked2.scala:10: error: fruitless type test: a value of type Some[Int] cannot also be a Option[String => Double] (but still might match its erasure)
+unchecked2.scala:10: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[String => Double] (but still might match its erasure)
   /*   warn */ Some(123).isInstanceOf[Option[String => Double]]
                                      ^
-unchecked2.scala:14: error: non-variable type argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure
+unchecked2.scala:14: warning: non-variable type argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure
   /*   warn */ (Some(List(1)): Any).isInstanceOf[Option[List[String]]]
                                                 ^
-unchecked2.scala:15: error: non-variable type argument Int in type Option[Int] is unchecked since it is eliminated by erasure
+unchecked2.scala:15: warning: non-variable type argument Int in type Option[Int] is unchecked since it is eliminated by erasure
   /*   warn */ (Some(123): Any).isInstanceOf[Option[Int]]
                                             ^
-unchecked2.scala:16: error: non-variable type argument String in type Option[String] is unchecked since it is eliminated by erasure
+unchecked2.scala:16: warning: non-variable type argument String in type Option[String] is unchecked since it is eliminated by erasure
   /*   warn */ (Some(123): Any).isInstanceOf[Option[String]]
                                             ^
-unchecked2.scala:17: error: non-variable type argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure
+unchecked2.scala:17: warning: non-variable type argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure
   /*   warn */ (Some(123): Any).isInstanceOf[Option[List[String]]]
                                             ^
-unchecked2.scala:18: error: non-variable type argument List[Int => String] in type Option[List[Int => String]] is unchecked since it is eliminated by erasure
+unchecked2.scala:18: warning: non-variable type argument List[Int => String] in type Option[List[Int => String]] is unchecked since it is eliminated by erasure
   /*   warn */ (Some(123): Any).isInstanceOf[Option[List[Int => String]]]
                                             ^
-unchecked2.scala:19: error: non-variable type argument (String, Double) in type Option[(String, Double)] is unchecked since it is eliminated by erasure
+unchecked2.scala:19: warning: non-variable type argument (String, Double) in type Option[(String, Double)] is unchecked since it is eliminated by erasure
   /*   warn */ (Some(123): Any).isInstanceOf[Option[(String, Double)]]
                                             ^
-unchecked2.scala:20: error: non-variable type argument String => Double in type Option[String => Double] is unchecked since it is eliminated by erasure
+unchecked2.scala:20: warning: non-variable type argument String => Double in type Option[String => Double] is unchecked since it is eliminated by erasure
   /*   warn */ (Some(123): Any).isInstanceOf[Option[String => Double]]
                                             ^
-14 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+14 warnings found
+one error found
diff --git a/test/files/neg/unchecked3.check b/test/files/neg/unchecked3.check
index f4f0c74..0a52605 100644
--- a/test/files/neg/unchecked3.check
+++ b/test/files/neg/unchecked3.check
@@ -1,37 +1,42 @@
-unchecked3.scala:24: error: non-variable type argument Double in type pattern E1[Double] is unchecked since it is eliminated by erasure
+unchecked3.scala:24: warning: non-variable type argument Double in type pattern E1[Double] is unchecked since it is eliminated by erasure
   /*   warn */ def peerTypes2(x: B1[Int]) = x match { case _: E1[Double] => true }
                                                               ^
-unchecked3.scala:25: error: non-variable type argument Double in type pattern F1[Double] is unchecked since it is eliminated by erasure
+unchecked3.scala:25: warning: non-variable type argument Double in type pattern F1[Double] is unchecked since it is eliminated by erasure
   /*   warn */ def peerTypes3(x: B1[_]) = x match { case _: F1[Double] => true }
                                                             ^
-unchecked3.scala:28: error: non-variable type argument Int in type pattern A2[Int] is unchecked since it is eliminated by erasure
+unchecked3.scala:28: warning: non-variable type argument Int in type pattern A2[Int] is unchecked since it is eliminated by erasure
   /*   warn */ def twotypes1[T](x: B2[T, Int]) = x match { case _: A2[Int] => true }
                                                                    ^
-unchecked3.scala:32: error: non-variable type argument Int in type pattern B2[_,Int] is unchecked since it is eliminated by erasure
+unchecked3.scala:32: warning: non-variable type argument Int in type pattern B2[_,Int] is unchecked since it is eliminated by erasure
   /*   warn */ def twotypes5[T](x: A2[T]) = x match { case _: B2[_, Int] => true }
                                                               ^
-unchecked3.scala:40: error: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
+unchecked3.scala:40: warning: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
     /*   warn */ case _: Array[List[String]]         => ()
                          ^
-unchecked3.scala:43: error: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
+unchecked3.scala:43: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
     /*   warn */ case _: Array[Array[List[String]]]  => ()
                          ^
-unchecked3.scala:50: error: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
+unchecked3.scala:50: warning: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
     /*   warn */ case _: Array[List[String]]         => ()
                          ^
-unchecked3.scala:53: error: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
+unchecked3.scala:53: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
     /*   warn */ case _: Array[Array[List[String]]]  => ()
                          ^
-unchecked3.scala:60: error: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
+unchecked3.scala:60: warning: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
     /*   warn */ case _: Array[List[String]]        => ()
                          ^
-unchecked3.scala:62: error: non-variable type argument Array[String] in type pattern Array[List[Array[String]]] is unchecked since it is eliminated by erasure
+unchecked3.scala:62: warning: non-variable type argument Array[String] in type pattern Array[List[Array[String]]] is unchecked since it is eliminated by erasure
     /*   warn */ case _: Array[List[Array[String]]] => ()
                          ^
-unchecked3.scala:63: error: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
+unchecked3.scala:63: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
     /*   warn */ case _: Array[Array[List[String]]] => ()
                          ^
-unchecked3.scala:75: error: abstract type A in type pattern Set[Q.this.A] is unchecked since it is eliminated by erasure
+unchecked3.scala:75: warning: abstract type A in type pattern scala.collection.immutable.Set[Q.this.A] (the underlying of Set[Q.this.A]) is unchecked since it is eliminated by erasure
       /*   warn */ case xs: Set[A]  => xs.head
                             ^
-12 errors found
+unchecked3.scala:62: warning: unreachable code
+    /*   warn */ case _: Array[List[Array[String]]] => ()
+                                                       ^
+error: No warnings can be incurred under -Xfatal-warnings.
+13 warnings found
+one error found
diff --git a/test/files/neg/unicode-unterminated-quote.check b/test/files/neg/unicode-unterminated-quote.check
index 5085505..1664887 100644
--- a/test/files/neg/unicode-unterminated-quote.check
+++ b/test/files/neg/unicode-unterminated-quote.check
@@ -1,7 +1,7 @@
 unicode-unterminated-quote.scala:2: error: unclosed string literal
-  val x = /u0022
+  val x = \u0022
                ^
 unicode-unterminated-quote.scala:2: error: '}' expected but eof found.
-  val x = /u0022
+  val x = \u0022
                 ^
 two errors found
diff --git a/test/files/neg/unit-returns-value.check b/test/files/neg/unit-returns-value.check
index ab458a3..f30a506 100644
--- a/test/files/neg/unit-returns-value.check
+++ b/test/files/neg/unit-returns-value.check
@@ -1,7 +1,15 @@
-unit-returns-value.scala:4: error: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+unit-returns-value.scala:4: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
     if (b) return 5
                   ^
-unit-returns-value.scala:4: error: enclosing method f has result type Unit: return value discarded
+unit-returns-value.scala:4: warning: enclosing method f has result type Unit: return value discarded
     if (b) return 5
            ^
-two errors found
+unit-returns-value.scala:22: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    i1 // warn
+    ^
+unit-returns-value.scala:23: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    i2 // warn
+    ^
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/unit-returns-value.scala b/test/files/neg/unit-returns-value.scala
index ecc981f..fc5a370 100644
--- a/test/files/neg/unit-returns-value.scala
+++ b/test/files/neg/unit-returns-value.scala
@@ -3,9 +3,30 @@ object Test {
     var b = false
     if (b) return 5
   }
-  
+
   // no warning
   def g {
     return println("hello")
   }
 }
+
+class UnusedValues {
+  var i1 = 2
+  val i2 = 2
+  lazy val i3 = 2
+  object i4 { }
+  def i5 = 2
+  final def i6 = 2
+
+  def x = {
+    i1 // warn
+    i2 // warn
+    i3 // no warn
+    i4 // no warn
+    i5 // no warn
+    i6 // could warn someday, if i6 returned 2.type instead of Int
+
+    5
+  }
+}
+
diff --git a/test/files/neg/unreachablechar.check b/test/files/neg/unreachablechar.check
index 58ce1a7..a621196 100644
--- a/test/files/neg/unreachablechar.check
+++ b/test/files/neg/unreachablechar.check
@@ -1,4 +1,12 @@
-unreachablechar.scala:5: error: unreachable code
+unreachablechar.scala:4: warning: patterns after a variable pattern cannot match (SLS 8.1.1)
+    case _ => println("stuff");
+         ^
+unreachablechar.scala:5: warning: unreachable code due to variable pattern on line 4
     case 'f' => println("not stuff?");
                        ^
+unreachablechar.scala:5: warning: unreachable code
+    case 'f' => println("not stuff?");
+                       ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
 one error found
diff --git a/test/files/neg/unreachablechar.flags b/test/files/neg/unreachablechar.flags
index 809e9ff..85d8eb2 100644
--- a/test/files/neg/unreachablechar.flags
+++ b/test/files/neg/unreachablechar.flags
@@ -1 +1 @@
- -Xoldpatmat
+-Xfatal-warnings
diff --git a/test/files/neg/valueclasses-doubledefs.check b/test/files/neg/valueclasses-doubledefs.check
index 556d7a0..ec513ac 100644
--- a/test/files/neg/valueclasses-doubledefs.check
+++ b/test/files/neg/valueclasses-doubledefs.check
@@ -1,6 +1,6 @@
 valueclasses-doubledefs.scala:5: error: double definition:
-method apply:(x: Meter)String and
-method apply:(x: Double)String at line 4
+def apply(x: Double): String at line 4 and
+def apply(x: Meter): String at line 5
 have same type after erasure: (x: Double)String
  def apply(x: Meter) = x.toString
      ^
diff --git a/test/files/neg/valueclasses-pavlov.check b/test/files/neg/valueclasses-pavlov.check
index 031589e..17102a0 100644
--- a/test/files/neg/valueclasses-pavlov.check
+++ b/test/files/neg/valueclasses-pavlov.check
@@ -1,6 +1,6 @@
 valueclasses-pavlov.scala:8: error: double definition:
-method foo:(x: Box2)String and
-method foo:(x: String)String at line 7
+def foo(x: String): String at line 7 and
+def foo(x: Box2): String at line 8
 have same type after erasure: (x: String)String
   def foo(x: Box2) = "foo(Box2): ok"
       ^
diff --git a/test/files/neg/valueclasses.check b/test/files/neg/valueclasses.check
index 3b82a83..35d38aa 100644
--- a/test/files/neg/valueclasses.check
+++ b/test/files/neg/valueclasses.check
@@ -4,40 +4,43 @@ trait T extends AnyVal // fail
 valueclasses.scala:6: error: value class may not be a member of another class
   class Bar(x: Int) extends AnyVal // fail
         ^
+valueclasses.scala:6: error: value class parameter must be a val and not be private[this]
+  class Bar(x: Int) extends AnyVal // fail
+            ^
 valueclasses.scala:8: error: value class may not be a local class
     class Baz(x: Int) extends AnyVal // fail
           ^
-valueclasses.scala:12: error: value class needs to have exactly one public val parameter
+valueclasses.scala:8: error: value class parameter must be a val and not be private[this]
+    class Baz(x: Int) extends AnyVal // fail
+              ^
+valueclasses.scala:12: error: value class needs to have exactly one val parameter
 class V1 extends AnyVal // fail
       ^
-valueclasses.scala:14: error: value class needs to have a publicly accessible val parameter
-class V2(private[test] val x: Int) extends AnyVal // fail
-                           ^
-valueclasses.scala:15: error: value class needs to have a publicly accessible val parameter
-class V3(protected[test] val x: Int) extends AnyVal // fail
-                             ^
-valueclasses.scala:16: error: value class needs to have a publicly accessible val parameter
-class V4(protected val x: Int) extends AnyVal // fail
-                       ^
-valueclasses.scala:17: error: value class needs to have a publicly accessible val parameter
-class V5(private val x: Int) extends AnyVal // fail
-                     ^
-valueclasses.scala:19: error: value class needs to have exactly one public val parameter
+valueclasses.scala:19: error: value class needs to have exactly one val parameter
 class V6(val x: Int, val y: String) extends AnyVal // fail
       ^
-valueclasses.scala:20: error: field definition is not allowed in value class
+valueclasses.scala:20: error: value class needs to have exactly one val parameter
 class V7(val x: Int, private[this] val y: String) extends AnyVal // fail
-                                       ^
-valueclasses.scala:21: error: value class needs to have exactly one public val parameter
-class V8(var x: Int) extends AnyVal // fail
       ^
+valueclasses.scala:21: error: value class parameter must not be a var
+class V8(var x: Int) extends AnyVal // fail
+             ^
 valueclasses.scala:24: error: field definition is not allowed in value class
   val y = x    // fail
       ^
 valueclasses.scala:29: error: type parameter of value class may not be specialized
 class V12[@specialized T, U](val x: (T, U)) extends AnyVal // fail
                        ^
-valueclasses.scala:31: error: value class needs to have exactly one public val parameter
+valueclasses.scala:31: error: value class parameter must be a val and not be private[this]
 class V13(x: Int) extends AnyVal // fail
+          ^
+valueclasses.scala:33: error: value class parameter must be a val and not be private[this]
+class V14(private[this] val x: Int) extends AnyVal // fail
+                            ^
+valueclasses.scala:34: error: value class parameter must not be protected[this]
+class V15(protected[this] val x: Int) extends AnyVal // fail
+                              ^
+valueclasses.scala:36: error: value class needs to have exactly one val parameter
+class V16()(val a: Any) extends AnyVal // fail, was allowed 2.10.x
       ^
-14 errors found
+15 errors found
diff --git a/test/files/neg/valueclasses.scala b/test/files/neg/valueclasses.scala
index 7cac94a..06fde40 100644
--- a/test/files/neg/valueclasses.scala
+++ b/test/files/neg/valueclasses.scala
@@ -11,10 +11,10 @@ class Foo {
 
 class V1 extends AnyVal // fail
 
-class V2(private[test] val x: Int) extends AnyVal // fail
-class V3(protected[test] val x: Int) extends AnyVal // fail
-class V4(protected val x: Int) extends AnyVal // fail
-class V5(private val x: Int) extends AnyVal // fail
+class V2(private[test] val x: Int) extends AnyVal // okay, wasn't allowed in 2.10.x
+class V3(protected[test] val x: Int) extends AnyVal // okay, wasn't allowed in 2.10.x
+class V4(protected val x: Int) extends AnyVal // okay, wasn't allowed in 2.10.x
+class V5(private val x: Int) extends AnyVal // okay, wasn't allowed in 2.10.x
 
 class V6(val x: Int, val y: String) extends AnyVal // fail
 class V7(val x: Int, private[this] val y: String) extends AnyVal // fail
@@ -29,3 +29,8 @@ class V11[T](val x: List[T]) extends AnyVal // ok
 class V12[@specialized T, U](val x: (T, U)) extends AnyVal // fail
 
 class V13(x: Int) extends AnyVal // fail
+
+class V14(private[this] val x: Int) extends AnyVal // fail
+class V15(protected[this] val x: Int) extends AnyVal // fail
+
+class V16()(val a: Any) extends AnyVal // fail, was allowed 2.10.x
diff --git a/test/files/neg/varargs.scala b/test/files/neg/varargs.scala
index 657750e..be75e9b 100644
--- a/test/files/neg/varargs.scala
+++ b/test/files/neg/varargs.scala
@@ -7,21 +7,21 @@ import annotation.varargs
 
 // Failing varargs annotation
 object Test {
-  
+
   trait A {
     def v1(a: Int, b: Array[String]) = a
   }
-  
+
   trait B extends A {
     @varargs def v1(a: Int, b: String*) = a + b.length
   }
-  
+
   @varargs def nov(a: Int) = 0
   @varargs def v(a: Int, b: String*) = a + b.length
   @varargs def v2(a: Int, b: String*) = 0
   def v2(a: Int, b: Array[String]) = 0
-  
-  def main(args: Array[String]) {    
+
+  def main(args: Array[String]) {
   }
-  
+
 }
diff --git a/test/files/neg/variances-refinement.check b/test/files/neg/variances-refinement.check
new file mode 100644
index 0000000..2bed3ff
--- /dev/null
+++ b/test/files/neg/variances-refinement.check
@@ -0,0 +1,22 @@
+variances-refinement.scala:17: error: contravariant type A occurs in covariant position in type ()AnyRef{def f0(x: A): A} of method fail1
+  def fail1() = { object O { def f0(x: A): A = ??? } ; O } // fail
+      ^
+variances-refinement.scala:18: error: covariant type B occurs in contravariant position in type ()AnyRef{def f0(x: B): A} of method fail2
+  def fail2() = { object O { def f0(x: B): A = ??? } ; O } // fail
+      ^
+variances-refinement.scala:19: error: covariant type B occurs in contravariant position in type ()AnyRef{def f0(x: B): B} of method fail3
+  def fail3() = { object O { def f0(x: B): B = ??? } ; O } // fail
+      ^
+variances-refinement.scala:20: error: covariant type B occurs in contravariant position in type ()AnyRef{def f0(x: B): C} of method fail4
+  def fail4() = { object O { def f0(x: B): C = ??? } ; O } // fail
+      ^
+variances-refinement.scala:21: error: contravariant type A occurs in covariant position in type ()AnyRef{def f0(x: C): A} of method fail5
+  def fail5() = { object O { def f0(x: C): A = ??? } ; O } // fail
+      ^
+variances-refinement.scala:23: error: contravariant type A occurs in covariant position in type ()O1.type forSome { val O1: AnyRef with O0; type O0 <: AnyRef{def f0(x: A): A; def f1(x: A): B; def f2(x: A): C} } of method fail6
+  def fail6() = { // fail
+      ^
+variances-refinement.scala:32: error: contravariant type A occurs in covariant position in type ()AnyRef{def f0(x: A): A; def f1(x: A): B; def f2(x: A): C} of method fail7
+  def fail7() = { // fail
+      ^
+7 errors found
diff --git a/test/files/neg/variances-refinement.scala b/test/files/neg/variances-refinement.scala
new file mode 100644
index 0000000..6bfd336
--- /dev/null
+++ b/test/files/neg/variances-refinement.scala
@@ -0,0 +1,40 @@
+trait Trait[-A, +B, C] {
+  def ok() = { // ok
+    object O {
+      private def f0(x: A): A = ???
+      def f1(x: A): B = ???
+      def f2(x: A): C = ???
+      private def f3(x: B): A = ???
+      private def f4(x: B): B = ???
+      private def f5(x: B): C = ???
+      private def f6(x: C): A = ???
+      def f7(x: C): B = ???
+      def f8(x: C): C = ???
+    }
+    O
+  }
+
+  def fail1() = { object O { def f0(x: A): A = ??? } ; O } // fail
+  def fail2() = { object O { def f0(x: B): A = ??? } ; O } // fail
+  def fail3() = { object O { def f0(x: B): B = ??? } ; O } // fail
+  def fail4() = { object O { def f0(x: B): C = ??? } ; O } // fail
+  def fail5() = { object O { def f0(x: C): A = ??? } ; O } // fail
+
+  def fail6() = { // fail
+    trait O0 {
+      def f0(x: A): A = ???
+      def f1(x: A): B = ???
+      def f2(x: A): C = ???
+    }
+    object O1 extends O0
+    O1
+  }
+  def fail7() = { // fail
+    trait O0 {
+      def f0(x: A): A = ???
+      def f1(x: A): B = ???
+      def f2(x: A): C = ???
+    }
+    new O0 { }
+  }
+}
diff --git a/test/files/neg/variances.check b/test/files/neg/variances.check
index 0643e53..cb1a60a 100644
--- a/test/files/neg/variances.check
+++ b/test/files/neg/variances.check
@@ -1,6 +1,9 @@
 variances.scala:4: error: covariant type A occurs in contravariant position in type test.Vector[A] of value x
   def append(x: Vector[A]): Vector[A]
              ^
+variances.scala:75: error: covariant type A occurs in contravariant position in type => A => A of value m
+      val m: A => A
+          ^
 variances.scala:18: error: covariant type A occurs in contravariant position in type A of value a
     private def setA3(a : A) = this.a = a
                       ^
@@ -13,7 +16,10 @@ variances.scala:21: error: covariant type A occurs in invariant position in supe
 variances.scala:74: error: covariant type A occurs in contravariant position in type => test.Covariant.T[A]{val m: A => A} of value x
     val x: T[A] {
         ^
+variances.scala:89: error: covariant type T occurs in invariant position in type T of type A
+    type A = T
+         ^
 variances.scala:90: error: covariant type T occurs in contravariant position in type => test.TestAlias.B[C.this.A] of method foo
     def foo: B[A]
         ^
-6 errors found
+8 errors found
diff --git a/test/files/neg/variances2.check b/test/files/neg/variances2.check
new file mode 100644
index 0000000..433cc12
--- /dev/null
+++ b/test/files/neg/variances2.check
@@ -0,0 +1,229 @@
+variances2.scala:9: error: covariant type B occurs in contravariant position in type B of value x
+    def f1(x: B): Unit = ()
+           ^
+variances2.scala:12: error: covariant type E occurs in contravariant position in type E of value x
+    def f4(x: E): Unit = ()
+           ^
+variances2.scala:15: error: contravariant type A occurs in covariant position in type ()A of method f6
+    def f6(): A = ???
+        ^
+variances2.scala:18: error: contravariant type D occurs in covariant position in type ()D of method f9
+    def f9(): D = ???
+        ^
+variances2.scala:22: error: contravariant type A occurs in covariant position in type A => A of value f
+    def f12(f: A => A): Unit = ()
+            ^
+variances2.scala:23: error: contravariant type A occurs in covariant position in type A => B of value f
+    def f13(f: A => B): Unit = ()
+            ^
+variances2.scala:24: error: contravariant type A occurs in covariant position in type A => C of value f
+    def f14(f: A => C): Unit = ()
+            ^
+variances2.scala:25: error: contravariant type A occurs in covariant position in type A => D of value f
+    def f15(f: A => D): Unit = ()
+            ^
+variances2.scala:26: error: contravariant type A occurs in covariant position in type A => E of value f
+    def f16(f: A => E): Unit = ()
+            ^
+variances2.scala:27: error: contravariant type A occurs in covariant position in type A => F of value f
+    def f17(f: A => F): Unit = ()
+            ^
+variances2.scala:29: error: covariant type B occurs in contravariant position in type B => B of value f
+    def f19(f: B => B): Unit = ()
+            ^
+variances2.scala:32: error: covariant type E occurs in contravariant position in type B => E of value f
+    def f22(f: B => E): Unit = ()
+            ^
+variances2.scala:35: error: covariant type B occurs in contravariant position in type C => B of value f
+    def f25(f: C => B): Unit = ()
+            ^
+variances2.scala:38: error: covariant type E occurs in contravariant position in type C => E of value f
+    def f28(f: C => E): Unit = ()
+            ^
+variances2.scala:40: error: contravariant type D occurs in covariant position in type D => A of value f
+    def f30(f: D => A): Unit = ()
+            ^
+variances2.scala:41: error: contravariant type D occurs in covariant position in type D => B of value f
+    def f31(f: D => B): Unit = ()
+            ^
+variances2.scala:42: error: contravariant type D occurs in covariant position in type D => C of value f
+    def f32(f: D => C): Unit = ()
+            ^
+variances2.scala:43: error: contravariant type D occurs in covariant position in type D => D of value f
+    def f33(f: D => D): Unit = ()
+            ^
+variances2.scala:44: error: contravariant type D occurs in covariant position in type D => E of value f
+    def f34(f: D => E): Unit = ()
+            ^
+variances2.scala:45: error: contravariant type D occurs in covariant position in type D => F of value f
+    def f35(f: D => F): Unit = ()
+            ^
+variances2.scala:47: error: covariant type B occurs in contravariant position in type E => B of value f
+    def f37(f: E => B): Unit = ()
+            ^
+variances2.scala:50: error: covariant type E occurs in contravariant position in type E => E of value f
+    def f40(f: E => E): Unit = ()
+            ^
+variances2.scala:53: error: covariant type B occurs in contravariant position in type F => B of value f
+    def f43(f: F => B): Unit = ()
+            ^
+variances2.scala:56: error: covariant type E occurs in contravariant position in type F => E of value f
+    def f46(f: F => E): Unit = ()
+            ^
+variances2.scala:59: error: contravariant type A occurs in covariant position in type ()A => A of method f48
+    def f48(): A => A = null
+        ^
+variances2.scala:62: error: contravariant type D occurs in covariant position in type ()A => D of method f51
+    def f51(): A => D = null
+        ^
+variances2.scala:65: error: covariant type B occurs in contravariant position in type ()B => A of method f54
+    def f54(): B => A = null
+        ^
+variances2.scala:66: error: covariant type B occurs in contravariant position in type ()B => B of method f55
+    def f55(): B => B = null
+        ^
+variances2.scala:67: error: covariant type B occurs in contravariant position in type ()B => C of method f56
+    def f56(): B => C = null
+        ^
+variances2.scala:68: error: covariant type B occurs in contravariant position in type ()B => D of method f57
+    def f57(): B => D = null
+        ^
+variances2.scala:69: error: covariant type B occurs in contravariant position in type ()B => E of method f58
+    def f58(): B => E = null
+        ^
+variances2.scala:70: error: covariant type B occurs in contravariant position in type ()B => F of method f59
+    def f59(): B => F = null
+        ^
+variances2.scala:71: error: contravariant type A occurs in covariant position in type ()C => A of method f60
+    def f60(): C => A = null
+        ^
+variances2.scala:74: error: contravariant type D occurs in covariant position in type ()C => D of method f63
+    def f63(): C => D = null
+        ^
+variances2.scala:77: error: contravariant type A occurs in covariant position in type ()D => A of method f66
+    def f66(): D => A = null
+        ^
+variances2.scala:80: error: contravariant type D occurs in covariant position in type ()D => D of method f69
+    def f69(): D => D = null
+        ^
+variances2.scala:83: error: covariant type E occurs in contravariant position in type ()E => A of method f72
+    def f72(): E => A = null
+        ^
+variances2.scala:84: error: covariant type E occurs in contravariant position in type ()E => B of method f73
+    def f73(): E => B = null
+        ^
+variances2.scala:85: error: covariant type E occurs in contravariant position in type ()E => C of method f74
+    def f74(): E => C = null
+        ^
+variances2.scala:86: error: covariant type E occurs in contravariant position in type ()E => D of method f75
+    def f75(): E => D = null
+        ^
+variances2.scala:87: error: covariant type E occurs in contravariant position in type ()E => E of method f76
+    def f76(): E => E = null
+        ^
+variances2.scala:88: error: covariant type E occurs in contravariant position in type ()E => F of method f77
+    def f77(): E => F = null
+        ^
+variances2.scala:89: error: contravariant type A occurs in covariant position in type ()F => A of method f78
+    def f78(): F => A = null
+        ^
+variances2.scala:92: error: contravariant type D occurs in covariant position in type ()F => D of method f81
+    def f81(): F => D = null
+        ^
+variances2.scala:96: error: contravariant type A occurs in covariant position in type (x: A)A of method f84
+    def f84(x: A): A = ???
+        ^
+variances2.scala:99: error: contravariant type D occurs in covariant position in type (x: A)D of method f87
+    def f87(x: A): D = ???
+        ^
+variances2.scala:102: error: contravariant type A occurs in covariant position in type (x: B)A of method f90
+    def f90(x: B): A = ???
+        ^
+variances2.scala:102: error: covariant type B occurs in contravariant position in type B of value x
+    def f90(x: B): A = ???
+            ^
+variances2.scala:103: error: covariant type B occurs in contravariant position in type B of value x
+    def f91(x: B): B = ???
+            ^
+variances2.scala:104: error: covariant type B occurs in contravariant position in type B of value x
+    def f92(x: B): C = ???
+            ^
+variances2.scala:105: error: contravariant type D occurs in covariant position in type (x: B)D of method f93
+    def f93(x: B): D = ???
+        ^
+variances2.scala:105: error: covariant type B occurs in contravariant position in type B of value x
+    def f93(x: B): D = ???
+            ^
+variances2.scala:106: error: covariant type B occurs in contravariant position in type B of value x
+    def f94(x: B): E = ???
+            ^
+variances2.scala:107: error: covariant type B occurs in contravariant position in type B of value x
+    def f95(x: B): F = ???
+            ^
+variances2.scala:108: error: contravariant type A occurs in covariant position in type (x: C)A of method f96
+    def f96(x: C): A = ???
+        ^
+variances2.scala:111: error: contravariant type D occurs in covariant position in type (x: C)D of method f99
+    def f99(x: C): D = ???
+        ^
+variances2.scala:114: error: contravariant type A occurs in covariant position in type (x: D)A of method f102
+    def f102(x: D): A = ???
+        ^
+variances2.scala:117: error: contravariant type D occurs in covariant position in type (x: D)D of method f105
+    def f105(x: D): D = ???
+        ^
+variances2.scala:120: error: contravariant type A occurs in covariant position in type (x: E)A of method f108
+    def f108(x: E): A = ???
+        ^
+variances2.scala:120: error: covariant type E occurs in contravariant position in type E of value x
+    def f108(x: E): A = ???
+             ^
+variances2.scala:121: error: covariant type E occurs in contravariant position in type E of value x
+    def f109(x: E): B = ???
+             ^
+variances2.scala:122: error: covariant type E occurs in contravariant position in type E of value x
+    def f110(x: E): C = ???
+             ^
+variances2.scala:123: error: contravariant type D occurs in covariant position in type (x: E)D of method f111
+    def f111(x: E): D = ???
+        ^
+variances2.scala:123: error: covariant type E occurs in contravariant position in type E of value x
+    def f111(x: E): D = ???
+             ^
+variances2.scala:124: error: covariant type E occurs in contravariant position in type E of value x
+    def f112(x: E): E = ???
+             ^
+variances2.scala:125: error: covariant type E occurs in contravariant position in type E of value x
+    def f113(x: E): F = ???
+             ^
+variances2.scala:126: error: contravariant type A occurs in covariant position in type (x: F)A of method f114
+    def f114(x: F): A = ???
+        ^
+variances2.scala:129: error: contravariant type D occurs in covariant position in type (x: F)D of method f117
+    def f117(x: F): D = ???
+        ^
+variances2.scala:133: error: contravariant type A occurs in covariant position in supertype Cov[A] of object O1
+    object O1 extends Cov[A]
+           ^
+variances2.scala:136: error: contravariant type D occurs in covariant position in supertype Cov[D] of object O4
+    object O4 extends Cov[D]
+           ^
+variances2.scala:140: error: covariant type B occurs in contravariant position in supertype Con[B] of object O8
+    object O8 extends Con[B]
+           ^
+variances2.scala:143: error: covariant type E occurs in contravariant position in supertype Con[E] of object O11
+    object O11 extends Con[E]
+           ^
+variances2.scala:145: error: contravariant type A occurs in invariant position in supertype Inv[A] of object O13
+    object O13 extends Inv[A]
+           ^
+variances2.scala:146: error: covariant type B occurs in invariant position in supertype Inv[B] of object O14
+    object O14 extends Inv[B]
+           ^
+variances2.scala:148: error: contravariant type D occurs in invariant position in supertype Inv[D] of object O16
+    object O16 extends Inv[D]
+           ^
+variances2.scala:149: error: covariant type E occurs in invariant position in supertype Inv[E] of object O17
+    object O17 extends Inv[E]
+           ^
+76 errors found
diff --git a/test/files/neg/variances2.scala b/test/files/neg/variances2.scala
new file mode 100644
index 0000000..d30345d
--- /dev/null
+++ b/test/files/neg/variances2.scala
@@ -0,0 +1,303 @@
+trait Cov[+A]
+trait Con[-A]
+trait Inv[A]
+
+trait Trait[-A, +B, C] {
+  // trait Inner[-D <: C, +E >: C, F] {
+  trait Inner[-D <: C, +E >: C, F] {
+    def f0(x: A): Unit = ()
+    def f1(x: B): Unit = ()
+    def f2(x: C): Unit = ()
+    def f3(x: D): Unit = ()
+    def f4(x: E): Unit = ()
+    def f5(x: F): Unit = ()
+
+    def f6(): A = ???
+    def f7(): B = ???
+    def f8(): C = ???
+    def f9(): D = ???
+    def f10(): E = ???
+    def f11(): F = ???
+
+    def f12(f: A => A): Unit = ()
+    def f13(f: A => B): Unit = ()
+    def f14(f: A => C): Unit = ()
+    def f15(f: A => D): Unit = ()
+    def f16(f: A => E): Unit = ()
+    def f17(f: A => F): Unit = ()
+    def f18(f: B => A): Unit = ()
+    def f19(f: B => B): Unit = ()
+    def f20(f: B => C): Unit = ()
+    def f21(f: B => D): Unit = ()
+    def f22(f: B => E): Unit = ()
+    def f23(f: B => F): Unit = ()
+    def f24(f: C => A): Unit = ()
+    def f25(f: C => B): Unit = ()
+    def f26(f: C => C): Unit = ()
+    def f27(f: C => D): Unit = ()
+    def f28(f: C => E): Unit = ()
+    def f29(f: C => F): Unit = ()
+    def f30(f: D => A): Unit = ()
+    def f31(f: D => B): Unit = ()
+    def f32(f: D => C): Unit = ()
+    def f33(f: D => D): Unit = ()
+    def f34(f: D => E): Unit = ()
+    def f35(f: D => F): Unit = ()
+    def f36(f: E => A): Unit = ()
+    def f37(f: E => B): Unit = ()
+    def f38(f: E => C): Unit = ()
+    def f39(f: E => D): Unit = ()
+    def f40(f: E => E): Unit = ()
+    def f41(f: E => F): Unit = ()
+    def f42(f: F => A): Unit = ()
+    def f43(f: F => B): Unit = ()
+    def f44(f: F => C): Unit = ()
+    def f45(f: F => D): Unit = ()
+    def f46(f: F => E): Unit = ()
+    def f47(f: F => F): Unit = ()
+
+    def f48(): A => A = null
+    def f49(): A => B = null
+    def f50(): A => C = null
+    def f51(): A => D = null
+    def f52(): A => E = null
+    def f53(): A => F = null
+    def f54(): B => A = null
+    def f55(): B => B = null
+    def f56(): B => C = null
+    def f57(): B => D = null
+    def f58(): B => E = null
+    def f59(): B => F = null
+    def f60(): C => A = null
+    def f61(): C => B = null
+    def f62(): C => C = null
+    def f63(): C => D = null
+    def f64(): C => E = null
+    def f65(): C => F = null
+    def f66(): D => A = null
+    def f67(): D => B = null
+    def f68(): D => C = null
+    def f69(): D => D = null
+    def f70(): D => E = null
+    def f71(): D => F = null
+    def f72(): E => A = null
+    def f73(): E => B = null
+    def f74(): E => C = null
+    def f75(): E => D = null
+    def f76(): E => E = null
+    def f77(): E => F = null
+    def f78(): F => A = null
+    def f79(): F => B = null
+    def f80(): F => C = null
+    def f81(): F => D = null
+    def f82(): F => E = null
+    def f83(): F => F = null
+
+    def f84(x: A): A = ???
+    def f85(x: A): B = ???
+    def f86(x: A): C = ???
+    def f87(x: A): D = ???
+    def f88(x: A): E = ???
+    def f89(x: A): F = ???
+    def f90(x: B): A = ???
+    def f91(x: B): B = ???
+    def f92(x: B): C = ???
+    def f93(x: B): D = ???
+    def f94(x: B): E = ???
+    def f95(x: B): F = ???
+    def f96(x: C): A = ???
+    def f97(x: C): B = ???
+    def f98(x: C): C = ???
+    def f99(x: C): D = ???
+    def f100(x: C): E = ???
+    def f101(x: C): F = ???
+    def f102(x: D): A = ???
+    def f103(x: D): B = ???
+    def f104(x: D): C = ???
+    def f105(x: D): D = ???
+    def f106(x: D): E = ???
+    def f107(x: D): F = ???
+    def f108(x: E): A = ???
+    def f109(x: E): B = ???
+    def f110(x: E): C = ???
+    def f111(x: E): D = ???
+    def f112(x: E): E = ???
+    def f113(x: E): F = ???
+    def f114(x: F): A = ???
+    def f115(x: F): B = ???
+    def f116(x: F): C = ???
+    def f117(x: F): D = ???
+    def f118(x: F): E = ???
+    def f119(x: F): F = ???
+
+    object O1 extends Cov[A]
+    object O2 extends Cov[B]
+    object O3 extends Cov[C]
+    object O4 extends Cov[D]
+    object O5 extends Cov[E]
+    object O6 extends Cov[F]
+    object O7 extends Con[A]
+    object O8 extends Con[B]
+    object O9 extends Con[C]
+    object O10 extends Con[D]
+    object O11 extends Con[E]
+    object O12 extends Con[F]
+    object O13 extends Inv[A]
+    object O14 extends Inv[B]
+    object O15 extends Inv[C]
+    object O16 extends Inv[D]
+    object O17 extends Inv[E]
+    object O18 extends Inv[F]
+  }
+}
+
+trait Trait2[-A, +B, C] {
+  // trait Inner[-D <: C, +E >: C, F] {
+  def method[D <: A, E >: B, F]() {
+    def f0(x: A): Unit = ()
+    def f1(x: B): Unit = ()
+    def f2(x: C): Unit = ()
+    def f3(x: D): Unit = ()
+    def f4(x: E): Unit = ()
+    def f5(x: F): Unit = ()
+
+    def f6(): A = ???
+    def f7(): B = ???
+    def f8(): C = ???
+    def f9(): D = ???
+    def f10(): E = ???
+    def f11(): F = ???
+
+    def f12(f: A => A): Unit = ()
+    def f13(f: A => B): Unit = ()
+    def f14(f: A => C): Unit = ()
+    def f15(f: A => D): Unit = ()
+    def f16(f: A => E): Unit = ()
+    def f17(f: A => F): Unit = ()
+    def f18(f: B => A): Unit = ()
+    def f19(f: B => B): Unit = ()
+    def f20(f: B => C): Unit = ()
+    def f21(f: B => D): Unit = ()
+    def f22(f: B => E): Unit = ()
+    def f23(f: B => F): Unit = ()
+    def f24(f: C => A): Unit = ()
+    def f25(f: C => B): Unit = ()
+    def f26(f: C => C): Unit = ()
+    def f27(f: C => D): Unit = ()
+    def f28(f: C => E): Unit = ()
+    def f29(f: C => F): Unit = ()
+    def f30(f: D => A): Unit = ()
+    def f31(f: D => B): Unit = ()
+    def f32(f: D => C): Unit = ()
+    def f33(f: D => D): Unit = ()
+    def f34(f: D => E): Unit = ()
+    def f35(f: D => F): Unit = ()
+    def f36(f: E => A): Unit = ()
+    def f37(f: E => B): Unit = ()
+    def f38(f: E => C): Unit = ()
+    def f39(f: E => D): Unit = ()
+    def f40(f: E => E): Unit = ()
+    def f41(f: E => F): Unit = ()
+    def f42(f: F => A): Unit = ()
+    def f43(f: F => B): Unit = ()
+    def f44(f: F => C): Unit = ()
+    def f45(f: F => D): Unit = ()
+    def f46(f: F => E): Unit = ()
+    def f47(f: F => F): Unit = ()
+
+    def f48(): A => A = null
+    def f49(): A => B = null
+    def f50(): A => C = null
+    def f51(): A => D = null
+    def f52(): A => E = null
+    def f53(): A => F = null
+    def f54(): B => A = null
+    def f55(): B => B = null
+    def f56(): B => C = null
+    def f57(): B => D = null
+    def f58(): B => E = null
+    def f59(): B => F = null
+    def f60(): C => A = null
+    def f61(): C => B = null
+    def f62(): C => C = null
+    def f63(): C => D = null
+    def f64(): C => E = null
+    def f65(): C => F = null
+    def f66(): D => A = null
+    def f67(): D => B = null
+    def f68(): D => C = null
+    def f69(): D => D = null
+    def f70(): D => E = null
+    def f71(): D => F = null
+    def f72(): E => A = null
+    def f73(): E => B = null
+    def f74(): E => C = null
+    def f75(): E => D = null
+    def f76(): E => E = null
+    def f77(): E => F = null
+    def f78(): F => A = null
+    def f79(): F => B = null
+    def f80(): F => C = null
+    def f81(): F => D = null
+    def f82(): F => E = null
+    def f83(): F => F = null
+
+    def f84(x: A): A = ???
+    def f85(x: A): B = ???
+    def f86(x: A): C = ???
+    def f87(x: A): D = ???
+    def f88(x: A): E = ???
+    def f89(x: A): F = ???
+    def f90(x: B): A = ???
+    def f91(x: B): B = ???
+    def f92(x: B): C = ???
+    def f93(x: B): D = ???
+    def f94(x: B): E = ???
+    def f95(x: B): F = ???
+    def f96(x: C): A = ???
+    def f97(x: C): B = ???
+    def f98(x: C): C = ???
+    def f99(x: C): D = ???
+    def f100(x: C): E = ???
+    def f101(x: C): F = ???
+    def f102(x: D): A = ???
+    def f103(x: D): B = ???
+    def f104(x: D): C = ???
+    def f105(x: D): D = ???
+    def f106(x: D): E = ???
+    def f107(x: D): F = ???
+    def f108(x: E): A = ???
+    def f109(x: E): B = ???
+    def f110(x: E): C = ???
+    def f111(x: E): D = ???
+    def f112(x: E): E = ???
+    def f113(x: E): F = ???
+    def f114(x: F): A = ???
+    def f115(x: F): B = ???
+    def f116(x: F): C = ???
+    def f117(x: F): D = ???
+    def f118(x: F): E = ???
+    def f119(x: F): F = ???
+
+    object O1 extends Cov[A]
+    object O2 extends Cov[B]
+    object O3 extends Cov[C]
+    object O4 extends Cov[D]
+    object O5 extends Cov[E]
+    object O6 extends Cov[F]
+    object O7 extends Con[A]
+    object O8 extends Con[B]
+    object O9 extends Con[C]
+    object O10 extends Con[D]
+    object O11 extends Con[E]
+    object O12 extends Con[F]
+    object O13 extends Inv[A]
+    object O14 extends Inv[B]
+    object O15 extends Inv[C]
+    object O16 extends Inv[D]
+    object O17 extends Inv[E]
+    object O18 extends Inv[F]
+
+    ()
+  }
+}
diff --git a/test/files/neg/viewtest.scala b/test/files/neg/viewtest.scala
index ddb7fa4..5e7d624 100644
--- a/test/files/neg/viewtest.scala
+++ b/test/files/neg/viewtest.scala
@@ -37,7 +37,7 @@ object O {
     }
   }
 
-  implicit def view3[a <% Ordered[a]](x: List[a]): Ordered[List[a]] = 
+  implicit def view3[a <% Ordered[a]](x: List[a]): Ordered[List[a]] =
     new Ordered[List[a]] {
       def compareTo [b >: List[a] <% Ordered[b]](y: b): Int = y match {
         case y1: List[a] => compareLists(x, y1)
@@ -72,7 +72,7 @@ class Node[a <% Ordered[a]](elem: a, l: Tree[a], r: Tree[a]) extends Tree[a] {
     if (x == elem) this
     else if (x < elem) new Node(elem, l insert x, r)
     else new Node(elem, l, r insert x)
-  def elements: List[a] = 
+  def elements: List[a] =
     l.elements ::: List(elem) ::: r.elements
 }
 
@@ -86,7 +86,7 @@ case class Str(elem: String) extends Ordered[Str] {
 object Test {
   import O._
 
-  private def toCharList(s: String): List[Char] = 
+  private def toCharList(s: String): List[Char] =
     if (s.length() == 0) List()
     else s.charAt(0) :: toCharList(s.substring(1))
 
diff --git a/test/files/neg/virtpatmat_reach_null.check b/test/files/neg/virtpatmat_reach_null.check
index 595c8ec..e0c36c8 100644
--- a/test/files/neg/virtpatmat_reach_null.check
+++ b/test/files/neg/virtpatmat_reach_null.check
@@ -1,4 +1,6 @@
-virtpatmat_reach_null.scala:13: error: unreachable code
+virtpatmat_reach_null.scala:13: warning: unreachable code
       case _                              =>  // unreachable
                                           ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
 one error found
diff --git a/test/files/neg/virtpatmat_reach_sealed_unsealed.check b/test/files/neg/virtpatmat_reach_sealed_unsealed.check
index 10638ef..064a12b 100644
--- a/test/files/neg/virtpatmat_reach_sealed_unsealed.check
+++ b/test/files/neg/virtpatmat_reach_sealed_unsealed.check
@@ -1,14 +1,16 @@
-virtpatmat_reach_sealed_unsealed.scala:16: error: match may not be exhaustive.
+virtpatmat_reach_sealed_unsealed.scala:16: warning: match may not be exhaustive.
 It would fail on the following input: false
   (true: Boolean) match { case true => } // not exhaustive, but reachable
        ^
-virtpatmat_reach_sealed_unsealed.scala:18: error: unreachable code
+virtpatmat_reach_sealed_unsealed.scala:18: warning: unreachable code
   (true: Boolean) match { case true => case false =>  case _ => } // exhaustive, last case is unreachable
                                                              ^
-virtpatmat_reach_sealed_unsealed.scala:19: error: unreachable code
+virtpatmat_reach_sealed_unsealed.scala:19: warning: unreachable code
   (true: Boolean) match { case true => case false =>  case _: Boolean => } // exhaustive, last case is unreachable
                                                                       ^
-virtpatmat_reach_sealed_unsealed.scala:20: error: unreachable code
+virtpatmat_reach_sealed_unsealed.scala:20: warning: unreachable code
   (true: Boolean) match { case true => case false =>  case _: Any => } // exhaustive, last case is unreachable
                                                                   ^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/virtpatmat_unreach_select.check b/test/files/neg/virtpatmat_unreach_select.check
index 3771971..4fc78cd 100644
--- a/test/files/neg/virtpatmat_unreach_select.check
+++ b/test/files/neg/virtpatmat_unreach_select.check
@@ -1,4 +1,6 @@
-virtpatmat_unreach_select.scala:10: error: unreachable code
+virtpatmat_unreach_select.scala:10: warning: unreachable code
     case WARNING.id => // unreachable
                     ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
 one error found
diff --git a/test/files/neg/volatile_no_override.check b/test/files/neg/volatile_no_override.check
new file mode 100644
index 0000000..a9a60ab
--- /dev/null
+++ b/test/files/neg/volatile_no_override.check
@@ -0,0 +1,5 @@
+volatile_no_override.scala:13: error: overriding value x in class A of type Volatile.this.D;
+ value x has a volatile type; cannot override a member with non-volatile type
+  val x: A with D = null
+      ^
+one error found
diff --git a/test/files/neg/volatile_no_override.scala b/test/files/neg/volatile_no_override.scala
new file mode 100644
index 0000000..9fad082
--- /dev/null
+++ b/test/files/neg/volatile_no_override.scala
@@ -0,0 +1,14 @@
+class B
+class C(x: String) extends B
+
+abstract class A {
+  class D { type T >: C <: B }
+  val x: D
+  var y: x.T = new C("abc")
+}
+
+class Volatile extends A {
+  type A >: Null
+  // test (1.4), pt 2 in RefChecks
+  val x: A with D = null
+}
diff --git a/test/files/neg/warn-inferred-any.check b/test/files/neg/warn-inferred-any.check
new file mode 100644
index 0000000..4628033
--- /dev/null
+++ b/test/files/neg/warn-inferred-any.check
@@ -0,0 +1,12 @@
+warn-inferred-any.scala:8: warning: a type was inferred to be `Any`; this may indicate a programming error.
+  { List(1, 2, 3) contains "a" }  // only this warns
+                  ^
+warn-inferred-any.scala:16: warning: a type was inferred to be `AnyVal`; this may indicate a programming error.
+  { 1l to 5l contains 5 }
+             ^
+warn-inferred-any.scala:17: warning: a type was inferred to be `AnyVal`; this may indicate a programming error.
+  { 1l to 5l contains 5d }
+             ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/warn-inferred-any.flags b/test/files/neg/warn-inferred-any.flags
new file mode 100644
index 0000000..a3127d3
--- /dev/null
+++ b/test/files/neg/warn-inferred-any.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Ywarn-infer-any
diff --git a/test/files/neg/warn-inferred-any.scala b/test/files/neg/warn-inferred-any.scala
new file mode 100644
index 0000000..b853e6e
--- /dev/null
+++ b/test/files/neg/warn-inferred-any.scala
@@ -0,0 +1,19 @@
+trait Foo[-A <: AnyRef, +B <: AnyRef] {
+  def run[U](x: A)(action: B => U): Boolean = ???
+
+  { run(_: A)(_: B => String) }
+}
+
+trait Xs[+A] {
+  { List(1, 2, 3) contains "a" }  // only this warns
+  { List(1, 2, 3) contains 1 }
+  { identity(List(1, 2, 3) contains 1) }
+  { List("a") foreach println }
+}
+
+trait Ys[+A] {
+  { 1 to 5 contains 5l }
+  { 1l to 5l contains 5 }
+  { 1l to 5l contains 5d }
+  { 1l to 5l contains 5l }
+}
diff --git a/test/files/neg/warn-unused-imports.check b/test/files/neg/warn-unused-imports.check
new file mode 100644
index 0000000..36c6dd0
--- /dev/null
+++ b/test/files/neg/warn-unused-imports.check
@@ -0,0 +1,33 @@
+warn-unused-imports.scala:57: warning: Unused import
+    import p1.A // warn
+              ^
+warn-unused-imports.scala:62: warning: Unused import
+    import p1.{ A, B } // warn on A
+                ^
+warn-unused-imports.scala:67: warning: Unused import
+    import p1.{ A, B } // warn on both
+                ^
+warn-unused-imports.scala:67: warning: Unused import
+    import p1.{ A, B } // warn on both
+                   ^
+warn-unused-imports.scala:73: warning: Unused import
+    import c._  // warn
+             ^
+warn-unused-imports.scala:78: warning: Unused import
+    import p1._ // warn
+              ^
+warn-unused-imports.scala:85: warning: Unused import
+    import c._  // warn
+             ^
+warn-unused-imports.scala:91: warning: Unused import
+    import p1.c._  // warn
+                ^
+warn-unused-imports.scala:98: warning: Unused import
+    import p1._   // warn
+              ^
+warn-unused-imports.scala:118: warning: Unused import
+    import p1.A   // warn
+              ^
+error: No warnings can be incurred under -Xfatal-warnings.
+10 warnings found
+one error found
diff --git a/test/files/neg/warn-unused-imports.flags b/test/files/neg/warn-unused-imports.flags
new file mode 100644
index 0000000..24db705
--- /dev/null
+++ b/test/files/neg/warn-unused-imports.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Ywarn-unused-import
diff --git a/test/files/neg/warn-unused-imports.scala b/test/files/neg/warn-unused-imports.scala
new file mode 100644
index 0000000..b7a2f1c
--- /dev/null
+++ b/test/files/neg/warn-unused-imports.scala
@@ -0,0 +1,125 @@
+class Bippo {
+  def length: Int = 123
+  class Tree
+}
+
+package object p1 {
+  class A
+  implicit class B(val s: String) { def bippy = s }
+  val c: Bippo = new Bippo
+  type D = String
+}
+package object p2 {
+  class A
+  implicit class B(val s: String) { def bippy = s }
+  val c: Bippo = new Bippo
+  type D = Int
+}
+
+trait NoWarn {
+  {
+    import p1._ // no warn
+    println("abc".bippy)
+  }
+
+  {
+    import p1._ // no warn
+    println(new A)
+  }
+
+  {
+    import p1.B // no warn
+    println("abc".bippy)
+  }
+
+  {
+    import p1._ // no warn
+    import c._  // no warn
+    println(length)
+  }
+
+  {
+    import p1._ // no warn
+    import c._  // no warn
+    val x: Tree = null
+    println(x)
+  }
+
+  {
+    import p1.D // no warn
+    val x: D = null
+    println(x)
+  }
+}
+
+trait Warn {
+  {
+    import p1.A // warn
+    println(123)
+  }
+
+  {
+    import p1.{ A, B } // warn on A
+    println("abc".bippy)
+  }
+
+  {
+    import p1.{ A, B } // warn on both
+    println(123)
+  }
+
+  {
+    import p1._ // no warn (technically this could warn, but not worth the effort to unroll unusedness transitively)
+    import c._  // warn
+    println(123)
+  }
+
+  {
+    import p1._ // warn
+    println(123)
+  }
+
+  {
+    class Tree
+    import p1._ // no warn
+    import c._  // warn
+    val x: Tree = null
+    println(x)
+  }
+
+  {
+    import p1.c._  // warn
+    println(123)
+  }
+}
+
+trait Nested {
+  {
+    import p1._   // warn
+    trait Warn {  // warn about unused local trait for good measure
+      import p2._
+      println(new A)
+      println("abc".bippy)
+    }
+    println("")
+  }
+
+  {
+    import p1._   // no warn
+    trait NoWarn {
+      import p2.B  // no warn
+      println("abc".bippy)
+      println(new A)
+    }
+    println(new NoWarn { })
+  }
+
+  {
+    import p1.A   // warn
+    trait Warn {
+      import p2.A
+      println(new A)
+    }
+    println(new Warn { })
+  }
+}
diff --git a/test/files/neg/warn-unused-privates.check b/test/files/neg/warn-unused-privates.check
new file mode 100644
index 0000000..d012869
--- /dev/null
+++ b/test/files/neg/warn-unused-privates.check
@@ -0,0 +1,66 @@
+warn-unused-privates.scala:2: warning: private constructor in class Bippy is never used
+  private def this(c: Int) = this(c, c)           // warn
+              ^
+warn-unused-privates.scala:4: warning: private method in class Bippy is never used
+  private def boop(x: Int)            = x+a+b     // warn
+              ^
+warn-unused-privates.scala:6: warning: private val in class Bippy is never used
+  final private val MILLIS2: Int      = 1000      // warn
+                    ^
+warn-unused-privates.scala:13: warning: private val in object Bippy is never used
+  private val HEY_INSTANCE: Int = 1000    // warn
+              ^
+warn-unused-privates.scala:35: warning: private val in class Boppy is never used
+  private val hummer = "def" // warn
+              ^
+warn-unused-privates.scala:42: warning: private var in trait Accessors is never used
+  private var v1: Int = 0 // warn
+              ^
+warn-unused-privates.scala:42: warning: private setter in trait Accessors is never used
+  private var v1: Int = 0 // warn
+              ^
+warn-unused-privates.scala:43: warning: private setter in trait Accessors is never used
+  private var v2: Int = 0 // warn, never set
+              ^
+warn-unused-privates.scala:44: warning: private var in trait Accessors is never used
+  private var v3: Int = 0 // warn, never got
+              ^
+warn-unused-privates.scala:56: warning: private default argument in trait DefaultArgs is never used
+  private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3
+                             ^
+warn-unused-privates.scala:56: warning: private default argument in trait DefaultArgs is never used
+  private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3
+                                           ^
+warn-unused-privates.scala:67: warning: local var in method f0 is never used
+    var x = 1 // warn
+        ^
+warn-unused-privates.scala:74: warning: local val in method f1 is never used
+    val b = new Outer // warn
+        ^
+warn-unused-privates.scala:84: warning: private object in object Types is never used
+  private object Dongo { def f = this } // warn
+                 ^
+warn-unused-privates.scala:94: warning: local object in method l1 is never used
+    object HiObject { def f = this } // warn
+           ^
+warn-unused-privates.scala:78: warning: local var x in method f2 is never set - it could be a val
+    var x = 100 // warn about it being a var
+        ^
+warn-unused-privates.scala:85: warning: private class Bar1 in object Types is never used
+  private class Bar1 // warn
+                ^
+warn-unused-privates.scala:87: warning: private type Alias1 in object Types is never used
+  private type Alias1 = String // warn
+               ^
+warn-unused-privates.scala:95: warning: local class Hi is never used
+    class Hi { // warn
+          ^
+warn-unused-privates.scala:99: warning: local class DingDongDoobie is never used
+    class DingDongDoobie // warn
+          ^
+warn-unused-privates.scala:102: warning: local type OtherThing is never used
+    type OtherThing = String // warn
+         ^
+error: No warnings can be incurred under -Xfatal-warnings.
+21 warnings found
+one error found
diff --git a/test/files/neg/warn-unused-privates.flags b/test/files/neg/warn-unused-privates.flags
new file mode 100644
index 0000000..25474ae
--- /dev/null
+++ b/test/files/neg/warn-unused-privates.flags
@@ -0,0 +1 @@
+-Ywarn-unused -Xfatal-warnings
diff --git a/test/files/neg/warn-unused-privates.scala b/test/files/neg/warn-unused-privates.scala
new file mode 100644
index 0000000..cb6e946
--- /dev/null
+++ b/test/files/neg/warn-unused-privates.scala
@@ -0,0 +1,105 @@
+class Bippy(a: Int, b: Int) {
+  private def this(c: Int) = this(c, c)           // warn
+  private def bippy(x: Int): Int      = bippy(x)  // TODO: could warn
+  private def boop(x: Int)            = x+a+b     // warn
+  final private val MILLIS1           = 2000      // no warn, might have been inlined
+  final private val MILLIS2: Int      = 1000      // warn
+  final private val HI_COMPANION: Int = 500       // no warn, accessed from companion
+  def hi() = Bippy.HI_INSTANCE
+}
+object Bippy {
+  def hi(x: Bippy) = x.HI_COMPANION
+  private val HI_INSTANCE: Int = 500      // no warn, accessed from instance
+  private val HEY_INSTANCE: Int = 1000    // warn
+}
+
+class A(val msg: String)
+class B1(msg: String) extends A(msg)
+class B2(msg0: String) extends A(msg0)
+class B3(msg0: String) extends A("msg")
+
+/*** Early defs warnings disabled primarily due to SI-6595.
+ *   The test case is here to assure we aren't issuing false positives;
+ *   the ones labeled "warn" don't warn.
+ ***/
+class Boppy extends {
+  private val hmm: String = "abc"       // no warn, used in early defs
+  private val hom: String = "def"       // no warn, used in body
+  private final val him   = "ghi"       // no warn, might have been (was) inlined
+  final val him2          = "ghi"       // no warn, same
+  final val himinline     = him
+  private val hum: String = "jkl"       // warn
+  final val ding = hmm.length
+} with Mutable {
+  val dinger = hom
+  private val hummer = "def" // warn
+
+  private final val bum   = "ghi"       // no warn, might have been (was) inlined
+  final val bum2          = "ghi"       // no warn, same
+}
+
+trait Accessors {
+  private var v1: Int = 0 // warn
+  private var v2: Int = 0 // warn, never set
+  private var v3: Int = 0 // warn, never got
+  private var v4: Int = 0 // no warn
+
+  def bippy(): Int = {
+    v3 = 5
+    v4 = 6
+    v2 + v4
+  }
+}
+
+trait DefaultArgs {
+  // warn about default getters for x2 and x3
+  private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3
+
+  def boppy() = bippy(5, 100, 200)
+}
+
+class Outer {
+  class Inner
+}
+
+trait Locals {
+  def f0 = {
+    var x = 1 // warn
+    var y = 2
+    y = 3
+    y + y
+  }
+  def f1 = {
+    val a = new Outer // no warn
+    val b = new Outer // warn
+    new a.Inner
+  }
+  def f2 = {
+    var x = 100 // warn about it being a var
+    x
+  }
+}
+
+object Types {
+  private object Dongo { def f = this } // warn
+  private class Bar1 // warn
+  private class Bar2 // no warn
+  private type Alias1 = String // warn
+  private type Alias2 = String // no warn
+  def bippo = (new Bar2).toString
+
+  def f(x: Alias2) = x.length
+
+  def l1() = {
+    object HiObject { def f = this } // warn
+    class Hi { // warn
+      def f1: Hi = new Hi
+      def f2(x: Hi) = x
+    }
+    class DingDongDoobie // warn
+    class Bippy // no warn
+    type Something = Bippy // no warn
+    type OtherThing = String // warn
+    (new Bippy): Something
+  }
+}
diff --git a/test/files/neg/wellkinded_wrongarity.check b/test/files/neg/wellkinded_wrongarity.check
index 1dc38db..b9f033b 100644
--- a/test/files/neg/wellkinded_wrongarity.check
+++ b/test/files/neg/wellkinded_wrongarity.check
@@ -1,4 +1,4 @@
-wellkinded_wrongarity.scala:5: error: Pair takes two type parameters, expected: one
-object mp extends Monad[Pair]
+wellkinded_wrongarity.scala:5: error: Tuple2 takes two type parameters, expected: one
+object mp extends Monad[Tuple2]
                         ^
 one error found
diff --git a/test/files/neg/wellkinded_wrongarity.scala b/test/files/neg/wellkinded_wrongarity.scala
index 2bb0e2c..39c7601 100644
--- a/test/files/neg/wellkinded_wrongarity.scala
+++ b/test/files/neg/wellkinded_wrongarity.scala
@@ -2,4 +2,4 @@
 
 class Monad[m[x]]
 
-object mp extends Monad[Pair]
+object mp extends Monad[Tuple2]
diff --git a/test/files/neg/xmltruncated6.check b/test/files/neg/xmltruncated6.check
index 6123114..f638f2f 100644
--- a/test/files/neg/xmltruncated6.check
+++ b/test/files/neg/xmltruncated6.check
@@ -1,4 +1,4 @@
-xmltruncated6.scala:2: error: ';' expected but eof found.
+xmltruncated6.scala:2: error: in XML literal:  expected end of Scala block
   val stuff = <a>{ "no closing brace"
                                      ^
 one error found
diff --git a/test/files/pos/CustomGlobal.scala b/test/files/pos/CustomGlobal.scala
index 30bf227..a5668bd 100644
--- a/test/files/pos/CustomGlobal.scala
+++ b/test/files/pos/CustomGlobal.scala
@@ -22,7 +22,7 @@ class CustomGlobal(currentSettings: Settings, reporter: Reporter) extends Global
     override def newTyper(context: Context): Typer = new CustomTyper(context)
 
     class CustomTyper(context : Context) extends Typer(context) {
-      override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+      override def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
         if (tree.summaryString contains "Bippy")
           println("I'm typing a Bippy! It's a " + tree.shortClass + ".")
 
diff --git a/test/files/pos/List1.scala b/test/files/pos/List1.scala
index 9d3a51f..30ebf5e 100644
--- a/test/files/pos/List1.scala
+++ b/test/files/pos/List1.scala
@@ -9,15 +9,15 @@ object lists {
 
   def Nil[b] = new List[b] {
     def isEmpty: Boolean = true;
-    def head = error("head of Nil");
-    def tail = error("tail of Nil");
+    def head = sys.error("head of Nil");
+    def tail = sys.error("tail of Nil");
   }
 
   def Cons[c](x: c, xs: List[c]): List[c] = new List[c] {
     def isEmpty = false;
     def head = x;
     def tail = xs;
-  } 
+  }
 
   def foo = {
     val intnil = Nil[Int];
diff --git a/test/files/pos/MailBox.scala b/test/files/pos/MailBox.scala
index 2a3f02d..8e27bd3 100644
--- a/test/files/pos/MailBox.scala
+++ b/test/files/pos/MailBox.scala
@@ -24,7 +24,7 @@ class MailBox {
   private val sent = new LinkedList[Any];
   private var lastSent = sent;
   private val receivers = new LinkedList[Receiver];
-  private var lastReceiver = receivers; 
+  private var lastReceiver = receivers;
 
   def send(msg: Any): Unit = synchronized {
     var r = receivers;
@@ -59,7 +59,7 @@ class MailBox {
     }
     f(msg)
   }
-  
+
   def receiveWithin[a](msec: Long)(f: PartialFunction[Any, a]): a = {
     val msg: Any = synchronized {
       var s = sent;
diff --git a/test/files/pos/SI-7638.scala b/test/files/pos/SI-7638.scala
index da16e0b..831475d 100644
--- a/test/files/pos/SI-7638.scala
+++ b/test/files/pos/SI-7638.scala
@@ -9,7 +9,7 @@ trait ArrayVectorOrder[@specialized(Int) A] extends Ordering[A] {
 }
 
 object vectorOrder {
-  implicit def arrayOrder[@specialized(Int) A]() = 
+  implicit def arrayOrder[@specialized(Int) A]() =
   /*
    * Before applying patch:
    *
@@ -17,8 +17,8 @@ object vectorOrder {
    *         during phase: mixin
    *      library version: version 2.10.3-20130625-164027-d22e8d282c
    *     compiler version: version 2.10.3-20130627-153946-54cb6af7db
-   *   reconstructed args: 
-   * 
+   *   reconstructed args:
+   *
    *   last tree to typer: TypeTree(class Array)
    *               symbol: class Array in package scala (flags: final)
    *    symbol definition: final class Array[T >: ? <: ?] extends Object
@@ -27,12 +27,12 @@ object vectorOrder {
    *       context owners: anonymous class anon$1 -> package compile
    *
    * == Expanded type of tree ==
-   * 
+   *
    * TypeRef(
    *   TypeSymbol(final class Array[T >: ? <: ?] extends Object)
    *   args = List(TypeRef(TypeSymbol(final abstract class Int extends )))
    * )
-   * 
+   *
    * unhandled exception while transforming SI-7638.scala
    * error: uncaught exception during compilation: java.lang.UnsupportedOperationException
    * error: java.lang.UnsupportedOperationException: tail of empty list
diff --git a/test/files/pos/Transactions.scala b/test/files/pos/Transactions.scala
index 525eff7..32889f8 100644
--- a/test/files/pos/Transactions.scala
+++ b/test/files/pos/Transactions.scala
@@ -28,7 +28,7 @@ class Transaction {
   var next: Transaction = null
 
   def this(hd: Transaction, tl: Transaction) = { this(); this.head = head; this.next = next }
-  
+
   def makeAbort() = synchronized {
     while (status != Transaction.Aborted && status != Transaction.Committed) {
       status = Transaction.Abortable
@@ -48,7 +48,7 @@ class Transaction {
       case ex: AbortException => abort(); None
       case ex: Throwable => abort(); throw ex
     }
-  
+
 }
 
 trait Transactional {
@@ -58,7 +58,7 @@ trait Transactional {
 
   /** copy back snapshot */
   def rollBack(): Unit
-  
+
   var readers: Transaction
   var writer: Transaction
 
@@ -66,11 +66,11 @@ trait Transactional {
     if (writer == null) null
     else if (writer.status == Transaction.Running) writer
     else {
-      if (writer.status != Transaction.Committed) rollBack(); 
-      writer = null; 
-      null 
+      if (writer.status != Transaction.Committed) rollBack();
+      writer = null;
+      null
     }
-  
+
   def getter(thisTrans: Transaction) {
     if (writer == thisTrans) return
     var r = readers
@@ -96,7 +96,7 @@ trait Transactional {
     synchronized {
       val w = currentWriter()
       if (w != null)
-        if (thisTrans.id < w.id) { w.makeAbort(); rollBack() } 
+        if (thisTrans.id < w.id) { w.makeAbort(); rollBack() }
         else throw new AbortException
       var r = readers
       while (r != null && r.head.status != Transaction.Running) { r = r.next; readers = r }
@@ -111,4 +111,3 @@ trait Transactional {
     }
   }
 }
-      
diff --git a/test/files/pos/annotated-original/M_1.scala b/test/files/pos/annotated-original/M_1.scala
index 01654e0..84a01bc 100644
--- a/test/files/pos/annotated-original/M_1.scala
+++ b/test/files/pos/annotated-original/M_1.scala
@@ -1,7 +1,7 @@
 import language.experimental.macros
-import reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object M {
-  def impl(c: Context)(a: c.Expr[Any]) = c.Expr[Any](c.resetLocalAttrs(a.tree))
+  def impl(c: Context)(a: c.Expr[Any]) = c.Expr[Any](c.untypecheck(a.tree))
   def m(a: Any) = macro impl
 }
diff --git a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
index d92fbca..79edbff 100644
--- a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
+++ b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
@@ -1,5 +1,5 @@
 import scala.language.experimental.macros
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 import collection.mutable.ListBuffer
 import collection.mutable.Stack
 
@@ -15,14 +15,15 @@ object Macros {
   def tree_impl[T:c.WeakTypeTag,U:c.WeakTypeTag](c: Context)
       (f:c.Expr[Function1[T,U]]): c.Expr[Function1[T,U]] = {
     import c.universe._
+    import internal._
     val ttag = c.weakTypeTag[U]
     f match {
       case Expr(Function(List(ValDef(_,n,tp,_)),b)) =>
         // normalize argument name
         var b1 = new Transformer {
           override def transform(tree: Tree): Tree = tree match {
-            case Ident(x) if (x==n) => Ident(newTermName("_arg"))
-            case tt @ TypeTree() if tt.original != null => TypeTree(tt.tpe) setOriginal transform(tt.original)
+            case Ident(x) if (x==n) => Ident(TermName("_arg"))
+            case tt: TypeTree if tt.original != null => setOriginal(TypeTree(tt.tpe), transform(tt.original))
             // without the fix to LazyTreeCopier.Annotated, we would need to uncomment the line below to make the macro work
             // that's because the pattern match in the input expression gets expanded into Typed(<x>, TypeTree(<Int @unchecked>))
             // with the original of the TypeTree being Annotated(<@unchecked>, Ident(<x>))
@@ -34,17 +35,17 @@ object Macros {
           }
         }.transform(b)
 
-        val reifiedTree = c.reifyTree(treeBuild.mkRuntimeUniverseRef, EmptyTree, b1)
+        val reifiedTree = c.reifyTree(gen.mkRuntimeUniverseRef, EmptyTree, b1)
         val reifiedExpr = c.Expr[scala.reflect.runtime.universe.Expr[T => U]](reifiedTree)
         val template =
           c.universe.reify(new (T => U) with TypedFunction {
-            override def toString = c.literal(tp+" => "+ttag.tpe+" { "+b1.toString+" } ").splice // DEBUG
+            override def toString = c.Expr[String](q"""${tp+" => "+ttag.tpe+" { "+b1.toString+" } "}""").splice // DEBUG
             def tree = reifiedExpr.splice.tree
-            val typeIn = c.literal(tp.toString).splice
-            val typeOut = c.literal(ttag.tpe.toString).splice
+            val typeIn = c.Expr[String](q"${tp.toString}").splice
+            val typeOut = c.Expr[String](q"${ttag.tpe.toString}").splice
             def apply(_arg: T): U = c.Expr[U](b1)(ttag.asInstanceOf[c.WeakTypeTag[U]]).splice
           })
-        val untyped = c.resetLocalAttrs(template.tree)
+        val untyped = c.untypecheck(template.tree)
 
         c.Expr[T => U](untyped)
       case _ => sys.error("Bad function type")
diff --git a/test/files/pos/annotations.scala b/test/files/pos/annotations.scala
index 706a715..4832ce4 100644
--- a/test/files/pos/annotations.scala
+++ b/test/files/pos/annotations.scala
@@ -2,7 +2,7 @@ class ann(i: Int) extends scala.annotation.Annotation
 class cfann(x: String) extends annotation.ClassfileAnnotation
 
 // annotations on abstract types
-abstract class C1[@serializable @cloneable +T, U, V[_]]
+abstract class C1[@annotation.elidable(0) +T, U, V[_]]
 abstract class C2[@deprecated
                   @ann(1) T <: Number,
                   V]
diff --git a/test/files/pos/annotations2.scala b/test/files/pos/annotations2.scala
new file mode 100644
index 0000000..3bce7f8
--- /dev/null
+++ b/test/files/pos/annotations2.scala
@@ -0,0 +1,31 @@
+
+class B[T](x: (T, T)) {
+  def this(xx: (T, Any, Any)) = this((xx._1, xx._1))
+}
+class BAnn[T](x: (T, T)) extends scala.annotation.StaticAnnotation {
+  def this(xx: (T, Any, Any)) = this((xx._1, xx._1))
+}
+class CAnn[T](x: (T, T)) extends scala.annotation.StaticAnnotation {
+  def this(xx: Class[T]) = this((xx.newInstance(), xx.newInstance()))
+}
+
+class A1 {
+  val b1 = new B((1, 2, 3))
+  val b2 = new B((1, 2))
+  val b3 = new B[Int]((1, 2, 3))
+  val b4 = new B[Int]((1, 2))
+}
+
+class A2 {
+  @BAnn((1, 2, 3)) val b1 = null
+  @BAnn((1, 2)) val b2 = null
+  @BAnn[Int]((1, 2, 3)) val b3 = null
+  @BAnn[Int]((1, 2)) val b4 = null
+}
+
+class A3 {
+  @CAnn(classOf[Int]) val b1 = null
+  @CAnn((1, 2)) val b2 = null
+  @CAnn[Int](classOf[Int]) val b3 = null
+  @CAnn[Int]((1, 2)) val b4 = null
+}
diff --git a/test/files/pos/array-interfaces.scala b/test/files/pos/array-interfaces.scala
index 4955911..70cafd2 100644
--- a/test/files/pos/array-interfaces.scala
+++ b/test/files/pos/array-interfaces.scala
@@ -1,7 +1,7 @@
 object s {
   def f(x: Cloneable) = ()
   def g(x: java.io.Serializable) = ()
-  
+
   def main(args: Array[String]): Unit = {
     f(args)
     g(args)
diff --git a/test/files/pos/arrays2.scala b/test/files/pos/arrays2.scala
index 2d5409c..795c486 100644
--- a/test/files/pos/arrays2.scala
+++ b/test/files/pos/arrays2.scala
@@ -11,8 +11,8 @@ object arrays2 {
 
 // #2422
 object arrays4 {
-  val args = Array[String]("World") 
-  "Hello %1$s".format(args: _*) 
+  val args = Array[String]("World")
+  "Hello %1$s".format(args: _*)
 }
 
 // #2461
diff --git a/test/files/neg/macro-invalidimpl-g.flags b/test/files/pos/attachments-typed-another-ident.flags
similarity index 100%
rename from test/files/neg/macro-invalidimpl-g.flags
rename to test/files/pos/attachments-typed-another-ident.flags
diff --git a/test/files/pos/attachments-typed-another-ident/Impls_1.scala b/test/files/pos/attachments-typed-another-ident/Impls_1.scala
new file mode 100644
index 0000000..98062a9
--- /dev/null
+++ b/test/files/pos/attachments-typed-another-ident/Impls_1.scala
@@ -0,0 +1,18 @@
+import scala.reflect.macros.blackbox.Context
+import language.experimental.macros
+
+object MyAttachment
+
+object Macros {
+  def impl(c: Context) = {
+    import c.universe._
+    import internal._
+    val ident = updateAttachment(Ident(TermName("bar")), MyAttachment)
+    assert(attachments(ident).get[MyAttachment.type].isDefined, attachments(ident))
+    val typed = c.typecheck(ident)
+    assert(attachments(typed).get[MyAttachment.type].isDefined, attachments(typed))
+    c.Expr[Int](typed)
+  }
+
+  def foo = macro impl
+}
diff --git a/test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala b/test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala
new file mode 100644
index 0000000..022639b
--- /dev/null
+++ b/test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+  def bar = 2
+  Macros.foo
+}
+
diff --git a/test/files/pos/attachments-typed-ident/Impls_1.scala b/test/files/pos/attachments-typed-ident/Impls_1.scala
index cc40893..25c0891 100644
--- a/test/files/pos/attachments-typed-ident/Impls_1.scala
+++ b/test/files/pos/attachments-typed-ident/Impls_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 import language.experimental.macros
 
 object MyAttachment
@@ -6,10 +6,11 @@ object MyAttachment
 object Macros {
   def impl(c: Context) = {
     import c.universe._
-    val ident = Ident(newTermName("bar")) updateAttachment MyAttachment
-    assert(ident.attachments.get[MyAttachment.type].isDefined, ident.attachments)
-    val typed = c.typeCheck(ident)
-    assert(typed.attachments.get[MyAttachment.type].isDefined, typed.attachments)
+    import internal._
+    val ident = updateAttachment(Ident(TermName("bar")), MyAttachment)
+    assert(attachments(ident).get[MyAttachment.type].isDefined, attachments(ident))
+    val typed = c.typecheck(ident)
+    assert(attachments(typed).get[MyAttachment.type].isDefined, attachments(typed))
     c.Expr[Int](typed)
   }
 
diff --git a/test/files/pos/attributes.scala b/test/files/pos/attributes.scala
index ec735d0..60e00bf 100644
--- a/test/files/pos/attributes.scala
+++ b/test/files/pos/attributes.scala
@@ -1,3 +1,5 @@
+class serializable extends annotation.StaticAnnotation
+
 @serializable  class C1;
 @serializable @volatile  class C2;
 @serializable @volatile  class C3;
diff --git a/test/files/pos/bcode_throw_null/TN.scala b/test/files/pos/bcode_throw_null/TN.scala
new file mode 100644
index 0000000..ed38b59
--- /dev/null
+++ b/test/files/pos/bcode_throw_null/TN.scala
@@ -0,0 +1,7 @@
+object TN {
+
+  def pre1(b: Boolean) {
+    println(if (b) 1 else throw null)
+  }
+
+}
diff --git a/test/files/pos/bounds.scala b/test/files/pos/bounds.scala
index cfea462..26bc84a 100644
--- a/test/files/pos/bounds.scala
+++ b/test/files/pos/bounds.scala
@@ -1,11 +1,11 @@
 trait Map[A, +C] {
-  def ++ [B1 >: C] (kvs: Iterable[Pair[A, B1]]): Map[A, B1] = this
-  def ++ [B1 >: C] (kvs: Iterator[Pair[A, B1]]): Map[A, B1] = this
+  def ++ [B1 >: C] (kvs: Iterable[Tuple2[A, B1]]): Map[A, B1] = this
+  def ++ [B1 >: C] (kvs: Iterator[Tuple2[A, B1]]): Map[A, B1] = this
 }
 
 class ListMap[A, +B] extends Map[A, B] {}
 
 object ListMap {
   def empty[X, Y] = new ListMap[X, Y]
-  def apply[A1, B2](elems: Pair[A1, B2]*): Map[A1, B2] = empty[A1,B2].++(elems.iterator)
+  def apply[A1, B2](elems: Tuple2[A1, B2]*): Map[A1, B2] = empty[A1,B2].++(elems.iterator)
 }
diff --git a/test/files/pos/builders.scala b/test/files/pos/builders.scala
index 51d8af8..0b62076 100644
--- a/test/files/pos/builders.scala
+++ b/test/files/pos/builders.scala
@@ -18,16 +18,16 @@ object builders {
     def += (elem: B) { buf += elem }
     def result: List[B] = buf.toList
   }
-/*  
+/*
   def fill[A, Dim1, Dim2, Coll](n1: Int, n2: Int, elem: A)(implicit b1: Builder[Coll, Dim1, A], b2: Builder[Coll, Dim2, Dim1]) = {
     for (i <- 0 until n1) {
       for (j <- 0 until n2) {
         b1 += elem
-      }	
+      }
       b2 += b1.result
     }
     b2.result
-  }	
+  }
 */
 /*
   implicit def arrayBuilder[A, B] = new Builder[Array[A], Array[B], B] {
@@ -35,7 +35,7 @@ object builders {
     private val buf = new scala.collection.mutable.ListBuffer[B]
     def += (elem: B) { buf += elem }
     def result: Array[B] = buf.toArray
-  }	
+  }
 */
   class Iter[A, C](elems: List[A]) {
     def ++ [B  >: A, D](xs: Iterable[B])(implicit b: Builder[C, D, B]): D = {
@@ -48,7 +48,7 @@ object builders {
       b.result
     }
   }
-  
+
   def main(args : Array[String]) : Unit = {
     val x1 = new Iter[Int, List[Int]](List(1, 2, 3))
 //    val x2 = new Iter[Int, Array[Int]](List(1, 2, 3))
diff --git a/test/files/pos/chang/Test.scala b/test/files/pos/chang/Test.scala
index 9bb745e..f74c635 100644
--- a/test/files/pos/chang/Test.scala
+++ b/test/files/pos/chang/Test.scala
@@ -1,3 +1,3 @@
-object Test extends Application {
+object Test extends App {
   new com.netgents.hello.Outer[String]
 }
diff --git a/test/files/pos/channels.scala b/test/files/pos/channels.scala
index 0a2274c..b2f0cdc 100644
--- a/test/files/pos/channels.scala
+++ b/test/files/pos/channels.scala
@@ -6,7 +6,7 @@ case class ![a](chan: Channel[a], data: a)
 
 /*
 object Bang {
-  def unapply[a](x: ![a]): Option[{Channel[a], a}] = 
+  def unapply[a](x: ![a]): Option[{Channel[a], a}] =
     Some(x.chan, x.data)
 }
 
@@ -14,7 +14,7 @@ object Bang {
 object Test extends App {
   object IC extends Channel[Int]
   def f[b](x: ![b]): Int = x match {
-    case send: ![c] => 
+    case send: ![c] =>
       send.chan match {
         case IC => send.data
       }
@@ -27,4 +27,3 @@ object Test2 extends App {
     case IC ! x => x
   }
 }
- 
diff --git a/test/files/pos/clsrefine.scala b/test/files/pos/clsrefine.scala
index b29c01d..0a016de 100644
--- a/test/files/pos/clsrefine.scala
+++ b/test/files/pos/clsrefine.scala
@@ -25,9 +25,9 @@ object test {
     val y1, y2 = 1;
   }
   val a: A { type X1 = Int; type X2 = Int } = b;
-  val a1 = new A { 
+  val a1 = new A {
     type X1 = Int;
-    type X2 = String; 
+    type X2 = String;
     val x1 = 1;
     val x2 = "hello"
   }
diff --git a/test/files/pos/collectGenericCC.scala b/test/files/pos/collectGenericCC.scala
index 8201c6a..5c51a50 100644
--- a/test/files/pos/collectGenericCC.scala
+++ b/test/files/pos/collectGenericCC.scala
@@ -7,8 +7,8 @@ object Test {
     r foreach ((a: A) => b += a)
     b.result
   }
-  
-  collect[Int, Vector[Int]](List(1,2,3,4)) 
+
+  collect[Int, Vector[Int]](List(1,2,3,4))
   collect[Char, String](List('1','2','3','4'))
-  collect[Char, Array[Char]](List('1','2','3','4'))  
+  collect[Char, Array[Char]](List('1','2','3','4'))
 }
\ No newline at end of file
diff --git a/test/files/pos/context.scala b/test/files/pos/context.scala
index 13f6bb4..4e11d07 100644
--- a/test/files/pos/context.scala
+++ b/test/files/pos/context.scala
@@ -12,10 +12,10 @@ class Context {
 abstract class SymbolWrapper {
   val context: Context;
   import context._;
-  
+
   class Symbols {
     self: context.symbols.type =>
-    
+
     abstract class Symbol {
       def typ: types.Type;
       def sym: Symbol = typ.sym;
@@ -29,7 +29,7 @@ abstract class TypeWrapper {
 
   class Types {
     self: context.types.type =>
-    
+
     abstract class Type {
       def sym: symbols.Symbol;
       def typ: Type = sym.typ;
diff --git a/test/files/pos/cycle-jsoup.flags b/test/files/pos/cycle-jsoup.flags
new file mode 100644
index 0000000..ca20f55
--- /dev/null
+++ b/test/files/pos/cycle-jsoup.flags
@@ -0,0 +1 @@
+-Ybreak-cycles
diff --git a/test/files/pos/cycle-jsoup.scala b/test/files/pos/cycle-jsoup.scala
new file mode 100644
index 0000000..879e693
--- /dev/null
+++ b/test/files/pos/cycle-jsoup.scala
@@ -0,0 +1,5 @@
+object Test {
+  def main(args : Array[String]) {
+    org.jsoup.Jsoup.parse(null: java.net.URL, 3000)
+  }
+}
diff --git a/test/files/pos/cycle.flags b/test/files/pos/cycle.flags
new file mode 100644
index 0000000..ca20f55
--- /dev/null
+++ b/test/files/pos/cycle.flags
@@ -0,0 +1 @@
+-Ybreak-cycles
diff --git a/test/files/pos/cycle/J_1.java b/test/files/pos/cycle/J_1.java
new file mode 100644
index 0000000..0cc218e
--- /dev/null
+++ b/test/files/pos/cycle/J_1.java
@@ -0,0 +1,16 @@
+package bar;
+
+public class J_1 {
+  public void f(C.D arg) {
+  }
+}
+
+class B extends J_1 {
+  public void g(C.D arg) {
+  }
+}
+
+class C extends B {
+  public class D {
+  }
+}
diff --git a/test/files/pos/cycle/X_2.scala b/test/files/pos/cycle/X_2.scala
new file mode 100644
index 0000000..c1840f3
--- /dev/null
+++ b/test/files/pos/cycle/X_2.scala
@@ -0,0 +1,3 @@
+import bar.J_1._ //<--- illegal cyclic reference involving
+
+class X
diff --git a/test/files/pos/cyclics-pos.scala b/test/files/pos/cyclics-pos.scala
index 051bdd6..395e888 100644
--- a/test/files/pos/cyclics-pos.scala
+++ b/test/files/pos/cyclics-pos.scala
@@ -1,26 +1,26 @@
 trait Param[T]
 trait Abs { type T }
 trait Cyclic1[A <: Param[A]]    // works
-trait Cyclic2[A <: Abs { type T <: A }]   
-trait Cyclic3 { type A <: Abs { type T = A } }    
+trait Cyclic2[A <: Abs { type T <: A }]
+trait Cyclic3 { type A <: Abs { type T = A } }
 trait Cyclic4 { type A <: Param[A] }   // works
-trait Cyclic5 { type AA <: Abs; type A <: AA { type T = A } }    
+trait Cyclic5 { type AA <: Abs; type A <: AA { type T = A } }
 
 
 trait IterableTemplate {
   type Elem
   type Constr <: IterableTemplate
   type ConstrOf[A] = Constr { type Elem = A }
-  
+
   def iterator: Iterator[Elem]
-  
+
   def map [B] (f: Elem => B): ConstrOf[B]
-  
+
   def foreach(f: Elem => Unit) = iterator.foreach(f)
 }
 
 
 trait Iterable[A] extends IterableTemplate { self =>
-  type Elem 
+  type Elem
   type Constr <: Iterable[A] { type Constr <: Iterable.this.Constr }
 }
diff --git a/test/files/pos/debug-reset-local-attrs.flags b/test/files/pos/debug-reset-local-attrs.flags
new file mode 100644
index 0000000..9c7d640
--- /dev/null
+++ b/test/files/pos/debug-reset-local-attrs.flags
@@ -0,0 +1 @@
+-Ydebug
diff --git a/test/files/pos/debug-reset-local-attrs.scala b/test/files/pos/debug-reset-local-attrs.scala
new file mode 100644
index 0000000..8348657
--- /dev/null
+++ b/test/files/pos/debug-reset-local-attrs.scala
@@ -0,0 +1 @@
+case class FT(f : Float)
diff --git a/test/files/pos/delambdafy-lambdalift.scala b/test/files/pos/delambdafy-lambdalift.scala
new file mode 100644
index 0000000..e9da24e
--- /dev/null
+++ b/test/files/pos/delambdafy-lambdalift.scala
@@ -0,0 +1,8 @@
+class LambdaLift {
+
+  def enclosingMethod(capturedArg: Int): Unit = {
+    def innerMethod(x: Int): Int = x + capturedArg
+    val f = (y: Int) => innerMethod(y)
+  }
+
+}
diff --git a/test/files/pos/delambdafy-patterns.scala b/test/files/pos/delambdafy-patterns.scala
new file mode 100644
index 0000000..95d4986
--- /dev/null
+++ b/test/files/pos/delambdafy-patterns.scala
@@ -0,0 +1,15 @@
+class DelambdafyPatterns {
+  def bar: Unit = ()
+  def wildcardPatternInTryCatch: Unit => Unit = (x: Unit) =>
+    // patterns in try..catch are preserved so we need to be
+    // careful when it comes to free variable detction
+    // in particular a is _not_ free variable, also the
+    // `_` identifier has no symbol attached to it
+    try bar catch {
+      case a@(_:java.lang.reflect.InvocationTargetException) =>
+        // refer to a so we trigger a bug where a is considered
+        // to be a free variable for enclosing lambda
+        val b = a
+        ()
+    }
+}
diff --git a/test/files/pos/delambdafy_t6260_method.check b/test/files/pos/delambdafy_t6260_method.check
new file mode 100644
index 0000000..f5cd694
--- /dev/null
+++ b/test/files/pos/delambdafy_t6260_method.check
@@ -0,0 +1,13 @@
+delambdafy_t6260_method.scala:3: error: bridge generated for member method apply: (bx: Object)Object in class map$extension1
+which overrides method apply: (v1: Object)Object in trait Function1
+clashes with definition of the member itself;
+both have erased type (bx: Object)Object
+    ((bx: Box[X]) => new Box(f(bx.x)))(this)
+                  ^
+delambdafy_t6260_method.scala:8: error: bridge generated for member method apply: (bx: Object)Object in class map21
+which overrides method apply: (v1: Object)Object in trait Function1
+clashes with definition of the member itself;
+both have erased type (bx: Object)Object
+    ((bx: Box[X]) => new Box(f(bx.x)))(self)
+                  ^
+two errors found
diff --git a/test/files/pos/delambdafy_t6260_method.flags b/test/files/pos/delambdafy_t6260_method.flags
new file mode 100644
index 0000000..48b438d
--- /dev/null
+++ b/test/files/pos/delambdafy_t6260_method.flags
@@ -0,0 +1 @@
+-Ydelambdafy:method
diff --git a/test/files/neg/t6260.scala b/test/files/pos/delambdafy_t6260_method.scala
similarity index 100%
copy from test/files/neg/t6260.scala
copy to test/files/pos/delambdafy_t6260_method.scala
diff --git a/test/files/pos/depmet_implicit_chaining_zw.scala b/test/files/pos/depmet_implicit_chaining_zw.scala
index 93da3b0..ce5ea47 100644
--- a/test/files/pos/depmet_implicit_chaining_zw.scala
+++ b/test/files/pos/depmet_implicit_chaining_zw.scala
@@ -3,7 +3,7 @@ trait Succ[N]
 
 trait ZipWith[N, S] {
   type T
-  val x: T = error("")
+  val x: T = sys.error("")
 }
 
 object ZipWith {
@@ -15,7 +15,7 @@ object ZipWith {
     type T = Stream[S] => zWith.T // dependent types replace the associated types functionality
   }
 
-  // can't use implicitly[ZipWith[Succ[Succ[Zero]], Int => String => Boolean]], 
+  // can't use implicitly[ZipWith[Succ[Succ[Zero]], Int => String => Boolean]],
   // since that will chop of the {type T = ... } refinement in adapt (pt = ZipWith[Succ[Succ[Zero]], Int => String => Boolean])
   // this works
   // def zipWith(implicit zw: ZipWith[Succ[Succ[Zero]], Int => String => Boolean]): zw.T = zw.x
@@ -25,4 +25,4 @@ object ZipWith {
   type _2 = Succ[Succ[Zero]]
   val zw = ?[ZipWith[_2, Int => String => Boolean]].x // : Stream[Int] => Stream[String] => Stream[Boolean]
   // val zw = implicitly[ZipWith[Succ[Succ[Zero]], Int => String => Boolean]{type T = Stream[Int] => Stream[String] => Stream[Boolean]}].x
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/depmet_implicit_norm_ret.scala b/test/files/pos/depmet_implicit_norm_ret.scala
index bafd2f7..0c587cf 100644
--- a/test/files/pos/depmet_implicit_norm_ret.scala
+++ b/test/files/pos/depmet_implicit_norm_ret.scala
@@ -1,29 +1,29 @@
 object Test{
   def ?[S <: AnyRef](implicit w : S) : w.type = w
-  
+
   // fallback, lower priority (overloading rules apply: pick alternative in subclass lowest in subtyping lattice)
   class ZipWithDefault {
     implicit def ZeroZipWith[S] = new ZipWith[S] {
       type T = Stream[S]
-    }    
+    }
   }
-  
+
   object ZipWith extends ZipWithDefault {
     // def apply[S: ZipWith](s : S) = ?[ZipWith[S]].zipWith(s) // TODO: bug return type should be inferred
     def apply[S](s : S)(implicit zw: ZipWith[S]): zw.T = zw.zipWith(s)
 
     implicit def SuccZipWith[S,R](implicit zWith : ZipWith[R]) = new ZipWith[S => R] {
       type T = Stream[S] => zWith.T // dependent types replace the associated types functionality
-    }    
+    }
   }
-  
+
   trait ZipWith[S] {
     type T
-    def zipWith : S => T = error("")
+    def zipWith : S => T = sys.error("")
   }
-  
+
   // bug: inferred return type = (Stream[A]) => java.lang.Object with Test.ZipWith[B]{type T = Stream[B]}#T
   // this seems incompatible with vvvvvvvvvvvvvvvvvvvvvv   -- #3731
-  def map[A,B](f : A => B)   /* : Stream[A] => Stream[B]*/ = ZipWith(f) 
-  val tst: Stream[Int] = map{x: String => x.length}(Stream("a"))  
-}  
\ No newline at end of file
+  def map[A,B](f : A => B)   /* : Stream[A] => Stream[B]*/ = ZipWith(f)
+  val tst: Stream[Int] = map{x: String => x.length}(Stream("a"))
+}
diff --git a/test/files/pos/depmet_implicit_oopsla_session.scala b/test/files/pos/depmet_implicit_oopsla_session.scala
index e2c67d7..21588a5 100644
--- a/test/files/pos/depmet_implicit_oopsla_session.scala
+++ b/test/files/pos/depmet_implicit_oopsla_session.scala
@@ -11,19 +11,19 @@ object Sessions {
     def run(p: Stop, dp: Stop): Unit = {}
   }
 
-  implicit def InDual[A, B](implicit sessionDIn: Session[B]) = 
+  implicit def InDual[A, B](implicit sessionDIn: Session[B]) =
     new Session[In[A, B]] {
       type Dual = Out[A, sessionDIn.Dual]
 
-      def run(p: In[A, B], dp: Dual): Unit = 
+      def run(p: In[A, B], dp: Dual): Unit =
         sessionDIn.run(p.func(dp.x), dp.y)
   }
 
-  implicit def OutDual[A, B](implicit sessionDOut: Session[B]) = 
+  implicit def OutDual[A, B](implicit sessionDOut: Session[B]) =
     new Session[Out[A, B]] {
      type Dual = In[A, sessionDOut.Dual]
 
-     def run(p: Out[A, B], dp: Dual): Unit = 
+     def run(p: Out[A, B], dp: Dual): Unit =
        sessionDOut.run(p.y, dp.func(p.x))
   }
 
@@ -32,7 +32,7 @@ object Sessions {
   sealed case class Out[+A, +B](x: A, y: B)
 
   def addServer =
-    In{x: Int => 
+    In{x: Int =>
     In{y: Int => System.out.println("Thinking")
     Out(x+y,
     Stop())}}
@@ -48,7 +48,7 @@ object Sessions {
 
   // def runSession[S, D](p: S, dp: D)(implicit s: Session[S]#HasDual[D]) =
   //   s.run(p, dp)
-  // 
+  //
   // def runSession[S, D](p: S, dp: D)(implicit s: Session[S]{type Dual=D}) =
   //   s.run(p, dp)
 
diff --git a/test/files/pos/depmet_implicit_oopsla_session_2.scala b/test/files/pos/depmet_implicit_oopsla_session_2.scala
index 8d7daa6..5c3b78e 100644
--- a/test/files/pos/depmet_implicit_oopsla_session_2.scala
+++ b/test/files/pos/depmet_implicit_oopsla_session_2.scala
@@ -36,7 +36,7 @@ object Sessions {
   implicit def InDual[Data, Cont](implicit cont: Session[Cont]) = new Session[In[Data, Cont]] {
     type Dual = Out[Data, cont.Dual]
 
-    def run(self: Self, dual: Dual): Unit = 
+    def run(self: Self, dual: Dual): Unit =
       cont.run(self.recv(dual.data), dual.cont)
   }
 
@@ -46,13 +46,13 @@ object Sessions {
   implicit def OutDual[Data, Cont](implicit cont: Session[Cont]) = new Session[Out[Data, Cont]] {
     type Dual = In[Data, cont.Dual]
 
-    def run(self: Self, dual: Dual): Unit = 
+    def run(self: Self, dual: Dual): Unit =
       cont.run(self.cont, dual.recv(self.data))
   }
 
   // a concrete session
   def addServer =
-    In{x: Int => 
+    In{x: Int =>
     In{y: Int => System.out.println("Thinking")
     Out(x+y,
     Stop())}}
@@ -71,7 +71,7 @@ object Sessions {
 
   // def runSession[S, D](p: S, dp: D)(implicit s: Session[S]#HasDual[D]) =
   //   s.run(p, dp)
-  // 
+  //
   // def runSession[S, D](p: S, dp: D)(implicit s: Session[S]{type Dual=D}) =
   //   s.run(p, dp)
 
diff --git a/test/files/pos/depmet_implicit_oopsla_session_simpler.scala b/test/files/pos/depmet_implicit_oopsla_session_simpler.scala
index d2986ef..04b8f94 100644
--- a/test/files/pos/depmet_implicit_oopsla_session_simpler.scala
+++ b/test/files/pos/depmet_implicit_oopsla_session_simpler.scala
@@ -5,7 +5,7 @@ object Sessions {
     def run(dp: Dual): Unit
   }
 
-  sealed case class Stop extends Session {
+  sealed case class Stop() extends Session {
     type Dual = Stop
 
     def run(dp: Dual): Unit = {}
@@ -14,7 +14,7 @@ object Sessions {
   // can't write B <: Session{type Dual = BDual} due to limitations in type inference algorithm
   // (type variables cannot occur on both sides of <:)
   // using B#Dual instead of BDual is too imprecise, since it is disconnected from the actual argument that is passed for B
-  // would be nice if we could introduce a universal quantification over BDual that is not part of the 
+  // would be nice if we could introduce a universal quantification over BDual that is not part of the
   // type parameter list
   sealed case class In[A, B <: Session, BDual <: Session](recv: A => B)(implicit dual: B <:< Session{type Dual=BDual}) extends Session {
     type Dual = Out[A, BDual]
@@ -29,7 +29,7 @@ object Sessions {
   }
 
   def addServer =
-    In{x: Int => 
+    In{x: Int =>
     In{y: Int => System.out.println("Thinking")
     Out(x+y,
     Stop())}}
diff --git a/test/files/pos/depmet_implicit_oopsla_zipwith.scala b/test/files/pos/depmet_implicit_oopsla_zipwith.scala
index fe69802..c76d02c 100644
--- a/test/files/pos/depmet_implicit_oopsla_zipwith.scala
+++ b/test/files/pos/depmet_implicit_oopsla_zipwith.scala
@@ -15,7 +15,7 @@ object ZipWith {
     def manyApp = n => xs => xs
   }
 
-  implicit def SuccZipWith[N, S, R](implicit zw: ZipWith[N, R]) = 
+  implicit def SuccZipWith[N, S, R](implicit zw: ZipWith[N, R]) =
     new ZipWith[Succ[N],S => R] {
       type T = Stream[S] => zw.T
 
@@ -33,12 +33,12 @@ object ZipWith {
 object Test {
   def zWith[N, S](n: N, s: S)(implicit zw: ZipWith[N, S]): zw.T = zw.zipWith(n)(s)
 
-  def zipWith0: Stream[Int] = zWith(Zero(),0) 
+  def zipWith0: Stream[Int] = zWith(Zero(),0)
 
 // (Stream[A]) => java.lang.Object with ZipWith[Zero,B]{type T = Stream[B]}#T
 // should normalise to: Stream[A] => Stream[B]
   def map[A, B](f: A => B) = zWith(Succ(Zero()),f)
-                               
+
   def zipWith3[A, B, C, D](f: A => B => C => D) = //: Stream[A] => Stream[B] => Stream[C] => Stream[D] = // BUG why do we need a return type?
     zWith(Succ(Succ(Succ(Zero()))),f)
 }
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_tpbetareduce.scala b/test/files/pos/depmet_implicit_tpbetareduce.scala
index c0b9b4e..35d2606 100644
--- a/test/files/pos/depmet_implicit_tpbetareduce.scala
+++ b/test/files/pos/depmet_implicit_tpbetareduce.scala
@@ -1,10 +1,10 @@
 trait HOSeq {
   trait Accumulator[+coll[x], elT]
   trait Iterable[+t] {
-    type m[+x] 
+    type m[+x]
     def accumulator[t]: Accumulator[m, t]
   }
-  implicit def listAccumulator[elT]: Accumulator[List, elT] = new Accumulator[List, elT] {}  
+  implicit def listAccumulator[elT]: Accumulator[List, elT] = new Accumulator[List, elT] {}
   trait List[+t] extends Iterable[t] {
     type m[+x] = List[x]
     def accumulator[t]: Accumulator[List, t] = listAccumulator[t]
diff --git a/test/files/pos/dotless-targs.scala b/test/files/pos/dotless-targs.scala
new file mode 100644
index 0000000..8c0e244
--- /dev/null
+++ b/test/files/pos/dotless-targs.scala
@@ -0,0 +1,9 @@
+class A {
+  def fn1 = List apply 1
+  def fn2 = List apply[Int] 2
+
+  def g1: Char = "g1" toList 0
+  def g2: Char = "g2" apply 1
+
+  def h1 = List apply[List[Int]] (List(1), List(2)) mapConserve[List[Any]] (x => x)
+}
diff --git a/test/files/pos/elidable-tparams.scala b/test/files/pos/elidable-tparams.scala
index e47951f..23b1cba 100644
--- a/test/files/pos/elidable-tparams.scala
+++ b/test/files/pos/elidable-tparams.scala
@@ -3,7 +3,7 @@ import elidable._
 
 class ElidableCrashTest {
   trait My
-  
+
   @elidable(MINIMUM) def foo[a >: My <: My]: scala.Unit = ()
 
   foo[My] // crash
diff --git a/test/files/pos/erasure-nsquared.scala b/test/files/pos/erasure-nsquared.scala
new file mode 100644
index 0000000..b0e30ad
--- /dev/null
+++ b/test/files/pos/erasure-nsquared.scala
@@ -0,0 +1,35 @@
+trait BigCast {
+  def bar(x: Int): AnyRef = (
+    null
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+      .asInstanceOf[List[AnyRef]].head
+  )
+}
diff --git a/test/pending/pos/exhaust_2.scala b/test/files/pos/exhaust_2.scala
similarity index 100%
rename from test/pending/pos/exhaust_2.scala
rename to test/files/pos/exhaust_2.scala
diff --git a/test/files/pos/existential-java-case-class/Client.scala b/test/files/pos/existential-java-case-class/Client.scala
new file mode 100644
index 0000000..3688998
--- /dev/null
+++ b/test/files/pos/existential-java-case-class/Client.scala
@@ -0,0 +1,3 @@
+case class CC(x: J[_])
+
+case class CC1(x: Any => J[_])
diff --git a/test/files/pos/existential-java-case-class/J.java b/test/files/pos/existential-java-case-class/J.java
new file mode 100644
index 0000000..7fd7848
--- /dev/null
+++ b/test/files/pos/existential-java-case-class/J.java
@@ -0,0 +1 @@
+public class J<T extends String> {}
diff --git a/test/files/pos/existentials.scala b/test/files/pos/existentials.scala
index 0adbc70..9ca86d1 100644
--- a/test/files/pos/existentials.scala
+++ b/test/files/pos/existentials.scala
@@ -11,7 +11,7 @@ class A {
   //   lazy val quux3a = f()
   //   quux3a
   // }
-  
+
   val bippy0 = f _
   def bippy1 = f _
   // lazy val bippy2 = f _
diff --git a/test/files/pos/exponential-spec.scala b/test/files/pos/exponential-spec.scala
index 83aef58..54515c1 100644
--- a/test/files/pos/exponential-spec.scala
+++ b/test/files/pos/exponential-spec.scala
@@ -5,7 +5,7 @@ trait Exp[T]
 
 object Test {
   def f[T](exp: Exp[T]): Exp[T] = (
-    f[T] _ 
+    f[T] _
       compose f[T]
       compose f[T]
       compose f[T]
@@ -42,6 +42,6 @@ object Test {
       compose f[T]
       compose f[T]
       compose f[T]
-      compose f[T]      
+      compose f[T]
     )(exp)
 }
diff --git a/test/files/pos/extractor-types.scala b/test/files/pos/extractor-types.scala
new file mode 100644
index 0000000..bb9659a
--- /dev/null
+++ b/test/files/pos/extractor-types.scala
@@ -0,0 +1,30 @@
+package p1 {
+  object Ex  { def unapply(p: Any): Option[_ <: Int] = null }
+  object Foo { val Ex(_) = null }
+}
+// a.scala:2: error: error during expansion of this match (this is a scalac bug).
+// The underlying error was: type mismatch;
+//  found   : Some[_$1(in value x$1)] where type _$1(in value x$1)
+//  required: Some[_$1(in method unapply)]
+// object Foo { val Ex(_) = null }
+//                    ^
+// one error found
+
+package p2 {
+  trait Other {
+    class Quux
+    object Baz { def unapply(x: Any): Option[Quux] = None }
+  }
+  trait Reifiers {
+    def f() {
+      val u2: Other = null
+      (null: Any) match { case u2.Baz(x) => println(x) } //: u2.Quux) }
+      // The underlying error was: type mismatch;
+      //  found   : Other#Quux
+      //  required: u2.Quux
+      //     x match { case u2.Baz(x) => println(x: u2.Quux) }
+      //       ^
+      // one error found
+    }
+  }
+}
diff --git a/test/files/pos/gadt-gilles.scala b/test/files/pos/gadt-gilles.scala
index 309168c..662be90 100644
--- a/test/files/pos/gadt-gilles.scala
+++ b/test/files/pos/gadt-gilles.scala
@@ -6,7 +6,7 @@ object Test {
 
   val x: A[C with D] = new B[C, D] {}
   val y: A[C with D] = x match { case b: B[u, v] => (new B[u, v] {}): A[u with v] } // OK
-  
+
 
   def f[T, U](p: A[T with U]): A[T with U] = p match { case b: B[u, v] => new A[u with v] {} } // Not OK
 }
diff --git a/test/files/pos/gadts2.scala b/test/files/pos/gadts2.scala
index 2263cf1..d77c8a7 100644
--- a/test/files/pos/gadts2.scala
+++ b/test/files/pos/gadts2.scala
@@ -4,13 +4,13 @@ object Test {
   case class MyInt(n: Int) extends Number
   case class MyDouble(d: Double) extends Number
 
-  trait Term[+a]
+  trait Term[a]
   case class Cell[a](var x: a) extends Term[a]
   final case class NumTerm(val n: Number) extends Term[Number]
 
   def f[a](t: Term[a], c: Cell[a]) {
-    t match {  
-      case NumTerm(n) => c.x = MyDouble(1.0) 
+    t match {
+      case NumTerm(n) => c.x = MyDouble(1.0)
     }
   }
 
diff --git a/test/files/pos/gen-traversable-methods.scala b/test/files/pos/gen-traversable-methods.scala
index 2604a09..bc72074 100644
--- a/test/files/pos/gen-traversable-methods.scala
+++ b/test/files/pos/gen-traversable-methods.scala
@@ -6,7 +6,7 @@ import collection._
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     val gen: GenTraversable[Int] = List(1, 2, 3)
     gen.head
@@ -16,5 +16,5 @@ object Test {
     gen.lastOption
     gen.init
   }
-  
+
 }
diff --git a/test/files/pos/generic-sigs.scala b/test/files/pos/generic-sigs.scala
index b112766..98c50b8 100644
--- a/test/files/pos/generic-sigs.scala
+++ b/test/files/pos/generic-sigs.scala
@@ -6,7 +6,7 @@ object A {
   def f3(x: Class[_ <: Int]) = x
   def f4(x: Class[_ <: String with Int]) = x
   def f5(x: Class[_ <: Int with String]) = x
-  
+
   class Bippy[T]
   def f6(x: Int) = new Bippy[t forSome { type t <: Int }]
   def f7(x: T forSome { type T <: Float }) = x
@@ -14,7 +14,7 @@ object A {
   def f9(x: T forSome { type T <: runtime.BoxedUnit }) = x
   def f10(x: Int) = new Bippy[t forSome { type t <: Unit }]
   def f11(x: Int) = new Bippy[t forSome { type t >: Null }]
-  
+
   class Boppy[+T1,-T2]
   def g1 = new Boppy[t forSome { type t <: Int }, u forSome { type u <: String }]
 }
diff --git a/test/files/pos/gosh.scala b/test/files/pos/gosh.scala
index 427df4d..98fae8a 100644
--- a/test/files/pos/gosh.scala
+++ b/test/files/pos/gosh.scala
@@ -7,35 +7,35 @@ object ShapeTest extends App {
   abstract class Shape {
     def draw(): Unit
   }
-    
+
   class Line(s: Point, e: Point) extends Shape {
     def draw() { Console.println("draw line " + s + "," + e) }
   }
-    
+
   abstract class Foo {
     type T <: Object
- 
+
     def show(o: T): Unit
     def print() { Console.println("in Foo") }
   }
-    
+
   abstract class ShapeFoo extends Foo {
     type T <: Shape
     def show(o: T) { o.draw() }
     override def print() { Console.println("in ShapeFoo") }
   }
-    
+
   class LineFoo extends ShapeFoo {
     type T = Line
     override def print() { Console.println("in LineFoo") }
   }
-    
+
   val p1 = new Point(1,4)
   val p2 = new Point(12, 28)
-    
+
   val l1 = new Line(p1, p2)
 
-    
+
   val l = new ShapeFoo {  // ** //
     type T = Line  // ** //
     override def print() { Console.println("in LineFoo") } // ** //
diff --git a/test/files/pos/gui.scala b/test/files/pos/gui.scala
index 322e0a6..3b4f49c 100644
--- a/test/files/pos/gui.scala
+++ b/test/files/pos/gui.scala
@@ -2,7 +2,7 @@ object Geom {
   trait Shape
   case class Point(x: Int, y: Int) extends Shape
   case class Rectangle(ll: Point, ur: Point) extends Shape {
-    def inset(delta: Int) = 
+    def inset(delta: Int) =
       Rectangle(Point(ll.x - delta, ll.y - delta), Point(ur.x + delta, ur.y + delta));
   }
 }
@@ -20,7 +20,7 @@ trait Screen {
 }
 
 object DummyScreen extends Screen {
-  def drawRect(r: Geom.Rectangle, c: Color) { 
+  def drawRect(r: Geom.Rectangle, c: Color) {
     Console.println("draw " + r + " with " + c)
   }
   def fillRect(r: Geom.Rectangle, c: Color) {
@@ -55,7 +55,7 @@ object GUI {
     def mouseDown(p: Geom.Point): Unit
   }
 
-  abstract class Button(scr: Screen, p: Geom.Point, name: String) 
+  abstract class Button(scr: Screen, p: Geom.Point, name: String)
   extends Glyph with MouseCtl {
     var enabled: Boolean = false
     val label = new Label(scr, p, name)
@@ -86,7 +86,7 @@ object GUIClient {
     def quit() { Console.println("application exited") }
   }
 
-  class QuitButton (scr: Screen, p: Geom.Point, name: String, a: App) 
+  class QuitButton (scr: Screen, p: Geom.Point, name: String, a: App)
   extends GUI.Button(scr, p, name) {
     def doit() { a.quit() }
   }
diff --git a/test/files/pos/hk-infer.scala b/test/files/pos/hk-infer.scala
index 7834282..30e3476 100644
--- a/test/files/pos/hk-infer.scala
+++ b/test/files/pos/hk-infer.scala
@@ -31,7 +31,7 @@ object DoesWorkHK {
     def BOOP(ys: Seq[M[_]]) = new Booper[M](xs ++ ys)
   }
   implicit def mkBoop[M[_]](xs: Seq[M[_]]) = new Booper[M](xs)
-  
+
   def f1 = x BOOP y BOOP x1 BOOP x2
 }
 
diff --git a/test/files/pos/hkarray.scala b/test/files/pos/hkarray.scala
index 3faae18..af11603 100644
--- a/test/files/pos/hkarray.scala
+++ b/test/files/pos/hkarray.scala
@@ -1,5 +1,5 @@
 trait Foo[CC[_]] { }
 
 class Bip {
-  val x = new Foo[Array] { } 
+  val x = new Foo[Array] { }
 }
\ No newline at end of file
diff --git a/test/files/pos/hkrange.scala b/test/files/pos/hkrange.scala
index 8d61167..a680323 100644
--- a/test/files/pos/hkrange.scala
+++ b/test/files/pos/hkrange.scala
@@ -1,5 +1,5 @@
 class A {
   def f[CC[X] <: Traversable[X]](x: CC[Int]) = ()
-  
+
   f(1 to 5)
 }
diff --git a/test/files/pos/imp2-pos.scala b/test/files/pos/imp2-pos.scala
index 407b07f..5460c60 100644
--- a/test/files/pos/imp2-pos.scala
+++ b/test/files/pos/imp2-pos.scala
@@ -1,5 +1,5 @@
 object Test {
-  import collection.mutable._  
+  import collection.mutable._
   import collection.mutable._
   val x = new HashMap
 }
diff --git a/test/files/pos/implicit-anyval-2.10.flags b/test/files/pos/implicit-anyval-2.10.flags
new file mode 100644
index 0000000..94c8056
--- /dev/null
+++ b/test/files/pos/implicit-anyval-2.10.flags
@@ -0,0 +1 @@
+-Xsource:2.10
diff --git a/test/files/pos/implicit-anyval-2.10.scala b/test/files/pos/implicit-anyval-2.10.scala
new file mode 100644
index 0000000..3082af7
--- /dev/null
+++ b/test/files/pos/implicit-anyval-2.10.scala
@@ -0,0 +1,3 @@
+object Test {
+  "": AnyVal // newly prohibited in 2.11, allowed under -Xsourse:2.10
+}
\ No newline at end of file
diff --git a/test/files/pos/implicit-infix-ops.scala b/test/files/pos/implicit-infix-ops.scala
index d7519e6..66f3718 100644
--- a/test/files/pos/implicit-infix-ops.scala
+++ b/test/files/pos/implicit-infix-ops.scala
@@ -1,7 +1,7 @@
 object Test {
   import Ordering.Implicits._
   import Numeric.Implicits._
-  
+
   def f1[T: Numeric](x: T, y: T, z: T)  = x + y + z
   def f2[T: Ordering](x: T, y: T, z: T) = if (x < y) (z > y) else (x < z)
 }
@@ -9,7 +9,7 @@ object Test {
 object Int {
   import Ordering.Implicits._
   import math.Integral.Implicits._
-  
+
   def f1[T: Integral](x: T, y: T, z: T)  = (x + y + z) / z
   def f2[T: Ordering](x: T, y: T, z: T) = if (x < y) (z > y) else (x < z)
 }
@@ -17,7 +17,7 @@ object Int {
 object Frac {
   import Ordering.Implicits._
   import math.Fractional.Implicits._
-  
+
   def f1[T: Fractional](x: T, y: T, z: T)  = (x + y + z) / z
   def f2[T: Ordering](x: T, y: T, z: T) = if (x < y) (z > y) else (x < z)
 }
\ No newline at end of file
diff --git a/test/files/pos/implicits-new.scala b/test/files/pos/implicits-new.scala
index ffc3871..7b4f20c 100644
--- a/test/files/pos/implicits-new.scala
+++ b/test/files/pos/implicits-new.scala
@@ -3,9 +3,9 @@ import scala.reflect.{ClassTag, classTag}
 
 // #1435
 object t1435 {
-  implicit def a(s:String):String = error("")
-  implicit def a(i:Int):String = error("")
-  implicit def b(i:Int):String = error("")
+  implicit def a(s:String):String = sys.error("")
+  implicit def a(i:Int):String = sys.error("")
+  implicit def b(i:Int):String = sys.error("")
 }
 
 class C1435 {
@@ -89,4 +89,4 @@ package foo2709 {
 // Problem with specs
 object specsProblem {
   println(implicitly[TypeTag[Class[_]]])
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/implicits-old.scala b/test/files/pos/implicits-old.scala
index 2c01dd0..62ae6b8 100644
--- a/test/files/pos/implicits-old.scala
+++ b/test/files/pos/implicits-old.scala
@@ -1,8 +1,8 @@
 // #1435
 object t1435 {
-  implicit def a(s:String):String = error("")
-  implicit def a(i:Int):String = error("")
-  implicit def b(i:Int):String = error("")
+  implicit def a(s:String):String = sys.error("")
+  implicit def a(i:Int):String = sys.error("")
+  implicit def b(i:Int):String = sys.error("")
 }
 
 class C1435 {
@@ -45,7 +45,7 @@ object Test1625 {
   implicit def byName[A](x: =>A) = new Wrapped(x)
 
   implicit def byVal[A](x: A) = x
-  
+
   def main(args: Array[String]) = {
 
 //    val res:Wrapped = 7 // works
@@ -57,7 +57,7 @@ object Test1625 {
 }
 
 object Test2188 {
-  implicit def toJavaList[A: ClassManifest](t:collection.Seq[A]):java.util.List[A] = java.util.Arrays.asList(t.toArray:_*)   
+  implicit def toJavaList[A: ClassManifest](t:collection.Seq[A]):java.util.List[A] = java.util.Arrays.asList(t.toArray:_*)
 
   val x: java.util.List[String] = List("foo")
 }
@@ -67,21 +67,21 @@ object TestNumericWidening {
   val x: java.lang.Long = y
 }
 
-// #2709 
-package foo2709 { 
-  class A 
-  class B 
- 
-  package object bar { 
-    implicit def a2b(a: A): B = new B 
-  } 
- 
-  package bar { 
-    object test { 
-      new A: B 
-    } 
-  } 
-} 
+// #2709
+package foo2709 {
+  class A
+  class B
+
+  package object bar {
+    implicit def a2b(a: A): B = new B
+  }
+
+  package bar {
+    object test {
+      new A: B
+    }
+  }
+}
 
 // Problem with specs
 object specsProblem {
diff --git a/test/files/pos/imports-pos.scala b/test/files/pos/imports-pos.scala
index 5f70a5d..f6a55e5 100644
--- a/test/files/pos/imports-pos.scala
+++ b/test/files/pos/imports-pos.scala
@@ -7,10 +7,10 @@ object test {
 
   val foo = 1;
 
-  p("hello"); print("world"); S.out.println("!"); 
+  p("hello"); print("world"); S.out.println("!");
   S.out.flush();
 }
 object test1 {
   import test._;
   foo
-} 
+}
diff --git a/test/files/pos/infer2-pos.scala b/test/files/pos/infer2-pos.scala
index 06d0f58..2ce88be 100644
--- a/test/files/pos/infer2-pos.scala
+++ b/test/files/pos/infer2-pos.scala
@@ -1,8 +1,7 @@
 package test
 class Lst[T]
 case class cons[T](x: T, xs: Lst[T]) extends Lst[T]
-case class nil[T] extends Lst[T]
+case class nil[T]() extends Lst[T]
 object test {
   Console.println(cons(1, nil()))
 }
-  
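The change above is forced by 2.11 dropping support for case classes without a parameter list. A minimal sketch of the forms 2.11 still accepts (hypothetical names, not part of this patch):

    case class Empty[T]()     // fine: explicit empty parameter list
    case object EmptyMarker   // fine: the usual truly parameterless alternative
    // case class Broken[T] extends AnyRef   // no longer compiles under 2.11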
diff --git a/test/files/pos/inferbroadtype.scala b/test/files/pos/inferbroadtype.scala
index 467bd0f..de8f7aa 100644
--- a/test/files/pos/inferbroadtype.scala
+++ b/test/files/pos/inferbroadtype.scala
@@ -2,7 +2,7 @@ object Test {
   abstract class Base { val changesBaseClasses: Boolean }
   class Concrete extends Base { val changesBaseClasses = true }
   def getBase : Base = new Concrete
-  
+
   var c = new Base { val changesBaseClasses = true }
   c = getBase
 }
diff --git a/test/files/pos/infersingle.scala b/test/files/pos/infersingle.scala
index 6830fcd..60f4ff0 100644
--- a/test/files/pos/infersingle.scala
+++ b/test/files/pos/infersingle.scala
@@ -1,5 +1,52 @@
-object Test {
+object Test1 {
   def one[T](x: T): Option[T] = Some(x)
   val x = "one"
   val y: Option[x.type] = one(x)
-}
\ No newline at end of file
+}
+
+object Test2 {
+  // Has never worked, but seems desirable given the recent changes to
+  // pattern type inference.
+  val a = ""
+  object Id {
+    def unapply(xxxx: Any): Some[a.type] = Some[a.type](a)
+  }
+  val b: a.type = (a: a.type) match {
+    case Id(x) => x
+  }
+}
+
+object Test3 {
+  val a = ""
+  object Id {
+    def unapply(xxxx: Any): Some[Test3.type] = Some[Test3.type](Test3)
+  }
+  val b: Test3.type = a match {
+    case Id(x) => x
+  }
+}
+
+class Test4 {
+  val a = ""
+  object Id {
+    def unapply(xxxx: Any): Some[Test4.this.type] = Some[Test4.this.type](Test4.this)
+  }
+  val b: Test4.this.type = a match {
+    case Id(x) => x
+  }
+}
+
+class Super5 {
+  final val q = ""
+  def q1: q.type = q
+}
+
+class Test5 extends Super5 {
+  val a = ""
+  object Id {
+    def unapply(xxxx: Any): Some[Test5.super.q.type] = Some[Test5.super.q.type](q1)
+  }
+  val b: Test5.super.q.type = a match {
+    case Id(x) => x
+  }
+}
diff --git a/test/files/pos/inliner2.scala b/test/files/pos/inliner2.scala
index fe231ec..bc83e04 100644
--- a/test/files/pos/inliner2.scala
+++ b/test/files/pos/inliner2.scala
@@ -10,7 +10,7 @@ class A {
   final def bob2() = if (debug) 1 else 2
 }
 // Cool:
-// 
+//
 // % ls -1 /tmp/2901/
 // A$$anonfun$bob1$1.class
 // A$$anonfun$bob1$2.class
@@ -20,7 +20,7 @@ class A {
 // A.class
 //
 // Observations:
-// 
+//
 // (1) The inlined version accesses the field: the explicit one calls the accessor.
 // (2) The inlined version fails to eliminate boxing.  With reference types it emits
 //     an unneeded checkcast.
@@ -30,7 +30,7 @@ class A {
 //     inlined at all sites.
 //
 // Generated bytecode for the above:
-// 
+//
 // public final int bob1();
 //   Code:
 //    Stack=1, Locals=1, Args_size=1
@@ -44,7 +44,7 @@ class A {
 //    15: invokestatic  #41; //Method scala/runtime/BoxesRunTime.boxToInteger:(I)Ljava/lang/Integer;
 //    18: invokestatic  #45; //Method scala/runtime/BoxesRunTime.unboxToInt:(Ljava/lang/Object;)I
 //    21: ireturn
-// 
+//
 // public final int bob2();
 //   Code:
 //    Stack=1, Locals=1, Args_size=1
diff --git a/test/files/pos/java-access-pos/J.java b/test/files/pos/java-access-pos/J.java
index 4f20246..b6bc336 100644
--- a/test/files/pos/java-access-pos/J.java
+++ b/test/files/pos/java-access-pos/J.java
@@ -4,11 +4,11 @@ public abstract class J {
   public J() { }
   J(int x1) { }
   protected J(int x1, int x2) { }
-  
+
   abstract void packageAbstract();
   protected abstract void protectedAbstract();
   public abstract void publicAbstract();
-  
+
   void packageConcrete() { return; }
   protected void protectedConcrete() { return; }
   public void publicConcrete() { return; }
diff --git a/test/files/pos/java-access-pos/S1.scala b/test/files/pos/java-access-pos/S1.scala
index cc739d9..10730e3 100644
--- a/test/files/pos/java-access-pos/S1.scala
+++ b/test/files/pos/java-access-pos/S1.scala
@@ -6,7 +6,7 @@ class S1 extends J {
   override private[b] def packageAbstract() = ()
   override protected[b] def protectedAbstract() = ()
   override def publicAbstract() = ()
-  
+
   override private[b] def packageConcrete() = ()
   override protected[b] def protectedConcrete() = ()
   override def publicConcrete() = ()
@@ -26,7 +26,7 @@ class S3 extends J {
   protected[b] def packageAbstract() = ()
   protected[b] def protectedAbstract() = ()
   def publicAbstract() = ()
-  
+
   override protected[b] def packageConcrete() = ()
   override protected[b] def protectedConcrete() = ()
   override def publicConcrete() = ()
@@ -37,7 +37,7 @@ class S4 extends J {
   private[a] def packageAbstract() = ()
   protected[a] def protectedAbstract() = ()
   def publicAbstract() = ()
-  
+
   override private[a] def packageConcrete() = ()
   override protected[a] def protectedConcrete() = ()
   override def publicConcrete() = ()
@@ -48,14 +48,14 @@ class S5 extends J {
   def packageAbstract() = ()
   def protectedAbstract() = ()
   def publicAbstract() = ()
-  
+
   override def packageConcrete() = ()
   override def protectedConcrete() = ()
   override def publicConcrete() = ()
 }
 /** Constructors.
  */
-class S6 extends J(1) {  
+class S6 extends J(1) {
   def packageAbstract() = ()
   def protectedAbstract() = ()
   def publicAbstract() = ()
diff --git a/test/files/pos/javaConversions-2.10-ambiguity.scala b/test/files/pos/javaConversions-2.10-ambiguity.scala
new file mode 100644
index 0000000..c4aad6c
--- /dev/null
+++ b/test/files/pos/javaConversions-2.10-ambiguity.scala
@@ -0,0 +1,10 @@
+import collection.{JavaConversions, mutable, concurrent}
+import JavaConversions._
+import java.util.concurrent.{ConcurrentHashMap => CHM}
+
+object Bar {
+  def assertType[T](t: T) = t
+  val a = new CHM[String, String]() += (("", ""))
+  assertType[concurrent.Map[String, String]](a)
+}
+// vim: set et:
diff --git a/test/files/pos/javaConversions-2.10-regression.scala b/test/files/pos/javaConversions-2.10-regression.scala
index e1b8101..7c7ff03 100644
--- a/test/files/pos/javaConversions-2.10-regression.scala
+++ b/test/files/pos/javaConversions-2.10-regression.scala
@@ -3,10 +3,10 @@ import JavaConversions._
 import java.util.concurrent.{ConcurrentHashMap => CHM}
 
 object Foo {
-  def buildCache2_9_simple[K <: AnyRef, V <: AnyRef]: mutable.ConcurrentMap[K, V] =
-    asScalaConcurrentMap(new CHM())
+  def buildCache2_9_simple[K <: AnyRef, V <: AnyRef]: concurrent.Map[K, V] =
+    mapAsScalaConcurrentMap(new CHM())
 
-  def buildCache2_9_implicit[K <: AnyRef, V <: AnyRef]: mutable.ConcurrentMap[K, V] =
+  def buildCache2_9_implicit[K <: AnyRef, V <: AnyRef]: concurrent.Map[K, V] =
     new CHM[K, V]()
 }
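The rewrite above tracks the 2.11 collections API: scala.collection.mutable.ConcurrentMap and asScalaConcurrentMap are gone, replaced by scala.collection.concurrent.Map and mapAsScalaConcurrentMap. A standalone sketch of the surviving spelling (hypothetical object name, not part of this patch):

    import java.util.concurrent.{ConcurrentHashMap => CHM}
    import scala.collection.concurrent
    import scala.collection.JavaConversions.mapAsScalaConcurrentMap

    object CacheSketch {
      // A Java ConcurrentHashMap viewed through a scala.collection.concurrent.Map wrapper.
      def build[K <: AnyRef, V <: AnyRef]: concurrent.Map[K, V] =
        mapAsScalaConcurrentMap(new CHM[K, V]())
    }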
 
diff --git a/test/files/pos/javaReadsSigs/fromjava.java b/test/files/pos/javaReadsSigs/fromjava.java
index eca6396..92441b0 100644
--- a/test/files/pos/javaReadsSigs/fromjava.java
+++ b/test/files/pos/javaReadsSigs/fromjava.java
@@ -22,11 +22,11 @@ class B { };
 class Contra {
   // Not an Ordering<Character>.
   static Ordering<Object> charOrd = scala.math.Ordering.Char$.MODULE$;
-  
+
   public boolean useCharOrd() {
     return charOrd.compare(new Object(), new Object()) == 0;
   }
-  
+
   static Numeric<?> intNum = scala.math.Numeric.IntIsIntegral$.MODULE$;
 }
 
@@ -36,13 +36,13 @@ public class fromjava {
       return null;
     }
   };
-  
+
   public static Function1<Tuple2<? extends Object, B>, B> f2 = new scala.runtime.AbstractFunction1<Tuple2<? extends Object, B>, B>() {
     public B apply(Tuple2<? extends Object, B> tup) {
       return tup._2();
     }
   };
-  
+
   public static String vector(Vector<String> x) {
     Vector<String> y = x.take(2);
     return y.head();
diff --git a/test/files/pos/kinds.scala b/test/files/pos/kinds.scala
new file mode 100644
index 0000000..6d6da0c
--- /dev/null
+++ b/test/files/pos/kinds.scala
@@ -0,0 +1,13 @@
+trait IllKind1 {
+  def g(s: String): String = s
+  def f: String = ???
+  def f[C](c: C): String = g(f)
+}
+
+trait IllKind2 {
+  def b1: Char = ???
+  def b2: Byte = ???
+
+  def f1 = "abc" contains b1
+  def f2 = "abc" contains b2
+}
diff --git a/test/files/pos/lambdalift.scala b/test/files/pos/lambdalift.scala
index 10bbf3a..bc997d6 100644
--- a/test/files/pos/lambdalift.scala
+++ b/test/files/pos/lambdalift.scala
@@ -11,5 +11,5 @@ object test {
       def h() = x;
     }
     g() + new inner().g();
-  }		
+  }
 }
diff --git a/test/files/pos/liftcode_polymorphic.scala b/test/files/pos/liftcode_polymorphic.scala
index 8f537d2..249f5a0 100644
--- a/test/files/pos/liftcode_polymorphic.scala
+++ b/test/files/pos/liftcode_polymorphic.scala
@@ -1,6 +1,6 @@
 import scala.reflect.runtime.universe._
 
-object Append extends Application {
+object Append extends App {
 
   def append[A](l1: List[A], l2: List[A]):List[A] =
     l1 match {
diff --git a/test/files/pos/list-optim-check.flags b/test/files/pos/list-optim-check.flags
new file mode 100644
index 0000000..49d036a
--- /dev/null
+++ b/test/files/pos/list-optim-check.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/pos/list-optim-check.scala b/test/files/pos/list-optim-check.scala
new file mode 100644
index 0000000..f6e6dde
--- /dev/null
+++ b/test/files/pos/list-optim-check.scala
@@ -0,0 +1,21 @@
+// Tests a map known to crash in optimizer with faster List map in SI-8240.
+// Equivalent tests for collect and flatmap do not crash, but are provided
+// anyway.
+// See ticket SI-8334 for optimizer bug.
+// TODO - Remove this test once SI-8334 is fixed and has its own test.
+class A {
+  def f: Boolean = {
+    val xs = Nil map (_ => return false)
+    true
+  }
+
+  def g: Boolean = {
+    val xs = Nil collect { case _ => return false }
+    true
+  }
+
+  def h: Boolean = {
+    val xs = Nil flatMap { _ => return false }
+    true
+  }
+}
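The crash guarded against here comes from non-local returns inside the closures handed to map/collect/flatMap. A rough sketch (not part of this patch) of how scalac encodes such a return, which is what the optimizer then has to cope with:

    import scala.runtime.NonLocalReturnControl

    class Desugared {
      // Roughly what a body like `{ xs map (i => if (i == 2) return false else i); true }`
      // becomes once the non-local return is encoded as a control-flow exception.
      def f: Boolean = {
        val key = new AnyRef
        try {
          List(1, 2, 3).map { i =>
            if (i == 2) throw new NonLocalReturnControl(key, false) else i
          }
          true
        } catch {
          case e: NonLocalReturnControl[_] if e.key eq key =>
            e.value.asInstanceOf[Boolean]
        }
      }
    }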
diff --git a/test/files/pos/listpattern.scala b/test/files/pos/listpattern.scala
index b640684..47145bf 100644
--- a/test/files/pos/listpattern.scala
+++ b/test/files/pos/listpattern.scala
@@ -1,7 +1,7 @@
 trait Value {}
 case class FloatValue(x: Double) extends Value
 object Test {
-  def applyNumeric(op: (Double, Double) => Double): 
+  def applyNumeric(op: (Double, Double) => Double):
     PartialFunction[List[Value], Value] = {
     case List(FloatValue(x), FloatValue(y)) => FloatValue(op(x, y))
   }
diff --git a/test/files/pos/lookupswitch.scala b/test/files/pos/lookupswitch.scala
index 33594c0..5d48251 100644
--- a/test/files/pos/lookupswitch.scala
+++ b/test/files/pos/lookupswitch.scala
@@ -34,4 +34,3 @@ class A {
     case 20 => "20"
   }
 }
-  
\ No newline at end of file
diff --git a/test/files/pos/looping-jsig.scala b/test/files/pos/looping-jsig.scala
index e2d9e76..6e3313c 100644
--- a/test/files/pos/looping-jsig.scala
+++ b/test/files/pos/looping-jsig.scala
@@ -1,15 +1,15 @@
 import scala.collection.mutable._
 
 trait BugTrack {
-    trait B[+T] 
+    trait B[+T]
     val cache : HashMap[A[_], B[_]] = HashMap.empty
 
-    def A[T](f: Int => B[T]): A[T] 
+    def A[T](f: Int => B[T]): A[T]
         = new A[T]{def apply(in: Int) = f(in)}
-        
+
     abstract class A[+T] extends (Int => B[T]) {
       def giveMeSame = this
-    }  
+    }
 
     def amethod[T](p: =>A[T]): A[T] = A(in => cache.get(p) match {
            case Some(res) => res
diff --git a/test/files/pos/lub-dealias-widen.scala b/test/files/pos/lub-dealias-widen.scala
index 38854fb..8d26708 100644
--- a/test/files/pos/lub-dealias-widen.scala
+++ b/test/files/pos/lub-dealias-widen.scala
@@ -27,7 +27,7 @@ object Test {
      = p2 >> { (xs: List[String]) => 0 }
 
   // This works after https://github.com/scala/scala/commit/a06d31f6a
-  // Before: error: inferred type arguments [List[String] => String,List[String] => String] 
+  // Before: error: inferred type arguments [List[String] => String,List[String] => String]
   //         do not conform to method &'s type parameter bounds
   //         [G <: H,H >: Int => (Int => String)]
   val s = r & r2
diff --git a/test/disabled/presentation/ide-bug-1000450.check b/test/files/pos/macro-bundle-disambiguate-bundle.check
similarity index 100%
rename from test/disabled/presentation/ide-bug-1000450.check
rename to test/files/pos/macro-bundle-disambiguate-bundle.check
diff --git a/test/files/pos/macro-bundle-disambiguate-bundle.scala b/test/files/pos/macro-bundle-disambiguate-bundle.scala
new file mode 100644
index 0000000..0480931
--- /dev/null
+++ b/test/files/pos/macro-bundle-disambiguate-bundle.scala
@@ -0,0 +1,14 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+class Macros(val c: Context) {
+  def impl = ???
+}
+
+object Macros {
+  def impl(c: Context)(x: c.Tree) = ???
+}
+
+object Test extends App {
+  def foo: Unit = macro Macros.impl
+}
\ No newline at end of file
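This pair of tests exercises macro bundles, new in 2.11: the macro implementation may live in a class that receives the Context through its constructor, and `macro Macros.impl` has to resolve between the bundle and the plain object. A small standalone sketch of the bundle shape (hypothetical names, not part of this patch; as with any macro, the bundle must be compiled before its call sites):

    import scala.language.experimental.macros
    import scala.reflect.macros.whitebox

    // The Context arrives via the constructor, so impl methods can use
    // c.universe directly instead of threading a `c` parameter around.
    class HelloBundle(val c: whitebox.Context) {
      import c.universe._
      def impl: c.Tree = q"""println("hello from a bundle")"""
    }

    object Hello {
      def hello: Unit = macro HelloBundle.impl
    }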
diff --git a/test/disabled/presentation/ide-bug-1000545.check b/test/files/pos/macro-bundle-disambiguate-nonbundle.check
similarity index 100%
rename from test/disabled/presentation/ide-bug-1000545.check
rename to test/files/pos/macro-bundle-disambiguate-nonbundle.check
diff --git a/test/files/pos/macro-bundle-disambiguate-nonbundle.scala b/test/files/pos/macro-bundle-disambiguate-nonbundle.scala
new file mode 100644
index 0000000..cb66f28
--- /dev/null
+++ b/test/files/pos/macro-bundle-disambiguate-nonbundle.scala
@@ -0,0 +1,14 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+class Macros(val c: Context) {
+  def impl(x: c.Tree) = ???
+}
+
+object Macros {
+  def impl(c: Context) = ???
+}
+
+object Test extends App {
+  def foo: Unit = macro Macros.impl
+}
\ No newline at end of file
diff --git a/test/files/continuations-run/z1673.check b/test/files/pos/macro-implicit-invalidate-on-error.check
similarity index 100%
rename from test/files/continuations-run/z1673.check
rename to test/files/pos/macro-implicit-invalidate-on-error.check
diff --git a/test/files/pos/macro-implicit-invalidate-on-error.scala b/test/files/pos/macro-implicit-invalidate-on-error.scala
new file mode 100644
index 0000000..bb83e3c
--- /dev/null
+++ b/test/files/pos/macro-implicit-invalidate-on-error.scala
@@ -0,0 +1,25 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+trait LegacyLiftable[T] {
+  def apply(universe: scala.reflect.api.Universe, value: T): universe.Tree
+}
+
+object LegacyLiftable {
+  implicit def liftCaseClass[T <: Product]: LegacyLiftable[T] = macro liftCaseClassImpl[T]
+
+  def liftCaseClassImpl[T: c.WeakTypeTag](c: Context): c.Expr[LegacyLiftable[T]] = {
+    import c.universe._
+    val tpe = weakTypeOf[T]
+    if (!tpe.typeSymbol.asClass.isCaseClass) c.abort(c.enclosingPosition, "denied")
+    val p = List(q"Literal(Constant(1))")
+    c.Expr[LegacyLiftable[T]] { q"""
+      new LegacyLiftable[$tpe] {
+        def apply(universe: scala.reflect.api.Universe, value: $tpe): universe.Tree = {
+          import universe._
+          Apply(Select(Ident(TermName("C")), TermName("apply")), List(..$p))
+        }
+      }
+    """ }
+  }
+}
diff --git a/test/files/pos/matchStarlift.scala b/test/files/pos/matchStarlift.scala
deleted file mode 100644
index dab46ea..0000000
--- a/test/files/pos/matchStarlift.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object Tet {
-  import scala.xml._;
-  def fooz(x: Node=>String) = {}
-    def foo( m:Node ):Unit = fooz {
-      case Elem(_,_,_,_,n,_*) if (n == m) => "gaga"
-    }
-}
diff --git a/test/files/pos/michel6.scala b/test/files/pos/michel6.scala
index f312bf1..b32e8be 100644
--- a/test/files/pos/michel6.scala
+++ b/test/files/pos/michel6.scala
@@ -1,6 +1,6 @@
 object M {
    def f(x: Int): Unit = {}
- 
+
    def g(): Int => Unit =
      if (0 == 0) f else g()
  }
diff --git a/test/files/pos/needstypeearly.scala b/test/files/pos/needstypeearly.scala
index bd93b5c..a90c257 100644
--- a/test/files/pos/needstypeearly.scala
+++ b/test/files/pos/needstypeearly.scala
@@ -1,4 +1,4 @@
-abstract class NeedsXEarly { 
-  val x: Int 
+abstract class NeedsXEarly {
+  val x: Int
 }
 class Foo extends { val x = 1 } with NeedsXEarly
diff --git a/test/files/pos/nothing_manifest_disambig-old.scala b/test/files/pos/nothing_manifest_disambig-old.scala
index 0767420..9a3db0c 100644
--- a/test/files/pos/nothing_manifest_disambig-old.scala
+++ b/test/files/pos/nothing_manifest_disambig-old.scala
@@ -1,10 +1,10 @@
 object Test {
   def mani[T: Manifest](xs: T) = xs
   mani(List())
- 
+
   def listElMani[T: Manifest](xs: List[T]) = xs
   listElMani(List())
- 
+
   def foo[A, C](m : C)(implicit ev: C <:< Traversable[A], mani: Manifest[A]): (C, A, Manifest[A]) = (m, m.head, mani)
-  foo(List(1,2,3)) 
+  foo(List(1,2,3))
 }
\ No newline at end of file
diff --git a/test/files/pos/nullary.scala b/test/files/pos/nullary.scala
index 8e5a834..614fcdf 100644
--- a/test/files/pos/nullary.scala
+++ b/test/files/pos/nullary.scala
@@ -2,7 +2,7 @@ abstract class NullaryTest[T, m[s]] {
   def nullary: String = "a"
   val x = nullary
 
-  def nullary2: T 
+  def nullary2: T
   val x2 = nullary2
 
   def nullary3: m[T]
@@ -16,5 +16,5 @@ class Concrete extends NullaryTest[Int, List] {
 
 object test {
         (new Concrete).nullary2
-        (new Concrete).nullary3        
+        (new Concrete).nullary3
 }
diff --git a/test/files/pos/nullary_poly.scala b/test/files/pos/nullary_poly.scala
index 4de7235..d2e1e12 100644
--- a/test/files/pos/nullary_poly.scala
+++ b/test/files/pos/nullary_poly.scala
@@ -2,9 +2,9 @@
 class A {
   // built-in
   synchronized {}
-  
+
   val x: String = "a".asInstanceOf[String]
-  
+
   // user-defined:
   def polyNullary[T]: List[T] = Nil
 }
diff --git a/test/files/pos/optmatch.scala b/test/files/pos/optmatch.scala
new file mode 100644
index 0000000..354be65
--- /dev/null
+++ b/test/files/pos/optmatch.scala
@@ -0,0 +1,33 @@
+// final case class NonZeroLong(value: Long) extends AnyVal {
+//   def get: Long = value
+//   def isEmpty: Boolean = get == 0l
+// }
+
+class NonZeroLong(val value: Long) extends AnyVal {
+  def get: Long = value
+  def isEmpty: Boolean = get == 0l
+}
+object NonZeroLong {
+  def unapply(value: Long): NonZeroLong = new NonZeroLong(value)
+}
+
+
+object Foo {
+  def unapply(x: Int): NonZeroLong = new NonZeroLong(1L << x)
+  // public long unapply(int);
+  //        0: lconst_1
+  //        1: iload_1
+  //        2: lshl
+  //        3: lreturn
+}
+
+object Test {
+  def f(x: Int): Int = x match {
+    case Foo(1024l) => 1
+    case _          => 2
+  }
+  def main(args: Array[String]): Unit = {
+    println(f(10))
+    println(f(11))
+  }
+}
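optmatch.scala leans on 2.11's name-based pattern matching: an extractor result only needs isEmpty and get, so a value class like NonZeroLong can play the role of an Option without allocating. The same idea as a standalone sketch with hypothetical names (not part of this patch):

    // Result carrier: a value class satisfying the isEmpty/get contract.
    class MaybeInt(val value: Int) extends AnyVal {
      def isEmpty: Boolean = value < 0
      def get: Int = value
    }

    object NonNegative {
      def unapply(x: Int): MaybeInt = new MaybeInt(x)
    }

    object Demo {
      def describe(x: Int): String = x match {
        case NonNegative(n) => "non-negative: " + n
        case _              => "negative"
      }
    }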
diff --git a/test/files/pos/overloaded-unapply.scala b/test/files/pos/overloaded-unapply.scala
new file mode 100644
index 0000000..4105a25
--- /dev/null
+++ b/test/files/pos/overloaded-unapply.scala
@@ -0,0 +1,8 @@
+trait Baz {
+  type Type >: Null
+
+  case class HoleType(a: String, b: String, c: String)
+  object HoleType { def unapply(tpe: Type): Option[HoleType] = ??? }
+
+  (null: Type) match { case HoleType(holeTpe) => holeTpe }
+}
diff --git a/test/files/pos/override-object-yes.scala b/test/files/pos/override-object-yes.scala
index b0563df..858f9b2 100644
--- a/test/files/pos/override-object-yes.scala
+++ b/test/files/pos/override-object-yes.scala
@@ -14,8 +14,8 @@ package case1 {
       override def f = 3
     }
   }
-  
-  trait Foo3 {      
+
+  trait Foo3 {
     object Bar {
       def g: Traversable[Int] = Nil
     }
diff --git a/test/files/pos/overzealous-assert-genbcode.scala b/test/files/pos/overzealous-assert-genbcode.scala
new file mode 100644
index 0000000..ddd70b0
--- /dev/null
+++ b/test/files/pos/overzealous-assert-genbcode.scala
@@ -0,0 +1,10 @@
+object Test {
+
+  def main(args: Array[String]) {
+    args(0) match {
+      case a: String => while(a == null) {}
+    }
+  }
+
+}
+
diff --git a/test/files/neg/case-collision.flags b/test/files/pos/package-ob-case.flags
similarity index 100%
copy from test/files/neg/case-collision.flags
copy to test/files/pos/package-ob-case.flags
diff --git a/test/files/neg/package-ob-case.scala b/test/files/pos/package-ob-case/A_1.scala
similarity index 100%
copy from test/files/neg/package-ob-case.scala
copy to test/files/pos/package-ob-case/A_1.scala
diff --git a/test/files/neg/package-ob-case.scala b/test/files/pos/package-ob-case/B_2.scala
similarity index 100%
rename from test/files/neg/package-ob-case.scala
rename to test/files/pos/package-ob-case/B_2.scala
diff --git a/test/files/pos/partialfun.scala b/test/files/pos/partialfun.scala
index d8971e5..9f32a22 100644
--- a/test/files/pos/partialfun.scala
+++ b/test/files/pos/partialfun.scala
@@ -1,6 +1,6 @@
 object partialfun {
 
-  def applyPartial[b](f: PartialFunction[Option[String], b])(x: Option[String]) = 
+  def applyPartial[b](f: PartialFunction[Option[String], b])(x: Option[String]) =
     if (f.isDefinedAt(x)) f(x) else "<undefined>";
 
   applyPartial {
diff --git a/test/files/pos/pat_gilles.scala b/test/files/pos/pat_gilles.scala
index 567d700..704d5b9 100644
--- a/test/files/pos/pat_gilles.scala
+++ b/test/files/pos/pat_gilles.scala
@@ -1,7 +1,7 @@
 abstract class Table2 {
 
 
-  val x: Any => Unit = { zz:Any => 
+  val x: Any => Unit = { zz:Any =>
     zz match {
     case Table2.CellUpdated(row, column) =>
       val foo = Table2.CellUpdated(2,2)
diff --git a/test/files/pos/patmat-extract-tparam.scala b/test/files/pos/patmat-extract-tparam.scala
new file mode 100644
index 0000000..6417b49
--- /dev/null
+++ b/test/files/pos/patmat-extract-tparam.scala
@@ -0,0 +1,13 @@
+trait Bip[T] { def h: T }
+trait BoolBip extends Bip[Boolean]
+
+class A {
+  def g(x: Boolean): Unit = ()
+  def f(xs: List[Bip[_]]) = xs foreach { case x: BoolBip => g(x.h) }
+}
+
+class B {
+  def g(x: Boolean): Unit = ()
+  def g(x: Int): Unit = ()
+  def f(xs: List[Bip[_]]) = xs foreach { case x: BoolBip => g(x.h) }
+}
diff --git a/test/files/pos/patmat.scala b/test/files/pos/patmat.scala
index 4e652b1..51b879a 100644
--- a/test/files/pos/patmat.scala
+++ b/test/files/pos/patmat.scala
@@ -3,8 +3,8 @@
 
 object ZipFun {
   //just compilation
-  def zipFun[a, b](xs: List[a], ys: List[b]): List[Pair[a, b]] = (Pair(xs, ys): @unchecked) match {
-    // !!! case Pair(List(), _), Pair(_, List()) => List()
+  def zipFun[a, b](xs: List[a], ys: List[b]): List[Tuple2[a, b]] = ((xs, ys): @unchecked) match {
+    // !!! case (List(), _), (_, List()) => List()
     case (x :: xs1, y :: ys1) => (x, y) :: zipFun(xs1, ys1)
   }
 }
diff --git a/test/files/pos/private-types-after-typer.scala b/test/files/pos/private-types-after-typer.scala
new file mode 100644
index 0000000..79ef934
--- /dev/null
+++ b/test/files/pos/private-types-after-typer.scala
@@ -0,0 +1,9 @@
+// Testing that the type of the outer accessor in O2
+// doesn't crash the compiler over private type escaping scope.
+trait T {
+  class C {
+     private object O1 {
+        object O2
+     }
+  }
+}
\ No newline at end of file
diff --git a/test/files/pos/propagate.scala b/test/files/pos/propagate.scala
index a7f9d6c..5881920 100644
--- a/test/files/pos/propagate.scala
+++ b/test/files/pos/propagate.scala
@@ -14,4 +14,3 @@ class C {
 
 
 
-  
diff --git a/test/files/jvm/t1116.check b/test/files/pos/reflection-compat-api-universe.check
similarity index 100%
rename from test/files/jvm/t1116.check
rename to test/files/pos/reflection-compat-api-universe.check
diff --git a/test/files/pos/reflection-compat-api-universe.scala b/test/files/pos/reflection-compat-api-universe.scala
new file mode 100644
index 0000000..0aee8bc
--- /dev/null
+++ b/test/files/pos/reflection-compat-api-universe.scala
@@ -0,0 +1,136 @@
+object Test extends App {
+  val u: scala.reflect.api.Universe = ???
+  import u._
+  import scala.reflect.ClassTag
+  import compat._
+
+  val tree: Tree = ???
+  val ttree: TypeTree = ???
+  val stree: SymTree = ???
+  val trees: List[Tree] = ???
+  val mods: Modifiers = ???
+  val impl: Template = ???
+  val vparamss: List[List[ValDef]] = ???
+  val rhs: Tree = ???
+  val sym: Symbol = ???
+  val tsym: TypeSymbol = ???
+  val syms: List[Symbol] = ???
+  val params: List[Symbol] = ???
+  val tparams: List[Symbol] = ???
+  val tpe: Type = ???
+  val tpes: List[Type] = ???
+  val manifest: Manifest[Int] = ???
+  val tag: TypeTag[Int] = ???
+  val mirror: Mirror = ???
+  val decls: Scope = ???
+  val pos: Position = ???
+  val ann: Annotation = ???
+  val anns: List[Annotation] = ???
+  val const: Constant = ???
+  val name: Name = ???
+  val tyname: TypeName = ???
+  val tename: TermName = ???
+  val flags: FlagSet = ???
+  val str: String = ???
+  val i: Int = ???
+  val b: Boolean = ???
+
+  // abstract class BuildApi
+  // abstract class ReferenceToBoxedExtractor
+  // abstract trait AttachableApi
+  // abstract trait FreeTermSymbolApi
+  // abstract trait FreeTypeSymbolApi
+  // abstract trait IdentContextApi
+  // abstract trait ReferenceToBoxedApi
+  // abstract trait SymTreeContextApi
+  // abstract trait SymbolContextApi
+  // abstract trait TreeContextApi
+  // abstract trait TypeTreeContextApi
+  locally(ClassDef(sym, impl): ClassDef)
+  locally(DefDef(sym, mods, vparamss, rhs): DefDef)
+  locally(DefDef(sym, vparamss, rhs): DefDef)
+  locally(DefDef(sym, mods, rhs): DefDef)
+  locally(DefDef(sym, rhs): DefDef)
+  locally(DefDef(sym, (??? : List[List[Symbol]] => Tree)): DefDef)
+  locally(LabelDef(sym, params, rhs): LabelDef)
+  locally(ModuleDef(sym, impl): ModuleDef)
+  locally(TypeDef(sym, rhs): TypeDef)
+  locally(TypeDef(sym): TypeDef)
+  locally(ValDef(sym, rhs): ValDef)
+  locally(ValDef(sym): ValDef)
+  locally(AnnotatedType(anns, tpe): AnnotatedType)
+  locally(BoundedWildcardType(??? : TypeBounds): BoundedWildcardType)
+  locally(TypeBounds(tpe, tpe): TypeBounds)
+  locally(MethodType(params, tpe): MethodType)
+  locally(RefinedType(tpes, decls): RefinedType)
+  locally(RefinedType(tpes, decls, sym): RefinedType)
+  locally(ClassInfoType(tpes, decls, sym): ClassInfoType)
+  locally(SingleType(tpe, sym): Type)
+  locally(TypeRef(tpe, sym, tpes): Type)
+  locally(ExistentialType(syms, tpe): ExistentialType)
+  locally(NullaryMethodType(tpe): NullaryMethodType)
+  locally(ThisType(sym): Type)
+  locally(SuperType(tpe, tpe): Type)
+  locally(PolyType(syms, tpe): PolyType)
+  locally(ConstantType(const): ConstantType)
+  locally(sym.asFreeTerm: FreeTermSymbol)
+  locally(sym.asFreeType: FreeTypeSymbol)
+  locally(existentialAbstraction(tparams, tpe): Type)
+  locally(tree.freeTerms: List[FreeTermSymbol])
+  locally(tree.freeTypes: List[FreeTypeSymbol])
+  locally(intersectionType(tpes): Type)
+  locally(intersectionType(tpes, sym): Type)
+  locally(sym.isErroneous: Boolean)
+  locally(sym.isFreeTerm: Boolean)
+  locally(sym.isFreeType: Boolean)
+  locally(sym.isLocal: Boolean)
+  locally(sym.isOverride: Boolean)
+  locally(tsym.isSkolem: Boolean)
+  locally(manifestToTypeTag(mirror, manifest): scala.reflect.api.Universe#TypeTag[Int])
+  locally(mkImporter(scala.reflect.runtime.universe): Importer{val from: scala.reflect.runtime.universe.type})
+  locally(sym.newClassSymbol(tyname, pos, flags): ClassSymbol)
+  locally(sym.newMethodSymbol(tename, pos, flags): MethodSymbol)
+  locally(sym.newModuleAndClassSymbol(name, pos, flags): (ModuleSymbol, ClassSymbol))
+  locally(newScopeWith(sym, sym, sym): Scope)
+  locally(sym.newTermSymbol(tename, pos, flags): TermSymbol)
+  locally(sym.newTypeSymbol(tyname, pos, flags): TypeSymbol)
+  locally(polyType(tparams, tpe): Type)
+  locally(sym.pos: Position)
+  locally(refinedType(tpes, sym): Type)
+  locally(refinedType(tpes, sym, decls, pos): Type)
+  locally(singleType(tpe, sym): Type)
+  locally(tree.substituteSymbols(syms, syms): Tree)
+  locally(tree.substituteThis(sym, tree): Tree)
+  locally(tree.substituteTypes(syms, tpes): Tree)
+  locally(typeRef(tpe, sym, tpes): Type)
+  locally(typeTagToManifest(mirror, tag): Manifest[Int])
+  locally(FreeTermSymbolTag: ClassTag[FreeTermSymbol])
+  locally((??? : FreeTermSymbol).origin)
+  locally((??? : FreeTermSymbol).value)
+  locally(FreeTypeSymbolTag: ClassTag[FreeTypeSymbol])
+  locally((??? : FreeTypeSymbol).origin)
+  locally(ReferenceToBoxedTag: ClassTag[ReferenceToBoxed])
+  locally(build: BuildApi)
+  locally(ReferenceToBoxed(??? : Ident): ReferenceToBoxed)
+  locally((??? : ReferenceToBoxed).ident: Tree)
+  locally(ReferenceToBoxed.unapply(???): Option[Ident])
+  locally(build.selectType(sym, str): TypeSymbol)
+  locally(build.selectTerm(sym, str): TermSymbol)
+  locally(build.selectOverloadedMethod(sym, str, i): MethodSymbol)
+  locally(build.newNestedSymbol(sym, name, pos, flags, b): Symbol)
+  locally(build.newFreeTerm(str, i): FreeTermSymbol)
+  locally(build.newFreeTerm(str, i, flags, str): FreeTermSymbol)
+  locally(build.newFreeType(str): FreeTypeSymbol)
+  locally(build.newFreeType(str, flags, str): FreeTypeSymbol)
+  locally(build.setTypeSignature(sym, tpe): Symbol)
+  locally(build.setAnnotations(sym, anns): Symbol)
+  locally(build.flagsFromBits(??? : Long): FlagSet)
+  locally(build.emptyValDef: ValDef)
+  locally(build.This(sym): Tree)
+  locally(build.Select(tree, sym): Select)
+  locally(build.Ident(sym): Ident)
+  locally(build.TypeTree(tpe): TypeTree)
+  locally(build.thisPrefix(sym): Type)
+  locally(build.setType(tree, tpe): Tree)
+  locally(build.setSymbol(tree, sym): Tree)
+}
\ No newline at end of file
diff --git a/test/files/jvm/t1143.check b/test/files/pos/reflection-compat-c.check
similarity index 100%
rename from test/files/jvm/t1143.check
rename to test/files/pos/reflection-compat-c.check
diff --git a/test/files/pos/reflection-compat-c.scala b/test/files/pos/reflection-compat-c.scala
new file mode 100644
index 0000000..73158de
--- /dev/null
+++ b/test/files/pos/reflection-compat-c.scala
@@ -0,0 +1,139 @@
+import scala.reflect.macros.Context
+
+object Test extends App {
+  def impl(c: Context) = {
+    import c.universe._
+    import scala.reflect.ClassTag
+    import compat._
+
+    val tree: Tree = ???
+    val ttree: TypeTree = ???
+    val stree: SymTree = ???
+    val trees: List[Tree] = ???
+    val mods: Modifiers = ???
+    val impl: Template = ???
+    val vparamss: List[List[ValDef]] = ???
+    val rhs: Tree = ???
+    val sym: Symbol = ???
+    val tsym: TypeSymbol = ???
+    val syms: List[Symbol] = ???
+    val params: List[Symbol] = ???
+    val tparams: List[Symbol] = ???
+    val tpe: Type = ???
+    val tpes: List[Type] = ???
+    val manifest: Manifest[Int] = ???
+    val tag: TypeTag[Int] = ???
+    val mirror: Mirror = ???
+    val decls: Scope = ???
+    val pos: Position = ???
+    val ann: Annotation = ???
+    val anns: List[Annotation] = ???
+    val const: Constant = ???
+    val name: Name = ???
+    val tyname: TypeName = ???
+    val tename: TermName = ???
+    val flags: FlagSet = ???
+    val str: String = ???
+    val i: Int = ???
+    val b: Boolean = ???
+
+    // abstract class BuildApi
+    // abstract class ReferenceToBoxedExtractor
+    // abstract trait AttachableApi
+    // abstract trait FreeTermSymbolApi
+    // abstract trait FreeTypeSymbolApi
+    // abstract trait IdentContextApi
+    // abstract trait ReferenceToBoxedApi
+    // abstract trait SymTreeContextApi
+    // abstract trait SymbolContextApi
+    // abstract trait TreeContextApi
+    // abstract trait TypeTreeContextApi
+    locally(ClassDef(sym, impl): ClassDef)
+    locally(DefDef(sym, mods, vparamss, rhs): DefDef)
+    locally(DefDef(sym, vparamss, rhs): DefDef)
+    locally(DefDef(sym, mods, rhs): DefDef)
+    locally(DefDef(sym, rhs): DefDef)
+    locally(DefDef(sym, (??? : List[List[Symbol]] => Tree)): DefDef)
+    locally(LabelDef(sym, params, rhs): LabelDef)
+    locally(ModuleDef(sym, impl): ModuleDef)
+    locally(TypeDef(sym, rhs): TypeDef)
+    locally(TypeDef(sym): TypeDef)
+    locally(ValDef(sym, rhs): ValDef)
+    locally(ValDef(sym): ValDef)
+    locally(AnnotatedType(anns, tpe): AnnotatedType)
+    locally(BoundedWildcardType(??? : TypeBounds): BoundedWildcardType)
+    locally(TypeBounds(tpe, tpe): TypeBounds)
+    locally(MethodType(params, tpe): MethodType)
+    locally(RefinedType(tpes, decls): RefinedType)
+    locally(RefinedType(tpes, decls, sym): RefinedType)
+    locally(ClassInfoType(tpes, decls, sym): ClassInfoType)
+    locally(SingleType(tpe, sym): Type)
+    locally(TypeRef(tpe, sym, tpes): Type)
+    locally(ExistentialType(syms, tpe): ExistentialType)
+    locally(NullaryMethodType(tpe): NullaryMethodType)
+    locally(ThisType(sym): Type)
+    locally(SuperType(tpe, tpe): Type)
+    locally(PolyType(syms, tpe): PolyType)
+    locally(ConstantType(const): ConstantType)
+    locally(sym.asFreeTerm: FreeTermSymbol)
+    locally(sym.asFreeType: FreeTypeSymbol)
+    locally(existentialAbstraction(tparams, tpe): Type)
+    locally(tree.freeTerms: List[FreeTermSymbol])
+    locally(tree.freeTypes: List[FreeTypeSymbol])
+    locally(intersectionType(tpes): Type)
+    locally(intersectionType(tpes, sym): Type)
+    locally(sym.isErroneous: Boolean)
+    locally(sym.isFreeTerm: Boolean)
+    locally(sym.isFreeType: Boolean)
+    locally(sym.isLocal: Boolean)
+    locally(sym.isOverride: Boolean)
+    locally(tsym.isSkolem: Boolean)
+    locally(manifestToTypeTag(mirror, manifest): scala.reflect.api.Universe#TypeTag[Int])
+    locally(mkImporter(scala.reflect.runtime.universe): Importer{val from: scala.reflect.runtime.universe.type})
+    locally(sym.newClassSymbol(tyname, pos, flags): ClassSymbol)
+    locally(sym.newMethodSymbol(tename, pos, flags): MethodSymbol)
+    locally(sym.newModuleAndClassSymbol(name, pos, flags): (ModuleSymbol, ClassSymbol))
+    locally(newScopeWith(sym, sym, sym): Scope)
+    locally(sym.newTermSymbol(tename, pos, flags): TermSymbol)
+    locally(sym.newTypeSymbol(tyname, pos, flags): TypeSymbol)
+    locally(polyType(tparams, tpe): Type)
+    locally(sym.pos: Position)
+    locally(refinedType(tpes, sym): Type)
+    locally(refinedType(tpes, sym, decls, pos): Type)
+    locally(singleType(tpe, sym): Type)
+    locally(tree.substituteSymbols(syms, syms): Tree)
+    locally(tree.substituteThis(sym, tree): Tree)
+    locally(tree.substituteTypes(syms, tpes): Tree)
+    locally(typeRef(tpe, sym, tpes): Type)
+    locally(typeTagToManifest(mirror, tag): Manifest[Int])
+    locally(FreeTermSymbolTag: ClassTag[FreeTermSymbol])
+    locally((??? : FreeTermSymbol).origin)
+    locally((??? : FreeTermSymbol).value)
+    locally(FreeTypeSymbolTag: ClassTag[FreeTypeSymbol])
+    locally((??? : FreeTypeSymbol).origin)
+    locally(ReferenceToBoxedTag: ClassTag[ReferenceToBoxed])
+    locally(build: BuildApi)
+    locally(ReferenceToBoxed(??? : Ident): ReferenceToBoxed)
+    locally((??? : ReferenceToBoxed).ident: Tree)
+    locally(ReferenceToBoxed.unapply(???): Option[Ident])
+    locally(build.selectType(sym, str): TypeSymbol)
+    locally(build.selectTerm(sym, str): TermSymbol)
+    locally(build.selectOverloadedMethod(sym, str, i): MethodSymbol)
+    locally(build.newNestedSymbol(sym, name, pos, flags, b): Symbol)
+    locally(build.newFreeTerm(str, i): FreeTermSymbol)
+    locally(build.newFreeTerm(str, i, flags, str): FreeTermSymbol)
+    locally(build.newFreeType(str): FreeTypeSymbol)
+    locally(build.newFreeType(str, flags, str): FreeTypeSymbol)
+    locally(build.setTypeSignature(sym, tpe): Symbol)
+    locally(build.setAnnotations(sym, anns): Symbol)
+    locally(build.flagsFromBits(??? : Long): FlagSet)
+    locally(build.emptyValDef: ValDef)
+    locally(build.This(sym): Tree)
+    locally(build.Select(tree, sym): Select)
+    locally(build.Ident(sym): Ident)
+    locally(build.TypeTree(tpe): TypeTree)
+    locally(build.thisPrefix(sym): Type)
+    locally(build.setType(tree, tpe): Tree)
+    locally(build.setSymbol(tree, sym): Tree)
+  }
+}
\ No newline at end of file
diff --git a/test/files/jvm/t1948.check b/test/files/pos/reflection-compat-macro-universe.check
similarity index 100%
rename from test/files/jvm/t1948.check
rename to test/files/pos/reflection-compat-macro-universe.check
diff --git a/test/files/pos/reflection-compat-macro-universe.scala b/test/files/pos/reflection-compat-macro-universe.scala
new file mode 100644
index 0000000..89ca36d
--- /dev/null
+++ b/test/files/pos/reflection-compat-macro-universe.scala
@@ -0,0 +1,177 @@
+object Test extends App {
+  val u: scala.reflect.macros.Universe = ???
+  import u._
+  import scala.reflect.macros.Attachments
+  import scala.reflect.ClassTag
+  import compat._
+
+  val tree: Tree = ???
+  val ttree: TypeTree = ???
+  val stree: SymTree = ???
+  val trees: List[Tree] = ???
+  val mods: Modifiers = ???
+  val impl: Template = ???
+  val vparamss: List[List[ValDef]] = ???
+  val rhs: Tree = ???
+  val sym: Symbol = ???
+  val tsym: TypeSymbol = ???
+  val syms: List[Symbol] = ???
+  val params: List[Symbol] = ???
+  val tparams: List[Symbol] = ???
+  val tpe: Type = ???
+  val tpes: List[Type] = ???
+  val manifest: Manifest[Int] = ???
+  val tag: TypeTag[Int] = ???
+  val mirror: Mirror = ???
+  val decls: Scope = ???
+  val pos: Position = ???
+  val ann: Annotation = ???
+  val anns: List[Annotation] = ???
+  val const: Constant = ???
+  val name: Name = ???
+  val tyname: TypeName = ???
+  val tename: TermName = ???
+  val flags: FlagSet = ???
+  val str: String = ???
+  val i: Int = ???
+  val b: Boolean = ???
+
+  // abstract class BuildApi
+  // abstract class ReferenceToBoxedExtractor
+  // abstract trait AttachableApi
+  // abstract trait FreeTermSymbolApi
+  // abstract trait FreeTypeSymbolApi
+  // abstract trait IdentContextApi
+  // abstract trait ReferenceToBoxedApi
+  // abstract trait SymTreeContextApi
+  // abstract trait SymbolContextApi
+  // abstract trait TreeContextApi
+  // abstract trait TypeTreeContextApi
+  locally(ClassDef(sym, impl): ClassDef)
+  locally(DefDef(sym, mods, vparamss, rhs): DefDef)
+  locally(DefDef(sym, vparamss, rhs): DefDef)
+  locally(DefDef(sym, mods, rhs): DefDef)
+  locally(DefDef(sym, rhs): DefDef)
+  locally(DefDef(sym, (??? : List[List[Symbol]] => Tree)): DefDef)
+  locally(LabelDef(sym, params, rhs): LabelDef)
+  locally(ModuleDef(sym, impl): ModuleDef)
+  locally(TypeDef(sym, rhs): TypeDef)
+  locally(TypeDef(sym): TypeDef)
+  locally(ValDef(sym, rhs): ValDef)
+  locally(ValDef(sym): ValDef)
+  locally(AnnotatedType(anns, tpe): AnnotatedType)
+  locally(BoundedWildcardType(??? : TypeBounds): BoundedWildcardType)
+  locally(TypeBounds(tpe, tpe): TypeBounds)
+  locally(MethodType(params, tpe): MethodType)
+  locally(RefinedType(tpes, decls): RefinedType)
+  locally(RefinedType(tpes, decls, sym): RefinedType)
+  locally(ClassInfoType(tpes, decls, sym): ClassInfoType)
+  locally(SingleType(tpe, sym): Type)
+  locally(TypeRef(tpe, sym, tpes): Type)
+  locally(ExistentialType(syms, tpe): ExistentialType)
+  locally(NullaryMethodType(tpe): NullaryMethodType)
+  locally(ThisType(sym): Type)
+  locally(SuperType(tpe, tpe): Type)
+  locally(PolyType(syms, tpe): PolyType)
+  locally(ConstantType(const): ConstantType)
+  locally(sym.asFreeTerm: FreeTermSymbol)
+  locally(sym.asFreeType: FreeTypeSymbol)
+  locally(sym.attachments: Attachments { type Pos = Position })
+  locally(tree.attachments: Attachments { type Pos = Position })
+  locally(captureVariable(sym): Unit)
+  locally(capturedVariableType(sym): Type)
+  locally(sym.deSkolemize: Symbol)
+  locally(tree.defineType(tpe): Tree)
+  locally(existentialAbstraction(tparams, tpe): Type)
+  locally(tree.freeTerms: List[FreeTermSymbol])
+  locally(tree.freeTypes: List[FreeTypeSymbol])
+  locally(intersectionType(tpes): Type)
+  locally(intersectionType(tpes, sym): Type)
+  locally(sym.isErroneous: Boolean)
+  locally(sym.isFreeTerm: Boolean)
+  locally(sym.isFreeType: Boolean)
+  locally(sym.isLocal: Boolean)
+  locally(sym.isOverride: Boolean)
+  locally(tsym.isSkolem: Boolean)
+  locally(manifestToTypeTag(mirror, manifest): scala.reflect.api.Universe#TypeTag[Int])
+  locally(treeBuild.mkAttributedIdent(sym): RefTree)
+  locally(treeBuild.mkAttributedQualifier(tpe): Tree)
+  locally(treeBuild.mkAttributedQualifier(tpe, sym): Tree)
+  locally(treeBuild.mkAttributedRef(tpe, sym): RefTree)
+  locally(treeBuild.mkAttributedRef(sym): RefTree)
+  locally(treeBuild.mkAttributedSelect(tree, sym): RefTree)
+  locally(treeBuild.mkAttributedThis(sym): This)
+  locally(mkImporter(scala.reflect.runtime.universe): Importer{val from: scala.reflect.runtime.universe.type})
+  locally(treeBuild.mkMethodCall(sym, trees): Tree)
+  locally(treeBuild.mkMethodCall(sym, tpes, trees): Tree)
+  locally(treeBuild.mkMethodCall(sym, name, trees): Tree)
+  locally(treeBuild.mkMethodCall(sym, name, tpes, trees): Tree)
+  locally(treeBuild.mkMethodCall(tree, sym, tpes, trees): Tree)
+  locally(treeBuild.mkMethodCall(tree, trees): Tree)
+  locally(treeBuild.mkMethodCall(tree, tpes, trees): Tree)
+  locally(treeBuild.mkNullaryCall(sym, tpes): Tree)
+  locally(treeBuild.mkRuntimeUniverseRef: Tree)
+  locally(treeBuild.mkUnattributedRef(name): RefTree)
+  locally(treeBuild.mkUnattributedRef(sym): RefTree)
+  locally(sym.newClassSymbol(tyname, pos, flags): ClassSymbol)
+  locally(sym.newMethodSymbol(tename, pos, flags): MethodSymbol)
+  locally(sym.newModuleAndClassSymbol(name, pos, flags): (ModuleSymbol, ClassSymbol))
+  locally(newScopeWith(sym, sym, sym): Scope)
+  locally(sym.newTermSymbol(tename, pos, flags): TermSymbol)
+  locally(sym.newTypeSymbol(tyname, pos, flags): TypeSymbol)
+  locally(polyType(tparams, tpe): Type)
+  locally(sym.pos: Position)
+  locally((tree.pos = pos): Unit)
+  locally(referenceCapturedVariable(sym): Tree)
+  locally(refinedType(tpes, sym): Type)
+  locally(refinedType(tpes, sym, decls, pos): Type)
+  locally(sym.removeAttachment[Int]: Symbol)
+  locally(tree.removeAttachment[Int]: Tree)
+  locally(sym.setAnnotations(ann, ann, ann): Symbol)
+  locally(sym.setName(name): Symbol)
+  locally(ttree.setOriginal(tree): TypeTree)
+  locally(tree.setPos(pos): Tree)
+  locally(sym.setPrivateWithin(sym): Symbol)
+  locally(tree.setSymbol(sym): Tree)
+  locally(tree.setType(tpe): Tree)
+  locally(sym.setTypeSignature(tpe): Symbol)
+  locally(singleType(tpe, sym): Type)
+  locally(tree.substituteSymbols(syms, syms): Tree)
+  locally(tree.substituteThis(sym, tree): Tree)
+  locally(tree.substituteTypes(syms, tpes): Tree)
+  locally((tree.symbol = sym): Unit)
+  locally((tree.tpe = tpe): Unit)
+  locally(typeRef(tpe, sym, tpes): Type)
+  locally(typeTagToManifest(mirror, tag): Manifest[Int])
+  locally(sym.updateAttachment(42): Symbol)
+  locally(tree.updateAttachment(42): Tree)
+  locally(FreeTermSymbolTag: ClassTag[FreeTermSymbol])
+  locally((??? : FreeTermSymbol).origin)
+  locally((??? : FreeTermSymbol).value)
+  locally(FreeTypeSymbolTag: ClassTag[FreeTypeSymbol])
+  locally((??? : FreeTypeSymbol).origin)
+  locally(ReferenceToBoxedTag: ClassTag[ReferenceToBoxed])
+  locally(build: BuildApi)
+  locally(ReferenceToBoxed(??? : Ident): ReferenceToBoxed)
+  locally((??? : ReferenceToBoxed).ident: Tree)
+  locally(ReferenceToBoxed.unapply(???): Option[Ident])
+  locally(build.selectType(sym, str): TypeSymbol)
+  locally(build.selectTerm(sym, str): TermSymbol)
+  locally(build.selectOverloadedMethod(sym, str, i): MethodSymbol)
+  locally(build.newNestedSymbol(sym, name, pos, flags, b): Symbol)
+  locally(build.newFreeTerm(str, i): FreeTermSymbol)
+  locally(build.newFreeTerm(str, i, flags, str): FreeTermSymbol)
+  locally(build.newFreeType(str): FreeTypeSymbol)
+  locally(build.newFreeType(str, flags, str): FreeTypeSymbol)
+  locally(build.setTypeSignature(sym, tpe): Symbol)
+  locally(build.setAnnotations(sym, anns): Symbol)
+  locally(build.flagsFromBits(??? : Long): FlagSet)
+  locally(build.emptyValDef: ValDef)
+  locally(build.This(sym): Tree)
+  locally(build.Select(tree, sym): Select)
+  locally(build.Ident(sym): Ident)
+  locally(build.TypeTree(tpe): TypeTree)
+  locally(build.thisPrefix(sym): Type)
+  locally(build.setType(tree, tpe): Tree)
+  locally(build.setSymbol(tree, sym): Tree)
+}
\ No newline at end of file
diff --git a/test/files/jvm/t2104.check b/test/files/pos/reflection-compat-ru.check
similarity index 100%
rename from test/files/jvm/t2104.check
rename to test/files/pos/reflection-compat-ru.check
diff --git a/test/files/pos/reflection-compat-ru.scala b/test/files/pos/reflection-compat-ru.scala
new file mode 100644
index 0000000..9ff72d1
--- /dev/null
+++ b/test/files/pos/reflection-compat-ru.scala
@@ -0,0 +1,135 @@
+object Test extends App {
+  import scala.reflect.runtime.universe._
+  import scala.reflect.ClassTag
+  import compat._
+
+  val tree: Tree = ???
+  val ttree: TypeTree = ???
+  val stree: SymTree = ???
+  val trees: List[Tree] = ???
+  val mods: Modifiers = ???
+  val impl: Template = ???
+  val vparamss: List[List[ValDef]] = ???
+  val rhs: Tree = ???
+  val sym: Symbol = ???
+  val tsym: TypeSymbol = ???
+  val syms: List[Symbol] = ???
+  val params: List[Symbol] = ???
+  val tparams: List[Symbol] = ???
+  val tpe: Type = ???
+  val tpes: List[Type] = ???
+  val manifest: Manifest[Int] = ???
+  val tag: TypeTag[Int] = ???
+  val mirror: Mirror = ???
+  val decls: Scope = ???
+  val pos: Position = ???
+  val ann: Annotation = ???
+  val anns: List[Annotation] = ???
+  val const: Constant = ???
+  val name: Name = ???
+  val tyname: TypeName = ???
+  val tename: TermName = ???
+  val flags: FlagSet = ???
+  val str: String = ???
+  val i: Int = ???
+  val b: Boolean = ???
+
+  // abstract class BuildApi
+  // abstract class ReferenceToBoxedExtractor
+  // abstract trait AttachableApi
+  // abstract trait FreeTermSymbolApi
+  // abstract trait FreeTypeSymbolApi
+  // abstract trait IdentContextApi
+  // abstract trait ReferenceToBoxedApi
+  // abstract trait SymTreeContextApi
+  // abstract trait SymbolContextApi
+  // abstract trait TreeContextApi
+  // abstract trait TypeTreeContextApi
+  locally(ClassDef(sym, impl): ClassDef)
+  locally(DefDef(sym, mods, vparamss, rhs): DefDef)
+  locally(DefDef(sym, vparamss, rhs): DefDef)
+  locally(DefDef(sym, mods, rhs): DefDef)
+  locally(DefDef(sym, rhs): DefDef)
+  locally(DefDef(sym, (??? : List[List[Symbol]] => Tree)): DefDef)
+  locally(LabelDef(sym, params, rhs): LabelDef)
+  locally(ModuleDef(sym, impl): ModuleDef)
+  locally(TypeDef(sym, rhs): TypeDef)
+  locally(TypeDef(sym): TypeDef)
+  locally(ValDef(sym, rhs): ValDef)
+  locally(ValDef(sym): ValDef)
+  locally(AnnotatedType(anns, tpe): AnnotatedType)
+  locally(BoundedWildcardType(??? : TypeBounds): BoundedWildcardType)
+  locally(TypeBounds(tpe, tpe): TypeBounds)
+  locally(MethodType(params, tpe): MethodType)
+  locally(RefinedType(tpes, decls): RefinedType)
+  locally(RefinedType(tpes, decls, sym): RefinedType)
+  locally(ClassInfoType(tpes, decls, sym): ClassInfoType)
+  locally(SingleType(tpe, sym): Type)
+  locally(TypeRef(tpe, sym, tpes): Type)
+  locally(ExistentialType(syms, tpe): ExistentialType)
+  locally(NullaryMethodType(tpe): NullaryMethodType)
+  locally(ThisType(sym): Type)
+  locally(SuperType(tpe, tpe): Type)
+  locally(PolyType(syms, tpe): PolyType)
+  locally(ConstantType(const): ConstantType)
+  locally(sym.asFreeTerm: FreeTermSymbol)
+  locally(sym.asFreeType: FreeTypeSymbol)
+  locally(existentialAbstraction(tparams, tpe): Type)
+  locally(tree.freeTerms: List[FreeTermSymbol])
+  locally(tree.freeTypes: List[FreeTypeSymbol])
+  locally(intersectionType(tpes): Type)
+  locally(intersectionType(tpes, sym): Type)
+  locally(sym.isErroneous: Boolean)
+  locally(sym.isFreeTerm: Boolean)
+  locally(sym.isFreeType: Boolean)
+  locally(sym.isLocal: Boolean)
+  locally(sym.isOverride: Boolean)
+  locally(tsym.isSkolem: Boolean)
+  locally(manifestToTypeTag(mirror, manifest): scala.reflect.api.Universe#TypeTag[Int])
+  locally(mkImporter(scala.reflect.runtime.universe): Importer{val from: scala.reflect.runtime.universe.type})
+  locally(sym.newClassSymbol(tyname, pos, flags): ClassSymbol)
+  locally(sym.newMethodSymbol(tename, pos, flags): MethodSymbol)
+  locally(sym.newModuleAndClassSymbol(name, pos, flags): (ModuleSymbol, ClassSymbol))
+  locally(newScopeWith(sym, sym, sym): Scope)
+  locally(sym.newTermSymbol(tename, pos, flags): TermSymbol)
+  locally(sym.newTypeSymbol(tyname, pos, flags): TypeSymbol)
+  locally(polyType(tparams, tpe): Type)
+  locally(sym.pos: Position)
+  locally(refinedType(tpes, sym): Type)
+  locally(refinedType(tpes, sym, decls, pos): Type)
+  locally(singleType(tpe, sym): Type)
+  locally(tree.substituteSymbols(syms, syms): Tree)
+  locally(tree.substituteThis(sym, tree): Tree)
+  locally(tree.substituteTypes(syms, tpes): Tree)
+  locally(typeRef(tpe, sym, tpes): Type)
+  locally(typeTagToManifest(mirror, tag): Manifest[Int])
+  locally(FreeTermSymbolTag: ClassTag[FreeTermSymbol])
+  locally((??? : FreeTermSymbol).origin)
+  locally((??? : FreeTermSymbol).value)
+  locally(FreeTypeSymbolTag: ClassTag[FreeTypeSymbol])
+  locally((??? : FreeTypeSymbol).origin)
+  locally(ReferenceToBoxedTag: ClassTag[ReferenceToBoxed])
+  locally(build: BuildApi)
+  locally(ReferenceToBoxed(??? : Ident): ReferenceToBoxed)
+  locally((??? : ReferenceToBoxed).ident: Tree)
+  locally(ReferenceToBoxed.unapply(???): Option[Ident])
+  locally(build.selectType(sym, str): TypeSymbol)
+  locally(build.selectTerm(sym, str): TermSymbol)
+  locally(build.selectOverloadedMethod(sym, str, i): MethodSymbol)
+  locally(build.newNestedSymbol(sym, name, pos, flags, b): Symbol)
+  locally(build.newFreeTerm(str, i): FreeTermSymbol)
+  locally(build.newFreeTerm(str, i, flags, str): FreeTermSymbol)
+  locally(build.newFreeType(str): FreeTypeSymbol)
+  locally(build.newFreeType(str, flags, str): FreeTypeSymbol)
+  locally(build.setTypeSignature(sym, tpe): Symbol)
+  locally(build.setAnnotations(sym, anns): Symbol)
+  locally(build.flagsFromBits(??? : Long): FlagSet)
+  locally(build.emptyValDef: ValDef)
+  locally(build.This(sym): Tree)
+  locally(build.Select(tree, sym): Select)
+  locally(build.Ident(sym): Ident)
+  locally(build.TypeTree(tpe): TypeTree)
+  locally(build.thisPrefix(sym): Type)
+  locally(build.setType(tree, tpe): Tree)
+  locally(build.setSymbol(tree, sym): Tree)
+}
\ No newline at end of file
diff --git a/test/files/pos/relax_implicit_divergence.scala b/test/files/pos/relax_implicit_divergence.scala
index 8525c84..f17d023 100644
--- a/test/files/pos/relax_implicit_divergence.scala
+++ b/test/files/pos/relax_implicit_divergence.scala
@@ -1,7 +1,7 @@
 class A(val options: Seq[String])
 
 object Test {
-  implicit def ss: Equiv[Seq[String]] = error("dummy")
-  implicit def equivA(implicit seqEq: Equiv[Seq[String]]): Equiv[A] = error("dummy")
+  implicit def ss: Equiv[Seq[String]] = sys.error("dummy")
+  implicit def equivA(implicit seqEq: Equiv[Seq[String]]): Equiv[A] = sys.error("dummy")
   implicitly[Equiv[A]]
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/return_thistype.scala b/test/files/pos/return_thistype.scala
index f164e06..c0736c0 100644
--- a/test/files/pos/return_thistype.scala
+++ b/test/files/pos/return_thistype.scala
@@ -1,8 +1,8 @@
 // tests transformation of return type in typedTypeApply (see also tcpoly_gm.scala)
-class As { 
-  class A { 
+class As {
+  class A {
     def foo: A.this.type = bar.asInstanceOf[A.this.type]
     def foo2: this.type = bar.asInstanceOf[this.type]
-    def bar: A = null 
+    def bar: A = null
   }
 }
diff --git a/test/files/run/interpolation.flags b/test/files/pos/sammy_poly.flags
similarity index 100%
copy from test/files/run/interpolation.flags
copy to test/files/pos/sammy_poly.flags
diff --git a/test/files/pos/sammy_poly.scala b/test/files/pos/sammy_poly.scala
new file mode 100644
index 0000000..c629be7
--- /dev/null
+++ b/test/files/pos/sammy_poly.scala
@@ -0,0 +1,7 @@
+// test synthesizeSAMFunction where the sam type is not fully defined
+class T {
+  trait F[T, U] { def apply(x: T): U }
+  // NOTE: the f(x) desugaring for now assumes the single abstract method is called 'apply'
+  def app[T, U](x: T)(f: F[T, U]): U = f(x)
+  app(1)(x => List(x))
+}
\ No newline at end of file
diff --git a/test/files/run/interpolation.flags b/test/files/pos/sammy_scope.flags
similarity index 100%
copy from test/files/run/interpolation.flags
copy to test/files/pos/sammy_scope.flags
diff --git a/test/files/pos/sammy_scope.scala b/test/files/pos/sammy_scope.scala
new file mode 100644
index 0000000..8f1fe70
--- /dev/null
+++ b/test/files/pos/sammy_scope.scala
@@ -0,0 +1,8 @@
+// test synthesizeSAMFunction: scope hygiene
+abstract class SamFun[T1, R] { self =>
+  def apply(v1: T1): R
+
+  // this should type check, as the apply ref is equivalent to self.apply
+  // it shouldn't resolve to the sam's apply that's synthesized (that wouldn't type check, hence the pos test)
+  def compose[A](g: SamFun[A, T1]): SamFun[A, R] = { x => apply(g(x)) }
+}
\ No newline at end of file
diff --git a/test/files/run/interpolation.flags b/test/files/pos/sammy_single.flags
similarity index 100%
copy from test/files/run/interpolation.flags
copy to test/files/pos/sammy_single.flags
diff --git a/test/files/pos/sammy_single.scala b/test/files/pos/sammy_single.scala
new file mode 100644
index 0000000..7a3d272
--- /dev/null
+++ b/test/files/pos/sammy_single.scala
@@ -0,0 +1,9 @@
+// test that dependent types work
+// TODO: def apply(x: String): x.type does NOT work yet
+object Test {
+  val s: String = ""
+
+  trait T { def apply(x: s.type): s.type }
+
+  val preservedResult: s.type = ((x => x): T)(s)
+}
\ No newline at end of file
diff --git a/test/files/run/interpolation.flags b/test/files/pos/sammy_twice.flags
similarity index 100%
copy from test/files/run/interpolation.flags
copy to test/files/pos/sammy_twice.flags
diff --git a/test/files/pos/sammy_twice.scala b/test/files/pos/sammy_twice.scala
new file mode 100644
index 0000000..c91f5b9
--- /dev/null
+++ b/test/files/pos/sammy_twice.scala
@@ -0,0 +1,9 @@
+// test repeated synthesizeSAMFunction where the sam type is not fully defined
+// the naive implementation would enter the same apply$body in the same scope twice
+trait F[T, U] { def apply(x: T): U }
+
+class C {
+  def app[T, U](x: T)(f: F[T, U]): U = f(x)
+  app(1)(x => List(x))
+  app(2)(x => List(x))
+}
\ No newline at end of file
diff --git a/test/files/pos/scala-singleton.scala b/test/files/pos/scala-singleton.scala
index 5e0baa0..08038db 100644
--- a/test/files/pos/scala-singleton.scala
+++ b/test/files/pos/scala-singleton.scala
@@ -3,12 +3,12 @@
 object Test {
   def f1(x: AnyRef with Singleton): AnyRef with Singleton = x
   def f2[T <: AnyRef with Singleton](x: T): T = x
-    
+
   val x1: AnyRef with Singleton = "abc"
   val x2 = "def"
   final val x3 = "ghi"
   val x4: String = "jkl"
-  
+
   // compiles...
   def narrow1(x: AnyRef): AnyRef with Singleton = x
 
@@ -17,26 +17,26 @@ object Test {
 
   // fails, wait, what? This fails and narrow1 compiles?
   def narrow3(x: AnyRef): AnyRef with Singleton = x.asInstanceOf[AnyRef with Singleton]
-  
+
   // ok
   def narrow4[T <: AnyRef](x: T): AnyRef with Singleton = x
-  
+
   object imp {
     implicit def narrow4[T <: AnyRef](x: T): AnyRef with Singleton = x
     val x5: String = "mno"
     def imp1 = f1(x5)
-    
+
     // f2(x5)   // doesn't work but I think it should
     def imp2 = f2(narrow4(x5))
   }
-  
+
   def main(args: Array[String]): Unit = {
     // compiles
     f1(x1)
     f1(x2)
     f1(x3)
     f1(x4)
-    
+
     f2(x1)
     // f2(x2)
     // f2(x3)   // maybe this one should work
diff --git a/test/files/pos/scoping1.scala b/test/files/pos/scoping1.scala
index 548f154..9fe1b5f 100644
--- a/test/files/pos/scoping1.scala
+++ b/test/files/pos/scoping1.scala
@@ -9,4 +9,4 @@ object This extends App {
     def foo() = ()
   }
   val c: C = new D
-}   
+}
diff --git a/test/files/pos/sealed-final.flags b/test/files/pos/sealed-final.flags
new file mode 100644
index 0000000..cfabf7a
--- /dev/null
+++ b/test/files/pos/sealed-final.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Yinline-warnings -optimise
\ No newline at end of file
diff --git a/test/files/pos/sealed-final.scala b/test/files/pos/sealed-final.scala
new file mode 100644
index 0000000..bdedb5c
--- /dev/null
+++ b/test/files/pos/sealed-final.scala
@@ -0,0 +1,14 @@
+sealed abstract class Foo {
+  @inline def bar(x: Int) = x + 1
+}
+object Foo {
+  def mkFoo(): Foo = new Baz2
+}
+
+object Baz1 extends Foo
+final class Baz2 extends Foo
+
+object Test {
+  // bar should be inlined now
+  def f = Foo.mkFoo() bar 10
+}
diff --git a/test/files/pos/selftails.scala b/test/files/pos/selftails.scala
index 7c58543..a4253b8 100644
--- a/test/files/pos/selftails.scala
+++ b/test/files/pos/selftails.scala
@@ -1,10 +1,10 @@
 package net.liftweb.util
- 
+
 /**
 * This trait adds functionality to Scala standard types
 */
 trait BasicTypesHelpers { self: StringHelpers with ControlHelpers =>
- 
+
   /**
    * Compare two arrays of Byte for byte equality.
    * @return true if two Byte arrays contain the same bytes
@@ -19,5 +19,5 @@ trait BasicTypesHelpers { self: StringHelpers with ControlHelpers =>
   }
 }
 
-trait StringHelpers 
+trait StringHelpers
 trait ControlHelpers
diff --git a/test/files/pos/seq-ordering.scala b/test/files/pos/seq-ordering.scala
index 8f1d293..517d8ae 100644
--- a/test/files/pos/seq-ordering.scala
+++ b/test/files/pos/seq-ordering.scala
@@ -2,7 +2,7 @@ import Ordering.Implicits._
 
 class A {
   import Predef.{ implicitly => ? }
-  
+
   ?[Ordering[List[Int]]]
   ?[Ordering[IndexedSeq[(Int, String)]]]
   ?[Ordering[Seq[Seq[Int]]]]
diff --git a/test/files/pos/signatures/Test.java b/test/files/pos/signatures/Test.java
index 78c1965..3d1e375 100644
--- a/test/files/pos/signatures/Test.java
+++ b/test/files/pos/signatures/Test.java
@@ -3,7 +3,7 @@ import test.Outer;
 
 /* Test correct generation of java signatures. The Outer class should not
  * have a Java signature attribute for the inner method definition. Trait
- * Mutable should have one, even though it is also a nested definition. 
+ * Mutable should have one, even though it is also a nested definition.
  * (but for classes there is a way to tell about nesting to the JVM).
  */
 class Test {
diff --git a/test/files/pos/signatures/sig.scala b/test/files/pos/signatures/sig.scala
index 3feb9c4..4236f27 100644
--- a/test/files/pos/signatures/sig.scala
+++ b/test/files/pos/signatures/sig.scala
@@ -1,7 +1,7 @@
 package test
 
 /* Tests correct generation of Java signatures. The local method 'bar' should
- * not get a generic signature, as it may refer to type parameters of the enclosing 
+ * not get a generic signature, as it may refer to type parameters of the enclosing
  * method, and the JVM does not know about nested methods.
  */
 class Outer {
diff --git a/test/files/pos/simple-exceptions.scala b/test/files/pos/simple-exceptions.scala
index 38f2fc8..a9f16bf 100644
--- a/test/files/pos/simple-exceptions.scala
+++ b/test/files/pos/simple-exceptions.scala
@@ -8,7 +8,7 @@ object Test {
     try {
       try {
 	      Console.println("hi!")
-        error("xx")
+        sys.error("xx")
       }
       finally Console.println("ho!")
     }
diff --git a/test/files/pos/spec-Function1.scala b/test/files/pos/spec-Function1.scala
index 5b6af67..5a11550 100644
--- a/test/files/pos/spec-Function1.scala
+++ b/test/files/pos/spec-Function1.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
@@ -35,11 +35,11 @@ package scalabip
 trait Function1[@specialized -T1, @specialized +R] extends AnyRef { self =>
   def apply(v1:T1): R
   override def toString() = "<function>"
-  
+
   /** (f compose g)(x) ==  f(g(x))
    */
   def compose[A](g: A => T1): A => R = { x => apply(g(x)) }
-  
+
   /** (f andThen g)(x) ==  g(f(x))
    */
   def andThen[A](g: R => A): T1 => A = { x => g(apply(x)) }
diff --git a/test/files/pos/spec-annotations.scala b/test/files/pos/spec-annotations.scala
index 48281e5..b23abf4 100644
--- a/test/files/pos/spec-annotations.scala
+++ b/test/files/pos/spec-annotations.scala
@@ -1,7 +1,7 @@
 class ann(i: Int) extends scala.annotation.Annotation
 
 // annotations on abstract types
-abstract class C1[@serializable @cloneable +T, U, V[_]]
+abstract class C1[@annotation.elidable(0) +T, U, V[_]]
 abstract class C2[@deprecated
                   @ann(1) T <: Number,
                   V]
diff --git a/test/files/pos/spec-arrays.scala b/test/files/pos/spec-arrays.scala
index 84f6eef..883bc2f 100644
--- a/test/files/pos/spec-arrays.scala
+++ b/test/files/pos/spec-arrays.scala
@@ -20,7 +20,7 @@ abstract class AbsArray[T] {
   def updateBoolean(idx: Int, elem: Boolean) = update(idx, elem.asInstanceOf[T])
   def applyObject(idx: Int): Object = apply(idx).asInstanceOf[Object]
   def updateObject(idx: Int, elem: Object) = update(idx, elem.asInstanceOf[T])
-}  
+}
 
 final class IntArray(arr: Array[Int]) extends AbsArray[Int] {
   def apply(idx: Int): Int = applyInt(idx)
@@ -64,7 +64,7 @@ class ScalaSpecTest extends Test {
     while (i < arr.length) { acc = acc + arr.applyInt(i); i += 1 }
     acc
   }
-  
+
   def modify(j: Int) = {
     val base = j * 100 % 1000
     var i = 0
@@ -74,7 +74,7 @@ class ScalaSpecTest extends Test {
     }
   }
 }
-    
+
 class ScalaSpec2Test extends Test {
   val arr: AbsArray[Int] = new IntArray(new Array[Int](1000))
 
@@ -84,7 +84,7 @@ class ScalaSpec2Test extends Test {
     while (i < arr.length) { acc = acc + arr.applyInt(i); i += 1 }
     acc
   }
-  
+
   def modify(j: Int) = {
     val base = j * 100 % 1000
     var i = 0
@@ -94,7 +94,7 @@ class ScalaSpec2Test extends Test {
     }
   }
 }
-    
+
 class ScalaWrapTest extends Test {
   val arr: AbsArray[Int] = new ArraySeq(new Array[Int](1000))
 
@@ -104,7 +104,7 @@ class ScalaWrapTest extends Test {
     while (i < arr.length) { acc = acc + arr.applyInt(i); i += 1 }
     acc
   }
-  
+
   def modify(j: Int) = {
     val base = j * 100 % 1000
     var i = 0
@@ -114,7 +114,7 @@ class ScalaWrapTest extends Test {
     }
   }
 }
-    
+
 class ScalaGenTest extends Test {
   val arr: AbsArray[Integer] = new ArraySeq(new Array[Integer](1000))
   for (i <- 0 until arr.length) arr(i) = new Integer(0)
@@ -125,7 +125,7 @@ class ScalaGenTest extends Test {
     while (i < arr.length) { acc = acc + arr.apply(i).intValue; i += 1 }
     acc
   }
-  
+
   def modify(j: Int) = {
     val base = j * 100 % 1000
     var i = 0
@@ -135,7 +135,7 @@ class ScalaGenTest extends Test {
     }
   }
 }
-    
+
 class JavaTest extends Test {
   val arr = new Array[Int](1000)
 
@@ -145,7 +145,7 @@ class JavaTest extends Test {
     while (i < arr.length) { acc = acc + arr(i); i += 1 }
     acc
   }
-  
+
   def modify(j: Int) = {
     val base = j * 100 % 1000
     var i = 0
@@ -166,7 +166,7 @@ class ScalaSpec3Test extends Test {
     while (i < arr.length) { acc = acc + arr(i); i += 1 }
     acc
   }
-  
+
   def modify(j: Int) = {
     val base = j * 100 % 1000
     var i = 0
@@ -177,38 +177,11 @@ class ScalaSpec3Test extends Test {
   }
 }
 
-object TestJava extends scala.testing.Benchmark {
-  def run() {
-    (new JavaTest).run()
-  }
-}
-
-object TestSpec extends scala.testing.Benchmark {
-  def run() {
-    (new ScalaSpecTest).run()
-  }
-}
-     
-object TestSpec2 extends scala.testing.Benchmark {
-  def run() {
-    (new ScalaSpec2Test).run()
-  }
-}
-     
-object TestGen extends scala.testing.Benchmark {
-  def run() {
-    (new ScalaGenTest).run()
-  }
-}
-     
-object TestWrap extends scala.testing.Benchmark {
-  def run() {
-    (new ScalaWrapTest).run()
-  }
-}
-     
-object TestSpec3 extends scala.testing.Benchmark {
-  def run() {
-    (new ScalaSpec3Test).run()
-  }
+object TestRunner {
+  (new JavaTest).run()
+  (new ScalaSpecTest).run()
+  (new ScalaSpec2Test).run()
+  (new ScalaGenTest).run()
+  (new ScalaWrapTest).run()
+  (new ScalaSpec3Test).run()
 }
diff --git a/test/files/pos/spec-asseenfrom.scala b/test/files/pos/spec-asseenfrom.scala
index cf20fc5..ede5791 100644
--- a/test/files/pos/spec-asseenfrom.scala
+++ b/test/files/pos/spec-asseenfrom.scala
@@ -1,8 +1,8 @@
-class Automaton[@specialized(Double) W,State] { 
+class Automaton[@specialized(Double) W,State] {
 
-  def finalWeight(s: State): W = error("todo");
+  def finalWeight(s: State): W = sys.error("todo");
 
-  def allStates: Set[State] = error("toodo");
+  def allStates: Set[State] = sys.error("toodo");
 
   /**
    * Returns a map from states to its final weight. may expand all nodes.
diff --git a/test/files/pos/spec-cyclic.scala b/test/files/pos/spec-cyclic.scala
index b983caa..6cd7685 100644
--- a/test/files/pos/spec-cyclic.scala
+++ b/test/files/pos/spec-cyclic.scala
@@ -6,25 +6,25 @@ trait MyPartialFunction[-A, +B] extends AnyRef with AbsFun[A, B]
 
 trait ColMap[A, +B] extends MyPartialFunction[A, B] /*with Collection[(A, B)] */
 
-trait ColSorted[K,+A] extends ColRanged[K,A] 
+trait ColSorted[K,+A] extends ColRanged[K,A]
 
-trait ColSortedMap[K,+E] extends ColMap[K,E] with ColSorted[K,Tuple2[K,E]] 
+trait ColSortedMap[K,+E] extends ColMap[K,E] with ColSorted[K,Tuple2[K,E]]
 
 trait MutMap[A, B] extends AnyRef
       with ColMap[A, B]
 
-trait ColRanged[K, +A] //extends Iterable[A] 
+trait ColRanged[K, +A] //extends Iterable[A]
 
 trait JclRanged[K,A] extends ColRanged[K,A] //with MutableIterable[A] {
 
-trait JclMap[K,E] extends /*collection.jcl.MutableIterable[Tuple2[K,E]] with*/ MutMap[K,E] 
+trait JclMap[K,E] extends /*collection.jcl.MutableIterable[Tuple2[K,E]] with*/ MutMap[K,E]
 
 trait JclSorted[K,A] extends ColSorted[K,A] with JclRanged[K,A]
 
 trait JclSortedMap[K,E] extends ColSortedMap[K,E] with JclMap[K,E] with JclSorted[K,Tuple2[K,E]]
 
 class Foo[A, B] extends JclSortedMap[A, B] {
-  def apply(x: A): B = error("NYI")
+  def apply(x: A): B = sys.error("NYI")
 }
 
 class Bar {
diff --git a/test/files/pos/spec-doubledef-new.scala b/test/files/pos/spec-doubledef-new.scala
index ad9c639..589ceb3 100644
--- a/test/files/pos/spec-doubledef-new.scala
+++ b/test/files/pos/spec-doubledef-new.scala
@@ -19,12 +19,12 @@ abstract class B[T, @specialized(scala.Int) U : TypeTag, @specialized(scala.Int)
     val u: U
     val v: V
 
-    def f(t: T, v2: V): Pair[U, V] = {
+    def f(t: T, v2: V): Tuple2[U, V] = {
         val m: Array[U] = null
         if (m.isEmpty) {
-            Pair(u, v)
+            (u, v)
         } else {
-            Pair(u, v2)
+            (u, v2)
         }
     }
 }
\ No newline at end of file
diff --git a/test/files/pos/spec-doubledef-old.scala b/test/files/pos/spec-doubledef-old.scala
index 86b0d85..bde259e 100644
--- a/test/files/pos/spec-doubledef-old.scala
+++ b/test/files/pos/spec-doubledef-old.scala
@@ -17,12 +17,12 @@ abstract class B[T, @specialized(scala.Int) U : Manifest, @specialized(scala.Int
     val u: U
     val v: V
 
-    def f(t: T, v2: V): Pair[U, V] = {
+    def f(t: T, v2: V): Tuple2[U, V] = {
         val m: Array[U] = null
         if (m.isEmpty) {
-            Pair(u, v)
+            (u, v)
         } else {
-            Pair(u, v2)
+            (u, v2)
         }
     }
 }
diff --git a/test/files/pos/spec-funs.scala b/test/files/pos/spec-funs.scala
index 611ec0e..f92ae98 100644
--- a/test/files/pos/spec-funs.scala
+++ b/test/files/pos/spec-funs.scala
@@ -19,7 +19,7 @@ final class IntTest {
     val xs = new Array[Int](10000)
     val f = new AbsFunction1[Int, Int] {
       def apply(x: Int): Int = x * x
-    }    
+    }
     for (j <- 0 until niters) {
       transF(xs, f)
     }
@@ -54,10 +54,7 @@ final class ClosureTest {
   }
 }
 
-object TestInt extends scala.testing.Benchmark {
-  def run() = (new IntTest).run()
-}
-
-object TestClosure extends scala.testing.Benchmark {
-  def run() = (new ClosureTest).run()
+object TestRunner {
+  (new IntTest).run()
+  (new ClosureTest).run()
 }
diff --git a/test/files/pos/spec-params-old.scala b/test/files/pos/spec-params-old.scala
index f522512..33a2521 100644
--- a/test/files/pos/spec-params-old.scala
+++ b/test/files/pos/spec-params-old.scala
@@ -10,7 +10,7 @@ class Foo[@specialized A: ClassManifest] {
   def m2[@specialized B <: String](x: B) = x.concat("a")
 
   // conflicting in bounds, no mention of other spec members
-  // expect an overload here plus implementation in 
+  // expect an overload here plus implementation in
   // compatible specialized subclasses
   def m3[@specialized B >: A](x: B) = ()
 
@@ -19,10 +19,10 @@ class Foo[@specialized A: ClassManifest] {
 
   // non-conflicting, expect a normalized overload implementation here
   def m5[@specialized B](x: B) = x
-  
+
   // non-conflicting, expect a normalized implementation here
   // and specialized implementations for all expansions in specialized subclasses
-  def m6[@specialized B](x: B, y: A) = 
+  def m6[@specialized B](x: B, y: A) =
     goal(y)
 
   def goal(x: A) = {
diff --git a/test/files/pos/spec-sealed.scala b/test/files/pos/spec-sealed.scala
index 5782930..d7ecfaa 100644
--- a/test/files/pos/spec-sealed.scala
+++ b/test/files/pos/spec-sealed.scala
@@ -2,13 +2,13 @@ sealed abstract class MyList[@specialized +A] {
   def head: A
   def tail: MyList[A]
 
-  def ::[@specialized B >: A](x: B): MyList[B] = 
+  def ::[@specialized B >: A](x: B): MyList[B] =
     new Cons[B](x, this)
 }
 
 case object MyNil extends MyList[Nothing] {
-  def head = error("nil")
-  def tail = error("nil")
+  def head = sys.error("nil")
+  def tail = sys.error("nil")
 }
 
 case class Cons[@specialized a](private val hd: a, tl: MyList[a]) extends MyList[a] {
@@ -19,7 +19,7 @@ case class Cons[@specialized a](private val hd: a, tl: MyList[a]) extends MyList
 abstract class IntList extends MyList[Int]
 
 object Main extends App {
-  val xs = 1 :: 2 :: 3 :: MyNil 
+  val xs = 1 :: 2 :: 3 :: MyNil
   println(xs)
 }
 
diff --git a/test/files/pos/spec-short.scala b/test/files/pos/spec-short.scala
index 94a8007..71e56a4 100644
--- a/test/files/pos/spec-short.scala
+++ b/test/files/pos/spec-short.scala
@@ -5,7 +5,7 @@ abstract class AbsFun[@specialized T, @specialized U] {
   // abstract function, specialized
   def sum(xs: List[T]): Int
 
-  def prod(xs: List[T], mul: (Int, T) => Int): Int = 
+  def prod(xs: List[T], mul: (Int, T) => Int): Int =
     (1 /: xs)(mul)
 
   // concrete function, not specialized
@@ -18,9 +18,9 @@ abstract class AbsFun[@specialized T, @specialized U] {
 class Square extends AbsFun[Int, Int] {
   def apply(x: Int): Int = x * x
 
-  def sum(xs: List[Int]): Int = 
+  def sum(xs: List[Int]): Int =
     (0 /: xs) (_ + _)
 
-  def abs(m: Int): Int = 
+  def abs(m: Int): Int =
     sum(List(1, 2, 3))
 }
diff --git a/test/files/pos/spec-sparsearray-new.scala b/test/files/pos/spec-sparsearray-new.scala
index 7b3934c..df31089 100644
--- a/test/files/pos/spec-sparsearray-new.scala
+++ b/test/files/pos/spec-sparsearray-new.scala
@@ -4,7 +4,7 @@ import scala.collection.mutable.MapLike
 class SparseArray[@specialized(Int) T:ClassTag] extends collection.mutable.Map[Int,T] with collection.mutable.MapLike[Int,T,SparseArray[T]] {
   override def get(x: Int) = {
     val ind = findOffset(x)
-    if(ind < 0) None else Some(error("ignore"))
+    if(ind < 0) None else Some(sys.error("ignore"))
   }
 
   /**
@@ -13,13 +13,13 @@ class SparseArray[@specialized(Int) T:ClassTag] extends collection.mutable.Map[I
    * negative and can be converted into an insertion point with -(rv+1).
    */
   private def findOffset(i : Int) : Int = {
-    error("impl doesn't matter")
+    sys.error("impl doesn't matter")
   }
 
-  override def apply(i : Int) : T = { error("ignore") }
-  override def update(i : Int, value : T) = error("ignore")
+  override def apply(i : Int) : T = { sys.error("ignore") }
+  override def update(i : Int, value : T) = sys.error("ignore")
   override def empty = new SparseArray[T]
-  def -=(ind: Int) = error("ignore")
-  def +=(kv: (Int,T)) = error("ignore")
-  override final def iterator = error("ignore")
-}
\ No newline at end of file
+  def -=(ind: Int) = sys.error("ignore")
+  def +=(kv: (Int,T)) = sys.error("ignore")
+  override final def iterator = sys.error("ignore")
+}
diff --git a/test/files/pos/spec-sparsearray-old.scala b/test/files/pos/spec-sparsearray-old.scala
index ea7710a..e10dabd 100644
--- a/test/files/pos/spec-sparsearray-old.scala
+++ b/test/files/pos/spec-sparsearray-old.scala
@@ -3,7 +3,7 @@ import scala.collection.mutable.MapLike
 class SparseArray[@specialized(Int) T:ClassManifest] extends collection.mutable.Map[Int,T] with collection.mutable.MapLike[Int,T,SparseArray[T]] {
   override def get(x: Int) = {
     val ind = findOffset(x)
-    if(ind < 0) None else Some(error("ignore"))
+    if(ind < 0) None else Some(sys.error("ignore"))
   }
 
   /**
@@ -12,13 +12,13 @@ class SparseArray[@specialized(Int) T:ClassManifest] extends collection.mutable.
    * negative and can be converted into an insertion point with -(rv+1).
    */
   private def findOffset(i : Int) : Int = {
-    error("impl doesn't matter")
+    sys.error("impl doesn't matter")
   }
 
-  override def apply(i : Int) : T = { error("ignore") }
-  override def update(i : Int, value : T) = error("ignore")
+  override def apply(i : Int) : T = { sys.error("ignore") }
+  override def update(i : Int, value : T) = sys.error("ignore")
   override def empty = new SparseArray[T]
-  def -=(ind: Int) = error("ignore")
-  def +=(kv: (Int,T)) = error("ignore")
-  override final def iterator = error("ignore")
+  def -=(ind: Int) = sys.error("ignore")
+  def +=(kv: (Int,T)) = sys.error("ignore")
+  override final def iterator = sys.error("ignore")
 }
diff --git a/test/files/pos/spec-t3497.scala b/test/files/pos/spec-t3497.scala
index 6cc0e24..ff054aa 100644
--- a/test/files/pos/spec-t3497.scala
+++ b/test/files/pos/spec-t3497.scala
@@ -10,7 +10,7 @@ object B extends A[ Array[Byte], Int ] {
         return -1
       }
     }
-    
+
     return 0
   }
 }
diff --git a/test/files/pos/spec-tailcall.scala b/test/files/pos/spec-tailcall.scala
index d91e2f5..703ec01 100644
--- a/test/files/pos/spec-tailcall.scala
+++ b/test/files/pos/spec-tailcall.scala
@@ -1,5 +1,5 @@
 class TailCall[@specialized T] {
-  final def dropLeft(n: Int, xs: List[T]): List[T] = 
+  final def dropLeft(n: Int, xs: List[T]): List[T] =
     if (n == 0) xs
     else dropLeft(n - 1, xs.tail)
 /*
@@ -7,7 +7,7 @@ class TailCall[@specialized T] {
 
   def crash(o: Option[String]) = filter {
     case None if {
-      def dropLeft[T](n: Int, xs: List[T]): List[T] = 
+      def dropLeft[T](n: Int, xs: List[T]): List[T] =
         if (n == 0) xs
         else dropLeft(n - 1, xs.tail)
       dropLeft(2, List(1, 2, 3)).isEmpty
diff --git a/test/files/pos/spec-traits.scala b/test/files/pos/spec-traits.scala
index c6cc292..074f6c3 100644
--- a/test/files/pos/spec-traits.scala
+++ b/test/files/pos/spec-traits.scala
@@ -11,19 +11,19 @@ class Lazy {
 
 // issue 3307
 class Bug3307 {
-  def f[Z](block: String => Z) { 
-    block("abc") 
+  def f[Z](block: String => Z) {
+    block("abc")
   }
-  
-  ({ () => 
-    f { implicit x => println(x) } })() 
+
+  ({ () =>
+    f { implicit x => println(x) } })()
 }
 
 // issue 3301
   trait T[X]
 
 class Bug3301 {
-  def t[A]: T[A] = error("stub")
+  def t[A]: T[A] = sys.error("stub")
 
   () => {
     type X = Int
diff --git a/test/files/pos/spec-vector.scala b/test/files/pos/spec-vector.scala
index 06e49b5..392949c 100644
--- a/test/files/pos/spec-vector.scala
+++ b/test/files/pos/spec-vector.scala
@@ -1,4 +1,4 @@
 // ticket #3379, abstract overrides
 trait Vector extends (Int=>Double) {
-  override def apply(i: Int): Double  
+  override def apply(i: Int): Double
 }
diff --git a/test/files/pos/spec.scala b/test/files/pos/spec.scala
index 093d3cd..cc060ff 100644
--- a/test/files/pos/spec.scala
+++ b/test/files/pos/spec.scala
@@ -7,7 +7,7 @@ class Bar[@specialized(Int, AnyRef) A](a: A) {
 }
 
 
-class WithInner[@specialized(Int, AnyRef) A](a: A) { 
+class WithInner[@specialized(Int, AnyRef) A](a: A) {
   class Inner {
     def meth = a
   }
@@ -42,7 +42,7 @@ class Qux[@specialized(AnyRef) A] {
 
 class Foo[@specialized(Int, AnyRef) A](val a: Array[A]) {
   a(0)
-  
+
   def id(elem: A) = a(0) = elem
 }
 
@@ -52,13 +52,13 @@ object Test {
   def main(arg: Array[String]) {
     val f = new Foo(new Array[String](5))
     f.id("")
-    
+
     val z = new Baz[Int, Double]
     z.ab(1, 1.0)
-    
+
     testspec(new Array[String](5))
     testspec(new Array[Int](5))
   }
-  
+
   def testspec[@specialized(Int, AnyRef) T](arr: Array[T]) = arr(0)
 }
diff --git a/test/files/pos/specializes-sym-crash.scala b/test/files/pos/specializes-sym-crash.scala
index c46f435..7778ba2 100644
--- a/test/files/pos/specializes-sym-crash.scala
+++ b/test/files/pos/specializes-sym-crash.scala
@@ -2,11 +2,11 @@ import scala.collection._
 
 trait Foo[+A,
                      +Coll,
-                     +This <: GenSeqView[A, Coll] with GenSeqViewLike[A, Coll, This]]
-extends GenSeq[A] with GenSeqLike[A, This] with GenIterableView[A, Coll] with GenIterableViewLike[A, Coll, This] {
+                     +This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]]
+extends Seq[A] with SeqLike[A, This] with IterableView[A, Coll] with IterableViewLike[A, Coll, This] {
 self =>
 
-  trait Transformed[+B] extends GenSeqView[B, Coll] with super.Transformed[B] {
+  trait Transformed[+B] extends SeqView[B, Coll] with super.Transformed[B] {
     def length: Int
     def apply(idx: Int): B
     override def toString = viewToString
diff --git a/test/files/pos/strings.scala b/test/files/pos/strings.scala
index 83b8179..9fe8cfd 100644
--- a/test/files/pos/strings.scala
+++ b/test/files/pos/strings.scala
@@ -6,5 +6,5 @@ object test {
 }
 // #1000
 object A {
-  println("""This a "raw" string ending with a "double quote"""") 
+  println("""This a "raw" string ending with a "double quote"""")
 }
diff --git a/test/files/pos/sudoku.scala b/test/files/pos/sudoku.scala
index 336dc75..9435f50 100644
--- a/test/files/pos/sudoku.scala
+++ b/test/files/pos/sudoku.scala
@@ -3,22 +3,22 @@ object SudokuSolver extends App {
   // held in a global variable m. The program begins by reading 9 lines
   // of input to fill the board
   var m: Array[Array[Char]] = Array.tabulate(9)((x: Int) => readLine.toArray)
- 
+
   // For printing m, a method print is defined
   def print = { println(""); m map (carr => println(new String(carr))) }
- 
+
   // The test for validity is performed by looping over i=0..8 and
   // testing the row, column and 3x3 square containing the given
   // coordinate
   def invalid(i: Int, x: Int, y: Int, n: Char): Boolean =
     i<9 && (m(y)(i) == n || m(i)(x) == n ||
       m(y/3*3 + i/3)(x/3*3 + i % 3) == n || invalid(i+1, x, y, n))
- 
+
   // Looping over a half-closed range of consecutive integers [l..u)
   // is factored out into a higher-order function
   def fold(f: (Int, Int) => Int, accu: Int, l: Int, u: Int): Int =
     if(l==u) accu else fold(f, f(accu, l), l+1, u)
- 
+
   // The search function examines each position on the board in turn,
   // trying the numbers 1..9 in each unfilled position
   // The function is itself a higher-order fold, accumulating the value
@@ -34,7 +34,7 @@ object SudokuSolver extends App {
              val newaccu = search(x+1, y, f, accu);
              m(y)(x) = '0';
              newaccu}, accu, 1, 10)}
- 
+
   // The main part of the program uses the search function to accumulate
   // the total number of solutions
   println("\n"+search(0,0,i => {print; i+1},0)+" solution(s)")
diff --git a/test/files/pos/super.cmds b/test/files/pos/super.cmds
deleted file mode 100644
index 8f3f8a4..0000000
--- a/test/files/pos/super.cmds
+++ /dev/null
@@ -1,2 +0,0 @@
-javac Super_1.java
-scalac Super_2.scala
diff --git a/test/files/pos/super/Super_1.java b/test/files/pos/super/Super_1.java
index 418ae96..9acbba0 100644
--- a/test/files/pos/super/Super_1.java
+++ b/test/files/pos/super/Super_1.java
@@ -1,6 +1,6 @@
 // A.java
 interface Inter<T> { }
 
-class Super implements Inter<Super.Inner> {  
+class Super implements Inter<Super.Inner> {
   public class Inner { };
 }
diff --git a/test/files/neg/case-collision.flags b/test/files/pos/switch-small.flags
similarity index 100%
copy from test/files/neg/case-collision.flags
copy to test/files/pos/switch-small.flags
diff --git a/test/files/pos/t0031.scala b/test/files/pos/t0031.scala
index ec6eae9..d4050c8 100644
--- a/test/files/pos/t0031.scala
+++ b/test/files/pos/t0031.scala
@@ -4,17 +4,17 @@ object Main {
         def ensure(postcondition: a => Boolean): a
     }
 
-    def require[a](precondition: => Boolean)(command: => a): Ensure[a] = 
+    def require[a](precondition: => Boolean)(command: => a): Ensure[a] =
         if (precondition)
             new Ensure[a] {
 	        def ensure(postcondition: a => Boolean): a = {
 	            val result = command;
 	            if (postcondition(result)) result
-	            else error("Assertion error")
+	            else sys.error("Assertion error")
                 }
 	    }
         else
-            error("Assertion error");
+            sys.error("Assertion error");
 
     def arb[a](s: List[a]) =
         require (! s.isEmpty) {
diff --git a/test/files/pos/t0064.scala b/test/files/pos/t0064.scala
index c2ce4bf..1eeca8d 100644
--- a/test/files/pos/t0064.scala
+++ b/test/files/pos/t0064.scala
@@ -1,6 +1,6 @@
 object B {
   def main(Args:Array[String]) = {
-    val Pair(_,x) = Pair(1,2);
+    val (_,x) = (1,2);
     x + 1;
   }
 }
diff --git a/test/files/pos/t0066.scala b/test/files/pos/t0066.scala
index 9317da7..2153264 100644
--- a/test/files/pos/t0066.scala
+++ b/test/files/pos/t0066.scala
@@ -3,5 +3,5 @@ class GBTree[A, B] /*with Map[A, B, GBTree[A,B]]*/ {
     case class Node[A,B](key:A,value:B,smaller:Node[A,B],bigger:Node[A,B])
 	extends Tree[A,B];
     case class Nil[A,B]() extends Tree[A,B];
-    
+
 }
diff --git a/test/files/pos/t0069.scala b/test/files/pos/t0069.scala
index 5a8c15c..e4c242c 100644
--- a/test/files/pos/t0069.scala
+++ b/test/files/pos/t0069.scala
@@ -8,4 +8,3 @@ object testCQ  {
   */
 
 }
- 
diff --git a/test/files/pos/t0227.scala b/test/files/pos/t0227.scala
index 8650350..806b20d 100644
--- a/test/files/pos/t0227.scala
+++ b/test/files/pos/t0227.scala
@@ -5,7 +5,7 @@ final class Settings {
 abstract class Factory {
     type libraryType <: Base
 
-    final def apply(settings: Settings): libraryType = error("bla")
+    final def apply(settings: Settings): libraryType = sys.error("bla")
 }
 
 abstract class Base {
@@ -19,7 +19,7 @@ class SA(val settings: Settings) extends Base {
             SD
         ) :::  settings.f(
             SC
-        )    
+        )
 }
 
 object SC extends Factory {
diff --git a/test/files/pos/t0288/Foo.scala b/test/files/pos/t0288/Foo.scala
index 1f7d813..778ba65 100644
--- a/test/files/pos/t0288/Foo.scala
+++ b/test/files/pos/t0288/Foo.scala
@@ -6,4 +6,4 @@ class Foo extends Outer{
 
     val bar = new Inner(); // Shouldn't this work?
 
-} 
+}
diff --git a/test/files/pos/t0288/Outer.java b/test/files/pos/t0288/Outer.java
index 7a3754f..bea3e3f 100644
--- a/test/files/pos/t0288/Outer.java
+++ b/test/files/pos/t0288/Outer.java
@@ -6,4 +6,4 @@ public class Outer{
 
     }
 
-} 
+}
diff --git a/test/files/pos/t0301.scala b/test/files/pos/t0301.scala
index cb68f38..24b4776 100644
--- a/test/files/pos/t0301.scala
+++ b/test/files/pos/t0301.scala
@@ -1,7 +1,7 @@
 package fos
 
 abstract class Expr
-case class Var extends Expr
+case class Var() extends Expr
 
 object Analyzer {
   def substitution(expr: Expr, cls: (Var,Var)): Expr =
diff --git a/test/files/pos/t0305.scala b/test/files/pos/t0305.scala
index 4d37a02..4838b1f 100644
--- a/test/files/pos/t0305.scala
+++ b/test/files/pos/t0305.scala
@@ -1,5 +1,5 @@
 object Test extends App {
- 
+
   def foo(is:Int*) = 1;
   def foo(i:Int) = 2;
 
diff --git a/test/files/pos/t0422.scala b/test/files/pos/t0422.scala
deleted file mode 100644
index cb3ba27..0000000
--- a/test/files/pos/t0422.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.util.regexp.WordExp;
-import scala.util.automata.WordBerrySethi;
-
-object BoolWordExp extends WordExp {
-  type _labelT = MyLabels;
-  type _regexpT = RegExp;
-  abstract class MyLabels extends Label ;
-  case class MyLabel(c:Char) extends MyLabels;
-}
-
-object MyTranslator extends WordBerrySethi {
-  override val lang = BoolWordExp;
-  import lang._;
-  override protected def seenLabel( r:RegExp, i:Int, label: _labelT ): Unit = {
-    super.seenLabel(r,i,label)
-  }
-}
diff --git a/test/files/pos/t0438.scala b/test/files/pos/t0438.scala
index 33b7efe..fa5b771 100644
--- a/test/files/pos/t0438.scala
+++ b/test/files/pos/t0438.scala
@@ -1,9 +1,9 @@
 class Foo {
-  implicit def pair2fun2[A, B, C](f: (A, B) => C) = 
+  implicit def pair2fun2[A, B, C](f: (A, B) => C) =
      {p: (A, B) => f(p._1, p._2) }
 
   def foo(f: ((Int, Int)) => Int) = f
-  def bar(x: Int, y: Int) = x + y 
+  def bar(x: Int, y: Int) = x + y
 
   foo({ (x: Int, y: Int) => x + y }) // works
   foo(pair2fun2(bar _)) // works
diff --git a/test/files/pos/t0453.scala b/test/files/pos/t0453.scala
index d59a3d2..dfacc5e 100644
--- a/test/files/pos/t0453.scala
+++ b/test/files/pos/t0453.scala
@@ -1,5 +1,5 @@
 object Test {
-  val foo = new { 
+  val foo = new {
     trait Bar
     def l () : Bar = { new Bar {} }
   }
diff --git a/test/files/pos/t0599.scala b/test/files/pos/t0599.scala
index 6125b99..885159a 100644
--- a/test/files/pos/t0599.scala
+++ b/test/files/pos/t0599.scala
@@ -15,4 +15,4 @@ abstract class FooA {
       a.xxx;
       doB.xxx;
     }
-  } 
+  }
diff --git a/test/files/pos/t0625.scala b/test/files/pos/t0625.scala
index bda463d..5614542 100644
--- a/test/files/pos/t0625.scala
+++ b/test/files/pos/t0625.scala
@@ -1,6 +1,6 @@
 object Test {
   def idMap[C[_],T](m: { def map[U](f: T => U): C[U] }): C[T] = m.map(t => t)
-  
+
   def main(args: Array[String]): Unit = {
     idMap(Some(5))
     idMap(Responder.constant(5))
diff --git a/test/files/pos/t0646.scala b/test/files/pos/t0646.scala
deleted file mode 100644
index 6146e60..0000000
--- a/test/files/pos/t0646.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-object xfor {
-
-  import scala.xml.NodeSeq
-
-    val books = 
-    <bks>
-      <title>Blabla</title>
-      <title>Blubabla</title>
-      <title>Baaaaaaalabla</title>
-    </bks>;
-
-  new NodeSeq { val theSeq = books.child }  match {
-    case t @ Seq(<title>Blabla</title>) => t
-  }
-
-  //val n: NodeSeq = new NodeSeq { val theSeq = books.child } 
-  //n match {
-  //  case t @ <title>Blabla</title> => t
-  //}
-
-}
diff --git a/test/files/pos/t0770.scala b/test/files/pos/t0770.scala
index bb438f1..7a0a2bf 100644
--- a/test/files/pos/t0770.scala
+++ b/test/files/pos/t0770.scala
@@ -1,7 +1,7 @@
 trait A
 {
 	private[this] val p = 5
-	
+
 	def f = (b: Byte) => p
 }
 
diff --git a/test/files/pos/t0774/unrelated.scala b/test/files/pos/t0774/unrelated.scala
index 483f836..1efdb25 100644
--- a/test/files/pos/t0774/unrelated.scala
+++ b/test/files/pos/t0774/unrelated.scala
@@ -1,8 +1,8 @@
 object Outer {
   import Inner._
-  
+
   deathname
-  
+
   object Inner {
     def deathname: Int = 1
   }
diff --git a/test/files/pos/t0786.scala b/test/files/pos/t0786.scala
index 4d9f1d0..f40cf7d 100644
--- a/test/files/pos/t0786.scala
+++ b/test/files/pos/t0786.scala
@@ -2,15 +2,15 @@ object ImplicitProblem {
   class M[T]
 
   def nullval[T] = null.asInstanceOf[T];
-    
+
   trait Rep[T] {
     def eval: Int
   }
-    
+
   implicit def toRep0(n: Int) = new Rep[Int] {
     def eval = 0
   }
-    
+
   implicit def toRepN[T](n: M[T])(implicit f: T => Rep[T]) = new Rep[M[T]] {
     def eval = f(nullval[T]).eval + 1
   }
@@ -18,11 +18,11 @@ object ImplicitProblem {
   def depth[T <% Rep[T]](n: T) = n.eval
 
   def main(args: Array[String]) {
-    println(depth(nullval[M[Int]]))  // (1) this works   
+    println(depth(nullval[M[Int]]))  // (1) this works
     println(nullval[M[Int]].eval)    // (2) this works
-      
+
     type m = M[Int]
-    println(depth(nullval[m]))     // (3) this doesn't compile on 2.7.RC1    
+    println(depth(nullval[m]))     // (3) this doesn't compile on 2.7.RC1
     println(nullval[m].eval)       // (4) this works
   }
 
diff --git a/test/files/pos/t0851.scala b/test/files/pos/t0851.scala
new file mode 100644
index 0000000..fc7109d
--- /dev/null
+++ b/test/files/pos/t0851.scala
@@ -0,0 +1,14 @@
+package test
+
+object test1 {
+  case class Foo[T,T2](f : (T,T2) => String) extends (((T,T2)) => String){
+    def apply(t : T) = (s:T2) => f(t,s)
+    def apply(p : (T,T2)) = f(p._1,p._2)
+  }
+  implicit def g[T](f : (T,String) => String) = Foo(f)
+  def main(args : Array[String]) : Unit = {
+    val f = (x:Int,s:String) => s + x
+    println(f(1))
+    ()
+  }
+}
diff --git a/test/files/pos/t0872.scala b/test/files/pos/t0872.scala
new file mode 100644
index 0000000..8f4c1c4
--- /dev/null
+++ b/test/files/pos/t0872.scala
@@ -0,0 +1,8 @@
+object Main {
+  def main(args : Array[String]) {
+    val fn = (a : Int, str : String) => "a: " + a + ", str: " + str
+    implicit def fx[T](f : (T,String) => String) = (x:T) => f(x,null)
+    println(fn(1))
+    ()
+  }
+}
diff --git a/test/files/pos/t1000.scala b/test/files/pos/t1000.scala
index fabef94..613af76 100644
--- a/test/files/pos/t1000.scala
+++ b/test/files/pos/t1000.scala
@@ -1,5 +1,5 @@
 object A {
-  println("""This a "raw" string ending with a "double quote"""") 
+  println("""This a "raw" string ending with a "double quote"""")
 }
 
 object Test extends App {
diff --git a/test/files/pos/t1014.scala b/test/files/pos/t1014.scala
index 3fc10d1..6fb7f7b 100644
--- a/test/files/pos/t1014.scala
+++ b/test/files/pos/t1014.scala
@@ -1,9 +1,10 @@
-import scala.xml.{NodeSeq, Elem}
+class NodeSeq
+class Elem extends NodeSeq
 
 class EO extends App with Moo {
   // return type is Flog, inherited from overridden method.
   // implicit conversions are applied because expected type `pt` is `Flog` when `computeType(rhs, pt)`.
-  def cat = <cat>dog</cat>
+  def cat = (??? : Elem)
 
   implicit def nodeSeqToFlog(in: Elem): Flog = new Flog(in)
 }
diff --git a/test/files/pos/t1029.cmds b/test/files/pos/t1029.cmds
deleted file mode 100644
index 06b863d..0000000
--- a/test/files/pos/t1029.cmds
+++ /dev/null
@@ -1,2 +0,0 @@
-scalac Test_1.scala
-scalac Test_2.scala
diff --git a/test/files/pos/t1035.scala b/test/files/pos/t1035.scala
index bd693d9..e0a9379 100644
--- a/test/files/pos/t1035.scala
+++ b/test/files/pos/t1035.scala
@@ -7,7 +7,7 @@ class A  {
   var name:String = _
   def getName() = name
   def this(name:String, age:Int){this();this.name=name}
-  
+
 }
 
 class B(name:String) extends A(name,0){
@@ -18,15 +18,15 @@ class D {
    object A {
      def unapply(p:A) = Some(p.getName)
    }
-   
+
    object B {
      def unapply(p:B) = Some(p.getName)
    }
    def foo(p:Any) = p match {
-      case B(n)    => println("B") 
-      case A(n)    => println("A")  
-      
-        
+      case B(n)    => println("B")
+      case A(n)    => println("A")
+
+
    }
 
 }
diff --git a/test/files/pos/t1048.scala b/test/files/pos/t1048.scala
index ce57e72..cd16db5 100644
--- a/test/files/pos/t1048.scala
+++ b/test/files/pos/t1048.scala
@@ -1,7 +1,7 @@
 trait T[U] {
   def x: T[V] forSome { type V <: U }
 }
-  
+
 object T {
   def unapply[U](t: T[U]): Option[T[V] forSome { type V <: U }] = Some(t.x)
 }
@@ -12,4 +12,3 @@ object Test {
   }
 }
 
-  
diff --git a/test/files/pos/t1059.scala b/test/files/pos/t1059.scala
deleted file mode 100644
index bcd8f03..0000000
--- a/test/files/pos/t1059.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package com;
-
-import scala.xml._
-
-object Main {
-
-    def main(args : Array[String]) : Unit = {
-
-        var m : PartialFunction[Any, Any] = {
-
-            case SafeNodeSeq(s @ _*) => println(s) }
-
-        println(m(<a/> ++ <b/>))
-        println(m.isDefinedAt(<a/> ++ <b/>))
-
-    }
-
-}
-
-object SafeNodeSeq {
-
-    def unapplySeq(any: Any) : Option[Seq[Node]] = any match { case s: Seq[_] => Some(s flatMap ( _ match {
-
-        case n: Node => n case _ => NodeSeq.Empty
-
-    })) case _ => None }
-
-} 
diff --git a/test/files/pos/t1071.scala b/test/files/pos/t1071.scala
index 7fb802f..59149a0 100644
--- a/test/files/pos/t1071.scala
+++ b/test/files/pos/t1071.scala
@@ -12,6 +12,6 @@ object Test {
 
   val c = new C
   (c: D).a // works
-  c.a // error  
+  c.a // error
 }
 
diff --git a/test/files/pos/t1090.scala b/test/files/pos/t1090.scala
index dca762a..a9bce90 100644
--- a/test/files/pos/t1090.scala
+++ b/test/files/pos/t1090.scala
@@ -10,7 +10,7 @@ object Test {
       type Node = Core.this.Node
     }
     def f(manager : Manager) = manager.iterator.foreach{
-      case node : NodeImpl => 
+      case node : NodeImpl =>
     }
   }
 }
diff --git a/test/files/pos/t1107.scala b/test/files/pos/t1107a.scala
similarity index 100%
rename from test/files/pos/t1107.scala
rename to test/files/pos/t1107a.scala
diff --git a/test/files/pos/t1107b/O.scala b/test/files/pos/t1107b/O.scala
index aa605a6..0198867 100644
--- a/test/files/pos/t1107b/O.scala
+++ b/test/files/pos/t1107b/O.scala
@@ -4,10 +4,10 @@ object O
     case s: Sub => true
     case _ => false
   }
-  
+
   def main(args: Array[String]): Unit = {
     val c = new AnyRef with C
 
     c.bob.toString + c.bob2.toString
-  }  
+  }
 }
diff --git a/test/files/pos/t1107b/T.scala b/test/files/pos/t1107b/T.scala
index 1f3712d..0dff0b9 100644
--- a/test/files/pos/t1107b/T.scala
+++ b/test/files/pos/t1107b/T.scala
@@ -1,6 +1,6 @@
 sealed trait Top
 sealed trait Sub extends Top
-trait C { 
+trait C {
   private object P extends Sub
   def bob() = P.getClass
   def bob2() = O.d(P)
diff --git a/test/files/pos/t1123.scala b/test/files/pos/t1123.scala
index a7b009c..3812fa3 100644
--- a/test/files/pos/t1123.scala
+++ b/test/files/pos/t1123.scala
@@ -7,5 +7,5 @@ object Test {
     }
     def f = extraListener.h
   }
-  def main(args : Array[String]) : Unit = (new Editor).f    
+  def main(args : Array[String]) : Unit = (new Editor).f
 }
diff --git a/test/files/pos/t1133.scala b/test/files/pos/t1133.scala
index 4538de5..562b528 100644
--- a/test/files/pos/t1133.scala
+++ b/test/files/pos/t1133.scala
@@ -8,21 +8,21 @@ object Match
         case _ => println("fail")
     }
   }
-  
+
   object Extractor1 {
     def unapply(x: Any) = x match {
         case x: String => Some(x, x+x, x+x+x, x+x, x)
         case _ => None
     }
   }
-  
+
   object Extractor2 {
     def unapply(x: Any) = x match {
         case x: String => Some(x, x+x, x+x+x)
         case _ => None
     }
   }
-  
+
   object Extractor3 {
     def unapply(x: Any) = x match {
         case x: String => Some(x, x, x)
diff --git a/test/files/pos/t1164.scala b/test/files/pos/t1164.scala
index 307ca92..ab58c1d 100644
--- a/test/files/pos/t1164.scala
+++ b/test/files/pos/t1164.scala
@@ -1,29 +1,29 @@
 
 
-object test {  		
+object test {
 
-	class Foo[a](val arg : a) 
+        class Foo[a](val arg : a)
+
+        object Foo  {
+        def apply [a](arg : a, right :a) = new Foo[a](arg)
+        def unapply [a](m : Foo[a]) = Some (m.arg)
+        }
 
-	object Foo  { 
-	def apply [a](arg : a, right :a) = new Foo[a](arg) 
-	def unapply [a](m : Foo[a]) = Some (m.arg) 
-	} 
-	
 	def matchAndGetArgFromFoo[a]( e:Foo[a]):a = {e match { case Foo(x) => x }}
-	
-	
+
+
 	//  Try the same thing as above but use function as argument to Bar
 	// constructor
-	
+
 	type FunIntToA [a] = (Int) => a
-	class Bar[a] (var f: FunIntToA[a]) 
-	
+        class Bar[a] (var f: FunIntToA[a])
+
 	object Bar {
 		def apply[a](f: FunIntToA[a]) = new Bar[a](f)
 		def unapply[a](m: Bar[a]) = Some (m.f)
 	}
-	
+
 	def matchAndGetFunFromBar[a](b:Bar[a]) : FunIntToA[a] = { b match { case Bar(x) => x}}
-	 
+
 
 }
diff --git a/test/files/pos/t1168.scala b/test/files/pos/t1168.scala
index d9f3871..75638e7 100644
--- a/test/files/pos/t1168.scala
+++ b/test/files/pos/t1168.scala
@@ -1,5 +1,5 @@
 object Test extends App {
-  
+
   trait SpecialException {}
 
   try {
diff --git a/test/files/pos/t1203.scala b/test/files/pos/t1203.scala
deleted file mode 100644
index 062ef93..0000000
--- a/test/files/pos/t1203.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-case class ant(t: String) extends scala.annotation.Annotation
-object Test {
-   def main(args: Array[String]): Unit = {
-     val a: scala.xml.NodeSeq @ant("12") = Nil
-     println(a)
-   }
-}
diff --git a/test/files/pos/t1203a.scala b/test/files/pos/t1203a.scala
new file mode 100644
index 0000000..cf5ab9f
--- /dev/null
+++ b/test/files/pos/t1203a.scala
@@ -0,0 +1,13 @@
+class Node
+object NodeSeq {
+  implicit def seqToNodeSeq(s: Seq[Node]): NodeSeq = ???
+}
+abstract class NodeSeq extends collection.immutable.Seq[Node]
+
+case class ant(t: String) extends scala.annotation.Annotation
+object Test {
+   def main(args: Array[String]): Unit = {
+     val a: NodeSeq @ant("12") = Nil
+     println(a)
+   }
+}
diff --git a/test/files/pos/t1210a.scala b/test/files/pos/t1210a.scala
index fbb0a61..b3492f9 100644
--- a/test/files/pos/t1210a.scala
+++ b/test/files/pos/t1210a.scala
@@ -1,9 +1,9 @@
 // both styles of abstraction should behave the same
 // related to 1210 because that bug broke the OO version below
-trait OO {  
+trait OO {
   abstract class Test { self =>
     type T
-  
+
     val v: Test {type T = self.T} = self.v.v
   }
 }
diff --git a/test/files/pos/t122.scala b/test/files/pos/t122.scala
index e3daeef..630e24c 100644
--- a/test/files/pos/t122.scala
+++ b/test/files/pos/t122.scala
@@ -1,4 +1,4 @@
 class L {
-  val List(v:Int, 2) = List(2, v:Int) 
+  val List(v:Int, 2) = List(2, v:Int)
   val (a:Int, b:Int) = (1, a)
 }
diff --git a/test/files/pos/t1230/S.scala b/test/files/pos/t1230/S.scala
index f8a691b..530dd4b 100644
--- a/test/files/pos/t1230/S.scala
+++ b/test/files/pos/t1230/S.scala
@@ -1 +1 @@
-object S extends Application { (new J).foo = 5 }
+object S extends App { (new J).foo = 5 }
diff --git a/test/files/pos/t1231/S.scala b/test/files/pos/t1231/S.scala
index ee08866..f14aa25 100644
--- a/test/files/pos/t1231/S.scala
+++ b/test/files/pos/t1231/S.scala
@@ -1 +1 @@
-object S extends Application { println(J.j1) }
+object S extends App { println(J.j1) }
diff --git a/test/files/pos/t1236.scala b/test/files/pos/t1236.scala
index 7028162..5e221ce 100644
--- a/test/files/pos/t1236.scala
+++ b/test/files/pos/t1236.scala
@@ -1,5 +1,5 @@
 trait Empty[E[_]] {
-  def e[A]: E[A] 
+  def e[A]: E[A]
 }
 
 object T {
diff --git a/test/files/pos/t1237.scala b/test/files/pos/t1237.scala
index 0d1dd05..7777372 100644
--- a/test/files/pos/t1237.scala
+++ b/test/files/pos/t1237.scala
@@ -1,11 +1,11 @@
-class HelloWorld {  
-  def main(args: Array[String]) { 
+class HelloWorld {
+  def main(args: Array[String]) {
 
     object TypeBool;
 
     trait Fct {
       def g(x : Int) = TypeBool // breaks.
-      
+
       //    def g(x : Int) = 3 // fine.
     }
 
diff --git a/test/files/pos/t1254/t1254.java b/test/files/pos/t1254/t1254.java
index 17dc391..17e1c60 100644
--- a/test/files/pos/t1254/t1254.java
+++ b/test/files/pos/t1254/t1254.java
@@ -11,7 +11,7 @@ class NothingBug3 {
 	scala.Option<?> o = scala.None$.MODULE$;
 
         test(o);
-        None.toLeft(new scala.runtime.AbstractFunction0<Integer>() { 
+        None.toLeft(new scala.runtime.AbstractFunction0<Integer>() {
                 public Integer apply() { return 0; }
             });
     }
diff --git a/test/files/pos/t1263/test.scala b/test/files/pos/t1263/test.scala
index 7ced590..92d8c1c 100644
--- a/test/files/pos/t1263/test.scala
+++ b/test/files/pos/t1263/test.scala
@@ -2,7 +2,7 @@ package test
 
 trait Map[A, +B] {
   def plus(key: A): MapTo = new MapTo(key)
-    
+
   class MapTo(key: A) {
     def arrow [B1 >: B](value: B1) = null
   }
diff --git a/test/files/pos/t1272.scala b/test/files/pos/t1272.scala
index aab1a88..d86a909 100644
--- a/test/files/pos/t1272.scala
+++ b/test/files/pos/t1272.scala
@@ -2,8 +2,8 @@ object ImplicitTest {
   implicit val i : Int = 10
   implicit def a(implicit i : Int) : Array[Byte] = null
   implicit def b[T](implicit i : Int) : Array[T] = null
-  
+
   def fn[T](implicit x : T) = 0
-  
+
   val x = fn[Array[Byte]]
 }
\ No newline at end of file
diff --git a/test/files/pos/t1292.scala b/test/files/pos/t1292.scala
index 3ed153a..83a996d 100644
--- a/test/files/pos/t1292.scala
+++ b/test/files/pos/t1292.scala
@@ -1,5 +1,5 @@
 trait Foo[T <: Foo[T, Enum], Enum <: Enumeration] {
-  type StV = Enum#Value  
+  type StV = Enum#Value
   type Meta = MegaFoo[T, Enum]
 
   type Slog <: Enumeration
diff --git a/test/files/pos/t1318.scala b/test/files/pos/t1318.scala
index 3fc6e30..f3d2f7a 100644
--- a/test/files/pos/t1318.scala
+++ b/test/files/pos/t1318.scala
@@ -19,7 +19,7 @@ object A extends A0 {}
 
 abstract class B0 extends M {
     type mType = B0
-    def fs: List[fType] = Nil 
+    def fs: List[fType] = Nil
 }
 
 object B extends B0 {}
diff --git a/test/files/pos/t1357.scala b/test/files/pos/t1357.scala
index 7bc6d45..fcdecb3 100644
--- a/test/files/pos/t1357.scala
+++ b/test/files/pos/t1357.scala
@@ -6,7 +6,7 @@ object NonEmptyCons {
 object Main {
 
   type BT[+H, +T <: Tuple2[Tuple2[H, T], Tuple2[H, T]]] = Tuple2[H, T]
-  
+
   // type T = Tuple2[String,String]
   type BinaryTree[+E] = BT[E, T forSome { type T <: Tuple2[BT[E, T], BT[E, T]] }]
 
diff --git a/test/files/pos/t1385.scala b/test/files/pos/t1385.scala
index 59953bc..aefd9c3 100644
--- a/test/files/pos/t1385.scala
+++ b/test/files/pos/t1385.scala
@@ -1,3 +1,3 @@
-@serializable object Test {                    
-  private def readResolve:AnyRef = this
+object Test extends Serializable {
+  private def readResolve: AnyRef = this
 }
diff --git a/test/files/pos/t1439.flags b/test/files/pos/t1439.flags
index 1e70f5c..bca57e4 100644
--- a/test/files/pos/t1439.flags
+++ b/test/files/pos/t1439.flags
@@ -1 +1 @@
--unchecked -Xfatal-warnings -Xoldpatmat -language:higherKinds
+-unchecked -Xfatal-warnings -language:higherKinds
diff --git a/test/files/pos/t1480.scala b/test/files/pos/t1480.scala
index 3dc3062..1d9f94d 100644
--- a/test/files/pos/t1480.scala
+++ b/test/files/pos/t1480.scala
@@ -1,6 +1,6 @@
 class Foo{
   def compare(newP : Any, oldP : Any) : Boolean = (newP,oldP) match {
-    case (newP : AnyRef, oldP : AnyRef) if newP == oldP => newP == oldP 
-    case (newS : Symbol, oldS: Symbol) if newS == oldS => newS == oldS 
+    case (newP : AnyRef, oldP : AnyRef) if newP == oldP => newP == oldP
+    case (newS : Symbol, oldS: Symbol) if newS == oldS => newS == oldS
   }
 }
diff --git a/test/files/pos/t1560.scala b/test/files/pos/t1560.scala
index fb55920..2af299a 100644
--- a/test/files/pos/t1560.scala
+++ b/test/files/pos/t1560.scala
@@ -1,13 +1,13 @@
 object Test extends App {
-  
+
   trait C[T] {
     def t: T
   }
-  
+
   def b: Option[C[x] forSome { type x }] = null
-  
+
   def c = b match {
     case Some(b) => b.t
   }
-  
+
 }
diff --git a/test/files/pos/t1565.scala b/test/files/pos/t1565.scala
index 030086c..df33315 100644
--- a/test/files/pos/t1565.scala
+++ b/test/files/pos/t1565.scala
@@ -3,7 +3,7 @@ object Bug1565 {
   def x() = { 0; (a : Int, b : Int) => println(List(a, b)) ; 0  }
 
   (a : Int, b : Int) => println(List(a, b))
-  
+
   // various function syntaxes to exercise the parser
   val xs = List(1,2,3)
   xs.filter(x => x < 2)
diff --git a/test/files/pos/t1591b.scala b/test/files/pos/t1591b.scala
index 84372bb..c671ad6 100644
--- a/test/files/pos/t1591b.scala
+++ b/test/files/pos/t1591b.scala
@@ -1,10 +1,10 @@
 import scala.tools.nsc._
 
-class SemanticTokens(val compiler: Global) {   
-  import compiler._  
+class SemanticTokens(val compiler: Global) {
+  import compiler._
 
   def build() = ErrorType
-  
+
   class Process {
     def f() = analyzer
     // or to crash the compiler instead of a nice message,
diff --git a/test/files/pos/t1626.scala b/test/files/pos/t1626.scala
deleted file mode 100644
index 200be47..0000000
--- a/test/files/pos/t1626.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object o {
-  val n = <a xmlns=""/>
-  n.namespace == null
-}
diff --git a/test/files/pos/t1648.scala b/test/files/pos/t1648.scala
deleted file mode 100644
index 6d53ce1..0000000
--- a/test/files/pos/t1648.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test {
-  class MyClass extends scala.util.logging.Logged { }
-  val x = new MyClass with scala.util.logging.ConsoleLogger
-}
diff --git a/test/files/pos/t1711/Seq.scala b/test/files/pos/t1711/Seq.scala
index 5f426ea..c18f05c 100644
--- a/test/files/pos/t1711/Seq.scala
+++ b/test/files/pos/t1711/Seq.scala
@@ -3,7 +3,7 @@ package com
 object Sequence {
 
   def filteringFunction[V](filter: V => Boolean): List[V] => List[V] = {
-    def include(v: V) = 
+    def include(v: V) =
       filter(v)
     (l: List[V]) => l.filter(include)
   }
diff --git a/test/files/pos/t1722-A.scala b/test/files/pos/t1722-A.scala
index 9e522a5..d059bf2 100644
--- a/test/files/pos/t1722-A.scala
+++ b/test/files/pos/t1722-A.scala
@@ -1,8 +1,8 @@
 sealed trait Top
 trait C {
   private object P extends Top
-}                                                                                                                     
-/*  
+}
+/*
 $ scala -e 'new AnyRef with C'
 error: error while loading Top, class file '/private/tmp/bobobo/./Top.class' is broken
 (error reading Scala signature of /private/tmp/bobobo/./Top.class: malformed Scala signature of Top at 185; reference value P of trait C refers to nonexisting symbol.)
diff --git a/test/files/pos/t1722/Test.scala b/test/files/pos/t1722/Test.scala
index 5685d8f..f236d3f 100755
--- a/test/files/pos/t1722/Test.scala
+++ b/test/files/pos/t1722/Test.scala
@@ -1,5 +1,5 @@
 package t1722
 
 object Test {
-  val x = new AnyRef with C 
+  val x = new AnyRef with C
 }
diff --git a/test/files/pos/t1722/Top.scala b/test/files/pos/t1722/Top.scala
index cec4c53..4ac5241 100755
--- a/test/files/pos/t1722/Top.scala
+++ b/test/files/pos/t1722/Top.scala
@@ -3,8 +3,8 @@ package t1722
 sealed trait Top
 trait C {
   private object P extends Top
-}                                                                                                                     
-/*  
+}
+/*
 $ scala -e 'new AnyRef with C'
 error: error while loading Top, class file '/private/tmp/bobobo/./Top.class' is broken
 (error reading Scala signature of /private/tmp/bobobo/./Top.class: malformed Scala signature of Top at 185; reference value P of trait C refers to nonexisting symbol.)
diff --git a/test/files/pos/t1745/J.java b/test/files/pos/t1745/J.java
index d95efe8..8444eab 100644
--- a/test/files/pos/t1745/J.java
+++ b/test/files/pos/t1745/J.java
@@ -1,9 +1,9 @@
-class J { 
+class J {
   S1 s1;
   S2 s2;
-  
+
   String s = bar(S3.foo(), S3.bar("def"));
-  
+
   private String bar(String s1, String s2) {
     return s1 + s2;
   }
diff --git a/test/pending/pos/t1751/A1_2.scala b/test/files/pos/t1751/A1_2.scala
similarity index 100%
rename from test/pending/pos/t1751/A1_2.scala
rename to test/files/pos/t1751/A1_2.scala
diff --git a/test/pending/pos/t1751/A2_1.scala b/test/files/pos/t1751/A2_1.scala
similarity index 100%
rename from test/pending/pos/t1751/A2_1.scala
rename to test/files/pos/t1751/A2_1.scala
diff --git a/test/pending/pos/t1751/SuiteClasses.java b/test/files/pos/t1751/SuiteClasses.java
similarity index 100%
rename from test/pending/pos/t1751/SuiteClasses.java
rename to test/files/pos/t1751/SuiteClasses.java
diff --git a/test/files/pos/t1756.scala b/test/files/pos/t1756.scala
index 2e09c8a..1d067c3 100755
--- a/test/files/pos/t1756.scala
+++ b/test/files/pos/t1756.scala
@@ -1,5 +1,5 @@
 
-/** 
+/**
 This is a tricky issue which has to do with the fact that too much conflicting
 type information is propagated into a single implicit search, where the intended
 solution applies two implicit searches.
@@ -35,20 +35,20 @@ class Poly[C <: Ring[C]](val c: C) extends Ring[Poly[C]] {
 }
 
 object Test extends App {
-  
+
   implicit def coef2poly[C <: Ring[C]](c: C): Poly[C] = new Poly(c)
 
   val a = new A
   val x = new Poly(new A)
-  
+
   println(x+a) // works
   println(a+x) // works
-  
+
   val y = new Poly(new Poly(new A))
-  
+
   println(x+y*x) // works
   println(x*y+x) // works
   println(y*x+x) // works
-  
+
   println(x+x*y) // failed before
 }
diff --git a/test/files/pos/t1761.scala b/test/files/pos/t1761.scala
deleted file mode 100644
index 2af7280..0000000
--- a/test/files/pos/t1761.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.xml._
-
-class Foo {
-  val elements: Seq[Node] = Nil
-  val innerTransform: PartialFunction[Elem, String] = {
-    case Elem(_, l: String, _, _, _ @ _*) if elements.exists(_.label == l) => 
-      l
-  }
-}
-
diff --git a/test/pending/pos/t1782/Ann.java b/test/files/pos/t1782/Ann.java
similarity index 100%
rename from test/pending/pos/t1782/Ann.java
rename to test/files/pos/t1782/Ann.java
diff --git a/test/pending/pos/t1782/Days.java b/test/files/pos/t1782/Days.java
similarity index 100%
rename from test/pending/pos/t1782/Days.java
rename to test/files/pos/t1782/Days.java
diff --git a/test/pending/pos/t1782/ImplementedBy.java b/test/files/pos/t1782/ImplementedBy.java
similarity index 100%
rename from test/pending/pos/t1782/ImplementedBy.java
rename to test/files/pos/t1782/ImplementedBy.java
diff --git a/test/pending/pos/t1782/Test_1.scala b/test/files/pos/t1782/Test_1.scala
similarity index 100%
rename from test/pending/pos/t1782/Test_1.scala
rename to test/files/pos/t1782/Test_1.scala
diff --git a/test/files/pos/t1786-counter.scala b/test/files/pos/t1786-counter.scala
new file mode 100644
index 0000000..c1ad2c2
--- /dev/null
+++ b/test/files/pos/t1786-counter.scala
@@ -0,0 +1,38 @@
+trait ShapeLevel
+
+object Fail {
+  abstract class ProductNodeShape[Level <: ShapeLevel, C, M <: C, U <: C, P <: C] extends Shape[Level, M, U, P] {
+    def copy(shapes: Seq[Shape[_, _, _, _]]): Shape[Level, _, _, _]
+  }
+
+  abstract class Shape[Level <: ShapeLevel, -Mixed_, Unpacked_, Packed_]
+
+  final class TupleShape[Level <: ShapeLevel, M <: Product, U <: Product, P <: Product](val shapes: Shape[_, _, _, _]*) extends ProductNodeShape[Level, Product, M, U, P] {
+    def copy(shapes: Seq[Shape[_, _, _, _]]): Shape[Level, _, _, _] = ???
+  }
+
+  trait ShapeLevel
+}
+
+object Ok {
+  abstract class Shape[Level <: ShapeLevel, -Mixed_, Unpacked_, Packed_]
+
+  abstract class ProductNodeShape[Level <: ShapeLevel, C, M <: C, U <: C, P <: C] extends Shape[Level, M, U, P] {
+    def copy(shapes: Seq[Shape[_, _, _, _]]): Shape[Level, _, _, _]
+  }
+
+  final class TupleShape[Level <: ShapeLevel, M <: Product, U <: Product, P <: Product](val shapes: Shape[_, _, _, _]*) extends ProductNodeShape[Level, Product, M, U, P] {
+    def copy(shapes: Seq[Shape[_, _, _, _]]): Shape[Level, _, _, _] = ???
+  }
+}
+
+// This is why we reverted the fix for SI-1786 -- see SI-6169 for a potential alternative that could be extended to cover this.
+// both objects type check on 2.10.3, but only Ok was accepted by 2.11 after the original fix to SI-1786.
+// Fail results in:
+/*
+t1786-counter.scala:10: error: class TupleShape needs to be abstract, since method copy in class ProductNodeShape of type (shapes: Seq[Fail.Shape[_, _, _, _]])Fail.Shape[Level, _, _, _] is not defined
+(Note that Seq[Fail.Shape[_, _, _, _]] does not match Seq[Fail.Shape[_ <: Fail.ShapeLevel, _, _, _]]: their type parameters differ)
+  final class TupleShape[Level <: ShapeLevel, M <: Product, U <: Product, P <: Product](val shapes: Shape[_, _, _, _]*) extends ProductNodeShape[Level, Product, M, U, P] {
+              ^
+one error found
+*/
\ No newline at end of file
diff --git a/test/files/pos/t1786-cycle.scala b/test/files/pos/t1786-cycle.scala
new file mode 100644
index 0000000..af5d892
--- /dev/null
+++ b/test/files/pos/t1786-cycle.scala
@@ -0,0 +1,57 @@
+trait GenTraversableLike[+A, +Repr] extends Any
+
+object O {
+  (null: Any) match {
+    case _: LongTraversableLike[_] =>
+  }
+}
+
+trait LongTraversable extends LongTraversableLike[LongTraversable]
+
+trait LongTraversableLike[+Repr <: LongTraversableLike[Repr]] extends GenTraversableLike[Any, Repr]
+
+/*
+% scalac-hash v2.11.0-M8 test/files/pos/t1786-cycle.scala
+[warn] v2.11.0-M8 failed, using closest available
+test/files/pos/t1786-cycle.scala:11: error: illegal cyclic reference involving trait LongTraversableLike
+trait LongTraversableLike[+Repr <: LongTraversableLike[Repr]] extends GenTraversableLike[Any, Repr]
+                                                                      ^
+one error found
+
+Okay again after SI-1786 was reverted.
+
+
+|-- object O BYVALmode-EXPRmode (site: package <empty>)
+|    |-- super EXPRmode-POLYmode-QUALmode (silent: <init> in O)
+|    |    |-- this EXPRmode (silent: <init> in O)
+|    |    |    \-> O.type
+|    |    \-> O.type
+|    |-- (null: Any) match { case (_: LongTraversableLike[(_ @ <em... BYVALmode-EXPRmode (site: value <local O> in O)
+|    |    |-- (null: Any) BYVALmode-EXPRmode (site: value <local O> in O)
+|    |    |    |-- Any TYPEmode (site: value <local O> in O)
+|    |    |    |    \-> Any
+|    |    |    |-- null : pt=Any EXPRmode (site: value <local O> in O)
+|    |    |    |    \-> Null(null)
+|    |    |    \-> Any
+|    |    |-- (_: LongTraversableLike[(_ @ <empty>)]) : pt=Any PATTERNmode (site: value <local O> in O) enrichment only
+|    |    |    |-- LongTraversableLike[(_ @ <empty>)] TYPEPATmode-TYPEmode (site: value <local O> in O) enrichment only
+|    |    |    |    |--  <: LongTraversableLike[Repr] TYPEmode (site: type Repr in <empty>)
+|    |    |    |    |    |-- LongTraversableLike[Repr] TYPEmode (site: type Repr in <empty>)
+|    |    |    |    |    |    |-- Repr NOmode (site: type Repr in <empty>)
+|    |    |    |    |    |    |    \-> Repr
+|    |    |    |    |    |    \-> LongTraversableLike[Repr]
+|    |    |    |    |    [adapt]  <: LongTraversableLike[Repr] is now a TypeTree( <: LongTraversableLike[Repr])
+|    |    |    |    |    \->  <: LongTraversableLike[Repr]
+|    |    |    |    |-- (_ @ <empty>) TYPEPATmode-TYPEmode (site: value <local O> in O) enrichment only
+|    |    |    |    |    \-> _
+|    |    |    |    |-- GenTraversableLike FUNmode-TYPEmode (site: trait LongTraversableLike)
+|    |    |    |    |    \-> GenTraversableLike
+|    |    |    |    |-- GenTraversableLike[Any, Repr] TYPEmode (site: trait LongTraversableLike)
+|    |    |    |    |    |-- Any TYPEmode (site: trait LongTraversableLike)
+|    |    |    |    |    |    \-> Any
+|    |    |    |    |    |-- Repr TYPEmode (site: trait LongTraversableLike)
+|    |    |    |    |    |    \-> Repr
+|    |    |    |    |    caught scala.reflect.internal.Symbols$CyclicReference: illegal cyclic reference involving trait LongTraversableLike: while typing GenTraversableLike[Any, Repr]
+test/files/pos/t1786-cycle.scala:11: error: illegal cyclic reference involving trait LongTraversableLike
+trait LongTraversableLike[+Repr <: LongTraversableLike[Repr]] extends GenT
+*/
\ No newline at end of file
diff --git a/test/files/pos/t1798.scala b/test/files/pos/t1798.scala
index 93df61e..1624e30 100644
--- a/test/files/pos/t1798.scala
+++ b/test/files/pos/t1798.scala
@@ -2,7 +2,7 @@ object Foo { private def bar(): Int = 55 }
 class Foo(x: Int) { def this() = this(Foo.bar()) }
 
 /*
- * scalac28 a.scala 
+ * scalac28 a.scala
 a.scala:2: error: method bar cannot be accessed in object Foo
 class Foo(x: Int) { def this() = this(Foo.bar()) }
                                           ^
diff --git a/test/files/pos/t1832.scala b/test/files/pos/t1832.scala
index c7b1ffb..f3bb556 100644
--- a/test/files/pos/t1832.scala
+++ b/test/files/pos/t1832.scala
@@ -1,7 +1,7 @@
 trait Cloning {
   trait Foo
   def fn(g: Any => Unit): Foo
-  
+
   implicit def mkStar(i: Int) = new { def *(a: Foo): Foo = null }
 
   val pool = 4 * fn { case ghostSYMBOL: Int => ghostSYMBOL * 2 }
diff --git a/test/files/pos/t1840/J.java b/test/files/pos/t1840/J.java
index a697596..fd98b6c 100644
--- a/test/files/pos/t1840/J.java
+++ b/test/files/pos/t1840/J.java
@@ -1,4 +1,4 @@
 package p;
-class J { 
-  J() {} 
+class J {
+  J() {}
 }
diff --git a/test/files/pos/t1909.scala b/test/files/pos/t1909.scala
deleted file mode 100644
index 01213f6..0000000
--- a/test/files/pos/t1909.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-// Until #1909 is fixed, if this compiles the bytecode
-// will trigger a VerifyError.  This liftings and the one
-// in 1909b.scala actually happen in two different places
-// (uncurry and lambdalifter.)
-class Ticket1909 {
-  def this(value: Int) = this()
-  def this(p: String) = this(try 0)
-}
diff --git a/test/files/pos/t1909b-pos.scala b/test/files/pos/t1909b-pos.scala
deleted file mode 100644
index b914bee..0000000
--- a/test/files/pos/t1909b-pos.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-class Ticket1909 (x: Int) {
-  def this() = this({
-    def bar() = 5
-    bar
-  })
-}
\ No newline at end of file
diff --git a/test/files/pos/t1942.cmds b/test/files/pos/t1942.cmds
deleted file mode 100644
index c143110..0000000
--- a/test/files/pos/t1942.cmds
+++ /dev/null
@@ -1,2 +0,0 @@
-scalac A_1.scala
-scalac Test_2.scala
diff --git a/test/files/pos/t1957.scala b/test/files/pos/t1957.scala
index f80cf73..711ce17 100644
--- a/test/files/pos/t1957.scala
+++ b/test/files/pos/t1957.scala
@@ -23,7 +23,7 @@ object Test {
         final type commonModuleType = Module {type settingsType = self.settingsType}
         type selfType >: self.type <: commonModuleType
 
-        // BTW: if we use the commented out type declarations, the code compiles successfully
+        // BTW: if we use the commented out type decls, the code compiles successfully
         // type gristType = Grist {type settingsType <: self.settingsType; type moduleType <: commonModuleType }
 
         val tools: List[Tool {type settingsType = self.settingsType}]
diff --git a/test/files/pos/t1974.scala b/test/files/pos/t1974.scala
index 3d28478..a0daa13 100644
--- a/test/files/pos/t1974.scala
+++ b/test/files/pos/t1974.scala
@@ -1,20 +1,20 @@
 object Broken {
   private var map = Map[Class[_], String]()
-  
+
   def addToMap(c : Class[_], s : String) = map += (c -> s)
   def fetch(c : Class[_]) = map(c)
 }
 
 object Works {
   private var map = Map[Class[_], String]()
-  
+
   def addToMap(c : Class[_], s : String) = map += ((c, s))
   def fetch(c : Class[_]) = map(c)
 }
 
 object Works2 {
   private var map = Map[Class[_], String]()
-  
+
   def addToMap(c : Class[_], s : String) = map += ((c : Class[_]) -> s)
   def fetch(c : Class[_]) = map(c)
 }
\ No newline at end of file
diff --git a/test/files/pos/t2023.scala b/test/files/pos/t2023.scala
index de3e848..21c6fc9 100644
--- a/test/files/pos/t2023.scala
+++ b/test/files/pos/t2023.scala
@@ -3,11 +3,11 @@ trait C[A]
 object C {
   implicit def ipl[A](implicit from: A => Ordered[A]): C[A] = null
 }
-	
+
 object P {
   def foo[A](i: A, j: A)(implicit c: C[A]): Unit = ()
 }
-	
+
 class ImplicitChainTest {
   def testTrivial: Unit = {
     P.foo('0', '9')
diff --git a/test/files/pos/t2060.scala b/test/files/pos/t2060.scala
index cf7250f..2c70115 100755
--- a/test/files/pos/t2060.scala
+++ b/test/files/pos/t2060.scala
@@ -4,7 +4,7 @@
  * line':
  *
  *   val failure = 1.0 + new Op[Int]
- * 
+ *
  * we reduce the problem to finding a function from Double to
  * {+: _ >: Op[Int] <: Any}, that is, a method which takes
  * an argument which is an Op[Int] or a supertype thereof.
diff --git a/test/files/pos/t2066-2.10-compat.flags b/test/files/pos/t2066-2.10-compat.flags
new file mode 100644
index 0000000..94c8056
--- /dev/null
+++ b/test/files/pos/t2066-2.10-compat.flags
@@ -0,0 +1 @@
+-Xsource:2.10
diff --git a/test/files/pos/t2066-2.10-compat.scala b/test/files/pos/t2066-2.10-compat.scala
new file mode 100644
index 0000000..fb8103e
--- /dev/null
+++ b/test/files/pos/t2066-2.10-compat.scala
@@ -0,0 +1,71 @@
+import language._
+trait A1 {
+  def f[T[_]] = ()
+}
+
+trait B1 extends A1 {
+  override def f[T[+_]] = ()
+}
+
+trait C1 extends A1 {
+  override def f[T[-_]] = ()
+}
+
+
+trait A2 {
+  def f[T[+_]] = ()
+}
+
+trait B2 extends A2 {
+  override def f[T[_]] = () // okay
+}
+
+trait C2 extends A2 {
+  override def f[T[-_]] = ()
+}
+
+
+trait A3 {
+  def f[T[-_]] = ()
+}
+
+trait B3 extends A3 {
+  override def f[T[_]] = () // okay
+}
+
+trait C3 extends A3 {
+  override def f[T[-_]] = ()
+}
+
+
+trait A4 {
+  def f[T[X[+_]]] = ()
+}
+
+trait B4 extends A4 {
+  override def f[T[X[_]]] = ()
+}
+
+trait A5 {
+  def f[T[X[-_]]] = ()
+}
+
+trait B5 extends A5 {
+  override def f[T[X[_]]] = ()
+}
+
+
+
+trait A6 {
+  def f[T[X[_]]] = ()
+}
+
+trait B6 extends A6 {
+  override def f[T[X[+_]]] = () // okay
+}
+trait C6 extends A6 {
+  override def f[T[X[_]]] = () // okay
+}
+trait D6 extends A6 {
+  override def f[T[X[-_]]] = ()
+}
diff --git a/test/files/pos/t2066.scala b/test/files/pos/t2066.scala
new file mode 100644
index 0000000..30cb99d
--- /dev/null
+++ b/test/files/pos/t2066.scala
@@ -0,0 +1,25 @@
+trait A1 {
+  def f[T[+_]] = ()
+}
+
+trait B1 extends A1 {
+  override def f[T[_]] = ()
+}
+
+
+trait A2 {
+  def f[T[-_]] = ()
+}
+
+trait B2 extends A2 {
+  override def f[T[_]] = ()
+}
+
+
+trait A3 {
+  def f[T[X[_]]] = ()
+}
+
+trait B3 extends A3 {
+  override def f[T[X[+_]]] = ()
+}
diff --git a/test/files/pos/t2081.scala b/test/files/pos/t2081.scala
index d772c02..395134f 100644
--- a/test/files/pos/t2081.scala
+++ b/test/files/pos/t2081.scala
@@ -7,5 +7,5 @@ object ScalaForRubyists {
 
   val x = 10.days
   // a couple parser corner cases I wanted not to break
-  val y = 5.e0 + 5e7  
+  val y = 5.0e0 + 5e7
 }
diff --git a/test/files/pos/t2082.scala b/test/files/pos/t2082.scala
index 38937d7..3a16061 100755
--- a/test/files/pos/t2082.scala
+++ b/test/files/pos/t2082.scala
@@ -1,10 +1,10 @@
 
 trait Mapper[T <: Mapper[T]]
 
-trait KeyedMapper[KeyType, T <: KeyedMapper[KeyType, T]] extends Mapper[T] 
+trait KeyedMapper[KeyType, T <: KeyedMapper[KeyType, T]] extends Mapper[T]
 
 
-trait KeyedMetaMapper[KeyType, T <: KeyedMapper[KeyType, T]] 
+trait KeyedMetaMapper[KeyType, T <: KeyedMapper[KeyType, T]]
 
 trait MappedForeignKey[KeyType, Owner <: Mapper[Owner], Other <: KeyedMapper[KeyType, Other]]
 
@@ -19,19 +19,19 @@ class TestRun extends KeyedMapper[Long, TestRun] with IdPK {
 object TestRun extends TestRun with KeyedMetaMapper[Long, TestRun]
 
 class MetaTestSubject extends TestSubject with KeyedMetaMapper[Long, TestSubject]
-object TestSubject extends MetaTestSubject 
+object TestSubject extends MetaTestSubject
 
 object Main {
-  
+
   def oneToOneJoin[PType <: KeyedMapper[Long, PType] with IdPK,
                    CType <: KeyedMapper[Long, CType] with IdPK,
-                   CMetaType <: CType with KeyedMetaMapper[Long, CType], 
+                   CMetaType <: CType with KeyedMetaMapper[Long, CType],
                    FKType <: MappedForeignKey[Long, PType, CType]]
-  (parents: List[PType], metaMapper: CMetaType, keyGetter: (PType) => FKType ): 
+  (parents: List[PType], metaMapper: CMetaType, keyGetter: (PType) => FKType ):
   Map[Long, CType] = Map.empty
-  
+
   def callIt {
-    oneToOneJoin[TestRun, TestSubject, MetaTestSubject, 
+    oneToOneJoin[TestRun, TestSubject, MetaTestSubject,
                  MappedForeignKey[Long, TestRun, TestSubject]](
     List(), TestSubject, (tr: TestRun) => tr.testSubject)
   }
diff --git a/test/files/pos/t2130-2.scala b/test/files/pos/t2130-2.scala
index 464f5e0..1d0b33c 100644
--- a/test/files/pos/t2130-2.scala
+++ b/test/files/pos/t2130-2.scala
@@ -10,7 +10,7 @@ package object bar {
     class Dingus
     object Dingus
     case class Dongus(x: Float)
-    
+
     def apply(xs: Int*) = new Bippy(xs.sum)
     def apply() = new Bippy(5)
   }
diff --git a/test/files/pos/t2133.scala b/test/files/pos/t2133.scala
index 99bac5c..c74d0a4 100644
--- a/test/files/pos/t2133.scala
+++ b/test/files/pos/t2133.scala
@@ -13,6 +13,6 @@ trait Foo2 {
 class Bob extends AnyRef with Foo with Foo2 {
   import bip._
   import bar._
-  
+
   def go() = fn()
 }
diff --git a/test/files/pos/t2168.scala b/test/files/pos/t2168.scala
index 845c5b7..21afb23 100644
--- a/test/files/pos/t2168.scala
+++ b/test/files/pos/t2168.scala
@@ -2,5 +2,3 @@ object Test extends App {
   def foo1(x: AnyRef) = x match { case x: Function0[_] => x() }
   def foo2(x: AnyRef) = x match { case x: Function0[Any] => x() }
 }
-  
-  
diff --git a/test/files/pos/t2171.scala b/test/files/pos/t2171.scala
index a5663c9..6c754c7 100644
--- a/test/files/pos/t2171.scala
+++ b/test/files/pos/t2171.scala
@@ -3,5 +3,5 @@ final object test {
     try 0 catch { case ex => println(msg) }
 
     def main (args: Array[String]): Unit =
-      while (true) logIgnoredException ("...") 
+      while (true) logIgnoredException ("...")
 }
diff --git a/test/files/pos/t2261.scala b/test/files/pos/t2261.scala
index af24234..aac5c9e 100644
--- a/test/files/pos/t2261.scala
+++ b/test/files/pos/t2261.scala
@@ -5,5 +5,5 @@ object Test {
   x = List(1,2,3)
   // the problem here was that somehow the type variable that was used to infer the type argument for List.apply
   // would accumulate several conflicting constraints
-  // can't reproduce with 
+  // can't reproduce with
 }
\ No newline at end of file
diff --git a/test/files/pos/t2281.scala b/test/files/pos/t2281.scala
deleted file mode 100644
index 3515d2e..0000000
--- a/test/files/pos/t2281.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-import scala.collection.mutable.ArrayBuffer
-
-class A {
-  def f(x: Boolean) = if (x) <br/><br/> else <br/>
-}
-
-class B {
-  def splitSentences(text : String) : ArrayBuffer[String] = {
-     val outarr = new ArrayBuffer[String]
-     var outstr = new StringBuffer
-     var prevspace = false
-     val ctext = text.replaceAll("\n+","\n")
-  		ctext foreach {c =>
-  		    outstr append c
-  			if(c == '.' || c == '!' || c == '?' || c == '\n' || c == ':' || c == ';' || (prevspace && c == '-') ){
-  				outarr += outstr.toString
-  				outstr = new StringBuffer
-  			}
-  		    if(c == '\n'){
-  		    	outarr += "\n\n"
-  		    }
-  		    prevspace = c == ' '
-  		}
-  	    if(outstr.length > 0){
-  	    	outarr += outstr.toString
-  	    }
-  	    outarr
-  	  }
-
-  def spanForSentence(x : String,picktext : String) = 
-    if(x == "\n\n"){
-      <br/><br/>
-    }else{
-      <span class='clicksentence' style={if(x == picktext) "background-color: yellow" else ""}>{x}</span>    		
-    }
-
-  def selectableSentences(text : String, picktext : String) = {
-    val sentences = splitSentences(text)
-    sentences.map(x => spanForSentence(x,picktext))
-  }
-}
\ No newline at end of file
diff --git a/test/files/pos/t2305.scala b/test/files/pos/t2305.scala
index d0b103f..6b66c5d 100644
--- a/test/files/pos/t2305.scala
+++ b/test/files/pos/t2305.scala
@@ -1,6 +1,6 @@
 import java.util.ArrayList
 
-trait Bind[Z[_]] 
+trait Bind[Z[_]]
 
 class MySerializable[X] extends java.io.Serializable
 
@@ -17,7 +17,7 @@ object works {
 
 object breaks {
 	def runbind(implicit bind: Bind[ArrayList]) {}
-	runbind  
+        runbind
 	/*java.lang.AssertionError: assertion failed: java.io.Serializable
 		at scala.Predef$.assert(Predef.scala:107)
 		at scala.tools.nsc.symtab.Types$TypeRef.transform(Types.scala:1417)
diff --git a/test/files/pos/t2310.scala b/test/files/pos/t2310.scala
index e08411a..68912b4 100644
--- a/test/files/pos/t2310.scala
+++ b/test/files/pos/t2310.scala
@@ -1,15 +1,15 @@
 import scala.Stream._
 
 object consistencyError {
-  /* this gives an error: 
+  /* this gives an error:
   Consistency problem compiling (virtual file)!
   Trying to call method body%1(List(scala.collection.immutable.Stream[A])) with arguments (List(tp2, temp6, temp5))
       case (l #:: ls, rs) => None
                               ^
   scala.tools.nsc.symtab.Types$TypeError: too many arguments for method body%1: (val rs: scala.collection.immutable.Stream[A])None.type
-  
+
   two errors found
-  vss(0) = 
+  vss(0) =
   args = List(tp2, temp6, temp5)
   vss(1) = value rs, value ls, value l
   args = List(tp2, temp6, temp5)
@@ -18,19 +18,19 @@ object consistencyError {
   labels(1) = method body%1
   labels(0) = method body%0
   bx = 1
-  label.tpe = (val rs: scala.collection.immutable.Stream[A])None.type    
+  label.tpe = (val rs: scala.collection.immutable.Stream[A])None.type
   */
   def crash[A](lefts: Stream[A], rights: Stream[A]) = (lefts, rights) match {
     case (Stream.Empty, Stream.Empty) => None
     case (l #:: ls, rs) => None
   }
-  
+
   // These work
   // def works1[A](lefts: Stream[A]) = lefts match {
   //   case Stream.Empty => None
   //   case l #:: ls => None
   // }
-  // 
+  //
   // def works2[A](lefts: Stream[A], rights: Stream[A]) = (lefts, rights) match {
   //   case (Stream.Empty, Stream.Empty) => None
   //   case (ls, rs) => None
diff --git a/test/files/pos/t2331.scala b/test/files/pos/t2331.scala
index 9a15b5c..a7f80ac 100644
--- a/test/files/pos/t2331.scala
+++ b/test/files/pos/t2331.scala
@@ -4,8 +4,8 @@ trait C {
 
 object Test {
   val o /*: C --> no crash*/ = new C {
-    def m[T]: Nothing /*: T --> no crash*/ = error("omitted")
+    def m[T]: Nothing /*: T --> no crash*/ = sys.error("omitted")
   }
 
   o.m[Nothing]
-}
\ No newline at end of file
+}
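
Several of the hunks around this point do more than strip trailing whitespace: they also replace calls to the long-deprecated Predef.error with sys.error, which the 2.11 test sources use throughout. A minimal standalone sketch of the equivalent change, not part of the upstream patch:

object ErrorMigrationSketch {
  // older test sources wrote: def stub[T]: T = error("omitted")
  // sys.error likewise throws a RuntimeException carrying the message
  def stub[T]: T = sys.error("omitted")

  def main(args: Array[String]): Unit =
    try stub[Int] catch { case e: RuntimeException => println(e.getMessage) }
}
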
diff --git a/test/files/pos/t2399.scala b/test/files/pos/t2399.scala
index b009f78..07882dd 100644
--- a/test/files/pos/t2399.scala
+++ b/test/files/pos/t2399.scala
@@ -3,12 +3,12 @@ trait That2[A, R <: That2[A, R]]
 
 trait T[A, This >: Null <: That1[A] with T[A, This]] extends That2[A, This] {
   self: This =>
-  
+
   private var next: This = _
   def isEmpty = next eq null
-  
+
   def length: Int = {
     def loop(x: This, cnt: Int): Int = if (x.isEmpty) cnt else loop(x.next, cnt + 1)
     loop(self, 0)
-  }  
+  }
 }
\ No newline at end of file
diff --git a/test/files/pos/t2413/TestScalac.scala b/test/files/pos/t2413/TestScalac.scala
index 6992a30..098e852 100644
--- a/test/files/pos/t2413/TestScalac.scala
+++ b/test/files/pos/t2413/TestScalac.scala
@@ -4,7 +4,7 @@ class Foo extends TestJava {
 
    // THIS METHOD YIELDS TO CRASH
 /*   def foomethod : Option[String] => Unit = {
-      case None =>  
+      case None =>
         val path = repeatParam("s","a","b","c")
         ()
       case Some(error) =>
diff --git a/test/files/pos/t2421.scala b/test/files/pos/t2421.scala
index 26e485c..2544a1c 100644
--- a/test/files/pos/t2421.scala
+++ b/test/files/pos/t2421.scala
@@ -1,14 +1,14 @@
 object Test {
   abstract class <~<[-From, +To] extends (From => To)
-  implicit def trivial[A]: A <~< A = error("")
+  implicit def trivial[A]: A <~< A = sys.error("")
 
 
   trait Forcible[T]
-  implicit val forcibleInt: (Int <~< Forcible[Int]) = error("")
+  implicit val forcibleInt: (Int <~< Forcible[Int]) = sys.error("")
 
-  def headProxy[P <: Forcible[Int]](implicit w: Int <~< P): P = error("")
-  
-  headProxy 
-  // trivial[Int] should not be considered a valid implicit, since w would have type Int <~< Int, 
+  def headProxy[P <: Forcible[Int]](implicit w: Int <~< P): P = sys.error("")
+
+  headProxy
+  // trivial[Int] should not be considered a valid implicit, since w would have type Int <~< Int,
   // and headProxy's type parameter P cannot be instantiated to Int
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t2421_delitedsl.scala b/test/files/pos/t2421_delitedsl.scala
index ad6afa7..a058870 100644
--- a/test/files/pos/t2421_delitedsl.scala
+++ b/test/files/pos/t2421_delitedsl.scala
@@ -1,10 +1,10 @@
 trait DeliteDSL {
   abstract class <~<[-From, +To] extends (From => To)
-  implicit def trivial[A]: A <~< A = new (A <~< A) {def apply(x: A) = x}  
+  implicit def trivial[A]: A <~< A = new (A <~< A) {def apply(x: A) = x}
 
   trait Forcible[T]
   object Forcible {
-    def factory[T](f: T => Forcible[T]) = new (T <~< Forcible[T]){def apply(x: T) = f(x)}  
+    def factory[T](f: T => Forcible[T]) = new (T <~< Forcible[T]){def apply(x: T) = f(x)}
   }
 
   case class DeliteInt(x: Int) extends Forcible[Int]
@@ -22,16 +22,16 @@ trait DeliteDSL {
   // If T is already a proxy (it is forcible), the compiler should use
   // forcibleIdentity to deduce that P=T.  If T is Int, the compiler
   // should use intToForcible to deduce that P=DeliteInt.
-  // 
+  //
   // Without this feature, the user must write 'xs.proxyOfFirst[DeliteInt]',
   // with the feature they can write 'xs.proxyOfFirst', which is shorter and
   // avoids exposing internal DELITE types to the world.
 
   object Test {
-    val x = new DeliteCollection(List(1,2,3)).headProxy 
+    val x = new DeliteCollection(List(1,2,3)).headProxy
     // inferred: val x: Forcible[Int] = new DeliteCollection[Int](List.apply[Int](1, 2, 3)).headProxy[Forcible[Int]](forcibleInt);
 
-    val xAlready = new DeliteCollection(List(DeliteInt(1),DeliteInt(2),DeliteInt(3))).headProxy 
+    val xAlready = new DeliteCollection(List(DeliteInt(1),DeliteInt(2),DeliteInt(3))).headProxy
     // inferred: val xAlready: DeliteInt = new DeliteCollection[DeliteInt](List.apply[DeliteInt](DeliteInt(1), DeliteInt(2), DeliteInt(3))).headProxy[DeliteInt](trivial[DeliteInt]);
   }
 }
\ No newline at end of file
diff --git a/test/files/pos/t2421b_pos.scala b/test/files/pos/t2421b_pos.scala
index 8b848ab..0df3461 100644
--- a/test/files/pos/t2421b_pos.scala
+++ b/test/files/pos/t2421b_pos.scala
@@ -11,7 +11,7 @@ object Test {
 
   f
 }
-/* bug: 
+/* bug:
 error: ambiguous implicit values:
  both method b in object Test1 of type [X <: Test1.B]Test1.F[X]
  and method a in object Test1 of type => Test1.F[Test1.A]
diff --git a/test/files/pos/t2429.scala b/test/files/pos/t2429.scala
index 3ea3f9e..550681b 100755
--- a/test/files/pos/t2429.scala
+++ b/test/files/pos/t2429.scala
@@ -1,10 +1,10 @@
 object Msg {
   trait T
-  
+
   trait TSeq
-  
+
   object TSeq {
-    implicit def fromSeq(s: Seq[T]): TSeq = error("stub")
+    implicit def fromSeq(s: Seq[T]): TSeq = sys.error("stub")
   }
 
   def render {
@@ -12,7 +12,7 @@ object Msg {
       case (a, b) => {
         a match {
           case _ => b match {
-            case _ => error("stub")
+            case _ => sys.error("stub")
           }
         }
       }
@@ -20,6 +20,6 @@ object Msg {
   }
 }
 object Oops {
- implicit def someImplicit(s: Seq[_]): String = error("stub")
+ implicit def someImplicit(s: Seq[_]): String = sys.error("stub")
  def item: String = Nil map { case e: Any => e }
 }
diff --git a/test/files/pos/t2435.scala b/test/files/pos/t2435.scala
index 2db931b..697e9e1 100644
--- a/test/files/pos/t2435.scala
+++ b/test/files/pos/t2435.scala
@@ -9,7 +9,7 @@ object Bug {
   case class FConstant[E <: FChain](constant:String, tail:E) extends FChain {
     type T = tail.T
   }
-  
+
   object FNil extends FChain {
     type T = Unit
   }
diff --git a/test/files/pos/t2444.scala b/test/files/pos/t2444.scala
index a052270..fac1e95 100644
--- a/test/files/pos/t2444.scala
+++ b/test/files/pos/t2444.scala
@@ -2,14 +2,14 @@ object Test {
 
   trait Foo
 
-  class Bar { 
+  class Bar {
     object baz extends Foo
   }
 
-  def frob[P1, P2<:Foo](f:P1 => P2) = () 
+  def frob[P1, P2<:Foo](f:P1 => P2) = ()
 
   def main(args:Array[String]) : Unit = {
-  	frob((p:Bar) => p.baz) 
+	frob((p:Bar) => p.baz)
   }
 
 }
diff --git a/test/files/pos/t2464.cmds b/test/files/pos/t2464.cmds
deleted file mode 100644
index ca733ef..0000000
--- a/test/files/pos/t2464.cmds
+++ /dev/null
@@ -1,3 +0,0 @@
-javac JavaOne.java
-scalac ScalaOne_1.scala
-scalac t2464_2.scala
diff --git a/test/files/pos/t2464/ScalaOne_1.scala b/test/files/pos/t2464/ScalaOne_1.scala
index 0271b9c..1caf8ec 100644
--- a/test/files/pos/t2464/ScalaOne_1.scala
+++ b/test/files/pos/t2464/ScalaOne_1.scala
@@ -1,6 +1,6 @@
 class ScalaClassOne extends ClassTwo.Child {
   def func4() = {
     func2
-  }	
+  }
 }
 
diff --git a/test/files/pos/t247.scala b/test/files/pos/t247.scala
index 983b799..fdcafeb 100644
--- a/test/files/pos/t247.scala
+++ b/test/files/pos/t247.scala
@@ -12,15 +12,15 @@ class TreeMapFactory[KEY](newOrder:Order[KEY]) extends MapFactory[KEY] {
   def Empty[V] = new TreeMap[KEY,V](new TreeMapFactory[KEY](order));
 }
 
-class Tree[KEY,Entry](order:Order[KEY]) { 
+class Tree[KEY,Entry](order:Order[KEY]) {
   def size =0;
 }
 
-class TreeMap[KEY,VALUE](_factory:TreeMapFactory[KEY]) extends Tree[KEY,Pair[KEY,VALUE]](_factory.order) with scala.collection.DefaultMap[KEY, VALUE] with Map[KEY, VALUE] {
+class TreeMap[KEY,VALUE](_factory:TreeMapFactory[KEY]) extends Tree[KEY,Tuple2[KEY,VALUE]](_factory.order) with scala.collection.DefaultMap[KEY, VALUE] with Map[KEY, VALUE] {
   val factory = _factory
   val order = _factory.order;
   def this(newOrder:Order[KEY]) = this(new TreeMapFactory[KEY](newOrder));
   def get(key:KEY) = null;
-  def iterator:Iterator[Pair[KEY,VALUE]] = null;
+  def iterator:Iterator[Tuple2[KEY,VALUE]] = null;
   override def size = super[Tree].size
 }
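
The t247 hunk above swaps Pair[KEY,VALUE] for Tuple2[KEY,VALUE]; the Pair alias for Tuple2 was deprecated for 2.11, so the test spells the tuple type out. A tiny illustration of the same rewrite, not part of the patch:

object PairMigrationSketch {
  // previously written as: val p: Pair[Int, String] = Pair(1, "one")
  val p: Tuple2[Int, String] = Tuple2(1, "one") // or simply (1, "one")

  def main(args: Array[String]): Unit = println(p._1 + " -> " + p._2)
}
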
diff --git a/test/files/pos/t2484.scala b/test/files/pos/t2484.scala
index 7d1b7cb..88da6aa 100755
--- a/test/files/pos/t2484.scala
+++ b/test/files/pos/t2484.scala
@@ -1,7 +1,9 @@
+import concurrent.ExecutionContext.Implicits.global
+
 class Admin extends javax.swing.JApplet {
   val jScrollPane = new javax.swing.JScrollPane (null, 0, 0)
   def t2484: Unit = {
-    scala.concurrent.ops.spawn {jScrollPane.synchronized {
+    scala.concurrent.Future {jScrollPane.synchronized {
       def someFunction () = {}
       //scala.concurrent.ops.spawn {someFunction ()}
       jScrollPane.addComponentListener (new java.awt.event.ComponentAdapter {override def componentShown (e: java.awt.event.ComponentEvent) = {
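
The t2484 change is behavioural rather than cosmetic: scala.concurrent.ops was removed, so the test now builds a scala.concurrent.Future instead of calling ops.spawn, which is why the hunk also adds the global ExecutionContext import at the top of the file. A minimal standalone sketch, not part of the patch:

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

object SpawnMigrationSketch {
  def main(args: Array[String]): Unit = {
    // Future {...} schedules the block on the implicit execution context,
    // roughly what ops.spawn used to do on a thread of its own.
    Future { println("runs on the global execution context") }
    Thread.sleep(200) // crude wait so the daemon pool gets a chance to run it
  }
}
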
diff --git a/test/files/pos/t2504.scala b/test/files/pos/t2504.scala
index 0abe7dd..67f8226 100755
--- a/test/files/pos/t2504.scala
+++ b/test/files/pos/t2504.scala
@@ -1,5 +1,5 @@
 object Test {
   val ys: Iterable[_] = Array("abc")
-  val xs = Array("abc")        
+  val xs = Array("abc")
   xs sameElements Array("abc")
 }
diff --git a/test/files/pos/t2545.scala b/test/files/pos/t2545.scala
index b4238fb..6ad9942 100755
--- a/test/files/pos/t2545.scala
+++ b/test/files/pos/t2545.scala
@@ -1,6 +1,6 @@
 trait Frog[T] {
-      def hello: T 
-      def size: Int                                                                                                                                                          
+      def hello: T
+      def size: Int
     }
 
     trait OnlyWithFrogs {
diff --git a/test/files/pos/t2569/Child.scala b/test/files/pos/t2569/Child.scala
index 3d7f424..64f4dc1 100644
--- a/test/files/pos/t2569/Child.scala
+++ b/test/files/pos/t2569/Child.scala
@@ -1,9 +1,9 @@
 package varargs
-  
+
   class Child extends Parent {
-  
+
     override def concatenate(strings: String*): String =
       strings map("\"" + _ + "\"") mkString("(", ", ", ")")
-  
+
   }
 
diff --git a/test/files/pos/t2569/Parent.java b/test/files/pos/t2569/Parent.java
index 133f2ee..89421be 100644
--- a/test/files/pos/t2569/Parent.java
+++ b/test/files/pos/t2569/Parent.java
@@ -1,7 +1,7 @@
 package varargs;
-  
+
   public class Parent {
-  
+
       public String concatenate(String... strings) {
           StringBuilder builder = new StringBuilder();
           for (String s : strings) {
@@ -9,5 +9,5 @@ package varargs;
           }
           return builder.toString();
       }
-  
+
   }
diff --git a/test/files/pos/t261-ab.scala b/test/files/pos/t261-ab.scala
deleted file mode 100644
index df641e8..0000000
--- a/test/files/pos/t261-ab.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-trait A { val foo: String = "A" }
-trait B {
-  private val foo: String = "B"
-  def f = println(foo)
-}
-object Test extends App with B with A {
-  println(foo) // prints "A", as expected
-  f            // prints "B", as expected
-}
diff --git a/test/files/pos/t261-ba.scala b/test/files/pos/t261-ba.scala
deleted file mode 100644
index 6c9c5b1..0000000
--- a/test/files/pos/t261-ba.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-trait B {
-  private val foo: String = "B"
-  def f = println(foo)
-}
-trait A { val foo: String = "A" }
-object Test extends App with B with A {
-  println(foo) // prints "A", as expected
-  f            // prints "B", as expected
-}
diff --git a/test/files/pos/t2613.scala b/test/files/pos/t2613.scala
new file mode 100644
index 0000000..3a64dbc
--- /dev/null
+++ b/test/files/pos/t2613.scala
@@ -0,0 +1,11 @@
+import language.existentials
+
+object Test {
+  class Row
+
+  abstract class MyRelation [R <: Row, +Relation <: MyRelation[R, Relation]]
+
+  type M = MyRelation[R, Relation] forSome {type R <: Row; type Relation <: MyRelation[R, Relation]}
+
+  var (x,y): (String, M) = null
+}
diff --git a/test/files/pos/t262.scala b/test/files/pos/t262.scala
index b814909..ec6187b 100644
--- a/test/files/pos/t262.scala
+++ b/test/files/pos/t262.scala
@@ -1,11 +1,11 @@
 object O {
   abstract class A {
-    def f:A; 
+    def f:A;
   }
   class B extends A {
     def f = if(1 == 2) new C else new D;
   }
-  class C extends A { 
+  class C extends A {
     def f = this;
   }
   class D extends A {
diff --git a/test/files/pos/t2665.scala b/test/files/pos/t2665.scala
index 108daf5..3163e31 100644
--- a/test/files/pos/t2665.scala
+++ b/test/files/pos/t2665.scala
@@ -1,3 +1,3 @@
 object Test {
-  val x: Unit = Array("") 
+  val x: Unit = Array("")
 }
\ No newline at end of file
diff --git a/test/files/pos/t2669.scala b/test/files/pos/t2669.scala
index e34f08f..72e9311 100644
--- a/test/files/pos/t2669.scala
+++ b/test/files/pos/t2669.scala
@@ -23,6 +23,6 @@ import java.util.Vector
 
 // scalac cannot detect lack of type params, but then throws AssertionError later:
 class TVector2639 {
-  val b = new Vector  // this line passed without error detected 
+  val b = new Vector  // this line passed without error detected
   val a = new Vector(1) // this line caused throwing AssertionError when scalac
 }
diff --git a/test/files/pos/t2691.scala b/test/files/pos/t2691.scala
index 94012a8..5f0ddd1 100644
--- a/test/files/pos/t2691.scala
+++ b/test/files/pos/t2691.scala
@@ -1,5 +1,5 @@
 object Breakdown {
-  def unapplySeq(x: Int): Some[List[String]] = Some(List("", "there")) 
+  def unapplySeq(x: Int): Some[List[String]] = Some(List("", "there"))
 }
 object Test {
   42 match {
diff --git a/test/files/pos/t2698.scala b/test/files/pos/t2698.scala
index 0e2662d..bce02e4 100644
--- a/test/files/pos/t2698.scala
+++ b/test/files/pos/t2698.scala
@@ -1,5 +1,9 @@
+class WordExp {
+  abstract class Label
+  type _labelT <: Label
+}
+
 import scala.collection._
-import scala.util.regexp._
 
 abstract class S2 {
   val lang: WordExp
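
The t2698 test now defines a minimal WordExp of its own instead of importing scala.util.regexp, avoiding a dependency on code that moved out of the core library during the 2.11 modularization. Projects that still need the moved packages declare the module artifacts explicitly; an illustrative sbt fragment, with artifact versions given only as examples and not taken from this patch:

// build.sbt fragment -- versions below are illustrative
libraryDependencies ++= Seq(
  "org.scala-lang.modules" %% "scala-xml"                % "1.0.1",
  "org.scala-lang.modules" %% "scala-parser-combinators" % "1.0.1"
)
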
diff --git a/test/files/pos/t2726.cmds b/test/files/pos/t2726.cmds
deleted file mode 100644
index 5fcb18b..0000000
--- a/test/files/pos/t2726.cmds
+++ /dev/null
@@ -1,2 +0,0 @@
-scalac SQLBuilder_1.scala
-scalac test_2.scala
diff --git a/test/files/pos/t2726/SQLBuilder_1.scala b/test/files/pos/t2726/SQLBuilder_1.scala
index 7b3e3d8..8d07a88 100644
--- a/test/files/pos/t2726/SQLBuilder_1.scala
+++ b/test/files/pos/t2726/SQLBuilder_1.scala
@@ -1,7 +1,7 @@
 class SQLBuilder extends SQLBuilder.Segment
 
-object SQLBuilder { 
-  trait Segment 
+object SQLBuilder {
+  trait Segment
 }
 
 
diff --git a/test/files/pos/t2797.scala b/test/files/pos/t2797.scala
index 4323664..cf579d8 100644
--- a/test/files/pos/t2797.scala
+++ b/test/files/pos/t2797.scala
@@ -1,9 +1,9 @@
 class MyVector[A] {
-  def map[B](f: A => B): MyVector[B] = error("")
+  def map[B](f: A => B): MyVector[B] = sys.error("")
 }
 
 object Test {
   def unzip[B, C](_this: MyVector[(B, C)]): (MyVector[B], MyVector[C]) = {
     (_this.map{ bc => bc._1 }, _this.map{ bc => bc._2 })
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t2910.scala b/test/files/pos/t2910.scala
index 17a6a64..d4d92fa 100644
--- a/test/files/pos/t2910.scala
+++ b/test/files/pos/t2910.scala
@@ -9,9 +9,9 @@ object Test {
     lazy val s = "abc"
   }
 
-  def test3 { 
+  def test3 {
     lazy val lazyBar = bar
-    object bar {        
+    object bar {
       val foo = 12
     }
     lazy val lazyBar2 = bar
@@ -29,5 +29,5 @@ object Test {
     lazy val f: Int = g
     Console.println("foo")
     lazy val g: Int = f
-  } 
+  }
 }
\ No newline at end of file
diff --git a/test/files/pos/t2913.scala b/test/files/pos/t2913.scala
index ee86b9e..11d8b92 100755
--- a/test/files/pos/t2913.scala
+++ b/test/files/pos/t2913.scala
@@ -11,13 +11,13 @@ class RichA {
 object Test {
 
   implicit def AToRichA(a: A) = new RichA
-  
+
   val a = new A
   a.foo()
   a.foo(1)
 
   a.foo("")       // Without implicits, a type error regarding invalid argument types is generated at `""`. This is
-                  // the same position as an argument, so the 'second try' typing with an Implicit View is tried, 
+                  // the same position as an argument, so the 'second try' typing with an Implicit View is tried,
                   // and AToRichA(a).foo("") is found.
                   //
                   // My reading of the spec "7.3 Views" is that `a.foo` denotes a member of `a`, so the view should
@@ -48,6 +48,6 @@ object Main {
     val fn = (a : Int, str : String) => "a: " + a + ", str: " + str
     implicit def fx[T](f : (T,String) => String) = (x:T) => f(x,null)
     println(fn(1))
-    ()  
+    ()
   }
 }
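
The comments preserved in t2913 describe the compiler's "second try" with an implicit view when an argument list fails to typecheck against a member that does exist on the receiver. A self-contained sketch of that behaviour, not part of the patch:

import scala.language.implicitConversions

object ImplicitViewSketch {
  class A { def foo(): Int = 0 }
  class RichA { def foo(s: String): Int = s.length }
  implicit def aToRichA(a: A): RichA = new RichA

  def main(args: Array[String]): Unit =
    // A.foo exists but takes no String, so the call is retried as
    // aToRichA(new A()).foo("abc") via the implicit view.
    println(new A().foo("abc")) // prints 3
}
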
diff --git a/test/files/pos/t2939.scala b/test/files/pos/t2939.scala
index 67677f2..3be4d4d 100644
--- a/test/files/pos/t2939.scala
+++ b/test/files/pos/t2939.scala
@@ -4,10 +4,10 @@ object Proxies {
   class C1 extends MapProxy[Int,Int] { def self = Map[Int,Int]() }
   class C2 extends mutable.MapProxy[Int,Int] { def self = mutable.Map[Int,Int]() }
   class C3 extends immutable.MapProxy[Int,Int] { def self = immutable.Map[Int,Int]() }
-  
+
   class C4 extends SetProxy[Int] { def self = Set[Int]() }
   class C5 extends mutable.SetProxy[Int] { def self = mutable.Set[Int]() }
   class C6 extends immutable.SetProxy[Int] { def self = immutable.Set[Int]() }
-  
+
   class C7 extends SeqProxy[Int] { def self = Seq[Int]() }
 }
\ No newline at end of file
diff --git a/test/pending/pos/t294/Ann.java b/test/files/pos/t294/Ann.java
similarity index 100%
rename from test/pending/pos/t294/Ann.java
rename to test/files/pos/t294/Ann.java
diff --git a/test/pending/pos/t294/Ann2.java b/test/files/pos/t294/Ann2.java
similarity index 100%
rename from test/pending/pos/t294/Ann2.java
rename to test/files/pos/t294/Ann2.java
diff --git a/test/pending/pos/t294/Test_1.scala b/test/files/pos/t294/Test_1.scala
similarity index 100%
rename from test/pending/pos/t294/Test_1.scala
rename to test/files/pos/t294/Test_1.scala
diff --git a/test/pending/pos/t294/Test_2.scala b/test/files/pos/t294/Test_2.scala
similarity index 100%
rename from test/pending/pos/t294/Test_2.scala
rename to test/files/pos/t294/Test_2.scala
diff --git a/test/files/pos/t2940/Error.scala b/test/files/pos/t2940/Error.scala
index 7c60066..bf5a6bd 100644
--- a/test/files/pos/t2940/Error.scala
+++ b/test/files/pos/t2940/Error.scala
@@ -5,8 +5,8 @@ abstract class Error {
 object Test {
   trait Quux[T] extends Cycle[Quux[T]]
   val x = new Quux[Int] { def doStuff() { } }
-  
+
   def main(args: Array[String]): Unit = {
-        
+
   }
 }
diff --git a/test/files/pos/t2994a.scala b/test/files/pos/t2994a.scala
index f2d57c3..cb4a389 100644
--- a/test/files/pos/t2994a.scala
+++ b/test/files/pos/t2994a.scala
@@ -17,8 +17,8 @@ object Naturals {
   type _5 = SUCC[_4]
   type _6 = SUCC[_5]
 
-  
-  // crashes scala-2.8.0 beta1  
+
+  // crashes scala-2.8.0 beta1
   trait MUL[n <: NAT, m <: NAT] extends NAT {
     trait curry[n[_[_], _], s[_]] { type f[z <: NAT] = n[s, z] }
     type a[s[_ <: NAT] <: NAT, z <: NAT] = n#a[curry[m#a, s]#f, z]
diff --git a/test/files/pos/t3020.scala b/test/files/pos/t3020.scala
index cb429cd..016563e 100644
--- a/test/files/pos/t3020.scala
+++ b/test/files/pos/t3020.scala
@@ -1,7 +1,7 @@
 object Test {
   def main(args: Array[String]): Unit = {
     var x = true
-    
+
     ( { if (x) new scala.util.Random() } .asInstanceOf[Runnable] )
   }
 }
diff --git a/test/files/pos/t3079.scala b/test/files/pos/t3079.scala
index 4bead34..fa732ea 100644
--- a/test/files/pos/t3079.scala
+++ b/test/files/pos/t3079.scala
@@ -10,8 +10,8 @@ object Coerce {
   def IdentityCoerce[B] = new Coerce[Identity[B], B] {
      // java.lang.Error: A in trait Identity cannot be instantiated from ?x$1.type
      def unwrap = _.value
-     
+
      // Providing the type of _ works around the problem.
-     //def unwrap = (_: Identity[B]).value 
+     //def unwrap = (_: Identity[B]).value
   }
 }
\ No newline at end of file
diff --git a/test/files/pos/t3106.scala b/test/files/pos/t3106.scala
index cf7b507..162e933 100644
--- a/test/files/pos/t3106.scala
+++ b/test/files/pos/t3106.scala
@@ -3,5 +3,5 @@ class Sample[A] (val d0: ((A,A)) => A) {}
 object Sample {
   implicit def apply[A] (x:A): Sample[A] = {
     new Sample(p => p._1)
-  }       
+  }
 }
\ No newline at end of file
diff --git a/test/files/pos/t3108.scala b/test/files/pos/t3108.scala
deleted file mode 100644
index 6a1da73..0000000
--- a/test/files/pos/t3108.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object A {
-  val a: NotNull = ""
-  val b: NotNull = 41
-}
-
diff --git a/test/files/pos/t3136.scala b/test/files/pos/t3136.scala
index 6a5850a..33d42c2 100644
--- a/test/files/pos/t3136.scala
+++ b/test/files/pos/t3136.scala
@@ -11,7 +11,7 @@ object NullaryMethodType {
 }
 
 object Test {
-  def TEST(tp: Type): String = 
+  def TEST(tp: Type): String =
     tp match {
       case PolyType(ps1, PolyType(ps2, res @ PolyType(a, b))) => "1"+tp // couldn't find a simpler version that still crashes
       case NullaryMethodType(meh) => "2"+meh
diff --git a/test/files/pos/t3152.scala b/test/files/pos/t3152.scala
index a20428d..3d1dcbd 100644
--- a/test/files/pos/t3152.scala
+++ b/test/files/pos/t3152.scala
@@ -1,13 +1,13 @@
 trait Applicative[M[_]]
 
 sealed trait MA[M[_], A] {
-  def sequence[N[_], B](implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = error("stub")
-  // def sequence3[N[_], B]()(implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = error("stub")
+  def sequence[N[_], B](implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = sys.error("stub")
+  // def sequence3[N[_], B]()(implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = sys.error("stub")
 }
 
 object test {
-  implicit def ListMA[A](l: List[A]): MA[List, A] = error("stub")
-  implicit val ao: Applicative[Option] = error("stub")
+  implicit def ListMA[A](l: List[A]): MA[List, A] = sys.error("stub")
+  implicit val ao: Applicative[Option] = sys.error("stub")
 
   /* This compiles OK:
   (Nil: List[Option[Int]]).sequence3(): Option[List[Int]]
@@ -17,4 +17,4 @@ object test {
   // !!! No line number is reported with the error
   (Nil: List[Option[Int]]).sequence: Option[List[Int]]
   (List[Option[Int]]()).sequence: Option[List[Int]]
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t3160.scala b/test/files/pos/t3160.scala
new file mode 100644
index 0000000..cc007dc
--- /dev/null
+++ b/test/files/pos/t3160.scala
@@ -0,0 +1,6 @@
+import scala.collection.mutable._
+class Node
+
+class A {
+  def f(x: Node): Node = ???
+}
diff --git a/test/files/pos/t3174b.scala b/test/files/pos/t3174b.scala
index 002c4f0..4df1bfe 100755
--- a/test/files/pos/t3174b.scala
+++ b/test/files/pos/t3174b.scala
@@ -2,9 +2,9 @@ trait Foo[X] { def foo : Map[String,Foo[X]] }
 
 object Test {
   def f[T]() : Foo[T] = {
-    class Anon extends Foo[T] { 
-      var foo: Map[String, Foo[T]] = Map[String,Foo[T]]() 
-      //def foo = Map[String,Foo[T]]() 
+    class Anon extends Foo[T] {
+      var foo: Map[String, Foo[T]] = Map[String,Foo[T]]()
+      //def foo = Map[String,Foo[T]]()
       //def foo_=(x: Map[String,Foo[T]]) {}
     }
     new Anon
diff --git a/test/files/pos/t3175-pos.scala b/test/files/pos/t3175-pos.scala
index 497ff82..89bbf8b 100644
--- a/test/files/pos/t3175-pos.scala
+++ b/test/files/pos/t3175-pos.scala
@@ -1,7 +1,7 @@
 object Test {
-  def f(g:{val update:Unit}) = g.update 
-  
+  def f(g:{val update:Unit}) = g.update
+
   def main(args: Array[String]): Unit = {
-    
+
   }
 }
diff --git a/test/files/pos/t3177.scala b/test/files/pos/t3177.scala
index 21893c9..9f9528f 100644
--- a/test/files/pos/t3177.scala
+++ b/test/files/pos/t3177.scala
@@ -1,17 +1,17 @@
 trait InvariantFunctor[F[_]] {
   def xmap[A, B](ma: F[A], f: A => B, g: B => A): F[B]
 }
- 
+
 object InvariantFunctor {
   import Endo._
-  
+
   implicit val EndoInvariantFunctor = new InvariantFunctor[Endo] {
-    def xmap[A, B](ma: Endo[A], f: A => B, g: B => A): Endo[B] = (b: B) => f(ma(g(b)))    
+    def xmap[A, B](ma: Endo[A], f: A => B, g: B => A): Endo[B] = (b: B) => f(ma(g(b)))
   }
-  
+
   // The definition about fails with:
   // anon-type.scala:9: error: not found: value b
-  //       def xmap[A, B](ma: Endo[A], f: A => B, g: B => A): Endo[B] = (b: B) => f(ma(g(b)))    
+  //       def xmap[A, B](ma: Endo[A], f: A => B, g: B => A): Endo[B] = (b: B) => f(ma(g(b)))
   //                                                                                     ^
   //   anon-type.scala:8: error: not found: type $anon
   //     implicit val EndoInvariantFunctor = new InvariantFunctor[Endo] {
@@ -20,9 +20,9 @@ object InvariantFunctor {
 
   // These both work:
   // implicit val EndoInvariantFunctorAscribed: InvariantFunctor[Endo] = new InvariantFunctor[Endo] {
-  //   def xmap[A, B](ma: Endo[A], f: A => B, g: B => A): Endo[B] = (b: B) => f(ma(g(b)))    
+  //   def xmap[A, B](ma: Endo[A], f: A => B, g: B => A): Endo[B] = (b: B) => f(ma(g(b)))
   // }
-  // 
+  //
   // implicit val EndoInvariantFunctorStubbed = new InvariantFunctor[Endo] {
   //   def xmap[A, B](ma: Endo[A], f: A => B, g: B => A): Endo[B] = error("stub")
   // }
diff --git a/test/files/pos/t3252.scala b/test/files/pos/t3252.scala
index 4b8e862..3ecc1e7 100644
--- a/test/files/pos/t3252.scala
+++ b/test/files/pos/t3252.scala
@@ -8,8 +8,8 @@ class A {
         }
     }
 
-    private def g[T](block : => T) = error("")
+    private def g[T](block : => T) = sys.error("")
 }
 object B {
-    def h(block : => Unit) : Nothing = error("")
-}
\ No newline at end of file
+    def h(block : => Unit) : Nothing = sys.error("")
+}
diff --git a/test/files/pos/t3274.scala b/test/files/pos/t3274.scala
index dfa6a4e..1572318 100644
--- a/test/files/pos/t3274.scala
+++ b/test/files/pos/t3274.scala
@@ -1,7 +1,7 @@
-trait A { this: B => 
-  trait X { 
+trait A { this: B =>
+  trait X {
     class Y1 extends Y
-  } 
+  }
 }
 
 trait B extends A {
diff --git a/test/files/pos/t3312.scala b/test/files/pos/t3312.scala
index 9bf3e23..aef965d 100644
--- a/test/files/pos/t3312.scala
+++ b/test/files/pos/t3312.scala
@@ -12,6 +12,6 @@ trait B extends Root {
 
 object Foo extends A with B  {
   override def say: String = foo(super[A].say)
-  
+
   def foo(p: => String): String = p
 }
diff --git a/test/files/pos/t3349/AbstractTupleSet.java b/test/files/pos/t3349/AbstractTupleSet.java
index 47b440a..38e4743 100644
--- a/test/files/pos/t3349/AbstractTupleSet.java
+++ b/test/files/pos/t3349/AbstractTupleSet.java
@@ -5,5 +5,5 @@ public abstract class AbstractTupleSet implements TupleSet {
 
   public void addColumn(String name, String expr) {
     throw new UnsupportedOperationException();
-  }    
+  }
 }
diff --git a/test/files/pos/t3349/Test.scala b/test/files/pos/t3349/Test.scala
index 8174e4c..595bead 100644
--- a/test/files/pos/t3349/Test.scala
+++ b/test/files/pos/t3349/Test.scala
@@ -1,5 +1,5 @@
 object Test {
   val label = "name"
-  val table: Table = error("")
+  val table: Table = sys.error("")
   table.addColumn( label, label.getClass )
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t3363-new.scala b/test/files/pos/t3363-new.scala
index e609f4d..fef2bf8 100644
--- a/test/files/pos/t3363-new.scala
+++ b/test/files/pos/t3363-new.scala
@@ -9,7 +9,7 @@ object TestCase {
         //if you inherit from MapOps[T] instead of MapOps[F] then code compiles fine
         implicit def map2ops[T,F](fs: Map[T,F]) = new MapOps[F] {
           //if you remove this line, then code compiles
-      lazy val m: TypeTag[T] = error("just something to make it compile")
+      lazy val m: TypeTag[T] = sys.error("just something to make it compile")
       def is(xs: List[T]) = List(xs)
     }
 
@@ -17,4 +17,4 @@ object TestCase {
       println(Map(1 -> "2") is List(2))
     }
 
-  }
\ No newline at end of file
+  }
diff --git a/test/files/pos/t3363-old.scala b/test/files/pos/t3363-old.scala
index bae5408..c08cf2a 100644
--- a/test/files/pos/t3363-old.scala
+++ b/test/files/pos/t3363-old.scala
@@ -7,7 +7,7 @@ object TestCase {
         //if you inherit from MapOps[T] instead of MapOps[F] then code compiles fine
         implicit def map2ops[T,F](fs: Map[T,F]) = new MapOps[F] {
           //if you remove this line, then code compiles
-	    lazy val m: Manifest[T] = error("just something to make it compile")
+	    lazy val m: Manifest[T] = sys.error("just something to make it compile")
 	    def is(xs: List[T]) = List(xs)
 	  }
 
diff --git a/test/files/pos/t3411.scala b/test/files/pos/t3411.scala
index b76fec6..b58e52d 100644
--- a/test/files/pos/t3411.scala
+++ b/test/files/pos/t3411.scala
@@ -1,6 +1,6 @@
 object A  {
   def g(c: PartialFunction[Any,Unit]) {}
-  
+
   def f {
     lazy val x = 0
     g { case `x` => }
diff --git a/test/files/pos/t3417.scala b/test/files/pos/t3417.scala
deleted file mode 100644
index d2de160..0000000
--- a/test/files/pos/t3417.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-trait X extends NotNull {
-    def foo = 1
-}
-
-trait Y extends Object with NotNull {
-    def bar = 1
-}
-
-class Z extends NotNull
-
-class W extends Object with NotNull
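
t3417 above (and t3108 earlier) are dropped because they exercise the NotNull marker trait, which is deprecated and no longer something the positive tests should rely on. Where code used NotNull to document a no-null expectation, Option is the usual replacement; a small sketch, not derived from this patch:

object NotNullAlternativeSketch {
  // express possible absence in the type instead of relying on a marker trait
  def greet(name: Option[String]): String =
    name.fold("hello, nobody")("hello, " + _)

  def main(args: Array[String]): Unit = println(greet(Some("scala")))
}
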
diff --git a/test/files/pos/t342.scala b/test/files/pos/t342.scala
deleted file mode 100644
index 752b24d..0000000
--- a/test/files/pos/t342.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Main extends App {
-
-  object Foo extends Enumeration(0, "Bar") {  // 2
-    val Bar = Value
-  }
-  import Foo._;
-  Console.println(Bar)
-}
diff --git a/test/files/pos/t3429/A.scala b/test/files/pos/t3429/A.scala
index 4b70580..80785db 100644
--- a/test/files/pos/t3429/A.scala
+++ b/test/files/pos/t3429/A.scala
@@ -2,8 +2,8 @@ class A {
   @Test(exc = classOf[Exception])
   def myTestMethod = 0
 }
-// rytz at chara:~/scala/trunk/sandbox$ javac Test.java 
-// rytz at chara:~/scala/trunk/sandbox$ ../build/pack/bin/scalac A.scala 
+// rytz at chara:~/scala/trunk/sandbox$ javac Test.java
+// rytz at chara:~/scala/trunk/sandbox$ ../build/pack/bin/scalac A.scala
 // A.scala:2: error: type mismatch;
 //  found   : java.lang.Class[Exception](classOf[java.lang.Exception])
 //  required: java.lang.Class
diff --git a/test/files/pos/t3430.scala b/test/files/pos/t3430.scala
index 4990abb..3129c62 100644
--- a/test/files/pos/t3430.scala
+++ b/test/files/pos/t3430.scala
@@ -1,6 +1,6 @@
 // package com.example
 
-object A {    
+object A {
   def f1(f: String => Boolean) = f("a")
 
   def f2(): Boolean =
diff --git a/test/files/pos/t344.scala b/test/files/pos/t344.scala
index 8a6ad91..449a763 100644
--- a/test/files/pos/t344.scala
+++ b/test/files/pos/t344.scala
@@ -1,7 +1,7 @@
 object Bug {
   class A;
-  case class A1 extends A;
-  case class A2 extends A;
+  case class A1() extends A;
+  case class A2() extends A;
   def f: A =
     if (true)
       A1()
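
t344 gains empty parameter lists because case classes without one are no longer accepted by 2.11. A short sketch, not part of the patch, of the two idiomatic spellings:

object CaseClassParensSketch {
  sealed trait A
  case class A1() extends A   // explicit empty parameter list, as the test now writes it
  case object A2 extends A    // the usual choice when there is genuinely no state

  def main(args: Array[String]): Unit = println(List[A](A1(), A2))
}
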
diff --git a/test/files/pos/t3440.scala b/test/files/pos/t3440.scala
index 46bba1b..0e7ca6b 100644
--- a/test/files/pos/t3440.scala
+++ b/test/files/pos/t3440.scala
@@ -4,15 +4,15 @@ object test {
   }
 
   case object Int8 extends SampleFormat1 {
-    def readerFactory = error("")
+    def readerFactory = sys.error("")
   }
   case object Int16 extends SampleFormat1 {
-    def readerFactory = error("")
+    def readerFactory = sys.error("")
   }
-	
+
   (new {}: Any) match {
    case 8   => Int8
    case 16  => Int16
-   case _   => error("")
+   case _   => sys.error("")
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t3452f.scala b/test/files/pos/t3452f.scala
new file mode 100644
index 0000000..efe25a6
--- /dev/null
+++ b/test/files/pos/t3452f.scala
@@ -0,0 +1,10 @@
+class Base[Coll] {
+  trait Transformed[S] {
+    lazy val underlying: Coll = ???
+  }
+}
+
+class Derived extends Base[String] {
+  class C extends Transformed[Any]
+}
+
diff --git a/test/files/pos/t3477.scala b/test/files/pos/t3477.scala
index 660aa55..6a94baa 100644
--- a/test/files/pos/t3477.scala
+++ b/test/files/pos/t3477.scala
@@ -1,7 +1,7 @@
 class J3 {
-  def f[K, K1 >: K, V](x: Map[K1, V]): Map[K, V] = error("")
+  def f[K, K1 >: K, V](x: Map[K1, V]): Map[K, V] = sys.error("")
 }
 
 object Test {
   (new J3).f(Map[Int, Int]())
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t3521/DoubleValue.java b/test/files/pos/t3521/DoubleValue.java
index 28f05cd..e8c0938 100644
--- a/test/files/pos/t3521/DoubleValue.java
+++ b/test/files/pos/t3521/DoubleValue.java
@@ -4,4 +4,4 @@ import java.lang.annotation.*;
 @Target(ElementType.FIELD)
 public @interface DoubleValue {
   double value();
-} 
\ No newline at end of file
+}
\ No newline at end of file
diff --git a/test/files/pos/t3528.scala b/test/files/pos/t3528.scala
index b1c4344..ff49b3e 100644
--- a/test/files/pos/t3528.scala
+++ b/test/files/pos/t3528.scala
@@ -4,5 +4,5 @@ class A {
   // 3528 comments
   def f2 = List(Set(1,2,3), List(1,2,3))
   // 2322
-  def f3 = List(null: Range, null: List[Int])  
+  def f3 = List(null: Range, null: List[Int])
 }
diff --git a/test/files/pos/t3568.scala b/test/files/pos/t3568.scala
index 0f26e2f..c8e3fcc 100755
--- a/test/files/pos/t3568.scala
+++ b/test/files/pos/t3568.scala
@@ -14,7 +14,7 @@ package buffer {
     // ArrayVec2 can be compiled, instantiated and used.
     def main(args: Array[String]) { println(works) }
   }
-  
+
   trait ElemType { type Element; type Component <: ElemType }
   trait Float1 extends ElemType { type Element = Float; type Component = Float1}
   class Vec2 extends ElemType { type Element = Vec2;  type Component = Float1 }
diff --git a/test/files/pos/t3578.scala b/test/files/pos/t3578.scala
index 306cde8..d984118 100644
--- a/test/files/pos/t3578.scala
+++ b/test/files/pos/t3578.scala
@@ -24,7 +24,7 @@ object Test {
   case class JInt(num: BigInt) extends JValue
   case class JBool(value: Boolean) extends JValue
   case class JField(name: String, value: JValue) extends JValue
-  case class JObject(obj: List[JField]) extends JValue 
+  case class JObject(obj: List[JField]) extends JValue
   case class JArray(arr: List[JValue]) extends JValue
 }
 
diff --git a/test/files/pos/t3582.scala b/test/files/pos/t3582.scala
index e20af5e..0ac112e 100644
--- a/test/files/pos/t3582.scala
+++ b/test/files/pos/t3582.scala
@@ -6,7 +6,7 @@ object Test {
 // [[syntax trees at end of typer]]
 // abstract trait C#5[A#9116 >: Nothing#5832 <: Any#52] extends scala#33.AnyRef#2780;
 // final object Test#15 extends java.lang.Object#2485 with ScalaObject#1913 {
-//   def ImplicitParamCA#9123[CC#9124[A#10858 >: Nothing#5832 <: Any#52] >: [A#10858]Nothing#5832 <: [A#10858]Any#52, 
+//   def ImplicitParamCA#9123[CC#9124[A#10858 >: Nothing#5832 <: Any#52] >: [A#10858]Nothing#5832 <: [A#10858]Any#52,
 //                            A#9125 >: Nothing#5832 <: Any#52](implicit ev#10856: C#5[A#9127]): Unit#3818
 //         = scala#34.this.Predef#1683.implicitly#8816[C#5[A#10858]]()
 // }
diff --git a/test/files/neg/t3631.scala b/test/files/pos/t3631.scala
similarity index 100%
rename from test/files/neg/t3631.scala
rename to test/files/pos/t3631.scala
diff --git a/test/files/pos/t3636.scala b/test/files/pos/t3636.scala
index dbfc7a2..24d18c6 100644
--- a/test/files/pos/t3636.scala
+++ b/test/files/pos/t3636.scala
@@ -5,11 +5,11 @@ class CTxnLocal[ T ] {
 }
 
 trait Txn
-    
+
 trait ProcTxn {
     def ccstm: Txn
 }
-    
+
 trait TxnLocal[ @specialized T ] {
    def apply()( implicit tx: ProcTxn ) : T
    def set( v: T )( implicit tx: ProcTxn ) : Unit
diff --git a/test/files/pos/t3670.scala b/test/files/pos/t3670.scala
index 19959f9..ec4fbe5 100644
--- a/test/files/pos/t3670.scala
+++ b/test/files/pos/t3670.scala
@@ -1,4 +1,4 @@
-class A {    
+class A {
   val n = {
         val z = {
             lazy val bb = 1
diff --git a/test/files/pos/t3671.scala b/test/files/pos/t3671.scala
index 75559f8..1ca9327 100644
--- a/test/files/pos/t3671.scala
+++ b/test/files/pos/t3671.scala
@@ -2,6 +2,6 @@ object Crash {
   def crash(value: Int): Unit =
       value match {
         case java.lang.Integer.MAX_VALUE => println("MAX_VALUE")
-        case java.lang.Integer.MIN_VALUE => println("MIN_VALUE") 
+        case java.lang.Integer.MIN_VALUE => println("MIN_VALUE")
        }
 }
\ No newline at end of file
diff --git a/test/files/pos/t3688-redux.scala b/test/files/pos/t3688-redux.scala
deleted file mode 100644
index e601cf2..0000000
--- a/test/files/pos/t3688-redux.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-import collection.JavaConverters._
-import java.{ util => ju }
-import scala.collection.{ mutable, immutable }
-
-object Test {
-  def m[P <% AsJava[ju.List[Int]]](l: P) = 1
-  m(List(1))
-}
\ No newline at end of file
diff --git a/test/files/pos/t3731.scala b/test/files/pos/t3731.scala
index 7593854..7a3cbec 100644
--- a/test/files/pos/t3731.scala
+++ b/test/files/pos/t3731.scala
@@ -1,8 +1,8 @@
 object Test{
   trait ZW[S]{type T}
-  def ZipWith[S, M <: ZW[S]]: M#T = error("ZW")
+  def ZipWith[S, M <: ZW[S]]: M#T = sys.error("ZW")
 
-  // meh must be parameterised to force an asSeenFrom that 
+  // meh must be parameterised to force an asSeenFrom that
   // duplicates the refinement in the TR's pre without updating its sym
   def meh[A] = ZipWith[A, ZW[A]{type T=Stream[A]}]
 
diff --git a/test/files/pos/t3837.scala b/test/files/pos/t3837.scala
index e1797db..bcaf63c 100644
--- a/test/files/pos/t3837.scala
+++ b/test/files/pos/t3837.scala
@@ -1,8 +1,8 @@
 class BipClass { }
 trait BipTrait {
   self: BipClass =>
-  
-  private[this] def foo() = 5  
+
+  private[this] def foo() = 5
   def bar() = this.foo()
 }
 // error: value foo is not a member of BipTrait with BipClass
diff --git a/test/files/pos/t3856.scala b/test/files/pos/t3856.scala
index 5ea4b84..132c95c 100644
--- a/test/files/pos/t3856.scala
+++ b/test/files/pos/t3856.scala
@@ -1,4 +1,4 @@
-case class C[T](x: T) 
+case class C[T](x: T)
 
 case class CS(xs: C[_]*)
 
diff --git a/test/files/pos/t3864/tuples_1.scala b/test/files/pos/t3864/tuples_1.scala
index 1d19af6..5e97f84 100644
--- a/test/files/pos/t3864/tuples_1.scala
+++ b/test/files/pos/t3864/tuples_1.scala
@@ -1,11 +1,11 @@
-trait PimpedType[X] {
+trait EnrichedType[X] {
   val value: X
 }
 
 trait Tuples {
 
-  
-trait Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] extends PimpedType[Tuple15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]] {
+
+trait Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] extends EnrichedType[Tuple15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]] {
   def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15)}
   def toIndexedSeq[Z](implicit ev: value.type <:< Tuple15[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15)}
   def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N [...]
@@ -13,8 +13,8 @@ trait Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] extends PimpedType[T
 
 implicit def ToTuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)): Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] = new { val value = t } with Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]
 
-  
-trait Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] extends PimpedType[Tuple16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]] {
+
+trait Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] extends EnrichedType[Tuple16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]] {
   def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16)}
   def toIndexedSeq[Z](implicit ev: value.type <:< Tuple16[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16)}
   def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14 [...]
@@ -22,8 +22,8 @@ trait Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] extends PimpedTyp
 
 implicit def ToTuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)): Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] = new { val value = t } with Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]
 
-  
-trait Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] extends PimpedType[Tuple17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]] {
+
+trait Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] extends EnrichedType[Tuple17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]] {
   def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17)}
   def toIndexedSeq[Z](implicit ev: value.type <:< Tuple17[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17)}
   def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, [...]
@@ -31,8 +31,8 @@ trait Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] extends Pimped
 
 implicit def ToTuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)): Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] = new { val value = t } with Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]
 
-  
-trait Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] extends PimpedType[Tuple18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]] {
+
+trait Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] extends EnrichedType[Tuple18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]] {
   def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18)}
   def toIndexedSeq[Z](implicit ev: value.type <:< Tuple18[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18)}
   def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M [...]
@@ -40,8 +40,8 @@ trait Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] extends Pim
 
 implicit def ToTuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)): Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] = new { val value = t } with Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]
 
-  
-trait Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] extends PimpedType[Tuple19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]] {
+
+trait Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] extends EnrichedType[Tuple19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]] {
   def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19)}
   def toIndexedSeq[Z](implicit ev: value.type <:< Tuple19[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19)}
   def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identi [...]
@@ -49,8 +49,8 @@ trait Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] extends
 
 implicit def ToTuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)): Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] = new { val value = t } with Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]
 
-  
-trait Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] extends PimpedType[Tuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]] {
+
+trait Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] extends EnrichedType[Tuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]] {
   def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20)}
   def toIndexedSeq[Z](implicit ev: value.type <:< Tuple20[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20)}
   def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = id [...]
@@ -58,8 +58,8 @@ trait Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] exten
 
 implicit def ToTuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)): Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] = new { val value = t } with Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]
 
-  
-trait Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] extends PimpedType[Tuple21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]] {
+
+trait Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] extends EnrichedType[Tuple21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]] {
   def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21)}
   def toIndexedSeq[Z](implicit ev: value.type <:< Tuple21[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21)}
   def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT, UU](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM)  [...]
@@ -67,12 +67,12 @@ trait Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] ex
 
 implicit def ToTuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)): Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] = new { val value = t } with Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]
 
-  
-trait Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] extends PimpedType[Tuple22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]] {
+
+trait Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] extends EnrichedType[Tuple22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]] {
   def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22)}
   def toIndexedSeq[Z](implicit ev: value.type <:< Tuple22[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22)}
   def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT, UU, VV](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M =>  [...]
 }
 
 implicit def ToTuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)): Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] = new { val value = t } with Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t3866.scala b/test/files/pos/t3866.scala
index 5fe7e3f..5d366cc 100644
--- a/test/files/pos/t3866.scala
+++ b/test/files/pos/t3866.scala
@@ -13,5 +13,5 @@ abstract class ImplicitRepeated {
   f("A", 1, 2) // should be implicitly resolved to alternative b)
   f( 1, 2 )    // should be implicitly resolved to alternative a)
     // ImplicitRepeated.this.f[Int, Nothing]("A", ImplicitRepeated.this.anyToN[Int](1), ImplicitRepeated.this.anyToN[Int](2));
-    // ImplicitRepeated.this.f[Int, Nothing](ImplicitRepeated.this.anyToN[Int](1), ImplicitRepeated.this.anyToN[Int](2))  
+    // ImplicitRepeated.this.f[Int, Nothing](ImplicitRepeated.this.anyToN[Int](1), ImplicitRepeated.this.anyToN[Int](2))
 }
\ No newline at end of file
diff --git a/test/files/pos/t3880.scala b/test/files/pos/t3880.scala
index b6f06c4..cd5f3c0 100644
--- a/test/files/pos/t3880.scala
+++ b/test/files/pos/t3880.scala
@@ -1,6 +1,6 @@
 abstract class Bar[+B] {
 }
-abstract class C1[+B] extends Bar[B] {  
+abstract class C1[+B] extends Bar[B] {
   private[this] def g(x: C1[B]): Unit = ()
 
   // this method is fine: notice that it allows the call to g,
diff --git a/test/files/pos/t3883.scala b/test/files/pos/t3883.scala
index adde052..1b62c0c 100644
--- a/test/files/pos/t3883.scala
+++ b/test/files/pos/t3883.scala
@@ -1,14 +1,14 @@
 // need to test both orders
 object A1 {
-  implicit def i: Equiv[Boolean] = error("")
-  implicit def div[T, A](implicit f: T => A, eq: Equiv[A]): Equiv[T] = error("")
+  implicit def i: Equiv[Boolean] = sys.error("")
+  implicit def div[T, A](implicit f: T => A, eq: Equiv[A]): Equiv[T] = sys.error("")
 
   implicitly[Equiv[Boolean]]
 }
 
 object A2 {
-  implicit def div[T, A](implicit f: T => A, eq: Equiv[A]): Equiv[T] = error("")
-  implicit def i: Equiv[Boolean] = error("")
+  implicit def div[T, A](implicit f: T => A, eq: Equiv[A]): Equiv[T] = sys.error("")
+  implicit def i: Equiv[Boolean] = sys.error("")
 
   implicitly[Equiv[Boolean]]
 }
diff --git a/test/files/pos/t3898.scala b/test/files/pos/t3898.scala
index 075692e..ab47bbd 100644
--- a/test/files/pos/t3898.scala
+++ b/test/files/pos/t3898.scala
@@ -2,5 +2,5 @@ trait Atomic[@specialized(Boolean) T] {
   def x: T
 
   def f(fn: T => T): Boolean = f(fn(x), true)
-  def f[R](a: T, b: R): R = b 
+  def f[R](a: T, b: R): R = b
 }
diff --git a/test/files/pos/t3927.scala b/test/files/pos/t3927.scala
index eb4c4b3..f5869c5 100644
--- a/test/files/pos/t3927.scala
+++ b/test/files/pos/t3927.scala
@@ -1,6 +1,6 @@
 object A {
   def x {
-    implicit lazy val e: Equiv[Int] = error("")
+    implicit lazy val e: Equiv[Int] = sys.error("")
     implicitly[Equiv[Int]]
   }
-} 
+}
diff --git a/test/files/pos/t3936/BlockingQueue.java b/test/files/pos/t3936/BlockingQueue.java
new file mode 100644
index 0000000..b902d45
--- /dev/null
+++ b/test/files/pos/t3936/BlockingQueue.java
@@ -0,0 +1,3 @@
+package pack;
+import java.util.Queue;
+public interface BlockingQueue<E> extends Queue<E> { }
diff --git a/test/files/pos/t3936/Queue.java b/test/files/pos/t3936/Queue.java
new file mode 100644
index 0000000..25c9087
--- /dev/null
+++ b/test/files/pos/t3936/Queue.java
@@ -0,0 +1,2 @@
+package pack;
+public interface Queue { }
diff --git a/test/files/pos/t3936/Test.scala b/test/files/pos/t3936/Test.scala
new file mode 100644
index 0000000..c867a05
--- /dev/null
+++ b/test/files/pos/t3936/Test.scala
@@ -0,0 +1,4 @@
+package pack
+trait Test {
+  val b: BlockingQueue[Nothing]
+}
diff --git a/test/files/pos/t3938/Parent.java b/test/files/pos/t3938/Parent.java
index a35f435..08fae33 100644
--- a/test/files/pos/t3938/Parent.java
+++ b/test/files/pos/t3938/Parent.java
@@ -1,7 +1,7 @@
 public class Parent<A>{
     class I1 {}
     class I2 extends Parent.I1 {}
-    
+
     // OKAY:
     class I3 extends I1 {}
     static class I4 {}
diff --git a/test/files/pos/t3938/UseParent.scala b/test/files/pos/t3938/UseParent.scala
index 3f4c229..685d1a0 100644
--- a/test/files/pos/t3938/UseParent.scala
+++ b/test/files/pos/t3938/UseParent.scala
@@ -1,6 +1,6 @@
 object UseParent {
   classOf[Parent[AnyRef]#I2]
-  
+
   // OKAY
   classOf[Parent[AnyRef]#I3]
   classOf[Parent.I5]
diff --git a/test/files/pos/t3943/Client_2.scala b/test/files/pos/t3943/Client_2.scala
new file mode 100644
index 0000000..650ac9b
--- /dev/null
+++ b/test/files/pos/t3943/Client_2.scala
@@ -0,0 +1,7 @@
+object Test {
+  val x: Child = new Child
+  x.getInner.foo("meh")
+// error: type mismatch;
+//  found   : java.lang.String("meh")
+//  required: E
+}
diff --git a/test/files/pos/t3943/Outer_1.java b/test/files/pos/t3943/Outer_1.java
new file mode 100644
index 0000000..1d38c5e
--- /dev/null
+++ b/test/files/pos/t3943/Outer_1.java
@@ -0,0 +1,14 @@
+class Outer<E> {
+  abstract class Inner {
+    abstract public void foo(E e);
+  }
+}
+
+class Child extends Outer<String> {
+  // the implicit prefix for Inner is Outer<E> instead of Outer<String>
+  public Inner getInner() {
+    return new Inner() {
+     public void foo(String e) { System.out.println("meh "+e); }
+    };
+  }
+}
diff --git a/test/files/pos/t3972.scala b/test/files/pos/t3972.scala
index d6cbb3d..5dfc10f 100644
--- a/test/files/pos/t3972.scala
+++ b/test/files/pos/t3972.scala
@@ -2,7 +2,7 @@ object CompilerCrash {
   def main(args: Array[String]) {
     args match {
       case Array("a", a @ _*) => { } // The code compiles fine if this line is commented out or "@ _*" is deleted or this line is swapped for the next line
-      case Array("b") => { } // The code compiles fine if this line is commented out 
+      case Array("b") => { } // The code compiles fine if this line is commented out
       case Array("c", c) => {
         0 // The code compiles fine if this line is commented out
       }
diff --git a/test/files/pos/t4020.scala b/test/files/pos/t4020.scala
index 8a758d5..f976460 100644
--- a/test/files/pos/t4020.scala
+++ b/test/files/pos/t4020.scala
@@ -14,11 +14,11 @@ class B {
     def mthd(foo: a2.Foo) = {
         foo match {
             case a2.Foo2(i) => i
-            
-            // Note: This case is impossible.  In fact, scalac 
+
+            // Note: This case is impossible.  In fact, scalac
             // will (correctly) report an error if it is uncommented,
             // but a warning if it is commented.
-            
+
             // case a1.Foo1(i) => i
         }
     }
diff --git a/test/files/pos/t4070.scala b/test/files/pos/t4070.scala
index 29c8d16..a9777f0 100644
--- a/test/files/pos/t4070.scala
+++ b/test/files/pos/t4070.scala
@@ -21,14 +21,14 @@ package b {
 
 /*
 // With crash below the clasess:
-% scalac -Dscalac.debug.tvar ./a.scala 
+% scalac -Dscalac.debug.tvar ./a.scala
 [    create] ?_$1                     ( In Foo#crash )
 [   setInst] tv[Int]                  ( In Foo#crash, _$1=tv[Int] )
 [    create] tv[Int]                  ( In Foo#crash )
 [     clone] tv[Int]                  ( Foo#crash )
 
 // With crash above the classes:
-% scalac -Dscalac.debug.tvar ./a.scala 
+% scalac -Dscalac.debug.tvar ./a.scala
 [    create] ?tv                      ( In Foo#crash )
 ./a.scala:2: error: Invalid type application in TypeVar: List(), List(Int)
   def crash(x: Dingus[_]): Unit = x match { case m: Bippy[tv] => () }
diff --git a/test/files/pos/t4202.scala b/test/files/pos/t4202.scala
index aca7d50..b2a0c01 100644
--- a/test/files/pos/t4202.scala
+++ b/test/files/pos/t4202.scala
@@ -2,7 +2,7 @@ object t4202_1 {
   () => {
     trait T {
       def t = ()
-    } 
+    }
   }
 }
 
diff --git a/test/files/pos/t422.scala b/test/files/pos/t422.scala
deleted file mode 100644
index cb3ba27..0000000
--- a/test/files/pos/t422.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.util.regexp.WordExp;
-import scala.util.automata.WordBerrySethi;
-
-object BoolWordExp extends WordExp {
-  type _labelT = MyLabels;
-  type _regexpT = RegExp;
-  abstract class MyLabels extends Label ;
-  case class MyLabel(c:Char) extends MyLabels;
-}
-
-object MyTranslator extends WordBerrySethi {
-  override val lang = BoolWordExp;
-  import lang._;
-  override protected def seenLabel( r:RegExp, i:Int, label: _labelT ): Unit = {
-    super.seenLabel(r,i,label)
-  }
-}
diff --git a/test/files/pos/t4220.scala b/test/files/pos/t4220.scala
index 8fb999e..98f2649 100644
--- a/test/files/pos/t4220.scala
+++ b/test/files/pos/t4220.scala
@@ -1,4 +1,4 @@
-// don't know if our half-working sbt build is meaningfully 
+// don't know if our half-working sbt build is meaningfully
 // tested for #4220 with this, but it can't hurt.
 class Boo(a: Int = 0)
 
diff --git a/test/files/pos/t4243.scala b/test/files/pos/t4243.scala
index 5fa8665..e6c66fa 100644
--- a/test/files/pos/t4243.scala
+++ b/test/files/pos/t4243.scala
@@ -3,16 +3,16 @@
 
 
 object wrap {
-  
+
   trait DomainLike[@specialized(Int) A, +This <: Domain[A]]
-  
+
   trait Domain[@specialized(Int) B]
   extends DomainLike[B, Domain[B]]
-  
+
   trait IterableDomainLike[@specialized(Int) C, +This <: IterableDomain[C]]
   extends DomainLike[C, This]
-  
+
   trait IterableDomain[@specialized(Int) D]
   extends Domain[D] with IterableDomainLike[D, IterableDomain[D]]
-  
+
 }
diff --git a/test/files/pos/t4266.scala b/test/files/pos/t4266.scala
index 301cc26..222f65e 100644
--- a/test/files/pos/t4266.scala
+++ b/test/files/pos/t4266.scala
@@ -1,21 +1,21 @@
 object Test {
-  
+
   trait Tensor2Like[
-    @specialized(Int) A1, 
-    +D1 <: DomainLike[A1], 
+    @specialized(Int) A1,
+    +D1 <: DomainLike[A1],
     +D <: Product2DomainLike[D1]
   ] {
     def domain: D;
-    
+
     def checkKey(k1: A1) {
       domain._1.contains(k1)
     }
   }
-  
+
   trait DomainLike[A] {
     def contains(key: A): Boolean;
   }
-  
+
   // trait DomainLike[@specialized(Int) A] {
   //   def contains(key: A): Boolean;
   // }
diff --git a/test/files/pos/t4269.scala b/test/files/pos/t4269.scala
index 70f0471..99a3078 100644
--- a/test/files/pos/t4269.scala
+++ b/test/files/pos/t4269.scala
@@ -1,5 +1,5 @@
-class A { 
-  PartialFunction.condOpt(Nil) {  
-    case items@List(_*) if true => 
+class A {
+  PartialFunction.condOpt(Nil) {
+    case items@List(_*) if true =>
   }
 }
diff --git a/test/files/pos/t4273.scala b/test/files/pos/t4273.scala
index 9a942e8..a4d3717 100644
--- a/test/files/pos/t4273.scala
+++ b/test/files/pos/t4273.scala
@@ -1,8 +1,8 @@
 class A {
   implicit def compareComparables[T](x: T)(implicit ord: Ordering[T]) = new ord.Ops(x)
-  
+
   class Bippy
   implicit val bippyOrdering = new Ordering[Bippy] { def compare(x: Bippy, y: Bippy) = util.Random.nextInt }
-  
+
   (new Bippy) < (new Bippy)
 }
\ No newline at end of file
diff --git a/test/files/pos/t4275.scala b/test/files/pos/t4275.scala
index 183cb51..1938ace 100644
--- a/test/files/pos/t4275.scala
+++ b/test/files/pos/t4275.scala
@@ -1,6 +1,6 @@
 object Test {
   def f = "abc".count(_ > 'a')
-  
+
   class A {
     private val count: Int = 0
   }
diff --git a/test/files/pos/t430-feb09.scala b/test/files/pos/t430-feb09.scala
index bba8996..1499f32 100644
--- a/test/files/pos/t430-feb09.scala
+++ b/test/files/pos/t430-feb09.scala
@@ -13,12 +13,12 @@ package c.scala {
   case class C[T]()
 }
 
-// Doesn't compile: type Nothing is not a member of d.scala 
+// Doesn't compile: type Nothing is not a member of d.scala
 package d.scala.d {
   case class D[T]()
 }
 
-// Doesn't compile: type Any is not a member of e.scala 
+// Doesn't compile: type Any is not a member of e.scala
 package e.scala {
   case class E[T >: Nothing]()
 }
diff --git a/test/files/pos/t4351.scala b/test/files/pos/t4351.scala
deleted file mode 100644
index 2d57588..0000000
--- a/test/files/pos/t4351.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-object Test {
-  def main(args: Array[String]): Unit = {
-    try new BooleanPropImpl() value
-    catch {
-      case e: RuntimeException => println("runtime exception")
-    }
-  }
-}
-
-trait Prop[@specialized(Boolean) +T] {
-  def value: T
-}
-
-class PropImpl[+T] extends Prop[T] {
-  def value: T = scala.sys.error("")
-}
-
-trait BooleanProp extends Prop[Boolean]
-
-class BooleanPropImpl() extends PropImpl[Boolean] with BooleanProp
diff --git a/test/files/pos/t4365/a_1.scala b/test/files/pos/t4365/a_1.scala
new file mode 100644
index 0000000..a24b577
--- /dev/null
+++ b/test/files/pos/t4365/a_1.scala
@@ -0,0 +1,18 @@
+import scala.collection._
+
+trait SeqViewLike[+A,
+                  +Coll,
+                  +This <: SeqView[A, Coll] with SeqViewLike[A, Coll, Nothing]]
+  extends Seq[A]   with GenSeqViewLike[A, Coll, Nothing]
+{
+
+  trait Transformed[+B] extends super[GenSeqViewLike].Transformed[B]
+
+  abstract class AbstractTransformed[+B] extends Seq[B] with Transformed[B] {
+    def underlying: Coll = error("")
+  }
+
+  trait Reversed extends Transformed[A] with super[GenSeqViewLike].Reversed
+
+  protected def newReversed: Transformed[A] = new AbstractTransformed[A] with Reversed
+}
diff --git a/test/files/pos/t4365/b_1.scala b/test/files/pos/t4365/b_1.scala
new file mode 100644
index 0000000..e142381
--- /dev/null
+++ b/test/files/pos/t4365/b_1.scala
@@ -0,0 +1,24 @@
+import scala.collection._
+
+trait GenSeqView0[+A, +Coll]
+
+trait GenSeqViewLike[+A,
+                     +Coll,
+                     +This <: GenSeqView0[A, Coll] with GenSeqViewLike[A, Coll, Nothing]]
+extends GenSeq[A]  {
+self =>
+
+  trait Transformed[+B] {
+    def length: Int = 0
+    def apply(idx: Int): B = error("")
+  }
+
+  trait Reversed extends Transformed[A] {
+    def iterator: Iterator[A] = createReversedIterator
+
+    private def createReversedIterator: Iterator[A] = {
+      self.foreach(_ => ())
+      null
+    }
+  }
+}
diff --git a/test/files/pos/t443.scala b/test/files/pos/t443.scala
index 5b5e3ea..cdaefe9 100644
--- a/test/files/pos/t443.scala
+++ b/test/files/pos/t443.scala
@@ -1,10 +1,10 @@
 object Test {
 
-  def lookup(): Option[Pair[String, String]] =
-    ((null: Option[Pair[String, String]]) : @unchecked) match {
-      case Some(Pair(_, _)) =>
+  def lookup(): Option[Tuple2[String, String]] =
+    ((null: Option[Tuple2[String, String]]) : @unchecked) match {
+      case Some((_, _)) =>
 	if (true)
-	  Some(Pair(null, null))
+	  Some((null, null))
 	else
 	  lookup() match {
 	    case Some(_) => Some(null)
diff --git a/test/files/pos/t4432.scala b/test/files/pos/t4432.scala
index 09f4c2a..1063123 100644
--- a/test/files/pos/t4432.scala
+++ b/test/files/pos/t4432.scala
@@ -9,7 +9,7 @@ object Main {
     }
     new A
   }
-  
+
   def foo2 = {
     class B {
       val x = {
@@ -38,5 +38,5 @@ object Main {
     }
     new D
   }
-  
+
 }
diff --git a/test/files/pos/t4457_1.scala b/test/files/pos/t4457_1.scala
index 4442f28..32edd6c 100644
--- a/test/files/pos/t4457_1.scala
+++ b/test/files/pos/t4457_1.scala
@@ -15,7 +15,7 @@ object ImplicitConvAmbiguity2 {
   def aFunc[A](a: NN[A]) = new BB[A]
 
   def bFunc[T](e1: N[T]) = {}
-  
+
   def typeMe1 {
     val x = aFunc(4F)
     bFunc(x)
diff --git a/test/files/pos/t4501.scala b/test/files/pos/t4501.scala
index dac2524..40628f1 100644
--- a/test/files/pos/t4501.scala
+++ b/test/files/pos/t4501.scala
@@ -6,7 +6,7 @@ class A {
   def f1 = foo(ListBuffer(), List())
   def f2 = foo(ListBuffer(), ListBuffer())
   def f3 = foo(List(), List())
-  
+
   // scalap
   // def f1 : scala.collection.Seq[scala.Nothing] = { /* compiled code */ }
   // def f2 : scala.collection.mutable.ListBuffer[scala.Nothing] = { /* compiled code */ }
diff --git a/test/files/pos/t4579.scala b/test/files/pos/t4579.scala
index 8951ec0..cd1553f 100644
--- a/test/files/pos/t4579.scala
+++ b/test/files/pos/t4579.scala
@@ -190,10 +190,10 @@ object LispCaseClasses extends Lisp {
 
     def extendEnv(env: Environment,
                   ps: List[String], args: List[Data]): Environment =
-      Pair(ps, args) match {
-        case Pair(List(), List()) =>
+      (ps, args) match {
+        case (List(), List()) =>
           env
-        case Pair(p :: ps1, arg :: args1) =>
+        case (p :: ps1, arg :: args1) =>
           extendEnv(env.extend(p, arg), ps1, args1)
         case _ =>
           lispError("wrong number of arguments")
@@ -381,10 +381,10 @@ object LispAny extends Lisp {
 
     def extendEnv(env: Environment,
                   ps: List[String], args: List[Data]): Environment =
-      Pair(ps, args) match {
-        case Pair(List(), List()) =>
+      (ps, args) match {
+        case (List(), List()) =>
           env
-        case Pair(p :: ps1, arg :: args1) =>
+        case (p :: ps1, arg :: args1) =>
           extendEnv(env.extend(p, arg), ps1, args1)
         case _ =>
           lispError("wrong number of arguments")
diff --git a/test/files/pos/t460.scala b/test/files/pos/t460.scala
index 466d06c..3fc13e4 100644
--- a/test/files/pos/t460.scala
+++ b/test/files/pos/t460.scala
@@ -1,8 +1,8 @@
 object Bug460 {
   def testFun(x : Int, y : Int) = x + y
-  val fn = testFun _ 
-  
-  fn(1, 2) // Ok 
+  val fn = testFun _
+
+  fn(1, 2) // Ok
   (testFun(_, _))(1, 2) // Ok
   (testFun _).apply(1, 2)
   (testFun _)(1, 2) // Error! (but no longer)
diff --git a/test/files/pos/t4603/S.scala b/test/files/pos/t4603/S.scala
index 9e22819..c7d809d 100644
--- a/test/files/pos/t4603/S.scala
+++ b/test/files/pos/t4603/S.scala
@@ -1,7 +1,7 @@
 // S.scala
 class S extends J[AnyRef]
 
-object Test {    
+object Test {
   def main(args:Array[String]) {
     J.f(classOf[S])
   }
diff --git a/test/pending/pos/t4649.flags b/test/files/pos/t4649.flags
similarity index 100%
rename from test/pending/pos/t4649.flags
rename to test/files/pos/t4649.flags
diff --git a/test/pending/pos/t4649.scala b/test/files/pos/t4649.scala
similarity index 100%
rename from test/pending/pos/t4649.scala
rename to test/files/pos/t4649.scala
diff --git a/test/files/pos/t4716.scala b/test/files/pos/t4716.scala
index d4bd55c..ec29e8d 100644
--- a/test/files/pos/t4716.scala
+++ b/test/files/pos/t4716.scala
@@ -2,7 +2,7 @@
 
 
 
-trait Bug2[@specialized(Int) +A] extends TraversableOnce[A] {  
+trait Bug2[@specialized(Int) +A] extends TraversableOnce[A] {
   def ++[B >: A](that: TraversableOnce[B]) = {
     lazy val it = that.toIterator
     it
diff --git a/test/files/pos/t4717.scala b/test/files/pos/t4717.scala
index 4acfe48..ed35a8a 100644
--- a/test/files/pos/t4717.scala
+++ b/test/files/pos/t4717.scala
@@ -6,13 +6,13 @@
 
 
 trait Bug1[@specialized(Boolean) A] extends TraversableOnce[A] {
-  
+
   def ++[B >: A](that: TraversableOnce[B]): Iterator[B] = new Iterator[B] {
     lazy val it = that.toIterator
     def hasNext = it.hasNext
     def next = it.next
   }
-  
+
 }
 
 
diff --git a/test/files/pos/t4744.flags b/test/files/pos/t4744.flags
new file mode 100644
index 0000000..ca20f55
--- /dev/null
+++ b/test/files/pos/t4744.flags
@@ -0,0 +1 @@
+-Ybreak-cycles
diff --git a/test/files/pos/t4744/Bar.scala b/test/files/pos/t4744/Bar.scala
new file mode 100644
index 0000000..1fb6d78
--- /dev/null
+++ b/test/files/pos/t4744/Bar.scala
@@ -0,0 +1 @@
+class Bar { val quux = new Foo[java.lang.Integer]() }
diff --git a/test/files/pos/t4744/Foo.java b/test/files/pos/t4744/Foo.java
new file mode 100644
index 0000000..6c764d0
--- /dev/null
+++ b/test/files/pos/t4744/Foo.java
@@ -0,0 +1 @@
+public class Foo<T extends Comparable<? super T>> {}
diff --git a/test/files/pos/t4760.scala b/test/files/pos/t4760.scala
index 767e384..d4407a8 100644
--- a/test/files/pos/t4760.scala
+++ b/test/files/pos/t4760.scala
@@ -19,7 +19,7 @@ class Test {
       import scala.util.Properties.lineSeparator
     }
   }
-  
+
   // parses
   def f3 = {
     import scala._
diff --git a/test/pending/pos/t4786.scala b/test/files/pos/t4786.scala
similarity index 100%
rename from test/pending/pos/t4786.scala
rename to test/files/pos/t4786.scala
diff --git a/test/files/pos/t4840.scala b/test/files/pos/t4840.scala
index eefa3b2..bf44f71 100644
--- a/test/files/pos/t4840.scala
+++ b/test/files/pos/t4840.scala
@@ -1,6 +1,6 @@
 class Crashy {
   def g(): Option[Any] = None
-  
+
   def crashy() = {
     for (_ <- g()) {
       (null: Any) match {
diff --git a/test/files/pos/t4853.scala b/test/files/pos/t4853.scala
index ed9b320..c91f2d6 100644
--- a/test/files/pos/t4853.scala
+++ b/test/files/pos/t4853.scala
@@ -3,7 +3,7 @@ object Animal {
 }
 
 class Animal[A <: AwakeOrAsleep] {
-  def goToSleep[B >: A <: Awake]: Animal[Asleep] = new Animal[Asleep] 
+  def goToSleep[B >: A <: Awake]: Animal[Asleep] = new Animal[Asleep]
   def wakeUp[B >: A <: Asleep]: Animal[Awake] = new Animal[Awake]
 }
 
diff --git a/test/files/pos/t4859.scala b/test/files/pos/t4859.scala
new file mode 100644
index 0000000..284a39b
--- /dev/null
+++ b/test/files/pos/t4859.scala
@@ -0,0 +1,17 @@
+object O {
+  // error: C is not a legal prefix for a constructor
+  C().CC()
+  // but this works.
+  D().DD()
+}
+
+case class C() {
+  case class CC()
+}
+
+case class D() {
+  class DD()
+  object DD {
+    def apply() = new DD()
+  }
+}
diff --git a/test/files/pos/t4970b.scala b/test/files/pos/t4970b.scala
new file mode 100644
index 0000000..cf9a6a6
--- /dev/null
+++ b/test/files/pos/t4970b.scala
@@ -0,0 +1,32 @@
+object Traits {
+  trait OuterClass[V <: OuterClass[V]#InnerClass] {
+  trait InnerClass {self: V =>
+      def method = ()
+    }
+  }
+
+  trait SubOuterClass[T <: SubOuterClass[T]#SubInnerClass] extends OuterClass[T] {
+    trait SubInnerClass extends super.InnerClass {self: T =>  }
+  }
+
+  trait SubOuterClass2[T <: SubOuterClass2[T]#SubInnerClass2] extends OuterClass[T] {
+    trait SubInnerClass2 extends super.InnerClass {self: InnerClass with T =>  }
+  }
+
+}
+
+// object Classes {
+//   class OuterClass[V <: OuterClass[V]#InnerClass] {
+//   class InnerClass {self: V =>
+//       def method = ()
+//     }
+//   }
+
+//   class SubOuterClass[T <: SubOuterClass[T]#SubInnerClass] extends OuterClass[T] {
+//     class SubInnerClass extends super.InnerClass {self: T =>  }
+//   }
+
+//   class SubOuterClass2[T <: SubOuterClass2[T]#SubInnerClass2] extends OuterClass[T] {
+//     class SubInnerClass2 extends super.InnerClass {self: InnerClass with T =>  }
+//   }
+// }
diff --git a/test/files/pos/t5022.scala b/test/files/pos/t5022.scala
new file mode 100644
index 0000000..b9a085f
--- /dev/null
+++ b/test/files/pos/t5022.scala
@@ -0,0 +1,22 @@
+class ForSomeVsUnapply {
+  def test {
+    def makeWrap: Wrap = ???
+    def useRep[e](rep: (e, X[e])) = ()
+
+    val repUnapply = Wrap.unapply(makeWrap).get
+    useRep(repUnapply)  // okay
+
+    val Wrap(rep0) = makeWrap
+    useRep(rep0) // error
+
+    val rep = makeWrap match {
+      case Wrap(r) => r
+    };
+
+    useRep(rep) // error
+  }
+}
+
+class X[e]
+
+case class Wrap(rep: (e, X[e]) forSome { type e })
diff --git a/test/files/pos/t5031_2.scala b/test/files/pos/t5031_2.scala
index ded3e82..e51215d 100644
--- a/test/files/pos/t5031_2.scala
+++ b/test/files/pos/t5031_2.scala
@@ -1,4 +1,4 @@
-package object t5031 {	
+package object t5031 {
   class ID
 }
 
diff --git a/test/files/pos/t5120.scala b/test/files/pos/t5120.scala
index 2c193d1..86d4470 100644
--- a/test/files/pos/t5120.scala
+++ b/test/files/pos/t5120.scala
@@ -1,13 +1,13 @@
-// An example extracted from SBT by Iulian 
-// that showed that the previous fix to t5120 
+// An example extracted from SBT by Iulian
+// that showed that the previous fix to t5120
 // was too strict.
 class Test {
   class ScopedKey[T]
   class Value[T]
 
-  class Compiled[T](val settings: Seq[Pair[T]])
-  
-  case class Pair[T](k: ScopedKey[T], v: ScopedKey[T])
+  class Compiled[T](val settings: Seq[Tuple2[T]])
+
+  case class Tuple2[T](k: ScopedKey[T], v: ScopedKey[T])
 
   def transform[T](x: T) = x
 
diff --git a/test/files/pos/t5127.scala b/test/files/pos/t5127.scala
index e90b8d0..c562025 100644
--- a/test/files/pos/t5127.scala
+++ b/test/files/pos/t5127.scala
@@ -4,5 +4,5 @@ package foo {
   class Parametrized1[T] extends Abstract1[Parametrized2[T]] {
     def bar(a: AnyRef) { a match { case d: Parametrized1[_] => println("ok") } }
   }
-  class Parametrized2[T] extends Parametrized1[T] with Abstract2[Parametrized2[T]] 
+  class Parametrized2[T] extends Parametrized1[T] with Abstract2[Parametrized2[T]]
 }
diff --git a/test/files/pos/t5156.scala b/test/files/pos/t5156.scala
index e7912ef..52412ad 100644
--- a/test/files/pos/t5156.scala
+++ b/test/files/pos/t5156.scala
@@ -1,6 +1,6 @@
 sealed trait HList
-final case class HCons[H, T <: HList](head : H, tail : T) extends HList 
-case object HNil extends HList 
+final case class HCons[H, T <: HList](head : H, tail : T) extends HList
+case object HNil extends HList
 
 object HList {
   type ::[H, T <: HList] = HCons[H, T]
diff --git a/test/files/pos/t516.scala b/test/files/pos/t516.scala
index 735b259..5561b76 100644
--- a/test/files/pos/t516.scala
+++ b/test/files/pos/t516.scala
@@ -4,7 +4,7 @@ import scala.collection.script._;
 class Members;
 
 object subscriber extends Subscriber[Message[String] with Undoable, Members] {
- def notify(pub: Members, event: Message[String] with Undoable): Unit = 
+ def notify(pub: Members, event: Message[String] with Undoable): Unit =
   (event: Message[String]) match {
    case Include(l, elem) => Console.println("ADD: " + elem);
    case  Remove(l, elem) => Console.println("REM: " + elem);
@@ -12,4 +12,3 @@ object subscriber extends Subscriber[Message[String] with Undoable, Members] {
    //case r : Remove [HasTree] with Undoable  =>
   }
  }
-  
diff --git a/test/files/pos/t5165b/TestAnnotation_1.java b/test/files/pos/t5165b/TestAnnotation_1.java
new file mode 100644
index 0000000..02eb3f9
--- /dev/null
+++ b/test/files/pos/t5165b/TestAnnotation_1.java
@@ -0,0 +1,11 @@
+import java.lang.annotation.*;
+
+@Retention(RetentionPolicy.RUNTIME)
+public @interface TestAnnotation_1 {
+  public enum TestEnumOne { A, B }
+  public enum TestEnumTwo { C, D }
+
+  public TestEnumOne one();
+  public TestEnumTwo two();
+  public String strVal();
+}
diff --git a/test/files/pos/t5165b/TestObject_3.scala b/test/files/pos/t5165b/TestObject_3.scala
new file mode 100644
index 0000000..eaf244e
--- /dev/null
+++ b/test/files/pos/t5165b/TestObject_3.scala
@@ -0,0 +1,3 @@
+
+object TestObject extends TestTrait
+
diff --git a/test/files/pos/t5165b/TestTrait_2.scala b/test/files/pos/t5165b/TestTrait_2.scala
new file mode 100644
index 0000000..ab4face
--- /dev/null
+++ b/test/files/pos/t5165b/TestTrait_2.scala
@@ -0,0 +1,3 @@
+
+@TestAnnotation_1(one=TestAnnotation_1.TestEnumOne.A, two=TestAnnotation_1.TestEnumTwo.C, strVal="something")
+trait TestTrait
diff --git a/test/files/pos/t5178.scala b/test/files/pos/t5178.scala
index 26c008d..ed0f814 100644
--- a/test/files/pos/t5178.scala
+++ b/test/files/pos/t5178.scala
@@ -4,7 +4,7 @@ abstract class FileOps {
 
 trait DefaultFileOps {
   self: DefaultPath =>
-  
+
   override def withLock[R](start: Long = 5): Option[R] = None
 }
 
diff --git a/test/files/pos/t5223.scala b/test/files/pos/t5223.scala
index 0b2528e..d81daa9 100644
--- a/test/files/pos/t5223.scala
+++ b/test/files/pos/t5223.scala
@@ -2,5 +2,5 @@ import scala.reflect.runtime.universe._
 
 object Foo extends App {
   reify{def printf(format: String, args: Any*): String = null }
-  reify{def printf(format: String, args: Any*): String = ("abc": @cloneable)}
+  reify{def printf(format: String, args: Any*): String = ("abc": @deprecated)}
 }
\ No newline at end of file
diff --git a/test/files/pos/t5240.scala b/test/files/pos/t5240.scala
index 2db689c..065d175 100644
--- a/test/files/pos/t5240.scala
+++ b/test/files/pos/t5240.scala
@@ -5,7 +5,7 @@
 
 
 package object foo {
-  
+
   var labels: Array[_ <: String] = null
-  
+
 }
diff --git a/test/files/pos/t5317.scala b/test/files/pos/t5317.scala
index 8c9c9d8..052e844 100644
--- a/test/files/pos/t5317.scala
+++ b/test/files/pos/t5317.scala
@@ -2,11 +2,11 @@ object Test {
   trait S { type T; val x: AnyRef }
   trait A extends S { type T <: A; val x: A = null }
   trait B extends S { type T <: B; val x: B = null }
-  
+
   val a = new A{}
   val b = new B{}
   val y = if (true) a else b
-  
+
   // lub of y should allow for this
   println(y.x.x)
 }
diff --git a/test/pending/pos/t5399a.scala b/test/files/pos/t5399a.scala
similarity index 100%
rename from test/pending/pos/t5399a.scala
rename to test/files/pos/t5399a.scala
diff --git a/test/files/pos/t5508-min-okay.scala b/test/files/pos/t5508-min-okay.scala
new file mode 100644
index 0000000..3a38b9c
--- /dev/null
+++ b/test/files/pos/t5508-min-okay.scala
@@ -0,0 +1,6 @@
+object Test {
+  trait NestedTrait { // must be nested and a trait
+    private val _st : Int = 0 // crashes if changed to private[this]
+    val escape = { () => _st }
+  }
+}
diff --git a/test/files/pos/t5508-min-okay2.scala b/test/files/pos/t5508-min-okay2.scala
new file mode 100644
index 0000000..935f286
--- /dev/null
+++ b/test/files/pos/t5508-min-okay2.scala
@@ -0,0 +1,4 @@
+trait TopTrait { // must be nested and a trait
+  private[this] val _st : Int = 0 // crashes if TopTrait is not top level
+  val escape = { () => _st }
+}
diff --git a/test/files/pos/t5508-min.scala b/test/files/pos/t5508-min.scala
new file mode 100644
index 0000000..f59d2bd
--- /dev/null
+++ b/test/files/pos/t5508-min.scala
@@ -0,0 +1,6 @@
+object Test {
+  trait NestedTrait { // must be nested and a trait
+    private[this] val _st : Int = 0 // must be private[this]
+    val escape = { () => _st }
+  }
+}
diff --git a/test/files/pos/t5508.scala b/test/files/pos/t5508.scala
new file mode 100644
index 0000000..2b49758
--- /dev/null
+++ b/test/files/pos/t5508.scala
@@ -0,0 +1,83 @@
+package TestTestters
+
+trait Test1 {
+  private[this] var _st : Int = 0
+  def close : PartialFunction[Any,Any] = {
+    case x : Int =>
+      _st = identity(_st)
+  }
+}
+
+object Base1 {
+  trait Test2 {
+    private[this] var _st : Int = 0
+    def close : PartialFunction[Any,Any] = {
+      case x : Int =>
+  _st = identity(_st)
+    }
+  }
+}
+
+class Test3 {
+  private[this] var _st : Int = 0
+  def close : PartialFunction[Any,Any] = {
+    case x : Int =>
+      _st = 1
+  }
+}
+
+object Base2 {
+  class Test4 {
+    private[this] var _st : Int = 0
+    def close : PartialFunction[Any,Any] = {
+      case x : Int =>
+  _st = 1
+    }
+  }
+}
+
+class Base3 {
+  trait Test5 {
+    private[this] var _st : Int = 0
+    def close : PartialFunction[Any,Any] = {
+      case x : Int =>
+  _st = 1
+    }
+  }
+}
+
+object Base4 {
+  trait Test6 {
+    private[this] var _st : Int = 0
+    def close : PartialFunction[Any,Any] = {
+      case x : Int => ()
+    }
+  }
+}
+
+object Base5 {
+  trait Test7 {
+    private[this] var _st : Int = 0
+    def close = () => {
+      _st = 1
+    }
+  }
+}
+
+object Base6 {
+  class Test8 {
+    private[this] var _st : Int = 0
+    def close = () => {
+      _st = 1
+    }
+  }
+}
+
+object Base7 {
+  trait Test9 {
+    var st : Int = 0
+    def close = () => {
+      st = 1
+    }
+  }
+}
diff --git a/test/files/pos/t5541.scala b/test/files/pos/t5541.scala
index 39682a2..90e5e41 100644
--- a/test/files/pos/t5541.scala
+++ b/test/files/pos/t5541.scala
@@ -27,7 +27,7 @@ object HASkipList {
     def asBranch : Branch[ S, A ] = this
   }
 }
-sealed trait HASkipList[ S <: Sys[ S ], @specialized( Int ) A ] 
+sealed trait HASkipList[ S <: Sys[ S ], @specialized( Int ) A ]
 
 class HASkipListView[ S <: Sys[ S ], A ]( private val l: HASkipList[ S, A ])( implicit system: S ) {
   import HASkipList.Node
diff --git a/test/files/pos/t5580b.scala b/test/files/pos/t5580b.scala
deleted file mode 100644
index d5a4a0a..0000000
--- a/test/files/pos/t5580b.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/** It's a pos test because it does indeed compile,
- *  not so much because I'm glad it does.  Testing
- *  that error messages created and discarded during
- *  implicit search don't blow it up.
- */
-
-import scala.collection.mutable.WeakHashMap
-import scala.collection.JavaConversions._
-
-class bar { }
-
-class foo {
-  val map = WeakHashMap[AnyRef, collection.mutable.Map[bar, collection.mutable.Set[bar]]]()
-
-  def test={
-    val tmp:bar=null
-    if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set())
-  }
-}
diff --git a/test/pending/pos/t5606.scala b/test/files/pos/t5606.scala
similarity index 100%
rename from test/pending/pos/t5606.scala
rename to test/files/pos/t5606.scala
diff --git a/test/pending/pos/t5639/Bar.scala b/test/files/pos/t5639/Bar.scala
similarity index 100%
rename from test/pending/pos/t5639/Bar.scala
rename to test/files/pos/t5639/Bar.scala
diff --git a/test/files/pos/t5639/Foo.scala b/test/files/pos/t5639/Foo.scala
new file mode 100644
index 0000000..1a07734
--- /dev/null
+++ b/test/files/pos/t5639/Foo.scala
@@ -0,0 +1,7 @@
+package pack.age
+
+class Baz
+
+object Implicits  {
+  implicit def Baz(n: Int): Baz = new Baz
+}
diff --git a/test/files/pos/t5644/BoxesRunTime.java b/test/files/pos/t5644/BoxesRunTime.java
index 241bf79..74c4c6b 100644
--- a/test/files/pos/t5644/BoxesRunTime.java
+++ b/test/files/pos/t5644/BoxesRunTime.java
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2006-2011, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2006-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/test/files/pos/t5692a/Macros_1.scala b/test/files/pos/t5692a/Macros_1.scala
index 06b5a3d..440e37d 100644
--- a/test/files/pos/t5692a/Macros_1.scala
+++ b/test/files/pos/t5692a/Macros_1.scala
@@ -1,6 +1,6 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
-  def impl[T](c: Context) = c.literalUnit
+  def impl[T](c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
   def foo[T] = macro impl[T]
 }
\ No newline at end of file
diff --git a/test/files/pos/t5692b/Macros_1.scala b/test/files/pos/t5692b/Macros_1.scala
index b28d19f..98fb882 100644
--- a/test/files/pos/t5692b/Macros_1.scala
+++ b/test/files/pos/t5692b/Macros_1.scala
@@ -1,6 +1,6 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
-  def impl[T, U](c: Context) = c.literalUnit
+  def impl[T, U](c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
   def foo[T, U] = macro impl[T, U]
 }
\ No newline at end of file
diff --git a/test/files/pos/t5706.scala b/test/files/pos/t5706.scala
index 20a8b25..6f02073 100644
--- a/test/files/pos/t5706.scala
+++ b/test/files/pos/t5706.scala
@@ -1,10 +1,15 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.{Context => BlackboxContext}
+import scala.reflect.macros.whitebox.{Context => WhiteboxContext}
 
 class Logger {
-  def error(message: String) = macro Impls.error
+  def error1(message: String) = macro Impls.error1
+  def error2(message: String) = macro Impls.error2
 }
 
 object Impls {
-  type LoggerContext = Context { type PrefixType = Logger }
-  def error(c: LoggerContext)(message: c.Expr[String]): c.Expr[Unit] = ???
+  type LoggerContext1 = BlackboxContext { type PrefixType = Logger }
+  def error1(c: LoggerContext1)(message: c.Expr[String]): c.Expr[Unit] = ???
+
+  type LoggerContext2 = WhiteboxContext { type PrefixType = Logger }
+  def error2(c: LoggerContext2)(message: c.Expr[String]): c.Expr[Unit] = ???
 }
diff --git a/test/files/pos/t573.scala b/test/files/pos/t573.scala
index 7b9d377..694d001 100644
--- a/test/files/pos/t573.scala
+++ b/test/files/pos/t573.scala
@@ -16,15 +16,15 @@ import DirX._;
 
 abstract class Linked {
   type Node <: Node0;
-  
+
   abstract class Node0 {
     self: Node =>
-    
+
   	var next : Node = _;
     var prev : Node = _;
-    
+
     def get(dir : Dir) = if (dir == BEFORE) prev; else next;
-    private def set(dir : Dir, node : Node) = 
+    private def set(dir : Dir, node : Node) =
       if (dir == BEFORE) prev = node; else next = node;
 
     def link(dir : Dir, node : Node) = {
@@ -34,7 +34,7 @@ abstract class Linked {
       node.set(dir.reverse, self);
     }
 
-    
+
     def end(dir : Dir) : Node = {
       if (get(dir) == null) this;
       else get(dir).end(dir);
diff --git a/test/files/pos/t5744/Macros_1.scala b/test/files/pos/t5744/Macros_1.scala
index 288a886..6e2bf48 100644
--- a/test/files/pos/t5744/Macros_1.scala
+++ b/test/files/pos/t5744/Macros_1.scala
@@ -1,5 +1,5 @@
 import scala.language.experimental.macros
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def foo[U: Numeric](x: U) = macro foo_impl[U]
diff --git a/test/files/neg/t5760-pkgobj-warn/stalepkg_1.scala b/test/files/pos/t5760-pkgobj-warn/stalepkg_1.scala
similarity index 100%
rename from test/files/neg/t5760-pkgobj-warn/stalepkg_1.scala
rename to test/files/pos/t5760-pkgobj-warn/stalepkg_1.scala
diff --git a/test/files/neg/t5760-pkgobj-warn/stalepkg_2.scala b/test/files/pos/t5760-pkgobj-warn/stalepkg_2.scala
similarity index 100%
rename from test/files/neg/t5760-pkgobj-warn/stalepkg_2.scala
rename to test/files/pos/t5760-pkgobj-warn/stalepkg_2.scala
diff --git a/test/files/pos/t577.scala b/test/files/pos/t577.scala
index 236c139..ede4539 100644
--- a/test/files/pos/t577.scala
+++ b/test/files/pos/t577.scala
@@ -1,15 +1,15 @@
 trait PriorityTree {
   type Node <: BasicTreeNode;
-  
+
   val top = initTree;
   top.next = (initTree);
   top.next.prev = (top);
-  
+
   def initTree : Node;
-  
-  
 
-  
+
+
+
   trait BasicTreeNode {
     private[PriorityTree] var next  : Node = _;
     private[PriorityTree] var prev  : Node = _;
diff --git a/test/files/pos/t5809.scala b/test/files/pos/t5809.scala
index 133e13c..6101f54 100644
--- a/test/files/pos/t5809.scala
+++ b/test/files/pos/t5809.scala
@@ -1,5 +1,6 @@
 package object foo {
-  implicit class PimpedInt(foo: Int) {
+  implicit class EnrichedInt(foo: Int) {
     def bar = ???
+    def bippy = foo
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/neg/t5845.scala b/test/files/pos/t5845.scala
similarity index 100%
rename from test/files/neg/t5845.scala
rename to test/files/pos/t5845.scala
diff --git a/test/files/pos/t5846.scala b/test/files/pos/t5846.scala
index b06f5ac..05cabed 100644
--- a/test/files/pos/t5846.scala
+++ b/test/files/pos/t5846.scala
@@ -4,7 +4,7 @@
 
 /** Return the most general sorted map type. */
 object Test extends App {
-  
+
   val empty: collection.SortedMap[String, String] = collection.SortedMap.empty[String, String]
-  
+
 }
diff --git a/test/files/pos/t5853.scala b/test/files/pos/t5853.scala
index 21d8020..2ebb666 100644
--- a/test/files/pos/t5853.scala
+++ b/test/files/pos/t5853.scala
@@ -41,7 +41,7 @@ object Arrow {
   implicit final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal {
     @inline def ->>[B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
   }
-  
+
   def foo = 1 ->> 2
 }
 
@@ -50,6 +50,6 @@ object SpecArrow {
   implicit final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal {
     @inline def ->> [@specialized(Int) B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
   }
-  
+
   def foo = 1 ->> 2
 }
diff --git a/test/files/pos/t5877.scala b/test/files/pos/t5877.scala
index c7827df..939013c 100644
--- a/test/files/pos/t5877.scala
+++ b/test/files/pos/t5877.scala
@@ -7,8 +7,8 @@ package foo {
 }
 
 package object foo {
-  // Crasher: No synthetics for method PimpedFoo2: synthetics contains
-  implicit class PimpedFoo2(value: Foo) {
+  // Crasher: No synthetics for method EnrichedFoo2: synthetics contains
+  implicit class EnrichedFoo2(value: Foo) {
     def huzzah = ""
   }
 }
diff --git a/test/files/pos/t5877b.scala b/test/files/pos/t5877b.scala
index 6b8cbd4..43a2ea2 100644
--- a/test/files/pos/t5877b.scala
+++ b/test/files/pos/t5877b.scala
@@ -7,7 +7,7 @@ object Test {
 }
 
 object `package` {
-  implicit class PimpedFoo2(value: Foo) {
+  implicit class EnrichedFoo2(value: Foo) {
     def huzzah = ""
   }
 }
diff --git a/test/files/pos/t5900a.scala b/test/files/pos/t5900a.scala
new file mode 100644
index 0000000..cb02f67
--- /dev/null
+++ b/test/files/pos/t5900a.scala
@@ -0,0 +1,9 @@
+case class Transition[S](x: S)
+
+object C
+
+object Test {
+  (??? : Any) match {
+    case Transition(C) =>
+  }
+}
diff --git a/test/files/pos/t5954a/A_1.scala b/test/files/pos/t5954a/A_1.scala
new file mode 100644
index 0000000..10ead0b
--- /dev/null
+++ b/test/files/pos/t5954a/A_1.scala
@@ -0,0 +1,6 @@
+package p1 {
+  object `package` {
+    implicit class Foo(a: Any)
+    object Foo
+  }
+}
diff --git a/test/files/pos/t5954a/B_2.scala b/test/files/pos/t5954a/B_2.scala
new file mode 100644
index 0000000..10ead0b
--- /dev/null
+++ b/test/files/pos/t5954a/B_2.scala
@@ -0,0 +1,6 @@
+package p1 {
+  object `package` {
+    implicit class Foo(a: Any)
+    object Foo
+  }
+}
diff --git a/test/files/pos/t5954b/A_1.scala b/test/files/pos/t5954b/A_1.scala
new file mode 100644
index 0000000..8465e8f
--- /dev/null
+++ b/test/files/pos/t5954b/A_1.scala
@@ -0,0 +1,6 @@
+package p {
+  package object base {
+    class B
+    object B
+  }
+}
diff --git a/test/files/pos/t5954b/B_2.scala b/test/files/pos/t5954b/B_2.scala
new file mode 100644
index 0000000..f7e4704
--- /dev/null
+++ b/test/files/pos/t5954b/B_2.scala
@@ -0,0 +1,5 @@
+package p {
+  package object base {
+    case class B()
+  }
+}
diff --git a/test/files/neg/case-collision.flags b/test/files/pos/t5954c.flags
similarity index 100%
copy from test/files/neg/case-collision.flags
copy to test/files/pos/t5954c.flags
diff --git a/test/files/pos/t5954c/A_1.scala b/test/files/pos/t5954c/A_1.scala
new file mode 100644
index 0000000..29ad954
--- /dev/null
+++ b/test/files/pos/t5954c/A_1.scala
@@ -0,0 +1,18 @@
+package object A {
+  // these used to should be prevented by the implementation restriction
+  // but are now allowed
+  class B
+  object B
+  trait C
+  object C
+  case class D()
+  // all the rest of these should be ok
+  class E
+  object F
+  val g = "omg"
+  var h = "wtf"
+  def i = "lol"
+  type j = String
+  class K(val k : Int) extends AnyVal
+  implicit class L(val l : Int)
+}
diff --git a/test/files/pos/t5954c/B_2.scala b/test/files/pos/t5954c/B_2.scala
new file mode 100644
index 0000000..29ad954
--- /dev/null
+++ b/test/files/pos/t5954c/B_2.scala
@@ -0,0 +1,18 @@
+package object A {
+  // these used to should be prevented by the implementation restriction
+  // but are now allowed
+  class B
+  object B
+  trait C
+  object C
+  case class D()
+  // all the rest of these should be ok
+  class E
+  object F
+  val g = "omg"
+  var h = "wtf"
+  def i = "lol"
+  type j = String
+  class K(val k : Int) extends AnyVal
+  implicit class L(val l : Int)
+}
diff --git a/test/files/pos/t5954d.flags b/test/files/pos/t5954d.flags
new file mode 100644
index 0000000..6ced0e7
--- /dev/null
+++ b/test/files/pos/t5954d.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xdev
diff --git a/test/files/pos/t5954d/A_1.scala b/test/files/pos/t5954d/A_1.scala
new file mode 100644
index 0000000..8465e8f
--- /dev/null
+++ b/test/files/pos/t5954d/A_1.scala
@@ -0,0 +1,6 @@
+package p {
+  package object base {
+    class B
+    object B
+  }
+}
diff --git a/test/files/pos/t5954d/B_2.scala b/test/files/pos/t5954d/B_2.scala
new file mode 100644
index 0000000..a4aa2eb
--- /dev/null
+++ b/test/files/pos/t5954d/B_2.scala
@@ -0,0 +1,7 @@
+package p {
+  trait T {
+    class B
+    object B
+  }
+  package object base extends T
+}
diff --git a/test/files/pos/t599.scala b/test/files/pos/t599.scala
index 53f205a..968e2de 100644
--- a/test/files/pos/t599.scala
+++ b/test/files/pos/t599.scala
@@ -16,4 +16,4 @@ abstract class FooA {
       val aaa: InnerB.this.B = doB
       aaa.xxx;
     }
-  } 
+  }
diff --git a/test/files/pos/t602.scala b/test/files/pos/t602.scala
index 6062b97..18dd405 100644
--- a/test/files/pos/t602.scala
+++ b/test/files/pos/t602.scala
@@ -10,5 +10,5 @@ case class Span[K <: Ordered[K]](low: Option[K], high: Option[K]) extends Functi
     case Span(Some(low), None) => (k >= low)
     case Span(None, Some(high)) => (k <= high)
     case _ => false
-  } 
+  }
 }
diff --git a/test/files/pos/t6047.scala b/test/files/pos/t6047.scala
index bc5f856..8c3dd18 100644
--- a/test/files/pos/t6047.scala
+++ b/test/files/pos/t6047.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 import java.io.InputStream
 
 object Macros {
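
As an illustrative sketch that is not part of the upstream patch: hunks like the one above — and the matching changes to t6485a, t6485b, t6516, t7377, t7461, t7649, t7776 and t8001 further down — move the tests from scala.reflect.macros.Context to scala.reflect.macros.blackbox.Context, following 2.11's split of the macro API into blackbox and whitebox contexts. A minimal blackbox macro against the new import, assuming the 2.11 macro API with quasiquotes (all names here are hypothetical), might look like:

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object HelloMacro {
      // A macro def and its implementation; the impl builds its result tree with a quasiquote.
      def hello: Unit = macro helloImpl
      def helloImpl(c: Context): c.Expr[Unit] = {
        import c.universe._
        c.Expr[Unit](q"""println("hello from a blackbox macro")""")
      }
    }
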
diff --git a/test/files/pos/t6123-explaintypes-implicits.flags b/test/files/pos/t6123-explaintypes-implicits.flags
new file mode 100644
index 0000000..b36707c
--- /dev/null
+++ b/test/files/pos/t6123-explaintypes-implicits.flags
@@ -0,0 +1 @@
+-explaintypes
diff --git a/test/files/pos/t6123-explaintypes-implicits.scala b/test/files/pos/t6123-explaintypes-implicits.scala
new file mode 100644
index 0000000..5242b44
--- /dev/null
+++ b/test/files/pos/t6123-explaintypes-implicits.scala
@@ -0,0 +1,13 @@
+object ImplicitBugReport {
+  trait Exp[+T]
+  trait CanBuildExp[-Elem, +To] extends (Exp[Elem] => To)
+  trait TraversableExp[T, ExpT <: Exp[T]] extends Exp[Traversable[T]]
+
+  implicit def canBuildExp[T]: CanBuildExp[T, Exp[T]] = ???
+  implicit def canBuildExpTrav[T, ExpT <: Exp[T]](implicit c: CanBuildExp[T, ExpT]): CanBuildExp[Traversable[T], TraversableExp[T, ExpT]] = ???
+  def toExpTempl[T, That](t: T)(implicit c: CanBuildExp[T, That]): That = ???
+
+  def testBug() {
+    val a1 = toExpTempl(Seq(1, 2, 3, 5))
+  }
+}
diff --git a/test/files/pos/t613.scala b/test/files/pos/t613.scala
index 6e3841d..e140833 100644
--- a/test/files/pos/t613.scala
+++ b/test/files/pos/t613.scala
@@ -3,9 +3,9 @@ class Outer extends App {
   abstract class C {
     val x: Int
   }
-  val foo = new C { 
+  val foo = new C {
     class I {
-      val z = y 
+      val z = y
     }
     val x = (new I).z
   }
diff --git a/test/files/pos/t616.scala b/test/files/pos/t616.scala
index 074ad19..bb91c73 100644
--- a/test/files/pos/t616.scala
+++ b/test/files/pos/t616.scala
@@ -1,7 +1,7 @@
 object testImplicit {
   implicit def foo2bar(foo: Foo): Bar = foo.bar
   class Foo(val bar: Bar) {
-    def testCoercion = {val a = this; a.baz} // here, foo2bar is inferred by the compiler, as expected 
+    def testCoercion = {val a = this; a.baz} // here, foo2bar is inferred by the compiler, as expected
     //def testCoercionThisImplicit = baz  // --> error: not found: value baz
     def testCoercionThisExplicit: Any = this.baz  // --> error: value baz is not a  member of Foo
   }
diff --git a/test/files/pos/t6162-inheritance.flags b/test/files/pos/t6162-inheritance.flags
new file mode 100644
index 0000000..c6bfaf1
--- /dev/null
+++ b/test/files/pos/t6162-inheritance.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
diff --git a/test/files/pos/t6162-inheritance.scala b/test/files/pos/t6162-inheritance.scala
new file mode 100644
index 0000000..fca751e
--- /dev/null
+++ b/test/files/pos/t6162-inheritance.scala
@@ -0,0 +1,22 @@
+package scala.t6126
+
+// Don't warn about inheritance in the same file.
+// We might use that as a prelude to sealing a class.
+
+@deprecatedInheritance("`Foo` will be made final in a future version.", "2.10.0")
+class Foo
+
+class SubFoo extends Foo
+
+@deprecatedInheritance()
+trait T
+
+object SubT extends T
+
+@deprecatedInheritance()
+trait S
+
+object O {
+  new S {
+  }
+}
diff --git a/test/files/pos/t6169/Exist.java b/test/files/pos/t6169/Exist.java
new file mode 100644
index 0000000..dfc6b36
--- /dev/null
+++ b/test/files/pos/t6169/Exist.java
@@ -0,0 +1,4 @@
+public class Exist<T extends String> {
+  // java helpfully re-interprets Exist<?> as Exist<? extends String>
+  public Exist<?> foo() { throw new RuntimeException(); }
+}
\ No newline at end of file
diff --git a/test/files/pos/t6169/ExistF.java b/test/files/pos/t6169/ExistF.java
new file mode 100644
index 0000000..70fabd7
--- /dev/null
+++ b/test/files/pos/t6169/ExistF.java
@@ -0,0 +1,4 @@
+public class ExistF<T extends ExistF<T>> {
+  // java helpfully re-interprets ExistF<?> as ExistF<?0 extends ExistF<?0>>
+  public ExistF<?> foo() { throw new RuntimeException(); }
+}
\ No newline at end of file
diff --git a/test/files/pos/t6169/ExistIndir.java b/test/files/pos/t6169/ExistIndir.java
new file mode 100644
index 0000000..e66d169
--- /dev/null
+++ b/test/files/pos/t6169/ExistIndir.java
@@ -0,0 +1,4 @@
+public class ExistIndir<T extends String, U extends T> {
+  // java helpfully re-interprets ExistIndir<?> as ExistIndir<? extends String>
+  public ExistIndir<?, ?> foo() { throw new RuntimeException(); }
+}
diff --git a/test/files/pos/t6169/OP.java b/test/files/pos/t6169/OP.java
new file mode 100644
index 0000000..15e4c56
--- /dev/null
+++ b/test/files/pos/t6169/OP.java
@@ -0,0 +1 @@
+public abstract class OP<T> { }
diff --git a/test/files/pos/t6169/Skin.java b/test/files/pos/t6169/Skin.java
new file mode 100644
index 0000000..780de1e
--- /dev/null
+++ b/test/files/pos/t6169/Skin.java
@@ -0,0 +1 @@
+public interface Skin<C extends Skinnable> { }
diff --git a/test/files/pos/t6169/Skinnable.java b/test/files/pos/t6169/Skinnable.java
new file mode 100644
index 0000000..f91eaa3
--- /dev/null
+++ b/test/files/pos/t6169/Skinnable.java
@@ -0,0 +1,3 @@
+public interface Skinnable {
+  OP<Skin<?>>	skinProperty();
+}
diff --git a/test/files/pos/t6169/skinnable.scala b/test/files/pos/t6169/skinnable.scala
new file mode 100644
index 0000000..3ba2734
--- /dev/null
+++ b/test/files/pos/t6169/skinnable.scala
@@ -0,0 +1,14 @@
+object ObjectProperty {
+  implicit def jfxObjectProperty2sfx[T](p: OP[T]) = new ObjectProperty[T](p)
+}
+
+class ObjectProperty[T](val delegate: OP[T])
+
+trait TestWildcardBoundInference {
+  def delegate: Skinnable
+  def skin: ObjectProperty[Skin[_ /* inferred: <: Skinnable */]] = ObjectProperty.jfxObjectProperty2sfx(delegate.skinProperty)
+  skin: ObjectProperty[Skin[_  <: Skinnable]]
+
+  def skinCheckInference = delegate.skinProperty
+  skinCheckInference: ObjectProperty[Skin[_  <: Skinnable]]
+}
\ No newline at end of file
diff --git a/test/files/pos/t6169/t6169.scala b/test/files/pos/t6169/t6169.scala
new file mode 100644
index 0000000..37f4261
--- /dev/null
+++ b/test/files/pos/t6169/t6169.scala
@@ -0,0 +1,7 @@
+class Test {
+  class MyExist extends ExistF[MyExist]
+  // SI-8197, SI-6169: java infers the bounds of existentials, so we have to as well now that SI-1786 is fixed...
+  def stringy: Exist[_ <: String] = (new Exist[String]).foo
+  def fbounded: (ExistF[t] forSome {type t <: ExistF[t] }) = (new MyExist).foo
+  def indir: ExistIndir[_ <: String, _ <: String] = (new ExistIndir[String, String]).foo
+}
\ No newline at end of file
diff --git a/test/files/pos/t6201.scala b/test/files/pos/t6201.scala
index 366c1f2..d4e5bce 100644
--- a/test/files/pos/t6201.scala
+++ b/test/files/pos/t6201.scala
@@ -1,13 +1,19 @@
+// probably needs xml's weirdness to reproduce
+// (specifically, _root_.scala.xml.Null being in the root package)
+class Elem
+
 class Test {
+  def elem: Elem = ???
+
   class Foo1 {
-    def must(x: scala.xml.Elem) = ()
+    def must(x: Elem) = ()
   }
 
   class Foo2 {
     def must(x: Int) = ()
   }
-  implicit def toFoo1(s: scala.xml.Elem) = new Foo1()
-  implicit def toFoo2(s: scala.xml.Elem) = new Foo2()
+  implicit def toFoo1(s: Elem) = new Foo1()
+  implicit def toFoo2(s: Elem) = new Foo2()
 
-  def is: Unit = { (<a>{"a"}</a>).must(<a>{"b"}</a>) }
+  def is: Unit = { (elem) }
 }
\ No newline at end of file
diff --git a/test/files/pos/t6210.scala b/test/files/pos/t6210.scala
index 1ce8493..855c621 100644
--- a/test/files/pos/t6210.scala
+++ b/test/files/pos/t6210.scala
@@ -10,9 +10,9 @@ case class TBool() extends Ty
 object Foo {
   def checkExpr(ast: AExpr): Ty = {
     var astTy:Ty = ast match {
-      case AAssign(nm: String, v:AExpr) => TBool() 
+      case AAssign(nm: String, v:AExpr) => TBool()
 
-      case AConstBool(v: Boolean) => TBool() 
+      case AConstBool(v: Boolean) => TBool()
 
       case _                          => throw new Exception(s"Unhandled case check(ast: ${ast.getClass})")
     }
diff --git a/test/files/pos/t6221.scala b/test/files/pos/t6221.scala
new file mode 100644
index 0000000..34f0285
--- /dev/null
+++ b/test/files/pos/t6221.scala
@@ -0,0 +1,33 @@
+class MyFunc[-A, +B] extends (A => B) { def apply(x: A): B = ??? }
+
+class MyCollection[A] {
+  def map[B](f: MyFunc[A, B]): MyCollection[B] = new MyCollection[B]
+}
+
+class OtherFunc[-A, +B] {}
+
+object Test {
+  implicit def functionToMyFunc[A, B](f: A => B): MyFunc[A, B] = new MyFunc // = new MyFunc[A,Nothing]();
+
+  implicit def otherFuncToMyFunc[A, B](f: OtherFunc[A, B]): MyFunc[A, B] = new MyFunc // = new MyFunc[A,Nothing]();
+
+  def main(args: Array[String]) {
+    val col = new MyCollection[Int]
+
+    // Doesn't compile: error: missing parameter type for expanded function ((x$1) => x$1.toString)
+    println(col.map(_.toString))
+    // scala.this.Predef.println(col.map[String](Test.this.functionToMyFunc[Int, String](((x$1: Int) => x$1.toString()))));
+
+    // Doesn't compile: error: missing parameter type
+    println(col.map(x => x.toString))
+    // scala.this.Predef.println(col.map[String](Test.this.functionToMyFunc[Int, String](((x: Int) => x.toString()))));
+
+    // Does compile
+    println(col.map((x: Int) => x.toString))
+    // scala.this.Predef.println(col.map[String](Test.this.functionToMyFunc[Int, String](((x: Int) => x.toString()))));
+
+    // Does compile (even though type params of OtherFunc not given)
+    println(col.map(new OtherFunc))
+    // scala.this.Predef.println(col.map[Nothing](Test.this.otherFuncToMyFunc[Any, Nothing](new OtherFunc[Any,Nothing]())))
+  }
+}
\ No newline at end of file
diff --git a/test/files/neg/t6231.scala b/test/files/pos/t6231.scala
similarity index 100%
rename from test/files/neg/t6231.scala
rename to test/files/pos/t6231.scala
diff --git a/test/files/pos/t6231b.scala b/test/files/pos/t6231b.scala
new file mode 100644
index 0000000..b4ddfe7
--- /dev/null
+++ b/test/files/pos/t6231b.scala
@@ -0,0 +1,8 @@
+class Test {
+  def f1(t: String) = {
+    trait T {
+      def xs = Nil map (_ => t)
+    }
+    ()
+  }
+}
diff --git a/test/files/pos/t6260.flags b/test/files/pos/t6260.flags
new file mode 100644
index 0000000..2349d82
--- /dev/null
+++ b/test/files/pos/t6260.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline
diff --git a/test/files/neg/t6260.scala b/test/files/pos/t6260.scala
similarity index 100%
rename from test/files/neg/t6260.scala
rename to test/files/pos/t6260.scala
diff --git a/test/files/pos/t6260a.scala b/test/files/pos/t6260a.scala
new file mode 100644
index 0000000..194294e
--- /dev/null
+++ b/test/files/pos/t6260a.scala
@@ -0,0 +1,15 @@
+final class Option[+A](val value: A) extends AnyVal
+
+// Was: sandbox/test.scala:21: error: bridge generated for member method f: ()Option[A] in class Bar
+//      which overrides method f: ()Option[A] in class Foo" 
+abstract class Foo[A]                { def f(): Option[A] }
+         class Bar[A] extends Foo[A] { def f(): Option[A] = ??? }
+
+// User reported this as erroneous but I couldn't reproduce with 2.10.{0,1,2,3}
+// https://issues.scala-lang.org/browse/SI-6260?focusedCommentId=64764&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-64764
+// I suspect he whittled down the example too far.
+class Wrapper(val value: Int) extends AnyVal
+abstract class Test { def check(the: Wrapper): Boolean }
+object T {
+  new Test { def check(the: Wrapper) = true }
+}
diff --git a/test/files/pos/t6260b.scala b/test/files/pos/t6260b.scala
new file mode 100644
index 0000000..73e2e58
--- /dev/null
+++ b/test/files/pos/t6260b.scala
@@ -0,0 +1,3 @@
+
+class X(val value: Object) extends AnyVal { def or(alt: => X): X = this }
+class Y { def f = new X("") or new X("") }
diff --git a/test/files/pos/t6301.scala b/test/files/pos/t6301.scala
new file mode 100644
index 0000000..fa81bbf
--- /dev/null
+++ b/test/files/pos/t6301.scala
@@ -0,0 +1,9 @@
+trait LoadedOver[@specialized(Int) A] {
+  def foo(x: Any): A
+  def foo(xs: String): A
+}
+
+object Test {
+  def loaded: AnyRef with LoadedOver[Int] = sys.error("")
+  loaded.foo("")
+}
diff --git a/test/files/pos/t6355pos.scala b/test/files/pos/t6355pos.scala
new file mode 100644
index 0000000..c0e740d
--- /dev/null
+++ b/test/files/pos/t6355pos.scala
@@ -0,0 +1,16 @@
+import scala.language.dynamics
+
+class A extends Dynamic {
+  def applyDynamic[T1](method: String)(x1: T1): Any = 1
+  def applyDynamic[T1, T2](method: String)(x: T1, y: T2): Any = 2
+  def applyDynamic[T1, T2, T3](method: String)(x: T1, y: T2, z: T3): Any = 3
+}
+
+object Test {
+  def main(args: Array[String]): Unit = {
+    val x = new A
+    println(x[Int](5))
+    println(x[Int, String](5, "a"))
+    println(x[Int, String, Int](5, "a", 5))
+  }
+}
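
As a rough sketch, not part of the upstream patch: the new pos test above exercises the Dynamic rewriting with explicit type arguments. Since A defines no apply member, an application such as x[Int](5) is expected to expand to an applyDynamic call with the method name "apply" and the type arguments carried through. Under that assumption (hypothetical names):

    import scala.language.dynamics

    class Dyn extends Dynamic {
      def applyDynamic[T1](method: String)(x1: T1): Any = (method, x1)
    }

    object DesugaringSketch {
      val d = new Dyn
      val sugared: Any  = d[Int](5)                        // as written in the test
      val expanded: Any = d.applyDynamic[Int]("apply")(5)  // the expected expansion
    }
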
diff --git a/test/files/pos/t640.scala b/test/files/pos/t640.scala
index 55f61df..45608bc 100644
--- a/test/files/pos/t640.scala
+++ b/test/files/pos/t640.scala
@@ -1,2 +1,2 @@
-@serializable class A
-@serializable class B extends A
+class A extends Serializable
+class B extends A with Serializable
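
As an illustrative aside, not part of the upstream patch: t640 is updated away from the deprecated @serializable annotation; mixing in Serializable is the replacement form. For a single class the migrated shape is simply (name is hypothetical):

    // was: @serializable class Payload(val data: String)
    class Payload(val data: String) extends Serializable
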
diff --git a/test/files/pos/t6447.scala b/test/files/pos/t6447.scala
new file mode 100644
index 0000000..6ef69d4
--- /dev/null
+++ b/test/files/pos/t6447.scala
@@ -0,0 +1,18 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+class X { type T }
+
+object X {
+  // this works
+  def foo(x: X): x.T = macro fooImpl
+  def fooImpl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ???
+
+  // this doesn't
+  def bar(x: X, y: X): (x.T, y.T) = macro barImpl
+  def barImpl(c: Context)(x: c.Expr[X], y: c.Expr[X]): c.Expr[(x.value.T, y.value.T)] = ???
+
+  // neither does this
+  def baz(x: X)(xs: List[x.T]): Unit = macro bazImpl
+  def bazImpl(c: Context)(x: c.Expr[X])(xs: c.Expr[List[x.value.T]]): c.Expr[Unit] = ???
+}
diff --git a/test/files/pos/t6485a/Macros_1.scala b/test/files/pos/t6485a/Macros_1.scala
index 85c2d5d..570c987 100644
--- a/test/files/pos/t6485a/Macros_1.scala
+++ b/test/files/pos/t6485a/Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def crash(c: Context): c.Expr[Unit] = c.universe.reify(())
diff --git a/test/files/pos/t6485b/Test.scala b/test/files/pos/t6485b/Test.scala
index 382df1c..3b81c6f 100644
--- a/test/files/pos/t6485b/Test.scala
+++ b/test/files/pos/t6485b/Test.scala
@@ -1,5 +1,5 @@
 import scala.language.experimental.macros
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 final class Ops[T](val x: T) extends AnyVal {
   def f = macro Macros.crash
diff --git a/test/files/pos/t651.scala b/test/files/pos/t651.scala
index 44d20ad..c146446 100644
--- a/test/files/pos/t651.scala
+++ b/test/files/pos/t651.scala
@@ -4,12 +4,12 @@ trait Test3 {
   trait MatchableImpl {
     trait MatchImpl;
   }
-  
+
   trait BracePairImpl {
     trait BraceImpl extends MatchableImpl {
       private object MyMatch1 extends MatchImpl;
       protected def match0 : MatchImpl = MyMatch1;
-      
+
     }
   }
 }
diff --git a/test/files/pos/t6516.scala b/test/files/pos/t6516.scala
index c004055..2980d83 100644
--- a/test/files/pos/t6516.scala
+++ b/test/files/pos/t6516.scala
@@ -1,5 +1,5 @@
 import scala.language.experimental.macros
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 import scala.collection.TraversableLike
 
 // This one compiles
@@ -11,7 +11,7 @@ object Test {
 
 // This one doesn't
 object Test2 {
-  type Ctx = scala.reflect.macros.Context
+  type Ctx = scala.reflect.macros.blackbox.Context
   type Alias[T, CC[_]] = Ctx { type PrefixType = TraversableLike[T, CC[T]] }
 
   def f() = macro f_impl
diff --git a/test/files/pos/t6574.scala b/test/files/pos/t6574.scala
new file mode 100644
index 0000000..59c1701
--- /dev/null
+++ b/test/files/pos/t6574.scala
@@ -0,0 +1,19 @@
+class Bad[X, Y](val v: Int) extends AnyVal {
+  def vv = v
+  @annotation.tailrec final def foo[Z](a: Int)(b: String) {
+    this.foo[Z](a)(b)
+  }
+
+  @annotation.tailrec final def differentReceiver {
+    {(); new Bad[X, Y](0)}.differentReceiver
+  }
+
+  @annotation.tailrec final def dependent[Z](a: Int)(b: String): b.type = {
+    this.dependent[Z](a)(b)
+  }
+}
+
+class HK[M[_]](val v: Int) extends AnyVal {
+  def hk[N[_]]: Unit = if (false) hk[M] else ()
+}
+
diff --git a/test/files/pos/t6624.scala b/test/files/pos/t6624.scala
index 1a92b92..44554c5 100644
--- a/test/files/pos/t6624.scala
+++ b/test/files/pos/t6624.scala
@@ -10,7 +10,7 @@ object Test {
   val klist: KCons[Option, KCons[Option, KCons[Option, KNil[Nothing]]]] = ???
 
   // crashes with
-  // "Exception in thread "main" scala.reflect.internal.Types$TypeError: value _1 is not a member 
+  // "Exception in thread "main" scala.reflect.internal.Types$TypeError: value _1 is not a member
   // of KCons[Option,KCons[Option,KNil[Nothing]]]"
   klist match {
    case KCons(KCons(KCons(_))) =>
diff --git a/test/files/pos/t6664.scala b/test/files/pos/t6664.scala
new file mode 100644
index 0000000..7eb85f6
--- /dev/null
+++ b/test/files/pos/t6664.scala
@@ -0,0 +1,4 @@
+final case class A(i: Int, s: String) {
+    protected def copy(s2: String): A = A(i, s2)
+    protected def copy(i2: Int): A = A(i2, s)
+}
diff --git a/test/files/pos/t6664b.scala b/test/files/pos/t6664b.scala
new file mode 100644
index 0000000..a622866
--- /dev/null
+++ b/test/files/pos/t6664b.scala
@@ -0,0 +1,5 @@
+object T {
+  def A(s: String): A = new A(3, s)
+  def A(i: Int): A = A(i, "abc")
+  case class A(i: Int, s: String)
+}
diff --git a/test/files/pos/t6675.flags b/test/files/pos/t6675.flags
index e8fb65d..d1b831e 100644
--- a/test/files/pos/t6675.flags
+++ b/test/files/pos/t6675.flags
@@ -1 +1 @@
--Xfatal-warnings
\ No newline at end of file
+-deprecation -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t6745.scala b/test/files/pos/t6745.scala
new file mode 100644
index 0000000..2ab8e6d
--- /dev/null
+++ b/test/files/pos/t6745.scala
@@ -0,0 +1,4 @@
+class Bar(val i: Int) {
+  self: Any with AnyRef =>
+  def this() = this(0)
+}
diff --git a/test/files/pos/t675.scala b/test/files/pos/t675.scala
index c284c0e..905d29d 100644
--- a/test/files/pos/t675.scala
+++ b/test/files/pos/t675.scala
@@ -7,7 +7,7 @@ trait T {
   }
   trait X {
     def foo : Foo = FOO_0;
-  }  
+  }
 }
 
 object Test extends App {
@@ -15,5 +15,3 @@ object Test extends App {
   val x = new t.X{}
   Console.println(x.foo)
 }
-  
-  
diff --git a/test/files/pos/t6780.scala b/test/files/pos/t6780.scala
new file mode 100644
index 0000000..4a35804
--- /dev/null
+++ b/test/files/pos/t6780.scala
@@ -0,0 +1,20 @@
+object O {
+  implicit def i: Int = 0
+}
+
+import O._
+
+trait Foo {
+  implicit val v1: Any
+  implicit def d1: Any
+           val v2: Any
+  implicit val v3: Any
+}
+
+trait Bar1 extends Foo {
+  implicit val v1      = {implicitly[Int]; ()} // failed due to cycle in Context#implicits being broken with Nil.
+           def d1      = {implicitly[Int]; ()} // okay
+  implicit val v2      = {implicitly[Int]; ()} // okay
+  implicit val v3: Any = {implicitly[Int]; ()} // okay
+
+}
diff --git a/test/files/pos/t6797.scala b/test/files/pos/t6797.scala
new file mode 100644
index 0000000..ef1afa1
--- /dev/null
+++ b/test/files/pos/t6797.scala
@@ -0,0 +1,4 @@
+object Test extends App /* workaround: don't extend App */ {
+  private class Matcher(aParam: Option[String] = None)
+  private val stringMatcher = new Matcher
+}
diff --git a/test/files/pos/t6815.scala b/test/files/pos/t6815.scala
new file mode 100644
index 0000000..9244b3d
--- /dev/null
+++ b/test/files/pos/t6815.scala
@@ -0,0 +1,17 @@
+trait U {
+  trait ValOrDefDefApi {
+    def name: Any
+  }
+  type ValOrDefDef <: ValOrDefDefApi
+  type ValDef <: ValOrDefDef with ValDefApi
+  trait ValDefApi extends ValOrDefDefApi { this: ValDef => }
+  val emptyValDef: ValDef // the result type is volatile
+}
+
+object Test {
+  val u: U = ???
+
+  u.emptyValDef match {
+    case u.emptyValDef => // but we shouldn't let that stop us from treating it as a stable identifier pattern.
+  }
+}
diff --git a/test/files/pos/t6815_import.scala b/test/files/pos/t6815_import.scala
new file mode 100644
index 0000000..56f4358
--- /dev/null
+++ b/test/files/pos/t6815_import.scala
@@ -0,0 +1,16 @@
+trait U {
+  trait ValOrDefDefApi {
+    def name: Any
+  }
+  type ValOrDefDef <: ValOrDefDefApi
+  type ValDef <: ValOrDefDef with ValDefApi
+  trait ValDefApi extends ValOrDefDefApi { this: ValDef => }
+  val emptyValDef: ValDef // the result type is volatile
+}
+
+object Test {
+  val u: U = ???
+
+  // but we shouldn't let that stop us from treating it as a stable identifier for import
+  import u.emptyValDef.name
+}
diff --git a/test/files/pos/t6897.scala b/test/files/pos/t6897.scala
deleted file mode 100644
index a7a03a1..0000000
--- a/test/files/pos/t6897.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-class A {
-  val html = (null: Any) match {
-    case 1 => <xml:group></xml:group>
-    case 2 => <p></p>
-  }
-}
diff --git a/test/files/pos/t690.scala b/test/files/pos/t690.scala
index 3fcdca7..a93c54f 100644
--- a/test/files/pos/t690.scala
+++ b/test/files/pos/t690.scala
@@ -10,5 +10,5 @@ trait test {
     override def foo(t : T) = super.foo(t);
   }
   def t : T;
-  M0.foo(t);  
+  M0.foo(t);
 }
diff --git a/test/files/pos/t6948.scala b/test/files/pos/t6948.scala
new file mode 100644
index 0000000..12a1d7e
--- /dev/null
+++ b/test/files/pos/t6948.scala
@@ -0,0 +1,10 @@
+object t6948 {
+  val rand = new scala.util.Random()
+  def a1 = rand.shuffle(0 to 5)
+  // Tis not to be
+  // def a2 = rand.shuffle(0 until 5)
+  def a3 = rand.shuffle(Vector(1, 2, 3))
+  def a4 = rand.shuffle(scala.collection.Seq(1, 2, 3))
+  def a5 = rand.shuffle(scala.collection.immutable.Seq(1, 2, 3))
+  def a6 = rand.shuffle(scala.collection.mutable.Seq(1, 2, 3))
+}
diff --git a/test/files/pos/t6963c.scala b/test/files/pos/t6963c.scala
index 0b6b5c7..d3c3616 100644
--- a/test/files/pos/t6963c.scala
+++ b/test/files/pos/t6963c.scala
@@ -9,9 +9,9 @@ object Test {
     case _: Array[_]  => true
     case _            => false
   }
-  
+
   def f4(x: Any) = x.isInstanceOf[Traversable[_]]
-  
+
   def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match {
     case (Some(_: Seq[_]), Nil, _)        => 1
     case (None, List(_: List[_], _), _)   => 2
diff --git a/test/files/pos/t6966.scala b/test/files/pos/t6966.scala
new file mode 100644
index 0000000..23adc6d
--- /dev/null
+++ b/test/files/pos/t6966.scala
@@ -0,0 +1,17 @@
+import Ordering.{Byte, comparatorToOrdering}
+trait Format[T]
+trait InputCache[T]
+object CacheIvy {
+	implicit def basicInputCache[I](implicit fmt: Format[I], eqv: Equiv[I]): InputCache[I] = null
+	implicit def arrEquiv[T](implicit t: Equiv[T]): Equiv[Array[T]] = null
+	implicit def hNilCache: InputCache[HNil] = null
+	implicit def ByteArrayFormat: Format[Array[Byte]] = null
+	type :+:[H, T <: HList] = HCons[H,T]
+	implicit def hConsCache[H, T <: HList](implicit head: InputCache[H], tail: InputCache[T]): InputCache[H :+: T] = null
+	hConsCache[Array[Byte], HNil]
+}
+
+sealed trait HList
+sealed trait HNil extends HList
+object HNil extends HNil
+final class HCons[H, T <: HList](head : H, tail : T) extends HList
\ No newline at end of file
diff --git a/test/files/pos/t6976/ImplicitBug_1.scala b/test/files/pos/t6976/ImplicitBug_1.scala
index c9031ba..50bc247 100644
--- a/test/files/pos/t6976/ImplicitBug_1.scala
+++ b/test/files/pos/t6976/ImplicitBug_1.scala
@@ -1,4 +1,4 @@
-// This one is weird and nasty. Not sure if this is scalac or sbt 
+// This one is weird and nasty. Not sure if this is scalac or sbt
 // (tried with 0.12 & 0.12.2-RC2) bug.
 //
 // A level of indirection is required to trigger this bug.
@@ -11,12 +11,12 @@
 // 4. sbt run (it fails)
 // 5. Switch it back & sbt run. It still fails.
 //
-// In this project sbt clean helps. However in a large project where this 
+// In this project sbt clean helps. However in a large project where this
 // bug was found compiler crashed even after doing sbt clean. The only
-// way to work around this was to reference Exts object explicitly (C) in 
+// way to work around this was to reference Exts object explicitly (C) in
 // the source file using its implicit classes.
 
-// Lets suppose this is a mega-trait combining all sorts of helper 
+// Lets suppose this is a mega-trait combining all sorts of helper
 // functionality.
 trait Support extends Exts
 
diff --git a/test/files/pos/t7014/t7014.scala b/test/files/pos/t7014/t7014.scala
index faec4c7..7c73f70 100644
--- a/test/files/pos/t7014/t7014.scala
+++ b/test/files/pos/t7014/t7014.scala
@@ -1,4 +1,3 @@
 package t7014
 
 import ThreadSafetyLevel.COMPLETELY_THREADSAFE // refer to annotation so it gets parsed
- 
\ No newline at end of file
diff --git a/test/files/pos/t711.scala b/test/files/pos/t711.scala
index 10b410e..4dd6040 100644
--- a/test/files/pos/t711.scala
+++ b/test/files/pos/t711.scala
@@ -2,7 +2,7 @@ abstract class Component
 
 class Button extends Component {
   def sayHey: Unit = Console.println("Hey, I'm a button") }
-  
+
 abstract class Origin {
   val delegate: Component }
 
diff --git a/test/files/pos/t715.cmds b/test/files/pos/t715.cmds
deleted file mode 100644
index 2836967..0000000
--- a/test/files/pos/t715.cmds
+++ /dev/null
@@ -1,2 +0,0 @@
-scalac meredith_1.scala
-scalac runner_2.scala
diff --git a/test/files/pos/t715/meredith_1.scala b/test/files/pos/t715/meredith_1.scala
deleted file mode 100644
index 8261b98..0000000
--- a/test/files/pos/t715/meredith_1.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-package com.sap.dspace.model.othello;
-
-import scala.xml._
-
-trait XMLRenderer {
-  type T <: Any {def getClass() : java.lang.Class[_]}
-  val valueTypes =
-    List(
-      classOf[java.lang.Boolean],
-      classOf[java.lang.Integer],
-      classOf[java.lang.Float],
-      classOf[java.lang.String] 
-      // more to come
-      )
-
-  def value2XML(
-    value : Object,
-    field : java.lang.reflect.Field,
-    pojo : T
-    ) : Node = {
-      value match {
-	case null => Text( "null" )
-	case vUnmatched =>
-	  if (value.isInstanceOf[java.lang.Boolean]) 
-	    Text( value.asInstanceOf[java.lang.Boolean].toString )
-	  else if (value.isInstanceOf[java.lang.Integer]) 
-	    Text( value.asInstanceOf[java.lang.Integer].toString )
-	  else if (value.isInstanceOf[java.lang.Float]) 
-	    Text( value.asInstanceOf[java.lang.Float].toString )
-    // else if (value.isInstanceOf[T]) 
-    //   pojo2XML( value.asInstanceOf[T] ) 
-	  else
-	  <unmatchedType>
-	    <theType>
-	      {vUnmatched.getClass.toString}
-	    </theType>
-	    <theValue>
-	      {vUnmatched.toString}
-	    </theValue>
-	  </unmatchedType>
-      }
-    }
-
-  def field2XML(
-    field : java.lang.reflect.Field,
-    pojo : T
-  ) : Elem = {
-
-    val accessible = field.isAccessible;
-    field.setAccessible( true );
-    // BUGBUG lgm need to disambiguate on type and possibly make
-    // recursive call to pojo2XML
-    val fldValXML = value2XML( field.get( pojo ), field, pojo );
-    field.setAccessible( accessible );
-
-    Elem(
-      null,
-      field.getName,
-      null,
-      TopScope, 
-      fldValXML
-      )
-  }
-
-  def pojo2XML( pojo : T ) : Elem = {
-    val progeny =
-      for (field <- pojo.getClass.getDeclaredFields)
-      yield field2XML( field, pojo );
-
-    Elem(
-      null,
-      pojo.getClass.getName,
-      null,
-      TopScope,
-      progeny.asInstanceOf[Array[scala.xml.Node]] : _*
-      )    
-  }
-}
-
-case class POJO2XMLRenderer( recurse : Boolean )
-     extends XMLRenderer {
-       type T = java.io.Serializable
-  override def value2XML(
-    value : Object,
-    field : java.lang.reflect.Field,
-    pojo : java.io.Serializable
-    ) : Node = {
-      if (recurse) super.value2XML( value, field, pojo )
-      else Text( value + "" )
-    }
-}
-
-object thePOJO2XMLRenderer extends POJO2XMLRenderer( true ) {
-}
-
-object Test extends Application {
-  println(com.sap.dspace.model.othello.thePOJO2XMLRenderer)
-}
diff --git a/test/files/pos/t715/runner_2.scala b/test/files/pos/t715/runner_2.scala
deleted file mode 100644
index 1e4f40d..0000000
--- a/test/files/pos/t715/runner_2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
-  println(com.sap.dspace.model.othello.thePOJO2XMLRenderer)
-}
diff --git a/test/files/pos/t7228.scala b/test/files/pos/t7228.scala
new file mode 100644
index 0000000..5d936f6
--- /dev/null
+++ b/test/files/pos/t7228.scala
@@ -0,0 +1,75 @@
+object AdaptWithWeaklyConformantType {
+  implicit class D(d: Double) { def double = d*2 }
+
+  val x1: Int = 1
+  var x2: Int = 2
+  val x3 = 3
+  var x4 = 4
+  final val x5 = 5
+  final var x6 = 6
+
+  def f1 = x1.double
+  def f2 = x2.double
+  def f3 = x3.double
+  def f4 = x4.double
+  def f5 = x5.double
+  def f6 = x6.double
+}
+
+object AdaptAliasWithWeaklyConformantType {
+  implicit class D(d: Double) { def double = d*2 }
+  type T = Int
+
+  val x1: T = 1
+  var x2: T = 2
+  val x3 = (3: T)
+  var x4 = (4: T)
+  final val x5 = (5: T)
+  final var x6 = (6: T)
+
+  def f1 = x1.double
+  def f2 = x2.double
+  def f3 = x3.double
+  def f4 = x4.double
+  def f5 = x5.double
+  def f6 = x6.double
+}
+
+object AdaptToAliasWithWeaklyConformantType {
+  type U = Double
+  implicit class D(d: U) { def double = d*2 }
+
+  val x1: Int = 1
+  var x2: Int = 2
+  val x3 = (3: Int)
+  var x4 = (4: Int)
+  final val x5 = (5: Int)
+  final var x6 = (6: Int)
+
+  def f1 = x1.double
+  def f2 = x2.double
+  def f3 = x3.double
+  def f4 = x4.double
+  def f5 = x5.double
+  def f6 = x6.double
+}
+
+object AdaptAliasToAliasWithWeaklyConformantType {
+  type U = Double
+  type T = Int
+  implicit class D(d: U) { def double = d*2 }
+
+  val x1: T = 1
+  var x2: T = 2
+  val x3 = (3: T)
+  var x4 = (4: T)
+  final val x5 = (5: T)
+  final var x6 = (6: T)
+
+  def f1 = x1.double
+  def f2 = x2.double
+  def f3 = x3.double
+  def f4 = x4.double
+  def f5 = x5.double
+  def f6 = x6.double
+}
diff --git a/test/files/pos/t7264/A_1.scala b/test/files/pos/t7264/A_1.scala
new file mode 100644
index 0000000..044d011
--- /dev/null
+++ b/test/files/pos/t7264/A_1.scala
@@ -0,0 +1,11 @@
+object Foo {
+  object Values {
+    implicit def fromInt(x: Int): Values = ???
+  }
+  trait Values
+}
+final class Foo(name: String) {
+  def bar(values: Foo.Values): Bar = ???
+}
+
+trait Bar
diff --git a/test/files/pos/t7264/B_2.scala b/test/files/pos/t7264/B_2.scala
new file mode 100644
index 0000000..a8af2e7
--- /dev/null
+++ b/test/files/pos/t7264/B_2.scala
@@ -0,0 +1,7 @@
+object Test {
+  // if the following line is uncommented, things compile
+  // type X = Foo.Values
+
+
+  def foo(f: Foo) = f.bar(0 /* : Foo.Values */)
+}
diff --git a/test/files/pos/t7294.scala b/test/files/pos/t7294.scala
new file mode 100644
index 0000000..ccac2b1
--- /dev/null
+++ b/test/files/pos/t7294.scala
@@ -0,0 +1,6 @@
+object Test {
+  // no fruitless warning as Tuple2 isn't (yet) final.
+  // The corresponding `neg` test will treat it as final
+  // for the purposes of these tests under -Xfuture.
+  (1, 2) match { case Seq() => 0; case _ => 1 }
+}
diff --git a/test/files/pos/t7296.scala b/test/files/pos/t7296.scala
new file mode 100644
index 0000000..0c078d3
--- /dev/null
+++ b/test/files/pos/t7296.scala
@@ -0,0 +1,6 @@
+object Test {
+  type A = Int
+  // Emits the implementation restriction but then proceeds to crash
+  // when creating the Foo.unapply.
+  case class Foo(a: A, b: A, c: A, d: A, e: A, f: A, g: A, h: A, i: A, j: A, k: A, l: A, m: A, n: A, o: A, p: A, q: A, r: A, s: A, t: A, u: A, v: A, w: A, x: A, y: A, Z: A)
+}
diff --git a/test/files/pos/t7315.flags b/test/files/pos/t7315.flags
new file mode 100644
index 0000000..d1b831e
--- /dev/null
+++ b/test/files/pos/t7315.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t7315.scala b/test/files/pos/t7315.scala
new file mode 100644
index 0000000..0abcea2
--- /dev/null
+++ b/test/files/pos/t7315.scala
@@ -0,0 +1,4 @@
+package scala.pack
+
+@deprecatedInheritance
+class C[@specialized A]
\ No newline at end of file
diff --git a/test/files/pos/t7322.scala b/test/files/pos/t7322.scala
new file mode 100644
index 0000000..006bf89
--- /dev/null
+++ b/test/files/pos/t7322.scala
@@ -0,0 +1,11 @@
+
+package object t7322 {
+  implicit class X(sc: StringContext) {
+    def x_?(args: Any*) = "hi there"
+  }
+}
+package t7322 {
+  trait Y {
+    x_?"junk"  // assume that if it compiles, it works
+  }
+}
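
As an illustrative sketch that is not part of the upstream patch: t7322 above defines a custom string interpolator by enriching StringContext from a package object, so that x_?"junk" resolves inside package t7322. A small self-contained sketch of the same pattern with interpolated arguments, assuming only the standard StringContext API (names are hypothetical):

    object InterpolatorSketch {
      // Enrich StringContext with a `greet` interpolator that reuses the standard s interpolator.
      implicit class GreetingInterpolator(sc: StringContext) {
        def greet(args: Any*): String = "Hello, " + sc.s(args: _*) + "!"
      }

      val who = "t7322"
      val msg: String = greet"dear $who"   // "Hello, dear t7322!"
    }
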
diff --git a/test/files/pos/t7364/BadList.java b/test/files/pos/t7364/BadList.java
new file mode 100644
index 0000000..2692fa0
--- /dev/null
+++ b/test/files/pos/t7364/BadList.java
@@ -0,0 +1,3 @@
+public class BadList extends java.util.ArrayList {
+	public java.util.ArrayList foo() { return null; }
+}
diff --git a/test/files/pos/t7364/UseIt.scala b/test/files/pos/t7364/UseIt.scala
new file mode 100644
index 0000000..3847165
--- /dev/null
+++ b/test/files/pos/t7364/UseIt.scala
@@ -0,0 +1,4 @@
+class UseIt {
+  val list = new BadList
+  list.foo()
+}
diff --git a/test/files/pos/t7364b/BadList_1.java b/test/files/pos/t7364b/BadList_1.java
new file mode 100644
index 0000000..fbb428a
--- /dev/null
+++ b/test/files/pos/t7364b/BadList_1.java
@@ -0,0 +1,3 @@
+public class BadList_1 extends java.util.ArrayList {
+	public java.util.ArrayList foo() { return null; }
+}
diff --git a/test/files/pos/t7364b/UseIt_2.scala b/test/files/pos/t7364b/UseIt_2.scala
new file mode 100644
index 0000000..06b50f6
--- /dev/null
+++ b/test/files/pos/t7364b/UseIt_2.scala
@@ -0,0 +1,5 @@
+class UseIt {
+  val list = new BadList_1
+  list.foo()
+  list.set(0, list.get(0))
+}
diff --git a/test/files/pos/t7377/Macro_1.scala b/test/files/pos/t7377/Macro_1.scala
index a0ec1d8..b38687c 100644
--- a/test/files/pos/t7377/Macro_1.scala
+++ b/test/files/pos/t7377/Macro_1.scala
@@ -1,7 +1,7 @@
 import language.experimental._
-import reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object M {
-  def noopImpl[A](c: Context)(expr: c.Expr[A]): c.Expr[A] = c.Expr(c.typeCheck(c.resetLocalAttrs(expr.tree)))
+  def noopImpl[A](c: Context)(expr: c.Expr[A]): c.Expr[A] = c.Expr(c.typecheck(c.untypecheck(expr.tree)))
   def noop[A](expr: A): A = macro noopImpl[A]
 }
diff --git a/test/files/pos/t7377b.flags b/test/files/pos/t7377b.flags
deleted file mode 100644
index cb8324a..0000000
--- a/test/files/pos/t7377b.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xoldpatmat
\ No newline at end of file
diff --git a/test/files/pos/t7427.flags b/test/files/pos/t7427.flags
new file mode 100644
index 0000000..9c7d640
--- /dev/null
+++ b/test/files/pos/t7427.flags
@@ -0,0 +1 @@
+-Ydebug
diff --git a/test/files/pos/t7427.scala b/test/files/pos/t7427.scala
new file mode 100644
index 0000000..cca5295
--- /dev/null
+++ b/test/files/pos/t7427.scala
@@ -0,0 +1,4 @@
+// Compiles with no options
+// Compiles with -Ydebug -Ydisable-unreachable-prevention
+// Crashes with -Ydebug
+trait Bippy { 3 match { case 3 => } }
diff --git a/test/files/disabled/t7020.flags b/test/files/pos/t7433.flags
similarity index 100%
copy from test/files/disabled/t7020.flags
copy to test/files/pos/t7433.flags
diff --git a/test/files/pos/t7433.scala b/test/files/pos/t7433.scala
new file mode 100644
index 0000000..f2109f4
--- /dev/null
+++ b/test/files/pos/t7433.scala
@@ -0,0 +1,10 @@
+object Test {
+  def foo() {
+    try {
+      for (i <- 1 until 5) return
+    } catch {
+      case _: NullPointerException | _: RuntimeException =>
+        // was: "catch block may intercept non-local return from method check"
+    }
+  }
+}
diff --git a/test/files/pos/t7461/Macros_1.scala b/test/files/pos/t7461/Macros_1.scala
index 353dec6..ca84d75 100644
--- a/test/files/pos/t7461/Macros_1.scala
+++ b/test/files/pos/t7461/Macros_1.scala
@@ -1,12 +1,12 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 import language.experimental.macros
 
 object Macros {
   def impl(c: Context) = {
     import c.universe._
-    val wut = c.typeCheck(Select(Literal(Constant(10)), newTermName("$minus")), silent = true)
+    val wut = c.typecheck(Select(Literal(Constant(10)), newTermName("$minus")), silent = true)
     // println(showRaw(wut, printIds = true, printTypes = true))
-    c.literalUnit
+    c.Expr[Unit](q"()")
   }
 
   def foo = macro impl
diff --git a/test/files/pos/t7475a.scala b/test/files/pos/t7475a.scala
new file mode 100644
index 0000000..810ce9a
--- /dev/null
+++ b/test/files/pos/t7475a.scala
@@ -0,0 +1,11 @@
+trait AbstractPublic {
+  def queue: Any
+}
+trait ConcretePrivate {
+  private val queue: Any = ()
+}
+
+abstract class Mix
+  extends ConcretePrivate with AbstractPublic {
+  final def queue: Any = ()
+}
diff --git a/test/files/pos/t7475b.scala b/test/files/pos/t7475b.scala
new file mode 100644
index 0000000..a34743b
--- /dev/null
+++ b/test/files/pos/t7475b.scala
@@ -0,0 +1,8 @@
+trait U {
+}
+
+trait T {
+  type TT = Any with T with U
+  private val priv = 0
+  (??? : TT).priv
+}
diff --git a/test/files/pos/t7475d.scala b/test/files/pos/t7475d.scala
new file mode 100644
index 0000000..497c2bf
--- /dev/null
+++ b/test/files/pos/t7475d.scala
@@ -0,0 +1,11 @@
+trait T {
+  type TT = T with Any
+  private val priv = 0
+  (??? : TT).priv
+}
+
+trait U {
+  type UU = Any with U
+  private val priv = 0
+  (??? : UU).priv
+}
diff --git a/test/files/pos/t7475e.scala b/test/files/pos/t7475e.scala
new file mode 100644
index 0000000..fbc965c
--- /dev/null
+++ b/test/files/pos/t7475e.scala
@@ -0,0 +1,13 @@
+trait U {
+  private val priv = 0
+  type TT = U with T // should allow `priv`
+  (??? : TT).priv
+}
+
+trait Base {
+
+}
+
+trait T extends Base {
+
+}
diff --git a/test/files/pos/t7516/A_1.scala b/test/files/pos/t7516/A_1.scala
index 3bba199..3bd477d 100644
--- a/test/files/pos/t7516/A_1.scala
+++ b/test/files/pos/t7516/A_1.scala
@@ -3,7 +3,7 @@ import scala.reflect._,macros._, scala.language.experimental.macros
 object A {
   def impl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[List[T]] = {
     val r = c.universe.reify { List(t.splice) }
-    c.Expr[List[T]]( c.resetLocalAttrs(r.tree) )
+    c.Expr[List[T]]( c.untypecheck(r.tree) )
   }
   def demo[T](t: T): List[T] = macro impl[T]
 }
diff --git a/test/files/pos/t7520.scala b/test/files/pos/t7520.scala
new file mode 100644
index 0000000..747f527
--- /dev/null
+++ b/test/files/pos/t7520.scala
@@ -0,0 +1,10 @@
+class A {
+  val x: Singleton with this.type = this
+  val y: this.type = x
+}
+
+class B {
+  val x = ""
+  val xs: x.type with Singleton = x
+  val y: x.type = xs
+}
diff --git a/test/files/pos/t757.scala b/test/files/pos/t757.scala
index 7513910..fd7624c 100644
--- a/test/files/pos/t757.scala
+++ b/test/files/pos/t757.scala
@@ -1,4 +1,4 @@
-package foo { 
+package foo {
   object C {
     def foo {
       Console.println("foo")
@@ -6,7 +6,7 @@ package foo {
   }
 }
 
-package bar { 
+package bar {
   object Main extends App {
     foo.C.foo
   }
diff --git a/test/files/pos/t758.scala b/test/files/pos/t758.scala
index 44769d5..160bf37 100644
--- a/test/files/pos/t758.scala
+++ b/test/files/pos/t758.scala
@@ -1,7 +1,7 @@
 trait A { type T; type M >: T }
-trait B extends A { 
-  val x : String; 
-  val u : A { type T = B.this.T } ; 
-  type T = x.type; 
-  type M = u.M 
+trait B extends A {
+  val x : String;
+  val u : A { type T = B.this.T } ;
+  type T = x.type;
+  type M = u.M
 }
diff --git a/test/files/pos/t7591/Demo.scala b/test/files/pos/t7591/Demo.scala
new file mode 100644
index 0000000..696d535
--- /dev/null
+++ b/test/files/pos/t7591/Demo.scala
@@ -0,0 +1,83 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+import scala.tools.cmd._
+
+/** A sample command specification for illustrative purposes.
+ *  First take advantage of the meta-options:
+ *
+ *    // this command creates an executable runner script "demo"
+ *    % scala scala.tools.cmd.Demo --self-update demo
+ *
+ *    // this one creates and sources a completion file - note backticks
+ *    % `./demo --bash`
+ *
+ *    // and now you have a runner with working completion
+ *    % ./demo --<tab>
+ *       --action           --defint           --int
+ *       --bash             --defstr           --str
+ *       --defenv           --self-update      --unary
+ *
+ *  The normal option configuration is plausibly self-explanatory.
+ */
+trait DemoSpec extends Spec with Meta.StdOpts with Interpolation {
+  lazy val referenceSpec  = DemoSpec
+  lazy val programInfo    = Spec.Info("demo", "Usage: demo [<options>]", "scala.tools.cmd.Demo")
+
+  help("""Usage: demo [<options>]""")
+  heading("Unary options:")
+
+  val optIsUnary      = "unary"         / "a unary option"              --?  ;
+  ("action" / "a body which may be run") --> println("Hello, I am the --action body.")
+
+  heading("Binary options:")
+  val optopt          = "str"       / "an optional String"        --|
+  val optoptInt       = ("int"      / "an optional Int") .        --^[Int]
+  val optEnv          = "defenv"    / "an optional String"        defaultToEnv  "PATH"
+  val optDefault      = "defstr"    / "an optional String"        defaultTo     "default"
+  val optDefaultInt   = "defint"    / "an optional Int"           defaultTo     -1
+  val optExpand       = "alias"     / "an option which expands"   expandTo      ("--int", "15")
+}
+
+object DemoSpec extends DemoSpec with Property {
+  lazy val propMapper = new PropertyMapper(DemoSpec)
+
+  type ThisCommandLine = SpecCommandLine
+  def creator(args: List[String]) =
+    new SpecCommandLine(args) {
+      override def errorFn(msg: String) = { println("Error: " + msg) ; sys.exit(0) }
+    }
+}
+
+class Demo(args: List[String]) extends {
+  val parsed = DemoSpec(args: _*)
+} with DemoSpec with Instance {
+  import java.lang.reflect._
+
+  def helpMsg = DemoSpec.helpMsg
+  def demoSpecMethods = this.getClass.getMethods.toList
+  private def isDemo(m: Method) = (m.getName startsWith "opt") && !(m.getName contains "$") && (m.getParameterTypes.isEmpty)
+
+  def demoString(ms: List[Method]) = {
+    val longest   = ms map (_.getName.length) max
+    val formatStr = "    %-" + longest + "s: %s"
+    val xs        = ms map (m => formatStr.format(m.getName, m.invoke(this)))
+
+    xs mkString ("Demo(\n  ", "\n  ", "\n)\n")
+  }
+
+  override def toString = demoString(demoSpecMethods filter isDemo)
+}
+
+object Demo {
+  def main(args: Array[String]): Unit = {
+    val runner = new Demo(args.toList)
+
+    if (args.isEmpty)
+      println(runner.helpMsg)
+
+    println(runner)
+  }
+}
diff --git a/test/files/pos/t7649.scala b/test/files/pos/t7649.scala
index a1b02f6..d70dc05 100644
--- a/test/files/pos/t7649.scala
+++ b/test/files/pos/t7649.scala
@@ -1,10 +1,10 @@
 object Test {
-  val c: reflect.macros.Context = ???
+  val c: scala.reflect.macros.blackbox.Context = ???
   import c.universe._
   reify {
     // The lookup of the implicit WeakTypeTag[Any]
     // was triggering an unpositioned tree.
-    c.Expr[Any](Literal(Constant(0))).splice
+    c.Expr[Any](q"0").splice
   }
 
   import scala.reflect.ClassTag
diff --git a/test/files/pos/t7668.scala b/test/files/pos/t7668.scala
new file mode 100644
index 0000000..222a13d
--- /dev/null
+++ b/test/files/pos/t7668.scala
@@ -0,0 +1,12 @@
+trait Space {
+  type T
+  val x: T
+}
+
+trait Extractor {
+  def extract(s: Space): s.T
+}
+
+class Sub extends Extractor {
+  def extract(s: Space) = s.x
+}
diff --git a/test/files/pos/t767.scala b/test/files/pos/t767.scala
index d4d7eae..0c4067f 100644
--- a/test/files/pos/t767.scala
+++ b/test/files/pos/t767.scala
@@ -4,7 +4,7 @@ abstract class AbsCell {
   private var value: T = init
   def get: T = value
   def set (x: T) { value = x }
-    
+
   class Node {
     val foo = 1
   }
diff --git a/test/files/pos/t7688.scala b/test/files/pos/t7688.scala
new file mode 100644
index 0000000..5a846b9
--- /dev/null
+++ b/test/files/pos/t7688.scala
@@ -0,0 +1,7 @@
+import scala.reflect.macros._
+
+class A[C <: Context with Singleton](position: C#Position)
+
+object A {
+  def apply(c: Context)(in: c.Tree): A[c.type] = new A(in.pos)
+}
diff --git a/test/files/pos/t7689.scala b/test/files/pos/t7689.scala
new file mode 100644
index 0000000..022e7ab
--- /dev/null
+++ b/test/files/pos/t7689.scala
@@ -0,0 +1,7 @@
+object A {
+  // The default getter must have an explicit return type (List[_] => Int)
+  // This wasn't happening since e28c3edda4. That commit encoded upper/lower
+  // bounds of Any/Nothing as EmptyTree, which were triggering an .isEmpty
+  // check in Namers#TypeTreeSubstitutor
+  def x(f: List[_] => Int = _ => 3) = 9
+}
diff --git a/test/files/pos/t7690.scala b/test/files/pos/t7690.scala
new file mode 100644
index 0000000..e8911a9
--- /dev/null
+++ b/test/files/pos/t7690.scala
@@ -0,0 +1,17 @@
+object A
+trait B[T]
+
+object C {
+  implicit def notUsed[L[x]](in: L[Int]): B[L[Int]] = ???
+
+  class E(val ls: Int) {
+    def x(f: Int => Boolean): Boolean = f(ls)
+  }
+  implicit def isUsed(ls: Int): E = new E(ls)
+
+  def amethod(in: Int): Boolean =
+    in.x { i =>
+      import A._
+      "asdf" == i.toString
+    }
+}
\ No newline at end of file
diff --git a/test/files/pos/t7753.scala b/test/files/pos/t7753.scala
new file mode 100644
index 0000000..93ad23f
--- /dev/null
+++ b/test/files/pos/t7753.scala
@@ -0,0 +1,36 @@
+import scala.language.{ higherKinds, implicitConversions }
+
+trait Foo { type Out }
+
+trait SI {
+  val instance: Foo
+  type Out
+}
+
+object Test {
+  def test {
+    def indirect(si: SI)(v: si.instance.Out) = v
+
+    val foo: Foo { type Out = Int } = ???
+    def conv(i: Foo): SI { type Out = i.Out; val instance: i.type } = ???
+
+    val converted = conv(foo)
+
+    val v1: Int = indirect(converted)(23)  // Okay (after refining the return type `instance` in the return type of `conv`)
+    /*
+    indirect(converted){(v: converted.instance.Out)converted.instance.Out}(
+      23{Int(23)}
+    ){converted.instance.Out};
+    */
+
+    val v2: Int = indirect(conv(foo))(23)  // Used to fail as follows:
+    /*
+    indirect(
+        conv(foo){si.SI{type Out = foo.Out; val instance: si.Test.<refinement>.type}}
+    ){(v: si.instance.Out)si.instance.Out}(
+      23{<error>}
+    ){<error>};
+    */
+
+  }
+}
diff --git a/test/files/pos/t7776.scala b/test/files/pos/t7776.scala
index 0340fac..a36497a 100644
--- a/test/files/pos/t7776.scala
+++ b/test/files/pos/t7776.scala
@@ -1,5 +1,5 @@
 import scala.language.experimental.macros
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 class MacroErasure {
   def app(f: Any => Any, x: Any): Any = macro MacroErasure.appMacro
@@ -7,6 +7,14 @@ class MacroErasure {
 }
 
 object MacroErasure {
-  def appMacro(c: Context)(f: c.Expr[Any => Any], x: c.Expr[Any]): c.Expr[Any] = ???
-  def appMacroA[A](c: Context)(f: c.Expr[A => Any], x: c.Expr[Any])(implicit tt: c.WeakTypeTag[A]): c.Expr[Any] = ???
+  def appMacro(c: Context)(
+    f: c.Expr[Any => Any], x: c.Expr[Any]): c.Expr[Any] = {
+    import c.universe._
+    c.Expr(q"$f($x)")
+  }
+  def appMacroA[A](c: Context)(f: c.Expr[A => Any], x: c.Expr[Any])(
+    implicit tt: c.WeakTypeTag[A]): c.Expr[Any] = {
+    import c.universe._
+    c.Expr(q"$f[${tt.tpe}]($x)")
+  }
 }
\ No newline at end of file
diff --git a/test/files/pos/t7785.scala b/test/files/pos/t7785.scala
new file mode 100644
index 0000000..1de693d
--- /dev/null
+++ b/test/files/pos/t7785.scala
@@ -0,0 +1,34 @@
+import scala.language._
+
+trait R[+Repr]
+
+trait TraversableOps {
+  implicit val R: R[Nothing] = ???
+
+  // Removing the implicit parameter in both fixes the crash
+  // removing it into one only gives a valid compiler error.
+  trait OpsDup1[Repr] {
+    def force(implicit bf: R[Repr]): Any
+  }
+
+  trait Ops[Repr] extends OpsDup1[Repr] {
+    def force(implicit bf: R[Repr], dummy: DummyImplicit): Any
+  }
+
+  implicit def ct2ops[T, C[+X]](t: C[T]):
+    Ops[C[T]]
+
+  def force[T](t: Option[T]) =
+    // ct2ops(t).force
+    t.force //Fails compilation on 2.10.2.
+
+
+  /* To get a closer look at the crash:
+  :power
+  val foo = typeOf[C].member(TermName("foo"))
+  val pt = analyzer.HasMember(TermName("force"))
+  val instantiated = foo.info.finalResultType.instantiateTypeParams(foo.typeParams, foo.typeParams.map(TypeVar(_)))
+  instantiated <:< pt
+  */
+  def foo[T, C[+X]]: Ops[C[T]]
+}
diff --git a/test/files/pos/t7788.scala b/test/files/pos/t7788.scala
new file mode 100644
index 0000000..81eada9
--- /dev/null
+++ b/test/files/pos/t7788.scala
@@ -0,0 +1,8 @@
+class Test {
+  // Predef used to define a method `conforms` to produce the implicit evidence below
+  // all this does is ensure we don't rename Predef.$conforms back to conforms when $ goes out of fashion
+  // or that there is some other way of generating the implicit value that witnesses T => U for T <: U
+  def conforms(x: Int, y: Int) = x < y
+  def foo[A](implicit ev: Int => A) = ???
+  foo[Int]
+}
\ No newline at end of file
diff --git a/test/files/pos/t7834.scala b/test/files/pos/t7834.scala
new file mode 100644
index 0000000..fc9a0aa
--- /dev/null
+++ b/test/files/pos/t7834.scala
@@ -0,0 +1,6 @@
+class S { val q = "" }
+
+class B extends S {
+  val x1: B.super.q.type = q
+  val x2: B.this.q.type  = q
+}
diff --git a/test/files/pos/t7847/A.scala b/test/files/pos/t7847/A.scala
new file mode 100644
index 0000000..b6cce6e
--- /dev/null
+++ b/test/files/pos/t7847/A.scala
@@ -0,0 +1,5 @@
+case class Blah(a: Int)
+
+object Blah {
+  def apply2(a: Int) = apply(a)
+}
diff --git a/test/files/pos/t7847/B.java b/test/files/pos/t7847/B.java
new file mode 100644
index 0000000..c214f2d
--- /dev/null
+++ b/test/files/pos/t7847/B.java
@@ -0,0 +1,10 @@
+public final class B {
+  void blah() {
+    Blah x = Blah.apply2(1);
+    Blah y = Blah.apply(1);
+    Blah z = Blah$.MODULE$.apply(1);
+
+    scala.Option un1 = Blah.unapply(null);
+    scala.Option un2 = Blah$.MODULE$.unapply(null);
+  }
+}
diff --git a/test/files/pos/t7853-partial-function.scala b/test/files/pos/t7853-partial-function.scala
new file mode 100644
index 0000000..b09254e
--- /dev/null
+++ b/test/files/pos/t7853-partial-function.scala
@@ -0,0 +1,7 @@
+object Test  {
+
+  def testCons: Unit = {
+    def x[A](a: PartialFunction[Any, A]): A = a(0)
+    val eval0 = x { case list: List[Int @unchecked] => list }
+  }
+}
diff --git a/test/files/pos/t7853.scala b/test/files/pos/t7853.scala
new file mode 100644
index 0000000..b0e9221
--- /dev/null
+++ b/test/files/pos/t7853.scala
@@ -0,0 +1,11 @@
+trait S {
+  trait T {
+    this: Any =>
+
+    trait U {
+      trait V {
+        S.this
+      }
+    }
+  }
+}
diff --git a/test/files/pos/t7864.flags b/test/files/pos/t7864.flags
new file mode 100644
index 0000000..7ccd561
--- /dev/null
+++ b/test/files/pos/t7864.flags
@@ -0,0 +1 @@
+-Xlint
\ No newline at end of file
diff --git a/test/files/pos/t7864.scala b/test/files/pos/t7864.scala
new file mode 100644
index 0000000..b2d8911
--- /dev/null
+++ b/test/files/pos/t7864.scala
@@ -0,0 +1,5 @@
+object Test {
+  val f = 0;
+  ({ toString; (x: Any) => x})("$f ")
+}
+
diff --git a/test/files/pos/t788.scala b/test/files/pos/t788.scala
index 3da88a2..19638dd 100644
--- a/test/files/pos/t788.scala
+++ b/test/files/pos/t788.scala
@@ -4,7 +4,7 @@ trait Test {
   type Node <: NodeImpl;
   trait NodeImpl;
   type Expression <: Node with ExpressionImpl;
-  trait ExpressionImpl extends NodeImpl { 
+  trait ExpressionImpl extends NodeImpl {
     def self : Expression;
   }
   type Named <: Node with NamedImpl;
diff --git a/test/files/pos/t7919.scala b/test/files/pos/t7919.scala
new file mode 100644
index 0000000..64f261e
--- /dev/null
+++ b/test/files/pos/t7919.scala
@@ -0,0 +1,6 @@
+
+object X {
+  val x = s""
+  val y = true
+}
+
diff --git a/test/files/pos/t7928.scala b/test/files/pos/t7928.scala
new file mode 100644
index 0000000..d9e2993
--- /dev/null
+++ b/test/files/pos/t7928.scala
@@ -0,0 +1,16 @@
+trait OuterTrait {
+  trait InnerTrait {
+    type Element
+    type Collection <: Iterable[Inner.Element]
+  }
+
+  val Inner: InnerTrait
+
+}
+
+object OuterObject extends OuterTrait {
+  object Inner extends InnerTrait {
+    type Element = String
+    override type Collection = Seq[Inner.Element]
+  }
+}
diff --git a/test/files/pos/t7944.scala b/test/files/pos/t7944.scala
new file mode 100644
index 0000000..2fe2c58
--- /dev/null
+++ b/test/files/pos/t7944.scala
@@ -0,0 +1,24 @@
+class M[+A, +B]
+
+object Test {
+  implicit class EitherOps[A, B](self: Either[A, B]) {
+    def disjunction: M[A, B] = null
+  }
+
+  def foo = {
+    val l: Either[Int, Nothing] = Left[Int, Nothing](1)
+
+    var ok = EitherOps(l).disjunction
+
+    val runawayTypeVar = l.disjunction
+
+    // reported bug:
+    // found   : M[Int,B]; required: M[Int,Nothing]
+    val assign: M[Int, Nothing] = runawayTypeVar
+
+    // variations on the theme, all failed before similarly.
+    val assign1: M[Int, Nothing] = {val temp = runawayTypeVar; temp}
+    val assign2: M[Int, String] = runawayTypeVar
+    val assign3: M[Int, Nothing] = {val temp = Left(1).disjunction; temp}
+  }
+}
diff --git a/test/files/pos/t7983.scala b/test/files/pos/t7983.scala
new file mode 100644
index 0000000..a583e53
--- /dev/null
+++ b/test/files/pos/t7983.scala
@@ -0,0 +1,31 @@
+package foo.bar.baz // the package nesting level material to this bug
+ 
+class DivergenceTest {
+ 
+  trait ColumnBase[T]
+ 
+  trait ShapeLevel
+  trait Flat extends ShapeLevel
+  trait Lower extends Flat
+ 
+  class Shape2[Level <: ShapeLevel, -M, U]
+ 
+  implicit final def columnBaseShape[Level >: Flat <: ShapeLevel, T, C <: ColumnBase[_]]
+                                    (implicit ev: C <:< ColumnBase[T]
+                                    ): Shape2[Level, C, T] = ???
+
+  implicit final def intShape[Level <: ShapeLevel, T]: Shape2[Level, Int, Int] = ???
+  implicit final def tuple2Shape[Level <: ShapeLevel, M1,M2, U1,U2]
+                                (implicit u1: Shape2[_ <: Level, M1, U1],
+                                          u2: Shape2[_ <: Level, M2, U2]
+                                ): Shape2[Level, (M1,M2), (U1,U2)] = ???
+ 
+  def foo {
+    class Coffees extends ColumnBase[Int]
+ 
+    def map1[F, T](f: F)(implicit shape: Shape2[_ <: Flat, F, T]) = ???
+ 
+    map1(((1, null: Coffees), 1))
+    map1(((null: Coffees, 1), 1)) // fails with implicit divergence error in 2.11.0-M6, works under 2.10.3
+  }
+}
diff --git a/test/files/pos/t7987/Macro_1.scala b/test/files/pos/t7987/Macro_1.scala
new file mode 100644
index 0000000..81f717b
--- /dev/null
+++ b/test/files/pos/t7987/Macro_1.scala
@@ -0,0 +1,6 @@
+import scala.language.experimental._
+
+object Macro {
+  def apply[A](a: A): A = macro impl[A]
+  def impl[A](c: reflect.macros.Context)(a: c.Expr[A]): c.Expr[A] = a
+}
diff --git a/test/files/pos/t7987/Test_2.scala b/test/files/pos/t7987/Test_2.scala
new file mode 100644
index 0000000..5896fdb
--- /dev/null
+++ b/test/files/pos/t7987/Test_2.scala
@@ -0,0 +1,12 @@
+class C[T] {                                     
+  def foo = 0                                    
+}                                                
+
+object Test {
+  implicit def AnyToC[T](a: Any): C[T] = new C[T] 
+  // was: "macro not expanded"
+  Macro {                                         
+    "".foo                                         
+     ()                                            
+  }
+}
diff --git a/test/files/jvm/t2570.check b/test/files/pos/t8001.check
similarity index 100%
rename from test/files/jvm/t2570.check
rename to test/files/pos/t8001.check
diff --git a/test/files/disabled/t7020.flags b/test/files/pos/t8001.flags
similarity index 100%
copy from test/files/disabled/t7020.flags
copy to test/files/pos/t8001.flags
diff --git a/test/files/pos/t8001/Macros_1.scala b/test/files/pos/t8001/Macros_1.scala
new file mode 100644
index 0000000..3b80b88
--- /dev/null
+++ b/test/files/pos/t8001/Macros_1.scala
@@ -0,0 +1,10 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def foo: Unit = macro impl
+  def impl(c: Context) = {
+    import c.universe._
+    q"()"
+  }
+}
\ No newline at end of file
diff --git a/test/files/pos/t8001/Test_2.scala b/test/files/pos/t8001/Test_2.scala
new file mode 100644
index 0000000..6d72d96
--- /dev/null
+++ b/test/files/pos/t8001/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  Macros.foo
+  (): Unit
+}
\ No newline at end of file
diff --git a/test/files/pos/t8002-nested-scope.scala b/test/files/pos/t8002-nested-scope.scala
new file mode 100644
index 0000000..a2088bc
--- /dev/null
+++ b/test/files/pos/t8002-nested-scope.scala
@@ -0,0 +1,20 @@
+//  This test serves to capture the status quo, but should really
+// emit an accessibility error.
+
+// `Namers#companionSymbolOf` seems too lenient, and currently doesn't
+// implement the same-scope checks mentioned:
+//
+// https://github.com/scala/scala/pull/2816#issuecomment-22555206
+//
+class C {
+  def foo = {
+    class C { private def x = 0 }
+
+    {
+      val a = 0
+      object C {
+        new C().x
+      }
+    }
+  }
+}
diff --git a/test/files/pos/t8011.scala b/test/files/pos/t8011.scala
new file mode 100644
index 0000000..76bd7ac
--- /dev/null
+++ b/test/files/pos/t8011.scala
@@ -0,0 +1,8 @@
+class ThingOps1(val x: String) extends AnyVal {
+  def fn[A]: Any = {
+    new X[A] { def foo(a: A) = a }
+    0
+  }
+}
+
+trait X[B] { def foo(a: B): Any }
\ No newline at end of file
diff --git a/test/files/pos/t8013.flags b/test/files/pos/t8013.flags
new file mode 100644
index 0000000..954eaba
--- /dev/null
+++ b/test/files/pos/t8013.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint
diff --git a/test/files/pos/t8013/inpervolated_2.scala b/test/files/pos/t8013/inpervolated_2.scala
new file mode 100644
index 0000000..90e571b
--- /dev/null
+++ b/test/files/pos/t8013/inpervolated_2.scala
@@ -0,0 +1,11 @@
+/*
+ * scalac: -Xfatal-warnings -Xlint
+ */
+package t8013
+
+// unsuspecting user of perverse macro
+trait User {
+  import Perverse.Impervolator
+  val foo = "bar"
+  Console println p"Hello, $foo"
+}
diff --git a/test/files/pos/t8013/inpervolator_1.scala b/test/files/pos/t8013/inpervolator_1.scala
new file mode 100644
index 0000000..612e1d7
--- /dev/null
+++ b/test/files/pos/t8013/inpervolator_1.scala
@@ -0,0 +1,33 @@
+
+package t8013
+
+// perverse macro to confuse Xlint
+
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Perverse {
+
+  implicit class Impervolator(sc: StringContext) {
+    def p(args: Any*): String = macro pImpl
+  }
+
+  // turn a nice interpolation into something that looks
+  // nothing like an interpolation or anything we might
+  // recognize, but which includes a "$id" in an apply.
+  def pImpl(c: Context)(args: c.Expr[Any]*): c.Expr[String] = {
+    import c.universe._
+    val macroPos = c.macroApplication.pos
+    val text = macroPos.source.lineToString(macroPos.line - 1) substring macroPos.column
+    val tt = Literal(Constant(text))
+    val tree = q"t8013.Perverse.pervert($tt)"
+    c.Expr[String](tree)
+  }
+
+  // identity doesn't seem very perverse in this context
+  //def pervert(text: String): String = text
+  def pervert(text: String): String = {
+    Console println s"Perverting [$text]"
+    text
+  }
+}
diff --git a/test/files/pos/t802.scala b/test/files/pos/t802.scala
index 124d491..2dea703 100644
--- a/test/files/pos/t802.scala
+++ b/test/files/pos/t802.scala
@@ -1,17 +1,17 @@
 package test;
 trait Test {
-  abstract class BracesImpl { 
+  abstract class BracesImpl {
     type Singleton;
     type Brace <: Singleton with BraceImpl;
-    trait BraceImpl; 
+    trait BraceImpl;
     trait ForFile;
   }
-  abstract class ParensImpl extends BracesImpl { 
+  abstract class ParensImpl extends BracesImpl {
     type Brace <: Singleton with BraceImpl;
     trait BraceImpl extends super.BraceImpl;
   }
   val parens : ParensImpl;
-  abstract class BracksImpl extends BracesImpl { 
+  abstract class BracksImpl extends BracesImpl {
     type Brace <: Singleton with BraceImpl;
     trait BraceImpl extends super.BraceImpl;
   }
diff --git a/test/files/pos/t8023.scala b/test/files/pos/t8023.scala
new file mode 100644
index 0000000..8682408
--- /dev/null
+++ b/test/files/pos/t8023.scala
@@ -0,0 +1,22 @@
+import language._
+
+
+object Test {
+  def foo = (null: Any) match {
+    case a: A[k] =>
+      // error: kinds of the type arguments (k) do not conform to the 
+      // expected kinds of the type parameters (type K) in class B.
+      new B[k]()
+  }
+}
+
+class A[K[L[_]]]
+
+class B[K[M[_]]]
+
+
+object Test2 {
+  def foo = (null: Any) match {
+    case a: A[k] => new B[k]() // this one worked before as the info of `A` was complete
+  }
+}
diff --git a/test/files/pos/t8023b.scala b/test/files/pos/t8023b.scala
new file mode 100644
index 0000000..94c9b2f
--- /dev/null
+++ b/test/files/pos/t8023b.scala
@@ -0,0 +1,2 @@
+// this fails with naive attempts to fix SI-8023
+trait T[A <: T[A]]
diff --git a/test/files/pos/t8045.scala b/test/files/pos/t8045.scala
new file mode 100644
index 0000000..21154e3
--- /dev/null
+++ b/test/files/pos/t8045.scala
@@ -0,0 +1,17 @@
+object Test extends App {
+  case class Number(i: Int)
+
+  object UnliftNumber {
+    def unapply(t: Any): Option[Number] = t match {
+      case i: Int => Some(Number(i))
+      case _ => None
+    }
+  }
+
+  def eval(expr: Any): Option[Number] = expr match {
+    case UnliftNumber(n) => Some(n)
+    case _ => None
+  }
+
+  println(eval(1))
+}
diff --git a/test/files/pos/t8046.scala b/test/files/pos/t8046.scala
new file mode 100644
index 0000000..304d70b
--- /dev/null
+++ b/test/files/pos/t8046.scala
@@ -0,0 +1,20 @@
+trait One {
+  type Op[A]
+  type Alias[A] = Op[A]
+}
+ 
+trait Two extends One {
+  trait Op[A] extends (A => A)
+ 
+  // This compiles
+  class View1 extends Op[Int] { def apply(xs: Int) = xs }
+ 
+  // ??? base class View2 not found in basetypes of class View2
+  // ./a.scala:9: error: class View2 needs to be abstract, since \
+  //   method apply in trait Function1 of type (v1: T1)R is not defined
+  // (Note that T1 does not match Int)
+  //   class View2 extends Alias[Int] { def apply(xs: Int) = xs }
+  //         ^
+  // one error found
+  class View2 extends Alias[Int] { def apply(xs: Int) = xs }
+}
diff --git a/test/files/pos/t8046b.scala b/test/files/pos/t8046b.scala
new file mode 100644
index 0000000..45b99fd
--- /dev/null
+++ b/test/files/pos/t8046b.scala
@@ -0,0 +1,16 @@
+trait One {
+  type Op[A]
+  type Alias = Op[Int]
+}
+ 
+trait Two extends One {
+  trait Op[A] extends M[A]
+  //(a: Alias) => a.value.toChar               // okay
+                                             // (=> A).asSeenFrom(a.type, trait M): => Int
+  class View2 extends Alias { value.toChar } // toChar is not a member of type parameter A
+                                             // (=> A).asSeenFrom(View2.this.type, trait M): => A
+  
+  // override type Alias = Op[Int]           // works with this
+}
+
+trait M[A] { def value: A = sys.error("") }
diff --git a/test/files/pos/t8046c.scala b/test/files/pos/t8046c.scala
new file mode 100644
index 0000000..f05b4c1
--- /dev/null
+++ b/test/files/pos/t8046c.scala
@@ -0,0 +1,19 @@
+trait One {
+  type Op[A]
+  type Alias[A] = Op[A]
+}
+
+trait Three extends One {
+  trait Op[A] extends (A => A)
+
+  def f1(f: Op[Int])            = f(5)
+  def f2(f: Alias[Int])         = f(5)
+  def f3[T <: Op[Int]](f: T)    = f(5)
+  def f4[T <: Alias[Int]](f: T) = f(5)
+  // ./a.scala:12: error: type mismatch;
+  //  found   : Int(5)
+  //  required: T1
+  //   def f4[T <: Alias[Int]](f: T) = f(5)
+  //                                     ^
+}
+
diff --git a/test/files/pos/t8054.scala b/test/files/pos/t8054.scala
new file mode 100644
index 0000000..a7bb44b
--- /dev/null
+++ b/test/files/pos/t8054.scala
@@ -0,0 +1,31 @@
+trait D {
+  trait Manifest {
+    class Entry
+  }
+
+  val M: Manifest
+
+  def m: M.Entry = ???
+}
+
+object D1 extends D {
+  object M extends Manifest
+}
+
+object D2 extends D {
+  val M: Manifest = ???
+}
+
+object Hello {
+
+  def main(args: Array[String]) {
+    // 2.10.3 - ok
+    // 2.11.0-M7 - type mismatch; found : Seq[DB1.MANIFEST.Entry]
+    // required: Seq[DB1.MANIFEST.Entry]
+    val t1: D1.M.Entry = D1.m
+
+    // 2.10.3 - ok
+    // 2.11.0-M7 - ok
+    val t2: D2.M.Entry = D2.m
+  }
+}
diff --git a/test/files/pos/t8064.flags b/test/files/pos/t8064.flags
new file mode 100644
index 0000000..281f0a1
--- /dev/null
+++ b/test/files/pos/t8064.flags
@@ -0,0 +1 @@
+-Yrangepos
diff --git a/test/files/pos/t8064/Client_2.scala b/test/files/pos/t8064/Client_2.scala
new file mode 100644
index 0000000..4410678
--- /dev/null
+++ b/test/files/pos/t8064/Client_2.scala
@@ -0,0 +1,8 @@
+object Test {
+  Macro {
+    def s = ""
+    Macro(s): @unchecked
+    ???
+  }
+}
+// Was: a range position validation error (unpositioned tree)
\ No newline at end of file
diff --git a/test/files/pos/t8064/Macro_1.scala b/test/files/pos/t8064/Macro_1.scala
new file mode 100644
index 0000000..9f1e695
--- /dev/null
+++ b/test/files/pos/t8064/Macro_1.scala
@@ -0,0 +1,10 @@
+import language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macro {
+  def apply(a: Any): Any = macro impl
+
+  def impl(c: Context)(a: c.Tree): c.Tree = {
+    c.untypecheck(a)
+  }
+}
diff --git a/test/files/pos/t8064b.flags b/test/files/pos/t8064b.flags
new file mode 100644
index 0000000..281f0a1
--- /dev/null
+++ b/test/files/pos/t8064b.flags
@@ -0,0 +1 @@
+-Yrangepos
diff --git a/test/files/pos/t8064b/Client_2.scala b/test/files/pos/t8064b/Client_2.scala
new file mode 100644
index 0000000..a7bf2b9
--- /dev/null
+++ b/test/files/pos/t8064b/Client_2.scala
@@ -0,0 +1,6 @@
+object Test {
+  Macro {
+    "".reverse
+  }
+}
+// Was: a range position validation error (tree with offset position enclosing tree with range position)
\ No newline at end of file
diff --git a/test/files/pos/t8064b/Macro_1.scala b/test/files/pos/t8064b/Macro_1.scala
new file mode 100644
index 0000000..60996bf
--- /dev/null
+++ b/test/files/pos/t8064b/Macro_1.scala
@@ -0,0 +1,11 @@
+import language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macro {
+  def apply(a: Any): Any = macro impl
+  def impl(c: Context)(a: c.Tree): c.Tree = {
+    import c.universe._
+
+    q"{$a; true}"
+  }
+}
diff --git a/test/files/pos/t807.scala b/test/files/pos/t807.scala
index ed73fe3..0eeb92e 100644
--- a/test/files/pos/t807.scala
+++ b/test/files/pos/t807.scala
@@ -6,7 +6,7 @@ trait Matcher {
   trait HasLinks {
     def link(b : Boolean) : Link = null;
   }
-  
+
 }
 trait BraceMatcher extends Matcher {
   trait BracePair {
diff --git a/test/files/pos/t8120.scala b/test/files/pos/t8120.scala
new file mode 100644
index 0000000..e06f38d
--- /dev/null
+++ b/test/files/pos/t8120.scala
@@ -0,0 +1,9 @@
+object A {
+  class C {
+    def m(a: Nothing): Int = 0
+  }
+  implicit class RichAny(a: Any) {
+    def m(a: Any): Int = 0
+  }
+  (new C).m({ case (x, y) => x } : Any => Any)
+}
diff --git a/test/files/pos/t8128.scala b/test/files/pos/t8128.scala
new file mode 100644
index 0000000..b6f7669
--- /dev/null
+++ b/test/files/pos/t8128.scala
@@ -0,0 +1,15 @@
+object G {
+  def unapply(m: Any): Option[_] = Some("")
+}
+
+object H {
+  def unapplySeq(m: Any): Option[Seq[_]] = None
+}
+
+object Test {
+  (0: Any) match {
+    case G(v) => v
+    case H(v) => v
+    case _ =>
+  }
+}
diff --git a/test/files/pos/t8132.scala b/test/files/pos/t8132.scala
new file mode 100644
index 0000000..b4d6fd9
--- /dev/null
+++ b/test/files/pos/t8132.scala
@@ -0,0 +1,5 @@
+trait T {
+  protected def s: String
+}
+
+case class G(override protected val s: String) extends T
diff --git a/test/files/pos/t8134/A_1.scala b/test/files/pos/t8134/A_1.scala
new file mode 100644
index 0000000..32bce00
--- /dev/null
+++ b/test/files/pos/t8134/A_1.scala
@@ -0,0 +1,4 @@
+// a.scala
+package object pkg {
+  class AnyOps(val x: Any) extends AnyVal
+}
diff --git a/test/files/pos/t8134/B_2.scala b/test/files/pos/t8134/B_2.scala
new file mode 100644
index 0000000..32bce00
--- /dev/null
+++ b/test/files/pos/t8134/B_2.scala
@@ -0,0 +1,4 @@
+// a.scala
+package object pkg {
+  class AnyOps(val x: Any) extends AnyVal
+}
diff --git a/test/files/pos/t8152-performance.scala b/test/files/pos/t8152-performance.scala
deleted file mode 100644
index b6d2ecd..0000000
--- a/test/files/pos/t8152-performance.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-class HListBench {
-
-  class A[H, T]
-
-  type B[H, T] = A[H, T]
-
-  // was okay
-  type T1 = A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, Nothing]]]]]]]]]]]]]]]]]]]]]]]]]]]]
-
-  // Took over a minute to validate variance in 2.10.3!
-  type T2 = B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, Nothing]]]]]]]]]]]]]]]]]]]]]]]]]]]]
-
-}
\ No newline at end of file
diff --git a/test/files/pos/t8170.scala b/test/files/pos/t8170.scala
new file mode 100644
index 0000000..b65f4b8
--- /dev/null
+++ b/test/files/pos/t8170.scala
@@ -0,0 +1,27 @@
+object O {
+  trait X
+  trait B extends A {
+    override type T[F1 <: X] = F1
+  }
+  trait A {
+    type T[F <: X]
+  }
+}
+
+object Test {
+  import O._
+  val a: B = ???
+  val b: a.T[X] = ???
+  b.ensuring(x => true) // trigger an implicit search
+}
+
+
+/*
+this = {AliasArgsTypeRef at 3004}"Test#7680.a#14899.T#14823[O#7702.X#7793]"
+  sym = type T#14823
+    info = namer: [F#14824 <: O#7703.X#7793]F#14824
+result = {AbstractNoArgsTypeRef at 3237}"F#24451"
+tp = {PolyType at 3235}"[F#14824 <: O#7703.X#7793]F#14824"
+tparams = 
+  (0)  = {AbstractTypeSymbol at 3247}"type F#24451"
+*/
\ No newline at end of file
diff --git a/test/files/pos/t8170b.scala b/test/files/pos/t8170b.scala
new file mode 100644
index 0000000..53036f6
--- /dev/null
+++ b/test/files/pos/t8170b.scala
@@ -0,0 +1,25 @@
+import language._
+
+object ScalaZeee {
+  trait HFold[M[_], U] {
+    type Apply[E, A <: U] <: U
+  }
+  trait GenericCons[M[_], H, +T <: GenericList[M]] extends GenericList[M] {
+    val tail: T
+    override type Folded[N[X] >: M[X], U, F <: HFold[N, U]] = F#Apply[H, tail.Folded[N, U, F]]
+  }
+  val KNil: GenericList[Nothing] = ???
+  sealed trait GenericList[+M[_]] {
+     type Folded[N[X] >: M[X], U, F <: HFold[N, U]] <: U
+  }
+}
+ 
+object TypelevelUsage {
+  import ScalaZeee._
+  type T = GenericCons[Some, String, KNil.type]
+  val klist1: T = ???
+  type T2 = klist1.Folded[Option, Int, HFold[Option, Int]]
+  val count2: T2 = ???
+   
+  count2.ensuring(x => true).toChar // trigger an implicit search
+}
diff --git a/test/files/pos/t8177.scala b/test/files/pos/t8177.scala
new file mode 100644
index 0000000..fe265f8
--- /dev/null
+++ b/test/files/pos/t8177.scala
@@ -0,0 +1,12 @@
+// exercise coevolveSym: SingleType with an underlying RefinedType
+trait Thing { type A }
+object IntThing extends Thing { type A = Int }
+
+// The following erroneously failed with  error: method f overrides nothing.
+// because asSeenFrom produced a typeref of the shape T'#A where A referred to a symbol defined in a T of times past
+// More precisely, the TypeRef case of TypeMap's mapOver correctly modified prefix
+// from having an underlying type of { type A = AIn } to { type A = Int }, with a new symbol for A (now with info Int),
+// but the symbol in the outer type ref wasn't co-evolved (so it still referred to the { type A = AIn } underlying the old prefix)
+// coEvolveSym used to only look at prefixes that were directly RefinedTypes, but they could also be SingleTypes with an underlying RefinedType
+class View[AIn](val in: Thing { type A = AIn }) {          def f(p: in.A): in.A = p }
+class SubView extends View[Int](IntThing)       { override def f(p: in.A): in.A = p }
diff --git a/test/files/pos/t8177a.scala b/test/files/pos/t8177a.scala
new file mode 100644
index 0000000..7e2cfb3
--- /dev/null
+++ b/test/files/pos/t8177a.scala
@@ -0,0 +1,9 @@
+// exercise coevolveSym
+trait Thing { type A; var p: A = _ }
+class AA[T](final val x: Thing { type A = T }) {
+  def foo: x.A = ???
+}
+
+class B extends AA[Int](null) {
+  override def foo: B.this.x.A = super.foo
+}
diff --git a/test/files/pos/t8177b.scala b/test/files/pos/t8177b.scala
new file mode 100644
index 0000000..b7ed934
--- /dev/null
+++ b/test/files/pos/t8177b.scala
@@ -0,0 +1,13 @@
+// exercise coevolveSym: SingleType with an underlying RefinedType, via a type alias
+trait Thing { type A }
+object IntThing extends Thing { type A = Int }
+object ThingHolder { type Alias[AIn] = Thing { type A = AIn } }
+
+// The following erroneously failed with  error: method f overrides nothing.
+// because asSeenFrom produced a typeref of the shape T'#A where A referred to a symbol defined in a T of times past
+// More precisely, the TypeRef case of TypeMap's mapOver correctly modified prefix
+// from having an underlying type of { type A = AIn } to { type A = Int }, with a new symbol for A (now with info Int),
+// but the symbol in the outer type ref wasn't co-evolved (so it still referred to the { type A = AIn } underlying the old prefix)
+// coEvolveSym used to only look at prefixes that were directly RefinedTypes, but they could also be SingleTypes with an underlying RefinedType
+class View[AIn](val in: ThingHolder.Alias[AIn]) {          def f(p: in.A): in.A = p }
+class SubView extends View[Int](IntThing)       { override def f(p: in.A): in.A = p }
\ No newline at end of file
diff --git a/test/files/pos/t8177d.scala b/test/files/pos/t8177d.scala
new file mode 100644
index 0000000..d15a05a
--- /dev/null
+++ b/test/files/pos/t8177d.scala
@@ -0,0 +1,12 @@
+// exercise coevolveSym
+trait HasElem { type A }
+trait View[AIn] {
+  val tc: HasElem { type A = AIn }
+  def f2(p: tc.A): tc.A = p
+}
+
+object Test {
+  val view: View[Int] = null
+
+  view f2 5  // fails
+}
diff --git a/test/files/pos/t8177e.scala b/test/files/pos/t8177e.scala
new file mode 100644
index 0000000..cb1136f
--- /dev/null
+++ b/test/files/pos/t8177e.scala
@@ -0,0 +1,3 @@
+// exercise coevolveSym
+trait T[A] { val foo: { type B = A } = ???; def bar(b: foo.B) = () }
+object O extends T[Int] { bar(0) }
diff --git a/test/files/pos/t8177g.scala b/test/files/pos/t8177g.scala
new file mode 100644
index 0000000..bb66d32
--- /dev/null
+++ b/test/files/pos/t8177g.scala
@@ -0,0 +1,11 @@
+// exercise coevolveSym: ThisType
+trait HasA { type A }
+class AA[T] {
+  type HasAT[T] = HasA{ type A = T }
+  val x: HasAT[T] = ???
+  def foo: x.A = ???
+}
+
+class B extends AA[Int] {
+  override def foo: B.this.x.A = super.foo
+}
\ No newline at end of file
diff --git a/test/files/pos/t8177h.scala b/test/files/pos/t8177h.scala
new file mode 100644
index 0000000..90b8a26
--- /dev/null
+++ b/test/files/pos/t8177h.scala
@@ -0,0 +1,5 @@
+class Module { self =>
+  type settingsType <: Any
+  final type commonModuleType = Module {type settingsType = self.settingsType}
+  def foo(s: self.type): commonModuleType = s
+}
diff --git a/test/files/jvm/t2585.check b/test/files/pos/t8187.check
similarity index 100%
rename from test/files/jvm/t2585.check
rename to test/files/pos/t8187.check
diff --git a/test/files/pos/t8187.scala b/test/files/pos/t8187.scala
new file mode 100644
index 0000000..99b10c6
--- /dev/null
+++ b/test/files/pos/t8187.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+  val tyn: TypeName = (??? : TypeSymbol).name
+  val ten: TermName = (??? : TermSymbol).name
+}
\ No newline at end of file
diff --git a/test/files/pos/t8207.scala b/test/files/pos/t8207.scala
new file mode 100644
index 0000000..680b40f
--- /dev/null
+++ b/test/files/pos/t8207.scala
@@ -0,0 +1,6 @@
+class C { me =>
+  import me.{toString => ts}
+  locally(this: me.type)
+  trait T
+  type X = me.T
+}
diff --git a/test/files/jvm/t680.check b/test/files/pos/t8209a.check
similarity index 100%
rename from test/files/jvm/t680.check
rename to test/files/pos/t8209a.check
diff --git a/test/files/pos/t8209a/Macros_1.scala b/test/files/pos/t8209a/Macros_1.scala
new file mode 100644
index 0000000..17014b4
--- /dev/null
+++ b/test/files/pos/t8209a/Macros_1.scala
@@ -0,0 +1,17 @@
+import scala.language.experimental.macros
+import scala.language.implicitConversions
+import scala.reflect.macros.blackbox.Context
+
+class A
+object A { implicit def a2b(a: A): B = ??? }
+class B
+class C extends A
+
+object Macros {
+  def impl(c: Context) = {
+    import c.universe._
+    q"new C"
+  }
+
+  def foo: A = macro impl
+}
\ No newline at end of file
diff --git a/test/files/pos/t8209a/Test_2.scala b/test/files/pos/t8209a/Test_2.scala
new file mode 100644
index 0000000..e19d572
--- /dev/null
+++ b/test/files/pos/t8209a/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  val a: A = Macros.foo
+  val b: B = Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/jvm/xml02.check b/test/files/pos/t8209b.check
similarity index 100%
rename from test/files/jvm/xml02.check
rename to test/files/pos/t8209b.check
diff --git a/test/files/pos/t8209b/Macros_1.scala b/test/files/pos/t8209b/Macros_1.scala
new file mode 100644
index 0000000..705f7d6
--- /dev/null
+++ b/test/files/pos/t8209b/Macros_1.scala
@@ -0,0 +1,17 @@
+import scala.language.experimental.macros
+import scala.language.implicitConversions
+import scala.reflect.macros.whitebox.Context
+
+class A
+object A { implicit def a2b(a: A): B = ??? }
+class B
+class C extends A
+
+object Macros {
+  def impl(c: Context) = {
+    import c.universe._
+    q"new C"
+  }
+
+  def foo: A = macro impl
+}
\ No newline at end of file
diff --git a/test/files/pos/t8209b/Test_2.scala b/test/files/pos/t8209b/Test_2.scala
new file mode 100644
index 0000000..e19d572
--- /dev/null
+++ b/test/files/pos/t8209b/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  val a: A = Macros.foo
+  val b: B = Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/pos/t8219.scala b/test/files/pos/t8219.scala
new file mode 100644
index 0000000..e1653b6
--- /dev/null
+++ b/test/files/pos/t8219.scala
@@ -0,0 +1,15 @@
+trait Equalizer[T]
+trait Gen[A]
+
+class Broken {
+  implicit def const[T](x: T): Gen[T] = ???
+  implicit def convertToEqualizer[T](left: T): Equalizer[T] = ???
+
+  def in(a: Any) = ()
+  in {
+    import scala.None // any import will do..
+    "" == "" // this no longer triggers the bug, as Object#== now overrides Any#==
+  }
+
+  // We can still trigger the bug with a structural type, see pending/neg/t8219.scala
+}
diff --git a/test/files/pos/t8219b.scala b/test/files/pos/t8219b.scala
new file mode 100644
index 0000000..d55d313
--- /dev/null
+++ b/test/files/pos/t8219b.scala
@@ -0,0 +1,49 @@
+trait Equalizer[T]
+trait Gen[A]
+
+class Broken {
+  implicit def const[T](x: T): Gen[T] = ???
+  implicit def convertToEqualizer[T](left: T): Equalizer[T] = ???
+
+  def in(a: Any) = ()
+  in {
+    import scala.None // any import will do..
+    "" == "" // no longer a problem, see pos/t8129.scala
+  }
+
+  // We used to fall into the errant code path above when `Any#==` and `AnyRef#==`
+  // were overloaded.
+  //
+  // Real classes couldn't get away with that overloading; it would result in
+  // a compiler error because the variants would collapse into an overriding
+  // relationship after erasure.
+  //
+  //
+  // But, a structural type can! This triggers the same error, and served as
+  // a backstop for this test if we change the signatures of `AnyRef#==` to
+  // override `Any#==`.
+  type T = {
+    def a(a: AnyRef): Boolean
+    def a(a: Any): Boolean
+  }
+
+  def t: T = ???
+
+  in {
+    import scala.None // any import will do..
+    t.a("")
+  }
+
+  // Or, we can get here with ambiguous implicits from the formal parameter
+  // type of the less specific overload to that of the more specific.
+  object T {
+    def foo(a: Any) = true
+    def foo(a: String) = true
+  }
+  in {
+    import scala.None
+    implicit def any2str1(a: Any) = ""
+    implicit def any2str2(a: Any) = ""
+    T.foo("")
+  }
+}
diff --git a/test/files/pos/t8223.scala b/test/files/pos/t8223.scala
new file mode 100644
index 0000000..52d6b00
--- /dev/null
+++ b/test/files/pos/t8223.scala
@@ -0,0 +1,29 @@
+package p {
+  class ViewEnv[AIn] {
+    type A = AIn
+    class SubView { def has(x: A): Boolean = ??? }
+    def get: SubView = new SubView
+  }
+
+  trait HasA { type A }
+  trait Indexable[R] extends HasA
+  class ArrayTC[AIn] extends Indexable[Array[AIn]] { type A = AIn }
+}
+
+package object p {
+  implicit def arrayTypeClass[A] : ArrayTC[A] = new ArrayTC[A]
+  object intArrayTC extends ArrayTC[Int]
+
+  type EnvAlias[W <: HasA] = ViewEnv[W#A]
+  type SubAlias[W <: HasA] = ViewEnv[W#A]#SubView
+
+  def f0[R](xs: R)(implicit tc: Indexable[R]): ViewEnv[tc.A]#SubView     = new ViewEnv[tc.A]() get
+  def f1[R](xs: R)(implicit tc: Indexable[R]): EnvAlias[tc.type]#SubView = new ViewEnv[tc.A]() get
+  def f2[R](xs: R)(implicit tc: Indexable[R]): SubAlias[tc.type]         = new ViewEnv[tc.A]() get
+
+  def g0 = f0(Array(1)) has 2                   // ok
+  def g1 = f1(Array(1)) has 2                   // ok
+  def g2 = f2(Array(1)) has 2                   // "found: Int(2), required: tc.A"
+  def g3 = f2(Array(1))(new ArrayTC[Int]) has 2 // "found: Int(2), required: tc.A"
+  def g4 = f2(Array(1))(intArrayTC) has 2       // ok
+}
diff --git a/test/files/pos/t8224.scala b/test/files/pos/t8224.scala
new file mode 100644
index 0000000..2fae925
--- /dev/null
+++ b/test/files/pos/t8224.scala
@@ -0,0 +1,12 @@
+import language.higherKinds
+
+trait P  [N1, +E1[X <: N1]]
+trait PIn[N2, +E2[X <: N2]] extends P[Int,Any]
+
+trait EI extends PIn[Int, Nothing]
+trait NI extends PIn[Int, Nothing]
+
+object Test {
+  val lub = if (true) ??? : EI else ??? : NI
+  val pin: PIn[Int,Nothing] = lub
+}
diff --git a/test/files/pos/t8237.scala b/test/files/pos/t8237.scala
new file mode 100644
index 0000000..0050890
--- /dev/null
+++ b/test/files/pos/t8237.scala
@@ -0,0 +1,29 @@
+import scala.language.higherKinds
+
+object TestExplicit {
+  trait TC[A]
+  def fTt[A,E[X] <: List[X]](a: A)(implicit tt: TC[E[A]]) = a
+  implicit def tc[T]: TC[T] = ???
+
+  // Typechecking results in SOE in TypeVar.isGround
+  fTt(1)(tc)
+  // fun = TestExplicit.this.fTt[Int, E](1)
+  // args = TestExplicit.this.tc[E[Int]]
+  // argTpes.head.instantiateTypeParams = TC[?E#1[Int]]
+  // formals.head.instantiateTypeParams = TC[?E#2[Int]]
+  //   (where ?E#1 and ?E#2 are distinct AppliedTypeVars that resulted
+  //    from separate applications of type args to the same HKTypeVar, ?E)
+  //
+  // As we check if the argument conforms to the formal, we would have
+  // AppliedTypeVars sharing the same TypeConstraints on the LHS and RHS,
+  // which leads to a cyclic constraint.
+}
+
+object TestImplicit    {
+  trait TC[A]
+  def fTt[A,E[X] <: List[X]](a: A)(implicit tt: TC[E[A]]) = a
+  implicit def tc[T]: TC[T] = ???
+
+  // Oddly enough, this one works.
+  fTt(1)
+}
diff --git a/test/files/pos/t8237b.scala b/test/files/pos/t8237b.scala
new file mode 100644
index 0000000..52bb310
--- /dev/null
+++ b/test/files/pos/t8237b.scala
@@ -0,0 +1,10 @@
+import scala.language.higherKinds
+import scala.reflect.runtime.universe._
+object Test {
+
+  def fTt[A,E[X]<:List[X]](a: A)(implicit tt: TypeTag[E[A]]) = a
+
+  trait TC[A]
+  implicit def TCListInt[A]: TC[A] = ???
+  fTt(1)
+}
diff --git a/test/files/pos/t8244d/InodeBase_1.java b/test/files/pos/t8244d/InodeBase_1.java
new file mode 100644
index 0000000..36c2123
--- /dev/null
+++ b/test/files/pos/t8244d/InodeBase_1.java
@@ -0,0 +1,6 @@
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+
+abstract class INodeBase_1<K, V> {
+    @SuppressWarnings("rawtypes")
+    public static final AtomicReferenceFieldUpdater<INodeBase_1, Object> updater = null;
+}
diff --git a/test/files/pos/t8244d/Test_2.scala b/test/files/pos/t8244d/Test_2.scala
new file mode 100644
index 0000000..cb39c96
--- /dev/null
+++ b/test/files/pos/t8244d/Test_2.scala
@@ -0,0 +1,3 @@
+class INodeX[K, V] extends INodeBase_1[K, V] {
+  INodeBase_1.updater.set(this, null)
+}
diff --git a/test/files/pos/t8300-conversions-a.scala b/test/files/pos/t8300-conversions-a.scala
new file mode 100644
index 0000000..248a8b7
--- /dev/null
+++ b/test/files/pos/t8300-conversions-a.scala
@@ -0,0 +1,23 @@
+// cf. pos/t8300-conversions-b.scala
+trait Universe {
+  type Symbol >: Null <: AnyRef with SymbolApi
+  trait SymbolApi
+
+  type TypeSymbol >: Null <: Symbol with TypeSymbolApi
+  trait TypeSymbolApi extends SymbolApi
+
+  type FreeTypeSymbol >: Null <: TypeSymbol with FreeTypeSymbolApi
+  trait FreeTypeSymbolApi extends TypeSymbolApi
+
+  implicit class CompatibleSymbol(sym: Symbol) {
+    def asFreeType: FreeTypeSymbol = ???
+  }
+}
+
+object Test extends App {
+  val u: Universe = ???
+  import u._
+
+  val sym: Symbol = ???
+  sym.asFreeType
+}
\ No newline at end of file
diff --git a/test/files/pos/t8300-conversions-b.scala b/test/files/pos/t8300-conversions-b.scala
new file mode 100644
index 0000000..0524ee3
--- /dev/null
+++ b/test/files/pos/t8300-conversions-b.scala
@@ -0,0 +1,23 @@
+// cf. pos/t8300-conversions-a.scala
+trait Universe {
+  type Symbol >: Null <: AnyRef with SymbolApi
+  trait SymbolApi
+
+  type TypeSymbol >: Null <: TypeSymbolApi with Symbol
+  trait TypeSymbolApi extends SymbolApi
+
+  type FreeTypeSymbol >: Null <: FreeTypeSymbolApi with TypeSymbol
+  trait FreeTypeSymbolApi extends TypeSymbolApi
+
+  implicit class CompatibleSymbol(sym: Symbol) {
+    def asFreeType: FreeTypeSymbol = ???
+  }
+}
+
+object Test extends App {
+  val u: Universe = ???
+  import u._
+
+  val sym: Symbol = ???
+  sym.asFreeType
+}
\ No newline at end of file
diff --git a/test/files/pos/t8300-overloading.scala b/test/files/pos/t8300-overloading.scala
new file mode 100644
index 0000000..ae9699a
--- /dev/null
+++ b/test/files/pos/t8300-overloading.scala
@@ -0,0 +1,16 @@
+// cf. neg/t8300-overloading.scala
+trait Universe {
+  type Name >: Null <: AnyRef with NameApi
+  trait NameApi
+
+  type TermName >: Null <: TermNameApi with Name
+  trait TermNameApi extends NameApi
+}
+
+object Test extends App {
+  val u: Universe = ???
+  import u._
+
+  def foo(name: Name) = ???
+  def foo(name: TermName) = ???
+}
\ No newline at end of file
diff --git a/test/files/pos/t8300-patmat-a.scala b/test/files/pos/t8300-patmat-a.scala
new file mode 100644
index 0000000..4421c0a
--- /dev/null
+++ b/test/files/pos/t8300-patmat-a.scala
@@ -0,0 +1,20 @@
+// cf. pos/t8300-patmat-b.scala
+trait Universe {
+  type Name >: Null <: AnyRef with NameApi
+  trait NameApi
+
+  type TermName >: Null <: Name with TermNameApi
+  trait TermNameApi extends NameApi
+}
+
+object Test extends App {
+  val u: Universe = ???
+  import u._
+
+  locally {
+    val ScalaName: TermName = ???
+    ??? match {
+      case ScalaName => ???
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/files/pos/t8300-patmat-b.scala b/test/files/pos/t8300-patmat-b.scala
new file mode 100644
index 0000000..c01aeb9
--- /dev/null
+++ b/test/files/pos/t8300-patmat-b.scala
@@ -0,0 +1,20 @@
+// cf. pos/t8300-patmat-a.scala
+trait Universe {
+  type Name >: Null <: AnyRef with NameApi
+  trait NameApi
+
+  type TermName >: Null <: TermNameApi with Name
+  trait TermNameApi extends NameApi
+}
+
+object Test extends App {
+  val u: Universe = ???
+  import u._
+
+  locally {
+    val ScalaName: TermName = ???
+    ??? match {
+      case ScalaName => ???
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/files/pos/t8301.scala b/test/files/pos/t8301.scala
new file mode 100644
index 0000000..2d10864
--- /dev/null
+++ b/test/files/pos/t8301.scala
@@ -0,0 +1,19 @@
+trait Universe {
+  type Symbol >: Null <: AnyRef with SymbolApi
+  trait SymbolApi
+
+  type TypeSymbol >: Null <: TypeSymbolApi with Symbol
+  trait TypeSymbolApi
+
+  implicit class CompatibleSymbol(sym: Symbol) {
+    def asFreeType: TypeSymbol = ???
+  }
+}
+
+object Test extends App {
+  val u: Universe = ???
+  import u._
+
+  val sym: Symbol = ???
+  sym.asFreeType
+}
diff --git a/test/files/pos/t8301b.scala b/test/files/pos/t8301b.scala
new file mode 100644
index 0000000..5641547
--- /dev/null
+++ b/test/files/pos/t8301b.scala
@@ -0,0 +1,36 @@
+// cf. pos/t8300-patmat.scala
+trait Universe {
+  type Name >: Null <: AnyRef with NameApi
+  trait NameApi
+ 
+  type TermName >: Null <: TermNameApi with Name
+  trait TermNameApi extends NameApi
+}
+ 
+object Test extends App {
+  val u: Universe = ???
+  import u._
+ 
+  val ScalaName: TermName = ???
+  locally {
+    
+    ??? match {
+      case Test.ScalaName => ???
+    }
+    import Test.ScalaName._
+
+    ??? match {
+      case ScalaName => ???
+    }
+    import ScalaName._
+
+    // both the pattern and import led to
+    // stable identifier required, but SN found. Note that value SN 
+    // is not stable because its type, Test.u.TermName, is volatile.
+    val SN = ScalaName
+    ??? match {
+      case SN => ???
+    }
+    import SN._
+  }
+}
diff --git a/test/files/pos/t8306.flags b/test/files/pos/t8306.flags
new file mode 100644
index 0000000..49d036a
--- /dev/null
+++ b/test/files/pos/t8306.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/pos/t8306.scala b/test/files/pos/t8306.scala
new file mode 100644
index 0000000..e04b054
--- /dev/null
+++ b/test/files/pos/t8306.scala
@@ -0,0 +1,8 @@
+class Si8306 {
+  def foo: Int = 123
+  lazy val extension: Int =
+      foo match {
+          case idx if idx != -1 => 15
+          case _ => 17
+      }
+}
diff --git a/test/files/pos/t8315.flags b/test/files/pos/t8315.flags
new file mode 100644
index 0000000..c926ad6
--- /dev/null
+++ b/test/files/pos/t8315.flags
@@ -0,0 +1 @@
+-Yinline -Ydead-code
diff --git a/test/files/pos/t8315.scala b/test/files/pos/t8315.scala
new file mode 100644
index 0000000..2f7742e
--- /dev/null
+++ b/test/files/pos/t8315.scala
@@ -0,0 +1,12 @@
+object Test {
+  def crash(as: Listt): Unit = {
+    map(as, (_: Any) => return)
+  }
+ 
+  final def map(x: Listt, f: Any => Any): Any = {
+    if (x eq Nill) "" else f("")
+  }
+}
+ 
+object Nill extends Listt
+class Listt
diff --git a/test/files/pos/t8315b.flags b/test/files/pos/t8315b.flags
new file mode 100644
index 0000000..c926ad6
--- /dev/null
+++ b/test/files/pos/t8315b.flags
@@ -0,0 +1 @@
+-Yinline -Ydead-code
diff --git a/test/files/pos/t8315b.scala b/test/files/pos/t8315b.scala
new file mode 100644
index 0000000..d7a2bf5
--- /dev/null
+++ b/test/files/pos/t8315b.scala
@@ -0,0 +1,11 @@
+object Test extends Object {
+  def crash: Unit = {
+    val key = ""
+    try map(new F(key))
+    catch { case _: Throwable => }
+  };
+  final def map(f: F): Any = f.apply("");
+};
+final class F(key: String) {
+  final def apply(a: Any): Any = throw new RuntimeException(key);
+}
diff --git a/test/files/pos/t8324.scala b/test/files/pos/t8324.scala
new file mode 100644
index 0000000..2cb1562
--- /dev/null
+++ b/test/files/pos/t8324.scala
@@ -0,0 +1,16 @@
+package p1
+
+private abstract class ProjectDef(val autoPlugins: Any) extends ProjectDefinition
+sealed trait ResolvedProject extends ProjectDefinition {
+  def autoPlugins: Any
+}
+
+sealed trait ProjectDefinition {
+  private[p1] def autoPlugins: Any
+}
+
+
+object Test {
+  // was "error: value autoPlugins in class ProjectDef of type Any cannot override final member"
+  new ProjectDef(null) with ResolvedProject
+}
diff --git a/test/files/neg/t696a.flags b/test/files/pos/t8352.check
similarity index 100%
rename from test/files/neg/t696a.flags
rename to test/files/pos/t8352.check
diff --git a/test/files/pos/t8352/Macros_1.scala b/test/files/pos/t8352/Macros_1.scala
new file mode 100644
index 0000000..f5c8ce5
--- /dev/null
+++ b/test/files/pos/t8352/Macros_1.scala
@@ -0,0 +1,7 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+object Macros {
+  def impl(c: Context)(x: c.Expr[Boolean]): c.Expr[Boolean] = x
+  def foo(x: Boolean): Boolean = macro impl
+}
\ No newline at end of file
diff --git a/test/files/pos/t8352/Test_2.scala b/test/files/pos/t8352/Test_2.scala
new file mode 100644
index 0000000..b5bfe92
--- /dev/null
+++ b/test/files/pos/t8352/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+  def expectUnit() {
+    Macros.foo(true)
+  }
+}
\ No newline at end of file
diff --git a/test/files/pos/t8363.flags b/test/files/pos/t8363.flags
new file mode 100644
index 0000000..48b438d
--- /dev/null
+++ b/test/files/pos/t8363.flags
@@ -0,0 +1 @@
+-Ydelambdafy:method
diff --git a/test/files/pos/t8363.scala b/test/files/pos/t8363.scala
new file mode 100644
index 0000000..639faf4
--- /dev/null
+++ b/test/files/pos/t8363.scala
@@ -0,0 +1,7 @@
+class C(a: Any)
+class Test {  
+  def foo: Any = {
+    def form = 0
+    class C1 extends C(() => form)
+  }
+}
diff --git a/test/files/pos/annotated-treecopy.check b/test/files/pos/t8364.check
similarity index 100%
rename from test/files/pos/annotated-treecopy.check
rename to test/files/pos/t8364.check
diff --git a/test/files/pos/t8364.scala b/test/files/pos/t8364.scala
new file mode 100644
index 0000000..7a7ea1f
--- /dev/null
+++ b/test/files/pos/t8364.scala
@@ -0,0 +1,12 @@
+import scala.language.dynamics
+
+object MyDynamic extends Dynamic {
+  def selectDynamic(name: String): Any = ???
+}
+
+object Test extends App {
+  locally {
+    import java.lang.String
+    MyDynamic.id
+  }
+}
diff --git a/test/files/pos/t8367.scala b/test/files/pos/t8367.scala
new file mode 100644
index 0000000..cae2415
--- /dev/null
+++ b/test/files/pos/t8367.scala
@@ -0,0 +1,11 @@
+package java.lang
+
+// SI-8367 shows something is wrong with primaryConstructor and it was made worse with the fix for SI-8192
+// perhaps primaryConstructor should not return NoSymbol when isJavaDefined
+// or, perhaps isJavaDefined should be refined (the package definition above is pretty sneaky) 
+// also, why does this only happen for a (scala-defined!) class with this special name?
+// (there are a couple of others: CloneNotSupportedException,InterruptedException)
+class Throwable
+
+// class CloneNotSupportedException 
+// class InterruptedException
\ No newline at end of file
diff --git a/test/files/pos/attachments-typed-ident.check b/test/files/pos/t8369a.check
similarity index 100%
rename from test/files/pos/attachments-typed-ident.check
rename to test/files/pos/t8369a.check
diff --git a/test/files/pos/t8369a.scala b/test/files/pos/t8369a.scala
new file mode 100644
index 0000000..0596fda
--- /dev/null
+++ b/test/files/pos/t8369a.scala
@@ -0,0 +1,5 @@
+object Bug {
+  trait Sys[S]
+  def test[S <: Sys[S]] = read[S]()
+  def read[S <: Sys[S]](baz: Any = 0): Some[S] = ???
+}
\ No newline at end of file
diff --git a/test/files/pos/macro-qmarkqmarkqmark.check b/test/files/pos/t8369b.check
similarity index 100%
rename from test/files/pos/macro-qmarkqmarkqmark.check
rename to test/files/pos/t8369b.check
diff --git a/test/files/pos/t8369b.scala b/test/files/pos/t8369b.scala
new file mode 100644
index 0000000..8145911
--- /dev/null
+++ b/test/files/pos/t8369b.scala
@@ -0,0 +1,18 @@
+object Bug {
+  trait Sys[S] {
+    type Tx
+  }
+
+  trait Baz[-Tx]
+
+  trait Foo[S <: Sys[S]] {
+    def bar: Bar[S] = Bar.read[S]()
+  }
+
+  object Bar {
+    object NoBaz extends Baz[Any]
+
+    def read[S <: Sys[S]](baz: Baz[S#Tx] = NoBaz): Bar[S] = ???
+  }
+  trait Bar[S <: Sys[S]]
+}
\ No newline at end of file
diff --git a/test/files/pos/t8376/BindingsX.java b/test/files/pos/t8376/BindingsX.java
new file mode 100644
index 0000000..165fdaf
--- /dev/null
+++ b/test/files/pos/t8376/BindingsX.java
@@ -0,0 +1,13 @@
+/**
+ * A simple Java class implementing methods similar to the new JavaFX `Bindings`.
+ */
+public final class BindingsX {
+
+    public static void select(String root, String... steps) {
+        throw new UnsupportedOperationException("Not implemented");
+    }
+
+    public static void select(Object root, String... steps) {
+        throw new UnsupportedOperationException("Not implemented");
+    }
+}
diff --git a/test/files/pos/t8376/Test.scala b/test/files/pos/t8376/Test.scala
new file mode 100644
index 0000000..ba078a3
--- /dev/null
+++ b/test/files/pos/t8376/Test.scala
@@ -0,0 +1,10 @@
+class Test {
+  BindingsX.select("", "") // okay in 2.10, fails in 2.11
+
+  BindingsY.select1("", "") // okay in both
+}
+
+object BindingsY {
+  def select1(root: String, steps: String*) = ()  
+  def select1(root: Any, steps: String*) = ()
+}
diff --git a/test/files/pos/t8403.scala b/test/files/pos/t8403.scala
new file mode 100644
index 0000000..eea60ed
--- /dev/null
+++ b/test/files/pos/t8403.scala
@@ -0,0 +1,9 @@
+trait Bug {
+  val u: { type Amb } = ???
+  import u._
+ 
+  class Amb { def x = 0 }
+  class C(x: Amb) { // after dbd8457e4, "reference to Amb is ambiguous"
+    x.x
+  }
+}
diff --git a/test/files/pos/t8411/Macros_1.scala b/test/files/pos/t8411/Macros_1.scala
new file mode 100644
index 0000000..c5319c5
--- /dev/null
+++ b/test/files/pos/t8411/Macros_1.scala
@@ -0,0 +1,10 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def defaultZeroCase(pf: PartialFunction[Int, Int]): PartialFunction[Int, Int] = macro impl
+  def impl(c: Context)(pf: c.Tree) = { import c.universe._
+    val q"{ case ..$cases }" = pf
+    q"{ case ..$cases case _ => 0 }"
+  }
+}
diff --git a/test/files/pos/t8411/Test_2.scala b/test/files/pos/t8411/Test_2.scala
new file mode 100644
index 0000000..a0ad30f
--- /dev/null
+++ b/test/files/pos/t8411/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  val pf = Macros.defaultZeroCase { case 1 => 2 }
+  assert(pf(2) == 0)
+}
diff --git a/test/files/pos/t8460.scala b/test/files/pos/t8460.scala
new file mode 100644
index 0000000..10d2ed4
--- /dev/null
+++ b/test/files/pos/t8460.scala
@@ -0,0 +1,25 @@
+object tan extends UFunc {
+  implicit def ImplDouble: Impl[Double, Double] = ???
+}
+
+trait UFunc {
+  trait TC[+A]
+  type Impl[V, VR] = UFunc.UImpl[this.type, V, VR]
+}
+
+object UFunc {
+  class UImpl[A, B, C]
+  implicit def implicitDoubleUTag[Tag, V, VR](implicit conv: V=>Double, impl: UImpl[Tag, Double, VR]):UImpl[Tag, V, VR] = ???
+
+}
+
+object Test {
+  implicitly[tan.Impl[Double, Double]]
+  // we should discard the one and only divergent implicit (`implicitDoubleUTag`)
+  // This is done under `scalac-hash v2.10.4 test.scala`, but not under
+  // `scalac-hash v2.10.4 -Xdivergence211 test.scala`
+  //
+  // This seems to be because the companion implicits contain redundant entries
+  //
+
+}
diff --git a/test/files/pos/t880.scala b/test/files/pos/t880.scala
deleted file mode 100644
index cceb53c..0000000
--- a/test/files/pos/t880.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import scala.xml.Null
-
-class Test[A >: Null]
-{
-    val x : A = null
-}
diff --git a/test/files/pos/t911.scala b/test/files/pos/t911.scala
index 224b14c..cfa4f49 100644
--- a/test/files/pos/t911.scala
+++ b/test/files/pos/t911.scala
@@ -1,6 +1,6 @@
 object Test {
-def foo : Any = {
-  case class Foo {}
-  Foo;
-}
+  def foo: Any = {
+    case class Foo() {}
+    Foo;
+  }
 }
diff --git a/test/files/pos/t927.scala b/test/files/pos/t927.scala
index 534f355..c903f19 100644
--- a/test/files/pos/t927.scala
+++ b/test/files/pos/t927.scala
@@ -7,5 +7,5 @@ object Test {
     }
   val str: Stream[Int] = List(1,2,3).iterator.toStream
   assert(sum(str) == 6)
-  
+
 }
diff --git a/test/files/pos/t946.scala b/test/files/pos/t946.scala
index 9f4cdbc..c4bd6e9 100644
--- a/test/files/pos/t946.scala
+++ b/test/files/pos/t946.scala
@@ -1,7 +1,7 @@
 object pmbugbounds {
   trait Bar
   class Foo[t <: Bar] {}
-            
+
   (new Foo[Bar]) match {
     case _ : Foo[x] => null
   }
diff --git a/test/files/pos/tcpoly_boundedmonad.scala b/test/files/pos/tcpoly_boundedmonad.scala
index 24a9117..8c605dc 100644
--- a/test/files/pos/tcpoly_boundedmonad.scala
+++ b/test/files/pos/tcpoly_boundedmonad.scala
@@ -1,19 +1,19 @@
 trait Monad[T <: Bound[T], MyType[x <: Bound[x]], Bound[_]] {
-  def map[S <: Bound[S]](f: T => S): MyType[S] 
+  def map[S <: Bound[S]](f: T => S): MyType[S]
 
-  def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],  
+  def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
               Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
-              (f: T => Result[S]): Result[S] 
+              (f: T => Result[S]): Result[S]
 
   def filter(p: T => Boolean): MyType[T]
 }
 
 class Set[T <: Ordered[T]] extends Monad[T, Set, Ordered] {
-  def map[S <: Ordered[S]](f: T => S): Set[S] = error("TODO") 
-  
-  def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],  
+  def map[S <: Ordered[S]](f: T => S): Set[S] = sys.error("TODO")
+
+  def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
               Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
-              (f: T => Result[S]): Result[S]  = error("TODO") 
-              
-  def filter(p: T => Boolean): Set[T] = error("TODO")               
+              (f: T => Result[S]): Result[S]  = sys.error("TODO")
+
+  def filter(p: T => Boolean): Set[T] = sys.error("TODO")
 }
diff --git a/test/files/pos/tcpoly_bounds1.scala b/test/files/pos/tcpoly_bounds1.scala
index 142c0b7..63263cb 100644
--- a/test/files/pos/tcpoly_bounds1.scala
+++ b/test/files/pos/tcpoly_bounds1.scala
@@ -1,7 +1,7 @@
-class Foo[t[x]<: Pair[Int, x]]      
+class Foo[t[x]<: Tuple2[Int, x]]
 
-// 
-class MyPair[z](a: Int, b: z) extends Pair[Int, z](a,b)
+//
+class MyPair[z](a: Int, b: z) extends Tuple2[Int, z](a,b)
 
 object foo extends Foo[MyPair]
 
diff --git a/test/files/pos/tcpoly_checkkinds_mix.scala b/test/files/pos/tcpoly_checkkinds_mix.scala
index 2d265da..3734405 100644
--- a/test/files/pos/tcpoly_checkkinds_mix.scala
+++ b/test/files/pos/tcpoly_checkkinds_mix.scala
@@ -2,9 +2,9 @@ trait Iterable[A <: Bound[A], Bound[_]] {
   type MyType[x <: Bound[x]] <: Iterable[x, Bound]
   def map[B <: Bound[B]](f: A => B): MyType[B]
   def flatMap[B <: Bound[B]](f: A => MyType[B]): MyType[B]
-  def filter(p: A => Boolean): MyType[A] 
+  def filter(p: A => Boolean): MyType[A]
 }
 
-trait OrderedSet[T <: Ordered[T]] extends Iterable[T, Ordered] { 
+trait OrderedSet[T <: Ordered[T]] extends Iterable[T, Ordered] {
   type MyType[x <: Ordered[x]] = OrderedSet[x]
-} 
+}
diff --git a/test/files/pos/tcpoly_gm.scala b/test/files/pos/tcpoly_gm.scala
index ecaeef9..89b66cf 100644
--- a/test/files/pos/tcpoly_gm.scala
+++ b/test/files/pos/tcpoly_gm.scala
@@ -1,4 +1,4 @@
-trait Rep[a] {           
+trait Rep[a] {
   def rep[m[x]]: m[a] // typedTypeApply must use asSeenFrom to adapt the return type
    // since rep is called on x: Rep[t]
    // a must become t
@@ -9,7 +9,6 @@ case class ShowBin[b](app: b => String)
 object foo {
   def showBin[t](x: Rep[t], y: t): String = {
     val r: ShowBin[t] = x.rep[ShowBin]
-    r.app(y) 
+    r.app(y)
   }
 }
- 
diff --git a/test/files/pos/tcpoly_higherorder_bound_method.scala b/test/files/pos/tcpoly_higherorder_bound_method.scala
index 090bb8f..3905b3b 100644
--- a/test/files/pos/tcpoly_higherorder_bound_method.scala
+++ b/test/files/pos/tcpoly_higherorder_bound_method.scala
@@ -1,3 +1,3 @@
 trait SkolemisationOfHigherOrderBoundInMethod {
  def method[A, N[X <: A], M[X <: N[A]]]: Unit
-}  
+}
diff --git a/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala b/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala
index 97594d5..f719972 100644
--- a/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala
+++ b/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala
@@ -2,15 +2,15 @@ import scala.collection.generic.GenericTraversableTemplate
 import scala.collection.Iterable
 
 class IterableOps[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) {
-  def unzip: (CC[A1], CC[A2]) = error("foo")
+  def unzip: (CC[A1], CC[A2]) = sys.error("foo")
 }
 
 object Test {
 
-  implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) 
+  implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2]))
       = new IterableOps[CC, A1, A2](tuple)
-    
+
   val t = (List(1, 2, 3), List(6, 5, 4))
 
   tupleOfIterableWrapper(t) unzip
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala b/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala
index 3073b29..1924350 100644
--- a/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala
+++ b/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala
@@ -2,7 +2,7 @@ import scala.collection.generic.GenericTraversableTemplate
 import scala.collection.Iterable
 
 class IterableOps[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) {
-  def unzip: (CC[A1], CC[A2]) = error("foo")
+  def unzip: (CC[A1], CC[A2]) = sys.error("foo")
 }
 
 object Test {
@@ -15,4 +15,4 @@ object Test {
   tupleOfIterableWrapper(t) unzip
 
   t unzip
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/tcpoly_late_method_params.scala b/test/files/pos/tcpoly_late_method_params.scala
index c929891..e2f0bcf 100644
--- a/test/files/pos/tcpoly_late_method_params.scala
+++ b/test/files/pos/tcpoly_late_method_params.scala
@@ -1,5 +1,5 @@
 trait Foo {
-  def flatMap[RT <: RBound[RT], RBound[_], Result[x <: RBound[x]]]: Result[RT] 
+  def flatMap[RT <: RBound[RT], RBound[_], Result[x <: RBound[x]]]: Result[RT]
 // bounds for RT& = >: scala.this.Nothing <: RBound&[RT&]
                                    // bounds for x = >: scala.this.Nothing <: RBound&[x]
 }
diff --git a/test/files/pos/tcpoly_method.scala b/test/files/pos/tcpoly_method.scala
index 80dc048..294b53b 100644
--- a/test/files/pos/tcpoly_method.scala
+++ b/test/files/pos/tcpoly_method.scala
@@ -1,6 +1,6 @@
 trait Iterable[m[+x], +t] {
   def flatMap[resColl[+x] <: Iterable[resColl, x], s](f: t => resColl[s]): resColl[s]
-  
+
   def foo[a[x]] = "a"
   val x = foo[List]
 }
diff --git a/test/files/pos/tcpoly_overloaded.scala b/test/files/pos/tcpoly_overloaded.scala
index 4240074..4f63346 100644
--- a/test/files/pos/tcpoly_overloaded.scala
+++ b/test/files/pos/tcpoly_overloaded.scala
@@ -1,10 +1,10 @@
 trait Monad[T <: Bound[T], MyType[x <: Bound[x]], Bound[_]] {
-  def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],  
+  def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
               Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
-              (f: T => Result[S]): Result[S] 
-  def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],  
+              (f: T => Result[S]): Result[S]
+  def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
               Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
-              (f: T => Result[S], foo: String): Result[S] 
+              (f: T => Result[S], foo: String): Result[S]
   def flatMap[S <: Bound[S]]
               (f: T => MyType[S], foo: Int): MyType[S]
 }
@@ -12,14 +12,14 @@ trait Monad[T <: Bound[T], MyType[x <: Bound[x]], Bound[_]] {
 trait Test {
   def moo: MList[Int]
   class MList[T](el: T) extends Monad[T, List, Any] {
-    def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],  
+    def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
             Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
-            (f: T => Result[S]): Result[S] = error("foo")
-    def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],  
+            (f: T => Result[S]): Result[S] = sys.error("foo")
+    def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
             Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
-            (f: T => Result[S], foo: String): Result[S]  = error("foo")
+            (f: T => Result[S], foo: String): Result[S]  = sys.error("foo")
     def flatMap[S]
-            (f: T => List[S], foo: Int): List[S]  = error("foo")              
+            (f: T => List[S], foo: Int): List[S]  = sys.error("foo")
   }
   val l: MList[String] = moo.flatMap[String, List, Any, MList]((x: Int) => new MList("String"))
 }
diff --git a/test/files/pos/tcpoly_poly.scala b/test/files/pos/tcpoly_poly.scala
index 50ffc78..1ba04e2 100644
--- a/test/files/pos/tcpoly_poly.scala
+++ b/test/files/pos/tcpoly_poly.scala
@@ -1,3 +1,3 @@
-class Monad[m[x]] 
+class Monad[m[x]]
 
 object ml extends Monad[List]
diff --git a/test/files/pos/tcpoly_return_overriding.scala b/test/files/pos/tcpoly_return_overriding.scala
index 0814e73..57ec8da 100644
--- a/test/files/pos/tcpoly_return_overriding.scala
+++ b/test/files/pos/tcpoly_return_overriding.scala
@@ -2,7 +2,7 @@ trait Generic[g[x]] {
   def unit: g[Unit]
 }
 
-trait Rep[t] {           
+trait Rep[t] {
   def rep[m[x]](implicit gen: Generic[m]): m[t]
 }
 
diff --git a/test/files/pos/tcpoly_seq.scala b/test/files/pos/tcpoly_seq.scala
index b5f46f6..48b3e1c 100644
--- a/test/files/pos/tcpoly_seq.scala
+++ b/test/files/pos/tcpoly_seq.scala
@@ -6,40 +6,40 @@ trait HOSeq {
   // values implementing this interface, in order to provide more performant ways of building that structure
   trait Accumulator[+coll[x], elT] {
     def += (el: elT): Unit
-    def result: coll[elT]                     
+    def result: coll[elT]
   }
-  
-  
+
+
   // Iterable abstracts over the type of its structure as well as its elements (see PolyP's Bifunctor)
-  // m[x] is intentionally unbounded: fold can then be defined nicely 
-  // variance: if we write m[+x] instead of +m[+x], x is an invariant position because its enclosing type 
+  // m[x] is intentionally unbounded: fold can then be defined nicely
+  // variance: if we write m[+x] instead of +m[+x], x is an invariant position because its enclosing type
   //           is an invariant position -- should probably rule that out?
   trait Iterable[+m[+x], +t] {
     //def unit[a](orig: a): m[a]
     def iterator: Iterator[t]
-    
+
     // construct an empty accumulator that will produce the same structure as this iterable, with elements of type t
     def accumulator[t]: Accumulator[m, t]
-                                    
+
     def filter(p: t => Boolean): m[t] = {
       val buf = accumulator[t]
       val elems = iterator
       while (elems.hasNext) { val x = elems.next; if (p(x)) buf += x }
       buf.result
     }
-    
+
     def map[s](f: t => s): m[s] = {
       val buf = accumulator[s]
       val elems = iterator
       while (elems.hasNext) buf += f(elems.next)
       buf.result
     }
-    
+
     // flatMap is a more specialized map, it only works if the mapped function produces Iterable values,
     // which are then added to the result one by one
     // the compiler should be able to find the right accumulator (implicit buf) to build the result
     // to get concat, resColl = SingletonIterable, f = unit for SingletonIterable
-    def flatMap[resColl[+x] <: Iterable[resColl, x], s](f: t => resColl[s])(implicit buf: Accumulator[resColl, s]): resColl[s] = { 
+    def flatMap[resColl[+x] <: Iterable[resColl, x], s](f: t => resColl[s])(implicit buf: Accumulator[resColl, s]): resColl[s] = {
         // TODO:  would a viewbound for resColl[x] be better?
         // -- 2nd-order type params are not yet in scope in view bound
       val elems = iterator
@@ -48,9 +48,9 @@ trait HOSeq {
         while (elemss.hasNext) buf += elemss.next
       }
       buf.result
-    } 
+    }
   }
-  
+
   final class ListBuffer[A] {
     private var start: List[A] = Nil
     private var last: ::[A] = _
@@ -78,7 +78,7 @@ trait HOSeq {
       exported = !start.isEmpty
       start
     }
-    
+
     /** Clears the buffer contents.
      */
     def clear {
@@ -97,13 +97,13 @@ trait HOSeq {
       }
     }
   }
-  
+
   implicit def listAccumulator[elT]: Accumulator[List, elT] = new Accumulator[List, elT] {
     private[this] val buff = new ListBuffer[elT]
     def += (el: elT): Unit = buff += el
     def result: List[elT] = buff.toList
   }
-  
+
   trait List[+t] extends Iterable[List, t] {
     def head: t
     def tail: List[t]
@@ -121,14 +121,14 @@ trait HOSeq {
     // construct an empty accumulator that will produce the same structure as this iterable, with elements of type t
     def accumulator[t]: Accumulator[List, t] = listAccumulator[t]
   }
-  
+
   // TODO: the var tl approach does not seem to work because subtyping isn't fully working yet
   final case class ::[+b](hd: b, private val tl: List[b]) extends List[b] {
     def head = hd
     def tail = if(tl==null) this else tl // hack
     override def isEmpty: Boolean = false
   }
-  
+
   case object Nil extends List[Nothing] {
     def isEmpty = true
     def head: Nothing =
@@ -157,18 +157,18 @@ trait HOSeq {
     def filter(f: T=>Boolean): FilterResult
     def subseq(from: Int, to: Int): Subseq
     def flatMap[S <: Seq[K], K](f: T => S): S#Concat  // legal?
-    def concat(others: Seq[T]): Concat 
+    def concat(others: Seq[T]): Concat
      */
-     
+
 /*trait Iterator[t] {
   // @post hasAdvanced implies hasNext
   // model def hasAdvanced: Boolean
-  
+
   def hasNext: Boolean // pure
-  
+
   // @pre hasAdvanced
   def current: t       // pure
-  
+
   // @pre hasNext
   // @post hasAdvanced
   def advance: Unit
diff --git a/test/files/pos/tcpoly_seq_typealias.scala b/test/files/pos/tcpoly_seq_typealias.scala
index 0651ad9..fb48126 100644
--- a/test/files/pos/tcpoly_seq_typealias.scala
+++ b/test/files/pos/tcpoly_seq_typealias.scala
@@ -6,42 +6,42 @@ trait HOSeq {
   // values implementing this interface, in order to provide more performant ways of building that structure
   trait Accumulator[+coll[x], elT] {
     def += (el: elT): Unit
-    def result: coll[elT]                     
+    def result: coll[elT]
   }
-  
-  
+
+
   // Iterable abstracts over the type of its structure as well as its elements (see PolyP's Bifunctor)
-  // m[x] is intentionally unbounded: fold can then be defined nicely 
-  // variance: if we write m[+x] instead of +m[+x], x is an invariant position because its enclosing type 
+  // m[x] is intentionally unbounded: fold can then be defined nicely
+  // variance: if we write m[+x] instead of +m[+x], x is an invariant position because its enclosing type
   //           is an invariant position -- should probably rule that out?
   trait Iterable[+t] {
-    type m[+x] 
-    
+    type m[+x]
+
     //def unit[a](orig: a): m[a]
     def iterator: Iterator[t]
-    
+
     // construct an empty accumulator that will produce the same structure as this iterable, with elements of type t
     def accumulator[t]: Accumulator[m, t]
-                                    
+
     def filter(p: t => Boolean): m[t] = {
       val buf = accumulator[t]
       val elems = iterator
       while (elems.hasNext) { val x = elems.next; if (p(x)) buf += x }
       buf.result
     }
-    
+
     def map[s](f: t => s): m[s] = {
       val buf = accumulator[s]
       val elems = iterator
       while (elems.hasNext) buf += f(elems.next)
       buf.result
     }
-    
+
     // flatMap is a more specialized map, it only works if the mapped function produces Iterable values,
     // which are then added to the result one by one
     // the compiler should be able to find the right accumulator (implicit buf) to build the result
     // to get concat, resColl = SingletonIterable, f = unit for SingletonIterable
-    def flatMap[resColl[+x] <: Iterable[x], s](f: t => resColl[s])(implicit buf: Accumulator[resColl, s]): resColl[s] = { 
+    def flatMap[resColl[+x] <: Iterable[x], s](f: t => resColl[s])(implicit buf: Accumulator[resColl, s]): resColl[s] = {
         // TODO:  would a viewbound for resColl[x] be better?
         // -- 2nd-order type params are not yet in scope in view bound
       val elems = iterator
@@ -50,9 +50,9 @@ trait HOSeq {
         while (elemss.hasNext) buf += elemss.next
       }
       buf.result
-    } 
+    }
   }
-  
+
   final class ListBuffer[A] {
     private var start: List[A] = Nil
     private var last: ::[A] = _
@@ -80,7 +80,7 @@ trait HOSeq {
       exported = !start.isEmpty
       start
     }
-    
+
     /** Clears the buffer contents.
      */
     def clear {
@@ -99,16 +99,16 @@ trait HOSeq {
       }
     }
   }
-  
+
   implicit def listAccumulator[elT]: Accumulator[List, elT] = new Accumulator[List, elT] {
     private[this] val buff = new ListBuffer[elT]
     def += (el: elT): Unit = buff += el
     def result: List[elT] = buff.toList
   }
-  
+
   trait List[+t] extends Iterable[t] {
     type m[+x] = List[x]
-  
+
     def head: t
     def tail: List[t]
     def isEmpty: Boolean
@@ -125,14 +125,14 @@ trait HOSeq {
     // construct an empty accumulator that will produce the same structure as this iterable, with elements of type t
     def accumulator[t]: Accumulator[List, t] = listAccumulator[t]
   }
-  
+
   // TODO: the var tl approach does not seem to work because subtyping isn't fully working yet
   final case class ::[+b](hd: b, private val tl: List[b]) extends List[b] {
     def head = hd
     def tail = if(tl==null) this else tl // hack
     override def isEmpty: Boolean = false
   }
-  
+
   case object Nil extends List[Nothing] {
     def isEmpty = true
     def head: Nothing =
diff --git a/test/files/pos/tcpoly_subst.scala b/test/files/pos/tcpoly_subst.scala
index f8ddb9a..88cc4d0 100644
--- a/test/files/pos/tcpoly_subst.scala
+++ b/test/files/pos/tcpoly_subst.scala
@@ -1,4 +1,4 @@
 object test {
-  def make[m[x], b]: m[b] = error("foo")
+  def make[m[x], b]: m[b] = sys.error("foo")
   val lst: List[Int] = make[List, Int]
 }
diff --git a/test/files/pos/tcpoly_variance_pos.scala b/test/files/pos/tcpoly_variance_pos.scala
index b641716..b63abce 100644
--- a/test/files/pos/tcpoly_variance_pos.scala
+++ b/test/files/pos/tcpoly_variance_pos.scala
@@ -1,7 +1,7 @@
 class A[m[+x]] {
- def str: m[Object] = error("foo")
+ def str: m[Object] = sys.error("foo")
 }
 
 class B[m[+x]] extends A[m] {
- override def str: m[String]  = error("foo")
+ override def str: m[String]  = sys.error("foo")
 }
diff --git a/test/files/pos/tcpoly_wildcards.scala b/test/files/pos/tcpoly_wildcards.scala
index d3bb86b..f6d1b66 100644
--- a/test/files/pos/tcpoly_wildcards.scala
+++ b/test/files/pos/tcpoly_wildcards.scala
@@ -1,3 +1,3 @@
 trait test[b[_,_]] {
-  def moo[a[_, _]] = error("a")
+  def moo[a[_, _]] = sys.error("a")
 }
diff --git a/test/files/pos/ted.scala b/test/files/pos/ted.scala
index d8ae64f..314f109 100644
--- a/test/files/pos/ted.scala
+++ b/test/files/pos/ted.scala
@@ -9,7 +9,7 @@ object App
         case (b, e) => b * exponentiate(b, e - 1)
       }
 
- 
+
 
   def main(args : Array[String]) =
     System.out.println(exponentiate(2, 2))
diff --git a/test/files/pos/test5.scala b/test/files/pos/test5.scala
index b04de5d..4dbafc9 100644
--- a/test/files/pos/test5.scala
+++ b/test/files/pos/test5.scala
@@ -53,7 +53,7 @@ object test {
 
       // Check type j.P
       j.chk_ip(val_mp);
-      j.chk_ip(val_np); 
+      j.chk_ip(val_np);
 
       // Check type i.X
       i.chk_ix(i.val_ix);
@@ -63,6 +63,6 @@ object test {
       // Check j.X
       j.chk_ix(j.val_ix);
       j.chk_ix(j.val_jx);
-      j.chk_ix(val_njx); 
+      j.chk_ix(val_njx);
   }
 }
diff --git a/test/files/pos/test5refine.scala b/test/files/pos/test5refine.scala
index 290449c..5459b3b 100644
--- a/test/files/pos/test5refine.scala
+++ b/test/files/pos/test5refine.scala
@@ -60,7 +60,7 @@ object test {
 
       // Check type j.P
       j.chk_ip(val_mp);
-      j.chk_ip(val_np); 
+      j.chk_ip(val_np);
 
       // Check type i.X
       i.chk_ix(i.val_ix);
@@ -70,6 +70,6 @@ object test {
       // Check j.X
       j.chk_ix(j.val_ix);
       j.chk_ix(j.val_jx);
-      j.chk_ix(val_njx); 
+      j.chk_ix(val_njx);
   }
 }
diff --git a/test/files/pos/testCoercionThis.scala b/test/files/pos/testCoercionThis.scala
index 8bbfdcd..5631b33 100644
--- a/test/files/pos/testCoercionThis.scala
+++ b/test/files/pos/testCoercionThis.scala
@@ -11,9 +11,9 @@ object Test {
     // PP: is that something we really want to work? Seems like sketchville.
     //
     // These work, so I moved this out of pending.
-    def testCoercionThis1 = this.baz 
+    def testCoercionThis1 = this.baz
     def testCoercionThis2 = (this: Foo).baz
   }
-  
-  class Bar { def baz = System.out.println("baz") } 
+
+  class Bar { def baz = System.out.println("baz") }
 }
diff --git a/test/files/pos/thistypes.scala b/test/files/pos/thistypes.scala
index 26339e0..7319cc1 100644
--- a/test/files/pos/thistypes.scala
+++ b/test/files/pos/thistypes.scala
@@ -5,4 +5,4 @@ trait B {
 
 trait C extends B {
   def foo: C.this.I;
-} 
+}
diff --git a/test/files/pos/ticket0137.scala b/test/files/pos/ticket0137.scala
index 72f955f..94ef8e4 100644
--- a/test/files/pos/ticket0137.scala
+++ b/test/files/pos/ticket0137.scala
@@ -1,7 +1,7 @@
-trait AbsM {    
-  abstract class MonadCompanion[M[_]] 
+trait AbsM {
+  abstract class MonadCompanion[M[_]]
   abstract class AbsMonadCompanion extends MonadCompanion[AM] {
-    def newTag: Int 
+    def newTag: Int
   }
 
   type AM[_] // to trigger the bug, this must be an abstract type member that comes after the reference to it
diff --git a/test/files/pos/ticket2251.scala b/test/files/pos/ticket2251.scala
index b3afee4..c220e85 100644
--- a/test/files/pos/ticket2251.scala
+++ b/test/files/pos/ticket2251.scala
@@ -22,4 +22,18 @@ lub of List(D, C) is B[_2] forSome { type _2 >: D with C{} <: B[_1] forSome { ty
   // should be: B[X] forSome {type X <: B[X]} -- can this be done automatically? for now, just detect f-bounded polymorphism and fall back to more coarse approximation
 
   val data: List[A] = List(new C, new D)
+
+  val data2 = List(new C, new D)
+
+  val data3: List[B[X] forSome { type X <: B[_ <: A] }] = List(new C, new D)
+
+  // Not yet --
+  // val data4: List[B[X] forSome { type X <: B[X] }] = List(new C, new D)
+  // <console>:7: error: type mismatch;
+  //  found   : List[B[_ >: D with C <: B[_ >: D with C <: A]]]
+  //  required: List[B[X] forSome { type X <: B[X] }]
+  //        val data4: List[B[X] forSome { type X <: B[X] }] = List(new C, new D)
+
+  // works
+  val data5 = List[B[X] forSome { type X <: B[X] }](new C, new D)
 }
diff --git a/test/files/pos/trait-force-info.scala b/test/files/pos/trait-force-info.scala
index e01d225..c2b3386 100644
--- a/test/files/pos/trait-force-info.scala
+++ b/test/files/pos/trait-force-info.scala
@@ -8,7 +8,7 @@ trait MyContextTrees {
   val self: Global
   val NoContext = self.analyzer.NoContext
 }
-// 
+//
 // error: java.lang.AssertionError: assertion failed: trait Contexts.NoContext$ linkedModule: <none>List()
 //  at scala.Predef$.assert(Predef.scala:160)
 //  at scala.tools.nsc.symtab.classfile.ClassfileParser$innerClasses$.innerSymbol$1(ClassfileParser.scala:1211)
diff --git a/test/files/pos/trait-parents.scala b/test/files/pos/trait-parents.scala
index f6a2688..c5908cd 100644
--- a/test/files/pos/trait-parents.scala
+++ b/test/files/pos/trait-parents.scala
@@ -6,11 +6,11 @@ trait Quux
 object Test {
   def f(x: Bip) = 1
   def g1(x: Foo with Bip) = f(x)
-  
+
   def main(args: Array[String]): Unit = {
-    f(new Bip with Foo { }) 
+    f(new Bip with Foo { })
     f(new Foo with Bip { })
-    g1(new Bip with Foo { }) 
+    g1(new Bip with Foo { })
     g1(new Foo with Bip { })
   }
 }
diff --git a/test/files/pos/traits.scala b/test/files/pos/traits.scala
index bd64d72..3c6f943 100644
--- a/test/files/pos/traits.scala
+++ b/test/files/pos/traits.scala
@@ -20,8 +20,8 @@ object Test {
   trait BorderedColoredShape extends Shape with Bordered with Colored {
     override def equals(other: Any) = other match {
       case that: BorderedColoredShape => (
-	super.equals(that) && 
-        super[Bordered].equals(that) && 
+        super.equals(that) &&
+        super[Bordered].equals(that) &&
         super[Colored].equals(that))
       case _ => false
     }
diff --git a/test/files/pos/typealias_dubious.scala b/test/files/pos/typealias_dubious.scala
index 587453a..cdba1a6 100644
--- a/test/files/pos/typealias_dubious.scala
+++ b/test/files/pos/typealias_dubious.scala
@@ -1,15 +1,15 @@
 class MailBox {
-  //class Message 
+  //class Message
   type Message = AnyRef
-}   
-   
+}
+
 abstract class Actor {
   private val in = new MailBox
 
-  def send(msg: in.Message) =  error("foo")
+  def send(msg: in.Message) =  sys.error("foo")
 
-  def unstable: Actor = error("foo")
+  def unstable: Actor = sys.error("foo")
 
-  def dubiousSend(msg: MailBox#Message) = 
+  def dubiousSend(msg: MailBox#Message) =
     unstable.send(msg) // in.Message becomes unstable.Message, but that's ok since Message is a concrete type member
-}      
+}
diff --git a/test/files/pos/typealiases.scala b/test/files/pos/typealiases.scala
index 5974921..93d1dce 100644
--- a/test/files/pos/typealiases.scala
+++ b/test/files/pos/typealiases.scala
@@ -3,18 +3,18 @@ package foo
 trait Test[T] {
   type Check[T] = Array[T] => Unit;
   type MyPair[S] = (T, S)
-                        
+
   val pair1: (T, Int)
   val pair: MyPair[Int] = pair1
-  
+
   def check(xs: Array[T], c: Check[T]) = c(xs)
-  def check2[S](xs: Array[S], c: Check[S]) = c(xs)            
-}                                                             
+  def check2[S](xs: Array[S], c: Check[S]) = c(xs)
+}
 
-object main extends Test[Int] {     
-  val pair1 = (1,1)            
+object main extends Test[Int] {
+  val pair1 = (1,1)
 
-  implicit def topair(x: Int): Pair[Int, Int] = (x,x)
-  val pair2: MyPair[Int] = 1    
+  implicit def topair(x: Int): Tuple2[Int, Int] = (x,x)
+  val pair2: MyPair[Int] = 1
   val x: Short = 1
 }
diff --git a/test/files/pos/unapplyNeedsMemberType.scala b/test/files/pos/unapplyNeedsMemberType.scala
index 2581512..3a96e18 100644
--- a/test/files/pos/unapplyNeedsMemberType.scala
+++ b/test/files/pos/unapplyNeedsMemberType.scala
@@ -8,7 +8,7 @@ trait Gunk[a] {
     def unapply(s: Seq) = unapply_Cons(s)
   }
   def unapply_Cons(s: Any): Option[Tuple2[a, Seq]]
-}  
+}
 
 class Join[a] extends Gunk[a] {
   type Seq = JoinSeq
@@ -19,7 +19,7 @@ class Join[a] extends Gunk[a] {
   def append(s1: Seq, s2: Seq): Seq = s1 // mock implementation
 
   def unapply_Cons(s: Any) = s match {
-    case App(Cons(x, xs), ys) => Some(Pair(x, append(xs, ys)))
+    case App(Cons(x, xs), ys) => Some((x, append(xs, ys)))
     case _ => null
   }
 }
diff --git a/test/files/pos/unapplySeq.scala b/test/files/pos/unapplySeq.scala
index 4d5409c..6d13cc8 100644
--- a/test/files/pos/unapplySeq.scala
+++ b/test/files/pos/unapplySeq.scala
@@ -4,7 +4,7 @@ object FooSeq {
       val y = x.asInstanceOf[Bar]
       Some(y.size, y.name)
     } else None
-  } 
+  }
 
   def main(args:Array[String]) = {
     val b = new Bar
diff --git a/test/files/pos/unapplyVal.scala b/test/files/pos/unapplyVal.scala
index 0d6394a..368b9b9 100644
--- a/test/files/pos/unapplyVal.scala
+++ b/test/files/pos/unapplyVal.scala
@@ -10,7 +10,7 @@ class Buffer {
 
   def joinPat(x: Any): Unit = {
     x match {
-      case Put => 
+      case Put =>
       case Put(y) =>
         println("returning "+y)
     }
@@ -29,7 +29,7 @@ object unapplyJoins extends App { // bug #1257
     object Get extends Sync
 
     val jp: PartialFunction[Any, Any] = {
-      case Get() => 
+      case Get() =>
     }
   }
 
diff --git a/test/files/pos/valdefs.scala b/test/files/pos/valdefs.scala
index 85ffa13..c8f78cd 100644
--- a/test/files/pos/valdefs.scala
+++ b/test/files/pos/valdefs.scala
@@ -11,6 +11,6 @@ object test {
     }
 
     abstract class Sub2() extends Base() {
-      override val Pair(x, y) = Pair("abc", 2.0);
+      override val (x, y) = ("abc", 2.0);
     }
 }
diff --git a/test/files/pos/variances-flip.scala b/test/files/pos/variances-flip.scala
new file mode 100644
index 0000000..c3ea7b5
--- /dev/null
+++ b/test/files/pos/variances-flip.scala
@@ -0,0 +1,7 @@
+trait Foo[-A, +B, -C, +D] {
+  private[this] def b: B = ???
+  private[this] def d: D = ???
+
+  def f(p1: B => A, p2: D => C) = g(p1(b), p2(d))
+  def g(x: A, y: C) = ((b, d))
+}
diff --git a/test/files/pos/variances-local.scala b/test/files/pos/variances-local.scala
new file mode 100644
index 0000000..35e3950
--- /dev/null
+++ b/test/files/pos/variances-local.scala
@@ -0,0 +1,7 @@
+class Foo1[+T] {
+  private[this] type MyType = T
+}
+
+class Foo2[+T] {
+  protected[this] type MyType = T
+}
diff --git a/test/files/pos/virtpatmat_anonfun_for.flags b/test/files/pos/virtpatmat_anonfun_for.flags
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/pos/virtpatmat_binding_opt.scala b/test/files/pos/virtpatmat_binding_opt.scala
index 962e3d7..8ec931f 100644
--- a/test/files/pos/virtpatmat_binding_opt.scala
+++ b/test/files/pos/virtpatmat_binding_opt.scala
@@ -4,8 +4,8 @@ class Test {
     case that: Test2 =>
       println(that)
       this
-    case _ => error("meh")
+    case _ => sys.error("meh")
   }
 }
 
-class Test2 extends Test
\ No newline at end of file
+class Test2 extends Test
diff --git a/test/files/pos/virtpatmat_castbinder.scala b/test/files/pos/virtpatmat_castbinder.scala
index 53e937e..be26963 100644
--- a/test/files/pos/virtpatmat_castbinder.scala
+++ b/test/files/pos/virtpatmat_castbinder.scala
@@ -6,7 +6,7 @@ trait IntMapIterator[V, T] {
   def valueOf(tip: Tip[V]): T
   def pop: IntMap[V]
 
-  def next: T = 
+  def next: T =
     pop match {
       case Bin(t@Tip(_)) => {
         valueOf(t)
diff --git a/test/files/pos/virtpatmat_exist1.scala b/test/files/pos/virtpatmat_exist1.scala
index ccb9129..6cad017 100644
--- a/test/files/pos/virtpatmat_exist1.scala
+++ b/test/files/pos/virtpatmat_exist1.scala
@@ -13,7 +13,7 @@ object Test {
     // without type ascription for the one in the body of the last flatmap of each alternative, type inference borks on the existentials
     // def splitArray[T >: Nothing <: Any](ad: Array[Iterable[T]]): Any = { import OptionMatching._
     //   runOrElse(ad.apply(0))(((x1: Iterable[T]) => (
-    //     or(((x4: Iterable[T]) => one(null)), 
+    //     or(((x4: Iterable[T]) => one(null)),
     //       guard(x1.isInstanceOf[Iterable[T] with Test.HashMapCollision1[_,_]], x1.asInstanceOf[Iterable[T] with Test.HashMapCollision1[_,_]]).flatMap(((x2: Iterable[T] with Test.HashMapCollision1[_,_]) => one(x2))),
     //       guard(x1.isInstanceOf[Test.HashSetCollision1[_]], x1.asInstanceOf[Iterable[T] with Test.HashSetCollision1[_]]).flatMap(((x3: Iterable[T] with Test.HashSetCollision1[_]) => one(x3)))): Option[Any]).orElse(
     //     (zero: Option[Any])))
diff --git a/test/files/pos/virtpatmat_exist2.scala b/test/files/pos/virtpatmat_exist2.scala
index b0e4c66..ee18607 100644
--- a/test/files/pos/virtpatmat_exist2.scala
+++ b/test/files/pos/virtpatmat_exist2.scala
@@ -9,11 +9,11 @@ object Test {
   // what's the _$1 doing there?
   // def grow[T >: Nothing <: Any]: ParseResult[T] = {
   //   import OptionMatching._
-  //   runOrElse[MemoEntry[T], ParseResult[T]]((null: MemoEntry[T]))(((x1: MemoEntry[T]) => 
-  //     (MemoEntry.unapply[T](x1).flatMap[ParseResult[T]](((x4: Either[Nothing,ParseResult[_]]) => 
-  //       guard[Right[Nothing,ParseResult[_]]](x4.isInstanceOf[Right[Nothing,ParseResult[_]]], x4.asInstanceOf[Right[Nothing,ParseResult[_]]]).flatMap[ParseResult[T]](((cp3: Right[Nothing,ParseResult[_]]) => 
-  //         scala.Right.unapply[Nothing, ParseResult[_]](cp3).flatMap[ParseResult[T]](((x5: ParseResult[_]) => 
-  //           guard[ParseResult[_$1]](x5.ne(null), x5.asInstanceOf[ParseResult[_]]).flatMap[ParseResult[T]](((x6: ParseResult[_]) => 
+  //   runOrElse[MemoEntry[T], ParseResult[T]]((null: MemoEntry[T]))(((x1: MemoEntry[T]) =>
+  //     (MemoEntry.unapply[T](x1).flatMap[ParseResult[T]](((x4: Either[Nothing,ParseResult[_]]) =>
+  //       guard[Right[Nothing,ParseResult[_]]](x4.isInstanceOf[Right[Nothing,ParseResult[_]]], x4.asInstanceOf[Right[Nothing,ParseResult[_]]]).flatMap[ParseResult[T]](((cp3: Right[Nothing,ParseResult[_]]) =>
+  //         scala.Right.unapply[Nothing, ParseResult[_]](cp3).flatMap[ParseResult[T]](((x5: ParseResult[_]) =>
+  //           guard[ParseResult[_$1]](x5.ne(null), x5.asInstanceOf[ParseResult[_]]).flatMap[ParseResult[T]](((x6: ParseResult[_]) =>
   //             one[ParseResult[T]](x6.asInstanceOf[ParseResult[T]]))))))))): Option[ParseResult[T]]
   //     ).orElse[ParseResult[T]]((zero: Option[ParseResult[T]]))))
   // }
diff --git a/test/files/pos/virtpatmat_exist3.scala b/test/files/pos/virtpatmat_exist3.scala
index c8f8738..94385f3 100644
--- a/test/files/pos/virtpatmat_exist3.scala
+++ b/test/files/pos/virtpatmat_exist3.scala
@@ -4,8 +4,8 @@ class ReferenceQueue[T] {
       case null => null
     }
 
-  // def wrapper(jref: ReferenceQueue[_]): ReferenceQueue[T] = OptionMatching.runOrElse(jref)(((x1: ReferenceQueue[_]) => 
-  //   (OptionMatching.guard(null.==(x1), x1.asInstanceOf[ReferenceQueue[_]]).flatMap(((x2: ReferenceQueue[_]) => 
+  // def wrapper(jref: ReferenceQueue[_]): ReferenceQueue[T] = OptionMatching.runOrElse(jref)(((x1: ReferenceQueue[_]) =>
+  //   (OptionMatching.guard(null.==(x1), x1.asInstanceOf[ReferenceQueue[_]]).flatMap(((x2: ReferenceQueue[_]) =>
   //     OptionMatching.one(null))): Option[ReferenceQueue[T]]).orElse(
   //   (OptionMatching.zero: Option[ReferenceQueue[T]])))
   // )
diff --git a/test/files/pos/virtpatmat_gadt_array.scala b/test/files/pos/virtpatmat_gadt_array.scala
index 27e72aa..f3332a8 100644
--- a/test/files/pos/virtpatmat_gadt_array.scala
+++ b/test/files/pos/virtpatmat_gadt_array.scala
@@ -4,12 +4,12 @@ object Test {
     case x: Array[AnyRef]  => refArrayOps[AnyRef](x).asInstanceOf[ArrayOps[T]]
     case null              => null
   }
-  // def genericArrayOps[T >: Nothing <: Any](xs: Array[T]): scala.collection.mutable.ArrayOps[T] 
-  //   = OptionMatching.runOrElse(xs)(((x1: Array[T]) => 
-  //     ((OptionMatching.guard(x1.isInstanceOf[Array[AnyRef]], x1.asInstanceOf[Array[T] with Array[AnyRef]]).flatMap(((x2: Array[T] with Array[AnyRef]) => 
+  // def genericArrayOps[T >: Nothing <: Any](xs: Array[T]): scala.collection.mutable.ArrayOps[T]
+  //   = OptionMatching.runOrElse(xs)(((x1: Array[T]) =>
+  //     ((OptionMatching.guard(x1.isInstanceOf[Array[AnyRef]], x1.asInstanceOf[Array[T] with Array[AnyRef]]).flatMap(((x2: Array[T] with Array[AnyRef]) =>
   //       OptionMatching.one(Test.this.refArrayOps[AnyRef](x2).asInstanceOf[scala.collection.mutable.ArrayOps[T]]))): Option[scala.collection.mutable.ArrayOps[T]]).orElse(
-  //     (OptionMatching.guard(null.==(x1), x1.asInstanceOf[Array[T]]).flatMap(((x3: Array[T]) => 
+  //     (OptionMatching.guard(null.==(x1), x1.asInstanceOf[Array[T]]).flatMap(((x3: Array[T]) =>
   //         OptionMatching.one(null))): Option[scala.collection.mutable.ArrayOps[T]])): Option[scala.collection.mutable.ArrayOps[T]]).orElse((OptionMatching.zero: Option[scala.collection.mutable.ArrayOps[T]]))))
-  
+
   def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps.ofRef[T](xs)
 }
\ No newline at end of file
diff --git a/test/files/positions/Anon.scala b/test/files/positions/Anon.scala
index 65eb7ae..940fff0 100644
--- a/test/files/positions/Anon.scala
+++ b/test/files/positions/Anon.scala
@@ -2,7 +2,7 @@ object Anon {
   trait Foo {
 	val bar : Int
   }
-  
+
   def foo = new Foo {
 	override val bar = 23
   }
diff --git a/test/files/positions/Enclosing1.scala b/test/files/positions/Enclosing1.scala
index 7c8fbaf..e170187 100644
--- a/test/files/positions/Enclosing1.scala
+++ b/test/files/positions/Enclosing1.scala
@@ -1,5 +1,5 @@
 object Enclosing1 {
   do {
-	  
+
   } while (true)
 }
diff --git a/test/files/positions/ExcludedPrefix1.scala b/test/files/positions/ExcludedPrefix1.scala
index 72d9756..b3182ea 100644
--- a/test/files/positions/ExcludedPrefix1.scala
+++ b/test/files/positions/ExcludedPrefix1.scala
@@ -5,16 +5,16 @@ object ExcludedPrefix1 {
   case
   object
   BLAH
-  
+
   val
   a = 1
-  
+
   var
   b = 2
-  
+
   def
   c = 23
-  
+
   private
   def
   d = 23
@@ -22,20 +22,20 @@ object ExcludedPrefix1 {
   lazy
   val
   e = 23
-  
+
   private
   type
   f = Int
-  
+
   val
   g,
   h = 23
-  
+
   val
   (i,
    j) = (0, 0)
-   
-  val Pair(
+
+  val (
    k,
    l) = (0, 0)
 }
diff --git a/test/files/positions/Overlap3.scala b/test/files/positions/Overlap3.scala
index 657c12d..4c5f8af 100644
--- a/test/files/positions/Overlap3.scala
+++ b/test/files/positions/Overlap3.scala
@@ -1,3 +1,3 @@
 object Overlap3 {
-  val (a, b) = (0, 0) 
+  val (a, b) = (0, 0)
 }
diff --git a/test/files/positions/Overlap4.scala b/test/files/positions/Overlap4.scala
index 0049293..f548372 100644
--- a/test/files/positions/Overlap4.scala
+++ b/test/files/positions/Overlap4.scala
@@ -1,3 +1,3 @@
 object Overlap4 {
-  val Pair(a, b) = (0, 0)
+  val (a, b) = (0, 0)
 }
diff --git a/test/files/positions/Scaladoc2.scala b/test/files/positions/Scaladoc2.scala
index 78bc4ac..e52263d 100644
--- a/test/files/positions/Scaladoc2.scala
+++ b/test/files/positions/Scaladoc2.scala
@@ -4,13 +4,13 @@ object Scaladoc2 {
      * Foo
      */
     def g {}
-    
+
     /*
      * Blah blah
      */
     def h{}
     h
   }
-  
+
   def h {}
 }
diff --git a/test/files/positions/Scaladoc3.scala b/test/files/positions/Scaladoc3.scala
index bb9d66f..c331b7e 100644
--- a/test/files/positions/Scaladoc3.scala
+++ b/test/files/positions/Scaladoc3.scala
@@ -3,6 +3,6 @@ object Scaladoc3 {
    * Foo
    */
   import scala.collection.mutable.ArrayBuffer
-  
+
   def f {}
 }
diff --git a/test/files/positions/Scaladoc4.scala b/test/files/positions/Scaladoc4.scala
index f613dda..133cde1 100644
--- a/test/files/positions/Scaladoc4.scala
+++ b/test/files/positions/Scaladoc4.scala
@@ -3,6 +3,6 @@ object Scaladoc4 {
    * Foo
    */
   2+2
-  
+
   def f {}
 }
diff --git a/test/files/positions/Scaladoc6.scala b/test/files/positions/Scaladoc6.scala
index 5c230ed..8beda62 100644
--- a/test/files/positions/Scaladoc6.scala
+++ b/test/files/positions/Scaladoc6.scala
@@ -5,6 +5,6 @@ object Scaladoc6 {
      */
     val i = 23
   }
-  
+
   def f {}
 }
diff --git a/test/files/positions/Scaladoc7.scala b/test/files/positions/Scaladoc7.scala
index 6175222..0198d4d 100644
--- a/test/files/positions/Scaladoc7.scala
+++ b/test/files/positions/Scaladoc7.scala
@@ -2,5 +2,5 @@ object Scaladoc7 {
   /**
    * Foo
    */
-  val Pair(i, j) = (1, 2)
+  val (i, j) = (1, 2)
 }
diff --git a/test/files/presentation/callcc-interpreter.check b/test/files/presentation/callcc-interpreter.check
index 9a92c40..4bf68b3 100644
--- a/test/files/presentation/callcc-interpreter.check
+++ b/test/files/presentation/callcc-interpreter.check
@@ -1,94 +1,88 @@
 reload: CallccInterpreter.scala
 
-askTypeCompletion at CallccInterpreter.scala(51,38)
+askTypeCompletion at CallccInterpreter.scala(51,34)
 ================================================================================
-[response] askTypeCompletion at (51,38)
-retrieved 64 members
-[accessible:  true] `class AddcallccInterpreter.Add`
-[accessible:  true] `class AppcallccInterpreter.App`
-[accessible:  true] `class CcccallccInterpreter.Ccc`
-[accessible:  true] `class ConcallccInterpreter.Con`
-[accessible:  true] `class FuncallccInterpreter.Fun`
-[accessible:  true] `class LamcallccInterpreter.Lam`
-[accessible:  true] `class McallccInterpreter.M`
-[accessible:  true] `class NumcallccInterpreter.Num`
-[accessible:  true] `class VarcallccInterpreter.Var`
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ->[B](y: B)(callccInterpreter.type, B)`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method add(a: callccInterpreter.Value, b: callccInterpreter.Value)callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value]`
-[accessible:  true] `method apply(a: callccInterpreter.Value, b: callccInterpreter.Value)callccInterpreter.M[callccInterpreter.Value]`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method callCC[A](h: (A => callccInterpreter.M[A]) => callccInterpreter.M[A])callccInterpreter.M[A]`
-[accessible:  true] `method clone()Object`
-[accessible:  true] `method ensuring(cond: Boolean)callccInterpreter.type`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)callccInterpreter.type`
-[accessible:  true] `method ensuring(cond: callccInterpreter.type => Boolean)callccInterpreter.type`
-[accessible:  true] `method ensuring(cond: callccInterpreter.type => Boolean, msg: => Any)callccInterpreter.type`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method finalize()Unit`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method id[A]=> A => A`
-[accessible:  true] `method interp(t: callccInterpreter.Term, e: callccInterpreter.Environment)callccInterpreter.M[callccInterpreter.Value]`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method lookup(x: callccInterpreter.Name, e: callccInterpreter.Environment)callccInterpreter.M[callccInterpreter.Value]`
-[accessible:  true] `method main(args: Array[String])Unit`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method showM(m: callccInterpreter.M[callccInterpreter.Value])String`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method test(t: callccInterpreter.Term)String`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method unitM[A](a: A)callccInterpreter.M[A]`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method x=> callccInterpreter.type`
-[accessible:  true] `method →[B](y: B)(callccInterpreter.type, B)`
-[accessible:  true] `object WrongcallccInterpreter.Wrong.type`
-[accessible:  true] `trait TermcallccInterpreter.Term`
-[accessible:  true] `trait ValuecallccInterpreter.Value`
-[accessible:  true] `type AnswercallccInterpreter.Answer`
-[accessible:  true] `type EnvironmentcallccInterpreter.Environment`
-[accessible:  true] `type NamecallccInterpreter.Name`
-[accessible:  true] `value __leftOfArrowcallccInterpreter.type`
-[accessible:  true] `value __resultOfEnsuringcallccInterpreter.type`
-[accessible:  true] `value selfAny`
-[accessible:  true] `value term0callccInterpreter.App`
-[accessible:  true] `value term1callccInterpreter.App`
-[accessible:  true] `value term2callccInterpreter.Add`
+[response] askTypeCompletion at (51,34)
+retrieved 57 members
+abstract trait Term extends AnyRef
+abstract trait Value extends AnyRef
+case class Add extends callccInterpreter.Term with Product with Serializable
+case class App extends callccInterpreter.Term with Product with Serializable
+case class Ccc extends callccInterpreter.Term with Product with Serializable
+case class Con extends callccInterpreter.Term with Product with Serializable
+case class Fun extends callccInterpreter.Value with Product with Serializable
+case class Lam extends callccInterpreter.Term with Product with Serializable
+case class M[A] extends Product with Serializable
+case class Num extends callccInterpreter.Value with Product with Serializable
+case class Var extends callccInterpreter.Term with Product with Serializable
+case object Wrong
+def +(other: String): String
+def ->[B](y: B): (callccInterpreter.type, B)
+def add(a: callccInterpreter.Value,b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value]
+def apply(a: callccInterpreter.Value,b: callccInterpreter.Value): callccInterpreter.M[callccInterpreter.Value]
+def callCC[A](h: (A => callccInterpreter.M[A]) => callccInterpreter.M[A]): callccInterpreter.M[A]
+def ensuring(cond: Boolean): callccInterpreter.type
+def ensuring(cond: Boolean,msg: => Any): callccInterpreter.type
+def ensuring(cond: callccInterpreter.type => Boolean): callccInterpreter.type
+def ensuring(cond: callccInterpreter.type => Boolean,msg: => Any): callccInterpreter.type
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def id[A]: A => A
+def interp(t: callccInterpreter.Term,e: callccInterpreter.Environment): callccInterpreter.M[callccInterpreter.Value]
+def lookup(x: callccInterpreter.Name,e: callccInterpreter.Environment): callccInterpreter.M[callccInterpreter.Value]
+def main(args: Array[String]): Unit
+def showM(m: callccInterpreter.M[callccInterpreter.Value]): String
+def test(t: callccInterpreter.Term): String
+def toString(): String
+def unitM[A](a: A): callccInterpreter.M[A]
+def →[B](y: B): (callccInterpreter.type, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private[this] val term0: callccInterpreter.App
+private[this] val term1: callccInterpreter.App
+private[this] val term2: callccInterpreter.Add
+protected[package lang] def clone(): Object
+protected[package lang] def finalize(): Unit
+type Answer = callccInterpreter.Answer
+type Environment = callccInterpreter.Environment
+type Name = callccInterpreter.Name
 ================================================================================
 
 askType at CallccInterpreter.scala(14,21)
 ================================================================================
-[response] askTypeAt at (14,21)
-def unitM[A >: Nothing <: Any](a: A): callccInterpreter.M[A] = callccInterpreter.this.M.apply[A](((c: A => callccInterpreter.Answer) => c.apply(a)))
+[response] askTypeAt (14,21)
+def unitM[A](a: A): callccInterpreter.M[A] = callccInterpreter.this.M.apply[A](((c: A => callccInterpreter.Answer) => c.apply(a)))
 ================================================================================
 
 askType at CallccInterpreter.scala(16,12)
 ================================================================================
-[response] askTypeAt at (16,12)
-def id[A >: Nothing <: Any]: A => A = ((x: A) => x)
+[response] askTypeAt (16,12)
+def id[A]: A => A = ((x: A) => x)
 ================================================================================
 
 askType at CallccInterpreter.scala(17,25)
 ================================================================================
-[response] askTypeAt at (17,25)
+[response] askTypeAt (17,25)
 def showM(m: callccInterpreter.M[callccInterpreter.Value]): String = m.in.apply(callccInterpreter.this.id[callccInterpreter.Value]).toString()
 ================================================================================
 
 askType at CallccInterpreter.scala(50,30)
 ================================================================================
-[response] askTypeAt at (50,30)
-def add(a: callccInterpreter.Value, b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value] = scala.this.Predef.Pair.apply[callccInterpreter.Value, callccInterpreter.Value](a, b) match {
-  case scala.this.Predef.Pair.unapply[callccInterpreter.Value, callccInterpreter.Value](<unapply-selector>) <unapply> ((n: Int)callccInterpreter.Num((m @ _)), (n: Int)callccInterpreter.Num((n @ _))) => this.unitM[callccInterpreter.Num](callccInterpreter.this.Num.apply(m.+(n)))
+[response] askTypeAt (50,30)
+def add(a: callccInterpreter.Value, b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value] = scala.Tuple2.apply[callccInterpreter.Value, callccInterpreter.Value](a, b) match {
+  case (_1: callccInterpreter.Value, _2: callccInterpreter.Value)(callccInterpreter.Value, callccInterpreter.Value)((n: Int)callccInterpreter.Num((m @ _)), (n: Int)callccInterpreter.Num((n @ _))) => this.unitM[callccInterpreter.Num](callccInterpreter.this.Num.apply(m.+(n)))
   case _ => callccInterpreter.this.unitM[callccInterpreter.Wrong.type](callccInterpreter.this.Wrong)
 }
 ================================================================================
diff --git a/test/files/presentation/callcc-interpreter/src/CallccInterpreter.scala b/test/files/presentation/callcc-interpreter/src/CallccInterpreter.scala
index 0e96dfa..d498fe0 100644
--- a/test/files/presentation/callcc-interpreter/src/CallccInterpreter.scala
+++ b/test/files/presentation/callcc-interpreter/src/CallccInterpreter.scala
@@ -2,8 +2,8 @@ object callccInterpreter {
 
   type Answer = Value
 
-  /** 
-   * A continuation monad. 
+  /**
+   * A continuation monad.
    */
   case class M[A](in: (A => Answer) => Answer) {
     def bind[B](k: A => M[B])          = M[B](c => in (a => k(a) in c))
@@ -40,15 +40,15 @@ object callccInterpreter {
     override def toString() = "<function>"
   }
 
-  type Environment = List[Pair[Name, Value]]
+  type Environment = List[Tuple2[Name, Value]]
 
   def lookup(x: Name, e: Environment): M[Value] = e match {
     case List() => unitM(Wrong)
-    case Pair(y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
+    case (y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
   }
 
-  def add(a: Value, b: Value) /*?*/ = Pair(a, b) match {
-    case Pair(Num(m), Num(n)) => this./*!*/unitM(Num(m + n))
+  def add(a: Value, b: Value) /*?*/ = (a, b) match {
+    case (Num(m), Num(n)) => this./*!*/unitM(Num(m + n))
     case _ => unitM(Wrong)
   }
 
@@ -60,16 +60,20 @@ object callccInterpreter {
   def interp(t: Term, e: Environment): M[Value] = t match {
     case Var(x) => lookup(x, e)
     case Con(n) => unitM(Num(n))
-    case Add(l, r) => for (val a <- interp(l, e);
-			   val b <- interp(r, e);
-			   val c <- add(a, b))
-                      yield c
-    case Lam(x, t) => unitM(Fun(a => interp(t, Pair(x, a) :: e)))
-    case App(f, t) => for (val a <- interp(f, e);
-			   val b <- interp(t, e);
-			   val c <- apply(a, b))
-		      yield c
-    case Ccc(x, t) => callCC(k => interp(t, Pair(x, Fun(k)) :: e))
+    case Add(l, r) =>
+      for {
+        a <- interp(l, e)
+        b <- interp(r, e)
+        c <- add(a, b)
+      } yield c
+    case Lam(x, t) => unitM(Fun(a => interp(t, (x, a) :: e)))
+    case App(f, t) =>
+      for {
+        a <- interp(f, e)
+        b <- interp(t, e)
+        c <- apply(a, b)
+      } yield c
+    case Ccc(x, t) => callCC(k => interp(t, (x, Fun(k)) :: e))
   }
 
   def test(t: Term): String = showM(interp(t, List()))
diff --git a/test/files/presentation/completion-implicit-chained.check b/test/files/presentation/completion-implicit-chained.check
index 24417cf..c583b78 100644
--- a/test/files/presentation/completion-implicit-chained.check
+++ b/test/files/presentation/completion-implicit-chained.check
@@ -3,27 +3,25 @@ reload: Completions.scala
 askTypeCompletion at Completions.scala(11,16)
 ================================================================================
 [response] askTypeCompletion at (11,16)
-retrieved 24 members
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method map(x: Int => Int)(implicit a: DummyImplicit)test.O.type`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `value prefix123Int`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
+retrieved 22 members
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def equals(x$1: Any): Boolean
+def hashCode(): Int
+def map(x: Int => Int)(implicit a: DummyImplicit): test.O.type
+def toString(): String
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private[this] val prefix123: Int
 ================================================================================
diff --git a/test/files/presentation/hyperlinks-macro.check b/test/files/presentation/hyperlinks-macro.check
new file mode 100644
index 0000000..80d2268
--- /dev/null
+++ b/test/files/presentation/hyperlinks-macro.check
@@ -0,0 +1,11 @@
+reload: MacroCall.scala
+
+askHyperlinkPos for `foo` at (5,7) MacroCall.scala
+================================================================================
+[response] found askHyperlinkPos for `foo` at (2,7) MacroCall.scala
+================================================================================
+
+askHyperlinkPos for `foo` at (9,7) MacroCall.scala
+================================================================================
+[response] found askHyperlinkPos for `foo` at (2,7) MacroCall.scala
+================================================================================
diff --git a/test/files/presentation/hyperlinks-macro/Runner.scala b/test/files/presentation/hyperlinks-macro/Runner.scala
new file mode 100644
index 0000000..c2f89bd
--- /dev/null
+++ b/test/files/presentation/hyperlinks-macro/Runner.scala
@@ -0,0 +1,8 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest {
+  override def runDefaultTests() {
+    sourceFiles foreach (src => askLoadedTyped(src).get)
+    super.runDefaultTests()
+  }
+}
diff --git a/test/files/presentation/hyperlinks-macro/src/MacroCall.scala b/test/files/presentation/hyperlinks-macro/src/MacroCall.scala
new file mode 100644
index 0000000..d9676b3
--- /dev/null
+++ b/test/files/presentation/hyperlinks-macro/src/MacroCall.scala
@@ -0,0 +1,11 @@
+object Test {
+  def foo = 0
+
+  scala.reflect.runtime.universe.reify {
+    foo/*#*/
+  }
+
+  identity {
+    foo/*#*/
+  }
+}
diff --git a/test/files/presentation/hyperlinks/Runner.scala b/test/files/presentation/hyperlinks/Runner.scala
index 61da49a..b78e13c 100644
--- a/test/files/presentation/hyperlinks/Runner.scala
+++ b/test/files/presentation/hyperlinks/Runner.scala
@@ -7,5 +7,5 @@ object Test extends InteractiveTest {
     sourceFiles foreach (src => askLoadedTyped(src).get)
     super.runDefaultTests()
   }
-  
+
 }
\ No newline at end of file
diff --git a/test/files/presentation/hyperlinks/src/NameDefaultTests.scala b/test/files/presentation/hyperlinks/src/NameDefaultTests.scala
index b218040..340d223 100644
--- a/test/files/presentation/hyperlinks/src/NameDefaultTests.scala
+++ b/test/files/presentation/hyperlinks/src/NameDefaultTests.scala
@@ -2,11 +2,11 @@
 class NameDefaults {
   val someString = "abc"
   val someInt = 42
- 
+
   def foo(x: String, y: Int)(implicit logger: Int): Int = y
-  
+
   implicit val l = 42
-  
+
   def bar {
     println()
     val someOtherInt = 10
diff --git a/test/files/presentation/hyperlinks/src/PatMatTests.scala b/test/files/presentation/hyperlinks/src/PatMatTests.scala
index bbd0f2e..7184106 100644
--- a/test/files/presentation/hyperlinks/src/PatMatTests.scala
+++ b/test/files/presentation/hyperlinks/src/PatMatTests.scala
@@ -6,22 +6,22 @@ case class CaseOne(x: Int, y: List[Int]) extends BaseType
 case class CaseTwo(str: String) extends BaseType
 
 class PatMatTests {
-  
+
   def foo(x: BaseType) {
     x match {
       case CaseOne/*#*/(10, first :: second :: Nil) =>
         val tmp = 23
         println(first/*#*/)
         println(tmp/*#*/)
-        
+
       case CaseTwo/*#*/(mystring) =>
         println(mystring/*#*/)
     }
   }
-  
+
   def multipleAssign() {
     val (x, y) = ("abc", "def")
-    
+
     println(x/*#*/, y/*#*/)
   }
 
diff --git a/test/files/presentation/ide-bug-1000349.check b/test/files/presentation/ide-bug-1000349.check
index ada307d..79bfde5 100644
--- a/test/files/presentation/ide-bug-1000349.check
+++ b/test/files/presentation/ide-bug-1000349.check
@@ -3,38 +3,32 @@ reload: CompletionOnEmptyArgMethod.scala
 askTypeCompletion at CompletionOnEmptyArgMethod.scala(2,17)
 ================================================================================
 [response] askTypeCompletion at (2,17)
-retrieved 37 members
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ->[B](y: B)(Foo, B)`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method clone()Object`
-[accessible:  true] `method ensuring(cond: Boolean)Foo`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)Foo`
-[accessible:  true] `method ensuring(cond: Foo => Boolean)Foo`
-[accessible:  true] `method ensuring(cond: Foo => Boolean, msg: => Any)Foo`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method finalize()Unit`
-[accessible:  true] `method foo=> Foo`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method x=> Foo`
-[accessible:  true] `method →[B](y: B)(Foo, B)`
-[accessible:  true] `value __leftOfArrowFoo`
-[accessible:  true] `value __resultOfEnsuringFoo`
-[accessible:  true] `value selfAny`
+retrieved 30 members
+def +(other: String): String
+def ->[B](y: B): (Foo, B)
+def ensuring(cond: Boolean): Foo
+def ensuring(cond: Boolean,msg: => Any): Foo
+def ensuring(cond: Foo => Boolean): Foo
+def ensuring(cond: Foo => Boolean,msg: => Any): Foo
+def equals(x$1: Any): Boolean
+def foo: Foo
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def toString(): String
+def →[B](y: B): (Foo, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+protected[package lang] def clone(): Object
+protected[package lang] def finalize(): Unit
 ================================================================================
diff --git a/test/files/presentation/ide-bug-1000475.check b/test/files/presentation/ide-bug-1000475.check
index 0790272..4fb7f18 100644
--- a/test/files/presentation/ide-bug-1000475.check
+++ b/test/files/presentation/ide-bug-1000475.check
@@ -3,113 +3,95 @@ reload: Foo.scala
 askTypeCompletion at Foo.scala(3,7)
 ================================================================================
 [response] askTypeCompletion at (3,7)
-retrieved 36 members
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ->[B](y: B)(Object, B)`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method ensuring(cond: Boolean)Object`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)Object`
-[accessible:  true] `method ensuring(cond: Object => Boolean)Object`
-[accessible:  true] `method ensuring(cond: Object => Boolean, msg: => Any)Object`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method x=> Object`
-[accessible:  true] `method →[B](y: B)(Object, B)`
-[accessible:  true] `value __leftOfArrowObject`
-[accessible:  true] `value __resultOfEnsuringObject`
-[accessible:  true] `value selfAny`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
+retrieved 29 members
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (Object, B)
+def ensuring(cond: Boolean): Object
+def ensuring(cond: Boolean,msg: => Any): Object
+def ensuring(cond: Object => Boolean): Object
+def ensuring(cond: Object => Boolean,msg: => Any): Object
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def toString(): String
+def →[B](y: B): (Object, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
 ================================================================================
 
 askTypeCompletion at Foo.scala(6,10)
 ================================================================================
 [response] askTypeCompletion at (6,10)
-retrieved 36 members
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ->[B](y: B)(Object, B)`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method ensuring(cond: Boolean)Object`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)Object`
-[accessible:  true] `method ensuring(cond: Object => Boolean)Object`
-[accessible:  true] `method ensuring(cond: Object => Boolean, msg: => Any)Object`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method x=> Object`
-[accessible:  true] `method →[B](y: B)(Object, B)`
-[accessible:  true] `value __leftOfArrowObject`
-[accessible:  true] `value __resultOfEnsuringObject`
-[accessible:  true] `value selfAny`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
+retrieved 29 members
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (Object, B)
+def ensuring(cond: Boolean): Object
+def ensuring(cond: Boolean,msg: => Any): Object
+def ensuring(cond: Object => Boolean): Object
+def ensuring(cond: Object => Boolean,msg: => Any): Object
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def toString(): String
+def →[B](y: B): (Object, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
 ================================================================================
 
 askTypeCompletion at Foo.scala(7,7)
 ================================================================================
 [response] askTypeCompletion at (7,7)
-retrieved 36 members
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ->[B](y: B)(Object, B)`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method ensuring(cond: Boolean)Object`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)Object`
-[accessible:  true] `method ensuring(cond: Object => Boolean)Object`
-[accessible:  true] `method ensuring(cond: Object => Boolean, msg: => Any)Object`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method x=> Object`
-[accessible:  true] `method →[B](y: B)(Object, B)`
-[accessible:  true] `value __leftOfArrowObject`
-[accessible:  true] `value __resultOfEnsuringObject`
-[accessible:  true] `value selfAny`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
+retrieved 29 members
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (Object, B)
+def ensuring(cond: Boolean): Object
+def ensuring(cond: Boolean,msg: => Any): Object
+def ensuring(cond: Object => Boolean): Object
+def ensuring(cond: Object => Boolean,msg: => Any): Object
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def toString(): String
+def →[B](y: B): (Object, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
 ================================================================================
diff --git a/test/files/presentation/ide-bug-1000475/src/Foo.scala b/test/files/presentation/ide-bug-1000475/src/Foo.scala
index b963bb7..5dd6b7d 100644
--- a/test/files/presentation/ide-bug-1000475/src/Foo.scala
+++ b/test/files/presentation/ide-bug-1000475/src/Foo.scala
@@ -1,7 +1,7 @@
 class Foo {
   val v = new Object
   v.toS/*!*/
-  
+
   val m = Map(1 -> new Object)
   m(1).toS/*!*/
   m(1)./*!*/
diff --git a/test/files/presentation/ide-bug-1000531.check b/test/files/presentation/ide-bug-1000531.check
index a28ecb3..d8c7a36 100644
--- a/test/files/presentation/ide-bug-1000531.check
+++ b/test/files/presentation/ide-bug-1000531.check
@@ -3,127 +3,120 @@ reload: CrashOnLoad.scala
 askTypeCompletion at CrashOnLoad.scala(6,12)
 ================================================================================
 [response] askTypeCompletion at (6,12)
-retrieved 126 members
-[accessible:  true] `class GroupedIteratorIterator[B]#GroupedIterator`
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ++[B >: B](that: => scala.collection.GenTraversableOnce[B])Iterator[B]`
-[accessible:  true] `method ->[B](y: B)(java.util.Iterator[B], B)`
-[accessible:  true] `method /:[B](z: B)(op: (B, B) => B)B`
-[accessible:  true] `method /:\[A1 >: B](z: A1)(op: (A1, A1) => A1)A1`
-[accessible:  true] `method :\[B](z: B)(op: (B, B) => B)B`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method addString(b: StringBuilder)StringBuilder`
-[accessible:  true] `method addString(b: StringBuilder, sep: String)StringBuilder`
-[accessible:  true] `method addString(b: StringBuilder, start: String, sep: String, end: String)StringBuilder`
-[accessible:  true] `method aggregate[B](z: B)(seqop: (B, B) => B, combop: (B, B) => B)B`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method buffered=> scala.collection.BufferedIterator[B]`
-[accessible:  true] `method collectFirst[B](pf: PartialFunction[B,B])Option[B]`
-[accessible:  true] `method collect[B](pf: PartialFunction[B,B])Iterator[B]`
-[accessible:  true] `method contains(elem: Any)Boolean`
-[accessible:  true] `method copyToArray[B >: B](xs: Array[B])Unit`
-[accessible:  true] `method copyToArray[B >: B](xs: Array[B], start: Int)Unit`
-[accessible:  true] `method copyToArray[B >: B](xs: Array[B], start: Int, len: Int)Unit`
-[accessible:  true] `method copyToBuffer[B >: B](dest: scala.collection.mutable.Buffer[B])Unit`
-[accessible:  true] `method corresponds[B](that: scala.collection.GenTraversableOnce[B])(p: (B, B) => Boolean)Boolean`
-[accessible:  true] `method count(p: B => Boolean)Int`
-[accessible:  true] `method drop(n: Int)Iterator[B]`
-[accessible:  true] `method dropWhile(p: B => Boolean)Iterator[B]`
-[accessible:  true] `method duplicate=> (Iterator[B], Iterator[B])`
-[accessible:  true] `method ensuring(cond: Boolean)java.util.Iterator[B]`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)java.util.Iterator[B]`
-[accessible:  true] `method ensuring(cond: java.util.Iterator[B] => Boolean)java.util.Iterator[B]`
-[accessible:  true] `method ensuring(cond: java.util.Iterator[B] => Boolean, msg: => Any)java.util.Iterator[B]`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method exists(p: B => Boolean)Boolean`
-[accessible:  true] `method filter(p: B => Boolean)Iterator[B]`
-[accessible:  true] `method filterNot(p: B => Boolean)Iterator[B]`
-[accessible:  true] `method find(p: B => Boolean)Option[B]`
-[accessible:  true] `method flatMap[B](f: B => scala.collection.GenTraversableOnce[B])Iterator[B]`
-[accessible:  true] `method foldLeft[B](z: B)(op: (B, B) => B)B`
-[accessible:  true] `method foldRight[B](z: B)(op: (B, B) => B)B`
-[accessible:  true] `method fold[A1 >: B](z: A1)(op: (A1, A1) => A1)A1`
-[accessible:  true] `method forall(p: B => Boolean)Boolean`
-[accessible:  true] `method foreach[U](f: B => U)Unit`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method grouped[B >: B](size: Int)Iterator[B]#GroupedIterator[B]`
-[accessible:  true] `method hasDefiniteSize=> Boolean`
-[accessible:  true] `method hasNext()Boolean`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method indexOf[B >: B](elem: B)Int`
-[accessible:  true] `method indexWhere(p: B => Boolean)Int`
-[accessible:  true] `method isEmpty=> Boolean`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method isTraversableAgain=> Boolean`
-[accessible:  true] `method length=> Int`
-[accessible:  true] `method map[B](f: B => B)Iterator[B]`
-[accessible:  true] `method maxBy[B](f: B => B)(implicit cmp: Ordering[B])B`
-[accessible:  true] `method max[B >: B](implicit cmp: Ordering[B])B`
-[accessible:  true] `method minBy[B](f: B => B)(implicit cmp: Ordering[B])B`
-[accessible:  true] `method min[B >: B](implicit cmp: Ordering[B])B`
-[accessible:  true] `method mkString(sep: String)String`
-[accessible:  true] `method mkString(start: String, sep: String, end: String)String`
-[accessible:  true] `method mkString=> String`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method next()B`
-[accessible:  true] `method nonEmpty=> Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method padTo[A1 >: B](len: Int, elem: A1)Iterator[A1]`
-[accessible:  true] `method partition(p: B => Boolean)(Iterator[B], Iterator[B])`
-[accessible:  true] `method patch[B >: B](from: Int, patchElems: Iterator[B], replaced: Int)Iterator[B]`
-[accessible:  true] `method product[B >: B](implicit num: Numeric[B])B`
-[accessible:  true] `method reduceLeftOption[B >: B](op: (B, B) => B)Option[B]`
-[accessible:  true] `method reduceLeft[B >: B](op: (B, B) => B)B`
-[accessible:  true] `method reduceOption[A1 >: B](op: (A1, A1) => A1)Option[A1]`
-[accessible:  true] `method reduceRightOption[B >: B](op: (B, B) => B)Option[B]`
-[accessible:  true] `method reduceRight[B >: B](op: (B, B) => B)B`
-[accessible:  true] `method reduce[A1 >: B](op: (A1, A1) => A1)A1`
-[accessible:  true] `method remove()Unit`
-[accessible:  true] `method sameElements(that: Iterator[_])Boolean`
-[accessible:  true] `method scanLeft[B](z: B)(op: (B, B) => B)Iterator[B]`
-[accessible:  true] `method scanRight[B](z: B)(op: (B, B) => B)Iterator[B]`
-[accessible:  true] `method seq=> Iterator[B]`
-[accessible:  true] `method size=> Int`
-[accessible:  true] `method slice(from: Int, until: Int)Iterator[B]`
-[accessible:  true] `method sliding[B >: B](size: Int, step: Int)Iterator[B]#GroupedIterator[B]`
-[accessible:  true] `method span(p: B => Boolean)(Iterator[B], Iterator[B])`
-[accessible:  true] `method sum[B >: B](implicit num: Numeric[B])B`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method take(n: Int)Iterator[B]`
-[accessible:  true] `method takeWhile(p: B => Boolean)Iterator[B]`
-[accessible:  true] `method toArray[B >: B](implicit evidence$1: scala.reflect.ClassTag[B])Array[B]`
-[accessible:  true] `method toBuffer[B >: B]=> scala.collection.mutable.Buffer[B]`
-[accessible:  true] `method toIndexedSeq=> scala.collection.immutable.IndexedSeq[B]`
-[accessible:  true] `method toIterable=> Iterable[B]`
-[accessible:  true] `method toIterator=> Iterator[B]`
-[accessible:  true] `method toList=> List[B]`
-[accessible:  true] `method toMap[T, U](implicit ev: <:<[B,(T, U)])scala.collection.immutable.Map[T,U]`
-[accessible:  true] `method toSeq=> Seq[B]`
-[accessible:  true] `method toSet[B >: B]=> scala.collection.immutable.Set[B]`
-[accessible:  true] `method toStream=> scala.collection.immutable.Stream[B]`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method toTraversable=> Traversable[B]`
-[accessible:  true] `method toVector=> Vector[B]`
-[accessible:  true] `method to[Col[_]](implicit cbf: scala.collection.generic.CanBuildFrom[Nothing,B,Col[B]])Col[B]`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method withFilter(p: B => Boolean)Iterator[B]`
-[accessible:  true] `method x=> java.util.Iterator[B]`
-[accessible:  true] `method zipAll[B, A1 >: B, B1 >: B](that: Iterator[B], thisElem: A1, thatElem: B1)Iterator[(A1, B1)]`
-[accessible:  true] `method zipWithIndex=> Iterator[(B, Int)]`
-[accessible:  true] `method zip[B](that: Iterator[B])Iterator[(B, B)]`
-[accessible:  true] `method →[B](y: B)(java.util.Iterator[B], B)`
-[accessible:  true] `value __leftOfArrowjava.util.Iterator[B]`
-[accessible:  true] `value __resultOfEnsuringjava.util.Iterator[B]`
-[accessible:  true] `value selfAny`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
-[accessible: false] `method reversed=> List[B]`
+retrieved 117 members
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+[inaccessible] protected[this] def reversed: List[B]
+class GroupedIterator[B >: A] extends AbstractIterator[Seq[B]] with Iterator[Seq[B]]
+def +(other: String): String
+def ++[B >: B](that: => scala.collection.GenTraversableOnce[B]): Iterator[B]
+def ->[B](y: B): (java.util.Iterator[B], B)
+def /:[B](z: B)(op: (B, B) => B): B
+def :\[B](z: B)(op: (B, B) => B): B
+def addString(b: StringBuilder): StringBuilder
+def addString(b: StringBuilder,sep: String): StringBuilder
+def addString(b: StringBuilder,start: String,sep: String,end: String): StringBuilder
+def aggregate[B](z: => B)(seqop: (B, B) => B,combop: (B, B) => B): B
+def buffered: scala.collection.BufferedIterator[B]
+def collectFirst[B](pf: PartialFunction[B,B]): Option[B]
+def collect[B](pf: PartialFunction[B,B]): Iterator[B]
+def contains(elem: Any): Boolean
+def copyToArray[B >: B](xs: Array[B]): Unit
+def copyToArray[B >: B](xs: Array[B],start: Int): Unit
+def copyToArray[B >: B](xs: Array[B],start: Int,len: Int): Unit
+def copyToBuffer[B >: B](dest: scala.collection.mutable.Buffer[B]): Unit
+def corresponds[B](that: scala.collection.GenTraversableOnce[B])(p: (B, B) => Boolean): Boolean
+def count(p: B => Boolean): Int
+def drop(n: Int): Iterator[B]
+def dropWhile(p: B => Boolean): Iterator[B]
+def duplicate: (Iterator[B], Iterator[B])
+def ensuring(cond: Boolean): java.util.Iterator[B]
+def ensuring(cond: Boolean,msg: => Any): java.util.Iterator[B]
+def ensuring(cond: java.util.Iterator[B] => Boolean): java.util.Iterator[B]
+def ensuring(cond: java.util.Iterator[B] => Boolean,msg: => Any): java.util.Iterator[B]
+def equals(x$1: Any): Boolean
+def exists(p: B => Boolean): Boolean
+def filter(p: B => Boolean): Iterator[B]
+def filterNot(p: B => Boolean): Iterator[B]
+def find(p: B => Boolean): Option[B]
+def flatMap[B](f: B => scala.collection.GenTraversableOnce[B]): Iterator[B]
+def foldLeft[B](z: B)(op: (B, B) => B): B
+def foldRight[B](z: B)(op: (B, B) => B): B
+def fold[A1 >: B](z: A1)(op: (A1, A1) => A1): A1
+def forall(p: B => Boolean): Boolean
+def foreach[U](f: B => U): Unit
+def formatted(fmtstr: String): String
+def grouped[B >: B](size: Int): Iterator[B]#GroupedIterator[B]
+def hasDefiniteSize: Boolean
+def hasNext(): Boolean
+def hashCode(): Int
+def indexOf[B >: B](elem: B): Int
+def indexWhere(p: B => Boolean): Int
+def isEmpty: Boolean
+def isTraversableAgain: Boolean
+def length: Int
+def map[B](f: B => B): Iterator[B]
+def maxBy[B](f: B => B)(implicit cmp: Ordering[B]): B
+def max[B >: B](implicit cmp: Ordering[B]): B
+def minBy[B](f: B => B)(implicit cmp: Ordering[B]): B
+def min[B >: B](implicit cmp: Ordering[B]): B
+def mkString(sep: String): String
+def mkString(start: String,sep: String,end: String): String
+def mkString: String
+def next(): B
+def nonEmpty: Boolean
+def padTo[A1 >: B](len: Int,elem: A1): Iterator[A1]
+def partition(p: B => Boolean): (Iterator[B], Iterator[B])
+def patch[B >: B](from: Int,patchElems: Iterator[B],replaced: Int): Iterator[B]
+def product[B >: B](implicit num: Numeric[B]): B
+def reduceLeftOption[B >: B](op: (B, B) => B): Option[B]
+def reduceLeft[B >: B](op: (B, B) => B): B
+def reduceOption[A1 >: B](op: (A1, A1) => A1): Option[A1]
+def reduceRightOption[B >: B](op: (B, B) => B): Option[B]
+def reduceRight[B >: B](op: (B, B) => B): B
+def reduce[A1 >: B](op: (A1, A1) => A1): A1
+def remove(): Unit
+def sameElements(that: Iterator[_]): Boolean
+def scanLeft[B](z: B)(op: (B, B) => B): Iterator[B]
+def scanRight[B](z: B)(op: (B, B) => B): Iterator[B]
+def seq: Iterator[B]
+def size: Int
+def slice(from: Int,until: Int): Iterator[B]
+def sliding[B >: B](size: Int,step: Int): Iterator[B]#GroupedIterator[B]
+def span(p: B => Boolean): (Iterator[B], Iterator[B])
+def sum[B >: B](implicit num: Numeric[B]): B
+def take(n: Int): Iterator[B]
+def takeWhile(p: B => Boolean): Iterator[B]
+def toArray[B >: B](implicit evidence$1: scala.reflect.ClassTag[B]): Array[B]
+def toBuffer[B >: B]: scala.collection.mutable.Buffer[B]
+def toIndexedSeq: scala.collection.immutable.IndexedSeq[B]
+def toIterable: Iterable[B]
+def toIterator: Iterator[B]
+def toList: List[B]
+def toMap[T, U](implicit ev: <:<[B,(T, U)]): scala.collection.immutable.Map[T,U]
+def toSeq: Seq[B]
+def toSet[B >: B]: scala.collection.immutable.Set[B]
+def toStream: scala.collection.immutable.Stream[B]
+def toString(): String
+def toTraversable: Traversable[B]
+def toVector: Vector[B]
+def to[Col[_]](implicit cbf: scala.collection.generic.CanBuildFrom[Nothing,B,Col[B]]): Col[B]
+def withFilter(p: B => Boolean): Iterator[B]
+def zipAll[B, A1 >: B, B1 >: B](that: Iterator[B],thisElem: A1,thatElem: B1): Iterator[(A1, B1)]
+def zipWithIndex: Iterator[(B, Int)]
+def zip[B](that: Iterator[B]): Iterator[(B, B)]
+def →[B](y: B): (java.util.Iterator[B], B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
 ================================================================================
diff --git a/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala b/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala
index 21d39c8..878bbfa 100644
--- a/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala
+++ b/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala
@@ -1,7 +1,7 @@
 /** When this files is opened within the IDE, a typing error is reported. */
 class A[B] extends java.lang.Iterable[B] {
   import scala.collection.JavaConversions._
-  def iterator = Iterator.empty 
-  
+  def iterator = Iterator.empty
+
   iterator. /*!*/
 }
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1001326.check b/test/files/presentation/ide-t1001326.check
deleted file mode 100644
index 0ac15fa..0000000
--- a/test/files/presentation/ide-t1001326.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Unique OK
-Unattributed OK
-NeverModify OK
-AlwaysParseTree OK
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1001326/Test.scala b/test/files/presentation/ide-t1001326/Test.scala
deleted file mode 100644
index 3091da4..0000000
--- a/test/files/presentation/ide-t1001326/Test.scala
+++ /dev/null
@@ -1,91 +0,0 @@
-import scala.tools.nsc.interactive.tests.InteractiveTest
-import scala.reflect.internal.util.SourceFile
-import scala.tools.nsc.interactive.Response
-
-object Test extends InteractiveTest {
-    
-  override def execute(): Unit = {
-    val sf = sourceFiles.find(_.file.name == "A.scala").head
-    uniqueParseTree_t1001326(sf)
-    unattributedParseTree_t1001326(sf)
-    neverModifyParseTree_t1001326(sf)
-    shouldAlwaysReturnParseTree_t1001326(sf)
-  }
-  
-  /**
-   * Asking twice for a parseTree on the same source should always return a new tree
-   */
-   private def uniqueParseTree_t1001326(sf: SourceFile) {
-    val parseTree1 = compiler.parseTree(sf)
-    val parseTree2 = compiler.parseTree(sf)
-    if (parseTree1 != parseTree2) {
-      reporter.println("Unique OK")
-    } else {
-      reporter.println("Unique FAILED")
-    }
-  }
-  
-  /**
-   * A parseTree should never contain any symbols or types
-   */
-  private def unattributedParseTree_t1001326(sf: SourceFile) {
-    if (noSymbolsOrTypes(compiler.parseTree(sf))) {
-      reporter.println("Unattributed OK")
-    } else {
-      reporter.println("Unattributed FAILED")
-    }
-  }
-  
-  /**
-   * Once you have obtained a parseTree it should never change
-   */  
-  private def neverModifyParseTree_t1001326(sf: SourceFile) {
-    val parsedTree = compiler.parseTree(sf)
-    loadSourceAndWaitUntilTypechecked(sf)
-    if (noSymbolsOrTypes(parsedTree)) {
-      reporter.println("NeverModify OK")
-    } else {
-      reporter.println("NeverModify FAILED")
-    }
-  }
-  
-  /**
-   * Should always return a parse tree
-   */
-   private def shouldAlwaysReturnParseTree_t1001326(sf: SourceFile) {
-     loadSourceAndWaitUntilTypechecked(sf)
-     if (noSymbolsOrTypes(compiler.parseTree(sf))) {
-       reporter.println("AlwaysParseTree OK")
-     } else {
-       reporter.println("AlwaysParseTree FAILED")
-     }
-   }
-  
-  /**
-   * Load a source and block while it is type-checking.
-   */
-  private def loadSourceAndWaitUntilTypechecked(sf: SourceFile): Unit = {
-    compiler.askToDoFirst(sf)
-    val res = new Response[Unit]
-    compiler.askReload(List(sf), res)
-    res.get
-    askLoadedTyped(sf).get
-  }
-  
-  /**
-   * Traverses a tree and makes sure that there are no types or symbols present in the tree with
-   * the exception of the symbol for the package 'scala'. This is because that symbol will be
-   * present in some of the nodes that the compiler generates.
-   */
-  private def noSymbolsOrTypes(tree: compiler.Tree): Boolean = {
-    tree.forAll { t =>
-      (t.symbol == null || 
-       t.symbol == compiler.NoSymbol || 
-       t.symbol == compiler.definitions.ScalaPackage // ignore the symbol for the scala package for now
-      ) && (
-       t.tpe == null || 
-       t.tpe == compiler.NoType)
-    }
-  }
-  
-}
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1001326/src/a/A.scala b/test/files/presentation/ide-t1001326/src/a/A.scala
deleted file mode 100644
index c82ca02..0000000
--- a/test/files/presentation/ide-t1001326/src/a/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package a
-
-class A {
-  def foo(s: String) = s + s
-}
\ No newline at end of file
diff --git a/test/files/presentation/implicit-member.check b/test/files/presentation/implicit-member.check
index 111d06d..3bd3d8a 100644
--- a/test/files/presentation/implicit-member.check
+++ b/test/files/presentation/implicit-member.check
@@ -3,40 +3,34 @@ reload: ImplicitMember.scala
 askTypeCompletion at ImplicitMember.scala(7,7)
 ================================================================================
 [response] askTypeCompletion at (7,7)
-retrieved 39 members
-[accessible:  true] `class AppliedImplicitImplicit.AppliedImplicit`
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ->[B](y: B)(Implicit.type, B)`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method AppliedImplicit[A](x: A)Implicit.AppliedImplicit[A]`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method clone()Object`
-[accessible:  true] `method ensuring(cond: Boolean)Implicit.type`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)Implicit.type`
-[accessible:  true] `method ensuring(cond: Implicit.type => Boolean)Implicit.type`
-[accessible:  true] `method ensuring(cond: Implicit.type => Boolean, msg: => Any)Implicit.type`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method finalize()Unit`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method x=> Implicit.type`
-[accessible:  true] `method →[B](y: B)(Implicit.type, B)`
-[accessible:  true] `value __leftOfArrowImplicit.type`
-[accessible:  true] `value __resultOfEnsuringImplicit.type`
-[accessible:  true] `value selfAny`
-[accessible:  true] `value xImplicit.type`
+retrieved 32 members
+def +(other: String): String
+def ->[B](y: B): (Implicit.type, B)
+def ensuring(cond: Boolean): Implicit.type
+def ensuring(cond: Boolean,msg: => Any): Implicit.type
+def ensuring(cond: Implicit.type => Boolean): Implicit.type
+def ensuring(cond: Implicit.type => Boolean,msg: => Any): Implicit.type
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def toString(): String
+def →[B](y: B): (Implicit.type, B)
+final class AppliedImplicit[A] extends AnyRef
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+implicit def AppliedImplicit[A](x: A): Implicit.AppliedImplicit[A]
+private[this] val x: Implicit.type
+protected[package lang] def clone(): Object
+protected[package lang] def finalize(): Unit
 ================================================================================
diff --git a/test/files/presentation/implicit-member/src/ImplicitMember.scala b/test/files/presentation/implicit-member/src/ImplicitMember.scala
index 06732f6..a547b65 100644
--- a/test/files/presentation/implicit-member/src/ImplicitMember.scala
+++ b/test/files/presentation/implicit-member/src/ImplicitMember.scala
@@ -1,8 +1,8 @@
 object Implicit {
 
   final class AppliedImplicit[A](val x: A)
-  
+
   implicit def AppliedImplicit[A](x: A): AppliedImplicit[A] = new AppliedImplicit(x)
-  
+
   this./*!*/x
 }
\ No newline at end of file
diff --git a/test/files/presentation/memory-leaks/MemoryLeaksTest.scala b/test/files/presentation/memory-leaks/MemoryLeaksTest.scala
index 950569c..f09c6f8 100644
--- a/test/files/presentation/memory-leaks/MemoryLeaksTest.scala
+++ b/test/files/presentation/memory-leaks/MemoryLeaksTest.scala
@@ -2,8 +2,9 @@ import java.io.PrintWriter
 import java.io.FileOutputStream
 import java.util.Calendar
 
+import scala.reflect.internal.util.BatchSourceFile
+import scala.tools.nsc.interactive
 import scala.tools.nsc.interactive.tests._
-import scala.tools.nsc.util._
 import scala.tools.nsc.io._
 import scala.tools.nsc.doc
 
@@ -25,7 +26,21 @@ import scala.tools.nsc.doc
 object Test extends InteractiveTest {
   final val mega = 1024 * 1024
 
-  override val withDocComments = true
+  import interactive.Global
+  trait InteractiveScaladocAnalyzer extends interactive.InteractiveAnalyzer with doc.ScaladocAnalyzer {
+    val global : Global
+    override def newTyper(context: Context) = new Typer(context) with InteractiveTyper with ScaladocTyper {
+      override def canAdaptConstantTypeToLiteral = false
+    }
+  }
+
+  private class ScaladocEnabledGlobal extends Global(settings, compilerReporter) {
+    override lazy val analyzer = new {
+      val global: ScaladocEnabledGlobal.this.type = ScaladocEnabledGlobal.this
+    } with InteractiveScaladocAnalyzer
+  }
+
+  override def createGlobal: Global = new ScaladocEnabledGlobal
 
   override def execute(): Unit = memoryConsumptionTest()
 
diff --git a/test/files/presentation/parse-invariants.check b/test/files/presentation/parse-invariants.check
new file mode 100644
index 0000000..32e9c84
--- /dev/null
+++ b/test/files/presentation/parse-invariants.check
@@ -0,0 +1,5 @@
+NoNewSymbolsEntered OK
+Unique OK
+Unattributed OK
+NeverModify OK
+AlwaysParseTree OK
diff --git a/test/files/presentation/parse-invariants/Test.scala b/test/files/presentation/parse-invariants/Test.scala
new file mode 100644
index 0000000..128896c
--- /dev/null
+++ b/test/files/presentation/parse-invariants/Test.scala
@@ -0,0 +1,107 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+import scala.reflect.internal.util.SourceFile
+import scala.tools.nsc.interactive.Response
+
+object Test extends InteractiveTest {
+
+  override def execute(): Unit = {
+    val sf = sourceFiles.find(_.file.name == "A.scala").head
+    noNewSymbols(sf)
+    uniqueParseTree(sf)
+    unattributedParseTree(sf)
+    neverModifyParseTree(sf)
+    shouldAlwaysReturnParseTree(sf)
+  }
+
+  /**
+   * Asking for a parseTree should not enter any new symbols.
+   */
+  private def noNewSymbols(sf: SourceFile) {
+    def nextId() = compiler.NoSymbol.newTermSymbol(compiler.TermName("dummy"), compiler.NoPosition, compiler.NoFlags).id
+    val id = nextId()
+    val tree = compiler.parseTree(sf)
+    val id2 = nextId()
+    if (id2 == id + 1) {
+      reporter.println("NoNewSymbolsEntered OK")
+    } else {
+      reporter.println("NoNewSymbolsEntered FAILED")
+    }
+  }
+
+  /**
+   * Asking twice for a parseTree on the same source should always return a new tree
+   */
+  private def uniqueParseTree(sf: SourceFile) {
+    val parseTree1 = compiler.parseTree(sf)
+    val parseTree2 = compiler.parseTree(sf)
+    if (parseTree1 != parseTree2) {
+      reporter.println("Unique OK")
+    } else {
+      reporter.println("Unique FAILED")
+    }
+  }
+
+  /**
+   * A parseTree should never contain any symbols or types
+   */
+  private def unattributedParseTree(sf: SourceFile) {
+    if (noSymbolsOrTypes(compiler.parseTree(sf))) {
+      reporter.println("Unattributed OK")
+    } else {
+      reporter.println("Unattributed FAILED")
+    }
+  }
+
+  /**
+   * Once you have obtained a parseTree it should never change
+   */
+  private def neverModifyParseTree(sf: SourceFile) {
+    val parsedTree = compiler.parseTree(sf)
+    loadSourceAndWaitUntilTypechecked(sf)
+    if (noSymbolsOrTypes(parsedTree)) {
+      reporter.println("NeverModify OK")
+    } else {
+      reporter.println("NeverModify FAILED")
+    }
+  }
+
+  /**
+   * Should always return a parse tree
+   */
+   private def shouldAlwaysReturnParseTree(sf: SourceFile) {
+     loadSourceAndWaitUntilTypechecked(sf)
+     if (noSymbolsOrTypes(compiler.parseTree(sf))) {
+       reporter.println("AlwaysParseTree OK")
+     } else {
+       reporter.println("AlwaysParseTree FAILED")
+     }
+   }
+
+  /**
+   * Load a source and block while it is type-checking.
+   */
+  private def loadSourceAndWaitUntilTypechecked(sf: SourceFile): Unit = {
+    compiler.askToDoFirst(sf)
+    val res = new Response[Unit]
+    compiler.askReload(List(sf), res)
+    res.get
+    askLoadedTyped(sf).get
+  }
+
+  /**
+   * Traverses a tree and makes sure that there are no types or symbols present in the tree with
+   * the exception of the symbol for the package 'scala'. This is because that symbol will be
+   * present in some of the nodes that the compiler generates.
+   */
+  private def noSymbolsOrTypes(tree: compiler.Tree): Boolean = {
+    tree.forAll { t =>
+      (t.symbol == null ||
+       t.symbol == compiler.NoSymbol ||
+       t.symbol == compiler.definitions.ScalaPackage // ignore the symbol for the scala package for now
+      ) && (
+       t.tpe == null ||
+       t.tpe == compiler.NoType)
+    }
+  }
+
+}
\ No newline at end of file
diff --git a/test/files/presentation/parse-invariants/src/a/A.scala b/test/files/presentation/parse-invariants/src/a/A.scala
new file mode 100644
index 0000000..1ae78ca
--- /dev/null
+++ b/test/files/presentation/parse-invariants/src/a/A.scala
@@ -0,0 +1,138 @@
+package syntax
+
+object Terms {
+  object Literals {
+    0
+    0l
+    0f
+    0d
+    0xb33f
+    'c'
+    "string"
+    """
+      multi-line
+      string
+    """
+    'symbol
+    true
+    false
+    null
+    ()
+  }
+
+  object Patterns {
+    0             match { case 0                               =>               }
+    1             match { case (0 | 1)                         =>               }
+    2             match { case _: Int                          =>               }
+    3             match { case _                               =>               }
+    Some(0)       match { case Some(0)                         =>               }
+    Some(0)       match { case name @ Some(_)                  =>               }
+    Some(Some(0)) match { case nested @ Some(deeper @ Some(_)) =>               }
+    List(1, 2, 3) match { case unapplySeq @ List(1, 2, _*)     =>               }
+    0             match { case i if i > 0                      =>               }
+    List(1)       match { case _: List[t]                      => List.empty[t] }
+  }
+
+  object New {
+    class Foo
+    trait Bar
+    new Foo
+    new Foo { selfie => }
+    new Foo with Bar
+    new { val early = 1 } with Bar
+    new { val name = "anon "}
+  }
+
+  def tuple         = (1, 'two, "three")
+  def lambda        = (x: Int, y: Int) => x + y
+  def lambda2       = (_: Int) + (_: Int)
+  def blocks        = { { {}; {} }; {} }
+  def ascription    = (1: Int)
+  def select        = Nil.size
+  def method1       = "s".replace("foo", "bar")
+  def method2       = "s" + "s"
+  def method3       = Nil.foreach { e => }
+  def method4       = 1 :: 2 :: 3 :: Nil
+  def if1           = if (true) true else false
+  def if2           = if (true) true
+  def `return`: Int = { return 0 }
+  def `throw`       = throw new Exception
+  def `match`       = 0 match { case 0 => case _ => }
+  def `try`         = try 0 catch { case _ => } finally 0
+  def `while`       = while(true) 0
+  def `do while`    = do 0 while(true)
+  def `for`         = for (i <- 1 to 10; if i % 2 == 0; j = i + 1) j
+  def `for yield`   = for (a <- List(List(1)); b <- a; c = b * 2) yield b
+  def interpolation = s"$tuple and maybe also $blocks"
+  def assign        = { var x = 1; x = 2 }
+  def assign2       = { object o { var x = 1 }; o.x = 2 }
+  def update        = { val v = collection.mutable.Seq(1); v(0) = 2 }
+  def `this`        = this
+}
+
+object Types {
+  type Reference    = scala.App
+  type Tuple        = (Int, String, Double)
+  type Function     = (Int, String) => Double
+  type Refined      = Int { val meta: Any }
+  type Lambda       = ({ type F[T] = List[T] })#F[_]
+  type Infix        = Int Either String
+  type Application  = List[Int]
+  type Existential  = List[T] forSome { type T }
+  object O { type T = Int }
+  type Dependent    = O.T
+  class O { type T  = Int }
+  type Selection    = O#T
+}
+
+object Definitions {
+  private val x1 = 0
+  private[this] val x2 = 0
+  private[Definitions] val x3 = 0
+  protected val x4 = 0
+  protected[AcessManagement] val x5 = 0
+  val x1 = 1
+  val x2: Int = 1
+  val x3, y3 = 1
+  lazy val x4 = 1
+  implicit val x5 = 1
+  final val x6 = 1
+  lazy final val x7 = 1
+  val Some(x8) = Some(0)
+  var x9 = 1
+  var x10, y10 = 1
+  var x11: Int = 1
+
+  implicit def implicitView: Option[Int] = None
+  def implicitArg1(implicit i: Int) = i + 2
+  def implicitArg2[T: Fooable] = implicitly[Fooable[T]]
+  def bound1[T <: Int](x: T): T = x
+  def bound2[T >: Any](x: T): T = x
+  def bound3[T <% Int](x: T): Int = x
+  def vararg(args: Int*) = args.toList
+  def sum(x: Int, y: Int) = x + y
+  def multipleArgLists(x: Int)(y: Int) = x + y
+
+  type _0 = Int
+  type _1[T] = List[T]
+  type _2[A, B] = Either[A, B]
+
+  class Val(value: Int) extends AnyVal
+  implicit class Impl(value: Int) { def foo = "foo" }
+  abstract class Abs
+  sealed class Sealed
+  class Child extends Sealed
+  case class Point(x: Int, y: Int)
+
+  trait Fooable[T]
+  trait Barable with Fooable[Barable]
+
+  object Foo
+  object Foo with Fooable[Foo]
+  case object Zero
+}
+
+package Packages {
+  package object PackageObject
+  package Nested { package Deeper { } }
+}
diff --git a/test/files/presentation/partial-fun.check b/test/files/presentation/partial-fun.check
new file mode 100644
index 0000000..0352d5e
--- /dev/null
+++ b/test/files/presentation/partial-fun.check
@@ -0,0 +1,2 @@
+reload: PartialFun.scala
+ArrayBuffer()
diff --git a/test/files/presentation/partial-fun/Runner.scala b/test/files/presentation/partial-fun/Runner.scala
new file mode 100644
index 0000000..3edd5bb
--- /dev/null
+++ b/test/files/presentation/partial-fun/Runner.scala
@@ -0,0 +1,10 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest {
+  override def runDefaultTests() {
+    sourceFiles foreach (src => askLoadedTyped(src).get)
+    super.runDefaultTests()
+
+    println(compiler.unitOfFile.values.map(_.problems).mkString("", "\n", ""))
+  }
+}
diff --git a/test/files/presentation/partial-fun/src/PartialFun.scala b/test/files/presentation/partial-fun/src/PartialFun.scala
new file mode 100644
index 0000000..4657898
--- /dev/null
+++ b/test/files/presentation/partial-fun/src/PartialFun.scala
@@ -0,0 +1,5 @@
+class A {
+  def foo {
+    val x: PartialFunction[Int, Int] = ({ case 0 => 0 })
+  }
+}
diff --git a/test/files/presentation/ping-pong.check b/test/files/presentation/ping-pong.check
index f714c1b..220bdf3 100644
--- a/test/files/presentation/ping-pong.check
+++ b/test/files/presentation/ping-pong.check
@@ -3,100 +3,88 @@ reload: PingPong.scala
 askTypeCompletion at PingPong.scala(10,23)
 ================================================================================
 [response] askTypeCompletion at (10,23)
-retrieved 40 members
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ->[B](y: B)(Pong, B)`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method ensuring(cond: Boolean)Pong`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)Pong`
-[accessible:  true] `method ensuring(cond: Pong => Boolean)Pong`
-[accessible:  true] `method ensuring(cond: Pong => Boolean, msg: => Any)Pong`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method poke()Unit`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method x=> Pong`
-[accessible:  true] `method →[B](y: B)(Pong, B)`
-[accessible:  true] `value __leftOfArrowPong`
-[accessible:  true] `value __resultOfEnsuringPong`
-[accessible:  true] `value nameString`
-[accessible:  true] `value selfAny`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
-[accessible: false] `value pingPing`
+retrieved 32 members
+[inaccessible] private[this] val ping: Ping
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (Pong, B)
+def ensuring(cond: Boolean): Pong
+def ensuring(cond: Boolean,msg: => Any): Pong
+def ensuring(cond: Pong => Boolean): Pong
+def ensuring(cond: Pong => Boolean,msg: => Any): Pong
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def poke(): Unit
+def →[B](y: B): (Pong, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+override def toString(): String
+private[this] val name: String
 ================================================================================
 
 askTypeCompletion at PingPong.scala(19,20)
 ================================================================================
 [response] askTypeCompletion at (19,20)
-retrieved 40 members
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ->[B](y: B)(Ping, B)`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method ensuring(cond: Boolean)Ping`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)Ping`
-[accessible:  true] `method ensuring(cond: Ping => Boolean)Ping`
-[accessible:  true] `method ensuring(cond: Ping => Boolean, msg: => Any)Ping`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method loop=> Unit`
-[accessible:  true] `method name=> String`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method poke=> Unit`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method x=> Ping`
-[accessible:  true] `method →[B](y: B)(Ping, B)`
-[accessible:  true] `value __leftOfArrowPing`
-[accessible:  true] `value __resultOfEnsuringPing`
-[accessible:  true] `value pongPong`
-[accessible:  true] `value selfAny`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
+retrieved 33 members
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (Ping, B)
+def ensuring(cond: Boolean): Ping
+def ensuring(cond: Boolean,msg: => Any): Ping
+def ensuring(cond: Ping => Boolean): Ping
+def ensuring(cond: Ping => Boolean,msg: => Any): Ping
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def loop: Unit
+def name: String
+def poke: Unit
+def →[B](y: B): (Ping, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+override def toString(): String
+private[this] val pong: Pong
 ================================================================================
 
 askType at PingPong.scala(8,10)
 ================================================================================
-[response] askTypeAt at (8,10)
+[response] askTypeAt (8,10)
 def loop: Unit = Ping.this.poke()
 ================================================================================
 
 askType at PingPong.scala(10,10)
 ================================================================================
-[response] askTypeAt at (10,10)
+[response] askTypeAt (10,10)
 def poke: Unit = Ping.this.pong.poke()
 ================================================================================
 
 askType at PingPong.scala(17,10)
 ================================================================================
-[response] askTypeAt at (17,10)
+[response] askTypeAt (17,10)
 private[this] val name: String = "pong"
 ================================================================================
diff --git a/test/files/presentation/ping-pong/src/PingPong.scala b/test/files/presentation/ping-pong/src/PingPong.scala
index 08bb4e3..94f52c4 100644
--- a/test/files/presentation/ping-pong/src/PingPong.scala
+++ b/test/files/presentation/ping-pong/src/PingPong.scala
@@ -2,21 +2,21 @@
 class Ping {
 
   val pong = new Pong(this)
-  
+
   def name = "ping"
 
-  def loop/*?*/ { poke() }  
-    
+  def loop/*?*/ { poke() }
+
   def poke/*?*/ { pong./*!*/poke() }
-  
-  override def toString = name 
+
+  override def toString = name
 }
 
 class Pong(ping: Ping) {
 
   val name/*?*/ = "pong"
-    
+
   def poke() { ping./*!*/poke() }
-  
+
   override def toString = name
 }
\ No newline at end of file
diff --git a/test/files/presentation/random.check b/test/files/presentation/random.check
index fce4b69..fb3500a 100644
--- a/test/files/presentation/random.check
+++ b/test/files/presentation/random.check
@@ -2,7 +2,7 @@ reload: Random.scala
 
 askType at Random.scala(18,14)
 ================================================================================
-[response] askTypeAt at (18,14)
+[response] askTypeAt (18,14)
 val filter: Int => Boolean = try {
   java.this.lang.Integer.parseInt(args.apply(0)) match {
     case 1 => ((x: Int) => x.%(2).!=(0))
@@ -16,12 +16,12 @@ val filter: Int => Boolean = try {
 
 askType at Random.scala(19,30)
 ================================================================================
-[response] askTypeAt at (19,30)
+[response] askTypeAt (19,30)
 0
 ================================================================================
 
 askType at Random.scala(26,12)
 ================================================================================
-[response] askTypeAt at (26,12)
+[response] askTypeAt (26,12)
 _
 ================================================================================
diff --git a/test/files/presentation/random/src/Random.scala b/test/files/presentation/random/src/Random.scala
index 4fff783..af76a28 100644
--- a/test/files/presentation/random/src/Random.scala
+++ b/test/files/presentation/random/src/Random.scala
@@ -4,16 +4,16 @@ import java.io._
 import java.net.{InetAddress,ServerSocket,Socket,SocketException}
 import java.util.Random
 
-/** 
- * Simple client/server application using Java sockets. 
- * 
- * The server simply generates random integer values and 
- * the clients provide a filter function to the server 
- * to get only values they interested in (eg. even or 
- * odd values, and so on). 
+/**
+ * Simple client/server application using Java sockets.
+ *
+ * The server simply generates random integer values and
+ * the clients provide a filter function to the server
+ * to get only values they interested in (eg. even or
+ * odd values, and so on).
  */
 object randomclient {
-     
+
   def main(args: Array[String]) {
     val filter/*?*/ = try {
       Integer.parseInt(args(0)/*?*/) match {
diff --git a/test/files/presentation/scope-completion-1.check b/test/files/presentation/scope-completion-1.check
index 93c6373..63f956b 100644
--- a/test/files/presentation/scope-completion-1.check
+++ b/test/files/presentation/scope-completion-1.check
@@ -4,16 +4,16 @@ askScopeCompletion at Completions.scala(6,2)
 ================================================================================
 [response] askScopeCompletion at (6,2)
 retrieved 3 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `constructor Completion1()test.Completion1`
-[accessible:  true] `object Completion2test.Completion2.type`
+class Completion1 extends AnyRef
+def <init>(): test.Completion1
+object Completion2
 ================================================================================
 
 askScopeCompletion at Completions.scala(10,2)
 ================================================================================
 [response] askScopeCompletion at (10,2)
 retrieved 3 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `constructor Completion2()test.Completion2.type`
-[accessible:  true] `object Completion2test.Completion2.type`
+class Completion1 extends AnyRef
+def <init>(): test.Completion2.type
+object Completion2
 ================================================================================
diff --git a/test/files/presentation/scope-completion-2.check b/test/files/presentation/scope-completion-2.check
index 462671d..d94f7a4 100644
--- a/test/files/presentation/scope-completion-2.check
+++ b/test/files/presentation/scope-completion-2.check
@@ -4,30 +4,30 @@ askScopeCompletion at Completions.scala(15,2)
 ================================================================================
 [response] askScopeCompletion at (15,2)
 retrieved 10 members
-[accessible:  true] `class Cc1Completion1.this.Cc1`
-[accessible:  true] `class Co1test.Completion1.Co1`
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `constructor Completion1()test.Completion1`
-[accessible:  true] `method fc1=> Int`
-[accessible:  true] `method fo1=> Int`
-[accessible:  true] `object Completion1test.Completion1.type`
-[accessible:  true] `value ctest.Completion1`
-[accessible:  true] `value vc1Int`
-[accessible:  true] `value vo1Int`
+class Completion1 extends AnyRef
+def <init>(): test.Completion1
+object Completion1
+private class Cc1 extends AnyRef
+private class Co1 extends AnyRef
+private def fc1: Int
+private def fo1: Int
+private[this] val c: test.Completion1
+private[this] val vc1: Int
+private[this] val vo1: Int
 ================================================================================
 
 askScopeCompletion at Completions.scala(29,2)
 ================================================================================
 [response] askScopeCompletion at (29,2)
 retrieved 10 members
-[accessible:  true] `class Cc1test.Completion1.c.Cc1`
-[accessible:  true] `class Co1test.Completion1.Co1`
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `constructor Completion1()test.Completion1.type`
-[accessible:  true] `method fc1=> Int`
-[accessible:  true] `method fo1=> Int`
-[accessible:  true] `object Completion1test.Completion1.type`
-[accessible:  true] `value ctest.Completion1`
-[accessible:  true] `value vc1Int`
-[accessible:  true] `value vo1Int`
+class Completion1 extends AnyRef
+def <init>(): test.Completion1.type
+object Completion1
+private class Cc1 extends AnyRef
+private class Co1 extends AnyRef
+private def fc1: Int
+private def fo1: Int
+private[this] val c: test.Completion1
+private[this] val vc1: Int
+private[this] val vo1: Int
 ================================================================================
diff --git a/test/files/presentation/scope-completion-3.check b/test/files/presentation/scope-completion-3.check
index 119fc1d..b70a7d5 100644
--- a/test/files/presentation/scope-completion-3.check
+++ b/test/files/presentation/scope-completion-3.check
@@ -3,109 +3,85 @@ reload: Completions.scala
 askScopeCompletion at Completions.scala(75,2)
 ================================================================================
 [response] askScopeCompletion at (75,2)
-retrieved 49 members
-[accessible:  true] `class Base1test.Base1`
-[accessible:  true] `class Cb1Completion1.this.Cb1`
-[accessible:  true] `class Cc1Completion1.this.Cc1`
-[accessible:  true] `class Cc2Completion1.this.Cc2`
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class Ct1Completion1.this.Ct1`
-[accessible:  true] `constructor Completion1()test.Completion1`
-[accessible:  true] `method fb1=> Int`
-[accessible:  true] `method fb3=> Int`
-[accessible:  true] `method fc1=> Int`
-[accessible:  true] `method fc2=> Int`
-[accessible:  true] `method ft1=> Int`
-[accessible:  true] `method ft3=> Int`
-[accessible:  true] `object Completion2test.Completion2.type`
-[accessible:  true] `object Ob1Completion1.this.Ob1.type`
-[accessible:  true] `object Oc1Completion1.this.Oc1.type`
-[accessible:  true] `object Oc2Completion1.this.Oc2.type`
-[accessible:  true] `object Ot1Completion1.this.Ot1.type`
-[accessible:  true] `trait Trait1test.Trait1`
-[accessible:  true] `type tb1Completion1.this.tb1`
-[accessible:  true] `type tb3Completion1.this.tb3`
-[accessible:  true] `type tc1Completion1.this.tc1`
-[accessible:  true] `type tc2Completion1.this.tc2`
-[accessible:  true] `type tt1Completion1.this.tt1`
-[accessible:  true] `type tt3Completion1.this.tt3`
-[accessible:  true] `value vb1Int`
-[accessible:  true] `value vb3Int`
-[accessible:  true] `value vc1Int`
-[accessible:  true] `value vc2Int`
-[accessible:  true] `value vt1Int`
-[accessible:  true] `value vt3Int`
-[accessible:  true] `variable rb1Int`
-[accessible:  true] `variable rb3Int`
-[accessible:  true] `variable rc1Int`
-[accessible:  true] `variable rc2Int`
-[accessible:  true] `variable rt1Int`
-[accessible:  true] `variable rt3Int`
-[accessible: false] `class Cb2Completion1.this.Cb2`
-[accessible: false] `class Ct2Completion1.this.Ct2`
-[accessible: false] `method fb2=> Int`
-[accessible: false] `method ft2=> Int`
-[accessible: false] `object Ob2Completion1.this.Ob2.type`
-[accessible: false] `object Ot2Completion1.this.Ot2.type`
-[accessible: false] `type tb2Completion1.this.tb2`
-[accessible: false] `type tt2Completion1.this.tt2`
-[accessible: false] `value vb2Int`
-[accessible: false] `value vt2Int`
-[accessible: false] `variable rb2Int`
-[accessible: false] `variable rt2Int`
+retrieved 37 members
+abstract class Base1 extends AnyRef
+abstract trait Trait1 extends AnyRef
+class Cb1 extends AnyRef
+class Cc1 extends AnyRef
+class Completion1 extends Base1 with Trait1
+class Ct1 extends AnyRef
+def <init>(): test.Completion1
+def fb1: Int
+def fc1: Int
+def ft1: Int
+object Completion2
+object Ob1
+object Oc1
+object Ot1
+override def fb3: Int
+override def ft3: Int
+override type tb3 = Completion1.this.tb3
+override type tt3 = Completion1.this.tt3
+private class Cc2 extends AnyRef
+private def fc2: Int
+private object Oc2
+private type tc2 = Completion1.this.tc2
+private[this] val vb1: Int
+private[this] val vb3: Int
+private[this] val vc1: Int
+private[this] val vc2: Int
+private[this] val vt1: Int
+private[this] val vt3: Int
+private[this] var rb1: Int
+private[this] var rb3: Int
+private[this] var rc1: Int
+private[this] var rc2: Int
+private[this] var rt1: Int
+private[this] var rt3: Int
+type tb1 = Completion1.this.tb1
+type tc1 = Completion1.this.tc1
+type tt1 = Completion1.this.tt1
 ================================================================================
 
 askScopeCompletion at Completions.scala(104,2)
 ================================================================================
 [response] askScopeCompletion at (104,2)
-retrieved 49 members
-[accessible:  true] `class Base1test.Base1`
-[accessible:  true] `class Cb1test.Completion2.Cb1`
-[accessible:  true] `class Co1test.Completion2.Co1`
-[accessible:  true] `class Co2test.Completion2.Co2`
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class Ct1test.Completion2.Ct1`
-[accessible:  true] `constructor Completion2()test.Completion2.type`
-[accessible:  true] `method fb1=> Int`
-[accessible:  true] `method fb3=> Int`
-[accessible:  true] `method fo1=> Int`
-[accessible:  true] `method fo2=> Int`
-[accessible:  true] `method ft1=> Int`
-[accessible:  true] `method ft3=> Int`
-[accessible:  true] `object Completion2test.Completion2.type`
-[accessible:  true] `object Ob1test.Completion2.Ob1.type`
-[accessible:  true] `object Oo1test.Completion2.Oo1.type`
-[accessible:  true] `object Oo2test.Completion2.Oo2.type`
-[accessible:  true] `object Ot1test.Completion2.Ot1.type`
-[accessible:  true] `trait Trait1test.Trait1`
-[accessible:  true] `type tb1test.Completion2.tb1`
-[accessible:  true] `type tb3test.Completion2.tb3`
-[accessible:  true] `type to1test.Completion2.to1`
-[accessible:  true] `type to2test.Completion2.to2`
-[accessible:  true] `type tt1test.Completion2.tt1`
-[accessible:  true] `type tt3test.Completion2.tt3`
-[accessible:  true] `value vb1Int`
-[accessible:  true] `value vb3Int`
-[accessible:  true] `value vo1Int`
-[accessible:  true] `value vo2Int`
-[accessible:  true] `value vt1Int`
-[accessible:  true] `value vt3Int`
-[accessible:  true] `variable rb1Int`
-[accessible:  true] `variable rb3Int`
-[accessible:  true] `variable ro1Int`
-[accessible:  true] `variable ro2Int`
-[accessible:  true] `variable rt1Int`
-[accessible:  true] `variable rt3Int`
-[accessible: false] `class Cb2test.Completion2.Cb2`
-[accessible: false] `class Ct2test.Completion2.Ct2`
-[accessible: false] `method fb2=> Int`
-[accessible: false] `method ft2=> Int`
-[accessible: false] `object Ob2test.Completion2.Ob2.type`
-[accessible: false] `object Ot2test.Completion2.Ot2.type`
-[accessible: false] `type tb2test.Completion2.tb2`
-[accessible: false] `type tt2test.Completion2.tt2`
-[accessible: false] `value vb2Int`
-[accessible: false] `value vt2Int`
-[accessible: false] `variable rb2Int`
-[accessible: false] `variable rt2Int`
+retrieved 37 members
+abstract class Base1 extends AnyRef
+abstract trait Trait1 extends AnyRef
+class Cb1 extends AnyRef
+class Co1 extends AnyRef
+class Completion1 extends Base1 with Trait1
+class Ct1 extends AnyRef
+def <init>(): test.Completion2.type
+def fb1: Int
+def fo1: Int
+def ft1: Int
+object Completion2
+object Ob1
+object Oo1
+object Ot1
+override def fb3: Int
+override def ft3: Int
+override type tb3 = test.Completion2.tb3
+override type tt3 = test.Completion2.tt3
+private class Co2 extends AnyRef
+private def fo2: Int
+private object Oo2
+private type to2 = test.Completion2.to2
+private[this] val vb1: Int
+private[this] val vb3: Int
+private[this] val vo1: Int
+private[this] val vo2: Int
+private[this] val vt1: Int
+private[this] val vt3: Int
+private[this] var rb1: Int
+private[this] var rb3: Int
+private[this] var ro1: Int
+private[this] var ro2: Int
+private[this] var rt1: Int
+private[this] var rt3: Int
+type tb1 = test.Completion2.tb1
+type to1 = test.Completion2.to1
+type tt1 = test.Completion2.tt1
 ================================================================================
diff --git a/test/files/presentation/scope-completion-4.check b/test/files/presentation/scope-completion-4.check
index f6241cf..59c4746 100644
--- a/test/files/presentation/scope-completion-4.check
+++ b/test/files/presentation/scope-completion-4.check
@@ -4,290 +4,290 @@ askScopeCompletion at Completions.scala(12,8)
 ================================================================================
 [response] askScopeCompletion at (12,8)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class fcfc`
-[accessible:  true] `class ffcffc`
-[accessible:  true] `constructor Completion1()test.Completion1`
-[accessible:  true] `method f=> Unit`
-[accessible:  true] `method ff=> Unit`
-[accessible:  true] `method fff=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class fc extends AnyRef
+class ffc extends AnyRef
+def <init>(): test.Completion1
+def f: Unit
+def ff: Unit
+def fff: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(15,6)
 ================================================================================
 [response] askScopeCompletion at (15,6)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class fcfc`
-[accessible:  true] `class ffcffc`
-[accessible:  true] `constructor Completion1()test.Completion1`
-[accessible:  true] `method f=> Unit`
-[accessible:  true] `method ff=> Unit`
-[accessible:  true] `method fff=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class fc extends AnyRef
+class ffc extends AnyRef
+def <init>(): test.Completion1
+def f: Unit
+def ff: Unit
+def fff: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(18,8)
 ================================================================================
 [response] askScopeCompletion at (18,8)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class fcfc`
-[accessible:  true] `class ffcffc`
-[accessible:  true] `constructor ffc()ffc`
-[accessible:  true] `method f=> Unit`
-[accessible:  true] `method ff=> Unit`
-[accessible:  true] `method fff=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class fc extends AnyRef
+class ffc extends AnyRef
+def <init>(): ffc
+def f: Unit
+def ff: Unit
+def fff: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(21,6)
 ================================================================================
 [response] askScopeCompletion at (21,6)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class fcfc`
-[accessible:  true] `class ffcffc`
-[accessible:  true] `constructor Completion1()test.Completion1`
-[accessible:  true] `method f=> Unit`
-[accessible:  true] `method ff=> Unit`
-[accessible:  true] `method fff=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class fc extends AnyRef
+class ffc extends AnyRef
+def <init>(): test.Completion1
+def f: Unit
+def ff: Unit
+def fff: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(24,4)
 ================================================================================
 [response] askScopeCompletion at (24,4)
 retrieved 6 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class fcfc`
-[accessible:  true] `constructor Completion1()test.Completion1`
-[accessible:  true] `method f=> Unit`
-[accessible:  true] `method ff=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class fc extends AnyRef
+def <init>(): test.Completion1
+def f: Unit
+def ff: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(29,8)
 ================================================================================
 [response] askScopeCompletion at (29,8)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class fccfc.this.fcc`
-[accessible:  true] `class fcfc`
-[accessible:  true] `constructor fc()fc`
-[accessible:  true] `method f=> Unit`
-[accessible:  true] `method fcf=> Unit`
-[accessible:  true] `method ff=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class fc extends AnyRef
+class fcc extends AnyRef
+def <init>(): fc
+def f: Unit
+def fcf: Unit
+def ff: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(32,6)
 ================================================================================
 [response] askScopeCompletion at (32,6)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class fccfc.this.fcc`
-[accessible:  true] `class fcfc`
-[accessible:  true] `constructor fc()fc`
-[accessible:  true] `method f=> Unit`
-[accessible:  true] `method fcf=> Unit`
-[accessible:  true] `method ff=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class fc extends AnyRef
+class fcc extends AnyRef
+def <init>(): fc
+def f: Unit
+def fcf: Unit
+def ff: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(35,8)
 ================================================================================
 [response] askScopeCompletion at (35,8)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class fccfc.this.fcc`
-[accessible:  true] `class fcfc`
-[accessible:  true] `constructor fcc()fc.this.fcc`
-[accessible:  true] `method f=> Unit`
-[accessible:  true] `method fcf=> Unit`
-[accessible:  true] `method ff=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class fc extends AnyRef
+class fcc extends AnyRef
+def <init>(): fc.this.fcc
+def f: Unit
+def fcf: Unit
+def ff: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(38,6)
 ================================================================================
 [response] askScopeCompletion at (38,6)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class fccfc.this.fcc`
-[accessible:  true] `class fcfc`
-[accessible:  true] `constructor fc()fc`
-[accessible:  true] `method f=> Unit`
-[accessible:  true] `method fcf=> Unit`
-[accessible:  true] `method ff=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class fc extends AnyRef
+class fcc extends AnyRef
+def <init>(): fc
+def f: Unit
+def fcf: Unit
+def ff: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(41,4)
 ================================================================================
 [response] askScopeCompletion at (41,4)
 retrieved 6 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class fcfc`
-[accessible:  true] `constructor Completion1()test.Completion1`
-[accessible:  true] `method f=> Unit`
-[accessible:  true] `method ff=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class fc extends AnyRef
+def <init>(): test.Completion1
+def f: Unit
+def ff: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(44,2)
 ================================================================================
 [response] askScopeCompletion at (44,2)
 retrieved 4 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `constructor Completion1()test.Completion1`
-[accessible:  true] `method f=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+def <init>(): test.Completion1
+def f: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(51,8)
 ================================================================================
 [response] askScopeCompletion at (51,8)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class ccc.this.cc`
-[accessible:  true] `class ccccc.this.ccc`
-[accessible:  true] `constructor ccc()cc.this.ccc`
-[accessible:  true] `method ccf=> Unit`
-[accessible:  true] `method cf=> Unit`
-[accessible:  true] `method f=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class cc extends AnyRef
+class ccc extends AnyRef
+def <init>(): cc.this.ccc
+def ccf: Unit
+def cf: Unit
+def f: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(54,6)
 ================================================================================
 [response] askScopeCompletion at (54,6)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class ccc.this.cc`
-[accessible:  true] `class ccccc.this.ccc`
-[accessible:  true] `constructor cc()c.this.cc`
-[accessible:  true] `method ccf=> Unit`
-[accessible:  true] `method cf=> Unit`
-[accessible:  true] `method f=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class cc extends AnyRef
+class ccc extends AnyRef
+def <init>(): c.this.cc
+def ccf: Unit
+def cf: Unit
+def f: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(57,8)
 ================================================================================
 [response] askScopeCompletion at (57,8)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class ccc.this.cc`
-[accessible:  true] `class ccccc.this.ccc`
-[accessible:  true] `constructor cc()c.this.cc`
-[accessible:  true] `method ccf=> Unit`
-[accessible:  true] `method cf=> Unit`
-[accessible:  true] `method f=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class cc extends AnyRef
+class ccc extends AnyRef
+def <init>(): c.this.cc
+def ccf: Unit
+def cf: Unit
+def f: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(60,6)
 ================================================================================
 [response] askScopeCompletion at (60,6)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class ccc.this.cc`
-[accessible:  true] `class ccccc.this.ccc`
-[accessible:  true] `constructor cc()c.this.cc`
-[accessible:  true] `method ccf=> Unit`
-[accessible:  true] `method cf=> Unit`
-[accessible:  true] `method f=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class cc extends AnyRef
+class ccc extends AnyRef
+def <init>(): c.this.cc
+def ccf: Unit
+def cf: Unit
+def f: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(63,4)
 ================================================================================
 [response] askScopeCompletion at (63,4)
 retrieved 6 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class ccc.this.cc`
-[accessible:  true] `constructor c()Completion1.this.c`
-[accessible:  true] `method cf=> Unit`
-[accessible:  true] `method f=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class cc extends AnyRef
+def <init>(): Completion1.this.c
+def cf: Unit
+def f: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(68,8)
 ================================================================================
 [response] askScopeCompletion at (68,8)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class ccc.this.cc`
-[accessible:  true] `class cfccfc`
-[accessible:  true] `constructor cfc()cfc`
-[accessible:  true] `method cf=> Unit`
-[accessible:  true] `method cff=> Unit`
-[accessible:  true] `method f=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class cc extends AnyRef
+class cfc extends AnyRef
+def <init>(): cfc
+def cf: Unit
+def cff: Unit
+def f: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(71,6)
 ================================================================================
 [response] askScopeCompletion at (71,6)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class ccc.this.cc`
-[accessible:  true] `class cfccfc`
-[accessible:  true] `constructor c()Completion1.this.c`
-[accessible:  true] `method cf=> Unit`
-[accessible:  true] `method cff=> Unit`
-[accessible:  true] `method f=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class cc extends AnyRef
+class cfc extends AnyRef
+def <init>(): Completion1.this.c
+def cf: Unit
+def cff: Unit
+def f: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(74,8)
 ================================================================================
 [response] askScopeCompletion at (74,8)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class ccc.this.cc`
-[accessible:  true] `class cfccfc`
-[accessible:  true] `constructor c()Completion1.this.c`
-[accessible:  true] `method cf=> Unit`
-[accessible:  true] `method cff=> Unit`
-[accessible:  true] `method f=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class cc extends AnyRef
+class cfc extends AnyRef
+def <init>(): Completion1.this.c
+def cf: Unit
+def cff: Unit
+def f: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(77,6)
 ================================================================================
 [response] askScopeCompletion at (77,6)
 retrieved 8 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class ccc.this.cc`
-[accessible:  true] `class cfccfc`
-[accessible:  true] `constructor c()Completion1.this.c`
-[accessible:  true] `method cf=> Unit`
-[accessible:  true] `method cff=> Unit`
-[accessible:  true] `method f=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class cc extends AnyRef
+class cfc extends AnyRef
+def <init>(): Completion1.this.c
+def cf: Unit
+def cff: Unit
+def f: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(80,4)
 ================================================================================
 [response] askScopeCompletion at (80,4)
 retrieved 6 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `class ccc.this.cc`
-[accessible:  true] `constructor c()Completion1.this.c`
-[accessible:  true] `method cf=> Unit`
-[accessible:  true] `method f=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+class cc extends AnyRef
+def <init>(): Completion1.this.c
+def cf: Unit
+def f: Unit
 ================================================================================
 
 askScopeCompletion at Completions.scala(83,2)
 ================================================================================
 [response] askScopeCompletion at (83,2)
 retrieved 4 members
-[accessible:  true] `class Completion1test.Completion1`
-[accessible:  true] `class cCompletion1.this.c`
-[accessible:  true] `constructor Completion1()test.Completion1`
-[accessible:  true] `method f=> Unit`
+class Completion1 extends AnyRef
+class c extends AnyRef
+def <init>(): test.Completion1
+def f: Unit
 ================================================================================
diff --git a/test/files/presentation/scope-completion-import.check b/test/files/presentation/scope-completion-import.check
index 33b498c..50197e5 100644
--- a/test/files/presentation/scope-completion-import.check
+++ b/test/files/presentation/scope-completion-import.check
@@ -3,191 +3,183 @@ reload: Completions.scala
 askScopeCompletion at Completions.scala(23,4)
 ================================================================================
 [response] askScopeCompletion at (23,4)
-retrieved 18 members
-[accessible:  true] `class Ctest.C`
-[accessible:  true] `class Foo_1test.Foo_1`
-[accessible:  true] `class Foo_2test.Foo_2`
-[accessible:  true] `class Foo_3test.Foo_3`
-[accessible:  true] `class Footest.Foo`
-[accessible:  true] `constructor Foo()test.Foo`
-[accessible:  true] `method fCCC=> Int`
-[accessible:  true] `method fOOO=> Int`
-[accessible:  true] `object Otest.O.type`
-[accessible:  true] `value otest.O.type`
-[accessible:  true] `value vCCCInt`
-[accessible:  true] `value vOOOInt`
-[accessible:  true] `variable rCCCInt`
-[accessible:  true] `variable rOOOInt`
-[accessible: false] `value pVCCCInt`
-[accessible: false] `value pVOOOInt`
-[accessible: false] `variable pRCCCInt`
-[accessible: false] `variable pROOOInt`
+retrieved 16 members
+[inaccessible] private[this] val pVOOO: Int
+[inaccessible] private[this] var pROOO: Int
+class C extends AnyRef
+class Foo extends AnyRef
+class Foo_1 extends AnyRef
+class Foo_2 extends AnyRef
+class Foo_3 extends AnyRef
+def <init>(): test.Foo
+def fCCC: Int
+def fOOO: Int
+object O
+private[this] val vCCC: Int
+private[this] val vOOO: Int
+private[this] var rCCC: Int
+private[this] var rOOO: Int
+val o: test.O.type
 ================================================================================
 
 askScopeCompletion at Completions.scala(27,4)
 ================================================================================
 [response] askScopeCompletion at (27,4)
-retrieved 17 members
-[accessible:  true] `class Ctest.C`
-[accessible:  true] `class Foo_1test.Foo_1`
-[accessible:  true] `class Foo_2test.Foo_2`
-[accessible:  true] `class Foo_3test.Foo_3`
-[accessible:  true] `class Footest.Foo`
-[accessible:  true] `constructor Foo()test.Foo`
-[accessible:  true] `method fCCC=> Int`
-[accessible:  true] `method fOOO=> Int`
-[accessible:  true] `object Otest.O.type`
-[accessible:  true] `value vCCCInt`
-[accessible:  true] `value vOOOInt`
-[accessible:  true] `variable rCCCInt`
-[accessible:  true] `variable rOOOInt`
-[accessible: false] `value pVCCCInt`
-[accessible: false] `value pVOOOInt`
-[accessible: false] `variable pRCCCInt`
-[accessible: false] `variable pROOOInt`
+retrieved 15 members
+[inaccessible] private[this] val pVOOO: Int
+[inaccessible] private[this] var pROOO: Int
+class C extends AnyRef
+class Foo extends AnyRef
+class Foo_1 extends AnyRef
+class Foo_2 extends AnyRef
+class Foo_3 extends AnyRef
+def <init>(): test.Foo
+def fCCC: Int
+def fOOO: Int
+object O
+private[this] val vCCC: Int
+private[this] val vOOO: Int
+private[this] var rCCC: Int
+private[this] var rOOO: Int
 ================================================================================
 
 askScopeCompletion at Completions.scala(32,4)
 ================================================================================
 [response] askScopeCompletion at (32,4)
 retrieved 13 members
-[accessible:  true] `class Ctest.C`
-[accessible:  true] `class Foo_1test.Foo_1`
-[accessible:  true] `class Foo_2test.Foo_2`
-[accessible:  true] `class Foo_3test.Foo_3`
-[accessible:  true] `class Footest.Foo`
-[accessible:  true] `constructor Foo()test.Foo`
-[accessible:  true] `method fCCC=> Int`
-[accessible:  true] `object Otest.O.type`
-[accessible:  true] `value ctest.C`
-[accessible:  true] `value vCCCInt`
-[accessible:  true] `variable rCCCInt`
-[accessible: false] `value pVCCCInt`
-[accessible: false] `variable pRCCCInt`
+[inaccessible] private[this] val pVCCC: Int
+[inaccessible] private[this] var pRCCC: Int
+class C extends AnyRef
+class Foo extends AnyRef
+class Foo_1 extends AnyRef
+class Foo_2 extends AnyRef
+class Foo_3 extends AnyRef
+def <init>(): test.Foo
+def fCCC: Int
+object O
+private[this] val vCCC: Int
+private[this] var rCCC: Int
+val c: test.C
 ================================================================================
 
 askScopeCompletion at Completions.scala(35,5)
 ================================================================================
 [response] askScopeCompletion at (35,5)
 retrieved 8 members
-[accessible:  true] `class Ctest.C`
-[accessible:  true] `class Foo_1test.Foo_1`
-[accessible:  true] `class Foo_2test.Foo_2`
-[accessible:  true] `class Foo_3test.Foo_3`
-[accessible:  true] `class Footest.Foo`
-[accessible:  true] `constructor Foo()test.Foo`
-[accessible:  true] `object Otest.O.type`
-[accessible:  true] `value ctest.C`
+class C extends AnyRef
+class Foo extends AnyRef
+class Foo_1 extends AnyRef
+class Foo_2 extends AnyRef
+class Foo_3 extends AnyRef
+def <init>(): test.Foo
+object O
+val c: test.C
 ================================================================================
 
 askScopeCompletion at Completions.scala(38,5)
 ================================================================================
 [response] askScopeCompletion at (38,5)
 retrieved 13 members
-[accessible:  true] `class Ctest.C`
-[accessible:  true] `class Foo_1test.Foo_1`
-[accessible:  true] `class Foo_2test.Foo_2`
-[accessible:  true] `class Foo_3test.Foo_3`
-[accessible:  true] `class Footest.Foo`
-[accessible:  true] `constructor Foo()test.Foo`
-[accessible:  true] `method fCCC=> Int`
-[accessible:  true] `object Otest.O.type`
-[accessible:  true] `value ctest.C`
-[accessible:  true] `value vCCCInt`
-[accessible:  true] `variable rCCCInt`
-[accessible: false] `value pVCCCInt`
-[accessible: false] `variable pRCCCInt`
+[inaccessible] private[this] val pVCCC: Int
+[inaccessible] private[this] var pRCCC: Int
+class C extends AnyRef
+class Foo extends AnyRef
+class Foo_1 extends AnyRef
+class Foo_2 extends AnyRef
+class Foo_3 extends AnyRef
+def <init>(): test.Foo
+def fCCC: Int
+object O
+private[this] val vCCC: Int
+private[this] var rCCC: Int
+val c: test.C
 ================================================================================
 
 askScopeCompletion at Completions.scala(40,5)
 ================================================================================
 [response] askScopeCompletion at (40,5)
 retrieved 18 members
-[accessible:  true] `class Ctest.C`
-[accessible:  true] `class Foo_1test.Foo_1`
-[accessible:  true] `class Foo_2test.Foo_2`
-[accessible:  true] `class Foo_3test.Foo_3`
-[accessible:  true] `class Footest.Foo`
-[accessible:  true] `constructor Foo()test.Foo`
-[accessible:  true] `method fCCC=> Int`
-[accessible:  true] `method fOOO=> Int`
-[accessible:  true] `object Otest.O.type`
-[accessible:  true] `value ctest.C`
-[accessible:  true] `value vCCCInt`
-[accessible:  true] `value vOOOInt`
-[accessible:  true] `variable rCCCInt`
-[accessible:  true] `variable rOOOInt`
-[accessible: false] `value pVCCCInt`
-[accessible: false] `value pVOOOInt`
-[accessible: false] `variable pRCCCInt`
-[accessible: false] `variable pROOOInt`
+[inaccessible] private[this] val pVCCC: Int
+[inaccessible] private[this] val pVOOO: Int
+[inaccessible] private[this] var pRCCC: Int
+[inaccessible] private[this] var pROOO: Int
+class C extends AnyRef
+class Foo extends AnyRef
+class Foo_1 extends AnyRef
+class Foo_2 extends AnyRef
+class Foo_3 extends AnyRef
+def <init>(): test.Foo
+def fCCC: Int
+def fOOO: Int
+object O
+private[this] val vCCC: Int
+private[this] val vOOO: Int
+private[this] var rCCC: Int
+private[this] var rOOO: Int
+val c: test.C
 ================================================================================
 
 askScopeCompletion at Completions.scala(49,4)
 ================================================================================
 [response] askScopeCompletion at (49,4)
-retrieved 18 members
-[accessible:  true] `class Ctest.C`
-[accessible:  true] `class Foo_1test.Foo_1`
-[accessible:  true] `class Foo_2test.Foo_2`
-[accessible:  true] `class Foo_3test.Foo_3`
-[accessible:  true] `class Footest.Foo`
-[accessible:  true] `constructor Foo_1()test.Foo_1`
-[accessible:  true] `method bar=> Unit`
-[accessible:  true] `method fCCC=> Int`
-[accessible:  true] `method fOOO=> Int`
-[accessible:  true] `object Otest.O.type`
-[accessible:  true] `value vCCCInt`
-[accessible:  true] `value vOOOInt`
-[accessible:  true] `variable rCCCInt`
-[accessible:  true] `variable rOOOInt`
-[accessible: false] `value pVCCCInt`
-[accessible: false] `value pVOOOInt`
-[accessible: false] `variable pRCCCInt`
-[accessible: false] `variable pROOOInt`
+retrieved 16 members
+[inaccessible] private[this] val pVOOO: Int
+[inaccessible] private[this] var pROOO: Int
+class C extends AnyRef
+class Foo extends AnyRef
+class Foo_1 extends AnyRef
+class Foo_2 extends AnyRef
+class Foo_3 extends AnyRef
+def <init>(): test.Foo_1
+def bar: Unit
+def fCCC: Int
+def fOOO: Int
+object O
+private[this] val vCCC: Int
+private[this] val vOOO: Int
+private[this] var rCCC: Int
+private[this] var rOOO: Int
 ================================================================================
 
 askScopeCompletion at Completions.scala(59,4)
 ================================================================================
 [response] askScopeCompletion at (59,4)
-retrieved 19 members
-[accessible:  true] `class Ctest.C`
-[accessible:  true] `class Foo_1test.Foo_1`
-[accessible:  true] `class Foo_2test.Foo_2`
-[accessible:  true] `class Foo_3test.Foo_3`
-[accessible:  true] `class Footest.Foo`
-[accessible:  true] `constructor Foo_2()test.Foo_2`
-[accessible:  true] `method bar=> Unit`
-[accessible:  true] `method fCCC=> Int`
-[accessible:  true] `method fOOO=> Int`
-[accessible:  true] `object Otest.O.type`
-[accessible:  true] `value otest.O.type`
-[accessible:  true] `value vCCCInt`
-[accessible:  true] `value vOOOInt`
-[accessible:  true] `variable rCCCInt`
-[accessible:  true] `variable rOOOInt`
-[accessible: false] `value pVCCCInt`
-[accessible: false] `value pVOOOInt`
-[accessible: false] `variable pRCCCInt`
-[accessible: false] `variable pROOOInt`
+retrieved 17 members
+[inaccessible] private[this] val pVOOO: Int
+[inaccessible] private[this] var pROOO: Int
+class C extends AnyRef
+class Foo extends AnyRef
+class Foo_1 extends AnyRef
+class Foo_2 extends AnyRef
+class Foo_3 extends AnyRef
+def <init>(): test.Foo_2
+def bar: Unit
+def fCCC: Int
+def fOOO: Int
+object O
+private[this] val o: test.O.type
+private[this] val vCCC: Int
+private[this] val vOOO: Int
+private[this] var rCCC: Int
+private[this] var rOOO: Int
 ================================================================================
 
 askScopeCompletion at Completions.scala(69,4)
 ================================================================================
 [response] askScopeCompletion at (69,4)
 retrieved 14 members
-[accessible:  true] `class Ctest.C`
-[accessible:  true] `class Foo_1test.Foo_1`
-[accessible:  true] `class Foo_2test.Foo_2`
-[accessible:  true] `class Foo_3test.Foo_3`
-[accessible:  true] `class Footest.Foo`
-[accessible:  true] `constructor Foo_3()test.Foo_3`
-[accessible:  true] `method bar=> Unit`
-[accessible:  true] `method fCCC=> Int`
-[accessible:  true] `object Otest.O.type`
-[accessible:  true] `value ctest.C`
-[accessible:  true] `value vCCCInt`
-[accessible:  true] `variable rCCCInt`
-[accessible: false] `value pVCCCInt`
-[accessible: false] `variable pRCCCInt`
+[inaccessible] private[this] val pVCCC: Int
+[inaccessible] private[this] var pRCCC: Int
+class C extends AnyRef
+class Foo extends AnyRef
+class Foo_1 extends AnyRef
+class Foo_2 extends AnyRef
+class Foo_3 extends AnyRef
+def <init>(): test.Foo_3
+def bar: Unit
+def fCCC: Int
+object O
+private[this] val c: test.C
+private[this] val vCCC: Int
+private[this] var rCCC: Int
 ================================================================================
diff --git a/test/files/presentation/t1207.check b/test/files/presentation/t1207.check
new file mode 100644
index 0000000..0eed4ec
--- /dev/null
+++ b/test/files/presentation/t1207.check
@@ -0,0 +1,53 @@
+reload: Completions.scala
+
+askTypeCompletion at Completions.scala(10,15)
+================================================================================
+[response] askTypeCompletion at (10,15)
+retrieved 3 members
+final package bongo
+final package lang
+final package util
+================================================================================
+
+askTypeCompletion at Completions.scala(11,16)
+================================================================================
+[response] askTypeCompletion at (11,16)
+retrieved 3 members
+final package bongo
+final package lang
+final package util
+================================================================================
+
+askTypeCompletion at Completions.scala(12,19)
+================================================================================
+[response] askTypeCompletion at (12,19)
+retrieved 3 members
+final package bongo
+final package lang
+final package util
+================================================================================
+
+askTypeCompletion at Completions.scala(13,19)
+================================================================================
+[response] askTypeCompletion at (13,19)
+retrieved 3 members
+final package bongo
+final package lang
+final package util
+================================================================================
+
+askTypeCompletion at Completions.scala(14,23)
+================================================================================
+[response] askTypeCompletion at (14,23)
+retrieved 3 members
+final package bongo
+final package lang
+final package util
+================================================================================
+
+askTypeCompletion at Completions.scala(15,10)
+================================================================================
+[response] askTypeCompletion at (15,10)
+retrieved 0 members
+
+================================================================================
diff --git a/test/files/presentation/t1207/Test.scala b/test/files/presentation/t1207/Test.scala
new file mode 100644
index 0000000..bec1131
--- /dev/null
+++ b/test/files/presentation/t1207/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/t1207/src/Completions.scala b/test/files/presentation/t1207/src/Completions.scala
new file mode 100644
index 0000000..804d4fd
--- /dev/null
+++ b/test/files/presentation/t1207/src/Completions.scala
@@ -0,0 +1,20 @@
+package other {
+  package bongo { }
+  package lang { }
+  package util {
+    package boogly
+  }
+}
+
+package ticket_1001207 {
+  import other./*!*/
+  import other.u/*!*/
+  import other.uti /*!*/
+  import other.util/*!*/
+  import other.{lang, u/*!*/}
+  import j/*!*/
+
+  class T1207 {
+
+  }
+}
diff --git a/test/files/presentation/t4287.check b/test/files/presentation/t4287.check
new file mode 100644
index 0000000..a922421
--- /dev/null
+++ b/test/files/presentation/t4287.check
@@ -0,0 +1,11 @@
+reload: Foo.scala
+
+askHyperlinkPos for `B` at (1,24) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `B` at (3,8) Foo.scala
+================================================================================
+
+askHyperlinkPos for `a` at (1,31) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `a` at (4,7) Foo.scala
+================================================================================
diff --git a/test/files/presentation/t4287/Test.scala b/test/files/presentation/t4287/Test.scala
new file mode 100644
index 0000000..bec1131
--- /dev/null
+++ b/test/files/presentation/t4287/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/t4287/src/Foo.scala b/test/files/presentation/t4287/src/Foo.scala
new file mode 100644
index 0000000..a744eaa
--- /dev/null
+++ b/test/files/presentation/t4287/src/Foo.scala
@@ -0,0 +1,5 @@
+class Baz(val f: Int = B/*#*/.a/*#*/)
+
+object B {
+  val a = 2
+}
diff --git a/test/files/presentation/t4287b.check b/test/files/presentation/t4287b.check
new file mode 100644
index 0000000..d4b3365
--- /dev/null
+++ b/test/files/presentation/t4287b.check
@@ -0,0 +1,6 @@
+reload: Foo.scala
+
+askHyperlinkPos for `i` at (14,11) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `i` at (10,9) Foo.scala
+================================================================================
diff --git a/test/files/presentation/t4287b/Test.scala b/test/files/presentation/t4287b/Test.scala
new file mode 100644
index 0000000..bec1131
--- /dev/null
+++ b/test/files/presentation/t4287b/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/t4287b/src/Foo.scala b/test/files/presentation/t4287b/src/Foo.scala
new file mode 100644
index 0000000..47c676e
--- /dev/null
+++ b/test/files/presentation/t4287b/src/Foo.scala
@@ -0,0 +1,15 @@
+trait Greeting {
+  val name: String
+  val msg = "How are you, "+name
+}
+
+object Greeting {
+  val hello = "hello"
+}
+
+class C(i: Int) extends {
+  val nameElse = "Bob"
+} with Greeting {
+  val name = "avc"
+  println(i/*#*/)
+}
\ No newline at end of file
diff --git a/test/files/presentation/t4287c.check b/test/files/presentation/t4287c.check
new file mode 100644
index 0000000..42fc309
--- /dev/null
+++ b/test/files/presentation/t4287c.check
@@ -0,0 +1,11 @@
+reload: Foo.scala
+
+askHyperlinkPos for `A` at (1,18) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `A` at (3,8) Foo.scala
+================================================================================
+
+askHyperlinkPos for `a` at (1,25) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `a` at (4,7) Foo.scala
+================================================================================
diff --git a/test/files/presentation/t4287c.flags b/test/files/presentation/t4287c.flags
new file mode 100644
index 0000000..d1a8244
--- /dev/null
+++ b/test/files/presentation/t4287c.flags
@@ -0,0 +1 @@
+-Yinfer-argument-types
\ No newline at end of file
diff --git a/test/files/presentation/t4287c/Test.scala b/test/files/presentation/t4287c/Test.scala
new file mode 100644
index 0000000..bec1131
--- /dev/null
+++ b/test/files/presentation/t4287c/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/t4287c/src/Foo.scala b/test/files/presentation/t4287c/src/Foo.scala
new file mode 100644
index 0000000..26870b5
--- /dev/null
+++ b/test/files/presentation/t4287c/src/Foo.scala
@@ -0,0 +1,9 @@
+class A(a: Int = A/*#*/.a/*#*/)
+
+object A {
+  val a = 2
+}
+
+class B extends A {
+ def this(a) = this()
+}
\ No newline at end of file
diff --git a/test/files/presentation/t5708.check b/test/files/presentation/t5708.check
index fe43f83..4b33893 100644
--- a/test/files/presentation/t5708.check
+++ b/test/files/presentation/t5708.check
@@ -3,45 +3,39 @@ reload: Completions.scala
 askTypeCompletion at Completions.scala(17,9)
 ================================================================================
 [response] askTypeCompletion at (17,9)
-retrieved 44 members
-[accessible:  true] `lazy value fooInt`
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ->[B](y: B)(test.Compat.type, B)`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method ensuring(cond: Boolean)test.Compat.type`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)test.Compat.type`
-[accessible:  true] `method ensuring(cond: test.Compat.type => Boolean)test.Compat.type`
-[accessible:  true] `method ensuring(cond: test.Compat.type => Boolean, msg: => Any)test.Compat.type`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method pkgPrivateM=> String`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method x=> test.Compat.type`
-[accessible:  true] `method →[B](y: B)(test.Compat.type, B)`
-[accessible:  true] `value CONST_STRINGString("constant")`
-[accessible:  true] `value __leftOfArrowtest.Compat.type`
-[accessible:  true] `value __resultOfEnsuringtest.Compat.type`
-[accessible:  true] `value pkgPrivateVString`
-[accessible:  true] `value selfAny`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
-[accessible: false] `method privateM=> String`
-[accessible: false] `method protectedValM=> String`
-[accessible: false] `value privateVString`
-[accessible: false] `value protectedVString`
+retrieved 37 members
+[inaccessible] private def privateM: String
+[inaccessible] private[this] val privateV: String
+[inaccessible] private[this] val protectedV: String
+[inaccessible] protected def protectedValM: String
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (test.Compat.type, B)
+def ensuring(cond: Boolean): test.Compat.type
+def ensuring(cond: Boolean,msg: => Any): test.Compat.type
+def ensuring(cond: test.Compat.type => Boolean): test.Compat.type
+def ensuring(cond: test.Compat.type => Boolean,msg: => Any): test.Compat.type
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def toString(): String
+def →[B](y: B): (test.Compat.type, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+final private[this] val CONST_STRING: String("constant")
+lazy private[this] var foo: Int
+private[package test] def pkgPrivateM: String
+private[this] val pkgPrivateV: String
 ================================================================================
diff --git a/test/files/presentation/t7678.check b/test/files/presentation/t7678.check
new file mode 100644
index 0000000..f06434b
--- /dev/null
+++ b/test/files/presentation/t7678.check
@@ -0,0 +1 @@
+reload: TypeTag.scala
diff --git a/test/files/presentation/t7678/Runner.scala b/test/files/presentation/t7678/Runner.scala
new file mode 100644
index 0000000..14d6dc2
--- /dev/null
+++ b/test/files/presentation/t7678/Runner.scala
@@ -0,0 +1,62 @@
+import scala.tools.nsc.interactive.tests._
+import scala.reflect.internal.util._
+
+object Test extends InteractiveTest {
+
+  import compiler._, definitions._
+
+  override def runDefaultTests() {
+    def resolveTypeTagHyperlink() {
+      val sym = compiler.askForResponse(() => compiler.currentRun.runDefinitions.TypeTagClass).get.left.get
+      val r = new Response[Position]
+      compiler.askLinkPos(sym, new BatchSourceFile("", source), r)
+      r.get
+    }
+
+    def checkTypeTagSymbolConsistent() {
+      compiler.askForResponse {
+        () => {
+          val runDefinitions = currentRun.runDefinitions
+          import runDefinitions._
+          assert(TypeTagsClass.map(sym => getMemberClass(sym, tpnme.TypeTag)) == TypeTagClass)
+          assert(TypeTagsClass.map(sym => getMemberClass(sym, tpnme.WeakTypeTag)) == WeakTypeTagClass)
+          assert(TypeTagsClass.map(sym => getMemberModule(sym, nme.WeakTypeTag)) == WeakTypeTagModule)
+          assert(getMemberMethod(ReflectPackage, nme.materializeClassTag) == materializeClassTag)
+          assert(ReflectApiPackage.map(sym => getMemberMethod(sym, nme.materializeWeakTypeTag)) == materializeWeakTypeTag)
+          assert(ReflectApiPackage.map(sym => getMemberMethod(sym, nme.materializeTypeTag)) == materializeTypeTag)
+          ()
+        }
+      }.get match {
+        case Right(t) => t.printStackTrace
+        case Left(_) =>
+      }
+    }
+    resolveTypeTagHyperlink()
+    // The presentation compiler loads TypeTags from source; we'll get new symbols for its members.
+    // Need to make sure we didn't cache the old ones in Definitions.
+    checkTypeTagSymbolConsistent()
+  }
+
+  def source =
+    """
+      |package scala
+      |package reflect
+      |package api
+      |
+      |trait TypeTags { self: Universe =>
+      |  import definitions._
+      |
+      |  @annotation.implicitNotFound(msg = "No WeakTypeTag available for ${T}")
+      |  trait WeakTypeTag[T] extends Equals with Serializable {
+      |    val mirror: Mirror
+      |    def in[U <: Universe with Singleton](otherMirror: scala.reflect.api.Mirror[U]): U # WeakTypeTag[T]
+      |    def tpe: Type
+      |  }
+      |  object WeakTypeTag
+      |
+      |  trait TypeTag[T] extends WeakTypeTag[T] with Equals with Serializable {
+      |  }
+      |  object TypeTag
+      |
+    """.stripMargin
+}
diff --git a/test/files/presentation/t7678/src/TypeTag.scala b/test/files/presentation/t7678/src/TypeTag.scala
new file mode 100644
index 0000000..0b222f8
--- /dev/null
+++ b/test/files/presentation/t7678/src/TypeTag.scala
@@ -0,0 +1,9 @@
+package test
+
+object Test {
+  import scala.reflect.runtime.{ universe => ru }
+  def getTypeTag(implicit tag: ru.TypeTag[Int]  ) = ()
+  locally {
+    getTypeTag/*?*/
+  }
+}
diff --git a/test/files/presentation/t8085.check b/test/files/presentation/t8085.check
index 79c1b2a..921ca75 100644
--- a/test/files/presentation/t8085.check
+++ b/test/files/presentation/t8085.check
@@ -1,3 +1,3 @@
 reload: NodeScalaSuite.scala
-open package module: package nodescala
+open package module: package object nodescala
 Test OK
diff --git a/test/files/presentation/t8085b.check b/test/files/presentation/t8085b.check
index 79c1b2a..921ca75 100644
--- a/test/files/presentation/t8085b.check
+++ b/test/files/presentation/t8085b.check
@@ -1,3 +1,3 @@
 reload: NodeScalaSuite.scala
-open package module: package nodescala
+open package module: package object nodescala
 Test OK
diff --git a/test/files/presentation/visibility.check b/test/files/presentation/visibility.check
index 221e3fc..b77887f 100644
--- a/test/files/presentation/visibility.check
+++ b/test/files/presentation/visibility.check
@@ -3,219 +3,188 @@ reload: Completions.scala
 askTypeCompletion at Completions.scala(14,12)
 ================================================================================
 [response] askTypeCompletion at (14,12)
-retrieved 42 members
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ->[B](y: B)(accessibility.Foo, B)`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method clone()Object`
-[accessible:  true] `method ensuring(cond: Boolean)accessibility.Foo`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)accessibility.Foo`
-[accessible:  true] `method ensuring(cond: accessibility.Foo => Boolean)accessibility.Foo`
-[accessible:  true] `method ensuring(cond: accessibility.Foo => Boolean, msg: => Any)accessibility.Foo`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method finalize()Unit`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method secretPrivate()Unit`
-[accessible:  true] `method secretProtected()Unit`
-[accessible:  true] `method secretProtectedInPackage()Unit`
-[accessible:  true] `method secretPublic()Unit`
-[accessible:  true] `method someTests(other: accessibility.Foo)Unit`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method x=> accessibility.Foo`
-[accessible:  true] `method →[B](y: B)(accessibility.Foo, B)`
-[accessible:  true] `value __leftOfArrowaccessibility.Foo`
-[accessible:  true] `value __resultOfEnsuringaccessibility.Foo`
-[accessible:  true] `value selfAny`
-[accessible: false] `method secretPrivateThis()Unit`
+retrieved 35 members
+[inaccessible] private[this] def secretPrivateThis(): Unit
+def +(other: String): String
+def ->[B](y: B): (accessibility.Foo, B)
+def ensuring(cond: Boolean): accessibility.Foo
+def ensuring(cond: Boolean,msg: => Any): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean,msg: => Any): accessibility.Foo
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def secretPublic(): Unit
+def someTests(other: accessibility.Foo): Unit
+def toString(): String
+def →[B](y: B): (accessibility.Foo, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private def secretPrivate(): Unit
+protected def secretProtected(): Unit
+protected[package accessibility] def secretProtectedInPackage(): Unit
+protected[package lang] def clone(): Object
+protected[package lang] def finalize(): Unit
 ================================================================================
 
 askTypeCompletion at Completions.scala(16,11)
 ================================================================================
 [response] askTypeCompletion at (16,11)
-retrieved 42 members
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ->[B](y: B)(accessibility.Foo, B)`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method clone()Object`
-[accessible:  true] `method ensuring(cond: Boolean)accessibility.Foo`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)accessibility.Foo`
-[accessible:  true] `method ensuring(cond: accessibility.Foo => Boolean)accessibility.Foo`
-[accessible:  true] `method ensuring(cond: accessibility.Foo => Boolean, msg: => Any)accessibility.Foo`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method finalize()Unit`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method secretPrivate()Unit`
-[accessible:  true] `method secretPrivateThis()Unit`
-[accessible:  true] `method secretProtected()Unit`
-[accessible:  true] `method secretProtectedInPackage()Unit`
-[accessible:  true] `method secretPublic()Unit`
-[accessible:  true] `method someTests(other: accessibility.Foo)Unit`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method x=> accessibility.Foo`
-[accessible:  true] `method →[B](y: B)(accessibility.Foo, B)`
-[accessible:  true] `value __leftOfArrowaccessibility.Foo`
-[accessible:  true] `value __resultOfEnsuringaccessibility.Foo`
-[accessible:  true] `value selfAny`
+retrieved 35 members
+def +(other: String): String
+def ->[B](y: B): (accessibility.Foo, B)
+def ensuring(cond: Boolean): accessibility.Foo
+def ensuring(cond: Boolean,msg: => Any): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean,msg: => Any): accessibility.Foo
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def secretPublic(): Unit
+def someTests(other: accessibility.Foo): Unit
+def toString(): String
+def →[B](y: B): (accessibility.Foo, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private def secretPrivate(): Unit
+private[this] def secretPrivateThis(): Unit
+protected def secretProtected(): Unit
+protected[package accessibility] def secretProtectedInPackage(): Unit
+protected[package lang] def clone(): Object
+protected[package lang] def finalize(): Unit
 ================================================================================
 
 askTypeCompletion at Completions.scala(22,11)
 ================================================================================
 [response] askTypeCompletion at (22,11)
-retrieved 42 members
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ->[B](y: B)(accessibility.AccessibilityChecks, B)`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method clone()Object`
-[accessible:  true] `method ensuring(cond: Boolean)accessibility.AccessibilityChecks`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)accessibility.AccessibilityChecks`
-[accessible:  true] `method ensuring(cond: accessibility.AccessibilityChecks => Boolean)accessibility.AccessibilityChecks`
-[accessible:  true] `method ensuring(cond: accessibility.AccessibilityChecks => Boolean, msg: => Any)accessibility.AccessibilityChecks`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method finalize()Unit`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method secretProtected()Unit`
-[accessible:  true] `method secretProtectedInPackage()Unit`
-[accessible:  true] `method secretPublic()Unit`
-[accessible:  true] `method someTests(other: accessibility.Foo)Unit`
-[accessible:  true] `method someTests=> Unit`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method x=> accessibility.AccessibilityChecks`
-[accessible:  true] `method →[B](y: B)(accessibility.AccessibilityChecks, B)`
-[accessible:  true] `value __leftOfArrowaccessibility.AccessibilityChecks`
-[accessible:  true] `value __resultOfEnsuringaccessibility.AccessibilityChecks`
-[accessible:  true] `value selfAny`
-[accessible: false] `method secretPrivate()Unit`
+retrieved 34 members
+def +(other: String): String
+def ->[B](y: B): (accessibility.AccessibilityChecks, B)
+def ensuring(cond: Boolean): accessibility.AccessibilityChecks
+def ensuring(cond: Boolean,msg: => Any): accessibility.AccessibilityChecks
+def ensuring(cond: accessibility.AccessibilityChecks => Boolean): accessibility.AccessibilityChecks
+def ensuring(cond: accessibility.AccessibilityChecks => Boolean,msg: => Any): accessibility.AccessibilityChecks
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def secretPublic(): Unit
+def someTests(other: accessibility.Foo): Unit
+def someTests: Unit
+def toString(): String
+def →[B](y: B): (accessibility.AccessibilityChecks, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+protected def secretProtected(): Unit
+protected[package accessibility] def secretProtectedInPackage(): Unit
+protected[package lang] def clone(): Object
+protected[package lang] def finalize(): Unit
 ================================================================================
 
 askTypeCompletion at Completions.scala(28,10)
 ================================================================================
 [response] askTypeCompletion at (28,10)
-retrieved 42 members
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ->[B](y: B)(accessibility.Foo, B)`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method ensuring(cond: Boolean)accessibility.Foo`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)accessibility.Foo`
-[accessible:  true] `method ensuring(cond: accessibility.Foo => Boolean)accessibility.Foo`
-[accessible:  true] `method ensuring(cond: accessibility.Foo => Boolean, msg: => Any)accessibility.Foo`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method secretProtectedInPackage()Unit`
-[accessible:  true] `method secretPublic()Unit`
-[accessible:  true] `method someTests(other: accessibility.Foo)Unit`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method x=> accessibility.Foo`
-[accessible:  true] `method →[B](y: B)(accessibility.Foo, B)`
-[accessible:  true] `value __leftOfArrowaccessibility.Foo`
-[accessible:  true] `value __resultOfEnsuringaccessibility.Foo`
-[accessible:  true] `value selfAny`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
-[accessible: false] `method secretPrivate()Unit`
-[accessible: false] `method secretPrivateThis()Unit`
-[accessible: false] `method secretProtected()Unit`
+retrieved 35 members
+[inaccessible] private def secretPrivate(): Unit
+[inaccessible] private[this] def secretPrivateThis(): Unit
+[inaccessible] protected def secretProtected(): Unit
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (accessibility.Foo, B)
+def ensuring(cond: Boolean): accessibility.Foo
+def ensuring(cond: Boolean,msg: => Any): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean,msg: => Any): accessibility.Foo
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def secretPublic(): Unit
+def someTests(other: accessibility.Foo): Unit
+def toString(): String
+def →[B](y: B): (accessibility.Foo, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+protected[package accessibility] def secretProtectedInPackage(): Unit
 ================================================================================
 
 askTypeCompletion at Completions.scala(37,8)
 ================================================================================
 [response] askTypeCompletion at (37,8)
-retrieved 42 members
-[accessible:  true] `method !=(x$1: Any)Boolean`
-[accessible:  true] `method !=(x$1: AnyRef)Boolean`
-[accessible:  true] `method ##()Int`
-[accessible:  true] `method +(other: String)String`
-[accessible:  true] `method ->[B](y: B)(accessibility.Foo, B)`
-[accessible:  true] `method ==(x$1: Any)Boolean`
-[accessible:  true] `method ==(x$1: AnyRef)Boolean`
-[accessible:  true] `method asInstanceOf[T0]=> T0`
-[accessible:  true] `method ensuring(cond: Boolean)accessibility.Foo`
-[accessible:  true] `method ensuring(cond: Boolean, msg: => Any)accessibility.Foo`
-[accessible:  true] `method ensuring(cond: accessibility.Foo => Boolean)accessibility.Foo`
-[accessible:  true] `method ensuring(cond: accessibility.Foo => Boolean, msg: => Any)accessibility.Foo`
-[accessible:  true] `method eq(x$1: AnyRef)Boolean`
-[accessible:  true] `method equals(x$1: Any)Boolean`
-[accessible:  true] `method formatted(fmtstr: String)String`
-[accessible:  true] `method hashCode()Int`
-[accessible:  true] `method isInstanceOf[T0]=> Boolean`
-[accessible:  true] `method ne(x$1: AnyRef)Boolean`
-[accessible:  true] `method notify()Unit`
-[accessible:  true] `method notifyAll()Unit`
-[accessible:  true] `method secretPublic()Unit`
-[accessible:  true] `method someTests(other: accessibility.Foo)Unit`
-[accessible:  true] `method synchronized[T0](x$1: T0)T0`
-[accessible:  true] `method toString()String`
-[accessible:  true] `method wait()Unit`
-[accessible:  true] `method wait(x$1: Long)Unit`
-[accessible:  true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible:  true] `method x=> accessibility.Foo`
-[accessible:  true] `method →[B](y: B)(accessibility.Foo, B)`
-[accessible:  true] `value __leftOfArrowaccessibility.Foo`
-[accessible:  true] `value __resultOfEnsuringaccessibility.Foo`
-[accessible:  true] `value selfAny`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
-[accessible: false] `method secretPrivate()Unit`
-[accessible: false] `method secretPrivateThis()Unit`
-[accessible: false] `method secretProtected()Unit`
-[accessible: false] `method secretProtectedInPackage()Unit`
+retrieved 35 members
+[inaccessible] private def secretPrivate(): Unit
+[inaccessible] private[this] def secretPrivateThis(): Unit
+[inaccessible] protected def secretProtected(): Unit
+[inaccessible] protected[package accessibility] def secretProtectedInPackage(): Unit
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (accessibility.Foo, B)
+def ensuring(cond: Boolean): accessibility.Foo
+def ensuring(cond: Boolean,msg: => Any): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean,msg: => Any): accessibility.Foo
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def secretPublic(): Unit
+def someTests(other: accessibility.Foo): Unit
+def toString(): String
+def →[B](y: B): (accessibility.Foo, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
 ================================================================================
diff --git a/test/files/presentation/visibility/src/Completions.scala b/test/files/presentation/visibility/src/Completions.scala
index 098b98a..8c07934 100644
--- a/test/files/presentation/visibility/src/Completions.scala
+++ b/test/files/presentation/visibility/src/Completions.scala
@@ -22,7 +22,7 @@ package accessibility {
       this./*!*/ // should not list secretPrivate*
     }
   }
-  
+
   class UnrelatedClass {
     def someTests(foo: Foo) {
       foo./*!*/ // should list public and protected[accessiblity]
diff --git a/test/files/res/t597/Test.scala b/test/files/res/t597/Test.scala
index 45b90bb..2f63f46 100644
--- a/test/files/res/t597/Test.scala
+++ b/test/files/res/t597/Test.scala
@@ -2,7 +2,7 @@ package test;
 
 abstract class Base {
   type A <: Ax;
-  
+
   abstract class Ax {
     def a = null;
     def string = "A";
diff --git a/test/files/res/t687.check b/test/files/res/t687.check
index b741b26..5f72c98 100644
--- a/test/files/res/t687.check
+++ b/test/files/res/t687.check
@@ -1,8 +1,8 @@
 
 nsc> 
 nsc> t687/QueryB.scala:3: error: name clash between defined and inherited member:
-method equals:(o: Object)Boolean and
-method equals:(x$1: Any)Boolean in class Any
-have same type after erasure: (o: Object)Boolean
+def equals(x$1: Any): Boolean in class Any and
+override def equals(o: Object): Boolean at line 3
+have same type after erasure: (x$1: Object)Boolean
   override def equals(o : Object) = false;
                ^
diff --git a/test/files/res/t722/Parser.scala b/test/files/res/t722/Parser.scala
index 9f54358..5dfcd57 100644
--- a/test/files/res/t722/Parser.scala
+++ b/test/files/res/t722/Parser.scala
@@ -2,7 +2,7 @@
 package t722;
 trait Parser {
   trait Link  {
-    def foo() = {} 
+    def foo() = {}
   }
 }
 
diff --git a/test/files/res/t735/ScalaExpressions.scala b/test/files/res/t735/ScalaExpressions.scala
index 605ad51..f9c8ac7 100644
--- a/test/files/res/t735/ScalaExpressions.scala
+++ b/test/files/res/t735/ScalaExpressions.scala
@@ -1,6 +1,6 @@
 package t735;
 trait ScalaExpressions {
-  trait ExpressionFactory {   
+  trait ExpressionFactory {
     def foo = 10;
     def bar : Int;
   }
diff --git a/test/files/res/t743/BracesXXX.scala b/test/files/res/t743/BracesXXX.scala
index d3f6e28..ed7b386 100644
--- a/test/files/res/t743/BracesXXX.scala
+++ b/test/files/res/t743/BracesXXX.scala
@@ -1,6 +1,6 @@
 package t743;
 trait BracesXXX extends ParserXXX {
-  trait Matchable extends IsLinked { 
+  trait Matchable extends IsLinked {
     def foo : NodeImpl = null;
   }
 }
diff --git a/test/files/res/t743/ParserXXX.scala b/test/files/res/t743/ParserXXX.scala
index fd584b9..d132bdb 100644
--- a/test/files/res/t743/ParserXXX.scala
+++ b/test/files/res/t743/ParserXXX.scala
@@ -1,9 +1,9 @@
 package t743;
 trait ParserXXX {
-  val foo = null; 
-  trait NodeImpl { 
+  val foo = null;
+  trait NodeImpl {
     trait Link extends ParserXXX.this.Link {
-      val from = null; 
+      val from = null;
     }
   }
   trait Link {
diff --git a/test/files/res/t785/ScalaNewTyper.scala b/test/files/res/t785/ScalaNewTyper.scala
index 919e3b8..acdba0f 100644
--- a/test/files/res/t785/ScalaNewTyper.scala
+++ b/test/files/res/t785/ScalaNewTyper.scala
@@ -1,7 +1,7 @@
 package t785;
 trait ScalaNewTyper {
   private var typed : String = null;
-  trait HasSymbol { 
+  trait HasSymbol {
     protected def foo() : Unit = {}
   }
   trait HasArgsTypeParametersImpl extends HasSymbol {
diff --git a/test/files/res/t831/NewScalaParserXXX.scala b/test/files/res/t831/NewScalaParserXXX.scala
index ed9b9d3..958e4f9 100644
--- a/test/files/res/t831/NewScalaParserXXX.scala
+++ b/test/files/res/t831/NewScalaParserXXX.scala
@@ -10,33 +10,33 @@ trait ScalaNodeScannerXXX {
 //for (ns <-n; val i <- 0.until(ns)) yield f;
 
 
-trait NewScalaScannerXXX extends ScalaNodeScannerXXX { 
+trait NewScalaScannerXXX extends ScalaNodeScannerXXX {
   type Unfixed <: Node with UnfixedImpl;
   trait UnfixedImpl extends super.UnfixedImpl with NodeImpl;
   type Statement <: Unfixed with StatementImpl;
   trait StatementImpl extends UnfixedImpl { def self : Statement; }
   type NewLine <: Statement with NewLineImpl;
-  trait NewLineImpl extends StatementImpl { 
-    def self : NewLine; 
+  trait NewLineImpl extends StatementImpl {
+    def self : NewLine;
     def isActive : Boolean = true;
   }
   object ArrowMode extends Enumeration { val Def, Case, Expr = Value }
 }
 
-trait ScalaPrecedenceXXX extends NewScalaScannerXXX { 
+trait ScalaPrecedenceXXX extends NewScalaScannerXXX {
   type NewLine <: Statement with NewLineImpl;
-  trait NewLineImpl extends super.NewLineImpl with StatementImpl {  
-    def self : NewLine; 
+  trait NewLineImpl extends super.NewLineImpl with StatementImpl {
+    def self : NewLine;
     override def isActive = super[NewLineImpl].isActive;
   }
 }
 trait NewScalaParserXXX extends NewScalaScannerXXX with ScalaPrecedenceXXX {
   type NewLine <: Statement with NewLineImpl;
   trait MyNewLine extends super[NewScalaScannerXXX].NewLineImpl;
-  trait NewLineImpl extends MyNewLine with 
+  trait NewLineImpl extends MyNewLine with
     super[ScalaPrecedenceXXX].NewLineImpl with
-    StatementImpl { 
-    def self : NewLine; 
+    StatementImpl {
+    def self : NewLine;
     override def isActive = super[MyNewLine].isActive;
   }
 }
diff --git a/test/files/run/Course-2002-01.check b/test/files/run/Course-2002-01.check
index 17b30bf..16b491d 100644
--- a/test/files/run/Course-2002-01.check
+++ b/test/files/run/Course-2002-01.check
@@ -1,3 +1,6 @@
+Course-2002-01.scala:41: warning: method loop in object M0 does nothing other than call itself recursively
+  def loop: Int = loop;
+                  ^
 232
 667
 11
diff --git a/test/files/run/Course-2002-02.scala b/test/files/run/Course-2002-02.scala
index 56d7298..b865010 100644
--- a/test/files/run/Course-2002-02.scala
+++ b/test/files/run/Course-2002-02.scala
@@ -100,7 +100,7 @@ object M4 {
 
   def sumInts = sum(x => x)
   def sumCubes = sum(x => x * x * x)
-  def sumReciprocals = sum(1.0/_)  
+  def sumReciprocals = sum(1.0/_)
   def sumPi = { n: Int => 4 + sum(x => 4.0/(4*x+1) - 4.0/(4*x-1))(1, n) }
 
   Console.println(sumInts(1,4))
@@ -194,7 +194,7 @@ object M8 {
 //############################################################################
 
 object M9 {
-  def accumulate[t](combiner: (t, t) => t, nullValue: t, f: Int => t, 
+  def accumulate[t](combiner: (t, t) => t, nullValue: t, f: Int => t,
                     next: Int => Int)(a: Int, b: Int): t =
     if (a > b) nullValue
     else combiner(f(a), accumulate(combiner, nullValue, f, next)(next(a), b))
@@ -328,9 +328,9 @@ object MD {
     iter(a, zero)
   }
 
-  def plus (x:Double,y:Double) = x+y; 
+  def plus (x:Double,y:Double) = x+y;
   val sum: (Int => Double) => (Int, Int) => Double = reduce(plus , 0);
-  def times(x:Double,y:Double) = x*y; 
+  def times(x:Double,y:Double) = x*y;
   val product: (Int => Double) => (Int, Int) => Double = reduce(times, 1);
 
   def factorial(n: Int) = product(x => x)(1 , n)
diff --git a/test/files/run/Course-2002-05.scala b/test/files/run/Course-2002-05.scala
index 9457fae..80317bc 100644
--- a/test/files/run/Course-2002-05.scala
+++ b/test/files/run/Course-2002-05.scala
@@ -3,15 +3,15 @@
 //############################################################################
 
 object M0 {
-  def partition[a](xs: List[a], pred: a => Boolean): Pair[List[a], List[a]] = {
+  def partition[a](xs: List[a], pred: a => Boolean): Tuple2[List[a], List[a]] = {
     if (xs.isEmpty)
-      Pair(List(),List())
+      (List(),List())
     else {
       val tailPartition = partition(xs.tail, pred);
       if (pred(xs.head))
-        Pair(xs.head :: tailPartition._1, tailPartition._2)
+        (xs.head :: tailPartition._1, tailPartition._2)
       else
-        Pair(tailPartition._1, xs.head :: tailPartition._2)
+        (tailPartition._1, xs.head :: tailPartition._2)
     }
   }
 
@@ -49,9 +49,9 @@ object M0 {
 //############################################################################
 
 object M1 {
-  def partition[a](xs: List[a], pred: a => Boolean): Pair[List[a], List[a]] = {
-    xs.foldRight[Pair[List[a], List[a]]](Pair(List(), List())) {
-      (x, p) => if (pred (x)) Pair(x :: p._1, p._2) else Pair(p._1, x :: p._2)
+  def partition[a](xs: List[a], pred: a => Boolean): Tuple2[List[a], List[a]] = {
+    xs.foldRight[Tuple2[List[a], List[a]]]((List(), List())) {
+      (x, p) => if (pred (x)) (x :: p._1, p._2) else (p._1, x :: p._2)
     }
   }
 
@@ -129,14 +129,14 @@ object M3 {
       else {
         def isSafe(column: Int, placement: Placement): Boolean =
           placement forall {
-            pos => (pos._2 != column && 
+            pos => (pos._2 != column &&
               abs(pos._2 - column) != row - pos._1)
           }
 
         def adjoinRow(placement: Placement): List[Placement] =
           range(1, n)
             .filter (column => isSafe(column, placement))
-            .map (column => Pair(row, column) :: placement);
+            .map (column => (row, column) :: placement);
 
         placeQueens(row - 1) flatMap adjoinRow
       }
diff --git a/test/files/run/Course-2002-06.scala b/test/files/run/Course-2002-06.scala
index e4fb86a..908a934 100644
--- a/test/files/run/Course-2002-06.scala
+++ b/test/files/run/Course-2002-06.scala
@@ -55,7 +55,7 @@ abstract class Graphics(_width: Double, _height: Double) {
   }
 
   /** Draw a list of segments on the picture.*/
-  def drawSegments(frm: Frame)(segments: List[Pair[Vector, Vector]]): Unit =
+  def drawSegments(frm: Frame)(segments: List[Tuple2[Vector, Vector]]): Unit =
     if (segments.isEmpty) ()
     else {
       drawSegment(frm)(segments.head._1, segments.head._2);
diff --git a/test/files/run/Course-2002-07.scala b/test/files/run/Course-2002-07.scala
index 7848ae3..2d94576 100644
--- a/test/files/run/Course-2002-07.scala
+++ b/test/files/run/Course-2002-07.scala
@@ -16,13 +16,13 @@ object M0 {
     def isNumber: Boolean = true;
     def isSum: Boolean = false;
     def numValue: Int = n;
-    def leftOp: Expr = error("Number.leftOp");
-    def rightOp: Expr = error("Number.rightOp");
+    def leftOp: Expr = sys.error("Number.leftOp");
+    def rightOp: Expr = sys.error("Number.rightOp");
   }
   class Sum(e1: Expr, e2: Expr) extends Expr {
     def isNumber: Boolean = false;
     def isSum: Boolean = true;
-    def numValue: Int = error("Sum.numValue");
+    def numValue: Int = sys.error("Sum.numValue");
     def leftOp: Expr = e1;
     def rightOp: Expr = e2;
   }
@@ -30,7 +30,7 @@ object M0 {
   class Prod(e1: Expr, e2: Expr) extends Expr {
     def isNumber: Boolean = false;
     def isSum: Boolean = false;
-    def numValue: Int = error("Prod.numValue");
+    def numValue: Int = sys.error("Prod.numValue");
     def leftOp: Expr = e1;
     def rightOp: Expr = e2;
   }
@@ -38,15 +38,15 @@ object M0 {
   class Var(x: String) extends Expr {
     def isNumber: Boolean = false;
     def isSum: Boolean = false;
-    def numValue: Int = error("Var.numValue");
-    def leftOp: Expr = error("Var.leftOp");
-    def rightOp: Expr = error("Var.rightOp");
+    def numValue: Int = sys.error("Var.numValue");
+    def leftOp: Expr = sys.error("Var.leftOp");
+    def rightOp: Expr = sys.error("Var.rightOp");
   }
 
   def eval(e: Expr): Int = {
     if (e.isNumber) e.numValue
     else if (e.isSum) eval(e.leftOp) + eval(e.rightOp)
-    else error("unknown expression")
+    else sys.error("unknown expression")
   }
 
   def test = {
@@ -181,10 +181,10 @@ object M4 {
 
 object M5 {
 
-  def zipFun[a,b](xs:List[a], ys:List[b]):List[Pair[a,b]] = Pair(xs,ys) match {
-    case Pair(List(), _) => List()
-    case Pair(_, List()) => List()
-    case Pair(x :: xs1, y :: ys1) => Pair(x, y) :: zipFun(xs1, ys1)
+  def zipFun[a,b](xs:List[a], ys:List[b]):List[Tuple2[a,b]] = (xs,ys) match {
+    case (List(), _) => List()
+    case (_, List()) => List()
+    case (x :: xs1, y :: ys1) => (x, y) :: zipFun(xs1, ys1)
   }
 
   def test_zipFun[a,b](xs: List[a], ys: List[b]) = {
@@ -216,9 +216,9 @@ object M5 {
 
 object M6 {
 
-  def zipFun[a,b](xs:List[a], ys:List[b]):List[Pair[a,b]] = (Pair(xs,ys): @unchecked) match {
-    // !!! case Pair(List(), _), Pair(_, List()) => List()
-    case Pair(x :: xs1, y :: ys1) => Pair(x, y) :: zipFun(xs1, ys1)
+  def zipFun[a,b](xs:List[a], ys:List[b]):List[Tuple2[a,b]] = ((xs,ys): @unchecked) match {
+    // !!! case (List(), _), (_, List()) => List()
+    case (x :: xs1, y :: ys1) => (x, y) :: zipFun(xs1, ys1)
   }
 
   def test_zipFun[a,b](xs: List[a], ys: List[b]) = {
@@ -374,9 +374,9 @@ object M9 {
 
 object MA {
 
-  def lookup[k,v](xs: List[Pair[k,v]], k: k): v = xs match {
-    case List() => error("no value for " + k)
-    case Pair(k1,v1) :: xs1 => if (k1 == k) v1 else lookup(xs1, k)
+  def lookup[k,v](xs: List[Tuple2[k,v]], k: k): v = xs match {
+    case List() => sys.error("no value for " + k)
+    case (k1,v1) :: xs1 => if (k1 == k) v1 else lookup(xs1, k)
   }
 
   trait Expr {
@@ -410,7 +410,7 @@ object MA {
 
   def eval(e: Expr): Int = e match {
     case Number(n) => n
-    case Var(_) => error("cannot evaluate variable")
+    case Var(_) => sys.error("cannot evaluate variable")
     case Sum(e1, e2) => eval(e1) + eval(e2)
     case Prod(e1, e2) => eval(e1) * eval(e2)
   }
@@ -437,8 +437,8 @@ object MA {
     val g1 = g0 derive x;
     Console.println("g (x) = " + g0);
     Console.println("g'(x) = " + g1);
-    Console.println("g (3) = " + evalvars(List(Pair("x",3)))(g0));
-    Console.println("g'(3) = " + evalvars(List(Pair("x",3)))(g1));
+    Console.println("g (3) = " + evalvars(List(("x",3)))(g0));
+    Console.println("g'(3) = " + evalvars(List(("x",3)))(g1));
 
     Console.println;
   }
@@ -453,26 +453,26 @@ object Utils {
     if (y == 1) x else if (y % 2 == 0) power0(x*x,y/2) else x*power0(x, y-1);
 
   def power(x: Int, y: Int): Int = (x,y) match {
-    case Pair(0,0) => error("power(0,0)")
-    case Pair(0,_) => 0
-    case Pair(1,_) => 1
-    case Pair(_,0) => 1
-    case Pair(_,1) => x
-    case Pair(_,2) => x*x
-    case Pair(_,_) => if (y < 0) 1/power0(x,y) else power0(x,y)
+    case (0,0) => sys.error("power(0,0)")
+    case (0,_) => 0
+    case (1,_) => 1
+    case (_,0) => 1
+    case (_,1) => x
+    case (_,2) => x*x
+    case (_,_) => if (y < 0) 1/power0(x,y) else power0(x,y)
   }
 
   def lookup(entries: List[(String,Int)], key: String): Int = entries match {
-    case List()                       => error("no value for " + key)
-    case Pair(k,v) :: _ if (k == key) => v
+    case List()                       => sys.error("no value for " + key)
+    case (k,v) :: _ if (k == key) => v
     case _ :: rest                    => lookup(rest, key)
   }
 
   def compare(xs: List[String], ys: List[String]): Int = (xs, ys) match {
-    case Pair(List(), List()) =>  0
-    case Pair(List(), _     ) => -1
-    case Pair(_     , List()) => +1
-    case Pair(x::xs , y::ys ) => {
+    case (List(), List()) =>  0
+    case (List(), _     ) => -1
+    case (_     , List()) => +1
+    case (x::xs , y::ys ) => {
       val diff = x.compareTo(y);
       if (diff != 0) diff else compare(xs,ys)
     }
@@ -508,18 +508,18 @@ object MB {
 
     private def +<  (that: Expr): Boolean = (this +<? that) <  0;
     private def +<= (that: Expr): Boolean = (this +<? that) <= 0;
-    private def +<? (that: Expr): Int = Pair(this,that) match {
-      case Pair(Add(_,_), _       ) =>  0
-      case Pair(_       , Add(_,_)) =>  0
-      case Pair(_       , _       ) => compare(this.vars,that.vars)
+    private def +<? (that: Expr): Int = (this,that) match {
+      case (Add(_,_), _       ) =>  0
+      case (_       , Add(_,_)) =>  0
+      case (_       , _       ) => compare(this.vars,that.vars)
     }
 
-    def + (that: Expr): Expr = if (that +<= this) Pair(this,that) match {
-      case Pair(_         , Lit(0)    )                  => this
-      case Pair(Lit(l)    , Lit(r)    )                  => Lit(l + r)
-      case Pair(_         , Add(rl,rr))                  => (this + rl) + rr
-      case Pair(Add(ll,lr), _         ) if (lr +<= that) => ll + (that + lr)
-      case Pair(_         , _         )                  => {
+    def + (that: Expr): Expr = if (that +<= this) (this,that) match {
+      case (_         , Lit(0)    )                  => this
+      case (Lit(l)    , Lit(r)    )                  => Lit(l + r)
+      case (_         , Add(rl,rr))                  => (this + rl) + rr
+      case (Add(ll,lr), _         ) if (lr +<= that) => ll + (that + lr)
+      case (_         , _         )                  => {
         val l = this.term;
         val r = that.term;
         if (l equ r) Lit(this.count + that.count) * r else Add(this, that)
@@ -528,41 +528,41 @@ object MB {
 
     private def *<  (that: Expr): Boolean = (this *<? that) <  0;
     private def *<= (that: Expr): Boolean = (this *<? that) <= 0;
-    private def *<? (that: Expr): Int = Pair(this,that) match {
-      case Pair(Mul(_,_), _       ) =>  0
-      case Pair(_       , Mul(_,_)) =>  0
-      case Pair(Add(_,_), Add(_,_)) =>  0
-      case Pair(Add(_,_), _       ) => -1
-      case Pair(_       , Add(_,_)) => +1
-      case Pair(Lit(_)  , Lit(_)  ) =>  0
-      case Pair(Lit(_)  , _       ) => -1
-      case Pair(_       , Lit(_)  ) => +1
-      case Pair(Var(l)  , Var(r)  ) => l.compareTo(r)
-      case Pair(Var(_)  , Pow(r,_)) => if (this *<= r) -1 else +1
-      case Pair(Pow(l,_), Var(_)  ) => if (l *<  that) -1 else +1
-      case Pair(Pow(l,_), Pow(r,_)) => l *<? r
+    private def *<? (that: Expr): Int = (this,that) match {
+      case (Mul(_,_), _       ) =>  0
+      case (_       , Mul(_,_)) =>  0
+      case (Add(_,_), Add(_,_)) =>  0
+      case (Add(_,_), _       ) => -1
+      case (_       , Add(_,_)) => +1
+      case (Lit(_)  , Lit(_)  ) =>  0
+      case (Lit(_)  , _       ) => -1
+      case (_       , Lit(_)  ) => +1
+      case (Var(l)  , Var(r)  ) => l.compareTo(r)
+      case (Var(_)  , Pow(r,_)) => if (this *<= r) -1 else +1
+      case (Pow(l,_), Var(_)  ) => if (l *<  that) -1 else +1
+      case (Pow(l,_), Pow(r,_)) => l *<? r
     }
 
-    def * (that: Expr): Expr = if (this *<= that) Pair(this,that) match {
-      case Pair(Lit(0)    , _         )                    => this
-      case Pair(Lit(1)    , _         )                    => that
-      case Pair(Mul(ll,lr), r         )                    => ll * (lr * r)
-      case Pair(Add(ll,lr), r         )                    => ll * r + lr * r
-      case Pair(Lit(l)    , Lit(r)    )                    => Lit(l * r)
-      case Pair(Var(_)    , Var(_)    ) if (this equ that) => Pow(this,2)
-      case Pair(Var(_)    , Pow(r,n)  ) if (this equ r)    => Pow(this,n + 1)
-      case Pair(Pow(ll,lr), Pow(rl,rr)) if (ll equ rl)     => Pow(ll,lr + rr)
-      case Pair(l         , Mul(rl,rr)) if (rl *<= l)      => (rl * l) * rr
-      case Pair(_         , _         )                    => Mul(this,that)
+    def * (that: Expr): Expr = if (this *<= that) (this,that) match {
+      case (Lit(0)    , _         )                    => this
+      case (Lit(1)    , _         )                    => that
+      case (Mul(ll,lr), r         )                    => ll * (lr * r)
+      case (Add(ll,lr), r         )                    => ll * r + lr * r
+      case (Lit(l)    , Lit(r)    )                    => Lit(l * r)
+      case (Var(_)    , Var(_)    ) if (this equ that) => Pow(this,2)
+      case (Var(_)    , Pow(r,n)  ) if (this equ r)    => Pow(this,n + 1)
+      case (Pow(ll,lr), Pow(rl,rr)) if (ll equ rl)     => Pow(ll,lr + rr)
+      case (l         , Mul(rl,rr)) if (rl *<= l)      => (rl * l) * rr
+      case (_         , _         )                    => Mul(this,that)
     } else that * this;
 
     def ^ (that: Int): Expr = (this,that) match {
-      case Pair(_       ,1) => this
-      case Pair(Lit(i)  ,n) => Lit(power(i,n))
-      case Pair(Var(_)  ,n) => Pow(this,n)
-      case Pair(Add(_,_),n) => this * (this ^ (n - 1))
-      case Pair(Mul(l,r),n) => (l ^ n) * (r ^ n)
-      case Pair(Pow(e,m),n) => Pow(e,m + n)
+      case (_       ,1) => this
+      case (Lit(i)  ,n) => Lit(power(i,n))
+      case (Var(_)  ,n) => Pow(this,n)
+      case (Add(_,_),n) => this * (this ^ (n - 1))
+      case (Mul(l,r),n) => (l ^ n) * (r ^ n)
+      case (Pow(e,m),n) => Pow(e,m + n)
     }
 
     def derive(v: Var): Expr = this match {
@@ -581,12 +581,12 @@ object MB {
       case Pow(l, r) => power(l.evaluate(vars), r)
     }
 
-    def equ(that: Expr): Boolean = Pair(this,that) match {
-      case Pair(Lit(l)    ,Lit(r))     => l == r
-      case Pair(Var(l)    ,Var(r))     => l == r
-      case Pair(Add(ll,lr),Add(rl,rr)) => (ll equ rl) && (lr equ rr)
-      case Pair(Mul(ll,lr),Mul(rl,rr)) => (ll equ rl) && (lr equ rr)
-      case Pair(Pow(ll,lr),Pow(rl,rr)) => (ll equ rl) && (lr == rr)
+    def equ(that: Expr): Boolean = (this,that) match {
+      case (Lit(l)    ,Lit(r))     => l == r
+      case (Var(l)    ,Var(r))     => l == r
+      case (Add(ll,lr),Add(rl,rr)) => (ll equ rl) && (lr equ rr)
+      case (Mul(ll,lr),Mul(rl,rr)) => (ll equ rl) && (lr equ rr)
+      case (Pow(ll,lr),Pow(rl,rr)) => (ll equ rl) && (lr == rr)
       case _ => false
     }
 
@@ -667,7 +667,7 @@ object MB {
     Console.println;
 
     def check(n: String, f: Expr, x: Int, e: Int) {
-      val a: Int = f.evaluate(List(Pair("x",x)));
+      val a: Int = f.evaluate(List(("x",x)));
       val s: String = if (a == e) "ok" else "KO(" + e + ")";
       Console.println(n + "(" + x + ") = " + a + " " + s);
     }
diff --git a/test/files/run/Course-2002-08.scala b/test/files/run/Course-2002-08.scala
index 85a83e0..5e21eda 100644
--- a/test/files/run/Course-2002-08.scala
+++ b/test/files/run/Course-2002-08.scala
@@ -33,7 +33,7 @@ object M1 {
       if (0 < amount && amount <= balance) {
         balance = balance - amount;
         balance
-      } else error("insufficient funds");
+      } else sys.error("insufficient funds");
   }
 
   def test0 = {
@@ -163,7 +163,7 @@ object M5 {
   }
 
   abstract class Simulation() {
-    private type Agenda = List[Pair[Int, Action]];
+    private type Agenda = List[Tuple2[Int, Action]];
     private var agenda: Agenda = List();
     private var curtime = 0;
     def currentTime: Int = curtime;
@@ -171,17 +171,17 @@ object M5 {
     def afterDelay(delay: Int)(action: Action): Unit = {
       def insert(ag: Agenda, time: Int): Agenda = ag match {
         case List() =>
-          List(Pair(time, action))
-        case Pair(t, act) :: ag1 =>
-          if (time < t) Pair(time, action) :: ag
-          else Pair(t, act) :: insert(ag1, time)
+          List((time, action))
+        case (t, act) :: ag1 =>
+          if (time < t) (time, action) :: ag
+          else (t, act) :: insert(ag1, time)
       }
       agenda = insert(agenda, curtime + delay)
     }
 
     private def next: Unit = agenda match {
       case List() => ()
-      case Pair(time, action) :: ag1 => {
+      case (time, action) :: ag1 => {
         agenda = ag1;
         curtime = time;
         action();
@@ -413,7 +413,7 @@ object M5 {
 class Simulator() {
 
   type Action = () => Unit;
-  type Agenda = List[Pair[Int, Action]];
+  type Agenda = List[Tuple2[Int, Action]];
 
   private var agenda: Agenda = List();
   private var curtime = 0;
@@ -421,17 +421,17 @@ class Simulator() {
   def afterDelay(delay: Int)(action: Action) = {
     def insert(ag: Agenda, time: Int): Agenda = ag match {
       case List() =>
-        List(Pair(time, action))
-      case Pair(t, act) :: ag1 =>
-        if (time < t) Pair(time, action) :: ag
-        else Pair(t, act) :: insert(ag1, time)
+        List((time, action))
+      case (t, act) :: ag1 =>
+        if (time < t) (time, action) :: ag
+        else (t, act) :: insert(ag1, time)
     }
     agenda = insert(agenda, curtime + delay)
   }
 
   def next: Unit = agenda match {
     case List() => ()
-    case Pair(time, action) :: rest => {
+    case (time, action) :: rest => {
       agenda = rest;
       curtime = time;
       action();
@@ -520,7 +520,7 @@ abstract class CircuitSimulator() extends BasicCircuitSimulator() {
 	val w1 = new Wire();
 	val w2 = new Wire();
 	val w3 = new Wire();
-	
+
     andGate(in, ctrl(1), w3);
     andGate(in, ctrl(1), w2);
     andGate(in, ctrlN(1), w1);
@@ -567,8 +567,8 @@ class Main() extends CircuitSimulator() {
     demux(in, ctrl.reverse, out.reverse);
 
     probe("in", in);
-    for (Pair(x,c) <- range(0,n) zip ctrl) { probe("ctrl" + x, c) }
-    for (Pair(x,o) <- range(0,outNum) zip out) { probe("out" + x, o) }
+    for ((x,c) <- range(0,n) zip ctrl) { probe("ctrl" + x, c) }
+    for ((x,o) <- range(0,outNum) zip out) { probe("out" + x, o) }
 
     in.setSignal(true);
     run;
diff --git a/test/files/run/Course-2002-09.scala b/test/files/run/Course-2002-09.scala
index 384a91e..704f2ec 100644
--- a/test/files/run/Course-2002-09.scala
+++ b/test/files/run/Course-2002-09.scala
@@ -8,16 +8,16 @@ trait Constraint {
 }
 
 object NoConstraint extends Constraint {
-  def newValue: Unit = error("NoConstraint.newValue");
-  def dropValue: Unit = error("NoConstraint.dropValue");
+  def newValue: Unit = sys.error("NoConstraint.newValue");
+  def dropValue: Unit = sys.error("NoConstraint.dropValue");
 }
 
 class Adder(a1: Quantity,a2: Quantity,sum: Quantity) extends Constraint {
-  def newValue = Triple(a1.getValue, a2.getValue, sum.getValue) match {
-    case Triple(Some(x1), Some(x2), _      ) => sum.setValue(x1 + x2, this)
-    case Triple(Some(x1), _       , Some(r)) => a2.setValue(r - x1, this)
-    case Triple(_       , Some(x2), Some(r)) => a1.setValue(r - x2, this)
-    case _                                   =>
+  def newValue = (a1.getValue, a2.getValue, sum.getValue) match {
+    case (Some(x1), Some(x2), _      ) => sum.setValue(x1 + x2, this)
+    case (Some(x1), _       , Some(r)) => a2.setValue(r - x1, this)
+    case (_       , Some(x2), Some(r)) => a1.setValue(r - x2, this)
+    case _                             =>
   }
   def dropValue: Unit = {
     a1.forgetValue(this); a2.forgetValue(this); sum.forgetValue(this);
@@ -29,13 +29,13 @@ class Adder(a1: Quantity,a2: Quantity,sum: Quantity) extends Constraint {
 
 class Multiplier(m1: Quantity, m2: Quantity, prod: Quantity)
                 extends Constraint {
-  def newValue = Triple(m1.getValue, m2.getValue, prod.getValue) match {
-    case Triple(Some(0d), _       , _      ) => prod.setValue(0, this);
-    case Triple(_       , Some(0d), _      ) => prod.setValue(0, this);
-    case Triple(Some(x1), Some(x2), _      ) => prod.setValue(x1 * x2, this)
-    case Triple(Some(x1), _       , Some(r)) => m2.setValue(r / x1, this)
-    case Triple(_,        Some(x2), Some(r)) => m1.setValue(r / x2, this)
-    case _                                   =>
+  def newValue = (m1.getValue, m2.getValue, prod.getValue) match {
+    case (Some(0d), _       , _      ) => prod.setValue(0, this);
+    case (_       , Some(0d), _      ) => prod.setValue(0, this);
+    case (Some(x1), Some(x2), _      ) => prod.setValue(x1 * x2, this)
+    case (Some(x1), _       , Some(r)) => m2.setValue(r / x1, this)
+    case (_,        Some(x2), Some(r)) => m1.setValue(r / x2, this)
+    case _                             =>
   }
   def dropValue: Unit = {
     m1.forgetValue(this); m2.forgetValue(this); prod.forgetValue(this);
@@ -46,11 +46,11 @@ class Multiplier(m1: Quantity, m2: Quantity, prod: Quantity)
 }
 
 class Squarer(square: Quantity, root: Quantity) extends Constraint {
-  def newValue: Unit = Pair(square.getValue, root.getValue) match {
-    case Pair(Some(x), _      )if (x < 0) => error("Square of negative number")
-    case Pair(Some(x), _      )           => root.setValue(Math.sqrt(x), this)
-    case Pair(_      , Some(x))           => square.setValue(x*x, this)
-    case _                                =>
+  def newValue: Unit = (square.getValue, root.getValue) match {
+    case (Some(x), _      )if (x < 0) => sys.error("Square of negative number")
+    case (Some(x), _      )           => root.setValue(Math.sqrt(x), this)
+    case (_      , Some(x))           => square.setValue(x*x, this)
+    case _                            =>
   }
   def dropValue: Unit = {
     square.forgetValue(this); root.forgetValue(this);
@@ -60,9 +60,9 @@ class Squarer(square: Quantity, root: Quantity) extends Constraint {
 }
 
 class Eq(a: Quantity, b: Quantity) extends Constraint {
-  def newValue = (Pair(a.getValue, b.getValue): @unchecked) match {
-    case Pair(Some(x), _      ) => b.setValue(x, this);
-    case Pair(_      , Some(y)) => a.setValue(y, this);
+  def newValue = ((a.getValue, b.getValue): @unchecked) match {
+    case (Some(x), _      ) => b.setValue(x, this);
+    case (_      , Some(y)) => a.setValue(y, this);
   }
   def dropValue {
     a.forgetValue(this); b.forgetValue(this);
@@ -72,8 +72,8 @@ class Eq(a: Quantity, b: Quantity) extends Constraint {
 }
 
 class Constant(q: Quantity, v: Double) extends Constraint {
-  def newValue: Unit = error("Constant.newValue");
-  def dropValue: Unit = error("Constant.dropValue");
+  def newValue: Unit = sys.error("Constant.newValue");
+  def dropValue: Unit = sys.error("Constant.dropValue");
   q connect this;
   q.setValue(v, this);
 }
@@ -100,7 +100,7 @@ class Quantity() {
 
   def setValue(v: Double, setter: Constraint) = value match {
     case Some(v1) =>
-      if (v != v1) error("Error! contradiction: " + v + " and " + v1);
+      if (v != v1) sys.error("Error! contradiction: " + v + " and " + v1);
     case None =>
       informant = setter; value = Some(v);
       for (c <- constraints; if !(c == informant)) {
diff --git a/test/files/run/Course-2002-13.scala b/test/files/run/Course-2002-13.scala
index c266af8..a596a33 100644
--- a/test/files/run/Course-2002-13.scala
+++ b/test/files/run/Course-2002-13.scala
@@ -42,7 +42,7 @@ object Terms {
   }
 
   case class Binding(name: String, term: Term) {
-    term match { case Var(n) if (name == n) => error("bad binding") case _ => () }
+    term match { case Var(n) if (name == n) => sys.error("bad binding") case _ => () }
     override def toString() = name + " = " + term;
   }
 
@@ -74,18 +74,18 @@ object Terms {
 
   val NoTerm = Con("<none>", List());
 
-  def unify1(x: Term, y: Term, s: Subst): Option[Subst] = Pair(x, y) match {
-    case Pair(Var(a), Var(b)) if (a == b) =>
+  def unify1(x: Term, y: Term, s: Subst): Option[Subst] = (x, y) match {
+    case (Var(a), Var(b)) if (a == b) =>
       Some(s)
-    case Pair(Var(a), _) => lookup(s, a) match {
+    case (Var(a), _) => lookup(s, a) match {
       case Some(x1) => unify(x1, y, s)
       case None => if (y.tyvars contains a) None else Some(Binding(a, y) :: s)
     }
-    case Pair(_, Var(b)) => lookup(s, b) match {
+    case (_, Var(b)) => lookup(s, b) match {
       case Some(y1) => unify(x, y1, s)
       case None => if (x.tyvars contains b) None else Some(Binding(b, x) :: s)
     }
-    case Pair(Con(a, xs), Con(b, ys)) if (a == b) =>
+    case (Con(a, xs), Con(b, ys)) if (a == b) =>
       unify(xs, ys, s)
     case _ => None
   }
@@ -96,9 +96,9 @@ object Terms {
     ss
   }
 
-  def unify(xs: List[Term], ys: List[Term], s: Subst): Option[Subst] = Pair(xs, ys) match {
-    case Pair(List(), List()) => Some(s)
-    case Pair(x :: xs1, y :: ys1) =>
+  def unify(xs: List[Term], ys: List[Term], s: Subst): Option[Subst] = (xs, ys) match {
+    case (List(), List()) => Some(s)
+    case (x :: xs1, y :: ys1) =>
       unify(x, y, s) match {
 	case Some(s1) => unify(xs1, ys1, s1)
 	case None => None
@@ -168,7 +168,7 @@ class Parser(s: String) {
 
   var token: String = it.next;
 
-  def syntaxError(msg: String): Unit = error(msg + ", but " + token + " found");
+  def syntaxError(msg: String): Unit = sys.error(msg + ", but " + token + " found");
 
   def rep[a](p: => a): List[a] = {
     val t = p;
diff --git a/test/files/run/Meter.check b/test/files/run/Meter.check
index b7e2eac..c79c51a 100644
--- a/test/files/run/Meter.check
+++ b/test/files/run/Meter.check
@@ -1,3 +1,6 @@
+Meter.scala:72: warning: a.Meter and Int are unrelated: they will never compare equal
+  println("x == 1: "+(x == 1))
+                        ^
 2.0
 4.0m
 false
diff --git a/test/files/run/MeterCaseClass.check b/test/files/run/MeterCaseClass.check
index 2528753..2782704 100644
--- a/test/files/run/MeterCaseClass.check
+++ b/test/files/run/MeterCaseClass.check
@@ -1,3 +1,6 @@
+MeterCaseClass.scala:69: warning: comparing values of types a.Meter and Int using `==' will always yield false
+  println("x == 1: "+(x == 1))
+                        ^
 2.0
 Meter(4.0)
 false
diff --git a/test/files/run/OrderingTest.scala b/test/files/run/OrderingTest.scala
index 53448fb..8af18aa 100644
--- a/test/files/run/OrderingTest.scala
+++ b/test/files/run/OrderingTest.scala
@@ -6,7 +6,7 @@ object Test extends App {
     assert((cmp == 0) == (cmp2 == 0))
     assert((cmp > 0) == (cmp2 < 0))
     assert((cmp < 0) == (cmp2 > 0))
-  } 
+  }
 
   def testAll[T](t1 : T, t2 : T)(implicit ord : Ordering[T]) = {
     assert(ord.compare(t1, t2) < 0)
@@ -16,8 +16,8 @@ object Test extends App {
   }
 
   assert(Ordering[String].compare("australopithecus", "brontausaurus") < 0)
-  // assert(Ordering[Unit].compare((), ()) == 0) 
- 
+  // assert(Ordering[Unit].compare((), ()) == 0)
+
   testAll("bar", "foo");
   testAll[Byte](0, 1);
   testAll(false, true)
@@ -28,7 +28,7 @@ object Test extends App {
   testAll[Iterable[Int]](List(1, 2), List(2));
   testAll((1, "bar"), (1, "foo"))
   testAll((1, "foo"), (2, "bar"))
-  
+
   // sortBy
   val words = "The quick brown fox jumped over the lazy dog".split(' ')
   val result = words.sortBy(x => (x.length, x.head))
diff --git a/test/files/run/Predef.readLine.scala b/test/files/run/Predef.readLine.scala
index 9f07936..ce85658 100644
--- a/test/files/run/Predef.readLine.scala
+++ b/test/files/run/Predef.readLine.scala
@@ -1,4 +1,5 @@
 import java.io.StringReader
+import scala.io.StdIn.readLine
 
 object Test extends App {
   Console.withIn(new StringReader("")) {
@@ -7,4 +8,4 @@ object Test extends App {
     readLine("%s prompt\n", "fancy")
     readLine("%s %s prompt\n", "immensely", "fancy")
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/ReplacementMatching.scala b/test/files/run/ReplacementMatching.scala
index faa4641..05040d9 100644
--- a/test/files/run/ReplacementMatching.scala
+++ b/test/files/run/ReplacementMatching.scala
@@ -7,12 +7,12 @@ import util.matching._
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     replacementMatching
     groupsMatching
   }
-  
+
   def replacementMatching {
     val regex = """\$\{(.+?)\}""".r
     val replaced = regex.replaceAllIn("Replacing: ${main}. And another method: ${foo}.",
@@ -21,7 +21,7 @@ object Test {
       identifier
     })
     assert(replaced == "Replacing: main. And another method: foo.")
-    
+
     val regex3 = """\$\{(.+?)\}""".r
     val replaced3 = regex3.replaceSomeIn("Replacing: ${main}. And another: ${foo}.", (m: util.matching.Regex.Match) => {
       val id = m.group(1)
@@ -29,7 +29,7 @@ object Test {
     })
     assert(replaced3 == "Replacing: main. And another: ${foo}.")
   }
-  
+
   def groupsMatching {
     val Date = """(\d+)/(\d+)/(\d+)""".r
     for (Regex.Groups(a, b, c) <- Date findFirstMatchIn "1/1/2001 marks the start of the millenium. 31/12/2000 doesn't.") {
@@ -43,5 +43,5 @@ object Test {
       assert(c == "2001" || c == "2000")
     }
   }
-  
+
 }
diff --git a/test/files/run/ReverseSeqView.scala b/test/files/run/ReverseSeqView.scala
index 517f1cc..2004791 100644
--- a/test/files/run/ReverseSeqView.scala
+++ b/test/files/run/ReverseSeqView.scala
@@ -5,14 +5,14 @@
 
 
 object Test extends App {
-  
+
   val lstv = List(1, 2, 3).view
   val lstvr = lstv.reverse
   assert(lstvr.iterator.toList == List(3, 2, 1))
   assert(lstvr.reverse == List(1, 2, 3))
   assert(lstvr.reverseIterator.toList == List(1, 2, 3))
   assert(lstvr.reverseMap(_ + 1) == List(2, 3, 4))
-  
+
 }
 
 
diff --git a/test/files/run/SymbolsTest.scala b/test/files/run/SymbolsTest.scala
index 53caa5e..d5948ea 100644
--- a/test/files/run/SymbolsTest.scala
+++ b/test/files/run/SymbolsTest.scala
@@ -1,6 +1,5 @@
 
-
-
+import scala.language.reflectiveCalls
 
 class Slazz {
   val s1 = 'myFirstSymbol
diff --git a/test/files/run/UnrolledBuffer.scala b/test/files/run/UnrolledBuffer.scala
index fe08e81..62a1f7d 100644
--- a/test/files/run/UnrolledBuffer.scala
+++ b/test/files/run/UnrolledBuffer.scala
@@ -7,12 +7,12 @@ import collection.mutable.UnrolledBuffer
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     val u1 = new UnrolledBuffer[Int]
     assert(u1.isEmpty)
     assert(u1.size == 0)
-    
+
     u1 += 1
     u1 += 2
     u1 += 3
@@ -20,11 +20,11 @@ object Test {
     assert(u1.toList == List(1, 2, 3))
     assert(u1.nonEmpty)
     assert(u1.size == 3)
-    
+
     u1.clear
     assert(u1.isEmpty)
     assert(u1.size == 0)
-    
+
     u1 += 1
     u1 += 2
     u1 += 3
@@ -33,56 +33,56 @@ object Test {
     assert(u1.size == 2)
     assert(u1 == UnrolledBuffer(1, 3))
     assert(u1.toList == List(1, 3))
-    
+
     u1 concat UnrolledBuffer(5, 7, 9)
     assert(u1 == UnrolledBuffer(1, 3, 5, 7, 9))
-    
+
     val u2 = u1 map { x => (x - 1) / 2 }
     assert(u2 == UnrolledBuffer(0, 1, 2, 3, 4))
-    
+
     u1.clear
     u2.clear
     assert(u1.size == 0)
     assert(u2.size == 0)
-    
+
     for (i <- 0 until 500) u1 += i
     for (i <- 500 until 1000) u2 += i
     assert(u1.size == 500)
     assert(u2.size == 500)
     assert(u1.iterator.toList == (0 until 500).toList)
     assert((for (elem <- u1) yield elem) sameElements (0 until 500))
-    
+
     u1 concat u2
     assert(u1.size == 1000)
     assert(u2.size == 0)
     assertCorrect(u1)
-    
+
     u1 concat UnrolledBuffer()
     assertCorrect(u1)
-    
+
     val u3 = u1 map { x => x }
     var i = 0
     for (elem <- u1) {
       assert(elem == u3(i))
       i += 1
     }
-    
+
     u1.remove(999)
     assert(u1.size == 999)
     assertCorrect(u1)
-    
+
     u1.remove(500)
     assert(u1.size == 998)
     assertCorrect(u1)
-    
+
     u1.remove(5)
     assert(u1.size == 997)
     assertCorrect(u1)
-    
+
     u1.remove(0)
     assert(u1.size == 996)
     assertCorrect(u1)
-    
+
     u1.insert(0, 0)
     assert(u1.size == 997)
     assertCorrect(u1)
@@ -90,15 +90,15 @@ object Test {
     u1.insert(5, 5)
     assert(u1.size == 998)
     assertCorrect(u1)
-    
+
     u1.insert(500, 500)
     assert(u1.size == 999)
     assertCorrect(u1)
-    
+
     u1.insert(999, 999)
     assert(u1.size == 1000)
     assertCorrect(u1)
-    
+
     for (i <- -100 until 0) {
       i +=: u1
       assertCorrect(u1)
@@ -106,7 +106,7 @@ object Test {
     assert(u1.size == 1100)
     assertCorrect(u1)
   }
-  
+
   def assertCorrect(u1: UnrolledBuffer[Int]) {
     val sz = u1.size
     val store = new Array[Int](sz)
@@ -117,9 +117,9 @@ object Test {
     for (i <- 0 until sz) assert(u1(i) == (sz - i))
     for (i <- 0 until sz) u1(i) = store(i)
     for (i <- 0 until sz) assert(store(i) == u1(i))
-    
+
     assert((u1 map { x => x }) == u1)
     assert(u1.iterator.toSeq.size == u1.size)
   }
-  
+
 }
diff --git a/test/files/run/WeakHashSetTest.scala b/test/files/run/WeakHashSetTest.scala
index 3c8f380..8072aa9 100644
--- a/test/files/run/WeakHashSetTest.scala
+++ b/test/files/run/WeakHashSetTest.scala
@@ -33,7 +33,7 @@ package scala.reflect.internal.util {
     def checkEmpty {
       val hs = new WeakHashSet[String]()
       assert(hs.size == 0)
-      hs.diagnostics.fullyValidate    
+      hs.diagnostics.fullyValidate
     }
 
     // make sure += works
@@ -85,7 +85,7 @@ package scala.reflect.internal.util {
       val hs = new WeakHashSet[Collider]()
       val elements = (0 until size).toList map {x => Collider("a" + x)}
       elements foreach (hs += _)
-      // don't throw the following into a retained collection so gc 
+      // don't throw the following into a retained collection so gc
       // can remove them
       for (i <- 0 until size) {
         hs += Collider("b" + i)
@@ -150,9 +150,9 @@ package scala.reflect.internal.util {
       hs.clear()
       assert(hs.size == 0)
       elements foreach {i => assert(!(hs contains i))}
-      hs.diagnostics.fullyValidate    
+      hs.diagnostics.fullyValidate
     }
-    
+
     // check that the iterator covers all the contents
     def checkIterator {
       val hs = new WeakHashSet[String]()
@@ -161,14 +161,14 @@ package scala.reflect.internal.util {
       assert(elements.iterator.toList.sorted == elements.sorted)
       hs.diagnostics.fullyValidate
     }
-    
+
     // check that the iterator covers all the contents even when there is a collision
     def checkIteratorCollisions {
       val hs = new WeakHashSet[Collider]
       val elements = (0 until 20).toList map {x => Collider("a" + x)}
       elements foreach (hs += _)
-      assert(elements.iterator.toList.sorted == elements.sorted) 
+      assert(elements.iterator.toList.sorted == elements.sorted)
       hs.diagnostics.fullyValidate
     }
   }
-}  
+}
diff --git a/test/files/run/absoverride.scala b/test/files/run/absoverride.scala
index 8c6de09..a3c03df 100644
--- a/test/files/run/absoverride.scala
+++ b/test/files/run/absoverride.scala
@@ -26,16 +26,16 @@ trait SyncIterator extends AbsIterator {
     }
 }
 trait LoggedIterator extends AbsIterator {
-  abstract override def next: T = { 
-    val x = super.next; println("log: " + x); x 
+  abstract override def next: T = {
+    val x = super.next; println("log: " + x); x
   }
 }
-class Iter2(s: String) extends StringIterator(s) 
-               with SyncIterator with LoggedIterator;         
+class Iter2(s: String) extends StringIterator(s)
+               with SyncIterator with LoggedIterator;
 object Test {
   def main(args: Array[String]) {
     class Iter extends StringIterator(args(0)) with RichIterator with SyncIterator with LoggedIterator
     val iter = new Iter
-    iter foreach Console.println 
+    iter foreach Console.println
   }
 }
diff --git a/test/files/run/abstypetags_serialize.scala b/test/files/run/abstypetags_serialize.scala
index 93fb5dc..6ec9710 100644
--- a/test/files/run/abstypetags_serialize.scala
+++ b/test/files/run/abstypetags_serialize.scala
@@ -1,3 +1,4 @@
+import scala.language.higherKinds
 import java.io._
 import scala.reflect.runtime.universe._
 import scala.reflect.runtime.{universe => ru}
@@ -30,4 +31,4 @@ object Test extends App {
   }
 
   qwe
-}
\ No newline at end of file
+}
diff --git a/test/files/run/all-overridden.check b/test/files/run/all-overridden.check
new file mode 100644
index 0000000..1b620b1
--- /dev/null
+++ b/test/files/run/all-overridden.check
@@ -0,0 +1 @@
+method g
diff --git a/test/files/run/all-overridden.scala b/test/files/run/all-overridden.scala
new file mode 100644
index 0000000..ff51fa1
--- /dev/null
+++ b/test/files/run/all-overridden.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+
+object Test {
+  trait Foo { def f: Int = 5 ; def g: Int }
+  trait Bar extends Foo { def f: Int ; def g: Int = 5 }
+
+  def main(args: Array[String]): Unit = {
+    // We should see g, but not f or $init$.
+    typeOf[Bar].decls.toList.flatMap(_.overrides) foreach println
+  }
+}
diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check
index 7d8d181..e3ab554 100644
--- a/test/files/run/analyzerPlugins.check
+++ b/test/files/run/analyzerPlugins.check
@@ -7,7 +7,7 @@ annotationsConform(Int @testAnn, Int) [2]
 annotationsConform(Int(1) @testAnn, Int) [1]
 annotationsConform(Int(1), Int @testAnn) [1]
 annotationsConform(Nothing, Int @testAnn) [2]
-annotationsConform(String @testAnn, String) [1]
+annotationsConform(String @testAnn, String) [2]
 canAdaptAnnotations(Trees$Ident, String) [1]
 canAdaptAnnotations(Trees$Select, ?) [1]
 canAdaptAnnotations(Trees$Select, Boolean @testAnn) [1]
@@ -19,27 +19,27 @@ canAdaptAnnotations(Trees$Typed, Any) [1]
 canAdaptAnnotations(Trees$Typed, Int) [1]
 lub(List(Int @testAnn, Int)) [1]
 pluginsPt(?, Trees$Annotated) [7]
-pluginsPt(?, Trees$Apply) [8]
+pluginsPt(?, Trees$Apply) [9]
 pluginsPt(?, Trees$ApplyImplicitView) [2]
 pluginsPt(?, Trees$Assign) [7]
-pluginsPt(?, Trees$Block) [7]
+pluginsPt(?, Trees$Block) [4]
 pluginsPt(?, Trees$ClassDef) [2]
 pluginsPt(?, Trees$DefDef) [14]
-pluginsPt(?, Trees$Ident) [49]
+pluginsPt(?, Trees$Ident) [50]
 pluginsPt(?, Trees$If) [2]
-pluginsPt(?, Trees$Literal) [20]
+pluginsPt(?, Trees$Literal) [16]
 pluginsPt(?, Trees$New) [5]
 pluginsPt(?, Trees$PackageDef) [1]
 pluginsPt(?, Trees$Return) [1]
-pluginsPt(?, Trees$Select) [51]
+pluginsPt(?, Trees$Select) [48]
 pluginsPt(?, Trees$Super) [2]
 pluginsPt(?, Trees$This) [20]
-pluginsPt(?, Trees$TypeApply) [3]
+pluginsPt(?, Trees$TypeApply) [4]
 pluginsPt(?, Trees$TypeBoundsTree) [2]
 pluginsPt(?, Trees$TypeDef) [1]
-pluginsPt(?, Trees$TypeTree) [37]
+pluginsPt(?, Trees$TypeTree) [39]
 pluginsPt(?, Trees$Typed) [1]
-pluginsPt(?, Trees$ValDef) [23]
+pluginsPt(?, Trees$ValDef) [21]
 pluginsPt(Any, Trees$Literal) [2]
 pluginsPt(Any, Trees$Typed) [1]
 pluginsPt(Array[Any], Trees$ArrayValue) [1]
@@ -53,7 +53,7 @@ pluginsPt(Int @testAnn, Trees$Literal) [1]
 pluginsPt(Int, Trees$Apply) [1]
 pluginsPt(Int, Trees$Ident) [2]
 pluginsPt(Int, Trees$If) [1]
-pluginsPt(Int, Trees$Literal) [6]
+pluginsPt(Int, Trees$Literal) [5]
 pluginsPt(Int, Trees$Select) [3]
 pluginsPt(List, Trees$Apply) [1]
 pluginsPt(List[Any], Trees$Select) [1]
@@ -65,7 +65,7 @@ pluginsPt(String, Trees$Literal) [1]
 pluginsPt(String, Trees$Select) [1]
 pluginsPt(String, Trees$Typed) [1]
 pluginsPt(Unit, Trees$Assign) [1]
-pluginsPt(scala.annotation.Annotation, Trees$Apply) [5]
+pluginsPt(testAnn, Trees$Apply) [5]
 pluginsTypeSig(<none>, Trees$Template) [2]
 pluginsTypeSig(class A, Trees$ClassDef) [1]
 pluginsTypeSig(class testAnn, Trees$ClassDef) [1]
@@ -82,8 +82,8 @@ pluginsTypeSig(value lub1, Trees$ValDef) [2]
 pluginsTypeSig(value lub2, Trees$ValDef) [2]
 pluginsTypeSig(value param, Trees$ValDef) [2]
 pluginsTypeSig(value str, Trees$ValDef) [1]
-pluginsTypeSig(value x, Trees$ValDef) [5]
-pluginsTypeSig(value y, Trees$ValDef) [5]
+pluginsTypeSig(value x, Trees$ValDef) [4]
+pluginsTypeSig(value y, Trees$ValDef) [4]
 pluginsTypeSig(variable count, Trees$ValDef) [3]
 pluginsTypeSigAccessor(value annotField) [1]
 pluginsTypeSigAccessor(value inferField) [1]
@@ -98,6 +98,7 @@ pluginsTyped(()String, Trees$Ident) [1]
 pluginsTyped(()String, Trees$TypeApply) [1]
 pluginsTyped(()scala.annotation.Annotation, Trees$Select) [1]
 pluginsTyped(()testAnn, Trees$Select) [10]
+pluginsTyped(()type, Trees$TypeApply) [1]
 pluginsTyped((str: String)A <and> (param: Double)A, Trees$Select) [1]
 pluginsTyped((x$1: Any)Boolean <and> (x: Double)Boolean <and> (x: Float)Boolean <and> (x: Long)Boolean <and> (x: Int)Boolean <and> (x: Char)Boolean <and> (x: Short)Boolean <and> (x: Byte)Boolean, Trees$Select) [1]
 pluginsTyped((x$1: Int)Unit, Trees$Select) [1]
@@ -110,8 +111,7 @@ pluginsTyped(<notype>, Trees$ClassDef) [2]
 pluginsTyped(<notype>, Trees$DefDef) [14]
 pluginsTyped(<notype>, Trees$PackageDef) [1]
 pluginsTyped(<notype>, Trees$TypeDef) [1]
-pluginsTyped(<notype>, Trees$ValDef) [23]
-pluginsTyped(<root>, Trees$Ident) [1]
+pluginsTyped(<notype>, Trees$ValDef) [21]
 pluginsTyped(=> Boolean @testAnn, Trees$Select) [1]
 pluginsTyped(=> Double, Trees$Select) [4]
 pluginsTyped(=> Int, Trees$Select) [5]
@@ -124,7 +124,7 @@ pluginsTyped(A, Trees$TypeTree) [4]
 pluginsTyped(A.super.type, Trees$Super) [1]
 pluginsTyped(A.this.type, Trees$This) [11]
 pluginsTyped(Any, Trees$TypeTree) [1]
-pluginsTyped(AnyRef, Trees$Select) [2]
+pluginsTyped(AnyRef, Trees$Select) [4]
 pluginsTyped(Array[Any], Trees$ArrayValue) [1]
 pluginsTyped(Boolean @testAnn, Trees$Select) [1]
 pluginsTyped(Boolean @testAnn, Trees$TypeTree) [4]
@@ -137,12 +137,12 @@ pluginsTyped(Int @testAnn, Trees$TypeTree) [2]
 pluginsTyped(Int @testAnn, Trees$Typed) [2]
 pluginsTyped(Int(0), Trees$Literal) [3]
 pluginsTyped(Int(1) @testAnn, Trees$Typed) [1]
-pluginsTyped(Int(1), Trees$Literal) [9]
+pluginsTyped(Int(1), Trees$Literal) [8]
 pluginsTyped(Int(2), Trees$Literal) [1]
 pluginsTyped(Int, Trees$Apply) [1]
 pluginsTyped(Int, Trees$Ident) [2]
 pluginsTyped(Int, Trees$If) [2]
-pluginsTyped(Int, Trees$Select) [17]
+pluginsTyped(Int, Trees$Select) [15]
 pluginsTyped(Int, Trees$TypeTree) [13]
 pluginsTyped(List, Trees$Apply) [1]
 pluginsTyped(List, Trees$Select) [1]
@@ -150,7 +150,6 @@ pluginsTyped(List[Any], Trees$Apply) [1]
 pluginsTyped(List[Any], Trees$Select) [1]
 pluginsTyped(List[Any], Trees$TypeTree) [3]
 pluginsTyped(Nothing, Trees$Return) [1]
-pluginsTyped(Nothing, Trees$Select) [2]
 pluginsTyped(Object, Trees$Apply) [1]
 pluginsTyped(String @testAnn, Trees$Ident) [1]
 pluginsTyped(String @testAnn, Trees$Select) [1]
@@ -160,31 +159,30 @@ pluginsTyped(String("huhu"), Trees$Literal) [1]
 pluginsTyped(String("str") @testAnn, Trees$Typed) [1]
 pluginsTyped(String("str"), Trees$Literal) [1]
 pluginsTyped(String("str"), Trees$Typed) [1]
-pluginsTyped(String("two"), Trees$Literal) [3]
+pluginsTyped(String("two"), Trees$Literal) [2]
 pluginsTyped(String, Trees$Apply) [2]
 pluginsTyped(String, Trees$Block) [2]
 pluginsTyped(String, Trees$Ident) [1]
 pluginsTyped(String, Trees$Select) [9]
-pluginsTyped(String, Trees$TypeTree) [8]
+pluginsTyped(String, Trees$TypeTree) [7]
 pluginsTyped(Unit, Trees$Apply) [2]
 pluginsTyped(Unit, Trees$Assign) [8]
-pluginsTyped(Unit, Trees$Block) [7]
+pluginsTyped(Unit, Trees$Block) [4]
 pluginsTyped(Unit, Trees$If) [1]
-pluginsTyped(Unit, Trees$Literal) [8]
+pluginsTyped(Unit, Trees$Literal) [5]
 pluginsTyped(Unit, Trees$TypeTree) [1]
 pluginsTyped([A](xs: A*)List[A], Trees$Select) [1]
 pluginsTyped([T <: Int]=> Int, Trees$Select) [1]
-pluginsTyped([T0 >: ? <: ?]()T0, Trees$Select) [1]
+pluginsTyped([T0]()T0, Trees$Select) [2]
 pluginsTyped([T](xs: Array[T])scala.collection.mutable.WrappedArray[T], Trees$Select) [1]
-pluginsTyped(annotation.type, Trees$Select) [2]
+pluginsTyped(annotation.type, Trees$Select) [4]
 pluginsTyped(math.type, Trees$Select) [9]
 pluginsTyped(scala.annotation.Annotation, Trees$Apply) [1]
 pluginsTyped(scala.annotation.TypeConstraint, Trees$Select) [4]
+pluginsTyped(scala.annotation.TypeConstraint, Trees$TypeTree) [2]
 pluginsTyped(scala.collection.immutable.List.type, Trees$Select) [2]
 pluginsTyped(scala.collection.immutable.StringOps, Trees$ApplyImplicitView) [2]
 pluginsTyped(scala.collection.mutable.WrappedArray[Any], Trees$Apply) [1]
-pluginsTyped(scala.type, Trees$Ident) [1]
-pluginsTyped(scala.type, Trees$Select) [1]
 pluginsTyped(str.type, Trees$Ident) [3]
 pluginsTyped(testAnn, Trees$Apply) [5]
 pluginsTyped(testAnn, Trees$Ident) [5]
@@ -192,5 +190,7 @@ pluginsTyped(testAnn, Trees$New) [5]
 pluginsTyped(testAnn, Trees$This) [1]
 pluginsTyped(testAnn, Trees$TypeTree) [2]
 pluginsTyped(testAnn.super.type, Trees$Super) [1]
+pluginsTyped(type, Trees$Apply) [1]
 pluginsTyped(type, Trees$Select) [1]
+pluginsTyped(type, Trees$TypeTree) [1]
 pluginsTypedReturn(return f, String) [1]
diff --git a/test/files/run/analyzerPlugins.scala b/test/files/run/analyzerPlugins.scala
index daef83f..4b297ff 100644
--- a/test/files/run/analyzerPlugins.scala
+++ b/test/files/run/analyzerPlugins.scala
@@ -8,7 +8,9 @@ object Test extends DirectTest {
   def code = """
     class testAnn extends annotation.TypeConstraint
 
-    class A(param: Double) extends { val x: Int = 1; val y = "two"; type T = A } with AnyRef {
+    class A(param: Double) extends { val x: Int = 1; val y = "two" } with AnyRef {
+      type T = A
+
       val inferField = ("str": @testAnn)
       val annotField: Boolean @testAnn = false
 
@@ -77,12 +79,12 @@ object Test extends DirectTest {
     object analyzerPlugin extends AnalyzerPlugin {
       def treeClass(t: Tree) = t.getClass.toString.split('.').last
 
-      override def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = {
+      override def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Mode): Type = {
         output += s"pluginsPt($pt, ${treeClass(tree)})"
         pt
       }
-  
-      override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+
+      override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = {
         output += s"pluginsTyped($tpe, ${treeClass(tree)})"
         tpe
       }
@@ -98,7 +100,7 @@ object Test extends DirectTest {
       }
 
 
-      override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+      override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = {
         output += s"canAdaptAnnotations(${treeClass(tree)}, $pt)"
         false
       }
diff --git a/test/files/run/annotatedRetyping.scala b/test/files/run/annotatedRetyping.scala
index cf1b0f2..9b9ebd5 100644
--- a/test/files/run/annotatedRetyping.scala
+++ b/test/files/run/annotatedRetyping.scala
@@ -40,7 +40,7 @@ object Test extends DirectTest {
         defTree match {
           case impl: Template =>
             templates += typer.context.owner -> (impl, typer)
-  
+
           case dd: DefDef if dd.symbol.isPrimaryConstructor && templates.contains(dd.symbol.owner) =>
             val (impl, templTyper) = templates(dd.symbol.owner)
             for (stat <- impl.body.filterNot(_.isDef)) {
@@ -50,7 +50,7 @@ object Test extends DirectTest {
               tpr.typed(stat)
             }
 
-          case _ => 
+          case _ =>
         }
         tpe
       }
diff --git a/test/files/run/array-addition.check b/test/files/run/array-addition.check
new file mode 100644
index 0000000..7bfbd9c
--- /dev/null
+++ b/test/files/run/array-addition.check
@@ -0,0 +1,4 @@
+Array(1, 2, 3, 4)
+Array(1, 2, 3, 4)
+Array(1)
+Array(1)
diff --git a/test/files/run/array-addition.scala b/test/files/run/array-addition.scala
new file mode 100644
index 0000000..8def48e
--- /dev/null
+++ b/test/files/run/array-addition.scala
@@ -0,0 +1,11 @@
+object Test {
+  def prettyPrintArray(x: Array[_]) = println("Array(" + x.mkString(", ") + ")")
+
+  def main(args: Array[String]): Unit = {
+    prettyPrintArray(Array(1,2,3) :+ 4)
+    prettyPrintArray(1 +: Array(2,3,4))
+    prettyPrintArray(Array() :+ 1)
+    prettyPrintArray(1 +: Array())
+  }
+}
+
diff --git a/test/files/run/array-charSeq.scala b/test/files/run/array-charSeq.scala
index f7d0586..53796bb 100644
--- a/test/files/run/array-charSeq.scala
+++ b/test/files/run/array-charSeq.scala
@@ -6,6 +6,7 @@ object Test {
   def check(chars: CharSequence) {
     println("\n[check '" + chars + "'] len = " + chars.length)
     chars match {
+      case x: Predef.ArrayCharSequence  => assert(x.__arrayOfChars eq arr, ((x.__arrayOfChars, arr)))
       case x: runtime.ArrayCharSequence => assert(x.xs eq arr, ((x.xs, arr)))
       case x                            => assert(false, x)
     }
diff --git a/test/files/run/array-existential-bound.scala b/test/files/run/array-existential-bound.scala
index bc442d3..cc105d8 100644
--- a/test/files/run/array-existential-bound.scala
+++ b/test/files/run/array-existential-bound.scala
@@ -7,11 +7,11 @@ object Test extends Fooz[Array[Int]] {
   val f2 = new Fooz[Array[Int]] { }
   val f3 = new Fooz[Array[Any]] { }
   val f4 = new Fooz[Array[_]] { }
-  
+
   def main(args: Array[String]): Unit = {
     println(f1.f0(Array[String]("a", "b")))
-    println(f2.f0(1 to 1000 toArray))
+    println(f2.f0((1 to 1000).toArray))
     println(f3.f0((1 to 1000).toArray[Any]))
-    println(f4.f0('a' to 'z' toArray))
+    println(f4.f0(('a' to 'z').toArray))
   }
 }
diff --git a/test/files/run/arrayclone-old.scala b/test/files/run/arrayclone-old.scala
index c9f7556..fbca38b 100644
--- a/test/files/run/arrayclone-old.scala
+++ b/test/files/run/arrayclone-old.scala
@@ -16,7 +16,7 @@ object BooleanArrayClone{
   val cloned = it.clone();
   assert(cloned.sameElements(it));
   cloned(0) = false;
-  assert(it(0) == true) 
+  assert(it(0) == true)
 }
 
 object ByteArrayClone{
@@ -24,7 +24,7 @@ object ByteArrayClone{
   val cloned = it.clone();
   assert(cloned.sameElements(it));
   cloned(0) = 0;
-  assert(it(0) == 1) 
+  assert(it(0) == 1)
 }
 
 object ShortArrayClone{
@@ -32,7 +32,7 @@ object ShortArrayClone{
   val cloned = it.clone();
   assert(cloned.sameElements(it));
   cloned(0) = 0;
-  assert(it(0) == 1) 
+  assert(it(0) == 1)
 }
 
 object CharArrayClone{
@@ -40,7 +40,7 @@ object CharArrayClone{
   val cloned = it.clone();
   assert(cloned.sameElements(it));
   cloned(0) = 0;
-  assert(it(0) == 1) 
+  assert(it(0) == 1)
 }
 
 object IntArrayClone{
@@ -48,7 +48,7 @@ object IntArrayClone{
   val cloned = it.clone();
   assert(cloned.sameElements(it));
   cloned(0) = 0;
-  assert(it(0) == 1) 
+  assert(it(0) == 1)
 }
 
 object LongArrayClone{
@@ -56,7 +56,7 @@ object LongArrayClone{
   val cloned = it.clone();
   assert(cloned.sameElements(it));
   cloned(0) = 0;
-  assert(it(0) == 1) 
+  assert(it(0) == 1)
 }
 
 object FloatArrayClone{
@@ -64,7 +64,7 @@ object FloatArrayClone{
   val cloned = it.clone();
   assert(cloned.sameElements(it));
   cloned(0) = 0;
-  assert(it(0) == 1) 
+  assert(it(0) == 1)
 }
 
 object DoubleArrayClone{
@@ -72,7 +72,7 @@ object DoubleArrayClone{
   val cloned = it.clone();
   assert(cloned.sameElements(it));
   cloned(0) = 0;
-  assert(it(0) == 1) 
+  assert(it(0) == 1)
 }
 
 object ObjectArrayClone{
@@ -80,7 +80,7 @@ object ObjectArrayClone{
   val cloned = it.clone();
   assert(cloned.sameElements(it));
   cloned(0) = "0";
-  assert(it(0) == "1") 
+  assert(it(0) == "1")
 }
 
 object PolymorphicArrayClone{
@@ -88,14 +88,14 @@ object PolymorphicArrayClone{
     val cloned = it.clone();
     assert(cloned.sameElements(it));
     cloned(0) = zero;
-    assert(it(0) == one) 
-  }  
+    assert(it(0) == one)
+  }
 
   testIt(Array("one", "two"), "one", "two");
 
   class Mangler[T: Manifest](ts : T*){
     // this will always be a BoxedAnyArray even after we've unboxed its contents.
-    val it = ts.toArray[T]; 
+    val it = ts.toArray[T];
   }
 
   val mangled = new Mangler[Int](0, 1);
diff --git a/test/files/run/arraycopy.scala b/test/files/run/arraycopy.scala
index 82c34c2..bb06200 100644
--- a/test/files/run/arraycopy.scala
+++ b/test/files/run/arraycopy.scala
@@ -5,7 +5,7 @@ object Test {
     val a = new Array[Int](10)
     val b = new Array[Any](10)
     for (i <- 0 until 10) b(i) = i
-    
+
     Array.copy(b, 3, a, 3, 7)
     assert(a.toSeq == List(0, 0, 0, 3, 4, 5, 6, 7, 8, 9))
   }
diff --git a/test/files/run/arrays.check b/test/files/run/arrays.check
index b1f7fae..c9a3a87 100644
--- a/test/files/run/arrays.check
+++ b/test/files/run/arrays.check
@@ -1 +1,7 @@
+arrays.scala:248: warning: comparing values of types Unit and Unit using `==' will always yield true
+    check(xs(0) == u0, xs(0), u0);
+                ^
+arrays.scala:249: warning: comparing values of types Unit and Unit using `==' will always yield true
+    check(xs(1) == u1, xs(1), u1);
+                ^
 checks: 2302
diff --git a/test/files/run/arrays.scala b/test/files/run/arrays.scala
index ecebc78..c8bf80e 100644
--- a/test/files/run/arrays.scala
+++ b/test/files/run/arrays.scala
@@ -107,7 +107,7 @@ object Test {
       val s1 = if (test1) "ok" else "KO";
       val s2 = actual.toString();
       val s3 = expected.toString();
-      error(s0 + " - " + s1 + ": " + s2 + " != " + s3);
+      sys.error(s0 + " - " + s1 + ": " + s2 + " != " + s3);
     }
     checks += 1
   }
diff --git a/test/files/run/arrayview.scala b/test/files/run/arrayview.scala
index 42ced5e..97e840f 100644
--- a/test/files/run/arrayview.scala
+++ b/test/files/run/arrayview.scala
@@ -1,6 +1,6 @@
 object Test {
   def f = (1 to 100).toArray.view
-  
+
   def main(args: Array[String]): Unit = {
     val xs = (f filter (_ < 50)).reverse.filter(_ % 2 == 0).map(_ / 2).flatMap(x => Array(1, x))
     assert(xs.size == 48)
diff --git a/test/files/run/bigDecimalCache.scala b/test/files/run/bigDecimalCache.scala
index e8ebefe..c0c709a 100644
--- a/test/files/run/bigDecimalCache.scala
+++ b/test/files/run/bigDecimalCache.scala
@@ -1,9 +1,9 @@
-object Test {  
+object Test {
   def main(args: Array[String]): Unit = {
     val bd5a = BigDecimal(5)
     val mc = java.math.MathContext.DECIMAL32
     val bd5b = BigDecimal(5,mc)
-    
+
     assert(bd5b.mc == mc)
   }
 }
diff --git a/test/files/run/bigDecimalTest.check b/test/files/run/bigDecimalTest.check
index 6d11c23..36db6aa 100644
--- a/test/files/run/bigDecimalTest.check
+++ b/test/files/run/bigDecimalTest.check
@@ -3,4 +3,4 @@
 0
 0
 0
-14
+15
diff --git a/test/files/run/bigDecimalTest.scala b/test/files/run/bigDecimalTest.scala
index 07b524c..480305d 100644
--- a/test/files/run/bigDecimalTest.scala
+++ b/test/files/run/bigDecimalTest.scala
@@ -28,7 +28,7 @@ object Test {
 
     // SI-4547: implicit conversion
     assert(5 + BigDecimal(3) == BigDecimal(8))
-    
+
     // meaningless sanity check
     List[BigDecimal](a, b, c, d, e, f) map (_.scale) foreach println
   }
diff --git a/test/files/run/bitsets.check b/test/files/run/bitsets.check
index 3f01d2a..41c2ccd 100644
--- a/test/files/run/bitsets.check
+++ b/test/files/run/bitsets.check
@@ -37,6 +37,11 @@ m2_r1 = true
 m2_r2 = true
 m2_r3 = true
 
+b1:BitSet(5, 6, 7)
+b2:BitSet(5)
+b3:BitSet(5, 7)
+b4:BitSet(7)
+b0:BitSet(5, 6, 7)
 is0 = BitSet()
 is1 = BitSet()
 is2 = BitSet(2)
diff --git a/test/files/run/bitsets.scala b/test/files/run/bitsets.scala
index 2739568..5d49220 100644
--- a/test/files/run/bitsets.scala
+++ b/test/files/run/bitsets.scala
@@ -4,6 +4,8 @@
 
 //############################################################################
 
+import scala.language.postfixOps
+
 object TestMutable {
   import scala.collection.mutable.BitSet
 
@@ -37,6 +39,19 @@ object TestMutable {
   Console.println("mi1 = " + ms1.toImmutable)
   Console.println("mi2 = " + ms2.toImmutable)
   Console.println
+
+  val N = 257
+  val gen = 3
+  val bs = BitSet((1 until N): _*)
+  (1 until N).foldLeft(gen) {
+    case (acc, i) =>
+      assert(bs.size == N-i, s"Bad size for $bs, expected ${N-i} actual ${bs.size}")
+      assert(!bs.isEmpty, s"Unexpected isEmpty for $bs")
+      bs -= acc
+      acc*gen % N
+  }
+  assert(bs.size == 0, s"Expected size == 0 for $bs")
+  assert(bs.isEmpty, s"Expected isEmpty for $bs")
 }
 
 object TestMutable2 {
@@ -81,12 +96,51 @@ object TestMutable2 {
   println
 }
 
+object TestMutable3 {
+  import scala.collection.mutable.BitSet
+
+  val b0 = BitSet(5, 6)
+  val b1 = BitSet(7)
+  val b2 = BitSet(1, 5)
+  val b3 = BitSet(6, 7)
+  val b4 = BitSet(6, 7)
+
+  b1 |= b0
+  println(s"b1:$b1")
+  b2 &= b0
+  println(s"b2:$b2")
+  b3 ^= b0
+  println(s"b3:$b3")
+  b4 &~= b0
+  println(s"b4:$b4")
+  b0 ^= b0 |= b1
+  println(s"b0:$b0")
+}
+
+/***
+The memory requirements here are way beyond
+what a test should exercise.
+
+object TestMutable4 {
+  import scala.collection.mutable.BitSet
+
+  val bMax = BitSet(Int.MaxValue)
+  println(s"bMax:$bMax")
+  bMax.foreach(println)
+
+  val bLarge = BitSet(2000000001)
+  println(s"bLarge:$bLarge")
+
+  println(bMax == bLarge)
+}
+***/
+
 object TestImmutable {
   import scala.collection.immutable.BitSet
 
   val is0 = BitSet()
-  val is1 = BitSet.fromArray(Array())
-  val is2 = BitSet.fromArray(Array(4))
+  val is1 = BitSet.fromBitMask(Array())
+  val is2 = BitSet.fromBitMask(Array(4))
   val is3 = BitSet.empty
 
   Console.println("is0 = " + is0)
@@ -155,6 +209,8 @@ object TestImmutable2 {
 object Test extends App {
   TestMutable
   TestMutable2
+  TestMutable3
+  // TestMutable4
   TestImmutable
   TestImmutable2
 }
diff --git a/test/files/run/blame_eye_triple_eee-double.check b/test/files/run/blame_eye_triple_eee-double.check
new file mode 100644
index 0000000..5e46d91
--- /dev/null
+++ b/test/files/run/blame_eye_triple_eee-double.check
@@ -0,0 +1,9 @@
+if (NaN == NaN) is good
+if (x == x) is good
+if (x == NaN) is good
+if (NaN != NaN) is good
+if (x != x) is good
+if (NaN != x) is good
+x matching was good
+NaN matching was good
+loop with NaN was goood
diff --git a/test/files/instrumented/inline-in-constructors.flags b/test/files/run/blame_eye_triple_eee-double.flags
similarity index 100%
copy from test/files/instrumented/inline-in-constructors.flags
copy to test/files/run/blame_eye_triple_eee-double.flags
diff --git a/test/files/run/blame_eye_triple_eee-double.scala b/test/files/run/blame_eye_triple_eee-double.scala
new file mode 100644
index 0000000..1640aea
--- /dev/null
+++ b/test/files/run/blame_eye_triple_eee-double.scala
@@ -0,0 +1,61 @@
+object Test extends App {
+  import Double.NaN
+
+  // NaN must not equal NaN no matter what optimizations are applied
+  // All the following will seem redundant, but to an optimizer
+  // they can appear different
+
+  val x = NaN
+
+  if (NaN == NaN)
+    println("if (NaN == NaN) is broken")
+  else
+    println("if (NaN == NaN) is good")
+
+  if (x == x)
+    println("if (x == x) is broken")
+  else
+    println("if (x == x) is good")
+
+  if (x == NaN)
+    println("if (x == NaN) is broken")
+  else
+    println("if (x == NaN) is good")
+
+  if (NaN != NaN)
+    println("if (NaN != NaN) is good")
+  else
+    println("if (NaN != NaN) broken")
+
+  if (x != x)
+    println("if (x != x) is good")
+  else
+    println("if (x != x) broken")
+
+  if (NaN != x)
+    println("if (NaN != x) is good")
+  else
+    println("if (NaN != x) is broken")
+
+  x match {
+    case 0.0d => println("x matched 0!")
+    case NaN => println("x matched NaN!")
+    case _ => println("x matching was good")
+  }
+
+  NaN match {
+    case 0.0d => println("NaN matched 0!")
+    case NaN => println("NaN matched NaN!")
+    case _ => println("NaN matching was good")
+  }
+
+  var z = 0.0d
+  var i = 0
+  while (i < 10) {
+    if (i % 2 == 0) z = NaN
+    else z = NaN
+    i += 1
+  }
+  if (z.isNaN && i == 10) println("loop with NaN was goood")
+  else println("loop with NaN was broken")
+}
diff --git a/test/files/run/blame_eye_triple_eee-float.check b/test/files/run/blame_eye_triple_eee-float.check
new file mode 100644
index 0000000..5e46d91
--- /dev/null
+++ b/test/files/run/blame_eye_triple_eee-float.check
@@ -0,0 +1,9 @@
+if (NaN == NaN) is good
+if (x == x) is good
+if (x == NaN) is good
+if (NaN != NaN) is good
+if (x != x) is good
+if (NaN != x) is good
+x matching was good
+NaN matching was good
+loop with NaN was goood
diff --git a/test/files/instrumented/inline-in-constructors.flags b/test/files/run/blame_eye_triple_eee-float.flags
similarity index 100%
copy from test/files/instrumented/inline-in-constructors.flags
copy to test/files/run/blame_eye_triple_eee-float.flags
diff --git a/test/files/run/blame_eye_triple_eee-float.scala b/test/files/run/blame_eye_triple_eee-float.scala
new file mode 100644
index 0000000..4deb9f3
--- /dev/null
+++ b/test/files/run/blame_eye_triple_eee-float.scala
@@ -0,0 +1,61 @@
+object Test extends App {
+  import Float.NaN
+
+  // NaN must not equal NaN no matter what optimizations are applied
+  // All the following will seem redundant, but to an optimizer
+  // they can appear different
+
+  val x = NaN
+
+  if (NaN == NaN)
+    println("if (NaN == NaN) is broken")
+  else
+    println("if (NaN == NaN) is good")
+
+  if (x == x)
+    println("if (x == x) is broken")
+  else
+    println("if (x == x) is good")
+
+  if (x == NaN)
+    println("if (x == NaN) is broken")
+  else
+    println("if (x == NaN) is good")
+
+  if (NaN != NaN)
+    println("if (NaN != NaN) is good")
+  else
+    println("if (NaN != NaN) broken")
+
+  if (x != x)
+    println("if (x != x) is good")
+  else
+    println("if (x != x) broken")
+
+  if (NaN != x)
+    println("if (NaN != x) is good")
+  else
+    println("if (NaN != x) is broken")
+
+  x match {
+    case 0.0f => println("x matched 0!")
+    case NaN => println("x matched NaN!")
+    case _ => println("x matching was good")
+  }
+
+  NaN match {
+    case 0.0f => println("NaN matched 0!")
+    case NaN => println("NaN matched NaN!")
+    case _ => println("NaN matching was good")
+  }
+
+  var z = 0.0f
+  var i = 0
+  while (i < 10) {
+    if (i % 2 == 0) z = NaN
+    else z = NaN
+    i += 1
+  }
+  if (z.isNaN && i == 10) println("loop with NaN was goood")
+  else println("loop with NaN was broken")
+}
diff --git a/test/files/run/boolexprs.scala b/test/files/run/boolexprs.scala
index 4f1c4b1..b9b4fae 100644
--- a/test/files/run/boolexprs.scala
+++ b/test/files/run/boolexprs.scala
@@ -10,7 +10,7 @@ class Counter {
 
 object Test1 {
   var flag = false;
-  def flip: Boolean = { val tmp = flag; flag = !flag; tmp } 
+  def flip: Boolean = { val tmp = flag; flag = !flag; tmp }
   def run: Int = {
     val c = new Counter;
     c.incrThen(flip || flip);
diff --git a/test/files/run/bridges.scala b/test/files/run/bridges.scala
index fda86ea..eb036bd 100644
--- a/test/files/run/bridges.scala
+++ b/test/files/run/bridges.scala
@@ -3588,7 +3588,7 @@ object Test {
         errors = errors + 1;
       }
     } catch {
-      case exception => {
+      case exception: Throwable => {
         Console.print(name + " raised exception " + exception);
         Console.println;
         errors = errors + 1;
diff --git a/test/files/run/bugs.scala b/test/files/run/bugs.scala
index ca59860..02849b5 100644
--- a/test/files/run/bugs.scala
+++ b/test/files/run/bugs.scala
@@ -46,7 +46,7 @@ object Bug135Test {
 
   def test(args: Array[String]) {
     val myMap:TreeMap[Int, String] = new TreeMap
-    val map1 = myMap + Pair(42, "The answer")
+    val map1 = myMap + ((42, "The answer"))
     println(map1.get(42))
   }
 
@@ -304,7 +304,7 @@ object Bug250Test {
 // Bug 257
 
 object Bug257Test {
-  def sayhello(): Unit = { Console.println("I should come 1st and 2nd"); };  
+  def sayhello(): Unit = { Console.println("I should come 1st and 2nd"); };
   def sayhi(): Unit = { Console.println("I should come last"); };
 
   def f1(x: Unit): Unit = ();
@@ -444,7 +444,7 @@ object Test  {
     try {
       test;
     } catch {
-      case exception =>
+      case exception: Throwable =>
         Console.print("Exception in thread \"" + Thread.currentThread + "\" " + exception);
         Console.println;
         errors += 1
diff --git a/test/files/run/case-class-23.check b/test/files/run/case-class-23.check
new file mode 100644
index 0000000..888ed2c
--- /dev/null
+++ b/test/files/run/case-class-23.check
@@ -0,0 +1,2 @@
+23
+(1,23)
diff --git a/test/files/run/case-class-23.scala b/test/files/run/case-class-23.scala
new file mode 100644
index 0000000..92b7195
--- /dev/null
+++ b/test/files/run/case-class-23.scala
@@ -0,0 +1,33 @@
+case class TwentyThree(
+  _1: Int,
+  _2: Int,
+  _3: Int,
+  _4: Int,
+  _5: Int,
+  _6: Int,
+  _7: Int,
+  _8: Int,
+  _9: Int,
+  _10: Int,
+  _11: Int,
+  _12: Int,
+  _13: Int,
+  _14: Int,
+  _15: Int,
+  _16: Int,
+  _17: Int,
+  _18: Int,
+  _19: Int,
+  _20: Int,
+  _21: Int,
+  _22: Int,
+  _23: Int
+)
+
+object Test extends App {
+  val x = new TwentyThree(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23)
+  println(x._23)
+  assert(x.copy(_1 = 1) == x)
+  val TwentyThree(a, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, b) = x
+  println((a, b))
+}
diff --git a/test/files/run/caseClassEquality.scala b/test/files/run/caseClassEquality.scala
index 4940d80..c11d7ad 100644
--- a/test/files/run/caseClassEquality.scala
+++ b/test/files/run/caseClassEquality.scala
@@ -11,25 +11,25 @@ object Test {
       case _      => false
     }
   }
-  
+
   case class CS1(xs: Any*)
   class CS2(xs: Seq[_]*) extends CS1(xs: _*)
   class CS3(xs: IndexedSeq[Int]*) extends CS2(xs: _*)
-  
+
   case class H1(x: Int, y: Double)
   class H2(x: Double, y: Int) extends H1(y, x)
-  
+
   def main(args: Array[String]): Unit = {
     assert(C1(5) == new C2(5))
     assert(new C2(5) == C1(5))
     assert(C1(5).hashCode == new C2(5).hashCode)
     assert(new C2(5).hashCode == C1(5).hashCode)
-  
+
     assert(C1(5) != new C3(5))
     assert(new C3(5) != C1(5))
-    
+
     assert(CS1(List(1d,2d), Seq[Float](3f, 4f)) == new CS3(IndexedSeq(1,2), IndexedSeq(3, 4)))
-    
+
     assert(H1(5, 10d) == new H2(10d, 5))
     assert(H1(5, 10d).hashCode == new H2(10d, 5).hashCode)
   }
diff --git a/test/files/run/caseclasses.scala b/test/files/run/caseclasses.scala
index 5aafea5..668c984 100644
--- a/test/files/run/caseclasses.scala
+++ b/test/files/run/caseclasses.scala
@@ -1,6 +1,6 @@
 case class Foo(x: Int)(y: Int)
 
-case class Bar
+case class Bar()
 
 abstract class Base
 abstract case class Abs(x: Int) extends Base
diff --git a/test/files/run/castsingleton.scala b/test/files/run/castsingleton.scala
index 47bd613..339f5e0 100644
--- a/test/files/run/castsingleton.scala
+++ b/test/files/run/castsingleton.scala
@@ -8,4 +8,4 @@ object Test extends App {
   }
 
   empty(L())
-} 
+}
diff --git a/test/files/run/checked.scala b/test/files/run/checked.scala
index 06bc0c0..e4db9c0 100644
--- a/test/files/run/checked.scala
+++ b/test/files/run/checked.scala
@@ -23,9 +23,9 @@ trait T {
 // Should not throw
 class D extends B with T {
   val sum = x + y + z + b1 + b2 + t1 + t2
-  override def toString = 
+  override def toString =
     "sum = " + sum
-    
+
 }
 
 abstract class NeedsXEarly {
@@ -91,7 +91,7 @@ class TestInterference extends {
 
 
 object Test extends App {
-  
+
   def shouldThrow(t: => Unit) = try {
     t
     println("[FAIL]: No UFE thrown")
diff --git a/test/files/run/classfile-format-51.scala b/test/files/run/classfile-format-51.scala
index 378caa7..f92382d 100644
--- a/test/files/run/classfile-format-51.scala
+++ b/test/files/run/classfile-format-51.scala
@@ -91,14 +91,14 @@ object Test extends DirectTest {
     val bytes = cw.toByteArray()
 
     val fos = new FileOutputStream(new File(s"${testOutput.path}/$invokerClassName.class"))
-    try 
+    try
       fos write bytes
     finally
       fos.close()
 
   }
 
-  def code = 
+  def code =
 """
 object Driver {
   val invoker = new DynamicInvoker()
@@ -112,14 +112,14 @@ object Driver {
     System.setErr(System.out)
     try {
       // this test is only valid under JDK 1.7+
-      testUnderJavaAtLeast("1.7") {  
+      testUnderJavaAtLeast("1.7") {
         generateClass()
         compile()
         ()
       } otherwise {
         ()
       }
-    } 
+    }
     finally
       System.setErr(prevErr)
   }
diff --git a/test/files/run/classfile-format-52.scala b/test/files/run/classfile-format-52.scala
index 7afa09a..e12c841 100644
--- a/test/files/run/classfile-format-52.scala
+++ b/test/files/run/classfile-format-52.scala
@@ -7,7 +7,7 @@ import asm.{AnnotationVisitor, ClassWriter, FieldVisitor, Handle, MethodVisitor,
 import Opcodes._
 
 // This test ensures that we can read JDK 8 (classfile format 52) files, including those
-// with default methods. To do that it first uses ASM to generate an interface called 
+// with default methods. To do that it first uses ASM to generate an interface called
 // HasDefaultMethod. Then it runs a normal compile on Scala source that extends that
 // interface. Any failure will be dumped to std out.
 //
@@ -40,14 +40,14 @@ object Test extends DirectTest {
     val bytes = cw.toByteArray()
 
     val fos = new FileOutputStream(new File(s"${testOutput.path}/$interfaceName.class"))
-    try 
+    try
       fos write bytes
     finally
       fos.close()
 
   }
 
-  def code = 
+  def code =
 """
 class Driver extends HasDefaultMethod {
   println(publicMethod())
@@ -65,12 +65,12 @@ class Driver extends HasDefaultMethod {
         generateInterface()
         compile()
         Class.forName("Driver").newInstance()
-        ()   
+        ()
       } otherwise {
         println("hello from publicMethod")
-        println("hello from staticMethod")        
+        println("hello from staticMethod")
       }
-    } 
+    }
     finally
       System.setErr(prevErr)
   }
diff --git a/test/files/run/classmanifests_new_alias.scala b/test/files/run/classmanifests_new_alias.scala
index 12bd93b..777bd5d 100644
--- a/test/files/run/classmanifests_new_alias.scala
+++ b/test/files/run/classmanifests_new_alias.scala
@@ -1,5 +1,7 @@
+
+@deprecated("Suppress warnings", since="2.11")
 object Test extends App {
   type CM[T] = ClassManifest[T]
   println(implicitly[CM[Int]])
   println(implicitly[CM[Int]] eq Manifest.Int)
-}
\ No newline at end of file
+}
diff --git a/test/files/run/classmanifests_new_core.scala b/test/files/run/classmanifests_new_core.scala
index 63dbfab..0a9c58e 100644
--- a/test/files/run/classmanifests_new_core.scala
+++ b/test/files/run/classmanifests_new_core.scala
@@ -1,4 +1,5 @@
+@deprecated("Suppress warnings", since="2.11")
 object Test extends App {
   println(classManifest[Int])
   println(classManifest[Int] eq Manifest.Int)
-}
\ No newline at end of file
+}
diff --git a/test/files/run/classof.check b/test/files/run/classof.check
index 0d650b8..83e2923 100644
--- a/test/files/run/classof.check
+++ b/test/files/run/classof.check
@@ -17,6 +17,6 @@ class [Lscala.runtime.BoxedUnit;
 class [I
 class [D
 class [Lscala.collection.immutable.List;
-Functions: 
+Functions:
 interface scala.Function2
 interface scala.Function1
diff --git a/test/files/run/classof.scala b/test/files/run/classof.scala
index 10c07d2..257829e 100644
--- a/test/files/run/classof.scala
+++ b/test/files/run/classof.scala
@@ -13,19 +13,19 @@ object Test {
     println(classOf[Long])
     println(classOf[Float])
     println(classOf[Double])
-    
+
     println("Class types")
     println(classOf[SomeClass])
     println(classOf[List[Array[Float]]])
     println(classOf[(String, Map[Int, String])])
 
     println("Arrays:")
-    println(classOf[Array[Unit]])    
+    println(classOf[Array[Unit]])
     println(classOf[Array[Int]])
     println(classOf[Array[Double]])
     println(classOf[Array[List[String]]])
 
-    println("Functions: ")
+    println("Functions:")
     println(classOf[(Int, Int) => Unit])
     println(classOf[Int => Boolean])
   }
diff --git a/test/files/run/collection-conversions.scala b/test/files/run/collection-conversions.scala
index d842742..cd05f68 100644
--- a/test/files/run/collection-conversions.scala
+++ b/test/files/run/collection-conversions.scala
@@ -8,11 +8,11 @@ object Test {
 
   def printResult[A,B](msg: String, obj: A, expected: B)(implicit tag: ClassTag[A], tag2: ClassTag[B]) = {
     print("  :" + msg +": ")
-    val isArray = obj match { 
-      case x: Array[Int] => true 
+    val isArray = obj match {
+      case x: Array[Int] => true
       case _ => false
     }
-    val expectedEquals = 
+    val expectedEquals =
       if(isArray) obj.asInstanceOf[Array[Int]].toSeq == expected.asInstanceOf[Array[Int]].toSeq
       else obj == expected
     val tagEquals = tag == tag2
@@ -49,7 +49,7 @@ object Test {
     printResult("[Copy]   ParVector", col.to[ParVector], testParVector)
     printResult("[Copy]   ParArray ", col.to[ParArray], testParArray)
   }
-  
+
   def main(args: Array[String]): Unit = {
     testConversion("iterator", (1 to 3).iterator)
     testConversion("Vector", Vector(1,2,3))
diff --git a/test/files/run/collection-stacks.check b/test/files/run/collection-stacks.check
new file mode 100644
index 0000000..895bde3
--- /dev/null
+++ b/test/files/run/collection-stacks.check
@@ -0,0 +1,15 @@
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+3-2-1: true
+3-2-1: true
+apply
+3: true
+3: true
+1: true
+1: true
+top
+3: true
+3: true
+pop
+2-1: true
+3: true
+2-1: true
diff --git a/test/files/run/collection-stacks.scala b/test/files/run/collection-stacks.scala
new file mode 100644
index 0000000..be9fbbf
--- /dev/null
+++ b/test/files/run/collection-stacks.scala
@@ -0,0 +1,38 @@
+import scala.collection.{ immutable, mutable }
+
+object Test extends App {
+  def mutableStack[T](xs: T*): mutable.Stack[T] = {
+    val s = new mutable.Stack[T]
+    s.pushAll(xs)
+    s
+  }
+
+  def immutableStack[T](xs: T*): immutable.Stack[T] = {
+    immutable.Stack.empty[T] pushAll xs
+  }
+
+  def check[T](expected: T, got: T) {
+    println(got + ": " + (expected == got))
+  }
+
+  // check #957
+  check("3-2-1", immutableStack(1, 2, 3).iterator.mkString("-"))
+  check("3-2-1", mutableStack(1, 2, 3).iterator.mkString("-"))
+
+  println("apply")
+  check(3, immutableStack(1, 2, 3).apply(0))
+  check(3, mutableStack(1, 2, 3).apply(0))
+  check(1, immutableStack(1, 2, 3).apply(2))
+  check(1, mutableStack(1, 2, 3).apply(2))
+
+  println("top")
+  check(3, immutableStack(1, 2, 3).top)
+  check(3, mutableStack(1, 2, 3).top)
+
+  println("pop")
+  check("2-1", immutableStack(1, 2, 3).pop.mkString("-"))
+  check(3, mutableStack(1, 2, 3).pop())
+  check("2-1", { val s = mutableStack(1, 2, 3); s.pop(); s.toList.mkString("-") })
+}
+
+// vim: set ts=2 sw=2 et:
diff --git a/test/files/run/collections-toSelf.scala b/test/files/run/collections-toSelf.scala
index 2adbc22..02f1dd6 100644
--- a/test/files/run/collections-toSelf.scala
+++ b/test/files/run/collections-toSelf.scala
@@ -2,7 +2,7 @@ object Test {
   val map = Map(1 -> 2)
   val set = Set(1, 2)
   val seq = collection.immutable.Seq(1, 2)
-  
+
   def main(args: Array[String]): Unit = {
     assert(map.toMap eq map)
     assert(set.toSet eq set)
diff --git a/test/files/run/collections.scala b/test/files/run/collections.scala
index 69c40fa..2b19ff4 100644
--- a/test/files/run/collections.scala
+++ b/test/files/run/collections.scala
@@ -1,5 +1,6 @@
-import collection._
+import scala.collection._
 import scala.compat.Platform.currentTime
+import scala.language.postfixOps
 
 object Test extends App {
 
@@ -61,7 +62,7 @@ object Test extends App {
     }
     time {
       var x = 0
-      for (i <- 0 to 10000) 
+      for (i <- 0 to 10000)
         s get i match {
           case Some(i) => x += i
           case None =>
@@ -96,7 +97,7 @@ object Test extends App {
     }
     time {
       var x = 0
-      for (i <- 0 to 10000) 
+      for (i <- 0 to 10000)
         s get i match {
           case Some(i) => x += i
           case None =>
diff --git a/test/files/run/colltest.check b/test/files/run/colltest.check
index e5bb013..1e850bb 100644
--- a/test/files/run/colltest.check
+++ b/test/files/run/colltest.check
@@ -1,3 +1,4 @@
+warning: there were 2 deprecation warning(s); re-run with -deprecation for details
 true
 false
 true
diff --git a/test/files/run/colltest1.scala b/test/files/run/colltest1.scala
index 54adeb7..8dce69a 100644
--- a/test/files/run/colltest1.scala
+++ b/test/files/run/colltest1.scala
@@ -1,4 +1,8 @@
-import collection._
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
+import scala.collection._
+import scala.language.postfixOps
 
 object Test extends App {
 
@@ -61,7 +65,7 @@ object Test extends App {
     assert(ten.toStream == ten)
     assert(ten.toString endsWith "(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)")
     assert(ten.mkString("[", "; ", "]") endsWith "[1; 2; 3; 4; 5; 6; 7; 8; 9; 10]")
-  }                               
+  }
 
   def orderedIterableTest(empty: Iterable[Int]) {
     orderedTraversableTest(empty)
@@ -84,7 +88,7 @@ object Test extends App {
     assert(ten(0) == 1 && ten(9) == 10)
     assert((ten lengthCompare 10) == 0 && (ten lengthCompare 1) > 0 && (ten lengthCompare 11) < 0)
     assert((ten isDefinedAt 0) && (ten isDefinedAt 9))
-    assert(!(ten isDefinedAt -1)); 
+    assert(!(ten isDefinedAt -1));
     assert(!(ten isDefinedAt 10))
     val tenten = ten zip ten
     assert((tenten map (_._1)) == ten)
@@ -174,8 +178,8 @@ object Test extends App {
     m ++= ('J' to 'Z') map (x => (x.toString -> x.toString))
     println(m.toList.sorted)
     assert(!m.isEmpty)
-    assert(m.keySet forall (k => (m get k) == Some(k))) 
-    assert(m.keySet forall (k => (m apply k) == k)) 
+    assert(m.keySet forall (k => (m get k) == Some(k)))
+    assert(m.keySet forall (k => (m apply k) == k))
     assert(m.keySet forall (m contains))
     assert(m.getOrElse("7", "@") == "@")
     assert(m.keySet.size == 26)
diff --git a/test/files/run/comparable-comparator.scala b/test/files/run/comparable-comparator.scala
index ac943c6..f059cc5 100644
--- a/test/files/run/comparable-comparator.scala
+++ b/test/files/run/comparable-comparator.scala
@@ -1,7 +1,7 @@
 
 object Test {
   import java.util.Comparator
-  
+
   class C1(val s: String) extends Comparable[C1] {
     def compareTo(other: C1) = s compareTo other.s
     override def toString = s
@@ -10,18 +10,19 @@ object Test {
     def compareTo(other: C2) = s compareTo other.s
     override def toString = s
   }
-  
+
   implicit val cmp: Comparator[C2] = new Comparator[C2] {
     def compare(p1: C2, p2: C2) = p2.s compareTo p1.s
   }
 
-  val strs = "zip foo bar baz aggle bing bong" split ' ' toList
+  val words = "zip foo bar baz aggle bing bong" split ' '
+  val strs = words.toList
   val c1s = strs map (x => new C1(x))
   val c2s = strs map (x => new C2(x))
-  
+
   val sorted1 = c1s.sorted map (_.s)
   val sorted2 = c2s.sorted map (_.s)
-  
+
   def main(args: Array[String]): Unit = {
     assert(sorted1 == sorted2.reverse)
   }
diff --git a/test/files/run/compiler-asSeenFrom.check b/test/files/run/compiler-asSeenFrom.check
index 47d40b0..7305504 100644
--- a/test/files/run/compiler-asSeenFrom.check
+++ b/test/files/run/compiler-asSeenFrom.check
@@ -1,6 +1,54 @@
 class C {
   type                       seen from prefix           is
   ----                       ----------------           --
+  C.this.I[Int]              C[List[T3]]                C[List[T3]]#I[Int]
+  C.this.I[Int]              C[T1]                      C[T1]#I[Int]
+  C.this.I[Int]              D[A1]                      D[A1]#I[Int]
+  C.this.I[Int]              D[T3]                      D[T3]#I[Int]
+  C.this.I[List[Int]]        C[List[T3]]                C[List[T3]]#I[List[Int]]
+  C.this.I[List[Int]]        C[T1]                      C[T1]#I[List[Int]]
+  C.this.I[List[Int]]        D[A1]                      D[A1]#I[List[Int]]
+  C.this.I[List[Int]]        D[T3]                      D[T3]#I[List[Int]]
+  C.this.I[T1]               C[List[T3]]                C[List[T3]]#I[List[T3]]
+  C.this.I[T1]               C[T1]                      C[T1]#I[T1]
+  C.this.I[T1]               D[A1]                      D[A1]#I[A1]
+  C.this.I[T1]               D[T3]                      D[T3]#I[T3]
+  C.this.I[T2]               C[List[T3]]                C[List[T3]]#I[T2]
+  C.this.I[T2]               C[T1]                      C[T1]#I[T2]
+  C.this.I[T2]               D[A1]                      D[A1]#I[T2]
+  C.this.I[T2]               D[T3]                      D[T3]#I[T2]
+  C.this.I[T3]               C[List[T3]]                C[List[T3]]#I[T3]
+  C.this.I[T3]               C[T1]                      C[T1]#I[T3]
+  C.this.I[T3]               D[A1]                      D[A1]#I[T3]
+  C.this.I[T3]               D[T3]                      D[T3]#I[T3]
+  C.this.I[T4]               C[List[T3]]                C[List[T3]]#I[T4]
+  C.this.I[T4]               C[T1]                      C[T1]#I[T4]
+  C.this.I[T4]               D[A1]                      D[A1]#I[T4]
+  C.this.I[T4]               D[T3]                      D[T3]#I[T4]
+  C.this.J[Int]              C[List[T3]]                C[List[T3]]#J[Int]
+  C.this.J[Int]              C[T1]                      C[T1]#J[Int]
+  C.this.J[Int]              D[A1]                      D[A1]#J[Int]
+  C.this.J[Int]              D[T3]                      D[T3]#J[Int]
+  C.this.J[List[Int]]        C[List[T3]]                C[List[T3]]#J[List[Int]]
+  C.this.J[List[Int]]        C[T1]                      C[T1]#J[List[Int]]
+  C.this.J[List[Int]]        D[A1]                      D[A1]#J[List[Int]]
+  C.this.J[List[Int]]        D[T3]                      D[T3]#J[List[Int]]
+  C.this.J[T1]               C[List[T3]]                C[List[T3]]#J[List[T3]]
+  C.this.J[T1]               C[T1]                      C[T1]#J[T1]
+  C.this.J[T1]               D[A1]                      D[A1]#J[A1]
+  C.this.J[T1]               D[T3]                      D[T3]#J[T3]
+  C.this.J[T2]               C[List[T3]]                C[List[T3]]#J[T2]
+  C.this.J[T2]               C[T1]                      C[T1]#J[T2]
+  C.this.J[T2]               D[A1]                      D[A1]#J[T2]
+  C.this.J[T2]               D[T3]                      D[T3]#J[T2]
+  C.this.J[T3]               C[List[T3]]                C[List[T3]]#J[T3]
+  C.this.J[T3]               C[T1]                      C[T1]#J[T3]
+  C.this.J[T3]               D[A1]                      D[A1]#J[T3]
+  C.this.J[T3]               D[T3]                      D[T3]#J[T3]
+  C.this.J[T4]               C[List[T3]]                C[List[T3]]#J[T4]
+  C.this.J[T4]               C[T1]                      C[T1]#J[T4]
+  C.this.J[T4]               D[A1]                      D[A1]#J[T4]
+  C.this.J[T4]               D[T3]                      D[T3]#J[T4]
   C[List[T3]]#I[T1]          D[A1]                      C[List[T3]]#I[A1]
   C[List[T3]]#I[T1]          D[T3]                      C[List[T3]]#I[T3]
   C[List[T3]]#J[T1]          D[A1]                      C[List[T3]]#J[A1]
@@ -49,6 +97,8 @@ class C {
 class D {
   type                       seen from prefix           is
   ----                       ----------------           --
+  C.this.I[T3]               D[A1]                      C.this.I[A1]
+  C.this.J[T3]               D[A1]                      C.this.J[A1]
   C[List[T3]]#I[Int]         D[A1]                      C[List[A1]]#I[Int]
   C[List[T3]]#I[List[Int]]   D[A1]                      C[List[A1]]#I[List[Int]]
   C[List[T3]]#I[T1]          D[A1]                      C[List[A1]]#I[T1]
@@ -73,6 +123,42 @@ class D {
 class I {
   type                       seen from prefix           is
   ----                       ----------------           --
+  C.this.I[Int]              D.this.J[T4]               D.this.cD.I[Int]
+  C.this.I[Int]              Z.dZ.J[A2]                 Z.dZ.cD.I[Int]
+  C.this.I[Int]              Z.dZ.J[P]                  Z.dZ.cD.I[Int]
+  C.this.I[List[Int]]        D.this.J[T4]               D.this.cD.I[List[Int]]
+  C.this.I[List[Int]]        Z.dZ.J[A2]                 Z.dZ.cD.I[List[Int]]
+  C.this.I[List[Int]]        Z.dZ.J[P]                  Z.dZ.cD.I[List[Int]]
+  C.this.I[T1]               D.this.J[T4]               D.this.cD.I[List[T3]]
+  C.this.I[T1]               Z.dZ.J[A2]                 Z.dZ.cD.I[List[A1]]
+  C.this.I[T1]               Z.dZ.J[P]                  Z.dZ.cD.I[List[A1]]
+  C.this.I[T2]               D.this.J[T4]               D.this.cD.I[T4]
+  C.this.I[T2]               Z.dZ.J[A2]                 Z.dZ.cD.I[A2]
+  C.this.I[T2]               Z.dZ.J[P]                  Z.dZ.cD.I[P]
+  C.this.I[T3]               D.this.J[T4]               D.this.cD.I[T3]
+  C.this.I[T3]               Z.dZ.J[A2]                 Z.dZ.cD.I[T3]
+  C.this.I[T3]               Z.dZ.J[P]                  Z.dZ.cD.I[T3]
+  C.this.I[T4]               D.this.J[T4]               D.this.cD.I[T4]
+  C.this.I[T4]               Z.dZ.J[A2]                 Z.dZ.cD.I[T4]
+  C.this.I[T4]               Z.dZ.J[P]                  Z.dZ.cD.I[T4]
+  C.this.J[Int]              D.this.J[T4]               D.this.cD.J[Int]
+  C.this.J[Int]              Z.dZ.J[A2]                 Z.dZ.cD.J[Int]
+  C.this.J[Int]              Z.dZ.J[P]                  Z.dZ.cD.J[Int]
+  C.this.J[List[Int]]        D.this.J[T4]               D.this.cD.J[List[Int]]
+  C.this.J[List[Int]]        Z.dZ.J[A2]                 Z.dZ.cD.J[List[Int]]
+  C.this.J[List[Int]]        Z.dZ.J[P]                  Z.dZ.cD.J[List[Int]]
+  C.this.J[T1]               D.this.J[T4]               D.this.cD.J[List[T3]]
+  C.this.J[T1]               Z.dZ.J[A2]                 Z.dZ.cD.J[List[A1]]
+  C.this.J[T1]               Z.dZ.J[P]                  Z.dZ.cD.J[List[A1]]
+  C.this.J[T2]               D.this.J[T4]               D.this.cD.J[T4]
+  C.this.J[T2]               Z.dZ.J[A2]                 Z.dZ.cD.J[A2]
+  C.this.J[T2]               Z.dZ.J[P]                  Z.dZ.cD.J[P]
+  C.this.J[T3]               D.this.J[T4]               D.this.cD.J[T3]
+  C.this.J[T3]               Z.dZ.J[A2]                 Z.dZ.cD.J[T3]
+  C.this.J[T3]               Z.dZ.J[P]                  Z.dZ.cD.J[T3]
+  C.this.J[T4]               D.this.J[T4]               D.this.cD.J[T4]
+  C.this.J[T4]               Z.dZ.J[A2]                 Z.dZ.cD.J[T4]
+  C.this.J[T4]               Z.dZ.J[P]                  Z.dZ.cD.J[T4]
   C[List[T3]]#I[T1]          D.this.J[T4]               C[List[T3]]#I[List[T3]]
   C[List[T3]]#I[T1]          Z.dZ.J[A2]                 C[List[T3]]#I[List[A1]]
   C[List[T3]]#I[T1]          Z.dZ.J[P]                  C[List[T3]]#I[List[A1]]
@@ -137,6 +223,14 @@ class I {
 class J {
   type                       seen from prefix           is
   ----                       ----------------           --
+  C.this.I[T3]               Z.dZ.J[A2]                 C.this.I[A1]
+  C.this.I[T3]               Z.dZ.J[P]                  C.this.I[A1]
+  C.this.I[T4]               Z.dZ.J[A2]                 C.this.I[A2]
+  C.this.I[T4]               Z.dZ.J[P]                  C.this.I[P]
+  C.this.J[T3]               Z.dZ.J[A2]                 C.this.J[A1]
+  C.this.J[T3]               Z.dZ.J[P]                  C.this.J[A1]
+  C.this.J[T4]               Z.dZ.J[A2]                 C.this.J[A2]
+  C.this.J[T4]               Z.dZ.J[P]                  C.this.J[P]
   C[List[T3]]#I[Int]         Z.dZ.J[A2]                 C[List[A1]]#I[Int]
   C[List[T3]]#I[Int]         Z.dZ.J[P]                  C[List[A1]]#I[Int]
   C[List[T3]]#I[List[Int]]   Z.dZ.J[A2]                 C[List[A1]]#I[List[Int]]
diff --git a/test/files/run/compiler-asSeenFrom.scala b/test/files/run/compiler-asSeenFrom.scala
index 19feb45..ea96c6f 100644
--- a/test/files/run/compiler-asSeenFrom.scala
+++ b/test/files/run/compiler-asSeenFrom.scala
@@ -1,6 +1,56 @@
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
 import scala.tools.nsc._
-import scala.tools.partest.CompilerTest
+import scala.tools.partest.DirectTest
 import scala.collection.{ mutable, immutable, generic }
+import scala.language.{postfixOps, implicitConversions}
+import scala.reflect.runtime.{universe => ru}
+
+// necessary to avoid bincompat with scala-partest compiled against the old compiler
+abstract class CompilerTest extends DirectTest {
+  def check(source: String, unit: global.CompilationUnit): Unit
+
+  lazy val global: Global = newCompiler()
+  lazy val units: List[global.CompilationUnit] = compilationUnits(global)(sources: _ *)
+  import global._
+  import definitions.{ compilerTypeFromTag }
+
+  override def extraSettings = "-feature -usejavacp -d " + testOutput.path
+
+  def show() = (sources, units).zipped foreach check
+
+  // Override at least one of these...
+  def code = ""
+  def sources: List[String] = List(code)
+
+  // Utility functions
+  class MkType(sym: Symbol) {
+    def apply[M](implicit t: ru.TypeTag[M]): Type =
+      if (sym eq NoSymbol) NoType
+      else appliedType(sym, compilerTypeFromTag(t))
+  }
+  implicit def mkMkType(sym: Symbol) = new MkType(sym)
+
+  def allMembers(root: Symbol): List[Symbol] = {
+    def loop(seen: Set[Symbol], roots: List[Symbol]): List[Symbol] = {
+      val latest = roots flatMap (_.info.members) filterNot (seen contains _)
+      if (latest.isEmpty) seen.toList.sortWith(_ isLess _)
+      else loop(seen ++ latest, latest)
+    }
+    loop(Set(), List(root))
+  }
+
+  class SymsInPackage(pkgName: String) {
+    def pkg     = rootMirror.getPackage(TermName(pkgName))
+    def classes = allMembers(pkg) filter (_.isClass)
+    def modules = allMembers(pkg) filter (_.isModule)
+    def symbols = classes ++ terms filterNot (_ eq NoSymbol)
+    def terms   = allMembers(pkg) filter (s => s.isTerm && !s.isConstructor)
+    def tparams = classes flatMap (_.info.typeParams)
+    def tpes    = symbols map (_.tpe) distinct
+  }
+}
 
 /** It's too messy but it's better than not having it.
  */
@@ -107,7 +157,7 @@ package ll {
   def check(source: String, unit: global.CompilationUnit) = {
     import syms._
 
-    afterTyper {
+    exitingTyper {
       val typeArgs = List[Type](IntClass.tpe, ListClass[Int]) ++ tparams.map(_.tpe)
       permute(typeArgs) foreach println
     }
@@ -117,6 +167,5 @@ package ll {
           println(sigs.mkString(x + " { // after " + ph + "\n  ", "\n  ", "\n}\n"))
       }
     }
-    true
   }
 }
diff --git a/test/files/run/concat-two-strings.scala b/test/files/run/concat-two-strings.scala
index ad796fe..c8881aa 100644
--- a/test/files/run/concat-two-strings.scala
+++ b/test/files/run/concat-two-strings.scala
@@ -8,7 +8,7 @@ object Test {
   def f4(x: List[Int])   = "" + x
   def f5(x: Any)         = "" + x
   def f6(x: AnyVal)      = "" + x
-  
+
   def main(args: Array[String]): Unit = {
     List(f1("a"), f2(5), f3(null), f3(Array('a')), f4(List(1)), f5(null), f6(55d)) mkString ""
   }
diff --git a/test/files/run/concurrent-map-conversions.scala b/test/files/run/concurrent-map-conversions.scala
index 0350b69..d23d5bb 100644
--- a/test/files/run/concurrent-map-conversions.scala
+++ b/test/files/run/concurrent-map-conversions.scala
@@ -4,33 +4,33 @@
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     testConversions()
     testConverters()
   }
-  
+
   def needPackageConcurrentMap(map: collection.concurrent.Map[Int, Int]) {
   }
   def needJavaConcurrent(map: java.util.concurrent.ConcurrentMap[Int, Int]) {
   }
-  
+
   def testConversions() {
     import collection.JavaConversions._
     val skiplist = new java.util.concurrent.ConcurrentSkipListMap[Int, Int]
     val ctrie = new collection.concurrent.TrieMap[Int, Int]
-    
+
     needPackageConcurrentMap(skiplist)
     needJavaConcurrent(ctrie)
   }
-  
+
   def testConverters() {
     import collection.JavaConverters._
     val skiplist = new java.util.concurrent.ConcurrentSkipListMap[Int, Int]
     val ctrie = new collection.concurrent.TrieMap[Int, Int]
-    
+
     needPackageConcurrentMap(skiplist.asScala)
     needJavaConcurrent(ctrie.asJava)
   }
-  
+
 }
diff --git a/test/files/run/concurrent-stream.scala b/test/files/run/concurrent-stream.scala
index 42c6959..9d5ba04 100644
--- a/test/files/run/concurrent-stream.scala
+++ b/test/files/run/concurrent-stream.scala
@@ -1,32 +1,33 @@
 // test concurrent calls to Stream.tail
+@deprecated("Suppress warnings", since="2.11")
 object Test  {
 
-def slowRange(from: Int, until: Int, cons: (Int, => Stream[Int]) => Stream[Int]): Stream[Int] = {
-  var current = from
-  def next: Stream[Int] = {
-    Thread.sleep(100)
-    if (current >= until) Stream.empty
-    else {
-      val stream = cons(current, next)
-      current += 1
-      stream
+  def slowRange(from: Int, until: Int, cons: (Int, => Stream[Int]) => Stream[Int]): Stream[Int] = {
+    var current = from
+    def next: Stream[Int] = {
+      Thread.sleep(100)
+      if (current >= until) Stream.empty
+      else {
+        val stream = cons(current, next)
+        current += 1
+        stream
+      }
     }
+    next
   }
-  next
-}
 
-def testCons(cons: (Int, => Stream[Int]) => Stream[Int]): Unit = {
-  import scala.actors.Actor._
+  def testCons(cons: (Int, => Stream[Int]) => Stream[Int]): Unit = {
+    import scala.actors.Actor._
 
-  val stream = slowRange(0, 10, cons)
-  val main = self
-  actor { main ! stream.toList }
-  actor { main ! stream.toList }
-  val eval0 = receive { case list: List[Int] => list }
-  val eval1 = receive { case list: List[Int] => list }
-  println("Evaluation 0: " + eval0)
-  println("Evaluation 1: " + eval1)
-}
+    val stream = slowRange(0, 10, cons)
+    val main = self
+    actor { main ! stream.toList }
+    actor { main ! stream.toList }
+    val eval0 = receive { case list: List[Int @unchecked] => list }
+    val eval1 = receive { case list: List[Int @unchecked] => list }
+    println("Evaluation 0: " + eval0)
+    println("Evaluation 1: " + eval1)
+  }
 
   def main(args: Array[String]) {
     println("Testing standard cons.")
diff --git a/test/files/run/constant-optimization.check b/test/files/run/constant-optimization.check
new file mode 100644
index 0000000..957ffc5
--- /dev/null
+++ b/test/files/run/constant-optimization.check
@@ -0,0 +1,5 @@
+testBothReachable: good
+testOneReachable: good
+testAllReachable: good
+testOneUnreachable: good
+testDefaultUnreachable: good
diff --git a/test/files/instrumented/inline-in-constructors.flags b/test/files/run/constant-optimization.flags
similarity index 100%
copy from test/files/instrumented/inline-in-constructors.flags
copy to test/files/run/constant-optimization.flags
diff --git a/test/files/run/constant-optimization.scala b/test/files/run/constant-optimization.scala
new file mode 100644
index 0000000..5d13272
--- /dev/null
+++ b/test/files/run/constant-optimization.scala
@@ -0,0 +1,61 @@
+object Test extends App {
+  def testBothReachable() {
+    val i = util.Random.nextInt
+    val x = if (i % 2 == 0) null else "good"
+    val y = if (x == null) "good" else x + ""
+    println(s"testBothReachable: $y")
+  }
+
+  def testOneReachable() {
+    val i = 1
+    val x = if (i != 1) null else "good"
+    val y = if (x == null) "good" else x + ""
+    println(s"testOneReachable: $y")
+  }
+
+  def testAllReachable() {
+    val i = util.Random.nextInt
+    val y = (i % 2) match {
+      case 0 => "good"
+      case 1 => "good"
+      case _ => "good"
+    }
+    println(s"testAllReachable: $y")
+  }
+
+  def testOneUnreachable() {
+    val i = util.Random.nextInt
+    val x = if (i % 2 == 0) {
+      1
+    } else {
+      2
+    }
+    val y = x match {
+      case 0 => "good"
+      case 1 => "good"
+      case _ => "good"
+    }
+    println(s"testOneUnreachable: $y")
+  }
+
+  def testDefaultUnreachable() {
+    val i = util.Random.nextInt
+    val x = if (i % 2 == 0) {
+      1
+    } else {
+      2
+    }
+    val y = x match {
+      case 1 => "good"
+      case 2 => "good"
+      case _ => "good"
+    }
+    println(s"testDefaultUnreachable: $y")
+  }
+
+  testBothReachable()
+  testOneReachable()
+  testAllReachable()
+  testOneUnreachable()
+  testDefaultUnreachable()
+}
diff --git a/test/files/run/constant-type.check b/test/files/run/constant-type.check
index dfd8be5..77bdf61 100644
--- a/test/files/run/constant-type.check
+++ b/test/files/run/constant-type.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> :power
 ** Power User mode enabled - BEEP WHIR GYVE **
 ** :phase has been set to 'typer'.          **
@@ -13,18 +11,16 @@ scala> :power
 scala> val s = transformedType(StringClass.toType).asInstanceOf[Type]
 s: $r.intp.global.Type = String
 
-scala> { println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))) }
+scala> { println(exitingPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))) }
 Class[String](classOf[java.lang.String])
 
-scala> { afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))) }
+scala> { exitingPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))) }
 Class(classOf[java.lang.String])
 
-scala> { ConstantType(Constant(s)); println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))); }
+scala> { ConstantType(Constant(s)); println(exitingPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))); }
 Class[String](classOf[java.lang.String])
 
-scala> { ConstantType(Constant(s)); afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))); }
+scala> { ConstantType(Constant(s)); exitingPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))); }
 Class(classOf[java.lang.String])
 
 scala> 
-
-scala> 
diff --git a/test/files/run/constant-type.scala b/test/files/run/constant-type.scala
index 84539e2..373746a 100644
--- a/test/files/run/constant-type.scala
+++ b/test/files/run/constant-type.scala
@@ -9,9 +9,9 @@ object Test extends ReplTest {
   def code = """
 :power
 val s = transformedType(StringClass.toType).asInstanceOf[Type]
-{ println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))) }
-{ afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))) }
-{ ConstantType(Constant(s)); println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))); }
-{ ConstantType(Constant(s)); afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))); }
+{ println(exitingPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))) }
+{ exitingPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))) }
+{ ConstantType(Constant(s)); println(exitingPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))); }
+{ ConstantType(Constant(s)); exitingPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))); }
   """
 }
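
The two hunks above only track the compiler-API rename from afterPhase to exitingPhase; the REPL transcript and the driving script are otherwise unchanged. For reference, a sketch of the renamed call as the test uses it (assumes a :power REPL session, same expressions and expected output as the .check file above):

  // inside a :power session (sketch, mirroring the test)
  val s = transformedType(StringClass.toType).asInstanceOf[Type]
  // run the block as if the erasure phase had just completed, then print outside it
  println(exitingPhase(currentRun.erasurePhase)(ConstantType(Constant(s))))  // Class[String](classOf[java.lang.String])
  // print while still "inside" the post-erasure view
  exitingPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s))))  // Class(classOf[java.lang.String])
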
diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check
index 85c4f41..9a10678 100644
--- a/test/files/run/constrained-types.check
+++ b/test/files/run/constrained-types.check
@@ -1,10 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
-scala> 
-
 scala> class Annot(obj: Any) extends annotation.Annotation with annotation.TypeConstraint
 defined class Annot
 
@@ -14,7 +10,7 @@ scala> class A {
   val x = "hello"
   val y: Int @Annot(x) = 10
   override def toString = "an A"
-} 
+}
 defined class A
 
 scala> 
@@ -22,7 +18,7 @@ scala>
 scala> val a = new A
 a: A = an A
 
-scala> val y = a.y   // should rewrite "this.x" to "a.x" 
+scala> val y = a.y   // should rewrite "this.x" to "a.x"
 y: Int @Annot(a.x) = 10
 
 scala> var a2 = new A
@@ -37,7 +33,7 @@ scala> object Stuff {
   val x = "hello"
   val y : Int @Annot(x) = 10
 }
-defined module Stuff
+defined object Stuff
 
 scala> 
 
@@ -127,7 +123,7 @@ defined class rep
 scala> 
 
 scala> object A { val x = "hello" : String @ rep }
-defined module A
+defined object A
 warning: previously defined class A is not a companion to object A.
 Companions must be defined together; you may wish to use :paste mode for this.
 
@@ -142,17 +138,14 @@ scala> val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message
 <console>:8: error: not found: value e
        val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message
                               ^
-
-scala> 
-
-scala> class Where(condition: Boolean) extends annotation.Annotation
-defined class Where
-
-scala> 
-
-scala> val x : Int @Where(self > 0 && self < 100) = 3
-x: Int @Where(self.>(0).&&(self.<(100))) = 3
-
-scala> 
+<console>:8: error: not found: value f
+       val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message
+                                ^
+<console>:8: error: not found: value g
+       val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message
+                                  ^
+<console>:8: error: not found: value h
+       val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message
+                                    ^
 
 scala> 
diff --git a/test/files/run/constrained-types.scala b/test/files/run/constrained-types.scala
index 38ae076..7ec8f93 100644
--- a/test/files/run/constrained-types.scala
+++ b/test/files/run/constrained-types.scala
@@ -15,10 +15,10 @@ class A {
   val x = "hello"
   val y: Int @Annot(x) = 10
   override def toString = "an A"
-} 
+}
 
 val a = new A
-val y = a.y   // should rewrite "this.x" to "a.x" 
+val y = a.y   // should rewrite "this.x" to "a.x"
 var a2 = new A
 val y2 = a2.y   // should drop the annotation
 
@@ -72,16 +72,10 @@ object A { val x = "hello" : String @ rep }
 val y = a.x // should drop the annotation
 
 val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message
-
-class Where(condition: Boolean) extends annotation.Annotation
-
-val x : Int @Where(self > 0 && self < 100) = 3
-
 """
 
   override def transformSettings(s: Settings): Settings = {
     s.Xexperimental.value = true
-    s.selfInAnnots.value = true
     s.deprecation.value = true
     // when running that compiler, give it a scala-library to the classpath
     s.classpath.value = sys.props("java.class.path")
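
The constrained-types updates above drop the old selfInAnnots setting (and the Where(self > 0 && self < 100) example that depended on it) and refresh the expected REPL output: "defined module X" is now reported as "defined object X", and every unresolved identifier in the annotation argument gets its own error. The annotation-with-TypeConstraint pattern the session exercises boils down to the following REPL-style sketch (same definitions as in the transcript):

  class Annot(obj: Any) extends annotation.Annotation with annotation.TypeConstraint

  class A {
    val x = "hello"
    val y: Int @Annot(x) = 10     // the annotation argument may refer to members of the prefix
    override def toString = "an A"
  }

  val a = new A
  val y = a.y                     // inferred type: Int @Annot(a.x)
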
diff --git a/test/files/run/contrib674.check b/test/files/run/contrib674.check
new file mode 100644
index 0000000..78325c1
--- /dev/null
+++ b/test/files/run/contrib674.check
@@ -0,0 +1,3 @@
+contrib674.scala:15: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    1
+    ^
diff --git a/test/files/run/contrib674.scala b/test/files/run/contrib674.scala
index f6b46d1..45c9871 100644
--- a/test/files/run/contrib674.scala
+++ b/test/files/run/contrib674.scala
@@ -5,11 +5,11 @@ object Test extends App {
     try {
       1
     } catch {
-      case e =>
+      case e: Throwable =>
     } finally {
       try {
       } catch {
-        case e =>
+        case e: Throwable =>
       }
     }
     1
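
The contrib674 update above makes the catch-all pattern explicit: a bare `case e =>` in a catch clause draws a catch-all warning on 2.11, so the test now writes `case e: Throwable =>`, and the new .check file records the "pure expression does nothing in statement position" warning for the trailing `1`. A minimal standalone sketch of the same idiom (hypothetical helper, not part of the patch):

  // explicit catch-all, as the updated test uses
  def attempt[A](body: => A): Option[A] =
    try Some(body)
    catch { case e: Throwable => None }   // explicit type avoids the bare catch-all warning

  // attempt(1 / 0)       == None
  // attempt("ok".length) == Some(2)
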
diff --git a/test/files/run/ctor-order.scala b/test/files/run/ctor-order.scala
index a223ff7..5f58716 100644
--- a/test/files/run/ctor-order.scala
+++ b/test/files/run/ctor-order.scala
@@ -8,7 +8,7 @@ class Outer {
 
   class X extends {
     /* The constructor of X should set this.$outer to the outer instance
-     * *before* calling the super constructors. This is tested by 
+     * *before* calling the super constructors. This is tested by
      * mixin M1, which tries to access global from the enclosing class.
      */
     val outer = Outer.this
diff --git a/test/files/run/ctries-new/concmap.scala b/test/files/run/ctries-new/concmap.scala
index 3ec0256..7691656 100644
--- a/test/files/run/ctries-new/concmap.scala
+++ b/test/files/run/ctries-new/concmap.scala
@@ -5,17 +5,17 @@ import collection.concurrent.TrieMap
 
 
 object ConcurrentMapSpec extends Spec {
-  
+
   val initsz = 500
   val secondsz = 750
-  
+
   def test() {
     "support put" in {
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until initsz) assert(ct.put(new Wrap(i), i) == None)
       for (i <- 0 until initsz) assert(ct.put(new Wrap(i), -i) == Some(i))
     }
-    
+
     "support put if absent" in {
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until initsz) ct.update(new Wrap(i), i)
@@ -24,7 +24,7 @@ object ConcurrentMapSpec extends Spec {
       for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), -i) == None)
       for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), i) == Some(-i))
     }
-    
+
     "support remove if mapped to a specific value" in {
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until initsz) ct.update(new Wrap(i), i)
@@ -32,7 +32,7 @@ object ConcurrentMapSpec extends Spec {
       for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == true)
       for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == false)
     }
-    
+
     "support replace if mapped to a specific value" in {
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until initsz) ct.update(new Wrap(i), i)
@@ -41,7 +41,7 @@ object ConcurrentMapSpec extends Spec {
       for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == false)
       for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i, 0) == false)
     }
-    
+
     "support replace if present" in {
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until initsz) ct.update(new Wrap(i), i)
@@ -49,17 +49,17 @@ object ConcurrentMapSpec extends Spec {
       for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i) == Some(-i))
       for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i) == None)
     }
-    
+
     def assertEqual(a: Any, b: Any) = {
       if (a != b) println(a, b)
       assert(a == b)
     }
-    
+
     "support replace if mapped to a specific value, using several threads" in {
       val ct = new TrieMap[Wrap, Int]
       val sz = 55000
       for (i <- 0 until sz) ct.update(new Wrap(i), i)
-      
+
       class Updater(index: Int, offs: Int) extends Thread {
         override def run() {
           var repeats = 0
@@ -74,24 +74,24 @@ object ConcurrentMapSpec extends Spec {
           //println("Thread %d repeats: %d".format(index, repeats))
         }
       }
-      
+
       val threads = for (i <- 0 until 16) yield new Updater(i, sz / 32 * i)
       threads.foreach(_.start())
       threads.foreach(_.join())
-      
+
       for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), i)
-      
+
       val threads2 = for (i <- 0 until 15) yield new Updater(i, sz / 32 * i)
       threads2.foreach(_.start())
       threads2.foreach(_.join())
-      
+
       for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), -i)
     }
-    
+
     "support put if absent, several threads" in {
       val ct = new TrieMap[Wrap, Int]
       val sz = 110000
-      
+
       class Updater(offs: Int) extends Thread {
         override def run() {
           for (i <- 0 until sz) {
@@ -101,19 +101,19 @@ object ConcurrentMapSpec extends Spec {
           }
         }
       }
-      
+
       val threads = for (i <- 0 until 16) yield new Updater(sz / 32 * i)
       threads.foreach(_.start())
       threads.foreach(_.join())
-      
+
       for (i <- 0 until sz) assert(ct(new Wrap(i)) == i)
     }
-    
+
     "support remove if mapped to a specific value, several threads" in {
       val ct = new TrieMap[Wrap, Int]
       val sz = 55000
       for (i <- 0 until sz) ct.update(new Wrap(i), i)
-      
+
       class Remover(offs: Int) extends Thread {
         override def run() {
           for (i <- 0 until sz) {
@@ -123,19 +123,19 @@ object ConcurrentMapSpec extends Spec {
           }
         }
       }
-      
+
       val threads = for (i <- 0 until 16) yield new Remover(sz / 32 * i)
       threads.foreach(_.start())
       threads.foreach(_.join())
-      
+
       for (i <- 0 until sz) assert(ct.get(new Wrap(i)) == None)
     }
-    
+
     "have all or none of the elements depending on the oddity" in {
       val ct = new TrieMap[Wrap, Int]
       val sz = 65000
       for (i <- 0 until sz) ct(new Wrap(i)) = i
-      
+
       class Modifier(index: Int, offs: Int) extends Thread {
         override def run() {
           for (j <- 0 until sz) {
@@ -151,38 +151,38 @@ object ConcurrentMapSpec extends Spec {
           }
         }
       }
-      
+
       def modify(n: Int) = {
         val threads = for (i <- 0 until n) yield new Modifier(i, sz / n * i)
         threads.foreach(_.start())
         threads.foreach(_.join())
       }
-      
+
       modify(16)
       for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), Some(i))
       modify(15)
       for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), None)
     }
-    
+
     "compute size correctly" in {
       val ct = new TrieMap[Wrap, Int]
       val sz = 36450
       for (i <- 0 until sz) ct(new Wrap(i)) = i
-      
+
       assertEqual(ct.size, sz)
       assertEqual(ct.size, sz)
     }
-    
+
     "compute size correctly in parallel" in {
       val ct = new TrieMap[Wrap, Int]
       val sz = 36450
       for (i <- 0 until sz) ct(new Wrap(i)) = i
       val pct = ct.par
-      
+
       assertEqual(pct.size, sz)
       assertEqual(pct.size, sz)
     }
-    
+
   }
-  
+
 }
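
The concmap.scala hunks above are whitespace-only (trailing blanks removed); the spec keeps exercising the scala.collection.concurrent.TrieMap contract. A compact, self-contained sketch of the operations it asserts on (assumed example, not part of the patch):

  import scala.collection.concurrent.TrieMap

  object TrieMapOps extends App {
    val ct = new TrieMap[Int, Int]
    assert(ct.put(1, 10) == None)             // first insert returns None
    assert(ct.put(1, 11) == Some(10))         // replacing returns the old value
    assert(ct.putIfAbsent(2, 20) == None)     // absent key: inserted
    assert(ct.putIfAbsent(2, 99) == Some(20)) // present key: left unchanged
    assert(ct.replace(2, 20, 21))             // conditional replace succeeds
    assert(!ct.replace(2, 20, 22))            // old value no longer matches
    assert(ct.remove(1, 11))                  // conditional remove
    println(ct)                               // e.g. TrieMap(2 -> 21)
  }
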
diff --git a/test/files/run/ctries-new/iterator.scala b/test/files/run/ctries-new/iterator.scala
index b953a40..bb1175e 100644
--- a/test/files/run/ctries-new/iterator.scala
+++ b/test/files/run/ctries-new/iterator.scala
@@ -1,144 +1,134 @@
-
-
-
-
 import collection._
 import collection.concurrent.TrieMap
 
-
-
 object IteratorSpec extends Spec {
-  
+
   def test() {
     "work for an empty trie" in {
       val ct = new TrieMap
       val it = ct.iterator
-      
+
       it.hasNext shouldEqual (false)
       evaluating { it.next() }.shouldProduce [NoSuchElementException]
     }
-    
+
     def nonEmptyIteratorCheck(sz: Int) {
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct.put(new Wrap(i), i)
-      
+
       val it = ct.iterator
       val tracker = mutable.Map[Wrap, Int]()
       for (i <- 0 until sz) {
         assert(it.hasNext == true)
         tracker += it.next
       }
-      
+
       it.hasNext shouldEqual (false)
       evaluating { it.next() }.shouldProduce [NoSuchElementException]
       tracker.size shouldEqual (sz)
       tracker shouldEqual (ct)
     }
-    
+
     "work for a 1 element trie" in {
       nonEmptyIteratorCheck(1)
     }
-    
+
     "work for a 2 element trie" in {
       nonEmptyIteratorCheck(2)
     }
-    
+
     "work for a 3 element trie" in {
       nonEmptyIteratorCheck(3)
     }
-    
+
     "work for a 5 element trie" in {
       nonEmptyIteratorCheck(5)
     }
-    
+
     "work for a 10 element trie" in {
       nonEmptyIteratorCheck(10)
     }
-    
+
     "work for a 20 element trie" in {
       nonEmptyIteratorCheck(20)
     }
-    
+
     "work for a 50 element trie" in {
       nonEmptyIteratorCheck(50)
     }
-     
+
     "work for a 100 element trie" in {
       nonEmptyIteratorCheck(100)
     }
-    
+
     "work for a 1k element trie" in {
       nonEmptyIteratorCheck(1000)
     }
-    
+
     "work for a 5k element trie" in {
       nonEmptyIteratorCheck(5000)
     }
-    
+
     "work for a 75k element trie" in {
       nonEmptyIteratorCheck(75000)
     }
-    
+
     "work for a 250k element trie" in {
       nonEmptyIteratorCheck(500000)
     }
-    
+
     def nonEmptyCollideCheck(sz: Int) {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until sz) ct.put(new DumbHash(i), i)
-      
+
       val it = ct.iterator
       val tracker = mutable.Map[DumbHash, Int]()
       for (i <- 0 until sz) {
         assert(it.hasNext == true)
         tracker += it.next
       }
-      
+
       it.hasNext shouldEqual (false)
       evaluating { it.next() }.shouldProduce [NoSuchElementException]
       tracker.size shouldEqual (sz)
       tracker shouldEqual (ct)
     }
-    
+
     "work for colliding hashcodes, 2 element trie" in {
       nonEmptyCollideCheck(2)
     }
-    
+
     "work for colliding hashcodes, 3 element trie" in {
       nonEmptyCollideCheck(3)
     }
-    
+
     "work for colliding hashcodes, 5 element trie" in {
       nonEmptyCollideCheck(5)
     }
-    
+
     "work for colliding hashcodes, 10 element trie" in {
       nonEmptyCollideCheck(10)
     }
-    
+
     "work for colliding hashcodes, 100 element trie" in {
       nonEmptyCollideCheck(100)
     }
-    
+
     "work for colliding hashcodes, 500 element trie" in {
       nonEmptyCollideCheck(500)
     }
-    
+
     "work for colliding hashcodes, 5k element trie" in {
       nonEmptyCollideCheck(5000)
     }
-    
+
     def assertEqual(a: Map[Wrap, Int], b: Map[Wrap, Int]) {
       if (a != b) {
         println(a.size + " vs " + b.size)
-        // println(a)
-        // println(b)
-        // println(a.toSeq.sortBy((x: (Wrap, Int)) => x._1.i))
-        // println(b.toSeq.sortBy((x: (Wrap, Int)) => x._1.i))
       }
       assert(a == b)
     }
-    
+
     "be consistent when taken with concurrent modifications" in {
       val sz = 25000
       val W = 15
@@ -146,40 +136,40 @@ object IteratorSpec extends Spec {
       val checks = 5
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct.put(new Wrap(i), i)
-      
+
       class Modifier extends Thread {
         override def run() {
           for (i <- 0 until sz) ct.putIfAbsent(new Wrap(i), i) match {
             case Some(_) => ct.remove(new Wrap(i))
-            case None => 
+            case None =>
           }
         }
       }
-      
+
       def consistentIteration(ct: TrieMap[Wrap, Int], checks: Int) {
         class Iter extends Thread {
           override def run() {
             val snap = ct.readOnlySnapshot()
             val initial = mutable.Map[Wrap, Int]()
             for (kv <- snap) initial += kv
-            
+
             for (i <- 0 until checks) {
               assertEqual(snap.iterator.toMap, initial)
             }
           }
         }
-        
+
         val iter = new Iter
         iter.start()
         iter.join()
       }
-      
+
       val threads = for (_ <- 0 until W) yield new Modifier
       threads.foreach(_.start())
       for (_ <- 0 until S) consistentIteration(ct, checks)
       threads.foreach(_.join())
     }
-    
+
     "be consistent with a concurrent removal with a well defined order" in {
       val sz = 150000
       val sgroupsize = 10
@@ -187,17 +177,16 @@ object IteratorSpec extends Spec {
       val removerslowdown = 50
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct.put(new Wrap(i), i)
-      
+
       class Remover extends Thread {
         override def run() {
           for (i <- 0 until sz) {
             assert(ct.remove(new Wrap(i)) == Some(i))
             for (i <- 0 until removerslowdown) ct.get(new Wrap(i)) // slow down, mate
           }
-          //println("done removing")
         }
       }
-      
+
       def consistentIteration(it: Iterator[(Wrap, Int)]) = {
         class Iter extends Thread {
           override def run() {
@@ -210,7 +199,7 @@ object IteratorSpec extends Spec {
         }
         new Iter
       }
-      
+
       val remover = new Remover
       remover.start()
       for (_ <- 0 until sgroupnum) {
@@ -218,27 +207,25 @@ object IteratorSpec extends Spec {
         iters.foreach(_.start())
         iters.foreach(_.join())
       }
-      //println("done with iterators")
       remover.join()
     }
-    
+
     "be consistent with a concurrent insertion with a well defined order" in {
       val sz = 150000
       val sgroupsize = 10
       val sgroupnum = 10
       val inserterslowdown = 50
       val ct = new TrieMap[Wrap, Int]
-      
+
       class Inserter extends Thread {
         override def run() {
           for (i <- 0 until sz) {
             assert(ct.put(new Wrap(i), i) == None)
             for (i <- 0 until inserterslowdown) ct.get(new Wrap(i)) // slow down, mate
           }
-          //println("done inserting")
         }
       }
-      
+
       def consistentIteration(it: Iterator[(Wrap, Int)]) = {
         class Iter extends Thread {
           override def run() {
@@ -251,7 +238,7 @@ object IteratorSpec extends Spec {
         }
         new Iter
       }
-      
+
       val inserter = new Inserter
       inserter.start()
       for (_ <- 0 until sgroupnum) {
@@ -259,31 +246,30 @@ object IteratorSpec extends Spec {
         iters.foreach(_.start())
         iters.foreach(_.join())
       }
-      //println("done with iterators")
       inserter.join()
     }
-    
+
     "work on a yet unevaluated snapshot" in {
       val sz = 50000
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct.update(new Wrap(i), i)
-      
+
       val snap = ct.snapshot()
       val it = snap.iterator
-      
+
       while (it.hasNext) it.next()
     }
-    
+
     "be duplicated" in {
       val sz = 50
       val ct = collection.parallel.mutable.ParTrieMap((0 until sz) zip (0 until sz): _*)
       val it = ct.splitter
       for (_ <- 0 until (sz / 2)) it.next()
       val dupit = it.dup
-      
+
       it.toList shouldEqual dupit.toList
     }
-    
+
   }
-  
+
 }
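
The iterator.scala hunks above likewise only trim whitespace and drop a few commented-out printlns; the behaviour under test is that iteration over a TrieMap snapshot stays consistent while other threads mutate the map. A small single-threaded sketch of the snapshot guarantee (assumed example):

  import scala.collection.concurrent.TrieMap

  object SnapshotIteration extends App {
    val ct = new TrieMap[Int, Int]
    for (i <- 0 until 1000) ct.put(i, i)

    // a read-only snapshot is cheap and unaffected by later updates
    val snap = ct.readOnlySnapshot()
    for (i <- 0 until 1000) ct.remove(i)

    assert(snap.size == 1000)   // snapshot still sees the original entries
    assert(ct.isEmpty)
  }
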
diff --git a/test/files/run/ctries-new/lnode.scala b/test/files/run/ctries-new/lnode.scala
index 92a3108..4cc9705 100644
--- a/test/files/run/ctries-new/lnode.scala
+++ b/test/files/run/ctries-new/lnode.scala
@@ -5,23 +5,23 @@ import collection.concurrent.TrieMap
 
 
 object LNodeSpec extends Spec {
-  
+
   val initsz = 1500
   val secondsz = 1750
-  
+
   def test() {
     "accept elements with the same hash codes" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
     }
-    
+
     "lookup elements with the same hash codes" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
       for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == Some(i))
       for (i <- initsz until secondsz) assert(ct.get(new DumbHash(i)) == None)
     }
-    
+
     "remove elements with the same hash codes" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
@@ -31,7 +31,7 @@ object LNodeSpec extends Spec {
       }
       for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == None)
     }
-    
+
     "put elements with the same hash codes if absent" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) ct.put(new DumbHash(i), i)
@@ -40,7 +40,7 @@ object LNodeSpec extends Spec {
       for (i <- initsz until secondsz) assert(ct.putIfAbsent(new DumbHash(i), i) == None)
       for (i <- initsz until secondsz) assert(ct.lookup(new DumbHash(i)) == i)
     }
-    
+
     "replace elements with the same hash codes" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None)
@@ -49,13 +49,13 @@ object LNodeSpec extends Spec {
       for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == -i)
       for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i, i) == true)
     }
-    
+
     "remove elements with the same hash codes if mapped to a specific value" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None)
       for (i <- 0 until initsz) assert(ct.remove(new DumbHash(i), i) == true)
     }
-    
+
   }
-  
+
 }
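
The lnode.scala hunks above are again whitespace-only. What the spec covers is TrieMap's handling of colliding hash codes via its DumbHash helper; a tiny standalone sketch of the same idea (assumed DumbKey class, not part of the patch):

  import scala.collection.concurrent.TrieMap

  // all keys collide on purpose; equality still distinguishes them
  final case class DumbKey(i: Int) { override def hashCode = 0 }

  object CollisionDemo extends App {
    val ct = new TrieMap[DumbKey, Int]
    for (i <- 0 until 100) ct.update(DumbKey(i), i)
    assert((0 until 100).forall(i => ct.get(DumbKey(i)) == Some(i)))
    assert(ct.get(DumbKey(100)) == None)
  }
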
diff --git a/test/files/run/ctries-new/main.scala b/test/files/run/ctries-new/main.scala
index d7fe087..34f3ec2 100644
--- a/test/files/run/ctries-new/main.scala
+++ b/test/files/run/ctries-new/main.scala
@@ -21,6 +21,9 @@ object Test {
 
 trait Spec {
 
+  implicit def implicitously = scala.language.implicitConversions
+  implicit def reflectively  = scala.language.reflectiveCalls
+
   implicit def str2ops(s: String) = new {
     def in[U](body: =>U) {
       // just execute body
@@ -37,11 +40,11 @@ trait Spec {
       var produced = false
       try body
       catch {
-        case e => if (e.getClass == implicitly[ClassTag[T]].runtimeClass) produced = true
+        case e: Throwable => if (e.getClass == implicitly[ClassTag[T]].runtimeClass) produced = true
       } finally {
         assert(produced, "Did not produce exception of type: " + implicitly[ClassTag[T]])
       }
     }
   }
 
-}
\ No newline at end of file
+}
diff --git a/test/files/run/ctries-new/snapshot.scala b/test/files/run/ctries-new/snapshot.scala
index 5fe77d4..57155d4 100644
--- a/test/files/run/ctries-new/snapshot.scala
+++ b/test/files/run/ctries-new/snapshot.scala
@@ -8,22 +8,22 @@ import collection.concurrent.TrieMap
 
 
 object SnapshotSpec extends Spec {
-  
+
   def test() {
     "support snapshots" in {
       val ctn = new TrieMap
       ctn.snapshot()
       ctn.readOnlySnapshot()
-      
+
       val ct = new TrieMap[Int, Int]
       for (i <- 0 until 100) ct.put(i, i)
       ct.snapshot()
       ct.readOnlySnapshot()
     }
-    
+
     "empty 2 quiescent snapshots in isolation" in {
       val sz = 4000
-      
+
       class Worker(trie: TrieMap[Wrap, Int]) extends Thread {
         override def run() {
           for (i <- 0 until sz) {
@@ -34,46 +34,46 @@ object SnapshotSpec extends Spec {
           }
         }
       }
-      
+
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct.put(new Wrap(i), i)
       val snapt = ct.snapshot()
-      
+
       val original = new Worker(ct)
       val snapshot = new Worker(snapt)
       original.start()
       snapshot.start()
       original.join()
       snapshot.join()
-      
+
       for (i <- 0 until sz) {
         assert(ct.get(new Wrap(i)) == None)
         assert(snapt.get(new Wrap(i)) == None)
       }
     }
-    
+
     def consistentReadOnly(name: String, readonly: Map[Wrap, Int], sz: Int, N: Int) {
       @volatile var e: Exception = null
-      
+
       // reads possible entries once and stores them
       // then reads all these N more times to check if the
       // state stayed the same
       class Reader(trie: Map[Wrap, Int]) extends Thread {
         setName("Reader " + name)
-        
+
         override def run() =
           try check()
           catch {
             case ex: Exception => e = ex
           }
-        
+
         def check() {
           val initial = mutable.Map[Wrap, Int]()
           for (i <- 0 until sz) trie.get(new Wrap(i)) match {
             case Some(i) => initial.put(new Wrap(i), i)
             case None => // do nothing
           }
-          
+
           for (k <- 0 until N) {
             for (i <- 0 until sz) {
               val tres = trie.get(new Wrap(i))
@@ -84,21 +84,21 @@ object SnapshotSpec extends Spec {
           }
         }
       }
-      
+
       val reader = new Reader(readonly)
       reader.start()
       reader.join()
-      
+
       if (e ne null) {
         e.printStackTrace()
         throw e
       }
     }
-    
+
     // traverses the trie `rep` times and modifies each entry
     class Modifier(trie: TrieMap[Wrap, Int], index: Int, rep: Int, sz: Int) extends Thread {
       setName("Modifier %d".format(index))
-      
+
       override def run() {
         for (k <- 0 until rep) {
           for (i <- 0 until sz) trie.putIfAbsent(new Wrap(i), i) match {
@@ -108,85 +108,85 @@ object SnapshotSpec extends Spec {
         }
       }
     }
-    
+
     // removes all the elements from the trie
     class Remover(trie: TrieMap[Wrap, Int], index: Int, totremovers: Int, sz: Int) extends Thread {
       setName("Remover %d".format(index))
-      
+
       override def run() {
         for (i <- 0 until sz) trie.remove(new Wrap((i + sz / totremovers * index) % sz))
       }
     }
-    
+
     "have a consistent quiescent read-only snapshot" in {
       val sz = 10000
       val N = 100
       val W = 10
-      
+
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct(new Wrap(i)) = i
       val readonly = ct.readOnlySnapshot()
       val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
-      
+
       threads.foreach(_.start())
       consistentReadOnly("qm", readonly, sz, N)
       threads.foreach(_.join())
     }
-    
+
     // now, we check non-quiescent snapshots, as these permit situations
     // where a thread is caught in the middle of the update when a snapshot is taken
-    
+
     "have a consistent non-quiescent read-only snapshot, concurrent with removes only" in {
       val sz = 1250
       val W = 100
       val S = 5000
-      
+
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct(new Wrap(i)) = i
       val threads = for (i <- 0 until W) yield new Remover(ct, i, W, sz)
-      
+
       threads.foreach(_.start())
       for (i <- 0 until S) consistentReadOnly("non-qr", ct.readOnlySnapshot(), sz, 5)
       threads.foreach(_.join())
     }
-    
+
     "have a consistent non-quiescent read-only snapshot, concurrent with modifications" in {
       val sz = 1000
       val N = 7000
       val W = 10
       val S = 7000
-      
+
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct(new Wrap(i)) = i
       val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
-      
+
       threads.foreach(_.start())
       for (i <- 0 until S) consistentReadOnly("non-qm", ct.readOnlySnapshot(), sz, 5)
       threads.foreach(_.join())
     }
-    
+
     def consistentNonReadOnly(name: String, trie: TrieMap[Wrap, Int], sz: Int, N: Int) {
       @volatile var e: Exception = null
-      
+
       // reads possible entries once and stores them
       // then reads all these N more times to check if the
       // state stayed the same
       class Worker extends Thread {
         setName("Worker " + name)
-        
+
         override def run() =
           try check()
           catch {
             case ex: Exception => e = ex
           }
-        
+
         def check() {
           val initial = mutable.Map[Wrap, Int]()
           for (i <- 0 until sz) trie.get(new Wrap(i)) match {
             case Some(i) => initial.put(new Wrap(i), i)
             case None => // do nothing
           }
-          
+
           for (k <- 0 until N) {
             // modify
             for ((key, value) <- initial) {
@@ -194,7 +194,7 @@ object SnapshotSpec extends Spec {
               val newv = -oldv
               trie.replace(key, oldv, newv)
             }
-            
+
             // check
             for (i <- 0 until sz) if (initial.contains(new Wrap(i))) {
               val expected = if (k % 2 == 0) -i else i
@@ -206,27 +206,27 @@ object SnapshotSpec extends Spec {
           }
         }
       }
-      
+
       val worker = new Worker
       worker.start()
       worker.join()
-      
+
       if (e ne null) {
         e.printStackTrace()
         throw e
       }
     }
-    
+
     "have a consistent non-quiescent snapshot, concurrent with modifications" in {
       val sz = 9000
       val N = 1000
       val W = 10
       val S = 400
-      
+
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct(new Wrap(i)) = i
       val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
-      
+
       threads.foreach(_.start())
       for (i <- 0 until S) {
         consistentReadOnly("non-qm", ct.snapshot(), sz, 5)
@@ -234,7 +234,7 @@ object SnapshotSpec extends Spec {
       }
       threads.foreach(_.join())
     }
-    
+
     "work when many concurrent snapshots are taken, concurrent with modifications" in {
       val sz = 12000
       val W = 10
@@ -243,7 +243,7 @@ object SnapshotSpec extends Spec {
       val snaptimes = 600
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct(new Wrap(i)) = i
-      
+
       class Snapshooter extends Thread {
         setName("Snapshooter")
         override def run() {
@@ -254,14 +254,14 @@ object SnapshotSpec extends Spec {
           }
         }
       }
-      
+
       val mods = for (i <- 0 until W) yield new Modifier(ct, i, modifytimes, sz)
       val shooters = for (i <- 0 until S) yield new Snapshooter
       val threads = mods ++ shooters
       threads.foreach(_.start())
       threads.foreach(_.join())
     }
-    
+
   }
-  
+
 }
diff --git a/test/files/run/ctries-old/concmap.scala b/test/files/run/ctries-old/concmap.scala
index 3ec0256..affc6fe 100644
--- a/test/files/run/ctries-old/concmap.scala
+++ b/test/files/run/ctries-old/concmap.scala
@@ -2,20 +2,21 @@
 
 
 import collection.concurrent.TrieMap
+import Test.Spec
 
 
 object ConcurrentMapSpec extends Spec {
-  
+
   val initsz = 500
   val secondsz = 750
-  
+
   def test() {
     "support put" in {
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until initsz) assert(ct.put(new Wrap(i), i) == None)
       for (i <- 0 until initsz) assert(ct.put(new Wrap(i), -i) == Some(i))
     }
-    
+
     "support put if absent" in {
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until initsz) ct.update(new Wrap(i), i)
@@ -24,7 +25,7 @@ object ConcurrentMapSpec extends Spec {
       for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), -i) == None)
       for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), i) == Some(-i))
     }
-    
+
     "support remove if mapped to a specific value" in {
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until initsz) ct.update(new Wrap(i), i)
@@ -32,7 +33,7 @@ object ConcurrentMapSpec extends Spec {
       for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == true)
       for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == false)
     }
-    
+
     "support replace if mapped to a specific value" in {
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until initsz) ct.update(new Wrap(i), i)
@@ -41,7 +42,7 @@ object ConcurrentMapSpec extends Spec {
       for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == false)
       for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i, 0) == false)
     }
-    
+
     "support replace if present" in {
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until initsz) ct.update(new Wrap(i), i)
@@ -49,17 +50,17 @@ object ConcurrentMapSpec extends Spec {
       for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i) == Some(-i))
       for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i) == None)
     }
-    
+
     def assertEqual(a: Any, b: Any) = {
       if (a != b) println(a, b)
       assert(a == b)
     }
-    
+
     "support replace if mapped to a specific value, using several threads" in {
       val ct = new TrieMap[Wrap, Int]
       val sz = 55000
       for (i <- 0 until sz) ct.update(new Wrap(i), i)
-      
+
       class Updater(index: Int, offs: Int) extends Thread {
         override def run() {
           var repeats = 0
@@ -74,24 +75,24 @@ object ConcurrentMapSpec extends Spec {
           //println("Thread %d repeats: %d".format(index, repeats))
         }
       }
-      
+
       val threads = for (i <- 0 until 16) yield new Updater(i, sz / 32 * i)
       threads.foreach(_.start())
       threads.foreach(_.join())
-      
+
       for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), i)
-      
+
       val threads2 = for (i <- 0 until 15) yield new Updater(i, sz / 32 * i)
       threads2.foreach(_.start())
       threads2.foreach(_.join())
-      
+
       for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), -i)
     }
-    
+
     "support put if absent, several threads" in {
       val ct = new TrieMap[Wrap, Int]
       val sz = 110000
-      
+
       class Updater(offs: Int) extends Thread {
         override def run() {
           for (i <- 0 until sz) {
@@ -101,19 +102,19 @@ object ConcurrentMapSpec extends Spec {
           }
         }
       }
-      
+
       val threads = for (i <- 0 until 16) yield new Updater(sz / 32 * i)
       threads.foreach(_.start())
       threads.foreach(_.join())
-      
+
       for (i <- 0 until sz) assert(ct(new Wrap(i)) == i)
     }
-    
+
     "support remove if mapped to a specific value, several threads" in {
       val ct = new TrieMap[Wrap, Int]
       val sz = 55000
       for (i <- 0 until sz) ct.update(new Wrap(i), i)
-      
+
       class Remover(offs: Int) extends Thread {
         override def run() {
           for (i <- 0 until sz) {
@@ -123,19 +124,19 @@ object ConcurrentMapSpec extends Spec {
           }
         }
       }
-      
+
       val threads = for (i <- 0 until 16) yield new Remover(sz / 32 * i)
       threads.foreach(_.start())
       threads.foreach(_.join())
-      
+
       for (i <- 0 until sz) assert(ct.get(new Wrap(i)) == None)
     }
-    
+
     "have all or none of the elements depending on the oddity" in {
       val ct = new TrieMap[Wrap, Int]
       val sz = 65000
       for (i <- 0 until sz) ct(new Wrap(i)) = i
-      
+
       class Modifier(index: Int, offs: Int) extends Thread {
         override def run() {
           for (j <- 0 until sz) {
@@ -151,38 +152,38 @@ object ConcurrentMapSpec extends Spec {
           }
         }
       }
-      
+
       def modify(n: Int) = {
         val threads = for (i <- 0 until n) yield new Modifier(i, sz / n * i)
         threads.foreach(_.start())
         threads.foreach(_.join())
       }
-      
+
       modify(16)
       for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), Some(i))
       modify(15)
       for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), None)
     }
-    
+
     "compute size correctly" in {
       val ct = new TrieMap[Wrap, Int]
       val sz = 36450
       for (i <- 0 until sz) ct(new Wrap(i)) = i
-      
+
       assertEqual(ct.size, sz)
       assertEqual(ct.size, sz)
     }
-    
+
     "compute size correctly in parallel" in {
       val ct = new TrieMap[Wrap, Int]
       val sz = 36450
       for (i <- 0 until sz) ct(new Wrap(i)) = i
       val pct = ct.par
-      
+
       assertEqual(pct.size, sz)
       assertEqual(pct.size, sz)
     }
-    
+
   }
-  
+
 }
diff --git a/test/files/run/ctries-old/iterator.scala b/test/files/run/ctries-old/iterator.scala
index b953a40..127f6c9 100644
--- a/test/files/run/ctries-old/iterator.scala
+++ b/test/files/run/ctries-old/iterator.scala
@@ -5,129 +5,130 @@
 import collection._
 import collection.concurrent.TrieMap
 
+import Test.Spec
 
 
 object IteratorSpec extends Spec {
-  
+
   def test() {
     "work for an empty trie" in {
       val ct = new TrieMap
       val it = ct.iterator
-      
+
       it.hasNext shouldEqual (false)
       evaluating { it.next() }.shouldProduce [NoSuchElementException]
     }
-    
+
     def nonEmptyIteratorCheck(sz: Int) {
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct.put(new Wrap(i), i)
-      
+
       val it = ct.iterator
       val tracker = mutable.Map[Wrap, Int]()
       for (i <- 0 until sz) {
         assert(it.hasNext == true)
         tracker += it.next
       }
-      
+
       it.hasNext shouldEqual (false)
       evaluating { it.next() }.shouldProduce [NoSuchElementException]
       tracker.size shouldEqual (sz)
       tracker shouldEqual (ct)
     }
-    
+
     "work for a 1 element trie" in {
       nonEmptyIteratorCheck(1)
     }
-    
+
     "work for a 2 element trie" in {
       nonEmptyIteratorCheck(2)
     }
-    
+
     "work for a 3 element trie" in {
       nonEmptyIteratorCheck(3)
     }
-    
+
     "work for a 5 element trie" in {
       nonEmptyIteratorCheck(5)
     }
-    
+
     "work for a 10 element trie" in {
       nonEmptyIteratorCheck(10)
     }
-    
+
     "work for a 20 element trie" in {
       nonEmptyIteratorCheck(20)
     }
-    
+
     "work for a 50 element trie" in {
       nonEmptyIteratorCheck(50)
     }
-     
+
     "work for a 100 element trie" in {
       nonEmptyIteratorCheck(100)
     }
-    
+
     "work for a 1k element trie" in {
       nonEmptyIteratorCheck(1000)
     }
-    
+
     "work for a 5k element trie" in {
       nonEmptyIteratorCheck(5000)
     }
-    
+
     "work for a 75k element trie" in {
       nonEmptyIteratorCheck(75000)
     }
-    
+
     "work for a 250k element trie" in {
       nonEmptyIteratorCheck(500000)
     }
-    
+
     def nonEmptyCollideCheck(sz: Int) {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until sz) ct.put(new DumbHash(i), i)
-      
+
       val it = ct.iterator
       val tracker = mutable.Map[DumbHash, Int]()
       for (i <- 0 until sz) {
         assert(it.hasNext == true)
         tracker += it.next
       }
-      
+
       it.hasNext shouldEqual (false)
       evaluating { it.next() }.shouldProduce [NoSuchElementException]
       tracker.size shouldEqual (sz)
       tracker shouldEqual (ct)
     }
-    
+
     "work for colliding hashcodes, 2 element trie" in {
       nonEmptyCollideCheck(2)
     }
-    
+
     "work for colliding hashcodes, 3 element trie" in {
       nonEmptyCollideCheck(3)
     }
-    
+
     "work for colliding hashcodes, 5 element trie" in {
       nonEmptyCollideCheck(5)
     }
-    
+
     "work for colliding hashcodes, 10 element trie" in {
       nonEmptyCollideCheck(10)
     }
-    
+
     "work for colliding hashcodes, 100 element trie" in {
       nonEmptyCollideCheck(100)
     }
-    
+
     "work for colliding hashcodes, 500 element trie" in {
       nonEmptyCollideCheck(500)
     }
-    
+
     "work for colliding hashcodes, 5k element trie" in {
       nonEmptyCollideCheck(5000)
     }
-    
+
     def assertEqual(a: Map[Wrap, Int], b: Map[Wrap, Int]) {
       if (a != b) {
         println(a.size + " vs " + b.size)
@@ -138,7 +139,7 @@ object IteratorSpec extends Spec {
       }
       assert(a == b)
     }
-    
+
     "be consistent when taken with concurrent modifications" in {
       val sz = 25000
       val W = 15
@@ -146,40 +147,40 @@ object IteratorSpec extends Spec {
       val checks = 5
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct.put(new Wrap(i), i)
-      
+
       class Modifier extends Thread {
         override def run() {
           for (i <- 0 until sz) ct.putIfAbsent(new Wrap(i), i) match {
             case Some(_) => ct.remove(new Wrap(i))
-            case None => 
+            case None =>
           }
         }
       }
-      
+
       def consistentIteration(ct: TrieMap[Wrap, Int], checks: Int) {
         class Iter extends Thread {
           override def run() {
             val snap = ct.readOnlySnapshot()
             val initial = mutable.Map[Wrap, Int]()
             for (kv <- snap) initial += kv
-            
+
             for (i <- 0 until checks) {
               assertEqual(snap.iterator.toMap, initial)
             }
           }
         }
-        
+
         val iter = new Iter
         iter.start()
         iter.join()
       }
-      
+
       val threads = for (_ <- 0 until W) yield new Modifier
       threads.foreach(_.start())
       for (_ <- 0 until S) consistentIteration(ct, checks)
       threads.foreach(_.join())
     }
-    
+
     "be consistent with a concurrent removal with a well defined order" in {
       val sz = 150000
       val sgroupsize = 10
@@ -187,7 +188,7 @@ object IteratorSpec extends Spec {
       val removerslowdown = 50
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct.put(new Wrap(i), i)
-      
+
       class Remover extends Thread {
         override def run() {
           for (i <- 0 until sz) {
@@ -197,7 +198,7 @@ object IteratorSpec extends Spec {
           //println("done removing")
         }
       }
-      
+
       def consistentIteration(it: Iterator[(Wrap, Int)]) = {
         class Iter extends Thread {
           override def run() {
@@ -210,7 +211,7 @@ object IteratorSpec extends Spec {
         }
         new Iter
       }
-      
+
       val remover = new Remover
       remover.start()
       for (_ <- 0 until sgroupnum) {
@@ -221,14 +222,14 @@ object IteratorSpec extends Spec {
       //println("done with iterators")
       remover.join()
     }
-    
+
     "be consistent with a concurrent insertion with a well defined order" in {
       val sz = 150000
       val sgroupsize = 10
       val sgroupnum = 10
       val inserterslowdown = 50
       val ct = new TrieMap[Wrap, Int]
-      
+
       class Inserter extends Thread {
         override def run() {
           for (i <- 0 until sz) {
@@ -238,7 +239,7 @@ object IteratorSpec extends Spec {
           //println("done inserting")
         }
       }
-      
+
       def consistentIteration(it: Iterator[(Wrap, Int)]) = {
         class Iter extends Thread {
           override def run() {
@@ -251,7 +252,7 @@ object IteratorSpec extends Spec {
         }
         new Iter
       }
-      
+
       val inserter = new Inserter
       inserter.start()
       for (_ <- 0 until sgroupnum) {
@@ -262,28 +263,28 @@ object IteratorSpec extends Spec {
       //println("done with iterators")
       inserter.join()
     }
-    
+
     "work on a yet unevaluated snapshot" in {
       val sz = 50000
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct.update(new Wrap(i), i)
-      
+
       val snap = ct.snapshot()
       val it = snap.iterator
-      
+
       while (it.hasNext) it.next()
     }
-    
+
     "be duplicated" in {
       val sz = 50
       val ct = collection.parallel.mutable.ParTrieMap((0 until sz) zip (0 until sz): _*)
       val it = ct.splitter
       for (_ <- 0 until (sz / 2)) it.next()
       val dupit = it.dup
-      
+
       it.toList shouldEqual dupit.toList
     }
-    
+
   }
-  
+
 }
diff --git a/test/files/run/ctries-old/lnode.scala b/test/files/run/ctries-old/lnode.scala
index 92a3108..f9eb9ce 100644
--- a/test/files/run/ctries-old/lnode.scala
+++ b/test/files/run/ctries-old/lnode.scala
@@ -3,25 +3,26 @@
 
 import collection.concurrent.TrieMap
 
+import Test.Spec
 
 object LNodeSpec extends Spec {
-  
+
   val initsz = 1500
   val secondsz = 1750
-  
+
   def test() {
     "accept elements with the same hash codes" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
     }
-    
+
     "lookup elements with the same hash codes" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
       for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == Some(i))
       for (i <- initsz until secondsz) assert(ct.get(new DumbHash(i)) == None)
     }
-    
+
     "remove elements with the same hash codes" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
@@ -31,7 +32,7 @@ object LNodeSpec extends Spec {
       }
       for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == None)
     }
-    
+
     "put elements with the same hash codes if absent" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) ct.put(new DumbHash(i), i)
@@ -40,7 +41,7 @@ object LNodeSpec extends Spec {
       for (i <- initsz until secondsz) assert(ct.putIfAbsent(new DumbHash(i), i) == None)
       for (i <- initsz until secondsz) assert(ct.lookup(new DumbHash(i)) == i)
     }
-    
+
     "replace elements with the same hash codes" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None)
@@ -49,13 +50,13 @@ object LNodeSpec extends Spec {
       for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == -i)
       for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i, i) == true)
     }
-    
+
     "remove elements with the same hash codes if mapped to a specific value" in {
       val ct = new TrieMap[DumbHash, Int]
       for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None)
       for (i <- 0 until initsz) assert(ct.remove(new DumbHash(i), i) == true)
     }
-    
+
   }
-  
+
 }
diff --git a/test/files/run/ctries-old/main.scala b/test/files/run/ctries-old/main.scala
index 78ba7f0..77161fe 100644
--- a/test/files/run/ctries-old/main.scala
+++ b/test/files/run/ctries-old/main.scala
@@ -5,6 +5,7 @@
 
 
 
+@deprecated("Suppress warnings", since="2.11")
 object Test {
 
   def main(args: Array[String]) {
@@ -14,11 +15,13 @@ object Test {
     SnapshotSpec.test()
   }
 
-}
 
 
 trait Spec {
 
+  implicit def implicitously = scala.language.implicitConversions
+  implicit def reflectively  = scala.language.reflectiveCalls
+
   implicit def str2ops(s: String) = new {
     def in[U](body: =>U) {
       // just execute body
@@ -35,7 +38,7 @@ trait Spec {
       var produced = false
       try body
       catch {
-        case e => if (e.getClass == implicitly[ClassManifest[T]].erasure) produced = true
+        case e: Throwable => if (e.getClass == implicitly[ClassManifest[T]].runtimeClass) produced = true
       } finally {
         assert(produced, "Did not produce exception of type: " + implicitly[ClassManifest[T]])
       }
@@ -43,3 +46,4 @@ trait Spec {
   }
 
 }
+}
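
The main.scala change above nests the Spec trait inside the Test object (which is why the other ctries-old files now `import Test.Spec`), marks Test @deprecated to suppress warnings from the old APIs it exercises, and tightens the expected-exception check from `erasure` to `runtimeClass` with an explicit Throwable pattern. The in-string DSL the specs rely on amounts to roughly this (hypothetical standalone version, names assumed):

  import scala.language.implicitConversions
  import scala.language.reflectiveCalls

  trait MiniSpec {
    implicit def str2ops(s: String) = new {
      def in[U](body: => U): Unit = { body }   // just run the body, like the test harness
    }
  }

  object Demo extends MiniSpec with App {
    "a trivial check" in { assert(1 + 1 == 2) }
  }
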
diff --git a/test/files/run/ctries-old/snapshot.scala b/test/files/run/ctries-old/snapshot.scala
index 5fe77d4..dfc2034 100644
--- a/test/files/run/ctries-old/snapshot.scala
+++ b/test/files/run/ctries-old/snapshot.scala
@@ -5,25 +5,26 @@
 import collection._
 import collection.concurrent.TrieMap
 
+import Test.Spec
 
 
 object SnapshotSpec extends Spec {
-  
+
   def test() {
     "support snapshots" in {
       val ctn = new TrieMap
       ctn.snapshot()
       ctn.readOnlySnapshot()
-      
+
       val ct = new TrieMap[Int, Int]
       for (i <- 0 until 100) ct.put(i, i)
       ct.snapshot()
       ct.readOnlySnapshot()
     }
-    
+
     "empty 2 quiescent snapshots in isolation" in {
       val sz = 4000
-      
+
       class Worker(trie: TrieMap[Wrap, Int]) extends Thread {
         override def run() {
           for (i <- 0 until sz) {
@@ -34,46 +35,46 @@ object SnapshotSpec extends Spec {
           }
         }
       }
-      
+
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct.put(new Wrap(i), i)
       val snapt = ct.snapshot()
-      
+
       val original = new Worker(ct)
       val snapshot = new Worker(snapt)
       original.start()
       snapshot.start()
       original.join()
       snapshot.join()
-      
+
       for (i <- 0 until sz) {
         assert(ct.get(new Wrap(i)) == None)
         assert(snapt.get(new Wrap(i)) == None)
       }
     }
-    
+
     def consistentReadOnly(name: String, readonly: Map[Wrap, Int], sz: Int, N: Int) {
       @volatile var e: Exception = null
-      
+
       // reads possible entries once and stores them
       // then reads all these N more times to check if the
       // state stayed the same
       class Reader(trie: Map[Wrap, Int]) extends Thread {
         setName("Reader " + name)
-        
+
         override def run() =
           try check()
           catch {
             case ex: Exception => e = ex
           }
-        
+
         def check() {
           val initial = mutable.Map[Wrap, Int]()
           for (i <- 0 until sz) trie.get(new Wrap(i)) match {
             case Some(i) => initial.put(new Wrap(i), i)
             case None => // do nothing
           }
-          
+
           for (k <- 0 until N) {
             for (i <- 0 until sz) {
               val tres = trie.get(new Wrap(i))
@@ -84,21 +85,21 @@ object SnapshotSpec extends Spec {
           }
         }
       }
-      
+
       val reader = new Reader(readonly)
       reader.start()
       reader.join()
-      
+
       if (e ne null) {
         e.printStackTrace()
         throw e
       }
     }
-    
+
     // traverses the trie `rep` times and modifies each entry
     class Modifier(trie: TrieMap[Wrap, Int], index: Int, rep: Int, sz: Int) extends Thread {
       setName("Modifier %d".format(index))
-      
+
       override def run() {
         for (k <- 0 until rep) {
           for (i <- 0 until sz) trie.putIfAbsent(new Wrap(i), i) match {
@@ -108,85 +109,85 @@ object SnapshotSpec extends Spec {
         }
       }
     }
-    
+
     // removes all the elements from the trie
     class Remover(trie: TrieMap[Wrap, Int], index: Int, totremovers: Int, sz: Int) extends Thread {
       setName("Remover %d".format(index))
-      
+
       override def run() {
         for (i <- 0 until sz) trie.remove(new Wrap((i + sz / totremovers * index) % sz))
       }
     }
-    
+
     "have a consistent quiescent read-only snapshot" in {
       val sz = 10000
       val N = 100
       val W = 10
-      
+
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct(new Wrap(i)) = i
       val readonly = ct.readOnlySnapshot()
       val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
-      
+
       threads.foreach(_.start())
       consistentReadOnly("qm", readonly, sz, N)
       threads.foreach(_.join())
     }
-    
+
     // now, we check non-quiescent snapshots, as these permit situations
     // where a thread is caught in the middle of the update when a snapshot is taken
-    
+
     "have a consistent non-quiescent read-only snapshot, concurrent with removes only" in {
       val sz = 1250
       val W = 100
       val S = 5000
-      
+
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct(new Wrap(i)) = i
       val threads = for (i <- 0 until W) yield new Remover(ct, i, W, sz)
-      
+
       threads.foreach(_.start())
       for (i <- 0 until S) consistentReadOnly("non-qr", ct.readOnlySnapshot(), sz, 5)
       threads.foreach(_.join())
     }
-    
+
     "have a consistent non-quiescent read-only snapshot, concurrent with modifications" in {
       val sz = 1000
       val N = 7000
       val W = 10
       val S = 7000
-      
+
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct(new Wrap(i)) = i
       val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
-      
+
       threads.foreach(_.start())
       for (i <- 0 until S) consistentReadOnly("non-qm", ct.readOnlySnapshot(), sz, 5)
       threads.foreach(_.join())
     }
-    
+
     def consistentNonReadOnly(name: String, trie: TrieMap[Wrap, Int], sz: Int, N: Int) {
       @volatile var e: Exception = null
-      
+
       // reads possible entries once and stores them
       // then reads all these N more times to check if the
       // state stayed the same
       class Worker extends Thread {
         setName("Worker " + name)
-        
+
         override def run() =
           try check()
           catch {
             case ex: Exception => e = ex
           }
-        
+
         def check() {
           val initial = mutable.Map[Wrap, Int]()
           for (i <- 0 until sz) trie.get(new Wrap(i)) match {
             case Some(i) => initial.put(new Wrap(i), i)
             case None => // do nothing
           }
-          
+
           for (k <- 0 until N) {
             // modify
             for ((key, value) <- initial) {
@@ -194,7 +195,7 @@ object SnapshotSpec extends Spec {
               val newv = -oldv
               trie.replace(key, oldv, newv)
             }
-            
+
             // check
             for (i <- 0 until sz) if (initial.contains(new Wrap(i))) {
               val expected = if (k % 2 == 0) -i else i
@@ -206,27 +207,27 @@ object SnapshotSpec extends Spec {
           }
         }
       }
-      
+
       val worker = new Worker
       worker.start()
       worker.join()
-      
+
       if (e ne null) {
         e.printStackTrace()
         throw e
       }
     }
-    
+
     "have a consistent non-quiescent snapshot, concurrent with modifications" in {
       val sz = 9000
       val N = 1000
       val W = 10
       val S = 400
-      
+
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct(new Wrap(i)) = i
       val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
-      
+
       threads.foreach(_.start())
       for (i <- 0 until S) {
         consistentReadOnly("non-qm", ct.snapshot(), sz, 5)
@@ -234,7 +235,7 @@ object SnapshotSpec extends Spec {
       }
       threads.foreach(_.join())
     }
-    
+
     "work when many concurrent snapshots are taken, concurrent with modifications" in {
       val sz = 12000
       val W = 10
@@ -243,7 +244,7 @@ object SnapshotSpec extends Spec {
       val snaptimes = 600
       val ct = new TrieMap[Wrap, Int]
       for (i <- 0 until sz) ct(new Wrap(i)) = i
-      
+
       class Snapshooter extends Thread {
         setName("Snapshooter")
         override def run() {
@@ -254,14 +255,14 @@ object SnapshotSpec extends Spec {
           }
         }
       }
-      
+
       val mods = for (i <- 0 until W) yield new Modifier(ct, i, modifytimes, sz)
       val shooters = for (i <- 0 until S) yield new Snapshooter
       val threads = mods ++ shooters
       threads.foreach(_.start())
       threads.foreach(_.join())
     }
-    
+
   }
-  
+
 }
diff --git a/test/files/run/dead-code-elimination.check b/test/files/run/dead-code-elimination.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/dead-code-elimination.scala b/test/files/run/dead-code-elimination.scala
index 1af17c9..fd3f2a9 100644
--- a/test/files/run/dead-code-elimination.scala
+++ b/test/files/run/dead-code-elimination.scala
@@ -1,9 +1,9 @@
 
-// This testcase is a snippet that did not compile correctly under 
-// pre-release 2.10.x. The relevant discussion around it can be 
+// This testcase is a snippet that did not compile correctly under
+// pre-release 2.10.x. The relevant discussion around it can be
 // found at:
 // https://groups.google.com/forum/?fromgroups#!topic/scala-internals/qcyTjk8euUI[1-25]
-// 
+//
 // The reason it did not compile is related to the fact that ICode
 // ops did not correctly define the stack entries they consumed and
 // the dead code elimination phase was unable to correctly reconstruct
@@ -12,7 +12,7 @@
 // Originally, this did not compile, but I included it in the run
 // tests because this was ASM-dependent and did not happen for GenJVM.
 //
-// Thus, we run the code and force the loading of class B -- if the 
+// Thus, we run the code and force the loading of class B -- if the
 // bytecode is incorrect, it will fail the test.
 
 final class A {
@@ -27,7 +27,7 @@ final class A {
 
 object Test {
   def main(args: Array[String]): Unit = {
-    // force the loading of B 
+    // force the loading of B
     (new A).f
   }
 }
diff --git a/test/files/run/deeps.check b/test/files/run/deeps.check
new file mode 100644
index 0000000..a68e474
--- /dev/null
+++ b/test/files/run/deeps.check
@@ -0,0 +1,87 @@
+testEquals1
+false
+false
+true
+
+testEquals2
+false
+false
+true
+
+testEquals3
+x=Array(1)
+y=Array(1)
+false
+false
+true
+
+x=Array(Array(1), Array(1))
+y=Array(Array(1), Array(1))
+false
+false
+true
+
+x=Array(Array(Array(1), Array(1)), Array(Array(1), Array(1)))
+y=Array(Array(Array(1), Array(1)), Array(Array(1), Array(1)))
+false
+false
+true
+
+testEquals4
+false
+false
+true
+false
+false
+true
+Array(true, false)
+Array(true, false)
+[true;false]
+true;false
+
+Array(Array(true, false), Array(true, false))
+Array(Array(true, false), Array(true, false))
+[Array(true, false);Array(true, false)]
+Array(true, false);Array(true, false)
+
+Array(Array(Array(true, false), Array(true, false)), Array(Array(true, false), Array(true, false)))
+Array(Array(Array(true, false), Array(true, false)), Array(Array(true, false), Array(true, false)))
+[Array(Array(true, false), Array(true, false));Array(Array(true, false), Array(true, false))]
+Array(Array(true, false), Array(true, false));Array(Array(true, false), Array(true, false))
+
+Array(1.0, 0.0)
+Array(1.0, 0.0)
+[1.0;0.0]
+1.0;0.0
+
+Array(Array(1.0, 0.0), Array(1.0, 0.0))
+Array(Array(1.0, 0.0), Array(1.0, 0.0))
+[Array(1.0, 0.0);Array(1.0, 0.0)]
+Array(1.0, 0.0);Array(1.0, 0.0)
+
+Array(Array(Array(1.0, 0.0), Array(1.0, 0.0)), Array(Array(1.0, 0.0), Array(1.0, 0.0)))
+Array(Array(Array(1.0, 0.0), Array(1.0, 0.0)), Array(Array(1.0, 0.0), Array(1.0, 0.0)))
+[Array(Array(1.0, 0.0), Array(1.0, 0.0));Array(Array(1.0, 0.0), Array(1.0, 0.0))]
+Array(Array(1.0, 0.0), Array(1.0, 0.0));Array(Array(1.0, 0.0), Array(1.0, 0.0))
+
+Array(a, b)
+Array(a, b)
+[a;b]
+a;b
+
+Array(Array(a, b), Array(a, b))
+Array(Array(a, b), Array(a, b))
+[Array(a, b);Array(a, b)]
+Array(a, b);Array(a, b)
+
+Array(Array(Array(a, b), Array(a, b)), Array(Array(a, b), Array(a, b)))
+Array(Array(Array(a, b), Array(a, b)), Array(Array(a, b), Array(a, b)))
+[Array(Array(a, b), Array(a, b));Array(Array(a, b), Array(a, b))]
+Array(Array(a, b), Array(a, b));Array(Array(a, b), Array(a, b))
+
+[Array(true, false); Array(false)]
+[Array(1, 2); Array(3)]
+[Array(1, 2); Array(3)]
+
+Array(boo, and, foo)
+Array(a)
diff --git a/test/files/run/deeps.scala b/test/files/run/deeps.scala
new file mode 100644
index 0000000..6049cc6
--- /dev/null
+++ b/test/files/run/deeps.scala
@@ -0,0 +1,114 @@
+//############################################################################
+// deepEquals / deep.toString
+//############################################################################
+
+//############################################################################
+// need to revisit array equality
+object Test {
+
+  def testEquals1 {
+    println(Array(1) == Array(1))
+    println(Array(1) equals Array(1))
+    println(Array(1).deep == Array(1).deep)
+    println
+  }
+
+  def testEquals2 {
+    println(Array(Array(1), Array(2)) == Array(Array(1), Array(2)))
+    println(Array(Array(1), Array(2)) equals Array(Array(1), Array(2)))
+    println(Array(Array(1), Array(2)).deep equals Array(Array(1), Array(2)).deep)
+    println
+  }
+
+  def testEquals3 {
+    val a1 = Array(1)
+    val b1 = Array(1)
+    val a2 = Array(a1, b1)
+    val b2 = Array(a1, b1)
+    val a3 = Array(a2, b2)
+    val b3 = Array(a2, b2)
+    def test[T](x: Array[T], y: Array[T]) {
+      println("x=" + x.deep.toString)
+      println("y=" + y.deep.toString)
+      println(x == y)
+      println(x equals y)
+      println(x.deep == y.deep)
+      println
+    }
+    test(a1, b1)
+    test(a2, b2)
+    test(a3, b3)
+  }
+
+  def testEquals4 {
+    println("boo:and:foo".split(':') == "boo:and:foo".split(':'))
+    println("boo:and:foo".split(':') equals "boo:and:foo".split(':'))
+    println("boo:and:foo".split(':').deep == "boo:and:foo".split(':').deep)
+
+    val xs = new java.util.ArrayList[String](); xs.add("a")
+    val ys = new java.util.ArrayList[String](); ys.add("a")
+    println(xs.toArray == ys.toArray)
+    println(xs.toArray equals ys.toArray)
+    println(xs.toArray.deep == ys.toArray.deep)
+  }
+
+  def testToString1 {
+    def sweep(s: String) = (
+      s.replaceAll("D@[0-9a-fA-F]+", "D at 0000000")
+       .replaceAll("Z@[0-9a-fA-F]+", "Z at 0000000")
+       .replaceAll(";@[0-9a-fA-F]+", ";@0000000")
+    )
+    def test[T](a: Array[T]) {
+      println(sweep(a.deep.toString))
+      println(a.deep.toString)
+      println(a.deep.mkString("[", ";", "]"))
+      println(a.deep.mkString(";"))
+      println
+    }
+
+    val ba1 = Array(true, false)
+    val ba2 = Array(ba1, ba1)
+    val ba3 = Array(ba2, ba2)
+    test(ba1)
+    test(ba2)
+    test(ba3)
+
+    val da1 = Array(1.0d, 0.0d)
+    val da2 = Array(da1, da1)
+    val da3 = Array(da2, da2)
+    test(da1)
+    test(da2)
+    test(da3)
+
+    val sa1 = Array("a", "b")
+    val sa2 = Array(sa1, sa1)
+    val sa3 = Array(sa2, sa2)
+    test(sa1)
+    test(sa2)
+    test(sa3)
+  }
+
+  def testToString2 {
+    println(Array(Array(true, false), Array(false)).deep.mkString("[", "; ", "]"))
+    println(Array(Array('1', '2'), Array('3')).deep.mkString("[", "; ", "]"))
+    println(Array(Array(1, 2), Array(3)).deep.mkString("[", "; ", "]"))
+    println
+  }
+
+  def testToString3 {
+    println("boo:and:foo".split(':').deep.toString)
+
+    val xs = new java.util.ArrayList[String](); xs.add("a")
+    println(xs.toArray.deep.toString)
+  }
+
+  def main(args: Array[String]): Unit = {
+    println("testEquals1") ; testEquals1
+    println("testEquals2") ; testEquals2
+    println("testEquals3") ; testEquals3
+    println("testEquals4") ; testEquals4
+    testToString1
+    testToString2
+    testToString3
+  }
+}
diff --git a/test/files/run/delambdafy-dependent-on-param-subst-2.scala b/test/files/run/delambdafy-dependent-on-param-subst-2.scala
new file mode 100644
index 0000000..7b6fc59
--- /dev/null
+++ b/test/files/run/delambdafy-dependent-on-param-subst-2.scala
@@ -0,0 +1,20 @@
+trait M[-X] {
+  def m(x: X): Boolean
+}
+
+class C
+class A { class C }
+
+object Test {
+  def main(args: Array[String]) {
+    val a = new A
+
+    // class O extends M[a.C] { def m(x: a.C) = true }
+    // (new O: M[Null]).m(null) // Okay
+
+    ((a: A) => {
+      class N extends M[a.C] { def m(x: a.C) = true }
+      new N: M[Null]
+    }).apply(a).m(null) // NPE, missing bridge
+  }
+}
diff --git a/test/files/run/delambdafy-dependent-on-param-subst.flags b/test/files/run/delambdafy-dependent-on-param-subst.flags
new file mode 100644
index 0000000..2b27e19
--- /dev/null
+++ b/test/files/run/delambdafy-dependent-on-param-subst.flags
@@ -0,0 +1 @@
+-Ydelambdafy:method
\ No newline at end of file
diff --git a/test/files/run/delambdafy-dependent-on-param-subst.scala b/test/files/run/delambdafy-dependent-on-param-subst.scala
new file mode 100644
index 0000000..7b6fc59
--- /dev/null
+++ b/test/files/run/delambdafy-dependent-on-param-subst.scala
@@ -0,0 +1,20 @@
+trait M[-X] {
+  def m(x: X): Boolean
+}
+
+class C
+class A { class C }
+
+object Test {
+  def main(args: Array[String]) {
+    val a = new A
+
+    // class O extends M[a.C] { def m(x: a.C) = true }
+    // (new O: M[Null]).m(null) // Okay
+
+    ((a: A) => {
+      class N extends M[a.C] { def m(x: a.C) = true }
+      new N: M[Null]
+    }).apply(a).m(null) // NPE, missing bridge
+  }
+}
diff --git a/test/files/run/delambdafy-nested-by-name.check b/test/files/run/delambdafy-nested-by-name.check
new file mode 100644
index 0000000..94954ab
--- /dev/null
+++ b/test/files/run/delambdafy-nested-by-name.check
@@ -0,0 +1,2 @@
+hello
+world
diff --git a/test/files/run/delambdafy-nested-by-name.scala b/test/files/run/delambdafy-nested-by-name.scala
new file mode 100644
index 0000000..4498b33
--- /dev/null
+++ b/test/files/run/delambdafy-nested-by-name.scala
@@ -0,0 +1,11 @@
+// during development of delayed delambdafication I created a bug where calling a by-name method with a by-name argument that
+// itself contained a by-name argument would cause a class cast exception. That bug wasn't found in the existing test suite
+// so this test covers that case
+object Test {
+  def meth1(arg1: => String) = arg1
+  def meth2(arg2: => String) = meth1({println("hello"); arg2})
+
+  def main(args: Array[String]) {
+    println(meth2("world"))
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/delambdafy-two-lambdas.check b/test/files/run/delambdafy-two-lambdas.check
new file mode 100644
index 0000000..ed9ea40
--- /dev/null
+++ b/test/files/run/delambdafy-two-lambdas.check
@@ -0,0 +1,2 @@
+13
+24
diff --git a/test/files/run/delambdafy-two-lambdas.scala b/test/files/run/delambdafy-two-lambdas.scala
new file mode 100644
index 0000000..decede7
--- /dev/null
+++ b/test/files/run/delambdafy-two-lambdas.scala
@@ -0,0 +1,12 @@
+/*
+ * Tests if two lambdas defined in the same class do not lead to
+ * name clashes.
+ */
+object Test {
+	def takeLambda(f: Int => Int ): Int = f(12)
+
+	def main(args: Array[String]): Unit = {
+		println(takeLambda(x => x+1))
+		println(takeLambda(x => x*2))
+	}
+}
diff --git a/test/files/run/delambdafy_t6028.check b/test/files/run/delambdafy_t6028.check
new file mode 100644
index 0000000..92cfbae
--- /dev/null
+++ b/test/files/run/delambdafy_t6028.check
@@ -0,0 +1,57 @@
+[[syntax trees at end of                lambdalift]] // newSource1.scala
+package <empty> {
+  class T extends Object {
+    <paramaccessor> private[this] val classParam: Int = _;
+    def <init>(classParam: Int): T = {
+      T.super.<init>();
+      ()
+    };
+    private[this] val field: Int = 0;
+    <stable> <accessor> def field(): Int = T.this.field;
+    def foo(methodParam: Int): Function0 = {
+      val methodLocal: Int = 0;
+      {
+        (() => T.this.$anonfun$1(methodParam, methodLocal)).$asInstanceOf[Function0]()
+      }
+    };
+    def bar(barParam: Int): Object = {
+      @volatile var MethodLocalObject$module: runtime.VolatileObjectRef = scala.runtime.VolatileObjectRef.zero();
+      T.this.MethodLocalObject$1(barParam, MethodLocalObject$module)
+    };
+    def tryy(tryyParam: Int): Function0 = {
+      var tryyLocal: runtime.IntRef = scala.runtime.IntRef.create(0);
+      {
+        (() => T.this.$anonfun$2(tryyParam, tryyLocal)).$asInstanceOf[Function0]()
+      }
+    };
+    final <artifact> private[this] def $anonfun$1(methodParam$1: Int, methodLocal$1: Int): Int = T.this.classParam.+(T.this.field()).+(methodParam$1).+(methodLocal$1);
+    abstract trait MethodLocalTrait$1 extends Object {
+      <synthetic> <stable> <artifact> def $outer(): T
+    };
+    object MethodLocalObject$2 extends Object with T#MethodLocalTrait$1 {
+      def <init>($outer: T, barParam$1: Int): T#MethodLocalObject$2.type = {
+        MethodLocalObject$2.super.<init>();
+        MethodLocalObject$2.this.$asInstanceOf[T#MethodLocalTrait$1$class]()./*MethodLocalTrait$1$class*/$init$(barParam$1);
+        ()
+      };
+      <synthetic> <paramaccessor> <artifact> private[this] val $outer: T = _;
+      <synthetic> <stable> <artifact> def $outer(): T = MethodLocalObject$2.this.$outer;
+      <synthetic> <stable> <artifact> def $outer(): T = MethodLocalObject$2.this.$outer
+    };
+    final <stable> private[this] def MethodLocalObject$1(barParam$1: Int, MethodLocalObject$module$1: runtime.VolatileObjectRef): T#MethodLocalObject$2.type = {
+      MethodLocalObject$module$1.elem = new T#MethodLocalObject$2.type(T.this, barParam$1);
+      MethodLocalObject$module$1.elem.$asInstanceOf[T#MethodLocalObject$2.type]()
+    };
+    abstract trait MethodLocalTrait$1$class extends Object with T#MethodLocalTrait$1 {
+      def /*MethodLocalTrait$1$class*/$init$(barParam$1: Int): Unit = {
+        ()
+      };
+      scala.this.Predef.print(scala.Int.box(barParam$1))
+    };
+    final <artifact> private[this] def $anonfun$2(tryyParam$1: Int, tryyLocal$1: runtime.IntRef): Unit = try {
+      tryyLocal$1.elem = tryyParam$1
+    } finally ()
+  }
+}
+
+warning: there were 1 feature warning(s); re-run with -feature for details
diff --git a/test/files/run/delambdafy_t6028.scala b/test/files/run/delambdafy_t6028.scala
new file mode 100644
index 0000000..0b7ef48
--- /dev/null
+++ b/test/files/run/delambdafy_t6028.scala
@@ -0,0 +1,21 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+  override def extraSettings: String = "-usejavacp -Ydelambdafy:method -Xprint:lambdalift -d " + testOutput.path
+
+  override def code = """class T(classParam: Int) {
+                        |  val field: Int = 0
+                        |  def foo(methodParam: Int) = {val methodLocal = 0 ; () => classParam + field + methodParam + methodLocal }
+                        |  def bar(barParam: Int) = { trait MethodLocalTrait { print(barParam) }; object MethodLocalObject extends MethodLocalTrait; MethodLocalObject }
+                        |  def tryy(tryyParam: Int) = { var tryyLocal = 0; () => try { tryyLocal = tryyParam } finally () }
+                        |}
+                        |""".stripMargin.trim
+
+  override def show(): Unit = {
+    Console.withErr(System.out) {
+      compile()
+    }
+  }
+}
diff --git a/test/files/run/delambdafy_t6555.check b/test/files/run/delambdafy_t6555.check
new file mode 100644
index 0000000..6b174c0
--- /dev/null
+++ b/test/files/run/delambdafy_t6555.check
@@ -0,0 +1,15 @@
+[[syntax trees at end of                specialize]] // newSource1.scala
+package <empty> {
+  class Foo extends Object {
+    def <init>(): Foo = {
+      Foo.super.<init>();
+      ()
+    };
+    private[this] val f: Int => Int = {
+      final <artifact> def $anonfun(param: Int): Int = param;
+      ((param: Int) => $anonfun(param))
+    };
+    <stable> <accessor> def f(): Int => Int = Foo.this.f
+  }
+}
+
diff --git a/test/files/run/delambdafy_t6555.scala b/test/files/run/delambdafy_t6555.scala
new file mode 100644
index 0000000..a1dcfe7
--- /dev/null
+++ b/test/files/run/delambdafy_t6555.scala
@@ -0,0 +1,15 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+  override def extraSettings: String = "-usejavacp -Xprint:specialize -Ydelambdafy:method -d " + testOutput.path
+
+  override def code = "class Foo { val f = (param: Int) => param } "
+
+  override def show(): Unit = {
+    Console.withErr(System.out) {
+      compile()
+    }
+  }
+}
diff --git a/test/files/run/delambdafy_uncurry_byname_inline.check b/test/files/run/delambdafy_uncurry_byname_inline.check
new file mode 100644
index 0000000..0dc69b3
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_byname_inline.check
@@ -0,0 +1,21 @@
+[[syntax trees at end of                   uncurry]] // newSource1.scala
+package <empty> {
+  class Foo extends Object {
+    def <init>(): Foo = {
+      Foo.super.<init>();
+      ()
+    };
+    def bar(x: () => Int): Int = x.apply();
+    def foo(): Int = Foo.this.bar({
+      @SerialVersionUID(0) final <synthetic> class $anonfun extends scala.runtime.AbstractFunction0[Int] with Serializable {
+        def <init>(): <$anon: () => Int> = {
+          $anonfun.super.<init>();
+          ()
+        };
+        final def apply(): Int = 1
+      };
+      (new <$anon: () => Int>(): () => Int)
+    })
+  }
+}
+
diff --git a/test/files/run/delambdafy_uncurry_byname_inline.scala b/test/files/run/delambdafy_uncurry_byname_inline.scala
new file mode 100644
index 0000000..8f480fa
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_byname_inline.scala
@@ -0,0 +1,20 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+  override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:inline -d " + testOutput.path
+
+  override def code = """class Foo {
+                        |  def bar(x: => Int) = x
+                        |  
+                        |  def foo = bar(1)
+                        |}
+                        |""".stripMargin.trim
+
+  override def show(): Unit = {
+    Console.withErr(System.out) {
+      compile()
+    }
+  }
+}
diff --git a/test/files/run/delambdafy_uncurry_byname_method.check b/test/files/run/delambdafy_uncurry_byname_method.check
new file mode 100644
index 0000000..cd3edc7
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_byname_method.check
@@ -0,0 +1,15 @@
+[[syntax trees at end of                   uncurry]] // newSource1.scala
+package <empty> {
+  class Foo extends Object {
+    def <init>(): Foo = {
+      Foo.super.<init>();
+      ()
+    };
+    def bar(x: () => Int): Int = x.apply();
+    def foo(): Int = Foo.this.bar({
+      final <artifact> def $anonfun(): Int = 1;
+      (() => $anonfun())
+    })
+  }
+}
+
diff --git a/test/files/run/delambdafy_uncurry_byname_method.scala b/test/files/run/delambdafy_uncurry_byname_method.scala
new file mode 100644
index 0000000..1adeec8
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_byname_method.scala
@@ -0,0 +1,20 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+  override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:method -Ystop-after:uncurry -d " + testOutput.path
+
+  override def code = """class Foo {
+                        |  def bar(x: => Int) = x
+                        |  
+                        |  def foo = bar(1)
+                        |}
+                        |""".stripMargin.trim
+
+  override def show(): Unit = {
+    Console.withErr(System.out) {
+      compile()
+    }
+  }
+}
diff --git a/test/files/run/delambdafy_uncurry_inline.check b/test/files/run/delambdafy_uncurry_inline.check
new file mode 100644
index 0000000..e2b024b
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_inline.check
@@ -0,0 +1,23 @@
+[[syntax trees at end of                   uncurry]] // newSource1.scala
+package <empty> {
+  class Foo extends Object {
+    def <init>(): Foo = {
+      Foo.super.<init>();
+      ()
+    };
+    def bar(): Unit = {
+      val f: Int => Int = {
+        @SerialVersionUID(0) final <synthetic> class $anonfun extends scala.runtime.AbstractFunction1[Int,Int] with Serializable {
+          def <init>(): <$anon: Int => Int> = {
+            $anonfun.super.<init>();
+            ()
+          };
+          final def apply(x: Int): Int = x.+(1)
+        };
+        (new <$anon: Int => Int>(): Int => Int)
+      };
+      ()
+    }
+  }
+}
+
diff --git a/test/files/run/delambdafy_uncurry_inline.scala b/test/files/run/delambdafy_uncurry_inline.scala
new file mode 100644
index 0000000..b42b65f
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_inline.scala
@@ -0,0 +1,20 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+  override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:inline -d " + testOutput.path
+
+  override def code = """class Foo {
+                        |  def bar = {
+                        |    val f = {x: Int => x + 1}
+                        |  }
+                        |}
+                        |""".stripMargin.trim
+
+  override def show(): Unit = {
+    Console.withErr(System.out) {
+      compile()
+    }
+  }
+}
diff --git a/test/files/run/delambdafy_uncurry_method.check b/test/files/run/delambdafy_uncurry_method.check
new file mode 100644
index 0000000..5ee3d17
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_method.check
@@ -0,0 +1,17 @@
+[[syntax trees at end of                   uncurry]] // newSource1.scala
+package <empty> {
+  class Foo extends Object {
+    def <init>(): Foo = {
+      Foo.super.<init>();
+      ()
+    };
+    def bar(): Unit = {
+      val f: Int => Int = {
+        final <artifact> def $anonfun(x: Int): Int = x.+(1);
+        ((x: Int) => $anonfun(x))
+      };
+      ()
+    }
+  }
+}
+
diff --git a/test/files/run/delambdafy_uncurry_method.scala b/test/files/run/delambdafy_uncurry_method.scala
new file mode 100644
index 0000000..a988fb2
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_method.scala
@@ -0,0 +1,20 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+  override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:method -Ystop-after:uncurry -d " + testOutput.path
+
+  override def code = """class Foo {
+                        |  def bar = {
+                        |    val f = {x: Int => x + 1}
+                        |  }
+                        |}
+                        |""".stripMargin.trim
+
+  override def show(): Unit = {
+    Console.withErr(System.out) {
+      compile()
+    }
+  }
+}
diff --git a/test/files/run/delay-bad.check b/test/files/run/delay-bad.check
index 9d9c828..5d8c5fa 100644
--- a/test/files/run/delay-bad.check
+++ b/test/files/run/delay-bad.check
@@ -1,3 +1,10 @@
+delay-bad.scala:53: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    f(new C { 5 })
+              ^
+delay-bad.scala:73: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    f(new { val x = 5 } with E() { 5 })
+                                   ^
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
 
 
 // new C { }
diff --git a/test/files/run/delay-good.check b/test/files/run/delay-good.check
index 8eb04c7..b4f6b04 100644
--- a/test/files/run/delay-good.check
+++ b/test/files/run/delay-good.check
@@ -1,3 +1,9 @@
+delay-good.scala:53: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    f(new C { 5 })
+              ^
+delay-good.scala:73: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    f(new { val x = 5 } with E() { 5 })
+                                   ^
 
 
 // new C { }
diff --git a/test/files/run/deprecate-early-type-defs.check b/test/files/run/deprecate-early-type-defs.check
new file mode 100644
index 0000000..1ee01df
--- /dev/null
+++ b/test/files/run/deprecate-early-type-defs.check
@@ -0,0 +1,3 @@
+deprecate-early-type-defs.scala:1: warning: early type members are deprecated. Move them to the regular body: the semantics are the same.
+object Test extends { type T = Int } with App
+                           ^
diff --git a/test/files/run/deprecate-early-type-defs.flags b/test/files/run/deprecate-early-type-defs.flags
new file mode 100644
index 0000000..c36e713
--- /dev/null
+++ b/test/files/run/deprecate-early-type-defs.flags
@@ -0,0 +1 @@
+-deprecation
\ No newline at end of file
diff --git a/test/files/run/deprecate-early-type-defs.scala b/test/files/run/deprecate-early-type-defs.scala
new file mode 100644
index 0000000..99e4216
--- /dev/null
+++ b/test/files/run/deprecate-early-type-defs.scala
@@ -0,0 +1 @@
+object Test extends { type T = Int } with App
\ No newline at end of file
diff --git a/test/files/run/distinct.scala b/test/files/run/distinct.scala
index 09e5a07..0b8971e 100644
--- a/test/files/run/distinct.scala
+++ b/test/files/run/distinct.scala
@@ -3,9 +3,9 @@
  */
 object Test {
   val alphabet = 'a' to 'z' mkString ""
-  val alphaList = 'a' to 'z' toList
+  val alphaList = ('a' to 'z').toList
   def shuffled = util.Random.shuffle(alphaList)
-  
+
   def main(args: Array[String]): Unit = {
     val longList = alphaList ++ (1 to 9 flatMap (_ => shuffled))
     val result = longList.distinct mkString ""
diff --git a/test/files/run/duration-coarsest.scala b/test/files/run/duration-coarsest.scala
new file mode 100644
index 0000000..51cb792
--- /dev/null
+++ b/test/files/run/duration-coarsest.scala
@@ -0,0 +1,28 @@
+import scala.concurrent.duration._
+import scala.language.postfixOps
+
+object Test extends App {
+  List(
+    (60 minutes,    1 hour),
+    (2000 millis,   2 seconds),
+    (2000 micros,   2 millis),
+    (2000 nanos,    2 micros),
+    (2000000 nanos, 2 millis),
+    (48 hours,      2 days),
+    (5 seconds,     5 seconds),
+    (1 second,      1 second)
+  ) foreach {
+    case (x, expected) =>
+      val actual = x.toCoarsest
+      assert(actual.unit == expected.unit, s"$actual, $expected")
+      assert(actual.length == expected.length, s"$actual, $expected")
+  }
+
+  List(
+    45 minutes,
+    500 millis,
+    1500 millis,
+    23 hours,
+    40 days
+  ) foreach (x => assert(x == x.toCoarsest, x))
+}
\ No newline at end of file
diff --git a/test/files/run/dynamic-applyDynamic.check b/test/files/run/dynamic-applyDynamic.check
index 89a0d55..ae6996f 100644
--- a/test/files/run/dynamic-applyDynamic.check
+++ b/test/files/run/dynamic-applyDynamic.check
@@ -1,8 +1,8 @@
 [[syntax trees at end of                     typer]] // newSource1.scala
 [0:67]package [0:0]<empty> {
   [0:67]object X extends [9:67][67]scala.AnyRef {
-    [9]def <init>(): [9]X.type = [9]{
-      [9][9][9]X.super.<init>();
+    [67]def <init>(): [9]X.type = [67]{
+      [67][67][67]X.super.<init>();
       [9]()
     };
     [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D();
diff --git a/test/files/run/dynamic-applyDynamicNamed.check b/test/files/run/dynamic-applyDynamicNamed.check
index 17fa496..c4e050b 100644
--- a/test/files/run/dynamic-applyDynamicNamed.check
+++ b/test/files/run/dynamic-applyDynamicNamed.check
@@ -1,14 +1,14 @@
 [[syntax trees at end of                     typer]] // newSource1.scala
 [0:97]package [0:0]<empty> {
   [0:97]object X extends [9:97][97]scala.AnyRef {
-    [9]def <init>(): [9]X.type = [9]{
-      [9][9][9]X.super.<init>();
+    [97]def <init>(): [9]X.type = [97]{
+      [97][97][97]X.super.<init>();
       [9]()
     };
     [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D();
     [21]<stable> <accessor> def d: [21]D = [21][21]X.this.d;
-    [37:70][37:38][37:38][37]X.this.d.applyDynamicNamed(<39:43>"meth")([44:55][44][44][44]scala.this.Tuple2.apply[[44]String, [44]Int]([44:50]"value1", [53:55]10), [57:69][57][57][57]scala.this.Tuple2.apply[[57]String, [57]Int]([57:63]"value2", [66:69]100));
-    [77:91]<77:78><77:78>[77]X.this.d.applyDynamicNamed(<77:78>"apply")([79:90][79][79][79]scala.this.Tuple2.apply[[79]String, [79]Int]([79:85]"value1", [88:90]10))
+    [37:70][37:38][37:38][37]X.this.d.applyDynamicNamed(<39:43>"meth")([44:55][44][44]scala.Tuple2.apply[[44]String, [44]Int]([44:50]"value1", [53:55]10), [57:69][57][57]scala.Tuple2.apply[[57]String, [57]Int]([57:63]"value2", [66:69]100));
+    [77:91]<77:78><77:78>[77]X.this.d.applyDynamicNamed(<77:78>"apply")([79:90][79][79]scala.Tuple2.apply[[79]String, [79]Int]([79:85]"value1", [88:90]10))
   }
 }
 
diff --git a/test/files/run/dynamic-selectDynamic.check b/test/files/run/dynamic-selectDynamic.check
index 7f95ed3..9635ca4 100644
--- a/test/files/run/dynamic-selectDynamic.check
+++ b/test/files/run/dynamic-selectDynamic.check
@@ -1,8 +1,8 @@
 [[syntax trees at end of                     typer]] // newSource1.scala
 [0:50]package [0:0]<empty> {
   [0:50]object X extends [9:50][50]scala.AnyRef {
-    [9]def <init>(): [9]X.type = [9]{
-      [9][9][9]X.super.<init>();
+    [50]def <init>(): [9]X.type = [50]{
+      [50][50][50]X.super.<init>();
       [9]()
     };
     [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D();
diff --git a/test/files/run/dynamic-updateDynamic.check b/test/files/run/dynamic-updateDynamic.check
index 3e21b7d..154fea3 100644
--- a/test/files/run/dynamic-updateDynamic.check
+++ b/test/files/run/dynamic-updateDynamic.check
@@ -1,8 +1,8 @@
 [[syntax trees at end of                     typer]] // newSource1.scala
 [0:69]package [0:0]<empty> {
   [0:69]object X extends [9:69][69]scala.AnyRef {
-    [9]def <init>(): [9]X.type = [9]{
-      [9][9][9]X.super.<init>();
+    [69]def <init>(): [9]X.type = [69]{
+      [69][69][69]X.super.<init>();
       [9]()
     };
     [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D();
diff --git a/test/files/run/elidable-noflags.scala b/test/files/run/elidable-noflags.scala
index 5192e34..1b9c511 100644
--- a/test/files/run/elidable-noflags.scala
+++ b/test/files/run/elidable-noflags.scala
@@ -9,7 +9,7 @@ object Test {
   @elidable(100000) def f5() = println("Good for me, I was not elided.")
   @elidable(OFF) def f6() = println("Good for me, I was not elided.")
   @elidable(ALL) def f7() = println("ESPECIALLY good for me, I was not elided.")
-  
+
   def main(args: Array[String]): Unit = {
     f1()
     f2()
diff --git a/test/files/run/emptypf.scala b/test/files/run/emptypf.scala
index 8aa0906..eb3e3e6 100644
--- a/test/files/run/emptypf.scala
+++ b/test/files/run/emptypf.scala
@@ -5,10 +5,10 @@ object Test {
       case s      => s.length
     }
   }
-  
+
   def main(args: Array[String]): Unit = {
     println(f("abc"))
-    println(f("def")) 
+    println(f("def"))
     println(PartialFunction.empty[String, Int] isDefinedAt "abc")
   }
 }
diff --git a/test/files/run/enrich-gentraversable.scala b/test/files/run/enrich-gentraversable.scala
index 52eded5..36412e6 100644
--- a/test/files/run/enrich-gentraversable.scala
+++ b/test/files/run/enrich-gentraversable.scala
@@ -1,3 +1,6 @@
+import scala.language.implicitConversions
+import scala.language.postfixOps
+
 object Test extends App {
   import scala.collection.{GenTraversableOnce,GenTraversableLike}
   import scala.collection.generic._
@@ -10,22 +13,22 @@ object Test extends App {
     }
     implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableLike[Repr]): FilterMapImpl[fr.A,Repr] =
       new FilterMapImpl[fr.A, Repr](fr.conversion(r))
-  
+
     val l = List(1, 2, 3, 4, 5)
     val fml = l.filterMap(i => if(i % 2 == 0) Some(i) else None)
     typed[List[Int]](fml)
     println(fml)
-  
+
     val a = Array(1, 2, 3, 4, 5)
     val fma = a.filterMap(i => if(i % 2 == 0) Some(i) else None)
     typed[Array[Int]](fma)
     println(fma.deep)
-    
+
     val s = "Hello World"
     val fms1 = s.filterMap(c => if(c >= 'A' && c <= 'Z') Some(c) else None)
     typed[String](fms1)
     println(fms1)
-    
+
     val fms2 = s.filterMap(c =>if(c % 2 == 0) Some(c.toInt) else None)
     typed[IndexedSeq[Int]](fms2)
     println(fms2)
@@ -35,28 +38,28 @@ object Test extends App {
       final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = {
         val b = cbf()
         for(e <- r.seq) f(e) foreach (b +=)
-  
+
         b.result
       }
     }
     implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] =
       new FilterMapImpl[fr.A, Repr](fr.conversion(r))
-  
+
     val l = List(1, 2, 3, 4, 5)
     val fml = l.filterMap(i => if(i % 2 == 0) Some(i) else None)
     typed[List[Int]](fml)
     println(fml)
-  
+
     val a = Array(1, 2, 3, 4, 5)
     val fma = a.filterMap(i => if(i % 2 == 0) Some(i) else None)
     typed[Array[Int]](fma)
     println(fma.deep)
-    
+
     val s = "Hello World"
     val fms1 = s.filterMap(c => if(c >= 'A' && c <= 'Z') Some(c) else None)
     typed[String](fms1)
     println(fms1)
-    
+
     val fms2 = s.filterMap(c =>if(c % 2 == 0) Some(c.toInt) else None)
     typed[IndexedSeq[Int]](fms2)
     println(fms2)
diff --git a/test/files/run/enums.scala b/test/files/run/enums.scala
index 9cdeed2..3aad7ec 100644
--- a/test/files/run/enums.scala
+++ b/test/files/run/enums.scala
@@ -36,8 +36,11 @@ object Test2 {
 
 object Test3 {
 
-  object Direction extends Enumeration("North", "South", "East", "West") {
-    val North, South, East, West = Value;
+  object Direction extends Enumeration {
+    val North = Value("North")
+    val South = Value("South")
+    val East = Value("East")
+    val West = Value("West")
   }
 
   def run: Int = {
@@ -48,8 +51,11 @@ object Test3 {
 
 object Test4 {
 
-  object Direction extends Enumeration("North", "South", "East", "West") {
-    val North, South, East, West = Value;
+  object Direction extends Enumeration {
+    val North = Value("North")
+    val South = Value("South")
+    val East = Value("East")
+    val West = Value("West")
   }
 
   def run: Int = {
diff --git a/test/files/run/equality.scala b/test/files/run/equality.scala
index 68055fd..ff59898 100644
--- a/test/files/run/equality.scala
+++ b/test/files/run/equality.scala
@@ -2,7 +2,7 @@
 object Test
 {
   import scala.runtime.ScalaRunTime.hash
-  
+
   def makeFromInt(x: Int) = List(
     x.toByte, x.toShort, x.toInt, x.toLong, x.toFloat, x.toDouble, BigInt(x), BigDecimal(x)
   ) ::: (
@@ -11,26 +11,26 @@ object Test
   def makeFromDouble(x: Double) = List(
     x.toShort, x.toInt, x.toLong, x.toFloat, x.toDouble, BigInt(x.toInt), BigDecimal(x)
   )
-  
+
   def main(args: Array[String]): Unit = {
     var xs = makeFromInt(5)
     for (x <- xs ; y <- xs) {
       assert(x == y, x + " == " + y)
       assert(hash(x) == hash(y), "hash(%s) == hash(%s)".format(x, y))
     }
-    
+
     xs = makeFromInt(-5)
     for (x <- xs ; y <- xs) {
       assert(x == y, x + " == " + y)
       assert(hash(x) == hash(y), "hash(%s) == hash(%s)".format(x, y))
     }
-    
+
     xs = makeFromDouble(500.0)
     for (x <- xs ; y <- xs) {
       assert(x == y, x + " == " + y)
       assert(hash(x) == hash(y), "hash(%s) == hash(%s)".format(x, y))
     }
-    
+
     // negatives
     val bigLong = new java.util.concurrent.atomic.AtomicLong(Long.MaxValue)
     assert(-1 != bigLong && bigLong != -1)  // bigLong.intValue() == -1
diff --git a/test/files/run/eta-expand-star2.check b/test/files/run/eta-expand-star2.check
index ce01362..cbf4781 100644
--- a/test/files/run/eta-expand-star2.check
+++ b/test/files/run/eta-expand-star2.check
@@ -1 +1,2 @@
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
 hello
diff --git a/test/files/run/exceptions-2.check b/test/files/run/exceptions-2.check
index 9a3044c..4f82448 100644
--- a/test/files/run/exceptions-2.check
+++ b/test/files/run/exceptions-2.check
@@ -1,3 +1,6 @@
+exceptions-2.scala:267: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    try { 1 } catch { case e: java.io.IOException => () }
+          ^
 nested1: 
 Innermost finally
 Outermost finally
diff --git a/test/files/run/exceptions-2.scala b/test/files/run/exceptions-2.scala
index d0312a4..8d755c3 100644
--- a/test/files/run/exceptions-2.scala
+++ b/test/files/run/exceptions-2.scala
@@ -42,14 +42,14 @@ object NoExcep {
   def method4 = try {
     Console.println("..");
   } catch {
-    case _ => error("..");
+    case _: Throwable => sys.error("..");
   }
 }
 
 object Test {
   def nested1: Unit = try {
     try {
-      error("nnnnoooo");
+      sys.error("nnnnoooo");
     } finally {
       Console.println("Innermost finally");
     }
@@ -59,7 +59,7 @@ object Test {
 
   def nested2 =  try {
     try {
-      error("nnnnoooo");
+      sys.error("nnnnoooo");
     } finally {
       Console.println("Innermost finally");
     }
@@ -68,7 +68,7 @@ object Test {
     Console.println("Outermost finally");
   }
 
-  def mixed = 
+  def mixed =
     try {
       if (10 > 0)
         throw Leaf(10);
@@ -107,7 +107,7 @@ object Test {
       case Leaf(a) => Console.println(a);
     }
   } catch {
-    case npe: NullPointerException => 
+    case npe: NullPointerException =>
       Console.println("Caught an NPE");
   }
 
@@ -134,74 +134,74 @@ object Test {
       ()
     } finally {
       try {
-        error("a");
+        sys.error("a");
       } catch {
-        case _ => Console.println("Silently ignore exception in finally");
+        case _: Throwable => Console.println("Silently ignore exception in finally");
       }
     }
   }
 
-  def valInFinally: Unit = 
-    try {    
+  def valInFinally: Unit =
+    try {
     } finally {
       val fin = "Abc";
       Console.println(fin);
-    };
+    }
 
-  def tryAndValInFinally: Unit = 
+  def tryAndValInFinally: Unit =
     try {
     } finally {
       val fin = "Abc";
       try {
         Console.println(fin);
-      } catch { case _ => () }
-    };
+      } catch { case _: Throwable => () }
+    }
 
-  def returnInBody: Unit = try { 
+  def returnInBody: Unit = try {
     try {
       Console.println("Normal execution...");
-      return 
+      return
       Console.println("non reachable code");
     } finally {
       Console.println("inner finally");
     }
-  } finally { 
+  } finally {
     Console.println("Outer finally");
   }
 
-  def returnInBodySynch: Unit = try { 
+  def returnInBodySynch: Unit = try {
     synchronized {
       try {
         Console.println("Synchronized normal execution...");
-        return 
+        return
         Console.println("non reachable code");
       } finally {
         Console.println("inner finally");
       }
     }
-  } finally { 
+  } finally {
     Console.println("Outer finally");
   }
 
 
-  def returnInBodyAndInFinally: Unit = try { 
+  def returnInBodyAndInFinally: Unit = try {
     try {
       Console.println("Normal execution...");
-      return 
+      return
       Console.println("non reachable code");
     } finally {
       Console.println("inner finally");
       return
     }
-  } finally { 
+  } finally {
     Console.println("Outer finally");
     return
   }
 
-  def returnInBodyAndInFinally2: Unit = try { 
+  def returnInBodyAndInFinally2: Unit = try {
     try {
       Console.println("Normal execution...");
-      return 
+      return
       Console.println("non reachable code");
     } finally {
       try {
@@ -211,7 +211,7 @@ object Test {
         Console.println("finally inside finally");
       }
     }
-  } finally { 
+  } finally {
     Console.println("Outer finally");
     return
   }
@@ -249,11 +249,11 @@ object Test {
   def execute(f: => Unit) = try {
     f;
   } catch {
-    case _ => ();
+    case _: Throwable => ()
   }
 
 
-  def returnWithFinallyClean: Int = try { 
+  def returnWithFinallyClean: Int = try {
     try {
       Console.println("Normal execution...");
       return 10
@@ -262,7 +262,7 @@ object Test {
     } finally {
       Console.println("inner finally");
     }
-  } finally { 
+  } finally {
     Console.println("Outer finally");
     try { 1 } catch { case e: java.io.IOException => () }
   }
@@ -294,7 +294,7 @@ object Test {
 
     Console.println("mixed: ");
     execute(mixed);
-    
+
     Console.println("withValue1:");
     execute(withValue1);
 
@@ -322,7 +322,7 @@ object Test {
 
     Console.println("NoExcep.method3:");
     execute(NoExcep.method3);
-    
+
     Console.println("NoExcep.method4:");
     execute(NoExcep.method4);
 
diff --git a/test/files/run/exceptions-nest.scala b/test/files/run/exceptions-nest.scala
index 841e6b1..432d600 100644
--- a/test/files/run/exceptions-nest.scala
+++ b/test/files/run/exceptions-nest.scala
@@ -5,9 +5,9 @@ object Test extends App {
   println(test3)
   println(test4)
   println(test5)
-  try { println(test6) } catch { case _ => println("OK") }
+  try { println(test6) } catch { case _: Throwable => println("OK") }
   println(test7)
-  try { println(test8) } catch { case _ => println("OK") }
+  try { println(test8) } catch { case _: Throwable => println("OK") }
   println(test9)
   println(test10)
   println(test11)
@@ -19,7 +19,7 @@ object Test extends App {
       x = 2
     } catch {
       case _: NullPointerException => x = 3
-      case _ => x = 4
+      case _: Throwable => x = 4
     }
     x
   }
@@ -31,12 +31,12 @@ object Test extends App {
       try {
         x = 21
       } catch {
-        case _ => x = 22
+        case _: Throwable => x = 22
       }
       x = 23
     } catch {
       case _: NullPointerException => x = 3
-      case _ => x = 4
+      case _: Throwable => x = 4
     }
     x
   }
@@ -44,10 +44,10 @@ object Test extends App {
   def test3 = {
     var x = 1
     try {
-      try{x = 2} catch { case _ => x = 4 }
+      try{x = 2} catch { case _: Throwable => x = 4 }
     } catch {
       case _: NullPointerException => x = 3
-      case _ => x = 4
+      case _: Throwable => x = 4
     }
     x
   }
@@ -58,7 +58,7 @@ object Test extends App {
       x = 2
     } catch {
       case _: NullPointerException => x = 3
-      case _ => x = 4
+      case _: Throwable => x = 4
     }
     try {
       x = 5
@@ -73,8 +73,8 @@ object Test extends App {
     try {
       x = 2
     } catch {
-      case _: NullPointerException => try { x = 3 } catch { case f => throw f }
-      case _ => x = 4; try { x = 41 } catch { case _: Exception => x = 42 }; x = 43
+      case _: NullPointerException => try { x = 3 } catch { case f: Throwable => throw f }
+      case _: Throwable => x = 4; try { x = 41 } catch { case _: Exception => x = 42 }; x = 43
     }
     x
   }
@@ -87,7 +87,7 @@ object Test extends App {
     } catch {
       case e: NullPointerException =>
         throw e
-      case _ =>
+      case _: Throwable =>
         x = 3
         return 1000
     } finally {
@@ -105,7 +105,7 @@ object Test extends App {
       try {
         x = 4
       } catch {
-        case _ => x = 5
+        case _: Throwable => x = 5
       }
     }
     x
@@ -116,7 +116,7 @@ object Test extends App {
     try {
       throw new NullPointerException
     } catch {
-      case e => throw e
+      case e: Throwable => throw e
     }
     x
   }
@@ -124,7 +124,7 @@ object Test extends App {
   def test9 = {
     try { "" match {
       case s: String => 10
-    }} catch { case _ => 20 }
+    }} catch { case _: Throwable => 20 }
   }
 
   var x10 = 1
@@ -135,7 +135,7 @@ object Test extends App {
 
    def test11 {
     try { () }
-    catch { case e => () }
+    catch { case e: Throwable => () }
   }
 
   class E1 extends Exception
diff --git a/test/files/run/exceptions.scala b/test/files/run/exceptions.scala
index fc3566f..f0fe769 100644
--- a/test/files/run/exceptions.scala
+++ b/test/files/run/exceptions.scala
@@ -6,8 +6,8 @@
 
 abstract class IntMap[A] {
     def lookup(key: Int): A = this match {
-        case Empty() => error("KO")
-        case _ => error("ok")
+        case Empty() => sys.error("KO")
+        case _ => sys.error("ok")
     }
 }
 
@@ -32,7 +32,7 @@ object exceptions {
         val value = try {
             map.lookup(key)
         } catch {
-            case e => e.getMessage()
+            case e: Throwable => e.getMessage()
         }
         check("lookup(" + key + ")", value, "KO");
     }
diff --git a/test/files/run/existential-rangepos.check b/test/files/run/existential-rangepos.check
new file mode 100644
index 0000000..1212b60
--- /dev/null
+++ b/test/files/run/existential-rangepos.check
@@ -0,0 +1,13 @@
+[[syntax trees at end of                    patmat]] // newSource1.scala
+[0:76]package [0:0]<empty> {
+  [0:76]abstract class A[[17:18]T[17:18]] extends [20:76][76]scala.AnyRef {
+    [76]def <init>(): [20]A[T] = [76]{
+      [76][76][76]A.super.<init>();
+      [20]()
+    };
+    [24:51]private[this] val foo: [28]Set[_ <: T] = [47:51]null;
+    [28]<stable> <accessor> def foo: [28]Set[_ <: T] = [28][28]A.this.foo;
+    [54:74]<stable> <accessor> def bar: [58]Set[_ <: T]
+  }
+}
+
diff --git a/test/files/run/existential-rangepos.scala b/test/files/run/existential-rangepos.scala
new file mode 100644
index 0000000..7d2b081
--- /dev/null
+++ b/test/files/run/existential-rangepos.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest._
+
+object Test extends DirectTest {
+  override def extraSettings: String = "-usejavacp -Yrangepos -Xprint:patmat -Xprint-pos -d " + testOutput.path
+
+  override def code = """
+abstract class A[T] {
+  val foo: Set[_ <: T] = null
+  val bar: Set[_ <: T]
+}""".trim
+
+  override def show(): Unit = Console.withErr(System.out)(compile())
+}
diff --git a/test/files/run/existentials-in-compiler.check b/test/files/run/existentials-in-compiler.check
index 0d7a929..b0d8528 100644
--- a/test/files/run/existentials-in-compiler.check
+++ b/test/files/run/existentials-in-compiler.check
@@ -8,22 +8,22 @@ abstract trait BippyLike[A <: AnyRef, B <: List[A], This <: extest.BippyLike[A,B
     extest.BippyLike[A,B,This] forSome { A <: AnyRef; B <: List[A]; This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B] }
 
 abstract trait Contra[-A >: AnyRef, -B] extends AnyRef
-    extest.Contra[_ >: AnyRef, _]
+    extest.Contra[AnyRef, _]
 
 abstract trait ContraLike[-A >: AnyRef, -B >: List[A]] extends AnyRef
     extest.ContraLike[A,B] forSome { -A >: AnyRef; -B >: List[A] }
 
 abstract trait Cov01[+A <: AnyRef, +B] extends AnyRef
-    extest.Cov01[_ <: AnyRef, _]
+    extest.Cov01[AnyRef,Any]
 
 abstract trait Cov02[+A <: AnyRef, B] extends AnyRef
-    extest.Cov02[_ <: AnyRef, _]
+    extest.Cov02[AnyRef, _]
 
 abstract trait Cov03[+A <: AnyRef, -B] extends AnyRef
-    extest.Cov03[_ <: AnyRef, _]
+    extest.Cov03[AnyRef, _]
 
 abstract trait Cov04[A <: AnyRef, +B] extends AnyRef
-    extest.Cov04[_ <: AnyRef, _]
+    extest.Cov04[_ <: AnyRef, Any]
 
 abstract trait Cov05[A <: AnyRef, B] extends AnyRef
     extest.Cov05[_ <: AnyRef, _]
@@ -32,7 +32,7 @@ abstract trait Cov06[A <: AnyRef, -B] extends AnyRef
     extest.Cov06[_ <: AnyRef, _]
 
 abstract trait Cov07[-A <: AnyRef, +B] extends AnyRef
-    extest.Cov07[_ <: AnyRef, _]
+    extest.Cov07[_ <: AnyRef, Any]
 
 abstract trait Cov08[-A <: AnyRef, B] extends AnyRef
     extest.Cov08[_ <: AnyRef, _]
@@ -41,16 +41,16 @@ abstract trait Cov09[-A <: AnyRef, -B] extends AnyRef
     extest.Cov09[_ <: AnyRef, _]
 
 abstract trait Cov11[+A <: AnyRef, +B <: List[_]] extends AnyRef
-    extest.Cov11[_ <: AnyRef, _ <: List[_]]
+    extest.Cov11[AnyRef,List[_]]
 
 abstract trait Cov12[+A <: AnyRef, B <: List[_]] extends AnyRef
-    extest.Cov12[_ <: AnyRef, _ <: List[_]]
+    extest.Cov12[AnyRef, _ <: List[_]]
 
 abstract trait Cov13[+A <: AnyRef, -B <: List[_]] extends AnyRef
-    extest.Cov13[_ <: AnyRef, _ <: List[_]]
+    extest.Cov13[AnyRef, _ <: List[_]]
 
 abstract trait Cov14[A <: AnyRef, +B <: List[_]] extends AnyRef
-    extest.Cov14[_ <: AnyRef, _ <: List[_]]
+    extest.Cov14[_ <: AnyRef, List[_]]
 
 abstract trait Cov15[A <: AnyRef, B <: List[_]] extends AnyRef
     extest.Cov15[_ <: AnyRef, _ <: List[_]]
@@ -59,7 +59,7 @@ abstract trait Cov16[A <: AnyRef, -B <: List[_]] extends AnyRef
     extest.Cov16[_ <: AnyRef, _ <: List[_]]
 
 abstract trait Cov17[-A <: AnyRef, +B <: List[_]] extends AnyRef
-    extest.Cov17[_ <: AnyRef, _ <: List[_]]
+    extest.Cov17[_ <: AnyRef, List[_]]
 
 abstract trait Cov18[-A <: AnyRef, B <: List[_]] extends AnyRef
     extest.Cov18[_ <: AnyRef, _ <: List[_]]
@@ -68,16 +68,16 @@ abstract trait Cov19[-A <: AnyRef, -B <: List[_]] extends AnyRef
     extest.Cov19[_ <: AnyRef, _ <: List[_]]
 
 abstract trait Cov21[+A, +B] extends AnyRef
-    extest.Cov21[_, _]
+    extest.Cov21[Any,Any]
 
 abstract trait Cov22[+A, B] extends AnyRef
-    extest.Cov22[_, _]
+    extest.Cov22[Any, _]
 
 abstract trait Cov23[+A, -B] extends AnyRef
-    extest.Cov23[_, _]
+    extest.Cov23[Any, _]
 
 abstract trait Cov24[A, +B] extends AnyRef
-    extest.Cov24[_, _]
+    extest.Cov24[_, Any]
 
 abstract trait Cov25[A, B] extends AnyRef
     extest.Cov25[_, _]
@@ -86,7 +86,7 @@ abstract trait Cov26[A, -B] extends AnyRef
     extest.Cov26[_, _]
 
 abstract trait Cov27[-A, +B] extends AnyRef
-    extest.Cov27[_, _]
+    extest.Cov27[_, Any]
 
 abstract trait Cov28[-A, B] extends AnyRef
     extest.Cov28[_, _]
@@ -122,16 +122,16 @@ abstract trait Cov39[-A, -B, C <: Tuple2[_, _]] extends AnyRef
     extest.Cov39[_, _, _ <: Tuple2[_, _]]
 
 abstract trait Cov41[+A >: Null, +B] extends AnyRef
-    extest.Cov41[_ >: Null, _]
+    extest.Cov41[Any,Any]
 
 abstract trait Cov42[+A >: Null, B] extends AnyRef
-    extest.Cov42[_ >: Null, _]
+    extest.Cov42[Any, _]
 
 abstract trait Cov43[+A >: Null, -B] extends AnyRef
-    extest.Cov43[_ >: Null, _]
+    extest.Cov43[Any, _]
 
 abstract trait Cov44[A >: Null, +B] extends AnyRef
-    extest.Cov44[_ >: Null, _]
+    extest.Cov44[_ >: Null, Any]
 
 abstract trait Cov45[A >: Null, B] extends AnyRef
     extest.Cov45[_ >: Null, _]
@@ -140,7 +140,7 @@ abstract trait Cov46[A >: Null, -B] extends AnyRef
     extest.Cov46[_ >: Null, _]
 
 abstract trait Cov47[-A >: Null, +B] extends AnyRef
-    extest.Cov47[_ >: Null, _]
+    extest.Cov47[_ >: Null, Any]
 
 abstract trait Cov48[-A >: Null, B] extends AnyRef
     extest.Cov48[_ >: Null, _]
@@ -149,7 +149,7 @@ abstract trait Cov49[-A >: Null, -B] extends AnyRef
     extest.Cov49[_ >: Null, _]
 
 abstract trait Covariant[+A <: AnyRef, +B] extends AnyRef
-    extest.Covariant[_ <: AnyRef, _]
+    extest.Covariant[AnyRef,Any]
 
 abstract trait CovariantLike[+A <: AnyRef, +B <: List[A], +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B]] extends AnyRef
     extest.CovariantLike[A,B,This] forSome { +A <: AnyRef; +B <: List[A]; +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B] }
diff --git a/test/files/run/existentials-in-compiler.scala b/test/files/run/existentials-in-compiler.scala
index c69d121..d019d56 100644
--- a/test/files/run/existentials-in-compiler.scala
+++ b/test/files/run/existentials-in-compiler.scala
@@ -1,3 +1,6 @@
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
 import scala.tools.nsc._
 import scala.tools.partest.CompilerTest
 import scala.collection.{ mutable, immutable, generic }
@@ -71,14 +74,13 @@ package extest {
 }
   """
 
-  def check(source: String, unit: global.CompilationUnit) = {
-    getRequiredPackage("extest").moduleClass.info.decls.toList.filter(_.isType).map(_.initialize).sortBy(_.name.toString) foreach { clazz =>
-      afterTyper {
+  override def check(source: String, unit: global.CompilationUnit) {
+    getPackage(TermName("extest")).moduleClass.info.decls.toList.filter(_.isType).map(_.initialize).sortBy(_.name.toString) foreach { clazz =>
+      exitingTyper {
         clazz.info
         println(clazz.defString)
         println("    " + classExistentialType(clazz) + "\n")
       }
     }
-    true
   }
 }
diff --git a/test/files/run/existentials.scala b/test/files/run/existentials.scala
index 3977d47..bdd6fb9 100644
--- a/test/files/run/existentials.scala
+++ b/test/files/run/existentials.scala
@@ -1,8 +1,11 @@
+import scala.language.existentials
+import scala.language.reflectiveCalls
+
 class Foo {
   class Line {
     case class Cell[T](var x: T)
     def f[T](x: Any): Cell[t1] forSome { type t1 } = x match { case y: Cell[t] => y }
-    
+
     var x: Cell[T] forSome { type T } = new Cell(1)
     println({ x = new Cell("abc"); x })
   }
@@ -12,7 +15,7 @@ class FooW {
   class Line {
     case class Cell[T](var x: T)
     def f[T](x: Any): Cell[ _ ] = x match { case y: Cell[t] => y }
-    
+
     var x: Cell[_] = new Cell(1)
     println({ x = new Cell("abc"); x })
   }
diff --git a/test/files/run/existentials3-new.check b/test/files/run/existentials3-new.check
index 8f7dd70..7f02866 100644
--- a/test/files/run/existentials3-new.check
+++ b/test/files/run/existentials3-new.check
@@ -1,8 +1,8 @@
 Bar.type, t=TypeRef, s=type Bar.type
 Bar, t=TypeRef, s=type Bar
-Test.ToS, t=RefinedType, s=f3
-Test.ToS, t=RefinedType, s=f4
-Test.ToS, t=RefinedType, s=f5
+Test.ToS, t=RefinedType, s=<refinement of Test.ToS>
+Test.ToS, t=RefinedType, s=<refinement of Test.ToS>
+Test.ToS, t=RefinedType, s=<refinement of Test.ToS>
 () => Test.ToS, t=TypeRef, s=trait Function0
 () => Test.ToS, t=TypeRef, s=trait Function0
 $anon, t=TypeRef, s=type $anon
@@ -12,9 +12,9 @@ List[Seq[Int]], t=TypeRef, s=class List
 List[Seq[U forSome { type U <: Int }]], t=TypeRef, s=class List
 Bar.type, t=TypeRef, s=type Bar.type
 Bar, t=TypeRef, s=type Bar
-Test.ToS, t=RefinedType, s=g3
-Test.ToS, t=RefinedType, s=g4
-Test.ToS, t=RefinedType, s=g5
+Test.ToS, t=RefinedType, s=<refinement of Test.ToS>
+Test.ToS, t=RefinedType, s=<refinement of Test.ToS>
+Test.ToS, t=RefinedType, s=<refinement of Test.ToS>
 () => Test.ToS, t=TypeRef, s=trait Function0
 () => Test.ToS, t=TypeRef, s=trait Function0
 $anon, t=TypeRef, s=type $anon
diff --git a/test/files/run/existentials3-new.scala b/test/files/run/existentials3-new.scala
index 110c8ef..5dfd7fb 100644
--- a/test/files/run/existentials3-new.scala
+++ b/test/files/run/existentials3-new.scala
@@ -1,4 +1,6 @@
+import scala.language.existentials
 import scala.reflect.runtime.universe._
+import internal._
 
 object Test {
   trait ToS { final override def toString = getClass.getName }
@@ -34,7 +36,7 @@ object Test {
   val g12 = { abstract class A extends Seq[U forSome { type U <: Int }] ; List[A]() }
 
   def printTpe(t: Type) = {
-    val s = if (t.typeSymbol.isFreeType) t.typeSymbol.typeSignature.toString else t.typeSymbol.toString
+    val s = if (isFreeType(t.typeSymbol)) t.typeSymbol.info.toString else t.typeSymbol.toString
     println("%s, t=%s, s=%s".format(t, t.asInstanceOf[Product].productPrefix, s))
   }
   def m[T: TypeTag](x: T) = printTpe(typeOf[T])
@@ -77,4 +79,4 @@ object Misc {
   }
   def g1 = o1.f1 _
   def g2 = o1.f2 _
-}
\ No newline at end of file
+}
diff --git a/test/files/run/existentials3-old.scala b/test/files/run/existentials3-old.scala
index 944160f..c021c0e 100644
--- a/test/files/run/existentials3-old.scala
+++ b/test/files/run/existentials3-old.scala
@@ -1,3 +1,5 @@
+import scala.language.existentials
+
 object Test {
   trait ToS { final override def toString = getClass.getName }
 
diff --git a/test/files/run/exoticnames.check b/test/files/run/exoticnames.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/exoticnames.scala b/test/files/run/exoticnames.scala
index fa0e5e6..98f9a88 100644
--- a/test/files/run/exoticnames.scala
+++ b/test/files/run/exoticnames.scala
@@ -1,7 +1,7 @@
 // this is a run-test because the compiler should emit bytecode that'll pass the JVM's verifier
 object Test extends App {
-  def `(` = error("bla")
-  def `.` = error("bla")
-  def `)` = error("bla")
-  def `,` = error("bla")
+  def `(` = sys.error("bla")
+  def `.` = sys.error("bla")
+  def `)` = sys.error("bla")
+  def `,` = sys.error("bla")
 }
diff --git a/test/files/run/fail-non-value-types.scala b/test/files/run/fail-non-value-types.scala
index 51198a5..d9a69e1 100644
--- a/test/files/run/fail-non-value-types.scala
+++ b/test/files/run/fail-non-value-types.scala
@@ -18,8 +18,8 @@ object Test {
   def tcon[T: TypeTag](args: Type*) = appliedType(typeOf[T].typeConstructor, args.toList)
 
   def cil      = typeOf[CompletelyIndependentList[Int]]
-  def map      = cil.member("map": TermName).asMethod
-  def distinct = cil.member("distinct": TermName).asMethod
+  def map      = cil.member(TermName("map")).asMethod
+  def distinct = cil.member(TermName("distinct")).asMethod
 
   def main(args: Array[String]): Unit = {
     // Need the assert in there to fail.
@@ -32,9 +32,9 @@ object Test {
     // [B <: <?>, That <: <?>](f: <?>)(implicit cbf: <?>)That
     //
 
-    println(map.typeSignature)
-    println(map.typeSignatureIn(cil))
-    println(distinct.typeSignature)
+    println(map.info)
+    println(map.infoIn(cil))
+    println(distinct.info)
     if (failed) sys.exit(1)
   }
 }
diff --git a/test/files/run/finally.scala b/test/files/run/finally.scala
index 635123c..2c01eda 100644
--- a/test/files/run/finally.scala
+++ b/test/files/run/finally.scala
@@ -7,17 +7,17 @@ object Test extends App {
     try {
       bar
     } catch {
-      case e => println(e)
+      case e: Throwable => println(e)
     }
   }
-  
+
   // test that finally is not covered by any exception handlers.
   def bar {
     try {
       println("hi")
     }
     catch {
-      case e => println("SHOULD NOT GET HERE")
+      case e: Throwable => println("SHOULD NOT GET HERE")
     }
     finally {
       println("In Finally")
@@ -26,33 +26,33 @@ object Test extends App {
   }
 
   // return in catch (finally is executed)
-  def retCatch {      
+  def retCatch {
     try {
       throw new Exception
     } catch {
-      case e =>
+      case e: Throwable =>
         println(e);
         return
     } finally println("in finally")
   }
 
   // throw in catch (finally is executed, exception propagated)
-  def throwCatch {      
+  def throwCatch {
     try {
       throw new Exception
     } catch {
-      case e =>
+      case e: Throwable =>
         println(e);
         throw e
     } finally println("in finally")
   }
 
   // return inside body (finally is executed)
-  def retBody {      
+  def retBody {
     try {
       return
     } catch {
-      case e =>
+      case e: Throwable =>
         println(e);
         throw e
     } finally println("in finally")
@@ -63,7 +63,7 @@ object Test extends App {
     try {
       throw new Exception
     } catch {
-      case e =>
+      case e: Throwable =>
         println(e);
     } finally println("in finally")
   }
@@ -75,7 +75,7 @@ object Test extends App {
       finally {
         println("in finally 1")
         return
-      } 
+      }
     } finally println("in finally 2")
   }
 
@@ -89,17 +89,17 @@ object Test extends App {
         throw new Exception
       }
     } catch {
-      case e => println(e)
+      case e: Throwable => println(e)
     }
   }
 
   // nested finallies with return value
-  def nestedFinalies: Int = 
+  def nestedFinalies: Int =
     try {
       try {
         return 10
       } finally {
-        try { () } catch { case _ => () }
+        try { () } catch { case _: Throwable => () }
         println("in finally 1")
       }
     } finally {
@@ -111,7 +111,7 @@ object Test extends App {
     try {
       m
     } catch {
-      case e => println("COUGHT: " + e)
+      case e: Throwable => println("COUGHT: " + e)
     }
     println("-" * 40)
   }
diff --git a/test/files/run/flat-flat-flat.scala b/test/files/run/flat-flat-flat.scala
index d57696b..80868b9 100644
--- a/test/files/run/flat-flat-flat.scala
+++ b/test/files/run/flat-flat-flat.scala
@@ -2,7 +2,7 @@ object Test {
   def f1 = List(Iterator(Some(1), None, Some(2)), Iterator(Some(3), None))
   def f2 = Iterator(List(Some(1), None, Some(2)), List(Some(3), None), Nil)
   def f3 = List(Some(Iterator(1)), None, Some(Iterator(2, 3)))
-  
+
   def main(args: Array[String]): Unit = {
     assert(f1.flatten.flatten.toList == List(1, 2, 3))
     assert(f2.flatten.flatten.toList == List(1, 2, 3))
diff --git a/test/files/run/fors.check b/test/files/run/fors.check
index 08ecc8e..b459f00 100644
--- a/test/files/run/fors.check
+++ b/test/files/run/fors.check
@@ -13,15 +13,6 @@ a b c
 b c 
 b c 
 
-      
-<head><title>Scala</title></head>
-
-      
-<body>1 2 3</body>
-
-    
-<head><title>Scala</title></head>
-
 testNew
 3
 1 2 3 
@@ -35,12 +26,3 @@ testNew
 0 2 4 6 8 
 0 2 4 6 8 
 a b c 
-
-      
-<head><title>Scala</title></head>
-
-      
-<body>1 2 3</body>
-
-    
-<head><title>Scala</title></head>
diff --git a/test/files/run/fors.scala b/test/files/run/fors.scala
index 54afdc7..c778df3 100644
--- a/test/files/run/fors.scala
+++ b/test/files/run/fors.scala
@@ -12,12 +12,6 @@ object Test extends App {
 
   val ar = "abc".toCharArray
 
-  val xml =
-    <html>
-      <head><title>Scala</title></head>
-      <body>{xs}</body>
-    </html>;
-
   /////////////////// old syntax ///////////////////
 
   def testOld {
@@ -48,10 +42,6 @@ object Test extends App {
     for {x <- ar
          if x.toInt > 97} print(x + " "); println
 
-    // sequences
-    for (x <- xml.child) println(x)
-    for (x <- xml.child;
-         if x.label == "head") println(x)
   }
 
   /////////////////// new syntax ///////////////////
@@ -85,9 +75,6 @@ object Test extends App {
     // arrays
     for (x <- ar) print(x + " "); println
 
-    // sequences
-    for (x <- xml.child) println(x)
-    for (x <- xml.child if x.label == "head") println(x)
   }
 
   ////////////////////////////////////////////////////
diff --git a/test/files/run/forvaleq.scala b/test/files/run/forvaleq.scala
index 2a95880..8c1824a 100644
--- a/test/files/run/forvaleq.scala
+++ b/test/files/run/forvaleq.scala
@@ -2,7 +2,7 @@
 
 import scala.collection.immutable.Queue
 import scala.{List=>L}
-  
+
 object Test {
   // redefine some symbols to make it extra hard
   class List
@@ -16,11 +16,11 @@ object Test {
     case _ if (x<10) => x
     case _ => firstDigit(x / 10)
   }
-  
-  
+
+
   {
-    // a basic test case 
-    
+    // a basic test case
+
     val input = L.range(0,20)
     val oddFirstTimesTwo =
       for {x <- input
@@ -32,7 +32,7 @@ object Test {
 
   {
     // a test case with patterns
-    
+
     val input = L.range(0, 20)
     val oddFirstTimesTwo =
       for {x <- input
@@ -43,10 +43,10 @@ object Test {
         yield a + b
     println(oddFirstTimesTwo)
   }
-  
+
   {
     // make sure it works on non-Ls
-    
+
  //   val input: Queue = Queue.Empty[int].incl(L.range(0,20))
     val input = L.range(0, 20).iterator
     val oddFirstTimesTwo =
@@ -54,36 +54,36 @@ object Test {
           xf = firstDigit(x)
           if xf % 2 == 1}
         yield x*2
-    println(oddFirstTimesTwo.toList)    
+    println(oddFirstTimesTwo.toList)
   }
-  
+
   {
     // yield the computed value
-    
+
     val input = L.range(0,20)
     val oddFirstTimesTwo =
       for {x <- input
           xf = firstDigit(x)
           if xf % 2 == 1}
         yield xf*2
-    println(oddFirstTimesTwo)    
+    println(oddFirstTimesTwo)
   }
 
   {
     // make sure the function is only called once
     var count: Int = 0
-    
+
     def fdct(x: Int) = {
       count += 1
       firstDigit(x)
     }
-    
+
     val input = L.range(0,20)
     for {x <- input
          xf = fdct(x)
          if xf % 2 == 1}
       yield xf
-      
+
     println("called " + count + " times")
   }
 
diff --git a/test/files/run/freetypes_false_alarm2.scala b/test/files/run/freetypes_false_alarm2.scala
index 3499f13..a517f73 100644
--- a/test/files/run/freetypes_false_alarm2.scala
+++ b/test/files/run/freetypes_false_alarm2.scala
@@ -1,8 +1,9 @@
 import scala.reflect.runtime.universe._
 import scala.reflect.runtime.{universe => ru}
 import scala.tools.reflect.Eval
+import internal._
 
 object Test extends App {
   val tpe = typeOf[ru.Type]
-  println(tpe.typeSymbol.isFreeType)
+  println(isFreeType(tpe.typeSymbol))
 }
\ No newline at end of file
diff --git a/test/files/run/gadts.scala b/test/files/run/gadts.scala
index 4ab3ef6..57c7fc8 100644
--- a/test/files/run/gadts.scala
+++ b/test/files/run/gadts.scala
@@ -2,8 +2,8 @@ abstract class Term[T]
 case class Lit(x: Int) extends Term[Int]
 case class Succ(t: Term[Int]) extends Term[Int]
 case class IsZero(t: Term[Int]) extends Term[Boolean]
-case class If[T](c: Term[Boolean], 
-                 t1: Term[T], 
+case class If[T](c: Term[Boolean],
+                 t1: Term[T],
                  t2: Term[T]) extends Term[T]
 
 object Test extends App {
diff --git a/test/files/run/genericValueClass.scala b/test/files/run/genericValueClass.scala
index 68162bb..9398390 100644
--- a/test/files/run/genericValueClass.scala
+++ b/test/files/run/genericValueClass.scala
@@ -1,11 +1,14 @@
-final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal {
-  @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
-  def →[B](y: B): Tuple2[A, B] = ->(y)
-}
+
+import scala.language.implicitConversions
 
 object Test extends App {
+  class ArrowAssocClass[A](val __leftOfArrow: A) extends AnyVal {
+    @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
+    def →[B](y: B): Tuple2[A, B] = ->(y)
+  }
+
   {
-  @inline implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
+  @inline implicit def ArrowAssoc[A](x: A): ArrowAssocClass[A] = new ArrowAssocClass(x)
   val x = 1 -> "abc"
   println(x)
   }
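
[Editor's note] Tests that define implicit conversions now add `import scala.language.implicitConversions`, presumably to silence the SIP-18 feature warning for user-defined conversions. A minimal illustrative sketch outside the patch (ArrowDemo, Pair and toPair are hypothetical names):

    import scala.language.implicitConversions

    object ArrowDemo extends App {
      class Pair[A](val left: A) {
        def -->[B](right: B): (A, B) = (left, right)
      }
      // Without the language import above, defining this conversion would
      // trigger a feature warning under -feature.
      implicit def toPair[A](x: A): Pair[A] = new Pair(x)

      println(1 --> "abc")   // prints (1,abc)
    }
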
diff --git a/test/files/run/getClassTest-old.scala b/test/files/run/getClassTest-old.scala
index 951cc8d..cd1b6b0 100644
--- a/test/files/run/getClassTest-old.scala
+++ b/test/files/run/getClassTest-old.scala
@@ -50,9 +50,10 @@ class MoreAnyRefs {
   def f4 = (new A { def bippy() = 5 }).getClass()
 }
 
+ at deprecated("Suppress warnings", since="2.11")
 object Test {
   def returnTypes[T: Manifest] = (
-    manifest[T].erasure.getMethods.toList
+    manifest[T].runtimeClass.getMethods.toList
       filter (_.getName startsWith "f")
       sortBy (_.getName)
       map (m => m.getName + ": " + m.getGenericReturnType.toString)
diff --git a/test/files/run/global-showdef.scala b/test/files/run/global-showdef.scala
index 71ba7b8..1d4891f 100644
--- a/test/files/run/global-showdef.scala
+++ b/test/files/run/global-showdef.scala
@@ -1,6 +1,7 @@
 import scala.tools.nsc._
-import io.{ AbstractFile }
-import util.{ SourceFile, BatchSourceFile, stringFromStream }
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.util.stringFromStream
+import scala.reflect.internal.util.{ SourceFile, BatchSourceFile }
 import scala.tools.nsc.reporters.ConsoleReporter
 
 object Test {
@@ -39,8 +40,8 @@ object Bippy {
 
     new Global(settings)
   }
-  
-  def slurp(body: => Unit): String = stringFromStream { stream => 
+
+  def slurp(body: => Unit): String = stringFromStream { stream =>
     Console.withOut(stream) {
       Console.withErr(stream) {
         body
@@ -53,15 +54,15 @@ object Bippy {
       val run = new compiler.Run()
       run.compileSources(List(src))
     }
-    output split "\\n" toList
+    output.lines.toList
   }
   def showClass(name: String) = lines("-Yshow:typer", "-Xshow-class", name)
   def showObject(name: String) = lines("-Yshow:typer", "-Xshow-object", name)
-  
+
   def show(xs: List[String]) = {
     xs filter (x => (x contains "def showdefTestMember") || (x startsWith "<<-- ")) foreach println
   }
-  
+
   def main(args: Array[String]) {
     show(List("Bippy", "Bippy#BippyType", "Bippy.BippyType", "Bippy#Boppity", "Bippy#Boppity#Boo") flatMap showClass)
     show(List("Bippy", "Bippy#Boppity#Boo") flatMap showObject)
diff --git a/test/files/run/groupby.scala b/test/files/run/groupby.scala
index fe08f52..a751e65 100644
--- a/test/files/run/groupby.scala
+++ b/test/files/run/groupby.scala
@@ -3,8 +3,8 @@
 
 // Fixes #3422
 object Test {
-  
-  def main(args: Array[String]) { 
+
+  def main(args: Array[String]) {
     val arr = Array.range(0,10)
     val map = arr groupBy (_%2)
     val v1 = map(0)
@@ -14,5 +14,5 @@ object Test {
     // hash map by default.
     assert(v1 eq v2)
   }
-  
+
 }
diff --git a/test/files/run/hashCodeBoxesRunTime.scala b/test/files/run/hashCodeBoxesRunTime.scala
index 081a733..ba1a30f 100644
--- a/test/files/run/hashCodeBoxesRunTime.scala
+++ b/test/files/run/hashCodeBoxesRunTime.scala
@@ -4,16 +4,16 @@ object Test
 {
   import java.{ lang => jl }
   import scala.runtime.BoxesRunTime.{ hashFromNumber, hashFromObject }
-  
+
   def allSame[T](xs: List[T]) = assert(xs.distinct.size == 1, "failed: " + xs)
-  
+
   def mkNumbers(x: Int): List[Number] =
     List(x.toByte, x.toShort, x, x.toLong, x.toFloat, x.toDouble)
-  
+
   def testLDF(x: Long) = allSame(List[Number](x, x.toDouble, x.toFloat) map hashFromNumber)
-   
+
   def main(args: Array[String]): Unit = {
-    List(Byte.MinValue, -1, 0, 1, Byte.MaxValue) foreach { n => 
+    List(Byte.MinValue, -1, 0, 1, Byte.MaxValue) foreach { n =>
       val hashes = mkNumbers(n) map hashFromNumber
       allSame(hashes)
       if (n >= 0) {
@@ -21,7 +21,7 @@ object Test
         assert(charCode == hashes.head)
       }
     }
-    
+
     testLDF(Short.MaxValue.toLong)
     testLDF(Short.MinValue.toLong)
   }
diff --git a/test/files/run/hashhash.scala b/test/files/run/hashhash.scala
index f9fc067..3b9d147 100644
--- a/test/files/run/hashhash.scala
+++ b/test/files/run/hashhash.scala
@@ -1,7 +1,7 @@
 object Test {
   def confirmSame(x: Any)       = assert(x.## == x.hashCode, "%s.## != %s.hashCode".format(x, x))
   def confirmDifferent(x: Any)  = assert(x.## != x.hashCode, "%s.## == %s.hashCode (but should not)".format(x, x))
-    
+
   def main(args: Array[String]): Unit = {
     /** Just a little sanity check, not to be confused with a unit test. */
     List(5, 5.5f, "abc", new AnyRef, ()) foreach confirmSame
diff --git a/test/files/run/hashset.check b/test/files/run/hashset.check
new file mode 100644
index 0000000..9542a1f
--- /dev/null
+++ b/test/files/run/hashset.check
@@ -0,0 +1,26 @@
+*** HashSet primitives
+0 true,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true
+20 false,21 false,22 false,23 false,24 false,25 false,26 false,27 false,28 false,29 false,30 false,31 false,32 false,33 false,34 false,35 false,36 false,37 false,38 false,39 false
+0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19
+
+*** HashSet Strings with null
+null true
+0 true,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true
+20 false,21 false,22 false,23 false,24 false,25 false,26 false,27 false,28 false,29 false,30 false,31 false,32 false,33 false,34 false,35 false,36 false,37 false,38 false,39 false
+0,1,10,11,12,13,14,15,16,17,18,19,2,3,4,5,6,7,8,9,null
+null false
+0 false,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true
+
+*** ParHashSet primitives
+0 true,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true
+20 false,21 false,22 false,23 false,24 false,25 false,26 false,27 false,28 false,29 false,30 false,31 false,32 false,33 false,34 false,35 false,36 false,37 false,38 false,39 false
+0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19
+
+*** ParHashSet Strings with null
+null true
+0 true,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true
+20 false,21 false,22 false,23 false,24 false,25 false,26 false,27 false,28 false,29 false,30 false,31 false,32 false,33 false,34 false,35 false,36 false,37 false,38 false,39 false
+0,1,10,11,12,13,14,15,16,17,18,19,2,3,4,5,6,7,8,9,null
+null false
+0 false,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true
+
diff --git a/test/files/run/hashset.scala b/test/files/run/hashset.scala
new file mode 100644
index 0000000..a4d49c1
--- /dev/null
+++ b/test/files/run/hashset.scala
@@ -0,0 +1,48 @@
+import scala.collection.generic.{Growable, Shrinkable}
+import scala.collection.GenSet
+import scala.collection.mutable.FlatHashTable
+import scala.collection.mutable.HashSet
+import scala.collection.parallel.mutable.ParHashSet
+
+object Test extends App {
+  test(new Creator{
+    def create[A] = new HashSet[A]
+    def hashSetType = "HashSet"
+  })
+
+  test(new Creator{
+    def create[A] = new ParHashSet[A]
+    def hashSetType = "ParHashSet"
+  })
+
+
+  def test(creator : Creator) {
+    println("*** " + creator.hashSetType + " primitives")
+    val h1 = creator.create[Int]
+    for (i <- 0 until 20) h1 += i
+    println((for (i <- 0 until 20) yield i + " " + (h1 contains i)).toList.sorted mkString(","))
+    println((for (i <- 20 until 40) yield i + " " + (h1 contains i)).toList.sorted mkString(","))
+    println(h1.toList.sorted mkString ",")
+    println
+
+    println("*** " + creator.hashSetType + " Strings with null")
+    val h2 = creator.create[String]
+    h2 += null
+    for (i <- 0 until 20) h2 +=  "" + i
+    println("null " + (h2 contains null))
+    println((for (i <- 0 until 20) yield i + " " + (h2 contains ("" + i))).toList.sorted mkString(","))
+    println((for (i <- 20 until 40) yield i + " " + (h2 contains ("" + i))).toList.sorted mkString(","))
+    println((h2.toList map {x => "" + x}).sorted mkString ",")
+
+    h2 -= null
+    h2 -= "" + 0
+    println("null " + (h2 contains null))
+    println((for (i <- 0 until 20) yield i + " " + (h2 contains ("" + i))).toList.sorted mkString(","))
+    println
+  }
+
+   trait Creator {
+     def create[A] : GenSet[A] with Cloneable with FlatHashTable[A] with Growable[A] with Shrinkable[A]
+     def hashSetType : String
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/hashsetremove.check b/test/files/run/hashsetremove.check
new file mode 100644
index 0000000..8de9826
--- /dev/null
+++ b/test/files/run/hashsetremove.check
@@ -0,0 +1,6 @@
+remove 0 should be false, was false
+contains 1 should be true, was true
+remove 1 should be true, was true
+contains 1 should be false, was false
+remove 1 should be false, was false
+contains 1 should be false, was false
diff --git a/test/files/run/hashsetremove.scala b/test/files/run/hashsetremove.scala
new file mode 100644
index 0000000..7b82a99
--- /dev/null
+++ b/test/files/run/hashsetremove.scala
@@ -0,0 +1,13 @@
+import scala.collection.mutable.HashSet
+
+
+object Test extends App {
+  val h = new HashSet[Int]
+  h += 1
+  println(s"remove 0 should be false, was ${h remove 0}")
+  println(s"contains 1 should be true, was ${h contains 1}")
+  println(s"remove 1 should be true, was ${h remove 1}")
+  println(s"contains 1 should be false, was ${h contains 1}")
+  println(s"remove 1 should be false, was ${h remove 1}")
+  println(s"contains 1 should be false, was ${h contains 1}")
+ }
\ No newline at end of file
diff --git a/test/files/run/idempotency-case-classes.check b/test/files/run/idempotency-case-classes.check
index e045388..5a8d0ad 100644
--- a/test/files/run/idempotency-case-classes.check
+++ b/test/files/run/idempotency-case-classes.check
@@ -47,7 +47,7 @@ C(2,3)
     case <synthetic> def unapply(x$0: C): Option[(Int, Int)] = if (x$0.==(null))
       scala.this.None
     else
-      Some.apply[(Int, Int)](Tuple2.apply[Int, Int](x$0.x, x$0.y));
+      Some.apply[(Int, Int)](scala.Tuple2.apply[Int, Int](x$0.x, x$0.y));
     <synthetic> private def readResolve(): Object = C
   };
   Predef.println(C.apply(2, 3))
diff --git a/test/files/run/idempotency-case-classes.scala b/test/files/run/idempotency-case-classes.scala
index 4da8393..4ad1321 100644
--- a/test/files/run/idempotency-case-classes.scala
+++ b/test/files/run/idempotency-case-classes.scala
@@ -10,9 +10,9 @@ object Test extends App {
   }
   println(casee.eval)
   val tb = cm.mkToolBox()
-  val tcasee = tb.typeCheck(casee.tree)
+  val tcasee = tb.typecheck(casee.tree)
   println(tcasee)
-  val rtcasee = tb.resetAllAttrs(tcasee)
+  val rtcasee = tb.untypecheck(tcasee)
   try {
     println(tb.eval(rtcasee))
   } catch {
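
[Editor's note] The idempotency tests switch from the pre-2.11 ToolBox method names to the new ones: typeCheck becomes typecheck and resetAllAttrs becomes untypecheck. A minimal standalone sketch, not part of the patch (ToolBoxDemo and the local names are hypothetical):

    import scala.reflect.runtime.{currentMirror => cm}
    import scala.reflect.runtime.universe._
    import scala.tools.reflect.ToolBox

    object ToolBoxDemo extends App {
      val tb      = cm.mkToolBox()
      val tree    = q"List(1, 2, 3).map(x => x + 1)"
      val typed   = tb.typecheck(tree)     // attribute the tree
      val untyped = tb.untypecheck(typed)  // strip the attributes again
      println(showCode(untyped))
      println(tb.eval(tree))               // List(2, 3, 4)
    }
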
diff --git a/test/files/run/idempotency-extractors.scala b/test/files/run/idempotency-extractors.scala
index fe03329..8c0a0b1 100644
--- a/test/files/run/idempotency-extractors.scala
+++ b/test/files/run/idempotency-extractors.scala
@@ -10,9 +10,9 @@ object Test extends App {
   }
   println(extractor.eval)
   val tb = cm.mkToolBox()
-  val textractor = tb.typeCheck(extractor.tree)
+  val textractor = tb.typecheck(extractor.tree)
   println(textractor)
-  val rtextractor = tb.resetAllAttrs(textractor)
+  val rtextractor = tb.untypecheck(textractor)
   try {
     println(tb.eval(rtextractor))
   } catch {
diff --git a/test/files/run/idempotency-labels.scala b/test/files/run/idempotency-labels.scala
index 82d0097..084c93d 100644
--- a/test/files/run/idempotency-labels.scala
+++ b/test/files/run/idempotency-labels.scala
@@ -11,9 +11,9 @@ object Test extends App {
   }
   println(label.eval)
   val tb = cm.mkToolBox()
-  val tlabel = tb.typeCheck(label.tree)
+  val tlabel = tb.typecheck(label.tree)
   println(tlabel)
-  val rtlabel = tb.resetAllAttrs(tlabel)
+  val rtlabel = tb.untypecheck(tlabel)
   try {
     println(tb.eval(rtlabel))
   } catch {
diff --git a/test/files/run/idempotency-lazy-vals.scala b/test/files/run/idempotency-lazy-vals.scala
index 3531f9f..9d677ca 100644
--- a/test/files/run/idempotency-lazy-vals.scala
+++ b/test/files/run/idempotency-lazy-vals.scala
@@ -15,9 +15,9 @@ object Test extends App {
   }
   println(lazee.eval)
   val tb = cm.mkToolBox()
-  val tlazee = tb.typeCheck(lazee.tree)
+  val tlazee = tb.typecheck(lazee.tree)
   println(tlazee)
-  val rtlazee = tb.resetAllAttrs(tlazee)
+  val rtlazee = tb.untypecheck(tlazee)
   try {
     println(tb.eval(rtlazee))
   } catch {
diff --git a/test/files/run/idempotency-partial-functions.check b/test/files/run/idempotency-partial-functions.check
deleted file mode 100644
index 5c8a411..0000000
--- a/test/files/run/idempotency-partial-functions.check
+++ /dev/null
@@ -1,2 +0,0 @@
-error!!
-error!
diff --git a/test/files/run/idempotency-this.check b/test/files/run/idempotency-this.check
index 8faf703..88b8288 100644
--- a/test/files/run/idempotency-this.check
+++ b/test/files/run/idempotency-this.check
@@ -1,4 +1,4 @@
 List()
 List.apply[String]("")
-Apply(TypeApply(Select(Ident(scala.collection.immutable.List), newTermName("apply")), List(TypeTree().setOriginal(Select(Ident(scala.Predef), newTypeName("String"))))), List(Literal(Constant(""))))
+Apply(TypeApply(Select(Ident(scala.collection.immutable.List), TermName("apply")), List(TypeTree().setOriginal(Select(Ident(scala.Predef), TypeName("String"))))), List(Literal(Constant(""))))
 List()
diff --git a/test/files/run/idempotency-this.scala b/test/files/run/idempotency-this.scala
index 5cd4226..26917ab 100644
--- a/test/files/run/idempotency-this.scala
+++ b/test/files/run/idempotency-this.scala
@@ -9,10 +9,10 @@ object Test extends App {
   }
   println(thiss.eval)
   val tb = cm.mkToolBox()
-  val tthiss = tb.typeCheck(thiss.tree)
+  val tthiss = tb.typecheck(thiss.tree)
   println(tthiss)
   println(showRaw(tthiss))
-  val rtthiss = tb.resetAllAttrs(tthiss)
+  val rtthiss = tb.untypecheck(tthiss)
   try {
     println(tb.eval(rtthiss))
   } catch {
diff --git a/test/files/run/impconvtimes.scala b/test/files/run/impconvtimes.scala
index 8c5ab61..477a16a 100644
--- a/test/files/run/impconvtimes.scala
+++ b/test/files/run/impconvtimes.scala
@@ -1,3 +1,5 @@
+import scala.language.implicitConversions
+
 object Test {
   abstract class Unit
   object NoUnit extends Unit
diff --git a/test/files/run/implicits.scala b/test/files/run/implicits.scala
index a30f60f..5681a9d 100644
--- a/test/files/run/implicits.scala
+++ b/test/files/run/implicits.scala
@@ -1,3 +1,5 @@
+import scala.language.implicitConversions
+
 object A {
   object B {
     implicit def int2string(x: Int) = "["+x.toString+"]"
diff --git a/test/files/run/indexedSeq.scala b/test/files/run/indexedSeq.scala
index 9744f47..b1a2b1b 100644
--- a/test/files/run/indexedSeq.scala
+++ b/test/files/run/indexedSeq.scala
@@ -1,10 +1,11 @@
 object Test {
-  import scala.collection.{ mutable, immutable, generic }
-  
+  import scala.collection.immutable
+
   def checkIdentity[A](xs: immutable.IndexedSeq[A]) = assert(xs.toIndexedSeq eq xs)
-  
+
   def main(args: Array[String]): Unit = {
-    checkIdentity(immutable.Vector(1 to 10: _*))
-    checkIdentity(1 to 10 toIndexedSeq)
+    def r = 1 to 10
+    checkIdentity(immutable.Vector(r: _*))
+    checkIdentity(r.toIndexedSeq)
   }
 }
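
[Editor's note] The change above rewrites the postfix call `1 to 10 toIndexedSeq` into ordinary dot notation, which avoids the postfix-operator feature warning (the postfix form would otherwise want `import scala.language.postfixOps`). An illustrative sketch outside the patch (PostfixDemo is a hypothetical name):

    object PostfixDemo extends App {
      val r  = 1 to 10
      val xs = r.toIndexedSeq   // dot notation: no language import required
      // Postfix form would be `r toIndexedSeq`, plus import scala.language.postfixOps.
      println(xs)
    }
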
diff --git a/test/files/run/inferred-type-constructors.check b/test/files/run/inferred-type-constructors.check
new file mode 100644
index 0000000..5992ef0
--- /dev/null
+++ b/test/files/run/inferred-type-constructors.check
@@ -0,0 +1,56 @@
+warning: there were 2 feature warning(s); re-run with -feature for details
+                p.Iterable[Int]
+                p.Set[Int]
+                p.Seq[Int]
+                p.m.Set[Int]
+                p.m.Seq[Int]
+     private[m] p.m.ASet[Int]
+                p.i.Seq[Int]
+     private[i] p.i.ASet[Int]
+     private[i] p.i.ASeq[Int]
+                p.Iterable[Int]
+                p.Iterable[Int]
+                p.Iterable[Int]
+                p.Iterable[Int]
+                p.Iterable[Int]
+                p.Iterable[Int]
+                p.Iterable[Int]
+                p.Iterable[Int]
+                p.Iterable[Int]
+                p.Set[Int]
+                p.Iterable[Int]
+                p.Set[Int]
+                p.Iterable[Int]
+                p.Set[Int]
+                p.Iterable[Int]
+                p.Iterable[Int]
+                p.Seq[Int]
+                p.Iterable[Int]
+                p.Seq[Int]
+                p.Iterable[Int]
+                p.Seq[Int]
+                p.Iterable[Int]
+                p.m.Set[Int]
+                p.Iterable[Int]
+                p.Set[Int]
+                p.Iterable[Int]
+                p.Iterable[Int]
+                p.Seq[Int]
+                p.Iterable[Int]
+                p.Seq[Int]
+                p.Iterable[Int]
+     private[p] p.ASet[Int]
+     private[p] p.AIterable[Int]
+                p.Iterable[Int]
+                p.i.Seq[Int]
+     private[p] p.AIterable[Int]
+                List[Nothing]
+                scala.collection.immutable.Vector[Nothing]
+                scala.collection.immutable.Iterable[(Int, Int)]
+                scala.collection.immutable.Set[Int]
+                Seq[Int]
+                Array[Int]
+                scala.collection.AbstractSet[Int]
+                Comparable[java.lang.String]
+                scala.collection.immutable.LinearSeq[Int]
+                Iterable[Int]
diff --git a/test/files/run/inferred-type-constructors.scala b/test/files/run/inferred-type-constructors.scala
new file mode 100644
index 0000000..79a8653
--- /dev/null
+++ b/test/files/run/inferred-type-constructors.scala
@@ -0,0 +1,125 @@
+package p {
+  trait TCon[+CC[X]] {
+    def fPublic: CC[Int]                        = ???
+    private[p] def fPackagePrivate: CC[Int]     = ???
+    protected[p] def fPackageProtected: CC[Int] = ???
+  }
+  trait Iterable[+A] extends TCon[Iterable]
+  trait Set[A] extends Iterable[A] with TCon[Set]
+  trait Seq[+A] extends Iterable[A] with TCon[Seq]
+
+  private[p] abstract class AIterable[+A] extends Iterable[A]
+  private[p] abstract class ASeq[+A] extends AIterable[A] with Seq[A]
+  private[p] abstract class ASet[A] extends AIterable[A] with Set[A]
+
+  package m {
+    private[m] abstract class ASeq[A] extends p.ASeq[A] with Seq[A]
+    private[m] abstract class ASet[A] extends p.ASet[A] with Set[A]
+    trait Set[A] extends p.Set[A] with TCon[Set]
+    trait Seq[A] extends p.Seq[A] with TCon[Seq]
+    trait BitSet extends ASet[Int]
+    trait IntSeq extends ASeq[Int]
+  }
+
+  package i {
+    private[i] abstract class ASeq[+A] extends p.ASeq[A] with Seq[A]
+    private[i] abstract class ASet[A] extends p.ASet[A] with Set[A]
+    trait Set[A] extends p.Set[A] with TCon[Set]
+    trait Seq[+A] extends p.Seq[A] with TCon[Seq]
+    trait BitSet extends ASet[Int]
+    trait IntSeq extends ASeq[Int]
+  }
+}
+
+object Test {
+  import scala.reflect.runtime.universe._
+  // Complicated by the absence of usable type constructor type tags.
+  def extract[A, CC[X]](xs: CC[A]): CC[A] = xs
+  def whatis[T: TypeTag](x: T): Unit = {
+    val tpe = typeOf[T]
+    val access = tpe.typeSymbol.asInstanceOf[scala.reflect.internal.HasFlags].accessString.replaceAllLiterally("package ", "")
+    println(f"$access%15s $tpe")
+  }
+
+  trait IntIterable extends p.Iterable[Int]
+  trait IntSet extends p.Set[Int]
+  trait IntSeq extends p.Seq[Int]
+
+  trait MutableIntSet extends p.m.Set[Int]
+  trait MutableIntSeq extends p.m.Seq[Int]
+
+  trait ImmutableIntSet extends p.i.Set[Int]
+  trait ImmutableIntSeq extends p.i.Seq[Int]
+
+  def f1: IntIterable = null
+  def f2: IntSet = null
+  def f3: IntSeq = null
+
+  def g1: MutableIntSet = null
+  def g2: MutableIntSeq = null
+  def g3: p.m.BitSet = null
+
+  def h1: ImmutableIntSeq = null
+  def h2: p.i.BitSet = null
+  def h3: p.i.IntSeq = null
+
+  def main(args: Array[String]): Unit = {
+    whatis(extract(f1))
+    whatis(extract(f2))
+    whatis(extract(f3))
+    whatis(extract(g1))
+    whatis(extract(g2))
+    whatis(extract(g3))
+    whatis(extract(h1))
+    whatis(extract(h2))
+    whatis(extract(h3))
+
+    whatis(extract(if (true) f1 else f2))
+    whatis(extract(if (true) f1 else f3))
+    whatis(extract(if (true) f1 else g1))
+    whatis(extract(if (true) f1 else g2))
+    whatis(extract(if (true) f1 else g3))
+    whatis(extract(if (true) f1 else h1))
+    whatis(extract(if (true) f1 else h2))
+    whatis(extract(if (true) f1 else h3))
+    whatis(extract(if (true) f2 else f3))
+    whatis(extract(if (true) f2 else g1))
+    whatis(extract(if (true) f2 else g2))
+    whatis(extract(if (true) f2 else g3))
+    whatis(extract(if (true) f2 else h1))
+    whatis(extract(if (true) f2 else h2))
+    whatis(extract(if (true) f2 else h3))
+    whatis(extract(if (true) f3 else g1))
+    whatis(extract(if (true) f3 else g2))
+    whatis(extract(if (true) f3 else g3))
+    whatis(extract(if (true) f3 else h1))
+    whatis(extract(if (true) f3 else h2))
+    whatis(extract(if (true) f3 else h3))
+    whatis(extract(if (true) g1 else g2))
+    whatis(extract(if (true) g1 else g3))
+    whatis(extract(if (true) g1 else h1))
+    whatis(extract(if (true) g1 else h2))
+    whatis(extract(if (true) g1 else h3))
+    whatis(extract(if (true) g2 else g3))
+    whatis(extract(if (true) g2 else h1))
+    whatis(extract(if (true) g2 else h2))
+    whatis(extract(if (true) g2 else h3))
+    whatis(extract(if (true) g3 else h1))
+    whatis(extract(if (true) g3 else h2))
+    whatis(extract(if (true) g3 else h3))
+    whatis(extract(if (true) h1 else h2))
+    whatis(extract(if (true) h1 else h3))
+    whatis(extract(if (true) h2 else h3))
+
+    whatis(extract(Nil))
+    whatis(extract(Vector()))
+    whatis(extract(Map[Int,Int]()))
+    whatis(extract(Set[Int]()))
+    whatis(extract(Seq[Int]()))
+    whatis(extract(Array[Int]()))
+    whatis(extract(scala.collection.immutable.BitSet(1)))
+    whatis(extract("abc"))
+    whatis(extract(if (true) Stream(1) else List(1)))
+    whatis(extract(if (true) Seq(1) else Set(1)))
+  }
+}
diff --git a/test/files/run/infix.scala b/test/files/run/infix.scala
index 700e434..a867d03 100644
--- a/test/files/run/infix.scala
+++ b/test/files/run/infix.scala
@@ -9,4 +9,3 @@ object Test extends App {
     case null op (0, 0) op (1, 1) op (2, 2) => Console.println("OK")
   }
 }
-  
diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check
index 50a9d87..7c885d2 100644
--- a/test/files/run/inline-ex-handlers.check
+++ b/test/files/run/inline-ex-handlers.check
@@ -21,8 +21,8 @@
      92	RETURN(REF(class Object))
 @@ -246,3 +245,3 @@
    startBlock: 1
--  blocks: [1,2,3,4,5,6,7,8,10,11,12,13,14,15,16,17,18]
-+  blocks: [1,2,3,4,5,6,8,10,11,12,13,14,15,16,17,18]
+-  blocks: [1,2,3,4,5,6,7,8,11,12,13,14,15,16,17,18]
++  blocks: [1,2,3,4,5,6,8,11,12,13,14,15,16,17,18]
    
 @@ -257,5 +256,2 @@
      92	SCOPE_ENTER value x1
@@ -72,8 +72,8 @@
      106	CALL_METHOD scala.Predef.println (dynamic)
 @@ -518,3 +517,3 @@
    startBlock: 1
--  blocks: [1,2,3,4,6,7,8,9,10]
-+  blocks: [1,2,3,4,6,7,8,9,10,11,12,13]
+-  blocks: [1,2,3,4,6,7,9,10]
++  blocks: [1,3,4,6,7,9,10,11,12,13]
    
 @@ -547,4 +546,9 @@
      306	CALL_METHOD MyException.<init> (static-instance)
@@ -104,8 +104,12 @@
 +    ?	JUMP 13
 +    
    3: 
-@@ -575,2 +586,14 @@
+@@ -573,5 +584,14 @@
+     310	CALL_METHOD scala.Predef.println (dynamic)
+-    310	JUMP 2
++    300	RETURN(UNIT)
      
+-  2: 
 +  13: 
 +    310	LOAD_MODULE object Predef
 +    310	CALL_PRIMITIVE(StartConcat)
@@ -116,44 +120,42 @@
 +    310	CALL_PRIMITIVE(StringConcat(REF(class String)))
 +    310	CALL_PRIMITIVE(EndConcat)
 +    310	CALL_METHOD scala.Predef.println (dynamic)
-+    310	JUMP 2
-+    
-   2: 
-@@ -583,6 +606,6 @@
+     300	RETURN(UNIT)
+@@ -583,6 +603,6 @@
        with finalizer: null
--    catch (Throwable) in ArrayBuffer(7, 8, 9, 10) starting at: 6
-+    catch (Throwable) in ArrayBuffer(7, 8, 9, 10, 11) starting at: 6
+-    catch (Throwable) in ArrayBuffer(7, 9, 10) starting at: 6
++    catch (Throwable) in ArrayBuffer(7, 9, 10, 11) starting at: 6
        consisting of blocks: List(6)
        with finalizer: null
--    catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10) starting at: 3
-+    catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10, 11, 12) starting at: 3
+-    catch (Throwable) in ArrayBuffer(4, 6, 7, 9, 10) starting at: 3
++    catch (Throwable) in ArrayBuffer(4, 6, 7, 9, 10, 11, 12) starting at: 3
        consisting of blocks: List(3)
-@@ -618,3 +641,3 @@
+@@ -618,3 +638,3 @@
    startBlock: 1
--  blocks: [1,2,3,4,5,6,7,9,10]
-+  blocks: [1,2,3,4,5,6,7,9,10,11,12]
+-  blocks: [1,3,4,5,6,8,9]
++  blocks: [1,3,4,5,6,8,9,10,11]
    
-@@ -642,4 +665,10 @@
+@@ -642,4 +662,10 @@
      78	CALL_METHOD java.lang.IllegalArgumentException.<init> (static-instance)
 -    78	THROW(IllegalArgumentException)
 +    ?	STORE_LOCAL(value e)
-+    ?	JUMP 11
++    ?	JUMP 10
      
-+  11: 
++  10: 
 +    81	LOAD_LOCAL(value e)
 +    ?	STORE_LOCAL(variable exc1)
-+    ?	JUMP 12
++    ?	JUMP 11
 +    
-   9: 
-@@ -671,3 +700,4 @@
+   8: 
+@@ -668,3 +694,4 @@
      81	LOAD_LOCAL(value e)
 -    81	THROW(Exception)
 +    ?	STORE_LOCAL(variable exc1)
-+    ?	JUMP 12
++    ?	JUMP 11
      
-@@ -688,2 +718,15 @@
+@@ -685,2 +712,15 @@
      
-+  12: 
++  11: 
 +    83	LOAD_MODULE object Predef
 +    83	CONSTANT("finally")
 +    83	CALL_METHOD scala.Predef.println (dynamic)
@@ -167,33 +169,33 @@
 +    84	THROW(Throwable)
 +    
    }
-@@ -693,3 +736,3 @@
+@@ -690,3 +730,3 @@
        with finalizer: null
--    catch (<none>) in ArrayBuffer(4, 6, 7, 9) starting at: 3
-+    catch (<none>) in ArrayBuffer(4, 6, 7, 9, 11) starting at: 3
+-    catch (<none>) in ArrayBuffer(4, 5, 6, 8) starting at: 3
++    catch (<none>) in ArrayBuffer(4, 5, 6, 8, 10) starting at: 3
        consisting of blocks: List(3)
-@@ -717,5 +760,5 @@
+@@ -714,5 +754,5 @@
    def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
 -  locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value message, value x, value ex6, value x4, value x5, value message, value x
 +  locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value x, value ex6, value x4, value x5, value x
    startBlock: 1
--  blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25]
-+  blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25,26,27,28]
+-  blocks: [1,3,4,5,6,9,13,14,15,18,20,21,23,24]
++  blocks: [1,3,4,5,6,9,13,14,15,18,20,21,23,24,25,26,27]
    
-@@ -743,4 +786,11 @@
+@@ -740,4 +780,11 @@
      172	CALL_METHOD MyException.<init> (static-instance)
 -    172	THROW(MyException)
 +    ?	STORE_LOCAL(value ex6)
-+    ?	JUMP 26
++    ?	JUMP 25
      
-+  26: 
++  25: 
 +    170	LOAD_LOCAL(value ex6)
 +    170	STORE_LOCAL(value x4)
 +    170	SCOPE_ENTER value x4
-+    170	JUMP 15
++    170	JUMP 14
 +    
-   24: 
-@@ -786,8 +836,5 @@
+   23: 
+@@ -780,8 +827,5 @@
      175	SCOPE_ENTER value x5
 -    175	LOAD_LOCAL(value x5)
 -    175	CALL_METHOD MyException.message (dynamic)
@@ -204,7 +206,7 @@
 +    ?	LOAD_LOCAL(value x5)
 +    176	CALL_METHOD MyException.message (dynamic)
      176	CALL_METHOD scala.Predef.println (dynamic)
-@@ -795,5 +842,7 @@
+@@ -789,5 +833,7 @@
      177	DUP(REF(class MyException))
 -    177	LOAD_LOCAL(value message)
 +    ?	LOAD_LOCAL(value x5)
@@ -212,24 +214,24 @@
      177	CALL_METHOD MyException.<init> (static-instance)
 -    177	THROW(MyException)
 +    ?	STORE_LOCAL(value ex6)
-+    ?	JUMP 27
++    ?	JUMP 26
      
-@@ -801,3 +850,4 @@
+@@ -795,3 +841,4 @@
      170	LOAD_LOCAL(value ex6)
 -    170	THROW(Throwable)
 +    ?	STORE_LOCAL(value ex6)
-+    ?	JUMP 27
++    ?	JUMP 26
      
-@@ -811,2 +861,8 @@
+@@ -805,2 +852,8 @@
      
-+  27: 
++  26: 
 +    169	LOAD_LOCAL(value ex6)
 +    169	STORE_LOCAL(value x4)
 +    169	SCOPE_ENTER value x4
 +    169	JUMP 5
 +    
    5: 
-@@ -821,8 +877,5 @@
+@@ -815,8 +868,5 @@
      180	SCOPE_ENTER value x5
 -    180	LOAD_LOCAL(value x5)
 -    180	CALL_METHOD MyException.message (dynamic)
@@ -240,7 +242,7 @@
 +    ?	LOAD_LOCAL(value x5)
 +    181	CALL_METHOD MyException.message (dynamic)
      181	CALL_METHOD scala.Predef.println (dynamic)
-@@ -830,5 +883,7 @@
+@@ -824,5 +874,7 @@
      182	DUP(REF(class MyException))
 -    182	LOAD_LOCAL(value message)
 +    ?	LOAD_LOCAL(value x5)
@@ -248,17 +250,17 @@
      182	CALL_METHOD MyException.<init> (static-instance)
 -    182	THROW(MyException)
 +    ?	STORE_LOCAL(variable exc2)
-+    ?	JUMP 28
++    ?	JUMP 27
      
-@@ -836,3 +891,4 @@
+@@ -830,3 +882,4 @@
      169	LOAD_LOCAL(value ex6)
 -    169	THROW(Throwable)
 +    ?	STORE_LOCAL(variable exc2)
-+    ?	JUMP 28
++    ?	JUMP 27
      
-@@ -853,2 +909,15 @@
+@@ -847,2 +900,15 @@
      
-+  28: 
++  27: 
 +    184	LOAD_MODULE object Predef
 +    184	CONSTANT("finally")
 +    184	CALL_METHOD scala.Predef.println (dynamic)
@@ -272,16 +274,16 @@
 +    185	THROW(Throwable)
 +    
    }
-@@ -858,6 +927,6 @@
+@@ -852,6 +918,6 @@
        with finalizer: null
--    catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24) starting at: 4
-+    catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24, 26) starting at: 4
-       consisting of blocks: List(9, 6, 5, 4)
+-    catch (Throwable) in ArrayBuffer(13, 14, 15, 18, 20, 21, 23) starting at: 4
++    catch (Throwable) in ArrayBuffer(13, 14, 15, 18, 20, 21, 23, 25) starting at: 4
+       consisting of blocks: List(9, 8, 6, 5, 4)
        with finalizer: null
--    catch (<none>) in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24) starting at: 3
-+    catch (<none>) in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24, 26, 27) starting at: 3
+-    catch (<none>) in ArrayBuffer(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23) starting at: 3
++    catch (<none>) in ArrayBuffer(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23, 25, 26) starting at: 3
        consisting of blocks: List(3)
-@@ -885,5 +954,5 @@
+@@ -879,5 +945,5 @@
    def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
 -  locals: value args, variable result, value e, value ex6, value x4, value x5, value message, value x
 +  locals: value args, variable result, value e, value ex6, value x4, value x5, value x
@@ -289,7 +291,7 @@
 -  blocks: [1,2,3,6,7,8,11,13,14,16]
 +  blocks: [1,2,3,6,7,8,11,13,14,16,17]
    
-@@ -911,4 +980,11 @@
+@@ -905,4 +971,11 @@
      124	CALL_METHOD MyException.<init> (static-instance)
 -    124	THROW(MyException)
 +    ?	STORE_LOCAL(value ex6)
@@ -302,7 +304,7 @@
 +    122	JUMP 7
 +    
    16: 
-@@ -936,8 +1012,5 @@
+@@ -930,8 +1003,5 @@
      127	SCOPE_ENTER value x5
 -    127	LOAD_LOCAL(value x5)
 -    127	CALL_METHOD MyException.message (dynamic)
@@ -313,12 +315,12 @@
 +    ?	LOAD_LOCAL(value x5)
 +    127	CALL_METHOD MyException.message (dynamic)
      127	CALL_METHOD scala.Predef.println (dynamic)
-@@ -970,3 +1043,3 @@
+@@ -964,3 +1034,3 @@
        with finalizer: null
 -    catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16) starting at: 3
 +    catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3
        consisting of blocks: List(3)
-@@ -994,5 +1067,5 @@
+@@ -988,5 +1058,5 @@
    def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
 -  locals: value args, variable result, value ex6, value x4, value x5, value message, value x, value e
 +  locals: value args, variable result, value ex6, value x4, value x5, value x, value e
@@ -326,7 +328,7 @@
 -  blocks: [1,2,3,4,5,8,12,13,14,16]
 +  blocks: [1,2,3,5,8,12,13,14,16,17]
    
-@@ -1020,4 +1093,13 @@
+@@ -1014,4 +1084,13 @@
      148	CALL_METHOD MyException.<init> (static-instance)
 -    148	THROW(MyException)
 +    ?	STORE_LOCAL(value ex6)
@@ -341,13 +343,13 @@
 +    154	CZJUMP (BOOL)NE ? 5 : 8
 +    
    16: 
-@@ -1041,5 +1123,2 @@
+@@ -1035,5 +1114,2 @@
      145	SCOPE_ENTER value x4
 -    145	JUMP 4
 -    
 -  4: 
      154	LOAD_LOCAL(value x4)
-@@ -1053,8 +1132,5 @@
+@@ -1047,8 +1123,5 @@
      154	SCOPE_ENTER value x5
 -    154	LOAD_LOCAL(value x5)
 -    154	CALL_METHOD MyException.message (dynamic)
@@ -358,12 +360,12 @@
 +    ?	LOAD_LOCAL(value x5)
 +    154	CALL_METHOD MyException.message (dynamic)
      154	CALL_METHOD scala.Predef.println (dynamic)
-@@ -1275,3 +1351,3 @@
+@@ -1269,3 +1342,3 @@
    startBlock: 1
 -  blocks: [1,2,3,4,5,7]
 +  blocks: [1,2,3,4,5,7,8]
    
-@@ -1299,4 +1375,11 @@
+@@ -1293,4 +1366,11 @@
      38	CALL_METHOD java.lang.IllegalArgumentException.<init> (static-instance)
 -    38	THROW(IllegalArgumentException)
 +    ?	STORE_LOCAL(value e)
@@ -376,7 +378,7 @@
 +    42	JUMP 2
 +    
    7: 
-@@ -1346,5 +1429,5 @@
+@@ -1340,5 +1420,5 @@
    def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
 -  locals: value args, variable result, value ex6, value x4, value x5, value message, value x
 +  locals: value args, variable result, value ex6, value x4, value x5, value x
@@ -384,13 +386,13 @@
 -  blocks: [1,2,3,4,5,8,10,11,13,14,16]
 +  blocks: [1,2,3,5,8,10,11,13,14,16,17]
    
-@@ -1372,3 +1455,4 @@
+@@ -1366,3 +1446,4 @@
      203	CALL_METHOD MyException.<init> (static-instance)
 -    203	THROW(MyException)
 +    ?	STORE_LOCAL(value ex6)
 +    ?	JUMP 17
      
-@@ -1392,4 +1476,13 @@
+@@ -1386,4 +1467,13 @@
      209	CALL_METHOD MyException.<init> (static-instance)
 -    209	THROW(MyException)
 +    ?	STORE_LOCAL(value ex6)
@@ -405,13 +407,13 @@
 +    212	CZJUMP (BOOL)NE ? 5 : 8
 +    
    16: 
-@@ -1405,5 +1498,2 @@
+@@ -1399,5 +1489,2 @@
      200	SCOPE_ENTER value x4
 -    200	JUMP 4
 -    
 -  4: 
      212	LOAD_LOCAL(value x4)
-@@ -1417,8 +1507,5 @@
+@@ -1411,8 +1498,5 @@
      212	SCOPE_ENTER value x5
 -    212	LOAD_LOCAL(value x5)
 -    212	CALL_METHOD MyException.message (dynamic)
@@ -422,12 +424,12 @@
 +    ?	LOAD_LOCAL(value x5)
 +    213	CALL_METHOD MyException.message (dynamic)
      213	CALL_METHOD scala.Predef.println (dynamic)
-@@ -1466,3 +1553,3 @@
+@@ -1460,3 +1544,3 @@
    startBlock: 1
 -  blocks: [1,2,3,4,5,7]
 +  blocks: [1,2,3,4,5,7,8]
    
-@@ -1490,4 +1577,11 @@
+@@ -1484,4 +1568,11 @@
      58	CALL_METHOD java.lang.IllegalArgumentException.<init> (static-instance)
 -    58	THROW(IllegalArgumentException)
 +    ?	STORE_LOCAL(value e)
@@ -440,12 +442,12 @@
 +    62	JUMP 2
 +    
    7: 
-@@ -1539,3 +1633,3 @@
+@@ -1533,3 +1624,3 @@
    startBlock: 1
--  blocks: [1,2,3,4]
-+  blocks: [1,2,3,4,5]
+-  blocks: [1,3,4]
++  blocks: [1,3,4,5]
    
-@@ -1559,4 +1653,9 @@
+@@ -1553,4 +1644,9 @@
      229	CALL_METHOD MyException.<init> (static-instance)
 -    229	THROW(MyException)
 +    ?	JUMP 5
@@ -456,20 +458,20 @@
 +    228	THROW(Throwable)
 +    
    3: 
-@@ -1565,3 +1664,3 @@
+@@ -1559,3 +1655,3 @@
      228	MONITOR_EXIT
 -    ?	THROW(Throwable)
 +    228	THROW(Throwable)
      
-@@ -1593,5 +1692,5 @@
+@@ -1587,5 +1683,5 @@
    def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
 -  locals: value args, variable result, variable monitor2, variable monitorResult1
 +  locals: value exception$1, value args, variable result, variable monitor2, variable monitorResult1
    startBlock: 1
--  blocks: [1,2,3,4]
-+  blocks: [1,2,3,4,5]
+-  blocks: [1,3,4]
++  blocks: [1,3,4,5]
    
-@@ -1618,4 +1717,12 @@
+@@ -1612,4 +1708,12 @@
      245	CALL_METHOD MyException.<init> (static-instance)
 -    245	THROW(MyException)
 +    ?	STORE_LOCAL(value exception$1)
@@ -483,7 +485,7 @@
 +    244	THROW(Throwable)
 +    
    3: 
-@@ -1624,3 +1731,3 @@
+@@ -1618,3 +1722,3 @@
      244	MONITOR_EXIT
 -    ?	THROW(Throwable)
 +    244	THROW(Throwable)
diff --git a/test/files/run/inline-ex-handlers.scala b/test/files/run/inline-ex-handlers.scala
index a96b938..964594d 100644
--- a/test/files/run/inline-ex-handlers.scala
+++ b/test/files/run/inline-ex-handlers.scala
@@ -1,7 +1,7 @@
-import scala.tools.partest.IcodeTest
+import scala.tools.partest.IcodeComparison
 
-object Test extends IcodeTest {
-  override def printIcodeAfterPhase = "inlineExceptionHandlers"
+object Test extends IcodeComparison {
+  override def printIcodeAfterPhase = "inlinehandlers"
 }
 
 import scala.util.Random._
diff --git a/test/files/run/inliner-infer.scala b/test/files/run/inliner-infer.scala
index ea83966..e41d6ae 100644
--- a/test/files/run/inliner-infer.scala
+++ b/test/files/run/inliner-infer.scala
@@ -7,8 +7,8 @@ object Test extends App {
 
   @annotation.tailrec
   def walk(xs: MyList): Unit = {
-    if (xs.isEmpty) 
-      println("empty") 
+    if (xs.isEmpty)
+      println("empty")
     else {
       println("non-empty")
       walk(MyNil)
@@ -26,4 +26,3 @@ object MyNil extends MyList {
   override def isEmpty = true
 }
 
-  
diff --git a/test/files/run/inner-obj-auto.scala b/test/files/run/inner-obj-auto.scala
index aa2e293..00ea511 100644
--- a/test/files/run/inner-obj-auto.scala
+++ b/test/files/run/inner-obj-auto.scala
@@ -3,15 +3,15 @@
 /* ================================================================================
          Automatically generated on 2011-05-11. Do Not Edit (unless you have to).
          (2-level nesting)
-   ================================================================================ */ 
+   ================================================================================ */
 
 
 
 class Class2_1 {
-  
+
   class Class1_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -25,22 +25,22 @@ class Class2_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Class1_2).run }
 }
 
 
 object Object3_1 {
-  
+
   class Class1_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -54,22 +54,22 @@ object Object3_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Class1_2).run } // trigger
 }
 
 
 trait Trait4_1 {
-  
+
   class Class1_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -83,22 +83,22 @@ trait Trait4_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Class1_2).run }
 }
 
 
 class Class6_1 {
-  
+
   object Object5_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -112,22 +112,22 @@ class Class6_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest } // trigger
   }
-  
+
   def run { Object5_2.run }
 }
 
 
 object Object7_1 {
-  
+
   object Object5_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -141,22 +141,22 @@ object Object7_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest } // trigger
   }
-  
+
   def run { Object5_2.run } // trigger
 }
 
 
 trait Trait8_1 {
-  
+
   object Object5_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -170,22 +170,22 @@ trait Trait8_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest } // trigger
   }
-  
+
   def run { Object5_2.run }
 }
 
 
 class Class10_1 {
-  
+
   trait Trait9_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -199,22 +199,22 @@ class Class10_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Trait9_2 {}).run }
 }
 
 
 object Object11_1 {
-  
+
   trait Trait9_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -228,22 +228,22 @@ object Object11_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Trait9_2 {}).run } // trigger
 }
 
 
 trait Trait12_1 {
-  
+
   trait Trait9_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -257,22 +257,22 @@ trait Trait12_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Trait9_2 {}).run }
 }
 
 
 class Class14_1 {
-  
+
   def method13_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -286,22 +286,22 @@ class Class14_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { method13_2 }
 }
 
 
 object Object15_1 {
-  
+
   def method13_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -315,22 +315,22 @@ object Object15_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { method13_2 } // trigger
 }
 
 
 trait Trait16_1 {
-  
+
   def method13_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -344,22 +344,22 @@ trait Trait16_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { method13_2 }
 }
 
 
 class Class18_1 {
-  
+
   private def method17_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -373,22 +373,22 @@ class Class18_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { method17_2 }
 }
 
 
 object Object19_1 {
-  
+
   private def method17_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -402,22 +402,22 @@ object Object19_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { method17_2 } // trigger
 }
 
 
 trait Trait20_1 {
-  
+
   private def method17_2 {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -431,22 +431,22 @@ trait Trait20_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { method17_2 }
 }
 
 
 class Class22_1 {
-  
+
   val fun21_2 = () => {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -460,22 +460,22 @@ class Class22_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { fun21_2() }
 }
 
 
 object Object23_1 {
-  
+
   val fun21_2 = () => {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -489,22 +489,22 @@ object Object23_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { fun21_2() } // trigger
 }
 
 
 trait Trait24_1 {
-  
+
   val fun21_2 = () => {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -518,23 +518,23 @@ trait Trait24_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { fun21_2() }
 }
 
 
 class Class26_1 {
-  
+
   class Class25_2 {
-    { // in primary constructor 
+    { // in primary constructor
       var ObjCounter = 0
-      
+
       object Obj  { ObjCounter += 1}
       Obj // one
 
@@ -548,24 +548,24 @@ class Class26_1 {
           assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
           println("ok")
         } catch {
-          case e =>  print("failed "); e.printStackTrace()
+          case e: Throwable =>  print("failed "); e.printStackTrace()
         }
       }
 
       runTest // trigger
-    } 
+    }
   }
-  
+
   def run { (new Class25_2) }
 }
 
 
 object Object27_1 {
-  
+
   class Class25_2 {
-    { // in primary constructor 
+    { // in primary constructor
       var ObjCounter = 0
-      
+
       object Obj  { ObjCounter += 1}
       Obj // one
 
@@ -579,24 +579,24 @@ object Object27_1 {
           assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
           println("ok")
         } catch {
-          case e =>  print("failed "); e.printStackTrace()
+          case e: Throwable =>  print("failed "); e.printStackTrace()
         }
       }
 
       runTest // trigger
-    } 
+    }
   }
-  
+
   def run { (new Class25_2) } // trigger
 }
 
 
 trait Trait28_1 {
-  
+
   class Class25_2 {
-    { // in primary constructor 
+    { // in primary constructor
       var ObjCounter = 0
-      
+
       object Obj  { ObjCounter += 1}
       Obj // one
 
@@ -610,24 +610,24 @@ trait Trait28_1 {
           assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
           println("ok")
         } catch {
-          case e =>  print("failed "); e.printStackTrace()
+          case e: Throwable =>  print("failed "); e.printStackTrace()
         }
       }
 
       runTest // trigger
-    } 
+    }
   }
-  
+
   def run { (new Class25_2) }
 }
 
 
 class Class30_1 {
-  
+
   trait Trait29_2 {
-    { // in primary constructor 
+    { // in primary constructor
       var ObjCounter = 0
-      
+
       object Obj  { ObjCounter += 1}
       Obj // one
 
@@ -641,24 +641,24 @@ class Class30_1 {
           assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
           println("ok")
         } catch {
-          case e =>  print("failed "); e.printStackTrace()
+          case e: Throwable =>  print("failed "); e.printStackTrace()
         }
       }
 
       runTest // trigger
-    } 
+    }
   }
-  
+
   def run { (new Trait29_2 {}) }
 }
 
 
 object Object31_1 {
-  
+
   trait Trait29_2 {
-    { // in primary constructor 
+    { // in primary constructor
       var ObjCounter = 0
-      
+
       object Obj  { ObjCounter += 1}
       Obj // one
 
@@ -672,24 +672,24 @@ object Object31_1 {
           assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
           println("ok")
         } catch {
-          case e =>  print("failed "); e.printStackTrace()
+          case e: Throwable =>  print("failed "); e.printStackTrace()
         }
       }
 
       runTest // trigger
-    } 
+    }
   }
-  
+
   def run { (new Trait29_2 {}) } // trigger
 }
 
 
 trait Trait32_1 {
-  
+
   trait Trait29_2 {
-    { // in primary constructor 
+    { // in primary constructor
       var ObjCounter = 0
-      
+
       object Obj  { ObjCounter += 1}
       Obj // one
 
@@ -703,23 +703,23 @@ trait Trait32_1 {
           assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
           println("ok")
         } catch {
-          case e =>  print("failed "); e.printStackTrace()
+          case e: Throwable =>  print("failed "); e.printStackTrace()
         }
       }
 
       runTest // trigger
-    } 
+    }
   }
-  
+
   def run { (new Trait29_2 {}) }
 }
 
 
 class Class34_1 {
-  
+
   lazy val lzvalue33_2 = {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -733,22 +733,22 @@ class Class34_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { lzvalue33_2 }
 }
 
 
 object Object35_1 {
-  
+
   lazy val lzvalue33_2 = {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -762,22 +762,22 @@ object Object35_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { lzvalue33_2 } // trigger
 }
 
 
 trait Trait36_1 {
-  
+
   lazy val lzvalue33_2 = {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -791,22 +791,22 @@ trait Trait36_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { lzvalue33_2 }
 }
 
 
 class Class38_1 {
-  
+
   val value37_2 = {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -820,22 +820,22 @@ class Class38_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { value37_2 }
 }
 
 
 object Object39_1 {
-  
+
   val value37_2 = {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -849,22 +849,22 @@ object Object39_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { value37_2 } // trigger
 }
 
 
 trait Trait40_1 {
-  
+
   val value37_2 = {
     var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -878,22 +878,22 @@ trait Trait40_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { value37_2 }
 }
 
 
 class Class42_1 {
-  
+
   class Class41_2 {
     var ObjCounter = 0
-    
+
     private object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -907,22 +907,22 @@ class Class42_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Class41_2).run }
 }
 
 
 object Object43_1 {
-  
+
   class Class41_2 {
     var ObjCounter = 0
-    
+
     private object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -936,22 +936,22 @@ object Object43_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Class41_2).run } // trigger
 }
 
 
 trait Trait44_1 {
-  
+
   class Class41_2 {
     var ObjCounter = 0
-    
+
     private object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -965,22 +965,22 @@ trait Trait44_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Class41_2).run }
 }
 
 
 class Class46_1 {
-  
+
   object Object45_2 {
     var ObjCounter = 0
-    
+
     private object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -994,22 +994,22 @@ class Class46_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest } // trigger
   }
-  
+
   def run { Object45_2.run }
 }
 
 
 object Object47_1 {
-  
+
   object Object45_2 {
     var ObjCounter = 0
-    
+
     private object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -1023,22 +1023,22 @@ object Object47_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest } // trigger
   }
-  
+
   def run { Object45_2.run } // trigger
 }
 
 
 trait Trait48_1 {
-  
+
   object Object45_2 {
     var ObjCounter = 0
-    
+
     private object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -1052,22 +1052,22 @@ trait Trait48_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest } // trigger
   }
-  
+
   def run { Object45_2.run }
 }
 
 
 class Class50_1 {
-  
+
   trait Trait49_2 {
     var ObjCounter = 0
-    
+
     private object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -1081,22 +1081,22 @@ class Class50_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Trait49_2 {}).run }
 }
 
 
 object Object51_1 {
-  
+
   trait Trait49_2 {
     var ObjCounter = 0
-    
+
     private object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -1110,22 +1110,22 @@ object Object51_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Trait49_2 {}).run } // trigger
 }
 
 
 trait Trait52_1 {
-  
+
   trait Trait49_2 {
     var ObjCounter = 0
-    
+
     private object Obj  { ObjCounter += 1}
     Obj // one
 
@@ -1139,22 +1139,22 @@ trait Trait52_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("failed "); e.printStackTrace()
+        case e: Throwable =>  print("failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Trait49_2 {}).run }
 }
 
 
 class Class54_1 {
-  
+
   class Class53_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1172,22 +1172,22 @@ class Class54_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Class53_2).run }
 }
 
 
 object Object55_1 {
-  
+
   class Class53_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1205,22 +1205,22 @@ object Object55_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Class53_2).run } // trigger
 }
 
 
 trait Trait56_1 {
-  
+
   class Class53_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1238,22 +1238,22 @@ trait Trait56_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Class53_2).run }
 }
 
 
 class Class58_1 {
-  
+
   object Object57_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1271,22 +1271,22 @@ class Class58_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     def run { runTest } // trigger
   }
-  
+
   def run { Object57_2.run }
 }
 
 
 object Object59_1 {
-  
+
   object Object57_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1304,22 +1304,22 @@ object Object59_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     def run { runTest } // trigger
   }
-  
+
   def run { Object57_2.run } // trigger
 }
 
 
 trait Trait60_1 {
-  
+
   object Object57_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1337,22 +1337,22 @@ trait Trait60_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     def run { runTest } // trigger
   }
-  
+
   def run { Object57_2.run }
 }
 
 
 class Class62_1 {
-  
+
   trait Trait61_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1370,22 +1370,22 @@ class Class62_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Trait61_2 {}).run }
 }
 
 
 object Object63_1 {
-  
+
   trait Trait61_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1403,22 +1403,22 @@ object Object63_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Trait61_2 {}).run } // trigger
 }
 
 
 trait Trait64_1 {
-  
+
   trait Trait61_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1436,22 +1436,22 @@ trait Trait64_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     def run { runTest }
   }
-  
+
   def run { (new Trait61_2 {}).run }
 }
 
 
 class Class66_1 {
-  
+
   def method65_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1469,22 +1469,22 @@ class Class66_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { method65_2 }
 }
 
 
 object Object67_1 {
-  
+
   def method65_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1502,22 +1502,22 @@ object Object67_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { method65_2 } // trigger
 }
 
 
 trait Trait68_1 {
-  
+
   def method65_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1535,22 +1535,22 @@ trait Trait68_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { method65_2 }
 }
 
 
 class Class70_1 {
-  
+
   private def method69_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1568,22 +1568,22 @@ class Class70_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { method69_2 }
 }
 
 
 object Object71_1 {
-  
+
   private def method69_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1601,22 +1601,22 @@ object Object71_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { method69_2 } // trigger
 }
 
 
 trait Trait72_1 {
-  
+
   private def method69_2 {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1634,22 +1634,22 @@ trait Trait72_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { method69_2 }
 }
 
 
 class Class74_1 {
-  
+
   val fun73_2 = () => {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1667,22 +1667,22 @@ class Class74_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { fun73_2() }
 }
 
 
 object Object75_1 {
-  
+
   val fun73_2 = () => {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1700,22 +1700,22 @@ object Object75_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { fun73_2() } // trigger
 }
 
 
 trait Trait76_1 {
-  
+
   val fun73_2 = () => {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1733,23 +1733,23 @@ trait Trait76_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { fun73_2() }
 }
 
 
 class Class78_1 {
-  
+
   class Class77_2 {
-    { // in primary constructor 
+    { // in primary constructor
       @volatile var ObjCounter = 0
-      
+
       object Obj  { ObjCounter += 1}
 
       def multiThreadedAccess() {
@@ -1767,24 +1767,24 @@ class Class78_1 {
           assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
           println("ok")
         } catch {
-          case e =>  print("multi-threaded failed "); e.printStackTrace()
+          case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
         }
       }
 
       runTest // trigger
-    } 
+    }
   }
-  
+
   def run { (new Class77_2) }
 }
 
 
 object Object79_1 {
-  
+
   class Class77_2 {
-    { // in primary constructor 
+    { // in primary constructor
       @volatile var ObjCounter = 0
-      
+
       object Obj  { ObjCounter += 1}
 
       def multiThreadedAccess() {
@@ -1802,24 +1802,24 @@ object Object79_1 {
           assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
           println("ok")
         } catch {
-          case e =>  print("multi-threaded failed "); e.printStackTrace()
+          case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
         }
       }
 
       runTest // trigger
-    } 
+    }
   }
-  
+
   def run { (new Class77_2) } // trigger
 }
 
 
 trait Trait80_1 {
-  
+
   class Class77_2 {
-    { // in primary constructor 
+    { // in primary constructor
       @volatile var ObjCounter = 0
-      
+
       object Obj  { ObjCounter += 1}
 
       def multiThreadedAccess() {
@@ -1837,24 +1837,24 @@ trait Trait80_1 {
           assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
           println("ok")
         } catch {
-          case e =>  print("multi-threaded failed "); e.printStackTrace()
+          case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
         }
       }
 
       runTest // trigger
-    } 
+    }
   }
-  
+
   def run { (new Class77_2) }
 }
 
 
 class Class82_1 {
-  
+
   trait Trait81_2 {
-    { // in primary constructor 
+    { // in primary constructor
       @volatile var ObjCounter = 0
-      
+
       object Obj  { ObjCounter += 1}
 
       def multiThreadedAccess() {
@@ -1872,24 +1872,24 @@ class Class82_1 {
           assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
           println("ok")
         } catch {
-          case e =>  print("multi-threaded failed "); e.printStackTrace()
+          case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
         }
       }
 
       runTest // trigger
-    } 
+    }
   }
-  
+
   def run { (new Trait81_2 {}) }
 }
 
 
 object Object83_1 {
-  
+
   trait Trait81_2 {
-    { // in primary constructor 
+    { // in primary constructor
       @volatile var ObjCounter = 0
-      
+
       object Obj  { ObjCounter += 1}
 
       def multiThreadedAccess() {
@@ -1907,24 +1907,24 @@ object Object83_1 {
           assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
           println("ok")
         } catch {
-          case e =>  print("multi-threaded failed "); e.printStackTrace()
+          case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
         }
       }
 
       runTest // trigger
-    } 
+    }
   }
-  
+
   def run { (new Trait81_2 {}) } // trigger
 }
 
 
 trait Trait84_1 {
-  
+
   trait Trait81_2 {
-    { // in primary constructor 
+    { // in primary constructor
       @volatile var ObjCounter = 0
-      
+
       object Obj  { ObjCounter += 1}
 
       def multiThreadedAccess() {
@@ -1942,23 +1942,23 @@ trait Trait84_1 {
           assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
           println("ok")
         } catch {
-          case e =>  print("multi-threaded failed "); e.printStackTrace()
+          case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
         }
       }
 
       runTest // trigger
-    } 
+    }
   }
-  
+
   def run { (new Trait81_2 {}) }
 }
 
 
 class Class90_1 {
-  
+
   val value89_2 = {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -1976,22 +1976,22 @@ class Class90_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { value89_2 }
 }
 
 
 trait Trait92_1 {
-  
+
   val value89_2 = {
     @volatile var ObjCounter = 0
-    
+
     object Obj  { ObjCounter += 1}
 
     def multiThreadedAccess() {
@@ -2009,13 +2009,13 @@ trait Trait92_1 {
         assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
         println("ok")
       } catch {
-        case e =>  print("multi-threaded failed "); e.printStackTrace()
+        case e: Throwable =>  print("multi-threaded failed "); e.printStackTrace()
       }
     }
 
     runTest // trigger
   }
-  
+
   def run { value89_2 }
 }
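A note on the catch clauses above: throughout these object-instantiation tests the upstream patch tightens the bare handlers (case e =>) to case e: Throwable =>, since the bare form catches every Throwable, fatal errors included, and newer compilers tend to flag it. Outside of test code the usual spelling is scala.util.control.NonFatal. A minimal, self-contained sketch of both forms (object and method names here are only for illustration):

    import scala.util.control.NonFatal

    object CatchSketch {
      def risky(): Int = sys.error("boom")

      def main(args: Array[String]): Unit = {
        // Explicitly typed catch-all, as used in the updated tests.
        try risky()
        catch { case e: Throwable => println("failed: " + e.getMessage) }

        // NonFatal extractor: recovers from ordinary exceptions but lets
        // fatal errors (OutOfMemoryError and friends) propagate.
        try risky()
        catch { case NonFatal(e) => println("recovered: " + e.getMessage) }
      }
    }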
 
diff --git a/test/files/run/interop_classtags_are_classmanifests.scala b/test/files/run/interop_classtags_are_classmanifests.scala
index 91b9d89..62d85c3 100644
--- a/test/files/run/interop_classtags_are_classmanifests.scala
+++ b/test/files/run/interop_classtags_are_classmanifests.scala
@@ -1,5 +1,6 @@
-import scala.reflect.{ClassTag, classTag}
+import scala.reflect.ClassTag
 
+@deprecated("Suppress warnings", since="2.11")
 object Test extends App {
   def classTagIsClassManifest[T: ClassTag] = {
     println(classManifest[T])
@@ -8,4 +9,4 @@ object Test extends App {
   classTagIsClassManifest[Int]
   classTagIsClassManifest[String]
   classTagIsClassManifest[Array[Int]]
-}
\ No newline at end of file
+}
diff --git a/test/files/run/interop_manifests_are_classtags.scala b/test/files/run/interop_manifests_are_classtags.scala
index 03479e5..705038e 100644
--- a/test/files/run/interop_manifests_are_classtags.scala
+++ b/test/files/run/interop_manifests_are_classtags.scala
@@ -1,5 +1,6 @@
 import scala.reflect.{ClassTag, classTag}
 
+@deprecated("Suppress warnings", since="2.11")
 object Test extends App {
   def classManifestIsClassTag[T: ClassManifest] = {
     println(classTag[T])
@@ -20,4 +21,4 @@ object Test extends App {
   manifestIsClassTag[Int]
   manifestIsClassTag[String]
   manifestIsClassTag[Array[Int]]
-}
\ No newline at end of file
+}
diff --git a/test/files/run/interop_typetags_are_manifests.scala b/test/files/run/interop_typetags_are_manifests.scala
index 1aca7f5..6dc5437 100644
--- a/test/files/run/interop_typetags_are_manifests.scala
+++ b/test/files/run/interop_typetags_are_manifests.scala
@@ -1,5 +1,6 @@
 import scala.reflect.runtime.universe._
 import scala.reflect.ClassTag
+import internal._
 
 object Test extends App {
   def typeTagIsManifest[T: TypeTag : ClassTag] = {
diff --git a/test/files/run/interpolationArgs.check b/test/files/run/interpolationArgs.check
index 155991e..983214c 100644
--- a/test/files/run/interpolationArgs.check
+++ b/test/files/run/interpolationArgs.check
@@ -1,2 +1,2 @@
-java.lang.IllegalArgumentException: wrong number of arguments for interpolated string
-java.lang.IllegalArgumentException: wrong number of arguments for interpolated string
+java.lang.IllegalArgumentException: wrong number of arguments (1) for interpolated string with 3 parts
+java.lang.IllegalArgumentException: wrong number of arguments (1) for interpolated string with 1 parts
diff --git a/test/files/run/interpolationArgs.flags b/test/files/run/interpolationArgs.flags
deleted file mode 100644
index e1b3744..0000000
--- a/test/files/run/interpolationArgs.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
\ No newline at end of file
diff --git a/test/files/run/interpolationArgs.scala b/test/files/run/interpolationArgs.scala
index eb13767..ffb254b 100644
--- a/test/files/run/interpolationArgs.scala
+++ b/test/files/run/interpolationArgs.scala
@@ -1,5 +1,5 @@
 object Test extends App {
-  try { scala.StringContext("p1", "p2", "p3").s("e1") } catch { case ex => println(ex) }
-  try { scala.StringContext("p1").s("e1") } catch { case ex => println(ex) }
+  try { scala.StringContext("p1", "p2", "p3").s("e1") } catch { case ex: Throwable => println(ex) }
+  try { scala.StringContext("p1").s("e1") } catch { case ex: Throwable => println(ex) }
 }
 
diff --git a/test/files/run/interpolationMultiline2.flags b/test/files/run/interpolationMultiline2.flags
deleted file mode 100644
index e1b3744..0000000
--- a/test/files/run/interpolationMultiline2.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
\ No newline at end of file
diff --git a/test/files/run/interpolationMultiline2.scala b/test/files/run/interpolationMultiline2.scala
index f6a682c..2de4c4b 100644
--- a/test/files/run/interpolationMultiline2.scala
+++ b/test/files/run/interpolationMultiline2.scala
@@ -2,14 +2,15 @@ object Test extends App {
 
   def test1(n: Int) = {
     val old = "old"
-    try { println(s"""Bob is ${s"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
-    try { println(s"""Bob is ${f"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
-    try { println(f"""Bob is ${s"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
-    try { println(f"""Bob is ${f"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
-    try { println(f"""Bob is ${f"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
-    try { println(f"""Bob is ${s"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
-    try { println(s"""Bob is ${f"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
-    try { println(s"""Bob is ${s"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+    val catcher: PartialFunction[Throwable, Unit] = { case e => println(e) }
+    try { println(s"""Bob is ${s"$n"} years ${s"$old"}!""") } catch catcher
+    try { println(s"""Bob is ${f"$n"} years ${s"$old"}!""") } catch catcher
+    try { println(f"""Bob is ${s"$n"} years ${s"$old"}!""") } catch catcher
+    try { println(f"""Bob is ${f"$n"} years ${s"$old"}!""") } catch catcher
+    try { println(f"""Bob is ${f"$n%2d"} years ${s"$old"}!""") } catch catcher
+    try { println(f"""Bob is ${s"$n%2d"} years ${s"$old"}!""") } catch catcher
+    try { println(s"""Bob is ${f"$n%2d"} years ${s"$old"}!""") } catch catcher
+    try { println(s"""Bob is ${s"$n%2d"} years ${s"$old"}!""") } catch catcher
   }
 
   test1(1)
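The rewrite above also relies on the fact that a catch clause can take any value of type PartialFunction[Throwable, T], not just an inline block of case clauses, which lets one handler be shared across several try expressions. A small sketch in the same style (names are illustrative only):

    object SharedHandler {
      // One handler value reused by several try expressions, mirroring the
      // catcher value introduced in interpolationMultiline2.scala above.
      val catcher: PartialFunction[Throwable, Unit] = {
        case e => println("caught: " + e)
      }

      def main(args: Array[String]): Unit = {
        try println("oops".toInt) catch catcher
        try println(List.empty[Int].head) catch catcher
      }
    }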
diff --git a/test/files/run/intmap.check b/test/files/run/intmap.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/io-position.check b/test/files/run/io-position.check
deleted file mode 100644
index 09f743d..0000000
Binary files a/test/files/run/io-position.check and /dev/null differ
diff --git a/test/files/run/io-position.scala b/test/files/run/io-position.scala
deleted file mode 100644
index 1093704..0000000
--- a/test/files/run/io-position.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-object Test {
-  Console.setErr(Console.out)
-  
-  def main(args: Array[String]): Unit = { 
-    try {
-      xml.parsing.ConstructingParser.fromSource(io.Source.fromString("<foo>"), false).document()
-    } catch {
-      case e:Exception => println(e.getMessage)
-    }
-  } 
-
-} 
-
diff --git a/test/files/run/iq.check b/test/files/run/iq.check
index 81114ea..311bf83 100644
--- a/test/files/run/iq.check
+++ b/test/files/run/iq.check
@@ -1,4 +1,8 @@
 Empty
+q2: Queue(42, 0)
+qa: Queue(42, 0)
+qb: Queue(42, 0)
+qc: Queue(42, 0)
 Head: 42
 q5: Queue(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
 q5[5]: 5
diff --git a/test/files/run/iq.scala b/test/files/run/iq.scala
index e5f9e47..1eb1d40 100644
--- a/test/files/run/iq.scala
+++ b/test/files/run/iq.scala
@@ -9,25 +9,36 @@ object iq {
     /* Create an empty queue. */
     val q: Queue[Int] = Queue.empty
 
-    /* Test isEmpty. 
-     * Expected: Empty 
+    /* Test isEmpty.
+     * Expected: Empty
      */
     if (q.isEmpty) {
       Console.println("Empty")
     }
 
-    /* Test infix enqueing. */
-    //val q2 = q + 42 + 0  // deprecated
+    /* Test enqueing. */
     val q2 = q.enqueue(42).enqueue(0)
+    val qa = q :+ 42 :+ 0
+    assert(q2 == qa)
+    
+    val qb = 42 +: 0 +: q
+    assert(q2 == qb)
+    val qc = 42 +: q :+ 0
+    assert(q2 == qc)
 
-    /* Test is empty and dequeue. 
+    Console.println("q2: " + q2)
+    Console.println("qa: " + qa)
+    Console.println("qb: " + qb)
+    Console.println("qc: " + qc)
+    
+    /* Test is empty and dequeue.
      * Expected: Head: 42
      */
     val q4 =
       if (q2.isEmpty) {
         Console.println("Empty")
         q2
-      } 
+      }
       else {
 	val (head, q3) = q2.dequeue
         Console.println("Head: " + head)
@@ -36,8 +47,8 @@ object iq {
 
     /* Test sequence enqueing. */
     val q5: Queue[Any] = q4.enqueue(List(1,2,3,4,5,6,7,8,9))
-    /* Test toString. 
-     * Expected: Head: q5: Queue(0,1,2,3,4,5,6,7,8,9) 
+    /* Test toString.
+     * Expected: q5: Queue(0,1,2,3,4,5,6,7,8,9)
      */
     Console.println("q5: " + q5)
     /* Test apply
@@ -59,7 +70,7 @@ object iq {
     //val q8 = q7 + 10 + 11  //deprecated
     val q8 = q7.enqueue(10).enqueue(11)
     /* Test dequeu
-     * Expected: q8: Queue(2,3,4,5,6,7,8,9,10,11)    
+     * Expected: q8: Queue(2,3,4,5,6,7,8,9,10,11)
      */
     Console.println("q8: " + q8)
     val q9 = Queue(2,3,4,5,6,7,8,9,10,11)
@@ -70,14 +81,14 @@ object iq {
     Console.println("q8 == q9: " + (q8 == q9))
 
     /* Testing elements
-     *  Expected: Elements:  1  2  3  4  5  6  7  8  9 
+     *  Expected: Elements:  1  2  3  4  5  6  7  8  9
      */
-    Console.print("Elements: "); 
+    Console.print("Elements: ");
     q6.iterator.foreach(e => Console.print(" "+ e + " "))
-    Console.println; 
+    Console.println;
 
    /* Testing mkString
-     *  Expected: String: <1-2-3-4-5-6-7-8-9> 
+     *  Expected: String: <1-2-3-4-5-6-7-8-9>
      */
     Console.println("String: " + q6.mkString("<","-",">"))
 
@@ -89,7 +100,7 @@ object iq {
     /* Testing front
      *  Expected: Front: 1
      */
-    Console.println("Front: " + q6.front); 
+    Console.println("Front: " + q6.front);
   }
 }
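The extra assertions added to iq.scala check that the generic :+ (append) and +: (prepend) operators agree with Queue's own enqueue. A minimal sketch of that equivalence (object name is illustrative only):

    import scala.collection.immutable.Queue

    object QueueOpsSketch {
      def main(args: Array[String]): Unit = {
        val q  = Queue.empty[Int].enqueue(42).enqueue(0)  // Queue(42, 0)
        val qa = Queue.empty[Int] :+ 42 :+ 0              // append to the rear
        val qb = 42 +: 0 +: Queue.empty[Int]              // prepend; +: is right-associative
        println(q == qa)  // true
        println(q == qb)  // true
      }
    }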
 
diff --git a/test/files/run/is-valid-num.scala b/test/files/run/is-valid-num.scala
index 402eff9..65e8cee 100644
--- a/test/files/run/is-valid-num.scala
+++ b/test/files/run/is-valid-num.scala
@@ -1,3 +1,6 @@
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
 object Test {
   def x = BigInt("10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
   def y = BigDecimal("" + (Short.MaxValue + 1) + ".0")
@@ -16,25 +19,27 @@ object Test {
     assert(!x.isValidChar, x)
     assert(!x.isValidShort, x)
     assert(!x.isValidByte, x)
-//    assert(y.isWhole, y)
+    assert(y.isWhole, y)
     assert(!y.isValidShort, y)
     assert(y.isValidChar, y)
     assert(y.isValidInt, y)
-    assert(y.isValidFloat, y)
-    assert(y.isValidDouble, y)
+    assert(y.isDecimalFloat, y)
+    assert(y.isDecimalDouble, y)
     assert(y.isValidLong, y)
     assert(!y.isValidByte, y)
-//    assert(!y1.isWhole)
+    assert(!y1.isWhole)
     assert(!y1.isValidLong, y1)
-    assert(!y1.isValidFloat, y1)
-    assert(!y1.isValidDouble, y1)
+    assert(y1.isDecimalFloat, y1)
+    assert(y1.isDecimalDouble, y1)
+    assert(!y1.isExactFloat, y1)
+    assert(!y1.isExactDouble, y1)
     assert(!y1.isValidInt, y1)
     assert(!y1.isValidChar, y1)
     assert(!y1.isValidShort, y1)
     assert(!y1.isValidByte, y1)
     assert(!y2.isValidLong, y2)
-    assert(y2.isValidFloat, y2)
-    assert(y2.isValidDouble, y2)
+    assert(y2.isExactFloat, y2)
+    assert(y2.isExactDouble, y2)
 
     assert(!l1.isValidInt && (l1 - 1).isValidInt, l1)
     assert(!l2.isValidInt && (l2 + 1).isValidInt, l2)
@@ -124,7 +129,7 @@ object Test {
     checkBigInt2(biExp2(128) - biExp2(128 - pf))
     checkBigInt2(biExp2(128) - biExp2(128 - pf - 1))
     checkBigInt2(biExp2(128))
-    
+
     checkBigInt2(biExp2(1023))
     checkBigInt2(biExp2(1024) - biExp2(1024 - pd))
     checkBigInt2(biExp2(1024) - biExp2(1024 - pd - 1))
@@ -167,8 +172,8 @@ object Test {
     if (!d.isInfinity) {
       val bd = BigDecimal(new java.math.BigDecimal(d))
 //      assert(!bd.isWhole, bd)
-      assert(bd.isValidDouble, bd)
-      assert(bd.isValidFloat == isFloat, bd)
+      assert(bd.isExactDouble, bd)
+      assert(bd.isExactFloat == isFloat, bd)
       assert(!bd.isValidLong, bd)
       assert(!bd.isValidInt, bd)
       assert(!bd.isValidChar, bd)
@@ -207,9 +212,9 @@ object Test {
     val isFloat = !bi.toFloat.isInfinity && bd.compare(BigDecimal(new java.math.BigDecimal(bi.toFloat))) == 0
     val isDouble = !bi.toDouble.isInfinity && bd.compare(BigDecimal(new java.math.BigDecimal(bi.toDouble))) == 0
 
-//    assert(bd.isWhole, bd)
-    assert(bd.isValidDouble == isDouble, bd)
-    assert(bd.isValidFloat == isFloat, bd)
+    assert(bd.isWhole, bd)
+    assert(bd.isBinaryDouble == isDouble, bd)
+    assert(bd.isBinaryFloat == isFloat, bd)
     assert(bd.isValidLong == isLong, bd)
     assert(bd.isValidInt == isInt, bd)
     assert(bd.isValidChar == isChar, bd)
diff --git a/test/files/run/issue192.scala b/test/files/run/issue192.scala
index d8db8b5..8e6d13e 100644
--- a/test/files/run/issue192.scala
+++ b/test/files/run/issue192.scala
@@ -1,16 +1,18 @@
+import scala.language.reflectiveCalls
+
 object Test extends App {
-  
+
   def f1(p: Any{def unary_+ : Int}) = +p
   def f2(p: Any{def unary_- : Int}) = -p
   def f3(p: Any{def unary_~ : Int}) = ~p
   def f4(p: Any{def unary_! : Boolean}) = !p
-  
+
   def f5(p: Any{def +(q: Int): Int}) = p + 7
   def f6(p: Any{def -(q: Int): Int}) = p - 7
   def f7(p: Any{def *(q: Int): Int}) = p * 7
   def f8(p: Any{def /(q: Int): Int}) = p / 7
   def f9(p: Any{def %(q: Int): Int}) = p % 7
-  
+
   def f10(p: Any{def |(q: Int): Int}) = p | 7
   def f11(p: Any{def |(q: Boolean): Boolean}) = p | true
   def f12(p: Any{def ^(q: Int): Int}) = p ^ 7
@@ -19,11 +21,11 @@ object Test extends App {
   def f15(p: Any{def &(q: Boolean): Boolean}) = p & true
   def f16(p: Any{def ||(q: Boolean): Boolean}) = p || true
   def f17(p: Any{def &&(q: Boolean): Boolean}) = p && true
-  
+
   def f18(p: Any{def <<(q: Int): Int}) = p << 7
   def f19(p: Any{def >>(q: Int): Int}) = p >> 7
   def f20(p: Any{def >>>(q: Int): Int}) = p >>> 7
-  
+
   def f21(p: Any{def toByte: Byte}) = p.toByte
   def f22(p: Any{def toShort: Short}) = p.toShort
   def f23(p: Any{def toChar: Char}) = p.toChar
@@ -31,28 +33,28 @@ object Test extends App {
   def f25(p: Any{def toLong: Long}) = p.toLong
   def f26(p: Any{def toFloat: Float}) = p.toFloat
   def f27(p: Any{def toDouble: Double}) = p.toDouble
-  
+
   def f28(p: Any{def ==(q: Int): Boolean}) = p == 7
   def f29(p: Any{def !=(q: Int): Boolean}) = p != 7
   def f30(p: Any{def ==(q: Boolean): Boolean}) = p == true
   def f31(p: Any{def !=(q: Boolean): Boolean}) = p != true
-  
+
   def f32(p: Any{def <(q: Int): Boolean}) = p < 7
   def f33(p: Any{def <=(q: Int): Boolean}) = p <= 7
   def f34(p: Any{def >=(q: Int): Boolean}) = p >= 7
   def f35(p: Any{def >(q: Int): Boolean}) = p > 7
-  
+
   print("f1 =  "); println(f1(1) == +1)
   print("f2 =  "); println(f2(1) == -1)
   print("f3 =  "); println(f3(1) == ~1)
   print("f4 =  "); println(f4(true) == !true)
-  
+
   print("f5 =  "); println(f5(4) == (4 + 7))
   print("f6 =  "); println(f6(4) == (4 - 7))
   print("f7 =  "); println(f7(4) == (4 * 7))
   print("f8 =  "); println(f8(4) == (4 / 7))
   print("f9 =  "); println(f9(4) == (4 % 7))
-  
+
   print("f10 = "); println(f10(4) == (4 | 7))
   print("f11 = "); println(f11(false) == (false | true))
   print("f12 = "); println(f12(4) == (4 ^ 7))
@@ -61,11 +63,11 @@ object Test extends App {
   print("f15 = "); println(f15(false) == (false & true))
   print("f16 = "); println(f16(false) == (false || true))
   print("f17 = "); println(f17(false) == (false && true))
-  
+
   print("f18 = "); println(f18(4) == (4 << 7))
   print("f19 = "); println(f19(-4) == (-4 >> 7))
   print("f20 = "); println(f20(-4) == (-4 >>> 7))
-             
+
   print("f21 = "); println(f21(4.2) == (4.2.toByte))
   print("f22 = "); println(f22(4.2) == (4.2.toShort))
   print("f23 = "); println(f23(4.2) == (4.2.toChar))
@@ -73,17 +75,17 @@ object Test extends App {
   print("f25 = "); println(f25(4.2) == (4.2.toLong))
   print("f26 = "); println(f26(4.2) == (4.2.toFloat))
   print("f27 = "); println(f27(4.2) == (4.2.toDouble))
-             
+
   print("f28 = "); println(f28(4) == (4 == 7))
   print("f29 = "); println(f29(4) == (4 != 7))
   print("f30 = "); println(f30(false) == (false == true))
   print("f31 = "); println(f31(false) == (false != true))
-             
+
   print("f32 = "); println(f32(4) == (4 < 7))
   print("f33 = "); println(f33(4) == (4 <= 7))
   print("f34 = "); println(f34(4) == (4 >= 7))
   print("f35 = "); println(f35(4) == (4 > 7))
-  
+
   println("ok")
-  
+
 }
diff --git a/test/files/run/iterator-concat.check b/test/files/run/iterator-concat.check
new file mode 100644
index 0000000..23835b0
--- /dev/null
+++ b/test/files/run/iterator-concat.check
@@ -0,0 +1,4 @@
+100
+1000
+10000
+100000
diff --git a/test/files/run/iterator-concat.scala b/test/files/run/iterator-concat.scala
new file mode 100644
index 0000000..f113634
--- /dev/null
+++ b/test/files/run/iterator-concat.scala
@@ -0,0 +1,15 @@
+object Test {
+  // Create `size` Function0s, each of which evaluates to an Iterator
+  // which produces 1. Then fold them over ++ to get a single iterator,
+  // which should sum to "size".
+  def mk(size: Int): Iterator[Int] = {
+    val closures = (1 to size).toList.map(x => (() => Iterator(1)))
+    closures.foldLeft(Iterator.empty: Iterator[Int])((res, f) => res ++ f())
+  }
+  def main(args: Array[String]): Unit = {
+    println(mk(100).sum)
+    println(mk(1000).sum)
+    println(mk(10000).sum)
+    println(mk(100000).sum)
+  }
+}
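The new iterator-concat test folds up to 100000 single-element iterators over ++ and sums the result. That pattern stays cheap because the right-hand operand of Iterator.++ is a by-name parameter, so each tail is only materialized once the elements to its left are exhausted. A rough sketch of that laziness (the print statements only make the evaluation order visible; the exact timing is an assumption about the 2.11 implementation):

    object LazyConcat {
      def main(args: Array[String]): Unit = {
        def tail(): Iterator[Int] = { println("tail materialized"); Iterator(3) }

        // tail() is not evaluated here: ++ takes its argument by name.
        val it = Iterator(1, 2) ++ tail()

        println(it.next())  // 1
        println(it.next())  // 2 (the tail is normally still untouched here)
        println(it.next())  // the tail is built on demand, then 3 is returned
      }
    }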
diff --git a/test/files/run/iterator-from.scala b/test/files/run/iterator-from.scala
new file mode 100644
index 0000000..269e859
--- /dev/null
+++ b/test/files/run/iterator-from.scala
@@ -0,0 +1,71 @@
+/* This file tests iteratorFrom, keysIteratorFrom, and valueIteratorFrom on various sorted sets and maps
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
+
+import scala.util.{Random => R}
+import scala.collection._
+import scala.math.Ordered
+
+object Test extends App {
+  val maxLength = 25
+  val maxKey = 50
+  val maxValue = 50
+
+  def testSet[A <% Ordered[A]](set: SortedSet[A], list: List[A]) {
+    val distinctSorted = list.distinct.sorted
+    assertEquals("Set size wasn't the same as list sze", set.size, distinctSorted.size)
+
+    for(key <- distinctSorted) {
+      val clazz = set.getClass
+      val iteratorFrom = (set iteratorFrom key).toList
+      check(clazz, list, s"set iteratorFrom $key", s"(set from $key).iterator", iteratorFrom, (set from key).iterator.toList)
+      check(clazz, list, s"set.iteratorFrom $key", s"distinctSorted dropWhile (_ < $key)", iteratorFrom, distinctSorted dropWhile (_ < key))
+      check(clazz, list, s"set iteratorFrom $key", s"set keysIterator from $key", iteratorFrom, (set keysIteratorFrom key).toList)
+    }
+  }
+
+  def testMap[A <% Ordered[A], B](map: SortedMap[A, B], list: List[(A, B)]) {
+    val distinctSorted = distinctByKey(list).sortBy(_._1)
+    assertEquals("Map size wasn't the same as list sze", map.size, distinctSorted.size)
+
+    for(keyValue <- distinctSorted) {
+      val key = keyValue._1
+      val clazz = map.getClass
+      val iteratorFrom = (map iteratorFrom key).toList
+      check(clazz, list, s"map iteratorFrom $key", s"(map from $key).iterator", iteratorFrom, (map from key).iterator.toList)
+      check(clazz, list, s"map iteratorFrom $key", s"distinctSorted dropWhile (_._1 < $key)", iteratorFrom, distinctSorted dropWhile (_._1 < key))
+      check(clazz, list, s"map iteratorFrom $key map (_._1)", s"map keysIteratorFrom $key", iteratorFrom map (_._1), (map keysIteratorFrom key).toList)
+      check(clazz, list, s"map iteratorFrom $key map (_._2)", s"map valuesIteratorFrom $key", iteratorFrom map (_._2), (map valuesIteratorFrom key).toList)
+    }
+  }
+
+  def check[A](clazz: Class[_], list: List[_], m1: String, m2: String, l1: List[A], l2: List[A]) {
+    assertEquals(s"$clazz: `$m1` didn't match `$m2` on list $list", l1, l2)
+  }
+
+  def assertEquals[A](msg: String, x: A, y: A) {
+    assert(x == y, s"$msg\n1: $x\n2: $y")
+  }
+
+  def distinctByKey[A,B](list: List[(A, B)]) : List[(A,B)] = list.groupBy(_._1).map(_._2.last).toList
+
+  object Weekday extends Enumeration {
+    type Weekday = Value
+    val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
+  }
+
+  0 until maxLength foreach {length =>
+    val keyValues = (0 until length map {_ => (R nextInt maxKey, R nextInt maxValue)}).toList
+    val keys = keyValues map (_._2)
+    testSet(immutable.BitSet(keys:_*), keys)
+    testSet(immutable.TreeSet(keys:_*), keys)
+    testSet(mutable.TreeSet(keys:_*), keys)
+    val days = keys map {n => Weekday(n % Weekday.values.size)}
+    testSet(Weekday.ValueSet(days:_*), days)
+
+    val treeMap = immutable.TreeMap(keyValues:_*)
+    testMap(treeMap, keyValues)
+    testMap(treeMap.filterKeys(_ % 2 == 0), keyValues  filter (_._1 % 2 == 0))
+    testMap(treeMap mapValues (_ + 1), keyValues map {case (k,v) => (k, v + 1)})
+  }
+}
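iteratorFrom, keysIteratorFrom and valuesIteratorFrom, which this new test exercises, were added to the sorted collections for 2.11; as the test's dropWhile comparison indicates, they start at the first entry whose key is >= the given key. A small usage sketch (values chosen only for illustration):

    import scala.collection.immutable.{TreeMap, TreeSet}

    object IteratorFromSketch {
      def main(args: Array[String]): Unit = {
        val set = TreeSet(1, 3, 5, 7, 9)
        println(set.iteratorFrom(4).toList)        // List(5, 7, 9)

        val map = TreeMap(1 -> "a", 3 -> "b", 5 -> "c")
        println(map.iteratorFrom(3).toList)        // List((3,b), (5,c))
        println(map.keysIteratorFrom(3).toList)    // List(3, 5)
        println(map.valuesIteratorFrom(3).toList)  // List(b, c)
      }
    }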
diff --git a/test/files/run/iterator-iterate-lazy.scala b/test/files/run/iterator-iterate-lazy.scala
index 73886f1..92b1700 100644
--- a/test/files/run/iterator-iterate-lazy.scala
+++ b/test/files/run/iterator-iterate-lazy.scala
@@ -1,5 +1,5 @@
 object Test {
   def main(args: Array[String]): Unit = {
-    Iterator.iterate(1 to 5 toList)(_.tail).takeWhile(_.nonEmpty).map(_.head).toList
+    Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).map(_.head).toList
   }
 }
diff --git a/test/files/run/iterator3444.scala b/test/files/run/iterator3444.scala
index 2d0643b..1d0713a 100644
--- a/test/files/run/iterator3444.scala
+++ b/test/files/run/iterator3444.scala
@@ -2,22 +2,22 @@
 
 // ticked #3444
 object Test {
-  
+
   def main(args: Array[String]) {
     val it = (1 to 12).toSeq.iterator
-    
+
     assert(it.next == 1)
     assert(it.take(2).toList == List(2, 3))
-    
+
     val jt = (4 to 12).toSeq.iterator
     assert(jt.next == 4)
     assert(jt.drop(5).toList == List(10, 11, 12))
-    
+
     val kt = (1 until 10).toSeq.iterator
     assert(kt.drop(50).toList == Nil)
-    
+
     val mt = (1 until 5).toSeq.iterator
     assert(mt.take(50).toList == List(1, 2, 3, 4))
   }
-  
+
 }
diff --git a/test/files/run/iterators.scala b/test/files/run/iterators.scala
index b85291c..57e05d3 100644
--- a/test/files/run/iterators.scala
+++ b/test/files/run/iterators.scala
@@ -4,6 +4,8 @@
 
 //############################################################################
 
+import scala.language.postfixOps
+
 object Test {
 
   def check_from: Int = {
@@ -82,16 +84,16 @@ object Test {
     var xs4 = a.slice(0, 4).iterator.toList;
     xs0.length + xs1.length + xs2.length + xs3.length + xs4.length
   }
-  
+
   def check_toSeq: String =
     List(1, 2, 3, 4, 5).iterator.toSeq.mkString("x")
-  
+
   def check_indexOf: String = {
     val i = List(1, 2, 3, 4, 5).indexOf(4)
     val j = List(1, 2, 3, 4, 5).indexOf(16)
     "" + i + "x" + j
   }
-  
+
   def check_findIndexOf: String = {
     val i = List(1, 2, 3, 4, 5).indexWhere { x: Int => x >= 4 }
     val j = List(1, 2, 3, 4, 5).indexWhere { x: Int => x >= 16 }
diff --git a/test/files/run/java-erasure.scala b/test/files/run/java-erasure.scala
index 0441ad7..c9f9b0a 100644
--- a/test/files/run/java-erasure.scala
+++ b/test/files/run/java-erasure.scala
@@ -3,7 +3,7 @@ object Test {
   list add "a"
   list add "c"
   list add "b"
-  
+
   def main(args: Array[String]): Unit = {
     println(java.util.Collections.max(list))
   }
diff --git a/test/files/run/json.check b/test/files/run/json.check
deleted file mode 100644
index d4d2b41..0000000
--- a/test/files/run/json.check
+++ /dev/null
@@ -1,21 +0,0 @@
-Passed compare: {"name" : "value"}
-Passed compare: {"name" : "va1ue"}
-Passed compare: {"name" : {"name1" : "va1ue1", "name2" : "va1ue2"}}
-Passed parse  : {"name" : "\""}
-Passed compare: Map(function -> add_symbol)
-Passed compare: [{"a" : "team"}, {"b" : 52.0}]
-Passed compare: Map()
-Passed compare: List()
-Passed compare: [4.0, 1.0, 3.0, 2.0, 6.0, 5.0, 8.0, 7.0]
-Passed parse  : {"age" : 0.0}
-Passed compare: {"name" : "va1ue"}
-Passed compare: {"name" : {"name1" : "va1ue1", "name2" : "va1ue2"}}
-Passed compare: [4.0, 1.0, 3.0, 2.0, 6.0, 5.0, 8.0, 7.0]
-Passed compare: {"\u006e\u0061\u006d\u0065" : "\u0076\u0061\u006c"}
-
-Passed compare: Map(firstName -> John, lastName -> Smith, address -> Map(streetAddress -> 21 2nd Street, city -> New York, state -> NY, postalCode -> 10021.0), phoneNumbers -> List(212 732-1234, 646 123-4567))
-
-Passed parse  : {"addresses" : [{"format" : "us", "type" : "work", "value" : "1234 Main StnSpringfield, TX 78080-1216"}, {"format" : "us", "type" : "home", "value" : "5678 Main StnSpringfield, TX 78080-1316"}], "emailaddrs" : [{"type" : "work", "value" : "kelly at seankelly.biz"}, {"pref" : 1.0, "type" : "home", "value" : "kelly at seankelly.tv"}], "fullname" : "Sean Kelly", "org" : "SK Consulting", "telephones" : [{"pref" : 1.0, "type" : "work", "value" : "+1 214 555 1212"}, {"type" : "fax",  [...]
-
-Passed parse  : {"web-app" : {"servlet" : [{"init-param" : {"cachePackageTagsRefresh" : 60.0, "cachePackageTagsStore" : 200.0, "cachePackageTagsTrack" : 200.0, "cachePagesDirtyRead" : 10.0, "cachePagesRefresh" : 10.0, "cachePagesStore" : 100.0, "cachePagesTrack" : 200.0, "cacheTemplatesRefresh" : 15.0, "cacheTemplatesStore" : 50.0, "cacheTemplatesTrack" : 100.0, "configGlossary:adminEmail" : "ksm at pobox.com", "configGlossary:installationAt" : "Philadelphia, PA", "configGlossary:poweredBy" [...]
-
diff --git a/test/files/run/json.scala b/test/files/run/json.scala
deleted file mode 100644
index a81f125..0000000
--- a/test/files/run/json.scala
+++ /dev/null
@@ -1,283 +0,0 @@
-import scala.util.parsing.json._
-import scala.collection.immutable.TreeMap
-
-object Test extends App {
-  /* This method converts parsed JSON back into real JSON notation with objects in
-   * sorted-key order. Not required by the spec, but it allows us to do a stable
-   * toString comparison. */
-  def jsonToString(in : Any) : String = in match {
-    case l : List[_] => "[" + l.map(jsonToString).mkString(", ") + "]"
-    case m : Map[String,_] => "{" + m.iterator.toList
-         .sortWith({ (x,y) => x._1 < y._1 })
-         .map({ case (k,v) => "\"" + k + "\": " + jsonToString(v) })
-         .mkString(", ") + "}"
-    case s : String => "\"" + s + "\""
-    case x => x.toString
-  }
-
-  /*
-   * This method takes input JSON values and sorts keys on objects.
-   */
-  def sortJSON(in : Any) : Any = in match {
-    case l : List[_] => l.map(sortJSON)
-    case m : Map[String,_] => TreeMap(m.mapValues(sortJSON).iterator.toSeq : _*)
-    // For the object versions, sort their contents, ugly casts and all...
-    case JSONObject(data) => JSONObject(sortJSON(data).asInstanceOf[Map[String,Any]])
-    case JSONArray(data) => JSONArray(sortJSON(data).asInstanceOf[List[Any]])
-    case x => x
-  }
-
-  // For this one, just parsing should be considered a pass
-  def printJSON(given : String) {
-    JSON parseRaw given match {
-      case None => println("Parse failed for \"%s\"".format(given))
-      case Some(parsed) => println("Passed parse  : " + sortJSON(parsed))
-    }
-  }
-   
-  // For this usage, do a raw parse (to JSONObject/JSONArray)
-  def printJSON(given : String, expected : JSONType) {
-    printJSON(given, JSON.parseRaw, expected)
-  }
-
-  // For this usage, do a raw parse (to JSONType and subclasses)
-  def printJSONFull(given : String, expected : Any) {
-    printJSON(given, JSON.parseFull, expected)
-  }
-
-  // For this usage, do configurable parsing so that you can do raw if desired
-  def printJSON[T](given : String, parser : String => T, expected : Any) {
-    parser(given) match {
-      case None => println("Parse failed for \"%s\"".format(given))
-      case Some(parsed) => if (parsed == expected) {
-        println("Passed compare: " + parsed)
-      } else {
-        val eStr = sortJSON(expected).toString
-        val pStr = sortJSON(parsed).toString
-        stringDiff(eStr,pStr)
-      }
-    }
-  }
-
-  def stringDiff (expected : String, actual : String) {
-    if (expected != actual) {
-      // Figure out where the Strings differ and generate a marker
-        val mismatchPosition = expected.toList.zip(actual.toList).indexWhere({case (x,y) => x != y}) match {
-          case -1 => Math.min(expected.length, actual.length)
-          case x => x
-        }
-        val reason = (" " * mismatchPosition) + "^"
-        println("Expected: %s\nGot     : %s \n          %s".format(expected, actual, reason))
-
-    } else {
-      println("Passed compare: " + actual)
-    }
-  }
-
-
-  // The library should differentiate between lower case "l" and number "1" (ticket #136)
-  printJSON("{\"name\" : \"value\"}", JSONObject(Map("name" -> "value")))
-  printJSON("{\"name\" : \"va1ue\"}", JSONObject(Map("name" -> "va1ue")))
-  printJSON("{\"name\" : { \"name1\" : \"va1ue1\", \"name2\" : \"va1ue2\" } }",
-            JSONObject(Map("name" -> JSONObject(Map("name1" -> "va1ue1", "name2" -> "va1ue2")))))
-
-  // Unicode escapes should be handled properly
-  printJSON("{\"name\" : \"\\u0022\"}")
-
-  // The library should return a map for JSON objects (ticket #873)
-  printJSONFull("{\"function\" : \"add_symbol\"}", Map("function" -> "add_symbol"))
-
-  // The library should recurse into arrays to find objects (ticket #2207)
-  printJSON("[{\"a\" : \"team\"},{\"b\" : 52}]", JSONArray(List(JSONObject(Map("a" -> "team")), JSONObject(Map("b" -> 52.0)))))
-  
-  // The library should differentiate between empty maps and lists (ticket #3284)
-  printJSONFull("{}", Map()) 
-  printJSONFull("[]", List())
-  
-  // Lists should be returned in the same order as specified
-  printJSON("[4,1,3,2,6,5,8,7]", JSONArray(List[Double](4,1,3,2,6,5,8,7)))
-
-  // Additional tests
-  printJSON("{\"age\": 0}")
-
-  // The library should do a proper toString representation using default and custom renderers (ticket #3605)
-  stringDiff("{\"name\" : \"va1ue\"}", JSONObject(Map("name" -> "va1ue")).toString)
-  stringDiff("{\"name\" : {\"name1\" : \"va1ue1\", \"name2\" : \"va1ue2\"}}",
-             JSONObject(Map("name" -> JSONObject(TreeMap("name1" -> "va1ue1", "name2" -> "va1ue2")))).toString)
-
-  stringDiff("[4.0, 1.0, 3.0, 2.0, 6.0, 5.0, 8.0, 7.0]", JSONArray(List[Double](4,1,3,2,6,5,8,7)).toString)
-
-  // A test method that escapes all characters in strings
-  def escapeEverything (in : Any) : String = in match {
-    case s : String => "\"" + s.map(c => "\\u%04x".format(c : Int)).mkString + "\""
-    case jo : JSONObject => jo.toString(escapeEverything)
-    case ja : JSONArray => ja.toString(escapeEverything)
-    case other => other.toString
-  }
-
-  stringDiff("{\"\\u006e\\u0061\\u006d\\u0065\" : \"\\u0076\\u0061\\u006c\"}", JSONObject(Map("name" -> "val")).toString(escapeEverything))
-
-  println
-
-  // from http://en.wikipedia.org/wiki/JSON
-  val sample1 = """
-{
-    "firstName": "John",
-    "lastName": "Smith",
-    "address": {
-        "streetAddress": "21 2nd Street",
-        "city": "New York",
-        "state": "NY",
-        "postalCode": 10021
-    },
-    "phoneNumbers": [
-        "212 732-1234",
-        "646 123-4567"
-    ]
-}"""
-
-  // Should be equivalent to:
-  val sample1Obj = Map(
-    "firstName" -> "John",
-    "lastName" -> "Smith",
-    "address" -> Map(
-      "streetAddress" -> "21 2nd Street",
-      "city" -> "New York",
-      "state" -> "NY",
-      "postalCode" -> 10021
-    ),
-    "phoneNumbers"-> List(
-        "212 732-1234",
-        "646 123-4567"
-    )
-  )
-
-  
-  printJSONFull(sample1, sample1Obj)
-  println
-
-  // from http://www.developer.com/lang/jscript/article.php/3596836
-  val sample2 = """
-{
-   "fullname": "Sean Kelly",
-   "org": "SK Consulting",
-   "emailaddrs": [
-      {"type": "work", "value": "kelly at seankelly.biz"},
-      {"type": "home", "pref": 1, "value": "kelly at seankelly.tv"}
-   ],
-    "telephones": [
-      {"type": "work", "pref": 1, "value": "+1 214 555 1212"},
-      {"type": "fax", "value": "+1 214 555 1213"},
-      {"type": "mobile", "value": "+1 214 555 1214"}
-   ],
-   "addresses": [
-      {"type": "work", "format": "us",
-       "value": "1234 Main StnSpringfield, TX 78080-1216"},
-      {"type": "home", "format": "us",
-       "value": "5678 Main StnSpringfield, TX 78080-1316"}
-   ],
-    "urls": [
-      {"type": "work", "value": "http://seankelly.biz/"},
-      {"type": "home", "value": "http://seankelly.tv/"}
-   ]
-}"""
-
-  printJSON(sample2)
-  println
-
-  // from http://json.org/example.html
-  val sample3 = """
-{"web-app": {
-  "servlet": [   
-    {
-      "servlet-name": "cofaxCDS",
-      "servlet-class": "org.cofax.cds.CDSServlet",
-      "init-param": {
-        "configGlossary:installationAt": "Philadelphia, PA",
-        "configGlossary:adminEmail": "ksm at pobox.com",
-        "configGlossary:poweredBy": "Cofax",
-        "configGlossary:poweredByIcon": "/images/cofax.gif",
-        "configGlossary:staticPath": "/content/static",
-        "templateProcessorClass": "org.cofax.WysiwygTemplate",
-        "templateLoaderClass": "org.cofax.FilesTemplateLoader",
-        "templatePath": "templates",
-        "templateOverridePath": "",
-        "defaultListTemplate": "listTemplate.htm",
-        "defaultFileTemplate": "articleTemplate.htm",
-        "useJSP": false,
-        "jspListTemplate": "listTemplate.jsp",
-        "jspFileTemplate": "articleTemplate.jsp",
-        "cachePackageTagsTrack": 200,
-        "cachePackageTagsStore": 200,
-        "cachePackageTagsRefresh": 60,
-        "cacheTemplatesTrack": 100,
-        "cacheTemplatesStore": 50,
-        "cacheTemplatesRefresh": 15,
-        "cachePagesTrack": 200,
-        "cachePagesStore": 100,
-        "cachePagesRefresh": 10,
-        "cachePagesDirtyRead": 10,
-        "searchEngineListTemplate": "forSearchEnginesList.htm",
-        "searchEngineFileTemplate": "forSearchEngines.htm",
-        "searchEngineRobotsDb": "WEB-INF/robots.db",
-        "useDataStore": true,
-        "dataStoreClass": "org.cofax.SqlDataStore",
-        "redirectionClass": "org.cofax.SqlRedirection",
-        "dataStoreName": "cofax",
-        "dataStoreDriver": "com.microsoft.jdbc.sqlserver.SQLServerDriver",
-        "dataStoreUrl": "jdbc:microsoft:sqlserver://LOCALHOST:1433;DatabaseName=goon",
-        "dataStoreUser": "sa",
-        "dataStorePassword": "dataStoreTestQuery",
-        "dataStoreTestQuery": "SET NOCOUNT ON;select test='test';",
-        "dataStoreLogFile": "/usr/local/tomcat/logs/datastore.log",
-        "dataStoreInitConns": 10,
-        "dataStoreMaxConns": 100,
-        "dataStoreConnUsageLimit": 100,
-        "dataStoreLogLevel": "debug",
-        "maxUrlLength": 500}},
-    {
-      "servlet-name": "cofaxEmail",
-      "servlet-class": "org.cofax.cds.EmailServlet",
-      "init-param": {
-      "mailHost": "mail1",
-      "mailHostOverride": "mail2"}},
-    {
-      "servlet-name": "cofaxAdmin",
-      "servlet-class": "org.cofax.cds.AdminServlet"},
- 
-    {
-      "servlet-name": "fileServlet",
-      "servlet-class": "org.cofax.cds.FileServlet"},
-    {
-      "servlet-name": "cofaxTools",
-      "servlet-class": "org.cofax.cms.CofaxToolsServlet",
-      "init-param": {
-        "templatePath": "toolstemplates/",
-        "log": 1,
-        "logLocation": "/usr/local/tomcat/logs/CofaxTools.log",
-        "logMaxSize": "",
-        "dataLog": 1,
-        "dataLogLocation": "/usr/local/tomcat/logs/dataLog.log",
-        "dataLogMaxSize": "",
-        "removePageCache": "/content/admin/remove?cache=pages&id=",
-        "removeTemplateCache": "/content/admin/remove?cache=templates&id=",
-        "fileTransferFolder": "/usr/local/tomcat/webapps/content/fileTransferFolder",
-        "lookInContext": 1,
-        "adminGroupID": 4,
-        "betaServer": true}}],
-  "servlet-mapping": {
-    "cofaxCDS": "/",
-    "cofaxEmail": "/cofaxutil/aemail/*",
-    "cofaxAdmin": "/admin/*",
-    "fileServlet": "/static/*",
-    "cofaxTools": "/tools/*"},
- 
-  "taglib": {
-    "taglib-uri": "cofax.tld",
-    "taglib-location": "/WEB-INF/tlds/cofax.tld"}
-  }
-}"""
-
-  printJSON(sample3)
-  println
-}
diff --git a/test/files/run/jtptest.check b/test/files/run/jtptest.check
deleted file mode 100644
index 95dbd28..0000000
--- a/test/files/run/jtptest.check
+++ /dev/null
@@ -1,7 +0,0 @@
-[1.4] parsed: 1.1
-[1.3] parsed: 1.
-[1.3] parsed: .1
-[1.1] failure: string matching regex `(\d+(\.\d*)?|\d*\.\d+)' expected but `!' found
-
-!1
-^
diff --git a/test/files/run/jtptest.scala b/test/files/run/jtptest.scala
deleted file mode 100644
index 4d0eef9..0000000
--- a/test/files/run/jtptest.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-
-import scala.util.parsing.combinator.JavaTokenParsers
-import scala.util.parsing.input.CharArrayReader
-
-object TestJavaTokenParsers extends JavaTokenParsers {
-}
-
-object Test {
-  import TestJavaTokenParsers._
-
-  def main(args : Array[String]) {
-    println(decimalNumber(new CharArrayReader("1.1".toCharArray)))
-    println(decimalNumber(new CharArrayReader("1.".toCharArray)))
-    println(decimalNumber(new CharArrayReader(".1".toCharArray)))
-    println(decimalNumber(new CharArrayReader("!1".toCharArray)))
-  }
-}
diff --git a/test/files/run/kind-repl-command.check b/test/files/run/kind-repl-command.check
new file mode 100644
index 0000000..1c29257
--- /dev/null
+++ b/test/files/run/kind-repl-command.check
@@ -0,0 +1,28 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> :kind scala.Option
+scala.Option's kind is F[+A]
+
+scala> :k (Int, Int) => Int
+scala.Function2's kind is F[-A1,-A2,+A3]
+
+scala> :k -v Either
+scala.util.Either's kind is F[+A1,+A2]
+* -(+)-> * -(+)-> *
+This is a type constructor: a 1st-order-kinded type.
+
+scala> :k -v scala.collection.generic.ImmutableSortedMapFactory
+scala.collection.generic.ImmutableSortedMapFactory's kind is X[CC[A,B] <: scala.collection.immutable.SortedMap[A,B] with scala.collection.SortedMapLike[A,B,CC[A,B]]]
+(* -> * -> *(scala.collection.immutable.SortedMap[A,B] with scala.collection.SortedMapLike[A,B,CC[A,B]])) -> *
+This is a type constructor that takes type constructor(s): a higher-kinded type.
+
+scala> :k new { def empty = false }
+AnyRef{def empty: Boolean}'s kind is A
+
+scala> :k Nonexisting
+<console>:8: error: not found: value Nonexisting
+              Nonexisting
+              ^
+
+scala> 
diff --git a/test/files/run/kind-repl-command.scala b/test/files/run/kind-repl-command.scala
new file mode 100644
index 0000000..df1fafb
--- /dev/null
+++ b/test/files/run/kind-repl-command.scala
@@ -0,0 +1,12 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+  def code = """
+  |:kind scala.Option
+  |:k (Int, Int) => Int
+  |:k -v Either
+  |:k -v scala.collection.generic.ImmutableSortedMapFactory
+  |:k new { def empty = false }
+  |:k Nonexisting
+  """.stripMargin
+}
diff --git a/test/files/run/kmpSliceSearch.scala b/test/files/run/kmpSliceSearch.scala
index 0f7e052..e72f78b 100644
--- a/test/files/run/kmpSliceSearch.scala
+++ b/test/files/run/kmpSliceSearch.scala
@@ -12,7 +12,7 @@ object Test {
   }
   def main(args: Array[String]) {
     val rng = new scala.util.Random(java.lang.Integer.parseInt("kmp",36))
-    
+
     // Make sure we agree with naive implementation
     for (h <- Array(2,5,1000)) {
       for (i <- 0 to 100) {
@@ -38,7 +38,7 @@ object Test {
         }
       }
     }
-    
+
     // Check performance^Wcorrectness of common small test cases
     val haystacks = List[Seq[Int]](
       Array(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15),
@@ -52,8 +52,8 @@ object Test {
       List(1,1,1,1,1,2),
       5 to 9
     )
-    (haystacks zip needles) foreach { 
-      case (hay, nee) => 
+    (haystacks zip needles) foreach {
+      case (hay, nee) =>
         println(hay.indexOfSlice(nee,2) + " " + hay.lastIndexOfSlice(nee,13))
     }
   }
diff --git a/test/files/run/lazy-exprs.check b/test/files/run/lazy-exprs.check
index e77d204..2efb8ce 100644
--- a/test/files/run/lazy-exprs.check
+++ b/test/files/run/lazy-exprs.check
@@ -1,3 +1,11 @@
+lazy-exprs.scala:38: warning: match may not be exhaustive.
+It would fail on the following input: Some((x: String forSome x not in Z1))
+    t match {
+    ^
+lazy-exprs.scala:62: warning: match may not be exhaustive.
+It would fail on the following input: Some((x: String forSome x not in LazyField))
+    t match {
+    ^
 forced <z1>
 lazy val in scrutinee: ok
 forced <z1>
diff --git a/test/files/run/lazy-exprs.scala b/test/files/run/lazy-exprs.scala
index fc724bd..204c4b5 100644
--- a/test/files/run/lazy-exprs.scala
+++ b/test/files/run/lazy-exprs.scala
@@ -2,7 +2,7 @@ object TestExpressions {
 
   def patmatchScrut {
     lazy val z1: Option[String] = { println("forced <z1>"); Some("lazy z1") }
-    
+
     val res = z1 match {
       case Some(msg) => msg
       case None => "failed"
@@ -17,10 +17,10 @@ object TestExpressions {
   def patmatchCase {
     val t: Option[String] = Some("test")
     val res = t match {
-      case Some(msg) => 
+      case Some(msg) =>
         lazy val z1 = { println("forced <z1>"); "lazy z1" }
         z1
-    
+
       case None => "failed"
     }
     print("lazy val in case: ")
@@ -36,9 +36,9 @@ object TestExpressions {
     print("lazy val in case: ")
     val t: Option[String] = Some("lazy Z1")
     t match {
-      case Some(Z1) => 
+      case Some(Z1) =>
         println("ok")
-    
+
       case None =>
         println("failed")
     }
@@ -60,13 +60,13 @@ object TestExpressions {
     print("lazy val in pattern: ")
     val t: Option[String] = Some("LazyField")
     t match {
-      case Some(LazyField) => 
+      case Some(LazyField) =>
         println("ok")
-    
+
       case None =>
         println("failed")
     }
-  }    
+  }
 
   lazy val (x, y) = ({print("x"); "x"}, {print("y"); "y"})
   def testPatLazyVal {
diff --git a/test/files/run/lazy-locals.check b/test/files/run/lazy-locals.check
index d1cc754..9e88a55 100644
--- a/test/files/run/lazy-locals.check
+++ b/test/files/run/lazy-locals.check
@@ -1,3 +1,9 @@
+lazy-locals.scala:153: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+  {
+  ^
+lazy-locals.scala:159: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    {
+    ^
 forced lazy val q
 q = 10
 forced lazy val t
diff --git a/test/files/run/lazy-locals.scala b/test/files/run/lazy-locals.scala
index aca15d0..b28b28e 100644
--- a/test/files/run/lazy-locals.scala
+++ b/test/files/run/lazy-locals.scala
@@ -59,7 +59,7 @@ object Test extends App {
     val sum3 = t00 + t01 + t02 + t03 + t04 + t05 + t06 + t07 + t08 +
               t09 + t10 + t11 + t12 + t13 + t14 + t15 + t16 + t17 +
               t18 + t19 + t20 + t21 + t22 + t23 + t24 + t25 + t26 +
-              t27 + t28 + t29 + t30 + t31 
+              t27 + t28 + t29 + t30 + t31
 
 
 
@@ -120,7 +120,7 @@ object Test extends App {
       t
   }
 
-  /** test recursive method with lazy vals and a all vals forced */
+  /** test recursive method with lazy vals and all vals forced */
   def testLazyRecMany(n: Int): Int = {
     lazy val t = { println("forced lazy val t at n = " + n); 42 }
     if (n > 0) {
@@ -175,18 +175,18 @@ object Test extends App {
 
   // see #1589
   object NestedLazyVals {
-    lazy val x = { 
+    lazy val x = {
       lazy val y = { println("forcing y"); 42; }
       println("forcing x")
-      y 
+      y
     }
-    
+
     val x1 = 5 + { lazy val y = 10 ; y }
-    
+
     println(x)
     println(x1)
   }
-  
+
   trait TNestedLazyVals {
     lazy val x = { lazy val y = 42; y }
   }
diff --git a/test/files/run/lazy-override-run.scala b/test/files/run/lazy-override-run.scala
index d197408..6016c3c 100644
--- a/test/files/run/lazy-override-run.scala
+++ b/test/files/run/lazy-override-run.scala
@@ -20,7 +20,7 @@ object Test extends App {
 
  val b = new B
  print("b.x=")
- println(b.x) 
+ println(b.x)
  print("b.z=")
- println(b.z) 
+ println(b.z)
 }
diff --git a/test/files/run/lazy-traits.scala b/test/files/run/lazy-traits.scala
index f04c0b8..3820767 100644
--- a/test/files/run/lazy-traits.scala
+++ b/test/files/run/lazy-traits.scala
@@ -1,5 +1,5 @@
 trait A {
-  lazy val z1 = { 
+  lazy val z1 = {
     println("<forced z1>")
     "lazy z1"
   }
@@ -7,7 +7,7 @@ trait A {
 
 /** Simple class which mixes in one lazy val. */
 class Cls extends AnyRef with A {
-  override def toString = 
+  override def toString =
     "z1 = " + z1
 }
 
@@ -18,7 +18,7 @@ class Cls2 extends AnyRef with A {
     "lazy z2"
   }
 
-  override def toString = 
+  override def toString =
     "z1 = " + z1 + " z2 = " + z2
 }
 
@@ -34,7 +34,7 @@ class ClsB extends Object with B {
     println("<forced zc1>")
     "lazy zc1"
   }
-  override def toString = 
+  override def toString =
     "z1 = " + z1 + " zb1 = " + zb1 + " zc1 = " + zc1
 }
 
@@ -73,39 +73,39 @@ class OverflownLazyFields extends Object with A {
   lazy val zc30 = { println("<forced zc30>"); "lazy zc30" }
   lazy val zc31 = { println("<forced zc31>"); "lazy zc31" }
 
-  override def toString = 
-    "\nzc00 = " + zc00 + 
-    "\nzc01 = " + zc01 + 
-    "\nzc02 = " + zc02 + 
-    "\nzc03 = " + zc03 + 
-    "\nzc04 = " + zc04 + 
-    "\nzc05 = " + zc05 + 
-    "\nzc06 = " + zc06 + 
-    "\nzc07 = " + zc07 + 
-    "\nzc08 = " + zc08 + 
-    "\nzc09 = " + zc09 + 
-    "\nzc10 = " + zc10 + 
-    "\nzc11 = " + zc11 + 
-    "\nzc12 = " + zc12 + 
-    "\nzc13 = " + zc13 + 
-    "\nzc14 = " + zc14 + 
-    "\nzc15 = " + zc15 + 
-    "\nzc16 = " + zc16 + 
-    "\nzc17 = " + zc17 + 
-    "\nzc18 = " + zc18 + 
-    "\nzc19 = " + zc19 + 
-    "\nzc20 = " + zc20 + 
-    "\nzc21 = " + zc21 + 
-    "\nzc22 = " + zc22 + 
-    "\nzc23 = " + zc23 + 
-    "\nzc24 = " + zc24 + 
-    "\nzc25 = " + zc25 + 
-    "\nzc26 = " + zc26 + 
-    "\nzc27 = " + zc27 + 
-    "\nzc28 = " + zc28 + 
-    "\nzc29 = " + zc29 + 
-    "\nzc30 = " + zc30 + 
-    "\nzc31 = " + zc31 + 
+  override def toString =
+    "\nzc00 = " + zc00 +
+    "\nzc01 = " + zc01 +
+    "\nzc02 = " + zc02 +
+    "\nzc03 = " + zc03 +
+    "\nzc04 = " + zc04 +
+    "\nzc05 = " + zc05 +
+    "\nzc06 = " + zc06 +
+    "\nzc07 = " + zc07 +
+    "\nzc08 = " + zc08 +
+    "\nzc09 = " + zc09 +
+    "\nzc10 = " + zc10 +
+    "\nzc11 = " + zc11 +
+    "\nzc12 = " + zc12 +
+    "\nzc13 = " + zc13 +
+    "\nzc14 = " + zc14 +
+    "\nzc15 = " + zc15 +
+    "\nzc16 = " + zc16 +
+    "\nzc17 = " + zc17 +
+    "\nzc18 = " + zc18 +
+    "\nzc19 = " + zc19 +
+    "\nzc20 = " + zc20 +
+    "\nzc21 = " + zc21 +
+    "\nzc22 = " + zc22 +
+    "\nzc23 = " + zc23 +
+    "\nzc24 = " + zc24 +
+    "\nzc25 = " + zc25 +
+    "\nzc26 = " + zc26 +
+    "\nzc27 = " + zc27 +
+    "\nzc28 = " + zc28 +
+    "\nzc29 = " + zc29 +
+    "\nzc30 = " + zc30 +
+    "\nzc31 = " + zc31 +
     "\nz1 = " + z1
 }
 
diff --git a/test/files/run/lift-and-unlift.scala b/test/files/run/lift-and-unlift.scala
index a4a5d95..9cd8566 100644
--- a/test/files/run/lift-and-unlift.scala
+++ b/test/files/run/lift-and-unlift.scala
@@ -5,21 +5,21 @@ object Test {
   val evens2: PartialFunction[Int, Int] = {
     case x if x % 2 == 0  => x
   }
-  
+
   def main(args: Array[String]): Unit = {
     val f1 = evens1 _
     val f2 = evens2.lift
-    
+
     assert(1 to 10 forall (x => f1(x) == f2(x)))
-    
+
     val f3 = unlift(f1)
     val f4 = unlift(f2)
-    
+
     assert(1 to 10 forall { x =>
       if (!f3.isDefinedAt(x)) !f4.isDefinedAt(x)
       else f3(x) == f4(x)
     })
-    
+
     assert(f1 eq f3.lift)
     assert(f4 eq unlift(f2))
     assert(f4 eq evens2)
diff --git a/test/files/run/list_map.scala b/test/files/run/list_map.scala
new file mode 100755
index 0000000..fba3aae
--- /dev/null
+++ b/test/files/run/list_map.scala
@@ -0,0 +1,26 @@
+import collection.immutable.ListMap
+
+object Test {
+  def testImmutableMinus() {
+    val empty = ListMap.empty[Int, Int]
+
+    val m0 = ListMap(1 -> 1, 2 -> 2)
+    val m1 = m0 - 3
+    assert (m1 eq m0)
+    val m2 = m0 - 1
+    assert (m2.size == 1)
+    val m3 = m2 - 2
+    assert (m3 eq empty)
+
+    val m4 = ListMap(1 -> 1, 2 -> 2, 3 -> 3)
+    val m5 = m4 - 1
+    assert (m5 == ListMap(2 -> 2, 3 -> 3))
+    assert (m5.toList == (2, 2)::(3, 3)::Nil)
+
+    assert ((empty - 1) eq empty)
+  }
+
+  def main(args: Array[String]) {
+    testImmutableMinus()
+  }
+}
diff --git a/test/files/run/lists-run.scala b/test/files/run/lists-run.scala
index ccfe5bc..05767b9 100644
--- a/test/files/run/lists-run.scala
+++ b/test/files/run/lists-run.scala
@@ -2,9 +2,11 @@
  *
  *  @author Stephane Micheloud
  */
+import scala.language.postfixOps
+
 object Test {
   def main(args: Array[String]) {
-    Test_multiset.run() // multiset operations: union, intersect, diff 
+    Test_multiset.run() // multiset operations: union, intersect, diff
     Test1.run() //count, exists, filter, ..
     Test2.run() //#468
     Test3.run() //#1691
@@ -54,7 +56,7 @@ object Test_multiset {
     assert(List(3, 2) == (vs diff xs), "vs_diff_xs")
     assert(isSubListOf(xs filterNot (vs contains), xs diff vs), "xs_subset_vs")
 
-    // tests adapted from Thomas Jung 
+    // tests adapted from Thomas Jung
     assert({
         def sort(zs: List[Int]) = zs sortWith ( _ > _ )
         sort(xs intersect ys) == sort(ys intersect xs)
@@ -62,7 +64,7 @@ object Test_multiset {
     assert({
         def cardinality[A](zs: List[A], e: A): Int = zs count (e == _)
         val intersection = xs intersect ys
-        xs forall (e => cardinality(intersection, e) == (cardinality(xs, e) 
+        xs forall (e => cardinality(intersection, e) == (cardinality(xs, e)
 min cardinality(ys, e)))
       }, "obey min cardinality")
     assert({
@@ -133,7 +135,7 @@ object Test2 {
   def run() {
     val xs1 = List(1, 2, 3)
     val xs2 = List(0)
- 
+
     val ys1 = xs1 ::: List(4)
     assert(List(1, 2, 3, 4) == ys1, "check_:::")
 
@@ -153,7 +155,7 @@ object Test3 {
       List.range(1, 10, 0)
     } catch {
       case e: IllegalArgumentException => ()
-      case _ => throw new Error("List.range(1, 10, 0)")
+      case _: Throwable => throw new Error("List.range(1, 10, 0)")
     }
     assert(List.range(10, 0, -2) == List(10, 8, 6, 4, 2))
   }
diff --git a/test/files/run/literals.check b/test/files/run/literals.check
index f53c879..ed7c6ca 100644
--- a/test/files/run/literals.check
+++ b/test/files/run/literals.check
@@ -1,3 +1,4 @@
+warning: there were 5 deprecation warning(s); re-run with -deprecation for details
 test '\u0024' == '$' was successful
 test '\u005f' == '_' was successful
 test 65.asInstanceOf[Char] == 'A' was successful
@@ -6,28 +7,19 @@ test "\0x61\0x62".trim() == "x61\0x62" was successful
 
 test (65 : Byte) == 'A' was successful
 
-test 01 == 1 was successful
-test 010 == 8 was successful
 test 0X01 == 1 was successful
 test 0x01 == 1 was successful
 test 0x10 == 16 was successful
 test 0xa == 10 was successful
 test 0x0a == 10 was successful
-test +01 == 1 was successful
-test +010 == 8 was successful
 test +0x01 == 1 was successful
 test +0x10 == 16 was successful
 test +0xa == 10 was successful
 test +0x0a == 10 was successful
-test -01 == -1 was successful
-test -010 == -8 was successful
 test -0x01 == -1 was successful
 test -0x10 == -16 was successful
 test -0xa == -10 was successful
 test -0x0a == -10 was successful
-test 017777777777 == 2147483647 was successful
-test 020000000000 == -2147483648 was successful
-test 037777777777 == -1 was successful
 test 0x7fffffff == 2147483647 was successful
 test 0x80000000 == -2147483648 was successful
 test 0xffffffff == -1 was successful
@@ -35,17 +27,14 @@ test 0xffffffff == -1 was successful
 test 1l == 1L was successful
 test 1L == 1l was successful
 test 1.asInstanceOf[Long] == 1l was successful
-test 0777777777777777777777L == 9223372036854775807L was successful
-test 01000000000000000000000L == -9223372036854775808L was successful
-test 01777777777777777777777L == -1L was successful
 test 0x7fffffffffffffffL == 9223372036854775807L was successful
 test 0x8000000000000000L == -9223372036854775808L was successful
 test 0xffffffffffffffffL == -1L was successful
 
 test 1e1f == 10.0f was successful
-test 2.f == 2.0f was successful
 test .3f == 0.3f was successful
 test 0f == 0.0f was successful
+test 01.23f == 1.23f was successful
 test 3.14f == 3.14f was successful
 test 6.022e23f == 6.022e23f was successful
 test 09f == 9.0f was successful
@@ -53,11 +42,11 @@ test 1.asInstanceOf[Float] == 1.0 was successful
 test 1l.asInstanceOf[Float] == 1.0 was successful
 
 test 1e1 == 10.0 was successful
-test 2. == 2.0 was successful
-test 2.d == 2.0 was successful
 test .3 == 0.3 was successful
 test 0.0 == 0.0 was successful
 test 0d == 0.0 was successful
+test 01.23 == 1.23 was successful
+test 01.23d == 1.23d was successful
 test 3.14 == 3.14 was successful
 test 1e-9d == 1.0e-9 was successful
 test 1e137 == 1.0e137 was successful
diff --git a/test/files/run/literals.scala b/test/files/run/literals.scala
index 32bc29f..5f23e6b 100644
--- a/test/files/run/literals.scala
+++ b/test/files/run/literals.scala
@@ -47,32 +47,22 @@ object Test {
     println
 
     // int
-    check_success("01 == 1", 01, 1)
-    check_success("010 == 8", 010, 8)
     check_success("0X01 == 1", 0X01, 1)
     check_success("0x01 == 1", 0x01, 1)
     check_success("0x10 == 16", 0x10, 16)
     check_success("0xa == 10", 0xa, 10)
     check_success("0x0a == 10", 0x0a, 10)
 
-    check_success("+01 == 1", +01, 1)
-    check_success("+010 == 8", +010, 8)
     check_success("+0x01 == 1", +0x01, 1)
     check_success("+0x10 == 16", +0x10, 16)
     check_success("+0xa == 10", +0xa, 10)
     check_success("+0x0a == 10", +0x0a, 10)
 
-    check_success("-01 == -1", -01, -1)
-    check_success("-010 == -8", -010, -8)
     check_success("-0x01 == -1", -0x01, -1)
     check_success("-0x10 == -16", -0x10, -16)
     check_success("-0xa == -10", -0xa, -10)
     check_success("-0x0a == -10", -0x0a, -10)
 
-    check_success("017777777777 == 2147483647", 017777777777, 2147483647)
-    check_success("020000000000 == -2147483648", 020000000000, -2147483648)
-    check_success("037777777777 == -1", 037777777777, -1)
-
     check_success("0x7fffffff == 2147483647", 0x7fffffff, 2147483647)
     check_success("0x80000000 == -2147483648", 0x80000000, -2147483648)
     check_success("0xffffffff == -1", 0xffffffff, -1)
@@ -84,13 +74,6 @@ object Test {
     check_success("1L == 1l", 1L, 1l)
     check_success("1.asInstanceOf[Long] == 1l", 1.asInstanceOf[Long], 1l)
 
-    check_success("0777777777777777777777L == 9223372036854775807L",
-      0777777777777777777777L, 9223372036854775807L)
-    check_success("01000000000000000000000L == -9223372036854775808L",
-      01000000000000000000000L, -9223372036854775808L)
-    check_success("01777777777777777777777L == -1L",
-      01777777777777777777777L, -1L)
-
     check_success("0x7fffffffffffffffL == 9223372036854775807L",
       0x7fffffffffffffffL, 9223372036854775807L)
     check_success("0x8000000000000000L == -9223372036854775808L",
@@ -105,9 +88,9 @@ object Test {
 
     // float
     check_success("1e1f == 10.0f", 1e1f, 10.0f)
-    check_success("2.f == 2.0f", 2.f, 2.0f)
     check_success(".3f == 0.3f", .3f, 0.3f)
     check_success("0f == 0.0f", 0f, 0.0f)
+    check_success("01.23f == 1.23f", 01.23f, 1.23f)
     check_success("3.14f == 3.14f", 3.14f, 3.14f)
     check_success("6.022e23f == 6.022e23f", 6.022e23f, 6.022e23f)
     check_success("09f == 9.0f", 09f, 9.0f)
@@ -118,11 +101,11 @@ object Test {
 
     // double
     check_success("1e1 == 10.0", 1e1, 10.0)
-    check_success("2. == 2.0", 2., 2.0)
-    check_success("2.d == 2.0", 2.d, 2.0)
     check_success(".3 == 0.3", .3, 0.3)
     check_success("0.0 == 0.0", 0.0, 0.0)
     check_success("0d == 0.0", 0d, 0.0)
+    check_success("01.23 == 1.23", 01.23, 1.23)
+    check_success("01.23d == 1.23d", 01.23d, 1.23d)
     check_success("3.14 == 3.14", 3.14, 3.14)
     check_success("1e-9d == 1.0e-9", 1e-9d, 1.0e-9)
     check_success("1e137 == 1.0e137", 1e137, 1.0e137)
diff --git a/test/files/run/longmap.scala b/test/files/run/longmap.scala
new file mode 100644
index 0000000..1f18eeb
--- /dev/null
+++ b/test/files/run/longmap.scala
@@ -0,0 +1,8 @@
+object Test extends App{
+  import scala.collection.immutable.LongMap;
+
+  val it = LongMap(8L -> 2, 11L -> 3, 1L -> 2, 7L -> 13);
+
+  assert(it.firstKey == 1L);
+  assert(it.lastKey == 11L);
+}
diff --git a/test/files/run/lub-visibility.check b/test/files/run/lub-visibility.check
index 3461d1b..f765794 100644
--- a/test/files/run/lub-visibility.check
+++ b/test/files/run/lub-visibility.check
@@ -1,14 +1,11 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> // should infer List[scala.collection.immutable.Seq[Nothing]]
 
 scala> // but reverted that for SI-5534.
 
 scala> val x = List(List(), Vector())
-x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq{def dropRight(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def takeRight(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def drop(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.Abstr [...]
-scala> 
+x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing] with java.io.Serializable] = List(List(), Vector())
 
 scala> 
diff --git a/test/files/run/macro-abort-fresh.check b/test/files/run/macro-abort-fresh.check
index 75ad5e7..5064b96 100644
--- a/test/files/run/macro-abort-fresh.check
+++ b/test/files/run/macro-abort-fresh.check
@@ -1,6 +1,6 @@
-$1$
-qwe1
-qwe2
-reflective compilation has failed: 
+fresh$macro$1
+qwe$macro$2
+qwe$macro$3
+reflective compilation has failed:
 
 blargh
diff --git a/test/files/run/macro-abort-fresh/Macros_1.scala b/test/files/run/macro-abort-fresh/Macros_1.scala
index af1e292..2b03512 100644
--- a/test/files/run/macro-abort-fresh/Macros_1.scala
+++ b/test/files/run/macro-abort-fresh/Macros_1.scala
@@ -1,11 +1,11 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
   def impl(c: Context) = {
     import c.universe._
     println(c.fresh())
     println(c.fresh("qwe"))
-    println(c.fresh(newTypeName("qwe")))
+    println(c.fresh(TypeName("qwe")))
     c.abort(NoPosition, "blargh")
   }
 }
diff --git a/test/files/run/macro-abort-fresh/Test_2.scala b/test/files/run/macro-abort-fresh/Test_2.scala
index 0b9986e..61f0bdf 100644
--- a/test/files/run/macro-abort-fresh/Test_2.scala
+++ b/test/files/run/macro-abort-fresh/Test_2.scala
@@ -2,7 +2,7 @@ object Test extends App {
   import scala.reflect.runtime.universe._
   import scala.reflect.runtime.{currentMirror => cm}
   import scala.tools.reflect.ToolBox
-  val tree = Select(Ident(newTermName("Macros")), newTermName("foo"))
+  val tree = Select(Ident(TermName("Macros")), TermName("foo"))
   try cm.mkToolBox().eval(tree)
   catch { case ex: Throwable =>  println(ex.getMessage) }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-auto-duplicate/Macros_1.scala b/test/files/run/macro-auto-duplicate/Macros_1.scala
index e3df05b..2c910e6 100644
--- a/test/files/run/macro-auto-duplicate/Macros_1.scala
+++ b/test/files/run/macro-auto-duplicate/Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 import language.experimental.macros
 
 object Macros {
diff --git a/test/files/run/macro-basic-ma-md-mi/Impls_1.scala b/test/files/run/macro-basic-ma-md-mi/Impls_1.scala
index 646634c..fc75b99 100644
--- a/test/files/run/macro-basic-ma-md-mi/Impls_1.scala
+++ b/test/files/run/macro-basic-ma-md-mi/Impls_1.scala
@@ -1,21 +1,21 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def foo(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1))))
     c.Expr[Int](body)
   }
 
-  def bar(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def bar(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2))))
     c.Expr[Int](body)
   }
 
-  def quux(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def quux(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3))))
     c.Expr[Int](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala b/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala
index aa1e52e..73a5a97 100644
--- a/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala
+++ b/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala
@@ -1,21 +1,21 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def foo(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1))))
     c.Expr[Int](body)
   }
 
-  def bar(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def bar(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2))))
     c.Expr[Int](body)
   }
 
-  def quux(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def quux(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3))))
     c.Expr[Int](body)
   }
 }
diff --git a/test/files/run/macro-basic-mamd-mi/Impls_1.scala b/test/files/run/macro-basic-mamd-mi/Impls_1.scala
index 061aa2d..0be915c 100644
--- a/test/files/run/macro-basic-mamd-mi/Impls_1.scala
+++ b/test/files/run/macro-basic-mamd-mi/Impls_1.scala
@@ -1,19 +1,19 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def foo(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1)))))
+    c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1)))))
   }
 
-  def bar(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def bar(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2)))))
+    c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2)))))
   }
 
-  def quux(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+  def quux(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3))))
     c.Expr[Int](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-blackbox-materialization.check b/test/files/run/macro-blackbox-materialization.check
new file mode 100644
index 0000000..7165b73
--- /dev/null
+++ b/test/files/run/macro-blackbox-materialization.check
@@ -0,0 +1,3 @@
+C(Int)
+C(String)
+C(Nothing)
diff --git a/test/files/run/macro-blackbox-materialization/Macros_1.scala b/test/files/run/macro-blackbox-materialization/Macros_1.scala
new file mode 100644
index 0000000..ea8d1be
--- /dev/null
+++ b/test/files/run/macro-blackbox-materialization/Macros_1.scala
@@ -0,0 +1,16 @@
+// For the full version of the test, take a look at run/t5923a
+
+import scala.reflect.macros.blackbox.Context
+import language.experimental.macros
+
+case class C[T](t: String)
+object C {
+  implicit def foo[T]: C[T] = macro Macros.impl[T]
+}
+
+object Macros {
+  def impl[T: c.WeakTypeTag](c: Context) = {
+    import c.universe._
+    reify(C[T](c.literal(weakTypeOf[T].toString).splice))
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-blackbox-materialization/Test_2.scala b/test/files/run/macro-blackbox-materialization/Test_2.scala
new file mode 100644
index 0000000..001ff9a
--- /dev/null
+++ b/test/files/run/macro-blackbox-materialization/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+  println(implicitly[C[Int]])
+  println(implicitly[C[String]])
+  println(implicitly[C[Nothing]])
+}
\ No newline at end of file
diff --git a/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala b/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala
index 0ca0be5..d46af49 100644
--- a/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala
+++ b/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala
@@ -1,12 +1,16 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.{Context => BlackboxContext}
+import scala.reflect.macros.whitebox.{Context => WhiteboxContext}
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int]) = x
+  def foo(c: BlackboxContext)(x: c.Expr[Int]) = x
 
-  def refToFoo(dummy: Int) = macro refToFoo_impl
-  def refToFoo_impl(c: Ctx)(dummy: c.Expr[Int]) = {
+  def refToFoo(dummy: Int): Int = macro refToFoo_impl
+  def refToFoo_impl(c: WhiteboxContext)(dummy: c.Expr[Int]) = {
     import c.universe._
-    val body = Select(Ident(newTermName("Impls")), newTermName("foo"))
+    val body = Select(Ident(TermName("Impls")), TermName("foo"))
+    val global = c.universe.asInstanceOf[scala.tools.nsc.Global]
+    global.analyzer.markMacroImplRef(body.asInstanceOf[global.Tree])
     c.Expr[Int](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-bodyexpandstoimpl/Macros_Test_2.scala b/test/files/run/macro-bodyexpandstoimpl/Macros_Test_2.scala
index b589d4b..486e1de 100644
--- a/test/files/run/macro-bodyexpandstoimpl/Macros_Test_2.scala
+++ b/test/files/run/macro-bodyexpandstoimpl/Macros_Test_2.scala
@@ -1,7 +1,7 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.language.experimental.macros
 
 object Macros {
-  def foo(x: Int) = macro Impls.refToFoo(42)
+  def foo(x: Int): Int = macro Impls.refToFoo(42)
 }
 
 object Test extends App {
diff --git a/test/files/run/macro-bundle-context-alias.check b/test/files/run/macro-bundle-context-alias.check
new file mode 100644
index 0000000..55e4dfc
--- /dev/null
+++ b/test/files/run/macro-bundle-context-alias.check
@@ -0,0 +1,4 @@
+C
+C
+C
+C
diff --git a/test/files/run/macro-bundle-context-alias/Macros_1.scala b/test/files/run/macro-bundle-context-alias/Macros_1.scala
new file mode 100644
index 0000000..354c5e0
--- /dev/null
+++ b/test/files/run/macro-bundle-context-alias/Macros_1.scala
@@ -0,0 +1,38 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.{Context => BlackboxContext}
+import scala.reflect.macros.whitebox.{Context => WhiteboxContext}
+
+object Module {
+  type BBC = BlackboxContext
+  type RBBC = BBC { type PrefixType = C }
+  type WBC = WhiteboxContext
+  type RWBC = WBC { type PrefixType = C }
+
+  class BlackboxBundle(val c: BBC) {
+    import c.universe._
+    def impl = q"${c.prefix}"
+  }
+
+  class RefinedBlackboxBundle(val c: RBBC) {
+    import c.universe._
+    def impl = reify(c.prefix.splice)
+  }
+
+  class WhiteboxBundle(val c: WBC) {
+    import c.universe._
+    def impl = q"${c.prefix}"
+  }
+
+  class RefinedWhiteboxBundle(val c: RWBC) {
+    import c.universe._
+    def impl = reify(c.prefix.splice)
+  }
+}
+
+class C {
+  def blackbox: C = macro Module.BlackboxBundle.impl
+  def refinedBlackbox: C = macro Module.RefinedBlackboxBundle.impl
+  def whitebox: C = macro Module.WhiteboxBundle.impl
+  def refinedWhitebox: C = macro Module.RefinedWhiteboxBundle.impl
+  override def toString = "C"
+}
\ No newline at end of file
diff --git a/test/files/run/macro-bundle-context-alias/Test_2.scala b/test/files/run/macro-bundle-context-alias/Test_2.scala
new file mode 100644
index 0000000..de499cc
--- /dev/null
+++ b/test/files/run/macro-bundle-context-alias/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+  println(new C().blackbox)
+  println(new C().refinedBlackbox)
+  println(new C().whitebox)
+  println(new C().refinedWhitebox)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-bundle-context-refinement.check b/test/files/run/macro-bundle-context-refinement.check
new file mode 100644
index 0000000..10f9ebb
--- /dev/null
+++ b/test/files/run/macro-bundle-context-refinement.check
@@ -0,0 +1,2 @@
+C
+C
diff --git a/test/files/run/macro-bundle-context-refinement/Macros_1.scala b/test/files/run/macro-bundle-context-refinement/Macros_1.scala
new file mode 100644
index 0000000..d3a5d17
--- /dev/null
+++ b/test/files/run/macro-bundle-context-refinement/Macros_1.scala
@@ -0,0 +1,19 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.{Context => BlackboxContext}
+import scala.reflect.macros.whitebox.{Context => WhiteboxContext}
+
+class BlackboxBundle(val c: BlackboxContext { type PrefixType = C }) {
+  import c.universe._
+  def impl = reify(c.prefix.splice)
+}
+
+class WhiteboxBundle(val c: WhiteboxContext { type PrefixType = C }) {
+  import c.universe._
+  def impl = reify(c.prefix.splice)
+}
+
+class C {
+  def blackbox: C = macro BlackboxBundle.impl
+  def whitebox: C = macro WhiteboxBundle.impl
+  override def toString = "C"
+}
\ No newline at end of file
diff --git a/test/files/run/macro-bundle-context-refinement/Test_2.scala b/test/files/run/macro-bundle-context-refinement/Test_2.scala
new file mode 100644
index 0000000..43d641a
--- /dev/null
+++ b/test/files/run/macro-bundle-context-refinement/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  println(new C().blackbox)
+  println(new C().whitebox)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-bundle-repl.check b/test/files/run/macro-bundle-repl.check
new file mode 100644
index 0000000..4a0b421
--- /dev/null
+++ b/test/files/run/macro-bundle-repl.check
@@ -0,0 +1,24 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> import scala.language.experimental.macros
+import scala.language.experimental.macros
+
+scala> import scala.reflect.macros.blackbox.Context
+import scala.reflect.macros.blackbox.Context
+
+scala> class Bar(val c: Context) { def impl = { import c.universe._; c.Expr[Unit](q"()") } };def bar: Unit = macro Bar.impl
+defined class Bar
+defined term macro bar: Unit
+
+scala> bar
+
+scala> class Foo(val c: Context) { def impl = { import c.universe._; c.Expr[Unit](q"()") } }
+defined class Foo
+
+scala> def foo: Unit = macro Foo.impl
+defined term macro foo: Unit
+
+scala> foo
+
+scala> 
diff --git a/test/files/run/macro-bundle-repl.scala b/test/files/run/macro-bundle-repl.scala
new file mode 100644
index 0000000..8084418
--- /dev/null
+++ b/test/files/run/macro-bundle-repl.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+  def code = """
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+class Bar(val c: Context) { def impl = { import c.universe._; c.Expr[Unit](q"()") } };def bar: Unit = macro Bar.impl
+bar
+class Foo(val c: Context) { def impl = { import c.universe._; c.Expr[Unit](q"()") } }
+def foo: Unit = macro Foo.impl
+foo
+  """
+}
diff --git a/test/files/run/macro-bundle-static.check b/test/files/run/macro-bundle-static.check
new file mode 100644
index 0000000..37c8eaf
--- /dev/null
+++ b/test/files/run/macro-bundle-static.check
@@ -0,0 +1,6 @@
+()
+Int
+()
+true
+IntInt
+true
diff --git a/test/files/run/macro-bundle-static/Impls_Macros_1.scala b/test/files/run/macro-bundle-static/Impls_Macros_1.scala
new file mode 100644
index 0000000..0142e5d
--- /dev/null
+++ b/test/files/run/macro-bundle-static/Impls_Macros_1.scala
@@ -0,0 +1,30 @@
+import scala.reflect.macros.blackbox.Context
+import scala.language.experimental.macros
+
+object Enclosing {
+  class Impl(val c: Context) {
+    def mono = { import c.universe._; c.Expr[Unit](q"()") }
+    def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString}") }
+    def weird = macro mono
+  }
+}
+
+object Macros {
+  def mono = macro Enclosing.Impl.mono
+  def poly[T] = macro Enclosing.Impl.poly[T]
+}
+
+package pkg {
+  object Enclosing {
+    class Impl(val c: Context) {
+      def mono = { import c.universe._; c.Expr[Boolean](q"true") }
+      def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString + c.weakTypeOf[T].toString}") }
+      def weird = macro mono
+    }
+  }
+
+  object Macros {
+    def mono = macro Enclosing.Impl.mono
+    def poly[T] = macro Enclosing.Impl.poly[T]
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-bundle-static/Test_2.scala b/test/files/run/macro-bundle-static/Test_2.scala
new file mode 100644
index 0000000..e35260c
--- /dev/null
+++ b/test/files/run/macro-bundle-static/Test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+  println(Macros.mono)
+  println(Macros.poly[Int])
+  println(new Enclosing.Impl(???).weird)
+  println(pkg.Macros.mono)
+  println(pkg.Macros.poly[Int])
+  println(new pkg.Enclosing.Impl(???).weird)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-bundle-toplevel.check b/test/files/run/macro-bundle-toplevel.check
new file mode 100644
index 0000000..37c8eaf
--- /dev/null
+++ b/test/files/run/macro-bundle-toplevel.check
@@ -0,0 +1,6 @@
+()
+Int
+()
+true
+IntInt
+true
diff --git a/test/files/neg/macro-invalidimpl-h.flags b/test/files/run/macro-bundle-toplevel.flags
similarity index 100%
rename from test/files/neg/macro-invalidimpl-h.flags
rename to test/files/run/macro-bundle-toplevel.flags
diff --git a/test/files/run/macro-bundle-toplevel/Impls_Macros_1.scala b/test/files/run/macro-bundle-toplevel/Impls_Macros_1.scala
new file mode 100644
index 0000000..6fd7be3
--- /dev/null
+++ b/test/files/run/macro-bundle-toplevel/Impls_Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.blackbox.Context
+
+class Impl(val c: Context) {
+  def mono = { import c.universe._; c.Expr[Unit](q"()") }
+  def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString}") }
+  def weird = macro mono
+}
+
+object Macros {
+  def mono = macro Impl.mono
+  def poly[T] = macro Impl.poly[T]
+}
+
+package pkg {
+  class Impl(val c: Context) {
+    def mono = { import c.universe._; c.Expr[Boolean](q"true") }
+    def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString + c.weakTypeOf[T].toString}") }
+    def weird = macro mono
+  }
+
+  object Macros {
+    def mono = macro Impl.mono
+    def poly[T] = macro Impl.poly[T]
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-bundle-toplevel/Test_2.scala b/test/files/run/macro-bundle-toplevel/Test_2.scala
new file mode 100644
index 0000000..195fb49
--- /dev/null
+++ b/test/files/run/macro-bundle-toplevel/Test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+  println(Macros.mono)
+  println(Macros.poly[Int])
+  println(new Impl(???).weird)
+  println(pkg.Macros.mono)
+  println(pkg.Macros.poly[Int])
+  println(new pkg.Impl(???).weird)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-bundle-whitebox-decl.check b/test/files/run/macro-bundle-whitebox-decl.check
new file mode 100644
index 0000000..37c8eaf
--- /dev/null
+++ b/test/files/run/macro-bundle-whitebox-decl.check
@@ -0,0 +1,6 @@
+()
+Int
+()
+true
+IntInt
+true
diff --git a/test/files/run/macro-bundle-whitebox-decl/Impls_Macros_1.scala b/test/files/run/macro-bundle-whitebox-decl/Impls_Macros_1.scala
new file mode 100644
index 0000000..5e1b118
--- /dev/null
+++ b/test/files/run/macro-bundle-whitebox-decl/Impls_Macros_1.scala
@@ -0,0 +1,26 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.whitebox.Context
+
+class Impl(val c: Context) {
+  def mono = { import c.universe._; c.Expr[Unit](q"()") }
+  def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString}") }
+  def weird = macro mono
+}
+
+object Macros {
+  def mono = macro Impl.mono
+  def poly[T] = macro Impl.poly[T]
+}
+
+package pkg {
+  class Impl(val c: Context) {
+    def mono = { import c.universe._; c.Expr[Boolean](q"true") }
+    def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString + c.weakTypeOf[T].toString}") }
+    def weird = macro mono
+  }
+
+  object Macros {
+    def mono = macro Impl.mono
+    def poly[T] = macro Impl.poly[T]
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-bundle-whitebox-decl/Test_2.scala b/test/files/run/macro-bundle-whitebox-decl/Test_2.scala
new file mode 100644
index 0000000..195fb49
--- /dev/null
+++ b/test/files/run/macro-bundle-whitebox-decl/Test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+  println(Macros.mono)
+  println(Macros.poly[Int])
+  println(new Impl(???).weird)
+  println(pkg.Macros.mono)
+  println(pkg.Macros.poly[Int])
+  println(new pkg.Impl(???).weird)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-bundle-whitebox-use-raw.check b/test/files/run/macro-bundle-whitebox-use-raw.check
new file mode 100644
index 0000000..5679c5f
--- /dev/null
+++ b/test/files/run/macro-bundle-whitebox-use-raw.check
@@ -0,0 +1,5 @@
+2
+(23,foo,true)
+null
+C2
+42
diff --git a/test/files/run/macro-bundle-whitebox-use-raw/Macros_1.scala b/test/files/run/macro-bundle-whitebox-use-raw/Macros_1.scala
new file mode 100644
index 0000000..de18634
--- /dev/null
+++ b/test/files/run/macro-bundle-whitebox-use-raw/Macros_1.scala
@@ -0,0 +1,108 @@
+import scala.reflect.macros.whitebox.Context
+import scala.language.experimental.macros
+
+// whitebox use case #1: return type refinement
+
+class ReturnTypeRefinementBundle(val c: Context) {
+  import c.universe._
+  def impl = {
+    q"""
+      trait Foo {
+        def x = 2
+      }
+      new Foo {}
+    """
+  }
+}
+
+object ReturnTypeRefinement {
+  def foo: Any = macro ReturnTypeRefinementBundle.impl
+}
+
+// whitebox use case #2: fundep materialization
+
+trait FundepMaterialization[T, U] {
+  def to(t : T) : U
+  // def from(u : U) : T
+}
+
+class FundepMaterializationBundle(val c: Context) {
+  import c.universe._
+  import definitions._
+  import Flag._
+
+  def impl[T: c.WeakTypeTag, U: c.WeakTypeTag]: c.Expr[FundepMaterialization[T, U]] = {
+    val sym = c.weakTypeOf[T].typeSymbol
+    if (!sym.isClass || !sym.asClass.isCaseClass) c.abort(c.enclosingPosition, s"$sym is not a case class")
+    val fields = sym.info.decls.toList.collect{ case x: TermSymbol if x.isVal && x.isCaseAccessor => x }
+
+    def mkTpt() = {
+      val core = Ident(TupleClass(fields.length) orElse UnitClass)
+      if (fields.length == 0) core
+      else AppliedTypeTree(core, fields map (f => TypeTree(f.info)))
+    }
+
+    def mkFrom() = {
+      if (fields.length == 0) Literal(Constant(Unit))
+      else Apply(Ident(newTermName("Tuple" + fields.length)), fields map (f => Select(Ident(newTermName("f")), newTermName(f.name.toString.trim))))
+    }
+
+    val evidenceClass = ClassDef(Modifiers(FINAL), newTypeName("$anon"), List(), Template(
+      List(AppliedTypeTree(Ident(newTypeName("FundepMaterialization")), List(Ident(sym), mkTpt()))),
+      emptyValDef,
+      List(
+        DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(typeNames.EMPTY), typeNames.EMPTY), termNames.CONSTRUCTOR), List())), Literal(Constant(())))),
+        DefDef(Modifiers(), newTermName("to"), List(), List(List(ValDef(Modifiers(PARAM), newTermName("f"), Ident(sym), EmptyTree))), TypeTree(), mkFrom()))))
+    c.Expr[FundepMaterialization[T, U]](Block(List(evidenceClass), Apply(Select(New(Ident(newTypeName("$anon"))), termNames.CONSTRUCTOR), List())))
+  }
+}
+
+object FundepMaterialization {
+  implicit def materializeIso[T, U]: FundepMaterialization[T, U] = macro FundepMaterializationBundle.impl[T, U]
+}
+
+// whitebox use case #3: dynamic materialization
+
+trait DynamicMaterialization[T]
+
+class C1(val x: Int)
+class C2(val x: String)
+
+trait LowPriority {
+  implicit def lessSpecific[T]: DynamicMaterialization[T] = null
+}
+
+object DynamicMaterialization extends LowPriority {
+  implicit def moreSpecific[T]: DynamicMaterialization[T] = macro DynamicMaterializationBundle.impl[T]
+}
+
+class DynamicMaterializationBundle(val c: Context) {
+  import c.universe._
+  def impl[T: c.WeakTypeTag] = {
+    val tpe = weakTypeOf[T]
+    if (tpe.members.exists(_.info =:= typeOf[Int]))
+      c.abort(c.enclosingPosition, "I don't like classes that contain integers")
+    q"new DynamicMaterialization[$tpe]{ override def toString = ${tpe.toString} }"
+  }
+}
+
+// whitebox use case #4: extractor macros
+
+object ExtractorMacro {
+  def unapply(x: Int): Any = macro ExtractorBundle.unapplyImpl
+}
+
+class ExtractorBundle(val c: Context) {
+  import c.universe._
+  def unapplyImpl(x: Tree) = {
+    q"""
+      new {
+        class Match(x: Int) {
+          def isEmpty = false
+          def get = x
+        }
+        def unapply(x: Int) = new Match(x)
+      }.unapply($x)
+    """
+  }
+}
diff --git a/test/files/run/macro-bundle-whitebox-use-raw/Test_2.scala b/test/files/run/macro-bundle-whitebox-use-raw/Test_2.scala
new file mode 100644
index 0000000..3a81700
--- /dev/null
+++ b/test/files/run/macro-bundle-whitebox-use-raw/Test_2.scala
@@ -0,0 +1,19 @@
+object Test extends App {
+  println(ReturnTypeRefinement.foo.x)
+
+  case class Foo(i: Int, s: String, b: Boolean)
+  def foo[C, L](c: C)(implicit iso: FundepMaterialization[C, L]): L = iso.to(c)
+  locally {
+    val equiv = foo(Foo(23, "foo", true))
+    def typed[T](t: => T) {}
+    typed[(Int, String, Boolean)](equiv)
+    println(equiv)
+  }
+
+  println(implicitly[DynamicMaterialization[C1]])
+  println(implicitly[DynamicMaterialization[C2]])
+
+  42 match {
+    case ExtractorMacro(x) => println(x)
+  }
+}
diff --git a/test/files/run/macro-bundle-whitebox-use-refined.check b/test/files/run/macro-bundle-whitebox-use-refined.check
new file mode 100644
index 0000000..5679c5f
--- /dev/null
+++ b/test/files/run/macro-bundle-whitebox-use-refined.check
@@ -0,0 +1,5 @@
+2
+(23,foo,true)
+null
+C2
+42
diff --git a/test/files/run/macro-bundle-whitebox-use-refined/Macros_1.scala b/test/files/run/macro-bundle-whitebox-use-refined/Macros_1.scala
new file mode 100644
index 0000000..de18634
--- /dev/null
+++ b/test/files/run/macro-bundle-whitebox-use-refined/Macros_1.scala
@@ -0,0 +1,108 @@
+import scala.reflect.macros.whitebox.Context
+import scala.language.experimental.macros
+
+// whitebox use case #1: return type refinement
+
+class ReturnTypeRefinementBundle(val c: Context) {
+  import c.universe._
+  def impl = {
+    q"""
+      trait Foo {
+        def x = 2
+      }
+      new Foo {}
+    """
+  }
+}
+
+object ReturnTypeRefinement {
+  def foo: Any = macro ReturnTypeRefinementBundle.impl
+}
+
+// whitebox use case #2: fundep materialization
+
+trait FundepMaterialization[T, U] {
+  def to(t : T) : U
+  // def from(u : U) : T
+}
+
+class FundepMaterializationBundle(val c: Context) {
+  import c.universe._
+  import definitions._
+  import Flag._
+
+  def impl[T: c.WeakTypeTag, U: c.WeakTypeTag]: c.Expr[FundepMaterialization[T, U]] = {
+    val sym = c.weakTypeOf[T].typeSymbol
+    if (!sym.isClass || !sym.asClass.isCaseClass) c.abort(c.enclosingPosition, s"$sym is not a case class")
+    val fields = sym.info.decls.toList.collect{ case x: TermSymbol if x.isVal && x.isCaseAccessor => x }
+
+    def mkTpt() = {
+      val core = Ident(TupleClass(fields.length) orElse UnitClass)
+      if (fields.length == 0) core
+      else AppliedTypeTree(core, fields map (f => TypeTree(f.info)))
+    }
+
+    def mkFrom() = {
+      if (fields.length == 0) Literal(Constant(Unit))
+      else Apply(Ident(newTermName("Tuple" + fields.length)), fields map (f => Select(Ident(newTermName("f")), newTermName(f.name.toString.trim))))
+    }
+
+    val evidenceClass = ClassDef(Modifiers(FINAL), newTypeName("$anon"), List(), Template(
+      List(AppliedTypeTree(Ident(newTypeName("FundepMaterialization")), List(Ident(sym), mkTpt()))),
+      emptyValDef,
+      List(
+        DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(typeNames.EMPTY), typeNames.EMPTY), termNames.CONSTRUCTOR), List())), Literal(Constant(())))),
+        DefDef(Modifiers(), newTermName("to"), List(), List(List(ValDef(Modifiers(PARAM), newTermName("f"), Ident(sym), EmptyTree))), TypeTree(), mkFrom()))))
+    c.Expr[FundepMaterialization[T, U]](Block(List(evidenceClass), Apply(Select(New(Ident(newTypeName("$anon"))), termNames.CONSTRUCTOR), List())))
+  }
+}
+
+object FundepMaterialization {
+  implicit def materializeIso[T, U]: FundepMaterialization[T, U] = macro FundepMaterializationBundle.impl[T, U]
+}
+
+// whitebox use case #3: dynamic materialization
+
+trait DynamicMaterialization[T]
+
+class C1(val x: Int)
+class C2(val x: String)
+
+trait LowPriority {
+  implicit def lessSpecific[T]: DynamicMaterialization[T] = null
+}
+
+object DynamicMaterialization extends LowPriority {
+  implicit def moreSpecific[T]: DynamicMaterialization[T] = macro DynamicMaterializationBundle.impl[T]
+}
+
+class DynamicMaterializationBundle(val c: Context) {
+  import c.universe._
+  def impl[T: c.WeakTypeTag] = {
+    val tpe = weakTypeOf[T]
+    if (tpe.members.exists(_.info =:= typeOf[Int]))
+      c.abort(c.enclosingPosition, "I don't like classes that contain integers")
+    q"new DynamicMaterialization[$tpe]{ override def toString = ${tpe.toString} }"
+  }
+}
+
+// whitebox use case #4: extractor macros
+
+object ExtractorMacro {
+  def unapply(x: Int): Any = macro ExtractorBundle.unapplyImpl
+}
+
+class ExtractorBundle(val c: Context) {
+  import c.universe._
+  def unapplyImpl(x: Tree) = {
+    q"""
+      new {
+        class Match(x: Int) {
+          def isEmpty = false
+          def get = x
+        }
+        def unapply(x: Int) = new Match(x)
+      }.unapply($x)
+    """
+  }
+}
diff --git a/test/files/run/macro-bundle-whitebox-use-refined/Test_2.scala b/test/files/run/macro-bundle-whitebox-use-refined/Test_2.scala
new file mode 100644
index 0000000..3a81700
--- /dev/null
+++ b/test/files/run/macro-bundle-whitebox-use-refined/Test_2.scala
@@ -0,0 +1,19 @@
+object Test extends App {
+  println(ReturnTypeRefinement.foo.x)
+
+  case class Foo(i: Int, s: String, b: Boolean)
+  def foo[C, L](c: C)(implicit iso: FundepMaterialization[C, L]): L = iso.to(c)
+  locally {
+    val equiv = foo(Foo(23, "foo", true))
+    def typed[T](t: => T) {}
+    typed[(Int, String, Boolean)](equiv)
+    println(equiv)
+  }
+
+  println(implicitly[DynamicMaterialization[C1]])
+  println(implicitly[DynamicMaterialization[C2]])
+
+  42 match {
+    case ExtractorMacro(x) => println(x)
+  }
+}
diff --git a/test/files/run/macro-declared-in-annotation.flags b/test/files/run/macro-declared-in-annotation.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-annotation.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-annotation/Impls_1.scala b/test/files/run/macro-declared-in-annotation/Impls_1.scala
deleted file mode 100644
index a11ee29..0000000
--- a/test/files/run/macro-declared-in-annotation/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Literal(Constant("this is deprecated")))
-    c.Expr[String](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-anonymous.flags b/test/files/run/macro-declared-in-anonymous.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-anonymous.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-anonymous/Impls_1.scala b/test/files/run/macro-declared-in-anonymous/Impls_1.scala
deleted file mode 100644
index 6f06f6d..0000000
--- a/test/files/run/macro-declared-in-anonymous/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-anonymous/Macros_Test_2.scala b/test/files/run/macro-declared-in-anonymous/Macros_Test_2.scala
deleted file mode 100644
index 8bd8c17..0000000
--- a/test/files/run/macro-declared-in-anonymous/Macros_Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
-  val macros = new { def foo = macro Impls.foo }
-  macros.foo
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-block.flags b/test/files/run/macro-declared-in-block.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-block.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-block/Impls_1.scala b/test/files/run/macro-declared-in-block/Impls_1.scala
deleted file mode 100644
index 6f06f6d..0000000
--- a/test/files/run/macro-declared-in-block/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-block/Macros_Test_2.scala b/test/files/run/macro-declared-in-block/Macros_Test_2.scala
deleted file mode 100644
index 69088e2..0000000
--- a/test/files/run/macro-declared-in-block/Macros_Test_2.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends App {
-  {
-    def foo = macro Impls.foo
-    foo
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-class.flags b/test/files/run/macro-declared-in-class-class.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-class-class.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-class/Impls_1.scala b/test/files/run/macro-declared-in-class-class/Impls_1.scala
deleted file mode 100644
index 6f06f6d..0000000
--- a/test/files/run/macro-declared-in-class-class/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-class/Macros_Test_2.scala b/test/files/run/macro-declared-in-class-class/Macros_Test_2.scala
deleted file mode 100644
index 871857a..0000000
--- a/test/files/run/macro-declared-in-class-class/Macros_Test_2.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-class Macros {
-  class Macros {
-    def foo = macro Impls.foo
-  }
-}
-
-object Test extends App {
-  val outer = new Macros()
-  new outer.Macros().foo
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-object.flags b/test/files/run/macro-declared-in-class-object.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-class-object.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-object/Impls_1.scala b/test/files/run/macro-declared-in-class-object/Impls_1.scala
deleted file mode 100644
index 6f06f6d..0000000
--- a/test/files/run/macro-declared-in-class-object/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-object/Macros_Test_2.scala b/test/files/run/macro-declared-in-class-object/Macros_Test_2.scala
deleted file mode 100644
index 994f9fe..0000000
--- a/test/files/run/macro-declared-in-class-object/Macros_Test_2.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-class Macros {
-  object Macros {
-    def foo = macro Impls.foo
-  }
-}
-
-object Test extends App {
-  val outer = new Macros()
-  outer.Macros.foo
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class.flags b/test/files/run/macro-declared-in-class.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-class.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class/Impls_1.scala b/test/files/run/macro-declared-in-class/Impls_1.scala
deleted file mode 100644
index 6f06f6d..0000000
--- a/test/files/run/macro-declared-in-class/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class/Macros_Test_2.scala b/test/files/run/macro-declared-in-class/Macros_Test_2.scala
deleted file mode 100644
index 1b9d13e..0000000
--- a/test/files/run/macro-declared-in-class/Macros_Test_2.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-class Macros {
-  def foo = macro Impls.foo
-}
-
-object Test extends App {
-  new Macros().foo
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-default-param.flags b/test/files/run/macro-declared-in-default-param.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-default-param.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-default-param/Impls_1.scala b/test/files/run/macro-declared-in-default-param/Impls_1.scala
deleted file mode 100644
index db1e5c7..0000000
--- a/test/files/run/macro-declared-in-default-param/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Literal(Constant("it works")))
-    c.Expr[String](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-default-param/Macros_Test_2.scala b/test/files/run/macro-declared-in-default-param/Macros_Test_2.scala
deleted file mode 100644
index 356029e..0000000
--- a/test/files/run/macro-declared-in-default-param/Macros_Test_2.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object Test extends App {
-  def foo(bar: String = { def foo = macro Impls.foo; foo }) = println(bar)
-
-  foo()
-  foo("it works")
-  foo()
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-implicit-class.flags b/test/files/run/macro-declared-in-implicit-class.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-implicit-class.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-implicit-class/Impls_Macros_1.scala b/test/files/run/macro-declared-in-implicit-class/Impls_Macros_1.scala
deleted file mode 100644
index 837b306..0000000
--- a/test/files/run/macro-declared-in-implicit-class/Impls_Macros_1.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def toOptionOfInt(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Apply(Ident(definitions.SomeModule), List(Select(Select(prefix.tree, newTermName("x")), newTermName("toInt")))))
-    c.Expr[Option[Int]](body)
-  }
-}
-
-object Macros {
-  implicit def foo(x: String): Foo = new Foo(x)
-
-  class Foo(val x: String) {
-    def toOptionOfInt = macro Impls.toOptionOfInt
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-method.flags b/test/files/run/macro-declared-in-method.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-method.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-method/Impls_1.scala b/test/files/run/macro-declared-in-method/Impls_1.scala
deleted file mode 100644
index 6f06f6d..0000000
--- a/test/files/run/macro-declared-in-method/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-method/Macros_Test_2.scala b/test/files/run/macro-declared-in-method/Macros_Test_2.scala
deleted file mode 100644
index ed5c8b7..0000000
--- a/test/files/run/macro-declared-in-method/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Test extends App {
-  def bar() = {
-    def foo = macro Impls.foo
-    foo
-  }
-
-  bar()
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-class.flags b/test/files/run/macro-declared-in-object-class.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-object-class.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-class/Impls_1.scala b/test/files/run/macro-declared-in-object-class/Impls_1.scala
deleted file mode 100644
index 6f06f6d..0000000
--- a/test/files/run/macro-declared-in-object-class/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-class/Macros_Test_2.scala b/test/files/run/macro-declared-in-object-class/Macros_Test_2.scala
deleted file mode 100644
index 204deed..0000000
--- a/test/files/run/macro-declared-in-object-class/Macros_Test_2.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-object Macros {
-  class Macros {
-    def foo = macro Impls.foo
-  }
-}
-
-object Test extends App {
-  val outer = Macros
-  new outer.Macros().foo
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-object.flags b/test/files/run/macro-declared-in-object-object.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-object-object.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-object/Impls_1.scala b/test/files/run/macro-declared-in-object-object/Impls_1.scala
deleted file mode 100644
index 6f06f6d..0000000
--- a/test/files/run/macro-declared-in-object-object/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-object/Macros_Test_2.scala b/test/files/run/macro-declared-in-object-object/Macros_Test_2.scala
deleted file mode 100644
index e261a50..0000000
--- a/test/files/run/macro-declared-in-object-object/Macros_Test_2.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-object Macros {
-  object Macros {
-    def foo = macro Impls.foo
-  }
-}
-
-object Test extends App {
-  val outer = Macros
-  outer.Macros.foo
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object.flags b/test/files/run/macro-declared-in-object.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-object.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object/Impls_1.scala b/test/files/run/macro-declared-in-object/Impls_1.scala
deleted file mode 100644
index 6f06f6d..0000000
--- a/test/files/run/macro-declared-in-object/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object/Macros_Test_2.scala b/test/files/run/macro-declared-in-object/Macros_Test_2.scala
deleted file mode 100644
index a5a4862..0000000
--- a/test/files/run/macro-declared-in-object/Macros_Test_2.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object Macros {
-  def foo = macro Impls.foo
-}
-
-object Test extends App {
-  Macros.foo
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-package-object.flags b/test/files/run/macro-declared-in-package-object.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-package-object.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-package-object/Impls_1.scala b/test/files/run/macro-declared-in-package-object/Impls_1.scala
deleted file mode 100644
index 6f06f6d..0000000
--- a/test/files/run/macro-declared-in-package-object/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-package-object/Macros_Test_2.scala b/test/files/run/macro-declared-in-package-object/Macros_Test_2.scala
deleted file mode 100644
index 54a5962..0000000
--- a/test/files/run/macro-declared-in-package-object/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package object Macros {
-  def foo = macro Impls.foo
-}
-
-object Test extends App {
-  import Macros._
-  foo
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-refinement.flags b/test/files/run/macro-declared-in-refinement.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-refinement.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-refinement/Impls_1.scala b/test/files/run/macro-declared-in-refinement/Impls_1.scala
deleted file mode 100644
index 6f06f6d..0000000
--- a/test/files/run/macro-declared-in-refinement/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-refinement/Macros_Test_2.scala b/test/files/run/macro-declared-in-refinement/Macros_Test_2.scala
deleted file mode 100644
index f746c2d..0000000
--- a/test/files/run/macro-declared-in-refinement/Macros_Test_2.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-class Base
-
-object Test extends App {
-  val macros = new Base { def foo = macro Impls.foo }
-  macros.foo
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-trait.check b/test/files/run/macro-declared-in-trait.check
deleted file mode 100644
index 0d70ac7..0000000
--- a/test/files/run/macro-declared-in-trait.check
+++ /dev/null
@@ -1,15 +0,0 @@
-prefix = Expr[Nothing]({
-  final class $anon extends AnyRef with Base {
-    def <init>(): anonymous class $anon = {
-      $anon.super.<init>();
-      ()
-    };
-    <empty>
-  };
-  new $anon()
-})
-it works
-prefix = Expr[Nothing](Macros)
-it works
-prefix = Expr[Nothing](new Macros())
-it works
diff --git a/test/files/run/macro-declared-in-trait.flags b/test/files/run/macro-declared-in-trait.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-declared-in-trait.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-trait/Impls_1.scala b/test/files/run/macro-declared-in-trait/Impls_1.scala
deleted file mode 100644
index 6f06f6d..0000000
--- a/test/files/run/macro-declared-in-trait/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx) = {
-    import c.{prefix => prefix}
-    import c.universe._
-    val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
-    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-trait/Macros_Test_2.scala b/test/files/run/macro-declared-in-trait/Macros_Test_2.scala
deleted file mode 100644
index f75906b..0000000
--- a/test/files/run/macro-declared-in-trait/Macros_Test_2.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-trait Base {
-  def foo = macro Impls.foo
-}
-
-object Macros extends Base
-
-class Macros extends Base
-
-object Test extends App {
-  (new Base {}).foo
-  Macros.foo
-  new Macros().foo
-}
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-a.check b/test/files/run/macro-def-infer-return-type-a.check
deleted file mode 100644
index f70d7bb..0000000
--- a/test/files/run/macro-def-infer-return-type-a.check
+++ /dev/null
@@ -1 +0,0 @@
-42
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-a.flags b/test/files/run/macro-def-infer-return-type-a.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-def-infer-return-type-a.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-a/Impls_1.scala b/test/files/run/macro-def-infer-return-type-a/Impls_1.scala
deleted file mode 100644
index 52c9f9c..0000000
--- a/test/files/run/macro-def-infer-return-type-a/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int]) = x
-}
diff --git a/test/files/run/macro-def-infer-return-type-a/Macros_Test_2.scala b/test/files/run/macro-def-infer-return-type-a/Macros_Test_2.scala
deleted file mode 100644
index 60fe9dc..0000000
--- a/test/files/run/macro-def-infer-return-type-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
-  def foo(x: Int) = macro Impls.foo
-  println(foo(42))
-}
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-b.check b/test/files/run/macro-def-infer-return-type-b.check
deleted file mode 100644
index ae2dc7a..0000000
--- a/test/files/run/macro-def-infer-return-type-b.check
+++ /dev/null
@@ -1,6 +0,0 @@
-reflective compilation has failed: 
-
-exception during macro expansion: 
-java.lang.Error: an implementation is missing
-	at Impls$.foo(Impls_Macros_1.scala:5)
-
diff --git a/test/files/run/macro-def-infer-return-type-b.flags b/test/files/run/macro-def-infer-return-type-b.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-def-infer-return-type-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-b/Impls_Macros_1.scala b/test/files/run/macro-def-infer-return-type-b/Impls_Macros_1.scala
deleted file mode 100644
index 8a0f18c..0000000
--- a/test/files/run/macro-def-infer-return-type-b/Impls_Macros_1.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[T](c: Ctx)(x: c.Expr[T]) =
-    throw new Error("an implementation is missing")
-}
-
-object Macros {
-  def foo[T](x: T) = macro Impls.foo[T]
-}
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-b/Test_2.scala b/test/files/run/macro-def-infer-return-type-b/Test_2.scala
deleted file mode 100644
index ea0fd4b..0000000
--- a/test/files/run/macro-def-infer-return-type-b/Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Test extends App {
-  import scala.reflect.runtime.universe._
-  import scala.reflect.runtime.{currentMirror => cm}
-  import scala.tools.reflect.ToolBox
-  val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42))))
-  try cm.mkToolBox().eval(tree)
-  catch { case ex: Throwable =>  println(ex.getMessage) }
-}
diff --git a/test/files/run/macro-def-infer-return-type-c.check b/test/files/run/macro-def-infer-return-type-c.check
deleted file mode 100644
index f70d7bb..0000000
--- a/test/files/run/macro-def-infer-return-type-c.check
+++ /dev/null
@@ -1 +0,0 @@
-42
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-c.flags b/test/files/run/macro-def-infer-return-type-c.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-def-infer-return-type-c.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-c/Impls_1.scala b/test/files/run/macro-def-infer-return-type-c/Impls_1.scala
deleted file mode 100644
index 78db67e..0000000
--- a/test/files/run/macro-def-infer-return-type-c/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[T](c: Ctx)(x: c.Expr[T]): c.Expr[T] = x
-}
diff --git a/test/files/run/macro-def-infer-return-type-c/Macros_Test_2.scala b/test/files/run/macro-def-infer-return-type-c/Macros_Test_2.scala
deleted file mode 100644
index 967d16f..0000000
--- a/test/files/run/macro-def-infer-return-type-c/Macros_Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
-  def foo[T](x: T) = macro Impls.foo[T]
-  println(foo(42))
-}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-a.flags b/test/files/run/macro-def-path-dependent-a.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-def-path-dependent-a.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-a/Impls_Macros_1.scala b/test/files/run/macro-def-path-dependent-a/Impls_Macros_1.scala
deleted file mode 100644
index 3a91e41..0000000
--- a/test/files/run/macro-def-path-dependent-a/Impls_Macros_1.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-trait Exprs {
-  self: Universe =>
-
-  class Expr[T]
-}
-
-trait Reifiers {
-  self: Universe =>
-
-  type Expr[T]
-
-  def reify[T](expr: T) = macro Impls.reify[T]
-}
-
-trait Universe extends Exprs with Reifiers
-
-object Impls {
-  def reify[T](cc: Ctx{ type PrefixType = Reifiers })(expr: cc.Expr[T]): cc.Expr[cc.prefix.value.Expr[T]] = ???
-}
diff --git a/test/files/run/macro-def-path-dependent-b.check b/test/files/run/macro-def-path-dependent-b.check
deleted file mode 100644
index 7658ad2..0000000
--- a/test/files/run/macro-def-path-dependent-b.check
+++ /dev/null
@@ -1 +0,0 @@
-it works
diff --git a/test/files/run/macro-def-path-dependent-b.flags b/test/files/run/macro-def-path-dependent-b.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-def-path-dependent-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-b/Impls_Macros_1.scala b/test/files/run/macro-def-path-dependent-b/Impls_Macros_1.scala
deleted file mode 100644
index cf9f9eb..0000000
--- a/test/files/run/macro-def-path-dependent-b/Impls_Macros_1.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-trait Exprs {
-  self: Universe =>
-
-  class Expr[T]
-}
-
-trait Reifiers {
-  self: Universe =>
-
-}
-
-trait Universe extends Exprs with Reifiers {
-  def reify[T](expr: T) = macro Impls.reify[T]
-}
-
-object Impls {
-  def reify[T](cc: Ctx{ type PrefixType = Universe })(expr: cc.Expr[T]): cc.Expr[cc.prefix.value.Expr[T]] = ???
-}
diff --git a/test/files/run/macro-def-path-dependent-b/Test_2.scala b/test/files/run/macro-def-path-dependent-b/Test_2.scala
deleted file mode 100644
index 7dffc51..0000000
--- a/test/files/run/macro-def-path-dependent-b/Test_2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends App {
-  println("it works")
-}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-c.check b/test/files/run/macro-def-path-dependent-c.check
deleted file mode 100644
index 7658ad2..0000000
--- a/test/files/run/macro-def-path-dependent-c.check
+++ /dev/null
@@ -1 +0,0 @@
-it works
diff --git a/test/files/run/macro-def-path-dependent-c.flags b/test/files/run/macro-def-path-dependent-c.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-def-path-dependent-c.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-c/Impls_Macros_1.scala b/test/files/run/macro-def-path-dependent-c/Impls_Macros_1.scala
deleted file mode 100644
index 6cb374d..0000000
--- a/test/files/run/macro-def-path-dependent-c/Impls_Macros_1.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-trait Exprs {
-  self: Universe =>
-
-  class Expr[T]
-}
-
-trait Reifiers {
-  self: Universe =>
-
-}
-
-trait Universe extends Exprs with Reifiers {
-  def reify[T](expr: T): Expr[T] = macro Impls.reify[T]
-}
-
-object Impls {
-  def reify[T](cc: Ctx{ type PrefixType = Universe })(expr: cc.Expr[T]): cc.Expr[cc.prefix.value.Expr[T]] = ???
-}
diff --git a/test/files/run/macro-def-path-dependent-c/Test_2.scala b/test/files/run/macro-def-path-dependent-c/Test_2.scala
deleted file mode 100644
index 7dffc51..0000000
--- a/test/files/run/macro-def-path-dependent-c/Test_2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends App {
-  println("it works")
-}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d1.check b/test/files/run/macro-def-path-dependent-d1.check
deleted file mode 100644
index 7658ad2..0000000
--- a/test/files/run/macro-def-path-dependent-d1.check
+++ /dev/null
@@ -1 +0,0 @@
-it works
diff --git a/test/files/run/macro-def-path-dependent-d1.flags b/test/files/run/macro-def-path-dependent-d1.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-def-path-dependent-d1.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d1/Impls_Macros_1.scala b/test/files/run/macro-def-path-dependent-d1/Impls_Macros_1.scala
deleted file mode 100644
index 69d9708..0000000
--- a/test/files/run/macro-def-path-dependent-d1/Impls_Macros_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.Context
-import scala.reflect.api.Universe
-
-object Test {
-  def materializeTypeTag[T](u: Universe)(e: T) = macro materializeTypeTag_impl[T]
-
-  def materializeTypeTag_impl[T: c.WeakTypeTag](c: Context)(u: c.Expr[Universe])(e: c.Expr[T]): c.Expr[u.value.TypeTag[T]] = ???
-}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d1/Test_2.scala b/test/files/run/macro-def-path-dependent-d1/Test_2.scala
deleted file mode 100644
index 7dffc51..0000000
--- a/test/files/run/macro-def-path-dependent-d1/Test_2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends App {
-  println("it works")
-}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d2.check b/test/files/run/macro-def-path-dependent-d2.check
deleted file mode 100644
index 7658ad2..0000000
--- a/test/files/run/macro-def-path-dependent-d2.check
+++ /dev/null
@@ -1 +0,0 @@
-it works
diff --git a/test/files/run/macro-def-path-dependent-d2.flags b/test/files/run/macro-def-path-dependent-d2.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-def-path-dependent-d2.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d2/Impls_1.scala b/test/files/run/macro-def-path-dependent-d2/Impls_1.scala
deleted file mode 100644
index 7fa9c35..0000000
--- a/test/files/run/macro-def-path-dependent-d2/Impls_1.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.Context
-import scala.reflect.api.Universe
-
-object Impls {
-  def materializeTypeTag_impl[T: c.WeakTypeTag](c: Context)(u: c.Expr[Universe])(e: c.Expr[T]): c.Expr[u.value.TypeTag[T]] = ???
-}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d2/Macros_2.scala b/test/files/run/macro-def-path-dependent-d2/Macros_2.scala
deleted file mode 100644
index 65ce4d8..0000000
--- a/test/files/run/macro-def-path-dependent-d2/Macros_2.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.Context
-import scala.reflect.api.Universe
-
-object Macros {
-  def materializeTypeTag[T](u: Universe)(e: T) = macro Impls.materializeTypeTag_impl[T]
-}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d2/Test_3.scala b/test/files/run/macro-def-path-dependent-d2/Test_3.scala
deleted file mode 100644
index 7dffc51..0000000
--- a/test/files/run/macro-def-path-dependent-d2/Test_3.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends App {
-  println("it works")
-}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-annotation.check b/test/files/run/macro-def-path-dependent.check
similarity index 100%
rename from test/files/run/macro-declared-in-annotation.check
rename to test/files/run/macro-def-path-dependent.check
diff --git a/test/files/neg/macro-invalidimpl-i.flags b/test/files/run/macro-def-path-dependent.flags
similarity index 100%
rename from test/files/neg/macro-invalidimpl-i.flags
rename to test/files/run/macro-def-path-dependent.flags
diff --git a/test/files/run/macro-def-path-dependent-a/Test_2.scala b/test/files/run/macro-def-path-dependent/Dummy.scala
similarity index 100%
rename from test/files/run/macro-def-path-dependent-a/Test_2.scala
rename to test/files/run/macro-def-path-dependent/Dummy.scala
diff --git a/test/files/run/macro-def-path-dependent/Test_1.scala b/test/files/run/macro-def-path-dependent/Test_1.scala
new file mode 100644
index 0000000..4161a64
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent/Test_1.scala
@@ -0,0 +1,25 @@
+// NOTE: blocked by SI-8049
+
+// package test1
+//
+// import scala.reflect.macros.blackbox.Context
+//
+// trait Exprs {
+//   self: Universe =>
+//
+//   class Expr[T]
+// }
+//
+// trait Reifiers {
+//   self: Universe =>
+//
+//   type Expr[T]
+//
+//   def reify[T](expr: T): Expr[T] = macro Impls.reify[T]
+// }
+//
+// trait Universe extends Exprs with Reifiers
+//
+// object Impls {
+//   def reify[T](cc: Context{ type PrefixType = Reifiers })(expr: cc.Expr[T]): cc.Expr[cc.prefix.value.Expr[T]] = ???
+// }
diff --git a/test/files/run/macro-def-path-dependent/Test_2.scala b/test/files/run/macro-def-path-dependent/Test_2.scala
new file mode 100644
index 0000000..75a03b5
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent/Test_2.scala
@@ -0,0 +1,22 @@
+package test2
+
+import scala.reflect.macros.blackbox.Context
+
+trait Exprs {
+  self: Universe =>
+
+  class Expr[T]
+}
+
+trait Reifiers {
+  self: Universe =>
+
+}
+
+trait Universe extends Exprs with Reifiers {
+  def reify[T](expr: T): Expr[T] = macro Impls.reify[T]
+}
+
+object Impls {
+  def reify[T](cc: Context{ type PrefixType = Universe })(expr: cc.Expr[T]): cc.Expr[cc.prefix.value.Expr[T]] = ???
+}
diff --git a/test/files/run/macro-def-path-dependent/Test_3.scala b/test/files/run/macro-def-path-dependent/Test_3.scala
new file mode 100644
index 0000000..1a5da82
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent/Test_3.scala
@@ -0,0 +1,22 @@
+package test3
+
+import scala.reflect.macros.blackbox.Context
+
+trait Exprs {
+  self: Universe =>
+
+  class Expr[T]
+}
+
+trait Reifiers {
+  self: Universe =>
+
+}
+
+trait Universe extends Exprs with Reifiers {
+  def reify[T](expr: T): Expr[T] = macro Impls.reify[T]
+}
+
+object Impls {
+  def reify[T](cc: Context{ type PrefixType = Universe })(expr: cc.Expr[T]): cc.Expr[cc.prefix.value.Expr[T]] = ???
+}
diff --git a/test/files/run/macro-def-path-dependent/Test_4.scala b/test/files/run/macro-def-path-dependent/Test_4.scala
new file mode 100644
index 0000000..67cb88e
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent/Test_4.scala
@@ -0,0 +1,11 @@
+package test4
+
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.blackbox.Context
+import scala.reflect.api.Universe
+
+object Test {
+  def materializeTypeTag[T](u: Universe)(e: T): u.TypeTag[T] = macro materializeTypeTag_impl[T]
+
+  def materializeTypeTag_impl[T: c.WeakTypeTag](c: Context)(u: c.Expr[Universe])(e: c.Expr[T]): c.Expr[u.value.TypeTag[T]] = ???
+}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent/Test_5.scala b/test/files/run/macro-def-path-dependent/Test_5.scala
new file mode 100644
index 0000000..b518ce8
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent/Test_5.scala
@@ -0,0 +1,9 @@
+package test56
+
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.blackbox.Context
+import scala.reflect.api.Universe
+
+object Impls {
+  def materializeTypeTag_impl[T: c.WeakTypeTag](c: Context)(u: c.Expr[Universe])(e: c.Expr[T]): c.Expr[u.value.TypeTag[T]] = ???
+}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent/Test_6.scala b/test/files/run/macro-def-path-dependent/Test_6.scala
new file mode 100644
index 0000000..a8b50ce
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent/Test_6.scala
@@ -0,0 +1,9 @@
+package test56
+
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.blackbox.Context
+import scala.reflect.api.Universe
+
+object Macros {
+  def materializeTypeTag[T](u: Universe)(e: T): u.TypeTag[T] = macro Impls.materializeTypeTag_impl[T]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-default-params.check b/test/files/run/macro-default-params.check
new file mode 100644
index 0000000..573541a
--- /dev/null
+++ b/test/files/run/macro-default-params.check
@@ -0,0 +1 @@
+0
diff --git a/test/files/run/macro-default-params/Macros_1.scala b/test/files/run/macro-default-params/Macros_1.scala
new file mode 100644
index 0000000..74588a1
--- /dev/null
+++ b/test/files/run/macro-default-params/Macros_1.scala
@@ -0,0 +1,27 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.whitebox.Context
+
+object Macros {
+  def id[A]: A = null.asInstanceOf[A]
+
+  def foo: Any = macro impl
+  def impl(c: Context): c.Tree = {
+    import c.universe._
+    import Flag._
+
+    lazy val tpe = TypeTree(typeOf[Int])
+
+    /* If we used this line instead, it would work! */
+    // lazy val tpe = tq"Int"
+
+    lazy val param: ValDef = {
+      val p1 = q"val a: ${tpe.duplicate} = Macros.id[${tpe.duplicate}]"
+      ValDef(Modifiers(DEFAULTPARAM), p1.name, p1.tpt, p1.rhs)
+    }
+
+    q"""
+      class C { def f($param) = a }
+      println(new C().f())
+    """
+  }
+}
diff --git a/test/files/run/macro-default-params/Test_2.scala b/test/files/run/macro-default-params/Test_2.scala
new file mode 100644
index 0000000..5d19639
--- /dev/null
+++ b/test/files/run/macro-default-params/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+  Macros.foo
+}
diff --git a/test/files/run/macro-divergence-spurious/Impls_Macros_1.scala b/test/files/run/macro-divergence-spurious/Impls_Macros_1.scala
index bc4a9fd..7ac8fcc 100644
--- a/test/files/run/macro-divergence-spurious/Impls_Macros_1.scala
+++ b/test/files/run/macro-divergence-spurious/Impls_Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 import language.experimental.macros
 
 trait Complex[T]
@@ -10,12 +10,12 @@ object Complex {
   def impl[T: c.WeakTypeTag](c: Context): c.Expr[Complex[T]] = {
     import c.universe._
     val tpe = weakTypeOf[T]
-    for (f <- tpe.declarations.collect{case f: TermSymbol if f.isParamAccessor && !f.isMethod => f}) {
-      val trecur = appliedType(typeOf[Complex[_]], List(f.typeSignature))
+    for (f <- tpe.decls.collect{case f: TermSymbol if f.isParamAccessor && !f.isMethod => f}) {
+      val trecur = appliedType(typeOf[Complex[_]], List(f.info))
       val recur = c.inferImplicitValue(trecur, silent = true)
       if (recur == EmptyTree) c.abort(c.enclosingPosition, s"couldn't synthesize $trecur")
     }
-    c.literalNull
+    c.Expr[Null](Literal(Constant(null)))
   }
 
   implicit object ComplexString extends Complex[String]
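
The hunk above collects the main 2.10 -> 2.11 reflection renames exercised by these tests: Type#declarations becomes decls, Symbol#typeSignature becomes info, and the removed c.literalNull convenience is spelled out as an explicit Expr. A minimal, self-contained sketch against the 2.11 blackbox API follows; the object and method names are made up for illustration and are not part of the patch.

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object NullSketch {
      // Walks the constructor parameter accessors of T using the 2.11 spellings
      // (decls, info) and expands to a plain null literal, as the test above does.
      def impl[T: c.WeakTypeTag](c: Context): c.Expr[T] = {
        import c.universe._
        val fieldTypes = weakTypeOf[T].decls.collect {
          case f: TermSymbol if f.isParamAccessor && !f.isMethod => f.info
        }
        fieldTypes.foreach(t => c.info(c.enclosingPosition, s"param accessor of type $t", force = false))
        c.Expr[T](Literal(Constant(null)))
      }
      def makeNull[T]: T = macro impl[T]
    }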
diff --git a/test/files/run/macro-duplicate.check b/test/files/run/macro-duplicate.check
index e69de29..58781b7 100644
--- a/test/files/run/macro-duplicate.check
+++ b/test/files/run/macro-duplicate.check
@@ -0,0 +1,3 @@
+Test_2.scala:5: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+  Macros.foo
+         ^
diff --git a/test/files/run/macro-duplicate/Impls_Macros_1.scala b/test/files/run/macro-duplicate/Impls_Macros_1.scala
index de81923..84fb2c5 100644
--- a/test/files/run/macro-duplicate/Impls_Macros_1.scala
+++ b/test/files/run/macro-duplicate/Impls_Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def impl(c: Context) = {
@@ -10,11 +10,11 @@ object Macros {
           case Template(_, _, ctor :: defs) =>
             val defs1 = defs collect {
               case ddef @ DefDef(mods, name, tparams, vparamss, tpt, body) =>
-                val future = Select(Select(Select(Ident(newTermName("scala")), newTermName("concurrent")), newTermName("package")), newTermName("future"))
-                val Future = Select(Select(Ident(newTermName("scala")), newTermName("concurrent")), newTypeName("Future"))
+                val future = Select(Select(Ident(TermName("scala")), TermName("concurrent")), TermName("Future"))
+                val Future = Select(Select(Ident(TermName("scala")), TermName("concurrent")), TypeName("Future"))
                 val tpt1 = if (tpt.isEmpty) tpt else AppliedTypeTree(Future, List(tpt))
                 val body1 = Apply(future, List(body))
-                val name1 = newTermName("async" + name.toString.capitalize)
+                val name1 = TermName("async" + name.toString.capitalize)
                 DefDef(mods, name1, tparams, vparamss, tpt1, body1)
             }
             Template(Nil, emptyValDef, ctor +: defs ::: defs1)
@@ -26,4 +26,4 @@ object Macros {
   }
 
   def foo = macro impl
-}
\ No newline at end of file
+}
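
The same rewrite also shows the renamed name factories: newTermName/newTypeName become TermName/TypeName. As a standalone illustration (runtime universe, hypothetical object name, not part of the patch), the scala.concurrent.Future selection touched above can be built either as a term or as a type:

    import scala.reflect.runtime.universe._

    object NameSketch {
      // TermName names a value (here the Future companion object),
      // TypeName names the type of the same name.
      val futureTerm: Tree =
        Select(Select(Ident(TermName("scala")), TermName("concurrent")), TermName("Future"))
      val futureType: Tree =
        Select(Select(Ident(TermName("scala")), TermName("concurrent")), TypeName("Future"))
    }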
diff --git a/test/files/run/macro-enclosingowner-detectvar.check b/test/files/run/macro-enclosingowner-detectvar.check
new file mode 100644
index 0000000..c8f86ec
--- /dev/null
+++ b/test/files/run/macro-enclosingowner-detectvar.check
@@ -0,0 +1,16 @@
+(true,false,false,false)
+(true,false,false,false)
+(true,false,false,false)
+(true,false,false,false)
+(false,true,false,false)
+(false,true,false,false)
+(false,true,false,false)
+(false,true,false,false)
+(false,false,true,false)
+(false,false,true,false)
+(false,false,true,false)
+(false,false,true,false)
+(false,false,false,true)
+(false,false,false,true)
+(false,false,false,true)
+(false,false,false,true)
diff --git a/test/files/run/macro-enclosingowner-detectvar/Macros_1.scala b/test/files/run/macro-enclosingowner-detectvar/Macros_1.scala
new file mode 100644
index 0000000..26ed64d
--- /dev/null
+++ b/test/files/run/macro-enclosingowner-detectvar/Macros_1.scala
@@ -0,0 +1,14 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+object Macros {
+  def impl(c: Context) = {
+    import c.universe._
+    def detectFlags(sym: TermSymbol): String = {
+      (sym.isVal, sym.isVar, !sym.isVal && !sym.isVar && !sym.isLazy, sym.isLazy).toString
+    }
+    q"println(${detectFlags(c.internal.enclosingOwner.asTerm)}); 42"
+  }
+
+  def foo: Int = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/macro-enclosingowner-detectvar/Test_2.scala b/test/files/run/macro-enclosingowner-detectvar/Test_2.scala
new file mode 100644
index 0000000..58521d9
--- /dev/null
+++ b/test/files/run/macro-enclosingowner-detectvar/Test_2.scala
@@ -0,0 +1,23 @@
+object Test extends App {
+  val a1 = Macros.foo
+  val a2 = Predef.identity(Predef.identity(Macros.foo))
+  val a3: Int = Macros.foo
+  val a4: Int = Predef.identity(Predef.identity(Macros.foo))
+
+  var b1 = Macros.foo
+  var b2 = Predef.identity(Predef.identity(Macros.foo))
+  var b3: Int = Macros.foo
+  var b4: Int = Predef.identity(Predef.identity(Macros.foo))
+
+  def c1 = Macros.foo
+  def c2 = Predef.identity(Predef.identity(Macros.foo))
+  def c3: Int = Macros.foo
+  def c4: Int = Predef.identity(Predef.identity(Macros.foo))
+  c1; c2; c3; c4;
+
+  lazy val d1 = Macros.foo
+  lazy val d2 = Predef.identity(Predef.identity(Macros.foo))
+  lazy val d3: Int = Macros.foo
+  lazy val d4: Int = Predef.identity(Predef.identity(Macros.foo))
+  d1; d2; d3; d4
+}
\ No newline at end of file
diff --git a/test/files/run/macro-enclosingowner-sbt.check b/test/files/run/macro-enclosingowner-sbt.check
new file mode 100644
index 0000000..3c95698
--- /dev/null
+++ b/test/files/run/macro-enclosingowner-sbt.check
@@ -0,0 +1,16 @@
+a1
+a2
+a3
+a4
+b1
+b2
+b3
+b4
+c1
+c2
+c3
+c4
+d1
+d2
+d3
+d4
diff --git a/test/files/run/macro-enclosingowner-sbt/Macros_1.scala b/test/files/run/macro-enclosingowner-sbt/Macros_1.scala
new file mode 100644
index 0000000..a98a984
--- /dev/null
+++ b/test/files/run/macro-enclosingowner-sbt/Macros_1.scala
@@ -0,0 +1,14 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+object Macros {
+  def impl(c: Context) = {
+    import c.universe._
+    def enclosingName(sym: Symbol): String = {
+      sym.name.toString.stripSuffix(termNames.LOCAL_SUFFIX_STRING)
+    }
+    q"println(${enclosingName(c.internal.enclosingOwner).toString}); 42"
+  }
+
+  def foo: Int = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/macro-enclosingowner-sbt/Test_2.scala b/test/files/run/macro-enclosingowner-sbt/Test_2.scala
new file mode 100644
index 0000000..58521d9
--- /dev/null
+++ b/test/files/run/macro-enclosingowner-sbt/Test_2.scala
@@ -0,0 +1,23 @@
+object Test extends App {
+  val a1 = Macros.foo
+  val a2 = Predef.identity(Predef.identity(Macros.foo))
+  val a3: Int = Macros.foo
+  val a4: Int = Predef.identity(Predef.identity(Macros.foo))
+
+  var b1 = Macros.foo
+  var b2 = Predef.identity(Predef.identity(Macros.foo))
+  var b3: Int = Macros.foo
+  var b4: Int = Predef.identity(Predef.identity(Macros.foo))
+
+  def c1 = Macros.foo
+  def c2 = Predef.identity(Predef.identity(Macros.foo))
+  def c3: Int = Macros.foo
+  def c4: Int = Predef.identity(Predef.identity(Macros.foo))
+  c1; c2; c3; c4;
+
+  lazy val d1 = Macros.foo
+  lazy val d2 = Predef.identity(Predef.identity(Macros.foo))
+  lazy val d3: Int = Macros.foo
+  lazy val d4: Int = Predef.identity(Predef.identity(Macros.foo))
+  d1; d2; d3; d4
+}
\ No newline at end of file
diff --git a/test/files/run/macro-enclosures.check b/test/files/run/macro-enclosures.check
new file mode 100644
index 0000000..b6fe7a4
--- /dev/null
+++ b/test/files/run/macro-enclosures.check
@@ -0,0 +1,34 @@
+enclosingPackage = package test {
+  object Test extends scala.AnyRef {
+    def <init>() = {
+      super.<init>();
+      ()
+    };
+    def test = Macros.foo
+  }
+}
+enclosingClass = object Test extends scala.AnyRef {
+  def <init>() = {
+    super.<init>();
+    ()
+  };
+  def test = Macros.foo
+}
+enclosingImpl = object Test extends scala.AnyRef {
+  def <init>() = {
+    super.<init>();
+    ()
+  };
+  def test = Macros.foo
+}
+enclosingTemplate = scala.AnyRef {
+  def <init>() = {
+    super.<init>();
+    ()
+  };
+  def test = Macros.foo
+}
+enclosingMethod = def test = Macros.foo
+enclosingDef = def test = Macros.foo
+enclosingOwner = method test
+enclosingOwnerChain = List(method test, object Test, package test, package <root>)
diff --git a/test/files/neg/macro-invalidret-nontree.flags b/test/files/run/macro-enclosures.flags
similarity index 100%
rename from test/files/neg/macro-invalidret-nontree.flags
rename to test/files/run/macro-enclosures.flags
diff --git a/test/files/run/macro-enclosures/Impls_Macros_1.scala b/test/files/run/macro-enclosures/Impls_Macros_1.scala
new file mode 100644
index 0000000..564cdfa
--- /dev/null
+++ b/test/files/run/macro-enclosures/Impls_Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def impl(c: Context) = {
+    import c.universe._
+    def chain(sym: Symbol): List[Symbol] = sym.owner match {
+      case NoSymbol => sym :: Nil
+      case owner => sym :: chain(owner)
+    }
+    q"""
+      println("enclosingPackage = " + ${c.enclosingPackage.toString})
+      println("enclosingClass = " + ${c.enclosingClass.toString})
+      println("enclosingImpl = " + ${c.enclosingImpl.toString})
+      println("enclosingTemplate = " + ${c.enclosingTemplate.toString})
+      println("enclosingMethod = " + ${c.enclosingMethod.toString})
+      println("enclosingDef = " + ${c.enclosingDef.toString})
+      println("enclosingOwner = " + ${c.internal.enclosingOwner.toString})
+      println("enclosingOwnerChain = " + ${chain(c.internal.enclosingOwner).toString})
+    """
+  }
+
+  def foo: Any = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/macro-enclosures/Test_2.scala b/test/files/run/macro-enclosures/Test_2.scala
new file mode 100644
index 0000000..779fe52
--- /dev/null
+++ b/test/files/run/macro-enclosures/Test_2.scala
@@ -0,0 +1,11 @@
+object Test extends App {
+  test.Test.test
+}
+
+package test {
+  object Test {
+    def test = {
+      Macros.foo
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-argument/Macros_1.scala b/test/files/run/macro-expand-implicit-argument/Macros_1.scala
index d9fd5b8..465f313 100644
--- a/test/files/run/macro-expand-implicit-argument/Macros_1.scala
+++ b/test/files/run/macro-expand-implicit-argument/Macros_1.scala
@@ -5,7 +5,7 @@ import scala.{specialized => spec}
 import language.experimental.macros
 
 import scala.reflect.ClassTag
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def alloc[@spec A:ClassTag](src:Array[A], s1:Int, len:Int) = {
@@ -41,14 +41,14 @@ object Macros {
     def const(x:Int) = Literal(Constant(x))
 
     val n = as.length
-    val arr = newTermName("arr")
+    val arr = TermName("arr")
 
-    val create = Apply(Select(ct.tree, newTermName("newArray")), List(const(n)))
+    val create = Apply(Select(ct.tree, TermName("newArray")), List(const(n)))
     val arrtpe = TypeTree(implicitly[c.WeakTypeTag[Array[A]]].tpe)
     val valdef = ValDef(Modifiers(), arr, arrtpe, create)
 
     val updates = (0 until n).map {
-      i => Apply(Select(Ident(arr), newTermName("update")), List(const(i), as(i).tree))
+      i => Apply(Select(Ident(arr), TermName("update")), List(const(i), as(i).tree))
     }
 
     val exprs = (Seq(valdef) ++ updates ++ Seq(Ident(arr))).toList
diff --git a/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala
index 082e6b2..18c9795 100644
--- a/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala
+++ b/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala
@@ -1,9 +1,9 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int]) = {
+  def foo(c: Context)(x: c.Expr[Int]) = {
     import c.universe._
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(x.tree))
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(x.tree))
     c.Expr[Unit](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-has-implicit/Macros_Test_2.scala b/test/files/run/macro-expand-implicit-macro-has-implicit/Macros_Test_2.scala
index ffb04dc..fec9146 100644
--- a/test/files/run/macro-expand-implicit-macro-has-implicit/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-implicit-macro-has-implicit/Macros_Test_2.scala
@@ -1,5 +1,5 @@
 object Test extends App {
   implicit val x = 42
-  def foo(implicit x: Int) = macro Impls.foo
+  def foo(implicit x: Int): Unit = macro Impls.foo
   foo
 }
\ No newline at end of file
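
A recurring change in the updated tests, visible in the hunk above, is that macro defs now state their result type explicitly instead of leaving it to be inferred from the implementation's c.Expr type. A small sketch of the def/impl pairing in 2.11 style; the names here are hypothetical.

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object ShowImpls {
      def showImpl(c: Context)(x: c.Expr[Int]): c.Expr[Unit] = {
        import c.universe._
        c.Expr[Unit](Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(x.tree)))
      }
    }

    object ShowDefs {
      // The implicit Int reaches the impl as an ordinary c.Expr[Int] argument;
      // the result type Unit is written out rather than inferred.
      def show(implicit x: Int): Unit = macro ShowImpls.showImpl
    }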
diff --git a/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala
index cceb038..aeceee5 100644
--- a/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala
+++ b/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala
@@ -1,9 +1,9 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[String]): c.Expr[Option[Int]] = {
+  def foo(c: Context)(x: c.Expr[String]): c.Expr[Option[Int]] = {
     import c.universe._
-    val body = Apply(Ident(definitions.SomeModule), List(Select(x.tree, newTermName("toInt"))))
+    val body = Apply(Ident(definitions.SomeModule), List(Select(x.tree, TermName("toInt"))))
     c.Expr[Option[Int]](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala b/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala
index 81ebd63..22047ee 100644
--- a/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala
@@ -1,4 +1,5 @@
 object Macros {
+  import scala.language.implicitConversions
   implicit def foo(x: String): Option[Int] = macro Impls.foo
 }
 
@@ -7,4 +8,4 @@ object Test extends App {
   println("2": Option[Int])
   val s: Int = "2" getOrElse 0
   println(s)
-}
\ No newline at end of file
+}
diff --git a/test/files/run/macro-expand-implicit-macro-is-val/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-is-val/Impls_1.scala
index fa717b2..fd267d3 100644
--- a/test/files/run/macro-expand-implicit-macro-is-val/Impls_1.scala
+++ b/test/files/run/macro-expand-implicit-macro-is-val/Impls_1.scala
@@ -1,7 +1,7 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx) = {
+  def foo(c: Context) = {
     import c.universe._
     val body = Literal(Constant(2))
     c.Expr[Int](body)
diff --git a/test/files/run/macro-expand-implicit-macro-is-val/Macros_Test_2.scala b/test/files/run/macro-expand-implicit-macro-is-val/Macros_Test_2.scala
index b91b101..2f21785 100644
--- a/test/files/run/macro-expand-implicit-macro-is-val/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-implicit-macro-is-val/Macros_Test_2.scala
@@ -1,5 +1,5 @@
 object Test extends App {
-  implicit def foo = macro Impls.foo
+  implicit def foo: Int = macro Impls.foo
   def bar(implicit x: Int) = println(x)
   bar
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-view.flags b/test/files/run/macro-expand-implicit-macro-is-view.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-expand-implicit-macro-is-view.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala
index cceb038..aeceee5 100644
--- a/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala
+++ b/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala
@@ -1,9 +1,9 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[String]): c.Expr[Option[Int]] = {
+  def foo(c: Context)(x: c.Expr[String]): c.Expr[Option[Int]] = {
     import c.universe._
-    val body = Apply(Ident(definitions.SomeModule), List(Select(x.tree, newTermName("toInt"))))
+    val body = Apply(Ident(definitions.SomeModule), List(Select(x.tree, TermName("toInt"))))
     c.Expr[Option[Int]](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala b/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala
index 0ff1fb8..0d99f32 100644
--- a/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala
@@ -1,4 +1,7 @@
+
 object Macros {
+  import scala.language.experimental.macros
+  import scala.language.implicitConversions
   implicit def foo(x: String): Option[Int] = macro Impls.foo
 }
 
@@ -6,4 +9,4 @@ object Test extends App {
   import Macros._
   def bar[T <% Option[Int]](x: T) = println(x)
   println("2")
-}
\ No newline at end of file
+}
diff --git a/test/files/run/macro-expand-multiple-arglists/Impls_1.scala b/test/files/run/macro-expand-multiple-arglists/Impls_1.scala
index 11e0793..9278633 100644
--- a/test/files/run/macro-expand-multiple-arglists/Impls_1.scala
+++ b/test/files/run/macro-expand-multiple-arglists/Impls_1.scala
@@ -1,10 +1,10 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int])(y: c.Expr[Int]) = {
+  def foo(c: Context)(x: c.Expr[Int])(y: c.Expr[Int]) = {
     import c.universe._
-    val sum = Apply(Select(x.tree, newTermName("$minus")), List(y.tree))
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(sum))
+    val sum = Apply(Select(x.tree, TermName("$minus")), List(y.tree))
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(sum))
     c.Expr[Unit](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-multiple-arglists/Macros_Test_2.scala b/test/files/run/macro-expand-multiple-arglists/Macros_Test_2.scala
index fa4504b..54b9599 100644
--- a/test/files/run/macro-expand-multiple-arglists/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-multiple-arglists/Macros_Test_2.scala
@@ -1,4 +1,4 @@
 object Test extends App {
-  def foo(x: Int)(y: Int) = macro Impls.foo
+  def foo(x: Int)(y: Int): Unit = macro Impls.foo
   foo(40)(2)
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-nullary-generic.check b/test/files/run/macro-expand-nullary-generic.check
index 42976f4..0470d23 100644
--- a/test/files/run/macro-expand-nullary-generic.check
+++ b/test/files/run/macro-expand-nullary-generic.check
@@ -1,6 +1,6 @@
-it works WeakTypeTag[Int]
-it works WeakTypeTag[Int]
-it works WeakTypeTag[Int]
-it works WeakTypeTag[Int]
-it works WeakTypeTag[Int]
+fooNullary[Int]
+fooEmpty[Int]
+fooEmpty[Int]
+barNullary[Int]
+barEmpty[Int]
 kkthxbai
diff --git a/test/files/run/macro-expand-nullary-generic/Impls_1.scala b/test/files/run/macro-expand-nullary-generic/Impls_1.scala
index 1180c83..9362d6c 100644
--- a/test/files/run/macro-expand-nullary-generic/Impls_1.scala
+++ b/test/files/run/macro-expand-nullary-generic/Impls_1.scala
@@ -1,15 +1,15 @@
 import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def impl[T: c.WeakTypeTag](c: Ctx) = {
+  def impl[T: c.WeakTypeTag](c: Context)(meth: String) = {
     import c.universe._
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works " + implicitly[c.WeakTypeTag[T]]))))
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(s"$meth[${c.weakTypeOf[T]}]"))))
     c.Expr[Unit](body)
   }
 
-  def fooNullary[T: c.WeakTypeTag](c: Ctx) = impl[T](c)
-  def fooEmpty[T: c.WeakTypeTag](c: Ctx)() = impl[T](c)
-  def barNullary[T: c.WeakTypeTag](c: Ctx)(x: c.Expr[Int]) = impl[T](c)
-  def barEmpty[T: c.WeakTypeTag](c: Ctx)(x: c.Expr[Int])() = impl[T](c)
+  def fooNullary[T: c.WeakTypeTag](c: Context) = impl[T](c)("fooNullary")
+  def fooEmpty[T: c.WeakTypeTag](c: Context)() = impl[T](c)("fooEmpty")
+  def barNullary[T: c.WeakTypeTag](c: Context)(x: c.Expr[Int]) = impl[T](c)("barNullary")
+  def barEmpty[T: c.WeakTypeTag](c: Context)(x: c.Expr[Int])() = impl[T](c)("barEmpty")
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-nullary-generic/Macros_Test_2.scala b/test/files/run/macro-expand-nullary-generic/Macros_Test_2.scala
index 2d5cf53..edd9051 100644
--- a/test/files/run/macro-expand-nullary-generic/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-nullary-generic/Macros_Test_2.scala
@@ -1,8 +1,8 @@
 object Macros {
-  def foo1[T] = macro Impls.fooNullary[T]
-  def foo2[T]() = macro Impls.fooEmpty[T]
-  def bar1[T](x: Int) = macro Impls.barNullary[T]
-  def bar2[T](x: Int)() = macro Impls.barEmpty[T]
+  def foo1[T]: Unit = macro Impls.fooNullary[T]
+  def foo2[T](): Unit = macro Impls.fooEmpty[T]
+  def bar1[T](x: Int): Unit = macro Impls.barNullary[T]
+  def bar2[T](x: Int)(): Unit = macro Impls.barEmpty[T]
 }
 
 object Test extends App {
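
The explicit result types added to these macro defs (`def foo1[T]: Unit = macro ...`) line up with 2.11's deprecation of macro defs whose return type is inferred from the implementation. A self-contained sketch of the two styles, using an illustrative `answer` macro rather than anything from this patch:

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object Impls {
      def answer(c: Context): c.Expr[Int] = {
        import c.universe._
        c.Expr[Int](Literal(Constant(42)))
      }
    }

    object Macros {
      // def answer = macro Impls.answer   // 2.10 style; deprecated in 2.11
      def answer: Int = macro Impls.answer // explicit result type, as in the updated tests
    }
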
diff --git a/test/files/run/macro-expand-nullary-nongeneric.check b/test/files/run/macro-expand-nullary-nongeneric.check
index 9ab5f3a..cb7e766 100644
--- a/test/files/run/macro-expand-nullary-nongeneric.check
+++ b/test/files/run/macro-expand-nullary-nongeneric.check
@@ -1,6 +1,6 @@
-it works
-it works
-it works
-it works
-it works
+fooNullary
+fooEmpty
+fooEmpty
+barNullary
+barEmpty
 kkthxbai
diff --git a/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala b/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala
index c6bd1cd..c8c3d25 100644
--- a/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala
+++ b/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala
@@ -1,14 +1,15 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def impl(c: Ctx) = {
+  def impl(c: Context)(meth: String) = {
     import c.universe._
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(meth))))
     c.Expr[Unit](body)
   }
 
-  def fooNullary(c: Ctx) = impl(c)
-  def fooEmpty(c: Ctx)() = impl(c)
-  def barNullary(c: Ctx)(x: c.Expr[Int]) = impl(c)
-  def barEmpty(c: Ctx)(x: c.Expr[Int])() = impl(c)
+  def fooNullary(c: Context) = impl(c)("fooNullary")
+  def fooEmpty(c: Context)() = impl(c)("fooEmpty")
+  def barNullary(c: Context)(x: c.Expr[Int]) = impl(c)("barNullary")
+  def barEmpty(c: Context)(x: c.Expr[Int])() = impl(c)("barEmpty")
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-nullary-nongeneric/Macros_Test_2.scala b/test/files/run/macro-expand-nullary-nongeneric/Macros_Test_2.scala
index 1f6d717..51915df 100644
--- a/test/files/run/macro-expand-nullary-nongeneric/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-nullary-nongeneric/Macros_Test_2.scala
@@ -1,8 +1,8 @@
 object Macros {
-  def foo1 = macro Impls.fooNullary
-  def foo2() = macro Impls.fooEmpty
-  def bar1(x: Int) = macro Impls.barNullary
-  def bar2(x: Int)() = macro Impls.barEmpty
+  def foo1: Unit = macro Impls.fooNullary
+  def foo2(): Unit = macro Impls.fooEmpty
+  def bar1(x: Int): Unit = macro Impls.barNullary
+  def bar2(x: Int)(): Unit = macro Impls.barEmpty
 }
 
 object Test extends App {
diff --git a/test/files/run/macro-expand-overload/Impls_1.scala b/test/files/run/macro-expand-overload/Impls_1.scala
index f7c240d..ef9d01d 100644
--- a/test/files/run/macro-expand-overload/Impls_1.scala
+++ b/test/files/run/macro-expand-overload/Impls_1.scala
@@ -1,15 +1,15 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def impl(c: Ctx)(tag: String, x: c.Expr[_]) = {
+  def impl(c: Context)(tag: String, x: c.Expr[_]) = {
     import c.{prefix => prefix}
     import c.universe._
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
     c.Expr[Unit](body)
   }
 
-  def fooObjectString(c: Ctx)(x: c.Expr[_]) = impl(c)("fooObjectString", x)
-  def fooObjectInt(c: Ctx)(x: c.Expr[_]) = impl(c)("fooObjectInt", x)
-  def fooClassString(c: Ctx)(x: c.Expr[_]) = impl(c)("fooClassString", x)
-  def fooClassInt(c: Ctx)(x: c.Expr[_]) = impl(c)("fooClassInt", x)
+  def fooObjectString(c: Context)(x: c.Expr[_]) = impl(c)("fooObjectString", x)
+  def fooObjectInt(c: Context)(x: c.Expr[_]) = impl(c)("fooObjectInt", x)
+  def fooClassString(c: Context)(x: c.Expr[_]) = impl(c)("fooClassString", x)
+  def fooClassInt(c: Context)(x: c.Expr[_]) = impl(c)("fooClassInt", x)
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-overload/Macros_Test_2.scala b/test/files/run/macro-expand-overload/Macros_Test_2.scala
index 7f61f85..87cff2e 100644
--- a/test/files/run/macro-expand-overload/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-overload/Macros_Test_2.scala
@@ -1,13 +1,13 @@
 object Macros {
-  def foo(x: String) = macro Impls.fooObjectString
-  def foo(x: Int) = macro Impls.fooObjectInt
-  def foo(x: Boolean) = println("fooObjectBoolean")
+  def foo(x: String): Unit = macro Impls.fooObjectString
+  def foo(x: Int): Unit = macro Impls.fooObjectInt
+  def foo(x: Boolean): Unit = println("fooObjectBoolean")
 }
 
 class Macros {
-  def foo(x: String) = macro Impls.fooClassString
-  def foo(x: Int) = macro Impls.fooClassInt
-  def foo(x: Boolean) = println("fooClassBoolean")
+  def foo(x: String): Unit = macro Impls.fooClassString
+  def foo(x: Int): Unit = macro Impls.fooClassInt
+  def foo(x: Boolean): Unit = println("fooClassBoolean")
 }
 
 object Test extends App {
diff --git a/test/files/run/macro-expand-override/Impls_1.scala b/test/files/run/macro-expand-override/Impls_1.scala
index ec93dd4..e6ce18f 100644
--- a/test/files/run/macro-expand-override/Impls_1.scala
+++ b/test/files/run/macro-expand-override/Impls_1.scala
@@ -1,15 +1,15 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def impl(c: Ctx)(tag: String, x: c.Expr[_]) = {
+  def impl(c: Context)(tag: String, x: c.Expr[_]) = {
     import c.{prefix => prefix}
     import c.universe._
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
     c.Expr[Unit](body)
   }
 
-  def fooBString(c: Ctx)(x: c.Expr[_]) = impl(c)("fooBString", x)
-  def fooBInt(c: Ctx)(x: c.Expr[_]) = impl(c)("fooBInt", x)
-  def fooDInt(c: Ctx)(x: c.Expr[_]) = impl(c)("fooDInt", x)
-  def fooZString(c: Ctx)(x: c.Expr[_]) = impl(c)("fooZString", x)
+  def fooBString(c: Context)(x: c.Expr[_]) = impl(c)("fooBString", x)
+  def fooBInt(c: Context)(x: c.Expr[_]) = impl(c)("fooBInt", x)
+  def fooDInt(c: Context)(x: c.Expr[_]) = impl(c)("fooDInt", x)
+  def fooZString(c: Context)(x: c.Expr[_]) = impl(c)("fooZString", x)
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-override/Macros_Test_2.scala b/test/files/run/macro-expand-override/Macros_Test_2.scala
index f162773..160831c 100644
--- a/test/files/run/macro-expand-override/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-override/Macros_Test_2.scala
@@ -1,17 +1,17 @@
 class B {
-  def foo(x: String) = macro Impls.fooBString
-  def foo(x: Int) = macro Impls.fooBInt
-  def foo(x: Boolean) = println("fooBBoolean")
+  def foo(x: String): Unit = macro Impls.fooBString
+  def foo(x: Int): Unit = macro Impls.fooBInt
+  def foo(x: Boolean): Unit = println("fooBBoolean")
 }
 
 class D extends B {
-  //override def foo(x: String) = println("fooDString") => method cannot override a macro
-  override def foo(x: Int) = macro Impls.fooDInt
+  //override def foo(x: String): Unit = println("fooDString") => method cannot override a macro
+  override def foo(x: Int): Unit = macro Impls.fooDInt
 }
 
 class Z extends D {
-  override def foo(x: String) = macro Impls.fooZString
-  override def foo(x: Boolean) = println("fooZBoolean")
+  override def foo(x: String): Unit = macro Impls.fooZString
+  override def foo(x: Boolean): Unit = println("fooZBoolean")
 }
 
 object Test extends App {
diff --git a/test/files/run/macro-expand-recursive/Impls_1.scala b/test/files/run/macro-expand-recursive/Impls_1.scala
index 61db5c4..3def2d2 100644
--- a/test/files/run/macro-expand-recursive/Impls_1.scala
+++ b/test/files/run/macro-expand-recursive/Impls_1.scala
@@ -1,15 +1,15 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx) = {
+  def foo(c: Context) = {
     import c.universe._
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works"))))
     c.Expr[Unit](body)
   }
 
-  def fooFoo(c: Ctx) = {
+  def fooFoo(c: Context) = {
     import c.universe._
-    val body = Select(Ident(newTermName("Macros")), newTermName("foo"))
+    val body = Select(Ident(TermName("Macros")), TermName("foo"))
     c.Expr[Unit](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-recursive/Macros_Test_2.scala b/test/files/run/macro-expand-recursive/Macros_Test_2.scala
index 6ff691b..5332fda 100644
--- a/test/files/run/macro-expand-recursive/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-recursive/Macros_Test_2.scala
@@ -1,6 +1,6 @@
 object Macros {
-  def foo = macro Impls.foo
-  def fooFoo = macro Impls.fooFoo
+  def foo: Unit = macro Impls.foo
+  def fooFoo: Unit = macro Impls.fooFoo
 }
 
 object Test extends App {
diff --git a/test/files/run/macro-expand-tparams-bounds-a.check b/test/files/run/macro-expand-tparams-bounds-a.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/macro-expand-tparams-bounds-a.flags b/test/files/run/macro-expand-tparams-bounds-a.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-expand-tparams-bounds-a.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-bounds-a/Impls_1.scala b/test/files/run/macro-expand-tparams-bounds-a/Impls_1.scala
deleted file mode 100644
index 9b8dafa..0000000
--- a/test/files/run/macro-expand-tparams-bounds-a/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[U <: String](c: Ctx): c.Expr[Unit] = c.literalUnit
-}
diff --git a/test/files/run/macro-expand-tparams-bounds-a/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-bounds-a/Macros_Test_2.scala
deleted file mode 100644
index b498e6f..0000000
--- a/test/files/run/macro-expand-tparams-bounds-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo[U <: String] = macro Impls.foo[U]
-}
-
-object Test extends App {
-  import Macros._
-  foo[String]
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-bounds-b.check b/test/files/run/macro-expand-tparams-bounds-b.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/macro-expand-tparams-bounds-b.flags b/test/files/run/macro-expand-tparams-bounds-b.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-expand-tparams-bounds-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-bounds-b/Impls_1.scala b/test/files/run/macro-expand-tparams-bounds-b/Impls_1.scala
deleted file mode 100644
index c11c891..0000000
--- a/test/files/run/macro-expand-tparams-bounds-b/Impls_1.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-class C
-
-object Impls {
-  def foo[U <: C](c: Ctx): c.Expr[Unit] = c.literalUnit
-}
diff --git a/test/files/run/macro-expand-tparams-bounds-b/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-bounds-b/Macros_Test_2.scala
deleted file mode 100644
index 1a261e9..0000000
--- a/test/files/run/macro-expand-tparams-bounds-b/Macros_Test_2.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-class D extends C
-
-object Macros {
-  def foo[T <: D] = macro Impls.foo[T]
-}
-
-object Test extends App {
-  import Macros._
-  foo[D]
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-bounds.check b/test/files/run/macro-expand-tparams-bounds.check
new file mode 100644
index 0000000..317e967
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-bounds.check
@@ -0,0 +1,2 @@
+hello
+hello
diff --git a/test/files/neg/macro-invalidret-nonuniversetree.flags b/test/files/run/macro-expand-tparams-bounds.flags
similarity index 100%
rename from test/files/neg/macro-invalidret-nonuniversetree.flags
rename to test/files/run/macro-expand-tparams-bounds.flags
diff --git a/test/files/run/macro-expand-tparams-bounds/Impls_1.scala b/test/files/run/macro-expand-tparams-bounds/Impls_1.scala
new file mode 100644
index 0000000..95aaa1c
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-bounds/Impls_1.scala
@@ -0,0 +1,12 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls1 {
+  def foo[U <: String](c: Context): c.Expr[Unit] = { import c.universe._; c.Expr[Unit](q"""println("hello")""") }
+}
+
+class C
+class D extends C
+
+object Impls2 {
+  def foo[U <: C](c: Context): c.Expr[Unit] = { import c.universe._; c.Expr[Unit](q"""println("hello")""") }
+}
diff --git a/test/files/run/macro-expand-tparams-bounds/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-bounds/Macros_Test_2.scala
new file mode 100644
index 0000000..6cb2b53
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-bounds/Macros_Test_2.scala
@@ -0,0 +1,12 @@
+object Macros1 {
+  def foo[U <: String]: Unit = macro Impls1.foo[U]
+}
+
+object Macros2 {
+  def foo[T <: D]: Unit = macro Impls2.foo[T]
+}
+
+object Test extends App {
+  Macros1.foo[String]
+  Macros2.foo[D]
+}
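
The rewritten Impls_1.scala above uses quasiquotes (`q"""println("hello")"""`) instead of hand-built `Apply`/`Select`/`Ident` trees; quasiquotes are part of the standard universe in 2.11. A minimal sketch of what such an impl does, assuming only the 2.11 reflection API:

    import scala.reflect.macros.blackbox.Context

    object QuasiquoteSketch {
      def hello(c: Context): c.Expr[Unit] = {
        import c.universe._
        // q"..." parses to an untyped Apply(Ident(TermName("println")), List(Literal(Constant("hello"))));
        // the typechecker later resolves the bare println to Predef.println.
        c.Expr[Unit](q"""println("hello")""")
      }
    }
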
diff --git a/test/files/run/macro-expand-tparams-explicit/Impls_1.scala b/test/files/run/macro-expand-tparams-explicit/Impls_1.scala
index 72b420d..c33ac6d 100644
--- a/test/files/run/macro-expand-tparams-explicit/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-explicit/Impls_1.scala
@@ -1,11 +1,11 @@
 import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo[U: c.WeakTypeTag](c: Ctx) = {
+  def foo[U: c.WeakTypeTag](c: Context) = {
     import c.universe._
     val U = implicitly[c.WeakTypeTag[U]]
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(U.toString))))
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(U.toString))))
     c.Expr[Unit](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-explicit/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-explicit/Macros_Test_2.scala
index e72c278..2cf7b19 100644
--- a/test/files/run/macro-expand-tparams-explicit/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-tparams-explicit/Macros_Test_2.scala
@@ -1,4 +1,4 @@
 object Test extends App {
-  def foo[U] = macro Impls.foo[U]
+  def foo[U]: Unit = macro Impls.foo[U]
   foo[Int]
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-implicit/Impls_1.scala b/test/files/run/macro-expand-tparams-implicit/Impls_1.scala
index 3377051..32cee0d 100644
--- a/test/files/run/macro-expand-tparams-implicit/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-implicit/Impls_1.scala
@@ -1,11 +1,11 @@
 import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo[U: c.WeakTypeTag](c: Ctx)(x: c.Expr[U]) = {
+  def foo[U: c.WeakTypeTag](c: Context)(x: c.Expr[U]) = {
     import c.universe._
     val U = implicitly[c.WeakTypeTag[U]]
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(U.toString))))
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(U.toString))))
     c.Expr[Unit](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-implicit/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-implicit/Macros_Test_2.scala
index f8c573f..1192931 100644
--- a/test/files/run/macro-expand-tparams-implicit/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-tparams-implicit/Macros_Test_2.scala
@@ -1,5 +1,5 @@
 object Test extends App {
-  def foo[U](x: U) = macro Impls.foo[U]
+  def foo[U](x: U): Unit = macro Impls.foo[U]
   foo(42)
   foo("42")
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-only-in-impl.flags b/test/files/run/macro-expand-tparams-only-in-impl.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-expand-tparams-only-in-impl.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-only-in-impl/Impls_1.scala b/test/files/run/macro-expand-tparams-only-in-impl/Impls_1.scala
deleted file mode 100644
index 9b8dafa..0000000
--- a/test/files/run/macro-expand-tparams-only-in-impl/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[U <: String](c: Ctx): c.Expr[Unit] = c.literalUnit
-}
diff --git a/test/files/run/macro-expand-tparams-only-in-impl/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-only-in-impl/Macros_Test_2.scala
deleted file mode 100644
index 218c7ae..0000000
--- a/test/files/run/macro-expand-tparams-only-in-impl/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
-  def foo = macro Impls.foo[String]
-}
-
-object Test extends App {
-  import Macros._
-  foo
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-optional.flags b/test/files/run/macro-expand-tparams-optional.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-expand-tparams-optional.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-optional/Impls_1.scala b/test/files/run/macro-expand-tparams-optional/Impls_1.scala
deleted file mode 100644
index 3b829e2..0000000
--- a/test/files/run/macro-expand-tparams-optional/Impls_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[U](c: Ctx) = {
-    import c.universe._
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("don't know U"))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-optional/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-optional/Macros_Test_2.scala
deleted file mode 100644
index e72c278..0000000
--- a/test/files/run/macro-expand-tparams-optional/Macros_Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
-  def foo[U] = macro Impls.foo[U]
-  foo[Int]
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-a.check b/test/files/run/macro-expand-tparams-prefix-a.check
deleted file mode 100644
index ca44a4f..0000000
--- a/test/files/run/macro-expand-tparams-prefix-a.check
+++ /dev/null
@@ -1,4 +0,0 @@
-WeakTypeTag[Int]
-WeakTypeTag[Int]
-WeakTypeTag[String]
-WeakTypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-a.flags b/test/files/run/macro-expand-tparams-prefix-a.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-expand-tparams-prefix-a.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala
deleted file mode 100644
index 3377051..0000000
--- a/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[U: c.WeakTypeTag](c: Ctx)(x: c.Expr[U]) = {
-    import c.universe._
-    val U = implicitly[c.WeakTypeTag[U]]
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(U.toString))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-a/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix-a/Macros_Test_2.scala
deleted file mode 100644
index 81ccb7f..0000000
--- a/test/files/run/macro-expand-tparams-prefix-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-object Test extends App {
-  class C[T] {
-    def foo[U](x: U) = macro Impls.foo[U]
-  }
-
-  new C[Int]().foo(42)
-  new C[Boolean]().foo(42)
-  new C[Int]().foo("42")
-  new C[String]().foo(true)
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-b.check b/test/files/run/macro-expand-tparams-prefix-b.check
deleted file mode 100644
index 2ff2ce4..0000000
--- a/test/files/run/macro-expand-tparams-prefix-b.check
+++ /dev/null
@@ -1,2 +0,0 @@
-WeakTypeTag[Boolean] WeakTypeTag[Int]
-WeakTypeTag[Boolean] WeakTypeTag[String]
diff --git a/test/files/run/macro-expand-tparams-prefix-b.flags b/test/files/run/macro-expand-tparams-prefix-b.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-expand-tparams-prefix-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala
deleted file mode 100644
index 9378e67..0000000
--- a/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Ctx)(x: c.Expr[U]) = {
-    import c.universe._
-    val T = implicitly[c.WeakTypeTag[T]]
-    val U = implicitly[c.WeakTypeTag[U]]
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString + " " + U.toString))))
-    c.Expr[Unit](body)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-b/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix-b/Macros_Test_2.scala
deleted file mode 100644
index a4a0acf..0000000
--- a/test/files/run/macro-expand-tparams-prefix-b/Macros_Test_2.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-object Test extends App {
-  class C[T] {
-    def foo[U](x: U) = macro Impls.foo[T, U]
-  }
-
-  object D extends C[Boolean]
-
-  D.foo(42)
-  D.foo("42")
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c1.check b/test/files/run/macro-expand-tparams-prefix-c1.check
deleted file mode 100644
index 0f24f74..0000000
--- a/test/files/run/macro-expand-tparams-prefix-c1.check
+++ /dev/null
@@ -1,3 +0,0 @@
-WeakTypeTag[Int]
-WeakTypeTag[String]
-WeakTypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-c1.flags b/test/files/run/macro-expand-tparams-prefix-c1.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-expand-tparams-prefix-c1.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala
deleted file mode 100644
index afdd7d4..0000000
--- a/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
-    import c.universe._
-    c.Expr(Block(List(
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
-      Literal(Constant(()))))
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c1/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix-c1/Macros_Test_2.scala
deleted file mode 100644
index 4fa0c8c..0000000
--- a/test/files/run/macro-expand-tparams-prefix-c1/Macros_Test_2.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-class D[T] {
-  class C[U] {
-    def foo[V] = macro Impls.foo[T, U, V]
-  }
-}
-
-object Test extends App {
-  val outer1 = new D[Int]
-  val outer2 = new outer1.C[String]
-  outer2.foo[Boolean]
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c2.check b/test/files/run/macro-expand-tparams-prefix-c2.check
deleted file mode 100644
index 0f24f74..0000000
--- a/test/files/run/macro-expand-tparams-prefix-c2.check
+++ /dev/null
@@ -1,3 +0,0 @@
-WeakTypeTag[Int]
-WeakTypeTag[String]
-WeakTypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-c2.flags b/test/files/run/macro-expand-tparams-prefix-c2.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-expand-tparams-prefix-c2.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala b/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala
deleted file mode 100644
index 3c28382..0000000
--- a/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
-    import c.universe._
-    c.Expr(Block(List(
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
-      Literal(Constant(()))))
-  }
-}
-
-class D[T] {
-  class C[U] {
-    def foo[V] = macro Impls.foo[T, U, V]
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c2/Test_2.scala b/test/files/run/macro-expand-tparams-prefix-c2/Test_2.scala
deleted file mode 100644
index e729d4a..0000000
--- a/test/files/run/macro-expand-tparams-prefix-c2/Test_2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object Test extends App {
-  val outer1 = new D[Int]
-  val outer2 = new outer1.C[String]
-  outer2.foo[Boolean]
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-d1.check b/test/files/run/macro-expand-tparams-prefix-d1.check
deleted file mode 100644
index 7832503..0000000
--- a/test/files/run/macro-expand-tparams-prefix-d1.check
+++ /dev/null
@@ -1,3 +0,0 @@
-WeakTypeTag[T]
-WeakTypeTag[U]
-WeakTypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-d1.flags b/test/files/run/macro-expand-tparams-prefix-d1.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-expand-tparams-prefix-d1.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala
deleted file mode 100644
index afdd7d4..0000000
--- a/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
-  def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
-    import c.universe._
-    c.Expr(Block(List(
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
-      Literal(Constant(()))))
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-d1/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix-d1/Macros_Test_2.scala
deleted file mode 100644
index 8222a6d..0000000
--- a/test/files/run/macro-expand-tparams-prefix-d1/Macros_Test_2.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object Test extends App {
-  class D[T] {
-    class C[U] {
-      def foo[V] = macro Impls.foo[T, U, V]
-      foo[Boolean]
-    }
-  }
-
-  val outer1 = new D[Int]
-  new outer1.C[String]
-}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix.check b/test/files/run/macro-expand-tparams-prefix.check
new file mode 100644
index 0000000..7397958
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix.check
@@ -0,0 +1,20 @@
+===Macros1===
+WeakTypeTag[Int]
+WeakTypeTag[Int]
+WeakTypeTag[String]
+WeakTypeTag[Boolean]
+===Macros2===
+WeakTypeTag[Boolean] WeakTypeTag[Int]
+WeakTypeTag[Boolean] WeakTypeTag[String]
+===Macros3===
+WeakTypeTag[Int]
+WeakTypeTag[String]
+WeakTypeTag[Boolean]
+===Macros4===
+WeakTypeTag[Int]
+WeakTypeTag[String]
+WeakTypeTag[Boolean]
+===Macros5===
+WeakTypeTag[T]
+WeakTypeTag[U]
+WeakTypeTag[Boolean]
diff --git a/test/files/neg/macro-invalidshape-a.flags b/test/files/run/macro-expand-tparams-prefix.flags
similarity index 100%
rename from test/files/neg/macro-invalidshape-a.flags
rename to test/files/run/macro-expand-tparams-prefix.flags
diff --git a/test/files/run/macro-expand-tparams-prefix/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix/Impls_1.scala
new file mode 100644
index 0000000..289f071
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix/Impls_1.scala
@@ -0,0 +1,39 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.blackbox.Context
+
+object Impls1 {
+  def foo[U: c.WeakTypeTag](c: Context)(x: c.Expr[U]) = {
+    import c.universe._
+    val U = implicitly[c.WeakTypeTag[U]]
+    c.Expr[Unit](q"println(${U.toString})")
+  }
+}
+
+object Impls2 {
+  def foo[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Context)(x: c.Expr[U]) = {
+    import c.universe._
+    val T = implicitly[c.WeakTypeTag[T]]
+    val U = implicitly[c.WeakTypeTag[U]]
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString + " " + U.toString))))
+    c.Expr[Unit](q"""println(${T.toString} + " " + ${U.toString})""")
+  }
+}
+
+object Impls345 {
+  def foo[T, U: c.WeakTypeTag, V](c: Context)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+    import c.universe._
+    c.Expr(q"""
+      println(${T.toString})
+      println(${implicitly[c.WeakTypeTag[U]].toString})
+      println(${V.toString})
+    """)
+  }
+}
+
+object Macros4 {
+  class D[T] {
+    class C[U] {
+      def foo[V] = macro Impls345.foo[T, U, V]
+    }
+  }
+}
diff --git a/test/files/run/macro-expand-tparams-prefix/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix/Macros_Test_2.scala
new file mode 100644
index 0000000..c8f68b4
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix/Macros_Test_2.scala
@@ -0,0 +1,57 @@
+object Macros1 {
+  class C[T] {
+    def foo[U](x: U): Unit = macro Impls1.foo[U]
+  }
+}
+
+object Macros2 {
+  class C[T] {
+    def foo[U](x: U): Unit = macro Impls2.foo[T, U]
+  }
+}
+
+object Macros3 {
+  class D[T] {
+    class C[U] {
+      def foo[V]: Unit = macro Impls345.foo[T, U, V]
+    }
+  }
+}
+
+// object Macros4 is declared in Impls_1.scala
+
+object Macros5 {
+  class D[T] {
+    class C[U] {
+      def foo[V]: Unit = macro Impls345.foo[T, U, V]
+      foo[Boolean]
+    }
+  }
+}
+
+object Test extends App {
+  println("===Macros1===")
+  new Macros1.C[Int]().foo(42)
+  new Macros1.C[Boolean]().foo(42)
+  new Macros1.C[Int]().foo("42")
+  new Macros1.C[String]().foo(true)
+
+  println("===Macros2===")
+  object D2 extends Macros2.C[Boolean]
+  D2.foo(42)
+  D2.foo("42")
+
+  println("===Macros3===")
+  val outer31 = new Macros3.D[Int]
+  val outer32 = new outer31.C[String]
+  outer32.foo[Boolean]
+
+  println("===Macros4===")
+  val outer41 = new Macros4.D[Int]
+  val outer42 = new outer41.C[String]
+  outer42.foo[Boolean]
+
+  println("===Macros5===")
+  val outer1 = new Macros5.D[Int]
+  new outer1.C[String]
+}
\ No newline at end of file
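
The consolidated prefix tests above rely on `WeakTypeTag` context bounds and implicit tag parameters so the implementation can see both the call-site type arguments and those of the prefix (`new C[Int]().foo(...)`). A minimal sketch of the tag mechanism for a single call-site type parameter, with `TagSketch` as an illustrative name:

    import scala.reflect.macros.blackbox.Context

    object TagSketch {
      // The context bound materializes a c.WeakTypeTag[U] for the U inferred
      // at the call site, so the impl can inspect the static type of x.
      def foo[U: c.WeakTypeTag](c: Context)(x: c.Expr[U]): c.Expr[Unit] = {
        import c.universe._
        val U = implicitly[c.WeakTypeTag[U]]
        c.Expr[Unit](q"println(${U.toString})")
      }
    }

A matching macro def such as `def foo[U](x: U): Unit = macro TagSketch.foo[U]` then prints the statically known type at each call site, which is what the expected output above checks.
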
diff --git a/test/files/run/macro-expand-unapply-a.check b/test/files/run/macro-expand-unapply-a.check
new file mode 100644
index 0000000..7c2976e
--- /dev/null
+++ b/test/files/run/macro-expand-unapply-a.check
@@ -0,0 +1,2 @@
+(1,2)
+(1,2,3)
diff --git a/test/files/neg/macro-invalidshape-b.flags b/test/files/run/macro-expand-unapply-a.flags
similarity index 100%
rename from test/files/neg/macro-invalidshape-b.flags
rename to test/files/run/macro-expand-unapply-a.flags
diff --git a/test/files/run/macro-expand-unapply-a/Impls_Macros_1.scala b/test/files/run/macro-expand-unapply-a/Impls_Macros_1.scala
new file mode 100644
index 0000000..64f16c6
--- /dev/null
+++ b/test/files/run/macro-expand-unapply-a/Impls_Macros_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.whitebox.Context
+
+object Helper {
+  def unapplySeq[T](x: List[T]): Option[Seq[T]] = List.unapplySeq[T](x)
+}
+
+object Macros {
+  def impl[T: c.WeakTypeTag](c: Context)(x: c.Expr[List[T]]) = {
+    c.universe.reify(Helper.unapplySeq(x.splice))
+  }
+
+  object UnapplyMacro {
+    def unapplySeq[T](x: List[T]): Option[Seq[T]] = macro impl[T]
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-unapply-a/Test_2.scala b/test/files/run/macro-expand-unapply-a/Test_2.scala
new file mode 100644
index 0000000..6169d86
--- /dev/null
+++ b/test/files/run/macro-expand-unapply-a/Test_2.scala
@@ -0,0 +1,6 @@
+import Macros._
+
+object Test extends App {
+  List(1, 2) match { case UnapplyMacro(x, y) => println((x, y)) }
+  List(1, 2, 3) match { case UnapplyMacro(x, y, z) => println((x, y, z)) }
+}
\ No newline at end of file
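
This is the one test in this stretch that imports `scala.reflect.macros.whitebox.Context`: the `unapplySeq` macro is whitebox, presumably because extractor macros rely on the refined type of the actual expansion rather than the declared signature. A purely illustrative sketch of that whitebox behaviour (names are not from the patch):

    import scala.language.experimental.macros
    import scala.reflect.macros.whitebox.Context

    object WhiteboxSketch {
      def impl(c: Context): c.Tree = {
        import c.universe._
        // The def below announces Any, but the whitebox expansion's actual
        // type, Some[(Int, Int)], is what the typechecker uses at the call site.
        q"Some((1, 2))"
      }
      def pair: Any = macro impl
    }
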
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check
index 1c3ecfd..2709b57 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check
@@ -1,4 +1,4 @@
-reflective compilation has failed: 
+reflective compilation has failed:
 
 no `: _*' annotation allowed here
 (such annotations are only allowed in arguments to *-parameters)
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala
index 2ef8f04..18af845 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala
@@ -1,9 +1,9 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(xs: c.Expr[Int]*) = {
+  def foo(c: Context)(xs: c.Expr[Int]*) = {
     import c.universe._
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), xs.map(_.tree).toList)
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), xs.map(_.tree).toList)
     c.Expr[Unit](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala
index c832826..64aaa07 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala
@@ -1,12 +1,12 @@
 object Macros {
-  def foo(xs: Int*) = macro Impls.foo
+  def foo(xs: Int*): Unit = macro Impls.foo
 }
 
 object Test extends App {
   import scala.reflect.runtime.universe._
   import scala.reflect.runtime.{currentMirror => cm}
   import scala.tools.reflect.ToolBox
-  val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Typed(Apply(Ident(definitions.ListModule), List(Literal(Constant(1)), Literal(Constant(2)))), Ident(tpnme.WILDCARD_STAR))))
+  val tree = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Typed(Apply(Ident(definitions.ListModule), List(Literal(Constant(1)), Literal(Constant(2)))), Ident(typeNames.WILDCARD_STAR))))
   try cm.mkToolBox().eval(tree)
   catch { case ex: Throwable =>  println(ex.getMessage) }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala
index 3c7f94f..eb067c2 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala
@@ -1,13 +1,13 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(xs: c.Expr[Int]*) = {
+  def foo(c: Context)(xs: c.Expr[Int]*) = {
     import c.universe._
     val stripped_xs = xs map (_.tree) toList match {
-      case List(Typed(stripped, Ident(wildstar))) if wildstar == tpnme.WILDCARD_STAR => List(stripped)
+      case List(Typed(stripped, Ident(wildstar))) if wildstar == typeNames.WILDCARD_STAR => List(stripped)
       case _ => ???
     }
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), stripped_xs)
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), stripped_xs)
     c.Expr[Unit](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Macros_Test_2.scala
index f127ebc..13d7cd5 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Macros_Test_2.scala
@@ -1,5 +1,5 @@
 object Macros {
-  def foo(xs: Int*) = macro Impls.foo
+  def foo(xs: Int*): Unit = macro Impls.foo
 }
 
 object Test extends App {
diff --git a/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala b/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala
index 2066893..64ab7de 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala
+++ b/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala
@@ -1,13 +1,13 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
   def myprintln(xs: Int*) = {
     println(xs)
   }
 
-  def foo(c: Ctx)(xs: c.Expr[Int]*) = {
+  def foo(c: Context)(xs: c.Expr[Int]*) = {
     import c.universe._
-    val body = Apply(Select(Ident(newTermName("Impls")), newTermName("myprintln")), xs.map(_.tree).toList)
+    val body = Apply(Select(Ident(TermName("Impls")), TermName("myprintln")), xs.map(_.tree).toList)
     c.Expr[Unit](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-varargs/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-explicit-over-varargs/Macros_Test_2.scala
index f127ebc..13d7cd5 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-varargs/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-varargs-explicit-over-varargs/Macros_Test_2.scala
@@ -1,5 +1,5 @@
 object Macros {
-  def foo(xs: Int*) = macro Impls.foo
+  def foo(xs: Int*): Unit = macro Impls.foo
 }
 
 object Test extends App {
diff --git a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala
index 2ef8f04..18af845 100644
--- a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala
+++ b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala
@@ -1,9 +1,9 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(xs: c.Expr[Int]*) = {
+  def foo(c: Context)(xs: c.Expr[Int]*) = {
     import c.universe._
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), xs.map(_.tree).toList)
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), xs.map(_.tree).toList)
     c.Expr[Unit](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Macros_Test_2.scala
index 2311ca0..9ab1be9 100644
--- a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Macros_Test_2.scala
@@ -1,5 +1,5 @@
 object Macros {
-  def foo(xs: Int*) = macro Impls.foo
+  def foo(xs: Int*): Unit = macro Impls.foo
 }
 
 object Test extends App {
diff --git a/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala b/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala
index 2066893..64ab7de 100644
--- a/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala
+++ b/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala
@@ -1,13 +1,13 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
   def myprintln(xs: Int*) = {
     println(xs)
   }
 
-  def foo(c: Ctx)(xs: c.Expr[Int]*) = {
+  def foo(c: Context)(xs: c.Expr[Int]*) = {
     import c.universe._
-    val body = Apply(Select(Ident(newTermName("Impls")), newTermName("myprintln")), xs.map(_.tree).toList)
+    val body = Apply(Select(Ident(TermName("Impls")), TermName("myprintln")), xs.map(_.tree).toList)
     c.Expr[Unit](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-implicit-over-varargs/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-implicit-over-varargs/Macros_Test_2.scala
index 2311ca0..9ab1be9 100644
--- a/test/files/run/macro-expand-varargs-implicit-over-varargs/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-varargs-implicit-over-varargs/Macros_Test_2.scala
@@ -1,5 +1,5 @@
 object Macros {
-  def foo(xs: Int*) = macro Impls.foo
+  def foo(xs: Int*): Unit = macro Impls.foo
 }
 
 object Test extends App {
diff --git a/test/files/run/macro-impl-default-params/Impls_Macros_1.scala b/test/files/run/macro-impl-default-params/Impls_Macros_1.scala
index 7c40045..9b1d0ee 100644
--- a/test/files/run/macro-impl-default-params/Impls_Macros_1.scala
+++ b/test/files/run/macro-impl-default-params/Impls_Macros_1.scala
@@ -1,18 +1,17 @@
 import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo_targs[T, U: c.WeakTypeTag](c: Ctx = null)(x: c.Expr[Int] = null) = {
+  def foo_targs[T, U: c.WeakTypeTag](c: Context = null)(x: c.Expr[Int] = null) = {
     import c.{prefix => prefix}
     import c.universe._
     val U = implicitly[c.WeakTypeTag[U]]
-    val body = Block(List(
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("invoking foo_targs...")))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("type of prefix is: " + prefix.staticType)))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("type of prefix tree is: " + prefix.tree.tpe)))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("U is: " + U.tpe))))),
-      Literal(Constant(())))
-    c.Expr[Unit](body)
+    c.Expr[Unit](q"""
+      println("invoking foo_targs...")
+      println("type of prefix is: " + ${prefix.staticType.toString})
+      println("type of prefix tree is: " + ${prefix.tree.tpe.toString})
+      println("U is: " + ${U.tpe.toString})
+    """)
   }
 }
 
diff --git a/test/files/run/macro-impl-relaxed.check b/test/files/run/macro-impl-relaxed.check
new file mode 100644
index 0000000..487b116
--- /dev/null
+++ b/test/files/run/macro-impl-relaxed.check
@@ -0,0 +1,4 @@
+2
+2
+2
+2
diff --git a/test/files/run/macro-impl-relaxed/Macros_1.scala b/test/files/run/macro-impl-relaxed/Macros_1.scala
new file mode 100644
index 0000000..420eb2a
--- /dev/null
+++ b/test/files/run/macro-impl-relaxed/Macros_1.scala
@@ -0,0 +1,14 @@
+import language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def implUU(c: Context)(x: c.Tree): c.Tree = x
+  def implTU(c: Context)(x: c.Expr[Int]): c.Tree = x.tree
+  def implUT(c: Context)(x: c.Tree): c.Expr[Int] = c.Expr[Int](x)
+  def implTT(c: Context)(x: c.Expr[Int]): c.Expr[Int] = x
+
+  def fooUU(x: Int): Int = macro implUU
+  def fooTU(x: Int): Int = macro implTU
+  def fooUT(x: Int): Int = macro implUT
+  def fooTT(x: Int): Int = macro implTT
+}
\ No newline at end of file
diff --git a/test/files/run/macro-impl-relaxed/Test_2.scala b/test/files/run/macro-impl-relaxed/Test_2.scala
new file mode 100644
index 0000000..2eaeef0
--- /dev/null
+++ b/test/files/run/macro-impl-relaxed/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+  println(Macros.fooUU(2))
+  println(Macros.fooTU(2))
+  println(Macros.fooUT(2))
+  println(Macros.fooTT(2))
+}
\ No newline at end of file
diff --git a/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala b/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala
index 56c23f5..acc47fc 100644
--- a/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala
+++ b/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala
@@ -1,12 +1,9 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(unconventionalName: Ctx)(x: unconventionalName.Expr[Int]) = {
+  def foo(unconventionalName: Context)(x: unconventionalName.Expr[Int]) = {
     import unconventionalName.universe._
-    val body = Block(List(
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("invoking foo..."))))),
-      Literal(Constant(())))
-    unconventionalName.Expr[Unit](body)
+    unconventionalName.Expr[Unit](q"""println("invoking foo...")""")
   }
 }
 
diff --git a/test/files/run/macro-reify-splice-splice.check b/test/files/run/macro-impl-tparam-only-in-impl.check
similarity index 100%
rename from test/files/run/macro-reify-splice-splice.check
rename to test/files/run/macro-impl-tparam-only-in-impl.check
diff --git a/test/files/neg/macro-invalidshape-c.flags b/test/files/run/macro-impl-tparam-only-in-impl.flags
similarity index 100%
rename from test/files/neg/macro-invalidshape-c.flags
rename to test/files/run/macro-impl-tparam-only-in-impl.flags
diff --git a/test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala b/test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala
new file mode 100644
index 0000000..705defb
--- /dev/null
+++ b/test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo[U <: String](c: Context): c.Expr[Unit] = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") }
+}
diff --git a/test/files/run/macro-impl-tparam-only-in-impl/Macros_Test_2.scala b/test/files/run/macro-impl-tparam-only-in-impl/Macros_Test_2.scala
new file mode 100644
index 0000000..4901e24
--- /dev/null
+++ b/test/files/run/macro-impl-tparam-only-in-impl/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+  def foo: Unit = macro Impls.foo[String]
+}
+
+object Test extends App {
+  import Macros._
+  foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-optional.check b/test/files/run/macro-impl-tparam-typetag-is-optional.check
similarity index 100%
rename from test/files/run/macro-expand-tparams-optional.check
rename to test/files/run/macro-impl-tparam-typetag-is-optional.check
diff --git a/test/files/neg/macro-invalidsig-context-bounds.flags b/test/files/run/macro-impl-tparam-typetag-is-optional.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-context-bounds.flags
rename to test/files/run/macro-impl-tparam-typetag-is-optional.flags
diff --git a/test/files/run/macro-impl-tparam-typetag-is-optional/Impls_1.scala b/test/files/run/macro-impl-tparam-typetag-is-optional/Impls_1.scala
new file mode 100644
index 0000000..fc72e7a
--- /dev/null
+++ b/test/files/run/macro-impl-tparam-typetag-is-optional/Impls_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo[U](c: Context) = {
+    import c.universe._
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("don't know U"))))
+    c.Expr[Unit](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-impl-tparam-typetag-is-optional/Macros_Test_2.scala b/test/files/run/macro-impl-tparam-typetag-is-optional/Macros_Test_2.scala
new file mode 100644
index 0000000..2cf7b19
--- /dev/null
+++ b/test/files/run/macro-impl-tparam-typetag-is-optional/Macros_Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  def foo[U]: Unit = macro Impls.foo[U]
+  foo[Int]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.check b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.check
index 9161951..1d531f6 100644
--- a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.check
+++ b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.check
@@ -1,4 +1,4 @@
-reflective compilation has failed: 
+reflective compilation has failed:
 
 type mismatch;
  found   : String("42")
diff --git a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Impls_Macros_1.scala b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Impls_Macros_1.scala
index b3babd8..603500b 100644
--- a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Impls_Macros_1.scala
+++ b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Impls_Macros_1.scala
@@ -1,7 +1,7 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx): c.Expr[Int] = {
+  def foo(c: Context): c.Expr[Int] = {
     import c.universe._
     c.Expr(Literal(Constant("42")))
   }
diff --git a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala
index 0b9986e..61f0bdf 100644
--- a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala
+++ b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala
@@ -2,7 +2,7 @@ object Test extends App {
   import scala.reflect.runtime.universe._
   import scala.reflect.runtime.{currentMirror => cm}
   import scala.tools.reflect.ToolBox
-  val tree = Select(Ident(newTermName("Macros")), newTermName("foo"))
+  val tree = Select(Ident(TermName("Macros")), TermName("foo"))
   try cm.mkToolBox().eval(tree)
   catch { case ex: Throwable =>  println(ex.getMessage) }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-invalidret-nontypeable.check b/test/files/run/macro-invalidret-nontypeable.check
index cf7acb0..25cef2c 100644
--- a/test/files/run/macro-invalidret-nontypeable.check
+++ b/test/files/run/macro-invalidret-nontypeable.check
@@ -1,3 +1,3 @@
-reflective compilation has failed: 
+reflective compilation has failed:
 
 not found: value IDoNotExist
diff --git a/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala b/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala
index fb0d552..b6b9611 100644
--- a/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala
+++ b/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala
@@ -1,9 +1,9 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx) = {
+  def foo(c: Context) = {
     import c.universe._
-    val body = Ident(newTermName("IDoNotExist"))
+    val body = Ident(TermName("IDoNotExist"))
     c.Expr[Int](body)
   }
 }
diff --git a/test/files/run/macro-invalidret-nontypeable/Test_2.scala b/test/files/run/macro-invalidret-nontypeable/Test_2.scala
index 0daee49..7cd474f 100644
--- a/test/files/run/macro-invalidret-nontypeable/Test_2.scala
+++ b/test/files/run/macro-invalidret-nontypeable/Test_2.scala
@@ -2,7 +2,7 @@
   import scala.reflect.runtime.universe._
   import scala.reflect.runtime.{currentMirror => cm}
   import scala.tools.reflect.ToolBox
-  val tree = Select(Ident(newTermName("Macros")), newTermName("foo"))
+  val tree = Select(Ident(TermName("Macros")), TermName("foo"))
   try cm.mkToolBox().eval(tree)
   catch { case ex: Throwable =>  println(ex.getMessage) }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-invalidusage-badret.check b/test/files/run/macro-invalidusage-badret.check
index 221732e..e795500 100644
--- a/test/files/run/macro-invalidusage-badret.check
+++ b/test/files/run/macro-invalidusage-badret.check
@@ -1,5 +1,5 @@
-reflective compilation has failed: 
+reflective compilation has failed:
 
 type mismatch;
- found   : Int(42)
+ found   : Int
  required: String
diff --git a/test/files/run/macro-invalidusage-badret/Impls_Macros_1.scala b/test/files/run/macro-invalidusage-badret/Impls_Macros_1.scala
index 0d840ee..0d4c575 100644
--- a/test/files/run/macro-invalidusage-badret/Impls_Macros_1.scala
+++ b/test/files/run/macro-invalidusage-badret/Impls_Macros_1.scala
@@ -1,7 +1,7 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int]) = x
+  def foo(c: Context)(x: c.Expr[Int]) = x
 }
 
 object Macros {
diff --git a/test/files/run/macro-invalidusage-badret/Test_2.scala b/test/files/run/macro-invalidusage-badret/Test_2.scala
index 5cb0be5..fc71353 100644
--- a/test/files/run/macro-invalidusage-badret/Test_2.scala
+++ b/test/files/run/macro-invalidusage-badret/Test_2.scala
@@ -2,7 +2,7 @@ object Test extends App {
   import scala.reflect.runtime.universe._
   import scala.reflect.runtime.{currentMirror => cm}
   import scala.tools.reflect.ToolBox
-  val tree = Typed(Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42)))), Ident(newTypeName("String")))
+  val tree = Typed(Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant(42)))), Ident(TypeName("String")))
   try cm.mkToolBox().eval(tree)
   catch { case ex: Throwable =>  println(ex.getMessage) }
 }
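
The Test_2.scala updates above all follow one pattern: trees are still assembled by hand and evaluated through a ToolBox, only the name factories change from newTermName/newTypeName to TermName/TypeName. A minimal, self-contained sketch of that style (it assumes scala-reflect and scala-compiler on the classpath, and evaluates a plain arithmetic tree, since the Macros objects from these tests are not defined here):

    import scala.reflect.runtime.universe._
    import scala.reflect.runtime.{currentMirror => cm}
    import scala.tools.reflect.ToolBox

    object ToolBoxSketch extends App {
      // TermName replaces the deprecated newTermName factory.
      val tree = Apply(Select(Literal(Constant(1)), TermName("$plus")), List(Literal(Constant(41))))
      println(cm.mkToolBox().eval(tree)) // prints 42
    }
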
diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams.check b/test/files/run/macro-invalidusage-partialapplication-with-tparams.check
index f1d5e92..6cbcb9e 100644
--- a/test/files/run/macro-invalidusage-partialapplication-with-tparams.check
+++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams.check
@@ -1,3 +1,3 @@
-reflective compilation has failed: 
+reflective compilation has failed:
 
-macros cannot be partially applied
+too few argument lists for macro invocation
diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala
index 4583a72..8b5c59b 100644
--- a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala
+++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala
@@ -1,9 +1,9 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo[T: c.WeakTypeTag](c: Ctx)(x: c.Expr[T]) = {
+  def foo[T: c.WeakTypeTag](c: Context)(x: c.Expr[T]) = {
     import c.universe._
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(x.tree.toString))))
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(x.tree.toString))))
     c.Expr[Unit](body)
   }
 }
diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala
index e453d0b..9a34c62 100644
--- a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala
+++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala
@@ -2,7 +2,7 @@ object Test extends App {
   import scala.reflect.runtime.universe._
   import scala.reflect.runtime.{currentMirror => cm}
   import scala.tools.reflect.ToolBox
-  val tree = Select(Ident(newTermName("Macros")), newTermName("foo"))
+  val tree = Select(Ident(TermName("Macros")), TermName("foo"))
   try cm.mkToolBox().eval(tree)
   catch { case ex: Throwable =>  println(ex.getMessage) }
 }
diff --git a/test/files/run/macro-invalidusage-partialapplication.check b/test/files/run/macro-invalidusage-partialapplication.check
index f1d5e92..6cbcb9e 100644
--- a/test/files/run/macro-invalidusage-partialapplication.check
+++ b/test/files/run/macro-invalidusage-partialapplication.check
@@ -1,3 +1,3 @@
-reflective compilation has failed: 
+reflective compilation has failed:
 
-macros cannot be partially applied
+too few argument lists for macro invocation
diff --git a/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala b/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala
index 5866469..6970b4d 100644
--- a/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala
+++ b/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala
@@ -1,10 +1,10 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int])(y: c.Expr[Int]) = {
+  def foo(c: Context)(x: c.Expr[Int])(y: c.Expr[Int]) = {
     import c.universe._
-    val sum = Apply(Select(x.tree, newTermName("$plus")), List(y.tree))
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(sum))
+    val sum = Apply(Select(x.tree, TermName("$plus")), List(y.tree))
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(sum))
     c.Expr[Unit](body)
   }
 }
diff --git a/test/files/run/macro-invalidusage-partialapplication/Test_2.scala b/test/files/run/macro-invalidusage-partialapplication/Test_2.scala
index dc48c12..75b8c13 100644
--- a/test/files/run/macro-invalidusage-partialapplication/Test_2.scala
+++ b/test/files/run/macro-invalidusage-partialapplication/Test_2.scala
@@ -2,7 +2,7 @@ object Test extends App {
   import scala.reflect.runtime.universe._
   import scala.reflect.runtime.{currentMirror => cm}
   import scala.tools.reflect.ToolBox
-  val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(40))))
+  val tree = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant(40))))
   try cm.mkToolBox().eval(tree)
   catch { case ex: Throwable =>  println(ex.getMessage) }
 }
diff --git a/test/files/run/macro-openmacros/Impls_Macros_1.scala b/test/files/run/macro-openmacros/Impls_Macros_1.scala
index b863ac0..b60ca90 100644
--- a/test/files/run/macro-openmacros/Impls_Macros_1.scala
+++ b/test/files/run/macro-openmacros/Impls_Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def impl(c: Context): c.Expr[Unit] = {
@@ -14,9 +14,9 @@ object Macros {
     }
 
     import c.universe._
-    val next = if (c.enclosingMacros.length < 3) c.Expr[Unit](Select(Ident(c.mirror.staticModule("Macros")), newTermName("foo"))) else c.literalUnit
+    val next = if (c.enclosingMacros.length < 3) c.Expr[Unit](Select(Ident(c.mirror.staticModule("Macros")), TermName("foo"))) else c.Expr[Unit](Literal(Constant(())))
     c.universe.reify {
-      println(c.literal(normalizePaths(c.enclosingMacros.toString)).splice)
+      println(c.Expr[String](Literal(Constant(normalizePaths(c.enclosingMacros.toString)))).splice)
       next.splice
     }
   }
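
The macro-openmacros hunk above also shows the replacement for the removed c.literal/c.literalUnit helpers: wrap a Constant in a Literal and lift it with c.Expr. A hedged sketch of that idiom in isolation (the object and method names are illustrative; as in the paired _1/_2 test files, any call site would have to sit in a separate compilation unit):

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object LiteralSketch {
      // Replacement for the removed c.literal(...): build the Literal tree explicitly.
      def impl(c: Context)(): c.Expr[String] = {
        import c.universe._
        c.Expr[String](Literal(Constant("hello")))
      }
      def hello(): String = macro impl
    }
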
diff --git a/test/files/run/macro-parse-position-malformed.check b/test/files/run/macro-parse-position-malformed.check
new file mode 100644
index 0000000..00f0bc5
--- /dev/null
+++ b/test/files/run/macro-parse-position-malformed.check
@@ -0,0 +1 @@
+failed with 'source-<macro>,line-1,offset=7' position and '')' expected but eof found.' message
diff --git a/test/files/run/macro-parse-position-malformed/Impls_Macros_1.scala b/test/files/run/macro-parse-position-malformed/Impls_Macros_1.scala
new file mode 100644
index 0000000..b623d88
--- /dev/null
+++ b/test/files/run/macro-parse-position-malformed/Impls_Macros_1.scala
@@ -0,0 +1,18 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+import scala.reflect.macros.ParseException
+
+object Macros {
+  def impl(c: Context)() = {
+    import c.universe._
+    val out = try {
+      c.parse("foo(bar")
+      "didn't fail"
+    } catch {
+      case e: ParseException =>
+        s"failed with '${e.pos}' position and '${e.msg}' message"
+    }
+    c.Expr[String](Literal(Constant(out)))
+  }
+  def foo(): String = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/macro-parse-position-malformed/Test_2.scala b/test/files/run/macro-parse-position-malformed/Test_2.scala
new file mode 100644
index 0000000..cff569b
--- /dev/null
+++ b/test/files/run/macro-parse-position-malformed/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+  println(Macros.foo)
+}
diff --git a/test/files/run/macro-parse-position.check b/test/files/run/macro-parse-position.check
new file mode 100644
index 0000000..3da0320
--- /dev/null
+++ b/test/files/run/macro-parse-position.check
@@ -0,0 +1,5 @@
+false
+source-<macro>,line-1,offset=4
+8
+foo bar
+
diff --git a/test/files/run/macro-parse-position/Impls_Macros_1.scala b/test/files/run/macro-parse-position/Impls_Macros_1.scala
new file mode 100644
index 0000000..dd20fd2
--- /dev/null
+++ b/test/files/run/macro-parse-position/Impls_Macros_1.scala
@@ -0,0 +1,12 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def impl(c: Context)() = {
+    import c.universe._
+    val t = c.parse("foo bar")
+    val out = s"${t.pos == NoPosition}\n${t.pos}\n${t.pos.source.content.length}\n${new String(t.pos.source.content)}"
+    c.Expr[String](Literal(Constant(out)))
+  }
+  def foo(): String = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/macro-parse-position/Test_2.scala b/test/files/run/macro-parse-position/Test_2.scala
new file mode 100644
index 0000000..cff569b
--- /dev/null
+++ b/test/files/run/macro-parse-position/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+  println(Macros.foo)
+}
diff --git a/test/files/run/macro-quasiinvalidbody-c/Impls_Macros_1.scala b/test/files/run/macro-quasiinvalidbody-c/Impls_Macros_1.scala
index 6c14428..df189b7 100644
--- a/test/files/run/macro-quasiinvalidbody-c/Impls_Macros_1.scala
+++ b/test/files/run/macro-quasiinvalidbody-c/Impls_Macros_1.scala
@@ -1,8 +1,8 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   object Impls {
-    def foo(c: Ctx)(x: c.Expr[Any]) = x
+    def foo(c: Context)(x: c.Expr[Any]) = x
   }
 
   def foo(x: Any) = macro Impls.foo
diff --git a/test/files/run/macro-quasiquotes.check b/test/files/run/macro-quasiquotes.check
new file mode 100644
index 0000000..94ebaf9
--- /dev/null
+++ b/test/files/run/macro-quasiquotes.check
@@ -0,0 +1,4 @@
+1
+2
+3
+4
diff --git a/test/files/run/macro-quasiquotes/Macros_1.scala b/test/files/run/macro-quasiquotes/Macros_1.scala
new file mode 100644
index 0000000..764542a
--- /dev/null
+++ b/test/files/run/macro-quasiquotes/Macros_1.scala
@@ -0,0 +1,15 @@
+import language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+class Impls(val c: Context) {
+  import c.universe._
+  def impl1 = q"println(1)"
+  def impl2 = q"{ println(2); println(3) }"
+  def impl3 = q"4"
+}
+
+object Macros {
+  def m1: Unit = macro Impls.impl1
+  def m2: Unit = macro Impls.impl2
+  def m3: Int = macro Impls.impl3
+}
\ No newline at end of file
diff --git a/test/files/run/macro-quasiquotes/Test_2.scala b/test/files/run/macro-quasiquotes/Test_2.scala
new file mode 100644
index 0000000..4be1939
--- /dev/null
+++ b/test/files/run/macro-quasiquotes/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+  Macros.m1
+  Macros.m2
+  println(Macros.m3)
+}
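
macro-quasiquotes is a new test exercising two 2.11 features at once: macro bundles (an impl class taking val c: Context) and the q"..." interpolator. Quasiquotes also work against the runtime universe, so the tree-building side can be sketched standalone (again assuming scala-compiler on the classpath for the ToolBox):

    import scala.reflect.runtime.universe._
    import scala.reflect.runtime.{currentMirror => cm}
    import scala.tools.reflect.ToolBox

    object QuasiquoteSketch extends App {
      // q"..." builds the same trees the manual Apply/Select constructors did.
      val tree = q"println(1 + 2)"
      cm.mkToolBox().eval(tree) // prints 3
    }
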
diff --git a/test/files/run/macro-range/Common_1.scala b/test/files/run/macro-range/Common_1.scala
index 5c4bc21..35d2efd 100644
--- a/test/files/run/macro-range/Common_1.scala
+++ b/test/files/run/macro-range/Common_1.scala
@@ -1,4 +1,4 @@
-import reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 abstract class RangeDefault {
   val from, to: Int
@@ -12,6 +12,7 @@ abstract class RangeDefault {
 abstract class Utils {
   val context: Context
   import context.universe._
+  import internal._
 
   class TreeSubstituter(from: List[Symbol], to: List[Tree]) extends Transformer {
     override def transform(tree: Tree): Tree = tree match {
@@ -23,7 +24,7 @@ abstract class Utils {
         subst(from, to)
       case _ =>
         val tree1 = super.transform(tree)
-        if (tree1 ne tree) tree1.tpe = null
+        if (tree1 ne tree) setType(tree1, null)
         tree1
     }
   }
@@ -43,5 +44,5 @@ abstract class Utils {
     LabelDef(lname, Nil, rhs)
   }
   def makeBinop(left: Tree, op: String, right: Tree): Tree =
-    Apply(Select(left, newTermName(op)), List(right))
+    Apply(Select(left, TermName(op)), List(right))
 }
diff --git a/test/files/run/macro-range/Expansion_Impossible_2.scala b/test/files/run/macro-range/Expansion_Impossible_2.scala
index 57e0cee..242e83a 100644
--- a/test/files/run/macro-range/Expansion_Impossible_2.scala
+++ b/test/files/run/macro-range/Expansion_Impossible_2.scala
@@ -1,4 +1,4 @@
-import reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
   def foreach(c: Context)(f: c.Expr[Int => Unit]): c.Expr[Unit] = {
@@ -9,18 +9,18 @@ object Impls {
     import c.universe._
     import Flag._
 
-    val initName = nme.CONSTRUCTOR
+    val initName = termNames.CONSTRUCTOR
     // Either:
     //   scala"{ var i = $low; val h = $hi; while (i < h) { $f(i); i = i + 1 } }
     // or:
     //   scala"($_this: RangeDefault).foreach($f)"
     c.Expr(c.prefix.tree match {
       case Apply(Select(New(tpt), initName), List(lo, hi)) if tpt.symbol.fullName == "Range" =>
-        val iname = newTermName("$i")
-        val hname = newTermName("$h")
+        val iname = TermName("$i")
+        val hname = TermName("$h")
         def iref = Ident(iname)
         def href = Ident(hname)
-        val labelname = newTermName("$while")
+        val labelname = TermName("$while")
         val cond = makeBinop(iref, "$less", href)
         val body = Block(
             List(makeApply(f.tree, List(iref))),
@@ -37,8 +37,8 @@ object Impls {
       case _ =>
         Apply(
           Select(
-            Typed(c.prefix.tree, Ident(newTypeName("RangeDefault"))),
-            newTermName("foreach")),
+            Typed(c.prefix.tree, Ident(TypeName("RangeDefault"))),
+            TermName("foreach")),
           List(f.tree))
     })
   }
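
The macro-range changes replace direct tree mutation (tree.tpe = null) with the internal.setType helper and rename nme.CONSTRUCTOR to termNames.CONSTRUCTOR. A small sketch of the setType call against the runtime universe (object name assumed for illustration):

    import scala.reflect.runtime.universe._
    import internal._ // setType and friends live under universe.internal in 2.11

    object SetTypeSketch extends App {
      val tree = Literal(Constant(42))
      setType(tree, definitions.IntTpe) // was written as `tree.tpe = ...` in 2.10
      println(tree.tpe)                 // Int
    }
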
diff --git a/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala b/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala
index fa55933..e964da2 100644
--- a/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala
+++ b/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala
@@ -1,9 +1,9 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int]) = {
+  def foo(c: Context)(x: c.Expr[Int]) = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1))))
     c.Expr[Int](body)
   }
 }
diff --git a/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala b/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala
index 2e64c01..267d1bc 100644
--- a/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala
+++ b/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala
@@ -2,6 +2,6 @@ object Test extends App {
   import scala.reflect.runtime.universe._
   import scala.reflect.runtime.{currentMirror => cm}
   import scala.tools.reflect.ToolBox
-  val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42))))
+  val tree = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant(42))))
   println(cm.mkToolBox().eval(tree))
 }
diff --git a/test/files/run/macro-reflective-mamd-normal-mi.check b/test/files/run/macro-reflective-mamd-normal-mi.check
index ac4213d..920a139 100644
--- a/test/files/run/macro-reflective-mamd-normal-mi.check
+++ b/test/files/run/macro-reflective-mamd-normal-mi.check
@@ -1 +1 @@
-43
\ No newline at end of file
+43
diff --git a/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala b/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala
index 5d7e077..89a818d 100644
--- a/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala
+++ b/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala
@@ -1,9 +1,9 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int]) = {
+  def foo(c: Context)(x: c.Expr[Int]) = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1))))
     c.Expr[Int](body)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala b/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala
index 7056000..410ec1b 100644
--- a/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala
+++ b/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala
@@ -8,12 +8,12 @@ object Test extends App {
   import scala.reflect.runtime.{currentMirror => cm}
   import scala.tools.reflect.ToolBox
 
-  val macrobody = Select(Ident(newTermName("Impls")), newTermName("foo"))
-  val macroparam = ValDef(NoMods, newTermName("x"), TypeTree(definitions.IntClass.toType), EmptyTree)
-  val macrodef = DefDef(Modifiers(MACRO), newTermName("foo"), Nil, List(List(macroparam)), TypeTree(), macrobody)
-  val modulector = DefDef(NoMods, nme.CONSTRUCTOR, Nil, List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(()))))
-  val module = ModuleDef(NoMods, newTermName("Macros"), Template(Nil, emptyValDef, List(modulector, macrodef)))
-  val macroapp = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42))))
+  val macrobody = Select(Ident(TermName("Impls")), TermName("foo"))
+  val macroparam = ValDef(NoMods, TermName("x"), TypeTree(definitions.IntClass.toType), EmptyTree)
+  val macrodef = DefDef(Modifiers(MACRO), TermName("foo"), Nil, List(List(macroparam)), Ident(TypeName("Int")), macrobody)
+  val modulector = DefDef(NoMods, termNames.CONSTRUCTOR, Nil, List(List()), TypeTree(), Block(List(Apply(Select(Super(This(typeNames.EMPTY), typeNames.EMPTY), termNames.CONSTRUCTOR), List())), Literal(Constant(()))))
+  val module = ModuleDef(NoMods, TermName("Macros"), Template(Nil, noSelfType, List(modulector, macrodef)))
+  val macroapp = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant(42))))
   val tree = Block(List(macrodef, module), macroapp)
   val toolbox = cm.mkToolBox(options = "-language:experimental.macros")
   println(toolbox.eval(tree))
diff --git a/test/files/run/macro-reify-basic/Macros_1.scala b/test/files/run/macro-reify-basic/Macros_1.scala
index 3f6720f..1cf2a8a 100644
--- a/test/files/run/macro-reify-basic/Macros_1.scala
+++ b/test/files/run/macro-reify-basic/Macros_1.scala
@@ -1,10 +1,10 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def foo(s: String) = macro Impls.foo
 
   object Impls {
-    def foo(c: Ctx)(s: c.Expr[String]) = c.universe.reify {
+    def foo(c: Context)(s: c.Expr[String]) = c.universe.reify {
       println("hello " + s.splice)
     }
   }
diff --git a/test/files/run/macro-reify-chained1/Impls_Macros_1.scala b/test/files/run/macro-reify-chained1/Impls_Macros_1.scala
new file mode 100644
index 0000000..7f877b2
--- /dev/null
+++ b/test/files/run/macro-reify-chained1/Impls_Macros_1.scala
@@ -0,0 +1,47 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.macros.blackbox.Context
+import scala.language.experimental.macros
+
+case class Utils[C <: Context]( c:C ) {
+  import c.universe._
+  import c.{Tree=>_}
+  object removeDoubleReify extends c.universe.Transformer {
+    def apply( tree:Tree ) = transform(tree)
+    override def transform(tree: Tree): Tree = {
+      super.transform {
+        tree match {
+          case  Apply(TypeApply(Select(_this, termname), _), reification::Nil )
+                if termname.toString == "factory" => c.unreifyTree(reification)
+          case Apply(Select(_this, termname), reification::Nil )
+               if termname.toString == "factory" => c.unreifyTree(reification)
+          case _ => tree
+        }
+      }
+    }
+  }
+}
+object QueryableMacros{
+  def _helper[C <: Context,S:c.WeakTypeTag]( c:C )( name:String, projection:c.Expr[_] ) = {
+    import c.universe._
+    import internal._
+    val element_type = implicitly[c.WeakTypeTag[S]].tpe
+    val foo = c.Expr[ru.Expr[Queryable[S]]](
+    c.reifyTree( gen.mkRuntimeUniverseRef, EmptyTree, c.typecheck(
+      Utils[c.type](c).removeDoubleReify(
+        Apply(Select(c.prefix.tree, TermName( name )), List( projection.tree ))
+       ).asInstanceOf[Tree]
+      )))
+    c.universe.reify{ Queryable.factory[S]( foo.splice )}
+  }
+  def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
+               (c: Context)
+               (projection: c.Expr[T => S]): c.Expr[Queryable[S]] = _helper[c.type,S]( c )( "_map", projection )
+}
+class Queryable[T]{
+  def _map[S]( projection: T => S ) : Queryable[S] = ???
+  def map[S]( projection: T => S ) : Queryable[S] = macro QueryableMacros.map[T,S]
+}
+object Queryable{
+  def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-chained1/Test_2.scala b/test/files/run/macro-reify-chained1/Test_2.scala
new file mode 100644
index 0000000..2adb07b
--- /dev/null
+++ b/test/files/run/macro-reify-chained1/Test_2.scala
@@ -0,0 +1,9 @@
+object Test extends App{
+  val q : Queryable[Any] = new Queryable[Any]
+  q.map(x => x).map(x => x)
+
+  locally {
+    val q : Queryable[Any] = new Queryable[Any]
+    q.map(x => x).map(x => x)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-chained2/Impls_Macros_1.scala b/test/files/run/macro-reify-chained2/Impls_Macros_1.scala
new file mode 100644
index 0000000..965b191
--- /dev/null
+++ b/test/files/run/macro-reify-chained2/Impls_Macros_1.scala
@@ -0,0 +1,47 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.macros.whitebox.Context
+import scala.language.experimental.macros
+
+case class Utils[C <: Context]( c:C ) {
+  import c.universe._
+  import c.{Tree=>_}
+  object removeDoubleReify extends c.universe.Transformer {
+    def apply( tree:Tree ) = transform(tree)
+    override def transform(tree: Tree): Tree = {
+      super.transform {
+        tree match {
+          case  Apply(TypeApply(Select(_this, termname), _), reification::Nil )
+                if termname.toString == "factory" => c.unreifyTree(reification)
+          case Apply(Select(_this, termname), reification::Nil )
+               if termname.toString == "factory" => c.unreifyTree(reification)
+          case _ => tree
+        }
+      }
+    }
+  }
+}
+object QueryableMacros{
+  def _helper[C <: Context,S:c.WeakTypeTag]( c:C )( name:String, projection:c.Expr[_] ) = {
+    import c.universe._
+    import internal._
+    val element_type = implicitly[c.WeakTypeTag[S]].tpe
+    val foo = c.Expr[ru.Expr[Queryable[S]]](
+    c.reifyTree( gen.mkRuntimeUniverseRef, EmptyTree, c.typecheck(
+      Utils[c.type](c).removeDoubleReify(
+        Apply(Select(c.prefix.tree, TermName( name )), List( projection.tree ))
+       ).asInstanceOf[Tree]
+      )))
+    c.universe.reify{ Queryable.factory[S]( foo.splice )}
+  }
+  def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
+               (c: Context)
+               (projection: c.Expr[T => S]): c.Expr[Queryable[S]] = _helper[c.type,S]( c )( "_map", projection )
+}
+class Queryable[T]{
+  def _map[S]( projection: T => S ) : Queryable[S] = ???
+  def map[S]( projection: T => S ) : Queryable[S] = macro QueryableMacros.map[T,S]
+}
+object Queryable{
+  def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-chained2/Test_2.scala b/test/files/run/macro-reify-chained2/Test_2.scala
new file mode 100644
index 0000000..2adb07b
--- /dev/null
+++ b/test/files/run/macro-reify-chained2/Test_2.scala
@@ -0,0 +1,9 @@
+object Test extends App{
+  val q : Queryable[Any] = new Queryable[Any]
+  q.map(x => x).map(x => x)
+
+  locally {
+    val q : Queryable[Any] = new Queryable[Any]
+    q.map(x => x).map(x => x)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-freevars.check b/test/files/run/macro-reify-freevars.check
index f138068..f618e30 100644
--- a/test/files/run/macro-reify-freevars.check
+++ b/test/files/run/macro-reify-freevars.check
@@ -1,3 +1,3 @@
-reflective compilation has failed: 
+reflective compilation has failed:
 
 Macro expansion contains free term variable code defined by map in Macros_1.scala:9:9. Have you forgotten to use splice when splicing this variable into a reifee? If you have troubles tracking free term variables, consider using -Xlog-free-terms
diff --git a/test/files/run/macro-reify-freevars/Macros_1.scala b/test/files/run/macro-reify-freevars/Macros_1.scala
index 20f80c0..912f602 100644
--- a/test/files/run/macro-reify-freevars/Macros_1.scala
+++ b/test/files/run/macro-reify-freevars/Macros_1.scala
@@ -2,7 +2,7 @@ package scala.collection.slick
 
 object QueryableMacros{
   def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
-         (c: scala.reflect.macros.Context)
+         (c: scala.reflect.macros.blackbox.Context)
          (projection: c.Expr[T => S])
          : c.Expr[scala.collection.slick.Queryable[S]] = {
     import c.universe._
diff --git a/test/files/run/macro-reify-freevars/Test_2.scala b/test/files/run/macro-reify-freevars/Test_2.scala
index 7af9d89..c2d0118 100644
--- a/test/files/run/macro-reify-freevars/Test_2.scala
+++ b/test/files/run/macro-reify-freevars/Test_2.scala
@@ -2,10 +2,10 @@ object Test extends App {
   import scala.reflect.runtime.universe._
   import scala.reflect.runtime.{currentMirror => cm}
   import scala.tools.reflect.ToolBox
-  val q = New(AppliedTypeTree(Select(Select(Select(Ident(newTermName("scala")), newTermName("collection")), newTermName("slick")), newTypeName("Queryable")), List(Ident(newTermName("Int")))))
-  val x = ValDef(NoMods, newTermName("x"), Ident(newTermName("Int")), EmptyTree)
-  val fn = Function(List(x), Apply(Select(Ident(newTermName("x")), newTermName("$plus")), List(Literal(Constant("5")))))
-  val tree = Apply(Select(q, newTermName("map")), List(fn))
+  val q = New(AppliedTypeTree(Select(Select(Select(Ident(TermName("scala")), TermName("collection")), TermName("slick")), TypeName("Queryable")), List(Ident(TermName("Int")))))
+  val x = ValDef(NoMods, TermName("x"), Ident(TermName("Int")), EmptyTree)
+  val fn = Function(List(x), Apply(Select(Ident(TermName("x")), TermName("$plus")), List(Literal(Constant("5")))))
+  val tree = Apply(Select(q, TermName("map")), List(fn))
   try cm.mkToolBox().eval(tree)
   catch { case ex: Throwable =>  println(ex.getMessage) }
 }
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-a.check b/test/files/run/macro-reify-nested-a.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/macro-reify-nested-a.flags b/test/files/run/macro-reify-nested-a.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-reify-nested-a.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-a/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-a/Impls_Macros_1.scala
deleted file mode 100644
index b4351c2..0000000
--- a/test/files/run/macro-reify-nested-a/Impls_Macros_1.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.macros.Context
-
-case class Utils[C <: Context]( c:C ) {
-  import c.universe._
-  import c.{Tree=>_}
-  object removeDoubleReify extends c.universe.Transformer {
-    def apply( tree:Tree ) = transform(tree)
-    override def transform(tree: Tree): Tree = {
-      super.transform {
-        tree match {
-          case  Apply(TypeApply(Select(_this, termname), _), reification::Nil )
-                if termname.toString == "factory" => c.unreifyTree(reification)
-          case Apply(Select(_this, termname), reification::Nil )
-               if termname.toString == "factory" => c.unreifyTree(reification)
-          case _ => tree
-        }
-      }
-    }
-  }
-}
-object QueryableMacros{
-  def _helper[C <: Context,S:c.WeakTypeTag]( c:C )( name:String, projection:c.Expr[_] ) = {
-    import c.universe._
-    import treeBuild._
-    val element_type = implicitly[c.WeakTypeTag[S]].tpe
-    val foo = c.Expr[ru.Expr[Queryable[S]]](
-    c.reifyTree( mkRuntimeUniverseRef, EmptyTree, c.typeCheck(
-      Utils[c.type](c).removeDoubleReify(
-        Apply(Select(c.prefix.tree, newTermName( name )), List( projection.tree ))
-       ).asInstanceOf[Tree]
-      )))
-    c.universe.reify{ Queryable.factory[S]( foo.splice )}
-  }
-  def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
-               (c: scala.reflect.macros.Context)
-               (projection: c.Expr[T => S]): c.Expr[Queryable[S]] = _helper[c.type,S]( c )( "_map", projection )
-}
-class Queryable[T]{
-  def _map[S]( projection: T => S ) : Queryable[S] = ???
-  def map[S]( projection: T => S ) : Queryable[S] = macro QueryableMacros.map[T,S]
-}
-object Queryable{
-  def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null
-}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-a/Test_2.scala b/test/files/run/macro-reify-nested-a/Test_2.scala
deleted file mode 100644
index fa0eb37..0000000
--- a/test/files/run/macro-reify-nested-a/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App{
-  val q : Queryable[Any] = new Queryable[Any]
-  q.map(e1 => q.map(e2=>e1))
-}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-a1/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-a1/Impls_Macros_1.scala
new file mode 100644
index 0000000..7f877b2
--- /dev/null
+++ b/test/files/run/macro-reify-nested-a1/Impls_Macros_1.scala
@@ -0,0 +1,47 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.macros.blackbox.Context
+import scala.language.experimental.macros
+
+case class Utils[C <: Context]( c:C ) {
+  import c.universe._
+  import c.{Tree=>_}
+  object removeDoubleReify extends c.universe.Transformer {
+    def apply( tree:Tree ) = transform(tree)
+    override def transform(tree: Tree): Tree = {
+      super.transform {
+        tree match {
+          case  Apply(TypeApply(Select(_this, termname), _), reification::Nil )
+                if termname.toString == "factory" => c.unreifyTree(reification)
+          case Apply(Select(_this, termname), reification::Nil )
+               if termname.toString == "factory" => c.unreifyTree(reification)
+          case _ => tree
+        }
+      }
+    }
+  }
+}
+object QueryableMacros{
+  def _helper[C <: Context,S:c.WeakTypeTag]( c:C )( name:String, projection:c.Expr[_] ) = {
+    import c.universe._
+    import internal._
+    val element_type = implicitly[c.WeakTypeTag[S]].tpe
+    val foo = c.Expr[ru.Expr[Queryable[S]]](
+    c.reifyTree( gen.mkRuntimeUniverseRef, EmptyTree, c.typecheck(
+      Utils[c.type](c).removeDoubleReify(
+        Apply(Select(c.prefix.tree, TermName( name )), List( projection.tree ))
+       ).asInstanceOf[Tree]
+      )))
+    c.universe.reify{ Queryable.factory[S]( foo.splice )}
+  }
+  def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
+               (c: Context)
+               (projection: c.Expr[T => S]): c.Expr[Queryable[S]] = _helper[c.type,S]( c )( "_map", projection )
+}
+class Queryable[T]{
+  def _map[S]( projection: T => S ) : Queryable[S] = ???
+  def map[S]( projection: T => S ) : Queryable[S] = macro QueryableMacros.map[T,S]
+}
+object Queryable{
+  def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-a1/Test_2.scala b/test/files/run/macro-reify-nested-a1/Test_2.scala
new file mode 100644
index 0000000..b99c4c5
--- /dev/null
+++ b/test/files/run/macro-reify-nested-a1/Test_2.scala
@@ -0,0 +1,9 @@
+object Test extends App{
+  val q : Queryable[Any] = new Queryable[Any]
+  q.map(e1 => q.map(e2=>e1))
+
+  locally {
+    val q : Queryable[Any] = new Queryable[Any]
+    q.map(e1 => q.map(e2=>e1))
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-a2/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-a2/Impls_Macros_1.scala
new file mode 100644
index 0000000..965b191
--- /dev/null
+++ b/test/files/run/macro-reify-nested-a2/Impls_Macros_1.scala
@@ -0,0 +1,47 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.macros.whitebox.Context
+import scala.language.experimental.macros
+
+case class Utils[C <: Context]( c:C ) {
+  import c.universe._
+  import c.{Tree=>_}
+  object removeDoubleReify extends c.universe.Transformer {
+    def apply( tree:Tree ) = transform(tree)
+    override def transform(tree: Tree): Tree = {
+      super.transform {
+        tree match {
+          case  Apply(TypeApply(Select(_this, termname), _), reification::Nil )
+                if termname.toString == "factory" => c.unreifyTree(reification)
+          case Apply(Select(_this, termname), reification::Nil )
+               if termname.toString == "factory" => c.unreifyTree(reification)
+          case _ => tree
+        }
+      }
+    }
+  }
+}
+object QueryableMacros{
+  def _helper[C <: Context,S:c.WeakTypeTag]( c:C )( name:String, projection:c.Expr[_] ) = {
+    import c.universe._
+    import internal._
+    val element_type = implicitly[c.WeakTypeTag[S]].tpe
+    val foo = c.Expr[ru.Expr[Queryable[S]]](
+    c.reifyTree( gen.mkRuntimeUniverseRef, EmptyTree, c.typecheck(
+      Utils[c.type](c).removeDoubleReify(
+        Apply(Select(c.prefix.tree, TermName( name )), List( projection.tree ))
+       ).asInstanceOf[Tree]
+      )))
+    c.universe.reify{ Queryable.factory[S]( foo.splice )}
+  }
+  def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
+               (c: Context)
+               (projection: c.Expr[T => S]): c.Expr[Queryable[S]] = _helper[c.type,S]( c )( "_map", projection )
+}
+class Queryable[T]{
+  def _map[S]( projection: T => S ) : Queryable[S] = ???
+  def map[S]( projection: T => S ) : Queryable[S] = macro QueryableMacros.map[T,S]
+}
+object Queryable{
+  def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-a2/Test_2.scala b/test/files/run/macro-reify-nested-a2/Test_2.scala
new file mode 100644
index 0000000..b99c4c5
--- /dev/null
+++ b/test/files/run/macro-reify-nested-a2/Test_2.scala
@@ -0,0 +1,9 @@
+object Test extends App{
+  val q : Queryable[Any] = new Queryable[Any]
+  q.map(e1 => q.map(e2=>e1))
+
+  locally {
+    val q : Queryable[Any] = new Queryable[Any]
+    q.map(e1 => q.map(e2=>e1))
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-b.check b/test/files/run/macro-reify-nested-b.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/macro-reify-nested-b.flags b/test/files/run/macro-reify-nested-b.flags
deleted file mode 100644
index cd66464..0000000
--- a/test/files/run/macro-reify-nested-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-b/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-b/Impls_Macros_1.scala
deleted file mode 100644
index b4351c2..0000000
--- a/test/files/run/macro-reify-nested-b/Impls_Macros_1.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.macros.Context
-
-case class Utils[C <: Context]( c:C ) {
-  import c.universe._
-  import c.{Tree=>_}
-  object removeDoubleReify extends c.universe.Transformer {
-    def apply( tree:Tree ) = transform(tree)
-    override def transform(tree: Tree): Tree = {
-      super.transform {
-        tree match {
-          case  Apply(TypeApply(Select(_this, termname), _), reification::Nil )
-                if termname.toString == "factory" => c.unreifyTree(reification)
-          case Apply(Select(_this, termname), reification::Nil )
-               if termname.toString == "factory" => c.unreifyTree(reification)
-          case _ => tree
-        }
-      }
-    }
-  }
-}
-object QueryableMacros{
-  def _helper[C <: Context,S:c.WeakTypeTag]( c:C )( name:String, projection:c.Expr[_] ) = {
-    import c.universe._
-    import treeBuild._
-    val element_type = implicitly[c.WeakTypeTag[S]].tpe
-    val foo = c.Expr[ru.Expr[Queryable[S]]](
-    c.reifyTree( mkRuntimeUniverseRef, EmptyTree, c.typeCheck(
-      Utils[c.type](c).removeDoubleReify(
-        Apply(Select(c.prefix.tree, newTermName( name )), List( projection.tree ))
-       ).asInstanceOf[Tree]
-      )))
-    c.universe.reify{ Queryable.factory[S]( foo.splice )}
-  }
-  def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
-               (c: scala.reflect.macros.Context)
-               (projection: c.Expr[T => S]): c.Expr[Queryable[S]] = _helper[c.type,S]( c )( "_map", projection )
-}
-class Queryable[T]{
-  def _map[S]( projection: T => S ) : Queryable[S] = ???
-  def map[S]( projection: T => S ) : Queryable[S] = macro QueryableMacros.map[T,S]
-}
-object Queryable{
-  def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null
-}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-b/Test_2.scala b/test/files/run/macro-reify-nested-b/Test_2.scala
deleted file mode 100644
index fa13f57..0000000
--- a/test/files/run/macro-reify-nested-b/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App{
-  val q : Queryable[Any] = new Queryable[Any]
-  q.map(e1 => q.map(e2=>e1).map(e2=>e1))
-}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-b1/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-b1/Impls_Macros_1.scala
new file mode 100644
index 0000000..7f877b2
--- /dev/null
+++ b/test/files/run/macro-reify-nested-b1/Impls_Macros_1.scala
@@ -0,0 +1,47 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.macros.blackbox.Context
+import scala.language.experimental.macros
+
+case class Utils[C <: Context]( c:C ) {
+  import c.universe._
+  import c.{Tree=>_}
+  object removeDoubleReify extends c.universe.Transformer {
+    def apply( tree:Tree ) = transform(tree)
+    override def transform(tree: Tree): Tree = {
+      super.transform {
+        tree match {
+          case  Apply(TypeApply(Select(_this, termname), _), reification::Nil )
+                if termname.toString == "factory" => c.unreifyTree(reification)
+          case Apply(Select(_this, termname), reification::Nil )
+               if termname.toString == "factory" => c.unreifyTree(reification)
+          case _ => tree
+        }
+      }
+    }
+  }
+}
+object QueryableMacros{
+  def _helper[C <: Context,S:c.WeakTypeTag]( c:C )( name:String, projection:c.Expr[_] ) = {
+    import c.universe._
+    import internal._
+    val element_type = implicitly[c.WeakTypeTag[S]].tpe
+    val foo = c.Expr[ru.Expr[Queryable[S]]](
+    c.reifyTree( gen.mkRuntimeUniverseRef, EmptyTree, c.typecheck(
+      Utils[c.type](c).removeDoubleReify(
+        Apply(Select(c.prefix.tree, TermName( name )), List( projection.tree ))
+       ).asInstanceOf[Tree]
+      )))
+    c.universe.reify{ Queryable.factory[S]( foo.splice )}
+  }
+  def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
+               (c: Context)
+               (projection: c.Expr[T => S]): c.Expr[Queryable[S]] = _helper[c.type,S]( c )( "_map", projection )
+}
+class Queryable[T]{
+  def _map[S]( projection: T => S ) : Queryable[S] = ???
+  def map[S]( projection: T => S ) : Queryable[S] = macro QueryableMacros.map[T,S]
+}
+object Queryable{
+  def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-b1/Test_2.scala b/test/files/run/macro-reify-nested-b1/Test_2.scala
new file mode 100644
index 0000000..b199036
--- /dev/null
+++ b/test/files/run/macro-reify-nested-b1/Test_2.scala
@@ -0,0 +1,9 @@
+object Test extends App{
+  val q : Queryable[Any] = new Queryable[Any]
+  q.map(e1 => q.map(e2=>e1).map(e2=>e1))
+
+  locally {
+    val q : Queryable[Any] = new Queryable[Any]
+    q.map(e1 => q.map(e2=>e1).map(e2=>e1))
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-b2/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-b2/Impls_Macros_1.scala
new file mode 100644
index 0000000..965b191
--- /dev/null
+++ b/test/files/run/macro-reify-nested-b2/Impls_Macros_1.scala
@@ -0,0 +1,47 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.macros.whitebox.Context
+import scala.language.experimental.macros
+
+case class Utils[C <: Context]( c:C ) {
+  import c.universe._
+  import c.{Tree=>_}
+  object removeDoubleReify extends c.universe.Transformer {
+    def apply( tree:Tree ) = transform(tree)
+    override def transform(tree: Tree): Tree = {
+      super.transform {
+        tree match {
+          case  Apply(TypeApply(Select(_this, termname), _), reification::Nil )
+                if termname.toString == "factory" => c.unreifyTree(reification)
+          case Apply(Select(_this, termname), reification::Nil )
+               if termname.toString == "factory" => c.unreifyTree(reification)
+          case _ => tree
+        }
+      }
+    }
+  }
+}
+object QueryableMacros{
+  def _helper[C <: Context,S:c.WeakTypeTag]( c:C )( name:String, projection:c.Expr[_] ) = {
+    import c.universe._
+    import internal._
+    val element_type = implicitly[c.WeakTypeTag[S]].tpe
+    val foo = c.Expr[ru.Expr[Queryable[S]]](
+    c.reifyTree( gen.mkRuntimeUniverseRef, EmptyTree, c.typecheck(
+      Utils[c.type](c).removeDoubleReify(
+        Apply(Select(c.prefix.tree, TermName( name )), List( projection.tree ))
+       ).asInstanceOf[Tree]
+      )))
+    c.universe.reify{ Queryable.factory[S]( foo.splice )}
+  }
+  def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
+               (c: Context)
+               (projection: c.Expr[T => S]): c.Expr[Queryable[S]] = _helper[c.type,S]( c )( "_map", projection )
+}
+class Queryable[T]{
+  def _map[S]( projection: T => S ) : Queryable[S] = ???
+  def map[S]( projection: T => S ) : Queryable[S] = macro QueryableMacros.map[T,S]
+}
+object Queryable{
+  def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-b2/Test_2.scala b/test/files/run/macro-reify-nested-b2/Test_2.scala
new file mode 100644
index 0000000..b199036
--- /dev/null
+++ b/test/files/run/macro-reify-nested-b2/Test_2.scala
@@ -0,0 +1,9 @@
+object Test extends App{
+  val q : Queryable[Any] = new Queryable[Any]
+  q.map(e1 => q.map(e2=>e1).map(e2=>e1))
+
+  locally {
+    val q : Queryable[Any] = new Queryable[Any]
+    q.map(e1 => q.map(e2=>e1).map(e2=>e1))
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-ref-to-packageless/Impls_1.scala b/test/files/run/macro-reify-ref-to-packageless/Impls_1.scala
index f19fd23..38ec6f0 100644
--- a/test/files/run/macro-reify-ref-to-packageless/Impls_1.scala
+++ b/test/files/run/macro-reify-ref-to-packageless/Impls_1.scala
@@ -1,6 +1,6 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
   val `Answer to the Ultimate Question of Life, the Universe, and Everything` = 42
-  def foo(c: Ctx) = c.universe.reify { `Answer to the Ultimate Question of Life, the Universe, and Everything` }
+  def foo(c: Context) = c.universe.reify { `Answer to the Ultimate Question of Life, the Universe, and Everything` }
 }
diff --git a/test/files/run/macro-reify-ref-to-packageless/Test_2.scala b/test/files/run/macro-reify-ref-to-packageless/Test_2.scala
index 9d475f7..c167b16 100644
--- a/test/files/run/macro-reify-ref-to-packageless/Test_2.scala
+++ b/test/files/run/macro-reify-ref-to-packageless/Test_2.scala
@@ -1,4 +1,4 @@
 object Test extends App {
-  def foo = macro Impls.foo
+  def foo: Int = macro Impls.foo
   println(foo)
 }
\ No newline at end of file
diff --git a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
index 5330d0e..f038d87 100644
--- a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
+++ b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
@@ -1,10 +1,10 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int]) = {
-    val x1 = c.Expr[Int](c.resetAllAttrs(x.tree))
-// was:    c.literal(x1.splice)
-    c.literal(c.eval(x1))
+  def foo(c: Context)(x: c.Expr[Int]) = {
+    import c.universe._
+    val x1 = c.Expr[Int](c.untypecheck(x.tree))
+    c.Expr[Int](Literal(Constant(c.eval(x1))))
   }
 }
 
diff --git a/test/files/run/macro-reify-splice-outside-reify/Test_2.scala b/test/files/run/macro-reify-splice-outside-reify/Test_2.scala
index 54bd03f..dbc17e7 100644
--- a/test/files/run/macro-reify-splice-outside-reify/Test_2.scala
+++ b/test/files/run/macro-reify-splice-outside-reify/Test_2.scala
@@ -2,7 +2,7 @@ object Test extends App {
   import scala.reflect.runtime.universe._
   import scala.reflect.runtime.{currentMirror => cm}
   import scala.tools.reflect.ToolBox
-  val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42))))
+  val tree = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant(42))))
   try println(cm.mkToolBox().eval(tree))
   catch { case ex: Throwable =>  println(ex.getMessage) }
 }
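
macro-reify-splice-outside-reify collects three renames in one place: c.resetAllAttrs becomes c.untypecheck, c.typeCheck becomes c.typecheck, and c.literal is again spelled out as c.Expr(Literal(Constant(...))). A sketch of the updated impl shape (names are illustrative; the call site belongs in a later compilation run, as in the _2 test file):

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object EvalSketch {
      // untypecheck (formerly resetAllAttrs) strips attributes so the argument can be re-evaluated.
      def impl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
        import c.universe._
        val stripped = c.Expr[Int](c.untypecheck(x.tree))
        c.Expr[Int](Literal(Constant(c.eval(stripped))))
      }
      def constFold(x: Int): Int = macro impl
    }
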
diff --git a/test/files/run/macro-reify-splice-splice/Macros_1.scala b/test/files/run/macro-reify-splice-splice/Macros_1.scala
deleted file mode 100644
index efdd5db..0000000
--- a/test/files/run/macro-reify-splice-splice/Macros_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Macros {
-  def foo = macro Impls.foo
-
-  object Impls {
-    def foo(c: Ctx) = c.universe.reify {
-      { c.universe.reify(c.universe.reify("hello world")) }.splice.splice
-    }
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-staticXXX/Macros_1.scala b/test/files/run/macro-reify-staticXXX/Macros_1.scala
index f12c8f7..2993218 100644
--- a/test/files/run/macro-reify-staticXXX/Macros_1.scala
+++ b/test/files/run/macro-reify-staticXXX/Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object B { override def toString = "object" }
 class C { override def toString = "class" }
diff --git a/test/files/run/macro-reify-tagful-a/Macros_1.scala b/test/files/run/macro-reify-tagful-a/Macros_1.scala
index f2512dc..6f061fd 100644
--- a/test/files/run/macro-reify-tagful-a/Macros_1.scala
+++ b/test/files/run/macro-reify-tagful-a/Macros_1.scala
@@ -1,11 +1,11 @@
 import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def foo[T](s: T) = macro Impls.foo[T]
 
   object Impls {
-    def foo[T: c.WeakTypeTag](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
+    def foo[T: c.WeakTypeTag](c: Context)(s: c.Expr[T]) = c.universe.reify {
       List(s.splice)
     }
   }
diff --git a/test/files/run/macro-reify-tagless-a.check b/test/files/run/macro-reify-tagless-a.check
index 231741e..d160e80 100644
--- a/test/files/run/macro-reify-tagless-a.check
+++ b/test/files/run/macro-reify-tagless-a.check
@@ -1,3 +1,3 @@
-reflective compilation has failed: 
+reflective compilation has failed:
 
 Macro expansion contains free type variable T defined by foo in Impls_Macros_1.scala:7:13. Have you forgotten to use c.WeakTypeTag annotation for this type parameter? If you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/macro-reify-tagless-a/Impls_Macros_1.scala b/test/files/run/macro-reify-tagless-a/Impls_Macros_1.scala
index 96cfb75..faac3e3 100644
--- a/test/files/run/macro-reify-tagless-a/Impls_Macros_1.scala
+++ b/test/files/run/macro-reify-tagless-a/Impls_Macros_1.scala
@@ -1,10 +1,10 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def foo[T](s: T) = macro Impls.foo[T]
 
   object Impls {
-    def foo[T](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
+    def foo[T](c: Context)(s: c.Expr[T]) = c.universe.reify {
       List[T](s.splice)
     }
   }
diff --git a/test/files/run/macro-reify-tagless-a/Test_2.scala b/test/files/run/macro-reify-tagless-a/Test_2.scala
index 584c4bd..afb418a 100644
--- a/test/files/run/macro-reify-tagless-a/Test_2.scala
+++ b/test/files/run/macro-reify-tagless-a/Test_2.scala
@@ -6,9 +6,9 @@ object Test extends App {
   import scala.reflect.runtime.{currentMirror => cm}
   import scala.tools.reflect.ToolBox
   val tpt = AppliedTypeTree(Ident(definitions.ListClass), List(Ident(definitions.StringClass)))
-  val rhs = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant("hello world"))))
-  val list = ValDef(NoMods, newTermName("list"), tpt, rhs)
-  val tree = Block(List(list), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Ident(list.name))))
+  val rhs = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant("hello world"))))
+  val list = ValDef(NoMods, TermName("list"), tpt, rhs)
+  val tree = Block(List(list), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Ident(list.name))))
   try cm.mkToolBox().eval(tree)
   catch { case ex: Throwable =>  println(ex.getMessage) }
 }
diff --git a/test/files/run/macro-reify-type/Macros_1.scala b/test/files/run/macro-reify-type/Macros_1.scala
index 06de057..c38cf8a 100644
--- a/test/files/run/macro-reify-type/Macros_1.scala
+++ b/test/files/run/macro-reify-type/Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 import scala.reflect.runtime.{universe => ru}
 
 object StaticReflect {
@@ -6,9 +6,10 @@ object StaticReflect {
 
   def methodImpl[A: c.WeakTypeTag](c: Context)(name: c.Expr[String]): c.Expr[ru.Type] = {
     import c.universe._
+    import internal._
 
     val nameName: TermName = name.tree match {
-      case Literal(Constant(str: String)) => newTermName(str)
+      case Literal(Constant(str: String)) => TermName(str)
       case _                              => c.error(c.enclosingPosition, s"Method name not constant.") ; return reify(ru.NoType)
     }
     val clazz  = weakTypeOf[A]
@@ -16,9 +17,9 @@ object StaticReflect {
     clazz member nameName match {
       case NoSymbol => c.error(c.enclosingPosition, s"No member called $nameName in $clazz.") ; reify(ru.NoType)
       case member   =>
-        val mtpe  = member typeSignatureIn clazz
-        val mtag  = c.reifyType(treeBuild.mkRuntimeUniverseRef, Select(treeBuild.mkRuntimeUniverseRef, newTermName("rootMirror")), mtpe)
-        val mtree = Select(mtag, newTermName("tpe"))
+        val mtpe  = member infoIn clazz
+        val mtag  = c.reifyType(gen.mkRuntimeUniverseRef, Select(gen.mkRuntimeUniverseRef, TermName("rootMirror")), mtpe)
+        val mtree = Select(mtag, TermName("tpe"))
 
         c.Expr[ru.Type](mtree)
     }
diff --git a/test/files/run/macro-reify-type/Test_2.scala b/test/files/run/macro-reify-type/Test_2.scala
index 9beaf98..8ec60e9 100644
--- a/test/files/run/macro-reify-type/Test_2.scala
+++ b/test/files/run/macro-reify-type/Test_2.scala
@@ -5,16 +5,16 @@ object Test extends App {
   println(method[List[Int]]("map"))
   //val $u: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe;
   //val $m: $u.Mirror = scala.reflect.runtime.universe.rootMirror;
-  //import $u._, $m._, Flag._
+  //import $u._, $m._, Flag._, internal._
   //val tpe = {
-  //  val symdef$B2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), newTypeName("B"), NoPosition, DEFERRED | PARAM, false);
-  //  val symdef$That2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), newTypeName("That"), NoPosition, DEFERRED | PARAM, false);
-  //  val symdef$f2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), newTermName("f"), NoPosition, PARAM, false);
-  //  val symdef$bf2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), newTermName("bf"), NoPosition, IMPLICIT | PARAM, false);
-  //  build.setTypeSignature(symdef$B2, TypeBounds(staticClass("scala.Nothing").asType.toTypeConstructor, staticClass("scala.Any").asType.toTypeConstructor));
-  //  build.setTypeSignature(symdef$That2, TypeBounds(staticClass("scala.Nothing").asType.toTypeConstructor, staticClass("scala.Any").asType.toTypeConstructor));
-  //  build.setTypeSignature(symdef$f2, TypeRef(ThisType(staticPackage("scala").asModule.moduleClass), staticClass("scala.Function1"), List(staticClass("scala.Int").asType.toTypeConstructor, TypeRef(NoPrefix, symdef$B2, List()))));
-  //  build.setTypeSignature(symdef$bf2, TypeRef(ThisType(staticPackage("scala.collection.generic").asModule.moduleClass), staticClass("scala.collection.generic.CanBuildFrom"), List(TypeRef(ThisType(staticPackage("scala.collection.immutable").asModule.moduleClass), staticClass("scala.collection.immutable.List"), List(staticClass("scala.Int").asType.toTypeConstructor)), TypeRef(NoPrefix, symdef$B2, List()), TypeRef(NoPrefix, symdef$That2, List()))));
+  //  val symdef$B2 = reificationSupport.newNestedSymbol(reificationSupport.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), TypeName("B"), NoPosition, DEFERRED | PARAM, false);
+  //  val symdef$That2 = reificationSupport.newNestedSymbol(reificationSupport.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), TypeName("That"), NoPosition, DEFERRED | PARAM, false);
+  //  val symdef$f2 = reificationSupport.newNestedSymbol(reificationSupport.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), TermName("f"), NoPosition, PARAM, false);
+  //  val symdef$bf2 = reificationSupport.newNestedSymbol(reificationSupport.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), TermName("bf"), NoPosition, IMPLICIT | PARAM, false);
+  //  reificationSupport.setInfo(symdef$B2, TypeBounds(staticClass("scala.Nothing").asType.toTypeConstructor, staticClass("scala.Any").asType.toTypeConstructor));
+  //  reificationSupport.setInfo(symdef$That2, TypeBounds(staticClass("scala.Nothing").asType.toTypeConstructor, staticClass("scala.Any").asType.toTypeConstructor));
+  //  reificationSupport.setInfo(symdef$f2, TypeRef(ThisType(staticPackage("scala").asModule.moduleClass), staticClass("scala.Function1"), List(staticClass("scala.Int").asType.toTypeConstructor, TypeRef(NoPrefix, symdef$B2, List()))));
+  //  reificationSupport.setInfo(symdef$bf2, TypeRef(ThisType(staticPackage("scala.collection.generic").asModule.moduleClass), staticClass("scala.collection.generic.CanBuildFrom"), List(TypeRef(ThisType(staticPackage("scala.collection.immutable").asModule.moduleClass), staticClass("scala.collection.immutable.List"), List(staticClass("scala.Int").asType.toTypeConstructor)), TypeRef(NoPrefix, symdef$B2, List()), TypeRef(NoPrefix, symdef$That2, List()))));
   //  PolyType(List(symdef$B2, symdef$That2), MethodType(List(symdef$f2), MethodType(List(symdef$bf2), TypeRef(NoPrefix, symdef$That2, List()))))
   //}
   //println(tpe)
diff --git a/test/files/run/macro-reify-unreify/Macros_1.scala b/test/files/run/macro-reify-unreify/Macros_1.scala
index 9f04c13..d92dfa3 100644
--- a/test/files/run/macro-reify-unreify/Macros_1.scala
+++ b/test/files/run/macro-reify-unreify/Macros_1.scala
@@ -1,15 +1,15 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def foo(s: String) = macro Impls.foo
 
   object Impls {
-    def foo(c: Ctx)(s: c.Expr[String]) = {
+    def foo(c: Context)(s: c.Expr[String]) = {
       import c.universe._
-      import treeBuild._
+      import internal._
 
-      val world = c.reifyTree(mkRuntimeUniverseRef, EmptyTree, s.tree)
-      val greeting = c.reifyTree(mkRuntimeUniverseRef, EmptyTree, c.typeCheck(Apply(Select(Literal(Constant("hello ")), newTermName("$plus")), List(c.unreifyTree(world)))))
+      val world = c.reifyTree(gen.mkRuntimeUniverseRef, EmptyTree, s.tree)
+      val greeting = c.reifyTree(gen.mkRuntimeUniverseRef, EmptyTree, c.typecheck(Apply(Select(Literal(Constant("hello ")), TermName("$plus")), List(c.unreifyTree(world)))))
       val typedGreeting = c.Expr[String](greeting)
 
       c.universe.reify {
diff --git a/test/files/run/macro-repl-basic.check b/test/files/run/macro-repl-basic.check
index 7deed4a..86b4d47 100644
--- a/test/files/run/macro-repl-basic.check
+++ b/test/files/run/macro-repl-basic.check
@@ -1,36 +1,34 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> import language.experimental.macros
 import language.experimental.macros
 
-scala> import scala.reflect.macros.{Context => Ctx}
-import scala.reflect.macros.{Context=>Ctx}
+scala> import scala.reflect.macros.blackbox.Context
+import scala.reflect.macros.blackbox.Context
 
 scala> 
 
 scala> object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int]) = {
+  def foo(c: Context)(x: c.Expr[Int]) = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1))))
     c.Expr[Int](body)
   }
 
-  def bar(c: Ctx)(x: c.Expr[Int]) = {
+  def bar(c: Context)(x: c.Expr[Int]) = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2))))
     c.Expr[Int](body)
   }
 
-  def quux(c: Ctx)(x: c.Expr[Int]) = {
+  def quux(c: Context)(x: c.Expr[Int]) = {
     import c.universe._
-    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3))))
     c.Expr[Int](body)
   }
 }
-defined module Impls
+defined object Impls
 
 scala> object Macros {
   object Shmacros {
@@ -40,7 +38,7 @@ scala> object Macros {
 }; class Macros {
   def quux(x: Int): Int = macro Impls.quux
 }
-defined module Macros
+defined object Macros
 defined class Macros
 
 scala> 
diff --git a/test/files/run/macro-repl-basic.scala b/test/files/run/macro-repl-basic.scala
index eae1feb..217f3bc 100644
--- a/test/files/run/macro-repl-basic.scala
+++ b/test/files/run/macro-repl-basic.scala
@@ -3,24 +3,24 @@ import scala.tools.partest.ReplTest
 object Test extends ReplTest {
   def code = """
     |import language.experimental.macros
-    |import scala.reflect.macros.{Context => Ctx}
+    |import scala.reflect.macros.blackbox.Context
     |
     |object Impls {
-    |  def foo(c: Ctx)(x: c.Expr[Int]) = {
+    |  def foo(c: Context)(x: c.Expr[Int]) = {
     |    import c.universe._
-    |    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+    |    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1))))
     |    c.Expr[Int](body)
     |  }
     |
-    |  def bar(c: Ctx)(x: c.Expr[Int]) = {
+    |  def bar(c: Context)(x: c.Expr[Int]) = {
     |    import c.universe._
-    |    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+    |    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2))))
     |    c.Expr[Int](body)
     |  }
     |
-    |  def quux(c: Ctx)(x: c.Expr[Int]) = {
+    |  def quux(c: Context)(x: c.Expr[Int]) = {
     |    import c.universe._
-    |    val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+    |    val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3))))
     |    c.Expr[Int](body)
     |  }
     |}
diff --git a/test/files/run/macro-repl-dontexpand.check b/test/files/run/macro-repl-dontexpand.check
index 628a914..20d3b2d 100644
--- a/test/files/run/macro-repl-dontexpand.check
+++ b/test/files/run/macro-repl-dontexpand.check
@@ -1,12 +1,16 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
+scala> def bar1(c: scala.reflect.macros.blackbox.Context) = ???
+bar1: (c: scala.reflect.macros.blackbox.Context)Nothing
+
+scala> def foo1 = macro bar1
+defined term macro foo1: Nothing
 
-scala> def bar(c: scala.reflect.macros.Context) = ???
-bar: (c: scala.reflect.macros.Context)Nothing
+scala> def bar2(c: scala.reflect.macros.whitebox.Context) = ???
+bar2: (c: scala.reflect.macros.whitebox.Context)Nothing
 
-scala> def foo = macro bar
-foo: Any
+scala> def foo2 = macro bar2
+defined term macro foo2: Nothing
 
 scala> 
diff --git a/test/files/run/macro-repl-dontexpand.scala b/test/files/run/macro-repl-dontexpand.scala
index f3422d8..920f400 100644
--- a/test/files/run/macro-repl-dontexpand.scala
+++ b/test/files/run/macro-repl-dontexpand.scala
@@ -3,7 +3,9 @@ import scala.tools.partest.ReplTest
 object Test extends ReplTest {
   override def extraSettings = "-language:experimental.macros"
   def code = """
-    |def bar(c: scala.reflect.macros.Context) = ???
-    |def foo = macro bar
+    |def bar1(c: scala.reflect.macros.blackbox.Context) = ???
+    |def foo1 = macro bar1
+    |def bar2(c: scala.reflect.macros.whitebox.Context) = ???
+    |def foo2 = macro bar2
     |""".stripMargin
 }
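
The dontexpand test above now exercises both halves of the 2.11 context split: scala.reflect.macros.Context is gone, replaced by blackbox.Context and whitebox.Context. A minimal sketch of a macro pair written against each flavour; every name below is illustrative and not part of the patch:

    import scala.language.experimental.macros
    import scala.reflect.macros.{blackbox, whitebox}

    object ContextFlavours {
      // Blackbox: callers only ever see the declared signature (Int here).
      def addOne(x: Int): Int = macro addOneImpl
      def addOneImpl(c: blackbox.Context)(x: c.Tree): c.Tree = {
        import c.universe._
        q"$x + 1"
      }

      // Whitebox: the expansion may refine the declared type, which is what
      // extractor macros and fundep materializers rely on.
      def pair(x: Int): Any = macro pairImpl
      def pairImpl(c: whitebox.Context)(x: c.Tree): c.Tree = {
        import c.universe._
        q"($x, $x)"
      }
    }
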
diff --git a/test/files/run/macro-settings/Impls_Macros_1.scala b/test/files/run/macro-settings/Impls_Macros_1.scala
index 83d80a5..851a987 100644
--- a/test/files/run/macro-settings/Impls_Macros_1.scala
+++ b/test/files/run/macro-settings/Impls_Macros_1.scala
@@ -1,8 +1,11 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def impl(c: Context) = c.universe.reify {
-    println(c.literal(c.settings.toString).splice)
+  def impl(c: Context) = {
+    import c.universe._
+    reify {
+      println(c.Expr[String](Literal(Constant(c.settings.toString))).splice)
+    }
   }
 }
 
diff --git a/test/files/run/macro-sip19-revised/Impls_Macros_1.scala b/test/files/run/macro-sip19-revised/Impls_Macros_1.scala
index 5f3f61c..ded4d85 100644
--- a/test/files/run/macro-sip19-revised/Impls_Macros_1.scala
+++ b/test/files/run/macro-sip19-revised/Impls_Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.whitebox.Context
 
 object Macros {
   def impl(c: Context) = {
@@ -7,11 +7,12 @@ object Macros {
     val inscope = c.inferImplicitValue(c.mirror.staticClass("SourceLocation").toType)
     val outer = c.Expr[SourceLocation](if (!inscope.isEmpty) inscope else Literal(Constant(null)))
 
-    val Apply(fun, args) = c.enclosingImplicits(0)._2
+    val Apply(fun, args) = c.enclosingImplicits(0).tree
     val fileName = fun.pos.source.file.file.getName
     val line = fun.pos.line
     val charOffset = fun.pos.point
-    c.universe.reify { SourceLocation1(outer.splice, c.literal(fileName).splice, c.literal(line).splice, c.literal(charOffset).splice) }
+    def literal[T](x: T) = c.Expr[T](Literal(Constant(x)))
+    c.universe.reify { SourceLocation1(outer.splice, literal(fileName).splice, literal(line).splice, literal(charOffset).splice) }
   }
 
   implicit def sourceLocation: SourceLocation1 = macro impl
diff --git a/test/files/run/macro-sip19/Impls_Macros_1.scala b/test/files/run/macro-sip19/Impls_Macros_1.scala
index 535ec2c..f66ab71 100644
--- a/test/files/run/macro-sip19/Impls_Macros_1.scala
+++ b/test/files/run/macro-sip19/Impls_Macros_1.scala
@@ -1,13 +1,14 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.whitebox.Context
 
 object Macros {
   def impl(c: Context) = {
     import c.universe._
-    val Apply(fun, args) = c.enclosingImplicits(0)._2
+    val Apply(fun, args) = c.enclosingImplicits(0).tree
     val fileName = fun.pos.source.file.file.getName
     val line = fun.pos.line
     val charOffset = fun.pos.point
-    c.universe.reify { SourceLocation(c.literal(fileName).splice, c.literal(line).splice, c.literal(charOffset).splice) }
+    def literal[T](x: T) = c.Expr[T](Literal(Constant(x)))
+    c.universe.reify { SourceLocation(literal(fileName).splice, literal(line).splice, literal(charOffset).splice) }
   }
 
   implicit def sourceLocation: SourceLocation = macro impl
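
Both SIP-19 tests replace the removed c.literal with the same hand-rolled wrapper around Literal(Constant(...)). Pulled out into a standalone helper, the idiom looks like this (object and method names are hypothetical):

    import scala.reflect.macros.blackbox.Context

    object Literals {
      // 2.11 drops c.literal/c.literalUnit; lifting a constant by hand:
      def literal[T](c: Context)(x: T): c.Expr[T] = {
        import c.universe._
        c.Expr[T](Literal(Constant(x)))
      }
    }
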
diff --git a/test/files/run/macro-subpatterns.check b/test/files/run/macro-subpatterns.check
new file mode 100644
index 0000000..4997146
--- /dev/null
+++ b/test/files/run/macro-subpatterns.check
@@ -0,0 +1,3 @@
+Some(List((a @ Extractor((b @ Extractor((c @ _)))))))
+Some(List((b @ Extractor((c @ _)))))
+Some(List((c @ _)))
diff --git a/test/files/run/macro-subpatterns/Macro_1.scala b/test/files/run/macro-subpatterns/Macro_1.scala
new file mode 100644
index 0000000..e009e41
--- /dev/null
+++ b/test/files/run/macro-subpatterns/Macro_1.scala
@@ -0,0 +1,17 @@
+import scala.reflect.macros.whitebox.Context
+import language.experimental.macros
+
+object Extractor {
+  def unapply(x: Any): Any = macro unapplyImpl
+  def unapplyImpl(c: Context)(x: c.Tree) = {
+    import c.universe._
+    import internal._
+    q"""
+      new {
+        def isEmpty = false
+        def get = ${subpatterns(x).toString}
+        def unapply(x: Any) = this
+      }.unapply($x)
+    """
+  }
+}
diff --git a/test/files/run/macro-subpatterns/Test_2.scala b/test/files/run/macro-subpatterns/Test_2.scala
new file mode 100644
index 0000000..dc6e668
--- /dev/null
+++ b/test/files/run/macro-subpatterns/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+  42 match {
+    case Extractor(a @ Extractor(b @ Extractor(c))) => println(a); println(b); println(c)
+  }
+}
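
The generated unapply above leans on 2.11's name-based pattern matching: the matcher accepts any result type that exposes isEmpty and get, no Option required. A plain, non-macro sketch of that contract (names are illustrative, not from the patch):

    object Halve {
      // Name-based extractor result: only isEmpty and get are needed.
      class Res(n: Int) {
        def isEmpty: Boolean = n % 2 != 0
        def get: Int = n / 2
      }
      def unapply(n: Int): Res = new Res(n)
    }

    object HalveDemo extends App {
      10 match {
        case Halve(k) => println(k) // prints 5
        case _        => println("odd")
      }
    }
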
diff --git a/test/files/run/macro-system-properties.check b/test/files/run/macro-system-properties.check
index dce976d..ffbd5a8 100644
--- a/test/files/run/macro-system-properties.check
+++ b/test/files/run/macro-system-properties.check
@@ -1,26 +1,22 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
-scala>     import language.experimental._, reflect.macros.Context
-import language.experimental._
-import reflect.macros.Context
+scala> import scala.language.experimental._, scala.reflect.macros.blackbox.Context
+import scala.language.experimental._
+import scala.reflect.macros.blackbox.Context
 
 scala>     object GrabContext {
       def lastContext = Option(System.getProperties.get("lastContext").asInstanceOf[reflect.macros.runtime.Context])
       // System.properties lets you stash true globals (unlike statics which are classloader scoped)
-      def impl(c: Context)() = { System.getProperties.put("lastContext", c); c.literalUnit }
-      def grab() = macro impl
+      def impl(c: Context)() = { import c.universe._; System.getProperties.put("lastContext", c); c.Expr[Unit](q"()") }
+      def grab(): Unit = macro impl
     }
-defined module GrabContext
+defined object GrabContext
 
 scala>     object Test { class C(implicit a: Any) { GrabContext.grab } }
-defined module Test
+defined object Test
 
 scala>     object Test { class C(implicit a: Any) { GrabContext.grab } }
-defined module Test
-
-scala> 
+defined object Test
 
 scala> 
diff --git a/test/files/run/macro-system-properties.scala b/test/files/run/macro-system-properties.scala
index e182def..db88eb7 100644
--- a/test/files/run/macro-system-properties.scala
+++ b/test/files/run/macro-system-properties.scala
@@ -3,12 +3,12 @@ import scala.tools.partest.ReplTest
 
 object Test extends ReplTest {
   def code = """
-    import language.experimental._, reflect.macros.Context
+    import scala.language.experimental._, scala.reflect.macros.blackbox.Context
     object GrabContext {
       def lastContext = Option(System.getProperties.get("lastContext").asInstanceOf[reflect.macros.runtime.Context])
       // System.properties lets you stash true globals (unlike statics which are classloader scoped)
-      def impl(c: Context)() = { System.getProperties.put("lastContext", c); c.literalUnit }
-      def grab() = macro impl
+      def impl(c: Context)() = { import c.universe._; System.getProperties.put("lastContext", c); c.Expr[Unit](q"()") }
+      def grab(): Unit = macro impl
     }
     object Test { class C(implicit a: Any) { GrabContext.grab } }
     object Test { class C(implicit a: Any) { GrabContext.grab } }
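
c.literalUnit is another casualty of the 2.11 API cleanup; the patched impl builds the unit value itself with q"()". A hypothetical helper capturing the same idiom:

    import scala.reflect.macros.blackbox.Context

    object UnitExprs {
      def unitExpr(c: Context): c.Expr[Unit] = {
        import c.universe._
        // q"()" expands to Literal(Constant(())), the tree c.literalUnit used to produce.
        c.Expr[Unit](q"()")
      }
    }
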
diff --git a/test/files/run/macro-def-path-dependent-a.check b/test/files/run/macro-term-declared-in-annotation.check
similarity index 100%
rename from test/files/run/macro-def-path-dependent-a.check
rename to test/files/run/macro-term-declared-in-annotation.check
diff --git a/test/files/neg/macro-invalidsig-ctx-badargc.flags b/test/files/run/macro-term-declared-in-annotation.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-ctx-badargc.flags
rename to test/files/run/macro-term-declared-in-annotation.flags
diff --git a/test/files/run/macro-term-declared-in-annotation/Impls_1.scala b/test/files/run/macro-term-declared-in-annotation/Impls_1.scala
new file mode 100644
index 0000000..c4bcfbc
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-annotation/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Literal(Constant("this is deprecated")))
+    c.Expr[String](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-annotation/Macros_2.scala b/test/files/run/macro-term-declared-in-annotation/Macros_2.scala
similarity index 100%
rename from test/files/run/macro-declared-in-annotation/Macros_2.scala
rename to test/files/run/macro-term-declared-in-annotation/Macros_2.scala
diff --git a/test/files/run/macro-declared-in-annotation/Test_3.scala b/test/files/run/macro-term-declared-in-annotation/Test_3.scala
similarity index 100%
rename from test/files/run/macro-declared-in-annotation/Test_3.scala
rename to test/files/run/macro-term-declared-in-annotation/Test_3.scala
diff --git a/test/files/run/macro-declared-in-anonymous.check b/test/files/run/macro-term-declared-in-anonymous.check
similarity index 100%
rename from test/files/run/macro-declared-in-anonymous.check
rename to test/files/run/macro-term-declared-in-anonymous.check
diff --git a/test/files/neg/macro-invalidsig-ctx-badtype.flags b/test/files/run/macro-term-declared-in-anonymous.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-ctx-badtype.flags
rename to test/files/run/macro-term-declared-in-anonymous.flags
diff --git a/test/files/run/macro-term-declared-in-anonymous/Impls_1.scala b/test/files/run/macro-term-declared-in-anonymous/Impls_1.scala
new file mode 100644
index 0000000..c43f5f3
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-anonymous/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
+    c.Expr[Unit](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-term-declared-in-anonymous/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-anonymous/Macros_Test_2.scala
new file mode 100644
index 0000000..5039dff
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-anonymous/Macros_Test_2.scala
@@ -0,0 +1,6 @@
+import scala.language.reflectiveCalls
+
+object Test extends App {
+  val macros = new { def foo: Unit = macro Impls.foo }
+  macros.foo
+}
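
Every Impls_1.scala in this macro-term-declared-in-* block prints c.prefix, the expression the macro method was selected on, which is how the .check files distinguish the enclosing object, class, or anonymous instance. A compact quasiquote-based illustration of the same idea (all names hypothetical):

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    class Wrapper(val value: Int) {
      // Expands to: print the receiver expression, then return its value.
      def describe: Int = macro WrapperMacros.describeImpl
    }

    object WrapperMacros {
      def describeImpl(c: Context): c.Tree = {
        import c.universe._
        q"""println("prefix = " + ${c.prefix.tree.toString}); ${c.prefix.tree}.value"""
      }
    }
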
diff --git a/test/files/run/macro-declared-in-block.check b/test/files/run/macro-term-declared-in-block.check
similarity index 100%
rename from test/files/run/macro-declared-in-block.check
rename to test/files/run/macro-term-declared-in-block.check
diff --git a/test/files/neg/macro-invalidsig-ctx-badvarargs.flags b/test/files/run/macro-term-declared-in-block.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-ctx-badvarargs.flags
rename to test/files/run/macro-term-declared-in-block.flags
diff --git a/test/files/run/macro-term-declared-in-block/Impls_1.scala b/test/files/run/macro-term-declared-in-block/Impls_1.scala
new file mode 100644
index 0000000..c43f5f3
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-block/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
+    c.Expr[Unit](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-term-declared-in-block/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-block/Macros_Test_2.scala
new file mode 100644
index 0000000..80bfc44
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-block/Macros_Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+  {
+    def foo: Unit = macro Impls.foo
+    foo
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-class.check b/test/files/run/macro-term-declared-in-class-class.check
similarity index 100%
rename from test/files/run/macro-declared-in-class-class.check
rename to test/files/run/macro-term-declared-in-class-class.check
diff --git a/test/files/neg/macro-invalidsig-ctx-noctx.flags b/test/files/run/macro-term-declared-in-class-class.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-ctx-noctx.flags
rename to test/files/run/macro-term-declared-in-class-class.flags
diff --git a/test/files/run/macro-term-declared-in-class-class/Impls_1.scala b/test/files/run/macro-term-declared-in-class-class/Impls_1.scala
new file mode 100644
index 0000000..c43f5f3
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-class-class/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
+    c.Expr[Unit](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-term-declared-in-class-class/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-class-class/Macros_Test_2.scala
new file mode 100644
index 0000000..d6b1f9f
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-class-class/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+class Macros {
+  class Macros {
+    def foo: Unit = macro Impls.foo
+  }
+}
+
+object Test extends App {
+  val outer = new Macros()
+  new outer.Macros().foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-object.check b/test/files/run/macro-term-declared-in-class-object.check
similarity index 100%
rename from test/files/run/macro-declared-in-class-object.check
rename to test/files/run/macro-term-declared-in-class-object.check
diff --git a/test/files/neg/macro-invalidsig-implicit-params.flags b/test/files/run/macro-term-declared-in-class-object.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-implicit-params.flags
rename to test/files/run/macro-term-declared-in-class-object.flags
diff --git a/test/files/run/macro-term-declared-in-class-object/Impls_1.scala b/test/files/run/macro-term-declared-in-class-object/Impls_1.scala
new file mode 100644
index 0000000..c43f5f3
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-class-object/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
+    c.Expr[Unit](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-term-declared-in-class-object/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-class-object/Macros_Test_2.scala
new file mode 100644
index 0000000..957f666
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-class-object/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+class Macros {
+  object Macros {
+    def foo: Unit = macro Impls.foo
+  }
+}
+
+object Test extends App {
+  val outer = new Macros()
+  outer.Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class.check b/test/files/run/macro-term-declared-in-class.check
similarity index 100%
rename from test/files/run/macro-declared-in-class.check
rename to test/files/run/macro-term-declared-in-class.check
diff --git a/test/files/neg/macro-invalidsig-params-badargc.flags b/test/files/run/macro-term-declared-in-class.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-params-badargc.flags
rename to test/files/run/macro-term-declared-in-class.flags
diff --git a/test/files/run/macro-term-declared-in-class/Impls_1.scala b/test/files/run/macro-term-declared-in-class/Impls_1.scala
new file mode 100644
index 0000000..c43f5f3
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-class/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
+    c.Expr[Unit](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-term-declared-in-class/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-class/Macros_Test_2.scala
new file mode 100644
index 0000000..5898d94
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-class/Macros_Test_2.scala
@@ -0,0 +1,7 @@
+class Macros {
+  def foo: Unit = macro Impls.foo
+}
+
+object Test extends App {
+  new Macros().foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-default-param.check b/test/files/run/macro-term-declared-in-default-param.check
similarity index 100%
rename from test/files/run/macro-declared-in-default-param.check
rename to test/files/run/macro-term-declared-in-default-param.check
diff --git a/test/files/neg/macro-invalidsig-params-badvarargs.flags b/test/files/run/macro-term-declared-in-default-param.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-params-badvarargs.flags
rename to test/files/run/macro-term-declared-in-default-param.flags
diff --git a/test/files/run/macro-term-declared-in-default-param/Impls_1.scala b/test/files/run/macro-term-declared-in-default-param/Impls_1.scala
new file mode 100644
index 0000000..ef0f136
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-default-param/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Literal(Constant("it works")))
+    c.Expr[String](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-term-declared-in-default-param/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-default-param/Macros_Test_2.scala
new file mode 100644
index 0000000..16bd95b
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-default-param/Macros_Test_2.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+  def foo(bar: String = { def foo: String = macro Impls.foo; foo }) = println(bar)
+
+  foo()
+  foo("it works")
+  foo()
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-implicit-class.check b/test/files/run/macro-term-declared-in-implicit-class.check
similarity index 100%
rename from test/files/run/macro-declared-in-implicit-class.check
rename to test/files/run/macro-term-declared-in-implicit-class.check
diff --git a/test/files/neg/macro-invalidsig-params-namemismatch.flags b/test/files/run/macro-term-declared-in-implicit-class.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-params-namemismatch.flags
rename to test/files/run/macro-term-declared-in-implicit-class.flags
diff --git a/test/files/run/macro-term-declared-in-implicit-class/Impls_Macros_1.scala b/test/files/run/macro-term-declared-in-implicit-class/Impls_Macros_1.scala
new file mode 100644
index 0000000..ef00f6f
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-implicit-class/Impls_Macros_1.scala
@@ -0,0 +1,19 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def toOptionOfInt(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Apply(Ident(definitions.SomeModule), List(Select(Select(prefix.tree, TermName("x")), TermName("toInt")))))
+    c.Expr[Option[Int]](body)
+  }
+}
+
+object Macros {
+  implicit def foo(x: String): Foo = new Foo(x)
+
+  class Foo(val x: String) {
+    def toOptionOfInt = macro Impls.toOptionOfInt
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-implicit-class/Test_2.scala b/test/files/run/macro-term-declared-in-implicit-class/Test_2.scala
similarity index 100%
rename from test/files/run/macro-declared-in-implicit-class/Test_2.scala
rename to test/files/run/macro-term-declared-in-implicit-class/Test_2.scala
diff --git a/test/files/run/macro-declared-in-method.check b/test/files/run/macro-term-declared-in-method.check
similarity index 100%
rename from test/files/run/macro-declared-in-method.check
rename to test/files/run/macro-term-declared-in-method.check
diff --git a/test/files/neg/macro-invalidsig-tparams-badtype.flags b/test/files/run/macro-term-declared-in-method.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-tparams-badtype.flags
rename to test/files/run/macro-term-declared-in-method.flags
diff --git a/test/files/run/macro-term-declared-in-method/Impls_1.scala b/test/files/run/macro-term-declared-in-method/Impls_1.scala
new file mode 100644
index 0000000..c43f5f3
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-method/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
+    c.Expr[Unit](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-term-declared-in-method/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-method/Macros_Test_2.scala
new file mode 100644
index 0000000..523989d
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-method/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+  def bar() = {
+    def foo: Unit = macro Impls.foo
+    foo
+  }
+
+  bar()
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-class.check b/test/files/run/macro-term-declared-in-object-class.check
similarity index 100%
rename from test/files/run/macro-declared-in-object-class.check
rename to test/files/run/macro-term-declared-in-object-class.check
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-a.flags b/test/files/run/macro-term-declared-in-object-class.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-tparams-bounds-a.flags
rename to test/files/run/macro-term-declared-in-object-class.flags
diff --git a/test/files/run/macro-term-declared-in-object-class/Impls_1.scala b/test/files/run/macro-term-declared-in-object-class/Impls_1.scala
new file mode 100644
index 0000000..c43f5f3
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-object-class/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
+    c.Expr[Unit](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-term-declared-in-object-class/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-object-class/Macros_Test_2.scala
new file mode 100644
index 0000000..fe9dbef
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-object-class/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+object Macros {
+  class Macros {
+    def foo: Unit = macro Impls.foo
+  }
+}
+
+object Test extends App {
+  val outer = Macros
+  new outer.Macros().foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-object.check b/test/files/run/macro-term-declared-in-object-object.check
similarity index 100%
rename from test/files/run/macro-declared-in-object-object.check
rename to test/files/run/macro-term-declared-in-object-object.check
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-b.flags b/test/files/run/macro-term-declared-in-object-object.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-tparams-bounds-b.flags
rename to test/files/run/macro-term-declared-in-object-object.flags
diff --git a/test/files/run/macro-term-declared-in-object-object/Impls_1.scala b/test/files/run/macro-term-declared-in-object-object/Impls_1.scala
new file mode 100644
index 0000000..c43f5f3
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-object-object/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
+    c.Expr[Unit](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-term-declared-in-object-object/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-object-object/Macros_Test_2.scala
new file mode 100644
index 0000000..4ba7551
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-object-object/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+object Macros {
+  object Macros {
+    def foo: Unit = macro Impls.foo
+  }
+}
+
+object Test extends App {
+  val outer = Macros
+  outer.Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object.check b/test/files/run/macro-term-declared-in-object.check
similarity index 100%
rename from test/files/run/macro-declared-in-object.check
rename to test/files/run/macro-term-declared-in-object.check
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-a.flags b/test/files/run/macro-term-declared-in-object.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-tparams-notparams-a.flags
rename to test/files/run/macro-term-declared-in-object.flags
diff --git a/test/files/run/macro-term-declared-in-object/Impls_1.scala b/test/files/run/macro-term-declared-in-object/Impls_1.scala
new file mode 100644
index 0000000..c43f5f3
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-object/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
+    c.Expr[Unit](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-term-declared-in-object/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-object/Macros_Test_2.scala
new file mode 100644
index 0000000..9ebf5d7
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-object/Macros_Test_2.scala
@@ -0,0 +1,7 @@
+object Macros {
+  def foo: Unit = macro Impls.foo
+}
+
+object Test extends App {
+  Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-package-object.check b/test/files/run/macro-term-declared-in-package-object.check
similarity index 100%
rename from test/files/run/macro-declared-in-package-object.check
rename to test/files/run/macro-term-declared-in-package-object.check
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-b.flags b/test/files/run/macro-term-declared-in-package-object.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-tparams-notparams-b.flags
rename to test/files/run/macro-term-declared-in-package-object.flags
diff --git a/test/files/run/macro-term-declared-in-package-object/Impls_1.scala b/test/files/run/macro-term-declared-in-package-object/Impls_1.scala
new file mode 100644
index 0000000..c43f5f3
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-package-object/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
+    c.Expr[Unit](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-term-declared-in-package-object/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-package-object/Macros_Test_2.scala
new file mode 100644
index 0000000..1f378b8
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-package-object/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+package object Macros {
+  def foo: Unit = macro Impls.foo
+}
+
+object Test extends App {
+  import Macros._
+  foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-refinement.check b/test/files/run/macro-term-declared-in-refinement.check
similarity index 100%
rename from test/files/run/macro-declared-in-refinement.check
rename to test/files/run/macro-term-declared-in-refinement.check
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-c.flags b/test/files/run/macro-term-declared-in-refinement.flags
similarity index 100%
rename from test/files/neg/macro-invalidsig-tparams-notparams-c.flags
rename to test/files/run/macro-term-declared-in-refinement.flags
diff --git a/test/files/run/macro-term-declared-in-refinement/Impls_1.scala b/test/files/run/macro-term-declared-in-refinement/Impls_1.scala
new file mode 100644
index 0000000..c43f5f3
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-refinement/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
+    c.Expr[Unit](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-term-declared-in-refinement/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-refinement/Macros_Test_2.scala
new file mode 100644
index 0000000..b38616b
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-refinement/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+import scala.language.reflectiveCalls
+
+class Base
+
+object Test extends App {
+  val macros = new Base { def foo: Unit = macro Impls.foo }
+  macros.foo
+}
diff --git a/test/files/run/macro-term-declared-in-trait.check b/test/files/run/macro-term-declared-in-trait.check
new file mode 100644
index 0000000..0f3756d
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-trait.check
@@ -0,0 +1,15 @@
+prefix = Expr[Nothing]({
+  final class $anon extends AnyRef with Base {
+    def <init>(): <$anon: Base> = {
+      $anon.super.<init>();
+      ()
+    };
+    <empty>
+  };
+  new $anon()
+})
+it works
+prefix = Expr[Nothing](Macros)
+it works
+prefix = Expr[Nothing](new Macros())
+it works
diff --git a/test/files/neg/macro-invalidusage-badbounds-a.flags b/test/files/run/macro-term-declared-in-trait.flags
similarity index 100%
rename from test/files/neg/macro-invalidusage-badbounds-a.flags
rename to test/files/run/macro-term-declared-in-trait.flags
diff --git a/test/files/run/macro-term-declared-in-trait/Impls_1.scala b/test/files/run/macro-term-declared-in-trait/Impls_1.scala
new file mode 100644
index 0000000..c43f5f3
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-trait/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
+    c.Expr[Unit](body)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-term-declared-in-trait/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-trait/Macros_Test_2.scala
new file mode 100644
index 0000000..09c60df
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-trait/Macros_Test_2.scala
@@ -0,0 +1,13 @@
+trait Base {
+  def foo: Unit = macro Impls.foo
+}
+
+object Macros extends Base
+
+class Macros extends Base
+
+object Test extends App {
+  (new Base {}).foo
+  Macros.foo
+  new Macros().foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-typecheck-implicitsdisabled.check b/test/files/run/macro-typecheck-implicitsdisabled.check
index c4fa2c5..91d8fab 100644
--- a/test/files/run/macro-typecheck-implicitsdisabled.check
+++ b/test/files/run/macro-typecheck-implicitsdisabled.check
@@ -1,2 +1,2 @@
-scala.this.Predef.any2ArrowAssoc[Int](1).->[Int](2)
+scala.this.Predef.ArrowAssoc[Int](1).->[Int](2)
 scala.reflect.macros.TypecheckException: value -> is not a member of Int
diff --git a/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala b/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala
index 633cb93..956331c 100644
--- a/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala
+++ b/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala
@@ -1,12 +1,12 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def impl_with_implicits_enabled(c: Context) = {
     import c.universe._
 
-    val tree1 = Apply(Select(Literal(Constant(1)), newTermName("$minus$greater")), List(Literal(Constant(2))))
-    val ttree1 = c.typeCheck(tree1, withImplicitViewsDisabled = false)
-    c.literal(ttree1.toString)
+    val tree1 = Apply(Select(Literal(Constant(1)), TermName("$minus$greater")), List(Literal(Constant(2))))
+    val ttree1 = c.typecheck(tree1, withImplicitViewsDisabled = false)
+    c.Expr[String](Literal(Constant(ttree1.toString)))
   }
 
   def foo_with_implicits_enabled = macro impl_with_implicits_enabled
@@ -15,12 +15,12 @@ object Macros {
     import c.universe._
 
     try {
-      val tree2 = Apply(Select(Literal(Constant(1)), newTermName("$minus$greater")), List(Literal(Constant(2))))
-      val ttree2 = c.typeCheck(tree2, withImplicitViewsDisabled = true)
-      c.literal(ttree2.toString)
+      val tree2 = Apply(Select(Literal(Constant(1)), TermName("$minus$greater")), List(Literal(Constant(2))))
+      val ttree2 = c.typecheck(tree2, withImplicitViewsDisabled = true)
+      c.Expr[String](Literal(Constant(ttree2.toString)))
     } catch {
       case ex: Throwable =>
-        c.literal(ex.toString)
+        c.Expr[String](Literal(Constant(ex.toString)))
     }
   }
 
diff --git a/test/files/run/macro-typecheck-macrosdisabled.check b/test/files/run/macro-typecheck-macrosdisabled.check
index 29a881f..c618d22 100644
--- a/test/files/run/macro-typecheck-macrosdisabled.check
+++ b/test/files/run/macro-typecheck-macrosdisabled.check
@@ -1,4 +1,4 @@
-{
+({
   val $u: reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
   val $m: $u.Mirror = scala.reflect.runtime.`package`.universe.runtimeMirror(this.getClass().getClassLoader());
   $u.Expr.apply[Int(2)]($m, {
@@ -7,7 +7,7 @@
         $treecreator1.super.<init>();
         ()
       };
-      def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
+      def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
         val $u: U = $m$untyped.universe;
         val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
         $u.Literal.apply($u.Constant.apply(2))
@@ -20,13 +20,13 @@
         $typecreator2.super.<init>();
         ()
       };
-      def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
+      def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
         val $u: U = $m$untyped.universe;
         val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
-        $u.ConstantType.apply($u.Constant.apply(2))
+        $u.internal.reificationSupport.ConstantType($u.Constant.apply(2))
       }
     };
     new $typecreator2()
   }))
-}
+}: reflect.runtime.universe.Expr[Int])
 ru.reify[Int](2)
diff --git a/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala b/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala
index f693ad7..0e549f4 100644
--- a/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala
+++ b/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala
@@ -1,29 +1,30 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def impl_with_macros_enabled(c: Context) = {
     import c.universe._
 
-    val ru = Select(Select(Select(Select(Ident(newTermName("scala")), newTermName("reflect")), newTermName("runtime")), newTermName("package")), newTermName("universe"))
-    val tree1 = Apply(Select(ru, newTermName("reify")), List(Literal(Constant(2))))
-    val ttree1 = c.typeCheck(tree1, withMacrosDisabled = false)
-    c.literal(ttree1.toString)
+    val ru = Select(Select(Select(Select(Ident(TermName("scala")), TermName("reflect")), TermName("runtime")), TermName("package")), TermName("universe"))
+    val tree1 = Apply(Select(ru, TermName("reify")), List(Literal(Constant(2))))
+    val ttree1 = c.typecheck(tree1, withMacrosDisabled = false)
+    c.Expr[String](Literal(Constant(ttree1.toString)))
   }
 
   def foo_with_macros_enabled = macro impl_with_macros_enabled
 
   def impl_with_macros_disabled(c: Context) = {
     import c.universe._
+    import internal._
 
     val rupkg = c.mirror.staticModule("scala.reflect.runtime.package")
-    val rusym = build.selectTerm(rupkg, "universe")
-    val NullaryMethodType(rutpe) = rusym.typeSignature
-    val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
-    build.setTypeSignature(ru, rutpe)
+    val rusym = reificationSupport.selectTerm(rupkg, "universe")
+    val NullaryMethodType(rutpe) = rusym.info
+    val ru = reificationSupport.newFreeTerm("ru", scala.reflect.runtime.universe)
+    reificationSupport.setInfo(ru, rutpe)
 
-    val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Literal(Constant(2))))
-    val ttree2 = c.typeCheck(tree2, withMacrosDisabled = true)
-    c.literal(ttree2.toString)
+    val tree2 = Apply(Select(Ident(ru), TermName("reify")), List(Literal(Constant(2))))
+    val ttree2 = c.typecheck(tree2, withMacrosDisabled = true)
+    c.Expr[String](Literal(Constant(ttree2.toString)))
   }
 
   def foo_with_macros_disabled = macro impl_with_macros_disabled
diff --git a/test/files/run/macro-typecheck-macrosdisabled2.check b/test/files/run/macro-typecheck-macrosdisabled2.check
index 27d15d4..2e862a6 100644
--- a/test/files/run/macro-typecheck-macrosdisabled2.check
+++ b/test/files/run/macro-typecheck-macrosdisabled2.check
@@ -1,4 +1,4 @@
-{
+({
   val $u: reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
   val $m: $u.Mirror = scala.reflect.runtime.`package`.universe.runtimeMirror(this.getClass().getClassLoader());
   $u.Expr.apply[Array[Int]]($m, {
@@ -7,10 +7,10 @@
         $treecreator1.super.<init>();
         ()
       };
-      def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
+      def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
         val $u: U = $m$untyped.universe;
         val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
-        $u.Apply.apply($u.Select.apply($u.build.Ident($m.staticModule("scala.Array")), $u.newTermName("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
+        $u.Apply.apply($u.Select.apply($u.internal.reificationSupport.mkIdent($m.staticModule("scala.Array")), $u.TermName.apply("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
       }
     };
     new $treecreator1()
@@ -20,13 +20,13 @@
         $typecreator2.super.<init>();
         ()
       };
-      def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
+      def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
         val $u: U = $m$untyped.universe;
         val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
-        $u.TypeRef.apply($u.ThisType.apply($m.staticPackage("scala").asModule.moduleClass), $m.staticClass("scala.Array"), scala.collection.immutable.List.apply[$u.Type]($m.staticClass("scala.Int").asType.toTypeConstructor))
+        $u.internal.reificationSupport.TypeRef($u.internal.reificationSupport.ThisType($m.staticPackage("scala").asModule.moduleClass), $m.staticClass("scala.Array"), scala.collection.immutable.List.apply[$u.Type]($m.staticClass("scala.Int").asType.toTypeConstructor))
       }
     };
     new $typecreator2()
   }))
-}
+}: reflect.runtime.universe.Expr[Array[Int]])
 ru.reify[Array[Int]](scala.Array.apply(2))
diff --git a/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala b/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala
index 1dbf5a1..f99f5d2 100644
--- a/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala
+++ b/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala
@@ -1,29 +1,30 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def impl_with_macros_enabled(c: Context) = {
     import c.universe._
 
-    val ru = Select(Select(Select(Select(Ident(newTermName("scala")), newTermName("reflect")), newTermName("runtime")), newTermName("package")), newTermName("universe"))
-    val tree1 = Apply(Select(ru, newTermName("reify")), List(Apply(Select(Ident(newTermName("scala")), newTermName("Array")), List(Literal(Constant(2))))))
-    val ttree1 = c.typeCheck(tree1, withMacrosDisabled = false)
-    c.literal(ttree1.toString)
+    val ru = Select(Select(Select(Select(Ident(TermName("scala")), TermName("reflect")), TermName("runtime")), TermName("package")), TermName("universe"))
+    val tree1 = Apply(Select(ru, TermName("reify")), List(Apply(Select(Ident(TermName("scala")), TermName("Array")), List(Literal(Constant(2))))))
+    val ttree1 = c.typecheck(tree1, withMacrosDisabled = false)
+    c.Expr[String](Literal(Constant(ttree1.toString)))
   }
 
   def foo_with_macros_enabled = macro impl_with_macros_enabled
 
   def impl_with_macros_disabled(c: Context) = {
     import c.universe._
+    import internal._
 
     val rupkg = c.mirror.staticModule("scala.reflect.runtime.package")
-    val rusym = build.selectTerm(rupkg, "universe")
-    val NullaryMethodType(rutpe) = rusym.typeSignature
-    val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
-    build.setTypeSignature(ru, rutpe)
+    val rusym = reificationSupport.selectTerm(rupkg, "universe")
+    val NullaryMethodType(rutpe) = rusym.info
+    val ru = reificationSupport.newFreeTerm("ru", scala.reflect.runtime.universe)
+    reificationSupport.setInfo(ru, rutpe)
 
-    val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Apply(Select(Ident(newTermName("scala")), newTermName("Array")), List(Literal(Constant(2))))))
-    val ttree2 = c.typeCheck(tree2, withMacrosDisabled = true)
-    c.literal(ttree2.toString)
+    val tree2 = Apply(Select(Ident(ru), TermName("reify")), List(Apply(Select(Ident(TermName("scala")), TermName("Array")), List(Literal(Constant(2))))))
+    val ttree2 = c.typecheck(tree2, withMacrosDisabled = true)
+    c.Expr[String](Literal(Constant(ttree2.toString)))
   }
 
   def foo_with_macros_disabled = macro impl_with_macros_disabled
diff --git a/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala b/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala
index bcbd128..5df5f96 100644
--- a/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala
+++ b/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala
@@ -1,15 +1,21 @@
 import scala.reflect.runtime.universe._
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
-  def cons_impl[A: c.WeakTypeTag](c: Context)(x: c.Expr[A], xs: c.Expr[List[A]]): c.Expr[List[A]] = c.universe.reify {
-    println("A = " + c.literal(implicitly[c.WeakTypeTag[A]].toString).splice)
-    x.splice :: xs.splice
+  def cons_impl[A: c.WeakTypeTag](c: Context)(x: c.Expr[A], xs: c.Expr[List[A]]): c.Expr[List[A]] = {
+    import c.universe._
+    reify {
+      println("A = " + c.Expr[String](Literal(Constant(implicitly[c.WeakTypeTag[A]].toString))).splice)
+      x.splice :: xs.splice
+    }
   }
 
-  def nil_impl[B: c.WeakTypeTag](c: Context): c.Expr[List[B]] = c.universe.reify {
-    println("B = " + c.literal(implicitly[c.WeakTypeTag[B]].toString).splice)
-    Nil
+  def nil_impl[B: c.WeakTypeTag](c: Context): c.Expr[List[B]] = {
+    import c.universe._
+    reify {
+      println("B = " + c.Expr[String](Literal(Constant(implicitly[c.WeakTypeTag[B]].toString))).splice)
+      Nil
+    }
   }
 
   def cons[A](x: A, xs: List[A]): List[A] = macro cons_impl[A]
diff --git a/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala b/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala
index 0244273..1eb257e 100644
--- a/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala
+++ b/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala
@@ -1,8 +1,11 @@
 import scala.reflect.runtime.universe._
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
-  def impl[T: c.WeakTypeTag](c: Context)(foo: c.Expr[T]): c.Expr[Unit] = c.universe.reify { println(c.literal(implicitly[c.WeakTypeTag[T]].toString).splice) }
+  def impl[T: c.WeakTypeTag](c: Context)(foo: c.Expr[T]): c.Expr[Unit] = {
+    import c.universe._
+    reify { println(c.Expr[String](Literal(Constant(implicitly[c.WeakTypeTag[T]].toString))).splice) }
+  }
 
   def foo[T](foo: T) = macro impl[T]
 }
\ No newline at end of file
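
Both undetparams tests above print the WeakTypeTag the compiler infers for the macro's own type parameter. A compact sketch of the same introspection against the 2.11 API (object and method names are illustrative):

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object TypeProbe {
      def show[T](x: T): Unit = macro showImpl[T]
      def showImpl[T: c.WeakTypeTag](c: Context)(x: c.Expr[T]): c.Expr[Unit] = {
        import c.universe._
        // weakTypeOf[T] is whatever the compiler inferred for T at the call site.
        reify { println(c.Expr[String](Literal(Constant(weakTypeOf[T].toString))).splice) }
      }
    }
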
diff --git a/test/files/run/macro-vampire-false-warning.check b/test/files/run/macro-vampire-false-warning.check
new file mode 100644
index 0000000..4792e70
--- /dev/null
+++ b/test/files/run/macro-vampire-false-warning.check
@@ -0,0 +1,2 @@
+2
+3
diff --git a/test/files/disabled/t7020.flags b/test/files/run/macro-vampire-false-warning.flags
similarity index 100%
copy from test/files/disabled/t7020.flags
copy to test/files/run/macro-vampire-false-warning.flags
diff --git a/test/files/run/macro-vampire-false-warning/Macros_1.scala b/test/files/run/macro-vampire-false-warning/Macros_1.scala
new file mode 100644
index 0000000..63c34b3
--- /dev/null
+++ b/test/files/run/macro-vampire-false-warning/Macros_1.scala
@@ -0,0 +1,52 @@
+// As per http://meta.plasm.us/posts/2013/08/31/feeding-our-vampires/
+
+import scala.annotation.StaticAnnotation
+import scala.reflect.macros.whitebox.Context
+import scala.language.experimental.macros
+
+class body(tree: Any) extends StaticAnnotation
+
+object Macros {
+  def selFieldImpl(c: Context) = {
+    import c.universe._
+    val field = c.macroApplication.symbol
+    val bodyAnn = field.annotations.filter(_.tree.tpe <:< typeOf[body]).head
+    c.Expr[Any](bodyAnn.tree.children(1))
+  }
+
+  def mkObjectImpl(c: Context)(xs: c.Expr[Any]*) = {
+    import c.universe._
+    import Flag._
+    // val kvps = xs.toList map { case q"${_}(${Literal(Constant(name: String))}).->[${_}]($value)" => name -> value }
+    val kvps = xs.map(_.tree).toList map { case Apply(TypeApply(Select(Apply(_, List(Literal(Constant(name: String)))), _), _), List(value)) => name -> value }
+    // val fields = kvps map { case (k, v) => q"@body($v) def ${TermName(k)} = macro Macros.selFieldImpl" }
+    val fields = kvps map { case (k, v) => DefDef(
+      Modifiers(MACRO, typeNames.EMPTY, List(Apply(Select(New(Ident(TypeName("body"))), termNames.CONSTRUCTOR), List(v)))),
+      TermName(k), Nil, Nil, Ident(TypeName("Any")), Select(Ident(TermName("Macros")), TermName("selFieldImpl"))) }
+    // q"import scala.language.experimental.macros; class Workaround { ..$fields }; new Workaround{}"
+    c.Expr[Any](Block(
+      List(
+        Import(Select(Select(Ident(TermName("scala")), TermName("language")), TermName("experimental")), List(ImportSelector(TermName("macros"), 51, TermName("macros"), 51))),
+        ClassDef(
+          NoMods, TypeName("Workaround"), Nil,
+          Template(
+            List(Select(Ident(TermName("scala")), TypeName("AnyRef"))), noSelfType,
+            DefDef(
+              NoMods, termNames.CONSTRUCTOR, Nil, List(Nil), TypeTree(),
+              Block(List(Apply(Select(Super(This(typeNames.EMPTY), typeNames.EMPTY), termNames.CONSTRUCTOR), List())), Literal(Constant(()))))
+            +: fields)),
+        ClassDef(
+          Modifiers(FINAL), TypeName("$anon"), Nil,
+          Template(
+            List(Ident(TypeName("Workaround"))), noSelfType,
+            List(
+              DefDef(
+                NoMods, termNames.CONSTRUCTOR, Nil, List(Nil), TypeTree(),
+                Block(List(Apply(Select(Super(This(typeNames.EMPTY), typeNames.EMPTY), termNames.CONSTRUCTOR), List())), Literal(Constant(())))))))),
+      Apply(Select(New(Ident(TypeName("$anon"))), termNames.CONSTRUCTOR), List())))
+  }
+}
+
+object mkObject {
+  def apply(xs: Any*): Any = macro Macros.mkObjectImpl
+}
diff --git a/test/files/run/macro-vampire-false-warning/Test_2.scala b/test/files/run/macro-vampire-false-warning/Test_2.scala
new file mode 100644
index 0000000..6e44b68
--- /dev/null
+++ b/test/files/run/macro-vampire-false-warning/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+  val foo = mkObject("x" -> "2", "y" -> 3)
+  println(foo.x)
+  println(foo.y)
+  // println(foo.z) => will result in a compilation error
+}
\ No newline at end of file
diff --git a/test/files/run/macro-whitebox-dynamic-materialization.check b/test/files/run/macro-whitebox-dynamic-materialization.check
new file mode 100644
index 0000000..ccec8e5
--- /dev/null
+++ b/test/files/run/macro-whitebox-dynamic-materialization.check
@@ -0,0 +1,2 @@
+null
+C2
diff --git a/test/files/run/macro-whitebox-dynamic-materialization/Macros_1.scala b/test/files/run/macro-whitebox-dynamic-materialization/Macros_1.scala
new file mode 100644
index 0000000..a2e925b
--- /dev/null
+++ b/test/files/run/macro-whitebox-dynamic-materialization/Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.whitebox.Context
+import scala.language.experimental.macros
+
+trait Foo[T]
+
+class C1(val x: Int)
+class C2(val x: String)
+
+trait LowPriority {
+  implicit def lessSpecific[T]: Foo[T] = null
+}
+
+object Foo extends LowPriority {
+  implicit def moreSpecific[T]: Foo[T] = macro Macros.impl[T]
+}
+
+object Macros {
+  def impl[T: c.WeakTypeTag](c: Context) = {
+    import c.universe._
+    val tpe = weakTypeOf[T]
+    if (tpe.members.exists(_.info =:= typeOf[Int]))
+      c.abort(c.enclosingPosition, "I don't like classes that contain integers")
+    q"new Foo[$tpe]{ override def toString = ${tpe.toString} }"
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-whitebox-dynamic-materialization/Test_2.scala b/test/files/run/macro-whitebox-dynamic-materialization/Test_2.scala
new file mode 100644
index 0000000..bf19209
--- /dev/null
+++ b/test/files/run/macro-whitebox-dynamic-materialization/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  println(implicitly[Foo[C1]])
+  println(implicitly[Foo[C2]])
+}
\ No newline at end of file
diff --git a/test/files/run/macro-whitebox-extractor.check b/test/files/run/macro-whitebox-extractor.check
new file mode 100644
index 0000000..d81cc07
--- /dev/null
+++ b/test/files/run/macro-whitebox-extractor.check
@@ -0,0 +1 @@
+42
diff --git a/test/files/run/macro-whitebox-extractor/Macros_1.scala b/test/files/run/macro-whitebox-extractor/Macros_1.scala
new file mode 100644
index 0000000..d394c02
--- /dev/null
+++ b/test/files/run/macro-whitebox-extractor/Macros_1.scala
@@ -0,0 +1,21 @@
+import scala.reflect.macros.whitebox.Context
+import language.experimental.macros
+
+object Extractor {
+  def unapply(x: Int): Any = macro Macros.unapplyImpl
+}
+
+object Macros {
+  def unapplyImpl(c: Context)(x: c.Tree) = {
+    import c.universe._
+    q"""
+      new {
+        class Match(x: Int) {
+          def isEmpty = false
+          def get = x
+        }
+        def unapply(x: Int) = new Match(x)
+      }.unapply($x)
+    """
+  }
+}
diff --git a/test/files/run/macro-whitebox-extractor/Test_2.scala b/test/files/run/macro-whitebox-extractor/Test_2.scala
new file mode 100644
index 0000000..41be6f9
--- /dev/null
+++ b/test/files/run/macro-whitebox-extractor/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+  42 match {
+    case Extractor(x) => println(x)
+  }
+}
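
The expansion above leans on 2.11's name-based extractor protocol: any unapply result exposing isEmpty and get can drive a pattern match, no Option required. A minimal non-macro sketch (not part of the imported sources) of the shape the quasiquote produces:

    object PlainExtractor {
      class Match(x: Int) {
        def isEmpty = false
        def get = x
      }
      def unapply(x: Int) = new Match(x)
    }

    object Demo extends App {
      42 match {
        case PlainExtractor(x) => println(x)  // prints 42, matching the .check file
      }
    }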
diff --git a/test/files/run/macro-whitebox-fundep-materialization.check b/test/files/run/macro-whitebox-fundep-materialization.check
new file mode 100644
index 0000000..bed7429
--- /dev/null
+++ b/test/files/run/macro-whitebox-fundep-materialization.check
@@ -0,0 +1 @@
+(23,foo,true)
diff --git a/test/files/run/macro-whitebox-fundep-materialization/Macros_1.scala b/test/files/run/macro-whitebox-fundep-materialization/Macros_1.scala
new file mode 100644
index 0000000..5e89e6b
--- /dev/null
+++ b/test/files/run/macro-whitebox-fundep-materialization/Macros_1.scala
@@ -0,0 +1,39 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.whitebox.Context
+
+trait Iso[T, U] {
+  def to(t : T) : U
+  // def from(u : U) : T
+}
+
+object Iso {
+  implicit def materializeIso[T, U]: Iso[T, U] = macro impl[T, U]
+  def impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Context): c.Expr[Iso[T, U]] = {
+    import c.universe._
+    import definitions._
+    import Flag._
+
+    val sym = c.weakTypeOf[T].typeSymbol
+    if (!sym.isClass || !sym.asClass.isCaseClass) c.abort(c.enclosingPosition, s"$sym is not a case class")
+    val fields = sym.info.decls.toList.collect{ case x: TermSymbol if x.isVal && x.isCaseAccessor => x }
+
+    def mkTpt() = {
+      val core = Ident(TupleClass(fields.length) orElse UnitClass)
+      if (fields.length == 0) core
+      else AppliedTypeTree(core, fields map (f => TypeTree(f.info)))
+    }
+
+    def mkFrom() = {
+      if (fields.length == 0) Literal(Constant(Unit))
+      else Apply(Ident(newTermName("Tuple" + fields.length)), fields map (f => Select(Ident(newTermName("f")), newTermName(f.name.toString.trim))))
+    }
+
+    val evidenceClass = ClassDef(Modifiers(FINAL), newTypeName("$anon"), List(), Template(
+      List(AppliedTypeTree(Ident(newTypeName("Iso")), List(Ident(sym), mkTpt()))),
+      emptyValDef,
+      List(
+        DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(typeNames.EMPTY), typeNames.EMPTY), termNames.CONSTRUCTOR), List())), Literal(Constant(())))),
+        DefDef(Modifiers(), newTermName("to"), List(), List(List(ValDef(Modifiers(PARAM), newTermName("f"), Ident(sym), EmptyTree))), TypeTree(), mkFrom()))))
+    c.Expr[Iso[T, U]](Block(List(evidenceClass), Apply(Select(New(Ident(newTypeName("$anon"))), termNames.CONSTRUCTOR), List())))
+  }
+}
diff --git a/test/files/run/macro-whitebox-fundep-materialization/Test_2.scala b/test/files/run/macro-whitebox-fundep-materialization/Test_2.scala
new file mode 100644
index 0000000..40ca1d5
--- /dev/null
+++ b/test/files/run/macro-whitebox-fundep-materialization/Test_2.scala
@@ -0,0 +1,12 @@
+// see the comments for macroExpand.onDelayed for an explanation of what's tested here
+object Test extends App {
+  case class Foo(i: Int, s: String, b: Boolean)
+  def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
+
+  {
+    val equiv = foo(Foo(23, "foo", true))
+    def typed[T](t: => T) {}
+    typed[(Int, String, Boolean)](equiv)
+    println(equiv)
+  }
+}
\ No newline at end of file
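
What the materializer above buys: for a case class it synthesizes an Iso whose second, whitebox-refined type parameter is the matching tuple. A hand-written sketch (not part of the imported sources) of the instance it generates for Foo, using a local copy of the trait so the snippet stands alone:

    trait IsoSketch[T, U] { def to(t: T): U }

    object IsoSketchDemo extends App {
      case class Foo(i: Int, s: String, b: Boolean)

      // Equivalent, up to naming, to the tree built by Iso.impl for Foo:
      // U is fixed to the tuple of the case accessors.
      implicit val fooIso: IsoSketch[Foo, (Int, String, Boolean)] =
        new IsoSketch[Foo, (Int, String, Boolean)] {
          def to(f: Foo) = (f.i, f.s, f.b)
        }

      def foo[C, L](c: C)(implicit iso: IsoSketch[C, L]): L = iso.to(c)
      println(foo(Foo(23, "foo", true)))  // (23,foo,true), as in the .check file
    }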
diff --git a/test/files/run/t6955.check b/test/files/run/macro-whitebox-structural.check
similarity index 100%
copy from test/files/run/t6955.check
copy to test/files/run/macro-whitebox-structural.check
diff --git a/test/files/run/macro-whitebox-structural/Impls_Macros_1.scala b/test/files/run/macro-whitebox-structural/Impls_Macros_1.scala
new file mode 100644
index 0000000..45fdb79
--- /dev/null
+++ b/test/files/run/macro-whitebox-structural/Impls_Macros_1.scala
@@ -0,0 +1,16 @@
+import scala.reflect.macros.whitebox.Context
+import scala.language.experimental.macros
+
+object Macros {
+  def impl(c: Context) = {
+    import c.universe._
+    q"""
+      trait Foo {
+        def x = 2
+      }
+      new Foo {}
+    """
+  }
+
+  def foo: Any = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/macro-whitebox-structural/Test_2.scala b/test/files/run/macro-whitebox-structural/Test_2.scala
new file mode 100644
index 0000000..ea6a817
--- /dev/null
+++ b/test/files/run/macro-whitebox-structural/Test_2.scala
@@ -0,0 +1,5 @@
+import Macros._
+
+object Test extends App {
+  println(Macros.foo.x)
+}
\ No newline at end of file
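
Because the macro above is whitebox, the type reported to the typechecker is the type of the expansion rather than the declared Any, which is why Macros.foo.x compiles. A non-macro sketch (not part of the imported sources) of the value the expansion produces:

    object StructuralSketch extends App {
      trait Foo { def x = 2 }
      val foo = new Foo {}   // same shape as the macro expansion
      println(foo.x)         // prints 2
    }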
diff --git a/test/files/run/macroPlugins-macroArgs.check b/test/files/run/macroPlugins-macroArgs.check
new file mode 100644
index 0000000..a68f806
--- /dev/null
+++ b/test/files/run/macroPlugins-macroArgs.check
@@ -0,0 +1,2 @@
+hijacked 1
+hijacked 2
diff --git a/test/files/run/macroPlugins-macroArgs/Macros_2.scala b/test/files/run/macroPlugins-macroArgs/Macros_2.scala
new file mode 100644
index 0000000..b19b8f1
--- /dev/null
+++ b/test/files/run/macroPlugins-macroArgs/Macros_2.scala
@@ -0,0 +1,11 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def impl(c: Context)(arg: c.Tree) = {
+    import c.universe._
+    q"""println($arg)"""
+  }
+
+  def foo(arg: String): Unit = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroArgs/Plugin_1.scala b/test/files/run/macroPlugins-macroArgs/Plugin_1.scala
new file mode 100644
index 0000000..23e80ce
--- /dev/null
+++ b/test/files/run/macroPlugins-macroArgs/Plugin_1.scala
@@ -0,0 +1,21 @@
+package macroArgs
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.plugins.{Plugin => NscPlugin}
+
+class Plugin(val global: Global) extends NscPlugin {
+  import global._
+  import analyzer._
+
+  val name = "macroArgs"
+  val description = "A sample analyzer plugin that overrides macroArgs."
+  val components = Nil
+  addMacroPlugin(MacroPlugin)
+
+  object MacroPlugin extends MacroPlugin {
+    override def pluginsMacroArgs(typer: Typer, expandee: Tree): Option[MacroArgs] = {
+      val MacroArgs(c, List(Literal(Constant(s: String)))) = standardMacroArgs(typer, expandee)
+      Some(MacroArgs(c, List(Literal(Constant("hijacked " + s)))))
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroArgs/Test_3.flags b/test/files/run/macroPlugins-macroArgs/Test_3.flags
new file mode 100644
index 0000000..966df73
--- /dev/null
+++ b/test/files/run/macroPlugins-macroArgs/Test_3.flags
@@ -0,0 +1 @@
+-Xplugin:.
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroArgs/Test_3.scala b/test/files/run/macroPlugins-macroArgs/Test_3.scala
new file mode 100644
index 0000000..a54d608
--- /dev/null
+++ b/test/files/run/macroPlugins-macroArgs/Test_3.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  Macros.foo("1")
+  Macros.foo("2")
+}
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroArgs/scalac-plugin.xml b/test/files/run/macroPlugins-macroArgs/scalac-plugin.xml
new file mode 100644
index 0000000..0849f0f
--- /dev/null
+++ b/test/files/run/macroPlugins-macroArgs/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+  <name>macro-args</name>
+  <classname>macroArgs.Plugin</classname>
+</plugin>
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroExpand.check b/test/files/run/macroPlugins-macroExpand.check
new file mode 100644
index 0000000..6f685c2
--- /dev/null
+++ b/test/files/run/macroPlugins-macroExpand.check
@@ -0,0 +1,2 @@
+expanded into println("impl1")
+expanded into println("impl2")
diff --git a/test/files/run/macroPlugins-macroExpand/Macros_2.scala b/test/files/run/macroPlugins-macroExpand/Macros_2.scala
new file mode 100644
index 0000000..c9c88ad
--- /dev/null
+++ b/test/files/run/macroPlugins-macroExpand/Macros_2.scala
@@ -0,0 +1,18 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def impl1(c: Context) = {
+    import c.universe._
+    q"""println("impl1")"""
+  }
+
+  def impl2(c: Context) = {
+    import c.universe._
+    q"""println("impl2")"""
+  }
+
+  def foo1: Unit = macro impl1
+
+  def foo2: Unit = macro impl2
+}
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroExpand/Plugin_1.scala b/test/files/run/macroPlugins-macroExpand/Plugin_1.scala
new file mode 100644
index 0000000..13df85c
--- /dev/null
+++ b/test/files/run/macroPlugins-macroExpand/Plugin_1.scala
@@ -0,0 +1,27 @@
+package macroExpand
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.plugins.{Plugin => NscPlugin}
+
+class Plugin(val global: Global) extends NscPlugin {
+  import global._
+  import analyzer._
+  import scala.reflect.internal.Mode
+
+  val name = "macroExpand"
+  val description = "A sample analyzer plugin that overrides macroExpand."
+  val components = Nil
+  addMacroPlugin(MacroPlugin)
+
+  object MacroPlugin extends MacroPlugin {
+    override def pluginsMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Option[Tree] = {
+      object expander extends DefMacroExpander(typer, expandee, mode, pt) {
+        override def onSuccess(expanded: Tree) = {
+          val message = s"expanded into ${expanded.toString}"
+          typer.typed(q"println($message)")
+        }
+      }
+      Some(expander(expandee))
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroExpand/Test_3.flags b/test/files/run/macroPlugins-macroExpand/Test_3.flags
new file mode 100644
index 0000000..966df73
--- /dev/null
+++ b/test/files/run/macroPlugins-macroExpand/Test_3.flags
@@ -0,0 +1 @@
+-Xplugin:.
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroExpand/Test_3.scala b/test/files/run/macroPlugins-macroExpand/Test_3.scala
new file mode 100644
index 0000000..def9b56
--- /dev/null
+++ b/test/files/run/macroPlugins-macroExpand/Test_3.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  Macros.foo1
+  Macros.foo2
+}
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroExpand/scalac-plugin.xml b/test/files/run/macroPlugins-macroExpand/scalac-plugin.xml
new file mode 100644
index 0000000..8601508
--- /dev/null
+++ b/test/files/run/macroPlugins-macroExpand/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+  <name>macro-expand</name>
+  <classname>macroExpand.Plugin</classname>
+</plugin>
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroRuntime.check b/test/files/run/macroPlugins-macroRuntime.check
new file mode 100644
index 0000000..af16d1a
--- /dev/null
+++ b/test/files/run/macroPlugins-macroRuntime.check
@@ -0,0 +1,2 @@
+hijacked
+hijacked
diff --git a/test/files/run/macroPlugins-macroRuntime/Macros_2.scala b/test/files/run/macroPlugins-macroRuntime/Macros_2.scala
new file mode 100644
index 0000000..b19b8f1
--- /dev/null
+++ b/test/files/run/macroPlugins-macroRuntime/Macros_2.scala
@@ -0,0 +1,11 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def impl(c: Context)(arg: c.Tree) = {
+    import c.universe._
+    q"""println($arg)"""
+  }
+
+  def foo(arg: String): Unit = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroRuntime/Plugin_1.scala b/test/files/run/macroPlugins-macroRuntime/Plugin_1.scala
new file mode 100644
index 0000000..a55adad
--- /dev/null
+++ b/test/files/run/macroPlugins-macroRuntime/Plugin_1.scala
@@ -0,0 +1,20 @@
+package macroRuntime
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.plugins.{Plugin => NscPlugin}
+
+class Plugin(val global: Global) extends NscPlugin {
+  import global._
+  import analyzer._
+
+  val name = "macroRuntime"
+  val description = "A sample analyzer plugin that overrides macroRuntime."
+  val components = Nil
+  addMacroPlugin(MacroPlugin)
+
+  object MacroPlugin extends MacroPlugin {
+    override def pluginsMacroRuntime(expandee: Tree): Option[MacroRuntime] = Some({
+      case MacroArgs(_, List(msg)) => q"""println("hijacked")"""
+    })
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroRuntime/Test_3.flags b/test/files/run/macroPlugins-macroRuntime/Test_3.flags
new file mode 100644
index 0000000..966df73
--- /dev/null
+++ b/test/files/run/macroPlugins-macroRuntime/Test_3.flags
@@ -0,0 +1 @@
+-Xplugin:.
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroRuntime/Test_3.scala b/test/files/run/macroPlugins-macroRuntime/Test_3.scala
new file mode 100644
index 0000000..a54d608
--- /dev/null
+++ b/test/files/run/macroPlugins-macroRuntime/Test_3.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  Macros.foo("1")
+  Macros.foo("2")
+}
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroRuntime/scalac-plugin.xml b/test/files/run/macroPlugins-macroRuntime/scalac-plugin.xml
new file mode 100644
index 0000000..8001af1
--- /dev/null
+++ b/test/files/run/macroPlugins-macroRuntime/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+  <name>macro-runtime</name>
+  <classname>macroRuntime.Plugin</classname>
+</plugin>
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-namerHooks.check b/test/files/run/macroPlugins-namerHooks.check
new file mode 100644
index 0000000..c2db593
--- /dev/null
+++ b/test/files/run/macroPlugins-namerHooks.check
@@ -0,0 +1,45 @@
+enterSym(package <empty> {   case class C extends scala.Product with scala.Serializable {     <caseaccessor> <paramaccessor> val x: Int = _;     <caseaccessor> <paramaccessor> val y: Int = _;     def <init>(x: Int, y: Int) = {       super.<init>();       ()     }   } })
+enterSym(case class C extends scala.Product with scala.Serializable {   <caseaccessor> <paramaccessor> val x: Int = _;   <caseaccessor> <paramaccessor> val y: Int = _;   def <init>(x: Int, y: Int) = {     super.<init>();     ()   } })
+ensureCompanionObject(case class C extends scala.Product with scala.Serializable {   <caseaccessor> <paramaccessor> val x: Int = _;   <caseaccessor> <paramaccessor> val y: Int = _;   def <init>(x: Int, y: Int) = {     super.<init>();     ()   } }, ...)
+enterSym(<synthetic> object C extends runtime.this.AbstractFunction2[Int, Int, C] {   def <init>() = {     super.<init>();     ()   };   final override <synthetic> def toString() = "C" })
+enterStat(case class C extends scala.Product with scala.Serializable {   <caseaccessor> <paramaccessor> val x: Int = _;   <caseaccessor> <paramaccessor> val y: Int = _;   def <init>(x: Int, y: Int) = {     super.<init>();     ()   } })
+enterSym(<caseaccessor> <paramaccessor> val x: Int = _)
+enterSym(<caseaccessor> <paramaccessor> val y: Int = _)
+enterSym(def <init>(x: Int, y: Int) = {   super.<init>();   () })
+enterSym(<synthetic> def copy(x = x, y = y) = new C(x, y))
+enterStat(<caseaccessor> <paramaccessor> private[this] val x: Int = _)
+enterStat(<caseaccessor> <paramaccessor> private[this] val y: Int = _)
+enterStat(def <init>(x: Int, y: Int) = {   super.<init>();   () })
+enterSym(<caseaccessor> <paramaccessor> private[this] val x: Int = _)
+enterSym(<caseaccessor> <paramaccessor> private[this] val y: Int = _)
+enterSym(def <init>(x: Int, y: Int) = {   super.<init>();   () })
+enterSym(super.<init>())
+enterStat(super.<init>())
+enterSym(<synthetic> def copy$default$1 = x)
+enterSym(<synthetic> def copy$default$2 = y)
+enterSym(<synthetic> var acc: Int = -889275714)
+enterSym(acc = Statics.this.mix(acc, x))
+enterSym(acc = Statics.this.mix(acc, y))
+enterStat(<synthetic> var acc: Int = -889275714)
+enterStat(acc = Statics.this.mix(acc, x))
+enterStat(acc = Statics.this.mix(acc, y))
+enterSym(<synthetic> val C$1: C = x$1.asInstanceOf[C])
+enterStat(<synthetic> val C$1: C = x$1.asInstanceOf[C])
+enterSym(def <init>() = {   super.<init>();   () })
+enterSym(final override <synthetic> def toString() = "C")
+enterSym(case <synthetic> def apply(x: Int, y: Int): C = new C(x, y))
+enterSym(case <synthetic> def unapply(x$0: C) = if (x$0.==(null))   scala.this.None else   Some(scala.Tuple2(x$0.x, x$0.y)))
+enterStat(def <init>() = {   super.<init>();   () })
+enterStat(final override <synthetic> def toString() = "C")
+enterSym(def <init>() = {   super.<init>();   () })
+enterSym(final override <synthetic> def toString() = "C")
+enterSym(super.<init>())
+enterStat(super.<init>())
+enterSym(case <synthetic> val x1: Int = x$1)
+enterStat(case <synthetic> val x1: Int = x$1)
+enterSym(case <synthetic> val x1: Any = x$1)
+enterSym(case5(){   if (x1.isInstanceOf[C])     matchEnd4(true)   else     case6() })
+enterSym(case6(){   matchEnd4(false) })
+enterStat(case <synthetic> val x1: Any = x$1)
+enterStat(case5(){   if (x1.isInstanceOf[C])     matchEnd4(true)   else     case6() })
+enterStat(case6(){   matchEnd4(false) })
diff --git a/test/files/run/macroPlugins-namerHooks.scala b/test/files/run/macroPlugins-namerHooks.scala
new file mode 100644
index 0000000..302429b
--- /dev/null
+++ b/test/files/run/macroPlugins-namerHooks.scala
@@ -0,0 +1,39 @@
+import scala.tools.partest._
+import scala.tools.nsc._
+
+object Test extends DirectTest {
+  override def extraSettings: String = "-usejavacp"
+
+  def code = """
+    case class C(x: Int, y: Int)
+  """.trim
+
+  def show() {
+    val global = newCompiler()
+    import global._
+    import analyzer._
+
+    val output = collection.mutable.ListBuffer[String]()
+    def log(what: String) = output += what.replace(String.format("%n"), " ")
+
+    object macroPlugin extends MacroPlugin {
+      override def pluginsEnterSym(namer: Namer, tree: Tree): Boolean = {
+        log(s"enterSym($tree)")
+        namer.standardEnterSym(tree)
+        true
+      }
+      override def pluginsEnsureCompanionObject(namer: Namer, cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Option[Symbol] = {
+        log(s"ensureCompanionObject($cdef, ...)")
+        Some(namer.standardEnsureCompanionObject(cdef, creator))
+      }
+      override def pluginsEnterStats(typer: Typer, stats: List[Tree]): List[Tree] = {
+        stats.foreach(stat => log(s"enterStat($stat)"))
+        stats
+      }
+    }
+
+    addMacroPlugin(macroPlugin)
+    compileString(global)(code)
+    println(output.mkString("\n"))
+  }
+}
diff --git a/test/files/run/macroPlugins-typedMacroBody.check b/test/files/run/macroPlugins-typedMacroBody.check
new file mode 100644
index 0000000..b6f8436
--- /dev/null
+++ b/test/files/run/macroPlugins-typedMacroBody.check
@@ -0,0 +1,2 @@
+impl1
+impl2
diff --git a/test/files/run/macroPlugins-typedMacroBody/Macros_2.flags b/test/files/run/macroPlugins-typedMacroBody/Macros_2.flags
new file mode 100644
index 0000000..966df73
--- /dev/null
+++ b/test/files/run/macroPlugins-typedMacroBody/Macros_2.flags
@@ -0,0 +1 @@
+-Xplugin:.
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-typedMacroBody/Macros_2.scala b/test/files/run/macroPlugins-typedMacroBody/Macros_2.scala
new file mode 100644
index 0000000..80acfec
--- /dev/null
+++ b/test/files/run/macroPlugins-typedMacroBody/Macros_2.scala
@@ -0,0 +1,18 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def impl1(c: Context) = {
+    import c.universe._
+    q"""println("impl1")"""
+  }
+
+  def impl2(c: Context) = {
+    import c.universe._
+    q"""println("impl2")"""
+  }
+
+  def foo1: Unit = macro 1
+
+  def foo2: Unit = macro 2
+}
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-typedMacroBody/Plugin_1.scala b/test/files/run/macroPlugins-typedMacroBody/Plugin_1.scala
new file mode 100644
index 0000000..e99cf7f
--- /dev/null
+++ b/test/files/run/macroPlugins-typedMacroBody/Plugin_1.scala
@@ -0,0 +1,21 @@
+package typedMacroBody
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.plugins.{Plugin => NscPlugin}
+
+class Plugin(val global: Global) extends NscPlugin {
+  import global._
+  import analyzer._
+
+  val name = "typedMacroBody"
+  val description = "A sample analyzer plugin that overrides typedMacroBody."
+  val components = Nil
+  addMacroPlugin(MacroPlugin)
+
+  object MacroPlugin extends MacroPlugin {
+    override def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Option[Tree] = {
+      val DefDef(_, _, _, _, _, Literal(Constant(num: Int))) = ddef
+      Some(standardTypedMacroBody(typer, copyDefDef(ddef)(rhs = Ident(TermName("impl" + num)))))
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-typedMacroBody/Test_3.scala b/test/files/run/macroPlugins-typedMacroBody/Test_3.scala
new file mode 100644
index 0000000..def9b56
--- /dev/null
+++ b/test/files/run/macroPlugins-typedMacroBody/Test_3.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  Macros.foo1
+  Macros.foo2
+}
\ No newline at end of file
diff --git a/test/files/run/macroPlugins-typedMacroBody/scalac-plugin.xml b/test/files/run/macroPlugins-typedMacroBody/scalac-plugin.xml
new file mode 100644
index 0000000..e223fa5
--- /dev/null
+++ b/test/files/run/macroPlugins-typedMacroBody/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+  <name>typed-macro-body</name>
+  <classname>typedMacroBody.Plugin</classname>
+</plugin>
\ No newline at end of file
diff --git a/test/files/run/manifests-new.scala b/test/files/run/manifests-new.scala
index f1596de..8b42e3c 100644
--- a/test/files/run/manifests-new.scala
+++ b/test/files/run/manifests-new.scala
@@ -1,3 +1,6 @@
+
+
+import scala.language.{ higherKinds, postfixOps }
 import scala.reflect.runtime.universe._
 
 object Test
@@ -146,4 +149,4 @@ object Test
   }
 
   def main(args: Array[String]): Unit = runAllTests
-}
\ No newline at end of file
+}
diff --git a/test/files/run/manifests-old.scala b/test/files/run/manifests-old.scala
index 621689a..d8b1e75 100644
--- a/test/files/run/manifests-old.scala
+++ b/test/files/run/manifests-old.scala
@@ -1,3 +1,6 @@
+import scala.language.{ higherKinds, postfixOps }
+
+@deprecated("Suppress warnings", since="2.11")

 object Test
 {
   object Variances extends Enumeration {
@@ -144,4 +147,4 @@ object Test
   }
 
   def main(args: Array[String]): Unit = runAllTests
-}
\ No newline at end of file
+}
diff --git a/test/files/run/mapConserve.scala b/test/files/run/mapConserve.scala
index 013095b..d1d52f3 100644
--- a/test/files/run/mapConserve.scala
+++ b/test/files/run/mapConserve.scala
@@ -1,11 +1,14 @@
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
 import scala.annotation.tailrec
 import scala.collection.mutable.ListBuffer
 
 object Test {
   val maxListLength = 7 // up to 16, but larger is slower
   var testCount = 0
-  
-  def checkStackOverflow() = {  
+
+  def checkStackOverflow() = {
     var xs: List[String] = Nil
     for (i <- 0 until 250000)
         xs = "X" :: xs
@@ -47,7 +50,7 @@ object Test {
             // Behaves like existing mapConserve with respect to  eq
             checkBehaviourUnchanged(data, data mapConserve lastHexDigit, data mapConserve lastHexDigit)
         }
-        
+
         checkStackOverflow();
     }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/mapValues.scala b/test/files/run/mapValues.scala
index bd1794b..d3266bd 100644
--- a/test/files/run/mapValues.scala
+++ b/test/files/run/mapValues.scala
@@ -1,7 +1,7 @@
 object Test {
   val m = Map(1 -> 1, 2 -> 2)
   val mv = (m mapValues identity) - 1
-  
+
   def main(args: Array[String]): Unit = {
     assert(mv.size == 1)
   }
diff --git a/test/files/run/map_java_conversions.scala b/test/files/run/map_java_conversions.scala
index 7714b2c..c007b3e 100644
--- a/test/files/run/map_java_conversions.scala
+++ b/test/files/run/map_java_conversions.scala
@@ -4,53 +4,53 @@
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     import collection.JavaConversions._
-    
+
     test(new java.util.HashMap[String, String])
     test(new java.util.Properties)
     testConcMap
   }
-  
+
   def testConcMap {
     import collection.JavaConversions._
-    
+
     val concMap = new java.util.concurrent.ConcurrentHashMap[String, String]
-    
+
     test(concMap)
-    val cmap = asScalaConcurrentMap(concMap)
+    val cmap = mapAsScalaConcurrentMap(concMap)
     cmap.putIfAbsent("absentKey", "absentValue")
     cmap.put("somekey", "somevalue")
     assert(cmap.remove("somekey", "somevalue") == true)
     assert(cmap.replace("absentKey", "newAbsentValue") == Some("absentValue"))
     assert(cmap.replace("absentKey", "newAbsentValue", ".......") == true)
   }
-  
+
   def test(m: collection.mutable.Map[String, String]) {
     m.clear
     assert(m.size == 0)
-    
+
     m.put("key", "value")
     assert(m.size == 1)
-    
+
     assert(m.put("key", "anotherValue") == Some("value"))
     assert(m.put("key2", "value2") == None)
     assert(m.size == 2)
-    
+
     m += (("key3", "value3"))
     assert(m.size == 3)
-    
+
     m -= "key2"
     assert(m.size == 2)
     assert(m.nonEmpty)
     assert(m.remove("key") == Some("anotherValue"))
-    
+
     m.clear
     for (i <- 0 until 10) m += (("key" + i, "value" + i))
     for ((k, v) <- m) assert(k.startsWith("key"))
   }
-  
+
 }
 
 
diff --git a/test/files/run/map_test.scala b/test/files/run/map_test.scala
index 1ea864e..b76dfb4 100644
--- a/test/files/run/map_test.scala
+++ b/test/files/run/map_test.scala
@@ -20,7 +20,7 @@ object Test extends App {
     val map2 = map1.updated(17,"A small random number")
     val map3 = map2.updated(666,"A bigger random number")
     val map4 = map3.updated(4711,"A big random number")
-    map1 == myMap + Pair(42, "The answer")
+    map1 == myMap + ((42, "The answer"))
     var i = 0
     var map = map4
     while(i < 43) {
diff --git a/test/files/run/matchbytes.scala b/test/files/run/matchbytes.scala
index 7190886..f2cea06 100644
--- a/test/files/run/matchbytes.scala
+++ b/test/files/run/matchbytes.scala
@@ -4,4 +4,4 @@ object Test extends App{
     case 1 => println(1);
     case _ => println("????");
   }
-} 
+}
diff --git a/test/files/run/matchintasany.scala b/test/files/run/matchintasany.scala
index c6764b3..03f6ca2 100644
--- a/test/files/run/matchintasany.scala
+++ b/test/files/run/matchintasany.scala
@@ -5,4 +5,4 @@ object Test extends App{
     case 1L => println(1);
     case _ => println("????");
   }
-} 
+}
diff --git a/test/files/run/matchnull.scala b/test/files/run/matchnull.scala
index cfb3049..2cc8550 100644
--- a/test/files/run/matchnull.scala
+++ b/test/files/run/matchnull.scala
@@ -3,7 +3,7 @@ object Test
   def f1 = null match { case x: AnyRef => 1 case _ => -1 }
   def f2(x: Any) = x match { case 52 => 1 ; case null => -1 ; case _ => 0 }
   def f3(x: AnyRef) = x match { case x: String => 1 ; case List(_) => 0 ; case null => -1 ; case _ => -2 }
-  
+
   def main(args: Array[String]): Unit = {
     println(f1)
     println(f2(null))
diff --git a/test/files/run/matchonseq.scala b/test/files/run/matchonseq.scala
index 49b406a..f6f3202 100644
--- a/test/files/run/matchonseq.scala
+++ b/test/files/run/matchonseq.scala
@@ -1,8 +1,8 @@
-object Test extends App{
-  Vector(1,2,3) match { 
-   case head +: tail => println("It worked! head=" + head) 
+object Test extends App {
+  Vector(1,2,3) match {
+    case head +: tail => println("It worked! head=" + head)
   }
-  Vector(1,2,3) match { 
-   case init :+ last => println("It worked! last=" + last) 
+  Vector(1,2,3) match {
+    case init :+ last => println("It worked! last=" + last)
   }
 }
diff --git a/test/files/run/memberpos.check b/test/files/run/memberpos.check
new file mode 100644
index 0000000..9e3a807
--- /dev/null
+++ b/test/files/run/memberpos.check
@@ -0,0 +1,11 @@
+newSource1.scala
+2,4                  class A
+6,28                 object A
+ 7,10                def bippy
+  8                  def hello
+ 11,27               class Dingo
+  12,26              def foooooz
+   22                val a
+30                   class B
+ 30                  def f
+
diff --git a/test/files/run/memberpos.scala b/test/files/run/memberpos.scala
new file mode 100644
index 0000000..f2b79c0
--- /dev/null
+++ b/test/files/run/memberpos.scala
@@ -0,0 +1,39 @@
+import scala.tools.partest._
+
+// Simple sanity test for -Yshow-member-pos.
+object Test extends DirectTest {
+  override def extraSettings: String = "-usejavacp -Ystop-after:parser -Yshow-member-pos \"\" -d " + testOutput.path
+  override def show() = compile()
+  override def code = """
+class A(val a: Int = 1) {
+
+}
+
+object A {
+  def bippy = {
+    def hello = 55
+    "" + hello
+  }
+  class Dingo {
+    def foooooz = /****
+
+
+
+
+
+    ****/ {
+
+
+
+      val a = 1
+
+
+      a
+    }
+  }
+}
+
+class B { def f = 1 }
+
+"""
+}
diff --git a/test/files/run/mirror_symbolof_x.check b/test/files/run/mirror_symbolof_x.check
new file mode 100644
index 0000000..cc9cad7
--- /dev/null
+++ b/test/files/run/mirror_symbolof_x.check
@@ -0,0 +1,13 @@
+class Int
+object C
+type T
+type Id
+class Nothing
+class Null
+class Int
+object C
+type T
+type Id
+class Nothing
+class Null
+exception: class C not found.
diff --git a/test/files/run/mirror_symbolof_x.scala b/test/files/run/mirror_symbolof_x.scala
new file mode 100644
index 0000000..8fec301
--- /dev/null
+++ b/test/files/run/mirror_symbolof_x.scala
@@ -0,0 +1,43 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.api.Mirror
+
+class C
+object C
+
+object Test extends App {
+  object test1 {
+    val m = cm
+    type T = Int
+    type Id[X] = X
+    println(m.symbolOf[Int]: ru.TypeSymbol)
+    println(m.symbolOf[C.type]: ru.TypeSymbol)
+    println(m.symbolOf[T]: ru.TypeSymbol)
+    println(m.symbolOf[Id[_]]: ru.TypeSymbol)
+    println(m.symbolOf[Nothing]: ru.TypeSymbol)
+    println(m.symbolOf[Null]: ru.TypeSymbol)
+  }
+
+  object test2 {
+    val m: Mirror[ru.type] = cm
+    type T = Int
+    type Id[X] = X
+    println(m.symbolOf[Int]: ru.TypeSymbol)
+    println(m.symbolOf[C.type]: ru.TypeSymbol)
+    println(m.symbolOf[T]: ru.TypeSymbol)
+    println(m.symbolOf[Id[_]]: ru.TypeSymbol)
+    println(m.symbolOf[Nothing]: ru.TypeSymbol)
+    println(m.symbolOf[Null]: ru.TypeSymbol)
+  }
+
+  object test3 {
+    val m = ru.runtimeMirror(classOf[Int].getClass.getClassLoader)
+    try println(m.symbolOf[C])
+    catch { case ex: ScalaReflectionException => println(s"exception: ${ex.getMessage}") }
+  }
+
+  test1
+  test2
+  test3
+}
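
symbolOf[T] on a mirror is new surface in 2.11: it resolves the type symbol through that mirror and, as test3 above shows, throws ScalaReflectionException when the class is not visible to it ("exception: class C not found." in the check file). A one-line usage sketch (not part of the imported sources):

    import scala.reflect.runtime.{currentMirror => cm}

    object SymbolOfSketch extends App {
      println(cm.symbolOf[Int])  // class Int, as in the first line of the check file
    }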
diff --git a/test/files/run/misc.check b/test/files/run/misc.check
index 9fa7b72..56116f8 100644
--- a/test/files/run/misc.check
+++ b/test/files/run/misc.check
@@ -1,3 +1,27 @@
+misc.scala:46: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    42;
+    ^
+misc.scala:47: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    42l;
+    ^
+misc.scala:48: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    23.5f;
+    ^
+misc.scala:49: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    23.5;
+    ^
+misc.scala:50: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    "Hello";
+    ^
+misc.scala:51: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    32 + 45;
+       ^
+misc.scala:62: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    x;
+    ^
+misc.scala:74: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    1 < 2;
+      ^
 ### Hello
 ### 17
 ### Bye
diff --git a/test/files/run/misc.scala b/test/files/run/misc.scala
index 139d2d7..2ae76bd 100644
--- a/test/files/run/misc.scala
+++ b/test/files/run/misc.scala
@@ -60,7 +60,7 @@ object Test {
     Console.println;
     val x = 13;
     x;
-    // !!! why are DefDef replaced by Block(Tree[0])? we should use Empty!    
+    // !!! why are DefDef replaced by Block(Tree[0])? we should use Empty!
     def f = 19;
     f;
     def f0() = 11;
diff --git a/test/files/run/missingparams.scala b/test/files/run/missingparams.scala
index e9b1d27..fbc4d2a 100644
--- a/test/files/run/missingparams.scala
+++ b/test/files/run/missingparams.scala
@@ -6,8 +6,8 @@ final class Foo(val x: Int) {
 
   // test that the closure elimination is not wrongly replacing
   // 'that' by 'this'
-  def intersect(that: Foo) = 
-    filter { dummy => 
+  def intersect(that: Foo) =
+    filter { dummy =>
 //      x // dummy
       that.x > 0
     }
diff --git a/test/files/run/mixin-signatures.check b/test/files/run/mixin-signatures.check
new file mode 100644
index 0000000..3031fe7
--- /dev/null
+++ b/test/files/run/mixin-signatures.check
@@ -0,0 +1,59 @@
+class Test$bar1$ {
+  public java.lang.String Test$bar1$.f(java.lang.Object)
+  public java.lang.Object Test$bar1$.f(java.lang.Object) <bridge> <synthetic>
+  public java.lang.String Test$bar1$.g(java.lang.String)
+  public java.lang.Object Test$bar1$.g(java.lang.Object) <bridge> <synthetic>
+  public java.lang.String Test$bar1$.g(java.lang.Object) <bridge> <synthetic>
+  public java.lang.Object Test$bar1$.h(java.lang.Object)
+}
+
+class Test$bar2$ {
+  public java.lang.Object Test$bar2$.f(java.lang.String)
+  public java.lang.Object Test$bar2$.f(java.lang.Object) <bridge> <synthetic>
+  public java.lang.String Test$bar2$.g(java.lang.String)
+  public java.lang.Object Test$bar2$.g(java.lang.Object) <bridge> <synthetic>
+  public java.lang.Object Test$bar2$.g(java.lang.String) <bridge> <synthetic>
+  public java.lang.Object Test$bar2$.h(java.lang.Object)
+}
+
+class Test$bar3$ {
+  public java.lang.String Foo3.f(java.lang.Object)
+    generic: public java.lang.String Foo3.f(T)
+  public java.lang.Object Foo3.f(java.lang.Object) <bridge> <synthetic>
+  public java.lang.String Test$bar3$.g(java.lang.String)
+  public java.lang.Object Test$bar3$.g(java.lang.Object) <bridge> <synthetic>
+  public java.lang.String Test$bar3$.g(java.lang.Object) <bridge> <synthetic>
+  public java.lang.Object Foo3.h(java.lang.Object)
+}
+
+class Test$bar4$ {
+  public java.lang.Object Foo4.f(java.lang.String)
+    generic: public R Foo4.f(java.lang.String)
+  public java.lang.Object Foo4.f(java.lang.Object) <bridge> <synthetic>
+  public java.lang.String Test$bar4$.g(java.lang.String)
+  public java.lang.Object Test$bar4$.g(java.lang.Object) <bridge> <synthetic>
+  public java.lang.Object Test$bar4$.g(java.lang.String) <bridge> <synthetic>
+  public java.lang.Object Foo4.h(java.lang.Object)
+}
+
+class Test$bar5$ {
+  public java.lang.String Test$bar5$.f(java.lang.String)
+  public java.lang.Object Test$bar5$.f(java.lang.Object) <bridge> <synthetic>
+  public java.lang.Object Test$bar5$.f(java.lang.String) <bridge> <synthetic>
+  public java.lang.String Test$bar5$.f(java.lang.Object) <bridge> <synthetic>
+  public java.lang.String Test$bar5$.g(java.lang.String)
+  public java.lang.Object Test$bar5$.g(java.lang.Object) <bridge> <synthetic>
+  public java.lang.Object Test$bar5$.g(java.lang.String) <bridge> <synthetic>
+  public java.lang.String Test$bar5$.g(java.lang.Object) <bridge> <synthetic>
+  public java.lang.Object Test$bar5$.h(java.lang.Object)
+}
+
+class Foo1$class {
+  public static java.lang.String Foo1$class.f(Foo1,java.lang.Object)
+}
+
+class Foo2$class {
+  public static java.lang.Object Foo2$class.f(Foo2,java.lang.String)
+}
+
+000000000000000000000000000000000000
diff --git a/test/files/run/mixin-signatures.scala b/test/files/run/mixin-signatures.scala
new file mode 100644
index 0000000..afd3fad
--- /dev/null
+++ b/test/files/run/mixin-signatures.scala
@@ -0,0 +1,105 @@
+trait Base[T, R] {
+  def f(x: T): R
+  def g(x: T): R
+  def h(x: T): R = null.asInstanceOf[R]
+}
+
+trait Foo1[T] extends Base[T, String] {
+  def f(x: T): String = null
+  def g(x: T): String
+}
+trait Foo2[R] extends Base[String, R] {
+  def f(x: String): R = { print(x.length) ; null.asInstanceOf[R] }
+  def g(x: String): R
+}
+abstract class Foo3[T] extends Base[T, String] {
+  def f(x: T): String = ""
+  def g(x: T): String
+}
+abstract class Foo4[R] extends Base[String, R] {
+  def f(x: String): R = { print(x.length) ; null.asInstanceOf[R] }
+  def g(x: String): R
+}
+
+object Test {
+  object bar1 extends Foo1[String] { def g(x: String): String = { print(x.length) ; "" } }
+  object bar2 extends Foo2[String] { def g(x: String): String = { print(x.length) ; "" } }
+  object bar3 extends Foo3[String] { def g(x: String): String = { print(x.length) ; "" } }
+  object bar4 extends Foo4[String] { def g(x: String): String = { print(x.length) ; "" } }
+
+  // Notice that in bar5, f and g require THREE bridges, because the final
+  // implementation is (String)String, but:
+  //
+  //   inherited abstract signatures: T(R), (T)String, and (String)R
+  //   which erase to: (Object)Object, (Object)String, and (String)Object
+  //
+  // each of which must be bridged to the actual (String)String implementation.
+  //
+  // public java.lang.String Test$bar5$.g(java.lang.String)
+  // public java.lang.Object Test$bar5$.g(java.lang.String) <bridge> <synthetic>
+  // public java.lang.Object Test$bar5$.g(java.lang.Object) <bridge> <synthetic>
+  // public java.lang.String Test$bar5$.g(java.lang.Object) <bridge> <synthetic>
+  object bar5 extends Foo1[String] with Foo2[String] {
+    override def f(x: String): String = { print(x.length) ; x }
+    def g(x: String): String = { print(x.length) ; x }
+  }
+
+  final def m1[T, R](x: Base[T, R], y: T)   = { x.f(y) ; x.g(y) ; x.h(y) }
+  final def m2[T](x: Base[T, String], y: T) = { x.f(y) ; x.g(y) ; x.h(y) }
+  final def m3[R](x: Base[String, R])       = { x.f("") ; x.g("") ; x.h("") }
+  final def m4(x: Base[String, String])     = { x.f("") ; x.g("") ; x.h("") }
+
+  final def m11[T](x: Foo1[T], y: T) = { x.f(y) ; x.g(y) ; x.h(y) }
+  final def m12(x: Foo1[String])     = { x.f("") ; x.g("") ; x.h("") }
+  final def m21[T](x: Foo2[T], y: T) = { x.f("") ; x.g("") ; x.h("") }
+  final def m22(x: Foo2[String])     = { x.f("") ; x.g("") ; x.h("") }
+  final def m31[T](x: Foo3[T], y: T) = { x.f(y) ; x.g(y) ; x.h(y) }
+  final def m32(x: Foo3[String])     = { x.f("") ; x.g("") ; x.h("") }
+  final def m41[T](x: Foo4[T], y: T) = { x.f("") ; x.g("") ; x.h("") }
+  final def m42(x: Foo4[String])     = { x.f("") ; x.g("") ; x.h("") }
+
+  def go = {
+    m1(bar1, "") ; m2(bar1, "") ; m3(bar1) ; m4(bar1)
+    m1(bar2, "") ; m2(bar2, "") ; m3(bar2) ; m4(bar2)
+    m1(bar3, "") ; m2(bar3, "") ; m3(bar3) ; m4(bar3)
+    m1(bar4, "") ; m2(bar4, "") ; m3(bar4) ; m4(bar4)
+
+    m11(bar1, "") ; m12(bar1)
+    m21(bar2, "") ; m22(bar2)
+    m31(bar3, "") ; m32(bar3)
+    m41(bar4, "") ; m42(bar4)
+    ""
+  }
+
+  def flagsString(m: java.lang.reflect.Method) = {
+    val str = List(
+      if (m.isBridge) "<bridge>" else "",
+      if (m.isSynthetic) "<synthetic>" else ""
+    ) filterNot (_ == "") mkString " "
+
+    if (str == "") "" else " " + str
+    //
+    // val flags = scala.reflect.internal.ClassfileConstants.toScalaMethodFlags(m.getModifiers())
+    // scala.tools.nsc.symtab.Flags.flagsToString(flags)
+  }
+
+  def show(clazz: Class[_]) {
+    print(clazz + " {")
+    clazz.getMethods.sortBy(x => (x.getName, x.isBridge, x.toString)) filter (_.getName.length == 1) foreach { m =>
+      print("\n  " + m + flagsString(m))
+      if ("" + m != "" + m.toGenericString) {
+        print("\n    generic: " + m.toGenericString)
+      }
+    }
+    println("\n}")
+    println("")
+  }
+  def show(x: AnyRef) { show(x.getClass) }
+  def show(x: String) { show(Class.forName(x)) }
+
+  def main(args: Array[String]): Unit = {
+    List(bar1, bar2, bar3, bar4, bar5) foreach show
+    List("Foo1$class", "Foo2$class") foreach show
+    println(go)
+  }
+}
\ No newline at end of file
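
The bar5 comment above is the heart of this test: after erasure, each inherited abstract signature that erases differently from the concrete (String)String implementation needs its own synthetic bridge. A minimal sketch (not part of the imported sources) that makes one such bridge visible with plain java.lang.reflect, the same mechanism the test's show method uses:

    object BridgeSketch extends App {
      trait Box[T] { def id(x: T): T }
      class StringBox extends Box[String] { def id(x: String): String = x }

      // Expected: id(String)String plus a synthetic id(Object)Object bridge,
      // because Box#id erases to (Object)Object.
      classOf[StringBox].getDeclaredMethods
        .sortBy(m => (m.getName, m.isBridge))
        .foreach(m => println(s"$m  bridge=${m.isBridge}"))
    }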
diff --git a/test/files/run/mutable-treeset.scala b/test/files/run/mutable-treeset.scala
new file mode 100644
index 0000000..100ab39
--- /dev/null
+++ b/test/files/run/mutable-treeset.scala
@@ -0,0 +1,145 @@
+import scala.collection.mutable.TreeSet
+
+object Test extends App {
+  val list = List(6,5,4,3,2,1,1,2,3,4,5,6,6,5,4,3,2,1)
+  val distinct = list.distinct
+  val sorted = distinct.sorted
+
+  // sublist stuff for a single level of slicing
+  val min = list.min
+  val max = list.max
+  val nonlist = ((min - 10) until (max + 20) filterNot list.contains).toList
+  val sublist = list filter {x => x >=(min + 1) && x < max}
+  val distinctSublist = sublist.distinct
+  val subnonlist = min :: max :: nonlist
+  val subsorted = distinctSublist.sorted
+
+  // subsublist for a 2nd level of slicing
+  val almostmin = sublist.min
+  val almostmax = sublist.max
+  val subsublist = sublist filter {x => x >=(almostmin + 1) && x < almostmax}
+  val distinctSubsublist = subsublist.distinct
+  val subsubnonlist = almostmin :: almostmax :: subnonlist
+  val subsubsorted = distinctSubsublist.sorted
+
+  def testSize {
+    def check(set : TreeSet[Int], list: List[Int]) {
+      assert(set.size == list.size, s"$set had size ${set.size} while $list had size ${list.size}")
+    }
+
+    check(TreeSet[Int](), List[Int]())
+    val set = TreeSet(list:_*)
+    check(set, distinct)
+    check(set.clone, distinct)
+
+    val subset = set from (min + 1) until max
+    check(subset, distinctSublist)
+    check(subset.clone, distinctSublist)
+
+    val subsubset = subset from (almostmin + 1) until almostmax
+    check(subsubset, distinctSubsublist)
+    check(subsubset.clone, distinctSubsublist)
+  }
+
+  def testContains {
+    def check(set : TreeSet[Int], list: List[Int], nonlist: List[Int]) {
+      assert(list forall set.apply, s"$set did not contain all elements of $list using apply")
+      assert(list forall set.contains, s"$set did not contain all elements of $list using contains")
+      assert(!(nonlist exists set.apply), s"$set had an element from $nonlist using apply")
+      assert(!(nonlist exists set.contains), s"$set had an element from $nonlist using contains")
+    }
+
+    val set = TreeSet(list:_*)
+    check(set, list, nonlist)
+    check(set.clone, list, nonlist)
+
+    val subset = set from (min + 1) until max
+    check(subset, sublist, subnonlist)
+    check(subset.clone, sublist, subnonlist)
+
+    val subsubset = subset from (almostmin + 1) until almostmax
+    check(subsubset, subsublist, subsubnonlist)
+    check(subsubset.clone, subsublist, subsubnonlist)
+  }
+
+  def testAdd {
+    def check(set : TreeSet[Int], list: List[Int], nonlist: List[Int]) {
+      var builtList = List[Int]()
+      for (x <- list) {
+        set += x
+        builtList = (builtList :+ x).distinct.sorted filterNot nonlist.contains
+        assert(builtList forall set.apply, s"$set did not contain all elements of $builtList using apply")
+        assert(builtList.size == set.size, s"$set had size ${set.size} while $builtList had size ${builtList.size}")
+      }
+      assert(!(nonlist exists set.apply), s"$set had an element from $nonlist using apply")
+      assert(!(nonlist exists set.contains), s"$set had an element from $nonlist using contains")
+    }
+
+    val set = TreeSet[Int]()
+    val clone = set.clone
+    val subset = set.clone from (min + 1) until max
+    val subclone = subset.clone
+    val subsubset = subset.clone from (almostmin + 1) until almostmax
+    val subsubclone = subsubset.clone
+
+    check(set, list, nonlist)
+    check(clone, list, nonlist)
+
+    check(subset, list, subnonlist)
+    check(subclone, list, subnonlist)
+
+    check(subsubset, list, subsubnonlist)
+    check(subsubclone, list, subsubnonlist)
+  }
+
+  def testRemove {
+    def check(set: TreeSet[Int], sorted: List[Int]) {
+      var builtList = sorted
+      for (x <- list) {
+        set remove x
+        builtList = builtList filterNot (_ == x)
+        assert(builtList forall set.apply, s"$set did not contain all elements of $builtList using apply")
+        assert(builtList.size == set.size, s"$set had size $set.size while $builtList had size $builtList.size")
+      }
+    }
+    val set = TreeSet(list:_*)
+    val clone = set.clone
+    val subset = set.clone from (min + 1) until max
+    val subclone = subset.clone
+    val subsubset = subset.clone from (almostmin + 1) until almostmax
+    val subsubclone = subsubset.clone
+
+    check(set, sorted)
+    check(clone, sorted)
+
+    check(subset, subsorted)
+    check(subclone, subsorted)
+
+    check(subsubset, subsubsorted)
+    check(subsubclone, subsubsorted)
+  }
+
+  def testIterator {
+    def check(set: TreeSet[Int], list: List[Int]) {
+      val it = set.iterator.toList
+      assert(it == list, s"$it did not equal $list")
+    }
+    val set = TreeSet(list: _*)
+    check(set, sorted)
+    check(set.clone, sorted)
+
+    val subset = set from (min + 1) until max
+    check(subset, subsorted)
+    check(subset.clone, subsorted)
+
+    val subsubset = subset from (almostmin + 1) until almostmax
+    check(subsubset, subsubsorted)
+    check(subsubset.clone, subsubsorted)
+  }
+
+  testSize
+  testContains
+  testAdd
+  testRemove
+  testIterator
+}
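
The new test above exercises the ranged projections of mutable.TreeSet (from/until), slicing twice and checking size, membership, and iteration against plain lists. A tiny usage sketch (not part of the imported sources):

    import scala.collection.mutable.TreeSet

    object TreeSetViewSketch extends App {
      val s = TreeSet(6, 5, 4, 3, 2, 1)
      println(s)                    // TreeSet(1, 2, 3, 4, 5, 6): kept in sorted order
      val mid = s from 2 until 6    // ranged projection: elements >= 2 and < 6
      println(mid.toList)           // List(2, 3, 4, 5)
    }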
diff --git a/test/files/run/name-based-patmat.check b/test/files/run/name-based-patmat.check
new file mode 100644
index 0000000..3d5fc40
--- /dev/null
+++ b/test/files/run/name-based-patmat.check
@@ -0,0 +1,12 @@
+`catdog only` has 11 chars
+`catdog only, no product` has 23 chars
+catdog
+2 catdogs! A ha ha!
+3 catdogs! A ha ha!
+catdog
+2 catdogs! A ha ha!
+3 catdogs! A ha ha!
+1
+1
+2
+3
diff --git a/test/files/run/name-based-patmat.scala b/test/files/run/name-based-patmat.scala
new file mode 100644
index 0000000..8e20940
--- /dev/null
+++ b/test/files/run/name-based-patmat.scala
@@ -0,0 +1,105 @@
+final class MiniSome[T](val get: T) extends AnyVal { def isEmpty = false }
+
+package p0 {
+  class Single(val x: Any) extends AnyRef with Product1[String] {
+    private def s = "" + x
+    override def canEqual(x: Any) = this eq x.asInstanceOf[AnyRef]
+    def isEmpty = false
+    def get = this
+    def _1 = s + " only"
+
+    override def toString = s"Single(${_1})"
+  }
+
+  object Single {
+    def unapply(x: Any): Single = new Single(x)
+  }
+
+  class SingleNoProduct(val x: Any) extends AnyRef {
+    private def s = "" + x
+    def isEmpty = false
+    def get = s + " only, no product"
+
+    override def toString = s"SingleNoProduct($get)"
+  }
+
+  object SingleNoProduct {
+    def unapply(x: Any): SingleNoProduct = new SingleNoProduct(x)
+  }
+}
+
+package p1 {
+  class Triple(val x: Any) extends AnyRef with Product3[String, String, String] {
+    private def s = "" + x
+    override def canEqual(x: Any) = this eq x.asInstanceOf[AnyRef]
+    def isEmpty = false
+    def get = this
+    def _1 = s
+    def _2 = "2 " + s + "s! A ha ha!"
+    def _3 = "3 " + s + "s! A ha ha!"
+
+    override def toString = s"Triple(${_1}, ${_2}, ${_3})"
+  }
+
+  object Triple {
+    def unapply(x: Any): Triple = new Triple(x)
+  }
+}
+
+package p2 {
+  class Triple(val x: Any) {
+    private def s = "" + x
+    def isEmpty = false
+    def get = this
+    def _1 = s
+    def _2 = "2 " + s + "s! A ha ha!"
+    def _3 = "3 " + s + "s! A ha ha!"
+    override def toString = s"Triple(${_1}, ${_2}, ${_3})"
+  }
+
+  object Triple {
+    def unapply(x: Any): Triple = new Triple(x)
+  }
+}
+
+package p3 {
+  case class Foo(x: Int, y: Int, zs: Int*)
+
+  object Bar {
+    def f(x: Foo) = x match {
+      case Foo(5, 10, 15, 20, _*) => 1
+      case Foo(5, 10, 15, _*)     => 2
+      case Foo(5, 10, _*)         => 3
+      case Foo(5, 10)             => 4 // should warn unreachable
+      case _                      => 5
+    }
+  }
+}
+
+object Test {
+  def main(args: Array[String]): Unit = {
+    "catdog" match {
+      case p0.Single(x) => println(s"`${x._1}` has ${x._1.length} chars")
+      case x            => println("fail: " + x)
+    }
+    "catdog" match {
+      case p0.SingleNoProduct(x) => println(s"`$x` has ${x.length} chars")
+      case x                     => println("fail: " + x)
+    }
+    "catdog" match {
+      case p1.Triple(x, y, z) => List(x, y, z) foreach println
+      case x                  => println("fail: " + x)
+    }
+    // TODO
+    "catdog" match {
+      case p2.Triple(x, y, z) => List(x, y, z) foreach println
+      case x                  => println("fail: " + x)
+    }
+
+    println(p3.Bar.f(p3.Foo(5, 10, 15, 20, 25)))
+    println(p3.Bar.f(p3.Foo(5, 10, 15, 20)))
+    println(p3.Bar.f(p3.Foo(5, 10, 15)))
+    println(p3.Bar.f(p3.Foo(5, 10)))
+    // println(p3.Bar.f(p3.Foo(5)))
+  }
+}
diff --git a/test/files/run/names-defaults.check b/test/files/run/names-defaults.check
index f253de7..0037822 100644
--- a/test/files/run/names-defaults.check
+++ b/test/files/run/names-defaults.check
@@ -1,3 +1,7 @@
+names-defaults.scala:269: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    spawn(b = { val ttt = 1; ttt }, a = 0)
+                             ^
+warning: there were 4 deprecation warning(s); re-run with -deprecation for details
 1: @
 get: $
 get: 2
diff --git a/test/files/run/names-defaults.scala b/test/files/run/names-defaults.scala
index 220414f..05cd4a5 100644
--- a/test/files/run/names-defaults.scala
+++ b/test/files/run/names-defaults.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds, existentials }
+
 object Test extends App {
   def get[T](x: T) = { println("get: "+ x); x }
 
diff --git a/test/files/run/no-pickle-skolems.check b/test/files/run/no-pickle-skolems.check
new file mode 100644
index 0000000..d640661
--- /dev/null
+++ b/test/files/run/no-pickle-skolems.check
@@ -0,0 +1 @@
+OK!
diff --git a/test/files/run/no-pickle-skolems/Source_1.scala b/test/files/run/no-pickle-skolems/Source_1.scala
new file mode 100644
index 0000000..1b4cbfa
--- /dev/null
+++ b/test/files/run/no-pickle-skolems/Source_1.scala
@@ -0,0 +1,5 @@
+package s
+
+trait Foo { def to[CC[X]](implicit cc: CC[Int]): Unit }
+
+class Bar extends Foo { def to[CC[X]](implicit cc: CC[Int]): Unit = ??? }
diff --git a/test/files/run/no-pickle-skolems/Test_2.scala b/test/files/run/no-pickle-skolems/Test_2.scala
new file mode 100644
index 0000000..da55ad9
--- /dev/null
+++ b/test/files/run/no-pickle-skolems/Test_2.scala
@@ -0,0 +1,39 @@
+
+import scala.language.reflectiveCalls
+import scala.reflect.runtime.universe._
+
+object Test {
+  /** Collects symbols by the given name, even if they're not
+   *  named CC.
+   */
+  def collectSymbols[T: TypeTag](inMethod: TermName, name: String): List[String] = {
+    val m = typeOf[T] member inMethod infoIn typeOf[T]
+    var buf: List[Symbol] = Nil
+    var seen: Set[Symbol] = Set()
+    def id(s: Symbol): Int = s.asInstanceOf[{ def id: Int }].id
+
+    def check(s: Symbol) {
+      if (!seen(s)) {
+        seen += s
+        if (s.name.toString == name) buf ::= s
+      }
+    }
+    def loop(t: Type) {
+      t match {
+        case TypeRef(pre, sym, args)    => loop(pre) ; check(sym) ; args foreach loop
+        case PolyType(tparams, restpe)  => tparams foreach { tp => check(tp) ; check(tp.owner) ; loop(tp.info) } ; loop(restpe)
+        case MethodType(params, restpe) => params foreach { p => check(p) ; loop(p.info) } ; loop(restpe)
+        case _                          =>
+      }
+    }
+    loop(m)
+
+    buf.reverse.distinct map (s => s.name + "#" + id(s))
+  }
+
+  def main(args: Array[String]): Unit = {
+    val syms = collectSymbols[s.Bar](TermName("to"), "CC")
+    assert(syms.size == 1, syms)
+    println("OK!")
+  }
+}
diff --git a/test/files/run/nodebuffer-array.check b/test/files/run/nodebuffer-array.check
deleted file mode 100644
index 49f8bfa..0000000
--- a/test/files/run/nodebuffer-array.check
+++ /dev/null
@@ -1,3 +0,0 @@
-<entry>
-    <elem>a</elem><elem>b</elem><elem>c</elem>
-    </entry>
diff --git a/test/files/run/nodebuffer-array.scala b/test/files/run/nodebuffer-array.scala
deleted file mode 100644
index 4e1ffe1..0000000
--- a/test/files/run/nodebuffer-array.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-object Test {
-  
-  def f(s: String) = {
-    <entry>
-    {
-      for (item <- s split ',') yield
-        <elem>{ item }</elem>
-    }
-    </entry>
-  }
-  
-  def main(args: Array[String]): Unit = {
-    println(f("a,b,c"))
-  }
-}
diff --git a/test/files/run/null-and-intersect.scala b/test/files/run/null-and-intersect.scala
index 1437fa4..7266dab 100644
--- a/test/files/run/null-and-intersect.scala
+++ b/test/files/run/null-and-intersect.scala
@@ -2,7 +2,7 @@ object Test {
   trait Immortal
   class Bippy extends Immutable with Immortal
   class Boppy extends Immutable
-  
+
   def f[T](x: Traversable[T]) = x match {
     case _: Map[_, _]   => 3
     case _: Seq[_]      => 2
@@ -23,10 +23,10 @@ object Test {
     println(f(Seq(1)))
     println(f(Map(1 -> 2)))
     println(f(null))
-    
+
     println(g(new Bippy))
     println(g(null))
-    
+
     println(h(new Bippy))
     println(h(new Boppy))
     println(h(null))
diff --git a/test/files/run/null-hash.scala b/test/files/run/null-hash.scala
index abf15e8..9b1f28b 100644
--- a/test/files/run/null-hash.scala
+++ b/test/files/run/null-hash.scala
@@ -2,7 +2,7 @@ object Test {
   def f1 = List(5, 10, null: String).##
   def f2(x: Any) = x.##
   def f3 = ((55, "abc", null: List[Int])).##
-  
+
   def main(args: Array[String]): Unit = {
     f1
     f2(null)
diff --git a/test/files/run/number-parsing.scala b/test/files/run/number-parsing.scala
index 21551a3..ad14810 100644
--- a/test/files/run/number-parsing.scala
+++ b/test/files/run/number-parsing.scala
@@ -2,12 +2,12 @@ object Test {
   def numTests() = {
     val MinusZero = Float.box(-0.0f)
     val PlusZero  = Float.box(0.0f)
-  
+
     assert(PlusZero match { case MinusZero => false ; case _ => true })
     assert(MinusZero match { case PlusZero => false ; case _ => true })
     assert((MinusZero: scala.Float) == (PlusZero: scala.Float))
     assert(!(MinusZero equals PlusZero))
-  
+
     List(
       -5f.max(2) ,
       -5f max 2 ,
diff --git a/test/files/run/numbereq.scala b/test/files/run/numbereq.scala
index a1f11da..7ce4b23 100644
--- a/test/files/run/numbereq.scala
+++ b/test/files/run/numbereq.scala
@@ -13,13 +13,13 @@ object Test {
       if (x >= Byte.MinValue && x <= Byte.MaxValue) List(new java.lang.Byte(x.toByte)) else Nil,
       if (x >= Char.MinValue && x <= Char.MaxValue) List(new java.lang.Character(x.toChar)) else Nil
     ).flatten
-    
+
     base ::: extras
   }
 
   def mkNumbers(x: BigInt): List[AnyRef] = {
     List(
-      List(BigDecimal(x, java.math.MathContext.UNLIMITED)), 
+      List(BigDecimal(x, java.math.MathContext.UNLIMITED)),
       List(x),
       if (x.isValidDouble) List(new java.lang.Double(x.toDouble)) else Nil,
       if (x.isValidFloat) List(new java.lang.Float(x.toFloat)) else Nil,
@@ -30,23 +30,40 @@ object Test {
       if (x.isValidChar) List(new java.lang.Character(x.toChar)) else Nil
     ).flatten
   }
-  
+
+  // Don't necessarily expect BigDecimal created from BigInt to agree with Double here.
+  def isIffy(x: Any, y: Any, canSwap: Boolean = true): Boolean = x match {
+    case bd: BigDecimal => y match {
+      case _: Float | _: Double => bd.toString.length > 15
+      case _ => false
+    }
+    case _ => canSwap && isIffy(y, x, false)
+  }
+
+  // Don't necessarily expect BigInt to agree with Float/Double beyond a Long
+  def isIffyB(x: Any, y: Any, canSwap: Boolean = true): Boolean = x match {
+    case bi: BigInt => y match {
+      case _: Float | _: Double => bi < Long.MinValue || bi > Long.MaxValue
+      case _ => false
+    }
+    case _ => canSwap && isIffyB(y, x, false)
+  }
+
   def main(args: Array[String]): Unit = {
     val ints    = (0 to 15).toList map (Short.MinValue >> _)
     val ints2   = ints map (x => -x)
     val ints3   = ints map (_ + 1)
     val ints4   = ints2 map (_ - 1)
-    
+
     val setneg1 = ints map mkNumbers
     val setneg2 = ints3 map mkNumbers
     val setpos1 = ints2 map mkNumbers
     val setpos2 = ints4 map mkNumbers
     val zero = mkNumbers(0)
-    
-    val sets = setneg1 ++ setneg2 ++ List(zero) ++ setpos1 ++ setpos2    
-    
+
+    val sets = setneg1 ++ setneg2 ++ List(zero) ++ setpos1 ++ setpos2
+
     for (set <- sets ; x <- set ; y <- set) {
-      // println("'%s' == '%s' (%s == %s) (%s == %s)".format(x, y, x.hashCode, y.hashCode, x.##, y.##))
       assert(x == y, "%s/%s != %s/%s".format(x, x.getClass, y, y.getClass))
       assert(x.## == y.##, "%s != %s".format(x.getClass, y.getClass))
     }
@@ -55,18 +72,20 @@ object Test {
     val bigInts2 = bigInts map (x => -x)
     val bigInts3 = bigInts map (_ + 1)
     val bigInts4 = bigInts2 map (_ - 1)
-    
+
     val setneg1b = bigInts map mkNumbers
     val setneg2b = bigInts3 map mkNumbers
     val setpos1b = bigInts2 map mkNumbers
     val setpos2b = bigInts4 map mkNumbers
 
     val sets2 = setneg1 ++ setneg1b ++ setneg2 ++ setneg2b ++ List(zero) ++ setpos1 ++ setpos1b ++ setpos2 ++ setpos2b
-    
+
     for (set <- sets2 ; x <- set ; y <- set) {
-//      println("'%s' == '%s' (%s == %s) (%s == %s)".format(x, y, x.hashCode, y.hashCode, x.##, y.##))
-      assert(x == y, "%s/%s != %s/%s".format(x, x.getClass, y, y.getClass))
-//      assert(x.## == y.##, "%s != %s".format(x.getClass, y.getClass))    Disable until Double.## is fixed (SI-5640)
+      if (!isIffy(x,y)) {
+        assert(x == y, "%s/%s != %s/%s".format(x, x.getClass, y, y.getClass))
+        // The following is blocked by SI-8150
+        // if (!isIffyB(x,y)) assert(x.## == y.##, "%x/%s != %x/%s from %s.## and %s.##".format(x.##, x.getClass, y.##, y.getClass, x, y))
+      }
     }
   }
 }
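
The updated numbereq test above checks that == and ## agree across boxed numeric types, skipping only the BigDecimal/BigInt vs Float/Double pairs flagged by isIffy/isIffyB. A small self-contained illustration of the property being tested (values chosen here for illustration only):

object HashEqDemo {
  def main(args: Array[String]): Unit = {
    // Cooperative equality: boxed numerics compare equal across types...
    val i: Any = 42
    val l: Any = 42L
    val b: Any = BigInt(42)
    assert(i == l && l == b)
    // ...and ## (unlike hashCode) is consistent with that equality.
    assert(i.## == l.## && l.## == b.##)

    // Beyond the range of an exactly representable Double, precision is lost,
    // which is why the test skips those pairs.
    val big = BigInt("123456789012345678901234567890")
    println(big.toDouble)       // an approximation
    println(big.isValidDouble)  // false
  }
}
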
diff --git a/test/files/run/option-fold.scala b/test/files/run/option-fold.scala
index d554ba4..84e346e 100644
--- a/test/files/run/option-fold.scala
+++ b/test/files/run/option-fold.scala
@@ -2,18 +2,19 @@ object Test {
   sealed class A
   case object B extends A
   case class C(x: Int) extends A
-  
+
   def f[T](x: Option[T]) = x.fold(List.empty[T])(List(_))
   def g(x: Option[A]) = x.fold(-1) {
     case B    => 0
     case C(x) => x
+    case _    => ???
   }
 
   def main(args: Array[String]): Unit = {
-    println(f(None))
-    println(f(Some(5)))
-    println(g(None))
-    println(g(Some(B)))
-    println(g(Some(C(1))))
+    println(f(None))        //List()
+    println(f(Some(5)))     //List(5)
+    println(g(None))        //-1
+    println(g(Some(B)))     //0
+    println(g(Some(C(1))))  //1
   }
 }
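
For reference, the Option.fold exercised by the test above takes a default for None and a function for Some; a minimal sketch:

object OptionFoldDemo {
  // fold(ifEmpty)(f): returns ifEmpty for None, f(value) for Some.
  def sizeOrZero(o: Option[String]): Int = o.fold(0)(_.length)

  def main(args: Array[String]): Unit = {
    println(sizeOrZero(None))         // 0
    println(sizeOrZero(Some("abc")))  // 3
    // Equivalent to o.map(_.length).getOrElse(0)
  }
}
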
diff --git a/test/files/run/origins.flags b/test/files/run/origins.flags
index a7e64e4..690753d 100644
--- a/test/files/run/origins.flags
+++ b/test/files/run/origins.flags
@@ -1 +1 @@
--no-specialization
\ No newline at end of file
+-no-specialization -Ydelambdafy:inline
\ No newline at end of file
diff --git a/test/files/run/origins.scala b/test/files/run/origins.scala
index 0ad9229..6529351 100644
--- a/test/files/run/origins.scala
+++ b/test/files/run/origins.scala
@@ -14,7 +14,7 @@ object Test {
   def f1() = 1 to 5 map boop
   def f2() = 1 to 10 map boop
   def f3() = 1 to 50 map boop
-  
+
   def main(args: Array[String]): Unit = {
     f1() ; f2() ; f3()
   }
diff --git a/test/files/run/packrat1.check b/test/files/run/packrat1.check
deleted file mode 100644
index e9f797e..0000000
--- a/test/files/run/packrat1.check
+++ /dev/null
@@ -1,7 +0,0 @@
-1
-3
-5
-81
-4
-37
-9
diff --git a/test/files/run/packrat1.scala b/test/files/run/packrat1.scala
deleted file mode 100644
index b5a4687..0000000
--- a/test/files/run/packrat1.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-import scala.util.parsing.combinator._
-
-import scala.util.parsing.combinator.syntactical.StandardTokenParsers
-import scala.util.parsing.input._
-import scala.util.parsing.combinator.token._
-
-import scala.collection.mutable.HashMap
-
-object Test extends App{
-  import grammars._
-
-  val head = phrase(term)
-
-  println(extractResult(head(new lexical.Scanner("1"))))
-  println(extractResult(head(new lexical.Scanner("1+2"))))
-  println(extractResult(head(new lexical.Scanner("9-4"))))
-  println(extractResult(head(new lexical.Scanner("9*9"))))
-  println(extractResult(head(new lexical.Scanner("8/2"))))
-  println(extractResult(head(new lexical.Scanner("4*9-0/7+9-8*1"))))
-  println(extractResult(head(new lexical.Scanner("(1+2)*3"))))
-}
-
-object grammars extends StandardTokenParsers with PackratParsers{
-  
-  def extractResult(r : ParseResult[_]) = r match {
-    case Success(a,_) => a
-    case NoSuccess(a,_) => a
-  }
-  
-  lexical.delimiters ++= List("+","-","*","/","(",")")
-  lexical.reserved ++= List("Hello","World")
-  
-  /****
-   * term = term + fact | term - fact | fact
-   * fact = fact * num  | fact / num  | num
-   */
-
-
- val term: PackratParser[Int] = (term~("+"~>fact) ^^ {case x~y => x+y}
-           |term~("-"~>fact) ^^ {case x~y => x-y}
-           |fact)
-  
- val fact: PackratParser[Int] = (fact~("*"~>numericLit) ^^ {case x~y => x*y.toInt}
-           |fact~("/"~>numericLit) ^^ {case x~y => x/y.toInt}
-           |"("~>term<~")"
-           |numericLit ^^ {_.toInt})
- }
diff --git a/test/files/run/packrat2.check b/test/files/run/packrat2.check
deleted file mode 100644
index 55a32ac..0000000
--- a/test/files/run/packrat2.check
+++ /dev/null
@@ -1,7 +0,0 @@
-1
-3
-81
-43
-59
-188
-960
diff --git a/test/files/run/packrat2.scala b/test/files/run/packrat2.scala
deleted file mode 100644
index f55021a..0000000
--- a/test/files/run/packrat2.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-import scala.util.parsing.combinator._
-
-import scala.util.parsing.combinator.syntactical.StandardTokenParsers
-import scala.util.parsing.input._
-import scala.util.parsing.combinator.token._
-
-import scala.collection.mutable.HashMap
-
-object Test extends App{
-  import grammars2._
-
-  val head = phrase(exp)
-
-  println(extractResult(head(new lexical.Scanner("1"))))
-  println(extractResult(head(new lexical.Scanner("1+2"))))
-  println(extractResult(head(new lexical.Scanner("9*9"))))
-  println(extractResult(head(new lexical.Scanner("4*9+7"))))
-  println(extractResult(head(new lexical.Scanner("4*9+7*2+3*3"))))
-  println(extractResult(head(new lexical.Scanner("4*9+7*2+3*3+9*5+7*6*2"))))
-  println(extractResult(head(new lexical.Scanner("4*(9+7)*(2+3)*3"))))
-
-}
-
-object grammars2 extends StandardTokenParsers with PackratParsers{
-  
-  def extractResult(r : ParseResult[_]) = r match{
-    case Success(a,_) => a
-    case NoSuccess(a,_) => a
-  }
-  
-  lexical.delimiters ++= List("+","-","*","/","(",")")
-  lexical.reserved ++= List("Hello","World")
-  
-  /*
-   * exp = sum | prod | num
-   * sum = exp ~ "+" ~ num
-   * prod = exp ~ "*" ~ num
-   */
-
-  val exp : PackratParser[Int] = sum | prod | numericLit ^^{_.toInt} | "("~>exp<~")"
-  val sum : PackratParser[Int] = exp~("+"~>exp) ^^ {case x~y => x+y}
-  val prod: PackratParser[Int] = exp~("*"~>(numericLit ^^{_.toInt} | exp)) ^^ {case x~y => x*y}
-  
-   
- /* lexical.reserved ++= List("a","b", "c")
-  val a : PackratParser[Any] = numericLit^^{x => primeFactors(x.toInt)}
-  val b : PackratParser[Any] = memo("b")
-  val c : PackratParser[Any] = memo("c")
-  val AnBnCn : PackratParser[Any] = 
-    parseButDontEat(repMany1(a,b))~not(b)~>rep1(a)~repMany1(b,c)// ^^{case x~y => x:::y}
-  //val c : PackratParser[Any] = parseButDontEat(a)~a~a
-  //println(c((new PackratReader(new lexical.Scanner("45 24")))))
-  val r = new PackratReader(new lexical.Scanner("45 b c"))
-  println(AnBnCn(r))
-  println(r.getCache.size)
-*/ 
-}
diff --git a/test/files/run/packrat3.check b/test/files/run/packrat3.check
deleted file mode 100644
index 8c10626..0000000
--- a/test/files/run/packrat3.check
+++ /dev/null
@@ -1,7 +0,0 @@
-(((List(a, b)~())~List(a))~List(b, c))
-(((List(a, a, b, b)~())~List(a, a))~List(b, b, c, c))
-(((List(a, a, a, b, b, b)~())~List(a, a, a))~List(b, b, b, c, c, c))
-(((List(a, a, a, a, b, b, b, b)~())~List(a, a, a, a))~List(b, b, b, b, c, c, c, c))
-Expected failure
-``b'' expected but `c' found
-end of input
diff --git a/test/files/run/packrat3.scala b/test/files/run/packrat3.scala
deleted file mode 100644
index 216ef8f..0000000
--- a/test/files/run/packrat3.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-import scala.util.parsing.combinator._
-
-import scala.util.parsing.combinator.syntactical.StandardTokenParsers
-import scala.util.parsing.input._
-import scala.util.parsing.combinator.token._
-
-import scala.collection.mutable.HashMap
-
-object Test {
-  def main(args: Array[String]): Unit = {
-    import grammars3._
-
-    val head = phrase(AnBnCn)
-
-    println(extractResult(head(new lexical.Scanner("a b c"))))
-    println(extractResult(head(new lexical.Scanner("a a b b c c"))))
-    println(extractResult(head(new lexical.Scanner("a a a b b b c c c"))))
-    println(extractResult(head(new lexical.Scanner("a a a a b b b b c c c c"))))
-
-    println(extractResult(AnBnCn(new PackratReader(new lexical.Scanner("a a a b b b b c c c c")))))
-    println(extractResult(AnBnCn(new PackratReader(new lexical.Scanner("a a a a b b b c c c c")))))
-    println(extractResult(AnBnCn(new PackratReader(new lexical.Scanner("a a a a b b b b c c c")))))
-  }
-}
-
-object grammars3 extends StandardTokenParsers with PackratParsers {
-  
-  def extractResult(r: ParseResult[_]) = r match {
-    case Success(a,_) => a
-    case NoSuccess(a,_) => a
-  }
-  
-
-  lexical.reserved ++= List("a","b", "c")
-  val a: PackratParser[Any] = memo("a")
-  val b: PackratParser[Any] = memo("b")
-  val c: PackratParser[Any] = memo("c")
-
-  val AnBnCn: PackratParser[Any] = 
-    guard(repMany1(a,b) ~ not(b)) ~ rep1(a) ~ repMany1(b,c)// ^^{case x~y => x:::y}
-
-
-  private def repMany[T](p: => Parser[T], q: => Parser[T]): Parser[List[T]] = 
-  ( p~repMany(p,q)~q ^^ {case x~xs~y => x::xs:::(y::Nil)}
-   | success(Nil)
-  )
-
-  def repMany1[T](p: => Parser[T], q: => Parser[T]): Parser[List[T]] = 
-   p~opt(repMany(p,q))~q ^^ {case x~Some(xs)~y => x::xs:::(y::Nil)}
-
-} 
diff --git a/test/files/run/parmap-ops.scala b/test/files/run/parmap-ops.scala
index f93bd7b..4274460 100644
--- a/test/files/run/parmap-ops.scala
+++ b/test/files/run/parmap-ops.scala
@@ -1,10 +1,10 @@
 import collection._
 
 object Test {
-  
+
   def main(args: Array[String]) {
     val gm: GenMap[Int, Int] = GenMap(0 -> 0, 1 -> 1).par
-    
+
     // ops
     assert(gm.isDefinedAt(1))
     assert(gm.contains(1))
@@ -21,11 +21,11 @@ object Test {
     } catch {
       case e: NoSuchElementException => // ok
     }
-    
+
     assert(gm.filterKeys(_ % 2 == 0)(0) == 0)
     assert(gm.filterKeys(_ % 2 == 0).get(1) == None)
     assert(gm.mapValues(_ + 1)(0) == 1)
-    
+
     // with defaults
     val pm = parallel.mutable.ParMap(0 -> 0, 1 -> 1)
     val dm = pm.withDefault(x => -x)
@@ -37,12 +37,12 @@ object Test {
     assert(dm(3) == 3)
     assert(pm(3) == 3)
     assert(dm(4) == -4)
-    
+
     val imdm = parallel.immutable.ParMap(0 -> 0, 1 -> 1).withDefault(x => -x)
     assert(imdm(0) == 0)
     assert(imdm(1) == 1)
     assert(imdm(2) == -2)
     assert(imdm.updated(2, 2) == parallel.ParMap(0 -> 0, 1 -> 1, 2 -> 2))
   }
-  
+
 }
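
The parmap-ops changes above keep exercising ParMap.withDefault; a short sketch of that API (keys and default function chosen for illustration):

import scala.collection.parallel

object ParMapDefaults {
  def main(args: Array[String]): Unit = {
    val pm = parallel.mutable.ParMap(0 -> 0, 1 -> 1)

    // withDefault computes values for missing keys instead of throwing,
    // without inserting anything into the map.
    val dm = pm.withDefault(k => -k)
    println(dm(1))       // 1, a stored value wins over the default
    println(dm(7))       // -7, computed by the default function
    println(pm.get(7))   // None: the underlying map is unchanged
  }
}
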
diff --git a/test/files/run/parserFilter.check b/test/files/run/parserFilter.check
deleted file mode 100644
index be04454..0000000
--- a/test/files/run/parserFilter.check
+++ /dev/null
@@ -1,9 +0,0 @@
-[1.3] failure: Input doesn't match filter: false
-
-if false
-  ^
-[1.1] failure: Input doesn't match filter: not
-
-not true
-^
-[1.8] parsed: (if~true)
diff --git a/test/files/run/parserFilter.scala b/test/files/run/parserFilter.scala
deleted file mode 100644
index d007d44..0000000
--- a/test/files/run/parserFilter.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-object Test extends scala.util.parsing.combinator.RegexParsers {
-    val keywords = Set("if", "false")
-    def word: Parser[String] = "\\w+".r
-
-    def keyword: Parser[String] = word filter (keywords.contains)
-    def ident: Parser[String] = word filter(!keywords.contains(_))
-
-    def test = keyword ~ ident
-
-    def main(args: Array[String]) {
-      println(parseAll(test, "if false"))
-      println(parseAll(test, "not true"))
-      println(parseAll(test, "if true"))
-    }
-}
diff --git a/test/files/run/parserForFilter.check b/test/files/run/parserForFilter.check
deleted file mode 100644
index a53c147..0000000
--- a/test/files/run/parserForFilter.check
+++ /dev/null
@@ -1 +0,0 @@
-[1.13] parsed: (second,first)
diff --git a/test/files/run/parserForFilter.scala b/test/files/run/parserForFilter.scala
deleted file mode 100644
index 1bc44f8..0000000
--- a/test/files/run/parserForFilter.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-object Test extends scala.util.parsing.combinator.RegexParsers {
-  def word: Parser[String] = "\\w+".r
-
-  def twoWords = for {
-    (a ~ b) <- word ~ word
-  } yield (b, a)
-
-  def main(args: Array[String]) {
-    println(parseAll(twoWords, "first second"))
-  }
-}
-
diff --git a/test/files/run/parserJavaIdent.check b/test/files/run/parserJavaIdent.check
deleted file mode 100644
index 597ddbe..0000000
--- a/test/files/run/parserJavaIdent.check
+++ /dev/null
@@ -1,26 +0,0 @@
-[1.7] parsed: simple
-[1.8] parsed: with123
-[1.6] parsed: with$
-[1.10] parsed: withøßöèæ
-[1.6] parsed: with_
-[1.6] parsed: _with
-[1.1] failure: java identifier expected
-
-3start
-^
-[1.1] failure: java identifier expected
-
--start
-^
-[1.5] failure: java identifier expected
-
-with-s
-    ^
-[1.3] failure: java identifier expected
-
-we♥scala
-  ^
-[1.6] failure: java identifier expected
-
-with space
-     ^
diff --git a/test/files/run/parserJavaIdent.scala b/test/files/run/parserJavaIdent.scala
deleted file mode 100644
index c068075..0000000
--- a/test/files/run/parserJavaIdent.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-object Test extends scala.util.parsing.combinator.JavaTokenParsers {
-
-    def test[A](s: String) {
-      val res = parseAll(ident, s) match {
-        case Failure(_, in) => Failure("java identifier expected", in)
-        case o => o
-      }
-      println(res)
-    }
-
-    def main(args: Array[String]) {
-      // Happy tests
-      test("simple")
-      test("with123")
-      test("with$")
-      test("withøßöèæ")
-      test("with_")
-      test("_with")
-      // Sad tests
-      test("3start")
-      test("-start")
-      test("with-s")
-      test("we♥scala")
-      test("with space")
-    }
-}
diff --git a/test/files/run/parserNoSuccessMessage.check b/test/files/run/parserNoSuccessMessage.check
deleted file mode 100644
index fe00d2f..0000000
--- a/test/files/run/parserNoSuccessMessage.check
+++ /dev/null
@@ -1,20 +0,0 @@
-[1.2] failure: string matching regex `\d+' expected but `x' found
-
--x
- ^
-[1.1] failure: string matching regex `\d+' expected but `x' found
-
-x
-^
-[1.3] parsed: (Some(-)~5)
-[1.2] parsed: (None~5)
-[1.2] error: Number expected!
-
--x
- ^
-[1.1] error: Number expected!
-
-x
-^
-[1.3] parsed: (Some(-)~5)
-[1.2] parsed: (None~5)
diff --git a/test/files/run/parserNoSuccessMessage.scala b/test/files/run/parserNoSuccessMessage.scala
deleted file mode 100644
index 93aa252..0000000
--- a/test/files/run/parserNoSuccessMessage.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-object Test extends scala.util.parsing.combinator.RegexParsers {
-  def sign = "-"
-  def number = "\\d+".r
-  def p = sign.? ~ number withErrorMessage  "Number expected!"
-  def q = sign.? ~! number withErrorMessage  "Number expected!"
-
-  def main(args: Array[String]) {
-    println(parseAll(p, "-x"))
-    println(parseAll(p, "x"))
-    println(parseAll(p, "-5"))
-    println(parseAll(p, "5"))
-    println(parseAll(q, "-x"))
-    println(parseAll(q, "x"))
-    println(parseAll(q, "-5"))
-    println(parseAll(q, "5"))
-  }
-}
-
-
diff --git a/test/files/run/partialfun.scala b/test/files/run/partialfun.scala
index f3c53b9..71c7d3e 100644
--- a/test/files/run/partialfun.scala
+++ b/test/files/run/partialfun.scala
@@ -76,7 +76,7 @@ object Test {
     }
 
     val chained = pf0 orElse pf1 orElse pf2
-    chained()
+    chained(())
   }
 
   def main(args: Array[String]): Unit = {
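
The partialfun change above replaces chained() with chained(()), passing the Unit argument explicitly instead of relying on the compiler inserting it (an adaptation that 2.11 deprecates). A sketch of the orElse chaining involved; the bodies of pf0/pf1/pf2 here are made up for illustration, not taken from the test:

object ChainDemo {
  def main(args: Array[String]): Unit = {
    val pf0: PartialFunction[Unit, Int] = { case _ if false => 0 }
    val pf1: PartialFunction[Unit, Int] = { case _ if false => 1 }
    val pf2: PartialFunction[Unit, Int] = { case _ => 2 }

    // orElse tries each function in turn until one is defined at the argument.
    val chained = pf0 orElse pf1 orElse pf2

    // The Unit argument is written out explicitly.
    println(chained(()))  // 2
  }
}
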
diff --git a/test/files/run/patch-boundary.scala b/test/files/run/patch-boundary.scala
index 8381956..ed1a0e9 100644
--- a/test/files/run/patch-boundary.scala
+++ b/test/files/run/patch-boundary.scala
@@ -1,8 +1,8 @@
 object Test {
   def f = collection.mutable.ArrayBuffer(1, 2, 3, 4, 5, 6, 7, 8)
   def g = f.patch(4, List(1, 2), 10)
-  
+
   def main(args: Array[String]): Unit = {
-    assert(g.size == 6)    
+    assert(g.size == 6)
   }
 }
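
The patch-boundary test above checks that patch clamps the number of replaced elements at the end of the buffer; a quick sketch of the call it asserts on:

import scala.collection.mutable.ArrayBuffer

object PatchDemo {
  def main(args: Array[String]): Unit = {
    val buf = ArrayBuffer(1, 2, 3, 4, 5, 6, 7, 8)
    // Replace up to 10 elements starting at index 4 with List(1, 2);
    // only 4 elements exist past index 4, so the result has 6 elements.
    val patched = buf.patch(4, List(1, 2), 10)
    println(patched)        // ArrayBuffer(1, 2, 3, 4, 1, 2)
    println(patched.size)   // 6
  }
}
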
diff --git a/test/files/run/patmat-behavior-2.check b/test/files/run/patmat-behavior-2.check
new file mode 100644
index 0000000..a928fe7
--- /dev/null
+++ b/test/files/run/patmat-behavior-2.check
@@ -0,0 +1,24 @@
+f1(Foo(1)) == true
+f1(Foo(1, 2)) == false
+f1(Foo(1, 2, 3)) == false
+
+f2(Foo(1)) == false
+f2(Foo(1, 2)) == true
+f2(Foo(1, 2, 3)) == false
+
+f3(Foo(1)) == false
+f3(Foo(1, 2)) == false
+f3(Foo(1, 2, 3)) == true
+
+f1seq(Foo(1)) == true
+f1seq(Foo(1, 2)) == true
+f1seq(Foo(1, 2, 3)) == true
+
+f2seq(Foo(1)) == false
+f2seq(Foo(1, 2)) == true
+f2seq(Foo(1, 2, 3)) == true
+
+f3seq(Foo(1)) == false
+f3seq(Foo(1, 2)) == false
+f3seq(Foo(1, 2, 3)) == true
+
diff --git a/test/files/run/patmat-behavior-2.scala b/test/files/run/patmat-behavior-2.scala
new file mode 100644
index 0000000..b31f773
--- /dev/null
+++ b/test/files/run/patmat-behavior-2.scala
@@ -0,0 +1,50 @@
+case class Foo(x: Int, ys: Int*) {
+  // We write our own toString because of SI-7735
+  override def toString = (x +: ys).mkString("Foo(", ", ", ")")
+}
+
+object Test {
+  def f1(x: Any) = x match {
+    case Foo(x) => true
+    case _      => false
+  }
+  def f2(x: Any) = x match {
+    case Foo(x, y) => true
+    case _         => false
+  }
+  def f3(x: Any) = x match {
+    case Foo(x, y, z) => true
+    case _            => false
+  }
+  def f1seq(x: Any) = x match {
+    case Foo(x, ys @ _*) => true
+    case _               => false
+  }
+  def f2seq(x: Any) = x match {
+    case Foo(x, y, zs @ _*) => true
+    case _                  => false
+  }
+  def f3seq(x: Any) = x match {
+    case Foo(x, y, z, qs @ _*) => true
+    case _                     => false
+  }
+
+  val x1 = Foo(1)
+  val x2 = Foo(1, 2)
+  val x3 = Foo(1, 2, 3)
+
+  val fs = List[Any => Boolean](f1, f2, f3)
+  val fseqs = List[Any => Boolean](f1seq, f2seq, f3seq)
+  val xs = List[Foo](x1, x2, x3)
+
+  def main(args: Array[String]): Unit = {
+    for ((f, i) <- fs.zipWithIndex) {
+      xs foreach (x => println(s"f${i+1}($x) == ${f(x)}"))
+      println("")
+    }
+    for ((f, i) <- fseqs.zipWithIndex) {
+      xs foreach (x => println(s"f${i+1}seq($x) == ${f(x)}"))
+      println("")
+    }
+  }
+}
diff --git a/test/files/run/patmat-behavior.check b/test/files/run/patmat-behavior.check
new file mode 100644
index 0000000..273a143
--- /dev/null
+++ b/test/files/run/patmat-behavior.check
@@ -0,0 +1,90 @@
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C10[A]
+    def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                  ^
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C20[A]
+    def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                     ^
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C01[A]
+    def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                           ^
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C11[A]
+    def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                                                          ^
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C21[A]
+    def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                                                                                      ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C00[A]
+    def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                              ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C20[A]
+    def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                     ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C01[A]
+    def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                           ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C11[A]
+    def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                                                          ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C21[A]
+    def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                                                                                      ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C00[A]
+    def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                              ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C10[A]
+    def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                  ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C01[A]
+    def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                           ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C11[A]
+    def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                                                          ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C21[A]
+    def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                                                                                      ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C00[A]
+    def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                              ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C10[A]
+    def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                  ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C20[A]
+    def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                     ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C11[A]
+    def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                                                          ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C21[A]
+    def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                                                                                      ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C00[A]
+    def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                              ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C10[A]
+    def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                  ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C20[A]
+    def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                     ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C01[A]
+    def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                           ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C21[A]
+    def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                                                                                      ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C00[A]
+    def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                              ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C10[A]
+    def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                  ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C20[A]
+    def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                     ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C01[A]
+    def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                           ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C11[A]
+    def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+                                                                                                                                          ^
diff --git a/test/files/run/patmat-behavior.scala b/test/files/run/patmat-behavior.scala
new file mode 100644
index 0000000..8b6370d
--- /dev/null
+++ b/test/files/run/patmat-behavior.scala
@@ -0,0 +1,95 @@
+package s {
+  sealed trait C[+A]
+
+  case class C00[+A]() extends C[A]
+  case class C10[+A](x: A) extends C[A]
+  case class C20[+A](x: A, y: A) extends C[A]
+  case class C01[+A](xs: A*) extends C[A]
+  case class C11[+A](x: A, ys: A*) extends C[A]
+  case class C21[+A](x: A, y: A, zs: A*) extends C[A]
+
+  object E00 { def unapply[A](x: Any): Boolean                   = ??? }
+  object E10 { def unapply[A](x: Any): Option[A]                 = ??? }
+  object E20 { def unapply[A](x: Any): Option[(A, A)]            = ??? }
+  object E01 { def unapplySeq[A](x: Any): Option[Seq[A]]         = ??? }
+  object E11 { def unapplySeq[A](x: Any): Option[(A, Seq[A])]    = ??? }
+  object E21 { def unapplySeq[A](x: Any): Option[(A, A, Seq[A])] = ??? }
+
+  object F00 { def unapply[A](x: C[A]): Boolean                   = ??? }
+  object F10 { def unapply[A](x: C[A]): Option[A]                 = ??? }
+  object F20 { def unapply[A](x: C[A]): Option[(A, A)]            = ??? }
+  object F01 { def unapplySeq[A](x: C[A]): Option[Seq[A]]         = ??? }
+  object F11 { def unapplySeq[A](x: C[A]): Option[(A, Seq[A])]    = ??? }
+  object F21 { def unapplySeq[A](x: C[A]): Option[(A, A, Seq[A])] = ??? }
+
+  object G00 { def unapply[A](x: C00[A]): Boolean                   = ??? }
+  object G10 { def unapply[A](x: C10[A]): Option[A]                 = ??? }
+  object G20 { def unapply[A](x: C20[A]): Option[(A, A)]            = ??? }
+  object G01 { def unapplySeq[A](x: C01[A]): Option[Seq[A]]         = ??? }
+  object G11 { def unapplySeq[A](x: C11[A]): Option[(A, Seq[A])]    = ??? }
+  object G21 { def unapplySeq[A](x: C21[A]): Option[(A, A, Seq[A])] = ??? }
+}
+import s._
+
+package pos {
+  object Test {
+    def ga1(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+    def ga2(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+    def ga3(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+    def ga4(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+    def ga5(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+    def ga6(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+
+    def gb1[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+    def gb2[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+    def gb3[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+    def gb4[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+    def gb5[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+    def gb6[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+
+    def gc1[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gc2[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gc3[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gc4[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gc5[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gc6[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+
+    def gd1[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gd2[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gd3[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gd4[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gd5[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gd6[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+  }
+}
+
+package neg {
+  object Fail {
+    def gb1[A](x: C00[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+    def gb2[A](x: C10[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+    def gb3[A](x: C20[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+    def gb4[A](x: C01[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+    def gb5[A](x: C11[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+    def gb6[A](x: C21[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+
+    def gc1[A](x: C00[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gc2[A](x: C10[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gc3[A](x: C20[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gc4[A](x: C01[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gc5[A](x: C11[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+    def gc6[A](x: C21[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+
+    def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+    def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+    def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+    def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+    def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+    def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+  }
+}
+
+object Test {
+  def main(args: Array[String]): Unit = {
+
+  }
+}
diff --git a/test/files/run/patmat-bind-typed.check b/test/files/run/patmat-bind-typed.check
new file mode 100644
index 0000000..8baef1b
--- /dev/null
+++ b/test/files/run/patmat-bind-typed.check
@@ -0,0 +1 @@
+abc
diff --git a/test/files/run/patmat-bind-typed.scala b/test/files/run/patmat-bind-typed.scala
new file mode 100644
index 0000000..10de921
--- /dev/null
+++ b/test/files/run/patmat-bind-typed.scala
@@ -0,0 +1,8 @@
+object Test {
+  def f(xs: List[Any]) = for (key @ (dummy: String) <- xs) yield key
+
+  def main(args: Array[String]): Unit = {
+    f("abc" :: Nil) foreach println
+    f(5 :: Nil) foreach println
+  }
+}
diff --git a/test/files/run/patmat-exprs.scala b/test/files/run/patmat-exprs.scala
index dfc78e2..7ca5fd3 100644
--- a/test/files/run/patmat-exprs.scala
+++ b/test/files/run/patmat-exprs.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ implicitConversions }
 import runtime.ScalaRunTime
 
 object Test {
@@ -37,7 +39,7 @@ trait Pattern {
   //
   // type Numeric[T]
   // import java.io.Serializable
-  // 
+  //
   // implicit def compat27a[T](x: Iterable[T]) = new {
   //   def iterator: Iterator[T] = x.elements
   //   def sum: Int = 5
@@ -278,8 +280,8 @@ trait Pattern {
         case Mul(Mul(y, Const(z)), Const(x)) => Mul(const(num.mul(x, z)), y)
 
         case Const(x) if x == num.one => One[T]
-        case Const(x) if x == num.zero => Zero[T]      
-      
+        case Const(x) if x == num.zero => Zero[T]
+
         case Sub(x, Neg(y)) => Add(List(x, y))
         case Sub(Neg(x), y) => Neg(Add(List(x, y)))
         case Neg(Neg(x)) => x
@@ -576,4 +578,4 @@ trait Pattern {
     implicit def long2Constant[T](l: Long)(implicit num: NumericOps[T]): Leaf[T] =
       const(num.fromDouble(l.toDouble))
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/patmat-finally.scala b/test/files/run/patmat-finally.scala
index 6f769b3..dfea5ad 100644
--- a/test/files/run/patmat-finally.scala
+++ b/test/files/run/patmat-finally.scala
@@ -21,5 +21,5 @@ object Test extends App {
   }
 
   bar(null, null)
-  x  
+  x
 }
diff --git a/test/files/run/patmat-mix-case-extractor.check b/test/files/run/patmat-mix-case-extractor.check
new file mode 100644
index 0000000..a6e1bd2
--- /dev/null
+++ b/test/files/run/patmat-mix-case-extractor.check
@@ -0,0 +1,8 @@
+-1
+6
+4
+18
+-1
+1006
+1004
+1018
diff --git a/test/files/run/patmat-mix-case-extractor.scala b/test/files/run/patmat-mix-case-extractor.scala
new file mode 100644
index 0000000..964e6f7
--- /dev/null
+++ b/test/files/run/patmat-mix-case-extractor.scala
@@ -0,0 +1,110 @@
+trait CaseClass
+trait ProdCaseClass extends CaseClass { def x: Int }
+trait SeqCaseClass extends CaseClass { def xs: Seq[Int] }
+
+case class CaseClass1() extends CaseClass
+case class CaseClass2(xs: Int*) extends SeqCaseClass
+case class CaseClass3(x: Int) extends ProdCaseClass
+case class CaseClass4(x: Int, xs: Int*) extends ProdCaseClass with SeqCaseClass
+
+object Extractor1 { def unapply(x: CaseClass): Boolean = false }
+object Extractor2 { def unapplySeq(x: SeqCaseClass): Option[Seq[Int]] = Some(x.xs) }
+object Extractor3 { def unapply(x: ProdCaseClass): Option[Int] = Some(x.x) }
+object Extractor4 { def unapplySeq(x: ProdCaseClass with SeqCaseClass): Option[(Int, Seq[Int])] = Some(x.x, x.xs) }
+
+class A {
+  def f1(x: Any) = x match {
+    case CaseClass1()           => -1
+    case CaseClass2(xs @ _*)    => xs.sum
+    case CaseClass3(x)          => x
+    case CaseClass4(x, xs @ _*) => x + xs.sum
+    case Extractor4(x, xs @ _*) => 1000 + x + xs.sum
+    case Extractor3(x)          => 1000 + x
+    case Extractor2(xs @ _*)    => 1000 + xs.sum
+    case Extractor1()           => -3
+    case _                      => -2
+  }
+  def f2(x: Any) = x match {
+    case Extractor4(x, xs @ _*) => 1000 + x + xs.sum
+    case Extractor3(x)          => 1000 + x
+    case Extractor2(xs @ _*)    => 1000 + xs.sum
+    case Extractor1()           => -3
+    case CaseClass1()           => -1
+    case CaseClass2(xs @ _*)    => xs.sum
+    case CaseClass3(x)          => x
+    case CaseClass4(x, xs @ _*) => x + xs.sum
+    case _                      => -2
+  }
+  def run() {
+    List(
+      f1(CaseClass1()),
+      f1(CaseClass2(1, 2, 3)),
+      f1(CaseClass3(4)),
+      f1(CaseClass4(5, 6, 7)),
+      f2(CaseClass1()),
+      f2(CaseClass2(1, 2, 3)),
+      f2(CaseClass3(4)),
+      f2(CaseClass4(5, 6, 7))
+    ) foreach println
+  }
+}
+
+object Test {
+  def main(args: Array[String]): Unit = {
+    (new A).run
+  }
+}
+
+
+class B {
+  case class CaseClass0()
+  case class CaseClass0v(xs: Int*)
+
+  case class CaseClass(x: Int, y: Int)
+  object Extractor { def unapply(x: Any): Option[(Int, Int)] = Some((1, 1)) }
+
+  case class CaseSeq(x: Char, y: Double, zs: Int*)
+  object ExtractorSeq { def unapplySeq(x: Any): Option[(Int, Int, Seq[Int])] = Some((1, 1, List(1))) }
+
+  def f1(x: CaseClass) = x match { case CaseClass(y, z) => y }
+  def f2(x: Any) = x match { case Extractor(y, z) => y }
+
+  def f3(x: CaseSeq) = x match {
+    case CaseSeq(x, y)    => y
+    case CaseSeq(x, y, z) => z
+  }
+  def f4(x: CaseSeq) = x match {
+    case CaseSeq(x, y, z)      => z :: Nil
+    case CaseSeq(x, y, z @ _*) => z
+  }
+
+  def f5(x: Any) = x match { case ExtractorSeq(x, y, z) => z }
+  def f6(x: Any) = x match { case ExtractorSeq(x, y, z @ _*) => z }
+
+  def g1(x: CaseClass0) = x match {
+    case CaseClass0() => true
+  }
+  def g2(x: CaseClass0v) = x match {
+    case CaseClass0v()        => true
+    case CaseClass0v(5)       => true
+    case CaseClass0v(x)       => true
+    case CaseClass0v(xs @ _*) => false
+  }
+}
+
+package p1 {
+  trait _X {
+    case class _Foo();
+    object _Bar {
+      def unapply(foo: _Foo): Boolean = true;
+    }
+  }
+
+  object Y extends _X {
+    val foo = _Foo()
+    foo match {
+      case _Bar() =>
+      case _ => assert(false)
+    }
+  }
+}
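
The new test above mixes case-class patterns with user-defined unapply/unapplySeq extractors. A compact sketch of the unapplySeq shape it relies on, exposing a fixed prefix plus a variable-length tail (the names and input here are hypothetical):

object Words {
  // Option[(String, Seq[String])]: one required element, then a Seq that
  // patterns can capture with `rest @ _*`, as Extractor4 does above.
  def unapplySeq(s: String): Option[(String, Seq[String])] =
    s.split(" ").toList match {
      case head :: tail => Some((head, tail))
      case Nil          => None
    }
}

object WordsDemo {
  def main(args: Array[String]): Unit = {
    "hello brave new world" match {
      case Words(first, rest @ _*) => println(s"$first + ${rest.size} more")
      case _                       => println("no words")
    }
  }
}
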
diff --git a/test/files/run/patmat-seqs.scala b/test/files/run/patmat-seqs.scala
index e237116..b5c47b4 100644
--- a/test/files/run/patmat-seqs.scala
+++ b/test/files/run/patmat-seqs.scala
@@ -7,14 +7,14 @@ object Test {
     case Seq(_, _, _, _, _, x: String) => "ss6"
     case _ => "d"
   }
-  
+
   def f2(x: Any) = x match {
     case Seq("a", "b", _*)  => "s2"
     case Seq(1, _*)         => "s1"
     case Seq(5, 6, 7, _*)   => "s3"
     case _                  => "d"
   }
-  
+
   def main(args: Array[String]): Unit = {
     val xs1 = List(
       List(1,2,3),
@@ -24,9 +24,9 @@ object Test {
       Seq(1, 2, 3, 4, 5, "abcd"),
       "abc"
     ) map f1
-    
+
     xs1 foreach println
-    
+
     val xs2 = List(
       Seq(5, 6, 7),
       Seq(5, 6, 7, 8, 9),
@@ -36,7 +36,7 @@ object Test {
       Nil,
       5
     ) map f2
-    
+
     xs2 foreach println
   }
 }
diff --git a/test/files/run/patmat_unapp_abstype-new.check b/test/files/run/patmat_unapp_abstype-new.check
index 42c5463..35447db 100644
--- a/test/files/run/patmat_unapp_abstype-new.check
+++ b/test/files/run/patmat_unapp_abstype-new.check
@@ -1,3 +1,9 @@
+patmat_unapp_abstype-new.scala:21: warning: abstract type pattern TypesUser.this.TypeRef is unchecked since it is eliminated by erasure
+      case TypeRef(x) => println("TypeRef")
+                  ^
+patmat_unapp_abstype-new.scala:53: warning: abstract type pattern Intermed.this.Foo is unchecked since it is eliminated by erasure
+     case Foo(x) => println("Foo")
+             ^
 TypeRef
 MethodType
 Bar
diff --git a/test/files/run/patmat_unapp_abstype-new.scala b/test/files/run/patmat_unapp_abstype-new.scala
index 1141177..c2927bd 100644
--- a/test/files/run/patmat_unapp_abstype-new.scala
+++ b/test/files/run/patmat_unapp_abstype-new.scala
@@ -18,7 +18,7 @@ trait TypesAPI {
 trait TypesUser extends TypesAPI {
   def shouldNotCrash(tp: Type): Unit = {
     tp match {
-      case TypeRef(x) => println("TypeRef") 
+      case TypeRef(x) => println("TypeRef")
       case MethodType(x) => println("MethodType")
       case _ => println("none of the above")
     }
@@ -27,7 +27,7 @@ trait TypesUser extends TypesAPI {
 
 trait TypesImpl extends TypesAPI {
   object TypeRef extends TypeRefExtractor  // this will have a bridged unapply(x: Type) = unapply(x.asInstanceOf[TypeRef])
-  case class TypeRef(n: Int) extends Type // this has a bridge from TypesAPI#Type to TypesImpl#TypeRef 
+  case class TypeRef(n: Int) extends Type // this has a bridge from TypesAPI#Type to TypesImpl#TypeRef
   // --> the cast in the bridge will fail because the pattern matcher can't type test against the abstract types in TypesUser
 }
 
diff --git a/test/files/run/patmat_unapp_abstype-old.check b/test/files/run/patmat_unapp_abstype-old.check
deleted file mode 100644
index 72239d1..0000000
--- a/test/files/run/patmat_unapp_abstype-old.check
+++ /dev/null
@@ -1,4 +0,0 @@
-TypeRef
-none of the above
-Bar
-Foo
diff --git a/test/files/run/patmat_unapp_abstype-old.flags b/test/files/run/patmat_unapp_abstype-old.flags
deleted file mode 100644
index ba80cad..0000000
--- a/test/files/run/patmat_unapp_abstype-old.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xoldpatmat
diff --git a/test/files/run/patmat_unapp_abstype-old.scala b/test/files/run/patmat_unapp_abstype-old.scala
deleted file mode 100644
index 45496f0..0000000
--- a/test/files/run/patmat_unapp_abstype-old.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-// abstract types and extractors, oh my!
-trait TypesAPI {
-  trait Type
-
-  // an alternative fix (implemented in the virtual pattern matcher, is to replace the isInstanceOf by a manifest-based run-time test)
-  // that's what typeRefMani is for
-  type TypeRef <: Type //; implicit def typeRefMani: Manifest[TypeRef]
-  val TypeRef: TypeRefExtractor; trait TypeRefExtractor {
-    def apply(x: Int): TypeRef
-    def unapply(x: TypeRef): Option[(Int)]
-  }
-
-  // just for illustration, should follow the same pattern as TypeRef
-  case class MethodType(n: Int) extends Type
-}
-
-// user should not be exposed to the implementation
-trait TypesUser extends TypesAPI {
-  def shouldNotCrash(tp: Type): Unit = {
-    tp match {
-      case TypeRef(x) => println("TypeRef") 
-      // the above checks tp.isInstanceOf[TypeRef], which is erased to tp.isInstanceOf[Type]
-      //   before calling TypeRef.unapply(tp), which will then crash unless tp.isInstanceOf[TypesImpl#TypeRef] (which is not implied by tp.isInstanceOf[Type])
-      // tp.isInstanceOf[TypesImpl#TypeRef] is equivalent to classOf[TypesImpl#TypeRef].isAssignableFrom(tp.getClass)
-      // this is equivalent to manifest
-      // it is NOT equivalent to manifest[Type] <:< typeRefMani
-      case MethodType(x) => println("MethodType")
-      case _ => println("none of the above")
-    }
-  }
-}
-
-trait TypesImpl extends TypesAPI {
-  object TypeRef extends TypeRefExtractor  // this will have a bridged unapply(x: Type) = unapply(x.asInstanceOf[TypeRef])
-  case class TypeRef(n: Int) extends Type // this has a bridge from TypesAPI#Type to TypesImpl#TypeRef 
-  // --> the cast in the bridge will fail because the pattern matcher can't type test against the abstract types in TypesUser
-  //lazy val typeRefMani = manifest[TypeRef]
-}
-
-trait Foos {
- trait Bar
- type Foo <: Bar
- trait FooExtractor {
-   def unapply(foo: Foo): Option[Int]
- }
- val Foo: FooExtractor
-}
-
-trait RealFoos extends Foos {
- class Foo(val x: Int) extends Bar
- object Foo extends FooExtractor {
-   def unapply(foo: Foo): Option[Int] = Some(foo.x)
- }
-}
-
-trait Intermed extends Foos {
- def crash(bar: Bar): Unit =
-   bar match {
-     case Foo(x) => println("Foo")
-     case _ => println("Bar")
-   }
-}
-
-object TestUnappStaticallyKnownSynthetic extends TypesImpl with TypesUser {
-  def test() = {
-    shouldNotCrash(TypeRef(10)) // should and does print "TypeRef"
-    // once  #1697/#2337 are fixed, this should generate the correct output
-    shouldNotCrash(MethodType(10)) // should print "MethodType" but prints "none of the above" -- good one, pattern matcher!
-  }
-}
-
-object TestUnappDynamicSynth extends RealFoos with Intermed {
- case class FooToo(n: Int) extends Bar
- def test() = {
-   crash(FooToo(10))
-   crash(new Foo(5))
- }
-}
-
-object Test extends App {
-  TestUnappStaticallyKnownSynthetic.test()
-  TestUnappDynamicSynth.test()
-}
diff --git a/test/files/run/patmatnew.check b/test/files/run/patmatnew.check
index e69de29..56b8ac2 100644
--- a/test/files/run/patmatnew.check
+++ b/test/files/run/patmatnew.check
@@ -0,0 +1,15 @@
+patmatnew.scala:351: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+        case 1 => "OK"
+                  ^
+patmatnew.scala:352: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+        case 2 => assert(false); "KO"
+                                 ^
+patmatnew.scala:353: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+        case 3 => assert(false); "KO"
+                                 ^
+patmatnew.scala:670: warning: This catches all Throwables. If this is really intended, use `case e : Throwable` to clear this warning.
+            case e => {
+                 ^
+patmatnew.scala:489: warning: unreachable code
+        case _ if false =>
+                        ^
diff --git a/test/files/run/patmatnew.scala b/test/files/run/patmatnew.scala
index a6f8199..3c0d00d 100644
--- a/test/files/run/patmatnew.scala
+++ b/test/files/run/patmatnew.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ postfixOps }
+
 object Test {
 
   def main(args: Array[String]) {
@@ -43,7 +46,7 @@ object Test {
   object SimpleUnapply {
     def run() { // from sortedmap, old version
       List((1, 2)).head match {
-        case kv@Pair(key, _) => kv.toString + " " + key.toString
+        case kv@(key, _) => kv.toString + " " + key.toString
       }
 
     }
@@ -111,7 +114,7 @@ object Test {
     val foo2 = new Foo(2)
     def run() {
       val res = (foo1.Bar(2): Any) match {
-        case foo2.Bar(2) => false
+
         case foo1.Bar(2) => true
       }
       assert(res)
@@ -251,7 +254,7 @@ object Test {
   }
 
   // (not regular) fancy guards / bug#644
-  object TestSequence06 { 
+  object TestSequence06 {
 
     case class A(i: Any)
 
@@ -397,9 +400,9 @@ object Test {
   // these are exhaustive matches
   //   should not generate any warnings
   def f[A](z: (Option[A], Option[A])) = z match {
-    case Pair(None, Some(x)) => 1
-    case Pair(Some(x), None) => 2
-    case Pair(Some(x), Some(y)) => 3
+    case (None, Some(x)) => 1
+    case (Some(x), None) => 2
+    case (Some(x), Some(y)) => 3
     case _ => 4
   }
 
@@ -416,9 +419,9 @@ object Test {
   }
 
   def h[A](x: (Option[A], Option[A])) = x match {
-    case Pair(None, _: Some[_]) => 1
-    case Pair(_: Some[_], None) => 2
-    case Pair(_: Some[_], _: Some[_]) => 3
+    case (None, _: Some[_]) => 1
+    case (_: Some[_], None) => 2
+    case (_: Some[_], _: Some[_]) => 3
     case _ => 4
   }
 
@@ -447,7 +450,7 @@ object Test {
       object Get extends Sync
 
       var ps: PartialFunction[Any, Any] = {
-        case Get(y) if y > 4 => // y gets a wildcard type for some reason?! hack 
+        case Get(y) if y > 4 => // y gets a wildcard type for some reason?! hack
       }
     }
     def run() {
@@ -536,17 +539,17 @@ object Test {
     case class Operator(x: Int);
     val EQ = new Operator(2);
 
-    def analyze(x: Pair[Operator, Int]) = x match {
-      case Pair(EQ, 0) => "0"
-      case Pair(EQ, 1) => "1"
-      case Pair(EQ, 2) => "2"
+    def analyze(x: Tuple2[Operator, Int]) = x match {
+      case (EQ, 0) => "0"
+      case (EQ, 1) => "1"
+      case (EQ, 2) => "2"
     }
     def run() {
-      val x = Pair(EQ, 0);
+      val x = (EQ, 0);
       assertEquals("0", analyze(x)); // should print "0"
-      val y = Pair(EQ, 1);
+      val y = (EQ, 1);
       assertEquals("1", analyze(y)); // should print "1"
-      val z = Pair(EQ, 2);
+      val z = (EQ, 2);
       assertEquals("2", analyze(z)); // should print "2"
     }
   }
diff --git a/test/files/run/pc-conversions.scala b/test/files/run/pc-conversions.scala
index 60ee59c..19fef35 100644
--- a/test/files/run/pc-conversions.scala
+++ b/test/files/run/pc-conversions.scala
@@ -1,15 +1,17 @@
-
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
 
 import collection._
 
 
 // test conversions between collections
 object Test {
-  
+
   def main(args: Array[String]) {
     testConversions
   }
-  
+
   def testConversions {
     // seq conversions
     assertSeq(parallel.mutable.ParArray(1, 2, 3))
@@ -18,7 +20,7 @@ object Test {
     assertSeq(parallel.immutable.ParRange(1, 50, 1, false))
     assertSeq(parallel.immutable.ParHashMap(1 -> 2, 2 -> 4))
     assertSeq(parallel.immutable.ParHashSet(1, 2, 3))
-    
+
     // par conversions
     assertPar(Array(1, 2, 3))
     assertPar(mutable.ArrayBuffer(1, 2, 3))
@@ -29,7 +31,7 @@ object Test {
     assertPar(immutable.Range(1, 50, 1))
     assertPar(immutable.HashMap(1 -> 1, 2 -> 2))
     assertPar(immutable.HashSet(1, 2, 3))
-    
+
     // par.to* and to*.par tests
     assertToPar(List(1 -> 1, 2 -> 2, 3 -> 3))
     assertToPar(Stream(1 -> 1, 2 -> 2))
@@ -47,19 +49,19 @@ object Test {
     assertToPar(parallel.mutable.ParHashSet(1 -> 2))
     assertToPar(parallel.immutable.ParHashMap(1 -> 2))
     assertToPar(parallel.immutable.ParHashSet(1 -> 3))
-    
+
     assertToParWoMap(immutable.Range(1, 10, 2))
-    
+
     // seq and par again conversions)
     assertSeqPar(parallel.mutable.ParArray(1, 2, 3))
   }
-  
+
   def assertSeqPar[T](pc: parallel.ParIterable[T]) = pc.seq.par == pc
-  
+
   def assertSeq[T](pc: parallel.ParIterable[T]) = assert(pc.seq == pc)
-  
+
   def assertPar[T, P <: Parallel](xs: GenIterable[T]) = assert(xs == xs.par)
-  
+
   def assertToPar[K, V](xs: GenTraversable[(K, V)]) {
     xs match {
       case _: Seq[_] =>
@@ -67,26 +69,26 @@ object Test {
         assert(xs.par.toIterable == xs)
       case _ =>
     }
-    
+
     assert(xs.toSeq.par == xs.toSeq)
     assert(xs.par.toSeq == xs.toSeq)
-    
+
     assert(xs.toSet.par == xs.toSet)
     assert(xs.par.toSet == xs.toSet)
-    
+
     assert(xs.toMap.par == xs.toMap)
     assert(xs.par.toMap == xs.toMap)
   }
-  
+
   def assertToParWoMap[T](xs: GenSeq[T]) {
     assert(xs.toIterable.par == xs.toIterable)
     assert(xs.par.toIterable == xs.toIterable)
-    
+
     assert(xs.toSeq.par == xs.toSeq)
     assert(xs.par.toSeq == xs.toSeq)
-    
+
     assert(xs.toSet.par == xs.toSet)
     assert(xs.par.toSet == xs.toSet)
   }
-  
+
 }
diff --git a/test/files/run/pf-catch.scala b/test/files/run/pf-catch.scala
index ba0781f..33982d0 100644
--- a/test/files/run/pf-catch.scala
+++ b/test/files/run/pf-catch.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ postfixOps }
 object Test {
   def shortName(x: AnyRef) = x.getClass.getName split '.' last
   type Handler[+T] = PartialFunction[Throwable, T]
@@ -6,12 +8,12 @@ object Test {
     case x: java.util.NoSuchElementException    => shortName(x)
     case x: java.lang.IllegalArgumentException  => shortName(x)
   }
-  
+
   def fn[T: Handler](body: => T): T = {
     try body
     catch implicitly[Handler[T]]
   }
-  
+
   def f1 = {
     implicit val myHandler = standardHandler
     println(fn(Nil.head))
@@ -27,8 +29,8 @@ object Test {
 
   def main(args: Array[String]): Unit = {
     try f1
-    catch { case x => println(shortName(x) + " slipped by.") }
-    
+    catch { case x: Throwable => println(shortName(x) + " slipped by.") }
+
     f2
   }
 }
diff --git a/test/files/run/position-val-def.check b/test/files/run/position-val-def.check
new file mode 100644
index 0000000..a92c77c
--- /dev/null
+++ b/test/files/run/position-val-def.check
@@ -0,0 +1,30 @@
+val x = 0
+[0:9]val x = [8:9]0
+
+var x = 0
+[0:9]var x = [8:9]0
+
+val x, y = 0
+[NoPosition]{
+  [0:5]val x = [11]0;
+  [7:12]val y = [11:12]0;
+  [NoPosition]()
+}
+
+var x, y = 0
+[NoPosition]{
+  [0:5]var x = [11]0;
+  [7:12]var y = [11:12]0;
+  [NoPosition]()
+}
+
+val (x, y) = 0
+[NoPosition]{
+  <0:14><synthetic> <artifact> private[this] val x$1 = <4:14>[13:14][13:14]0: @[13]scala.unchecked match {
+    <4:10>case <4:10>[4]scala.Tuple2(<5:6>(x @ [5]_), <8:9>(y @ [8]_)) => <4:10><4:10>scala.Tuple2(<4:10>x, <4:10>y)
+  };
+  [5:6]val x = [5]x$1._1;
+  [8:9]val y = [8]x$1._2;
+  [NoPosition]()
+}
+
diff --git a/test/files/run/position-val-def.scala b/test/files/run/position-val-def.scala
new file mode 100644
index 0000000..62cb54a
--- /dev/null
+++ b/test/files/run/position-val-def.scala
@@ -0,0 +1,26 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test {
+  val toolbox = cm.mkToolBox(options = "-Yrangepos")
+
+  def main(args: Array[String]) {
+    def test(expr: String) {
+      val t = toolbox.parse(expr)
+      println(expr)
+      println(show(t, printPositions = true))
+      println()
+    }
+    val tests = """
+    val x = 0
+    var x = 0
+    val x, y = 0
+    var x, y = 0
+    val (x, y) = 0
+    """
+    val exprs = tests.split("\\n").map(_.trim).filterNot(_.isEmpty)
+    exprs foreach test
+  }
+}
diff --git a/test/files/run/preinits.check b/test/files/run/preinits.check
index 5584ab2..e97a14b 100644
--- a/test/files/run/preinits.check
+++ b/test/files/run/preinits.check
@@ -1,3 +1,9 @@
+preinits.scala:2: warning: Implementation restriction: early definitions in traits are not initialized before the super class is initialized.
+trait B extends { override val x = 1 } with A { println("B") }
+                               ^
+preinits.scala:3: warning: Implementation restriction: early definitions in traits are not initialized before the super class is initialized.
+trait C extends { override val x = 2 } with A
+                               ^
 A
 B
 2
diff --git a/test/files/run/primitive-sigs-2-new.flags b/test/files/run/primitive-sigs-2-new.flags
new file mode 100644
index 0000000..2349d82
--- /dev/null
+++ b/test/files/run/primitive-sigs-2-new.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline
diff --git a/test/files/run/primitive-sigs-2-new.scala b/test/files/run/primitive-sigs-2-new.scala
index cf6de9c..1f39667 100644
--- a/test/files/run/primitive-sigs-2-new.scala
+++ b/test/files/run/primitive-sigs-2-new.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ postfixOps }
 import scala.reflect.{ClassTag, classTag}
 import java.{ lang => jl }
 
@@ -29,4 +31,4 @@ object Test {
     println(new C f)
     c3m.sorted foreach println
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/primitive-sigs-2-old.flags b/test/files/run/primitive-sigs-2-old.flags
new file mode 100644
index 0000000..ac96850
--- /dev/null
+++ b/test/files/run/primitive-sigs-2-old.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline
\ No newline at end of file
diff --git a/test/files/run/primitive-sigs-2-old.scala b/test/files/run/primitive-sigs-2-old.scala
index b7152f7..16fe5ae 100644
--- a/test/files/run/primitive-sigs-2-old.scala
+++ b/test/files/run/primitive-sigs-2-old.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ postfixOps }
 import java.{ lang => jl }
 
 trait T[A] {
@@ -25,7 +27,7 @@ object Test {
   val c1: Class[_] = classOf[T[_]]
   val c2: Class[_] = classOf[C]
   val c3: Class[_] = classOf[Arr]
-  
+
   val c1m = c1.getMethods.toList filter (_.getName == "f") map (_.getGenericReturnType.toString)
   val c2m = c2.getMethods.toList filter (_.getName == "f") map (_.getGenericReturnType.toString)
   val c3m = c3.getDeclaredMethods.toList map (_.toGenericString)
diff --git a/test/files/run/priorityQueue.scala b/test/files/run/priorityQueue.scala
index edc0e32..327d8bf 100644
--- a/test/files/run/priorityQueue.scala
+++ b/test/files/run/priorityQueue.scala
@@ -31,23 +31,23 @@ object Test {
   //   val pq2 = new PriorityQueue[String]
   //   val pq3 = new PriorityQueue[String]
   //   val pq4 = new PriorityQueue[String]
-    
+
   //   val strings = (1 to 20).toList map (i => List.fill((Math.abs(nextInt % 20)) + 1)("x").mkString)
-    
+
   //   pq1 ++= strings
   //   pq2 ++= strings.reverse
   //   for (s <- strings) pq3 += s
   //   for (s <- strings.reverse) pq4 += s
-    
+
   //   val pqs = List(pq1, pq2, pq3, pq4, pq1.clone, pq2.clone)
-  
+
   //   for (queue1 <- pqs ; queue2 <- pqs) {
   //     val l1: List[String] = queue1.dequeueAll[String, List[String]]
   //     val l2: List[String] = queue2.dequeueAll[String, List[String]]
   //     assert(l1 == l2)
   //     assert(queue1.max == queue2.max)
   //   }
-    
+
   //   assertPriorityDestructive(pq1)
   // }
 
@@ -83,7 +83,7 @@ object Test {
   //   }
   //   for (i <- 0 until 100) assert(intpq(i) == (99 - i))
   // }
-  
+
   // def testTails {
   //   val pq = new PriorityQueue[Int]
   //   for (i <- 0 until 10) pq += i * 4321 % 200
@@ -108,13 +108,13 @@ object Test {
   //     prev = curr
   //   }
   // }
-  
+
   // def testInits {
   //   val pq = new PriorityQueue[Long]
   //   for (i <- 0 until 20) pq += (i + 313) * 111 % 300
-    
+
   //   assert(pq.size == 20)
-    
+
   //   val initpq = pq.init
   //   assert(initpq.size == 19)
   //   assertPriorityDestructive(initpq)
@@ -123,19 +123,19 @@ object Test {
   // def testFilters {
   //   val pq = new PriorityQueue[String]
   //   for (i <- 0 until 100) pq += "Some " + (i * 312 % 200)
-    
+
   //   val filpq = pq.filter(_.indexOf('0') != -1)
   //   assertPriorityDestructive(filpq)
   // }
 
   // def testIntensiveEnqueueDequeue {
   //   val pq = new PriorityQueue[Int]
-    
+
   //   testIntensive(1000, pq)
   //   pq.clear
   //   testIntensive(200, pq)
   // }
-  
+
   // def testIntensive(sz: Int, pq: PriorityQueue[Int]) {
   //   val lst = new collection.mutable.ArrayBuffer[Int] ++ (0 until sz)
   //   val rand = new util.Random(7)
@@ -153,7 +153,7 @@ object Test {
   //   pq ++= (0 until 100)
   //   val droppq = pq.drop(50)
   //   assertPriority(droppq)
-    
+
   //   pq.clear
   //   pq ++= droppq
   //   assertPriorityDestructive(droppq)
@@ -173,7 +173,7 @@ object Test {
   // //   assertPriority(pq)
 
   // //   pq.clear
-    
+
   // //   pq ++= (1 to 100)
   // //   pq(5) = 200
   // //   assert(pq(0) == 200)
@@ -204,7 +204,7 @@ object Test {
   // def testEquality {
   //   val pq1 = new PriorityQueue[Int]
   //   val pq2 = new PriorityQueue[Int]
-    
+
   //   pq1 ++= (0 until 50)
   //   var i = 49
   //   while (i >= 0) {
@@ -213,7 +213,7 @@ object Test {
   //   }
   //   assert(pq1 == pq2)
   //   assertPriority(pq2)
-    
+
   //   pq1 += 100
   //   assert(pq1 != pq2)
   //   pq2 += 100
@@ -230,7 +230,7 @@ object Test {
   //   val pq = new PriorityQueue[Int]
   //   pq ++= (0 until 100)
   //   assert(pq.size == 100)
-    
+
   //   val (p1, p2) = pq.partition(_ < 50)
   //   assertPriorityDestructive(p1)
   //   assertPriorityDestructive(p2)
@@ -252,13 +252,13 @@ object Test {
   //   assert(pq.lastIndexWhere(_ == 9) == 0)
   //   assert(pq.lastIndexOf(8) == 1)
   //   assert(pq.lastIndexOf(7) == 2)
-    
+
   //   pq += 5
   //   pq += 9
   //   assert(pq.lastIndexOf(9) == 1)
   //   assert(pq.lastIndexWhere(_ % 2 == 1) == 10)
   //   assert(pq.lastIndexOf(5) == 6)
-    
+
   //   val lst = pq.reverseIterator.toList
   //   for (i <- 0 until 5) assert(lst(i) == i)
   //   assert(lst(5) == 5)
@@ -268,13 +268,13 @@ object Test {
   //   assert(lst(9) == 8)
   //   assert(lst(10) == 9)
   //   assert(lst(11) == 9)
-    
+
   //   pq.clear
   //   assert(pq.reverseIterator.toList.isEmpty)
-    
+
   //   pq ++= (50 to 75)
   //   assert(pq.lastIndexOf(70) == 5)
-    
+
   //   pq += 55
   //   pq += 70
   //   assert(pq.lastIndexOf(70) == 6)
@@ -284,11 +284,11 @@ object Test {
   //   assert(pq.lastIndexWhere(_ > 54, 21) == 21)
   //   assert(pq.lastIndexWhere(_ > 69, 5) == 5)
   // }
-  
+
   // def testReverse {
   //   val pq = new PriorityQueue[(Int, Int)]
   //   pq ++= (for (i <- 0 until 10) yield (i, i * i % 10))
-    
+
   //   assert(pq.reverse.size == pq.reverseIterator.toList.size)
   //   assert((pq.reverse zip pq.reverseIterator.toList).forall(p => p._1 == p._2))
   //   assert(pq.reverse.sameElements(pq.reverseIterator.toSeq))
@@ -296,19 +296,19 @@ object Test {
   //   assert(pq.reverse(1)._1 == pq(8)._1)
   //   assert(pq.reverse(4)._1 == pq(5)._1)
   //   assert(pq.reverse(9)._1 == pq(0)._1)
-    
+
   //   pq += ((7, 7))
   //   pq += ((7, 9))
   //   pq += ((7, 8))
   //   assert(pq.reverse.reverse == pq)
   //   assert(pq.reverse.lastIndexWhere(_._2 == 6) == 6)
   //   assertPriorityDestructive(pq.reverse.reverse)
-    
+
   //   val iq = new PriorityQueue[Int]
   //   iq ++= (0 until 50)
   //   assert(iq.reverse == iq.reverseIterator.toSeq)
   //   assert(iq.reverse.reverse == iq)
-    
+
   //   iq += 25
   //   iq += 40
   //   iq += 10
@@ -317,10 +317,10 @@ object Test {
   //   assert(iq.reverse.lastIndexWhere(_ == 10) == 11)
   //   assertPriorityDestructive(iq.reverse.reverse)
   // }
-  
+
   // def testToList {
   //   val pq = new PriorityQueue[Int]
-    
+
   //   pq += 1
   //   pq += 4
   //   pq += 0
@@ -330,16 +330,16 @@ object Test {
   //   assert(pq.toList == pq)
   //   assert(pq == List(5, 4, 3, 2, 1, 0))
   //   assert(pq.reverse == List(0, 1, 2, 3, 4, 5))
-    
+
   //   pq.clear
   //   for (i <- -50 until 50) pq += i
   //   assert(pq.toList == pq)
   //   assert(pq.toList == (-50 until 50).reverse)
   // }
-  
+
   // def testForeach {
   //   val pq = new PriorityQueue[Char]
-    
+
   //   pq += 't'
   //   pq += 'o'
   //   pq += 'b'
@@ -351,7 +351,7 @@ object Test {
   //   assert(sbf.toString == sbi.toString)
   //   assert(sbf.toString == "ytob")
   // }
-  
+
 }
 
 
diff --git a/test/files/run/private-inline.check b/test/files/run/private-inline.check
index 209e3ef..e71aec2 100644
--- a/test/files/run/private-inline.check
+++ b/test/files/run/private-inline.check
@@ -1 +1,13 @@
+private-inline.scala:24: warning: Could not inline required method wrapper1 because callee contains exception handlers / finally clause, and is invoked with non-empty operand stack.
+  def f1b() = identity(wrapper1(5))
+                               ^
+private-inline.scala:24: warning: At the end of the day, could not inline @inline-marked method wrapper1
+  def f1b() = identity(wrapper1(5))
+                               ^
+private-inline.scala:29: warning: Could not inline required method wrapper2 because callee contains exception handlers / finally clause, and is invoked with non-empty operand stack.
+  def f2b() = identity(wrapper2(5))
+                               ^
+private-inline.scala:29: warning: At the end of the day, could not inline @inline-marked method wrapper2
+  def f2b() = identity(wrapper2(5))
+                               ^
 20
diff --git a/test/files/run/private-inline.flags b/test/files/run/private-inline.flags
index eb4d19b..00d3643 100644
--- a/test/files/run/private-inline.flags
+++ b/test/files/run/private-inline.flags
@@ -1 +1 @@
--optimise
\ No newline at end of file
+-optimise -Yinline-warnings
diff --git a/test/files/run/private-inline.scala b/test/files/run/private-inline.scala
index a620077..60fef9e 100644
--- a/test/files/run/private-inline.scala
+++ b/test/files/run/private-inline.scala
@@ -2,7 +2,7 @@
 final class A {
   private var x1 = false
   var x2 = false
-  
+
   // manipulates private var
   @inline private def wrapper1[T](body: => T): T = {
     val saved = x1
@@ -17,35 +17,35 @@ final class A {
     try body
     finally x2 = saved
   }
-  
+
   // not inlined
   def f1a() = wrapper1(5)
   // inlined!
   def f1b() = identity(wrapper1(5))
-  
+
   // not inlined
   def f2a() = wrapper2(5)
   // inlined!
-  def f2b() = identity(wrapper2(5))  
+  def f2b() = identity(wrapper2(5))
 }
 
 object Test {
   def methodClasses = List("f1a", "f2a") map ("A$$anonfun$" + _ + "$1")
-  
+
   def main(args: Array[String]): Unit = {
     val a = new A
     import a._
     println(f1a() + f1b() + f2a() + f2b())
-    
+
     // Don't know how else to test this: all these should have been
     // inlined, so all should fail.
     methodClasses foreach { clazz =>
-      
+
       val foundClass = (
         try   Class.forName(clazz)
-        catch { case _ => null }
+        catch { case _: Throwable => null }
       )
-      
+
       assert(foundClass == null, foundClass)
     }
   }
diff --git a/test/files/run/t5284c.check b/test/files/run/private-override.check
similarity index 100%
copy from test/files/run/t5284c.check
copy to test/files/run/private-override.check
diff --git a/test/files/run/programmatic-main.check b/test/files/run/programmatic-main.check
index bdf76dd..1cd94cc 100644
--- a/test/files/run/programmatic-main.check
+++ b/test/files/run/programmatic-main.check
@@ -1,31 +1,27 @@
-             phase name  id  description
-             ----------  --  -----------
-                 parser   1  parse source into ASTs, perform simple desugaring
-                  namer   2  resolve names, attach symbols to named trees
-         packageobjects   3  load package objects
-                  typer   4  the meat and potatoes: type the trees
-                 patmat   5  translate match expressions
-         superaccessors   6  add super accessors in traits and nested classes
-             extmethods   7  add extension methods for inline classes
-                pickler   8  serialize symbol tables
-              refchecks   9  reference/override checking, translate nested objects
-                uncurry  10  uncurry, translate function values to anonymous classes
-              tailcalls  11  replace tail calls by jumps
-             specialize  12  @specialized-driven class and method specialization
-          explicitouter  13  this refs to outer pointers, translate patterns
-                erasure  14  erase types, add interfaces for traits
-            posterasure  15  clean up erased inline classes
-               lazyvals  16  allocate bitmaps, translate lazy vals into lazified defs
-             lambdalift  17  move nested functions to top level
-           constructors  18  move field definitions into constructors
-                flatten  19  eliminate inner classes
-                  mixin  20  mixin composition
-                cleanup  21  platform-specific cleanups, generate reflective calls
-                  icode  22  generate portable intermediate code
-                inliner  23  optimization: do inlining
-inlineExceptionHandlers  24  optimization: inline exception handlers
-               closelim  25  optimization: eliminate uncalled closures
-                    dce  26  optimization: eliminate dead code
-                    jvm  27  generate JVM bytecode
-               terminal  28  The last phase in the compiler chain
-
+    phase name  id  description
+    ----------  --  -----------
+        parser   1  parse source into ASTs, perform simple desugaring
+         namer   2  resolve names, attach symbols to named trees
+packageobjects   3  load package objects
+         typer   4  the meat and potatoes: type the trees
+        patmat   5  translate match expressions
+superaccessors   6  add super accessors in traits and nested classes
+    extmethods   7  add extension methods for inline classes
+       pickler   8  serialize symbol tables
+     refchecks   9  reference/override checking, translate nested objects
+       uncurry  10  uncurry, translate function values to anonymous classes
+     tailcalls  11  replace tail calls by jumps
+    specialize  12  @specialized-driven class and method specialization
+ explicitouter  13  this refs to outer pointers
+       erasure  14  erase types, add interfaces for traits
+   posterasure  15  clean up erased inline classes
+      lazyvals  16  allocate bitmaps, translate lazy vals into lazified defs
+    lambdalift  17  move nested functions to top level
+  constructors  18  move field definitions into constructors
+       flatten  19  eliminate inner classes
+         mixin  20  mixin composition
+       cleanup  21  platform-specific cleanups, generate reflective calls
+    delambdafy  22  remove lambdas
+         icode  23  generate portable intermediate code
+           jvm  24  generate JVM bytecode
+      terminal  25  the last phase during a compilation run
diff --git a/test/files/run/programmatic-main.scala b/test/files/run/programmatic-main.scala
index 7bc5c5d..542ac27 100644
--- a/test/files/run/programmatic-main.scala
+++ b/test/files/run/programmatic-main.scala
@@ -1,3 +1,5 @@
+
+import scala.language.postfixOps
 import scala.tools.nsc._
 import io.Path
 
@@ -5,7 +7,7 @@ object Test {
   val cwd = Option(System.getProperty("partest.cwd")) getOrElse "."
   val basedir = Path(cwd).parent / "lib" path
   val baseargs = Array("-usejavacp", "-bootclasspath", basedir + "/scala-library.jar", "-cp", basedir + "/scala-compiler.jar")
-    
+
   def main(args: Array[String]): Unit = {
     Console.withErr(Console.out) {
       Main process (baseargs ++ "-Xpluginsdir /does/not/exist/foo/quux -Xshow-phases".split(' '))
diff --git a/test/files/run/proxy.scala b/test/files/run/proxy.scala
index ea222cb..8a6385d 100644
--- a/test/files/run/proxy.scala
+++ b/test/files/run/proxy.scala
@@ -1,16 +1,16 @@
 object Test extends App {
   val p = new Proxy {
-    def self = 2 
+    def self = 2
   }
   println(p equals 1)
   println(p equals 2)
   println(p equals 3)
   println(p equals null)
-  
+
   case class Bippy(a: String) extends Proxy {
     def self = a
   }
-  
+
   val label = Bippy("bippy!")
   println(label == label)
   println(label == "bippy!")
diff --git a/test/files/run/range-unit.scala b/test/files/run/range-unit.scala
index ece0d98..d8ebc00 100644
--- a/test/files/run/range-unit.scala
+++ b/test/files/run/range-unit.scala
@@ -6,14 +6,14 @@ object Test {
     ( (-3 to 3) ++ List(17, 127, Int.MaxValue, Int.MinValue + 1)
     ).distinct.sortBy(n => (math.abs(n), n))
   ) :+ Int.MinValue
-  
+
   // reducing output a little
   val endpoints = numbers filterNot Set(-3, -2, 2, 17, 127)
-  
+
   def num(n: Int) = {
     val frommax = Int.MaxValue - n
     val frommin = Int.MinValue - n
-    
+
     if (n > 0) {
       if (frommax == 0) "MAX"
       else if (frommax < 1000) "MAX-" + frommax
@@ -25,7 +25,7 @@ object Test {
       else "" + n
     }
   }
-  
+
   def run[T](body: => Range): List[Any] = {
     try   { val r = body ; if (r.isEmpty) List(r.length) else List(num(r.length), num(r.head), num(r.last)) }
     catch { case e: IllegalArgumentException => List("---\n    " + e) }
diff --git a/test/files/run/range.check b/test/files/run/range.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/range.scala b/test/files/run/range.scala
index f08b210..4637ab8 100644
--- a/test/files/run/range.scala
+++ b/test/files/run/range.scala
@@ -6,7 +6,7 @@ object Test {
     range.foreach(buffer += _);
     assert(buffer.toList == range.iterator.toList, buffer.toList+"/"+range.iterator.toList)
   }
-  
+
   def boundaryTests() = {
     // #4321
     assert((Int.MinValue to Int.MaxValue by Int.MaxValue).size == 3)
@@ -16,32 +16,43 @@ object Test {
       catch { case _: IllegalArgumentException => true }
     )
     assert(caught)
+    // #7432
+    val noElemAtMin = (
+      try   { (10 until 10).min ; false }
+      catch { case _: NoSuchElementException => true }
+    )
+    assert(noElemAtMin)
+    val noElemAtMax = (
+      try   { (10 until 10).max ; false }
+      catch { case _: NoSuchElementException => true }
+    )
+    assert(noElemAtMax)
   }
-  
+
   case class GR[T](val x: T)(implicit val num: Integral[T]) {
     import num._
-    
+
     def negated = GR[T](-x)
-    
+
     def gr1 = NumericRange(x, x, x)
     def gr2 = NumericRange.inclusive(x, x, x)
     def gr3 = NumericRange(x, x * fromInt(10), x)
     def gr4 = NumericRange.inclusive(x, x * fromInt(10), x)
     def gr5 = gr3.toList ::: negated.gr3.toList
-    
+
     def check = {
       assert(gr1.isEmpty && !gr2.isEmpty)
-      assert(gr3.size == 9 && gr4.size == 10)      
+      assert(gr3.size == 9 && gr4.size == 10)
       assert(gr5.sum == num.zero, gr5.toString)
       assert(!(gr3 contains (x * fromInt(10))))
       assert((gr4 contains (x * fromInt(10))))
     }
   }
-  
+
   def main(args: Array[String]): Unit = {
     implicit val imp1 = Numeric.BigDecimalAsIfIntegral
     implicit val imp2 = Numeric.DoubleAsIfIntegral
-    
+
     val _grs = List[GR[_]](
       GR(BigDecimal(5.0)),
       GR(BigInt(5)),
@@ -51,21 +62,21 @@ object Test {
     )
     val grs = _grs ::: (_grs map (_.negated))
     grs foreach (_.check)
-    
+
     assert(NumericRange(1, 10, 1) sameElements (1 until 10))
     assert(NumericRange.inclusive(1, 10, 1) sameElements (1 to 10))
     assert(NumericRange.inclusive(1, 100, 3) sameElements (1 to 100 by 3))
-    
+
     // #2518
     assert((3L to 7 by 2) sameElements List(3L, 5L, 7L))
-    
+
     rangeForeach(1 to 10);
     rangeForeach(1 until 10);
     rangeForeach(10 to 1 by -1);
     rangeForeach(10 until 1 by -1);
     rangeForeach(10 to 1 by -3);
     rangeForeach(10 until 1 by -3);
-    
+
     // living on the edges
     boundaryTests()
   }
diff --git a/test/files/run/records.scala b/test/files/run/records.scala
index 96b0b4c..f2b582b 100644
--- a/test/files/run/records.scala
+++ b/test/files/run/records.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ reflectiveCalls }
+
 trait C {
   def f: Int
 }
@@ -16,10 +19,10 @@ object Test {
   val y = new C {
     def f = 2
     def g = " world"
-  } 
-    
+  }
+
   val z: T = y
-  
+
   def main(args: Array[String]): Unit = {
     assert(x.f+z.f == 3)
     assert(x.g+z.g == "hello world")
diff --git a/test/files/run/reflect-priv-ctor.check b/test/files/run/reflect-priv-ctor.check
new file mode 100644
index 0000000..a0fb194
--- /dev/null
+++ b/test/files/run/reflect-priv-ctor.check
@@ -0,0 +1 @@
+privately constructed
diff --git a/test/files/run/reflect-priv-ctor.scala b/test/files/run/reflect-priv-ctor.scala
new file mode 100644
index 0000000..9cb3e65
--- /dev/null
+++ b/test/files/run/reflect-priv-ctor.scala
@@ -0,0 +1,22 @@
+
+import language.postfixOps
+import reflect.runtime._
+import universe._
+
+object Test {
+
+  class Foo private () {
+    override def toString = "privately constructed"
+  }
+
+  def main(args: Array[String]): Unit = {
+
+    //val foo = new Foo  // no access
+    val klass = currentMirror reflectClass typeOf[Foo].typeSymbol.asClass
+    val init  = typeOf[Foo].members find { case m: MethodSymbol => m.isConstructor case _ => false } get
+    val ctor  = klass reflectConstructor init.asMethod
+    val foo   = ctor()   // no access?
+    Console println foo
+  }
+}
+
diff --git a/test/files/run/reflection-allmirrors-tostring.check b/test/files/run/reflection-allmirrors-tostring.check
index 2a3be29..3003cce 100644
--- a/test/files/run/reflection-allmirrors-tostring.check
+++ b/test/files/run/reflection-allmirrors-tostring.check
@@ -1,14 +1,14 @@
 class mirror for C (bound to null)
 module mirror for M (bound to null)
 instance mirror for an instance of C
-field mirror for C.f1 (bound to an instance of C)
-field mirror for C.f2 (bound to an instance of C)
-method mirror for C.m1: Int (bound to an instance of C)
-method mirror for C.m2(): Int (bound to an instance of C)
-method mirror for C.m3[T >: String <: Int]: T (bound to an instance of C)
-method mirror for C.m4[A, B <: A[Int]](x: A[B])(implicit y: Int): Nothing (bound to an instance of C)
-method mirror for C.m5(x: => Int, y: Int*): String (bound to an instance of C)
+field mirror for private[this] val f1: Int (bound to an instance of C)
+field mirror for private[this] var f2: Int (bound to an instance of C)
+method mirror for def m1: Int (bound to an instance of C)
+method mirror for def m2(): Int (bound to an instance of C)
+method mirror for def m3[T >: String <: Int]: T (bound to an instance of C)
+method mirror for def m4[A[_], B <: A[Int]](x: A[B])(implicit y: Int): Nothing (bound to an instance of C)
+method mirror for def m5(x: => Int,y: Int*): String (bound to an instance of C)
 class mirror for C.C (bound to an instance of C)
 module mirror for C.M (bound to an instance of C)
-constructor mirror for C.<init>(): C (bound to null)
-constructor mirror for C.C.<init>(): C.this.C (bound to an instance of C)
+constructor mirror for def <init>(): C (bound to null)
+constructor mirror for def <init>(): C.this.C (bound to an instance of C)
diff --git a/test/files/run/reflection-allmirrors-tostring.scala b/test/files/run/reflection-allmirrors-tostring.scala
index 73afff2..f0614e9 100644
--- a/test/files/run/reflection-allmirrors-tostring.scala
+++ b/test/files/run/reflection-allmirrors-tostring.scala
@@ -1,3 +1,4 @@
+import scala.language.higherKinds
 import scala.reflect.runtime.universe._
 
 class C {
@@ -26,18 +27,18 @@ object Test extends App {
   println(cm.reflect(new C))
 
   val im = cm.reflect(new C)
-  println(im.reflectField(typeOf[C].member(newTermName("f1")).asTerm))
-  println(im.reflectField(typeOf[C].member(newTermName("f2")).asTerm))
-  println(im.reflectMethod(typeOf[C].member(newTermName("m1")).asMethod))
-  println(im.reflectMethod(typeOf[C].member(newTermName("m2")).asMethod))
-  println(im.reflectMethod(typeOf[C].member(newTermName("m3")).asMethod))
-  println(im.reflectMethod(typeOf[C].member(newTermName("m4")).asMethod))
-  println(im.reflectMethod(typeOf[C].member(newTermName("m5")).asMethod))
-  println(im.reflectClass(typeOf[C].member(newTypeName("C")).asClass))
-  println(im.reflectModule(typeOf[C].member(newTermName("M")).asModule))
+  println(im.reflectField(typeOf[C].member(TermName("f1")).asTerm))
+  println(im.reflectField(typeOf[C].member(TermName("f2")).asTerm))
+  println(im.reflectMethod(typeOf[C].member(TermName("m1")).asMethod))
+  println(im.reflectMethod(typeOf[C].member(TermName("m2")).asMethod))
+  println(im.reflectMethod(typeOf[C].member(TermName("m3")).asMethod))
+  println(im.reflectMethod(typeOf[C].member(TermName("m4")).asMethod))
+  println(im.reflectMethod(typeOf[C].member(TermName("m5")).asMethod))
+  println(im.reflectClass(typeOf[C].member(TypeName("C")).asClass))
+  println(im.reflectModule(typeOf[C].member(TermName("M")).asModule))
 
   val c = cm.staticClass("C")
-  val cc = typeOf[C].member(newTypeName("C")).asClass
-  println(cm.reflectClass(c).reflectConstructor(c.typeSignature.member(nme.CONSTRUCTOR).asMethod))
-  println(im.reflectClass(cc).reflectConstructor(cc.typeSignature.member(nme.CONSTRUCTOR).asMethod))
-}
\ No newline at end of file
+  val cc = typeOf[C].member(TypeName("C")).asClass
+  println(cm.reflectClass(c).reflectConstructor(c.info.member(termNames.CONSTRUCTOR).asMethod))
+  println(im.reflectClass(cc).reflectConstructor(cc.info.member(termNames.CONSTRUCTOR).asMethod))
+}
diff --git a/test/files/run/reflection-companion.check b/test/files/run/reflection-companion.check
new file mode 100644
index 0000000..5dbff99
--- /dev/null
+++ b/test/files/run/reflection-companion.check
@@ -0,0 +1,6 @@
+C#MOD
+C#CLS
+C#CLS
+NoSymbol#???
+NoSymbol#???
+NoSymbol#???
diff --git a/test/files/run/reflection-companion.scala b/test/files/run/reflection-companion.scala
new file mode 100644
index 0000000..0f62dea
--- /dev/null
+++ b/test/files/run/reflection-companion.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+class C
+object C
+
+object Test extends App {
+  type T = C
+
+  println(showRaw(symbolOf[C].companion, printKinds = true))
+  println(showRaw(symbolOf[C].companion.companion, printKinds = true))
+  println(showRaw(symbolOf[C.type].companion, printKinds = true))
+  println(showRaw(symbolOf[T].companion, printKinds = true))
+  println(showRaw(cm.staticPackage("scala").moduleClass.companion, printKinds = true))
+  println(showRaw(cm.staticPackage("scala").companion, printKinds = true))
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-companiontype.check b/test/files/run/reflection-companiontype.check
new file mode 100644
index 0000000..f87bc04
--- /dev/null
+++ b/test/files/run/reflection-companiontype.check
@@ -0,0 +1,12 @@
+TypeRefs
+TypeRef(ThisType(<empty>#PKC), C#MODC, List())
+TypeRef(ThisType(<empty>#PKC), C#CLS, List())
+TypeRef(ThisType(<empty>#PKC), C#CLS, List())
+ClassInfoTypes
+TypeRef(ThisType(<empty>#PKC), C#MODC, List())
+TypeRef(ThisType(<empty>#PKC), C#CLS, List())
+TypeRef(ThisType(<empty>#PKC), C#CLS, List())
+Unrelated
+NoType
+NoType
+NoType
diff --git a/test/files/run/reflection-companiontype.scala b/test/files/run/reflection-companiontype.scala
new file mode 100644
index 0000000..0f63457
--- /dev/null
+++ b/test/files/run/reflection-companiontype.scala
@@ -0,0 +1,22 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+class C
+object C
+
+object Test extends App {
+  type T = C
+
+  println("TypeRefs")
+  println(showRaw(typeOf[C].companion, printKinds = true))
+  println(showRaw(typeOf[C].companion.companion, printKinds = true))
+  println(showRaw(typeOf[C.type].companion, printKinds = true))
+  println("ClassInfoTypes")
+  println(showRaw(typeOf[C].typeSymbol.info.companion, printKinds = true))
+  println(showRaw(typeOf[C].typeSymbol.info.companion.typeSymbol.info.companion, printKinds = true))
+  println(showRaw(typeOf[C.type].typeSymbol.info.companion, printKinds = true))
+  println("Unrelated")
+  println(showRaw(typeOf[T].companion, printKinds = true))
+  println(showRaw(cm.staticPackage("scala").moduleClass.asType.toType.companion, printKinds = true))
+  println(showRaw(cm.staticPackage("scala").info.companion, printKinds = true))
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-constructormirror-inner-badpath.scala b/test/files/run/reflection-constructormirror-inner-badpath.scala
index 4bccff2..e7c06b3 100644
--- a/test/files/run/reflection-constructormirror-inner-badpath.scala
+++ b/test/files/run/reflection-constructormirror-inner-badpath.scala
@@ -12,8 +12,8 @@ class Foo{
   val classTag = implicitly[ClassTag[R]]
   val cl = classTag.runtimeClass.getClassLoader
   val cm = runtimeMirror(cl)
-  val constructor = expectedType.tpe.member( nme.CONSTRUCTOR ).asMethod
-  val sig = constructor.typeSignature
+  val constructor = expectedType.tpe.member( termNames.CONSTRUCTOR ).asMethod
+  val sig = constructor.info
   val sym = cm.classSymbol( classTag.runtimeClass )
   try {
     val cls = cm.reflectClass( sym )
diff --git a/test/files/run/reflection-constructormirror-inner-good.scala b/test/files/run/reflection-constructormirror-inner-good.scala
index 8613321..c09da5b 100644
--- a/test/files/run/reflection-constructormirror-inner-good.scala
+++ b/test/files/run/reflection-constructormirror-inner-good.scala
@@ -12,8 +12,8 @@ class Foo{
   val classTag = implicitly[ClassTag[R]]
   val cl = classTag.runtimeClass.getClassLoader
   val cm = runtimeMirror(cl)
-  val constructor = expectedType.tpe.member( nme.CONSTRUCTOR ).asMethod
-  val sig = constructor.typeSignature
+  val constructor = expectedType.tpe.member( termNames.CONSTRUCTOR ).asMethod
+  val sig = constructor.info
   val sym = cm.classSymbol( classTag.runtimeClass )
   val cls = cm.reflect( this ).reflectClass( sym )
   cls.reflectConstructor( constructor )( 5,"test" ).asInstanceOf[R]
diff --git a/test/files/run/reflection-constructormirror-nested-badpath.scala b/test/files/run/reflection-constructormirror-nested-badpath.scala
index 2983f18..cf0de77 100644
--- a/test/files/run/reflection-constructormirror-nested-badpath.scala
+++ b/test/files/run/reflection-constructormirror-nested-badpath.scala
@@ -8,8 +8,8 @@ class Foo{
   val classTag = implicitly[ClassTag[R]]
   val cl = classTag.runtimeClass.getClassLoader
   val cm = runtimeMirror(cl)
-  val constructor = expectedType.tpe.member( nme.CONSTRUCTOR ).asMethod
-  val sig = constructor.typeSignature
+  val constructor = expectedType.tpe.member( termNames.CONSTRUCTOR ).asMethod
+  val sig = constructor.info
   val sym = cm.classSymbol( classTag.runtimeClass )
   try {
     val cls = cm.reflect( this ).reflectClass( sym )
diff --git a/test/files/run/reflection-constructormirror-nested-good.scala b/test/files/run/reflection-constructormirror-nested-good.scala
index 0b7c413..363b720 100644
--- a/test/files/run/reflection-constructormirror-nested-good.scala
+++ b/test/files/run/reflection-constructormirror-nested-good.scala
@@ -8,8 +8,8 @@ class Foo{
   val classTag = implicitly[ClassTag[R]]
   val cl = classTag.runtimeClass.getClassLoader
   val cm = runtimeMirror(cl)
-  val constructor = expectedType.tpe.member( nme.CONSTRUCTOR ).asMethod
-  val sig = constructor.typeSignature
+  val constructor = expectedType.tpe.member( termNames.CONSTRUCTOR ).asMethod
+  val sig = constructor.info
   val sym = cm.classSymbol( classTag.runtimeClass )
   val cls = cm.reflectClass( sym )
   cls.reflectConstructor( constructor )( 5,"test" ).asInstanceOf[R]
diff --git a/test/files/run/reflection-constructormirror-toplevel-badpath.scala b/test/files/run/reflection-constructormirror-toplevel-badpath.scala
index cf92929..eda4aa0 100644
--- a/test/files/run/reflection-constructormirror-toplevel-badpath.scala
+++ b/test/files/run/reflection-constructormirror-toplevel-badpath.scala
@@ -13,8 +13,8 @@ class Foo{
   val classTag = implicitly[ClassTag[R]]
   val cl = classTag.runtimeClass.getClassLoader
   val cm = runtimeMirror(cl)
-  val constructor = expectedType.tpe.member( nme.CONSTRUCTOR ).asMethod
-  val sig = constructor.typeSignature
+  val constructor = expectedType.tpe.member( termNames.CONSTRUCTOR ).asMethod
+  val sig = constructor.info
   val sym = cm.classSymbol( classTag.runtimeClass )
   try {
     val cls = cm.reflect( this ).reflectClass( sym )
diff --git a/test/files/run/reflection-constructormirror-toplevel-good.scala b/test/files/run/reflection-constructormirror-toplevel-good.scala
index b68134b..9842d01 100644
--- a/test/files/run/reflection-constructormirror-toplevel-good.scala
+++ b/test/files/run/reflection-constructormirror-toplevel-good.scala
@@ -13,8 +13,8 @@ class Foo{
   val classTag = implicitly[ClassTag[R]]
   val cl = classTag.runtimeClass.getClassLoader
   val cm = runtimeMirror(cl)
-  val constructor = expectedType.tpe.member( nme.CONSTRUCTOR ).asMethod
-  val sig = constructor.typeSignature
+  val constructor = expectedType.tpe.member( termNames.CONSTRUCTOR ).asMethod
+  val sig = constructor.info
   val sym = cm.classSymbol( classTag.runtimeClass )
   val cls = cm.reflectClass( sym )
   cls.reflectConstructor( constructor )( 5,"test" ).asInstanceOf[R]
diff --git a/test/files/run/reflection-enclosed-basic.scala b/test/files/run/reflection-enclosed-basic.scala
index 1dcb6c2..e001207 100644
--- a/test/files/run/reflection-enclosed-basic.scala
+++ b/test/files/run/reflection-enclosed-basic.scala
@@ -12,7 +12,7 @@ private object B6 extends B2 { override def toString = "B6"; override def foo =
 object Test extends App {
   def testMethodInvocation(instance: Any) = {
     val instanceMirror = cm.reflect(instance)
-    val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+    val method = instanceMirror.symbol.info.decl(TermName("foo")).asMethod
     val methodMirror = instanceMirror.reflectMethod(method)
     println(methodMirror())
   }
@@ -20,7 +20,7 @@ object Test extends App {
   def testNestedClass(name: String) = {
     val sym = cm.staticClass(name)
     println(sym)
-    val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+    val ctor = sym.info.decl(termNames.CONSTRUCTOR).asMethod
     val ctorMirror = cm.reflectClass(sym).reflectConstructor(ctor)
     val instance = ctorMirror()
     println(instance)
diff --git a/test/files/run/reflection-enclosed-inner-basic.scala b/test/files/run/reflection-enclosed-inner-basic.scala
index 2b2c701..fd81a8d 100644
--- a/test/files/run/reflection-enclosed-inner-basic.scala
+++ b/test/files/run/reflection-enclosed-inner-basic.scala
@@ -14,19 +14,19 @@ class B {
 object Test extends App {
   val b = cm.classSymbol(classTag[B].runtimeClass)
   println(b)
-  println(b.typeSignature.declarations.toList)
+  println(b.info.decls.toList)
 
   def testMethodInvocation(instance: Any) = {
     val instanceMirror = cm.reflect(instance)
-    val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+    val method = instanceMirror.symbol.info.decl(TermName("foo")).asMethod
     val methodMirror = instanceMirror.reflectMethod(method)
     println(methodMirror())
   }
 
   def testInnerClass(name: String) = {
-    val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+    val sym = b.info.decl(TypeName(name)).asClass
     println(sym)
-    val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+    val ctor = sym.info.decl(termNames.CONSTRUCTOR).asMethod
     val ctorMirror = cm.reflect(new B).reflectClass(sym).reflectConstructor(ctor)
     val instance = ctorMirror()
     println(instance)
@@ -37,7 +37,7 @@ object Test extends App {
   testInnerClass("B2")
 
   def testInnerModule(name: String) = {
-    val sym = b.typeSignature.declaration(newTermName(name)).asModule
+    val sym = b.info.decl(TermName(name)).asModule
     println(sym)
     val moduleMirror = cm.reflect(new B).reflectModule(sym)
     val instance = moduleMirror.instance
diff --git a/test/files/run/reflection-enclosed-inner-inner-basic.scala b/test/files/run/reflection-enclosed-inner-inner-basic.scala
index 1b9e19d..45dfb8a 100644
--- a/test/files/run/reflection-enclosed-inner-inner-basic.scala
+++ b/test/files/run/reflection-enclosed-inner-inner-basic.scala
@@ -16,19 +16,19 @@ class B {
 object Test extends App {
   val b = cm.classSymbol(classTag[B#BB].runtimeClass)
   println(b)
-  println(b.typeSignature.declarations.toList)
+  println(b.info.decls.toList)
 
   def testMethodInvocation(instance: Any) = {
     val instanceMirror = cm.reflect(instance)
-    val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+    val method = instanceMirror.symbol.info.decl(TermName("foo")).asMethod
     val methodMirror = instanceMirror.reflectMethod(method)
     println(methodMirror())
   }
 
   def testInnerClass(name: String) = {
-    val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+    val sym = b.info.decl(TypeName(name)).asClass
     println(sym)
-    val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+    val ctor = sym.info.decl(termNames.CONSTRUCTOR).asMethod
     val outer1 = new B
     val outer2 = new outer1.BB
     val ctorMirror = cm.reflect(outer2).reflectClass(sym).reflectConstructor(ctor)
@@ -41,7 +41,7 @@ object Test extends App {
   testInnerClass("B2")
 
   def testInnerModule(name: String) = {
-    val sym = b.typeSignature.declaration(newTermName(name)).asModule
+    val sym = b.info.decl(TermName(name)).asModule
     println(sym)
     val outer1 = new B
     val outer2 = new outer1.BB
diff --git a/test/files/run/reflection-enclosed-inner-nested-basic.scala b/test/files/run/reflection-enclosed-inner-nested-basic.scala
index 2800ee2..1973f47 100644
--- a/test/files/run/reflection-enclosed-inner-nested-basic.scala
+++ b/test/files/run/reflection-enclosed-inner-nested-basic.scala
@@ -17,19 +17,19 @@ object Test extends App {
   val outer1 = new B()
   val b = cm.moduleSymbol(classTag[outer1.BB.type].runtimeClass)
   println(b)
-  println(b.typeSignature.declarations.toList)
+  println(b.info.decls.toList)
 
   def testMethodInvocation(instance: Any) = {
     val instanceMirror = cm.reflect(instance)
-    val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+    val method = instanceMirror.symbol.info.decl(TermName("foo")).asMethod
     val methodMirror = instanceMirror.reflectMethod(method)
     println(methodMirror())
   }
 
   def testNestedClass(name: String) = {
-    val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+    val sym = b.info.decl(TypeName(name)).asClass
     println(sym)
-    val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+    val ctor = sym.info.decl(termNames.CONSTRUCTOR).asMethod
     val ctorMirror = cm.reflect(outer1.BB).reflectClass(sym).reflectConstructor(ctor)
     val instance = ctorMirror()
     println(instance)
@@ -40,7 +40,7 @@ object Test extends App {
   testNestedClass("B2")
 
   def testNestedModule(name: String) = {
-    val sym = b.typeSignature.declaration(newTermName(name)).asModule
+    val sym = b.info.decl(TermName(name)).asModule
     println(sym)
     val moduleMirror = cm.reflect(outer1.BB).reflectModule(sym)
     val instance = moduleMirror.instance
diff --git a/test/files/run/reflection-enclosed-nested-basic.scala b/test/files/run/reflection-enclosed-nested-basic.scala
index 8b740c2..4ff333d 100644
--- a/test/files/run/reflection-enclosed-nested-basic.scala
+++ b/test/files/run/reflection-enclosed-nested-basic.scala
@@ -14,19 +14,19 @@ object B {
 object Test extends App {
   val b = cm.moduleSymbol(classTag[B.type].runtimeClass)
   println(b)
-  println(b.typeSignature.declarations.toList)
+  println(b.info.decls.toList)
 
   def testMethodInvocation(instance: Any) = {
     val instanceMirror = cm.reflect(instance)
-    val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+    val method = instanceMirror.symbol.info.decl(TermName("foo")).asMethod
     val methodMirror = instanceMirror.reflectMethod(method)
     println(methodMirror())
   }
 
   def testNestedClass(name: String) = {
-    val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+    val sym = b.info.decl(TypeName(name)).asClass
     println(sym)
-    val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+    val ctor = sym.info.decl(termNames.CONSTRUCTOR).asMethod
     val ctorMirror = cm.reflectClass(sym).reflectConstructor(ctor)
     val instance = ctorMirror()
     println(instance)
@@ -37,7 +37,7 @@ object Test extends App {
   testNestedClass("B2")
 
   def testNestedModule(name: String) = {
-    val sym = b.typeSignature.declaration(newTermName(name)).asModule
+    val sym = b.info.decl(TermName(name)).asModule
     println(sym)
     val moduleMirror = cm.reflectModule(sym)
     val instance = moduleMirror.instance
diff --git a/test/files/run/reflection-enclosed-nested-inner-basic.scala b/test/files/run/reflection-enclosed-nested-inner-basic.scala
index 7466733..d45894c 100644
--- a/test/files/run/reflection-enclosed-nested-inner-basic.scala
+++ b/test/files/run/reflection-enclosed-nested-inner-basic.scala
@@ -16,19 +16,19 @@ object B {
 object Test extends App {
   val b = cm.classSymbol(classTag[B.BB].runtimeClass)
   println(b)
-  println(b.typeSignature.declarations.toList)
+  println(b.info.decls.toList)
 
   def testMethodInvocation(instance: Any) = {
     val instanceMirror = cm.reflect(instance)
-    val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+    val method = instanceMirror.symbol.info.decl(TermName("foo")).asMethod
     val methodMirror = instanceMirror.reflectMethod(method)
     println(methodMirror())
   }
 
   def testInnerClass(name: String) = {
-    val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+    val sym = b.info.decl(TypeName(name)).asClass
     println(sym)
-    val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+    val ctor = sym.info.decl(termNames.CONSTRUCTOR).asMethod
     val ctorMirror = cm.reflect(new B.BB).reflectClass(sym).reflectConstructor(ctor)
     val instance = ctorMirror()
     println(instance)
@@ -39,7 +39,7 @@ object Test extends App {
   testInnerClass("B2")
 
   def testInnerModule(name: String) = {
-    val sym = b.typeSignature.declaration(newTermName(name)).asModule
+    val sym = b.info.decl(TermName(name)).asModule
     println(sym)
     val moduleMirror = cm.reflect(new B.BB).reflectModule(sym)
     val instance = moduleMirror.instance
diff --git a/test/files/run/reflection-enclosed-nested-nested-basic.scala b/test/files/run/reflection-enclosed-nested-nested-basic.scala
index 8335ea4..8a630ea 100644
--- a/test/files/run/reflection-enclosed-nested-nested-basic.scala
+++ b/test/files/run/reflection-enclosed-nested-nested-basic.scala
@@ -16,19 +16,19 @@ object B {
 object Test extends App {
   val b = cm.moduleSymbol(classTag[B.BB.type].runtimeClass)
   println(b)
-  println(b.typeSignature.declarations.toList)
+  println(b.info.decls.toList)
 
   def testMethodInvocation(instance: Any) = {
     val instanceMirror = cm.reflect(instance)
-    val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+    val method = instanceMirror.symbol.info.decl(TermName("foo")).asMethod
     val methodMirror = instanceMirror.reflectMethod(method)
     println(methodMirror())
   }
 
   def testNestedClass(name: String) = {
-    val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+    val sym = b.info.decl(TypeName(name)).asClass
     println(sym)
-    val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+    val ctor = sym.info.decl(termNames.CONSTRUCTOR).asMethod
     val ctorMirror = cm.reflectClass(sym).reflectConstructor(ctor)
     val instance = ctorMirror()
     println(instance)
@@ -39,7 +39,7 @@ object Test extends App {
   testNestedClass("B2")
 
   def testNestedModule(name: String) = {
-    val sym = b.typeSignature.declaration(newTermName(name)).asModule
+    val sym = b.info.decl(TermName(name)).asModule
     println(sym)
     val moduleMirror = cm.reflectModule(sym)
     val instance = moduleMirror.instance
diff --git a/test/files/run/reflection-equality.check b/test/files/run/reflection-equality.check
index 65b5257..682326b 100644
--- a/test/files/run/reflection-equality.check
+++ b/test/files/run/reflection-equality.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> class X {
    def methodIntIntInt(x: Int, y: Int) = x+y
 }
@@ -22,17 +20,17 @@ im: reflect.runtime.universe.InstanceMirror
 scala> val cs: ClassSymbol = im.symbol
 cs: reflect.runtime.universe.ClassSymbol = class X
 
-scala> val ts: Type = cs.typeSignature
-ts: reflect.runtime.universe.Type = 
+scala> val ts: Type = cs.info
+ts: reflect.runtime.universe.Type =
 scala.AnyRef {
   def <init>(): X
   def methodIntIntInt(x: scala.Int,y: scala.Int): scala.Int
 }
 
-scala> val ms: MethodSymbol = ts.declaration(newTermName("methodIntIntInt")).asMethod
+scala> val ms: MethodSymbol = ts.decl(TermName("methodIntIntInt")).asMethod
 ms: reflect.runtime.universe.MethodSymbol = method methodIntIntInt
 
-scala> val MethodType( _, t1 ) = ms.typeSignature
+scala> val MethodType( _, t1 ) = ms.info
 t1: reflect.runtime.universe.Type = scala.Int
 
 scala> val t2 = typeOf[scala.Int]
diff --git a/test/files/run/reflection-equality.scala b/test/files/run/reflection-equality.scala
index 8fc8272..0416bc7 100644
--- a/test/files/run/reflection-equality.scala
+++ b/test/files/run/reflection-equality.scala
@@ -10,9 +10,9 @@ object Test extends ReplTest {
     |import scala.reflect.runtime.{ currentMirror => cm }
     |def im: InstanceMirror = cm.reflect(new X)
     |val cs: ClassSymbol = im.symbol
-    |val ts: Type = cs.typeSignature
-    |val ms: MethodSymbol = ts.declaration(newTermName("methodIntIntInt")).asMethod
-    |val MethodType( _, t1 ) = ms.typeSignature
+    |val ts: Type = cs.info
+    |val ms: MethodSymbol = ts.decl(TermName("methodIntIntInt")).asMethod
+    |val MethodType( _, t1 ) = ms.info
     |val t2 = typeOf[scala.Int]
     |t1 == t2
     |t1 =:= t2
diff --git a/test/files/run/reflection-fancy-java-classes.check b/test/files/run/reflection-fancy-java-classes.check
new file mode 100644
index 0000000..258208d
--- /dev/null
+++ b/test/files/run/reflection-fancy-java-classes.check
@@ -0,0 +1,12 @@
+===== JAVA POV =====
+class Foo_1$1
+getEnclosingClass = class Foo_1
+getEnclosingMethod = null
+getEnclosingConstructor = null
+isMemberClass = false
+isLocalClass = false
+isAnonymousClass = true
+
+===== SCALA POV =====
+class 1
+object Foo_1
diff --git a/test/files/run/reflection-fancy-java-classes/Foo_1.java b/test/files/run/reflection-fancy-java-classes/Foo_1.java
new file mode 100644
index 0000000..f6fd761
--- /dev/null
+++ b/test/files/run/reflection-fancy-java-classes/Foo_1.java
@@ -0,0 +1,5 @@
+public class Foo_1 {
+  public static Bar bar = new Bar();
+  private static class Bar {
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-fancy-java-classes/Test_2.scala b/test/files/run/reflection-fancy-java-classes/Test_2.scala
new file mode 100644
index 0000000..271960e
--- /dev/null
+++ b/test/files/run/reflection-fancy-java-classes/Test_2.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+  println("===== JAVA POV =====")
+  val jfancy = Class.forName("Foo_1$1")
+  println(jfancy)
+  println("getEnclosingClass = " + jfancy.getEnclosingClass)
+  println("getEnclosingMethod = " + jfancy.getEnclosingMethod)
+  println("getEnclosingConstructor = " + jfancy.getEnclosingConstructor)
+  println("isMemberClass = " + jfancy.isMemberClass)
+  println("isLocalClass = " + jfancy.isLocalClass)
+  println("isAnonymousClass = " + jfancy.isAnonymousClass)
+
+  println("")
+  println("===== SCALA POV =====")
+  val sfancy = cm.classSymbol(jfancy)
+  println(sfancy)
+  println(sfancy.owner)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-fieldmirror-accessorsareokay.scala b/test/files/run/reflection-fieldmirror-accessorsareokay.scala
index 1635402..0e75dcf 100644
--- a/test/files/run/reflection-fieldmirror-accessorsareokay.scala
+++ b/test/files/run/reflection-fieldmirror-accessorsareokay.scala
@@ -24,6 +24,6 @@ object Test extends App {
     }
   }
 
-  test(cs.typeSignature.declaration(newTermName("x")).asTerm)
-  test(cs.typeSignature.declaration(newTermName("x_$eq")).asTerm)
+  test(cs.info.decl(TermName("x")).asTerm)
+  test(cs.info.decl(TermName("x_$eq")).asTerm)
 }
diff --git a/test/files/run/reflection-fieldmirror-ctorparam.scala b/test/files/run/reflection-fieldmirror-ctorparam.scala
index b9d50fe..b5b6b21 100644
--- a/test/files/run/reflection-fieldmirror-ctorparam.scala
+++ b/test/files/run/reflection-fieldmirror-ctorparam.scala
@@ -10,7 +10,7 @@ object Test extends App {
 
   val im: InstanceMirror = cm.reflect(a)
   val cs = im.symbol
-  val f = cs.typeSignature.declaration(newTermName("x")).asTerm
+  val f = cs.info.decl(TermName("x")).asTerm
   try {
     val fm: FieldMirror = im.reflectField(f)
     println(fm.get)
diff --git a/test/files/run/reflection-fieldmirror-getsetval.scala b/test/files/run/reflection-fieldmirror-getsetval.scala
index 9022148..4fe0d2e 100644
--- a/test/files/run/reflection-fieldmirror-getsetval.scala
+++ b/test/files/run/reflection-fieldmirror-getsetval.scala
@@ -10,7 +10,7 @@ object Test extends App {
 
   val im: InstanceMirror = cm.reflect(a)
   val cs = im.symbol
-  val f = cs.typeSignature.declaration(newTermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
+  val f = cs.info.decl(TermName("x" + termNames.LOCAL_SUFFIX_STRING)).asTerm
   val fm: FieldMirror = im.reflectField(f)
   println(fm.get)
   fm.set(2)
diff --git a/test/files/run/reflection-fieldmirror-getsetvar.scala b/test/files/run/reflection-fieldmirror-getsetvar.scala
index abcf396..c64b0c4 100644
--- a/test/files/run/reflection-fieldmirror-getsetvar.scala
+++ b/test/files/run/reflection-fieldmirror-getsetvar.scala
@@ -10,7 +10,7 @@ object Test extends App {
 
   val im: InstanceMirror = cm.reflect(a)
   val cs = im.symbol
-  val f = cs.typeSignature.declaration(newTermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
+  val f = cs.info.decl(TermName("x" + termNames.LOCAL_SUFFIX_STRING)).asTerm
   val fm: FieldMirror = im.reflectField(f)
   println(fm.get)
   fm.set(2)
diff --git a/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala b/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala
index 2b4a9bb..ddc6c42 100644
--- a/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala
+++ b/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala
@@ -10,7 +10,7 @@ object Test extends App {
 
   val im: InstanceMirror = cm.reflect(a)
   val cs = im.symbol
-  val f = cs.typeSignature.declaration(newTermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
+  val f = cs.info.decl(TermName("x" + termNames.LOCAL_SUFFIX_STRING)).asTerm
   val fm: FieldMirror = im.reflectField(f)
   println(fm.symbol.isVar)
 }
diff --git a/test/files/run/reflection-fieldmirror-privatethis.scala b/test/files/run/reflection-fieldmirror-privatethis.scala
index ab838db..1ece465 100644
--- a/test/files/run/reflection-fieldmirror-privatethis.scala
+++ b/test/files/run/reflection-fieldmirror-privatethis.scala
@@ -10,7 +10,7 @@ object Test extends App {
 
   val im: InstanceMirror = cm.reflect(a)
   val cs = im.symbol
-  val f = cs.typeSignature.declaration(newTermName("x")).asTerm
+  val f = cs.info.decl(TermName("x")).asTerm
   val fm: FieldMirror = im.reflectField(f)
   println(fm.symbol.isVar)
   println(fm.get)
diff --git a/test/files/run/reflection-fieldsymbol-navigation.scala b/test/files/run/reflection-fieldsymbol-navigation.scala
index 4448724..33dc18a 100644
--- a/test/files/run/reflection-fieldsymbol-navigation.scala
+++ b/test/files/run/reflection-fieldsymbol-navigation.scala
@@ -5,7 +5,7 @@ class C {
 }
 
 object Test extends App {
-  val x = typeOf[C].member(newTermName("x")).asTerm
+  val x = typeOf[C].member(TermName("x")).asTerm
   println(x)
   println(x.isVar)
   println(x.accessed)
diff --git a/test/files/run/reflection-idtc.check b/test/files/run/reflection-idtc.check
new file mode 100644
index 0000000..9cdeb02
--- /dev/null
+++ b/test/files/run/reflection-idtc.check
@@ -0,0 +1,6 @@
+[X]X
+Int
+===
+[X]Id[X]
+Id[Int]
+Int
diff --git a/test/files/run/reflection-idtc.scala b/test/files/run/reflection-idtc.scala
new file mode 100644
index 0000000..f9eae61
--- /dev/null
+++ b/test/files/run/reflection-idtc.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+  val tb = cm.mkToolBox()
+  val idsym = tb.typecheck(q"type Id[X] = X").symbol.asType
+  val idTC1 = idsym.info
+  println(idTC1)
+  println(appliedType(idTC1, List(typeOf[Int])))
+  println("===")
+  val idTC2 = idsym.toType.etaExpand
+  println(idTC2)
+  println(appliedType(idTC2, List(typeOf[Int])))
+  println(appliedType(idTC2, List(typeOf[Int])).dealias)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-implClass.scala b/test/files/run/reflection-implClass.scala
index b3c0081..4242530 100644
--- a/test/files/run/reflection-implClass.scala
+++ b/test/files/run/reflection-implClass.scala
@@ -10,19 +10,19 @@ object Test extends App with Outer {
   import scala.reflect.runtime.universe._
   import scala.reflect.runtime.{currentMirror => cm}
 
-  assert(cm.classSymbol(classTag[Foo].runtimeClass).typeSignature.declaration(newTermName("bar")).typeSignature ==
-    cm.classSymbol(classTag[Bar].runtimeClass).typeSignature.declaration(newTermName("foo")).typeSignature)
+  assert(cm.classSymbol(classTag[Foo].runtimeClass).info.decl(TermName("bar")).info ==
+    cm.classSymbol(classTag[Bar].runtimeClass).info.decl(TermName("foo")).info)
 
   val s1 = implClass(classTag[Foo].runtimeClass)
   assert(s1 != NoSymbol)
-  assert(s1.typeSignature != NoType)
-  assert(s1.companionSymbol.typeSignature != NoType)
-  assert(s1.companionSymbol.typeSignature.declaration(newTermName("bar")) != NoSymbol)
+  assert(s1.info != NoType)
+  assert(s1.companion.info != NoType)
+  assert(s1.companion.info.decl(TermName("bar")) != NoSymbol)
   val s2 = implClass(classTag[Bar].runtimeClass)
   assert(s2 != NoSymbol)
-  assert(s2.typeSignature != NoType)
-  assert(s2.companionSymbol.typeSignature != NoType)
-  assert(s2.companionSymbol.typeSignature.declaration(newTermName("foo")) != NoSymbol)
+  assert(s2.info != NoType)
+  assert(s2.companion.info != NoType)
+  assert(s2.companion.info.decl(TermName("foo")) != NoSymbol)
   def implClass(clazz: Class[_]) = {
     val implClass = Class.forName(clazz.getName + "$class")
     cm.classSymbol(implClass)
diff --git a/test/files/run/reflection-implicit.scala b/test/files/run/reflection-implicit.scala
index 0bcb0bc..a6e9393 100644
--- a/test/files/run/reflection-implicit.scala
+++ b/test/files/run/reflection-implicit.scala
@@ -1,3 +1,5 @@
+
+import scala.language.implicitConversions
 import scala.reflect.runtime.universe._
 
 class C {
@@ -7,9 +9,9 @@ class C {
 }
 
 object Test extends App {
-  val decls = typeOf[C].typeSymbol.typeSignature.declarations.sorted.toList.filter(sym => !sym.isTerm || (sym.isMethod && !sym.asMethod.isConstructor))
+  val decls = typeOf[C].typeSymbol.info.decls.sorted.toList.filter(sym => !sym.isTerm || (sym.isMethod && !sym.asMethod.isConstructor))
   println(decls map (_.isImplicit))
-  val param = decls.find(_.name.toString == "d").get.asMethod.paramss.last.head
-  param.typeSignature
+  val param = decls.find(_.name.toString == "d").get.asMethod.paramLists.last.head
+  param.info
   println(param.isImplicit)
-}
\ No newline at end of file
+}
diff --git a/test/files/run/reflection-java-annotations.check b/test/files/run/reflection-java-annotations.check
index 2d37fff..72d4098 100644
--- a/test/files/run/reflection-java-annotations.check
+++ b/test/files/run/reflection-java-annotations.check
@@ -1 +1,4 @@
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
 List(JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = [101, 101], v102 = [102, 102], v103 = ['g', 'g'], v104 = [104, 104], v105 = [105L, 105L], v106 = [106.0, 106.0], v107 = [107.0, 107.0], v108 = [false, true], v11 = classOf[JavaAnnottee_1], v110 = ["hello", "world"], v111 = [classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]], v112 = [FOO, BAR], v113 = [JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = ' [...]
+=======
+new JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = Array(101, 101), v102 = Array(102, 102), v103 = Array('g', 'g'), v104 = Array(104, 104), v105 = Array(105L, 105L), v106 = Array(106.0, 106.0), v107 = Array(107.0, 107.0), v108 = Array(false, true), v11 = classOf[JavaAnnottee_1], v110 = Array("hello", "world"), v111 = Array(classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]), v112 = Array(FOO, BAR), v113 = Array(new JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 [...]
diff --git a/test/files/run/reflection-java-annotations/Test_2.scala b/test/files/run/reflection-java-annotations/Test_2.scala
index d2c3157..dec5b45 100644
--- a/test/files/run/reflection-java-annotations/Test_2.scala
+++ b/test/files/run/reflection-java-annotations/Test_2.scala
@@ -1,7 +1,9 @@
 object Test extends App {
   import scala.reflect.runtime.universe._
   val sym = typeOf[JavaAnnottee_1].typeSymbol
-  sym.typeSignature
+  sym.info
   sym.annotations foreach (_.javaArgs)
   println(sym.annotations)
+  println("=======")
+  sym.annotations.map(_.tree).map(println)
 }
\ No newline at end of file
diff --git a/test/files/run/reflection-java-crtp/Main_2.scala b/test/files/run/reflection-java-crtp/Main_2.scala
index fb5668f..b934786 100644
--- a/test/files/run/reflection-java-crtp/Main_2.scala
+++ b/test/files/run/reflection-java-crtp/Main_2.scala
@@ -3,6 +3,6 @@ object Test extends App {
   val enum = typeOf[JavaSimpleEnumeration_1].baseClasses(1).asClass
   // make sure that the E's in Enum<E extends Enum<E>> are represented by the same symbol
   val e1 = enum.typeParams(0).asType
-  val TypeBounds(_, TypeRef(_, _, List(TypeRef(_, e2: TypeSymbol, _)))) = e1.typeSignature
+  val TypeBounds(_, TypeRef(_, _, List(TypeRef(_, e2: TypeSymbol, _)))) = e1.info
   println(e1, e2, e1 eq e2)
 }
\ No newline at end of file
diff --git a/test/files/run/reflection-magicsymbols-invoke.check b/test/files/run/reflection-magicsymbols-invoke.check
index 352aefa..f580296 100644
--- a/test/files/run/reflection-magicsymbols-invoke.check
+++ b/test/files/run/reflection-magicsymbols-invoke.check
@@ -15,12 +15,12 @@ testing Any.!=: false
 testing Any.##: 50
 testing Any.==: true
 testing Any.asInstanceOf: class scala.ScalaReflectionException: Any.asInstanceOf requires a type argument, it cannot be invoked with mirrors
-testing Any.asInstanceOf: class scala.ScalaReflectionException: scala.Any.asInstanceOf[T0]: T0 takes 0 arguments
+testing Any.asInstanceOf: class scala.ScalaReflectionException: final def asInstanceOf[T0]: T0 takes 0 arguments
 testing Any.equals: true
 testing Any.getClass: class java.lang.String
 testing Any.hashCode: 50
 testing Any.isInstanceOf: class scala.ScalaReflectionException: Any.isInstanceOf requires a type argument, it cannot be invoked with mirrors
-testing Any.isInstanceOf: class scala.ScalaReflectionException: scala.Any.isInstanceOf[T0]: Boolean takes 0 arguments
+testing Any.isInstanceOf: class scala.ScalaReflectionException: final def isInstanceOf[T0]: Boolean takes 0 arguments
 testing Any.toString: 2
 ============
 AnyVal
@@ -28,7 +28,7 @@ it's important to print the list of AnyVal's members
 if some of them change (possibly, adding and/or removing magic symbols), we must update this test
 constructor AnyVal: ()AnyVal
 method getClass: ()Class[_ <: AnyVal]
-testing AnyVal.<init>: class java.lang.InstantiationException: null
+testing AnyVal.<init>: class scala.ScalaReflectionException: unsupported symbol constructor AnyVal when invoking bytecodeless method mirror for def <init>(): AnyVal (bound to null)
 testing AnyVal.getClass: class scala.ScalaReflectionException: expected a member of class Integer, you provided method scala.AnyVal.getClass
 ============
 AnyRef
@@ -36,12 +36,10 @@ it's important to print the list of AnyRef's members
 if some of them change (possibly, adding and/or removing magic symbols), we must update this test
 constructor Object: ()java.lang.Object
 method !=: (x$1: Any)Boolean
-method !=: (x$1: AnyRef)Boolean
 method ##: ()Int
 method $asInstanceOf: [T0]()T0
 method $isInstanceOf: [T0]()Boolean
 method ==: (x$1: Any)Boolean
-method ==: (x$1: AnyRef)Boolean
 method asInstanceOf: [T0]=> T0
 method clone: ()java.lang.Object
 method eq: (x$1: AnyRef)Boolean
@@ -61,9 +59,9 @@ method wait: (x$1: Long, x$2: Int)Unit
 testing Object.!=: false
 testing Object.##: 50
 testing Object.$asInstanceOf: class scala.ScalaReflectionException: AnyRef.$asInstanceOf is an internal method, it cannot be invoked with mirrors
-testing Object.$asInstanceOf: class scala.ScalaReflectionException: java.lang.Object.$asInstanceOf[T0](): T0 takes 0 arguments
+testing Object.$asInstanceOf: class scala.ScalaReflectionException: final def $asInstanceOf[T0](): T0 takes 0 arguments
 testing Object.$isInstanceOf: class scala.ScalaReflectionException: AnyRef.$isInstanceOf is an internal method, it cannot be invoked with mirrors
-testing Object.$isInstanceOf: class scala.ScalaReflectionException: java.lang.Object.$isInstanceOf[T0](): Boolean takes 0 arguments
+testing Object.$isInstanceOf: class scala.ScalaReflectionException: final def $isInstanceOf[T0](): Boolean takes 0 arguments
 testing Object.==: true
 testing Object.clone: class java.lang.CloneNotSupportedException: java.lang.String
 testing Object.eq: true
@@ -82,14 +80,11 @@ Array
 it's important to print the list of Array's members
 if some of them change (possibly, adding and/or removing magic symbols), we must update this test
 constructor Array: (_length: Int)Array[T]
-constructor Cloneable: ()java.lang.Cloneable
 method !=: (x$1: Any)Boolean
-method !=: (x$1: AnyRef)Boolean
 method ##: ()Int
 method $asInstanceOf: [T0]()T0
 method $isInstanceOf: [T0]()Boolean
 method ==: (x$1: Any)Boolean
-method ==: (x$1: AnyRef)Boolean
 method apply: (i: Int)T
 method asInstanceOf: [T0]=> T0
 method clone: ()Array[T]
@@ -120,5 +115,5 @@ testing String.+: 23
 ============
 CTM
 testing Predef.classOf: class scala.ScalaReflectionException: Predef.classOf is a compile-time function, it cannot be invoked with mirrors
-testing Predef.classOf: class scala.ScalaReflectionException: scala.Predef.classOf[T]: Class[T] takes 0 arguments
+testing Predef.classOf: class scala.ScalaReflectionException: def classOf[T]: Class[T] takes 0 arguments
 testing Universe.reify: class scala.ScalaReflectionException: scala.reflect.api.Universe.reify is a macro, i.e. a compile-time function, it cannot be invoked with mirrors
diff --git a/test/files/run/reflection-magicsymbols-invoke.scala b/test/files/run/reflection-magicsymbols-invoke.scala
index 5f39370..793f78b 100644
--- a/test/files/run/reflection-magicsymbols-invoke.scala
+++ b/test/files/run/reflection-magicsymbols-invoke.scala
@@ -9,7 +9,7 @@ package scala {
 }
 
 object Test extends App {
-  def key(sym: Symbol) = sym + ": " + sym.typeSignature
+  def key(sym: Symbol) = sym + ": " + sym.info
   def test(tpe: Type, receiver: Any, method: String, args: Any*) {
     def wrap[T](op: => T) =
       try {
@@ -24,11 +24,11 @@ object Test extends App {
       }
     print(s"testing ${tpe.typeSymbol.name}.$method: ")
     wrap({
-      if (method == nme.CONSTRUCTOR.toString) {
-        val ctor = tpe.declaration(nme.CONSTRUCTOR).asMethod
+      if (method == termNames.CONSTRUCTOR.toString) {
+        val ctor = tpe.decl(termNames.CONSTRUCTOR).asMethod
         cm.reflectClass(ctor.owner.asClass).reflectConstructor(ctor)(args: _*)
       } else {
-        val meth = tpe.declaration(newTermName(method).encodedName.toTermName).asMethod
+        val meth = tpe.decl(TermName(method).encodedName.toTermName).asMethod
         cm.reflect(receiver).reflectMethod(meth)(args: _*)
       }
     })
@@ -53,8 +53,8 @@ object Test extends App {
   println("============\nAnyVal")
   println("it's important to print the list of AnyVal's members")
   println("if some of them change (possibly, adding and/or removing magic symbols), we must update this test")
-  typeOf[AnyVal].declarations.toList.sortBy(key).foreach(sym => println(key(sym)))
-  test(typeOf[AnyVal], null, "<init>")
+  typeOf[AnyVal].decls.toList.sortBy(key).foreach(sym => println(key(sym)))
+  test(typeOf[AnyVal], null, termNames.CONSTRUCTOR.toString)
   test(typeOf[AnyVal], 2, "getClass")
 
   println("============\nAnyRef")
@@ -84,17 +84,17 @@ object Test extends App {
   println("============\nArray")
   println("it's important to print the list of Array's members")
   println("if some of them change (possibly, adding and/or removing magic symbols), we must update this test")
-  ArrayClass.typeSignature.members.toList.sortBy(key).foreach(sym => println(key(sym)))
-  test(ArrayClass.typeSignature, Array(1, 2), "length")
-  test(ArrayClass.typeSignature, Array(1, 2), "apply", 0)
-  test(ArrayClass.typeSignature, Array(1, 2), "update", 0, 0)
-  test(ArrayClass.typeSignature, Array(1, 2), "clone")
+  ArrayClass.info.members.toList.sortBy(key).foreach(sym => println(key(sym)))
+  test(ArrayClass.info, Array(1, 2), "length")
+  test(ArrayClass.info, Array(1, 2), "apply", 0)
+  test(ArrayClass.info, Array(1, 2), "update", 0, 0)
+  test(ArrayClass.info, Array(1, 2), "clone")
 
   println("============\nOther")
   test(typeOf[String], "2", "+", 3)
 
   println("============\nCTM")
-  test(PredefModule.moduleClass.typeSignature, Predef, "classOf")
-  test(PredefModule.moduleClass.typeSignature, Predef, "classOf", typeOf[String])
+  test(PredefModule.moduleClass.info, Predef, "classOf")
+  test(PredefModule.moduleClass.info, Predef, "classOf", typeOf[String])
   test(typeOf[scala.reflect.api.Universe], scala.reflect.runtime.universe, "reify", "2")
 }
\ No newline at end of file
diff --git a/test/files/run/reflection-magicsymbols-repl.check b/test/files/run/reflection-magicsymbols-repl.check
index 2535e3f..a1bee76 100644
--- a/test/files/run/reflection-magicsymbols-repl.check
+++ b/test/files/run/reflection-magicsymbols-repl.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> import scala.reflect.runtime.universe._
 import scala.reflect.runtime.universe._
 
@@ -19,9 +17,9 @@ scala> class A {
 defined class A
 
 scala> def test(n: Int): Unit = {
-  val sig = typeOf[A] member newTermName("foo" + n) typeSignature
+  val sig = typeOf[A] member TermName("foo" + n) info
   val x = sig.asInstanceOf[MethodType].params.head
-  println(x.typeSignature)
+  println(x.info)
 }
 warning: there were 1 feature warning(s); re-run with -feature for details
 test: (n: Int)Unit
diff --git a/test/files/run/reflection-magicsymbols-repl.scala b/test/files/run/reflection-magicsymbols-repl.scala
index 26127b8..c006e85 100644
--- a/test/files/run/reflection-magicsymbols-repl.scala
+++ b/test/files/run/reflection-magicsymbols-repl.scala
@@ -14,9 +14,9 @@ object Test extends ReplTest {
     |  def foo8(x: Singleton) = ???
     |}
     |def test(n: Int): Unit = {
-    |  val sig = typeOf[A] member newTermName("foo" + n) typeSignature
+    |  val sig = typeOf[A] member TermName("foo" + n) info
     |  val x = sig.asInstanceOf[MethodType].params.head
-    |  println(x.typeSignature)
+    |  println(x.info)
     |}
     |for (i <- 1 to 8) test(i)
     |""".stripMargin
diff --git a/test/files/run/reflection-magicsymbols-vanilla.scala b/test/files/run/reflection-magicsymbols-vanilla.scala
index 32819dc..328caf9 100644
--- a/test/files/run/reflection-magicsymbols-vanilla.scala
+++ b/test/files/run/reflection-magicsymbols-vanilla.scala
@@ -1,3 +1,5 @@
+import scala.language.postfixOps
+
 class A {
   def foo1(x: Int*) = ???
   def foo2(x: => Int) = ???
@@ -12,9 +14,9 @@ class A {
 object Test extends App {
   import scala.reflect.runtime.universe._
   def test(n: Int): Unit = {
-    val sig = typeOf[A] member newTermName("foo" + n) typeSignature
+    val sig = typeOf[A] member TermName("foo" + n) info
     val x = sig.asInstanceOf[MethodType].params.head
-    println(x.typeSignature)
+    println(x.info)
   }
   for (i <- 1 to 8) test(i)
 }
diff --git a/test/files/run/reflection-mem-typecheck.scala b/test/files/run/reflection-mem-typecheck.scala
index a312c2c..e3cabf6 100644
--- a/test/files/run/reflection-mem-typecheck.scala
+++ b/test/files/run/reflection-mem-typecheck.scala
@@ -21,6 +21,6 @@ object Test extends MemoryTest {
       foo(List(new A {}, new B {}))
     """.trim
     snippet = snippet + "\n" + (List.fill(50)(snippet.split("\n").last) mkString "\n")
-    tb.typeCheck(tb.parse(snippet))
+    tb.typecheck(tb.parse(snippet))
   }
 }
\ No newline at end of file
diff --git a/test/files/run/reflection-methodsymbol-params.scala b/test/files/run/reflection-methodsymbol-params.scala
index 45b1f96..bc1289a 100644
--- a/test/files/run/reflection-methodsymbol-params.scala
+++ b/test/files/run/reflection-methodsymbol-params.scala
@@ -13,12 +13,12 @@ class C {
 }
 
 object Test extends App {
-  println(typeOf[C].member(newTermName("x1")).asMethod.paramss)
-  println(typeOf[C].member(newTermName("x2")).asMethod.paramss)
-  println(typeOf[C].member(newTermName("x3")).asMethod.paramss)
-  println(typeOf[C].member(newTermName("x4")).asMethod.paramss)
-  println(typeOf[C].member(newTermName("y1")).asMethod.paramss)
-  println(typeOf[C].member(newTermName("y2")).asMethod.paramss)
-  println(typeOf[C].member(newTermName("y3")).asMethod.paramss)
-  println(typeOf[C].member(newTermName("y4")).asMethod.paramss)
+  println(typeOf[C].member(TermName("x1")).asMethod.paramLists)
+  println(typeOf[C].member(TermName("x2")).asMethod.paramLists)
+  println(typeOf[C].member(TermName("x3")).asMethod.paramLists)
+  println(typeOf[C].member(TermName("x4")).asMethod.paramLists)
+  println(typeOf[C].member(TermName("y1")).asMethod.paramLists)
+  println(typeOf[C].member(TermName("y2")).asMethod.paramLists)
+  println(typeOf[C].member(TermName("y3")).asMethod.paramLists)
+  println(typeOf[C].member(TermName("y4")).asMethod.paramLists)
 }
\ No newline at end of file
diff --git a/test/files/run/reflection-methodsymbol-returntype.scala b/test/files/run/reflection-methodsymbol-returntype.scala
index 392754d..74a9e5d 100644
--- a/test/files/run/reflection-methodsymbol-returntype.scala
+++ b/test/files/run/reflection-methodsymbol-returntype.scala
@@ -13,12 +13,12 @@ class C {
 }
 
 object Test extends App {
-  println(typeOf[C].member(newTermName("x1")).asMethod.returnType)
-  println(typeOf[C].member(newTermName("x2")).asMethod.returnType)
-  println(typeOf[C].member(newTermName("x3")).asMethod.returnType)
-  println(typeOf[C].member(newTermName("x4")).asMethod.returnType)
-  println(typeOf[C].member(newTermName("y1")).asMethod.returnType)
-  println(typeOf[C].member(newTermName("y2")).asMethod.returnType)
-  println(typeOf[C].member(newTermName("y3")).asMethod.returnType)
-  println(typeOf[C].member(newTermName("y4")).asMethod.returnType)
+  println(typeOf[C].member(TermName("x1")).asMethod.returnType)
+  println(typeOf[C].member(TermName("x2")).asMethod.returnType)
+  println(typeOf[C].member(TermName("x3")).asMethod.returnType)
+  println(typeOf[C].member(TermName("x4")).asMethod.returnType)
+  println(typeOf[C].member(TermName("y1")).asMethod.returnType)
+  println(typeOf[C].member(TermName("y2")).asMethod.returnType)
+  println(typeOf[C].member(TermName("y3")).asMethod.returnType)
+  println(typeOf[C].member(TermName("y4")).asMethod.returnType)
 }
\ No newline at end of file
diff --git a/test/files/run/reflection-methodsymbol-typeparams.scala b/test/files/run/reflection-methodsymbol-typeparams.scala
index bb0a3c3..56d37eb 100644
--- a/test/files/run/reflection-methodsymbol-typeparams.scala
+++ b/test/files/run/reflection-methodsymbol-typeparams.scala
@@ -13,12 +13,12 @@ class C {
 }
 
 object Test extends App {
-  println(typeOf[C].member(newTermName("x1")).asMethod.typeParams)
-  println(typeOf[C].member(newTermName("x2")).asMethod.typeParams)
-  println(typeOf[C].member(newTermName("x3")).asMethod.typeParams)
-  println(typeOf[C].member(newTermName("x4")).asMethod.typeParams)
-  println(typeOf[C].member(newTermName("y1")).asMethod.typeParams)
-  println(typeOf[C].member(newTermName("y2")).asMethod.typeParams)
-  println(typeOf[C].member(newTermName("y3")).asMethod.typeParams)
-  println(typeOf[C].member(newTermName("y4")).asMethod.typeParams)
+  println(typeOf[C].member(TermName("x1")).asMethod.typeParams)
+  println(typeOf[C].member(TermName("x2")).asMethod.typeParams)
+  println(typeOf[C].member(TermName("x3")).asMethod.typeParams)
+  println(typeOf[C].member(TermName("x4")).asMethod.typeParams)
+  println(typeOf[C].member(TermName("y1")).asMethod.typeParams)
+  println(typeOf[C].member(TermName("y2")).asMethod.typeParams)
+  println(typeOf[C].member(TermName("y3")).asMethod.typeParams)
+  println(typeOf[C].member(TermName("y4")).asMethod.typeParams)
 }
\ No newline at end of file
diff --git a/test/files/run/reflection-names.scala b/test/files/run/reflection-names.scala
index 2433c84..a297b85 100644
--- a/test/files/run/reflection-names.scala
+++ b/test/files/run/reflection-names.scala
@@ -4,10 +4,10 @@ object Test {
   val global = new Global(new Settings())
   import global._
 
-  val x1 = "abc" drop 1                      // "bc": String
-  val x2 = ("abc": TermName) drop 1          // "bc": TermName
-  val x3 = ("abc": TypeName) drop 1          // "bc": TypeName
-  val x4 = (("abc": TypeName): Name) drop 1  // "bc": Name
+  val x1 = "abc" drop 1                    // "bc": String
+  val x2 = TermName("abc") drop 1          // "bc": TermName
+  val x3 = TypeName("abc") drop 1          // "bc": TypeName
+  val x4 = (TypeName("abc"): Name) drop 1  // "bc": Name
 
   def main(args: Array[String]): Unit = {
     List(x1, x2, x3, x4) foreach (x => println(x.getClass.getName, x))
diff --git a/test/files/run/reflection-repl-classes.check b/test/files/run/reflection-repl-classes.check
index 2dd96a9..03a6aef 100644
--- a/test/files/run/reflection-repl-classes.check
+++ b/test/files/run/reflection-repl-classes.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> class A
 defined class A
 
@@ -19,10 +17,10 @@ scala> object defs {
   val cm = reflect.runtime.currentMirror
   val u = cm.universe
   val im = cm.reflect(new B)
-  val method = im.symbol.typeSignature.member(u.newTermName("foo")).asMethod
+  val method = im.symbol.info.member(u.TermName("foo")).asMethod
   val mm = im.reflectMethod(method)
 }
-defined module defs
+defined object defs
 
 scala> import defs._
 import defs._
diff --git a/test/files/run/reflection-repl-classes.scala b/test/files/run/reflection-repl-classes.scala
index 80e332c..048e6b8 100644
--- a/test/files/run/reflection-repl-classes.scala
+++ b/test/files/run/reflection-repl-classes.scala
@@ -12,7 +12,7 @@ object Test extends ReplTest {
     |  val cm = reflect.runtime.currentMirror
     |  val u = cm.universe
     |  val im = cm.reflect(new B)
-    |  val method = im.symbol.typeSignature.member(u.newTermName("foo")).asMethod
+    |  val method = im.symbol.info.member(u.TermName("foo")).asMethod
     |  val mm = im.reflectMethod(method)
     |}
     |import defs._
diff --git a/test/files/run/reflection-repl-elementary.check b/test/files/run/reflection-repl-elementary.check
index d9133f7..4a223e8 100644
--- a/test/files/run/reflection-repl-elementary.check
+++ b/test/files/run/reflection-repl-elementary.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> scala.reflect.runtime.universe.typeOf[List[Nothing]]
 res0: reflect.runtime.universe.Type = scala.List[Nothing]
 
diff --git a/test/files/run/reflection-sanitychecks.scala b/test/files/run/reflection-sanitychecks.scala
index f817f23..3f4873b 100644
--- a/test/files/run/reflection-sanitychecks.scala
+++ b/test/files/run/reflection-sanitychecks.scala
@@ -32,14 +32,14 @@ object Test extends App {
   def test(tpe: Type): Unit = {
     def failsafe(action: => Any): Any = try action catch { case ex: Throwable => ex.toString }
     println(s"=========members of ${tpe.typeSymbol.name} in a mirror of D=========")
-    println("field #1: " + failsafe(im.reflectField(tpe.member(newTermName("foo")).asTerm).get))
-    println("method #1: " + failsafe(im.reflectMethod(tpe.member(newTermName("bar")).asMethod)()))
-    println("field #2: " + failsafe(im.reflectField(tpe.member(newTermName("quux")).asTerm).get))
-    println("method #2: " + failsafe(im.reflectMethod(tpe.member(newTermName("baz")).asMethod)()))
-    println("constructor #1: " + failsafe(cm.reflectClass(im.symbol).reflectConstructor(tpe.member(newTermName("bar")).asMethod)()))
-    println("constructor #2: " + failsafe(cm.reflectClass(im.symbol).reflectConstructor(tpe.member(newTermName("<init>")).asMethod)()))
-    println("class: " + failsafe(im.reflectClass(tpe.member(newTypeName("C")).asClass).reflectConstructor(typeOf[C].member(newTypeName("C")).asClass.typeSignature.member(newTermName("<init>")).asMethod)()))
-    println("object: " + failsafe(im.reflectModule(tpe.member(newTermName("O")).asModule).instance))
+    println("field #1: " + failsafe(im.reflectField(tpe.member(TermName("foo")).asTerm).get))
+    println("method #1: " + failsafe(im.reflectMethod(tpe.member(TermName("bar")).asMethod)()))
+    println("field #2: " + failsafe(im.reflectField(tpe.member(TermName("quux")).asTerm).get))
+    println("method #2: " + failsafe(im.reflectMethod(tpe.member(TermName("baz")).asMethod)()))
+    println("constructor #1: " + failsafe(cm.reflectClass(im.symbol).reflectConstructor(tpe.member(TermName("bar")).asMethod)()))
+    println("constructor #2: " + failsafe(cm.reflectClass(im.symbol).reflectConstructor(tpe.member(TermName("<init>")).asMethod)()))
+    println("class: " + failsafe(im.reflectClass(tpe.member(TypeName("C")).asClass).reflectConstructor(typeOf[C].member(TypeName("C")).asClass.info.member(termNames.CONSTRUCTOR).asMethod)()))
+    println("object: " + failsafe(im.reflectModule(tpe.member(TermName("O")).asModule).instance))
     println()
   }
 
diff --git a/test/files/run/reflection-scala-annotations.check b/test/files/run/reflection-scala-annotations.check
new file mode 100644
index 0000000..5bc2786
--- /dev/null
+++ b/test/files/run/reflection-scala-annotations.check
@@ -0,0 +1,7 @@
+reflection-scala-annotations.scala:5: warning: Implementation restriction: subclassing Classfile does not
+make your annotation visible at runtime.  If that is what
+you want, you must write the annotation class in Java.
+class jann(x: Int, y: Array[Int]) extends ClassfileAnnotation
+      ^
+new sann(1, immutable.this.List.apply[Int](1, 2))
+new jann(y = Array(1, 2), x = 2)
diff --git a/test/files/run/reflection-scala-annotations.scala b/test/files/run/reflection-scala-annotations.scala
new file mode 100644
index 0000000..f6a6895
--- /dev/null
+++ b/test/files/run/reflection-scala-annotations.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.annotation._
+
+class sann(x: Int, y: List[Int]) extends StaticAnnotation
+class jann(x: Int, y: Array[Int]) extends ClassfileAnnotation
+
+@sann(1, List(1, 2))
+class S
+
+@jann(y = Array(1, 2), x = 2)
+class J
+
+object Test extends App {
+  println(symbolOf[S].annotations.head.tree)
+  println(symbolOf[J].annotations.head.tree)
+}
diff --git a/test/files/run/reflection-sorted-decls.scala b/test/files/run/reflection-sorted-decls.scala
index 5616e10..8dcb0f3 100644
--- a/test/files/run/reflection-sorted-decls.scala
+++ b/test/files/run/reflection-sorted-decls.scala
@@ -2,7 +2,7 @@ object Test  {
   def main(args: Array[String]) {
     class Foo(val a: Int, val b: Int, val c: Int)
     import scala.reflect.runtime.{currentMirror => cm}
-    val decls = cm.classSymbol(classOf[Foo]).typeSignature.declarations
+    val decls = cm.classSymbol(classOf[Foo]).info.decls
     decls.sorted.toList.filter(!_.isMethod) foreach System.out.println
   }
 }
diff --git a/test/files/run/reflection-sorted-members.check b/test/files/run/reflection-sorted-members.check
index c148e19..415e073 100644
--- a/test/files/run/reflection-sorted-members.check
+++ b/test/files/run/reflection-sorted-members.check
@@ -1,4 +1,3 @@
 value a
 value b
 value c
-value x
diff --git a/test/files/run/reflection-sorted-members.scala b/test/files/run/reflection-sorted-members.scala
index a837923..fa028c9 100644
--- a/test/files/run/reflection-sorted-members.scala
+++ b/test/files/run/reflection-sorted-members.scala
@@ -5,7 +5,7 @@ object Test  {
     class Bar(val x: Int)
     class Foo(val a: Int, val b: Int, val c: Int) extends Bar(a + b + c) with T1 with T2
     import scala.reflect.runtime.{currentMirror => cm}
-    val members = cm.classSymbol(classOf[Foo]).typeSignature.members
+    val members = cm.classSymbol(classOf[Foo]).info.members
     members.sorted.toList.filter(!_.isMethod) foreach System.out.println
   }
 }
diff --git a/test/files/run/reflection-sync-potpourri.scala b/test/files/run/reflection-sync-potpourri.scala
new file mode 100644
index 0000000..f65131f
--- /dev/null
+++ b/test/files/run/reflection-sync-potpourri.scala
@@ -0,0 +1,32 @@
+import scala.reflect.runtime.universe._
+
+// this test checks that under heavily multithreaded conditions:
+// 1) scala.reflect.runtime.universe, its rootMirror and definitions are initialized correctly
+// 2) symbols are correctly materialized into PackageScopes (no dupes)
+// 3) unpickling works okay even if we unpickle the same symbol a lot of times
+
+object Test extends App {
+  def foo[T: TypeTag](x: T) = typeOf[T].toString
+  val n = 1000
+  val rng = new scala.util.Random()
+  val types = List(
+    () => typeOf[java.lang.reflect.Method],
+    () => typeOf[java.lang.annotation.Annotation],
+    () => typeOf[scala.io.BufferedSource],
+    () => typeOf[scala.io.Codec])
+  val perms = types.permutations.toList
+  def force(lazytpe: () => Type): String = {
+    lazytpe().typeSymbol.info
+    lazytpe().toString
+  }
+  val diceRolls = List.fill(n)(rng.nextInt(perms.length))
+  val threads = (1 to n) map (i => new Thread(s"Reflector-$i") {
+    override def run(): Unit = {
+      val s1 = foo("42")
+      val s2 = perms(diceRolls(i - 1)).map(x => force(x)).sorted.mkString(", ")
+      assert(s1 == "String" || s1 == "java.lang.String")
+      assert(s2 == "java.lang.annotation.Annotation, java.lang.reflect.Method, scala.io.BufferedSource, scala.io.Codec")
+    }
+  })
+  threads foreach (_.start)
+}
\ No newline at end of file
diff --git a/test/files/pos/t5692c.check b/test/files/run/reflection-sync-subtypes.check
similarity index 100%
rename from test/files/pos/t5692c.check
rename to test/files/run/reflection-sync-subtypes.check
diff --git a/test/files/run/reflection-sync-subtypes.scala b/test/files/run/reflection-sync-subtypes.scala
new file mode 100644
index 0000000..7f75a46
--- /dev/null
+++ b/test/files/run/reflection-sync-subtypes.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+  val n = 1000
+  val rng = new scala.util.Random()
+  val tasks = List(
+    () => typeOf[List[Int]] <:< typeOf[List[T] forSome { type T }],
+    () => typeOf[List[T] forSome { type T }] <:< typeOf[List[Any]],
+    () => typeOf[Map[Int, Object]] <:< typeOf[Iterable[(Int, String)]],
+    () => typeOf[Expr[Any] { val mirror: rootMirror.type }] <:< typeOf[Expr[List[List[List[Int]]]]{ val mirror: rootMirror.type }])
+  val perms = tasks.permutations.toList
+  val diceRolls = List.fill(n)(rng.nextInt(perms.length))
+  val threads = (1 to n) map (i => new Thread(s"Reflector-$i") {
+    override def run(): Unit = {
+      val result = perms(diceRolls(i - 1)).map(_())
+      assert(result.sorted == List(false, false, true, true))
+    }
+  })
+  threads foreach (_.start)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-tags.check b/test/files/run/reflection-tags.check
new file mode 100644
index 0000000..375518e
--- /dev/null
+++ b/test/files/run/reflection-tags.check
@@ -0,0 +1 @@
+List()
diff --git a/test/files/run/reflection-tags.scala b/test/files/run/reflection-tags.scala
new file mode 100644
index 0000000..3d7c7b2
--- /dev/null
+++ b/test/files/run/reflection-tags.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.ClassTag
+
+object Test extends App {
+  var typeMembers = typeOf[scala.reflect.api.Universe].members.filter(sym => sym.isType && !sym.isClass).toList
+  typeMembers = typeMembers.filter(_.name != TypeName("ModifiersCreator")) // type ModifiersCreator = ModifiersExtractor
+  typeMembers = typeMembers.filter(_.name != TypeName("Importer")) // deprecated
+  typeMembers = typeMembers.filter(_.name != TypeName("Internal")) // internal
+  typeMembers = typeMembers.filter(_.name != TypeName("Compat")) // internal
+  typeMembers = typeMembers.filter(_.name != TypeName("BuildApi")) // deprecated
+  val tags = typeOf[scala.reflect.api.Universe].members.filter(sym => sym.isImplicit).toList
+
+  typeMembers.foreach(_.info)
+  tags.foreach(_.info)
+
+  val outliers = typeMembers.filter(tm => !tags.exists(tag => tag.info match {
+    case NullaryMethodType(TypeRef(_, sym, targ :: Nil)) => sym == typeOf[ClassTag[_]].typeSymbol && targ.typeSymbol == tm
+    case _ => false
+  }))
+  println(outliers)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-valueclasses-derived.scala b/test/files/run/reflection-valueclasses-derived.scala
index 6b08f98..8d25e29 100644
--- a/test/files/run/reflection-valueclasses-derived.scala
+++ b/test/files/run/reflection-valueclasses-derived.scala
@@ -6,7 +6,7 @@ class C(val x: Int) extends AnyVal {
 }
 
 object Test extends App {
-  println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(newTermName("foo")).asMethod)(2))
-  println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(newTermName("getClass")).asMethod)())
-  println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(newTermName("toString")).asMethod)())
+  println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(TermName("foo")).asMethod)(2))
+  println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(TermName("getClass")).asMethod)())
+  println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(TermName("toString")).asMethod)())
 }
\ No newline at end of file
diff --git a/test/files/run/reflection-valueclasses-magic.scala b/test/files/run/reflection-valueclasses-magic.scala
index c4a26e4..366b5fe 100644
--- a/test/files/run/reflection-valueclasses-magic.scala
+++ b/test/files/run/reflection-valueclasses-magic.scala
@@ -13,9 +13,9 @@ object Test extends App {
   def key(sym: Symbol) = {
     sym match {
       // initialize parameter symbols
-      case meth: MethodSymbol => meth.paramss.flatten.map(_.typeSignature)
+      case meth: MethodSymbol => meth.paramLists.flatten.map(_.info)
     }
-    sym + ": " + sym.typeSignature
+    sym + ": " + sym.info
   }
 
   def convert(value: Any, tpe: Type) = {
@@ -44,11 +44,11 @@ object Test extends App {
           val realex = scala.ExceptionUtils.unwrapThrowable(ex)
           println(realex.getClass + ": " + realex.getMessage)
       }
-    val meth = tpe.declaration(newTermName(method).encodedName.toTermName)
+    val meth = tpe.decl(TermName(method).encodedName.toTermName)
     val testees = if (meth.isMethod) List(meth.asMethod) else meth.asTerm.alternatives.map(_.asMethod)
     testees foreach (testee => {
-      val convertedArgs = args.zipWithIndex.map { case (arg, i) => convert(arg, testee.paramss.flatten.apply(i).typeSignature) }
-      print(s"testing ${tpe.typeSymbol.name}.$method(${testee.paramss.flatten.map(_.typeSignature).mkString(','.toString)}) with receiver = $receiver and args = ${convertedArgs.map(arg => arg + ' '.toString + arg.getClass).toList}: ")
+      val convertedArgs = args.zipWithIndex.map { case (arg, i) => convert(arg, testee.paramLists.flatten.apply(i).info) }
+      print(s"testing ${tpe.typeSymbol.name}.$method(${testee.paramLists.flatten.map(_.info).mkString(','.toString)}) with receiver = $receiver and args = ${convertedArgs.map(arg => arg + ' '.toString + arg.getClass).toList}: ")
       wrap(cm.reflect(receiver).reflectMethod(testee)(convertedArgs: _*))
     })
   }
diff --git a/test/files/run/reflection-valueclasses-standard.scala b/test/files/run/reflection-valueclasses-standard.scala
index 18a3d1f..b6b5a2e 100644
--- a/test/files/run/reflection-valueclasses-standard.scala
+++ b/test/files/run/reflection-valueclasses-standard.scala
@@ -5,8 +5,8 @@ import scala.reflect.{ClassTag, classTag}
 object Test extends App {
   def test[T: ClassTag: TypeTag](x: T) = {
     println(s"========${classTag[T].runtimeClass}========")
-    println(cm.reflect(x).reflectMethod(typeOf[T].member(newTermName("getClass")).asMethod)())
-    println(cm.reflect(x).reflectMethod(typeOf[T].member(newTermName("toString")).asMethod)())
+    println(cm.reflect(x).reflectMethod(typeOf[T].member(TermName("getClass")).asMethod)())
+    println(cm.reflect(x).reflectMethod(typeOf[T].member(TermName("toString")).asMethod)())
   }
 
   test(2.toByte)
diff --git a/test/files/run/reify-aliases.check b/test/files/run/reify-aliases.check
index aa846b9..da78422 100644
--- a/test/files/run/reify-aliases.check
+++ b/test/files/run/reify-aliases.check
@@ -1 +1 @@
-TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())
+TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())
diff --git a/test/files/run/reify-each-node-type.check b/test/files/run/reify-each-node-type.check
new file mode 100644
index 0000000..afc65ad
--- /dev/null
+++ b/test/files/run/reify-each-node-type.check
@@ -0,0 +1,35 @@
+ 1                                                             s  Ident
+ 2                                                        r.List  Select
+ 3                                                r.List.apply()  Apply
+ 4                                               r.List.apply(1)  Literal
+ 5                                           r.List.apply[Int]()  TypeApply
+ 6                                                      (1: Int)  Typed
+ 7                                           (null: r.List[Int])  AppliedTypeTree
+ 8                                                    { (); () }  Block
+ 9                                        { val x: Int = 0; () }  ValDef
+10                                             { val x = 0; () }  TypeTree
+11                                          if (true) () else ()  If
+12                                      { def f: Unit = (); () }  DefDef
+13                                    { def m = NN.super.q; () }  Super
+14                       { abstract trait A extends AnyRef; () }  ClassDef Template
+15                              { def f(x: Any): Unit = (); () }  EmptyTree
+16                                          (null: r.D with r.E)  CompoundTypeTree
+17                                          { type T = Int; () }  TypeDef
+18                            { type CC[T <: r.D] = r.C[T]; () }  TypeBoundsTree
+19                          try { 0 } finally Predef.println("")  Try
+20                                               ((x: Int) => x)  Function
+21                                          { var v = 1; v = 2 }  Assign
+22  { class A extends AnyRef { def <init>() = { super.<init>();   This
+23                                             new r.List[Int]()  New
+24                                                 0: @unchecked  Annotated
+25                                         (null: r.Outer#Inner)  SelectFromTypeTree
+26                                              (null: Nil.type)  SingletonTypeTree
+27                                  (null: T forSome { type T })  ExistentialTypeTree
+28                                    { import r.{A, B=>C}; () }  Import
+29                                 { def f: Int = return 0; () }  Return
+30  { object x extends AnyRef { def <init>() = { super.<init>();  ModuleDef
+31                                         throw new Exception()  Throw
+32                                       0 match { case _ => 0 }  Match CaseDef
+33                                  0 match { case (1| 2) => 0 }  Alternative
+34                         NN.q match { case (x @ r.List) => 0 }  Bind
+35                     NN.q match { case r.UnSeq(1, (_)*) => 0 }  Star
diff --git a/test/files/run/reify-each-node-type.scala b/test/files/run/reify-each-node-type.scala
new file mode 100644
index 0000000..425061f
--- /dev/null
+++ b/test/files/run/reify-each-node-type.scala
@@ -0,0 +1,110 @@
+
+import scala.language.{ existentials, postfixOps }
+import scala.reflect.runtime.universe._
+
+object r {
+  class A
+  class B
+  class List[+A]
+  object List { def apply[A](xs: A*): List[A] = new List[A] }
+  object Nil extends List[Nothing]
+
+  trait OuterP[A] {
+    trait Inner
+    trait InnerP[B]
+  }
+  trait Outer {
+    trait Inner
+    trait InnerP[B]
+  }
+  object Un    { def unapply(x: Any)    = Some(5) }
+  object UnSeq { def unapplySeq(x: Any) = Some(Seq(5)) }
+  class C[T]
+  class D
+  trait E
+
+  trait SN {
+    def q: Any = null
+  }
+}
+
+object s {
+  import r._
+
+  trait NN extends SN {
+    def act[T](expr: Expr[T]): Unit
+
+    act(reify { s                                           /* Ident */ })
+    act(reify { r.List                                      /* Select */ })
+    act(reify { List()                                      /* Apply */ })
+    act(reify { List(1)                                     /* Literal */ })
+    act(reify { List[Int]()                                 /* TypeApply */ })
+    act(reify { 1: Int                                      /* Typed */ })
+    act(reify { null: List[Int]                             /* AppliedTypeTree */ })
+    act(reify { () ; ()                                     /* Block */ })
+    act(reify { val x: Int = 0                              /* ValDef */ })
+    act(reify { val x = 0                                   /* TypeTree */ })
+    act(reify { if (true) ()                                /* If */ })
+    act(reify { def f { }                                   /* DefDef */ })
+    act(reify { def m = super.q                             /* Super */ })
+    act(reify { trait A                                     /* ClassDef Template */ })
+    act(reify { def f(x: Any) { }                           /* EmptyTree */ })
+    act(reify { null: D with E                              /* CompoundTypeTree */ })
+    act(reify { type T = Int                                /* TypeDef */ })
+    act(reify { type CC[T <: D] = C[T]                      /* TypeBoundsTree */ })
+    act(reify { try 0 finally println("")                   /* Try */ })
+    act(reify { (x: Int) => x                               /* Function */ })
+    act(reify { var v = 1 ; v = 2                           /* Assign */ })
+    act(reify { class A() { def this(x: A) = this() }       /* This */ })
+    act(reify { new List[Int]                               /* New */ })
+    act(reify { 0: @unchecked                               /* Annotated */ })
+    act(reify { null: Outer#Inner                           /* SelectFromTypeTree */ })
+    act(reify { null: Nil.type                              /* SingletonTypeTree */ })
+    act(reify { null: (T forSome { type T })                /* ExistentialTypeTree */ })
+    act(reify { import r.{ A, B => C };                     /* Import */ })
+    act(reify { def f: Int = return 0                       /* Return */ })
+    act(reify { object x                                    /* ModuleDef */ })
+    act(reify { throw new java.lang.Exception               /* Throw */ })
+    act(reify { 0 match { case _ => 0 }                     /* Match CaseDef */ })
+    act(reify { 0 match { case 1 | 2 => 0 }                 /* Alternative */ })
+    act(reify { q match { case x @ List => 0 }              /* Bind */ })
+    act(reify { q match { case UnSeq(1, _*) => 0 }          /* Star */ })
+
+    // ``unexpected: bound type that doesn't have a tpe: Ident(newTypeName("Int"))''
+    // act(reify { r.List[T forSome { type T <: Int }]() })    // Was crashing , no longer
+    //
+    // error: exception during macro expansion:
+    // scala.MatchError: collection.this.Seq.unapplySeq[A] (of class scala.reflect.internal.Trees$TypeApply)
+    //   at scala.reflect.reify.phases.Reshape$$anon$1.extractExtractor$1(Reshape.scala:73)
+    //   at scala.reflect.reify.phases.Reshape$$anon$1.transform(Reshape.scala:82)
+    //   at scala.reflect.reify.phases.Reshape$$anon$1.transform(Reshape.scala:24)
+    //   at scala.reflect.internal.Trees$class.itransform(Trees.scala:1290)
+    //
+    // act(reify { r.List[Any]() match { case Seq(1, _*) => 1 } } )
+
+    // act(reify { List[OuterP[Int]#InnerP[Byte]]() })
+    //
+    // SI-7243
+    //
+    // test/files/run/reify-each-node-type.scala:85: error: Cannot materialize r.List.apply[r.OuterP[Int]#InnerP[Byte]]() as { ... } because:
+    // scala.reflect.macros.TypecheckException: value TypeTreeWithDeferredRefCheck is not a member of type parameter U
+    //     act(reify { List[OuterP[Int]#InnerP[Byte]]() })
+    //               ^
+    // one error found
+  }
+}
+
+object Test {
+  var idx = 0
+  val seen = scala.collection.mutable.Set[String]()
+
+  object N extends s.NN {
+    def act[T](expr: Expr[T]): Unit = {
+      idx += 1
+      val ts = expr.tree filter (_ => true) map (_.getClass.getName split "[.$]" last) filterNot seen distinct;
+      println("%2d  %60s  %s".format(idx, expr.tree.toString.replaceAll("""\s+""", " ").take(60), ts mkString " "))
+      seen ++= ts
+    }
+  }
+  def main(args: Array[String]): Unit = N
+}
diff --git a/test/files/run/reify-repl-fail-gracefully.check b/test/files/run/reify-repl-fail-gracefully.check
index 1b0f3f2..29ccee3 100644
--- a/test/files/run/reify-repl-fail-gracefully.check
+++ b/test/files/run/reify-repl-fail-gracefully.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> import language.experimental.macros
 import language.experimental.macros
 
@@ -12,10 +10,8 @@ import scala.reflect.runtime.universe._
 scala> 
 
 scala> reify
-<console>:12: error: macros cannot be partially applied
+<console>:12: error: too few argument lists for macro invocation
               reify
               ^
 
 scala> 
-
-scala> 
diff --git a/test/files/run/reify_ann1a.check b/test/files/run/reify_ann1a.check
index 99a966f..71841ff 100644
--- a/test/files/run/reify_ann1a.check
+++ b/test/files/run/reify_ann1a.check
@@ -1,5 +1,5 @@
 {
-  @new ann(List.apply("1a")) @new ann(List.apply("1b")) class C[@new ann(List.apply("2a")) @new ann(List.apply("2b")) T >: Nothing <: Any] extends AnyRef {
+  @new ann(List.apply("1a")) @new ann(List.apply("1b")) class C[@new ann(List.apply("2a")) @new ann(List.apply("2b")) T] extends AnyRef {
     @new ann(List.apply("3a")) @new ann(List.apply("3b")) <paramaccessor> private[this] val x: T @ann(List.apply("4a")) @ann(List.apply("4b")) = _;
     def <init>(@new ann(List.apply("3a")) @new ann(List.apply("3b")) x: T @ann(List.apply("4a")) @ann(List.apply("4b"))) = {
       super.<init>();
diff --git a/test/files/run/reify_ann1a.scala b/test/files/run/reify_ann1a.scala
index c23048e..e3ff9e5 100644
--- a/test/files/run/reify_ann1a.scala
+++ b/test/files/run/reify_ann1a.scala
@@ -20,7 +20,7 @@ object Test extends App {
 
   // test 2: import and typecheck
   val toolbox = cm.mkToolBox()
-  val ttree = toolbox.typeCheck(tree)
+  val ttree = toolbox.typecheck(tree)
   println(ttree.toString)
 
   // test 3: import and compile
diff --git a/test/files/run/reify_ann1b.check b/test/files/run/reify_ann1b.check
index 6a5f32a..a046daf 100644
--- a/test/files/run/reify_ann1b.check
+++ b/test/files/run/reify_ann1b.check
@@ -1,5 +1,10 @@
+reify_ann1b.scala:6: warning: Implementation restriction: subclassing Classfile does not
+make your annotation visible at runtime.  If that is what
+you want, you must write the annotation class in Java.
+class ann(bar: String) extends annotation.ClassfileAnnotation
+      ^
 {
-  @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T >: Nothing <: Any] extends AnyRef {
+  @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T] extends AnyRef {
     @new ann(bar = "3a") @new ann(bar = "3b") <paramaccessor> private[this] val x: T @ann(bar = "4a") @ann(bar = "4b") = _;
     def <init>(@new ann(bar = "3a") @new ann(bar = "3b") x: T @ann(bar = "4a") @ann(bar = "4b")) = {
       super.<init>();
diff --git a/test/files/run/reify_ann1b.scala b/test/files/run/reify_ann1b.scala
index 29ce602..30bafad 100644
--- a/test/files/run/reify_ann1b.scala
+++ b/test/files/run/reify_ann1b.scala
@@ -20,7 +20,7 @@ object Test extends App {
 
   // test 2: import and typecheck
   val toolbox = cm.mkToolBox()
-  val ttree = toolbox.typeCheck(tree)
+  val ttree = toolbox.typecheck(tree)
   println(ttree.toString)
 
   // test 3: import and compile
diff --git a/test/files/run/reify_ann2a.check b/test/files/run/reify_ann2a.check
index ccbcb4c..a26fa42 100644
--- a/test/files/run/reify_ann2a.check
+++ b/test/files/run/reify_ann2a.check
@@ -6,7 +6,7 @@
       ()
     }
   };
-  @new ann(List.apply("1a")) @new ann(List.apply("1b")) class C[@new ann(List.apply("2a")) @new ann(List.apply("2b")) T >: Nothing <: Any] extends AnyRef {
+  @new ann(List.apply("1a")) @new ann(List.apply("1b")) class C[@new ann(List.apply("2a")) @new ann(List.apply("2b")) T] extends AnyRef {
     @new ann(List.apply("3a")) @new ann(List.apply("3b")) <paramaccessor> private[this] val x: T @ann(List.apply("4a")) @ann(List.apply("4b")) = _;
     def <init>(@new ann(List.apply("3a")) @new ann(List.apply("3b")) x: T @ann(List.apply("4a")) @ann(List.apply("4b"))) = {
       super.<init>();
diff --git a/test/files/run/reify_ann2a.scala b/test/files/run/reify_ann2a.scala
index 53423e1..515fba0 100644
--- a/test/files/run/reify_ann2a.scala
+++ b/test/files/run/reify_ann2a.scala
@@ -20,7 +20,7 @@ object Test extends App {
 
   // test 2: import and typecheck
   val toolbox = cm.mkToolBox()
-  val ttree = toolbox.typeCheck(tree)
+  val ttree = toolbox.typecheck(tree)
   println(ttree.toString)
 
   // test 3: import and compile
diff --git a/test/files/run/reify_ann3.check b/test/files/run/reify_ann3.check
index 8caceb2..4f1c61c 100644
--- a/test/files/run/reify_ann3.check
+++ b/test/files/run/reify_ann3.check
@@ -1,5 +1,5 @@
 {
-  class Tree[A >: Nothing <: Any, B >: Nothing <: Any] extends AnyRef {
+  class Tree[A, +B] extends AnyRef {
     @new inline @getter() final <paramaccessor> val key: A = _;
     def <init>(key: A) = {
       super.<init>();
@@ -9,7 +9,7 @@
   ()
 }
 {
-  class Tree[A, B] extends AnyRef {
+  class Tree[A, +B] extends AnyRef {
     final <paramaccessor> private[this] val key: A = _;
     @inline @scala.annotation.meta.getter final <stable> <accessor> <paramaccessor> def key: A = Tree.this.key;
     def <init>(key: A): Tree[A,B] = {
diff --git a/test/files/run/reify_ann3.scala b/test/files/run/reify_ann3.scala
index 4162fa5..7098e92 100644
--- a/test/files/run/reify_ann3.scala
+++ b/test/files/run/reify_ann3.scala
@@ -14,7 +14,7 @@ object Test extends App {
 
   // test 2: import and typecheck
   val toolbox = cm.mkToolBox()
-  val ttree = toolbox.typeCheck(tree)
+  val ttree = toolbox.typecheck(tree)
   println(ttree.toString)
 
   // test 3: import and compile
diff --git a/test/files/run/reify_ann4.scala b/test/files/run/reify_ann4.scala
index 0aedb77..f642621 100644
--- a/test/files/run/reify_ann4.scala
+++ b/test/files/run/reify_ann4.scala
@@ -18,7 +18,7 @@ object Test extends App {
 
   // test 2: import and typecheck
   val toolbox = cm.mkToolBox()
-  val ttree = toolbox.typeCheck(tree)
+  val ttree = toolbox.typecheck(tree)
   println(ttree.toString)
 
   // test 3: import and compile
diff --git a/test/files/run/reify_ann5.scala b/test/files/run/reify_ann5.scala
index d27be3b..5e2f058 100644
--- a/test/files/run/reify_ann5.scala
+++ b/test/files/run/reify_ann5.scala
@@ -15,7 +15,7 @@ object Test extends App {
 
   // test 2: import and typecheck
   val toolbox = cm.mkToolBox()
-  val ttree = toolbox.typeCheck(tree)
+  val ttree = toolbox.typecheck(tree)
   println(ttree.toString)
 
   // test 3: import and compile
diff --git a/test/files/run/reify_classfileann_a.check b/test/files/run/reify_classfileann_a.check
index 0c91902..51f255b 100644
--- a/test/files/run/reify_classfileann_a.check
+++ b/test/files/run/reify_classfileann_a.check
@@ -1,3 +1,8 @@
+reify_classfileann_a.scala:6: warning: Implementation restriction: subclassing Classfile does not
+make your annotation visible at runtime.  If that is what
+you want, you must write the annotation class in Java.
+class ann(bar: String, quux: Array[String] = Array(), baz: ann = null) extends annotation.ClassfileAnnotation
+      ^
 {
   @new ann(bar = "1", quux = Array("2", "3"), baz = new ann(bar = "4")) class C extends AnyRef {
     def <init>() = {
diff --git a/test/files/run/reify_classfileann_a.scala b/test/files/run/reify_classfileann_a.scala
index 1d51688..9ae12bf 100644
--- a/test/files/run/reify_classfileann_a.scala
+++ b/test/files/run/reify_classfileann_a.scala
@@ -14,7 +14,7 @@ object Test extends App {
 
   // test 2: import and typecheck
   val toolbox = cm.mkToolBox()
-  val ttree = toolbox.typeCheck(tree)
+  val ttree = toolbox.typecheck(tree)
   println(ttree.toString)
 
   // test 3: import and compile
diff --git a/test/files/run/reify_classfileann_b.check b/test/files/run/reify_classfileann_b.check
index c204fa8..05f2e5b 100644
--- a/test/files/run/reify_classfileann_b.check
+++ b/test/files/run/reify_classfileann_b.check
@@ -1,3 +1,8 @@
+reify_classfileann_b.scala:6: warning: Implementation restriction: subclassing Classfile does not
+make your annotation visible at runtime.  If that is what
+you want, you must write the annotation class in Java.
+class ann(bar: String, quux: Array[String] = Array(), baz: ann = null) extends annotation.ClassfileAnnotation
+      ^
 {
   class C extends AnyRef {
     def <init>() = {
diff --git a/test/files/run/reify_classfileann_b.scala b/test/files/run/reify_classfileann_b.scala
index ef19e92..a0cb8f0 100644
--- a/test/files/run/reify_classfileann_b.scala
+++ b/test/files/run/reify_classfileann_b.scala
@@ -18,7 +18,7 @@ object Test extends App {
 
   // test 2: import and typecheck
   val toolbox = cm.mkToolBox()
-  val ttree = toolbox.typeCheck(tree)
+  val ttree = toolbox.typecheck(tree)
   println(ttree.toString)
 
   // test 3: import and compile
diff --git a/test/files/run/reify_closure8b.check b/test/files/run/reify_closure8b.check
index 5a7863f..5d48d3a 100644
--- a/test/files/run/reify_closure8b.check
+++ b/test/files/run/reify_closure8b.check
@@ -1,3 +1,3 @@
-scala.tools.reflect.ToolBoxError: reflective compilation has failed: 
+scala.tools.reflect.ToolBoxError: reflective compilation has failed:
 
 value y is not a member of Test.Foo
diff --git a/test/files/run/reify_copypaste1.scala b/test/files/run/reify_copypaste1.scala
index c597b7a..cf81318 100644
--- a/test/files/run/reify_copypaste1.scala
+++ b/test/files/run/reify_copypaste1.scala
@@ -9,10 +9,10 @@ object Test extends App {
   val output = new java.io.ByteArrayOutputStream()
   System.setOut(new java.io.PrintStream(output))
   val toolBox = currentMirror.mkToolBox(options = "-Yreify-copypaste")
-  val reify = Select(Select(Select(Select(Ident(ScalaPackage), newTermName("reflect")), newTermName("runtime")), newTermName("universe")), newTermName("reify"))
-  val reifee = Block(List(ValDef(Modifiers(LAZY), newTermName("x"), TypeTree(), Apply(Ident(ListModule), List(Literal(Constant(1)), Literal(Constant(2)))))), Ident(newTermName("x")))
+  val reify = Select(Select(Select(Select(Ident(ScalaPackage), TermName("reflect")), TermName("runtime")), TermName("universe")), TermName("reify"))
+  val reifee = Block(List(ValDef(Modifiers(LAZY), TermName("x"), TypeTree(), Apply(Ident(ListModule), List(Literal(Constant(1)), Literal(Constant(2)))))), Ident(TermName("x")))
   toolBox.eval(Apply(reify, List(reifee)))
-  val Block(List(tpeCopypaste), exprCopypaste @ ModuleDef(_, _, Template(_, _, (_ :: stats) :+ expr))) = toolBox.parse(output.toString())
+  val Block(List(tpeCopypaste, exprCopypaste @ ModuleDef(_, _, Template(_, _, (_ :: stats) :+ expr))), Literal(Constant(()))) = toolBox.parse(output.toString())
   output.reset()
   toolBox.eval(Block(stats, expr))
   stdout.println(output.toString)
diff --git a/test/files/run/reify_extendbuiltins.scala b/test/files/run/reify_extendbuiltins.scala
index a2d5465..46d5b7e 100644
--- a/test/files/run/reify_extendbuiltins.scala
+++ b/test/files/run/reify_extendbuiltins.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ implicitConversions, postfixOps }
 import scala.reflect.runtime.universe._
 import scala.tools.reflect.Eval
 
@@ -12,4 +14,4 @@ object Test extends App {
 
     println("10! = " + (10!))
   }.eval
-}
\ No newline at end of file
+}
diff --git a/test/files/run/reify_for1.check b/test/files/run/reify_for1.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/reify_fors_oldpatmat.flags b/test/files/run/reify_fors_oldpatmat.flags
deleted file mode 100644
index e69de29..0000000
diff --git a/test/pending/run/reify_implicits-new.check b/test/files/run/reify_implicits-new.check
similarity index 100%
rename from test/pending/run/reify_implicits-new.check
rename to test/files/run/reify_implicits-new.check
diff --git a/test/files/run/reify_implicits-new.scala b/test/files/run/reify_implicits-new.scala
new file mode 100644
index 0000000..1d90d90
--- /dev/null
+++ b/test/files/run/reify_implicits-new.scala
@@ -0,0 +1,18 @@
+
+import scala.language.{ implicitConversions, reflectiveCalls }
+import scala.reflect.{ClassTag, classTag}
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+  reify {
+    implicit def arrayWrapper[A : ClassTag](x: Array[A]) =
+      new {
+        def sort(p: (A, A) => Boolean) = {
+          util.Sorting.stableSort(x, p); x
+        }
+      }
+    val x = Array(2, 3, 1, 4)
+    println("x = "+ x.sort((x: Int, y: Int) => x < y).toList)
+  }.eval
+}
diff --git a/test/pending/run/reify_implicits-old.check b/test/files/run/reify_implicits-old.check
similarity index 100%
rename from test/pending/run/reify_implicits-old.check
rename to test/files/run/reify_implicits-old.check
diff --git a/test/files/run/reify_implicits-old.scala b/test/files/run/reify_implicits-old.scala
new file mode 100644
index 0000000..a4e9048
--- /dev/null
+++ b/test/files/run/reify_implicits-old.scala
@@ -0,0 +1,17 @@
+
+import scala.language.{ implicitConversions, reflectiveCalls }
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+  reify {
+    implicit def arrayWrapper[A : ClassManifest](x: Array[A]) =
+      new {
+        def sort(p: (A, A) => Boolean) = {
+          util.Sorting.stableSort(x, p); x
+        }
+      }
+    val x = Array(2, 3, 1, 4)
+    println("x = "+ x.sort((x: Int, y: Int) => x < y).toList)
+  }.eval
+}
diff --git a/test/files/run/reify_lazyevaluation.scala b/test/files/run/reify_lazyevaluation.scala
index 5b310d9..3f2530d 100644
--- a/test/files/run/reify_lazyevaluation.scala
+++ b/test/files/run/reify_lazyevaluation.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ implicitConversions }
 import scala.reflect.runtime.universe._
 import scala.tools.reflect.Eval
 
diff --git a/test/files/run/reify_lazyunit.check b/test/files/run/reify_lazyunit.check
index 1b46c90..579ecfe 100644
--- a/test/files/run/reify_lazyunit.check
+++ b/test/files/run/reify_lazyunit.check
@@ -1,3 +1,6 @@
+reify_lazyunit.scala:6: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    lazy val x = { 0; println("12")}
+                   ^
 12
 one
 two
diff --git a/test/files/run/reify_maps_oldpatmat.flags b/test/files/run/reify_maps_oldpatmat.flags
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/reify_newimpl_11.check b/test/files/run/reify_newimpl_11.check
index 2f5cb58..c019c6d 100644
--- a/test/files/run/reify_newimpl_11.check
+++ b/test/files/run/reify_newimpl_11.check
@@ -1,2 +1,4 @@
-scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
-unresolved free type variables (namely: T defined by C in reify_newimpl_11.scala:6:11). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
+scala.tools.reflect.ToolBoxError: reflective toolbox failed due to unresolved free type variables:
+  T defined by C in reify_newimpl_11.scala:6:11
+have you forgotten to use TypeTag annotations for type parameters external to a reifee?
+if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_13.check b/test/files/run/reify_newimpl_13.check
index d518cd7..13e3c9a 100644
--- a/test/files/run/reify_newimpl_13.check
+++ b/test/files/run/reify_newimpl_13.check
@@ -1,2 +1,4 @@
-scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
-unresolved free type variables (namely: T defined by C in reify_newimpl_13.scala:7:13). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
+scala.tools.reflect.ToolBoxError: reflective toolbox failed due to unresolved free type variables:
+  T defined by C in reify_newimpl_13.scala:7:13
+have you forgotten to use TypeTag annotations for type parameters external to a reifee?
+if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_19.check b/test/files/run/reify_newimpl_19.check
index 8b8652f..c749d4f 100644
--- a/test/files/run/reify_newimpl_19.check
+++ b/test/files/run/reify_newimpl_19.check
@@ -1,2 +1,4 @@
-scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
-unresolved free type variables (namely: T defined by C in reify_newimpl_19.scala:7:10). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
+scala.tools.reflect.ToolBoxError: reflective toolbox failed due to unresolved free type variables:
+  T defined by C in reify_newimpl_19.scala:7:10
+have you forgotten to use TypeTag annotations for type parameters external to a reifee?
+if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_22.check b/test/files/run/reify_newimpl_22.check
index dcb3e28..1432d10 100644
--- a/test/files/run/reify_newimpl_22.check
+++ b/test/files/run/reify_newimpl_22.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> import scala.reflect.runtime.universe._
 import scala.reflect.runtime.universe._
 
@@ -19,11 +17,9 @@ scala> {
   }
   println(code.eval)
 }
-<console>:15: free term: Ident(newTermName("x")) defined by res0  in <console>:14:21
+<console>:15: free term: Ident(TermName("x")) defined by res0  in <console>:14:21
                 val code = reify {
                                  ^
 2
 
 scala> 
-
-scala> 
diff --git a/test/files/run/reify_newimpl_23.check b/test/files/run/reify_newimpl_23.check
index 8821246..217f0a9 100644
--- a/test/files/run/reify_newimpl_23.check
+++ b/test/files/run/reify_newimpl_23.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> import scala.reflect.runtime.universe._
 import scala.reflect.runtime.universe._
 
@@ -18,11 +16,9 @@ scala> def foo[T]{
   }
   println(code.eval)
 }
-<console>:13: free type: Ident(newTypeName("T")) defined by foo in <console>:12:16
+<console>:13: free type: Ident(TypeName("T")) defined by foo in <console>:12:16
          val code = reify {
                           ^
 foo: [T]=> Unit
 
 scala> 
-
-scala> 
diff --git a/test/files/run/reify_newimpl_25.check b/test/files/run/reify_newimpl_25.check
index d1028b9..93ad69d 100644
--- a/test/files/run/reify_newimpl_25.check
+++ b/test/files/run/reify_newimpl_25.check
@@ -1,19 +1,15 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> {
   import scala.reflect.runtime.universe._
   val x = "2"
   val tt = implicitly[TypeTag[x.type]]
   println(tt)
 }
-<console>:11: free term: Ident(newTermName("x")) defined by res0  in <console>:10:21
+<console>:11: free term: Ident(TermName("x")) defined by res0  in <console>:10:21
                 val tt = implicitly[TypeTag[x.type]]
                                    ^
 TypeTag[x.type]
 
 scala> 
-
-scala> 
diff --git a/test/files/run/reify_newimpl_26.check b/test/files/run/reify_newimpl_26.check
index 347f636..8e0ad87 100644
--- a/test/files/run/reify_newimpl_26.check
+++ b/test/files/run/reify_newimpl_26.check
@@ -1,14 +1,12 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> def foo[T]{
   import scala.reflect.runtime.universe._
   val tt = implicitly[WeakTypeTag[List[T]]]
   println(tt)
 }
-<console>:9: free type: Ident(newTypeName("T")) defined by foo in <console>:7:16
+<console>:9: free type: Ident(TypeName("T")) defined by foo in <console>:7:16
          val tt = implicitly[WeakTypeTag[List[T]]]
                             ^
 foo: [T]=> Unit
@@ -17,5 +15,3 @@ scala> foo[Int]
 WeakTypeTag[scala.List[T]]
 
 scala> 
-
-scala> 
diff --git a/test/files/run/reify_newimpl_30.check b/test/files/run/reify_newimpl_30.check
index 29baac9..7557c75 100644
--- a/test/files/run/reify_newimpl_30.check
+++ b/test/files/run/reify_newimpl_30.check
@@ -1,2 +1,4 @@
-reflective toolbox has failed:
-unresolved free type variables (namely: C defined by <local Test> in reify_newimpl_30.scala:7:11). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
+reflective toolbox failed due to unresolved free type variables:
+  C defined by <local Test> in reify_newimpl_30.scala:7:11
+have you forgotten to use TypeTag annotations for type parameters external to a reifee?
+if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_35.check b/test/files/run/reify_newimpl_35.check
index 52aaa17..f884d2c 100644
--- a/test/files/run/reify_newimpl_35.check
+++ b/test/files/run/reify_newimpl_35.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> import scala.reflect.runtime.universe._
 import scala.reflect.runtime.universe._
 
@@ -13,5 +11,3 @@ scala> println(foo)
 Expr[List[Nothing]](Nil)
 
 scala> 
-
-scala> 
diff --git a/test/files/run/reify_newimpl_45.scala b/test/files/run/reify_newimpl_45.scala
index 2a6c68d..fd8011f 100644
--- a/test/files/run/reify_newimpl_45.scala
+++ b/test/files/run/reify_newimpl_45.scala
@@ -2,13 +2,13 @@ import scala.reflect.runtime.universe._
 import scala.reflect.runtime.{universe => ru}
 import scala.reflect.runtime.{currentMirror => cm}
 import scala.tools.reflect.ToolBox
+import internal._
 
 object Test extends App {
   class C[T >: Null] {
     val code = reify{val x: T = "2".asInstanceOf[T]; println("ima worx: %s".format(x)); x}
-    println(code.tree.freeTypes)
-    val T = code.tree.freeTypes(0)
-    val tree = code.tree.substituteSymbols(List(T), List(definitions.StringClass))
+    println(freeTypes(code.tree))
+    val tree = substituteSymbols(code.tree, freeTypes(code.tree), List(definitions.StringClass))
     cm.mkToolBox().eval(tree)
   }
 
diff --git a/test/files/run/reify_printf.check b/test/files/run/reify_printf.check
index e69de29..3b18e51 100644
--- a/test/files/run/reify_printf.check
+++ b/test/files/run/reify_printf.check
@@ -0,0 +1 @@
+hello world
diff --git a/test/files/run/reify_printf.scala b/test/files/run/reify_printf.scala
index 272856b..c4ade79 100644
--- a/test/files/run/reify_printf.scala
+++ b/test/files/run/reify_printf.scala
@@ -9,13 +9,13 @@ import scala.reflect.internal.Types
 import scala.util.matching.Regex
 
 object Test extends App {
-  val output = new ByteArrayOutputStream()
-  Console.setOut(new PrintStream(output))
+  //val output = new ByteArrayOutputStream()
+  //Console.setOut(new PrintStream(output))
   val toolbox = cm.mkToolBox()
 
   val tree = tree_printf(reify("hello %s").tree, reify("world").tree)
   val evaluated = toolbox.eval(tree)
-  assert(output.toString() == "hello world", output.toString() +" ==     hello world")
+  //assert(output.toString() == "hello world", output.toString() +" ==     hello world")
 
   /*
   // upd. Oh, good old times, our very-very first experiments with macros :)
@@ -23,7 +23,7 @@ object Test extends App {
    */
 
   var i = 0
-  def gensym(name: String) = { i += 1; newTermName(name + i) }
+  def gensym(name: String) = { i += 1; TermName(name + i) }
 
   def createTempValDef( value : Tree, tpe : Type ) : (Option[Tree],Tree) = {
     val local = gensym("temp")
@@ -59,13 +59,13 @@ object Test extends App {
       Apply(
         Select(
           Select(
-            Ident( newTermName("scala") )
-            , newTermName("Predef")
+            Ident( TermName("scala") )
+            , TermName("Predef")
           )
-          , newTermName("print")
+          , TermName("print")
         )
         , List(ref)
       ): Tree
     Block((evals ++ prints).toList, Literal(Constant(())))
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/reify_renamed_term_si5841.check b/test/files/run/reify_renamed_term_t5841.check
similarity index 100%
rename from test/files/run/reify_renamed_term_si5841.check
rename to test/files/run/reify_renamed_term_t5841.check
diff --git a/test/files/run/reify_renamed_term_si5841.scala b/test/files/run/reify_renamed_term_t5841.scala
similarity index 100%
rename from test/files/run/reify_renamed_term_si5841.scala
rename to test/files/run/reify_renamed_term_t5841.scala
diff --git a/test/files/run/reify_this.scala b/test/files/run/reify_this.scala
index ecbf394..c385da6 100644
--- a/test/files/run/reify_this.scala
+++ b/test/files/run/reify_this.scala
@@ -1,11 +1,11 @@
 import scala.reflect.runtime.universe._
 import scala.tools.reflect.Eval
 
-trait Eval {
+trait Transvaal {
   def eval(tree: Expr[_]) = tree.eval
 }
 
-object Test extends App with Eval {
+object Test extends App with Transvaal {
   // select a value from package
   eval(reify{println("foo")})
   eval(reify{println((new Object).toString == (new Object).toString)})
@@ -17,4 +17,4 @@ object Test extends App with Eval {
   // select a value from module
   val x = 2
   eval(reify{println(x)})
-}
\ No newline at end of file
+}
diff --git a/test/files/run/repl-assign.check b/test/files/run/repl-assign.check
index c6b0458..bdc7793 100644
--- a/test/files/run/repl-assign.check
+++ b/test/files/run/repl-assign.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> var x = 10
 x: Int = 10
 
@@ -16,5 +14,3 @@ scala> y = 13
 y: Int = 13
 
 scala> 
-
-scala> 
diff --git a/test/files/run/repl-backticks.scala b/test/files/run/repl-backticks.scala
index 5eaa1ec..e40a8bc 100644
--- a/test/files/run/repl-backticks.scala
+++ b/test/files/run/repl-backticks.scala
@@ -1,14 +1,14 @@
 import scala.tools.nsc._  
 
 object Test {
-  val testCode = <code>
+  val testCode = """
     import java.lang.Thread.`yield`
     import scala.`package`.Throwable
     
     `yield`  
-  </code>.text
+  """
   
-  def main(args: Array[String]) = {
+  def main(args: Array[String]) {
     val settings = new Settings()
     settings.classpath.value = System.getProperty("java.class.path")
     val repl = new interpreter.IMain(settings)
diff --git a/test/files/run/repl-bare-expr.check b/test/files/run/repl-bare-expr.check
index 8b6434e..97ae208 100644
--- a/test/files/run/repl-bare-expr.check
+++ b/test/files/run/repl-bare-expr.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> 2 ; 3
 <console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
               2 ;;
@@ -31,7 +29,7 @@ scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Mooo
 <console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
               5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
                                                                                                 ^
-defined module Cow
+defined object Cow
 defined class Moo
 bippy: Int
 res2: Int = 105
@@ -39,7 +37,7 @@ res2: Int = 105
 scala> 
 
 scala> object Bovine { var x: List[_] = null } ; case class Ruminant(x: Int) ; bippy * bippy * bippy
-defined module Bovine
+defined object Bovine
 defined class Ruminant
 res3: Int = 216
 
@@ -50,5 +48,3 @@ scala> Bovine.x
 res4: List[Any] = List(Ruminant(5), Cow, Moooooo)
 
 scala> 
-
-scala> 
diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check
index 0cb18e9..1f6d3e2 100644
--- a/test/files/run/repl-colon-type.check
+++ b/test/files/run/repl-colon-type.check
@@ -1,14 +1,11 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> :type List[1, 2, 3]
 <console>:1: error: identifier expected but integer literal found.
        List[1, 2, 3]
             ^
 
-
 scala> :type List(1, 2, 3)
 List[Int]
 
@@ -41,12 +38,11 @@ Int
 scala> :type protected lazy val f = 5
 <console>:5: error: lazy value f cannot be accessed in object $iw
  Access to protected value f not permitted because
- enclosing object $eval in package $line13 is not a subclass of 
+ enclosing object $eval in package $line13 is not a subclass of
  object $iw where target is defined
-  lazy val $result = `f`
+  lazy val $result = f
                                            ^
 
-
 scala> :type def f = 5
 => Int
 
@@ -81,8 +77,8 @@ TypeRef(
     )
     TypeRef(
       TypeSymbol(
-        sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]]
-        
+        sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]] with Serializable
+
       )
       args = List(
         TypeRef(TypeSymbol(final abstract class Int extends AnyVal))
@@ -103,7 +99,7 @@ PolyType(
     resultType = TypeRef(
       TypeSymbol(
         abstract trait Set[A] extends Iterable[A] with Set[A] with GenericSetTemplate[A,scala.collection.immutable.Set] with SetLike[A,scala.collection.immutable.Set[A]] with Parallelizable[A,scala.collection.parallel.immutable.ParSet[A]]
-        
+
       )
       args = List(TypeRef(TypeSymbol(abstract class Any extends )))
     )
@@ -148,8 +144,8 @@ TypeRef(
       args = List(
         TypeRef(
           TypeSymbol(
-            sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]]
-            
+            sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]] with Serializable
+
           )
           args = List(
             TypeRef(
@@ -181,8 +177,8 @@ PolyType(
           args = List(
             TypeRef(
               TypeSymbol(
-                sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]]
-                
+                sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]] with Serializable
+
               )
               args = List(TypeParamTypeRef(TypeParam(T <: AnyVal)))
             )
@@ -204,8 +200,8 @@ PolyType(
     params = List(TermSymbol(x: T), TermSymbol(y: List[U]))
     resultType = TypeRef(
       TypeSymbol(
-        sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]]
-        
+        sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]] with Serializable
+
       )
       args = List(TypeParamTypeRef(TypeParam(U >: T)))
     )
@@ -223,5 +219,3 @@ scala> :type println("side effect!")
 Unit
 
 scala> 
-
-scala> 
diff --git a/test/files/run/repl-empty-package.check b/test/files/run/repl-empty-package.check
new file mode 100644
index 0000000..ecf79c2
--- /dev/null
+++ b/test/files/run/repl-empty-package.check
@@ -0,0 +1,7 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> println(Bippy.bippy)
+bippy!
+
+scala> 
diff --git a/test/files/run/repl-empty-package/s_1.scala b/test/files/run/repl-empty-package/s_1.scala
new file mode 100644
index 0000000..b59d16b
--- /dev/null
+++ b/test/files/run/repl-empty-package/s_1.scala
@@ -0,0 +1,3 @@
+object Bippy {
+  def bippy = "bippy!"
+}
diff --git a/test/files/run/repl-empty-package/s_2.scala b/test/files/run/repl-empty-package/s_2.scala
new file mode 100644
index 0000000..512e6dd
--- /dev/null
+++ b/test/files/run/repl-empty-package/s_2.scala
@@ -0,0 +1,5 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+  def code = "println(Bippy.bippy)"
+}
diff --git a/test/files/run/repl-javap-app.check b/test/files/run/repl-javap-app.check
new file mode 100644
index 0000000..4908605
--- /dev/null
+++ b/test/files/run/repl-javap-app.check
@@ -0,0 +1,38 @@
+#partest java6
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> :javap -app MyApp$
+public final void delayedEndpoint$MyApp$1();
+  Code:
+   Stack=2, Locals=1, Args_size=1
+   0:	getstatic	#61; //Field scala/Console$.MODULE$:Lscala/Console$;
+   3:	ldc	#63; //String Hello, delayed world.
+   5:	invokevirtual	#67; //Method scala/Console$.println:(Ljava/lang/Object;)V
+   8:	return
+  LocalVariableTable: 
+   Start  Length  Slot  Name   Signature
+   0      9      0    this       LMyApp$;
+
+scala> 
+#partest !java6
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> :javap -app MyApp$
+  public final void delayedEndpoint$MyApp$1();
+    flags: ACC_PUBLIC, ACC_FINAL
+    Code:
+      stack=2, locals=1, args_size=1
+         0: getstatic     #61                 // Field scala/Console$.MODULE$:Lscala/Console$;
+         3: ldc           #63                 // String Hello, delayed world.
+         5: invokevirtual #67                 // Method scala/Console$.println:(Ljava/lang/Object;)V
+         8: return        
+      LocalVariableTable:
+        Start  Length  Slot  Name   Signature
+               0       9     0  this   LMyApp$;
+      LineNumberTable:
+        line 5: 0
+}
+
+scala> 
diff --git a/test/files/run/repl-javap-app.scala b/test/files/run/repl-javap-app.scala
new file mode 100644
index 0000000..be04920
--- /dev/null
+++ b/test/files/run/repl-javap-app.scala
@@ -0,0 +1,10 @@
+
+import scala.tools.partest.ReplTest
+
+object MyApp extends App {
+  Console println "Hello, delayed world."
+}
+
+object Test extends ReplTest {
+  def code = ":javap -app MyApp$"
+}
diff --git a/test/files/run/repl-javap-def.scala b/test/files/run/repl-javap-def.scala
new file mode 100644
index 0000000..dbd7696
--- /dev/null
+++ b/test/files/run/repl-javap-def.scala
@@ -0,0 +1,17 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+  def code = """
+    |def f = 7
+    |:javap -public -raw f
+  """.stripMargin
+
+  // it should find f wrapped in repl skins. replstiltskin.
+  override def yah(res: Seq[String]) = {
+    // replstiltskin: what be my name?
+    val keywords = List("public", "class", "line")
+    def isLineClass(s: String) = keywords forall (s contains _)
+    def filtered = res filter isLineClass
+    1 == filtered.size
+  }
+}
diff --git a/test/files/run/repl-javap-fun.scala b/test/files/run/repl-javap-fun.scala
new file mode 100644
index 0000000..5c9a6b7
--- /dev/null
+++ b/test/files/run/repl-javap-fun.scala
@@ -0,0 +1,16 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+  def code = """
+    |object Betty {
+    | List(1,2,3) filter (_ % 2 != 0) map (_ * 2)
+    |}
+    |:javap -fun Betty
+  """.stripMargin
+
+  // two anonfuns of Betty
+  override def yah(res: Seq[String]) = {
+    def filtered = res filter (_ contains "public final class Betty")
+    2 == filtered.size
+  }
+}
diff --git a/test/files/run/repl-javap-mem.scala b/test/files/run/repl-javap-mem.scala
new file mode 100644
index 0000000..8db30e8
--- /dev/null
+++ b/test/files/run/repl-javap-mem.scala
@@ -0,0 +1,19 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+  def code = """
+    |object Betty {
+    |  val ds = List(1,2,3) filter (_ % 2 == 0) map (_ * 3)
+    |  def m(vs: List[Int]) = vs filter (_ % 2 != 0) map (_ * 2)
+    |}
+    |:javap Betty#m
+  """.stripMargin
+
+  // filter for requested method member
+  override def yah(res: Seq[String]) = {
+    // cheaply, methods end in arg list
+    val p = """.*m\(.*\);""".r
+    def filtered = res filter (_ match { case p() => true case _ => false })
+    1 == filtered.size
+  }
+}
diff --git a/test/files/run/repl-javap-memfun.scala b/test/files/run/repl-javap-memfun.scala
new file mode 100644
index 0000000..d2b4243
--- /dev/null
+++ b/test/files/run/repl-javap-memfun.scala
@@ -0,0 +1,18 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+  def code = """
+    |object Betty {
+    | List(1,2,3) count (_ % 2 != 0)
+    | def f = List(1,2,3) filter (_ % 2 != 0) map (_ * 2)
+    | def g = List(1,2,3) filter (_ % 2 == 0) map (_ * 3) map (_ + 1)
+    |}
+    |:javap -fun Betty#g
+  """.stripMargin
+
+  // three anonfuns of Betty#g
+  override def yah(res: Seq[String]) = {
+    def filtered = res filter (_ contains "public final class Betty")
+    3 == filtered.size
+  }
+}
diff --git a/test/files/run/repl-javap-more-fun.scala b/test/files/run/repl-javap-more-fun.scala
new file mode 100644
index 0000000..e603faf
--- /dev/null
+++ b/test/files/run/repl-javap-more-fun.scala
@@ -0,0 +1,17 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+  def code = """
+    |object Betty {
+    |  val ds = List(1,2,3) filter (_ % 2 == 0) map (_ * 3)
+    |  def m(vs: List[Int]) = vs filter (_ % 2 != 0) map (_ * 2)
+    |}
+    |:javap -fun Betty
+  """.stripMargin
+
+  // two anonfuns of Betty
+  override def yah(res: Seq[String]) = {
+    def filtered = res filter (_ contains "public final class Betty")
+    4 == filtered.size
+  }
+}
diff --git a/test/files/run/repl-javap-outdir-funs/foo_1.scala b/test/files/run/repl-javap-outdir-funs/foo_1.scala
new file mode 100644
index 0000000..9b98e94
--- /dev/null
+++ b/test/files/run/repl-javap-outdir-funs/foo_1.scala
@@ -0,0 +1,6 @@
+
+package disktest
+
+class Foo {
+  def m(vs: List[Int]) = vs map (_ + 1)
+}
diff --git a/test/files/run/repl-javap-outdir-funs/run-repl_7.scala b/test/files/run/repl-javap-outdir-funs/run-repl_7.scala
new file mode 100644
index 0000000..6c6fe2d
--- /dev/null
+++ b/test/files/run/repl-javap-outdir-funs/run-repl_7.scala
@@ -0,0 +1,17 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+  def code = """
+    |:javap -fun disktest/Foo.class
+  """.stripMargin
+
+  override def yah(res: Seq[String]) =
+    // It's currently unknown why this test fails on Avian with
+    // “Failed: No anonfuns found.”, skip it for now. See SI-7630.
+    if (scala.tools.partest.utils.Properties.isAvian)
+      true
+    else {
+      def filtered = res filter (_ contains "public final class disktest.Foo")
+      1 == filtered.size
+    }
+}
diff --git a/test/files/run/repl-javap-outdir/foo_1.scala b/test/files/run/repl-javap-outdir/foo_1.scala
new file mode 100644
index 0000000..9b98e94
--- /dev/null
+++ b/test/files/run/repl-javap-outdir/foo_1.scala
@@ -0,0 +1,6 @@
+
+package disktest
+
+class Foo {
+  def m(vs: List[Int]) = vs map (_ + 1)
+}
diff --git a/test/files/run/repl-javap-outdir/run-repl_7.scala b/test/files/run/repl-javap-outdir/run-repl_7.scala
new file mode 100644
index 0000000..dc2c571
--- /dev/null
+++ b/test/files/run/repl-javap-outdir/run-repl_7.scala
@@ -0,0 +1,12 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+  def code = """
+    |:javap disktest/Foo.class
+  """.stripMargin
+
+  override def yah(res: Seq[String]) = {
+    def filtered = res filter (_ contains "public class disktest.Foo")
+    1 == filtered.size
+  }
+}
diff --git a/test/files/run/repl-javap.scala b/test/files/run/repl-javap.scala
new file mode 100644
index 0000000..7a19852
--- /dev/null
+++ b/test/files/run/repl-javap.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+  def code = """
+    |case class Betty(i: Int) { def next = Betty(i+1) }
+    |:javap Betty
+  """.stripMargin
+
+  override def yah(res: Seq[String]) = {
+    def filtered = res filter (_ contains "public class Betty")
+    1 == filtered.size
+  }
+}
diff --git a/test/files/run/repl-out-dir.check b/test/files/run/repl-out-dir.check
new file mode 100644
index 0000000..3e51c63
--- /dev/null
+++ b/test/files/run/repl-out-dir.check
@@ -0,0 +1,49 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> case class Bippy(x: Int)
+defined class Bippy
+
+scala> val x = Bippy(1)
+x: Bippy = Bippy(1)
+
+scala> $intp.showDirectory
+repl-out-dir-run.obj
+    $line1
+        $eval$.class
+        $eval.class
+    $line2
+        $eval$.class
+        $eval.class
+        $read$$iw$$iw$.class
+        $read$$iw$.class
+        $read$.class
+        $read.class
+    $line3
+        $eval$.class
+        $eval.class
+        $read$$iw$$iw$.class
+        $read$$iw$$iw$Bippy$.class
+        $read$$iw$$iw$Bippy.class
+        $read$$iw$.class
+        $read$.class
+        $read.class
+    $line4
+        $eval$.class
+        $eval.class
+        $read$$iw$$iw$.class
+        $read$$iw$.class
+        $read$.class
+        $read.class
+    $line5
+        $eval$.class
+        $eval.class
+        $read$$iw$$iw$.class
+        $read$$iw$.class
+        $read$.class
+        $read.class
+    $repl_$init.class
+    Test$.class
+    Test.class
+
+scala> 
diff --git a/test/files/run/repl-out-dir.scala b/test/files/run/repl-out-dir.scala
new file mode 100644
index 0000000..33c823a
--- /dev/null
+++ b/test/files/run/repl-out-dir.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.ReplTest
+import scala.tools.nsc.Settings
+
+object Test extends ReplTest {
+  override def extraSettings = s"-Yrepl-outdir ${testOutput.path}"
+
+  def code = s"""
+case class Bippy(x: Int)
+val x = Bippy(1)
+$$intp.showDirectory
+  """
+
+}
diff --git a/test/files/run/repl-paste-2.check b/test/files/run/repl-paste-2.check
index 203b020..ab3809a 100644
--- a/test/files/run/repl-paste-2.check
+++ b/test/files/run/repl-paste-2.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> scala> 999l
 
 // Detected repl transcript paste: ctrl-D to finish.
@@ -31,7 +29,6 @@ res9: Int = 6
 
 scala> x.length + res5
 res10: Int = 12
-
 // Replaying 8 commands from transcript.
 
 scala> 999l
diff --git a/test/files/run/repl-paste-3.check b/test/files/run/repl-paste-3.check
index 2b4c941..8fae617 100644
--- a/test/files/run/repl-paste-3.check
+++ b/test/files/run/repl-paste-3.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> println(3)
 3
 
@@ -10,5 +8,3 @@ scala>   List(1,2)
 res1: List[Int] = List(1, 2)
 
 scala> 
-
-scala> 
diff --git a/test/files/run/repl-paste-4.pastie b/test/files/run/repl-paste-4.pastie
new file mode 100644
index 0000000..853a66f
--- /dev/null
+++ b/test/files/run/repl-paste-4.pastie
@@ -0,0 +1,4 @@
+
+// if we are truly companions, I can see your foo
+class Foo { private val foo = 7 }
+object Foo { def apply(f: Foo) = f.foo }
diff --git a/test/files/run/repl-paste-4.scala b/test/files/run/repl-paste-4.scala
new file mode 100644
index 0000000..0060dc1
--- /dev/null
+++ b/test/files/run/repl-paste-4.scala
@@ -0,0 +1,20 @@
+
+import scala.tools.partest.SessionTest
+
+object Test extends SessionTest {
+  def session =
+s"""|Type in expressions to have them evaluated.
+    |Type :help for more information.
+    |
+    |scala> :paste $pastie
+    |Pasting file $pastie...
+    |defined class Foo
+    |defined object Foo
+    |
+    |scala> Foo(new Foo)
+    |res0: Int = 7
+    |
+    |scala> """
+  def pastie = testPath changeExtension "pastie"
+}
+
diff --git a/test/files/run/repl-paste-raw.pastie b/test/files/run/repl-paste-raw.pastie
new file mode 100644
index 0000000..f13b4bc
--- /dev/null
+++ b/test/files/run/repl-paste-raw.pastie
@@ -0,0 +1,8 @@
+
+// a raw paste is not a script
+// hence it can be packaged
+
+package brown_paper
+
+// these are a few of my favorite things
+case class Gift (hasString: Boolean)
diff --git a/test/files/run/repl-paste-raw.scala b/test/files/run/repl-paste-raw.scala
new file mode 100644
index 0000000..2953796
--- /dev/null
+++ b/test/files/run/repl-paste-raw.scala
@@ -0,0 +1,20 @@
+
+import scala.tools.partest.SessionTest
+
+object Test extends SessionTest {
+  def session =
+s"""|Type in expressions to have them evaluated.
+    |Type :help for more information.
+    |
+    |scala> :paste -raw $pastie
+    |Pasting file $pastie...
+    |
+    |scala> val favoriteThing = brown_paper.Gift(true)
+    |favoriteThing: brown_paper.Gift = Gift(true)
+    |
+    |scala> favoriteThing.hasString
+    |res0: Boolean = true
+    |
+    |scala> """
+  def pastie = testPath changeExtension "pastie"
+}
diff --git a/test/files/run/repl-paste.check b/test/files/run/repl-paste.check
index d3e171f..97f177d 100644
--- a/test/files/run/repl-paste.check
+++ b/test/files/run/repl-paste.check
@@ -17,11 +17,10 @@ object Dingus
 
 val x = (new Dingus).y
 
-
 // Exiting paste mode, now interpreting.
 
 defined class Dingus
-defined module Dingus
+defined object Dingus
 x: Int = 110
 
 scala> 
diff --git a/test/files/run/repl-power.check b/test/files/run/repl-power.check
index 9d63ecd..e56901e 100644
--- a/test/files/run/repl-power.check
+++ b/test/files/run/repl-power.check
@@ -11,22 +11,20 @@ scala> :power
 scala> // guarding against "error: reference to global is ambiguous"
 
 scala> global.emptyValDef  // "it is imported twice in the same scope by ..."
-res0: $r.global.emptyValDef.type = private val _ = _
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+res0: $r.global.noSelfType.type = private val _ = _
 
 scala> val tp = ArrayClass[scala.util.Random]    // magic with tags
+warning: there were 1 feature warning(s); re-run with -feature for details
 tp: $r.global.Type = Array[scala.util.Random]
 
 scala> tp.memberType(Array_apply)                // evidence
 res1: $r.global.Type = (i: Int)scala.util.Random
 
-scala> val m = LIT(10) MATCH (CASE(LIT(5)) ==> FALSE, DEFAULT ==> TRUE) // treedsl
-m: $r.treedsl.global.Match = 
-10 match {
-  case 5 => false
-  case _ => true
-}
+scala> val m = LIT(10)                           // treedsl
+m: $r.treedsl.global.Literal = 10
 
 scala> typed(m).tpe                              // typed is in scope
-res2: $r.treedsl.global.Type = Boolean
+res2: $r.treedsl.global.Type = Int(10)
 
 scala> 
diff --git a/test/files/run/repl-power.scala b/test/files/run/repl-power.scala
index f7c88c6..4dfeb37 100644
--- a/test/files/run/repl-power.scala
+++ b/test/files/run/repl-power.scala
@@ -7,7 +7,7 @@ object Test extends ReplTest {
 global.emptyValDef  // "it is imported twice in the same scope by ..."
 val tp = ArrayClass[scala.util.Random]    // magic with tags
 tp.memberType(Array_apply)                // evidence
-val m = LIT(10) MATCH (CASE(LIT(5)) ==> FALSE, DEFAULT ==> TRUE) // treedsl
+val m = LIT(10)                           // treedsl
 typed(m).tpe                              // typed is in scope
   """.trim
 }
diff --git a/test/files/run/repl-reset.check b/test/files/run/repl-reset.check
index 7256b85..ed95c7b 100644
--- a/test/files/run/repl-reset.check
+++ b/test/files/run/repl-reset.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> val x1 = 1
 x1: Int = 1
 
@@ -35,6 +33,12 @@ scala> x1 + x2 + x3
 <console>:8: error: not found: value x1
               x1 + x2 + x3
               ^
+<console>:8: error: not found: value x2
+              x1 + x2 + x3
+                   ^
+<console>:8: error: not found: value x3
+              x1 + x2 + x3
+                        ^
 
 scala> val x1 = 4
 x1: Int = 4
@@ -51,5 +55,3 @@ scala> { new BippyBungus ; x1 }
 res2: Int = 4
 
 scala> 
-
-scala> 
diff --git a/test/files/run/repl-save.check b/test/files/run/repl-save.check
new file mode 100644
index 0000000..5f92868
--- /dev/null
+++ b/test/files/run/repl-save.check
@@ -0,0 +1,3 @@
+val i = 7
+val j = 8
+i * j
diff --git a/test/files/run/repl-save.scala b/test/files/run/repl-save.scala
new file mode 100644
index 0000000..4539790
--- /dev/null
+++ b/test/files/run/repl-save.scala
@@ -0,0 +1,25 @@
+import scala.tools.partest.SessionTest
+
+object Test extends SessionTest {
+  def session =
+s"""|Type in expressions to have them evaluated.
+    |Type :help for more information.
+    |
+    |scala> val i = 7
+    |i: Int = 7
+    |
+    |scala> val j = 8
+    |j: Int = 8
+    |
+    |scala> i * j
+    |res0: Int = 56
+    |
+    |scala> :save $saveto
+    |
+    |scala> """
+  def saveto = testOutput / "session.repl"
+  override def show() = {
+    super.show()
+    Console print saveto.toFile.slurp
+  }
+}
diff --git a/test/files/run/repl-term-macros.check b/test/files/run/repl-term-macros.check
new file mode 100644
index 0000000..3580bfe
--- /dev/null
+++ b/test/files/run/repl-term-macros.check
@@ -0,0 +1,40 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> import scala.reflect.macros.blackbox.Context
+import scala.reflect.macros.blackbox.Context
+
+scala> import language.experimental.macros
+import language.experimental.macros
+
+scala> 
+
+scala> def impl1(c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
+impl1: (c: scala.reflect.macros.blackbox.Context)c.Expr[Unit]
+
+scala> def foo1: Unit = macro impl1
+defined term macro foo1: Unit
+
+scala> foo1
+
+scala> 
+
+scala> def impl2(c: Context)() = { import c.universe._; c.Expr[Unit](q"()") }
+impl2: (c: scala.reflect.macros.blackbox.Context)()c.Expr[Unit]
+
+scala> def foo2(): Unit = macro impl2
+defined term macro foo2: ()Unit
+
+scala> foo2()
+
+scala> 
+
+scala> def impl3(c: Context)(x: c.Expr[Int])(y: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"()") }
+impl3: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int])(y: c.Expr[Int])c.Expr[Unit]
+
+scala> def foo3(x: Int)(y: Int): Unit = macro impl3
+defined term macro foo3: (x: Int)(y: Int)Unit
+
+scala> foo3(2)(3)
+
+scala> 
diff --git a/test/files/run/repl-term-macros.scala b/test/files/run/repl-term-macros.scala
new file mode 100644
index 0000000..32892b7
--- /dev/null
+++ b/test/files/run/repl-term-macros.scala
@@ -0,0 +1,20 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+  def code = """
+  import scala.reflect.macros.blackbox.Context
+import language.experimental.macros
+
+def impl1(c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
+def foo1: Unit = macro impl1
+foo1
+
+def impl2(c: Context)() = { import c.universe._; c.Expr[Unit](q"()") }
+def foo2(): Unit = macro impl2
+foo2()
+
+def impl3(c: Context)(x: c.Expr[Int])(y: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"()") }
+def foo3(x: Int)(y: Int): Unit = macro impl3
+foo3(2)(3)
+  """
+}
\ No newline at end of file
diff --git a/test/files/run/repl-transcript.check b/test/files/run/repl-transcript.check
index 6d22353..49891af 100644
--- a/test/files/run/repl-transcript.check
+++ b/test/files/run/repl-transcript.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> scala> class Bippity
 
 // Detected repl transcript paste: ctrl-D to finish.
@@ -19,7 +17,6 @@ scala> 1 to 100 map (_  + 1)
 res6: scala.collection.immutable.IndexedSeq[Int] = Vector(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101)
 
 scala> res6.sum + res5
-
 // Replaying 5 commands from transcript.
 
 scala> class Bippity
diff --git a/test/files/run/repl-trim-stack-trace.scala b/test/files/run/repl-trim-stack-trace.scala
new file mode 100644
index 0000000..4836591
--- /dev/null
+++ b/test/files/run/repl-trim-stack-trace.scala
@@ -0,0 +1,45 @@
+
+import scala.tools.partest.{ SessionTest, Welcoming }
+
+// SI-7740
+object Test extends SessionTest with Welcoming {
+  def session =
+"""Welcome to Scala
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> def f = throw new Exception("Uh-oh")
+f: Nothing
+
+scala> f
+java.lang.Exception: Uh-oh
+  at .f(<console>:7)
+  ... 69 elided
+
+scala> def f = throw new Exception("")
+f: Nothing
+
+scala> f
+java.lang.Exception:
+  at .f(<console>:7)
+  ... 69 elided
+
+scala> def f = throw new Exception
+f: Nothing
+
+scala> f
+java.lang.Exception
+  at .f(<console>:7)
+  ... 69 elided
+
+scala> """
+
+  // normalize the "elided" lines because the frame count depends on test context
+  lazy val elided = """(\s+\.{3} )\d+( elided)""".r
+  override def normalize(line: String) = line match {
+    case welcome(w)               => w
+    case elided(ellipsis, suffix) => s"$ellipsis???$suffix"
+    case s                        => s
+  }
+  override def expected = super.expected map normalize
+}
diff --git a/test/files/run/repl-type-verbose.check b/test/files/run/repl-type-verbose.check
index 989c073..e37754a 100644
--- a/test/files/run/repl-type-verbose.check
+++ b/test/files/run/repl-type-verbose.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> // verbose!
 
 scala> :type -v def f = 5
@@ -165,7 +163,7 @@ PolyType(
       normalize = TypeRef(
         TypeSymbol(
           abstract trait Ordering[T] extends Comparator[T] with PartialOrdering[T] with Serializable
-          
+
         )
         args = List(TypeParamTypeRef(TypeParam(T)))
       )
@@ -190,5 +188,3 @@ PolyType(
 )
 
 scala> 
-
-scala> 
diff --git a/test/files/run/resetattrs-this.scala b/test/files/run/resetattrs-this.scala
index 12afa3d..ff45d61 100644
--- a/test/files/run/resetattrs-this.scala
+++ b/test/files/run/resetattrs-this.scala
@@ -4,8 +4,8 @@ import scala.tools.reflect.ToolBox
 
 object Test extends App {
   val tb = cm.mkToolBox()
-  val tree = Select(This(cm.staticPackage("scala").moduleClass), newTermName("Predef"))
-  val ttree = tb.typeCheck(tree)
-  val rttree = tb.resetAllAttrs(ttree)
+  val tree = Select(This(cm.staticPackage("scala").moduleClass), TermName("Predef"))
+  val ttree = tb.typecheck(tree)
+  val rttree = tb.untypecheck(ttree)
   println(tb.eval(rttree) == Predef)
-}
\ No newline at end of file
+}
diff --git a/test/files/run/richs.check b/test/files/run/richs.check
index a970a81..02a98b3 100644
--- a/test/files/run/richs.check
+++ b/test/files/run/richs.check
@@ -1,3 +1,4 @@
+warning: there were 2 deprecation warning(s); re-run with -deprecation for details
 
 RichCharTest1:
 true
diff --git a/test/files/run/richs.scala b/test/files/run/richs.scala
index 5ee5736..4b53457 100644
--- a/test/files/run/richs.scala
+++ b/test/files/run/richs.scala
@@ -11,8 +11,8 @@ trait RichTest {
     val cn = this.getClass().getName()
     cn.substring(0, cn.length-1)
   }
-  def length[A](it: Iterator[A]) = it.toList length
-  def length[A](it: Iterable[A]) = it.toList length
+  def length[A](it: Iterator[A]) = it.toList.length
+  def length[A](it: Iterable[A]) = it.toList.length
   def run: Unit
 }
 object RichCharTest1 extends RichTest {
diff --git a/test/files/run/run-bug4840.scala b/test/files/run/run-bug4840.scala
index 5f98bc9..dda280f 100644
--- a/test/files/run/run-bug4840.scala
+++ b/test/files/run/run-bug4840.scala
@@ -1,6 +1,6 @@
 object Test {
   def g(x: Boolean): Option[String] = if (x) Some("booya") else None
-  
+
   def f1() = {
     for (x <- g(true)) yield {
       g(false) match {
@@ -9,11 +9,11 @@ object Test {
       }
     }
   }
-  
+
   def f2() = {
     for (x <- g(true) ; y <- g(true) ; z <- g(true)) yield {
       for (x <- g(true) ; y <- g(true) ; z <- g(true)) yield {
-        g(true) map { _ => 
+        g(true) map { _ =>
           (null: Any) match {
             case Some(x: Int) => x
             case _            => 5
@@ -21,7 +21,7 @@ object Test {
         }
       }
     }
-  }  
+  }
 
   def main(args: Array[String]): Unit = {
     println(f1())
diff --git a/test/files/run/runtime-richChar.scala b/test/files/run/runtime-richChar.scala
index cf18a1d..dceb70e 100644
--- a/test/files/run/runtime-richChar.scala
+++ b/test/files/run/runtime-richChar.scala
@@ -5,19 +5,19 @@ object Test extends App {
     else
       println(name + " failed: " + expected + " differs from " + got)
   }
-  
+
   testSeq("'a' to 'c'", List('a', 'b', 'c'), 'a' to 'c')
   testSeq("'a' until 'c'", List('a', 'b'), 'a' until 'c')
-  
+
   testSeq("'a' to 'b'", List('a', 'b'), 'a' to 'b')
   testSeq("'a' until 'b'", List('a'), 'a' until 'b')
-  
+
   testSeq("'a' to 'a'", List('a'), 'a' to 'a')
   testSeq("'a' until 'a'", List(), 'a' until 'a')
-  
+
   testSeq("'b' to 'a'", List(), 'b' to 'a')
   testSeq("'b' until 'a'", List(), 'b' until 'a')
-  
+
   testSeq("'c' to 'a'", List(), 'c' to 'a')
   testSeq("'c' until 'a'", List(), 'c' until 'a')
 }
diff --git a/test/files/run/runtime.check b/test/files/run/runtime.check
index 990a087..d613c9b 100644
--- a/test/files/run/runtime.check
+++ b/test/files/run/runtime.check
@@ -1,3 +1,9 @@
+runtime.scala:141: warning: comparing values of types Null and Null using `eq' will always yield true
+    check(true , null eq null, null ne null);
+                      ^
+runtime.scala:141: warning: comparing values of types Null and Null using `ne' will always yield false
+    check(true , null eq null, null ne null);
+                                    ^
 <<< Test0
 [false,true]
 [0,1,2]
diff --git a/test/files/run/runtime.scala b/test/files/run/runtime.scala
index 2dcb41f..89348b2 100644
--- a/test/files/run/runtime.scala
+++ b/test/files/run/runtime.scala
@@ -125,7 +125,7 @@ object Test2Test {
 
 object Test3Test {
 
-  class Foo { override def equals(that: Any) = error("abort"); }
+  class Foo { override def equals(that: Any) = sys.error("abort"); }
 
   def check(expected: Boolean, actual1: Boolean, actual2: Boolean): Unit =
     Console.println(
@@ -171,7 +171,7 @@ object Test  {
     try {
       test;
     } catch {
-      case exception => {
+      case exception: Throwable => {
         //val name: String = Thread.currentThread().getName();
         Console.print("Exception in thread \"" + name + "\" " + exception);
         Console.println;
diff --git a/test/files/run/interpolation.flags b/test/files/run/sammy_java8.flags
similarity index 100%
rename from test/files/run/interpolation.flags
rename to test/files/run/sammy_java8.flags
diff --git a/test/files/run/sammy_java8.scala b/test/files/run/sammy_java8.scala
new file mode 100644
index 0000000..db9df7f
--- /dev/null
+++ b/test/files/run/sammy_java8.scala
@@ -0,0 +1,34 @@
+import scala.tools.partest._
+
+// java8 version of sammy_poly.scala
+object Test extends CompilerTest {
+  import global._
+
+  override lazy val units: List[CompilationUnit] = {
+    global.settings.Xexperimental.value = true
+
+    // This test itself does not depend on JDK8.
+    javaCompilationUnits(global)(samSource) ++
+    compilationUnits(global)(useSamSource)
+  }
+
+  private def samSource = """
+//   trait F[T, U] { def apply(x: T): U }
+public interface F<T, U> {
+    U apply(T t);
+    default void yadayada() {
+        throw new UnsupportedOperationException("yadayada");
+    }
+}
+  """
+
+  private def useSamSource = """
+class T {
+  def app[T, U](x: T)(f: F[T, U]): U = f(x)
+  app(1)(x => List(x))
+}
+  """
+
+  // We're only checking we can compile it.
+  def check(source: String, unit: global.CompilationUnit): Unit = ()
+}
diff --git a/test/files/run/scan.scala b/test/files/run/scan.scala
index f056c77..47e0a7d 100644
--- a/test/files/run/scan.scala
+++ b/test/files/run/scan.scala
@@ -6,17 +6,17 @@ object Test {
 
   def main(args: Array[String]) {
     val lst = List(1, 2, 3, 4, 5)
-    
+
     assert(lst.scanLeft(0)(_ + _) == List(0, 1, 3, 6, 10, 15))
     assert(lst.scanRight(0)(_ + _) == List(15, 14, 12, 9, 5, 0))
-    
+
     val emp = List[Int]()
     assert(emp.scanLeft(0)(_ + _) == List(0))
     assert(emp.scanRight(0)(_ + _) == List(0))
-    
+
     val stream = Stream(1, 2, 3, 4, 5)
     assert(stream.scanLeft(0)(_ + _) == Stream(0, 1, 3, 6, 10, 15))
-    
+
     assert(Stream.from(1).scanLeft(0)(_ + _).take(5) == Stream(0, 1, 3, 6, 10))
   }
 
diff --git a/test/files/run/search.check b/test/files/run/search.check
new file mode 100644
index 0000000..a885696
--- /dev/null
+++ b/test/files/run/search.check
@@ -0,0 +1,6 @@
+Found(2)
+Found(4)
+InsertionPoint(9)
+Found(2)
+Found(4)
+InsertionPoint(9)
diff --git a/test/files/run/search.scala b/test/files/run/search.scala
new file mode 100644
index 0000000..ed7fed5
--- /dev/null
+++ b/test/files/run/search.scala
@@ -0,0 +1,14 @@
+object Test extends App {
+  import scala.collection.{LinearSeq, IndexedSeq}
+  import scala.collection.Searching.search
+
+  val ls = LinearSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13)
+  println(ls.search(3))
+  println(ls.search(5, 3, 8))
+  println(ls.search(12))
+
+  val is = IndexedSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13)
+  println(is.search(3))
+  println(is.search(5, 3, 8))
+  println(is.search(12))
+}
diff --git a/test/files/run/seqlike-kmp.scala b/test/files/run/seqlike-kmp.scala
index 514990c..af39fda 100644
--- a/test/files/run/seqlike-kmp.scala
+++ b/test/files/run/seqlike-kmp.scala
@@ -2,7 +2,7 @@ object Test {
   val source = 0 to 99
   val idxes = (-1 to 2) ++ (97 to 100)
   def str(xs: Seq[Int]) = xs.mkString("(", ", ", ")")
-  
+
   def f(tgt: Seq[Int]) = {
     println("indexOfSlice")
     // the first index `>= from` such that...
@@ -17,11 +17,11 @@ object Test {
       println("  %s with idx <= %d = %d".format(str(tgt), x, res))
     }
   }
-  
+
   def g(idx: Int, len: Int) = {
     f(source.slice(idx, idx + len))
   }
-  
+
   def main(args: Array[String]): Unit = {
     g(97, 1)
     g(97, 2)
diff --git a/test/files/run/sequenceComparisons.scala b/test/files/run/sequenceComparisons.scala
index 5d7958b..613b37f 100644
--- a/test/files/run/sequenceComparisons.scala
+++ b/test/files/run/sequenceComparisons.scala
@@ -2,12 +2,12 @@ import scala.collection.{ mutable, immutable }
 import collection.{ Seq, Traversable }
 
 object Test {
-  // TODO: 
+  // TODO:
   //
   // SeqProxy
   // SeqForwarder
   // the commented out ones in seqMakers
-  
+
   val seqMakers = List[List[Int] => Seq[Int]](
     // scala.Array(_: _*),
     mutable.ArrayBuffer(_: _*),
@@ -23,13 +23,13 @@ object Test {
     immutable.Seq(_: _*),
     mutable.Seq(_: _*),
     immutable.Stack(_: _*),
-    // mutable.Stack(_: _*),    
+    // mutable.Stack(_: _*),
     immutable.IndexedSeq(_: _*), // was Vector
     //mutable.Vector(_: _*),
     immutable.List(_: _*),
     immutable.Stream(_: _*)
   )
-  
+
   abstract class Data[T] {
     val seq: Seq[T]
     private def seqList = seq.toList
@@ -45,50 +45,50 @@ object Test {
     }
 
     lazy val eqeq = Method(_ == _, (List(seqList), List(Nil, seqList drop 1, seqList ::: seqList)), "%s == %s")
-    
+
     val startsWithInputs: Inputs
     lazy val startsWith = Method(_ startsWith _, startsWithInputs, "%s startsWith %s")
-    
+
     val endsWithInputs: Inputs
     lazy val endsWith = Method(_ endsWith _, endsWithInputs, "%s endsWith %s")
 
     val indexOfSliceInputs: Inputs
     private def subseqTest(s1: Seq[T], s2: Seq[T]) = (s1 indexOfSlice s2) != -1
     lazy val indexOfSlice = Method(subseqTest _, indexOfSliceInputs, "(%s indexOfSlice %s) != -1")
-    
+
     val sameElementsInputs: Inputs
     lazy val sameElements = Method(_ sameElements _, sameElementsInputs, "%s sameElements %s")
-    
+
     def methodList = List(eqeq, startsWith, endsWith, indexOfSlice, sameElements)
   }
-  
+
   object test1 extends Data[Int] {
     val seq = List(1,2,3,4,5)
-    
+
     val startsWithInputs = (
       List(Nil, List(1), List(1,2), seq),
       List(List(1,2,3,4,6), seq ::: List(5), List(0))
     )
-    
+
     val endsWithInputs = (
       List(Nil, List(5), List(4,5), seq),
       List(0 :: seq, List(5,2,3,4,5), List(3,4), List(5,6))
     )
-    
+
     val indexOfSliceInputs = (
       List(Nil, List(1), List(3), List(5), List(1,2), List(2,3,4), List(4,5), seq),
       List(List(1,2,3,5), List(6), List(5,4,3,2,1), List(2,1))
     )
-    
+
     val sameElementsInputs = (
       List(List(1,2,3,4,5)),
-      List(Nil, List(1), List(1,2), List(2,3,4), List(2,3,4,5), List(2,3,4,5,1), List(1,2,3,5,4), seq reverse)
+      List(Nil, List(1), List(1,2), List(2,3,4), List(2,3,4,5), List(2,3,4,5,1), List(1,2,3,5,4), seq.reverse)
     )
   }
-  
+
   val failures = new mutable.ListBuffer[String]
   var testCount = 0
-  
+
   def assertOne(op1: Any, op2: Any, res: Boolean, str: String) {
     testCount += 1
     val resStr = str.format(op1, op2)
@@ -97,25 +97,25 @@ object Test {
       failures += ("FAIL: " + resStr)
     // assert(res, resStr)
   }
-  
+
   def runSeqs() = {
     for (s1f <- seqMakers ; s2f <- seqMakers ; testData <- List(test1)) {
       import testData._
       val scrut = s1f(seq)
-      
+
       for (Method(f, (trueList, falseList), descr) <- methodList) {
         for (s <- trueList; rhs = s2f(s))
           assertOne(scrut, rhs, f(scrut, rhs), descr)
-        
+
         for (s <- falseList; rhs = s2f(s))
           assertOne(scrut, rhs, !f(scrut, rhs), "!(" + descr + ")")
       }
     }
   }
-  
+
   def main(args: Array[String]) {
     runSeqs()
-    
+
     assert(failures.isEmpty, failures mkString "\n")
   }
 }
diff --git a/test/files/run/serialize-stream.scala b/test/files/run/serialize-stream.scala
index e424d5b..3ab9f2d 100644
--- a/test/files/run/serialize-stream.scala
+++ b/test/files/run/serialize-stream.scala
@@ -5,13 +5,13 @@ object Test {
     val bos = new java.io.ByteArrayOutputStream()
     val oos = new java.io.ObjectOutputStream(bos)
     oos.writeObject(s)
-    
+
     val ois = new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(bos.toByteArray))
     val obj = ois.readObject()
     println(obj)
     println(obj.asInstanceOf[Seq[T]].toList)
   }
-  
+
   def main(args: Array[String]) {
     ser(Stream(1, 2, 3))
     ser(Stream(1))
diff --git a/test/files/run/settings-parse.check b/test/files/run/settings-parse.check
new file mode 100644
index 0000000..18145c9
--- /dev/null
+++ b/test/files/run/settings-parse.check
@@ -0,0 +1,566 @@
+0) List(-cp, ) ==> Settings {
+  -d = .
+  -classpath = ""
+}
+
+1) List(-cp, , ) ==> Settings {
+  -d = .
+  -classpath = ""
+}
+
+2) List(, -cp, ) ==> Settings {
+  -d = .
+  -classpath = ""
+}
+
+3) List(-cp, , -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+4) List(-cp, , , -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+5) List(-cp, , -deprecation, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+6) List(, -cp, , -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+7) List(-cp, , -deprecation, foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+8) List(-cp, , , -deprecation, foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+9) List(-cp, , -deprecation, , foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+10) List(-cp, , -deprecation, foo.scala, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+11) List(, -cp, , -deprecation, foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+12) List(-cp, , foo.scala) ==> Settings {
+  -d = .
+  -classpath = ""
+}
+
+13) List(-cp, , , foo.scala) ==> Settings {
+  -d = .
+  -classpath = ""
+}
+
+14) List(-cp, , foo.scala, ) ==> Settings {
+  -d = .
+  -classpath = ""
+}
+
+15) List(, -cp, , foo.scala) ==> Settings {
+  -d = .
+  -classpath = ""
+}
+
+16) List(-cp, , foo.scala, -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+17) List(-cp, , , foo.scala, -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+18) List(-cp, , foo.scala, , -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+19) List(-cp, , foo.scala, -deprecation, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+20) List(, -cp, , foo.scala, -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+21) List(-deprecation, -cp, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+22) List(, -deprecation, -cp, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+23) List(-deprecation, -cp, , ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+24) List(-deprecation, , -cp, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+25) List(-deprecation, -cp, , foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+26) List(, -deprecation, -cp, , foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+27) List(-deprecation, -cp, , , foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+28) List(-deprecation, -cp, , foo.scala, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+29) List(-deprecation, , -cp, , foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+30) List(-deprecation, foo.scala, -cp, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+31) List(, -deprecation, foo.scala, -cp, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+32) List(-deprecation, , foo.scala, -cp, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+33) List(-deprecation, foo.scala, -cp, , ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+34) List(-deprecation, foo.scala, , -cp, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+35) List(foo.scala, -cp, ) ==> Settings {
+  -d = .
+  -classpath = ""
+}
+
+36) List(, foo.scala, -cp, ) ==> Settings {
+  -d = .
+  -classpath = ""
+}
+
+37) List(foo.scala, -cp, , ) ==> Settings {
+  -d = .
+  -classpath = ""
+}
+
+38) List(foo.scala, , -cp, ) ==> Settings {
+  -d = .
+  -classpath = ""
+}
+
+39) List(foo.scala, -cp, , -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+40) List(, foo.scala, -cp, , -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+41) List(foo.scala, -cp, , , -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+42) List(foo.scala, -cp, , -deprecation, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+43) List(foo.scala, , -cp, , -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+44) List(foo.scala, -deprecation, -cp, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+45) List(, foo.scala, -deprecation, -cp, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+46) List(foo.scala, , -deprecation, -cp, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+47) List(foo.scala, -deprecation, -cp, , ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+48) List(foo.scala, -deprecation, , -cp, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = ""
+}
+
+0) List(-cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -classpath = /tmp:/bippy
+}
+
+1) List(-cp, /tmp:/bippy, ) ==> Settings {
+  -d = .
+  -classpath = /tmp:/bippy
+}
+
+2) List(, -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -classpath = /tmp:/bippy
+}
+
+3) List(-cp, /tmp:/bippy, -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+4) List(-cp, /tmp:/bippy, , -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+5) List(-cp, /tmp:/bippy, -deprecation, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+6) List(, -cp, /tmp:/bippy, -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+7) List(-cp, /tmp:/bippy, -deprecation, foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+8) List(-cp, /tmp:/bippy, , -deprecation, foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+9) List(-cp, /tmp:/bippy, -deprecation, , foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+10) List(-cp, /tmp:/bippy, -deprecation, foo.scala, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+11) List(, -cp, /tmp:/bippy, -deprecation, foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+12) List(-cp, /tmp:/bippy, foo.scala) ==> Settings {
+  -d = .
+  -classpath = /tmp:/bippy
+}
+
+13) List(-cp, /tmp:/bippy, , foo.scala) ==> Settings {
+  -d = .
+  -classpath = /tmp:/bippy
+}
+
+14) List(-cp, /tmp:/bippy, foo.scala, ) ==> Settings {
+  -d = .
+  -classpath = /tmp:/bippy
+}
+
+15) List(, -cp, /tmp:/bippy, foo.scala) ==> Settings {
+  -d = .
+  -classpath = /tmp:/bippy
+}
+
+16) List(-cp, /tmp:/bippy, foo.scala, -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+17) List(-cp, /tmp:/bippy, , foo.scala, -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+18) List(-cp, /tmp:/bippy, foo.scala, , -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+19) List(-cp, /tmp:/bippy, foo.scala, -deprecation, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+20) List(, -cp, /tmp:/bippy, foo.scala, -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+21) List(-deprecation, -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+22) List(, -deprecation, -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+23) List(-deprecation, -cp, /tmp:/bippy, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+24) List(-deprecation, , -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+25) List(-deprecation, -cp, /tmp:/bippy, foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+26) List(, -deprecation, -cp, /tmp:/bippy, foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+27) List(-deprecation, -cp, /tmp:/bippy, , foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+28) List(-deprecation, -cp, /tmp:/bippy, foo.scala, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+29) List(-deprecation, , -cp, /tmp:/bippy, foo.scala) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+30) List(-deprecation, foo.scala, -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+31) List(, -deprecation, foo.scala, -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+32) List(-deprecation, , foo.scala, -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+33) List(-deprecation, foo.scala, -cp, /tmp:/bippy, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+34) List(-deprecation, foo.scala, , -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+35) List(foo.scala, -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -classpath = /tmp:/bippy
+}
+
+36) List(, foo.scala, -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -classpath = /tmp:/bippy
+}
+
+37) List(foo.scala, -cp, /tmp:/bippy, ) ==> Settings {
+  -d = .
+  -classpath = /tmp:/bippy
+}
+
+38) List(foo.scala, , -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -classpath = /tmp:/bippy
+}
+
+39) List(foo.scala, -cp, /tmp:/bippy, -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+40) List(, foo.scala, -cp, /tmp:/bippy, -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+41) List(foo.scala, -cp, /tmp:/bippy, , -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+42) List(foo.scala, -cp, /tmp:/bippy, -deprecation, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+43) List(foo.scala, , -cp, /tmp:/bippy, -deprecation) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+44) List(foo.scala, -deprecation, -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+45) List(, foo.scala, -deprecation, -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+46) List(foo.scala, , -deprecation, -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+47) List(foo.scala, -deprecation, -cp, /tmp:/bippy, ) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
+48) List(foo.scala, -deprecation, , -cp, /tmp:/bippy) ==> Settings {
+  -d = .
+  -deprecation = true
+  -classpath = /tmp:/bippy
+}
+
diff --git a/test/files/run/settings-parse.scala b/test/files/run/settings-parse.scala
new file mode 100644
index 0000000..2754feb
--- /dev/null
+++ b/test/files/run/settings-parse.scala
@@ -0,0 +1,29 @@
+
+import scala.language.postfixOps
+import scala.tools.nsc._
+
+object Test {
+  val tokens        = List("", "-deprecation", "foo.scala")
+  val subsets       = tokens.toSet.subsets.toList
+  val permutations0 = subsets.flatMap(_.toList.permutations).distinct
+
+  def runWithCp(cp: String) = {
+    val permutations = permutations0 flatMap ("-cp CPTOKEN" :: _ permutations)
+
+    for ((p, i) <- permutations.distinct.sortBy(_ mkString "").zipWithIndex) {
+      val args           = p flatMap (_ split "\\s+") map (x => if (x == "CPTOKEN") cp else x)
+      val s              = new settings.MutableSettings(println)
+      val (ok, residual) = s.processArguments(args, processAll = true)
+
+      val expected = args filter (_ == "foo.scala")
+      assert(residual == expected, residual)
+      assert(ok, args)
+      println(s"$i) $args ==> $s")
+    }
+  }
+
+  def main(args0: Array[String]): Unit = {
+    runWithCp("")
+    runWithCp("/tmp:/bippy")
+  }
+}
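
The new settings-parse test drives compiler-argument parsing directly through MutableSettings.processArguments; the expected output above shows which options are absorbed and that stray source-file names survive as the residue. A minimal sketch of the same call pattern (only the constructor and processArguments signature visible in the test are assumed):

    import scala.tools.nsc.settings.MutableSettings

    // Recognized options are consumed; anything else (here the source file name)
    // comes back as the residue, matching the expected output above.
    val s = new MutableSettings(msg => sys.error(msg))
    val (ok, residual) = s.processArguments(
      List("-deprecation", "-cp", "/tmp:/bippy", "foo.scala"), processAll = true)
    // ok == true, residual == List("foo.scala"),
    // and s now carries -deprecation = true and -classpath = /tmp:/bippy
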
diff --git a/test/files/run/shortClass.check b/test/files/run/shortClass.check
new file mode 100644
index 0000000..fbdb725
--- /dev/null
+++ b/test/files/run/shortClass.check
@@ -0,0 +1,10 @@
+bippity.bop.Foo
+bippity.bop.Foo$Bar
+bippity.bop.Foo$Bar$
+Test$$anon$1
+Test$$anon$2
+Foo
+Bar
+Bar$
+Foo with DingDongBippy
+Bar with DingDongBippy
diff --git a/test/files/run/shortClass.scala b/test/files/run/shortClass.scala
new file mode 100644
index 0000000..b7bb016
--- /dev/null
+++ b/test/files/run/shortClass.scala
@@ -0,0 +1,24 @@
+import scala.reflect.internal.util._
+
+package bippity {
+  trait DingDongBippy
+
+  package bop {
+    class Foo {
+      class Bar
+      object Bar
+    }
+  }
+}
+
+object Test {
+  import bippity._
+  import bop._
+
+  def main(args: Array[String]): Unit = {
+    val f = new Foo
+    val instances = List(f, new f.Bar, f.Bar, new Foo with DingDongBippy, new f.Bar with DingDongBippy)
+    instances map (_.getClass.getName) foreach println
+    instances map shortClassOfInstance foreach println
+  }
+}
diff --git a/test/files/run/showdecl.check b/test/files/run/showdecl.check
new file mode 100644
index 0000000..b8d7f94
--- /dev/null
+++ b/test/files/run/showdecl.check
@@ -0,0 +1,34 @@
+compile-time
+uninitialized D: class D extends 
+initialized D: class D extends C
+uninitialized x: val x: <?>
+initialized x: val x: Int
+uninitialized y: lazy val y: <?>
+initialized y: lazy val y: Int
+uninitialized z: def z: <?>
+initialized z: def z: Int
+uninitialized t: def t: <?>
+initialized t: def t[T <: Int](x: D)(y: x.W): Int
+uninitialized W: type W = String
+initialized W: type W = String
+uninitialized C: class C extends 
+initialized C: class C extends D
+uninitialized O: object O
+initialized O: object O
+runtime
+autoinitialized D: class D extends C
+autoinitialized D: class D extends C
+autoinitialized x: val x: Int
+autoinitialized x: val x: Int
+autoinitialized y: lazy val y: Int
+autoinitialized y: lazy val y: Int
+autoinitialized z: def z: Int
+autoinitialized z: def z: Int
+autoinitialized t: def t[T <: Int](x: D)(y: x.W): Int
+autoinitialized t: def t[T <: Int](x: D)(y: x.W): Int
+autoinitialized W: type W = String
+autoinitialized W: type W = String
+autoinitialized C: class C extends D
+autoinitialized C: class C extends D
+autoinitialized O: object O
+autoinitialized O: object O
diff --git a/test/files/run/showdecl/Macros_1.scala b/test/files/run/showdecl/Macros_1.scala
new file mode 100644
index 0000000..c68dd27
--- /dev/null
+++ b/test/files/run/showdecl/Macros_1.scala
@@ -0,0 +1,30 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+object Macros {
+  def impl(c: Context) = {
+    var messages = List[String]()
+    def println(msg: String) = messages :+= msg
+
+    import c.universe._
+    def test(sym: Symbol): Unit = {
+      println(s"uninitialized ${sym.name}: ${showDecl(sym)}")
+      sym.info
+      println(s"initialized ${sym.name}: ${showDecl(sym)}")
+    }
+
+    println("compile-time")
+    test(c.mirror.staticClass("D"))
+    test(c.mirror.staticClass("D").info.member(TermName("x")))
+    test(c.mirror.staticClass("D").info.member(TermName("y")))
+    test(c.mirror.staticClass("D").info.member(TermName("z")))
+    test(c.mirror.staticClass("D").info.member(TermName("t")))
+    test(c.mirror.staticClass("D").info.member(TypeName("W")))
+    test(c.mirror.staticClass("D").info.member(TypeName("C")))
+    test(c.mirror.staticClass("D").info.member(TermName("O")))
+
+    q"..${messages.map(msg => q"println($msg)")}"
+  }
+
+  def foo: Any = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/showdecl/Test_2.scala b/test/files/run/showdecl/Test_2.scala
new file mode 100644
index 0000000..6eb64ba
--- /dev/null
+++ b/test/files/run/showdecl/Test_2.scala
@@ -0,0 +1,32 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+  def test(sym: Symbol): Unit = {
+    println(s"autoinitialized ${sym.name}: ${showDecl(sym)}")
+    sym.info
+    println(s"autoinitialized ${sym.name}: ${showDecl(sym)}")
+  }
+
+  Macros.foo
+  println("runtime")
+  test(symbolOf[D])
+  test(typeOf[D].member(TermName("x")))
+  test(typeOf[D].member(TermName("y")))
+  test(typeOf[D].member(TermName("z")))
+  test(typeOf[D].member(TermName("t")))
+  test(typeOf[D].member(TypeName("W")))
+  test(typeOf[D].member(TypeName("C")))
+  test(typeOf[D].member(TermName("O")))
+}
+
+class C
+class D extends C {
+  val x = 2
+  lazy val y = 3
+  var z = 4
+  def t[T <: Int](x: D)(y: x.W) = 5
+  type W = String
+  class C extends D
+  object O extends C
+}
diff --git a/test/files/run/showraw_aliases.check b/test/files/run/showraw_aliases.check
index aebd354..d6a198b 100644
--- a/test/files/run/showraw_aliases.check
+++ b/test/files/run/showraw_aliases.check
@@ -1,2 +1,2 @@
-Block(List(Import(Select(Select(Ident(scala), scala.reflect), scala.reflect.runtime), List(ImportSelector(newTermName("universe"), <offset>, newTermName("ru"), <offset>)))), Select(Select(Select(Select(Ident(scala), scala.reflect), scala.reflect.runtime), scala.reflect.runtime.package), [newTermName("universe") aka newTermName("ru")]))
-Block(List(Import(Select(Select(Ident(scala#<id>), scala.reflect#<id>), scala.reflect.runtime#<id>), List(ImportSelector(newTermName("universe"), <offset>, newTermName("ru"), <offset>)))), Select(Select(Select(Select(Ident(scala#<id>), scala.reflect#<id>), scala.reflect.runtime#<id>), scala.reflect.runtime.package#<id>), [newTermName("universe")#<id> aka newTermName("ru")]))
+Block(List(Import(Select(Select(Ident(scala), scala.reflect), scala.reflect.runtime), List(ImportSelector(TermName("universe"), <offset>, TermName("ru"), <offset>)))), Select(Select(Select(Select(Ident(scala), scala.reflect), scala.reflect.runtime), scala.reflect.runtime.package), [TermName("universe") aka TermName("ru")]))
+Block(List(Import(Select(Select(Ident(scala#<id>), scala.reflect#<id>), scala.reflect.runtime#<id>), List(ImportSelector(TermName("universe"), <offset>, TermName("ru"), <offset>)))), Select(Select(Select(Select(Ident(scala#<id>), scala.reflect#<id>), scala.reflect.runtime#<id>), scala.reflect.runtime.package#<id>), [TermName("universe")#<id> aka TermName("ru")]))
diff --git a/test/files/run/showraw_aliases.scala b/test/files/run/showraw_aliases.scala
index 65b4fcb..56bd137 100644
--- a/test/files/run/showraw_aliases.scala
+++ b/test/files/run/showraw_aliases.scala
@@ -7,7 +7,7 @@ object Test extends App {
     import scala.reflect.runtime.{universe => ru}
     ru
   """)
-  val ttree = tb.typeCheck(tree)
+  val ttree = tb.typecheck(tree)
 
   def stabilizeIds(s: String) = """#\d+""".r.replaceAllIn(s, "#<id>")
   def stabilizePositions(s: String) = """\d+""".r.replaceAllIn(s, "<offset>")
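
From here on, several showraw_* tests switch from tb.typeCheck to tb.typecheck, the lower-case spelling used by the 2.11 ToolBox API (the camel-cased name appears to survive only as a deprecated alias). A minimal sketch of the call, assuming a standard runtime-reflection toolbox:

    import scala.reflect.runtime.{currentMirror => cm, universe => ru}
    import scala.tools.reflect.ToolBox

    val tb    = cm.mkToolBox()
    val typed = tb.typecheck(ru.reify(List(1, 2, 3).map(_ + 1)).tree)  // was tb.typeCheck(...)
    println(ru.showRaw(typed, printTypes = true))
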
diff --git a/test/files/run/showraw_mods.check b/test/files/run/showraw_mods.check
index 7fca027..4d34160 100644
--- a/test/files/run/showraw_mods.check
+++ b/test/files/run/showraw_mods.check
@@ -1 +1 @@
-Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), newTypeName("C"), List(), Template(List(Ident(newTypeName("AnyRef"))), emptyValDef, List(DefDef(Modifiers(), newTermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), ValDef(Modifiers(PRIVATE | LOCAL), newTermName("x"), TypeTree(), Literal(Constant(2))), ValDef(Modifiers(MUTABLE), newTermName("y"), TypeTree(), Select(This(newTypeName("C")), newTermName("x"))), ValDef(Modifiers(LAZY), newTe [...]
+Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), TypeName("C"), List(), Template(List(Ident(TypeName("AnyRef"))), noSelfType, List(DefDef(Modifiers(), TermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), ValDef(Modifiers(PRIVATE | LOCAL), TermName("x"), TypeTree(), Literal(Constant(2))), ValDef(Modifiers(MUTABLE), TermName("y"), TypeTree(), Select(This(TypeName("C")), TermName("x"))), ValDef(Modifiers(LAZY), TermName("z"), TypeTree(),  [...]
diff --git a/test/files/run/showraw_tree.check b/test/files/run/showraw_tree.check
index b71018d..d8cb1fd 100644
--- a/test/files/run/showraw_tree.check
+++ b/test/files/run/showraw_tree.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Select(Ident(scala.Predef), newTypeName("String")), Select(Ident(scala.Predef), newTypeName("String"))))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Select(Ident(scala.Predef), newTypeName("String")), Select(Ident(scala.Predef), newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Select(Ident(scala.Predef), TypeName("String")), Select(Ident(scala.Predef), TypeName("String"))))), termNames.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Select(Ident(scala.Predef), TypeName("String")), Select(Ident(scala.Predef), TypeName("String"))))), termNames.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_ids.check b/test/files/run/showraw_tree_ids.check
index 5835ffa..d7a7aa5 100644
--- a/test/files/run/showraw_tree_ids.check
+++ b/test/files/run/showraw_tree_ids.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#<id>), List(Select(Ident(scala.Predef#<id>), newTypeName("String")), Select(Ident(scala.Predef#<id>), newTypeName("String"))))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#<id>), List(Select(Ident(scala.Predef#<id>), newTypeName("String")), Select(Ident(scala.Predef#<id>), newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#<id>), List(Select(Ident(scala.Predef#<id>), TypeName("String")), Select(Ident(scala.Predef#<id>), TypeName("String"))))), termNames.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#<id>), List(Select(Ident(scala.Predef#<id>), TypeName("String")), Select(Ident(scala.Predef#<id>), TypeName("String"))))), termNames.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_kinds.check b/test/files/run/showraw_tree_kinds.check
index c4d6685..85939b0 100644
--- a/test/files/run/showraw_tree_kinds.check
+++ b/test/files/run/showraw_tree_kinds.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#CLS), List(Select(Ident(scala.Predef#MOD), newTypeName("String")), Select(Ident(scala.Predef#MOD), newTypeName("String"))))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#CLS), List(Select(Ident(scala.Predef#MOD), newTypeName("String")), Select(Ident(scala.Predef#MOD), newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#CLS), List(Select(Ident(scala.Predef#MOD), TypeName("String")), Select(Ident(scala.Predef#MOD), TypeName("String"))))), termNames.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#CLS), List(Select(Ident(scala.Predef#MOD), TypeName("String")), Select(Ident(scala.Predef#MOD), TypeName("String"))))), termNames.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_types_ids.check b/test/files/run/showraw_tree_types_ids.check
index fccb81d..7534746 100644
--- a/test/files/run/showraw_tree_types_ids.check
+++ b/test/files/run/showraw_tree_types_ids.check
@@ -1,12 +1,12 @@
-Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), newTypeName("String")#<id>)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), newTypeName("String")#<id>)))))), nme.CONSTRUCTOR#<id>), List())
-[1] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List())))
-[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()))))
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), TypeName("String")#<id>)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), TypeName("String")#<id>)))))), termNames.CONSTRUCTOR#<id>), List())
+[1] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()))))
 [3] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List())
-[4] TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List())
+[4] TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List())
 [5] SingleType(ThisType(scala#<id>), scala.Predef#<id>)
-Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), newTypeName("String")#<id>)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), newTypeName("String")#<id>)))))), nme.CONSTRUCTOR#<id>), List())
-[4] TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List())
+Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), TypeName("String")#<id>)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), TypeName("String")#<id>)))))), termNames.CONSTRUCTOR#<id>), List())
+[4] TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List())
 [5] SingleType(ThisType(scala#<id>), scala.Predef#<id>)
-[6] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List())))
-[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()))))
+[6] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List())))
+[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()))))
 [8] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List())
diff --git a/test/files/run/showraw_tree_types_ids.scala b/test/files/run/showraw_tree_types_ids.scala
index 198729e..883af01 100644
--- a/test/files/run/showraw_tree_types_ids.scala
+++ b/test/files/run/showraw_tree_types_ids.scala
@@ -6,6 +6,6 @@ object Test extends App {
   val tree1 = reify(new collection.immutable.HashMap[String, String])
   val tree2 = reify(new collection.mutable.HashMap[String, String])
   def stabilize(s: String) = """#\d+""".r.replaceAllIn(s, "#<id>")
-  println(stabilize(showRaw(tb.typeCheck(tree1.tree), printIds = true, printTypes = true)))
-  println(stabilize(showRaw(tb.typeCheck(tree2.tree), printIds = true, printTypes = true)))
+  println(stabilize(showRaw(tb.typecheck(tree1.tree), printIds = true, printTypes = true)))
+  println(stabilize(showRaw(tb.typecheck(tree2.tree), printIds = true, printTypes = true)))
 }
\ No newline at end of file
diff --git a/test/files/run/showraw_tree_types_typed.check b/test/files/run/showraw_tree_types_typed.check
index f3e0f8c..de691e3 100644
--- a/test/files/run/showraw_tree_types_typed.check
+++ b/test/files/run/showraw_tree_types_typed.check
@@ -1,12 +1,12 @@
-Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), newTypeName("String"))), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), newTypeName("String"))))))), nme.CONSTRUCTOR), List())
-[1] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())))
-[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()))))
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))))))), termNames.CONSTRUCTOR), List())
+[1] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()))))
 [3] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List())
-[4] TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())
+[4] TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())
 [5] SingleType(ThisType(scala), scala.Predef)
-Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), newTypeName("String"))), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), newTypeName("String"))))))), nme.CONSTRUCTOR), List())
-[4] TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())
+Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))))))), termNames.CONSTRUCTOR), List())
+[4] TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())
 [5] SingleType(ThisType(scala), scala.Predef)
-[6] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())))
-[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()))))
+[6] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())))
+[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()))))
 [8] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List())
diff --git a/test/files/run/showraw_tree_types_typed.scala b/test/files/run/showraw_tree_types_typed.scala
index d7ccc84..3dd696c 100644
--- a/test/files/run/showraw_tree_types_typed.scala
+++ b/test/files/run/showraw_tree_types_typed.scala
@@ -5,6 +5,6 @@ object Test extends App {
   val tb = runtimeMirror(getClass.getClassLoader).mkToolBox()
   val tree1 = reify(new collection.immutable.HashMap[String, String])
   val tree2 = reify(new collection.mutable.HashMap[String, String])
-  println(showRaw(tb.typeCheck(tree1.tree), printTypes = true))
-  println(showRaw(tb.typeCheck(tree2.tree), printTypes = true))
+  println(showRaw(tb.typecheck(tree1.tree), printTypes = true))
+  println(showRaw(tb.typecheck(tree2.tree), printTypes = true))
 }
\ No newline at end of file
diff --git a/test/files/run/showraw_tree_types_untyped.check b/test/files/run/showraw_tree_types_untyped.check
index b71018d..d8cb1fd 100644
--- a/test/files/run/showraw_tree_types_untyped.check
+++ b/test/files/run/showraw_tree_types_untyped.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Select(Ident(scala.Predef), newTypeName("String")), Select(Ident(scala.Predef), newTypeName("String"))))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Select(Ident(scala.Predef), newTypeName("String")), Select(Ident(scala.Predef), newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Select(Ident(scala.Predef), TypeName("String")), Select(Ident(scala.Predef), TypeName("String"))))), termNames.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Select(Ident(scala.Predef), TypeName("String")), Select(Ident(scala.Predef), TypeName("String"))))), termNames.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_ultimate.check b/test/files/run/showraw_tree_ultimate.check
index a6286ba..81efcc0 100644
--- a/test/files/run/showraw_tree_ultimate.check
+++ b/test/files/run/showraw_tree_ultimate.check
@@ -1,12 +1,12 @@
-Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE)))))), nme.CONSTRUCTOR#<id>#PCTOR), List())
-[1] TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List())))
-[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()))))
-[3] TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List())
-[4] TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List())
-[5] SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD)
-Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE)))))), nme.CONSTRUCTOR#<id>#CTOR), List())
-[4] TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List())
-[5] SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD)
-[6] TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List())))
-[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()))))
-[8] TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List())
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE)))))), termNames.CONSTRUCTOR#<id>#PCTOR), List())
+[1] TypeRef(ThisType(scala.collection.immutable#<id>#PKC), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>#PKC), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()))))
+[3] TypeRef(ThisType(scala.collection.immutable#<id>#PKC), scala.collection.immutable.HashMap#<id>#CLS, List())
+[4] TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List())
+[5] SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD)
+Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE)))))), termNames.CONSTRUCTOR#<id>#CTOR), List())
+[4] TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List())
+[5] SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD)
+[6] TypeRef(ThisType(scala.collection.mutable#<id>#PKC), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List())))
+[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>#PKC), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()))))
+[8] TypeRef(ThisType(scala.collection.mutable#<id>#PKC), scala.collection.mutable.HashMap#<id>#CLS, List())
diff --git a/test/files/run/showraw_tree_ultimate.scala b/test/files/run/showraw_tree_ultimate.scala
index a850762..e0d36e6 100644
--- a/test/files/run/showraw_tree_ultimate.scala
+++ b/test/files/run/showraw_tree_ultimate.scala
@@ -6,6 +6,6 @@ object Test extends App {
   val tree1 = reify(new collection.immutable.HashMap[String, String])
   val tree2 = reify(new collection.mutable.HashMap[String, String])
   def stabilize(s: String) = """#\d+""".r.replaceAllIn(s, "#<id>")
-  println(stabilize(showRaw(tb.typeCheck(tree1.tree), printIds = true, printKinds = true, printTypes = true)))
-  println(stabilize(showRaw(tb.typeCheck(tree2.tree), printIds = true, printKinds = true, printTypes = true)))
+  println(stabilize(showRaw(tb.typecheck(tree1.tree), printIds = true, printKinds = true, printTypes = true)))
+  println(stabilize(showRaw(tb.typecheck(tree2.tree), printIds = true, printKinds = true, printTypes = true)))
 }
\ No newline at end of file
diff --git a/test/files/run/si5045.scala b/test/files/run/si5045.scala
deleted file mode 100644
index e198b10..0000000
--- a/test/files/run/si5045.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-object Test extends App {
-
- import scala.util.matching.{ Regex, UnanchoredRegex }
-
- val dateP1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r.unanchored
- val dateP2 = """(\d\d\d\d)-(\d\d)-(\d\d)""" r ("year", "month", "day") unanchored
- val dateP3 =  new Regex("""(\d\d\d\d)-(\d\d)-(\d\d)""", "year", "month", "day") with UnanchoredRegex
-
- val yearStr = "2011"
- val dateStr = List(yearStr,"07","15").mkString("-")
-
-  def test(msg: String)(strs: Seq[String]): Unit = println("%40s  %s".format(msg, strs mkString " "))
-
-  test("extract an exact match") {
-    val dateP1(y,m,d) = dateStr
-    Seq(List(y,m,d).mkString("-"), dateStr)
-  }
-
-  test("extract from middle of string") {
-    val dateP1(y,m,d) = "Tested on "+dateStr+"."
-    Seq(List(y,m,d).mkString("-"), dateStr)
-  }
-
-  test("extract from middle of string (P2)") {
-    val dateP2(y,m,d) = "Tested on "+dateStr+"."
-    Seq(List(y,m,d).mkString("-"), dateStr)
-  }
-
-  test("extract from middle of string (P3)") {
-    val dateP2(y,m,d) = "Tested on "+dateStr+"."
-    Seq(List(y,m,d).mkString("-"), dateStr)
-  }
-
-  def copyright(in: String): String = in match {
-    case dateP1(year, month, day) => "Copyright "+year
-    case _                        => "No copyright"
-  }
-
-  test("copyright example has date") {
-    Seq(copyright("Date of this document: "+dateStr), "Copyright "+yearStr)
-  }
-
-  test("copyright example missing date") {
-    Seq(copyright("Date of this document: unknown"), "No copyright")
-  }
-}
diff --git a/test/files/run/slice-strings.scala b/test/files/run/slice-strings.scala
index 2308586..1293143 100644
--- a/test/files/run/slice-strings.scala
+++ b/test/files/run/slice-strings.scala
@@ -1,7 +1,7 @@
-object Test {  
+object Test {
   def cmp(x1: String) = {
     val x2 = x1.toList
-    
+
     -10 to 10 foreach { i =>
       assert(x1.take(i) == x2.take(i).mkString)
       assert(x1.drop(i) == x2.drop(i).mkString)
@@ -12,7 +12,7 @@ object Test {
       assert(x1.slice(idx1, idx2) == x2.slice(idx1, idx2).mkString)
     }
   }
-  
+
   def main(args: Array[String]): Unit = {
      cmp("abcde")
   }
diff --git a/test/files/run/slices.scala b/test/files/run/slices.scala
index e31ea40..107b8e6 100644
--- a/test/files/run/slices.scala
+++ b/test/files/run/slices.scala
@@ -1,3 +1,6 @@
+
+import scala.language.postfixOps
+
 object Test extends App {
 
   // lists
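
slices.scala now imports scala.language.postfixOps because it calls methods in postfix position (no dot, no argument list); that syntax is feature-gated, so without the import the compiler emits a feature warning. A minimal sketch of the trade-off, reusing the postfix line that stream_flatmap_odds.scala below rewrites:

    import scala.language.postfixOps   // or compile with -language:postfixOps

    lazy val odds: Stream[Int] = Stream(1) append (odds flatMap { x => Stream(x + 2) })
    println(odds take 42 force)        // postfix `force`; warns without the import
    println((odds take 42).force)      // the dotted form the cleanups below prefer
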
diff --git a/test/files/run/sm-interpolator.scala b/test/files/run/sm-interpolator.scala
index 7f7b9f0..b99daa2 100644
--- a/test/files/run/sm-interpolator.scala
+++ b/test/files/run/sm-interpolator.scala
@@ -1,14 +1,14 @@
 object Test extends App {
   import scala.reflect.internal.util.StringContextStripMarginOps
-  def check(actual: Any, expected: Any) = if (actual != expected) sys.error(s"expected: [$expected], actual: [$actual])")
+  def check(actual: Any, expected: Any) = if (actual != expected) sys.error(s"\nexpected:\n$expected\n\nactual:\n$actual")
 
   val bar = "|\n ||"
 
   check(
-    sm"""|ab  
+    sm"""|ab
          |de
          |${bar} | ${1}""",
-      "ab  \nde\n|\n || | 1")
+      "ab\nde\n|\n || | 1")
 
   check(
     sm"|",
diff --git a/test/files/run/spec-nlreturn.scala b/test/files/run/spec-nlreturn.scala
index ec5e722..5ab1747 100644
--- a/test/files/run/spec-nlreturn.scala
+++ b/test/files/run/spec-nlreturn.scala
@@ -1,10 +1,11 @@
+
 object Test {
   def f(): Int = {
     try {
-      val g = 1 to 10 map { i => return 16 ; i } sum;
+      val g = (1 to 10 map { i => return 16 ; i }).sum
       g
     }
-    catch { case x: runtime.NonLocalReturnControl[_] => 
+    catch { case x: runtime.NonLocalReturnControl[_] =>
       println(x.getClass.getName)
       x.value.asInstanceOf[Int]
     }
diff --git a/test/files/run/eta-expand-star2.check b/test/files/run/static-module-method.check
similarity index 100%
copy from test/files/run/eta-expand-star2.check
copy to test/files/run/static-module-method.check
diff --git a/test/files/run/static-module-method.scala b/test/files/run/static-module-method.scala
new file mode 100644
index 0000000..a869130
--- /dev/null
+++ b/test/files/run/static-module-method.scala
@@ -0,0 +1,14 @@
+// During development of delayed delambdafy there was a problem where
+// GenASM would eliminate a loadmodule for all methods defined within that module
+// even if those methods were static. This test would thus fail
+// with a verify error under -Ydelambdafy:method
+
+object Test {
+  def moduleMethod(x: String) = x
+
+  def map(x: String, f: String => String) = f(x)
+
+  def main(args: Array[String]) {
+     println(map("hello", Test.moduleMethod))
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/streamWithFilter.scala b/test/files/run/streamWithFilter.scala
index 7f8f9a0..cb919d4 100644
--- a/test/files/run/streamWithFilter.scala
+++ b/test/files/run/streamWithFilter.scala
@@ -4,7 +4,7 @@ object Test {
   def isBuzz(x: Int) = x % 5 == 0
   // next line will run forever if withFilter isn't doing its thing.
   val fizzbuzzes = for (n <- nums ; if isFizz(n) ; if isBuzz(n)) yield n
-  
+
   def main(args: Array[String]): Unit = {
     fizzbuzzes take 5 foreach println
   }
diff --git a/test/files/run/stream_flatmap_odds.scala b/test/files/run/stream_flatmap_odds.scala
index 6fb202c..1935253 100644
--- a/test/files/run/stream_flatmap_odds.scala
+++ b/test/files/run/stream_flatmap_odds.scala
@@ -1,4 +1,4 @@
 object Test extends App {
 	lazy val odds: Stream[Int] = Stream(1) append ( odds flatMap {x => Stream(x + 2)} )
-	println(odds take 42 force)
+	Console println (odds take 42).force
 }
diff --git a/test/files/run/stream_length.check b/test/files/run/stream_length.check
index 9906de7..e4350aa 100644
--- a/test/files/run/stream_length.check
+++ b/test/files/run/stream_length.check
@@ -1 +1,5 @@
+#partest !avian
 Length: 970299
+#partest avian
+!!!TEST SKIPPED!!!
+See SI-7600 for further information.
diff --git a/test/files/run/stream_length.scala b/test/files/run/stream_length.scala
index 2808fbc..33929f4 100644
--- a/test/files/run/stream_length.scala
+++ b/test/files/run/stream_length.scala
@@ -10,6 +10,10 @@ object Test {
   }
 
   def main(args: Array[String]) {
-    println("Length: " + walk(3, "---").length)
+    if (scala.tools.partest.utils.Properties.isAvian) {
+      println("!!!TEST SKIPPED!!!")
+      println("See SI-7600 for further information.")
+    } else
+      println("Length: " + walk(3, "---").length)
   }
 }
diff --git a/test/files/run/streams.scala b/test/files/run/streams.scala
index 03b2622..350e103 100644
--- a/test/files/run/streams.scala
+++ b/test/files/run/streams.scala
@@ -41,7 +41,7 @@ object Test extends App {
   def powers(x: Int) = if ((x&(x-1)) == 0) Some(x) else None
   println(s3.flatMap(powers).reverse.head)
 
-  // large enough to generate StackOverflows (on most systems) 
+  // large enough to generate StackOverflows (on most systems)
   // unless the following methods are tail call optimized.
   val size = 100000
 
diff --git a/test/files/run/string-extractor.check b/test/files/run/string-extractor.check
new file mode 100644
index 0000000..47f3722
--- /dev/null
+++ b/test/files/run/string-extractor.check
@@ -0,0 +1,9 @@
+by
+BY
+oTheClown
+nope
+1: ob
+2: obby
+2: OBBY
+3: BOBO
+3: TomTomTheClown
diff --git a/test/files/run/string-extractor.scala b/test/files/run/string-extractor.scala
new file mode 100644
index 0000000..c0fe911
--- /dev/null
+++ b/test/files/run/string-extractor.scala
@@ -0,0 +1,60 @@
+final class StringExtract(val s: String) extends AnyVal {
+  def isEmpty                     = (s eq null) || (s == "")
+  def get                         = this
+  def length                      = s.length
+  def lengthCompare(n: Int)       = s.length compare n
+  def apply(idx: Int): Char       = s charAt idx
+  def head: Char                  = s charAt 0
+  def tail: String                = s drop 1
+  def drop(n: Int): StringExtract = new StringExtract(s drop n)
+
+  override def toString = s
+}
+
+final class ThreeStringExtract(val s: String) extends AnyVal {
+  def isEmpty                                      = (s eq null) || (s == "")
+  def get: (List[Int], Double, ThreeStringExtract) = ((s.length :: Nil, s.length.toDouble, this))
+  def length                                       = s.length
+  def lengthCompare(n: Int)                        = s.length compare n
+  def apply(idx: Int): Char                        = s charAt idx
+  def head: Char                                   = s charAt 0
+  def tail: String                                 = s drop 1
+  def drop(n: Int): ThreeStringExtract             = new ThreeStringExtract(s drop n)
+
+  override def toString = s
+}
+
+
+object Bippy {
+  def unapplySeq(x: Any): StringExtract = new StringExtract("" + x)
+}
+object TripleBippy {
+  def unapplySeq(x: Any): ThreeStringExtract = new ThreeStringExtract("" + x)
+}
+
+object Test {
+  def f(x: Any) = x match {
+    case Bippy('B' | 'b', 'O' | 'o', 'B' | 'b', xs @ _*) => xs
+    case _                                               => "nope"
+  }
+
+  def g(x: Any): String = x match {
+    case TripleBippy(3 :: Nil, 3.0, 'b', chars @ _*)       => "1: " + chars
+    case TripleBippy(5 :: Nil, 5.0, 'b' | 'B', chars @ _*) => "2: " + chars
+    case TripleBippy(_, _, chars @ _*)                     => "3: " + chars
+    case _                                                 => "nope"
+  }
+
+  def main(args: Array[String]): Unit = {
+    println(f("Bobby"))
+    println(f("BOBBY"))
+    println(f("BoBoTheClown"))
+    println(f("TomTomTheClown"))
+
+    println(g("bob"))
+    println(g("bobby"))
+    println(g("BOBBY"))
+    println(g("BOBO"))
+    println(g("TomTomTheClown"))
+  }
+}
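
The new string-extractor test exercises the name-based extractor protocol of the 2.11 pattern matcher: unapplySeq returns a plain value class instead of an Option, and the match only relies on it exposing isEmpty, get and the sequence methods (lengthCompare, apply, drop) defined above. A minimal sketch of the simpler, non-sequence form, assuming the same protocol:

    final class NonEmpty(val s: String) extends AnyVal {
      def isEmpty = (s eq null) || (s == "")
      def get     = s
    }
    object NonEmpty {
      def unapply(x: String): NonEmpty = new NonEmpty(x)   // no Option allocation
    }

    def greet(x: String) = x match {
      case NonEmpty(s) => s"hello, $s"
      case _           => "nobody there"
    }
    // greet("bob") == "hello, bob";  greet("") == "nobody there"
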
diff --git a/test/files/run/stringbuilder-drop.scala b/test/files/run/stringbuilder-drop.scala
index a9e5a71..422fb2b 100644
--- a/test/files/run/stringbuilder-drop.scala
+++ b/test/files/run/stringbuilder-drop.scala
@@ -1,7 +1,7 @@
 object Test {
   def main(args: Array[String]): Unit = {
-    val s = (new StringBuilder ++= "hello world") dropRight 1 toString;
-    assert(s == "hello worl")
+    val s = (new StringBuilder ++= "hello world") dropRight 1
+    assert("" + s == "hello worl")
   }
 }
 
diff --git a/test/files/run/stringbuilder.scala b/test/files/run/stringbuilder.scala
index ef85fc0..a98f9cf 100644
--- a/test/files/run/stringbuilder.scala
+++ b/test/files/run/stringbuilder.scala
@@ -1,21 +1,24 @@
+
+import scala.language.reflectiveCalls
+
 object Test extends App {
   val str = "ABCDEFGHIJKLMABCDEFGHIJKLM"
   val surrogateStr = "an old Turkic letter: \uD803\uDC22"
-  
-  type SB = { 
+
+  type SB = {
     def indexOf(str: String): Int
     def indexOf(str: String, fromIndex: Int): Int
     def lastIndexOf(str: String): Int
     def lastIndexOf(str: String, fromIndex: Int): Int
   }
-  
+
   import scala.collection.mutable.{ StringBuilder => ScalaStringBuilder }
   import java.lang.{ StringBuilder => JavaStringBuilder }
-  
+
   val sbScala = new ScalaStringBuilder() append str
   val sbJava = new JavaStringBuilder() append str
   val sbs: List[SB] = List[SB](sbScala, sbJava)
-  
+
   def sameAnswers(f: (SB) => Int) = assert(f(sbScala) == f(sbJava))
 
   sameAnswers(_.indexOf(""))
@@ -31,10 +34,10 @@ object Test extends App {
   sameAnswers(_.lastIndexOf("QZV"))
   sameAnswers(_.lastIndexOf("GHI", 22))
   sameAnswers(_.lastIndexOf("KLM", 22))
-  
+
   // testing that the "reverse" implementation avoids reversing surrogate pairs
-  val jsb = new JavaStringBuilder(surrogateStr) reverse
-  val ssb = new ScalaStringBuilder(surrogateStr) reverseContents ;
-  
+  val jsb = new JavaStringBuilder(surrogateStr).reverse
+  val ssb = new ScalaStringBuilder(surrogateStr).reverseContents
+
   assert(jsb.toString == ssb.toString)
 }
diff --git a/test/files/run/stringinterpolation_macro-run.check b/test/files/run/stringinterpolation_macro-run.check
index be62c57..ead61e7 100644
--- a/test/files/run/stringinterpolation_macro-run.check
+++ b/test/files/run/stringinterpolation_macro-run.check
@@ -46,6 +46,8 @@ S
 120
 120
 120
+       0X4
+She is 4 feet tall.
 120
 42
 3.400000e+00
@@ -60,3 +62,6 @@ S
 05/26/12
 05/26/12
 05/26/12
+%
+7 7 9
+7 9 9
diff --git a/test/files/run/stringinterpolation_macro-run.scala b/test/files/run/stringinterpolation_macro-run.scala
index 9c59c33..ff779dd 100644
--- a/test/files/run/stringinterpolation_macro-run.scala
+++ b/test/files/run/stringinterpolation_macro-run.scala
@@ -1,3 +1,6 @@
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
 object Test extends App {
 
 // 'b' / 'B' (category: general)
@@ -69,6 +72,14 @@ println(f"${120 : java.lang.Integer}%d")
 println(f"${120 : java.lang.Long}%d")
 println(f"${BigInt(120)}%d")
 println(f"${new java.math.BigInteger("120")}%d")
+println(f"${4}%#10X")
+
+locally {
+  val fff  = new java.util.Formattable {
+    def formatTo(f: java.util.Formatter, g: Int, w: Int, p: Int) = f.format("4")
+  }
+  println(f"She is ${fff}%#s feet tall.")
+}
 
 {
   implicit val strToShort = (s: String) => java.lang.Short.parseShort(s)
@@ -100,4 +111,11 @@ println(f"${c.getTime.getTime}%TD")
 
 implicit val strToDate = (x: String) => c
 println(f"""${"1234"}%TD""")
+
+
+// literals and arg indexes
+println(f"%%")
+println(f"${7}%d %<d ${9}%d")
+println(f"${7}%d %2$$d ${9}%d")
+
 }
diff --git a/test/files/run/structural.scala b/test/files/run/structural.scala
index 36af8c4..7da104c 100644
--- a/test/files/run/structural.scala
+++ b/test/files/run/structural.scala
@@ -1,18 +1,21 @@
+
+import scala.language.{ reflectiveCalls }
+
 object test1 {
-  
+
   val o1 = new Object { override def toString = "ohone" }
   val o2 = new Object { override def toString = "ohtwo" }
-  
+
   val t1 = new Tata("tieone")
   val t2 = new Tata("tietwo")
-  
+
   class Tata(name: String) {
     override def toString = name
     def tatMe = "oy"
   }
-  
+
   class Titi extends Tata("titi")
-  
+
   object Rec {
     val a = 1
     val b = 2
@@ -41,7 +44,7 @@ object test1 {
     val y: Tata = null
     def z(t: Tata) = ()
   }
-  
+
   type rt = Object {
     val a: Int;
     val c: String;
@@ -65,7 +68,7 @@ object test1 {
     var v: Int
     val y: Tata
   }
-  
+
   def l (r: rt) {
     println(" 1. " + r.c)
     println(" 2. " + r.a + 1)
@@ -94,33 +97,33 @@ object test1 {
     println("25. " + r.y)
     println("26. " + r.e(null))
   }
-  
+
   /*def ma[T](r: Object{def e(x: T): T; val x: T}) {
     println("30. " + r.e(r.x)) // static error
   }*/
-  
+
   def mb(r: Object { def e[T](x: T): T }) {
     println("31. " + r.e[Int](4)) // while this is ok
   }
-  
+
   def m1(r: Object { def z(x: Tata): Unit }) {
     println("32. " + r.z(new Titi)) // while this is ok
   }
-  
+
   def m2[T](r: Object { def e(x: Tata): T; val x: Tata }) {
     println("33. " + r.e(r.x)) // and this too
   }
-  
+
   class Rec3[T] {
     def e(x: T): T = x
   }
-  
+
   def m3[T](r: Rec3[T], x: T) {
     println("33. " + r.e(x)) // and this too
   }
-  
+
   Rec.g(11)
-  
+
   this.l(Rec)
   this.mb(new Object{def e[T](x: T): T = x})
   this.m1(Rec)
@@ -132,7 +135,7 @@ object test2 {
   class C extends { def f() { println("1") } }
   val x1 = new C
   x1.f()
-  
+
   abstract class D extends { def f() }
   val x2 = new D { def f() { println("2") } }
   x2.f()
@@ -152,45 +155,45 @@ object test2 {
 
 object test3 {
 
-  case class Exc extends Exception
-  
+  case class Exc() extends Exception
+
   object Rec {
     def f = throw Exc()
   }
-  
+
   def m(r: { def f: Nothing }) =
     try {
       r.f
     }
     catch {
       case e: Exc => println("caught")
-      case e => println(e)
+      case e: Throwable => println(e)
     }
-  
+
   m(Rec)
-  
+
 }
 
 object test4 {
 
   class A
-  
+
   val aar = Array(new A, new A, new A)
   val nar = Array(1, 2)
-  
+
   def f(p: {def size: Int}) = println(p.size)
   //def g[T <: {def size: Int}](p: T) = println(p.size) // open issue
   //def h[T <% {def size: Int}](p: T) = println(p.size) // open issue
-  
+
   f(aar)
   f(nar)
-  
+
   //g(aar)
   //g(nar)
-  
+
   //h(aar)
   //h(nar)
-  
+
 }
 
 object Test extends App {
diff --git a/test/files/run/synchronized.check b/test/files/run/synchronized.check
index dd9f4ef..6e99739 100644
--- a/test/files/run/synchronized.check
+++ b/test/files/run/synchronized.check
@@ -1,3 +1,4 @@
+warning: there were 14 inliner warning(s); re-run with -Yinline-warnings for details
     .|.               c1.f1:     OK
     .|.               c1.fi:     OK
     .|...             c1.fv:     OK
diff --git a/test/files/run/synchronized.flags b/test/files/run/synchronized.flags
index 1182725..49d036a 100644
--- a/test/files/run/synchronized.flags
+++ b/test/files/run/synchronized.flags
@@ -1 +1 @@
--optimize
\ No newline at end of file
+-optimize
diff --git a/test/files/run/sysprops.scala b/test/files/run/sysprops.scala
index 4d98e2c..bdad677 100644
--- a/test/files/run/sysprops.scala
+++ b/test/files/run/sysprops.scala
@@ -3,16 +3,16 @@ import sys._
 /** Basic sys.Prop test. */
 object Test {
   val key = "ding.dong.doobie"
-  
+
   def bool() = {
     val prop = BooleanProp.valueIsTrue(key)
     assert(prop.key == key)
-  
+
     prop.clear()
     assert(!prop.value)
     assert(!prop.isSet)
     assert(prop.get != null)
-  
+
     prop set "dingus"
     assert(prop.get == "dingus")
     assert(!prop.value)
@@ -32,7 +32,7 @@ object Test {
     prop.set("523")
     assert(prop.value == 523)
     prop.set("DingusInt")
-    
+
     try { println(prop.value) ; assert(false, "should not get here") }
     catch { case _: Exception => () }
   }
@@ -41,7 +41,7 @@ object Test {
     prop.set("55.0")
     assert(prop.value == 55.0)
   }
-  
+
   def main(args: Array[String]): Unit = {
     bool()
     int()
diff --git a/test/files/run/t0017.scala b/test/files/run/t0017.scala
index e976f45..245cbb7 100644
--- a/test/files/run/t0017.scala
+++ b/test/files/run/t0017.scala
@@ -7,7 +7,7 @@ def transpose[A](arr: Array[Array[A]]) = {
 
 var my_arr = Array(Array(1,2),Array(3,4))
 
-for (i <- Array.range(0, my_arr(0).length)) yield 
+for (i <- Array.range(0, my_arr(0).length)) yield
   for (row <- my_arr) yield row(i)
 
 val transposed = transpose(my_arr)
diff --git a/test/files/run/t0091.check b/test/files/run/t0091.check
index 7ed6ff8..fd3c81a 100644
--- a/test/files/run/t0091.check
+++ b/test/files/run/t0091.check
@@ -1 +1,2 @@
 5
+5
diff --git a/test/files/run/t0091.scala b/test/files/run/t0091.scala
index eaddde0..45235eb 100644
--- a/test/files/run/t0091.scala
+++ b/test/files/run/t0091.scala
@@ -4,10 +4,13 @@ object C extends B {
   object m extends A { def x = 5 }
 }
 object Test {
-    // The type annotation here is necessary, otherwise
-    // the compiler would reference C$m$ directly.
-    def o : B = C
-    def main(argv : Array[String]) : Unit = {
-        println(o.m.x)
-    }
+  // The type annotation here is necessary, otherwise
+  // the compiler would reference C$m$ directly.
+  def o1 : B = C
+  def o2 = C
+
+  def main(argv : Array[String]) : Unit = {
+    println(o1.m.x)
+    println(o2.m.x)
+  }
 }
diff --git a/test/files/run/t0325.scala b/test/files/run/t0325.scala
index 92331ab..a126a3a 100644
--- a/test/files/run/t0325.scala
+++ b/test/files/run/t0325.scala
@@ -7,7 +7,7 @@ case class RS(self: String) {
   }
 
   def split(separator: Char): Array[String] = self.split(escape(separator))
-	
+
   def split(separators: Array[Char]): Array[String] = {
     val re = separators.foldLeft("[")(_+escape(_)) + "]"
     self.split(re)
@@ -24,10 +24,10 @@ object Test {
       else
         println(ret)
     } catch {
-      case e@_ => println(which + " failed with " + e.getClass)
+      case e: Throwable => println(which + " failed with " + e.getClass)
     }
   }
-     
+
   def main(args: Array[String]) {
     val badChars = "?*{+([\\^.$"
 
@@ -46,8 +46,8 @@ object Test {
     for ((c,str) <- badCases)
       test(("a"+c+"b").split(str.toArray),"RichString split(\""+ str + "\")")
     println
-   
+
     for ((c,str) <- badCases)
-      test(RS("a"+c+"b").split(str.toArray),"RS split(\""+ str + "\")")   
+      test(RS("a"+c+"b").split(str.toArray),"RS split(\""+ str + "\")")
   }
 }
diff --git a/test/files/run/t0421-old.scala b/test/files/run/t0421-old.scala
index 8d51013..dde89bc 100644
--- a/test/files/run/t0421-old.scala
+++ b/test/files/run/t0421-old.scala
@@ -1,4 +1,6 @@
 // ticket #421
+
+@deprecated("Suppress warnings", since="2.11")
 object Test extends App {
 
   def transpose[A: ClassManifest](xss: Array[Array[A]]) = {
@@ -7,17 +9,17 @@ object Test extends App {
   }
 
   def scalprod(xs: Array[Double], ys: Array[Double]) = {
-    var acc = 0.0 
-    for ((x, y) <- xs zip ys) acc = acc + x * y  
+    var acc = 0.0
+    for ((x, y) <- xs zip ys) acc = acc + x * y
     acc
   }
 
   def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = {
-    val ysst = transpose(yss) 
+    val ysst = transpose(yss)
     val ysst1: Array[Array[Double]] = yss.transpose
     assert(ysst.deep == ysst1.deep)
     for (xs <- xss) yield
-      for (yst <- ysst) yield 
+      for (yst <- ysst) yield
         scalprod(xs, yst)
   }
 
@@ -25,6 +27,6 @@ object Test extends App {
   println(transpose(a1).deep.mkString("[", ",", "]"))
 
   println(matmul(Array(Array(2, 3)), Array(Array(5), Array(7))).deep.mkString("[", ",", "]"))
-  
+
   println(matmul(Array(Array(4)), Array(Array(6, 8))).deep.mkString("[", ",", "]"))
 }
diff --git a/test/files/run/t0432.scala b/test/files/run/t0432.scala
index 8ba9015..b860a08 100644
--- a/test/files/run/t0432.scala
+++ b/test/files/run/t0432.scala
@@ -1,3 +1,6 @@
+
+import scala.language.reflectiveCalls
+
 object Test {
   type valueType = { def value: this.type }
 
diff --git a/test/files/run/t0486.check b/test/files/run/t0486.check
deleted file mode 100644
index dd1ec28..0000000
--- a/test/files/run/t0486.check
+++ /dev/null
@@ -1,8 +0,0 @@
-<wsdl:definitions name="service1" xmlns:tns="target1">
-    </wsdl:definitions>
-<wsdl:definitions name="service2" xmlns:tns="target2">
-    </wsdl:definitions>
-<wsdl:definitions name="service3" xmlns:tns="target3">
-    </wsdl:definitions>
-<wsdl:definitions name="service4" xmlns:tns="target4">
-    </wsdl:definitions>
diff --git a/test/files/run/t0486.scala b/test/files/run/t0486.scala
deleted file mode 100644
index d3ed8f4..0000000
--- a/test/files/run/t0486.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-object Test extends App {
-  import scala.xml._
-
-  def wsdlTemplate1(serviceName: String): Node =
-    <wsdl:definitions name={serviceName} xmlns:tns = { "target1" } >
-    </wsdl:definitions>;
-
-  def wsdlTemplate2(serviceName: String, targetNamespace: String): Node =
-    <wsdl:definitions name={serviceName} xmlns:tns = { targetNamespace } >
-    </wsdl:definitions>;
-
-  def wsdlTemplate3(serviceName: String): Node =
-    <wsdl:definitions name={serviceName} xmlns:tns = { Text("target3") } >
-    </wsdl:definitions>;
-
-  def wsdlTemplate4(serviceName: String, targetNamespace: () => String): Node =
-    <wsdl:definitions name={serviceName} xmlns:tns = { targetNamespace() } >
-    </wsdl:definitions>;
-
-  println(wsdlTemplate1("service1"))
-  println(wsdlTemplate2("service2", "target2"))
-  println(wsdlTemplate3("service3"))
-  println(wsdlTemplate4("service4", () => "target4"))
-}
diff --git a/test/files/run/t0508.scala b/test/files/run/t0508.scala
index 0f4325e..2283c46 100644
--- a/test/files/run/t0508.scala
+++ b/test/files/run/t0508.scala
@@ -9,5 +9,5 @@ object Test extends App {
     }
   }
 
-  foo(Foo.unapply, Foo("this might be fun", 10)) 
+  foo(Foo.unapply, Foo("this might be fun", 10))
 }
diff --git a/test/files/run/t0528.scala b/test/files/run/t0528.scala
index a76f602..68a9975 100644
--- a/test/files/run/t0528.scala
+++ b/test/files/run/t0528.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ existentials }
 trait Sequ[A] {
   def toArray: Array[T forSome {type T <: A}]
 }
diff --git a/test/files/run/t0631.scala b/test/files/run/t0631.scala
index 5bceab6..c401ed3 100644
--- a/test/files/run/t0631.scala
+++ b/test/files/run/t0631.scala
@@ -1,5 +1,5 @@
 object Test extends App {
-  class Foo { 
+  class Foo {
     override def equals(that: Any) = {
       println("Foo.equals called")
       super.equals(that)
diff --git a/test/files/run/t0663.check b/test/files/run/t0663.check
deleted file mode 100755
index dd9be2a..0000000
--- a/test/files/run/t0663.check
+++ /dev/null
@@ -1 +0,0 @@
-<feed/>
diff --git a/test/files/run/t0663.scala b/test/files/run/t0663.scala
deleted file mode 100644
index dd0326d..0000000
--- a/test/files/run/t0663.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends App {
-  val src = scala.io.Source.fromString("<?xml version='1.0' encoding='UTF-8'?><feed/>")
-  val parser = xml.parsing.ConstructingParser.fromSource(src, true)
-  println(parser.document)
-}
-
diff --git a/test/files/run/t0668.check b/test/files/run/t0668.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t0677-old.scala b/test/files/run/t0677-old.scala
index 6c8a3a7..8d4c3ee 100644
--- a/test/files/run/t0677-old.scala
+++ b/test/files/run/t0677-old.scala
@@ -1,5 +1,8 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
 object Test extends App {
-  class X[T: ClassManifest] { 
+  class X[T: ClassManifest] {
     val a = Array.ofDim[T](3, 4)
   }
   val x = new X[String]
diff --git a/test/files/run/t0700.check b/test/files/run/t0700.check
deleted file mode 100644
index b4eabba..0000000
--- a/test/files/run/t0700.check
+++ /dev/null
@@ -1,2 +0,0 @@
-[3.2] parsed: List(2, 2, 2)
-[3.2] parsed: List(2, 2, 2)
diff --git a/test/files/run/t0700.scala b/test/files/run/t0700.scala
deleted file mode 100644
index 5a71805..0000000
--- a/test/files/run/t0700.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-import java.io.{File,StringReader}
-
-import scala.util.parsing.combinator.Parsers
-import scala.util.parsing.input.{CharArrayReader, StreamReader}
-
-class TestParsers extends Parsers {
-  type Elem = Char
-
-  def p: Parser[List[Int]] = rep(p1 | p2)
-  def p1: Parser[Int] = 'a' ~ nl ~ 'b' ~ nl ^^^ 1
-  def p2: Parser[Int] = 'a' ~ nl ^^^ 2
-  def nl: Parser[Int] = rep(accept('\n') | accept('\r')) ^^^ 0
-}
-
-object Test {
-  def main(args: Array[String]): Unit = {
-    val tstParsers = new TestParsers
-    val s = "a\na\na"
-    val r1 = new CharArrayReader(s.toCharArray())
-    val r2 = StreamReader(new StringReader(s))
-    println(tstParsers.p(r1))
-    println(tstParsers.p(r2))
-  }
-}
diff --git a/test/files/run/t0807.scala b/test/files/run/t0807.scala
index 1a1add6..1e2a266 100644
--- a/test/files/run/t0807.scala
+++ b/test/files/run/t0807.scala
@@ -1,5 +1,5 @@
 trait A
-trait B extends A { val x = println("early") } 
+trait B extends A { val x = println("early") }
 object Test extends App {
   new B {}
 }
diff --git a/test/files/run/t0883.scala b/test/files/run/t0883.scala
index adde951..c8ed06d 100644
--- a/test/files/run/t0883.scala
+++ b/test/files/run/t0883.scala
@@ -1,14 +1,14 @@
 object Foo { def apply(x: String) = new Foo(x) }
 class Foo(name: String)
 case object Bar extends Foo("Bar")
-case class Baz() extends Foo("Baz") 
+case class Baz() extends Foo("Baz")
 object Test extends App {
-  Foo("Bar") match { 
-    case Bar => println("What?") 
+  Foo("Bar") match {
+    case Bar => println("What?")
     case _ => println("OK")
   }
-  Foo("Baz") match { 
-    case Baz() => println("What?") 
+  Foo("Baz") match {
+    case Baz() => println("What?")
     case _ => println("OK")
-  }   
+  }
 }
diff --git a/test/files/run/t1005.scala b/test/files/run/t1005.scala
index 60129bc..562e2e4 100644
--- a/test/files/run/t1005.scala
+++ b/test/files/run/t1005.scala
@@ -1,19 +1,20 @@
+import scala.language.postfixOps
 object Test
 {
   class Foo[T](x : Array[AnyRef]) { def bar = x.asInstanceOf[Array[T]] }
   class Bar[T](x : Array[T]) { def bar = x.asInstanceOf[Array[AnyRef]] }
 
   object FromMono{
-     def main(args : Array[String]) = (new Foo[AnyRef](Array[AnyRef]("Halp!"))).bar
+     def mainer(args : Array[String]) = (new Foo[AnyRef](Array[AnyRef]("Halp!"))).bar
   }
 
   object FromPoly{
-    def main(args : Array[String]) = (new Bar[AnyRef](Array[AnyRef]("Halp!"))).bar
+    def mainer(args : Array[String]) = (new Bar[AnyRef](Array[AnyRef]("Halp!"))).bar
   }
-  
+
   def main(args: Array[String]): Unit = {
-    println(FromMono main null mkString)
-    println(FromPoly main null mkString)
+    println(FromMono mainer null mkString)
+    println(FromPoly mainer null mkString)
   }
 }
 
diff --git a/test/files/run/t1042.scala b/test/files/run/t1042.scala
index 1f39fff..302ff31 100644
--- a/test/files/run/t1042.scala
+++ b/test/files/run/t1042.scala
@@ -6,7 +6,7 @@ abstract class  A {
 
 case class B() extends A {
   // overloaded version is implemented, causing toString not to be implemented?
-  def toString(sb: StringBuilder): StringBuilder = error("")
+  def toString(sb: StringBuilder): StringBuilder = sys.error("")
 }
 
 object Test extends App {
diff --git a/test/files/run/virtpatmat_opt_sharing.check b/test/files/run/t107.check
similarity index 100%
copy from test/files/run/virtpatmat_opt_sharing.check
copy to test/files/run/t107.check
diff --git a/test/files/run/t107.scala b/test/files/run/t107.scala
new file mode 100644
index 0000000..ab1b289
--- /dev/null
+++ b/test/files/run/t107.scala
@@ -0,0 +1,8 @@
+object Test {
+  def main(args : Array[String]) : Unit = {
+    var hash : Long = 0
+    val bytes = Array(1.toByte, 2.toByte, 3.toByte)
+    hash += bytes(0)
+    Console.println(hash)
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/t1079.check b/test/files/run/t1079.check
deleted file mode 100644
index c508d53..0000000
--- a/test/files/run/t1079.check
+++ /dev/null
@@ -1 +0,0 @@
-false
diff --git a/test/files/run/t1079.scala b/test/files/run/t1079.scala
deleted file mode 100644
index ce435d2..0000000
--- a/test/files/run/t1079.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends App {
-  println(<t user:tag=""/> == <t user:tag="X"/>)
-}
diff --git a/test/files/run/t1100.check b/test/files/run/t1100.check
deleted file mode 100644
index d3a49a4..0000000
--- a/test/files/run/t1100.check
+++ /dev/null
@@ -1,4 +0,0 @@
-[1.4] error: errors are propagated
-
-aaab
-   ^
diff --git a/test/files/run/t1100.scala b/test/files/run/t1100.scala
deleted file mode 100644
index 6b95fd6..0000000
--- a/test/files/run/t1100.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.util.parsing.combinator.Parsers
-import scala.util.parsing.input.CharSequenceReader
-
-class TestParsers extends Parsers {
-  type Elem = Char
-
-  def p: Parser[List[Char]] = rep1(p1)
-  def p1: Parser[Char] = accept('a') | err("errors are propagated")
-}
-
-object Test {
-  def main(args: Array[String]): Unit = {
-    val tstParsers = new TestParsers
-    val s = new CharSequenceReader("aaab")
-    println(tstParsers.p(s))
-  }
-}
diff --git a/test/files/run/t1110.scala b/test/files/run/t1110.scala
index 8246438..8191778 100644
--- a/test/files/run/t1110.scala
+++ b/test/files/run/t1110.scala
@@ -1,8 +1,12 @@
+
+
+import scala.language.{ reflectiveCalls }
+
 class Stuff {
   def zoop(p: Any{def &(q: Int): Int}) = p & 7
   def floop = new { def & = "Hello" }
 
-  assert((floop &) == "Hello")
+  assert((floop.&) == "Hello")
   assert(zoop(10) == 2)
 }
 
diff --git a/test/files/run/t1141.scala b/test/files/run/t1141.scala
index ee4f2e7..732c579 100644
--- a/test/files/run/t1141.scala
+++ b/test/files/run/t1141.scala
@@ -1,7 +1,11 @@
+
+
+import scala.language.reflectiveCalls
+
 object Test extends App {
   val foo = new {
     def apply(args : String*) = args foreach println
   }
-  
+
   foo("var", "args")
 }
diff --git a/test/files/run/t1167.flags b/test/files/run/t1167.flags
new file mode 100644
index 0000000..ac96850
--- /dev/null
+++ b/test/files/run/t1167.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline
\ No newline at end of file
diff --git a/test/files/run/t1167.scala b/test/files/run/t1167.scala
index c2ec4cf..3dd0a30 100644
--- a/test/files/run/t1167.scala
+++ b/test/files/run/t1167.scala
@@ -10,17 +10,17 @@ trait Test1 {
 
 /* getName
  *   Returns the binary name of the class if this class object represents a
- *   reference type that is not an array type. 
+ *   reference type that is not an array type.
  * getSimpleName
  *   Returns the simple name of the underlying class as given in the source
  *   code. Returns an empty string if the underlying class is anonymous.
  */
 abstract class Foo {
   override def toString = getClass.getSimpleName
-  
+
   abstract class Bar {
     override def toString = getClass.getSimpleName
-  } 
+  }
 }
 
 object Test extends App {
diff --git a/test/files/run/t1195-new.scala b/test/files/run/t1195-new.scala
index 0f62b14..fcb8008 100644
--- a/test/files/run/t1195-new.scala
+++ b/test/files/run/t1195-new.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ existentials }
 import scala.reflect.runtime.universe._
 
 object Test {
@@ -9,7 +11,7 @@ object Test {
   val g1 = g()
   val h1 = h()
 
-  def m[T: WeakTypeTag](x: T) = println(weakTypeOf[T] + ", underlying = " + weakTypeOf[T].typeSymbol.typeSignature)
+  def m[T: WeakTypeTag](x: T) = println(weakTypeOf[T] + ", underlying = " + weakTypeOf[T].typeSymbol.info)
 
   def main(args: Array[String]): Unit = {
     m(f)
@@ -25,4 +27,4 @@ class A1[T] {
   class B1[U] {
     def f = { case class D(x: Int) extends A1[String] ; new D(5) }
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t1195-old.scala b/test/files/run/t1195-old.scala
index b46a3b7..f80734c 100644
--- a/test/files/run/t1195-old.scala
+++ b/test/files/run/t1195-old.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ existentials }
+
 object Test {
   def f() = { case class Bar(x: Int); Bar }
   def g() = { case class Bar(x: Int); Bar(5) }
diff --git a/test/files/run/t1220.scala b/test/files/run/t1220.scala
index 0ba188d..75e0ea0 100644
--- a/test/files/run/t1220.scala
+++ b/test/files/run/t1220.scala
@@ -1,7 +1,7 @@
 object Test extends App {
 
   class QSRichIterable[A](self: Iterable[A]) {
-    def filterMap[R](f: PartialFunction[A,R]) = 
+    def filterMap[R](f: PartialFunction[A,R]) =
       self filter (f.isDefinedAt) map f
   }
 
diff --git a/test/files/run/t1300.scala b/test/files/run/t1300.scala
index ce2f80e..aa3580e 100644
--- a/test/files/run/t1300.scala
+++ b/test/files/run/t1300.scala
@@ -4,10 +4,10 @@ object Test extends App
 //  val a1 = x1.toArray[Any]
   val a2 = Array('a','b','c','d').toArray[Any]
   val a3 = Array("e","f","g","h").toArray[Any]
-  
+
   Array.copy(a3, 0, a1, 0, 4)
   Array.copy(a2, 0, a3, 0, 4)
   Array.copy(a2, 0, a1, 0, 4)
-  
+
   println(a1.mkString + a2.mkString + a3.mkString)
 }
diff --git a/test/files/run/t1309.scala b/test/files/run/t1309.scala
index b6a75fe..8496354 100644
--- a/test/files/run/t1309.scala
+++ b/test/files/run/t1309.scala
@@ -1,6 +1,6 @@
 object Test {
   def f(ras: => IndexedSeq[Byte]): IndexedSeq[Byte] = ras
-  
+
   def main(args: Array[String]): Unit = {
     f(new Array[Byte](0))
   }
diff --git a/test/files/run/t1323.scala b/test/files/run/t1323.scala
index 8209b85..94b51bd 100644
--- a/test/files/run/t1323.scala
+++ b/test/files/run/t1323.scala
@@ -3,9 +3,9 @@ object Test extends App {
   println(" 2:" + List(1,2,3,4).indexOfSlice(List(1,2)))        //  0
   println(" 3:" + List(1,2,3,4).indexOfSlice(List(2,3)))        //  1
   println(" 4:" + List(1,2,3,4).indexOfSlice(List(3,4)))        //  2
-  println(" 5:" + List(1,2,3,4).indexOfSlice(List(4,5)))        // -1 
+  println(" 5:" + List(1,2,3,4).indexOfSlice(List(4,5)))        // -1
   println(" 6:" + List(1,2,3,4).indexOfSlice(List(2,4)))        // -1
-  println(" 7:" + List(1,2,3,4).indexOfSlice(List(4,3)))        // -1 
+  println(" 7:" + List(1,2,3,4).indexOfSlice(List(4,3)))        // -1
   println(" 8:" + List(1,2,3,4).indexOfSlice(List(1,3)))        // -1
   println(" 9:" + List(1,2,3,4).indexOfSlice(List(1,3)))        // -1
   println("10:" + List(1,2,3,4).indexOfSlice(List(1,2,3,4)))    //  0
diff --git a/test/files/run/t1333.scala b/test/files/run/t1333.scala
index 514b4ff..1696629 100644
--- a/test/files/run/t1333.scala
+++ b/test/files/run/t1333.scala
@@ -1,11 +1,11 @@
 object Test {
   case class A(x: Int)(y: Int)(z: String)
-  
+
   def f(x: Any) = x match {
     case A(x)   => x
     case _      => -1
   }
-  
+
   def main(args: Array[String]): Unit = {
     println(f(A(10)(20)("abc")))
     println(f(A(-10)(20)("abc")))
diff --git a/test/files/run/t1368.check b/test/files/run/t1368.check
new file mode 100644
index 0000000..581e8a4
--- /dev/null
+++ b/test/files/run/t1368.check
@@ -0,0 +1,3 @@
+t1368.scala:7: warning: Reference to uninitialized value blurp
+  def go3 = (new AnyRef with Happy with Sad { override val status = blurp ; val blurp = "happysad" }).status
+                                                                    ^
diff --git a/test/files/run/t1423.scala b/test/files/run/t1423.scala
index 44c6653..073483a 100644
--- a/test/files/run/t1423.scala
+++ b/test/files/run/t1423.scala
@@ -5,4 +5,4 @@ object Test extends App{
     case 1L => println(1);
     case _ => println("????");
   }
-} 
+}
diff --git a/test/files/run/t1427.check b/test/files/run/t1427.check
new file mode 100644
index 0000000..11a3d2f
--- /dev/null
+++ b/test/files/run/t1427.check
@@ -0,0 +1,3 @@
+t1427.scala:6: warning: abstract type X in type pattern Bob[_[_] <: Any] is unchecked since it is eliminated by erasure
+    case x: (Bob[X] forSome { type X[_] })  => true
+                    ^
diff --git a/test/files/run/t1427.scala b/test/files/run/t1427.scala
index 21bd71f..4b8057f 100644
--- a/test/files/run/t1427.scala
+++ b/test/files/run/t1427.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds }
+
 class Bob[K[_]] {
   def foo(other: Any) = other match {
     case x: (Bob[X] forSome { type X[_] })  => true
@@ -9,7 +12,7 @@ object Test {
   def main(args: Array[String]): Unit = {
     val x = new Bob[List]
     val results = List(x, new Bob[Set], 55) map (x foo _)
-    
+
     assert(results == List(true, true, false))
   }
 }
diff --git a/test/files/run/t1430/Bar_1.java b/test/files/run/t1430/Bar_1.java
index 4db2eaf..e49b6be 100644
--- a/test/files/run/t1430/Bar_1.java
+++ b/test/files/run/t1430/Bar_1.java
@@ -2,7 +2,7 @@ package j;
 
 interface Foo {
    public void foo();
-}                                                                                                
+}
 public interface Bar_1 extends Foo {
    public void bar();
-}                                                                                                
+}
diff --git a/test/files/run/t1430/Test_2.scala b/test/files/run/t1430/Test_2.scala
index 7af65de..278d9c7 100644
--- a/test/files/run/t1430/Test_2.scala
+++ b/test/files/run/t1430/Test_2.scala
@@ -6,7 +6,7 @@ package s {
   class Baz(x: j.Bar_1) {
     x.foo
     override def toString = "Baz"
-  }                                                                                                
+  }
 }
 
 object Test {
diff --git a/test/files/run/t1500.scala b/test/files/run/t1500.scala
index ab132b7..30c026f 100644
--- a/test/files/run/t1500.scala
+++ b/test/files/run/t1500.scala
@@ -6,7 +6,7 @@ object Test {
    *  Type inference overlooks constraints posed by type parameters in annotations on types.
    */
   
-  val testCode = <code>
+  val testCode = """
   
     class posingAs[A] extends annotation.TypeConstraint
     
@@ -14,14 +14,14 @@ object Test {
     
     val x = resolve(7: @posingAs[Any])
   
-  </code>.text
+  """
   
-  def main(args: Array[String]) = {
+  def main(args: Array[String]) {
     
     val settings = new Settings()
     settings.classpath.value = System.getProperty("java.class.path")
     val tool = new interpreter.IMain(settings)
-    val global = tool.compiler
+    val global = tool.global
 
     import global._
     import definitions._
diff --git a/test/files/run/t1501.scala b/test/files/run/t1501.scala
index aba206b..ca6bf35 100644
--- a/test/files/run/t1501.scala
+++ b/test/files/run/t1501.scala
@@ -6,7 +6,7 @@ object Test {
    *  ...
    */
   
-  val testCode = <code>
+  val testCode = """
   
     class xyz[A] extends annotation.TypeConstraint
     
@@ -25,13 +25,13 @@ object Test {
       }}
     }}
     
-  </code>.text
+  """
   
-  def main(args: Array[String]) = {
+  def main(args: Array[String]) {
     val settings = new Settings()
     settings.classpath.value = System.getProperty("java.class.path")
     val tool = new interpreter.IMain(settings)
-    val global = tool.compiler
+    val global = tool.global
 
     import global._
     import definitions._
diff --git a/test/files/run/t1503.check b/test/files/run/t1503.check
new file mode 100644
index 0000000..43eceb0
--- /dev/null
+++ b/test/files/run/t1503.check
@@ -0,0 +1 @@
+whoops
diff --git a/test/files/run/t1503.scala b/test/files/run/t1503.scala
new file mode 100644
index 0000000..1be0e74
--- /dev/null
+++ b/test/files/run/t1503.scala
@@ -0,0 +1,20 @@
+object Whatever {
+  override def equals(x: Any) = true
+}
+
+object Test extends App {
+  // this should make it abundantly clear Any is the best return type we can guarantee
+  def matchWhatever(x: Any): Any = x match { case n @ Whatever => n }
+  // when left to its own devices, and not under -Xfuture, the return type is Whatever.type
+  def matchWhateverCCE(x: Any) = x match { case n @ Whatever => n }
+
+  // just to exercise it a bit
+  assert(matchWhatever(1) == 1)
+  assert(matchWhatever("1") == "1")
+
+  try {
+    matchWhateverCCE("1"): Whatever.type
+  } catch {
+    case _: ClassCastException => println("whoops")
+  }
+}
\ No newline at end of file
diff --git a/test/files/neg/ambiguous-float-dots2.flags b/test/files/run/t1503_future.flags
similarity index 100%
rename from test/files/neg/ambiguous-float-dots2.flags
rename to test/files/run/t1503_future.flags
diff --git a/test/files/run/t1503_future.scala b/test/files/run/t1503_future.scala
new file mode 100644
index 0000000..1e3daad
--- /dev/null
+++ b/test/files/run/t1503_future.scala
@@ -0,0 +1,17 @@
+object Whatever {
+  override def equals(x: Any) = true
+}
+
+object Test extends App {
+  // this should make it abundantly clear Any is the best return type we can guarantee
+  def matchWhatever(x: Any): Any = x match { case n @ Whatever => n }
+  // when left to its own devices, and not under -Xfuture, the return type is Whatever.type
+  def matchWhateverCCE(x: Any) = x match { case n @ Whatever => n }
+
+  // just to exercise it a bit
+  assert(matchWhatever(1) == 1)
+  assert(matchWhatever("1") == "1")
+
+  assert(matchWhateverCCE(1) == 1)
+  assert(matchWhateverCCE("1") == "1")
+}
\ No newline at end of file
diff --git a/test/files/run/t1505.scala b/test/files/run/t1505.scala
index a246e8a..4afbb99 100644
--- a/test/files/run/t1505.scala
+++ b/test/files/run/t1505.scala
@@ -1,5 +1,3 @@
-object P extends Enumeration(0, "A", "B", "C") { val A, B, C = Value }
-
 object Q extends Enumeration {
   val A = Value("A")
   val B = Value("B")
@@ -11,9 +9,14 @@ object R extends Enumeration {
 }
 
 object Test extends App {
-  assert(P(0) == P.withName("A"))
-  assert(P.C == P.withName("C"))
-
   assert(Q(0) == Q.withName("A"))
   assert(Q.C == Q.withName("C"))
+
+  assert(R(0) == R.withName("A"))
+  assert(R.C == R.withName("C"))
+
+  var failed = false
+  try { Q.withName("x") } catch { case _: NoSuchElementException => failed = true }
+  assert(failed)
+
 }
diff --git a/test/files/run/t153.scala b/test/files/run/t153.scala
index 82492fd..3fdb423 100644
--- a/test/files/run/t153.scala
+++ b/test/files/run/t153.scala
@@ -1,5 +1,5 @@
 object Test extends App {
 	def powers(x: Int) = if ((x&(x-1))==0) Some(x) else None
 	val res = (Stream.range(1, 500000) flatMap powers).reverse
-	println(res take 42 force)
+	println((res take 42).force)
 }
diff --git a/test/files/run/t1537.scala b/test/files/run/t1537.scala
index ddbfb29..1dce501 100644
--- a/test/files/run/t1537.scala
+++ b/test/files/run/t1537.scala
@@ -4,15 +4,15 @@ trait Syntax {
 
 trait Evaluation {
   val syntax: Syntax
-  
+
   def equalInTrait = this.syntax.Foo == this.syntax.Foo
 }
 
 object Test extends Evaluation with App {
-  object syntax extends Syntax 
+  object syntax extends Syntax
 
   def equalInObject = this.syntax.Foo == this.syntax.Foo
-  
+
   println(equalInTrait)
   println(equalInObject)
 }
diff --git a/test/files/run/t1591.scala b/test/files/run/t1591.scala
index bd43f0b..6dd9605 100644
--- a/test/files/run/t1591.scala
+++ b/test/files/run/t1591.scala
@@ -1,8 +1,8 @@
 abstract class A {
-    
+
     lazy val lazyBar = bar
-    
-    object bar {        
+
+    object bar {
         val foo = 12
     }
 
diff --git a/test/files/run/t1620.check b/test/files/run/t1620.check
deleted file mode 100755
index afa1e6a..0000000
--- a/test/files/run/t1620.check
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version='1.0' encoding='utf-8'?>
-<!DOCTYPE foo PUBLIC "-//Foo Corp//DTD 1.0//EN" "foo.dtd">
-<foo/>
-<?xml version='1.0' encoding='utf-8'?>
-<!DOCTYPE foo PUBLIC "-//Foo Corp//DTD 1.0//EN">
-<foo/>
diff --git a/test/files/run/t1620.scala b/test/files/run/t1620.scala
deleted file mode 100644
index e8ea06e..0000000
--- a/test/files/run/t1620.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-import java.io.PrintWriter
-import scala.xml.XML
-import scala.xml.dtd.{DocType, PublicID}
-
-object Test extends App {
-  val dt = DocType("foo", PublicID("-//Foo Corp//DTD 1.0//EN", "foo.dtd"), Seq())
-  val pw = new PrintWriter(System.out)
-  XML.write(pw, <foo/>, "utf-8", true, dt)
-  pw.println()
-  pw.flush()
-
-  val dt2 = DocType("foo", PublicID("-//Foo Corp//DTD 1.0//EN", null), Seq())
-  XML.write(pw, <foo/>, "utf-8", true, dt2)
-  pw.println()
-  pw.flush()
-}
diff --git a/test/files/run/t1718.scala b/test/files/run/t1718.scala
index 358bd1b..d1b19c3 100644
--- a/test/files/run/t1718.scala
+++ b/test/files/run/t1718.scala
@@ -1,10 +1,10 @@
 object Test extends App{
-  def matchesNull[T](mightBeNull: Array[T]): Boolean = mightBeNull match { 
+  def matchesNull[T](mightBeNull: Array[T]): Boolean = mightBeNull match {
     case null => true
     case x => false
   }
 
   val nullArray: Array[String] = null
-  println(matchesNull(nullArray)) 
+  println(matchesNull(nullArray))
 }
 
diff --git a/test/files/run/t1766.scala b/test/files/run/t1766.scala
index 2afd883..ff81da3 100644
--- a/test/files/run/t1766.scala
+++ b/test/files/run/t1766.scala
@@ -1,16 +1,19 @@
+
+import scala.language.{ reflectiveCalls }
+
 object Test extends App {
-  
+
   class C(s: String) {
-  
+
     def this(i: Int) = this("bar")
-    
+
     def f = {
       val v: { def n: Int } = new { val n = 3 }
       v.n
     }
-    
+
   }
-  
+
   new C("foo").f
-  
+
 }
diff --git a/test/files/run/t1773.scala b/test/files/run/t1773.scala
deleted file mode 100644
index c50b625..0000000
--- a/test/files/run/t1773.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-object Test extends App
-{
-  val xs = List(
-    <a></a>,
-    <a/>,
-    <a>{ xml.NodeSeq.Empty }</a>,
-    <a>{""}</a>,
-    <a>{ if (true) "" else "I like turtles" }</a>
-  )
-  
-  for (x1 <- xs; x2 <- xs) assert (x1 xml_== x2)
-}
diff --git a/test/files/run/t1829.check b/test/files/run/t1829.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t1829.scala b/test/files/run/t1829.scala
index 7c39d33..8240527 100644
--- a/test/files/run/t1829.scala
+++ b/test/files/run/t1829.scala
@@ -1,6 +1,6 @@
 object Test{
   def main(args : Array[String]){
-    import scala.collection.immutable._  
+    import scala.collection.immutable._
     assert(IntMap.empty == HashMap.empty);
     assert(HashMap.empty == IntMap.empty);
     assert(LongMap.empty == HashMap.empty);
diff --git a/test/files/run/t1909.check b/test/files/run/t1909.check
new file mode 100644
index 0000000..7d25be6
--- /dev/null
+++ b/test/files/run/t1909.check
@@ -0,0 +1,3 @@
+t1909.scala:7: warning: A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.
+  def this(p: String) = this(try 0)
+                             ^
diff --git a/test/files/run/t1909.scala b/test/files/run/t1909.scala
new file mode 100644
index 0000000..8ead7ba
--- /dev/null
+++ b/test/files/run/t1909.scala
@@ -0,0 +1,12 @@
+// Until #1909 is fixed, if this compiles the bytecode
+// will trigger a VerifyError.  This lifting and the one
+// in 1909b.scala actually happen in two different places
+// (uncurry and lambdalifter.)
+class Ticket1909 {
+  def this(value: Int) = this()
+  def this(p: String) = this(try 0)
+}
+
+object Test extends App {
+  new Ticket1909("")
+}
diff --git a/test/files/run/t1909b.scala b/test/files/run/t1909b.scala
new file mode 100644
index 0000000..89b2af5
--- /dev/null
+++ b/test/files/run/t1909b.scala
@@ -0,0 +1,9 @@
+class Ticket1909 (x: Int) {
+  def this() = this({
+    def bar() = 5
+    bar
+  })
+}
+object Test extends App {
+  new Ticket1909()
+}
diff --git a/test/files/run/t1909c.scala b/test/files/run/t1909c.scala
new file mode 100644
index 0000000..87c0eb0
--- /dev/null
+++ b/test/files/run/t1909c.scala
@@ -0,0 +1,9 @@
+class Base(a: Any)
+
+// java.lang.VerifyError: (class: Sub, method: <init> signature: ()V) Expecting to find object/array on stack
+//  at Test$.<init>(t1909c.scala)
+class Sub() extends Base({ def bippy = 5; bippy })
+
+object Test extends App {
+  new Sub()
+}
diff --git a/test/files/run/t1987.scala b/test/files/run/t1987.scala
index 4c278ec..de869ed 100644
--- a/test/files/run/t1987.scala
+++ b/test/files/run/t1987.scala
@@ -5,7 +5,7 @@ package foo {
   package object bar {
     def duh(n: Long)   = println("long")
     def duh(n: Double) = println("double")
-    
+
     def duh2(n: Double) = println("double")
     def duh2(n: Long)   = println("long")
   }
@@ -16,7 +16,7 @@ package foo {
         bip.bar.duh(33L)
         duh(33d)
         bip.bar.duh(33d)
-        
+
         duh2(33L)
         bip.bar.duh2(33L)
         duh2(33d)
@@ -35,7 +35,7 @@ package bip {
     def duh2(n: Double) = println("double")
     def duh2(n: Long)   = println("long")
   }
-  
+
   package object bar extends Duh with Duh2 { }
   package bar {
     object Main {
@@ -44,7 +44,7 @@ package bip {
         bip.bar.duh(33L)
         duh(33d)
         bip.bar.duh(33d)
-        
+
         duh2(33L)
         bip.bar.duh2(33L)
         duh2(33d)
diff --git a/test/files/run/t2029.scala b/test/files/run/t2029.scala
index 1cbe97a..32b04f0 100644
--- a/test/files/run/t2029.scala
+++ b/test/files/run/t2029.scala
@@ -3,10 +3,10 @@ object Test{
     import scala.collection.immutable.TreeSet;
 
     val mainSet = TreeSet(1 to 5 :_*)
-  
+
     var compareCalled = false;
     val smallerSet = TreeSet(2 to 4 :_*)(Ordering[Int].reverse)
- 
+
     println(mainSet.mkString(","))
     println(smallerSet.mkString(","))
     println(smallerSet.subsetOf(mainSet));
diff --git a/test/files/run/t2074_2.scala b/test/files/run/t2074_2.scala
index 1f59e0b..4624170 100644
--- a/test/files/run/t2074_2.scala
+++ b/test/files/run/t2074_2.scala
@@ -12,7 +12,7 @@ object Test {
     def iterator = underlying.iterator
   }
   val w = IndexedSeq(1, 2, 3).view
-  
+
   def main(args: Array[String]): Unit = {
     println(v)
     println(w)
diff --git a/test/files/run/t2087-and-2400.scala b/test/files/run/t2087-and-2400.scala
index 93cd633..19a5df2 100644
--- a/test/files/run/t2087-and-2400.scala
+++ b/test/files/run/t2087-and-2400.scala
@@ -3,14 +3,14 @@ object Test
   def negativeCharMaker = new (Short => Char) { def apply(x: Short) = x.toChar }
   def main(args: Array[String]): Unit = {
     // throws exception if -100 gets to Character.valueOf
-    val x = negativeCharMaker(-100) 
-    
+    val x = negativeCharMaker(-100)
+
     // chars are unsigned, they should never be equal to negative values
     assert((-100).toShort != (-100).toChar)
     assert((-100).toChar != (-100).toShort)
     assert((-100).toChar != (-100).toByte)
     assert((-100).toByte != (-100).toChar)
-    
+
     // BoxesRunTime must agree as well
     assert(((-100).toShort: Any) != (-100).toChar)
     assert(((-100).toChar: Any) != (-100).toShort)
diff --git a/test/files/run/t2106.check b/test/files/run/t2106.check
new file mode 100644
index 0000000..f8f625f
--- /dev/null
+++ b/test/files/run/t2106.check
@@ -0,0 +1,6 @@
+t2106.scala:7: warning: Could not inline required method foo because access level required by callee not matched by caller.
+  def main(args: Array[String]): Unit = x.foo
+                                          ^
+t2106.scala:7: warning: At the end of the day, could not inline @inline-marked method foo
+  def main(args: Array[String]): Unit = x.foo
+                                          ^
diff --git a/test/files/run/t2106.flags b/test/files/run/t2106.flags
index eb4d19b..00d3643 100644
--- a/test/files/run/t2106.flags
+++ b/test/files/run/t2106.flags
@@ -1 +1 @@
--optimise
\ No newline at end of file
+-optimise -Yinline-warnings
diff --git a/test/files/run/t2106.scala b/test/files/run/t2106.scala
index e8124da..55b89da 100644
--- a/test/files/run/t2106.scala
+++ b/test/files/run/t2106.scala
@@ -4,5 +4,5 @@ class A extends Cloneable {
 
 object Test {
   val x = new A
-  def main(args: Array[String]) = x.foo
+  def main(args: Array[String]): Unit = x.foo
 }
diff --git a/test/files/run/t2124.check b/test/files/run/t2124.check
deleted file mode 100755
index 51b4046..0000000
--- a/test/files/run/t2124.check
+++ /dev/null
@@ -1 +0,0 @@
-<p><lost/><q/></p>
diff --git a/test/files/run/t2124.scala b/test/files/run/t2124.scala
deleted file mode 100644
index a4fd654..0000000
--- a/test/files/run/t2124.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-import scala.xml._
-
-import scala.xml.transform._
-
-object Test {
-  val sampleXml = <p><lost/><t><s><r></r></s></t></p>
-
-  def main(args: scala.Array[String]) {
-
-    println(new RuleTransformer(new RewriteRule {
-
-        override def transform(n: Node): NodeSeq = { 
-          val result = n match {
-          case <t>{_*}</t> => <q/>
-
-          case n => n
-
-          }
-//          println ("Rewriting '" +n+ "' to: '" + result+ "'")
-
-          result
-        }
-      }).transform(sampleXml))
-  }
-}
diff --git a/test/files/run/t2125.check b/test/files/run/t2125.check
deleted file mode 100755
index 51b4046..0000000
--- a/test/files/run/t2125.check
+++ /dev/null
@@ -1 +0,0 @@
-<p><lost/><q/></p>
diff --git a/test/files/run/t2125.scala b/test/files/run/t2125.scala
deleted file mode 100644
index a10ed98..0000000
--- a/test/files/run/t2125.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-import scala.xml._
-
-import scala.xml.transform._
-
-object Test {
-
-  val sampleXml = <xml:group><p><lost/><t><s><r></r></s></t></p></xml:group>
-  
-  def main(args: scala.Array[String]) {
-    println(new RuleTransformer(new RewriteRule {
-
-        override def transform(n: Node): NodeSeq = { 
-
-          val result = n match {
-
-          case <t>{_*}</t> => <q/>
-
-          case n => n
-          }
-//          println ("Rewriting '" +n+ "' to: '" + result+ "'")
-          result
-        }
-      }).transform(sampleXml))
-  }
-}
diff --git a/test/files/run/t2212.check b/test/files/run/t2212.check
index 302bd0b..8ab4d60 100644
--- a/test/files/run/t2212.check
+++ b/test/files/run/t2212.check
@@ -1,3 +1,4 @@
+warning: there were 2 deprecation warning(s); re-run with -deprecation for details
 LinkedList(1)
 LinkedList(1)
 true
diff --git a/test/files/run/t2251.check b/test/files/run/t2251.check
new file mode 100644
index 0000000..55ad2a5
--- /dev/null
+++ b/test/files/run/t2251.check
@@ -0,0 +1 @@
+Set(List(List(C), Stream(D, ?)))
diff --git a/test/files/run/t2251.flags b/test/files/run/t2251.flags
new file mode 100644
index 0000000..1924326
--- /dev/null
+++ b/test/files/run/t2251.flags
@@ -0,0 +1 @@
+-Xstrict-inference
\ No newline at end of file
diff --git a/test/files/run/t2251.scala b/test/files/run/t2251.scala
new file mode 100644
index 0000000..00c5619
--- /dev/null
+++ b/test/files/run/t2251.scala
@@ -0,0 +1,19 @@
+class A
+trait B[T <: B[T]] extends A
+class C extends B[C] { override def toString = "C" }
+class D extends B[D] { override def toString = "D" }
+
+class E {
+  val ys = List(List(new C), Stream(new D))
+}
+
+object Test {
+  def trav = List(List(), Stream())
+
+  def main(args: Array[String]): Unit = {
+    val f = (new E).ys _
+    var xs: Set[List[_ <: Seq[B[_]]]] = Set()
+    xs += f()
+    println(xs)
+  }
+}
diff --git a/test/files/run/t2251b.check b/test/files/run/t2251b.check
new file mode 100644
index 0000000..4231fc6
--- /dev/null
+++ b/test/files/run/t2251b.check
@@ -0,0 +1,11 @@
+TypeTag[List[scala.collection.immutable.LinearSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with java.io.Serializable]]
+TypeTag[List[scala.collection.immutable.Iterable[B[_ >: F with E with D with C <: B[_ >: F with E with D with C <: A]]] with F with Int => Any]]
+TypeTag[List[scala.collection.immutable.Seq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with Serializable]]
+TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
+TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
+TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
+TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
+TypeTag[List[scala.collection.Map[_ >: F with C <: B[_ >: F with C <: B[_ >: F with C <: A]], B[_ >: G with D <: B[_ >: G with D <: A]]]]]
+TypeTag[List[scala.collection.AbstractSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with scala.collection.LinearSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with java.io.Serializable]]
+TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
+TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
diff --git a/test/files/run/t2251b.flags b/test/files/run/t2251b.flags
new file mode 100644
index 0000000..1924326
--- /dev/null
+++ b/test/files/run/t2251b.flags
@@ -0,0 +1 @@
+-Xstrict-inference
\ No newline at end of file
diff --git a/test/files/run/t2251b.scala b/test/files/run/t2251b.scala
new file mode 100644
index 0000000..b67b3ae
--- /dev/null
+++ b/test/files/run/t2251b.scala
@@ -0,0 +1,48 @@
+class A
+trait B[T <: B[T]] extends A
+class B1[T <: B1[T]] extends B[T]
+class C extends B[C] { override def toString = "C" }
+class D extends B[D] { override def toString = "D" }
+class E extends B[E] { override def toString = "E" }
+class F extends B[F] { override def toString = "F" }
+class G extends B1[G] { override def toString = "G" }
+
+object Test {
+  import scala.collection.{ mutable, immutable }
+  import scala.collection.immutable.{ Vector }
+  import scala.reflect.runtime.universe._
+  def what[T: TypeTag](x: T) = println(typeTag[T])
+
+  def main(args: Array[String]): Unit = {
+    what(List(List(new C), Stream(new D)))
+    what(List(List(new C), Stream(new D), Vector(new E), Set(new F)))
+    what(List(immutable.Vector(new C), Stream(new D)))
+    what(List(collection.Set(new F), mutable.Set(new G)))
+    what(List(collection.Set(new F), immutable.Set(new G)))
+    what(List(mutable.Set(new F), immutable.Set(new G)))
+    what(List(mutable.Seq(new F), immutable.Seq(new G)))
+    what(List(mutable.Map(new C -> new D), immutable.Map(new F -> new G)))
+    what(List(mutable.MutableList(new F), immutable.List(new G)))
+    what(List(mutable.Seq(new F), collection.Seq(new G)))
+    what(List(mutable.LinearSeq(new F), collection.IndexedSeq(new G)))
+  }
+}
+
+
+// class D extends B[D] { override def toString = "D" }
+
+
+// class E {
+//   val ys = List(List(new C), Stream(new D))
+// }
+
+// object Test {
+//   def trav = List(List(), Stream())
+
+//   def main(args: Array[String]): Unit = {
+//     val f = (new E).ys _
+//     var xs: Set[List[_ <: Seq[B[_]]]] = Set()
+//     xs += f()
+//     println(xs)
+//   }
+// }
diff --git a/test/files/run/t2276.check b/test/files/run/t2276.check
deleted file mode 100644
index 95f51c8..0000000
--- a/test/files/run/t2276.check
+++ /dev/null
@@ -1,8 +0,0 @@
-<root>
-      <subnode>
-        <version>2</version>
-      </subnode>
-      <contents>
-        <version>2</version>
-      </contents>
-    </root>
diff --git a/test/files/run/t2276.scala b/test/files/run/t2276.scala
deleted file mode 100644
index f0404e5..0000000
--- a/test/files/run/t2276.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-import scala.xml._
-import scala.xml.transform._
-
-object Test extends App {
-  val inputXml : Node = 
-    <root>
-      <subnode>
-        <version>1</version>
-      </subnode>
-      <contents>
-        <version>1</version>
-      </contents>
-    </root>
-
-  object t1 extends RewriteRule {
-    override def transform(n: Node): Seq[Node] = n match {
-      case <version>{x}</version> if x.toString.toInt < 4 => <version>{x.toString.toInt+1}</version>
-      case other => other
-    }
-  }
-
-  val ruleTransformer = new RuleTransformer(t1)
-  println(ruleTransformer(inputXml))
-}
diff --git a/test/files/run/t2296c/Action.java b/test/files/run/t2296c/Action.java
index 50ba9a4..4a6b69a 100644
--- a/test/files/run/t2296c/Action.java
+++ b/test/files/run/t2296c/Action.java
@@ -8,7 +8,7 @@ public abstract class Action {
   public Action(Global glob0) {
     m_glob = glob0;
   }
-  
+
   public Action() {
     this(null);
   }
diff --git a/test/files/run/t2308a.scala b/test/files/run/t2308a.scala
index abb5680..d1144db 100644
--- a/test/files/run/t2308a.scala
+++ b/test/files/run/t2308a.scala
@@ -1,7 +1,9 @@
+
+import scala.language.{ higherKinds }
 object Test {
   trait T[M[_]]
-  
+
   def f1 = classOf[T[X] forSome { type X[_] } ]
-  
+
   def main(args: Array[String]): Unit = println(f1)
 }
diff --git a/test/pending/run/t2318.check b/test/files/run/t2318.check
similarity index 100%
rename from test/pending/run/t2318.check
rename to test/files/run/t2318.check
diff --git a/test/files/run/t2318.scala b/test/files/run/t2318.scala
new file mode 100644
index 0000000..b638c43
--- /dev/null
+++ b/test/files/run/t2318.scala
@@ -0,0 +1,41 @@
+import java.security._
+
+import scala.language.{ reflectiveCalls }
+
+object Test {
+  trait Bar { def bar: Unit }
+
+  object Mgr extends SecurityManager {
+    override def checkPermission(perm: Permission) = perm match {
+      case _: java.lang.RuntimePermission                                                   => ()
+      case _: java.io.FilePermission                                                        => ()
+      case x: java.security.SecurityPermission if x.getName contains ".networkaddress."     => () // generality ftw
+      case x: java.util.PropertyPermission if x.getName == "sun.net.inetaddr.ttl"           => ()
+      case _                                                                                => super.checkPermission(perm)
+    }
+  }
+
+  def t1() = {
+    val p = Runtime.getRuntime().exec("ls");
+    type Destroyable = { def destroy() : Unit }
+    def doDestroy( obj : Destroyable ) : Unit = obj.destroy();
+    doDestroy( p );
+  }
+  def t2() = {
+    System.setSecurityManager(Mgr)
+
+    val b = new Bar { def bar = println("bar") }
+    b.bar
+
+    val structural = b.asInstanceOf[{ def bar: Unit }]
+    structural.bar
+  }
+
+  def main(args: Array[String]) {
+    // figuring this will otherwise break on windows
+    try t1()
+    catch { case _: java.io.IOException => () }
+
+    t2()
+  }
+}
diff --git a/test/files/run/t2333.scala b/test/files/run/t2333.scala
index da43386..7dc7a92 100644
--- a/test/files/run/t2333.scala
+++ b/test/files/run/t2333.scala
@@ -1,7 +1,7 @@
 class A {
     def whatever() {
         lazy val a = 1
-        lazy val b = try { 2 } catch { case _ => 0 }
+        lazy val b = try { 2 } catch { case _: Throwable => 0 }
         a
         b
 
@@ -13,4 +13,4 @@ object Test {
         val a = new A
         a.whatever
     }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t2337.scala b/test/files/run/t2337.scala
index 86a372c..edb574c 100644
--- a/test/files/run/t2337.scala
+++ b/test/files/run/t2337.scala
@@ -10,7 +10,7 @@ object Test {
         // throw new Exception("Unsupported compare " + first + "; " + second)
     }
   }
-    
+
   def main(args: Array[String]): Unit = {
     println("Both Int", -1, compare(0, 1))
     println("Both Float", 1, compare(1.0, 0.0))
diff --git a/test/files/run/t2354.scala b/test/files/run/t2354.scala
deleted file mode 100644
index 5419911..0000000
--- a/test/files/run/t2354.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.xml.parsing._
-import scala.io.Source
-
-object Test
-{
-  val xml_good = "<title><![CDATA[Hello [tag]]]></title>"
-  val xml_bad = "<title><![CDATA[Hello [tag] ]]></title>"
-
-  val parser1 = ConstructingParser.fromSource(Source.fromString(xml_good),false)
-  val parser2 = ConstructingParser.fromSource(Source.fromString(xml_bad),false)
-  
-  def main(args: Array[String]): Unit = {
-    parser1.document
-    parser2.document
-  }
-}
-
diff --git a/test/files/run/t2417.scala b/test/files/run/t2417.scala
index 2d0bc2d..8d1527e 100644
--- a/test/files/run/t2417.scala
+++ b/test/files/run/t2417.scala
@@ -1,6 +1,6 @@
 // #2417
 object Test {
-  
+
   def parallel(numThreads: Int)(block: => Unit) {
     var failure: Throwable = null
     val threads = Array.tabulate(numThreads)(i => new Thread {
@@ -8,7 +8,7 @@ object Test {
         try {
           block
           } catch {
-            case x => failure = x
+            case x: Throwable => failure = x
           }
         }
       })
@@ -16,7 +16,7 @@ object Test {
       for (t <- threads) t.join
       if (failure != null) println("FAILURE: " + failure)
     }
-  
+
     def testSet(initialSize: Int, numThreads: Int, passes: Int) {
       val orig = Set.empty ++ (1 to initialSize)
       parallel(numThreads) {
@@ -32,7 +32,7 @@ object Test {
         }
       }
     }
-  
+
     def testMap(initialSize: Int, numThreads: Int, passes: Int) {
       val orig = Map.empty ++ ((1 to initialSize) map ((_,"v")))
       parallel(numThreads) {
@@ -48,28 +48,28 @@ object Test {
         }
       }
     }
-  
+
     def main(args: Array[String]) {
       println("testing small Map that doesn't promote to HashMap...")
       testMap(4, 2, 1000000)
       println()
-  
+
       println("testing single-threaded HashMap use...")
       testMap(5, 1, 1000000)
       println()
-  
+
       println("testing HashMap.size from multiple threads...")
       testMap(5, 2, 1000000)
       println()
-  
+
       println("testing small Set that doesn't promote to HashSet...")
       testSet(4, 2, 1000000)
       println()
-  
+
       println("testing single-threaded HashSet use...")
       testSet(5, 1, 1000000)
       println()
-  
+
       println("testing HashSet.size from multiple threads...")
       testSet(5, 2, 1000000)
       println()
diff --git a/test/files/run/t2464/Annotated.java b/test/files/run/t2464/Annotated.java
new file mode 100644
index 0000000..d022f98
--- /dev/null
+++ b/test/files/run/t2464/Annotated.java
@@ -0,0 +1,5 @@
+package test;
+
+@Connect(loadStyle = Connect.LoadStyle.EAGER)
+public class Annotated {
+}
diff --git a/test/files/run/t2464/Connect.java b/test/files/run/t2464/Connect.java
new file mode 100644
index 0000000..59349f9
--- /dev/null
+++ b/test/files/run/t2464/Connect.java
@@ -0,0 +1,20 @@
+package test;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface Connect {
+
+    LoadStyle loadStyle() default LoadStyle.EAGER;
+
+    public enum LoadStyle {
+        EAGER,
+        DEFERRED,
+        LAZY
+    }
+}
diff --git a/test/files/run/t2464/Test.scala b/test/files/run/t2464/Test.scala
new file mode 100644
index 0000000..90e1a03
--- /dev/null
+++ b/test/files/run/t2464/Test.scala
@@ -0,0 +1,35 @@
+import scala.reflect.io.Streamable
+import scala.tools.asm.{ClassWriter, ClassReader}
+import scala.tools.asm.tree.ClassNode
+import scala.tools.partest._
+import scala.tools.partest.BytecodeTest.modifyClassFile
+import java.io.{FileOutputStream, FileInputStream, File}
+
+object Test extends DirectTest {
+  def code = ???
+
+  def compileCode(code: String) = {
+    val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+    compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+  }
+
+  def app = """
+    object O {
+      new test.Annotated
+    }
+  """
+
+  def show(): Unit = {
+    compileCode(app)
+    modifyClassFile(new File(testOutput.toFile, "test/Annotated.class")) {
+      (cn: ClassNode) =>
+        // As investigated https://issues.scala-lang.org/browse/SI-2464?focusedCommentId=64521&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-64521
+        // classfiles in the wild sometimes lack the required InnerClass attribute for nested enums that
+        // are referenced in an annotation. I don't know what compiler or bytecode processor leaves things
+        // that way, but this test makes sure we don't crash.
+        cn.innerClasses.clear()
+        cn
+    }
+    compileCode(app)
+  }
+}
diff --git a/test/files/run/t2512.scala b/test/files/run/t2512.scala
index de20af5..8166839 100644
--- a/test/files/run/t2512.scala
+++ b/test/files/run/t2512.scala
@@ -3,11 +3,11 @@ import scala.tools.nsc.util.HashSet
 object Test {
   val runs = 10000
   class Bop
-  
+
   def main(args: Array[String]): Unit = {
     val set: HashSet[Bop] = HashSet("Bop", 16)
     (1 to runs).toList foreach (_ => set addEntry new Bop)
-    
+
     assert(runs == set.size && set.size == set.iterator.length)
   }
 }
diff --git a/test/files/run/t2514.scala b/test/files/run/t2514.scala
index 21c4afb..0bf716e 100644
--- a/test/files/run/t2514.scala
+++ b/test/files/run/t2514.scala
@@ -1,7 +1,11 @@
+
+
+import scala.language.{ implicitConversions, postfixOps, reflectiveCalls }
+
 object Test
 {
   implicit def x[A](a: A) = new { def xx = a }
-  
+
   def main(args: Array[String]): Unit = {
     val r1 = 12 xx;
     val r2 = 12.xx
@@ -9,7 +13,7 @@ object Test
     val r4 = 12.xx + 12.xx
     val r5 = 12.`xx` + 12.xx
     val r6 = 12.3.`xx` + 12.xx
-    
+
     assert(r5 == 24)
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t2526.scala b/test/files/run/t2526.scala
index d371855..53f3059 100644
--- a/test/files/run/t2526.scala
+++ b/test/files/run/t2526.scala
@@ -4,18 +4,18 @@
  */
 object Test {
   import collection._
-  
+
   def main(args: Array[String]) {
     val m = new mutable.HashMap[String, String]
-    
+
     /* Use non hash-based structure for verification */
     val keys = List("a", "b", "c", "d", "e")
     val valueSuffix = "value"
     val values = keys.map(_ + valueSuffix)
     val entries = keys.zip(values)
-    
+
     for (k <- keys) m(k) = k + valueSuffix
-    
+
     assertForeach(keys, m.keySet.iterator)
     assertForeach(keys, m.keysIterator)
     assertForeach(keys, m.keySet)
@@ -25,7 +25,7 @@ object Test {
 
     assertForeach(entries, m)
   }
-  
+
   /* Checks foreach of `actual` goes over all the elements in `expected` */
   private def assertForeach[E](expected: Traversable[E], actual: Iterator[E]): Unit = {
     val notYetFound = new mutable.ArrayBuffer[E]() ++= expected
@@ -35,12 +35,12 @@ object Test {
     }
     assert(notYetFound.size == 0, "mutable.HashMap.foreach should have iterated over: " + notYetFound)
   }
-  
-  /* 
+
+  /*
    * Checks foreach of `actual` goes over all the elements in `expected`
    * We duplicate the method above because there is no common inteface between Traversable and
    * Iterator and we want to avoid converting between collections to ensure that we test what
-   * we mean to test. 
+   * we mean to test.
    */
   private def assertForeach[E](expected: Traversable[E], actual: Traversable[E]): Unit = {
     val notYetFound = new mutable.ArrayBuffer[E]() ++= expected
diff --git a/test/files/run/t2552.scala b/test/files/run/t2552.scala
index 17dcac5..0c6b4f0 100644
--- a/test/files/run/t2552.scala
+++ b/test/files/run/t2552.scala
@@ -2,11 +2,11 @@ object Test extends App {
 	def testTakeWhile = {
 		val numbers = Iterator.range(0, 50)
 		val zeroTo9 = numbers.takeWhile(x => { println("p(" + x + ")"); x < 10 } )
-		
+
 		zeroTo9.foreach(println _)
-		
+
 		val zeroTo1 = Iterator.range(0, 20).takeWhile(x => { println("p(" + x + ")"); x < 2 } )
-	
+
 		println(zeroTo1.hasNext)
 		println(zeroTo1.hasNext)
 		println(zeroTo1.next)
@@ -15,16 +15,16 @@ object Test extends App {
 		println(zeroTo1.hasNext)
 		println(zeroTo1.hasNext)
 	}
-	
+
 	def testFilter = {
 		val predicate = (x: Int) => { println("p(" + x + ")"); x % 2 == 0 }
-		
+
 		val evens = Iterator.range(0, 10).filter(predicate)
-		
+
 		println(evens.hasNext)
 		println(evens.hasNext)
 		println(evens.next)
-		
+
 		evens.foreach(println _)
 	}
 
diff --git a/test/files/run/t2577.check b/test/files/run/t2577.check
new file mode 100644
index 0000000..4a584e4
--- /dev/null
+++ b/test/files/run/t2577.check
@@ -0,0 +1 @@
+Nothing
diff --git a/test/files/run/t2577.scala b/test/files/run/t2577.scala
new file mode 100644
index 0000000..6d836a3
--- /dev/null
+++ b/test/files/run/t2577.scala
@@ -0,0 +1,17 @@
+case class annot[T]() extends scala.annotation.StaticAnnotation
+
+// type inference should infer @annot[Nothing] instead of @annot[T]
+// note the T is not in scope here!
+class Foo[@annot U]
+
+object Test {
+  import scala.reflect.runtime.universe._
+  val x = new Foo
+
+  def main(args: Array[String]): Unit = {
+    val targ = typeOf[x.type].widen match {
+      case TypeRef(_, _, arg :: _) => arg
+    }
+    println(targ)
+  }
+}
diff --git a/test/files/run/t2594_tcpoly.check b/test/files/run/t2594_tcpoly.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t2594_tcpoly.scala b/test/files/run/t2594_tcpoly.scala
index e759ca8..a9d2669 100644
--- a/test/files/run/t2594_tcpoly.scala
+++ b/test/files/run/t2594_tcpoly.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds }
+
 trait Monad[M[_]] {
   def foo[A](a: M[A]): M[A]
 }
@@ -15,4 +18,4 @@ object Test {
   }
 
   def main(as: Array[String]) { BarMonad[Int] foo (new Bar[Int, Int]) }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t261.check b/test/files/run/t261.check
new file mode 100644
index 0000000..35d242b
--- /dev/null
+++ b/test/files/run/t261.check
@@ -0,0 +1,2 @@
+A
+B
diff --git a/test/files/run/t261.scala b/test/files/run/t261.scala
new file mode 100644
index 0000000..d8ddb28
--- /dev/null
+++ b/test/files/run/t261.scala
@@ -0,0 +1,11 @@
+trait A { val foo: String = "A" }
+trait B {
+   private val foo: String = "B"
+   def f = println(foo)
+}
+object Test extends A with B {
+   def main(args: Array[String]) = {
+     println(foo)
+     f
+   }
+}
\ No newline at end of file
diff --git a/test/files/run/t2636.scala b/test/files/run/t2636.scala
index 3271f79..2f55c8a 100644
--- a/test/files/run/t2636.scala
+++ b/test/files/run/t2636.scala
@@ -1,30 +1,33 @@
+
+import scala.language.{ reflectiveCalls }
+
 object Test
 {
   type Foo = { def update(x: Int, value: String): Unit }
   type Foo2 = { def update(x: Int, value: String): Int }
   type Foo3 = { def update(x: Int, value: String): Array[Int] }
-  
+
   def alen() = {
     type L1 = { def length: Int }
     def len(p: L1) = p.length
     val x: L1 = Array(1,2,3)
     len(x)
   }
-  
+
   type A1 = { def apply(x: Int): String }
   def arrApply(a: A1, x: Int) = a(x)
-  
+
   def main(args: Array[String]): Unit = {
     val arr = new Array[String](3)
     val p1: Foo = arr
     def a1 = p1(0) = "b"
 
     val p2: Foo2 = new { def update(x: Int, value: String) = { p1(1) = "o" ; 1 } }
-    def a2 = p2(0) = "c"    
-    
+    def a2 = p2(0) = "c"
+
     val p3: Foo3 = new { def update(x: Int, value: String) = { p1(2) = "b" ; Array(1) } }
     def a3 = p3(10) = "hi mom"
-    
+
     a1 ; a2 ; a3 ;
 
     assert(arr.mkString == "bob")
@@ -32,4 +35,4 @@ object Test
     assert(arrApply(arr, 1) == "o")
     assert(arrApply(new { def apply(x: Int) = "tom" }, -100) == "tom")
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t266.scala b/test/files/run/t266.scala
index 1fd6dab..20a29da 100644
--- a/test/files/run/t266.scala
+++ b/test/files/run/t266.scala
@@ -2,9 +2,9 @@
 
 trait O {
   self: Test.type =>
-  
+
   Nil foreach identity
-  
+
   def f = (1 to 10).toList map identity
 }
 
diff --git a/test/files/run/t2721.check b/test/files/run/t2721.check
deleted file mode 100644
index 2bd7656..0000000
--- a/test/files/run/t2721.check
+++ /dev/null
@@ -1,2 +0,0 @@
-root:-rootVal-sub:-subVal-
-root:-rootVal-sub:-subVal-
diff --git a/test/files/run/t2721.scala b/test/files/run/t2721.scala
deleted file mode 100644
index 93af884..0000000
--- a/test/files/run/t2721.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-object Test
-{
-  val xml1 = <root xmlns:ns="nsUri" ns:at="rootVal"><sub ns:at="subVal"/></root>
-  val xml2= scala.xml.XML.loadString("""<root xmlns:ns="nsUri" ns:at="rootVal"><sub ns:at="subVal"/></root>""")
-  
-  def backslashSearch(x: xml.Elem) = "root:-"+(x \ "@{nsUri}at") +"-sub:-"+(x \ "sub" \ "@{nsUri}at") +"-"
-  
-  def main(args: Array[String]): Unit = {
-    println(backslashSearch(xml1))
-    println(backslashSearch(xml2))
-  }
-}
diff --git a/test/files/run/t2755.scala b/test/files/run/t2755.scala
index c279ecc..8d10b56 100644
--- a/test/files/run/t2755.scala
+++ b/test/files/run/t2755.scala
@@ -28,7 +28,7 @@ object Test {
     case x: Array[_]        => 6
     case _                  => 7
   }
-  
+
 
   def main(args: Array[String]): Unit = {
     println(f1(Array(1, 2, 3)))
@@ -38,7 +38,7 @@ object Test {
     println(f1(new Array[Any](10)))    // should match as Array[AnyRef]
     println(f1(Array(1L)))
     println(f1(null))
-    
+
     println(f2(Array(1, 2, 3)))
     println(f2(Array(1.0, -2.0, 3.0, 1.0)))
     println(f2(Array(1.0f, 2.0f, 3.0f, -3.0f)))
@@ -46,7 +46,7 @@ object Test {
     println(f2(new Array[Any](10)))    // should match as Array[AnyRef]
     println(f2(Array(1L)))
     println(f2(null))
-    
+
     println(f3(Array(1, 2, 3)))
     println(f3(Array(1.0, -2.0, 3.0, 1.0)))
     println(f3(Array(1.0f, 2.0f, 3.0f, -3.0f)))
diff --git a/test/files/run/t2800.scala b/test/files/run/t2800.scala
index cc6fb30..84d1de0 100644
--- a/test/files/run/t2800.scala
+++ b/test/files/run/t2800.scala
@@ -3,20 +3,20 @@ object Test {
   def f2 = (5: Any) match { case List(x @ _*) => x ; case _ => false }
   def f3 = (Nil: Any) match { case List(x @ _*) => x ; case _ => false }
   def f4 = (Array(1): Any) match { case List(x @ _*) => x ; case _ => false }
-  
+
   def f5 = ("": Any) match { case Array(x @ _*) => x ; case _ => false }
   def f6 = (5: Any) match { case Array(x @ _*) => x ; case _ => false }
   def f7 = (Nil: Any) match { case Array(x @ _*) => x ; case _ => false }
   def f8 = (Array(1): Any) match { case Array(x @ _*) => x ; case _ => false }
-  
+
   def f9 = ("": Any) match { case x @ List(_*) => x ; case _ => false }
   def f10 = ("": Any) match { case List(_*) => true ; case _ => false }
   def f11 = (Nil: Any) match { case List(_*) => true ; case _ => false }
   def f12 = ("": Any) match { case x @ Array(_*) => x ; case _ => false }
   def f13 = ("": Any) match { case Array(_*) => true ; case _ => false }
   def f14 = (Nil: Any) match { case Array(_*) => true ; case _ => false }
-  
-  
+
+
   def main(args: Array[String]): Unit = {
     println(f1)
     println(f2)
diff --git a/test/files/run/t2818.scala b/test/files/run/t2818.scala
index 19b67cb..746cdfb 100644
--- a/test/files/run/t2818.scala
+++ b/test/files/run/t2818.scala
@@ -1,6 +1,6 @@
 object Test extends App {
   println((List.range(1L, 15L) :\ 0L) (_ + _))
   println((List.range(1L, 1000000L) :\ 0L) (_ + _))
-  println((List.fill(5)(1) :\ 1) (_ - _)) 
-  println((List.fill(1000000)(1) :\ 1) (_ - _)) 
+  println((List.fill(5)(1) :\ 1) (_ - _))
+  println((List.fill(1000000)(1) :\ 1) (_ - _))
 }
diff --git a/test/files/run/t2849.scala b/test/files/run/t2849.scala
index 0995f64..cadf605 100644
--- a/test/files/run/t2849.scala
+++ b/test/files/run/t2849.scala
@@ -16,9 +16,9 @@ object Test {
 
   def ticketExample {
     var big = 100000
-    
+
     var aSortedSet: SortedSet[Int] = TreeSet(big)
-    
+
     for (i <- 1 until N) {
       aSortedSet = (aSortedSet - big) ++ (TreeSet(i, big - 1))
       big -= 1
@@ -42,7 +42,7 @@ object Test {
       }
     }
   }
-  
+
 }
 
 
diff --git a/test/files/run/t2867.scala b/test/files/run/t2867.scala
index 0d30f95..25e55ea 100644
--- a/test/files/run/t2867.scala
+++ b/test/files/run/t2867.scala
@@ -1,6 +1,6 @@
 object Test {
-  case class A(l: List[_]*) 
-  
+  case class A(l: List[_]*)
+
   def main(args: Array[String]): Unit = {
     /** Kind of sneaking a slightly different test in here as well as
      *  testing 2867.  How subversive.
@@ -9,7 +9,7 @@ object Test {
     val xs2 = List(1.0, 2.0, 3.0)
     val xs3 = List[Any](1.0f, 2.0f, 3.0f)
     val xs4 = List[Byte](1, 2, 3)
-    
+
     assert(A(List(xs1, xs2)) == A(List(xs3, xs4)))
   }
 }
diff --git a/test/files/run/t2873.check b/test/files/run/t2873.check
index 9198280..209b679 100644
--- a/test/files/run/t2873.check
+++ b/test/files/run/t2873.check
@@ -1 +1 @@
-scala.collection.immutable.RedBlack<A>.Empty$
+RedBlack<A>.Empty$
diff --git a/test/files/run/t2873.scala b/test/files/run/t2873.scala
index 8d48a8d..3a3cc59 100644
--- a/test/files/run/t2873.scala
+++ b/test/files/run/t2873.scala
@@ -1,5 +1,10 @@
+abstract class RedBlack[A] extends Serializable {
+  abstract class Tree[+B] extends Serializable
+  case object Empty extends Tree[Nothing]
+}
+
 object Test {
   def main(args: Array[String]): Unit = {
-    println(classOf[scala.collection.immutable.RedBlack[_]].getMethod("Empty").getGenericReturnType)
+    println(classOf[RedBlack[_]].getMethod("Empty").getGenericReturnType)
   }
 }
diff --git a/test/files/run/t2886.check b/test/files/run/t2886.check
index a70f993..61e3694 100644
--- a/test/files/run/t2886.check
+++ b/test/files/run/t2886.check
@@ -1,5 +1,5 @@
 ((x: Predef.String) => {
-  val x$1 = x;
-  val x$2 = x;
+  <artifact> val x$1 = x;
+  <artifact> val x$2 = x;
   Test.this.test(x$2, x$1)
 })
diff --git a/test/files/run/t2958.scala b/test/files/run/t2958.scala
index b9563a1..dcd24ec 100644
--- a/test/files/run/t2958.scala
+++ b/test/files/run/t2958.scala
@@ -2,14 +2,14 @@ object Test {
   def f(args: Array[String]) = args match {
     case Array("-p", prefix, from, to) =>
       prefix + from + to
-    
+
     case Array(from, to) =>
       from + to
 
     case _ =>
       "default"
   }
-  
+
   def main(args: Array[String]) {
     assert(f(Array("1", "2")) == "12")
   }
diff --git a/test/files/run/t3026.scala b/test/files/run/t3026.scala
index 0231c7b..22dde9c 100755
--- a/test/files/run/t3026.scala
+++ b/test/files/run/t3026.scala
@@ -3,6 +3,6 @@ object Test {
   case object RED extends Colour
   case object YELLOW extends Colour
   val items = Array(RED, YELLOW)
-  
+
   def main(args: Array[String]): Unit = items foreach println
 }
diff --git a/test/files/run/t3038.scala b/test/files/run/t3038.scala
index 986fc98..7eb69f7 100644
--- a/test/files/run/t3038.scala
+++ b/test/files/run/t3038.scala
@@ -2,18 +2,18 @@ class A {
   private lazy val a1 = "a1"
   object B
   private lazy val a2 = "a2"
-  
+
 
   @transient lazy val a3 = "a3"
   @transient private lazy val a4 = "a4"
   @transient lazy val a5 = "a5"
   @transient private lazy val a6 = "a6"
-  
+
   final val a7 = "a7"
   private final val a8 = "a8"
   @transient final val a9 = "a9"
-  
-  
+
+
 
 
   def run = {
@@ -27,13 +27,13 @@ class A {
     println(a7)
     println(a8)
     println(a9)
-  }  
+  }
 }
 
 class C extends A {
   private lazy val c1 = "c1"
   lazy val c2 = "c2"
-  
+
   private lazy val c3 = "c3"
 
   @transient lazy val c4 = "c4"
@@ -41,7 +41,7 @@ class C extends A {
   @transient lazy val c6 = "c6"
   @transient private lazy val c7 = "c7"
   lazy val c8 = "c8"
-      
+
   final val c9 = "c9"
   private final val c10 = "c10"
 
diff --git a/test/files/run/t3038c/A_1.scala b/test/files/run/t3038c/A_1.scala
index 14579fc..91564e3 100644
--- a/test/files/run/t3038c/A_1.scala
+++ b/test/files/run/t3038c/A_1.scala
@@ -59,7 +59,7 @@ class A {
     lazy val a57 = 58
     lazy val a58 = 59
     lazy val a59 = 60
-    private lazy val a60 = 61 
+    private lazy val a60 = 61
     private lazy val a61 = 62
     private lazy val a62 = 63
     private lazy val a63 = 64
@@ -69,7 +69,7 @@ class A {
     private lazy val a67 = 68
     private lazy val a68 = 69
     private lazy val a69 = 70
-    
+
     def run = {
         println(List(a0, a1, a2, a3, a4, a5, a6, a7, a8, a9,
                 a10, a11, a12, a13, a14, a15, a16, a17, a18, a19,
diff --git a/test/files/run/t3038d.scala b/test/files/run/t3038d.scala
index 6cd2d83..44fb047 100644
--- a/test/files/run/t3038d.scala
+++ b/test/files/run/t3038d.scala
@@ -16,19 +16,17 @@ trait Foo {
   }
 }
 
-
- at serializable
-class Bar extends Foo {
+class Bar extends Foo with Serializable {
   @transient protected var first: Any = null
   def size = a
   @transient var second: Any = null
-    
-  def checkMember { first }
-    
+
+  def checkMember { if (first == null) print("") }
+
   private def writeObject(out: java.io.ObjectOutputStream) {
     serializeTo(out)
   }
-  
+
   private def readObject(in: java.io.ObjectInputStream) {
     first = null
     init(in)
@@ -40,7 +38,7 @@ object Test {
     val in = new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(bytes))
     in.readObject.asInstanceOf[A]
   }
-  
+
   private def toBytes(o: AnyRef): Array[Byte] = {
     val bos = new java.io.ByteArrayOutputStream
     val out = new java.io.ObjectOutputStream(bos)
@@ -49,7 +47,7 @@ object Test {
     bos.toByteArray
   }
 
-    
+
   def main(args: Array[String]) {
     val a1 = new Bar()
     val serialized:Array[Byte] = toBytes(a1)
diff --git a/test/files/run/t3050.scala b/test/files/run/t3050.scala
index ca9d91e..160f8b6 100644
--- a/test/files/run/t3050.scala
+++ b/test/files/run/t3050.scala
@@ -1,9 +1,9 @@
 object Test {
   def main(args: Array[String]): Unit = {
-    val x = 
+    val x =
       try { ("": Any) match { case List(_*) => true } }
-      catch { case _ => false }
-    
+      catch { case _: Throwable => false }
+
     assert(!x)
   }
 }
diff --git a/test/files/run/t3112.scala b/test/files/run/t3112.scala
index 88677fa..eb8eec6 100644
--- a/test/files/run/t3112.scala
+++ b/test/files/run/t3112.scala
@@ -7,5 +7,5 @@ object Test {
     println((Vector() ++ (0 until 32)) takeRight 0) // works
     println((Vector() ++ (0 until 33)) takeRight 0) // error
   }
-  
+
 }
\ No newline at end of file
diff --git a/test/files/run/t3150.scala b/test/files/run/t3150.scala
index 8acdb50..034703b 100644
--- a/test/files/run/t3150.scala
+++ b/test/files/run/t3150.scala
@@ -1,7 +1,7 @@
 object Test {
   case object Bob { override def equals(other: Any) = true }
   def f(x: Any) = x match { case Bob => Bob }
-  
+
   def main(args: Array[String]): Unit = {
     assert(f(Bob) eq Bob)
     assert(f(0) eq Bob)
diff --git a/test/files/run/t3158.scala b/test/files/run/t3158.scala
index 2261b5c..c824b62 100644
--- a/test/files/run/t3158.scala
+++ b/test/files/run/t3158.scala
@@ -1,6 +1,6 @@
 object Test {
   def main(args: Array[String]) {
-    println(args.map(_ => foo _).deep) 
+    println(args.map(_ => foo _).deep)
   }
 
   def foo(xs: String*) {
diff --git a/test/files/run/t3175.scala b/test/files/run/t3175.scala
index aff2e67..ea56ded 100644
--- a/test/files/run/t3175.scala
+++ b/test/files/run/t3175.scala
@@ -1,51 +1,54 @@
 /** A bit down the road this test will examine
  *  the bytecode.
  */
+
+import scala.language.reflectiveCalls
+
 object Test {
   def len(x:{ def length: Int }) = x.length
   def f1(x:{ def apply(x: Int): Long }) = x(0)
   def f2(x:{ def apply(x: Int): Byte }) = x(0)
   def f3(x:{ def apply(x: Int): String }) = x(0).length
-  
+
   def f4(x:{ def update(x: Int, y: Long): Unit }, y: Long) = x(0) = y
   def f5(x:{ def update(x: Int, y: Byte): Unit }, y: Byte) = x(0) = y
   def f6(x:{ def update(x: Int, y: String): Unit }, y: String) = x(0) = y
-  
+
   def f7(x: { def length: Any }) = x.length
-  
+
   def f8(x: { def apply(x: Int): Any }) = x(0)
   def f9(x: { def apply(x: Int): Int }) = x(0)
   def f10(x: { def apply(x: Int): Long }) = x(0)
-  
+
   // update has some interesting special cases
   def f11(x:{ def update(x: Int, y: Long): Any }, y: Long) = x(0) = y
-  def f12(x:{ def update(x: Int, y: String): AnyVal }, y: String) = x(0) = y  
+  def f12(x:{ def update(x: Int, y: String): AnyVal }, y: String) = x(0) = y
   def f13(x:{ def update(x: Int, y: String): AnyRef }, y: String) = x(0) = y
-    
+
   // doesn't work yet, see #3197
   // def fclone(x:{ def clone(): AnyRef }) = x.clone()
-  
+
   def main(args: Array[String]): Unit = {
     val longs = Array(5L)
     val bytes = Array(5: Byte)
     val strs = Array("abcde", "fghjij")
-    
+
     println(len(Array(1,2,3)) + len(Array(4.0,5.0f)) + len(Array("abc", 5)) + len("bop"))
     println(f1(longs) + f2(bytes) + f3(strs))
-    
+
     f4(longs, 1)
     f5(bytes, 1)
     f6(strs, "a")
-    
+
     println(f1(longs) + f2(bytes) + f3(strs))
-    
+
     println(f7(Array(1,2,3)))
     println(f7("def"))
-    
+
     println(f8(Array(5)))
     println(f9(Array(5)))
     println(f10(Array(5)))
-    
+
     f11(longs, 100L)
     f12(strs, "jabooboo")
     println(longs(0))
diff --git a/test/files/continuations-run/t3199b.check b/test/files/run/t3199b.check
similarity index 100%
rename from test/files/continuations-run/t3199b.check
rename to test/files/run/t3199b.check
diff --git a/test/files/run/t3199b.scala b/test/files/run/t3199b.scala
new file mode 100644
index 0000000..5010f2b
--- /dev/null
+++ b/test/files/run/t3199b.scala
@@ -0,0 +1,11 @@
+object Test {
+
+  def test() = {
+    java.util.Arrays.asList(Array(1,2,3):_*)
+  }
+
+  def main(args: Array[String]) {
+    println(test())
+  }
+
+}
diff --git a/test/files/run/t3232.scala b/test/files/run/t3232.scala
index feff7e7..900a1f5 100644
--- a/test/files/run/t3232.scala
+++ b/test/files/run/t3232.scala
@@ -4,18 +4,18 @@ object Test {
   val r2 = 1 to Int.MaxValue
   val r3 = Int.MinValue to -2
   val r4 = Int.MinValue until -1
-  
+
   // some exceptional conditions
   val e1 = () => (0 to Int.MaxValue).length
   val e2 = () => (5 until 5).last
-  
+
   def main(args: Array[String]): Unit = {
     List(r1, r2, r3, r4) foreach (x => assert(x.length == Int.MaxValue))
-    
+
     // exception required
     List(e1, e2) foreach { f =>
       try { f() ; assert(false) }
-      catch { case _ => () }
+      catch { case _: Throwable => () }
     }
   }
 }
diff --git a/test/files/run/t3235-minimal.check b/test/files/run/t3235-minimal.check
new file mode 100644
index 0000000..d7f7160
--- /dev/null
+++ b/test/files/run/t3235-minimal.check
@@ -0,0 +1,12 @@
+t3235-minimal.scala:3: warning: method round in class RichInt is deprecated: This is an integer type; there is no reason to round it.  Perhaps you meant to call this on a floating-point value?
+    assert(123456789.round == 123456789)
+                     ^
+t3235-minimal.scala:4: warning: method round in package math is deprecated: This is an integer type; there is no reason to round it.  Perhaps you meant to call this with a floating-point value?
+    assert(math.round(123456789) == 123456789)
+                ^
+t3235-minimal.scala:5: warning: method round in class RichLong is deprecated: This is an integer type; there is no reason to round it.  Perhaps you meant to call this on a floating-point value?
+    assert(1234567890123456789L.round == 1234567890123456789L)
+                                ^
+t3235-minimal.scala:6: warning: method round in package math is deprecated: This is an integer type; there is no reason to round it.  Perhaps you meant to call this with a floating-point value?
+    assert(math.round(1234567890123456789L) == 1234567890123456789L)
+                ^
diff --git a/test/files/neg/t5589neg.flags b/test/files/run/t3235-minimal.flags
similarity index 100%
copy from test/files/neg/t5589neg.flags
copy to test/files/run/t3235-minimal.flags
diff --git a/test/files/run/t3235-minimal.scala b/test/files/run/t3235-minimal.scala
new file mode 100644
index 0000000..dc9907b
--- /dev/null
+++ b/test/files/run/t3235-minimal.scala
@@ -0,0 +1,8 @@
+object Test {
+  def main(args: Array[String]) {
+    assert(123456789.round == 123456789)
+    assert(math.round(123456789) == 123456789)
+    assert(1234567890123456789L.round == 1234567890123456789L)
+    assert(math.round(1234567890123456789L) == 1234567890123456789L)
+  }
+}
diff --git a/test/files/run/t3242.scala b/test/files/run/t3242.scala
index f8defaa..0a449d5 100644
--- a/test/files/run/t3242.scala
+++ b/test/files/run/t3242.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds }
+
 object Test {
 
  def benchmarkA(num: Int) {
diff --git a/test/files/run/t3269.scala b/test/files/run/t3269.scala
index 6fe7271..17e42cd 100644
--- a/test/files/run/t3269.scala
+++ b/test/files/run/t3269.scala
@@ -4,6 +4,6 @@ object Test {
     println(it.next)
     it.hasNext
     it.hasNext
-    it.hasNext 
+    it.hasNext
   }
 }
diff --git a/test/files/run/t3273.scala b/test/files/run/t3273.scala
index a4dfc4b..379a8a2 100644
--- a/test/files/run/t3273.scala
+++ b/test/files/run/t3273.scala
@@ -1,7 +1,7 @@
 object Test {
   val num1: Stream[Int] = 1 #:: num1.map(_ + 1)
   val num2: Stream[Int] = 1 #:: num2.iterator.map(_ + 1).toStream
-  
+
   def main(args: Array[String]): Unit = {
     val x1 = (num1 take 10).toList
     val x2 = (num2 take 10).toList
diff --git a/test/files/run/t3326.scala b/test/files/run/t3326.scala
index f70cb01..4ac7ef9 100644
--- a/test/files/run/t3326.scala
+++ b/test/files/run/t3326.scala
@@ -10,7 +10,7 @@ import scala.math.Ordering
  *
  *  There are 2 `++` overloads - a generic one in traversables and
  *  a map-specific one in `MapLike` - which knows about the ordering.
- *  
+ *
  *  The problem here is that the expected return type for the expression
  *  in which `++` appears drives the decision of the overload that needs
  *  to be taken.
@@ -18,36 +18,36 @@ import scala.math.Ordering
  *  `SortedMap`, but `immutable.Map` instead.
  *  This is why `collection.SortedMap` used to resort to the generic
  *  `TraversableLike.++` which knows nothing about the ordering.
- *  
+ *
  *  To avoid `collection.SortedMap`s resort to the more generic `TraverableLike.++`,
  *  we override the `MapLike.++` overload in `collection.SortedMap` to return
  *  the proper type `SortedMap`.
  */
 object Test {
-  
+
   def main(args: Array[String]) {
     testCollectionSorted()
     testImmutableSorted()
   }
-  
+
   def testCollectionSorted() {
     import collection._
     val order = implicitly[Ordering[Int]].reverse
     var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
     var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
-    
+
     m1 += (1 -> "World")
     m1 += (2 -> "Hello")
-    
+
     m2 += (4 -> "Bar")
     m2 += (5 -> "Foo")
-    
+
     val m3: SortedMap[Int, String] = m1 ++ m2
-    
+
     println(m1)
     println(m2)
     println(m3)
-    
+
     println(m1 + (3 -> "?"))
   }
 
@@ -56,19 +56,19 @@ object Test {
     val order = implicitly[Ordering[Int]].reverse
     var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
     var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
-    
+
     m1 += (1 -> "World")
     m1 += (2 -> "Hello")
-    
+
     m2 += (4 -> "Bar")
     m2 += (5 -> "Foo")
-    
+
     val m3: SortedMap[Int, String] = m1 ++ m2
-    
+
     println(m1)
     println(m2)
     println(m3)
-    
+
     println(m1 + (3 -> "?"))
   }
 }
diff --git a/test/files/run/virtpatmat_opt_sharing.check b/test/files/run/t3346a.check
similarity index 100%
copy from test/files/run/virtpatmat_opt_sharing.check
copy to test/files/run/t3346a.check
diff --git a/test/files/run/t3346a.scala b/test/files/run/t3346a.scala
new file mode 100644
index 0000000..c0a90b0
--- /dev/null
+++ b/test/files/run/t3346a.scala
@@ -0,0 +1,11 @@
+import scala.language.implicitConversions
+
+object Test extends App {
+  class Rep[T](x : T)
+
+  class SomeOps[T](x : Rep[T]) { def foo = 1 }
+  implicit def mkOps[X, T](x : X)(implicit conv: X => Rep[T]) : SomeOps[T] = new SomeOps(conv(x))
+
+  val a: Rep[Int] = new Rep(42)
+  println(a.foo)
+}
\ No newline at end of file
diff --git a/test/files/run/t3346d.scala b/test/files/run/t3346d.scala
new file mode 100644
index 0000000..3f79896
--- /dev/null
+++ b/test/files/run/t3346d.scala
@@ -0,0 +1,21 @@
+import scala.language.implicitConversions
+
+object Test extends App {
+  trait TARInt
+
+  trait Basket[A,B] {
+    def iAmABasket = {}
+  }
+
+  trait BasketFactory[A,B] {
+    def create(v: A): Basket[A,B]
+  }
+
+  implicit val bf = new BasketFactory[Int,TARInt] {
+    def create(v: Int): Basket[Int,TARInt] = new Basket[Int, TARInt]{}
+  }
+
+  implicit def i2[A,B](a: A)(implicit bf: BasketFactory[A,B]): Basket[A,B] = bf.create(a)
+
+  1.iAmABasket // <-- i2 conversion not applicable
+}
\ No newline at end of file
diff --git a/test/files/run/t3346e.check b/test/files/run/t3346e.check
new file mode 100644
index 0000000..71a57ff
--- /dev/null
+++ b/test/files/run/t3346e.check
@@ -0,0 +1,12 @@
+eqw
+List(0, 2)
+List(0, 2)
+BitSet(0, 2)
+Vector(113, 119, 101)
+qwe
+List(2, 0)
+List(0!)
+BitSet(0, 2)
+qwe
+List(2, 0)
+qwe
diff --git a/test/files/run/t3346e.scala b/test/files/run/t3346e.scala
new file mode 100644
index 0000000..ac0de56
--- /dev/null
+++ b/test/files/run/t3346e.scala
@@ -0,0 +1,81 @@
+import scala.language.implicitConversions
+import scala.collection.generic.CanBuildFrom
+import scala.math.Ordering
+import collection.{TraversableLike, SeqLike}
+import collection.immutable.BitSet
+
+class QuickSort[Coll](a: Coll) {
+  //should be able to sort only something with defined order (someting like a Seq)
+  def quickSort[T](implicit ev0: Coll => SeqLike[T, Coll],
+                   cbf: CanBuildFrom[Coll, T, Coll],
+                   n: Ordering[T]): Coll = {
+    quickSortAnything(ev0, cbf, n)
+  }
+
+  //we can even sort a Set, if we really want to
+  def quickSortAnything[T](implicit ev0: Coll => TraversableLike[T, Coll],
+                           cbf: CanBuildFrom[Coll, T, Coll],
+                           n: Ordering[T]): Coll = {
+    import n._
+    if (a.size < 2) {
+      a
+    } else {
+      // We pick the first value for the pivot.
+      val pivot = a.head
+      val (lower, tmp) = a.partition(_ < pivot)
+      val (upper, same) = tmp.partition(_ > pivot)
+      val b = cbf()
+      b.sizeHint(a.size)
+      b ++= new QuickSort(lower).quickSortAnything
+      b ++= same
+      b ++= new QuickSort(upper).quickSortAnything
+      b.result
+    }
+  }
+}
+
+class FilterMap[Repr](a: Repr) {
+  def filterMap[A, B, That](f: A => Option[B])(implicit ev0: Repr => TraversableLike[A, Repr],
+                                               cbf: CanBuildFrom[Repr, B, That]): That = {
+    a.flatMap(e => f(e).toSeq)
+  }
+}
+
+class FilterMapFixed[A, Repr <% TraversableLike[A, Repr]](a: Repr) {
+  def filterMap2[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = {
+    a.flatMap(e => f(e).toSeq)
+  }
+}
+
+object MyEnhancements {
+  implicit def toQS[Coll](a: Coll) = new QuickSort(a)
+  implicit def toFM[Coll](a: Coll) = new FilterMap(a)
+  implicit def toFM2[A, Repr <% TraversableLike[A, Repr]](a: Repr) = new FilterMapFixed(a)
+}
+
+object Test extends App {
+
+  import MyEnhancements._
+
+  println("qwe".quickSort)
+  println(Array(2, 0).quickSort.toList)
+  println(Seq(2, 0).quickSort)
+  //not very useful to sort a set, but just as a demonstration
+  println(BitSet(2, 0).quickSortAnything)
+
+  //need to hint type inferencer,
+  //probably will be able to overcome after https://issues.scala-lang.org/browse/SI-4699  and
+  // related issues are  fixed (by moving ev0 parameter from filterMap to toFM), see toFM2
+  println("qwe".filterMap((c: Char) => Some(c.toInt)))
+  println("qwe".filterMap((c: Char) => Some(c)))
+  println(Array(2, 0).filterMap((c: Int) => Some(c.toInt)).toList)
+  println(Seq(2, 0).filterMap((c: Int) => if (c < 2) Some(c + "!") else None))
+  def test(i:Int) = Option(i)
+  println(BitSet(2,0).filterMap(test))
+
+  println(toFM2("qwe").filterMap2(c => Some(c)))
+  println(toFM2(Array(2, 0)).filterMap2(c => Some(c.toInt)).toList)
+  //No implicit view available from java.lang.String => scala.collection.TraversableLike[A,java.lang.String]. :(
+  //Not anymore :)
+  println("qwe".filterMap2(c => Some(c)))
+}
diff --git a/test/files/run/t3346f.check b/test/files/run/t3346f.check
new file mode 100644
index 0000000..fd3c81a
--- /dev/null
+++ b/test/files/run/t3346f.check
@@ -0,0 +1,2 @@
+5
+5
diff --git a/test/files/run/t3346f.scala b/test/files/run/t3346f.scala
new file mode 100644
index 0000000..4799ca2
--- /dev/null
+++ b/test/files/run/t3346f.scala
@@ -0,0 +1,15 @@
+import scala.language.implicitConversions
+import scala.language.reflectiveCalls
+
+object Test extends App {
+  trait Foo[A]
+  implicit def fooString: Foo[String] = null
+  implicit def value[A](implicit foo: Foo[A]) = 5
+
+  println(implicitly[Int])
+
+  implicit def conversion[A](x: Int)(implicit foo: Foo[A]) = new {
+    def aMethod = 5
+  }
+  println(1.aMethod)
+}
diff --git a/test/files/run/t3346g.check b/test/files/run/t3346g.check
new file mode 100644
index 0000000..ce89482
--- /dev/null
+++ b/test/files/run/t3346g.check
@@ -0,0 +1 @@
+A(3,asdf)
diff --git a/test/files/run/t3346g.scala b/test/files/run/t3346g.scala
new file mode 100644
index 0000000..d7c9d79
--- /dev/null
+++ b/test/files/run/t3346g.scala
@@ -0,0 +1,9 @@
+import scala.language.implicitConversions
+
+case class A(b: Int, c: String)
+
+object Test extends App {
+  implicit def s2i(s: String): Int = s.length
+  implicit def toA[T](t: T)(implicit f: T => Int): A = A(f(t), t.toString)
+  println("asdf".copy(b = 3))
+}
\ No newline at end of file
diff --git a/test/files/run/t3346h.check b/test/files/run/t3346h.check
new file mode 100644
index 0000000..587be6b
--- /dev/null
+++ b/test/files/run/t3346h.check
@@ -0,0 +1 @@
+x
diff --git a/test/files/run/t3346h.scala b/test/files/run/t3346h.scala
new file mode 100644
index 0000000..97ebc93
--- /dev/null
+++ b/test/files/run/t3346h.scala
@@ -0,0 +1,9 @@
+import scala.language.implicitConversions
+
+object Test extends App {
+  trait Fundep[T, U] { def u(t: T): U }
+  class C { def y = "x" }
+  implicit val FundepStringC = new Fundep[String, C]{ def u(t: String) = new C }
+  implicit def foo[T, U](x: T)(implicit y: Fundep[T, U]): U = y.u(x)
+  println("x".y)
+}
\ No newline at end of file
diff --git a/test/files/run/t3346j.check b/test/files/run/t3346j.check
new file mode 100644
index 0000000..59e8626
--- /dev/null
+++ b/test/files/run/t3346j.check
@@ -0,0 +1 @@
+Int
diff --git a/test/files/run/t3346j.scala b/test/files/run/t3346j.scala
new file mode 100644
index 0000000..98b5a87
--- /dev/null
+++ b/test/files/run/t3346j.scala
@@ -0,0 +1,11 @@
+import scala.language.implicitConversions
+import scala.language.reflectiveCalls
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+  class A[T]
+  class B[T]
+  implicit def foo[T: TypeTag](a: A[T])(implicit b: B[T]) = new { def baz = typeOf[T] }
+  implicit def bar[T <: Int]: B[T] = new B[T]()
+  println(new A[Int]().baz)
+}
\ No newline at end of file
diff --git a/test/files/run/t3361.check b/test/files/run/t3361.check
new file mode 100644
index 0000000..c18bdc9
--- /dev/null
+++ b/test/files/run/t3361.check
@@ -0,0 +1 @@
+warning: there were 16 deprecation warning(s); re-run with -deprecation for details
diff --git a/test/files/run/t3361.scala b/test/files/run/t3361.scala
index 7fbc677..d49bcbc 100644
--- a/test/files/run/t3361.scala
+++ b/test/files/run/t3361.scala
@@ -40,7 +40,7 @@ object Test extends App {
   def insert_1 {
     val ten = DoubleLinkedList(1 to 10: _*)
     ten.append(DoubleLinkedList(11))
-    
+
     // Post-insert size test
     require(11 == ten.size)
     // Post-insert data test
@@ -57,7 +57,7 @@ object Test extends App {
       DoubleLinkedList().insert(ten)
     } catch {
       case _: IllegalArgumentException => require(true)
-      case _ => require(false)
+      case _: Throwable => require(false)
     }
     val zero = DoubleLinkedList(0)
     zero.insert(ten)
@@ -87,7 +87,7 @@ object Test extends App {
       DoubleLinkedList().append(ten)
     } catch {
       case _: IllegalArgumentException => require(true)
-      case _ => require(false)
+      case _: Throwable => require(false)
     }
     val zero = DoubleLinkedList(0)
     zero.append(ten)
diff --git a/test/files/run/t3376.check b/test/files/run/t3376.check
index 3a1d7d5..cc6949d 100644
--- a/test/files/run/t3376.check
+++ b/test/files/run/t3376.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> class M[@specialized T] { override def toString = "mmm" }
 defined class M
 
diff --git a/test/files/run/t3395.scala b/test/files/run/t3395.scala
index 01cc431..b4990a1 100644
--- a/test/files/run/t3395.scala
+++ b/test/files/run/t3395.scala
@@ -1,11 +1,11 @@
 object Test {
   def main(args: Array[String]): Unit = {
-    Seq("") match { 
+    Seq("") match {
       case Seq("")      => println("abc")
       case Seq(_, _, x) => println(x)
     }
-    
-    Seq(1, 2, "def") match { 
+
+    Seq(1, 2, "def") match {
       case Seq("")      => println("abc")
       case Seq(_, _, x) => println(x)
     }
diff --git a/test/files/run/t3397.scala b/test/files/run/t3397.scala
index 243fe76..2c8cbed 100644
--- a/test/files/run/t3397.scala
+++ b/test/files/run/t3397.scala
@@ -1,7 +1,7 @@
 object Test {
   def main(args: Array[String]): Unit = {
     val x = Seq(Set(1,2,3),Set(4,5,6),Set(7,8,9)).transpose
-    
+
     ()
   }
 }
diff --git a/test/files/run/t3425.check b/test/files/run/t3425.check
new file mode 100644
index 0000000..5be779b
--- /dev/null
+++ b/test/files/run/t3425.check
@@ -0,0 +1,4 @@
+123
+456
+789
+789
diff --git a/test/files/run/t3425.scala b/test/files/run/t3425.scala
new file mode 100644
index 0000000..c61d107
--- /dev/null
+++ b/test/files/run/t3425.scala
@@ -0,0 +1,41 @@
+import scala.language.reflectiveCalls
+object Other {
+  abstract class Foo {
+    type R1 <:    { def x: Any }
+    type R2 <: R1 { def x: Int }
+
+    def f(x: R2) = x.x
+  }
+
+  abstract class Bar {
+    trait R0      { def x: Any }
+    type R1 <: R0 { def x: AnyVal }
+    type R2 <: R1 { def x: Int }
+
+    def f(x: R2) = x.x
+  }
+}
+object Test {
+  trait A
+  trait B
+  def x(a: (A { val y: Int }) with B { val y: Int }) = a.y
+
+  class C extends A with B {
+    val y = 456
+  }
+
+  class Bippy { def x: Int = 789 }
+
+  def main(args: Array[String]): Unit = {
+    println(x(new A with B { val y = 123 }))
+    println(x(new C))
+
+    { val foo = new Other.Foo { type R1 = Bippy ; type R2 = Bippy }
+      println(foo.f(new Bippy))
+    }
+    { val bar = new Other.Bar { type R1 = Bippy with R0 ; type R2 = R1 }
+      println(bar.f(new Bippy with bar.R0))
+    }
+  }
+}
+
diff --git a/test/files/run/t3425b.check b/test/files/run/t3425b.check
new file mode 100644
index 0000000..5d34c43
--- /dev/null
+++ b/test/files/run/t3425b.check
@@ -0,0 +1,152 @@
+==== Direct Calls ====
+
+Any{val y: P} with C{val y: P}
+Any{val y: P} with C{val y: Q}
+Any{val y: P} with C{val y: R forSome { type R <: P with Q }}
+Any{val y: Q} with C{val y: P}
+Any{val y: Q} with C{val y: Q}
+Any{val y: Q} with C{val y: R forSome { type R <: P with Q }}
+Any{val y: R forSome { type R <: P with Q }} with C{val y: P}
+Any{val y: R forSome { type R <: P with Q }} with C{val y: Q}
+Any{val y: R forSome { type R <: P with Q }} with C{val y: R forSome { type R <: P with Q }}
+A{val y: P} with C{val y: P}
+A{val y: P} with C{val y: Q}
+A{val y: P} with C{val y: R forSome { type R <: P with Q }}
+A{val y: Q} with C{val y: P}
+A{val y: Q} with C{val y: Q}
+A{val y: Q} with C{val y: R forSome { type R <: P with Q }}
+A{val y: R forSome { type R <: P with Q }} with C{val y: P}
+A{val y: R forSome { type R <: P with Q }} with C{val y: Q}
+A{val y: R forSome { type R <: P with Q }} with C{val y: R forSome { type R <: P with Q }}
+B{val y: P} with C{val y: P}
+B{val y: P} with C{val y: Q}
+B{val y: P} with C{val y: R forSome { type R <: P with Q }}
+B{val y: Q} with C{val y: P}
+B{val y: Q} with C{val y: Q}
+B{val y: Q} with C{val y: R forSome { type R <: P with Q }}
+B{val y: R forSome { type R <: P with Q }} with C{val y: P}
+B{val y: R forSome { type R <: P with Q }} with C{val y: Q}
+B{val y: R forSome { type R <: P with Q }} with C{val y: R forSome { type R <: P with Q }}
+C{val y: P} with C{val y: P}
+C{val y: P} with C{val y: Q}
+C{val y: P} with C{val y: R forSome { type R <: P with Q }}
+C{val y: Q} with C{val y: P}
+C{val y: Q} with C{val y: Q}
+C{val y: Q} with C{val y: R forSome { type R <: P with Q }}
+C{val y: R forSome { type R <: P with Q }} with C{val y: P}
+C{val y: R forSome { type R <: P with Q }} with C{val y: Q}
+C{val y: R forSome { type R <: P with Q }} with C{val y: R forSome { type R <: P with Q }}
+
+
+==== Reflective Calls ====
+
+Any{val y: P} with Any{val y: P}
+Any{val y: P} with Any{val y: Q}
+Any{val y: P} with Any{val y: R forSome { type R <: P with Q }}
+Any{val y: P} with A{val y: P}
+Any{val y: P} with A{val y: Q}
+Any{val y: P} with A{val y: R forSome { type R <: P with Q }}
+Any{val y: P} with B{val y: P}
+Any{val y: P} with B{val y: Q}
+Any{val y: P} with B{val y: R forSome { type R <: P with Q }}
+Any{val y: Q} with Any{val y: P}
+Any{val y: Q} with Any{val y: Q}
+Any{val y: Q} with Any{val y: R forSome { type R <: P with Q }}
+Any{val y: Q} with A{val y: P}
+Any{val y: Q} with A{val y: Q}
+Any{val y: Q} with A{val y: R forSome { type R <: P with Q }}
+Any{val y: Q} with B{val y: P}
+Any{val y: Q} with B{val y: Q}
+Any{val y: Q} with B{val y: R forSome { type R <: P with Q }}
+Any{val y: R forSome { type R <: P with Q }} with Any{val y: P}
+Any{val y: R forSome { type R <: P with Q }} with Any{val y: Q}
+Any{val y: R forSome { type R <: P with Q }} with Any{val y: R forSome { type R <: P with Q }}
+Any{val y: R forSome { type R <: P with Q }} with A{val y: P}
+Any{val y: R forSome { type R <: P with Q }} with A{val y: Q}
+Any{val y: R forSome { type R <: P with Q }} with A{val y: R forSome { type R <: P with Q }}
+Any{val y: R forSome { type R <: P with Q }} with B{val y: P}
+Any{val y: R forSome { type R <: P with Q }} with B{val y: Q}
+Any{val y: R forSome { type R <: P with Q }} with B{val y: R forSome { type R <: P with Q }}
+A{val y: P} with Any{val y: P}
+A{val y: P} with Any{val y: Q}
+A{val y: P} with Any{val y: R forSome { type R <: P with Q }}
+A{val y: P} with A{val y: P}
+A{val y: P} with A{val y: Q}
+A{val y: P} with A{val y: R forSome { type R <: P with Q }}
+A{val y: P} with B{val y: P}
+A{val y: P} with B{val y: Q}
+A{val y: P} with B{val y: R forSome { type R <: P with Q }}
+A{val y: Q} with Any{val y: P}
+A{val y: Q} with Any{val y: Q}
+A{val y: Q} with Any{val y: R forSome { type R <: P with Q }}
+A{val y: Q} with A{val y: P}
+A{val y: Q} with A{val y: Q}
+A{val y: Q} with A{val y: R forSome { type R <: P with Q }}
+A{val y: Q} with B{val y: P}
+A{val y: Q} with B{val y: Q}
+A{val y: Q} with B{val y: R forSome { type R <: P with Q }}
+A{val y: R forSome { type R <: P with Q }} with Any{val y: P}
+A{val y: R forSome { type R <: P with Q }} with Any{val y: Q}
+A{val y: R forSome { type R <: P with Q }} with Any{val y: R forSome { type R <: P with Q }}
+A{val y: R forSome { type R <: P with Q }} with A{val y: P}
+A{val y: R forSome { type R <: P with Q }} with A{val y: Q}
+A{val y: R forSome { type R <: P with Q }} with A{val y: R forSome { type R <: P with Q }}
+A{val y: R forSome { type R <: P with Q }} with B{val y: P}
+A{val y: R forSome { type R <: P with Q }} with B{val y: Q}
+A{val y: R forSome { type R <: P with Q }} with B{val y: R forSome { type R <: P with Q }}
+B{val y: P} with Any{val y: P}
+B{val y: P} with Any{val y: Q}
+B{val y: P} with Any{val y: R forSome { type R <: P with Q }}
+B{val y: P} with A{val y: P}
+B{val y: P} with A{val y: Q}
+B{val y: P} with A{val y: R forSome { type R <: P with Q }}
+B{val y: P} with B{val y: P}
+B{val y: P} with B{val y: Q}
+B{val y: P} with B{val y: R forSome { type R <: P with Q }}
+B{val y: Q} with Any{val y: P}
+B{val y: Q} with Any{val y: Q}
+B{val y: Q} with Any{val y: R forSome { type R <: P with Q }}
+B{val y: Q} with A{val y: P}
+B{val y: Q} with A{val y: Q}
+B{val y: Q} with A{val y: R forSome { type R <: P with Q }}
+B{val y: Q} with B{val y: P}
+B{val y: Q} with B{val y: Q}
+B{val y: Q} with B{val y: R forSome { type R <: P with Q }}
+B{val y: R forSome { type R <: P with Q }} with Any{val y: P}
+B{val y: R forSome { type R <: P with Q }} with Any{val y: Q}
+B{val y: R forSome { type R <: P with Q }} with Any{val y: R forSome { type R <: P with Q }}
+B{val y: R forSome { type R <: P with Q }} with A{val y: P}
+B{val y: R forSome { type R <: P with Q }} with A{val y: Q}
+B{val y: R forSome { type R <: P with Q }} with A{val y: R forSome { type R <: P with Q }}
+B{val y: R forSome { type R <: P with Q }} with B{val y: P}
+B{val y: R forSome { type R <: P with Q }} with B{val y: Q}
+B{val y: R forSome { type R <: P with Q }} with B{val y: R forSome { type R <: P with Q }}
+C{val y: P} with Any{val y: P}
+C{val y: P} with Any{val y: Q}
+C{val y: P} with Any{val y: R forSome { type R <: P with Q }}
+C{val y: P} with A{val y: P}
+C{val y: P} with A{val y: Q}
+C{val y: P} with A{val y: R forSome { type R <: P with Q }}
+C{val y: P} with B{val y: P}
+C{val y: P} with B{val y: Q}
+C{val y: P} with B{val y: R forSome { type R <: P with Q }}
+C{val y: Q} with Any{val y: P}
+C{val y: Q} with Any{val y: Q}
+C{val y: Q} with Any{val y: R forSome { type R <: P with Q }}
+C{val y: Q} with A{val y: P}
+C{val y: Q} with A{val y: Q}
+C{val y: Q} with A{val y: R forSome { type R <: P with Q }}
+C{val y: Q} with B{val y: P}
+C{val y: Q} with B{val y: Q}
+C{val y: Q} with B{val y: R forSome { type R <: P with Q }}
+C{val y: R forSome { type R <: P with Q }} with Any{val y: P}
+C{val y: R forSome { type R <: P with Q }} with Any{val y: Q}
+C{val y: R forSome { type R <: P with Q }} with Any{val y: R forSome { type R <: P with Q }}
+C{val y: R forSome { type R <: P with Q }} with A{val y: P}
+C{val y: R forSome { type R <: P with Q }} with A{val y: Q}
+C{val y: R forSome { type R <: P with Q }} with A{val y: R forSome { type R <: P with Q }}
+C{val y: R forSome { type R <: P with Q }} with B{val y: P}
+C{val y: R forSome { type R <: P with Q }} with B{val y: Q}
+C{val y: R forSome { type R <: P with Q }} with B{val y: R forSome { type R <: P with Q }}
+
+
diff --git a/test/files/run/t3425b/Base_1.scala b/test/files/run/t3425b/Base_1.scala
new file mode 100644
index 0000000..bdbc124
--- /dev/null
+++ b/test/files/run/t3425b/Base_1.scala
@@ -0,0 +1,89 @@
+trait P { def reflected: Boolean }
+trait Q { def reflected: Boolean }
+class PQ(val reflected: Boolean) extends P with Q { }
+
+trait A
+trait B
+trait C { val y: P }
+class ABC extends A with B with C {
+  private def reflected = (
+    Thread.currentThread.getStackTrace
+      takeWhile (_.getMethodName != "main")
+      exists (_.toString contains "sun.reflect.")
+  )
+  lazy val y: PQ = new PQ(reflected)
+}
+
+/*** The source used to generate the second file
+     Not otherwise used in the test except that compiling
+     it helps make sure it still compiles.
+
+****/
+
+object Gen {
+  case class Tp(outer: String, elem: String) {
+    override def toString = s"$outer { val y: $elem }"
+  }
+  case class Pair(tp1: Tp, tp2: Tp) {
+    def expr = s"((new ABC): $tp)"
+    def tp   = s"($tp1) with ($tp2)"
+  }
+  val traits = Vector("Any", "A", "B", "C") map ("%6s" format _)
+  val types  = Vector("P", "Q", "R forSome { type R <: P with Q }")
+  val allTypes = for (c <- traits ; tp <- types) yield Tp(c, tp)
+  val pairs = allTypes flatMap (t1 => allTypes map (t2 => Pair(t1, t2)))
+  val indices = pairs.indices
+
+  def aliases(idx: Int) = {
+    val p = pairs(idx)
+    import p._
+    List(
+      s"type R1_$idx = $tp",
+      s"type R2_$idx = R1_$idx { val y: (${tp1.elem}) with (${tp2.elem}) }"
+    )
+  }
+
+  def mkMethodContent(pre: String)(f: Int => String) =
+    indices map (i => s"def $pre$i${f(i)}") mkString "\n  "
+
+  def content = List(
+    indices flatMap aliases mkString "\n  ",
+    mkMethodContent("f")(i => s" = { val x = ${pairs(i).expr} ; x.y.reflected -> whatis(x).toString }"),
+    mkMethodContent("g")(i => s"""(x: R1_$i) = x.y"""),
+    mkMethodContent("h")(i => s"""(x: R2_$i) = x.y""")
+  ) mkString "\n  "
+
+  def fCalls = indices map ("f" + _) mkString ("\n    ", ",\n    ", "\n  ")
+
+  def main(args: Array[String]): Unit = {
+    // One cannot attain proper appreciation for the inadequacies of
+    // string interpolation without becoming one with the newline.
+    val nl = "\\n"
+
+    println(s"""
+      |import scala.reflect.runtime.universe._
+      |import scala.language._
+      |
+      |object Test {
+      |  def whatis[T: TypeTag](x: T) = typeOf[T]
+      |  def sshow(label: String, xs: Traversable[Any]) {
+      |    println("==== " + label + " ====$nl")
+      |    xs.toList.map("" + _).sorted foreach println
+      |    println("$nl")
+      |  }
+      |
+      |  $content
+      |  lazy val fcalls = List($fCalls)
+      |
+      |  def main(args: Array[String]) {
+      |    sshow("Direct Calls", fcalls collect { case (false, n) => n })
+      |    sshow("Reflective Calls", fcalls collect { case (true, n) => n })
+      |    // For a good time try printing this - have to fix bugs in
+      |    // reflection before that's going to be a good idea
+      |    // println(typeOf[Test.type].typeSymbol.asClass.info)
+      |  }
+      |}
+      """.stripMargin.trim
+    )
+  }
+}
diff --git a/test/files/run/t3425b/Generated_2.scala b/test/files/run/t3425b/Generated_2.scala
new file mode 100644
index 0000000..d08f17e
--- /dev/null
+++ b/test/files/run/t3425b/Generated_2.scala
@@ -0,0 +1,886 @@
+import scala.reflect.runtime.universe._
+import scala.language._
+
+object Test {
+  def whatis[T: TypeTag](x: T) = typeOf[T]
+  def sshow(label: String, xs: Traversable[Any]) {
+    println("==== " + label + " ====\n")
+    xs.toList.map("" + _).sorted foreach println
+    println("\n")
+  }
+
+  type R1_0 = (   Any { val y: P }) with (   Any { val y: P })
+  type R2_0 = R1_0 { val y: (P) with (P) }
+  type R1_1 = (   Any { val y: P }) with (   Any { val y: Q })
+  type R2_1 = R1_1 { val y: (P) with (Q) }
+  type R1_2 = (   Any { val y: P }) with (   Any { val y: R forSome { type R <: P with Q } })
+  type R2_2 = R1_2 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_3 = (   Any { val y: P }) with (     A { val y: P })
+  type R2_3 = R1_3 { val y: (P) with (P) }
+  type R1_4 = (   Any { val y: P }) with (     A { val y: Q })
+  type R2_4 = R1_4 { val y: (P) with (Q) }
+  type R1_5 = (   Any { val y: P }) with (     A { val y: R forSome { type R <: P with Q } })
+  type R2_5 = R1_5 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_6 = (   Any { val y: P }) with (     B { val y: P })
+  type R2_6 = R1_6 { val y: (P) with (P) }
+  type R1_7 = (   Any { val y: P }) with (     B { val y: Q })
+  type R2_7 = R1_7 { val y: (P) with (Q) }
+  type R1_8 = (   Any { val y: P }) with (     B { val y: R forSome { type R <: P with Q } })
+  type R2_8 = R1_8 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_9 = (   Any { val y: P }) with (     C { val y: P })
+  type R2_9 = R1_9 { val y: (P) with (P) }
+  type R1_10 = (   Any { val y: P }) with (     C { val y: Q })
+  type R2_10 = R1_10 { val y: (P) with (Q) }
+  type R1_11 = (   Any { val y: P }) with (     C { val y: R forSome { type R <: P with Q } })
+  type R2_11 = R1_11 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_12 = (   Any { val y: Q }) with (   Any { val y: P })
+  type R2_12 = R1_12 { val y: (Q) with (P) }
+  type R1_13 = (   Any { val y: Q }) with (   Any { val y: Q })
+  type R2_13 = R1_13 { val y: (Q) with (Q) }
+  type R1_14 = (   Any { val y: Q }) with (   Any { val y: R forSome { type R <: P with Q } })
+  type R2_14 = R1_14 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_15 = (   Any { val y: Q }) with (     A { val y: P })
+  type R2_15 = R1_15 { val y: (Q) with (P) }
+  type R1_16 = (   Any { val y: Q }) with (     A { val y: Q })
+  type R2_16 = R1_16 { val y: (Q) with (Q) }
+  type R1_17 = (   Any { val y: Q }) with (     A { val y: R forSome { type R <: P with Q } })
+  type R2_17 = R1_17 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_18 = (   Any { val y: Q }) with (     B { val y: P })
+  type R2_18 = R1_18 { val y: (Q) with (P) }
+  type R1_19 = (   Any { val y: Q }) with (     B { val y: Q })
+  type R2_19 = R1_19 { val y: (Q) with (Q) }
+  type R1_20 = (   Any { val y: Q }) with (     B { val y: R forSome { type R <: P with Q } })
+  type R2_20 = R1_20 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_21 = (   Any { val y: Q }) with (     C { val y: P })
+  type R2_21 = R1_21 { val y: (Q) with (P) }
+  type R1_22 = (   Any { val y: Q }) with (     C { val y: Q })
+  type R2_22 = R1_22 { val y: (Q) with (Q) }
+  type R1_23 = (   Any { val y: Q }) with (     C { val y: R forSome { type R <: P with Q } })
+  type R2_23 = R1_23 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_24 = (   Any { val y: R forSome { type R <: P with Q } }) with (   Any { val y: P })
+  type R2_24 = R1_24 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_25 = (   Any { val y: R forSome { type R <: P with Q } }) with (   Any { val y: Q })
+  type R2_25 = R1_25 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_26 = (   Any { val y: R forSome { type R <: P with Q } }) with (   Any { val y: R forSome { type R <: P with Q } })
+  type R2_26 = R1_26 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_27 = (   Any { val y: R forSome { type R <: P with Q } }) with (     A { val y: P })
+  type R2_27 = R1_27 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_28 = (   Any { val y: R forSome { type R <: P with Q } }) with (     A { val y: Q })
+  type R2_28 = R1_28 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_29 = (   Any { val y: R forSome { type R <: P with Q } }) with (     A { val y: R forSome { type R <: P with Q } })
+  type R2_29 = R1_29 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_30 = (   Any { val y: R forSome { type R <: P with Q } }) with (     B { val y: P })
+  type R2_30 = R1_30 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_31 = (   Any { val y: R forSome { type R <: P with Q } }) with (     B { val y: Q })
+  type R2_31 = R1_31 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_32 = (   Any { val y: R forSome { type R <: P with Q } }) with (     B { val y: R forSome { type R <: P with Q } })
+  type R2_32 = R1_32 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_33 = (   Any { val y: R forSome { type R <: P with Q } }) with (     C { val y: P })
+  type R2_33 = R1_33 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_34 = (   Any { val y: R forSome { type R <: P with Q } }) with (     C { val y: Q })
+  type R2_34 = R1_34 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_35 = (   Any { val y: R forSome { type R <: P with Q } }) with (     C { val y: R forSome { type R <: P with Q } })
+  type R2_35 = R1_35 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_36 = (     A { val y: P }) with (   Any { val y: P })
+  type R2_36 = R1_36 { val y: (P) with (P) }
+  type R1_37 = (     A { val y: P }) with (   Any { val y: Q })
+  type R2_37 = R1_37 { val y: (P) with (Q) }
+  type R1_38 = (     A { val y: P }) with (   Any { val y: R forSome { type R <: P with Q } })
+  type R2_38 = R1_38 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_39 = (     A { val y: P }) with (     A { val y: P })
+  type R2_39 = R1_39 { val y: (P) with (P) }
+  type R1_40 = (     A { val y: P }) with (     A { val y: Q })
+  type R2_40 = R1_40 { val y: (P) with (Q) }
+  type R1_41 = (     A { val y: P }) with (     A { val y: R forSome { type R <: P with Q } })
+  type R2_41 = R1_41 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_42 = (     A { val y: P }) with (     B { val y: P })
+  type R2_42 = R1_42 { val y: (P) with (P) }
+  type R1_43 = (     A { val y: P }) with (     B { val y: Q })
+  type R2_43 = R1_43 { val y: (P) with (Q) }
+  type R1_44 = (     A { val y: P }) with (     B { val y: R forSome { type R <: P with Q } })
+  type R2_44 = R1_44 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_45 = (     A { val y: P }) with (     C { val y: P })
+  type R2_45 = R1_45 { val y: (P) with (P) }
+  type R1_46 = (     A { val y: P }) with (     C { val y: Q })
+  type R2_46 = R1_46 { val y: (P) with (Q) }
+  type R1_47 = (     A { val y: P }) with (     C { val y: R forSome { type R <: P with Q } })
+  type R2_47 = R1_47 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_48 = (     A { val y: Q }) with (   Any { val y: P })
+  type R2_48 = R1_48 { val y: (Q) with (P) }
+  type R1_49 = (     A { val y: Q }) with (   Any { val y: Q })
+  type R2_49 = R1_49 { val y: (Q) with (Q) }
+  type R1_50 = (     A { val y: Q }) with (   Any { val y: R forSome { type R <: P with Q } })
+  type R2_50 = R1_50 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_51 = (     A { val y: Q }) with (     A { val y: P })
+  type R2_51 = R1_51 { val y: (Q) with (P) }
+  type R1_52 = (     A { val y: Q }) with (     A { val y: Q })
+  type R2_52 = R1_52 { val y: (Q) with (Q) }
+  type R1_53 = (     A { val y: Q }) with (     A { val y: R forSome { type R <: P with Q } })
+  type R2_53 = R1_53 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_54 = (     A { val y: Q }) with (     B { val y: P })
+  type R2_54 = R1_54 { val y: (Q) with (P) }
+  type R1_55 = (     A { val y: Q }) with (     B { val y: Q })
+  type R2_55 = R1_55 { val y: (Q) with (Q) }
+  type R1_56 = (     A { val y: Q }) with (     B { val y: R forSome { type R <: P with Q } })
+  type R2_56 = R1_56 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_57 = (     A { val y: Q }) with (     C { val y: P })
+  type R2_57 = R1_57 { val y: (Q) with (P) }
+  type R1_58 = (     A { val y: Q }) with (     C { val y: Q })
+  type R2_58 = R1_58 { val y: (Q) with (Q) }
+  type R1_59 = (     A { val y: Q }) with (     C { val y: R forSome { type R <: P with Q } })
+  type R2_59 = R1_59 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_60 = (     A { val y: R forSome { type R <: P with Q } }) with (   Any { val y: P })
+  type R2_60 = R1_60 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_61 = (     A { val y: R forSome { type R <: P with Q } }) with (   Any { val y: Q })
+  type R2_61 = R1_61 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_62 = (     A { val y: R forSome { type R <: P with Q } }) with (   Any { val y: R forSome { type R <: P with Q } })
+  type R2_62 = R1_62 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_63 = (     A { val y: R forSome { type R <: P with Q } }) with (     A { val y: P })
+  type R2_63 = R1_63 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_64 = (     A { val y: R forSome { type R <: P with Q } }) with (     A { val y: Q })
+  type R2_64 = R1_64 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_65 = (     A { val y: R forSome { type R <: P with Q } }) with (     A { val y: R forSome { type R <: P with Q } })
+  type R2_65 = R1_65 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_66 = (     A { val y: R forSome { type R <: P with Q } }) with (     B { val y: P })
+  type R2_66 = R1_66 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_67 = (     A { val y: R forSome { type R <: P with Q } }) with (     B { val y: Q })
+  type R2_67 = R1_67 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_68 = (     A { val y: R forSome { type R <: P with Q } }) with (     B { val y: R forSome { type R <: P with Q } })
+  type R2_68 = R1_68 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_69 = (     A { val y: R forSome { type R <: P with Q } }) with (     C { val y: P })
+  type R2_69 = R1_69 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_70 = (     A { val y: R forSome { type R <: P with Q } }) with (     C { val y: Q })
+  type R2_70 = R1_70 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_71 = (     A { val y: R forSome { type R <: P with Q } }) with (     C { val y: R forSome { type R <: P with Q } })
+  type R2_71 = R1_71 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_72 = (     B { val y: P }) with (   Any { val y: P })
+  type R2_72 = R1_72 { val y: (P) with (P) }
+  type R1_73 = (     B { val y: P }) with (   Any { val y: Q })
+  type R2_73 = R1_73 { val y: (P) with (Q) }
+  type R1_74 = (     B { val y: P }) with (   Any { val y: R forSome { type R <: P with Q } })
+  type R2_74 = R1_74 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_75 = (     B { val y: P }) with (     A { val y: P })
+  type R2_75 = R1_75 { val y: (P) with (P) }
+  type R1_76 = (     B { val y: P }) with (     A { val y: Q })
+  type R2_76 = R1_76 { val y: (P) with (Q) }
+  type R1_77 = (     B { val y: P }) with (     A { val y: R forSome { type R <: P with Q } })
+  type R2_77 = R1_77 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_78 = (     B { val y: P }) with (     B { val y: P })
+  type R2_78 = R1_78 { val y: (P) with (P) }
+  type R1_79 = (     B { val y: P }) with (     B { val y: Q })
+  type R2_79 = R1_79 { val y: (P) with (Q) }
+  type R1_80 = (     B { val y: P }) with (     B { val y: R forSome { type R <: P with Q } })
+  type R2_80 = R1_80 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_81 = (     B { val y: P }) with (     C { val y: P })
+  type R2_81 = R1_81 { val y: (P) with (P) }
+  type R1_82 = (     B { val y: P }) with (     C { val y: Q })
+  type R2_82 = R1_82 { val y: (P) with (Q) }
+  type R1_83 = (     B { val y: P }) with (     C { val y: R forSome { type R <: P with Q } })
+  type R2_83 = R1_83 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_84 = (     B { val y: Q }) with (   Any { val y: P })
+  type R2_84 = R1_84 { val y: (Q) with (P) }
+  type R1_85 = (     B { val y: Q }) with (   Any { val y: Q })
+  type R2_85 = R1_85 { val y: (Q) with (Q) }
+  type R1_86 = (     B { val y: Q }) with (   Any { val y: R forSome { type R <: P with Q } })
+  type R2_86 = R1_86 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_87 = (     B { val y: Q }) with (     A { val y: P })
+  type R2_87 = R1_87 { val y: (Q) with (P) }
+  type R1_88 = (     B { val y: Q }) with (     A { val y: Q })
+  type R2_88 = R1_88 { val y: (Q) with (Q) }
+  type R1_89 = (     B { val y: Q }) with (     A { val y: R forSome { type R <: P with Q } })
+  type R2_89 = R1_89 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_90 = (     B { val y: Q }) with (     B { val y: P })
+  type R2_90 = R1_90 { val y: (Q) with (P) }
+  type R1_91 = (     B { val y: Q }) with (     B { val y: Q })
+  type R2_91 = R1_91 { val y: (Q) with (Q) }
+  type R1_92 = (     B { val y: Q }) with (     B { val y: R forSome { type R <: P with Q } })
+  type R2_92 = R1_92 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_93 = (     B { val y: Q }) with (     C { val y: P })
+  type R2_93 = R1_93 { val y: (Q) with (P) }
+  type R1_94 = (     B { val y: Q }) with (     C { val y: Q })
+  type R2_94 = R1_94 { val y: (Q) with (Q) }
+  type R1_95 = (     B { val y: Q }) with (     C { val y: R forSome { type R <: P with Q } })
+  type R2_95 = R1_95 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_96 = (     B { val y: R forSome { type R <: P with Q } }) with (   Any { val y: P })
+  type R2_96 = R1_96 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_97 = (     B { val y: R forSome { type R <: P with Q } }) with (   Any { val y: Q })
+  type R2_97 = R1_97 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_98 = (     B { val y: R forSome { type R <: P with Q } }) with (   Any { val y: R forSome { type R <: P with Q } })
+  type R2_98 = R1_98 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_99 = (     B { val y: R forSome { type R <: P with Q } }) with (     A { val y: P })
+  type R2_99 = R1_99 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_100 = (     B { val y: R forSome { type R <: P with Q } }) with (     A { val y: Q })
+  type R2_100 = R1_100 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_101 = (     B { val y: R forSome { type R <: P with Q } }) with (     A { val y: R forSome { type R <: P with Q } })
+  type R2_101 = R1_101 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_102 = (     B { val y: R forSome { type R <: P with Q } }) with (     B { val y: P })
+  type R2_102 = R1_102 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_103 = (     B { val y: R forSome { type R <: P with Q } }) with (     B { val y: Q })
+  type R2_103 = R1_103 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_104 = (     B { val y: R forSome { type R <: P with Q } }) with (     B { val y: R forSome { type R <: P with Q } })
+  type R2_104 = R1_104 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_105 = (     B { val y: R forSome { type R <: P with Q } }) with (     C { val y: P })
+  type R2_105 = R1_105 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_106 = (     B { val y: R forSome { type R <: P with Q } }) with (     C { val y: Q })
+  type R2_106 = R1_106 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_107 = (     B { val y: R forSome { type R <: P with Q } }) with (     C { val y: R forSome { type R <: P with Q } })
+  type R2_107 = R1_107 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_108 = (     C { val y: P }) with (   Any { val y: P })
+  type R2_108 = R1_108 { val y: (P) with (P) }
+  type R1_109 = (     C { val y: P }) with (   Any { val y: Q })
+  type R2_109 = R1_109 { val y: (P) with (Q) }
+  type R1_110 = (     C { val y: P }) with (   Any { val y: R forSome { type R <: P with Q } })
+  type R2_110 = R1_110 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_111 = (     C { val y: P }) with (     A { val y: P })
+  type R2_111 = R1_111 { val y: (P) with (P) }
+  type R1_112 = (     C { val y: P }) with (     A { val y: Q })
+  type R2_112 = R1_112 { val y: (P) with (Q) }
+  type R1_113 = (     C { val y: P }) with (     A { val y: R forSome { type R <: P with Q } })
+  type R2_113 = R1_113 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_114 = (     C { val y: P }) with (     B { val y: P })
+  type R2_114 = R1_114 { val y: (P) with (P) }
+  type R1_115 = (     C { val y: P }) with (     B { val y: Q })
+  type R2_115 = R1_115 { val y: (P) with (Q) }
+  type R1_116 = (     C { val y: P }) with (     B { val y: R forSome { type R <: P with Q } })
+  type R2_116 = R1_116 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_117 = (     C { val y: P }) with (     C { val y: P })
+  type R2_117 = R1_117 { val y: (P) with (P) }
+  type R1_118 = (     C { val y: P }) with (     C { val y: Q })
+  type R2_118 = R1_118 { val y: (P) with (Q) }
+  type R1_119 = (     C { val y: P }) with (     C { val y: R forSome { type R <: P with Q } })
+  type R2_119 = R1_119 { val y: (P) with (R forSome { type R <: P with Q }) }
+  type R1_120 = (     C { val y: Q }) with (   Any { val y: P })
+  type R2_120 = R1_120 { val y: (Q) with (P) }
+  type R1_121 = (     C { val y: Q }) with (   Any { val y: Q })
+  type R2_121 = R1_121 { val y: (Q) with (Q) }
+  type R1_122 = (     C { val y: Q }) with (   Any { val y: R forSome { type R <: P with Q } })
+  type R2_122 = R1_122 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_123 = (     C { val y: Q }) with (     A { val y: P })
+  type R2_123 = R1_123 { val y: (Q) with (P) }
+  type R1_124 = (     C { val y: Q }) with (     A { val y: Q })
+  type R2_124 = R1_124 { val y: (Q) with (Q) }
+  type R1_125 = (     C { val y: Q }) with (     A { val y: R forSome { type R <: P with Q } })
+  type R2_125 = R1_125 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_126 = (     C { val y: Q }) with (     B { val y: P })
+  type R2_126 = R1_126 { val y: (Q) with (P) }
+  type R1_127 = (     C { val y: Q }) with (     B { val y: Q })
+  type R2_127 = R1_127 { val y: (Q) with (Q) }
+  type R1_128 = (     C { val y: Q }) with (     B { val y: R forSome { type R <: P with Q } })
+  type R2_128 = R1_128 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_129 = (     C { val y: Q }) with (     C { val y: P })
+  type R2_129 = R1_129 { val y: (Q) with (P) }
+  type R1_130 = (     C { val y: Q }) with (     C { val y: Q })
+  type R2_130 = R1_130 { val y: (Q) with (Q) }
+  type R1_131 = (     C { val y: Q }) with (     C { val y: R forSome { type R <: P with Q } })
+  type R2_131 = R1_131 { val y: (Q) with (R forSome { type R <: P with Q }) }
+  type R1_132 = (     C { val y: R forSome { type R <: P with Q } }) with (   Any { val y: P })
+  type R2_132 = R1_132 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_133 = (     C { val y: R forSome { type R <: P with Q } }) with (   Any { val y: Q })
+  type R2_133 = R1_133 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_134 = (     C { val y: R forSome { type R <: P with Q } }) with (   Any { val y: R forSome { type R <: P with Q } })
+  type R2_134 = R1_134 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_135 = (     C { val y: R forSome { type R <: P with Q } }) with (     A { val y: P })
+  type R2_135 = R1_135 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_136 = (     C { val y: R forSome { type R <: P with Q } }) with (     A { val y: Q })
+  type R2_136 = R1_136 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_137 = (     C { val y: R forSome { type R <: P with Q } }) with (     A { val y: R forSome { type R <: P with Q } })
+  type R2_137 = R1_137 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_138 = (     C { val y: R forSome { type R <: P with Q } }) with (     B { val y: P })
+  type R2_138 = R1_138 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_139 = (     C { val y: R forSome { type R <: P with Q } }) with (     B { val y: Q })
+  type R2_139 = R1_139 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_140 = (     C { val y: R forSome { type R <: P with Q } }) with (     B { val y: R forSome { type R <: P with Q } })
+  type R2_140 = R1_140 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  type R1_141 = (     C { val y: R forSome { type R <: P with Q } }) with (     C { val y: P })
+  type R2_141 = R1_141 { val y: (R forSome { type R <: P with Q }) with (P) }
+  type R1_142 = (     C { val y: R forSome { type R <: P with Q } }) with (     C { val y: Q })
+  type R2_142 = R1_142 { val y: (R forSome { type R <: P with Q }) with (Q) }
+  type R1_143 = (     C { val y: R forSome { type R <: P with Q } }) with (     C { val y: R forSome { type R <: P with Q } })
+  type R2_143 = R1_143 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+  def f0 = { val x = ((new ABC): (   Any { val y: P }) with (   Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f1 = { val x = ((new ABC): (   Any { val y: P }) with (   Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f2 = { val x = ((new ABC): (   Any { val y: P }) with (   Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f3 = { val x = ((new ABC): (   Any { val y: P }) with (     A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f4 = { val x = ((new ABC): (   Any { val y: P }) with (     A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f5 = { val x = ((new ABC): (   Any { val y: P }) with (     A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f6 = { val x = ((new ABC): (   Any { val y: P }) with (     B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f7 = { val x = ((new ABC): (   Any { val y: P }) with (     B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f8 = { val x = ((new ABC): (   Any { val y: P }) with (     B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f9 = { val x = ((new ABC): (   Any { val y: P }) with (     C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f10 = { val x = ((new ABC): (   Any { val y: P }) with (     C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f11 = { val x = ((new ABC): (   Any { val y: P }) with (     C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f12 = { val x = ((new ABC): (   Any { val y: Q }) with (   Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f13 = { val x = ((new ABC): (   Any { val y: Q }) with (   Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f14 = { val x = ((new ABC): (   Any { val y: Q }) with (   Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f15 = { val x = ((new ABC): (   Any { val y: Q }) with (     A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f16 = { val x = ((new ABC): (   Any { val y: Q }) with (     A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f17 = { val x = ((new ABC): (   Any { val y: Q }) with (     A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f18 = { val x = ((new ABC): (   Any { val y: Q }) with (     B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f19 = { val x = ((new ABC): (   Any { val y: Q }) with (     B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f20 = { val x = ((new ABC): (   Any { val y: Q }) with (     B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f21 = { val x = ((new ABC): (   Any { val y: Q }) with (     C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f22 = { val x = ((new ABC): (   Any { val y: Q }) with (     C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f23 = { val x = ((new ABC): (   Any { val y: Q }) with (     C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f24 = { val x = ((new ABC): (   Any { val y: R forSome { type R <: P with Q } }) with (   Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f25 = { val x = ((new ABC): (   Any { val y: R forSome { type R <: P with Q } }) with (   Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f26 = { val x = ((new ABC): (   Any { val y: R forSome { type R <: P with Q } }) with (   Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f27 = { val x = ((new ABC): (   Any { val y: R forSome { type R <: P with Q } }) with (     A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f28 = { val x = ((new ABC): (   Any { val y: R forSome { type R <: P with Q } }) with (     A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f29 = { val x = ((new ABC): (   Any { val y: R forSome { type R <: P with Q } }) with (     A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f30 = { val x = ((new ABC): (   Any { val y: R forSome { type R <: P with Q } }) with (     B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f31 = { val x = ((new ABC): (   Any { val y: R forSome { type R <: P with Q } }) with (     B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f32 = { val x = ((new ABC): (   Any { val y: R forSome { type R <: P with Q } }) with (     B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f33 = { val x = ((new ABC): (   Any { val y: R forSome { type R <: P with Q } }) with (     C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f34 = { val x = ((new ABC): (   Any { val y: R forSome { type R <: P with Q } }) with (     C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f35 = { val x = ((new ABC): (   Any { val y: R forSome { type R <: P with Q } }) with (     C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f36 = { val x = ((new ABC): (     A { val y: P }) with (   Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f37 = { val x = ((new ABC): (     A { val y: P }) with (   Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f38 = { val x = ((new ABC): (     A { val y: P }) with (   Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f39 = { val x = ((new ABC): (     A { val y: P }) with (     A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f40 = { val x = ((new ABC): (     A { val y: P }) with (     A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f41 = { val x = ((new ABC): (     A { val y: P }) with (     A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f42 = { val x = ((new ABC): (     A { val y: P }) with (     B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f43 = { val x = ((new ABC): (     A { val y: P }) with (     B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f44 = { val x = ((new ABC): (     A { val y: P }) with (     B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f45 = { val x = ((new ABC): (     A { val y: P }) with (     C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f46 = { val x = ((new ABC): (     A { val y: P }) with (     C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f47 = { val x = ((new ABC): (     A { val y: P }) with (     C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f48 = { val x = ((new ABC): (     A { val y: Q }) with (   Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f49 = { val x = ((new ABC): (     A { val y: Q }) with (   Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f50 = { val x = ((new ABC): (     A { val y: Q }) with (   Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f51 = { val x = ((new ABC): (     A { val y: Q }) with (     A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f52 = { val x = ((new ABC): (     A { val y: Q }) with (     A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f53 = { val x = ((new ABC): (     A { val y: Q }) with (     A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f54 = { val x = ((new ABC): (     A { val y: Q }) with (     B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f55 = { val x = ((new ABC): (     A { val y: Q }) with (     B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f56 = { val x = ((new ABC): (     A { val y: Q }) with (     B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f57 = { val x = ((new ABC): (     A { val y: Q }) with (     C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f58 = { val x = ((new ABC): (     A { val y: Q }) with (     C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f59 = { val x = ((new ABC): (     A { val y: Q }) with (     C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f60 = { val x = ((new ABC): (     A { val y: R forSome { type R <: P with Q } }) with (   Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f61 = { val x = ((new ABC): (     A { val y: R forSome { type R <: P with Q } }) with (   Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f62 = { val x = ((new ABC): (     A { val y: R forSome { type R <: P with Q } }) with (   Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f63 = { val x = ((new ABC): (     A { val y: R forSome { type R <: P with Q } }) with (     A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f64 = { val x = ((new ABC): (     A { val y: R forSome { type R <: P with Q } }) with (     A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f65 = { val x = ((new ABC): (     A { val y: R forSome { type R <: P with Q } }) with (     A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f66 = { val x = ((new ABC): (     A { val y: R forSome { type R <: P with Q } }) with (     B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f67 = { val x = ((new ABC): (     A { val y: R forSome { type R <: P with Q } }) with (     B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f68 = { val x = ((new ABC): (     A { val y: R forSome { type R <: P with Q } }) with (     B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f69 = { val x = ((new ABC): (     A { val y: R forSome { type R <: P with Q } }) with (     C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f70 = { val x = ((new ABC): (     A { val y: R forSome { type R <: P with Q } }) with (     C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f71 = { val x = ((new ABC): (     A { val y: R forSome { type R <: P with Q } }) with (     C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f72 = { val x = ((new ABC): (     B { val y: P }) with (   Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f73 = { val x = ((new ABC): (     B { val y: P }) with (   Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f74 = { val x = ((new ABC): (     B { val y: P }) with (   Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f75 = { val x = ((new ABC): (     B { val y: P }) with (     A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f76 = { val x = ((new ABC): (     B { val y: P }) with (     A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f77 = { val x = ((new ABC): (     B { val y: P }) with (     A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f78 = { val x = ((new ABC): (     B { val y: P }) with (     B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f79 = { val x = ((new ABC): (     B { val y: P }) with (     B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f80 = { val x = ((new ABC): (     B { val y: P }) with (     B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f81 = { val x = ((new ABC): (     B { val y: P }) with (     C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f82 = { val x = ((new ABC): (     B { val y: P }) with (     C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f83 = { val x = ((new ABC): (     B { val y: P }) with (     C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f84 = { val x = ((new ABC): (     B { val y: Q }) with (   Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f85 = { val x = ((new ABC): (     B { val y: Q }) with (   Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f86 = { val x = ((new ABC): (     B { val y: Q }) with (   Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f87 = { val x = ((new ABC): (     B { val y: Q }) with (     A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f88 = { val x = ((new ABC): (     B { val y: Q }) with (     A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f89 = { val x = ((new ABC): (     B { val y: Q }) with (     A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f90 = { val x = ((new ABC): (     B { val y: Q }) with (     B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f91 = { val x = ((new ABC): (     B { val y: Q }) with (     B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f92 = { val x = ((new ABC): (     B { val y: Q }) with (     B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f93 = { val x = ((new ABC): (     B { val y: Q }) with (     C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f94 = { val x = ((new ABC): (     B { val y: Q }) with (     C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f95 = { val x = ((new ABC): (     B { val y: Q }) with (     C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f96 = { val x = ((new ABC): (     B { val y: R forSome { type R <: P with Q } }) with (   Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f97 = { val x = ((new ABC): (     B { val y: R forSome { type R <: P with Q } }) with (   Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f98 = { val x = ((new ABC): (     B { val y: R forSome { type R <: P with Q } }) with (   Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f99 = { val x = ((new ABC): (     B { val y: R forSome { type R <: P with Q } }) with (     A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f100 = { val x = ((new ABC): (     B { val y: R forSome { type R <: P with Q } }) with (     A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f101 = { val x = ((new ABC): (     B { val y: R forSome { type R <: P with Q } }) with (     A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f102 = { val x = ((new ABC): (     B { val y: R forSome { type R <: P with Q } }) with (     B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f103 = { val x = ((new ABC): (     B { val y: R forSome { type R <: P with Q } }) with (     B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f104 = { val x = ((new ABC): (     B { val y: R forSome { type R <: P with Q } }) with (     B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f105 = { val x = ((new ABC): (     B { val y: R forSome { type R <: P with Q } }) with (     C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f106 = { val x = ((new ABC): (     B { val y: R forSome { type R <: P with Q } }) with (     C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f107 = { val x = ((new ABC): (     B { val y: R forSome { type R <: P with Q } }) with (     C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f108 = { val x = ((new ABC): (     C { val y: P }) with (   Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f109 = { val x = ((new ABC): (     C { val y: P }) with (   Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f110 = { val x = ((new ABC): (     C { val y: P }) with (   Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f111 = { val x = ((new ABC): (     C { val y: P }) with (     A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f112 = { val x = ((new ABC): (     C { val y: P }) with (     A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f113 = { val x = ((new ABC): (     C { val y: P }) with (     A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f114 = { val x = ((new ABC): (     C { val y: P }) with (     B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f115 = { val x = ((new ABC): (     C { val y: P }) with (     B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f116 = { val x = ((new ABC): (     C { val y: P }) with (     B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f117 = { val x = ((new ABC): (     C { val y: P }) with (     C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f118 = { val x = ((new ABC): (     C { val y: P }) with (     C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f119 = { val x = ((new ABC): (     C { val y: P }) with (     C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f120 = { val x = ((new ABC): (     C { val y: Q }) with (   Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f121 = { val x = ((new ABC): (     C { val y: Q }) with (   Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f122 = { val x = ((new ABC): (     C { val y: Q }) with (   Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f123 = { val x = ((new ABC): (     C { val y: Q }) with (     A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f124 = { val x = ((new ABC): (     C { val y: Q }) with (     A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f125 = { val x = ((new ABC): (     C { val y: Q }) with (     A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f126 = { val x = ((new ABC): (     C { val y: Q }) with (     B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f127 = { val x = ((new ABC): (     C { val y: Q }) with (     B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f128 = { val x = ((new ABC): (     C { val y: Q }) with (     B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f129 = { val x = ((new ABC): (     C { val y: Q }) with (     C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f130 = { val x = ((new ABC): (     C { val y: Q }) with (     C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f131 = { val x = ((new ABC): (     C { val y: Q }) with (     C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f132 = { val x = ((new ABC): (     C { val y: R forSome { type R <: P with Q } }) with (   Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f133 = { val x = ((new ABC): (     C { val y: R forSome { type R <: P with Q } }) with (   Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f134 = { val x = ((new ABC): (     C { val y: R forSome { type R <: P with Q } }) with (   Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f135 = { val x = ((new ABC): (     C { val y: R forSome { type R <: P with Q } }) with (     A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f136 = { val x = ((new ABC): (     C { val y: R forSome { type R <: P with Q } }) with (     A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f137 = { val x = ((new ABC): (     C { val y: R forSome { type R <: P with Q } }) with (     A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f138 = { val x = ((new ABC): (     C { val y: R forSome { type R <: P with Q } }) with (     B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f139 = { val x = ((new ABC): (     C { val y: R forSome { type R <: P with Q } }) with (     B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f140 = { val x = ((new ABC): (     C { val y: R forSome { type R <: P with Q } }) with (     B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def f141 = { val x = ((new ABC): (     C { val y: R forSome { type R <: P with Q } }) with (     C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+  def f142 = { val x = ((new ABC): (     C { val y: R forSome { type R <: P with Q } }) with (     C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+  def f143 = { val x = ((new ABC): (     C { val y: R forSome { type R <: P with Q } }) with (     C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+  def g0(x: R1_0) = x.y
+  def g1(x: R1_1) = x.y
+  def g2(x: R1_2) = x.y
+  def g3(x: R1_3) = x.y
+  def g4(x: R1_4) = x.y
+  def g5(x: R1_5) = x.y
+  def g6(x: R1_6) = x.y
+  def g7(x: R1_7) = x.y
+  def g8(x: R1_8) = x.y
+  def g9(x: R1_9) = x.y
+  def g10(x: R1_10) = x.y
+  def g11(x: R1_11) = x.y
+  def g12(x: R1_12) = x.y
+  def g13(x: R1_13) = x.y
+  def g14(x: R1_14) = x.y
+  def g15(x: R1_15) = x.y
+  def g16(x: R1_16) = x.y
+  def g17(x: R1_17) = x.y
+  def g18(x: R1_18) = x.y
+  def g19(x: R1_19) = x.y
+  def g20(x: R1_20) = x.y
+  def g21(x: R1_21) = x.y
+  def g22(x: R1_22) = x.y
+  def g23(x: R1_23) = x.y
+  def g24(x: R1_24) = x.y
+  def g25(x: R1_25) = x.y
+  def g26(x: R1_26) = x.y
+  def g27(x: R1_27) = x.y
+  def g28(x: R1_28) = x.y
+  def g29(x: R1_29) = x.y
+  def g30(x: R1_30) = x.y
+  def g31(x: R1_31) = x.y
+  def g32(x: R1_32) = x.y
+  def g33(x: R1_33) = x.y
+  def g34(x: R1_34) = x.y
+  def g35(x: R1_35) = x.y
+  def g36(x: R1_36) = x.y
+  def g37(x: R1_37) = x.y
+  def g38(x: R1_38) = x.y
+  def g39(x: R1_39) = x.y
+  def g40(x: R1_40) = x.y
+  def g41(x: R1_41) = x.y
+  def g42(x: R1_42) = x.y
+  def g43(x: R1_43) = x.y
+  def g44(x: R1_44) = x.y
+  def g45(x: R1_45) = x.y
+  def g46(x: R1_46) = x.y
+  def g47(x: R1_47) = x.y
+  def g48(x: R1_48) = x.y
+  def g49(x: R1_49) = x.y
+  def g50(x: R1_50) = x.y
+  def g51(x: R1_51) = x.y
+  def g52(x: R1_52) = x.y
+  def g53(x: R1_53) = x.y
+  def g54(x: R1_54) = x.y
+  def g55(x: R1_55) = x.y
+  def g56(x: R1_56) = x.y
+  def g57(x: R1_57) = x.y
+  def g58(x: R1_58) = x.y
+  def g59(x: R1_59) = x.y
+  def g60(x: R1_60) = x.y
+  def g61(x: R1_61) = x.y
+  def g62(x: R1_62) = x.y
+  def g63(x: R1_63) = x.y
+  def g64(x: R1_64) = x.y
+  def g65(x: R1_65) = x.y
+  def g66(x: R1_66) = x.y
+  def g67(x: R1_67) = x.y
+  def g68(x: R1_68) = x.y
+  def g69(x: R1_69) = x.y
+  def g70(x: R1_70) = x.y
+  def g71(x: R1_71) = x.y
+  def g72(x: R1_72) = x.y
+  def g73(x: R1_73) = x.y
+  def g74(x: R1_74) = x.y
+  def g75(x: R1_75) = x.y
+  def g76(x: R1_76) = x.y
+  def g77(x: R1_77) = x.y
+  def g78(x: R1_78) = x.y
+  def g79(x: R1_79) = x.y
+  def g80(x: R1_80) = x.y
+  def g81(x: R1_81) = x.y
+  def g82(x: R1_82) = x.y
+  def g83(x: R1_83) = x.y
+  def g84(x: R1_84) = x.y
+  def g85(x: R1_85) = x.y
+  def g86(x: R1_86) = x.y
+  def g87(x: R1_87) = x.y
+  def g88(x: R1_88) = x.y
+  def g89(x: R1_89) = x.y
+  def g90(x: R1_90) = x.y
+  def g91(x: R1_91) = x.y
+  def g92(x: R1_92) = x.y
+  def g93(x: R1_93) = x.y
+  def g94(x: R1_94) = x.y
+  def g95(x: R1_95) = x.y
+  def g96(x: R1_96) = x.y
+  def g97(x: R1_97) = x.y
+  def g98(x: R1_98) = x.y
+  def g99(x: R1_99) = x.y
+  def g100(x: R1_100) = x.y
+  def g101(x: R1_101) = x.y
+  def g102(x: R1_102) = x.y
+  def g103(x: R1_103) = x.y
+  def g104(x: R1_104) = x.y
+  def g105(x: R1_105) = x.y
+  def g106(x: R1_106) = x.y
+  def g107(x: R1_107) = x.y
+  def g108(x: R1_108) = x.y
+  def g109(x: R1_109) = x.y
+  def g110(x: R1_110) = x.y
+  def g111(x: R1_111) = x.y
+  def g112(x: R1_112) = x.y
+  def g113(x: R1_113) = x.y
+  def g114(x: R1_114) = x.y
+  def g115(x: R1_115) = x.y
+  def g116(x: R1_116) = x.y
+  def g117(x: R1_117) = x.y
+  def g118(x: R1_118) = x.y
+  def g119(x: R1_119) = x.y
+  def g120(x: R1_120) = x.y
+  def g121(x: R1_121) = x.y
+  def g122(x: R1_122) = x.y
+  def g123(x: R1_123) = x.y
+  def g124(x: R1_124) = x.y
+  def g125(x: R1_125) = x.y
+  def g126(x: R1_126) = x.y
+  def g127(x: R1_127) = x.y
+  def g128(x: R1_128) = x.y
+  def g129(x: R1_129) = x.y
+  def g130(x: R1_130) = x.y
+  def g131(x: R1_131) = x.y
+  def g132(x: R1_132) = x.y
+  def g133(x: R1_133) = x.y
+  def g134(x: R1_134) = x.y
+  def g135(x: R1_135) = x.y
+  def g136(x: R1_136) = x.y
+  def g137(x: R1_137) = x.y
+  def g138(x: R1_138) = x.y
+  def g139(x: R1_139) = x.y
+  def g140(x: R1_140) = x.y
+  def g141(x: R1_141) = x.y
+  def g142(x: R1_142) = x.y
+  def g143(x: R1_143) = x.y
+  def h0(x: R2_0) = x.y
+  def h1(x: R2_1) = x.y
+  def h2(x: R2_2) = x.y
+  def h3(x: R2_3) = x.y
+  def h4(x: R2_4) = x.y
+  def h5(x: R2_5) = x.y
+  def h6(x: R2_6) = x.y
+  def h7(x: R2_7) = x.y
+  def h8(x: R2_8) = x.y
+  def h9(x: R2_9) = x.y
+  def h10(x: R2_10) = x.y
+  def h11(x: R2_11) = x.y
+  def h12(x: R2_12) = x.y
+  def h13(x: R2_13) = x.y
+  def h14(x: R2_14) = x.y
+  def h15(x: R2_15) = x.y
+  def h16(x: R2_16) = x.y
+  def h17(x: R2_17) = x.y
+  def h18(x: R2_18) = x.y
+  def h19(x: R2_19) = x.y
+  def h20(x: R2_20) = x.y
+  def h21(x: R2_21) = x.y
+  def h22(x: R2_22) = x.y
+  def h23(x: R2_23) = x.y
+  def h24(x: R2_24) = x.y
+  def h25(x: R2_25) = x.y
+  def h26(x: R2_26) = x.y
+  def h27(x: R2_27) = x.y
+  def h28(x: R2_28) = x.y
+  def h29(x: R2_29) = x.y
+  def h30(x: R2_30) = x.y
+  def h31(x: R2_31) = x.y
+  def h32(x: R2_32) = x.y
+  def h33(x: R2_33) = x.y
+  def h34(x: R2_34) = x.y
+  def h35(x: R2_35) = x.y
+  def h36(x: R2_36) = x.y
+  def h37(x: R2_37) = x.y
+  def h38(x: R2_38) = x.y
+  def h39(x: R2_39) = x.y
+  def h40(x: R2_40) = x.y
+  def h41(x: R2_41) = x.y
+  def h42(x: R2_42) = x.y
+  def h43(x: R2_43) = x.y
+  def h44(x: R2_44) = x.y
+  def h45(x: R2_45) = x.y
+  def h46(x: R2_46) = x.y
+  def h47(x: R2_47) = x.y
+  def h48(x: R2_48) = x.y
+  def h49(x: R2_49) = x.y
+  def h50(x: R2_50) = x.y
+  def h51(x: R2_51) = x.y
+  def h52(x: R2_52) = x.y
+  def h53(x: R2_53) = x.y
+  def h54(x: R2_54) = x.y
+  def h55(x: R2_55) = x.y
+  def h56(x: R2_56) = x.y
+  def h57(x: R2_57) = x.y
+  def h58(x: R2_58) = x.y
+  def h59(x: R2_59) = x.y
+  def h60(x: R2_60) = x.y
+  def h61(x: R2_61) = x.y
+  def h62(x: R2_62) = x.y
+  def h63(x: R2_63) = x.y
+  def h64(x: R2_64) = x.y
+  def h65(x: R2_65) = x.y
+  def h66(x: R2_66) = x.y
+  def h67(x: R2_67) = x.y
+  def h68(x: R2_68) = x.y
+  def h69(x: R2_69) = x.y
+  def h70(x: R2_70) = x.y
+  def h71(x: R2_71) = x.y
+  def h72(x: R2_72) = x.y
+  def h73(x: R2_73) = x.y
+  def h74(x: R2_74) = x.y
+  def h75(x: R2_75) = x.y
+  def h76(x: R2_76) = x.y
+  def h77(x: R2_77) = x.y
+  def h78(x: R2_78) = x.y
+  def h79(x: R2_79) = x.y
+  def h80(x: R2_80) = x.y
+  def h81(x: R2_81) = x.y
+  def h82(x: R2_82) = x.y
+  def h83(x: R2_83) = x.y
+  def h84(x: R2_84) = x.y
+  def h85(x: R2_85) = x.y
+  def h86(x: R2_86) = x.y
+  def h87(x: R2_87) = x.y
+  def h88(x: R2_88) = x.y
+  def h89(x: R2_89) = x.y
+  def h90(x: R2_90) = x.y
+  def h91(x: R2_91) = x.y
+  def h92(x: R2_92) = x.y
+  def h93(x: R2_93) = x.y
+  def h94(x: R2_94) = x.y
+  def h95(x: R2_95) = x.y
+  def h96(x: R2_96) = x.y
+  def h97(x: R2_97) = x.y
+  def h98(x: R2_98) = x.y
+  def h99(x: R2_99) = x.y
+  def h100(x: R2_100) = x.y
+  def h101(x: R2_101) = x.y
+  def h102(x: R2_102) = x.y
+  def h103(x: R2_103) = x.y
+  def h104(x: R2_104) = x.y
+  def h105(x: R2_105) = x.y
+  def h106(x: R2_106) = x.y
+  def h107(x: R2_107) = x.y
+  def h108(x: R2_108) = x.y
+  def h109(x: R2_109) = x.y
+  def h110(x: R2_110) = x.y
+  def h111(x: R2_111) = x.y
+  def h112(x: R2_112) = x.y
+  def h113(x: R2_113) = x.y
+  def h114(x: R2_114) = x.y
+  def h115(x: R2_115) = x.y
+  def h116(x: R2_116) = x.y
+  def h117(x: R2_117) = x.y
+  def h118(x: R2_118) = x.y
+  def h119(x: R2_119) = x.y
+  def h120(x: R2_120) = x.y
+  def h121(x: R2_121) = x.y
+  def h122(x: R2_122) = x.y
+  def h123(x: R2_123) = x.y
+  def h124(x: R2_124) = x.y
+  def h125(x: R2_125) = x.y
+  def h126(x: R2_126) = x.y
+  def h127(x: R2_127) = x.y
+  def h128(x: R2_128) = x.y
+  def h129(x: R2_129) = x.y
+  def h130(x: R2_130) = x.y
+  def h131(x: R2_131) = x.y
+  def h132(x: R2_132) = x.y
+  def h133(x: R2_133) = x.y
+  def h134(x: R2_134) = x.y
+  def h135(x: R2_135) = x.y
+  def h136(x: R2_136) = x.y
+  def h137(x: R2_137) = x.y
+  def h138(x: R2_138) = x.y
+  def h139(x: R2_139) = x.y
+  def h140(x: R2_140) = x.y
+  def h141(x: R2_141) = x.y
+  def h142(x: R2_142) = x.y
+  def h143(x: R2_143) = x.y
+  lazy val fcalls = List(
+    f0,
+    f1,
+    f2,
+    f3,
+    f4,
+    f5,
+    f6,
+    f7,
+    f8,
+    f9,
+    f10,
+    f11,
+    f12,
+    f13,
+    f14,
+    f15,
+    f16,
+    f17,
+    f18,
+    f19,
+    f20,
+    f21,
+    f22,
+    f23,
+    f24,
+    f25,
+    f26,
+    f27,
+    f28,
+    f29,
+    f30,
+    f31,
+    f32,
+    f33,
+    f34,
+    f35,
+    f36,
+    f37,
+    f38,
+    f39,
+    f40,
+    f41,
+    f42,
+    f43,
+    f44,
+    f45,
+    f46,
+    f47,
+    f48,
+    f49,
+    f50,
+    f51,
+    f52,
+    f53,
+    f54,
+    f55,
+    f56,
+    f57,
+    f58,
+    f59,
+    f60,
+    f61,
+    f62,
+    f63,
+    f64,
+    f65,
+    f66,
+    f67,
+    f68,
+    f69,
+    f70,
+    f71,
+    f72,
+    f73,
+    f74,
+    f75,
+    f76,
+    f77,
+    f78,
+    f79,
+    f80,
+    f81,
+    f82,
+    f83,
+    f84,
+    f85,
+    f86,
+    f87,
+    f88,
+    f89,
+    f90,
+    f91,
+    f92,
+    f93,
+    f94,
+    f95,
+    f96,
+    f97,
+    f98,
+    f99,
+    f100,
+    f101,
+    f102,
+    f103,
+    f104,
+    f105,
+    f106,
+    f107,
+    f108,
+    f109,
+    f110,
+    f111,
+    f112,
+    f113,
+    f114,
+    f115,
+    f116,
+    f117,
+    f118,
+    f119,
+    f120,
+    f121,
+    f122,
+    f123,
+    f124,
+    f125,
+    f126,
+    f127,
+    f128,
+    f129,
+    f130,
+    f131,
+    f132,
+    f133,
+    f134,
+    f135,
+    f136,
+    f137,
+    f138,
+    f139,
+    f140,
+    f141,
+    f142,
+    f143
+  )
+
+  def main(args: Array[String]) {
+    sshow("Direct Calls", fcalls collect { case (false, n) => n })
+    sshow("Reflective Calls", fcalls collect { case (true, n) => n })
+    // Printing this would be instructive, but bugs in reflection need to be
+    // fixed before enabling it.
+    // println(typeOf[Test.type].typeSymbol.asClass.info)
+  }
+}
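
The pairs built by f0..f143 above record, for each of the 144 upcasts, whether the access to the structural member `y` went through reflection, and `main` groups them under "Direct Calls" and "Reflective Calls". As a minimal, self-contained sketch of the distinction being exercised (illustrative only; the `StructuralAccessSketch` object and `Impl` class are assumptions, not part of the imported test): member access through a purely structural refinement is compiled to a reflective call in Scala 2, while the same access through a nominal type that declares the member stays a direct invocation.

    import scala.language.reflectiveCalls

    object StructuralAccessSketch {
      class Impl { def y: Int = 1 }

      // Ordinary virtual call: the receiver's static type declares `y`.
      def direct(x: Impl): Int = x.y

      // Purely structural type: scalac implements this access via java.lang.reflect.
      def viaRefinement(x: { def y: Int }): Int = x.y

      def main(args: Array[String]): Unit =
        println((direct(new Impl), viaRefinement(new Impl)))
    }
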
diff --git a/test/files/run/t3452.check b/test/files/run/t3452.check
new file mode 100644
index 0000000..b8626c4
--- /dev/null
+++ b/test/files/run/t3452.check
@@ -0,0 +1 @@
+4
diff --git a/test/files/run/t3452.scala b/test/files/run/t3452.scala
new file mode 100644
index 0000000..253fc93
--- /dev/null
+++ b/test/files/run/t3452.scala
@@ -0,0 +1,21 @@
+trait IStringPair[T] {
+  def a : String
+  def b : String
+  def build(a : String, b : String) : T
+  def cat(that : IStringPair[T]) = build(this.a + that.a, this.b + that.b)
+  override def toString = a + b
+}
+
+class StringPair(val a : String, val b : String) extends IStringPair[StringPair] {
+  def build(a : String, b : String) = new StringPair(a, b)
+  def len = a.length + b.length
+}
+
+object Test {
+  def main(args: Array[String]): Unit = {
+    val a = new StringPair("A", "B")
+    val b = new StringPair("1", "2")
+    val c = a cat b
+    println(c.len)
+  }
+}
diff --git a/test/files/run/t3452a.check b/test/files/run/t3452a.check
new file mode 100644
index 0000000..9ff787e
--- /dev/null
+++ b/test/files/run/t3452a.check
@@ -0,0 +1 @@
+BulkSearch.searchFor called.
diff --git a/test/files/run/t3452a/J_2.java b/test/files/run/t3452a/J_2.java
new file mode 100644
index 0000000..62057ff
--- /dev/null
+++ b/test/files/run/t3452a/J_2.java
@@ -0,0 +1,5 @@
+public class J_2 {
+       public static void main(String[] args) {
+               BulkSearchInstance.searchFor(new UpRelation());
+       }
+}
diff --git a/test/files/run/t3452a/S_1.scala b/test/files/run/t3452a/S_1.scala
new file mode 100644
index 0000000..791faf4
--- /dev/null
+++ b/test/files/run/t3452a/S_1.scala
@@ -0,0 +1,24 @@
+abstract class BulkSearch {
+       type R   <: Row
+       type Rel <: Relation [R]
+       type Corr <: Correspondence[R]
+
+       def searchFor(input: Rel): Mapping[Corr] = { println("BulkSearch.searchFor called.") ; null }
+}
+
+object BulkSearchInstance extends BulkSearch {
+       type R   = UpRow
+       type Rel = UpRelation
+       type Corr = UpCorrespondence
+}
+
+class Row
+class UpRow extends Row
+
+class Relation [R <: Row]
+class UpRelation extends Relation [UpRow]
+
+class Correspondence [R <: Row]
+class UpCorrespondence extends Correspondence [UpRow]
+
+class Mapping[MC <: Correspondence[_]]
diff --git a/test/files/run/t3452a/S_3.scala b/test/files/run/t3452a/S_3.scala
new file mode 100644
index 0000000..aaa898d
--- /dev/null
+++ b/test/files/run/t3452a/S_3.scala
@@ -0,0 +1,5 @@
+object Test {
+  def main(args: Array[String]): Unit = {
+    J_2.main(args)
+  }
+}
diff --git a/test/files/run/t3452b-bcode.check b/test/files/run/t3452b-bcode.check
new file mode 100644
index 0000000..204c3d0
--- /dev/null
+++ b/test/files/run/t3452b-bcode.check
@@ -0,0 +1,2 @@
+Search received: test
+SearchC received: test
diff --git a/test/files/run/t3452b-bcode.flags b/test/files/run/t3452b-bcode.flags
new file mode 100644
index 0000000..c30091d
--- /dev/null
+++ b/test/files/run/t3452b-bcode.flags
@@ -0,0 +1 @@
+-Ybackend:GenBCode
diff --git a/test/files/run/t3452b-bcode/J_2.java b/test/files/run/t3452b-bcode/J_2.java
new file mode 100644
index 0000000..839f334
--- /dev/null
+++ b/test/files/run/t3452b-bcode/J_2.java
@@ -0,0 +1,6 @@
+public class J_2 {
+  public static void j() {
+    StringSearch.search("test");
+    StringSearch.searchC("test");
+  }
+}
diff --git a/test/files/run/t3452b-bcode/S_1.scala b/test/files/run/t3452b-bcode/S_1.scala
new file mode 100644
index 0000000..a209f12
--- /dev/null
+++ b/test/files/run/t3452b-bcode/S_1.scala
@@ -0,0 +1,17 @@
+trait Search[M] {
+  def search(input: M): C[Int] = {
+    println("Search received: " + input)
+    null
+  }
+}
+
+class SearchC[M] {
+  def searchC(input: M): C[Int] = {
+    println("SearchC received: " + input)
+    null
+  }
+}
+
+object StringSearch extends SearchC[String] with Search[String]
+
+trait C[T]
diff --git a/test/files/run/t3452b-bcode/S_3.scala b/test/files/run/t3452b-bcode/S_3.scala
new file mode 100644
index 0000000..102b433
--- /dev/null
+++ b/test/files/run/t3452b-bcode/S_3.scala
@@ -0,0 +1,5 @@
+object Test {
+  def main(args: Array[String]): Unit = {
+    J_2.j()
+  }
+}
diff --git a/test/files/run/t3452b.check b/test/files/run/t3452b.check
new file mode 100644
index 0000000..204c3d0
--- /dev/null
+++ b/test/files/run/t3452b.check
@@ -0,0 +1,2 @@
+Search received: test
+SearchC received: test
diff --git a/test/files/run/t3452b/J_2.java b/test/files/run/t3452b/J_2.java
new file mode 100644
index 0000000..839f334
--- /dev/null
+++ b/test/files/run/t3452b/J_2.java
@@ -0,0 +1,6 @@
+public class J_2 {
+  public static void j() {
+    StringSearch.search("test");
+    StringSearch.searchC("test");
+  }
+}
diff --git a/test/files/run/t3452b/S_1.scala b/test/files/run/t3452b/S_1.scala
new file mode 100644
index 0000000..a209f12
--- /dev/null
+++ b/test/files/run/t3452b/S_1.scala
@@ -0,0 +1,17 @@
+trait Search[M] {
+  def search(input: M): C[Int] = {
+    println("Search received: " + input)
+    null
+  }
+}
+
+class SearchC[M] {
+  def searchC(input: M): C[Int] = {
+    println("SearchC received: " + input)
+    null
+  }
+}
+
+object StringSearch extends SearchC[String] with Search[String]
+
+trait C[T]
diff --git a/test/files/run/t3452b/S_3.scala b/test/files/run/t3452b/S_3.scala
new file mode 100644
index 0000000..102b433
--- /dev/null
+++ b/test/files/run/t3452b/S_3.scala
@@ -0,0 +1,5 @@
+object Test {
+  def main(args: Array[String]): Unit = {
+    J_2.j()
+  }
+}
diff --git a/test/files/run/t3452c.check b/test/files/run/t3452c.check
new file mode 100644
index 0000000..ab47181
--- /dev/null
+++ b/test/files/run/t3452c.check
@@ -0,0 +1,8 @@
+3
+3
+3
+3
+3
+3
+3
+3
diff --git a/test/files/run/t3452c.scala b/test/files/run/t3452c.scala
new file mode 100644
index 0000000..2c55767
--- /dev/null
+++ b/test/files/run/t3452c.scala
@@ -0,0 +1,113 @@
+trait Base[A, B, C] {
+  def f(x: A, y: B, z: C): Unit
+  def g(x: A, y: B, z: C) = f(x, y, z)
+  def h(x: A, y: B, z: C) = g(x, y, z)
+}
+
+trait D1[B, C] extends Base[String, B, C]
+trait D2[A, B] extends Base[A, B, String]
+trait D3[A, C] extends Base[A, String, C]
+trait D4[A] extends Base[A, String, String]
+trait D5[B] extends Base[String, B, String]
+trait D6[C] extends Base[String, String, C]
+trait D7 extends Base[String, String, String]
+
+trait E1[B, C] extends Base[String, B, C] { def f(x: String, y: B, z: C): Unit ; override def h(x: String, y: B, z: C) = g(x, y, z) }
+trait E2[A, B] extends Base[A, B, String] { def f(x: A, y: B, z: String): Unit ; override def h(x: A, y: B, z: String) = g(x, y, z) }
+trait E3[A, C] extends Base[A, String, C] { def f(x: A, y: String, z: C): Unit ; override def h(x: A, y: String, z: C) = g(x, y, z) }
+trait E4[A] extends Base[A, String, String] { def f(x: A, y: String, z: String): Unit ; override def h(x: A, y: String, z: String) = g(x, y, z) }
+trait E5[B] extends Base[String, B, String] { def f(x: String, y: B, z: String): Unit ; override def h(x: String, y: B, z: String) = g(x, y, z) }
+trait E6[C] extends Base[String, String, C] { def f(x: String, y: String, z: C): Unit ; override def h(x: String, y: String, z: C) = g(x, y, z) }
+trait E7 extends Base[String, String, String] { def f(x: String, y: String, z: String): Unit ; override def h(x: String, y: String, z: String) = g(x, y, z) }
+
+trait F1[B, C] extends Base[String, B, C] { def f(x: String, y: B, z: C): Unit = println(x.length) }
+trait F2[A, B] extends Base[A, B, String] { def f(x: A, y: B, z: String): Unit = println(z.length) }
+trait F3[A, C] extends Base[A, String, C] { def f(x: A, y: String, z: C): Unit = println(y.length) }
+trait F4[A] extends Base[A, String, String] { def f(x: A, y: String, z: String): Unit = println(y.length) }
+trait F5[B] extends Base[String, B, String] { def f(x: String, y: B, z: String): Unit = println(x.length) }
+trait F6[C] extends Base[String, String, C] { def f(x: String, y: String, z: C): Unit = println(x.length) }
+trait F7 extends Base[String, String, String] { def f(x: String, y: String, z: String): Unit = println(x.length) }
+
+abstract class DBag extends D1[String, String] with D2[String, String] with D3[String, String] with D4[String] with D5[String] with D6[String] with D7 {
+  def f(x: String, y: String, z: String) = println(x.length + y.length + z.length)
+}
+abstract class EBag extends E1[String, String] with E2[String, String] with E3[String, String] with E4[String] with E5[String] with E6[String] with E7 {
+  def f(x: String, y: String, z: String) = println(x.length + y.length + z.length)
+}
+abstract class FBag extends F1[String, String] with F2[String, String] with F3[String, String] with F4[String] with F5[String] with F6[String] with F7 {
+  override def f(x: String, y: String, z: String) = println(x.length + y.length + z.length)
+}
+
+abstract class GBag1[A, B] extends Base[A, B, String] with D2[A, B] {
+  def f(x: A, y: B, z: String) = println(z.length)
+}
+abstract class GBag2[A] extends GBag1[A, String] with D4[A] {
+  override def f(x: A, y: String, z: String) = println(z.length)
+}
+abstract class GBag3 extends GBag2[String] with D7 {
+  override def f(x: String, y: String, z: String) = println(z.length)
+}
+class GBag extends GBag3 with D2[String, String] with D3[String, String] with D4[String] with D5[String] with D6[String] with D7 {
+}
+
+object Test {
+  def f0(x: Base[String, String, String]) = x.f("a", "b", "c")
+  def f1(x: D1[String, String])           = x.f("a", "b", "c")
+  def f2(x: D2[String, String])           = x.f("a", "b", "c")
+  def f3(x: D3[String, String])           = x.f("a", "b", "c")
+  def f4(x: D4[String])                   = x.f("a", "b", "c")
+  def f5(x: D5[String])                   = x.f("a", "b", "c")
+  def f6(x: D6[String])                   = x.f("a", "b", "c")
+  def f7(x: D7)                           = x.f("a", "b", "c")
+
+  def main(args: Array[String]): Unit = {
+    val x = new DBag { }
+    f0(x)
+    f1(x)
+    f2(x)
+    f3(x)
+    f4(x)
+    f5(x)
+    f6(x)
+    f7(x)
+  }
+}
+
+object TestE {
+  def f0(x: Base[String, String, String]) = { x.f("a", "b", "c") ; x.g("a", "b", "c") ; x.h("a", "b", "c") }
+  def f1(x: E1[String, String])           = { x.f("a", "b", "c") ; x.g("a", "b", "c") ; x.h("a", "b", "c") }
+  def f2(x: E2[String, String])           = { x.f("a", "b", "c") ; x.g("a", "b", "c") ; x.h("a", "b", "c") }
+  def f3(x: E3[String, String])           = { x.f("a", "b", "c") ; x.g("a", "b", "c") ; x.h("a", "b", "c") }
+  def f4(x: E4[String])                   = { x.f("a", "b", "c") ; x.g("a", "b", "c") ; x.h("a", "b", "c") }
+  def f5(x: E5[String])                   = { x.f("a", "b", "c") ; x.g("a", "b", "c") ; x.h("a", "b", "c") }
+  def f6(x: E6[String])                   = { x.f("a", "b", "c") ; x.g("a", "b", "c") ; x.h("a", "b", "c") }
+  def f7(x: E7)                           = { x.f("a", "b", "c") ; x.g("a", "b", "c") ; x.h("a", "b", "c") }
+
+  def main(args: Array[String]): Unit = {
+    val x = new EBag { }
+    f0(x)
+    f1(x)
+    f2(x)
+    f3(x)
+    f4(x)
+    f5(x)
+    f6(x)
+    f7(x)
+  }
+}
+
+
+object TestG {
+  def f0(x: Base[String, String, String]) = { x.f("a", "b", "c") ; x.g("a", "b", "c") ; x.h("a", "b", "c") }
+  def f1(x: GBag1[String, String])        = { x.f("a", "b", "c") ; x.g("a", "b", "c") ; x.h("a", "b", "c") }
+  def f2(x: GBag2[String])                = { x.f("a", "b", "c") ; x.g("a", "b", "c") ; x.h("a", "b", "c") }
+  def f3(x: GBag3)                        = { x.f("a", "b", "c") ; x.g("a", "b", "c") ; x.h("a", "b", "c") }
+
+  def main(args: Array[String]): Unit = {
+    val x = new GBag { }
+    f0(x)
+    f1(x)
+    f2(x)
+    f3(x)
+  }
+}
diff --git a/test/files/run/t3452d/A.scala b/test/files/run/t3452d/A.scala
new file mode 100644
index 0000000..67a2080
--- /dev/null
+++ b/test/files/run/t3452d/A.scala
@@ -0,0 +1,7 @@
+trait TraversableLike[A, Repr] {
+  def tail: Repr = null.asInstanceOf[Repr]
+}
+
+abstract class AbstractTrav[A] extends TraversableLike[A, Traversable[A]]
+
+class C[A] extends AbstractTrav[A]
diff --git a/test/files/run/t3452d/Test.java b/test/files/run/t3452d/Test.java
new file mode 100644
index 0000000..875be61
--- /dev/null
+++ b/test/files/run/t3452d/Test.java
@@ -0,0 +1,12 @@
+import scala.collection.immutable.Nil;
+import scala.collection.immutable.List;
+import scala.collection.Traversable;
+
+public class Test {
+	public static void main(String[] args) {
+		C<String> c = new C<String>();
+		// TODO add a bridge during mixin so we can expose
+		// sharper generic signature for `tail`.
+		/*Traversable<String>*/ Object ls = c.tail();
+	}
+}
diff --git a/test/files/run/t3452e/A.scala b/test/files/run/t3452e/A.scala
new file mode 100644
index 0000000..939172f
--- /dev/null
+++ b/test/files/run/t3452e/A.scala
@@ -0,0 +1,4 @@
+trait F1[T, R] {
+  def andThen[A](g: R => A): Int = 0
+}
+class C1[TT, RR] extends F1[TT, RR]
diff --git a/test/files/run/t3452e/B.java b/test/files/run/t3452e/B.java
new file mode 100644
index 0000000..0268af9
--- /dev/null
+++ b/test/files/run/t3452e/B.java
@@ -0,0 +1,2 @@
+class B extends C1<String, String> {
+}
diff --git a/test/files/run/t3452e/Test.scala b/test/files/run/t3452e/Test.scala
new file mode 100644
index 0000000..fc175bf
--- /dev/null
+++ b/test/files/run/t3452e/Test.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+	new B
+}
diff --git a/test/files/run/t3452f.scala b/test/files/run/t3452f.scala
new file mode 100644
index 0000000..af64f5c
--- /dev/null
+++ b/test/files/run/t3452f.scala
@@ -0,0 +1,19 @@
+import language.higherKinds
+
+trait GenSet[A]
+
+trait GenSetTemplate[A, +CC[X] <: GenSet[X]] {
+  def empty: CC[A] = ???
+}
+
+trait SetLike[A, +This <: SetLike[A, This] with Set[A]] {
+  def empty: This
+}
+
+abstract class Set[A] extends GenSet[A] with SetLike[A,Set[A]] with GenSetTemplate[A,Set]
+
+object Test {
+  def main(args: Array[String]): Unit = {
+    locally(classOf[Set[_]]) // trigger classloading to verify class
+  }
+}
diff --git a/test/files/run/t3452g/A.scala b/test/files/run/t3452g/A.scala
new file mode 100644
index 0000000..a3f74c1
--- /dev/null
+++ b/test/files/run/t3452g/A.scala
@@ -0,0 +1,9 @@
+trait TraversableLike[A, Repr] {
+  def tail: Repr = null.asInstanceOf[Repr]
+}
+
+abstract class AbstractTrav[A] extends TraversableLike[A, AbstractTrav[A]]
+
+object O extends AbstractTrav[String]
+
+class C[A] extends AbstractTrav[A]
diff --git a/test/files/run/t3452g/Test.java b/test/files/run/t3452g/Test.java
new file mode 100644
index 0000000..c3b4222
--- /dev/null
+++ b/test/files/run/t3452g/Test.java
@@ -0,0 +1,14 @@
+
+public class Test {
+	public static void main(String[] args) {
+		// To get better types here, we would need to
+		// add bridge during mixin so we can expose
+		// a generic return type of Traversable<A>, because the erasure
+		// of this (Traversable) differs from the erasure of the mixed
+		// method (erasure(Repr) = Object)
+
+		Object lsSharp = O.tail();
+
+		Object lsSharp2 = new C<String>().tail();
+	}
+}
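
The comment above explains why Java callers only see Object: the mixed-in `tail` keeps the erasure of the trait's type parameter `Repr`, and no bridge with the sharper type is generated. A minimal Scala-side sketch (reusing `O` and `AbstractTrav` from A.scala above) showing that the static type is still precise even though the erased, Java-visible signature is not:

    object ErasureSketch {
      // Statically this is AbstractTrav[String]; the erased return type of `tail`
      // is Object, because Repr is a bare type parameter in TraversableLike.
      val precise: AbstractTrav[String] = O.tail
    }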
diff --git a/test/files/run/t3452h.scala b/test/files/run/t3452h.scala
new file mode 100644
index 0000000..6237d3e
--- /dev/null
+++ b/test/files/run/t3452h.scala
@@ -0,0 +1,8 @@
+class Mix___eFoo_I_wBar__f extends Foo_I_ with Bar__f { f; }
+trait T
+abstract class Foo_I_ { class I extends T    ; def f: I         ; f; }
+trait Bar__f          { type  I>:Null<:T;      def f: I = {null}; f; def gobble: I = {null}}
+
+object Test extends App {
+  new Mix___eFoo_I_wBar__f
+}
diff --git a/test/files/run/t3488.check b/test/files/run/t3488.check
index 0d66ea1..314dfc7 100644
--- a/test/files/run/t3488.check
+++ b/test/files/run/t3488.check
@@ -1,2 +1,8 @@
+t3488.scala:4: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+  println(foo { val List(_*)=List(0); 1 } ())
+                                      ^
+t3488.scala:5: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+  println(foo { val List(_*)=List(0); 1 } (1))
+                                      ^
 0
 1
diff --git a/test/files/run/t3488.scala b/test/files/run/t3488.scala
index 20a1400..a8cfa9b 100644
--- a/test/files/run/t3488.scala
+++ b/test/files/run/t3488.scala
@@ -1,6 +1,6 @@
 object Test extends App {
   def foo(p: => Unit)(x:Int = 0) = x
-  
+
   println(foo { val List(_*)=List(0); 1 } ())
   println(foo { val List(_*)=List(0); 1 } (1))
 }
diff --git a/test/files/run/t3493.scala b/test/files/run/t3493.scala
index b0b7589..aafe7a3 100644
--- a/test/files/run/t3493.scala
+++ b/test/files/run/t3493.scala
@@ -3,7 +3,7 @@
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     import scala.collection.immutable._
     val x = TreeSet("a", "b", "c", "d")
@@ -11,5 +11,5 @@ object Test {
     assert(x2.toString == "TreeSet(a, b, c, d, e)")
     assert(x2.toString == runtime.ScalaRunTime.stringOf(x2).trim)
   }
-  
+
 }
diff --git a/test/files/run/t3496.scala b/test/files/run/t3496.scala
index 80a4e6b..e1aa032 100644
--- a/test/files/run/t3496.scala
+++ b/test/files/run/t3496.scala
@@ -4,12 +4,12 @@
 
 // ticket #3496
 object Test {
-  
+
   def main(args: Array[String]) {
     val s = Stream.from(1)
     s.take(5)
     s.drop(5)
     s.splitAt(5)
   }
-  
+
 }
diff --git a/test/files/run/t3502.scala b/test/files/run/t3502.scala
index 9492b2d..cc78e54 100644
--- a/test/files/run/t3502.scala
+++ b/test/files/run/t3502.scala
@@ -5,7 +5,7 @@
 
 // ticket #3502
 object Test {
-  
+
   object GeneratePrimeFactorsLazy extends (Int => List[Int]) {
     override def apply(n:Int) = {
       val s = Stream.range(2, n / 2).filter(n % _ == 0)
@@ -13,12 +13,12 @@ object Test {
       s.headOption.map(x => x :: apply(n / x)).getOrElse(List(n))
     }
   }
-  
+
   def main(args:Array[String]) {
     // a prime number
     //val num = 623456789
     val num = 2796203
     assert(GeneratePrimeFactorsLazy(num) == List(num))
   }
-  
+
 }
diff --git a/test/files/run/t3507-new.scala b/test/files/run/t3507-new.scala
index f045755..bd16849 100644
--- a/test/files/run/t3507-new.scala
+++ b/test/files/run/t3507-new.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ existentials }
 import scala.reflect.runtime.universe._
 
 class A {
@@ -14,4 +16,4 @@ object Test extends App {
   def mani[T: TypeTag](x: T) = println(typeOf[T])
   mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
   // --> _1 is not in scope here
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t3509.scala b/test/files/run/t3509.scala
index 7ec150e..76f8d60 100644
--- a/test/files/run/t3509.scala
+++ b/test/files/run/t3509.scala
@@ -1,7 +1,7 @@
 object Test {
 
-  class Foo(final var i:Int) 
-  
+  class Foo(final var i:Int)
+
   def main(args : Array[String]) : Unit = {
   	val foo = new Foo(0)
   	foo.i += 1
diff --git a/test/files/run/t3511.scala b/test/files/run/t3511.scala
index 9b4d581..30757b1 100644
--- a/test/files/run/t3511.scala
+++ b/test/files/run/t3511.scala
@@ -6,31 +6,31 @@ import scala.collection.immutable._
 
 // ticket #3511
 object Test {
-  
+
   def main(args: Array[String]) {
     assert(Stream.from(0).view.force.take(5) == List(0, 1, 2, 3, 4))
-    
+
     val s = Stream.from(0)
     val smap = s.view.map(_ * 2).force.take(5)
     assert(smap == List(0, 2, 4, 6, 8))
-    
+
     val sfilter = s.view.filter(_ % 2 == 0).force.take(5)
     assert(sfilter == List(0, 2, 4, 6, 8))
-    
+
     val sflatmap = s.view.flatMap(n => List(n, n * 2)).force.take(6)
     assert(sflatmap == List(0, 0, 1, 2, 2, 4))
-    
+
     val stakewhile = s.view.takeWhile(_ < 10).force
     assert(stakewhile == List.range(0, 10))
-    
+
     val szip = s.view.zip(s.map(_ / 2)).force.take(5)
     assert(szip == List((0, 0), (1, 0), (2, 1), (3, 1), (4, 2)))
-    
+
     val szipall = s.view.zipAll(List(0, 1, 2), 0, 0).force.take(5)
     assert(szipall == List((0, 0), (1, 1), (2, 2), (3, 0), (4, 0)))
-    
+
     val spatch = s.view.patch(1, List(5, 5, 5), 5).force.take(5)
     assert(spatch == List(0, 5, 5, 5, 6))
   }
-  
+
 }
diff --git a/test/files/run/t3516.scala b/test/files/run/t3516.scala
index 82a97f2..aa302ce 100644
--- a/test/files/run/t3516.scala
+++ b/test/files/run/t3516.scala
@@ -1,7 +1,7 @@
 object Test {
   def mkIterator = (1 to 5).iterator map (x => { println(x) ; x })
   def mkInfinite = Iterator continually { println(1) ; 1 }
-  
+
   def main(args: Array[String]): Unit = {
     // Stream is strict in its head so we should see 1 from each of them.
     val s1 = mkIterator.toStream
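
As the comment in this hunk notes, Stream is strict in its head; a minimal sketch of that behaviour (the println label is illustrative):

    object StreamHeadDemo extends App {
      // Converting the iterator forces the first element immediately, so
      // "evaluated 1" prints here; later elements stay lazy until demanded.
      val s = Iterator.from(1).map { x => println("evaluated " + x); x }.toStream
      println(s.head) // reuses the already-forced head; prints 1
    }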
diff --git a/test/files/run/t3518.scala b/test/files/run/t3518.scala
index 36ca9da..033cc19 100644
--- a/test/files/run/t3518.scala
+++ b/test/files/run/t3518.scala
@@ -4,7 +4,7 @@ object Test {
   val r3 = 10.0 to 1.0 by -0.5
   val r4 = 1.0 until 1.0 by 1.0
   val r5 = 1 to 100 by 2
-  
+
   def main(args: Array[String]): Unit = {
     assert(r3 forall (r1 contains _))
     assert(r1 forall (r3 contains _))
diff --git a/test/files/run/t3529.scala b/test/files/run/t3529.scala
index bb82424..a5977d0 100644
--- a/test/files/run/t3529.scala
+++ b/test/files/run/t3529.scala
@@ -1,3 +1,4 @@
+import scala.language.postfixOps
 object Test {
   def main(args: Array[String]): Unit = {
     assert(1 to 10 drop 10 isEmpty)
diff --git a/test/files/run/t3530.scala b/test/files/run/t3530.scala
index 3fedc66..f6f7fb4 100644
--- a/test/files/run/t3530.scala
+++ b/test/files/run/t3530.scala
@@ -5,7 +5,7 @@ object Test {
     case xs @ List(_*) => "list: " + xs.length
     case _             => "not a list"
   })
-  
+
   def f2[T](x: List[T]) = println(x match {
     case List(_, _)       => "two"
     case List(_, _, _)    => "three"
@@ -21,9 +21,9 @@ object Test {
     f(Nil)
     f(List(1,2,3,4,5))
     f(null)
-    
+
     println
-    
+
     f2(List(1, 2))
     f2(List('a', 'b', 'c'))
     f2(List('a', 'b', 'c', 'd'))
diff --git a/test/files/run/t3540.scala b/test/files/run/t3540.scala
index 4eb3de7..5ffacb5 100644
--- a/test/files/run/t3540.scala
+++ b/test/files/run/t3540.scala
@@ -2,6 +2,6 @@ object Test {
   def main(args: Array[String]): Unit = {
     assert(List.iterate(List(1,2,3), 4)(_.tail).last.isEmpty)
     assert(Stream.iterate(Stream(1,2,3), 4)(_.tail).last.isEmpty)
-    assert(Array.iterate(Array(1,2,3), 4)(_.tail).last.isEmpty)    
+    assert(Array.iterate(Array(1,2,3), 4)(_.tail).last.isEmpty)
   }
 }
diff --git a/test/files/run/t3563.scala b/test/files/run/t3563.scala
index 2a80ef4..8abbb60 100644
--- a/test/files/run/t3563.scala
+++ b/test/files/run/t3563.scala
@@ -5,17 +5,17 @@
 
 // ticket #3563
 object Test {
-  
+
   def main(args: Array[String]) {
     var sum = 0
     val setseq = Set(1, 2, 3, 4).toSeq
     setseq.map( n => { sum += n; n * n }).head
     assert(sum == 10)
-    
+
     sum = 0
     val mapseq = Map(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4).toSeq
     mapseq.map( n => { sum += n._1; (n._1 + n._1, n._2 * n._2) }).head
     assert(sum == 10)
   }
-  
+
 }
diff --git a/test/files/run/t3569.scala b/test/files/run/t3569.scala
index 4699aea..91d437e 100644
--- a/test/files/run/t3569.scala
+++ b/test/files/run/t3569.scala
@@ -1,9 +1,9 @@
 object Test {
   final val bippy1 = 1
   final lazy val bippy2 = 2
-  
+
   lazy val lv = scala.util.Random.nextInt()
-  
+
   class X(final var x: Int)  {
     final var var1: Int = 0
     final private var var2: Int = 0
@@ -12,20 +12,20 @@ object Test {
     final val val1: Int = 1
     final private val val2: Int = 1
     final private[this] val val3: Int = 1
-    
+
     final lazy val lval1: Int = 2
     final private lazy val lval2: Int = 2
     final private[this] lazy val lval3: Int = 2
   }
   case class Y(final var x: Int, final private var y: Int, final val z1: Int, final private val z2: Int) { }
-  
+
   def f = new X(0).x += 1
   def main(args: Array[String]) {
     f
     val s = new X(0)
     s.x += 1
     println(s.x)
-    
+
     (classOf[X].getDeclaredFields map ("" + _)).sorted foreach println
     (classOf[Y].getDeclaredFields map ("" + _)).sorted foreach println
   }
diff --git a/test/files/run/t3575.scala b/test/files/run/t3575.scala
index 7ede65b..ef83e84 100644
--- a/test/files/run/t3575.scala
+++ b/test/files/run/t3575.scala
@@ -46,7 +46,7 @@ object Test {
     println(TwoShort(12, "Hello").getClass().getName());
     println(TwoShort("Hello", 12).getClass().getName());
     println(TwoShort(12, 12).getClass().getName());
-    
+
     println(TwoMinimal("Hello", "World").getClass().getName());
     println(TwoMinimal(12, "Hello").getClass().getName());
     println(TwoMinimal("Hello", 12).getClass().getName());
diff --git a/test/files/run/t3580.scala b/test/files/run/t3580.scala
index ac9f81a..50ff6c4 100644
--- a/test/files/run/t3580.scala
+++ b/test/files/run/t3580.scala
@@ -4,14 +4,14 @@
 
 
 object Test {
-  
+
   class Empty extends Traversable[Nothing] {
     def foreach[U](f: Nothing => U) {}
   }
-  
+
   def main(args: Array[String]) {
     val t = new Empty
     t.toStream
   }
-  
+
 }
diff --git a/test/files/run/t3603.scala b/test/files/run/t3603.scala
index a0821a2..a89cb70 100644
--- a/test/files/run/t3603.scala
+++ b/test/files/run/t3603.scala
@@ -2,17 +2,17 @@
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     import collection.immutable._
-    
+
     val intmap = IntMap(1 -> 1, 2 -> 2)
     val intres = intmap.map { case (a, b) => (a, b.toString) }
     assert(intres.isInstanceOf[IntMap[_]])
-    
+
     val longmap = LongMap(1L -> 1, 2L -> 2)
     val longres = longmap.map { case (a, b) => (a, b.toString) }
     assert(longres.isInstanceOf[LongMap[_]])
   }
-  
+
 }
diff --git a/test/files/run/t3613.scala b/test/files/run/t3613.scala
index 171a6a2..1293f62 100644
--- a/test/files/run/t3613.scala
+++ b/test/files/run/t3613.scala
@@ -1,7 +1,7 @@
 class Boopy {
     private val s = new Schnuck
     def observer : PartialFunction[ Any, Unit ] = s.observer
-    
+
     private class Schnuck extends javax.swing.AbstractListModel {
         model =>
         val observer : PartialFunction[ Any, Unit ] = {
@@ -10,13 +10,13 @@ class Boopy {
         def getSize = 0
         def getElementAt( idx: Int ) = ???
     }
-    
+
 }
 
 object Test {
   def main(args: Array[String]): Unit = {
     val x = new Boopy
     val o = x.observer
-    o( "Boopy" ) // --> throws runtime error    
+    o( "Boopy" ) // --> throws runtime error
   }
 }
diff --git a/test/files/run/t3619.scala b/test/files/run/t3619.scala
index 46324b4..3f4c56e 100644
--- a/test/files/run/t3619.scala
+++ b/test/files/run/t3619.scala
@@ -17,7 +17,7 @@ object Test extends App {
       }
     }
   */
-  
+
   new Meh
 }
 
diff --git a/test/files/run/t363.scala b/test/files/run/t363.scala
index 5f3f30a..c747be2 100644
--- a/test/files/run/t363.scala
+++ b/test/files/run/t363.scala
@@ -3,7 +3,7 @@ object Test {
     println("I love the smell of (Array[String])Unit in the morning.")
   }
 }
-  
+
 class Test {
   def kurtz() = "We must kill them. We must incinerate them."
 }
diff --git a/test/files/run/t3647.scala b/test/files/run/t3647.scala
index 434a13f..a970e88 100644
--- a/test/files/run/t3647.scala
+++ b/test/files/run/t3647.scala
@@ -13,10 +13,10 @@ object Test {
       "line4"
     ).iterator)
     assert(ps.filter(_ == '\n').size == 3)
-    
+
     val ps1 = PagedSeq.fromLines(List("Ok").iterator)
     assert(ps1.filter(_ == '\n').size == 0)
-    
+
     val eps = PagedSeq.fromLines(List().iterator)
     assert(eps.filter(_ == '\n').size == 0)
   }
diff --git a/test/files/run/t3651.scala b/test/files/run/t3651.scala
index 49ae173..3a6dda0 100644
--- a/test/files/run/t3651.scala
+++ b/test/files/run/t3651.scala
@@ -5,6 +5,6 @@ class LongKlass( override val a: Long ) extends Klass[Long](a)
 object Test {
   def main(args: Array[String]) {
     val lk = new LongKlass(10)
-    lk.a
+    val a = lk.a
   }
 }
diff --git a/test/files/run/t3667.check b/test/files/run/t3667.check
index bbe5d1b..6375c88 100644
--- a/test/files/run/t3667.check
+++ b/test/files/run/t3667.check
@@ -1,6 +1,3 @@
-1
-2
-3
 4
 2
 3
diff --git a/test/files/run/t3667.scala b/test/files/run/t3667.scala
index f30d57c..ada09d5 100644
--- a/test/files/run/t3667.scala
+++ b/test/files/run/t3667.scala
@@ -1,27 +1,9 @@
 object Test {
   def main(args: Array[String]) {
-    val o1 = new Outer1
-    val o2 = new Outer2
-    val o3 = new Outer3
     val o4 = new Outer4
     val o5 = new Outer5
     val o6 = new Outer6
 
-    println(1)
-    ser(new o1.Inner(1))
-    o1.Inner // make sure the Inner$module field of the Outer1 instance is initialized!
-    ser(new o1.Inner(1))
-
-    println(2)
-    ser(new o2.Inner(1))
-    o2.Inner
-    ser(new o2.Inner(1))
-
-    println(3)
-    ser(new o3.Inner(1))
-    o3.Inner
-    ser(new o3.Inner(1))
-
     println(4)
     ser(new o4.Inner(1))
     o4.Inner
@@ -54,23 +36,6 @@ object Test {
 
 }
 
-@serializable
-class Outer1 {
-  @serializable
-  class Inner(x: Int = 1)
-}
-
-@serializable
-class Outer2 {
-  case class Inner(x: Int = 1)
-}
-
-@serializable
-class Outer3 {
-  case class Inner(x: Int)
-}
-
-
 class Outer4 extends Serializable {
   class Inner(x: Int = 1) extends Serializable
 }
diff --git a/test/files/run/t3702.scala b/test/files/run/t3702.scala
index 021abcb..0cdafd9 100644
--- a/test/files/run/t3702.scala
+++ b/test/files/run/t3702.scala
@@ -3,7 +3,7 @@ object Test {
     case 5 :: _     => ()
     case List(from) => from
   }
-  
+
   def main(args: Array[String]): Unit = {
     println(foo(5 :: Nil, List(1,2,3)))
     println(foo(6 :: Nil, List(1,2,3)))
diff --git a/test/files/run/t3705.scala b/test/files/run/t3705.scala
deleted file mode 100644
index fcc020f..0000000
--- a/test/files/run/t3705.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-// package foo
-
-import scala.xml._
-object Test {
-  def updateNodes(ns: Seq[Node]): Seq[Node] =
-    for(subnode <- ns) yield subnode match {
-      case <d>{_}</d> if true => <d>abc</d>
-      case Elem(prefix, label, attribs, scope, children @ _*) =>
-        Elem(prefix, label, attribs, scope, updateNodes(children) : _*)
-      case other => other
-    }
-  def main(args: Array[String]): Unit = {
-    updateNodes(<b />)
-    
-  }
-}
-
diff --git a/test/files/run/t3714.scala b/test/files/run/t3714.scala
index c344b40..2d600f9 100644
--- a/test/files/run/t3714.scala
+++ b/test/files/run/t3714.scala
@@ -21,7 +21,7 @@ object Test {
     case BreakImpl(x) => x
     case _            => -1
   }
-  
+
   def main(args: Array[String]) {
     val break = BreakImpl(22)
     // assert(f1(break) == 22)
diff --git a/test/files/run/t3719.scala b/test/files/run/t3719.scala
index 4649f11..01dc205 100644
--- a/test/files/run/t3719.scala
+++ b/test/files/run/t3719.scala
@@ -1,7 +1,7 @@
 object Days extends Enumeration {
   type Day = DayValue
   val Mon, Tue, Wed, Thu, Fri, Sat, Sun = new DayValue // DayValue
-    
+
   protected class DayValue extends Val {
     def isWeekday: Boolean =
       this match {
@@ -21,7 +21,7 @@ object Test extends App {
     val d: Day = Mon
     d.toString
   }
-    
+
   def nameOfTue(): String =  {
     import Days._
     val d: Day = Tue
diff --git a/test/files/run/t3758-old.scala b/test/files/run/t3758-old.scala
index f00254a..d5e4a6c 100644
--- a/test/files/run/t3758-old.scala
+++ b/test/files/run/t3758-old.scala
@@ -1,3 +1,5 @@
+
+@deprecated("Suppress warnings", since="2.11")
 object Test {
   def main(args: Array[String]): Unit = {
     assert(classManifest[Array[String]].typeArguments contains classManifest[String])
@@ -7,4 +9,4 @@ object Test {
     assert(manifest[Array[Int]].typeArguments contains manifest[Int])
     assert(manifest[Array[Float]].typeArguments contains manifest[Float])
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t3822.scala b/test/files/run/t3822.scala
index eaf0a14..c358040 100644
--- a/test/files/run/t3822.scala
+++ b/test/files/run/t3822.scala
@@ -1,17 +1,17 @@
 import scala.collection.{ mutable, immutable, generic }
 import immutable.ListSet
 
-object Test {    
+object Test {
   def main(args: Array[String]): Unit = {
     val xs = ListSet(-100000 to 100001: _*)
-   
+
     assert(xs.size == 200002)
     assert(xs.sum == 100001)
-    
+
     val ys = ListSet[Int]()
     val ys1 = (1 to 12).grouped(3).foldLeft(ys)(_ ++ _)
     val ys2 = (1 to 12).foldLeft(ys)(_ + _)
-    
+
     assert(ys1 == ys2)
   }
 }
diff --git a/test/files/run/t3829.scala b/test/files/run/t3829.scala
index e3d8c56..780a6a9 100644
--- a/test/files/run/t3829.scala
+++ b/test/files/run/t3829.scala
@@ -1,11 +1,11 @@
 // ticket #3829
 object Test {
   import collection.{ mutable, immutable }
-  
+
   def main(args: Array[String]) {
     val map = immutable.Map(1 -> 2, 3 -> 4)
     assert(map.get(0) == None)
-    
+
     // Since r24255 defaultMap.get(x) returns None rather than
     // using the default, so these mostly use apply.
     val defmap = map.withDefaultValue(-1)
@@ -19,10 +19,10 @@ object Test {
     assert((defmap - 1)(0) == -1)
     assert((defmap - 1)(1) == -1)
     assert((defmap - 1)(3) == 4)
-    
+
     val mutmap = mutable.Map(1 -> 2, 2 -> 3)
     assert(mutmap.get(0) == None)
-    
+
     val defmutmap = mutmap.withDefaultValue(-1)
     assert(defmutmap(0) == -1)
     assert(defmutmap(3) == -1)
@@ -36,5 +36,5 @@ object Test {
     assert(defmutmap(1) == 2)
     assert(mutmap(1) == 2)
   }
-  
+
 }
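
The comment above about r24255 is the behaviour these asserts pin down: a map's default value is consulted by apply but not by get. A minimal sketch (the values are illustrative):

    object DefaultValueDemo extends App {
      val m = Map(1 -> 2).withDefaultValue(-1)
      assert(m.get(0) == None) // get ignores the default
      assert(m(0) == -1)       // apply falls back to it
      assert(m(1) == 2)        // present keys are unaffected
    }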
diff --git a/test/files/run/t3832.scala b/test/files/run/t3832.scala
new file mode 100644
index 0000000..ac44358
--- /dev/null
+++ b/test/files/run/t3832.scala
@@ -0,0 +1,17 @@
+class t3832 {
+  def this(un: Int) = {
+    this()
+    def bippy = this
+    ()
+  }
+  def this(un: Boolean) = {
+    this()
+    def boppy = () => this
+    ()
+  }
+}
+
+object Test extends App {
+  new t3832(0)
+  new t3832(true)
+}
diff --git a/test/files/run/t3835.scala b/test/files/run/t3835.scala
index c120a61..766b6dd 100644
--- a/test/files/run/t3835.scala
+++ b/test/files/run/t3835.scala
@@ -1,6 +1,6 @@
 object Test extends App {
   // work around optimizer bug SI-5672  -- generates wrong bytecode for switches in arguments
-  // virtpatmat happily emits a switch for a one-case switch, whereas -Xoldpatmat did not
+  // virtpatmat happily emits a switch for a one-case switch
   // this is not the focus of this test, hence the temporary workaround
   def a = (1, 2, 3) match { case (r, \u03b8, \u03c6) => r + \u03b8 + \u03c6 }
   println(a)
diff --git a/test/files/run/t3855.scala b/test/files/run/t3855.scala
index e557142..d10aab5 100644
--- a/test/files/run/t3855.scala
+++ b/test/files/run/t3855.scala
@@ -1,13 +1,13 @@
 object Test {
   def byval[A](a: => A) = a
   def closure[A](f: () => A) = f()
-  
+
   def f1(s: String) = {
-    var n = try { s.toInt } catch { case _ => 1 }
+    var n = try { s.toInt } catch { case _: Throwable => 1 }
     byval(n)
   }
   def f2(s: String) = {
-    var n = try { s.toInt } catch { case _ => 1 }
+    var n = try { s.toInt } catch { case _: Throwable => 1 }
     closure(() => n)
   }
 
diff --git a/test/files/run/t3877.scala b/test/files/run/t3877.scala
index ce1e928..5710e98 100644
--- a/test/files/run/t3877.scala
+++ b/test/files/run/t3877.scala
@@ -11,7 +11,7 @@ object Test {
       println("test1: " + d)
     }
   }
-    
+
   def test2 {
     var d = 2
     var i = 0
@@ -25,7 +25,7 @@ object Test {
         return
     }
   }
-    
+
   def test3 {
     var d = 2
     var i = 0
@@ -34,9 +34,9 @@ object Test {
       d = b
       i += 1
       println("test3: " + d)
-    } while (d < LIMIT && i < LIMIT) 
+    } while (d < LIMIT && i < LIMIT)
   }
-    
+
   def test4 {
     var d = 2
     var i = 0
@@ -47,9 +47,9 @@ object Test {
       println("test4: " + d)
       if (d >= LIMIT || i >= LIMIT)
         return
-    } while (true) 
+    } while (true)
   }
-    
+
   def test5 {
     var d = 2
     var i = 0
@@ -58,7 +58,7 @@ object Test {
       d = b
       i += 1
       println("test5.1: " + d)
-        
+
       var e = 2
       var j = 0
       while (e < LIMIT && j < LIMIT) {
@@ -69,7 +69,7 @@ object Test {
       }
     }
   }
-    
+
 
   def main(args: Array[String]) {
     test1
diff --git a/test/files/run/t3886.scala b/test/files/run/t3886.scala
deleted file mode 100644
index 1e8e7ad..0000000
--- a/test/files/run/t3886.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object Test {
-  def main(args: Array[String]) {
-    assert( <k a="1" b="2"/> == <k a="1" b="2"/> )
-    assert( <k a="1" b="2"/> != <k a="1" b="3"/> )
-    assert( <k a="1" b="2"/> != <k a="2" b="2"/> )
-
-    assert( <k a="1" b="2"/> != <k/> )
-    assert( <k a="1" b="2"/> != <k a="1"/> )
-    assert( <k a="1" b="2"/> != <k b="2"/> )
-  }
-}
diff --git a/test/files/run/t3887.scala b/test/files/run/t3887.scala
index 81fa7a9..16ce983 100644
--- a/test/files/run/t3887.scala
+++ b/test/files/run/t3887.scala
@@ -13,4 +13,4 @@ object Test {
 
     val option1: (Int, String) = (1, "abc")
 
-} 
+}
diff --git a/test/files/run/t3888.check b/test/files/run/t3888.check
new file mode 100644
index 0000000..844ca54
--- /dev/null
+++ b/test/files/run/t3888.check
@@ -0,0 +1 @@
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
diff --git a/test/files/run/t3888.scala b/test/files/run/t3888.scala
index e0f1453..8701b42 100644
--- a/test/files/run/t3888.scala
+++ b/test/files/run/t3888.scala
@@ -1,15 +1,18 @@
+
+// in a match, which notion of equals prevails?
+// extending Tuple doesn't seem to be at issue here.
 object Test {
 
   val T1 = new P
   private[this] val T2 = T1
 
-  def m1 = 
+  def m1 =
     (1, 2) match {
       case T1 => true
       case _ => false
     }
 
-  def m2 = 
+  def m2 =
     (1, 2) match {
       case T2 => true
       case _ => false
@@ -21,6 +24,6 @@ object Test {
   }
 }
 
-class P extends Pair(1, 1) {
+class P extends Tuple2(1, 1) {
   override def equals(x: Any) = true
-}
\ No newline at end of file
+}
diff --git a/test/pending/run/t3897.check b/test/files/run/t3897.check
similarity index 100%
rename from test/pending/run/t3897.check
rename to test/files/run/t3897.check
diff --git a/test/files/run/t3897.flags b/test/files/run/t3897.flags
new file mode 100644
index 0000000..ac96850
--- /dev/null
+++ b/test/files/run/t3897.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline
\ No newline at end of file
diff --git a/test/files/run/t3897/J_2.java b/test/files/run/t3897/J_2.java
new file mode 100644
index 0000000..a4c9a98
--- /dev/null
+++ b/test/files/run/t3897/J_2.java
@@ -0,0 +1,27 @@
+import java.lang.reflect.*;
+
+public class J_2 {
+  public void f1(Class<?> clazz) {
+    Field[] fields = clazz.getDeclaredFields();
+    for (int i = 0 ; i < fields.length; i++) {
+      String name = fields[i].getName();
+      if (name.length() >= 7 && name.substring(0, 7).equals("bitmap$")) { }
+      else System.out.println("(" + name + "," + fields[i].getGenericType() + ")");
+    }
+  }
+  public void f2(Class<?> clazz) {
+    Method[] methods = clazz.getDeclaredMethods();
+    for (int i = 0 ; i < methods.length; i++) {
+      String name = methods[i].getName();
+      if (name.length() >= 7 && name.substring(0, 7).equals("bitmap$")) { }
+      else System.out.println("(" + name + "," + methods[i].getGenericReturnType() + ")");
+    }
+  }
+
+  public void javaRun() {
+    f1(One.class);
+    f2(One.class);
+    f1(Two.class);
+    f2(Two.class);
+  }
+}
\ No newline at end of file
diff --git a/test/pending/run/t3897/a_1.scala b/test/files/run/t3897/a_1.scala
similarity index 100%
rename from test/pending/run/t3897/a_1.scala
rename to test/files/run/t3897/a_1.scala
diff --git a/test/files/run/t3897/a_2.scala b/test/files/run/t3897/a_2.scala
new file mode 100644
index 0000000..7a161fc
--- /dev/null
+++ b/test/files/run/t3897/a_2.scala
@@ -0,0 +1,23 @@
+object Test {
+  def f1(clazz: Class[_]) = (
+    clazz.getDeclaredFields.toList
+     . filterNot (_.getName contains "bitmap$")
+     . map (f => (f.getName, f.getGenericType))
+     . foreach (println)
+  )
+  def f2(clazz: Class[_]) = (
+    clazz.getDeclaredMethods.toList
+     . filterNot (_.getName contains "bitmap$")
+     . map (f => (f.getName, f.getGenericReturnType))
+     . foreach (println)
+  )
+
+  def main(args: Array[String]): Unit = {
+    f1(classOf[One])
+    f2(classOf[One])
+    f1(classOf[Two])
+    f2(classOf[Two])
+
+    new J_2().javaRun
+  }
+}
diff --git a/test/files/run/t3932.scala b/test/files/run/t3932.scala
index 51fc16d..f577ef8 100644
--- a/test/files/run/t3932.scala
+++ b/test/files/run/t3932.scala
@@ -23,10 +23,10 @@ object Test {
   def main(args: Array[String]): Unit = {
     println(O1.g1)
     println(O1.g2)
-    
+
     println(O2.g1)
     println(O2.g2)
-  
+
     val o3 = new O3()
     println(o3.g1)
     println(o3.g2)
diff --git a/test/files/run/t3935.scala b/test/files/run/t3935.scala
index c66b1b0..fa1bbf8 100644
--- a/test/files/run/t3935.scala
+++ b/test/files/run/t3935.scala
@@ -9,7 +9,7 @@ object Test {
     try {
       assert(q.front != null)
     } catch {
-      case _ =>
+      case _: Throwable =>
     }
   }
 }
diff --git a/test/files/run/t3964.scala b/test/files/run/t3964.scala
index 80ba361..9b46cdb 100644
--- a/test/files/run/t3964.scala
+++ b/test/files/run/t3964.scala
@@ -1,13 +1,16 @@
+
+import scala.language.implicitConversions
+
 object Test {
-  class Base 
+  class Base
   object Bob extends Base
   class Foo { def bippy = 42 }
   class Oof { def bippy = -21 }
-  
+
   // I am more specific than you
   implicit def f1(x: Bob.type): Foo = new Foo
   implicit def f2(x: Base): Oof = new Oof
-  
+
   def main(args: Array[String]): Unit = {
     // this would of course print an unambiguous 42
     println(Bob.bippy)
diff --git a/test/files/run/t3970.check b/test/files/run/t3970.check
new file mode 100644
index 0000000..bd89fff
--- /dev/null
+++ b/test/files/run/t3970.check
@@ -0,0 +1 @@
+warning: there were 5 deprecation warning(s); re-run with -deprecation for details
diff --git a/test/files/run/t3970.scala b/test/files/run/t3970.scala
index f8c763b..35b5ce6 100644
--- a/test/files/run/t3970.scala
+++ b/test/files/run/t3970.scala
@@ -9,11 +9,11 @@ object Test {
   def main(args: Array[String]) {
     val dl = DoubleLinkedList[Int]()
     dl.remove()
-    
+
     val dl2 = DoubleLinkedList[Int](1, 2, 3)
     dl2.next.remove()
     assert(dl2 == DoubleLinkedList(1, 3))
-    
+
     val dl3 = DoubleLinkedList[Int](1, 2, 3)
     assert(dl3.drop(1) == DoubleLinkedList(2, 3))
     assert(dl3.drop(1).prev == null)
diff --git a/test/files/run/t3980.scala b/test/files/run/t3980.scala
index 4e693eb..c140176 100644
--- a/test/files/run/t3980.scala
+++ b/test/files/run/t3980.scala
@@ -2,7 +2,7 @@ object A {
   def run1 {
     lazy val x: Unit = {(); println("once")}
     x
-    x 
+    x
   }
   def run2 {
 	lazy val y: Int = 2
diff --git a/test/files/run/t3984.scala b/test/files/run/t3984.scala
index 9a2e00a..0747b0e 100644
--- a/test/files/run/t3984.scala
+++ b/test/files/run/t3984.scala
@@ -36,7 +36,7 @@ object MapBug {
       im = im + ((ih,ih))
       mm = mm + ((ih,ih))
     }
-    assert(im == mm)    
+    assert(im == mm)
     val x = IH(6,4)
     im = im - x
     mm = mm - x
diff --git a/test/files/run/t3996.check b/test/files/run/t3996.check
new file mode 100644
index 0000000..a92ddc0
--- /dev/null
+++ b/test/files/run/t3996.check
@@ -0,0 +1 @@
+warning: there were 2 deprecation warning(s); re-run with -deprecation for details
diff --git a/test/files/run/t4013.scala b/test/files/run/t4013.scala
index da7fbc5..7060559 100644
--- a/test/files/run/t4013.scala
+++ b/test/files/run/t4013.scala
@@ -5,12 +5,12 @@ trait Base[B] {
 }
 
 
-class Suba[@specialized B](override val data: Array[B]) extends Base[B] {  
+class Suba[@specialized B](override val data: Array[B]) extends Base[B] {
   assert(data != null)
 }
 
 
-class Subopt[@specialized B](override val data: Option[B]) extends Base[B] {  
+class Subopt[@specialized B](override val data: Option[B]) extends Base[B] {
   assert(data != null)
 }
 
diff --git a/test/files/run/t4013b.scala b/test/files/run/t4013b.scala
index f17eaea..1262e26 100644
--- a/test/files/run/t4013b.scala
+++ b/test/files/run/t4013b.scala
@@ -8,7 +8,7 @@ trait Base[B] {
 class M[@specialized(Int) A]
 
 
-class Sub3[@specialized(Int) B](override val data: M[B]) extends Base[B] {  
+class Sub3[@specialized(Int) B](override val data: M[B]) extends Base[B] {
   assert(data != null)
 }
 
diff --git a/test/files/run/t4023.check b/test/files/run/t4023.check
new file mode 100644
index 0000000..05f867c
--- /dev/null
+++ b/test/files/run/t4023.check
@@ -0,0 +1,21 @@
+Try 1: (6 classes)
+class Test$C$B1
+class Test$C$B2
+class Test$C$B3$
+class Test$C$B4$
+class Test$C$B5$
+class Test$C$B6$
+Try 2: (6 classes)
+class Test$C$B1
+class Test$C$B2
+class Test$C$B3$
+class Test$C$B4$
+class Test$C$B5$
+class Test$C$B6$
+Try 3: (6 classes)
+class Test$C$B1
+class Test$C$B2
+class Test$C$B3$
+class Test$C$B4$
+class Test$C$B5$
+class Test$C$B6$
diff --git a/test/files/run/t4023.scala b/test/files/run/t4023.scala
new file mode 100644
index 0000000..518b583
--- /dev/null
+++ b/test/files/run/t4023.scala
@@ -0,0 +1,34 @@
+object Test {
+  object C {
+    class B1
+    private class B2
+    object B3
+    private object B4
+    object B5 extends B1
+    private object B6 extends B2
+
+    val classes1 = this.getClass.getDeclaredClasses
+    val classes2 = C.getClass   .getDeclaredClasses
+    val classes3 = getClass     .getDeclaredClasses
+  }
+
+  // sortBy(_.getName) introduces additional classes which we don't want to see in C,
+  // so we call sortBy outside of C.
+  object TestHelper {
+    val valuesTry1 = C.classes1.sortBy(_.getName)
+    val valuesTry2 = C.classes2.sortBy(_.getName)
+    val valuesTry3 = C.classes3.sortBy(_.getName)
+  }
+
+  def main(args: Array[String]) {
+    println("Try 1: (" + TestHelper.valuesTry1.length + " classes)")
+    TestHelper.valuesTry1.foreach(println)
+    println("Try 2: (" + TestHelper.valuesTry2.length + " classes)")
+    TestHelper.valuesTry2.foreach(println)
+    println("Try 3: (" + TestHelper.valuesTry3.length + " classes)")
+    TestHelper.valuesTry3.foreach(println)
+  }
+
+
+}
+
diff --git a/test/files/run/t4024.scala b/test/files/run/t4024.scala
index 7c62a3f..f484109 100644
--- a/test/files/run/t4024.scala
+++ b/test/files/run/t4024.scala
@@ -1,9 +1,9 @@
 object Test extends App {
 
-  val x = "abc" 
+  val x = "abc"
 
   val m = x.getClass.getMethod("toString")
-  
+
   assert(m.invoke(x, (Nil: List[AnyRef]): _*) == "abc")
 
   Test2.main(Array())
diff --git a/test/files/run/t4025.check b/test/files/run/t4025.check
index 6715003..2d4f644 100644
--- a/test/files/run/t4025.check
+++ b/test/files/run/t4025.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> class Color(val red: Int)
 defined class Color
 
diff --git a/test/files/run/t4027.scala b/test/files/run/t4027.scala
index d70ca0c..585c5c8 100644
--- a/test/files/run/t4027.scala
+++ b/test/files/run/t4027.scala
@@ -7,7 +7,7 @@ import collection._
  *  Mapping, filtering, etc. on these views should return sorted maps again.
  */
 object Test extends App {
-  
+
   val sortedmap = SortedMap(1 -> false, 2 -> true, 3 -> false, 4 -> true)
   println(sortedmap.filterKeys(_ % 2 == 0): SortedMap[Int, Boolean])
   println(sortedmap.mapValues(_ + "!"): SortedMap[Int, String])
@@ -15,7 +15,7 @@ object Test extends App {
   println(sortedmap.mapValues(_ + "!").map(t => (t._1, t._2.toString.length)): SortedMap[Int, Int])
   println(sortedmap.filterKeys(_ % 2 == 0).filter(t => t._1 < 2): SortedMap[Int, Boolean])
   println(sortedmap.mapValues(_ + "!").filter(t => t._1 < 2): SortedMap[Int, String])
-  
+
   val immsortedmap = immutable.SortedMap(1 -> false, 2 -> true, 3 -> false, 4 -> true)
   println(immsortedmap.filterKeys(_ % 2 == 0): immutable.SortedMap[Int, Boolean])
   println(immsortedmap.mapValues(_ + "!"): immutable.SortedMap[Int, String])
@@ -23,5 +23,5 @@ object Test extends App {
   println(immsortedmap.mapValues(_ + "!").map(t => (t._1, t._2.toString.length)): immutable.SortedMap[Int, Int])
   println(immsortedmap.filterKeys(_ % 2 == 0).filter(t => t._1 < 2): immutable.SortedMap[Int, Boolean])
   println(immsortedmap.mapValues(_ + "!").filter(t => t._1 < 2): immutable.SortedMap[Int, String])
-  
+
 }
diff --git a/test/files/run/t4047.check b/test/files/run/t4047.check
index 2a942a7..3c41e6e 100644
--- a/test/files/run/t4047.check
+++ b/test/files/run/t4047.check
@@ -1,3 +1,15 @@
+t4047.scala:23: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+  a.foo
+    ^
+t4047.scala:24: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+  a.foo
+    ^
+t4047.scala:26: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+  b.foo
+    ^
+t4047.scala:27: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+  b.foo
+    ^
 Unit: called A.foo
 Unit: called B.foo
 Unit: called C.foo
diff --git a/test/files/run/t4047.scala b/test/files/run/t4047.scala
index cd42a8b..08989bd 100644
--- a/test/files/run/t4047.scala
+++ b/test/files/run/t4047.scala
@@ -18,7 +18,7 @@ class D extends Bar[Unit]{
   def foo = println("Unit: called D.foo")
 }
 
-object Test extends Application {
+object Test extends App {
   val a: Foo[Unit] = new A
   a.foo
   a.foo
diff --git a/test/files/run/t4054.scala b/test/files/run/t4054.scala
index 8f1f685..83a58ef 100644
--- a/test/files/run/t4054.scala
+++ b/test/files/run/t4054.scala
@@ -9,7 +9,7 @@
 object Test {
   def main(args: Array[String]) {
     val it = Iterator.from(1).map(n => n * n).scanLeft(0)(_+_)
-    
+
     assert(it.next == 0)
     assert(it.next == 1)
     assert(it.next == 5)
diff --git a/test/files/run/t4062.scala b/test/files/run/t4062.scala
index 6a5aea5..f5478e7 100644
--- a/test/files/run/t4062.scala
+++ b/test/files/run/t4062.scala
@@ -1,7 +1,7 @@
 class A(val f : String)
 
 class B(f: String) extends A(f) {
-  def foo(x: String) = x match { 
+  def foo(x: String) = x match {
     case `f`  => true
     case _    => false
   }
diff --git a/test/files/run/t4072.scala b/test/files/run/t4072.scala
index 872072a..c035fcc 100644
--- a/test/files/run/t4072.scala
+++ b/test/files/run/t4072.scala
@@ -1,12 +1,14 @@
 import scala.tools.nsc._
 
+import scala.language.{ reflectiveCalls }
+
 object Test {
   class DryRun {
-    val compiler = new Global(new Settings()) { 
+    val compiler = new Global(new Settings()) {
       lazy val test1 = new AnyRef
     }
   }
-  
+
   def main(args: Array[String]) {
     new DryRun().compiler.test1
   }
diff --git a/test/files/run/t408.scala b/test/files/run/t408.scala
index 4d3dcbc..9e51e88 100644
--- a/test/files/run/t408.scala
+++ b/test/files/run/t408.scala
@@ -2,7 +2,7 @@ object Test
 {
   val a = scala.collection.immutable.Set.empty ++ (0 to 100000)
   val b = scala.collection.immutable.Set.empty ++ (0 to 100000)
-  
+
   def main(args: Array[String]): Unit = {
     a -- b
     a -- b
diff --git a/test/files/run/t4080.check b/test/files/run/t4080.check
index 66ce31b..1953a68 100644
--- a/test/files/run/t4080.check
+++ b/test/files/run/t4080.check
@@ -1 +1,2 @@
+warning: there were 3 deprecation warning(s); re-run with -deprecation for details
 LinkedList(1, 0, 2, 3)
diff --git a/test/files/run/t4080.scala b/test/files/run/t4080.scala
index 92740ed..1d14188 100644
--- a/test/files/run/t4080.scala
+++ b/test/files/run/t4080.scala
@@ -1,4 +1,5 @@
 import scala.collection.mutable.LinkedList
+import java.util.NoSuchElementException
 
 object Test {
   def main(args: Array[String]) {
@@ -6,7 +7,7 @@ object Test {
     ll.insert(LinkedList(0))
     println(ll)
     val ll2 = LinkedList[Int]()
-    try println(ll2.head)
-    catch { case _ => () }
+    try println("Empty head? " + ll2.head)
+    catch { case _: NoSuchElementException => () }
   }
 }
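
The sharpened catch clause above matches what head on an empty collection actually throws; a minimal sketch of that behaviour:

    import java.util.NoSuchElementException

    object EmptyHeadDemo extends App {
      // head of an empty collection throws NoSuchElementException.
      try println(List.empty[Int].head)
      catch { case _: NoSuchElementException => println("empty head") }
    }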
diff --git a/test/files/run/t4119/J.java b/test/files/run/t4119/J.java
index 4250899..ee65d33 100644
--- a/test/files/run/t4119/J.java
+++ b/test/files/run/t4119/J.java
@@ -1,6 +1,6 @@
 package foo.bar;
 
-public abstract class J {  
+public abstract class J {
   protected void foo(J j) {
     return;
   }
diff --git a/test/files/run/t4119/S.scala b/test/files/run/t4119/S.scala
index e5e3db5..d6ae5f1 100644
--- a/test/files/run/t4119/S.scala
+++ b/test/files/run/t4119/S.scala
@@ -1,6 +1,6 @@
 class S extends foo.bar.J {
   sss =>
-  
+
   val fn = () => {
     foo(S.this)
   }
diff --git a/test/files/run/t4122.scala b/test/files/run/t4122.scala
index 7ead2b7..5ff570c 100644
--- a/test/files/run/t4122.scala
+++ b/test/files/run/t4122.scala
@@ -4,7 +4,7 @@ object Test {
   val sw3 = Seq('a', 'b')
   val sw4 = "ab".toList
   val all = List(sw, sw2, sw3, sw4)
-  
+
   def main(args: Array[String]): Unit = {
     for (s1 <- all ; s2 <- all) {
       assert(s1 == s2, s1 + " != " + s2)
diff --git a/test/files/run/t4138.check b/test/files/run/t4138.check
deleted file mode 100644
index f561b5e..0000000
--- a/test/files/run/t4138.check
+++ /dev/null
@@ -1,2 +0,0 @@
-[1.45] parsed: "lir 'de\' ' \\ \n / upa \"new\" \t parsing"
-[1.5] parsed: "s "
diff --git a/test/files/run/t4138.scala b/test/files/run/t4138.scala
deleted file mode 100644
index 131489e..0000000
--- a/test/files/run/t4138.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends App {
-  object p extends scala.util.parsing.combinator.JavaTokenParsers
-
-  println(p.parse(p.stringLiteral, """"lir 'de\' ' \\ \n / upa \"new\" \t parsing""""))
-  println(p.parse(p.stringLiteral, """"s " lkjse""""))
-}
diff --git a/test/files/run/t4146.scala b/test/files/run/t4146.scala
deleted file mode 100644
index 93ce22b..0000000
--- a/test/files/run/t4146.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object bob extends Application {
-  var name = "Bob"
-}
-
-object Test extends App {
-  assert(bob.name == "Bob")
-}
diff --git a/test/files/run/t4148.scala b/test/files/run/t4148.scala
index 0739403..d543e09 100644
--- a/test/files/run/t4148.scala
+++ b/test/files/run/t4148.scala
@@ -1,8 +1,8 @@
 object Test {
-  val x1 = try { "aaa".asInstanceOf[Int] } catch { case _ => "cce1" }
-  val x2 = try { (5: Any).asInstanceOf[Int] } catch { case _ => "cce2" }
-  val x3 = try { (new java.lang.Short(100.toShort).asInstanceOf[Int]) } catch { case _ => "cce3" }
-  
+  val x1 = try { "aaa".asInstanceOf[Int] } catch { case _: Throwable => "cce1" }
+  val x2 = try { (5: Any).asInstanceOf[Int] } catch { case _: Throwable => "cce2" }
+  val x3 = try { (new java.lang.Short(100.toShort).asInstanceOf[Int]) } catch { case _: Throwable => "cce3" }
+
   def main(args: Array[String]): Unit = {
     List(x1, x2, x3) foreach println
   }
diff --git a/test/files/run/t4171.scala b/test/files/run/t4171.scala
index fba2fb5..7f6dfd4 100644
--- a/test/files/run/t4171.scala
+++ b/test/files/run/t4171.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ reflectiveCalls }
+
 object Test {
   val c = { class C; new C { def foo = 1 } }
   val a = { class B { def bar = 5 }; class C extends B; new C }
diff --git a/test/files/run/t4172.check b/test/files/run/t4172.check
index b48c9ca..d94638d 100644
--- a/test/files/run/t4172.check
+++ b/test/files/run/t4172.check
@@ -1,12 +1,8 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
-scala>     val c = { class C { override def toString = "C" }; ((new C, new C { def f = 2 })) }
+scala> val c = { class C { override def toString = "C" }; ((new C, new C { def f = 2 })) }
 warning: there were 1 feature warning(s); re-run with -feature for details
 c: (C, C{def f: Int}) forSome { type C <: AnyRef } = (C,C)
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t4238/J_1.java b/test/files/run/t4238/J_1.java
index 21a4b18..47a9c21 100644
--- a/test/files/run/t4238/J_1.java
+++ b/test/files/run/t4238/J_1.java
@@ -3,14 +3,14 @@ import scala.*;
 class J_1 {
 	scala.collection.mutable.HashMap<String, String> map =
 		new scala.collection.mutable.HashMap<String, String>();
-	
+
 	Function1<Tuple2<String, String>, Integer> f =
 	  new scala.runtime.AbstractFunction1<Tuple2<String, String>, Integer>() {
 	    public Integer apply(Tuple2<String, String> s) {
 	      return s._1().length();
 	    }
     };
-		
+
 	scala.collection.Seq<Integer> counts =
 	  map.groupBy(f).keys().toList();
 }
diff --git a/test/files/run/t4285.check b/test/files/run/t4285.check
index 84f8929..314c8e5 100644
--- a/test/files/run/t4285.check
+++ b/test/files/run/t4285.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> val x = Array(1,2,3,4,5,6,7)
 x: Array[Int] = Array(1, 2, 3, 4, 5, 6, 7)
 
@@ -13,5 +11,3 @@ scala> println(y.sum)
 56
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t4287inferredMethodTypes.check b/test/files/run/t4287inferredMethodTypes.check
new file mode 100644
index 0000000..56e9c09
--- /dev/null
+++ b/test/files/run/t4287inferredMethodTypes.check
@@ -0,0 +1,30 @@
+[[syntax trees at end of                     typer]] // newSource1.scala
+[0:92]package [0:0]<empty> {
+  [0:21]class A extends [7:21][23]scala.AnyRef {
+    [8:16]<paramaccessor> private[this] val a: [8]Int = _;
+    <8:20>def <init>(<8:20>a: [11]<type: [11]scala.Int> = [17:20]A.a): [7]A = <8:20>{
+      <8:20><8:20><8:20>A.super.<init>();
+      <8:20>()
+    }
+  };
+  [23:47]object A extends [32:47][49]scala.AnyRef {
+    [49]def <init>(): [32]A.type = [49]{
+      [49][49][49]A.super.<init>();
+      [32]()
+    };
+    [36:45]private[this] val a: [40]Int = [44:45]2;
+    [40]<stable> <accessor> def a: [40]Int = [40][40]A.this.a;
+    [8]<synthetic> def <init>$default$1: [8]Int = [19]A.a
+  };
+  [49:92]class B extends [57:92][65:66]A {
+    [65]def <init>(): [57]B = [65]{
+      [65][65][65]B.super.<init>([65]A.<init>$default$1);
+      [57]()
+    };
+    [70:90]def <init>([79:80]a: [79]Int): [74]B = [84:90]{
+      [84:90][84:90][84]B.this.<init>();
+      [84]()
+    }
+  }
+}
+
diff --git a/test/files/run/t4287inferredMethodTypes.scala b/test/files/run/t4287inferredMethodTypes.scala
new file mode 100644
index 0000000..f14e672
--- /dev/null
+++ b/test/files/run/t4287inferredMethodTypes.scala
@@ -0,0 +1,25 @@
+import scala.tools.partest.DirectTest
+
+object Test extends DirectTest {
+
+  override def extraSettings: String =
+    s"-usejavacp -Yinfer-argument-types -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path}"
+
+  override def code = """
+class A(a: Int = A.a)
+
+object A {
+  val a = 2
+}
+
+class B extends A {
+ def this(a) = this()
+}
+  """.trim
+
+  override def show(): Unit = {
+    Console.withErr(System.out) {
+      compile()
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/t4288.scala b/test/files/run/t4288.scala
index eafd280..23319d1 100644
--- a/test/files/run/t4288.scala
+++ b/test/files/run/t4288.scala
@@ -1,12 +1,12 @@
 object Test {
   def f1 = scala.collection.mutable.ListBuffer(1 to 9: _*).slice(-5, -1)
-  def f2 = scala.collection.mutable.ListBuffer(1 to 9: _*).readOnly.slice(-5, -1)
+  def f2 = List(1 to 9: _*).slice(-5, -1)
   def f3 = Vector(1 to 9: _*).slice(-5, -1)
   def f4 = Traversable(1 to 9: _*).slice(-5, -1)
   def f5 = (1 to 9).toArray.slice(-5, -1)
   def f6 = (1 to 9).toStream.slice(-5, -1)
   def f7 = (1 to 9).slice(-5, -1)
-  
+
   def main(args: Array[String]): Unit = {
     List[Traversable[Int]](f1, f2, f3, f4, f5, f6, f7) foreach (x => assert(x.isEmpty, x))
   }
diff --git a/test/files/run/t4294.scala b/test/files/run/t4294.scala
index fafaf1d..e15c716 100644
--- a/test/files/run/t4294.scala
+++ b/test/files/run/t4294.scala
@@ -1,7 +1,12 @@
 object Test {
   def main(args: Array[String]) {
+    // Skip test on Avian, see SI-7600 for further information
+    if (!scala.tools.partest.utils.Properties.isAvian)
+      run()
+  }
+
+  def run(): Unit = {
     (Stream.from(1).collect{case x if x > 5000000 => x}: Stream[Int])
-    
     assert((Stream from 1 take 10 collect { case x if x <= 3 => x*x }).sum == 14)
   }
 }
diff --git a/test/files/run/t4317/S_3.scala b/test/files/run/t4317/S_3.scala
index dd04ea3..ce8e233 100644
--- a/test/files/run/t4317/S_3.scala
+++ b/test/files/run/t4317/S_3.scala
@@ -1,4 +1,4 @@
-object Test {	
+object Test {
   def main(args: Array[String]): Unit = {
     val j = new J_2()
 	  println(j.bar1())
diff --git a/test/files/run/t4332.check b/test/files/run/t4332.check
new file mode 100644
index 0000000..ff9d9b8
--- /dev/null
+++ b/test/files/run/t4332.check
@@ -0,0 +1,25 @@
+
+======================================================================
+Checking scala.collection.TraversableView
+======================================================================
+
+
+======================================================================
+Checking scala.collection.IterableView
+======================================================================
+
+
+======================================================================
+Checking scala.collection.SeqView
+======================================================================
+
+
+======================================================================
+Checking scala.collection.mutable.IndexedSeqView
+======================================================================
+
+
+======================================================================
+Checking scala.collection.immutable.StreamView
+======================================================================
+
diff --git a/test/files/run/t4332.scala b/test/files/run/t4332.scala
new file mode 100644
index 0000000..5a67922
--- /dev/null
+++ b/test/files/run/t4332.scala
@@ -0,0 +1,44 @@
+import scala.tools.partest._
+
+object Test extends DirectTest {
+  override def code = ""
+  lazy val global = newCompiler("-usejavacp")
+  import global._, definitions._
+
+  override def show() {
+    new global.Run()
+    // Once we plug all of the view gaps, the output should be empty!
+    checkViews()
+  }
+
+  def isExempt(sym: Symbol) = {
+    val exempt = Set("view", "repr", "sliceWithKnownDelta", "sliceWithKnownBound", "transform")
+    (exempt contains sym.name.decoded)
+  }
+
+  def checkView(viewType: Type, viewLikeType: Type) {
+    val sep = "=" * 70
+    println(s"\n$sep\nChecking ${viewType.typeSymbol.fullName}\n$sep")
+    val termMembers = viewType.nonPrivateMembers.toList filter (_.isTerm) map fullyInitializeSymbol
+    val inheritedFromGenericCollection
+      = termMembers filterNot (_.owner.name.decoded contains "ViewLike") filterNot (_.owner == viewType.typeSymbol)
+    def returnsView(sym: Symbol) = viewType.memberType(sym).finalResultType contains viewType.typeSymbol
+    val needOverride = inheritedFromGenericCollection filterNot isExempt filter returnsView
+
+    val grouped = needOverride.groupBy(_.owner).toSeq.sortBy { case (owner, _) => viewType baseTypeIndex owner }
+    val report = grouped.map {
+      case (owner, syms) => s"\n$owner\n${"-" * 70}\n${syms.map(_.defString).sorted.mkString("\n")}"
+    }.mkString("\n")
+    println(report)
+  }
+
+  def checkViews() {
+    import collection._
+    checkView(typeOf[TraversableView[_, _]],        typeOf[TraversableViewLike[_, _, _]])
+    checkView(typeOf[IterableView[_, _]],           typeOf[IterableViewLike[_, _, _]])
+    checkView(typeOf[SeqView[_, _]],                typeOf[SeqViewLike[_, _, _]])
+    checkView(typeOf[mutable.IndexedSeqView[_, _]], typeOf[SeqViewLike[_, _, _]])
+    checkView(typeOf[immutable.StreamView[_, _]],   typeOf[immutable.StreamViewLike[_, _, _]])
+    // Parallel views not checked, assuming we will drop them in 2.11
+  }
+}
diff --git a/test/files/run/t4332b.scala b/test/files/run/t4332b.scala
new file mode 100644
index 0000000..8ee069c
--- /dev/null
+++ b/test/files/run/t4332b.scala
@@ -0,0 +1,35 @@
+object Test extends App {
+  def check(expected: Any, actual: Any, msg: String = "") = {
+    if (expected != actual)
+      sys.error(s"($actual != $expected) $msg")
+  }
+  val ls = List(1, 3, 2, 1)
+  for (N <- -1 to (ls.length + 1)) {
+    check(ls.takeRight(N), ls.view.takeRight(N).toList, s"takeRight($N)")
+    check(ls.dropRight(N), ls.view.dropRight(N).toList, s"dropRight($N)")
+  }
+  for (N <- 1 to (ls.length + 1)) {
+    check(ls.sliding(N).toList, ls.view.sliding(N).toList.map(_.toList), s"sliding($N)")
+    check(ls.sliding(N, 2).toList, ls.view.sliding(N, 2).toList.map(_.toList), s"sliding($N, 2)")
+  }
+  for (b <- List(true, false))
+    check(ls.filterNot(x => true), ls.view.filterNot(x => true), s"filterNot($b)")
+
+  check(ls.inits.toList, ls.view.inits.toList.map(_.toList), "inits")
+  check(ls.tails.toList, ls.view.tails.toList.map(_.toList), "tails")
+
+  check(ls.combinations(2).toList.map(_.toList), ls.view.combinations(2).toList.map(_.toList), "combinations(2)")
+  check(ls.permutations.toList.map(_.toList), ls.view.permutations.toList.map(_.toList), "permutations")
+
+  check(ls.sortBy(_ * -1), ls.view.sortBy(_ * -1).toList, "sortBy")
+  check(ls.sortWith((x, y) => y < x), ls.view.sortWith((x, y) => y < x).toList, "sortWith")
+  check(ls.sorted, ls.view.sorted.toList, "sorted")
+
+  check(ls.distinct, ls.view.distinct.toList, "distinct")
+
+  check(ls.tail, ls.view.tail.toList, "tail")  
+
+  import collection.mutable.Buffer
+  check(Buffer(1, 2, 3).tail, Buffer(1, 2, 3).view.tail.toList, "Buffer#tail")  
+  check(Buffer(1, 2, 3).tail.length, Buffer(1, 2, 3).view.tail.length, "Buffer#tail#length")  
+}
diff --git a/test/files/pos/t4351.check b/test/files/run/t4351.check
similarity index 100%
rename from test/files/pos/t4351.check
rename to test/files/run/t4351.check
diff --git a/test/files/run/t4351.scala b/test/files/run/t4351.scala
new file mode 100644
index 0000000..d954d74
--- /dev/null
+++ b/test/files/run/t4351.scala
@@ -0,0 +1,21 @@
+object Test {
+  def main(args: Array[String]): Unit = {
+    try new BooleanPropImpl().value
+    catch {
+      // was: StackOverflowError
+      case e: RuntimeException => println("runtime exception")
+    }
+  }
+}
+
+trait Prop[@specialized(Boolean) +T] {
+  def value: T
+}
+
+class PropImpl[+T] extends Prop[T] {
+  def value: T = scala.sys.error("")
+}
+
+trait BooleanProp extends Prop[Boolean]
+
+class BooleanPropImpl() extends PropImpl[Boolean] with BooleanProp
diff --git a/test/files/run/t4387.scala b/test/files/run/t4387.scala
deleted file mode 100644
index 68cbe97..0000000
--- a/test/files/run/t4387.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-object Test {
-  import xml.XML.loadString
-  def mkElem(arg: String) = <foo a="1" b="2" c="3" d="4" e={arg} />
-  
-  val x1 = mkElem("5")
-  val x2 = mkElem("50")
-
-  def main(args: Array[String]): Unit = {    
-    assert(x1 == loadString("" + x1))
-    assert(x2 != loadString("" + x1))
-  }
-}
diff --git a/test/files/run/t4396.check b/test/files/run/t4396.check
index 58f4fc5..a75e1f2 100644
--- a/test/files/run/t4396.check
+++ b/test/files/run/t4396.check
@@ -1,3 +1,4 @@
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
 hallo
 constructor
 out:22
diff --git a/test/files/run/t4396.scala b/test/files/run/t4396.scala
index ca14316..d67eaa3 100644
--- a/test/files/run/t4396.scala
+++ b/test/files/run/t4396.scala
@@ -1,4 +1,4 @@
-// #43896 
+// #43896
 trait M extends DelayedInit {
     def delayedInit(body : => Unit) {
         println("hallo")
diff --git a/test/files/run/t4398.scala b/test/files/run/t4398.scala
index 1d57eb6..87dc870 100644
--- a/test/files/run/t4398.scala
+++ b/test/files/run/t4398.scala
@@ -1,5 +1,7 @@
 
 
+import scala.language.{ postfixOps }
+
 object Test {
   def main(args: Array[String]) {
     val x = 1 to 10 toSet
diff --git a/test/files/run/t4415.scala b/test/files/run/t4415.scala
index f96031d..caf1609 100644
--- a/test/files/run/t4415.scala
+++ b/test/files/run/t4415.scala
@@ -39,7 +39,7 @@ class SecondProperty extends TopProperty
 class SubclassSecondProperty extends StandardProperty
 
 trait MyProp[T]
-case class MyPropImpl[T] extends MyProp[T]
+case class MyPropImpl[T]() extends MyProp[T]
 
 object SubclassMatch {
 
diff --git a/test/files/run/t4426.scala b/test/files/run/t4426.scala
index 9575944..1cbd42d 100644
--- a/test/files/run/t4426.scala
+++ b/test/files/run/t4426.scala
@@ -5,7 +5,7 @@ object Test {
       val settings = new Settings()
       settings.classpath.value = System.getProperty("java.class.path")
 
-    object cc extends Global(settings) { 
+    object cc extends Global(settings) {
       object dummy
 
       override def computePluginPhases() = {
@@ -16,9 +16,9 @@ object Test {
     new cc.Run
     ()
   }
-  
+
   def main(args: Array[String]): Unit = {
-    
+
   }
 }
 
diff --git a/test/files/run/t4461.check b/test/files/run/t4461.check
index e9c01e7..9488669 100644
--- a/test/files/run/t4461.check
+++ b/test/files/run/t4461.check
@@ -1,3 +1,4 @@
+warning: there were 4 deprecation warning(s); re-run with -deprecation for details
 Include(End,1)
 Include(End,2)
 Include(End,3)
@@ -8,4 +9,4 @@ Include(End,7)
 Script([1] Include(Index(7),8), [2] Include(Index(8),9), [3] Include(Index(9),10))
 Include(Start,0)
 Script([1] Include(Index(0),-2), [2] Include(Index(1),-1))
-Remove(Index(0),-2)
\ No newline at end of file
+Remove(Index(0),-2)
diff --git a/test/files/run/t4461.scala b/test/files/run/t4461.scala
index adc9201..1801a65 100644
--- a/test/files/run/t4461.scala
+++ b/test/files/run/t4461.scala
@@ -9,7 +9,7 @@ object Test {
     buf.subscribe(new Subscriber[Message[Int], ObservableBuffer[Int]] {
       def notify(pub: ObservableBuffer[Int], event: Message[Int]) = println(event)
     })
-    
+
     buf += 1 // works
     buf ++= Array(2) // works
     buf ++= ArrayBuffer(3, 4) // works
diff --git a/test/files/run/t4535.scala b/test/files/run/t4535.scala
index eba7943..91c13a2 100644
--- a/test/files/run/t4535.scala
+++ b/test/files/run/t4535.scala
@@ -5,26 +5,26 @@ import collection._
 
 // #4535
 object Test {
-  
+
   def main(args: Array[String]) {
     val as = new mutable.ArrayStack[Int]
     as push 1
     as push 2
     as push 3
     println(as.reverse)
-    
+
     as push 4
     as push 5
     as push 6
     println(as.reverse)
-    
+
     println(as map { x => x })
-    
+
     for (i <- 0 until 100) {
       as push i
       assert(as == as.map(x => x))
       assert(as == as.reverse.reverse)
     }
   }
-  
+
 }
diff --git a/test/files/run/t4536.scala b/test/files/run/t4536.scala
index acd91de..6661eae 100644
--- a/test/files/run/t4536.scala
+++ b/test/files/run/t4536.scala
@@ -37,10 +37,10 @@ object dynamicMixin extends dynamicAbstractClass with dynamicTrait {
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     val cls = new dynamicClass
     dynamicMixin
   }
-  
+
 }
diff --git a/test/files/run/t4537.check b/test/files/run/t4537.check
new file mode 100644
index 0000000..63739ca
--- /dev/null
+++ b/test/files/run/t4537.check
@@ -0,0 +1 @@
+b.Settings
diff --git a/test/files/run/t4537/a.scala b/test/files/run/t4537/a.scala
new file mode 100644
index 0000000..125e223
--- /dev/null
+++ b/test/files/run/t4537/a.scala
@@ -0,0 +1,5 @@
+package a
+
+private[a] object Settings {
+  val X = "a.Settings"
+}
diff --git a/test/files/run/t4537/b.scala b/test/files/run/t4537/b.scala
new file mode 100644
index 0000000..c709d49
--- /dev/null
+++ b/test/files/run/t4537/b.scala
@@ -0,0 +1,5 @@
+package b
+
+object Settings {
+  val Y = "b.Settings"
+}
diff --git a/test/files/run/t4537/c.scala b/test/files/run/t4537/c.scala
new file mode 100644
index 0000000..ee05d4b
--- /dev/null
+++ b/test/files/run/t4537/c.scala
@@ -0,0 +1,8 @@
+package b
+package c
+
+import a._
+
+object Unambiguous {
+  println(Settings.Y)
+}
diff --git a/test/files/run/t4537/d.scala b/test/files/run/t4537/d.scala
new file mode 100644
index 0000000..dd1d204
--- /dev/null
+++ b/test/files/run/t4537/d.scala
@@ -0,0 +1,6 @@
+import a._
+import b._
+
+object Test extends App {
+  println(Settings.Y)
+}
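
Aside, not part of the patch: d.scala above compiles because a.Settings is private[a], so the wildcard import of a._ brings in no accessible Settings and the reference unambiguously resolves to b.Settings, matching the expected output. If both objects were public, the two wildcard imports would have equal precedence and the name would be ambiguous; a hypothetical sketch, shown as comments because it would not compile:

    // import a._
    // import b._
    // object Ambiguous { println(Settings.Y) }
    // scalac reports something like: reference to Settings is ambiguous;
    // it is imported twice in the same scope by import b._ and import a._
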
diff --git a/test/files/run/t4542.check b/test/files/run/t4542.check
index 5c293a8..a53f31a 100644
--- a/test/files/run/t4542.check
+++ b/test/files/run/t4542.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> @deprecated("foooo", "ReplTest version 1.0-FINAL") class Foo() {
   override def toString = "Bippy"
 }
@@ -15,5 +13,3 @@ scala> val f = new Foo
 f: Foo = Bippy
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t4560.scala b/test/files/run/t4560.scala
index 9979199..ee657e4 100644
--- a/test/files/run/t4560.scala
+++ b/test/files/run/t4560.scala
@@ -7,6 +7,9 @@
 // TEST 1
 // self-type is other trait
 
+
+import scala.language.{ reflectiveCalls }
+
 trait Aa
 trait Ab
 
diff --git a/test/files/run/t4570.scala b/test/files/run/t4570.scala
index 5408caf..5e1a20c 100644
--- a/test/files/run/t4570.scala
+++ b/test/files/run/t4570.scala
@@ -1,8 +1,8 @@
 object Test extends Enumeration {
   val foo = Value
   def bar = withName("foo")
-  
+
   def main(args: Array[String]): Unit = {
-    values foreach println 
+    values foreach println
   }
 }
diff --git a/test/files/run/t4574.check b/test/files/run/t4574.check
deleted file mode 100644
index a4522ff..0000000
--- a/test/files/run/t4574.check
+++ /dev/null
@@ -1,2 +0,0 @@
-I hereby refute null!
-I denounce null as unListLike!
diff --git a/test/files/run/t4577.scala b/test/files/run/t4577.scala
new file mode 100644
index 0000000..b08100d
--- /dev/null
+++ b/test/files/run/t4577.scala
@@ -0,0 +1,38 @@
+object Test {
+  val bippy    = new Symbol("bippy")
+  val imposter = new Symbol("bippy")
+  val notBippy = new Symbol("not-bippy")
+  val syms = List(bippy, imposter, notBippy)
+
+  // the equals method should only be used for case `bippy`,
+  // for the singleton type pattern, case _: bippy.type, the spec mandates `bippy eq _` as the test
+  class Symbol(val name: String) {
+    override def equals(other: Any) = other match {
+      case x: Symbol  => name == x.name
+      case _          => false
+    }
+    override def toString = name
+  }
+
+  // TODO: test bytecode equality for f and fDirect (and g and gDirect),
+  // for now the optimizer doesn't quite get from `f` to `fDirect`
+  def f(s: Symbol) = s match {
+    case _: bippy.type  => true
+    case _              => false
+  }
+  def fDirect(s: Symbol) = bippy eq s
+
+  def g(s: Symbol) = s match {
+    case _: bippy.type => 1
+    case `bippy`       => 2
+    case _             => 3
+  }
+  def gDirect(s: Symbol) = if (bippy eq s) 1 else if (bippy == s) 2 else 3
+  
+  def main(args: Array[String]): Unit = {
+    // `syms map f` should be: true false false
+    assert(syms forall (s => f(s) == fDirect(s)))
+    // `syms map g` should be: 1 2 3
+    assert(syms forall (s => g(s) == gDirect(s)))
+  }
+}
\ No newline at end of file
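
Aside, not part of the patch: the comment in this test refers to the rule that a singleton type pattern such as case _: bippy.type matches by reference identity (effectively bippy eq s), while a stable identifier pattern like case `bippy` matches with ==. A small standalone sketch of the difference (names are made up):

    object SingletonPatternSketch {
      class Sym(val name: String) {
        override def equals(other: Any) = other match {
          case that: Sym => name == that.name
          case _         => false
        }
      }
      val canonical = new Sym("bippy")
      val lookalike = new Sym("bippy")          // equal to canonical, but a different object

      def classify(s: Sym) = s match {
        case _: canonical.type => "same reference"   // roughly `canonical eq s`
        case `canonical`       => "equal via =="     // roughly `canonical == s`
        case _                 => "unrelated"
      }

      def main(args: Array[String]): Unit = {
        println(classify(canonical))            // same reference
        println(classify(lookalike))            // equal via ==
        println(classify(new Sym("zilch")))     // unrelated
      }
    }
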
diff --git a/test/files/run/t4594-repl-settings.scala b/test/files/run/t4594-repl-settings.scala
new file mode 100644
index 0000000..d233546
--- /dev/null
+++ b/test/files/run/t4594-repl-settings.scala
@@ -0,0 +1,26 @@
+
+import scala.tools.partest.SessionTest
+
+// Detected repl transcript paste: ctrl-D to finish.
+object Test extends SessionTest {
+  def session =
+""" |Type in expressions to have them evaluated.
+    |Type :help for more information.
+    |
+    |scala> @deprecated(message="Please don't do that.", since="Time began.") def depp = "john"
+    |depp: String
+    |
+    |scala> def a = depp
+    |warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+    |a: String
+    |
+    |scala> :settings +deprecation
+    |
+    |scala> def b = depp
+    |<console>:8: warning: method depp is deprecated: Please don't do that.
+    |       def b = depp
+    |               ^
+    |b: String
+    |
+    |scala> """
+}
diff --git a/test/files/run/t4608.scala b/test/files/run/t4608.scala
index 2d43beb..9ffcb0f 100644
--- a/test/files/run/t4608.scala
+++ b/test/files/run/t4608.scala
@@ -1,8 +1,8 @@
 // #4608
 object Test {
-  
+
   def main(args: Array[String]) {
     ((1 to 100) sliding 10).toList.par.map{_.map{i => i * i}}.flatten
   }
-  
+
 }
diff --git a/test/files/run/t4617.scala b/test/files/run/t4617.scala
index 3a877c1..2fea5e29e 100644
--- a/test/files/run/t4617.scala
+++ b/test/files/run/t4617.scala
@@ -3,7 +3,7 @@ object Test {
   def f2 = {
     lazy val d = 4D
     lazy val f = 4f
-    
+
     def bar = "Str " + (d + f)
     bar
   }
diff --git a/test/files/run/t4656.scala b/test/files/run/t4656.scala
index ab3e3cf..4f3d189 100644
--- a/test/files/run/t4656.scala
+++ b/test/files/run/t4656.scala
@@ -6,7 +6,7 @@ object Test {
     buf prependToList List(4, 5, 6)
     l
   }
-  
+
   def main(args: Array[String]): Unit = {
     println(f)
   }
diff --git a/test/files/run/t4660.scala b/test/files/run/t4660.scala
index e57bb4b..9aac10d 100644
--- a/test/files/run/t4660.scala
+++ b/test/files/run/t4660.scala
@@ -3,7 +3,7 @@ object Test {
     val traversable = 1 to 20 map (_.toString)
     def normalize(m: Map[Char, Traversable[String]]) = m.map { case (k,v) => (k, v.toList) }
 
-    val groupedFromView   = (traversable view).groupBy(_(0))
+    val groupedFromView   = traversable.view.groupBy(_(0))
     val groupedFromStrict = traversable.groupBy(_(0))
 
     assert(normalize(groupedFromView) == normalize(groupedFromStrict))
diff --git a/test/files/run/t4671.check b/test/files/run/t4671.check
index 4699818..0c36083 100644
--- a/test/files/run/t4671.check
+++ b/test/files/run/t4671.check
@@ -2,7 +2,7 @@ Type in expressions to have them evaluated.
 Type :help for more information.
 
 scala> object o { val file = sys.props("partest.cwd") + "/t4671.scala" }
-defined module o
+defined object o
 
 scala> val s = scala.io.Source.fromFile(o.file)
 s: scala.io.BufferedSource = non-empty iterator
@@ -10,7 +10,7 @@ s: scala.io.BufferedSource = non-empty iterator
 scala> println(s.getLines.mkString("\n"))
 import scala.tools.partest.ReplTest
 
-object Test extends ReplTest {  
+object Test extends ReplTest {
   // My god...it's full of quines
   def code = """
 object o { val file = sys.props("partest.cwd") + "/t4671.scala" }
@@ -30,7 +30,7 @@ s: scala.io.BufferedSource = non-empty iterator
 scala> println(s.mkString(""))
 import scala.tools.partest.ReplTest
 
-object Test extends ReplTest {  
+object Test extends ReplTest {
   // My god...it's full of quines
   def code = """
 object o { val file = sys.props("partest.cwd") + "/t4671.scala" }
diff --git a/test/files/run/t4671.scala b/test/files/run/t4671.scala
index aba0138..6170104 100644
--- a/test/files/run/t4671.scala
+++ b/test/files/run/t4671.scala
@@ -1,6 +1,6 @@
 import scala.tools.partest.ReplTest
 
-object Test extends ReplTest {  
+object Test extends ReplTest {
   // My god...it's full of quines
   def code = """
 object o { val file = sys.props("partest.cwd") + "/t4671.scala" }
diff --git a/test/files/run/t4680.check b/test/files/run/t4680.check
index b5cfc65..512bfd4 100644
--- a/test/files/run/t4680.check
+++ b/test/files/run/t4680.check
@@ -1,3 +1,10 @@
+t4680.scala:51: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    new C { 5 }
+            ^
+t4680.scala:69: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    new { val x = 5 } with E() { 5 }
+                                 ^
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
 
 
 // new C { }
diff --git a/test/files/run/t4680.scala b/test/files/run/t4680.scala
index 88611df..d5c8d0e 100644
--- a/test/files/run/t4680.scala
+++ b/test/files/run/t4680.scala
@@ -43,13 +43,13 @@ class E() extends D() {
 
 object Test {
   def p(msg: String) = println("\n\n// " + msg)
-  
+
   def main(args: Array[String]) {
     p("new C { }")
     new C { }
     p("new C { 5 }")
     new C { 5 }
-    
+
     p("new D()")
     new D()
     p("new D() { }")
diff --git a/test/files/run/t4697.scala b/test/files/run/t4697.scala
index 728d095..9559217 100644
--- a/test/files/run/t4697.scala
+++ b/test/files/run/t4697.scala
@@ -1,7 +1,7 @@
 object Test {
   var st = Stream(0)
   for (i <- 1 to 10000) st = i +: st
-  
+
   def main(args: Array[String]): Unit = {
     println(st.take(10000).sum)
   }
diff --git a/test/files/run/t4729/S_2.scala b/test/files/run/t4729/S_2.scala
index e34e3d3..f823433 100644
--- a/test/files/run/t4729/S_2.scala
+++ b/test/files/run/t4729/S_2.scala
@@ -1,3 +1,4 @@
+import scala.language.reflectiveCalls
  // Scala class:
 class ScalaVarArgs extends J_1 {
   // -- no problem on overriding it using ordinary class
@@ -20,7 +21,7 @@ object Test {
     (new ScalaVarArgs).method("1", "2")
     (new ScalaVarArgs: J_1).method("1", "2")
 
-    //[4] Not Ok -- error when assigning anonymous class to a explictly typed val
+    //[4] Not Ok -- error when assigning anonymous class to an explicitly typed val
     // Compiler error:  object creation impossible, since method method in trait VarArgs of type (s: <repeated...>[java.lang.String])Unit is not defined
     val tagged: J_1 = new J_1 {
       def method(s: String*) { println(s) }
diff --git a/test/files/run/t4742.flags b/test/files/run/t4742.flags
new file mode 100644
index 0000000..ae08446
--- /dev/null
+++ b/test/files/run/t4742.flags
@@ -0,0 +1 @@
+-Xcheckinit
\ No newline at end of file
diff --git a/test/files/run/t4742.scala b/test/files/run/t4742.scala
new file mode 100644
index 0000000..3b42c0c
--- /dev/null
+++ b/test/files/run/t4742.scala
@@ -0,0 +1,7 @@
+trait T { val x: Int = 0 }
+object O extends T { override final val x = 1 }
+
+object Test extends App {
+  // was throwing an UninitializedFieldError as constant 1 is folded into the accessor
+  assert((O: T).x == 1)
+}
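
Aside, not part of the patch: the .flags file added above turns on -Xcheckinit, which makes val accessors throw scala.UninitializedFieldError when a field is read before its initializer has run; the test asserts that the constant-folded accessor for x no longer trips that check. A sketch of the kind of bug -Xcheckinit is meant to surface (hypothetical class; it only fails when compiled with -Xcheckinit):

    class Early {
      def doubled = x * 2   // reads x through its accessor
      val y = doubled       // runs during construction, before x is assigned
      val x = 21
    }

    object CheckinitSketch {
      def main(args: Array[String]): Unit =
        try { new Early; println("constructed without complaint") }
        catch { case e: UninitializedFieldError => println("caught: " + e.getMessage) }
    }
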
diff --git a/test/files/run/si4750.check b/test/files/run/t4750.check
similarity index 100%
rename from test/files/run/si4750.check
rename to test/files/run/t4750.check
diff --git a/test/files/run/si4750.scala b/test/files/run/t4750.scala
similarity index 100%
rename from test/files/run/si4750.scala
rename to test/files/run/t4750.scala
diff --git a/test/files/run/t4752.scala b/test/files/run/t4752.scala
index 2a9fc53..3d5c166 100644
--- a/test/files/run/t4752.scala
+++ b/test/files/run/t4752.scala
@@ -2,8 +2,8 @@ object Test {
   object Bippy {
     case object Dingus
   }
-  
-  def main(args: Array[String]): Unit = {    
+
+  def main(args: Array[String]): Unit = {
     assert(None.## == "None".##, None)
     assert(Test.Bippy.Dingus.## == "Dingus".##, Test.Bippy.Dingus)
   }
diff --git a/test/files/run/t4753.scala b/test/files/run/t4753.scala
index 98f3e92..cfb252c 100644
--- a/test/files/run/t4753.scala
+++ b/test/files/run/t4753.scala
@@ -1,4 +1,4 @@
-trait A { 
+trait A {
   val actualType: Class[_]
 }
 trait B extends A {
diff --git a/test/files/run/t4761.scala b/test/files/run/t4761.scala
index a9c245d..e4bd87c 100644
--- a/test/files/run/t4761.scala
+++ b/test/files/run/t4761.scala
@@ -3,7 +3,7 @@ object Test {
     val gs = for (x <- (1 to 5)) yield { if (x % 2 == 0) List(1).seq else List(1).par }
     println(gs.flatten)
     println(gs.transpose)
-    
+
     val s = Stream(Vector(1).par, Vector(2).par)
     println(s.flatten.toList)
     println(s.transpose.map(_.toList).toList)
diff --git a/test/files/run/t4766.scala b/test/files/run/t4766.scala
index c2a864d..d67431f 100644
--- a/test/files/run/t4766.scala
+++ b/test/files/run/t4766.scala
@@ -1,3 +1,7 @@
+
+import scala.language.postfixOps
+import scala.language.reflectiveCalls
+
 object Test extends App {
   val x = new {
     def > = 1
diff --git a/test/files/run/t4777.scala b/test/files/run/t4777.scala
index 4a811d3..6c7b856 100644
--- a/test/files/run/t4777.scala
+++ b/test/files/run/t4777.scala
@@ -3,6 +3,6 @@ class DefaultsTest(x: Int = 25) extends A(28)
 object DefaultsTest extends DefaultsTest(12)
 
 object Test extends App {
-  println(new DefaultsTest() a)
-  println(DefaultsTest a)
+  println(new DefaultsTest().a)
+  println(DefaultsTest.a)
 }
diff --git a/test/files/run/t4794.scala b/test/files/run/t4794.scala
index afe89fa..720906f 100644
--- a/test/files/run/t4794.scala
+++ b/test/files/run/t4794.scala
@@ -7,6 +7,7 @@ class Arr[@specialized A](val arr: Array[A]) {
 
 object Test {
   def main(args: Array[String]): Unit = {
-    println(classOf[Arr[_]].getMethods filter (_.getName contains "quux") size) // expect 10, not 1
+    def quuxae = classOf[Arr[_]].getMethods filter (_.getName contains "quux")
+    println(quuxae.size) // expect 10, not 1
   }
 }
diff --git a/test/files/run/t4809.scala b/test/files/run/t4809.scala
index b30d805..4b721d1 100644
--- a/test/files/run/t4809.scala
+++ b/test/files/run/t4809.scala
@@ -5,7 +5,7 @@ import scala.util.control.Breaks._
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     val x = tryBreakable {
       break
@@ -14,14 +14,14 @@ object Test {
       3
     }
     assert(x == 3, x)
-    
+
     val y = tryBreakable {
       2
     } catchBreak {
       3
     }
     assert(y == 2, y)
-    
+
     val z = tryBreakable {
       break
       1.0
@@ -30,5 +30,5 @@ object Test {
     }
     assert(z == 2.0, z)
   }
-  
+
 }
diff --git a/test/files/run/t4813.check b/test/files/run/t4813.check
new file mode 100644
index 0000000..a92ddc0
--- /dev/null
+++ b/test/files/run/t4813.check
@@ -0,0 +1 @@
+warning: there were 2 deprecation warning(s); re-run with -deprecation for details
diff --git a/test/files/run/t4835.scala b/test/files/run/t4835.scala
index c964e42..26275c0 100644
--- a/test/files/run/t4835.scala
+++ b/test/files/run/t4835.scala
@@ -1,6 +1,6 @@
 /*
 * Test case for SI-4835.  This test confirms that the fix
- * doesn't break laziness.  To test memory consumption, 
+ * doesn't break laziness.  To test memory consumption,
  * I need to confirm that OutOfMemoryError doesn't occur.
  * I could create such tests.  However, such tests consume
  * too much time and memory.
diff --git a/test/files/run/t4841-isolate-plugins.check b/test/files/run/t4841-isolate-plugins.check
new file mode 100644
index 0000000..a6462b4
--- /dev/null
+++ b/test/files/run/t4841-isolate-plugins.check
@@ -0,0 +1,2 @@
+My phase name is ploogin1_1
+My phase name is ploogin1_2
diff --git a/test/files/run/t4841-isolate-plugins/ploogin.scala b/test/files/run/t4841-isolate-plugins/ploogin.scala
new file mode 100644
index 0000000..bd8c727
--- /dev/null
+++ b/test/files/run/t4841-isolate-plugins/ploogin.scala
@@ -0,0 +1,30 @@
+
+package t4841
+
+import scala.tools.nsc.{ Global, Phase }
+import scala.tools.nsc.plugins.{ Plugin, PluginComponent }
+import scala.reflect.io.Path
+import scala.reflect.io.File
+
+/** A test plugin.  */
+class Ploogin(val global: Global, val name: String = "ploogin") extends Plugin {
+  import global._
+
+  val description = "A sample plugin for testing."
+  val components = List[PluginComponent](TestComponent)
+
+  private object TestComponent extends PluginComponent {
+    val global: Ploogin.this.global.type = Ploogin.this.global
+    //override val runsBefore = List("refchecks")
+    val runsAfter = List("jvm")
+    val phaseName = Ploogin.this.name
+    override def description = "A sample phase that does so many things it's kind of hard to describe briefly."
+    def newPhase(prev: Phase) = new TestPhase(prev)
+    class TestPhase(prev: Phase) extends StdPhase(prev) {
+      override def description = TestComponent.this.description
+      def apply(unit: CompilationUnit) {
+        if (settings.developer) inform(s"My phase name is $phaseName")
+      }
+    }
+  }
+}
diff --git a/test/files/run/t4841-isolate-plugins/t4841-isolate-plugin.scala b/test/files/run/t4841-isolate-plugins/t4841-isolate-plugin.scala
new file mode 100644
index 0000000..5421922
--- /dev/null
+++ b/test/files/run/t4841-isolate-plugins/t4841-isolate-plugin.scala
@@ -0,0 +1,39 @@
+
+import tools.nsc.plugins.PluginDescription
+import tools.partest.DirectTest
+
+import java.io.File
+
+// show that plugins are on isolated class loaders
+object Test extends DirectTest {
+  override def code = "class Code"
+
+  override def extraSettings = s"-usejavacp"
+
+  // plugin named ploogin1_1 or ploogin1_2, but not ploogin2_x
+  // Although the samples are in different classloaders, the plugin
+  // loader checks for distinctness by class name, so the names must differ.
+  def pluginCode(index: Int) = s"""
+    |package t4841 {
+    |  class SamplePloogin$index(global: scala.tools.nsc.Global) extends Ploogin(global, s"$${PlooginCounter.named}_$index")
+    |  object PlooginCounter {
+    |    val count = new java.util.concurrent.atomic.AtomicInteger
+    |    def named = s"ploogin$${count.incrementAndGet}"
+    |  }
+    |}""".stripMargin.trim
+
+  def compilePlugin(i: Int) = {
+    val out  = (testOutput / s"p$i").createDirectory()
+    val args = Seq("-usejavacp", "-d", out.path)
+    compileString(newCompiler(args: _*))(pluginCode(i))
+    val xml  = PluginDescription(s"p$i", s"t4841.SamplePloogin$i").toXML
+    (out / "scalac-plugin.xml").toFile writeAll xml
+    out
+  }
+
+  override def show() = {
+    val dirs = 1 to 2 map (compilePlugin(_))
+    compile("-Xdev", s"-Xplugin:${dirs mkString ","}", "-usejavacp", "-d", testOutput.path)
+  }
+}
+
diff --git a/test/files/run/t4841-no-plugin.check b/test/files/run/t4841-no-plugin.check
new file mode 100644
index 0000000..4338f0c
--- /dev/null
+++ b/test/files/run/t4841-no-plugin.check
@@ -0,0 +1 @@
+warning: No plugin in path t4841-no-plugin-run.obj/plugins.partest
diff --git a/test/files/run/t4841-no-plugin.scala b/test/files/run/t4841-no-plugin.scala
new file mode 100644
index 0000000..d91bf7e
--- /dev/null
+++ b/test/files/run/t4841-no-plugin.scala
@@ -0,0 +1,17 @@
+
+import tools.partest.DirectTest
+
+import java.io.File
+
+// warn only if no plugin on Xplugin path
+object Test extends DirectTest {
+  override def code = "class Code"
+
+  override def extraSettings = s"-usejavacp -d ${testOutput.path}"
+
+  override def show() = {
+    val tmp = new File(testOutput.jfile, "plugins.partest").getAbsolutePath
+    compile("-Xdev", s"-Xplugin:$tmp", "-Xpluginsdir", tmp)
+  }
+}
+
diff --git a/test/files/run/t4859.check b/test/files/run/t4859.check
new file mode 100644
index 0000000..d329744
--- /dev/null
+++ b/test/files/run/t4859.check
@@ -0,0 +1,8 @@
+Inner
+Inner.i
+About to reference Inner.i
+Outer
+Inner.i
+About to reference O.N
+About to reference O.N
+About to reference O.N.apply()
diff --git a/test/files/run/t4859.scala b/test/files/run/t4859.scala
new file mode 100644
index 0000000..3c20cea
--- /dev/null
+++ b/test/files/run/t4859.scala
@@ -0,0 +1,29 @@
+object O {
+  case class N()
+  object P
+}
+
+object Outer {
+  println("Outer")
+  object Inner {
+    println("Inner")
+    def i {
+      println("Inner.i")
+    }
+  }
+}
+
+object Test {
+  def main(args: Array[String]) {
+    Outer.Inner.i // we still don't initialize Outer here (but should we?)
+
+    {println("About to reference Inner.i"); Outer}.Inner.i // Outer will be initialized.
+
+    {println("About to reference O.N"        ); O}.N
+
+    {println("About to reference O.N"        ); O}.N
+
+    {println("About to reference O.N.apply()"); O}.N.apply()
+  }
+}
+
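
Aside, not part of the patch: per the expected output in t4859.check above, selecting through Outer.Inner runs only Inner's initializer, while using Outer as a value in the block expressions forces Outer itself. A minimal sketch that just makes the ordering visible (illustrative names; the ordering is what the check file records for this compiler, not a general guarantee):

    object Enclosing {
      println("Enclosing initialized")
      object Nested {
        println("Nested initialized")
        def ping() = println("ping")
      }
    }

    object InitOrderSketch {
      def main(args: Array[String]): Unit = {
        Enclosing.Nested.ping()    // per the t4859 expectation, Enclosing is not forced here
        val forced = Enclosing     // using the object as a value forces its initializer
        forced.Nested.ping()
      }
    }
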
diff --git a/test/files/run/t4871.scala b/test/files/run/t4871.scala
index 70d8b71..e25d5c1 100644
--- a/test/files/run/t4871.scala
+++ b/test/files/run/t4871.scala
@@ -5,7 +5,7 @@ object Test {
   def main(args: Array[String]): Unit = {
     val z: Class[C] = classOf
     val z2: Class[D] = classOf[D]
-    
+
     println(z)
     println(z2)
   }
diff --git a/test/files/run/t4894.scala b/test/files/run/t4894.scala
index aa3b434..2b70da1 100644
--- a/test/files/run/t4894.scala
+++ b/test/files/run/t4894.scala
@@ -4,24 +4,24 @@
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     import collection._
     val hs = mutable.HashSet[Int]()
     hs ++= 1 to 10
     hs --= 1 to 10
-    
+
     val phs = parallel.mutable.ParHashSet[Int]()
     phs ++= 1 to 10
     for (i <- 1 to 10) assert(phs(i))
     phs --= 1 to 10
     assert(phs.isEmpty)
-    
+
     val phm = parallel.mutable.ParHashMap[Int, Int]()
     phm ++= ((1 to 10) zip (1 to 10))
     for (i <- 1 to 10) assert(phm(i) == i)
     phm --= 1 to 10
     assert(phm.isEmpty)
   }
-  
+
 }
diff --git a/test/files/run/t4895.scala b/test/files/run/t4895.scala
index a0e8c19..3842f8a 100644
--- a/test/files/run/t4895.scala
+++ b/test/files/run/t4895.scala
@@ -1,16 +1,16 @@
 object Test {
-  
+
   def checkPar(sz: Int) {
     import collection._
     val hs = mutable.HashSet[Int]() ++ (1 to sz)
     assert(hs.par.map(_ + 1).seq.toSeq.sorted == (2 to (sz + 1)))
   }
-  
+
   def main(args: Array[String]) {
     for (i <- 0 until 100) checkPar(i)
     for (i <- 100 until 1000 by 50) checkPar(i)
     for (i <- 1000 until 10000 by 500) checkPar(i)
     for (i <- 10000 until 100000 by 5000) checkPar(i)
   }
-  
+
 }
diff --git a/test/files/run/t4929.check b/test/files/run/t4929.check
deleted file mode 100644
index 0f0c913..0000000
--- a/test/files/run/t4929.check
+++ /dev/null
@@ -1 +0,0 @@
-success
\ No newline at end of file
diff --git a/test/files/run/t4929.scala b/test/files/run/t4929.scala
deleted file mode 100644
index 3208cd1..0000000
--- a/test/files/run/t4929.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-import scala.util.parsing.json._
-import java.util.concurrent._
-import collection.JavaConversions._
-
-object Test extends App {
-
-  val LIMIT = 2000
-  val THREAD_COUNT = 20
-  val count = new java.util.concurrent.atomic.AtomicInteger(0)
-
-  val begin = new CountDownLatch(THREAD_COUNT)
-  val finish = new CountDownLatch(THREAD_COUNT)
-
-  val errors = new ConcurrentLinkedQueue[Throwable]
-
-  (1 to THREAD_COUNT) foreach { i =>
-    val thread = new Thread {
-      override def run() {
-        begin.await(1, TimeUnit.SECONDS)
-        try {
-          while (count.getAndIncrement() < LIMIT && errors.isEmpty) {
-            JSON.parseFull("""{"foo": [1,2,3,4]}""")
-          }
-        } catch {
-          case t: Throwable => errors.add(t)
-        }
-
-        finish.await(10, TimeUnit.SECONDS)
-      }
-    }
-
-    thread.setDaemon(true)
-    thread.start()
-
-  }
-
-
-  errors foreach { throw(_) }
-
-  println("success")
-
-}
diff --git a/test/files/run/t4935.flags b/test/files/run/t4935.flags
index ac14fe5..49d036a 100644
--- a/test/files/run/t4935.flags
+++ b/test/files/run/t4935.flags
@@ -1 +1 @@
--optimize
+-optimize
diff --git a/test/files/run/t4954.scala b/test/files/run/t4954.scala
index b4916e6..f0bb1cc 100644
--- a/test/files/run/t4954.scala
+++ b/test/files/run/t4954.scala
@@ -4,7 +4,7 @@ import collection._
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     val m = scala.collection.mutable.LinkedHashMap("one" -> 1, "two" -> 2, "three" -> 3, "four" -> 4, "five" -> 5)
     val expected = List("one", "two", "three", "four", "five")
@@ -15,7 +15,7 @@ object Test {
     assert(m.keys.drop(3).iterator.toList == expected.drop(3))
     assert(m.keys.drop(4).iterator.toList == expected.drop(4))
     assert(m.keys.drop(5).iterator.toList == expected.drop(5))
-    
+
     val expvals = List(1, 2, 3, 4, 5)
     assert(m.values.iterator.toList == expvals)
     assert(m.values.drop(0).iterator.toList == expvals)
@@ -24,7 +24,7 @@ object Test {
     assert(m.values.drop(3).iterator.toList == expvals.drop(3))
     assert(m.values.drop(4).iterator.toList == expvals.drop(4))
     assert(m.values.drop(5).iterator.toList == expvals.drop(5))
-    
+
     val pred = (x: String) => x.length < 6
     val filtered = m.filterKeys(pred)
     assert(filtered.drop(0).keys.toList == expected.filter(pred))
@@ -32,7 +32,7 @@ object Test {
     assert(filtered.drop(2).keys.toList == expected.filter(pred).drop(2))
     assert(filtered.drop(3).keys.toList == expected.filter(pred).drop(3))
     assert(filtered.drop(4).keys.toList == expected.filter(pred).drop(4))
-    
+
     val mapped = m.mapValues(-_)
     assert(mapped.drop(0).keys.toList == expected)
     assert(mapped.drop(1).keys.toList == expected.drop(1))
@@ -41,5 +41,5 @@ object Test {
     assert(mapped.drop(4).keys.toList == expected.drop(4))
     assert(mapped.drop(5).keys.toList == expected.drop(5))
   }
-  
+
 }
diff --git a/test/files/run/t498.scala b/test/files/run/t498.scala
index b4ede95..5c10e66 100644
--- a/test/files/run/t498.scala
+++ b/test/files/run/t498.scala
@@ -1,3 +1,6 @@
+
+import scala.language.postfixOps
+
 object Test extends App {
 // the function passed to flatMap produces lots of empty streams, but this should not overflow the stack
 	val res = Stream.from(1).flatMap(i => if (i < 3000) Stream.empty else List(1))
diff --git a/test/files/run/t4996.check b/test/files/run/t4996.check
new file mode 100644
index 0000000..8d45b41
--- /dev/null
+++ b/test/files/run/t4996.check
@@ -0,0 +1,4 @@
+B.foo
+M.foo
+B.foo
+M.foo
\ No newline at end of file
diff --git a/test/files/run/t4996.scala b/test/files/run/t4996.scala
new file mode 100644
index 0000000..8a38e7f
--- /dev/null
+++ b/test/files/run/t4996.scala
@@ -0,0 +1,47 @@
+
+
+
+
+
+
+trait A[@specialized(Int) T] {
+  def foo(t: T)
+}
+
+
+trait B extends A[Int] {
+  def foo(t: Int) {
+    println("B.foo")
+  }
+}
+
+
+trait M extends B {
+  abstract override def foo(t: Int) {
+    super.foo(t)
+    println("M.foo")
+  }
+}
+
+
+object C extends B with M
+
+
+object D extends B {
+  override def foo(t: Int) {
+    super.foo(t)
+    println("M.foo")
+  }
+}
+
+
+object Test {
+
+  def main(args: Array[String]) {
+    D.foo(42) // OK, prints B.foo M.foo
+    C.foo(42) // was StackOverflowError
+  }
+
+}
+
+
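
Aside, not part of the patch: the abstract override in M above is the standard stackable-trait pattern; in object C extends B with M the linearization is C -> M -> B -> A, so M.foo's super.foo call resolves to B.foo, which is why the expected output interleaves B.foo and M.foo. A small unspecialized sketch of the same wiring (illustrative names):

    trait Greeter { def greet(name: String): String }
    trait Plain extends Greeter { def greet(name: String) = "hello, " + name }
    trait Shouting extends Greeter {
      abstract override def greet(name: String) = super.greet(name).toUpperCase
    }

    object StackableSketch {
      def main(args: Array[String]): Unit = {
        val g = new Plain with Shouting {}   // Shouting stacks on top of Plain
        println(g.greet("world"))            // prints: HELLO, WORLD
      }
    }
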
diff --git a/test/files/run/t5018.scala b/test/files/run/t5018.scala
index bb67a25..70bd6c5 100644
--- a/test/files/run/t5018.scala
+++ b/test/files/run/t5018.scala
@@ -7,7 +7,7 @@ import collection._
 
 
 object Test {
-  
+
   def serializeDeserialize[T <: AnyRef](obj: T) = {
     val buffer = new ByteArrayOutputStream
     val out = new ObjectOutputStream(buffer)
@@ -15,23 +15,23 @@ object Test {
     val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray))
     in.readObject.asInstanceOf[T]
   }
-  
+
   def main(args: Array[String]) {
     val values = mutable.Map(1 -> 1).values
     assert(serializeDeserialize(values).toList == values.toList)
-    
+
     val keyset = mutable.Map(1 -> 1).keySet
     assert(serializeDeserialize(keyset) == keyset)
-    
+
     val imkeyset = immutable.Map(1 -> 1).keySet
     assert(serializeDeserialize(imkeyset) == imkeyset)
-    
+
     val defaultmap = immutable.Map(1 -> 1).withDefaultValue(1)
     assert(serializeDeserialize(defaultmap) == defaultmap)
-    
+
     val minusmap = mutable.Map(1 -> 1).withDefault(x => -x)
     assert(serializeDeserialize(minusmap) == minusmap)
   }
-  
+
 }
 
diff --git a/test/files/run/si5045.check b/test/files/run/t5045.check
similarity index 100%
rename from test/files/run/si5045.check
rename to test/files/run/t5045.check
diff --git a/test/files/run/t5045.scala b/test/files/run/t5045.scala
new file mode 100644
index 0000000..b0c3a4d
--- /dev/null
+++ b/test/files/run/t5045.scala
@@ -0,0 +1,49 @@
+
+import scala.language.postfixOps
+
+object Test extends App {
+
+ import scala.util.matching.{ Regex, UnanchoredRegex }
+
+ val dateP1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r.unanchored
+ val dateP2 = """(\d\d\d\d)-(\d\d)-(\d\d)""" r ("year", "month", "day") unanchored
+ val dateP3 =  new Regex("""(\d\d\d\d)-(\d\d)-(\d\d)""", "year", "month", "day") with UnanchoredRegex
+
+ val yearStr = "2011"
+ val dateStr = List(yearStr,"07","15").mkString("-")
+
+  def test(msg: String)(strs: Seq[String]): Unit = println("%40s  %s".format(msg, strs mkString " "))
+
+  test("extract an exact match") {
+    val dateP1(y,m,d) = dateStr
+    Seq(List(y,m,d).mkString("-"), dateStr)
+  }
+
+  test("extract from middle of string") {
+    val dateP1(y,m,d) = "Tested on "+dateStr+"."
+    Seq(List(y,m,d).mkString("-"), dateStr)
+  }
+
+  test("extract from middle of string (P2)") {
+    val dateP2(y,m,d) = "Tested on "+dateStr+"."
+    Seq(List(y,m,d).mkString("-"), dateStr)
+  }
+
+  test("extract from middle of string (P3)") {
+    val dateP2(y,m,d) = "Tested on "+dateStr+"."
+    Seq(List(y,m,d).mkString("-"), dateStr)
+  }
+
+  def copyright(in: String): String = in match {
+    case dateP1(year, month, day) => "Copyright "+year
+    case _                        => "No copyright"
+  }
+
+  test("copyright example has date") {
+    Seq(copyright("Date of this document: "+dateStr), "Copyright "+yearStr)
+  }
+
+  test("copyright example missing date") {
+    Seq(copyright("Date of this document: unknown"), "No copyright")
+  }
+}
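
Aside, not part of the patch: the three dateP values above are all unanchored regexes, which is what lets the test extract a date from the middle of a sentence; a plain .r regex used as an extractor has to match the whole input. A minimal sketch of the contrast (assumes the scala.util.matching.Regex API this test exercises):

    import scala.util.matching.Regex

    object UnanchoredSketch extends App {
      val date: Regex  = """(\d\d\d\d)-(\d\d)-(\d\d)""".r
      val dateAnywhere = date.unanchored

      "2011-07-15" match {                    // anchored: the pattern must cover the whole string
        case date(y, m, d) => println("whole string: " + List(y, m, d).mkString("-"))
      }

      "Tested on 2011-07-15." match {         // unanchored: a matching substring is enough
        case dateAnywhere(y, m, d) => println("found inside: " + List(y, m, d).mkString("-"))
        case _                     => println("no date found")
      }
    }
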
diff --git a/test/files/run/t5052.scala b/test/files/run/t5052.scala
deleted file mode 100644
index 9e418e8..0000000
--- a/test/files/run/t5052.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends App {
-  assert(<elem attr={null:String} /> xml_== <elem />)
-  assert(<elem attr={None} /> xml_== <elem />)
-  assert(<elem /> xml_== <elem attr={null:String} />)
-  assert(<elem /> xml_== <elem attr={None} />)
-}
diff --git a/test/files/run/t5053.scala b/test/files/run/t5053.scala
index e46dad5..50057ce 100644
--- a/test/files/run/t5053.scala
+++ b/test/files/run/t5053.scala
@@ -1,20 +1,23 @@
+
+import scala.language.{ existentials }
+
 object Test extends App {
   {
     val (left, right) = Seq((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip
     println(left.isInstanceOf[scala.collection.SeqViewLike[_,_,_]])
     val (l, m, r) = Seq((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3
-    println(l.isInstanceOf[scala.collection.SeqViewLike[_,_,_]])  
+    println(l.isInstanceOf[scala.collection.SeqViewLike[_,_,_]])
   }
   {
     val (left, right) = Iterable((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip
     println(left.isInstanceOf[scala.collection.IterableViewLike[_,_,_]])
     val (l, m, r) = Iterable((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3
-    println(l.isInstanceOf[scala.collection.IterableViewLike[_,_,_]])  
+    println(l.isInstanceOf[scala.collection.IterableViewLike[_,_,_]])
   }
   {
     val (left, right) = Traversable((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip
     println(left.isInstanceOf[scala.collection.TraversableViewLike[_,_,_]])
     val (l, m, r) = Traversable((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3
-    println(l.isInstanceOf[scala.collection.TraversableViewLike[_,_,_]])  
+    println(l.isInstanceOf[scala.collection.TraversableViewLike[_,_,_]])
   }
 }
diff --git a/test/files/run/t5064.check b/test/files/run/t5064.check
index 077006a..61ccfd1 100644
--- a/test/files/run/t5064.check
+++ b/test/files/run/t5064.check
@@ -1,6 +1,6 @@
-[12]            T5064.super.<init>()
-[12]            T5064.super.<init>
-[12]            this
+[53]            T5064.super.<init>()
+[53]            T5064.super.<init>
+[53]            this
 [16:23]         immutable.this.List.apply(scala.this.Predef.wrapIntArray(Array[Int]{1}))
 [16:20]         immutable.this.List.apply
 <16:20>         immutable.this.List
diff --git a/test/files/run/t5072.check b/test/files/run/t5072.check
index 8fe75f5..ddd49c7 100644
--- a/test/files/run/t5072.check
+++ b/test/files/run/t5072.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> class C
 defined class C
 
@@ -10,5 +8,3 @@ scala> Thread.currentThread.getContextClassLoader.loadClass(classOf[C].getName)
 res0: Class[_] = class C
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t5080.scala b/test/files/run/t5080.scala
index ce72d13..acb6167 100644
--- a/test/files/run/t5080.scala
+++ b/test/files/run/t5080.scala
@@ -1,3 +1,7 @@
+
+import scala.language.implicitConversions
+import scala.language.reflectiveCalls
+
 object Test extends App {
 
   abstract class Value {
diff --git a/test/files/run/t5115.scala b/test/files/run/t5115.scala
deleted file mode 100644
index cf25214..0000000
--- a/test/files/run/t5115.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-import scala.collection.Iterable
-
-object Test extends App {
-  def assertHonorsIterableContract(i: Iterable[_]) = assert(i.size == i.iterator.size)
-
-  assertHonorsIterableContract(<a/>.attributes)
-  assertHonorsIterableContract(<a x=""/>.attributes)
-  assertHonorsIterableContract(<a y={None}/>.attributes)
-  assertHonorsIterableContract(<a y={None} x=""/>.attributes)
-  assertHonorsIterableContract(<a a="" y={None} />.attributes)
-  assertHonorsIterableContract(<a y={null:String}/>.attributes)
-  assertHonorsIterableContract(<a y={null:String} x=""/>.attributes)
-  assertHonorsIterableContract(<a a="" y={null:String} />.attributes)
-}
diff --git a/test/files/run/t5125b.scala b/test/files/run/t5125b.scala
index 29c08fe..149c49e 100644
--- a/test/files/run/t5125b.scala
+++ b/test/files/run/t5125b.scala
@@ -7,7 +7,7 @@ class C2 {
   @scala.annotation.varargs
   def f(values:String*) = println("Calling C2.f(): "  + values)
   def g(): String => Int = s => s.hashCode
-  
+
   class C3 {
     @scala.annotation.varargs
     def f(values:String*) = println("Calling C3.f(): "  + values)
diff --git a/test/files/run/t5134.scala b/test/files/run/t5134.scala
new file mode 100644
index 0000000..384442f
--- /dev/null
+++ b/test/files/run/t5134.scala
@@ -0,0 +1,8 @@
+import language._
+
+object Test extends App {
+  def b = new AnyRef {
+    def a= ()
+  }
+  b.a match { case _ => () }
+}
diff --git a/test/files/run/t5224.check b/test/files/run/t5224.check
index e15c1c9..b11480a 100644
--- a/test/files/run/t5224.check
+++ b/test/files/run/t5224.check
@@ -1,3 +1,8 @@
+t5224.scala:3: warning: Implementation restriction: subclassing Classfile does not
+make your annotation visible at runtime.  If that is what
+you want, you must write the annotation class in Java.
+class Foo(bar: String) extends annotation.ClassfileAnnotation
+      ^
 {
   @new Foo(bar = "qwe") class C extends AnyRef {
     def <init>() = {
diff --git a/test/files/run/t5225_2.check b/test/files/run/t5225_2.check
index 5faa365..477ea4e 100644
--- a/test/files/run/t5225_2.check
+++ b/test/files/run/t5225_2.check
@@ -1,4 +1,4 @@
 {
-  def foo(@new `package`.cloneable() x: Int) = "";
+  def foo(@new elidable(0) x: Int) = "";
   ()
 }
diff --git a/test/files/run/t5225_2.scala b/test/files/run/t5225_2.scala
index d1b6074..cf0f23a 100644
--- a/test/files/run/t5225_2.scala
+++ b/test/files/run/t5225_2.scala
@@ -1,6 +1,6 @@
 import scala.reflect.runtime.universe._
 
 object Test extends App {
-  val tree = reify{def foo(@cloneable x: Int) = ""}.tree
+  val tree = reify{def foo(@annotation.elidable(0) x: Int) = ""}.tree
   println(tree.toString)
 }
\ No newline at end of file
diff --git a/test/files/run/t5229_1.check b/test/files/run/t5229_1.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t5256a.scala b/test/files/run/t5256a.scala
index 84ef97b..c8cea53 100644
--- a/test/files/run/t5256a.scala
+++ b/test/files/run/t5256a.scala
@@ -7,5 +7,5 @@ object Test extends App {
   val c = cm.classSymbol(classOf[A])
   println(c)
   println(c.fullName)
-  println(c.typeSignature)
+  println(c.info)
 }
\ No newline at end of file
diff --git a/test/files/run/t5256b.scala b/test/files/run/t5256b.scala
index 0ffab8a..5cd172e 100644
--- a/test/files/run/t5256b.scala
+++ b/test/files/run/t5256b.scala
@@ -6,5 +6,5 @@ object Test extends App {
   val c = cm.classSymbol(classOf[A])
   println(c)
   println(c.fullName)
-  println(c.typeSignature)
+  println(c.info)
 }
\ No newline at end of file
diff --git a/test/files/run/t5256c.scala b/test/files/run/t5256c.scala
index d56215f..66ddd3d 100644
--- a/test/files/run/t5256c.scala
+++ b/test/files/run/t5256c.scala
@@ -7,6 +7,6 @@ object Test extends App {
     val c = cm.classSymbol(classOf[A])
     println(c)
     println(c.fullName)
-    println(c.typeSignature)
+    println(c.info)
   }
 }
\ No newline at end of file
diff --git a/test/files/run/t5256d.check b/test/files/run/t5256d.check
index b7617e8..d42d234 100644
--- a/test/files/run/t5256d.check
+++ b/test/files/run/t5256d.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> import scala.reflect.runtime.universe._
 import scala.reflect.runtime.universe._
 
@@ -21,12 +19,10 @@ class A
 scala> println(c.fullName)
 $line8.$read.$iw.$iw.$iw.$iw.A
 
-scala> println(c.typeSignature)
+scala> println(c.info)
 scala.AnyRef {
   def <init>(): A
   def foo: scala.Nothing
 }
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t5256d.scala b/test/files/run/t5256d.scala
index 24ac1eb..5aa2607 100644
--- a/test/files/run/t5256d.scala
+++ b/test/files/run/t5256d.scala
@@ -8,6 +8,6 @@ class A { def foo = ??? }
 val c = cm.classSymbol(classOf[A])
 println(c)
 println(c.fullName)
-println(c.typeSignature)
+println(c.info)
   """
 }
\ No newline at end of file
diff --git a/test/files/run/t5256e.scala b/test/files/run/t5256e.scala
index f83546f..2f57ea6 100644
--- a/test/files/run/t5256e.scala
+++ b/test/files/run/t5256e.scala
@@ -6,5 +6,5 @@ object Test extends App {
   val c = cm.classSymbol(classOf[C#A])
   println(c)
   println(c.fullName)
-  println(c.typeSignature)
+  println(c.info)
 }
\ No newline at end of file
diff --git a/test/files/run/t5256f.scala b/test/files/run/t5256f.scala
index 80c7ad8..1de2592 100644
--- a/test/files/run/t5256f.scala
+++ b/test/files/run/t5256f.scala
@@ -7,7 +7,7 @@ object Test extends App {
   val c1 = cm.classSymbol(classOf[A1])
   println(c1)
   println(c1.fullName)
-  println(c1.typeSignature)
+  println(c1.info)
 
   new Test
 }
@@ -18,5 +18,5 @@ class Test {
   val c2 = cm.classSymbol(classOf[A2])
   println(c2)
   println(c2.fullName)
-  println(c2.typeSignature)
+  println(c2.info)
 }
diff --git a/test/files/run/t5256g.check b/test/files/run/t5256g.check
index c9c8d6e..cef3a41 100644
--- a/test/files/run/t5256g.check
+++ b/test/files/run/t5256g.check
@@ -1,3 +1,5 @@
-anonymous class $anon$1
+$anon
 Test.$anon$1
-A with B{def <init>(): A with B}
+A with B {
+  def <init>(): A with B
+}
diff --git a/test/files/run/t5256g.scala b/test/files/run/t5256g.scala
index 358c186..2d4c1b5 100644
--- a/test/files/run/t5256g.scala
+++ b/test/files/run/t5256g.scala
@@ -9,5 +9,5 @@ object Test extends App {
   val c = cm.classSymbol(mutant.getClass)
   println(c)
   println(c.fullName)
-  println(c.typeSignature)
+  println(c.info)
 }
diff --git a/test/files/run/t5256h.check b/test/files/run/t5256h.check
index 1b23a71..dc3e919 100644
--- a/test/files/run/t5256h.check
+++ b/test/files/run/t5256h.check
@@ -1,7 +1,7 @@
-anonymous class $anon$1
+$anon
 Test.$anon$1
 java.lang.Object {
   final private val x: Int
   def x(): Int
-  def <init>(): java.lang.Object{def x(): Int}
+  def <init>(): $anon$1
 }
diff --git a/test/files/run/t5256h.scala b/test/files/run/t5256h.scala
index fd4ffd9..f58aa6d 100644
--- a/test/files/run/t5256h.scala
+++ b/test/files/run/t5256h.scala
@@ -6,5 +6,5 @@ object Test extends App {
   val c = cm.classSymbol(mutant.getClass)
   println(c)
   println(c.fullName)
-  println(c.typeSignature)
+  println(c.info)
 }
diff --git a/test/files/run/t5262.scala b/test/files/run/t5262.scala
index fc4e57a..6744844 100644
--- a/test/files/run/t5262.scala
+++ b/test/files/run/t5262.scala
@@ -6,21 +6,21 @@
 
 
 object Test {
-  
+
   def serializationDeserialization(obj : Any) {
     val bos = new java.io.ByteArrayOutputStream()
     val out = new java.io.ObjectOutputStream(bos)
     out.writeObject(obj)
-    
+
     val arr = bos.toByteArray()
     val in = new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(arr))
     val o = in.readObject()
     println(o)
   }
-  
+
   def main(args : Array[String]) {
     serializationDeserialization(List(1,2,3,4))
     serializationDeserialization(List(1,2,null,4))
   }
-  
+
 }
diff --git a/test/files/run/t5271_4.check b/test/files/run/t5271_4.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t5272_1_oldpatmat.flags b/test/files/run/t5272_1_oldpatmat.flags
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t5272_2_oldpatmat.flags b/test/files/run/t5272_2_oldpatmat.flags
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t5273_1_oldpatmat.flags b/test/files/run/t5273_1_oldpatmat.flags
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t5273_2a_oldpatmat.flags b/test/files/run/t5273_2a_oldpatmat.flags
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t5273_2b_oldpatmat.flags b/test/files/run/t5273_2b_oldpatmat.flags
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t5277_1.scala b/test/files/run/t5277_1.scala
index a2d5465..6523296 100644
--- a/test/files/run/t5277_1.scala
+++ b/test/files/run/t5277_1.scala
@@ -1,6 +1,7 @@
 import scala.reflect.runtime.universe._
 import scala.tools.reflect.Eval
 
+import scala.language.{ implicitConversions, postfixOps }
 object Test extends App {
   reify {
     def fact(n: Int): BigInt =
@@ -12,4 +13,4 @@ object Test extends App {
 
     println("10! = " + (10!))
   }.eval
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t5284b.check b/test/files/run/t5284b.check
index 98d9bcb..71426ad 100644
--- a/test/files/run/t5284b.check
+++ b/test/files/run/t5284b.check
@@ -1 +1,4 @@
+t5284b.scala:27: warning: type S is unused or used in non-specializable positions.
+  def bar[@specialized(Int) W <: T, @specialized(Int) S](w: W) = id(w)
+      ^
 17
diff --git a/test/files/run/t5284b.scala b/test/files/run/t5284b.scala
index a9282a8..0da7972 100644
--- a/test/files/run/t5284b.scala
+++ b/test/files/run/t5284b.scala
@@ -23,6 +23,6 @@ object Foo {
 
 class Foo[@specialized(Int) T] {
   val id: T => T = x => x
-  
+
   def bar[@specialized(Int) W <: T, @specialized(Int) S](w: W) = id(w)
 }
diff --git a/test/files/run/t5284c.check b/test/files/run/t5284c.check
index 00750ed..cf578ad 100644
--- a/test/files/run/t5284c.check
+++ b/test/files/run/t5284c.check
@@ -1 +1,4 @@
+t5284c.scala:29: warning: type W is unused or used in non-specializable positions.
+  def bar[@specialized(Int) W <: T](ws: List[W]) = len(ws)
+      ^
 3
diff --git a/test/files/run/t5284c.scala b/test/files/run/t5284c.scala
index 383b84c..dec743f 100644
--- a/test/files/run/t5284c.scala
+++ b/test/files/run/t5284c.scala
@@ -25,6 +25,6 @@ object Foo {
 
 class Foo[@specialized(Int) T] {
   val len: List[T] => Int = xs => xs.length
-  
+
   def bar[@specialized(Int) W <: T](ws: List[W]) = len(ws)
 }
diff --git a/test/files/run/t5293-map.scala b/test/files/run/t5293-map.scala
new file mode 100644
index 0000000..ad1bbcf
--- /dev/null
+++ b/test/files/run/t5293-map.scala
@@ -0,0 +1,88 @@
+
+
+
+import scala.collection.JavaConverters._
+
+
+
+object Test extends App {
+
+  def bench(label: String)(body: => Unit): Long = {
+    val start = System.nanoTime
+
+    0.until(10).foreach(_ => body)
+
+    val end = System.nanoTime
+
+    //println("%s: %s ms".format(label, (end - start) / 1000.0 / 1000.0))
+
+    end - start
+  }
+
+  def benchJava(values: java.util.Map[Int, Int]) = {
+    bench("Java Map") {
+      val m = new java.util.HashMap[Int, Int]
+
+      m.putAll(values)
+    }
+  }
+
+  def benchScala(values: Iterable[(Int, Int)]) = {
+    bench("Scala Map") {
+      val m = new scala.collection.mutable.HashMap[Int, Int]
+
+      m ++= values
+    }
+  }
+
+  def benchScalaSorted(values: Iterable[(Int, Int)]) = {
+    bench("Scala Map sorted") {
+      val m = new scala.collection.mutable.HashMap[Int, Int]
+
+      m ++= values.toArray.sorted
+    }
+  }
+
+  def benchScalaPar(values: Iterable[(Int, Int)]) = {
+    bench("Scala ParMap") {
+      val m = new scala.collection.parallel.mutable.ParHashMap[Int, Int] map { x => x }
+
+      m ++= values
+    }
+  }
+
+  val total = 50000
+  val values = (0 until total) zip (0 until total)
+  val map = scala.collection.mutable.HashMap.empty[Int, Int]
+
+  map ++= values
+
+  // warmup
+  for (x <- 0 until 5) {
+    benchJava(map.asJava)
+    benchScala(map)
+    benchScalaPar(map)
+    benchJava(map.asJava)
+    benchScala(map)
+    benchScalaPar(map)
+  }
+
+  val javamap = benchJava(map.asJava)
+  val scalamap = benchScala(map)
+  val scalaparmap = benchScalaPar(map)
+
+  // println(javamap)
+  // println(scalamap)
+  // println(scalaparmap)
+
+  assert(scalamap < (javamap * 10), "scalamap: " + scalamap + " vs. javamap: " + javamap)
+  assert(scalaparmap < (javamap * 10), "scalaparmap: " + scalaparmap + " vs. javamap: " + javamap)
+}
+
+
+
+
+
+
+
+
diff --git a/test/files/run/t5293.scala b/test/files/run/t5293.scala
new file mode 100644
index 0000000..c42c967
--- /dev/null
+++ b/test/files/run/t5293.scala
@@ -0,0 +1,83 @@
+
+
+
+import scala.collection.JavaConverters._
+
+
+
+object Test extends App {
+
+  def bench(label: String)(body: => Unit): Long = {
+    val start = System.nanoTime
+
+    0.until(10).foreach(_ => body)
+
+    val end = System.nanoTime
+
+    //println("%s: %s ms".format(label, (end - start) / 1000.0 / 1000.0))
+
+    end - start
+  }
+
+  def benchJava(values: java.util.Collection[Int]) = {
+    bench("Java Set") {
+      val set = new java.util.HashSet[Int]
+
+      set.addAll(values)
+    }
+  }
+
+  def benchScala(values: Iterable[Int]) = {
+    bench("Scala Set") {
+      val set = new scala.collection.mutable.HashSet[Int]
+
+      set ++= values
+    }
+  }
+
+  def benchScalaSorted(values: Iterable[Int]) = {
+    bench("Scala Set sorted") {
+      val set = new scala.collection.mutable.HashSet[Int]
+
+      set ++= values.toArray.sorted
+    }
+  }
+
+  def benchScalaPar(values: Iterable[Int]) = {
+    bench("Scala ParSet") {
+      val set = new scala.collection.parallel.mutable.ParHashSet[Int] map { x => x }
+
+      set ++= values
+    }
+  }
+
+  val values = 0 until 50000
+  val set = scala.collection.mutable.HashSet.empty[Int]
+
+  set ++= values
+
+  // warmup
+  for (x <- 0 until 5) {
+    benchJava(set.asJava)
+    benchScala(set)
+    benchScalaPar(set)
+    benchJava(set.asJava)
+    benchScala(set)
+    benchScalaPar(set)
+  }
+
+  val javaset = benchJava(set.asJava)
+  val scalaset = benchScala(set)
+  val scalaparset = benchScalaPar(set)
+
+  assert(scalaset < (javaset * 8), "scalaset: " + scalaset + " vs. javaset: " + javaset)
+  assert(scalaparset < (javaset * 8), "scalaparset: " + scalaparset + " vs. javaset: " + javaset)
+}
+
+
+
+
+
+
+
+
diff --git a/test/files/run/t5300.scala b/test/files/run/t5300.scala
index 073b296..0f1c807 100644
--- a/test/files/run/t5300.scala
+++ b/test/files/run/t5300.scala
@@ -1,6 +1,6 @@
 object Test {
   val pf: PartialFunction[Any, Unit] = { case _ => () }
-  
+
   def main(args: Array[String]): Unit = {
     pf orElse pf
   }
diff --git a/test/files/run/t5313.scala b/test/files/run/t5313.scala
index 7da8726..0d7168f 100644
--- a/test/files/run/t5313.scala
+++ b/test/files/run/t5313.scala
@@ -1,13 +1,13 @@
-import scala.tools.partest.IcodeTest
+import scala.tools.partest.IcodeComparison
 
-object Test extends IcodeTest {
+object Test extends IcodeComparison {
   override def printIcodeAfterPhase = "dce"
 
-  override def extraSettings: String = super.extraSettings + " -optimize"  
+  override def extraSettings: String = super.extraSettings + " -optimize"
 
   override def code =
     """class Foo {
-      def randomBoolean = util.Random.nextInt % 2 == 0
+      def randomBoolean = scala.util.Random.nextInt % 2 == 0
       def bar = {
         var kept1 = new Object
         val result = new java.lang.ref.WeakReference(kept1)
@@ -18,11 +18,11 @@ object Test extends IcodeTest {
         val erased3 = erased2 // and this
         var erased4 = erased2 // and this
         val erased5 = erased4 // and this
-        var kept2: Object = new Object // ultimately can't be eliminated 
+        var kept2: Object = new Object // ultimately can't be eliminated
         while(randomBoolean) {
           val kept3 = kept2
           kept2 = null // this can't, because it clobbers kept2, which is used
-          erased4 = null // safe to eliminate       
+          erased4 = null // safe to eliminate
           println(kept3)
         }
         var kept4 = new Object // have to keep, it's used
@@ -48,7 +48,7 @@ object Test extends IcodeTest {
 
   override def show() {
     val storeLocal = "STORE_LOCAL"
-    val lines1 = collectIcode("") filter (_ contains storeLocal) map (x => x.drop(x.indexOf(storeLocal)))
+    val lines1 = collectIcode() filter (_ contains storeLocal) map (x => x.drop(x.indexOf(storeLocal)))
     println(lines1 mkString "\n")
   }
 }
diff --git a/test/files/run/t5356.scala b/test/files/run/t5356.scala
index ec17e03..dabb9ef 100644
--- a/test/files/run/t5356.scala
+++ b/test/files/run/t5356.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ reflectiveCalls }
 object Test {
   def f(x: Any { def toInt: Int }) = println(x.toInt + " " + x.getClass.getName)
 
diff --git a/test/files/run/t5375.check b/test/files/run/t5375.check
index 7d3002f..b1a57ee 100644
--- a/test/files/run/t5375.check
+++ b/test/files/run/t5375.check
@@ -1 +1 @@
-Composite throwable
\ No newline at end of file
+Runtime exception
diff --git a/test/files/run/t5375.scala b/test/files/run/t5375.scala
index e4b329d..826ecd8 100644
--- a/test/files/run/t5375.scala
+++ b/test/files/run/t5375.scala
@@ -1,19 +1,8 @@
-
-
-
-import collection.parallel.CompositeThrowable
-
-
-
-object Test {
-  
-  def main(args: Array[String]) {
-    val foos = (1 to 1000) toSeq;
-    try {
-      foos.par.map(i => if (i % 37 == 0) sys.error("i div 37") else i)
-    } catch {
-      case CompositeThrowable(thr) => println("Composite throwable")
-    }
+object Test extends App {
+  val foos = (1 to 1000).toSeq
+  try
+    foos.par.map(i => if (i % 37 == 0) sys.error("i div 37") else i)
+  catch {
+    case ex: RuntimeException => println("Runtime exception")
   }
-  
 }
diff --git a/test/files/run/t5380.check b/test/files/run/t5380.check
new file mode 100644
index 0000000..731a798
--- /dev/null
+++ b/test/files/run/t5380.check
@@ -0,0 +1,9 @@
+t5380.scala:3: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    val f = () => return try { 1 } catch { case _: Throwable => 0 }
+                               ^
+t5380.scala:3: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    val f = () => return try { 1 } catch { case _: Throwable => 0 }
+                                                                ^
+t5380.scala:3: warning: enclosing method main has result type Unit: return value discarded
+    val f = () => return try { 1 } catch { case _: Throwable => 0 }
+                  ^
diff --git a/test/files/run/t5380.scala b/test/files/run/t5380.scala
index 6083161..66d12a0 100644
--- a/test/files/run/t5380.scala
+++ b/test/files/run/t5380.scala
@@ -1,6 +1,6 @@
 object Test {
   def main(args: Array[String]) {
-    val f = () => return try { 1 } catch { case _ => 0 }
+    val f = () => return try { 1 } catch { case _: Throwable => 0 }
     f()
   }
 }
diff --git a/test/files/run/t5415.check b/test/files/run/t5415.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t5415.scala b/test/files/run/t5415.scala
index c12e209..65de862 100644
--- a/test/files/run/t5415.scala
+++ b/test/files/run/t5415.scala
@@ -8,5 +8,5 @@ object Test extends App{
   import scala.reflect.runtime.{currentMirror => cm}
   import scala.tools.reflect.ToolBox
   val toolbox = cm.mkToolBox()
-  val ttree = toolbox.typeCheck(code.tree)
+  val ttree = toolbox.typecheck(code.tree)
 }
diff --git a/test/pending/run/t5418.scala b/test/files/run/t5418.scala
similarity index 100%
rename from test/pending/run/t5418.scala
rename to test/files/run/t5418.scala
diff --git a/test/files/run/t5418b.check b/test/files/run/t5418b.check
index 48d82a2..f036a4b 100644
--- a/test/files/run/t5418b.check
+++ b/test/files/run/t5418b.check
@@ -1,2 +1,2 @@
 new Object().getClass()
-TypeRef(ThisType(java.lang), java.lang.Class, List(TypeRef(NoPrefix, newTypeName("?0"), List())))
+TypeRef(ThisType(java.lang), java.lang.Class, List(TypeRef(NoPrefix, TypeName("?0"), List())))
diff --git a/test/files/run/t5418b.scala b/test/files/run/t5418b.scala
index 08e8bb1..c617261 100644
--- a/test/files/run/t5418b.scala
+++ b/test/files/run/t5418b.scala
@@ -5,7 +5,7 @@ import scala.tools.reflect.ToolBox
 object Test extends App {
   val tb = cm.mkToolBox()
   val untyped = reify(new Object().getClass).tree
-  val typed = tb.typeCheck(untyped)
+  val typed = tb.typecheck(untyped)
   println(typed)
   println(showRaw(typed.tpe))
 }
\ No newline at end of file
diff --git a/test/files/run/t5428.check b/test/files/run/t5428.check
index 7b4b1d6..a46514a 100644
--- a/test/files/run/t5428.check
+++ b/test/files/run/t5428.check
@@ -1 +1,2 @@
-Stack(8, 7, 6, 5, 4, 3)
\ No newline at end of file
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+Stack(8, 7, 6, 5, 4, 3)
diff --git a/test/files/run/t5428.scala b/test/files/run/t5428.scala
index 106bb7f..44003e0 100644
--- a/test/files/run/t5428.scala
+++ b/test/files/run/t5428.scala
@@ -11,19 +11,19 @@ class A extends StackProxy[Int] {
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     val a = new A
-    
+
     a push 3
     a push 4
     a push 5
-    
+
     a.push(6, 7, 8)
-    
+
     println(a)
-    
-    a pop
+
+    a.pop
   }
-  
+
 }
diff --git a/test/files/run/t5488-fn.scala b/test/files/run/t5488-fn.scala
index d17bcf9..e6efe17 100644
--- a/test/files/run/t5488-fn.scala
+++ b/test/files/run/t5488-fn.scala
@@ -4,7 +4,7 @@ class C[@specialized(Int, AnyRef) A, @specialized(Int, AnyRef) B, @specialized(I
 object Test {
   def main(args:Array[String]) {
     def show(x: Any) = println(x.getClass.getName)
-    
+
     show(new B((x: Int) => 1))
     show(new B((x: Int) => "abc"))
     show(new B((x: Int) => ()))
diff --git a/test/files/run/t5488.scala b/test/files/run/t5488.scala
index 7bab0cd..f93cf29 100644
--- a/test/files/run/t5488.scala
+++ b/test/files/run/t5488.scala
@@ -5,15 +5,15 @@ class C0[@specialized(Int, AnyRef) A, @specialized(Int, AnyRef) B, @specialized(
 object Test {
   def main(args:Array[String]) {
     def show(x: Any) = println(x.getClass.getName)
-    
+
     show(new A0[Int]())
     show(new A0[AnyRef]())
-    
+
     show(new B0[Int, Int]())
     show(new B0[Int, AnyRef]())
     show(new B0[AnyRef, Int]())
     show(new B0[AnyRef, AnyRef]())
-    
+
     show(new C0[Int, Int, Int]())
     show(new C0[Int, Int, AnyRef]())
     show(new C0[Int, AnyRef, Int]())
diff --git a/test/files/run/t5500b.scala b/test/files/run/t5500b.scala
index 32de858..9a49624 100644
--- a/test/files/run/t5500b.scala
+++ b/test/files/run/t5500b.scala
@@ -26,7 +26,7 @@ object Test {
     println(new C1A[Double, Int].getClass.getName)
     println(new C1A[Double, Double].getClass.getName)
     println(new C1A[Double, String].getClass.getName)
-    
+
     println(new C1B[String, Int]("abc").getClass.getName)
     println(new C1B[String, Double]("abc").getClass.getName)
     println(new C1B[String, String]("abc").getClass.getName)
@@ -36,7 +36,7 @@ object Test {
     println(new C1B[Double, Int](1d).getClass.getName)
     println(new C1B[Double, Double](1d).getClass.getName)
     println(new C1B[Double, String](1d).getClass.getName)
-    
+
     println(new C1C("abc", 123).getClass.getName)
     println(new C1C("abc", 123).getClass.getName)
     println(new C1C("a", 1d).getClass.getName)
diff --git a/test/files/run/t5514.check b/test/files/run/t5514.check
deleted file mode 100644
index c68f7c9..0000000
--- a/test/files/run/t5514.check
+++ /dev/null
@@ -1,19 +0,0 @@
-constructed reader: 10
-constructed reader: 9
-constructed reader: 8
-constructed reader: 7
-constructed reader: 6
-constructed reader: 5
-constructed reader: 4
-constructed reader: 3
-constructed reader: 2
-constructed reader: 1
-constructed reader: 0
-[0.0] parsed: List(s10, s9, s8, s7, s6, s5, s4, s3, s2, s1)
-constructed reader: 10
-constructed reader: 9
-constructed reader: 8
-constructed reader: 7
-constructed reader: 6
-constructed reader: 5
-[0.0] parsed: List(s10, s9, s8, s7, s6)
\ No newline at end of file
diff --git a/test/files/run/t5514.scala b/test/files/run/t5514.scala
deleted file mode 100644
index efd5ba6..0000000
--- a/test/files/run/t5514.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-
-
-
-import scala.io.Source
-import scala.util.parsing.combinator.Parsers
-import scala.util.parsing.input.Reader
-import scala.util.parsing.input.Position
-
-
-
-class DemoReader(n: Int) extends Reader[String] {
-  def atEnd = n == 0
-  def first = if (n >= 0) "s" + n else throw new IllegalArgumentException("No more input.")
-  def rest = new DemoReader(n - 1)
-  def pos = new Position {
-    def line = 0
-    def column = 0
-    def lineContents = first
-  }
-  println("constructed reader: " + n)
-}
-
-
-object Test extends App with Parsers {
-  type Elem = String
-  def startsWith(prefix: String) = acceptIf(_ startsWith prefix)("Error: " + _)
-  
-  val resrep = startsWith("s").*(new DemoReader(10))
-  Console println resrep
-  
-  val resrep5 = repN(5, startsWith("s"))(new DemoReader(10))
-  Console println resrep5
-}
-
-
diff --git a/test/files/run/t5527.check b/test/files/run/t5527.check
deleted file mode 100644
index 36bee9b..0000000
--- a/test/files/run/t5527.check
+++ /dev/null
@@ -1,99 +0,0 @@
-[[syntax trees at end of                    parser]] // newSource1.scala
-package <empty> {
-  object UselessComments extends scala.AnyRef {
-    def <init>() = {
-      super.<init>();
-      ()
-    };
-    var z = 0;
-    def test1 = {
-      object Maybe extends scala.AnyRef {
-        def <init>() = {
-          super.<init>();
-          ()
-        };
-        /** Some comment inside */
-        def nothing() = ()
-      };
-      ()
-    };
-    def test2 = {
-      var x = 4;
-      if (true)
-        {
-          x = 5;
-          val y = 6;
-          ()
-        }
-      else
-        ()
-    };
-    def test3 = {
-      if (true)
-        z = 3
-      else
-        ();
-      val t = 4;
-      0.to(4).foreach(((i) => println(i)))
-    };
-    val test4 = 'a' match {
-      case ('0'| '1'| '2'| '3'| '4'| '5'| '6'| '7'| '8'| '9') => true
-      case _ => false
-    }
-  };
-  /** comments that we should keep */
-  object UsefulComments extends scala.AnyRef {
-    def <init>() = {
-      super.<init>();
-      ()
-    };
-    /** class A */
-    class A extends scala.AnyRef {
-      def <init>() = {
-        super.<init>();
-        ()
-      };
-      /** f */
-      def f(i: Int) = i;
-      /** v */
-      val v = 1;
-      /** u */
-      var u = 2
-    };
-    /** trait B */
-    abstract trait B extends scala.AnyRef {
-      def $init$() = {
-        ()
-      };
-      /** T */
-      type T >: _root_.scala.Nothing <: _root_.scala.Any;
-      /** f */
-      def f(i: Int): scala.Unit;
-      /** v */
-      val v = 1;
-      /** u */
-      var u = 2
-    };
-    /** object C */
-    object C extends scala.AnyRef {
-      def <init>() = {
-        super.<init>();
-        ()
-      };
-      /** f */
-      def f(i: Int) = i;
-      /** v */
-      val v = 1;
-      /** u */
-      var u = 2
-    };
-    /** class D */
-    @new deprecated("use ... instead", "2.10.0") class D extends scala.AnyRef {
-      def <init>() = {
-        super.<init>();
-        ()
-      }
-    }
-  }
-}
-
diff --git a/test/files/run/t5527.scala b/test/files/run/t5527.scala
deleted file mode 100644
index 2449ff6..0000000
--- a/test/files/run/t5527.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-import scala.tools.partest._
-import java.io._
-import scala.tools.nsc._
-import scala.tools.nsc.util.CommandLineParser
-import scala.tools.nsc.doc.{Settings, DocFactory}
-import scala.tools.nsc.reporters.ConsoleReporter
-
-object Test extends DirectTest {
-
-  override def extraSettings: String = "-usejavacp -Xprint:parser -Yrangepos -Ystop-after:parser -d " + testOutput.path
-
-  override def code = """
-    // SI-5527
-    object UselessComments {
-
-      var z = 0
-
-      def test1 = {
-        /** Some comment here */
-        object Maybe {
-          /** Some comment inside */
-          def nothing() = ()
-        }
-      }
-
-      def test2 = {
-        var x = 4
-        if (true) {
-          /** Testing 123 */
-          x = 5
-          val y = 6
-        }
-      }
-
-      def test3 = {
-        if (true)
-         z = 3
-
-        /** Calculate this result. */
-        val t = 4
-        for (i <- 0 to 4)
-          println(i)
-      }
-
-      val test4 = ('a') match {
-        /** Another digit is a giveaway. */
-        case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9'  =>
-          true
-        case _ =>
-          false
-      }
-    }
-
-    /** comments that we should keep */
-    object UsefulComments {
-      /** class A */
-      class A {
-        /** f */
-        def f(i: Int) = i
-        /** v */
-        val v = 1
-        /** u */
-        var u = 2
-      }     
-      /** trait B */
-      trait B {
-        /** T */
-        type T
-        /** f */
-        def f(i: Int)
-        /** v */
-        val v = 1
-        /** u */
-        var u = 2
-      }     
-      /** object C */
-      object C {
-        /** f */
-        def f(i: Int) = i
-        /** v */
-        val v = 1
-        /** u */
-        var u = 2
-      }
-      /** class D */
-      @deprecated("use ... instead", "2.10.0")
-      class D
-    }
-  """.trim
-
-  override def show(): Unit = {
-    // redirect err to out, for logging
-    val prevErr = System.err
-    System.setErr(System.out)
-    compile()
-    System.setErr(prevErr)
-  }
-
-  override def newCompiler(args: String*): Global = {
-    // we want the Scaladoc compiler here, because it keeps DocDef nodes in the tree
-    val settings = new Settings(_ => ())
-    val command = new ScalaDoc.Command((CommandLineParser tokenize extraSettings) ++ args.toList, settings)
-    new DocFactory(new ConsoleReporter(settings), settings).compiler
-  }
-
-  override def isDebug = false // so we don't get the newSettings warning
-}
diff --git a/test/files/run/t5535.check b/test/files/run/t5535.check
index 8da9829..a0c87a4 100644
--- a/test/files/run/t5535.check
+++ b/test/files/run/t5535.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> def h()(i: Int) = 1 + i
 h: ()(i: Int)Int
 
@@ -16,5 +14,3 @@ scala> println(f(10))
 11
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t5537.check b/test/files/run/t5537.check
index 68c3ebf..b9d521f 100644
--- a/test/files/run/t5537.check
+++ b/test/files/run/t5537.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> List[Predef.type]()
 res0: List[scala.Predef.type] = List()
 
@@ -16,5 +14,3 @@ scala> List[Set.type]()
 res3: List[Set.type] = List()
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t5545.check b/test/files/run/t5545.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t5565.scala b/test/files/run/t5565.scala
new file mode 100644
index 0000000..9ced87c
--- /dev/null
+++ b/test/files/run/t5565.scala
@@ -0,0 +1,12 @@
+import scala.language.reflectiveCalls
+import scala.language.implicitConversions
+
+object Test extends App {
+  implicit def doubleWithApproxEquals(d: Double) = new {
+    def ~==(v: Double, margin: Double = 0.001): Boolean =
+      math.abs(d - v) < margin
+  }
+
+  assert(math.abs(-4.0) ~== (4.0, 0.001))
+  assert(math.abs(-4.0) ~== 4.0)
+}
diff --git a/test/files/run/t5568.flags b/test/files/run/t5568.flags
new file mode 100644
index 0000000..ad51758
--- /dev/null
+++ b/test/files/run/t5568.flags
@@ -0,0 +1 @@
+-nowarn
diff --git a/test/files/run/t5577.scala b/test/files/run/t5577.scala
index b5d6d8c..650b3c2 100644
--- a/test/files/run/t5577.scala
+++ b/test/files/run/t5577.scala
@@ -6,22 +6,22 @@ import collection._
 
 
 object Test {
-  
+
   class AlarmingBuffer[T] extends mutable.ArrayBuffer[T] {
     override def sizeHint(x: Int) {
       println("Received a size hint: " + x)
       super.sizeHint(x)
     }
   }
-  
+
   def main(args: Array[String]) {
     val iteratorBuilder = (new AlarmingBuffer[Int]) mapResult {
       res => res.iterator
     }
-    
+
     iteratorBuilder.sizeHint(10)
     iteratorBuilder ++= (0 until 10)
     iteratorBuilder.result.foreach(println)
   }
-  
+
 }
diff --git a/test/files/run/t5583.check b/test/files/run/t5583.check
index 39b969f..af96405 100644
--- a/test/files/run/t5583.check
+++ b/test/files/run/t5583.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> var s = 0
 s: Int = 0
 
@@ -16,5 +14,3 @@ scala> println(s)
 165
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t5590.scala b/test/files/run/t5590.scala
index 9c806e0..baf0503 100644
--- a/test/files/run/t5590.scala
+++ b/test/files/run/t5590.scala
@@ -7,25 +7,25 @@ import collection._
 
 
 object Test {
-  
+
   def check(obj: AnyRef) {
     println(obj)
-    
+
     val bos = new ByteArrayOutputStream()
     val out = new ObjectOutputStream(bos)
     out.writeObject(obj)
     val arr = bos.toByteArray()
     val in = new ObjectInputStream(new ByteArrayInputStream(arr))
     val deser = in.readObject()
-    
+
     println(deser)
   }
-  
+
   def main(args: Array[String]) {
     val lhm = mutable.LinkedHashMap("a" -> "a", "b" -> "b", "c" -> "c")
     val lhs = mutable.LinkedHashSet("a", "b", "c", "d", "e")
     check(lhm)
     check(lhs)
   }
-  
+
 }
diff --git a/test/files/run/t5603.check b/test/files/run/t5603.check
index 3f19a0a..760a925 100644
--- a/test/files/run/t5603.check
+++ b/test/files/run/t5603.check
@@ -10,17 +10,17 @@
   [87:209]class C extends [94:209][151:159]Greeting {
     [119:139]val nameElse = _;
     [95:101]<paramaccessor> private[this] val i: [98:101]Int = _;
-    <119:139>def <init>([95]i: [98]Int) = <119:139>{
+    <95:139>def <init>(<95:101>i: [98]Int) = <95:139>{
       <119:139>val nameElse = <134:139>"Bob";
-      [94][94][94]super.<init>();
-      [94]()
+      [NoPosition][NoPosition][NoPosition]super.<init>();
+      <95:139>()
     };
     [168:184]val name = [179:184]"avc";
     [191:203][191:198]println([199:202]msg)
   };
   [215:241]object Test extends [227:241][235:238]App {
     [227]def <init>() = [227]{
-      [227][227][227]super.<init>();
+      [NoPosition][NoPosition][NoPosition]super.<init>();
       [227]()
     };
     [NoPosition]<empty>
diff --git a/test/files/run/t5603.scala b/test/files/run/t5603.scala
index 60dfd01..77c2775 100644
--- a/test/files/run/t5603.scala
+++ b/test/files/run/t5603.scala
@@ -1,7 +1,7 @@
 import scala.tools.partest._
 import java.io._
 import scala.tools.nsc._
-import scala.tools.nsc.util.CommandLineParser
+import scala.tools.cmd.CommandLineParser
 import scala.tools.nsc.{Global, Settings, CompilerCommand}
 import scala.tools.nsc.reporters.ConsoleReporter
 
@@ -36,7 +36,8 @@ object Test extends DirectTest {
 
     val settings = new Settings()
     settings.Xprintpos.value = true
+    settings.Yrangepos.value = true
     val command = new CompilerCommand((CommandLineParser tokenize extraSettings) ++ args.toList, settings)
-    new Global(command.settings, new ConsoleReporter(settings)) with interactive.RangePositions
+    Global(command.settings, new ConsoleReporter(settings))
   }
 }
diff --git a/test/files/run/t5610.scala b/test/files/run/t5610.scala
index f62b2df..82dabcc 100644
--- a/test/files/run/t5610.scala
+++ b/test/files/run/t5610.scala
@@ -20,7 +20,7 @@ object Test {
     val f: (String, Int*) => Unit = m(2, 3)
     f("", 5, 6)
   }
-  
+
   def foo(s: => String)(dummy: Int) = () => println(s)
 
   def m(a: Int*)(z: String, b: Int*) {
diff --git a/test/pending/run/t5610a.check b/test/files/run/t5610a.check
similarity index 100%
rename from test/pending/run/t5610a.check
rename to test/files/run/t5610a.check
diff --git a/test/pending/run/t5610a.scala b/test/files/run/t5610a.scala
similarity index 100%
rename from test/pending/run/t5610a.scala
rename to test/files/run/t5610a.scala
diff --git a/test/files/run/t5629.scala b/test/files/run/t5629.scala
index 69feddd..5b91007 100644
--- a/test/files/run/t5629.scala
+++ b/test/files/run/t5629.scala
@@ -27,10 +27,10 @@ class AnyChild extends Parent[Any] {
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     new IntChild().foo(33)
     new AnyChild().foo(33)
   }
-  
+
 }
diff --git a/test/files/run/t5629b.check b/test/files/run/t5629b.check
index 1bc0248..e0f25f0 100644
--- a/test/files/run/t5629b.check
+++ b/test/files/run/t5629b.check
@@ -2,7 +2,7 @@
 MySmartPF.apply entered...
 newPF.applyOrElse entered...
 default
-scala.MatchError: () (of class scala.runtime.BoxedUnit)
+scala.MatchError: 1 (of class java.lang.Integer)
 === pf(42):
 MySmartPF.apply entered...
 newPF.applyOrElse entered...
diff --git a/test/files/run/t5629b.scala b/test/files/run/t5629b.scala
index 6c90808..5d40220 100644
--- a/test/files/run/t5629b.scala
+++ b/test/files/run/t5629b.scala
@@ -1,10 +1,5 @@
-
-
-
-
-
 object Test extends App {
-  
+
   trait MyPF[@specialized(Int) -A] extends (A => Unit) {
     def isDefinedAt(x: A): Boolean
     def applyOrElse[A1 <: A](x: A1, default: A1 => Unit): Unit = {
@@ -12,30 +7,30 @@ object Test extends App {
       if (isDefinedAt(x)) apply(x) else default(x)
     }
   }
-  
+
   trait MySmartPF[@specialized(Int) -A] extends MyPF[A] {
     def apply(x: A): Unit = {
       println("MySmartPF.apply entered...")
-      applyOrElse(x, { _: Any => throw new MatchError })
+      applyOrElse(x, { default: Any => throw new MatchError(default) })
     }
   }
-  
+
   type T = Int
   //type T = Any
-  
+
   def newPF(test: T): MyPF[T] = new MySmartPF[T] {
     def isDefinedAt(x: T): Boolean = x != test
     override def applyOrElse[A1 <: T](x: A1, default: A1 => Unit): Unit = {
       println("newPF.applyOrElse entered...")
-      if (x != test) { println("ok"); () } else { println("default"); default(x) } 
+      if (x != test) { println("ok"); () } else { println("default"); default(x) }
     }
   }
-  
+
   val pf = newPF(1)
   println("=== pf(1):")
-  try { pf(1) } catch { case x => println(x) }
+  try { pf(1) } catch { case x: Throwable => println(x) }
   println("=== pf(42):")
   pf(42)
   println("=== done")
-  
+
 }
diff --git a/test/files/run/t5655.check b/test/files/run/t5655.check
index 43ebd50..06c6b32 100644
--- a/test/files/run/t5655.check
+++ b/test/files/run/t5655.check
@@ -1,10 +1,8 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> object x { def x={} }
-defined module x
+defined object x
 
 scala> import x._
 import x._
@@ -26,5 +24,3 @@ and import x
               ^
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t5656.scala b/test/files/run/t5656.scala
index f5ea147..5eb1df5 100644
--- a/test/files/run/t5656.scala
+++ b/test/files/run/t5656.scala
@@ -3,9 +3,9 @@
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     println(Seq(List('1', '2', '3'), List('a', 'b', 'c')).view.addString(new StringBuilder, "_"))
   }
-  
+
 }
diff --git a/test/files/run/t5676.scala b/test/files/run/t5676.scala
index b643c30..0c920e4 100644
--- a/test/files/run/t5676.scala
+++ b/test/files/run/t5676.scala
@@ -13,12 +13,12 @@ class Baz[S] extends Foo[S] {
 }
 
 object Test {
-  
+
   def main(a: Array[String]) {
     val b = new Baz[Any]
     println(b.A.foo())
     println(Modifier.isFinal(classOf[Baz[Any]].getModifiers()))
     println(Modifier.isFinal(Test.getClass.getModifiers()))
   }
-  
+
 }
diff --git a/test/files/run/t5699.scala b/test/files/run/t5699.scala
index 5cef67e..ec3b1d2 100755
--- a/test/files/run/t5699.scala
+++ b/test/files/run/t5699.scala
@@ -1,5 +1,5 @@
 import scala.tools.partest.DirectTest
-import scala.tools.nsc.util.BatchSourceFile
+import scala.reflect.internal.util.BatchSourceFile
 
 object Test extends DirectTest {
   // Java code
diff --git a/test/files/run/t5704.scala b/test/files/run/t5704.scala
index ddcbcc2..495a82e 100644
--- a/test/files/run/t5704.scala
+++ b/test/files/run/t5704.scala
@@ -8,7 +8,7 @@ object Test extends App {
     def findUserByName( name:String ) = {
       val tree = reify{ "test" == name }.tree
       val toolbox = cm.mkToolBox()
-      toolbox.typeCheck(tree) match{
+      toolbox.typecheck(tree) match{
         case Apply(Select(lhs,op),rhs::Nil) =>
           println(rhs.tpe)
       }
diff --git a/test/files/run/t5713/Impls_Macros_1.scala b/test/files/run/t5713/Impls_Macros_1.scala
index 12c3da2..7b04197 100644
--- a/test/files/run/t5713/Impls_Macros_1.scala
+++ b/test/files/run/t5713/Impls_Macros_1.scala
@@ -1,7 +1,7 @@
 package m
 
 import language.experimental.macros
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Level extends Enumeration {
   val Error = Value(5)
diff --git a/test/files/run/t5717.scala b/test/files/run/t5717.scala
new file mode 100755
index 0000000..a0997f5
--- /dev/null
+++ b/test/files/run/t5717.scala
@@ -0,0 +1,21 @@
+import scala.tools.partest._
+import java.io.File
+
+object Test extends StoreReporterDirectTest {
+  def code = ???
+
+  def compileCode(code: String) = {
+    val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+    compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+  }
+  // TODO
+  // Don't assume output is on physical disk
+  // Let the compiler tell us output dir
+  // val sc = newCompiler("-cp", classpath, "-d", testOutput.path)
+  // val out = sc.settings.outputDirs.getSingleOutput.get
+  def show(): Unit = {
+    // Don't crash when we find a file 'a' where package 'a' should go.
+    scala.reflect.io.File(testOutput.path + "/a").writeAll("a")
+    compileCode("package a { class B }")
+  }
+}
diff --git a/test/files/run/t5753_1/Impls_Macros_1.scala b/test/files/run/t5753_1/Impls_Macros_1.scala
index 1664301..ce07138 100644
--- a/test/files/run/t5753_1/Impls_Macros_1.scala
+++ b/test/files/run/t5753_1/Impls_Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 import language.experimental.macros
 
 trait Impls {
diff --git a/test/files/run/t5753_1/Test_2.scala b/test/files/run/t5753_1/Test_2.scala
index a277763..864d356 100644
--- a/test/files/run/t5753_1/Test_2.scala
+++ b/test/files/run/t5753_1/Test_2.scala
@@ -1,4 +1,4 @@
-object Test extends App {  
+object Test extends App {
     import Macros._
     println(foo(42))
 }
\ No newline at end of file
diff --git a/test/files/run/t5753_2/Impls_Macros_1.scala b/test/files/run/t5753_2/Impls_Macros_1.scala
index e23c0b9..d446d37 100644
--- a/test/files/run/t5753_2/Impls_Macros_1.scala
+++ b/test/files/run/t5753_2/Impls_Macros_1.scala
@@ -1,7 +1,7 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 trait Macro_T {
- def foo[T](c: Ctx)(s: c.Expr[T]) = s 
+ def foo[T](c: Context)(s: c.Expr[T]) = s
 }
 
 object Macros {
diff --git a/test/files/run/t5753_2/Test_2.scala b/test/files/run/t5753_2/Test_2.scala
index a277763..864d356 100644
--- a/test/files/run/t5753_2/Test_2.scala
+++ b/test/files/run/t5753_2/Test_2.scala
@@ -1,4 +1,4 @@
-object Test extends App {  
+object Test extends App {
     import Macros._
     println(foo(42))
 }
\ No newline at end of file
diff --git a/test/files/run/t576.check b/test/files/run/t576.check
index 8a1218a..6458d5d 100644
--- a/test/files/run/t576.check
+++ b/test/files/run/t576.check
@@ -1,3 +1,4 @@
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
 1
 2
 3
diff --git a/test/files/run/t576.scala b/test/files/run/t576.scala
index 756a241..5c8c9a9 100644
--- a/test/files/run/t576.scala
+++ b/test/files/run/t576.scala
@@ -1,3 +1,5 @@
+import scala.language.reflectiveCalls
+
 class A {
   override def equals(other: Any) = other match {
     case _: this.type => true
diff --git a/test/files/run/t5789.check b/test/files/run/t5789.check
index ea8d496..bcb2382 100644
--- a/test/files/run/t5789.check
+++ b/test/files/run/t5789.check
@@ -1,14 +1,10 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
-scala>     val n = 2
+scala> val n = 2
 n: Int = 2
 
 scala>     () => n
 res0: () => Int = <function0>
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t5804.scala b/test/files/run/t5804.scala
index b96a736..b6a8940 100644
--- a/test/files/run/t5804.scala
+++ b/test/files/run/t5804.scala
@@ -4,29 +4,29 @@ import collection.mutable._
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     class CustomHashMap extends HashMap[Int, Int] {
       override def initialSize = 65
-      
+
       println(table.length)
     }
-    
+
     new CustomHashMap
     new HashMap {
       println(table.length)
     }
-    
+
     class CustomHashSet extends HashSet[Int] {
       override def initialSize = 96
-      
+
       println(table.length)
     }
-    
+
     new CustomHashSet
     new HashSet {
       println(table.length)
     }
   }
-  
+
 }
diff --git a/test/files/run/t5816.scala b/test/files/run/t5816.scala
index f0279e5..e8367ea 100644
--- a/test/files/run/t5816.scala
+++ b/test/files/run/t5816.scala
@@ -6,7 +6,7 @@ object Test extends App {
   val toolbox = cm.mkToolBox()
 
   def printSource[T](expr: Expr[T]) {
-    val ttree = toolbox typeCheck expr.tree
+    val ttree = toolbox typecheck expr.tree
     println(ttree.toString)
   }
 
diff --git a/test/files/run/t5843.check b/test/files/run/t5843.check
deleted file mode 100644
index 2bf97f4..0000000
--- a/test/files/run/t5843.check
+++ /dev/null
@@ -1,9 +0,0 @@
- foo="1"
- bar="2" foo="1"
-null
- bar="2"
- foo="1"
- bar="2"
- foo="1"
- bar="2" foo="1"
- bar="2" foo="1"
diff --git a/test/files/run/t5843.scala b/test/files/run/t5843.scala
deleted file mode 100644
index 43d588c..0000000
--- a/test/files/run/t5843.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-object Test extends App {
-  val foo = scala.xml.Attribute(null, "foo", "1", scala.xml.Null)
-  val bar = scala.xml.Attribute(null, "bar", "2", foo)
-  println(foo)
-  println(bar)
-  println(scala.xml.TopScope.getURI(foo.pre))
-  println(bar remove "foo")
-  println(bar remove "bar")
-  println(bar remove (null, scala.xml.TopScope, "foo"))
-  println(bar remove (null, scala.xml.TopScope, "bar"))
-
-  val ns = scala.xml.NamespaceBinding(null, "uri", scala.xml.TopScope)
-  println(bar remove (null, ns, "foo"))
-  println(bar remove (null, ns, "bar"))
-}
diff --git a/test/files/run/t5857.scala b/test/files/run/t5857.scala
index bf67bed..c82fd88 100644
--- a/test/files/run/t5857.scala
+++ b/test/files/run/t5857.scala
@@ -2,44 +2,44 @@
 
 
 object Test {
-  
+
   def time[U](b: =>U): Long = {
     val start = System.currentTimeMillis
     b
     val end = System.currentTimeMillis
-    
+
     end - start
   }
-  
+
   def main(args: Array[String]) {
     val sz = 1000000000
-    
+
     val range = 1 to sz
     check { assert(range.min == 1, range.min) }
     check { assert(range.max == sz, range.max) }
-    
+
     val descending = sz to 1 by -1
     check { assert(descending.min == 1) }
     check { assert(descending.max == sz) }
-    
+
     val numeric = 1.0 to sz.toDouble by 1
     check { assert(numeric.min == 1.0) }
     check { assert(numeric.max == sz.toDouble) }
-    
+
     val numdesc = sz.toDouble to 1.0 by -1
     check { assert(numdesc.min == 1.0) }
     check { assert(numdesc.max == sz.toDouble) }
   }
-  
+
   def check[U](b: =>U) {
     val exectime = time {
       b
     }
-    
+
     // whatever it is, it should be less than, say, 250ms
     // if `max` involves traversal, it takes over 5 seconds on a 3.2GHz i7 CPU
     //println(exectime)
     assert(exectime < 250, exectime)
   }
-  
+
 }
diff --git a/test/files/run/t5867.scala b/test/files/run/t5867.scala
index 6a86ac3..f7e9393 100644
--- a/test/files/run/t5867.scala
+++ b/test/files/run/t5867.scala
@@ -3,12 +3,12 @@ import collection.mutable.UnrolledBuffer
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     val buf = UnrolledBuffer(1 to 50: _*)
     val dub = buf ++ buf
-    
+
     println(dub)
   }
-  
+
 }
diff --git a/test/files/run/t5879.check b/test/files/run/t5879.check
index b6cbda3..4bdf3f5 100644
--- a/test/files/run/t5879.check
+++ b/test/files/run/t5879.check
@@ -1,16 +1,8 @@
 Map(1 -> 1)
 1
-Map(1 -> 1)
-1
-(1,1)
-Map(1 -> 1)
-1
 (1,1)
 Map(1 -> 1)
 1
 (1,2)
 Map(1 -> 2)
 2
-(1,2)
-Map(1 -> 2)
-2
\ No newline at end of file
diff --git a/test/files/run/t5879.scala b/test/files/run/t5879.scala
index e1c07fc..83a583d 100644
--- a/test/files/run/t5879.scala
+++ b/test/files/run/t5879.scala
@@ -2,27 +2,23 @@ import collection.immutable.HashMap
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     resolveDefault()
     resolveFirst()
     resolveSecond()
     resolveMany()
   }
-  
+
   def resolveDefault() {
     val a = HashMap(1 -> "1")
     val b = HashMap(1 -> "2")
-    
+
     val r = a.merged(b)(null)
     println(r)
     println(r(1))
-    
-    val rold = a.merge(b)
-    println(rold)
-    println(rold(1))
   }
-  
+
   def resolveFirst() {
     val a = HashMap(1 -> "1")
     val b = HashMap(1 -> "2")
@@ -30,16 +26,12 @@ object Test {
       println(a)
       a
     }
-    
+
     val r = a.merged(b) { collision }
     println(r)
     println(r(1))
-    
-    val rold = a.merge(b, collision)
-    println(rold)
-    println(rold(1))
   }
-  
+
   def resolveSecond() {
     val a = HashMap(1 -> "1")
     val b = HashMap(1 -> "2")
@@ -47,28 +39,21 @@ object Test {
       println(b)
       b
     }
-    
+
     val r = a.merged(b) { collision }
     println(r)
     println(r(1))
-    
-    val rold = a.merge(b, collision)
-    println(rold)
-    println(rold(1))
   }
-  
+
   def resolveMany() {
     val a = HashMap((0 until 100) zip (0 until 100): _*)
     val b = HashMap((0 until 100) zip (100 until 200): _*)
     def collision(a: (Int, Int), b: (Int, Int)) = {
       (a._1, a._2 + b._2)
     }
-    
+
     val r = a.merged(b) { collision }
     for ((k, v) <- r) assert(v == 100 + 2 * k, (k, v))
-    
-    val rold = a.merge(b, collision)
-    for ((k, v) <- r) assert(v == 100 + 2 * k, (k, v))
   }
-  
+
 }
diff --git a/test/files/run/t5880.scala b/test/files/run/t5880.scala
index 4cda599..f88df90 100644
--- a/test/files/run/t5880.scala
+++ b/test/files/run/t5880.scala
@@ -5,13 +5,13 @@ import scala.collection.JavaConversions._
 
 
 object Test {
-  
+
   def main(args:Array[String]) = {
     val tests = 5000
     val jm: java.util.Map[Int, Int] = scala.collection.mutable.Map((0 until tests) zip (0 until tests).reverse: _*)
     val es = jm.entrySet()
     val it = es.iterator
-    
+
     // chi square test
     val groups = 10
     val hits = new Array[Int](groups)
@@ -28,7 +28,7 @@ object Test {
       val diffs = for (i <- 0 until groups) yield (hits(i) - expected) * (hits(i) - expected)
       diffs.sum.toDouble / expected
     }
-    
+
     while (it.hasNext) {
       val x = it.next()
       hit(x.##)
@@ -37,5 +37,5 @@ object Test {
     // println(ChiSquare)
     assert(ChiSquare < 4.0, ChiSquare + " -> " + hits.mkString(", "))
   }
-  
+
 }
diff --git a/test/files/run/t5881.scala b/test/files/run/t5881.scala
index 01bee29..04b24b7 100644
--- a/test/files/run/t5881.scala
+++ b/test/files/run/t5881.scala
@@ -1,6 +1,7 @@
+import scala.language.existentials
 import scala.reflect.ClassTag
 
 object Test extends App {
   println(implicitly[ClassTag[List[T forSome {type T <: List[T]}]]])
   println(implicitly[ClassTag[List[Any]]])
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t5894.scala b/test/files/run/t5894.scala
index abeec32..6e118fd 100644
--- a/test/files/run/t5894.scala
+++ b/test/files/run/t5894.scala
@@ -1,10 +1,11 @@
-import language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+import scala.language.experimental.macros
 
 class Test
 
 object Test {
-  def foo = macro fooImpl
-  def fooImpl(c: reflect.macros.Context) = c.literalUnit
+  def foo: Unit = macro fooImpl
+  def fooImpl(c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
 
   def main(args: Array[String]) {
     try {
diff --git a/test/files/run/t5903a.check b/test/files/run/t5903a.check
new file mode 100644
index 0000000..ce6efd8
--- /dev/null
+++ b/test/files/run/t5903a.check
@@ -0,0 +1 @@
+(SomeTree,SomeTree)
diff --git a/test/files/run/t5903a.flags b/test/files/run/t5903a.flags
new file mode 100644
index 0000000..02ecab4
--- /dev/null
+++ b/test/files/run/t5903a.flags
@@ -0,0 +1 @@
+-Xlog-reflective-calls
\ No newline at end of file
diff --git a/test/files/run/t5903a/Macros_1.scala b/test/files/run/t5903a/Macros_1.scala
new file mode 100644
index 0000000..5d084ce
--- /dev/null
+++ b/test/files/run/t5903a/Macros_1.scala
@@ -0,0 +1,28 @@
+import scala.reflect.macros.whitebox.Context
+import language.experimental.macros
+
+trait Tree
+case object SomeTree extends Tree
+
+object NewQuasiquotes {
+  implicit class QuasiquoteInterpolation(c: StringContext) {
+    object nq {
+      def unapply(t: Tree): Any = macro QuasiquoteMacros.unapplyImpl
+    }
+  }
+}
+
+object QuasiquoteMacros {
+  def unapplyImpl(c: Context)(t: c.Tree) = {
+    import c.universe._
+    q"""
+      new {
+        def isEmpty = false
+        def get = this
+        def _1 = SomeTree
+        def _2 = SomeTree
+        def unapply(t: Tree) = this
+      }.unapply($t)
+    """
+  }
+}
diff --git a/test/files/run/t5903a/Test_2.scala b/test/files/run/t5903a/Test_2.scala
new file mode 100644
index 0000000..3a0b68b
--- /dev/null
+++ b/test/files/run/t5903a/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+  import NewQuasiquotes._
+  SomeTree match {
+    case nq"$x + $y" => println((x, y))
+  }
+}
diff --git a/test/files/run/t5903b.check b/test/files/run/t5903b.check
new file mode 100644
index 0000000..75891bc
--- /dev/null
+++ b/test/files/run/t5903b.check
@@ -0,0 +1 @@
+oops
diff --git a/test/files/run/t5903b.flags b/test/files/run/t5903b.flags
new file mode 100644
index 0000000..02ecab4
--- /dev/null
+++ b/test/files/run/t5903b.flags
@@ -0,0 +1 @@
+-Xlog-reflective-calls
\ No newline at end of file
diff --git a/test/files/run/t5903b/Macros_1.scala b/test/files/run/t5903b/Macros_1.scala
new file mode 100644
index 0000000..29a05f7
--- /dev/null
+++ b/test/files/run/t5903b/Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.whitebox.Context
+import language.experimental.macros
+
+object Interpolation {
+  implicit class TestInterpolation(c: StringContext) {
+    object t {
+      def unapply[T](x: T): Any = macro Macros.unapplyImpl[T]
+    }
+  }
+}
+
+object Macros {
+  def unapplyImpl[T: c.WeakTypeTag](c: Context)(x: c.Tree) = {
+    import c.universe._
+    q"""
+      new {
+        def isEmpty = false
+        def get = this
+        def _1 = 2
+        def unapply(x: Int) = this
+        override def toString = "oops"
+      }.unapply($x)
+    """
+  }
+}
diff --git a/test/files/run/t5903b/Test_2.scala b/test/files/run/t5903b/Test_2.scala
new file mode 100644
index 0000000..0f6f80d
--- /dev/null
+++ b/test/files/run/t5903b/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+  import Interpolation._
+  2 match {
+    case t"$x" => println(x)
+  }
+}
diff --git a/test/files/run/t6956.check b/test/files/run/t5903c.check
similarity index 100%
rename from test/files/run/t6956.check
rename to test/files/run/t5903c.check
diff --git a/test/files/run/t5903c.flags b/test/files/run/t5903c.flags
new file mode 100644
index 0000000..02ecab4
--- /dev/null
+++ b/test/files/run/t5903c.flags
@@ -0,0 +1 @@
+-Xlog-reflective-calls
\ No newline at end of file
diff --git a/test/files/run/t5903c/Macros_1.scala b/test/files/run/t5903c/Macros_1.scala
new file mode 100644
index 0000000..34fe1d8
--- /dev/null
+++ b/test/files/run/t5903c/Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.whitebox.Context
+import language.experimental.macros
+
+object Interpolation {
+  implicit class TestInterpolation(c: StringContext) {
+    object t {
+      def unapply[T](x: T): Any = macro Macros.unapplyImpl[T]
+    }
+  }
+}
+
+object Macros {
+  def unapplyImpl[T: c.WeakTypeTag](c: Context)(x: c.Tree) = {
+    import c.universe._
+    q"""
+      new {
+        def isEmpty = false
+        def get = 2
+        def unapply(x: Int) = this
+      }.unapply($x)
+    """
+  }
+}
diff --git a/test/files/run/t5903c/Test_2.scala b/test/files/run/t5903c/Test_2.scala
new file mode 100644
index 0000000..0f6f80d
--- /dev/null
+++ b/test/files/run/t5903c/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+  import Interpolation._
+  2 match {
+    case t"$x" => println(x)
+  }
+}
diff --git a/test/files/run/t5903d.check b/test/files/run/t5903d.check
new file mode 100644
index 0000000..d81cc07
--- /dev/null
+++ b/test/files/run/t5903d.check
@@ -0,0 +1 @@
+42
diff --git a/test/files/run/t5903d.flags b/test/files/run/t5903d.flags
new file mode 100644
index 0000000..02ecab4
--- /dev/null
+++ b/test/files/run/t5903d.flags
@@ -0,0 +1 @@
+-Xlog-reflective-calls
\ No newline at end of file
diff --git a/test/files/run/t5903d/Macros_1.scala b/test/files/run/t5903d/Macros_1.scala
new file mode 100644
index 0000000..f1f8dc1
--- /dev/null
+++ b/test/files/run/t5903d/Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.whitebox.Context
+import language.experimental.macros
+
+object Interpolation {
+  implicit class TestInterpolation(c: StringContext) {
+    object t {
+      def unapply(x: Int): Any = macro Macros.unapplyImpl
+    }
+  }
+}
+
+object Macros {
+  def unapplyImpl(c: Context)(x: c.Tree) = {
+    import c.universe._
+    q"""
+      new {
+        class Match(x: Int) {
+          def isEmpty = false
+          def get = x
+        }
+        def unapply(x: Int) = new Match(x)
+      }.unapply($x)
+    """
+  }
+}
diff --git a/test/files/run/t5903d/Test_2.scala b/test/files/run/t5903d/Test_2.scala
new file mode 100644
index 0000000..95c717a
--- /dev/null
+++ b/test/files/run/t5903d/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+  import Interpolation._
+  42 match {
+    case t"$x" => println(x)
+  }
+}
diff --git a/test/files/run/t5912.scala b/test/files/run/t5912.scala
index 7710d04..fc879a0 100644
--- a/test/files/run/t5912.scala
+++ b/test/files/run/t5912.scala
@@ -1,6 +1,7 @@
+import scala.language.existentials
 object Test extends App{
   import scala.reflect.runtime.{currentMirror=>cm}
   import scala.tools.reflect._
   import scala.reflect.runtime.universe._
-  val tree = cm.mkToolBox().typeCheck( Literal(Constant("test")) )
-}
\ No newline at end of file
+  val tree = cm.mkToolBox().typecheck( Literal(Constant("test")) )
+}
diff --git a/test/files/run/t5923a/Macros_1.scala b/test/files/run/t5923a/Macros_1.scala
index 6d21362..9050fd4 100644
--- a/test/files/run/t5923a/Macros_1.scala
+++ b/test/files/run/t5923a/Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.whitebox.Context
 import language.experimental.macros
 
 case class C[T](t: String)
@@ -7,8 +7,47 @@ object C {
 }
 
 object Macros {
-  def impl[T: c.WeakTypeTag](c: Context) = {
+  def impl[T](c: Context)(ttag: c.WeakTypeTag[T]) = {
     import c.universe._
-    reify(C[T](c.literal(weakTypeOf[T].toString).splice))
+    import internal._
+    val ttag0 = ttag;
+    {
+      // When we're expanding implicitly[C[Nothing]], the type inferencer will see
+      // that foo[T] returns C[T] and that we request an implicit of type C[Nothing].
+      //
+      // Then the type inferencer will try to match C[T] against C[Nothing] and infer everything it can infer
+      // from that match, but not more (e.g. if we were returning Iso[T, U] and the type we were looking at was Iso[Foo, L],
+      // we wouldn't want U to be auto-inferred to Nothing, as it usually happens with normal methods,
+      // but would rather want it to remain unknown, so that our macro could take a stab at inferring it:
+      // see the comments in this commit for more information).
+      //
+      // Equipped with common sense, in our case of C[T] and C[Nothing] we would expect T to be inferred as Nothing, and then we
+      // would expect T in the corresponding macro invocation to be Nothing. Unfortunately it is not that simple.
+      //
+      // Internally the type inferencer uses Nothing as a dummy value, which stands for "don't know how to
+      // infer this type parameter". In the Iso example, matching Iso[T, U] against Iso[Foo, L] would result in
+      // T being inferred as Foo and U being inferred as Nothing (!!). Then the type inferencer will think:
+      // "Aha! U ended up being Nothing. This means that I failed to infer it,
+      // therefore the result of my work is: T -> Foo, U -> still unknown".
+      //
+      // That's all very good and works very well until Nothing is a genuine result of type inference,
+      // as in our original example of inferring T in C[T] from C[Nothing]. In that case, the inferencer becomes confused
+      // and here in the macro implementation we get weakTypeOf[T] equal to some dummy type carrying a type parameter
+      // instead of Nothing.
+      //
+      // This eccentric behavior of the type inferencer is a long-standing problem in scalac,
+      // so the best one can do for now until it's fixed is to work around, manually converting
+      // suspicious T's into Nothings. Of course, this means that we would have to approximate,
+      // because there's no way to know whether having T here stands for a failed attempt to infer Nothing
+      // or for a failed attempt to infer anything, but at least we're in full control of making the best
+      // of this sad situation.
+      implicit def ttag: WeakTypeTag[T] = {
+        val tpe = ttag0.tpe
+        val sym = tpe.typeSymbol.asType
+        if (sym.isParameter && !isSkolem(sym)) TypeTag.Nothing.asInstanceOf[TypeTag[T]]
+        else ttag0
+      }
+      reify(C[T](c.Expr[String](Literal(Constant(weakTypeOf[T].toString))).splice))
+    }
   }
 }
\ No newline at end of file
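
The long comment added above explains why the macro normalizes an un-inferred T to Nothing before materializing C[T]. As a rough illustration only (a hypothetical call site, not part of this patch, assuming the implicit materializer foo[T]: C[T] in C's companion that the comment refers to), the situation it guards against looks like this:

  object Demo extends App {
    // Requesting C[Nothing] makes the inferencer match C[T] against C[Nothing];
    // internally it uses Nothing as a "could not infer" marker, so without the
    // workaround the macro would see a leftover type parameter instead of Nothing.
    val c = implicitly[C[Nothing]]
    // With the normalization above, weakTypeOf[T] should render as "Nothing".
    println(c.t)
  }
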
diff --git a/test/files/run/t5923c.scala b/test/files/run/t5923c.scala
new file mode 100644
index 0000000..956b256
--- /dev/null
+++ b/test/files/run/t5923c.scala
@@ -0,0 +1,4 @@
+// see neg/macro-blackbox-fundep-materialization and run/macro-whitebox-fundep-materialization
+object Test extends App {
+  // do nothing
+}
\ No newline at end of file
diff --git a/test/files/run/t5923d/Macros_1.scala b/test/files/run/t5923d/Macros_1.scala
new file mode 100644
index 0000000..1400674
--- /dev/null
+++ b/test/files/run/t5923d/Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+trait MappedRow
+trait RowMapper[T <: MappedRow]
+object RowMapper {
+  implicit def mapper[T <: MappedRow]: RowMapper[T] = macro impl[T]
+  def impl[T <: MappedRow : c.WeakTypeTag](c: Context) = c.universe.reify(new RowMapper[T]{})
+}
\ No newline at end of file
diff --git a/test/files/run/t5923d/Test_2.scala b/test/files/run/t5923d/Test_2.scala
new file mode 100644
index 0000000..6be1022
--- /dev/null
+++ b/test/files/run/t5923d/Test_2.scala
@@ -0,0 +1,7 @@
+class RowA extends MappedRow
+class RowB extends MappedRow
+
+object Test extends App {
+  implicitly[RowMapper[RowA]]
+  implicitly[RowMapper[RowB]]
+}
\ No newline at end of file
diff --git a/test/files/run/t5937.scala b/test/files/run/t5937.scala
index e5bf661..9ec4ff1 100644
--- a/test/files/run/t5937.scala
+++ b/test/files/run/t5937.scala
@@ -6,7 +6,7 @@ import collection._
 
 
 object Test extends App {
-  
+
   val list: List[Int] = (immutable.Vector(1, 2, 3) :+ 4)(breakOut)
-  
+
 }
diff --git a/test/files/run/t5940.scala b/test/files/run/t5940.scala
index 147ff38..7b025b4 100644
--- a/test/files/run/t5940.scala
+++ b/test/files/run/t5940.scala
@@ -4,16 +4,16 @@ object Test extends DirectTest {
   def code = ???
 
   def macros_1 = """
-    import scala.reflect.macros.Context
+    import scala.reflect.macros.blackbox.Context
 
     object Impls {
-      def impl(c: Context) = c.literalUnit
+      def impl(c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
     }
 
     object Macros {
       //import Impls._
-      def impl(c: Context) = c.literalUnit
-      def foo = macro impl
+      def impl(c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
+      def foo: Unit = macro impl
     }
   """
   def compileMacros() = {
diff --git a/test/files/run/t5942.check b/test/files/run/t5942.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t5942.scala b/test/files/run/t5942.scala
index 44a8be9..c90d29e 100644
--- a/test/files/run/t5942.scala
+++ b/test/files/run/t5942.scala
@@ -5,6 +5,6 @@ import scala.tools.reflect._
 object Test extends App {
   val tb = cm.mkToolBox()
   tb.parse("def x = {}")
-  try { tb.parse("def x = {") } catch { case _ => }
+  try { tb.parse("def x = {") } catch { case _: Throwable => }
   tb.parse("def x = {}")
 }
diff --git a/test/files/run/t5943a1.scala b/test/files/run/t5943a1.scala
index 00f4afa..6bb828d 100644
--- a/test/files/run/t5943a1.scala
+++ b/test/files/run/t5943a1.scala
@@ -5,5 +5,5 @@ import scala.tools.reflect.ToolBox
 object Test extends App {
   val tb = cm.mkToolBox()
   val expr = tb.parse("1 to 3 map (_+1)")
-  println(tb.typeCheck(expr))
+  println(tb.typecheck(expr))
 }
\ No newline at end of file
diff --git a/test/files/run/t5971.scala b/test/files/run/t5971.scala
index dbd9bee..bc24255 100644
--- a/test/files/run/t5971.scala
+++ b/test/files/run/t5971.scala
@@ -12,12 +12,12 @@
  *  Mind blowing, I know.
  */
 object Test {
-  
+
   def main(args: Array[String]) {
     println("bar".view.reverse.filter(_ > 'a').mkString(","))
     println("bar".view.reverse.take(1).mkString(","))
     println("bar".view.reverse.dropWhile(_ > 'a').mkString(","))
     println("bar".view.reverse.takeWhile(_ => true).mkString(","))
   }
-  
+
 }
diff --git a/test/files/run/t5986.scala b/test/files/run/t5986.scala
index 8cf7086..b9c21a7 100644
--- a/test/files/run/t5986.scala
+++ b/test/files/run/t5986.scala
@@ -9,22 +9,22 @@ import scala.collection._
  *  and the element already exists in the set.
  */
 object Test {
-  
+
   class Foo(val name: String, val n: Int) {
     override def equals(obj: Any): Boolean = obj match { case other: Foo => name == other.name; case _ => false }
     override def hashCode = name.##
     override def toString = "Foo(" + name + ", " + n + ")"
   }
-  
+
   implicit val ordering: Ordering[Foo] = Ordering.fromLessThan[Foo] { (a, b) => a.name.compareTo(b.name) < 0 }
-  
+
   def check[S <: Set[Foo]](set: S) {
     def output(s: Set[Foo]) = println(s.toList.sorted.mkString(","))
     output(set + new Foo("bar", 2))
     output(set ++ List(new Foo("bar", 2), new Foo("bar", 3), new Foo("bar", 4)))
     output(set union Set(new Foo("bar", 2), new Foo("baz", 3), new Foo("bazz", 4)))
   }
-  
+
   def main(args: Array[String]) {
     check(Set(new Foo("bar", 1)))
     check(immutable.Set(new Foo("bar", 1)))
@@ -32,5 +32,5 @@ object Test {
     check(immutable.SortedSet(new Foo("bar", 1)))
     check(mutable.SortedSet(new Foo("bar", 1)))
   }
-  
+
 }
diff --git a/test/files/run/t6011c.check b/test/files/run/t6011c.check
new file mode 100644
index 0000000..088e6fd
--- /dev/null
+++ b/test/files/run/t6011c.check
@@ -0,0 +1,3 @@
+t6011c.scala:11: warning: unreachable code
+    case 1 => 3 // crash
+              ^
diff --git a/test/files/run/t6023.scala b/test/files/run/t6023.scala
index 07af368..2753b93 100644
--- a/test/files/run/t6023.scala
+++ b/test/files/run/t6023.scala
@@ -9,7 +9,7 @@ object Test extends App {
 
   // test 2: import and typecheck
   val toolbox = cm.mkToolBox()
-  val ttree = toolbox.typeCheck(tree)
+  val ttree = toolbox.typecheck(tree)
   println(ttree.toString)
 
   // test 3: import and compile
diff --git a/test/files/run/t6028.check b/test/files/run/t6028.check
index 613d25b..a6c4db8 100644
--- a/test/files/run/t6028.check
+++ b/test/files/run/t6028.check
@@ -1,7 +1,7 @@
 [[syntax trees at end of                lambdalift]] // newSource1.scala
 package <empty> {
   class T extends Object {
-    <paramaccessor> val T$$classParam: Int = _;
+    <paramaccessor> val classParam: Int = _;
     def <init>(classParam: Int): T = {
       T.super.<init>();
       ()
@@ -11,34 +11,34 @@ package <empty> {
     def foo(methodParam: Int): Function0 = {
       val methodLocal: Int = 0;
       {
-        (new anonymous class $anonfun$foo$1(T.this, methodParam, methodLocal): Function0)
+        (new <$anon: Function0>(T.this, methodParam, methodLocal): Function0)
       }
     };
     def bar(barParam: Int): Object = {
-      @volatile var MethodLocalObject$module: runtime.VolatileObjectRef = new runtime.VolatileObjectRef(null);
+      @volatile var MethodLocalObject$module: runtime.VolatileObjectRef = scala.runtime.VolatileObjectRef.zero();
       T.this.MethodLocalObject$1(barParam, MethodLocalObject$module)
     };
     def tryy(tryyParam: Int): Function0 = {
-      var tryyLocal: runtime.IntRef = new runtime.IntRef(0);
+      var tryyLocal: runtime.IntRef = scala.runtime.IntRef.create(0);
       {
-        (new anonymous class $anonfun$tryy$1(T.this, tryyParam, tryyLocal): Function0)
+        (new <$anon: Function0>(T.this, tryyParam, tryyLocal): Function0)
       }
     };
-    @SerialVersionUID(0) final <synthetic> class $anonfun$foo$1 extends runtime.AbstractFunction0$mcI$sp with Serializable {
-      def <init>($outer: T, methodParam$1: Int, methodLocal$1: Int): anonymous class $anonfun$foo$1 = {
+    @SerialVersionUID(0) final <synthetic> class $anonfun$foo$1 extends scala.runtime.AbstractFunction0$mcI$sp with Serializable {
+      def <init>($outer: T, methodParam$1: Int, methodLocal$1: Int): <$anon: Function0> = {
         $anonfun$foo$1.super.<init>();
         ()
       };
       final def apply(): Int = $anonfun$foo$1.this.apply$mcI$sp();
-      <specialized> def apply$mcI$sp(): Int = $anonfun$foo$1.this.$outer.T$$classParam.+($anonfun$foo$1.this.$outer.field()).+($anonfun$foo$1.this.methodParam$1).+($anonfun$foo$1.this.methodLocal$1);
-      <synthetic> <paramaccessor> private[this] val $outer: T = _;
-      <synthetic> <stable> def T$$anonfun$$$outer(): T = $anonfun$foo$1.this.$outer;
-      final <bridge> def apply(): Object = scala.Int.box($anonfun$foo$1.this.apply());
+      <specialized> def apply$mcI$sp(): Int = $anonfun$foo$1.this.$outer.classParam.+($anonfun$foo$1.this.$outer.field()).+($anonfun$foo$1.this.methodParam$1).+($anonfun$foo$1.this.methodLocal$1);
+      <synthetic> <paramaccessor> <artifact> private[this] val $outer: T = _;
+      <synthetic> <stable> <artifact> def $outer(): T = $anonfun$foo$1.this.$outer;
+      final <bridge> <artifact> def apply(): Object = scala.Int.box($anonfun$foo$1.this.apply());
       <synthetic> <paramaccessor> private[this] val methodParam$1: Int = _;
       <synthetic> <paramaccessor> private[this] val methodLocal$1: Int = _
     };
     abstract trait MethodLocalTrait$1 extends Object {
-      <synthetic> <stable> def T$MethodLocalTrait$$$outer(): T
+      <synthetic> <stable> <artifact> def $outer(): T
     };
     object MethodLocalObject$2 extends Object with T#MethodLocalTrait$1 {
       def <init>($outer: T, barParam$1: Int): T#MethodLocalObject$2.type = {
@@ -46,9 +46,9 @@ package <empty> {
         MethodLocalObject$2.this.$asInstanceOf[T#MethodLocalTrait$1$class]()./*MethodLocalTrait$1$class*/$init$(barParam$1);
         ()
       };
-      <synthetic> <paramaccessor> private[this] val $outer: T = _;
-      <synthetic> <stable> def T$MethodLocalObject$$$outer(): T = MethodLocalObject$2.this.$outer;
-      <synthetic> <stable> def T$MethodLocalTrait$$$outer(): T = MethodLocalObject$2.this.$outer
+      <synthetic> <paramaccessor> <artifact> private[this] val $outer: T = _;
+      <synthetic> <stable> <artifact> def $outer(): T = MethodLocalObject$2.this.$outer;
+      <synthetic> <stable> <artifact> def $outer(): T = MethodLocalObject$2.this.$outer
     };
     final <stable> private[this] def MethodLocalObject$1(barParam$1: Int, MethodLocalObject$module$1: runtime.VolatileObjectRef): T#MethodLocalObject$2.type = {
       MethodLocalObject$module$1.elem = new T#MethodLocalObject$2.type(T.this, barParam$1);
@@ -60,8 +60,8 @@ package <empty> {
       };
       scala.this.Predef.print(scala.Int.box(barParam$1))
     };
-    @SerialVersionUID(0) final <synthetic> class $anonfun$tryy$1 extends runtime.AbstractFunction0$mcV$sp with Serializable {
-      def <init>($outer: T, tryyParam$1: Int, tryyLocal$1: runtime.IntRef): anonymous class $anonfun$tryy$1 = {
+    @SerialVersionUID(0) final <synthetic> class $anonfun$tryy$1 extends scala.runtime.AbstractFunction0$mcV$sp with Serializable {
+      def <init>($outer: T, tryyParam$1: Int, tryyLocal$1: runtime.IntRef): <$anon: Function0> = {
         $anonfun$tryy$1.super.<init>();
         ()
       };
@@ -69,9 +69,9 @@ package <empty> {
       <specialized> def apply$mcV$sp(): Unit = try {
         $anonfun$tryy$1.this.tryyLocal$1.elem = $anonfun$tryy$1.this.tryyParam$1
       } finally ();
-      <synthetic> <paramaccessor> private[this] val $outer: T = _;
-      <synthetic> <stable> def T$$anonfun$$$outer(): T = $anonfun$tryy$1.this.$outer;
-      final <bridge> def apply(): Object = {
+      <synthetic> <paramaccessor> <artifact> private[this] val $outer: T = _;
+      <synthetic> <stable> <artifact> def $outer(): T = $anonfun$tryy$1.this.$outer;
+      final <bridge> <artifact> def apply(): Object = {
         $anonfun$tryy$1.this.apply();
         scala.runtime.BoxedUnit.UNIT
       };
diff --git a/test/files/run/t6028.scala b/test/files/run/t6028.scala
index cab1753..a6f920c 100644
--- a/test/files/run/t6028.scala
+++ b/test/files/run/t6028.scala
@@ -3,7 +3,7 @@ import java.io.{Console => _, _}
 
 object Test extends DirectTest {
 
-  override def extraSettings: String = "-usejavacp -Xprint:lambdalift -d " + testOutput.path
+  override def extraSettings: String = "-usejavacp -Ydelambdafy:inline -Xprint:lambdalift -d " + testOutput.path
 
   override def code = """class T(classParam: Int) {
                         |  val field: Int = 0
diff --git a/test/files/run/t603.scala b/test/files/run/t603.scala
index b8825c9..84a224a 100644
--- a/test/files/run/t603.scala
+++ b/test/files/run/t603.scala
@@ -1,4 +1,6 @@
 object forceDelay {
+  import scala.language.implicitConversions
+
   class Susp[+A](lazyValue: => A) extends Function0[A] {
     private var func: () => Any = () => lazyValue
     private var value: Any = null
@@ -22,7 +24,7 @@ object forceDelay {
 
 object Test {
   import forceDelay._
-  
+
   def main(args: Array[String]) = {
     val s: Susp[Int] = delay { Console.println("evaluating..."); 3 }
     Console.println("s = " + s)
diff --git a/test/files/run/t6052.scala b/test/files/run/t6052.scala
index 385d539..5482cfb 100644
--- a/test/files/run/t6052.scala
+++ b/test/files/run/t6052.scala
@@ -8,13 +8,13 @@
 object Test extends App {
   def seqarr(i: Int) = Array[Int]() ++ (0 until i)
   def pararr(i: Int) = seqarr(i).par
-  
+
   def check[T](i: Int, f: Int => T) {
     val gseq = seqarr(i).toSeq.groupBy(f)
     val gpar = pararr(i).groupBy(f)
     assert(gseq == gpar, (gseq, gpar))
   }
-  
+
   for (i <- 0 until 20) check(i, _ > 0)
   for (i <- 0 until 20) check(i, _ % 2)
   for (i <- 0 until 20) check(i, _ % 4)
diff --git a/test/files/run/t6064.scala b/test/files/run/t6064.scala
new file mode 100644
index 0000000..fc184dd
--- /dev/null
+++ b/test/files/run/t6064.scala
@@ -0,0 +1,9 @@
+object Test extends App {
+  assert(Option(42) contains 42)
+  assert(Some(42) contains 42)
+  assert(Option(BigInt(42)) contains 42)
+  assert(Option(42) contains BigInt(42))
+  assert(!(None contains 42))
+  assert(Some(null) contains null)
+  assert(!(Option(null) contains null))
+}
\ No newline at end of file
diff --git a/test/files/run/t6070.scala b/test/files/run/t6070.scala
index b6af48e..434949f 100644
--- a/test/files/run/t6070.scala
+++ b/test/files/run/t6070.scala
@@ -11,7 +11,7 @@ class StringBomb extends Bomb {
   def size(that: String): Int = that.length
 }
 
-class IntBomb extends Bomb { 
+class IntBomb extends Bomb {
   type T = Int
   val x = 10
 
@@ -22,7 +22,7 @@ case class Mean(var bomb: Bomb)
 
 object Test extends App {
   def foo(x: Mean) = x match {
-    case Mean(b) => 
+    case Mean(b) =>
       // BUG: b is assumed to be a stable identifier, but it can actually be mutated
       println(b.size({ mutate(); b.x }))
   }
diff --git a/test/files/run/t6086-repl.check b/test/files/run/t6086-repl.check
index 97f20bd..115eff5 100644
--- a/test/files/run/t6086-repl.check
+++ b/test/files/run/t6086-repl.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> case class X(s: String)
 defined class X
 
diff --git a/test/files/run/t6102.check b/test/files/run/t6102.check
index b6fc4c6..aa3e6cc 100644
--- a/test/files/run/t6102.check
+++ b/test/files/run/t6102.check
@@ -1 +1,32 @@
-hello
\ No newline at end of file
+[running phase parser on t6102.scala]
+[running phase namer on t6102.scala]
+[running phase packageobjects on t6102.scala]
+[running phase typer on t6102.scala]
+[running phase patmat on t6102.scala]
+[running phase superaccessors on t6102.scala]
+[running phase extmethods on t6102.scala]
+[running phase pickler on t6102.scala]
+[running phase refchecks on t6102.scala]
+[running phase uncurry on t6102.scala]
+[running phase tailcalls on t6102.scala]
+[running phase specialize on t6102.scala]
+[running phase explicitouter on t6102.scala]
+[running phase erasure on t6102.scala]
+[running phase posterasure on t6102.scala]
+[running phase lazyvals on t6102.scala]
+[running phase lambdalift on t6102.scala]
+[running phase constructors on t6102.scala]
+[running phase flatten on t6102.scala]
+[running phase mixin on t6102.scala]
+[running phase cleanup on t6102.scala]
+[running phase delambdafy on t6102.scala]
+[running phase icode on t6102.scala]
+#partest -optimise
+[running phase inliner on t6102.scala]
+[running phase inlinehandlers on t6102.scala]
+[running phase closelim on t6102.scala]
+[running phase constopt on t6102.scala]
+#partest
+[running phase dce on t6102.scala]
+[running phase jvm on icode]
+hello
diff --git a/test/files/run/t6102.flags b/test/files/run/t6102.flags
index e35535c..726e2a9 100644
--- a/test/files/run/t6102.flags
+++ b/test/files/run/t6102.flags
@@ -1 +1 @@
- -Ydead-code 
+-Ydead-code -Ydebug -Xfatal-warnings
diff --git a/test/files/run/t6111.check b/test/files/run/t6111.check
index 7fd2e33..1f23a87 100644
--- a/test/files/run/t6111.check
+++ b/test/files/run/t6111.check
@@ -1,2 +1,3 @@
+warning: there were 2 deprecation warning(s); re-run with -deprecation for details
 (8,8)
 (x,x)
diff --git a/test/files/run/t6111.scala b/test/files/run/t6111.scala
index 7cceea1..c0bcf17 100644
--- a/test/files/run/t6111.scala
+++ b/test/files/run/t6111.scala
@@ -1,3 +1,5 @@
+// SI-6675 DEPRECATED AUTO-TUPLING BECAUSE BAD IDEA -- MEAMAXIMACULPA
+// TODO: remove this test case in 2.12, when the deprecation will go into effect and this will no longer compile
 // slightly overkill, but a good test case for implicit resolution in extractor calls,
 // along with the real fix: an extractor pattern with 1 sub-pattern should type check for all extractors
 // that return Option[T], whatever T (even if it's a tuple)
diff --git a/test/files/run/t6113.scala b/test/files/run/t6113.scala
index 321cae8..b77a560 100644
--- a/test/files/run/t6113.scala
+++ b/test/files/run/t6113.scala
@@ -1,6 +1,8 @@
+import scala.language.higherKinds
+
 trait Foo[C[_]]
 
 object Test extends App {
   import scala.reflect.runtime.universe._
   println(typeOf[Foo[({type l[X] = (Int, X)})#l]])
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t6146b.check b/test/files/run/t6146b.check
index 49ff706..a3b09ef 100644
--- a/test/files/run/t6146b.check
+++ b/test/files/run/t6146b.check
@@ -1,3 +1,7 @@
+t6146b.scala:15: warning: match may not be exhaustive.
+It would fail on the following inputs: S2(), S3()
+  def foo(f: F[Int]) = f match { case X.S1 => }
+                       ^
 Type in expressions to have them evaluated.
 Type :help for more information.
 
@@ -41,7 +45,7 @@ scala> val mt1 = memType(S1, fTpe)
 mt1: u.Type = O.X.S1.type
 
 scala> global.typeDeconstruct.show(mt1)
-res0: String = 
+res0: String =
 TypeRef(
   pre = SingleType(pre = ThisType(object O), object X)
   TypeSymbol(class S1 extends C.this.F[T])
diff --git a/test/files/run/t6150.scala b/test/files/run/t6150.scala
index bd8af5d..f3e83e1 100644
--- a/test/files/run/t6150.scala
+++ b/test/files/run/t6150.scala
@@ -1,7 +1,3 @@
-
-
-
-
 object Test {
   import collection.{ immutable, mutable, generic }
   def TheOneTrueCBF = collection.IndexedSeq.ReusableCBF
@@ -38,7 +34,3 @@ object Test {
     check(iv.:+(4)(cbf3))
   }
 }
-
-
-
-
diff --git a/test/files/run/t6168/Context.java b/test/files/run/t6168/Context.java
new file mode 100644
index 0000000..d0fb5d2
--- /dev/null
+++ b/test/files/run/t6168/Context.java
@@ -0,0 +1,34 @@
+public class Context<ParentType> {
+    private ParentType parent;
+
+    public Context() {}
+
+    public ParentType getParent() {
+        return parent;
+    }
+
+    public void setParent(ParentType parent) {
+         this.parent = parent;
+    }
+
+    public Field<Integer> intField() {
+        return new Field<Integer>() {
+            @Override
+            public Integer get() {
+                return 0;
+            }
+
+            @Override
+            public ParentType set(Integer t) {
+                return parent;
+            }
+        };
+    }
+
+    public abstract class Field<T> { //Note this is a path dependent type
+
+        public abstract T get();
+
+        public abstract ParentType set(T t);
+    }
+}
\ No newline at end of file
diff --git a/test/files/run/t6168/JavaTest.java b/test/files/run/t6168/JavaTest.java
new file mode 100644
index 0000000..94ae916
--- /dev/null
+++ b/test/files/run/t6168/JavaTest.java
@@ -0,0 +1,8 @@
+public class JavaTest {
+  public static void main(String[] args) {
+    SomeClass a = new SomeClass();
+    SomeClass2 a2 = new SomeClass2();
+    SomeClass b = a.f.set(23).f.set(23);
+    SomeClass2 b2 = a2.f.set(23).f.set(23);
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/t6168/SomeClass.java b/test/files/run/t6168/SomeClass.java
new file mode 100644
index 0000000..6f76b82
--- /dev/null
+++ b/test/files/run/t6168/SomeClass.java
@@ -0,0 +1,14 @@
+public class SomeClass {
+    private final Context<SomeClass> context = new Context<SomeClass>();
+    {
+        context.setParent(this);
+    }
+
+    public final Context<SomeClass>.Field<Integer> f = context.intField();
+
+    public SomeClass() {
+        f.set(23).f.set(23);
+    }
+}
+
+
diff --git a/test/files/run/t6168/SomeClass2.java b/test/files/run/t6168/SomeClass2.java
new file mode 100644
index 0000000..b2c7a75
--- /dev/null
+++ b/test/files/run/t6168/SomeClass2.java
@@ -0,0 +1,12 @@
+public class SomeClass2 {
+    private final Context<SomeClass2> context = new Context<SomeClass2>();
+    {
+        context.setParent(this);
+    }
+
+    public final Context<SomeClass2>.Field<Integer> f = context.intField();
+
+    public SomeClass2() {
+        f.set(23).f.set(23);
+    }
+}
\ No newline at end of file
diff --git a/test/files/run/t6168/main.scala b/test/files/run/t6168/main.scala
new file mode 100644
index 0000000..c7ad378
--- /dev/null
+++ b/test/files/run/t6168/main.scala
@@ -0,0 +1,15 @@
+
+
+object Test extends App {
+  JavaTest.main(null)
+
+  var a1 : SomeClass = new SomeClass
+  var a2 : SomeClass2 = new SomeClass2
+  //import language.implicitConversions
+  //implicit def setParentType2SomeClass(x:Any) = x.asInstanceOf[SomeClass]
+  //implicit def setParentType2SomeClass2(x:Any) = x.asInstanceOf[SomeClass2]
+  //var b : SomeClass = a.f.set(23).asInstanceOf[SomeClass].f.set(23).asInstanceOf[SomeClass]
+  //var b2 : SomeClass2 = a2.f.set(23).asInstanceOf[SomeClass2].f.set(23).asInstanceOf[SomeClass2]
+  var b1 : SomeClass =  a1.f.set(23).f.set(23)
+  var b2 : SomeClass2 = a2.f.set(23).f.set(23)
+}
diff --git a/test/files/run/t6168b/Context.java b/test/files/run/t6168b/Context.java
new file mode 100644
index 0000000..b3ea221
--- /dev/null
+++ b/test/files/run/t6168b/Context.java
@@ -0,0 +1,34 @@
+public class Context<ParentType> {
+    private ParentType parent;
+
+    public Context() {}
+
+    public ParentType getParent() {
+        return parent;
+    }
+
+    public void setParent(ParentType parent) {
+         this.parent = parent;
+    }
+
+    public Field<Integer> intField() {
+        return new Field<Integer>() {
+            @Override
+            public Integer get() {
+                return 0;
+            }
+
+            @Override
+            public ParentType set(Integer t) {
+                return parent;
+            }
+        };
+    }
+
+    public static abstract class Field<T> {
+
+        public abstract T get();
+
+        public abstract Object set(T t);
+    }
+}
\ No newline at end of file
diff --git a/test/files/run/t6168b/JavaTest.java b/test/files/run/t6168b/JavaTest.java
new file mode 100644
index 0000000..a09fa03
--- /dev/null
+++ b/test/files/run/t6168b/JavaTest.java
@@ -0,0 +1,6 @@
+public class JavaTest {
+  public static void main(String[] args) {
+    SomeClass a = new SomeClass();
+    Object b = a.f.set(23);
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/t6168b/SomeClass.java b/test/files/run/t6168b/SomeClass.java
new file mode 100644
index 0000000..566c55e
--- /dev/null
+++ b/test/files/run/t6168b/SomeClass.java
@@ -0,0 +1,11 @@
+public class SomeClass {
+    private final Context<SomeClass> context = new Context<SomeClass>();
+    {
+        context.setParent(this);
+    }
+
+    public final Context.Field<Integer> f = context.intField();
+
+}
+
+
diff --git a/test/files/run/t6168b/main.scala b/test/files/run/t6168b/main.scala
new file mode 100644
index 0000000..187e9fe
--- /dev/null
+++ b/test/files/run/t6168b/main.scala
@@ -0,0 +1,8 @@
+
+
+object Test extends App {
+  JavaTest.main(null)
+
+  var a1 : SomeClass = new SomeClass
+  var b1 : Object =  a1.f.set(23)
+}
diff --git a/test/files/run/t6178.scala b/test/files/run/t6178.scala
index 0b4cf0b..41e148a 100644
--- a/test/files/run/t6178.scala
+++ b/test/files/run/t6178.scala
@@ -2,6 +2,6 @@ import scala.reflect.runtime.universe._
 import scala.reflect.runtime.{currentMirror => cm}
 
 object Test extends App {
-  val plus = typeOf[java.lang.String].member(newTermName("$plus")).asMethod
+  val plus = typeOf[java.lang.String].member(TermName("$plus")).asMethod
   println(cm.reflect("").reflectMethod(plus).apply("2"))
 }
\ No newline at end of file
diff --git a/test/files/run/t6181.scala b/test/files/run/t6181.scala
index fb23eaf..eaa7340 100644
--- a/test/files/run/t6181.scala
+++ b/test/files/run/t6181.scala
@@ -3,6 +3,6 @@ import scala.reflect.runtime.{currentMirror => cm}
 
 object Test extends App {
   class C { def test(x: => Int) = println(x) }
-  val mm = cm.reflect(new C).reflectMethod(typeOf[C].member(newTermName("test")).asMethod)
+  val mm = cm.reflect(new C).reflectMethod(typeOf[C].member(TermName("test")).asMethod)
   mm(2)
 }
\ No newline at end of file
diff --git a/test/files/run/t6187.check b/test/files/run/t6187.check
index c0ca029..0180125 100644
--- a/test/files/run/t6187.check
+++ b/test/files/run/t6187.check
@@ -1,18 +1,18 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> import language.experimental.macros, reflect.macros.Context
-import language.experimental.macros
-import reflect.macros.Context
+scala> import scala.language.experimental.macros, scala.reflect.macros.blackbox.Context
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
 
 scala> def macroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[List[T]] = {
   val r = c.universe.reify { List(t.splice) }
-  c.Expr[List[T]]( c.resetLocalAttrs(r.tree) )
+  c.Expr[List[T]]( c.untypecheck(r.tree) )
 }
-macroImpl: [T](c: scala.reflect.macros.Context)(t: c.Expr[T])(implicit evidence$1: c.WeakTypeTag[T])c.Expr[List[T]]
+macroImpl: [T](c: scala.reflect.macros.blackbox.Context)(t: c.Expr[T])(implicit evidence$1: c.WeakTypeTag[T])c.Expr[List[T]]
 
 scala> def demo[T](t: T): List[T] = macro macroImpl[T]
-demo: [T](t: T)List[T]
+defined term macro demo: [T](t: T)List[T]
 
 scala> def m[T](t: T): List[List[T]] =
   demo( List((t,true)) collect { case (x,true) => x } )
diff --git a/test/files/run/t6187.scala b/test/files/run/t6187.scala
index ae64291..7a39cfd 100644
--- a/test/files/run/t6187.scala
+++ b/test/files/run/t6187.scala
@@ -2,10 +2,10 @@ import scala.tools.partest.ReplTest
 
 object Test extends ReplTest {
   override def code = """
-import language.experimental.macros, reflect.macros.Context
+import scala.language.experimental.macros, scala.reflect.macros.blackbox.Context
 def macroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[List[T]] = {
   val r = c.universe.reify { List(t.splice) }
-  c.Expr[List[T]]( c.resetLocalAttrs(r.tree) )
+  c.Expr[List[T]]( c.untypecheck(r.tree) )
 }
 def demo[T](t: T): List[T] = macro macroImpl[T]
 def m[T](t: T): List[List[T]] =
diff --git a/test/files/run/t6196.scala b/test/files/run/t6196.scala
new file mode 100644
index 0000000..a75911f
--- /dev/null
+++ b/test/files/run/t6196.scala
@@ -0,0 +1,68 @@
+import scala.collection.immutable.HashSet
+
+object Test extends App {
+
+  case class Collision(value: Int) extends Ordered[Collision] {
+    def compare(that:Collision) = value compare that.value
+
+    override def hashCode = value / 5
+  }
+
+  def testCorrectness[T : Ordering](n: Int, mkKey: Int => T) {
+    val o = implicitly[Ordering[T]]
+    val s = HashSet.empty[T] ++ (0 until n).map(mkKey)
+    for (i <- 0 until n) {
+      val ki = mkKey(i)
+      val a = s.filter(o.lt(_,ki))
+      val b = s.filterNot(o.lt(_,ki))
+      require(a.size == i && (0 until i).forall(i => a.contains(mkKey(i))))
+      require(b.size == n - i && (i until n).forall(i => b.contains(mkKey(i))))
+    }
+  }
+
+  // this tests the structural sharing of the new filter
+  // I could not come up with a simple test that tests structural sharing when only parts are reused, but
+  // at least this fails with the old and passes with the new implementation
+  def testSharing() {
+    val s = HashSet.empty[Int] ++ (0 until 100)
+    require(s.filter(_ => true) eq s)
+    require(s.filterNot(_ => false) eq s)
+  }
+
+  // this tests that neither hashCode nor equals are called during filter
+  def testNoHashing() {
+    var hashCount = 0
+    var equalsCount = 0
+    case class HashCounter(value:Int) extends Ordered[HashCounter] {
+      def compare(that:HashCounter) = value compare that.value
+
+      override def hashCode = {
+        hashCount += 1
+        value
+      }
+
+      override def equals(that:Any) = {
+        equalsCount += 1
+        that match {
+          case HashCounter(value) => this.value == value
+          case _ => false
+        }
+      }
+    }
+
+    val s = HashSet.empty[HashCounter] ++ (0 until 100).map(HashCounter)
+    val hashCount0 = hashCount
+    val equalsCount0 = equalsCount
+    val t = s.filter(_<HashCounter(50))
+    require(hashCount == hashCount0)
+    require(equalsCount == equalsCount0)
+  }
+
+  // this tests correctness of filter and filterNot for integer keys
+  testCorrectness[Int](100, identity _)
+  // this tests correctness of filter and filterNot for keys with lots of collisions
+  // this is necessary because usually collisions are rare so the collision-related code is not thoroughly tested
+  testCorrectness[Collision](100, Collision.apply _)
+  testSharing()
+  testNoHashing()
+}
diff --git a/test/files/run/t6197.check b/test/files/run/t6197.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t6198.check b/test/files/run/t6198.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t6199-mirror.scala b/test/files/run/t6199-mirror.scala
index 772a384..3fda56b 100644
--- a/test/files/run/t6199-mirror.scala
+++ b/test/files/run/t6199-mirror.scala
@@ -3,5 +3,5 @@ import scala.reflect.runtime.{currentMirror => cm}
 
 object Test extends App {
   class C { def foo = () }
-  println(cm.reflect(new C).reflectMethod(typeOf[C].member(newTermName("foo")).asMethod)())
+  println(cm.reflect(new C).reflectMethod(typeOf[C].member(TermName("foo")).asMethod)())
 }
\ No newline at end of file
diff --git a/test/files/run/t6199-toolbox.scala b/test/files/run/t6199-toolbox.scala
index 89015f5..6ba5e50 100644
--- a/test/files/run/t6199-toolbox.scala
+++ b/test/files/run/t6199-toolbox.scala
@@ -4,5 +4,5 @@ import scala.tools.reflect.ToolBox
 
 object Test extends App {
   val tb = cm.mkToolBox()
-  println(tb.eval(Literal(Constant(()))))
+  println(tb.eval(q"()"))
 }
\ No newline at end of file
diff --git a/test/files/run/t6200.scala b/test/files/run/t6200.scala
new file mode 100644
index 0000000..75600cd
--- /dev/null
+++ b/test/files/run/t6200.scala
@@ -0,0 +1,68 @@
+import scala.collection.immutable.HashMap
+
+object Test extends App {
+
+  case class Collision(value: Int) extends Ordered[Collision] {
+    def compare(that: Collision) = value compare that.value
+
+    override def hashCode = value / 5
+  }
+
+  def testCorrectness[T: Ordering](n: Int, mkKey: Int => T) {
+    val o = implicitly[Ordering[T]]
+    val s = HashMap.empty[T, Unit] ++ (0 until n).map(x => mkKey(x) -> (()))
+    for (i <- 0 until n) {
+      val ki = mkKey(i)
+      val a = s.filter(kv => o.lt(kv._1, ki))
+      val b = s.filterNot(kv => o.lt(kv._1, ki))
+      require(a.size == i && (0 until i).forall(i => a.contains(mkKey(i))))
+      require(b.size == n - i && (i until n).forall(i => b.contains(mkKey(i))))
+    }
+  }
+
+  // this tests the structural sharing of the new filter
+  // I could not come up with a simple test that tests structural sharing when only parts are reused, but
+  // at least this fails with the old and passes with the new implementation
+  def testSharing() {
+    val s = HashMap.empty[Int, Unit] ++ (0 until 100).map(_ -> (()))
+    require(s.filter(_ => true) eq s)
+    require(s.filterNot(_ => false) eq s)
+  }
+
+  // this tests that neither hashCode nor equals are called during filter
+  def testNoHashing() {
+    var hashCount = 0
+    var equalsCount = 0
+    case class HashCounter(value: Int) extends Ordered[HashCounter] {
+      def compare(that: HashCounter) = value compare that.value
+
+      override def hashCode = {
+        hashCount += 1
+        value
+      }
+
+      override def equals(that: Any) = {
+        equalsCount += 1
+        that match {
+          case HashCounter(value) => this.value == value
+          case _ => false
+        }
+      }
+    }
+
+    val s = HashMap.empty[HashCounter, Unit] ++ (0 until 100).map(k => HashCounter(k) -> (()))
+    val hashCount0 = hashCount
+    val equalsCount0 = equalsCount
+    val t = s.filter(_._1 < HashCounter(50))
+    require(hashCount == hashCount0)
+    require(equalsCount == equalsCount0)
+  }
+
+  // this tests correctness of filter and filterNot for integer keys
+  testCorrectness[Int](100, identity _)
+  // this tests correctness of filter and filterNot for keys with lots of collisions
+  // this is necessary because usually collisions are rare so the collision-related code is not thoroughly tested
+  testCorrectness[Collision](100, Collision.apply _)
+  testSharing()
+  testNoHashing()
+}
diff --git a/test/files/run/t6221.check b/test/files/run/t6221.check
new file mode 100644
index 0000000..aa1bdd0
--- /dev/null
+++ b/test/files/run/t6221.check
@@ -0,0 +1 @@
+((x) => x.$percent(2).$eq$eq(0))
diff --git a/test/files/run/t6221/Macros_1.scala b/test/files/run/t6221/Macros_1.scala
new file mode 100644
index 0000000..0aeaa00
--- /dev/null
+++ b/test/files/run/t6221/Macros_1.scala
@@ -0,0 +1,23 @@
+import language.experimental.macros
+import language.implicitConversions
+import scala.reflect.macros.blackbox.Context
+import scala.reflect.runtime.universe.Tree
+
+class ReflectiveClosure[A, B](val tree: Tree, fn: A => B) extends (A => B) {
+  def apply(x: A) = fn(x)
+}
+
+object ReflectiveClosure {
+  implicit def reflectClosure[A, B](f: A => B): ReflectiveClosure[A, B] = macro Macros.reflectiveClosureImpl[A, B]
+}
+
+object Macros {
+  def reflectiveClosureImpl[A, B](c: Context)(f: c.Expr[A => B]): c.Expr[ReflectiveClosure[A, B]] = {
+    import c.universe._
+    import internal._
+    val u = gen.mkRuntimeUniverseRef
+    val m = EmptyTree
+    val tree = c.Expr[scala.reflect.runtime.universe.Tree](Select(c.reifyTree(u, m, f.tree), newTermName("tree")))
+    c.universe.reify(new ReflectiveClosure(tree.splice, f.splice))
+  }
+}
diff --git a/test/files/run/t6221/Test_2.scala b/test/files/run/t6221/Test_2.scala
new file mode 100644
index 0000000..9f6b228
--- /dev/null
+++ b/test/files/run/t6221/Test_2.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+  implicit class PimpedList[T](val list: List[T]) {
+    def query(predicate: ReflectiveClosure[T, Boolean]): List[T] = {
+      println(predicate.tree)
+      list filter predicate
+    }
+  }
+
+  List(1, 2, 3).query(x => x % 2 == 0)
+}
\ No newline at end of file
diff --git a/test/files/run/t6223.check b/test/files/run/t6223.check
index f83799b..4a09d19 100644
--- a/test/files/run/t6223.check
+++ b/test/files/run/t6223.check
@@ -1,4 +1,4 @@
 bar
 bar$mIc$sp
 bar$mIcI$sp
-bar$mcI$sp
\ No newline at end of file
+bar$mcI$sp
diff --git a/test/files/run/t6240-universe-code-gen.scala b/test/files/run/t6240-universe-code-gen.scala
new file mode 100644
index 0000000..9f7061e
--- /dev/null
+++ b/test/files/run/t6240-universe-code-gen.scala
@@ -0,0 +1,82 @@
+import scala.tools.partest.nest.FileManager._
+
+object Test extends App {
+  val cm = reflect.runtime.currentMirror
+  val u = cm.universe
+  import u._
+
+  val JavaUniverseTpe = typeOf[reflect.runtime.JavaUniverse]
+  val DefinitionsModule = JavaUniverseTpe.member(TermName("definitions"))
+
+  def forceCode(prefix: String, tp: Type): String = {
+    def isLazyAccessorOrObject(sym: Symbol) = (
+          (sym.isMethod && sym.asMethod.isLazy)
+       || sym.isModule
+    )
+    val forcables = tp.members.sorted.filter(isLazyAccessorOrObject)
+    forcables.map {
+      sym =>
+        val path = s"$prefix.${sym.name}"
+        "    " + (
+          if (sym.isPrivate || sym.isProtected) s"// inaccessible: $path"
+          else path
+        )
+    }.mkString("\n")
+  }
+
+  val code =
+    s"""|// Generated Code, validated by run/t6240-universe-code-gen.scala
+        |package scala.reflect
+        |package runtime
+        |
+        |trait JavaUniverseForce { self: runtime.JavaUniverse  =>
+        |  def force() {
+        |    Literal(Constant(42)).duplicate
+        |    nme.flattenedName()
+        |    nme.raw
+        |    WeakTypeTag
+        |    TypeTag
+        |    TypeTag.Byte.tpe
+        |    TypeTag.Short.tpe
+        |    TypeTag.Char.tpe
+        |    TypeTag.Int.tpe
+        |    TypeTag.Long.tpe
+        |    TypeTag.Float.tpe
+        |    TypeTag.Double.tpe
+        |    TypeTag.Boolean.tpe
+        |    TypeTag.Unit.tpe
+        |    TypeTag.Any.tpe
+        |    TypeTag.AnyVal.tpe
+        |    TypeTag.AnyRef.tpe
+        |    TypeTag.Object.tpe
+        |    TypeTag.Nothing.tpe
+        |    TypeTag.Null.tpe
+        |
+        |${forceCode("this", JavaUniverseTpe)}
+        |${forceCode("definitions", DefinitionsModule.info)}
+        |${forceCode("refChecks", typeOf[scala.reflect.internal.transform.RefChecks])}
+        |${forceCode("uncurry", typeOf[scala.reflect.internal.transform.UnCurry])}
+        |${forceCode("erasure", typeOf[scala.reflect.internal.transform.Erasure])}
+        |  }
+        |}""".stripMargin
+
+  import java.io.File
+  val testFile = new File(sys.props("partest.test-path"))
+  val actualFile = new java.io.File(testFile.getParent + "/../../../src/reflect/scala/reflect/runtime/JavaUniverseForce.scala").getCanonicalFile
+  val actual = scala.io.Source.fromFile(actualFile)
+  val actualLines = actual.getLines.toList
+  val generatedLines = code.lines.toList
+  if (actualLines != generatedLines) {
+    val msg = s"""|${actualFile} must be updated.
+                  |===========================================================
+                  | DIFF:
+                  |===========================================================
+                  |${compareContents(actualLines, generatedLines)}
+                  |===========================================================
+                  | NEW CONTENTS:
+                  |===========================================================
+                  |${code}""".stripMargin
+
+    assert(false, msg)
+  }
+}
diff --git a/test/files/run/t6240a.check b/test/files/run/t6240a.check
new file mode 100644
index 0000000..29f695b
--- /dev/null
+++ b/test/files/run/t6240a.check
@@ -0,0 +1 @@
+StepTwo.type
diff --git a/test/files/run/t6240a/StepOne.java b/test/files/run/t6240a/StepOne.java
new file mode 100644
index 0000000..342d617
--- /dev/null
+++ b/test/files/run/t6240a/StepOne.java
@@ -0,0 +1,41 @@
+import java.io.File;
+import java.io.IOException;
+import java.lang.ClassNotFoundException;
+import java.lang.NoSuchMethodException;
+import java.lang.IllegalAccessException;
+import java.lang.reflect.Method;
+import java.lang.reflect.InvocationTargetException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.net.MalformedURLException;
+
+public class StepOne {
+  public static void main(String[] args)
+  throws MalformedURLException, ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, IOException {
+    String[] launchPaths = System.getProperty("launch.classpath").split(File.pathSeparator);
+
+    // move away StepThree
+    File tempDir = File.createTempFile("temp", Long.toString(System.nanoTime()));
+    System.setProperty("launch.step.three", tempDir.getAbsolutePath());
+    tempDir.delete();
+    tempDir.mkdir();
+    File[] testClasses = new File(launchPaths[0]).listFiles();
+    for (int i = 0; i < testClasses.length; i++) {
+      File testClass = testClasses[i];
+      if (testClass.getPath().contains("StepThree")) {
+        File testClassMoved = new File(tempDir.getAbsolutePath() + "/" + testClass.getName());
+        testClass.renameTo(testClassMoved);
+      }
+    }
+
+    // launch StepTwo
+    URL[] launchURLs = new URL[launchPaths.length];
+    for (int i = 0; i < launchPaths.length; i++) {
+      launchURLs[i] = new File(launchPaths[i]).toURL();
+    }
+    URLClassLoader classLoader = new URLClassLoader(launchURLs, Object.class.getClassLoader());
+    Class<?> stepTwo = classLoader.loadClass("StepTwo");
+    Method main = stepTwo.getDeclaredMethod("main", String[].class);
+    main.invoke(null, (Object)(new String[]{}));
+  }
+}
diff --git a/test/files/run/t6240a/StepTwo.scala b/test/files/run/t6240a/StepTwo.scala
new file mode 100644
index 0000000..fc32219
--- /dev/null
+++ b/test/files/run/t6240a/StepTwo.scala
@@ -0,0 +1,7 @@
+import java.io.File
+import java.net.URLClassLoader
+
+object StepTwo extends App {
+  import scala.reflect.runtime.universe._
+  println(typeOf[StepTwo.type])
+}
\ No newline at end of file
diff --git a/test/files/run/t6240a/Test.scala b/test/files/run/t6240a/Test.scala
new file mode 100644
index 0000000..05c3678
--- /dev/null
+++ b/test/files/run/t6240a/Test.scala
@@ -0,0 +1,15 @@
+import java.io.File
+import scala.sys.process._
+
+object Test extends App {
+  def prop(key: String) = {
+    val value = System.getProperties.getProperty(key)
+    assert(value != null, key)
+    value
+  }
+  val testClassesDir = prop("partest.output")
+  assert(new File(testClassesDir).exists, testClassesDir)
+  val fullTestClassesClasspath = testClassesDir + prop("path.separator") + prop("java.class.path")
+  val javaBinary = if (new File(prop("javacmd")).isAbsolute) prop("javacmd") else prop("java.home") + "/bin/" + prop("javacmd")
+  List(javaBinary, "-cp", testClassesDir, "-Dlaunch.classpath=" + fullTestClassesClasspath, "StepOne").!
+}
\ No newline at end of file
diff --git a/test/files/run/t6240b.check b/test/files/run/t6240b.check
new file mode 100644
index 0000000..2558361
--- /dev/null
+++ b/test/files/run/t6240b.check
@@ -0,0 +1 @@
+StepThree.type
diff --git a/test/files/run/t6240b/StepOne.java b/test/files/run/t6240b/StepOne.java
new file mode 100644
index 0000000..342d617
--- /dev/null
+++ b/test/files/run/t6240b/StepOne.java
@@ -0,0 +1,41 @@
+import java.io.File;
+import java.io.IOException;
+import java.lang.ClassNotFoundException;
+import java.lang.NoSuchMethodException;
+import java.lang.IllegalAccessException;
+import java.lang.reflect.Method;
+import java.lang.reflect.InvocationTargetException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.net.MalformedURLException;
+
+public class StepOne {
+  public static void main(String[] args)
+  throws MalformedURLException, ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, IOException {
+    String[] launchPaths = System.getProperty("launch.classpath").split(File.pathSeparator);
+
+    // move away StepThree
+    File tempDir = File.createTempFile("temp", Long.toString(System.nanoTime()));
+    System.setProperty("launch.step.three", tempDir.getAbsolutePath());
+    tempDir.delete();
+    tempDir.mkdir();
+    File[] testClasses = new File(launchPaths[0]).listFiles();
+    for (int i = 0; i < testClasses.length; i++) {
+      File testClass = testClasses[i];
+      if (testClass.getPath().contains("StepThree")) {
+        File testClassMoved = new File(tempDir.getAbsolutePath() + "/" + testClass.getName());
+        testClass.renameTo(testClassMoved);
+      }
+    }
+
+    // launch StepTwo
+    URL[] launchURLs = new URL[launchPaths.length];
+    for (int i = 0; i < launchPaths.length; i++) {
+      launchURLs[i] = new File(launchPaths[i]).toURL();
+    }
+    URLClassLoader classLoader = new URLClassLoader(launchURLs, Object.class.getClassLoader());
+    Class<?> stepTwo = classLoader.loadClass("StepTwo");
+    Method main = stepTwo.getDeclaredMethod("main", String[].class);
+    main.invoke(null, (Object)(new String[]{}));
+  }
+}
diff --git a/test/files/run/t6240b/StepThree.scala b/test/files/run/t6240b/StepThree.scala
new file mode 100644
index 0000000..210795d
--- /dev/null
+++ b/test/files/run/t6240b/StepThree.scala
@@ -0,0 +1,4 @@
+object StepThree extends App {
+  import scala.reflect.runtime.universe._
+  println(typeOf[StepThree.type])
+}
\ No newline at end of file
diff --git a/test/files/run/t6240b/StepTwo.scala b/test/files/run/t6240b/StepTwo.scala
new file mode 100644
index 0000000..88e4649
--- /dev/null
+++ b/test/files/run/t6240b/StepTwo.scala
@@ -0,0 +1,10 @@
+import java.io.File
+import java.net.URLClassLoader
+
+object StepTwo extends App {
+  val classes = new File(System.getProperty("launch.step.three"))
+  val cl = new URLClassLoader(Array(classes.toURI.toURL), getClass.getClassLoader)
+  val stepThree = cl.loadClass("StepThree")
+  val main = stepThree.getDeclaredMethod("main", classOf[Array[String]])
+  main.invoke(null, Array[String]())
+}
\ No newline at end of file
diff --git a/test/files/run/t6240b/Test.scala b/test/files/run/t6240b/Test.scala
new file mode 100644
index 0000000..05c3678
--- /dev/null
+++ b/test/files/run/t6240b/Test.scala
@@ -0,0 +1,15 @@
+import java.io.File
+import scala.sys.process._
+
+object Test extends App {
+  def prop(key: String) = {
+    val value = System.getProperties.getProperty(key)
+    assert(value != null, key)
+    value
+  }
+  val testClassesDir = prop("partest.output")
+  assert(new File(testClassesDir).exists, testClassesDir)
+  val fullTestClassesClasspath = testClassesDir + prop("path.separator") + prop("java.class.path")
+  val javaBinary = if (new File(prop("javacmd")).isAbsolute) prop("javacmd") else prop("java.home") + "/bin/" + prop("javacmd")
+  List(javaBinary, "-cp", testClassesDir, "-Dlaunch.classpath=" + fullTestClassesClasspath, "StepOne").!
+}
\ No newline at end of file
diff --git a/test/files/run/t6246.scala b/test/files/run/t6246.scala
index 28765e1..2db87aa 100644
--- a/test/files/run/t6246.scala
+++ b/test/files/run/t6246.scala
@@ -1,5 +1,6 @@
 import scala.reflect.{ClassTag, classTag}
 
+ at deprecated("Suppress warnings", since="2.11")
 object Test extends App {
   def testValueClass(tag: ClassTag[_]) {
     println(s"runtimeClass = ${tag.runtimeClass}, toString = ${tag.toString}")
@@ -23,4 +24,4 @@ object Test extends App {
   testValueClass(ClassTag.Double)
   testValueClass(ClassTag.Unit)
   testValueClass(ClassTag.Boolean)
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t6253a.scala b/test/files/run/t6253a.scala
new file mode 100644
index 0000000..efa3230
--- /dev/null
+++ b/test/files/run/t6253a.scala
@@ -0,0 +1,64 @@
+import scala.collection.immutable.HashSet
+
+object Test extends App {
+
+  var hashCount = 0
+
+  /**
+   * A key that produces lots of hash collisions, to exercise the part of the code that deals with those
+   */
+  case class Collision(value: Int) {
+
+    override def hashCode = {
+      // we do not check hash counts for Collision keys because ListSet.++ uses a mutable hashset internally,
+      // so when we have hash collisions, union will call key.hashCode.
+      // hashCount += 1
+      value / 5
+    }
+  }
+
+  /**
+   * A key that is identical to int other than that it counts hashCode invocations
+   */
+  case class HashCounter(value: Int) {
+
+    override def hashCode = {
+      hashCount += 1
+      value
+    }
+  }
+
+  def testUnion[T](sizes: Seq[Int], offsets: Seq[Double], keyType: String, mkKey: Int => T) {
+    for {
+      i <- sizes
+      o <- offsets
+    } {
+      val e = HashSet.empty[T]
+      val j = (i * o).toInt
+      // create two sets of size i with overlap o
+      val a = e ++ (0 until i).map(mkKey)
+      require(a.size == i, s"Building HashSet of size $i failed. Key type $keyType.")
+      val b = e ++ (j until (i + j)).map(mkKey)
+      require(b.size == i, s"Building HashSet of size $i failed. Key type $keyType.")
+      val as = e ++ (0 until j).map(mkKey)
+      require(as.size == j, s"Building HashSet of size $j failed. Key type $keyType.")
+      val hashCount0 = hashCount
+      val u = a union b
+      require(hashCount == hashCount0, s"key.hashCode should not be called, but has been called ${hashCount - hashCount0} times. Key type $keyType.")
+      require(u == (a union scala.collection.mutable.HashSet(b.toSeq: _*)), s"Operation must still work for other sets!")
+      require(u.size == i + j, s"Expected size ${i+j}. Real size ${u.size}. Key type $keyType.")
+      for (x <- 0 until i + j)
+        require(u.contains(mkKey(x)), s"Key type $keyType. Set (0 until ${i + j}) should contain $x but does not.")
+      val a_as = a union as
+      val as_a = as union a
+      require((a_as eq a) || (a_as eq as), s"No structural sharing in a union as. Key type $keyType, a=(0 until $i) as=(0 until $j)")
+      require((as_a eq a) || (as_a eq as), s"No structural sharing in as union a. Key type $keyType, a=(0 until $i) as=(0 until $j)")
+    }
+  }
+
+  val sizes = Seq(1, 10, 100, 1000, 10000, 100000)
+  val offsets = Seq(0.0, 0.25, 0.5, 0.75, 1.0)
+  testUnion(sizes, offsets, "int", identity[Int])
+  testUnion(sizes, offsets, "hashcounter", HashCounter.apply)
+  testUnion(sizes, offsets, "collision", Collision.apply)
+}
diff --git a/test/files/run/t6253b.scala b/test/files/run/t6253b.scala
new file mode 100644
index 0000000..9cbfefd
--- /dev/null
+++ b/test/files/run/t6253b.scala
@@ -0,0 +1,62 @@
+import scala.collection.immutable.HashSet
+
+object Test extends App {
+
+  var hashCount = 0
+
+  /**
+   * A key that produces lots of hash collisions, to exercise the part of the code that deals with those
+   */
+  case class Collision(value: Int) {
+
+    override def hashCode = {
+      hashCount += 1
+      value / 5
+    }
+  }
+
+  /**
+   * A key that is identical to int other than that it counts hashCode invocations
+   */
+  case class HashCounter(value: Int) {
+
+    override def hashCode = {
+      hashCount += 1
+      value
+    }
+  }
+
+  def testIntersect[T](sizes: Seq[Int], offsets: Seq[Double], keyType: String, mkKey: Int => T) {
+    for {
+      i <- sizes
+      o <- offsets
+    } {
+      val e = HashSet.empty[T]
+      val j = (i * o).toInt
+      // create two sets of size i with overlap o
+      val a = e ++ (0 until i).map(mkKey)
+      require(a.size == i, s"Building HashSet of size $i failed. Key type $keyType.")
+      val b = e ++ (j until (i + j)).map(mkKey)
+      require(b.size == i, s"Building HashSet of size $i failed. Key type $keyType.")
+      val as = e ++ (0 until j).map(mkKey)
+      require(as.size == j, s"Building HashSet of size $j failed. Key type $keyType.")
+      val hashCount0 = hashCount
+      val u = a intersect b
+      require(hashCount == hashCount0, s"key.hashCode should not be called, but has been called ${hashCount - hashCount0} times. Key type $keyType.")
+      require(u == (a intersect scala.collection.mutable.HashSet(b.toSeq: _*)), s"Operation must still work for other sets!")
+      require(u.size == i - j, s"Expected size ${i + j}. Real size ${u.size}. Key type $keyType.")
+      for (x <- j until i)
+        require(u.contains(mkKey(x)), s"Key type $keyType. Set (0 until ${i + j}) should contain $x but does not.")
+      val a_as = a intersect as
+      val as_a = as intersect a
+      require((a_as eq as) || (a_as eq a), s"No structural sharing in a intersect as. Key type $keyType, a=(0 until $i) as=(0 until $j)")
+      require((as_a eq as) || (as_a eq a), s"No structural sharing in as intersect a. Key type $keyType, a=(0 until $i) as=(0 until $j)")
+    }
+  }
+
+  val sizes = Seq(1, 10, 100, 1000, 10000, 100000)
+  val offsets = Seq(0.0, 0.25, 0.5, 0.75, 1.0)
+  testIntersect(sizes, offsets, "int", identity[Int])
+  testIntersect(sizes, offsets, "hashcounter", HashCounter.apply)
+  testIntersect(sizes, offsets, "collision", Collision.apply)
+}
diff --git a/test/files/run/t6253c.scala b/test/files/run/t6253c.scala
new file mode 100644
index 0000000..71dfe14
--- /dev/null
+++ b/test/files/run/t6253c.scala
@@ -0,0 +1,63 @@
+import scala.collection.immutable.HashSet
+
+object Test extends App {
+
+  var hashCount = 0
+
+  /**
+   * A key that produces lots of hash collisions, to exercise the part of the code that deals with those
+   */
+  case class Collision(value: Int) {
+
+    override def hashCode = {
+      hashCount += 1
+      value / 5
+    }
+  }
+
+  /**
+   * A key that is identical to int other than that it counts hashCode invocations
+   */
+  case class HashCounter(value: Int) {
+
+    override def hashCode = {
+      hashCount += 1
+      value
+    }
+  }
+
+  def testDiff[T](sizes: Seq[Int], offsets: Seq[Double], keyType: String, mkKey: Int => T) {
+    for {
+      i <- sizes
+      o <- offsets
+    } {
+      val e = HashSet.empty[T]
+      val j = (i * o).toInt
+      // create two sets of size i with overlap o
+      val a = e ++ (0 until i).map(mkKey)
+      require(a.size == i, s"Building HashSet of size $i failed. Key type $keyType.")
+      val b = e ++ (j until (i + j)).map(mkKey)
+      require(b.size == i, s"Building HashSet of size $i failed. Key type $keyType.")
+      val as = e ++ (0 until j).map(mkKey)
+      require(as.size == j, s"Building HashSet of size $j failed. Key type $keyType.")
+      val hashCount0 = hashCount
+      val u = a diff b
+      require(hashCount == hashCount0, s"key.hashCode should not be called, but has been called ${hashCount - hashCount0} times. Key type $keyType.")
+      require(u == (a diff scala.collection.mutable.HashSet(b.toSeq: _*)), s"Operation must still work for other sets!")
+      require(u.size == j, s"Expected size $j. Real size ${u.size}. Key type $keyType.")
+      for (x <- 0 until j)
+        require(u.contains(mkKey(x)), s"Key type $keyType. Set (0 until ${i + j}) should contain $x but does not.")
+      require((as intersect b).isEmpty)
+      val b_as = b diff as
+      val as_b = as diff b
+      require((b_as eq b) || (b_as eq as), s"No structural sharing in b diff as. Key type $keyType, b=($j until ${i + j}) as=(0 until $j)")
+      require((as_b eq b) || (as_b eq as), s"No structural sharing in as diff b. Key type $keyType, b=($j until ${i + j}) as=(0 until $j)")
+    }
+  }
+
+  val sizes = Seq(1, 10, 100, 1000, 10000, 100000)
+  val offsets = Seq(0.0, 0.25, 0.5, 0.75, 1.0)
+  testDiff(sizes, offsets, "int", identity[Int])
+  testDiff(sizes, offsets, "hashCounter", HashCounter.apply)
+  testDiff(sizes, offsets, "collision", Collision.apply)
+}
diff --git a/test/files/run/t6259.scala b/test/files/run/t6259.scala
index 294c95e..b2c27df 100644
--- a/test/files/run/t6259.scala
+++ b/test/files/run/t6259.scala
@@ -37,9 +37,9 @@ object Early extends {
 class DoubleTrouble[X](x: AnyRef)(implicit override val tt: TypeTag[X]) extends A[X]
 
 object DoubleOk extends DoubleTrouble[String]({
-  // Drops to this.getClass and is an issue 
-  object InnerTrouble extends A[String]; 
-  InnerTrouble 
+  // Drops to this.getClass and is an issue
+  object InnerTrouble extends A[String];
+  InnerTrouble
 })
 
 object Test extends App {
diff --git a/test/files/run/t6260-delambdafy.check b/test/files/run/t6260-delambdafy.check
new file mode 100644
index 0000000..b2a7bed
--- /dev/null
+++ b/test/files/run/t6260-delambdafy.check
@@ -0,0 +1,4 @@
+f(C at 2e)
+
+Test$lambda$1$$apply
+apply
diff --git a/test/files/run/t6260-delambdafy.flags b/test/files/run/t6260-delambdafy.flags
new file mode 100644
index 0000000..48b438d
--- /dev/null
+++ b/test/files/run/t6260-delambdafy.flags
@@ -0,0 +1 @@
+-Ydelambdafy:method
diff --git a/test/files/run/t6260-delambdafy.scala b/test/files/run/t6260-delambdafy.scala
new file mode 100644
index 0000000..056b1ed
--- /dev/null
+++ b/test/files/run/t6260-delambdafy.scala
@@ -0,0 +1,12 @@
+class C[A](private val a: Any) extends AnyVal
+
+object Test {
+  val f = (x: C[Any]) => {println(s"f($x)"); x}
+  def main(args: Array[String]) {
+     f(new C("."))
+     val methods = f.getClass.getDeclaredMethods.map(_.getName).sorted
+     println("")
+     println(methods.mkString("\n"))
+  }
+}
+
diff --git a/test/files/run/t6260b.scala b/test/files/run/t6260b.scala
new file mode 100644
index 0000000..dd2cf4b
--- /dev/null
+++ b/test/files/run/t6260b.scala
@@ -0,0 +1,13 @@
+class C[A](val a: A) extends AnyVal
+
+class DD {
+  def foo(c: C[String]) = ()
+  def bar[A <: String](c: C[A]) = ()
+  def baz[A](c: C[A]) = ()
+}
+
+object Test extends App {
+  classOf[DD].getMethod("foo", classOf[String])
+  classOf[DD].getMethod("bar", classOf[String])
+  classOf[DD].getMethod("baz", classOf[Object])
+}
diff --git a/test/files/run/t6260c.check b/test/files/run/t6260c.check
new file mode 100644
index 0000000..1a57f2d
--- /dev/null
+++ b/test/files/run/t6260c.check
@@ -0,0 +1,5 @@
+f(C at 2e)
+
+Test$$anonfun$$apply
+apply
+g(C at 2e)
diff --git a/test/files/run/t6260c.scala b/test/files/run/t6260c.scala
new file mode 100644
index 0000000..845dc15
--- /dev/null
+++ b/test/files/run/t6260c.scala
@@ -0,0 +1,17 @@
+class C[A](private val a: Any) extends AnyVal
+
+object Test {
+  val f = (x: C[Any]) => {println(s"f($x)"); x}
+  trait T[A] {
+    def apply(a: A): A
+  }
+  val g = new T[C[Any]] { def apply(a: C[Any]) = { println(s"g($a)"); a } }
+  def main(args: Array[String]) {
+     f(new C("."))
+     val methods = f.getClass.getDeclaredMethods.map(_.getName).sorted
+     println("")
+     println(methods.mkString("\n"))
+     g.apply(new C("."))
+  }
+}
+
diff --git a/test/files/run/t6261.scala b/test/files/run/t6261.scala
index b446325..bf6d640 100644
--- a/test/files/run/t6261.scala
+++ b/test/files/run/t6261.scala
@@ -2,12 +2,6 @@ import scala.collection.immutable._
 
 object Test extends App {
 
-  def test0() {
-    val m=ListMap(1->2,3->4)
-    if(m.tail ne m.tail)
-      println("ListMap.tail uses a builder, so it is not O(1)")
-  }
-
   def test1() {
     // test that a HashTrieMap with one leaf element is not created!
     val x = HashMap.empty + (1->1) + (2->2)
@@ -92,7 +86,6 @@ object Test extends App {
     // StructureTests.printStructure(z)
     require(z.size == 2 && z.contains(a._1) && z.contains(c._1))
   }
-  test0()
   test1()
   test2()
   test3()
diff --git a/test/files/run/t6273.check b/test/files/run/t6273.check
index c1c18da..bef0b22 100644
--- a/test/files/run/t6273.check
+++ b/test/files/run/t6273.check
@@ -1,19 +1,15 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> val y = 55
 y: Int = 55
 
 scala> val x = s"""
   y = $y
 """
-x: String = 
+x: String =
 "
   y = 55
 "
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check
index e940975..a032a10 100644
--- a/test/files/run/t6288.check
+++ b/test/files/run/t6288.check
@@ -1,8 +1,8 @@
 [[syntax trees at end of                    patmat]] // newSource1.scala
 [7]package [7]<empty> {
   [7]object Case3 extends [13][106]scala.AnyRef {
-    [13]def <init>(): [13]Case3.type = [13]{
-      [13][13][13]Case3.super.<init>();
+    [106]def <init>(): [13]Case3.type = [106]{
+      [106][106][106]Case3.super.<init>();
       [13]()
     };
     [21]def unapply([29]z: [32]<type: [32]scala.Any>): [21]Option[Int] = [56][52][52]scala.Some.apply[[52]Int]([58]-1);
@@ -24,8 +24,8 @@
     }
   };
   [113]object Case4 extends [119][217]scala.AnyRef {
-    [119]def <init>(): [119]Case4.type = [119]{
-      [119][119][119]Case4.super.<init>();
+    [217]def <init>(): [119]Case4.type = [217]{
+      [217][217][217]Case4.super.<init>();
       [119]()
     };
     [127]def unapplySeq([138]z: [141]<type: [141]scala.Any>): [127]Option[List[Int]] = [167]scala.None;
@@ -50,8 +50,8 @@
     }
   };
   [224]object Case5 extends [230][312]scala.AnyRef {
-    [230]def <init>(): [230]Case5.type = [230]{
-      [230][230][230]Case5.super.<init>();
+    [312]def <init>(): [230]Case5.type = [312]{
+      [312][312][312]Case5.super.<init>();
       [230]()
     };
     [238]def unapply([246]z: [249]<type: [249]scala.Any>): [238]Boolean = [265]true;
@@ -60,7 +60,7 @@
       [273]case5()[293]{
         [293]<synthetic> val o7: [293]Option[List[Int]] = [293][293]Case4.unapplySeq([293]x1);
         [293]if ([293]o7.isEmpty.unary_!)
-          [293]if ([293][293][293][293]o7.get.!=([293]null).&&([293][293][293][293]o7.get.lengthCompare([293]0).==([195]0)))
+          [293]if ([293][293][293][293]o7.get.!=([293]null).&&([293][293][293][293]o7.get.lengthCompare([293]0).==([293]0)))
             [304][304]matchEnd4([304]())
           else
             [293][293]case6()
diff --git a/test/files/run/t6288b-jump-position.check b/test/files/run/t6288b-jump-position.check
index 83ba810..ece88b1 100644
--- a/test/files/run/t6288b-jump-position.check
+++ b/test/files/run/t6288b-jump-position.check
@@ -65,9 +65,9 @@ object Case3 extends Object {
   blocks: [1]
   
   1: 
-    1	THIS(Case3)
-    1	CALL_METHOD java.lang.Object.<init> (super())
-    1	RETURN(UNIT)
+    12	THIS(Case3)
+    12	CALL_METHOD java.lang.Object.<init> (super())
+    12	RETURN(UNIT)
     
   }
   Exception handlers: 
diff --git a/test/files/run/t6288b-jump-position.scala b/test/files/run/t6288b-jump-position.scala
index e22a1ab..c5f3bbe 100644
--- a/test/files/run/t6288b-jump-position.scala
+++ b/test/files/run/t6288b-jump-position.scala
@@ -1,6 +1,6 @@
-import scala.tools.partest.IcodeTest
+import scala.tools.partest.IcodeComparison
 
-object Test extends IcodeTest {
+object Test extends IcodeComparison {
   override def code =
     """object Case3 {                                // 01
      |  def unapply(z: Any): Option[Int] = Some(-1)  // 02
@@ -15,8 +15,5 @@ object Test extends IcodeTest {
      |  }
      |}""".stripMargin
 
-  override def show() {
-    val lines1 = collectIcode("")
-    println(lines1 mkString "\n")
-  }
+  override def show() = showIcode()
 }
diff --git a/test/files/run/t6292.check b/test/files/run/t6292.check
new file mode 100644
index 0000000..6232ba7
--- /dev/null
+++ b/test/files/run/t6292.check
@@ -0,0 +1 @@
+warning: there were 7 deprecation warning(s); re-run with -deprecation for details
diff --git a/test/files/run/t6308.check b/test/files/run/t6308.check
new file mode 100644
index 0000000..e2577db
--- /dev/null
+++ b/test/files/run/t6308.check
@@ -0,0 +1,16 @@
+- Unspecialized type args
+// Specialized
+f1 f1$mIc$sp
+f2 f2$mIc$sp
+f3 f3$mIc$sp
+f4 f4$mIc$sp
+f5 f5$mIc$sp
+
+// Unspecialized type args
+f4(Boolean) f4
+f4(String)  f4
+
+// Ideally these would be specialized
+todo1 todo1
+todo2 todo2
+todo3 todo3
diff --git a/test/files/run/t6308.scala b/test/files/run/t6308.scala
new file mode 100644
index 0000000..d23cd6e
--- /dev/null
+++ b/test/files/run/t6308.scala
@@ -0,0 +1,45 @@
+import scala.{specialized => sp}
+
+// NOTE: `{ val c = caller; print(""); c }` is used instead of a simple `caller`,
+//       because we want to prevent tail-call optimization from eliding the stack-
+//       frames we want to inspect.
+
+object Test {
+  def caller = new Exception().getStackTrace()(1).getMethodName
+  def f1[@sp(Int) A](a: A, b: Any)             = { val c = caller; print(""); c }
+  def f2[@sp(Int) A, B](a: A, b: String)       = { val c = caller; print(""); c }
+  def f3[B, @sp(Int) A](a: A, b: List[B])      = { val c = caller; print(""); c }
+  def f4[B, @sp(Int) A](a: A, b: List[(A, B)]) = { val c = caller; print(""); c }
+
+  def f5[@sp(Int) A, B <: Object](a: A, b: B)  = { val c = caller; print(""); c }
+
+  // `uncurryTreeType` calls a TypeMap on the call to this method and we end up with new
+  // type parameter symbols, which are not found in `TypeEnv.includes(typeEnv(member), env)`
+  // in `specSym`. (One of `uncurry`'s tasks is to expand type aliases in signatures.)
+  type T = Object
+  def todo1[@sp(Int) A, B <: T](a: A, b: String)           = { val c = caller; print(""); c }
+  def todo2[@sp(Int) A, B <: AnyRef](a: A, b: String)      = { val c = caller; print(""); c }
+  def todo3[B <: List[A], @specialized(Int) A](a: A, b: B) = { val c = caller; print(""); c }
+
+  def main(args: Array[String]) {
+    val s = ""
+    val result =
+      s"""|- Unspecialized type args
+          |// Specialized
+          |f1 ${f1(1,"some ref")}
+          |f2 ${f2(1,"some ref")}
+          |f3 ${f3(1,List("some ref"))}
+          |f4 ${f4(1,Nil)}
+          |f5 ${f5(1,s)}
+          |
+          |// Unspecialized type args
+          |f4(Boolean) ${f4(Boolean,Nil)}
+          |f4(String)  ${f4("",Nil)}
+          |
+          |// Ideally these would be specialized
+          |todo1 ${todo1(1,s)}
+          |todo2 ${todo2(1,s)}
+          |todo3 ${todo3(1,List(0))}""".stripMargin
+    println(result)
+  }
+}
diff --git a/test/files/continuations-run/function1.check b/test/files/run/t6309.check
similarity index 100%
rename from test/files/continuations-run/function1.check
rename to test/files/run/t6309.check
diff --git a/test/files/run/t6309.scala b/test/files/run/t6309.scala
new file mode 100644
index 0000000..7bbca63
--- /dev/null
+++ b/test/files/run/t6309.scala
@@ -0,0 +1,16 @@
+trait A {
+  def a: Int
+}
+
+object Test {
+  def f(a: Int) = new {
+    //private val b = a
+    private[this] val b = a // crashes, sorry scalac
+  } with A {
+    def a = b
+  }
+
+  def main(args: Array[String]) {
+    println(f(7).a)
+  }
+}
diff --git a/test/files/run/t6320.check b/test/files/run/t6320.check
index e56bacd..013acc1 100644
--- a/test/files/run/t6320.check
+++ b/test/files/run/t6320.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> import scala.language.dynamics
 import scala.language.dynamics
 
@@ -13,5 +11,3 @@ scala> new Dyn(Map("foo" -> 10)).foo[Int]
 res0: Int = 10
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t6323b.scala b/test/files/run/t6323b.scala
index f530ac3..50d0900 100644
--- a/test/files/run/t6323b.scala
+++ b/test/files/run/t6323b.scala
@@ -10,7 +10,7 @@ object Test extends App {
       val lookAtMe = m.reflect(Test("a",List(5)))
       val value = u.weakTypeOf[Test]
       val members = value.members
-      val member = value.members.filter(_.name.encoded == "a")
+      val member = value.members.filter(_.name.encodedName == TermName("a"))
       val aAccessor = lookAtMe.reflectMethod(member.head.asMethod)
       val thisShouldBeA = aAccessor.apply()
       println(thisShouldBeA)
diff --git a/test/files/run/t6329_repl.check b/test/files/run/t6329_repl.check
index 693263a..5049426 100644
--- a/test/files/run/t6329_repl.check
+++ b/test/files/run/t6329_repl.check
@@ -1,13 +1,35 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
+scala> import scala.reflect.classTag
+import scala.reflect.classTag
 
-scala> classManifest[List[_]]
+scala> classManifest[scala.List[_]]
 warning: there were 1 deprecation warning(s); re-run with -deprecation for details
-res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[Any]
+res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[<?>]
 
-scala> scala.reflect.classTag[List[_]]
+scala> classTag[scala.List[_]]
 res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
 
+scala> classManifest[scala.collection.immutable.List[_]]
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+res2: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[<?>]
+
+scala> classTag[scala.collection.immutable.List[_]]
+res3: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
+
+scala> classManifest[Predef.Set[_]]
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+res4: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set[<?>]
+
+scala> classTag[Predef.Set[_]]
+res5: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set
+
+scala> classManifest[scala.collection.immutable.Set[_]]
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+res6: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set[<?>]
+
+scala> classTag[scala.collection.immutable.Set[_]]
+res7: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set
+
 scala> 
diff --git a/test/files/run/t6329_repl.scala b/test/files/run/t6329_repl.scala
index add6d64..f210d65 100644
--- a/test/files/run/t6329_repl.scala
+++ b/test/files/run/t6329_repl.scala
@@ -2,7 +2,14 @@ import scala.tools.partest.ReplTest
 
 object Test extends ReplTest {
   def code = """
-    |classManifest[List[_]]
-    |scala.reflect.classTag[List[_]]
-    |""".stripMargin
+    |import scala.reflect.classTag
+    |classManifest[scala.List[_]]
+    |classTag[scala.List[_]]
+    |classManifest[scala.collection.immutable.List[_]]
+    |classTag[scala.collection.immutable.List[_]]
+    |classManifest[Predef.Set[_]]
+    |classTag[Predef.Set[_]]
+    |classManifest[scala.collection.immutable.Set[_]]
+    |classTag[scala.collection.immutable.Set[_]]
+  """.stripMargin
 }
diff --git a/test/files/run/t6329_repl_bug.check b/test/files/run/t6329_repl_bug.check
index 8663184..44c41cf 100644
--- a/test/files/run/t6329_repl_bug.check
+++ b/test/files/run/t6329_repl_bug.check
@@ -1,11 +1,15 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> import scala.reflect.runtime._
+import scala.reflect.runtime._
 
 scala> classManifest[List[_]]
-warning: there were 1 deprecation warnings; re-run with -deprecation for details
-res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[Any]
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[<?>]
 
 scala> scala.reflect.classTag[List[_]]
 res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
diff --git a/test/files/run/t6329_repl_bug.pending b/test/files/run/t6329_repl_bug.scala
similarity index 100%
rename from test/files/run/t6329_repl_bug.pending
rename to test/files/run/t6329_repl_bug.scala
diff --git a/test/files/run/t6329_vanilla.check b/test/files/run/t6329_vanilla.check
index 8282afa..ad8f4b5 100644
--- a/test/files/run/t6329_vanilla.check
+++ b/test/files/run/t6329_vanilla.check
@@ -1,2 +1,8 @@
-scala.collection.immutable.List[Any]
+scala.collection.immutable.List[<?>]
 scala.collection.immutable.List
+scala.collection.immutable.List[<?>]
+scala.collection.immutable.List
+scala.collection.immutable.Set[<?>]
+scala.collection.immutable.Set
+scala.collection.immutable.Set[<?>]
+scala.collection.immutable.Set
diff --git a/test/files/run/t6329_vanilla.scala b/test/files/run/t6329_vanilla.scala
index a31cd5c..ec84f1f 100644
--- a/test/files/run/t6329_vanilla.scala
+++ b/test/files/run/t6329_vanilla.scala
@@ -1,4 +1,13 @@
+import scala.reflect.classTag
+
+ at deprecated("Suppress warnings", since="2.11")
 object Test extends App {
-  println(classManifest[List[_]])
-  println(scala.reflect.classTag[List[_]])
-}
\ No newline at end of file
+  println(classManifest[scala.List[_]])
+  println(classTag[scala.List[_]])
+  println(classManifest[scala.collection.immutable.List[_]])
+  println(classTag[scala.collection.immutable.List[_]])
+  println(classManifest[Predef.Set[_]])
+  println(classTag[Predef.Set[_]])
+  println(classManifest[scala.collection.immutable.Set[_]])
+  println(classTag[scala.collection.immutable.Set[_]])
+}
diff --git a/test/files/run/t6329_vanilla_bug.check b/test/files/run/t6329_vanilla_bug.check
index 8282afa..640d168 100644
--- a/test/files/run/t6329_vanilla_bug.check
+++ b/test/files/run/t6329_vanilla_bug.check
@@ -1,2 +1,3 @@
-scala.collection.immutable.List[Any]
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+scala.collection.immutable.List[<?>]
 scala.collection.immutable.List
diff --git a/test/files/run/t6329_vanilla_bug.pending b/test/files/run/t6329_vanilla_bug.scala
similarity index 100%
rename from test/files/run/t6329_vanilla_bug.pending
rename to test/files/run/t6329_vanilla_bug.scala
diff --git a/test/files/run/t6331.scala b/test/files/run/t6331.scala
index 4e43a76..d9d46f1 100644
--- a/test/files/run/t6331.scala
+++ b/test/files/run/t6331.scala
@@ -1,9 +1,4 @@
-import scala.tools.partest._
-import java.io._
-import scala.tools.nsc._
-import scala.tools.nsc.util.CommandLineParser
-import scala.tools.nsc.{Global, Settings, CompilerCommand}
-import scala.tools.nsc.reporters.ConsoleReporter
+import scala.tools.partest.DirectTest
 
 // Test of Constant#equals, which must account for floating point intricacies.
 object Test extends DirectTest {
diff --git a/test/files/run/t6331b.scala b/test/files/run/t6331b.scala
index f966abe..3a560ea 100644
--- a/test/files/run/t6331b.scala
+++ b/test/files/run/t6331b.scala
@@ -1,12 +1,5 @@
-import scala.tools.partest._
-import java.io._
-import scala.tools.nsc._
-import scala.tools.nsc.util.CommandLineParser
-import scala.tools.nsc.{Global, Settings, CompilerCommand}
-import scala.tools.nsc.reporters.ConsoleReporter
-
-import scala.tools.partest.trace
-import scala.util.control.Exception._
+import scala.tools.partest.Util.trace
+import scala.util.control.Exception.allCatch
 
 
 object Test extends App {
diff --git a/test/files/run/t6355.check b/test/files/run/t6355.check
new file mode 100644
index 0000000..ce74ab3
--- /dev/null
+++ b/test/files/run/t6355.check
@@ -0,0 +1,2 @@
+bippy(x: Int) called with x = 42
+bippy(x: String) called with x = "42"
diff --git a/test/files/run/t6355.scala b/test/files/run/t6355.scala
new file mode 100644
index 0000000..f192139
--- /dev/null
+++ b/test/files/run/t6355.scala
@@ -0,0 +1,17 @@
+import scala.language.dynamics
+
+class A extends Dynamic {
+  def applyDynamic(method: String): B = new B(method)
+}
+class B(method: String) {
+  def apply(x: Int) = s"$method(x: Int) called with x = $x"
+  def apply(x: String) = s"""$method(x: String) called with x = "$x""""
+}
+
+object Test {
+  def main(args: Array[String]): Unit = {
+    val x = new A
+    println(x.bippy(42))
+    println(x.bippy("42"))
+  }
+}
diff --git a/test/files/run/t6379.check b/test/files/run/t6379.check
new file mode 100644
index 0000000..3e5dfec
--- /dev/null
+++ b/test/files/run/t6379.check
@@ -0,0 +1,14 @@
+compile-time
+uninitialized close: List(class IOException)
+initialized close: List(class IOException)
+uninitialized productElement: List(class IndexOutOfBoundsException)
+initialized productElement: List(class IndexOutOfBoundsException)
+uninitialized read: List(class IOException)
+initialized read: List(class IOException)
+runtime
+uninitialized close: List(class IOException)
+initialized close: List(class IOException)
+uninitialized productElement: List(class IndexOutOfBoundsException)
+initialized productElement: List(class IndexOutOfBoundsException)
+uninitialized read: List(class IOException)
+initialized read: List(class IOException)
diff --git a/test/files/run/t6379/Macros_1.scala b/test/files/run/t6379/Macros_1.scala
new file mode 100644
index 0000000..4f3daf4
--- /dev/null
+++ b/test/files/run/t6379/Macros_1.scala
@@ -0,0 +1,26 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+import java.io._
+
+object Macros {
+  def impl(c: Context) = {
+    var messages = List[String]()
+    def println(msg: String) = messages :+= msg
+
+    import c.universe._
+    def test(sym: MethodSymbol): Unit = {
+      println(s"uninitialized ${sym.name}: ${sym.exceptions}")
+      sym.info
+      println(s"initialized ${sym.name}: ${sym.exceptions}")
+    }
+
+    println("compile-time")
+    test(typeOf[Closeable].declaration(TermName("close")).asMethod)
+    test(typeOf[Product1[_]].declaration(TermName("productElement")).asMethod)
+    test(c.mirror.staticClass("Reader").info.decl(TermName("read")).asMethod)
+
+    q"..${messages.map(msg => q"println($msg)")}"
+  }
+
+  def foo: Any = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/t6379/Test_2.scala b/test/files/run/t6379/Test_2.scala
new file mode 100644
index 0000000..8e9c994
--- /dev/null
+++ b/test/files/run/t6379/Test_2.scala
@@ -0,0 +1,22 @@
+import java.io._
+import scala.reflect.runtime.universe._
+
+class Reader(fname: String) {
+  private val in = new BufferedReader(new FileReader(fname))
+  @throws[IOException]("if the file doesn't exist")
+  def read() = in.read()
+}
+
+object Test extends App {
+  def test(sym: MethodSymbol): Unit = {
+    println(s"uninitialized ${sym.name}: ${sym.exceptions}")
+    sym.info
+    println(s"initialized ${sym.name}: ${sym.exceptions}")
+  }
+
+  Macros.foo
+  println("runtime")
+  test(typeOf[Closeable].decl(TermName("close")).asMethod)
+  test(typeOf[Product1[_]].decl(TermName("productElement")).asMethod)
+  test(typeOf[Reader].decl(TermName("read")).asMethod)
+}
diff --git a/test/files/run/t6381.check b/test/files/run/t6381.check
new file mode 100644
index 0000000..4ed11d5
--- /dev/null
+++ b/test/files/run/t6381.check
@@ -0,0 +1,19 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> import scala.language.experimental.macros
+import scala.language.experimental.macros
+
+scala> def pos_impl(c: scala.reflect.macros.blackbox.Context): c.Expr[String] = {
+  import c.universe._
+  c.Expr[String](Literal(Constant(c.enclosingPosition.getClass.toString)))
+}
+pos_impl: (c: scala.reflect.macros.blackbox.Context)c.Expr[String]
+
+scala> def pos: String = macro pos_impl
+defined term macro pos: String
+
+scala> pos
+res0: String = class scala.reflect.internal.util.RangePosition
+
+scala> 
diff --git a/test/files/run/t6381.scala b/test/files/run/t6381.scala
new file mode 100644
index 0000000..5a687c1
--- /dev/null
+++ b/test/files/run/t6381.scala
@@ -0,0 +1,15 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+  def code = """
+    |import scala.language.experimental.macros
+    |def pos_impl(c: scala.reflect.macros.blackbox.Context): c.Expr[String] = {
+    |  import c.universe._
+    |  c.Expr[String](Literal(Constant(c.enclosingPosition.getClass.toString)))
+    |}
+    |def pos: String = macro pos_impl
+    |pos
+    |""".stripMargin.trim
+
+  override def extraSettings: String = "-Yrangepos"
+}
diff --git a/test/files/run/t6385.scala b/test/files/run/t6385.scala
new file mode 100644
index 0000000..24fc3cd
--- /dev/null
+++ b/test/files/run/t6385.scala
@@ -0,0 +1,13 @@
+object Test {
+   def main(args: Array[String]) {
+      val y: AA[Int] = C(2)
+      val c: Int = y.x.y
+      assert(c == 2)
+   }
+}
+trait AA[T] extends Any {
+   def x: C[T]
+}
+case class C[T](val y: T) extends AnyVal with AA[T] {
+   def x = this
+}
diff --git a/test/files/run/t6392b.check b/test/files/run/t6392b.check
index b7872f0..9bb9b56 100644
--- a/test/files/run/t6392b.check
+++ b/test/files/run/t6392b.check
@@ -1 +1 @@
-ModuleDef(Modifiers(), newTermName("C")#MOD, Template(List(Select(Ident(scala#PK), newTypeName("AnyRef")#TPE)), emptyValDef, List(DefDef(Modifiers(), nme.CONSTRUCTOR#PCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(newTypeName("C")), tpnme.EMPTY), nme.CONSTRUCTOR#PCTOR), List())), Literal(Constant(())))))))
+ModuleDef(Modifiers(), TermName("C")#MOD, Template(List(Select(Ident(scala#PK), TypeName("AnyRef")#TPE)), noSelfType, List(DefDef(Modifiers(), termNames.CONSTRUCTOR#PCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(TypeName("C")), typeNames.EMPTY), termNames.CONSTRUCTOR#PCTOR), List())), Literal(Constant(())))))))
diff --git a/test/files/run/t6392b.scala b/test/files/run/t6392b.scala
index f69a5aa..e43c1a2 100644
--- a/test/files/run/t6392b.scala
+++ b/test/files/run/t6392b.scala
@@ -5,5 +5,5 @@ import scala.tools.reflect.ToolBox
 object Test extends App {
   val tb = cm.mkToolBox()
   val c = tb.parse("object C")
-  println(showRaw(tb.typeCheck(c), printKinds = true))
+  println(showRaw(tb.typecheck(c), printKinds = true))
 }
\ No newline at end of file
diff --git a/test/files/run/t6394a/Macros_1.scala b/test/files/run/t6394a/Macros_1.scala
index 3d39d3e..376d85b 100644
--- a/test/files/run/t6394a/Macros_1.scala
+++ b/test/files/run/t6394a/Macros_1.scala
@@ -1,10 +1,10 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def impl(c:Context): c.Expr[Any] = {
     import c.universe._
 
-    val selfTree = This(c.enclosingClass.symbol.asModule.moduleClass)
+    val selfTree = This(c.enclosingImpl.symbol.asModule.moduleClass)
     c.Expr[AnyRef](selfTree)
   }
 
diff --git a/test/files/run/t6394b/Macros_1.scala b/test/files/run/t6394b/Macros_1.scala
index 5d93e1c..1a74781 100644
--- a/test/files/run/t6394b/Macros_1.scala
+++ b/test/files/run/t6394b/Macros_1.scala
@@ -1,10 +1,10 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def impl(c:Context): c.Expr[Any] = {
     import c.universe._
 
-    val selfTree = This(tpnme.EMPTY)
+    val selfTree = This(typeNames.EMPTY)
     c.Expr[AnyRef](selfTree)
   }
 
diff --git a/test/files/run/t6406-regextract.check b/test/files/run/t6406-regextract.check
new file mode 100644
index 0000000..88c5a52
--- /dev/null
+++ b/test/files/run/t6406-regextract.check
@@ -0,0 +1,4 @@
+List(1, 3)
+List(1, 3)
+List(1, 3)
+Some(2011) Some(2011)
diff --git a/test/files/run/t6406-regextract.scala b/test/files/run/t6406-regextract.scala
new file mode 100644
index 0000000..83679a5
--- /dev/null
+++ b/test/files/run/t6406-regextract.scala
@@ -0,0 +1,30 @@
+
+object Test extends App {
+  import util.matching._
+  import Regex._
+
+  val r = "(\\d+)".r
+  val q = """(\d)""".r
+  val ns = List("1,2","x","3,4")
+  val u = r.unanchored
+
+  val is = ns collect { case u(x) => x } map { case r(x) => x }
+  println(is)
+  // Match from same pattern
+  val js = (ns map { u findFirstMatchIn _ }).flatten map { case r(x) => x }
+  println(js)
+  // Match not from same pattern
+  val ks = (ns map { q findFirstMatchIn _ }).flatten map { case r(x) => x }
+  println(ks)
+
+  val t = "Last modified 2011-07-15"
+  val p1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r
+  val y1: Option[String] = for {
+    p1(year, month, day) <- p1 findFirstIn t
+  } yield year
+  val y2: Option[String] = for {
+    p1(year, month, day) <- p1 findFirstMatchIn t
+  } yield year
+  println(s"$y1 $y2")
+
+}
diff --git a/test/files/run/t6411a.check b/test/files/run/t6411a.check
new file mode 100644
index 0000000..9226146
--- /dev/null
+++ b/test/files/run/t6411a.check
@@ -0,0 +1,96 @@
+meth = method yg_1
+as seen by Scala reflection: def yg_1[T](y: Y[T]): T
+as seen by Java reflection: public java.lang.Object a$.yg_1(java.lang.Object)
+result = 1
+meth = method yg_1
+as seen by Scala reflection: def yg_1[T](y: Y[T]): T
+as seen by Java reflection: public java.lang.Object a$.yg_1(java.lang.Object)
+result = 1
+meth = method yi_2
+as seen by Scala reflection: def yi_2(y: Y[Int]): Int
+as seen by Java reflection: public int a$.yi_2(java.lang.Integer)
+result = 2
+meth = method yi_2
+as seen by Scala reflection: def yi_2(y: Y[Int]): Int
+as seen by Java reflection: public int a$.yi_2(java.lang.Integer)
+result = class java.lang.IllegalArgumentException: argument type mismatch
+meth = method ys_3
+as seen by Scala reflection: def ys_3(y: Y[String]): String
+as seen by Java reflection: public java.lang.String a$.ys_3(java.lang.String)
+result = class java.lang.IllegalArgumentException: argument type mismatch
+meth = method ys_3
+as seen by Scala reflection: def ys_3(y: Y[String]): String
+as seen by Java reflection: public java.lang.String a$.ys_3(java.lang.String)
+result = 3
+meth = method ya_4
+as seen by Scala reflection: def ya_4(ys: Array[Y[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.ya_4(Y[])
+result = class java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
+meth = method ya_4
+as seen by Scala reflection: def ya_4(ys: Array[Y[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.ya_4(Y[])
+result = List(4)
+meth = method yl_5
+as seen by Scala reflection: def yl_5(ys: List[Y[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.yl_5(scala.collection.immutable.List)
+result = class java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
+meth = method yl_5
+as seen by Scala reflection: def yl_5(ys: List[Y[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.yl_5(scala.collection.immutable.List)
+result = List(5)
+meth = method yni_7
+as seen by Scala reflection: def yni_7(y: => Y[Int]): Int
+as seen by Java reflection: public int a$.yni_7(scala.Function0)
+result = 7
+meth = method yns_8
+as seen by Scala reflection: def yns_8(y: => Y[String]): String
+as seen by Java reflection: public java.lang.String a$.yns_8(scala.Function0)
+result = 8
+meth = method zg_1
+as seen by Scala reflection: def zg_1[T](z: Z[T]): T
+as seen by Java reflection: public java.lang.Object a$.zg_1(Z)
+result = 1
+meth = method zg_1
+as seen by Scala reflection: def zg_1[T](z: Z[T]): T
+as seen by Java reflection: public java.lang.Object a$.zg_1(Z)
+result = 1
+meth = method zi_2
+as seen by Scala reflection: def zi_2(z: Z[Int]): Int
+as seen by Java reflection: public int a$.zi_2(Z)
+result = 2
+meth = method zi_2
+as seen by Scala reflection: def zi_2(z: Z[Int]): Int
+as seen by Java reflection: public int a$.zi_2(Z)
+result = class java.lang.ClassCastException: java.lang.String cannot be cast to java.lang.Integer
+meth = method zs_3
+as seen by Scala reflection: def zs_3(z: Z[String]): String
+as seen by Java reflection: public java.lang.String a$.zs_3(Z)
+result = class java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
+meth = method zs_3
+as seen by Scala reflection: def zs_3(z: Z[String]): String
+as seen by Java reflection: public java.lang.String a$.zs_3(Z)
+result = 3
+meth = method za_4
+as seen by Scala reflection: def za_4(zs: Array[Z[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.za_4(Z[])
+result = class java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
+meth = method za_4
+as seen by Scala reflection: def za_4(zs: Array[Z[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.za_4(Z[])
+result = List(4)
+meth = method zl_5
+as seen by Scala reflection: def zl_5(zs: List[Z[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.zl_5(scala.collection.immutable.List)
+result = class java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
+meth = method zl_5
+as seen by Scala reflection: def zl_5(zs: List[Z[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.zl_5(scala.collection.immutable.List)
+result = List(5)
+meth = method zni_7
+as seen by Scala reflection: def zni_7(z: => Z[Int]): Int
+as seen by Java reflection: public int a$.zni_7(scala.Function0)
+result = 7
+meth = method zns_8
+as seen by Scala reflection: def zns_8(z: => Z[String]): String
+as seen by Java reflection: public java.lang.String a$.zns_8(scala.Function0)
+result = 8
diff --git a/test/files/run/t6411a.scala b/test/files/run/t6411a.scala
new file mode 100644
index 0000000..46c88d9
--- /dev/null
+++ b/test/files/run/t6411a.scala
@@ -0,0 +1,81 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.language.reflectiveCalls
+
+class Y[T](val i: T) extends AnyVal {
+  override def toString = s"Y($i)"
+}
+class Z[T](val i: T) extends AnyRef {
+  override def toString = s"Z($i)"
+}
+
+object a {
+  def yg_1[T](y: Y[T])           = y.i
+  def yi_2(y: Y[Int])            = y.i
+  def ys_3(y: Y[String])         = y.i
+  def ya_4(ys: Array[Y[String]]) = ys.toList.map(_.i)
+  def yl_5(ys: List[Y[String]])  = ys.map(_.i)
+  def yv_6(ys: Y[String]*)       = ys.toList.map(_.i)
+  def yni_7(y: => Y[Int])        = y.i
+  def yns_8(y: => Y[String])     = y.i
+
+  def zg_1[T](z: Z[T])           = z.i
+  def zi_2(z: Z[Int])            = z.i
+  def zs_3(z: Z[String])         = z.i
+  def za_4(zs: Array[Z[String]]) = zs.toList.map(_.i)
+  def zl_5(zs: List[Z[String]])  = zs.map(_.i)
+  def zv_6(zs: Z[String]*)       = zs.toList.map(_.i)
+  def zni_7(z: => Z[Int])        = z.i
+  def zns_8(z: => Z[String])     = z.i
+}
+
+object Test extends App {
+  def test(methName: String, arg: Any) = {
+    val moduleA = cm.reflect(a)
+    val msym = moduleA.symbol.info.decl(TermName(methName)).asMethod
+    println(s"meth = $msym")
+    val mmirror = moduleA.reflectMethod(msym)
+    val mresult =
+      try { mmirror(arg) }
+      catch {
+        case ex: Exception =>
+          val ex1 = scala.reflect.runtime.ReflectionUtils.unwrapThrowable(ex)
+          s"${ex1.getClass}: ${ex1.getMessage}"
+      }
+    println(s"as seen by Scala reflection: ${msym.asInstanceOf[scala.reflect.internal.Symbols#Symbol].defString}")
+    println(s"as seen by Java reflection: ${mmirror.asInstanceOf[{val jmeth: java.lang.reflect.Method}].jmeth}")
+    println(s"result = $mresult")
+  }
+
+  test("yg_1", new Y(1))
+  test("yg_1", new Y("1"))
+  test("yi_2", new Y(2))
+  test("yi_2", new Y("2"))
+  test("ys_3", new Y(3))
+  test("ys_3", new Y("3"))
+  test("ya_4", Array(new Y(4)))
+  test("ya_4", Array(new Y("4")))
+  test("yl_5", List(new Y(5)))
+  test("yl_5", List(new Y("5")))
+  // FIXME: disabled because of SI-7056
+  // test("yv_6", new Y(6))
+  // test("yv_6", new Y("6"))
+  test("yni_7", new Y(7))
+  test("yns_8", new Y("8"))
+
+  test("zg_1", new Z(1))
+  test("zg_1", new Z("1"))
+  test("zi_2", new Z(2))
+  test("zi_2", new Z("2"))
+  test("zs_3", new Z(3))
+  test("zs_3", new Z("3"))
+  test("za_4", Array(new Z(4)))
+  test("za_4", Array(new Z("4")))
+  test("zl_5", List(new Z(5)))
+  test("zl_5", List(new Z("5")))
+  // FIXME: disabled because of SI-7056
+  // test("zv_6", new Z(6))
+  // test("zv_6", new Z("6"))
+  test("zni_7", new Z(7))
+  test("zns_8", new Z("8"))
+}
\ No newline at end of file
diff --git a/test/files/run/t6411b.check b/test/files/run/t6411b.check
new file mode 100644
index 0000000..e20bed6
--- /dev/null
+++ b/test/files/run/t6411b.check
@@ -0,0 +1 @@
+Bar(Foo(3))
diff --git a/test/files/run/t6411b.scala b/test/files/run/t6411b.scala
new file mode 100644
index 0000000..b5c3bf8
--- /dev/null
+++ b/test/files/run/t6411b.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+
+case class Foo(n: Int) extends AnyVal
+case class Bar(foo: Foo)
+
+object Test extends App {
+  val mirror = runtimeMirror(getClass.getClassLoader)
+  val cm = mirror.reflectClass(typeOf[Bar].typeSymbol.asClass)
+  val ctor = typeOf[Bar].decl(termNames.CONSTRUCTOR).asMethod
+  val ctorm = cm.reflectConstructor(ctor)
+  println(ctorm(Foo(3)))
+}
\ No newline at end of file
diff --git a/test/files/run/t6439.check b/test/files/run/t6439.check
index 178ea73..f8d5b3a 100644
--- a/test/files/run/t6439.check
+++ b/test/files/run/t6439.check
@@ -1,13 +1,11 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> class A
 defined class A
 
 scala> object A // warn
-defined module A
+defined object A
 warning: previously defined class A is not a companion to object A.
 Companions must be defined together; you may wish to use :paste mode for this.
 
@@ -15,15 +13,15 @@ scala> trait B
 defined trait B
 
 scala> object B // warn
-defined module B
+defined object B
 warning: previously defined trait B is not a companion to object B.
 Companions must be defined together; you may wish to use :paste mode for this.
 
 scala> object C
-defined module C
+defined object C
 
 scala> object Bippy
-defined module Bippy
+defined object Bippy
 
 scala> class C // warn
 defined class C
@@ -40,13 +38,13 @@ scala> val D = 0 // no warn
 D: Int = 0
 
 scala> object E
-defined module E
+defined object E
 
 scala> var E = 0 // no warn
 E: Int = 0
 
 scala> object F
-defined module F
+defined object F
 
 scala> type F = Int // no warn
 defined type alias F
@@ -58,9 +56,18 @@ scala> :power
 ** global._, definitions._ also imported    **
 ** Try  :help, :vals, power.<tab>           **
 
-scala> intp("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
+scala> object lookup {
+  import intp._
+  def apply(name: String): Symbol       = types(name) orElse terms(name)
+  def types(name: String): Symbol       = replScope lookup (TypeName(name)) orElse getClassIfDefined(name)
+  def terms(name: String): Symbol       = replScope lookup (TermName(name)) orElse getModuleIfDefined(name)
+  def types[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol
+  def terms[T: global.TypeTag] : Symbol = typeOf[T].termSymbol
+  def apply[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol
+}
+defined object lookup
+
+scala> lookup("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
 res0: $r.intp.global.Symbol = type F
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t6439.scala b/test/files/run/t6439.scala
index 70a2dba..53155a7 100644
--- a/test/files/run/t6439.scala
+++ b/test/files/run/t6439.scala
@@ -1,6 +1,7 @@
 import scala.tools.partest.ReplTest
 
 object Test extends ReplTest {
+
   def code = """
 class A
 object A // warn
@@ -17,6 +18,15 @@ var E = 0 // no warn
 object F
 type F = Int // no warn
 :power
-intp("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
+object lookup {
+  import intp._
+  def apply(name: String): Symbol       = types(name) orElse terms(name)
+  def types(name: String): Symbol       = replScope lookup (TypeName(name)) orElse getClassIfDefined(name)
+  def terms(name: String): Symbol       = replScope lookup (TermName(name)) orElse getModuleIfDefined(name)
+  def types[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol
+  def terms[T: global.TypeTag] : Symbol = typeOf[T].termSymbol
+  def apply[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol
+}
+lookup("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
              """
 }
diff --git a/test/files/run/t6440.check b/test/files/run/t6440.check
index 806279f..27d5d13 100644
--- a/test/files/run/t6440.check
+++ b/test/files/run/t6440.check
@@ -1,4 +1,4 @@
-pos: source-newSource1.scala,line-9,offset=109 bad symbolic reference. A signature in U.class refers to term pack1
-in package <root> which is not available.
-It may be completely missing from the current classpath, or the version on
-the classpath might be incompatible with the version used when compiling U.class. ERROR
+pos: source-newSource1.scala,line-9,offset=109 bad symbolic reference to <root>.pack1 encountered in class file 'U.class'.
+Cannot access term pack1 in package <root>. The current classpath may be
+missing a definition for <root>.pack1, or U.class may have been compiled against a version that's
+incompatible with the one found on the current classpath. ERROR
diff --git a/test/files/run/t6440b.check b/test/files/run/t6440b.check
index 9771ce5..0b642c2 100644
--- a/test/files/run/t6440b.check
+++ b/test/files/run/t6440b.check
@@ -1,4 +1,4 @@
-pos: NoPosition bad symbolic reference. A signature in U.class refers to type T
-in package pack1 which is not available.
-It may be completely missing from the current classpath, or the version on
-the classpath might be incompatible with the version used when compiling U.class. ERROR
+pos: NoPosition bad symbolic reference to pack1.T encountered in class file 'U.class'.
+Cannot access type T in package pack1. The current classpath may be
+missing a definition for pack1.T, or U.class may have been compiled against a version that's
+incompatible with the one found on the current classpath. ERROR
diff --git a/test/files/run/t6443.scala b/test/files/run/t6443.scala
index 67fe2ca..3ab8c34 100644
--- a/test/files/run/t6443.scala
+++ b/test/files/run/t6443.scala
@@ -1,3 +1,5 @@
+import scala.language.existentials
+
 class Base
 class Derived extends Base
 
diff --git a/test/files/run/t6448.check b/test/files/run/t6448.check
new file mode 100644
index 0000000..9401568
--- /dev/null
+++ b/test/files/run/t6448.check
@@ -0,0 +1,32 @@
+
+=List.collect=
+f(1)
+f(2)
+List(1)
+
+=List.collectFirst=
+f(1)
+Some(1)
+
+=Option.collect=
+f(1)
+Some(1)
+
+=Option.collect=
+f(2)
+None
+
+=Stream.collect=
+f(1)
+f(2)
+List(1)
+
+=Stream.collectFirst=
+f(1)
+Some(1)
+
+=ParVector.collect=
+(ParVector(1),2)
+
+=ParArray.collect=
+(ParArray(1),2)
diff --git a/test/files/run/t6448.scala b/test/files/run/t6448.scala
new file mode 100644
index 0000000..d0faaa9
--- /dev/null
+++ b/test/files/run/t6448.scala
@@ -0,0 +1,61 @@
+// Tests to show that various `collect` functions avoid calling
+// both `PartialFunction#isDefinedAt` and `PartialFunction#apply`.
+//
+object Test {
+  def f(i: Int) = { println("f(" + i + ")"); true }
+  class Counter {
+    var count = 0
+    def apply(i: Int) = synchronized {count += 1; true}
+  }
+
+  def testing(label: String)(body: => Any) {
+    println(s"\n=$label=")
+    println(body)
+  }
+
+  def main(args: Array[String]) {
+    testing("List.collect")(List(1, 2) collect { case x if f(x) && x < 2 => x})
+    testing("List.collectFirst")(List(1, 2) collectFirst { case x if f(x) && x < 2 => x})
+    testing("Option.collect")(Some(1) collect { case x if f(x) && x < 2 => x})
+    testing("Option.collect")(Some(2) collect { case x if f(x) && x < 2 => x})
+    testing("Stream.collect")((Stream(1, 2).collect { case x if f(x) && x < 2 => x}).toList)
+    testing("Stream.collectFirst")(Stream.continually(1) collectFirst { case x if f(x) && x < 2 => x})
+
+    import collection.parallel.ParIterable
+    import collection.parallel.immutable.ParVector
+    import collection.parallel.mutable.ParArray
+    testing("ParVector.collect") {
+      val counter = new Counter()
+      (ParVector(1, 2) collect { case x if counter(x) && x < 2 => x}, counter.synchronized(counter.count))
+    }
+
+    testing("ParArray.collect") {
+      val counter = new Counter()
+      (ParArray(1, 2) collect { case x if counter(x) && x < 2 => x}, counter.synchronized(counter.count))
+    }
+
+    object PendingTests {
+      testing("Iterator.collect")((Iterator(1, 2) collect { case x if f(x) && x < 2 => x}).toList)
+
+      testing("List.view.collect")((List(1, 2).view collect { case x if f(x) && x < 2 => x}).force)
+
+      // This would do the trick in Future.collect, but I haven't added this yet as there is a tradeoff
+      // with extra allocations to consider.
+      //
+      // pf.lift(v) match {
+      //   case Some(x) => p success x
+      //   case None    => fail(v)
+      // }
+      testing("Future.collect") {
+        import concurrent.ExecutionContext.Implicits.global
+        import concurrent.Await
+        import concurrent.duration.Duration
+        val result = concurrent.Future(1) collect { case x if f(x) => x}
+        Await.result(result, Duration.Inf)
+      }
+
+      // TODO Future.{onSuccess, onFailure, recoverWith, andThen}
+    }
+
+  }
+}
diff --git a/test/files/run/t6467.scala b/test/files/run/t6467.scala
new file mode 100644
index 0000000..dc93b69
--- /dev/null
+++ b/test/files/run/t6467.scala
@@ -0,0 +1,20 @@
+
+
+
+
+import collection._
+
+
+
+object Test extends App {
+
+  def compare(s1: String, s2: String) {
+    assert(s1 == s2, s1 + "\nvs.\n" + s2)
+  }
+
+  compare(List(1, 2, 3, 4).aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, "1234")
+  compare(List(1, 2, 3, 4).par.aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, "1234")
+  compare(Seq(0 until 100: _*).aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, (0 until 100).mkString)
+  compare(Seq(0 until 100: _*).par.aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, (0 until 100).mkString)
+
+}
\ No newline at end of file
diff --git a/test/files/run/t6481.check b/test/files/run/t6481.check
index 7ec2963..df40722 100644
--- a/test/files/run/t6481.check
+++ b/test/files/run/t6481.check
@@ -1,3 +1,4 @@
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
 delayed init
 new foo(1, 2)
 delayed init
diff --git a/test/files/run/t6488.check b/test/files/run/t6488.check
deleted file mode 100644
index 3582111..0000000
--- a/test/files/run/t6488.check
+++ /dev/null
@@ -1 +0,0 @@
-Success
diff --git a/test/files/run/t6488.scala b/test/files/run/t6488.scala
index 487614e..e234876 100644
--- a/test/files/run/t6488.scala
+++ b/test/files/run/t6488.scala
@@ -1,11 +1,64 @@
-import sys.process._
+import scala.sys.process._
+import scala.util.Try
+import scala.util.Properties.{ javaHome, javaClassPath }
+import java.io.{ File, IOException }
+import java.util.concurrent.CountDownLatch
+import java.util.concurrent.TimeUnit._
+import java.util.concurrent.atomic._
+
 object Test {
+  /*
   // Program that prints "Success" if the command was successfully run then destroyed
   // It will silently pass if the command "/bin/ls" does not exist
-  // It will fail due to the uncatchable exception in t6488 race condition
+  // It will fail due to the uncatchable exception in t6488 race condition,
+  // i.e., if any uncaught exceptions on spawned threads are printed.
   def main(args: Array[String]) {
     try Process("/bin/ls").run(ProcessLogger { _ => () }).destroy
     catch { case _ => () }
     println("Success")
   }
+  */
+
+  // Show that no uncaught exceptions are thrown on spawned I/O threads
+  // when the process is destroyed.  The default handler will print
+  // stack traces in the failing case.
+  def main(args: Array[String]) {
+    if (args.nonEmpty && args(0) == "data")
+      data()
+    else
+      test()          // args(0) == "jvm"
+  }
+
+  // fork the data spewer, wait for input, then destroy the process
+  def test() {
+    val f = new File(javaHome, "bin").listFiles.sorted filter (_.getName startsWith "java") find (_.canExecute) getOrElse {
+      // todo signal test runner that test is skipped
+      new File("/bin/ls")  // innocuous
+    }
+    //Process(f.getAbsolutePath).run(ProcessLogger { _ => () }).destroy
+    val reading = new CountDownLatch(1)
+    val count   = new AtomicInteger
+    def counted = count.get
+    val command = s"${f.getAbsolutePath} -classpath ${javaClassPath} Test data"
+    Try {
+      Process(command) run ProcessLogger { (s: String) =>
+        //Console println s"[[$s]]"     // java help
+        count.getAndIncrement
+        reading.countDown
+        Thread.`yield`()
+      }
+    } foreach { (p: Process) =>
+      val ok = reading.await(10, SECONDS)
+      if (!ok) Console println "Timed out waiting for process output!"
+      p.destroy()
+    }
+    //Console println s"Read count $counted lines"
+  }
+
+  // spew something
+  def data() {
+    def filler = "." * 100
+    for (i <- 1 to 1000)
+      Console println s"Outputting data line $i $filler"
+  }
 }
diff --git a/test/files/run/t6507.check b/test/files/run/t6507.check
new file mode 100644
index 0000000..3536c42
--- /dev/null
+++ b/test/files/run/t6507.check
@@ -0,0 +1,24 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> :silent
+Switched off result printing.
+
+scala> class A { override def toString() = { println("!"); "A" } }
+
+scala> val a = new A
+
+scala> var b: A = new A
+
+scala> b = new A
+
+scala> new A
+
+scala> :silent
+Switched on result printing.
+
+scala> res0
+!
+res1: A = A
+
+scala> 
diff --git a/test/files/run/t6507.scala b/test/files/run/t6507.scala
new file mode 100644
index 0000000..25f0a73
--- /dev/null
+++ b/test/files/run/t6507.scala
@@ -0,0 +1,14 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+  def code = """
+:silent
+class A { override def toString() = { println("!"); "A" } }
+val a = new A
+var b: A = new A
+b = new A
+new A
+:silent
+res0
+"""
+}
diff --git a/test/files/run/t6548.check b/test/files/run/t6548.check
new file mode 100644
index 0000000..5dfcb12
--- /dev/null
+++ b/test/files/run/t6548.check
@@ -0,0 +1,2 @@
+false
+List(JavaAnnotationWithNestedEnum_1(value = VALUE))
diff --git a/test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java b/test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java
new file mode 100644
index 0000000..32004de
--- /dev/null
+++ b/test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java
@@ -0,0 +1,17 @@
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Target({ElementType.ANNOTATION_TYPE, ElementType.METHOD, ElementType.FIELD,
+    ElementType.TYPE, ElementType.PARAMETER})
+@Retention(RetentionPolicy.RUNTIME)
+public @interface JavaAnnotationWithNestedEnum_1
+{
+    public Value value() default Value.VALUE;
+
+    public enum Value
+    {
+        VALUE;
+    }
+}
\ No newline at end of file
diff --git a/test/files/run/t6548/Test_2.scala b/test/files/run/t6548/Test_2.scala
new file mode 100644
index 0000000..cb5abd9
--- /dev/null
+++ b/test/files/run/t6548/Test_2.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+class Bean {
+  @JavaAnnotationWithNestedEnum_1(JavaAnnotationWithNestedEnum_1.Value.VALUE)
+  def value = 1
+}
+
+object Test extends App {
+  println(cm.staticClass("Bean").isCaseClass)
+  println(typeOf[Bean].decl(TermName("value")).annotations)
+}
diff --git a/test/files/run/t6549.check b/test/files/run/t6549.check
index bc78aac..d5dfc5e 100644
--- a/test/files/run/t6549.check
+++ b/test/files/run/t6549.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> case class `X"`(var xxx: Any)
 defined class X$u0022
 
@@ -28,5 +26,3 @@ scala> val `"` = 0
 ": Int = 0
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t6554.check b/test/files/run/t6554.check
new file mode 100644
index 0000000..6e0af7b
--- /dev/null
+++ b/test/files/run/t6554.check
@@ -0,0 +1 @@
+public java.lang.Object Bar.minBy(java.lang.Object) / public java.lang.Object Bar.minBy(java.lang.Object)
diff --git a/test/files/run/t6554.scala b/test/files/run/t6554.scala
new file mode 100644
index 0000000..5d29d16
--- /dev/null
+++ b/test/files/run/t6554.scala
@@ -0,0 +1,11 @@
+trait Foo[A] {
+  def minBy[B](b: B): A = ???
+}
+ 
+class Bar extends Foo[Int]
+
+object Test extends App {
+   val sigs = classOf[Bar].getDeclaredMethods.map(m => s"${m.toString} / ${m.toGenericString}").sorted
+   println(sigs.mkString("\n"))
+}
+// Was public java.lang.Object Bar.minBy(java.lang.Object) / public <B> int Bar.minBy(B)
diff --git a/test/files/run/t6555.check b/test/files/run/t6555.check
index a18a8e8..9ac115a 100644
--- a/test/files/run/t6555.check
+++ b/test/files/run/t6555.check
@@ -7,14 +7,14 @@ package <empty> {
     };
     private[this] val f: Int => Int = {
       @SerialVersionUID(0) final <synthetic> class $anonfun extends scala.runtime.AbstractFunction1$mcII$sp with Serializable {
-        def <init>(): anonymous class $anonfun = {
+        def <init>(): <$anon: Int => Int> = {
           $anonfun.super.<init>();
           ()
         };
         final def apply(param: Int): Int = $anonfun.this.apply$mcII$sp(param);
         <specialized> def apply$mcII$sp(param: Int): Int = param
       };
-      (new anonymous class $anonfun(): Int => Int)
+      (new <$anon: Int => Int>(): Int => Int)
     };
     <stable> <accessor> def f(): Int => Int = Foo.this.f
   }
diff --git a/test/files/run/t6555.scala b/test/files/run/t6555.scala
index b1a6137..cc0e4d1 100644
--- a/test/files/run/t6555.scala
+++ b/test/files/run/t6555.scala
@@ -3,7 +3,7 @@ import java.io.{Console => _, _}
 
 object Test extends DirectTest {
 
-  override def extraSettings: String = "-usejavacp -Xprint:specialize -d " + testOutput.path
+  override def extraSettings: String = "-usejavacp -Xprint:specialize -Ydelambdafy:inline -d " + testOutput.path
 
   override def code = "class Foo { val f = (param: Int) => param } "
 
diff --git a/test/files/run/t657.scala b/test/files/run/t657.scala
index a972609..e76b029 100644
--- a/test/files/run/t657.scala
+++ b/test/files/run/t657.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ implicitConversions }
 abstract class BaseList {
   type Node <: NodeImpl;
   implicit def convertNode(ni : NodeImpl) = ni.asInstanceOf[Node];
diff --git a/test/files/run/t6572/bar_1.scala b/test/files/run/t6572/bar_1.scala
deleted file mode 100644
index 5518ced..0000000
--- a/test/files/run/t6572/bar_1.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-package bar
-
-abstract class IntBase[V] extends Base[Int, V]
-
-class DefaultIntBase[V <: IntProvider] extends IntBase[V] {
-  override protected def hashCode(key: Int) = key
-}
-
-trait IntProvider {
-  def int: Int
-}
-
-abstract class Base[@specialized K, V] {
-
-  protected def hashCode(key: K) = key.hashCode
-
-  def get(key: K): V = throw new RuntimeException
-
-}
\ No newline at end of file
diff --git a/test/files/run/t6572/foo_2.scala b/test/files/run/t6572/foo_2.scala
deleted file mode 100644
index 465f0b7..0000000
--- a/test/files/run/t6572/foo_2.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-//package foo
-
-import bar._
-
-class FooProvider extends IntProvider {
-  def int = 3
-}
-
-class Wrapper(users: DefaultIntBase[FooProvider]) {
-  final def user(userId: Int) = users.get(userId)
-}
-
-object Test {
-  def main(args: Array[String]) {
-    new Wrapper(new DefaultIntBase)
-  }
-}
\ No newline at end of file
diff --git a/test/files/run/t6574b.check b/test/files/run/t6574b.check
new file mode 100644
index 0000000..e10fa4f
--- /dev/null
+++ b/test/files/run/t6574b.check
@@ -0,0 +1 @@
+List(5, 4, 3, 2, 1)
diff --git a/test/files/run/t6574b.scala b/test/files/run/t6574b.scala
new file mode 100644
index 0000000..df329a3
--- /dev/null
+++ b/test/files/run/t6574b.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+  implicit class AnyOps(val i: Int) extends AnyVal {
+    private def parentsOf(x: Int): List[Int] = if (x == 0) Nil else x :: parentsOf(x - 1)
+    def parents: List[Int] = parentsOf(i)
+  }
+  println((5).parents)
+}
diff --git a/test/files/run/t6591_1.check b/test/files/run/t6591_1.check
index b6cb6c2..d1d448f 100644
--- a/test/files/run/t6591_1.check
+++ b/test/files/run/t6591_1.check
@@ -1 +1 @@
-Block(List(ValDef(Modifiers(), newTermName("v"), Select(Ident(A), newTypeName("I")), Select(Ident(A), newTermName("impl")))), Ident(newTermName("v")))
+Block(List(ValDef(Modifiers(), TermName("v"), Select(Ident(A), TypeName("I")), Select(Ident(A), TermName("impl")))), Ident(TermName("v")))
diff --git a/test/files/run/t6591_2.check b/test/files/run/t6591_2.check
index b2d5797..a2930b1 100644
--- a/test/files/run/t6591_2.check
+++ b/test/files/run/t6591_2.check
@@ -1 +1 @@
-Block(List(ValDef(Modifiers(), newTermName("v"), SelectFromTypeTree(Ident(A), newTypeName("I")), Select(Apply(Select(New(Ident(A)), nme.CONSTRUCTOR), List()), newTermName("impl")))), Ident(newTermName("v")))
+Block(List(ValDef(Modifiers(), TermName("v"), SelectFromTypeTree(Ident(A), TypeName("I")), Select(Apply(Select(New(Ident(A)), termNames.CONSTRUCTOR), List()), TermName("impl")))), Ident(TermName("v")))
diff --git a/test/files/run/t6591_3.check b/test/files/run/t6591_3.check
index a7b594b..362aafd 100644
--- a/test/files/run/t6591_3.check
+++ b/test/files/run/t6591_3.check
@@ -1 +1 @@
-Block(List(ValDef(Modifiers(), newTermName("v"), Select(This(newTypeName("A")), newTypeName("I")), Apply(Select(New(Select(This(newTypeName("A")), newTypeName("I"))), nme.CONSTRUCTOR), List()))), Ident(newTermName("v")))
+Block(List(ValDef(Modifiers(), TermName("v"), Select(This(TypeName("A")), TypeName("I")), Apply(Select(New(Select(This(TypeName("A")), TypeName("I"))), termNames.CONSTRUCTOR), List()))), Ident(TermName("v")))
diff --git a/test/files/run/t6591_5.check b/test/files/run/t6591_5.check
index e0b6d06..4ebc223 100644
--- a/test/files/run/t6591_5.check
+++ b/test/files/run/t6591_5.check
@@ -1 +1 @@
-Expr(Block(List(ValDef(Modifiers(), newTermName("v"), Select(Select(This(newTypeName("A")), newTermName("x")), newTypeName("I")), Select(Ident(scala.Predef), newTermName("$qmark$qmark$qmark")))), Ident(newTermName("v"))))
+Expr(Block(List(ValDef(Modifiers(), TermName("v"), Select(Select(This(TypeName("A")), TermName("x")), TypeName("I")), Select(Ident(scala.Predef), TermName("$qmark$qmark$qmark")))), Ident(TermName("v"))))
diff --git a/test/files/run/t6591_6.check b/test/files/run/t6591_6.check
index 0c4847b..940e202 100644
--- a/test/files/run/t6591_6.check
+++ b/test/files/run/t6591_6.check
@@ -1 +1 @@
-Expr(Block(List(ValDef(Modifiers(), newTermName("v"), Select(Select(Ident(newTermName("A")), newTermName("x")), newTypeName("I")), Select(Ident(scala.Predef), newTermName("$qmark$qmark$qmark")))), Ident(newTermName("v"))))
+Expr(Block(List(ValDef(Modifiers(), TermName("v"), Select(Select(Ident(TermName("A")), TermName("x")), TypeName("I")), Select(Ident(scala.Predef), TermName("$qmark$qmark$qmark")))), Ident(TermName("v"))))
diff --git a/test/files/run/t6591_7.scala b/test/files/run/t6591_7.scala
index b6c8d39..914842e 100644
--- a/test/files/run/t6591_7.scala
+++ b/test/files/run/t6591_7.scala
@@ -1,5 +1,6 @@
 import scala.reflect.runtime.universe._
 import scala.tools.reflect.Eval
+import internal._
 
 object Test extends App {
   locally {
@@ -13,10 +14,10 @@ object Test extends App {
         // blocked by SI-7103, though it's not the focus of this test
         // therefore I'm just commenting out the evaluation
         // println(expr.eval)
-        expr.tree.freeTerms foreach (ft => {
+        freeTerms(expr.tree) foreach (ft => {
           // blocked by SI-7104, though it's not the focus of this test
-          // therefore I'm just commenting out the call to typeSignature
-          // println(s"name = ${ft.name}, sig = ${ft.typeSignature}, stable = ${ft.isStable}")
+          // therefore I'm just commenting out the call to info
+          // println(s"name = ${ft.name}, sig = ${ft.info}, stable = ${ft.isStable}")
           println(s"name = ${ft.name}, stable = ${ft.isStable}")
         })
       }
diff --git a/test/files/run/t6608.check b/test/files/run/t6608.check
new file mode 100644
index 0000000..15628b3
--- /dev/null
+++ b/test/files/run/t6608.check
@@ -0,0 +1 @@
+(C$$yyy,true)
diff --git a/test/files/run/t6608.scala b/test/files/run/t6608.scala
new file mode 100644
index 0000000..2ba9796
--- /dev/null
+++ b/test/files/run/t6608.scala
@@ -0,0 +1,16 @@
+import reflect.runtime.universe
+
+class C {
+  private val yyy: Any = 1
+  @inline def foo = yyy
+}
+
+object Test extends App {
+  import universe._
+  val access = typeOf[C].decls
+    .toList
+    .filter(_.name.toString.endsWith("yyy"))
+    .map(x => (x.name, x.isPrivate))
+  println(access.head)
+}
+
diff --git a/test/files/run/t6611.scala b/test/files/run/t6611.scala
index 0947a48..c295368 100644
--- a/test/files/run/t6611.scala
+++ b/test/files/run/t6611.scala
@@ -7,55 +7,55 @@ object Test extends App {
 
   locally {
     val a = Array("1": Object)
-    val a2 = Array[Object](a: _*)
+    val a2 = Array(a: _*)
     assert(a ne a2)
   }
 
   locally {
     val a = Array(true)
-    val a2 = Array[Boolean](a: _*)
+    val a2 = Array(a: _*)
     assert(a ne a2)
   }
 
   locally {
     val a = Array(1: Short)
-    val a2 = Array[Short](a: _*)
+    val a2 = Array(a: _*)
     assert(a ne a2)
   }
 
   locally {
     val a = Array(1: Byte)
-    val a2 = Array[Byte](a: _*)
+    val a2 = Array(a: _*)
     assert(a ne a2)
   }
 
   locally {
     val a = Array(1)
-    val a2 = Array[Int](a: _*)
+    val a2 = Array(a: _*)
     assert(a ne a2)
   }
 
   locally {
     val a = Array(1L)
-    val a2 = Array[Long](a: _*)
+    val a2 = Array(a: _*)
     assert(a ne a2)
   }
 
   locally {
     val a = Array(1f)
-    val a2 = Array[Float](a: _*)
+    val a2 = Array(a: _*)
     assert(a ne a2)
   }
 
   locally {
     val a = Array(1d)
-    val a2 = Array[Double](a: _*)
+    val a2 = Array(a: _*)
     assert(a ne a2)
   }
 
   locally {
     val a = Array(())
-    val a2 = Array[Unit](a: _*)
+    val a2 = Array(a: _*)
     assert(a ne a2)
   }
 }
diff --git a/test/files/run/t6632.check b/test/files/run/t6632.check
index 1f084b1..26cf061 100644
--- a/test/files/run/t6632.check
+++ b/test/files/run/t6632.check
@@ -1,3 +1,5 @@
 java.lang.IndexOutOfBoundsException: -1
 java.lang.IndexOutOfBoundsException: -2
 java.lang.IndexOutOfBoundsException: -3
+java.lang.IndexOutOfBoundsException: -1
+java.lang.IndexOutOfBoundsException: 5
diff --git a/test/files/run/t6632.scala b/test/files/run/t6632.scala
index c1c8d4a..f338b73 100644
--- a/test/files/run/t6632.scala
+++ b/test/files/run/t6632.scala
@@ -1,29 +1,22 @@
 object Test extends App {
   import collection.mutable.ListBuffer
-  
+
   def newLB = ListBuffer('a, 'b, 'c, 'd, 'e)
 
-  val lb0 = newLB
+  def iiobe[A](f: => A) =
+    try { f }
+    catch { case ex: IndexOutOfBoundsException => println(ex) }
 
-  try {
-    lb0.insert(-1, 'x)
-  } catch {
-    case ex: IndexOutOfBoundsException => println(ex)
-  }
+  val lb0 = newLB
+  iiobe( lb0.insert(-1, 'x) )
 
   val lb1 = newLB
-
-  try {
-    lb1.insertAll(-2, Array('x, 'y, 'z))
-  } catch {
-    case ex: IndexOutOfBoundsException => println(ex)
-  }
+  iiobe( lb1.insertAll(-2, Array('x, 'y, 'z)) )
 
   val lb2 = newLB
+  iiobe( lb2.update(-3, 'u) )
 
-  try {
-    lb2.update(-3, 'u)
-  } catch {
-    case ex: IndexOutOfBoundsException => println(ex)
-  }
-}
\ No newline at end of file
+  val lb3 = newLB
+  iiobe( lb3.updated(-1, 'u) )
+  iiobe( lb3.updated(5, 'u) )
+}
diff --git a/test/files/run/t6646.check b/test/files/run/t6646.check
index b0b7ad3..15715da 100644
--- a/test/files/run/t6646.check
+++ b/test/files/run/t6646.check
@@ -1,4 +1,4 @@
-Found NotNull
+Found NoNull
 Found lower
 Found 2
 A single ident is always a pattern
diff --git a/test/files/run/t6646.scala b/test/files/run/t6646.scala
index 150b0df..a377ac2 100644
--- a/test/files/run/t6646.scala
+++ b/test/files/run/t6646.scala
@@ -1,14 +1,14 @@
 sealed trait ColumnOption
-case object NotNull extends ColumnOption
+case object NoNull extends ColumnOption
 case object PrimaryKey extends ColumnOption
 case object lower extends ColumnOption
 
 object Test {
   def main(args: Array[String]) {
-    val l = List(PrimaryKey, NotNull, lower)
+    val l = List(PrimaryKey, NoNull, lower)
 
     // withFilter must be generated in these
-    for (option @ NotNull <- l) println("Found " + option)
+    for (option @ NoNull <- l) println("Found " + option)
     for (option @ `lower` <- l) println("Found " + option)
     for ((`lower`, i) <- l.zipWithIndex) println("Found " + i)
 
diff --git a/test/files/run/t6662/Macro_1.scala b/test/files/run/t6662/Macro_1.scala
index f373eaa..4198597 100644
--- a/test/files/run/t6662/Macro_1.scala
+++ b/test/files/run/t6662/Macro_1.scala
@@ -1,5 +1,5 @@
 import language.experimental.macros
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Demo {
   def id[T](a: T): T = macro idImpl[T]
diff --git a/test/files/run/t6662/Test_2.scala b/test/files/run/t6662/Test_2.scala
index 03a80b6..82ac54c 100644
--- a/test/files/run/t6662/Test_2.scala
+++ b/test/files/run/t6662/Test_2.scala
@@ -2,7 +2,7 @@
 
 object Test {
    def main(args: Array[String]) {
-      val s = Demo id ()
+      val s = Demo id (())
       println(s)
    }
 }
diff --git a/test/files/run/t6669.scala b/test/files/run/t6669.scala
index b55718b..e18f251 100644
--- a/test/files/run/t6669.scala
+++ b/test/files/run/t6669.scala
@@ -2,7 +2,7 @@ import java.io.{ByteArrayOutputStream, PrintStream}
 
 object Test extends App {
   val baos = new ByteArrayOutputStream()
-  val ps = new PrintStream(baos) 
+  val ps = new PrintStream(baos)
 
   // first test with the default classpath
   (scala.Console withOut ps) {
diff --git a/test/files/run/t6690.check b/test/files/run/t6690.check
new file mode 100644
index 0000000..a92ddc0
--- /dev/null
+++ b/test/files/run/t6690.check
@@ -0,0 +1 @@
+warning: there were 2 deprecation warning(s); re-run with -deprecation for details
diff --git a/test/files/run/t6690.scala b/test/files/run/t6690.scala
index 43ede96..15b1817 100644
--- a/test/files/run/t6690.scala
+++ b/test/files/run/t6690.scala
@@ -1,5 +1,7 @@
 import scala.collection.mutable
 
+import scala.language.{ reflectiveCalls }
+
 object Test extends App {
   def last0(ml: mutable.MutableList[Int]) =
     ml.asInstanceOf[{def last0: mutable.LinkedList[Int]}].last0
diff --git a/test/files/run/t6719.check b/test/files/run/t6719.check
new file mode 100644
index 0000000..6a452c1
--- /dev/null
+++ b/test/files/run/t6719.check
@@ -0,0 +1 @@
+()
diff --git a/test/files/run/t6719.scala b/test/files/run/t6719.scala
new file mode 100644
index 0000000..847929a
--- /dev/null
+++ b/test/files/run/t6719.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+object Test extends App {
+  val tb = cm.mkToolBox()
+  val tree = tb.parse("(); val res = 0")
+  println(tb.eval(tree))
+}
\ No newline at end of file
diff --git a/test/files/run/t6731.scala b/test/files/run/t6731.scala
index 89d212e..12357b9 100644
--- a/test/files/run/t6731.scala
+++ b/test/files/run/t6731.scala
@@ -12,7 +12,7 @@ abstract class MonoDynamic extends Dynamic {
   def applyDynamic(name: String)(args: Any*): String                = show(this + "." + name + mkArgs(args: _*))
   def applyDynamicNamed(name: String)(args: (String, Any)*): String = show(this + "." + name + mkArgs(args: _*))
 
-  override def toString = this.getClass.getName split '.' last
+  override def toString = (this.getClass.getName split '.').last
 }
 
 object Mono extends MonoDynamic {
diff --git a/test/files/run/t6732.check b/test/files/run/t6732.check
new file mode 100644
index 0000000..016c6e5
--- /dev/null
+++ b/test/files/run/t6732.check
@@ -0,0 +1,4 @@
+scala#PK: true, false, true, false
+scala#PKC: false, true, true, true
+scala.collection.immutable.List#MOD: true, false, false, false
+scala.collection.immutable.List#MODC: false, true, false, false
diff --git a/test/files/run/t6732.scala b/test/files/run/t6732.scala
new file mode 100644
index 0000000..fdc1ab0
--- /dev/null
+++ b/test/files/run/t6732.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import definitions._
+
+object Test extends App {
+  def test(sym: Symbol): Unit = {
+    println(s"${showRaw(sym, printKinds = true)}: ${sym.isModule}, ${sym.isModuleClass}, ${sym.isPackage}, ${sym.isPackageClass}")
+  }
+  test(ScalaPackage)
+  test(ScalaPackageClass)
+  test(ListModule)
+  test(ListModule.moduleClass)
+}
\ No newline at end of file
diff --git a/test/files/run/t6733.check b/test/files/run/t6733.check
new file mode 100644
index 0000000..aeb595f
--- /dev/null
+++ b/test/files/run/t6733.check
@@ -0,0 +1,27 @@
+method $init$: isPrivateThis = false, isProtectedThis = false
+value pri1a: isPrivateThis = true, isProtectedThis = false
+method pri2a: isPrivateThis = true, isProtectedThis = false
+variable pri3a: isPrivateThis = true, isProtectedThis = false
+value pri4a: isPrivateThis = true, isProtectedThis = false
+lazy value pri4a: isPrivateThis = true, isProtectedThis = false
+type Pri5a: isPrivateThis = true, isProtectedThis = false
+class Pri6: isPrivateThis = true, isProtectedThis = false
+trait Pri7: isPrivateThis = true, isProtectedThis = false
+object Pri8: isPrivateThis = true, isProtectedThis = false
+value pro1a: isPrivateThis = false, isProtectedThis = true
+value pro1a: isPrivateThis = true, isProtectedThis = false
+value pro1b: isPrivateThis = false, isProtectedThis = true
+method pro2a: isPrivateThis = false, isProtectedThis = true
+method pro2b: isPrivateThis = false, isProtectedThis = true
+method pro3a: isPrivateThis = false, isProtectedThis = true
+method pro3a_=: isPrivateThis = false, isProtectedThis = true
+variable pro3a: isPrivateThis = true, isProtectedThis = false
+method pro3b: isPrivateThis = false, isProtectedThis = true
+method pro3b_=: isPrivateThis = false, isProtectedThis = true
+value pro4a: isPrivateThis = false, isProtectedThis = true
+lazy value pro4a: isPrivateThis = true, isProtectedThis = false
+type Pro5a: isPrivateThis = false, isProtectedThis = true
+type Pro5b: isPrivateThis = false, isProtectedThis = true
+class Pro6: isPrivateThis = false, isProtectedThis = true
+trait Pro7: isPrivateThis = false, isProtectedThis = true
+object Pro8: isPrivateThis = false, isProtectedThis = true
diff --git a/test/files/run/t6733.scala b/test/files/run/t6733.scala
new file mode 100644
index 0000000..df1946a
--- /dev/null
+++ b/test/files/run/t6733.scala
@@ -0,0 +1,35 @@
+import scala.reflect.runtime.universe._
+
+trait Foo {
+  private[this] val pri1a = 0
+  // private[this] val pri1b: Int
+  private[this] def pri2a = 1
+  // private[this] def pri2b: Int
+  private[this] var pri3a = 0
+  // private[this] var pri3b: Int
+  private[this] lazy val pri4a = 0
+  // private[this] lazy val pri4b: Int
+  private[this] type Pri5a = Int
+  // private[this] type Pri5b <: Int
+  private[this] class Pri6
+  private[this] trait Pri7
+  private[this] object Pri8
+
+  protected[this] val pro1a = 0
+  protected[this] val pro1b: Int
+  protected[this] def pro2a = 1
+  protected[this] def pro2b: Int
+  protected[this] var pro3a = 0
+  protected[this] var pro3b: Int
+  protected[this] lazy val pro4a = 0
+  // protected[this] lazy val pro4b: Int
+  protected[this] type Pro5a = Int
+  protected[this] type Pro5b <: Int
+  protected[this] class Pro6
+  protected[this] trait Pro7
+  protected[this] object Pro8
+}
+
+object Test extends App {
+  typeOf[Foo].decls.sorted.foreach(m => println(s"$m: isPrivateThis = ${m.isPrivateThis}, isProtectedThis = ${m.isProtectedThis}"))
+}
\ No newline at end of file
diff --git a/test/files/run/t6745-2.scala b/test/files/run/t6745-2.scala
new file mode 100644
index 0000000..5afa65d
--- /dev/null
+++ b/test/files/run/t6745-2.scala
@@ -0,0 +1,22 @@
+import scala.tools.nsc._
+import scala.tools.partest.CompilerTest
+import scala.collection.{ mutable, immutable, generic }
+
+object Test extends CompilerTest {
+  import global._
+  import rootMirror._
+  import definitions._
+  import global.analyzer.{Context, ImportInfo}
+
+  override def code = """
+package context {
+}
+  """
+
+  def check(source: String, unit: global.CompilationUnit) = {
+    val context: Context = global.analyzer.rootContext(unit)
+    val importInfo: ImportInfo = context.imports.head // Predef._
+    val importedSym = importInfo.importedSymbol(termNames.CONSTRUCTOR)
+    assert(importedSym == NoSymbol, importedSym) // was "constructor Predef"
+  }
+}
diff --git a/test/files/run/t6814.check b/test/files/run/t6814.check
new file mode 100644
index 0000000..97ada77
--- /dev/null
+++ b/test/files/run/t6814.check
@@ -0,0 +1,7 @@
+List[Int]
+scala.collection.immutable.List.type
+object java.lang.RuntimeException is not a value
+List[Int]
+List
+scala.collection.immutable.List.type
+scala.collection.immutable.List.type does not take parameters
diff --git a/test/files/run/t6814/Macros_1.scala b/test/files/run/t6814/Macros_1.scala
new file mode 100644
index 0000000..0257f45
--- /dev/null
+++ b/test/files/run/t6814/Macros_1.scala
@@ -0,0 +1,24 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def impl(c: Context) = {
+    import c.universe._
+
+    def test(tree: Tree, mode: c.TypecheckMode): String = {
+      try c.typecheck(tree, mode, silent = false).tpe.toString
+      catch { case c.TypecheckException(_, msg) => msg }
+    }
+
+    q"""
+      println(${test(q"List(1, 2)", c.TERMmode)})
+      println(${test(q"List", c.TERMmode)})
+      println(${test(q"RuntimeException", c.TERMmode)})
+      println(${test(tq"List[Int]", c.TYPEmode)})
+      println(${test(tq"List", c.TYPEmode)})
+      println(${test(q"List", c.TYPEmode)})
+      println(${test(q"List(1, 2)", c.TYPEmode)})
+    """
+  }
+  def foo: Unit = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/t6814/Test_2.scala b/test/files/run/t6814/Test_2.scala
new file mode 100644
index 0000000..acfddae
--- /dev/null
+++ b/test/files/run/t6814/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+  Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/t6860.check b/test/files/run/t6860.check
new file mode 100644
index 0000000..c96331f
--- /dev/null
+++ b/test/files/run/t6860.check
@@ -0,0 +1,4 @@
+Bippy[String]
+Bippy[String]
+throws[Nothing]
+throws[RuntimeException]
diff --git a/test/files/run/t6860.scala b/test/files/run/t6860.scala
new file mode 100644
index 0000000..c2f8db0
--- /dev/null
+++ b/test/files/run/t6860.scala
@@ -0,0 +1,20 @@
+class Bippy[T](val value: T) extends annotation.StaticAnnotation
+
+class A {
+  @Bippy("hi") def f1: Int = 1
+  @Bippy[String]("hi") def f2: Int = 2
+
+  @throws("what do I throw?") def f3 = throw new RuntimeException
+  @throws[RuntimeException]("that's good to know!") def f4 = throw new RuntimeException
+}
+
+object Test {
+  import scala.reflect.runtime.universe._
+
+  def main(args: Array[String]): Unit = {
+    val members = typeOf[A].decls.toList
+    val tpes = members flatMap (_.annotations) map (_.tree.tpe)
+
+    tpes.map(_.toString).sorted foreach println
+  }
+}
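
The 2.11 change exercised by t6860 is that a runtime Annotation is now available as a typed tree, so both the annotation type and its arguments can be read back. A small sketch of the same idea outside the test (Tag/Marked are illustrative names):

    import scala.reflect.runtime.universe._

    class Tag(val label: String) extends annotation.StaticAnnotation

    class Marked {
      @Tag("hello") def f: Int = 1
    }

    object ReadTags extends App {
      val anns = typeOf[Marked].decls.toList.flatMap(_.annotations)
      // ann.tree is the annotation as a tree, e.g. new Tag("hello"); its tpe is Tag
      anns.foreach(ann => println(s"${ann.tree.tpe}: ${ann.tree}"))
    }
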
diff --git a/test/files/run/t6863.check b/test/files/run/t6863.check
new file mode 100644
index 0000000..fea22b5
--- /dev/null
+++ b/test/files/run/t6863.check
@@ -0,0 +1,13 @@
+t6863.scala:38: warning: comparing values of types Unit and Unit using `==' will always yield true
+    assert({ () => x}.apply == ())
+                            ^
+t6863.scala:42: warning: comparing values of types Unit and Unit using `==' will always yield true
+    assert({ () => x}.apply == ())
+                            ^
+t6863.scala:46: warning: comparing values of types Unit and Unit using `==' will always yield true
+    assert({ () => x}.apply == ())
+                            ^
+t6863.scala:59: warning: comparing values of types Unit and Unit using `==' will always yield true
+    assert({ () => x }.apply == ())
+                             ^
+warning: there were 4 deprecation warning(s); re-run with -deprecation for details
diff --git a/test/files/run/t6863.scala b/test/files/run/t6863.scala
index d77adb6..7210ebc 100644
--- a/test/files/run/t6863.scala
+++ b/test/files/run/t6863.scala
@@ -3,34 +3,34 @@ object Test {
   def lazyVal() = {
   	// internally lazy vals become vars which are initialized with "_", so they need to be tested just like vars do
   	lazy val x = "42"
-    assert({ () => x }.apply == "42")    
+    assert({ () => x }.apply == "42")
   }
   def ident() = {
     val y = "42"
     var x = y
-    assert({ () => x }.apply == "42")    
+    assert({ () => x }.apply == "42")
   }
   def apply() = {
     def y(x : Int) = x.toString
     var x = y(42)
-    assert({ () => x }.apply == "42")        
+    assert({ () => x }.apply == "42")
   }
   def literal() = {
     var x = "42"
-    assert({ () => x }.apply == "42")    
+    assert({ () => x }.apply == "42")
   }
   def `new`() = {
     var x = new String("42")
-    assert({ () => x }.apply == "42")            
+    assert({ () => x }.apply == "42")
   }
   def select() = {
     object Foo{val bar = "42"}
     var x = Foo.bar
-    assert({ () => x }.apply == "42")            
+    assert({ () => x }.apply == "42")
   }
   def `throw`() = {
     var x = if (true) "42" else throw new Exception("42")
-    assert({ () => x }.apply == "42")            
+    assert({ () => x }.apply == "42")
   }
   def assign() = {
     var y = 1
@@ -51,7 +51,7 @@ object Test {
     assert({ () => x }.apply == "42")
   }
   def tryCatch() = {
-    var x = try { "42" } catch { case _ => "43" }
+    var x = try { "42" } catch { case _: Throwable => "43" }
     assert({ () => x }.apply == "42")
   }
   def `if`() = {
@@ -59,7 +59,7 @@ object Test {
     assert({ () => x }.apply == ())
   }
   def ifElse() = {
-    var x = if(true) "42" else "43" 
+    var x = if(true) "42" else "43"
     assert({ () => x }.apply == "42")
   }
   def matchCase() = {
@@ -77,7 +77,7 @@ object Test {
     assert({ () => x }.apply == "42")
   }
   def labelDef() = {
-    var x = 100 match { 
+    var x = 100 match {
       case 100 => try "42" finally ()
     }
     assert({ () => x }.apply == "42")
@@ -85,8 +85,8 @@ object Test {
   def nested() = {
     var x = {
       val y = 42
-        if(true) try "42" catch {case _ => "43"}
-        else "44"       
+        if(true) try "42" catch {case _: Throwable => "43"}
+        else "44"
     }
     assert({ () => x }.apply == "42")
   }
diff --git a/test/files/run/t6908.scala b/test/files/run/t6908.scala
new file mode 100644
index 0000000..a641de9
--- /dev/null
+++ b/test/files/run/t6908.scala
@@ -0,0 +1,6 @@
+object Test {
+  def main(args: Array[String]) {
+    val set = collection.mutable.Set("1", null, "3").par
+    assert( set exists (_ eq null) )
+  }
+}
diff --git a/test/files/run/t6935.check b/test/files/run/t6935.check
new file mode 100644
index 0000000..844ca54
--- /dev/null
+++ b/test/files/run/t6935.check
@@ -0,0 +1 @@
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
diff --git a/test/files/run/t6935.scala b/test/files/run/t6935.scala
index dea2d7f..fdaf02e 100644
--- a/test/files/run/t6935.scala
+++ b/test/files/run/t6935.scala
@@ -1,14 +1,14 @@
 object Test {
 
   def main(args: Array[String]): Unit = {
-		  import java.io._
-		  val bytes = new ByteArrayOutputStream()
-		  val out = new ObjectOutputStream(bytes)
-		  out.writeObject(())
-		  out.close()
-		  val buf = bytes.toByteArray
-		  val in = new ObjectInputStream(new ByteArrayInputStream(buf))
-		  val unit = in.readObject()
-		  assert(unit == ())
+    import java.io._
+    val bytes = new ByteArrayOutputStream()
+    val out = new ObjectOutputStream(bytes)
+    out.writeObject(())
+    out.close()
+    val buf = bytes.toByteArray
+    val in = new ObjectInputStream(new ByteArrayInputStream(buf))
+    val unit = in.readObject()
+    assert(unit == ())
   }
 }
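
The reindented body of t6935 is a plain Java-serialization round trip of the Unit value. The same pattern as a reusable helper, a sketch only (RoundTrip is an illustrative name):

    import java.io._

    object RoundTrip extends App {
      def roundTrip[T](value: T): T = {
        val bytes = new ByteArrayOutputStream()
        val out = new ObjectOutputStream(bytes)
        out.writeObject(value)
        out.close()
        val in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
        in.readObject().asInstanceOf[T]
      }

      assert(roundTrip("42") == "42")
      assert(roundTrip(()) == ())   // the Unit value survives the round trip, as asserted above
    }
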
diff --git a/test/files/run/t6937.check b/test/files/run/t6937.check
index 9a1fa4c..4729dc7 100644
--- a/test/files/run/t6937.check
+++ b/test/files/run/t6937.check
@@ -1,9 +1,7 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
-scala>     import scala.reflect.runtime.{universe => ru}
+scala> import scala.reflect.runtime.{universe => ru}
 import scala.reflect.runtime.{universe=>ru}
 
 scala>     import scala.reflect.runtime.{currentMirror => cm}
@@ -22,5 +20,3 @@ scala>     apiru.typeTag[A].in(cm)
 res0: reflect.runtime.universe.TypeTag[A] = TypeTag[A]
 
 scala> 
-
-scala> 
diff --git a/test/files/run/t6955.scala b/test/files/run/t6955.scala
index 2610acd..329af68 100644
--- a/test/files/run/t6955.scala
+++ b/test/files/run/t6955.scala
@@ -1,5 +1,6 @@
-import scala.tools.partest.IcodeTest
+import scala.tools.partest.IcodeComparison
 
+// this class should compile to code that uses switches (twice)
 class Switches {
   type Tag = Byte
 
@@ -10,7 +11,8 @@ class Switches {
     case _ => 0
   }
 
-  def switchOkay(i: Byte): Int = i match { // notice type of i is Byte
+  // this worked before, should keep working
+  def switchOkay(i: Byte): Int = i match {
     case 1 => 1
     case 2 => 2
     case 3 => 3
@@ -18,9 +20,15 @@ class Switches {
   }
 }
 
-object Test extends IcodeTest {
+object Test extends IcodeComparison {
   // ensure we get two switches out of this -- ignore the rest of the output for robustness
   // exclude the constant we emit for the "SWITCH ..." string below (we get the icode for all the code you see in this file)
-  override def show() = println(collectIcode("").filter(x => x.indexOf("SWITCH ...") >= 0 && x.indexOf("CONSTANT(") == -1).size)
+  override def show() = {
+    val expected = 2
+    val actual = (collectIcode() filter {
+      x => x.indexOf("SWITCH ...") >= 0 && x.indexOf("CONSTANT(") == -1
+    }).size
+    assert(actual == expected)
+  }
 }
 
diff --git a/test/files/run/t6956.scala b/test/files/run/t6956.scala
index 4a6583c..3569adf 100644
--- a/test/files/run/t6956.scala
+++ b/test/files/run/t6956.scala
@@ -1,4 +1,4 @@
-import scala.tools.partest.IcodeTest
+import scala.tools.partest.IcodeComparison
 
 class Switches {
   private[this] final val ONE = 1
@@ -18,9 +18,14 @@ class Switches {
   }
 }
 
-object Test extends IcodeTest {
+object Test extends IcodeComparison {
   // ensure we get two switches out of this -- ignore the rest of the output for robustness
   // exclude the constant we emit for the "SWITCH ..." string below (we get the icode for all the code you see in this file)
-  override def show() = println(collectIcode("").filter(x => x.indexOf("SWITCH ...") >= 0 && x.indexOf("CONSTANT(") == -1).size)
+  override def show() = {
+    val expected = 2
+    val actual = (collectIcode() filter {
+      x => x.indexOf("SWITCH ...") >= 0 && x.indexOf("CONSTANT(") == -1
+    }).size
+    assert(actual == expected)
+  }
 }
-
diff --git a/test/files/run/t6969.scala b/test/files/run/t6969.scala
index 8cfc28c..c4561b4 100644
--- a/test/files/run/t6969.scala
+++ b/test/files/run/t6969.scala
@@ -1,3 +1,7 @@
+
+
+import scala.language.{ reflectiveCalls }
+
 object Test {
   private type Clearable = { def clear(): Unit }
   private def choke() = {
diff --git a/test/files/run/t6989/Test_2.scala b/test/files/run/t6989/Test_2.scala
index e48e824..932a369 100644
--- a/test/files/run/t6989/Test_2.scala
+++ b/test/files/run/t6989/Test_2.scala
@@ -11,20 +11,20 @@ import scala.reflect.runtime.universe._
 package object foo {
   def testAll(): Unit = {
     test(typeOf[foo.PackagePrivateJavaClass].typeSymbol)
-    test(typeOf[foo.PackagePrivateJavaClass].typeSymbol.companionSymbol)
+    test(typeOf[foo.PackagePrivateJavaClass].typeSymbol.companion)
     test(typeOf[foo.JavaClass_1].typeSymbol)
-    test(typeOf[foo.JavaClass_1].typeSymbol.companionSymbol)
+    test(typeOf[foo.JavaClass_1].typeSymbol.companion)
   }
 
   def test(sym: Symbol): Unit = {
     printSymbolDetails(sym)
     if (sym.isClass || sym.isModule) {
-      sym.typeSignature.declarations.toList.sortBy(_.name.toString) foreach test
+      sym.info.decls.toList.sortBy(_.name.toString) foreach test
     }
   }
 
   def printSymbolDetails(sym: Symbol): Unit = {
-    def stableSignature(sym: Symbol) = sym.typeSignature match {
+    def stableSignature(sym: Symbol) = sym.info match {
       case ClassInfoType(_, _, _) => "ClassInfoType(...)"
       case tpe => tpe.toString
     }
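
Several hunks in this patch perform the same mechanical 2.11 reflection API migration seen here: companionSymbol becomes companion, typeSignature becomes info, and declarations becomes decls. A short sketch showing the new spellings on an arbitrary symbol:

    import scala.reflect.runtime.universe._

    object RenameCheatSheet extends App {
      val sym = typeOf[List[Int]].typeSymbol
      // 2.10 -> 2.11: companionSymbol -> companion, typeSignature -> info, declarations -> decls
      println(sym.companion)          // object List
      println(sym.info.decls.size)    // number of members declared in class List
    }
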
diff --git a/test/files/run/t6992.check b/test/files/run/t6992.check
new file mode 100644
index 0000000..021f32e
--- /dev/null
+++ b/test/files/run/t6992.check
@@ -0,0 +1,4 @@
+Test.foo.T
+Int
+42
+42
diff --git a/test/files/run/t6992/Macros_1.scala b/test/files/run/t6992/Macros_1.scala
new file mode 100644
index 0000000..f578f2b
--- /dev/null
+++ b/test/files/run/t6992/Macros_1.scala
@@ -0,0 +1,75 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.whitebox.Context
+
+object Macros {
+  def foo(name: String): Any = macro foo_impl
+  def foo_impl(c: Context)(name: c.Expr[String]) = {
+    import c.universe._
+
+    val Literal(Constant(lit: String)) = name.tree
+    val anon = newTypeName(c.fresh)
+
+    c.Expr(Block(
+      ClassDef(
+        Modifiers(Flag.FINAL), anon, Nil, Template(
+          Nil, noSelfType, List(
+            DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(())))),
+            TypeDef(Modifiers(), TypeName(lit), Nil, TypeTree(typeOf[Int]))
+          )
+        )
+      ),
+      Apply(Select(New(Ident(anon)), termNames.CONSTRUCTOR), Nil)
+    ))
+  }
+
+  def bar(name: String): Any = macro bar_impl
+  def bar_impl(c: Context)(name: c.Expr[String]) = {
+    import c.universe._
+
+    val Literal(Constant(lit: String)) = name.tree
+    val anon = newTypeName(c.fresh)
+
+    c.Expr(Block(
+      ClassDef(
+        Modifiers(Flag.FINAL), anon, Nil, Template(
+          Nil, noSelfType, List(
+            DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(())))),
+            DefDef(
+              Modifiers(), TermName(lit), Nil, Nil, TypeTree(),
+              c.literal(42).tree
+            )
+          )
+        )
+      ),
+      Apply(Select(New(Ident(anon)), termNames.CONSTRUCTOR), Nil)
+    ))
+  }
+
+  def baz(name: String): Any = macro baz_impl
+  def baz_impl(c: Context)(name: c.Expr[String]) = {
+    import c.universe._
+
+    val Literal(Constant(lit: String)) = name.tree
+    val anon = newTypeName(c.fresh)
+    val wrapper = newTypeName(c.fresh)
+
+    c.Expr(Block(
+      ClassDef(
+        Modifiers(), anon, Nil, Template(
+          Nil, emptyValDef, List(
+            DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(())))),
+            DefDef(
+              Modifiers(), TermName(lit), Nil, Nil, TypeTree(),
+              c.literal(42).tree
+            )
+          )
+        )
+      ),
+      ClassDef(
+        Modifiers(Flag.FINAL), wrapper, Nil,
+        Template(Ident(anon) :: Nil, noSelfType, DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(())))) :: Nil)
+      ),
+      Apply(Select(New(Ident(wrapper)), termNames.CONSTRUCTOR), Nil)
+    ))
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/t6992/Test_2.scala b/test/files/run/t6992/Test_2.scala
new file mode 100644
index 0000000..2399bf8
--- /dev/null
+++ b/test/files/run/t6992/Test_2.scala
@@ -0,0 +1,14 @@
+import scala.language.reflectiveCalls
+
+object Test extends App {
+  val foo = Macros.foo("T")
+  val ttpe = scala.reflect.runtime.universe.weakTypeOf[foo.T]
+  println(ttpe)
+  println(ttpe.typeSymbol.info)
+
+  val bar = Macros.bar("test")
+  println(bar.test)
+
+  val baz = Macros.baz("test")
+  println(baz.test)
+}
\ No newline at end of file
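
The scala.language.reflectiveCalls import in Test_2 is needed because a whitebox macro may refine its declared Any result to a structural type, and calling members of a structural type goes through reflection. The same language feature without macros, as a sketch (names are illustrative):

    import scala.language.reflectiveCalls

    object StructuralCall extends App {
      // A value typed only by the members it is known to have.
      val greeter: { def greet(name: String): String } =
        new { def greet(name: String): String = s"hello, $name" }
      // The call is dispatched reflectively at run time, hence the import.
      println(greeter.greet("scala"))
    }
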
diff --git a/test/files/run/t7008-scala-defined.flags b/test/files/run/t7008-scala-defined.flags
new file mode 100644
index 0000000..49f2d2c
--- /dev/null
+++ b/test/files/run/t7008-scala-defined.flags
@@ -0,0 +1 @@
+-Ybackend:GenASM
diff --git a/test/files/run/t7008-scala-defined/Impls_Macros_2.scala b/test/files/run/t7008-scala-defined/Impls_Macros_2.scala
index 94fd990..330db8d 100644
--- a/test/files/run/t7008-scala-defined/Impls_Macros_2.scala
+++ b/test/files/run/t7008-scala-defined/Impls_Macros_2.scala
@@ -1,11 +1,12 @@
 import language.experimental.macros
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def impl(c: Context) = {
-    val decls = c.typeOf[ScalaClassWithCheckedExceptions_1[_]].declarations.toList
+    import c.universe._
+    val decls = c.typeOf[ScalaClassWithCheckedExceptions_1[_]].decls.toList
     val s = decls.sortBy(_.name.toString).map(decl => (s"${decl.name}: ${decl.annotations}")).mkString(scala.compat.Platform.EOL)
-    c.universe.reify(println(c.literal(s).splice))
+    reify(println(c.Expr[String](Literal(Constant(s))).splice))
   }
 
   def foo = macro impl
diff --git a/test/files/run/t7008-scala-defined/Test_3.scala b/test/files/run/t7008-scala-defined/Test_3.scala
index 03bb79d..ee7b9d9 100644
--- a/test/files/run/t7008-scala-defined/Test_3.scala
+++ b/test/files/run/t7008-scala-defined/Test_3.scala
@@ -4,6 +4,6 @@ object Test extends App {
   Macros.foo
   println("=============")
 
-  val decls = typeOf[ScalaClassWithCheckedExceptions_1[_]].declarations.toList
+  val decls = typeOf[ScalaClassWithCheckedExceptions_1[_]].decls.toList
   decls sortBy (_.name.toString) foreach (decl => println(s"${decl.name}: ${decl.annotations}"))
 }
\ No newline at end of file
diff --git a/test/files/run/t7008/Impls_Macros_2.scala b/test/files/run/t7008/Impls_Macros_2.scala
index 7a17314..3c6fe11 100644
--- a/test/files/run/t7008/Impls_Macros_2.scala
+++ b/test/files/run/t7008/Impls_Macros_2.scala
@@ -1,11 +1,12 @@
 import language.experimental.macros
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def impl(c: Context) = {
-    val decls = c.typeOf[JavaClassWithCheckedExceptions_1[_]].declarations.toList
+    import c.universe._
+    val decls = c.typeOf[JavaClassWithCheckedExceptions_1[_]].decls.toList
     val s = decls.sortBy(_.name.toString).map(decl => (s"${decl.name}: ${decl.annotations}")).mkString(scala.compat.Platform.EOL)
-    c.universe.reify(println(c.literal(s).splice))
+    reify(println(c.Expr[String](Literal(Constant(s))).splice))
   }
 
   def foo = macro impl
diff --git a/test/files/run/t7008/Test_3.scala b/test/files/run/t7008/Test_3.scala
index b2961a8..99db05e 100644
--- a/test/files/run/t7008/Test_3.scala
+++ b/test/files/run/t7008/Test_3.scala
@@ -4,6 +4,6 @@ object Test extends App {
   Macros.foo
   println("=============")
 
-  val decls = typeOf[JavaClassWithCheckedExceptions_1[_]].declarations.toList
+  val decls = typeOf[JavaClassWithCheckedExceptions_1[_]].decls.toList
   decls sortBy (_.name.toString) foreach (decl => println(s"${decl.name}: ${decl.annotations}"))
 }
\ No newline at end of file
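
Both t7008 variants above replace the removed c.literal helper with an explicit constant literal wrapped in an Expr. A minimal sketch of that 2.11 idiom on its own (Lift/greeting are illustrative names):

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object Lift {
      def greeting: Unit = macro greetingImpl
      def greetingImpl(c: Context): c.Expr[Unit] = {
        import c.universe._
        val s = "computed at compile time"
        // 2.10 wrote c.literal(s); in 2.11 the constant is wrapped by hand
        reify(println(c.Expr[String](Literal(Constant(s))).splice))
      }
    }
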
diff --git a/test/files/run/t7015.check b/test/files/run/t7015.check
new file mode 100644
index 0000000..7651fe0
--- /dev/null
+++ b/test/files/run/t7015.check
@@ -0,0 +1,11 @@
+Method returns Null type: null
+Method takes non Null type: null
+call through method null
+call through bridge null
+fetch field: null
+fetch field on companion: null
+fetch local: null
+fetch array element: null
+method that takes object: null
+method that takes anyref: null
+method that takes any: null
diff --git a/test/files/run/t7015.scala b/test/files/run/t7015.scala
new file mode 100644
index 0000000..37a73a9
--- /dev/null
+++ b/test/files/run/t7015.scala
@@ -0,0 +1,49 @@
+object Test {
+  def main(args : Array[String]) : Unit = {
+    println(s"Method returns Null type: $f")
+    println(s"Method takes non Null type: ${g(null)}")
+
+    // pass things through the g function because it expects
+    // a string. If we haven't adapted properly then we'll
+    // get verify errors
+    val b = new B
+    println(s"call through method ${g(b.f(null))}")
+    println(s"call through bridge ${g((b: A).f(null))}")
+
+    println(s"fetch field: ${g(b.nullField)}")
+    println(s"fetch field on companion: ${g(B.nullCompanionField)}")
+
+    val x = f
+    println(s"fetch local: ${g(x)}")
+
+    val nulls = Array(f, f, f)
+    println(s"fetch array element: ${g(nulls(0))}")
+
+    println(s"method that takes object: ${q(f)}")
+    println(s"method that takes anyref: ${r(f)}")
+    println(s"method that takes any: ${s(f)}")
+  }
+
+  def f: Null = null
+
+  def g(x: String) = x
+
+  def q(x: java.lang.Object) = x
+  def r(x: AnyRef) = x
+  def s(x: Any) = x
+}
+
+abstract class A {
+	def f(x: String): String
+}
+
+class B extends A {
+	val nullField = null
+
+	// this forces a bridge method because the return type is different
+	override def f(x: String) : Null = null
+}
+
+object B {
+	val nullCompanionField = null
+}
\ No newline at end of file
diff --git a/test/files/run/t7044.check b/test/files/run/t7044.check
new file mode 100644
index 0000000..ab52387
--- /dev/null
+++ b/test/files/run/t7044.check
@@ -0,0 +1,14 @@
+compile-time
+uninitialized File: <no file>
+initialized File: <no file>
+uninitialized BitSet: <no file>
+initialized BitSet: <no file>
+uninitialized C: Test_2.scala
+initialized C: Test_2.scala
+runtime
+autoinitialized File: <no file> true
+autoinitialized File: <no file> true
+autoinitialized BitSet: <no file> true
+autoinitialized BitSet: <no file> true
+autoinitialized C: <no file> true
+autoinitialized C: <no file> true
diff --git a/test/files/run/t7044/Macros_1.scala b/test/files/run/t7044/Macros_1.scala
new file mode 100644
index 0000000..3b3f8c3
--- /dev/null
+++ b/test/files/run/t7044/Macros_1.scala
@@ -0,0 +1,26 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+object Macros {
+  def impl(c: Context) = {
+    var messages = List[String]()
+    def println(msg: String) = messages :+= msg
+
+    import c.universe._
+    def test(tpe: Type): Unit = {
+      val sym = tpe.typeSymbol
+      println(s"uninitialized ${sym.name}: ${sym.pos.source.file.name}")
+      internal.initialize(sym)
+      println(s"initialized ${sym.name}: ${sym.pos.source.file.name}")
+    }
+
+    println("compile-time")
+    test(typeOf[java.io.File])
+    test(typeOf[scala.collection.BitSet])
+    test(c.mirror.staticClass("C").toType)
+
+    q"..${messages.map(msg => q"println($msg)")}"
+  }
+
+  def foo: Any = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/t7044/Test_2.scala b/test/files/run/t7044/Test_2.scala
new file mode 100644
index 0000000..8dfb349
--- /dev/null
+++ b/test/files/run/t7044/Test_2.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+class C
+
+object Test extends App {
+  def test(tpe: Type): Unit = {
+    val sym = tpe.typeSymbol
+    println(s"autoinitialized ${sym.name}: ${sym.pos.source.file.name} ${sym.pos.source.file.sizeOption.nonEmpty}")
+    internal.initialize(sym)
+    println(s"autoinitialized ${sym.name}: ${sym.pos.source.file.name} ${sym.pos.source.file.sizeOption.nonEmpty}")
+  }
+
+  Macros.foo
+  println("runtime")
+  test(typeOf[java.io.File])
+  test(typeOf[scala.collection.BitSet])
+  test(typeOf[C])
+}
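
The runtime half of t7044 uses the new internal.initialize to force a symbol's info (and with it its position and source) before inspecting it. A minimal sketch of just that step:

    import scala.reflect.runtime.universe._

    object ForceInfo extends App {
      val sym = typeOf[scala.collection.BitSet].typeSymbol
      // Symbols loaded from classfiles are completed lazily; initialize forces completion.
      internal.initialize(sym)
      println(s"${sym.name}: ${sym.pos.source.file.name}")
    }
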
diff --git a/test/files/run/t7045.check b/test/files/run/t7045.check
new file mode 100644
index 0000000..2813453
--- /dev/null
+++ b/test/files/run/t7045.check
@@ -0,0 +1,2 @@
+D with C
+D with C
diff --git a/test/files/run/t7045.scala b/test/files/run/t7045.scala
new file mode 100644
index 0000000..5b31a8b
--- /dev/null
+++ b/test/files/run/t7045.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+class C
+class D { self: C => }
+
+object Test extends App {
+  val d = cm.staticClass("D")
+  println(d.selfType)
+  d.info
+  println(d.selfType)
+}
\ No newline at end of file
diff --git a/test/files/run/t7046.scala b/test/files/run/t7046.scala
index 647a15c..f15545f 100644
--- a/test/files/run/t7046.scala
+++ b/test/files/run/t7046.scala
@@ -8,6 +8,6 @@ class E extends C
 object Test extends App {
   val c = cm.staticClass("C")
   println(c.knownDirectSubclasses)
-  c.typeSignature
+  c.info
   println(c.knownDirectSubclasses)
 }
\ No newline at end of file
diff --git a/test/files/run/t7047.check b/test/files/run/t7047.check
index e69de29..32bd581 100644
--- a/test/files/run/t7047.check
+++ b/test/files/run/t7047.check
@@ -0,0 +1,3 @@
+Test_2.scala:2: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+  Macros.foo
+         ^
diff --git a/test/files/run/t7047/Impls_Macros_1.scala b/test/files/run/t7047/Impls_Macros_1.scala
index 2992e3e..787ea6c 100644
--- a/test/files/run/t7047/Impls_Macros_1.scala
+++ b/test/files/run/t7047/Impls_Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 import language.experimental.macros
 
 class Foo
@@ -12,7 +12,7 @@ object Macros {
     } catch {
       case _: Exception =>
     }
-    c.literalNull
+    c.Expr[Null](Literal(Constant(null)))
   }
 
   def foo = macro impl
diff --git a/test/files/run/t7064-old-style-supercalls.scala b/test/files/run/t7064-old-style-supercalls.scala
deleted file mode 100644
index cffa7b1..0000000
--- a/test/files/run/t7064-old-style-supercalls.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-import scala.reflect.runtime.universe._
-import Flag._
-import definitions._
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.Eval
-
-object Test extends App {
-  val PARAMACCESSOR = (1L << 29).asInstanceOf[FlagSet]
-
-  // these trees can be acquired by running the following incantation:
-  //   echo 'class C(val x: Int); class D extends C(2)' > foo.scala
-  //   ./scalac -Xprint:parser -Yshow-trees-stringified -Yshow-trees-compact foo.scala
-
-  val c = ClassDef(
-    Modifiers(), newTypeName("C"), List(),
-    Template(
-      List(Select(Ident(ScalaPackage), newTypeName("AnyRef"))),
-      emptyValDef,
-      List(
-        ValDef(Modifiers(PARAMACCESSOR), newTermName("x"), Ident(newTypeName("Int")), EmptyTree),
-        DefDef(
-          Modifiers(),
-          nme.CONSTRUCTOR,
-          List(),
-          List(List(ValDef(Modifiers(PARAM | PARAMACCESSOR), newTermName("x"), Ident(newTypeName("Int")), EmptyTree))),
-          TypeTree(),
-          Block(
-            List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())),
-            Literal(Constant(())))))))
-  val d = ClassDef(
-    Modifiers(), newTypeName("D"), List(),
-    Template(
-      List(Ident(newTypeName("C"))),
-      emptyValDef,
-      List(
-        DefDef(
-          Modifiers(),
-          nme.CONSTRUCTOR,
-          List(),
-          List(List()),
-          TypeTree(),
-          Block(
-            List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List(Literal(Constant(2))))),
-            Literal(Constant(())))))))
-  val result = Select(Apply(Select(New(Ident(newTypeName("D"))), nme.CONSTRUCTOR), List()), newTermName("x"))
-  println(cm.mkToolBox().eval(Block(List(c, d), result)))
-}
\ No newline at end of file
diff --git a/test/files/run/t7074.check b/test/files/run/t7074.check
deleted file mode 100644
index ab9cf11..0000000
--- a/test/files/run/t7074.check
+++ /dev/null
@@ -1,9 +0,0 @@
-<a/>
-<a b="2" c="3" d="1"/>
-<a b="2" c="4" d="1" e="3" f="5"/>
-<a b="5" c="4" d="3" e="2" f="1"/>
-<a b="1" c="2" d="3" e="4" f="5"/>
-<a a:b="2" a:c="3" a:d="1"/>
-<a a:b="2" a:c="4" a:d="1" a:e="3" a:f="5"/>
-<a a:b="5" a:c="4" a:d="3" a:e="2" a:f="1"/>
-<a a:b="1" a:c="2" a:d="3" a:e="4" a:f="5"/>
diff --git a/test/files/run/t7074.scala b/test/files/run/t7074.scala
deleted file mode 100644
index 693a076..0000000
--- a/test/files/run/t7074.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-import scala.xml.Utility.sort
-
-object Test extends App {
-  println(sort(<a/>))
-  println(sort(<a d="1" b="2" c="3"/>))
-  println(sort(<a d="1" b="2" e="3" c="4" f="5"/>))
-  println(sort(<a f="1" e="2" d="3" c="4" b="5"/>))
-  println(sort(<a b="1" c="2" d="3" e="4" f="5"/>))
-
-  println(sort(<a a:d="1" a:b="2" a:c="3"/>))
-  println(sort(<a a:d="1" a:b="2" a:e="3" a:c="4" a:f="5"/>))
-  println(sort(<a a:f="1" a:e="2" a:d="3" a:c="4" a:b="5"/>))
-  println(sort(<a a:b="1" a:c="2" a:d="3" a:e="4" a:f="5"/>))
-}
-
diff --git a/test/files/run/t7088.check b/test/files/run/t7088.check
new file mode 100644
index 0000000..1191247
--- /dev/null
+++ b/test/files/run/t7088.check
@@ -0,0 +1,2 @@
+1
+2
diff --git a/test/files/run/t7088.scala b/test/files/run/t7088.scala
new file mode 100644
index 0000000..5f0114b
--- /dev/null
+++ b/test/files/run/t7088.scala
@@ -0,0 +1,13 @@
+object Test {
+  type Tag[X] = {type Tag = X}
+  type TaggedArray[T] = Array[T] with Tag[Any]
+
+  def method[T: scala.reflect.ClassTag](a: TaggedArray[T], value: T) {
+    a.update(0, value)
+    a foreach println
+  }
+
+  def main(args: Array[String]): Unit = {
+    method(Array(1, 2).asInstanceOf[TaggedArray[Int]], 1)
+  }
+}
diff --git a/test/files/run/t7096.scala b/test/files/run/t7096.scala
index e9c0323..e7a894f 100644
--- a/test/files/run/t7096.scala
+++ b/test/files/run/t7096.scala
@@ -1,5 +1,55 @@
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
 import scala.tools.partest._
 import scala.tools.nsc._
+import scala.reflect.runtime.{universe => ru}
+import scala.language.implicitConversions
+
+// necessary to avoid bincompat with scala-partest compiled against the old compiler
+abstract class CompilerTest extends DirectTest {
+  def check(source: String, unit: global.CompilationUnit): Unit
+
+  lazy val global: Global = newCompiler()
+  lazy val units: List[global.CompilationUnit] = compilationUnits(global)(sources: _ *)
+  import global._
+  import definitions.{ compilerTypeFromTag }
+
+  override def extraSettings = "-usejavacp -d " + testOutput.path
+
+  def show() = (sources, units).zipped foreach check
+
+  // Override at least one of these...
+  def code = ""
+  def sources: List[String] = List(code)
+
+  // Utility functions
+  class MkType(sym: Symbol) {
+    def apply[M](implicit t: ru.TypeTag[M]): Type =
+      if (sym eq NoSymbol) NoType
+      else appliedType(sym, compilerTypeFromTag(t))
+  }
+  implicit def mkMkType(sym: Symbol) = new MkType(sym)
+
+  def allMembers(root: Symbol): List[Symbol] = {
+    def loop(seen: Set[Symbol], roots: List[Symbol]): List[Symbol] = {
+      val latest = roots flatMap (_.info.members) filterNot (seen contains _)
+      if (latest.isEmpty) seen.toList.sortWith(_ isLess _)
+      else loop(seen ++ latest, latest)
+    }
+    loop(Set(), List(root))
+  }
+
+  class SymsInPackage(pkgName: String) {
+    def pkg     = rootMirror.getPackage(TermName(pkgName))
+    def classes = allMembers(pkg) filter (_.isClass)
+    def modules = allMembers(pkg) filter (_.isModule)
+    def symbols = classes ++ terms filterNot (_ eq NoSymbol)
+    def terms   = allMembers(pkg) filter (s => s.isTerm && !s.isConstructor)
+    def tparams = classes flatMap (_.info.typeParams)
+    def tpes    = symbols map (_.tpe) distinct
+  }
+}
 
 object Test extends CompilerTest {
   import global._
@@ -23,7 +73,7 @@ class Sub extends Base {
   import syms._
 
   def check(source: String, unit: global.CompilationUnit) {
-    afterTyper {
+    exitingTyper {
       terms.filter(_.name.toString == "foo").foreach(sym => {
         val xParam = sym.tpe.paramss.flatten.head
         val annot = sym.tpe.finalResultType.annotations.head
diff --git a/test/files/run/t7120.check b/test/files/run/t7120.check
new file mode 100644
index 0000000..45a4fb7
--- /dev/null
+++ b/test/files/run/t7120.check
@@ -0,0 +1 @@
+8
diff --git a/test/files/run/t7120/Base_1.scala b/test/files/run/t7120/Base_1.scala
new file mode 100644
index 0000000..be07b4f
--- /dev/null
+++ b/test/files/run/t7120/Base_1.scala
@@ -0,0 +1,10 @@
+// This bug doesn't depend on separate compilation;
+// the compilation is split here only to minimize the
+// log output while debugging the problem.

+
+case class Container( v: String )
+
+trait Base[ T <: AnyRef ] {
+  type UserType = T
+  protected def defect: PartialFunction[ UserType, String ]
+}
diff --git a/test/files/run/t7120/Derived_2.scala b/test/files/run/t7120/Derived_2.scala
new file mode 100644
index 0000000..e0de629
--- /dev/null
+++ b/test/files/run/t7120/Derived_2.scala
@@ -0,0 +1,9 @@
+trait Derived extends Base[ Container ] {
+  protected def defect = { case c: Container => c.v.toString }
+}
+
+// Erasure was ignoring the prefix `Derived#7001.this` when erasing
+// A1, and consequently used `Object` rather than `Container`, which
+// was only seen because that signature clashed with the bridge method.
+//
+// applyOrElse[A1 <: Derived#7001.this.UserType#7318, B1 >: String](x1: A1)
diff --git a/test/files/run/t7120/Run_3.scala b/test/files/run/t7120/Run_3.scala
new file mode 100644
index 0000000..95e7f99
--- /dev/null
+++ b/test/files/run/t7120/Run_3.scala
@@ -0,0 +1,3 @@
+object Test extends Derived with App {
+  println( defect( Container( "8" ) ) )
+}
diff --git a/test/files/run/t7120b.check b/test/files/run/t7120b.check
new file mode 100644
index 0000000..aa2f5e7
--- /dev/null
+++ b/test/files/run/t7120b.check
@@ -0,0 +1,2 @@
+public int C$D.foo(java.lang.String)
+public int C$D.foo(java.lang.String)
diff --git a/test/files/run/t7120b.scala b/test/files/run/t7120b.scala
new file mode 100644
index 0000000..0be4eb7
--- /dev/null
+++ b/test/files/run/t7120b.scala
@@ -0,0 +1,30 @@
+
+import scala.language.higherKinds
+
+trait Base[A] { type B = A; }
+class C extends Base[String] {
+  class D {
+    def foo[B1 <: B](b: B1) = 0
+  }
+}
+
+trait BaseHK[M[_], A] { type B = M[A]; }
+object BaseHK { type Id[X] = X }
+class CHK extends BaseHK[BaseHK.Id, String] {
+  class D {
+    def foo[B1 <: B](b: B1) = 0
+  }
+}
+
+
+object Test extends App {
+  val c = new C
+  val d = new c.D()
+  val meth = d.getClass.getMethods.find(_.getName == "foo").get
+  println(meth)
+
+  val chk = new CHK
+  val dhk = new chk.D()
+  val methhk = d.getClass.getMethods.find(_.getName == "foo").get
+  println(methhk)
+}
diff --git a/test/files/run/t7151.check b/test/files/run/t7151.check
new file mode 100644
index 0000000..d532d95
--- /dev/null
+++ b/test/files/run/t7151.check
@@ -0,0 +1,6 @@
+class Test$InnerObject$ isFinal = false
+class Test$InnerCase isFinal = true
+class Test$InnerNonCase isFinal = true
+class TopLevelObject$ isFinal = true
+class TopLevelCase isFinal = true
+class TopLevelNonCase isFinal = true
diff --git a/test/files/run/t7151.scala b/test/files/run/t7151.scala
new file mode 100644
index 0000000..f6492ba
--- /dev/null
+++ b/test/files/run/t7151.scala
@@ -0,0 +1,24 @@
+import java.lang.reflect.Modifier.isFinal
+
+object Test {
+  object InnerObject
+  final case class InnerCase()
+  final class InnerNonCase()
+
+  def main(args: Array[String]) {
+    def checkFinal(clazz: Class[_]) =
+      println(s"${clazz} isFinal = ${isFinal(clazz.getModifiers())}")
+
+    checkFinal(InnerObject.getClass)
+    checkFinal(classOf[InnerCase])
+    checkFinal(classOf[InnerNonCase])
+
+    checkFinal(TopLevelObject.getClass)
+    checkFinal(classOf[TopLevelCase])
+    checkFinal(classOf[TopLevelNonCase])
+  }
+}
+
+object TopLevelObject
+final case class TopLevelCase()
+final case class TopLevelNonCase()
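
t7151 checks which classes the 2.11 backend marks ACC_FINAL by inspecting modifiers through Java reflection. The same ad-hoc check on arbitrary classes, as a sketch:

    import java.lang.reflect.Modifier

    object FinalCheck extends App {
      def isFinalClass(clazz: Class[_]): Boolean = Modifier.isFinal(clazz.getModifiers)
      println(isFinalClass(classOf[String]))  // true: String is final in the JDK
      println(isFinalClass(classOf[Object]))  // false
    }
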
diff --git a/test/files/run/virtpatmat_opt_sharing.check b/test/files/run/t7157.check
similarity index 100%
copy from test/files/run/virtpatmat_opt_sharing.check
copy to test/files/run/t7157.check
diff --git a/test/files/run/t7157/Impls_Macros_1.scala b/test/files/run/t7157/Impls_Macros_1.scala
new file mode 100644
index 0000000..cc258b0
--- /dev/null
+++ b/test/files/run/t7157/Impls_Macros_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.blackbox.Context
+import language.experimental.macros
+
+object Macros {
+  object AImpl {
+    def a(ctx: Context)(args: ctx.Expr[Any]*): ctx.Expr[Unit] = {
+      import ctx.universe._
+      ctx.Expr[Unit](Apply(Ident(TermName("println")), List(Literal(Constant(1)))))
+    }
+  }
+
+  implicit class A(context: StringContext) {
+    def a(args: Any*): Unit = macro AImpl.a
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/t7157/Test_2.scala b/test/files/run/t7157/Test_2.scala
new file mode 100644
index 0000000..cceb5ca
--- /dev/null
+++ b/test/files/run/t7157/Test_2.scala
@@ -0,0 +1,5 @@
+import Macros._
+
+object Test extends App {
+  a""
+}
\ No newline at end of file
diff --git a/test/files/run/t7171.check b/test/files/run/t7171.check
new file mode 100644
index 0000000..d826f6c
--- /dev/null
+++ b/test/files/run/t7171.check
@@ -0,0 +1,3 @@
+t7171.scala:2: warning: The outer reference in this type test cannot be checked at run time.
+  final case class A()
+                   ^
diff --git a/test/files/run/t7171.flags b/test/files/run/t7171.flags
new file mode 100644
index 0000000..c02e5f2
--- /dev/null
+++ b/test/files/run/t7171.flags
@@ -0,0 +1 @@
+-unchecked
diff --git a/test/files/run/t7171.scala b/test/files/run/t7171.scala
index 97585b9..e0a1192 100644
--- a/test/files/run/t7171.scala
+++ b/test/files/run/t7171.scala
@@ -16,7 +16,7 @@ object Test extends App {
   val a1 = new t1.A()
   val a2 = new t1.A()
   assert(t1.foo(a1))
-  // as noted in the unchecked warning (tested in the corresponding neg test),
+  // as noted in the unchecked warning (also tested in the corresponding neg test),
   // the outer pointer isn't checked
   assert(t1.foo(a2))
 }
diff --git a/test/files/run/t7181.check b/test/files/run/t7181.check
new file mode 100644
index 0000000..e4b8e30
--- /dev/null
+++ b/test/files/run/t7181.check
@@ -0,0 +1,23 @@
+normal exit MainNormalExit
+finally MainNormalExit
+normal flow MainNormalExit
+
+return MainReturn
+finally MainReturn
+
+uncaught exception MainUncaughtException
+finally MainUncaughtException
+
+caught exception ExceptionNormalExit
+normal exit ExceptionNormalExit
+finally ExceptionNormalExit
+normal flow ExceptionNormalExit
+
+caught exception ExceptionReturn
+return ExceptionReturn
+finally ExceptionReturn
+
+caught exception ExceptionUncaughtException
+uncaught exception ExceptionUncaughtException
+finally ExceptionUncaughtException
+
diff --git a/test/files/run/t7181.scala b/test/files/run/t7181.scala
new file mode 100644
index 0000000..489ec31
--- /dev/null
+++ b/test/files/run/t7181.scala
@@ -0,0 +1,78 @@
+sealed abstract class Action
+// exit the try body normally
+case object MainNormalExit extends Action
+// exit the try body with a 'return'
+case object MainReturn extends Action
+// exit the try body with an uncaught exception
+case object MainUncaughtException extends Action
+// exit the try body with a caught exception and exit the exception handler normally
+case object ExceptionNormalExit extends Action
+// exit the try body with a caught exception and exit the exception handler with a 'return'
+case object ExceptionReturn extends Action
+// exit the try body with a caught exception and exit the exception handler with an uncaught exception
+case object ExceptionUncaughtException extends Action
+
+case class UncaughtException(action: Action) extends RuntimeException
+case class CaughtException(action: Action) extends RuntimeException
+
+object Test extends App {
+  def test(action: Action, expectException: Boolean = false) {
+    var gotException = false
+    val result = try
+      driver(action)
+    catch {
+      case UncaughtException(a) =>
+        gotException = true
+        a
+    }
+    if (gotException) assert(expectException, "Got unexpected exception")
+    else assert(!expectException, "Did not get expected exception")
+
+    assert(result == action, s"Expected $action but got $result")
+    println()
+  }
+
+  def driver(action: Action): Action = {
+    val result = try {
+      action match {
+        case MainNormalExit =>
+          println(s"normal exit $action")
+          action
+        case MainReturn =>
+          println(s"return $action")
+          return action
+        case MainUncaughtException =>
+          println(s"uncaught exception $action")
+          throw UncaughtException(action)
+        case _ =>
+          println(s"caught exception $action")
+          throw CaughtException(action)
+      }
+    } catch {
+      case CaughtException(action) => action match {
+        case ExceptionNormalExit =>
+          println(s"normal exit $action")
+          action
+        case ExceptionReturn =>
+          println(s"return $action")
+          return action
+        case ExceptionUncaughtException =>
+          println(s"uncaught exception $action")
+          throw UncaughtException(action)
+        case _ =>
+          sys.error(s"unexpected $action in exception handler")
+      }
+    } finally {
+      println(s"finally $action")
+    }
+    println(s"normal flow $action")
+    result
+  }
+
+  test(MainNormalExit)
+  test(MainReturn)
+  test(MainUncaughtException, true)
+  test(ExceptionNormalExit)
+  test(ExceptionReturn)
+  test(ExceptionUncaughtException, true)
+}
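
The check file for t7181 encodes the rule that a finally block runs even when the try body or the exception handler exits with a return or an uncaught exception. The return case in isolation, as a sketch:

    object FinallyAndReturn extends App {
      def f(): Int =
        try {
          return 1             // the method exits, but only after the finally block has run
        } finally {
          println("finally still runs")
        }

      println(f())             // prints "finally still runs", then 1
    }
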
diff --git a/test/files/run/t7185.check b/test/files/run/t7185.check
index 455c1aa..ebf85b7 100644
--- a/test/files/run/t7185.check
+++ b/test/files/run/t7185.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> import scala.tools.reflect.ToolBox
 import scala.tools.reflect.ToolBox
 
@@ -10,21 +8,21 @@ scala> import scala.reflect.runtime.universe._
 import scala.reflect.runtime.universe._
 
 scala> object O { def apply() = 0 }
-defined module O
+defined object O
 
 scala> val ORef = reify { O }.tree
 ORef: reflect.runtime.universe.Tree = $read.O
 
 scala> val tree = Apply(Block(Nil, Block(Nil, ORef)), Nil)
-tree: reflect.runtime.universe.Apply = 
+tree: reflect.runtime.universe.Apply =
 {
   {
     $read.O
   }
 }()
 
-scala> {val tb = reflect.runtime.currentMirror.mkToolBox(); tb.typeCheck(tree): Any}
-res0: Any = 
+scala> {val tb = reflect.runtime.currentMirror.mkToolBox(); tb.typecheck(tree): Any}
+res0: Any =
 {
   {
     $read.O.apply()
diff --git a/test/files/run/t7185.scala b/test/files/run/t7185.scala
index d9d913e..62d6424 100644
--- a/test/files/run/t7185.scala
+++ b/test/files/run/t7185.scala
@@ -7,6 +7,6 @@ import scala.reflect.runtime.universe._
 object O { def apply() = 0 }
 val ORef = reify { O }.tree
 val tree = Apply(Block(Nil, Block(Nil, ORef)), Nil)
-{val tb = reflect.runtime.currentMirror.mkToolBox(); tb.typeCheck(tree): Any}
+{val tb = reflect.runtime.currentMirror.mkToolBox(); tb.typecheck(tree): Any}
 """
 }
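
The t7185 transcript also reflects the ToolBox rename from typeCheck to typecheck. A standalone sketch of the 2.11 spelling (requires scala-compiler on the classpath; the tree is an arbitrary example):

    import scala.reflect.runtime.{currentMirror => cm}
    import scala.reflect.runtime.universe._
    import scala.tools.reflect.ToolBox

    object ToolBoxTypecheck extends App {
      val tb = cm.mkToolBox()
      // typecheck attributes the tree and fills in its type
      val typed = tb.typecheck(q"List(1, 2, 3).map(x => x + 1)")
      println(typed.tpe)       // List[Int]
    }
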
diff --git a/test/files/run/t7198.check b/test/files/run/t7198.check
new file mode 100644
index 0000000..6dad496
--- /dev/null
+++ b/test/files/run/t7198.check
@@ -0,0 +1,2 @@
+The quick brown fox jumped
+And ran away with the vixen.
diff --git a/test/files/run/t7198.scala b/test/files/run/t7198.scala
new file mode 100644
index 0000000..26e1d88
--- /dev/null
+++ b/test/files/run/t7198.scala
@@ -0,0 +1,9 @@
+/* spew a few lines
+ * filter: Over the moon
+ */
+object Test extends App {
+  Console println "The quick brown fox jumped"
+  Console println "Over the moon"
+  Console println "And ran away with the vixen."
+  Console println "Java HotSpot(TM) 64-Bit Server VM warning: Failed to reserve shared memory (errno = 28)."
+}
diff --git a/test/files/run/t7214.scala b/test/files/run/t7214.scala
index ff1ea80..15c2c24 100644
--- a/test/files/run/t7214.scala
+++ b/test/files/run/t7214.scala
@@ -25,7 +25,7 @@ class Crash {
     def unapply(a: Alias): Option[Any] = None
   }
   (t: Any) match {
-    case Extractor() =>
+    case Extractor(_) =>
     case _ =>
   }
 
diff --git a/test/files/run/t7223.check b/test/files/run/t7223.check
new file mode 100644
index 0000000..573541a
--- /dev/null
+++ b/test/files/run/t7223.check
@@ -0,0 +1 @@
+0
diff --git a/test/files/run/t7223.scala b/test/files/run/t7223.scala
new file mode 100644
index 0000000..a707e95
--- /dev/null
+++ b/test/files/run/t7223.scala
@@ -0,0 +1,11 @@
+class D(val a: () => Int => () => Any) {
+  a()(0)()
+}
+
+object Crash extends D(() => {
+  (x: Int) => {() => { new { println(x.toString) } }}
+})
+
+object Test extends App {
+  Crash
+}
diff --git a/test/files/run/t7231.check b/test/files/run/t7231.check
new file mode 100644
index 0000000..c1e4b6c
--- /dev/null
+++ b/test/files/run/t7231.check
@@ -0,0 +1,2 @@
+null
+null
diff --git a/test/files/run/t7231.scala b/test/files/run/t7231.scala
new file mode 100644
index 0000000..7d6bc81
--- /dev/null
+++ b/test/files/run/t7231.scala
@@ -0,0 +1,11 @@
+object Test extends App {
+   val bar: Null = null
+
+   def foo(x: Array[Int]) = x
+   def baz(x: String) = x
+
+   // first line was failing
+   println(foo(bar))
+   // this line worked but good to have a double check
+   println(baz(bar))
+}
\ No newline at end of file
diff --git a/test/files/run/t7240.check b/test/files/run/t7240.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t7240/Macros_1.scala b/test/files/run/t7240/Macros_1.scala
index 6465e18..b24b607 100644
--- a/test/files/run/t7240/Macros_1.scala
+++ b/test/files/run/t7240/Macros_1.scala
@@ -1,7 +1,7 @@
 package bakery
 
 import scala.language.experimental.macros
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 trait FailureCake {
   implicit def liftAnyFails[T: Manifest]: Any = ???
@@ -34,14 +34,14 @@ object Bakery {
         List(dslTrait("bakery.FailureCake")),
         emptyValDef,
         List(
-          DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(),
-            Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(())))),
+          DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(),
+            Block(List(Apply(Select(Super(This(typeNames.EMPTY), typeNames.EMPTY), termNames.CONSTRUCTOR), List())), Literal(Constant(())))),
           DefDef(Modifiers(), newTermName("main"), List(), List(List()), Ident(newTypeName("Any")), transformedBody))))
 
-    def constructor = Apply(Select(New(Ident(newTypeName("eval"))), nme.CONSTRUCTOR), List())
+    def constructor = Apply(Select(New(Ident(newTypeName("eval"))), termNames.CONSTRUCTOR), List())
 
     c.eval(c.Expr[Any](
-      c.resetAllAttrs(Block(composeDSL(Literal(Constant(1))), constructor))))
+      c.untypecheck(Block(composeDSL(Literal(Constant(1))), constructor))))
 
     c.Expr[Any](Literal(Constant(1)))
   }
diff --git a/test/files/run/t7240/Test_2.scala b/test/files/run/t7240/Test_2.scala
index 2450bda..5cc2cc7 100644
--- a/test/files/run/t7240/Test_2.scala
+++ b/test/files/run/t7240/Test_2.scala
@@ -1,3 +1,3 @@
 object Test extends App {
-  bakery.Bakery.failure
-}
\ No newline at end of file
+  val v = bakery.Bakery.failure
+}
diff --git a/test/files/run/t7265.scala b/test/files/run/t7265.scala
deleted file mode 100644
index c556930..0000000
--- a/test/files/run/t7265.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-
-import scala.util.Properties._
-
-object Test extends App {
-
-  setProp("java.specification.version", "1.7")
-
-  assert( isJavaAtLeast("1.5"))
-  assert( isJavaAtLeast("1.6"))
-  assert( isJavaAtLeast("1.7"))
-  assert(!isJavaAtLeast("1.8"))
-  assert(!isJavaAtLeast("1.71"))
-
-  failing(isJavaAtLeast("1.a"))
-  failing(isJavaAtLeast("1"))
-  failing(isJavaAtLeast(""))
-  failing(isJavaAtLeast("."))
-  failing(isJavaAtLeast(".5"))
-  failing(isJavaAtLeast("1.7.1"))
-
-  def failing(u: =>Unit) = try {
-    u
-    assert(false, "Expected Exception")
-  } catch {
-    case _: NumberFormatException =>
-  }
-}
diff --git a/test/files/run/t7271.check b/test/files/run/t7271.check
index dcd828a..f7a2301 100644
--- a/test/files/run/t7271.check
+++ b/test/files/run/t7271.check
@@ -1,12 +1,12 @@
 [[syntax trees at end of                    parser]] // newSource1.scala
-[0:91]package [0:0]<empty> {
-  [0:91]class C extends [8:91][91]scala.AnyRef {
+[6]package [6]<empty> {
+  [6]class C extends [8][91]scala.AnyRef {
     [8]def <init>() = [8]{
-      [8][8][8]super.<init>();
+      [NoPosition][NoPosition][NoPosition]super.<init>();
       [8]()
     };
-    [16:44]def quote = [28:44]<28:44><28:44>[28]StringContext([30:34]"foo", [40:44]"baz").s([35:39]this);
-    [51:85]def tripleQuote = [69:85]<69:85><69:85>[69]StringContext([71:75]"foo", [81:85]"baz").s([76:80]this)
+    [20]def quote = [28][28][28][28]StringContext([30]"foo", [40]"baz").s([35]this);
+    [55]def tripleQuote = [69][69][69][69]StringContext([71]"foo", [81]"baz").s([76]this)
   }
 }
 
diff --git a/test/files/run/t7271.scala b/test/files/run/t7271.scala
index 6fccf14..69d5ea3 100644
--- a/test/files/run/t7271.scala
+++ b/test/files/run/t7271.scala
@@ -1,9 +1,9 @@
 import scala.tools.partest._
-import java.io._
 import scala.tools.nsc._
-import scala.tools.nsc.util.CommandLineParser
+import scala.tools.cmd.CommandLineParser
 import scala.tools.nsc.{Global, Settings, CompilerCommand}
 import scala.tools.nsc.reporters.ConsoleReporter
+import scala.reflect.internal.Positions
 
 object Test extends DirectTest {
 
@@ -29,6 +29,6 @@ object Test extends DirectTest {
     val settings = new Settings()
     settings.Xprintpos.value = true
     val command = new CompilerCommand((CommandLineParser tokenize extraSettings) ++ args.toList, settings)
-    new Global(command.settings, new ConsoleReporter(settings)) with interactive.RangePositions
+    new Global(command.settings, new ConsoleReporter(settings)) with Positions
   }
 }
diff --git a/test/files/run/t7290.check b/test/files/run/t7290.check
new file mode 100644
index 0000000..aff48ab
--- /dev/null
+++ b/test/files/run/t7290.check
@@ -0,0 +1,6 @@
+t7290.scala:4: warning: Pattern contains duplicate alternatives: 0
+    case 0 | 0 => 0
+         ^
+t7290.scala:5: warning: Pattern contains duplicate alternatives: 2, 3
+    case 2 | 2 | 2 | 3 | 2 | 3 => 0
+         ^
diff --git a/test/files/run/t7291b.check b/test/files/run/t7291.check
similarity index 100%
rename from test/files/run/t7291b.check
rename to test/files/run/t7291.check
diff --git a/test/files/run/t7291.scala b/test/files/run/t7291.scala
new file mode 100644
index 0000000..ca44e96
--- /dev/null
+++ b/test/files/run/t7291.scala
@@ -0,0 +1,22 @@
+
+import scala.language.{ higherKinds, implicitConversions }
+
+trait Fooable[T]
+object Fooable {
+  implicit def conjure[T]: Fooable[T] = {
+    println("conjure")
+    new Fooable[T]{}
+  }
+
+}
+
+object Test {
+  implicit def traversable[T, Coll[_] <: Traversable[_]](implicit
+elem: Fooable[T]): Fooable[Coll[T]] = {
+    println("traversable")
+    new Fooable[Coll[T]]{}
+  }
+  def main(args: Array[String]) {
+    implicitly[Fooable[List[Any]]]
+  }
+}
diff --git a/test/files/run/t7291a.check b/test/files/run/t7291a.check
deleted file mode 100644
index 126faa1..0000000
--- a/test/files/run/t7291a.check
+++ /dev/null
@@ -1 +0,0 @@
-conjure
diff --git a/test/files/run/t7291a.flags b/test/files/run/t7291a.flags
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t7291a.scala b/test/files/run/t7291a.scala
deleted file mode 100644
index 4b7c4a4..0000000
--- a/test/files/run/t7291a.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-trait Fooable[T]
-object Fooable {
-  implicit def conjure[T]: Fooable[T] = {
-    println("conjure")
-    new Fooable[T]{}
-  }
-
-}
-
-object Test {
-  implicit def traversable[T, Coll[_] <: Traversable[_]](implicit
-elem: Fooable[T]): Fooable[Coll[T]] = {
-    println("traversable")
-    new Fooable[Coll[T]]{}
-  }
-  def main(args: Array[String]) {
-    implicitly[Fooable[List[Any]]]
-  }
-}
diff --git a/test/files/run/t7291b.flags b/test/files/run/t7291b.flags
deleted file mode 100644
index d564f2b..0000000
--- a/test/files/run/t7291b.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xdivergence211
\ No newline at end of file
diff --git a/test/files/run/t7291b.scala b/test/files/run/t7291b.scala
deleted file mode 100644
index 30c4261..0000000
--- a/test/files/run/t7291b.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-trait Fooable[T]
-object Fooable {
-  implicit def conjure[T]: Fooable[T] = {
-    println("conjure")
-    new Fooable[T]{}
-  }
-
-}
-
-object Test {
-  implicit def traversable[T, Coll[_] <: Traversable[_]](implicit
-elem: Fooable[T]): Fooable[Coll[T]] = {
-    println("traversable")
-    new Fooable[Coll[T]]{}
-  }
-  def main(args: Array[String]) {
-    implicitly[Fooable[List[Any]]] 
-  }
-}
diff --git a/test/files/run/t7300.check b/test/files/run/t7300.check
new file mode 100644
index 0000000..51993f0
--- /dev/null
+++ b/test/files/run/t7300.check
@@ -0,0 +1,2 @@
+2
+2
diff --git a/test/files/run/t7300.scala b/test/files/run/t7300.scala
new file mode 100644
index 0000000..ec84169
--- /dev/null
+++ b/test/files/run/t7300.scala
@@ -0,0 +1,11 @@
+object Test extends App {
+  // single line comment in multi line comment
+  /*//*/ val x = 1 */*/
+  val x = 2
+  println(x)
+
+  // single line comment in nested multi line comment
+  /*/*//*/ val y = 1 */*/*/
+  val y = 2
+  println(y)
+}
diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check
index 9667369..b7443aa 100644
--- a/test/files/run/t7319.check
+++ b/test/files/run/t7319.check
@@ -1,8 +1,6 @@
 Type in expressions to have them evaluated.
 Type :help for more information.
 
-scala> 
-
 scala> class M[A]
 defined class M
 
@@ -23,7 +21,7 @@ scala> convert(Some[Int](0))
  --- because ---
 argument expression's type is not compatible with formal parameter type;
  found   : Some[Int]
- required: ?F forSome { type _$1 <: ?F forSome { type _$2 } }
+ required: ?F[_$1] forSome { type _$1 <: ?F[_$2] forSome { type _$2 } }
               convert(Some[Int](0))
               ^
 <console>:12: error: type mismatch;
@@ -32,7 +30,14 @@ argument expression's type is not compatible with formal parameter type;
               convert(Some[Int](0))
                                ^
 
+scala> Range(1,2).toArray: Seq[_]
+<console>:11: error: polymorphic expression cannot be instantiated to expected type;
+ found   : [B >: Int]Array[B]
+ required: Seq[_]
+              Range(1,2).toArray: Seq[_]
+                         ^
+
 scala> 0
-res1: Int = 0
+res2: Int = 0
 
 scala> 
diff --git a/test/files/run/t7319.scala b/test/files/run/t7319.scala
index 23ffeb9..65a3ed9 100644
--- a/test/files/run/t7319.scala
+++ b/test/files/run/t7319.scala
@@ -9,5 +9,6 @@ implicit def ma0[A](a: A): M[A] = null
 implicit def ma1[A](a: A): M[A] = null
 def convert[F[X <: F[X]]](builder: F[_ <: F[_]]) = 0
 convert(Some[Int](0))
+Range(1,2).toArray: Seq[_]
 0""" // before the fix, this line, and all that followed, re-issued the implicit ambiguity error.
 }
diff --git a/test/files/run/t7326.scala b/test/files/run/t7326.scala
new file mode 100644
index 0000000..ed9471e
--- /dev/null
+++ b/test/files/run/t7326.scala
@@ -0,0 +1,64 @@
+import scala.collection.immutable.ListSet
+import scala.collection.immutable.HashSet
+
+object Test extends App {
+
+  def testCorrectness() {
+    // a key that has many hashCode collisions
+    case class Collision(i: Int) { override def hashCode = i / 5 }
+
+    def subsetTest[T](emptyA:Set[T], emptyB:Set[T], mkKey:Int => T, n:Int) {
+      val outside = mkKey(n + 1)
+      for(i <- 0 to n) {
+        val a = emptyA ++ (0 until i).map(mkKey)
+        // every set must be a subset of itself
+        require(a.subsetOf(a), "A set must be the subset of itself")
+        for(k <- 0 to i) {
+          // k <= i, so b is definitely a subset
+          val b = emptyB ++ (0 until k).map(mkKey)
+          // c has less elements than a, but contains a value that is not in a
+          // so it is not a subset, but that is not immediately obvious due to size
+          val c = b + outside
+          require(b.subsetOf(a), s"$b must be a subset of $a")
+          require(!c.subsetOf(a), s"$c must not be a subset of $a")
+        }
+      }
+    }
+
+    // test the HashSet/HashSet case
+    subsetTest(HashSet.empty[Int], HashSet.empty[Int], identity, 100)
+
+    // test the HashSet/other set case
+    subsetTest(HashSet.empty[Int], ListSet.empty[Int], identity, 100)
+
+    // test the HashSet/HashSet case for Collision keys
+    subsetTest(HashSet.empty[Collision], HashSet.empty[Collision], Collision, 100)
+
+    // test the HashSet/other set case for Collision keys
+    subsetTest(HashSet.empty[Collision], ListSet.empty[Collision], Collision, 100)
+  }
+
+  /**
+   * A main performance benefit of the new subsetOf is that we do not have to call hashCode during subsetOf
+   * since we already have the hash codes in the HashSet1 nodes.
+   */
+  def testNoHashCodeInvocationsDuringSubsetOf() = {
+    var count = 0
+
+    case class HashCodeCounter(i:Int) {
+      override def hashCode = {
+        count += 1
+        i
+      }
+    }
+
+    val a = HashSet.empty ++ (0 until 100).map(HashCodeCounter)
+    val b = HashSet.empty ++ (0 until 50).map(HashCodeCounter)
+    val count0 = count
+    val result = b.subsetOf(a)
+    require(count == count0, "key.hashCode must not be called during subsetOf of two HashSets")
+    result
+  }
+  testCorrectness()
+  testNoHashCodeInvocationsDuringSubsetOf()
+}
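
The comment in t7326 explains that subsetOf between two HashSets works on the hashes already stored in the nodes, so no element needs to be re-hashed. The user-facing behaviour, as a tiny sketch:

    import scala.collection.immutable.HashSet

    object SubsetDemo extends App {
      val big   = HashSet(1 to 1000: _*)
      val small = HashSet(1 to 500: _*)
      // both sides are HashSets, so the check runs directly on the stored hashes
      println(small.subsetOf(big))   // true
      println(big.subsetOf(small))   // false
    }
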
diff --git a/test/files/run/t7328.check b/test/files/run/t7328.check
new file mode 100644
index 0000000..e386fe7
--- /dev/null
+++ b/test/files/run/t7328.check
@@ -0,0 +1,4 @@
+Foo
+Foo(3)
+Foo(3)
+Foo(5)
diff --git a/test/files/run/t7328.scala b/test/files/run/t7328.scala
new file mode 100644
index 0000000..56956b4
--- /dev/null
+++ b/test/files/run/t7328.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+case class Foo(x: Int) extends AnyVal
+case class Bar(foo: Foo)
+
+object Test extends App {
+  val foo = typeOf[Bar].decl(TermName("foo")).asMethod
+  println(foo.returnType) // Foo
+
+  val bar = Bar(Foo(3))
+  println(bar.foo) // Foo(3)
+
+  val im = cm.reflect(bar)
+  println(im.reflectField(foo).get) // incorrectly gives java.lang.Integer(3) not Foo(3)
+  im.reflectField(foo).set(Foo(5)) // java.lang.IllegalArgumentException: Can not set int field Bar.foo to Foo
+  println(im.reflectMethod(foo)()) // incorrectly gives java.lang.Integer(3) not Foo(3)
+}
\ No newline at end of file
diff --git a/test/files/run/t7331b.check b/test/files/run/t7331b.check
index 7034a95..413c93a 100644
--- a/test/files/run/t7331b.check
+++ b/test/files/run/t7331b.check
@@ -1,3 +1,3 @@
-reflective compilation has failed: 
+reflective compilation has failed:
 
 ')' expected but eof found.
diff --git a/test/files/run/t7331c.check b/test/files/run/t7331c.check
index af9f1b1..a9dc6a7 100644
--- a/test/files/run/t7331c.check
+++ b/test/files/run/t7331c.check
@@ -1,3 +1,3 @@
-ClassDef(Modifiers(), newTypeName("C"), List(), Template(List(Select(Ident(scala), newTypeName("AnyRef"))), emptyValDef, List(DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(())))))))
+ClassDef(Modifiers(), TypeName("C"), List(), Template(List(Select(Ident(scala), TypeName("AnyRef"))), noSelfType, List(DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(())))))))
 source-<toolbox>,line-1,offset=6
 NoPosition
diff --git a/test/files/run/t7336.scala b/test/files/run/t7336.scala
index ace83f2..21b0be5 100644
--- a/test/files/run/t7336.scala
+++ b/test/files/run/t7336.scala
@@ -9,7 +9,7 @@ import scala.concurrent.duration.Duration
  *  resulting in a speedy OutOfMemoryError. Now, each array should be freed soon
  *  after it is created and the test should complete without problems.
  */
-object Test {  
+object Test {
   def main(args: Array[String]) {
     def loop(i: Int, arraySize: Int): Future[Unit] = {
       val array = new Array[Byte](arraySize)
diff --git a/test/files/run/t7337.check b/test/files/run/t7337.check
new file mode 100644
index 0000000..dd2b31f
--- /dev/null
+++ b/test/files/run/t7337.check
@@ -0,0 +1 @@
+doesnotexist does not exist or is not a directory
diff --git a/test/files/run/t7337.scala b/test/files/run/t7337.scala
new file mode 100644
index 0000000..9913f8a
--- /dev/null
+++ b/test/files/run/t7337.scala
@@ -0,0 +1,19 @@
+import scala.tools.partest._
+import scala.tools.nsc._
+import scala.tools.cmd.CommandLineParser
+
+object Test extends DirectTest {
+  override def code = "class C"
+  override def newCompiler(args: String*): Global = {
+    val settings = newSettings((CommandLineParser tokenize ("-d doesnotexist " + extraSettings)) ++ args.toList)
+    newCompiler(settings)
+  }
+
+  override def show() {
+    try {
+      newCompiler()
+    } catch {
+      case fe: FatalError => println(fe.getMessage)
+    }
+  }
+}
diff --git a/test/files/run/t7341.check b/test/files/run/t7341.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t7341.scala b/test/files/run/t7341.scala
index dc526c6..ffea7f9 100755
--- a/test/files/run/t7341.scala
+++ b/test/files/run/t7341.scala
@@ -4,7 +4,7 @@ object Obj {
   def foo {
     returning(() => cache = ())
   }
- 
+
   def apply(): Any = {
     cache
   }
diff --git a/test/files/run/t7374.check b/test/files/run/t7374.check
new file mode 100644
index 0000000..4efa6f7
--- /dev/null
+++ b/test/files/run/t7374.check
@@ -0,0 +1,3 @@
+List(2, 3)
+ParVector(1, 2, 3)
+List(1, 2)
diff --git a/test/files/run/t7374/Some.scala b/test/files/run/t7374/Some.scala
new file mode 100644
index 0000000..3266a56
--- /dev/null
+++ b/test/files/run/t7374/Some.scala
@@ -0,0 +1,3 @@
+object SomeScala {
+  def list = List(1, 2, 3) 
+}
diff --git a/test/files/run/t7374/Test.java b/test/files/run/t7374/Test.java
new file mode 100644
index 0000000..02f8614
--- /dev/null
+++ b/test/files/run/t7374/Test.java
@@ -0,0 +1,7 @@
+public class Test {
+    public static void main(String[] args) {
+        System.out.println(SomeScala.list().tail());
+        System.out.println(SomeScala.list().par());
+        System.out.println(SomeScala.list().init());
+    }
+}
diff --git a/test/files/run/t7375b/Macros_1.scala b/test/files/run/t7375b/Macros_1.scala
index 70e79cc..b6090e7 100644
--- a/test/files/run/t7375b/Macros_1.scala
+++ b/test/files/run/t7375b/Macros_1.scala
@@ -1,5 +1,5 @@
 import language.experimental.macros
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 class C1(val n: Int) extends AnyVal
 class C2(val n: Int) extends AnyRef
@@ -11,7 +11,7 @@ object Macros {
   def foo = macro impl
   def impl(c: Context) = {
     import c.universe._
-    def test[T: c.TypeTag] = reify(println(c.literal(c.reifyRuntimeClass(c.typeOf[T]).toString).splice)).tree
+    def test[T: c.TypeTag] = reify(println(c.Expr[String](Literal(Constant(c.reifyRuntimeClass(c.typeOf[T]).toString))).splice)).tree
     def tests = Block(List(test[C1], test[C2], test[F1], test[F2]), Literal(Constant(())))
     c.Expr[Unit](tests)
   }
diff --git a/test/files/run/t7398.scala b/test/files/run/t7398.scala
index 493c4dc..4b46850 100644
--- a/test/files/run/t7398.scala
+++ b/test/files/run/t7398.scala
@@ -21,6 +21,6 @@ public interface Iterator<E> {
 }
   """
 
-  // We're only checking we can parse it.
+  // We're only checking we can compile it.
   def check(source: String, unit: global.CompilationUnit): Unit = ()
 }
diff --git a/test/files/run/t7406.check b/test/files/run/t7406.check
new file mode 100644
index 0000000..f599e28
--- /dev/null
+++ b/test/files/run/t7406.check
@@ -0,0 +1 @@
+10
diff --git a/test/files/run/t7406.scala b/test/files/run/t7406.scala
new file mode 100644
index 0000000..a8b35c9
--- /dev/null
+++ b/test/files/run/t7406.scala
@@ -0,0 +1,14 @@
+class Arne[@specialized(Long) T](x: T) {
+  val regularVal = x
+  lazy val lazyVal = x
+
+  def apply(f: (T, T) => T): T = f(regularVal, lazyVal)
+}
+
+object Test {
+  val arne = new Arne(5L)
+  def f = arne(_ + _)
+  def main(args: Array[String]): Unit = {
+    println(f)
+  }
+}
diff --git a/test/files/run/t7407.check b/test/files/run/t7407.check
new file mode 100644
index 0000000..e965047
--- /dev/null
+++ b/test/files/run/t7407.check
@@ -0,0 +1 @@
+Hello
diff --git a/test/files/run/t7407.flags b/test/files/run/t7407.flags
new file mode 100644
index 0000000..c8547a2
--- /dev/null
+++ b/test/files/run/t7407.flags
@@ -0,0 +1 @@
+-Ynooptimise -Ybackend:GenBCode
diff --git a/test/files/run/t7407.scala b/test/files/run/t7407.scala
new file mode 100644
index 0000000..cf67602
--- /dev/null
+++ b/test/files/run/t7407.scala
@@ -0,0 +1,11 @@
+// SI-7407
+object Test {
+
+  def main(args: Array[String]) { println(foo) }
+
+  def foo: String = {
+    try return "Hello" finally 10 match {case x => ()}
+  }
+
+}
+
diff --git a/test/files/run/t7407b.check b/test/files/run/t7407b.check
new file mode 100644
index 0000000..f302944
--- /dev/null
+++ b/test/files/run/t7407b.check
@@ -0,0 +1,2 @@
+Hello
+abc
diff --git a/test/files/run/t7407b.flags b/test/files/run/t7407b.flags
new file mode 100644
index 0000000..c8547a2
--- /dev/null
+++ b/test/files/run/t7407b.flags
@@ -0,0 +1 @@
+-Ynooptimise -Ybackend:GenBCode
diff --git a/test/files/run/t7407b.scala b/test/files/run/t7407b.scala
new file mode 100644
index 0000000..b0c0087
--- /dev/null
+++ b/test/files/run/t7407b.scala
@@ -0,0 +1,20 @@
+object Test {
+
+  def main(args: Array[String]) {
+    println(foo(true))
+    println(foo(false))
+  }
+
+  def foo(b: Boolean): String = {
+    try {
+      if(b)
+        return "Hello"
+      else
+        "abc"
+    } finally {
+      10 match {case x => ()}
+    }
+  }
+
+}
+
diff --git a/test/files/run/t7436.scala b/test/files/run/t7436.scala
new file mode 100644
index 0000000..867a931
--- /dev/null
+++ b/test/files/run/t7436.scala
@@ -0,0 +1,9 @@
+class A(val p: Int*)
+
+class B(val p1: Int) extends A(p1)
+
+object Test {
+  def main(args: Array[String]) {
+    new B(1).p1 // threw java.lang.ClassCastException: scala.collection.mutable.WrappedArray$ofInt cannot be cast to java.lang.Integer
+  }
+}
diff --git a/test/files/run/t7439.check b/test/files/run/t7439.check
index ce9e8b5..9ea09f9 100644
--- a/test/files/run/t7439.check
+++ b/test/files/run/t7439.check
@@ -1 +1,2 @@
+Recompiling after deleting t7439-run.obj/A_1.class
 pos: NoPosition Class A_1 not found - continuing with a stub. WARNING
diff --git a/test/files/run/t7439/Test_2.scala b/test/files/run/t7439/Test_2.scala
index e00e9d1..ce9b907 100644
--- a/test/files/run/t7439/Test_2.scala
+++ b/test/files/run/t7439/Test_2.scala
@@ -23,6 +23,8 @@ object Test extends StoreReporterDirectTest {
     val a1Class = new File(testOutput.path, "A_1.class")
     assert(a1Class.exists)
     assert(a1Class.delete())
+    // testIdent separates path segments with '/' regardless of platform and keeps only the last two parts
+    println(s"Recompiling after deleting ${a1Class.testIdent}")
 
     // bad symbolic reference error expected (but no stack trace!)
     compileCode(C)
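
For illustration, a rough sketch of the normalization described in the comment above (a hypothetical helper, not the actual partest testIdent): join the last two path segments with '/' so the printed name is platform-independent, e.g. t7439-run.obj/A_1.class as in the expected output.

    def lastTwoSegments(f: java.io.File): String =
      f.getPath.replace('\\', '/').split('/').takeRight(2).mkString("/")
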
diff --git a/test/files/run/t744.scala b/test/files/run/t744.scala
index d5e9f6d..4895e9b 100644
--- a/test/files/run/t744.scala
+++ b/test/files/run/t744.scala
@@ -5,7 +5,7 @@ trait Linked {
   }
 }
 object Test {
-  class Test extends Linked { 
+  class Test extends Linked {
     trait FileImpl extends super.FileImpl {
 //      val x: int = 1
     }
diff --git a/test/files/run/t7445.scala b/test/files/run/t7445.scala
new file mode 100644
index 0000000..e4ffeb8
--- /dev/null
+++ b/test/files/run/t7445.scala
@@ -0,0 +1,6 @@
+import scala.collection.immutable.ListMap
+
+object Test extends App {
+	val a = ListMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5);
+	require(a.tail == ListMap(2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5));
+}
diff --git a/test/files/run/t7455/Test.scala b/test/files/run/t7455/Test.scala
index b23a724..2cda922 100644
--- a/test/files/run/t7455/Test.scala
+++ b/test/files/run/t7455/Test.scala
@@ -21,7 +21,7 @@ object Test extends DirectTest {
     for {
       name <- Seq("Outer", "Outer$PrivateInner", "Outer$PrivateStaticInner", "Outer$PublicInner")
       clazz = compiler.rootMirror.staticClass(name)
-      constr <- clazz.info.member(nme.CONSTRUCTOR).alternatives
+      constr <- clazz.info.member(termNames.CONSTRUCTOR).alternatives
     } {
       println(constr.defString)
       fullyInitializeSymbol(constr)
diff --git a/test/files/run/t7475b.check b/test/files/run/t7475b.check
new file mode 100644
index 0000000..51993f0
--- /dev/null
+++ b/test/files/run/t7475b.check
@@ -0,0 +1,2 @@
+2
+2
diff --git a/test/files/run/t7475b.scala b/test/files/run/t7475b.scala
new file mode 100644
index 0000000..a205602
--- /dev/null
+++ b/test/files/run/t7475b.scala
@@ -0,0 +1,11 @@
+trait A { private val x = 1 }
+trait B { val x = 2 }
+trait C1 extends B with A { println(x) }
+trait C2 extends A with B { println(x) }
+
+object Test {
+  def main(args: Array[String]): Unit = {
+    new C1 { }
+    new C2 { }
+  }
+}
diff --git a/test/files/run/t7482a.check b/test/files/run/t7482a.check
new file mode 100644
index 0000000..943538f
--- /dev/null
+++ b/test/files/run/t7482a.check
@@ -0,0 +1,10 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> val v: java.util.ArrayList[String] = new java.util.ArrayList[String](5)
+v: java.util.ArrayList[String] = []
+
+scala>   val v: java.util.ArrayList[String] = new java.util.ArrayList[String](5)
+v: java.util.ArrayList[String] = []
+
+scala> 
diff --git a/test/files/run/t7482a.scala b/test/files/run/t7482a.scala
new file mode 100644
index 0000000..d674558
--- /dev/null
+++ b/test/files/run/t7482a.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+  override def code = """
+  val v: java.util.ArrayList[String] = new java.util.ArrayList[String](5)
+  val v: java.util.ArrayList[String] = new java.util.ArrayList[String](5)
+  """
+}
\ No newline at end of file
diff --git a/test/files/run/t7507.scala b/test/files/run/t7507.scala
index 6c1959d..a5eab62 100644
--- a/test/files/run/t7507.scala
+++ b/test/files/run/t7507.scala
@@ -4,6 +4,10 @@ trait Cake extends Slice
 trait Slice { self: Cake =>    // must have self type that extends `Slice`
   private[this] val bippy = () // must be private[this]
   locally(bippy)
+  class C1 {
+    locally(bippy)
+    locally(self.bippy)
+  }
 }
 
 // Originally reported bug:
diff --git a/test/files/run/t7510.check b/test/files/run/t7510.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/t7533.check b/test/files/run/t7533.check
new file mode 100644
index 0000000..fa5b3ed
--- /dev/null
+++ b/test/files/run/t7533.check
@@ -0,0 +1,30 @@
+Testing Symbol.isAbstract...
+=======class C=======
+class C => true
+constructor C => false
+value x1 => true
+value x2 => false
+value x2 => false
+method y1 => true
+method y2 => false
+type T1 => true
+type T2 => false
+=======trait T=======
+trait T => true
+method $init$ => false
+value z1 => true
+value z2 => false
+value z2 => false
+method w1 => true
+method w2 => false
+type U1 => true
+type U2 => false
+=======class D=======
+class D => false
+constructor D => false
+value x1 => false
+value x1 => false
+method y1 => false
+=======object M=======
+object M => false
+constructor M => false
diff --git a/test/files/run/t7533.scala b/test/files/run/t7533.scala
new file mode 100644
index 0000000..c7bd8e8
--- /dev/null
+++ b/test/files/run/t7533.scala
@@ -0,0 +1,38 @@
+import scala.reflect.runtime.universe._
+
+abstract class C {
+  val x1: Int
+  val x2: Int = 2
+  def y1: Int
+  def y2: Int = 2
+  type T1 <: Int
+  type T2 = Int
+}
+trait T {
+  val z1: Int
+  val z2: Int = 2
+  def w1: Int
+  def w2: Int = 2
+  type U1 <: Int
+  type U2 = Int
+}
+class D extends C {
+  val x1 = 3
+  def y1 = 3
+}
+object M
+
+object Test extends App {
+  println("Testing Symbol.isAbstract...")
+  def test[T: TypeTag] = {
+    val sym = typeOf[T].typeSymbol
+    println(s"=======$sym=======")
+    def printAbstract(sym: Symbol) = println(s"$sym => ${sym.isAbstract}")
+    printAbstract(sym)
+    sym.info.decls.sorted.foreach(printAbstract)
+  }
+  test[C]
+  test[T]
+  test[D]
+  test[M.type]
+}
\ No newline at end of file
diff --git a/test/files/run/t7556/Test_2.scala b/test/files/run/t7556/Test_2.scala
index 3184873..a78c917 100644
--- a/test/files/run/t7556/Test_2.scala
+++ b/test/files/run/t7556/Test_2.scala
@@ -5,7 +5,7 @@ object Test {
     val mc = new MegaClass
     val anns = mc.getClass.getAnnotations.map(_.annotationType.getName).toList.sorted
     println(s"class annotations: $anns")
-    val N = typeTag[MegaClass].tpe.declarations.size // was:  error reading Scala signature of MegaClass: 65935
+    val N = typeTag[MegaClass].tpe.decls.size // was:  error reading Scala signature of MegaClass: 65935
     println(s"$N decls via runtime reflection")
   }
 }
diff --git a/test/files/run/t7569.check b/test/files/run/t7569.check
index aade96d..98513c3 100644
--- a/test/files/run/t7569.check
+++ b/test/files/run/t7569.check
@@ -1,8 +1,8 @@
 source-newSource1.scala,line-3,offset=49 A.this.one
 source-newSource1.scala,line-3,offset=49 A.this
-source-newSource1.scala,line-2,offset=41 A.super.<init>()
-source-newSource1.scala,line-2,offset=41 A.super.<init>
-source-newSource1.scala,line-2,offset=41 this
+source-newSource1.scala,line-4,offset=67 A.super.<init>()
+source-newSource1.scala,line-4,offset=67 A.super.<init>
+source-newSource1.scala,line-4,offset=67 this
 source-newSource1.scala,line-3,offset=49 A.this.one
 source-newSource1.scala,line-3,offset=49 A.this
 RangePosition(newSource1.scala, 55, 57, 65) scala.Int.box(1).toString()
diff --git a/test/files/run/t7570a.check b/test/files/run/t7570a.check
new file mode 100644
index 0000000..3cc58df
--- /dev/null
+++ b/test/files/run/t7570a.check
@@ -0,0 +1 @@
+C
diff --git a/test/files/run/t7570a.scala b/test/files/run/t7570a.scala
new file mode 100644
index 0000000..b8b4dde
--- /dev/null
+++ b/test/files/run/t7570a.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import definitions._
+import Flag._
+
+object Test extends App {
+  val tb = cm.mkToolBox()
+  val csym = tb.define(q"""class C { override def toString = "C" }""")
+  println(tb.eval(q"new $csym"))
+}
\ No newline at end of file
diff --git a/test/files/run/t7570b.check b/test/files/run/t7570b.check
new file mode 100644
index 0000000..0c28247
--- /dev/null
+++ b/test/files/run/t7570b.check
@@ -0,0 +1 @@
+compilation failed: reflective toolbox has failed: cannot have free terms in a top-level definition
diff --git a/test/files/run/t7570b.scala b/test/files/run/t7570b.scala
new file mode 100644
index 0000000..7d4ade5
--- /dev/null
+++ b/test/files/run/t7570b.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+import definitions._
+import Flag._
+import internal._
+
+object Test extends App {
+  val tb = cm.mkToolBox()
+  val msg = internal.reificationSupport.newFreeTerm("msg", "C")
+  internal.reificationSupport.setInfo(msg, typeOf[String])
+  try {
+    val csym = tb.define(q"""class C { override def toString = $msg }""")
+    println(tb.eval(q"new $csym"))
+  } catch {
+    case ToolBoxError(message, _) => println(s"compilation failed: $message")
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/t7570c.check b/test/files/run/t7570c.check
new file mode 100644
index 0000000..61e659d
--- /dev/null
+++ b/test/files/run/t7570c.check
@@ -0,0 +1,2 @@
+(class C,true,false,false)
+(object D,false,true,false)
diff --git a/test/files/run/t7570c.scala b/test/files/run/t7570c.scala
new file mode 100644
index 0000000..a5bdbff
--- /dev/null
+++ b/test/files/run/t7570c.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+import definitions._
+import Flag._
+
+object Test extends App {
+  val tb = cm.mkToolBox()
+  val csym = tb.define(q"""class C { override def toString = "C" }""")
+  println((csym, csym.isClass, csym.isModule, csym.isModuleClass))
+  val dsym = tb.define(q"""object D { override def toString = "D" }""".asInstanceOf[ModuleDef])
+  println((dsym, dsym.isClass, dsym.isModule, dsym.isModuleClass))
+}
\ No newline at end of file
diff --git a/test/files/run/t7582-private-within.check b/test/files/run/t7582-private-within.check
new file mode 100644
index 0000000..b2743ff
--- /dev/null
+++ b/test/files/run/t7582-private-within.check
@@ -0,0 +1,12 @@
+private[package pack] class JavaPackagePrivate
+private[package pack] module JavaPackagePrivate
+private[package pack] module class JavaPackagePrivate
+private[package pack] field field
+private[package pack] primary constructor <init>
+private[package pack] method meth
+private[package pack] field staticField
+private[package pack] method staticMeth
+private[package pack] method <clinit>
+private[package pack] field staticField
+private[package pack] method staticMeth
+private[package pack] method <clinit>
diff --git a/test/files/run/t7582-private-within/JavaPackagePrivate.java b/test/files/run/t7582-private-within/JavaPackagePrivate.java
new file mode 100644
index 0000000..672d19b
--- /dev/null
+++ b/test/files/run/t7582-private-within/JavaPackagePrivate.java
@@ -0,0 +1,8 @@
+package pack;
+
+class JavaPackagePrivate {
+	int field = 0;
+	static int staticField = 0;
+	void meth() { }
+	static void staticMeth() { }
+}
diff --git a/test/files/run/t7582-private-within/Test.scala b/test/files/run/t7582-private-within/Test.scala
new file mode 100644
index 0000000..3d581f0
--- /dev/null
+++ b/test/files/run/t7582-private-within/Test.scala
@@ -0,0 +1,22 @@
+import scala.tools.partest.DirectTest
+
+// Testing that the `privateWithin` field is correctly populated on all
+// the related symbols (e.g. module class) under separate compilation.
+object Test extends DirectTest {
+  def code = ???
+
+  def show(): Unit = {
+    val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+    val global = newCompiler("-usejavacp", "-cp", classpath, "-d", testOutput.path)
+    import global._
+    withRun(global) { _ =>
+      def check(sym: Symbol) = {
+        sym.initialize
+        println(f"${sym.accessString}%12s ${sym.accurateKindString} ${sym.name.decode}") // we want to see private[pack] for all of these.
+      }
+      val sym = rootMirror.getRequiredClass("pack.JavaPackagePrivate")
+      val syms = Seq(sym, sym.companionModule, sym.companionModule.moduleClass)
+      (syms ++ syms.flatMap(_.info.decls)).foreach(check)
+    }
+  }
+}
diff --git a/test/files/run/t7582.check b/test/files/run/t7582.check
new file mode 100644
index 0000000..225fb1a
--- /dev/null
+++ b/test/files/run/t7582.check
@@ -0,0 +1,2 @@
+warning: there were 1 inliner warning(s); re-run with -Yinline-warnings for details
+2
diff --git a/test/files/run/synchronized.flags b/test/files/run/t7582.flags
similarity index 100%
copy from test/files/run/synchronized.flags
copy to test/files/run/t7582.flags
diff --git a/test/files/run/t7582/InlineHolder.scala b/test/files/run/t7582/InlineHolder.scala
new file mode 100644
index 0000000..a18b9ef
--- /dev/null
+++ b/test/files/run/t7582/InlineHolder.scala
@@ -0,0 +1,16 @@
+package p1 {
+  object InlineHolder {
+    @inline def inlinable = p1.PackageProtectedJava.protectedMethod() + 1
+  }
+}
+
+object O {
+  @noinline
+  def x = p1.InlineHolder.inlinable
+}
+
+object Test {
+  def main(args: Array[String]) {
+    println(O.x)
+  }
+}
diff --git a/test/files/run/t7582/PackageProtectedJava.java b/test/files/run/t7582/PackageProtectedJava.java
new file mode 100644
index 0000000..b7ea2a7
--- /dev/null
+++ b/test/files/run/t7582/PackageProtectedJava.java
@@ -0,0 +1,6 @@
+package p1;
+
+// public class, package-private method
+public class PackageProtectedJava {
+	static final int protectedMethod() { return 1; }
+}
diff --git a/test/files/run/t7582b.check b/test/files/run/t7582b.check
new file mode 100644
index 0000000..225fb1a
--- /dev/null
+++ b/test/files/run/t7582b.check
@@ -0,0 +1,2 @@
+warning: there were 1 inliner warning(s); re-run with -Yinline-warnings for details
+2
diff --git a/test/files/run/synchronized.flags b/test/files/run/t7582b.flags
similarity index 100%
copy from test/files/run/synchronized.flags
copy to test/files/run/t7582b.flags
diff --git a/test/files/run/t7582b/InlineHolder.scala b/test/files/run/t7582b/InlineHolder.scala
new file mode 100644
index 0000000..a18b9ef
--- /dev/null
+++ b/test/files/run/t7582b/InlineHolder.scala
@@ -0,0 +1,16 @@
+package p1 {
+  object InlineHolder {
+    @inline def inlinable = p1.PackageProtectedJava.protectedMethod() + 1
+  }
+}
+
+object O {
+  @noinline
+  def x = p1.InlineHolder.inlinable
+}
+
+object Test {
+  def main(args: Array[String]) {
+    println(O.x)
+  }
+}
diff --git a/test/files/run/t7582b/PackageProtectedJava.java b/test/files/run/t7582b/PackageProtectedJava.java
new file mode 100644
index 0000000..55a44b7
--- /dev/null
+++ b/test/files/run/t7582b/PackageProtectedJava.java
@@ -0,0 +1,6 @@
+package p1;
+
+// package-private class, public method
+class PackageProtectedJava {
+	public static final int protectedMethod() { return 1; }
+}
diff --git a/test/files/run/t7584.check b/test/files/run/t7584.check
new file mode 100644
index 0000000..9f53e5d
--- /dev/null
+++ b/test/files/run/t7584.check
@@ -0,0 +1,6 @@
+no calls
+call A
+a
+call B twice
+b
+b
diff --git a/test/files/disabled/t7020.flags b/test/files/run/t7584.flags
similarity index 100%
rename from test/files/disabled/t7020.flags
rename to test/files/run/t7584.flags
diff --git a/test/files/run/t7584.scala b/test/files/run/t7584.scala
new file mode 100644
index 0000000..6d7f4f7
--- /dev/null
+++ b/test/files/run/t7584.scala
@@ -0,0 +1,14 @@
+// Test case added to show the behaviour of functions with
+// by-name parameters.  The evaluation behaviour was already correct.
+//
+// We did flush out a spurious "pure expression does nothing in statement position"
+// warning, hence -Xfatal-warnings in the flags file.
+object Test extends App {
+  def foo(f: (=> Int, => Int) => Unit) = f({println("a"); 0}, {println("b"); 1})
+  println("no calls")
+  foo((a, b) => ())
+  println("call A")
+  foo((a, b) => a)
+  println("call B twice")
+  foo((a, b) => {b; b})
+}
diff --git a/test/files/run/t7584b.scala b/test/files/run/t7584b.scala
new file mode 100644
index 0000000..fd560f0
--- /dev/null
+++ b/test/files/run/t7584b.scala
@@ -0,0 +1,14 @@
+object Test extends App {
+  def fold[A, B](f: (A, => B) => B) = (b: B) => f(null.asInstanceOf[A], b)
+  def f[A, B](x: A, y: B): B = y
+  def bip[A, B] = fold[A, B]((x, y) => f(x, y))
+  def bop[A, B] = fold[A, B](f(_, _))
+
+  // these work:
+  fold[Int, Int]((x, y) => f(x, y))(0)
+  fold[Int, Int](f(_, _))(0)
+
+  // Used to throw a ClassCastException. Since the fix for SI-7899, these issue type errors.
+  // fold[Int, Int](f _)(0)
+  // fold[Int, Int](f)(0)
+}
diff --git a/test/files/run/t7617a/Macros_1.scala b/test/files/run/t7617a/Macros_1.scala
index f9772c8..77b18c2 100644
--- a/test/files/run/t7617a/Macros_1.scala
+++ b/test/files/run/t7617a/Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 import language.experimental.macros
 
 object Macros {
diff --git a/test/files/run/t7617b/Macros_1.scala b/test/files/run/t7617b/Macros_1.scala
index bc91993..90fcfda 100644
--- a/test/files/run/t7617b/Macros_1.scala
+++ b/test/files/run/t7617b/Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def impl(c: Context)(name: c.Expr[String])(value: c.Expr[Any]) = {
diff --git a/test/files/run/t7617b/Test_2.scala b/test/files/run/t7617b/Test_2.scala
index e27f650..e1d9acd 100644
--- a/test/files/run/t7617b/Test_2.scala
+++ b/test/files/run/t7617b/Test_2.scala
@@ -2,7 +2,7 @@ import scala.language.dynamics
 import language.experimental.macros
 
 class C extends Dynamic {
-  def updateDynamic(name: String)(value: Any) = macro Macros.impl
+  def updateDynamic(name: String)(value: Any): Unit = macro Macros.impl
 }
 
 object Test extends App {
diff --git a/test/files/run/t7634.check b/test/files/run/t7634.check
new file mode 100644
index 0000000..aea3b94
--- /dev/null
+++ b/test/files/run/t7634.check
@@ -0,0 +1,8 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+
+scala> .lines
+res1: List[String] = List(shello, world.)
+
+scala> 
diff --git a/test/files/run/t7634.scala b/test/files/run/t7634.scala
new file mode 100644
index 0000000..aeb6a5e
--- /dev/null
+++ b/test/files/run/t7634.scala
@@ -0,0 +1,22 @@
+import java.io.File
+import scala.tools.partest.ReplTest
+import scala.util.Properties.propOrElse
+
+/**
+* filter out absolute path to java
+* filter: java
+*/
+object Test extends ReplTest {
+  def java = propOrElse("javacmd", "java")
+  def code = s""":sh $java -classpath $testOutput hello.Hello
+                |.lines""".stripMargin
+}
+
+package hello {
+  object Hello {
+    def main(a: Array[String]) {
+      System.out.println("shello, world.")
+    }
+  }
+}
+
diff --git a/test/files/run/t7657/Macros_1.scala b/test/files/run/t7657/Macros_1.scala
index b1e31aa..a883f76 100644
--- a/test/files/run/t7657/Macros_1.scala
+++ b/test/files/run/t7657/Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 import language.experimental.macros
 
 trait T { def t(): Unit }
diff --git a/test/files/run/t7700.check b/test/files/run/t7700.check
new file mode 100644
index 0000000..ca8e686
--- /dev/null
+++ b/test/files/run/t7700.check
@@ -0,0 +1,2 @@
+public abstract java.lang.Object C.bar(java.lang.Object)
+public abstract java.lang.Object C.foo(java.lang.Object)
diff --git a/test/files/run/t7700.scala b/test/files/run/t7700.scala
new file mode 100644
index 0000000..76d16b8
--- /dev/null
+++ b/test/files/run/t7700.scala
@@ -0,0 +1,17 @@
+import scala.annotation._
+
+trait C[@specialized U] {
+  @unspecialized
+  def foo(u: U): U
+  @unspecialized
+  def bar[A](u: U) = u
+}
+
+object Test extends App {
+  val declared = classOf[C[_]].getDeclaredMethods.sortBy(_.getName)
+  println(declared.mkString("\n"))
+  object CInt extends C[Int] { def foo(i: Int) = i }
+  object CAny extends C[Any] { def foo(a: Any) = a }
+  assert(CInt.foo(1) == 1)
+  assert(CAny.foo("") == "")
+}
diff --git a/test/files/run/t7711-script-args.check b/test/files/run/t7711-script-args.check
new file mode 100644
index 0000000..d107590
--- /dev/null
+++ b/test/files/run/t7711-script-args.check
@@ -0,0 +1,2 @@
+Hello, scripted test!
+What good news have you for me today?
diff --git a/test/files/run/t7711-script-args.scala b/test/files/run/t7711-script-args.scala
new file mode 100644
index 0000000..02535aa
--- /dev/null
+++ b/test/files/run/t7711-script-args.scala
@@ -0,0 +1,7 @@
+
+import scala.tools.partest.ScriptTest
+
+object Test extends ScriptTest {
+  override def extraSettings = s"${super.extraSettings} -Xlint"
+  override def argv          = Seq("good", "news")
+}
diff --git a/test/files/run/t7711-script-args.script b/test/files/run/t7711-script-args.script
new file mode 100644
index 0000000..19b7a74
--- /dev/null
+++ b/test/files/run/t7711-script-args.script
@@ -0,0 +1,12 @@
+#!/bin/bash
+exec ${SCALA_HOME}/bin/scala "$0" "$@" 2>&1
+!#
+
+Console println s"Hello, scripted test!"
+Console println s"What ${args mkString " "} have you for me today?"
+
+//def unused = 88
+//newSource1.scala:8: warning: private method in <$anon: AnyRef> is never used
+//Console println s"Hello, $argv, are you still here?"
+//newSource1.scala:9: error: not found: value argv
+
diff --git a/test/files/run/t7715.check b/test/files/run/t7715.check
new file mode 100644
index 0000000..592d7fe
--- /dev/null
+++ b/test/files/run/t7715.check
@@ -0,0 +1,3 @@
+6
+4
+4
diff --git a/test/files/run/t7715.scala b/test/files/run/t7715.scala
new file mode 100644
index 0000000..0ad3913
--- /dev/null
+++ b/test/files/run/t7715.scala
@@ -0,0 +1,24 @@
+
+import PartialFunction.cond
+import util._
+
+object Test extends App {
+
+  object I { def unapply(x: String): Option[Int] = Try(x.toInt).toOption }
+  implicit class RX(val sc: StringContext) {
+    def rx = sc.parts.mkString("(.+)").r
+  }
+
+  Console println ("2 by 4" match {
+    case rx"${I(a)} by ${I(b)}" => a+b
+    case _                      => -1
+  })
+  Console println ("2 by 4" match {
+    case rx"${_} by ${I(b)}"    => b    // pattern placeholder
+    case _                      => -1
+  })
+  Console println ("2 by 4" match {
+    case rx"$_ by ${I(b)}"      => b    // is permitted this way, too
+    case _                      => -1
+  })
+}
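
To spell out the trick in the test above: for the pattern rx"${I(a)} by ${I(b)}" the interpolation's literal parts are ("", " by ", ""), so sc.parts.mkString("(.+)") builds the regex (.+) by (.+) with one capture group per hole; a Regex used as a pattern binds each group to the corresponding hole, and the I extractor then parses the captured text. A standalone sketch of the same idea (illustrative names only):

    object RxSketch extends App {
      import scala.util.Try
      object I { def unapply(s: String): Option[Int] = Try(s.toInt).toOption }
      val rx = "(.+) by (.+)".r          // what rx"${I(a)} by ${I(b)}" expands to
      println("2 by 4" match {
        case rx(I(a), I(b)) => a + b     // 6, like the first case in the test
        case _              => -1
      })
    }
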
diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check
new file mode 100644
index 0000000..ad924f4
--- /dev/null
+++ b/test/files/run/t7747-repl.check
@@ -0,0 +1,286 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> var x = 10
+x: Int = 10
+
+scala> var y = 11
+y: Int = 11
+
+scala> x = 12
+x: Int = 12
+
+scala> y = 13
+y: Int = 13
+
+scala> val z = x * y
+z: Int = 156
+
+scala> 2 ; 3
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+              2 ;;
+              ^
+res0: Int = 3
+
+scala> { 2 ; 3 }
+<console>:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+              { 2 ; 3 }
+                ^
+res1: Int = 3
+
+scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
+  1 +
+  2 +
+  3 } ; bippy+88+11
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+              5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
+              ^
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+              5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
+                  ^
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+              5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
+                                         ^
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+              5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
+                                                                                                ^
+defined object Cow
+defined class Moo
+bippy: Int
+res2: Int = 105
+
+scala> 
+
+scala> object Bovine { var x: List[_] = null } ; case class Ruminant(x: Int) ; bippy * bippy * bippy
+defined object Bovine
+defined class Ruminant
+res3: Int = 216
+
+scala> Bovine.x = List(Ruminant(5), Cow, new Moo)
+Bovine.x: List[Any] = List(Ruminant(5), Cow, Moooooo)
+
+scala> Bovine.x
+res4: List[Any] = List(Ruminant(5), Cow, Moooooo)
+
+scala> 
+
+scala> (2)
+res5: Int = 2
+
+scala> (2 + 2)
+res6: Int = 4
+
+scala> ((2 + 2))
+res7: Int = 4
+
+scala>   ((2 + 2))
+res8: Int = 4
+
+scala>   (  (2 + 2))
+res9: Int = 4
+
+scala>   (  (2 + 2 )  )
+res10: Int = 4
+
+scala> 5 ;   (  (2 + 2 )  ) ; ((5))
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+              5 ;   (  (2 + 2 )  ) ;;
+              ^
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+              5 ;   (  (2 + 2 )  ) ;;
+                          ^
+res11: Int = 5
+
+scala> (((2 + 2)), ((2 + 2)))
+res12: (Int, Int) = (4,4)
+
+scala> (((2 + 2)), ((2 + 2)), 2)
+res13: (Int, Int, Int) = (4,4,2)
+
+scala> (((((2 + 2)), ((2 + 2)), 2).productIterator ++ Iterator(3)).mkString)
+res14: String = 4423
+
+scala> 
+
+scala> 55 ; ((2 + 2)) ; (1, 2, 3)
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+              55 ; ((2 + 2)) ;;
+              ^
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+              55 ; ((2 + 2)) ;;
+                       ^
+res15: (Int, Int, Int) = (1,2,3)
+
+scala> 55 ; (x: Int) => x + 1 ; () => ((5))
+<console>:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+              55 ; (x: Int) => x + 1 ;;
+              ^
+res16: () => Int = <function0>
+
+scala> 
+
+scala> () => 5
+res17: () => Int = <function0>
+
+scala> 55 ; () => 5
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+              55 ;;
+              ^
+res18: () => Int = <function0>
+
+scala> () => { class X ; new X }
+res19: () => AnyRef = <function0>
+
+scala> 
+
+scala> def foo(x: Int)(y: Int)(z: Int) = x+y+z
+foo: (x: Int)(y: Int)(z: Int)Int
+
+scala> foo(5)(10)(15)+foo(5)(10)(15)
+res20: Int = 60
+
+scala> 
+
+scala> List(1) ++ List('a')
+res21: List[AnyVal] = List(1, a)
+
+scala> 
+
+scala> 1 to 100 map (_  + 1)
+res22: scala.collection.immutable.IndexedSeq[Int] = Vector(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101)
+
+scala> val x1 = 1
+x1: Int = 1
+
+scala> val x2 = 2
+x2: Int = 2
+
+scala> val x3 = 3
+x3: Int = 3
+
+scala> case class BippyBungus()
+defined class BippyBungus
+
+scala> x1 + x2 + x3
+res23: Int = 6
+
+scala> :reset
+Resetting interpreter state.
+Forgetting this session history:
+
+var x = 10
+var y = 11
+x = 12
+y = 13
+val z = x * y
+2 ; 3
+{ 2 ; 3 }
+5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
+  1 +
+  2 +
+  3 } ; bippy+88+11
+object Bovine { var x: List[_] = null } ; case class Ruminant(x: Int) ; bippy * bippy * bippy
+Bovine.x = List(Ruminant(5), Cow, new Moo)
+Bovine.x
+(2)
+(2 + 2)
+((2 + 2))
+  ((2 + 2))
+  (  (2 + 2))
+  (  (2 + 2 )  )
+5 ;   (  (2 + 2 )  ) ; ((5))
+(((2 + 2)), ((2 + 2)))
+(((2 + 2)), ((2 + 2)), 2)
+(((((2 + 2)), ((2 + 2)), 2).productIterator ++ Iterator(3)).mkString)
+55 ; ((2 + 2)) ; (1, 2, 3)
+55 ; (x: Int) => x + 1 ; () => ((5))
+() => 5
+55 ; () => 5
+() => { class X ; new X }
+def foo(x: Int)(y: Int)(z: Int) = x+y+z
+foo(5)(10)(15)+foo(5)(10)(15)
+List(1) ++ List('a')
+1 to 100 map (_  + 1)
+val x1 = 1
+val x2 = 2
+val x3 = 3
+case class BippyBungus()
+x1 + x2 + x3
+
+Forgetting all expression results and named terms: $intp, BippyBungus, Bovine, Cow, Ruminant, bippy, foo, x, x1, x2, x3, y, z
+Forgetting defined types: BippyBungus, Moo, Ruminant
+
+scala> x1 + x2 + x3
+<console>:8: error: not found: value x1
+              x1 + x2 + x3
+              ^
+<console>:8: error: not found: value x2
+              x1 + x2 + x3
+                   ^
+<console>:8: error: not found: value x3
+              x1 + x2 + x3
+                        ^
+
+scala> val x1 = 4
+x1: Int = 4
+
+scala> new BippyBungus
+<console>:8: error: not found: type BippyBungus
+              new BippyBungus
+                  ^
+
+scala> class BippyBungus() { def f = 5 }
+defined class BippyBungus
+
+scala> { new BippyBungus ; x1 }
+res2: Int = 4
+
+scala> object x {class y { case object z } }
+defined object x
+
+scala> case class BippyBups()
+defined class BippyBups
+
+scala> case class PuppyPups()
+defined class PuppyPups
+
+scala> case class Bingo()
+defined class Bingo
+
+scala> List(BippyBups(), PuppyPups(), Bingo()) // show
+class $read extends Serializable {
+  def <init>() = {
+    super.<init>;
+    ()
+  };
+  class $iw extends Serializable {
+    def <init>() = {
+      super.<init>;
+      ()
+    };
+    import $line44.$read.$iw.$iw.BippyBups;
+    import $line44.$read.$iw.$iw.BippyBups;
+    import $line45.$read.$iw.$iw.PuppyPups;
+    import $line45.$read.$iw.$iw.PuppyPups;
+    import $line46.$read.$iw.$iw.Bingo;
+    import $line46.$read.$iw.$iw.Bingo;
+    class $iw extends Serializable {
+      def <init>() = {
+        super.<init>;
+        ()
+      };
+      val res3 = List(BippyBups, PuppyPups, Bingo)
+    };
+    val $iw = new $iw.<init>
+  };
+  val $iw = new $iw.<init>
+}
+object $read extends $read {
+  def <init>() = {
+    super.<init>;
+    ()
+  }
+}
+res3: List[Product with Serializable] = List(BippyBups(), PuppyPups(), Bingo())
+
+scala> 
diff --git a/test/files/run/t7747-repl.scala b/test/files/run/t7747-repl.scala
new file mode 100644
index 0000000..0e64210
--- /dev/null
+++ b/test/files/run/t7747-repl.scala
@@ -0,0 +1,69 @@
+import scala.tools.partest.ReplTest
+import scala.tools.nsc.Settings
+
+object Test extends ReplTest {
+
+  override def transformSettings(s: Settings): Settings = {
+    s.Yreplclassbased.value = true
+    s
+  }
+
+  def code = """
+    |var x = 10
+    |var y = 11
+    |x = 12
+    |y = 13
+    |val z = x * y
+    |2 ; 3
+    |{ 2 ; 3 }
+    |5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
+    |  1 +
+    |  2 +
+    |  3 } ; bippy+88+11
+    |
+    |object Bovine { var x: List[_] = null } ; case class Ruminant(x: Int) ; bippy * bippy * bippy
+    |Bovine.x = List(Ruminant(5), Cow, new Moo)
+    |Bovine.x
+    |
+    |(2)
+    |(2 + 2)
+    |((2 + 2))
+    |  ((2 + 2))
+    |  (  (2 + 2))
+    |  (  (2 + 2 )  )
+    |5 ;   (  (2 + 2 )  ) ; ((5))
+    |(((2 + 2)), ((2 + 2)))
+    |(((2 + 2)), ((2 + 2)), 2)
+    |(((((2 + 2)), ((2 + 2)), 2).productIterator ++ Iterator(3)).mkString)
+    |
+    |55 ; ((2 + 2)) ; (1, 2, 3)
+    |55 ; (x: Int) => x + 1 ; () => ((5))
+    |
+    |() => 5
+    |55 ; () => 5
+    |() => { class X ; new X }
+    |
+    |def foo(x: Int)(y: Int)(z: Int) = x+y+z
+    |foo(5)(10)(15)+foo(5)(10)(15)
+    |
+    |List(1) ++ List('a')
+    |
+    |1 to 100 map (_  + 1)
+    |val x1 = 1
+    |val x2 = 2
+    |val x3 = 3
+    |case class BippyBungus()
+    |x1 + x2 + x3
+    |:reset
+    |x1 + x2 + x3
+    |val x1 = 4
+    |new BippyBungus
+    |class BippyBungus() { def f = 5 }
+    |{ new BippyBungus ; x1 }
+    |object x {class y { case object z } }
+    |case class BippyBups()
+    |case class PuppyPups()
+    |case class Bingo()
+    |List(BippyBups(), PuppyPups(), Bingo()) // show
+    |""".stripMargin
+}
diff --git a/test/files/run/t7763.scala b/test/files/run/t7763.scala
new file mode 100644
index 0000000..638077e
--- /dev/null
+++ b/test/files/run/t7763.scala
@@ -0,0 +1,20 @@
+object Test {
+  class A; class B
+  def main(args: Array[String]) {
+    def noExpectedType() {
+      a().asInstanceOf[B] // cast elided!
+    }
+    def withExpectedType(): B = {
+      a().asInstanceOf[B]
+    }
+    def test(a: => Any) = try {
+      a
+      sys.error("no CCE!")
+    } catch {case _: ClassCastException => }
+
+    test(noExpectedType())
+    test(withExpectedType())
+  }
+
+  def a(): Object = new A
+}
diff --git a/test/files/run/t7775.scala b/test/files/run/t7775.scala
index 5fb0327..48b0d89 100644
--- a/test/files/run/t7775.scala
+++ b/test/files/run/t7775.scala
@@ -1,4 +1,4 @@
-import scala.concurrent.{duration, future, Await, ExecutionContext}
+import scala.concurrent.{duration, Future, Await, ExecutionContext}
 import scala.tools.nsc.Settings
 import ExecutionContext.Implicits.global
 
@@ -8,7 +8,7 @@ import ExecutionContext.Implicits.global
 object Test {
   def main(args: Array[String]) {
     val tries = 1000 // YMMV
-    val compiler = future {
+    val compiler = Future {
       for(_ <- 1 to tries) new Settings(_ => {})
     }
     for(i <- 1 to tries * 10) System.setProperty(s"foo$i", i.toString)
diff --git a/test/files/run/t7777.check b/test/files/run/t7777.check
new file mode 100644
index 0000000..162ff2d
--- /dev/null
+++ b/test/files/run/t7777.check
@@ -0,0 +1,7 @@
+foo(1, 2)
+bar(4, 5)
+foo(3)
+bar(7)
+apply(6)
+apply(9)
+foo(8)
diff --git a/test/files/run/t7777/Macros_1.scala b/test/files/run/t7777/Macros_1.scala
new file mode 100644
index 0000000..1dc6d67
--- /dev/null
+++ b/test/files/run/t7777/Macros_1.scala
@@ -0,0 +1,17 @@
+import scala.language.experimental.macros
+import scala.language.dynamics
+import scala.reflect.macros.whitebox.Context
+
+class DynMacro extends Dynamic {
+  def applyDynamic(s: String)(xs: Any*): DynMacro =
+    macro DynMacro.applyDynamicMacro
+}
+
+object DynMacro extends DynMacro {
+  def applyDynamicMacro(c: Context)(s: c.Expr[String])(xs: c.Expr[Any]*): c.Expr[DynMacro] = {
+    import c.universe._
+    val Literal(Constant(n: String)) = s.tree
+    val args = xs.map(_.tree.toString).mkString("(", ", ", ")")
+    c.Expr(q"println(${ n + args }); ${c.prefix.tree}")
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/t7777/Test_2.scala b/test/files/run/t7777/Test_2.scala
new file mode 100644
index 0000000..1fe8b63
--- /dev/null
+++ b/test/files/run/t7777/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+  DynMacro.foo(1, 2)         // prints "foo(1, 2)"
+  DynMacro.foo(3).bar(4, 5)  // prints "bar(4, 5)", then "foo(3)"
+  DynMacro(6).bar(7)         // prints "bar(7)", then "apply(6)"
+  DynMacro.foo(8)(9)         // Fails!
+}
\ No newline at end of file
diff --git a/test/files/run/t7791-script-linenums.check b/test/files/run/t7791-script-linenums.check
new file mode 100644
index 0000000..b7d9695
--- /dev/null
+++ b/test/files/run/t7791-script-linenums.check
@@ -0,0 +1 @@
+hello, scripted test
diff --git a/test/files/run/t7791-script-linenums.scala b/test/files/run/t7791-script-linenums.scala
new file mode 100644
index 0000000..d89b8d4
--- /dev/null
+++ b/test/files/run/t7791-script-linenums.scala
@@ -0,0 +1,16 @@
+
+import scala.tools.partest.ScriptTest
+
+object Test extends ScriptTest {
+  object ExceptionLine {
+    def unapply(e: Exception) = Some(e.getStackTrace()(0).getLineNumber)
+  }
+  override def show() = {
+    import util._
+    Try(super.show()) match {
+      case Failure(ExceptionLine(7)) => ()
+      case Failure(e) => e.printStackTrace()
+      case Success(_) => Console println "Expected error"
+    }
+  }
+}
diff --git a/test/files/run/t7791-script-linenums.script b/test/files/run/t7791-script-linenums.script
new file mode 100644
index 0000000..403dcc2
--- /dev/null
+++ b/test/files/run/t7791-script-linenums.script
@@ -0,0 +1,8 @@
+#!/bin/bash
+exec ${SCALA_HOME}/bin/scala "$0" "$@" 2>&1
+!#
+
+Console println s"hello, scripted test"
+
+throw new RuntimeException("failing")  // line 7
+
diff --git a/test/files/run/t7801.check b/test/files/run/t7801.check
new file mode 100644
index 0000000..d72060c
--- /dev/null
+++ b/test/files/run/t7801.check
@@ -0,0 +1,11 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> val g: scala.reflect.internal.SymbolTable = null; import g.abort
+g: scala.reflect.internal.SymbolTable = null
+import g.abort
+
+scala> class C(val a: Any) extends AnyVal
+defined class C
+
+scala> 
diff --git a/test/files/run/t7801.scala b/test/files/run/t7801.scala
new file mode 100644
index 0000000..3a3cc97
--- /dev/null
+++ b/test/files/run/t7801.scala
@@ -0,0 +1,12 @@
+import scala.tools.partest.ReplTest
+
+// was crashing due to a subtle interaction of the Namer entering packages into
+// enclosing packages by mutating the scope in place without invalidating later
+// entries in the enclosing package class symbol's type history.
+//
+// Sadly, I couldn't whittle the test case down further.
+object Test extends ReplTest {
+  override def code = """val g: scala.reflect.internal.SymbolTable = null; import g.abort
+                        |class C(val a: Any) extends AnyVal""".stripMargin
+
+}
diff --git a/test/files/run/t7805-repl-i.check b/test/files/run/t7805-repl-i.check
new file mode 100644
index 0000000..eecfff0
--- /dev/null
+++ b/test/files/run/t7805-repl-i.check
@@ -0,0 +1,11 @@
+Loading t7805-repl-i.script...
+import util._
+
+Welcome to Scala
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> Console println Try(8)
+Success(8)
+
+scala> 
diff --git a/test/files/run/t7805-repl-i.scala b/test/files/run/t7805-repl-i.scala
new file mode 100644
index 0000000..208cb5d
--- /dev/null
+++ b/test/files/run/t7805-repl-i.scala
@@ -0,0 +1,42 @@
+
+import scala.tools.partest.{ ReplTest, Welcoming }
+import scala.tools.nsc.{ GenericRunnerSettings, Settings }
+import scala.tools.nsc.settings.MutableSettings
+
+object Test extends ReplTest with HangingRepl with Welcoming {
+  def script = testPath changeExtension "script"
+  override def transformSettings(s: Settings) = s match {
+    case m: MutableSettings =>
+      val t = new GenericRunnerSettings(s.errorFn)
+      m copyInto t
+      t processArgumentString s"-i $script"
+      t
+    case _ => s
+  }
+  def code = "Console println Try(8)"
+}
+
+object Resulting {
+  import scala.concurrent._
+  import scala.concurrent.duration._
+  implicit class AwaitResult[A](val f: Future[A]) extends AnyVal {
+    def resultWithin(d: Duration): A = Await.result(f, d)
+  }
+}
+
+/** Test that hangs the REPL.
+ *  Usually that is the "before" case.
+ */
+trait HangingRepl extends ReplTest {
+  import scala.language.postfixOps
+  import scala.util._
+  import scala.concurrent._
+  import scala.concurrent.duration._
+  import ExecutionContext.Implicits._
+  import Resulting._
+  def timeout = 120 seconds
+  def hanging[A](a: =>A): A = Future(a) resultWithin timeout
+  override def show() = Try(hanging(super.show())) recover {
+    case e => e.printStackTrace()
+  }
+}
diff --git a/test/files/run/t7805-repl-i.script b/test/files/run/t7805-repl-i.script
new file mode 100644
index 0000000..eb2b870
--- /dev/null
+++ b/test/files/run/t7805-repl-i.script
@@ -0,0 +1 @@
+import util._
diff --git a/test/files/run/t7817-tree-gen.check b/test/files/run/t7817-tree-gen.check
new file mode 100644
index 0000000..4ed4b0d
--- /dev/null
+++ b/test/files/run/t7817-tree-gen.check
@@ -0,0 +1,104 @@
+
+
+Joint Compilation:
+
+             typer [           O]  -  O.this
+           pickler [           O]  -  O.this
+         refchecks [           O]  -  O.this
+           uncurry [           O]  -  O.this
+        specialize [           O]  -  O.this
+     explicitouter [           O]  -  O.this
+           erasure [           O]  -  O.this
+       posterasure [           O]  -  C.this.O()
+           flatten [           O]  -  C.this.O()
+             mixin [           O]  -  test.O()
+           cleanup [           O]  -  test.O()
+
+             typer [           P]  -  P.this
+           pickler [           P]  -  P.this
+         refchecks [           P]  -  P.this
+           uncurry [           P]  -  P.this
+        specialize [           P]  -  P.this
+     explicitouter [           P]  -  P.this
+           erasure [           P]  -  P.this
+       posterasure [           P]  -  D.this.P()
+           flatten [           P]  -  D.this.P()
+             mixin [           P]  -  P()
+           cleanup [           P]  -  P()
+
+             typer [    test2.PO]  -  PO.this
+           pickler [    test2.PO]  -  PO.this
+         refchecks [    test2.PO]  -  PO.this
+           uncurry [    test2.PO]  -  PO.this
+        specialize [    test2.PO]  -  PO.this
+     explicitouter [    test2.PO]  -  PO.this
+           erasure [    test2.PO]  -  PO.this
+       posterasure [    test2.PO]  -  test2.`package`.PO
+           flatten [    test2.PO]  -  test2.`package`.PO
+             mixin [    test2.PO]  -  test2.package$PO
+           cleanup [    test2.PO]  -  test2.package$PO
+
+             typer [   test2.bar]  -  `package`.this.bar
+           pickler [   test2.bar]  -  `package`.this.bar
+         refchecks [   test2.bar]  -  `package`.this.bar
+           uncurry [   test2.bar]  -  `package`.this.bar
+        specialize [   test2.bar]  -  `package`.this.bar
+     explicitouter [   test2.bar]  -  `package`.this.bar
+           erasure [   test2.bar]  -  `package`.this.bar
+       posterasure [   test2.bar]  -  test2.`package`.bar
+           flatten [   test2.bar]  -  test2.`package`.bar
+             mixin [   test2.bar]  -  test2.`package`.bar
+           cleanup [   test2.bar]  -  test2.`package`.bar
+
+
+
+Separate Compilation:
+
+             typer [           O]  -  O.this
+           pickler [           O]  -  O.this
+         refchecks [           O]  -  O.this
+           uncurry [           O]  -  O.this
+        specialize [           O]  -  O.this
+     explicitouter [           O]  -  O.this
+           erasure [           O]  -  O.this
+       posterasure [           O]  -  C.this.O()
+           flatten [           O]  -  C.this.O()
+             mixin [           O]  -  testSep.O()
+           cleanup [           O]  -  testSep.O()
+
+             typer [           P]  -  P.this
+           pickler [           P]  -  P.this
+         refchecks [           P]  -  P.this
+           uncurry [           P]  -  P.this
+        specialize [           P]  -  P.this
+     explicitouter [           P]  -  P.this
+           erasure [           P]  -  P.this
+       posterasure [           P]  -  DSep.this.P()
+           flatten [           P]  -  DSep.this.P()
+             mixin [           P]  -  P()
+           cleanup [           P]  -  P()
+
+             typer [          PO]  -  PO.this
+           pickler [          PO]  -  PO.this
+         refchecks [          PO]  -  PO.this
+           uncurry [          PO]  -  PO.this
+        specialize [          PO]  -  PO.this
+     explicitouter [          PO]  -  PO.this
+           erasure [          PO]  -  PO.this
+       posterasure [          PO]  -  test2.`package`.PO
+           flatten [          PO]  -  test2.`package`.PO
+             mixin [          PO]  -  test2.package$PO
+           cleanup [          PO]  -  test2.package$PO
+
+             typer [testSep2.bar]  -  `package`.this.bar
+           pickler [testSep2.bar]  -  `package`.this.bar
+         refchecks [testSep2.bar]  -  `package`.this.bar
+           uncurry [testSep2.bar]  -  `package`.this.bar
+        specialize [testSep2.bar]  -  `package`.this.bar
+     explicitouter [testSep2.bar]  -  `package`.this.bar
+           erasure [testSep2.bar]  -  `package`.this.bar
+       posterasure [testSep2.bar]  -  test2.`package`.bar
+           flatten [testSep2.bar]  -  test2.`package`.bar
+             mixin [testSep2.bar]  -  test2.`package`.bar
+           cleanup [testSep2.bar]  -  test2.`package`.bar
+
diff --git a/test/files/run/t7817-tree-gen.flags b/test/files/run/t7817-tree-gen.flags
new file mode 100644
index 0000000..ce6e93b
--- /dev/null
+++ b/test/files/run/t7817-tree-gen.flags
@@ -0,0 +1 @@
+-Ynooptimise
\ No newline at end of file
diff --git a/test/files/run/t7817-tree-gen.scala b/test/files/run/t7817-tree-gen.scala
new file mode 100644
index 0000000..a8317fd
--- /dev/null
+++ b/test/files/run/t7817-tree-gen.scala
@@ -0,0 +1,65 @@
+import scala.tools.partest._
+
+// Testing that `mkAttributedRef` doesn't include the package object test.`package`,
+// under joint and separate compilation.
+
+package testSep { class C { object O } }
+package testSep2 { object `package` { object PO; def bar = 0 } }
+class DSep { object P }
+
+object Test extends CompilerTest {
+  import global._
+  override def extraSettings = super.extraSettings + " -d " + testOutput.path
+  override def sources = List(
+    """
+    package test { class C { object O } }
+    class D { object P }
+    package test2 { object `package` { object PO; def bar = 0 } }
+    """
+  )
+  def check(source: String, unit: CompilationUnit) = enteringTyper {
+    def checkTree(msg: String, t: => Tree) = {
+      val run = currentRun
+      import run._
+      val phases = List(typerPhase, picklerPhase, refchecksPhase, uncurryPhase, specializePhase,
+        explicitouterPhase, erasurePhase, posterasurePhase, flattenPhase, mixinPhase, cleanupPhase)
+      for (phase <- phases) {
+        enteringPhase(phase) {
+          val error = t.exists(t => t.symbol == NoSymbol)
+          val errorStr = if (error) "!!!" else " - "
+          println(f"$phase%18s [$msg%12s] $errorStr $t")
+        }
+      }
+      println("")
+    }
+    import rootMirror._
+
+    println("\n\nJoint Compilation:\n")
+
+    {
+      val c = staticClass("test.C")
+      val o = c.info.decl(TermName("O"))
+      checkTree("O", gen.mkAttributedQualifier(o.moduleClass.thisType))
+      val d = staticClass("D")
+      val p = d.info.decl(TermName("P"))
+      checkTree("P", gen.mkAttributedQualifier(p.moduleClass.thisType))
+      val po = staticModule("test2.package").moduleClass.info.decl(TermName("PO"))
+      checkTree("test2.PO", gen.mkAttributedQualifier(po.moduleClass.thisType))
+      checkTree("test2.bar", gen.mkAttributedRef(po.owner.info.decl(TermName("bar"))))
+    }
+
+    println("\n\nSeparate Compilation:\n")
+
+    {
+      val c = typeOf[testSep.C].typeSymbol
+      val o = c.info.decl(TermName("O"))
+      checkTree("O", gen.mkAttributedQualifier(o.moduleClass.thisType))
+      val d = staticClass("DSep")
+      val p = d.info.decl(TermName("P"))
+      checkTree("P", gen.mkAttributedQualifier(p.moduleClass.thisType))
+      val po = staticModule("test2.package").moduleClass.info.decl(TermName("PO"))
+      checkTree("PO", gen.mkAttributedQualifier(po.moduleClass.thisType))
+      checkTree("testSep2.bar", gen.mkAttributedRef(po.owner.info.decl(TermName("bar"))))
+    }
+  }
+}
diff --git a/test/files/run/t7817.scala b/test/files/run/t7817.scala
new file mode 100644
index 0000000..905b8ae
--- /dev/null
+++ b/test/files/run/t7817.scala
@@ -0,0 +1,31 @@
+import language.reflectiveCalls
+
+package test {
+  class C1 {
+    object O {
+      def struct(s: {def foo: Any}) = s.foo
+    }
+  }
+  trait T {
+    object O {
+      def struct(s: {def foo: Any}) = s.foo
+    }
+  }
+  object O1 extends T
+
+  object O2 {
+    object O {
+      def struct(s: {def foo: Any}) = s.foo
+    }
+  }
+}
+
+object Test extends App {
+  object fooable { def foo = "foo" }
+  def check(result: Any) = assert(result == "foo", result.toString)
+
+  val s = new test.C1
+  check(s.O.struct(fooable))
+  check(test.O1.O.struct(fooable))
+  check(test.O2.O.struct(fooable))
+}
diff --git a/test/files/run/t7843-jsr223-service.check b/test/files/run/t7843-jsr223-service.check
new file mode 100644
index 0000000..a668df3
--- /dev/null
+++ b/test/files/run/t7843-jsr223-service.check
@@ -0,0 +1,2 @@
+n: Object = 10
+12345678910
diff --git a/test/files/run/t7843-jsr223-service.scala b/test/files/run/t7843-jsr223-service.scala
new file mode 100644
index 0000000..3111221
--- /dev/null
+++ b/test/files/run/t7843-jsr223-service.scala
@@ -0,0 +1,8 @@
+import scala.tools.nsc.interpreter.IMain
+
+object Test extends App {
+  val engine = new IMain.Factory getScriptEngine()
+  engine.asInstanceOf[IMain].settings.usejavacp.value = true
+  engine put ("n", 10)
+  engine eval "1 to n.asInstanceOf[Int] foreach print"
+}
diff --git a/test/files/run/t7852.flags b/test/files/run/t7852.flags
new file mode 100644
index 0000000..f6262fd
--- /dev/null
+++ b/test/files/run/t7852.flags
@@ -0,0 +1 @@
+-Ynooptimise
diff --git a/test/files/run/t7852.scala b/test/files/run/t7852.scala
new file mode 100644
index 0000000..c93db71
--- /dev/null
+++ b/test/files/run/t7852.scala
@@ -0,0 +1,39 @@
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm
+import scala.tools.asm.util._
+import scala.tools.nsc.util.stringFromWriter
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+  val nullChecks = Set(asm.Opcodes.IFNONNULL, asm.Opcodes.IFNULL)
+
+  def show: Unit = {
+    def test(methodName: String, expected: Int) {
+      val classNode = loadClassNode("Lean")
+      val methodNode = getMethod(classNode, methodName)
+      val got = countNullChecks(methodNode.instructions)
+      assert(got == expected, s"expected $expected but got $got comparisons")
+    }
+    test("string", expected = 0)
+    test("module", expected = 0)
+    test("moduleIndirect", expected = 2)
+  }
+
+  def countNullChecks(insnList: asm.tree.InsnList): Int =
+    insnList.iterator.asScala.map(_.getOpcode).count(nullChecks)
+}
+
+class Lean {
+  def string {
+    "" == toString
+  }
+
+  def module {
+    Nil == (toString: Any)
+  }
+
+  def moduleIndirect {
+    val n: Nil.type = null
+    n == (toString: Any) // still need null checks here.
+  }
+}
diff --git a/test/files/run/t7859/A_1.scala b/test/files/run/t7859/A_1.scala
new file mode 100644
index 0000000..74f0709
--- /dev/null
+++ b/test/files/run/t7859/A_1.scala
@@ -0,0 +1,11 @@
+class A(private val x: Int) extends AnyVal
+
+object A {
+  val Const = new A(0)
+}
+
+class A1(protected val x: Int) extends AnyVal
+
+package p {
+  class A2(private[p] val x: Int) extends AnyVal
+}
diff --git a/test/files/run/t7859/B_2.scala b/test/files/run/t7859/B_2.scala
new file mode 100644
index 0000000..6b23af3
--- /dev/null
+++ b/test/files/run/t7859/B_2.scala
@@ -0,0 +1,47 @@
+class B private (private val b: Int) extends AnyVal
+object B {
+  val Const = new B(0)
+}
+
+// These tests will require erasure to unbox the value class.
+// We need to test under joint and separate compilation to check
+// that the 'notPRIVATE' flag on the param accessor is pickled.
+//
+// See also SI-6601.
+object Test {
+  def main(args: Array[String]) {
+    unboxA
+    unboxA1
+    unboxA2
+    unboxB
+  }
+
+  def unboxA {
+    val o: Some[A] = Some(A.Const)
+    val a = o.get
+    def id(a: A): A = a
+    id(a)
+  }
+
+  def unboxA1 {
+    val o: Some[A1] = Some(new A1(0))
+    val a = o.get
+    def id(a: A1): A1 = a
+    id(a)
+  }
+
+  def unboxA2 {
+    import p.A2
+    val o: Some[A2] = Some(new A2(0))
+    val a = o.get
+    def id(a: A2): A2 = a
+    id(a)
+  }
+
+  def unboxB {
+    val o: Some[B] = Some(B.Const)
+    val b = o.get
+    def id(b: B): B = b
+    id(b)
+  }
+}
diff --git a/test/files/run/t7868.scala b/test/files/run/t7868.scala
new file mode 100644
index 0000000..1f938ad
--- /dev/null
+++ b/test/files/run/t7868.scala
@@ -0,0 +1,13 @@
+object A {
+  def unapply(n: Int): Option[Int] = Some(n)
+
+  def run = (0: Short) match {
+    case A(_) =>
+    case _    =>
+  }
+}
+
+
+object Test extends App {
+  A.run
+}
diff --git a/test/files/run/t7868b.check b/test/files/run/t7868b.check
new file mode 100644
index 0000000..6577c4b
--- /dev/null
+++ b/test/files/run/t7868b.check
@@ -0,0 +1,6 @@
+Expr[Int]({
+  val x = (0: Short): @unchecked match {
+    case A((x @ _)) => x
+  };
+  x
+})
diff --git a/test/files/run/t7868b.scala b/test/files/run/t7868b.scala
new file mode 100644
index 0000000..759eeaf
--- /dev/null
+++ b/test/files/run/t7868b.scala
@@ -0,0 +1,11 @@
+object A {
+  def unapply(n: Int): Option[Int] = Some(1)
+}
+
+object Test extends App {
+  import reflect.runtime.universe._
+  println(reify {
+    val A(x) = (0: Short)
+    x
+  })
+}
diff --git a/test/files/run/t7871.check b/test/files/run/t7871.check
new file mode 100644
index 0000000..ce6efd8
--- /dev/null
+++ b/test/files/run/t7871.check
@@ -0,0 +1 @@
+(SomeTree,SomeTree)
diff --git a/test/files/run/t7871/Macros_1.scala b/test/files/run/t7871/Macros_1.scala
new file mode 100644
index 0000000..dca2508
--- /dev/null
+++ b/test/files/run/t7871/Macros_1.scala
@@ -0,0 +1,27 @@
+import scala.reflect.macros.whitebox.Context
+import language.experimental.macros
+
+trait Tree
+case object SomeTree extends Tree
+
+object NewQuasiquotes {
+  implicit class QuasiquoteInterpolation(c: StringContext) {
+    object nq {
+      def unapply(t: Tree): Any = macro QuasiquoteMacros.unapplyImpl
+    }
+  }
+}
+
+object QuasiquoteMacros {
+  def unapplyImpl(c: Context)(t: c.Tree) = {
+    import c.universe._
+    q"""
+      new {
+        def unapply(t: Tree) = t match {
+          case SomeTree => Some((SomeTree, SomeTree))
+          case _ => None
+        }
+      }.unapply($t)
+    """
+  }
+}
diff --git a/test/files/run/t7871/Test_2.scala b/test/files/run/t7871/Test_2.scala
new file mode 100644
index 0000000..3a0b68b
--- /dev/null
+++ b/test/files/run/t7871/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+  import NewQuasiquotes._
+  SomeTree match {
+    case nq"$x + $y" => println((x, y))
+  }
+}
diff --git a/test/files/run/t7876.scala b/test/files/run/t7876.scala
new file mode 100644
index 0000000..aeec8c8
--- /dev/null
+++ b/test/files/run/t7876.scala
@@ -0,0 +1,26 @@
+import scala.tools.partest._
+
+// Type constructors for FunctionN and TupleN should not be considered function types / tuple types.
+object Test extends DirectTest {
+  override def extraSettings: String = "-usejavacp"
+
+  def code = ""
+
+  def show() {
+    val global = newCompiler()
+    new global.Run()
+    import global._, definitions._
+    val function0TC = FunctionClass(0).typeConstructor
+    val tuple1TC = TupleClass(1).typeConstructor
+    FunctionClass.seq.foreach { sym =>
+      val tc = sym.typeConstructor
+      assert(!isFunctionType(tc), s"$tc")
+      assert(!isFunctionTypeDirect(tc), s"$tc (direct)")
+    }
+    TupleClass.seq.foreach { sym =>
+      val tc = sym.typeConstructor
+      assert(!isTupleType(tc), s"$sym")
+      assert(!isTupleTypeDirect(tc), s"$tc (direct)")
+    }
+  }
+}
diff --git a/test/files/run/t7880.scala b/test/files/run/t7880.scala
new file mode 100644
index 0000000..9fbc337
--- /dev/null
+++ b/test/files/run/t7880.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+  // This should terminate in one way or another, but it shouldn't loop forever.
+  try {
+    val buffer = collection.mutable.ArrayBuffer.fill(Int.MaxValue / 2 + 1)(0)
+    buffer append 1
+  } catch { case _: OutOfMemoryError => }
+}
diff --git a/test/files/run/t7899-regression.check b/test/files/run/t7899-regression.check
new file mode 100644
index 0000000..602b03a
--- /dev/null
+++ b/test/files/run/t7899-regression.check
@@ -0,0 +1 @@
+warning: -Yinfer-by-name is deprecated: This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug.
diff --git a/test/files/run/t7899-regression.flags b/test/files/run/t7899-regression.flags
new file mode 100644
index 0000000..553a27e
--- /dev/null
+++ b/test/files/run/t7899-regression.flags
@@ -0,0 +1 @@
+-Yinfer-by-name -deprecation
diff --git a/test/files/run/t7899-regression.scala b/test/files/run/t7899-regression.scala
new file mode 100644
index 0000000..67d38cd
--- /dev/null
+++ b/test/files/run/t7899-regression.scala
@@ -0,0 +1,24 @@
+import language.higherKinds
+
+object Test {
+  trait Monad[M[_]] {
+    def foo[A](ma: M[A])(f: M[A] => Any) = f(ma)
+  }
+  implicit def function1Covariant[T]: Monad[({type l[a] = (T => a)})#l] =
+    new Monad[({type l[a] = (T => a)})#l] {}
+
+  def main(args: Array[String]) {
+    // inference of T = (=> Any) here was outlawed by SI-7899 / 8ed7099
+    // but this pattern is used in Scalaz in just a few places and caused
+    // a regression.
+    //
+    // Inference of a by-name type doesn't *always* lead to a ClassCastException;
+    // it only does so if a method in generic code accepts a parameter of
+    // that type.
+    //
+    // We need to introduce the stricter inference rules gradually, probably
+    // with a warning.
+    val m = implicitly[Monad[({type f[+x] = (=> Any) => x})#f]]
+    assert(m.foo[Int]((x => 0))(f => f(???)) == 0)
+  }
+}
diff --git a/test/files/run/t7899.scala b/test/files/run/t7899.scala
new file mode 100644
index 0000000..5879d4b
--- /dev/null
+++ b/test/files/run/t7899.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+  def id[A](a: => A): A = null.asInstanceOf[A]
+  def foo(f: (=> Int) => Int) = () => f(???)
+  foo(id)() // should be allowed and not throw ???
+}
diff --git a/test/files/run/t7932.check b/test/files/run/t7932.check
new file mode 100644
index 0000000..13d64f1
--- /dev/null
+++ b/test/files/run/t7932.check
@@ -0,0 +1,3 @@
+warning: there were 1 feature warning(s); re-run with -feature for details
+public Category<?> C.category()
+public Category<scala.Tuple2> C.category1()
diff --git a/test/files/run/t7932.scala b/test/files/run/t7932.scala
new file mode 100644
index 0000000..8743abf
--- /dev/null
+++ b/test/files/run/t7932.scala
@@ -0,0 +1,11 @@
+class Category[M[_, _]]
+trait M[F] {
+  type X[a, b] = F
+  def category: Category[X] = null
+  def category1: Category[Tuple2] = null
+}
+abstract class C extends M[Float]
+object Test extends App {
+  val ms = classOf[C].getMethods.filter(_.getName.startsWith("category"))
+  println(ms.map(_.toGenericString).sorted.mkString("\n"))
+}
diff --git a/test/files/run/t7933.check b/test/files/run/t7933.check
new file mode 100644
index 0000000..317e967
--- /dev/null
+++ b/test/files/run/t7933.check
@@ -0,0 +1,2 @@
+hello
+hello
diff --git a/test/files/run/t7933.scala b/test/files/run/t7933.scala
new file mode 100644
index 0000000..b06dffc
--- /dev/null
+++ b/test/files/run/t7933.scala
@@ -0,0 +1,11 @@
+import scala.tools.nsc.interpreter.IMain
+
+object Test extends App {
+  val engine = new IMain.Factory getScriptEngine()
+  engine.asInstanceOf[IMain].settings.usejavacp.value = true
+  val res2 = engine.asInstanceOf[javax.script.Compilable]
+  res2 compile "8" eval()
+  val res5 = res2 compile """println("hello") ; 8"""
+  res5 eval()
+  res5 eval()
+}
diff --git a/test/files/run/t7974.check b/test/files/run/t7974.check
new file mode 100644
index 0000000..0be496d
--- /dev/null
+++ b/test/files/run/t7974.check
@@ -0,0 +1,104 @@
+public class Symbols {
+
+  // compiled from: Symbols.scala
+
+
+
+  // access flags 0x12
+  private final Lscala/Symbol; someSymbol3
+
+  // access flags 0xA
+  private static Lscala/Symbol; symbol$1
+
+  // access flags 0xA
+  private static Lscala/Symbol; symbol$2
+
+  // access flags 0xA
+  private static Lscala/Symbol; symbol$3
+
+  // access flags 0x9
+  public static <clinit>()V
+   L0
+    LINENUMBER 2 L0
+    GETSTATIC scala/Symbol$.MODULE$ : Lscala/Symbol$;
+    LDC "Symbolic1"
+    INVOKEVIRTUAL scala/Symbol$.apply (Ljava/lang/String;)Lscala/Symbol;
+    PUTSTATIC Symbols.symbol$1 : Lscala/Symbol;
+   L1
+    LINENUMBER 3 L1
+    GETSTATIC scala/Symbol$.MODULE$ : Lscala/Symbol$;
+    LDC "Symbolic2"
+    INVOKEVIRTUAL scala/Symbol$.apply (Ljava/lang/String;)Lscala/Symbol;
+    PUTSTATIC Symbols.symbol$2 : Lscala/Symbol;
+   L2
+    LINENUMBER 5 L2
+    GETSTATIC scala/Symbol$.MODULE$ : Lscala/Symbol$;
+    LDC "Symbolic3"
+    INVOKEVIRTUAL scala/Symbol$.apply (Ljava/lang/String;)Lscala/Symbol;
+    PUTSTATIC Symbols.symbol$3 : Lscala/Symbol;
+    RETURN
+    MAXSTACK = 2
+    MAXLOCALS = 0
+
+  // access flags 0x1
+  public someSymbol1()Lscala/Symbol;
+   L0
+    LINENUMBER 2 L0
+    GETSTATIC Symbols.symbol$1 : Lscala/Symbol;
+    ARETURN
+   L1
+    LOCALVARIABLE this LSymbols; L0 L1 0
+    MAXSTACK = 1
+    MAXLOCALS = 1
+
+  // access flags 0x1
+  public someSymbol2()Lscala/Symbol;
+   L0
+    LINENUMBER 3 L0
+    GETSTATIC Symbols.symbol$2 : Lscala/Symbol;
+    ARETURN
+   L1
+    LOCALVARIABLE this LSymbols; L0 L1 0
+    MAXSTACK = 1
+    MAXLOCALS = 1
+
+  // access flags 0x1
+  public sameSymbol1()Lscala/Symbol;
+   L0
+    LINENUMBER 4 L0
+    GETSTATIC Symbols.symbol$1 : Lscala/Symbol;
+    ARETURN
+   L1
+    LOCALVARIABLE this LSymbols; L0 L1 0
+    MAXSTACK = 1
+    MAXLOCALS = 1
+
+  // access flags 0x1
+  public someSymbol3()Lscala/Symbol;
+   L0
+    LINENUMBER 5 L0
+    ALOAD 0
+    GETFIELD Symbols.someSymbol3 : Lscala/Symbol;
+    ARETURN
+   L1
+    LOCALVARIABLE this LSymbols; L0 L1 0
+    MAXSTACK = 1
+    MAXLOCALS = 1
+
+  // access flags 0x1
+  public <init>()V
+   L0
+    LINENUMBER 6 L0
+    ALOAD 0
+    INVOKESPECIAL java/lang/Object.<init> ()V
+   L1
+    LINENUMBER 5 L1
+    ALOAD 0
+    GETSTATIC Symbols.symbol$3 : Lscala/Symbol;
+    PUTFIELD Symbols.someSymbol3 : Lscala/Symbol;
+    RETURN
+   L2
+    LOCALVARIABLE this LSymbols; L0 L2 0
+    MAXSTACK = 2
+    MAXLOCALS = 1
+}
diff --git a/test/files/run/t7974/Symbols.scala b/test/files/run/t7974/Symbols.scala
new file mode 100644
index 0000000..2363b72
--- /dev/null
+++ b/test/files/run/t7974/Symbols.scala
@@ -0,0 +1,6 @@
+class Symbols {
+  def someSymbol1 = 'Symbolic1
+  def someSymbol2 = 'Symbolic2
+  def sameSymbol1 = 'Symbolic1
+  val someSymbol3 = 'Symbolic3
+}
diff --git a/test/files/run/t7974/Test.scala b/test/files/run/t7974/Test.scala
new file mode 100644
index 0000000..9403ea3
--- /dev/null
+++ b/test/files/run/t7974/Test.scala
@@ -0,0 +1,20 @@
+import java.io.PrintWriter;
+
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm.util._
+import scala.tools.nsc.util.stringFromWriter
+
+object Test extends BytecodeTest {
+  def show {
+    val classNode = loadClassNode("Symbols", skipDebugInfo = false)
+    val textifier = new Textifier
+    classNode.accept(new TraceClassVisitor(null, textifier, null))
+    
+    val classString = stringFromWriter(w => textifier.print(w))
+    val result =
+      classString.split('\n')
+        .dropWhile(elem => elem != "public class Symbols {")
+        .filterNot(elem => elem.startsWith("  @Lscala/reflect/ScalaSignature") || elem.startsWith("  ATTRIBUTE ScalaSig"))
+    result foreach println
+  }
+}
diff --git a/test/files/run/t7985.scala b/test/files/run/t7985.scala
new file mode 100644
index 0000000..5fe270f
--- /dev/null
+++ b/test/files/run/t7985.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+  Array(1) match { case _: Array[scala.Int] => }
+}
diff --git a/test/files/run/t7985b.scala b/test/files/run/t7985b.scala
new file mode 100644
index 0000000..aaf649e
--- /dev/null
+++ b/test/files/run/t7985b.scala
@@ -0,0 +1,5 @@
+class a { type X = Int }
+
+object Test extends App {
+  Array(1) match { case _: Array[a#X] => }
+}
diff --git a/test/files/run/t8002.scala b/test/files/run/t8002.scala
new file mode 100644
index 0000000..f24a213
--- /dev/null
+++ b/test/files/run/t8002.scala
@@ -0,0 +1,19 @@
+object Test extends App {
+  val a: Any = {
+    class A private () { private def x = 0; A.y };
+    object A {
+      def a = new A().x
+      private def y = 0
+    }
+    A.a
+  }
+  def b: Any = {
+    object A {
+      def a = new A().x
+      private def y = 0
+    }
+    class A private () { private def x = 0; A.y };
+    A.a
+  }
+  b
+}
diff --git a/test/files/run/t8015-ffc.scala b/test/files/run/t8015-ffc.scala
new file mode 100644
index 0000000..fe6781b
--- /dev/null
+++ b/test/files/run/t8015-ffc.scala
@@ -0,0 +1,7 @@
+
+object Test extends App {
+  val ms = """This is a long multiline string
+  with \u000d\u000a CRLF embedded."""
+  assert(ms.lines.size == 3, s"lines.size ${ms.lines.size}")
+  assert(ms contains "\r\n CRLF", "no CRLF")
+}
diff --git a/test/files/run/t8017.flags b/test/files/run/t8017.flags
new file mode 100644
index 0000000..48b438d
--- /dev/null
+++ b/test/files/run/t8017.flags
@@ -0,0 +1 @@
+-Ydelambdafy:method
diff --git a/test/files/run/t8017/value-class-lambda.scala b/test/files/run/t8017/value-class-lambda.scala
new file mode 100644
index 0000000..370023b
--- /dev/null
+++ b/test/files/run/t8017/value-class-lambda.scala
@@ -0,0 +1,40 @@
+object Test {
+  def testC {
+    val f1 = (c: C) => c.value
+    val f2 = (x: Int) => new C(x)
+    val f3 = (c1: C) => (c2: C) => (c1, c2)
+    val r1 = f2(2)
+    val r2 = f2(2)
+    val r3 = f3(r1)(r2)
+    val result = f1(r3._2)
+    assert(result == 2)
+  }
+
+  def testD {
+    val f1 = (c: D) => c.value
+    val f2 = (x: String) => new D(x)
+    val f3 = (c1: D) => (c2: D) => (c1, c2)
+    val r1 = f2("2")
+    val r2 = f2("2")
+    val r3 = f3(r1)(r2)
+    val result = f1(r3._2)
+    assert(result == "2")
+  }
+
+  def testE {
+    val f1 = (c: E[Int]) => c.value
+    val f2 = (x: Int) => new E(x)
+    val f3 = (c1: E[Int]) => (c2: E[Int]) => (c1, c2)
+    val r1 = f2(2)
+    val r2 = f2(2)
+    val r3 = f3(r1)(r2)
+    val result = f1(r3._2)
+    assert(result == 2)
+  }
+
+  def main(args: Array[String]) {
+    testC
+    testD
+    testE
+  }
+}
diff --git a/test/files/run/t8017/value-class.scala b/test/files/run/t8017/value-class.scala
new file mode 100644
index 0000000..8212393
--- /dev/null
+++ b/test/files/run/t8017/value-class.scala
@@ -0,0 +1,3 @@
+class C(val value: Int) extends AnyVal
+class D(val value: String) extends AnyVal
+class E[A](val value: A) extends AnyVal
diff --git a/test/files/run/t8046.check b/test/files/run/t8046.check
new file mode 100644
index 0000000..905b0b3
--- /dev/null
+++ b/test/files/run/t8046.check
@@ -0,0 +1,2 @@
+List(trait Op, trait Function1, class Object, class Any)
+BTS(T,Three.this.Op[Int],Int => Int,Object,Any)
diff --git a/test/files/run/t8046/Test.scala b/test/files/run/t8046/Test.scala
new file mode 100644
index 0000000..f6b525d
--- /dev/null
+++ b/test/files/run/t8046/Test.scala
@@ -0,0 +1,18 @@
+import scala.tools.partest._
+
+object Test extends DirectTest {
+  override def code = ""
+  override def extraSettings: String = "-usejavacp"
+
+  override def show() {
+    val c = newCompiler()
+    new c.Run
+    import c._
+
+    val f4 = typeOf[Three].member(newTermName("f4"))
+    val f4ParamInfo = f4.paramss.head.head.info
+    println(f4ParamInfo.baseClasses)
+    println(f4ParamInfo.baseTypeSeq)
+  }
+}
+
diff --git a/test/files/run/t8046/t8046c.scala b/test/files/run/t8046/t8046c.scala
new file mode 100644
index 0000000..0b484da
--- /dev/null
+++ b/test/files/run/t8046/t8046c.scala
@@ -0,0 +1,13 @@
+import language._
+
+trait One {
+  type Op[A]
+  type Alias[A] = Op[A]
+}
+
+trait Three extends One {
+  trait Op[A] extends (A => A)
+
+  def f4[T <: Alias[Int]](f: T) = 0
+}
+
diff --git a/test/files/run/t8047.check b/test/files/run/t8047.check
new file mode 100644
index 0000000..a6b83a4
--- /dev/null
+++ b/test/files/run/t8047.check
@@ -0,0 +1,7 @@
+doWhile$1(){
+  1;
+  if (true)
+    doWhile$1()
+  else
+    ()
+}
diff --git a/test/files/run/t8047.scala b/test/files/run/t8047.scala
new file mode 100644
index 0000000..f566054
--- /dev/null
+++ b/test/files/run/t8047.scala
@@ -0,0 +1,31 @@
+object Test extends App {
+  import scala.reflect.runtime.universe._
+  //
+  // x's owner is the outer Test scope. Previously the quasiquote expansion
+  // looked like:
+  //
+  //     object Test {
+  //       build.withFreshTermName("doWhile")(n =>
+  //         LabelDef(n, List(),
+  //           Block(
+  //             List({ val x = 1; x }),
+  //             If(Literal(Constant(true)), Apply(Ident(n), List()), Literal(Constant(())))))
+  //     }
+  //
+  // Here the proper owner is the anonymous function, not Test. Hence
+  // the symbol corruption. In the new encoding this is represented as:
+  //
+  //     object Test {
+  //       {
+  //         val n = build.freshTermName("doWhile")
+  //         LabelDef(n, List(),
+  //           Block(
+  //             List({ val x = 1; x }),
+  //             If(Literal(Constant(true)), Apply(Ident(n), List()), Literal(Constant(()))))
+  //       }
+  //     }
+  //
+  // Owner stays the same and life is good again.
+  //
+  println(q"do ${ val x = 1; x } while(true)")
+}
diff --git a/test/files/run/t8048a.check b/test/files/run/t8048a.check
new file mode 100644
index 0000000..8fb9e26
--- /dev/null
+++ b/test/files/run/t8048a.check
@@ -0,0 +1 @@
+Some(2)
diff --git a/test/files/run/t8048a/Macros_1.scala b/test/files/run/t8048a/Macros_1.scala
new file mode 100644
index 0000000..d13e851
--- /dev/null
+++ b/test/files/run/t8048a/Macros_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.whitebox.Context
+import scala.language.experimental.macros
+
+object Macros {
+  def impl(c: Context) = {
+    import c.universe._
+    q"if (true) Some(2) else None"
+  }
+
+  def foo: Any = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/t8048a/Test_2.scala b/test/files/run/t8048a/Test_2.scala
new file mode 100644
index 0000000..4e1c8b1
--- /dev/null
+++ b/test/files/run/t8048a/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  val x: Option[Int] = Macros.foo
+  println(x)
+}
\ No newline at end of file
diff --git a/test/files/run/t8048b.check b/test/files/run/t8048b.check
new file mode 100644
index 0000000..083edaa
--- /dev/null
+++ b/test/files/run/t8048b.check
@@ -0,0 +1,3 @@
+2
+2
+2
diff --git a/test/files/run/t8048b/Macros_1.scala b/test/files/run/t8048b/Macros_1.scala
new file mode 100644
index 0000000..520a6fa
--- /dev/null
+++ b/test/files/run/t8048b/Macros_1.scala
@@ -0,0 +1,37 @@
+// see the following discussions to understand what's being tested here:
+// * https://issues.scala-lang.org/browse/SI-6992
+// * https://issues.scala-lang.org/browse/SI-8048
+// * http://stackoverflow.com/questions/14370842/getting-a-structural-type-with-an-anonymous-classs-methods-from-a-macro
+// * http://stackoverflow.com/questions/18480707/method-cannot-be-accessed-in-macro-generated-class/18485004#18485004
+// * https://groups.google.com/forum/#!topic/scala-internals/eXQt-BPm4i8
+
+import scala.language.experimental.macros
+import scala.reflect.macros.whitebox.Context
+
+object Macros {
+  def impl1(c: Context) = {
+    import c.universe._
+    q"""
+      trait Foo { def x = 2 }
+      new Foo {}
+    """
+  }
+  def foo1: Any = macro impl1
+
+  def impl2(c: Context) = {
+    import c.universe._
+    q"""
+      class Foo { def x = 2 }
+      new Foo
+    """
+  }
+  def foo2: Any = macro impl2
+
+  def impl3(c: Context) = {
+    import c.universe._
+    q"""
+      new { def x = 2 }
+    """
+  }
+  def foo3: Any = macro impl3
+}
\ No newline at end of file
diff --git a/test/files/run/t8048b/Test_2.scala b/test/files/run/t8048b/Test_2.scala
new file mode 100644
index 0000000..fb410da
--- /dev/null
+++ b/test/files/run/t8048b/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+  println(Macros.foo1.x)
+  println(Macros.foo2.x)
+  println(Macros.foo3.x)
+}
\ No newline at end of file
diff --git a/test/files/run/t8091.check b/test/files/run/t8091.check
new file mode 100644
index 0000000..4c4e917
--- /dev/null
+++ b/test/files/run/t8091.check
@@ -0,0 +1 @@
+b&#x00f6;rk b&#x00f6;rk
diff --git a/test/files/run/t8091.scala b/test/files/run/t8091.scala
new file mode 100644
index 0000000..cd412d4
--- /dev/null
+++ b/test/files/run/t8091.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  val result = "börk börk" flatMap (ch ⇒ if (ch > 127) f"&#x${ch}%04x;" else "" + ch)
+  println(result)
+}
\ No newline at end of file
diff --git a/test/files/run/t8100.check b/test/files/run/t8100.check
new file mode 100644
index 0000000..cdd927f
--- /dev/null
+++ b/test/files/run/t8100.check
@@ -0,0 +1 @@
+Success(0)
diff --git a/test/files/run/t8100.scala b/test/files/run/t8100.scala
new file mode 100644
index 0000000..b9d0fe5
--- /dev/null
+++ b/test/files/run/t8100.scala
@@ -0,0 +1,8 @@
+object Test {
+  import scala.util.Try
+
+  def main(args: Array[String]): Unit = {
+    def stream = Stream.from(0).take(100000).map(n => None)
+    println(Try(stream.flatten.length))
+  }
+}
diff --git a/test/files/run/t8104.check b/test/files/run/t8104.check
new file mode 100644
index 0000000..40523a2
--- /dev/null
+++ b/test/files/run/t8104.check
@@ -0,0 +1,2 @@
+WeakTypeTag[<refinement>.this.Repr]
+(Int, Int)
diff --git a/test/files/run/t8104/Macros_1.scala b/test/files/run/t8104/Macros_1.scala
new file mode 100644
index 0000000..e135bd8
--- /dev/null
+++ b/test/files/run/t8104/Macros_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.whitebox.Context
+
+object Macros {
+  def impl[T](c: Context)(implicit T: c.WeakTypeTag[T]) = {
+    import c.universe._
+    import definitions._
+    val fields = T.tpe.decls.toList.collect{ case x: TermSymbol if x.isVal && x.isCaseAccessor => x }
+    val Repr = appliedType(TupleClass(fields.length).asType.toType, fields.map(_.info))
+    q"new Generic[$T]{ type Repr = $Repr }"
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/t8104/Test_2.scala b/test/files/run/t8104/Test_2.scala
new file mode 100644
index 0000000..08451df
--- /dev/null
+++ b/test/files/run/t8104/Test_2.scala
@@ -0,0 +1,19 @@
+trait Generic[T] { type Repr }
+object Generic {
+  type Aux[T, Repr0] = Generic[T] { type Repr = Repr0 }
+  import scala.language.experimental.macros
+  implicit def materializeGeneric[T, Repr]: Generic.Aux[T, Repr] = macro Macros.impl[T]
+}
+
+object Test extends App {
+  case class C(x: Int, y: Int)
+
+  import scala.reflect.runtime.universe._
+  def reprify[T, Repr](x: T)(implicit generic: Generic.Aux[T, Repr], tag: WeakTypeTag[Repr]) = {
+    println(tag)
+    println(tag.tpe.typeSymbol.info)
+  }
+  reprify(C(40, 2))
+
+  implicitly[Generic.Aux[C, (Int, Int)]]
+}
diff --git a/test/files/run/t8114.scala b/test/files/run/t8114.scala
deleted file mode 100644
index ecbca37..0000000
--- a/test/files/run/t8114.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-class AbstractTable[T] { type TableElementType }
-class Table[T] extends AbstractTable[T] { type TableElementType = T }
- 
-class Query[E, U]
-class TableQuery[E <: AbstractTable[_]] extends Query[E, E#TableElementType]
- 
-object Test extends App {
-  object MyTable extends TableQuery[Table[Long]]
- 
-  def list[R](q: Query[_, R]): List[R] = Nil
-  list/*[Long]*/(MyTable) collect { case x => x }
-
-  // Generates a redundant bridge method (double definition error)
-  // in 2.10.x due to (at least) the bug in erasure fixed in SI-7120
-}
diff --git a/test/files/run/t8133/A_1.scala b/test/files/run/t8133/A_1.scala
new file mode 100644
index 0000000..a2836cd
--- /dev/null
+++ b/test/files/run/t8133/A_1.scala
@@ -0,0 +1,5 @@
+//  a.scala
+package object pkg {
+  class AnyOps(val x: Any) extends AnyVal
+  def AnyOps(x: Any) = new AnyOps(x)
+}
diff --git a/test/files/run/t8133/B_2.scala b/test/files/run/t8133/B_2.scala
new file mode 100644
index 0000000..b80e109
--- /dev/null
+++ b/test/files/run/t8133/B_2.scala
@@ -0,0 +1,15 @@
+package pkg { 
+  package object other
+  package other {
+    class Crash { 
+      AnyOps(0)
+      ()
+    }
+  }
+}
+ 
+object Test {
+  def main(args: Array[String]): Unit = {
+    new pkg.other.Crash
+  }
+}
diff --git a/test/files/run/t8133b/A_1.scala b/test/files/run/t8133b/A_1.scala
new file mode 100644
index 0000000..24bbfc1
--- /dev/null
+++ b/test/files/run/t8133b/A_1.scala
@@ -0,0 +1,4 @@
+package object pkg {
+  def foo(x: Int): String = "a"
+  def foo(x: String): String = "b"
+}
diff --git a/test/files/run/t8133b/B_2.scala b/test/files/run/t8133b/B_2.scala
new file mode 100644
index 0000000..865ca0c
--- /dev/null
+++ b/test/files/run/t8133b/B_2.scala
@@ -0,0 +1,9 @@
+//  b.scala
+package pkg {
+  package object other
+  package other { class Crash { foo("") } }
+}
+ 
+object Test {
+  def main(args: Array[String]): Unit = new pkg.other.Crash
+}
diff --git a/test/files/run/t7064-old-style-supercalls.check b/test/files/run/t8153.check
similarity index 100%
rename from test/files/run/t7064-old-style-supercalls.check
rename to test/files/run/t8153.check
diff --git a/test/files/run/t8153.scala b/test/files/run/t8153.scala
new file mode 100644
index 0000000..f9b223f
--- /dev/null
+++ b/test/files/run/t8153.scala
@@ -0,0 +1,14 @@
+object Test {
+  def f() = {
+    val lb = scala.collection.mutable.ListBuffer[Int](1, 2)
+    val it = lb.iterator
+    if (it.hasNext) it.next
+    val xs = lb.toList
+    lb += 3
+    it.mkString
+  }
+
+  def main(args: Array[String]) {
+    println(f())
+  }
+}
diff --git a/test/files/run/t8177f.scala b/test/files/run/t8177f.scala
new file mode 100644
index 0000000..f50a5d9
--- /dev/null
+++ b/test/files/run/t8177f.scala
@@ -0,0 +1,20 @@
+trait Thing { type A; var p: A = _ }
+class A[T](final val x: Thing { type A = T }) {
+  type Q = T
+
+  def x1: T   = x.p
+  def x2: Q   = x.p
+  def x3: x.A = x.p
+}
+// all result types should be inferred as Int
+class B extends A[Int](null) {
+  def y1 = x1
+  def y2 = x2
+  val y3 = x3 // before SI-8177, this led to a signature that erased to java.lang.Object
+}
+
+
+object Test extends App {
+  val methods = classOf[B].getDeclaredMethods.sortBy(_.getName)
+  assert(methods.forall(_.toGenericString.startsWith("public int")))
+}
diff --git a/test/files/run/t8188.scala b/test/files/run/t8188.scala
new file mode 100644
index 0000000..ec3a968
--- /dev/null
+++ b/test/files/run/t8188.scala
@@ -0,0 +1,25 @@
+object Test {
+  def main(args: Array[String]) {
+    import java.io.ByteArrayInputStream
+    import java.io.ByteArrayOutputStream
+    import java.io.ObjectInputStream
+    import java.io.ObjectOutputStream
+    import scala.collection.concurrent.TrieMap
+   
+    def ser[T](o: T): Array[Byte] = {
+      val baos = new ByteArrayOutputStream()
+      new ObjectOutputStream(baos).writeObject(o)
+      baos.toByteArray()
+    }
+
+    def deser[T](bs: Array[Byte]): T =
+      new ObjectInputStream(new ByteArrayInputStream(bs)).readObject().asInstanceOf[T]
+   
+    def cloneViaSerialization[T](t: T): T = deser(ser(t))
+   
+    val f = cloneViaSerialization(_: TrieMap[Int, Int])
+    val tm = TrieMap(1 -> 2)
+    assert( f(f(tm)) == tm )
+    assert( ser(tm).length == ser(f(tm)).length )
+  }
+}
diff --git a/test/files/run/t8190.check b/test/files/run/t8190.check
new file mode 100644
index 0000000..d117bf3
--- /dev/null
+++ b/test/files/run/t8190.check
@@ -0,0 +1,91 @@
+Annotation
+Constant
+Mirror
+Name
+TermName
+TypeName
+Position
+Scope
+MemberScope
+Symbol
+TermSymbol
+TypeSymbol
+MethodSymbol
+ModuleSymbol
+ClassSymbol
+FreeTermSymbol
+FreeTypeSymbol
+Type
+SingletonType
+ThisType
+SingleType
+SuperType
+ConstantType
+TypeRef
+CompoundType
+RefinedType
+ClassInfoType
+MethodType
+NullaryMethodType
+PolyType
+ExistentialType
+AnnotatedType
+TypeBounds
+BoundedWildcardType
+Tree
+TermTree
+TypTree
+SymTree
+NameTree
+RefTree
+DefTree
+MemberDef
+PackageDef
+ImplDef
+ClassDef
+ModuleDef
+ValOrDefDef
+ValDef
+DefDef
+TypeDef
+LabelDef
+ImportSelector
+Import
+Template
+Block
+CaseDef
+Alternative
+Star
+Bind
+UnApply
+Function
+Assign
+AssignOrNamedArg
+If
+Match
+Return
+Try
+Throw
+New
+Typed
+GenericApply
+TypeApply
+Apply
+Super
+This
+Select
+Ident
+ReferenceToBoxed
+Literal
+Annotated
+SingletonTypeTree
+SelectFromTypeTree
+CompoundTypeTree
+AppliedTypeTree
+TypeBoundsTree
+ExistentialTypeTree
+TypeTree
+Modifiers
+TreeCopier
+checking exhaustiveness in scala.reflect.api.Universe...
+uncovered type members: List()
diff --git a/test/files/run/t8190.scala b/test/files/run/t8190.scala
new file mode 100644
index 0000000..17ff83c
--- /dev/null
+++ b/test/files/run/t8190.scala
@@ -0,0 +1,210 @@
+import scala.reflect.runtime.universe._
+
+trait Overloads {
+  // makes sure no overload erases to Any or AnyRef
+  def test(x: AnyRef) = "AnyRef"
+  def test(x: Annotation) = "Annotation"
+  def test(x: Constant) = "Constant"
+  def test(x: Mirror) = "Mirror"
+  def test(x: Name) = "Name"
+  def test(x: TermName) = "TermName"
+  def test(x: TypeName) = "TypeName"
+  def test(x: Position) = "Position"
+  def test(x: Scope) = "Scope"
+  def test(x: MemberScope) = "MemberScope"
+  def test(x: Symbol) = "Symbol"
+  def test(x: TermSymbol) = "TermSymbol"
+  def test(x: TypeSymbol) = "TypeSymbol"
+  def test(x: MethodSymbol) = "MethodSymbol"
+  def test(x: ModuleSymbol) = "ModuleSymbol"
+  def test(x: ClassSymbol) = "ClassSymbol"
+  def test(x: FreeTermSymbol) = "FreeTermSymbol"
+  def test(x: FreeTypeSymbol) = "FreeTypeSymbol"
+  def test(x: Type) = "Type"
+  def test(x: SingletonType) = "SingletonType"
+  def test(x: ThisType) = "ThisType"
+  def test(x: SingleType) = "SingleType"
+  def test(x: SuperType) = "SuperType"
+  def test(x: ConstantType) = "ConstantType"
+  def test(x: TypeRef) = "TypeRef"
+  def test(x: CompoundType) = "CompoundType"
+  def test(x: RefinedType) = "RefinedType"
+  def test(x: ClassInfoType) = "ClassInfoType"
+  def test(x: MethodType) = "MethodType"
+  def test(x: NullaryMethodType) = "NullaryMethodType"
+  def test(x: PolyType) = "PolyType"
+  def test(x: ExistentialType) = "ExistentialType"
+  def test(x: AnnotatedType) = "AnnotatedType"
+  def test(x: TypeBounds) = "TypeBounds"
+  def test(x: BoundedWildcardType) = "BoundedWildcardType"
+  def test(x: Tree) = "Tree"
+  def test(x: TermTree) = "TermTree"
+  def test(x: TypTree) = "TypTree"
+  def test(x: SymTree) = "SymTree"
+  def test(x: NameTree) = "NameTree"
+  def test(x: RefTree) = "RefTree"
+  def test(x: DefTree) = "DefTree"
+  def test(x: MemberDef) = "MemberDef"
+  def test(x: PackageDef) = "PackageDef"
+  def test(x: ImplDef) = "ImplDef"
+  def test(x: ClassDef) = "ClassDef"
+  def test(x: ModuleDef) = "ModuleDef"
+  def test(x: ValOrDefDef) = "ValOrDefDef"
+  def test(x: ValDef) = "ValDef"
+  def test(x: DefDef) = "DefDef"
+  def test(x: TypeDef) = "TypeDef"
+  def test(x: LabelDef) = "LabelDef"
+  def test(x: ImportSelector) = "ImportSelector"
+  def test(x: Import) = "Import"
+  def test(x: Template) = "Template"
+  def test(x: Block) = "Block"
+  def test(x: CaseDef) = "CaseDef"
+  def test(x: Alternative) = "Alternative"
+  def test(x: Star) = "Star"
+  def test(x: Bind) = "Bind"
+  def test(x: UnApply) = "UnApply"
+  def test(x: Function) = "Function"
+  def test(x: Assign) = "Assign"
+  def test(x: AssignOrNamedArg) = "AssignOrNamedArg"
+  def test(x: If) = "If"
+  def test(x: Match) = "Match"
+  def test(x: Return) = "Return"
+  def test(x: Try) = "Try"
+  def test(x: Throw) = "Throw"
+  def test(x: New) = "New"
+  def test(x: Typed) = "Typed"
+  def test(x: GenericApply) = "GenericApply"
+  def test(x: TypeApply) = "TypeApply"
+  def test(x: Apply) = "Apply"
+  def test(x: Super) = "Super"
+  def test(x: This) = "This"
+  def test(x: Select) = "Select"
+  def test(x: Ident) = "Ident"
+  def test(x: ReferenceToBoxed) = "ReferenceToBoxed"
+  def test(x: Literal) = "Literal"
+  def test(x: Annotated) = "Annotated"
+  def test(x: SingletonTypeTree) = "SingletonTypeTree"
+  def test(x: SelectFromTypeTree) = "SelectFromTypeTree"
+  def test(x: CompoundTypeTree) = "CompoundTypeTree"
+  def test(x: AppliedTypeTree) = "AppliedTypeTree"
+  def test(x: TypeBoundsTree) = "TypeBoundsTree"
+  def test(x: ExistentialTypeTree) = "ExistentialTypeTree"
+  def test(x: TypeTree) = "TypeTree"
+  def test(x: Modifiers) = "Modifiers"
+  def test(x: TreeCopier) = "TreeCopier"
+}
+
+object Test extends App with Overloads {
+  val buf = scala.collection.mutable.ListBuffer[String]()
+  def record(result: String): Unit = {
+    println(result)
+    buf += result
+  }
+  def check(): Unit = {
+    println("checking exhaustiveness in scala.reflect.api.Universe...")
+    var types = typeOf[scala.reflect.api.Universe].members.filter(sym => sym.isType && !sym.isClass).map(_.name.toString)
+    types = types.filter(_ != "ModifiersCreator") // type ModifiersCreator = ModifiersExtractor
+    types = types.filter(_ != "FlagSet") // type FlagSet
+    types = types.filter(_ != "RuntimeClass") // type RuntimeClass = java.lang.Class[_]
+    types = types.filter(_ != "JavaArgument") // deprecated
+    types = types.filter(_ != "LiteralArgument") // deprecated
+    types = types.filter(_ != "ArrayArgument") // deprecated
+    types = types.filter(_ != "NestedArgument") // deprecated
+    types = types.filter(_ != "Importer") // deprecated
+    types = types.filter(_ != "Internal") // internal
+    types = types.filter(_ != "Compat") // internal
+    types = types.filter(_ != "BuildApi") // deprecated
+    val diff = types.toList diff buf.toList
+    println("uncovered type members: " + diff)
+  }
+  record(test(null: Annotation))
+  record(test(null: Constant))
+  record(test(null: Mirror))
+  record(test(null: Name))
+  record(test(null: TermName))
+  record(test(null: TypeName))
+  record(test(null: Position))
+  record(test(null: Scope))
+  record(test(null: MemberScope))
+  record(test(null: Symbol))
+  record(test(null: TermSymbol))
+  record(test(null: TypeSymbol))
+  record(test(null: MethodSymbol))
+  record(test(null: ModuleSymbol))
+  record(test(null: ClassSymbol))
+  record(test(null: FreeTermSymbol))
+  record(test(null: FreeTypeSymbol))
+  record(test(null: Type))
+  record(test(null: SingletonType))
+  record(test(null: ThisType))
+  record(test(null: SingleType))
+  record(test(null: SuperType))
+  record(test(null: ConstantType))
+  record(test(null: TypeRef))
+  record(test(null: CompoundType))
+  record(test(null: RefinedType))
+  record(test(null: ClassInfoType))
+  record(test(null: MethodType))
+  record(test(null: NullaryMethodType))
+  record(test(null: PolyType))
+  record(test(null: ExistentialType))
+  record(test(null: AnnotatedType))
+  record(test(null: TypeBounds))
+  record(test(null: BoundedWildcardType))
+  record(test(null: Tree))
+  record(test(null: TermTree))
+  record(test(null: TypTree))
+  record(test(null: SymTree))
+  record(test(null: NameTree))
+  record(test(null: RefTree))
+  record(test(null: DefTree))
+  record(test(null: MemberDef))
+  record(test(null: PackageDef))
+  record(test(null: ImplDef))
+  record(test(null: ClassDef))
+  record(test(null: ModuleDef))
+  record(test(null: ValOrDefDef))
+  record(test(null: ValDef))
+  record(test(null: DefDef))
+  record(test(null: TypeDef))
+  record(test(null: LabelDef))
+  record(test(null: ImportSelector))
+  record(test(null: Import))
+  record(test(null: Template))
+  record(test(null: Block))
+  record(test(null: CaseDef))
+  record(test(null: Alternative))
+  record(test(null: Star))
+  record(test(null: Bind))
+  record(test(null: UnApply))
+  record(test(null: Function))
+  record(test(null: Assign))
+  record(test(null: AssignOrNamedArg))
+  record(test(null: If))
+  record(test(null: Match))
+  record(test(null: Return))
+  record(test(null: Try))
+  record(test(null: Throw))
+  record(test(null: New))
+  record(test(null: Typed))
+  record(test(null: GenericApply))
+  record(test(null: TypeApply))
+  record(test(null: Apply))
+  record(test(null: Super))
+  record(test(null: This))
+  record(test(null: Select))
+  record(test(null: Ident))
+  record(test(null: ReferenceToBoxed))
+  record(test(null: Literal))
+  record(test(null: Annotated))
+  record(test(null: SingletonTypeTree))
+  record(test(null: SelectFromTypeTree))
+  record(test(null: CompoundTypeTree))
+  record(test(null: AppliedTypeTree))
+  record(test(null: TypeBoundsTree))
+  record(test(null: ExistentialTypeTree))
+  record(test(null: TypeTree))
+  record(test(null: Modifiers))
+  record(test(null: TreeCopier))
+  check()
+}
\ No newline at end of file
diff --git a/test/files/run/t8192.check b/test/files/run/t8192.check
new file mode 100644
index 0000000..2423a7a
--- /dev/null
+++ b/test/files/run/t8192.check
@@ -0,0 +1,32 @@
+compile-time
+package scala
+primary constructor: NoSymbol
+object List
+primary constructor: def <init>(): scala.collection.immutable.List.type => true
+def <init>(): scala.collection.immutable.List.type => true
+trait Product1
+primary constructor: def $init$(): Unit => true
+class UninitializedFieldError
+primary constructor: def <init>(msg: String): UninitializedFieldError => true
+def <init>(msg: String): UninitializedFieldError => true
+def <init>(obj: Any): UninitializedFieldError => false
+class C
+primary constructor: def <init>(x: Int): C => true
+def <init>(x: Int): C => true
+def <init>(x: String): C => false
+runtime
+package scala
+primary constructor: NoSymbol
+object List
+primary constructor: def <init>(): scala.collection.immutable.List.type => true
+def <init>(): scala.collection.immutable.List.type => true
+trait Product1
+primary constructor: def $init$(): Unit => true
+class UninitializedFieldError
+primary constructor: def <init>(msg: String): UninitializedFieldError => true
+def <init>(msg: String): UninitializedFieldError => true
+def <init>(obj: Any): UninitializedFieldError => false
+class C
+primary constructor: def <init>(x: Int): C => true
+def <init>(x: Int): C => true
+def <init>(x: String): C => false
diff --git a/test/files/run/t8192/Macros_1.scala b/test/files/run/t8192/Macros_1.scala
new file mode 100644
index 0000000..72fb2cf
--- /dev/null
+++ b/test/files/run/t8192/Macros_1.scala
@@ -0,0 +1,45 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+import java.io._
+
+object Macros {
+  def impl(c: Context) = {
+    var messages = List[String]()
+    def println(msg: String) = messages :+= msg
+
+    import c.universe._
+    def test(sym: ClassSymbol): Unit = {
+      def fullyInitializeSymbol(sym: Symbol): Unit = {
+        val internal = c.universe.asInstanceOf[scala.reflect.internal.SymbolTable]
+        internal.definitions.fullyInitializeSymbol(sym.asInstanceOf[internal.Symbol])
+      }
+      def defString(sym: Symbol): String = {
+        val internal = c.universe.asInstanceOf[scala.reflect.internal.SymbolTable]
+        sym.asInstanceOf[internal.Symbol].defString
+      }
+      def showCtor(sym: Symbol): String = {
+        fullyInitializeSymbol(sym)
+        if (sym == NoSymbol) "NoSymbol"
+        else s"${defString(sym)} => ${sym.asMethod.isPrimaryConstructor}"
+      }
+      sym.info
+      println(sym.toString)
+      println(s"primary constructor: ${showCtor(sym.primaryConstructor)}")
+      val ctors = sym.info.members.filter(_.name == termNames.CONSTRUCTOR).map(sym => showCtor(sym))
+      ctors.toList.sorted.foreach(println)
+    }
+
+    println("compile-time")
+    // SI-8367 primaryConstructor for Java-defined classes is unstable, so I'm commenting this out
+    // test(typeOf[File].typeSymbol.asClass)
+    test(definitions.ScalaPackageClass)
+    test(definitions.ListModule.moduleClass.asClass)
+    test(typeOf[Product1[_]].typeSymbol.asClass)
+    test(typeOf[UninitializedFieldError].typeSymbol.asClass)
+    test(c.mirror.staticClass("C").asClass)
+
+    q"..${messages.map(msg => q"println($msg)")}"
+  }
+
+  def foo: Any = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/t8192/Test_2.scala b/test/files/run/t8192/Test_2.scala
new file mode 100644
index 0000000..8930208
--- /dev/null
+++ b/test/files/run/t8192/Test_2.scala
@@ -0,0 +1,40 @@
+import java.io._
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+
+class C(x: Int) {
+  def this(x: String) = this(x.toInt)
+}
+
+object Test extends App {
+  def test(sym: ClassSymbol): Unit = {
+    def fullyInitializeSymbol(sym: Symbol): Unit = {
+      val internal = ru.asInstanceOf[scala.reflect.internal.SymbolTable]
+      internal.definitions.fullyInitializeSymbol(sym.asInstanceOf[internal.Symbol])
+    }
+    def defString(sym: Symbol): String = {
+      val internal = ru.asInstanceOf[scala.reflect.internal.SymbolTable]
+      sym.asInstanceOf[internal.Symbol].defString
+    }
+    def showCtor(sym: Symbol): String = {
+      fullyInitializeSymbol(sym)
+      if (sym == NoSymbol) "NoSymbol"
+      else s"${defString(sym)} => ${sym.asMethod.isPrimaryConstructor}"
+    }
+    sym.info
+    println(sym.toString)
+    println(s"primary constructor: ${showCtor(sym.primaryConstructor)}")
+    val ctors = sym.info.members.filter(_.name == termNames.CONSTRUCTOR).map(sym => showCtor(sym))
+    ctors.toList.sorted.foreach(println)
+  }
+
+  Macros.foo
+  println("runtime")
+  // SI-8367 primaryConstructor for Java-defined classes is unstable, so I'm commenting this out
+  // test(typeOf[File].typeSymbol.asClass)
+  test(definitions.ScalaPackageClass)
+  test(definitions.ListModule.moduleClass.asClass)
+  test(typeOf[Product1[_]].typeSymbol.asClass)
+  test(typeOf[UninitializedFieldError].typeSymbol.asClass)
+  test(typeOf[C].typeSymbol.asClass)
+}
diff --git a/test/files/run/t8197.scala b/test/files/run/t8197.scala
new file mode 100644
index 0000000..910a3eb
--- /dev/null
+++ b/test/files/run/t8197.scala
@@ -0,0 +1,16 @@
+// SI-8197, see also SI-4592 and SI-4728
+class A
+class B
+
+class Foo(val x: A = null) {
+  def this(bla: B*) {
+    this(new A)
+  }
+}
+
+object Test extends App {
+  // both constructors of `Foo` are applicable. Overloading resolution
+  // will eliminate the alternative that uses a default argument; therefore
+  // the vararg constructor is chosen.
+  assert((new Foo).x != null)
+}
diff --git a/test/files/run/t8197b.scala b/test/files/run/t8197b.scala
new file mode 100644
index 0000000..8b3e0af
--- /dev/null
+++ b/test/files/run/t8197b.scala
@@ -0,0 +1,8 @@
+object O {
+  def foo[T](t: T) = 0
+  def foo(s: String)(implicit i: DummyImplicit = null) = 1
+}
+
+object Test extends App {
+  assert(O.foo("") == 1)
+}
diff --git a/test/files/run/t8199.scala b/test/files/run/t8199.scala
new file mode 100644
index 0000000..5099415
--- /dev/null
+++ b/test/files/run/t8199.scala
@@ -0,0 +1,105 @@
+class reallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongname {
+object obj0
+object obj01
+object obj012
+object obj0123
+object obj01234
+object obj012345
+object obj0123456
+object obj01234567
+object obj012345678
+object obj0123456789
+object obj01234567890
+class cls0
+class cls01
+class cls012
+class cls0123
+class cls01234
+class cls012345
+class cls0123456
+class cls01234567
+class cls012345678
+class cls0123456789
+class cls01234567890
+trait trt0 { def x = Test.checkCallerImplClassName() }
+trait trt01 { def x = Test.checkCallerImplClassName() }
+trait trt012 { def x = Test.checkCallerImplClassName() }
+trait trt0123 { def x = Test.checkCallerImplClassName() }
+trait trt01234 { def x = Test.checkCallerImplClassName() }
+trait trt012345 { def x = Test.checkCallerImplClassName() }
+trait trt0123456 { def x = Test.checkCallerImplClassName() }
+trait trt01234567 { def x = Test.checkCallerImplClassName() }
+trait trt012345678 { def x = Test.checkCallerImplClassName() }
+trait trt0123456789 { def x = Test.checkCallerImplClassName() }
+trait trt01234567890 { def x = Test.checkCallerImplClassName() }
+}
+
+object Test extends App {
+  def check(c: Class[_]) {
+    checkClassName(c.getName)
+  }
+  def checkClassName(name: String) {
+    val defaultMaxClassFileLength = 255
+    assert((name + ".class").length <= defaultMaxClassFileLength, name)
+  }
+  def checkCallerImplClassName() {
+    val name = Thread.currentThread.getStackTrace.apply(2).getClassName
+    assert(name.contains("$class"))
+    Test.checkClassName(name)
+  }
+
+  val c = new reallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongname
+  import c._
+
+  check(obj0.getClass)
+  check(obj01.getClass)
+  check(obj012.getClass)
+  check(obj0123.getClass)
+  check(obj01234.getClass)
+  check(obj012345.getClass)
+  check(obj0123456.getClass)
+  check(obj01234567.getClass)
+  check(obj012345678.getClass)
+  check(obj0123456789.getClass)
+  check(obj01234567890.getClass)
+
+  check(classOf[cls0])
+  check(classOf[cls01])
+  check(classOf[cls012])
+  check(classOf[cls0123])
+  check(classOf[cls01234])
+  check(classOf[cls012345])
+  check(classOf[cls0123456])
+  check(classOf[cls01234567])
+  check(classOf[cls012345678])
+  check(classOf[cls0123456789])
+  check(classOf[cls01234567890])
+
+  // interface facets
+  check(classOf[trt0])
+  check(classOf[trt01])
+  check(classOf[trt012])
+  check(classOf[trt0123])
+  check(classOf[trt01234])
+  check(classOf[trt012345])
+  check(classOf[trt0123456])
+  check(classOf[trt01234567])
+  check(classOf[trt012345678])
+  check(classOf[trt0123456789])
+  check(classOf[trt01234567890])
+
+  // the names of impl classes are harder to find, so test them indirectly via checkCallerImplClassName
+  (new trt0 {}).x
+  (new trt01 {}).x
+  (new trt012 {}).x
+  (new trt0123 {}).x
+  (new trt01234 {}).x
+  (new trt012345 {}).x
+  (new trt0123456 {}).x
+  (new trt01234567 {}).x
+  (new trt012345678 {}).x
+  (new trt0123456789 {}).x
+  (new trt01234567890 {}).x
+}
+
+// filename too long: reallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongname$obj012345$.class
diff --git a/test/files/run/t8233-bcode.flags b/test/files/run/t8233-bcode.flags
new file mode 100644
index 0000000..c30091d
--- /dev/null
+++ b/test/files/run/t8233-bcode.flags
@@ -0,0 +1 @@
+-Ybackend:GenBCode
diff --git a/test/files/run/t8233-bcode.scala b/test/files/run/t8233-bcode.scala
new file mode 100644
index 0000000..72d013e
--- /dev/null
+++ b/test/files/run/t8233-bcode.scala
@@ -0,0 +1,31 @@
+object Test {
+  def bar(s: String) = s;
+  val o: Option[Null] = None
+  def nullReference {
+    val a: Null = o.get
+    bar(a) // Was: VerifyError under GenICode
+  }
+
+  def literal {
+    val a: Null = null
+    bar(a)
+  }
+
+  /** Check SI-8330 for details */
+  def expectedUnitInABranch(b: Boolean): Boolean = {
+    if (b) {
+      val x = 12
+      ()
+    } else {
+      // here the expected type is (unboxed) Unit
+      null
+    }
+    true
+  }
+
+  def main(args: Array[String]): Unit = {
+    try { nullReference } catch { case _: NoSuchElementException => }
+    literal
+    expectedUnitInABranch(true)
+  }
+}
diff --git a/test/files/run/t8233.scala b/test/files/run/t8233.scala
new file mode 100644
index 0000000..97a98a2
--- /dev/null
+++ b/test/files/run/t8233.scala
@@ -0,0 +1,31 @@
+object Test {
+  def bar(s: String) = s;
+  val o: Option[Null] = None
+  def nullReference {
+    val a: Null = o.get
+    bar(a) // Was: VerifyError under GenICode
+  }
+
+  def literal {
+    val a: Null = null
+    bar(a)
+  }
+
+  /** Check SI-8330 for details */
+  def expectedUnitInABranch(b: Boolean): Boolean = {
+    if (b) {
+      val x = 12
+      ()
+    } else {
+      // here the expected type is (unboxed) Unit
+      null
+    }
+    true
+  }
+
+  def main(args: Array[String]): Unit = {
+    try { nullReference } catch { case _: NoSuchElementException => }
+    literal
+    expectedUnitInABranch(true) // Was: VerifyError under GenICode
+  }
+}
diff --git a/test/files/run/t8245.scala b/test/files/run/t8245.scala
new file mode 100644
index 0000000..d44defb
--- /dev/null
+++ b/test/files/run/t8245.scala
@@ -0,0 +1,14 @@
+object Test {
+  def foo(o: Option[Int]): Int = {
+    lazy val i: Int = {
+      def local: Int = {if ("".isEmpty) return 42; -42}
+      assert(local == 42)
+      o.getOrElse(return -1)
+    }
+    i + 1
+  }
+
+  def main(args: Array[String]) {
+    assert(foo(None) == -1)
+  }
+}
diff --git a/test/files/run/t8266-octal-interp.check b/test/files/run/t8266-octal-interp.check
new file mode 100644
index 0000000..66ecafd
--- /dev/null
+++ b/test/files/run/t8266-octal-interp.check
@@ -0,0 +1,30 @@
+t8266-octal-interp.scala:4: warning: Octal escape literals are deprecated, use \b instead.
+    f"a\10c",
+       ^
+t8266-octal-interp.scala:5: warning: Octal escape literals are deprecated, use \t instead.
+    f"a\11c",
+       ^
+t8266-octal-interp.scala:6: warning: Octal escape literals are deprecated, use \n instead.
+    f"a\12c",
+       ^
+t8266-octal-interp.scala:7: warning: Octal escape literals are deprecated, use \r instead.
+    f"a\15c",
+       ^
+t8266-octal-interp.scala:8: warning: Octal escape literals are deprecated, use ${'"'} or a triple-quoted literal """with embedded " or \u0022""" instead.
+    f"a\42c",
+       ^
+t8266-octal-interp.scala:9: warning: Octal escape literals are deprecated, use \\ instead.
+    f"a\134c",
+       ^
+t8266-octal-interp.scala:10: warning: Octal escape literals are deprecated, use \u0069 instead.
+    f"a\15151515c"
+       ^
+ac
+a	c
+a
+c
+a
+c
+a"c
+a\c
+ai51515c
diff --git a/test/files/neg/t5589neg.flags b/test/files/run/t8266-octal-interp.flags
similarity index 100%
copy from test/files/neg/t5589neg.flags
copy to test/files/run/t8266-octal-interp.flags
diff --git a/test/files/run/t8266-octal-interp.scala b/test/files/run/t8266-octal-interp.scala
new file mode 100644
index 0000000..f85ae03
--- /dev/null
+++ b/test/files/run/t8266-octal-interp.scala
@@ -0,0 +1,16 @@
+
+trait X {
+  def f = Seq(
+    f"a\10c",
+    f"a\11c",
+    f"a\12c",
+    f"a\15c",
+    f"a\42c",
+    f"a\134c",
+    f"a\15151515c"
+  )
+}
+
+object Test extends App with X {
+  f foreach println
+}
diff --git a/test/files/run/t8280.check b/test/files/run/t8280.check
new file mode 100644
index 0000000..ed39284
--- /dev/null
+++ b/test/files/run/t8280.check
@@ -0,0 +1,9 @@
+Int
+Int
+Int
+Int
+Int
+Int
+Int
+Int
+Int
diff --git a/test/files/run/t8280.scala b/test/files/run/t8280.scala
new file mode 100644
index 0000000..0734d63
--- /dev/null
+++ b/test/files/run/t8280.scala
@@ -0,0 +1,82 @@
+import scala.language.implicitConversions
+
+object Test {
+  def main(args: Array[String]): Unit = {
+    Moop1.ob1
+    Moop1.ob2
+    Moop1.ob3
+    Moop2.ob1
+    Moop2.ob2
+    Moop2.ob3
+    Moop3.ob1
+    Moop3.ob2
+    Moop3.ob3
+  }
+}
+
+// int object vs.
+object Moop1 {
+  object ob1 {
+    implicit object f1 extends (Int => String) { def apply(x: Int): String = "Int" }
+    implicit object f2 extends (Long => String) { def apply(x: Long): String = "Long" }
+
+    println(5: String)
+  }
+  object ob2 {
+    implicit object f1 extends (Int => String) { def apply(x: Int): String = "Int" }
+    implicit def f2(x: Long): String = "Long"
+
+    println(5: String)
+  }
+  object ob3 {
+    implicit object f1 extends (Int => String) { def apply(x: Int): String = "Int" }
+    implicit val f2: Long => String = _ => "Long"
+
+    println(5: String)
+  }
+}
+
+// int def vs.
+object Moop2 {
+  object ob1 {
+    implicit def f1(x: Int): String = "Int"
+    implicit object f2 extends (Long => String) { def apply(x: Long): String = "Long" }
+
+    println(5: String)
+  }
+  object ob2 {
+    implicit def f1(x: Int): String = "Int"
+    implicit def f2(x: Long): String = "Long"
+
+    println(5: String)
+  }
+  object ob3 {
+    implicit def f1(x: Int): String = "Int"
+    implicit val f2: Long => String = _ => "Long"
+
+    println(5: String)
+  }
+}
+
+// int val vs.
+object Moop3 {
+  object ob1 {
+    implicit val f1: Int => String  = _ => "Int"
+    implicit object f2 extends (Long => String) { def apply(x: Long): String = "Long" }
+
+    println(5: String)
+  }
+  object ob2 {
+    implicit val f1: Int => String  = _ => "Int"
+    implicit def f2(x: Long): String = "Long"
+
+    println(5: String)
+  }
+  object ob3 {
+    implicit val f1: Int => String  = _ => "Int"
+    implicit val f2: Long => String = _ => "Long"
+
+    println(5: String)
+  }
+}
+
diff --git a/test/files/run/t6955.check b/test/files/run/t8321.check
similarity index 100%
rename from test/files/run/t6955.check
rename to test/files/run/t8321.check
diff --git a/test/files/run/t8321/Macros_1.scala b/test/files/run/t8321/Macros_1.scala
new file mode 100644
index 0000000..70e44fc
--- /dev/null
+++ b/test/files/run/t8321/Macros_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+class Bundle(val c: Context) {
+  import c.universe._
+  def impl = q"new { val x = 2 }"
+}
+
+object Macros {
+  def foo: Any = macro Bundle.impl
+}
\ No newline at end of file
diff --git a/test/files/run/t8321/Test_2.scala b/test/files/run/t8321/Test_2.scala
new file mode 100644
index 0000000..82ec323
--- /dev/null
+++ b/test/files/run/t8321/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+  println(Macros.foo.x)
+}
\ No newline at end of file
diff --git a/test/files/run/t8395.scala b/test/files/run/t8395.scala
new file mode 100644
index 0000000..2570550
--- /dev/null
+++ b/test/files/run/t8395.scala
@@ -0,0 +1,9 @@
+ object Test {
+  def baz(x: Object) = {
+    val s @ (_s: String) = x
+    x
+  }
+  def main(args: Array[String]) {
+    assert(baz("1") == "1")
+  }
+}
diff --git a/test/files/run/t8425.check b/test/files/run/t8425.check
new file mode 100644
index 0000000..8379fa0
--- /dev/null
+++ b/test/files/run/t8425.check
@@ -0,0 +1 @@
+List(fresh$macro$1, $macro$2)
diff --git a/test/files/run/t8425/Macros_1.scala b/test/files/run/t8425/Macros_1.scala
new file mode 100644
index 0000000..71a9651
--- /dev/null
+++ b/test/files/run/t8425/Macros_1.scala
@@ -0,0 +1,12 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+  def foo: Unit = macro impl
+  def impl(c: Context) = {
+    import c.universe._
+    val test1 = c.freshName()
+    val test2 = c.freshName("$")
+    q"println(List($test1, $test2))"
+  }
+}
\ No newline at end of file
diff --git a/test/files/run/t8425/Test_2.scala b/test/files/run/t8425/Test_2.scala
new file mode 100644
index 0000000..acfddae
--- /dev/null
+++ b/test/files/run/t8425/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+  Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/t8428.scala b/test/files/run/t8428.scala
new file mode 100644
index 0000000..7da1207
--- /dev/null
+++ b/test/files/run/t8428.scala
@@ -0,0 +1,12 @@
+object Test extends App {
+  val xs = List.tabulate(4)(List(_))
+  val i = xs.map(_.iterator).reduce { (a,b) =>
+    a.hasNext
+    a ++ b
+  }
+
+  val r1 = i.toList
+  val r2 = xs.flatten.toList
+
+  assert(r1 == r2, r1)
+}
diff --git a/test/files/run/t8437.check b/test/files/run/t8437.check
new file mode 100644
index 0000000..fd3c81a
--- /dev/null
+++ b/test/files/run/t8437.check
@@ -0,0 +1,2 @@
+5
+5
diff --git a/test/files/run/t8437/Macros_1.scala b/test/files/run/t8437/Macros_1.scala
new file mode 100644
index 0000000..6286ea2
--- /dev/null
+++ b/test/files/run/t8437/Macros_1.scala
@@ -0,0 +1,18 @@
+import scala.language.experimental.macros
+import scala.reflect.macros._
+
+abstract class AbstractBundle(val c: blackbox.Context) {
+  import c.Expr
+  import c.universe._
+  def foo: Expr[Int] = Expr[Int](q"5")
+}
+
+class ConcreteBundle(override val c: blackbox.Context) extends AbstractBundle(c) {
+  import c.Expr
+  val bar: Expr[Int] = foo
+}
+
+object InvokeBundle {
+  def foo: Int = macro ConcreteBundle.foo // nope
+  def bar: Int = macro ConcreteBundle.bar // yep
+}
\ No newline at end of file
diff --git a/test/files/run/t8437/Test_2.scala b/test/files/run/t8437/Test_2.scala
new file mode 100644
index 0000000..47bb84a
--- /dev/null
+++ b/test/files/run/t8437/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+  println(InvokeBundle.foo)
+  println(InvokeBundle.bar)
+}
\ No newline at end of file
diff --git a/test/files/run/t874.scala b/test/files/run/t874.scala
index 41d124f..b077859 100644
--- a/test/files/run/t874.scala
+++ b/test/files/run/t874.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ reflectiveCalls }
 object Test {
   abstract class Base {
     val U: {
@@ -5,7 +7,7 @@ object Test {
     }
     U("xyz")(2)
   }
-  class Mix extends Base { 
+  class Mix extends Base {
     case class U[A](x1: A)(x2: Int) {
       Console.println("U created with "+x1+" and "+x2)
     }
diff --git a/test/files/run/t920.scala b/test/files/run/t920.scala
index 1e12e6b..6a7f122 100644
--- a/test/files/run/t920.scala
+++ b/test/files/run/t920.scala
@@ -7,7 +7,7 @@ object Test {
       trait Foo extends Test.Foo0 {
         def foo : B.this.type = B.this;
       }
-      class baz extends Baz with Foo { 
+      class baz extends Baz with Foo {
         override def toString = "baz"
       }
       Console.println(new baz);
diff --git a/test/files/run/tailcalls.check b/test/files/run/tailcalls.check
index f123bc8..7607921 100644
--- a/test/files/run/tailcalls.check
+++ b/test/files/run/tailcalls.check
@@ -1,3 +1,4 @@
+#partest !avian
 test Object   .f was successful
 test Final    .f was successful
 test Class    .f raised exception java.lang.StackOverflowError
@@ -51,3 +52,57 @@ test TailCall.b2 was successful
 test FancyTailCalls.tcTryLocal was successful
 test FancyTailCalls.differentInstance was successful
 test PolyObject.tramp was successful
+#partest avian
+test Object   .f was successful
+test Final    .f was successful
+test Class    .f was successful
+test SubClass .f was successful
+test Sealed   .f was successful
+test SubSealed.f was successful
+
+test O      .f was successful
+test c      .f was successful
+test O.O    .f was successful
+test O.c    .f was successful
+test c.O    .f was successful
+test c.c    .f was successful
+test O.O.O  .f was successful
+test O.O.c  .f was successful
+test O.c.O  .f was successful
+test O.c.c  .f was successful
+test c.O.O  .f was successful
+test c.O.c  .f was successful
+test c.c.O  .f was successful
+test c.c.c  .f was successful
+test O.O.O.O.f was successful
+test O.O.O.c.f was successful
+test O.O.c.O.f was successful
+test O.O.c.c.f was successful
+test O.c.O.O.f was successful
+test O.c.O.c.f was successful
+test O.c.c.O.f was successful
+test O.c.c.c.f was successful
+test c.O.O.O.f was successful
+test c.O.O.c.f was successful
+test c.O.c.O.f was successful
+test c.O.c.c.f was successful
+test c.c.O.O.f was successful
+test c.c.O.c.f was successful
+test c.c.c.O.f was successful
+test c.c.c.c.f was successful
+
+test TailCall.f1 was successful
+test TailCall.f2 was successful
+test TailCall.f3 was successful
+test TailCall.g1 was successful
+test TailCall.g2 was successful
+test TailCall.g3 was successful
+test TailCall.h1 was successful
+
+test NonTailCall.f1 0 1 2 was successful
+test NonTailCall.f2
+test TailCall.b1 was successful
+test TailCall.b2 was successful
+test FancyTailCalls.tcTryLocal was successful
+test FancyTailCalls.differentInstance was successful
+test PolyObject.tramp was successful
diff --git a/test/files/run/tailcalls.scala b/test/files/run/tailcalls.scala
index 04a1a8b..1653b14 100644
--- a/test/files/run/tailcalls.scala
+++ b/test/files/run/tailcalls.scala
@@ -169,7 +169,7 @@ class TailCall[S](s: S) {
     aux[T](x, y);
   }
   final def g3[T](x: Int, y: Int, zs: List[T]): Int = {
-    def aux[U](n: Int, v: Int, ls: List[Pair[T,U]]): Int =
+    def aux[U](n: Int, v: Int, ls: List[Tuple2[T,U]]): Int =
       if (n == 0) v else aux(n - 1, v - 1, ls);
     aux(x, y, Nil);
   }
@@ -194,10 +194,10 @@ object FancyTailCalls {
 }
 
 object PolyObject extends App {
-  def tramp[A](x: Int): Int = 
+  def tramp[A](x: Int): Int =
     if (x > 0)
       tramp[A](x - 1)
-    else 
+    else
       0
 }
 
@@ -233,7 +233,7 @@ class NonTailCall {
     if (n == 0) 0
     else f2(n - 1)
   }
-  
+
 }
 
 //############################################################################
@@ -273,7 +273,7 @@ object Test {
     }
     println
   }
-    
+
   def check_overflow(name: String, closure: => Int) {
     print("test " + name)
     try {
@@ -295,7 +295,7 @@ object Test {
     while (!stop) {
       try {
         calibrator.f(n, n);
-        if (n >= Int.MaxValue / 2) error("calibration failure");
+        if (n >= Int.MaxValue / 2) sys.error("calibration failure");
         n = 2 * n;
       } catch {
         case exception: compat.Platform.StackOverflowError => stop = true
@@ -307,7 +307,7 @@ object Test {
   def main(args: Array[String]) {
     // compute min and max iteration number
     val min = 16;
-    val max = calibrate;
+    val max = if (scala.tools.partest.utils.Properties.isAvian) 10000 else calibrate
 
     // test tail calls in different contexts
     val Final     = new Final()
@@ -367,7 +367,7 @@ object Test {
     check_success("TailCall.g3", TailCall.g3(max, max, Nil), 0)
     check_success("TailCall.h1", TailCall.h1(max, max     ), 0)
     println
-    
+
     val NonTailCall = new NonTailCall
     check_success("NonTailCall.f1", NonTailCall.f1(2), 0)
     check_overflow("NonTailCall.f2", NonTailCall.f2(max))
@@ -382,17 +382,30 @@ object Test {
   }
 
   // testing explicit tailcalls.
-  
+
   import scala.util.control.TailCalls._
 
   def isEven(xs: List[Int]): TailRec[Boolean] =
     if (xs.isEmpty) done(true) else tailcall(isOdd(xs.tail))
 
   def isOdd(xs: List[Int]): TailRec[Boolean] =
-    if (xs.isEmpty) done(false) else tailcall(isEven(xs.tail)) 
+    if (xs.isEmpty) done(false) else tailcall(isEven(xs.tail))
+
+  def fib(n: Int): TailRec[Int] =
+    if (n < 2) done(n) else for {
+      x <- tailcall(fib(n - 1))
+      y <- tailcall(fib(n - 2))
+    } yield (x + y)
+
+  def rec(n: Int): TailRec[Int] =
+    if (n == 1) done(n) else for {
+      x <- tailcall(rec(n - 1))
+    } yield x
 
   assert(isEven((1 to 100000).toList).result)
-  
+  //assert(fib(40).result == 102334155) // Commented out, as it takes a long time
+  assert(rec(100000).result == 1)
+
 }
 
 //############################################################################
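
The fib and rec additions above drive scala.util.control.TailCalls through its monadic interface; a self-contained sketch of the same API (done, tailcall, result, and for-comprehension sequencing), with made-up names, assuming the 2.11 standard library:

    import scala.util.control.TailCalls._

    object TailCallsSketch {
      // Mutual recursion runs in constant stack space when every recursive
      // step is wrapped in tailcall and leaves return their value via done.
      def isEven(n: Int): TailRec[Boolean] =
        if (n == 0) done(true) else tailcall(isOdd(n - 1))
      def isOdd(n: Int): TailRec[Boolean] =
        if (n == 0) done(false) else tailcall(isEven(n - 1))

      // TailRec also has map/flatMap, so non-tail positions can be sequenced
      // with a for-comprehension, as the fib example in the patch does.
      def sum(n: Long): TailRec[Long] =
        if (n == 0L) done(0L) else for (rest <- tailcall(sum(n - 1))) yield n + rest

      def demo(): Unit = {
        println(isEven(100000).result) // true
        println(sum(100000L).result)   // 5000050000
      }
    }
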
diff --git a/test/files/run/takeAndDrop.scala b/test/files/run/takeAndDrop.scala
index 6e87838..8d2dff0 100644
--- a/test/files/run/takeAndDrop.scala
+++ b/test/files/run/takeAndDrop.scala
@@ -1,9 +1,9 @@
-object Test {  
+object Test {
   def main(args: Array[String]): Unit = {
     val range = 1 to 10
     val target = (3 to 8).toList
     val confirm = (xs: Seq[Int]) => assert(xs.toList == target, xs)
-    
+
     confirm(range drop 2 dropRight 2)
     confirm(range drop 1 dropRight 1 drop 1 dropRight 1)
     confirm(range take 8 drop 2)
diff --git a/test/files/run/tcpoly_monads.scala b/test/files/run/tcpoly_monads.scala
index cffbcc9..978f889 100644
--- a/test/files/run/tcpoly_monads.scala
+++ b/test/files/run/tcpoly_monads.scala
@@ -1,18 +1,21 @@
+
+import scala.language.{ higherKinds, implicitConversions }
+
 trait Monads {
   /**
    * class Monad m where
    *   (>>=)  :: m a -> (a -> m b) -> m b
    *   return :: a -> m a
    *
-   * MonadTC encodes the above Haskell type class, 
+   * MonadTC encodes the above Haskell type class,
    * an instance of MonadTC corresponds to a method dictionary.
    * (see http://lampwww.epfl.ch/~odersky/talks/wg2.8-boston06.pdf)
    *
    * Note that the identity (`this') of the method dictionary does not really correspond
-   * to the instance of m[x] (`self') that is `wrapped': e.g., unit does not use `self' (which 
+   * to the instance of m[x] (`self') that is `wrapped': e.g., unit does not use `self' (which
    * corresponds to the argument of the implicit conversion that encodes an instance of this type class)
    */
-  trait MonadTC[m[x], a] {      
+  trait MonadTC[m[x], a] {
     def unit[a](orig: a): m[a]
 
     // >>='s first argument comes from the implicit definition constructing this "method dictionary"
@@ -27,7 +30,7 @@ trait Monads {
  */
 trait OptionMonad extends Monads {
   // this implicit method encodes the Monad type class instance for Option
-  implicit def OptionInstOfMonad[a](self: Option[a]): MonadTC[Option, a] 
+  implicit def OptionInstOfMonad[a](self: Option[a]): MonadTC[Option, a]
     = new MonadTC[Option, a] {
         def unit[a](orig: a) = Some(orig)
         def >>=[b](fun: a => Option[b]): Option[b] = self match {
diff --git a/test/files/run/tcpoly_overriding.scala b/test/files/run/tcpoly_overriding.scala
index 86ba89f..32174ad 100644
--- a/test/files/run/tcpoly_overriding.scala
+++ b/test/files/run/tcpoly_overriding.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds }
+
 abstract class A[t[x]] {
   def b: t[Int]
 }
diff --git a/test/files/run/tcpoly_parseridioms.check b/test/files/run/tcpoly_parseridioms.check
index 5fff2ef..8bd0a08 100644
--- a/test/files/run/tcpoly_parseridioms.check
+++ b/test/files/run/tcpoly_parseridioms.check
@@ -1 +1,21 @@
+tcpoly_parseridioms.scala:18: warning: match may not be exhaustive.
+It would fail on the following input: ParseResult()
+        case Success(next, x) => b(next) match {
+                                  ^
+tcpoly_parseridioms.scala:17: warning: match may not be exhaustive.
+It would fail on the following input: ParseResult()
+      def apply(in: Input): ParseResult[Tuple2[T, U]] = a(in) match {
+                                                         ^
+tcpoly_parseridioms.scala:30: warning: match may not be exhaustive.
+It would fail on the following input: ParseResult()
+      case Failure(_, _) => b(in) match {
+                             ^
+tcpoly_parseridioms.scala:28: warning: match may not be exhaustive.
+It would fail on the following input: ParseResult()
+    def apply(in: Input): ParseResult[T] = a(in) match {
+                                            ^
+tcpoly_parseridioms.scala:39: warning: match may not be exhaustive.
+It would fail on the following input: ParseResult()
+    def apply(in: Input): ParseResult[U] = a(in) match {
+                                            ^
 Success(List(),Plus(1,2))
diff --git a/test/files/run/tcpoly_parseridioms.scala b/test/files/run/tcpoly_parseridioms.scala
index 634240e..d22f68b 100644
--- a/test/files/run/tcpoly_parseridioms.scala
+++ b/test/files/run/tcpoly_parseridioms.scala
@@ -1,22 +1,25 @@
+
+import scala.language.{ higherKinds, implicitConversions, postfixOps }
+
 trait Parsers {
   type Input = List[Char]
-  
+
   sealed class ParseResult[+t](val next: Input)
-    case class Success[+t](override val next: Input, result: t) extends ParseResult[t](next)  
+    case class Success[+t](override val next: Input, result: t) extends ParseResult[t](next)
     case class Failure(override val next: Input, msg: String) extends ParseResult[Nothing](next)
-  
+
   abstract class Parser[+t] {
     def apply(in: Input): ParseResult[t]
   }
 
   // sequence
-  def sq[T, U](a: => Parser[T], b: => Parser[U]): Parser[Pair[T, U]] =  new Parser[Pair[T, U]] {
-      def apply(in: Input): ParseResult[Pair[T, U]] = a(in) match {
+  def sq[T, U](a: => Parser[T], b: => Parser[U]): Parser[Tuple2[T, U]] =  new Parser[Tuple2[T, U]] {
+      def apply(in: Input): ParseResult[Tuple2[T, U]] = a(in) match {
         case Success(next, x) => b(next) match {
-          case Success(next2, y) => Success(next2, Pair(x,y))
+          case Success(next2, y) => Success(next2, (x,y))
           case Failure(_, msg) => Failure(in, msg)
         }
-        case Failure(_, msg) => Failure(in, msg)  
+        case Failure(_, msg) => Failure(in, msg)
       }
     }
 
@@ -38,20 +41,20 @@ trait Parsers {
       case Failure(n, msg) => Failure(n, msg)
     }
   }
-  
+
   def accept[T](c: Char, r: T): Parser[T] = new Parser[T] {
     def apply(in: Input) = in match {
       case c2 :: n if c2 == c => Success(n, r)
       case n => Failure(n, "expected "+c+" at the head of "+n)
     }
   }
-  
-  def apply_++[s, tt](fun: Parser[s => tt], arg: Parser[s]): Parser[tt] = lift[Pair[s=>tt, s], tt]({case Pair(f, a) => f(a)})(sq(fun, arg)) 
-    
+
+  def apply_++[s, tt](fun: Parser[s => tt], arg: Parser[s]): Parser[tt] = lift[Tuple2[s=>tt, s], tt]({case (f, a) => f(a)})(sq(fun, arg))
+
   def success[u](v: u): Parser[u] = new Parser[u] {
     def apply(in: Input) = Success(in, v)
   }
-    
+
 }
 
 trait Idioms {
@@ -61,21 +64,21 @@ trait Idioms {
     def pureMethod[a](name: String, x: a): idi[a] = pure(x) // hack for Mirrors: allow passing of method names
   }
 
-  class IdiomaticTarget[idi[x], idiom <: Idiom[idi], s](i: idiom, tgt: s) { 
+  class IdiomaticTarget[idi[x], idiom <: Idiom[idi], s](i: idiom, tgt: s) {
     def dot [t](fun: s => t, name: String) = new IdiomaticApp2[idi, idiom, t](i, i.liftedApply(i.pureMethod(name, fun))(i.pure(tgt)))
   } // TODO: `.` -->  java.lang.ClassFormatError: Illegal method name "." in class Idioms$Id$
 
-  class IdiomaticFunction[idi[x], idiom <: Idiom[idi], s, t](i: idiom, fun: s => t) { 
+  class IdiomaticFunction[idi[x], idiom <: Idiom[idi], s, t](i: idiom, fun: s => t) {
     def <| (a: idi[s]) = new IdiomaticApp[idi, idiom, t](i, i.liftedApply(i.pure(fun))(a))
   }
 
   class IdiomaticApp[idi[x], idiom <: Idiom[idi], x](i: idiom, a: idi[x]) {
     // where x <: s=>t -- TODO can this be expressed without generalised constraints?
     def <> [s, t](b: idi[s]) = new IdiomaticApp[idi, idiom, t](i, i.liftedApply(a.asInstanceOf[idi[s=>t]])(b))
-    
+
     def |> : idi[x] = a
   }
-  
+
   class IdiomaticApp2[idi[x], idiom <: Idiom[idi], x](i: idiom, a: idi[x]) extends IdiomaticApp[idi, idiom, x](i, a) {
     def <| [s, t](b: idi[s]) = <>[s,t](b)
   }
@@ -86,22 +89,22 @@ trait ParserIdioms extends Parsers with Idioms {
     def liftedApply[s, t](fun: Parser[s => t])(arg: Parser[s]): Parser[t] = apply_++(fun, arg)
     def pure[a](x: a): Parser[a] = success(x)
   }
-  
-  implicit def parserIdiomFun[s, t](fun: s=>t): IdiomaticFunction[Parser, ParserIdiom.type, s, t] = 
+
+  implicit def parserIdiomFun[s, t](fun: s=>t): IdiomaticFunction[Parser, ParserIdiom.type, s, t] =
     new IdiomaticFunction[Parser, ParserIdiom.type, s, t](ParserIdiom, fun)
-  implicit def parserIdiomTgt[s](tgt: s): IdiomaticTarget[Parser, ParserIdiom.type, s] = 
+  implicit def parserIdiomTgt[s](tgt: s): IdiomaticTarget[Parser, ParserIdiom.type, s] =
     new IdiomaticTarget[Parser, ParserIdiom.type, s](ParserIdiom, tgt)
-    
+
   trait Expr
   case class Plus(a: Int, b: Int) extends Expr
-  
+
   def num = or(accept('0', 0), or(accept('1', 1),accept('2', 2)))
-  
-  // TODO: how can parserIdiom(curry2(_)) be omitted? 
+
+  // TODO: how can parserIdiom(curry2(_)) be omitted?
   def expr: Parser[Expr] = parserIdiomFun(curry2(Plus)) <| num <> num |>
-   
+
   implicit def curry2[s,t,u](fun: (s, t)=>u)(a: s)(b: t) = fun(a, b)
-  implicit def curry3[r,s,t,u](fun: (r,s, t)=>u)(a: r)(b: s)(c: t) = fun(a, b, c)  
+  implicit def curry3[r,s,t,u](fun: (r,s, t)=>u)(a: r)(b: s)(c: t) = fun(a, b, c)
 }
 
 object Test extends ParserIdioms with App {
diff --git a/test/files/run/test-cpp.scala b/test/files/run/test-cpp.scala
index 5b3bc7b..4e00e72 100644
--- a/test/files/run/test-cpp.scala
+++ b/test/files/run/test-cpp.scala
@@ -3,25 +3,25 @@
  * in the copy-propagation performed before ClosureElimination.
  *
  * In the general case, the local variable 'l' is connected through
- * a alias chain with other local variables and at the end of the 
+ * an alias chain with other local variables and at the end of the
  * alias chain there may be a Value, call it 'v'.
  *
  * If 'v' is cheaper to access (it is a Deref(This) or Const(_)), then
- * replace the instruction to load it from the cheaper place. 
+ * replace the instruction to load it from the cheaper place.
  * Otherwise, we use the local variable at the end of the alias chain
  * instead of 'l'.
  */
 
-import scala.tools.partest.IcodeTest
+import scala.tools.partest.IcodeComparison
 
-object Test extends IcodeTest {
+object Test extends IcodeComparison {
   override def printIcodeAfterPhase = "dce"
 }
 
 import scala.util.Random._
 
-/** 
- * The example in the bug report (Issue-5321): an alias chain which store 
+/**
+ * The example in the bug report (Issue-5321): an alias chain which store
  * an Unknown. Should remove local variable 'y'.
  */
 object TestBugReport {
@@ -42,20 +42,20 @@ object TestSetterInline {
 }
 
 
-/** 
+/**
  * The access of the local variable 'y' should be replaced by the
  * constant.
- */ 
+ */
 object TestAliasChainConstat {
 
   def main(args: Array[String]): Unit = {
     val x = 2
     val y = x
-    println(y) 
+    println(y)
   }
 }
 
-/** 
+/**
  * At the end of the alias chain we have a reference to 'this'.
  * The local variables should be all discarded and replace by a
  * direct reference to this
@@ -72,7 +72,7 @@ class TestAliasChainDerefThis {
 /**
  * At the end of the alias chain, there is the value of a field.
  * The use of variable 'y' should be replaced by 'x', not by an access
- * to the field 'f' since it is more costly. 
+ * to the field 'f' since it is more costly.
  */
 object TestAliasChainDerefField {
   def f = nextInt
@@ -86,7 +86,7 @@ object TestAliasChainDerefField {
 
 
 /**
- * The first time 'println' is called, 'x' is replaced by 'y' 
+ * The first time 'println' is called, 'x' is replaced by 'y'
  * and the second time, 'y' is replaced by 'x'. But none of them
  * can be removed.
  */
diff --git a/test/files/run/toolbox_console_reporter.scala b/test/files/run/toolbox_console_reporter.scala
index d672ccb..ce28086 100644
--- a/test/files/run/toolbox_console_reporter.scala
+++ b/test/files/run/toolbox_console_reporter.scala
@@ -4,10 +4,10 @@ import scala.reflect.runtime.{currentMirror => cm}
 import scala.tools.reflect.{ToolBox, mkConsoleFrontEnd}
 
 object Test extends App {
-  val oldErr = Console.err;
-  val baos = new java.io.ByteArrayOutputStream();
-  Console.setErr(new java.io.PrintStream(baos));
-  try {
+  //val oldErr = Console.err;
+  val baos = new java.io.ByteArrayOutputStream()
+  val errs = new java.io.PrintStream(baos)
+  (Console withErr errs) {
     val toolbox = cm.mkToolBox(frontEnd = mkConsoleFrontEnd(), options = "-deprecation")
     toolbox.eval(reify{
       object Utils {
@@ -18,12 +18,11 @@ object Test extends App {
       Utils.foo
     }.tree)
     println("============compiler console=============")
+    errs.flush()
     println(baos.toString);
     println("=========================================")
     println("============compiler messages============")
     toolbox.frontEnd.infos.foreach(println(_))
     println("=========================================")
-  } finally {
-    Console.setErr(oldErr);
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/toolbox_current_run_compiles.scala b/test/files/run/toolbox_current_run_compiles.scala
index b48c998..6ec7074 100644
--- a/test/files/run/toolbox_current_run_compiles.scala
+++ b/test/files/run/toolbox_current_run_compiles.scala
@@ -1,5 +1,5 @@
 package pkg {
-  import scala.reflect.macros.Context
+  import scala.reflect.macros.blackbox.Context
   import scala.language.experimental.macros
 
   object Macros {
@@ -9,7 +9,7 @@ package pkg {
       val g = c.universe.asInstanceOf[scala.tools.nsc.Global]
       c.Expr[Boolean](Literal(Constant(g.currentRun.compiles(sym.asInstanceOf[g.Symbol]))))
     }
-    def compiles[T] = macro impl[T]
+    def compiles[T]: Boolean = macro impl[T]
   }
 }
 
diff --git a/test/files/run/toolbox_parse_package.check b/test/files/run/toolbox_parse_package.check
new file mode 100644
index 0000000..4646598
--- /dev/null
+++ b/test/files/run/toolbox_parse_package.check
@@ -0,0 +1,8 @@
+package foo {
+  object bar extends scala.AnyRef {
+    def <init>() = {
+      super.<init>();
+      ()
+    }
+  }
+}
diff --git a/test/files/run/toolbox_parse_package.scala b/test/files/run/toolbox_parse_package.scala
new file mode 100644
index 0000000..62412a5
--- /dev/null
+++ b/test/files/run/toolbox_parse_package.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+  val toolbox = cm.mkToolBox()
+  println(toolbox.parse("package foo { object bar }"))
+}
\ No newline at end of file
diff --git a/test/files/run/toolbox_rangepos.check b/test/files/run/toolbox_rangepos.check
deleted file mode 100644
index b536d3f..0000000
--- a/test/files/run/toolbox_rangepos.check
+++ /dev/null
@@ -1 +0,0 @@
-RangePosition(<toolbox>, 0, 2, 5)
diff --git a/test/files/run/toolbox_rangepos.scala b/test/files/run/toolbox_rangepos.scala
deleted file mode 100644
index 41fe6da..0000000
--- a/test/files/run/toolbox_rangepos.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-
-object Test extends App {
-  val toolbox = cm.mkToolBox(options = "-Yrangepos")
-  val tree = toolbox.parse("2 + 2")
-  println(tree.pos)
-}
diff --git a/test/files/run/toolbox_typecheck_implicitsdisabled.check b/test/files/run/toolbox_typecheck_implicitsdisabled.check
index db64e11..009ba65 100644
--- a/test/files/run/toolbox_typecheck_implicitsdisabled.check
+++ b/test/files/run/toolbox_typecheck_implicitsdisabled.check
@@ -1,5 +1,5 @@
 {
   import scala.Predef._;
-  scala.Predef.any2ArrowAssoc[Int](1).->[Int](2)
+  scala.Predef.ArrowAssoc[Int](1).->[Int](2)
 }
 scala.tools.reflect.ToolBoxError: reflective typecheck has failed: value -> is not a member of Int
diff --git a/test/files/run/toolbox_typecheck_implicitsdisabled.scala b/test/files/run/toolbox_typecheck_implicitsdisabled.scala
index 8a3d433..3fabdb3 100644
--- a/test/files/run/toolbox_typecheck_implicitsdisabled.scala
+++ b/test/files/run/toolbox_typecheck_implicitsdisabled.scala
@@ -7,18 +7,18 @@ object Test extends App {
   val toolbox = cm.mkToolBox()
 
   val tree1 = Block(List(
-    Import(Select(Ident(newTermName("scala")), newTermName("Predef")), List(ImportSelector(nme.WILDCARD, -1, null, -1)))),
-    Apply(Select(Literal(Constant(1)), newTermName("$minus$greater")), List(Literal(Constant(2))))
+    Import(Select(Ident(TermName("scala")), TermName("Predef")), List(ImportSelector(termNames.WILDCARD, -1, null, -1)))),
+    Apply(Select(Literal(Constant(1)), TermName("$minus$greater")), List(Literal(Constant(2))))
   )
-  val ttree1 = toolbox.typeCheck(tree1, withImplicitViewsDisabled = false)
+  val ttree1 = toolbox.typecheck(tree1, withImplicitViewsDisabled = false)
   println(ttree1)
 
   try {
     val tree2 = Block(List(
-      Import(Select(Ident(newTermName("scala")), newTermName("Predef")), List(ImportSelector(nme.WILDCARD, -1, null, -1)))),
-      Apply(Select(Literal(Constant(1)), newTermName("$minus$greater")), List(Literal(Constant(2))))
+      Import(Select(Ident(TermName("scala")), TermName("Predef")), List(ImportSelector(termNames.WILDCARD, -1, null, -1)))),
+      Apply(Select(Literal(Constant(1)), TermName("$minus$greater")), List(Literal(Constant(2))))
     )
-    val ttree2 = toolbox.typeCheck(tree2, withImplicitViewsDisabled = true)
+    val ttree2 = toolbox.typecheck(tree2, withImplicitViewsDisabled = true)
     println(ttree2)
   } catch {
     case ex: Throwable =>
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled.check b/test/files/run/toolbox_typecheck_macrosdisabled.check
index 688f379..62de375 100644
--- a/test/files/run/toolbox_typecheck_macrosdisabled.check
+++ b/test/files/run/toolbox_typecheck_macrosdisabled.check
@@ -1,8 +1,8 @@
-{
+({
   val $u: ru.type = ru;
   val $m: $u.Mirror = ru.runtimeMirror({
   final class $anon extends scala.AnyRef {
-    def <init>(): anonymous class $anon = {
+    def <init>(): <$anon: AnyRef> = {
       $anon.super.<init>();
       ()
     };
@@ -32,10 +32,10 @@
       def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
         val $u: U = $m$untyped.universe;
         val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
-        $u.ConstantType.apply($u.Constant.apply(2))
+        $u.internal.reificationSupport.ConstantType($u.Constant.apply(2))
       }
     };
     new $typecreator2()
   }))
-}
+}: ru.Expr[Int])
 ru.reify[Int](2)
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled.scala b/test/files/run/toolbox_typecheck_macrosdisabled.scala
index 51eb63f..5466cb7 100644
--- a/test/files/run/toolbox_typecheck_macrosdisabled.scala
+++ b/test/files/run/toolbox_typecheck_macrosdisabled.scala
@@ -2,6 +2,7 @@ import scala.reflect.runtime.universe._
 import scala.reflect.runtime.{universe => ru}
 import scala.reflect.runtime.{currentMirror => cm}
 import scala.tools.reflect.ToolBox
+import internal._
 
 // Note: If you're looking at this test and you don't know why, you may
 // have accidentally changed the way type tags reify.  If so, validate
@@ -10,16 +11,16 @@ import scala.tools.reflect.ToolBox
 object Test extends App {
   val toolbox = cm.mkToolBox()
   val rupkg = cm.staticModule("scala.reflect.runtime.package")
-  val rusym = build.selectTerm(rupkg, "universe")
-  val NullaryMethodType(rutpe) = rusym.typeSignature
-  val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
-  build.setTypeSignature(ru, rutpe)
+  val rusym = reificationSupport.selectTerm(rupkg, "universe")
+  val NullaryMethodType(rutpe) = rusym.info
+  val ru = reificationSupport.newFreeTerm("ru", scala.reflect.runtime.universe)
+  reificationSupport.setInfo(ru, rutpe)
 
-  val tree1 = Apply(Select(Ident(ru), newTermName("reify")), List(Literal(Constant(2))))
-  val ttree1 = toolbox.typeCheck(tree1, withMacrosDisabled = false)
+  val tree1 = Apply(Select(Ident(ru), TermName("reify")), List(Literal(Constant(2))))
+  val ttree1 = toolbox.typecheck(tree1, withMacrosDisabled = false)
   println(ttree1)
 
-  val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Literal(Constant(2))))
-  val ttree2 = toolbox.typeCheck(tree2, withMacrosDisabled = true)
+  val tree2 = Apply(Select(Ident(ru), TermName("reify")), List(Literal(Constant(2))))
+  val ttree2 = toolbox.typecheck(tree2, withMacrosDisabled = true)
   println(ttree2)
 }
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled2.check b/test/files/run/toolbox_typecheck_macrosdisabled2.check
index e7011d1..86f8950 100644
--- a/test/files/run/toolbox_typecheck_macrosdisabled2.check
+++ b/test/files/run/toolbox_typecheck_macrosdisabled2.check
@@ -1,8 +1,8 @@
-{
+({
   val $u: ru.type = ru;
   val $m: $u.Mirror = ru.runtimeMirror({
   final class $anon extends scala.AnyRef {
-    def <init>(): anonymous class $anon = {
+    def <init>(): <$anon: AnyRef> = {
       $anon.super.<init>();
       ()
     };
@@ -19,7 +19,7 @@
       def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
         val $u: U = $m$untyped.universe;
         val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
-        $u.Apply.apply($u.Select.apply($u.build.Ident($m.staticModule("scala.Array")), $u.newTermName("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
+        $u.Apply.apply($u.Select.apply($u.internal.reificationSupport.mkIdent($m.staticModule("scala.Array")), $u.TermName.apply("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
       }
     };
     new $treecreator1()
@@ -32,10 +32,10 @@
       def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
         val $u: U = $m$untyped.universe;
         val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
-        $u.TypeRef.apply($u.ThisType.apply($m.staticPackage("scala").asModule.moduleClass), $m.staticClass("scala.Array"), scala.collection.immutable.List.apply[$u.Type]($m.staticClass("scala.Int").asType.toTypeConstructor))
+        $u.internal.reificationSupport.TypeRef($u.internal.reificationSupport.ThisType($m.staticPackage("scala").asModule.moduleClass), $m.staticClass("scala.Array"), scala.collection.immutable.List.apply[$u.Type]($m.staticClass("scala.Int").asType.toTypeConstructor))
       }
     };
     new $typecreator2()
   }))
-}
+}: ru.Expr[Array[Int]])
 ru.reify[Array[Int]](scala.Array.apply(2))
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled2.scala b/test/files/run/toolbox_typecheck_macrosdisabled2.scala
index 74fd09d..606d3d4 100644
--- a/test/files/run/toolbox_typecheck_macrosdisabled2.scala
+++ b/test/files/run/toolbox_typecheck_macrosdisabled2.scala
@@ -2,6 +2,7 @@ import scala.reflect.runtime.universe._
 import scala.reflect.runtime.{universe => ru}
 import scala.reflect.runtime.{currentMirror => cm}
 import scala.tools.reflect.ToolBox
+import internal._
 
 // Note: If you're looking at this test and you don't know why, you may
 // have accidentally changed the way type tags reify.  If so, validate
@@ -10,16 +11,16 @@ import scala.tools.reflect.ToolBox
 object Test extends App {
   val toolbox = cm.mkToolBox()
   val rupkg = cm.staticModule("scala.reflect.runtime.package")
-  val rusym = build.selectTerm(rupkg, "universe")
-  val NullaryMethodType(rutpe) = rusym.typeSignature
-  val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
-  build.setTypeSignature(ru, rutpe)
+  val rusym = reificationSupport.selectTerm(rupkg, "universe")
+  val NullaryMethodType(rutpe) = rusym.info
+  val ru = reificationSupport.newFreeTerm("ru", scala.reflect.runtime.universe)
+  reificationSupport.setInfo(ru, rutpe)
 
-  val tree1 = Apply(Select(Ident(ru), newTermName("reify")), List(Apply(Select(Ident(newTermName("scala")), newTermName("Array")), List(Literal(Constant(2))))))
-  val ttree1 = toolbox.typeCheck(tree1, withMacrosDisabled = false)
+  val tree1 = Apply(Select(Ident(ru), TermName("reify")), List(Apply(Select(Ident(TermName("scala")), TermName("Array")), List(Literal(Constant(2))))))
+  val ttree1 = toolbox.typecheck(tree1, withMacrosDisabled = false)
   println(ttree1)
 
-  val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Apply(Select(Ident(newTermName("scala")), newTermName("Array")), List(Literal(Constant(2))))))
-  val ttree2 = toolbox.typeCheck(tree2, withMacrosDisabled = true)
+  val tree2 = Apply(Select(Ident(ru), TermName("reify")), List(Apply(Select(Ident(TermName("scala")), TermName("Array")), List(Literal(Constant(2))))))
+  val ttree2 = toolbox.typecheck(tree2, withMacrosDisabled = true)
   println(ttree2)
 }
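
The renames running through the toolbox tests above (typeCheck to typecheck, newTermName to TermName, build to internal.reificationSupport) follow the 2.11 reflection API; a minimal sketch of the updated ToolBox round trip, with a made-up snippet and object name:

    import scala.reflect.runtime.{currentMirror => cm}
    import scala.tools.reflect.ToolBox

    object ToolBoxSketch {
      def demo(): Unit = {
        val tb = cm.mkToolBox()
        val tree = tb.parse("List(1, 2, 3).map(_ + 1)")
        // typecheck (lower-case c) replaces the deprecated typeCheck
        val typed = tb.typecheck(tree)
        println(typed.tpe)     // List[Int]
        println(tb.eval(tree)) // List(2, 3, 4)
      }
    }
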
diff --git a/test/files/run/tpeCache-tyconCache.check b/test/files/run/tpeCache-tyconCache.check
new file mode 100644
index 0000000..a892f54
--- /dev/null
+++ b/test/files/run/tpeCache-tyconCache.check
@@ -0,0 +1,19 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'.          **
+** scala.tools.nsc._ has been imported      **
+** global._, definitions._ also imported    **
+** Try  :help, :vals, power.<tab>           **
+
+scala> 
+
+scala> AnyRefClass.tpe eq AnyRefClass.typeConstructor
+res0: Boolean = true
+
+scala> AnyRefClass.tpe eq AnyRefClass.typeConstructor
+res1: Boolean = true
+
+scala> 
diff --git a/test/files/run/tpeCache-tyconCache.scala b/test/files/run/tpeCache-tyconCache.scala
new file mode 100644
index 0000000..f907167
--- /dev/null
+++ b/test/files/run/tpeCache-tyconCache.scala
@@ -0,0 +1,10 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+  override def code = """
+:power
+
+AnyRefClass.tpe eq AnyRefClass.typeConstructor
+AnyRefClass.tpe eq AnyRefClass.typeConstructor
+  """.trim
+}
diff --git a/test/files/run/trait-renaming/A_1.scala b/test/files/run/trait-renaming/A_1.scala
index 2c3d4f5..d0fab7b 100644
--- a/test/files/run/trait-renaming/A_1.scala
+++ b/test/files/run/trait-renaming/A_1.scala
@@ -9,7 +9,7 @@ package bippy {
       }
       new B with C { }
     }
-    
+
     def g = Class.forName("bippy.A$B$1$class")
   }
 }
diff --git a/test/files/run/transform.scala b/test/files/run/transform.scala
index 5cc1c49..d73155c 100644
--- a/test/files/run/transform.scala
+++ b/test/files/run/transform.scala
@@ -1,6 +1,6 @@
 object Test {
-  val x = 1 to 10 toBuffer
-  
+  val x = (1 to 10).toBuffer
+
   def main(args: Array[String]): Unit = {
     x transform (_ * 2)
     assert(x.sum == (1 to 10).sum * 2)
diff --git a/test/files/run/transpose.scala b/test/files/run/transpose.scala
index 3bea74b..2761a24 100644
--- a/test/files/run/transpose.scala
+++ b/test/files/run/transpose.scala
@@ -1,8 +1,8 @@
 object Test {
-  def wrap[T >: Null](body: => T) = 
+  def wrap[T >: Null](body: => T) =
     try body
     catch { case _: IllegalArgumentException => null }
-  
+
   def main(args: Array[String]): Unit = {
     assert(wrap(Nil.transpose) == Nil)
     assert(wrap(List(List(1, 2), List(1)).transpose) == null)
diff --git a/test/files/run/triemap-hash.scala b/test/files/run/triemap-hash.scala
index 7f19997..f991381 100644
--- a/test/files/run/triemap-hash.scala
+++ b/test/files/run/triemap-hash.scala
@@ -6,41 +6,41 @@ import util.hashing.Hashing
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     hashing()
     equality()
   }
-  
+
   def hashing() {
     import collection._
-    
+
     val tm = new concurrent.TrieMap[String, String](Hashing.fromFunction(x => x.length + x(0).toInt), Equiv.universal)
     tm.put("a", "b")
     tm.put("c", "d")
-    
+
     assert(tm("a") == "b")
     assert(tm("c") == "d")
-    
+
     for (i <- 0 until 1000) tm(i.toString) = i.toString
     for (i <- 0 until 1000) assert(tm(i.toString) == i.toString)
   }
-  
+
   def equality() {
     import collection._
-    
+
     val tm = new concurrent.TrieMap[String, String](Hashing.fromFunction(x => x(0).toInt), Equiv.fromFunction(_(0) == _(0)))
     tm.put("a", "b")
     tm.put("a1", "d")
     tm.put("b", "c")
-    
+
     assert(tm("a") == "d", tm)
     assert(tm("b") == "c", tm)
-    
+
     for (i <- 0 until 1000) tm(i.toString) = i.toString
     assert(tm.size == 12, tm)
     assert(tm("0") == "0", tm)
     for (i <- 1 to 9) assert(tm(i.toString) == i.toString + "99", tm)
   }
-  
+
 }
diff --git a/test/files/run/triple-quoted-expr.scala b/test/files/run/triple-quoted-expr.scala
index 0b30946..6d91ac5 100644
--- a/test/files/run/triple-quoted-expr.scala
+++ b/test/files/run/triple-quoted-expr.scala
@@ -1,18 +1,18 @@
 class A {
   def f1 = {
     val x = 5
-  
+
 """
 hi"""
   }
   def f2 = {
     val x = 5
-  
+
     """hi"""
-  }  
+  }
   def f3 = {
     val x = 5
-  
+
     "\nhi"
   }
 }
diff --git a/test/files/run/try-2.check b/test/files/run/try-2.check
index 6c4a024..987d346 100644
--- a/test/files/run/try-2.check
+++ b/test/files/run/try-2.check
@@ -1,3 +1,6 @@
+try-2.scala:41: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+      10;
+      ^
 exception happened
 
 Nothin
diff --git a/test/files/run/try-2.scala b/test/files/run/try-2.scala
index 677f0b4..b55977b 100644
--- a/test/files/run/try-2.scala
+++ b/test/files/run/try-2.scala
@@ -7,47 +7,47 @@
 object Test {
 
 
-  def tryAllUnit: Unit = 
+  def tryAllUnit: Unit =
     try {
       throw new Error();
     }
     catch {
-      case _ => Console.println("exception happened\n");
+      case _: Throwable => Console.println("exception happened\n");
     }
 
-  def tryUnitAll: Unit = 
+  def tryUnitAll: Unit =
     try {
       Console.println("Nothin");
     } catch {
-      case _ => error("Bad, bad, lama!");
+      case _: Throwable => sys.error("Bad, bad, lama!");
     }
 
-  def tryAllAll: Unit = 
+  def tryAllAll: Unit =
     try {
       throw new Error();
     } catch {
-      case _ => error("Bad, bad, lama!");
+      case _: Throwable => sys.error("Bad, bad, lama!");
     }
 
-  def tryUnitUnit: Unit = 
+  def tryUnitUnit: Unit =
     try {
       Console.println("Nothin");
     } catch {
-      case _ => Console.println("Nothin");
+      case _: Throwable => Console.println("Nothin");
     }
 
-  def tryIntUnit: Unit = 
+  def tryIntUnit: Unit =
     try {
       10;
     } catch {
-      case _ => Console.println("Huh?");
+      case _: Throwable => Console.println("Huh?");
     }
 
 
   def execute(f: => Unit) = try {
     f;
   } catch {
-    case _ => ();
+    case _: Throwable => ();
   }
 
 
@@ -55,7 +55,7 @@ object Test {
     execute(tryAllUnit);
     execute(tryUnitAll);
     execute(tryAllAll);
-    execute(tryUnitUnit); 
+    execute(tryUnitUnit);
     execute(tryIntUnit);
  }
 }
diff --git a/test/files/run/try-catch-unify.scala b/test/files/run/try-catch-unify.scala
index 8cb14d0..151e549 100644
--- a/test/files/run/try-catch-unify.scala
+++ b/test/files/run/try-catch-unify.scala
@@ -9,7 +9,7 @@ object Test {
     try {
       catching(classOf[NumberFormatException]) withTry (sys.error("O NOES"))
     } catch {
-       case t => println(t.getMessage)
+       case t: Throwable => println(t.getMessage)
     }
     println(nonFatalCatch withTry ("Hi".toDouble))
   }
diff --git a/test/files/run/try.check b/test/files/run/try.check
index 3983e26..f742ccb 100644
--- a/test/files/run/try.check
+++ b/test/files/run/try.check
@@ -1,3 +1,6 @@
+try.scala:65: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+      1+1;
+       ^
 1 + 1 = 2
 1 + 1 = 2
 1 + 1 = 2
diff --git a/test/files/run/try.scala b/test/files/run/try.scala
index ad3d606..a4fdfd7 100644
--- a/test/files/run/try.scala
+++ b/test/files/run/try.scala
@@ -17,8 +17,8 @@ object Test extends AnyRef with App {
     Console.println(
       (try { x } catch {
         case _: Error => 1;
-      }) 
-      +        
+      })
+      +
       (try { x } catch {
         case _: Error => 1;
       })
@@ -45,7 +45,7 @@ object Test extends AnyRef with App {
       instance = try {
         "" //new String();
       } catch {
-        case _ =>
+        case _: Throwable =>
           val cs = "aaa";
           if (cs.length() > 0) {
             "" //new String();
@@ -61,17 +61,17 @@ object Test extends AnyRef with App {
     Console.print("1 + 1 = ");
     try {
       if (true)
-        error("exit");
+        sys.error("exit");
       1+1;
       ()
     } catch {
-      case _ =>
+      case _: Throwable =>
         Console.println("2");
-        error("for good");
+        sys.error("for good");
     }
     Console.println("a");
   } catch {
-    case _ => ();
+    case _: Throwable => ();
   }
 
   class A {
@@ -95,7 +95,7 @@ object Test extends AnyRef with App {
      try {
        null
      } catch {
-       case _ => null
+       case _: Throwable => null
      }
 
     new AnyRef {
@@ -116,7 +116,7 @@ object Test extends AnyRef with App {
   }
 
 */
-    
+
   try1;
   try2;
   try3;
diff --git a/test/files/run/tuple-match.scala b/test/files/run/tuple-match.scala
index 57ba9cf..fcaefbf 100644
--- a/test/files/run/tuple-match.scala
+++ b/test/files/run/tuple-match.scala
@@ -1,7 +1,7 @@
 object Test {
   val FOUR = (-1, -2, -3, "bingo donkey vegas")
   val THREE = (-1, -2, -3)
-  
+
   def f(x: Any) = x match {
     case FOUR         => "FOUR"
     case (_, _, 3, _) => "4, #3"
@@ -13,7 +13,7 @@ object Test {
     case (_, 2)       => "2, #2"
     case _            => "default"
   }
-  
+
   def main(args: Array[String]): Unit = {
     println(f((1, 2, 3, 4)))
     println(f((1, 2, 30, 4)))
diff --git a/test/files/run/tuple-zipped.scala b/test/files/run/tuple-zipped.scala
index b197183..37ac529 100644
--- a/test/files/run/tuple-zipped.scala
+++ b/test/files/run/tuple-zipped.scala
@@ -1,3 +1,6 @@
+
+import scala.language.postfixOps
+
 object Test {
   val xs1 = List.range(1, 100)
   val xs2 = xs1.view
@@ -8,32 +11,31 @@ object Test {
   val as1 = 1 to 100 toArray
   val as2 = as1.view
   val as3 = as1 take 10
-  
+
   def xss1 = List[Seq[Int]](xs1, xs2, xs3, ss1, ss2, ss3, as1, as2, as3)
   def xss2 = List[Seq[Int]](xs1, xs2, xs3, ss3, as1, as2, as3)  // no infinities
   def xss3 = List[Seq[Int]](xs2, xs3, ss3, as1) // representative sampling
-  
+
   def main(args: Array[String]): Unit = {
     for (cc1 <- xss1 ; cc2 <- xss2) {
       val sum1 = (cc1, cc2).zipped map { case (x, y) => x + y } sum
       val sum2 = (cc1, cc2).zipped map (_ + _) sum
-      
+
       assert(sum1 == sum2)
     }
-    
+
     for (cc1 <- xss1 ; cc2 <- xss2 ; cc3 <- xss3) {
       val sum1 = (cc1, cc2, cc3).zipped map { case (x, y, z) => x + y + z } sum
       val sum2 = (cc1, cc2, cc3).zipped map (_ + _ + _) sum
-      
+
       assert(sum1 == sum2)
     }
-    
+
     assert((ss1, ss1).zipped exists ((x, y) => true))
     assert((ss1, ss1, ss1).zipped exists ((x, y, z) => true))
-    
+
     assert(!(ss1, ss2, 1 to 3).zipped.exists(_ + _ + _ > 100000))
     assert((1 to 3, ss1, ss2).zipped.forall(_ + _ + _ > 0))
     assert((ss1, 1 to 3, ss2).zipped.map(_ + _ + _).size == 3)
   }
 }
-  
diff --git a/test/files/run/tuples.scala b/test/files/run/tuples.scala
index 4854e36..a4ea9dd 100644
--- a/test/files/run/tuples.scala
+++ b/test/files/run/tuples.scala
@@ -6,6 +6,7 @@ object Test extends App {
   Console.println(xyz)
   xyz match {
     case (1, "abc", true) => Console.println("OK")
+    case _ => ???
   }
   def func(x: Int, y: String, z: Double) {
     Console.println("x = " + x + "; y = " + y + "; z = " + z);
diff --git a/test/files/run/type-currying.scala b/test/files/run/type-currying.scala
index f9764c6..e10f8fc 100644
--- a/test/files/run/type-currying.scala
+++ b/test/files/run/type-currying.scala
@@ -1,3 +1,6 @@
+
+
+import scala.language.{ higherKinds, reflectiveCalls }
 import scala.collection.{ mutable, immutable, generic }
 import generic.CanBuildFrom
 
@@ -33,10 +36,10 @@ object Test {
 
   val immutableBippy = Partial[immutable.HashMap].values[Int]
   def make[T](xs: T*) = immutableBippy[T] ++ xs.zipWithIndex
-  
+
   val n0 = Partial[immutable.HashMap][String][Int] ++ Seq(("a", 1))
   val n1 = Partial.apply[immutable.HashMap].apply[String].apply[Int] ++ Seq(("a", 1))
-  
+
   def main(args: Array[String]): Unit = {
     println(mutableBippy)
     make('a' to 'z': _*).toList.sorted foreach println
diff --git a/test/files/run/typealias_overriding.scala b/test/files/run/typealias_overriding.scala
index 8a2d131..4b6a003 100644
--- a/test/files/run/typealias_overriding.scala
+++ b/test/files/run/typealias_overriding.scala
@@ -1,21 +1,21 @@
 // this bug (http://scala-webapps.epfl.ch/bugtracking/bugs/displayItem.do?id=1065)
-// was caused by Uncurry not normalizing all the types 
+// was caused by Uncurry not normalizing all the types
 // (more specifically the argument/return types of an anonymous Function)
 object Test extends App {
   trait AddRemove {
     type TNode <: NodeImpl;
     trait NodeImpl;
-    
+
     object removing {
       type TNode = AddRemove.this.TNode;
       def printNode(node: TNode, f: TNode => String) = Console.println(f(node))
     }
   }
-  
+
   class Linked extends AddRemove {
     type TNode = Node // can also directly write `class Node extends super.NodeImpl' -- doesn't change the bug
     class Node extends super.NodeImpl { override def toString = "LinkedNode" }
-    
+
     removing.printNode(new Node, (x: removing.TNode) => x.toString) // make inference explicit, doesn't affect the bug
   }
 
diff --git a/test/files/pos/t7461.check b/test/files/run/typecheck.check
similarity index 100%
rename from test/files/pos/t7461.check
rename to test/files/run/typecheck.check
diff --git a/test/files/run/typecheck/Macros_1.scala b/test/files/run/typecheck/Macros_1.scala
new file mode 100644
index 0000000..ee1c8da
--- /dev/null
+++ b/test/files/run/typecheck/Macros_1.scala
@@ -0,0 +1,12 @@
+import scala.reflect.macros.whitebox._
+import scala.language.experimental.macros
+
+object Macros {
+  def impl(c: Context) = {
+    import c.universe._
+    c.typecheck(q"class C")
+    q"()"
+  }
+
+  def foo: Any = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/typecheck/Test_2.scala b/test/files/run/typecheck/Test_2.scala
new file mode 100644
index 0000000..01bf519
--- /dev/null
+++ b/test/files/run/typecheck/Test_2.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+  Macros.foo
+
+  val tb = cm.mkToolBox()
+  tb.typecheck(q"class C")
+}
\ No newline at end of file
diff --git a/test/files/run/typed-annotated/Macros_1.scala b/test/files/run/typed-annotated/Macros_1.scala
index dd18c63..4f0660d 100644
--- a/test/files/run/typed-annotated/Macros_1.scala
+++ b/test/files/run/typed-annotated/Macros_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
 import language.experimental.macros
 
 class ann extends scala.annotation.StaticAnnotation
@@ -6,8 +6,8 @@ class ann extends scala.annotation.StaticAnnotation
 object Macros {
   def impl(c: Context) = {
     import c.universe._
-    // val tpt = Annotated(Apply(Select(New(Ident(newTypeName("ann"))), nme.CONSTRUCTOR), List()), Ident(newTypeName("Int")))
-    val tpt = Annotated(Apply(Select(New(Ident(newTypeName("ann"))), nme.CONSTRUCTOR), List()), TypeTree(weakTypeOf[Int]))
+    // val tpt = Annotated(Apply(Select(New(Ident(newTypeName("ann"))), termNames.CONSTRUCTOR), List()), Ident(newTypeName("Int")))
+    val tpt = Annotated(Apply(Select(New(Ident(newTypeName("ann"))), termNames.CONSTRUCTOR), List()), TypeTree(weakTypeOf[Int]))
     c.Expr[Unit](Block(
       List(ValDef(Modifiers(), newTermName("x"), tpt, Literal(Constant(42)))),
       Apply(Ident(newTermName("println")), List(Ident(newTermName("x"))))))
diff --git a/test/files/run/typetags_symbolof_x.check b/test/files/run/typetags_symbolof_x.check
new file mode 100644
index 0000000..fd0e069
--- /dev/null
+++ b/test/files/run/typetags_symbolof_x.check
@@ -0,0 +1,6 @@
+class Int
+object C
+type T
+type Id
+class Nothing
+class Null
diff --git a/test/files/run/typetags_symbolof_x.scala b/test/files/run/typetags_symbolof_x.scala
new file mode 100644
index 0000000..333c4e7
--- /dev/null
+++ b/test/files/run/typetags_symbolof_x.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+
+class C
+object C
+
+object Test extends App {
+  type T = Int
+  type Id[X] = X
+  println(symbolOf[Int])
+  println(symbolOf[C.type])
+  println(symbolOf[T])
+  println(symbolOf[Id[_]])
+  println(symbolOf[Nothing])
+  println(symbolOf[Null])
+}
diff --git a/test/files/run/typetags_without_scala_reflect_manifest_lookup.check b/test/files/run/typetags_without_scala_reflect_manifest_lookup.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/files/run/unapply.check b/test/files/run/unapply.check
new file mode 100644
index 0000000..847e3b3
--- /dev/null
+++ b/test/files/run/unapply.check
@@ -0,0 +1,3 @@
+unapply.scala:57: warning: comparing values of types Null and Null using `==' will always yield true
+    assert(doMatch2(b) == null)
+                       ^
diff --git a/test/files/run/unapply.scala b/test/files/run/unapply.scala
index 90dd4fa..0c5a70e 100644
--- a/test/files/run/unapply.scala
+++ b/test/files/run/unapply.scala
@@ -22,7 +22,7 @@ object Faa {
   def unapply(x: Any): Option[String] = if(x.isInstanceOf[Bar]) Some(x.asInstanceOf[Bar].name) else None
 }
 object FaaPrecise {
-  def unapply(x: Bar): Option[String] = Some(x.name) 
+  def unapply(x: Bar): Option[String] = Some(x.name)
 }
 object FaaPreciseSome {
   def unapply(x: Bar) = Some(x.name)  // return type Some[String]
@@ -108,14 +108,14 @@ object Test1256 {
   class Sync {
     def unapply(scrut: Any): Boolean = false
   }
-  
+
   class Buffer {
     val Get = new Sync
     val jp: PartialFunction[Any, Any] = {
       case Get() =>
     }
   }
-  
+
   def run() {
     assert(!(new Buffer).jp.isDefinedAt(42))
   }
diff --git a/test/files/run/unittest_collection.check b/test/files/run/unittest_collection.check
new file mode 100644
index 0000000..844ca54
--- /dev/null
+++ b/test/files/run/unittest_collection.check
@@ -0,0 +1 @@
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
diff --git a/test/files/run/unittest_collection.scala b/test/files/run/unittest_collection.scala
index 3639b49..822e2b0 100644
--- a/test/files/run/unittest_collection.scala
+++ b/test/files/run/unittest_collection.scala
@@ -12,7 +12,7 @@ object Test {
   }
 
   def test(x: Buffer[String]) {
-    // testing method += 
+    // testing method +=
     x += "one"
     assert(x(0) == "one", "retrieving 'one'")
     assert(x.length == 1, "length A")
@@ -25,10 +25,10 @@ object Test {
 
     assert(x.length == 1, "length C")
 
-    try { x(1); sys.error("no exception for removed element") } 
+    try { x(1); sys.error("no exception for removed element") }
     catch { case i:IndexOutOfBoundsException => }
 
-    try { x.remove(1); sys.error("no exception for removed element") } 
+    try { x.remove(1); sys.error("no exception for removed element") }
     catch { case i:IndexOutOfBoundsException => }
 
     x += "two2"
diff --git a/test/files/run/unittest_io.scala b/test/files/run/unittest_io.scala
deleted file mode 100644
index 2cadb9b..0000000
--- a/test/files/run/unittest_io.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-object Test {
-
-  def main(args: Array[String]) {
-    UTF8Tests.run()
-    SourceTest.run()
-  }
-
-  object UTF8Tests {
-    def decode(ch: Int) = new String(Array(ch), 0, 1).getBytes("UTF-8")
-
-    def run() {
-      assert(new String( decode(0x004D), "utf8") == new String(Array(0x004D.asInstanceOf[Char])))
-      assert(new String( decode(0x0430), "utf8") == new String(Array(0x0430.asInstanceOf[Char])))
-      assert(new String( decode(0x4E8C), "utf8") == new String(Array(0x4E8C.asInstanceOf[Char])))
-      assert(new String(decode(0x10302), "utf8") == new String(Array(0xD800.asInstanceOf[Char],
-                                                                     0xDF02.asInstanceOf[Char])))
-      // a client
-      val test = "{\"a\":\"\\u0022\"}"
-      val expected = "a" -> "\""
-
-      val parsed = scala.util.parsing.json.JSON.parseFull(test)
-      val result = parsed == Some(Map(expected))
-      if(result)
-        assert(result)
-      else {
-        Console.println(parsed); assert(result)
-      }
-    }
-  }
-
-  object SourceTest {
-    def run() {
-      val s = "Here is a test string"
-      val f = io.Source.fromBytes(s.getBytes("utf-8"))
-      val b = new collection.mutable.ArrayBuffer[Char]()
-      f.copyToBuffer(b)
-      assert(s == new String(b.toArray))
-    }
-  }
-}
diff --git a/test/files/run/unittest_iterator.scala b/test/files/run/unittest_iterator.scala
index 89ccdb9..21dc64c 100644
--- a/test/files/run/unittest_iterator.scala
+++ b/test/files/run/unittest_iterator.scala
@@ -1,5 +1,5 @@
 // Some iterator grouped/sliding unit tests
-object Test {  
+object Test {
   def it = (1 to 10).iterator
   def assertThat[T](expectedLength: Int, expectedLast: Seq[T])(it: Iterator[Seq[T]]) {
     val xs = it.toList
@@ -7,15 +7,15 @@ object Test {
     assert(xs.size == expectedLength, fail("expected length " + expectedLength))
     assert(xs.last == expectedLast, fail("expected last " + expectedLast))
   }
-  
+
   def main(args: Array[String]): Unit = {
     val itSum = it.toStream.sum
     for (i <- it) {
       // sum of the groups == sum of the original
       val thisSum = ((it grouped i) map (_.sum)).toStream.sum
-      assert(thisSum == itSum, thisSum + " != " + itSum) 
+      assert(thisSum == itSum, thisSum + " != " + itSum)
     }
-    
+
     // grouped
     assertThat(4, List(10)) { it grouped 3 }
     assertThat(3, List(7, 8, 9)) { it grouped 3 withPartial false }
@@ -32,11 +32,11 @@ object Test {
     assertThat(1, (1 to 8).toList) { it.sliding(8, 8) withPartial false }
     assertThat(2, List(9, 10, -1, -1, -1)) { it.sliding(5, 8) withPadding -1 }
     assertThat(1, (1 to 5).toList) { it.sliding(5, 8) withPartial false }
-    
+
     // larger step than window
     assertThat(5, List(9)) { it.sliding(1, 2) }
     assertThat(3, List(9, 10)) { it.sliding(2, 4) }
-    
+
     // make sure it throws past the end
     val thrown = try {
       val it = List(1,2,3).sliding(2)
diff --git a/test/files/run/unreachable.scala b/test/files/run/unreachable.scala
new file mode 100644
index 0000000..bb907c9
--- /dev/null
+++ b/test/files/run/unreachable.scala
@@ -0,0 +1,128 @@
+import scala.util.Random.nextInt
+import scala.sys.error
+
+object Test extends App {
+  def unreachableNormalExit: Int = {
+    return 42
+    0
+  }
+
+  def unreachableIf: Int = {
+    return 42
+    if (nextInt % 2 == 0)
+      0
+    else
+      1
+  }
+
+  def unreachableIfBranches: Int = {
+    if (nextInt % 2 == 0)
+      return 42
+    else
+      return 42
+
+    return 0
+  }
+
+  def unreachableOneLegIf: Int = {
+    if (nextInt % 2 == 0)
+      return 42
+
+    return 42
+  }
+
+  def unreachableLeftBranch: Int = {
+    val result = if (nextInt % 2 == 0)
+      return 42
+    else
+      42
+
+    return result
+  }
+
+  def unreachableRightBranch: Int = {
+    val result = if (nextInt % 2 == 0)
+      42
+    else
+      return 42
+
+    return result
+  }
+
+  def unreachableTryCatchFinally: Int = {
+    return 42
+    try {
+      return 0
+    } catch {
+      case x: Throwable => return 1
+    } finally {
+      return 2
+    }
+    return 3
+  }
+
+  def unreachableAfterTry: Int = {
+    try {
+      return 42
+    } catch {
+      case x: Throwable => return 2
+    }
+    return 3
+  }
+
+  def unreachableAfterCatch: Int = {
+    try {
+      error("haha")
+    } catch {
+      case x: Throwable => return 42
+    }
+    return 3
+  }
+
+  def unreachableAfterFinally: Int = {
+    try {
+      return 1
+    } catch {
+      case x: Throwable => return 2
+    } finally {
+      return 42
+    }
+    return 3
+  }
+
+  def unreachableSwitch: Int = {
+  	return 42
+    val x = nextInt % 2
+    x match {
+      case 0 => return 0
+      case 1 => return 1
+      case -1 => return 2
+    }
+    3
+  }
+
+  def unreachableAfterSwitch: Int = {
+    val x = nextInt % 2
+    x match {
+      case 0 => return 42
+      case 1 => return 41 + x
+      case -1 => return 43 + x
+    }
+    2
+  }
+
+  def check(f: Int) = assert(f == 42, s"Expected 42 but got $f")
+
+  check(unreachableNormalExit)
+  check(unreachableIf)
+  check(unreachableIfBranches)
+  check(unreachableOneLegIf)
+  check(unreachableLeftBranch)
+  check(unreachableRightBranch)
+  check(unreachableTryCatchFinally)
+  check(unreachableAfterTry)
+  check(unreachableAfterCatch)
+  check(unreachableAfterFinally)
+  check(unreachableSwitch)
+  check(unreachableAfterSwitch)
+}
diff --git a/test/files/run/value-class-extractor-2.check b/test/files/run/value-class-extractor-2.check
new file mode 100644
index 0000000..5903b99
--- /dev/null
+++ b/test/files/run/value-class-extractor-2.check
@@ -0,0 +1,8 @@
+String
+List
+Int
+Something else
+String
+List
+Int
+Something else
diff --git a/test/files/run/value-class-extractor-2.scala b/test/files/run/value-class-extractor-2.scala
new file mode 100644
index 0000000..d776c35
--- /dev/null
+++ b/test/files/run/value-class-extractor-2.scala
@@ -0,0 +1,108 @@
+final class Opt[+A >: Null](val value: A) extends AnyVal {
+  def get: A  = value
+  def isEmpty = value == null
+}
+object Opt {
+  final val None = new Opt[Null](null)
+  def apply[A >: Null](value: A): Opt[A] = if (value == null) None else new Opt[A](value)
+}
+
+object ValueOpt {
+  // public java.lang.String unapply(java.lang.Object);
+  //        0: aload_1
+  //        1: instanceof    #16                 // class java/lang/String
+  //        4: ifeq          21
+  //        7: getstatic     #21                 // Field Opt$.MODULE$:LOpt$;
+  //       10: astore_2
+  //       11: ldc           #23                 // String String
+  //       13: checkcast     #16                 // class java/lang/String
+  //       16: astore        5
+  //       18: goto          71
+  //       21: aload_1
+  //       22: instanceof    #25                 // class scala/collection/immutable/List
+  //       25: ifeq          42
+  //       28: getstatic     #21                 // Field Opt$.MODULE$:LOpt$;
+  //       31: astore_3
+  //       32: ldc           #27                 // String List
+  //       34: checkcast     #16                 // class java/lang/String
+  //       37: astore        5
+  //       39: goto          71
+  //       42: aload_1
+  //       43: instanceof    #29                 // class java/lang/Integer
+  //       46: ifeq          64
+  //       49: getstatic     #21                 // Field Opt$.MODULE$:LOpt$;
+  //       52: astore        4
+  //       54: ldc           #31                 // String Int
+  //       56: checkcast     #16                 // class java/lang/String
+  //       59: astore        5
+  //       61: goto          71
+  //       64: getstatic     #21                 // Field Opt$.MODULE$:LOpt$;
+  //       67: pop
+  //       68: aconst_null
+  //       69: astore        5
+  //       71: aload         5
+  //       73: areturn
+  def unapply(x: Any): Opt[String] = x match {
+    case _: String  => Opt("String")
+    case _: List[_] => Opt("List")
+    case _: Int     => Opt("Int")
+    case _          => Opt.None
+  }
+}
+object RegularOpt {
+  // public scala.Option<java.lang.String> unapply(java.lang.Object);
+  //        0: aload_1
+  //        1: instanceof    #16                 // class java/lang/String
+  //        4: ifeq          20
+  //        7: new           #18                 // class scala/Some
+  //       10: dup
+  //       11: ldc           #20                 // String String
+  //       13: invokespecial #23                 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+  //       16: astore_2
+  //       17: goto          64
+  //       20: aload_1
+  //       21: instanceof    #25                 // class scala/collection/immutable/List
+  //       24: ifeq          40
+  //       27: new           #18                 // class scala/Some
+  //       30: dup
+  //       31: ldc           #27                 // String List
+  //       33: invokespecial #23                 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+  //       36: astore_2
+  //       37: goto          64
+  //       40: aload_1
+  //       41: instanceof    #29                 // class java/lang/Integer
+  //       44: ifeq          60
+  //       47: new           #18                 // class scala/Some
+  //       50: dup
+  //       51: ldc           #31                 // String Int
+  //       53: invokespecial #23                 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+  //       56: astore_2
+  //       57: goto          64
+  //       60: getstatic     #36                 // Field scala/None$.MODULE$:Lscala/None$;
+  //       63: astore_2
+  //       64: aload_2
+  //       65: areturn
+  def unapply(x: Any): Option[String] = x match {
+    case _: String  => Some("String")
+    case _: List[_] => Some("List")
+    case _: Int     => Some("Int")
+    case _          => None
+  }
+}
+
+object Test {
+  def f(x: Any) = x match {
+    case ValueOpt(s) => s
+    case _           => "Something else"
+  }
+  def g(x: Any) = x match {
+    case RegularOpt(s) => s
+    case _             => "Something else"
+  }
+  val xs = List("abc", Nil, 5, Test)
+
+  def main(args: Array[String]): Unit = {
+    xs map f foreach println
+    xs map g foreach println
+  }
+}
diff --git a/test/files/run/value-class-extractor-seq.check b/test/files/run/value-class-extractor-seq.check
new file mode 100644
index 0000000..84552a7
--- /dev/null
+++ b/test/files/run/value-class-extractor-seq.check
@@ -0,0 +1,3 @@
+Bip(1, 2, 3)
+Bip(1, 2, c @ Array(3, 4, 5): _*)
+class [I
diff --git a/test/files/run/value-class-extractor-seq.scala b/test/files/run/value-class-extractor-seq.scala
new file mode 100644
index 0000000..f17a531
--- /dev/null
+++ b/test/files/run/value-class-extractor-seq.scala
@@ -0,0 +1,59 @@
+import scala.runtime.ScalaRunTime.stringOf
+
+final class ArrayOpt[T](val xs: Array[T]) extends AnyVal {
+  def isEmpty = xs == null
+  def get = xs
+}
+
+object Bip {
+  def mkInts(xs: Array[Short]) = xs map (_.toInt)
+  def unapplySeq(x: Any): ArrayOpt[Int] = x match {
+    case xs: Array[Int]   => new ArrayOpt(xs)
+    case xs: Array[Short] => new ArrayOpt(mkInts(xs))
+    case _                => new ArrayOpt(null)
+  }
+  // public int[] unapplySeq(java.lang.Object);
+  //      0: aload_1
+  //      1: astore_2
+  //      2: aload_2
+  //      3: instanceof    #52                 // class "[I"
+  //      6: ifeq          20
+  //      9: aload_2
+  //     10: checkcast     #52                 // class "[I"
+  //     13: astore_3
+  //     14: aload_3
+  //     15: astore        4
+  //     17: goto          47
+  //     20: aload_2
+  //     21: instanceof    #58                 // class "[S"
+  //     24: ifeq          44
+  //     27: aload_2
+  //     28: checkcast     #58                 // class "[S"
+  //     31: astore        5
+  //     33: aload_0
+  //     34: aload         5
+  //     36: invokevirtual #60                 // Method mkInts:([S)[I
+  //     39: astore        4
+  //     41: goto          47
+  //     44: aconst_null
+  //     45: astore        4
+  //     47: aload         4
+  //     49: areturn
+}
+
+object Test {
+  def f(x: Any) = x match {
+    case Bip(a, b, c)      => s"Bip($a, $b, $c)"
+    case Bip(a, b, c @ _*) => s"Bip($a, $b, c @ ${stringOf(c)}: _*)"
+    case _                 => "" + x.getClass
+  }
+
+  def main(args: Array[String]): Unit = {
+    println(f(Array[Int](1,2,3)))
+    println(f(Array[Int](1,2,3,4,5)))
+    println(f(Array[Int](1)))
+  }
+  // Bip(1, 2, 3)
+  // Bip(1, 2, c @ [I@782be20e: _*)
+  // class [I
+}
diff --git a/test/files/run/value-class-extractor.check b/test/files/run/value-class-extractor.check
new file mode 100644
index 0000000..e164471
--- /dev/null
+++ b/test/files/run/value-class-extractor.check
@@ -0,0 +1,9 @@
+'a'
+'b'
+'c'
+NoChar
+Some(a)
+Some(b)
+Some(c)
+None
+9
diff --git a/test/files/run/value-class-extractor.scala b/test/files/run/value-class-extractor.scala
new file mode 100644
index 0000000..3eaffa0
--- /dev/null
+++ b/test/files/run/value-class-extractor.scala
@@ -0,0 +1,91 @@
+final class NonNullChar(val get: Char) extends AnyVal {
+  def isEmpty = get == 0.toChar
+  override def toString = if (isEmpty) "NoChar" else s"'$get'"
+}
+object NonNullChar {
+  @inline final val None = new NonNullChar(0.toChar)
+}
+
+final class SomeProduct extends Product3[String, Int, List[String]] {
+  def canEqual(x: Any) = x.isInstanceOf[SomeProduct]
+  def _1 = "abc"
+  def _2 = 5
+  def _3 = List("bippy")
+  def isEmpty = false
+  def get = this
+}
+object SomeProduct {
+  def unapply(x: SomeProduct) = x
+}
+
+object Test {
+  def prod(x: SomeProduct): Int = x match {
+    case SomeProduct(x, y, z) => x.length + y + z.length
+    case _                    => -1
+  }
+
+  def f(x: Char): NonNullChar = x match {
+    case 'a' => new NonNullChar('a')
+    case 'b' => new NonNullChar('b')
+    case 'c' => new NonNullChar('c')
+    case _   => NonNullChar.None
+  }
+  // public char f(char);
+  //        0: iload_1
+  //        1: tableswitch   { // 97 to 99
+  //                     97: 47
+  //                     98: 42
+  //                     99: 37
+  //                default: 28
+  //           }
+  //       28: getstatic     #19                 // Field NonNullChar$.MODULE$:LNonNullChar$;
+  //       31: invokevirtual #23                 // Method NonNullChar$.None:()C
+  //       34: goto          49
+  //       37: bipush        99
+  //       39: goto          49
+  //       42: bipush        98
+  //       44: goto          49
+  //       47: bipush        97
+  //       49: ireturn
+  def g(x: Char): Option[Char] = x match {
+    case 'a' => Some('a')
+    case 'b' => Some('b')
+    case 'c' => Some('c')
+    case _   => None
+  }
+  // public scala.Option<java.lang.Object> g(char);
+  //        0: iload_1
+  //        1: tableswitch   { // 97 to 99
+  //                     97: 64
+  //                     98: 49
+  //                     99: 34
+  //                default: 28
+  //           }
+  //       28: getstatic     #33                 // Field scala/None$.MODULE$:Lscala/None$;
+  //       31: goto          76
+  //       34: new           #35                 // class scala/Some
+  //       37: dup
+  //       38: bipush        99
+  //       40: invokestatic  #41                 // Method scala/runtime/BoxesRunTime.boxToCharacter:(C)Ljava/lang/Character;
+  //       43: invokespecial #44                 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+  //       46: goto          76
+  //       49: new           #35                 // class scala/Some
+  //       52: dup
+  //       53: bipush        98
+  //       55: invokestatic  #41                 // Method scala/runtime/BoxesRunTime.boxToCharacter:(C)Ljava/lang/Character;
+  //       58: invokespecial #44                 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+  //       61: goto          76
+  //       64: new           #35                 // class scala/Some
+  //       67: dup
+  //       68: bipush        97
+  //       70: invokestatic  #41                 // Method scala/runtime/BoxesRunTime.boxToCharacter:(C)Ljava/lang/Character;
+  //       73: invokespecial #44                 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+  //       76: areturn
+  def main(args: Array[String]): Unit = {
+    "abcd" foreach (ch => println(f(ch)))
+    "abcd" foreach (ch => println(g(ch)))
+    println(prod(new SomeProduct))
+  }
+}
+
+
diff --git a/test/files/run/value-class-partial-func-depmet.scala b/test/files/run/value-class-partial-func-depmet.scala
new file mode 100644
index 0000000..12ff64e
--- /dev/null
+++ b/test/files/run/value-class-partial-func-depmet.scala
@@ -0,0 +1,24 @@
+class C
+class A { class C }
+
+object Test {
+  def main(args: Array[String]) {
+    val a = new A
+
+    new VC("").foo(a)
+  }
+}
+
+class VC(val a: Any) extends AnyVal {
+   def foo(a: A) = {
+     val pf: PartialFunction[a.C, Any] = { case x => x }
+     (pf: PartialFunction[Null, Any]).isDefinedAt(null)
+   }
+}
+
+// 2.11.0-M6
+// test/files/run/value-class-partial-func-depmet.scala:14: error: overriding method applyOrElse in trait PartialFunction of type [A1 <: a.C, B1 >: Any](x: A1, default: A1 => B1)B1;
+//  method applyOrElse has incompatible type
+//      val pf: PartialFunction[a.C, Any] = { case x => x }
+//                                          ^
+// one error found
diff --git a/test/files/run/valueclasses-classmanifest-basic.scala b/test/files/run/valueclasses-classmanifest-basic.scala
index c2aa08e..50addda 100644
--- a/test/files/run/valueclasses-classmanifest-basic.scala
+++ b/test/files/run/valueclasses-classmanifest-basic.scala
@@ -1,5 +1,6 @@
 class Foo(val x: Int) extends AnyVal
 
+@deprecated("Suppress warnings", since="2.11")
 object Test extends App {
   println(classManifest[Foo])
-}
\ No newline at end of file
+}
diff --git a/test/files/run/valueclasses-classmanifest-existential.scala b/test/files/run/valueclasses-classmanifest-existential.scala
index 11999df..6bcd7cf 100644
--- a/test/files/run/valueclasses-classmanifest-existential.scala
+++ b/test/files/run/valueclasses-classmanifest-existential.scala
@@ -1,5 +1,6 @@
 class Foo[T](val x: T) extends AnyVal
 
+@deprecated("Suppress warnings", since="2.11")
 object Test extends App {
   println(classManifest[Foo[_]])
-}
\ No newline at end of file
+}
diff --git a/test/files/run/valueclasses-classmanifest-generic.scala b/test/files/run/valueclasses-classmanifest-generic.scala
index 280152d..5efcaed 100644
--- a/test/files/run/valueclasses-classmanifest-generic.scala
+++ b/test/files/run/valueclasses-classmanifest-generic.scala
@@ -1,5 +1,6 @@
 class Foo[T](val x: T) extends AnyVal
 
+@deprecated("Suppress warnings", since="2.11")
 object Test extends App {
   println(classManifest[Foo[String]])
-}
\ No newline at end of file
+}
diff --git a/test/files/run/var-arity-class-symbol.scala b/test/files/run/var-arity-class-symbol.scala
new file mode 100644
index 0000000..29fe960
--- /dev/null
+++ b/test/files/run/var-arity-class-symbol.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._, definitions._
+object Test extends App {
+  // Tuples
+  assert(TupleClass.seq.size == 22)
+  assert(TupleClass(0) == NoSymbol)
+  assert(TupleClass(23) == NoSymbol)
+  assert((1 to 22).forall { i => TupleClass(i).name.toString == s"Tuple$i" })
+  // Functions
+  assert(FunctionClass.seq.size == 23)
+  assert(FunctionClass(-1) == NoSymbol)
+  assert(FunctionClass(23) == NoSymbol)
+  assert((0 to 22).forall { i => FunctionClass(i).name.toString == s"Function$i" })
+  // Products
+  assert(ProductClass.seq.size == 23)
+  assert(ProductClass(-1) == NoSymbol)
+  assert(ProductClass(0) == UnitClass)
+  assert(ProductClass(23) == NoSymbol)
+  assert((1 to 22).forall { i => ProductClass(i).name.toString == s"Product$i" })
+}
diff --git a/test/files/run/vector1.scala b/test/files/run/vector1.scala
index b37cfe8..d536183 100644
--- a/test/files/run/vector1.scala
+++ b/test/files/run/vector1.scala
@@ -9,7 +9,7 @@ import scala.collection.mutable.Builder
 
 
 object Test {
-  
+
     def vector(label: String, n: Int): Vector[String] = {
       val a = new VectorBuilder[String]
       for (i <- 0 until n)
@@ -67,7 +67,7 @@ object Test {
 
   def test1() = {
     println("===== test1 =====")
-    
+
     val N = 150000
     val a = vector("a", N)
     val b = vectorForward("b", N)
@@ -81,33 +81,33 @@ object Test {
     println("===== test2 =====")
 
     var a: Vector[String] = Vector.empty
-    
+
     val rand = new java.util.Random
-    
+
     val N = 150000
     var min = N/2//rand.nextInt(N)
     var max = min
-    
+
     val chunkLimit = 11
-    
+
     def nextChunkSize = 3 //rand.nextInt(chunkLimit)
-    
+
     def seqBack() = for (i <- 0 until Math.min(nextChunkSize, N-max)) { a = a :+ ("a"+max); max += 1 }
     def seqFront() = for (i <- 0 until Math.min(nextChunkSize, min)) { min -= 1; a = ("a"+min) +: a }
-    
+
     try {
-      
+
     while (min > 0 || max < N) {
       seqFront()
       seqBack()
     }
   } catch {
-    case ex =>
+    case ex: Throwable =>
       //println("----------------")
       //a.debug
       throw ex
   }
-    
+
     assertVector(a, "a", 0, N)
   }
 
@@ -122,14 +122,14 @@ object Test {
     val pos = scala.util.Random.shuffle(scala.collection.mutable.WrappedArray.make[Int](Array.tabulate[Int](N)(i => i)))
 
     var b = a
-    
+
     {
       var i = 0
       while (i < N) {
         b = b.updated(pos(i), "b"+(pos(i)))
         i += 1
       }
-      
+
       assertVector(b, "b", 0, N)
     }
 
diff --git a/test/files/run/verify-ctor.scala b/test/files/run/verify-ctor.scala
index 17e4f71..528d038 100644
--- a/test/files/run/verify-ctor.scala
+++ b/test/files/run/verify-ctor.scala
@@ -1,6 +1,6 @@
 class Foo(val str: String) {
   def this(arr: Array[Char]) = this({
-    if (arr.length == 0) exit(1)
+    if (arr.length == 0) sys.exit(1)
     new String(arr)
   })
 }
diff --git a/test/files/run/view-headoption.scala b/test/files/run/view-headoption.scala
index 629b974..659c7e6 100644
--- a/test/files/run/view-headoption.scala
+++ b/test/files/run/view-headoption.scala
@@ -2,13 +2,13 @@ object Test {
   val failer    = () => { println("fail") ; None }
   val succeeder = () => { println("success") ; Some(5) }
   val fs = List(failer, succeeder, failer, failer, succeeder, failer, failer, failer, succeeder)
-  
+
   def f0 = fs.view flatMap (f => f())
   def f1 = f0.headOption
   def f2 = f0.head
   def f3 = f0.lastOption
   def f4 = f0.last
-  
+
   def main(args: Array[String]): Unit = {
     println("f1: " + f1)
     println("f2: " + f2)
diff --git a/test/files/run/view-iterator-stream.scala b/test/files/run/view-iterator-stream.scala
index f91407f..0e0c42d 100644
--- a/test/files/run/view-iterator-stream.scala
+++ b/test/files/run/view-iterator-stream.scala
@@ -1,3 +1,6 @@
+
+import scala.language.postfixOps
+
 import scala.collection.{ mutable, immutable, generic }
 import collection.TraversableView
 
@@ -18,7 +21,7 @@ object Test {
     }
     def and(g: Perturber): Perturber =
       new Perturber(this.labels ++ g.labels, f andThen g.f)
-    
+
     override def toString = labels mkString " -> "
   }
   object Perturber {
@@ -39,14 +42,14 @@ object Test {
       "slice(%d, %d)".format(from, until),
       _.toTraversable.slice(from, until)
     )
-  
+
   val fns = List[Perturber](toV, toI, toS, toIS)
 
   def tds(n: Int): Perturber = p(drop(n), take(n / 2), slice(1, n / 4))
   def dts(n: Int): Perturber = p(take(n), drop(n / 2), slice(1, n / 4))
   def sdt(n: Int): Perturber = p(slice(n, n * 2), drop(n / 2), take(n / 4))
   def std(n: Int): Perturber = p(slice(n, n * 2), take(n / 2), drop(n / 4))
-  
+
   val transforms = (fns.permutations map (xs => p(xs take 3: _*))).toList.distinct
   def mkOps(n: Int) = List[Perturber](tds(n), dts(n), sdt(n), std(n))
   def runOps(n: Int) = {
@@ -60,7 +63,7 @@ object Test {
     }
     ()
   }
-  
+
   def main(args: Array[String]): Unit = {
     runOps(20)
   }
diff --git a/test/files/run/viewtest.scala b/test/files/run/viewtest.scala
index bb2c28d..581958e 100755
--- a/test/files/run/viewtest.scala
+++ b/test/files/run/viewtest.scala
@@ -25,8 +25,8 @@ object Test extends App {
 2.8 regression: CCE when zipping list projection with stream
 Reported by: 	szeiger 	Owned by: 	odersky
 Priority: 	normal 	Component: 	Standard Library
-Keywords: 	collections, zip 	Cc: 	
-Fixed in version: 	
+Keywords: 	collections, zip 	Cc:
+Fixed in version:
 Description
 
 Welcome to Scala version 2.8.0.r18784-b20090925021043 (Java HotSpot(TM) Client VM, Java 1.6.0_11).
diff --git a/test/files/run/virtpatmat_alts.check b/test/files/run/virtpatmat_alts.check
index 7a4ad0a..f39e292 100644
--- a/test/files/run/virtpatmat_alts.check
+++ b/test/files/run/virtpatmat_alts.check
@@ -1 +1,7 @@
+virtpatmat_alts.scala:5: warning: match may not be exhaustive.
+  (true, true) match {
+  ^
+virtpatmat_alts.scala:9: warning: match may not be exhaustive.
+  List(5) match {
+      ^
 OK 5
diff --git a/test/files/run/virtpatmat_alts.flags b/test/files/run/virtpatmat_alts.flags
deleted file mode 100644
index 3f5a310..0000000
--- a/test/files/run/virtpatmat_alts.flags
+++ /dev/null
@@ -1 +0,0 @@
-  -Xexperimental
diff --git a/test/files/run/virtpatmat_alts.scala b/test/files/run/virtpatmat_alts.scala
index b771752..d1dfa8a 100644
--- a/test/files/run/virtpatmat_alts.scala
+++ b/test/files/run/virtpatmat_alts.scala
@@ -1,3 +1,6 @@
+/*
+ * filter: It would fail on the following input
+ */
 object Test extends App {
   (true, true) match {
     case (true, true) | (false, false) => 1
@@ -9,4 +12,4 @@ object Test extends App {
     case 7 :: Nil  => println("FAILED")
     case Nil  => println("FAILED")
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/virtpatmat_apply.scala b/test/files/run/virtpatmat_apply.scala
index 34cb80e..b8776f4 100644
--- a/test/files/run/virtpatmat_apply.scala
+++ b/test/files/run/virtpatmat_apply.scala
@@ -1,5 +1,5 @@
 object Test extends App {
- List(1, 2, 3) match { 
+ List(1, 2, 3) match {
    case Nil => println("FAIL")
    case x :: y :: xs if xs.length == 2 => println("FAIL")
    case x :: y :: xs if xs.length == 1 => println("OK "+ y)
diff --git a/test/files/run/virtpatmat_casting.scala b/test/files/run/virtpatmat_casting.scala
index d970aba..a36daec 100644
--- a/test/files/run/virtpatmat_casting.scala
+++ b/test/files/run/virtpatmat_casting.scala
@@ -1,8 +1,9 @@
 object Test extends App {
-  println(List(1,2,3) match { 
-    case Nil => List(0) 
-// since the :: extractor's argument must be a ::, there has to be a cast before its unapply is invoked    
+  println(List(1,2,3) match {
+    case Nil => List(0)
+// since the :: extractor's argument must be a ::, there has to be a cast before its unapply is invoked
     case x :: y :: z :: a :: xs => xs ++ List(x)
     case x :: y :: z :: xs => xs ++ List(x)
+    case _ => List(0)
   })
 }
diff --git a/test/files/run/virtpatmat_literal.scala b/test/files/run/virtpatmat_literal.scala
index 5bd6b30..9a68d29 100644
--- a/test/files/run/virtpatmat_literal.scala
+++ b/test/files/run/virtpatmat_literal.scala
@@ -1,22 +1,22 @@
 object Test extends App {
  val a = 1
- 1 match { 
-   case 2 => println("FAILED") 
-   case 1 => println("OK") 
-   case `a` => println("FAILED") 
+ 1 match {
+   case 2 => println("FAILED")
+   case 1 => println("OK")
+   case `a` => println("FAILED")
  }
 
  val one = 1
- 1 match { 
-   case 2 => println("FAILED") 
-   case `one` => println("OK") 
-   case 1 => println("FAILED") 
+ 1 match {
+   case 2 => println("FAILED")
+   case `one` => println("OK")
+   case 1 => println("FAILED")
  }
 
- 1 match { 
-   case 2 => println("FAILED") 
-   case Test.one => println("OK") 
-   case 1 => println("FAILED") 
+ 1 match {
+   case 2 => println("FAILED")
+   case Test.one => println("OK")
+   case 1 => println("FAILED")
  }
 
 }
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_nested_lists.check b/test/files/run/virtpatmat_nested_lists.check
index d8263ee..ddf68ee 100644
--- a/test/files/run/virtpatmat_nested_lists.check
+++ b/test/files/run/virtpatmat_nested_lists.check
@@ -1 +1,4 @@
-2
\ No newline at end of file
+virtpatmat_nested_lists.scala:5: warning: match may not be exhaustive.
+  List(List(1), List(2)) match { case x :: (y :: Nil) :: Nil => println(y) }
+      ^
+2
diff --git a/test/files/run/virtpatmat_nested_lists.flags b/test/files/run/virtpatmat_nested_lists.flags
deleted file mode 100644
index 3f5a310..0000000
--- a/test/files/run/virtpatmat_nested_lists.flags
+++ /dev/null
@@ -1 +0,0 @@
-  -Xexperimental
diff --git a/test/files/run/virtpatmat_nested_lists.scala b/test/files/run/virtpatmat_nested_lists.scala
index fef74ce..d1aa68e 100644
--- a/test/files/run/virtpatmat_nested_lists.scala
+++ b/test/files/run/virtpatmat_nested_lists.scala
@@ -1,3 +1,6 @@
+/*
+ * filter: It would fail on the following input
+ */
 object Test extends App {
   List(List(1), List(2)) match { case x :: (y :: Nil) :: Nil => println(y) }
 }
diff --git a/test/files/run/virtpatmat_opt_sharing.check b/test/files/run/virtpatmat_opt_sharing.check
index d00491f..78ec61f 100644
--- a/test/files/run/virtpatmat_opt_sharing.check
+++ b/test/files/run/virtpatmat_opt_sharing.check
@@ -1 +1,4 @@
+virtpatmat_opt_sharing.scala:7: warning: match may not be exhaustive.
+    List(1, 3, 4, 7) match {
+        ^
 1
diff --git a/test/files/run/virtpatmat_opt_sharing.flags b/test/files/run/virtpatmat_opt_sharing.flags
deleted file mode 100644
index 3f5a310..0000000
--- a/test/files/run/virtpatmat_opt_sharing.flags
+++ /dev/null
@@ -1 +0,0 @@
-  -Xexperimental
diff --git a/test/files/run/virtpatmat_opt_sharing.scala b/test/files/run/virtpatmat_opt_sharing.scala
index 119e305..d2c42ca 100644
--- a/test/files/run/virtpatmat_opt_sharing.scala
+++ b/test/files/run/virtpatmat_opt_sharing.scala
@@ -1,3 +1,6 @@
+/*
+ * filter: It would fail on the following input
+ */
 object Test extends App {
   virtMatch()
   def virtMatch() = {
@@ -7,4 +10,4 @@ object Test extends App {
       case 1 :: 3 :: 4 :: 7 :: x => println(1)
     }
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/virtpatmat_staging.scala b/test/files/run/virtpatmat_staging.scala
index c17b450..d444829 100644
--- a/test/files/run/virtpatmat_staging.scala
+++ b/test/files/run/virtpatmat_staging.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds, implicitConversions }
+
 trait Intf {
  type Rep[+T]
  type M[+T] = Rep[Maybe[T]]
@@ -9,7 +12,7 @@ trait Intf {
 
    def zero: M[Nothing]
    def one[T](x: Rep[T]): M[T]
-   def guard[T](cond: Rep[Boolean], then: => Rep[T]): M[T]
+   def guard[T](cond: Rep[Boolean], dann: => Rep[T]): M[T]
    def isSuccess[T, U](x: Rep[T])(f: Rep[T] => M[U]): Rep[Boolean] // used for isDefinedAt
  }
 
@@ -33,7 +36,7 @@ trait Impl extends Intf {
    def runOrElse[T, U](in: Rep[T])(matcher: Rep[T] => M[U]): Rep[U] = ("runOrElse("+ in +", ?" + matcher("?") + ")")
    def zero: M[Nothing]                                             = "zero"
    def one[T](x: Rep[T]): M[T]                                      = "one("+x.toString+")"
-   def guard[T](cond: Rep[Boolean], then: => Rep[T]): M[T]          = "guard("+cond+","+then+")"
+   def guard[T](cond: Rep[Boolean], dann: => Rep[T]): M[T]          = s"guard($cond,$dann)"
    def isSuccess[T, U](x: Rep[T])(f: Rep[T] => M[U]): Rep[Boolean]  = ("isSuccess("+x+", ?" + f("?") + ")")
  }
 
diff --git a/test/files/run/virtpatmat_stringinterp.scala b/test/files/run/virtpatmat_stringinterp.scala
index 213712f..c6c951e 100644
--- a/test/files/run/virtpatmat_stringinterp.scala
+++ b/test/files/run/virtpatmat_stringinterp.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ implicitConversions }
+
 object Test extends App {
   case class Node(x: Int)
 
@@ -10,4 +13,4 @@ object Test extends App {
 
   val x: Node = Node(0)
   x match { case xml"""<foo arg=$a/>""" => println(a) }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/virtpatmat_switch.scala b/test/files/run/virtpatmat_switch.scala
index 1329c19..2f2e210 100644
--- a/test/files/run/virtpatmat_switch.scala
+++ b/test/files/run/virtpatmat_switch.scala
@@ -4,11 +4,11 @@ object Test extends App {
     case 1 => "one"
     case _ => "many"
   }
-  
+
   println(intSwitch(0))
   println(intSwitch(1))
   println(intSwitch(10))
-  
+
   def charSwitch(x: Char) = x match {
     case 'a' => "got a"
     case 'b' => "got b"
@@ -20,15 +20,15 @@ object Test extends App {
     case 'b' => "got b"
     case _ => "got some letter"
   }
-  
+
   println(charSwitch('a'))
   println(byteSwitch('b'))
   println(charSwitch('z'))
-  
+
   def implicitDefault(x: Int) = x match {
     case 0 => 0
   }
-  
+
   try {
     implicitDefault(5)
   } catch {
diff --git a/test/files/run/virtpatmat_try.scala b/test/files/run/virtpatmat_try.scala
index 46e67cb..dab2c89 100644
--- a/test/files/run/virtpatmat_try.scala
+++ b/test/files/run/virtpatmat_try.scala
@@ -8,7 +8,7 @@ object Test extends App {
   } catch { // this should emit a "catch-switch"
     case y: A => println(y.x)
     case (_ : A | _ : B)  => println("B")
-    case _ => println("other")
+    case _: Throwable => println("other")
   }
 
   try {
@@ -17,7 +17,7 @@ object Test extends App {
     // case A(x) => println(x)
     case y: A => println(y.x)
     case x@((_ : A) | (_ : B))  => println(x)
-    case _ => println("other")
+    case _: Throwable => println("other")
   }
 
  def simpleTry {
@@ -34,7 +34,7 @@ object Test extends App {
   }
 
   def wildcardTry {
-    try { bla } catch { case _ => bla }
+    try { bla } catch { case _: Throwable => bla }
   }
 
   def tryPlusFinally {
@@ -44,4 +44,4 @@ object Test extends App {
   def catchAndPassToLambda {
     try { bla } catch { case ex: Exception => val f = () => ex }
   }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/virtpatmat_typed.check b/test/files/run/virtpatmat_typed.check
index cec2740..b304fa5 100644
--- a/test/files/run/virtpatmat_typed.check
+++ b/test/files/run/virtpatmat_typed.check
@@ -1 +1,4 @@
+virtpatmat_typed.scala:5: warning: unreachable code
+   case x: String => println("FAILED")
+                            ^
 OK foo
diff --git a/test/files/run/virtpatmat_typed.scala b/test/files/run/virtpatmat_typed.scala
index a9863cc..cdd6d3c 100644
--- a/test/files/run/virtpatmat_typed.scala
+++ b/test/files/run/virtpatmat_typed.scala
@@ -1,7 +1,7 @@
 object Test extends App {
- ("foo": Any) match { 
-   case x: Int => println("FAILED") 
-   case x: String => println("OK "+ x) 
-   case x: String => println("FAILED") 
+ ("foo": Any) match {
+   case x: Int => println("FAILED")
+   case x: String => println("OK "+ x)
+   case x: String => println("FAILED")
  }
 }
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_unapply.scala b/test/files/run/virtpatmat_unapply.scala
index 6d7b4db..a6e71f3 100644
--- a/test/files/run/virtpatmat_unapply.scala
+++ b/test/files/run/virtpatmat_unapply.scala
@@ -6,12 +6,12 @@ object IntList {
 }
 
 object Test extends App {
-  IntList(1, IntList(2, NilIL)) match { 
+  IntList(1, IntList(2, NilIL)) match {
     case IntList(a1, IntList(a2, IntList(a3, y))) => println(a1 + a2 + a3)
     case IntList(x, y) => println(x)
   }
 
-  IntList(1, IntList(2, IntList(3, NilIL))) match { 
+  IntList(1, IntList(2, IntList(3, NilIL))) match {
     case IntList(a1, IntList(a2, IntList(a3, y))) => println(a1 + a2 + a3)
     case IntList(x, y) => println(x)
   }
@@ -20,12 +20,12 @@ object Test extends App {
 // ((x1: IntList) => IntList.unapply(x1).flatMap(((x4: (Int, IntList)) => IntList.unapply(x4._2).flatMap(((x5: (Int, IntList)) => IntList.unapply(x5._2).flatMap(((x6: (Int, IntList)) => implicitly[Predef.MatchingStrategy[Option]].success(Predef.println(x4._1.+(x5._1).+(x6._1))))))))).orElse(IntList.unapply(x1).flatMap(((x7: (Int, IntList)) => implicitly[scala.Predef.MatchingStrategy[Option]].success(Predef.println(x7._1))))).orElse(implicitly[scala.Predef.MatchingStrategy[Option]].fail)) [...]
 
 /*
-  ((x1: IntList) => 
-    IntList.this.unapply(x1).flatMap[Int](((x4: (Int, IntList)) => 
-      IntList.this.unapply(x4._2).flatMap[Int](((x5: (Int, IntList)) => 
-        IntList.this.unapply(x5._2).flatMap[Int](((x6: (Int, IntList)) => 
+  ((x1: IntList) =>
+    IntList.this.unapply(x1).flatMap[Int](((x4: (Int, IntList)) =>
+      IntList.this.unapply(x4._2).flatMap[Int](((x5: (Int, IntList)) =>
+        IntList.this.unapply(x5._2).flatMap[Int](((x6: (Int, IntList)) =>
           Predef.this.implicitly[scala.Predef.MatchingStrategy[Option]](scala.this.Predef.OptionMatching).success[Int](x6._1))))))).orElse[Int](
-    IntList.this.unapply(x1).flatMap[Int](((x7: (Int, IntList)) => 
+    IntList.this.unapply(x1).flatMap[Int](((x7: (Int, IntList)) =>
       Predef.this.implicitly[scala.Predef.MatchingStrategy[Option]](scala.this.Predef.OptionMatching).success[Int](x7._1)))).orElse[Int](
     Predef.this.implicitly[scala.Predef.MatchingStrategy[Option]](scala.this.Predef.OptionMatching).fail)
   ).apply(IntList.apply(1, null))
diff --git a/test/files/run/withIndex.scala b/test/files/run/withIndex.scala
index 910b1f1..ebf1941 100644
--- a/test/files/run/withIndex.scala
+++ b/test/files/run/withIndex.scala
@@ -11,7 +11,7 @@ object Test {
     Console.println(str.zipWithIndex.toList)
     assert {
       ary.zipWithIndex match {
-        case _: Array[Pair[_,_]] => true
+        case _: Array[Tuple2[_,_]] => true
         case _ => false
       }
     }
diff --git a/test/files/run/xml-attribute.check b/test/files/run/xml-attribute.check
deleted file mode 100644
index 3cfe377..0000000
--- a/test/files/run/xml-attribute.check
+++ /dev/null
@@ -1,12 +0,0 @@
-<t/>
-<t/>
-<t/>
-<t/>
-<t/>
-<t b="1" d="2"/>
-<t b="1" d="2"/>
-<t b="1" d="2"/>
-<t a="1" d="2"/>
-<t b="1" d="2"/>
-<t a="1" b="2" c="3"/>
-<t g="1" e="2" p:a="3" f:e="4" mgruhu:ji="5"/>
diff --git a/test/files/run/xml-attribute.scala b/test/files/run/xml-attribute.scala
deleted file mode 100644
index eb3956c..0000000
--- a/test/files/run/xml-attribute.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-import xml.Node
-
-object Test {
-  def main(args: Array[String]): Unit = {
-    val noAttr = <t/>
-    val attrNull = <t a={ null: String }/>
-    val attrNone = <t a={ None: Option[Seq[Node]] }/>
-    val preAttrNull = <t p:a={ null: String }/>
-    val preAttrNone = <t p:a={ None: Option[Seq[Node]] }/>
-    assert(noAttr == attrNull)
-    assert(noAttr == attrNone)
-    assert(noAttr == preAttrNull)
-    assert(noAttr == preAttrNone)
-
-    println(noAttr)
-    println(attrNull)
-    println(attrNone)
-    println(preAttrNull)
-    println(preAttrNone)
-
-    val xml1 = <t b="1" d="2"/>
-    val xml2 = <t a={ null: String } p:a={ null: String } b="1" c={ null: String } d="2"/>
-    val xml3 = <t b="1" c={ null: String } d="2" a={ null: String } p:a={ null: String }/>
-    assert(xml1 == xml2)
-    assert(xml1 == xml3)
-
-    println(xml1)
-    println(xml2)
-    println(xml3)
-
-    // Check if attribute order is retained
-    println(<t a="1" d="2"/>)
-    println(<t b="1" d="2"/>)
-    println(<t a="1" b="2" c="3"/>)
-    println(<t g="1" e="2" p:a="3" f:e="4" mgruhu:ji="5"/>)
-  }
-}
diff --git a/test/files/run/xml-loop-bug.scala b/test/files/run/xml-loop-bug.scala
deleted file mode 100644
index 6763767..0000000
--- a/test/files/run/xml-loop-bug.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Test {
-  def main(args: Array[String]): Unit = {
-    val sink = new java.io.PrintStream(new java.io.ByteArrayOutputStream())
-    Console setOut sink
-    Console setErr sink
-    scala.xml.parsing.ConstructingParser.fromSource(scala.io.Source.fromString("<!DOCTYPE xmeml SYSTEM> <xmeml> <sequence> </sequence> </xmeml> "), true).document.docElem 
-  }
-}
diff --git a/test/files/scalacheck/CheckCollections.scala b/test/files/scalacheck/CheckCollections.scala
new file mode 100644
index 0000000..329d505
--- /dev/null
+++ b/test/files/scalacheck/CheckCollections.scala
@@ -0,0 +1,52 @@
+import org.scalacheck.Properties
+import org.scalacheck.Prop._
+
+import scala.reflect.internal.util.Collections._
+
+object Test extends Properties("reflect.internal.util.Collections") {
+  def map2ConserveOld[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
+    if (xs.isEmpty || ys.isEmpty) xs
+    else {
+      val x1 = f(xs.head, ys.head)
+      val xs1 = map2Conserve(xs.tail, ys.tail)(f)
+      if ((x1 eq xs.head) && (xs1 eq xs.tail)) xs
+      else x1 :: xs1
+    }
+
+  val testfun: (String, Int) => String = { case(x, y) =>
+    x.toLowerCase + y.toString
+  }
+  val testid: (String, Int) => String = { case (x, y) => x }
+
+  val prop1_map2Conserve = forAll { (xs: List[String], ys: List[Int]) =>
+    val res = map2Conserve(xs, ys)(testid)
+    res eq xs
+  }
+
+  val prop2_map2Conserve = forAll { (xs: List[String], ys: List[Int]) =>
+    map2Conserve(xs, ys)(testid)  == map2ConserveOld(xs, ys)(testid) &&
+    map2Conserve(xs, ys)(testfun) == map2ConserveOld(xs, ys)(testfun)
+  }
+
+  def checkStackOverflow() {
+    var xs: List[String] = Nil
+    var ys: List[Int]    = Nil
+    for (i <- 0 until 250000) {
+        xs = "X" :: xs
+        ys = 1   :: ys
+    }
+    map2Conserve(xs, ys){ case(x, y) => x.toLowerCase + y.toString }
+  }
+
+
+  val tests = List(
+    ("map2Conserve(identity)",   prop1_map2Conserve),
+    ("map2Conserve == old impl", prop2_map2Conserve)
+  )
+
+  checkStackOverflow()
+
+  for {
+    (label, prop) <- tests
+  } property(label) = prop
+}
diff --git a/test/files/scalacheck/CheckEither.scala b/test/files/scalacheck/CheckEither.scala
index 4e8480d..48f732a 100644
--- a/test/files/scalacheck/CheckEither.scala
+++ b/test/files/scalacheck/CheckEither.scala
@@ -1,25 +1,23 @@
-import org.scalacheck.{ Arbitrary, ConsoleReporter, Prop, Properties }
+import org.scalacheck.{ Arbitrary, Prop, Properties }
 import org.scalacheck.Arbitrary.{arbitrary, arbThrowable}
 import org.scalacheck.Gen.oneOf
-import org.scalacheck.util.StdRand
 import org.scalacheck.Prop._
-import org.scalacheck.Test.{Params, check}
-import org.scalacheck.ConsoleReporter.testStatsEx
+import org.scalacheck.Test.check
 import Function.tupled
 
 object Test extends Properties("Either") {
-  implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] = 
+  implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] =
     Arbitrary[Either[X, Y]](oneOf(arbitrary[X].map(Left(_)), arbitrary[Y].map(Right(_))))
 
-  val prop_either1 = forAll((n: Int) => Left(n).fold(x => x, b => error("fail")) == n)
+  val prop_either1 = forAll((n: Int) => Left(n).fold(x => x, b => sys.error("fail")) == n)
 
-  val prop_either2 = forAll((n: Int) => Right(n).fold(a => error("fail"), x => x) == n)
+  val prop_either2 = forAll((n: Int) => Right(n).fold(a => sys.error("fail"), x => x) == n)
 
   val prop_swap = forAll((e: Either[Int, Int]) => e match {
     case Left(a) => e.swap.right.get == a
     case Right(b) => e.swap.left.get == b
   })
-  
+
   val prop_isLeftRight = forAll((e: Either[Int, Int]) => e.isLeft != e.isRight)
 
   object CheckLeftProjection {
@@ -35,7 +33,7 @@ object Test extends Properties("Either") {
 
     val prop_exists = forAll((e: Either[Int, Int]) =>
       e.left.exists(_ % 2 == 0) == (e.isLeft && e.left.get % 2 == 0))
-  
+
     val prop_flatMapLeftIdentity = forAll((e: Either[Int, Int], n: Int, s: String) => {
       def f(x: Int) = if(x % 2 == 0) Left(s) else Right(s)
       Left(n).left.flatMap(f(_)) == f(n)})
@@ -115,7 +113,7 @@ object Test extends Properties("Either") {
   }
 
   val prop_Either_left = forAll((n: Int) => Left(n).left.get == n)
-  
+
   val prop_Either_right = forAll((n: Int) => Right(n).right.get == n)
 
   val prop_Either_joinLeft = forAll((e: Either[Either[Int, Int], Int]) => e match {
@@ -128,12 +126,12 @@ object Test extends Properties("Either") {
     case Right(ee) => e.joinRight == ee
   })
 
-  val prop_Either_reduce = forAll((e: Either[Int, Int]) => 
+  val prop_Either_reduce = forAll((e: Either[Int, Int]) =>
     e.merge == (e match {
       case Left(a) => a
       case Right(a) => a
     }))
-    
+
   /** Hard to believe I'm "fixing" a test to reflect B before A ... */
   val prop_Either_cond = forAll((c: Boolean, a: Int, b: Int) =>
     Either.cond(c, a, b) == (if(c) Right(a) else Left(b)))
@@ -168,20 +166,14 @@ object Test extends Properties("Either") {
       ("Right.prop_seq", CheckRightProjection.prop_seq),
       ("Right.prop_option", CheckRightProjection.prop_option),
       ("prop_Either_left", prop_Either_left),
-      ("prop_Either_right", prop_Either_right),      
+      ("prop_Either_right", prop_Either_right),
       ("prop_Either_joinLeft", prop_Either_joinLeft),
-      ("prop_Either_joinRight", prop_Either_joinRight),      
-      ("prop_Either_reduce", prop_Either_reduce),      
+      ("prop_Either_joinRight", prop_Either_joinRight),
+      ("prop_Either_reduce", prop_Either_reduce),
       ("prop_Either_cond", prop_Either_cond)
     )
-  
+
   for ((label, prop) <- tests) {
     property(label) = prop
   }
-  
-  import org.scalacheck.{ Test => STest }
-  
-  def runTests() = {
-    STest.checkProperties(STest.Params(testCallback = ConsoleReporter(0)), this)
-  }
 }
diff --git a/test/files/scalacheck/Ctrie.scala b/test/files/scalacheck/Ctrie.scala
index 736bf93..714f1c3 100644
--- a/test/files/scalacheck/Ctrie.scala
+++ b/test/files/scalacheck/Ctrie.scala
@@ -17,21 +17,21 @@ case class Wrap(i: Int) {
 /** A check mainly oriented towards checking snapshot correctness.
  */
 object Test extends Properties("concurrent.TrieMap") {
-  
+
   /* generators */
-  
+
   val sizes = choose(0, 200000)
-  
+
   val threadCounts = choose(2, 16)
-  
+
   val threadCountsAndSizes = for {
     p <- threadCounts
     sz <- sizes
   } yield (p, sz);
-  
-  
+
+
   /* helpers */
-  
+
   def inParallel[T](totalThreads: Int)(body: Int => T): Seq[T] = {
     val threads = for (idx <- 0 until totalThreads) yield new Thread {
       setName("ParThread-" + idx)
@@ -44,11 +44,11 @@ object Test extends Properties("concurrent.TrieMap") {
         res
       }
     }
-    
+
     threads foreach (_.start())
     threads map (_.result)
   }
-  
+
   def spawn[T](body: =>T): { def get: T } = {
     val t = new Thread {
       setName("SpawnThread")
@@ -66,7 +66,7 @@ object Test extends Properties("concurrent.TrieMap") {
       }
     }
   }
-  
+
   def elementRange(threadIdx: Int, totalThreads: Int, totalElems: Int): Range = {
     val sz = totalElems
     val idx = threadIdx
@@ -76,7 +76,7 @@ object Test extends Properties("concurrent.TrieMap") {
     val end = start + elems
     (start until end)
   }
-  
+
   def hasGrown[K, V](last: Map[K, V], current: Map[K, V]) = {
     (last.size <= current.size) && {
       last forall {
@@ -84,7 +84,7 @@ object Test extends Properties("concurrent.TrieMap") {
       }
     }
   }
-  
+
   object err {
     var buffer = new StringBuilder
     def println(a: AnyRef) = buffer.append(a.toString).append("\n")
@@ -94,16 +94,16 @@ object Test extends Properties("concurrent.TrieMap") {
       clear()
     }
   }
-  
-  
+
+
   /* properties */
-  
+
   property("concurrent growing snapshots") = forAll(threadCounts, sizes) {
     (numThreads, numElems) =>
     val p = 3 //numThreads
     val sz = 102 //numElems
     val ct = new TrieMap[Wrap, Int]
-    
+
     // checker
     val checker = spawn {
       def check(last: Map[Wrap, Int], iterationsLeft: Int): Boolean = {
@@ -115,23 +115,23 @@ object Test extends Properties("concurrent.TrieMap") {
       }
       check(ct.readOnlySnapshot(), 500)
     }
-    
+
     // fillers
     inParallel(p) {
       idx =>
       elementRange(idx, p, sz) foreach (i => ct.update(Wrap(i), i))
     }
-    
+
     // wait for checker to finish
     val growing = true//checker.get
-    
+
     val ok = growing && ((0 until sz) forall {
       case i => ct.get(Wrap(i)) == Some(i)
     })
-    
+
     ok
   }
-  
+
   property("update") = forAll(sizes) {
     (n: Int) =>
     val ct = new TrieMap[Int, Int]
@@ -140,52 +140,52 @@ object Test extends Properties("concurrent.TrieMap") {
       case i => ct(i) == i
     }
   }
-  
+
   property("concurrent update") = forAll(threadCountsAndSizes) {
     case (p, sz) =>
       val ct = new TrieMap[Wrap, Int]
-      
+
       inParallel(p) {
         idx =>
         for (i <- elementRange(idx, p, sz)) ct(Wrap(i)) = i
       }
-      
+
       (0 until sz) forall {
         case i => ct(Wrap(i)) == i
       }
   }
-  
-  
+
+
   property("concurrent remove") = forAll(threadCounts, sizes) {
     (p, sz) =>
     val ct = new TrieMap[Wrap, Int]
     for (i <- 0 until sz) ct(Wrap(i)) = i
-    
+
     inParallel(p) {
       idx =>
       for (i <- elementRange(idx, p, sz)) ct.remove(Wrap(i))
     }
-    
+
     (0 until sz) forall {
       case i => ct.get(Wrap(i)) == None
     }
   }
-  
-  
+
+
   property("concurrent putIfAbsent") = forAll(threadCounts, sizes) {
     (p, sz) =>
     val ct = new TrieMap[Wrap, Int]
-    
+
     val results = inParallel(p) {
       idx =>
       elementRange(idx, p, sz) find (i => ct.putIfAbsent(Wrap(i), i) != None)
     }
-    
+
     (results forall (_ == None)) && ((0 until sz) forall {
       case i => ct.get(Wrap(i)) == Some(i)
     })
   }
-  
+
 }
 
 
diff --git a/test/files/scalacheck/HashTrieSplit.scala b/test/files/scalacheck/HashTrieSplit.scala
deleted file mode 100644
index 908c878..0000000
--- a/test/files/scalacheck/HashTrieSplit.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-
-
-
-
-
-import collection._
-
-
-
-
-// checks whether hash tries split their iterators correctly
-// even after some elements have been traversed
-object Test {
-  def main(args: Array[String]) {
-    doesSplitOk
-  }
-  
-  def doesSplitOk = {
-    val sz = 2000
-    var ht = new parallel.immutable.ParHashMap[Int, Int]
-    // println("creating trie")
-    for (i <- 0 until sz) ht += ((i + sz, i))
-    // println("created trie")
-    for (n <- 0 until (sz - 1)) {
-      // println("---------> n = " + n)
-      val pit = ht.splitter
-      val pit2 = ht.splitter
-      var i = 0
-      while (i < n) {
-        pit.next
-        pit2.next
-        i += 1
-      }
-      // println("splitting")
-      val pits = pit.split
-      val fst = pits(0).toSet
-      val snd = pits(1).toSet
-      val orig = pit2.toSet
-      if (orig.size != (fst.size + snd.size) || orig != (fst ++ snd)) {
-        println("Original: " + orig)
-        println("First: " + fst)
-        println("Second: " + snd)
-        assert(false)
-      }
-    }
-  }
-}
diff --git a/test/files/scalacheck/ReflectionExtractors.scala b/test/files/scalacheck/ReflectionExtractors.scala
new file mode 100644
index 0000000..a2615fe
--- /dev/null
+++ b/test/files/scalacheck/ReflectionExtractors.scala
@@ -0,0 +1,52 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+import scala.reflect.runtime.universe._
+import Flag._
+
+object Test extends Properties("reflection extractors") {
+
+  val genFlag = oneOf(
+    TRAIT, INTERFACE, MUTABLE, MACRO, DEFERRED, ABSTRACT, FINAL, SEALED,
+    IMPLICIT, LAZY, OVERRIDE, PRIVATE, PROTECTED, LOCAL, CASE, ABSOVERRIDE,
+    BYNAMEPARAM, PARAM, COVARIANT, CONTRAVARIANT, DEFAULTPARAM, PRESUPER,
+    DEFAULTINIT
+  )
+  val genModifiers =
+    for(flag <- genFlag; privateWithin <- genName)
+      yield Modifiers(flag, privateWithin, Nil)
+  val genTermName = for(name <- arbitrary[String]) yield TermName(name)
+  val genTypeName = for(name <- arbitrary[String]) yield TypeName(name)
+  val genName = oneOf(genTermName, genTypeName)
+
+  implicit val arbTermName: Arbitrary[TermName] = Arbitrary(genTermName)
+  implicit val arbTypeName: Arbitrary[TypeName] = Arbitrary(genTypeName)
+  implicit val arbName: Arbitrary[Name] = Arbitrary(genName)
+  implicit val arbMods: Arbitrary[Modifiers] = Arbitrary(genModifiers)
+
+  property("extract term name") = forAll { (name: TermName) =>
+    val TermName(s) = name
+    s == name.toString
+  }
+
+  property("extract type name") = forAll { (name: TypeName) =>
+    val TypeName(s) = name
+    s == name.toString
+  }
+
+  property("extract term or type name") = forAll { (name: Name) =>
+    name match {
+      case TermName(s) => s == name.toString
+      case TypeName(s) => s == name.toString
+    }
+  }
+
+  property("extract modifiers") = forAll { (mods: Modifiers) =>
+    val Modifiers(flags, priv, annots) = mods
+    flags == mods.flags &&
+    priv == mods.privateWithin &&
+    annots == mods.annotations
+  }
+}
\ No newline at end of file
diff --git a/test/files/scalacheck/Unrolled.scala b/test/files/scalacheck/Unrolled.scala
index 8067a44..34604b8 100644
--- a/test/files/scalacheck/Unrolled.scala
+++ b/test/files/scalacheck/Unrolled.scala
@@ -5,7 +5,7 @@ import Gen._
 import collection.mutable.UnrolledBuffer
 
 object Test extends Properties("UnrolledBuffer") {
-  
+
   property("concat size") = forAll { (l1: List[Int], l2: List[Int]) =>
     val u1 = new UnrolledBuffer[Int]
     u1 ++= l1
@@ -15,12 +15,12 @@ object Test extends Properties("UnrolledBuffer") {
     u1 concat u2
     totalsz == u1.size
   }
-  
+
   property("adding") = forAll { (l: List[Int]) =>
     val u = new UnrolledBuffer[Int]
     u ++= l
     u == l
   }
-  
+
 }
 
diff --git a/test/files/scalacheck/array-new.scala b/test/files/scalacheck/array-new.scala
index e13a47a..d8c69ea 100644
--- a/test/files/scalacheck/array-new.scala
+++ b/test/files/scalacheck/array-new.scala
@@ -1,4 +1,4 @@
-import scala.reflect.{ClassTag, classTag}
+import scala.reflect.ClassTag // new style: use ClassTag
 import org.scalacheck._
 import Prop._
 import Gen._
diff --git a/test/files/scalacheck/array-old.scala b/test/files/scalacheck/array-old.scala
index f262bc6..03c0217 100644
--- a/test/files/scalacheck/array-old.scala
+++ b/test/files/scalacheck/array-old.scala
@@ -11,7 +11,7 @@ object Test extends Properties("Array") {
    */
   implicit def arbArray[T](implicit a: Arbitrary[T], m: Manifest[T]): Arbitrary[Array[T]] =
     Arbitrary(containerOf[List,T](arbitrary[T]) map (_.toArray))
-  
+
   val arrGen: Gen[Array[_]] = oneOf(
     arbitrary[Array[Int]],
     arbitrary[Array[Array[Int]]],
@@ -20,7 +20,7 @@ object Test extends Properties("Array") {
     arbitrary[Array[Boolean]],
     arbitrary[Array[AnyVal]]
   )
-  
+
   // inspired by #1857 and #2352
   property("eq/ne") = forAll(arrGen, arrGen) { (c1, c2) =>
     (c1 eq c2) || (c1 ne c2)
@@ -32,6 +32,6 @@ object Test extends Properties("Array") {
     val arr = Array.ofDim[String](i1, i2, i3)
     val flattened = arr flatMap (x => x) flatMap (x => x)
     flattened.length == i1 * i2 * i3
-  }                                                                             
+  }
 }
 
diff --git a/test/files/scalacheck/avl.scala b/test/files/scalacheck/avl.scala
index af79ad4..4cfacaf 100644
--- a/test/files/scalacheck/avl.scala
+++ b/test/files/scalacheck/avl.scala
@@ -2,14 +2,12 @@ import org.scalacheck.Gen
 import org.scalacheck.Prop.forAll
 import org.scalacheck.Properties
 
-import util.logging.ConsoleLogger
-
 package scala.collection.mutable {
 
   /**
    * Property of an AVL Tree : Any node of the tree has a balance value beetween in [-1; 1]
    */
-  abstract class AVLTreeTest(name: String) extends Properties(name) with ConsoleLogger {
+  abstract class AVLTreeTest(name: String) extends Properties(name) {
 
     def `2^`(n: Int) = (1 to n).fold(1)((a, b) => b*2)
 
@@ -54,7 +52,7 @@ package scala.collection.mutable {
     } yield {
       // selected mustn't be in elements already
       val list = makeAllBalancedTree(elements.sorted.distinct.map(_*2))
-      (selected*2+1, list) 
+      (selected*2+1, list)
     }
 
     def genInputDelete: org.scalacheck.Gen[(Int, List[AVLTree[Int]])] = for {
@@ -65,7 +63,7 @@ package scala.collection.mutable {
     } yield {
       // selected must be in elements already
       val list = makeAllBalancedTree(e)
-      (e(selected), list) 
+      (e(selected), list)
     }
   }
 
@@ -78,7 +76,7 @@ package scala.collection.mutable {
     }
 
     def setup(invariant: AVLTree[Int] => Boolean) = forAll(genInput) {
-      case (selected: Int, trees: List[AVLTree[Int]]) => 
+      case (selected: Int, trees: List[AVLTree[Int]]) =>
       trees.map(tree => invariant(tree)).fold(true)((a, b) => a && b)
     }
 
diff --git a/test/files/scalacheck/list.scala b/test/files/scalacheck/list.scala
index 4e1cf1f..5f6de95 100644
--- a/test/files/scalacheck/list.scala
+++ b/test/files/scalacheck/list.scala
@@ -12,7 +12,7 @@ object Test extends Properties("List") {
   property("startsWith/take") = forAll { (xs: List[Int], count: Int) => xs startsWith (xs take count) }
   property("endsWith/takeRight") = forAll { (xs: List[Int], count: Int) => xs endsWith (xs takeRight count) }
   property("fill") = forAll(choose(1, 100)) { count =>
-    forAll { (x: Int) => 
+    forAll { (x: Int) =>
       val xs = List.fill(count)(x)
       (xs.length == count) && (xs.distinct == List(x))
     }
diff --git a/test/files/scalacheck/parallel-collections/IntOperators.scala b/test/files/scalacheck/parallel-collections/IntOperators.scala
index 5c99796..4a74b91 100644
--- a/test/files/scalacheck/parallel-collections/IntOperators.scala
+++ b/test/files/scalacheck/parallel-collections/IntOperators.scala
@@ -106,7 +106,7 @@ trait IntSeqOperators extends IntOperators with SeqOperators[Int] {
     List(4, 5, 6, 7, 8, 9, 10),
     List(4, 5, 6, 7, 8, 9, 0),
     List(-4, -3, -2, -1)
-  )    
+  )
 }
 
 
diff --git a/test/files/scalacheck/parallel-collections/PairOperators.scala b/test/files/scalacheck/parallel-collections/PairOperators.scala
index 4711e21..fe85111 100644
--- a/test/files/scalacheck/parallel-collections/PairOperators.scala
+++ b/test/files/scalacheck/parallel-collections/PairOperators.scala
@@ -7,76 +7,76 @@ import scala.collection.parallel._
 trait PairOperators[K, V] extends Operators[(K, V)] {
   def koperators: Operators[K]
   def voperators: Operators[V]
-  
+
   private def zipPredicates(kps: List[K => Boolean], vps: List[V => Boolean]): List[((K, V)) => Boolean] = for {
     (kp, vp) <- koperators.countPredicates zip voperators.countPredicates
   } yield new Function1[(K, V), Boolean] {
     def apply(kv: (K, V)) = kp(kv._1) && vp(kv._2)
   }
-  
+
   /* operators */
-  
+
   def reduceOperators = for {
     (kop, vop) <- koperators.reduceOperators zip voperators.reduceOperators
   } yield new Function2[(K, V), (K, V), (K, V)] {
     def apply(kv1: (K, V), kv2: (K, V)) = (kop(kv1._1, kv2._1), vop(kv1._2, kv2._2))
   }
-  
+
   def countPredicates = zipPredicates(koperators.countPredicates, voperators.countPredicates)
-  
+
   def forallPredicates = zipPredicates(koperators.forallPredicates, voperators.forallPredicates)
-  
+
   def existsPredicates = zipPredicates(koperators.existsPredicates, voperators.existsPredicates)
-  
+
   def findPredicates = zipPredicates(koperators.findPredicates, voperators.findPredicates)
-  
+
   def mapFunctions = for {
     (km, vm) <- koperators.mapFunctions zip voperators.mapFunctions
   } yield new Function1[(K, V), (K, V)] {
     def apply(kv: (K, V)) = (km(kv._1), vm(kv._2))
   }
-  
+
   def partialMapFunctions = for {
     (kpm, vpm) <- koperators.partialMapFunctions zip voperators.partialMapFunctions
   } yield new PartialFunction[(K, V), (K, V)] {
     def isDefinedAt(kv: (K, V)) = kpm.isDefinedAt(kv._1) && vpm.isDefinedAt(kv._2)
     def apply(kv: (K, V)) = (kpm(kv._1), vpm(kv._2))
   }
-  
+
   def flatMapFunctions = for {
     (kfm, vfm) <- koperators.flatMapFunctions zip voperators.flatMapFunctions
   } yield new Function1[(K, V), Traversable[(K, V)]] {
     def apply(kv: (K, V)) = kfm(kv._1).toIterable zip vfm(kv._2).toIterable
   }
-  
+
   def filterPredicates = zipPredicates(koperators.filterPredicates, voperators.filterPredicates)
-  
+
   def filterNotPredicates = filterPredicates
-  
+
   def partitionPredicates = filterPredicates
-  
+
   def takeWhilePredicates = zipPredicates(koperators.takeWhilePredicates, voperators.takeWhilePredicates)
-  
+
   def dropWhilePredicates = takeWhilePredicates
-  
+
   def spanPredicates = takeWhilePredicates
-  
+
   def foldArguments = for {
     ((kinit, kop), (vinit, vop)) <- koperators.foldArguments zip voperators.foldArguments
   } yield ((kinit, vinit), new Function2[(K, V), (K, V), (K, V)] {
     def apply(kv1: (K, V), kv2: (K, V)) = (kop(kv1._1, kv2._1), vop(kv1._2, kv2._2))
   })
-  
+
   def addAllTraversables = for {
     (kt, vt) <- koperators.addAllTraversables zip voperators.addAllTraversables
   } yield kt.toIterable zip vt.toIterable
-  
+
   def newArray(sz: Int) = new Array[(K, V)](sz)
-  
+
   def groupByFunctions = (koperators.groupByFunctions zip voperators.groupByFunctions) map {
     opt => { (p: (K, V)) => (opt._1(p._1), opt._2(p._2)) }
   }
-  
+
 }
 
 
diff --git a/test/files/scalacheck/parallel-collections/PairValues.scala b/test/files/scalacheck/parallel-collections/PairValues.scala
index 5007c45..864dad2 100644
--- a/test/files/scalacheck/parallel-collections/PairValues.scala
+++ b/test/files/scalacheck/parallel-collections/PairValues.scala
@@ -17,7 +17,7 @@ import org.scalacheck.Arbitrary._
 trait PairValues[K, V] {
   def kvalues: Seq[Gen[K]]
   def vvalues: Seq[Gen[V]]
-  
+
   def values = for {
     kg <- kvalues
     vg <- vvalues
diff --git a/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala b/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala
index a01c8c7..691a3e9 100644
--- a/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala
@@ -17,22 +17,25 @@ import scala.collection.parallel.ops._
 abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("ParArray[" + tp + "]") {
   // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
   // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
-  
+
   type CollType = ParArray[T]
-  
+
   def isCheckingViews = false
-  
+
   def hasStrictOrder = true
 
+  def tasksupport: TaskSupport
+
   def ofSize(vals: Seq[Gen[T]], sz: Int) = {
     val a = new mutable.ArrayBuffer[T](sz)
     val gen = vals(rnd.nextInt(vals.size))
     for (i <- 0 until sz) a += sample(gen)
     a
   }
-  
+
   def fromSeq(a: Seq[T]) = {
     val pa = new ParArray[T](a.size)
+    pa.tasksupport = tasksupport
     var i = 0
     for (elem <- a.toList) {
       pa(i) = elem
@@ -40,20 +43,20 @@ abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("Pa
     }
     pa
   }
-  
+
   property("array mappings must be equal") = forAll(collectionPairs) { case (t, coll) =>
     val results = for ((f, ind) <- mapFunctions.zipWithIndex)
       yield ("op index: " + ind) |: t.map(f) == coll.map(f)
     results.reduceLeft(_ && _)
   }
-  
+
 }
 
 
-object IntParallelArrayCheck extends ParallelArrayCheck[Int]("Int") with IntSeqOperators with IntValues {
+class IntParallelArrayCheck(val tasksupport: TaskSupport) extends ParallelArrayCheck[Int]("Int") with IntSeqOperators with IntValues {
   override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
     (0 until sz).toArray.toSeq
-  }, sized { sz => 
+  }, sized { sz =>
     (-sz until 0).toArray.toSeq
   })
 }
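
The hunks above turn IntParallelArrayCheck from a singleton object into a class taking a TaskSupport, and fromSeq now assigns that scheduler to the freshly built ParArray, so the same properties can be exercised under different thread pools. A minimal sketch of attaching a TaskSupport to a parallel collection, assuming the 2.11 parallel-collections API (object and pool names here are illustrative only, not part of the patch):

    import java.util.concurrent.Executors
    import scala.collection.parallel.ExecutionContextTaskSupport
    import scala.collection.parallel.mutable.ParArray
    import scala.concurrent.ExecutionContext

    object TaskSupportSketch {
      def main(args: Array[String]): Unit = {
        val pool = Executors.newFixedThreadPool(2)
        val pa   = ParArray(1, 2, 3, 4, 5)
        // Route this collection's parallel operations through our own pool
        // instead of the default fork/join scheduler.
        pa.tasksupport = new ExecutionContextTaskSupport(
          ExecutionContext.fromExecutorService(pool))
        println(pa.map(_ * 2).seq)
        pool.shutdown()
      }
    }
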
diff --git a/test/files/scalacheck/parallel-collections/ParallelArrayTest.scala b/test/files/scalacheck/parallel-collections/ParallelArrayTest.scala
index 680f6e1..db2b1ea 100644
--- a/test/files/scalacheck/parallel-collections/ParallelArrayTest.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelArrayTest.scala
@@ -15,7 +15,7 @@
 
 //     /**
 //      * this currently passes, but do we want it to?
-//      * does it have meaning to have an empty parallel array? 
+//      * does it have meaning to have an empty parallel array?
 //      */
 //     new ParallelArray(0)
 //     ()
diff --git a/test/files/scalacheck/parallel-collections/ParallelArrayViewCheck.scala b/test/files/scalacheck/parallel-collections/ParallelArrayViewCheck.scala
index d2a8fa7..9805e26 100644
--- a/test/files/scalacheck/parallel-collections/ParallelArrayViewCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelArrayViewCheck.scala
@@ -24,18 +24,18 @@
 // extends ParallelSeqCheck[T]("ParallelSeqView[" + tp + ", ParallelArray[" + tp + "]]") {
 //   // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
 //   // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
-  
+
 //   type CollType = ParallelSeqView[T, ParallelArray[T], ArraySeq[T]]
-  
+
 //   def isCheckingViews = true
-  
+
 //   def instances(vals: Seq[Gen[T]]): Gen[Seq[T]] = sized { sz =>
 //     val a = new ArrayBuffer[T](sz)
 //     val gen = vals(rnd.nextInt(vals.size))
 //     for (i <- 0 until sz) a += sample(gen)
 //     a
 //   }
-  
+
 //   def fromSeq(a: Seq[T]) = {
 //     val pa = new ParallelArray[T](a.size)
 //     var i = 0
@@ -45,13 +45,13 @@
 //     }
 //     pa.view
 //   }
-  
+
 //   property("forces must be equal") = forAll(collectionPairs) { case (s, coll) =>
 //     val smodif = (s ++ s).reverse.take(s.length).reverse.zip(s).drop(s.length / 2)
 //     val cmodif = (coll ++ s).reverse.take(s.length).reverse.zip(s).drop(s.length / 2).force
 //     smodif == cmodif
 //   }
-  
+
 // }
 
 
@@ -68,18 +68,18 @@
 // extends ParallelSeqCheck[T]("ParallelSeqView[" + tp + "], ParallelArray[" + tp + "].++.patch.reverse.take.reverse") {
 //   ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
 //   ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
-  
+
 //   type CollType = collection.parallel.ParallelSeq[T]
-  
+
 //   def isCheckingViews = true
-  
+
 //   def instances(vals: Seq[Gen[T]]): Gen[Seq[T]] = sized { sz =>
 //     val a = new ArrayBuffer[T](sz)
 //     val gen = vals(rnd.nextInt(vals.size))
 //     for (i <- 0 until sz) a += sample(gen)
 //     a
 //   }
-  
+
 //   def fromSeq(a: Seq[T]) = {
 //     val pa = new ParallelArray[T](a.size)
 //     var i = 0
@@ -91,7 +91,7 @@
 //     val original = modified.take(modified.length / 2).reverse
 //     original
 //   }
-  
+
 // }
 
 
diff --git a/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
index e141c39..cf15afb 100644
--- a/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
@@ -18,22 +18,25 @@ import scala.collection.parallel.ops._
 abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParConcurrentTrieMap[" + tp + "]") {
   // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
   // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
-  
+
   type CollType = ParTrieMap[K, V]
-  
+
   def isCheckingViews = false
-  
+
   def hasStrictOrder = false
 
-  def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {  
+  def tasksupport: TaskSupport
+
+  def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
     val ct = new concurrent.TrieMap[K, V]
     val gen = vals(rnd.nextInt(vals.size))
     for (i <- 0 until sz) ct += sample(gen)
     ct
   }
-  
+
   def fromTraversable(t: Traversable[(K, V)]) = {
     val pct = new ParTrieMap[K, V]
+    pct.tasksupport = tasksupport
     var i = 0
     for (kv <- t.toList) {
       pct += kv
@@ -41,33 +44,33 @@ abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends Parallel
     }
     pct
   }
-  
+
 }
 
 
-object IntIntParallelConcurrentTrieMapCheck extends ParallelConcurrentTrieMapCheck[Int, Int]("Int, Int")
+class IntIntParallelConcurrentTrieMapCheck(val tasksupport: TaskSupport) extends ParallelConcurrentTrieMapCheck[Int, Int]("Int, Int")
 with PairOperators[Int, Int]
 with PairValues[Int, Int]
 {
   def intvalues = new IntValues {}
   def kvalues = intvalues.values
   def vvalues = intvalues.values
-  
+
   val intoperators = new IntOperators {}
   def voperators = intoperators
   def koperators = intoperators
-  
+
   override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
     case pm: ParTrieMap[k, v] =>
       println("Mutable parallel ctrie")
     case _ =>
       println("could not match data structure type: " + ds.getClass)
   }
-  
+
   override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
     // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster
     //   val invs = pm.brokenInvariants
-      
+
     //   val containsall = (for ((k, v) <- orig) yield {
     //     if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true
     //     else {
@@ -75,8 +78,8 @@ with PairValues[Int, Int]
     //       false
     //     }
     //   }).foldLeft(true)(_ && _)
-      
-      
+
+
     //   if (invs.isEmpty) containsall
     //   else {
     //     println("Invariants broken:\n" + invs.mkString("\n"))
@@ -84,7 +87,7 @@ with PairValues[Int, Int]
     //   }
     case _ => true
   }
-  
+
 }
 
 
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala b/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
index 0152b1b..34b3f33 100644
--- a/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
@@ -17,22 +17,25 @@ import scala.collection.parallel.ops._
 abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParHashMap[" + tp + "]") {
   // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
   // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
-  
+
   type CollType = ParHashMap[K, V]
-  
+
   def isCheckingViews = false
-  
+
   def hasStrictOrder = false
 
-  def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {  
+  def tasksupport: TaskSupport
+
+  def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
     val hm = new mutable.HashMap[K, V]
     val gen = vals(rnd.nextInt(vals.size))
     for (i <- 0 until sz) hm += sample(gen)
     hm
   }
-  
+
   def fromTraversable(t: Traversable[(K, V)]) = {
     val phm = new ParHashMap[K, V]
+    phm.tasksupport = tasksupport
     var i = 0
     for (kv <- t.toList) {
       phm += kv
@@ -40,33 +43,33 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
     }
     phm
   }
-  
+
 }
 
 
-object IntIntParallelHashMapCheck extends ParallelHashMapCheck[Int, Int]("Int, Int")
+class IntIntParallelHashMapCheck(val tasksupport: TaskSupport) extends ParallelHashMapCheck[Int, Int]("Int, Int")
 with PairOperators[Int, Int]
 with PairValues[Int, Int]
 {
   def intvalues = new IntValues {}
   def kvalues = intvalues.values
   def vvalues = intvalues.values
-  
+
   val intoperators = new IntOperators {}
   def voperators = intoperators
   def koperators = intoperators
-  
+
   override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
     case pm: ParHashMap[k, v] =>
       println("Mutable parallel hash map\n" + pm.hashTableContents.debugInformation)
     case _ =>
       println("could not match data structure type: " + ds.getClass)
   }
-  
+
   override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
     // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster
     //   val invs = pm.brokenInvariants
-      
+
     //   val containsall = (for ((k, v) <- orig) yield {
     //     if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true
     //     else {
@@ -74,8 +77,8 @@ with PairValues[Int, Int]
     //       false
     //     }
     //   }).foldLeft(true)(_ && _)
-      
-      
+
+
     //   if (invs.isEmpty) containsall
     //   else {
     //     println("Invariants broken:\n" + invs.mkString("\n"))
@@ -83,7 +86,7 @@ with PairValues[Int, Int]
     //   }
     case _ => true
   }
-  
+
 }
 
 
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala b/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala
index a0a6d1a..91de247 100644
--- a/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala
@@ -17,34 +17,37 @@ import scala.collection.parallel.ops._
 abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("mutable.ParHashSet[" + tp + "]") {
   // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
   // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
-  
+
   type CollType = ParHashSet[T]
-  
+
   def isCheckingViews = false
-  
+
   def hasStrictOrder = false
 
-  def ofSize(vals: Seq[Gen[T]], sz: Int) = {  
+  def tasksupport: TaskSupport
+
+  def ofSize(vals: Seq[Gen[T]], sz: Int) = {
     val hm = new mutable.HashSet[T]
     val gen = vals(rnd.nextInt(vals.size))
     for (i <- 0 until sz) hm += sample(gen)
     hm
   }
-  
+
   def fromTraversable(t: Traversable[T]) = {
-    val phm = new ParHashSet[T]
+    val phs = new ParHashSet[T]
+    phs.tasksupport = tasksupport
     var i = 0
     for (kv <- t.toList) {
-      phm += kv
+      phs += kv
       i += 1
     }
-    phm
+    phs
   }
-  
+
 }
 
 
-object IntParallelHashSetCheck extends ParallelHashSetCheck[Int]("Int")
+class IntParallelHashSetCheck(val tasksupport: TaskSupport) extends ParallelHashSetCheck[Int]("Int")
 with IntOperators
 with IntValues
 {
@@ -54,12 +57,12 @@ with IntValues
     case _ =>
       println("could not match data structure type: " + ds.getClass)
   }
-  
+
   override def checkDataStructureInvariants(orig: Traversable[Int], ds: AnyRef) = ds match {
     // case pm: ParHashSet[t] if 1 == 0 =>
     //   // for an example of how not to write code proceed below
     //   val invs = pm.brokenInvariants
-      
+
     //   val containsall = (for (elem <- orig) yield {
     //     if (pm.asInstanceOf[ParHashSet[Int]](elem) == true) true
     //     else {
@@ -69,8 +72,8 @@ with IntValues
     //       false
     //     }
     //   }).foldLeft(true)(_ && _)
-      
-      
+
+
     //   if (invs.isEmpty) {
     //     if (!containsall) println(pm.debugInformation)
     //     containsall
@@ -80,7 +83,7 @@ with IntValues
     //   }
     case _ => true
   }
-  
+
 }
 
 
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala b/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
index 3a2893f..9e29be5 100644
--- a/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
@@ -17,22 +17,25 @@ import scala.collection.parallel.ops._
 abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("immutable.ParHashMap[" + tp + "]") {
   // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
   // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
-  
+
   type CollType = ParHashMap[K, V]
-  
+
   def isCheckingViews = false
-  
+
   def hasStrictOrder = false
-  
+
+  def tasksupport: TaskSupport
+
   def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
     var hm = new immutable.HashMap[K, V]
     val gen = vals(rnd.nextInt(vals.size))
     for (i <- 0 until sz) hm += sample(gen)
     hm
   }
-  
+
   def fromTraversable(t: Traversable[(K, V)]) = {
     var phm = new ParHashMap[K, V]
+    phm.tasksupport = tasksupport
     var i = 0
     for (kv <- t.toList) {
       phm += kv
@@ -40,22 +43,22 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
     }
     phm
   }
-  
+
 }
 
 
-object IntIntParallelHashMapCheck extends ParallelHashMapCheck[Int, Int]("Int, Int")
+class IntIntParallelHashMapCheck(val tasksupport: TaskSupport) extends ParallelHashMapCheck[Int, Int]("Int, Int")
 with PairOperators[Int, Int]
 with PairValues[Int, Int]
 {
   def intvalues = new IntValues {}
   def kvalues = intvalues.values
   def vvalues = intvalues.values
-  
+
   val intoperators = new IntOperators {}
   def voperators = intoperators
   def koperators = intoperators
-  
+
   override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
     case pm: ParHashMap[k, v] =>
       pm.printDebugInfo
@@ -69,48 +72,51 @@ with PairValues[Int, Int]
 abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("immutable.ParHashSet[" + tp + "]") {
   // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
   // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
-  
+
   type CollType = ParHashSet[T]
-  
+
   def isCheckingViews = false
-  
+
   def hasStrictOrder = false
-  
+
+  def tasksupport: TaskSupport
+
   def ofSize(vals: Seq[Gen[T]], sz: Int) = {
     var hm = new immutable.HashSet[T]
     val gen = vals(rnd.nextInt(vals.size))
     for (i <- 0 until sz) hm += sample(gen)
     hm
   }
-  
+
   def fromTraversable(t: Traversable[T]) = {
-    var phm = new ParHashSet[T]
+    var phs = new ParHashSet[T]
+    phs.tasksupport = tasksupport
     var i = 0
     for (kv <- t.toList) {
-      phm += kv
+      phs += kv
       i += 1
     }
-    phm
+    phs
   }
-  
+
   override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
     case pm: ParHashSet[t] =>
       println("Parallel hash set")
     case _ =>
       println("could not match data structure type: " + ds.getClass)
   }
-  
+
 }
 
 
-object IntParallelHashSetCheck extends ParallelHashSetCheck[Int]("Int")
+class IntParallelHashSetCheck(val tasksupport: TaskSupport) extends ParallelHashSetCheck[Int]("Int")
 with IntOperators
 with IntValues
 {
   def intvalues = new IntValues {}
   def kvalues = intvalues.values
   def vvalues = intvalues.values
-  
+
   override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
     case pm: ParHashMap[k, v] =>
       pm.printDebugInfo
diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
index e3f8778..774d6f4 100644
--- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
@@ -16,14 +16,14 @@ import scala.collection.parallel._
 
 abstract class ParallelIterableCheck[T](collName: String) extends Properties(collName) with Operators[T] {
   type CollType <: ParIterable[T]
-  
+
   def values: Seq[Gen[T]]
   def ofSize(vals: Seq[Gen[T]], sz: Int): Iterable[T]
   def fromTraversable(t: Traversable[T]): CollType
   def isCheckingViews: Boolean
   def hasStrictOrder: Boolean
-  
-  
+
+
   def instances(vals: Seq[Gen[T]]): Gen[Iterable[T]] = oneOf(
     sized(
       sz =>
@@ -31,46 +31,46 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
     ),
     for (sz <- choose(1000, 2000)) yield ofSize(vals, sz),
     for (sz <- choose(4000, 4001)) yield ofSize(vals, sz),
-    for (sz <- choose(10000, 10001)) yield ofSize(vals, sz)    
+    for (sz <- choose(10000, 10001)) yield ofSize(vals, sz)
   )
-  
+
   // used to check if constructed collection is valid
   def checkDataStructureInvariants(orig: Traversable[T], cf: AnyRef) = {
    // can be overridden in subclasses
     true
   }
-  
+
   def printDataStructureDebugInfo(cf: AnyRef) {
     // can be overridden in subclasses
   }
-  
+
   val rnd = new scala.util.Random
-  
+
   def sample(gen: Gen[T]): T = {
     var s = gen.sample
     while (s == None) s = gen.sample
     s.get
   }
-  
+
   def sampleValue: T = sample(values(rnd.nextInt(values.length)))
-  
+
   def collectionPairs = for (inst <- instances(values)) yield (inst, fromTraversable(inst))
-  
+
   def collectionPairsWithLengths = for (inst <- instances(values); s <- choose(0, inst.size))
     yield (inst, fromTraversable(inst), s)
-    
+
   def collectionPairsWith2Indices = for (
       inst <- instances(values);
       f <- choose(0, inst.size);
       s <- choose(0, inst.size))
     yield (inst, fromTraversable(inst), f, s)
-    
-  def collectionTriplets = for (inst <- instances(values); 
+
+  def collectionTriplets = for (inst <- instances(values);
       updStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
     val modif = inst.toSeq.patch(updStart, inst.toSeq, howMany)
     (inst, fromTraversable(inst), modif)
   }
-  
+
   def areEqual(t1: GenTraversable[T], t2: GenTraversable[T]) = if (hasStrictOrder) {
     t1 == t2 && t2 == t1
   } else (t1, t2) match { // it is slightly delicate what `equal` means if the order is not strict
@@ -81,14 +81,14 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
       i1s == i2s && i2s == i1s
     case _ => t1 == t2 && t2 == t1
   }
-  
+
   def printDebugInfo(coll: ParIterableLike[_, _, _]) {
     println("Collection debug info: ")
     coll.printDebugBuffer
     println("Task debug info: ")
     println(coll.tasksupport.debugMessages.mkString("\n"))
   }
-  
+
   def printComparison(t: Traversable[_], coll: ParIterable[_], tf: Traversable[_], cf: ParIterable[_], ind: Int) {
     printDebugInfo(coll)
     println("Operator: " + ind)
@@ -108,7 +108,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
     println("tf == cf - " + (tf == cf))
     println("cf == tf - " + (cf == tf))
   }
-  
+
   property("reductions must be equal for assoc. operators") = forAll(collectionPairs) { case (t, coll) =>
     if (t.size != 0) {
       val results = for ((op, ind) <- reduceOperators.zipWithIndex) yield {
@@ -126,7 +126,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
       results.reduceLeft(_ && _)
     } else "has size 0" |: true
   }
-  
+
   property("counts must be equal") = forAll(collectionPairs) { case (t, coll) =>
     val results = for ((pred, ind) <- countPredicates.zipWithIndex) yield {
       val tc = t.count(pred)
@@ -142,19 +142,19 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
     }
     results.reduceLeft(_ && _)
   }
-  
+
   property("forall must be equal") = forAll(collectionPairs) { case (t, coll) =>
     val results = for ((pred, ind) <- forallPredicates.zipWithIndex)
       yield ("op index: " + ind) |: t.forall(pred) == coll.forall(pred)
     results.reduceLeft(_ && _)
   }
-  
+
   property("exists must be equal") = forAll(collectionPairs) { case (t, coll) =>
     val results = for ((pred, ind) <- existsPredicates.zipWithIndex)
       yield ("op index: " + ind) |: t.exists(pred) == coll.exists(pred)
     results.reduceLeft(_ && _)
   }
-  
+
   property("both must find or not find an element") = forAll(collectionPairs) { case (t, coll) =>
     val results = for ((pred, ind) <- findPredicates.zipWithIndex) yield {
       val ft = t.find(pred)
@@ -163,7 +163,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
     }
     results.reduceLeft(_ && _)
   }
-  
+
   property("mappings must be equal") = forAll(collectionPairs) { case (t, coll) =>
     val results = for ((f, ind) <- mapFunctions.zipWithIndex) yield {
       val ms = t.map(f)
@@ -184,7 +184,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
     }
     results.reduceLeft(_ && _)
   }
-  
+
   property("collects must be equal") = forAll(collectionPairs) { case (t, coll) =>
     val results = for ((f, ind) <- partialMapFunctions.zipWithIndex) yield {
       val ps = t.collect(f)
@@ -200,12 +200,12 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
     }
     results.reduceLeft(_ && _)
   }
-  
+
   property("flatMaps must be equal") = forAll(collectionPairs) { case (t, coll) =>
     (for ((f, ind) <- flatMapFunctions.zipWithIndex)
       yield ("op index: " + ind) |: areEqual(t.flatMap(f), coll.flatMap(f))).reduceLeft(_ && _)
   }
-  
+
   property("filters must be equal") = forAll(collectionPairs) { case (t, coll) =>
     (for ((p, ind) <- filterPredicates.zipWithIndex) yield {
       val tf = t.filter(p)
@@ -234,7 +234,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
       ("op index: " + ind) |: tf == cf && cf == tf && invs
     }).reduceLeft(_ && _)
   }
-  
+
   property("filterNots must be equal") = forAll(collectionPairs) { case (t, coll) =>
     (for ((p, ind) <- filterNotPredicates.zipWithIndex) yield {
       val tf = t.filterNot(p)
@@ -243,7 +243,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
       ("op index: " + ind) |: tf == cf && cf == tf
     }).reduceLeft(_ && _)
   }
-  
+
   if (!isCheckingViews) property("partitions must be equal") = forAll(collectionPairs) { case (t, coll) =>
     (for ((p, ind) <- partitionPredicates.zipWithIndex) yield {
       val tpart = t.partition(p)
@@ -257,15 +257,15 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
       ("op index: " + ind) |: tpart == cpart
     }).reduceLeft(_ && _)
   }
-  
+
   if (hasStrictOrder) property("takes must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) =>
     ("take " + n + " elements") |: t.take(n) == coll.take(n)
   }
-  
+
   if (hasStrictOrder) property("drops must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) =>
     ("drop " + n + " elements") |: t.drop(n) == coll.drop(n)
   }
-  
+
   if (hasStrictOrder) property("slices must be equal") = forAll(collectionPairsWith2Indices)
   { case (t, coll, fr, slicelength) =>
     val from = if (fr < 0) 0 else fr
@@ -289,7 +289,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
     }
     ("slice from " + from + " until " + until) |: tsl == collsl
   }
-  
+
   if (hasStrictOrder) property("splits must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) =>
     val tspl = t.splitAt(n)
     val cspl = coll.splitAt(n)
@@ -302,7 +302,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
     }
     ("splitAt " + n) |: tspl == cspl
   }
-  
+
   if (hasStrictOrder) property("takeWhiles must be equal") = forAll(collectionPairs) { case (t, coll) =>
     (for ((pred, ind) <- takeWhilePredicates.zipWithIndex) yield {
       val tt = t.takeWhile(pred)
@@ -317,7 +317,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
       ("operator " + ind) |: tt == ct
     }).reduceLeft(_ && _)
   }
-  
+
   if (hasStrictOrder) property("spans must be equal") = forAll(collectionPairs) { case (t, coll) =>
     (for ((pred, ind) <- spanPredicates.zipWithIndex) yield {
       val tsp = t.span(pred)
@@ -335,13 +335,13 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
       ("operator " + ind) |: tsp == csp
     }).reduceLeft(_ && _)
   }
-  
+
   if (hasStrictOrder) property("dropWhiles must be equal") = forAll(collectionPairs) { case (t, coll) =>
     (for ((pred, ind) <- dropWhilePredicates.zipWithIndex) yield {
       ("operator " + ind) |: t.dropWhile(pred) == coll.dropWhile(pred)
     }).reduceLeft(_ && _)
   }
-  
+
   property("folds must be equal for assoc. operators") = forAll(collectionPairs) { case (t, coll) =>
     (for (((first, op), ind) <- foldArguments.zipWithIndex) yield {
       val tres = t.foldLeft(first)(op)
@@ -356,34 +356,39 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
       ("operator " + ind) |: tres == cres
     }).reduceLeft(_ && _)
   }
-  
+
   property("++s must be equal") = forAll(collectionTriplets) { case (t, coll, colltoadd) =>
-    val toadd = colltoadd
-    val tr = t ++ toadd.iterator
-    val cr = coll ++ toadd.iterator
-    if (!areEqual(tr, cr)) {
-      println("from: " + t)
-      println("and: " + coll.iterator.toList)
-      println("adding: " + toadd)
-      println(tr.toList)
-      println(cr.iterator.toList)
-    }
-    ("adding " |: areEqual(tr, cr)) &&
-    (for ((trav, ind) <- (addAllTraversables).zipWithIndex) yield {
-      val tadded = t ++ trav
-      val cadded = coll ++ collection.parallel.mutable.ParArray(trav.toSeq: _*)
-      if (!areEqual(tadded, cadded)) {
-        println("----------------------")
+    try {
+      val toadd = colltoadd
+      val tr = t ++ toadd.iterator
+      val cr = coll ++ toadd.iterator
+      if (!areEqual(tr, cr)) {
         println("from: " + t)
-        println("and: " + coll)
-        println("adding: " + trav)
-        println(tadded)
-        println(cadded)
+        println("and: " + coll.iterator.toList)
+        println("adding: " + toadd)
+        println(tr.toList)
+        println(cr.iterator.toList)
       }
-      ("traversable " + ind) |: areEqual(tadded, cadded)
-    }).reduceLeft(_ && _)
+      ("adding " |: areEqual(tr, cr)) &&
+      (for ((trav, ind) <- (addAllTraversables).zipWithIndex) yield {
+        val tadded = t ++ trav
+        val cadded = coll ++ collection.parallel.mutable.ParArray(trav.toSeq: _*)
+        if (!areEqual(tadded, cadded)) {
+          println("----------------------")
+          println("from: " + t)
+          println("and: " + coll)
+          println("adding: " + trav)
+          println(tadded)
+          println(cadded)
+        }
+        ("traversable " + ind) |: areEqual(tadded, cadded)
+      }).reduceLeft(_ && _)
+    } catch {
+      case e: java.lang.Exception =>
+        throw e
+    }
   }
-  
+
   if (hasStrictOrder) property("copies to array must be equal") = forAll(collectionPairs) { case (t, coll) =>
     val tarr = newArray(t.size)
     val collarr = newArray(coll.size)
@@ -397,7 +402,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
     }
     tarr.toSeq == collarr.toSeq
   }
-  
+
   if (hasStrictOrder) property("scans must be equal") = forAll(collectionPairs) {
     case (t, coll) =>
       (for (((first, op), ind) <- foldArguments.zipWithIndex) yield {
@@ -413,7 +418,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
         ("operator " + ind) |: tscan == cscan && cscan == tscan
       }).reduceLeft(_ && _)
   }
-  
+
   property("groupBy must be equal") = forAll(collectionPairs) {
     case (t, coll) =>
       (for ((f, ind) <- groupByFunctions.zipWithIndex) yield {
@@ -429,7 +434,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
         ("operator " + ind) |: tgroup == cgroup && cgroup == tgroup
       }).reduceLeft(_ && _)
   }
-  
+
 }
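
The areEqual helper in the hunk above switches to an order-insensitive comparison whenever hasStrictOrder is false. A simplified sketch of that idea, assuming plain Scala 2.11 collections (the helper name is illustrative and this is a simplification, not the test's exact logic):

    import scala.collection.GenTraversable

    object LooseEquality {
      // When ordering is not guaranteed, compare the element sets in both
      // directions instead of the collections themselves; duplicates are
      // deliberately ignored in this simplified version.
      def looselyEqual[T](strictOrder: Boolean)(t1: GenTraversable[T], t2: GenTraversable[T]): Boolean =
        if (strictOrder) t1 == t2 && t2 == t1
        else {
          val s1 = t1.toSet
          val s2 = t2.toSet
          s1 == s2 && s2 == s1
        }

      def main(args: Array[String]): Unit = {
        println(looselyEqual(strictOrder = false)(List(1, 2, 3), Vector(3, 2, 1))) // true
        println(looselyEqual(strictOrder = true)(List(1, 2, 3), List(3, 2, 1)))    // false
      }
    }
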
 
 
diff --git a/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala b/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
index b6af8f4..d4643e7 100644
--- a/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
@@ -16,15 +16,15 @@ import scala.collection.parallel._
 
 abstract class ParallelMapCheck[K, V](collname: String) extends ParallelIterableCheck[(K, V)](collname) {
   type CollType <: ParMap[K, V]
-  
+
   property("gets iterated keys") = forAll(collectionPairs) {
     case (t, coll) =>
     val containsT = for ((k, v) <- t) yield (coll.get(k) == Some(v))
     val containsSelf = coll.map { case (k, v) => coll.get(k) == Some(v) }
     ("Par contains elements of seq map" |: containsT.forall(_ == true)) &&
     ("Par contains elements of itself" |: containsSelf.forall(_ == true))
-  }  
-   
+  }
+
 }
 
 
diff --git a/test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala b/test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala
index 3c6a35d..f490d94 100644
--- a/test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala
@@ -17,18 +17,18 @@ import scala.collection.parallel.ops._
 
 
 
-object ParallelRangeCheck extends ParallelSeqCheck[Int]("ParallelRange[Int]") with ops.IntSeqOperators {
+class ParallelRangeCheck(val tasksupport: TaskSupport) extends ParallelSeqCheck[Int]("ParallelRange[Int]") with ops.IntSeqOperators {
   // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
   // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
-  
+
   type CollType = collection.parallel.ParSeq[Int]
-  
+
   def hasStrictOrder = true
-  
+
   def isCheckingViews = false
-  
+
   def ofSize(vals: Seq[Gen[Int]], sz: Int) = unsupported
-  
+
   override def instances(vals: Seq[Gen[Int]]): Gen[Seq[Int]] = sized { start =>
     sized { end =>
       sized { step =>
@@ -36,22 +36,26 @@ object ParallelRangeCheck extends ParallelSeqCheck[Int]("ParallelRange[Int]") wi
       }
     }
   }
-  
+
   def fromSeq(a: Seq[Int]) = a match {
-    case r: Range => ParRange(r.start, r.end, r.step, false)
+    case r: Range =>
+      val pr = ParRange(r.start, r.end, r.step, false)
+      pr.tasksupport = tasksupport
+      pr
     case _ =>
       val pa = new parallel.mutable.ParArray[Int](a.length)
+      pa.tasksupport = tasksupport
       for (i <- 0 until a.length) pa(i) = a(i)
       pa
   }
-  
+
   override def traversable2Seq(t: Traversable[Int]): Seq[Int] = t match {
     case r: Range => r
     case _ => t.toSeq
   }
-  
+
   def values = Seq(choose(-100, 100))
-  
+
 }
 
 
diff --git a/test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala b/test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala
index dd89741..3f8a8ad 100644
--- a/test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala
@@ -16,13 +16,13 @@ import scala.collection.parallel._
 
 
 abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableCheck[T](collName) with SeqOperators[T] {
-  
+
   type CollType <: collection.parallel.ParSeq[T]
-  
+
 
   def ofSize(vals: Seq[Gen[T]], sz: Int): Seq[T]
   def fromSeq(s: Seq[T]): CollType
-  
+
   override def instances(vals: Seq[Gen[T]]): Gen[Seq[T]] = oneOf(
     sized(
       sz =>
@@ -31,17 +31,17 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
     for (sz <- choose(1000, 2000)) yield ofSize(vals, sz)
   )
 
-  
+
   def fromTraversable(t: Traversable[T]) = fromSeq(traversable2Seq(t))
   def traversable2Seq(t: Traversable[T]): Seq[T] = {
     if (t.isInstanceOf[Iterable[_]]) t.asInstanceOf[Iterable[T]].iterator.toList else t.toList
   }
-  
+
   override def collectionPairs: Gen[(Seq[T], CollType)] = for (inst <- instances(values)) yield (inst, fromSeq(inst))
-  
+
   override def collectionPairsWithLengths: Gen[(Seq[T], CollType, Int)] =
     for (inst <- instances(values); s <- choose(0, inst.size)) yield (inst, fromSeq(inst), s);
-  
+
   def collectionPairsWithModifiedWithLengths: Gen[(Seq[T], CollType, ParSeq[T], Int)] =
     for (inst <- instances(values); s <- choose(0, inst.size);
     updateStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
@@ -49,31 +49,31 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
       val parcollmodif = fromSeq(modifySlightly(inst, updateStart, howMany))
       (inst, parcoll, parcollmodif, s)
     }
-  
+
   def collectionPairsWithModified: Gen[(Seq[T], CollType, ParSeq[T])] =
     for (inst <- instances(values); updateStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
       val parcoll = fromSeq(inst)
       val parcollmodif = fromSeq(modifySlightly(inst, updateStart, howMany))
       (inst, parcoll, parcollmodif)
     }
-  
+
   def collectionPairsWithSliced: Gen[(Seq[T], CollType, ParSeq[T])] =
     for (inst <- instances(values); sliceStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
       val parcoll = fromSeq(inst)
       val parcollsliced = fromSeq(inst.slice(sliceStart, sliceStart + howMany))
       (inst, parcoll, parcollsliced)
     }
-  
+
   def collectionTripletsWith2Indices: Gen[(Seq[T], CollType, Seq[T], Int, Int)] =
     for (inst <- instances(values); f <- choose(0, inst.size); s <- choose(0, inst.size - f);
       third <- instances(values); sliceStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
       (inst, fromSeq(inst), inst.slice(sliceStart, sliceStart + howMany), f, s)
     }
-  
+
   private def modifySlightly(coll: Seq[T], updateStart: Int, howMany: Int) = {
     coll.patch(updateStart, coll, howMany)
   }
-  
+
   property("segmentLengths must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) =>
     (for ((pred, ind) <- segmentLengthPredicates.zipWithIndex) yield {
       val slen = s.segmentLength(pred, if (len < 0) 0 else len)
@@ -87,13 +87,13 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
       ("operator " + ind) |: slen == clen
     }).reduceLeft(_ && _)
   }
-  
+
   property("prefixLengths must be equal") = forAll(collectionPairs) { case (s, coll) =>
     (for ((pred, ind) <- segmentLengthPredicates.zipWithIndex) yield {
       ("operator " + ind) |: s.prefixLength(pred) == coll.prefixLength(pred)
     }).reduceLeft(_ && _)
   }
-  
+
   property("indexWheres must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) =>
     (for ((pred, ind) <- indexWherePredicates.zipWithIndex) yield {
       val sind = s.indexWhere(pred, len)
@@ -108,7 +108,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
       ("operator " + ind) |: sind == cind
     }).reduceLeft(_ && _)
   }
-  
+
   property("lastIndexWheres must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) =>
     (for ((pred, ind) <- lastIndexWherePredicates.zipWithIndex) yield {
       val end = if (len >= s.size) s.size - 1 else len
@@ -117,7 +117,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
       ("operator " + ind) |: sind == cind
     }).reduceLeft(_ && _)
   }
-  
+
   property("reverses must be equal") = forAll(collectionPairs) { case (s, coll) =>
     (s.length == 0 && s.getClass == classOf[collection.immutable.Range]) ||
     {
@@ -132,13 +132,13 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
       sr == cr
     }
   }
-  
+
   property("reverseMaps must be equal") = forAll(collectionPairs) { case (s, coll) =>
     (for ((f, ind) <- reverseMapFunctions.zipWithIndex) yield {
       ("operator " + ind) |: s.reverseMap(f) == coll.reverseMap(f)
     }).reduceLeft(_ && _)
   }
-  
+
   property("sameElements must be equal") = forAll(collectionPairsWithModifiedWithLengths) {
   case (s, coll, collmodif, len) =>
     val pos = if (len < 0) 0 else len
@@ -170,7 +170,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
       ("collection " + ind) |: sres == pres
     }).reduceLeft(_ && _)
   }
-  
+
   property("startsWiths must be equal") = forAll(collectionPairsWithModifiedWithLengths) {
   case (s, coll, collmodif, len) =>
     val pos = if (len < 0) 0 else len
@@ -194,7 +194,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
       ("seq " + sq) |: ss == cs
     }).reduceLeft(_ && _)
   }
-  
+
   property("endsWiths must be equal") = forAll(collectionPairsWithModified) {
   case (s, coll, collmodif) =>
     ("ends with self" |: s.endsWith(s) == coll.endsWith(s)) &&
@@ -213,18 +213,18 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
       ("seq " + sq) |: sew == cew
     }).reduceLeft(_ && _)
   }
-  
+
   property("unions must be equal") = forAll(collectionPairsWithModified) { case (s, coll, collmodif) =>
     ("modified" |: s.union(collmodif.seq) == coll.union(collmodif)) &&
     ("empty" |: s.union(Nil) == coll.union(fromSeq(Nil)))
   }
-  
+
   // This is failing with my views patch: array index out of bounds in the array iterator.
   // Couldn't see why this and only this was impacted, could use a second pair of eyes.
-  // 
+  //
   // This was failing because some corner cases weren't added to the patch method in ParSeqLike.
   // Curiously, this wasn't detected before.
-  // 
+  //
   if (!isCheckingViews) property("patches must be equal") = forAll(collectionTripletsWith2Indices) {
     case (s, coll, pat, from, repl) =>
     ("with seq" |: s.patch(from, pat, repl) == coll.patch(from, pat, repl)) &&
@@ -232,7 +232,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
     ("with empty" |: s.patch(from, Nil, repl) == coll.patch(from, fromSeq(Nil), repl)) &&
     ("with one" |: (s.length == 0 || s.patch(from, List(s(0)), 1) == coll.patch(from, fromSeq(List(coll(0))), 1)))
   }
-  
+
   if (!isCheckingViews) property("updates must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) =>
     val pos = if (len >= s.length) s.length - 1 else len
     if (s.length > 0) {
@@ -247,15 +247,15 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
       "from first" |: (supd == cupd)
     } else "trivially" |: true
   }
-  
+
   property("prepends must be equal") = forAll(collectionPairs) { case (s, coll) =>
     s.length == 0 || s(0) +: s == coll(0) +: coll
   }
-  
+
   property("appends must be equal") = forAll(collectionPairs) { case (s, coll) =>
     s.length == 0 || s :+ s(0) == coll :+ coll(0)
   }
-  
+
   property("padTos must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) =>
     val someValue = sampleValue
     val sdoub = s.padTo(len * 2, someValue)
@@ -269,14 +269,14 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
     ("smaller" |: s.padTo(len / 2, someValue) == coll.padTo(len / 2, someValue)) &&
     ("bigger" |: sdoub == cdoub)
   }
-  
+
   property("corresponds must be equal") = forAll(collectionPairsWithModified) { case (s, coll, modified) =>
     val modifcut = modified.toSeq.slice(0, modified.length)
     ("self" |: s.corresponds(s)(_ == _) == coll.corresponds(coll)(_ == _)) &&
     ("modified" |: s.corresponds(modified.seq)(_ == _) == coll.corresponds(modified)(_ == _)) &&
     ("modified2" |: s.corresponds(modifcut)(_ == _) == coll.corresponds(modifcut)(_ == _))
   }
-  
+
 }
 
 
diff --git a/test/files/scalacheck/parallel-collections/ParallelSetCheck.scala b/test/files/scalacheck/parallel-collections/ParallelSetCheck.scala
index 4211abb..56f7832 100644
--- a/test/files/scalacheck/parallel-collections/ParallelSetCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelSetCheck.scala
@@ -16,15 +16,15 @@ import scala.collection.parallel._
 
 abstract class ParallelSetCheck[T](collname: String) extends ParallelIterableCheck[T](collname) {
   type CollType <: ParSet[T]
-  
+
   property("gets iterated keys") = forAll(collectionPairs) {
     case (t, coll) =>
     val containsT = for (elem <- t) yield (coll.contains(elem))
     val containsSelf = for (elem <- coll) yield (coll.contains(elem))
     ("Par contains elements of seq map" |: containsT.forall(_ == true)) &&
     ("Par contains elements of itself" |: containsSelf.forall(_ == true))
-  }  
-   
+  }
+
 }
 
 
diff --git a/test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala b/test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala
index e4bb588..bbebd51 100644
--- a/test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala
@@ -17,37 +17,43 @@ import scala.collection.parallel.ops._
 import immutable.Vector
 import immutable.VectorBuilder
 
+import scala.collection.parallel.TaskSupport
+
 
 
 
 abstract class ParallelVectorCheck[T](tp: String) extends collection.parallel.ParallelSeqCheck[T]("ParVector[" + tp + "]") {
   // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
   // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
-  
+
   type CollType = ParVector[T]
-  
+
   def isCheckingViews = false
-  
+
   def hasStrictOrder = true
 
+  def tasksupport: TaskSupport
+
   def ofSize(vals: Seq[Gen[T]], sz: Int) = {
     val vb = new immutable.VectorBuilder[T]()
     val gen = vals(rnd.nextInt(vals.size))
     for (i <- 0 until sz) vb += sample(gen)
     vb.result
   }
-  
+
   def fromSeq(a: Seq[T]) = {
-    val pa = ParVector.newCombiner[T]
-    for (elem <- a.toList) pa += elem
-    pa.result
+    val pc = ParVector.newCombiner[T]
+    for (elem <- a.toList) pc += elem
+    val pv = pc.result
+    pv.tasksupport = tasksupport
+    pv
   }
-  
+
 }
 
 
 
-object IntParallelVectorCheck extends ParallelVectorCheck[Int]("Int") with IntSeqOperators with IntValues {
+class IntParallelVectorCheck(val tasksupport: TaskSupport) extends ParallelVectorCheck[Int]("Int") with IntSeqOperators with IntValues {
   override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
     (0 until sz).toArray.toSeq
   }, sized { sz =>
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
index 0a91977..a3c1df4 100644
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ b/test/files/scalacheck/parallel-collections/pc.scala
@@ -1,42 +1,58 @@
-
-
-
+/*
+ * scalac: -deprecation
+ * scalacheck: -workers 1 -minSize 0 -maxSize 4000 -minSuccessfulTests 5
+ */
 
 import org.scalacheck._
-
 import scala.collection.parallel._
 
+// package here to be able to access the package-private implementation and shut down the pool
+package scala {
 
-class ParCollProperties extends Properties("Parallel collections") {
-  /*   Collections   */
-  
-  // parallel arrays
-  include(mutable.IntParallelArrayCheck)
-  
-  // parallel ranges
-  include(immutable.ParallelRangeCheck)
+  class ParCollProperties extends Properties("Parallel collections") {
   
-  // parallel immutable hash maps (tries)
-  include(immutable.IntIntParallelHashMapCheck)
+    def includeAllTestsWith(support: TaskSupport) {
+      // parallel arrays with default task support
+      include(new mutable.IntParallelArrayCheck(support))
+    
+      // parallel ranges
+      include(new immutable.ParallelRangeCheck(support))
+    
+      // parallel immutable hash maps (tries)
+      include(new immutable.IntIntParallelHashMapCheck(support))
+    
+      // parallel immutable hash sets (tries)
+      include(new immutable.IntParallelHashSetCheck(support))
+    
+      // parallel mutable hash maps (tables)
+      include(new mutable.IntIntParallelHashMapCheck(support))
+    
+      // parallel ctrie
+      include(new mutable.IntIntParallelConcurrentTrieMapCheck(support))
+    
+      // parallel mutable hash sets (tables)
+      include(new mutable.IntParallelHashSetCheck(support))
+    
+      // parallel vectors
+      include(new immutable.IntParallelVectorCheck(support))
+    }
   
-  // parallel immutable hash sets (tries)
-  include(immutable.IntParallelHashSetCheck)
+    includeAllTestsWith(defaultTaskSupport)
   
-  // parallel mutable hash maps (tables)
-  include(mutable.IntIntParallelHashMapCheck)
-  
-  // parallel ctrie
-  include(mutable.IntIntParallelConcurrentTrieMapCheck)
-  
-  // parallel mutable hash sets (tables)
-  include(mutable.IntParallelHashSetCheck)
+    val ec = scala.concurrent.ExecutionContext.fromExecutorService(java.util.concurrent.Executors.newFixedThreadPool(5))
+    val ectasks = new collection.parallel.ExecutionContextTaskSupport(ec)
+    includeAllTestsWith(ectasks)
+
+    // no post test hooks in scalacheck, so cannot do:
+    // ec.shutdown()
   
-  // parallel vectors
-  include(immutable.IntParallelVectorCheck)
+  }
+
 }
 
 
-object Test {
+object Test extends scala.ParCollProperties {
+  /*
   def main(args: Array[String]) {
     val pc = new ParCollProperties
     org.scalacheck.Test.checkProperties(
@@ -51,4 +67,5 @@ object Test {
       pc
     )
   }
+  */
 }
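
The rewritten pc.scala above wraps the suite in a method that is invoked once per TaskSupport, so every included check runs both on the default scheduler and on an ExecutionContext-backed one. A reduced sketch of that parameterisation pattern with ScalaCheck, assuming Scala 2.11 (suite and property names are illustrative only):

    import org.scalacheck.{Prop, Properties}
    import scala.collection.parallel.{ExecutionContextTaskSupport, ForkJoinTaskSupport, TaskSupport}

    class SizeCheck(val tasksupport: TaskSupport) extends Properties("size") {
      property("par map keeps size") = Prop.forAll { (xs: List[Int]) =>
        val p = xs.par
        p.tasksupport = tasksupport   // run this property under the injected scheduler
        p.map(_ + 1).size == xs.size
      }
    }

    object AllSizeChecks extends Properties("all-sizes") {
      def includeAllTestsWith(support: TaskSupport): Unit =
        include(new SizeCheck(support))

      // Instantiate the same suite once per scheduler under test.
      includeAllTestsWith(new ForkJoinTaskSupport(new scala.concurrent.forkjoin.ForkJoinPool(2)))
      includeAllTestsWith(new ExecutionContextTaskSupport(scala.concurrent.ExecutionContext.global))
    }
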
diff --git a/test/files/scalacheck/primitive-eqeq.scala b/test/files/scalacheck/primitive-eqeq.scala
index a783805..60fe63c 100644
--- a/test/files/scalacheck/primitive-eqeq.scala
+++ b/test/files/scalacheck/primitive-eqeq.scala
@@ -4,7 +4,7 @@ import Gen._
 
 object Test extends Properties("==") {
   def equalObjectsEqualHashcodes(x: Any, y: Any) = (x != y) || (x == y && x.## == y.##)
-  
+
   // ticket #2087
   property("short/char") = forAll { (x: Short) => {
       val ch: Char = x.toChar
@@ -14,15 +14,15 @@ object Test extends Properties("==") {
 
   property("symmetry") = forAll { (x: AnyVal, y: AnyVal) => (x == y) == (y == x) }
   property("transitivity") = forAll { (x: AnyVal, y: AnyVal, z: AnyVal) => x != y || y != z || x == z }
-  
-  property("##") = forAll { 
+
+  property("##") = forAll {
     (x: Short) => {
       val anyvals = List(x.toByte, x.toChar, x, x.toInt, x.toLong, x.toFloat, x.toDouble, BigInt(x), BigDecimal(x))
       val shortAndLarger = anyvals drop 2
 
       val result = (
-        ((anyvals, anyvals).zipped forall equalObjectsEqualHashcodes) && 
-        ((shortAndLarger, shortAndLarger).zipped forall (_ == _)) && 
+        ((anyvals, anyvals).zipped forall equalObjectsEqualHashcodes) &&
+        ((shortAndLarger, shortAndLarger).zipped forall (_ == _)) &&
         ((shortAndLarger, shortAndLarger).zipped forall ((x, y) => (x: Any) == (y: Any)))
       )
       result
diff --git a/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala b/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala
new file mode 100644
index 0000000..7bd3714
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala
@@ -0,0 +1,295 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, internal._, Flag._
+
+trait ArbitraryTreesAndNames {
+  def smallList[T](size: Int, g: Gen[T]) = {
+    val n: Int = choose(0, size / 2 + 1).sample match {
+      case Some(i) => i
+      case None => 0
+    }
+    containerOfN[List, T](n, g)
+  }
+
+  def shortIdent(len: Int) =
+    for(name <- identifier)
+      yield if(name.length <= len) name
+            else name.substring(0, len - 1)
+
+  def genTermName = for(name <- shortIdent(8)) yield TermName(name)
+  def genTypeName = for(name <- shortIdent(8)) yield TypeName(name)
+  def genName = oneOf(genTermName, genTypeName)
+
+  def genFlagSet = oneOf(
+    TRAIT, INTERFACE, MUTABLE, MACRO,
+    DEFERRED, ABSTRACT, FINAL, SEALED,
+    IMPLICIT, LAZY, OVERRIDE, PRIVATE,
+    PROTECTED, LOCAL, CASE, ABSOVERRIDE,
+    BYNAMEPARAM, PARAM, COVARIANT, CONTRAVARIANT,
+    DEFAULTPARAM, PRESUPER, DEFAULTINIT
+  )
+
+  def genModifiers = for(flagset <- genFlagSet) yield Modifiers(flagset)
+
+  def genConstant =
+    for(value <- oneOf(arbitrary[Byte], arbitrary[Short], arbitrary[Char],
+                       arbitrary[Int], arbitrary[Long], arbitrary[Float],
+                       arbitrary[Double], arbitrary[Boolean], arbitrary[String]))
+      yield Constant(value)
+
+  def genAnnotated(size: Int, argGen: Int => Gen[Tree]) =
+    for(annot <- genTree(size - 1); arg <- argGen(size - 1))
+      yield Annotated(annot, arg)
+
+  def genAlternative(size: Int): Gen[Alternative] =
+    for(trees <- smallList(size, genTree(size - 1)))
+      yield Alternative(trees)
+
+  def genAppliedTypeTree(size: Int) =
+    for(tpt <- genTree(size - 1) if tpt.isType;
+        args <- smallList(size, genTree(size - 1)))
+      yield AppliedTypeTree(tpt, args)
+
+  def genApply(size: Int) =
+    for(fun <- genTree(size - 1);
+        args <- smallList(size, genTree(size - 1)))
+      yield Apply(fun, args)
+
+  def genAssign(size: Int) =
+    for(lhs <- genTree(size - 1); rhs <- genTree(size - 1))
+      yield Assign(lhs, rhs)
+
+  def genAssignOrNamedArg(size: Int) =
+    for(lhs <- genTree(size - 1); rhs <- genTree(size - 1))
+      yield AssignOrNamedArg(lhs, rhs)
+
+  def genBind(size: Int, nameGen: Gen[Name]) =
+    for(name <- nameGen; body <- genTree(size - 1))
+      yield Bind(name, body)
+
+  def genBlock(size: Int) =
+    for(stats <- smallList(size, genTree(size - 1)); expr <- genTree(size - 1))
+      yield Block(stats, expr)
+
+  def genCaseDef(size: Int) =
+    for(pat <- genTree(size - 1); guard <- genTree(size - 1); body <- genTree(size - 1))
+      yield CaseDef(pat, guard, body)
+
+  def genClassDef(size: Int) =
+    for(mods <- genModifiers; name <- genTypeName;
+        tparams <- smallList(size, genTypeDef(size - 1));
+        impl <- genTemplate(size - 1))
+      yield ClassDef(mods, name, tparams, impl)
+
+  def genCompoundTypeTree(size: Int) =
+    for(templ <- genTemplate(size - 1))
+      yield CompoundTypeTree(templ)
+
+  def genDefDef(size: Int) =
+    for(mods <- genModifiers; name <- genTermName;
+        tpt <- genTree(size -1); rhs <- genTree(size - 1);
+        tparams <- smallList(size, genTypeDef(size - 1));
+        vparamss <- smallList(size, smallList(size, genValDef(size - 1))))
+      yield DefDef(mods, name, tparams, vparamss, tpt, rhs)
+
+  def genExistentialTypeTree(size: Int) =
+    for(tpt <- genTree(size - 1); where <- smallList(size, oneOf(genValDef(size - 1), genTypeDef(size - 1))))
+      yield ExistentialTypeTree(tpt, where)
+
+  def genFunction(size: Int) =
+    for(vparams <- smallList(size, genValDef(size - 1)); body <- genTree(size - 1))
+      yield Function(vparams, body)
+
+  def genIdent(nameGen: Gen[Name] = genName) =
+    for(name <- nameGen) yield Ident(name)
+
+  def genIf(size: Int) =
+    for(cond <- genTree(size - 1); thenp <- genTree(size - 1); elsep <- genTree(size - 1))
+      yield If(cond, thenp, elsep)
+
+  def genImport(size: Int) =
+    for(expr <- genTree(size - 1); selectors <- smallList(size, genImportSelector(size - 1)))
+      yield Import(expr, selectors)
+
+  def genImportSelector(size: Int) =
+    for(name <- genName; namePos <- arbitrary[Int]; rename <- genName; renamePos <- arbitrary[Int])
+      yield ImportSelector(name, namePos, rename, renamePos)
+
+  def genTemplate(size: Int) =
+    for(parents <- smallList(size, genTree(size - 1));
+        self <- genValDef(size - 1);
+        body <- smallList(size, genTree(size - 1)))
+      yield Template(parents, self, body)
+
+  def genLabelDef(size: Int) =
+    for(name <- genTermName; params <- smallList(size, genIdent()); rhs <- genTree(size - 1))
+      yield LabelDef(name, params, rhs)
+
+  def genLiteral =
+    for(const <- genConstant) yield Literal(const)
+
+  def genMatch(size: Int) =
+    for(selector <- genTree(size - 1); cases <- smallList(size, genCaseDef(size - 1)))
+      yield Match(selector, cases)
+
+  def genModuleDef(size: Int) =
+    for(mods <- genModifiers; name <- genTermName; impl <- genTemplate(size - 1))
+      yield ModuleDef(mods, name, impl)
+
+  def genNew(size: Int) =
+    for(tpt <- genTree(size - 1))
+      yield New(tpt)
+
+  def genRefTree(size: Int) =
+    oneOf(genSelect(size), genIdent(), genSelectFromTypeTree(size))
+
+  def genPackageDef(size: Int) =
+    for(reftree <- genRefTree(size - 1); stats <- smallList(size, genTree(size - 1)))
+      yield PackageDef(reftree, stats)
+
+  def genTypeSelect(size: Int) =
+    for(qual <- genTree(size - 1); name <- genTypeName)
+      yield Select(qual, name)
+
+  def genSelect(size: Int, nameGen: Gen[Name] = genName) =
+    for(qual <- genTree(size - 1); name <- nameGen)
+      yield Select(qual, name)
+
+  def genSelectFromTypeTree(size: Int) =
+    for(qual <- genTreeIsType(size - 1); name <- genTypeName)
+      yield SelectFromTypeTree(qual, name)
+
+  def genReferenceToBoxed(size: Int) =
+    for(ident <- genIdent())
+      yield ReferenceToBoxed(ident)
+
+  def genReturn(size: Int) =
+    for(expr <- genTree(size - 1))
+      yield Return(expr)
+
+  def genSingletonTypeTree(size: Int) =
+    for(expr <- genTree(size - 1))
+      yield SingletonTypeTree(expr)
+
+  def genStar(size: Int) =
+    for(expr <- genTree(size - 1))
+      yield Star(expr)
+
+  def genSuper(size: Int) =
+    for(qual <- genTree(size - 1); mix <- genTypeName)
+      yield Super(qual, mix)
+
+  def genThis(size: Int) =
+    for(qual <- genTypeName)
+      yield This(qual)
+
+  def genThrow(size: Int) =
+    for(expr <- genTree(size - 1))
+      yield Throw(expr)
+
+  def genTry(size: Int) =
+    for(block <- genTree(size - 1);
+        catches <- smallList(size, genCaseDef(size - 1));
+        finalizer <- genTree(size - 1))
+      yield Try(block, catches, finalizer)
+
+  def genTypeApply(size: Int) =
+    for(fun <- genTreeIsTerm(size - 1); args <- smallList(size, genTree(size - 1)))
+      yield TypeApply(fun, args)
+
+  def genTypeBoundsTree(size: Int) =
+    for(lo <- genTree(size - 1); hi <- genTree(size - 1))
+      yield TypeBoundsTree(lo, hi)
+
+  def genTypeDef(size: Int): Gen[TypeDef] =
+    for(mods <- genModifiers; name <- genTypeName;
+        tparams <- smallList(size, genTypeDef(size - 1)); rhs <- genTree(size - 1))
+      yield TypeDef(mods, name, tparams, rhs)
+
+  def genTypeTree: Gen[TypeTree] = TypeTree()
+
+  def genTyped(size: Int) =
+    for(expr <- genTree(size - 1); tpt <- genTree(size - 1))
+      yield Typed(expr, tpt)
+
+  def genUnApply(size: Int) =
+    for(fun <- genTree(size - 1); args <- smallList(size, genTree(size - 1)))
+      yield UnApply(fun, args)
+
+  def genValDef(size: Int) =
+    for(mods <- genModifiers; name <- genTermName;
+        tpt <- genTree(size - 1); rhs <- genTree(size - 1))
+      yield ValDef(mods, name, tpt, rhs)
+
+  def genTree(size: Int): Gen[Tree] =
+    if (size <= 1) oneOf(EmptyTree: Gen[Tree], genTreeIsTerm(size), genTreeIsType(size))
+    else oneOf(genTree(1),
+               // these trees are neither terms nor types
+               genPackageDef(size - 1), genModuleDef(size - 1),
+               genCaseDef(size - 1), genDefDef(size - 1),
+               genTypeDef(size - 1), genTemplate(size - 1),
+               genClassDef(size - 1), genValDef(size - 1),
+               genImport(size - 1))
+
+  def genTreeIsTerm(size: Int): Gen[Tree] =
+    if (size <= 1) oneOf(genLiteral, genIdent(genTermName))
+    else oneOf(genTreeIsTerm(1), genBind(size - 1, genTermName),
+               genAnnotated(size - 1, genTreeIsTerm), genSelect(size - 1, genTermName),
+               genAlternative(size - 1), genApply(size - 1), genAssign(size - 1),
+               genAssignOrNamedArg(size - 1), genBlock(size - 1), genFunction(size - 1),
+               genIf(size - 1), genLabelDef(size - 1), genMatch(size - 1), genNew(size - 1),
+               genReturn(size - 1), genStar(size - 1), genSuper(size - 1), genThis(size - 1),
+               genThrow(size - 1), genTry(size - 1), genTypeApply(size - 1),
+               genTyped(size - 1), genUnApply(size - 1))
+
+  def genTreeIsType(size: Int): Gen[Tree] =
+    if (size <= 1) genIdent(genTypeName)
+    else oneOf(genTreeIsType(1), genAnnotated(size - 1, genTreeIsType),
+               genBind(size - 1, genTypeName), genSelect(size - 1, genTypeName),
+               genSingletonTypeTree(size - 1), genSelectFromTypeTree(size - 1),
+               genExistentialTypeTree(size - 1), genCompoundTypeTree(size - 1),
+               genAppliedTypeTree(size - 1), genTypeBoundsTree(size - 1))
+
+  /*  These are marker types that allow writing tests that
+   *  depend specifically on Trees that are terms or types.
+   *  They are transparently transformed into trees through
+   *  implicit conversions and liftables for quasiquotes.
+   */
+
+  case class TreeIsTerm(tree: Tree) { require(tree.isTerm, showRaw(tree)) }
+  case class TreeIsType(tree: Tree) { require(tree.isType, showRaw(tree)) }
+
+  def genTreeIsTermWrapped(size: Int) =
+    for(tit <- genTreeIsTerm(size)) yield TreeIsTerm(tit)
+
+  def genTreeIsTypeWrapped(size: Int) =
+    for(tit <- genTreeIsType(size)) yield TreeIsType(tit)
+
+  implicit val liftTreeIsTerm = Liftable[TreeIsTerm] { _.tree }
+  implicit val liftTreeIsType = Liftable[TreeIsType] { _.tree }
+  implicit def treeIsTerm2tree(tit: TreeIsTerm): Tree = tit.tree
+  implicit def treeIsType2tree(tit: TreeIsType): Tree = tit.tree
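+  // Illustrative usage sketch (an assumption, not part of the original file):
+  // the Liftable instances above let the wrappers be unquoted directly, so a
+  // property can quantify over TreeIsTerm while still splicing the underlying
+  // Tree, e.g. forAll { (t: TreeIsTerm) => q"f($t)" ≈ q"f(${t.tree})" }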
+
+  implicit val arbConstant: Arbitrary[Constant] = Arbitrary(genConstant)
+  implicit val arbModifiers: Arbitrary[Modifiers] = Arbitrary(genModifiers)
+  implicit val arbTermName: Arbitrary[TermName] = Arbitrary(genTermName)
+  implicit val arbTypeName: Arbitrary[TypeName] = Arbitrary(genTypeName)
+  implicit val arbName: Arbitrary[Name] = Arbitrary(genName)
+
+  // Tree generators are bounded by this size to keep
+  // generation times short and memory usage low.
+  // TODO: is there any better solution?
+  val maxTreeSize = 5
+
+  def arbitrarySized[T](gen: Int => Gen[T]) =
+    Arbitrary(sized(s => gen(s.min(maxTreeSize))))
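+  // For instance, arbitrarySized(genTree) yields
+  // Arbitrary(sized(s => genTree(s.min(maxTreeSize)))), so tree generators
+  // never recurse deeper than maxTreeSize levels regardless of the size
+  // ScalaCheck asks for.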
+
+  implicit val arbLiteral: Arbitrary[Literal] = Arbitrary(genLiteral)
+  implicit val arbIdent: Arbitrary[Ident] = Arbitrary(genIdent())
+  implicit val arbValDef: Arbitrary[ValDef] = arbitrarySized(genValDef)
+  implicit val arbDefDef: Arbitrary[DefDef] = arbitrarySized(genDefDef)
+  implicit val arbTypeDef: Arbitrary[TypeDef] = arbitrarySized(genTypeDef)
+  implicit val arbBind: Arbitrary[Bind] = arbitrarySized(genBind(_, genName))
+  implicit val arbTree: Arbitrary[Tree] = arbitrarySized(genTree)
+  implicit val arbTreeIsTerm: Arbitrary[TreeIsTerm] = arbitrarySized(genTreeIsTermWrapped)
+  implicit val arbTreeIsType: Arbitrary[TreeIsType] = arbitrarySized(genTreeIsTypeWrapped)
+}
\ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala b/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala
new file mode 100644
index 0000000..fd81067
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala
@@ -0,0 +1,453 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.ScalaDot
+
+object DefinitionConstructionProps
+    extends QuasiquoteProperties("definition construction")
+    with ClassConstruction
+    with TraitConstruction
+    with TypeDefConstruction
+    with ValDefConstruction
+    with PatDefConstruction
+    with DefConstruction
+    with PackageConstruction
+    with ImportConstruction {
+
+  val x: Tree = q"val x: Int"
+  property("SI-6842 a1") = test { assertEqAst(q"def f($x) = 0", "def f(x: Int) = 0") }
+  property("SI-6842 a2") = test { assertEqAst(q"class C($x)", "class C(val x: Int)") }
+  property("SI-6842 a3") = test { assertEqAst(q"class C { $x => }", "class C { x: Int => }") }
+  property("SI-6842 a4") = test { assertEqAst(q"trait B { $x => }", "trait B { x: Int => }") }
+  property("SI-6842 a5") = test { assertEqAst(q"object A { $x => }", "object A { x: Int => }") }
+
+  val t: Tree = q"type T"
+  property("SI-6842 b1") = test { assertEqAst(q"def f[$t] = 0", "def f[T] = 0") }
+  property("SI-6842 b2") = test { assertEqAst(q"class C[$t]", "class C[T]") }
+  property("SI-6842 b3") = test { assertEqAst(q"trait B[$t]", "trait B[T]") }
+}
+
+trait ClassConstruction { self: QuasiquoteProperties =>
+  val anyRef = ScalaDot(TypeName("AnyRef"))
+  val emptyConstructor =
+    DefDef(Modifiers(), termNames.CONSTRUCTOR, List(),
+      List(List()), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(()))))
+  def classWith(name: TypeName, parents: List[Tree] = List(anyRef), body: List[DefDef] = Nil) =
+    ClassDef(
+      Modifiers(), name, List(),
+      Template(parents, emptyValDef, emptyConstructor :: body))
+
+  property("construct case class") = test {
+    val params = q"val x: Int" :: q"val y: Int" :: Nil
+    val name = TypeName("Point")
+    assertEqAst(q"$CASE class $name(..$params)", "case class Point(x: Int, y: Int)")
+  }
+
+  property("case class bare param") = test {
+    assertEqAst(q"$CASE class Point(x: Int, y: Int)", "case class Point(private[this] val x: Int, private[this] val y: Int)")
+  }
+
+  property("generate default constructors automatically") = test {
+    val parents = List.empty[Tree]
+    assertEqAst(q"class Foo extends ..$parents", "class Foo")
+  }
+
+  property("unquote term name into class") = forAll { (rname: TypeName) =>
+    // add a prefix to avoid failure in case rname is a keyword
+    val name = TypeName("prefix$" + rname)
+    eqAst(q"class $name", "class " + name.toString)
+  }
+
+  property("unquote method into class") = forAll { (name: TypeName, method: DefDef) =>
+    q"class $name { $method }" ≈ classWith(name, body = List(method))
+  }
+
+  property("unquote members into class") = forAll { (name: TypeName, defs: List[DefDef], extra: DefDef) =>
+    q"""class $name {
+      ..$defs
+      $extra
+    }""" ≈ classWith(name, body = defs :+ extra)
+  }
+
+  property("unquote type name into class parents") = forAll { (name: TypeName, parent: TypeName) =>
+    q"class $name extends $parent" ≈ classWith(name, parents = List(Ident(parent)))
+  }
+
+  property("param flags are consistent with raw code") = test {
+    val pubx = q"val x: Int"
+    val privx = q"private[this] val x: Int"
+    assertEqAst(q"     class C(x: Int)", "     class C(x: Int)                  ")
+    assertEqAst(q"case class C(x: Int)", "case class C(x: Int)                  ")
+    assertEqAst(q"     class C($pubx) ", "     class C(val x: Int)              ")
+    assertEqAst(q"case class C($pubx) ", "case class C(x: Int)                  ")
+    assertEqAst(q"     class C($privx)", "     class C(x: Int)                  ")
+    assertEqAst(q"case class C($privx)", "case class C(private[this] val x: Int)")
+  }
+
+  property("SI-8333") = test {
+    assertEqAst(q"{ $NoMods class C }", "{ class C }")
+  }
+
+  property("SI-8332") = test {
+    val args = q"val a: Int; val b: Int"
+    assertEqAst(q"class C(implicit ..$args)", "class C(implicit val a: Int, val b: Int)")
+  }
+
+  property("SI-8451: inline secondary constructors") = test {
+    assertEqAst(q"class C(x: Int) { def this() = this(0) }", "class C(x: Int) { def this() = this(0) }")
+  }
+
+  property("SI-8451: unquoted secondary constructors") = test {
+    val secondaryCtor = q"def this() = this(0)"
+    assertEqAst(q"class C(x: Int) { $secondaryCtor }", "class C(x: Int) { def this() = this(0) }")
+  }
+}
+
+trait TraitConstruction { self: QuasiquoteProperties =>
+  property("unquote name into trait def") = test {
+    val Foo = TypeName("Foo")
+    assert(q"trait $Foo" ≈ q"trait Foo")
+  }
+
+  property("unquote type params into trait def") = test {
+    val tparams = q"type A" :: q"type B" :: Nil
+    assert(q"trait Foo[..$tparams]" ≈ q"trait Foo[A, B]")
+  }
+
+  property("unquote defs into trait body") = test {
+    val body = q"def foo" :: q"val bar: Baz" :: Nil
+    assert(q"trait Foo { ..$body }" ≈ q"trait Foo { def foo; val bar: Baz }")
+  }
+
+  property("unquote parents into trait") = test {
+    val parents = tq"A" :: tq"B" :: Nil
+    assert(q"trait Foo extends ..$parents" ≈ q"trait Foo extends A with B")
+  }
+
+  property("unquote early valdef into trait") = test {
+    val x = q"val x: Int = 1"
+    assertEqAst(q"trait T extends { $x } with Any", "trait T extends { val x: Int = 1} with Any")
+  }
+
+  property("construct trait with early valdef") = test {
+    assertEqAst(q"trait T extends { val x: Int = 1 } with Any", "trait T extends { val x: Int = 1 } with Any")
+  }
+
+  property("unquote defs into early block") = test {
+    val defs = q"val x: Int = 0" :: q"type Foo = Bar" :: Nil
+    assert(q"trait T extends { ..$defs } with Bippy" ≈
+           q"trait T extends { val x: Int = 0; type Foo = Bar} with Bippy")
+  }
+
+  property("fail on splicing of non-valid early tree") = test {
+    val defn = q"def x: Int = 0"
+    assertThrows[IllegalArgumentException] { q"trait T extends { $defn } with Bar" }
+  }
+}
+
+trait TypeDefConstruction { self: QuasiquoteProperties =>
+  property("unquote type name into typedef") = forAll { (name1: TypeName, name2: TypeName) =>
+    q"type $name1 = $name2" ≈ TypeDef(Modifiers(), name1, List(), Ident(name2))
+  }
+
+  property("unquote type names into type bounds") = forAll { (T1: TypeName, T2: TypeName, T3: TypeName) =>
+    q"type $T1 >: $T2 <: $T3" ≈
+      TypeDef(
+        Modifiers(DEFERRED), T1, List(),
+        TypeBoundsTree(Ident(T2), Ident(T3)))
+  }
+
+  property("unquote trees names into type bounds") = forAll { (T: TypeName, t1: Tree, t2: Tree) =>
+    q"type $T >: $t1 <: $t2" ≈
+      TypeDef(
+        Modifiers(DEFERRED), T, List(),
+        TypeBoundsTree(t1, t2))
+  }
+
+  property("unquote tparams into typedef (1)") = forAll { (T: TypeName, targs: List[TypeDef], t: Tree) =>
+    q"type $T[..$targs] = $t" ≈ TypeDef(Modifiers(), T, targs, t)
+  }
+
+  property("unquote tparams into typedef (2)") = forAll { (T: TypeName, targs1: List[TypeDef], targs2: List[TypeDef], t: Tree) =>
+    q"type $T[..$targs1, ..$targs2] = $t" ≈ TypeDef(Modifiers(), T, targs1 ++ targs2, t)
+  }
+
+  property("unquote tparams into typedef (3)") = forAll { (T: TypeName, targ: TypeDef, targs: List[TypeDef], t: Tree) =>
+    q"type $T[$targ, ..$targs] = $t" ≈ TypeDef(Modifiers(), T, targ :: targs, t)
+  }
+
+  property("unquote typename into typedef with default bounds") = forAll { (T1: TypeName, T2: TypeName, t: Tree) =>
+    q"type $T1[$T2 >: Any <: Nothing] = $t" ≈
+      TypeDef(
+        Modifiers(), T1,
+        List(TypeDef(
+          Modifiers(PARAM), T2,
+          List(),
+          TypeBoundsTree(
+            Ident(TypeName("Any")),
+            Ident(TypeName("Nothing"))))),
+        t)
+  }
+
+  property("unquote type names into compound type tree") = forAll { (T: TypeName, A: TypeName, B: TypeName) =>
+    q"type $T = $A with $B" ≈
+      TypeDef(
+        Modifiers(), T, List(),
+        CompoundTypeTree(
+          Template(List(Ident(A), Ident(B)), ValDef(Modifiers(PRIVATE), termNames.WILDCARD, TypeTree(), EmptyTree), List())))
+  }
+
+  property("unquote trees into existential type tree") = forAll {
+    (T1: TypeName, T2: TypeName, X: TypeName, Lo: TypeName, Hi: TypeName) =>
+
+    q"type $T1 = $T2[$X] forSome { type $X >: $Lo <: $Hi }" ≈
+      TypeDef(
+        Modifiers(), T1, List(),
+        ExistentialTypeTree(
+          AppliedTypeTree(Ident(T2), List(Ident(X))),
+          List(
+            TypeDef(Modifiers(DEFERRED), X, List(), TypeBoundsTree(Ident(Lo), Ident(Hi))))))
+  }
+
+  property("unquote tree into singleton type tree") = forAll { (name: TypeName, t: Tree) =>
+    q"type $name = $t.type" ≈ q"type $name = ${SingletonTypeTree(t)}"
+  }
+
+  property("unquote into applied type tree") = forAll { (T1: TypeName, T2: TypeName, args: List[Tree]) =>
+    q"type $T1 = $T2[..$args]" ≈
+      TypeDef(Modifiers(), T1, List(),
+        if(args.nonEmpty) AppliedTypeTree(Ident(T2), args) else Ident(T2))
+  }
+}
+
+trait ValDefConstruction { self: QuasiquoteProperties =>
+  property("unquote into val") = forAll { (name: TermName, tpt: Tree, rhs: Tree) =>
+    q"val $name: $tpt = $rhs" ≈ ValDef(Modifiers(), name, tpt, rhs)
+  }
+
+  property("unquote into var") = forAll { (name: TermName, tpt: Tree, rhs: Tree) =>
+    q"var $name: $tpt = $rhs" ≈ ValDef(Modifiers(MUTABLE), name, tpt, rhs)
+  }
+
+  // left tree is not a pattern due to SI-8211
+  property("SI-8202") = test {
+    assertEqAst(q"val (x: Int) = 1", "val x: Int = 1")
+  }
+}
+
+trait PatDefConstruction { self: QuasiquoteProperties =>
+  property("unquote pattern into pat def") = test {
+    val pat = pq"(a, b)"
+    assertEqAst(q"val $pat = (1, 2)", "val (a, b) = (1, 2)")
+    val tpt = tq"(Int, Int)"
+    assertEqAst(q"val $pat: $tpt = (1, 2)", "val (a, b): (Int, Int) = (1, 2)")
+  }
+
+  property("unquote pattern into pat def within other pattern (1)") = test {
+    val pat = pq"(a, b)"
+    assertEqAst(q"val Foo($pat) = Foo((1, 2))", "val Foo((a, b)) = Foo((1, 2))")
+    val tpt = tq"Foo"
+    assertEqAst(q"val Foo($pat): $tpt = Foo((1, 2))", "val Foo((a, b)): Foo = Foo((1, 2))")
+  }
+
+  property("unquote patterns into pat def within other pattern (2)") = test {
+    val pat1 = pq"(a, b)"; val pat2 = pq"(c, d)"
+    assertEqAst(q"val ($pat1, $pat2) = ((1, 2), (3, 4))", "val ((a, b), (c, d)) = ((1, 2), (3, 4))")
+    val tpt = tq"((Int, Int), (Int, Int))"
+    assertEqAst(q"val ($pat1, $pat2): $tpt = ((1, 2), (3, 4))", "val ((a, b), (c, d)): ((Int, Int), (Int, Int)) = ((1, 2), (3, 4))")
+  }
+
+  property("unquote pattern without free vars into pat def") = test {
+    val pat = pq"((1, 2), 3)"
+    assertEqAst(q"val $pat = ((1, 2), 3)", "{ val ((1, 2), 3) = ((1, 2), 3) }")
+    val tpt = tq"((Int, Int), Int)"
+    assertEqAst(q"val $pat: $tpt = ((1, 2), 3)","{ val ((1, 2), 3): ((Int, Int), Int) = ((1, 2), 3) }")
+  }
+
+  // won't result in a pattern match due to SI-8211
+  property("unquote typed pat into pat def") = test {
+    val pat = pq"x: Int"
+    assertEqAst(q"val $pat = 2", "{ val x: Int = 2 }")
+  }
+}
+
+trait MethodConstruction { self: QuasiquoteProperties =>
+  property("unquote paramss into defdef") = test {
+    val paramss = List(q"val x: Int") :: List(q"val y: Int = 1") :: Nil
+    assert(q"def foo(...$paramss)" ≈ parse("def foo(x: Int)(y: Int = 1)"))
+  }
+
+  property("unquote tparams into defdef") = test {
+    val tparams = q"type A" :: q"type B <: Bippy" :: Nil
+    assert(q"def foo[..$tparams]" ≈ parse("def foo[A, B <: Bippy]"))
+  }
+
+  def assertSameAnnots(tree: {def mods: Modifiers}, annots: List[Tree]) =
+    assert(tree.mods.annotations ≈ annots,
+           s"${tree.mods.annotations} =/= ${annots}")
+
+  def assertSameAnnots(tree1: {def mods: Modifiers}, tree2: {def mods: Modifiers}) =
+    assert(tree1.mods.annotations ≈ tree2.mods.annotations,
+           s"${tree1.mods.annotations} =/= ${tree2.mods.annotations}")
+
+  property("unquote type name into annotation") = test {
+    val name = TypeName("annot")
+    assertSameAnnots(q"@$name def foo", List(q"new $name"))
+  }
+
+  property("unquote ident into annotation") = test {
+    val name = TypeName("annot")
+    val ident = Ident(name)
+    assertSameAnnots(q"@$ident def foo", List(q"new $name"))
+  }
+
+  property("unquote idents into annotation") = test {
+    val idents = List(Ident(TypeName("annot1")), Ident(TypeName("annot2")))
+    assertSameAnnots(q"@..$idents def foo",
+      idents.map { ident => Apply(Select(New(ident), termNames.CONSTRUCTOR), List()) })
+  }
+
+  property("unquote constructor calls into annotation") = test {
+    val ctorcalls = List(q"new a1", q"new a2")
+    assertSameAnnots(q"@..$ctorcalls def foo", ctorcalls)
+  }
+
+  property("unquote multiple annotations (1)") = test {
+    val annot1 = q"new a1"
+    val annot2 = q"new a2"
+    val res = q"@$annot1 @$annot2 def foo"
+    assertSameAnnots(res, List(annot1, annot2))
+  }
+
+  property("unquote multiple annotations (2)") = test {
+    val annot1 = q"new a1"
+    val annots = List(q"new a2", q"new a3")
+    val res = q"@$annot1 @..$annots def foo"
+    assertSameAnnots(res, annot1 :: annots)
+  }
+
+  property("unquote annotations with arguments (1)") = test {
+    val a = q"new a(x)"
+    assertSameAnnots(q"@$a def foo", q"@a(x) def foo")
+  }
+
+  property("unquote annotations with arguments (2)") = test {
+    val a = TypeName("a")
+    assertSameAnnots(q"@$a(x) def foo", q"@a(x) def foo")
+  }
+
+  property("unquote annotations with arguments (3") = test {
+    val a = Ident(TypeName("a"))
+    assertSameAnnots(q"@$a(x) def foo", q"@a(x) def foo")
+  }
+
+  property("unquote improper tree into annot") = test {
+    val t = tq"Foo[Baz]"
+    assertThrows[IllegalArgumentException] {
+      q"@$t def foo"
+    }
+  }
+
+  property("can't unquote annotations with arguments specificed twice") = test {
+    val a = q"new a(x)"
+    assertThrows[IllegalArgumentException] {
+      q"@$a(y) def foo"
+    }
+  }
+
+  property("unquote annotation with targs") = test {
+    val a = q"new Foo[A, B]"
+    assertEqAst(q"@$a def foo", "@Foo[A,B] def foo")
+  }
+
+  property("unquote annotation with multiple argument lists") = test {
+    val a = q"new Foo(a)(b)"
+    assertEqAst(q"@$a def foo", "@Foo(a)(b) def foo")
+  }
+}
+
+trait PackageConstruction { self: QuasiquoteProperties =>
+  property("unquote select into package name") = test {
+    val name = q"foo.bar"
+    assertEqAst(q"package $name { }", "package foo.bar { }")
+  }
+
+  property("splce name into package name") = test{
+    val name = TermName("bippy")
+    assertEqAst(q"package $name { }", "package bippy { }")
+  }
+
+  property("unquote members into package body") = test {
+    val members = q"class C" :: q"object O" :: Nil
+    assertEqAst(q"package foo { ..$members }", "package foo { class C; object O }")
+  }
+
+  property("unquote illegal members into package body") = test {
+    val f = q"def f"
+    assertThrows[IllegalArgumentException] { q"package foo { $f }" }
+    val v = q"val v = 0"
+    assertThrows[IllegalArgumentException] { q"package foo { $v }" }
+    val expr = q"x + 1"
+    assertThrows[IllegalArgumentException] { q"package foo { $expr }" }
+  }
+
+  property("unquote name into package object") = test {
+    val foo = TermName("foo")
+    assertEqAst(q"package object $foo", "package object foo")
+  }
+
+  property("unquote parents into package object") = test {
+    val parents = tq"a" :: tq"b" :: Nil
+    assertEqAst(q"package object foo extends ..$parents",
+                 "package object foo extends a with b")
+  }
+
+  property("unquote members into package object") = test {
+    val members = q"def foo" :: q"val x = 1" :: Nil
+    assertEqAst(q"package object foo { ..$members }",
+                 "package object foo { def foo; val x = 1 }")
+  }
+
+  property("unquote early def into package object") = test {
+    val edefs = q"val x = 1" :: q"type I = Int" :: Nil
+    assertEqAst(q"package object foo extends { ..$edefs } with Any",
+                 "package object foo extends { val x = 1; type I = Int } with Any")
+  }
+}
+
+trait DefConstruction { self: QuasiquoteProperties =>
+  property("construct implicit args (1)") = test {
+    val x = q"val x: Int"
+    assertEqAst(q"def foo(implicit $x) = x", "def foo(implicit x: Int) = x")
+  }
+
+  property("construct implicit args (2)") = test {
+    val xs = q"val x1: Int" :: q"val x2: Long" :: Nil
+    assertEqAst(q"def foo(implicit ..$xs) = x1 + x2", "def foo(implicit x1: Int, x2: Long) = x1 + x2")
+  }
+}
+
+trait ImportConstruction { self: QuasiquoteProperties =>
+  property("construct wildcard import") = test {
+    val sel = pq"_"
+    assert(q"import foo.$sel" ≈ q"import foo._")
+  }
+
+  property("construct named import") = test {
+    val sel = pq"bar"
+    assert(q"import foo.$sel" ≈ q"import foo.bar")
+  }
+
+  property("construct renaming import") = test {
+    val sel = pq"bar -> baz"
+    assert(q"import foo.$sel" ≈ q"import foo.{bar => baz}")
+  }
+
+  property("construct unimport import") = test {
+    val sels = pq"poison -> _" :: pq"_" :: Nil
+    assert(q"import foo.{..$sels}" ≈ q"import foo.{poison => _, _}")
+  }
+
+  property("construct mixed import") = test {
+    val sels = pq"a -> b" :: pq"c -> _" :: pq"_" :: Nil
+    assert(q"import foo.{..$sels}" ≈ q"import foo.{a => b, c => _, _}")
+  }
+}
diff --git a/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala
new file mode 100644
index 0000000..2c0e100
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala
@@ -0,0 +1,290 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.SyntacticClassDef
+
+object DefinitionDeconstructionProps
+  extends QuasiquoteProperties("definition deconstruction")
+  with TraitDeconstruction
+  with ClassDeconstruction
+  with ObjectDeconstruction
+  with ModsDeconstruction
+  with ValVarDeconstruction
+  with DefDeconstruction
+  with PackageDeconstruction
+  with ImportDeconstruction
+
+trait TraitDeconstruction { self: QuasiquoteProperties =>
+  property("exhaustive trait matcher") = test {
+    def matches(line: String) {
+      val q"""$mods trait $name[..$targs]
+              extends { ..$early } with ..$parents { $self => ..$body }""" = parse(line)
+    }
+    matches("trait Foo")
+    matches("trait Foo[T]")
+    matches("trait Foo { def bar }")
+    matches("trait Foo extends Bar with Baz")
+    matches("trait Foo { self: Bippy => val x: Int = 1}")
+    matches("trait Foo extends { val early: Int = 1 } with Bar { val late = early }")
+    matches("private[Gap] trait Foo")
+  }
+}
+
+trait ObjectDeconstruction { self: QuasiquoteProperties =>
+  property("exhaustive object matcher") = test {
+    def matches(line: String) = {
+      val q"""$mods object $name extends { ..$early } with ..$parents { $self => ..$body }""" = parse(line)
+    }
+    matches("object Foo")
+    matches("object Foo extends Bar[T]")
+    matches("object Foo extends { val early: T = v } with Bar")
+    matches("object Foo extends Foo { selfy => body }")
+    matches("private[Bippy] object Foo extends Bar with Baz")
+  }
+}
+
+trait ClassDeconstruction { self: QuasiquoteProperties =>
+  property("class without params") = test {
+    val q"class $name { ..$body }" = q"class Foo { def bar = 3 }"
+    assert(body ≈ List(q"def bar = 3"))
+  }
+
+  property("class constructor") = test {
+    val q"class $name(...$argss)" = q"class Foo(x: Int)(y: Int)"
+    assert(argss.length == 2)
+  }
+
+  property("class parents") = test {
+    val q"class $name extends ..$parents" = q"class Foo extends Bar with Blah"
+    assert(parents ≈ List(tq"Bar", tq"Blah"))
+  }
+
+  property("class selfdef") = test {
+    val q"class $name { $self => }" = q"class Foo { self: T => }"
+    assert(self.name ≈ TermName("self") && self.tpt ≈ tq"T")
+  }
+
+  property("class tparams") = test {
+    val q"class $name[..$tparams]" = q"class Foo[A, B]"
+    assert(tparams.map { _.name } == List(TypeName("A"), TypeName("B")))
+  }
+
+  property("deconstruct bare case class") = test {
+    val q"$mods class $name(..$args) extends ..$parents" = q"case class Foo(x: Int)"
+  }
+
+  property("exhaustive class matcher") = test {
+    def matches(line: String) {
+      val tree = parse(line)
+      val q"""$classMods0 class $name0[..$targs0] $ctorMods0(...$argss0)
+              extends { ..$early0 } with ..$parents0 { $self0 => ..$body0 }""" = tree
+      val q"""$classMods1 class $name1[..$targs1] $ctorMods1(...$argss1)(implicit ..$impl)
+              extends { ..$early1 } with ..$parents1 { $self1 => ..$body1 }""" = tree
+    }
+    matches("class Foo")
+    matches("class Foo[T]")
+    matches("class Foo[T] @annot")
+    matches("class Foo extends Bar with Baz")
+    matches("class Foo { body }")
+    matches("class Foo extends { val early = 0 } with Any")
+    matches("abstract class Foo")
+    matches("private[Baz] class Foo")
+    matches("class Foo(first: A)(second: B)")
+    matches("class Foo(first: A) extends Bar(first) with Baz")
+    matches("class Foo private (first: A) { def bar }")
+    matches("class Foo { self => bar(self) }")
+    matches("case class Foo(x: Int)")
+  }
+
+  property("SI-7979") = test {
+    val PARAMACCESSOR = (1 << 29).toLong.asInstanceOf[FlagSet]
+    assertThrows[MatchError] {
+      val SyntacticClassDef(_, _, _, _, _, _, _, _, _) =
+        ClassDef(
+          Modifiers(), TypeName("Foo"), List(),
+          Template(
+            List(Select(Ident(TermName("scala")), TypeName("AnyRef"))),
+            noSelfType,
+            List(
+              //ValDef(Modifiers(PRIVATE | LOCAL | PARAMACCESSOR), TermName("x"), Ident(TypeName("Int")), EmptyTree),
+              DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List(ValDef(Modifiers(PARAM | PARAMACCESSOR), TermName("x"),
+                Ident(TypeName("Int")), EmptyTree))), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(())))))))
+    }
+  }
+
+  property("SI-8332") = test {
+    val q"class C(implicit ..$args)" = q"class C(implicit i: I, j: J)"
+    val q"$imods val i: I" :: q"$jmods val j: J" :: Nil = args
+    assert(imods.hasFlag(IMPLICIT))
+    assert(jmods.hasFlag(IMPLICIT))
+  }
+}
+
+trait ModsDeconstruction { self: QuasiquoteProperties =>
+  property("deconstruct mods") = test {
+    val mods = Modifiers(IMPLICIT | PRIVATE, TermName("foobar"), Nil)
+    val q"$mods0 def foo" = q"$mods def foo"
+    assert(mods0 ≈ mods)
+  }
+
+  property("@$annot def foo") = forAll { (annotName: TypeName) =>
+    val q"@$annot def foo" = q"@$annotName def foo"
+    annot ≈ Apply(Select(New(Ident(annotName)), termNames.CONSTRUCTOR), List())
+  }
+
+  property("@$annot(..$args) def foo") = forAll { (annotName: TypeName, tree: Tree) =>
+    val q"@$annot(..$args) def foo" = q"@$annotName($tree) def foo"
+    annot ≈ Ident(annotName) && args ≈ List(tree)
+  }
+
+  property("@..$annots def foo") = test {
+    val a = q"new a"
+    val b = q"new b"
+    val q"@..$annots def foo" = q"@$a @$b def foo"
+    annots ≈ List(a, b)
+  }
+
+  property("@$annot @..$annots def foo") = test {
+    val a = q"new a"
+    val b = q"new b"
+    val c = q"new c"
+    val q"@$first @..$rest def foo" = q"@$a @$b @$c def foo"
+    assert(first ≈ a)
+    assert(rest ≈ List(b, c))
+  }
+
+  property("@..$anots @$annot def foo") = test {
+    val a = q"new a"
+    val b = q"new b"
+    val c = q"new c"
+    val q"@..$init @$last def foo" = q"@$a @$b @$c def foo"
+    assert(init ≈ List(a, b))
+    assert(last ≈ c)
+  }
+}
+
+trait ValVarDeconstruction { self: QuasiquoteProperties =>
+  property("exhaustive val matcher") = test {
+    def matches(line: String) { val q"$mods val $name: $tpt = $rhs" = parse(line) }
+    matches("val x: Int")
+    matches("val x: Int = 1")
+    matches("lazy val x: Int = 1")
+    matches("implicit val x = 1")
+    assertThrows[MatchError] { matches("var x = 1") }
+  }
+
+  property("exhaustive var matcher") = test {
+    def matches(line: String) { val q"$mods var $name: $tpt = $rhs" = parse(line) }
+    matches("var x: Int")
+    matches("var x: Int = 1")
+    matches("var x = 1")
+    assertThrows[MatchError] { matches("val x = 1") }
+  }
+}
+
+trait PackageDeconstruction { self: QuasiquoteProperties =>
+  property("exhaustive package matcher") = test {
+    def matches(line: String) { val q"package $name { ..$body }" = parse(line) }
+    matches("package foo { }")
+    matches("package foo { class C }")
+    matches("package foo.bar { }")
+    matches("package bippy.bongo { object A; object B }")
+    matches("package bippy { package bongo { object O } }")
+  }
+
+  property("exhaustive package object matcher") = test {
+    def matches(line: String) {
+      val q"package object $name extends { ..$early } with ..$parents { $self => ..$body }" = parse(line)
+    }
+    matches("package object foo")
+    matches("package object foo { def baz }")
+    matches("package object foo { self => }")
+    matches("package object foo extends mammy with daddy { def baz }")
+    matches("package object foo extends { val early = 1 } with daddy")
+    assertThrows[MatchError] { matches("object foo") }
+  }
+}
+
+trait DefDeconstruction { self: QuasiquoteProperties =>
+  property("exhaustive def matcher") = test {
+    def matches(line: String) = {
+      val t = parse(line)
+      val q"$mods0 def $name0[..$targs0](...$argss0): $restpe0 = $body0" = t
+      val q"$mods1 def $name1[..$targs1](...$argss1)(implicit ..$impl1): $restpe1 = $body1" = t
+    }
+    matches("def foo = foo")
+    matches("implicit def foo: Int = 2")
+    matches("def foo[T](x: T): T = x")
+    matches("def foo[A: B] = implicitly[B[A]]")
+    matches("private def foo = 0")
+    matches("def foo[A <% B] = null")
+    matches("def foo(one: One)(two: Two) = (one, two)")
+    matches("def foo[T](args: T*) = args.toList")
+  }
+
+  property("extract implicit arg list (1)") = test {
+    val q"def foo(...$argss)(implicit ..$impl)" = q"def foo(x: Int)(implicit y: Int)"
+    assert(impl ≈ List(q"${Modifiers(IMPLICIT | PARAM)} val y: Int"))
+  }
+
+  property("extract implicit arg list (2)") = test {
+    val q"def foo(...$argss)(implicit ..$impl)" = q"def foo(x: Int)"
+    assert(impl.isEmpty)
+  }
+
+  property("SI-8451") = test {
+    val q"def this(..$params) = this(..$args)" = q"def this(x: Int) = this(0)"
+    assert(params ≈ List(q"${Modifiers(PARAM)} val x: Int"))
+    assert(args ≈ List(q"0"))
+  }
+}
+
+trait ImportDeconstruction { self: QuasiquoteProperties =>
+  property("exhaustive import matcher") = test {
+    def matches(line: String) = {
+      val q"import $ref.{..$sels}" = parse(line)
+    }
+    matches("import foo.bar")
+    matches("import foo.{bar, baz}")
+    matches("import foo.{a => b, c => d}")
+    matches("import foo.{poision => _, _}")
+    matches("import foo.bar.baz._")
+  }
+
+  property("extract import binding") = test {
+    val q"import $_.$sel" = q"import foo.bar"
+    val pq"bar" = sel
+  }
+
+  property("extract import wildcard") = test {
+    val q"import $_.$sel" = q"import foo._"
+    val pq"_" = sel
+  }
+
+  property("extract import rename") = test {
+    val q"import $_.$sel" = q"import foo.{bar => baz}"
+    val pq"bar -> baz" = sel
+    val pq"$left -> $right" = sel
+    val pq"bar" = left
+    val pq"baz" = right
+  }
+
+  property("extract import unimport") = test {
+    val q"import $_.$sel" = q"import foo.{bar => _}"
+    val pq"bar -> _" = sel
+    val pq"$left -> $right" = sel
+    val pq"bar" = left
+    val pq"_" = right
+  }
+
+  property("unquote names into import selector") = forAll {
+    (expr: Tree, plain: TermName, oldname: TermName, newname: TermName, discard: TermName) =>
+
+    val Import(expr1, List(
+      ImportSelector(plain11, _, plain12, _),
+      ImportSelector(oldname1, _, newname1, _),
+      ImportSelector(discard1, _, wildcard, _))) =
+        q"import $expr.{$plain, $oldname => $newname, $discard => _}"
+
+    expr1 ≈ expr && plain11 == plain12 && plain12 == plain &&
+    oldname1 == oldname && newname1 == newname && discard1 == discard && wildcard == termNames.WILDCARD
+  }
+}
diff --git a/test/files/scalacheck/quasiquotes/DeprecationProps.scala b/test/files/scalacheck/quasiquotes/DeprecationProps.scala
new file mode 100644
index 0000000..8e1601c
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/DeprecationProps.scala
@@ -0,0 +1,52 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._
+
+object DeprecationProps extends QuasiquoteProperties("deprecation") {
+  val tname = TypeName("Foo")
+  val tpt = tq"Foo"
+  val tpe = typeOf[Int]
+  val sym = tpe.typeSymbol.asType
+  val argss = List(q"x") :: List(q"y") :: Nil
+  val args = q"x" :: q"y" :: Nil
+
+  property("new tpt argss") = test {
+    assert(q"new $tpt(...$argss)" ≈ New(tpt, argss))
+  }
+
+  property("new tpe args") = test {
+    assert(q"new $tpe(..$args)" ≈ New(tpe, args: _*))
+  }
+
+  property("new tpe args") = test {
+    assert(q"new ${sym.toType}(..$args)" ≈ New(sym, args: _*))
+  }
+
+  property("apply sym args") = test {
+    assert(q"$sym(..$args)" ≈ Apply(sym, args: _*))
+  }
+
+  property("applyconstructor") = test {
+    assert(q"new $tpt(..$args)" ≈ ApplyConstructor(tpt, args))
+  }
+
+  property("super sym name") = test {
+    assert(q"$sym.super[$tname].x".qualifier ≈ Super(sym, tname))
+  }
+
+  property("throw tpe args") = test {
+    assert(q"throw new $tpe(..$args)" ≈ Throw(tpe, args: _*))
+  }
+
+  property("casedef pat body") = test {
+    val pat = pq"foo"
+    val body = q"bar"
+    assert(cq"$pat => $body" ≈ CaseDef(pat, body))
+  }
+
+  property("try body cases") = test {
+    val cases = (pq"a", q"b") :: (pq"c", q"d") :: Nil
+    val newcases = cases.map { case (pat, body) => cq"$pat => $body" }
+    val body = q"foo"
+    assert(q"try $body catch { case ..$newcases }" ≈ Try(body, cases: _*))
+  }
+}
\ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/ErrorProps.scala b/test/files/scalacheck/quasiquotes/ErrorProps.scala
new file mode 100644
index 0000000..2cba07a
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/ErrorProps.scala
@@ -0,0 +1,213 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+
+object ErrorProps extends QuasiquoteProperties("errors") {
+  property("can't extract two .. rankinalities in a row") = fails(
+    "Can't extract with .. here",
+    """
+      val xs = List(q"x1", q"x2")
+      val q"f(..$xs1, ..$xs2)" = xs
+    """)
+
+  property("can't unquote with given rank") = fails(
+    "Can't unquote List[StringBuilder], consider using .. or providing an implicit instance of Liftable[List[StringBuilder]]",
+    """
+      import java.lang.StringBuilder
+      val xs: List[StringBuilder] = Nil
+      q"$xs"
+    """)
+
+  property("unquote typename into typedef with default bounds") = fails(
+    "reflect.runtime.universe.Name expected but reflect.runtime.universe.TypeDef found",
+    """
+      val T1 = TypeName("T1")
+      val T2 = q"type T"
+      val t = EmptyTree
+      q"type $T1[$T2 >: _root_.scala.Any <: _root_.scala.Nothing] = $t" ≈
+        TypeDef(Modifiers(), T1, List(T2), t)
+    """)
+
+  property("can't unquote annotations with ... rank") = fails(
+    "Can't unquote with ... here",
+    """
+      val annots = List(List(q"Foo"))
+      q"@...$annots def foo"
+    """)
+
+  property("only literal string arguments") = fails(
+    "Quasiquotes can only be used with literal strings",
+    """
+      val s: String = "foo"
+      StringContext(s).q()
+    """)
+
+  property("don't know how to unquote inside of strings") = fails(
+    "Don't know how to unquote here",
+    """
+      val x: Tree = EmptyTree
+      StringContext("\"", "\"").q(x)
+    """)
+
+  property("non-liftable type ..") = fails(
+    "Can't unquote List[StringBuilder] with .., consider omitting the dots or providing an implicit instance of Liftable[StringBuilder]",
+    """
+      import java.lang.StringBuilder
+      val bazs = List(new StringBuilder)
+      q"f(..$bazs)"
+    """)
+
+  property("non-liftable type ...") = fails(
+    "Can't unquote List[List[StringBuilder]] with .., consider using ... or providing an implicit instance of Liftable[StringBuilder]",
+    """
+      import java.lang.StringBuilder
+      val bazs = List(List(new StringBuilder))
+      q"f(..$bazs)"
+    """)
+
+  property("use .. rank or provide liftable") = fails(
+    "Can't unquote List[StringBuilder], consider using .. or providing an implicit instance of Liftable[List[StringBuilder]]",
+    """
+      import java.lang.StringBuilder
+      val lst: List[StringBuilder] = Nil
+      q"f($lst)"
+    """)
+
+  property("use ... rank or provide liftable") = fails(
+    "Can't unquote List[List[StringBuilder]], consider using ... or providing an implicit instance of Liftable[List[List[StringBuilder]]]",
+    """
+      import java.lang.StringBuilder
+      val xs: List[List[StringBuilder]] = Nil
+      q"$xs"
+    """)
+
+  property("not liftable or natively supported") = fails(
+    "Can't unquote StringBuilder, consider providing an implicit instance of Liftable[StringBuilder]",
+    """
+      import java.lang.StringBuilder
+      val sb = new StringBuilder
+      q"f($sb)"
+    """)
+
+  property("can't unquote with ... rank here") = fails(
+    "Can't unquote with ... here",
+    """
+      val lst: List[List[Tree]] = Nil; val t = EmptyTree
+      q"f(...$lst, $t)"
+    """)
+
+  property("name expected") = fails(
+    "reflect.runtime.universe.Name expected but reflect.runtime.universe.Tree found",
+    """
+      val t = EmptyTree
+      q"class $t"
+    """)
+
+  property("flags or mods expected") = fails(
+    "reflect.runtime.universe.FlagSet or reflect.runtime.universe.Modifiers expected but reflect.runtime.universe.Tree found",
+    """
+      val t = EmptyTree
+      q"$t def foo"
+    """)
+
+  property("cant unquote flags together with mods") = fails(
+    "Can't unquote flags together with modifiers, consider merging flags into modifiers",
+    """
+      val f = Flag.IMPLICIT; val m = NoMods
+      q"$f $m def foo"
+    """)
+
+  property("can't unquote mods with annots") = fails(
+    "Can't unquote modifiers together with annotations, consider merging annotations into modifiers",
+    """
+      val m = NoMods
+      q"@annot $m def foo"
+    """)
+
+  property("can't unquote modifiers with inline flags") = fails(
+    "Can't unquote modifiers together with flags, consider merging flags into modifiers",
+    """
+      val m = NoMods
+      q"$m implicit def foo"
+    """)
+
+  property("can't unquote multiple mods") = fails(
+    "Can't unquote multiple modifiers, consider merging them into a single modifiers instance",
+    """
+      val m1 = NoMods; val m2 = NoMods
+      q"$m1 $m2 def foo"
+    """)
+
+  property("can't extract mods with annots") = fails(
+    "Can't extract modifiers together with annotations, consider extracting just modifiers",
+    """
+      val q"@$annot $mods def foo" = EmptyTree
+    """)
+
+  property("can't extract multiple mods") = fails(
+    "Can't extract multiple modifiers together, consider extracting a single modifiers instance",
+    """
+      val q"$m1 $m2 def foo" = EmptyTree
+    """)
+
+  property("can't unquote values of Null") = fails(
+    "Can't unquote Null, bottom type values often indicate programmer mistake",
+    """
+      val n = null
+      q"$n"
+    """)
+
+  property("can't unquote values of Nothing") = fails(
+    "Can't unquote Nothing, bottom type values often indicate programmer mistake",
+    """
+      def n = ???
+      q"$n"
+    """)
+
+  property("SI-8211: check unbound placeholder paremeters") = fails(
+    "unbound placeholder parameter",
+    """
+      q"_"
+    """)
+
+  property("SI-8211: check unbound wildcard types") = fails(
+    "unbound wildcard type",
+    """
+      tq"_"
+    """)
+
+  property("SI-8420: don't crash on splicing of non-unliftable native type (1)") = fails(
+    "Can't unquote List[reflect.runtime.universe.Symbol] with .., consider omitting the dots or providing an implicit instance of Liftable[reflect.runtime.universe.Symbol]",
+    """
+      val l: List[Symbol] = Nil
+      q"f(..$l)"
+    """)
+
+  property("SI-8420: don't crash on splicing of non-unliftable native type (2)") = fails(
+    "Can't unquote List[reflect.runtime.universe.FlagSet] with .., consider omitting the dots or providing an implicit instance of Liftable[reflect.runtime.universe.FlagSet]",
+    """
+      val l: List[FlagSet] = Nil
+      q"f(..$l)"
+    """)
+
+  property("SI-8420: don't crash on splicing of non-unliftable native type (3)") = fails(
+    "Can't unquote List[reflect.runtime.universe.Modifiers] with .., consider omitting the dots or providing an implicit instance of Liftable[reflect.runtime.universe.Modifiers]",
+    """
+      val l: List[Modifiers] = Nil
+      q"f(..$l)"
+    """)
+
+  property("SI-8451 construction: disallow everything except for constructor calls in secondary constructor bodies") = fails(
+    "'this' expected but unquotee found",
+    """
+      val rhs1 = q"this(0)"
+      val ctor1 = q"def this(x: Int) = $rhs1"
+    """)
+
+  property("SI-8451 deconstruction: disallow everything except for constructor calls in secondary constructor bodies") = fails(
+    "'this' expected but unquotee found",
+    """
+      val q"def this(..$params) = $rhs2" = q"def this(x: Int) = this(0)"
+    """)
+
+  // // Make sure a nice error is reported in this case
+  // { import Flag._; val mods = NoMods; q"lazy $mods val x: Int" }
+}
diff --git a/test/files/scalacheck/quasiquotes/ForProps.scala b/test/files/scalacheck/quasiquotes/ForProps.scala
new file mode 100644
index 0000000..b14d345
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/ForProps.scala
@@ -0,0 +1,70 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport._
+
+object ForProps extends QuasiquoteProperties("for") {
+  case class ForEnums(val value: List[Tree])
+
+  def genSimpleBind: Gen[Bind] =
+    for(name <- genTermName)
+      yield pq"$name @ _"
+
+  def genForFilter: Gen[Tree] =
+    for(cond <- genIdent(genTermName))
+      yield fq"if $cond"
+
+  def genForFrom: Gen[Tree] =
+    for(lhs <- genSimpleBind; rhs <- genIdent(genTermName))
+      yield fq"$lhs <- $rhs"
+
+  def genForEq: Gen[Tree] =
+    for(lhs <- genSimpleBind; rhs <- genIdent(genTermName))
+      yield fq"$lhs = $rhs"
+
+  def genForEnums(size: Int): Gen[ForEnums] =
+    for(first <- genForFrom; rest <- listOfN(size, oneOf(genForFrom, genForFilter, genForEq)))
+      yield new ForEnums(first :: rest)
+
+  implicit val arbForEnums: Arbitrary[ForEnums] = arbitrarySized(genForEnums)
+
+  property("construct-reconstruct for") = forAll { (enums: ForEnums, body: Tree) =>
+    val SyntacticFor(recoveredEnums, recoveredBody) = SyntacticFor(enums.value, body)
+    recoveredEnums ≈ enums.value && recoveredBody ≈ body
+  }
+
+  property("construct-reconstruct for-yield") = forAll { (enums: ForEnums, body: Tree) =>
+    val SyntacticForYield(recoveredEnums, recoveredBody) = SyntacticForYield(enums.value, body)
+    recoveredEnums ≈ enums.value && recoveredBody ≈ body
+  }
+
+  val abcde = List(fq"a <-b", fq"if c", fq"d = e")
+  val foobarbaz = pq"foo @ Bar(baz)"
+  val fv = q"f(v)"
+
+  property("construct/deconstruct for loop with fq") = test {
+    val for0 = q"for(..$abcde) $fv"
+    assertEqAst(for0, "for(a <- b; if c; d = e) f(v)")
+    val q"for(..$enums) $body" = for0
+    assert(enums ≈ abcde)
+    assert(body ≈ fv)
+  }
+
+  property("construct/deconstruct valfrom with fq") = test {
+    assert(fq"$foobarbaz <- $fv" ≈ fq"foo @ Bar(baz) <- f(v)")
+    val fq"$lhs <- $rhs" = fq"$foobarbaz <- $fv"
+    assert(lhs ≈ foobarbaz)
+    assert(rhs ≈ fv)
+  }
+
+  property("construct/deconstruct valeq with fq") = test {
+    assert(fq"$foobarbaz = $fv" ≈ fq"foo @ Bar(baz) = f(v)")
+    val fq"$lhs = $rhs" = fq"$foobarbaz = $fv"
+    assert(lhs ≈ foobarbaz)
+    assert(rhs ≈ fv)
+  }
+
+  property("construct/deconstruct filter with fq") = test {
+    assert(fq"if $fv" ≈ fq"if f(v)")
+    val fq"if $cond" = fq"if $fv"
+    assert(cond ≈ fv)
+  }
+}
\ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/LiftableProps.scala b/test/files/scalacheck/quasiquotes/LiftableProps.scala
new file mode 100644
index 0000000..a4c57ac
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/LiftableProps.scala
@@ -0,0 +1,174 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object LiftableProps extends QuasiquoteProperties("liftable") {
+  property("unquote byte") = test {
+    val c: Byte = 0
+    assert(q"$c" ≈ Literal(Constant(c)))
+    assert(q"${0: Byte}" ≈ Literal(Constant(c)))
+  }
+
+  property("unquote short") = test {
+    val c: Short = 0
+    assert(q"$c" ≈ Literal(Constant(c)))
+    assert(q"${0: Short}" ≈ Literal(Constant(c)))
+  }
+
+  property("unquote char") = test {
+    val c: Char = 'c'
+    assert(q"$c" ≈ Literal(Constant(c)))
+    assert(q"${'c'}" ≈ Literal(Constant(c)))
+  }
+
+  property("unquote int") = test {
+    val c: Int = 0
+    assert(q"$c" ≈ Literal(Constant(c)))
+    assert(q"${0: Int}" ≈ Literal(Constant(c)))
+  }
+
+  property("unquote long") = test {
+    val c: Long = 0
+    assert(q"$c" ≈ Literal(Constant(c)))
+    assert(q"${0: Long}" ≈ Literal(Constant(c)))
+  }
+
+  property("unquote float") = test {
+    val c: Float = 0.0f
+    assert(q"$c" ≈ Literal(Constant(c)))
+    assert(q"${0.0f: Float}" ≈ Literal(Constant(c)))
+  }
+
+  property("unquote double") = test {
+    val c: Double = 0.0
+    assert(q"$c" ≈ Literal(Constant(c)))
+    assert(q"${0.0: Double}" ≈ Literal(Constant(c)))
+  }
+
+  property("unquote boolean") = test {
+    val c: Boolean = false
+    assert(q"$c" ≈ Literal(Constant(c)))
+    assert(q"${true}" ≈ Literal(Constant(true)))
+    assert(q"${false}" ≈ Literal(Constant(false)))
+  }
+
+  property("unquote string") = test {
+    val c: String = "s"
+    assert(q"$c" ≈ Literal(Constant(c)))
+    assert(q"${"s"}" ≈ Literal(Constant(c)))
+  }
+
+  property("unquote unit") = test {
+    val c: Unit = ()
+    assert(q"$c" ≈ Literal(Constant(c)))
+    assert(q"${()}" ≈ Literal(Constant(c)))
+  }
+
+  property("lift symbol") = test {
+    val s = rootMirror.staticClass("scala.Int")
+    assert(q"$s" ≈ Ident(s))
+  }
+
+  property("lift type") = test {
+    val tpe = rootMirror.staticClass("scala.Int").toType
+    assert(q"$tpe" ≈ TypeTree(tpe))
+  }
+
+  property("lift type tag") = test {
+    val tag = TypeTag.Int
+    assert(q"$tag" ≈ TypeTree(tag.tpe))
+  }
+
+  property("lift weak type tag") = test {
+    val tag = WeakTypeTag.Int
+    assert(q"$tag" ≈ TypeTree(tag.tpe))
+  }
+
+  property("lift constant") = test {
+    val const = Constant(0)
+    assert(q"$const" ≈ q"0")
+  }
+
+  val immutable = q"$scalapkg.collection.immutable"
+
+  property("lift list variants") = test {
+    val lst = List(1, 2)
+    assert(q"$lst" ≈ q"$immutable.List(1, 2)")
+    assert(q"f(..$lst)" ≈ q"f(1, 2)")
+    val llst = List(List(1), List(2))
+    assert(q"f(..$llst)" ≈ q"f($immutable.List(1), $immutable.List(2))")
+    assert(q"f(...$llst)" ≈ q"f(1)(2)")
+  }
+
+  property("lift list of tree") = test {
+    val lst = List(q"a", q"b")
+    assert(q"$lst" ≈ q"$immutable.List(a, b)")
+  }
+
+  property("lift tuple") = test {
+    assert(q"${(1, 2)}" ≈ q"(1, 2)")
+    assert(q"${(1, 2, 3)}" ≈ q"(1, 2, 3)")
+    assert(q"${(1, 2, 3, 4)}" ≈ q"(1, 2, 3, 4)")
+    assert(q"${(1, 2, 3, 4, 5)}" ≈ q"(1, 2, 3, 4, 5)")
+    assert(q"${(1, 2, 3, 4, 5, 6)}" ≈ q"(1, 2, 3, 4, 5, 6)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7)}" ≈ q"(1, 2, 3, 4, 5, 6, 7)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21)")
+    assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22)")
+  }
+
+  property("lift nil") = test {
+    val nil = Nil
+    assert(q"$nil" ≈ q"scala.collection.immutable.Nil")
+  }
+
+  property("lift some") = test {
+    val some1 = Some(1)
+    assert(q"$some1" ≈ q"scala.Some(1)")
+    val some2: Option[Int] = Some(1)
+    assert(q"$some2" ≈ q"scala.Some(1)")
+  }
+
+  property("lift none") = test {
+    val none1 = None
+    assert(q"$none1" ≈ q"scala.None")
+    val none2: Option[Int] = None
+    assert(q"$none2" ≈ q"scala.None")
+  }
+
+  property("lift left") = test {
+    val left1 = Left(1)
+    assert(q"$left1" ≈ q"scala.util.Left(1)")
+    val left2: Left[Int, Int] = Left(1)
+    assert(q"$left2" ≈ q"scala.util.Left(1)")
+    val left3: Either[Int, Int] = Left(1)
+    assert(q"$left3" ≈ q"scala.util.Left(1)")
+  }
+
+  property("lift right") = test {
+    val right1 = Right(1)
+    assert(q"$right1" ≈ q"scala.util.Right(1)")
+    val right2: Right[Int, Int] = Right(1)
+    assert(q"$right2" ≈ q"scala.util.Right(1)")
+    val right3: Either[Int, Int] = Right(1)
+    assert(q"$right3" ≈ q"scala.util.Right(1)")
+  }
+
+  property("lift xml comment") = test {
+    implicit val liftXmlComment = Liftable[xml.Comment] { comment =>
+      q"new _root_.scala.xml.Comment(${comment.commentText})"
+    }
+    assert(q"${xml.Comment("foo")}" ≈ q"<!--foo-->")
+  }
+}
diff --git a/test/files/scalacheck/quasiquotes/PatternConstructionProps.scala b/test/files/scalacheck/quasiquotes/PatternConstructionProps.scala
new file mode 100644
index 0000000..7ed95fa
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/PatternConstructionProps.scala
@@ -0,0 +1,36 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object PatternConstructionProps extends QuasiquoteProperties("pattern construction") {
+  property("unquote bind") = forAll { (bind: Bind) =>
+    pq"$bind" ≈ bind
+  }
+
+  property("unquote name into bind") = forAll { (name: TermName) =>
+    pq"$name" ≈ Bind(name, Ident(termNames.WILDCARD))
+  }
+
+  property("unquote name and tree into bind") = forAll { (name: TermName, tree: Tree) =>
+    pq"$name @ $tree" ≈ Bind(name, tree)
+  }
+
+  property("unquote type name into typed") = forAll { (name: TypeName) =>
+    pq"_ : $name" ≈ Typed(Ident(termNames.WILDCARD), Ident(name))
+  }
+
+  property("unquote tree into typed") = forAll { (typ: Tree) =>
+    pq"_ : $typ" ≈ Typed(Ident(termNames.WILDCARD), typ)
+  }
+
+  property("unquote into apply") = forAll { (pat: Tree, subpat: Tree) =>
+    pq"$pat($subpat)" ≈ Apply(pat, List(subpat))
+  }
+
+  property("unquote into casedef") = forAll { (pat: Tree, cond: Tree, body: Tree) =>
+    cq"$pat if $cond => $body" ≈ CaseDef(pat, cond, body)
+  }
+
+  property("unquote into alternative") = forAll { (first: Tree, rest: List[Tree]) =>
+    pq"$first | ..$rest" ≈ Alternative(first :: rest)
+  }
+}
diff --git a/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala
new file mode 100644
index 0000000..ad3266b
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala
@@ -0,0 +1,44 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object PatternDeconstructionProps extends QuasiquoteProperties("pattern deconstruction") {
+  property("extract bind") = forAll { (bind: Bind) =>
+    val pq"$bind0" = pq"$bind"
+    bind0 ≈ bind
+  }
+
+  property("extract bind and subpattern") = forAll { (name: TermName, subp: Tree) =>
+    val pq"$name0 @ $subp0" = pq"$name @ $subp"
+    name0 ≈ name && subp0 ≈ subp
+  }
+
+  property("extract typed") = forAll { (typ: Tree) =>
+    val pq"_ : $typ0" = pq"_ : $typ"
+    typ0 ≈ typ
+  }
+
+  property("extract apply") = forAll { (pat: Tree, subpat: Tree) =>
+    val pq"$pat0($subpat0)" = pq"$pat($subpat)"
+    pat0 ≈ pat && subpat0 ≈ subpat
+  }
+
+  property("extract apply many") = forAll { (pat: Tree, subpats: List[Tree]) =>
+    val pq"$pat0(..$subpats0)" = pq"$pat(..$subpats)"
+    pat0 ≈ pat && subpats0 ≈ subpats
+  }
+
+  property("extract apply last") = forAll { (pat: Tree, subpats: List[Tree], subpatlast: Tree) =>
+    val pq"$pat0(..$subpats0, $subpatlast0)" = pq"$pat(..$subpats, $subpatlast)"
+    pat0 ≈ pat && subpats0 ≈ subpats && subpatlast0 ≈ subpatlast
+  }
+
+  property("extract casedef") = forAll { (pat: Tree, cond: Tree, body: Tree) =>
+    val cq"$pat0 if $cond0 => $body0" = cq"$pat if $cond => $body"
+    pat0 ≈ pat && cond0 ≈ cond && body0 ≈ body
+  }
+
+  property("extract alternative") = forAll { (first: Tree, rest: List[Tree]) =>
+    val pq"$first1 | ..$rest1" = pq"$first | ..$rest"
+    first1 ≈ first && rest1 ≈ rest
+  }
+}
diff --git a/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala b/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala
new file mode 100644
index 0000000..6132244
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala
@@ -0,0 +1,120 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+import scala.reflect.runtime.currentMirror
+import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.setSymbol
+
+class QuasiquoteProperties(name: String) extends Properties(name) with ArbitraryTreesAndNames with Helpers
+
+trait Helpers {
+  /** Runs a code block and returns a proof confirmation
+   *  if no exception was thrown while executing the block.
+   *  This is useful for simple one-off tests.
+   */
+  def test[T](block: => T) =
+    Prop { params =>
+      block
+      Result(Prop.Proof)
+    }
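+  // Hypothetical usage sketch: property("round trip") = test { assertEqAst(q"1 + 1", "1 + 1") }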
+
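+  /** Rewrites compiler-generated fresh names (e.g. `x$1`) back to their
+   *  prefixes so that two trees differing only in fresh-name suffixes
+   *  compare as structurally equal via `≈` below.
+   */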
+  object simplify extends Transformer {
+    object SimplifiedName {
+      val st = scala.reflect.runtime.universe.asInstanceOf[scala.reflect.internal.SymbolTable]
+      val FreshName = new st.FreshNameExtractor
+      def unapply[T <: Name](name: T): Option[T] = name.asInstanceOf[st.Name] match {
+        case FreshName(prefix) =>
+          Some((if (name.isTermName) TermName(prefix) else TypeName(prefix)).asInstanceOf[T])
+        case _ => None
+      }
+    }
+
+    override def transform(tree: Tree): Tree = tree match {
+      case Ident(SimplifiedName(name))                  => Ident(name)
+      case ValDef(mods, SimplifiedName(name), tpt, rhs) => ValDef(mods, name, transform(tpt), transform(rhs))
+      case Bind(SimplifiedName(name), rhs)              => Bind(name, rhs)
+      case _ =>
+        super.transform(tree)
+    }
+
+    def apply(tree: Tree): Tree = transform(tree)
+  }
+
+  implicit class TestSimilarTree(tree1: Tree) {
+    def ≈(tree2: Tree) = simplify(tree1).equalsStructure(simplify(tree2))
+  }
+
+  implicit class TestSimilarListTree(lst: List[Tree]) {
+    def ≈(other: List[Tree]) = (lst.length == other.length) && lst.zip(other).forall { case (t1, t2) => t1 ≈ t2 }
+  }
+
+  implicit class TestSimilarListListTree(lst: List[List[Tree]]) {
+    def ≈(other: List[List[Tree]]) = (lst.length == other.length) && lst.zip(other).forall { case (l1, l2) => l1 ≈ l2 }
+  }
+
+  implicit class TestSimilarName(name: Name) {
+    def ≈(other: Name) = name == other
+  }
+
+  implicit class TestSimilarMods(mods: Modifiers) {
+    def ≈(other: Modifiers) = (mods.flags == other.flags) && (mods.privateWithin ≈ other.privateWithin) && (mods.annotations ≈ other.annotations)
+  }
+
+  def assertThrows[T <: AnyRef](f: => Any)(implicit manifest: Manifest[T]): Unit = {
+    val clazz = manifest.runtimeClass.asInstanceOf[Class[T]]
+    val thrown =
+      try {
+        f
+        false
+      } catch {
+        case u: Throwable =>
+          if (!clazz.isAssignableFrom(u.getClass))
+            assert(false, s"wrong exception: $u")
+          true
+      }
+    if(!thrown)
+      assert(false, "exception wasn't thrown")
+  }
+
+  def assertEqAst(tree: Tree, code: String) = assert(eqAst(tree, code))
+  def eqAst(tree: Tree, code: String) = tree ≈ parse(code)
+
+  val toolbox = currentMirror.mkToolBox()
+  val parse = toolbox.parse(_)
+  val compile = toolbox.compile(_)
+  val eval = toolbox.eval(_)
+
+  def typecheck(tree: Tree) = toolbox.typecheck(tree)
+
+  def typecheckTyp(tree: Tree) = {
+    val q"type $_ = $res" = typecheck(q"type T = $tree")
+    res
+  }
+
+  def typecheckPat(tree: Tree) = {
+    val q"$_ match { case $res => }" = typecheck(q"((): Any) match { case $tree => }")
+    res
+  }
+
+  def fails(msg: String, block: String) = {
+    def result(ok: Boolean, description: String = "") = {
+      val status = if (ok) Prop.Proof else Prop.False
+      val labels = if (description != "") Set(description) else Set.empty[String]
+      Prop { new Prop.Result(status, Nil, Set.empty, labels) }
+    }
+    try {
+      compile(parse(s"""
+        object Wrapper extends Helpers {
+          import scala.reflect.runtime.universe._
+          $block
+        }
+      """))
+      result(false, "given code doesn't fail to typecheck")
+    } catch {
+      case ToolBoxError(emsg, _) =>
+        if (!emsg.contains(msg))
+          result(false, s"error message '${emsg}' is not the same as expected '$msg'")
+        else
+          result(true)
+    }
+  }
+
+  val scalapkg = setSymbol(Ident(TermName("scala")), definitions.ScalaPackage)
+}
diff --git a/test/files/scalacheck/quasiquotes/RuntimeErrorProps.scala b/test/files/scalacheck/quasiquotes/RuntimeErrorProps.scala
new file mode 100644
index 0000000..a3b6137
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/RuntimeErrorProps.scala
@@ -0,0 +1,75 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object RuntimeErrorProps extends QuasiquoteProperties("errors") {
+  def testFails[T](block: =>T) = test {
+    assertThrows[IllegalArgumentException] {
+      block
+    }
+  }
+
+  property("default param anon function") = testFails {
+    val param = q"val x: Int = 1"
+    q"{ $param => x + 1 }"
+  }
+
+  property("non-casedef case") = testFails {
+    val x = q"x"
+    q"foo match { case $x }"
+  }
+
+  property("non-new annotation") = testFails {
+    val annot = q"foo"
+    q"@$annot def foo"
+  }
+
+  property("non-valdef param") = testFails {
+    val param = q"foo"
+    q"def foo($param)"
+  }
+
+  property("non-valdef class param") = testFails {
+    val param = q"foo"
+    q"class Foo($param)"
+  }
+
+  property("non-typedef type param") = testFails {
+    val tparam = tq"T"
+    q"class C[$tparam]"
+  }
+
+  property("non-definition refine stat") = testFails {
+    val stat = q"foo"
+    tq"Foo { $stat }"
+  }
+
+  property("non-definition early def") = testFails {
+    val stat = q"foo"
+    q"class Foo extends { $stat } with Bar"
+  }
+
+  property("type apply for definition") = testFails {
+    val defn = q"def foo"
+    q"$defn[foo]"
+  }
+
+  property("non-val selftype") = testFails {
+    val foo = q"foo"
+    q"class Foo { $foo => }"
+  }
+
+  property("for empty enums") = testFails {
+    val enums = List.empty[Tree]
+    q"for(..$enums) 0"
+  }
+
+  property("for starts with non-from enum") = testFails {
+    val enums = fq"foo = bar" :: Nil
+    q"for(..$enums) 0"
+  }
+
+  property("for inlalid enum") = testFails {
+    val enums = q"foo" :: Nil
+    q"for(..$enums) 0"
+  }
+}
diff --git a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
new file mode 100644
index 0000000..45392de
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
@@ -0,0 +1,313 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object TermConstructionProps extends QuasiquoteProperties("term construction") {
+  property("unquote single tree return tree itself") = forAll { (t: Tree) =>
+    q"$t" ≈ t
+  }
+
+  property("unquote trees into if expression") = forAll { (t1: Tree, t2: Tree, t3: Tree) =>
+    q"if($t1) $t2 else $t3" ≈ If(t1, t2, t3)
+  }
+
+  property("unquote trees into ascriptiopn") = forAll { (t1: Tree, t2: Tree) =>
+    q"$t1 : $t2" ≈ Typed(t1, t2)
+  }
+
+  property("unquote trees into apply") = forAll { (t1: Tree, t2: Tree, t3: Tree) =>
+    q"$t1($t2, $t3)" ≈ Apply(t1, List(t2, t3))
+  }
+
+  property("unquote trees with .. rank into apply") = forAll { (ts: List[Tree]) =>
+    q"f(..$ts)" ≈ Apply(q"f", ts)
+  }
+
+  property("unquote iterable into apply") = forAll { (trees: List[Tree]) =>
+    val itrees: Iterable[Tree] = trees
+    q"f(..$itrees)" ≈ Apply(q"f", trees)
+  }
+
+  property("unquote trees with ... rank into apply") = forAll { (ts1: List[Tree], ts2: List[Tree]) =>
+    val argss = List(ts1, ts2)
+    q"f(...$argss)" ≈ Apply(Apply(q"f", ts1), ts2)
+  }
+
+  property("unquote term name into assign") = forAll { (name: TermName, t: Tree) =>
+    q"$name = $t" ≈ Assign(Ident(name), t)
+  }
+
+  property("unquote trees into block") = forAll { (t1: Tree, t2: Tree, t3: Tree) =>
+    blockInvariant(q"""{
+      $t1
+      $t2
+      $t3
+    }""", List(t1, t2, t3))
+  }
+
+
+  property("unquote tree into new") = forAll { (tree: Tree) =>
+    q"new $tree" ≈ Apply(Select(New(tree), termNames.CONSTRUCTOR), List())
+  }
+
+  property("unquote tree into return") = forAll { (tree: Tree) =>
+    q"return $tree" ≈ Return(tree)
+  }
+
+  property("unquote a list of arguments") = forAll { (fun: Tree, args: List[Tree]) =>
+    q"$fun(..$args)" ≈ Apply(fun, args)
+  }
+
+  property("unquote list and non-list fun arguments") = forAll { (fun: Tree, arg1: Tree, arg2: Tree, args: List[Tree]) =>
+    q"$fun(..$args, $arg1, $arg2)" ≈ Apply(fun, args ++ List(arg1) ++ List(arg2)) &&
+    q"$fun($arg1, ..$args, $arg2)" ≈ Apply(fun, List(arg1) ++ args ++ List(arg2)) &&
+    q"$fun($arg1, $arg2, ..$args)" ≈ Apply(fun, List(arg1) ++ List(arg2) ++ args)
+  }
+
+  property("unquote into new") = forAll { (name: TypeName, body: List[Tree]) =>
+    q"new $name { ..$body }" ≈
+      q"""{
+        final class $$anon extends $name {
+          ..$body
+        }
+        new $$anon
+      }"""
+  }
+
+  property("unquote type name into this") = forAll { (T: TypeName) =>
+    q"$T.this" ≈ This(T)
+  }
+
+  property("unquote tree into throw") = forAll { (t: Tree) =>
+    q"throw $t" ≈ Throw(t)
+  }
+
+  property("unquote trees into type apply") = forAll { (fun: TreeIsTerm, types: List[Tree]) =>
+    q"$fun[..$types]" ≈ (if (types.nonEmpty) TypeApply(fun, types) else fun)
+  }
+
+  property("unquote trees into while loop") = forAll { (cond: Tree, body: Tree) =>
+    val LabelDef(_, List(), If(cond1, Block(List(body1), Apply(_, List())), Literal(Constant(())))) = q"while($cond) $body"
+    body1 ≈ body && cond1 ≈ cond
+  }
+
+  property("unquote trees into do while loop") = forAll { (cond: Tree, body: Tree) =>
+    val LabelDef(_, List(), Block(List(body1), If(cond1, Apply(_, List()), Literal(Constant(()))))) = q"do $body while($cond)"
+    body1 ≈ body && cond1 ≈ cond
+  }
+
+  def blockInvariant(quote: Tree, trees: List[Tree]) =
+    quote ≈ (trees match {
+      case Nil => q"{}"
+      case _ :+ last if !last.isTerm => Block(trees, q"()")
+      case head :: Nil => head
+      case init :+ last => Block(init, last)
+    })
+
+  property("unquote list of trees into block (1)") = forAll { (trees: List[Tree]) =>
+    blockInvariant(q"{ ..$trees }", trees)
+  }
+
+  property("unquote list of trees into block (2)") = forAll { (trees1: List[Tree], trees2: List[Tree]) =>
+    blockInvariant(q"{ ..$trees1 ; ..$trees2 }", trees1 ++ trees2)
+  }
+
+  property("unquote list of trees into block (3)") = forAll { (trees: List[Tree], tree: Tree) =>
+    blockInvariant(q"{ ..$trees; $tree }", trees :+ tree)
+  }
+
+  property("unquote term into brackets") = test {
+    val a = q"a"
+    assert(q"($a)" ≈ a)
+  }
+
+  property("unquote terms into tuple") = test {
+    val a1 = q"a1"
+    val a2 = q"a2"
+    val as = List(a1, a2)
+    assert(q"(..$as)" ≈ q"scala.Tuple2($a1, $a2)")
+    assert(q"(a0, ..$as)" ≈ q"scala.Tuple3(a0, $a1, $a2)")
+  }
+
+  property("unquote empty list into tuple") = test {
+    val empty = List[Tree]()
+    assert(q"(..$empty)" ≈ q"()")
+  }
+
+  property("unquote single element list into tuple") = test {
+    val xs = q"x" :: Nil
+    assert(q"(..$xs)" ≈ xs.head)
+  }
+
+  property("function param flags are the same") = test {
+    val xy = q"val x: A" :: q"val y: B" :: Nil
+    assertEqAst(q"(..$xy) => x + y", "(x: A, y: B) => x + y")
+  }
+
+  property("anonymous functions don't support default values") = test {
+    val x = q"val x: Int = 1"
+    assertThrows[IllegalArgumentException] { q"($x) => x" }
+  }
+
+  property("assign variable") = test {
+    val v = q"v"
+    val value = q"foo"
+    assertEqAst(q"$v = $value", "v = foo")
+  }
+
+  property("assign update 1") = test {
+    val v = q"v"
+    val args = q"1" :: q"2" :: Nil
+    val value = q"foo"
+    assertEqAst(q"$v(..$args) = $value", "v(1, 2) = foo")
+  }
+
+  property("assign update 2") = test {
+    val a = q"v(0)"
+    val value = q"foo"
+    assertEqAst(q"$a = $value", "v(0) = foo")
+  }
+
+  property("assign or named arg") = test {
+    val assignx = q"x = 1"
+    assertEqAst(q"f($assignx)", "f(x = 1)")
+  }
+
+  property("fresh names are regenerated at each evaluation") = test {
+    def plusOne = q"{ _ + 1 }"
+    assert(!plusOne.equalsStructure(plusOne))
+    def whileTrue = q"while(true) false"
+    assert(!whileTrue.equalsStructure(whileTrue))
+    def withEvidence = q"def foo[T: X]"
+    assert(!withEvidence.equalsStructure(withEvidence))
+  }
+
+  property("make sure inference doesn't infer any") = test {
+    val l1 = List(q"foo")
+    val l2 = List(q"bar")
+    val baz = q"baz"
+    assert(q"f(..${l1 ++ l2})" ≈ q"f(foo, bar)")
+    assert(q"f(..${l1 ++ l2}, $baz)" ≈ q"f(foo, bar, baz)")
+    assert(q"f(${if (true) q"a" else q"b"})" ≈ q"f(a)")
+  }
+
+  property("unquote iterable of non-parametric type") = test {
+    object O extends Iterable[Tree] { def iterator = List(q"foo").iterator }
+    q"f(..$O)"
+  }
+
+  property("SI-8016") = test {
+    val xs = q"1" :: q"2" :: Nil
+    assertEqAst(q"..$xs", "{1; 2}")
+    assertEqAst(q"{..$xs}", "{1; 2}")
+  }
+
+  property("SI-6842") = test {
+    val cases: List[Tree] = cq"a => b" :: cq"_ => c" :: Nil
+    assertEqAst(q"1 match { case ..$cases }", "1 match { case a => b case _ => c }")
+    assertEqAst(q"try 1 catch { case ..$cases }", "try 1 catch { case a => b case _ => c }")
+  }
+
+  property("SI-8009") = test {
+    q"`foo`".asInstanceOf[reflect.internal.SymbolTable#Ident].isBackquoted
+  }
+
+  property("SI-8148") = test {
+    val q"($a, $b) => $_" = q"_ + _"
+    assert(a.name != b.name)
+  }
+
+  property("SI-7275 a") = test {
+    val t = q"stat1; stat2"
+    assertEqAst(q"..$t", "{stat1; stat2}")
+  }
+
+  property("SI-7275 b") = test {
+    def f(t: Tree) = q"..$t"
+    assertEqAst(f(q"stat1; stat2"), "{stat1; stat2}")
+  }
+
+  property("SI-7275 c1") = test {
+    object O
+    implicit val liftO = Liftable[O.type] { _ => q"foo; bar" }
+    assertEqAst(q"f(..$O)", "f(foo, bar)")
+  }
+
+  property("SI-7275 c2") = test {
+    object O
+    implicit val liftO = Liftable[O.type] { _ => q"{ foo; bar }; { baz; bax }" }
+    assertEqAst(q"f(...$O)", "f(foo, bar)(baz, bax)")
+  }
+
+  property("SI-7275 d") = test {
+    val l = q"a; b" :: q"c; d" :: Nil
+    assertEqAst(q"f(...$l)", "f(a, b)(c, d)")
+    val l2: Iterable[Tree] = l
+    assertEqAst(q"f(...$l2)", "f(a, b)(c, d)")
+  }
+
+  property("SI-7275 e") = test {
+    val t = q"{ a; b }; { c; d }"
+    assertEqAst(q"f(...$t)", "f(a, b)(c, d)")
+  }
+
+  property("SI-7275 e2") = test {
+    val t = q"{ a; b }; c; d"
+    assertEqAst(q"f(...$t)", "f(a, b)(c)(d)")
+  }
+
+  property("remove synthetic unit") = test {
+    val q"{ ..$stats1 }" = q"{ def x = 2 }"
+    assert(stats1 ≈ List(q"def x = 2"))
+    val q"{ ..$stats2 }" = q"{ class X }"
+    assert(stats2 ≈ List(q"class X"))
+    val q"{ ..$stats3 }" = q"{ type X = Int }"
+    assert(stats3 ≈ List(q"type X = Int"))
+    val q"{ ..$stats4 }" = q"{ val x = 2 }"
+    assert(stats4 ≈ List(q"val x = 2"))
+  }
+
+  property("don't remove user-defined unit") = test {
+    val q"{ ..$stats }" = q"{ def x = 2; () }"
+    assert(stats ≈ List(q"def x = 2", q"()"))
+  }
+
+  property("empty-tree is not a block") = test {
+    assertThrows[MatchError] {
+      val q"{ ..$stats1 }" = q" "
+    }
+  }
+
+  property("empty block is synthetic unit") = test {
+    val q"()" = q"{}"
+    val q"{..$stats}" = q"{}"
+    assert(stats.isEmpty)
+    assertEqAst(q"{..$stats}", "{}")
+    assertEqAst(q"{..$stats}", "()")
+  }
+
+  property("consistent variable order") = test {
+    val q"$a = $b = $c = $d = $e = $f = $g = $h = $k = $l" = q"a = b = c = d = e = f = g = h = k = l"
+    assert(a ≈ q"a" && b ≈ q"b" && c ≈ q"c" && d ≈ q"d" && e ≈ q"e" && g ≈ q"g" && h ≈ q"h" && k ≈ q"k" && l ≈ q"l")
+  }
+
+  property("SI-8385 a") = test {
+    assertEqAst(q"(foo.x = 1)(2)", "(foo.x = 1)(2)")
+  }
+
+  property("SI-8385 b") = test {
+    assertEqAst(q"(() => ())()", "(() => ())()")
+  }
+
+  property("match scrutinee may not be empty") = test {
+    assertThrows[IllegalArgumentException] {
+      val scrutinee = q""
+      val cases = List(cq"_ =>")
+      q"$scrutinee match { case ..$cases }"
+    }
+  }
+
+  property("construct partial function") = test {
+    val cases = List(cq"a => b", cq"c => d")
+    assertEqAst(q"{ case ..$cases }", "{ case a => b case c => d }")
+  }
+}
diff --git a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
new file mode 100644
index 0000000..49ffaff
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
@@ -0,0 +1,249 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction") {
+  property("f(..x) = f") = test {
+    // see SI-8008
+    assertThrows[MatchError] {
+      val q"f(..$args)" = q"f"
+    }
+  }
+
+  property("f(x)") = forAll { (x: Tree) =>
+    val q"f($x1)" = q"f($x)"
+    x1 ≈ x
+  }
+
+  property("f(..xs)") = forAll { (x1: Tree, x2: Tree) =>
+    val q"f(..$xs)" = q"f($x1, $x2)"
+    xs ≈ List(x1, x2)
+  }
+
+  property("f(y, ..ys)") = forAll { (x1: Tree, x2: Tree, x3: Tree) =>
+    val q"f($y, ..$ys)" = q"f($x1, $x2, $x3)"
+    y ≈ x1 && ys ≈ List(x2, x3)
+  }
+
+  property("f(y1, y2, ..ys)") = forAll { (x1: Tree, x2: Tree, x3: Tree) =>
+    val q"f($y1, $y2, ..$ys)" = q"f($x1, $x2, $x3)"
+    y1 ≈ x1 && y2 ≈ x2 && ys ≈ List(x3)
+  }
+
+  property("f(y1, ..ys, yn)") = forAll { (x1: Tree, x2: Tree, x3: Tree, x4: Tree) =>
+    val q"f($y1, ..$ys, $yn)" = q"f($x1, $x2, $x3, $x4)"
+    y1 ≈ x1 && ys ≈ List(x2, x3) && yn ≈ x4
+  }
+
+  property("f(..ys, y_{n-1}, y_n)") = forAll { (x1: Tree, x2: Tree, x3: Tree, x4: Tree) =>
+    val q"f(..$ys, $yn1, $yn)" = q"f($x1, $x2, $x3, $x4)"
+    ys ≈ List(x1, x2) && yn1 ≈ x3 && yn ≈ x4
+  }
+
+  property("f(...xss)") = forAll { (x1: Tree, x2: Tree) =>
+    val q"f(...$xss)" = q"f($x1)($x2)"
+    xss ≈ List(List(x1), List(x2))
+  }
+
+  property("f(...$xss)(..$last)") = forAll { (x1: Tree, x2: Tree, x3: Tree) =>
+    val q"f(...$xss)(..$last)" = q"f($x1)($x2)($x3)"
+    xss ≈ List(List(x1), List(x2)) && last ≈ List(x3)
+  }
+
+  property("f(...$xss)(..$lastinit, $lastlast)") = forAll { (x1: Tree, x2: Tree, x3: Tree, x4: Tree) =>
+    val q"f(...$xss)(..$lastinit, $lastlast)" = q"f($x1)($x2, $x3, $x4)"
+    xss ≈ List(List(x1)) && lastinit ≈ List(x2, x3) && lastlast ≈ x4
+  }
+
+  property("f(...xss) = f") = forAll { (x1: Tree, x2: Tree) =>
+    val q"f(...$xss)" = q"f"
+    xss ≈ List()
+  }
+
+  property("deconstruct unit as tuple") = test {
+    val q"(..$xs)" = q"()"
+    assert(xs.isEmpty)
+  }
+
+  property("deconstruct tuple") = test {
+    val q"(..$xs)" = q"(a, b)"
+    assert(xs ≈ List(q"a", q"b"))
+  }
+
+  property("deconstruct tuple mixed") = test {
+    val q"($first, ..$rest)" = q"(a, b, c)"
+    assert(first ≈ q"a")
+    assert(rest ≈ List(q"b", q"c"))
+  }
+
+  property("deconstruct tuple last element") = test {
+    val q"($first, ..$rest, $last)" = q"(a, b, c, d)"
+    assert(first ≈ q"a")
+    assert(rest ≈ List(q"b", q"c"))
+    assert(last ≈ q"d")
+  }
+
+  property("deconstruct expr as tuple") = test {
+    val q"(..$elems)" = q"foo"
+    assert(elems ≈ List(q"foo"))
+  }
+
+  property("deconstruct cases") = test {
+    val q"$x match { case ..$cases }" = q"x match { case 1 => case 2 => }"
+    assert(x ≈ q"x")
+    assert(cases ≈ List(cq"1 =>", cq"2 =>"))
+  }
+
+  property("deconstruct splitting last case") = test {
+    val q"$_ match { case ..$cases case $last }" = q"x match { case 1 => case 2 => case 3 => }"
+    assert(cases ≈ List(cq"1 =>", cq"2 =>"))
+    assert(last ≈ cq"3 =>")
+  }
+
+  property("deconstruct block") = test {
+    val q"{ ..$xs }" = q"{ x1; x2; x3 }"
+    assert(xs ≈ List(q"x1", q"x2", q"x3"))
+  }
+
+  property("deconstruct last element of a block") = test {
+    val q"{ ..$xs; $x }" = q"x1; x2; x3; x4"
+    assert(xs ≈ List(q"x1", q"x2", q"x3"))
+    assert(x ≈ q"x4")
+  }
+
+  property("exhaustive function matcher") = test {
+    def matches(line: String) { val q"(..$args) => $body" = parse(line) }
+    matches("() => bippy")
+    matches("(y: Y) => y oh y")
+    matches("(x: X, y: Y) => x and y")
+  }
+
+  property("exhaustive new pattern") = test {
+    def matches(line: String) {
+      val q"new { ..$early } with $name[..$targs](...$vargss) with ..$mixin { $self => ..$body }" = parse(line)
+    }
+    matches("new foo")
+    matches("new foo { body }")
+    matches("new foo[t]")
+    matches("new foo(x)")
+    matches("new foo[t](x)")
+    matches("new foo[t](x) { body }")
+    matches("new foo with bar")
+    matches("new foo with bar { body }")
+    matches("new { anonymous }")
+    matches("new { val early = 1 } with Parent[Int] { body }")
+    matches("new Foo { selfie => }")
+  }
+
+  property("exhaustive assign pattern") = test {
+    def matches(tree: Tree) { val q"$rhs = $lhs" = tree }
+    matches(parse("left = right"))
+    matches(parse("arr(1) = 2"))
+    matches(AssignOrNamedArg(EmptyTree, EmptyTree))
+  }
+
+  property("deconstruct update 1") = test {
+    val q"$obj(..$args) = $value" = q"foo(bar) = baz"
+    assert(obj ≈ q"foo")
+    assert(args ≈ List(q"bar"))
+    assert(value ≈ q"baz")
+  }
+
+  property("deconstruct update 2") = test {
+    val q"$left = $value" = q"foo(bar) = baz"
+    assert(left ≈ q"foo(bar)")
+    assert(value ≈ q"baz")
+  }
+
+  property("deconstruct while loop") = test {
+    val q"while($cond) $body" = parse("while(cond) body")
+    assert(cond ≈ q"cond")
+    assert(body ≈ q"body")
+  }
+
+  property("deconstruct do while loop") = test {
+    val q"do $body while($cond)" = parse("do body while(cond)")
+    assert(cond ≈ q"cond")
+    assert(body ≈ q"body")
+  }
+
+  property("deconstruct anonymous function with placeholders") = test {
+    val q"{ $f(_) }" = q"{ foo(_) }"
+    assert(f ≈ q"foo")
+    val q"{ _.$member }" = q"{ _.foo }"
+    assert(member ≈ TermName("foo"))
+    val q"{ _ + $x }" = q"{ _ + x }"
+    assert(x ≈ q"x")
+    val q"{ _ * _ }" = q"{ _ * _ }"
+  }
+
+  property("si-8275 a") = test {
+    val cq"_ => ..$stats" = cq"_ => foo; bar"
+    assert(stats ≈ List(q"foo", q"bar"))
+  }
+
+  property("si-8275 b") = test {
+    val cq"_ => ..$init; $last" = cq"_ => a; b; c"
+    assert(init ≈ List(q"a", q"b"))
+    assert(last ≈ q"c")
+  }
+
+  property("si-8275 c") = test {
+    val cq"_ => ..$stats" = cq"_ =>"
+    assert(stats.isEmpty)
+    assertEqAst(q"{ case _ => ..$stats }", "{ case _ => }")
+  }
+
+  property("can't flatten type into block") = test {
+    assertThrows[IllegalArgumentException] {
+      val tpt = tq"List[Int]"
+      q"..$tpt; ()"
+    }
+  }
+
+  property("term select doesn't match type select") = test {
+    assertThrows[MatchError] {
+      val q"$qual.$name" = tq"foo.bar"
+    }
+  }
+
+  property("type application doesn't match applied type") = test {
+    assertThrows[MatchError] {
+      val q"$f[..$targs]" = tq"foo[bar]"
+    }
+  }
+
+  property("match doesn't match partial function") = test {
+    assertThrows[MatchError] {
+      val q"$_ match { case ..$_ }" = q"{ case _ => }"
+    }
+  }
+
+  property("deconstruct partial function") = test {
+    val q"{ case ..$cases }" = q"{ case a => b case c => d }"
+    val List(cq"a => b", cq"c => d") = cases
+  }
+
+  property("SI-8350 `new C` and `new C()` are equivalent") = test {
+    val q"new C" = q"new C()"
+    val q"new C()" = q"new C"
+  }
+
+  property("SI-8350 new applications extracted only for non-empty ctor calls") = test{
+    val q"new $c1" = q"new C()"
+    assert(c1 ≈ tq"C")
+    val q"new $c2" = q"new C(x)"
+    assert(c2 ≈ q"${tq"C"}(x)")
+  }
+
+  property("SI-8350 original test case") = test {
+    val q"new ..$parents" = q"new Foo with Bar"
+    assert(parents ≈ List(tq"Foo", tq"Bar"))
+  }
+
+  property("SI-8387 new is not an application") = test {
+    val `new` = q"new F(x)"
+    val q"$f(...$argss)" = `new`
+    assert(f ≈ `new`)
+    assert(argss.isEmpty)
+  }
+}
diff --git a/test/files/scalacheck/quasiquotes/Test.scala b/test/files/scalacheck/quasiquotes/Test.scala
new file mode 100644
index 0000000..7a26fa4
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/Test.scala
@@ -0,0 +1,19 @@
+import org.scalacheck._
+
+object Test extends Properties("quasiquotes") {
+  include(TermConstructionProps)
+  include(TermDeconstructionProps)
+  include(TypeConstructionProps)
+  include(TypeDeconstructionProps)
+  include(PatternConstructionProps)
+  include(PatternDeconstructionProps)
+  include(LiftableProps)
+  include(UnliftableProps)
+  include(ErrorProps)
+  include(RuntimeErrorProps)
+  include(DefinitionConstructionProps)
+  include(DefinitionDeconstructionProps)
+  include(DeprecationProps)
+  include(ForProps)
+  include(TypecheckedProps)
+}
diff --git a/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala b/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala
new file mode 100644
index 0000000..27ad4c5
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala
@@ -0,0 +1,42 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.ScalaDot
+
+object TypeConstructionProps extends QuasiquoteProperties("type construction")  {
+  property("bare idents contain type names") = test {
+    tq"x" ≈ Ident(TypeName("x"))
+  }
+
+  property("unquote type names into AppliedTypeTree") = forAll { (name1: TypeName, name2: TypeName) =>
+    tq"$name1[$name2]" ≈ AppliedTypeTree(Ident(name1), List(Ident(name2)))
+  }
+
+  property("tuple type") = test {
+    val empty = List[Tree]()
+    val ts = List(tq"t1", tq"t2")
+    assert(tq"(..$empty)" ≈ ScalaDot(TypeName("Unit")))
+    assert(tq"(..$ts)" ≈ tq"scala.Tuple2[t1, t2]")
+    assert(tq"(t0, ..$ts)" ≈ tq"scala.Tuple3[t0, t1, t2]")
+  }
+
+  property("single-element tuple type") = test {
+    val ts = q"T" :: Nil
+    assert(tq"(..$ts)" ≈ ts.head)
+  }
+
+  property("refined type") = test {
+    val stats = q"def foo" :: q"val x: Int" :: q"type Y = String" :: Nil
+    assert(tq"T { ..$stats }" ≈ tq"T { def foo; val x: Int; type Y = String }")
+  }
+
+  property("function type") = test {
+    val argtpes = tq"A" :: tq"B" :: Nil
+    val restpe = tq"C"
+    assert(tq"..$argtpes => $restpe" ≈ tq"(A, B) => C")
+  }
+
+  property("empty tq") = test {
+    val tt: TypeTree = tq""
+    assert(tt.tpe == null)
+    assert(tt.original == null)
+  }
+}
diff --git a/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala
new file mode 100644
index 0000000..7572b27
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala
@@ -0,0 +1,78 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object TypeDeconstructionProps extends QuasiquoteProperties("type deconstruction") {
+  property("ident(type name)") = forAll { (name: TypeName) =>
+    val t = Ident(name)
+    val tq"$t1" = t
+    t1 ≈ t
+  }
+
+  property("applied type tree") = forAll { (name1: TypeName, name2: TypeName) =>
+    val tq"$a[$b]" = AppliedTypeTree(Ident(name1), List(Ident(name2)))
+    a ≈ Ident(name1) && b ≈ Ident(name2)
+  }
+
+  property("tuple type (1)") = test {
+    val tq"(..$empty)" = tq"_root_.scala.Unit"
+    assert(empty.isEmpty)
+  }
+
+  property("tuple type (2)") = test {
+    val tq"(..$ts)" = tq"(t1, t2)"
+    assert(ts ≈ List(tq"t1", tq"t2"))
+  }
+
+  property("tuple type (3)") = test {
+    val tq"($head, ..$tail)" = tq"(t0, t1, t2)"
+    assert(head ≈ tq"t0")
+    assert(tail ≈ List(tq"t1", tq"t2"))
+  }
+
+  property("tuple type (4)") = test {
+    val tq"(..$init, $last)" = tq"(t0, t1, t2)"
+    assert(init ≈ List(tq"t0", tq"t1"))
+    assert(last ≈ tq"t2")
+  }
+
+  property("tuple type (5)") = test {
+    val tq"(..$ts)" = tq"T"
+    assert(ts ≈ List(tq"T"))
+  }
+
+  property("refined type") = test {
+    val tq"T { ..$stats }" = tq"T { def foo; val x: Int; type Y = String }"
+    assert(stats ≈ List(q"def foo", q"val x: Int", q"type Y = String"))
+  }
+
+  property("function type (1)") = test {
+    val tq"..$argtpes => $restpe" = tq"(A, B) => C"
+    assert(argtpes ≈ List(tq"A", tq"B"))
+    assert(restpe ≈ tq"C")
+  }
+
+  property("function type (2)") = test {
+    val tq"(..$argtpes, $arglast) => $restpe" = tq"(A, B, C) => D"
+    assert(argtpes ≈ List(tq"A", tq"B"))
+    assert(arglast ≈ tq"C")
+    assert(restpe ≈ tq"D")
+  }
+
+  property("match empty type tree") = test {
+    val tq"" = TypeTree()
+    // matches because type tree isn't syntactic without original
+    val tq"" = tq"${typeOf[Int]}"
+  }
+
+  property("type select doesn't match term select") = test {
+    assertThrows[MatchError] {
+      val tq"$qual.$name" = q"foo.bar"
+    }
+  }
+
+  property("applied type doesn't match type appliction") = test {
+    assertThrows[MatchError] {
+      val tq"$tpt[..$tpts]" = q"foo[bar]"
+    }
+  }
+}
diff --git a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala b/test/files/scalacheck/quasiquotes/TypecheckedProps.scala
new file mode 100644
index 0000000..a5d5261
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/TypecheckedProps.scala
@@ -0,0 +1,216 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport._
+
+object TypecheckedProps extends QuasiquoteProperties("typechecked") {
+
+
+  property("tuple term") = test {
+    val q"(..$elements)" = typecheck(q"(1, 2)")
+    assert(elements ≈ List(q"1", q"2"))
+  }
+
+  property("for/for-yield") = test {
+    val enums = fq"x <- xs" :: fq"x1 = x + 1" :: fq"if x1 % 2 == 0" :: Nil
+    val body = q"x1"
+    val xs = q"val xs = List(1, 2, 3)"
+    val q"$_; for(..$enums0) yield $body0" = typecheck(q"$xs; for(..$enums) yield $body")
+    assert(enums0 ≈ enums)
+    assert(body0 ≈ body)
+    val q"$_; for(..$enums1) $body1" = typecheck(q"$xs; for(..$enums) $body")
+    assert(enums1 ≈ enums)
+    assert(body1 ≈ body)
+  }
+
+  property("for .filter instead of .withFilter") = test {
+    val enums = fq"foo <- new Foo" :: fq"if foo != null" :: Nil
+    val body = q"foo"
+    val q"$_; for(..$enums1) yield $body1" = typecheck(q"""
+      class Foo { def map(f: Any => Any) = this; def withFilter(cond: Any => Boolean) = this }
+      for(..$enums) yield $body
+    """)
+    assert(enums1 ≈ enums)
+    assert(body1 ≈ body)
+  }
+
+  property("extract UnApply (1)") = test {
+    val q"object $_ { $_; $_; $m }" = typecheck(q"""
+      object Test {
+        class Cell(val x: Int)
+        object Cell { def unapply(c: Cell) = Some(c.x) }
+        new Cell(0) match { case Cell(v) => v }
+      }
+    """)
+    val q"$_ match { case $f(..$args) => $_ }" = m
+    assert(f ≈ pq"Test.this.Cell")
+    assert(args ≈ List(pq"v"))
+  }
+
+  property("extract UnApply (2)") = test {
+    val q"object $_ { $_; $m }" = typecheck(q"""
+      object Test {
+        case class Cell(val x: Int)
+        new Cell(0) match { case Cell(v) => v }
+      }
+    """)
+    val q"$_ match { case ${f: TypeTree}(..$args) => $_ }" = m
+    assert(f.original ≈ pq"Test.this.Cell")
+    assert(args ≈ List(pq"v"))
+  }
+
+  property("extract inferred val type") = test {
+    val typechecked = typecheck(q"val x = 42")
+    val q"val x = 42" = typechecked
+    val q"val x: ${tq""} = 42" = typechecked
+    val q"val x: ${t: Type} = 42" = typechecked
+  }
+
+  property("class with param (1)") = test {
+    val paramName = TermName("x")
+    val q"class $_($param)" = typecheck(q"class Test(val $paramName: Int)")
+
+    assert(param.name == paramName)
+  }
+
+  property("class with param (2)") = test {
+    val paramName = TermName("y")
+    val q"{class $_($param)}" = typecheck(q"class Test(val $paramName: Int = 3)")
+
+    assert(param.name == paramName)
+    assert(param.rhs ≈ q"3")
+  }
+
+  property("class with params") = test {
+    val pName1 = TermName("x1")
+    val pName2 = TermName("x2")
+    val q"{class $_($param1)(..$params2)}" = typecheck(q"class Test(val x0: Float)(val $pName1: Int = 3, $pName2: String)")
+
+    val List(p1, p2, _*) = params2
+
+    assert(p1.name == pName1)
+    assert(p2.name == pName2)
+    assert(params2.size == 2)
+  }
+
+  property("implicit class") = test {
+    val clName = TypeName("Test")
+    val paramName = TermName("x")
+    val q"{implicit class $name($param)}" = typecheck(q"implicit class $clName(val $paramName: String)")
+
+    assert(name == clName)
+    assert(param.name == paramName)
+  }
+
+  property("block with lazy") = test {
+    val lazyName = TermName("x")
+    val lazyRhsVal = 42
+    val lazyRhs = Literal(Constant(lazyRhsVal))
+    val q"{lazy val $pname = $rhs}" = typecheck(q"{lazy val $lazyName = $lazyRhsVal}")
+
+    assert(pname == lazyName)
+    assert(rhs ≈ lazyRhs)
+  }
+
+  property("class with lazy") = test {
+    val clName = TypeName("Test")
+    val paramName = TermName("x")
+    val q"class $name{lazy val $pname = $_}" = typecheck(q"class $clName {lazy val $paramName = 42}")
+
+    assert(name == clName)
+    assert(pname == paramName)
+  }
+
+  property("case class with object") = test {
+    val defName = TermName("z")
+    val defRhsVal = 42
+    val defRhs = Literal(Constant(defRhsVal))
+    val q"object $_{ $_; object $_ extends ..$_ {def $name = $rhs} }" =
+      typecheck(q"""
+        object Test{
+          case class C(x: Int) { def y = x };
+          object C { def $defName = $defRhsVal }
+        }""")
+
+    assert(name == defName)
+    assert(rhs ≈ defRhs)
+  }
+
+  property("partial function") = test {
+    val q"{ case ..$cases }: $ascr" = typecheck(q"{ case 1 => () }: PartialFunction[Int, Unit]")
+    assert(cases ≈ q"{ case 1 => () }".cases)
+  }
+}
+
+trait TypecheckedTypes { self: QuasiquoteProperties =>
+  property("type ident") = test {
+    val q"$_; type $_ = $tpt" = typecheck(q"class C; type T = C")
+    val tq"C" = tpt
+  }
+
+  property("type select") = test {
+    val tq"scala.Int" = typecheckTyp(tq"Int")
+  }
+
+  property("this type select") = test {
+    val q"class $_ { $_; type $_ = $tpt }" = typecheck(q"class C { type A = Int; type B = this.A }")
+    val tq"this.$name" = tpt
+    val TypeName("A") = name
+  }
+
+  property("super type select") = test {
+    val q"$_; class $_ extends $_ { type $_ = $tpt }" =
+      typecheck(q"class C1 { type A = Int }; class C2 extends C1 { type B = super[C1].A }")
+    val tq"$empty.super[$c1].$a" = tpt
+    val TypeName("") = empty
+    val TypeName("C1") = c1
+    val TypeName("A") = a
+  }
+
+  property("applied type") = test {
+    val tt = typecheckTyp(q"Map[Int, Int]")
+    val tq"$tpt[..$tpts]" = tt
+    val tq"scala.this.Predef.Map" = tpt
+    val List(tq"scala.Int", tq"scala.Int") = tpts
+  }
+
+  property("tuple type") = test {
+    val tq"(..$els0)" = typecheckTyp(tq"Unit")
+    assert(els0.isEmpty)
+    val tq"(..$els1)" = typecheckTyp(tq"(Int, Int)")
+    val List(tq"scala.Int", tq"scala.Int") = els1
+  }
+
+  property("function type") = test {
+    val tq"(..$argtpes) => $restpe" = typecheckTyp(tq"(Int, Int) => Int")
+    val List(tq"scala.Int", tq"scala.Int") = argtpes
+    val tq"scala.Int" = restpe
+  }
+
+  property("compound type") = test {
+    val tq"..$parents { ..$defns }" = typecheckTyp(tq"Int { def x: Int }")
+    val List(tq"Int") = parents
+    val List(q"def x: Int") = defns
+  }
+
+  property("singleton type") = test {
+    val tq"$ref.type" = typecheckTyp(tq"scala.Predef.type")
+    val q"scala.Predef" = ref
+  }
+
+  property("type projection") = test {
+    val tq"$tpt#$name" = typecheckTyp(tq"({ type T = Int })#T")
+    val TypeName("T") = name
+    val tq"{ type T = Int }" = tpt
+  }
+
+  property("annotated type") = test {
+    val tq"$tpt @$annot" = typecheckTyp(tq"Int @unchecked")
+    val tq"scala.Int" = tpt
+    val q"new unchecked" = annot
+  }
+
+  property("existential type") = test {
+    val tq"$tpt forSome { ..$defns }" = typecheckTyp(tq"T forSome { type T }")
+    val tq"T" = tpt
+    val q"type T" :: Nil = defns
+  }
+}
diff --git a/test/files/scalacheck/quasiquotes/UnliftableProps.scala b/test/files/scalacheck/quasiquotes/UnliftableProps.scala
new file mode 100644
index 0000000..659b18e
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/UnliftableProps.scala
@@ -0,0 +1,166 @@
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object UnliftableProps extends QuasiquoteProperties("unliftable") {
+  property("unlift name") = test {
+    val termname0 = TermName("foo")
+    val typename0 = TypeName("foo")
+    val q"${termname1: TermName}" = Ident(termname0)
+    assert(termname1 == termname0)
+    val q"${typename1: TypeName}" = Ident(typename0)
+    assert(typename1 == typename0)
+    val q"${name1: Name}" = Ident(termname0)
+    assert(name1 == termname0)
+    val q"${name2: Name}" = Ident(typename0)
+    assert(name2 == typename0)
+  }
+
+  property("unlift type") = test {
+    val q"${tpe: Type}" = TypeTree(typeOf[Int])
+    assert(tpe =:= typeOf[Int])
+  }
+
+  property("unlift constant") = test {
+    val q"${const: Constant}" = Literal(Constant("foo"))
+    assert(const == Constant("foo"))
+  }
+
+  property("unlift char") = test {
+    val q"${c: Char}" = Literal(Constant('0'))
+    assert(c.isInstanceOf[Char] && c == '0')
+  }
+
+  property("unlift byte") = test {
+    val q"${b: Byte}" = Literal(Constant(0: Byte))
+    assert(b.isInstanceOf[Byte] && b == 0)
+  }
+
+  property("unlift short") = test {
+    val q"${s: Short}" = Literal(Constant(0: Short))
+    assert(s.isInstanceOf[Short] && s == 0)
+  }
+
+  property("unlift int") = test {
+    val q"${i: Int}" = Literal(Constant(0: Int))
+    assert(i.isInstanceOf[Int] && i == 0)
+  }
+
+  property("unlift long") = test {
+    val q"${l: Long}" = Literal(Constant(0L: Long))
+    assert(l.isInstanceOf[Long] && l == 0L)
+  }
+
+  property("unlift float") = test {
+    val q"${f: Float}" = Literal(Constant(0.0f: Float))
+    assert(f.isInstanceOf[Float] && f == 0.0f)
+  }
+
+  property("unlift double") = test {
+    val q"${d: Double}" = Literal(Constant(0.0: Double))
+    assert(d.isInstanceOf[Double] && d == 0.0)
+  }
+
+  property("unlift bool") = test {
+    val q"${b: Boolean}" = q"true"
+    assert(b.isInstanceOf[Boolean] && b == true)
+  }
+
+  property("unlift string") = test {
+    val q"${s: String}" = q""" "foo" """
+    assert(s.isInstanceOf[String] && s == "foo")
+  }
+
+  property("unlift scala.symbol") = test {
+    val q"${s: scala.Symbol}" = q"'foo"
+    assert(s.isInstanceOf[scala.Symbol] && s == 'foo)
+  }
+
+  implicit def unliftList[T: Unliftable]: Unliftable[List[T]] = Unliftable {
+    case q"scala.collection.immutable.List(..$args)" if args.forall { implicitly[Unliftable[T]].unapply(_).nonEmpty } =>
+      val ut = implicitly[Unliftable[T]]
+      args.flatMap { ut.unapply(_) }
+  }
+
+  property("unlift list (1)") = test {
+    val orig = List(1, 2)
+    val q"${l1: List[Int]}" = q"$orig" // q"List(1, 2)"
+    assert(l1 == orig)
+    val q"f(..${l2: List[Int]})" = q"f(..$orig)" // q"f(1, 2)
+    assert(l2 == orig)
+  }
+
+  property("unlift list (2)") = test {
+    val orig2 = List(List(1, 2), List(3))
+    val q"f(${l3: List[List[Int]]})" = q"f($orig2)" // q"f(List(List(1, 2), List(3)))
+    assert(l3 == orig2)
+    val q"f(..${l4: List[List[Int]]})" = q"f(..$orig2)" // q"f(List(1, 2), List(3))"
+    assert(l4 == orig2)
+    val q"f(...${l5: List[List[Int]]})" = q"f(...$orig2)" // q"f(1, 2)(3)
+    assert(l5 == orig2)
+  }
+
+  property("don't unlift non-tree unquotee (1)") = test {
+    val q"${a: TermName}.${b: TermName}" = q"a.b"
+    assert(a == TermName("a"))
+    assert(b == TermName("b"))
+  }
+
+  property("don't unlift non-tree unquotee (2)") = test {
+    val q"${mods: Modifiers} def foo" = q"def foo"
+    assert(mods == Modifiers(DEFERRED))
+  }
+
+  property("unlift tuple") = test {
+    val q"${t2: (Int, Int)}" = q"(1, 2)"
+    val q"${t3: (Int, Int, Int)}" = q"(1, 2, 3)"
+    val q"${t4: (Int, Int, Int, Int)}" = q"(1, 2, 3, 4)"
+    val q"${t5: (Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5)"
+    val q"${t6: (Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6)"
+    val q"${t7: (Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7)"
+    val q"${t8: (Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8)"
+    val q"${t9: (Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9)"
+    val q"${t10: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)"
+    val q"${t11: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)"
+    val q"${t12: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)"
+    val q"${t13: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13)"
+    val q"${t14: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14)"
+    val q"${t15: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15)"
+    val q"${t16: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16)"
+    val q"${t17: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)"
+    val q"${t18: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18)"
+    val q"${t19: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19)"
+    val q"${t20: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20)"
+    val q"${t21: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21)"
+    val q"${t22: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22)"
+    // assert(t1 == Tuple1(1))
+    assert(t2 == (1, 2))
+    assert(t3 == (1, 2, 3))
+    assert(t4 == (1, 2, 3, 4))
+    assert(t5 == (1, 2, 3, 4, 5))
+    assert(t6 == (1, 2, 3, 4, 5, 6))
+    assert(t7 == (1, 2, 3, 4, 5, 6, 7))
+    assert(t8 == (1, 2, 3, 4, 5, 6, 7, 8))
+    assert(t9 == (1, 2, 3, 4, 5, 6, 7, 8, 9))
+    assert(t10 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
+    assert(t11 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
+    assert(t12 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
+    assert(t13 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))
+    assert(t14 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))
+    assert(t15 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15))
+    assert(t16 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16))
+    assert(t17 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17))
+    assert(t18 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18))
+    assert(t19 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19))
+    assert(t20 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20))
+    assert(t21 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21))
+    assert(t22 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))
+  }
+
+  property("unlift xml comment") = test {
+    implicit val unliftXmlComment = Unliftable[xml.Comment] {
+      case q"new _root_.scala.xml.Comment(${value: String})" => xml.Comment(value)
+    }
+    val q"${comment: xml.Comment}" = q"<!--foo-->"
+    assert(comment.commentText == "foo")
+  }
+}
diff --git a/test/files/scalacheck/range.scala b/test/files/scalacheck/range.scala
index 7297911..493083a 100644
--- a/test/files/scalacheck/range.scala
+++ b/test/files/scalacheck/range.scala
@@ -30,7 +30,7 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
   def myGen: Gen[Range]
 
   def genReasonableSizeRange = oneOf(genArbitraryRange, genBoundaryRange)
-    
+
   def genArbitraryRange = for {
     start <- choose(Int.MinValue, Int.MaxValue)
     end <- choose(Int.MinValue, Int.MaxValue)
@@ -56,7 +56,7 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
   } yield if (start < end) Range(start, end, step) else Range(start, end, -step)
 
   def genRangeByOne = oneOf(genRangeOpenByOne, genRangeClosedByOne)
-    
+
   def genRangeOpenByOne = for {
     r <- oneOf(genSmallRange, genBoundaryRange)
     if (r.end.toLong - r.start.toLong).abs <= 10000000L
@@ -127,6 +127,47 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
     (visited == expectedSize(r)) :| str(r)
   }
 
+  property("sum") = forAll(myGen) { r =>
+//    println("----------")
+//    println("sum "+str(r))
+    val rSum = r.sum
+    val expected = r.length match {
+      case 0 => 0
+      case 1 => r.head
+      case _ => ((r.head + r.last).toLong * r.length  / 2).toInt
+    }
+//   println("size: " + r.length)
+//   println("expected: " + expected)
+//   println("obtained: " + rSum)
+
+   (rSum == expected) :| str(r)
+  }
+
+/* checks that sum respects custom Numeric */
+  property("sumCustomNumeric") = forAll(myGen) { r =>
+    val mod = 65536
+    object mynum extends Numeric[Int] {
+        def plus(x: Int, y: Int): Int = (x + y) % mod
+        override def zero = 0
+
+        def fromInt(x: Int): Int = ???
+        def minus(x: Int, y: Int): Int = ???
+        def negate(x: Int): Int = ???
+        def times(x: Int, y: Int): Int = ???
+        def toDouble(x: Int): Double = ???
+        def toFloat(x: Int): Float = ???
+        def toInt(x: Int): Int = ((x % mod) + mod * 2) % mod
+        def toLong(x: Int): Long = ???
+        def compare(x: Int, y: Int): Int = ???
+      }
+
+    val rSum = r.sum(mynum)
+    val expected = mynum.toInt(r.sum)
+
+    (rSum == expected) :| str(r)
+  }
+
+
   property("length") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
 //    println("length "+str(r))
     (r.length == expectedSize(r)) :| str(r)
@@ -224,7 +265,8 @@ object TooLargeRange extends Properties("Too Large Range") {
   property("Too large range throws exception") = forAll(genTooLargeStart) { start =>
     try   {
       val r = Range.inclusive(start, Int.MaxValue, 1)
-      println("how here? r = " + r.toString)
+      val l = r.length
+      println("how here? length = " + l + ", r = " + r.toString)
       false
     }
     catch { case _: IllegalArgumentException => true }
diff --git a/test/files/scalacheck/redblack.scala b/test/files/scalacheck/redblack.scala
deleted file mode 100644
index bbc6504..0000000
--- a/test/files/scalacheck/redblack.scala
+++ /dev/null
@@ -1,213 +0,0 @@
-import org.scalacheck._
-import Prop._
-import Gen._
-
-/*
-Properties of a Red & Black Tree:
-
-A node is either red or black.
-The root is black. (This rule is used in some definitions and not others. Since the
-root can always be changed from red to black but not necessarily vice-versa this
-rule has little effect on analysis.)
-All leaves are black.
-Both children of every red node are black.
-Every simple path from a given node to any of its descendant leaves contains the same number of black nodes.
-*/
-
-abstract class RedBlackTest extends Properties("RedBlack") {
-  def minimumSize = 0
-  def maximumSize = 5
-
-  object RedBlackTest extends scala.collection.immutable.RedBlack[String] {
-    def isSmaller(x: String, y: String) = x < y
-  }
-
-  import RedBlackTest._
-
-  def nodeAt[A](tree: Tree[A], n: Int): Option[(String, A)] = if (n < tree.iterator.size && n >= 0)
-    Some(tree.iterator.drop(n).next)
-  else
-    None
-
-  def treeContains[A](tree: Tree[A], key: String) = tree.iterator.map(_._1) contains key
-
-  def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[Int]] =
-    if (level == 0) {
-      value(Empty)
-    } else {
-      for {
-        oddOrEven <- choose(0, 2)
-        tryRed = oddOrEven.sample.get % 2 == 0 // work around arbitrary[Boolean] bug
-        isRed = parentIsBlack && tryRed
-        nextLevel = if (isRed) level else level - 1
-        left <- mkTree(nextLevel, !isRed, label + "L")
-        right <- mkTree(nextLevel, !isRed, label + "R")
-      } yield {
-        if (isRed)
-          RedTree(label + "N", 0, left, right)
-        else
-          BlackTree(label + "N", 0, left, right)
-      }
-    }
-
-  def genTree = for {
-    depth <- choose(minimumSize, maximumSize + 1)
-    tree <- mkTree(depth)
-  } yield tree
-
-  type ModifyParm
-  def genParm(tree: Tree[Int]): Gen[ModifyParm]
-  def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int]
-
-  def genInput: Gen[(Tree[Int], ModifyParm, Tree[Int])] = for {
-    tree <- genTree
-    parm <- genParm(tree)
-  } yield (tree, parm, modify(tree, parm))
-}
-
-trait RedBlackInvariants {
-  self: RedBlackTest =>
-
-  import RedBlackTest._
-
-  def rootIsBlack[A](t: Tree[A]) = t.isBlack
-
-  def areAllLeavesBlack[A](t: Tree[A]): Boolean = t match {
-    case Empty => t.isBlack
-    case ne: NonEmpty[_] => List(ne.left, ne.right) forall areAllLeavesBlack
-  }
-
-  def areRedNodeChildrenBlack[A](t: Tree[A]): Boolean = t match {
-    case RedTree(_, _, left, right) => List(left, right) forall (t => t.isBlack && areRedNodeChildrenBlack(t))
-    case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack
-    case Empty => true
-  }
-
-  def blackNodesToLeaves[A](t: Tree[A]): List[Int] = t match {
-    case Empty => List(1)
-    case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1)
-    case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves
-  }
-
-  def areBlackNodesToLeavesEqual[A](t: Tree[A]): Boolean = t match {
-    case Empty => true
-    case ne: NonEmpty[_] =>
-      (
-        blackNodesToLeaves(ne).distinct.size == 1
-        && areBlackNodesToLeavesEqual(ne.left)
-        && areBlackNodesToLeavesEqual(ne.right)
-      )
-  }
-
-  def orderIsPreserved[A](t: Tree[A]): Boolean =
-    t.iterator zip t.iterator.drop(1) forall { case (x, y) => isSmaller(x._1, y._1) }
-
-  def setup(invariant: Tree[Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) =>
-    invariant(newTree)
-  }
-
-  property("root is black") = setup(rootIsBlack)
-  property("all leaves are black") = setup(areAllLeavesBlack)
-  property("children of red nodes are black") = setup(areRedNodeChildrenBlack)
-  property("black nodes are balanced") = setup(areBlackNodesToLeavesEqual)
-  property("ordering of keys is preserved") = setup(orderIsPreserved)
-}
-
-object TestInsert extends RedBlackTest with RedBlackInvariants {
-  import RedBlackTest._
-
-  override type ModifyParm = Int
-  override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size + 1)
-  override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = tree update (generateKey(tree, parm), 0)
-
-  def generateKey(tree: Tree[Int], parm: ModifyParm): String = nodeAt(tree, parm) match {
-    case Some((key, _)) => key.init.mkString + "MN"
-    case None => nodeAt(tree, parm - 1) match {
-      case Some((key, _)) => key.init.mkString + "RN"
-      case None  => "N"
-    }
-  }
-
-  property("update adds elements") = forAll(genInput) { case (tree, parm, newTree) =>
-    treeContains(newTree, generateKey(tree, parm))
-  }
-}
-
-object TestModify extends RedBlackTest {
-  import RedBlackTest._
-
-  def newValue = 1
-  override def minimumSize = 1
-  override type ModifyParm = Int
-  override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size)
-  override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map {
-    case (key, _) => tree update (key, newValue)
-  } getOrElse tree
-
-  property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) =>
-    nodeAt(tree,parm) forall { case (key, _) =>
-      newTree.iterator contains (key, newValue)
-    }
-  }
-}
-
-object TestDelete extends RedBlackTest with RedBlackInvariants  {
-  import RedBlackTest._
-
-  override def minimumSize = 1
-  override type ModifyParm = Int
-  override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size)
-  override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map {
-    case (key, _) => tree delete key
-  } getOrElse tree
-
-  property("delete removes elements") = forAll(genInput) { case (tree, parm, newTree) =>
-    nodeAt(tree, parm) forall { case (key, _) =>
-      !treeContains(newTree, key)
-    }
-  }
-}
-
-object TestRange extends RedBlackTest with RedBlackInvariants  {
-  import RedBlackTest._
-
-  override type ModifyParm = (Option[Int], Option[Int])
-  override def genParm(tree: Tree[Int]): Gen[ModifyParm] = for {
-    from <- choose(0, tree.iterator.size)
-    to <- choose(0, tree.iterator.size) suchThat (from <=)
-    optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug
-    optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug
-  } yield (optionalFrom, optionalTo)
-
-  override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = {
-    val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
-    val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
-    tree range (from, to)
-  }
-
-  property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) =>
-    val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
-    val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
-    ("lower boundary" |: (from forall ( key => newTree.iterator.map(_._1) forall (key <=)))) &&
-    ("upper boundary" |: (to forall ( key => newTree.iterator.map(_._1) forall (key >))))
-  }
-
-  property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) =>
-    val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
-    val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
-    val filteredTree = (tree.iterator
-      .map(_._1)
-      .filter(key => from forall (key >=))
-      .filter(key => to forall (key <))
-      .toList)
-    filteredTree == newTree.iterator.map(_._1).toList
-  }
-}
-
-object Test extends Properties("RedBlack") {
-  include(TestInsert)
-  include(TestModify)
-  include(TestDelete)
-  include(TestRange)
-}
-
diff --git a/test/files/scalacheck/redblacktree.scala b/test/files/scalacheck/redblacktree.scala
index bc7f92a..871444a 100644
--- a/test/files/scalacheck/redblacktree.scala
+++ b/test/files/scalacheck/redblacktree.scala
@@ -205,22 +205,22 @@ package scala.collection.immutable.redblacktree {
       filteredTree == keysIterator(newTree).toList
     }
   }
-  
+
   object TestDrop extends RedBlackTreeTest with RedBlackTreeInvariants  {
     import RB._
-    
+
     override type ModifyParm = Int
     override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
     override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = drop(tree, parm)
-    
+
     property("drop") = forAll(genInput) { case (tree, parm, newTree) =>
       iterator(tree).drop(parm).toList == iterator(newTree).toList
     }
   }
-  
+
   object TestTake extends RedBlackTreeTest with RedBlackTreeInvariants  {
     import RB._
-    
+
     override type ModifyParm = Int
     override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
     override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = take(tree, parm)
@@ -229,7 +229,7 @@ package scala.collection.immutable.redblacktree {
       iterator(tree).take(parm).toList == iterator(newTree).toList
     }
   }
-  
+
   object TestSlice extends RedBlackTreeTest with RedBlackTreeInvariants  {
     import RB._
 
@@ -239,7 +239,7 @@ package scala.collection.immutable.redblacktree {
       to <- choose(from, iterator(tree).size)
     } yield (from, to)
     override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = slice(tree, parm._1, parm._2)
-    
+
     property("slice") = forAll(genInput) { case (tree, parm, newTree) =>
       iterator(tree).slice(parm._1, parm._2).toList == iterator(newTree).toList
     }
diff --git a/test/files/scalacheck/si4147.scala b/test/files/scalacheck/si4147.scala
deleted file mode 100644
index 1453440..0000000
--- a/test/files/scalacheck/si4147.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-import org.scalacheck.Prop.forAll
-import org.scalacheck.Properties
-import org.scalacheck.ConsoleReporter.testStatsEx
-import org.scalacheck.Gen
-import org.scalacheck.ConsoleReporter
-
-
-import collection.mutable
-
-
-object Test extends Properties("Mutable TreeSet") {
-
-  val generator = Gen.listOfN(1000, Gen.chooseNum(0, 1000))
-
-  val denseGenerator = Gen.listOfN(1000, Gen.chooseNum(0, 200))
-
-  property("Insertion doesn't allow duplicates values.") = forAll(generator) { (s: List[Int]) =>
-    {
-      val t = mutable.TreeSet[Int](s: _*)
-      t == s.toSet
-    }
-  }
-
-  property("Verification of size method validity") = forAll(generator) { (s: List[Int]) =>
-    {
-      val t = mutable.TreeSet[Int](s: _*)
-      for (a <- s) {
-        t -= a
-      }
-      t.size == 0
-    }
-  }
-
-  property("All inserted elements are removed") = forAll(generator) { (s: List[Int]) =>
-    {
-      val t = mutable.TreeSet[Int](s: _*)
-      for (a <- s) {
-        t -= a
-      }
-      t == Set()
-    }
-  }
-
-  property("Elements are sorted.") = forAll(generator) { (s: List[Int]) =>
-    {
-      val t = mutable.TreeSet[Int](s: _*)
-      t.toList == s.distinct.sorted
-    }
-  }
-
-  property("Implicit CanBuildFrom resolution succeeds as well as the \"same-result-type\" principle.") =
-    forAll(generator) { (s: List[Int]) =>
-      {
-        val t = mutable.TreeSet[Int](s: _*)
-        val t2 = t.map(_ * 2)
-        t2.isInstanceOf[collection.mutable.TreeSet[Int]]
-      }
-    }
-
-  property("A view doesn't expose off bounds elements") = forAll(denseGenerator) { (s: List[Int]) =>
-    {
-      val t = mutable.TreeSet[Int](s: _*)
-      val view = t.rangeImpl(Some(50), Some(150))
-      view.filter(_ < 50) == Set[Int]() && view.filter(_ >= 150) == Set[Int]()
-    }
-  }
-}
diff --git a/test/files/scalacheck/substringTests.scala b/test/files/scalacheck/substringTests.scala
index a48356e..76260b9 100644
--- a/test/files/scalacheck/substringTests.scala
+++ b/test/files/scalacheck/substringTests.scala
@@ -6,11 +6,11 @@ object Test extends Properties("String") {
 
   property("endsWith") = Prop.forAll((a: String, b: String) => (a+b).endsWith(b))
 
-  property("concat") = Prop.forAll((a: String, b: String) => 
+  property("concat") = Prop.forAll((a: String, b: String) =>
     (a+b).length >= a.length && (a+b).length >= b.length
   )
 
-  property("substring") = Prop.forAll((a: String, b: String) => 
+  property("substring") = Prop.forAll((a: String, b: String) =>
     (a+b).substring(a.length) == b
   )
 
diff --git a/test/files/scalacheck/t2460.scala b/test/files/scalacheck/t2460.scala
index 196b437..ab29114 100644
--- a/test/files/scalacheck/t2460.scala
+++ b/test/files/scalacheck/t2460.scala
@@ -1,6 +1,5 @@
 import org.scalacheck.Prop.forAll
 import org.scalacheck.Properties
-import org.scalacheck.ConsoleReporter.testStatsEx
 import org.scalacheck.{Test => SCTest}
 import org.scalacheck.Gen
 
@@ -25,8 +24,4 @@ object Test extends Properties("Regex : Ticket 2460") {
     ("numberOfGroup", numberOfGroup),
     ("nameOfGroup", nameOfGroup)
   )
-
-  /*tests foreach {
-    case (name, p) => testStatsEx(name, SCTest.check(p))
-  }*/
 }
diff --git a/test/files/scalacheck/t4147.scala b/test/files/scalacheck/t4147.scala
new file mode 100644
index 0000000..72f6e9a
--- /dev/null
+++ b/test/files/scalacheck/t4147.scala
@@ -0,0 +1,68 @@
+import org.scalacheck.Prop.{forAll, throws}
+import org.scalacheck.Properties
+import org.scalacheck.Gen
+
+
+import collection.mutable
+
+
+object Test extends Properties("Mutable TreeSet") {
+
+  val generator = Gen.listOfN(1000, Gen.chooseNum(0, 1000))
+
+  val denseGenerator = Gen.listOfN(1000, Gen.chooseNum(0, 200))
+
+  property("Insertion doesn't allow duplicates values.") = forAll(generator) { (s: List[Int]) =>
+    {
+      val t = mutable.TreeSet[Int](s: _*)
+      t == s.toSet
+    }
+  }
+
+  property("Verification of size method validity") = forAll(generator) { (s: List[Int]) =>
+    {
+      val t = mutable.TreeSet[Int](s: _*)
+      for (a <- s) {
+        t -= a
+      }
+      t.size == 0
+    }
+  }
+
+  property("All inserted elements are removed") = forAll(generator) { (s: List[Int]) =>
+    {
+      val t = mutable.TreeSet[Int](s: _*)
+      for (a <- s) {
+        t -= a
+      }
+      t == Set()
+    }
+  }
+
+  property("Elements are sorted.") = forAll(generator) { (s: List[Int]) =>
+    {
+      val t = mutable.TreeSet[Int](s: _*)
+      t.toList == s.distinct.sorted
+    }
+  }
+
+  property("Implicit CanBuildFrom resolution succeeds as well as the \"same-result-type\" principle.") =
+    forAll(generator) { (s: List[Int]) =>
+      {
+        val t = mutable.TreeSet[Int](s: _*)
+        val t2 = t.map(_ * 2)
+        t2.isInstanceOf[collection.mutable.TreeSet[Int]]
+      }
+    }
+
+  property("A view doesn't expose off bounds elements") = forAll(denseGenerator) { (s: List[Int]) =>
+    {
+      val t = mutable.TreeSet[Int](s: _*)
+      val view = t.rangeImpl(Some(50), Some(150))
+      view.filter(_ < 50) == Set[Int]() && view.filter(_ >= 150) == Set[Int]()
+    }
+  }
+
+  property("ordering must not be null") =
+    throws(classOf[NullPointerException])(mutable.TreeSet.empty[Int](null))
+}
diff --git a/test/files/scalacheck/treeset.scala b/test/files/scalacheck/treeset.scala
index 98e38c8..4b9b77d 100644
--- a/test/files/scalacheck/treeset.scala
+++ b/test/files/scalacheck/treeset.scala
@@ -149,4 +149,7 @@ object Test extends Properties("TreeSet") {
     val result = subject.foldLeft(subject)((acc, elt) => acc - elt)
     result.isEmpty
   }
+
+  property("ordering must not be null") =
+    throws(classOf[NullPointerException])(TreeSet.empty[Int](null))
 }
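Both TreeSet suites now pin down the same contract: constructing a tree set with a null Ordering must fail immediately. A minimal standalone sketch of that expectation follows (it is not part of the patch; the object and helper names are illustrative):

object OrderingNullSketch extends App {
  import scala.collection.{immutable, mutable}

  // Reports whether evaluating `body` throws a NullPointerException.
  def throwsNPE(body: => Any): Boolean =
    try { body; false } catch { case _: NullPointerException => true }

  // Per the new properties, a null Ordering is rejected at construction time.
  println(throwsNPE(mutable.TreeSet.empty[Int](null)))   // expected: true
  println(throwsNPE(immutable.TreeSet.empty[Int](null))) // expected: true
}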
diff --git a/test/files/scalap/abstractClass.check b/test/files/scalap/abstractClass.check
new file mode 100644
index 0000000..95e80ac
--- /dev/null
+++ b/test/files/scalap/abstractClass.check
@@ -0,0 +1,4 @@
+abstract class AbstractClass extends scala.AnyRef {
+  def this() = { /* compiled code */ }
+  def foo: scala.Predef.String
+}
diff --git a/test/files/scalap/abstractClass/A.scala b/test/files/scalap/abstractClass.scala
similarity index 100%
rename from test/files/scalap/abstractClass/A.scala
rename to test/files/scalap/abstractClass.scala
diff --git a/test/files/scalap/abstractClass/result.test b/test/files/scalap/abstractClass/result.test
deleted file mode 100644
index ef1daac..0000000
--- a/test/files/scalap/abstractClass/result.test
+++ /dev/null
@@ -1,4 +0,0 @@
-abstract class AbstractClass extends scala.AnyRef {
-  def this() = { /* compiled code */ }
-  def foo : scala.Predef.String
-}
diff --git a/test/files/scalap/abstractMethod.check b/test/files/scalap/abstractMethod.check
new file mode 100644
index 0000000..0d0b1b7
--- /dev/null
+++ b/test/files/scalap/abstractMethod.check
@@ -0,0 +1,5 @@
+trait AbstractMethod extends scala.AnyRef {
+  def $init$(): scala.Unit = { /* compiled code */ }
+  def arity: scala.Int
+  def isCool: scala.Boolean = { /* compiled code */ }
+}
diff --git a/test/files/scalap/abstractMethod/A.scala b/test/files/scalap/abstractMethod.scala
similarity index 100%
rename from test/files/scalap/abstractMethod/A.scala
rename to test/files/scalap/abstractMethod.scala
diff --git a/test/files/scalap/abstractMethod/result.test b/test/files/scalap/abstractMethod/result.test
deleted file mode 100644
index 40fa02d..0000000
--- a/test/files/scalap/abstractMethod/result.test
+++ /dev/null
@@ -1,5 +0,0 @@
-trait AbstractMethod extends scala.AnyRef {
-  def $init$() : scala.Unit = { /* compiled code */ }
-  def arity : scala.Int
-  def isCool : scala.Boolean = { /* compiled code */ }
-}
diff --git a/test/files/scalap/caseClass.check b/test/files/scalap/caseClass.check
new file mode 100644
index 0000000..51ad90d
--- /dev/null
+++ b/test/files/scalap/caseClass.check
@@ -0,0 +1,20 @@
+case class CaseClass[A <: scala.Seq[scala.Int]](i: A, s: scala.Predef.String) extends scala.AnyRef with scala.Product with scala.Serializable {
+  val i: A = { /* compiled code */ }
+  val s: scala.Predef.String = { /* compiled code */ }
+  def foo: scala.Int = { /* compiled code */ }
+  def copy[A <: scala.Seq[scala.Int]](i: A, s: scala.Predef.String): CaseClass[A] = { /* compiled code */ }
+  override def productPrefix: java.lang.String = { /* compiled code */ }
+  def productArity: scala.Int = { /* compiled code */ }
+  def productElement(x$1: scala.Int): scala.Any = { /* compiled code */ }
+  override def productIterator: scala.collection.Iterator[scala.Any] = { /* compiled code */ }
+  def canEqual(x$1: scala.Any): scala.Boolean = { /* compiled code */ }
+  override def hashCode(): scala.Int = { /* compiled code */ }
+  override def toString(): java.lang.String = { /* compiled code */ }
+  override def equals(x$1: scala.Any): scala.Boolean = { /* compiled code */ }
+}
+object CaseClass extends scala.AnyRef with scala.Serializable {
+  def this() = { /* compiled code */ }
+  final override def toString(): java.lang.String = { /* compiled code */ }
+  def apply[A <: scala.Seq[scala.Int]](i: A, s: scala.Predef.String): CaseClass[A] = { /* compiled code */ }
+  def unapply[A <: scala.Seq[scala.Int]](x$0: CaseClass[A]): scala.Option[scala.Tuple2[A, scala.Predef.String]] = { /* compiled code */ }
+}
diff --git a/test/files/scalap/caseClass.scala b/test/files/scalap/caseClass.scala
new file mode 100644
index 0000000..be86714
--- /dev/null
+++ b/test/files/scalap/caseClass.scala
@@ -0,0 +1,3 @@
+case class CaseClass[A <: Seq[Int]](i: A, s: String) {
+    def foo = 239
+}
diff --git a/test/files/scalap/caseClass/A.scala b/test/files/scalap/caseClass/A.scala
deleted file mode 100644
index 95f9984..0000000
--- a/test/files/scalap/caseClass/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-case class CaseClass[A <: Seq[Int]](i: A, s: String) {
-    def foo = 239 
-}
diff --git a/test/files/scalap/caseClass/result.test b/test/files/scalap/caseClass/result.test
deleted file mode 100644
index 7d7aa4f..0000000
--- a/test/files/scalap/caseClass/result.test
+++ /dev/null
@@ -1,20 +0,0 @@
-case class CaseClass[A <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) extends scala.AnyRef with scala.Product with scala.Serializable {
-  val i : A = { /* compiled code */ }
-  val s : scala.Predef.String = { /* compiled code */ }
-  def foo : scala.Int = { /* compiled code */ }
-  def copy[A <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) : CaseClass[A] = { /* compiled code */ }
-  override def productPrefix : java.lang.String = { /* compiled code */ }
-  def productArity : scala.Int = { /* compiled code */ }
-  def productElement(x$1 : scala.Int) : scala.Any = { /* compiled code */ }
-  override def productIterator : scala.collection.Iterator[scala.Any] = { /* compiled code */ }
-  def canEqual(x$1 : scala.Any) : scala.Boolean = { /* compiled code */ }
-  override def hashCode() : scala.Int = { /* compiled code */ }
-  override def toString() : java.lang.String = { /* compiled code */ }
-  override def equals(x$1 : scala.Any) : scala.Boolean = { /* compiled code */ }
-}
-object CaseClass extends scala.AnyRef with scala.Serializable {
-  def this() = { /* compiled code */ }
-  final override def toString() : java.lang.String = { /* compiled code */ }
-  def apply[A <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) : CaseClass[A] = { /* compiled code */ }
-  def unapply[A <: scala.Seq[scala.Int]](x$0 : CaseClass[A]) : scala.Option[scala.Tuple2[A, scala.Predef.String]] = { /* compiled code */ }
-}
diff --git a/test/files/scalap/caseObject.check b/test/files/scalap/caseObject.check
new file mode 100644
index 0000000..a342e5f
--- /dev/null
+++ b/test/files/scalap/caseObject.check
@@ -0,0 +1,10 @@
+case object CaseObject extends scala.AnyRef with scala.Product with scala.Serializable {
+  def bar: scala.Int = { /* compiled code */ }
+  override def productPrefix: java.lang.String = { /* compiled code */ }
+  def productArity: scala.Int = { /* compiled code */ }
+  def productElement(x$1: scala.Int): scala.Any = { /* compiled code */ }
+  override def productIterator: scala.collection.Iterator[scala.Any] = { /* compiled code */ }
+  def canEqual(x$1: scala.Any): scala.Boolean = { /* compiled code */ }
+  override def hashCode(): scala.Int = { /* compiled code */ }
+  override def toString(): java.lang.String = { /* compiled code */ }
+}
diff --git a/test/files/scalap/caseObject.scala b/test/files/scalap/caseObject.scala
new file mode 100644
index 0000000..809341a
--- /dev/null
+++ b/test/files/scalap/caseObject.scala
@@ -0,0 +1,3 @@
+case object CaseObject {
+    def bar = 239
+}
diff --git a/test/files/scalap/caseObject/A.scala b/test/files/scalap/caseObject/A.scala
deleted file mode 100644
index 6a3ff10..0000000
--- a/test/files/scalap/caseObject/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-case object CaseObject {
-    def bar = 239 
-}
diff --git a/test/files/scalap/caseObject/result.test b/test/files/scalap/caseObject/result.test
deleted file mode 100644
index 867a4b2..0000000
--- a/test/files/scalap/caseObject/result.test
+++ /dev/null
@@ -1,10 +0,0 @@
-case object CaseObject extends scala.AnyRef with scala.Product with scala.Serializable {
-  def bar : scala.Int = { /* compiled code */ }
-  override def productPrefix : java.lang.String = { /* compiled code */ }
-  def productArity : scala.Int = { /* compiled code */ }
-  def productElement(x$1 : scala.Int) : scala.Any = { /* compiled code */ }
-  override def productIterator : scala.collection.Iterator[scala.Any] = { /* compiled code */ }
-  def canEqual(x$1 : scala.Any) : scala.Boolean = { /* compiled code */ }
-  override def hashCode() : scala.Int = { /* compiled code */ }
-  override def toString() : java.lang.String = { /* compiled code */ }
-}
diff --git a/test/files/scalap/cbnParam.check b/test/files/scalap/cbnParam.check
new file mode 100644
index 0000000..abe0129
--- /dev/null
+++ b/test/files/scalap/cbnParam.check
@@ -0,0 +1,3 @@
+class CbnParam extends scala.AnyRef {
+  def this(s: => scala.Predef.String) = { /* compiled code */ }
+}
diff --git a/test/files/scalap/cbnParam.scala b/test/files/scalap/cbnParam.scala
new file mode 100644
index 0000000..978a718
--- /dev/null
+++ b/test/files/scalap/cbnParam.scala
@@ -0,0 +1 @@
+class CbnParam(s: => String)
diff --git a/test/files/scalap/cbnParam/A.scala b/test/files/scalap/cbnParam/A.scala
deleted file mode 100644
index 2f366df..0000000
--- a/test/files/scalap/cbnParam/A.scala
+++ /dev/null
@@ -1 +0,0 @@
-class CbnParam(s: => String) 
diff --git a/test/files/scalap/cbnParam/result.test b/test/files/scalap/cbnParam/result.test
deleted file mode 100644
index 52ecb6a..0000000
--- a/test/files/scalap/cbnParam/result.test
+++ /dev/null
@@ -1,3 +0,0 @@
-class CbnParam extends scala.AnyRef {
-  def this(s : => scala.Predef.String) = { /* compiled code */ }
-}
diff --git a/test/files/scalap/classPrivate.check b/test/files/scalap/classPrivate.check
new file mode 100644
index 0000000..cf0ffe0
--- /dev/null
+++ b/test/files/scalap/classPrivate.check
@@ -0,0 +1,10 @@
+class ClassPrivate extends scala.AnyRef {
+  def this() = { /* compiled code */ }
+  def baz: scala.Int = { /* compiled code */ }
+  class Outer extends scala.AnyRef {
+    def this() = { /* compiled code */ }
+    private[ClassPrivate] def qux: scala.Int = { /* compiled code */ }
+  }
+  protected def quux: scala.Int = { /* compiled code */ }
+  private[ClassPrivate] def bar: scala.Int = { /* compiled code */ }
+}
diff --git a/test/files/scalap/classPrivate/A.scala b/test/files/scalap/classPrivate.scala
similarity index 100%
rename from test/files/scalap/classPrivate/A.scala
rename to test/files/scalap/classPrivate.scala
diff --git a/test/files/scalap/classPrivate/result.test b/test/files/scalap/classPrivate/result.test
deleted file mode 100644
index ab2d40c..0000000
--- a/test/files/scalap/classPrivate/result.test
+++ /dev/null
@@ -1,10 +0,0 @@
-class ClassPrivate extends scala.AnyRef {
-  def this() = { /* compiled code */ }
-  def baz : scala.Int = { /* compiled code */ }
-  class Outer extends scala.AnyRef {
-    def this() = { /* compiled code */ }
-    private[ClassPrivate] def qux : scala.Int = { /* compiled code */ }
-  }
-  protected def quux : scala.Int = { /* compiled code */ }
-  private[ClassPrivate] def bar : scala.Int = { /* compiled code */ }
-}
diff --git a/test/files/scalap/classWithExistential.check b/test/files/scalap/classWithExistential.check
new file mode 100644
index 0000000..7df6bfb
--- /dev/null
+++ b/test/files/scalap/classWithExistential.check
@@ -0,0 +1,4 @@
+class ClassWithExistential extends scala.AnyRef {
+  def this() = { /* compiled code */ }
+  def foo[A, B]: scala.Function1[A, B forSome {type A <: scala.Seq[scala.Int]; type B >: scala.Predef.String}] = { /* compiled code */ }
+}
diff --git a/test/files/scalap/classWithExistential/A.scala b/test/files/scalap/classWithExistential.scala
similarity index 100%
rename from test/files/scalap/classWithExistential/A.scala
rename to test/files/scalap/classWithExistential.scala
diff --git a/test/files/scalap/classWithExistential/result.test b/test/files/scalap/classWithExistential/result.test
deleted file mode 100644
index caee3fd..0000000
--- a/test/files/scalap/classWithExistential/result.test
+++ /dev/null
@@ -1,4 +0,0 @@
-class ClassWithExistential extends scala.AnyRef {
-  def this() = { /* compiled code */ }
-  def foo[A, B] : scala.Function1[A, B forSome {type A <: scala.Seq[scala.Int]; type B >: scala.Predef.String}] = { /* compiled code */ }
-}
diff --git a/test/files/scalap/classWithSelfAnnotation.check b/test/files/scalap/classWithSelfAnnotation.check
new file mode 100644
index 0000000..7a1c206
--- /dev/null
+++ b/test/files/scalap/classWithSelfAnnotation.check
@@ -0,0 +1,5 @@
+class ClassWithSelfAnnotation extends scala.AnyRef {
+ this: ClassWithSelfAnnotation with java.lang.CharSequence =>
+  def this() = { /* compiled code */ }
+  def foo: scala.Int = { /* compiled code */ }
+}
diff --git a/test/files/scalap/classWithSelfAnnotation/A.scala b/test/files/scalap/classWithSelfAnnotation.scala
similarity index 100%
rename from test/files/scalap/classWithSelfAnnotation/A.scala
rename to test/files/scalap/classWithSelfAnnotation.scala
diff --git a/test/files/scalap/classWithSelfAnnotation/result.test b/test/files/scalap/classWithSelfAnnotation/result.test
deleted file mode 100644
index 82bbd9e..0000000
--- a/test/files/scalap/classWithSelfAnnotation/result.test
+++ /dev/null
@@ -1,5 +0,0 @@
-class ClassWithSelfAnnotation extends scala.AnyRef {
- this : ClassWithSelfAnnotation with java.lang.CharSequence =>
-  def this() = { /* compiled code */ }
-  def foo : scala.Int = { /* compiled code */ }
-}
diff --git a/test/files/scalap/covariantParam.check b/test/files/scalap/covariantParam.check
new file mode 100644
index 0000000..85b1400
--- /dev/null
+++ b/test/files/scalap/covariantParam.check
@@ -0,0 +1,4 @@
+class CovariantParam[+A] extends scala.AnyRef {
+  def this() = { /* compiled code */ }
+  def foo[A](a: A): scala.Int = { /* compiled code */ }
+}
diff --git a/test/files/scalap/covariantParam/A.scala b/test/files/scalap/covariantParam.scala
similarity index 100%
rename from test/files/scalap/covariantParam/A.scala
rename to test/files/scalap/covariantParam.scala
diff --git a/test/files/scalap/covariantParam/result.test b/test/files/scalap/covariantParam/result.test
deleted file mode 100644
index f7a3c98..0000000
--- a/test/files/scalap/covariantParam/result.test
+++ /dev/null
@@ -1,4 +0,0 @@
-class CovariantParam[+A] extends scala.AnyRef {
-  def this() = { /* compiled code */ }
-  def foo[A](a : A) : scala.Int = { /* compiled code */ }
-}
diff --git a/test/files/scalap/defaultParameter.check b/test/files/scalap/defaultParameter.check
new file mode 100644
index 0000000..4e244d1
--- /dev/null
+++ b/test/files/scalap/defaultParameter.check
@@ -0,0 +1,3 @@
+trait DefaultParameter extends scala.AnyRef {
+  def foo(s: scala.Predef.String): scala.Unit
+}
diff --git a/test/files/scalap/defaultParameter/A.scala b/test/files/scalap/defaultParameter.scala
similarity index 100%
rename from test/files/scalap/defaultParameter/A.scala
rename to test/files/scalap/defaultParameter.scala
diff --git a/test/files/scalap/defaultParameter/result.test b/test/files/scalap/defaultParameter/result.test
deleted file mode 100644
index 0c775ea..0000000
--- a/test/files/scalap/defaultParameter/result.test
+++ /dev/null
@@ -1,3 +0,0 @@
-trait DefaultParameter extends scala.AnyRef {
-  def foo(s : scala.Predef.String) : scala.Unit
-}
diff --git a/test/files/scalap/implicitParam.check b/test/files/scalap/implicitParam.check
new file mode 100644
index 0000000..46e9956
--- /dev/null
+++ b/test/files/scalap/implicitParam.check
@@ -0,0 +1,4 @@
+class ImplicitParam extends scala.AnyRef {
+  def this() = { /* compiled code */ }
+  def foo(i: scala.Int)(implicit f: scala.Float, d: scala.Double): scala.Int = { /* compiled code */ }
+}
diff --git a/test/files/scalap/implicitParam/A.scala b/test/files/scalap/implicitParam.scala
similarity index 100%
rename from test/files/scalap/implicitParam/A.scala
rename to test/files/scalap/implicitParam.scala
diff --git a/test/files/scalap/implicitParam/result.test b/test/files/scalap/implicitParam/result.test
deleted file mode 100644
index a2cfd60..0000000
--- a/test/files/scalap/implicitParam/result.test
+++ /dev/null
@@ -1,4 +0,0 @@
-class ImplicitParam extends scala.AnyRef {
-  def this() = { /* compiled code */ }
-  def foo(i : scala.Int)(implicit f : scala.Float, d : scala.Double) : scala.Int = { /* compiled code */ }
-}
diff --git a/test/files/scalap/packageObject.check b/test/files/scalap/packageObject.check
new file mode 100644
index 0000000..d1d0bbf
--- /dev/null
+++ b/test/files/scalap/packageObject.check
@@ -0,0 +1,5 @@
+package object PackageObject extends scala.AnyRef {
+  def this() = { /* compiled code */ }
+  type A = scala.Predef.String
+  def foo(i: scala.Int): scala.Int = { /* compiled code */ }
+}
diff --git a/test/files/scalap/packageObject/A.scala b/test/files/scalap/packageObject.scala
similarity index 100%
rename from test/files/scalap/packageObject/A.scala
rename to test/files/scalap/packageObject.scala
diff --git a/test/files/scalap/packageObject/result.test b/test/files/scalap/packageObject/result.test
deleted file mode 100644
index 5732d92..0000000
--- a/test/files/scalap/packageObject/result.test
+++ /dev/null
@@ -1,5 +0,0 @@
-package object PackageObject extends scala.AnyRef {
-  def this() = { /* compiled code */ }
-  type A = scala.Predef.String
-  def foo(i : scala.Int) : scala.Int = { /* compiled code */ }
-}
diff --git a/test/files/scalap/paramClauses.check b/test/files/scalap/paramClauses.check
new file mode 100644
index 0000000..11c5e4b
--- /dev/null
+++ b/test/files/scalap/paramClauses.check
@@ -0,0 +1,4 @@
+class ParamClauses extends scala.AnyRef {
+  def this() = { /* compiled code */ }
+  def foo(i: scala.Int)(s: scala.Predef.String)(t: scala.Double): scala.Int = { /* compiled code */ }
+}
diff --git a/test/files/scalap/paramClauses/A.scala b/test/files/scalap/paramClauses.scala
similarity index 100%
rename from test/files/scalap/paramClauses/A.scala
rename to test/files/scalap/paramClauses.scala
diff --git a/test/files/scalap/paramClauses/result.test b/test/files/scalap/paramClauses/result.test
deleted file mode 100644
index 3a141e8..0000000
--- a/test/files/scalap/paramClauses/result.test
+++ /dev/null
@@ -1,4 +0,0 @@
-class ParamClauses extends scala.AnyRef {
-  def this() = { /* compiled code */ }
-  def foo(i : scala.Int)(s : scala.Predef.String)(t : scala.Double) : scala.Int = { /* compiled code */ }
-}
diff --git a/test/files/scalap/paramNames.check b/test/files/scalap/paramNames.check
new file mode 100644
index 0000000..836b3d0
--- /dev/null
+++ b/test/files/scalap/paramNames.check
@@ -0,0 +1,4 @@
+class ParamNames extends scala.AnyRef {
+  def this() = { /* compiled code */ }
+  def foo(s: => scala.Seq[scala.Int], s2: => scala.Seq[scala.Any]): scala.Unit = { /* compiled code */ }
+}
diff --git a/test/files/scalap/paramNames/A.scala b/test/files/scalap/paramNames.scala
similarity index 100%
rename from test/files/scalap/paramNames/A.scala
rename to test/files/scalap/paramNames.scala
diff --git a/test/files/scalap/paramNames/result.test b/test/files/scalap/paramNames/result.test
deleted file mode 100644
index 85e37f8..0000000
--- a/test/files/scalap/paramNames/result.test
+++ /dev/null
@@ -1,4 +0,0 @@
-class ParamNames extends scala.AnyRef {
-  def this() = { /* compiled code */ }
-  def foo(s : => scala.Seq[scala.Int], s2 : => scala.Seq[scala.Any]) : scala.Unit = { /* compiled code */ }
-}
diff --git a/test/files/scalap/sequenceParam.check b/test/files/scalap/sequenceParam.check
new file mode 100644
index 0000000..f7bf83f
--- /dev/null
+++ b/test/files/scalap/sequenceParam.check
@@ -0,0 +1,3 @@
+class SequenceParam extends scala.AnyRef {
+  def this(s: scala.Predef.String, i: scala.Int*) = { /* compiled code */ }
+}
diff --git a/test/files/scalap/sequenceParam/A.scala b/test/files/scalap/sequenceParam.scala
similarity index 100%
rename from test/files/scalap/sequenceParam/A.scala
rename to test/files/scalap/sequenceParam.scala
diff --git a/test/files/scalap/sequenceParam/result.test b/test/files/scalap/sequenceParam/result.test
deleted file mode 100644
index 142d92f..0000000
--- a/test/files/scalap/sequenceParam/result.test
+++ /dev/null
@@ -1,3 +0,0 @@
-class SequenceParam extends scala.AnyRef {
-  def this(s : scala.Predef.String, i : scala.Int*) = { /* compiled code */ }
-}
diff --git a/test/files/scalap/simpleClass.check b/test/files/scalap/simpleClass.check
new file mode 100644
index 0000000..4675cbf
--- /dev/null
+++ b/test/files/scalap/simpleClass.check
@@ -0,0 +1,4 @@
+class SimpleClass extends scala.AnyRef {
+  def this() = { /* compiled code */ }
+  def foo: scala.Int = { /* compiled code */ }
+}
diff --git a/test/files/scalap/simpleClass/A.scala b/test/files/scalap/simpleClass.scala
similarity index 100%
rename from test/files/scalap/simpleClass/A.scala
rename to test/files/scalap/simpleClass.scala
diff --git a/test/files/scalap/simpleClass/result.test b/test/files/scalap/simpleClass/result.test
deleted file mode 100644
index 4fdf25d..0000000
--- a/test/files/scalap/simpleClass/result.test
+++ /dev/null
@@ -1,4 +0,0 @@
-class SimpleClass extends scala.AnyRef {
-  def this() = { /* compiled code */ }
-  def foo : scala.Int = { /* compiled code */ }
-}
diff --git a/test/files/scalap/traitObject.check b/test/files/scalap/traitObject.check
new file mode 100644
index 0000000..f7ae4fd
--- /dev/null
+++ b/test/files/scalap/traitObject.check
@@ -0,0 +1,8 @@
+trait TraitObject extends scala.AnyRef {
+  def $init$(): scala.Unit = { /* compiled code */ }
+  def foo: scala.Int = { /* compiled code */ }
+}
+object TraitObject extends scala.AnyRef {
+  def this() = { /* compiled code */ }
+  def bar: scala.Int = { /* compiled code */ }
+}
diff --git a/test/files/scalap/traitObject/A.scala b/test/files/scalap/traitObject.scala
similarity index 100%
rename from test/files/scalap/traitObject/A.scala
rename to test/files/scalap/traitObject.scala
diff --git a/test/files/scalap/traitObject/result.test b/test/files/scalap/traitObject/result.test
deleted file mode 100644
index 104ba14..0000000
--- a/test/files/scalap/traitObject/result.test
+++ /dev/null
@@ -1,8 +0,0 @@
-trait TraitObject extends scala.AnyRef {
-  def $init$() : scala.Unit = { /* compiled code */ }
-  def foo : scala.Int = { /* compiled code */ }
-}
-object TraitObject extends scala.AnyRef {
-  def this() = { /* compiled code */ }
-  def bar : scala.Int = { /* compiled code */ }
-}
diff --git a/test/files/scalap/typeAnnotations.check b/test/files/scalap/typeAnnotations.check
new file mode 100644
index 0000000..cba69f8
--- /dev/null
+++ b/test/files/scalap/typeAnnotations.check
@@ -0,0 +1,8 @@
+abstract class TypeAnnotations[@scala.specialized R] extends scala.AnyRef {
+  def this() = { /* compiled code */ }
+  @scala.specialized
+  val x: scala.Int = { /* compiled code */ }
+  @scala.specialized
+  type T
+  def compose[@scala.specialized A](x: A, y: R): A = { /* compiled code */ }
+}
diff --git a/test/files/scalap/typeAnnotations/A.scala b/test/files/scalap/typeAnnotations.scala
similarity index 100%
rename from test/files/scalap/typeAnnotations/A.scala
rename to test/files/scalap/typeAnnotations.scala
diff --git a/test/files/scalap/typeAnnotations/result.test b/test/files/scalap/typeAnnotations/result.test
deleted file mode 100644
index 407b023..0000000
--- a/test/files/scalap/typeAnnotations/result.test
+++ /dev/null
@@ -1,8 +0,0 @@
-abstract class TypeAnnotations[@scala.specialized R] extends scala.AnyRef {
-  def this() = { /* compiled code */ }
-  @scala.specialized
-  val x : scala.Int = { /* compiled code */ }
-  @scala.specialized
-  type T
-  def compose[@scala.specialized A](x : A, y : R) : A = { /* compiled code */ }
-}
diff --git a/test/files/scalap/valAndVar.check b/test/files/scalap/valAndVar.check
new file mode 100644
index 0000000..98eae51
--- /dev/null
+++ b/test/files/scalap/valAndVar.check
@@ -0,0 +1,5 @@
+class ValAndVar extends scala.AnyRef {
+  def this() = { /* compiled code */ }
+  val foo: java.lang.String = { /* compiled code */ }
+  var bar: scala.Int = { /* compiled code */ }
+}
diff --git a/test/files/scalap/valAndVar/A.scala b/test/files/scalap/valAndVar.scala
similarity index 100%
rename from test/files/scalap/valAndVar/A.scala
rename to test/files/scalap/valAndVar.scala
diff --git a/test/files/scalap/valAndVar/result.test b/test/files/scalap/valAndVar/result.test
deleted file mode 100644
index e940da9..0000000
--- a/test/files/scalap/valAndVar/result.test
+++ /dev/null
@@ -1,5 +0,0 @@
-class ValAndVar extends scala.AnyRef {
-  def this() = { /* compiled code */ }
-  val foo : java.lang.String = { /* compiled code */ }
-  var bar : scala.Int = { /* compiled code */ }
-}
diff --git a/test/files/scalap/wildcardType.check b/test/files/scalap/wildcardType.check
new file mode 100644
index 0000000..6ea6966
--- /dev/null
+++ b/test/files/scalap/wildcardType.check
@@ -0,0 +1,3 @@
+class WildcardType extends scala.AnyRef {
+  def this(f: scala.Function1[scala.Int, _]) = { /* compiled code */ }
+}
diff --git a/test/files/scalap/wildcardType/A.scala b/test/files/scalap/wildcardType.scala
similarity index 100%
rename from test/files/scalap/wildcardType/A.scala
rename to test/files/scalap/wildcardType.scala
diff --git a/test/files/scalap/wildcardType/result.test b/test/files/scalap/wildcardType/result.test
deleted file mode 100644
index e43261d..0000000
--- a/test/files/scalap/wildcardType/result.test
+++ /dev/null
@@ -1,3 +0,0 @@
-class WildcardType extends scala.AnyRef {
-  def this(f : scala.Function1[scala.Int, _]) = { /* compiled code */ }
-}
diff --git a/test/files/specialized/SI-7343.scala b/test/files/specialized/SI-7343.scala
index 5ee6830..8d14a2c 100644
--- a/test/files/specialized/SI-7343.scala
+++ b/test/files/specialized/SI-7343.scala
@@ -1,4 +1,4 @@
-class Parent[@specialized(Int) T] 
+class Parent[@specialized(Int) T]
 
 object Test extends App {
 
diff --git a/test/files/specialized/SI-7344.scala b/test/files/specialized/SI-7344.scala
index 1040460..624adb6 100644
--- a/test/files/specialized/SI-7344.scala
+++ b/test/files/specialized/SI-7344.scala
@@ -1,13 +1,13 @@
 /* Test for SI-7344, where specialized methods inside the bodies of other
  * methods are not specialized, although they might as well be. The name
- * for the specialized method should not be different depending on the 
+ * for the specialized method should not be different depending on the
  * outside method/class' specialization. */
 
 class Test[@specialized(Int, Double) X](val x: X) {
 
   def checkSpecialization[Y](@specialized(Int, Double) y: Y): X = {
 
-    // checking the specialization using the method name, which we can 
+    // checking the specialization using the method name, which we can
     // extract from an exception's stack trace. We can match just the
     // prefix, since the compiler will add a suffix to the method name
     // during lambdalift, when it lifts the local methods outside.
@@ -33,7 +33,7 @@ object Test extends App {
   val t2 = new Test(123)
   val t3 = new Test(1.3)
 
-  // we want specialization to rewire these, 
+  // we want specialization to rewire these,
   // that's why they're not in a for loop:
   t1.checkSpecialization("x")
 
@@ -49,5 +49,5 @@ object Test extends App {
   // t2.checkSpecialization(1.3)
   // t3.checkSpecialization("x")
   // t3.checkSpecialization(123)
-  // t3.checkSpecialization(1.3)  
+  // t3.checkSpecialization(1.3)
 }
diff --git a/test/files/specialized/arrays-traits.scala b/test/files/specialized/arrays-traits.scala
index 34a1c37..9183d7a 100644
--- a/test/files/specialized/arrays-traits.scala
+++ b/test/files/specialized/arrays-traits.scala
@@ -23,7 +23,7 @@ object Test {
     (new BaseS(new Array[String](1)): SuperS[String]).bar(new Array[String](1))
     println(arrayApplyCount)
     println(arrayUpdateCount)
-    
+
     (new BaseG(new Array[String](1)): SuperG[String]).foo
     println(arrayApplyCount)
     (new BaseG(new Array[String](1)): SuperG[String]).bar(new Array[String](1))
diff --git a/test/files/specialized/arrays.scala b/test/files/specialized/arrays.scala
index c946a8f..505f482 100644
--- a/test/files/specialized/arrays.scala
+++ b/test/files/specialized/arrays.scala
@@ -15,23 +15,23 @@ class Spec[@specialized(AnyRef) T](a: Array[T]) {
 
 
 object Test {
-  
+
   def main(args: Array[String]) {
     val len = 50
-    
+
     testSpec(new Array[String](len))
     println(arrayApplyCount)
-    
+
     (new Spec(new Array[String](len)))()
     println(arrayApplyCount)
-    
+
     testGeneric(new Array[String](len))
     println(arrayApplyCount)
-    
+
     (new Generic(new Array[String](len)))()
     println(arrayApplyCount)
   }
-  
+
   def testGeneric[T](a: Array[T]) = {
     var i = 0
     var sum = 0
@@ -41,7 +41,7 @@ object Test {
     }
     sum
   }
-  
+
   def testSpec[@specialized(AnyRef) T](a: Array[T]) = {
     var i = 0
     var sum = 0
@@ -51,5 +51,5 @@ object Test {
     }
     sum
   }
-  
+
 }
diff --git a/test/files/specialized/constant_lambda.check b/test/files/specialized/constant_lambda.check
new file mode 100644
index 0000000..4b095fd
--- /dev/null
+++ b/test/files/specialized/constant_lambda.check
@@ -0,0 +1,2 @@
+false
+false
diff --git a/test/files/specialized/constant_lambda.scala b/test/files/specialized/constant_lambda.scala
new file mode 100644
index 0000000..bb9a974
--- /dev/null
+++ b/test/files/specialized/constant_lambda.scala
@@ -0,0 +1,16 @@
+// during development of late delambdafying there was a problem where
+// specialization would undo some of the work done in uncurry if the body of the
+// lambda had a constant type. That would result in a compiler crash when
+// the delambdafy phase got a tree shape it didn't understand
+class X[@specialized(Int) A] {
+  val f = { x: A => false }
+}
+
+object Test {
+  def main(args: Array[String]) {
+    val xInt = new X[Int]
+    println(xInt.f(42))
+    val xString = new X[String]
+    println(xString.f("hello"))
+  }
+}
\ No newline at end of file
diff --git a/test/files/specialized/fft.scala b/test/files/specialized/fft.scala
index 62a6a2a..7602983 100644
--- a/test/files/specialized/fft.scala
+++ b/test/files/specialized/fft.scala
@@ -1,13 +1,13 @@
 
 /*
  * http://local.wasp.uwa.edu.au/~pbourke/miscellaneous/dft/
-   Modification of Paul Bourkes FFT code by Peter Cusack 
+   Modification of Paul Bourkes FFT code by Peter Cusack
    to utilise the Microsoft complex type.
 
-   This computes an in-place complex-to-complex FFT 
+   This computes an in-place complex-to-complex FFT
    x and y are the real and imaginary arrays of 2^m points.
    dir =  1 gives forward transform
-   dir = -1 gives reverse transform 
+   dir = -1 gives reverse transform
 */
 
 import Math.{sqrt, pow}
@@ -24,19 +24,19 @@ object Test  {
     x(j) = tmp
   }
 
-  def times(x: Complex, y: Complex): Complex = 
+  def times(x: Complex, y: Complex): Complex =
     (x._1 * y._1 - x._2 * y._2, x._1 * y._2 + x._2 * y._1)
-    
+
   def div(x: Complex, y: Complex): Complex = {
     val num = pow(y._1, 2) + pow(y._2, 2)
     ((x._1 * y._1 + x._2 * y._2)/num,
      (x._2 * y._1 - x._1 * y._2)/num)
   }
 
-  def div(x: Complex, y: Long) = 
+  def div(x: Complex, y: Long) =
     (x._1 / y, x._2 / y)
 
-  def add(x: Complex, y: Complex) = 
+  def add(x: Complex, y: Complex) =
     (x._1 + y._1, x._2 + y._2)
 
   def minus(x: Complex, y: Complex) =
@@ -49,8 +49,8 @@ object Test  {
 
    /*Calculate the number of points */
    n = 1
-   for (i <- 0l until m) 
-      n <<= 1   
+   for (i <- 0l until m)
+      n <<= 1
 
    /* Do the bit reversal */
    i2 = n >> 1
@@ -86,7 +86,7 @@ object Test  {
        for (i <- j.until(n, l2)) {
          i1 = i + l1;
          t1 = times(u, x(i1.toInt))
-         x(i1.toInt) = minus(x(i.toInt), t1) 
+         x(i1.toInt) = minus(x(i.toInt), t1)
          x(i.toInt) = add(x(i.toInt), t1)
        }
 
@@ -97,7 +97,7 @@ object Test  {
      c = (c._1, sqrt( (1.0 - c._1) / 2.0 ))
      // if (dir == 1)
      //    c.imag(-c.imag());
-     if (dir == 1) 
+     if (dir == 1)
        c = (c._1, -c._2)
 
       // c.real(sqrt((1.0 + c.real()) / 2.0));
@@ -107,8 +107,8 @@ object Test  {
    /* Scaling for forward transform */
    if (dir == 1) {
      for (i <- 0l until n)
-       x(i.toInt) = div(x(i.toInt), n)      
-   }   
+       x(i.toInt) = div(x(i.toInt), n)
+   }
   }
 
   def run() {
diff --git a/test/files/specialized/spec-ame.scala b/test/files/specialized/spec-ame.scala
index 129fb9f..017d5df 100644
--- a/test/files/specialized/spec-ame.scala
+++ b/test/files/specialized/spec-ame.scala
@@ -1,13 +1,13 @@
 // ticket #3432
 object Test {
   trait B[@specialized(Int) T] {
-    def value: T 
+    def value: T
   }
 
-  class A[@specialized(Int) T](x: T) { 
-    def foo: B[T] = new B[T] { 
-      def value = x 
-    } 
+  class A[@specialized(Int) T](x: T) {
+    def foo: B[T] = new B[T] {
+      def value = x
+    }
   }
 
   def main(args: Array[String]) {
diff --git a/test/files/specialized/spec-hlists.scala b/test/files/specialized/spec-hlists.scala
index 8c4ac8f..82e3bf7 100644
--- a/test/files/specialized/spec-hlists.scala
+++ b/test/files/specialized/spec-hlists.scala
@@ -4,7 +4,7 @@
 
 sealed trait HList {
   type Self <: HList
-  
+
   type |: [E] = HCons[E, Self]
 
   final def |: [@specialized E](elem: E): |: [E] = new HCons[E, Self](elem, this.asInstanceOf[Self])
@@ -23,7 +23,7 @@ final object HNil extends HList {
 object Test extends App {
   val l1 = new HCons(42, "foo" |: HNil)
   println(l1.getClass)
-  
+
   val l2 = 42 |: "abc" |: HNil
   println(l2.getClass)
 }
diff --git a/test/files/specialized/spec-init.scala b/test/files/specialized/spec-init.scala
index 5f27062..94cd0ee 100644
--- a/test/files/specialized/spec-init.scala
+++ b/test/files/specialized/spec-init.scala
@@ -36,7 +36,7 @@ object Test {
     (new TouchGlobal(new Object))
     Global.msg = "ok" // reset the value
     (new TouchGlobal(42))
-    
+
     println(runtime.BoxesRunTime.integerBoxCount)
   }
 }
diff --git a/test/files/specialized/spec-matrix-old.scala b/test/files/specialized/spec-matrix-old.scala
index 98735c8..4e3d3f1 100644
--- a/test/files/specialized/spec-matrix-old.scala
+++ b/test/files/specialized/spec-matrix-old.scala
@@ -1,9 +1,10 @@
 /** Test matrix multiplication with specialization.
  */
 
+@deprecated("Suppress warnings", since="2.11")
 class Matrix[@specialized A: ClassManifest](val rows: Int, val cols: Int) {
   private val arr: Array[Array[A]] = Array.ofDim[A](rows, cols)
-  
+
   def apply(i: Int, j: Int): A = {
     if (i < 0 || i >= rows || j < 0 || j >= cols)
       throw new NoSuchElementException("Indexes out of bounds: " + (i, j))
@@ -25,11 +26,12 @@ class Matrix[@specialized A: ClassManifest](val rows: Int, val cols: Int) {
   }
 }
 
+@deprecated("Suppress warnings", since="2.11")
 object Test {
   def main(args: Array[String]) {
     val m = randomMatrix(200, 100)
     val n = randomMatrix(100, 200)
-    
+
     val p = mult(m, n)
     println(p(0, 0))
     println("Boxed doubles: " + runtime.BoxesRunTime.doubleBoxCount)
@@ -38,7 +40,7 @@ object Test {
 
   def randomMatrix(n: Int, m: Int) = {
     val r = new util.Random(10)
-    val x = new Matrix[Double](n, m) 
+    val x = new Matrix[Double](n, m)
     for (i <- 0 until n; j <- 0 until m)
       x(i, j) = (r.nextInt % 1000).toDouble
     x
@@ -46,7 +48,7 @@ object Test {
 
   def printMatrix[Double](m: Matrix[Double]) {
     for (i <- 0 until m.rows) {
-      for (j <- 0 until m.cols) 
+      for (j <- 0 until m.cols)
         print("%5.3f ".format(m(i, j)))
       println
     }
@@ -56,7 +58,7 @@ object Test {
     val p = new Matrix[T](m.rows, n.cols)
     import num._
 
-    for (i <- 0 until m.rows) 
+    for (i <- 0 until m.rows)
       for (j <- 0 until n.cols) {
         var sum = num.zero
         for (k <- 0 until n.rows)
@@ -68,7 +70,7 @@ object Test {
   def mult(m: Matrix[Double], n: Matrix[Double]) = {
     val p = new Matrix[Double](m.rows, n.cols)
 
-    for (i <- 0 until m.rows) 
+    for (i <- 0 until m.rows)
       for (j <- 0 until n.cols) {
         var sum = 0.0
         for (k <- 0 until n.rows)
diff --git a/test/files/specialized/spec-overrides.scala b/test/files/specialized/spec-overrides.scala
index 90dd5a4..994dd9d 100644
--- a/test/files/specialized/spec-overrides.scala
+++ b/test/files/specialized/spec-overrides.scala
@@ -17,6 +17,6 @@ object Test extends App {
   assert(d2.default == 1.0, d2.default)
   assert((d2: Base[_]).default == 1.0, (d2: Base[_]).default)
   assert((d2: D1).default == 1.0, (d2: D1).default)
-  
+
   println(runtime.BoxesRunTime.integerBoxCount)
 }
diff --git a/test/files/specialized/spec-patmatch.scala b/test/files/specialized/spec-patmatch.scala
index 979d143..9096294 100644
--- a/test/files/specialized/spec-patmatch.scala
+++ b/test/files/specialized/spec-patmatch.scala
@@ -46,7 +46,7 @@ object Test {
     (new Foo).test(42.0)
     (new Foo).test(42.0f)
     (new Foo).test(new Object)
-    
+
     println(runtime.BoxesRunTime.integerBoxCount)
   }
 
diff --git a/test/files/specialized/spec-super.check b/test/files/specialized/spec-super.check
index 4be83ca..2f4d600 100644
--- a/test/files/specialized/spec-super.check
+++ b/test/files/specialized/spec-super.check
@@ -1,3 +1,6 @@
+spec-super.scala:18: warning: class Base must be a trait. Specialized version of class Extended will inherit generic Base[Int]
+class Extended [@specialized(Int) T](t: T) extends Base[T](t) {
+      ^
 s
 1
-2
\ No newline at end of file
+2
diff --git a/test/files/specialized/spec-super.scala b/test/files/specialized/spec-super.scala
index a77f65b..056a771 100644
--- a/test/files/specialized/spec-super.scala
+++ b/test/files/specialized/spec-super.scala
@@ -7,13 +7,13 @@ object Test {
 
     val i = new Extended(1)
     println(i.foo) //infinite loop with StackOverflowError
-    
+
     println(runtime.BoxesRunTime.integerBoxCount)
   }
 }
 
-class Base[@specialized(Int) T](val t: T) { 
-  def foo() :T = t 
+class Base[@specialized(Int) T](val t: T) {
+  def foo() :T = t
 }
 class Extended [@specialized(Int) T](t: T) extends Base[T](t) {
   override def foo() :T = super.foo
diff --git a/test/files/specialized/spec-t3896.scala b/test/files/specialized/spec-t3896.scala
index 605ed0d..3c84903 100644
--- a/test/files/specialized/spec-t3896.scala
+++ b/test/files/specialized/spec-t3896.scala
@@ -1,7 +1,7 @@
 // see ticket #3896. Tests interaction between overloading, specialization and default params
 trait Atomic[@specialized(Boolean) T] {
   def x: T
-  
+
   // crash depends on the overloading: if second method is "g", no crash.
   def f(fn: T => T): Boolean = f(fn(x))
   def f[R](a: T, b: R = true) = b
@@ -12,9 +12,9 @@ object Test {
   def main(args: Array[String]): Unit = {
     val e = new AtomicBoolean(false)
     val x = e.f( (a : Boolean) => !a ) // ok
-    println( e.f( (a : Boolean) => !a ) toString ) // ok
+    println( e.f( (a : Boolean) => !a ).toString ) // ok
     println( e.f( (a : Boolean) => !a) ) // compiler crash
-    
+
     println(runtime.BoxesRunTime.integerBoxCount)
   }
 }
diff --git a/test/files/specialized/tb3651.check b/test/files/specialized/tb3651.check
index c227083..8a3f686 100644
--- a/test/files/specialized/tb3651.check
+++ b/test/files/specialized/tb3651.check
@@ -1 +1,4 @@
-0
\ No newline at end of file
+tb3651.scala:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    lk.a
+       ^
+0
diff --git a/test/files/specialized/tc3651.check b/test/files/specialized/tc3651.check
index c227083..e2dbadf 100644
--- a/test/files/specialized/tc3651.check
+++ b/test/files/specialized/tc3651.check
@@ -1 +1,4 @@
-0
\ No newline at end of file
+tc3651.scala:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    lk.a
+       ^
+0
diff --git a/test/files/specialized/td3651.check b/test/files/specialized/td3651.check
index 9aea9e0..1a709fd 100644
--- a/test/files/specialized/td3651.check
+++ b/test/files/specialized/td3651.check
@@ -1,2 +1,8 @@
+td3651.scala:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    b.a
+      ^
+td3651.scala:16: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+    der.a
+        ^
+0
 0
-0
\ No newline at end of file
diff --git a/test/files/specialized/td3651.scala b/test/files/specialized/td3651.scala
index 19a4ab6..117710b 100644
--- a/test/files/specialized/td3651.scala
+++ b/test/files/specialized/td3651.scala
@@ -11,7 +11,7 @@ object Test {
     val b: Base[Double] = new Derived(10)
     b.a
     println(runtime.BoxesRunTime.doubleBoxCount)
-    
+
     val der = new Derived(10)
     der.a
     println(runtime.BoxesRunTime.doubleBoxCount)
diff --git a/test/flaky/pos/t2868.cmds b/test/flaky/pos/t2868.cmds
deleted file mode 100644
index ed8124a..0000000
--- a/test/flaky/pos/t2868.cmds
+++ /dev/null
@@ -1,3 +0,0 @@
-javac Jann.java Nest.java
-scalac pick_1.scala
-scalac test_2.scala
diff --git a/test/junit/scala/collection/ArraySortingTest.scala b/test/junit/scala/collection/ArraySortingTest.scala
new file mode 100644
index 0000000..4e54b39
--- /dev/null
+++ b/test/junit/scala/collection/ArraySortingTest.scala
@@ -0,0 +1,29 @@
+package scala.collection.mutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+
+/* Test for SI-7837: sorting with an explicit Ordering must agree with java.util.Arrays.sort and must not rely on the elements' equals. */
+@RunWith(classOf[JUnit4])
+class ArraySortingTest {
+  
+  class CantSortMe(val i: Int) {
+    override def equals(a: Any) = throw new IllegalArgumentException("I cannot be equalled!")
+  }
+  
+  object CanOrder extends Ordering[CantSortMe] {
+    def compare(a: CantSortMe, b: CantSortMe) = a.i compare b.i
+  }
+  
+  // Tests SI-7837
+  @Test
+  def sortByTest() {
+    val test = Array(1,2,3,4,1,3,5,7,1,4,8,1,1,1,1)
+    val cant = test.map(i => new CantSortMe(i))
+    java.util.Arrays.sort(test)
+    scala.util.Sorting.quickSort(cant)(CanOrder)
+    assert( test(6) == 1 )
+    assert( (test,cant).zipped.forall(_ == _.i) )
+  }
+}
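As a standalone illustration of the SI-7837 behaviour the test above locks in (not part of the patch; the Opaque class and ByWrappedInt ordering are made up for the example), quickSort with an explicit Ordering is expected to sort without ever invoking the elements' equals:

object QuickSortOrderingSketch extends App {
  // Element type whose equals always throws; per the test above, sorting must
  // rely only on the supplied Ordering and never compare elements for equality.
  final class Opaque(val i: Int) {
    override def equals(other: Any): Boolean =
      throw new IllegalArgumentException("equals must not be called while sorting")
  }
  object ByWrappedInt extends Ordering[Opaque] {
    def compare(a: Opaque, b: Opaque): Int = a.i compare b.i
  }

  val data = Array(3, 1, 2).map(i => new Opaque(i))
  scala.util.Sorting.quickSort(data)(ByWrappedInt)
  println(data.map(_.i).mkString(","))  // expected: 1,2,3
}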
diff --git a/test/junit/scala/collection/NumericRangeTest.scala b/test/junit/scala/collection/NumericRangeTest.scala
new file mode 100644
index 0000000..3980c31
--- /dev/null
+++ b/test/junit/scala/collection/NumericRangeTest.scala
@@ -0,0 +1,140 @@
+package scala.collection.immutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.math._
+import scala.util._
+
+/* Tests various ranges by making sure they all agree on the same answers. */
+@RunWith(classOf[JUnit4])
+class RangeConsistencyTest {
+  def r2nr[T: Integral](
+    r: Range, puff: T, stride: T, check: (T,T) => Boolean, bi: T => BigInt
+  ): List[(BigInt,Try[Int])] = {
+    val num = implicitly[Integral[T]]
+    import num._
+    val one = num.one
+    
+    if (!check(puff, fromInt(r.start))) return Nil
+    val start = puff * fromInt(r.start)
+    val sp1 = start + one
+    val sn1 = start - one
+      
+    if (!check(puff, fromInt(r.end))) return Nil
+    val end = puff * fromInt(r.end)
+    val ep1 = end + one
+    val en1 = end - one
+    
+    if (!check(stride, fromInt(r.step))) return Nil
+    val step = stride * fromInt(r.step)
+    
+    def NR(s: T, e: T, i: T) = {
+      val delta = (bi(e) - bi(s)).abs - (if (r.isInclusive) 0 else 1)
+      val n = if (r.length == 0) BigInt(0) else delta / bi(i).abs + 1
+      if (r.isInclusive) {
+        (n, Try(NumericRange.inclusive(s,e,i).length))
+      }
+      else {
+        (n, Try(NumericRange(s,e,i).length))
+      }
+    } 
+    
+    List(NR(start, end, step)) :::
+    (if (sn1 < start) List(NR(sn1, end, step)) else Nil) :::
+    (if (start < sp1) List(NR(sp1, end, step)) else Nil) :::
+    (if (en1 < end) List(NR(start, en1, step)) else Nil) :::
+    (if (end < ep1) List(NR(start, ep1, step)) else Nil)
+  }
+  
+  // Motivated by SI-4370: Wrong result for Long.MinValue to Long.MaxValue by Int.MaxValue
+  @Test
+  def rangeChurnTest() {
+    val rn = new Random(4370)
+    for (i <- 0 to 10000) { control.Breaks.breakable {
+      val start = rn.nextInt
+      val end = rn.nextInt
+      val step = rn.nextInt(4) match {
+        case 0 => 1
+        case 1 => -1
+        case 2 => (rn.nextInt(11)+2)*(2*rn.nextInt(2)+1)
+        case 3 => var x = rn.nextInt; while (x==0) x = rn.nextInt; x
+      }
+      val r = if (rn.nextBoolean) Range.inclusive(start, end, step) else Range(start, end, step)
+      
+      try { r.length }
+      catch { case iae: IllegalArgumentException => control.Breaks.break }
+      
+      val lpuff = rn.nextInt(4) match {
+        case 0 => 1L
+        case 1 => rn.nextInt(11)+2L
+        case 2 => 1L << rn.nextInt(60)
+        case 3 => math.max(1L, math.abs(rn.nextLong))
+      }
+      val lstride = rn.nextInt(4) match {
+        case 0 => lpuff
+        case 1 => 1L
+        case 2 => 1L << rn.nextInt(60)
+        case 3 => math.max(1L, math.abs(rn.nextLong))
+      }
+      val lr = r2nr[Long](
+        r, lpuff, lstride, 
+        (a,b) => { val x = BigInt(a)*BigInt(b); x.isValidLong },
+        x => BigInt(x)
+      )
+      
+      lr.foreach{ case (n,t) => assert(
+        t match {
+          case Failure(_) => n > Int.MaxValue
+          case Success(m) => n == m
+        },
+        (r.start, r.end, r.step, r.isInclusive, lpuff, lstride, n, t)
+      )}
+      
+      val bipuff = rn.nextInt(3) match {
+        case 0 => BigInt(1)
+        case 1 => BigInt(rn.nextLong) + Long.MaxValue + 2
+        case 2 => BigInt("1" + "0"*(rn.nextInt(100)+1))
+      }
+      val bistride = rn.nextInt(3) match {
+        case 0 => bipuff
+        case 1 => BigInt(1)
+        case 2 => BigInt("1" + "0"*(rn.nextInt(100)+1))
+      }
+      val bir = r2nr[BigInt](r, bipuff, bistride, (a,b) => true, identity)
+      
+      bir.foreach{ case (n,t) => assert(
+        t match {
+          case Failure(_) => n > Int.MaxValue
+          case Success(m) => n == m
+        },
+        (r.start, r.end, r.step, r.isInclusive, bipuff, bistride, n, t)
+      )}              
+    }}
+  }
+  
+  @Test
+  def testSI4370() { assert{
+    Try((Long.MinValue to Long.MaxValue by Int.MaxValue).length) match {
+      case Failure(iae: IllegalArgumentException) => true
+      case _ => false
+    }
+  }}
+  
+  @Test
+  def testSI6736() {
+    // These operations on overfull ranges should all succeed.
+    assert( (0 to Int.MaxValue).contains(4) )
+    assert( !((Int.MinValue to 0).contains(4)) )
+    assert( (Int.MinValue to 0).last == 0 )
+    assert( (Int.MinValue until 5).last == 4 )
+    assert( (-7 to -99 by -4).last == -99 && (-7 until -99 by -4).last == -95 )
+    assert( (Int.MinValue to 5) == (Int.MinValue until 6) )
+    assert( (-3 to Int.MaxValue).drop(4).length == Int.MaxValue )
+    assert( (-3 to Int.MaxValue).take(1234) == (-3 to 1230) )
+    assert( (-3 to Int.MaxValue).dropRight(4).length == Int.MaxValue )
+    assert( (-3 to Int.MaxValue).takeRight(1234).length == 1234 )
+    assert( (-3 to Int.MaxValue).dropWhile(_ <= 0).length == Int.MaxValue )
+    assert( (-3 to Int.MaxValue).span(_ <= 0) match { case (a,b) => a.length == 4 && b.length == Int.MaxValue } )
+  }
+}
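The testSI4370 case above can also be reproduced in isolation. This sketch (not part of the patch; the object name is made up) shows the overflow scenario that motivates the consistency checks: a range with more than Int.MaxValue elements cannot report a valid Int length, so asking for it is expected to fail with an IllegalArgumentException rather than silently wrap around:

object RangeOverflowSketch extends App {
  import scala.util.{Failure, Try}

  // Long.MinValue to Long.MaxValue by Int.MaxValue spans roughly 2^33 elements,
  // far more than an Int-valued length can represent.
  Try((Long.MinValue to Long.MaxValue by Int.MaxValue).length) match {
    case Failure(_: IllegalArgumentException) =>
      println("length overflow rejected, as the new test expects")
    case other =>
      println("unexpected result: " + other)
  }
}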
diff --git a/test/junit/scala/collection/PagedSeq.scala b/test/junit/scala/collection/PagedSeq.scala
new file mode 100644
index 0000000..5f83cf6
--- /dev/null
+++ b/test/junit/scala/collection/PagedSeq.scala
@@ -0,0 +1,16 @@
+package scala.collection.immutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import org.junit.Assert._
+
+/* Test for SI-6615  */
+@RunWith(classOf[JUnit4])
+class PagedSeqTest {
+  @Test
+  def rovingDoesNotNPE(): Unit = {
+    // should not NPE, and should equal the given Seq
+    assertEquals(Seq('a'), PagedSeq.fromStrings(List.fill(5000)("a")).slice(4096, 4097))
+  }
+}
diff --git a/test/junit/scala/collection/PriorityQueueTest.scala b/test/junit/scala/collection/PriorityQueueTest.scala
new file mode 100644
index 0000000..a14f1bf
--- /dev/null
+++ b/test/junit/scala/collection/PriorityQueueTest.scala
@@ -0,0 +1,32 @@
+package scala.collection.mutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.collection.mutable
+import java.io.{ObjectInputStream, ByteArrayInputStream, ByteArrayOutputStream, ObjectOutputStream}
+
+@RunWith(classOf[JUnit4])
+/* Test for SI-7568  */
+class PriorityQueueTest {
+  val priorityQueue = new mutable.PriorityQueue[Int]()
+  val elements = List.fill(1000)(scala.util.Random.nextInt(Int.MaxValue))
+  priorityQueue.enqueue(elements :_*)
+
+  @Test
+  def canSerialize() {
+    val outputStream = new ByteArrayOutputStream()
+    new ObjectOutputStream(outputStream).writeObject(priorityQueue)
+  }
+
+  @Test
+  def maintainsStateWhenDeserialized() {
+    val outputStream = new ByteArrayOutputStream()
+    new ObjectOutputStream(outputStream).writeObject(priorityQueue)
+    val bytes = outputStream.toByteArray
+
+    val objectInputStream = new ObjectInputStream(new ByteArrayInputStream(bytes))
+    val deserializedPriorityQueue = objectInputStream.readObject().asInstanceOf[PriorityQueue[Int]]
+    assert(deserializedPriorityQueue.dequeueAll == elements.sorted.reverse)
+  }
+}
diff --git a/test/junit/scala/collection/QueueTest.scala b/test/junit/scala/collection/QueueTest.scala
new file mode 100644
index 0000000..9a40d8f
--- /dev/null
+++ b/test/junit/scala/collection/QueueTest.scala
@@ -0,0 +1,28 @@
+package scala.collection.immutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+
+@RunWith(classOf[JUnit4])
+/* Tests for collection.immutable.Queue  */
+class QueueTest {
+  val emptyQueue = Queue.empty[Int]
+  val oneAdded = emptyQueue.enqueue(1)
+  val threeAdded = emptyQueue.enqueue(1 to 3)
+
+  @Test
+  def dequeueOptionOnEmpty() {
+    assert( emptyQueue.dequeueOption == None )
+  }
+
+  @Test
+  def dequeueOptionOneAdded() {
+    assert( oneAdded.dequeueOption == Some((1,emptyQueue)) )
+  }
+
+  @Test
+  def dequeueOptionThreeAdded() {
+    assert( threeAdded.dequeueOption == Some((1,Queue(2 to 3:_*))) )
+  }
+}
diff --git a/test/junit/scala/collection/SetMapConsistencyTest.scala b/test/junit/scala/collection/SetMapConsistencyTest.scala
new file mode 100644
index 0000000..eed6007
--- /dev/null
+++ b/test/junit/scala/collection/SetMapConsistencyTest.scala
@@ -0,0 +1,517 @@
+package scala.collection
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.collection.{mutable => cm, immutable => ci}
+import scala.collection.JavaConverters._
+
+/* Tests various maps by making sure they all agree on the same answers. */
+@RunWith(classOf[JUnit4])
+class SetMapConsistencyTest {
+  
+  trait MapBox[A] {
+    protected def oor(s: String, n: Int) = throw new IllegalArgumentException(s"Out of range for $s: $n")
+    def title: String
+    def adders: Int
+    def add(n: Int, a: A, v: Int): Unit
+    def subbers: Int
+    def sub(n: Int, a: A): Unit
+    def getters: Int
+    def get(n: Int, a: A): Int
+    def fiddlers: Int
+    def fiddle(n: Int): Unit
+    def keys: Iterator[A]
+    def has(a: A): Boolean
+  }
+  
+  
+  // Mutable map wrappers
+  
+  class BoxMutableMap[A, M <: cm.Map[A, Int]](m0: M, title0: String) extends MapBox[A] {
+    var m = m0
+    def title = title0
+    def adders = 5
+    def add(n: Int, a: A, v: Int) { n match {
+      case 0 => m += ((a, v))
+      case 1 => m(a) = v
+      case 2 => m.put(a, v)
+      case 3 => m = (m + ((a, v))).asInstanceOf[M]
+      case 4 => m = (m ++ List((a, v))).asInstanceOf[M]
+      case _ => oor("add", n)
+    }}
+    def subbers: Int = 3
+    def sub(n: Int, a: A) { n match {
+      case 0 => m -= a
+      case 1 => m = (m - a).asInstanceOf[M]
+      case 2 => m = m.filter(_._1 != a).asInstanceOf[M]
+      case _ => oor("sub", n)
+    }}
+    def getters: Int = 3
+    def get(n: Int, a: A) = n match {
+      case 0 => m.get(a).getOrElse(-1)
+      case 1 => if (m contains a) m(a) else -1
+      case 2 => m.getOrElse(a, -1)
+      case _ => oor("get", n)
+    }
+    def fiddlers: Int = 0
+    def fiddle(n: Int) { oor("fiddle", n) }
+    def keys = m.keysIterator
+    def has(a: A) = m contains a
+    override def toString = m.toString
+  }
+  
+  def boxMlm[A] = new BoxMutableMap[A, cm.ListMap[A, Int]](new cm.ListMap[A, Int], "mutable.ListMap")
+  
+  def boxMhm[A] = new BoxMutableMap[A, cm.HashMap[A, Int]](new cm.HashMap[A, Int], "mutable.HashMap")
+  
+  def boxMohm[A] = new BoxMutableMap[A, cm.OpenHashMap[A, Int]](new cm.OpenHashMap[A, Int], "mutable.OpenHashMap")
+  
+  def boxMarm[A <: AnyRef] = new BoxMutableMap[A, cm.AnyRefMap[A, Int]](new cm.AnyRefMap[A, Int](_ => -1), "mutable.AnyRefMap") {
+    private def arm: cm.AnyRefMap[A, Int] = m.asInstanceOf[cm.AnyRefMap[A, Int]]
+    override def adders = 3
+    override def subbers = 1
+    override def getters: Int = 4
+    override def get(n: Int, a: A) = n match {
+      case 0 => m.get(a).getOrElse(-1)
+      case 1 => m(a)
+      case 2 => m.getOrElse(a, -1)
+      case 3 => val x = arm.getOrNull(a); if (x==0 && !(arm contains a)) -1 else x
+      case _ => oor("get", n)
+    }
+    override def fiddlers = 2
+    override def fiddle(n: Int) { n match {
+      case 0 => m = arm.clone
+      case 1 => arm.repack
+      case _ => oor("fiddle", n)
+    }}
+  }
+  
+  def boxMjm = new BoxMutableMap[Long, cm.LongMap[Int]](new cm.LongMap[Int](_ => -1), "mutable.LongMap") {
+    private def lm: cm.LongMap[Int] = m.asInstanceOf[cm.LongMap[Int]]
+    override def adders = 3
+    override def subbers = 1
+    override def getters: Int = 4
+    override def get(n: Int, a: Long) = n match {
+      case 0 => m.get(a).getOrElse(-1)
+      case 1 => m(a)
+      case 2 => m.getOrElse(a, -1)
+      case 3 => val x = lm.getOrNull(a); if (x==0 && !(lm contains a)) -1 else x
+      case _ => oor("get", n)
+    }
+    override def fiddlers = 2
+    override def fiddle(n: Int) { n match {
+      case 0 => m = lm.clone
+      case 1 => lm.repack
+      case _ => oor("fiddle", n)
+    }}
+  }
+  
+  def boxJavaM[A] = new BoxMutableMap[A, cm.Map[A, Int]]((new java.util.HashMap[A, Int]).asScala, "java.util.HashMap") {
+    override def adders = 3
+    override def subbers = 1
+  }
+  
+  
+  // Immutable map wrappers
+  
+  class BoxImmutableMap[A, M <: ci.Map[A, Int]](m0: M, title0: String) extends MapBox[A] {
+    var m = m0
+    def title = title0
+    def adders = 2
+    def add(n: Int, a: A, v: Int) { n match {
+      case 0 => m = (m + ((a, v))).asInstanceOf[M]
+      case 1 => m = (m ++ List((a, v))).asInstanceOf[M]
+      case _ => oor("add", n)
+    }}
+    def subbers: Int = 2
+    def sub(n: Int, a: A) { n match {
+      case 0 => m = (m - a).asInstanceOf[M]
+      case 1 => m = m.filter(_._1 != a).asInstanceOf[M]
+      case _ => oor("sub", n)
+    }}
+    def getters: Int = 3
+    def get(n: Int, a: A) = n match {
+      case 0 => m.get(a).getOrElse(-1)
+      case 1 => if (m contains a) m(a) else -1
+      case 2 => m.getOrElse(a, -1)
+      case _ => oor("get", n)
+    }
+    def fiddlers: Int = 0
+    def fiddle(n: Int) { oor("fiddle", n) }
+    def keys = m.keysIterator
+    def has(a: A) = m contains a
+    override def toString = m.toString
+  }
+  
+  def boxIhm[A] = new BoxImmutableMap[A, ci.HashMap[A,Int]](new ci.HashMap[A, Int], "immutable.HashMap")
+  
+  def boxIim = new BoxImmutableMap[Int, ci.IntMap[Int]](ci.IntMap.empty[Int], "immutable.IntMap")
+  
+  def boxIjm = new BoxImmutableMap[Long, ci.LongMap[Int]](ci.LongMap.empty[Int], "immutable.LongMap")
+  
+  def boxIlm[A] = new BoxImmutableMap[A, ci.ListMap[A, Int]](new ci.ListMap[A, Int], "immutable.ListMap")
+  
+  def boxItm[A: Ordering] = new BoxImmutableMap[A, ci.TreeMap[A, Int]](new ci.TreeMap[A, Int], "immutable.TreeMap")
+    
+  
+  // Mutable set wrappers placed into the same framework (everything returns 0)
+  
+  class BoxMutableSet[A, M <: cm.Set[A]](s0: M, title0: String) extends MapBox[A] {
+    protected var m = s0
+    def title = title0
+    def adders = 5
+    def add(n: Int, a: A, v: Int) { n match {
+      case 0 => m += a
+      case 1 => m(a) = true
+      case 2 => m add a
+      case 3 => m = (m + a).asInstanceOf[M]
+      case 4 => m = (m ++ List(a)).asInstanceOf[M]
+      case _ => oor("add", n)
+    }}
+    def subbers: Int = 3
+    def sub(n: Int, a: A) { n match {
+      case 0 => m -= a
+      case 1 => m = (m - a).asInstanceOf[M]
+      case 2 => m = m.filter(_ != a).asInstanceOf[M]
+      case _ => oor("sub", n)
+    }}
+    def getters: Int = 1
+    def get(n: Int, a: A) = if (m(a)) 0 else -1
+    def fiddlers: Int = 0
+    def fiddle(n: Int) { oor("fiddle", n) }
+    def keys = m.iterator
+    def has(a: A) = m(a)
+    override def toString = m.toString
+  }
+  
+  def boxMbs = new BoxMutableSet[Int, cm.BitSet](new cm.BitSet, "mutable.BitSet")
+  
+  def boxMhs[A] = new BoxMutableSet[A, cm.HashSet[A]](new cm.HashSet[A], "mutable.HashSet")
+  
+  def boxJavaS[A] = new BoxMutableSet[A, cm.Set[A]]((new java.util.HashSet[A]).asScala, "java.util.HashSet") {
+    override def adders = 3
+    override def subbers = 1
+  }
+  
+  
+  // Immutable set wrappers placed into the same framework (everything returns 0)
+  
+  class BoxImmutableSet[A, M <: ci.Set[A]](s0: M, title0: String) extends MapBox[A] {
+    protected var m = s0
+    def title = title0
+    def adders = 2
+    def add(n: Int, a: A, v: Int) { n match {
+      case 0 => m = (m + a).asInstanceOf[M]
+      case 1 => m = (m ++ List(a)).asInstanceOf[M]
+      case _ => oor("add", n)
+    }}
+    def subbers: Int = 2
+    def sub(n: Int, a: A) { n match {
+      case 0 => m = (m - a).asInstanceOf[M]
+      case 1 => m = m.filter(_ != a).asInstanceOf[M]
+      case _ => oor("sub", n)
+    }}
+    def getters: Int = 1
+    def get(n: Int, a: A) = if (m(a)) 0 else -1
+    def fiddlers: Int = 0
+    def fiddle(n: Int) { oor("fiddle", n) }
+    def keys = m.iterator
+    def has(a: A) = m(a)
+    override def toString = m.toString
+  }
+  
+  def boxIbs = new BoxImmutableSet[Int, ci.BitSet](ci.BitSet.empty, "immutable.BitSet")
+  
+  def boxIhs[A] = new BoxImmutableSet[A, ci.HashSet[A]](ci.HashSet.empty[A], "immutable.HashSet")
+  
+  def boxIls[A] = new BoxImmutableSet[A, ci.ListSet[A]](ci.ListSet.empty[A], "immutable.ListSet")
+  
+  def boxIts[A: Ordering] = new BoxImmutableSet[A, ci.TreeSet[A]](ci.TreeSet.empty[A], "immutable.TreeSet")
+  
+  
+  // Random operations on maps
+  def churn[A](map1: MapBox[A], map2: MapBox[A], keys: Array[A], n: Int = 1000, seed: Int = 42, valuer: Int => Int = identity) = {
+    def check = map1.keys.forall(map2 has _) && map2.keys.forall(map1 has _)
+    val rn = new scala.util.Random(seed)
+    var what = new StringBuilder
+    what ++= "creation"
+    for (i <- 0 until n) {
+      if (!check) {
+        val temp = map2 match {
+          case b: BoxImmutableMap[_, _] => b.m match {
+            case hx: ci.HashMap.HashTrieMap[_,_] =>
+              val h = hx.asInstanceOf[ci.HashMap.HashTrieMap[A, Int]]
+              Some((h.bitmap.toHexString, h.elems.mkString, h.size))
+            case _ => None
+          }
+          case _ => None
+        }
+        throw new Exception(s"Disagreement after ${what.result} between ${map1.title} and ${map2.title} because ${map1.keys.map(map2 has _).mkString(",")} ${map2.keys.map(map1 has _).mkString(",")} at step $i:\n$map1\n$map2\n$temp")
+      }
+      what ++= " (%d) ".format(i)
+      if (rn.nextInt(10)==0) {
+        
+        if (map1.fiddlers > 0) map1.fiddle({
+          val n = rn.nextInt(map1.fiddlers)
+          what ++= ("f"+n)
+          n
+        })
+        if (map2.fiddlers > 0) map2.fiddle({
+          val n = rn.nextInt(map2.fiddlers)
+          what ++= ("F"+n)
+          n
+        })
+      }
+      if (rn.nextBoolean) {
+        val idx = rn.nextInt(keys.length)
+        val key = keys(rn.nextInt(keys.length))
+        val n1 = rn.nextInt(map1.adders)
+        val n2 = rn.nextInt(map2.adders)
+        what ++= "+%s(%d,%d)".format(key,n1,n2)
+        map1.add(n1, key, valuer(idx))
+        map2.add(n2, key, valuer(idx))
+      }
+      else {
+        val n = rn.nextInt(keys.length)
+        val key = keys(n)
+        val n1 = rn.nextInt(map1.subbers)
+        val n2 = rn.nextInt(map2.subbers)
+        what ++= "-%s(%d,%d)".format(key, n1, n2)
+        //println(s"- $key")
+        map1.sub(n1, key)
+        map2.sub(n2, key)
+      }
+      val j = rn.nextInt(keys.length)
+      val gn1 = rn.nextInt(map1.getters)
+      val gn2 = rn.nextInt(map2.getters)
+      val g1 = map1.get(gn1, keys(j))
+      val g2 = map2.get(gn2, keys(j))
+      if (g1 != g2) {
+        val temp = map2 match {
+          case b: BoxImmutableMap[_, _] => b.m match {
+            case hx: ci.HashMap.HashTrieMap[_,_] =>
+              val h = hx.asInstanceOf[ci.HashMap.HashTrieMap[A, Int]]
+              val y = (ci.HashMap.empty[A, Int] ++ h).asInstanceOf[ci.HashMap.HashTrieMap[A, Int]]
+              Some(((h.bitmap.toHexString, h.elems.mkString, h.size),(y.bitmap.toHexString, y.elems.mkString, y.size)))
+            case _ => None
+          }
+          case _ => None
+        }
+        throw new Exception(s"Disagreement after ${what.result} between ${map1.title} and ${map2.title} on get of ${keys(j)} (#$j) on step $i: $g1 != $g2 using methods $gn1 and $gn2 resp.; in full\n$map1\n$map2\n$temp")
+      }
+    }
+    true
+  }
+  
+  
+  // Actual tests
+  val smallKeys = Array(0, 1, 42, 9127)
+  val intKeys = smallKeys ++ Array(-1, Int.MaxValue, Int.MinValue, -129385)
+  val longKeys = intKeys.map(_.toLong) ++ Array(Long.MaxValue, Long.MinValue, 1397198789151L, -41402148014L)
+  val stringKeys = intKeys.map(_.toString) ++ Array("", null)
+  val anyKeys = stringKeys.filter(_ != null) ++ Array(0L) ++ Array(true) ++ Array(math.Pi)
+
+  @Test
+  def churnIntMaps() {
+    val maps = Array[() => MapBox[Int]](
+      () => boxMlm[Int], () => boxMhm[Int], () => boxMohm[Int], () => boxJavaM[Int],
+      () => boxIim, () => boxIhm[Int], () => boxIlm[Int], () => boxItm[Int]
+    )
+    assert( maps.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), intKeys, 2000) } )
+  }
+  
+  @Test
+  def churnLongMaps() {
+    val maps = Array[() => MapBox[Long]](
+      () => boxMjm, () => boxIjm, () => boxJavaM[Long],
+      () => boxMlm[Long], () => boxMhm[Long], () => boxMohm[Long], () => boxIhm[Long], () => boxIlm[Long]
+    )
+    assert( maps.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), longKeys, 10000) } )
+  }
+  
+  @Test
+  def churnStringMaps() {
+    // Note: OpenHashMap and TreeMap won't store null keys, so they are skipped here (the String keys include null)
+    val maps = Array[() => MapBox[String]](
+      () => boxMlm[String], () => boxMhm[String], () => boxMarm[String], () => boxJavaM[String],
+      () => boxIhm[String], () => boxIlm[String]
+    )
+    assert( maps.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), stringKeys, 5000) } )
+  }
+  
+  @Test
+  def churnAnyMaps() {
+    val maps = Array[() => MapBox[Any]](
+      () => boxMlm[Any], () => boxMhm[Any], () => boxMohm[Any], () => boxJavaM[Any], () => boxIhm[Any], () => boxIlm[Any]
+    )
+    assert( maps.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), anyKeys, 10000) } )
+  }
+  
+  @Test
+  def churnIntSets() {
+    val sets = Array[() => MapBox[Int]](
+      () => boxMhm[Int], () => boxIhm[Int], () => boxJavaS[Int],
+      () => boxMbs, () => boxMhs[Int], () => boxIbs, () => boxIhs[Int], () => boxIls[Int], () => boxIts[Int]
+    )
+    assert( sets.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), smallKeys, 1000, valuer = _ => 0) } )
+  }
+  
+  @Test 
+  def churnAnySets() {
+    val sets = Array[() => MapBox[Any]](
+      () => boxMhm[Any], () => boxIhm[Any], () => boxJavaS[Any],
+      () => boxMhs[Any], () => boxIhs[Any], () => boxIls[Any]
+    )
+    assert( sets.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), anyKeys, 10000, valuer = _ => 0) } )
+  }
+  
+  @Test
+  def extraMutableLongMapTests() {
+    import cm.{LongMap, HashMap}
+    var lm = LongMap.empty[Long]
+    longKeys.zipWithIndex.foreach{ case (k,i) => lm(k) = i }
+    assert{ lm.map{ case (k,v) => -k*k -> v.toString }.getClass == lm.getClass }
+      
+    assert {
+      val lm2 = new LongMap[Unit](2000000)
+      for (i <- 0 until 1000000) lm2(i) = ()
+      
+      lm2.size == 1000000 && 
+        (0 to 1100000 by 100000).forall(i => (lm2 contains i) == i < 1000000)
+    }
+    
+    lm = LongMap(8L -> 22L, -5L -> 5L, Long.MinValue -> 0L)
+    
+    assert{ var s = 0L; lm.foreachKey(s += _); s == Long.MinValue + 3 }
+    assert{ var s = 0L; lm.foreachValue(s += _); s == 27L }
+    assert { 
+      val m2 = lm.mapValuesNow(_+2)
+      lm.transformValues(_+2)
+      m2 == lm && !(m2 eq lm) && (for ((_,v) <- lm) yield v).sum == 33L
+    }
+
+    assert {
+      val lm2 = new LongMap[String](_.toString)
+      lm2 += (5L -> "fish", 0L -> "unicorn")
+      val hm2 = (new HashMap[Long,String]) ++= lm2
+      List(Long.MinValue, 0L, 1L, 5L).forall(i =>
+        lm2.get(i) == hm2.get(i) &&
+        lm2.getOrElse(i, "") == hm2.getOrElse(i, "") &&
+        lm2(i) == hm2.get(i).getOrElse(i.toString) &&
+        lm2.getOrNull(i) == hm2.get(i).orNull
+      )
+    }
+  }
+  
+  @Test
+  def extraMutableAnyRefMapTests() {
+    import cm.{AnyRefMap, HashMap}
+    var arm = AnyRefMap.empty[String, Int]
+    stringKeys.zipWithIndex.foreach{ case (k,i) => arm(k) = i }
+    
+    assert{ arm.map{ case (k,v) => (if (k==null) "" else k+k) -> v.toString }.getClass == arm.getClass }
+    
+    assert {
+      val arm2 = new AnyRefMap[java.lang.Integer,Unit](2000000)
+      for (i <- 0 until 1000000) arm2(java.lang.Integer.valueOf(i)) = ()      
+      arm2.size == 1000000 && 
+        (0 to 1100000 by 100000).map(java.lang.Integer.valueOf).forall(i => (arm2 contains i) == i < 1000000)
+    }
+    
+    arm = AnyRefMap("heron" -> 22, "dove" -> 5, "budgie" -> 0)
+    
+    assert{
+      var s = ""
+      arm.foreachKey(s += _)
+      s.length == "herondovebudgie".length &&
+      s.contains("heron") &&
+      s.contains("dove") &&
+      s.contains("budgie")
+    }
+
+    assert{ var s = 0L; arm.foreachValue(s += _); s == 27L }
+
+    assert { 
+      val m2 = arm.mapValuesNow(_+2)
+      arm.transformValues(_+2)
+      m2 == arm && !(m2 eq arm) && (for ((_,v) <- arm) yield v).sum == 33L
+    }
+
+    assert {
+      val arm2 = new AnyRefMap[String, String](x => if (x==null) "null" else x)
+      arm2 += ("cod" -> "fish", "Rarity" -> "unicorn")
+      val hm2 = (new HashMap[String,String]) ++= arm2
+      List(null, "cod", "sparrow", "Rarity").forall(i =>
+        arm2.get(i) == hm2.get(i) &&
+        arm2.getOrElse(i, "") == hm2.getOrElse(i, "") &&
+        arm2(i) == hm2.get(i).getOrElse(if (i==null) "null" else i.toString) &&
+        arm2.getOrNull(i) == hm2.get(i).orNull
+      )
+    }
+  }
+  
+  @Test
+  def extraFilterTests() {
+    type M = scala.collection.Map[Int, Boolean]
+    val manyKVs = (0 to 1000).map(i => i*i*i).map(x => x -> ((x*x*x) < 0))
+    val rn = new scala.util.Random(42)
+    def mhm: M = { val m = new cm.HashMap[Int, Boolean]; m ++= manyKVs; m }
+    def mohm: M = { val m = new cm.OpenHashMap[Int, Boolean]; m ++= manyKVs; m }
+    def ihm: M = ci.HashMap.empty[Int, Boolean] ++ manyKVs
+    val densities = List(0, 0.05, 0.2, 0.5, 0.8, 0.95, 1)
+    def repeat = rn.nextInt(100) < 33
+    def pick(m: M, density: Double) = m.keys.filter(_ => rn.nextDouble < density).toSet
+    def test: Boolean = {
+      for (i <- 0 to 100) {
+        var ms = List(mhm, mohm, ihm)
+        do {
+          val density = densities(rn.nextInt(densities.length))
+          val keep = pick(ms.head, density)
+          ms = ms.map(_.filter(keep contains _._1))
+          if (!ms.sliding(2).forall(s => s(0) == s(1))) return false
+        } while (repeat)
+      }
+      true
+    }
+    assert(test)
+  }
+  
+  @Test
+  def testSI8213() {
+    val am = new scala.collection.mutable.AnyRefMap[String, Int]
+    for (i <- 0 until 1024) am += i.toString -> i
+    am.getOrElseUpdate("1024", { am.clear; -1 })
+    assert(am == scala.collection.mutable.AnyRefMap("1024" -> -1))
+    val lm = new scala.collection.mutable.LongMap[Int]
+    for (i <- 0 until 1024) lm += i.toLong -> i
+    lm.getOrElseUpdate(1024, { lm.clear; -1 })
+    assert(lm == scala.collection.mutable.LongMap(1024L -> -1))
+  }
+  
+  // Mutating when an iterator is in the wild shouldn't produce random junk in the iterator
+  // Todo: test all sets/maps this way
+  @Test
+  def testSI8154() {
+    def f() = {
+      val xs = scala.collection.mutable.AnyRefMap[String, Int]("a" -> 1)
+      val it = xs.iterator
+      it.hasNext
+      xs.clear()
+    
+      if (it.hasNext) Some(it.next)
+      else None
+    }
+    assert(f() match {
+      case Some((a,b)) if (a==null || b==null) => false
+      case _ => true
+    })
+  }
+  
+  @Test
+  def testSI8264() {
+    val hs = Set(-2147483648, 1, -45023380, -1, 1971207058, -54312241, -234243394) - -1
+    assert( hs.toList.toSet == hs )
+    assert( hs == hs.toList.toSet )
+  }
+}
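The churn harness above reduces to applying identical random mutations to two map implementations and checking that they keep agreeing; a stripped-down sketch of that idea (names and bounds here are illustrative, not from the test):

    import scala.collection.{mutable => cm, immutable => ci}
    import scala.util.Random

    // Drive a mutable and an immutable HashMap with the same random
    // additions and removals, and verify they answer lookups identically.
    val rng = new Random(42)
    val mm = cm.HashMap.empty[Int, Int]
    var im: ci.Map[Int, Int] = ci.HashMap.empty[Int, Int]
    for (_ <- 1 to 1000) {
      val k = rng.nextInt(16)
      if (rng.nextBoolean()) { mm(k) = k; im += (k -> k) }
      else { mm -= k; im -= k }
      assert((0 until 16).forall(i => mm.get(i) == im.get(i)))
    }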
diff --git a/test/junit/scala/collection/TraversableOnceTest.scala b/test/junit/scala/collection/TraversableOnceTest.scala
new file mode 100644
index 0000000..56d8312
--- /dev/null
+++ b/test/junit/scala/collection/TraversableOnceTest.scala
@@ -0,0 +1,70 @@
+package scala.collection
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import scala.util.Random
+
+@RunWith(classOf[JUnit4])
+/* Test for SI-7614 */
+class TraversableOnceTest {
+  val list = List.fill(1000)(scala.util.Random.nextInt(10000) - 5000)
+
+  // Basic emptiness check
+  @Test
+  def checkEmpty {
+    def hasException(code: => Any): Boolean = try {
+      code
+      false
+    } catch {
+      case u: UnsupportedOperationException => true
+      case t: Throwable => false
+    }
+    assert(hasException({ List[Int]().maxBy(_ * 3) }), "maxBy: on empty list should throw UnsupportedOperationException.")
+    assert(hasException({ List[Int]().minBy(_ * 3) }), "minBy: on empty list should throw UnsupportedOperationException.")
+  }
+
+  // Basic definition of minBy/maxBy.
+  @Test
+  def testCorrectness() = {
+    def f(x: Int) = -1 * x
+    val max = list.maxBy(f)
+    assert(list.forall(f(_) <= f(max)), "f(list.maxBy(f)) should be ≥ f(x) for any element x of the list.")
+
+    val min = list.minBy(f)
+    assert(list.forall(f(_) >= f(min)), "f(list.minBy(f)) should be ≤ f(x) for any element x of the list.")
+  }
+
+  // Ensure that the first match is always returned when more than one element has the same largest/smallest f(x).
+  // Note that this behavior was not explicitly specified before;
+  // to stay compatible with the previous implementation, it is now added to the docs.
+  @Test
+  def testReturnTheFirstMatch() = {
+    val d = List(1, 2, 3, 4, 5, 6, 7, 8)
+    def f(x: Int) = x % 3;
+    assert(d.maxBy(f) == 2, "If multiple elements evaluate to the largest value, maxBy should return the first one.")
+    assert(d.minBy(f) == 3, "If multiple elements evaluate to the smallest value, minBy should return the first one.")
+  }
+
+  // Make sure it evaluates f no more than list.length times.
+  @Test
+  def testOnlyEvaluateOnce() = {
+    var evaluatedCountOfMaxBy = 0
+
+    val max = list.maxBy(x => {
+      evaluatedCountOfMaxBy += 1
+      x * 10
+    })
+    assert(evaluatedCountOfMaxBy == list.length, s"maxBy: should evaluate f only ${list.length} times, but it evaluated $evaluatedCountOfMaxBy times.")
+
+    var evaluatedCountOfMinBy = 0
+
+    val min = list.minBy(x => {
+      evaluatedCountOfMinBy += 1
+      x * 10
+    })
+    assert(evaluatedCountOfMinBy == list.length, s"minBy: should evaluate f only ${list.length} times, but it evaluated $evaluatedCountOfMinBy times.")
+  }
+
+}
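The two properties being pinned down above, in miniature (a sketch, not part of the commit):

    // With f(x) = x % 3 both 2 and 5 map to the maximal value 2; maxBy keeps the first.
    assert(List(1, 2, 3, 4, 5).maxBy(_ % 3) == 2)

    // The selector function is evaluated exactly once per element.
    var calls = 0
    List(10, 20, 30).maxBy { x => calls += 1; x }
    assert(calls == 3)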
diff --git a/test/junit/scala/collection/VectorTest.scala b/test/junit/scala/collection/VectorTest.scala
new file mode 100644
index 0000000..e9c4d44
--- /dev/null
+++ b/test/junit/scala/collection/VectorTest.scala
@@ -0,0 +1,51 @@
+package scala.collection.mutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.collection.mutable
+
+@RunWith(classOf[JUnit4])
+/* Test for SI-8014 and ++ in general  */
+class VectorTest {
+  val noVec = Vector.empty[Int]
+  val smallVec = Vector.range(0,3)
+  val bigVec = Vector.range(0,64)
+  val smsm = Vector.tabulate(2 * smallVec.length)(i => (i % smallVec.length))
+  val smbig = Vector.tabulate(smallVec.length + bigVec.length)(i => 
+    if (i < smallVec.length) i else i - smallVec.length
+  )
+  val bigsm = Vector.tabulate(smallVec.length + bigVec.length)(i => 
+    if (i < bigVec.length) i else i - bigVec.length
+  )
+  val bigbig = Vector.tabulate(2 * bigVec.length)(i => (i % bigVec.length))
+
+
+  val vecs = List(noVec, smallVec, bigVec)
+  val ans = List(
+    vecs,
+    List(smallVec, smsm, smbig),
+    List(bigVec, bigsm, bigbig)
+  )
+
+  @Test
+  def vectorCat() {
+    val cats = vecs.map(a => vecs.map(a ++ _))
+    assert( cats == ans )
+  }
+
+  @Test
+  def iteratorCat() {
+    def its = vecs.map(_.toList.toIterator)
+    val cats = vecs.map(a => its.map(a ++ _))
+    println(cats)
+    assert( cats == ans )
+  }
+
+  @Test
+  def arrayCat() {
+    val ars = vecs.map(_.toArray)
+    val cats = vecs.map(a => ars.map(a ++ _))
+    assert( cats == ans )
+  }
+}
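In short, the assertions require ++ to behave the same whether the right-hand side is a Vector, an Iterator, or an Array; a minimal sketch of that property (values are illustrative):

    val small = Vector.range(0, 3)
    val big   = Vector.range(0, 64)

    // Concatenation must equal the directly tabulated combined vector
    // regardless of the right-hand side's collection type (cf. SI-8014).
    val expected = Vector.tabulate(small.length + big.length)(i =>
      if (i < small.length) i else i - small.length)
    assert((small ++ big) == expected)
    assert((small ++ big.iterator) == expected)
    assert((small ++ big.toArray) == expected)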
diff --git a/test/junit/scala/collection/convert/MapWrapperTest.scala b/test/junit/scala/collection/convert/MapWrapperTest.scala
new file mode 100644
index 0000000..060b6b5
--- /dev/null
+++ b/test/junit/scala/collection/convert/MapWrapperTest.scala
@@ -0,0 +1,49 @@
+package scala.collection.convert
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class MapWrapperTest {
+
+  /* Test for SI-7883 */
+  @Test
+  def testContains() {
+    import scala.collection.JavaConverters.mapAsJavaMapConverter
+    import scala.language.reflectiveCalls  // for accessing containsCounter
+
+    // A HashMap which throws an exception when the iterator() method is called.
+    // Before the fix for SI-7883, calling MapWrapper.containsKey() used to
+    // iterate through every element of the wrapped Map, and thus would crash
+    // in this case.
+    val scalaMap = new scala.collection.mutable.HashMap[String, String] {
+      var containsCounter = 0  // keep track of how often contains() has been called.
+      override def iterator = throw new UnsupportedOperationException
+
+      override def contains(key: String): Boolean = {
+        containsCounter += 1
+        super.contains(key)
+      }
+    }
+
+    val javaMap = scalaMap.asJava
+
+    scalaMap("hello") = "world"
+    scalaMap(null) = "null's value"
+
+    assertEquals(0, scalaMap.containsCounter)
+    assertTrue(javaMap.containsKey("hello"))     // positive test
+    assertTrue(javaMap.containsKey(null))        // positive test, null key
+
+    assertFalse(javaMap.containsKey("goodbye"))  // negative test
+    // Note: this call never reaches scalaMap's contains() method because the runtime
+    // cast fails in MapWrapper, so containsCounter is not incremented here.
+    assertFalse(javaMap.containsKey(42))         // negative test, wrong key type
+
+    assertEquals(Some("null's value"), scalaMap.remove(null))
+    assertFalse(javaMap.containsKey(null))       // negative test, null key
+    assertEquals(4, scalaMap.containsCounter)
+  }
+}
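After the SI-7883 fix, the Java view's containsKey delegates to the Scala map's contains instead of iterating; a short sketch of the observable behaviour (the containsCounter field above is specific to the test):

    import scala.collection.JavaConverters._

    // containsKey answers via the wrapped Scala map's contains, so it
    // works even when iterating the map would be expensive or impossible.
    val m  = scala.collection.mutable.HashMap("hello" -> "world")
    val jm = m.asJava
    assert(jm.containsKey("hello"))
    assert(!jm.containsKey("goodbye"))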
diff --git a/test/junit/scala/math/BigDecimalTest.scala b/test/junit/scala/math/BigDecimalTest.scala
new file mode 100644
index 0000000..d1ba96f
--- /dev/null
+++ b/test/junit/scala/math/BigDecimalTest.scala
@@ -0,0 +1,225 @@
+package scala.math
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import java.math.{BigDecimal => BD, MathContext => MC}
+
+/* Tests various aspects of BigDecimal: wholeness, Double validity, hashing, and rounding consistency. */
+@RunWith(classOf[JUnit4])
+class BigDecimalTest {
+  
+  // Motivated by SI-6173: BigDecimal#isWhole implementation is very heap intensive
+  @Test
+  def isWholeTest() {
+    val wholes = List(
+      BigDecimal(1),
+      BigDecimal(10L),
+      BigDecimal(14.000),
+      BigDecimal(new BD("19127981892347012385719827340123471923850195")),
+      BigDecimal("1e1000000000"),
+      BigDecimal(14.1928857191985e22),
+      BigDecimal(14.12519823759817, new MC(2))
+    )
+    val fracs = List(
+      BigDecimal(0.1),
+      BigDecimal(new BD("1.000000000000000000000000000000000001")),
+      BigDecimal(new BD("275712375971892375127591745810580123751.99999")),
+      BigDecimal("14.19238571927581e6"),
+      BigDecimal("912834718237510238591285")/2
+    )
+    assert(wholes.forall(_.isWhole) && fracs.forall(! _.isWhole))
+  }
+
+  // Motivated by SI-6699: BigDecimal.isValidDouble behaves unexpectedly
+  @Test
+  def isValidDoubleTest() {
+    val valids = List(
+      BigDecimal(1),
+      BigDecimal(19571.125),
+      BigDecimal.decimal(0.1),
+      BigDecimal(1e15)
+    )
+    val invalids = List(
+      BigDecimal(new BD("1.0000000000000000000000000000000000000000001")),
+      BigDecimal("10e1000000"),
+      BigDecimal("10e-1000000")
+    )
+    assert(
+      valids.forall(_.isDecimalDouble) &&
+      invalids.forall(! _.isDecimalDouble)
+    )
+  }
+  
+  // Motivated by SI-6173: BigDecimal#isWhole implementation is very heap intensive
+  @Test
+  def doesNotExplodeTest() {
+    val troublemaker = BigDecimal("1e1000000000")
+    val reasonable = BigDecimal("1e1000")
+    val reasonableInt = reasonable.toBigInt
+    assert(
+      reasonable.hashCode == reasonableInt.hashCode &&
+      reasonable == reasonableInt &&
+      reasonableInt == reasonable &&
+      troublemaker.hashCode != reasonable.hashCode &&
+      !(troublemaker == reasonableInt) &&
+      !(reasonableInt == troublemaker)
+    )
+  }
+  
+  // Motivated by SI-6456: scala.math.BigDecimal should not accept a null value
+  @Test
+  def refusesNullTest() {
+    def isIAE[A](a: => A) = try { a; false } catch { case iae: IllegalArgumentException => true }
+    def isNPE[A](a: => A) = try { a; false } catch { case npe: NullPointerException => true }
+    assert(
+      isIAE(new BigDecimal(null: BD, new MC(2))) &&
+      isIAE(new BigDecimal(new BD("5.7"), null: MC)) &&
+      isNPE(BigDecimal(null: BigInt)) &&
+      isNPE(BigDecimal(null: String)) &&
+      isNPE(BigDecimal(null: Array[Char]))
+    )
+  }
+  
+  // Motivated by SI-6153: BigDecimal.hashCode() has high collision rate
+  @Test
+  def hashCodesAgreeTest() {
+    val bi: BigInt = 100000
+    val bd: BigDecimal = 100000
+    val l: Long = 100000
+    val d: Double = 100000
+    assert(
+      d.## == l.## &&
+      l.## == bd.## &&
+      bd.## == bi.## &&
+      (bd pow 4).hashCode == (bi pow 4).hashCode &&
+      BigDecimal("1e150000").hashCode != BigDecimal("1e150000").toBigInt.hashCode
+    )
+  }
+  
+  // Motivated by noticing BigDecimal(0.1f) != BigDecimal(0.1)
+  @Test
+  def consistentTenthsTest() {
+    def tenths = List[Any](
+      BigDecimal("0.1"),
+      0.1,
+      BigDecimal.decimal(0.1f),
+      BigDecimal.decimal(0.1),
+      BigDecimal(0.1),
+      BigDecimal(BigInt(1), 1),
+      BigDecimal(new BD("0.1")),
+      BigDecimal(1L, 1),
+      BigDecimal(1) / BigDecimal(10),
+      BigDecimal(10).pow(-1)
+    )
+    for (a <- tenths; b <- tenths) assert(a == b, s"$a != $b but both should be 0.1")
+  }
+  
+  // Motivated by noticing BigDecimal(123456789, mc6) != BigDecimal(123456789L, mc6)
+  // where mc6 is a MathContext that rounds to six digits
+  @Test
+  def consistentRoundingTest() {
+    val mc6 = new MC(6)
+    val sameRounding = List(
+      List(
+        123457000,
+        123457000L,
+        123457e3,
+        BigDecimal(123456789, mc6),
+        BigDecimal(123456789L, mc6),
+        BigDecimal(123456789d, mc6),
+        BigDecimal("123456789", mc6),
+        BigDecimal(Array('1','2','3','4','5','6','7','8','9'), mc6),
+        BigDecimal(BigInt(123456789), mc6),
+        BigDecimal(BigInt(1234567890), 1, mc6),
+        BigDecimal.decimal(123456789, mc6),
+        BigDecimal.decimal(123456789d, mc6),
+        BigDecimal.decimal(new BD("123456789"), mc6)
+      ),
+      List(
+        123456789,
+        123456789L,
+        123456789d,
+        new BigDecimal(new BD("123456789"), mc6),
+        new BigDecimal(new BD("123456789")),
+        BigDecimal(123456789),
+        BigDecimal(123456789L),
+        BigDecimal(123456789d),
+        BigDecimal("123456789"),
+        BigDecimal(Array('1','2','3','4','5','6','7','8','9')),
+        BigDecimal(BigInt(123456789)),
+        BigDecimal(BigInt(1234567890), 1),
+        BigDecimal.decimal(123456789),
+        BigDecimal.decimal(123456789d),
+        BigDecimal.valueOf(123456789d, mc6) 
+      )
+    )
+    sameRounding.map(_.zipWithIndex).foreach{ case xs => 
+      for ((a,i) <- xs; (b,j) <- xs) {
+        assert(a == b, s"$a != $b (#$i != #$j) but should be the same")
+        assert(a.## == b.##, s"Hash code mismatch in equal BigDecimals: #$i != #$j")
+      }
+    }
+    val List(xs, ys) = sameRounding.map(_.zipWithIndex)
+    for ((a,i) <- xs; (b,j) <- ys) assert(a != b, s"$a == $b (#$i == #$j) but should be different")
+  } 
+ 
+  // This was unexpectedly truncated in 2.10
+  @Test
+  def noPrematureRoundingTest() {
+    val text = "9791375983750284059237954823745923845928547807345082378340572986452364"
+    val same = List[Any](
+      BigInt(text), BigDecimal(text), BigDecimal(new BD(text))
+    )
+    for (a <- same; b <- same) assert(a == b, s"$a != $b but should be the same")
+  }
+  
+  // Tests the attempts to give a sane representation to IEEE binary32 and binary64
+  // (i.e. Float and Double) with Scala's text-is-King BigDecimal policy
+  @Test
+  def churnRepresentationTest() {
+    val rn = new scala.util.Random(42)
+    for (i <- 1 to 1000) {
+      val d = rn.nextDouble
+      assert({
+        BigDecimal.decimal(d).isDecimalDouble &&
+        BigDecimal.binary(d).isBinaryDouble &&
+        BigDecimal.exact(d).isExactDouble
+      }, s"At least one wrong BigDecimal representation for $d")
+    }
+    for (i <- 1 to 1000) {
+      val f = rn.nextFloat
+      assert({
+        BigDecimal.decimal(f).isDecimalFloat &&
+        BigDecimal.binary(f).isBinaryFloat &&
+        BigDecimal.exact(f).isExactFloat
+      }, s"At least one wrong BigDecimal representation for $f")
+    }
+    for (i <- 1 to 1000) {
+      val ndig = 15+rn.nextInt(5)
+      val s = Array.fill(ndig)((rn.nextInt(10)+'0').toChar).mkString
+      val bi = BigInt(s)
+      val l = bi.toLong
+      val d = bi.toDouble
+      val bd = BigDecimal(bi)
+      val bd2 = BigDecimal.decimal(d)
+      assert(!bi.isValidLong || bi == l, s"Should be invalid or equal: $bi $l")
+      assert(!bi.isValidDouble || bi == d, s"Should be invalid or equal: $bi $d")
+      assert(bd == bi, s"Should be equal $bi $bd")
+      assert(bd.## == bi.##, s"Hash codes for $bi, $bd should be equal")
+      assert(bd == bd2 || bd2 != BigDecimal.exact(d) || !bi.isValidDouble,
+        s"$bd != $bd2 should only be when inexact or invalid")
+      assert(d == bd2 && bd2 == d, s"$d != $bd2 but they should be equal")
+    }
+    val different = List(
+      BigDecimal.decimal(0.1),
+      BigDecimal.binary(0.1),
+      BigDecimal.binary(0.1, new MC(25)),
+      BigDecimal.exact(0.1),
+      BigDecimal.exact(0.1f),
+      BigDecimal.decimal((0.1f).toDouble)
+    )
+    for (a <- different; b <- different if (a ne b))
+      assert(a != b, "BigDecimal representations of Double mistakenly conflated")
+  }
+}
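As a reminder of the three Double-to-BigDecimal factories exercised above (the 0.1 example is illustrative):

    // decimal(d): built from d.toString, i.e. the short decimal form ("0.1")
    // binary(d):  the IEEE-754 binary value of d, rounded to the MathContext
    // exact(d):   the untruncated decimal expansion of the binary value
    val dec = BigDecimal.decimal(0.1)
    val bin = BigDecimal.binary(0.1)
    val exa = BigDecimal.exact(0.1)
    assert(dec.isDecimalDouble && bin.isBinaryDouble && exa.isExactDouble)
    assert(dec != exa) // "0.1" is not the exact binary value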
diff --git a/test/junit/scala/math/NumericTest.scala b/test/junit/scala/math/NumericTest.scala
new file mode 100644
index 0000000..4f0657f
--- /dev/null
+++ b/test/junit/scala/math/NumericTest.scala
@@ -0,0 +1,18 @@
+
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class NumericTest {
+
+  /* Test for SI-8102 */
+  @Test
+  def testAbs {
+    assertTrue(-0.0.abs equals 0.0)
+    assertTrue(-0.0f.abs equals 0.0f)
+  }
+}
+
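The SI-8102 point, spelled out (a sketch; == cannot distinguish the signed zeros, hence the use of equals):

    // abs must normalize the sign of zero for both Double and Float.
    assert((-0.0).abs equals 0.0)
    assert((-0.0f).abs equals 0.0f)
    assert(-0.0 == 0.0) // primitive == does not see the sign of zero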
diff --git a/test/junit/scala/reflect/internal/MirrorsTest.scala b/test/junit/scala/reflect/internal/MirrorsTest.scala
new file mode 100644
index 0000000..9108af1
--- /dev/null
+++ b/test/junit/scala/reflect/internal/MirrorsTest.scala
@@ -0,0 +1,18 @@
+package scala.reflect.internal
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class MirrorsTest {
+  @Test def rootCompanionsAreConnected(): Unit = {
+    val cm = scala.reflect.runtime.currentMirror
+    import cm._
+    assertEquals("RootPackage.moduleClass == RootClass", RootClass, RootPackage.moduleClass)
+    assertEquals("RootClass.module == RootPackage", RootPackage, RootClass.module)
+    assertEquals("EmptyPackage.moduleClass == EmptyPackageClass", EmptyPackageClass, EmptyPackage.moduleClass)
+    assertEquals("EmptyPackageClass.module == EmptyPackage", EmptyPackage, EmptyPackageClass.module)
+  }
+}
\ No newline at end of file
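A tiny usage sketch (not part of the commit) of the runtime mirror whose root wiring is checked above:

    import scala.reflect.runtime.{currentMirror => cm}

    // Top-level symbol lookups hang off the connected root/empty packages.
    assert(cm.staticPackage("scala").fullName == "scala")
    assert(cm.staticClass("scala.Option").fullName == "scala.Option")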
diff --git a/test/junit/scala/reflect/internal/PrintersTest.scala b/test/junit/scala/reflect/internal/PrintersTest.scala
new file mode 100644
index 0000000..4587417
--- /dev/null
+++ b/test/junit/scala/reflect/internal/PrintersTest.scala
@@ -0,0 +1,1164 @@
+package scala.reflect.internal
+
+import org.junit.Test
+import org.junit.Assert._
+import scala.tools.reflect._
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror=>cm}
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class PrintersTest extends BasePrintTests
+  with ClassPrintTests
+  with TraitPrintTests
+  with ValAndDefPrintTests
+  with QuasiTreesPrintTests
+  with PackagePrintTests
+
+object PrinterHelper {
+  val toolbox = cm.mkToolBox()
+
+  import scala.reflect.internal.Chars._
+  private def normalizeEOL(resultCode: String) =
+    resultCode.lines mkString s"$LF"
+
+  def assertResultCode(code: String)(parsedCode: String = "", typedCode: String = "", wrap: Boolean = false, printRoot: Boolean = false) = {
+    def toolboxTree(tree: => Tree) = try{
+        tree
+      } catch {
+        case e:scala.tools.reflect.ToolBoxError => throw new Exception(e.getMessage + ": " + code)
+      }
+
+    def wrapCode(source: String) = {
+      val context = sm"""
+      |trait PrintersContext {
+      |  class baz extends scala.annotation.StaticAnnotation;
+      |  class foo1[A, B] extends scala.annotation.StaticAnnotation;
+      |  class foo2[A, B](a: scala.Int)(b: scala.Int) extends scala.annotation.StaticAnnotation;
+      |  class foo3[Af, Bf](a: scala.Int)(b: scala.Float, c: PrintersContext.this.foo1[Af, Bf]) extends scala.annotation.StaticAnnotation;
+      |  trait A1;
+      |  trait B1;
+      |${source.trim.lines map {"  " + _} mkString s"$LF"}
+      |}"""
+
+      if (wrap) context.trim() else source.trim
+    }
+
+    val parsedTree = toolboxTree(toolbox.parse(wrapCode(code)))
+    if (!parsedCode.isEmpty())
+      assertEquals("using toolbox parser" + LF, wrapCode(parsedCode), normalizeEOL(showCode(parsedTree)))
+    if (!typedCode.isEmpty()) {
+      val typedTree = toolboxTree(toolbox.typecheck(parsedTree))
+      assertEquals("using toolbox typechecker" + LF, wrapCode(typedCode), normalizeEOL(showCode(typedTree, printRootPkg = printRoot)))
+    }
+  }
+
+  def assertTreeCode(tree: Tree)(code: String) = {
+    assertEquals("using quasiquote or given tree"+LF, code.trim, normalizeEOL(showCode(tree)))
+  }
+
+  def assertPrintedCode(source: String, checkTypedTree: Boolean = true, wrapCode: Boolean = false) = {
+    if (checkTypedTree)
+      assertResultCode(source)(source, source, wrapCode)
+    else assertResultCode(source)(parsedCode = source, wrap = wrapCode)
+  }
+
+  implicit class StrContextStripMarginOps(val stringContext: StringContext) extends util.StripMarginInterpolator
+}
+
+import PrinterHelper._
+
+trait BasePrintTests {
+  @Test def testIdent = assertTreeCode(Ident("*"))("*")
+
+  @Test def testConstant1 = assertTreeCode(Literal(Constant("*")))("\"*\"")
+
+  @Test def testConstant2 = assertTreeCode(Literal(Constant(42)))("42")
+
+  @Test def testConstantFloat = assertTreeCode(Literal(Constant(42f)))("42.0F")
+
+  @Test def testConstantDouble = assertTreeCode(Literal(Constant(42d)))("42.0")
+
+  @Test def testConstantLong = assertTreeCode(Literal(Constant(42l)))("42L")
+
+  @Test def testOpExpr = assertPrintedCode("(5).+(4)", checkTypedTree = false)
+
+  @Test def testName1 = assertPrintedCode("class test")
+
+  @Test def testName2 = assertPrintedCode("class *")
+
+  @Test def testName4 = assertPrintedCode("class `a*`")
+
+  @Test def testName5 = assertPrintedCode("val :::: = 1")
+
+  @Test def testName6 = assertPrintedCode("val `::::t` = 1")
+
+  @Test def testName7 = assertPrintedCode("""class \/""")
+
+  @Test def testName8 = assertPrintedCode("""class \\\\""")
+
+  @Test def testName9 = assertPrintedCode("""class test_\/""")
+
+  @Test def testName10 = assertPrintedCode("""class `*_*`""")
+
+  @Test def testName11 = assertPrintedCode("""class `a_*`""")
+
+  @Test def testName12 = assertPrintedCode("""class `*_a`""")
+
+  @Test def testName13 = assertPrintedCode("""class a_a""")
+
+  @Test def testName14 = assertPrintedCode("val x$11 = 5")
+
+  @Test def testName15 = assertPrintedCode("class `[]`")
+
+  @Test def testName16 = assertPrintedCode("class `()`")
+
+  @Test def testName17 = assertPrintedCode("class `{}`")
+
+  @Test def testName18 = assertPrintedCode("class <>")
+
+  @Test def testName19 = assertPrintedCode("""class `class`""")
+
+  @Test def testName20 = assertPrintedCode("""class `test name`""")
+
+  @Test def testIfExpr1 = assertResultCode(code = sm"""
+    |val a = 1
+    |if (a > 1)
+    |  a: Int
+    |else
+    |  (a.toString): String
+    """)(
+    parsedCode = sm"""
+    |val a = 1;
+    |if (a.>(1))
+    |  ((a): Int)
+    |else
+    |  ((a.toString): String)""",
+    typedCode=sm"""
+    |val a = 1;
+    |if (PrintersContext.this.a.>(1))
+    |  ((PrintersContext.this.a): scala.Int)
+    |else
+    |  ((PrintersContext.this.a.toString()): scala.Predef.String)
+    """, wrap = true)
+
+  @Test def testIfExpr2 = assertPrintedCode(sm"""
+    |class A {
+    |  (if (true)
+    |  {
+    |    false;
+    |    ()
+    |  }
+    |else
+    |  {
+    |    true;
+    |    ()
+    |  }).toString()
+    |}""")
+
+  @Test def testIfExpr3 = assertPrintedCode(sm"""
+    |class A {
+    |  (if (true)
+    |  {
+    |    false;
+    |    ()
+    |  }
+    |else
+    |  {
+    |    true;
+    |    ()
+    |  }).toString().hashCode()
+    |}""")
+
+  //val x = true && true && false.!
+  @Test def testBooleanExpr1 = assertPrintedCode("val x = true.&&(true).&&(false.`unary_!`)", checkTypedTree = false)
+
+  //val x = true && !(true && false)
+  @Test def testBooleanExpr2 = assertPrintedCode("val x = true.&&(true.&&(false).`unary_!`)", checkTypedTree = false)
+
+  @Test def testNewExpr1 = assertResultCode(
+    code = sm"""
+    |class foo
+    |new foo()
+    |""")(
+    parsedCode = sm"""
+    |class foo;
+    |new foo()""",
+    typedCode = sm"""
+    |class foo;
+    |new PrintersContext.this.foo()
+    |""",
+    wrap = true)
+
+  @Test def testNewExpr2 = assertResultCode(
+    code = sm"""
+    |class foo
+    |new foo { "test" }
+    |""")(
+    parsedCode = sm"""
+    |class foo;
+    |{
+    |  final class $$anon extends foo {
+    |    "test"
+    |  };
+    |  new $$anon()
+    |}""",
+    typedCode = sm"""
+    |class foo;
+    |{
+    |  final class $$anon extends PrintersContext.this.foo {
+    |    "test"
+    |  };
+    |  new $$anon()
+    |}""",
+    wrap = true)
+
+  @Test def testNewExpr3 = assertPrintedCode(sm"""
+    |{
+    |  class foo[t];
+    |  new foo[scala.Int]()
+    |}""")
+
+  @Test def testNewExpr4 = assertPrintedCode(sm"""
+    |{
+    |  class foo(x: scala.Int);
+    |  val x = 5;
+    |  new foo(x)
+    |}""")
+
+  @Test def testNewExpr5 = assertPrintedCode(sm"""
+    |{
+    |  class foo[t](x: scala.Int);
+    |  val x = 5;
+    |  new foo[scala.Predef.String](x)
+    |}""")
+
+  //new foo[t](x) { () }
+  @Test def testNewExpr6 = assertResultCode(
+    code = sm"""
+    |class foo[t](x: Int)
+    |new foo[String](3) { () }
+    |""")(
+    parsedCode = sm"""
+    |{
+    |  class foo[t](x: Int);
+    |  {
+    |    final class $$anon extends foo[String](3) {
+    |      ()
+    |    };
+    |    new $$anon()
+    |  }
+    |}""",
+    typedCode = sm"""
+    |{
+    |  class foo[t](x: scala.Int);
+    |  {
+    |    final class $$anon extends foo[scala.Predef.String](3) {
+    |      ()
+    |    };
+    |    new $$anon()
+    |  }
+    |}""")
+
+  //new foo with bar
+  @Test def testNewExpr7 = assertPrintedCode(sm"""
+    |{
+    |  trait foo;
+    |  trait bar;
+    |  {
+    |    final class $$anon extends foo with bar;
+    |    new $$anon()
+    |  }
+    |}""")
+
+  //new { anonymous }
+  @Test def testNewExpr8 = assertPrintedCode(sm"""
+    |{
+    |  final class $$anon {
+    |    5
+    |  };
+    |  new $$anon()
+    |}""")
+
+  //new { val early = 1 } with Parent[Int] { body }
+  @Test def testNewExpr9 = assertPrintedCode(sm"""
+    |{
+    |  class Parent[t];
+    |  {
+    |    final class $$anon extends {
+    |      val early = 1
+    |    } with Parent[scala.Int] {
+    |      "testNewExpr"
+    |    };
+    |    new $$anon()
+    |  }
+    |}""")
+
+  //new Foo { self => }
+  @Test def testNewExpr10 = assertPrintedCode(sm"""
+    |{
+    |  class Foo;
+    |  {
+    |    final class $$anon extends Foo { self =>
+    |      
+    |    };
+    |    new $$anon()
+    |  }
+    |}""")
+
+  @Test def testReturn = assertPrintedCode("def test: scala.Int = return 42")
+
+  @Test def testFunc1 = assertResultCode(
+    code = "List(1, 2, 3).map((i: Int) => i - 1)")(
+    parsedCode = "List(1, 2, 3).map(((i: Int) => i.-(1)))",
+    typedCode = sm"scala.collection.immutable.List.apply(1, 2, 3).map(((i: scala.Int) => i.-(1)))(scala.collection.immutable.List.canBuildFrom)")
+
+  @Test def testFunc2 = assertResultCode(
+    code = "val sum: Seq[Int] => Int = _ reduceLeft (_+_)")(
+    parsedCode = "val sum: _root_.scala.Function1[Seq[Int], Int] = ((x$1) => x$1.reduceLeft(((x$2, x$3) => x$2.+(x$3))))",
+    typedCode = "val sum: _root_.scala.Function1[scala.`package`.Seq[scala.Int], scala.Int] = ((x$1) => x$1.reduceLeft(((x$2, x$3) => x$2.+(x$3))))")
+
+  @Test def testFunc3 = assertResultCode(
+    code = "List(1, 2, 3) map (_ - 1)")(
+    parsedCode = "List(1, 2, 3).map(((x$1) => x$1.-(1))) ",
+    typedCode = "scala.collection.immutable.List.apply(1, 2, 3).map(((x$1) => x$1.-(1)))(scala.collection.immutable.List.canBuildFrom)")
+
+  @Test def testFunc4 = assertResultCode(
+    code = "val x: String => Int = ((str: String) => 1)")(
+    parsedCode = "val x: _root_.scala.Function1[String, Int] = ((str: String) => 1)",
+    typedCode = " val x: _root_.scala.Function1[_root_.scala.Predef.String, _root_.scala.Int] = ((str: _root_.scala.Predef.String) => 1)", printRoot = true)
+
+  @Test def testAssign1 = assertPrintedCode("(f.v = 5).toString", checkTypedTree = false)
+  
+  @Test def testAssign2 = assertPrintedCode("(f.v = 5)(2)", checkTypedTree = false)
+
+  @Test def testImport1 = assertPrintedCode("import scala.collection.mutable")
+
+  @Test def testImport2 = assertPrintedCode("import java.lang.{String=>Str}")
+
+  @Test def testImport3 = assertPrintedCode("import java.lang.{String=>Str, Object=>_, _}")
+
+  @Test def testImport4 = assertPrintedCode("import scala.collection._")
+}
+
+trait ClassPrintTests {
+  @Test def testClass = assertPrintedCode("class *")
+
+  @Test def testClassWithBody = assertPrintedCode(sm"""
+    |class X {
+    |  def y = "test"
+    |}""")
+
+  @Test def testClassWithPublicParams = assertPrintedCode("class X(val x: scala.Int, val s: scala.Predef.String)")
+
+  @Test def testClassWithParams1 = assertPrintedCode("class X(x: scala.Int, s: scala.Predef.String)")
+
+  @Test def testClassWithParams2 = assertPrintedCode("class X(@test x: Int, s: String)", checkTypedTree = false)
+
+  @Test def testClassWithParams3 = assertPrintedCode("class X(implicit x: Int, s: String)", checkTypedTree = false)
+
+  @Test def testClassWithParams4 = assertPrintedCode("class X(implicit @unchecked x: Int, s: String)", checkTypedTree = false)
+
+  @Test def testClassWithParams5 = assertPrintedCode(sm"""
+    |{
+    |  class Y {
+    |    val x = 5
+    |  };
+    |  class X(override private[this] val x: scala.Int, s: scala.Predef.String) extends Y;
+    |  ()
+    |}""")
+
+  @Test def testClassWithParams6 = assertPrintedCode("class X(@test1 override private[this] val x: Int, @test2(param1 = 7) s: String) extends Y", checkTypedTree = false)
+
+  @Test def testClassWithParams7 = assertPrintedCode("class X protected (val x: scala.Int, val s: scala.Predef.String)")
+
+  @Test def testClassWithParams8 = assertPrintedCode("class X(var x: scala.Int)")
+
+  @Test def testClassWithParams9 = assertPrintedCode("def test(x: scala.Int*) = 5")
+
+  @Test def testClassWithByNameParam = assertPrintedCode("class X(x: => scala.Int)")
+
+  @Test def testClassWithDefault = assertPrintedCode(sm"""
+    |{
+    |  class X(var x: scala.Int = 5);
+    |  ()
+    |}""")
+
+  @Test def testClassWithParams10 = assertPrintedCode("class X(protected[zzz] var x: Int)", checkTypedTree = false)
+
+  @Test def testClassWithParams11 = assertPrintedCode(sm"""
+    |{
+    |  class F(x: scala.Int);
+    |  trait E {
+    |    var x: scala.Int
+    |  };
+    |  class X(override var x: scala.Int = 5) extends F(x) with E;
+    |  ()
+    |}""")
+
+  @Test def testClassWithParams12 = assertPrintedCode("class X(val y: scala.Int)()(var z: scala.Double)")
+
+  @Test def testClassWithImplicitParams = assertPrintedCode("class X(var i: scala.Int)(implicit val d: scala.Double, var f: scala.Float)")
+
+  @Test def testClassWithEarly = assertPrintedCode(sm"""
+    |class X(var i: scala.Int) extends {
+    |  val a = i;
+    |  type B
+    |} with scala.Serializable""")
+
+  @Test def testClassWithThrow1 = assertPrintedCode(sm"""
+    |class Throw1 {
+    |  throw new scala.`package`.Exception("exception!")
+    |}""")  
+
+  @Test def testClassWithThrow2 = assertPrintedCode(sm"""
+    |class Throw2 {
+    |  var msg = "   ";
+    |  val e = new scala.`package`.Exception(Throw2.this.msg);
+    |  throw Throw2.this.e
+    |}""")
+
+  /*
+    class Test {
+      val (a, b) = (1, 2)
+    }
+  */
+  @Test def testClassWithAssignmentWithTuple1 = assertPrintedCode(sm"""
+    |class Test {
+    |  private[this] val x$$1 = (scala.Tuple2.apply(1, 2): @scala.unchecked) match {
+    |    case scala.Tuple2((a @ _), (b @ _)) => scala.Tuple2.apply(a, b)
+    |  };
+    |  val a = Test.this.x$$1._1;
+    |  val b = Test.this.x$$1._2
+    |}""")
+
+  @Test def testClassWithAssignmentWithTuple2 = assertResultCode(
+    code = sm"""
+    |class Test {
+    |  val (a, b) = (1).->(2)
+    |}""")(
+    parsedCode = sm"""
+    |class Test {
+    |  private[this] val x$$1 = ((1).->(2): @scala.unchecked) match {
+    |    case scala.Tuple2((a @ _), (b @ _)) => scala.Tuple2(a, b)
+    |  };
+    |  val a = x$$1._1;
+    |  val b = x$$1._2
+    |}""",
+    typedCode = sm"""
+    |class Test {
+    |  private[this] val x$$1 = (scala.Predef.ArrowAssoc(1).->(2): @scala.unchecked) match {
+    |    case scala.Tuple2((a @ _), (b @ _)) => scala.Tuple2.apply(a, b)
+    |  };
+    |  val a = Test.this.x$$1._1;
+    |  val b = Test.this.x$$1._2
+    |}""")
+
+  /*
+    class Test {
+      val List(one, three, five) = List(1,3,5)
+    }
+  */
+  @Test def testClassWithPatternMatchInAssignment = assertPrintedCode(sm"""
+    |class Test {
+    |  private[this] val x$$1 = (scala.collection.immutable.List.apply(1, 3, 5): @scala.unchecked) match {
+    |    case scala.collection.immutable.List((one @ _), (three @ _), (five @ _)) => scala.Tuple3.apply(one, three, five)
+    |  };
+    |  val one = Test.this.x$$1._1;
+    |  val three = Test.this.x$$1._2;
+    |  val five = Test.this.x$$1._3
+    |}""")
+
+  //class A(l: List[_])
+  @Test def testClassWithExistentialParameter1 = assertPrintedCode(sm"""
+    |class Test(l: (scala.`package`.List[_$$1] forSome { 
+    |  type _$$1
+    |}))""")
+
+  @Test def testClassWithExistentialParameter2 = assertPrintedCode(sm"""
+    |class B(l: (scala.`package`.List[T] forSome { 
+    |  type T
+    |}))""")
+
+  @Test def testClassWithCompoundTypeTree = assertPrintedCode(sm"""
+    |{
+    |  trait A;
+    |  trait B;
+    |  abstract class C(val a: A with B) {
+    |    def method(x: A with B with C {
+    |      val x: scala.Float
+    |    }): A with B
+    |  };
+    |  ()
+    |}""")
+
+  @Test def testClassWithSelectFromTypeTree = assertPrintedCode(sm"""
+    |{
+    |  trait A {
+    |    type T
+    |  };
+    |  class B(t: (A)#T);
+    |  ()
+    |}""")
+
+  @Test def testImplicitClass = assertPrintedCode(sm"""
+    |{
+    |  implicit class X(protected[this] var x: scala.Int);
+    |  ()
+    |}""",
+    checkTypedTree = true)
+
+  @Test def testAbstractClass = assertPrintedCode("abstract class X(protected[this] var x: scala.Int)")
+
+  @Test def testCaseClassWithParams1 = assertPrintedCode(sm"""
+    |{
+    |  case class X(x: scala.Int, s: scala.Predef.String);
+    |  ()
+    |}""")
+
+  @Test def testCaseClassWithParams2 = assertPrintedCode(sm"""
+    |{
+    |  case class X(protected val x: scala.Int, s: scala.Predef.String);
+    |  ()
+    |}""")
+
+  @Test def testCaseClassWithParams3 = assertPrintedCode(sm"""
+    |{
+    |  case class X(implicit x: scala.Int, s: scala.Predef.String);
+    |  ()
+    |}""")
+
+  @Test def testCaseClassWithParams4 = assertPrintedCode(sm"""
+    |{
+    |  trait V {
+    |    val x: scala.Int
+    |  };
+    |  case class X(override val x: scala.Int, s: scala.Predef.String) extends scala.Cloneable;
+    |  ()
+    |}""")
+
+  @Test def testCaseClassWithBody = assertPrintedCode(sm"""
+    |{
+    |  case class X() {
+    |    def y = "test"
+    |  };
+    |  ()
+    |}""")
+
+  @Test def testLocalClass = assertPrintedCode(sm"""
+    |def test = {
+    |  class X(var a: scala.Int) {
+    |    def y = "test"
+    |  };
+    |  new X(5)
+    |}""")
+
+  @Test def testLocalCaseClass = assertPrintedCode(sm"""
+    |def test = {
+    |  case class X(var a: scala.Int) {
+    |    def y = "test"
+    |  };
+    |  new X(5)
+    |}""")
+
+  @Test def testSuperInClass = assertPrintedCode(sm"""
+    |{
+    |  trait Root {
+    |    def r = "Root"
+    |  };
+    |  class X extends Root {
+    |    def superX = super.r
+    |  };
+    |  class Y extends X with Root {
+    |    class Inner {
+    |      val myY = Y.super.r
+    |    };
+    |    def fromX = super[X].r;
+    |    def fromRoot = super[Root].r
+    |  };
+    |  ()
+    |}""")
+
+  @Test def testThisInClass = assertPrintedCode(sm"""
+    |class Outer {
+    |  class Inner {
+    |    val outer = Outer.this
+    |  };
+    |  val self = this
+    |}""")
+
+  @Test def testCaseClassWithParamsAndBody = assertPrintedCode(sm"""
+    |{
+    |  case class X(var x: scala.Int, var s: scala.Predef.String) {
+    |    def y = "test"
+    |  };
+    |  ()
+    |}""")
+
+  @Test def testObject = assertPrintedCode("object *")
+
+  @Test def testObjectWithBody = assertPrintedCode(sm"""
+    |object X {
+    |  def y = "test"
+    |}""")
+
+  @Test def testObjectWithEarly1 = assertPrintedCode(sm"""
+    |object X extends {
+    |  val early: scala.Int = 42
+    |} with scala.Serializable""")
+
+  @Test def testObjectWithEarly2 = assertPrintedCode(sm"""
+    |object X extends {
+    |  val early: scala.Int = 42;
+    |  type EarlyT = scala.Predef.String
+    |} with scala.Serializable""")
+
+  @Test def testObjectWithSelf = assertPrintedCode(sm"""
+    |object Foo extends scala.Serializable { self =>
+    |  42
+    |}""")
+
+  @Test def testObjectInh = assertPrintedCode(sm"""
+    |trait Y {
+    |  private[Y] object X extends scala.Serializable with scala.Cloneable
+    |}""")
+
+  @Test def testObjectWithPatternMatch1 = assertPrintedCode(sm"""
+    |object PM1 {
+    |  scala.collection.immutable.List.apply(1, 2) match {
+    |    case (i @ _) => i
+    |  }
+    |}""")
+
+  @Test def testObjectWithPatternMatch2 = assertResultCode(
+    code = sm"""
+    |object PM2 {
+    |  List(1, 2).map {
+    |    case i if i > 5 => i
+    |  }
+    |}""")(
+    parsedCode = sm"""
+    |object PM2 {
+    |  List(1, 2).map({
+    |    case (i @ _) if i.>(5) => i
+    |  })
+    |}""")
+    /*
+    typedCode = sm"""
+    |object PM2 {
+    |  scala.collection.immutable.List.apply(1, 2).map(((x0$$1) => x0$$1 match {
+    |    case (i @ _) if i.>(5) => i
+    |  }))(scala.collection.immutable.List.canBuildFrom)
+    |}""")
+    *
+    */
+
+  @Test def testObjectWithPatternMatch3 = assertResultCode(
+    code = sm"""
+    |object PM3 {
+    |  List(1, 2).map {
+    |    case i: Int => i
+    |  }
+    |}""")(
+    parsedCode = sm"""
+    |object PM3 {
+    |  List(1, 2).map({
+    |    case (i @ ((_): Int)) => i
+    |  })
+    |}""")
+    /*
+    typedCode = sm"""
+    |object PM3 {
+    |  scala.collection.immutable.List.apply(1, 2).map(((x0$$2) => x0$$2 match {
+    |    case (i @ ((_): scala.Int)) => i
+    |  }))(scala.collection.immutable.List.canBuildFrom)
+    |}""")
+    *
+    */
+
+  @Test def testObjectWithPatternMatch4 = assertResultCode(
+    code = sm"""
+    |object PM4 {
+    |  List(1, 2).map {
+    |    case _ => 42
+    |  }
+    |}""")(
+    parsedCode = sm"""
+    |object PM4 {
+    |  List(1, 2).map({
+    |    case _ => 42
+    |  })
+    |}""")
+    /*
+    typedCode = sm"""
+    |object PM4 {
+    |  scala.collection.immutable.List.apply(1, 2).map(((x0$$3) => x0$$3 match {
+    |    case _ => 42
+    |  }))(scala.collection.immutable.List.canBuildFrom)
+    |}""")
+    *
+    */
+
+  @Test def testObjectWithPatternMatch5 = assertResultCode(
+    code = sm"""
+    |object PM5 {
+    |  List(1, 2) match {
+    |    case x :: xs => x
+    |  }
+    |}""")(
+    parsedCode = sm"""
+    |object PM5 {
+    |  List(1, 2) match {
+    |    case ::((x @ _), (xs @ _)) => x
+    |  }
+    |}""",
+    typedCode = sm"""
+    |object PM5 {
+    |  scala.collection.immutable.List.apply(1, 2) match {
+    |    case scala.`package`.::((x @ _), (xs @ _)) => x
+    |  }
+    |}""")
+
+  @Test def testObjectWithPatternMatch6 = assertResultCode(
+    code = sm"""
+    |object PM6 {
+    |  List(1, 2).map {
+    |    case (0 | 1) => true
+    |    case _ => false
+    |  }
+    |}""")(
+    parsedCode = sm"""
+    |object PM6 {
+    |  List(1, 2).map({
+    |    case (0| 1) => true
+    |    case _ => false
+    |  })
+    |}""")
+    /*
+    typedCode = sm"""
+    |object PM6 {
+    |  scala.collection.immutable.List.apply(1, 2).map(((x0$$4) => x0$$4 match {
+    |    case (0| 1) => true
+    |    case _ => false
+    |  }))(scala.collection.immutable.List.canBuildFrom)
+    |}"""
+    *
+    */
+
+  @Test def testObjectWithPatternMatch7 = assertPrintedCode(sm"""
+    |object PM7 {
+    |  scala.Predef.augmentString("abcde").toList match {
+    |    case scala.collection.Seq((car @ _), _*) => car
+    |  }
+    |}""")
+
+  @Test def testObjectWithPatternMatch8 = assertPrintedCode(sm"""
+    |{
+    |  object Extractor {
+    |    def unapply(i: scala.Int) = scala.Some.apply(i)
+    |  };
+    |  object PM9 {
+    |    42 match {
+    |      case (a @ Extractor((i @ _))) => i
+    |    }
+    |  };
+    |  ()
+    |}""")
+
+  @Test def testObjectWithPartialFunc = assertPrintedCode(sm"""
+    |object Test {
+    |  def partFuncTest[A, B](e: scala.`package`.Either[A, B]): scala.Unit = e match {
+    |    case scala.`package`.Right(_) => ()
+    |  }
+    |}""")
+
+  @Test def testObjectWithTry = assertResultCode(
+    code = sm"""
+    |object Test {
+    |  import java.io._;
+    |  var file: PrintStream = null;
+    |  try {
+    |    val out = new FileOutputStream("myfile.txt");
+    |    file = new PrintStream(out)
+    |  } catch {
+    |    case ioe: IOException => println("ioe")
+    |    case e: Exception => println("e")
+    |  } finally println("finally")
+    |}""")(
+    parsedCode = sm"""
+    |object Test {
+    |  import java.io._;
+    |  var file: PrintStream = null;
+    |  try {
+    |    val out = new FileOutputStream("myfile.txt");
+    |    file = new PrintStream(out)
+    |  } catch {
+    |    case (ioe @ ((_): IOException)) => println("ioe")
+    |    case (e @ ((_): Exception)) => println("e")
+    |  } finally println("finally")
+    |}""",
+    typedCode = sm"""
+    |object Test {
+    |  import java.io._;
+    |  var file: java.io.PrintStream = null;
+    |  try {
+    |    val out = new java.io.FileOutputStream("myfile.txt");
+    |    Test.this.`file_=`(new java.io.PrintStream(out))
+    |  } catch {
+    |    case (ioe @ ((_): java.io.IOException)) => scala.Predef.println("ioe")
+    |    case (e @ ((_): scala.`package`.Exception)) => scala.Predef.println("e")
+    |  } finally scala.Predef.println("finally")
+    |}""")
+}
+
+trait TraitPrintTests {
+  @Test def testTrait = assertPrintedCode("trait *")
+
+  @Test def testTraitWithBody = assertPrintedCode(sm"""
+    |trait X {
+    |  def y = "test"
+    |}""")
+
+  @Test def testTraitWithSelfTypeAndBody = assertPrintedCode(sm"""
+    |trait X { self: scala.Cloneable =>
+    |  def y = "test"
+    |}""")
+
+  @Test def testTraitWithSelf1 = assertPrintedCode(sm"""
+    |trait X { self =>
+    |  def y = "test"
+    |}""")
+
+  @Test def testTraitWithSelf2 = assertPrintedCode(sm"""
+    |trait X { self: scala.Cloneable with scala.Serializable =>
+    |  val x: scala.Int = 1
+    |}""")
+
+  @Test def testTraitTypeParams = assertPrintedCode("trait X[A, B]")
+
+  @Test def testTraitWithBody2 = assertPrintedCode(sm"""
+    |trait X {
+    |  def foo: scala.Unit;
+    |  val bar: scala.Predef.String
+    |}""")
+
+  @Test def testTraitWithInh = assertPrintedCode("trait X extends scala.Cloneable with scala.Serializable")
+
+  @Test def testTraitWithEarly1 = assertPrintedCode(sm"""
+    |trait X extends {
+    |  val x: Int = 1
+    |} with AnyRef""", checkTypedTree = false)
+
+  @Test def testTraitWithEarly2 = assertPrintedCode(sm"""
+    |trait X extends {
+    |  val x: scala.Int = 0;
+    |  type Foo = scala.Unit
+    |} with scala.Cloneable""")
+
+  @Test def testTraitWithEarly3 = assertPrintedCode(sm"""
+    |trait X extends {
+    |  val x: scala.Int = 5;
+    |  val y: scala.Double = 4.0;
+    |  type Foo;
+    |  type XString = scala.Predef.String
+    |} with scala.Serializable""")
+
+  @Test def testTraitWithEarly4 = assertPrintedCode(sm"""
+    |trait X extends {
+    |  val x: scala.Int = 5;
+    |  val y: scala.Double = 4.0;
+    |  type Foo;
+    |  type XString = scala.Predef.String
+    |} with scala.Serializable {
+    |  val z = 7
+    |}""")
+
+  @Test def testTraitWithSingletonTypeTree = assertPrintedCode(sm"""
+    |trait Test {
+    |  def testReturnSingleton(): Test.this.type
+    |}""")
+
+  @Test def testTraitWithThis = assertTreeCode(q"trait Test { this: X with Y => }")(sm"""
+    |trait Test { _ : X with Y =>
+    |  
+    |}""")
+
+  @Test def testTraitWithWhile1 = assertPrintedCode(sm"""
+    |trait Test {
+    |  while (false) 
+    |    scala.Predef.println("testing...")
+    |  
+    |}""")
+
+  @Test def testTraitWithWhile2 = assertPrintedCode(sm"""
+    |trait Test {
+    |  while (true) 
+    |    {
+    |      scala.Predef.println("testing...");
+    |      scala.Predef.println("testing...")
+    |    }
+    |  
+    |}""")
+
+  @Test def testTraitWithDoWhile1 = assertPrintedCode(sm"""
+    |trait Test {
+    |  do 
+    |    scala.Predef.println("testing...")
+    |   while (true) 
+    |}""")  
+
+  @Test def testTraitWithTypes = assertResultCode(
+    code = sm"""
+    |trait Test {
+    |  type A = Int;
+    |  type B >: Nothing <: AnyRef;
+    |  protected type C >: Nothing;
+    |  type D <: AnyRef
+    |}""")(
+    parsedCode = sm"""
+    |trait Test {
+    |  type A = Int;
+    |  type B >: Nothing <: AnyRef;
+    |  protected type C >: Nothing;
+    |  type D <: AnyRef
+    |}""",
+    typedCode = sm"""
+    |trait Test {
+    |  type A = scala.Int;
+    |  type B <: scala.AnyRef;
+    |  protected type C;
+    |  type D <: scala.AnyRef
+    |}""")  
+}
+
+trait ValAndDefPrintTests {
+  @Test def testVal1 = assertPrintedCode("val a: scala.Unit = ()")
+
+  @Test def testVal2 = assertPrintedCode("val * : scala.Unit = ()")
+
+  @Test def testVal3 = assertPrintedCode("val a_ : scala.Unit = ()")
+
+  @Test def testDef1 = assertPrintedCode("def a = ()")
+
+  @Test def testDef2 = assertPrintedCode("def * : scala.Unit = ()")
+
+  @Test def testDef3 = assertPrintedCode("def a_(x: scala.Int): scala.Unit = ()")
+
+  @Test def testDef4 = assertPrintedCode("def a_ : scala.Unit = ()")
+
+  @Test def testDef5 = assertPrintedCode("def a_(* : scala.Int): scala.Unit = ()")
+
+  @Test def testDef6 = assertPrintedCode("def a_(b_ : scala.Int) = ()")
+
+  @Test def testDef7 = assertTreeCode{ 
+    Block(
+      DefDef(NoMods, newTermName("test1"), Nil, Nil, EmptyTree, Literal(Constant(()))),
+      DefDef(NoMods, newTermName("test2"), Nil, Nil :: Nil, EmptyTree, Literal(Constant(())))
+    )
+  }(sm"""
+    |{
+    |  def test1 = ();
+    |  def test2() = ()
+    |}""")
+
+  @Test def testDef8 = {
+    val arg = ValDef(Modifiers(Flag.IMPLICIT) , newTermName("a"),
+      AppliedTypeTree(Ident(newTypeName("R")), List(Ident(newTypeName("X")))), EmptyTree)
+
+    //def test[X](implicit a: R[X]) = ()
+    val tree = DefDef(NoMods, newTermName("test"), TypeDef(NoMods, newTypeName("X"), Nil, EmptyTree) :: Nil,
+      List(List(arg)), EmptyTree, Literal(Constant(())))
+
+    assertTreeCode(tree)("def test[X](implicit a: R[X]) = ()")
+  }
+
+  @Test def testDef9 = assertPrintedCode("def a(x: scala.Int)(implicit z: scala.Double, y: scala.Float): scala.Unit = ()")
+
+  @Test def testDefWithLazyVal1 = assertResultCode(
+    code = "def a = { lazy val test: Int = 42 }")(
+    parsedCode = sm"""
+    |def a = {
+    |  lazy val test: Int = 42;
+    |  ()
+    |}
+    """,
+    typedCode = sm"""
+    |def a = {
+    |  lazy val test: scala.Int = 42;
+    |  ()
+    |}""")
+
+  @Test def testDefWithLazyVal2 = assertPrintedCode(sm"""
+    |def a = {
+    |  lazy val test = {
+    |    scala.Predef.println();
+    |    scala.Predef.println()
+    |  };
+    |  ()
+    |}""")
+
+  @Test def testDefWithParams1 = assertPrintedCode("def foo(x: scala.Int*) = ()")
+
+  @Test def testDefWithParams2 = assertPrintedCode(sm"""
+    |{
+    |  def foo(x: scala.Int)(y: scala.Int = 1) = ();
+    |  ()
+    |}""")
+
+  @Test def testDefWithTypeParams1 = assertPrintedCode(sm"""
+    |{
+    |  def foo[A, B, C](x: A)(y: scala.Int = 1): C = ().asInstanceOf[C];
+    |  ()
+    |}""")
+
+  @Test def testDefWithTypeParams2 = assertPrintedCode("def foo[A, B <: scala.AnyVal] = ()")
+
+  @Test def testDefWithAnn1 = assertPrintedCode("@annot def foo = null", checkTypedTree = false)
+
+  @Test def testDefWithAnn2 = assertPrintedCode("@a(x) def foo = null", checkTypedTree = false)
+
+  @Test def testDefWithAnn3 = assertPrintedCode("@Foo[A, B] def foo = null", checkTypedTree = false)
+
+  @Test def testDefWithAnn4 = assertPrintedCode("@Foo(a)(b)(x, y) def foo = null", checkTypedTree = false)
+
+  @Test def testDefWithAnn5 = assertPrintedCode("@Foo[A, B](a)(b) @Bar def foo(x: Int) = null", checkTypedTree = false)
+
+  @Test def testDefWithAnn6 = assertPrintedCode("@test1(new test2()) def foo = 42", checkTypedTree = false)
+
+  @Test def testDefWithAnn7 = assertPrintedCode("@`t*` def foo = 42", checkTypedTree = false)
+
+  @Test def testDefWithAnn8 = assertPrintedCode("@throws(classOf[Exception]) def foo = throw new Exception()", checkTypedTree = false)
+
+  @Test def testAnnotated1 = assertResultCode(
+    code = "def foo = 42: @baz")(
+    parsedCode = "def foo = 42: @baz",
+    typedCode = "def foo = (42: @baz)",
+    wrap = true)
+
+  @Test def testAnnotated2 = assertResultCode(
+    code = "def foo = 42: @foo2[A1, B1](4)(2)")(
+    parsedCode = "def foo = 42: @foo2[A1, B1](4)(2)",
+    typedCode = "def foo = (42: @foo2[A1, B1](4)(2))",
+    wrap = true)
+
+  @Test def testAnnotated3 = assertResultCode(
+    code = "def foo = (42: @foo1[A1, B1]): @foo2[A1, B1](4)(2)")(
+    parsedCode = "def foo = (42: @foo1[A1, B1]): @foo2[A1, B1](4)(2)",
+    typedCode = "def foo = ((42: @foo1[A1, B1]): @foo2[A1, B1](4)(2))",
+    wrap = true)
+
+  @Test def testAnnotated4 = assertResultCode(
+    code = "def foo = 42: @foo3[A1, B1](4)(2.0F, new foo1[A1, B1]())")(
+    parsedCode = "def foo = 42: @foo3[A1, B1](4)(2.0F, new foo1[A1, B1]())",
+    typedCode = "def foo = (42: @foo3[A1, B1](4)(2.0F, new foo1[A1, B1]()))",
+    wrap = true)
+
+  @Test def testAnnotated5 = assertPrintedCode(sm"""
+    |{
+    |  val x = 5;
+    |  (x: @unchecked) match {
+    |    case ((_): scala.Int) => true
+    |    case _ => false
+    |  }
+    |}""")
+
+  @Test def testAnnotated8 = assertPrintedCode(sm"""
+    |{
+    |  val x = 5;
+    |  ((x: @unchecked): @foo3(4)(2.0F, new foo1[A1, B1]())) match {
+    |    case _ => true
+    |  }
+    |}""", wrapCode = true)
+}
+
+trait PackagePrintTests {
+  @Test def testPackage1 = assertPrintedCode(sm"""
+    |package foo.bar {
+    |  
+    |}""", checkTypedTree = false)
+
+  @Test def testPackage2 = assertPrintedCode(sm"""
+    |package foo {
+    |  class C
+    |
+    |  object D
+    |}""", checkTypedTree = false)
+
+  //package object foo extends a with b
+  @Test def testPackage3 = assertPrintedCode(sm"""
+    |package foo {
+    |  object `package` extends a with b
+    |}""", checkTypedTree = false)
+
+  //package object foo { def foo; val x = 1 }
+  @Test def testPackage4 = assertPrintedCode(sm"""
+    |package foo {
+    |  object `package` {
+    |    def foo: scala.Unit = ();
+    |    val x = 1
+    |  }
+    |}""", checkTypedTree = false)
+
+  //package object foo extends { val x = 1; type I = Int } with Any
+  @Test def testPackage5 = assertPrintedCode(sm"""
+    |package foo {
+    |  object `package` extends {
+    |    val x = 1;
+    |    type I = Int
+    |  } with AnyRef
+    |}""", checkTypedTree = false)
+}
+
+trait QuasiTreesPrintTests {
+  @Test def testQuasiIdent = assertTreeCode(q"*")("*")
+
+  @Test def testQuasiVal = assertTreeCode(q"val * : Unit = null")("val * : Unit = null")
+
+  @Test def testQuasiDef = assertTreeCode(q"def * : Unit = null")("def * : Unit = null")
+
+  @Test def testQuasiTrait = assertTreeCode(q"trait *")("trait *")
+
+  @Test def testQuasiClass = assertTreeCode(q"class *")("class *")
+
+  @Test def testQuasiClassWithPublicParams = assertTreeCode(q"class X(val x: Int, val s:String)")("class X(val x: Int, val s: String)")
+
+  @Test def testQuasiClassWithParams = assertTreeCode(q"class X(x: Int, s:String)")("class X(x: Int, s: String)")
+
+  @Test def testQuasiObject = assertTreeCode(q"object *")("object *")
+
+  @Test def testQuasiObjectWithBody = assertTreeCode(q"""object X{ def y = "test" }""")(sm"""
+    |object X {
+    |  def y = "test"
+    |}""")
+
+  @Test def testQuasiClassWithBody = assertTreeCode(q"""class X{ def y = "test" }""")(sm"""
+    |class X {
+    |  def y = "test"
+    |}""")
+
+  @Test def testQuasiTraitWithBody = assertTreeCode(q"""trait X{ def y = "test" }""")(sm"""
+    |trait X {
+    |  def y = "test"
+    |}""")
+
+  @Test def testQuasiTraitWithSelfTypeAndBody = assertTreeCode(q"""trait X{ self: Order => def y = "test" }""")(sm"""
+    |trait X { self: Order =>
+    |  def y = "test"
+    |}""")
+
+  @Test def testQuasiTraitWithSelf = assertTreeCode(q"""trait X{ self => def y = "test" }""")(sm"""
+    |trait X { self =>
+    |  def y = "test"
+    |}""")
+
+  @Test def testQuasiCaseClassWithBody = assertTreeCode(q"""case class X() { def y = "test" }""")(sm"""
+    |case class X() {
+    |  def y = "test"
+    |}""")
+
+  @Test def testQuasiCaseClassWithParamsAndBody = assertTreeCode(q"""case class X(x: Int, s: String){ def y = "test" }""")(sm"""
+    |case class X(x: Int, s: String) {
+    |  def y = "test"
+    |}""")
+}
\ No newline at end of file
diff --git a/test/junit/scala/reflect/internal/util/SourceFileTest.scala b/test/junit/scala/reflect/internal/util/SourceFileTest.scala
index fd24d0a..903e705 100644
--- a/test/junit/scala/reflect/internal/util/SourceFileTest.scala
+++ b/test/junit/scala/reflect/internal/util/SourceFileTest.scala
@@ -8,17 +8,14 @@ import org.junit.runners.JUnit4
 @RunWith(classOf[JUnit4])
 class SourceFileTest {
   def lineContentOf(code: String, offset: Int) =
-    new OffsetPosition(new BatchSourceFile("", code), offset).lineContent
-    //Position.offset(new BatchSourceFile("", code), offset).lineContent
+    Position.offset(new BatchSourceFile("", code), offset).lineContent
 
-  /*
   @Test
   def si8205_overflow(): Unit = {
     val file = new BatchSourceFile("", "code no newline")
     // the bug in lineToString counted until MaxValue, and the AIOOBE came from here
     assertFalse(file.isEndOfLine(Int.MaxValue))
   }
-  */
 
   @Test
   def si8205_lineToString(): Unit = {
diff --git a/test/junit/scala/reflect/internal/util/StringOpsTest.scala b/test/junit/scala/reflect/internal/util/StringOpsTest.scala
new file mode 100644
index 0000000..13d3a64
--- /dev/null
+++ b/test/junit/scala/reflect/internal/util/StringOpsTest.scala
@@ -0,0 +1,52 @@
+package scala.reflect.internal.util
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class StringOpsTest {
+  @Test
+  def prefixOfNone(): Unit = {
+    val so = new StringOps { }
+    val ss = Nil
+    val lcp = so longestCommonPrefix ss
+    assert(lcp == "")
+  }
+  @Test
+  def prefixWithEmpty(): Unit = {
+    val so = new StringOps { }
+    val ss = List("abc", "", "abd")
+    val lcp = so longestCommonPrefix ss
+    assert(lcp == "")
+  }
+  @Test
+  def prefixOfOne(): Unit = {
+    val so = new StringOps { }
+    val ss = List("abc")
+    val lcp = so longestCommonPrefix ss
+    assert(lcp == "abc")
+  }
+  @Test
+  def prefixOfMany(): Unit = {
+    val so = new StringOps { }
+    val ss = List("abc", "abd", "abe")
+    val lcp = so longestCommonPrefix ss
+    assert(lcp == "ab")
+  }
+  @Test
+  def prefixOfPrefix(): Unit = {
+    val so = new StringOps { }
+    val ss = List("abc", "abcd")
+    val lcp = so longestCommonPrefix ss
+    assert(lcp == "abc")
+  }
+  @Test
+  def prefixOfPrefixMiddling(): Unit = {
+    val so = new StringOps { }
+    val ss = List("abce", "abc", "abcd")
+    val lcp = so longestCommonPrefix ss
+    assert(lcp == "abc")
+  }
+}
diff --git a/test/junit/scala/reflect/internal/util/WeakHashSetTest.scala b/test/junit/scala/reflect/internal/util/WeakHashSetTest.scala
new file mode 100644
index 0000000..7e3b35c
--- /dev/null
+++ b/test/junit/scala/reflect/internal/util/WeakHashSetTest.scala
@@ -0,0 +1,171 @@
+package scala.reflect.internal.util
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class WeakHashSetTest {
+
+  // a class guaranteed to provide hash collisions
+  case class Collider(x : String) extends Comparable[Collider] with Serializable {
+    override def hashCode = 0
+    def compareTo(y : Collider) = this.x compareTo y.x
+  }
+
+  // basic emptiness check
+  @Test
+  def checkEmpty {
+    val hs = new WeakHashSet[String]()
+    assert(hs.size == 0)
+    hs.diagnostics.fullyValidate
+  }
+
+  // make sure += works
+  @Test
+  def checkPlusEquals {
+    val hs = new WeakHashSet[String]()
+    val elements = List("hello", "goodbye")
+    elements foreach (hs += _)
+    assert(hs.size == 2)
+    assert(hs contains "hello")
+    assert(hs contains "goodbye")
+    hs.diagnostics.fullyValidate
+  }
+
+  // make sure += works when there are collisions
+  @Test
+  def checkPlusEqualsCollisions {
+    val hs = new WeakHashSet[Collider]()
+    val elements = List("hello", "goodbye") map Collider
+    elements foreach (hs += _)
+    assert(hs.size == 2)
+    assert(hs contains Collider("hello"))
+    assert(hs contains Collider("goodbye"))
+    hs.diagnostics.fullyValidate
+  }
+
+  // add a large number of elements to force rehashing and then validate
+  @Test
+  def checkRehashing {
+    val size = 200
+    val hs = new WeakHashSet[String]()
+    val elements = (0 until size).toList map ("a" + _)
+    elements foreach (hs += _)
+    elements foreach {i => assert(hs contains i)}
+    hs.diagnostics.fullyValidate
+  }
+
+  // make sure rehashing works properly when there are collisions
+  @Test
+  def checkRehashCollisions {
+    val size = 200
+    val hs = new WeakHashSet[Collider]()
+    val elements = (0 until size).toList map {x => Collider("a" + x)}
+    elements foreach (hs += _)
+    elements foreach {i => assert(hs contains i)}
+    hs.diagnostics.fullyValidate
+  }
+
+  // test that unreferenced objects are removed
+  // not run in an automated environment because gc behavior can't be relied on
+  //@Test
+  def checkRemoveUnreferencedObjects {
+    val size = 200
+    val hs = new WeakHashSet[Collider]()
+    val elements = (0 until size).toList map {x => Collider("a" + x)}
+    elements foreach (hs += _)
+    // don't keep references to the following elements, so that the gc
+    // can remove them
+    for (i <- 0 until size) {
+      hs += Collider("b" + i)
+    }
+    System.gc()
+    Thread.sleep(1000)
+    assert(hs.size == 200)
+    elements foreach {i => assert(hs contains i)}
+    for (i <- 0 until size) {
+      assert(!(hs contains Collider("b" + i)))
+    }
+    hs.diagnostics.fullyValidate
+  }
+
+  // make sure findOrUpdate returns the originally entered element
+  @Test
+  def checkFindOrUpdate {
+    val size = 200
+    val hs = new WeakHashSet[Collider]()
+    val elements = (0 until size).toList map {x => Collider("a" + x)}
+    elements foreach {x => assert(hs findEntryOrUpdate x eq x)}
+    for (i <- 0 until size) {
+      // when we do a lookup the result should be the same reference we
+      // originally put in
+      assert(hs findEntryOrUpdate(Collider("a" + i)) eq elements(i))
+    }
+    hs.diagnostics.fullyValidate
+  }
+
+  // check -= functionality
+  @Test
+  def checkMinusEquals {
+    val hs = new WeakHashSet[String]()
+    val elements = List("hello", "goodbye")
+    elements foreach (hs += _)
+    hs -= "goodbye"
+    assert(hs.size == 1)
+    assert(hs contains "hello")
+    assert(!(hs contains "goodbye"))
+    hs.diagnostics.fullyValidate
+  }
+
+  // check -= when there are collisions
+  @Test
+  def checkMinusEqualsCollisions {
+    val hs = new WeakHashSet[Collider]
+    val elements = List(Collider("hello"), Collider("goodbye"))
+    elements foreach (hs += _)
+    hs -= Collider("goodbye")
+    assert(hs.size == 1)
+    assert(hs contains Collider("hello"))
+    assert(!(hs contains Collider("goodbye")))
+    hs -= Collider("hello")
+    assert(hs.size == 0)
+    assert(!(hs contains Collider("hello")))
+    hs.diagnostics.fullyValidate
+  }
+
+  // check that the clear method actually cleans everything
+  @Test
+  def checkClear {
+    val size = 200
+    val hs = new WeakHashSet[String]()
+    val elements = (0 until size).toList map ("a" + _)
+    elements foreach (hs += _)
+    hs.clear()
+    assert(hs.size == 0)
+    elements foreach {i => assert(!(hs contains i))}
+    hs.diagnostics.fullyValidate
+  }
+
+  // check that the iterator covers all the contents
+  @Test
+  def checkIterator {
+    val hs = new WeakHashSet[String]()
+    val elements = (0 until 20).toList map ("a" + _)
+    elements foreach (hs += _)
+    assert(hs.iterator.toList.sorted == elements.sorted)
+    hs.diagnostics.fullyValidate
+  }
+
+  // check that the iterator covers all the contents even when there is a collision
+  @Test
+  def checkIteratorCollisions {
+    val hs = new WeakHashSet[Collider]
+    val elements = (0 until 20).toList map {x => Collider("a" + x)}
+    elements foreach (hs += _)
+    assert(hs.iterator.toList.sorted == elements.sorted)
+    hs.diagnostics.fullyValidate
+  }
+
+}
diff --git a/test/junit/scala/reflect/io/ZipArchiveTest.scala b/test/junit/scala/reflect/io/ZipArchiveTest.scala
new file mode 100644
index 0000000..1bcd06f
--- /dev/null
+++ b/test/junit/scala/reflect/io/ZipArchiveTest.scala
@@ -0,0 +1,37 @@
+package scala.reflect.io
+
+import java.io.{IOException, File => JFile}
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class ZipArchiveTest {
+
+  @Test
+  def corruptZip {
+    val f = JFile.createTempFile("test", ".jar")
+    val fza = new FileZipArchive(f)
+    try {
+      fza.iterator
+    } catch {
+      case x: IOException =>
+        assertTrue(x.getMessage, x.getMessage.contains(f.getPath))
+    } finally {
+      f.delete()
+    }
+  }
+
+  @Test
+  def missingFile {
+    val f = new JFile("xxx.does.not.exist")
+    val fza = new FileZipArchive(f)
+    try {
+      fza.iterator
+    } catch {
+      case x: IOException =>
+        assertTrue(x.getMessage, x.getMessage.contains(f.getPath))
+    }
+  }
+}
diff --git a/test/junit/scala/tools/nsc/interpreter/TabulatorTest.scala b/test/junit/scala/tools/nsc/interpreter/TabulatorTest.scala
new file mode 100644
index 0000000..21e338e
--- /dev/null
+++ b/test/junit/scala/tools/nsc/interpreter/TabulatorTest.scala
@@ -0,0 +1,85 @@
+package scala.tools.nsc
+package interpreter
+
+//import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+case class Tabby(width: Int = 80, isAcross: Boolean = false, marginSize: Int = 3) extends Tabulator
+case class VTabby(width: Int = 80, isAcross: Boolean = false, marginSize: Int = 3) extends VariColumnTabulator
+
+@RunWith(classOf[JUnit4])
+class TabulatorTest {
+
+  @Test def oneliner() = {
+    val sut   = Tabby()
+    val items = List("a", "b", "c")
+    val res   = sut tabulate items
+    assert(res.size == 1)
+    assert(res(0).size == 1)
+    assert(res(0)(0) startsWith "a")
+    assert(res(0)(0) endsWith "c")
+  }
+  @Test def twoliner() = {
+    val sut   = Tabby(width = 40)
+    val items = List("a" * 15, "b" * 15, "c" * 15)
+    val res   = sut tabulate items
+    assert(res.size == 2)
+    assert(res(0).size == 2)
+    assert(res(1).size == 2)          // trailing empty strings
+    assert(res(1)(0) startsWith "b")
+  }
+  @Test def twolinerx() = {
+    val sut   = Tabby(width = 40, isAcross = true)
+    val items = List("a" * 15, "b" * 15, "c" * 15)
+    val res   = sut tabulate items
+    assert(res.size == 2)
+    assert(res(0).size == 2)
+    assert(res(1).size == 1)          // no trailing empty strings
+    assert(res(1)(0) startsWith "c")
+  }
+  // before, two 9-width cols don't fit in 20
+  // but now, 5-col and 9-col do fit.
+  @Test def twolinerVariable() = {
+    val sut   = VTabby(width = 20)
+    val items = (1 to 9) map (i => i.toString * i)
+    val rows  = sut tabulate items
+    assert(rows.size == 5)
+    assert(rows(0).size == 2)
+    assert(rows(0)(0).size == 8) // width is 55555 plus margin of 3
+  }
+  @Test def sys() = {
+    val sut   = VTabby(width = 40)
+    val items = List("BooleanProp", "PropImpl", "addShutdownHook", "error",
+                    "process", "CreatorImpl", "ShutdownHookThread", "allThreads",
+                    "exit", "props", "Prop", "SystemProperties",
+                    "env", "package", "runtime")
+    val rows  = sut tabulate items
+    assert(rows.size == 8)
+    assert(rows(0).size == 2)
+    assert(rows(0)(0).size == "ShutdownHookThread".length + sut.marginSize)   // 21
+  }
+  @Test def syswide() = {
+    val sut   = VTabby(width = 120)
+    val items = List("BooleanProp", "PropImpl", "addShutdownHook", "error",
+                    "process", "CreatorImpl", "ShutdownHookThread", "allThreads",
+                    "exit", "props", "Prop", "SystemProperties",
+                    "env", "package", "runtime")
+    val rows  = sut tabulate items
+    assert(rows.size == 2)
+    assert(rows(0).size == 8)
+    assert(rows(0)(0).size == "BooleanProp".length + sut.marginSize)  // 14
+  }
+  @Test def resultFits() = {
+    val sut   = VTabby(width = 10)
+    // each of two lines would fit, but layout is two cols of width six > 10
+    // therefore, should choose ncols = 1
+    val items = List("a", "bcd",
+                    "efg", "h")
+    val rows  = sut tabulate items
+    assert(rows.size == 4)
+    assert(rows(0).size == 1)
+    assert(rows(0)(0).size == "efg".length + sut.marginSize)  // 6
+  }
+}
diff --git a/test/junit/scala/tools/nsc/settings/SettingsTest.scala b/test/junit/scala/tools/nsc/settings/SettingsTest.scala
new file mode 100644
index 0000000..e4b5ecc
--- /dev/null
+++ b/test/junit/scala/tools/nsc/settings/SettingsTest.scala
@@ -0,0 +1,52 @@
+package scala.tools.nsc
+package settings
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import scala.tools.testing.AssertUtil.assertThrows
+
+@RunWith(classOf[JUnit4])
+class SettingsTest {
+  @Test def booleanSettingColon() {
+    def check(args: String*): MutableSettings#BooleanSetting = {
+      val s = new MutableSettings(msg => throw new IllegalArgumentException(msg))
+      val b1 = new s.BooleanSetting("-Ytest-setting", "")
+      s.allSettings += b1
+      val (ok, residual) = s.processArguments(args.toList, processAll = true)
+      assert(residual.isEmpty)
+      b1
+    }
+    assertTrue(check("-Ytest-setting").value)
+    assertTrue(check("-Ytest-setting:true").value)
+    assertTrue(check("-Ytest-setting:TRUE").value)
+    assertFalse(check("-Ytest-setting:false").value)
+    assertFalse(check("-Ytest-setting:FALSE").value)
+    assertThrows[IllegalArgumentException](check("-Ytest-setting:rubbish"))
+  }
+
+  @Test def userSettingsHavePrecedenceOverOptimize() {
+    def check(args: String*): MutableSettings#BooleanSetting = {
+      val s = new MutableSettings(msg => throw new IllegalArgumentException(msg))
+      val (ok, residual) = s.processArguments(args.toList, processAll = true)
+      assert(residual.isEmpty)
+      s.inline // among -optimize
+    }
+    assertTrue(check("-optimise").value)
+    assertFalse(check("-optimise", "-Yinline:false").value)
+    assertFalse(check("-Yinline:false", "-optimise").value)
+  }
+
+  @Test def userSettingsHavePrecedenceOverLint() {
+    def check(args: String*): MutableSettings#BooleanSetting = {
+      val s = new MutableSettings(msg => throw new IllegalArgumentException(msg))
+      val (ok, residual) = s.processArguments(args.toList, processAll = true)
+      assert(residual.isEmpty)
+      s.warnAdaptedArgs // among Xlint
+    }
+    assertTrue(check("-Xlint").value)
+    assertFalse(check("-Xlint", "-Ywarn-adapted-args:false").value)
+    assertFalse(check("-Ywarn-adapted-args:false", "-Xlint").value)
+  }
+}
diff --git a/test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala b/test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala
new file mode 100644
index 0000000..355771b
--- /dev/null
+++ b/test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala
@@ -0,0 +1,67 @@
+package scala.tools.nsc
+package symtab
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.AssertUtil.assertThrows
+import scala.reflect.internal.util.OffsetPosition
+
+@RunWith(classOf[JUnit4])
+class CannotHaveAttrsTest {
+  object symbolTable extends SymbolTableForUnitTesting {
+    object CHA extends CannotHaveAttrs {
+      def canEqual(that: Any): Boolean = ???
+      def productArity: Int = ???
+      def productElement(n: Int): Any = ???
+    }
+    val attrlessTrees = List(CHA, EmptyTree, emptyValDef, pendingSuperCall)
+  }
+  import symbolTable._
+
+  @Test
+  def canHaveAttrsIsFalse =
+    attrlessTrees.foreach { t =>
+      assertFalse(t.canHaveAttrs)
+    }
+
+  @Test
+  def defaultPosAssignment =
+    attrlessTrees.foreach { t =>
+      assertEquals(t.pos, NoPosition)
+      t.pos = NoPosition
+      assertEquals(t.pos, NoPosition)
+      t.setPos(NoPosition)
+      assertEquals(t.pos, NoPosition)
+    }
+
+  @Test
+  def defaultTpeAssignment =
+    attrlessTrees.foreach { t =>
+      assertEquals(t.tpe, NoType)
+      t.tpe = NoType
+      assertEquals(t.tpe, NoType)
+      t.setType(NoType)
+      assertEquals(t.tpe, NoType)
+    }
+
+  @Test
+  def nonDefaultPosAssignmentFails = {
+    val pos = new OffsetPosition(null, 0)
+    attrlessTrees.foreach { t =>
+      assertThrows[IllegalArgumentException] { t.pos = pos }
+      assertThrows[IllegalArgumentException] { t.setPos(pos) }
+    }
+  }
+
+  @Test
+  def nonDefaultTpeAssignmentFails = {
+    val tpe = typeOf[Int]
+    attrlessTrees.foreach { t =>
+      assertThrows[IllegalArgumentException] { t.tpe = tpe }
+      assertThrows[IllegalArgumentException] { t.setType(tpe) }
+    }
+  }
+}
diff --git a/test/junit/scala/tools/nsc/symtab/FreshNameExtractorTest.scala b/test/junit/scala/tools/nsc/symtab/FreshNameExtractorTest.scala
new file mode 100644
index 0000000..cf09abd
--- /dev/null
+++ b/test/junit/scala/tools/nsc/symtab/FreshNameExtractorTest.scala
@@ -0,0 +1,47 @@
+package scala.tools.nsc
+package symtab
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.AssertUtil.assertThrows
+import scala.reflect.internal.util.FreshNameCreator
+
+@RunWith(classOf[JUnit4])
+class FreshNameExtractorTest {
+  object symbolTable extends SymbolTableForUnitTesting
+  import symbolTable._
+
+  val prefixes = List("foo$", "x$", "bar", "bippy$baz$")
+
+  @Test
+  def extractionPreservesPrefix =
+    ("" :: prefixes).foreach { creatorPrefix =>
+      prefixes.foreach { newPrefix =>
+        val Creator = new FreshNameCreator(creatorPrefix)
+        val Extractor = new FreshNameExtractor(creatorPrefix)
+        val Extractor(extractedPrefix) = TermName(Creator.newName(newPrefix))
+        assertEquals(newPrefix, extractedPrefix)
+      }
+    }
+
+  @Test
+  def extractionFailsOnCreatorPrefixMismatch = {
+    val Creator = new FreshNameCreator(prefixes.head)
+    val Extractor = new FreshNameExtractor(prefixes.tail.head)
+    assertThrows[MatchError] {
+      val Extractor(_) = TermName(Creator.newName("foo"))
+    }
+  }
+
+  @Test
+  def extractionsFailsIfNameDoesntEndWithNumber = {
+    val Creator = new FreshNameCreator(prefixes.head)
+    val Extractor = new FreshNameExtractor(prefixes.head)
+    assertThrows[MatchError] {
+      val Extractor(_) = TermName(Creator.newName("foo") + "bar")
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala b/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala
new file mode 100644
index 0000000..4a39cf9
--- /dev/null
+++ b/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala
@@ -0,0 +1,46 @@
+package scala.tools.nsc
+package symtab
+
+import org.junit.Assert._
+import scala.tools.testing.AssertUtil._
+import org.junit.{Ignore, Test}
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class StdNamesTest {
+  object symbolTable extends SymbolTableForUnitTesting
+  import symbolTable._
+  import nme.{SPECIALIZED_SUFFIX, unspecializedName, splitSpecializedName}
+
+  @Test
+  def testNewTermNameInvalid(): Unit = {
+    assertThrows[IllegalArgumentException](newTermName("foo".toCharArray, 0, -1))
+    assertThrows[IllegalArgumentException](newTermName("foo".toCharArray, 0, 0))
+    assertThrows[IllegalArgumentException](newTermName("foo".toCharArray, -1, 1))
+  }
+
+  @Test
+  def testUnspecializedName(): Unit = {
+    def test(expected: Name, nme: Name) {
+      assertEquals(expected, unspecializedName(nme))
+    }
+    test(TermName("Tuple2"), TermName("Tuple2$mcII" + SPECIALIZED_SUFFIX))
+    test(TermName("foo"), TermName("foo$mIcD" + SPECIALIZED_SUFFIX))
+    test(TermName("foo"), TermName("foo$mIc" + SPECIALIZED_SUFFIX))
+    test(nme.EMPTY, TermName(s"T1$SPECIALIZED_SUFFIX"))
+    test(nme.EMPTY, SPECIALIZED_SUFFIX)
+  }
+
+  @Test
+  def testSplitSpecializedName(): Unit = {
+    def test(expected: (Name, String, String), nme: Name) {
+      assertEquals(expected, splitSpecializedName(nme))
+    }
+    test((TermName("Tuple2"), "II", ""), TermName("Tuple2$mcII" + SPECIALIZED_SUFFIX))
+    test((TermName("foo"), "D", "I"), TermName("foo$mIcD" + SPECIALIZED_SUFFIX))
+    test((TermName("foo"), "", "I"), TermName("foo$mIc" + SPECIALIZED_SUFFIX))
+    test((nme.EMPTY, "T1", ""), TermName(s"T1$SPECIALIZED_SUFFIX"))
+    test((nme.EMPTY, "", ""), SPECIALIZED_SUFFIX)
+  }
+}
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
new file mode 100644
index 0000000..25d8c46
--- /dev/null
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
@@ -0,0 +1,98 @@
+package scala.tools.nsc
+package symtab
+
+import scala.reflect.ClassTag
+import scala.reflect.internal.{Phase, NoPhase, SomePhase}
+import scala.tools.util.PathResolver
+import util.ClassPath
+import io.AbstractFile
+
+/**
+ * A complete SymbolTable implementation designed to be used in JUnit tests.
+ *
+ * It enables the `usejavacp` setting so that the classpath of the JUnit runner
+ * is used as the symbol table's classpath.
+ *
+ * This class implements enough logic to make it possible to
+ * initialize definitions and inspect symbols.
+ */
+class SymbolTableForUnitTesting extends SymbolTable {
+  // Members declared in scala.reflect.api.Trees
+  override def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
+  override def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
+  trait TreeCopier extends InternalTreeCopierOps
+  // these should be mocks
+  class StrictTreeCopier extends super.StrictTreeCopier with TreeCopier
+  class LazyTreeCopier extends super.LazyTreeCopier with TreeCopier
+
+  override def isCompilerUniverse: Boolean = true
+  def classPath = new PathResolver(settings).result
+
+  object platform extends backend.Platform {
+    val symbolTable: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
+    lazy val loaders: SymbolTableForUnitTesting.this.loaders.type = SymbolTableForUnitTesting.this.loaders
+    def platformPhases: List[SubComponent] = Nil
+    val classPath: ClassPath[AbstractFile] = new PathResolver(settings).result
+    def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean = true
+    def isMaybeBoxed(sym: Symbol): Boolean = ???
+    def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = ???
+    def externalEquals: Symbol = ???
+    def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]): Unit = ???
+  }
+
+  object loaders extends symtab.SymbolLoaders {
+    val symbolTable: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
+    lazy val platform: symbolTable.platform.type = symbolTable.platform
+    def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol =
+      sym.info.member(name)
+    protected override def compileLate(srcfile: AbstractFile): Unit =
+      sys.error(s"We do not expect compileLate to be called in SymbolTableTest. The srcfile passed in is $srcfile")
+  }
+
+  class GlobalMirror extends Roots(NoSymbol) {
+    val universe: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
+    def rootLoader: LazyType = new loaders.PackageLoader(classPath)
+    override def toString = "compiler mirror"
+  }
+
+  lazy val rootMirror: Mirror = {
+    val rm = new GlobalMirror
+    rm.init()
+    rm.asInstanceOf[Mirror]
+  }
+
+  def settings: Settings = {
+    val s = new Settings
+    // initialize classpath using java classpath
+    s.usejavacp.value = true
+    s
+  }
+
+  // Members declared in scala.reflect.internal.Required
+  def picklerPhase: scala.reflect.internal.Phase = SomePhase
+  def erasurePhase: scala.reflect.internal.Phase = SomePhase
+
+  // Members declared in scala.reflect.internal.SymbolTable
+  def currentRunId: Int = 1
+  def log(msg: => AnyRef): Unit = println(msg)
+  def mirrorThatLoaded(sym: Symbol): Mirror = rootMirror
+  val phases: Seq[Phase] = List(NoPhase, SomePhase)
+  val phaseWithId: Array[Phase] = {
+    val maxId = phases.map(_.id).max
+    val phasesArray = Array.ofDim[Phase](maxId+1)
+    phases foreach { phase =>
+      phasesArray(phase.id) = phase
+    }
+    phasesArray
+  }
+  lazy val treeInfo: scala.reflect.internal.TreeInfo{val global: SymbolTableForUnitTesting.this.type} = ???
+
+  val currentFreshNameCreator = new reflect.internal.util.FreshNameCreator
+
+  phase = SomePhase
+
+  type RuntimeClass = java.lang.Class[_]
+  implicit val RuntimeClassTag: ClassTag[RuntimeClass] = ClassTag[RuntimeClass](classOf[RuntimeClass])
+  implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[GlobalMirror])
+  implicit val TreeCopierTag: ClassTag[TreeCopier] = ClassTag[TreeCopier](classOf[TreeCopier])
+}
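+
+// A minimal usage sketch (illustrative only; the object name below is hypothetical):
+// initialize definitions and inspect a symbol, as promised by the scaladoc above.
+private object SymbolTableForUnitTestingSketch {
+  val table = new SymbolTableForUnitTesting
+  table.definitions.init()
+  // look up a member symbol loaded from the JUnit runner's classpath
+  val headSym = table.definitions.ListClass.info.member(table.TermName("head"))
+}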
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala
new file mode 100644
index 0000000..11e955a
--- /dev/null
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala
@@ -0,0 +1,47 @@
+package scala.tools.nsc
+package symtab
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class SymbolTableTest {
+  object symbolTable extends SymbolTableForUnitTesting
+
+  @Test
+  def initDefinitions = {
+    symbolTable.definitions.init()
+  }
+
+  @Test
+  def basicSubTypeCheck = {
+    symbolTable.definitions.init()
+    val listClassTpe = symbolTable.definitions.ListClass.tpe
+    val seqClassTpe = symbolTable.definitions.SeqClass.tpe
+    assertTrue("List should be subclass of Seq", listClassTpe <:< seqClassTpe)
+  }
+
+  /**
+   * Demonstrates how one can create symbols and types completely
+   * from scratch and perform a subtype check.
+   */
+  @Test
+  def customClassesSubTypeCheck: Unit = {
+    import symbolTable._
+    symbolTable.definitions.init()
+    val rootClass = symbolTable.rootMirror.RootClass
+    val fooSymbol = rootClass.newClassSymbol("Foo": TypeName, NoPosition, 0)
+    val fooType = new ClassInfoType(Nil, EmptyScope, fooSymbol)
+    fooSymbol.info = fooType
+    val barSymbol = rootClass.newClassSymbol("Bar": TypeName, NoPosition, 0)
+    val fooTypeRef = TypeRef(fooSymbol.owner.tpe, fooSymbol, Nil)
+    val barType = new ClassInfoType(List(fooTypeRef), EmptyScope, barSymbol)
+    barSymbol.info = barType
+    assertTrue("Bar should be subclass of Foo", barSymbol.tpe <:< fooSymbol.tpe)
+    assertFalse("Foo should be a superclass of Foo", fooSymbol.tpe <:< barSymbol.tpe)
+  }
+
+}
diff --git a/test/junit/scala/tools/nsc/util/StackTraceTest.scala b/test/junit/scala/tools/nsc/util/StackTraceTest.scala
new file mode 100644
index 0000000..e765424
--- /dev/null
+++ b/test/junit/scala/tools/nsc/util/StackTraceTest.scala
@@ -0,0 +1,159 @@
+
+package scala.tools.nsc.util
+
+import scala.language.reflectiveCalls
+import scala.util._
+import PartialFunction.cond
+import Properties.isJavaAtLeast
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+trait Expecting {
+  /*
+  import org.expecty.Expecty
+  final val expect = new Expecty
+  */
+}
+
+
+@RunWith(classOf[JUnit4])
+class StackTraceTest extends Expecting {
+  // formerly an enum
+  val CausedBy   = "Caused by: "
+  val Suppressed = "Suppressed: "
+
+  // throws
+  def sample = throw new RuntimeException("Point of failure")
+  def sampler: String = sample
+
+  // repackage with message
+  def resample: String = try { sample } catch { case e: Throwable => throw new RuntimeException("resample", e) }
+  def resampler: String = resample
+
+  // simple wrapper
+  def wrapper: String = try { sample } catch { case e: Throwable => throw new RuntimeException(e) }
+  // another onion skin
+  def rewrapper: String = try { wrapper } catch { case e: Throwable => throw new RuntimeException(e) }
+  def rewrapperer: String = rewrapper
+
+  // only an insane wretch would do this
+  def insane: String = try { sample } catch {
+    case e: Throwable =>
+      val t = new RuntimeException(e)
+      e initCause t
+      throw t
+  }
+  def insaner: String = insane
+
+  /** Java 7 */
+  val suppressable = isJavaAtLeast("1.7")
+  type Suppressing = { def addSuppressed(t: Throwable): Unit }
+
+  def repressed: String = try { sample } catch {
+    case e: Throwable =>
+      val t = new RuntimeException("My problem")
+      if (suppressable) {
+        t.asInstanceOf[Suppressing] addSuppressed e
+      }
+      throw t
+  }
+  def represser: String = repressed
+
+  // evaluating s should throw; p trims the stack trace; t is the check applied to the resulting trace string
+  def probe(s: =>String)(p: StackTraceElement => Boolean)(t: String => Unit): Unit = {
+    Try(s) recover { case e => e stackTracePrefixString p } match {
+      case Success(s) => t(s)
+      case Failure(e) => throw e
+    }
+  }
+
+  @Test def showsAllTrace() {
+    probe(sampler)(_ => true) { s =>
+      val res = s.lines.toList
+      /*
+      expect {
+        res.length > 5  // many lines
+        // these expectations may be framework-specific
+        //s contains "sbt.TestFramework"
+        //res.last contains "java.lang.Thread"
+      }
+      */
+      assert (res.length > 5)
+    }
+  }
+  @Test def showsOnlyPrefix() = probe(sample)(_.getMethodName == "sample") { s =>
+    val res = s.lines.toList
+    /*
+    expect {
+      res.length == 3   // summary + one frame + elision
+    }
+    */
+    assert (res.length == 3)
+  }
+  @Test def showsCause() = probe(resampler)(_.getMethodName != "resampler") { s =>
+    val res = s.lines.toList
+    /*
+    expect {
+      res.length == 6   // summary + one frame + elision, caused by + one frame + elision
+      res exists (_ startsWith CausedBy.toString)
+    }
+    */
+    assert (res.length == 6)
+    assert (res exists (_ startsWith CausedBy.toString))
+  }
+  @Test def showsWrappedExceptions() = probe(rewrapperer)(_.getMethodName != "rewrapperer") { s =>
+    val res = s.lines.toList
+    /*
+    expect {
+      res.length == 9   // summary + one frame + elision times three
+      res exists (_ startsWith CausedBy.toString)
+      (res collect {
+        case s if s startsWith CausedBy.toString => s
+      }).size == 2
+    }
+    */
+    assert (res.length == 9)
+    assert (res exists (_ startsWith CausedBy.toString))
+    assert ((res collect {
+        case s if s startsWith CausedBy.toString => s
+      }).size == 2)
+  }
+  @Test def dontBlowOnCycle() = probe(insaner)(_.getMethodName != "insaner") { s =>
+    val res = s.lines.toList
+    /*
+    expect {
+      res.length == 7   // summary + one frame + elision times two with extra frame
+      res exists (_ startsWith CausedBy.toString)
+    }
+    */
+    assert (res.length == 7)
+    assert (res exists (_ startsWith CausedBy.toString))
+  }
+
+  /** Java 7, but shouldn't bomb on Java 6.
+   *
+java.lang.RuntimeException: My problem
+  at scala.tools.nsc.util.StackTraceTest.repressed(StackTraceTest.scala:56)
+  ... 27 elided
+  Suppressed: java.lang.RuntimeException: Point of failure
+    at scala.tools.nsc.util.StackTraceTest.sample(StackTraceTest.scala:29)
+    at scala.tools.nsc.util.StackTraceTest.repressed(StackTraceTest.scala:54)
+    ... 27 more
+  */
+  @Test def showsSuppressed() = probe(represser)(_.getMethodName != "represser") { s =>
+    val res = s.lines.toList
+    if (suppressable) {
+      assert (res.length == 7)
+      assert (res exists (_.trim startsWith Suppressed.toString))
+    }
+    /*
+    expect {
+      res.length == 7
+      res exists (_ startsWith "  " + Suppressed.toString)
+    }
+    */
+  }
+}
diff --git a/test/junit/scala/tools/testing/AssertThrowsTest.scala b/test/junit/scala/tools/testing/AssertThrowsTest.scala
new file mode 100644
index 0000000..a70519e
--- /dev/null
+++ b/test/junit/scala/tools/testing/AssertThrowsTest.scala
@@ -0,0 +1,34 @@
+package scala.tools
+package testing
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import AssertUtil.assertThrows
+
+@RunWith(classOf[JUnit4])
+class AssertThrowsTest {
+  class Foo extends Exception
+  class SubFoo extends Foo
+  class Bar extends Exception
+
+  @Test
+  def catchFoo = assertThrows[Foo] { throw new Foo }
+
+  @Test
+  def catchSubclass = assertThrows[Foo] { throw new SubFoo }
+
+  @Test
+  def rethrowBar =
+    assertTrue("exception wasn't rethrown", {
+      try {
+        assertThrows[Foo] { throw new Bar }
+        false
+      } catch {
+        case bar: Bar => true
+        case e: Throwable => fail(s"expected Bar but got $e"); false
+      }
+    })
+
+}
\ No newline at end of file
diff --git a/test/junit/scala/tools/testing/AssertUtil.scala b/test/junit/scala/tools/testing/AssertUtil.scala
new file mode 100644
index 0000000..9efac64
--- /dev/null
+++ b/test/junit/scala/tools/testing/AssertUtil.scala
@@ -0,0 +1,19 @@
+package scala.tools
+package testing
+
+/** This module contains additional higher-level assert statements
+ *  that are ultimately based on junit.Assert primitives.
+ */
+object AssertUtil {
+  /** Checks whether an exception of type T (or a subclass) is thrown during the evaluation of f.
+   *  Any other exception is re-thrown.
+   */
+  def assertThrows[T <: Exception](f: => Any)(implicit manifest: Manifest[T]): Unit =
+    try f
+    catch {
+      case e: Exception =>
+        val clazz = manifest.erasure.asInstanceOf[Class[T]]
+        if (!clazz.isAssignableFrom(e.getClass))
+          throw e
+    }
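+
+  /** A minimal usage sketch (illustrative only, with a hypothetical failing
+   *  expression); it simply exercises assertThrows as documented above.
+   */
+  private def assertThrowsUsageSketch(): Unit =
+    assertThrows[NumberFormatException] { "not a number".toInt }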
+}
\ No newline at end of file
diff --git a/test/junit/scala/util/TryTest.scala b/test/junit/scala/util/TryTest.scala
new file mode 100644
index 0000000..03604a8
--- /dev/null
+++ b/test/junit/scala/util/TryTest.scala
@@ -0,0 +1,35 @@
+package scala.util
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import org.junit.Assert._
+
+/* Test Try's withFilter method, which was added along with the -Xfuture fix for SI-6455  */
+@RunWith(classOf[JUnit4])
+class TryTest {
+  @Test
+  def withFilterFail(): Unit = {
+    val fail = for (x <- util.Try(1) if x > 1) yield x
+    assert(fail.isFailure)
+  }
+
+  @Test
+  def withFilterSuccess(): Unit = {
+    val success1 = for (x <- util.Try(1) if x >= 1) yield x
+    assertEquals(success1, util.Success(1))
+  }
+
+  @Test
+  def withFilterFlatMap(): Unit = {
+    val successFlatMap = for (x <- util.Try(1) if x >= 1; y <- util.Try(2) if x < y) yield x
+    assertEquals(successFlatMap, util.Success(1))
+  }
+
+  @Test
+  def withFilterForeach(): Unit = {
+    var ok = false
+    for (x <- util.Try(1) if x == 1) ok = x == 1
+    assert(ok)
+  }
+}
\ No newline at end of file
diff --git a/test/junit/scala/util/matching/RegexTest.scala b/test/junit/scala/util/matching/RegexTest.scala
new file mode 100644
index 0000000..d25842c
--- /dev/null
+++ b/test/junit/scala/util/matching/RegexTest.scala
@@ -0,0 +1,30 @@
+
+package scala.util.matching
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class RegexTest {
+  @Test def t8022CharSequence(): Unit = {
+    val full = """.*: (.)$""".r
+    val text = "   When I use this operator: *"
+    // Testing 2.10.x compatibility of the return types of unapplySeq
+    val x :: Nil = full.unapplySeq(text: Any).get
+    val y :: Nil = full.unapplySeq(text: CharSequence).get
+    assertEquals("*", x)
+    assertEquals("*", y)
+  }
+
+  @Test def t8022Match(): Unit = {
+    val R = """(\d)""".r
+    val matchh = R.findFirstMatchIn("a1").get
+    // Testing 2.10.x compatibility of the return types of unapplySeq
+    val x :: Nil = R.unapplySeq(matchh: Any).get
+    val y :: Nil = R.unapplySeq(matchh).get
+    assertEquals("1", x)
+    assertEquals("1", y)
+  }
+}
diff --git a/test/junit/scala/util/matching/regextract-char.scala b/test/junit/scala/util/matching/regextract-char.scala
new file mode 100644
index 0000000..50fdcd9
--- /dev/null
+++ b/test/junit/scala/util/matching/regextract-char.scala
@@ -0,0 +1,58 @@
+
+package scala.util.matching
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import PartialFunction._
+
+/** A Regex can match a single Char.
+ *  If the pattern includes a group,
+ *  the extractor always returns a single char.
+ */
+@RunWith(classOf[JUnit4])
+class CharRegexTest {
+  implicit class Averrable(val b: Boolean) /*extends AnyVal*/ {
+    def yes = assert(b)
+    def no = assert(!b)
+  }
+  val c: Char = 'c'  // "cat"(0)
+  val d: Char = 'D'  // "Dog"(0)
+
+  @Test def comparesGroupCorrectly(): Unit = {
+    val r = """(\p{Lower})""".r
+    cond(c) { case r(x) => true } .yes
+    cond(c) { case r(_) => true } .yes
+    cond(c) { case r(_*) => true } .yes
+    cond(c) { case r() => true } .no
+
+    cond(d) { case r(x) => true } .no
+    cond(d) { case r(_) => true } .no
+    cond(d) { case r(_*) => true } .no
+    cond(d) { case r() => true } .no
+  }
+
+  @Test def comparesNoGroupCorrectly(): Unit = {
+    val rnc = """\p{Lower}""".r
+    cond(c) { case rnc(x) => true } .no
+    cond(c) { case rnc(_) => true } .no
+    cond(c) { case rnc(_*) => true } .yes
+    cond(c) { case rnc() => true } .yes
+
+    cond(d) { case rnc(x) => true } .no
+    cond(d) { case rnc(_) => true } .no
+    cond(d) { case rnc(_*) => true } .no
+    cond(d) { case rnc() => true } .no
+  }
+
+  @Test(expected = classOf[MatchError])
+  def failCorrectly(): Unit = {
+    val headAndTail = """(\p{Lower})([a-z]+)""".r
+    val n = "cat"(0) match {
+      case headAndTail(ht @ _*) => ht.size
+    }
+    assert(false, s"Match size $n")
+  }
+}
diff --git a/test/junit/scala/util/t7265.scala b/test/junit/scala/util/t7265.scala
new file mode 100644
index 0000000..71f085d
--- /dev/null
+++ b/test/junit/scala/util/t7265.scala
@@ -0,0 +1,59 @@
+
+package scala.util
+package test
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.util.PropertiesTrait
+
+/** The java version property uses the spec version
+ *  and must accept every "major.minor" value and fail otherwise.
+ */
+@RunWith(classOf[JUnit4])
+class SpecVersionTest {
+  val sut = new PropertiesTrait {
+    override def javaSpecVersion = "1.7"
+
+    override protected def pickJarBasedOn: Class[_] = ???
+    override protected def propCategory: String = "test"
+
+    // override because of vals like releaseVersion
+    override lazy val scalaProps = new java.util.Properties
+  }
+
+  @Test
+  def comparesCorrectly(): Unit = {
+    assert(sut isJavaAtLeast "1.5")
+    assert(sut isJavaAtLeast "1.6")
+    assert(sut isJavaAtLeast "1.7")
+    assert(!(sut isJavaAtLeast "1.8"))
+    assert(!(sut isJavaAtLeast "1.71"))
+  }
+  @Test(expected = classOf[NumberFormatException])
+  def badVersion(): Unit = {
+    sut isJavaAtLeast "1.a"
+  }
+  @Test(expected = classOf[NumberFormatException])
+  def missingVersion(): Unit = {
+    sut isJavaAtLeast "1"
+  }
+  @Test(expected = classOf[NumberFormatException])
+  def noVersion(): Unit = {
+    sut isJavaAtLeast ""
+  }
+  @Test(expected = classOf[NumberFormatException])
+  def dotOnly(): Unit = {
+    sut isJavaAtLeast "."
+  }
+  @Test(expected = classOf[NumberFormatException])
+  def leadingDot(): Unit = {
+    sut isJavaAtLeast ".5"
+  }
+  @Test(expected = classOf[NumberFormatException])
+  def notASpec(): Unit = {
+    sut isJavaAtLeast "1.7.1"
+  }
+}
diff --git a/test/osgi/src/BasicReflection.scala b/test/osgi/src/BasicReflection.scala
index 8a0a05d..d601f04 100644
--- a/test/osgi/src/BasicReflection.scala
+++ b/test/osgi/src/BasicReflection.scala
@@ -1,10 +1,12 @@
 package tools.test.osgi
 package reflection
 package basic
- 
+
+import scala.language.higherKinds
+
 import org.junit.Assert._
 import org.ops4j.pax.exam.CoreOptions._
- 
+
 import org.junit.Test
 import org.junit.runner.RunWith
 import org.ops4j.pax.exam
@@ -41,15 +43,15 @@ object M
 class BasicReflectionTest extends ScalaOsgiHelper {
 
   @Configuration
-  def config(): Array[exam.Option] = 
+  def config(): Array[exam.Option] =
     justReflectionOptions
 
   // Ensure Pax-exam requires C/M in our module
   def dummy = {
     new C
-    M.toString   
+    M.toString
   }
- 
+
   @Test
   def basicMirrorThroughOsgi(): Unit = {
     // Note for now just assert that we can do this stuff.
@@ -57,10 +59,10 @@ class BasicReflectionTest extends ScalaOsgiHelper {
     val cm = runtimeMirror(classOf[C].getClassLoader)
     val im = cm.reflect(new C)
     assertEquals("Unable to reflect field name!",
-                 "value f1", 
-                 im.reflectField(typeOf[C].member(newTermName("f1")).asTerm).symbol.toString)
+                 "value f1",
+                 im.reflectField(typeOf[C].member(TermName("f1")).asTerm).symbol.toString)
     assertEquals("Unable to reflect value!",
-                 2, 
-                 im.reflectField(typeOf[C].member(newTermName("f1")).asTerm).get)
+                 2,
+                 im.reflectField(typeOf[C].member(TermName("f1")).asTerm).get)
  }
 }
diff --git a/test/osgi/src/ScalaOsgiHelper.scala b/test/osgi/src/ScalaOsgiHelper.scala
index bcdc5c0..084afe8 100644
--- a/test/osgi/src/ScalaOsgiHelper.scala
+++ b/test/osgi/src/ScalaOsgiHelper.scala
@@ -1,5 +1,5 @@
 package tools.test.osgi
- 
+
 import org.ops4j.pax.exam.CoreOptions._
 import org.ops4j.pax.exam
 import java.io.File
@@ -12,7 +12,7 @@ trait ScalaOsgiHelper {
   }
 
   private def filteredBundleFiles(names: String*): Array[exam.Option] =
-     for(bundle <- allBundleFiles; if names exists (bundle.getName contains))
+     for(bundle <- allBundleFiles; if names exists (bundle.getName contains _))
      yield makeBundle(bundle)
 
   private def makeBundle(file: File): exam.Option =
@@ -21,6 +21,8 @@ trait ScalaOsgiHelper {
   def standardOptions: Array[exam.Option]  = {
     val bundles = (allBundleFiles map makeBundle)
     bundles ++ Array[exam.Option](felix(), equinox(), junitBundles())
+    // to change the local repo used (for some operations, but not all -- which is why I didn't bother):
+    // systemProperty("org.ops4j.pax.url.mvn.localRepository").value(sys.props("maven.repo.local")))
   }
 
   def justReflectionOptions: Array[exam.Option]  = {
@@ -32,5 +34,5 @@ trait ScalaOsgiHelper {
     val bundles = filteredBundleFiles("scala-library")
     bundles ++ Array[exam.Option](felix(), equinox(), junitBundles())
   }
- 
+
 }
diff --git a/test/partest b/test/partest
index 842d290..f396459 100755
--- a/test/partest
+++ b/test/partest
@@ -1,15 +1,36 @@
-#!/bin/sh
-
+#!/usr/bin/env bash
+#
 ##############################################################################
-# Scala test runner 2.8.0
+# Scala test runner 2.10.0
 ##############################################################################
-# (c) 2002-2011 LAMP/EPFL
+# (c) 2002-2013 LAMP/EPFL
 #
 # This is free software; see the distribution for copying conditions.
 # There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
 # PARTICULAR PURPOSE.
 ##############################################################################
 
+findScalaHome () {
+  # see SI-2092 and SI-5792
+  local source="${BASH_SOURCE[0]}"
+  while [ -h "$source" ] ; do
+    local linked="$(readlink "$source")"
+    local dir="$( cd -P $(dirname "$source") && cd -P $(dirname "$linked") && pwd )"
+    source="$dir/$(basename "$linked")"
+  done
+  ( ( cd -P "$(dirname "$source")/.." > /dev/null ) && pwd )
+}
+
+# Use tput to detect color-capable terminal.
+term_colors=$(tput colors 2>/dev/null)
+if [[ $? == 0 ]] && [[ $term_colors -gt 2 ]]; then
+  git_diff_options="--color=always --word-diff"
+  color_opts="-Dpartest.colors=$term_colors"
+else
+  unset color_opts
+  git_diff_options="--nocolor"
+fi
+
 cygwin=false;
 darwin=false;
 case "`uname`" in
@@ -18,49 +39,60 @@ case "`uname`" in
 esac
 
 # Finding the root folder for this Scala distribution
-SOURCE=$0;
-SCRIPT=`basename "$SOURCE"`;
-while [ -h "$SOURCE" ]; do
-    SCRIPT=`basename "$SOURCE"`;
-    LOOKUP=`ls -ld "$SOURCE"`;
-    TARGET=`expr "$LOOKUP" : '.*-> \(.*\)$'`;
-    if expr "${TARGET:-.}/" : '/.*/$' > /dev/null; then
-        SOURCE=${TARGET:-.};
-    else
-        SOURCE=`dirname "$SOURCE"`/${TARGET:-.};
-    fi;
-done;
-
-# see #2092
-SCALA_HOME=`dirname "$SOURCE"`
-SCALA_HOME=`cd "$SCALA_HOME"; pwd -P`
-SCALA_HOME=`cd "$SCALA_HOME"/..; pwd`
+SCALA_HOME="$(findScalaHome)"
 
 if $cygwin; then
     SCALA_HOME=`cygpath --windows --short-name "$SCALA_HOME"`
     SCALA_HOME=`cygpath --unix "$SCALA_HOME"`
 fi
 
-# Constructing the extension classpath
-EXT_CLASSPATH=""
-if [ -z "$EXT_CLASSPATH" ] ; then
-    if [ -f "$SCALA_HOME/lib/scala-partest.jar" ] ; then
-        for ext in "$SCALA_HOME"/lib/* ; do
-            if [ -z "$EXT_CLASSPATH" ] ; then
-                EXT_CLASSPATH="$ext"
-            else
-                EXT_CLASSPATH="$EXT_CLASSPATH:$ext"
-            fi
-        done
-    elif [ -f "$SCALA_HOME/build/pack/lib/scala-partest.jar" ] ; then
-        for lib in `echo "scala-partest scala-library scala-reflect scala-compiler diffutils"`; do
-            ext="$SCALA_HOME/build/pack/lib/$lib.jar"
-            if [ -z "$EXT_CLASSPATH" ] ; then
-                EXT_CLASSPATH="$ext"
-            else
-                EXT_CLASSPATH="$EXT_CLASSPATH:$ext"
-            fi
-        done
+# Let ant construct the classpath used to run partest (downloading partest from maven if necessary)
+# PARTEST_CLASSPATH=""
+if [ -z "$PARTEST_CLASSPATH" ] ; then
+    if [ ! -f "$SCALA_HOME/build/pack/partest.properties" ] ; then
+        (cd "$SCALA_HOME" && ant -q test.suite.init) # builds pack, downloads partest and writes classpath to build/pack/partest.properties
+    fi
+
+    PARTEST_CLASSPATH=$( cat "$SCALA_HOME/build/pack/partest.properties" | grep partest.classpath | sed -e 's/\\:/:/g' | cut -f2- -d= )
+
+    # sanity check, disabled to save time
+    # $( javap -classpath $PARTEST_CLASSPATH scala.tools.partest.nest.NestRunner &> /dev/null ) || unset PARTEST_CLASSPATH
+fi
+
+# if [ -z "$PARTEST_CLASSPATH" ] ; then
+#     if [ -f "$SCALA_HOME/lib/scala-partest.jar" ] ; then
+#         for ext in "$SCALA_HOME"/lib/* ; do
+#             if [ -z "$PARTEST_CLASSPATH" ] ; then
+#                 PARTEST_CLASSPATH="$ext"
+#             else
+#                 PARTEST_CLASSPATH="$PARTEST_CLASSPATH:$ext"
+#             fi
+#         done
+#     elif [ -f "$SCALA_HOME/build/pack/lib/scala-partest.jar" ] ; then
+#         for lib in `echo "scala-partest scala-library scala-parser-combinators scala-xml scala-reflect scala-compiler diffutils"`; do
+#             ext="$SCALA_HOME/build/pack/lib/$lib.jar"
+#             if [ -z "$PARTEST_CLASSPATH" ] ; then
+#                 PARTEST_CLASSPATH="$ext"
+#             else
+#                 PARTEST_CLASSPATH="$PARTEST_CLASSPATH:$ext"
+#             fi
+#         done
+#     fi
+# fi
+
+# Locate a javac command
+# Try: JAVA_HOME, sibling to specific JAVACMD, or PATH
+# Don't fail if there is no javac, since not all tests require it.
+if [ -z "$JAVAC_CMD" ] ; then
+    if [ -n "${JAVA_HOME}" ] && [ -f "${JAVA_HOME}/bin/javac" ] ; then
+        JAVAC_CMD="${JAVA_HOME}/bin/javac"
+    fi
+    if [ -z "$JAVAC_CMD" ] && [ -n "$JAVACMD" ] ; then
+        JDIR=`dirname "${JAVACMD}"`
+        JAVAC_CMD="${JDIR}/javac"
+    fi
+    if [ -z "$JAVAC_CMD" ] ; then
+        JAVAC_CMD=`type -p javac`
     fi
 fi
 
@@ -70,26 +102,45 @@ if $cygwin; then
     else
         format=windows
     fi
+    if [ -n "${JAVA_HOME}" ] ; then
+        JAVA_HOME=`cygpath --$format "$JAVA_HOME"`
+    fi
+    if [ -n "${JAVACMD}" ] ; then
+        JAVACMD=`cygpath --$format "$JAVACMD"`
+    fi
+    if [ -n "${JAVAC_CMD}" ] ; then
+        JAVAC_CMD=`cygpath --$format "$JAVAC_CMD"`
+    fi
     SCALA_HOME=`cygpath --$format "$SCALA_HOME"`
-    EXT_CLASSPATH=`cygpath --path --$format "$EXT_CLASSPATH"`
+    PARTEST_CLASSPATH=`cygpath --path --$format "$PARTEST_CLASSPATH"`
 fi
 
-# last arg wins, so if JAVA_OPTS already contains one of these options
-# the supplied argument will be used.
+# last arg wins, so if JAVA_OPTS already contains -Xmx or -Xms the
+# supplied argument will be used.
 # At this writing it is reported test/partest --all requires 108m permgen.
 JAVA_OPTS="-Xmx1024M -Xms64M -XX:MaxPermSize=128M $JAVA_OPTS"
 
+# the ant task doesn't supply any options by default,
+# so don't do that here either -- note that you may want to pass -optimise
+# to mimic what happens during nightlies.
+# [ -n "$SCALAC_OPTS" ] || SCALAC_OPTS="-deprecation"
+
 partestDebugStr=""
 if [ ! -z "${PARTEST_DEBUG}" ] ; then
   partestDebugStr="-Dpartest.debug=${PARTEST_DEBUG}"
 fi
 
-${JAVACMD:=java} \
-  $JAVA_OPTS -cp "$EXT_CLASSPATH" \
+# note that variables which may intentionally be empty must not
+# be quoted: otherwise an empty string will appear as a command line
+# argument, and java will think that is the program to run.
+"${JAVACMD:=java}" \
+  $JAVA_OPTS -cp "$PARTEST_CLASSPATH" \
   ${partestDebugStr} \
+  ${color_opts} \
+  -Dfile.encoding=UTF-8 \
   -Dscala.home="${SCALA_HOME}" \
   -Dpartest.javacmd="${JAVACMD}" \
   -Dpartest.java_opts="${JAVA_OPTS}" \
   -Dpartest.scalac_opts="${SCALAC_OPTS}" \
-  -Dpartest.javac_cmd="${JAVA_HOME}/bin/javac" \
-  scala.tools.partest.nest.NestRunner "$@"
+  -Dpartest.javac_cmd="${JAVAC_CMD}" \
+  scala.tools.partest.nest.ConsoleRunner "$@"
diff --git a/test/partest.bat b/test/partest.bat
index b64347c..1806e80 100755
--- a/test/partest.bat
+++ b/test/partest.bat
@@ -3,7 +3,7 @@
 rem ##########################################################################
 rem # Scala code runner 2.9.1.final
 rem ##########################################################################
-rem # (c) 2002-2011 LAMP/EPFL
+rem # (c) 2002-2013 LAMP/EPFL
 rem #
 rem # This is free software; see the distribution for copying conditions.
 rem # There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
diff --git a/test/pending/continuations-pos/t3620.scala b/test/pending/continuations-pos/t3620.scala
deleted file mode 100644
index 8496ae2..0000000
--- a/test/pending/continuations-pos/t3620.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-import scala.collection.mutable.HashMap
-import scala.util.continuations._
-
-object Test extends Application {
-
-  class Store[K,V] {
-
-    trait Waiting {
-      def key: K
-      def inform(value: V): Unit
-    }
-
-    private val map = new HashMap[K, V]
-    private var waiting: List[Waiting] = Nil
-
-    def waitFor(k: K, f: (V => Unit)) {
-      map.get(k) match {
-        case Some(v) => f(v)
-        case None => {
-          val w = new Waiting {
-            def key = k
-            def inform(v: V) = f(v)
-          }
-          waiting = w :: waiting
-        }
-      }
-    }
-
-
-    def add(key: K, value: V) {
-      map(key) = value
-      val p = waiting.partition(_.key == key)
-      waiting = p._2
-      p._1.foreach(_.inform(value))
-    }
-
-    def required(key: K) = {
-      shift {
-        c: (V => Unit) => {
-          waitFor(key, c)
-        }
-      }
-    }
-
-    def option(key: Option[K]) = {
-      shift {
-        c: (Option[V] => Unit) => {
-          key match {
-            case Some(key) => waitFor(key, (v: V) => c(Some(v)))
-            case None => c(None)
-          }
-
-        }
-      }
-    }
-
-  }
-
-  val store = new Store[String, Int]
-
-  def test(p: Option[String]): Unit = {
-    reset {
-      // uncommenting the following two lines makes the compiler happy!
-//      val o = store.option(p)
-//      println(o)
-      val i = store.option(p).getOrElse(1)
-      println(i)
-    }
-  }
-
-  test(Some("a"))
-
-}
diff --git a/test/pending/continuations-run/example0.scala b/test/pending/continuations-run/example0.scala
deleted file mode 100644
index de5ea54..0000000
--- a/test/pending/continuations-run/example0.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-// $Id$
-
-object Test {
-  
-  def main(args: Array[String]): Any = {
-    examples.continuations.Test0.main(args)
-  }
-  
-}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example1.scala b/test/pending/continuations-run/example1.scala
deleted file mode 100644
index e31d6af..0000000
--- a/test/pending/continuations-run/example1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-// $Id$
-
-object Test {
-  
-  def main(args: Array[String]): Any = {
-    examples.continuations.Test1.main(args)
-  }
-  
-}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example16.scala b/test/pending/continuations-run/example16.scala
deleted file mode 100644
index 561f0ab..0000000
--- a/test/pending/continuations-run/example16.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-// $Id$
-
-object Test {
-  
-  def main(args: Array[String]): Any = {
-    examples.continuations.Test16Printf.main(args)
-  }
-  
-}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example2.scala b/test/pending/continuations-run/example2.scala
deleted file mode 100644
index 730f7cc..0000000
--- a/test/pending/continuations-run/example2.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-// $Id$
-
-object Test {
-  
-  def main(args: Array[String]): Any = {
-    examples.continuations.Test2.main(args)
-  }
-  
-}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example3.scala b/test/pending/continuations-run/example3.scala
deleted file mode 100644
index 41cf1cc..0000000
--- a/test/pending/continuations-run/example3.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-// $Id$
-
-object Test {
-  
-  def main(args: Array[String]): Any = {
-    examples.continuations.Test3.main(args)
-  }
-  
-}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example4.scala b/test/pending/continuations-run/example4.scala
deleted file mode 100644
index adcc7aa..0000000
--- a/test/pending/continuations-run/example4.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-// $Id$
-
-object Test {
-  
-  def main(args: Array[String]): Any = {
-    examples.continuations.Test4.main(args)
-  }
-  
-}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example5.scala b/test/pending/continuations-run/example5.scala
deleted file mode 100644
index 241e8cd..0000000
--- a/test/pending/continuations-run/example5.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-// $Id$
-
-object Test {
-  
-  def main(args: Array[String]): Any = {
-    examples.continuations.Test5.main(args)
-  }
-  
-}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example6.scala b/test/pending/continuations-run/example6.scala
deleted file mode 100644
index 00f84fc..0000000
--- a/test/pending/continuations-run/example6.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-// $Id$
-
-object Test {
-  
-  def main(args: Array[String]): Any = {
-    examples.continuations.Test6.main(args)
-  }
-  
-}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example7.scala b/test/pending/continuations-run/example7.scala
deleted file mode 100644
index 64abc6d..0000000
--- a/test/pending/continuations-run/example7.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-// $Id$
-
-object Test {
-  
-  def main(args: Array[String]): Any = {
-    examples.continuations.Test7.main(args)
-  }
-  
-}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example8.scala b/test/pending/continuations-run/example8.scala
deleted file mode 100644
index a5f953d..0000000
--- a/test/pending/continuations-run/example8.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-// $Id$
-
-object Test {
-  
-  def main(args: Array[String]): Any = {
-    examples.continuations.Test8.main(args)
-  }
-  
-}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example9.scala b/test/pending/continuations-run/example9.scala
deleted file mode 100644
index 09d792c..0000000
--- a/test/pending/continuations-run/example9.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-// $Id$
-
-object Test {
-  
-  def main(args: Array[String]): Any = {
-    examples.continuations.Test9Monads.main(args)
-  }
-  
-}
\ No newline at end of file
diff --git a/test/pending/continuations-run/foreach.check b/test/pending/continuations-run/foreach.check
deleted file mode 100644
index 9bab7a2..0000000
--- a/test/pending/continuations-run/foreach.check
+++ /dev/null
@@ -1,4 +0,0 @@
-1
-2
-3
-enough is enough
\ No newline at end of file
diff --git a/test/pending/continuations-run/foreach.scala b/test/pending/continuations-run/foreach.scala
deleted file mode 100644
index 76823e7..0000000
--- a/test/pending/continuations-run/foreach.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-import scala.util.continuations.Loops._
-
-object Test {
-  
-  def main(args: Array[String]): Any = {
-    
-    
-    reset {
-      
-      val list = List(1,2,3,4,5)
-      
-      for (x <- list.suspendable) {
-        
-        shift { k: (Unit => Unit) =>
-          println(x)
-          if (x < 3)
-            k()
-          else
-            println("enough is enough")
-        }
-        
-      }
-      
-    }
-    
-    
-  }
-  
-}
\ No newline at end of file
diff --git a/test/pending/junit/scala/util/t7265.scala b/test/pending/junit/scala/util/t7265.scala
deleted file mode 100644
index 3b8fa80..0000000
--- a/test/pending/junit/scala/util/t7265.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-
-package scala.util
-package test
-
-import org.junit.Assert._
-import org.junit.Test
-import org.junit.runner.RunWith
-import org.junit.runners.JUnit4
-
-import scala.util.PropertiesTrait
-
-/** The java version property uses the spec version
- *  and must work for all "major.minor" and fail otherwise.
- */
- at RunWith(classOf[JUnit4])
-class SpecVersionTest {
-  val sut = new PropertiesTrait {
-    override def javaSpecVersion = "1.7"
-
-    override protected def pickJarBasedOn: Class[_] = ???
-    override protected def propCategory: String = "test"
-
-    // override because of vals like releaseVersion
-    override lazy val scalaProps = new java.util.Properties
-  }
-
-  @Test
-  def comparesCorrectly(): Unit = {
-    assert(sut isJavaAtLeast "1.5")
-    assert(sut isJavaAtLeast "1.6")
-    assert(sut isJavaAtLeast "1.7")
-    assert(!(sut isJavaAtLeast "1.8"))
-  }
-  @Test(expected = classOf[NumberFormatException])
-  def badVersion(): Unit = {
-    sut isJavaAtLeast "1.a"
-  }
-  @Test(expected = classOf[NumberFormatException])
-  def missingVersion(): Unit = {
-    sut isJavaAtLeast "1"
-  }
-  @Test(expected = classOf[NumberFormatException])
-  def notASpec(): Unit = {
-    sut isJavaAtLeast "1.7.1"
-  }
-}
diff --git a/test/pending/jvm/cf-attributes.scala b/test/pending/jvm/cf-attributes.scala
index 9e0e9d9..f4964b6 100644
--- a/test/pending/jvm/cf-attributes.scala
+++ b/test/pending/jvm/cf-attributes.scala
@@ -52,14 +52,14 @@ object anonymousFunctions {
 }
 
 object anonymousClasses {
-  //InnerClass: 
+  //InnerClass:
   // public abstract #_= #_ of #_; //Foo=class anonymousClasses$Foo of class anonymousClasses$
   // public abstract #_= #_ of #_; //Foo$class=class anonymousClasses$Foo$class of class anonymousClasses$
   trait Foo {
     def foo() { println("foo"); }
     override def toString = getClass.getName
   }
-  //InnerClass: 
+  //InnerClass:
   // public final #_; //class anonymousClasses$$anon$1 of class anonymousClasses$
   val x = new Foo() {
     override def foo() { println("foo (overriden)"); }
@@ -88,16 +88,16 @@ trait Test1 {
 
 trait Test2 {
   @throws(classOf[Exception])
-  def printInnerClasses(cls: Class[_]) {
-    import java.io._, ch.epfl.lamp.fjbg._
-    val fjbgContext = new FJBGContext(49, 0)
-    val outDir = System.getProperty("partest.output", "cf-attributes.obj")
-    val fileName = outDir+File.separator+cls.getName+".class"
-    val in = new DataInputStream(new FileInputStream(fileName))
-    val jclass = fjbgContext.JClass(in)
-    println(jclass.getInnerClasses)
-    in.close()
-  }
+  // def printInnerClasses(cls: Class[_]) {
+  //   import java.io._, ch.epfl.lamp.fjbg._
+  //   val fjbgContext = new FJBGContext(49, 0)
+  //   val outDir = System.getProperty("partest.output", "cf-attributes.obj")
+  //   val fileName = outDir+File.separator+cls.getName+".class"
+  //   val in = new DataInputStream(new FileInputStream(fileName))
+  //   val jclass = fjbgContext.JClass(in)
+  //   println(jclass.getInnerClasses)
+  //   in.close()
+  // }
   def printClass(name: String) {
     try { printClass(Class.forName(name)) }
     catch { case e: Exception => println(e) }
@@ -105,7 +105,7 @@ trait Test2 {
   def printClass(cls: Class[_]) {
     println("\n[[ "+cls.getName+" ]]");
     try { printInnerClasses(cls) }
-    catch { case e: Exception => println(e) }    
+    catch { case e: Exception => println(e) }
   }
 }
 
diff --git a/test/pending/neg/macro-invalidusage-badbounds-b/Impls_1.scala b/test/pending/neg/macro-invalidusage-badbounds-b/Impls_1.scala
index 89020de..be47d5c 100644
--- a/test/pending/neg/macro-invalidusage-badbounds-b/Impls_1.scala
+++ b/test/pending/neg/macro-invalidusage-badbounds-b/Impls_1.scala
@@ -1,5 +1,5 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo[U <: String](c: Ctx) = ???
+  def foo[U <: String](c: Context) = ???
 }
diff --git a/test/pending/neg/plugin-after-terminal.flags b/test/pending/neg/plugin-after-terminal.flags
deleted file mode 100644
index 6a44376..0000000
--- a/test/pending/neg/plugin-after-terminal.flags
+++ /dev/null
@@ -1,2 +0,0 @@
--Xplugin:files/neg/plugin-after-terminal/lib/plugins.jar
-
diff --git a/test/pending/neg/plugin-after-terminal/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-after-terminal/lib/plugins.jar.desired.sha1
deleted file mode 100644
index 3e382f3..0000000
--- a/test/pending/neg/plugin-after-terminal/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f174c50c4363c492362a05c72dd45b0da18fdcd8 ?plugins.jar
diff --git a/test/pending/neg/plugin-after-terminal/misc/build.sh b/test/pending/neg/plugin-after-terminal/misc/build.sh
deleted file mode 100755
index 8899009..0000000
--- a/test/pending/neg/plugin-after-terminal/misc/build.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-SCALAC="../../../../build/pack/bin/scalac -deprecation -cp ../../../../build/quick/classes/compiler/"
-
-BASE=`pwd`
-
-if [[ -d "${BASE}/src" ]] ; then
-
-    mkdir -p build
-    ${SCALAC} -d build src/*.scala
-    jar cf lib/plugins.jar -C misc/ scalac-plugin.xml -C build .
-    rm -rf build
-fi
-
diff --git a/test/pending/neg/plugin-after-terminal/src/ThePlugin.scala b/test/pending/neg/plugin-after-terminal/src/ThePlugin.scala
deleted file mode 100644
index 2a46073..0000000
--- a/test/pending/neg/plugin-after-terminal/src/ThePlugin.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package scala.test.plugins
-
-import scala.tools.nsc
-import nsc.Global
-import nsc.Phase
-import nsc.plugins.Plugin
-import nsc.plugins.PluginComponent
-
-class ThePlugin(val global: Global) extends Plugin {
-  import global._
-
-  val name = "afterterminal"
-  val description = "Declares one plugin that wants to be after the terminal phase"
-  val components = List[PluginComponent](thePhase)
-  
-  private object thePhase extends PluginComponent {
-    val global = ThePlugin.this.global
-
-    val runsAfter = List[String]("terminal")
-
-    val phaseName = ThePlugin.this.name
-
-    def newPhase(prev: Phase) = new ThePhase(prev)    
-  }
-  
-  private class ThePhase(prev: Phase) extends Phase(prev) {
-    def name = ThePlugin.this.name
-    def run {}
-  }
-}
-
diff --git a/test/pending/neg/plugin-after-terminal/testsource.scala b/test/pending/neg/plugin-after-terminal/testsource.scala
deleted file mode 100644
index 519d162..0000000
--- a/test/pending/neg/plugin-after-terminal/testsource.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends Application {
-  println("afterterminal")
-}
-
diff --git a/test/pending/neg/plugin-before-parser.flags b/test/pending/neg/plugin-before-parser.flags
deleted file mode 100644
index 6325309..0000000
--- a/test/pending/neg/plugin-before-parser.flags
+++ /dev/null
@@ -1,2 +0,0 @@
--Xplugin:files/neg/plugin-before-parser/lib/plugins.jar
-
diff --git a/test/pending/neg/plugin-before-parser/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-before-parser/lib/plugins.jar.desired.sha1
deleted file mode 100644
index e82eed7..0000000
--- a/test/pending/neg/plugin-before-parser/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d7b100ad483484b598b7cd643424bd2e33898a0d ?plugins.jar
diff --git a/test/pending/neg/plugin-before-parser/misc/build.sh b/test/pending/neg/plugin-before-parser/misc/build.sh
deleted file mode 100755
index 8899009..0000000
--- a/test/pending/neg/plugin-before-parser/misc/build.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-SCALAC="../../../../build/pack/bin/scalac -deprecation -cp ../../../../build/quick/classes/compiler/"
-
-BASE=`pwd`
-
-if [[ -d "${BASE}/src" ]] ; then
-
-    mkdir -p build
-    ${SCALAC} -d build src/*.scala
-    jar cf lib/plugins.jar -C misc/ scalac-plugin.xml -C build .
-    rm -rf build
-fi
-
diff --git a/test/pending/neg/plugin-before-parser/misc/scalac-plugin.xml b/test/pending/neg/plugin-before-parser/misc/scalac-plugin.xml
deleted file mode 100644
index 90ff27d..0000000
--- a/test/pending/neg/plugin-before-parser/misc/scalac-plugin.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-<plugin>
-  <name>beforeparser</name>
-  <classname>scala.test.plugins.ThePlugin</classname>
-</plugin>
-
diff --git a/test/pending/neg/plugin-before-parser/src/ThePlugin.scala b/test/pending/neg/plugin-before-parser/src/ThePlugin.scala
deleted file mode 100644
index 7ca8966..0000000
--- a/test/pending/neg/plugin-before-parser/src/ThePlugin.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package scala.test.plugins
-
-import scala.tools.nsc
-import nsc.Global
-import nsc.Phase
-import nsc.plugins.Plugin
-import nsc.plugins.PluginComponent
-
-class ThePlugin(val global: Global) extends Plugin {
-  import global._
-
-  val name = "beforeparser"
-  val description = "Declares one plugin that wants to be before the parser phase"
-  val components = List[PluginComponent](thePhase)
-  
-  private object thePhase extends PluginComponent {
-    val global = ThePlugin.this.global
-
-    val runsAfter = List[String]()
-    override val runsBefore = List[String]("parser")
-
-    val phaseName = ThePlugin.this.name
-
-    def newPhase(prev: Phase) = new ThePhase(prev)    
-  }
-  
-  private class ThePhase(prev: Phase) extends Phase(prev) {
-    def name = ThePlugin.this.name
-    def run {}
-  }
-}
-
diff --git a/test/pending/neg/plugin-before-parser/testsource.scala b/test/pending/neg/plugin-before-parser/testsource.scala
deleted file mode 100644
index 9928aaa..0000000
--- a/test/pending/neg/plugin-before-parser/testsource.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends Application {
-  println("beforeparser")
-}
-
diff --git a/test/pending/neg/plugin-cyclic-dependency.check b/test/pending/neg/plugin-cyclic-dependency.check
deleted file mode 100644
index a29bc3f..0000000
--- a/test/pending/neg/plugin-cyclic-dependency.check
+++ /dev/null
@@ -1,2 +0,0 @@
-error: fatal error: Cycle in compiler phase dependencies detected, phase cyclicdependency1 reacted twice!
-one error found
diff --git a/test/pending/neg/plugin-cyclic-dependency.flags b/test/pending/neg/plugin-cyclic-dependency.flags
deleted file mode 100644
index 8716aaa..0000000
--- a/test/pending/neg/plugin-cyclic-dependency.flags
+++ /dev/null
@@ -1,2 +0,0 @@
--Xplugin:files/neg/plugin-cyclic-dependency/lib/plugins.jar
-
diff --git a/test/pending/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha1
deleted file mode 100644
index 7e565e9..0000000
--- a/test/pending/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7e6be9e33a87194e7061f94f6be115619f91ada2 ?plugins.jar
diff --git a/test/pending/neg/plugin-cyclic-dependency/misc/build.sh b/test/pending/neg/plugin-cyclic-dependency/misc/build.sh
deleted file mode 100755
index 8899009..0000000
--- a/test/pending/neg/plugin-cyclic-dependency/misc/build.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-SCALAC="../../../../build/pack/bin/scalac -deprecation -cp ../../../../build/quick/classes/compiler/"
-
-BASE=`pwd`
-
-if [[ -d "${BASE}/src" ]] ; then
-
-    mkdir -p build
-    ${SCALAC} -d build src/*.scala
-    jar cf lib/plugins.jar -C misc/ scalac-plugin.xml -C build .
-    rm -rf build
-fi
-
diff --git a/test/pending/neg/plugin-cyclic-dependency/misc/scalac-plugin.xml b/test/pending/neg/plugin-cyclic-dependency/misc/scalac-plugin.xml
deleted file mode 100644
index 90ff27d..0000000
--- a/test/pending/neg/plugin-cyclic-dependency/misc/scalac-plugin.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-<plugin>
-  <name>beforeparser</name>
-  <classname>scala.test.plugins.ThePlugin</classname>
-</plugin>
-
diff --git a/test/pending/neg/plugin-cyclic-dependency/src/ThePlugin.scala b/test/pending/neg/plugin-cyclic-dependency/src/ThePlugin.scala
deleted file mode 100644
index bd94ce6..0000000
--- a/test/pending/neg/plugin-cyclic-dependency/src/ThePlugin.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-package scala.test.plugins
-
-import scala.tools.nsc
-import nsc.Global
-import nsc.Phase
-import nsc.plugins.Plugin
-import nsc.plugins.PluginComponent
-
-class ThePlugin(val global: Global) extends Plugin {
-  import global._
-
-  val name = "cyclicdependency"
-  val description = "Declares two phases that have a cyclic dependency"
-  val components = List[PluginComponent](thePhase1,thePhase2)
-  
-  private object thePhase1 extends PluginComponent {
-    val global = ThePlugin.this.global
-
-    val runsAfter = List[String]("tailcalls","cyclicdependency2")
-
-    val phaseName = ThePlugin.this.name + "1"
-
-    def newPhase(prev: Phase) = new ThePhase(prev)    
-  }
-  
-  private object thePhase2 extends PluginComponent {
-    val global = ThePlugin.this.global
-
-    val runsAfter = List[String]("dce","cyclicdependency1")
-
-    val phaseName = ThePlugin.this.name + "2"
-
-    def newPhase(prev: Phase) = new ThePhase(prev)    
-  }
-  
-  private class ThePhase(prev: Phase) extends Phase(prev) {
-    def name = ThePlugin.this.name
-    def run {}
-  }
-}
-
diff --git a/test/pending/neg/plugin-cyclic-dependency/testsource.scala b/test/pending/neg/plugin-cyclic-dependency/testsource.scala
deleted file mode 100644
index f1513ec..0000000
--- a/test/pending/neg/plugin-cyclic-dependency/testsource.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends Application {
-  println("cyclicdependency")
-}
-
diff --git a/test/pending/neg/plugin-multiple-rafter.check b/test/pending/neg/plugin-multiple-rafter.check
deleted file mode 100644
index c54f884..0000000
--- a/test/pending/neg/plugin-multiple-rafter.check
+++ /dev/null
@@ -1,4 +0,0 @@
-error: fatal error: Multiple phases want to run right after the phase explicitouter
-Phases: erasure, multi-rafter, 
-Re-run with -Xgenerate-phase-graph <filename> to better see the problem.
-one error found
diff --git a/test/pending/neg/plugin-multiple-rafter.flags b/test/pending/neg/plugin-multiple-rafter.flags
deleted file mode 100644
index dcae7f2..0000000
--- a/test/pending/neg/plugin-multiple-rafter.flags
+++ /dev/null
@@ -1,2 +0,0 @@
--Xplugin:files/neg/plugin-multiple-rafter/lib/plugins.jar
-
diff --git a/test/pending/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha1
deleted file mode 100644
index f4905fc..0000000
--- a/test/pending/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2bda582b574287429ad5ee2e1d9a3effc88b0a5f ?plugins.jar
diff --git a/test/pending/neg/plugin-multiple-rafter/misc/build.sh b/test/pending/neg/plugin-multiple-rafter/misc/build.sh
deleted file mode 100755
index 8899009..0000000
--- a/test/pending/neg/plugin-multiple-rafter/misc/build.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-SCALAC="../../../../build/pack/bin/scalac -deprecation -cp ../../../../build/quick/classes/compiler/"
-
-BASE=`pwd`
-
-if [[ -d "${BASE}/src" ]] ; then
-
-    mkdir -p build
-    ${SCALAC} -d build src/*.scala
-    jar cf lib/plugins.jar -C misc/ scalac-plugin.xml -C build .
-    rm -rf build
-fi
-
diff --git a/test/pending/neg/plugin-multiple-rafter/misc/scalac-plugin.xml b/test/pending/neg/plugin-multiple-rafter/misc/scalac-plugin.xml
deleted file mode 100644
index 90ff27d..0000000
--- a/test/pending/neg/plugin-multiple-rafter/misc/scalac-plugin.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-<plugin>
-  <name>beforeparser</name>
-  <classname>scala.test.plugins.ThePlugin</classname>
-</plugin>
-
diff --git a/test/pending/neg/plugin-multiple-rafter/src/ThePlugin.scala b/test/pending/neg/plugin-multiple-rafter/src/ThePlugin.scala
deleted file mode 100644
index 819176f..0000000
--- a/test/pending/neg/plugin-multiple-rafter/src/ThePlugin.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package scala.test.plugins
-
-import scala.tools.nsc
-import nsc.Global
-import nsc.Phase
-import nsc.plugins.Plugin
-import nsc.plugins.PluginComponent
-
-class ThePlugin(val global: Global) extends Plugin {
-  import global._
-
-  val name = "multi-rafter"
-  val description = ""
-  val components = List[PluginComponent](thePhase)
-  
-  private object thePhase extends PluginComponent {
-    val global = ThePlugin.this.global
-
-    val runsAfter = List[String]()
-    override val runsRightAfter = Some("explicitouter")
-    val phaseName = ThePlugin.this.name
-
-    def newPhase(prev: Phase) = new ThePhase(prev)    
-  }
-  
-  private class ThePhase(prev: Phase) extends Phase(prev) {
-    def name = ThePlugin.this.name
-    def run {}
-  }
-}
-
diff --git a/test/pending/neg/plugin-multiple-rafter/testsource.scala b/test/pending/neg/plugin-multiple-rafter/testsource.scala
deleted file mode 100644
index f73db1e..0000000
--- a/test/pending/neg/plugin-multiple-rafter/testsource.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends Application {
-  println("multi-rafter")
-}
-
diff --git a/test/pending/neg/plugin-rafter-before-1.check b/test/pending/neg/plugin-rafter-before-1.check
deleted file mode 100644
index 19ed4d2..0000000
--- a/test/pending/neg/plugin-rafter-before-1.check
+++ /dev/null
@@ -1,2 +0,0 @@
-error: fatal error: phase erasure want to run right after explicitouter, but some phase has declared to run before erasure. Re-run with -Xgenerate-phase-graph <filename> to better see the problem.
-one error found
diff --git a/test/pending/neg/plugin-rafter-before-1.flags b/test/pending/neg/plugin-rafter-before-1.flags
deleted file mode 100644
index 8bf0314..0000000
--- a/test/pending/neg/plugin-rafter-before-1.flags
+++ /dev/null
@@ -1,2 +0,0 @@
--Xplugin:files/neg/plugin-rafter-before-1/lib/plugins.jar
-
diff --git a/test/pending/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha1
deleted file mode 100644
index 8ad591b..0000000
--- a/test/pending/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-af91fd67ccef349e7f8ea662615e17796a339485 ?plugins.jar
diff --git a/test/pending/neg/plugin-rafter-before-1/misc/build.sh b/test/pending/neg/plugin-rafter-before-1/misc/build.sh
deleted file mode 100755
index 8899009..0000000
--- a/test/pending/neg/plugin-rafter-before-1/misc/build.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-SCALAC="../../../../build/pack/bin/scalac -deprecation -cp ../../../../build/quick/classes/compiler/"
-
-BASE=`pwd`
-
-if [[ -d "${BASE}/src" ]] ; then
-
-    mkdir -p build
-    ${SCALAC} -d build src/*.scala
-    jar cf lib/plugins.jar -C misc/ scalac-plugin.xml -C build .
-    rm -rf build
-fi
-
diff --git a/test/pending/neg/plugin-rafter-before-1/misc/scalac-plugin.xml b/test/pending/neg/plugin-rafter-before-1/misc/scalac-plugin.xml
deleted file mode 100644
index 90ff27d..0000000
--- a/test/pending/neg/plugin-rafter-before-1/misc/scalac-plugin.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-<plugin>
-  <name>beforeparser</name>
-  <classname>scala.test.plugins.ThePlugin</classname>
-</plugin>
-
diff --git a/test/pending/neg/plugin-rafter-before-1/src/ThePlugin.scala b/test/pending/neg/plugin-rafter-before-1/src/ThePlugin.scala
deleted file mode 100644
index 81ba85a..0000000
--- a/test/pending/neg/plugin-rafter-before-1/src/ThePlugin.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package scala.test.plugins
-
-import scala.tools.nsc
-import nsc.Global
-import nsc.Phase
-import nsc.plugins.Plugin
-import nsc.plugins.PluginComponent
-
-class ThePlugin(val global: Global) extends Plugin {
-  import global._
-
-  val name = "rafter-before-1"
-  val description = ""
-  val components = List[PluginComponent](thePhase1)
-  
-  private object thePhase1 extends PluginComponent {
-    val global = ThePlugin.this.global
-
-    val runsAfter = List[String]("refchecks")
-    override val runsBefore = List[String]("erasure")
-    val phaseName = ThePlugin.this.name
-
-    def newPhase(prev: Phase) = new ThePhase(prev)    
-  }
-  
-  private class ThePhase(prev: Phase) extends Phase(prev) {
-    def name = ThePlugin.this.name
-    def run {}
-  }
-}
-
diff --git a/test/pending/neg/plugin-rafter-before-1/testsource.scala b/test/pending/neg/plugin-rafter-before-1/testsource.scala
deleted file mode 100644
index 836459d..0000000
--- a/test/pending/neg/plugin-rafter-before-1/testsource.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends Application {
-  println("rafter-before-1")
-}
-
diff --git a/test/pending/neg/plugin-rightafter-terminal.flags b/test/pending/neg/plugin-rightafter-terminal.flags
deleted file mode 100644
index 948a318..0000000
--- a/test/pending/neg/plugin-rightafter-terminal.flags
+++ /dev/null
@@ -1,2 +0,0 @@
--Xplugin:files/neg/plugin-rightafter-terminal/lib/plugins.jar
-
diff --git a/test/pending/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha1
deleted file mode 100644
index c2e2b9c..0000000
--- a/test/pending/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8cccde4914da2058dca893783c231cda23855603 ?plugins.jar
diff --git a/test/pending/neg/plugin-rightafter-terminal/misc/build.sh b/test/pending/neg/plugin-rightafter-terminal/misc/build.sh
deleted file mode 100755
index 8899009..0000000
--- a/test/pending/neg/plugin-rightafter-terminal/misc/build.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-SCALAC="../../../../build/pack/bin/scalac -deprecation -cp ../../../../build/quick/classes/compiler/"
-
-BASE=`pwd`
-
-if [[ -d "${BASE}/src" ]] ; then
-
-    mkdir -p build
-    ${SCALAC} -d build src/*.scala
-    jar cf lib/plugins.jar -C misc/ scalac-plugin.xml -C build .
-    rm -rf build
-fi
-
diff --git a/test/pending/neg/plugin-rightafter-terminal/misc/scalac-plugin.xml b/test/pending/neg/plugin-rightafter-terminal/misc/scalac-plugin.xml
deleted file mode 100644
index 90ff27d..0000000
--- a/test/pending/neg/plugin-rightafter-terminal/misc/scalac-plugin.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-<plugin>
-  <name>beforeparser</name>
-  <classname>scala.test.plugins.ThePlugin</classname>
-</plugin>
-
diff --git a/test/pending/neg/plugin-rightafter-terminal/src/ThePlugin.scala b/test/pending/neg/plugin-rightafter-terminal/src/ThePlugin.scala
deleted file mode 100644
index 9d6d30b..0000000
--- a/test/pending/neg/plugin-rightafter-terminal/src/ThePlugin.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package scala.test.plugins
-
-import scala.tools.nsc
-import nsc.Global
-import nsc.Phase
-import nsc.plugins.Plugin
-import nsc.plugins.PluginComponent
-
-class ThePlugin(val global: Global) extends Plugin {
-  import global._
-
-  val name = "rightafterterminal"
-  val description = "Declares one plugin that wants to be right after the terminal phase"
-  val components = List[PluginComponent](thePhase)
-  
-  private object thePhase extends PluginComponent {
-    val global = ThePlugin.this.global
-
-    val runsAfter = List[String]()
-    override val runsRightAfter = Some("terminal")
-    
-    val phaseName = ThePlugin.this.name
-
-    def newPhase(prev: Phase) = new ThePhase(prev)    
-  }
-  
-  private class ThePhase(prev: Phase) extends Phase(prev) {
-    def name = ThePlugin.this.name
-    def run {}
-  }
-}
-
diff --git a/test/pending/neg/plugin-rightafter-terminal/testsource.scala b/test/pending/neg/plugin-rightafter-terminal/testsource.scala
deleted file mode 100644
index 7af767b..0000000
--- a/test/pending/neg/plugin-rightafter-terminal/testsource.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends Application {
-  println("rightafterterminal")
-}
-
diff --git a/test/pending/neg/t5353.check b/test/pending/neg/t5353.check
deleted file mode 100644
index 75e2435..0000000
--- a/test/pending/neg/t5353.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t5353.scala:2: error: this type parameter must be specified
-  def f(x: Boolean) = if (x) Array("abc") else Array()
-                                                    ^
-one error found
diff --git a/test/pending/neg/t5353.scala b/test/pending/neg/t5353.scala
deleted file mode 100644
index 1ee869a..0000000
--- a/test/pending/neg/t5353.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class A {
-  def f(x: Boolean) = if (x) Array("abc") else Array()
-}
diff --git a/test/files/neg/t5589neg.flags b/test/pending/neg/t5589neg.flags
similarity index 100%
rename from test/files/neg/t5589neg.flags
rename to test/pending/neg/t5589neg.flags
diff --git a/test/files/neg/t5589neg2.check b/test/pending/neg/t5589neg2.check
similarity index 100%
rename from test/files/neg/t5589neg2.check
rename to test/pending/neg/t5589neg2.check
diff --git a/test/pending/neg/t7886.scala b/test/pending/neg/t7886.scala
new file mode 100644
index 0000000..55d80a0
--- /dev/null
+++ b/test/pending/neg/t7886.scala
@@ -0,0 +1,22 @@
+trait Covariant[+A]
+trait Contra[-A] { def accept(p: A): Unit }
+trait Invariant[A] extends Covariant[A] with Contra[A]
+
+case class Unravel[A](m: Contra[A], msg: A)
+
+object Test extends Covariant[Any] {
+  def g(m: Contra[Any]): Unit = m accept 5
+  def f(x: Any): Unit = x match {
+    case Unravel(m, msg) => g(m)
+    case _               =>
+  }
+  def main(args: Array[String]) {
+    f(Unravel[String](new Contra[String] { def accept(x: String) = x.length }, ""))
+  }
+}
+// java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
+//   at Test$$anon$1.accept(a.scala:18)
+//   at Test$.g(a.scala:13)
+//   at Test$.f(a.scala:15)
+//   at Test$.main(a.scala:18)
+//   at Test.main(a.scala)
diff --git a/test/pending/neg/t7886b.scala b/test/pending/neg/t7886b.scala
new file mode 100644
index 0000000..1db8be9
--- /dev/null
+++ b/test/pending/neg/t7886b.scala
@@ -0,0 +1,23 @@
+trait Covariant[+A]
+trait Contra[-A] { def accept(p: A): Unit }
+trait Invariant[A] extends Covariant[A] with Contra[A]
+
+trait T
+case class Unravel[A](m: Contra[A], msg: A) extends T
+
+object Test extends Covariant[Any] {
+  def g(m: Contra[Any]): Unit = m accept 5
+  def f(x: T): Unit = x match {
+    case Unravel(m, msg) => g(m)
+    case _               =>
+  }
+  def main(args: Array[String]) {
+    f(Unravel[String](new Contra[String] { def accept(x: String) = x.length }, ""))
+  }
+}
+// java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
+//   at Test$$anon$1.accept(a.scala:18)
+//   at Test$.g(a.scala:13)
+//   at Test$.f(a.scala:15)
+//   at Test$.main(a.scala:18)
+//   at Test.main(a.scala)
diff --git a/test/files/neg/case-collision.flags b/test/pending/pos/no-widen-locals.flags
similarity index 100%
copy from test/files/neg/case-collision.flags
copy to test/pending/pos/no-widen-locals.flags
diff --git a/test/pending/pos/overloading-boundaries.scala b/test/pending/pos/overloading-boundaries.scala
new file mode 100644
index 0000000..d2e9fdb
--- /dev/null
+++ b/test/pending/pos/overloading-boundaries.scala
@@ -0,0 +1,37 @@
+package bar {
+  object bippy extends (Double => String) {
+    def apply(x: Double): String = "Double"
+  }
+}
+
+package object bar {
+  def bippy(x: Int, y: Int, z: Int) = "(Int, Int, Int)"
+}
+
+object Test {
+  def main(args: Array[String]): Unit = {
+    println(bar.bippy(5.5d))
+    println(bar.bippy(1, 2, 3))
+  }
+}
+
+/****
+
+% scalac3 a.scala
+a.scala:13: error: not enough arguments for method bippy: (x: Int, y: Int, z: Int)String.
+Unspecified value parameters y, z.
+    println(bar.bippy(5.5d))
+                     ^
+one error found
+
+# Comment out the call to bar.bippy(5.5d) - compiles
+% scalac3 a.scala
+
+# Compiles only from pure source though - if classes are present, fails.
+% scalac3 a.scala
+a.scala:2: error: bippy is already defined as method bippy in package object bar
+  object bippy extends (Double => String) {
+         ^
+one error found
+
+****/
diff --git a/test/pending/pos/pattern-typing.scala b/test/pending/pos/pattern-typing.scala
new file mode 100644
index 0000000..7286cc3
--- /dev/null
+++ b/test/pending/pos/pattern-typing.scala
@@ -0,0 +1,29 @@
+import scala.language.higherKinds
+
+trait Bound[B]
+
+package p1 {
+  case class Sub[B <: Bound[B]](p: B)
+  object Test {
+    def g[A](x: Bound[A]) = ()
+    def f(x: Any) = x match { case Sub(p) => g(p) }
+  }
+}
+
+package p2 {
+  trait Traversable[+A] { def head: A = ??? }
+  trait Seq[+A] extends Traversable[A] { def length: Int = ??? }
+
+  case class SubHK[B <: Bound[B], CC[X] <: Traversable[X]](xs: CC[B])
+  class MyBound extends Bound[MyBound]
+  class MySeq extends Seq[MyBound]
+
+  object Test {
+    def g[B](x: Bound[B]) = ()
+
+    def f1(x: Any) = x match { case SubHK(xs) => xs }
+    def f2[B <: Bound[B], CC[X] <: Traversable[X]](sub: SubHK[B, CC]): CC[B] = sub match { case SubHK(xs) => xs }
+    def f3 = g(f1(SubHK(new MySeq)).head)
+    def f4 = g(f2(SubHK(new MySeq)).head)
+  }
+}
diff --git a/test/pending/pos/t1751.cmds b/test/pending/pos/t1751.cmds
deleted file mode 100644
index d4a4898..0000000
--- a/test/pending/pos/t1751.cmds
+++ /dev/null
@@ -1,3 +0,0 @@
-javac SuiteClasses.java
-scalac A2_1.scala
-scalac A1_2.scala
diff --git a/test/pending/pos/t1782.cmds b/test/pending/pos/t1782.cmds
deleted file mode 100644
index 61f3d37..0000000
--- a/test/pending/pos/t1782.cmds
+++ /dev/null
@@ -1,2 +0,0 @@
-javac Ann.java Days.java ImplementedBy.java
-scalac Test_1.scala
diff --git a/test/pending/pos/t1786.scala b/test/pending/pos/t1786.scala
index dca2eda..6299eb9 100644
--- a/test/pending/pos/t1786.scala
+++ b/test/pending/pos/t1786.scala
@@ -1,20 +1,27 @@
-/** This a consequence of the current type checking algorithm, where bounds
- *  are checked only after variables are instantiated. I believe this will change once we go to contraint-based type inference. Assigning low priority until then.
- * 
+/** This a consequence of the current type checking algorithm, where bounds are checked only after variables are instantiated.
+ * I believe this will change once we go to constraint-based type inference.
+ * Alternatively, we can pursue a more extensive fix to SI-6169
  *
- */
+ * The below code shows a compiler flaw in that the wildcard "_" as value for a bounded type parameter either
+ * breaks the boundary - as it results in Any - or doesn't evaluate to the boundary (as I'd hoped it to be).
+*/
+
 class SomeClass(val intValue:Int)
-class MyClass[T <: SomeClass](val myValue:T) 
+class MyClass[T <: SomeClass](val myValue:T)
+class Flooz[A >: Null <: SomeClass, T >: Null <: A](var value: T)
 
-object Test extends Application {
-  def myMethod(i:MyClass[_]) {
-    i.myValue.intValue/2      // << error i is of type Any
-  }
+class A {
+  def f1(i:MyClass[_])                       = i.myValue.intValue
+  def f2(i:MyClass[_ <: SomeClass])          = i.myValue.intValue
+  // def f3[T](i: MyClass[T])                   = i.myValue.intValue
+  def f4[T <: SomeClass](i: MyClass[T])      = i.myValue.intValue
+  // def f5[T >: Null](i: MyClass[T])           = i.myValue.intValue
+  // def f6[T >: Null <: String](i: MyClass[T]) = i.myValue.intValue + i.myValue.charAt(0)
 
-  def myMethod(i:MyClass[_ <: SomeClass]) {
-    i.myValue.intValue/2      // << works
-  }
+  // def g1[A, T](x: Flooz[A, T]) = { x.value = null ; x.value.intValue }
+  def g2(x: Flooz[_, _]) = { x.value = null ; x.value.intValue }
+
+  class MyClass2(x: MyClass[_]) { val p = x.myValue.intValue }
+  // class MyClass3[T <: String](x: MyClass[T]) { val p = x.myValue.intValue + x.myValue.length }
+  // class MyClass4[T >: Null](x: MyClass[T]) { val p = x.myValue.intValue }
 }
-/*
-The below code shows a compiler flaw in that the wildcard "_" as value for a bounded type parameter either breaks the boundry - as it result in Any - or doesnt (as id hoped it to be) evaluates to the boundy.
-*/
diff --git a/test/pending/pos/t1832.scala b/test/pending/pos/t1832.scala
deleted file mode 100644
index bca863f..0000000
--- a/test/pending/pos/t1832.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-// Edit by paulp: reduced.
-trait Cloning {
-  trait Foo
-  def fn(g: Int => Unit): Foo
-
-  implicit def mkStar(i: Int) = new { def *(a: Foo): Foo = null }
-
-  val pool1 = 4 * fn { case i => i * 2 }
-  val pool2 = 4 * fn { case i: Int => i * 2 }
-}
diff --git a/test/pending/pos/t294.cmds b/test/pending/pos/t294.cmds
deleted file mode 100644
index 62c9a5a..0000000
--- a/test/pending/pos/t294.cmds
+++ /dev/null
@@ -1,3 +0,0 @@
-javac Ann.java Ann2.java
-scalac Test_1.scala
-scalac Test_2.scala
diff --git a/test/pending/pos/t4612.scala b/test/pending/pos/t4612.scala
new file mode 100644
index 0000000..a93c12e
--- /dev/null
+++ b/test/pending/pos/t4612.scala
@@ -0,0 +1,15 @@
+class CyclicReferenceCompilerBug {
+  trait Trait[A] {
+    def foo: A
+  }
+
+  class Class extends Trait[Class] {
+    def foo = new Class
+
+    trait OtherTrait extends Trait[OtherTrait] {
+      self: Class =>
+
+      def foo = new Class
+    }
+  }
+}
diff --git a/test/pending/pos/t4695/T_1.scala b/test/pending/pos/t4695/T_1.scala
new file mode 100644
index 0000000..70fb1a7
--- /dev/null
+++ b/test/pending/pos/t4695/T_1.scala
@@ -0,0 +1,4 @@
+package foo
+
+class Bar { }
+package object Bar { }
diff --git a/test/pending/pos/t4695/T_2.scala b/test/pending/pos/t4695/T_2.scala
new file mode 100644
index 0000000..70fb1a7
--- /dev/null
+++ b/test/pending/pos/t4695/T_2.scala
@@ -0,0 +1,4 @@
+package foo
+
+class Bar { }
+package object Bar { }
diff --git a/test/pending/pos/t4717.scala b/test/pending/pos/t4717.scala
deleted file mode 100644
index 7eaa3dd..0000000
--- a/test/pending/pos/t4717.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-trait Bounds[@specialized A] {
-  // okay without `>: A`
-  def x[B >: A]: Unit = new Bounds[B] {
-    lazy val it = ???  // def or val okay
-    it
-  }
-}
\ No newline at end of file
diff --git a/test/pending/pos/t4859.scala b/test/pending/pos/t4859.scala
deleted file mode 100644
index ec5abd9..0000000
--- a/test/pending/pos/t4859.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-object O {
-  C().CC()
-  D().DD()
-}
-
-case class C() {
-  case class CC()
-}
-
-case class D() {
-  class DD()
-  object DD {
-    def apply() = new DD()
-  }
-}
diff --git a/test/pending/pos/t5082.scala b/test/pending/pos/t5082.scala
new file mode 100644
index 0000000..20a6cfc
--- /dev/null
+++ b/test/pending/pos/t5082.scala
@@ -0,0 +1,8 @@
+object Test {
+  sealed trait A
+  case object A1 extends A
+}
+
+trait Something[T]
+
+case class Test() extends Something[Test.A]
diff --git a/test/pending/pos/t5259.scala b/test/pending/pos/t5259.scala
deleted file mode 100644
index 317e28a..0000000
--- a/test/pending/pos/t5259.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-object DefaultArgBogusTypeMismatch {
-
-  class A[T]
-  class B {
-    type T = this.type
-    def m(implicit a : A[T] = new A[T]) = a
-  }
-  
-  def newB = new B
-  val a1 = newB.m       // Bogus type mismatch
-
-  val stableB = new B
-  val a2 = stableB.m    // OK
-}
diff --git a/test/pending/pos/t5399.scala b/test/pending/pos/t5399.scala
deleted file mode 100644
index 89caba3..0000000
--- a/test/pending/pos/t5399.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-class Test {
-  class A[T]
-  class B[T](val a: A[T])
-
-  case class CaseClass[T](x: T)
-
-  def break(existB: B[_]) = CaseClass(existB.a) match { case CaseClass(_) => }
-}
diff --git a/test/pending/pos/t5459.scala b/test/pending/pos/t5459.scala
new file mode 100644
index 0000000..971e6f8
--- /dev/null
+++ b/test/pending/pos/t5459.scala
@@ -0,0 +1,48 @@
+trait A1
+trait A2
+trait A3
+trait L1 extends A1 with A2 with A3
+
+object Test {
+  trait T1[-A <: A1]
+  trait T2[-A >: L1]
+  trait T3[ A <: A1]
+  trait T4[ A >: L1]
+  trait T5[+A <: A1]
+  trait T6[+A >: L1]
+
+  def f1(x: T1[_]) = x
+  def f2(x: T2[_]) = x
+  def f3(x: T3[_]) = x
+  def f4(x: T4[_]) = x
+  def f5(x: T5[_]) = x
+  def f6(x: T6[_]) = x
+  // a.scala:22: error: type arguments [Any] do not conform to trait T5's type parameter bounds [+A <: A1]
+  //   def f5(x: T5[_]) = x
+  //       ^
+
+  def g1(x: T1[_ <: A1]) = x
+  def g2(x: T2[_ >: L1]) = x
+  def g3(x: T3[_ <: A1]) = x
+  def g4(x: T4[_ >: L1]) = x
+  def g5(x: T5[_ <: A1]) = x
+  def g6(x: T6[_ >: L1]) = x
+
+  def q1(x: T1[_ >: L1]) = x
+  def q2(x: T2[_ <: A1]) = x
+  def q3(x: T3[_ >: L1]) = x
+  def q4(x: T4[_ <: A1]) = x
+  def q5(x: T5[_ >: L1]) = x
+  def q6(x: T6[_ <: A1]) = x
+  // a.scala:41: error: type arguments [Any] do not conform to trait T5's type parameter bounds [+A <: A1]
+  //   def q5(x: T5[_ >: L1]) = x
+  //       ^
+  // two errors found
+
+  def h1(x: T1[_ >: L1 <: A1]) = x
+  def h2(x: T2[_ >: L1 <: A1]) = x
+  def h3(x: T3[_ >: L1 <: A1]) = x
+  def h4(x: T4[_ >: L1 <: A1]) = x
+  def h5(x: T5[_ >: L1 <: A1]) = x
+  def h6(x: T6[_ >: L1 <: A1]) = x
+}
diff --git a/test/pending/pos/t5626.scala b/test/pending/pos/t5626.scala
deleted file mode 100644
index 7ab3881..0000000
--- a/test/pending/pos/t5626.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-object Test {
-  val blob0 = new {
-    case class Foo(i : Int)
-  }
-  val foo0 = blob0.Foo(22)
-
-  val blob1 = new {
-    class Foo(i: Int)
-    object Foo { def apply(i: Int): Foo = new Foo(i) }
-  }
-  val foo1 = blob1.Foo(22)
-}
diff --git a/test/pending/pos/t5639/Foo.scala b/test/pending/pos/t5639/Foo.scala
deleted file mode 100644
index 6602150..0000000
--- a/test/pending/pos/t5639/Foo.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package pack.age
-
-class Baz 
-
-object Implicits  {
-  implicit def Baz(n: Int): Baz = new Baz
-}
diff --git a/test/pending/pos/t5654.scala b/test/pending/pos/t5654.scala
deleted file mode 100644
index eb711a5..0000000
--- a/test/pending/pos/t5654.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-case class Bomb(a: Array[_])
-case class Bomb2(a: Array[T] forSome { type T })
-class Okay1(a: Array[_])
-case class Okay2(s: Seq[_])
\ No newline at end of file
diff --git a/test/pending/pos/t5877.scala b/test/pending/pos/t5877.scala
new file mode 100644
index 0000000..b77605f
--- /dev/null
+++ b/test/pending/pos/t5877.scala
@@ -0,0 +1,5 @@
+package foo { }
+
+package object foo {
+  implicit class Foo(val s: String) { }
+}
diff --git a/test/pending/pos/t5954/T_1.scala b/test/pending/pos/t5954/T_1.scala
new file mode 100644
index 0000000..0064c59
--- /dev/null
+++ b/test/pending/pos/t5954/T_1.scala
@@ -0,0 +1,8 @@
+package p {
+  package base {
+    class X
+  }
+  package object base {
+    case class B()
+  }
+}
diff --git a/test/pending/pos/t5954/T_2.scala b/test/pending/pos/t5954/T_2.scala
new file mode 100644
index 0000000..0064c59
--- /dev/null
+++ b/test/pending/pos/t5954/T_2.scala
@@ -0,0 +1,8 @@
+package p {
+  package base {
+    class X
+  }
+  package object base {
+    case class B()
+  }
+}
diff --git a/test/pending/pos/t5954/T_3.scala b/test/pending/pos/t5954/T_3.scala
new file mode 100644
index 0000000..0064c59
--- /dev/null
+++ b/test/pending/pos/t5954/T_3.scala
@@ -0,0 +1,8 @@
+package p {
+  package base {
+    class X
+  }
+  package object base {
+    case class B()
+  }
+}
diff --git a/test/pending/pos/t6161.scala b/test/pending/pos/t6161.scala
new file mode 100644
index 0000000..5783cc8
--- /dev/null
+++ b/test/pending/pos/t6161.scala
@@ -0,0 +1,22 @@
+object t6161 {
+  trait N {
+    type Name
+  }
+
+  trait N1 extends N {
+    class Name {
+      type ThisNameType <: Name
+      def encode: ThisNameType = ???
+    }
+  }
+
+  trait S {
+    self: N => // change to N1 and it compiles
+    type NameType <: Name
+  }
+
+  object g extends S with N1
+
+  val n1: g.NameType = ???
+  val n2: g.Name = n1.encode
+}
diff --git a/test/pending/pos/t6225.scala b/test/pending/pos/t6225.scala
new file mode 100644
index 0000000..d7dff3c
--- /dev/null
+++ b/test/pending/pos/t6225.scala
@@ -0,0 +1,11 @@
+package library.x {
+  class X {
+    class Foo
+    implicit val foo = new Foo
+  }
+}
+package library { package object x extends X }
+package app {
+  import library.x._
+  object App { implicitly[Foo] }
+}
diff --git a/test/pending/pos/t8128b.scala b/test/pending/pos/t8128b.scala
new file mode 100644
index 0000000..dd44a25
--- /dev/null
+++ b/test/pending/pos/t8128b.scala
@@ -0,0 +1,18 @@
+class Optiony[X] { def isEmpty = true; def get: X = ??? }
+class Seqy[X] { def head: X = ???; def length = 0; def apply(i: Int): X = ??? }
+
+object G {
+  def unapply(m: Any): Optiony[_] = ???
+}
+
+object H {
+  def unapplySeq(m: Any): Optiony[Seqy[_]] = ???
+}
+
+object Test {
+  (0: Any) match {
+    case G(v) => v
+    case H(v) => v
+    case _ =>
+  }
+}
diff --git a/test/pending/pos/t8363b.scala b/test/pending/pos/t8363b.scala
new file mode 100644
index 0000000..393e2a0
--- /dev/null
+++ b/test/pending/pos/t8363b.scala
@@ -0,0 +1,7 @@
+class C(a: Any)
+class Test {  
+  def foo: Any = {
+    def form = 0
+    class C1 extends C({def x = form; ()})
+  }
+}
diff --git a/test/pending/pos/those-kinds-are-high.scala b/test/pending/pos/those-kinds-are-high.scala
index 434e64c..78367cb 100644
--- a/test/pending/pos/those-kinds-are-high.scala
+++ b/test/pending/pos/those-kinds-are-high.scala
@@ -4,18 +4,18 @@ class A {
 
   class C1[T] extends Template[C1] with Container[T]
   class C2[T] extends Template[C2] with Container[T]
-  
+
   /** Target expression:
    *    List(new C1[String], new C2[String])
    */
-    
+
   // Here's what would ideally be inferred.
   //
   // scala> :type List[Template[Container] with Container[String]](new C1[String], new C2[String])
   // List[Template[Container] with Container[java.lang.String]]
   //
   // Here's what it does infer.
-  // 
+  //
   // scala> :type List(new C1[String], new C2[String])
   // <console>:8: error: type mismatch;
   //  found   : C1[String]
@@ -43,11 +43,54 @@ class A {
   //   def fFail = List(new C1[String], new C2[String])
   //                                    ^
   // two errors found
-  
+
   /** Working version explicitly typed.
    */
   def fExplicit = List[Template[Container] with Container[String]](new C1[String], new C2[String])
-  
+
   // nope
   def fFail = List(new C1[String], new C2[String])
 }
+
+
+trait Other {
+  trait GenBar[+A]
+  trait Bar[+A] extends GenBar[A]
+  trait Templ[+A, +CC[X] <: GenBar[X]]
+
+  abstract class CC1[+A] extends Templ[A, CC1] with Bar[A]
+  abstract class CC2[+A] extends Templ[A, CC2] with Bar[A]
+
+  // Compiles
+  class A1 {
+    abstract class BarFactory[CC[X] <: Bar[X]]
+
+    def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
+  }
+
+  // Fails - only difference is CC covariant.
+  class A2 {
+    abstract class BarFactory[+CC[X] <: Bar[X]]
+
+    def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
+    // c.scala:23: error: kinds of the type arguments (Bar with Templ[Any,Bar]) do not conform to the expected kinds of the type parameters (type CC) in class BarFactory.
+    // Bar with Templ[Any,Bar]'s type parameters do not match type CC's expected parameters:
+    // <empty> has no type parameters, but type CC has one
+    //   def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
+    //       ^
+    // one error found
+  }
+
+  // Compiles - CC contravariant.
+  class A3 {
+    abstract class BarFactory[-CC[X] <: Bar[X]] // with Templ[X, CC]]
+
+    def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
+    // c.scala:23: error: kinds of the type arguments (Bar with Templ[Any,Bar]) do not conform to the expected kinds of the type parameters (type CC) in class BarFactory.
+    // Bar with Templ[Any,Bar]'s type parameters do not match type CC's expected parameters:
+    // <empty> has no type parameters, but type CC has one
+    //   def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
+    //       ^
+    // one error found
+  }
+}
diff --git a/test/pending/pos/treecheckers.flags b/test/pending/pos/treecheckers.flags
new file mode 100644
index 0000000..5319681
--- /dev/null
+++ b/test/pending/pos/treecheckers.flags
@@ -0,0 +1 @@
+-Ycheck:all
\ No newline at end of file
diff --git a/test/pending/pos/treecheckers/c1.scala b/test/pending/pos/treecheckers/c1.scala
new file mode 100644
index 0000000..b936839
--- /dev/null
+++ b/test/pending/pos/treecheckers/c1.scala
@@ -0,0 +1,12 @@
+object Test1 {
+  def f[T](xs: Array[T]): Array[T] = xs match { case xs => xs }
+  // [check: patmat] The symbol, tpe or info of tree `(x) : Array[T]` refers to a out-of-scope symbol, type T. tree.symbol.ownerChain: value x
+  // [check: patmat] The symbol, tpe or info of tree `(x) : Array[T]` refers to a out-of-scope symbol, type T. tree.symbol.ownerChain: value x
+
+  def g[T](xs: Array[T]): Array[T] = {
+    val x1: Array[T] = xs
+    def case4() = matchEnd3(x1)
+    def matchEnd3(x: Array[T]) = x
+    case4()
+  }
+}
diff --git a/test/pending/pos/treecheckers/c2.scala b/test/pending/pos/treecheckers/c2.scala
new file mode 100644
index 0000000..c893a5c
--- /dev/null
+++ b/test/pending/pos/treecheckers/c2.scala
@@ -0,0 +1 @@
+class Test2(val valueVal: Int) extends AnyVal
diff --git a/test/pending/pos/treecheckers/c3.scala b/test/pending/pos/treecheckers/c3.scala
new file mode 100644
index 0000000..e480bbf
--- /dev/null
+++ b/test/pending/pos/treecheckers/c3.scala
@@ -0,0 +1,8 @@
+import scala.collection.mutable.ArrayOps
+
+object Test3 {
+  implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match {
+    case x: Array[AnyRef]  => refArrayOps[AnyRef](x)
+    case x: Array[Boolean] => booleanArrayOps(x)
+  }).asInstanceOf[ArrayOps[T]]
+}
diff --git a/test/pending/pos/treecheckers/c4.scala b/test/pending/pos/treecheckers/c4.scala
new file mode 100644
index 0000000..2328131
--- /dev/null
+++ b/test/pending/pos/treecheckers/c4.scala
@@ -0,0 +1,9 @@
+sealed trait Message[+A]
+class Script[A] extends Message[A] {
+  def iterator: Iterator[Message[A]] = ???
+}
+
+trait Test4[A] {
+  def f(cmd: Message[A]): Iterator[A] = cmd match { case s: Script[t] => s.iterator flatMap f }
+  def g(cmd: Message[A]) = cmd match { case s: Script[t] => s }
+}
diff --git a/test/pending/pos/treecheckers/c5.scala b/test/pending/pos/treecheckers/c5.scala
new file mode 100644
index 0000000..43cbb65
--- /dev/null
+++ b/test/pending/pos/treecheckers/c5.scala
@@ -0,0 +1,3 @@
+trait Factory[CC[X] <: Traversable[X]]
+
+object Test5 extends Factory[Traversable]
diff --git a/test/pending/pos/treecheckers/c6.scala b/test/pending/pos/treecheckers/c6.scala
new file mode 100644
index 0000000..8283655
--- /dev/null
+++ b/test/pending/pos/treecheckers/c6.scala
@@ -0,0 +1,4 @@
+object Test6 {
+  import scala.reflect.ClassTag
+  def f[T: ClassTag] = implicitly[ClassTag[T]].runtimeClass match { case x => x }
+}
diff --git a/test/pending/pos/z1720.scala b/test/pending/pos/z1720.scala
deleted file mode 100644
index 6050f3f..0000000
--- a/test/pending/pos/z1720.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package test
-
-class Thing {
-  def info: Info[this.type] = InfoRepository.getInfo(this)
-  def info2: Info[this.type] = {
-    def self: this.type = this
-    InfoRepository.getInfo(self)
-  }
-}
-
-trait Info[T]
-case class InfoImpl[T](thing: T) extends Info[T]
-
-object InfoRepository {
-  def getInfo(t: Thing): Info[t.type] = InfoImpl(t)
-}
\ No newline at end of file
diff --git a/test/pending/presentation/context-bounds1.check b/test/pending/presentation/context-bounds1.check
new file mode 100644
index 0000000..b444de5
--- /dev/null
+++ b/test/pending/presentation/context-bounds1.check
@@ -0,0 +1,51 @@
+reload: ContextBounds.scala
+
+askHyperlinkPos for `Blubb` at (2,23) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `Blubb` at (13,7) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `Foo` at (4,17) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `Foo` at (9,7) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `Blubb` at (4,32) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `Blubb` at (13,7) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `A` at (4,42) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `A` at (4,12) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `A` at (4,51) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `A` at (4,12) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `blubb` at (4,66) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `blubb` at (2,7) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `Foo` at (5,18) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `Foo` at (9,7) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `A` at (5,25) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `A` at (4,12) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `foo` at (5,36) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `foo` at (10,7) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `A` at (10,14) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `A` at (9,11) ContextBounds.scala
+================================================================================
diff --git a/test/pending/presentation/context-bounds1/Test.scala b/test/pending/presentation/context-bounds1/Test.scala
new file mode 100644
index 0000000..bec1131
--- /dev/null
+++ b/test/pending/presentation/context-bounds1/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/pending/presentation/context-bounds1/src/ContextBounds.scala b/test/pending/presentation/context-bounds1/src/ContextBounds.scala
new file mode 100644
index 0000000..72a8f69
--- /dev/null
+++ b/test/pending/presentation/context-bounds1/src/ContextBounds.scala
@@ -0,0 +1,13 @@
+object ContextBound {
+  val blubb = new Blubb/*#*/
+
+  def work[A: Foo/*#*/](f: Blubb/*#*/ => A/*#*/): A/*#*/ = f(blubb/*#*/) ensuring {
+    implicitly[Foo/*#*/[A/*#*/]].foo/*#*/(_) >= 42
+  }
+}
+
+trait Foo[A] {
+  def foo(a: A/*#*/): Int
+}
+
+class Blubb
\ No newline at end of file
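
For readers following the hyperlink positions above: a context bound such as `work[A: Foo]` in ContextBounds.scala is shorthand for an extra implicit parameter. A minimal sketch of the desugaring, with illustrative names rather than the test's:

    trait Foo[A] { def foo(a: A): Int }

    object ContextBoundSketch {
      // `A : Foo` adds an implicit Foo[A] argument behind the scenes ...
      def work[A: Foo](a: A): Int = implicitly[Foo[A]].foo(a)

      // ... which is equivalent to spelling the evidence parameter out:
      def workExplicit[A](a: A)(implicit ev: Foo[A]): Int = ev.foo(a)
    }
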
diff --git a/test/pending/reify_typeof.check b/test/pending/reify_typeof.check
new file mode 100644
index 0000000..670f76f
--- /dev/null
+++ b/test/pending/reify_typeof.check
@@ -0,0 +1,10 @@
+Expr[Unit]({
+  val ru = `package`.universe;
+  val tpe1: ru.Type = ru.typeOf[`package`.List[Int]];
+  Predef.println(tpe1);
+  val tpe2: ru.Type = ru.typeOf(List.apply(1, 2, 3));
+  Predef.println(tpe2)
+})
+scala.List[Int]
+List[Int]
+()
diff --git a/test/pending/reify_typeof.scala b/test/pending/reify_typeof.scala
new file mode 100644
index 0000000..985c57b
--- /dev/null
+++ b/test/pending/reify_typeof.scala
@@ -0,0 +1,14 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+  val reified = reify {
+    val ru = scala.reflect.runtime.universe
+    val tpe1: ru.Type = ru.typeOf[List[Int]]
+    println(tpe1)
+    val tpe2: ru.Type = ru.typeOf(List(1, 2, 3))
+    println(tpe2)
+  }
+  println(reified)
+  println(reified.eval)
+}
\ No newline at end of file
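
The pending test above reifies calls to `typeOf`, including a value-argument form (`typeOf(List(1, 2, 3))`) that is not part of the released 2.11 API, which is presumably why the test sits under pending. With the released reflection API, `typeOf` only takes a type argument backed by a TypeTag; a small illustrative sketch of the supported spellings:

    import scala.reflect.runtime.universe._

    object TypeOfSketch extends App {
      // Statically known type argument, resolved through an implicit TypeTag.
      val tpe1: Type = typeOf[List[Int]]
      println(tpe1)                        // List[Int]

      // To get the type of a value, route it through a TypeTag context bound.
      def typeOfValue[T: TypeTag](x: T): Type = typeOf[T]
      println(typeOfValue(List(1, 2, 3)))  // List[Int]
    }
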
diff --git a/test/pending/run/hk-lub-fail.scala b/test/pending/run/hk-lub-fail.scala
index b58a86e..0ac4fdd 100644
--- a/test/pending/run/hk-lub-fail.scala
+++ b/test/pending/run/hk-lub-fail.scala
@@ -25,12 +25,12 @@ object Test {
 
   // A repl session to get you started.
 /*
-  val quux1 = EmptyPackageClass.tpe.member(newTermName("Quux1"))
-  val quux2 = EmptyPackageClass.tpe.member(newTermName("Quux2"))
+  val quux1 = EmptyPackageClass.tpe.member(TermName("Quux1"))
+  val quux2 = EmptyPackageClass.tpe.member(TermName("Quux2"))
   val tps   = List(quux1, quux2) map (_.tpe)
-  val test  = EmptyPackageClass.tpe.member(newTermName("Test"))
-  val f     = test.tpe.member(newTypeName("F")).tpe
-  
+  val test  = EmptyPackageClass.tpe.member(TermName("Test"))
+  val f     = test.tpe.member(TypeName("F")).tpe
+
   val fn    = f.normalize.asInstanceOf[ExistentialType]
   val fn2   = fn.underlying.asInstanceOf[TypeRef]
 */
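
The REPL snippet above is updated to the 2.11 reflection API, where the `newTermName`/`newTypeName` factories are superseded by the `TermName`/`TypeName` constructors. A minimal sketch of the new spelling, standalone and assuming only a runtime-universe import:

    import scala.reflect.runtime.universe._

    object NameSketch {
      val quux: TermName = TermName("Quux1")   // 2.10 spelling: newTermName("Quux1")
      val f:    TypeName = TypeName("F")       // 2.10 spelling: newTypeName("F")
    }
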
diff --git a/test/pending/run/idempotency-partial-functions.scala b/test/pending/run/idempotency-partial-functions.scala
index bc0ca70..b26c442 100644
--- a/test/pending/run/idempotency-partial-functions.scala
+++ b/test/pending/run/idempotency-partial-functions.scala
@@ -20,9 +20,9 @@ object Test extends App {
     case e: ToolBoxError => println(e)
   }
   val tb = cm.mkToolBox()
-  val tpartials = tb.typeCheck(partials.tree)
+  val tpartials = tb.typecheck(partials.tree)
   println(tpartials)
-  val rtpartials = tb.resetAllAttrs(tpartials)
+  val rtpartials = tb.untypecheck(tpartials)
   println(tb.eval(rtpartials))
 }
 Test.main(null)
\ No newline at end of file
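
The same 2.11 rename shows up in the toolbox API: `typeCheck` becomes `typecheck` and `resetAllAttrs` becomes `untypecheck`. A minimal, self-contained sketch of the new names (illustrative only; the toolbox needs scala-compiler on the classpath):

    import scala.reflect.runtime.{currentMirror => cm}
    import scala.reflect.runtime.universe._
    import scala.tools.reflect.ToolBox

    object ToolBoxSketch extends App {
      val tb = cm.mkToolBox()
      val tree    = reify { 21 * 2 }.tree
      val typed   = tb.typecheck(tree)      // 2.10: tb.typeCheck(tree)
      val untyped = tb.untypecheck(typed)   // 2.10: tb.resetAllAttrs(typed)
      println(tb.eval(untyped))             // 42
    }
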
diff --git a/test/pending/run/macro-expand-default/Impls_1.scala b/test/pending/run/macro-expand-default/Impls_1.scala
index 7cf8d59..fd5d8d7 100644
--- a/test/pending/run/macro-expand-default/Impls_1.scala
+++ b/test/pending/run/macro-expand-default/Impls_1.scala
@@ -1,10 +1,10 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int], y: c.Expr[Int]) = {
+  def foo(c: Context)(x: c.Expr[Int], y: c.Expr[Int]) = {
     import c.universe._
-    val sum = Apply(Select(x.tree, newTermName("$minus")), List(y.tree))
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(sum))
+    val sum = Apply(Select(x.tree, TermName("$minus")), List(y.tree))
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(sum))
     Expr[Unit](body)
   }
 }
\ No newline at end of file
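
Here and in the following macro tests, the 2.10 import `scala.reflect.macros.{Context => Ctx}` becomes `scala.reflect.macros.blackbox.Context` (or `whitebox.Context` where the macro refines its result type). A minimal two-file sketch of a def macro under the new import, with hypothetical names; the implementation must be compiled before its call site:

    // Impls.scala
    import scala.reflect.macros.blackbox.Context

    object PrintSumMacro {
      def impl(c: Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Unit] = {
        import c.universe._
        // reify builds the tree for println(x + y) at the call site
        reify { println(x.splice + y.splice) }
      }
    }

    // Macros.scala
    import scala.language.experimental.macros

    object PrintSum {
      def printSum(x: Int, y: Int): Unit = macro PrintSumMacro.impl
    }
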
diff --git a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala
index 15bcb58..e8170fd 100644
--- a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala
+++ b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.Context
+import scala.reflect.macros.whitebox.Context
 
 object Impls {
   def foo[T: c.WeakTypeTag](c: Context): c.Expr[List[T]] = c.universe.reify {
diff --git a/test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala b/test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala
index be00fd0..34182b7 100644
--- a/test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala
+++ b/test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala
@@ -1,10 +1,10 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo[U](c: Ctx)(x: c.Expr[U])(evidence: c.Expr[Numeric[U]]) = {
+  def foo[U](c: Context)(x: c.Expr[U])(evidence: c.Expr[Numeric[U]]) = {
     import c.universe._
-    val plusOne = Apply(Select(evidence.tree, newTermName("plus")), List(x.tree, Literal(Constant(1))))
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(plusOne))
+    val plusOne = Apply(Select(evidence.tree, TermName("plus")), List(x.tree, Literal(Constant(1))))
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(plusOne))
     Expr[Unit](body)
   }
 }
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-named/Impls_1.scala b/test/pending/run/macro-expand-named/Impls_1.scala
index 7cf8d59..fd5d8d7 100644
--- a/test/pending/run/macro-expand-named/Impls_1.scala
+++ b/test/pending/run/macro-expand-named/Impls_1.scala
@@ -1,10 +1,10 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo(c: Ctx)(x: c.Expr[Int], y: c.Expr[Int]) = {
+  def foo(c: Context)(x: c.Expr[Int], y: c.Expr[Int]) = {
     import c.universe._
-    val sum = Apply(Select(x.tree, newTermName("$minus")), List(y.tree))
-    val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(sum))
+    val sum = Apply(Select(x.tree, TermName("$minus")), List(y.tree))
+    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(sum))
     Expr[Unit](body)
   }
 }
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala b/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala
index 26de70c..683622b 100644
--- a/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala
+++ b/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala
@@ -1,12 +1,12 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+  def foo[T, U: c.WeakTypeTag, V](c: Context)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
     import c.universe._
     Block(List(
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
+      Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString)))),
+      Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
+      Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(V.toString))))),
       Literal(Constant(())))
   }
 }
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala b/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala
index 26de70c..683622b 100644
--- a/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala
+++ b/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala
@@ -1,12 +1,12 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Impls {
-  def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+  def foo[T, U: c.WeakTypeTag, V](c: Context)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
     import c.universe._
     Block(List(
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
-      Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
+      Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString)))),
+      Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
+      Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(V.toString))))),
       Literal(Constant(())))
   }
 }
\ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-a/Impls_1.scala b/test/pending/run/macro-quasiinvalidbody-a/Impls_1.scala
index daedde4..741a921 100644
--- a/test/pending/run/macro-quasiinvalidbody-a/Impls_1.scala
+++ b/test/pending/run/macro-quasiinvalidbody-a/Impls_1.scala
@@ -1,5 +1,5 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 trait Impls {
-  def impl(c: Ctx)(x: c.Expr[Any]) = x
+  def impl(c: Context)(x: c.Expr[Any]) = x
 }
\ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-a/Macros_Test_2.scala b/test/pending/run/macro-quasiinvalidbody-a/Macros_Test_2.scala
index 27140a7..2735321 100644
--- a/test/pending/run/macro-quasiinvalidbody-a/Macros_Test_2.scala
+++ b/test/pending/run/macro-quasiinvalidbody-a/Macros_Test_2.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Macros extends Impls {
   def foo(x: Any) = macro impl
diff --git a/test/pending/run/macro-quasiinvalidbody-b/Impls_1.scala b/test/pending/run/macro-quasiinvalidbody-b/Impls_1.scala
index 246fc9f..b023d31 100644
--- a/test/pending/run/macro-quasiinvalidbody-b/Impls_1.scala
+++ b/test/pending/run/macro-quasiinvalidbody-b/Impls_1.scala
@@ -1,7 +1,7 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 trait ImplContainer {
   object Impls {
-    def foo(c: Ctx)(x: c.Expr[Any]) = x
+    def foo(c: Context)(x: c.Expr[Any]) = x
   }
 }
\ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-b/Macros_Test_2.scala b/test/pending/run/macro-quasiinvalidbody-b/Macros_Test_2.scala
index da9445a..639d93f 100644
--- a/test/pending/run/macro-quasiinvalidbody-b/Macros_Test_2.scala
+++ b/test/pending/run/macro-quasiinvalidbody-b/Macros_Test_2.scala
@@ -1,4 +1,4 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Macros extends ImplContainer {
   def foo(x: Any) = macro Impls.foo
diff --git a/test/pending/run/macro-reify-array/Macros_1.scala b/test/pending/run/macro-reify-array/Macros_1.scala
index f970be5..eea0133 100644
--- a/test/pending/run/macro-reify-array/Macros_1.scala
+++ b/test/pending/run/macro-reify-array/Macros_1.scala
@@ -1,10 +1,10 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def foo[T](s: String) = macro Impls.foo[T]
 
   object Impls {
-    def foo[T: c.WeakTypeTag](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
+    def foo[T: c.WeakTypeTag](c: Context)(s: c.Expr[T]) = c.universe.reify {
       Array(s.splice)
     }
   }
diff --git a/test/pending/run/macro-reify-tagful-b/Macros_1.scala b/test/pending/run/macro-reify-tagful-b/Macros_1.scala
index 59dbe71..f4d8062 100644
--- a/test/pending/run/macro-reify-tagful-b/Macros_1.scala
+++ b/test/pending/run/macro-reify-tagful-b/Macros_1.scala
@@ -1,10 +1,10 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def foo[T](s: T) = macro Impls.foo[List[T]]
 
   object Impls {
-    def foo[T: c.WeakTypeTag](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
+    def foo[T: c.WeakTypeTag](c: Context)(s: c.Expr[T]) = c.universe.reify {
       List(s.splice)
     }
   }
diff --git a/test/pending/run/macro-reify-tagless-b/Impls_Macros_1.scala b/test/pending/run/macro-reify-tagless-b/Impls_Macros_1.scala
index a581c47..1307052 100644
--- a/test/pending/run/macro-reify-tagless-b/Impls_Macros_1.scala
+++ b/test/pending/run/macro-reify-tagless-b/Impls_Macros_1.scala
@@ -1,10 +1,10 @@
-import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.blackbox.Context
 
 object Macros {
   def foo[T](s: T) = macro Impls.foo[List[T]]
 
   object Impls {
-    def foo[T](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
+    def foo[T](c: Context)(s: c.Expr[T]) = c.universe.reify {
       List(s.splice)
     }
   }
diff --git a/test/pending/run/macro-reify-tagless-b/Test_2.scala b/test/pending/run/macro-reify-tagless-b/Test_2.scala
index 10487b1..09ca6ba 100644
--- a/test/pending/run/macro-reify-tagless-b/Test_2.scala
+++ b/test/pending/run/macro-reify-tagless-b/Test_2.scala
@@ -6,8 +6,8 @@ object Test extends App {
   import scala.reflect.runtime.{currentMirror => cm}
   import scala.tools.reflect.ToolBox
   val tpt = AppliedTypeTree(Ident(definitions.ListClass), List(Ident(definitions.StringClass)))
-  val rhs = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant("hello world"))))
-  val list = ValDef(NoMods, newTermName("list"), tpt, rhs)
-  val tree = Block(list, Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Ident(list.name))))
+  val rhs = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant("hello world"))))
+  val list = ValDef(NoMods, TermName("list"), tpt, rhs)
+  val tree = Block(list, Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Ident(list.name))))
   println(cm.mkToolBox().eval(tree))
 }
diff --git a/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Impls_1.scala b/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Impls_1.scala
new file mode 100644
index 0000000..c43f5f3
--- /dev/null
+++ b/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.blackbox.Context
+
+object Impls {
+  def foo(c: Context) = {
+    import c.{prefix => prefix}
+    import c.universe._
+    val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+    val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
+    c.Expr[Unit](body)
+  }
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Macros_Test_2.scala b/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Macros_Test_2.scala
new file mode 100644
index 0000000..dd2317b
--- /dev/null
+++ b/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Macros_Test_2.scala
@@ -0,0 +1,6 @@
+import language.experimental.macros
+
+object Test extends App {
+  val macros = new { def foo = macro Impls.foo }
+  macros.foo
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_callccinterpreter.scala b/test/pending/run/reify_callccinterpreter.scala
index d9f7736..82c70da 100644
--- a/test/pending/run/reify_callccinterpreter.scala
+++ b/test/pending/run/reify_callccinterpreter.scala
@@ -43,15 +43,15 @@ object Test extends App {
       override def toString() = "<function>"
     }
 
-    type Environment = List[Pair[Name, Value]];
+    type Environment = List[Tuple2[Name, Value]];
 
     def lookup(x: Name, e: Environment): M[Value] = e match {
       case List() => unitM(Wrong)
-      case Pair(y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
+      case (y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
     }
 
-    def add(a: Value, b: Value): M[Value] = Pair(a, b) match {
-      case Pair(Num(m), Num(n)) => unitM(Num(m + n))
+    def add(a: Value, b: Value): M[Value] = (a, b) match {
+      case (Num(m), Num(n)) => unitM(Num(m + n))
       case _ => unitM(Wrong)
     }
 
@@ -67,12 +67,12 @@ object Test extends App {
            b <- interp(r, e);
            c <- add(a, b))
                         yield c
-      case Lam(x, t) => unitM(Fun(a => interp(t, Pair(x, a) :: e)))
+      case Lam(x, t) => unitM(Fun(a => interp(t, (x, a) :: e)))
       case App(f, t) => for (a <- interp(f, e);
            b <- interp(t, e);
            c <- apply(a, b))
             yield c
-      case Ccc(x, t) => callCC(k => interp(t, Pair(x, Fun(k)) :: e))
+      case Ccc(x, t) => callCC(k => interp(t, (x, Fun(k)) :: e))
     }
 
     def test(t: Term): String = showM(interp(t, List()))
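
`Pair`, the old alias and factory for Tuple2, is deprecated in 2.11, which is why the interpreter tests switch to plain tuple syntax for both construction and patterns. A minimal sketch of the migration, with illustrative names only:

    object PairMigration {
      type Environment = List[(String, Int)]      // was: List[Pair[String, Int]]

      def lookup(x: String, e: Environment): Option[Int] = e match {
        case Nil            => None
        case (y, b) :: rest =>                    // was: case Pair(y, b) :: rest
          if (x == y) Some(b) else lookup(x, rest)
      }
    }
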
diff --git a/test/pending/run/reify_implicits-new.scala b/test/pending/run/reify_implicits-new.scala
deleted file mode 100644
index 42a1dee..0000000
--- a/test/pending/run/reify_implicits-new.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-import scala.reflect.{ClassTag, classTag}
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.Eval
-
-object Test extends App {
-  reify {
-    implicit def arrayWrapper[A : ClassTag](x: Array[A]) =
-      new {
-        def sort(p: (A, A) => Boolean) = {
-          util.Sorting.stableSort(x, p); x
-        }
-      }
-    val x = Array(2, 3, 1, 4)
-    println("x = "+ x.sort((x: Int, y: Int) => x < y).toList)
-  }.eval
-}
\ No newline at end of file
diff --git a/test/pending/run/reify_implicits-old.scala b/test/pending/run/reify_implicits-old.scala
deleted file mode 100644
index 8ff256d..0000000
--- a/test/pending/run/reify_implicits-old.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.Eval
-
-object Test extends App {
-  reify {
-    implicit def arrayWrapper[A : ClassManifest](x: Array[A]) =
-      new {
-        def sort(p: (A, A) => Boolean) = {
-          util.Sorting.stableSort(x, p); x
-        }
-      }
-    val x = Array(2, 3, 1, 4)
-    println("x = "+ x.sort((x: Int, y: Int) => x < y).toList)
-  }.eval
-}
\ No newline at end of file
diff --git a/test/pending/run/reify_simpleinterpreter.scala b/test/pending/run/reify_simpleinterpreter.scala
index 6cf87ea..1f6d6c8 100644
--- a/test/pending/run/reify_simpleinterpreter.scala
+++ b/test/pending/run/reify_simpleinterpreter.scala
@@ -32,15 +32,15 @@ object Test extends App {
       override def toString() = "<function>"
     }
 
-    type Environment = List[Pair[Name, Value]]
+    type Environment = List[Tuple2[Name, Value]]
 
     def lookup(x: Name, e: Environment): M[Value] = e match {
       case List() => unitM(Wrong)
-      case Pair(y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
+      case (y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
     }
 
-    def add(a: Value, b: Value): M[Value] = Pair(a, b) match {
-      case Pair(Num(m), Num(n)) => unitM(Num(m + n))
+    def add(a: Value, b: Value): M[Value] = (a, b) match {
+      case (Num(m), Num(n)) => unitM(Num(m + n))
       case _ => unitM(Wrong)
     }
 
@@ -56,7 +56,7 @@ object Test extends App {
            b <- interp(r, e);
            c <- add(a, b))
                         yield c
-      case Lam(x, t) => unitM(Fun(a => interp(t, Pair(x, a) :: e)))
+      case Lam(x, t) => unitM(Fun(a => interp(t, (x, a) :: e)))
       case App(f, t) => for (a <- interp(f, e);
            b <- interp(t, e);
            c <- apply(a, b))
diff --git a/test/pending/run/t2318.scala b/test/pending/run/t2318.scala
deleted file mode 100644
index e42cbb9..0000000
--- a/test/pending/run/t2318.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-import java.security._
-
-object Test {
-  trait Bar { def bar: Unit }
-  
-  object Mgr extends SecurityManager {
-    override def checkPermission(perm: Permission) = perm match {
-      case _: java.lang.RuntimePermission                                                   => ()
-      case _: java.io.FilePermission                                                        => ()
-      case x: java.security.AccessControlException if x.getName contains ".networkaddress." => () // generality ftw
-      case _                                                                                => super.checkPermission(perm)
-    }
-  }
-  
-  def t1() = {
-    val p = Runtime.getRuntime().exec("ls");
-    type Destroyable = { def destroy() : Unit }
-    def doDestroy( obj : Destroyable ) : Unit = obj.destroy(); 
-    doDestroy( p );
-  }
-  def t2() = {
-    System.setSecurityManager(Mgr)
-
-    val b = new Bar { def bar = println("bar") }
-    b.bar
-
-    val structural = b.asInstanceOf[{ def bar: Unit }]
-    structural.bar
-  }
-  
-  def main(args: Array[String]) {
-    // figuring this will otherwise break on windows
-    try t1()
-    catch { case _: java.io.IOException => () }
-    
-    t2()
-  }
-}
diff --git a/test/pending/run/t3897/J_2.java b/test/pending/run/t3897/J_2.java
deleted file mode 100644
index 178412d..0000000
--- a/test/pending/run/t3897/J_2.java
+++ /dev/null
@@ -1,27 +0,0 @@
-import java.lang.reflect.*;
-
-public class J_2 {
-  public void f1(Class<?> clazz) {
-    Field[] fields = clazz.getDeclaredFields();
-    for (int i = 0 ; i < fields.length; i++) {
-      String name = fields[i].getName();
-      if (name.length() >= 7 && name.substring(0, 7).equals("bitmap$")) { }
-      else System.out.println("(" + name + "," + fields[i].getGenericType() + ")");
-    }    
-  }
-  public void f2(Class<?> clazz) {
-    Method[] methods = clazz.getDeclaredMethods();
-    for (int i = 0 ; i < methods.length; i++) {
-      String name = methods[i].getName();
-      if (name.length() >= 7 && name.substring(0, 7).equals("bitmap$")) { }
-      else System.out.println("(" + name + "," + methods[i].getGenericReturnType() + ")");
-    }    
-  }
-
-  public void javaRun() {
-    f1(One.class);
-    f2(One.class);
-    f1(Two.class);
-    f2(Two.class);
-  }
-}
\ No newline at end of file
diff --git a/test/pending/run/t3897/a_2.scala b/test/pending/run/t3897/a_2.scala
deleted file mode 100644
index 4d9e59e..0000000
--- a/test/pending/run/t3897/a_2.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-object Test {
-  def f1(clazz: Class[_]) = (
-    clazz.getDeclaredFields.toList 
-     . filterNot (_.getName contains "bitmap$")
-     . map (f => (f.getName, f.getGenericType))
-     . foreach (println)
-  )
-  def f2(clazz: Class[_]) = (
-    clazz.getDeclaredMethods.toList 
-     . filterNot (_.getName contains "bitmap$")
-     . map (f => (f.getName, f.getGenericReturnType))
-     . foreach (println)
-  )
-
-  def main(args: Array[String]): Unit = {
-    f1(classOf[One])
-    f2(classOf[One])
-    f1(classOf[Two])
-    f2(classOf[Two])
-    
-    new J_2().javaRun
-  }
-}
diff --git a/test/pending/run/t4728.check b/test/pending/run/t4728.check
deleted file mode 100644
index 7a754f4..0000000
--- a/test/pending/run/t4728.check
+++ /dev/null
@@ -1,2 +0,0 @@
-1
-2
\ No newline at end of file
diff --git a/test/pending/run/t5293-map.scala b/test/pending/run/t5293-map.scala
deleted file mode 100644
index 2707aed..0000000
--- a/test/pending/run/t5293-map.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-
-
-
-import scala.collection.JavaConverters._
-
-
-
-object Test extends App {
-  
-  def bench(label: String)(body: => Unit): Long = {
-    val start = System.nanoTime
-
-    0.until(10).foreach(_ => body)
-
-    val end = System.nanoTime
-
-    //println("%s: %s ms".format(label, (end - start) / 1000.0 / 1000.0))
-    
-    end - start
-  }
-  
-  def benchJava(values: java.util.Map[Int, Int]) = {
-    bench("Java Map") {
-      val m = new java.util.HashMap[Int, Int]
-      
-      m.putAll(values)
-    }
-  }
-
-  def benchScala(values: Iterable[(Int, Int)]) = {
-    bench("Scala Map") {
-      val m = new scala.collection.mutable.HashMap[Int, Int]
-      
-      m ++= values
-    }
-  }
-  
-  def benchScalaSorted(values: Iterable[(Int, Int)]) = {
-    bench("Scala Map sorted") {
-      val m = new scala.collection.mutable.HashMap[Int, Int]
-      
-      m ++= values.toArray.sorted
-    }
-  }
-  
-  def benchScalaPar(values: Iterable[(Int, Int)]) = {
-    bench("Scala ParMap") {
-      val m = new scala.collection.parallel.mutable.ParHashMap[Int, Int] map { x => x }
-      
-      m ++= values
-    }
-  }
-  
-  val total = 50000
-  val values = (0 until total) zip (0 until total)
-  val map = scala.collection.mutable.HashMap.empty[Int, Int]
-  
-  map ++= values
-  
-  // warmup
-  for (x <- 0 until 5) {
-    benchJava(map.asJava)
-    benchScala(map)
-    benchScalaPar(map)
-    benchJava(map.asJava)
-    benchScala(map)
-    benchScalaPar(map)
-  }
-  
-  val javamap = benchJava(map.asJava)
-  val scalamap = benchScala(map)
-  val scalaparmap = benchScalaPar(map)
-  
-  // println(javamap)
-  // println(scalamap)
-  // println(scalaparmap)
-  
-  assert(scalamap < (javamap * 10), "scalamap: " + scalamap + " vs. javamap: " + javamap)
-  assert(scalaparmap < (javamap * 10), "scalaparmap: " + scalaparmap + " vs. javamap: " + javamap)
-}
-
-
-
-
-
-
-
-
diff --git a/test/pending/run/t5293.scala b/test/pending/run/t5293.scala
deleted file mode 100644
index 01ead45..0000000
--- a/test/pending/run/t5293.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-
-
-
-import scala.collection.JavaConverters._
-
-
-
-object Test extends App {
-  
-  def bench(label: String)(body: => Unit): Long = {
-    val start = System.nanoTime
-
-    0.until(10).foreach(_ => body)
-
-    val end = System.nanoTime
-
-    //println("%s: %s ms".format(label, (end - start) / 1000.0 / 1000.0))
-    
-    end - start
-  }
-  
-  def benchJava(values: java.util.Collection[Int]) = {
-    bench("Java Set") {
-      val set = new java.util.HashSet[Int]
-      
-      set.addAll(values)
-    }
-  }
-
-  def benchScala(values: Iterable[Int]) = {
-    bench("Scala Set") {
-      val set = new scala.collection.mutable.HashSet[Int]
-      
-      set ++= values
-    }
-  }
-  
-  def benchScalaSorted(values: Iterable[Int]) = {
-    bench("Scala Set sorted") {
-      val set = new scala.collection.mutable.HashSet[Int]
-      
-      set ++= values.toArray.sorted
-    }
-  }
-  
-  def benchScalaPar(values: Iterable[Int]) = {
-    bench("Scala ParSet") {
-      val set = new scala.collection.parallel.mutable.ParHashSet[Int] map { x => x }
-      
-      set ++= values
-    }
-  }
-  
-  val values = 0 until 50000
-  val set = scala.collection.mutable.HashSet.empty[Int]
-  
-  set ++= values
-  
-  // warmup
-  for (x <- 0 until 5) {
-    benchJava(set.asJava)
-    benchScala(set)
-    benchScalaPar(set)
-    benchJava(set.asJava)
-    benchScala(set)
-    benchScalaPar(set)
-  }
-  
-  val javaset = benchJava(set.asJava)
-  val scalaset = benchScala(set)
-  val scalaparset = benchScalaPar(set)
-  
-  assert(scalaset < (javaset * 8), "scalaset: " + scalaset + " vs. javaset: " + javaset)
-  assert(scalaparset < (javaset * 8), "scalaparset: " + scalaparset + " vs. javaset: " + javaset)
-}
-
-
-
-
-
-
-
-
diff --git a/test/pending/run/t5418.check b/test/pending/run/t5418.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/pending/run/t5427a.scala b/test/pending/run/t5427a.scala
index f7cd051..a7d2092 100644
--- a/test/pending/run/t5427a.scala
+++ b/test/pending/run/t5427a.scala
@@ -4,7 +4,7 @@ object Foo { val bar = 2 }
 
 object Test extends App {
   val tpe = getType(Foo)
-  val bar = tpe.nonPrivateMember(newTermName("bar"))
+  val bar = tpe.nonPrivateMember(TermName("bar"))
   val value = getValue(Foo, bar)
   println(value)
 }
\ No newline at end of file
diff --git a/test/pending/run/t5427b.scala b/test/pending/run/t5427b.scala
index e80bd12..af1ae6e 100644
--- a/test/pending/run/t5427b.scala
+++ b/test/pending/run/t5427b.scala
@@ -5,7 +5,7 @@ class Foo { val bar = 2 }
 object Test extends App {
   val foo = new Foo
   val tpe = getType(foo)
-  val bar = tpe.nonPrivateMember(newTermName("bar"))
+  val bar = tpe.nonPrivateMember(TermName("bar"))
   val value = getValue(foo, bar)
   println(value)
 }
\ No newline at end of file
diff --git a/test/pending/run/t5427c.scala b/test/pending/run/t5427c.scala
index 7095158..ba71803 100644
--- a/test/pending/run/t5427c.scala
+++ b/test/pending/run/t5427c.scala
@@ -5,7 +5,7 @@ class Foo(bar: Int)
 object Test extends App {
   val foo = new Foo(2)
   val tpe = getType(foo)
-  val bar = tpe.nonPrivateMember(newTermName("bar"))
+  val bar = tpe.nonPrivateMember(TermName("bar"))
   bar match {
     case NoSymbol => println("no public member")
     case _ => println("i'm screwed")
diff --git a/test/pending/run/t5427d.scala b/test/pending/run/t5427d.scala
index f0cc07d..1d37dbd 100644
--- a/test/pending/run/t5427d.scala
+++ b/test/pending/run/t5427d.scala
@@ -5,7 +5,7 @@ class Foo(val bar: Int)
 object Test extends App {
   val foo = new Foo(2)
   val tpe = getType(foo)
-  val bar = tpe.nonPrivateMember(newTermName("bar"))
+  val bar = tpe.nonPrivateMember(TermName("bar"))
   val value = getValue(foo, bar)
   println(value)
 }
\ No newline at end of file
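
The t5427 tests above switch to `TermName` for member lookup. A standalone sketch of looking a member up by name through the runtime universe; it uses `Type.member` rather than the test suite's `getType`/`getValue` helpers, which are not shown in this diff:

    import scala.reflect.runtime.universe._

    object MemberLookupSketch extends App {
      class Foo(val bar: Int)

      val tpe = typeOf[Foo]
      val bar = tpe.member(TermName("bar"))  // 2.10 spelling: newTermName("bar")
      println(bar)                           // prints the accessor symbol for bar
    }
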
diff --git a/test/pending/run/t5866b.scala b/test/pending/run/t5866b.scala
new file mode 100644
index 0000000..44d8b11
--- /dev/null
+++ b/test/pending/run/t5866b.scala
@@ -0,0 +1,17 @@
+class Foo(val d: Double) extends AnyVal {
+  override def toString = s"Foo($d)"
+}
+
+class Bar(val d: String) extends AnyVal {
+  override def toString = s"Foo($d)"
+}
+
+object Test {
+  def main(args: Array[String]): Unit = {
+    val f: Foo = {val n: Any = null; n.asInstanceOf[Foo]}
+    println(f)
+
+    val b: Bar = {val n: Any = null; n.asInstanceOf[Bar]}
+    println(b)
+  }
+}
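
t5866b exercises casting `null` to user-defined value classes (classes extending AnyVal), a corner where the boxing rules are subtle, and it stays under pending. As a reminder of the feature itself, a minimal value-class sketch with hypothetical names, unrelated to the test's expected output:

    class Meters(val value: Double) extends AnyVal {
      def +(other: Meters): Meters = new Meters(value + other.value)
    }

    object ValueClassSketch extends App {
      // In straight-line code like this the wrapper is elided and the
      // arithmetic happens on raw doubles; no Meters instance is allocated.
      val total = new Meters(1.5) + new Meters(2.5)
      println(total.value)  // 4.0
    }
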
diff --git a/test/pending/run/t5943b1.scala b/test/pending/run/t5943b1.scala
index 0d54718..79c638f 100644
--- a/test/pending/run/t5943b1.scala
+++ b/test/pending/run/t5943b1.scala
@@ -6,5 +6,5 @@ import scala.tools.reflect.ToolBox
 object Test extends App {
   val tb = cm.mkToolBox()
   val expr = tb.parse("math.sqrt(4.0)")
-  println(tb.typeCheck(expr))
+  println(tb.typecheck(expr))
 }
\ No newline at end of file
diff --git a/test/files/run/t7733.check b/test/pending/run/t7733.check
similarity index 100%
rename from test/files/run/t7733.check
rename to test/pending/run/t7733.check
diff --git a/test/files/run/t7733/Separate_1.scala b/test/pending/run/t7733/Separate_1.scala
similarity index 100%
rename from test/files/run/t7733/Separate_1.scala
rename to test/pending/run/t7733/Separate_1.scala
diff --git a/test/files/run/t7733/Test_2.scala b/test/pending/run/t7733/Test_2.scala
similarity index 100%
rename from test/files/run/t7733/Test_2.scala
rename to test/pending/run/t7733/Test_2.scala
diff --git a/test/pending/run/virtpatmat_anonfun_underscore.check b/test/pending/run/virtpatmat_anonfun_underscore.check
deleted file mode 100644
index e69de29..0000000
diff --git a/test/pending/shootout/fasta.scala b/test/pending/shootout/fasta.scala
index 8b71108..ae99ba5 100644
--- a/test/pending/shootout/fasta.scala
+++ b/test/pending/shootout/fasta.scala
@@ -5,7 +5,7 @@
 
 import java.io._
 
-object fasta { 
+object fasta {
    def main(args: Array[String]) = {
 
       val ALU =
@@ -18,31 +18,31 @@ object fasta {
          "AGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAA"
 
       val _IUB = Array(
-         Pair('a', 0.27), 
-         Pair('c', 0.12), 
-         Pair('g', 0.12), 
-         Pair('t', 0.27), 
-
-         Pair('B', 0.02), 
-         Pair('D', 0.02),
-         Pair('H', 0.02), 
-         Pair('K', 0.02), 
-         Pair('M', 0.02),
-         Pair('N', 0.02), 
-         Pair('R', 0.02), 
-         Pair('S', 0.02),
-         Pair('V', 0.02), 
-         Pair('W', 0.02), 
-         Pair('Y', 0.02)
+         ('a', 0.27),
+         ('c', 0.12),
+         ('g', 0.12),
+         ('t', 0.27),
+
+         ('B', 0.02),
+         ('D', 0.02),
+         ('H', 0.02),
+         ('K', 0.02),
+         ('M', 0.02),
+         ('N', 0.02),
+         ('R', 0.02),
+         ('S', 0.02),
+         ('V', 0.02),
+         ('W', 0.02),
+         ('Y', 0.02)
       )
 
       val IUB = makeCumulative(_IUB)
 
       val _HomoSapiens = Array(
-         Pair('a', 0.3029549426680), 
-         Pair('c', 0.1979883004921),
-         Pair('g', 0.1975473066391), 
-         Pair('t', 0.3015094502008)
+         ('a', 0.3029549426680),
+         ('c', 0.1979883004921),
+         ('g', 0.1975473066391),
+         ('t', 0.3015094502008)
       )
 
       val HomoSapiens = makeCumulative(_HomoSapiens)
@@ -61,15 +61,15 @@ object fasta {
       s.writeRandomSequence(HomoSapiens,n*5)
 
       s.close
-   } 
+   }
 
-   def makeCumulative(a: Array[Pair[Char,Double]]) = {
+   def makeCumulative(a: Array[Tuple2[Char,Double]]) = {
       var cp = 0.0
       a map (frequency =>
-         frequency match { 
-            case Pair(code,percent) => 
-               cp = cp + percent; new Frequency(code.toByte,cp) 
-         } 
+         frequency match {
+            case (code,percent) =>
+               cp = cp + percent; new Frequency(code.toByte,cp)
+         }
       )
    }
 
@@ -79,7 +79,7 @@ object fasta {
 // We could use instances of Pair or Tuple2 but specific labels
 // make the code more readable than index numbers
 
-class Frequency(_code: Byte, _percent: Double){ 
+class Frequency(_code: Byte, _percent: Double){
    var code = _code; var percent = _percent;
 }
 
@@ -101,13 +101,13 @@ class FastaOutputStream(out: OutputStream) extends BufferedOutputStream(out) {
          val m = if (n < LineLength) n else LineLength
 
          var i = 0
-         while (i < m){ 
+         while (i < m){
             if (k == kn) k = 0
             val b = alu(k)
             if (count < buf.length){ buf(count) = b; count = count + 1 }
             else { write(b) } // flush buffer
             k = k+1
-            i = i+1 
+            i = i+1
          }
 
          write(nl)
@@ -122,11 +122,11 @@ class FastaOutputStream(out: OutputStream) extends BufferedOutputStream(out) {
          val m = if (n < LineLength) n else LineLength
 
          var i = 0
-         while (i < m){ 
+         while (i < m){
             val b = selectRandom(distribution)
             if (count < buf.length){ buf(count) = b; count = count + 1 }
             else { write(b) } // flush buffer
-            i = i+1 
+            i = i+1
          }
 
          if (count < buf.length){ buf(count) = nl; count = count + 1 }
diff --git a/test/pending/shootout/revcomp.scala-2.scala b/test/pending/shootout/revcomp.scala-2.scala
index 92260ad..03fb25a 100644
--- a/test/pending/shootout/revcomp.scala-2.scala
+++ b/test/pending/shootout/revcomp.scala-2.scala
@@ -6,7 +6,7 @@
 import java.io._
 import scala.collection.mutable.Stack
 
-object revcomp { 
+object revcomp {
 
    val IUB = IUBCodeComplements
 
@@ -16,7 +16,7 @@ object revcomp {
       val a: Array[Byte] = new Array( 'z'.toByte )
 
       for (indexValue <- code zip comp)
-         indexValue match { case Pair(i,v) => a(i) = v }
+         indexValue match { case (i,v) => a(i) = v }
 
       a
    }
@@ -49,18 +49,18 @@ object revcomp {
 
       if (desc.length > 0) complementReverseWrite(desc, lines, w)
       w.close
-   } 
+   }
 
 
-   def complementReverseWrite(desc: String, lines: LineStack, 
+   def complementReverseWrite(desc: String, lines: LineStack,
          w: BufferedOutputStream) = {
 
       def inplaceComplementReverse(b: Array[Byte]) = {
-         var i = 0 
+         var i = 0
          var j = b.length - 1
          while (i < j){
-            val swap = b(i) 
-            b(i) = IUB( b(j) ) 
+            val swap = b(i)
+            b(i) = IUB( b(j) )
             b(j) = IUB( swap )
             i = i + 1
             j = j - 1
@@ -79,11 +79,11 @@ object revcomp {
       while (!lines.isEmpty) {
         val line = lines.pop
         inplaceComplementReverse(line)
-        
+
         if (isSplitLine){
            if (isFirstLine){ w.write(line); isFirstLine = false }
            else { w.write(line,0,n-k); w.write(nl); w.write(line,n-k,k) }
-        } 
+        }
         else { w.write(line); w.write(nl) }
       }
       if (isSplitLine && !isFirstLine) w.write(nl)
diff --git a/test/pending/shootout/revcomp.scala-3.scala b/test/pending/shootout/revcomp.scala-3.scala
index ae12f04..39a0409 100644
--- a/test/pending/shootout/revcomp.scala-3.scala
+++ b/test/pending/shootout/revcomp.scala-3.scala
@@ -6,7 +6,7 @@
 import java.io._
 import scala.collection.mutable.Stack
 
-object revcomp { 
+object revcomp {
    def main(args: Array[String]) = {
       val out = new FastaOutputStream(System.out)
       val in = new FastaInputStream(System.in)
@@ -17,12 +17,12 @@ object revcomp {
 
       in.close
       out.close
-   } 
+   }
 }
 
 
 trait FastaByteStream {
-   val nl = '\n'.toByte  
+   val nl = '\n'.toByte
 
    type Line = Array[Byte]
    type LineStack = Stack[Line]
@@ -31,13 +31,13 @@ trait FastaByteStream {
 
 // extend the Java BufferedInputStream class
 
-final class FastaInputStream(in: InputStream) 
+final class FastaInputStream(in: InputStream)
       extends BufferedInputStream(in) with FastaByteStream {
 
    val gt = '>'.toByte
    val sc = ';'.toByte
 
-   def readSequenceStack(): Pair[Line,LineStack] = {
+   def readSequenceStack(): Tuple2[Line,LineStack] = {
       var header: Line = null
       val lines: LineStack = new Stack
 
@@ -49,14 +49,14 @@ final class FastaInputStream(in: InputStream)
                header = line
             } else {
                pos = pos - line.length - 1   // reposition to start of line
-               return Pair(header,lines)
+               return (header,lines)
             }
          } else {
             if (c != sc) lines push line       // ';'
          }
          line = readLine()
       }
-      return Pair(header,lines)
+      return (header,lines)
    }
 
    def readLine() = {
@@ -65,7 +65,7 @@ final class FastaInputStream(in: InputStream)
       else {
          mark(128)                      // mark the start of the line
          if (count == 0) read()         // fill buffer
-        
+
          var i = markpos
          while (i < count && buf(i) != nl) i = i + 1
 
@@ -74,11 +74,11 @@ final class FastaInputStream(in: InputStream)
             while (i < count && buf(i) != nl) i = i + 1
          }
 
-         if (i < count){     
+         if (i < count){
             bytes = new Array(i - markpos)
             System.arraycopy(buf, markpos, bytes, 0, i - markpos);
             pos = i+1
-         }          
+         }
       }
       bytes
    }
@@ -87,7 +87,7 @@ final class FastaInputStream(in: InputStream)
 
 // extend the Java BufferedOutputStream class
 
-final class FastaOutputStream(in: OutputStream) 
+final class FastaOutputStream(in: OutputStream)
       extends BufferedOutputStream(in) with FastaByteStream {
 
    private val IUB = IUBCodeComplements
@@ -98,19 +98,19 @@ final class FastaOutputStream(in: OutputStream)
       val iub: Array[Byte] = new Array( 'z'.toByte )
 
       for (indexValue <- code zip comp)
-         indexValue match { case Pair(i,v) => iub(i) = v }
+         indexValue match { case (i,v) => iub(i) = v }
 
       iub
    }
 
-   def writeReverseComplement(sequence: Pair[Line,LineStack]) = {
+   def writeReverseComplement(sequence: Tuple2[Line,LineStack]) = {
 
       def inplaceComplementReverse(b: Array[Byte]) = {
-         var i = 0 
+         var i = 0
          var j = b.length - 1
          while (i < j){
-            val swap = b(i) 
-            b(i) = IUB( b(j) ) 
+            val swap = b(i)
+            b(i) = IUB( b(j) )
             b(j) = IUB( swap )
             i = i + 1
             j = j - 1
@@ -119,7 +119,7 @@ final class FastaOutputStream(in: OutputStream)
       }
 
       sequence match {
-         case Pair(header,lines) => {
+         case (header,lines) => {
 
             write(header); write(nl)
 
@@ -131,11 +131,11 @@ final class FastaOutputStream(in: OutputStream)
             while (!lines.isEmpty) {
                val line = lines.pop
                inplaceComplementReverse(line)
-        
+
                if (isSplitLine){
-                  if (isFirstLine){ write(line); isFirstLine = false } 
+                  if (isFirstLine){ write(line); isFirstLine = false }
                   else { write(line,0,LineLength-k); write(nl); write(line,LineLength-k,k) }
-               } 
+               }
                else { write(line); write(nl) }
             }
 
diff --git a/test/pending/t7629-view-bounds-removal.check b/test/pending/t7629-view-bounds-removal.check
new file mode 100644
index 0000000..dc52105
--- /dev/null
+++ b/test/pending/t7629-view-bounds-removal.check
@@ -0,0 +1,9 @@
+t7629-view-bounds-removal.scala:2: error: View bounds have been removed. Use an implicit parameter instead.
+Example: Instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`.
+  def f[A <% Int](a: A) = null
+          ^
+t7629-view-bounds-removal.scala:3: error: View bounds have been removed. Use an implicit parameter instead.
+Example: Instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`.
+  def g[C, B <: C, A <% B : Numeric](a: A) = null
+                     ^
+two errors found
diff --git a/test/pending/t7629-view-bounds-removal.flags b/test/pending/t7629-view-bounds-removal.flags
new file mode 100644
index 0000000..29f4ede
--- /dev/null
+++ b/test/pending/t7629-view-bounds-removal.flags
@@ -0,0 +1 @@
+-Xfuture
diff --git a/test/pending/t7629-view-bounds-removal.scala b/test/pending/t7629-view-bounds-removal.scala
new file mode 100644
index 0000000..a6ede1f
--- /dev/null
+++ b/test/pending/t7629-view-bounds-removal.scala
@@ -0,0 +1,4 @@
+object Test {
+  def f[A <% Int](a: A) = null
+  def g[C, B <: C, A <% B : Numeric](a: A) = null
+}
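
The new t7629 test checks the `-Xfuture` error text for view bounds, whose suggested migration is an explicit implicit-conversion parameter. A minimal sketch of that rewrite; the bodies are illustrative only, since the test's declarations just return null:

    object ViewBoundMigration {
      // was: def f[A <% Int](a: A) = ...
      def f[A](a: A)(implicit ev: A => Int): Int = ev(a) + 1

      // was: def g[C, B <: C, A <% B : Numeric](a: A) = ...
      def g[C, B <: C, A](a: A)(implicit ev: A => B, num: Numeric[A]): B = ev(a)
    }
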
diff --git a/test/pending/typetags_typeof_x.check b/test/pending/typetags_typeof_x.check
new file mode 100644
index 0000000..832a8bc
--- /dev/null
+++ b/test/pending/typetags_typeof_x.check
@@ -0,0 +1,8 @@
+List[T]
+C
+Int
+List[Any]
+AnyRef{def x: Int}
+Null
+Nothing
+Null
diff --git a/test/pending/typetags_typeof_x.scala b/test/pending/typetags_typeof_x.scala
new file mode 100644
index 0000000..08be6d4
--- /dev/null
+++ b/test/pending/typetags_typeof_x.scala
@@ -0,0 +1,14 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+  def foo[T](x: T) = weakTypeOf(List(x))
+  println(foo(2))
+  locally { class C; println(weakTypeOf(new C)) }
+
+  println(typeOf(2))
+  println(typeOf(List(1, "1")))
+  println(typeOf(new { def x = 2 }))
+  println(typeOf[Null])
+  println(typeOf[Nothing])
+  println(typeOf(null))
+}
\ No newline at end of file
diff --git a/test/postreview.py b/test/postreview.py
deleted file mode 100644
index 2e2518f..0000000
--- a/test/postreview.py
+++ /dev/null
@@ -1,2540 +0,0 @@
-#!/usr/bin/env python
-import cookielib
-import difflib
-import getpass
-import marshal
-import mimetools
-import ntpath
-import os
-import re
-import socket
-import stat
-import subprocess
-import sys
-import tempfile
-import urllib
-import urllib2
-from optparse import OptionParser
-from tempfile import mkstemp
-from urlparse import urljoin, urlparse
-
-try:
-    from hashlib import md5
-except ImportError:
-    # Support Python versions before 2.5.
-    from md5 import md5
-
-try:
-    import json
-except ImportError:
-    import simplejson as json
-
-# This specific import is necessary to handle the paths for
-# cygwin enabled machines.
-if (sys.platform.startswith('win')
-    or sys.platform.startswith('cygwin')):
-    import ntpath as cpath
-else:
-    import posixpath as cpath
-
-###
-# Default configuration -- user-settable variables follow.
-###
-
-# The following settings usually aren't needed, but if your Review
-# Board crew has specific preferences and doesn't want to express
-# them with command line switches, set them here and you're done.
-# In particular, setting the REVIEWBOARD_URL variable will allow
-# you to make it easy for people to submit reviews regardless of
-# their SCM setup.
-#
-# Note that in order for this script to work with a reviewboard site
-# that uses local paths to access a repository, the 'Mirror path'
-# in the repository setup page must be set to the remote URL of the
-# repository.
-
-#
-# Reviewboard URL.
-#
-# Set this if you wish to hard-code a default server to always use.
-# It's generally recommended to set this using your SCM repository
-# (for those that support it -- currently only SVN, Git, and Perforce).
-#
-# For example, on SVN:
-#   $ svn propset reviewboard:url http://reviewboard.example.com .
-#
-# Or with Git:
-#   $ git config reviewboard.url http://reviewboard.example.com
-#
-# On Perforce servers version 2008.1 and above:
-#   $ p4 counter reviewboard.url http://reviewboard.example.com
-#
-# Older Perforce servers only allow numerical counters, so embedding
-# the url in the counter name is also supported:
-#   $ p4 counter reviewboard.url.http:\|\|reviewboard.example.com 1
-#
-# Note that slashes are not allowed in Perforce counter names, so replace them
-# with pipe characters (they are a safe substitute as they are not used
-# unencoded in URLs). You may need to escape them when issuing the p4 counter
-# command as above.
-#
-# If this is not possible or desired, setting the value here will let
-# you get started quickly.
-#
-# For all other repositories, a .reviewboardrc file present at the top of
-# the checkout will also work. For example:
-#
-#   $ cat .reviewboardrc
-#   REVIEWBOARD_URL = "http://reviewboard.example.com"
-#
-REVIEWBOARD_URL = None
-
-# Default submission arguments.  These are all optional; run this
-# script with --help for descriptions of each argument.
-TARGET_GROUPS   = None
-TARGET_PEOPLE   = None
-SUBMIT_AS       = None
-PUBLISH         = False
-OPEN_BROWSER    = False
-
-# Debugging.  For development...
-DEBUG           = False
-
-###
-# End user-settable variables.
-###
-
-
-VERSION = "0.8"
-
-user_config = None
-tempfiles = []
-options = None
-
-
-class APIError(Exception):
-    pass
-
-
-class RepositoryInfo:
-    """
-    A representation of a source code repository.
-    """
-    def __init__(self, path=None, base_path=None, supports_changesets=False,
-                 supports_parent_diffs=False):
-        self.path = path
-        self.base_path = base_path
-        self.supports_changesets = supports_changesets
-        self.supports_parent_diffs = supports_parent_diffs
-        debug("repository info: %s" % self)
-
-    def __str__(self):
-        return "Path: %s, Base path: %s, Supports changesets: %s" % \
-            (self.path, self.base_path, self.supports_changesets)
-
-    def set_base_path(self, base_path):
-        if not base_path.startswith('/'):
-            base_path = '/' + base_path
-        debug("changing repository info base_path from %s to %s" % \
-              (self.base_path, base_path))
-        self.base_path = base_path
-
-    def find_server_repository_info(self, server):
-        """
-        Try to find the repository from the list of repositories on the server.
-        For Subversion, this could be a repository with a different URL. For
-        all other clients, this is a noop.
-        """
-        return self
-
-
-class SvnRepositoryInfo(RepositoryInfo):
-    """
-    A representation of a SVN source code repository. This version knows how to
-    find a matching repository on the server even if the URLs differ.
-    """
-    def __init__(self, path, base_path, uuid, supports_parent_diffs=False):
-        RepositoryInfo.__init__(self, path, base_path,
-                                supports_parent_diffs=supports_parent_diffs)
-        self.uuid = uuid
-
-    def find_server_repository_info(self, server):
-        """
-        The point of this function is to find a repository on the server that
-        matches self, even if the paths aren't the same. (For example, if self
-        uses an 'http' path, but the server uses a 'file' path for the same
-        repository.) It does this by comparing repository UUIDs. If the
-        repositories use the same path, you'll get back self, otherwise you'll
-        get a different SvnRepositoryInfo object (with a different path).
-        """
-        repositories = server.get_repositories()
-
-        for repository in repositories:
-            if repository['tool'] != 'Subversion':
-                continue
-
-            info = self._get_repository_info(server, repository)
-
-            if not info or self.uuid != info['uuid']:
-                continue
-
-            repos_base_path = info['url'][len(info['root_url']):]
-            relpath = self._get_relative_path(self.base_path, repos_base_path)
-            if relpath:
-                return SvnRepositoryInfo(info['url'], relpath, self.uuid)
-
-        # We didn't find a matching repository on the server. We'll just return
-        # self and hope for the best.
-        return self
-
-    def _get_repository_info(self, server, repository):
-        try:
-            return server.get_repository_info(repository['id'])
-        except APIError, e:
-            # If the server couldn't fetch the repository info, it will return
-            # code 210. Ignore those.
-            # Other more serious errors should still be raised, though.
-            rsp = e.args[0]
-            if rsp['err']['code'] == 210:
-                return None
-
-            raise e
-
-    def _get_relative_path(self, path, root):
-        pathdirs = self._split_on_slash(path)
-        rootdirs = self._split_on_slash(root)
-
-        # root is empty, so anything relative to that is itself
-        if len(rootdirs) == 0:
-            return path
-
-        # If one of the directories doesn't match, then path is not relative
-        # to root.
-        if rootdirs != pathdirs:
-            return None
-
-        # All the directories matched, so the relative path is whatever
-        # directories are left over. The base_path can't be empty, though, so
-        # if the paths are the same, return '/'
-        if len(pathdirs) == len(rootdirs):
-            return '/'
-        else:
-            return '/'.join(pathdirs[len(rootdirs):])
-
-    def _split_on_slash(self, path):
-        # Split on slashes, but ignore multiple slashes and throw away any
-        # trailing slashes.
-        split = re.split('/*', path)
-        if split[-1] == '':
-            split = split[0:-1]
-        return split
-
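A minimal standalone sketch (illustrative only, not from the deleted file) of the base-path matching that _get_relative_path() performs, using the prefix comparison its comments describe:

    import re

    def relative_path(path, root):
        # Split on slashes, ignoring repeated and trailing slashes, then
        # treat `root` as a required prefix of `path` (illustrative sketch).
        split = lambda p: [s for s in re.split('/+', p) if s]
        pathdirs, rootdirs = split(path), split(root)
        if not rootdirs:
            return path
        if rootdirs != pathdirs[:len(rootdirs)]:
            return None                      # path is not under root
        rest = pathdirs[len(rootdirs):]
        return '/'.join(rest) if rest else '/'

    # relative_path('/repo/trunk', '/repo') == 'trunk'
    # relative_path('/repo', '/repo')       == '/'
    # relative_path('/other', '/repo')      is None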
-
-class ReviewBoardHTTPPasswordMgr(urllib2.HTTPPasswordMgr):
-    """
-    Adds HTTP authentication support for URLs.
-
-    Python 2.4's password manager has a bug in http authentication when the
-    target server uses a non-standard port.  This works around that bug on
-    Python 2.4 installs. This also allows post-review to prompt for passwords
-    in a consistent way.
-
-    See: http://bugs.python.org/issue974757
-    """
-    def __init__(self, reviewboard_url):
-        self.passwd  = {}
-        self.rb_url  = reviewboard_url
-        self.rb_user = None
-        self.rb_pass = None
-
-    def find_user_password(self, realm, uri):
-        if uri.startswith(self.rb_url):
-            if self.rb_user is None or self.rb_pass is None:
-                print "==> HTTP Authentication Required"
-                print 'Enter username and password for "%s" at %s' % \
-                    (realm, urlparse(uri)[1])
-                self.rb_user = raw_input('Username: ')
-                self.rb_pass = getpass.getpass('Password: ')
-
-            return self.rb_user, self.rb_pass
-        else:
-            # If this is an auth request for some other domain (since HTTP
-            # handlers are global), fall back to standard password management.
-            return urllib2.HTTPPasswordMgr.find_user_password(self, realm, uri)
-
-
-class ReviewBoardServer(object):
-    """
-    An instance of a Review Board server.
-    """
-    def __init__(self, url, info, cookie_file):
-        self.url = url
-        if self.url[-1] != '/':
-            self.url += '/'
-        self._info = info
-        self._server_info = None
-        self.cookie_file = cookie_file
-        self.cookie_jar  = cookielib.MozillaCookieJar(self.cookie_file)
-
-        # Set up the HTTP libraries to support all of the features we need.
-        cookie_handler = urllib2.HTTPCookieProcessor(self.cookie_jar)
-        password_mgr   = ReviewBoardHTTPPasswordMgr(self.url)
-        auth_handler   = urllib2.HTTPBasicAuthHandler(password_mgr)
-
-        opener = urllib2.build_opener(cookie_handler, auth_handler)
-        opener.addheaders = [('User-agent', 'post-review/' + VERSION)]
-        urllib2.install_opener(opener)
-
-    def login(self, force=False):
-        """
-        Logs in to a Review Board server, prompting the user for login
-        information if needed.
-        """
-        if not force and self.has_valid_cookie():
-            return
-
-        print "==> Review Board Login Required"
-        print "Enter username and password for Review Board at %s" % self.url
-        if options.username:
-            username = options.username
-        elif options.submit_as:
-            username = options.submit_as
-        else:
-            username = raw_input('Username: ')
-
-        if not options.password:
-            password = getpass.getpass('Password: ')
-        else:
-            password = options.password
-
-        debug('Logging in with username "%s"' % username)
-        try:
-            self.api_post('api/json/accounts/login/', {
-                'username': username,
-                'password': password,
-            })
-        except APIError, e:
-            rsp, = e.args
-
-            die("Unable to log in: %s (%s)" % (rsp["err"]["msg"],
-                                               rsp["err"]["code"]))
-
-        debug("Logged in.")
-
-    def has_valid_cookie(self):
-        """
-        Load the user's cookie file and see if they have a valid
-        'rbsessionid' cookie for the current Review Board server.  Returns
-        true if so and false otherwise.
-        """
-        try:
-            parsed_url = urlparse(self.url)
-            host = parsed_url[1]
-            path = parsed_url[2] or '/'
-
-            # Cookie files don't store port numbers, unfortunately, so
-            # get rid of the port number if it's present.
-            host = host.split(":")[0]
-
-            debug("Looking for '%s %s' cookie in %s" % \
-                  (host, path, self.cookie_file))
-            self.cookie_jar.load(self.cookie_file, ignore_expires=True)
-
-            try:
-                cookie = self.cookie_jar._cookies[host][path]['rbsessionid']
-
-                if not cookie.is_expired():
-                    debug("Loaded valid cookie -- no login required")
-                    return True
-
-                debug("Cookie file loaded, but cookie has expired")
-            except KeyError:
-                debug("Cookie file loaded, but no cookie for this server")
-        except IOError, error:
-            debug("Couldn't load cookie file: %s" % error)
-
-        return False
-
-    def new_review_request(self, changenum, submit_as=None):
-        """
-        Creates a review request on a Review Board server, updating an
-        existing one if the changeset number already exists.
-
-        If submit_as is provided, the specified user name will be recorded as
-        the submitter of the review request (given that the logged in user has
-        the appropriate permissions).
-        """
-        try:
-            debug("Attempting to create review request for %s" % changenum)
-            data = { 'repository_path': self.info.path }
-
-            if changenum:
-                data['changenum'] = changenum
-
-            if submit_as:
-                debug("Submitting the review request as %s" % submit_as)
-                data['submit_as'] = submit_as
-
-            rsp = self.api_post('api/json/reviewrequests/new/', data)
-        except APIError, e:
-            rsp, = e.args
-
-            if not options.diff_only:
-                if rsp['err']['code'] == 204: # Change number in use
-                    debug("Review request already exists. Updating it...")
-                    rsp = self.api_post(
-                        'api/json/reviewrequests/%s/update_from_changenum/' %
-                        rsp['review_request']['id'])
-                else:
-                    raise e
-
-        debug("Review request created")
-        return rsp['review_request']
-
-    def set_review_request_field(self, review_request, field, value):
-        """
-        Sets a field in a review request to the specified value.
-        """
-        rid = review_request['id']
-
-        debug("Attempting to set field '%s' to '%s' for review request '%s'" %
-              (field, value, rid))
-
-        self.api_post('api/json/reviewrequests/%s/draft/set/' % rid, {
-            field: value,
-        })
-
-    def get_review_request(self, rid):
-        """
-        Returns the review request with the specified ID.
-        """
-        rsp = self.api_get('api/json/reviewrequests/%s/' % rid)
-        return rsp['review_request']
-
-    def get_repositories(self):
-        """
-        Returns the list of repositories on this server.
-        """
-        rsp = self.api_get('/api/json/repositories/')
-        return rsp['repositories']
-
-    def get_repository_info(self, rid):
-        """
-        Returns detailed information about a specific repository.
-        """
-        rsp = self.api_get('/api/json/repositories/%s/info/' % rid)
-        return rsp['info']
-
-    def save_draft(self, review_request):
-        """
-        Saves a draft of a review request.
-        """
-        self.api_post("api/json/reviewrequests/%s/draft/save/" %
-                      review_request['id'])
-        debug("Review request draft saved")
-
-    def upload_diff(self, review_request, diff_content, parent_diff_content):
-        """
-        Uploads a diff to a Review Board server.
-        """
-        debug("Uploading diff, size: %d" % len(diff_content))
-
-        if parent_diff_content:
-            debug("Uploading parent diff, size: %d" % len(parent_diff_content))
-
-        fields = {}
-        files = {}
-
-        if self.info.base_path:
-            fields['basedir'] = self.info.base_path
-
-        files['path'] = {
-            'filename': 'diff',
-            'content': diff_content
-        }
-
-        if parent_diff_content:
-            files['parent_diff_path'] = {
-                'filename': 'parent_diff',
-                'content': parent_diff_content
-            }
-
-        self.api_post('api/json/reviewrequests/%s/diff/new/' %
-                      review_request['id'], fields, files)
-
-    def publish(self, review_request):
-        """
-        Publishes a review request.
-        """
-        debug("Publishing")
-        self.api_post('api/json/reviewrequests/%s/publish/' %
-                      review_request['id'])
-
-    def _get_server_info(self):
-        if not self._server_info:
-            self._server_info = self._info.find_server_repository_info(self)
-
-        return self._server_info
-
-    info = property(_get_server_info)
-
-    def process_json(self, data):
-        """
-        Loads in a JSON file and returns the data if successful. On failure,
-        APIError is raised.
-        """
-        rsp = json.loads(data)
-
-        if rsp['stat'] == 'fail':
-            raise APIError, rsp
-
-        return rsp
-
-    def http_get(self, path):
-        """
-        Performs an HTTP GET on the specified path, storing any cookies that
-        were set.
-        """
-        debug('HTTP GETting %s' % path)
-
-        url = self._make_url(path)
-
-        try:
-            rsp = urllib2.urlopen(url).read()
-            self.cookie_jar.save(self.cookie_file)
-            return rsp
-        except urllib2.HTTPError, e:
-            print "Unable to access %s (%s). The host path may be invalid" % \
-                (url, e.code)
-            try:
-                debug(e.read())
-            except AttributeError:
-                pass
-            die()
-
-    def _make_url(self, path):
-        """Given a path on the server returns a full http:// style url"""
-        app = urlparse(self.url)[2]
-        if path[0] == '/':
-            url = urljoin(self.url, app[:-1] + path)
-        else:
-            url = urljoin(self.url, app + path)
-
-        if not url.startswith('http'):
-            url = 'http://%s' % url
-        return url
-
-    def api_get(self, path):
-        """
-        Performs an API call using HTTP GET at the specified path.
-        """
-        return self.process_json(self.http_get(path))
-
-    def http_post(self, path, fields, files=None):
-        """
-        Performs an HTTP POST on the specified path, storing any cookies that
-        were set.
-        """
-        if fields:
-            debug_fields = fields.copy()
-        else:
-            debug_fields = {}
-
-        if 'password' in debug_fields:
-            debug_fields["password"] = "**************"
-        url = self._make_url(path)
-        debug('HTTP POSTing to %s: %s' % (url, debug_fields))
-
-        content_type, body = self._encode_multipart_formdata(fields, files)
-        headers = {
-            'Content-Type': content_type,
-            'Content-Length': str(len(body))
-        }
-
-        try:
-            r = urllib2.Request(url, body, headers)
-            data = urllib2.urlopen(r).read()
-            self.cookie_jar.save(self.cookie_file)
-            return data
-        except urllib2.URLError, e:
-            try:
-                debug(e.read())
-            except AttributeError:
-                pass
-
-            die("Unable to access %s. The host path may be invalid\n%s" % \
-                (url, e))
-        except urllib2.HTTPError, e:
-            die("Unable to access %s (%s). The host path may be invalid\n%s" % \
-                (url, e.code, e.read()))
-
-    def api_post(self, path, fields=None, files=None):
-        """
-        Performs an API call using HTTP POST at the specified path.
-        """
-        return self.process_json(self.http_post(path, fields, files))
-
-    def _encode_multipart_formdata(self, fields, files):
-        """
-        Encodes data for use in an HTTP POST.
-        """
-        BOUNDARY = mimetools.choose_boundary()
-        content = ""
-
-        fields = fields or {}
-        files = files or {}
-
-        for key in fields:
-            content += "--" + BOUNDARY + "\r\n"
-            content += "Content-Disposition: form-data; name=\"%s\"\r\n" % key
-            content += "\r\n"
-            content += fields[key] + "\r\n"
-
-        for key in files:
-            filename = files[key]['filename']
-            value = files[key]['content']
-            content += "--" + BOUNDARY + "\r\n"
-            content += "Content-Disposition: form-data; name=\"%s\"; " % key
-            content += "filename=\"%s\"\r\n" % filename
-            content += "\r\n"
-            content += value + "\r\n"
-
-        content += "--" + BOUNDARY + "--\r\n"
-        content += "\r\n"
-
-        content_type = "multipart/form-data; boundary=%s" % BOUNDARY
-
-        return content_type, content
-
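A small sketch (illustrative only, not from the deleted file; values are made up) of the JSON envelope process_json() expects: the client raises APIError whenever 'stat' is 'fail', and callers inspect err['code'] (e.g. 204 for "change number in use", 210 for "repository info unavailable", as the comments above note):

    import json

    ok = json.loads('{"stat": "ok", "review_request": {"id": 42}}')
    fail = json.loads('{"stat": "fail", "err": {"code": 210, "msg": "..."}}')

    assert ok['stat'] == 'ok'
    assert fail['stat'] == 'fail' and fail['err']['code'] == 210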
-
-class SCMClient(object):
-    """
-    A base representation of an SCM tool for fetching repository information
-    and generating diffs.
-    """
-    def get_repository_info(self):
-        return None
-
-    def scan_for_server(self, repository_info):
-        """
-        Scans the current directory and its parents to find a .reviewboardrc
-        file containing the server path.
-        """
-        server_url = self._get_server_from_config(user_config, repository_info)
-        if server_url:
-            return server_url
-
-        for path in walk_parents(os.getcwd()):
-            filename = os.path.join(path, ".reviewboardrc")
-            if os.path.exists(filename):
-                config = load_config_file(filename)
-                server_url = self._get_server_from_config(config,
-                                                          repository_info)
-                if server_url:
-                    return server_url
-
-        return None
-
-    def diff(self, args):
-        """
-        Returns the generated diff and optional parent diff for this
-        repository.
-
-        The returned tuple is (diff_string, parent_diff_string)
-        """
-        return (None, None)
-
-    def diff_between_revisions(self, revision_range, args, repository_info):
-        """
-        Returns the generated diff between revisions in the repository.
-        """
-        return None
-
-    def _get_server_from_config(self, config, repository_info):
-        if 'REVIEWBOARD_URL' in config:
-            return config['REVIEWBOARD_URL']
-        elif 'TREES' in config:
-            trees = config['TREES']
-            if not isinstance(trees, dict):
-                die("Warning: 'TREES' in config file is not a dict!")
-
-            if repository_info.path in trees and \
-               'REVIEWBOARD_URL' in trees[repository_info.path]:
-                return trees[repository_info.path]['REVIEWBOARD_URL']
-
-        return None
-
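A sketch of a .reviewboardrc that _get_server_from_config() above would understand, assuming load_config_file() evaluates it as Python (server URL and repository path are made up; only the REVIEWBOARD_URL and TREES keys matter here):

    REVIEWBOARD_URL = 'https://reviewboard.example.com/'

    # Optional: per-repository mapping, keyed by the repository path that
    # get_repository_info() reports for the working copy.
    TREES = {
        'https://svn.example.com/repo': {
            'REVIEWBOARD_URL': 'https://reviewboard.example.com/',
        },
    }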
-
-class CVSClient(SCMClient):
-    """
-    A wrapper around the cvs tool that fetches repository
-    information and generates compatible diffs.
-    """
-    def get_repository_info(self):
-        if not check_install("cvs"):
-            return None
-
-        cvsroot_path = os.path.join("CVS", "Root")
-
-        if not os.path.exists(cvsroot_path):
-            return None
-
-        fp = open(cvsroot_path, "r")
-        repository_path = fp.read().strip()
-        fp.close()
-
-        i = repository_path.find("@")
-        if i != -1:
-            repository_path = repository_path[i + 1:]
-
-        i = repository_path.find(":")
-        if i != -1:
-            host = repository_path[:i]
-            try:
-                canon = socket.getfqdn(host)
-                repository_path = repository_path.replace('%s:' % host,
-                                                          '%s:' % canon)
-            except socket.error, msg:
-                debug("failed to get fqdn for %s, msg=%s" % (host, msg))
-
-        return RepositoryInfo(path=repository_path)
-
-    def diff(self, files):
-        """
-        Performs a diff across all modified files in a CVS repository.
-
-        CVS repositories do not support branches of branches in a way that
-        makes parent diffs possible, so we never return a parent diff
-        (the second value in the tuple).
-        """
-        return (self.do_diff(files), None)
-
-    def diff_between_revisions(self, revision_range, args, repository_info):
-        """
-        Performs a diff between 2 revisions of a CVS repository.
-        """
-        revs = []
-
-        for rev in revision_range.split(":"):
-            revs += ["-r", rev]
-
-        return self.do_diff(revs)
-
-    def do_diff(self, params):
-        """
-        Performs the actual diff operation through cvs diff, handling
-        fake errors generated by CVS.
-        """
-        # Diff returns "1" if differences were found.
-        return execute(["cvs", "diff", "-uN"] + params,
-                        extra_ignore_errors=(1,))
-
-
-class ClearCaseClient(SCMClient):
-    """
-    A wrapper around the clearcase tool that fetches repository
-    information and generates compatible diffs.
-    This client assumes that Cygwin is installed on Windows.
-    """
-    ccroot_path = "/view/reviewboard.diffview/vobs/"
-    viewinfo = ""
-    viewtype = "snapshot"
-
-    def get_filename_hash(self, fname):
-        # Hash the filename string so it's easy to find the file later on.
-        return md5(fname).hexdigest()
-
-    def get_repository_info(self):
-        if not check_install('cleartool help'):
-            return None
-
-        # We must be running this from inside a view.
-        # Otherwise it doesn't make sense.
-        self.viewinfo = execute(["cleartool", "pwv", "-short"])
-        if self.viewinfo.startswith('** NONE'):
-            return None
-
-        # Returning the hardcoded clearcase root path to match the server
-        #   repository path.
-        # There is no reason to have a dynamic path unless you have
-        #   multiple clearcase repositories. This should be implemented.
-        return RepositoryInfo(path=self.ccroot_path,
-                              base_path=self.ccroot_path,
-                              supports_parent_diffs=False)
-
-    def get_previous_version(self, files):
-        file = []
-        curdir = os.getcwd()
-
-        # In the Cygwin case we must transform a Linux-like path into a
-        #   Windows-like path, including the drive letter.
-        if 'cygdrive' in curdir:
-            where = curdir.index('cygdrive') + 9
-            drive_letter = curdir[where:where+1]
-            curdir = drive_letter + ":\\" + curdir[where+2:len(curdir)]
-
-        for key in files:
-            # Sometimes there is a quote in the filename. It must be removed.
-            key = key.replace('\'', '')
-            elem_path = cpath.normpath(os.path.join(curdir, key))
-
-            # Removing anything before the last /vobs
-            #   because it may be repeated.
-            elem_path_idx = elem_path.rfind("/vobs")
-            if elem_path_idx != -1:
-                elem_path = elem_path[elem_path_idx:len(elem_path)].strip("\"")
-
-            # Call cleartool to get this version and the previous version
-            #   of the element.
-            curr_version, pre_version = execute(
-                ["cleartool", "desc", "-pre", elem_path])
-            curr_version = cpath.normpath(curr_version)
-            pre_version = pre_version.split(':')[1].strip()
-
-            # If a specific version was given, remove it from the path
-            #   to avoid version duplication
-            if "@@" in elem_path:
-                elem_path = elem_path[:elem_path.rfind("@@")]
-            file.append(elem_path + "@@" + pre_version)
-            file.append(curr_version)
-
-        # Determine if the view type is snapshot or dynamic.
-        if os.path.exists(file[0]):
-            self.viewtype = "dynamic"
-
-        return file
-
-    def get_extended_namespace(self, files):
-        """
-        Parses the file path to get the extended namespace
-        """
-        versions = self.get_previous_version(files)
-
-        evfiles = []
-        hlist = []
-
-        for vkey in versions:
-            # Verify if it is a checkedout file.
-            if "CHECKEDOUT" in vkey:
-                # For checkedout files just add it to the file list
-                #   since it cannot be accessed outside the view.
-                splversions = vkey[:vkey.rfind("@@")]
-                evfiles.append(splversions)
-            else:
-                # For checkedin files.
-                ext_path = []
-                ver = []
-                fname = ""      # fname holds the file name without the version.
-                (bpath, fpath) = cpath.splitdrive(vkey)
-                if bpath :
-                    # Windows.
-                    # The version (if specified like file.c@@/main/1)
-                    #   should be kept as a single string
-                    #   so split the path and concat the file name
-                    #   and version in the last position of the list.
-                    ver = fpath.split("@@")
-                    splversions = fpath[:vkey.rfind("@@")].split("\\")
-                    fname = splversions.pop()
-                    splversions.append(fname + ver[1])
-                else :
-                    # Linux.
-                    bpath = vkey[:vkey.rfind("vobs")+4]
-                    fpath = vkey[vkey.rfind("vobs")+5:]
-                    ver = fpath.split("@@")
-                    splversions =  ver[0][:vkey.rfind("@@")].split("/")
-                    fname = splversions.pop()
-                    splversions.append(fname + ver[1])
-
-                filename = splversions.pop()
-                bpath = cpath.normpath(bpath + "/")
-                elem_path = bpath
-
-                for key in splversions:
-                    # For each element (directory) in the path,
-                    #   get its version from clearcase.
-                    elem_path = cpath.join(elem_path, key)
-
-                    # This is the version to be appended to the extended
-                    #   path list.
-                    this_version = execute(
-                        ["cleartool", "desc", "-fmt", "%Vn",
-                        cpath.normpath(elem_path)])
-                    if this_version:
-                        ext_path.append(key + "/@@" + this_version + "/")
-                    else:
-                        ext_path.append(key + "/")
-
-                # This must be done in case we haven't specified
-                #   the version on the command line.
-                ext_path.append(cpath.normpath(fname + "/@@" +
-                    vkey[vkey.rfind("@@")+2:len(vkey)]))
-                epstr = cpath.join(bpath, cpath.normpath(''.join(ext_path)))
-                evfiles.append(epstr)
-
-                """
-                In Windows, there is a problem with long names (> 254).
-                In this case, we hash the string and copy the unextended
-                  filename to a temp file whose name is the hash.
-                This way we can get the file later on for diff.
-                The same problem applies to snapshot views where the
-                  extended name isn't available.
-                The previous file must be copied from the CC server
-                  to a local dir.
-                """
-                if cpath.exists(epstr) :
-                    pass
-                else:
-                    if len(epstr) > 254 or self.viewtype == "snapshot":
-                        name = self.get_filename_hash(epstr)
-                        # Check if this hash is already in the list
-                        try:
-                            i = hlist.index(name)
-                            die("ERROR: duplicate value %s : %s" %
-                                (name, epstr))
-                        except ValueError:
-                            hlist.append(name)
-
-                        normkey = cpath.normpath(vkey)
-                        td = tempfile.gettempdir()
-                        # In the Cygwin case we must transform a Linux-like
-                        # path into a Windows-like path, including drive letter.
-                        if 'cygdrive' in td:
-                            where = td.index('cygdrive') + 9
-                            drive_letter = td[where:where+1] + ":"
-                            td = cpath.join(drive_letter, td[where+1:])
-                        tf = cpath.normpath(cpath.join(td, name))
-                        if cpath.exists(tf):
-                            debug("WARNING: FILE EXISTS")
-                            os.unlink(tf)
-                        execute(["cleartool", "get", "-to", tf, normkey])
-                    else:
-                        die("ERROR: FILE NOT FOUND : %s" % epstr)
-
-        return evfiles
-
-    def get_files_from_label(self, label):
-        voblist=[]
-        # Get the list of vobs for the current view
-        allvoblist = execute(["cleartool", "lsvob", "-short"]).split()
-        # For each vob, find if the label is present
-        for vob in allvoblist:
-            try:
-                execute(["cleartool", "describe", "-local",
-                    "lbtype:%s@%s" % (label, vob)]).split()
-                voblist.append(vob)
-            except:
-                pass
-
-        filelist=[]
-        # For each vob containing the label, get the file list
-        for vob in voblist:
-            try:
-                res = execute(["cleartool", "find", vob, "-all", "-version",
-                    "lbtype(%s)" % label, "-print"])
-                filelist.extend(res.split())
-            except :
-                pass
-
-        # Return only the unique items.
-        return set(filelist)
-
-    def diff(self, files):
-        """
-        Performs a diff of the specified file and its previous version.
-        """
-        # We must be running this from inside a view.
-        # Otherwise it doesn't make sense.
-        return self.do_diff(self.get_extended_namespace(files))
-
-    def diff_label(self, label):
-        """
-        Get the files that are attached to a label and diff them
-        TODO
-        """
-        return self.diff(self.get_files_from_label(label))
-
-    def diff_between_revisions(self, revision_range, args, repository_info):
-        """
-        Performs a diff between 2 revisions of a CC repository.
-        """
-        rev_str = ''
-
-        for rev in revision_range.split(":"):
-            rev_str += "-r %s " % rev
-
-        return self.do_diff(rev_str)
-
-    def do_diff(self, params):
-        # Diff returns "1" if differences were found.
-        # Add the view name and view type to the description
-        if options.description:
-            options.description = ("VIEW: " + self.viewinfo +
-                "VIEWTYPE: " + self.viewtype + "\n" + options.description)
-        else:
-            options.description = (self.viewinfo +
-                "VIEWTYPE: " + self.viewtype + "\n")
-
-        o = []
-        Feol = False
-        while len(params) > 0:
-            # Read both original and modified files.
-            onam = params.pop(0)
-            mnam = params.pop(0)
-            file_data = []
-            do_rem = False
-            # If the filename length is greater than 254 chars on Windows,
-            #   we copied the file to a temp file
-            #   because open() will not work for paths longer than 254 chars.
-            # This applies to both the original and
-            #   modified files if the name size is > 254.
-            for filenam in (onam, mnam) :
-                if cpath.exists(filenam) and self.viewtype == "dynamic":
-                    do_rem = False
-                    fn = filenam
-                elif len(filenam) > 254 or self.viewtype == "snapshot":
-                    fn = self.get_filename_hash(filenam)
-                    fn = cpath.join(tempfile.gettempdir(), fn)
-                    do_rem = True
-                fd = open(cpath.normpath(fn))
-                fdata = fd.readlines()
-                fd.close()
-                file_data.append(fdata)
-                # If the file was temp, it should be removed.
-                if do_rem:
-                    os.remove(filenam)
-
-            modi = file_data.pop()
-            orig = file_data.pop()
-
-            # For snapshot views, the local directories must be removed because
-            #   they will break the diff on the server. Just replacing
-            #   everything before the view name (including the view name) with
-            #   vobs does the job.
-            if (self.viewtype == "snapshot"
-                and (sys.platform.startswith('win')
-                  or sys.platform.startswith('cygwin'))):
-                    vinfo = self.viewinfo.rstrip("\r\n")
-                    mnam = "c:\\\\vobs" + mnam[mnam.rfind(vinfo) + len(vinfo):]
-                    onam = "c:\\\\vobs" + onam[onam.rfind(vinfo) + len(vinfo):]
-            # Call the diff lib to generate a diff.
-            # The dates are bogus, since they don't matter anyway.
-            # The only requirement is that two spaces are needed so the server
-            #   can identify the headers correctly.
-            diff = difflib.unified_diff(orig, modi, onam, mnam,
-               '  2002-02-21 23:30:39.942229878 -0800',
-               '  2002-02-21 23:30:50.442260588 -0800', lineterm=' \n')
-            # Transform the generator output into a string output
-            #   Use a comprehension instead of a generator,
-            #   so Python 2.3.x doesn't fail to interpret it.
-            diffstr = ''.join([str(l) for l in diff])
-            # Workaround for the difflib no new line at end of file
-            #   problem.
-            if not diffstr.endswith('\n'):
-                diffstr = diffstr + ("\n\\ No newline at end of file\n")
-            o.append(diffstr)
-
-        ostr = ''.join(o)
-        return (ostr, None) # diff, parent_diff (not supported)
-
-
-class SVNClient(SCMClient):
-    """
-    A wrapper around the svn Subversion tool that fetches repository
-    information and generates compatible diffs.
-    """
-    def get_repository_info(self):
-        if not check_install('svn help'):
-            return None
-
-        # Get the SVN repository path (either via a working copy or
-        # a supplied URI)
-        svn_info_params = ["svn", "info"]
-        if options.repository_url:
-            svn_info_params.append(options.repository_url)
-        data = execute(svn_info_params,
-                       ignore_errors=True)
-        m = re.search(r'^Repository Root: (.+)$', data, re.M)
-        if not m:
-            return None
-
-        path = m.group(1)
-
-        m = re.search(r'^URL: (.+)$', data, re.M)
-        if not m:
-            return None
-
-        base_path = m.group(1)[len(path):] or "/"
-
-        m = re.search(r'^Repository UUID: (.+)$', data, re.M)
-        if not m:
-            return None
-
-        return SvnRepositoryInfo(path, base_path, m.group(1))
-
-    def scan_for_server(self, repository_info):
-        # Scan first for dot files, since it's faster and will cover the
-        # user's $HOME/.reviewboardrc
-        server_url = super(SVNClient, self).scan_for_server(repository_info)
-        if server_url:
-            return server_url
-
-        return self.scan_for_server_property(repository_info)
-
-    def scan_for_server_property(self, repository_info):
-        def get_url_prop(path):
-            url = execute(["svn", "propget", "reviewboard:url", path]).strip()
-            return url or None
-
-        for path in walk_parents(os.getcwd()):
-            if not os.path.exists(os.path.join(path, ".svn")):
-                break
-
-            prop = get_url_prop(path)
-            if prop:
-                return prop
-
-        return get_url_prop(repository_info.path)
-
-    def diff(self, files):
-        """
-        Performs a diff across all modified files in a Subversion repository.
-
-        SVN repositories do not support branches of branches in a way that
-        makes parent diffs possible, so we never return a parent diff
-        (the second value in the tuple).
-        """
-        return (self.do_diff(["svn", "diff", "--diff-cmd=diff"] + files),
-                None)
-
-    def diff_between_revisions(self, revision_range, args, repository_info):
-        """
-        Performs a diff between 2 revisions of a Subversion repository.
-        """
-        if options.repository_url:
-            revisions = revision_range.split(':')
-            if len(revisions) < 1:
-                return None
-            elif len(revisions) == 1:
-                revisions.append('HEAD')
-
-            # if a new path was supplied at the command line, set it
-            if len(args):
-                repository_info.set_base_path(args[0])
-
-            url = repository_info.path + repository_info.base_path
-
-            old_url = url + '@' + revisions[0]
-            new_url = url + '@' + revisions[1]
-
-            return self.do_diff(["svn", "diff", "--diff-cmd=diff", old_url,
-                                 new_url],
-                                repository_info)
-        # Otherwise, perform the revision range diff using a working copy
-        else:
-            return self.do_diff(["svn", "diff", "--diff-cmd=diff", "-r",
-                                 revision_range],
-                                repository_info)
-
-    def do_diff(self, cmd, repository_info=None):
-        """
-        Performs the actual diff operation, handling renames and converting
-        paths to absolute.
-        """
-        diff = execute(cmd, split_lines=True)
-        diff = self.handle_renames(diff)
-        diff = self.convert_to_absolute_paths(diff, repository_info)
-
-        return ''.join(diff)
-
-    def handle_renames(self, diff_content):
-        """
-        The output of svn diff is incorrect when the file in question came
-        into being via svn mv/cp. Although the patch for these files are
-        relative to its parent, the diff header doesn't reflect this.
-        This function fixes the relevant section headers of the patch to
-        portray this relationship.
-        """
-
-        # svn diff against a repository URL on two revisions appears to
-        # handle moved files properly, so only adjust the diff file names
-        # if they were created using a working copy.
-        if options.repository_url:
-            return diff_content
-
-        result = []
-
-        from_line = ""
-        for line in diff_content:
-            if line.startswith('--- '):
-                from_line = line
-                continue
-
-            # This is where we decide how to mangle the previous '--- '
-            if line.startswith('+++ '):
-                to_file, _ = self.parse_filename_header(line[4:])
-                info       = self.svn_info(to_file)
-                if info.has_key("Copied From URL"):
-                    url       = info["Copied From URL"]
-                    root      = info["Repository Root"]
-                    from_file = urllib.unquote(url[len(root):])
-                    result.append(from_line.replace(to_file, from_file))
-                else:
-                    result.append(from_line) #as is, no copy performed
-
-            # We only mangle '---' lines. All others get added straight to
-            # the output.
-            result.append(line)
-
-        return result
-
-
-    def convert_to_absolute_paths(self, diff_content, repository_info):
-        """
-        Converts relative paths in a diff output to absolute paths.
-        This handles paths that have been svn switched to other parts of the
-        repository.
-        """
-
-        result = []
-
-        for line in diff_content:
-            front = None
-            if line.startswith('+++ ') or line.startswith('--- ') or line.startswith('Index: '):
-                front, line = line.split(" ", 1)
-
-            if front:
-                if line.startswith('/'): #already absolute
-                    line = front + " " + line
-                else:
-                    # filename and rest of line (usually the revision
-                    # component)
-                    file, rest = self.parse_filename_header(line)
-
-                    # If working with a diff generated outside of a working
-                    # copy, then file paths are already absolute, so just
-                    # add initial slash.
-                    if options.repository_url:
-                        path = urllib.unquote(
-                            "%s/%s" % (repository_info.base_path, file))
-                    else:
-                        info = self.svn_info(file)
-                        url  = info["URL"]
-                        root = info["Repository Root"]
-                        path = urllib.unquote(url[len(root):])
-
-                    line = front + " " + path + rest
-
-            result.append(line)
-
-        return result
-
-    def svn_info(self, path):
-        """Return a dict which is the result of 'svn info' at a given path."""
-        svninfo = {}
-        for info in execute(["svn", "info", path],
-                            split_lines=True):
-            parts = info.strip().split(": ", 1)
-            if len(parts) == 2:
-                key, value = parts
-                svninfo[key] = value
-
-        return svninfo
-
-    # Adapted from server code parser.py
-    def parse_filename_header(self, s):
-        parts = None
-        if "\t" in s:
-            # There's a \t separating the filename and info. This is the
-            # best case scenario, since it allows for filenames with spaces
-            # without much work.
-            parts = s.split("\t")
-
-        # There are spaces being used to separate the filename and info.
-        # This is technically wrong, so all we can do is assume that
-        # 1) the filename won't have multiple consecutive spaces, and
-        # 2) there are at least 2 spaces separating the filename and info.
-        if "  " in s:
-            parts = re.split(r"  +", s)
-
-        if parts:
-            parts[1] = '\t' + parts[1]
-            return parts
-
-        # strip off ending newline, and return it as the second component
-        return [s.split('\n')[0], '\n']
-
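A condensed, self-contained sketch of what parse_filename_header() above returns for the text following '--- ' / '+++ ' in an svn diff header (illustrative only; inputs are made up):

    import re

    def parse_header(s):
        # Condensed restatement of SVNClient.parse_filename_header() above.
        if '\t' in s:
            parts = s.split('\t')
        elif '  ' in s:
            parts = re.split(r'  +', s)
        else:
            return [s.split('\n')[0], '\n']
        parts[1] = '\t' + parts[1]
        return parts

    assert parse_header('foo.c\t(revision 12)') == ['foo.c', '\t(revision 12)']
    assert parse_header('foo.c   (revision 12)') == ['foo.c', '\t(revision 12)']
    assert parse_header('foo.c\n') == ['foo.c', '\n']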
-
-class PerforceClient(SCMClient):
-    """
-    A wrapper around the p4 Perforce tool that fetches repository information
-    and generates compatible diffs.
-    """
-    def get_repository_info(self):
-        if not check_install('p4 help'):
-            return None
-
-        data = execute(["p4", "info"], ignore_errors=True)
-
-        m = re.search(r'^Server address: (.+)$', data, re.M)
-        if not m:
-            return None
-
-        repository_path = m.group(1).strip()
-
-        try:
-            hostname, port = repository_path.split(":")
-            info = socket.gethostbyaddr(hostname)
-            repository_path = "%s:%s" % (info[0], port)
-        except (socket.gaierror, socket.herror):
-            pass
-
-        return RepositoryInfo(path=repository_path, supports_changesets=True)
-
-    def scan_for_server(self, repository_info):
-        # Scan first for dot files, since it's faster and will cover the
-        # user's $HOME/.reviewboardrc
-        server_url = \
-            super(PerforceClient, self).scan_for_server(repository_info)
-
-        if server_url:
-            return server_url
-
-        return self.scan_for_server_counter(repository_info)
-
-    def scan_for_server_counter(self, repository_info):
-        """
-        Checks the Perforce counters to see if the Review Board server's url
-        is specified. Since Perforce only started supporting non-numeric
-        counter values in server version 2008.1, we support both a normal
-        counter 'reviewboard.url' with a string value and embedding the url in
-        a counter name like 'reviewboard.url.http:||reviewboard.example.com'.
-        Note that forward slashes aren't allowed in counter names, so
-        pipe ('|') characters should be used. These should be safe because they
-        should not be used unencoded in urls.
-        """
-
-        counters_text = execute(["p4", "counters"])
-
-        # Try for a "reviewboard.url" counter first.
-        m = re.search(r'^reviewboard.url = (\S+)', counters_text, re.M)
-
-        if m:
-            return m.group(1)
-
-        # Next try for a counter of the form:
-        # reviewboard.url.http:||reviewboard.example.com
-        m2 = re.search(r'^reviewboard.url\.(\S+)', counters_text, re.M)
-
-        if m2:
-            return m2.group(1).replace('|', '/')
-
-        return None
-
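A quick sketch of the counter-name fallback that scan_for_server_counter() above decodes when the Perforce server predates string-valued counters (counter output and URL are made up):

    import re

    counters_text = 'reviewboard.url.http:||reviewboard.example.com = 1\n'

    m = re.search(r'^reviewboard.url\.(\S+)', counters_text, re.M)
    if m:
        url = m.group(1).replace('|', '/')
        # url == 'http://reviewboard.example.com'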
-    def get_changenum(self, args):
-        if len(args) == 1:
-            try:
-                return str(int(args[0]))
-            except ValueError:
-                pass
-        return None
-
-    def diff(self, args):
-        """
-        Goes through the hard work of generating a diff on Perforce in order
-        to take into account adds/deletes and to provide the necessary
-        revision information.
-        """
-        # Set the P4 environment:
-        if options.p4_client:
-           os.environ['P4CLIENT'] = options.p4_client
-
-        if options.p4_port:
-           os.environ['P4PORT'] = options.p4_port
-
-        changenum = self.get_changenum(args)
-        if changenum is None:
-            return self._path_diff(args)
-        else:
-            return self._changenum_diff(changenum)
-
-
-    def _path_diff(self, args):
-        """
-        Process a path-style diff.  See _changenum_diff for the alternate
-        version that handles specific change numbers.
-
-        Multiple paths may be specified in `args`.  The path styles supported
-        are:
-
-        //path/to/file
-        Upload file as a "new" file.
-
-        //path/to/dir/...
-        Upload all files as "new" files.
-
-        //path/to/file[@#]rev
-        Upload file from that rev as a "new" file.
-
-        //path/to/file[@#]rev,[@#]rev
-        Upload a diff between revs.
-
-        //path/to/dir/...[@#]rev,[@#]rev
-        Upload a diff of all files between revs in that directory.
-        """
-        r_revision_range = re.compile(r'^(?P<path>//[^@#]+)' +
-                                      r'(?P<revision1>[#@][^,]+)?' +
-                                      r'(?P<revision2>,[#@][^,]+)?$')
-
-        empty_filename = make_tempfile()
-        tmp_diff_from_filename = make_tempfile()
-        tmp_diff_to_filename = make_tempfile()
-
-        diff_lines = []
-
-        for path in args:
-            m = r_revision_range.match(path)
-
-            if not m:
-                die('Path %r does not match a valid Perforce path.' % (path,))
-            revision1 = m.group('revision1')
-            revision2 = m.group('revision2')
-            first_rev_path = m.group('path')
-
-            if revision1:
-                first_rev_path += revision1
-            records = self._run_p4(['files', first_rev_path])
-
-            # Make a map for convenience.
-            files = {}
-
-            # Records are:
-            # 'rev': '1'
-            # 'func': '...'
-            # 'time': '1214418871'
-            # 'action': 'edit'
-            # 'type': 'ktext'
-            # 'depotFile': '...'
-            # 'change': '123456'
-            for record in records:
-                if record['action'] != 'delete':
-                    if revision2:
-                        files[record['depotFile']] = [record, None]
-                    else:
-                        files[record['depotFile']] = [None, record]
-
-            if revision2:
-                # [1:] to skip the comma.
-                second_rev_path = m.group('path') + revision2[1:]
-                records = self._run_p4(['files', second_rev_path])
-                for record in records:
-                    if record['action'] != 'delete':
-                        try:
-                            m = files[record['depotFile']]
-                            m[1] = record
-                        except KeyError:
-                            files[record['depotFile']] = [None, record]
-
-            old_file = new_file = empty_filename
-            changetype_short = None
-
-            for depot_path, (first_record, second_record) in files.items():
-                old_file = new_file = empty_filename
-                if first_record is None:
-                    self._write_file(depot_path + '#' + second_record['rev'],
-                                     tmp_diff_to_filename)
-                    new_file = tmp_diff_to_filename
-                    changetype_short = 'A'
-                    base_revision = 0
-                elif second_record is None:
-                    self._write_file(depot_path + '#' + first_record['rev'],
-                                     tmp_diff_from_filename)
-                    old_file = tmp_diff_from_filename
-                    changetype_short = 'D'
-                    base_revision = int(first_record['rev'])
-                else:
-                    self._write_file(depot_path + '#' + first_record['rev'],
-                                     tmp_diff_from_filename)
-                    self._write_file(depot_path + '#' + second_record['rev'],
-                                     tmp_diff_to_filename)
-                    new_file = tmp_diff_to_filename
-                    old_file = tmp_diff_from_filename
-                    changetype_short = 'M'
-                    base_revision = int(first_record['rev'])
-
-                dl = self._do_diff(old_file, new_file, depot_path,
-                                   base_revision, changetype_short,
-                                   ignore_unmodified=True)
-                diff_lines += dl
-
-        os.unlink(empty_filename)
-        os.unlink(tmp_diff_from_filename)
-        os.unlink(tmp_diff_to_filename)
-        return (''.join(diff_lines), None)
-
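A sketch of what the r_revision_range regex in _path_diff() above extracts from the supported Perforce path styles (depot paths and revisions are made up):

    import re

    r_revision_range = re.compile(r'^(?P<path>//[^@#]+)'
                                  r'(?P<revision1>[#@][^,]+)?'
                                  r'(?P<revision2>,[#@][^,]+)?$')

    m = r_revision_range.match('//depot/proj/file.c#3,#5')
    # m.group('path')      == '//depot/proj/file.c'
    # m.group('revision1') == '#3'    (old revision)
    # m.group('revision2') == ',#5'   (leading comma skipped later with [1:])

    m = r_revision_range.match('//depot/proj/...')
    # a bare path or directory: both revision groups are None, so every file
    # is uploaded as a "new" file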
-    def _run_p4(self, command):
-        """Execute a perforce command using the python marshal API.
-
-        - command: A list of strings of the command to execute.
-
-        The return type depends on the command being run.
-        """
-        command = ['p4', '-G'] + command
-        p = subprocess.Popen(command, stdout=subprocess.PIPE)
-        result = []
-        has_error = False
-
-        while 1:
-            try:
-                data = marshal.load(p.stdout)
-            except EOFError:
-                break
-            else:
-                result.append(data)
-                if data.get('code', None) == 'error':
-                    has_error = True
-
-        rc = p.wait()
-
-        if rc or has_error:
-            for record in result:
-                if 'data' in record:
-                    print record['data']
-            die('Failed to execute command: %s\n' % (command,))
-
-        return result
-
-    def _changenum_diff(self, changenum):
-        """
-        Process a diff for a particular change number.  This handles both
-        pending and submitted changelists.
-
-        See _path_diff for the alternate version that does diffs of depot
-        paths.
-        """
-        # TODO: It might be a good idea to enhance PerforceDiffParser to
-        # understand that newFile could include a revision tag for post-submit
-        # reviewing.
-        cl_is_pending = False
-
-        debug("Generating diff for changenum %s" % changenum)
-
-        description = execute(["p4", "describe", "-s", changenum],
-                              split_lines=True)
-
-        if '*pending*' in description[0]:
-            cl_is_pending = True
-
-        # Get the file list
-        for line_num, line in enumerate(description):
-            if 'Affected files ...' in line:
-                break
-        else:
-            # Got to the end of all the description lines and didn't find
-            # what we were looking for.
-            die("Couldn't find any affected files for this change.")
-
-        description = description[line_num+2:]
-
-        diff_lines = []
-
-        empty_filename = make_tempfile()
-        tmp_diff_from_filename = make_tempfile()
-        tmp_diff_to_filename = make_tempfile()
-
-        for line in description:
-            line = line.strip()
-            if not line:
-                continue
-
-            m = re.search(r'\.\.\. ([^#]+)#(\d+) (add|edit|delete|integrate|branch)', line)
-            if not m:
-                die("Unsupported line from p4 opened: %s" % line)
-
-            depot_path = m.group(1)
-            base_revision = int(m.group(2))
-            if not cl_is_pending:
-                # If the changelist is pending, our base revision is the one
-                # that's currently in the depot. If it's not pending, the base
-                # revision is actually the revision prior to this one.
-                base_revision -= 1
-
-            changetype = m.group(3)
-
-            debug('Processing %s of %s' % (changetype, depot_path))
-
-            old_file = new_file = empty_filename
-            old_depot_path = new_depot_path = None
-            changetype_short = None
-
-            if changetype == 'edit' or changetype == 'integrate':
-                # A big assumption
-                new_revision = base_revision + 1
-
-                # We have an old file, get p4 to take this old version from the
-                # depot and put it into a plain old temp file for us
-                old_depot_path = "%s#%s" % (depot_path, base_revision)
-                self._write_file(old_depot_path, tmp_diff_from_filename)
-                old_file = tmp_diff_from_filename
-
-                # Also print out the new file into a tmpfile
-                if cl_is_pending:
-                    new_file = self._depot_to_local(depot_path)
-                else:
-                    new_depot_path = "%s#%s" %(depot_path, new_revision)
-                    self._write_file(new_depot_path, tmp_diff_to_filename)
-                    new_file = tmp_diff_to_filename
-
-                changetype_short = "M"
-
-            elif changetype == 'add' or changetype == 'branch':
-                # We have a new file, get p4 to put this new file into a pretty
-                # temp file for us. No old file to worry about here.
-                if cl_is_pending:
-                    new_file = self._depot_to_local(depot_path)
-                else:
-                    self._write_file(depot_path, tmp_diff_to_filename)
-                    new_file = tmp_diff_to_filename
-                changetype_short = "A"
-
-            elif changetype == 'delete':
-                # We've deleted a file, get p4 to put the deleted file into a temp
-                # file for us. The new file remains the empty file.
-                old_depot_path = "%s#%s" % (depot_path, base_revision)
-                self._write_file(old_depot_path, tmp_diff_from_filename)
-                old_file = tmp_diff_from_filename
-                changetype_short = "D"
-            else:
-                die("Unknown change type '%s' for %s" % (changetype, depot_path))
-
-            dl = self._do_diff(old_file, new_file, depot_path, base_revision, changetype_short)
-            diff_lines += dl
-
-        os.unlink(empty_filename)
-        os.unlink(tmp_diff_from_filename)
-        os.unlink(tmp_diff_to_filename)
-        return (''.join(diff_lines), None)
-
-    def _do_diff(self, old_file, new_file, depot_path, base_revision,
-                 changetype_short, ignore_unmodified=False):
-        """
-        Do the work of producing a diff for Perforce.
-
-        old_file - The absolute path to the "old" file.
-        new_file - The absolute path to the "new" file.
-        depot_path - The depot path in Perforce for this file.
-        base_revision - The base perforce revision number of the old file as
-            an integer.
-        changetype_short - The change type as a single character string.
-        ignore_unmodified - If True, will return an empty list if the file
-            is not changed.
-
-        Returns a list of strings of diff lines.
-        """
-        if hasattr(os, 'uname') and os.uname()[0] == 'SunOS':
-            diff_cmd = ["gdiff", "-urNp", old_file, new_file]
-        else:
-            diff_cmd = ["diff", "-urNp", old_file, new_file]
-        # Diff returns "1" if differences were found.
-        dl = execute(diff_cmd, extra_ignore_errors=(1,2),
-                     translate_newlines=False)
-
-        # If the input file has ^M characters at end of line, let's ignore them.
-        dl = dl.replace('\r\r\n', '\r\n')
-        dl = dl.splitlines(True)
-
-        cwd = os.getcwd()
-        if depot_path.startswith(cwd):
-            local_path = depot_path[len(cwd) + 1:]
-        else:
-            local_path = depot_path
-
-        # Special handling for the output of the diff tool on binary files:
-        #     diff outputs "Files a and b differ"
-        # and the code below expects the output to start with
-        #     "Binary files "
-        if len(dl) == 1 and \
-           dl[0] == ('Files %s and %s differ'% (old_file, new_file)):
-            dl = ['Binary files %s and %s differ'% (old_file, new_file)]
-
-        if dl == [] or dl[0].startswith("Binary files "):
-            if dl == []:
-                if ignore_unmodified:
-                    return []
-                else:
-                    print "Warning: %s in your changeset is unmodified" % \
-                        local_path
-
-            dl.insert(0, "==== %s#%s ==%s== %s ====\n" % \
-                (depot_path, base_revision, changetype_short, local_path))
-            dl.append('\n')
-        else:
-            m = re.search(r'(\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d)', dl[1])
-            if m:
-                timestamp = m.group(1)
-            else:
-                # Thu Sep  3 11:24:48 2007
-                m = re.search(r'(\w+)\s+(\w+)\s+(\d+)\s+(\d\d:\d\d:\d\d)\s+(\d\d\d\d)', dl[1])
-                if not m:
-                    die("Unable to parse diff header: %s" % dl[1])
-
-                month_map = {
-                    "Jan": "01",
-                    "Feb": "02",
-                    "Mar": "03",
-                    "Apr": "04",
-                    "May": "05",
-                    "Jun": "06",
-                    "Jul": "07",
-                    "Aug": "08",
-                    "Sep": "09",
-                    "Oct": "10",
-                    "Nov": "11",
-                    "Dec": "12",
-                }
-                month = month_map[m.group(2)]
-                day = m.group(3)
-                timestamp = m.group(4)
-                year = m.group(5)
-
-                timestamp = "%s-%s-%s %s" % (year, month, day, timestamp)
-
-            dl[0] = "--- %s\t%s#%s\n" % (local_path, depot_path, base_revision)
-            dl[1] = "+++ %s\t%s\n" % (local_path, timestamp)
-
-        return dl
-
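A sketch of the diff-header rewrite that _do_diff() above applies to plain `diff -urNp` output so the Review Board server can map the files back to the depot (paths, revision and timestamp are made up):

    local_path = 'proj/file.c'
    depot_path = '//depot/proj/file.c'
    base_revision = 3
    timestamp = '2007-09-03 11:24:48'

    dl0 = "--- %s\t%s#%s\n" % (local_path, depot_path, base_revision)
    dl1 = "+++ %s\t%s\n" % (local_path, timestamp)
    # --- proj/file.c<TAB>//depot/proj/file.c#3
    # +++ proj/file.c<TAB>2007-09-03 11:24:48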
-    def _write_file(self, depot_path, tmpfile):
-        """
-        Grabs a file from Perforce and writes it to a temp file. p4 print sets
-        the file readonly and that causes a later call to unlink to fail. So we
-        make the file read/write.
-        """
-        debug('Writing "%s" to "%s"' % (depot_path, tmpfile))
-        execute(["p4", "print", "-o", tmpfile, "-q", depot_path])
-        os.chmod(tmpfile, stat.S_IREAD | stat.S_IWRITE)
-
-    def _depot_to_local(self, depot_path):
-        """
-        Given a path in the depot, return the path on the local filesystem to
-        the same file.  If there are multiple results, take only the last
-        result from the where command.
-        """
-        where_output = self._run_p4(['where', depot_path])
-        return where_output[-1]['path']
-
-
-class MercurialClient(SCMClient):
-    """
-    A wrapper around the hg Mercurial tool that fetches repository
-    information and generates compatible diffs.
-    """
-    def get_repository_info(self):
-        if not check_install('hg --help'):
-            return None
-
-        data = execute(["hg", "root"], ignore_errors=True)
-        if data.startswith('abort:'):
-            # hg aborted => no mercurial repository here.
-            return None
-
-        # Otherwise, the "hg root" output gives us the repository path.
-
-        # We save data here to use it as a fallback. See below
-        local_data = data.strip()
-
-        svn = execute(["hg", "svn", "info", ], ignore_errors=True)
-
-        if (not svn.startswith('abort:') and
-            not svn.startswith("hg: unknown command")):
-            self.type = 'svn'
-            m = re.search(r'^Repository Root: (.+)$', svn, re.M)
-
-            if not m:
-                return None
-
-            path = m.group(1)
-            m2 = re.match(r'^(svn\+ssh|http|https)://([-a-zA-Z0-9.]*@)(.*)$',
-                          path)
-            if m2:
-                path = '%s://%s' % (m2.group(1), m2.group(3))
-
-            m = re.search(r'^URL: (.+)$', svn, re.M)
-
-            if not m:
-                return None
-
-            base_path = m.group(1)[len(path):] or "/"
-            return RepositoryInfo(path=path,
-                                  base_path=base_path,
-                                  supports_parent_diffs=True)
-
-        self.type = 'hg'
-
-        # We are going to search .hg/hgrc for the default path.
-        file_name = os.path.join(local_data,'.hg', 'hgrc')
-
-        if not os.path.exists(file_name):
-            return RepositoryInfo(path=local_data, base_path='/',
-                                  supports_parent_diffs=True)
-
-        f = open(file_name)
-        data = f.read()
-        f.close()
-
-        m = re.search(r'^default\s+=\s+(.+)$', data, re.M)
-
-        if not m:
-            # Return the local path, if no default value is found.
-            return RepositoryInfo(path=local_data, base_path='/',
-                                  supports_parent_diffs=True)
-
-        path = m.group(1).strip()
-
-        return RepositoryInfo(path=path, base_path='',
-                              supports_parent_diffs=True)
-
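The get_repository_info() method deleted above locates the Mercurial repository path by scanning .hg/hgrc with a regular expression for a "default = ..." line. In a real hgrc that key lives under the [paths] section, so the same lookup can also be expressed with the standard-library configparser; a minimal, illustrative sketch in Python 3 (the function name is mine, not part of the patch):

import configparser
import os

def hgrc_default_path(repo_root):
    # Read <repo_root>/.hg/hgrc and return the [paths] "default" entry,
    # or None when the file or key is missing. Sketch only; the deleted
    # script uses a plain regular expression over the whole file instead.
    parser = configparser.ConfigParser(interpolation=None)
    parser.read(os.path.join(repo_root, '.hg', 'hgrc'))
    return parser.get('paths', 'default', fallback=None)
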
-    def diff(self, files):
-        """
-        Performs a diff across all modified files in a Mercurial repository.
-        """
-        # We don't support parent diffs with Mercurial yet, so we always
-        # return None for the parent diff.
-        if self.type == 'svn':
-            parent = execute(['hg', 'parent', '--svn', '--template',
-                              '{node}\n']).strip()
-
-            if options.parent_branch:
-                parent = options.parent_branch
-
-            if options.guess_summary and not options.summary:
-                options.summary = execute(['hg', 'log', '-r.', '--template',
-                                            r'{desc|firstline}\n'])
-
-            if options.guess_description and not options.description:
-                numrevs = len(execute(['hg', 'log', '-r.:%s' % parent,
-                                       '--follow', '--template',
-                                       r'{rev}\n']).strip().split('\n'))
-                options.description = execute(['hg', 'log', '-r.:%s' % parent,
-                                               '--follow', '--template',
-                                               r'{desc}\n\n', '--limit',
-                                               str(numrevs-1)]).strip()
-
-            return (execute(["hg", "diff", "--svn", '-r%s:.' % parent]), None)
-
-        return (execute(["hg", "diff"] + files), None)
-
-    def diff_between_revisions(self, revision_range, args, repository_info):
-        """
-        Performs a diff between 2 revisions of a Mercurial repository.
-        """
-        if self.type != 'hg':
-            raise NotImplementedError
-
-        r1, r2 = revision_range.split(':')
-        return execute(["hg", "diff", "-r", r1, "-r", r2])
-
-
-class GitClient(SCMClient):
-    """
-    A wrapper around git that fetches repository information and generates
-    compatible diffs. This will attempt to generate a diff suitable for the
-    remote repository, whether git, SVN or Perforce.
-    """
-    def get_repository_info(self):
-        if not check_install('git --help'):
-            return None
-
-        git_dir = execute(["git", "rev-parse", "--git-dir"],
-                          ignore_errors=True).strip()
-
-        if git_dir.startswith("fatal:") or not os.path.isdir(git_dir):
-            return None
-
-        # Running post-review in directories other than the top level
-        # of a work-tree would result in broken diffs on the server.
-        os.chdir(os.path.dirname(os.path.abspath(git_dir)))
-
-        # We know we have something we can work with. Let's find out
-        # what it is. We'll try SVN first.
-        data = execute(["git", "svn", "info"], ignore_errors=True)
-
-        m = re.search(r'^Repository Root: (.+)$', data, re.M)
-        if m:
-            path = m.group(1)
-            m = re.search(r'^URL: (.+)$', data, re.M)
-
-            if m:
-                base_path = m.group(1)[len(path):] or "/"
-                m = re.search(r'^Repository UUID: (.+)$', data, re.M)
-
-                if m:
-                    uuid = m.group(1)
-                    self.type = "svn"
-
-                    return SvnRepositoryInfo(path=path, base_path=base_path,
-                                             uuid=uuid,
-                                             supports_parent_diffs=True)
-        else:
-            # Versions of git-svn before 1.5.4 don't (appear to) support
-            # 'git svn info'.  If we fail because of an older git install,
-            # here, figure out what version of git is installed and give
-            # the user a hint about what to do next.
-            version = execute(["git", "svn", "--version"], ignore_errors=True)
-            version_parts = re.search('version (\d+)\.(\d+)\.(\d+)',
-                                      version)
-            svn_remote = execute(["git", "config", "--get",
-                                  "svn-remote.svn.url"], ignore_errors=True)
-
-            if (version_parts and
-                not self.is_valid_version((int(version_parts.group(1)),
-                                           int(version_parts.group(2)),
-                                           int(version_parts.group(3))),
-                                          (1, 5, 4)) and
-                svn_remote):
-                die("Your installation of git-svn must be upgraded to " + \
-                    "version 1.5.4 or later")
-
-        # Okay, maybe Perforce.
-        # TODO
-
-        # Nope, it's git then.
-        origin = execute(["git", "remote", "show", "origin"])
-        m = re.search(r'URL: (.+)', origin)
-        if m:
-            url = m.group(1).rstrip('/')
-            if url:
-                self.type = "git"
-                return RepositoryInfo(path=url, base_path='',
-                                      supports_parent_diffs=True)
-
-        return None
-
-    def is_valid_version(self, actual, expected):
-        """
-        Takes two tuples, both in the form:
-            (major_version, minor_version, micro_version)
-        Returns true if the actual version is greater than or equal to
-        the expected version, and false otherwise.
-        """
-        return (actual[0] > expected[0]) or \
-               (actual[0] == expected[0] and actual[1] > expected[1]) or \
-               (actual[0] == expected[0] and actual[1] == expected[1] and \
-                actual[2] >= expected[2])
-
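The is_valid_version() helper above spells out a lexicographic comparison of (major, minor, micro) tuples. Python compares tuples element by element anyway, so the same check collapses to a single expression; a small illustrative sketch (the function name is mine):

def version_at_least(actual, expected):
    # Tuple comparison is lexicographic, which is exactly the ordering
    # is_valid_version() implements by hand above.
    return tuple(actual) >= tuple(expected)

assert version_at_least((1, 5, 4), (1, 5, 4))
assert version_at_least((1, 6, 0), (1, 5, 4))
assert not version_at_least((1, 5, 3), (1, 5, 4))
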
-    def scan_for_server(self, repository_info):
-        # Scan first for dot files, since it's faster and will cover the
-        # user's $HOME/.reviewboardrc
-        server_url = super(GitClient, self).scan_for_server(repository_info)
-
-        if server_url:
-            return server_url
-
-        # TODO: Maybe support a server per remote later? Is that useful?
-        url = execute(["git", "config", "--get", "reviewboard.url"],
-                      ignore_errors=True).strip()
-        if url:
-            return url
-
-        if self.type == "svn":
-            # Try using the reviewboard:url property on the SVN repo, if it
-            # exists.
-            prop = SVNClient().scan_for_server_property(repository_info)
-
-            if prop:
-                return prop
-
-        return None
-
-    def diff(self, args):
-        """
-        Performs a diff across all modified files in the branch, taking into
-        account a parent branch.
-        """
-        parent_branch = options.parent_branch or "master"
-
-        diff_lines = self.make_diff(parent_branch)
-
-        if parent_branch != "master":
-            parent_diff_lines = self.make_diff("master", parent_branch)
-        else:
-            parent_diff_lines = None
-
-        if options.guess_summary and not options.summary:
-            options.summary = execute(["git", "log", "--pretty=format:%s",
-                                       "HEAD^.."], ignore_errors=True).strip()
-
-        if options.guess_description and not options.description:
-            options.description = execute(
-                ["git", "log", "--pretty=format:%s%n%n%b", parent_branch + ".."],
-                ignore_errors=True).strip()
-
-        return (diff_lines, parent_diff_lines)
-
-    def make_diff(self, parent_branch, source_branch=""):
-        """
-        Performs a diff on a particular branch range.
-        """
-        if self.type == "svn":
-            diff_lines = execute(["git", "diff", "--no-color", "--no-prefix",
-                                  "-r", "-u", "%s..%s" % (parent_branch,
-                                                          source_branch)],
-                                 split_lines=True)
-            return self.make_svn_diff(parent_branch, diff_lines)
-        elif self.type == "git":
-            return execute(["git", "diff", "--no-color", "--full-index",
-                            parent_branch])
-
-        return None
-
-    def make_svn_diff(self, parent_branch, diff_lines):
-        """
-        Formats the output of git diff such that it's in a form that
-        svn diff would generate. This is needed so the SVNTool in Review
-        Board can properly parse this diff.
-        """
-        rev = execute(["git", "svn", "find-rev", "master"]).strip()
-
-        if not rev:
-            return None
-
-        diff_data = ""
-        filename = ""
-        revision = ""
-        newfile = False
-
-        for line in diff_lines:
-            if line.startswith("diff "):
-                # Grab the filename and then filter this out.
-                # This will be in the format of:
-                #
-                # diff --git a/path/to/file b/path/to/file
-                info = line.split(" ")
-                diff_data += "Index: %s\n" % info[2]
-                diff_data += "=" * 67
-                diff_data += "\n"
-            elif line.startswith("index "):
-                # Filter this out.
-                pass
-            elif line.strip() == "--- /dev/null":
-                # New file
-                newfile = True
-            elif line.startswith("--- "):
-                newfile = False
-                diff_data += "--- %s\t(revision %s)\n" % \
-                             (line[4:].strip(), rev)
-            elif line.startswith("+++ "):
-                filename = line[4:].strip()
-                if newfile:
-                    diff_data += "--- %s\t(revision 0)\n" % filename
-                    diff_data += "+++ %s\t(revision 0)\n" % filename
-                else:
-                    # We already printed the "--- " line.
-                    diff_data += "+++ %s\t(working copy)\n" % filename
-            else:
-                diff_data += line
-
-        return diff_data
-
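make_svn_diff() above rewrites only the per-file header lines of a git diff so that Review Board's SVN parser accepts them; hunk bodies pass through unchanged. The header shape it emits can be seen in isolation in the sketch below (the path and revision number are made-up values, not taken from the patch):

def svn_style_header(path, revision, new_file=False):
    # Mirror the header layout produced by make_svn_diff(): an Index:
    # line, a 67-character separator, then ---/+++ lines in svn's format.
    lines = ['Index: %s' % path, '=' * 67]
    if new_file:
        lines += ['--- %s\t(revision 0)' % path,
                  '+++ %s\t(revision 0)' % path]
    else:
        lines += ['--- %s\t(revision %s)' % (path, revision),
                  '+++ %s\t(working copy)' % path]
    return '\n'.join(lines) + '\n'

print(svn_style_header('src/library/scala/Predef.scala', 25000))
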
-    def diff_between_revisions(self, revision_range, args, repository_info):
-        pass
-
-
-SCMCLIENTS = (
-    SVNClient(),
-    CVSClient(),
-    GitClient(),
-    MercurialClient(),
-    PerforceClient(),
-    ClearCaseClient(),
-)
-
-def debug(s):
-    """
-    Prints debugging information if post-review was run with --debug
-    """
-    if DEBUG or options and options.debug:
-        print ">>> %s" % s
-
-
-def make_tempfile():
-    """
-    Creates a temporary file and returns the path. The path is stored
-    in an array for later cleanup.
-    """
-    fd, tmpfile = mkstemp()
-    os.close(fd)
-    tempfiles.append(tmpfile)
-    return tmpfile
-
-
-def check_install(command):
-    """
-    Try executing an external command and return a boolean indicating whether
-    that command is installed or not.  The 'command' argument should be
-    something that executes quickly, without hitting the network (for
-    instance, 'svn help' or 'git --version').
-    """
-    try:
-        p = subprocess.Popen(command.split(' '),
-                             stdin=subprocess.PIPE,
-                             stdout=subprocess.PIPE,
-                             stderr=subprocess.PIPE)
-        return True
-    except OSError:
-        return False
-
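check_install() above decides whether a tool is available by actually spawning it and treating OSError as "not installed". On Python 3 the same probe can be made without launching a process at all, by looking the executable up on PATH; a hedged sketch (the helper name is mine):

import shutil

def is_installed(command_line):
    # Only the executable name matters for the lookup; trailing arguments
    # such as "--help" are ignored. shutil.which() returns None when the
    # program is not found on PATH.
    executable = command_line.split(' ')[0]
    return shutil.which(executable) is not None

print(is_installed('git --version'))
print(is_installed('definitely-not-a-real-tool --help'))
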
-
-def execute(command, env=None, split_lines=False, ignore_errors=False,
-            extra_ignore_errors=(), translate_newlines=True):
-    """
-    Utility function to execute a command and return the output.
-    """
-    if isinstance(command, list):
-        debug(subprocess.list2cmdline(command))
-    else:
-        debug(command)
-
-    if env:
-        env.update(os.environ)
-    else:
-        env = os.environ.copy()
-
-    env['LC_ALL'] = 'en_US.UTF-8'
-    env['LANGUAGE'] = 'en_US.UTF-8'
-
-    if sys.platform.startswith('win'):
-        p = subprocess.Popen(command,
-                             stdin=subprocess.PIPE,
-                             stdout=subprocess.PIPE,
-                             stderr=subprocess.STDOUT,
-                             shell=False,
-                             universal_newlines=translate_newlines,
-                             env=env)
-    else:
-        p = subprocess.Popen(command,
-                             stdin=subprocess.PIPE,
-                             stdout=subprocess.PIPE,
-                             stderr=subprocess.STDOUT,
-                             shell=False,
-                             close_fds=True,
-                             universal_newlines=translate_newlines,
-                             env=env)
-    if split_lines:
-        data = p.stdout.readlines()
-    else:
-        data = p.stdout.read()
-    rc = p.wait()
-    if rc and not ignore_errors and rc not in extra_ignore_errors:
-        die('Failed to execute command: %s\n%s' % (command, data))
-
-    return data
-
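execute() above is the script's single entry point for shelling out: it forces an English UTF-8 locale, merges stderr into stdout, and aborts on a non-zero exit status unless the caller opts out. A rough Python 3 analogue using subprocess.run, shown only to summarise that behaviour (not part of the patch):

import os
import subprocess

def run(command, ignore_errors=False):
    # `command` is a list of arguments. Force a predictable locale,
    # capture stdout and stderr together, and fail loudly on a non-zero
    # exit unless errors are explicitly ignored.
    env = dict(os.environ, LC_ALL='en_US.UTF-8', LANGUAGE='en_US.UTF-8')
    result = subprocess.run(command, env=env, text=True,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    if result.returncode and not ignore_errors:
        raise RuntimeError('Failed to execute command: %s\n%s'
                           % (command, result.stdout))
    return result.stdout
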
-
-def die(msg=None):
-    """
-    Cleanly exits the program with an error message. Erases all remaining
-    temporary files.
-    """
-    for tmpfile in tempfiles:
-        try:
-            os.unlink(tmpfile)
-        except:
-            pass
-
-    if msg:
-        print msg
-
-    sys.exit(1)
-
-
-def walk_parents(path):
-    """
-    Walks up the tree to the root directory.
-    """
-    while os.path.splitdrive(path)[1] != os.sep:
-        yield path
-        path = os.path.dirname(path)
-
-
-def load_config_file(filename):
-    """
-    Loads data from a config file.
-    """
-    config = {
-        'TREES': {},
-    }
-
-    if os.path.exists(filename):
-        try:
-            execfile(filename, config)
-        except:
-            pass
-
-    return config
-
-
-def tempt_fate(server, tool, changenum, diff_content=None,
-               parent_diff_content=None, submit_as=None, retries=3):
-    """
-    Attempts to create a review request on a Review Board server and upload
-    a diff. On success, the review request path is displayed.
-    """
-    try:
-        save_draft = False
-
-        if options.rid:
-            review_request = server.get_review_request(options.rid)
-        else:
-            review_request = server.new_review_request(changenum, submit_as)
-
-        if options.target_groups:
-            server.set_review_request_field(review_request, 'target_groups',
-                                            options.target_groups)
-            save_draft = True
-
-        if options.target_people:
-            server.set_review_request_field(review_request, 'target_people',
-                                            options.target_people)
-            save_draft = True
-
-        if options.summary:
-            server.set_review_request_field(review_request, 'summary',
-                                            options.summary)
-            save_draft = True
-
-        if options.branch:
-            server.set_review_request_field(review_request, 'branch',
-                                            options.branch)
-            save_draft = True
-
-        if options.bugs_closed:
-            server.set_review_request_field(review_request, 'bugs_closed',
-                                            options.bugs_closed)
-            save_draft = True
-
-        if options.description:
-            server.set_review_request_field(review_request, 'description',
-                                            options.description)
-            save_draft = True
-
-        if options.testing_done:
-            server.set_review_request_field(review_request, 'testing_done',
-                                            options.testing_done)
-            save_draft = True
-
-        if save_draft:
-            server.save_draft(review_request)
-    except APIError, e:
-        rsp, = e.args
-        if rsp['err']['code'] == 103: # Not logged in
-            retries = retries - 1
-
-            # We had an odd issue where the server ended up a couple of
-            # years in the future. Login succeeds but the cookie date was
-            # "odd" so use of the cookie appeared to fail and eventually
-            # ended up at max recursion depth :-(. Check for a maximum
-            # number of retries.
-            if retries >= 0:
-                server.login(force=True)
-                tempt_fate(server, tool, changenum, diff_content,
-                           parent_diff_content, submit_as, retries=retries)
-                return
-
-        if options.rid:
-            die("Error getting review request %s: %s (code %s)" % \
-                (options.rid, rsp['err']['msg'], rsp['err']['code']))
-        else:
-            die("Error creating review request: %s (code %s)" % \
-                (rsp['err']['msg'], rsp['err']['code']))
-
-
-    if not server.info.supports_changesets or not options.change_only:
-        try:
-            server.upload_diff(review_request, diff_content,
-                               parent_diff_content)
-        except APIError, e:
-            rsp, = e.args
-            print "Error uploading diff: %s (%s)" % (rsp['err']['msg'],
-                                                     rsp['err']['code'])
-            debug(rsp)
-            die("Your review request still exists, but the diff is not " +
-                "attached.")
-
-    if options.publish:
-        server.publish(review_request)
-
-    request_url = 'r/' + str(review_request['id'])
-    review_url = urljoin(server.url, request_url)
-
-    if not review_url.startswith('http'):
-        review_url = 'http://%s' % review_url
-
-    print "Review request #%s posted." % (review_request['id'],)
-    print
-    print review_url
-
-    return review_url
-
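When the server reports error code 103 ("not logged in"), tempt_fate() above logs in again and retries itself, decrementing `retries` so that a stale cookie cannot drive it into unbounded recursion. The same bounded retry-after-relogin guard, written iteratively and with a stand-in exception type, purely for illustration:

def with_relogin(call, login, retries=3):
    # Try the request, re-authenticate on an auth failure, and give up
    # once the retry budget is exhausted. PermissionError stands in for
    # the script's APIError with code 103.
    for attempt in range(retries + 1):
        try:
            return call()
        except PermissionError:
            if attempt == retries:
                raise
            login()
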
-
-def parse_options(args):
-    parser = OptionParser(usage="%prog [-pond] [-r review_id] [changenum]",
-                          version="%prog " + VERSION)
-
-    parser.add_option("-p", "--publish",
-                      dest="publish", action="store_true", default=PUBLISH,
-                      help="publish the review request immediately after "
-                           "submitting")
-    parser.add_option("-r", "--review-request-id",
-                      dest="rid", metavar="ID", default=None,
-                      help="existing review request ID to update")
-    parser.add_option("-o", "--open",
-                      dest="open_browser", action="store_true",
-                      default=OPEN_BROWSER,
-                      help="open a web browser to the review request page")
-    parser.add_option("-n", "--output-diff",
-                      dest="output_diff_only", action="store_true",
-                      default=False,
-                      help="outputs a diff to the console and exits. "
-                           "Does not post")
-    parser.add_option("--server",
-                      dest="server", default=REVIEWBOARD_URL,
-                      metavar="SERVER",
-                      help="specify a different Review Board server "
-                           "to use")
-    parser.add_option("--diff-only",
-                      dest="diff_only", action="store_true", default=False,
-                      help="uploads a new diff, but does not update "
-                           "info from changelist")
-    parser.add_option("--target-groups",
-                      dest="target_groups", default=TARGET_GROUPS,
-                      help="names of the groups who will perform "
-                           "the review")
-    parser.add_option("--target-people",
-                      dest="target_people", default=TARGET_PEOPLE,
-                      help="names of the people who will perform "
-                           "the review")
-    parser.add_option("--summary",
-                      dest="summary", default=None,
-                      help="summary of the review ")
-    parser.add_option("--description",
-                      dest="description", default=None,
-                      help="description of the review ")
-    parser.add_option("--description-file",
-                      dest="description_file", default=None,
-                      help="text file containing a description of the review")
-    parser.add_option("--guess-summary",
-                      dest="guess_summary", action="store_true",
-                      default=False,
-                      help="guess summary from the latest commit (git/"
-                           "hgsubversion only)")
-    parser.add_option("--guess-description",
-                      dest="guess_description", action="store_true",
-                      default=False,
-                      help="guess description based on commits on this branch "
-                           "(git/hgsubversion only)")
-    parser.add_option("--testing-done",
-                      dest="testing_done", default=None,
-                      help="details of testing done ")
-    parser.add_option("--testing-done-file",
-                      dest="testing_file", default=None,
-                      help="text file containing details of testing done ")
-    parser.add_option("--branch",
-                      dest="branch", default=None,
-                      help="affected branch ")
-    parser.add_option("--bugs-closed",
-                      dest="bugs_closed", default=None,
-                      help="list of bugs closed ")
-    parser.add_option("--revision-range",
-                      dest="revision_range", default=None,
-                      help="generate the diff for review based on given "
-                           "revision range")
-    parser.add_option("--label",
-                      dest="label", default=None,
-                      help="label (ClearCase Only) ")
-    parser.add_option("--submit-as",
-                      dest="submit_as", default=SUBMIT_AS, metavar="USERNAME",
-                      help="user name to be recorded as the author of the "
-                           "review request, instead of the logged in user")
-    parser.add_option("--username",
-                      dest="username", default=None, metavar="USERNAME",
-                      help="user name to be supplied to the reviewboard server")
-    parser.add_option("--password",
-                      dest="password", default=None, metavar="PASSWORD",
-                      help="password to be supplied to the reviewboard server")
-    parser.add_option("--change-only",
-                      dest="change_only", action="store_true",
-                      default=False,
-                      help="updates info from changelist, but does "
-                           "not upload a new diff (only available if your "
-                           "repository supports changesets)")
-    parser.add_option("--parent",
-                      dest="parent_branch", default=None,
-                      metavar="PARENT_BRANCH",
-                      help="the parent branch this diff should be against "
-                           "(only available if your repository supports "
-                           "parent diffs)")
-    parser.add_option("--p4-client",
-                      dest="p4_client", default=None,
-                      help="the Perforce client name that the review is in")
-    parser.add_option("--p4-port",
-                      dest="p4_port", default=None,
-                      help="the Perforce servers IP address that the review is on")
-    parser.add_option("--repository-url",
-                      dest="repository_url", default=None,
-                      help="the url for a repository for creating a diff "
-                           "outside of a working copy (currently only supported "
-                           "by Subversion).  Requires --revision-range")
-    parser.add_option("-d", "--debug",
-                      action="store_true", dest="debug", default=DEBUG,
-                      help="display debug output")
-
-    (globals()["options"], args) = parser.parse_args(args)
-
-    if options.description and options.description_file:
-        sys.stderr.write("The --description and --description-file options "
-                         "are mutually exclusive.\n")
-        sys.exit(1)
-
-    if options.description_file:
-        if os.path.exists(options.description_file):
-            fp = open(options.description_file, "r")
-            options.description = fp.read()
-            fp.close()
-        else:
-            sys.stderr.write("The description file %s does not exist.\n" %
-                             options.description_file)
-            sys.exit(1)
-
-    if options.testing_done and options.testing_file:
-        sys.stderr.write("The --testing-done and --testing-done-file options "
-                         "are mutually exclusive.\n")
-        sys.exit(1)
-
-    if options.testing_file:
-        if os.path.exists(options.testing_file):
-            fp = open(options.testing_file, "r")
-            options.testing_done = fp.read()
-            fp.close()
-        else:
-            sys.stderr.write("The testing file %s does not exist.\n" %
-                             options.testing_file)
-            sys.exit(1)
-
-    if options.repository_url and not options.revision_range:
-        sys.stderr.write("The --repository-url option requires the "
-                         "--revision-range option.\n")
-        sys.exit(1)
-
-    return args
-
-def determine_client():
-
-    repository_info = None
-    tool = None
-
-    # Try to find the SCM Client we're going to be working with.
-    for tool in SCMCLIENTS:
-        repository_info = tool.get_repository_info()
-
-        if repository_info:
-            break
-
-    if not repository_info:
-        if options.repository_url:
-            print "No supported repository could be access at the supplied url."
-        else:
-            print "The current directory does not contain a checkout from a"
-            print "supported source code repository."
-        sys.exit(1)
-
-    # Verify that options specific to an SCM Client have not been mis-used.
-    if options.change_only and not repository_info.supports_changesets:
-        sys.stderr.write("The --change-only option is not valid for the "
-                         "current SCM client.\n")
-        sys.exit(1)
-
-    if options.parent_branch and not repository_info.supports_parent_diffs:
-        sys.stderr.write("The --parent option is not valid for the "
-                         "current SCM client.\n")
-        sys.exit(1)
-
-    if ((options.p4_client or options.p4_port) and \
-        not isinstance(tool, PerforceClient)):
-        sys.stderr.write("The --p4-client and --p4-port options are not valid "
-                         "for the current SCM client.\n")
-        sys.exit(1)
-
-    return (repository_info, tool)
-
-def main():
-    if 'USERPROFILE' in os.environ:
-        homepath = os.path.join(os.environ["USERPROFILE"], "Local Settings",
-                                "Application Data")
-    elif 'HOME' in os.environ:
-        homepath = os.environ["HOME"]
-    else:
-        homepath = ''
-
-    # Load the config and cookie files
-    globals()['user_config'] = \
-        load_config_file(os.path.join(homepath, ".reviewboardrc"))
-    cookie_file = os.path.join(homepath, ".post-review-cookies.txt")
-
-    args = parse_options(sys.argv[1:])
-
-    repository_info, tool = determine_client()
-
-    # Try to find a valid Review Board server to use.
-    if options.server:
-        server_url = options.server
-    else:
-        server_url = tool.scan_for_server(repository_info)
-
-    if not server_url:
-        print "Unable to find a Review Board server for this source code tree."
-        sys.exit(1)
-
-    server = ReviewBoardServer(server_url, repository_info, cookie_file)
-
-    if repository_info.supports_changesets:
-        changenum = tool.get_changenum(args)
-    else:
-        changenum = None
-
-    if options.revision_range:
-        diff = tool.diff_between_revisions(options.revision_range, args,
-                                           repository_info)
-        parent_diff = None
-    elif options.label and isinstance(tool, ClearCaseClient):
-        diff, parent_diff = tool.diff_label(options.label)
-    else:
-        diff, parent_diff = tool.diff(args)
-
-    if options.output_diff_only:
-        print diff
-        sys.exit(0)
-
-    # Let's begin.
-    server.login()
-
-    review_url = tempt_fate(server, tool, changenum, diff_content=diff,
-                            parent_diff_content=parent_diff,
-                            submit_as=options.submit_as)
-
-    # Load the review up in the browser if requested to:
-    if options.open_browser:
-        try:
-            import webbrowser
-            if 'open_new_tab' in dir(webbrowser):
-                # open_new_tab is only in python 2.5+
-                webbrowser.open_new_tab(review_url)
-            elif 'open_new' in dir(webbrowser):
-                webbrowser.open_new(review_url)
-            else:
-                os.system( 'start %s' % review_url )
-        except:
-            print 'Error opening review URL: %s' % review_url
-
-
-if __name__ == "__main__":
-    main()
diff --git a/test/review b/test/review
deleted file mode 100755
index e1ccb9c..0000000
--- a/test/review
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/sh
-
-if [ -z $1 ] || [ "$1" = "-h" ] || [ "$1" = "--help" ] || [ "$1" = "-help" ] || [ "$1" = "-?" ]; then
-  echo "Usage: `basename $0` [rev] [args]\n"
-  echo "  [rev]  : either the revision number without leading 'r' (post-commit),"
-  echo "           or '-loc' to create a review from current local changes (pre-commit)\n"
-  echo "  [args] : optional arguments:"
-  echo "     -r ID     existing review request ID to update\n"
-  exit 1
-fi
-
-POSTREVIEW=`dirname $0`/postreview.py
-
-if [ "$1" = "-loc" ]; then
-  echo "creating review request from local changes..."
-  REVARG=""
-  LOG=""
-  SUMMARY="local changes"
-  REPO=""
-else
-  REV=$1
-  PREV=`expr $REV - 1`
-  if [ $? -ne 0 ]; then
-    echo "argument revision not a number: $REV"
-    exit 1
-  fi
-
-  echo "creating review request for changeset $REV..."
-
-  LOG="`svn log http://lampsvn.epfl.ch/svn-repos/scala -c $REV`"
-  if [ $? -ne 0 ]; then
-    echo "could not get svn log for revision $REV"
-    exit 1
-  fi
-
-  REVARG="--revision-range=$PREV:$REV"
-  SUMMARY="r$REV"
-  REPO="--repository-url=http://lampsvn.epfl.ch/svn-repos/scala"
-fi
-
-
-shift # remove parameter $1 (revision)
-
-python $POSTREVIEW --server="https://chara2.epfl.ch" $REVARG --summary="$SUMMARY" --description="$LOG" $REPO -o $@
diff --git a/test/scaladoc/resources/SI-4014_0.scala b/test/scaladoc/resources/SI-4014_0.scala
new file mode 100644
index 0000000..c398fcc
--- /dev/null
+++ b/test/scaladoc/resources/SI-4014_0.scala
@@ -0,0 +1,4 @@
+/** A template without authors.
+  *
+  */
+trait Foo
\ No newline at end of file
diff --git a/test/scaladoc/resources/SI-4014_1.scala b/test/scaladoc/resources/SI-4014_1.scala
new file mode 100644
index 0000000..34386b5
--- /dev/null
+++ b/test/scaladoc/resources/SI-4014_1.scala
@@ -0,0 +1,5 @@
+/** A template with one author.
+  *
+  *  @author The Only Author
+  */
+trait Foo
\ No newline at end of file
diff --git a/test/scaladoc/resources/SI-4014_2.scala b/test/scaladoc/resources/SI-4014_2.scala
new file mode 100644
index 0000000..514f7a1
--- /dev/null
+++ b/test/scaladoc/resources/SI-4014_2.scala
@@ -0,0 +1,6 @@
+/** A template with more than one author.
+  *
+  *  @author The First Author
+  *  @author The Second Author
+  */
+trait Foo
\ No newline at end of file
diff --git a/test/scaladoc/resources/SI_4715.scala b/test/scaladoc/resources/SI_4715.scala
index 29daf43..de28695 100644
--- a/test/scaladoc/resources/SI_4715.scala
+++ b/test/scaladoc/resources/SI_4715.scala
@@ -1,7 +1,7 @@
 class SI_4715 {
   type :+:[X,Y] = Map[X,Y]
-  val withType: Int :+: Double = error("")
+  val withType: Int :+: Double = sys.error("")
 
   trait :-:[X,Y]
-  val withTrait: Int :-: Double = error("")
+  val withTrait: Int :-: Double = sys.error("")
 }
diff --git a/test/scaladoc/resources/Trac4325.scala b/test/scaladoc/resources/Trac4325.scala
index ffb968d..ccc2f19 100644
--- a/test/scaladoc/resources/Trac4325.scala
+++ b/test/scaladoc/resources/Trac4325.scala
@@ -1,5 +1,5 @@
-case class WithSynthetic
+case class WithSynthetic()
 
-case class WithObject
+case class WithObject()
 object WithObject
 
diff --git a/test/scaladoc/resources/doc-root/Any.scala b/test/scaladoc/resources/doc-root/Any.scala
index 031b7d9..fd4c287 100644
--- a/test/scaladoc/resources/doc-root/Any.scala
+++ b/test/scaladoc/resources/doc-root/Any.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2010, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/test/scaladoc/resources/doc-root/AnyRef.scala b/test/scaladoc/resources/doc-root/AnyRef.scala
index 7d8b9f9..362fbcf 100644
--- a/test/scaladoc/resources/doc-root/AnyRef.scala
+++ b/test/scaladoc/resources/doc-root/AnyRef.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2010, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/test/scaladoc/resources/doc-root/Nothing.scala b/test/scaladoc/resources/doc-root/Nothing.scala
index eed6066..57f6fac 100644
--- a/test/scaladoc/resources/doc-root/Nothing.scala
+++ b/test/scaladoc/resources/doc-root/Nothing.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2010, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/test/scaladoc/resources/doc-root/Null.scala b/test/scaladoc/resources/doc-root/Null.scala
index 7455e78..931beb2 100644
--- a/test/scaladoc/resources/doc-root/Null.scala
+++ b/test/scaladoc/resources/doc-root/Null.scala
@@ -1,6 +1,6 @@
 /*                     __                                               *\
 **     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2002-2010, LAMP/EPFL             **
+**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
 **  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
 ** /____/\___/_/ |_/____/_/ | |                                         **
 **                          |/                                          **
diff --git a/test/scaladoc/resources/implicits-base-res.scala b/test/scaladoc/resources/implicits-base-res.scala
index d6c0332..1d17e9a 100644
--- a/test/scaladoc/resources/implicits-base-res.scala
+++ b/test/scaladoc/resources/implicits-base-res.scala
@@ -11,21 +11,21 @@ trait MyNumeric[R]
  *  - tests the complete type inference
  *  - the following inherited methods should appear:
  * {{{
- * def convToGtColonDoubleA(x: Double)    // pimpA3: with a constraint that T <: Double
- * def convToIntA(x: Int)                 // pimpA2: with a constraint that T = Int
- * def convToManifestA(x: T)              // pimpA7: with 2 constraints: T: Manifest and T <: Double
- * def convToMyNumericA(x: T)             // pimpA6: with a constraint that there is x: MyNumeric[T] implicit in scope
- * def convToNumericA(x: T)               // pimpA1: with a constraint that there is x: Numeric[T] implicit in scope
- * def convToPimpedA(x: Bar[Foo[T]])      // pimpA5: no constraints, SHADOWED
- * def convToPimpedA(x: S)                // pimpA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar, SHADOWED
- * def convToPimpedA(x: T)                // pimpA0: with no constraints, SHADOWED
- * def convToTraversableOps(x: T)         // pimpA7: with 2 constraints: T: Manifest and T <: Double
+ * def convToGtColonDoubleA(x: Double)    // enrichA3: with a constraint that T <: Double
+ * def convToIntA(x: Int)                 // enrichA2: with a constraint that T = Int
+ * def convToManifestA(x: T)              // enrichA7: with 2 constraints: T: Manifest and T <: Double
+ * def convToMyNumericA(x: T)             // enrichA6: with a constraint that there is x: MyNumeric[T] implicit in scope
+ * def convToNumericA(x: T)               // enrichA1: with a constraint that there is x: Numeric[T] implicit in scope
+ * def convToEnrichedA(x: Bar[Foo[T]])    // enrichA5: no constraints, SHADOWED
+ * def convToEnrichedA(x: S)              // enrichA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar, SHADOWED
+ * def convToEnrichedA(x: T)              // enrichA0: with no constraints, SHADOWED
+ * def convToTraversableOps(x: T)         // enrichA7: with 2 constraints: T: Manifest and T <: Double
  *                                        // should not be abstract!
  * }}}
  */
 class A[T] {
-  /** This should prevent the implicitly inherited `def convToPimpedA: T` from `pimpA0` from showing up */
-  def convToPimpedA(x: T): T = sys.error("Let's check it out!")
+  /** This should prevent the implicitly inherited `def convToEnrichedA: T` from `enrichA0` from showing up */
+  def convToEnrichedA(x: T): T = sys.error("Let's check it out!")
   /** This should check implicit member elimination in the case of subtyping */
   def foo(a: T, b: AnyRef): T
 }
@@ -33,15 +33,15 @@ class A[T] {
 object A {
   import language.implicitConversions // according to SIP18
 
-  implicit def pimpA0[V](a: A[V]) = new PimpedA(a)
-  implicit def pimpA1[ZBUR: Numeric](a: A[ZBUR]) = new NumericA[ZBUR](a)
-  implicit def pimpA2(a: A[Int]) = new IntA(a)
-  implicit def pimpA3(a: A[T] forSome { type T <: Double }) = new GtColonDoubleA(a)
-  implicit def pimpA4[S](a: A[Foo[Bar[S]]])(implicit foo: Foo[S], bar: Bar[S]): PimpedA[S] = sys.error("not implemented")
-  implicit def pimpA5[Z](a: A[Z]): PimpedA[Bar[Foo[Z]]] = sys.error("not implemented")
-  implicit def pimpA6[Z: MyNumeric](a: A[Z]) = new MyNumericA[Z](a)
+  implicit def enrichA0[V](a: A[V]) = new EnrichedA(a)
+  implicit def enrichA1[ZBUR: Numeric](a: A[ZBUR]) = new NumericA[ZBUR](a)
+  implicit def enrichA2(a: A[Int]) = new IntA(a)
+  implicit def enrichA3(a: A[T] forSome { type T <: Double }) = new GtColonDoubleA(a)
+  implicit def enrichA4[S](a: A[Foo[Bar[S]]])(implicit foo: Foo[S], bar: Bar[S]): EnrichedA[S] = sys.error("not implemented")
+  implicit def enrichA5[Z](a: A[Z]): EnrichedA[Bar[Foo[Z]]] = sys.error("not implemented")
+  implicit def enrichA6[Z: MyNumeric](a: A[Z]) = new MyNumericA[Z](a)
   // TODO: Add H <: Double and see why it crashes for C and D -- context bounds, need to check!
-  implicit def pimpA7[H <: Double : Manifest](a: A[H]) = new ManifestA[H](a) with MyTraversableOps[H] { def convToTraversableOps(x: H): H = sys.error("no") }
+  implicit def enrichA7[H <: Double : Manifest](a: A[H]) = new ManifestA[H](a) with MyTraversableOps[H] { def convToTraversableOps(x: H): H = sys.error("no") }
 }
 
 
@@ -49,14 +49,14 @@ object A {
  *  - tests the existential type solving
  *  - the following inherited methods should appear:
  * {{{
- * def convToGtColonDoubleA(x: Double)    // pimpA3: no constraints
- * def convToManifestA(x: Double)         // pimpA7: no constraints
- * def convToMyNumericA(x: Double)        // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
- * def convToNumericA(x: Double)          // pimpA1: no constraintsd
- * def convToPimpedA(x: Bar[Foo[Double]]) // pimpA5: no constraints, SHADOWED
- * def convToPimpedA(x: Double)           // pimpA0: no constraints, SHADOWED
- * def convToTraversableOps(x: Double)    // pimpA7: no constraints
- *                                        // should not be abstract!
+ * def convToGtColonDoubleA(x: Double)      // enrichA3: no constraints
+ * def convToManifestA(x: Double)           // enrichA7: no constraints
+ * def convToMyNumericA(x: Double)          // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
+ * def convToNumericA(x: Double)            // enrichA1: no constraintsd
+ * def convToEnrichedA(x: Bar[Foo[Double]]) // enrichA5: no constraints, SHADOWED
+ * def convToEnrichedA(x: Double)           // enrichA0: no constraints, SHADOWED
+ * def convToTraversableOps(x: Double)      // enrichA7: no constraints
+ *                                          // should not be abstract!
  * }}}
  */
 class B extends A[Double]
@@ -67,11 +67,11 @@ object B extends A
  *  - tests asSeenFrom
  *  - the following inherited methods should appear:
  * {{{
- * def convToIntA(x: Int)                 // pimpA2: no constraints
- * def convToMyNumericA(x: Int)           // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
- * def convToNumericA(x: Int)             // pimpA1: no constraints
- * def convToPimpedA(x: Int)              // pimpA0: no constraints, SHADOWED
- * def convToPimpedA(x: Bar[Foo[Int]])    // pimpA5: no constraints, SHADOWED
+ * def convToIntA(x: Int)                 // enrichA2: no constraints
+ * def convToMyNumericA(x: Int)           // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
+ * def convToNumericA(x: Int)             // enrichA1: no constraints
+ * def convToEnrichedA(x: Int)            // enrichA0: no constraints, SHADOWED
+ * def convToEnrichedA(x: Bar[Foo[Int]])  // enrichA5: no constraints, SHADOWED
  * }}}
  */
 class C extends A[Int]
@@ -82,10 +82,10 @@ object C extends A
  *  - tests implicit elimination
  *  - the following inherited methods should appear:
  * {{{
- * def convToMyNumericA(x: String)        // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
- * def convToNumericA(x: String)          // pimpA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
- * def convToPimpedA(x: Bar[Foo[String]]) // pimpA5: no constraints, SHADOWED
- * def convToPimpedA(x: String)           // pimpA0: no constraints, SHADOWED
+ * def convToMyNumericA(x: String)        // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
+ * def convToNumericA(x: String)          // enrichA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
+ * def convToEnrichedA(x: Bar[Foo[String]]) // enrichA5: no constraints, SHADOWED
+ * def convToEnrichedA(x: String)           // enrichA0: no constraints, SHADOWED
  * }}}
  */
 class D extends A[String]
@@ -93,12 +93,12 @@ class D extends A[String]
 object D extends A
 
 
-/** PimpedA class <br/>
+/** EnrichedA class <br/>
  *  - tests simple inheritance and asSeenFrom
  *  - A, B and C should be implicitly converted to this */
-class PimpedA[V](a: A[V]) {
-  /** The convToPimpedA: V documentation... */
-  def convToPimpedA(x: V): V = sys.error("Not implemented")
+class EnrichedA[V](a: A[V]) {
+  /** The convToEnrichedA: V documentation... */
+  def convToEnrichedA(x: V): V = sys.error("Not implemented")
 }
 
 /** NumericA class <br/>
diff --git a/test/scaladoc/resources/links.scala b/test/scaladoc/resources/links.scala
index bd69665..ecac9c6 100644
--- a/test/scaladoc/resources/links.scala
+++ b/test/scaladoc/resources/links.scala
@@ -25,6 +25,7 @@ package scala.test.scaladoc.links {
   object Target {
     type T = Int => Int
     type S = Int
+    type ::[X] = scala.collection.immutable.::[X]
     class C
     def foo(i: Int) = 2
     def foo(z: String) = 3
@@ -46,6 +47,7 @@ package scala.test.scaladoc.links {
    *  - [[[[Target!.foo[A[_[_]]]*                          trait Target -> def foo with 3 nested tparams]]]] (should exercise nested parens)
    *  - [[Target$.T                                        object Target -> type T]]
    *  - [[Target$.S                                        object Target -> type S]]
+   *  - [[Target$.::                                       object Target -> type ::]]
    *  - [[Target$.foo(z:Str*                               object Target -> def foo]]
    *  - [[Target$.bar                                      object Target -> def bar]]
    *  - [[[[Target$.foo[A[_[_]]]*                          trait Target -> def foo with 3 nested tparams]]]] (should exercise nested parens)
diff --git a/test/scaladoc/run/SI-191-deprecated.check b/test/scaladoc/run/SI-191-deprecated.check
deleted file mode 100755
index 3925a0d..0000000
--- a/test/scaladoc/run/SI-191-deprecated.check
+++ /dev/null
@@ -1 +0,0 @@
-Done.
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-191-deprecated.scala b/test/scaladoc/run/SI-191-deprecated.scala
deleted file mode 100755
index 4ed24ff..0000000
--- a/test/scaladoc/run/SI-191-deprecated.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-import scala.tools.nsc.doc.model._
-import scala.tools.nsc.doc.base._
-import scala.tools.nsc.doc.base.comment._
-import scala.tools.partest.ScaladocModelTest
-import java.net.{URI, URL}
-import java.io.File
-
-object Test extends ScaladocModelTest {
-
-  override def code =
-    """
-        /** See:
-         *  - [[scala.collection.Map]] Simple linking
-         *  - [[scala.collection.immutable.::]] Linking with symbolic name
-         *  - [[scala.Int]].toLong Linking to a class
-         *  - [[scala.Predef]] Linking to an object
-         *  - [[scala.Int.toLong]] Linking to a method
-         *  - [[scala]] Linking to a package
-         *  - [[scala.AbstractMethodError]] Linking to a member in the package object
-         *  - [[scala.Predef.String]] Linking to a member in an object
-         *
-         *  Don't look at:
-         *  - [[scala.NoLink]] Not linking :)
-         */
-        object Test {
-          def foo(param: Any) {}
-          def barr(l: scala.collection.immutable.List[Any]) {}
-          def bar(l: List[String]) {}   // TODO: Should be able to link to type aliases
-          def baz(d: java.util.Date) {} // Should not be resolved
-        }
-    """
-
-  def scalaURL = "http://bog.us"
-
-  override def scaladocSettings =  "-no-link-warnings -external-urls scala=" + scalaURL
-
-  def testModel(rootPackage: Package) {
-    import access._
-    val test = rootPackage._object("Test")
-
-    def check(memberDef: Def, expected: Int) {
-      val externals = memberDef.valueParams(0)(0).resultType.refEntity collect {
-        case (_, (LinkToExternal(name, url), _)) => assert(url.contains(scalaURL)); name
-      }
-      assert(externals.size == expected)
-    }
-
-    check(test._method("foo"), 1)
-    check(test._method("bar"), 0)
-    check(test._method("barr"), 2)
-    check(test._method("baz"), 0)
-
-    val expectedUrls = collection.mutable.Set[String](
-                         "scala.collection.Map",
-                         "scala.collection.immutable.::",
-                         "scala.Int",
-                         "scala.Predef$",
-                         "scala.Int@toLong:Long",
-                         "scala.package",
-                         "scala.package@AbstractMethodError=AbstractMethodError",
-                         "scala.Predef$@String=String"
-                       ).map(scalaURL + "/index.html#" + _)
-
-    def isExpectedExternalLink(l: EntityLink) = l.link match {
-      case LinkToExternal(name, url) => assert(expectedUrls contains url, url); true
-      case _ => false
-    }
-
-    assert(countLinks(test.comment.get, isExpectedExternalLink) == 8,
-           countLinks(test.comment.get, isExpectedExternalLink) + " == 8")
-  }
-}
diff --git a/test/scaladoc/run/SI-4676.scala b/test/scaladoc/run/SI-4676.scala
index b83a59a..99b3c55 100644
--- a/test/scaladoc/run/SI-4676.scala
+++ b/test/scaladoc/run/SI-4676.scala
@@ -21,6 +21,7 @@ object Test extends ScaladocModelTest {
 
     // check correct expansion of the use case signature
     val x = rootPackage._class("SI_4676")._method("x")
-    assert(x.valueParams(0)(0).resultType.name == "(String, String)", "parameter ss of method x has type (String, String")
+    val resultType = x.valueParams(0)(0).resultType.name
+    assert(resultType == "SS", s"parameter ss of method x has type $resultType, expected SS!")
   }
 }
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-6812.check b/test/scaladoc/run/SI-6812.check
index 619c561..3be8a30 100644
--- a/test/scaladoc/run/SI-6812.check
+++ b/test/scaladoc/run/SI-6812.check
@@ -1 +1,2 @@
+warning: -Ymacro-no-expand is deprecated: Use -Ymacro-expand:none
 Done.
diff --git a/test/scaladoc/run/SI-6812.scala b/test/scaladoc/run/SI-6812.scala
index fbd9588..90005b8 100644
--- a/test/scaladoc/run/SI-6812.scala
+++ b/test/scaladoc/run/SI-6812.scala
@@ -5,12 +5,12 @@ import language._
 object Test extends ScaladocModelTest {
 
   override def code = """
-    import scala.reflect.macros.Context
+    import scala.reflect.macros.blackbox.Context
     import language.experimental.macros
 
     object Macros {
       def impl(c: Context) = c.literalUnit
-      def foo = macro impl
+      def foo: Unit = macro impl
     }
 
     class C {
@@ -19,6 +19,6 @@ object Test extends ScaladocModelTest {
   """
 
   def scaladocSettings = ""
-  override def extraSettings = super.extraSettings + " -Ymacro-no-expand"
+  override def extraSettings = super.extraSettings + " -Ymacro-no-expand -deprecation"
   def testModel(root: Package) = ()
 }
diff --git a/test/scaladoc/run/SI-6812.check b/test/scaladoc/run/SI-6812b.check
similarity index 100%
copy from test/scaladoc/run/SI-6812.check
copy to test/scaladoc/run/SI-6812b.check
diff --git a/test/scaladoc/run/SI-6812b.scala b/test/scaladoc/run/SI-6812b.scala
new file mode 100644
index 0000000..74dbd13
--- /dev/null
+++ b/test/scaladoc/run/SI-6812b.scala
@@ -0,0 +1,24 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+import language._
+
+object Test extends ScaladocModelTest {
+
+  override def code = """
+    import scala.reflect.macros.blackbox.Context
+    import language.experimental.macros
+
+    object Macros {
+      def impl(c: Context) = c.literalUnit
+      def foo: Unit = macro impl
+    }
+
+    class C {
+      def bar = Macros.foo
+    }
+  """
+
+  def scaladocSettings = ""
+  override def extraSettings = super.extraSettings + " -Ymacro-expand:none"
+  def testModel(root: Package) = ()
+}
diff --git a/test/scaladoc/run/diagrams-base.scala b/test/scaladoc/run/diagrams-base.scala
index b7aeed5..1e83a78 100644
--- a/test/scaladoc/run/diagrams-base.scala
+++ b/test/scaladoc/run/diagrams-base.scala
@@ -46,7 +46,7 @@ object Test extends ScaladocModelTest {
 
     val (incoming, outgoing) = diag.edges.partition(!_._1.isThisNode)
     assert(incoming.length == 5)
-    assert(outgoing.head._2.length == 4)
+    assert(outgoing.head._2.length == 4, s"${outgoing.head._2} has length ${outgoing.head._2.length}, expecting 4")
 
     val (outgoingSuperclass, outgoingImplicit) = outgoing.head._2.partition(_.isNormalNode)
     assert(outgoingSuperclass.length == 3)
diff --git a/test/scaladoc/run/diagrams-filtering.scala b/test/scaladoc/run/diagrams-filtering.scala
index 54e3e9a..12b5f4c 100644
--- a/test/scaladoc/run/diagrams-filtering.scala
+++ b/test/scaladoc/run/diagrams-filtering.scala
@@ -57,7 +57,7 @@ object Test extends ScaladocModelTest {
     // Assert we have just 3 nodes and 2 edges
     val A = base._trait("A")
     val ADiag = A.inheritanceDiagram.get
-    assert(ADiag.nodes.length == 3)
+    assert(ADiag.nodes.length == 3, s"${ADiag.nodes} has length ${ADiag.nodes.length}, expected 3")
     assert(ADiag.edges.map(_._2.length).sum == 2)
 
     // trait C
diff --git a/test/scaladoc/run/implicits-base.scala b/test/scaladoc/run/implicits-base.scala
index 3d57306..8f8652c 100644
--- a/test/scaladoc/run/implicits-base.scala
+++ b/test/scaladoc/run/implicits-base.scala
@@ -25,54 +25,54 @@ object Test extends ScaladocModelTest {
 
     val A = base._class("A")
 
-    // def convToPimpedA(x: T)                // pimpA0: with no constraints, SHADOWED
-    conv = A._conversion(A.qualifiedName + ".pimpA0")
+    // def convToEnrichedA(x: T)                // enrichA0: with no constraints, SHADOWED
+    conv = A._conversion(A.qualifiedName + ".enrichA0")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 0)
-    assert(isShadowed(conv._member("convToPimpedA")))
-    assert(conv._member("convToPimpedA").resultType.name == "T")
+    assert(isShadowed(conv._member("convToEnrichedA")))
+    assert(conv._member("convToEnrichedA").resultType.name == "T")
 
-    // def convToNumericA: T               // pimpA1: with a constraint that there is x: Numeric[T] implicit in scope
-    conv = A._conversion(A.qualifiedName + ".pimpA1")
+    // def convToNumericA: T               // enrichA1: with a constraint that there is x: Numeric[T] implicit in scope
+    conv = A._conversion(A.qualifiedName + ".enrichA1")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 1)
     assert(conv._member("convToNumericA").resultType.name == "T")
 
-    // def convToIntA: Int                 // pimpA2: with a constraint that T = Int
-    conv = A._conversion(A.qualifiedName + ".pimpA2")
+    // def convToIntA: Int                 // enrichA2: with a constraint that T = Int
+    conv = A._conversion(A.qualifiedName + ".enrichA2")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 1)
     assert(conv._member("convToIntA").resultType.name == "Int")
 
-    // def convToGtColonDoubleA: Double    // pimpA3: with a constraint that T <: Double
-    conv = A._conversion(A.qualifiedName + ".pimpA3")
+    // def convToGtColonDoubleA: Double    // enrichA3: with a constraint that T <: Double
+    conv = A._conversion(A.qualifiedName + ".enrichA3")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 1)
     assert(conv._member("convToGtColonDoubleA").resultType.name == "Double")
 
-    // def convToPimpedA: S                // pimpA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar
-    conv = A._conversion(A.qualifiedName + ".pimpA4")
+    // def convToEnrichedA: S                // enrichA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar
+    conv = A._conversion(A.qualifiedName + ".enrichA4")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 3)
-    assert(conv._member("convToPimpedA").resultType.name == "S")
+    assert(conv._member("convToEnrichedA").resultType.name == "S")
 
-    // def convToPimpedA: Bar[Foo[T]]      // pimpA5: no constraints
-    conv = A._conversion(A.qualifiedName + ".pimpA5")
+    // def convToEnrichedA: Bar[Foo[T]]      // enrichA5: no constraints
+    conv = A._conversion(A.qualifiedName + ".enrichA5")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 0)
-    assert(isShadowed(conv._member("convToPimpedA")))
-    assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[T]]")
+    assert(isShadowed(conv._member("convToEnrichedA")))
+    assert(conv._member("convToEnrichedA").resultType.name == "Bar[Foo[T]]")
 
-    // def convToMyNumericA: T             // pimpA6: with a constraint that there is x: MyNumeric[T] implicit in scope
-    conv = A._conversion(A.qualifiedName + ".pimpA6")
+    // def convToMyNumericA: T             // enrichA6: with a constraint that there is x: MyNumeric[T] implicit in scope
+    conv = A._conversion(A.qualifiedName + ".enrichA6")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 1)
     assert(conv._member("convToMyNumericA").resultType.name == "T")
 
-    // def convToManifestA: T              // pimpA7: with 2 constraints: T: Manifest and T <: Double
-    // def convToTraversableOps: T         // pimpA7: with 2 constraints: T: Manifest and T <: Double
+    // def convToManifestA: T              // enrichA7: with 2 constraints: T: Manifest and T <: Double
+    // def convToTraversableOps: T         // enrichA7: with 2 constraints: T: Manifest and T <: Double
                                            // should not be abstract!
-    conv = A._conversion(A.qualifiedName + ".pimpA7")
+    conv = A._conversion(A.qualifiedName + ".enrichA7")
     assert(conv.members.length == 2)
     assert(conv.constraints.length == 2)
     assert(conv._member("convToManifestA").resultType.name == "T")
@@ -84,45 +84,45 @@ object Test extends ScaladocModelTest {
     val B = base._class("B")
 
     // these conversions should not affect B
-    assert(B._conversions(A.qualifiedName + ".pimpA2").isEmpty)
-    assert(B._conversions(A.qualifiedName + ".pimpA4").isEmpty)
+    assert(B._conversions(A.qualifiedName + ".enrichA2").isEmpty)
+    assert(B._conversions(A.qualifiedName + ".enrichA4").isEmpty)
 
-    // def convToPimpedA(x: Double)           // pimpA0: no constraints, SHADOWED
-    conv = B._conversion(A.qualifiedName + ".pimpA0")
+    // def convToEnrichedA(x: Double)           // enrichA0: no constraints, SHADOWED
+    conv = B._conversion(A.qualifiedName + ".enrichA0")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 0)
-    assert(isShadowed(conv._member("convToPimpedA")))
-    assert(conv._member("convToPimpedA").resultType.name == "Double")
+    assert(isShadowed(conv._member("convToEnrichedA")))
+    assert(conv._member("convToEnrichedA").resultType.name == "Double")
 
-    // def convToNumericA: Double          // pimpA1: no constraints
-    conv = B._conversion(A.qualifiedName + ".pimpA1")
+    // def convToNumericA: Double          // enrichA1: no constraints
+    conv = B._conversion(A.qualifiedName + ".enrichA1")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 0)
     assert(conv._member("convToNumericA").resultType.name == "Double")
 
-    // def convToGtColonDoubleA: Double    // pimpA3: no constraints
-    conv = B._conversion(A.qualifiedName + ".pimpA3")
+    // def convToGtColonDoubleA: Double    // enrichA3: no constraints
+    conv = B._conversion(A.qualifiedName + ".enrichA3")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 0)
     assert(conv._member("convToGtColonDoubleA").resultType.name == "Double")
 
-    // def convToPimpedA: Bar[Foo[Double]] // pimpA5: no constraints
-    conv = B._conversion(A.qualifiedName + ".pimpA5")
+    // def convToEnrichedA: Bar[Foo[Double]] // enrichA5: no constraints
+    conv = B._conversion(A.qualifiedName + ".enrichA5")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 0)
-    assert(isShadowed(conv._member("convToPimpedA")))
-    assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[Double]]")
+    assert(isShadowed(conv._member("convToEnrichedA")))
+    assert(conv._member("convToEnrichedA").resultType.name == "Bar[Foo[Double]]")
 
-    // def convToMyNumericA: Double        // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
-    conv = B._conversion(A.qualifiedName + ".pimpA6")
+    // def convToMyNumericA: Double        // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
+    conv = B._conversion(A.qualifiedName + ".enrichA6")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 1)
     assert(conv._member("convToMyNumericA").resultType.name == "Double")
 
-    // def convToManifestA: Double         // pimpA7: no constraints
-    // def convToTraversableOps: Double    // pimpA7: no constraints
+    // def convToManifestA: Double         // enrichA7: no constraints
+    // def convToTraversableOps: Double    // enrichA7: no constraints
     //                                     // should not be abstract!
-    conv = B._conversion(A.qualifiedName + ".pimpA7")
+    conv = B._conversion(A.qualifiedName + ".enrichA7")
     assert(conv.members.length == 2)
     assert(conv.constraints.length == 0)
     assert(conv._member("convToManifestA").resultType.name == "Double")
@@ -134,38 +134,38 @@ object Test extends ScaladocModelTest {
     val C = base._class("C")
 
     // these conversions should not affect C
-    assert(C._conversions(A.qualifiedName + ".pimpA3").isEmpty)
-    assert(C._conversions(A.qualifiedName + ".pimpA4").isEmpty)
-    assert(C._conversions(A.qualifiedName + ".pimpA7").isEmpty)
+    assert(C._conversions(A.qualifiedName + ".enrichA3").isEmpty)
+    assert(C._conversions(A.qualifiedName + ".enrichA4").isEmpty)
+    assert(C._conversions(A.qualifiedName + ".enrichA7").isEmpty)
 
-    // def convToPimpedA(x: Int)           // pimpA0: no constraints, SHADOWED
-    conv = C._conversion(A.qualifiedName + ".pimpA0")
+    // def convToEnrichedA(x: Int)           // enrichA0: no constraints, SHADOWED
+    conv = C._conversion(A.qualifiedName + ".enrichA0")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 0)
-    assert(isShadowed(conv._member("convToPimpedA")))
-    assert(conv._member("convToPimpedA").resultType.name == "Int")
+    assert(isShadowed(conv._member("convToEnrichedA")))
+    assert(conv._member("convToEnrichedA").resultType.name == "Int")
 
-    // def convToNumericA: Int             // pimpA1: no constraints
-    conv = C._conversion(A.qualifiedName + ".pimpA1")
+    // def convToNumericA: Int             // enrichA1: no constraints
+    conv = C._conversion(A.qualifiedName + ".enrichA1")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 0)
     assert(conv._member("convToNumericA").resultType.name == "Int")
 
-    // def convToIntA: Int                 // pimpA2: no constraints
-    conv = C._conversion(A.qualifiedName + ".pimpA2")
+    // def convToIntA: Int                 // enrichA2: no constraints
+    conv = C._conversion(A.qualifiedName + ".enrichA2")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 0)
     assert(conv._member("convToIntA").resultType.name == "Int")
 
-    // def convToPimpedA: Bar[Foo[Int]]    // pimpA5: no constraints
-    conv = C._conversion(A.qualifiedName + ".pimpA5")
+    // def convToEnrichedA: Bar[Foo[Int]]    // enrichA5: no constraints
+    conv = C._conversion(A.qualifiedName + ".enrichA5")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 0)
-    assert(isShadowed(conv._member("convToPimpedA")))
-    assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[Int]]")
+    assert(isShadowed(conv._member("convToEnrichedA")))
+    assert(conv._member("convToEnrichedA").resultType.name == "Bar[Foo[Int]]")
 
-    // def convToMyNumericA: Int           // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
-    conv = C._conversion(A.qualifiedName + ".pimpA6")
+    // def convToMyNumericA: Int           // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
+    conv = C._conversion(A.qualifiedName + ".enrichA6")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 1)
     assert(conv._member("convToMyNumericA").resultType.name == "Int")
@@ -175,33 +175,33 @@ object Test extends ScaladocModelTest {
     val D = base._class("D")
 
     // these conversions should not affect D
-    assert(D._conversions(A.qualifiedName + ".pimpA2").isEmpty)
-    assert(D._conversions(A.qualifiedName + ".pimpA3").isEmpty)
-    assert(D._conversions(A.qualifiedName + ".pimpA4").isEmpty)
-    assert(D._conversions(A.qualifiedName + ".pimpA7").isEmpty)
+    assert(D._conversions(A.qualifiedName + ".enrichA2").isEmpty)
+    assert(D._conversions(A.qualifiedName + ".enrichA3").isEmpty)
+    assert(D._conversions(A.qualifiedName + ".enrichA4").isEmpty)
+    assert(D._conversions(A.qualifiedName + ".enrichA7").isEmpty)
 
-    // def convToPimpedA(x: String)        // pimpA0: no constraints, SHADOWED
-    conv = D._conversion(A.qualifiedName + ".pimpA0")
+    // def convToEnrichedA(x: String)        // enrichA0: no constraints, SHADOWED
+    conv = D._conversion(A.qualifiedName + ".enrichA0")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 0)
-    assert(isShadowed(conv._member("convToPimpedA")))
-    assert(conv._member("convToPimpedA").resultType.name == "String")
+    assert(isShadowed(conv._member("convToEnrichedA")))
+    assert(conv._member("convToEnrichedA").resultType.name == "String")
 
-    // def convToNumericA: String          // pimpA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
-    conv = D._conversion(A.qualifiedName + ".pimpA1")
+    // def convToNumericA: String          // enrichA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
+    conv = D._conversion(A.qualifiedName + ".enrichA1")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 1)
     assert(conv._member("convToNumericA").resultType.name == "String")
 
-    // def convToPimpedA: Bar[Foo[String]] // pimpA5: no constraints
-    conv = D._conversion(A.qualifiedName + ".pimpA5")
+    // def convToEnrichedA: Bar[Foo[String]] // enrichA5: no constraints
+    conv = D._conversion(A.qualifiedName + ".enrichA5")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 0)
-    assert(isShadowed(conv._member("convToPimpedA")))
-    assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[String]]")
+    assert(isShadowed(conv._member("convToEnrichedA")))
+    assert(conv._member("convToEnrichedA").resultType.name == "Bar[Foo[String]]")
 
-    // def convToMyNumericA: String        // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
-    conv = D._conversion(A.qualifiedName + ".pimpA6")
+    // def convToMyNumericA: String        // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
+    conv = D._conversion(A.qualifiedName + ".enrichA6")
     assert(conv.members.length == 1)
     assert(conv.constraints.length == 1)
     assert(conv._member("convToMyNumericA").resultType.name == "String")
diff --git a/test/scaladoc/run/links.scala b/test/scaladoc/run/links.scala
index fde24ed..64441c2 100644
--- a/test/scaladoc/run/links.scala
+++ b/test/scaladoc/run/links.scala
@@ -26,7 +26,7 @@ object Test extends ScaladocModelTest {
 
     val memberLinks = countLinks(TEST.comment.get, _.link.isInstanceOf[LinkToMember[_, _]])
     val templateLinks = countLinks(TEST.comment.get, _.link.isInstanceOf[LinkToTpl[_]])
-    assert(memberLinks == 17,  memberLinks +   " == 17 (the member links in object TEST)")
+    assert(memberLinks == 18,  memberLinks +   " == 18 (the member links in object TEST)")
     assert(templateLinks == 6, templateLinks + " ==  6 (the template links in object TEST)")
   }
 }
diff --git a/test/scaladoc/run/t5527.check b/test/scaladoc/run/t5527.check
new file mode 100644
index 0000000..bfaa4ad
--- /dev/null
+++ b/test/scaladoc/run/t5527.check
@@ -0,0 +1,132 @@
+newSource1.scala:47: warning: discarding unmoored doc comment
+        /** Document this crucial constant for posterity.
+        ^
+newSource1.scala:64: warning: discarding unmoored doc comment
+        /*************************\
+        ^
+newSource1.scala:73: warning: discarding unmoored doc comment
+        val i = 10 */** Important!
+                   ^
+[[syntax trees at end of                    parser]] // newSource1.scala
+package <empty> {
+  object UselessComments extends scala.AnyRef {
+    def <init>() = {
+      super.<init>();
+      ()
+    };
+    var z = 0;
+    def test1 = {
+      object Maybe extends scala.AnyRef {
+        def <init>() = {
+          super.<init>();
+          ()
+        };
+        /** Some comment inside */
+        def nothing() = ()
+      };
+      ()
+    };
+    def test2 = {
+      var x = 4;
+      if (true)
+        {
+          x = 5;
+          val y = 6;
+          ()
+        }
+      else
+        ()
+    };
+    def test3 = {
+      if (true)
+        z = 3
+      else
+        ();
+      val t = 4;
+      0.to(4).foreach(((i) => println(i)))
+    };
+    val test4 = 'a' match {
+      case ('0'| '1'| '2'| '3'| '4'| '5'| '6'| '7'| '8'| '9') => true
+      case _ => false
+    };
+    def test5: scala.Unit = if (true)
+      $qmark$qmark$qmark
+    else
+      ();
+    def test6 = {
+      val u = 4;
+      0.to(u).foreach(((i) => println(i)))
+    };
+    def test7 = {
+      val u = 4;
+      0.to(u).foreach(((i) => println(i)))
+    };
+    def test8 = {
+      val z = "fancy";
+      z.replace("fanc", "arts")
+    };
+    def test9 = {
+      val i = 10.$times(10);
+      assert(i.$eq$eq(100))
+    }
+  };
+  /** comments that we should keep */
+  object UsefulComments extends scala.AnyRef {
+    def <init>() = {
+      super.<init>();
+      ()
+    };
+    /** class A */
+    class A extends scala.AnyRef {
+      def <init>() = {
+        super.<init>();
+        ()
+      };
+      /** f */
+      def f(i: Int) = i;
+      /** v */
+      val v = 1;
+      /** u */
+      var u = 2
+    };
+    /** trait B */
+    abstract trait B extends scala.AnyRef {
+      def $init$() = {
+        ()
+      };
+      /** T */
+      type T;
+      /** f */
+      def f(i: Int): scala.Unit;
+      /** v */
+      val v = 1;
+      /** u */
+      var u = 2
+    };
+    /** object C */
+    object C extends scala.AnyRef {
+      def <init>() = {
+        super.<init>();
+        ()
+      };
+      /** f */
+      def f(i: Int) = i;
+      /** v */
+      val v = 1;
+      /** u */
+      var u = 2
+    };
+    /** class D */
+    @new deprecated("use ... instead", "2.10.0") class D extends scala.AnyRef {
+      def <init>() = {
+        super.<init>();
+        ()
+      }
+    };
+    /** Get the simple value.
+       *  @return the default value
+       */
+    def value: Int = 7
+  }
+}
+
diff --git a/test/scaladoc/run/t5527.scala b/test/scaladoc/run/t5527.scala
new file mode 100644
index 0000000..770d4ad
--- /dev/null
+++ b/test/scaladoc/run/t5527.scala
@@ -0,0 +1,155 @@
+import scala.tools.partest._
+import java.io._
+import scala.tools.nsc._
+import scala.tools.cmd.CommandLineParser
+import scala.tools.nsc.doc.{Settings, DocFactory}
+import scala.tools.nsc.reporters.ConsoleReporter
+
+object Test extends DirectTest {
+
+  override def extraSettings: String = "-usejavacp -Xprint:parser -Yrangepos -Ystop-after:parser -d " + testOutput.path
+
+  override def code = """
+    // SI-5527
+    object UselessComments {
+
+      var z = 0
+
+      def test1 = {
+        /** Some comment here */
+        object Maybe {
+          /** Some comment inside */
+          def nothing() = ()
+        }
+      }
+
+      def test2 = {
+        var x = 4
+        if (true) {
+          /** Testing 123 */
+          x = 5
+          val y = 6
+        }
+      }
+
+      def test3 = {
+        if (true)
+         z = 3
+
+        /** Calculate this result. */
+        val t = 4
+        for (i <- 0 to 4)
+          println(i)
+      }
+
+      val test4 = ('a') match {
+        /** Another digit is a giveaway. */
+        case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9'  =>
+          true
+        case _ =>
+          false
+      }
+
+      def test5 {
+        /** @martin is this right? It shouldn't flag me as scaladoc. */
+        if (true) ???
+      }
+
+      def test6 = {
+        /** Document this crucial constant for posterity.
+         *  Don't forget to dedoc this comment if you refactor to a local.
+         *  @author Paul Phillips
+         */
+        val u = 4
+        for (i <- 0 to u)
+          println(i)
+      }
+      def test7 = {
+        /** Some standard tags are tolerated locally and shouldn't trigger a warning.
+         *  @note Don't change this unless you know what you're doing. This means you.
+         */
+        val u = 4
+        for (i <- 0 to u)
+          println(i)
+      }
+      def test8 = {
+        /*************************\
+         * Fancy ASCII Art Block *
+         *   @author som-snytt   *
+        \*************************/
+        // this is just a local
+        val z = "fancy"
+        z replace ("fanc", "arts")
+      }
+      def test9 = {
+        val i = 10 */** Important!
+                     *  We have to multiply here!
+                     *  @author community
+                     *  @see SI-1234
+                     */
+                10
+        assert(i == 100)
+      }
+    }
+
+    /** comments that we should keep */
+    object UsefulComments {
+      /** class A */
+      class A {
+        /** f */
+        def f(i: Int) = i
+        /** v */
+        val v = 1
+        /** u */
+        var u = 2
+      }     
+      /** trait B */
+      trait B {
+        /** T */
+        type T
+        /** f */
+        def f(i: Int)
+        /** v */
+        val v = 1
+        /** u */
+        var u = 2
+      }     
+      /** object C */
+      object C {
+        /** f */
+        def f(i: Int) = i
+        /** v */
+        val v = 1
+        /** u */
+        var u = 2
+      }
+      /** class D */
+      @deprecated("use ... instead", "2.10.0")
+      class D
+
+      /** Get the simple value.
+       *  @return the default value
+       */
+      // an intervening line comment
+      /* I had more to say, but didn't want to pollute the scaladoc. */
+      def value: Int = 7
+    }
+  """.trim
+
+  override def show(): Unit = {
+    // redirect err to out, for logging
+    val prevErr = System.err
+    System.setErr(System.out)
+    compile()
+    System.setErr(prevErr)
+  }
+
+  override def newCompiler(args: String*): Global = {
+    // we want the Scaladoc compiler here, because it keeps DocDef nodes in the tree
+    val settings = new Settings(_ => ())
+    val command = new ScalaDoc.Command((CommandLineParser tokenize extraSettings) ++ args.toList, settings)
+    new DocFactory(new ConsoleReporter(settings), settings).compiler
+  }
+
+  override def isDebug = false // so we don't get the newSettings warning
+}
diff --git a/test/scaladoc/run/t7124.check b/test/scaladoc/run/t7124.check
new file mode 100644
index 0000000..96b627a
--- /dev/null
+++ b/test/scaladoc/run/t7124.check
@@ -0,0 +1,3 @@
+List()
+List(Paragraph(Text(macro)))
+Done.
diff --git a/test/scaladoc/run/t7124.scala b/test/scaladoc/run/t7124.scala
new file mode 100644
index 0000000..e827200
--- /dev/null
+++ b/test/scaladoc/run/t7124.scala
@@ -0,0 +1,22 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+  override def code = """
+  import scala.language.experimental.macros
+  class Test {
+     def print(): Unit = macro ???
+  }
+                      """
+
+  def scaladocSettings = ""
+
+  def testModel(root: Package) = {
+    import access._
+    val p = root._class("Test")._method("print")
+
+    println(p.annotations) // no annotations
+    println(p.flags) // a 'macro' flag
+  }
+}
diff --git a/test/scaladoc/run/SI-6812.check b/test/scaladoc/run/t7876.check
similarity index 100%
copy from test/scaladoc/run/SI-6812.check
copy to test/scaladoc/run/t7876.check
diff --git a/test/scaladoc/run/t7876.scala b/test/scaladoc/run/t7876.scala
new file mode 100644
index 0000000..63c63f8
--- /dev/null
+++ b/test/scaladoc/run/t7876.scala
@@ -0,0 +1,19 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+  override def code = """
+  import language.higherKinds
+  trait T[M[_]]
+  class C extends T[Function0]
+  class D extends T[Tuple1]
+                      """
+
+  def scaladocSettings = ""
+
+  def testModel(rootPackage: Package) = {
+    import access._
+    // did not crash
+  }
+}
diff --git a/test/scaladoc/run/t7876b.check b/test/scaladoc/run/t7876b.check
new file mode 100644
index 0000000..21aaf3b
--- /dev/null
+++ b/test/scaladoc/run/t7876b.check
@@ -0,0 +1,3 @@
+foo: FInt
+foo: TInt
+Done.
diff --git a/test/scaladoc/run/t7876b.scala b/test/scaladoc/run/t7876b.scala
new file mode 100644
index 0000000..4d5b8c2
--- /dev/null
+++ b/test/scaladoc/run/t7876b.scala
@@ -0,0 +1,24 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+// Don't dealias just to print a Function or Tuple type.
+object Test extends ScaladocModelTest {
+
+  override def code = """
+  class Test {
+     type FInt = Function0[Int]
+     type TInt = Tuple1[Int]
+     def foo: FInt
+     def bar: TInt
+  }
+                      """
+
+  def scaladocSettings = ""
+
+  def testModel(rootPackage: Package) = {
+    import access._
+    List("foo", "bar").foreach { name =>
+      println("foo: " + rootPackage._class("Test")._method(name).resultType.name)
+    }
+  }
+}
diff --git a/test/scaladoc/run/t8407.check b/test/scaladoc/run/t8407.check
new file mode 100644
index 0000000..06c860b
--- /dev/null
+++ b/test/scaladoc/run/t8407.check
@@ -0,0 +1,4 @@
+newSource:4: warning: not found: type $NotFound
+   *  @usecase def zipWithIndex: $NotFound
+               ^
+Done.
diff --git a/test/scaladoc/run/t8407.scala b/test/scaladoc/run/t8407.scala
new file mode 100644
index 0000000..2df9f4f
--- /dev/null
+++ b/test/scaladoc/run/t8407.scala
@@ -0,0 +1,20 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+  override def code = """
+class C {
+  /**
+   *  @usecase def zipWithIndex: $NotFound
+   *
+   */
+  def zipWithIndex: Int = ???
+}
+  """
+
+  def scaladocSettings = ""
+
+  def testModel(root: Package) = {
+    // just testing that it doesn't error out.
+  }
+}
diff --git a/test/scaladoc/scalacheck/CommentFactoryTest.scala b/test/scaladoc/scalacheck/CommentFactoryTest.scala
index 96174d2..ff64a25 100644
--- a/test/scaladoc/scalacheck/CommentFactoryTest.scala
+++ b/test/scaladoc/scalacheck/CommentFactoryTest.scala
@@ -25,10 +25,10 @@ class Factory(val g: Global, val s: doc.Settings)
   }
 
   def parseComment(s: String): Option[Inline] =
-    strip(parse(s, "", scala.tools.nsc.util.NoPosition))
+    strip(parse(s, "", scala.tools.nsc.util.NoPosition, null))
 
   def createBody(s: String) =
-    parse(s, "", scala.tools.nsc.util.NoPosition).body
+    parse(s, "", scala.tools.nsc.util.NoPosition, null).body
 }
 
 object Test extends Properties("CommentFactory") {
@@ -45,7 +45,7 @@ object Test extends Properties("CommentFactory") {
       with MemberLookup)
   }
 
-  def parse(src: String, dst: Inline) = {
+  def parse(src: String, dst: Inline): Boolean = {
     factory.parseComment(src) match {
         case Some(inline) =>
           inline == dst
diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.scala b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
index 13eacf7..56328ea 100644
--- a/test/scaladoc/scalacheck/HtmlFactoryTest.scala
+++ b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
@@ -2,6 +2,8 @@ import org.scalacheck._
 import org.scalacheck.Prop._
 
 import java.net.{URLClassLoader, URLDecoder}
+import scala.collection.mutable
+import scala.xml.NodeSeq
 
 object XMLUtil {
   import scala.xml._
@@ -34,21 +36,25 @@ object Test extends Properties("HtmlFactory") {
     // this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths
     // does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
     // this test _will_ fail again some time in the future.
-    val paths = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader].getURLs.map(u => URLDecoder.decode(u.getPath))
-    val morepaths = Thread.currentThread.getContextClassLoader.getParent.asInstanceOf[URLClassLoader].getURLs.map(u => URLDecoder.decode(u.getPath))
-    (paths ++ morepaths).mkString(java.io.File.pathSeparator)
+    // Footnote: java.lang.ClassCastException: org.apache.tools.ant.loader.AntClassLoader5 cannot be cast to java.net.URLClassLoader
+    val loader = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader]
+    val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath))
+    paths mkString java.io.File.pathSeparator
   }
 
   def createFactory = {
     val settings = new Settings({Console.err.println(_)})
+    settings.scaladocQuietRun = true
+    settings.nowarn.value = true
     settings.classpath.value = getClasspath
+    settings.docAuthor.value = true
 
     val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
     new DocFactory(reporter, settings)
   }
 
-  def createTemplates(basename: String) = {
-    val result = scala.collection.mutable.Map[String, scala.xml.NodeSeq]()
+  def createTemplates(basename: String): collection.Map[String, NodeSeq] = {
+    val result = mutable.Map[String, NodeSeq]()
 
     createFactory.makeUniverse(Left(List(RESOURCES+basename))) match {
       case Some(universe) => {
@@ -57,7 +63,7 @@ object Test extends Properties("HtmlFactory") {
           result += (page.absoluteLinkTo(page.path) -> page.body)
         })
       }
-      case _ => ;
+      case _ =>
     }
 
     result
@@ -558,12 +564,13 @@ object Test extends Properties("HtmlFactory") {
   property("Comment inheritance: Correct explicit inheritance for override") =
   checkText("explicit-inheritance-override.scala")(
     (Some("InheritDocDerived"),
-     """def function[T](arg1: T, arg2: String): Double
+      """def function[T](arg1: T, arg2: String): Double
         Starting line
         Starting line
         The base comment. And another sentence...
         The base comment. And another sentence...
         Ending line
+        Author: StartAuthor a Scala developer EndAuthor
           T       StartT the type of the first argument EndT
           arg1    Start1 The T term comment End1
           arg2    Start2 The string comment End2
@@ -584,12 +591,13 @@ object Test extends Properties("HtmlFactory") {
   property("Comment inheritance: Correct explicit inheritance for usecase") =
   checkText("explicit-inheritance-usecase.scala")(
     (Some("UseCaseInheritDoc"),
-     """def function[T](arg1: T, arg2: String): Double
+      """def function[T](arg1: T, arg2: String): Double
         [use case] Starting line
         [use case] Starting line
         The base comment. And another sentence...
         The base comment. And another sentence...
         Ending line
+        Author: StartAuthor a Scala developer EndAuthor
           T       StartT the type of the first argument EndT
           arg1    Start1 The T term comment End1
           arg2    Start2 The string comment End2
@@ -658,6 +666,45 @@ object Test extends Properties("HtmlFactory") {
     }
   }
 
+  property("SI-4014: Scaladoc omits @author: no authors") = {
+    val noAuthors = createTemplates("SI-4014_0.scala")("Foo.html")
+
+    noAuthors match {
+      case node: scala.xml.Node => {
+        val s = node.toString
+        ! s.contains("Author")
+      }
+      case _ => false
+    }
+  }
+
+  property("SI-4014: Scaladoc omits @author: one author") = {
+    val oneAuthor = createTemplates("SI-4014_1.scala")("Foo.html")
+
+    oneAuthor match {
+      case node: scala.xml.Node => {
+        val s = node.toString
+        s.contains("<h6>Author:</h6>")
+        s.contains("<p>The Only Author\n</p>")
+      }
+      case _ => false
+    }
+  }
+
+  property("SI-4014: Scaladoc omits @author: two authors") = {
+    val twoAuthors = createTemplates("SI-4014_2.scala")("Foo.html")
+
+    twoAuthors match {
+      case node: scala.xml.Node => {
+        val s = node.toString
+        s.contains("<h6>Authors:</h6>")
+        s.contains("<p>The First Author\n</p>")
+        s.contains("<p>The Second Author\n</p>")
+      }
+      case _ => false
+    }
+  }
+
   {
     val files = createTemplates("basic.scala")
     //println(files)
@@ -680,7 +727,7 @@ object Test extends Properties("HtmlFactory") {
 
     property("package object") = files("com/example/p1/package.html") match {
       case node: scala.xml.Node =>
-        node.toString contains "com.example.p1.package#packageObjectMethod"
+        node.toString contains "com.example.p1#packageObjectMethod"
       case _ => false
     }
 
diff --git a/test/scaladoc/scalacheck/IndexScriptTest.scala b/test/scaladoc/scalacheck/IndexScriptTest.scala
index 5aef38e..b8b9f92 100644
--- a/test/scaladoc/scalacheck/IndexScriptTest.scala
+++ b/test/scaladoc/scalacheck/IndexScriptTest.scala
@@ -8,14 +8,20 @@ import java.net.{URLClassLoader, URLDecoder}
 object Test extends Properties("IndexScript") {
 
   def getClasspath = {
-    val loader = Thread.currentThread.getContextClassLoader
-    val paths = loader.asInstanceOf[URLClassLoader].getURLs
-    val morepaths = loader.getParent.asInstanceOf[URLClassLoader].getURLs
-    (paths ++ morepaths).map(u => URLDecoder.decode(u.getPath)).mkString(java.io.File.pathSeparator)
+    // these things can be tricky
+    // this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths
+    // does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
+    // this test _will_ fail again some time in the future.
+    // Footnote: java.lang.ClassCastException: org.apache.tools.ant.loader.AntClassLoader5 cannot be cast to java.net.URLClassLoader
+    val loader = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader]
+    val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath))
+    paths mkString java.io.File.pathSeparator
   }
 
   val docFactory = {
     val settings = new doc.Settings({Console.err.println(_)})
+    settings.scaladocQuietRun = true
+    settings.nowarn.value = true
     settings.classpath.value = getClasspath
     val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
     new doc.DocFactory(reporter, settings)
@@ -35,7 +41,7 @@ object Test extends Properties("IndexScript") {
     }
 
   property("allPackages") = {
-    createIndexScript("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+    createIndexScript("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
       case Some(index) =>
         index.allPackages.map(_.toString) == List(
           "scala",
diff --git a/test/scaladoc/scalacheck/IndexTest.scala b/test/scaladoc/scalacheck/IndexTest.scala
index e114767..abc0e5d 100644
--- a/test/scaladoc/scalacheck/IndexTest.scala
+++ b/test/scaladoc/scalacheck/IndexTest.scala
@@ -12,19 +12,19 @@ object Test extends Properties("Index") {
     // this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths
     // does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
     // this test _will_ fail again some time in the future.
-    val paths = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader].getURLs.map(u => URLDecoder.decode(u.getPath))
-    val morepaths = Thread.currentThread.getContextClassLoader.getParent.asInstanceOf[URLClassLoader].getURLs.map(u => URLDecoder.decode(u.getPath))
-    (paths ++ morepaths).mkString(java.io.File.pathSeparator)
+    // Footnote: java.lang.ClassCastException: org.apache.tools.ant.loader.AntClassLoader5 cannot be cast to java.net.URLClassLoader
+    val loader = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader]
+    val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath))
+    paths mkString java.io.File.pathSeparator
   }
 
   val docFactory = {
     val settings = new doc.Settings({Console.err.println(_)})
-
+    settings.scaladocQuietRun = true
+    settings.nowarn.value = true
     settings.classpath.value = getClasspath
-    println(settings.classpath.value)
 
     val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
-
     new doc.DocFactory(reporter, settings)
   }
 
@@ -56,7 +56,7 @@ object Test extends Properties("Index") {
   }
 
   property("path") = {
-    createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+    createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
       case Some(index) =>
         index.path == List("index.html")
       case None => false
@@ -64,14 +64,21 @@ object Test extends Properties("Index") {
   }
 
   property("title") = {
-    createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+    createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
       case Some(index) =>
         index.title == ""
 
       case None => false
     }
   }
+  property("browser contants a script element") = {
+    createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
+      case Some(index) =>
+        (index.browser \ "script").size == 1
 
+      case None => false
+    }
+  }
   property("package objects in index") = {
     createIndex("test/scaladoc/resources/SI-5558.scala") match {
       case Some(index) =>
diff --git a/test/script-tests/jar-manifest/run-test.check b/test/script-tests/jar-manifest/run-test.check
index ef59a6c..546bf6b 100644
--- a/test/script-tests/jar-manifest/run-test.check
+++ b/test/script-tests/jar-manifest/run-test.check
@@ -1,4 +1,4 @@
-Scala code runner version 2.10.0.r26038-b20111121102734 -- Copyright 2002-2011, LAMP/EPFL
+Scala code runner version 2.10.0.r26038-b20111121102734 -- Copyright 2002-2013, LAMP/EPFL
 
 % pwd
 /scala/trunk/test/script-tests/jar-manifest/target
diff --git a/test/support/java-tests.txt b/test/support/java-tests.txt
new file mode 100644
index 0000000..e0a3fdd
--- /dev/null
+++ b/test/support/java-tests.txt
@@ -0,0 +1,97 @@
+test/files/buildmanager/t2280
+test/files/buildmanager/t3045
+test/files/buildmanager/t3133
+test/files/jvm/deprecation
+test/files/jvm/t1143-2
+test/files/jvm/t1342
+test/files/jvm/t1464
+test/files/jvm/t2470
+test/files/jvm/t2570
+test/files/jvm/t2585
+test/files/jvm/t3003
+test/files/jvm/t3415
+test/files/jvm/ticket2163
+test/files/jvm/ticket4283
+test/files/jvm/varargs
+test/files/neg/abstract-class-error
+test/files/neg/java-access-neg
+test/files/neg/primitive-sigs-1
+test/files/neg/protected-static-fail
+test/files/neg/t0673
+test/files/neg/t1548
+test/files/neg/t3663
+test/files/neg/t3757
+test/files/neg/t4851
+test/files/pos/chang
+test/files/pos/ilya
+test/files/pos/ilya2
+test/files/pos/java-access-pos
+test/files/pos/javaReadsSigs
+test/files/pos/protected-static
+test/files/pos/raw-map
+test/files/pos/signatures
+test/files/pos/super
+test/files/pos/t0288
+test/files/pos/t0695
+test/files/pos/t1101
+test/files/pos/t1102
+test/files/pos/t1150
+test/files/pos/t1152
+test/files/pos/t1176
+test/files/pos/t1186
+test/files/pos/t1196
+test/files/pos/t1197
+test/files/pos/t1203
+test/files/pos/t1230
+test/files/pos/t1231
+test/files/pos/t1232
+test/files/pos/t1235
+test/files/pos/t1254
+test/files/pos/t1263
+test/files/pos/t1409
+test/files/pos/t1459
+test/files/pos/t1642
+test/files/pos/t1711
+test/files/pos/t1745
+test/files/pos/t1751
+test/files/pos/t1782
+test/files/pos/t1836
+test/files/pos/t1840
+test/files/pos/t1937
+test/files/pos/t2377
+test/files/pos/t2409
+test/files/pos/t2413
+test/files/pos/t2433
+test/files/pos/t2464
+test/files/pos/t2569
+test/files/pos/t2868
+test/files/pos/t294
+test/files/pos/t2940
+test/files/pos/t2956
+test/files/pos/t3249
+test/files/pos/t3349
+test/files/pos/t3404
+test/files/pos/t3429
+test/files/pos/t3486
+test/files/pos/t3521
+test/files/pos/t3567
+test/files/pos/t3622
+test/files/pos/t3642
+test/files/pos/t3938
+test/files/pos/t3946
+test/files/pos/t4402
+test/files/pos/t4603
+test/files/pos/t4737
+test/files/pos/t5644
+test/files/pos/t5703
+test/files/run/inner-parse
+test/files/run/t1430
+test/files/run/t2296a
+test/files/run/t2296b
+test/files/run/t3452a
+test/files/run/t3452b
+test/files/run/t3897
+test/files/run/t4119
+test/files/run/t4238
+test/files/run/t4317
+test/files/run/t4891
diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh
index 92ef3a0..654ba21 100755
--- a/tools/binary-repo-lib.sh
+++ b/tools/binary-repo-lib.sh
@@ -208,7 +208,7 @@ pullJarFile() {
   local sha1=$(cat ${jar}${desired_ext})
   local jar_dir=$(dirname $jar)
   local jar_name=${jar#$jar_dir/}
-  local version=${sha1% ?$jar_name}
+  local version=${sha1%% *}
   local remote_uri=${version}/${jar#$basedir/}
   echo "Resolving [${remote_uri}]"
   pullJarFileToCache $remote_uri $version
diff --git a/tools/buildcp b/tools/buildcp
deleted file mode 100755
index 766ab81..0000000
--- a/tools/buildcp
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-#
-
-[[ $# -eq 1 ]] || { echo "Usage: $0 <locker|quick|...>"; exit 0; }
-
-dir=$(dirname $0)
-lib=$($dir/abspath $dir/../lib)
-build=$($dir/abspath $dir/../build)
-cp=$($dir/cpof $build/$1/classes):$build/asm/classes
-
-echo $cp:$lib/fjbg.jar:$lib/msil.jar:$lib/forkjoin.jar:$lib/jline.jar:$lib/extra/'*'
diff --git a/tools/compare-java-sigs b/tools/compare-java-sigs
new file mode 100644
index 0000000..99ab775
--- /dev/null
+++ b/tools/compare-java-sigs
@@ -0,0 +1,56 @@
+#!/bin/sh
+#
+# Compare javac -Xprint (i.e. see signatures from java point of view)
+# for the given classes.
+#
+# Sample:
+#
+# % SCALA_HOME=/scala/inst/29 SCALA_BUILD=/scala/inst/3 tools/compare-java-sigs 'scala.Predef$'
+#
+# Comparing javac -Xprint for scala.Predef$ based on '/scala/inst/29' and '/scala/inst/3'
+# 3c3
+# < public final class Predef$ extends scala.LowPriorityImplicits implements scala.ScalaObject {
+# ---
+# > public final class Predef$ extends scala.LowPriorityImplicits {
+# 7d6
+# <   private final scala.SpecializableCompanion AnyRef;
+# 21,22d19
+# <   public scala.SpecializableCompanion AnyRef();
+# <
+# 68a66,67
+# >   public scala.runtime.Nothing$ $qmark$qmark$qmark();
+# >
+# 225c224,226
+# <   public scala.collection.immutable.StringOps augmentString(java.lang.String x);
+# ---
+# >   public scala.runtime.StringFormat any2stringfmt(java.lang.Object x);
+# >
+# >   public java.lang.String augmentString(java.lang.String x);
+# 227c228
+# <   public java.lang.String unaugmentString(scala.collection.immutable.StringOps x);
+# ---
+# >   public java.lang.String unaugmentString(java.lang.String x);
+#
+
+set -e
+
+[[ $# -gt 0 ]] || {
+  echo "Usage: $(basename $0) <class> <class> ..."
+  echo ""
+  echo "# Example usage"
+  echo "SCALA_HOME=/scala/inst/29 SCALA_BUILD=/scala/inst/3 \\"
+  echo "  $(basename $0) scala.Function1 scala.runtime.AbstractFunction1"
+  exit 0
+}
+
+home1=$(cd ${SCALA_HOME:-/scala/inst/3} && pwd)
+home2=$(cd ${SCALA_BUILD:-$(dirname $BASH_SOURCE)/../build/pack} && pwd)
+
+echo "Comparing javac -Xprint for $@ based on '$home1' and '$home2'"
+tmpdir=$(mktemp -dt $(basename $BASH_SOURCE))
+
+cd $tmpdir
+javac -Xprint -cp $home1:$home1/lib/'*' "$@" > before.txt
+javac -Xprint -cp $home2:$home2/lib/'*' "$@" > after.txt
+
+diff before.txt after.txt && echo "No differences in javac -Xprint output."
diff --git a/tools/diffPickled b/tools/diffPickled
deleted file mode 100755
index b4a345d..0000000
--- a/tools/diffPickled
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/sh
-#
-# Shows the difference in pickler output between two variations on a class.
-#
-# If quick and strap are built normally you can run
-#
-#   diffPickled foo.bar.Baz
-#
-# to see any differences between them in that class.
-
-USAGE="Usage: $0 classpath1 classpath2 class"
-TOOLSDIR=`dirname $0`
-BUILDDIR="${TOOLSDIR}/../build"
-QUICKDIR="${BUILDDIR}/quick"
-STRAPDIR="${BUILDDIR}/strap"
-
-CP1=""
-CP2=""
-CLASS=""
-
-if [ $# == 1 ] ; then
-  if [ -e ${QUICKDIR} ] && [ -e ${STRAPDIR} ] ; then
-    CP1=`${TOOLSDIR}/quickcp`
-    CP2=`${TOOLSDIR}/strapcp`
-    CLASS=$1
-  else
-    echo $USAGE
-    echo "(If only one argument is given, $QUICKDIR and $STRAPDIR must exist.)"
-    exit 1
-  fi
-elif [ $# == 3 ] ; then
-  CP1=$1
-  CP2=$2
-  CLASS=$3
-else
-  echo $USAGE
-  exit 1
-fi
-
-TMPDIR="/tmp/scala_pickle_diff${RANDOM}"
-
-if mkdir -m 0700 "$TMPDIR" 2>/dev/null ; then
-  ${TOOLSDIR}/showPickled -cp $CP1 $CLASS > "${TMPDIR}/out1.txt"
-  ${TOOLSDIR}/showPickled -cp $CP2 $CLASS > "${TMPDIR}/out2.txt"
-  diff "${TMPDIR}/out1.txt" "${TMPDIR}/out2.txt"
-  rm -rf ${TMPDIR}
-else
-  echo "Failed to create temporary directory ${TMPDIR}."
-  exit 1
-fi
-
diff --git a/tools/epfl-build b/tools/epfl-build
deleted file mode 100755
index dd66307..0000000
--- a/tools/epfl-build
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env bash
-#
-# builds nightlies
-
-[[ $# -gt 0 ]] || {
-  cat <<EOM
-Usage: $0 <version> [opt opt ...]
-
-  Everything after the version is supplied to scalac and partest.
-  Example:   $0 -Xcheckinit -Ycheck:all
-
-Environment variables:
-  extra_ant_targets   Additional ant targets to run after nightly
-
-EOM
-  exit 0
-}
-
-# version isn't actually used at present.
-scalaVersion="$1" && shift
-scalaArgs="-Dscalac.args=\"$@\" -Dpartest.scalac_opts=\"$@\""
-
-ant all.clean && ./pull-binary-libs.sh
-
-ant $scalaArgs build-opt &&
-ant $scalaArgs nightly &&
-for target in $extra_ant_targets; do ant $target ; done
-# [[ -n "$BUILD_DOCSCOMP" ]] && ant docscomp
diff --git a/tools/epfl-publish b/tools/epfl-publish
deleted file mode 100755
index cdf1882..0000000
--- a/tools/epfl-publish
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env bash
-#
-# publishes nightly build if $publish_to is set in environment.
-#
-
-[[ $# -eq 1 ]] || {
-  cat <<EOM
-Usage: $0 <scala version>
-
-Environment variables:
-  publish_to   rsync destination
-EOM
-  exit 0
-}
-version="$1"
-
-[[ -d dists/archives ]] || {
-  echo "Can't find build, has it completed? No directory at dists/archives"
-  exit 1
-}
-
-if [[ -z $publish_to ]]; then
-  echo "Nothing to publish."
-else
-  echo "Publishing nightly build to $publish_to"
-  # Archive Scala nightly distribution
-  rsync -az --exclude scala-latest-sources.tgz dists/archives/ "$publish_to/distributions"
-  # only publish scaladoc nightly for trunk
-  [[ $version == "master" ]] && rsync -az build/scaladoc/ "$publish_to/docs"
-  # sbaz
-  [[ -d dists/sbaz ]] && rsync -az dists/sbaz/ "$publish_to/sbaz"
-fi
diff --git a/tools/locker_scala b/tools/locker_scala
deleted file mode 100755
index 02d2efc..0000000
--- a/tools/locker_scala
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-#
-
-CP=$($(dirname $BASH_SOURCE)/lockercp)
-
-java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.MainGenericRunner -usejavacp "$@"
diff --git a/tools/locker_scalac b/tools/locker_scalac
deleted file mode 100755
index c4b28b7..0000000
--- a/tools/locker_scalac
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-#
-
-CP=$($(dirname $BASH_SOURCE)/lockercp)
-
-java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.Main -usejavacp "$@"
diff --git a/tools/lockercp b/tools/lockercp
deleted file mode 100755
index 43c72dd..0000000
--- a/tools/lockercp
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-#
-
-$(dirname $0)/buildcp locker
diff --git a/tools/make-release-notes b/tools/make-release-notes
deleted file mode 100755
index dcd206f..0000000
--- a/tools/make-release-notes
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env bash
-
-# This tool is used to build a *scaffold* of a release note that you can fill in details with before posting to the list.
-# It aims to provide *all* the information you need, and probably need to prune it before releasing.
-# Author: jsuereth
-
-fixMessages() {
-  local tag1="$1"
-  local tag2="$2"
-  git log $tag1..$tag2 "--format=format: * %h - %s" --no-merges --grep "SI-"
-}
-
-allcommitMessages() {
-  local tag1="$1"
-  local tag2="$2"
-  git log $tag1..$tag2 "--format=format: * %h - %s" --no-merges
-}
-
-authors() {
- local tag1="$1"
- local tag2="$2"
- git log $tag1..$tag2 --format=format:%an --no-merges | sort | uniq -c | sort -rh
-}
-
-
-message() {
-  local tag1="$1"
-  local tag2="$2"
-
-  echo "A new release of Scala is available!  Please point your build tools at ${tag2#v}"
-  echo 
-  echo "Here's a list of the issues that have been fixed since ${tag1#v}: "
-  fixMessages "$tag1" "$tag2"
-  echo
-  echo
-  echo "Special thanks to all the contributions!"
-  echo "------- --------------------------------"
-  authors "$tag1" "$tag2"
-  echo "------- --------------------------------"
-  echo
-  echo
-  echo "Here's a complete list of changes:"
-  allcommitMessages "$tag1" "$tag2"
-}
-
-
-message "$1" "$2"
-
-
diff --git a/tools/packcp b/tools/packcp
deleted file mode 100755
index ecc7ee1..0000000
--- a/tools/packcp
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-#
-
-dir=$(dirname $0)
-$dir/cpof $dir/../build/pack/lib
diff --git a/tools/partest-ack b/tools/partest-ack
new file mode 100755
index 0000000..ab722e3
--- /dev/null
+++ b/tools/partest-ack
@@ -0,0 +1,131 @@
+#!/usr/bin/env bash
+#
+# wrapper around partest for fine-grained test selection via ack
+
+declare quiet failed update partest_debug
+declare cotouched since sortCommand
+declare -a ack_args partest_args scalac_args
+declare -r standard_ack_args="--noenv -s --java --scala --type-add=scala:ext:flags,check --files-with-matches"
+
+partest_args=( --show-diff )
+bindir="$(cd "$(dirname "$0")" && pwd)"
+base="$bindir/.."
+cd "$base" || { echo "Could not change to base directory $base" && exit 1; }
+filesdir="test/files"
+sortCommand="sort -u"
+partestPaths="$bindir/partest-paths"
+
+[[ -x "$partestPaths" ]] || { echo "Cannot find partest-paths in $partestPaths" && exit 1; }
+
+[[ $# -gt 0 ]] || {
+  cat <<EOM
+Usage: $0 <regex> [-dfquvp] [ack options]
+
+  -f          pass --failed to partest
+  -q          pass --terse to partest
+  -u          pass --update-check to partest
+  -p <path>   select tests appearing in commits where <path> was also modified
+  -s <time>   select tests touched since <time> (git format, e.g. 1.month.ago)
+  -r          run tests in random order
+
+Given a regular expression (and optionally, any arguments accepted by ack)
+runs all the tests for which any associated file matches the regex.  Associated
+files include .check and .flags files.  Tests in directories will match if any
+file matches.  A file can match the regex by its contents or by its name.
+
+You must have ack version 2.12+ installed: http://beyondgrep.com/ack-2.12-single-file
+
+Examples:
+
+  > tools/partest-ack 'case (class|object) Baz'
+  % testsWithMatchingPaths         ...  0
+  % testsWithMatchingCode          ...  3
+  # 3 tests to run.
+
+  > tools/partest-ack -s 12.hours.ago
+  % testsTouchedSinceGitTime       ...  33
+  # 33 tests to run.
+
+  > tools/partest-ack -p src/library/scala/Enumeration.scala
+  % testsModifiedInSameCommit      ...  80
+  # 80 tests to run.
+
+  > tools/partest-ack -f
+  % tests-which-failed             ...  42
+  # 42 tests to run.
+
+  > tools/partest-ack "kinds of the type arguments"
+  % testsWithMatchingPaths         ...  0
+  % testsWithMatchingCode          ...  6
+  # 6 tests to run.
+EOM
+
+  exit 0
+}
+
+while getopts :fuvdrp:s: opt; do
+  case $opt in
+    f) failed=true && partest_args+=" --failed" ;;
+    p) cotouched="$cotouched $OPTARG" ;;
+    r) sortCommand="randomSort" ;;
+    s) since="$OPTARG" ;;
+    q) partest_args+=" --terse" ;;
+    u) partest_args+=" --update-check" ;;
+    v) partest_args+=" --verbose" ;;
+    :) echo "Option -$OPTARG requires an argument." >&2 ;;
+    *) ack_args+="-$OPTARG" ;;  # don't drop unknown args, assume they're for ack
+  esac
+done
+
+shift $((OPTIND-1))
+ack_args=( "${ack_args[@]}" "$@" )
+
+# These methods all just create paths which may or may not be tests
+# all are filtered through partest-paths which limits the output to actual tests
+regexPathTests () { find "$filesdir" | ack --noenv "$@"; }
+failedTests ()    { for p in $(find "$filesdir" -name '*.log'); do p1=${p%.log} && p2=${p1%-*} && echo "$p2"; done; }
+sinceTests()      { git log --since="$@" --name-only --pretty="format:" -- "$filesdir"; }
+regexCodeTests () { ack $standard_ack_args "$@" -- "$filesdir"; }
+sameCommitTests() { for rev in $(git rev-list HEAD -- "$@"); do git --no-pager show --pretty="format:" --name-only "$rev" -- "$filesdir"; done; }
+
+countStdout () {
+  local -i count=0
+  while read line; do
+    printf "$line\n" && count+=1
+  done
+
+  printf >&2 " $count\n"
+}
+
+randomSort () {
+  sort -u | while read line; do echo "$RANDOM $line"; done | sort | sed -E 's/^[0-9]+ //'
+}
+
+testRun () {
+  local description="$1" && shift
+  printf >&2 "%% tests %-25s ... " "$description"
+  "$@" | "$partestPaths" | countStdout | egrep -v '^[ ]*$'
+}
+
+allMatches() {
+  [[ -n $ack_args ]]    && testRun "with matching paths" regexPathTests "${ack_args[@]}"
+  [[ -n $ack_args ]]    && testRun "with matching code" regexCodeTests "${ack_args[@]}"
+  [[ -n $cotouched ]]   && testRun "modified in same commit" sameCommitTests $cotouched
+  [[ -n $since ]]       && testRun "modified since time" sinceTests "$since"
+  [[ -n $failed ]]      && testRun "failed on last run" failedTests
+}
+
+paths=$(allMatches | $sortCommand)
+
+[[ -z $paths ]] && [[ -z $failed ]] && echo >&2 "No matching tests." && exit 0;
+
+count=$(echo $(echo "$paths" | wc -w))
+[[ "$count" -eq 0 ]] && echo >&2 "No tests to run." && exit 0;
+
+# Output a command line which will re-run these same tests.
+echo "# $count tests to run."
+printf "%-52s %s\n" "$base/test/partest ${partest_args[*]}" "\\"
+for path in $paths; do  printf "  %-50s %s\n" "$path" "\\"; done
+echo ""
+
+test/partest ${partest_args[*]} $paths
diff --git a/tools/partest-paths b/tools/partest-paths
new file mode 100755
index 0000000..6ce403a
--- /dev/null
+++ b/tools/partest-paths
@@ -0,0 +1,27 @@
+#!/bin/sh
+#
+# Given a list of files on stdin, translates them into a set
+# of tests covering those files. That means paths which aren't
+# part of a test are dropped and the rest are rewritten to the
+# primary test path, with duplicates dropped.
+
+cd "$(dirname "$0")/.."
+
+# We have to enumerate good test dirs since partest chokes and fails
+# on continuations, bench, etc. tests
+pathRegex="test/files/(pos|neg|jvm|run|scalap|presentation)/[^/.]+([.]scala)?\$"
+
+# Echo the argument only if it matches our idea of a test and exists.
+isPath () { [[ "$1" =~ $pathRegex ]] && [[ -e "$1" ]]; }
+
+# Filter stdin down to actual test paths.
+asTestPaths() {
+  while read -r p; do
+    # Matched file at the standard test depth
+    p1="${p%.*}" && isPath "$p1.scala" && echo "$p1.scala" && continue
+    # Or, matched file may be in a test subdirectory, so strip the last path segment and check
+    p2="${p1%/*}" && isPath "$p2" && echo "$p2" && continue
+  done
+}
+
+asTestPaths | sort -u
diff --git a/tools/quick_scala b/tools/quick_scala
deleted file mode 100755
index 16938dd..0000000
--- a/tools/quick_scala
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-#
-
-CP=$($(dirname $BASH_SOURCE)/quickcp)
-
-java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.MainGenericRunner -usejavacp "$@"
diff --git a/tools/quick_scalac b/tools/quick_scalac
deleted file mode 100755
index 1b9a036..0000000
--- a/tools/quick_scalac
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-#
-
-CP=$($(dirname $BASH_SOURCE)/quickcp)
-
-java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.Main -usejavacp "$@"
diff --git a/tools/quickcp b/tools/quickcp
deleted file mode 100755
index 25d46e5..0000000
--- a/tools/quickcp
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-#
-
-$(dirname $0)/buildcp quick
diff --git a/tools/remotetest b/tools/remotetest
deleted file mode 100755
index fb89794..0000000
--- a/tools/remotetest
+++ /dev/null
@@ -1,230 +0,0 @@
-
-
-# 
-# Remote build&test script.
-# Author: Aleksandar Prokopec
-# 
-
-
-SCRIPTNAME="..:: RemoteTest ::.."
-DESC="This script pushes the current git repo to a remote bare repo. \
-It then checks out the source tree in a workspace repo and starts the\
- build and all the tests. It can also initialize the remote bare repo\
- and the workspace repo. It assumes that the current repo refspec has\
- been set for the remote bare repository - .git/config of the current\
- repo must have a remote called <server> and the corresponding master\
- branch. Git should, naturally, be installed on both systems.\
- "
-USAGE="   Usage: remotetest (--help|--init|--all|--incr|--clear) <user> <server> <bare-repo-path> <workspace-repo-path> [logfile]"
-
-
-
-function title()
-{
-  echo
-  echo $SCRIPTNAME
-  echo
-}
-
-function usage()
-{
-  title
-  echo $DESC
-  echo 
-  echo $USAGE
-}
-
-
-function error()
-{
-  echo $1
-  echo "Failed."
-  exit 1
-}
-
-function success()
-{
-  echo "Success!"
-  exit 0
-}
-
-function instruct()
-{
-  usage
-  error
-}
-
-function help()
-{
-  usage
-  echo
-  echo "Make sure you have git installed on both your computer and the server, as well as java and ant."
-  echo "Add your ssh key to the list of authorized keys on the server (see .ssh dir in your home). This is not required, but makes life easier, as you will have to answer fewer passwords."
-  echo "To initialize the remote repositories on a server 'server.url.com', see the following example:"
-  echo
-  echo "> tools/remotetest --init jack server.url.com ~jack/git-repos-dir/scala ~jack/tmp-build-dir/scala"
-  echo
-  echo "If you decide you no longer want this remote repository to be tracked (this also tries to delete remote repos on the server):"
-  echo
-  echo "> tools/remotetest --clear jack server.url.com ~jack/git-repos-dir/scala ~jack/tmp-build-dir/scala"
-  echo 
-  echo "Once the initialization is successful, simply run: "
-  echo
-  echo "> tools/remotetest --all jack server.url.com ~jack/git-repos-dir/scala ~jack/tmp-build-dir/scala"
-  echo 
-  echo "Optionally, build and test results will be saved into the logfile on the server (an additional, last argument). Be aware that problems arise should you push an ammended commit over a previously pushed commit - this has nothing to do with this script per se."
-  echo
-  echo "  Example workflow:"
-  echo 
-  echo "          -------------------          "
-  echo "         |                   |         "
-  echo "         V                   |         "
-  echo "   init ---> [ all | incr ] ---> clear "
-  echo
-  echo "Complete argument list:"
-  echo "  --help       prints this help"
-  echo "  --init       initializes remote repos"
-  echo "  --clear      deletes remote repos and removes the remote repo reference from local git repo"
-  echo "  --all        pushes the newest version, checks it out at the server, cleans all, builds and tests"
-  echo "  --incr       incremental does the same as --all, but does not clean the directory before testing"
-}
-
-
-if [ $# -lt 1 ]
-then
-  instruct
-fi
-
-
-
-if [ $# -lt 5 ]
-then
-  if [[ $1 = "--help" ]]
-  then
-    help
-    success
-  else
-    instruct
-  fi
-fi
-
-COMMAND=$1
-USER=$2
-LOCATION=$3
-BAREREPO=$4
-WORKREPO=$5
-LOGFILE=$6
-
-
-if [[ $COMMAND = "--help" ]]
-then
-  help
-  success
-fi
-
-
-
-
-# 
-# Init
-# 
-
-
-if [[ $COMMAND = "--init" ]]
-then
-  echo "Initializing."
-  
-  # init bare repo
-  ssh $USER@$LOCATION "mkdir $BAREREPO"
-  ssh $USER@$LOCATION "cd $BAREREPO; git init; git config --bool core.bare true"
-  if [ $? -ne 0 ]
-  then
-    error "Could not initialize bare repo."
-  fi
-  
-  # add remote bare repo
-  git remote add $LOCATION $USER@$LOCATION:$BAREREPO
-  
-  # push to bare repo
-  git push $LOCATION master
-  if [ $? -ne 0 ]
-  then
-    error "Could not push to bare repo."
-  fi
-  
-  # init and checkout work repo
-  ssh $USER@$LOCATION "git clone $BAREREPO $WORKREPO"
-  if [ $? -ne 0 ]
-  then
-    error "Could not init working repo."
-  fi
-  
-  success
-fi
-
-
-
-
-# 
-# Clear.
-# 
-
-
-if [[ $COMMAND = "--clear" ]]
-then
-  echo "Clearing remote and deleting remote repos."
-  git remote rm $LOCATION
-  ssh $USER@$LOCATION "rm -rf $BAREREPO"
-  ssh $USER@$LOCATION "cd $WORKREPO; ant all.clean; rm -rf $WORKREPO"
-  
-  echo "Removed remote repo $LOCATION."
-  success
-fi
-
-
-
-
-# 
-# Test.
-# 
-
-
-if [[ $COMMAND = "--all" || $COMMAND = "--incr" ]]
-then
-  # proceed
-  echo "Starting remote build and testing."
-else
-  error "Unrecognized command $COMMAND."
-fi
-
-# if it's not the init operation, proceed normally
-# push to remote bare repo
-git push $LOCATION master
-if [ $? -ne 0 ]
-then
-  error "Could not push to bare repo - push from local machine failed."
-fi
-
-# remotely checkout the repo
-ssh $USER@$LOCATION "cd $WORKREPO; git pull origin master"
-if [ $? -ne 0 ]
-then
-  error "Could not remotely pull from bare repo to work repo."
-fi
-
-# clean the build dir if not incremental
-if [[ $COMMAND = "--all" ]]
-then
-  ssh $USER@$LOCATION "cd $WORKREPO; ant all.clean"
-fi
-
-# run the build and tests
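-# ANT_OPTS raises the JVM heap (-Xmx) and PermGen (-XX:MaxPermSize) limits so the full "ant nightly" build and test run has enough memory.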
-SET_ANT_OPTS='export ANT_OPTS="-XX:MaxPermSize=192M -Xmx1536m"; echo $ANT_OPTS'
-echo "Set ant options command: $SET_ANT_OPTS"
-ssh $USER@$LOCATION "cd $WORKREPO; $SET_ANT_OPTS; ant nightly | tee -a $LOGFILE"
-
-success
-
-
-
-
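For orientation, the --all path of the script above boils down to a push to the bare repo, a pull into the workspace repo, a clean, and an "ant nightly" run. A rough manual equivalent, reusing the jack / server.url.com example values from the script's help text (build.log is just an illustrative name for the optional logfile):

    git push server.url.com master
    ssh jack@server.url.com "cd ~jack/tmp-build-dir/scala; git pull origin master"
    ssh jack@server.url.com "cd ~jack/tmp-build-dir/scala; ant all.clean"
    ssh jack@server.url.com "cd ~jack/tmp-build-dir/scala; export ANT_OPTS='-XX:MaxPermSize=192M -Xmx1536m'; ant nightly | tee -a build.log"
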
diff --git a/tools/showPickled b/tools/showPickled
deleted file mode 100755
index 27421c3..0000000
--- a/tools/showPickled
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/sh
-#
-# Shows the pickled scala data in a classfile.
-
-if [ $# -eq 0 ] ; then
-  echo "Usage: $0 [--bare] [-cp classpath] <class*>"
-  exit 1
-fi
-
-TOOLSDIR=`dirname $0`
-CPOF="$TOOLSDIR/cpof"
-
-PACK="$TOOLSDIR/../build/pack/lib"
-QUICK="$TOOLSDIR/../build/quick/classes"
-STARR="$TOOLSDIR/../lib"
-CP=""
-
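-# Classpath preference order: the packed build jars, then the quick classes, then the bootstrap (starr) libraries.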
-if [ -f "${PACK}/scala-library.jar" ] ; then
-  CP=`${TOOLSDIR}/packcp`
-elif [ -d "${QUICK}/library" ] ; then
-  CP=`${TOOLSDIR}/quickcp`
-else
-  CP=`${TOOLSDIR}/starrcp`
-fi
-
-if [ "$1" == "-cp" ] ; then
-  shift
-  CP="${1}:${CP}"
-  shift
-fi
-
-java -cp "$CP" scala.tools.nsc.util.ShowPickled "$@"
diff --git a/tools/starr_scala b/tools/starr_scala
deleted file mode 100755
index 9b0fb60..0000000
--- a/tools/starr_scala
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-#
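-# Runs the Scala generic runner (scala.tools.nsc.MainGenericRunner) on the bootstrap (starr) classpath produced by tools/starrcp.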
-
-CP=$($(dirname $BASH_SOURCE)/starrcp)
-
-java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.MainGenericRunner -usejavacp "$@"
diff --git a/tools/starr_scalac b/tools/starr_scalac
deleted file mode 100755
index 972eeaf..0000000
--- a/tools/starr_scalac
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-#
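-# Runs the bootstrap (starr) compiler (scala.tools.nsc.Main) on the classpath produced by tools/starrcp.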
-
-CP=$($(dirname $BASH_SOURCE)/starrcp)
-
-java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.Main -usejavacp "$@"
diff --git a/tools/starrcp b/tools/starrcp
deleted file mode 100755
index 76f40fd..0000000
--- a/tools/starrcp
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-#
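-# Delegates to tools/cpof to assemble a classpath from the bootstrap libraries under lib/.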
-
-dir=$(dirname $0)
-$dir/cpof $dir/../lib
diff --git a/tools/strapcp b/tools/strapcp
deleted file mode 100755
index 6a46b4e..0000000
--- a/tools/strapcp
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-#
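-# Prints the classpath for the strap build stage: the strap classes plus fjbg.jar and the locally built ASM classes.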
-
-dir=$(dirname $0)
-strap="$dir/../build/strap/classes"
-[[ -d $strap ]] || { echo "Error: no directory at $strap"; exit 1; }
-
-cp=$($dir/cpof $strap)
-fjbg=$($dir/abspath $dir/../lib/fjbg.jar)
-asm=$($dir/abspath $dir/../build/asm/classes)
-
-echo $cp:$fjbg:$asm
diff --git a/tools/test-renamer b/tools/test-renamer
deleted file mode 100755
index 5a7fc3d..0000000
--- a/tools/test-renamer
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env bash
-#
-# Despite its shameful hackiness, checked in for posterity.
-# It'll live on forever in the git history; then I can remove it.
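-# Renames test files and directories under pending/ and disabled/ from the old bugNNNN scheme to tNNNN, rewriting bugNNNN references inside the affected files along the way.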
-
-# set -e
-shopt -s nullglob
-
-cd $(dirname $0)/../test
-
-rename_pattern='^.*/bug[0-9]+.*?(\.scala)?$'
-
-targets ()
-{
-  ls -d pending/*/* disabled/*/* | egrep "$rename_pattern"
-}
-
-showRun ()
-{
-  echo "$@"
-  "$@"
-}
-
-for path in $(targets); do
-  if [[ -f "$path" ]]; then
-    # echo "$path"
-    dir=$(dirname "$path")
-    file=$(basename "$path")
-    base=${file%%.scala}
-    num=${base##bug}
-
-    (cd "$dir" && 
-      for file in ${base}.*; do
-        ext=${file##*.}
-        newname="t${num}.${ext}"
-        
-        if [[ -e "$newname" ]]; then
-          echo "Hey, $newname already exists."
-        else
-          showRun perl -pi -e "s/bug$num\b/t$num/g;" "$file"
-          showRun mv "$file" "$newname"
-        fi
-      done
-    )
-  fi
-  
-  if [[ -d "$path" ]]; then
-    dir=$(dirname "$path")
-    file=$(basename "$path")
-    base="$file"
-    num=${base##bug}
-    
-    (cd "$dir" &&
-      for file in $file ${file}.*; do
-        ext=${file##*.}
-        if [[ "$ext" != "$file" ]]; then
-          newname="t${num}.${ext}"
-        else
-          newname="t${num}"
-          for file0 in ${file}/*; do
-            showRun perl -pi -e "s/bug$num\b/t$num/g;" "$file0"
-          done
-        fi
-        
-        if [[ -e "$newname" ]]; then
-          echo "Hey, $newname already exists."
-        else
-          if [[ -f "$file" ]]; then
-            showRun perl -pi -e "s/bug$num\b/t$num/g;" "$file"
-          fi
-          showRun mv "$file" "$newname"
-        fi
-      done
-    )
-    
-  fi
-
-done
-# 
-# for d in files/*/*; do
-#   [[ -d "$d" ]] && do_dir "$d"
-# done
diff --git a/tools/updatescalacheck b/tools/updatescalacheck
deleted file mode 100755
index c4b80dd..0000000
--- a/tools/updatescalacheck
+++ /dev/null
@@ -1,130 +0,0 @@
-#!/usr/bin/env bash
-# 
-# 
-# ScalaCheck update script.
-# 
-# 
-
-
-# vars
-TMPFILE=`mktemp`
-SCALACHECK_REL_DIR=src/scalacheck
-DESC="Updates ScalaCheck sources from ScalaCheck nightly branch."
-WARN="Make sure your repository checkout is clean. Will remove and delete existing ScalaCheck source in <path-to-scala-repo>/$SCALACHECK_REL_DIR!"
-USAGE="  Usage: updatescalacheck <path-to-scala-repo>"
-
-
-# functions
-function error() {
-  rm $TMPFILE
-  exit 1
-}
-
-function success() {
-  rm $TMPFILE
-  exit 0
-}
-
-
-
-# check num args
-if [ $# -ne 1 ]
-then
-  echo $DESC
-  echo $WARN
-  echo "Must provide path to scala repo checkout dir."
-  echo $USAGE
-  error
-fi
-
-if [[ $1 = "--help" ]]
-then
-  echo $DESC
-  echo $WARN
-  echo $USAGE
-  error
-fi
-
-if [ ! -d $1 ]
-then
-  echo "The folder $1 does not exist."
-  error
-fi
-
-# go to scala dir
-SCALA_DIR=$1
-cd $SCALA_DIR
-
-# 
-# check if checkout is svn and up to date
-# otherwise check if it's git and up to date
-# 
-if [ -d .svn ] || [ -d _svn ]
-then
-  #
-  # svn repo - check if clean
-  #
-  svn status > $TMPFILE
-  if [ $? -ne 0 ]
-  then
-    echo "Detected .svn dir, but svn status returns an error. Check if this is really an .svn repo."
-    error
-  fi
-  echo "svn status output: "
-  cat $TMPFILE
-  echo "grep found: "
-  cat $TMPFILE | grep "^\(?\|A\|D\|M\|C\|!\|~\)"
-  GREPRETCODE=$?
-  echo "grep return code: $GREPRETCODE"
-  if [ $GREPRETCODE -eq 0 ]
-  then
-    echo "Working directory does not seem to be clean. Do a clean checkout and try again."
-    error
-  fi
-  echo "Checkout appears to be clean."
-elif [ -d .git ]
-then
-  #
-  # git repo - check if clean
-  #
-  git status --porcelain > $TMPFILE
-  if [ $? -ne 0 ]
-  then
-    echo "Detected .git dir, but git status returns an error. Check if this is really a .git repo."
-    error
-  fi
-  echo "git status output: "
-  cat $TMPFILE
-  echo "grep found: "
-  cat $TMPFILE | grep "^\(A\|M\|D\|R\|C\|U\)"
-  GREPRETCODE=$?
-  echo "grep return code: $GREPRETCODE"
-  if [ $GREPRETCODE -eq 0 ]
-  then
-    echo "Working directory does not seem to be clean. Do a clean checkout and try again."
-    error
-  fi
-  echo "Checkout appears to be clean."
-else
-  # no repo detected
-  echo "The directory $SCALA_DIR does not seem to be a repository."
-  error
-fi
-
-# check if ScalaCheck source dir exists
-if [ ! -d $SCALACHECK_REL_DIR ]
-then
-  echo "ScalaCheck source dir does not seem to exist in: $SCALA_DIR/$SCALACHECK_REL_DIR"
-  echo "Please create one and try again."
-  error
-fi
-
-# go to ScalaCheck source dir
-cd $SCALACHECK_REL_DIR
-
-# update sources
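-# --force allows exporting into the existing, non-empty source directory, overwriting the files already there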
-svn export --force https://scalacheck.googlecode.com/svn/branches/scalanightly/src/main/scala .
-
-# remove unneeded class
-rm org/scalacheck/ScalaCheckFramework.scala
-
-success
diff --git a/versions.properties b/versions.properties
new file mode 100644
index 0000000..4e0485f
--- /dev/null
+++ b/versions.properties
@@ -0,0 +1,34 @@
+#Fri, 04 Apr 2014 23:11:56 +0200
+# NOTE: this file determines the content of the scala-distribution
+# via scala-dist-pom.xml and scala-library-all-pom.xml
+# when adding new properties that influence a release,
+# also add them to the update.versions mechanism in build.xml,
+# which is used by scala-release-2.11.x in scala/jenkins-scripts
+starr.version=2.11.0-RC4
+starr.use.released=1
+
+# These are the versions of the modules that go with this release.
+# These properties are used during PR validation and in dbuild builds.
+
+# e.g. 2.11.0-RC1, 2.11
+scala.binary.version=2.11.0-RC4
+# e.g. 2.11.0-RC1, 2.11.0, 2.11.1-RC1, 2.11.1
+# this defines the dependency on scala-continuations-plugin in scala-dist's pom
+scala.full.version=2.11.0-RC4
+
+# external modules shipped with distribution, as specified by scala-library-all's pom
+scala-xml.version.number=1.0.1
+scala-parser-combinators.version.number=1.0.1
+scala-continuations-plugin.version.number=1.0.1
+scala-continuations-library.version.number=1.0.1
+scala-swing.version.number=1.0.1
+akka-actor.version.number=2.3.2
+actors-migration.version.number=1.1.0
+
+# external modules, used internally (not shipped)
+partest.version.number=1.0.0
+scalacheck.version.number=1.11.3
+
+# TODO: modularize the compiler
+#scala-compiler-doc.version.number=1.0.0-RC1
+#scala-compiler-interactive.version.number=1.0.0-RC1
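The file uses plain key=value properties syntax, so any tooling can read it. As a minimal illustrative sketch (the prop helper below is hypothetical and not part of the build), a shell script could pick out a version like this:

    # hypothetical helper, not part of the build
    prop() { grep "^$1=" versions.properties | cut -d= -f2-; }
    prop starr.version                # 2.11.0-RC4
    prop scala-xml.version.number     # 1.0.1
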

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-java/scala.git


